diff --git a/.core_files.yaml b/.core_files.yaml index 3f92ed87a84..2624c4432be 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -6,6 +6,7 @@ core: &core - homeassistant/helpers/** - homeassistant/package_constraints.txt - homeassistant/util/** + - mypy.ini - pyproject.toml - requirements.txt - setup.cfg @@ -14,6 +15,7 @@ core: &core base_platforms: &base_platforms - homeassistant/components/air_quality/** - homeassistant/components/alarm_control_panel/** + - homeassistant/components/assist_satellite/** - homeassistant/components/binary_sensor/** - homeassistant/components/button/** - homeassistant/components/calendar/** @@ -61,6 +63,7 @@ components: &components - homeassistant/components/auth/** - homeassistant/components/automation/** - homeassistant/components/backup/** + - homeassistant/components/blueprint/** - homeassistant/components/bluetooth/** - homeassistant/components/cloud/** - homeassistant/components/config/** @@ -77,6 +80,7 @@ components: &components - homeassistant/components/group/** - homeassistant/components/hassio/** - homeassistant/components/homeassistant/** + - homeassistant/components/homeassistant_hardware/** - homeassistant/components/http/** - homeassistant/components/image/** - homeassistant/components/input_boolean/** @@ -109,6 +113,7 @@ components: &components - homeassistant/components/tag/** - homeassistant/components/template/** - homeassistant/components/timer/** + - homeassistant/components/trace/** - homeassistant/components/usb/** - homeassistant/components/webhook/** - homeassistant/components/websocket_api/** @@ -124,9 +129,13 @@ tests: &tests - tests/*.py - tests/auth/** - tests/backports/** + - tests/components/conftest.py + - tests/components/diagnostics/** - tests/components/history/** + - tests/components/light/common.py - tests/components/logbook/** - tests/components/recorder/** + - tests/components/repairs/** - tests/components/sensor/** - tests/hassfest/** - tests/helpers/** diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 2b15a65ff1d..44c38afdec6 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,7 +2,7 @@ "name": "Home Assistant Dev", "context": "..", "dockerFile": "../Dockerfile.dev", - "postCreateCommand": "script/setup", + "postCreateCommand": "git config --global --add safe.directory ${containerWorkspaceFolder} && script/setup", "postStartCommand": "script/bootstrap", "containerEnv": { "PYTHONASYNCIODEBUG": "1" @@ -12,7 +12,12 @@ }, // Port 5683 udp is used by Shelly integration "appPort": ["8123:8123", "5683:5683/udp"], - "runArgs": ["-e", "GIT_EDITOR=code --wait"], + "runArgs": [ + "-e", + "GIT_EDITOR=code --wait", + "--security-opt", + "label=disable" + ], "customizations": { "vscode": { "extensions": [ @@ -53,7 +58,13 @@ ], "[python]": { "editor.defaultFormatter": "charliermarsh.ruff" - } + }, + "json.schemas": [ + { + "fileMatch": ["homeassistant/components/*/manifest.json"], + "url": "./script/json_schemas/manifest_schema.json" + } + ] } } } diff --git a/.dockerignore b/.dockerignore index 7fde7f33fa5..cf975f4215f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -7,6 +7,7 @@ docs # Development .devcontainer .vscode +.tool-versions # Test related files tests diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index ad3205c51c8..9deb34d20e9 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,2 +1 @@ -custom: https://www.nabucasa.com -github: balloob +custom: https://www.openhomefoundation.org diff --git a/.github/workflows/builder.yml 
b/.github/workflows/builder.yml index e1f95c5c0a9..8f419cca1da 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -10,7 +10,7 @@ on: env: BUILD_TYPE: core - DEFAULT_PYTHON: "3.12" + DEFAULT_PYTHON: "3.13" PIP_TIMEOUT: 60 UV_HTTP_TIMEOUT: 60 UV_SYSTEM_PYTHON: "true" @@ -27,12 +27,12 @@ jobs: publish: ${{ steps.version.outputs.publish }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 with: fetch-depth: 0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: translations path: translations.tar.gz @@ -90,11 +90,11 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Download nightly wheels of frontend if: needs.init.outputs.channel == 'dev' - uses: dawidd6/action-download-artifact@v6 + uses: dawidd6/action-download-artifact@v7 with: github_token: ${{secrets.GITHUB_TOKEN}} repo: home-assistant/frontend @@ -105,7 +105,7 @@ jobs: - name: Download nightly wheels of intents if: needs.init.outputs.channel == 'dev' - uses: dawidd6/action-download-artifact@v6 + uses: dawidd6/action-download-artifact@v7 with: github_token: ${{secrets.GITHUB_TOKEN}} repo: home-assistant/intents-package @@ -116,7 +116,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} if: needs.init.outputs.channel == 'dev' - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -126,7 +126,7 @@ jobs: env: UV_PRERELEASE: allow run: | - python3 -m pip install "$(grep '^uv' < requirements_test.txt)" + python3 -m pip install "$(grep '^uv' < requirements.txt)" uv pip install packaging tomli uv pip install . 
python3 script/version_bump.py nightly --set-nightly-version "${{ needs.init.outputs.version }}" @@ -242,7 +242,7 @@ jobs: - green steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set build additional args run: | @@ -279,7 +279,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Initialize git uses: home-assistant/actions/helpers/git-init@master @@ -316,14 +316,15 @@ jobs: packages: write id-token: write strategy: + fail-fast: false matrix: registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"] steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Install Cosign - uses: sigstore/cosign-installer@v3.6.0 + uses: sigstore/cosign-installer@v3.7.0 with: cosign-release: "v2.2.3" @@ -450,10 +451,10 @@ jobs: if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true' steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -482,3 +483,56 @@ jobs: export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}" twine upload dist/* --skip-existing + + hassfest-image: + name: Build and test hassfest image + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + attestations: write + id-token: write + needs: ["init"] + if: github.repository_owner == 'home-assistant' + env: + HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest + HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }} + steps: + - name: Checkout repository + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - name: Login to GitHub Container Registry + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build Docker image + uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0 + with: + context: . # So action will not pull the repository again + file: ./script/hassfest/docker/Dockerfile + load: true + tags: ${{ env.HASSFEST_IMAGE_TAG }} + + - name: Run hassfest against core + run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace + + - name: Push Docker image + if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' + id: push + uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0 + with: + context: . 
# So action will not pull the repository again + file: ./script/hassfest/docker/Dockerfile + push: true + tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest + + - name: Generate artifact attestation + if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' + uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0 + with: + subject-name: ${{ env.HASSFEST_IMAGE_NAME }} + subject-digest: ${{ steps.push.outputs.digest }} + push-to-registry: true diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f6ffa439d9b..9d6f207382d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -37,12 +37,12 @@ on: type: boolean env: - CACHE_VERSION: 10 + CACHE_VERSION: 11 UV_CACHE_VERSION: 1 - MYPY_CACHE_VERSION: 8 - HA_SHORT_VERSION: "2024.9" + MYPY_CACHE_VERSION: 9 + HA_SHORT_VERSION: "2025.1" DEFAULT_PYTHON: "3.12" - ALL_PYTHON_VERSIONS: "['3.12']" + ALL_PYTHON_VERSIONS: "['3.12', '3.13']" # 10.3 is the oldest supported version # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022) # 10.6 is the current long-term-support @@ -93,7 +93,7 @@ jobs: runs-on: ubuntu-24.04 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Generate partial Python venv restore key id: generate_python_cache_key run: | @@ -231,16 +231,16 @@ jobs: - info steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.0.2 + uses: actions/cache@v4.2.0 with: path: venv key: >- @@ -252,11 +252,11 @@ jobs: python -m venv venv . 
venv/bin/activate python --version - pip install "$(grep '^uv' < requirements_test.txt)" + pip install "$(grep '^uv' < requirements.txt)" uv pip install "$(cat requirements_test.txt | grep pre-commit)" - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache@v4.0.2 + uses: actions/cache@v4.2.0 with: path: ${{ env.PRE_COMMIT_CACHE }} lookup-only: true @@ -277,16 +277,16 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -295,7 +295,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -317,16 +317,16 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -335,7 +335,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -357,16 +357,16 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -375,7 +375,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -429,17 +429,32 @@ jobs: . 
venv/bin/activate pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files + lint-hadolint: + name: Check ${{ matrix.file }} + runs-on: ubuntu-24.04 + needs: + - info + if: | + github.event.inputs.pylint-only != 'true' + && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' + strategy: + fail-fast: false + matrix: + file: + - Dockerfile + - Dockerfile.dev + - script/hassfest/docker/Dockerfile + steps: + - name: Check out code from GitHub + uses: actions/checkout@v4.2.2 - name: Register hadolint problem matcher run: | echo "::add-matcher::.github/workflows/matchers/hadolint.json" - - name: Check Dockerfile - uses: docker://hadolint/hadolint:v1.18.2 + - name: Check ${{ matrix.file }} + uses: docker://hadolint/hadolint:v2.12.0 with: - args: hadolint Dockerfile - - name: Check Dockerfile.dev - uses: docker://hadolint/hadolint:v1.18.2 - with: - args: hadolint Dockerfile.dev + args: hadolint ${{ matrix.file }} base: name: Prepare dependencies @@ -451,32 +466,31 @@ jobs: python-version: ${{ fromJSON(needs.info.outputs.python_versions) }} steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Generate partial uv restore key id: generate-uv-key run: | - uv_version=$(cat requirements_test.txt | grep uv | cut -d '=' -f 3) + uv_version=$(cat requirements.txt | grep uv | cut -d '=' -f 3) echo "version=${uv_version}" >> $GITHUB_OUTPUT echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{ env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.0.2 + uses: actions/cache@v4.2.0 with: path: venv - lookup-only: true key: >- ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.python_cache_key }} - name: Restore uv wheel cache if: steps.cache-venv.outputs.cache-hit != 'true' - uses: actions/cache@v4.0.2 + uses: actions/cache@v4.2.0 with: path: ${{ env.UV_CACHE_DIR }} key: >- @@ -510,12 +524,32 @@ jobs: python -m venv venv . venv/bin/activate python --version - pip install "$(grep '^uv' < requirements_test.txt)" + pip install "$(grep '^uv' < requirements.txt)" uv pip install -U "pip>=21.3.1" setuptools wheel uv pip install -r requirements.txt python -m script.gen_requirements_all ci uv pip install -r requirements_all_pytest.txt -r requirements_test.txt uv pip install -e . --config-settings editable_mode=compat + - name: Dump pip freeze + run: | + python -m venv venv + . 
venv/bin/activate + python --version + uv pip freeze >> pip_freeze.txt + - name: Upload pip_freeze artifact + uses: actions/upload-artifact@v4.4.3 + with: + name: pip-freeze-${{ matrix.python-version }} + path: pip_freeze.txt + overwrite: true + - name: Remove pip_freeze + run: rm pip_freeze.txt + - name: Remove generated requirements_all + if: steps.cache-venv.outputs.cache-hit != 'true' + run: rm requirements_all_pytest.txt requirements_all_wheels_*.txt + - name: Check dirty + run: | + ./script/check_dirty hassfest: name: Check hassfest @@ -535,16 +569,16 @@ jobs: sudo apt-get -y install \ libturbojpeg - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -568,16 +602,16 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -600,37 +634,41 @@ jobs: && github.event.inputs.mypy-only != 'true' || github.event.inputs.audit-licenses-only == 'true') && needs.info.outputs.requirements == 'true' + strategy: + fail-fast: false + matrix: + python-version: ${{ fromJson(needs.info.outputs.python_versions) }} steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 - - name: Set up Python ${{ env.DEFAULT_PYTHON }} + uses: actions/checkout@v4.2.2 + - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: - python-version: ${{ env.DEFAULT_PYTHON }} + python-version: ${{ matrix.python-version }} check-latest: true - - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment + - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true key: >- ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.python_cache_key }} - - name: Run pip-licenses + - name: Extract license data run: | . venv/bin/activate - pip-licenses --format=json --output-file=licenses.json + python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json - name: Upload licenses - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: - name: licenses - path: licenses.json - - name: Process licenses + name: licenses-${{ github.run_number }}-${{ matrix.python-version }} + path: licenses-${{ matrix.python-version }}.json + - name: Check licenses run: | . 
venv/bin/activate - python -m script.licenses + python -m script.licenses check licenses-${{ matrix.python-version }}.json pylint: name: Check pylint @@ -645,16 +683,16 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -692,16 +730,16 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -737,10 +775,10 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -753,7 +791,7 @@ jobs: env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -761,7 +799,7 @@ jobs: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.python_cache_key }} - name: Restore mypy cache - uses: actions/cache@v4.0.2 + uses: actions/cache@v4.2.0 with: path: .mypy_cache key: >- @@ -800,6 +838,12 @@ jobs: needs: - info - base + - gen-requirements-all + - hassfest + - lint-other + - lint-ruff + - lint-ruff-format + - mypy name: Split tests for full run steps: - name: Install additional OS dependencies @@ -812,16 +856,16 @@ jobs: libturbojpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -833,7 +877,7 @@ jobs: . 
venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: pytest_buckets path: pytest_buckets.txt @@ -876,16 +920,16 @@ jobs: libturbojpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -925,7 +969,8 @@ jobs: -qq \ --timeout=9 \ --durations=10 \ - -n auto \ + --numprocesses auto \ + --snapshot-details \ --dist=loadfile \ ${cov_params[@]} \ -o console_output_style=count \ @@ -934,14 +979,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -996,16 +1041,16 @@ jobs: libturbojpeg \ libmariadb-dev-compat - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -1047,7 +1092,8 @@ jobs: python3 -b -X dev -m pytest \ -qq \ --timeout=20 \ - -n 1 \ + --numprocesses 1 \ + --snapshot-details \ ${cov_params[@]} \ -o console_output_style=count \ --durations=10 \ @@ -1060,7 +1106,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1068,7 +1114,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1079,7 +1125,7 @@ jobs: ./script/check_dirty pytest-postgres: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 services: postgres: image: ${{ matrix.postgresql-group }} @@ -1119,19 +1165,21 @@ jobs: sudo apt-get -y install \ bluez \ ffmpeg \ - libturbojpeg \ + libturbojpeg + sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y + sudo apt-get -y install \ postgresql-server-dev-14 - name: Check out code from 
GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -1173,7 +1221,8 @@ jobs: python3 -b -X dev -m pytest \ -qq \ --timeout=9 \ - -n 1 \ + --numprocesses 1 \ + --snapshot-details \ ${cov_params[@]} \ -o console_output_style=count \ --durations=0 \ @@ -1187,7 +1236,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1195,7 +1244,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1217,19 +1266,18 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Download all coverage artifacts uses: actions/download-artifact@v4.1.8 with: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'true' - uses: codecov/codecov-action@v4.5.0 + uses: codecov/codecov-action@v5.1.1 with: fail_ci_if_error: true flags: full-suite token: ${{ secrets.CODECOV_TOKEN }} - version: v0.6.0 pytest-partial: runs-on: ubuntu-24.04 @@ -1268,16 +1316,16 @@ jobs: libturbojpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.0.2 + uses: actions/cache/restore@v4.2.0 with: path: venv fail-on-cache-miss: true @@ -1319,7 +1367,8 @@ jobs: python3 -b -X dev -m pytest \ -qq \ --timeout=9 \ - -n auto \ + --numprocesses auto \ + --snapshot-details \ ${cov_params[@]} \ -o console_output_style=count \ --durations=0 \ @@ -1329,14 +1378,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -1355,15 +1404,14 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Download all coverage 
artifacts uses: actions/download-artifact@v4.1.8 with: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'false' - uses: codecov/codecov-action@v4.5.0 + uses: codecov/codecov-action@v5.1.1 with: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} - version: v0.6.0 diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index a4653a833c4..d3efa8ebaa3 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -21,14 +21,14 @@ jobs: steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.26.5 + uses: github/codeql-action/init@v3.27.9 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.26.5 + uses: github/codeql-action/analyze@v3.27.9 with: category: "/language:python" diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index 0ab95510480..3fffc41e60c 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -19,10 +19,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 694208d30ac..a36b3073aab 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -32,11 +32,11 @@ jobs: architectures: ${{ steps.info.outputs.architectures }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -46,7 +46,7 @@ jobs: python -m venv venv . 
venv/bin/activate python --version - pip install "$(grep '^uv' < requirements_test.txt)" + pip install "$(grep '^uv' < requirements.txt)" uv pip install -r requirements.txt - name: Get information @@ -64,11 +64,8 @@ jobs: - name: Write env-file run: | ( - echo "GRPC_BUILD_WITH_BORING_SSL_ASM=false" echo "GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=true" echo "GRPC_PYTHON_BUILD_WITH_CYTHON=true" - echo "GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY=true" - echo "GRPC_PYTHON_LDFLAGS=-lpthread -Wl,-wrap,memcpy -static-libgcc" # Fix out of memory issues with rust echo "CARGO_NET_GIT_FETCH_WITH_CLI=true" @@ -82,14 +79,15 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: env_file path: ./.env_file + include-hidden-files: true overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: requirements_diff path: ./requirements_diff.txt @@ -101,7 +99,7 @@ jobs: python -m script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.3 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt @@ -114,11 +112,11 @@ jobs: strategy: fail-fast: false matrix: - abi: ["cp312"] + abi: ["cp312", "cp313"] arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Download env_file uses: actions/download-artifact@v4.1.8 @@ -130,16 +128,22 @@ jobs: with: name: requirements_diff + - name: Adjust build env + run: | + # Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine + sed -i "/uv/d" requirements.txt + sed -i "/uv/d" requirements_diff.txt + - name: Build wheels - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "libffi-dev;openssl-dev;yaml-dev;nasm" - skip-binary: aiohttp + apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev" + skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements.txt" @@ -152,11 +156,11 @@ jobs: strategy: fail-fast: false matrix: - abi: ["cp312"] + abi: ["cp312", "cp313"] arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.7 + uses: actions/checkout@v4.2.2 - name: Download env_file uses: actions/download-artifact@v4.1.8 @@ -173,26 +177,6 @@ jobs: with: name: requirements_all_wheels - - name: Split requirements all - run: | - # We split requirements all into multiple files. - # This is to prevent the build from running out of memory when - # resolving packages on 32-bits systems (like armhf, armv7). - - split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt - - - name: Create requirements for cython<3 - run: | - # Some dependencies still require 'cython<3' - # and don't yet use isolated build environments. - # Build these first. 
- # grpcio: https://github.com/grpc/grpc/issues/33918 - # pydantic: https://github.com/pydantic/pydantic/issues/7689 - - touch requirements_old-cython.txt - cat homeassistant/package_constraints.txt | grep 'grpcio==' >> requirements_old-cython.txt - cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt - - name: Adjust build env run: | if [ "${{ matrix.arch }}" = "i386" ]; then @@ -201,60 +185,56 @@ jobs: # Do not pin numpy in wheels building sed -i "/numpy/d" homeassistant/package_constraints.txt + # Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine + sed -i "/uv/d" requirements.txt + sed -i "/uv/d" requirements_diff.txt - - name: Build wheels (old cython) - uses: home-assistant/wheels@2024.07.1 - with: - abi: ${{ matrix.abi }} - tag: musllinux_1_2 - arch: ${{ matrix.arch }} - wheels-key: ${{ secrets.WHEELS_KEY }} - env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad - constraints: "homeassistant/package_constraints.txt" - requirements-diff: "requirements_diff.txt" - requirements: "requirements_old-cython.txt" - pip: "'cython<3'" + - name: Split requirements all + run: | + # We split requirements all into multiple files. + # This is to prevent the build from running out of memory when + # resolving packages on 32-bits systems (like armhf, armv7). + + split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt - name: Build wheels (part 1) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtaa" - name: Build wheels (part 2) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: 
"bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtab" - name: Build wheels (part 3) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtac" diff --git a/.gitignore b/.gitignore index 9bbf5bb81d4..241255253c5 100644 --- a/.gitignore +++ b/.gitignore @@ -79,6 +79,7 @@ pytest-*.txt .pydevproject .python-version +.tool-versions # emacs auto backups *~ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ab5e59139cf..6ecae762dcd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.2 + rev: v0.8.3 hooks: - id: ruff args: @@ -18,7 +18,7 @@ repos: exclude_types: [csv, json, html] exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/ - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v5.0.0 hooks: - id: check-executables-have-shebangs stages: [manual] @@ -83,14 +83,14 @@ repos: pass_filenames: false language: script types: [text] - files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements_test.txt)$ + files: 
^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$ - id: hassfest-metadata name: hassfest-metadata - entry: script/run-in-env.sh python3 -m script.hassfest -p metadata + entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker pass_filenames: false language: script types: [text] - files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$ + files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$ - id: hassfest-mypy-config name: hassfest-mypy-config entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config diff --git a/.strict-typing b/.strict-typing index 07aed7b4ca1..899b22af35f 100644 --- a/.strict-typing +++ b/.strict-typing @@ -41,6 +41,7 @@ homeassistant.util.unit_system # --- Add components below this line --- homeassistant.components homeassistant.components.abode.* +homeassistant.components.acaia.* homeassistant.components.accuweather.* homeassistant.components.acer_projector.* homeassistant.components.acmeda.* @@ -95,6 +96,7 @@ homeassistant.components.aruba.* homeassistant.components.arwn.* homeassistant.components.aseko_pool_live.* homeassistant.components.assist_pipeline.* +homeassistant.components.assist_satellite.* homeassistant.components.asuswrt.* homeassistant.components.autarco.* homeassistant.components.auth.* @@ -110,6 +112,7 @@ homeassistant.components.bitcoin.* homeassistant.components.blockchain.* homeassistant.components.blue_current.* homeassistant.components.blueprint.* +homeassistant.components.bluesound.* homeassistant.components.bluetooth.* homeassistant.components.bluetooth_adapters.* homeassistant.components.bluetooth_tracker.* @@ -122,6 +125,7 @@ homeassistant.components.bryant_evolution.* homeassistant.components.bthome.* homeassistant.components.button.* homeassistant.components.calendar.* +homeassistant.components.cambridge_audio.* homeassistant.components.camera.* homeassistant.components.canary.* homeassistant.components.cert_expiry.* @@ -133,12 +137,14 @@ homeassistant.components.co2signal.* homeassistant.components.command_line.* homeassistant.components.config.* homeassistant.components.configurator.* +homeassistant.components.cookidoo.* homeassistant.components.counter.* homeassistant.components.cover.* homeassistant.components.cpuspeed.* homeassistant.components.crownstone.* homeassistant.components.date.* homeassistant.components.datetime.* +homeassistant.components.deako.* homeassistant.components.deconz.* homeassistant.components.default_config.* homeassistant.components.demo.* @@ -164,6 +170,7 @@ homeassistant.components.easyenergy.* homeassistant.components.ecovacs.* homeassistant.components.ecowitt.* homeassistant.components.efergy.* +homeassistant.components.eheimdigital.* homeassistant.components.electrasmart.* homeassistant.components.electric_kiwi.* homeassistant.components.elevenlabs.* @@ -205,10 +212,14 @@ homeassistant.components.geo_location.* homeassistant.components.geocaching.* homeassistant.components.gios.* homeassistant.components.glances.* +homeassistant.components.go2rtc.* homeassistant.components.goalzero.* homeassistant.components.google.* homeassistant.components.google_assistant_sdk.* +homeassistant.components.google_cloud.* +homeassistant.components.google_photos.* homeassistant.components.google_sheets.* 
+homeassistant.components.govee_ble.* homeassistant.components.gpsd.* homeassistant.components.greeneye_monitor.* homeassistant.components.group.* @@ -260,6 +271,7 @@ homeassistant.components.ios.* homeassistant.components.iotty.* homeassistant.components.ipp.* homeassistant.components.iqvia.* +homeassistant.components.iron_os.* homeassistant.components.islamic_prayer_times.* homeassistant.components.isy994.* homeassistant.components.jellyfin.* @@ -278,6 +290,7 @@ homeassistant.components.lawn_mower.* homeassistant.components.lcn.* homeassistant.components.ld2410_ble.* homeassistant.components.led_ble.* +homeassistant.components.lektrico.* homeassistant.components.lidarr.* homeassistant.components.lifx.* homeassistant.components.light.* @@ -294,9 +307,7 @@ homeassistant.components.london_underground.* homeassistant.components.lookin.* homeassistant.components.luftdaten.* homeassistant.components.madvr.* -homeassistant.components.mailbox.* homeassistant.components.manual.* -homeassistant.components.map.* homeassistant.components.mastodon.* homeassistant.components.matrix.* homeassistant.components.matter.* @@ -311,16 +322,19 @@ homeassistant.components.minecraft_server.* homeassistant.components.mjpeg.* homeassistant.components.modbus.* homeassistant.components.modem_callerid.* +homeassistant.components.mold_indicator.* homeassistant.components.monzo.* homeassistant.components.moon.* homeassistant.components.mopeka.* homeassistant.components.motionmount.* homeassistant.components.mqtt.* +homeassistant.components.music_assistant.* homeassistant.components.my.* homeassistant.components.mysensors.* homeassistant.components.myuplink.* homeassistant.components.nam.* homeassistant.components.nanoleaf.* +homeassistant.components.nasweb.* homeassistant.components.neato.* homeassistant.components.nest.* homeassistant.components.netatmo.* @@ -330,6 +344,7 @@ homeassistant.components.nfandroidtv.* homeassistant.components.nightscout.* homeassistant.components.nissan_leaf.* homeassistant.components.no_ip.* +homeassistant.components.nordpool.* homeassistant.components.notify.* homeassistant.components.notion.* homeassistant.components.number.* @@ -337,7 +352,9 @@ homeassistant.components.nut.* homeassistant.components.onboarding.* homeassistant.components.oncue.* homeassistant.components.onewire.* +homeassistant.components.onkyo.* homeassistant.components.open_meteo.* +homeassistant.components.openai_conversation.* homeassistant.components.openexchangerates.* homeassistant.components.opensky.* homeassistant.components.openuv.* @@ -345,11 +362,13 @@ homeassistant.components.oralb.* homeassistant.components.otbr.* homeassistant.components.overkiz.* homeassistant.components.p1_monitor.* +homeassistant.components.panel_custom.* homeassistant.components.peco.* homeassistant.components.persistent_notification.* homeassistant.components.pi_hole.* homeassistant.components.ping.* homeassistant.components.plugwise.* +homeassistant.components.powerfox.* homeassistant.components.powerwall.* homeassistant.components.private_ble_device.* homeassistant.components.prometheus.* @@ -362,6 +381,7 @@ homeassistant.components.pvoutput.* homeassistant.components.qnap_qsw.* homeassistant.components.rabbitair.* homeassistant.components.radarr.* +homeassistant.components.radio_browser.* homeassistant.components.rainforest_raven.* homeassistant.components.rainmachine.* homeassistant.components.raspberry_pi.* @@ -370,6 +390,7 @@ homeassistant.components.recollect_waste.* homeassistant.components.recorder.* 
homeassistant.components.remote.* homeassistant.components.renault.* +homeassistant.components.reolink.* homeassistant.components.repairs.* homeassistant.components.rest.* homeassistant.components.rest_command.* @@ -384,11 +405,13 @@ homeassistant.components.romy.* homeassistant.components.rpi_power.* homeassistant.components.rss_feed_template.* homeassistant.components.rtsp_to_webrtc.* +homeassistant.components.russound_rio.* homeassistant.components.ruuvi_gateway.* homeassistant.components.ruuvitag_ble.* homeassistant.components.samsungtv.* homeassistant.components.scene.* homeassistant.components.schedule.* +homeassistant.components.schlage.* homeassistant.components.scrape.* homeassistant.components.script.* homeassistant.components.search.* @@ -396,8 +419,10 @@ homeassistant.components.select.* homeassistant.components.sensibo.* homeassistant.components.sensirion_ble.* homeassistant.components.sensor.* +homeassistant.components.sensoterra.* homeassistant.components.senz.* homeassistant.components.sfr_box.* +homeassistant.components.shell_command.* homeassistant.components.shelly.* homeassistant.components.shopping_list.* homeassistant.components.simplepush.* @@ -407,15 +432,19 @@ homeassistant.components.skybell.* homeassistant.components.slack.* homeassistant.components.sleepiq.* homeassistant.components.smhi.* +homeassistant.components.smlight.* homeassistant.components.snooz.* +homeassistant.components.solarlog.* homeassistant.components.sonarr.* homeassistant.components.speedtestdotnet.* +homeassistant.components.spotify.* homeassistant.components.sql.* +homeassistant.components.squeezebox.* homeassistant.components.ssdp.* homeassistant.components.starlink.* homeassistant.components.statistics.* homeassistant.components.steamist.* -homeassistant.components.stookalert.* +homeassistant.components.stookwijzer.* homeassistant.components.stream.* homeassistant.components.streamlabswater.* homeassistant.components.stt.* @@ -423,6 +452,7 @@ homeassistant.components.suez_water.* homeassistant.components.sun.* homeassistant.components.surepetcare.* homeassistant.components.switch.* +homeassistant.components.switch_as_x.* homeassistant.components.switchbee.* homeassistant.components.switchbot_cloud.* homeassistant.components.switcher_kis.* @@ -470,6 +500,7 @@ homeassistant.components.update.* homeassistant.components.uptime.* homeassistant.components.uptimerobot.* homeassistant.components.usb.* +homeassistant.components.uvc.* homeassistant.components.vacuum.* homeassistant.components.vallox.* homeassistant.components.valve.* @@ -490,6 +521,7 @@ homeassistant.components.whois.* homeassistant.components.withings.* homeassistant.components.wiz.* homeassistant.components.wled.* +homeassistant.components.workday.* homeassistant.components.worldclock.* homeassistant.components.xiaomi_ble.* homeassistant.components.yale_smart_alarm.* diff --git a/.vscode/settings.default.json b/.vscode/settings.default.json index 681698d08b3..ace0a988bf5 100644 --- a/.vscode/settings.default.json +++ b/.vscode/settings.default.json @@ -6,5 +6,13 @@ // https://code.visualstudio.com/docs/python/testing#_pytest-configuration-settings "python.testing.pytestEnabled": false, // https://code.visualstudio.com/docs/python/linting#_general-settings - "pylint.importStrategy": "fromEnvironment" + "pylint.importStrategy": "fromEnvironment", + "json.schemas": [ + { + "fileMatch": [ + "homeassistant/components/*/manifest.json" + ], + "url": "./script/json_schemas/manifest_schema.json" + } + ] } diff --git a/.vscode/tasks.json 
b/.vscode/tasks.json index 2495249af66..7425e7a2533 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -16,7 +16,7 @@ { "label": "Pytest", "type": "shell", - "command": "python3 -m pytest --timeout=10 tests", + "command": "${command:python.interpreterPath} -m pytest --timeout=10 tests", "dependsOn": ["Install all Test Requirements"], "group": { "kind": "test", @@ -31,7 +31,7 @@ { "label": "Pytest (changed tests only)", "type": "shell", - "command": "python3 -m pytest --timeout=10 --picked", + "command": "${command:python.interpreterPath} -m pytest --timeout=10 --picked", "group": { "kind": "test", "isDefault": true @@ -56,6 +56,20 @@ }, "problemMatcher": [] }, + { + "label": "Pre-commit", + "type": "shell", + "command": "pre-commit run --show-diff-on-failure", + "group": { + "kind": "test", + "isDefault": true + }, + "presentation": { + "reveal": "always", + "panel": "new" + }, + "problemMatcher": [] + }, { "label": "Pylint", "type": "shell", @@ -75,7 +89,23 @@ "label": "Code Coverage", "detail": "Generate code coverage report for a given integration.", "type": "shell", - "command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto", + "command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto", + "dependsOn": ["Compile English translations"], + "group": { + "kind": "test", + "isDefault": true + }, + "presentation": { + "reveal": "always", + "panel": "new" + }, + "problemMatcher": [] + }, + { + "label": "Update syrupy snapshots", + "detail": "Update syrupy snapshots for a given integration.", + "type": "shell", + "command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName} --snapshot-update", "dependsOn": ["Compile English translations"], "group": { "kind": "test", @@ -133,7 +163,7 @@ "label": "Compile English translations", "detail": "In order to test changes to translation files, the translation strings must be compiled into Home Assistant's translation directories.", "type": "shell", - "command": "python3 -m script.translations develop --all", + "command": "${command:python.interpreterPath} -m script.translations develop --all", "group": { "kind": "build", "isDefault": true @@ -143,7 +173,7 @@ "label": "Run scaffold", "detail": "Add new functionality to a integration using a scaffold.", "type": "shell", - "command": "python3 -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}", + "command": "${command:python.interpreterPath} -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}", "group": { "kind": "build", "isDefault": true @@ -153,7 +183,7 @@ "label": "Create new integration", "detail": "Use the scaffold to create a new integration.", "type": "shell", - "command": "python3 -m script.scaffold integration", + "command": "${command:python.interpreterPath} -m script.scaffold integration", "group": { "kind": "build", "isDefault": true diff --git a/CODEOWNERS b/CODEOWNERS index 1618b18a8be..f1c6aa4aea5 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -40,6 +40,8 @@ build.json @home-assistant/supervisor # Integrations /homeassistant/components/abode/ @shred86 /tests/components/abode/ @shred86 +/homeassistant/components/acaia/ @zweckj +/tests/components/acaia/ @zweckj 
/homeassistant/components/accuweather/ @bieniu /tests/components/accuweather/ @bieniu /homeassistant/components/acmeda/ @atmurray @@ -48,6 +50,7 @@ build.json @home-assistant/supervisor /tests/components/adax/ @danielhiversen /homeassistant/components/adguard/ @frenck /tests/components/adguard/ @frenck +/homeassistant/components/ads/ @mrpasztoradam /homeassistant/components/advantage_air/ @Bre77 /tests/components/advantage_air/ @Bre77 /homeassistant/components/aemet/ @Noltari @@ -143,6 +146,8 @@ build.json @home-assistant/supervisor /tests/components/aseko_pool_live/ @milanmeu /homeassistant/components/assist_pipeline/ @balloob @synesthesiam /tests/components/assist_pipeline/ @balloob @synesthesiam +/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam +/tests/components/assist_satellite/ @home-assistant/core @synesthesiam /homeassistant/components/asuswrt/ @kennedyshead @ollo69 /tests/components/asuswrt/ @kennedyshead @ollo69 /homeassistant/components/atag/ @MatsNL @@ -228,14 +233,16 @@ build.json @home-assistant/supervisor /homeassistant/components/bsblan/ @liudger /tests/components/bsblan/ @liudger /homeassistant/components/bt_smarthub/ @typhoon2099 -/homeassistant/components/bthome/ @Ernst79 -/tests/components/bthome/ @Ernst79 +/homeassistant/components/bthome/ @Ernst79 @thecode +/tests/components/bthome/ @Ernst79 @thecode /homeassistant/components/buienradar/ @mjj4791 @ties @Robbie1221 /tests/components/buienradar/ @mjj4791 @ties @Robbie1221 /homeassistant/components/button/ @home-assistant/core /tests/components/button/ @home-assistant/core /homeassistant/components/calendar/ @home-assistant/core /tests/components/calendar/ @home-assistant/core +/homeassistant/components/cambridge_audio/ @noahhusby +/tests/components/cambridge_audio/ @noahhusby /homeassistant/components/camera/ @home-assistant/core /tests/components/camera/ @home-assistant/core /homeassistant/components/cast/ @emontnemery @@ -277,6 +284,8 @@ build.json @home-assistant/supervisor /tests/components/control4/ @lawtancool /homeassistant/components/conversation/ @home-assistant/core @synesthesiam /tests/components/conversation/ @home-assistant/core @synesthesiam +/homeassistant/components/cookidoo/ @miaucl +/tests/components/cookidoo/ @miaucl /homeassistant/components/coolmaster/ @OnFreund /tests/components/coolmaster/ @OnFreund /homeassistant/components/counter/ @fabaff @@ -294,6 +303,8 @@ build.json @home-assistant/supervisor /tests/components/date/ @home-assistant/core /homeassistant/components/datetime/ @home-assistant/core /tests/components/datetime/ @home-assistant/core +/homeassistant/components/deako/ @sebirdman @balake @deakolights +/tests/components/deako/ @sebirdman @balake @deakolights /homeassistant/components/debugpy/ @frenck /tests/components/debugpy/ @frenck /homeassistant/components/deconz/ @Kane610 @@ -353,6 +364,8 @@ build.json @home-assistant/supervisor /tests/components/dsmr/ @Robbie1221 /homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna /tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna +/homeassistant/components/duke_energy/ @hunterjm +/tests/components/duke_energy/ @hunterjm /homeassistant/components/duotecno/ @cereal2nd /tests/components/duotecno/ @cereal2nd /homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192 @andarotajo @@ -374,6 +387,8 @@ build.json @home-assistant/supervisor /homeassistant/components/efergy/ @tkdrob /tests/components/efergy/ @tkdrob /homeassistant/components/egardia/ @jeroenterheerdt 
+/homeassistant/components/eheimdigital/ @autinerd +/tests/components/eheimdigital/ @autinerd /homeassistant/components/electrasmart/ @jafar-atili /tests/components/electrasmart/ @jafar-atili /homeassistant/components/electric_kiwi/ @mikey0000 @@ -487,8 +502,8 @@ build.json @home-assistant/supervisor /tests/components/freebox/ @hacf-fr @Quentame /homeassistant/components/freedompro/ @stefano055415 /tests/components/freedompro/ @stefano055415 -/homeassistant/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185 -/tests/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185 +/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185 +/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185 /homeassistant/components/fritzbox/ @mib1185 @flabbamann /tests/components/fritzbox/ @mib1185 @flabbamann /homeassistant/components/fritzbox_callmonitor/ @cdce8p @@ -535,6 +550,8 @@ build.json @home-assistant/supervisor /tests/components/github/ @timmo001 @ludeeus /homeassistant/components/glances/ @engrbm87 /tests/components/glances/ @engrbm87 +/homeassistant/components/go2rtc/ @home-assistant/core +/tests/components/go2rtc/ @home-assistant/core /homeassistant/components/goalzero/ @tkdrob /tests/components/goalzero/ @tkdrob /homeassistant/components/gogogate2/ @vangorra @@ -547,11 +564,14 @@ build.json @home-assistant/supervisor /tests/components/google_assistant/ @home-assistant/cloud /homeassistant/components/google_assistant_sdk/ @tronikos /tests/components/google_assistant_sdk/ @tronikos -/homeassistant/components/google_cloud/ @lufton +/homeassistant/components/google_cloud/ @lufton @tronikos +/tests/components/google_cloud/ @lufton @tronikos /homeassistant/components/google_generative_ai_conversation/ @tronikos /tests/components/google_generative_ai_conversation/ @tronikos /homeassistant/components/google_mail/ @tkdrob /tests/components/google_mail/ @tkdrob +/homeassistant/components/google_photos/ @allenporter +/tests/components/google_photos/ @allenporter /homeassistant/components/google_sheets/ @tkdrob /tests/components/google_sheets/ @tkdrob /homeassistant/components/google_tasks/ @allenporter @@ -572,8 +592,8 @@ build.json @home-assistant/supervisor /tests/components/group/ @home-assistant/core /homeassistant/components/guardian/ @bachya /tests/components/guardian/ @bachya -/homeassistant/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r -/tests/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r +/homeassistant/components/habitica/ @tr4nt0r +/tests/components/habitica/ @tr4nt0r /homeassistant/components/hardkernel/ @home-assistant/core /tests/components/hardkernel/ @home-assistant/core /homeassistant/components/hardware/ @home-assistant/core @@ -603,8 +623,8 @@ build.json @home-assistant/supervisor /tests/components/hlk_sw16/ @jameshilliard /homeassistant/components/holiday/ @jrieger @gjohansson-ST /tests/components/holiday/ @jrieger @gjohansson-ST -/homeassistant/components/home_connect/ @DavidMStraub -/tests/components/home_connect/ @DavidMStraub +/homeassistant/components/home_connect/ @DavidMStraub @Diegorro98 +/tests/components/home_connect/ @DavidMStraub @Diegorro98 /homeassistant/components/homeassistant/ @home-assistant/core /tests/components/homeassistant/ @home-assistant/core /homeassistant/components/homeassistant_alerts/ @home-assistant/core @@ -629,6 +649,8 @@ build.json @home-assistant/supervisor /tests/components/homewizard/ @DCSBL /homeassistant/components/honeywell/ @rdfurman @mkmer /tests/components/honeywell/ @rdfurman 
@mkmer +/homeassistant/components/html5/ @alexyao2015 +/tests/components/html5/ @alexyao2015 /homeassistant/components/http/ @home-assistant/core /tests/components/http/ @home-assistant/core /homeassistant/components/huawei_lte/ @scop @fphammerle @@ -643,6 +665,8 @@ build.json @home-assistant/supervisor /tests/components/hunterdouglas_powerview/ @bdraco @kingy444 @trullock /homeassistant/components/husqvarna_automower/ @Thomas55555 /tests/components/husqvarna_automower/ @Thomas55555 +/homeassistant/components/husqvarna_automower_ble/ @alistair23 +/tests/components/husqvarna_automower_ble/ @alistair23 /homeassistant/components/huum/ @frwickst /tests/components/huum/ @frwickst /homeassistant/components/hvv_departures/ @vigonotion @@ -707,8 +731,8 @@ build.json @home-assistant/supervisor /tests/components/ios/ @robbiet480 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard /tests/components/iotawatt/ @gtdiehl @jyavenard -/homeassistant/components/iotty/ @pburgio -/tests/components/iotty/ @pburgio +/homeassistant/components/iotty/ @shapournemati-iotty +/tests/components/iotty/ @shapournemati-iotty /homeassistant/components/iperf3/ @rohankapoorcom /homeassistant/components/ipma/ @dgomes /tests/components/ipma/ @dgomes @@ -721,6 +745,8 @@ build.json @home-assistant/supervisor /tests/components/iron_os/ @tr4nt0r /homeassistant/components/isal/ @bdraco /tests/components/isal/ @bdraco +/homeassistant/components/iskra/ @iskramis +/tests/components/iskra/ @iskramis /homeassistant/components/islamic_prayer_times/ @engrbm87 @cpfair /tests/components/islamic_prayer_times/ @engrbm87 @cpfair /homeassistant/components/israel_rail/ @shaiu @@ -731,6 +757,8 @@ build.json @home-assistant/supervisor /tests/components/ista_ecotrend/ @tr4nt0r /homeassistant/components/isy994/ @bdraco @shbatm /tests/components/isy994/ @bdraco @shbatm +/homeassistant/components/ituran/ @shmuelzon +/tests/components/ituran/ @shmuelzon /homeassistant/components/izone/ @Swamp-Ig /tests/components/izone/ @Swamp-Ig /homeassistant/components/jellyfin/ @j-stienstra @ctalkington @@ -797,8 +825,12 @@ build.json @home-assistant/supervisor /tests/components/leaone/ @bdraco /homeassistant/components/led_ble/ @bdraco /tests/components/led_ble/ @bdraco +/homeassistant/components/lektrico/ @lektrico +/tests/components/lektrico/ @lektrico /homeassistant/components/lg_netcast/ @Drafteed @splinter98 /tests/components/lg_netcast/ @Drafteed @splinter98 +/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration +/tests/components/lg_thinq/ @LG-ThinQ-Integration /homeassistant/components/lidarr/ @tkdrob /tests/components/lidarr/ @tkdrob /homeassistant/components/lifx/ @Djelibeybi @@ -843,8 +875,8 @@ build.json @home-assistant/supervisor /tests/components/lupusec/ @majuss @suaveolent /homeassistant/components/lutron/ @cdheiser @wilburCForce /tests/components/lutron/ @cdheiser @wilburCForce -/homeassistant/components/lutron_caseta/ @swails @bdraco @danaues @eclair4151 -/tests/components/lutron_caseta/ @swails @bdraco @danaues @eclair4151 +/homeassistant/components/lutron_caseta/ @swails @danaues @eclair4151 +/tests/components/lutron_caseta/ @swails @danaues @eclair4151 /homeassistant/components/lyric/ @timmo001 /tests/components/lyric/ @timmo001 /homeassistant/components/madvr/ @iloveicedgreentea @@ -907,6 +939,8 @@ build.json @home-assistant/supervisor /tests/components/modern_forms/ @wonderslug /homeassistant/components/moehlenhoff_alpha2/ @j-a-n /tests/components/moehlenhoff_alpha2/ @j-a-n +/homeassistant/components/monarch_money/ @jeeftor 
+/tests/components/monarch_money/ @jeeftor /homeassistant/components/monoprice/ @etsinko @OnFreund /tests/components/monoprice/ @etsinko @OnFreund /homeassistant/components/monzo/ @jakemartin-icl @@ -928,6 +962,8 @@ build.json @home-assistant/supervisor /homeassistant/components/msteams/ @peroyvind /homeassistant/components/mullvad/ @meichthys /tests/components/mullvad/ @meichthys +/homeassistant/components/music_assistant/ @music-assistant +/tests/components/music_assistant/ @music-assistant /homeassistant/components/mutesync/ @currentoor /tests/components/mutesync/ @currentoor /homeassistant/components/my/ @home-assistant/core @@ -942,8 +978,8 @@ build.json @home-assistant/supervisor /tests/components/nam/ @bieniu /homeassistant/components/nanoleaf/ @milanmeu @joostlek /tests/components/nanoleaf/ @milanmeu @joostlek -/homeassistant/components/neato/ @Santobert -/tests/components/neato/ @Santobert +/homeassistant/components/nasweb/ @nasWebio +/tests/components/nasweb/ @nasWebio /homeassistant/components/nederlandse_spoorwegen/ @YarmoM /homeassistant/components/ness_alarm/ @nickw444 /tests/components/ness_alarm/ @nickw444 @@ -974,6 +1010,8 @@ build.json @home-assistant/supervisor /tests/components/nice_go/ @IceBotYT /homeassistant/components/nightscout/ @marciogranzotto /tests/components/nightscout/ @marciogranzotto +/homeassistant/components/niko_home_control/ @VandeurenGlenn +/tests/components/niko_home_control/ @VandeurenGlenn /homeassistant/components/nilu/ @hfurubotten /homeassistant/components/nina/ @DeerMaximum /tests/components/nina/ @DeerMaximum @@ -982,6 +1020,8 @@ build.json @home-assistant/supervisor /homeassistant/components/noaa_tides/ @jdelaney72 /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe /tests/components/nobo_hub/ @echoromeo @oyvindwe +/homeassistant/components/nordpool/ @gjohansson-ST +/tests/components/nordpool/ @gjohansson-ST /homeassistant/components/notify/ @home-assistant/core /tests/components/notify/ @home-assistant/core /homeassistant/components/notify_events/ @matrozov @papajojo @@ -1004,6 +1044,8 @@ build.json @home-assistant/supervisor /tests/components/nut/ @bdraco @ollo69 @pestevez /homeassistant/components/nws/ @MatthewFlamm @kamiyo /tests/components/nws/ @MatthewFlamm @kamiyo +/homeassistant/components/nyt_games/ @joostlek +/tests/components/nyt_games/ @joostlek /homeassistant/components/nzbget/ @chriscla /tests/components/nzbget/ @chriscla /homeassistant/components/obihai/ @dshokouhi @ejpenney @@ -1011,6 +1053,8 @@ build.json @home-assistant/supervisor /homeassistant/components/octoprint/ @rfleming71 /tests/components/octoprint/ @rfleming71 /homeassistant/components/ohmconnect/ @robbiet480 +/homeassistant/components/ohme/ @dan-r +/tests/components/ohme/ @dan-r /homeassistant/components/ollama/ @synesthesiam /tests/components/ollama/ @synesthesiam /homeassistant/components/ombi/ @larssont @@ -1023,6 +1067,7 @@ build.json @home-assistant/supervisor /homeassistant/components/onewire/ @garbled1 @epenet /tests/components/onewire/ @garbled1 @epenet /homeassistant/components/onkyo/ @arturpragacz +/tests/components/onkyo/ @arturpragacz /homeassistant/components/onvif/ @hunterjm /tests/components/onvif/ @hunterjm /homeassistant/components/open_meteo/ @frenck @@ -1064,10 +1109,10 @@ build.json @home-assistant/supervisor /tests/components/ovo_energy/ @timmo001 /homeassistant/components/p1_monitor/ @klaasnicolaas /tests/components/p1_monitor/ @klaasnicolaas +/homeassistant/components/palazzetti/ @dotvav +/tests/components/palazzetti/ @dotvav 
/homeassistant/components/panel_custom/ @home-assistant/frontend /tests/components/panel_custom/ @home-assistant/frontend -/homeassistant/components/panel_iframe/ @home-assistant/frontend -/tests/components/panel_iframe/ @home-assistant/frontend /homeassistant/components/peco/ @IceBotYT /tests/components/peco/ @IceBotYT /homeassistant/components/pegel_online/ @mib1185 @@ -1082,8 +1127,6 @@ build.json @home-assistant/supervisor /tests/components/pi_hole/ @shenxn /homeassistant/components/picnic/ @corneyl /tests/components/picnic/ @corneyl -/homeassistant/components/pilight/ @trekky12 -/tests/components/pilight/ @trekky12 /homeassistant/components/ping/ @jpbede /tests/components/ping/ @jpbede /homeassistant/components/plaato/ @JohNan @@ -1098,6 +1141,8 @@ build.json @home-assistant/supervisor /tests/components/point/ @fredrike /homeassistant/components/poolsense/ @haemishkyd /tests/components/poolsense/ @haemishkyd +/homeassistant/components/powerfox/ @klaasnicolaas +/tests/components/powerfox/ @klaasnicolaas /homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson /tests/components/powerwall/ @bdraco @jrester @daniel-simpson /homeassistant/components/private_ble_device/ @Jc2k @@ -1113,8 +1158,8 @@ build.json @home-assistant/supervisor /homeassistant/components/proximity/ @mib1185 /tests/components/proximity/ @mib1185 /homeassistant/components/proxmoxve/ @jhollowe @Corbeno -/homeassistant/components/prusalink/ @balloob @Skaronator -/tests/components/prusalink/ @balloob @Skaronator +/homeassistant/components/prusalink/ @balloob +/tests/components/prusalink/ @balloob /homeassistant/components/ps4/ @ktnrg45 /tests/components/ps4/ @ktnrg45 /homeassistant/components/pure_energie/ @klaasnicolaas @@ -1217,8 +1262,8 @@ build.json @home-assistant/supervisor /tests/components/roku/ @ctalkington /homeassistant/components/romy/ @xeniter /tests/components/romy/ @xeniter -/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous -/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous +/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous +/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous /homeassistant/components/roon/ @pavoni /tests/components/roon/ @pavoni /homeassistant/components/rpi_power/ @shenxn @swetoast @@ -1275,6 +1320,8 @@ build.json @home-assistant/supervisor /tests/components/sensorpro/ @bdraco /homeassistant/components/sensorpush/ @bdraco /tests/components/sensorpush/ @bdraco +/homeassistant/components/sensoterra/ @markruys +/tests/components/sensoterra/ @markruys /homeassistant/components/sentry/ @dcramer @frenck /tests/components/sentry/ @dcramer @frenck /homeassistant/components/senz/ @milanmeu @@ -1309,6 +1356,8 @@ build.json @home-assistant/supervisor /tests/components/siren/ @home-assistant/core @raman325 /homeassistant/components/sisyphus/ @jkeljo /homeassistant/components/sky_hub/ @rogerselwyn +/homeassistant/components/sky_remote/ @dunnmj @saty9 +/tests/components/sky_remote/ @dunnmj @saty9 /homeassistant/components/skybell/ @tkdrob /tests/components/skybell/ @tkdrob /homeassistant/components/slack/ @tkdrob @fletcherau @@ -1316,6 +1365,8 @@ build.json @home-assistant/supervisor /homeassistant/components/sleepiq/ @mfugate1 @kbickar /tests/components/sleepiq/ @mfugate1 @kbickar /homeassistant/components/slide/ @ualex73 +/homeassistant/components/slide_local/ @dontinelli +/tests/components/slide_local/ @dontinelli /homeassistant/components/slimproto/ @marcelveldt /tests/components/slimproto/ 
@marcelveldt /homeassistant/components/sma/ @kellerza @rklomp @@ -1327,6 +1378,7 @@ build.json @home-assistant/supervisor /homeassistant/components/smarttub/ @mdz /tests/components/smarttub/ @mdz /homeassistant/components/smarty/ @z0mbieprocess +/tests/components/smarty/ @z0mbieprocess /homeassistant/components/smhi/ @gjohansson-ST /tests/components/smhi/ @gjohansson-ST /homeassistant/components/smlight/ @tl-sl @@ -1360,30 +1412,26 @@ build.json @home-assistant/supervisor /tests/components/spaceapi/ @fabaff /homeassistant/components/speedtestdotnet/ @rohankapoorcom @engrbm87 /tests/components/speedtestdotnet/ @rohankapoorcom @engrbm87 -/homeassistant/components/spider/ @peternijssen -/tests/components/spider/ @peternijssen /homeassistant/components/splunk/ @Bre77 /homeassistant/components/spotify/ @frenck @joostlek /tests/components/spotify/ @frenck @joostlek /homeassistant/components/sql/ @gjohansson-ST @dougiteixeira /tests/components/sql/ @gjohansson-ST @dougiteixeira -/homeassistant/components/squeezebox/ @rajlaud -/tests/components/squeezebox/ @rajlaud +/homeassistant/components/squeezebox/ @rajlaud @pssc @peteS-UK +/tests/components/squeezebox/ @rajlaud @pssc @peteS-UK /homeassistant/components/srp_energy/ @briglx /tests/components/srp_energy/ @briglx /homeassistant/components/starline/ @anonym-tsk /tests/components/starline/ @anonym-tsk /homeassistant/components/starlink/ @boswelja /tests/components/starlink/ @boswelja -/homeassistant/components/statistics/ @ThomDietrich -/tests/components/statistics/ @ThomDietrich +/homeassistant/components/statistics/ @ThomDietrich @gjohansson-ST +/tests/components/statistics/ @ThomDietrich @gjohansson-ST /homeassistant/components/steam_online/ @tkdrob /tests/components/steam_online/ @tkdrob /homeassistant/components/steamist/ @bdraco /tests/components/steamist/ @bdraco /homeassistant/components/stiebel_eltron/ @fucm -/homeassistant/components/stookalert/ @fwestenberg @frenck -/tests/components/stookalert/ @fwestenberg @frenck /homeassistant/components/stookwijzer/ @fwestenberg /tests/components/stookwijzer/ @fwestenberg /homeassistant/components/stream/ @hunterjm @uvjustin @allenporter @@ -1392,8 +1440,8 @@ build.json @home-assistant/supervisor /tests/components/stt/ @home-assistant/core /homeassistant/components/subaru/ @G-Two /tests/components/subaru/ @G-Two -/homeassistant/components/suez_water/ @ooii -/tests/components/suez_water/ @ooii +/homeassistant/components/suez_water/ @ooii @jb101010-2 +/tests/components/suez_water/ @ooii @jb101010-2 /homeassistant/components/sun/ @Swamp-Ig /tests/components/sun/ @Swamp-Ig /homeassistant/components/sunweg/ @rokam @@ -1412,10 +1460,10 @@ build.json @home-assistant/supervisor /tests/components/switchbee/ @jafar-atili /homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski /tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski -/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland -/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland -/homeassistant/components/switcher_kis/ @thecode -/tests/components/switcher_kis/ @thecode +/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur +/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur +/homeassistant/components/switcher_kis/ @thecode @YogevBokobza +/tests/components/switcher_kis/ @thecode @YogevBokobza /homeassistant/components/switchmate/ @danielhiversen @qiz-li 
/homeassistant/components/syncthing/ @zhulik /tests/components/syncthing/ @zhulik @@ -1451,8 +1499,8 @@ build.json @home-assistant/supervisor /tests/components/tedee/ @patrickhilker @zweckj /homeassistant/components/tellduslive/ @fredrike /tests/components/tellduslive/ @fredrike -/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core -/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core +/homeassistant/components/template/ @PhracturedBlue @home-assistant/core +/tests/components/template/ @PhracturedBlue @home-assistant/core /homeassistant/components/tesla_fleet/ @Bre77 /tests/components/tesla_fleet/ @Bre77 /homeassistant/components/tesla_wall_connector/ @einarhauks @@ -1493,6 +1541,8 @@ build.json @home-assistant/supervisor /tests/components/tomorrowio/ @raman325 @lymanepp /homeassistant/components/totalconnect/ @austinmroczek /tests/components/totalconnect/ @austinmroczek +/homeassistant/components/touchline_sl/ @jnsgruk +/tests/components/touchline_sl/ @jnsgruk /homeassistant/components/tplink/ @rytilahti @bdraco @sdb9696 /tests/components/tplink/ @rytilahti @bdraco @sdb9696 /homeassistant/components/tplink_omada/ @MarkGodwin @@ -1517,6 +1567,8 @@ build.json @home-assistant/supervisor /tests/components/transmission/ @engrbm87 @JPHutchins /homeassistant/components/trend/ @jpbede /tests/components/trend/ @jpbede +/homeassistant/components/triggercmd/ @rvmey +/tests/components/triggercmd/ @rvmey /homeassistant/components/tts/ @home-assistant/core /tests/components/tts/ @home-assistant/core /homeassistant/components/tuya/ @Tuya @zlinoliver @frenck @@ -1533,6 +1585,8 @@ build.json @home-assistant/supervisor /tests/components/unifi/ @Kane610 /homeassistant/components/unifi_direct/ @tofuSCHNITZEL /homeassistant/components/unifiled/ @florisvdk +/homeassistant/components/unifiprotect/ @RaHehl +/tests/components/unifiprotect/ @RaHehl /homeassistant/components/upb/ @gwww /tests/components/upb/ @gwww /homeassistant/components/upc_connect/ @pvizeli @fabaff @@ -1600,6 +1654,8 @@ build.json @home-assistant/supervisor /tests/components/waqi/ @joostlek /homeassistant/components/water_heater/ @home-assistant/core /tests/components/water_heater/ @home-assistant/core +/homeassistant/components/watergate/ @adam-the-hero +/tests/components/watergate/ @adam-the-hero /homeassistant/components/watson_tts/ @rutkai /homeassistant/components/watttime/ @bachya /tests/components/watttime/ @bachya @@ -1621,6 +1677,8 @@ build.json @home-assistant/supervisor /tests/components/webostv/ @thecode /homeassistant/components/websocket_api/ @home-assistant/core /tests/components/websocket_api/ @home-assistant/core +/homeassistant/components/weheat/ @jesperraemaekers +/tests/components/weheat/ @jesperraemaekers /homeassistant/components/wemo/ @esev /tests/components/wemo/ @esev /homeassistant/components/whirlpool/ @abmantis @mkmer @@ -1638,6 +1696,8 @@ build.json @home-assistant/supervisor /tests/components/wiz/ @sbidy /homeassistant/components/wled/ @frenck /tests/components/wled/ @frenck +/homeassistant/components/wmspro/ @mback2k +/tests/components/wmspro/ @mback2k /homeassistant/components/wolflink/ @adamkrol93 @mtielen /tests/components/wolflink/ @adamkrol93 @mtielen /homeassistant/components/workday/ @fabaff @gjohansson-ST @@ -1658,6 +1718,8 @@ build.json @home-assistant/supervisor /tests/components/xiaomi_miio/ @rytilahti @syssi @starkillerOG /homeassistant/components/xiaomi_tv/ @simse /homeassistant/components/xmpp/ @fabaff @flowolf +/homeassistant/components/yale/ 
@bdraco +/tests/components/yale/ @bdraco /homeassistant/components/yale_smart_alarm/ @gjohansson-ST /tests/components/yale_smart_alarm/ @gjohansson-ST /homeassistant/components/yalexs_ble/ @bdraco diff --git a/Dockerfile b/Dockerfile index 7ead7bc7e4f..630fc19496c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,12 +7,13 @@ FROM ${BUILD_FROM} # Synchronize with homeassistant/core.py:async_stop ENV \ S6_SERVICES_GRACETIME=240000 \ - UV_SYSTEM_PYTHON=true + UV_SYSTEM_PYTHON=true \ + UV_NO_CACHE=true ARG QEMU_CPU # Install uv -RUN pip3 install uv==0.2.27 +RUN pip3 install uv==0.5.8 WORKDIR /usr/src @@ -29,15 +30,9 @@ RUN \ if ls homeassistant/home_assistant_*.whl 1> /dev/null 2>&1; then \ uv pip install homeassistant/home_assistant_*.whl; \ fi \ - && if [ "${BUILD_ARCH}" = "i386" ]; then \ - linux32 uv pip install \ - --no-build \ - -r homeassistant/requirements_all.txt; \ - else \ - uv pip install \ - --no-build \ - -r homeassistant/requirements_all.txt; \ - fi + && uv pip install \ + --no-build \ + -r homeassistant/requirements_all.txt ## Setup Home Assistant Core COPY . homeassistant/ @@ -50,4 +45,19 @@ RUN \ # Home Assistant S6-Overlay COPY rootfs / +# Needs to be redefined inside the FROM statement to be set for RUN commands +ARG BUILD_ARCH +# Get go2rtc binary +RUN \ + case "${BUILD_ARCH}" in \ + "aarch64") go2rtc_suffix='arm64' ;; \ + "armhf") go2rtc_suffix='armv6' ;; \ + "armv7") go2rtc_suffix='arm' ;; \ + *) go2rtc_suffix=${BUILD_ARCH} ;; \ + esac \ + && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ + && chmod +x /bin/go2rtc \ + # Verify go2rtc can be executed + && go2rtc --version + WORKDIR /config diff --git a/Dockerfile.dev b/Dockerfile.dev index d05c6df425c..5a3f1a2ae64 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/devcontainers/python:1-3.12 +FROM mcr.microsoft.com/devcontainers/python:1-3.13 SHELL ["/bin/bash", "-o", "pipefail", "-c"] @@ -35,6 +35,9 @@ RUN \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* +# Add go2rtc binary +COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc + # Install uv RUN pip3 install uv diff --git a/README.rst b/README.rst index 061b44a75f0..85c632f7eb1 100644 --- a/README.rst +++ b/README.rst @@ -7,8 +7,6 @@ Check out `home-assistant.io `__ for `a demo `__, `installation instructions `__, `tutorials `__ and `documentation `__. -This is a project of the `Open Home Foundation `__. - |screenshot-states| Featured integrations @@ -22,9 +20,14 @@ components If you run into issues while using Home Assistant or during development of a component, check the `Home Assistant help section `__ of our website for further help and information. +|ohf-logo| + .. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg :target: https://www.home-assistant.io/join-chat/ .. |screenshot-states| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-states.png :target: https://demo.home-assistant.io .. |screenshot-integrations| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-integrations.png :target: https://home-assistant.io/integrations/ +.. 
|ohf-logo| image:: https://www.openhomefoundation.org/badges/home-assistant.png + :alt: Home Assistant - A project from the Open Home Foundation + :target: https://www.openhomefoundation.org/ diff --git a/build.yaml b/build.yaml index 13618740ab8..fafdd876f75 100644 --- a/build.yaml +++ b/build.yaml @@ -1,10 +1,10 @@ image: ghcr.io/home-assistant/{arch}-homeassistant build_from: - aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1 - armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1 - armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1 - amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1 - i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1 + aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.1 + armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.1 + armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.1 + amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.1 + i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.1 codenotary: signer: notary@home-assistant.io base_image: notary@home-assistant.io diff --git a/homeassistant/__main__.py b/homeassistant/__main__.py index 4c870e94b24..b9d98832705 100644 --- a/homeassistant/__main__.py +++ b/homeassistant/__main__.py @@ -9,6 +9,7 @@ import os import sys import threading +from .backup_restore import restore_backup from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__ FAULT_LOG_FILENAME = "home-assistant.log.fault" @@ -182,6 +183,9 @@ def main() -> int: return scripts.run(args.script) config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config)) + if restore_backup(config_dir): + return RESTART_EXIT_CODE + ensure_config_path(config_dir) # pylint: disable-next=import-outside-toplevel diff --git a/homeassistant/auth/__init__.py b/homeassistant/auth/__init__.py index b74fd587fab..afe3b2d7aa3 100644 --- a/homeassistant/auth/__init__.py +++ b/homeassistant/auth/__init__.py @@ -12,7 +12,6 @@ from typing import Any, cast import jwt -from homeassistant import data_entry_flow from homeassistant.core import ( CALLBACK_TYPE, HassJob, @@ -20,13 +19,14 @@ from homeassistant.core import ( HomeAssistant, callback, ) +from homeassistant.data_entry_flow import FlowHandler, FlowManager, FlowResultType from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util import dt as dt_util from . 
import auth_store, jwt_wrapper, models from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN, REFRESH_TOKEN_EXPIRATION from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config -from .models import AuthFlowResult +from .models import AuthFlowContext, AuthFlowResult from .providers import AuthProvider, LoginFlow, auth_provider_from_config from .providers.homeassistant import HassAuthProvider @@ -98,7 +98,7 @@ async def auth_manager_from_config( class AuthManagerFlowManager( - data_entry_flow.FlowManager[AuthFlowResult, tuple[str, str]] + FlowManager[AuthFlowContext, AuthFlowResult, tuple[str, str]] ): """Manage authentication flows.""" @@ -113,9 +113,9 @@ class AuthManagerFlowManager( self, handler_key: tuple[str, str], *, - context: dict[str, Any] | None = None, + context: AuthFlowContext | None = None, data: dict[str, Any] | None = None, - ) -> LoginFlow: + ) -> LoginFlow[Any]: """Create a login flow.""" auth_provider = self.auth_manager.get_auth_provider(*handler_key) if not auth_provider: @@ -124,13 +124,17 @@ class AuthManagerFlowManager( async def async_finish_flow( self, - flow: data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]], + flow: FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]], result: AuthFlowResult, ) -> AuthFlowResult: - """Return a user as result of login flow.""" + """Return a user as result of login flow. + + This method is called when a flow step returns FlowResultType.ABORT or + FlowResultType.CREATE_ENTRY. + """ flow = cast(LoginFlow, flow) - if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: + if result["type"] != FlowResultType.CREATE_ENTRY: return result # we got final result diff --git a/homeassistant/auth/jwt_wrapper.py b/homeassistant/auth/jwt_wrapper.py index 3aa3ac63764..464df006f5f 100644 --- a/homeassistant/auth/jwt_wrapper.py +++ b/homeassistant/auth/jwt_wrapper.py @@ -18,7 +18,7 @@ from homeassistant.util.json import json_loads JWT_TOKEN_CACHE_SIZE = 16 MAX_TOKEN_SIZE = 8192 -_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss") +_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti") _VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | { "require": [] diff --git a/homeassistant/auth/mfa_modules/__init__.py b/homeassistant/auth/mfa_modules/__init__.py index d57a274c7ff..8a6430d770a 100644 --- a/homeassistant/auth/mfa_modules/__init__.py +++ b/homeassistant/auth/mfa_modules/__init__.py @@ -4,8 +4,9 @@ from __future__ import annotations import logging import types -from typing import Any +from typing import Any, Generic +from typing_extensions import TypeVar import voluptuous as vol from voluptuous.humanize import humanize_error @@ -34,6 +35,12 @@ DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed") _LOGGER = logging.getLogger(__name__) +_MultiFactorAuthModuleT = TypeVar( + "_MultiFactorAuthModuleT", + bound="MultiFactorAuthModule", + default="MultiFactorAuthModule", +) + class MultiFactorAuthModule: """Multi-factor Auth Module of validation function.""" @@ -71,7 +78,7 @@ class MultiFactorAuthModule: """Return a voluptuous schema to define mfa auth module's input.""" raise NotImplementedError - async def async_setup_flow(self, user_id: str) -> SetupFlow: + async def async_setup_flow(self, user_id: str) -> SetupFlow[Any]: """Return a data entry flow handler for setup module. 
Mfa module should extend SetupFlow @@ -95,11 +102,14 @@ class MultiFactorAuthModule: raise NotImplementedError -class SetupFlow(data_entry_flow.FlowHandler): +class SetupFlow(data_entry_flow.FlowHandler, Generic[_MultiFactorAuthModuleT]): """Handler for the setup flow.""" def __init__( - self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str + self, + auth_module: _MultiFactorAuthModuleT, + setup_schema: vol.Schema, + user_id: str, ) -> None: """Initialize the setup flow.""" self._auth_module = auth_module diff --git a/homeassistant/auth/mfa_modules/notify.py b/homeassistant/auth/mfa_modules/notify.py index d2010dc2c9d..b60a3012aac 100644 --- a/homeassistant/auth/mfa_modules/notify.py +++ b/homeassistant/auth/mfa_modules/notify.py @@ -162,7 +162,7 @@ class NotifyAuthModule(MultiFactorAuthModule): return sorted(unordered_services) - async def async_setup_flow(self, user_id: str) -> SetupFlow: + async def async_setup_flow(self, user_id: str) -> NotifySetupFlow: """Return a data entry flow handler for setup module. Mfa module should extend SetupFlow @@ -268,7 +268,7 @@ class NotifyAuthModule(MultiFactorAuthModule): await self.hass.services.async_call("notify", notify_service, data) -class NotifySetupFlow(SetupFlow): +class NotifySetupFlow(SetupFlow[NotifyAuthModule]): """Handler for the setup flow.""" def __init__( @@ -280,8 +280,6 @@ class NotifySetupFlow(SetupFlow): ) -> None: """Initialize the setup flow.""" super().__init__(auth_module, setup_schema, user_id) - # to fix typing complaint - self._auth_module: NotifyAuthModule = auth_module self._available_notify_services = available_notify_services self._secret: str | None = None self._count: int | None = None diff --git a/homeassistant/auth/mfa_modules/totp.py b/homeassistant/auth/mfa_modules/totp.py index e9055b45f05..625b273f39a 100644 --- a/homeassistant/auth/mfa_modules/totp.py +++ b/homeassistant/auth/mfa_modules/totp.py @@ -114,7 +114,7 @@ class TotpAuthModule(MultiFactorAuthModule): self._users[user_id] = ota_secret # type: ignore[index] return ota_secret - async def async_setup_flow(self, user_id: str) -> SetupFlow: + async def async_setup_flow(self, user_id: str) -> TotpSetupFlow: """Return a data entry flow handler for setup module. 
Mfa module should extend SetupFlow @@ -174,20 +174,19 @@ class TotpAuthModule(MultiFactorAuthModule): return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1)) -class TotpSetupFlow(SetupFlow): +class TotpSetupFlow(SetupFlow[TotpAuthModule]): """Handler for the setup flow.""" + _ota_secret: str + _url: str + _image: str + def __init__( self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User ) -> None: """Initialize the setup flow.""" super().__init__(auth_module, setup_schema, user.id) - # to fix typing complaint - self._auth_module: TotpAuthModule = auth_module self._user = user - self._ota_secret: str = "" - self._url: str | None = None - self._image: str | None = None async def async_step_init( self, user_input: dict[str, str] | None = None @@ -214,12 +213,11 @@ class TotpSetupFlow(SetupFlow): errors["base"] = "invalid_code" else: - hass = self._auth_module.hass ( self._ota_secret, self._url, self._image, - ) = await hass.async_add_executor_job( + ) = await self._auth_module.hass.async_add_executor_job( _generate_secret_and_qr_code, str(self._user.name), ) diff --git a/homeassistant/auth/models.py b/homeassistant/auth/models.py index 7192f6345e1..6f45dab2b36 100644 --- a/homeassistant/auth/models.py +++ b/homeassistant/auth/models.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import datetime, timedelta -from functools import cached_property +from ipaddress import IPv4Address, IPv6Address import secrets from typing import Any, NamedTuple import uuid @@ -11,9 +11,10 @@ import uuid import attr from attr import Attribute from attr.setters import validate +from propcache import cached_property from homeassistant.const import __version__ -from homeassistant.data_entry_flow import FlowResult +from homeassistant.data_entry_flow import FlowContext, FlowResult from homeassistant.util import dt as dt_util from . 
import permissions as perm_mdl @@ -23,7 +24,16 @@ TOKEN_TYPE_NORMAL = "normal" TOKEN_TYPE_SYSTEM = "system" TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token" -AuthFlowResult = FlowResult[tuple[str, str]] + +class AuthFlowContext(FlowContext, total=False): + """Typed context dict for auth flow.""" + + credential_only: bool + ip_address: IPv4Address | IPv6Address + redirect_uri: str + + +AuthFlowResult = FlowResult[AuthFlowContext, tuple[str, str]] @attr.s(slots=True) diff --git a/homeassistant/auth/providers/__init__.py b/homeassistant/auth/providers/__init__.py index debdd0b1a05..02f99e7bd71 100644 --- a/homeassistant/auth/providers/__init__.py +++ b/homeassistant/auth/providers/__init__.py @@ -5,14 +5,16 @@ from __future__ import annotations from collections.abc import Mapping import logging import types -from typing import Any +from typing import Any, Generic +from typing_extensions import TypeVar import voluptuous as vol from voluptuous.humanize import humanize_error -from homeassistant import data_entry_flow, requirements +from homeassistant import requirements from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE from homeassistant.core import HomeAssistant, callback +from homeassistant.data_entry_flow import FlowHandler from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.importlib import async_import_module from homeassistant.util import dt as dt_util @@ -21,7 +23,14 @@ from homeassistant.util.hass_dict import HassKey from ..auth_store import AuthStore from ..const import MFA_SESSION_EXPIRATION -from ..models import AuthFlowResult, Credentials, RefreshToken, User, UserMeta +from ..models import ( + AuthFlowContext, + AuthFlowResult, + Credentials, + RefreshToken, + User, + UserMeta, +) _LOGGER = logging.getLogger(__name__) DATA_REQS: HassKey[set[str]] = HassKey("auth_prov_reqs_processed") @@ -38,6 +47,8 @@ AUTH_PROVIDER_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) +_AuthProviderT = TypeVar("_AuthProviderT", bound="AuthProvider", default="AuthProvider") + class AuthProvider: """Provider of user authentication.""" @@ -97,7 +108,7 @@ class AuthProvider: # Implement by extending class - async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow[Any]: """Return the data flow for logging in with auth provider. Auth provider should extend LoginFlow and return an instance. 
@@ -184,12 +195,15 @@ async def load_auth_provider_module( return module -class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]): +class LoginFlow( + FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]], + Generic[_AuthProviderT], +): """Handler for the login flow.""" _flow_result = AuthFlowResult - def __init__(self, auth_provider: AuthProvider) -> None: + def __init__(self, auth_provider: _AuthProviderT) -> None: """Initialize the login flow.""" self._auth_provider = auth_provider self._auth_module_id: str | None = None diff --git a/homeassistant/auth/providers/command_line.py b/homeassistant/auth/providers/command_line.py index 43cde284a25..74630d925e1 100644 --- a/homeassistant/auth/providers/command_line.py +++ b/homeassistant/auth/providers/command_line.py @@ -6,14 +6,14 @@ import asyncio from collections.abc import Mapping import logging import os -from typing import Any, cast +from typing import Any import voluptuous as vol from homeassistant.const import CONF_COMMAND from homeassistant.exceptions import HomeAssistantError -from ..models import AuthFlowResult, Credentials, UserMeta +from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow CONF_ARGS = "args" @@ -59,7 +59,9 @@ class CommandLineAuthProvider(AuthProvider): super().__init__(*args, **kwargs) self._user_meta: dict[str, dict[str, Any]] = {} - async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow: + async def async_login_flow( + self, context: AuthFlowContext | None + ) -> CommandLineLoginFlow: """Return a flow to login.""" return CommandLineLoginFlow(self) @@ -133,7 +135,7 @@ class CommandLineAuthProvider(AuthProvider): ) -class CommandLineLoginFlow(LoginFlow): +class CommandLineLoginFlow(LoginFlow[CommandLineAuthProvider]): """Handler for the login flow.""" async def async_step_init( @@ -145,9 +147,9 @@ class CommandLineLoginFlow(LoginFlow): if user_input is not None: user_input["username"] = user_input["username"].strip() try: - await cast( - CommandLineAuthProvider, self._auth_provider - ).async_validate_login(user_input["username"], user_input["password"]) + await self._auth_provider.async_validate_login( + user_input["username"], user_input["password"] + ) except InvalidAuthError: errors["base"] = "invalid_auth" diff --git a/homeassistant/auth/providers/homeassistant.py b/homeassistant/auth/providers/homeassistant.py index ec39bdbdcdc..522e5d77a29 100644 --- a/homeassistant/auth/providers/homeassistant.py +++ b/homeassistant/auth/providers/homeassistant.py @@ -17,7 +17,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.storage import Store -from ..models import AuthFlowResult, Credentials, UserMeta +from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta from . 
import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow STORAGE_VERSION = 1 @@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider): await data.async_load() self.data = data - async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> HassLoginFlow: """Return a flow to login.""" return HassLoginFlow(self) @@ -400,7 +400,7 @@ class HassAuthProvider(AuthProvider): pass -class HassLoginFlow(LoginFlow): +class HassLoginFlow(LoginFlow[HassAuthProvider]): """Handler for the login flow.""" async def async_step_init( @@ -411,7 +411,7 @@ class HassLoginFlow(LoginFlow): if user_input is not None: try: - await cast(HassAuthProvider, self._auth_provider).async_validate_login( + await self._auth_provider.async_validate_login( user_input["username"], user_input["password"] ) except InvalidAuth: diff --git a/homeassistant/auth/providers/insecure_example.py b/homeassistant/auth/providers/insecure_example.py index 8bcf7569f5a..a92f5b55848 100644 --- a/homeassistant/auth/providers/insecure_example.py +++ b/homeassistant/auth/providers/insecure_example.py @@ -4,14 +4,13 @@ from __future__ import annotations from collections.abc import Mapping import hmac -from typing import Any, cast import voluptuous as vol from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError -from ..models import AuthFlowResult, Credentials, UserMeta +from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow USER_SCHEMA = vol.Schema( @@ -36,7 +35,9 @@ class InvalidAuthError(HomeAssistantError): class ExampleAuthProvider(AuthProvider): """Example auth provider based on hardcoded usernames and passwords.""" - async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow: + async def async_login_flow( + self, context: AuthFlowContext | None + ) -> ExampleLoginFlow: """Return a flow to login.""" return ExampleLoginFlow(self) @@ -93,7 +94,7 @@ class ExampleAuthProvider(AuthProvider): return UserMeta(name=name, is_active=True) -class ExampleLoginFlow(LoginFlow): +class ExampleLoginFlow(LoginFlow[ExampleAuthProvider]): """Handler for the login flow.""" async def async_step_init( @@ -104,7 +105,7 @@ class ExampleLoginFlow(LoginFlow): if user_input is not None: try: - cast(ExampleAuthProvider, self._auth_provider).async_validate_login( + self._auth_provider.async_validate_login( user_input["username"], user_input["password"] ) except InvalidAuthError: diff --git a/homeassistant/auth/providers/trusted_networks.py b/homeassistant/auth/providers/trusted_networks.py index 564633073fc..799fd4d2e16 100644 --- a/homeassistant/auth/providers/trusted_networks.py +++ b/homeassistant/auth/providers/trusted_networks.py @@ -25,7 +25,13 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.network import is_cloud_connection from .. import InvalidAuthError -from ..models import AuthFlowResult, Credentials, RefreshToken, UserMeta +from ..models import ( + AuthFlowContext, + AuthFlowResult, + Credentials, + RefreshToken, + UserMeta, +) from . 
import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow type IPAddress = IPv4Address | IPv6Address @@ -98,7 +104,9 @@ class TrustedNetworksAuthProvider(AuthProvider): """Trusted Networks auth provider does not support MFA.""" return False - async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow: + async def async_login_flow( + self, context: AuthFlowContext | None + ) -> TrustedNetworksLoginFlow: """Return a flow to login.""" assert context is not None ip_addr = cast(IPAddress, context.get("ip_address")) @@ -208,7 +216,7 @@ class TrustedNetworksAuthProvider(AuthProvider): self.async_validate_access(ip_address(remote_ip)) -class TrustedNetworksLoginFlow(LoginFlow): +class TrustedNetworksLoginFlow(LoginFlow[TrustedNetworksAuthProvider]): """Handler for the login flow.""" def __init__( @@ -229,9 +237,7 @@ class TrustedNetworksLoginFlow(LoginFlow): ) -> AuthFlowResult: """Handle the step of the form.""" try: - cast( - TrustedNetworksAuthProvider, self._auth_provider - ).async_validate_access(self._ip_address) + self._auth_provider.async_validate_access(self._ip_address) except InvalidAuthError: return self.async_abort(reason="not_allowed") diff --git a/homeassistant/backports/functools.py b/homeassistant/backports/functools.py index bad4236f9c8..1b032c65966 100644 --- a/homeassistant/backports/functools.py +++ b/homeassistant/backports/functools.py @@ -9,6 +9,7 @@ import it. from __future__ import annotations +# pylint: disable-next=hass-deprecated-import from functools import cached_property as _cached_property, partial from homeassistant.helpers.deprecation import ( diff --git a/homeassistant/backup_restore.py b/homeassistant/backup_restore.py new file mode 100644 index 00000000000..f9250e3129e --- /dev/null +++ b/homeassistant/backup_restore.py @@ -0,0 +1,181 @@ +"""Home Assistant module to handle restoring backups.""" + +from __future__ import annotations + +from collections.abc import Iterable +from dataclasses import dataclass +import hashlib +import json +import logging +from pathlib import Path +import shutil +import sys +from tempfile import TemporaryDirectory + +from awesomeversion import AwesomeVersion +import securetar + +from .const import __version__ as HA_VERSION + +RESTORE_BACKUP_FILE = ".HA_RESTORE" +KEEP_BACKUPS = ("backups",) +KEEP_DATABASE = ( + "home-assistant_v2.db", + "home-assistant_v2.db-wal", +) + + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class RestoreBackupFileContent: + """Definition for restore backup file content.""" + + backup_file_path: Path + password: str | None + remove_after_restore: bool + restore_database: bool + restore_homeassistant: bool + + +def password_to_key(password: str) -> bytes: + """Generate a AES Key from password. + + Matches the implementation in supervisor.backups.utils.password_to_key. 
+ """ + key: bytes = password.encode() + for _ in range(100): + key = hashlib.sha256(key).digest() + return key[:16] + + +def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None: + """Return the contents of the restore backup file.""" + instruction_path = config_dir.joinpath(RESTORE_BACKUP_FILE) + try: + instruction_content = json.loads(instruction_path.read_text(encoding="utf-8")) + return RestoreBackupFileContent( + backup_file_path=Path(instruction_content["path"]), + password=instruction_content["password"], + remove_after_restore=instruction_content["remove_after_restore"], + restore_database=instruction_content["restore_database"], + restore_homeassistant=instruction_content["restore_homeassistant"], + ) + except (FileNotFoundError, KeyError, json.JSONDecodeError): + return None + + +def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None: + """Delete all files and directories in the config directory except entries in the keep list.""" + keep_paths = [config_dir.joinpath(path) for path in keep] + entries_to_remove = sorted( + entry for entry in config_dir.iterdir() if entry not in keep_paths + ) + + for entry in entries_to_remove: + entrypath = config_dir.joinpath(entry) + + if entrypath.is_file(): + entrypath.unlink() + elif entrypath.is_dir(): + shutil.rmtree(entrypath) + + +def _extract_backup( + config_dir: Path, + restore_content: RestoreBackupFileContent, +) -> None: + """Extract the backup file to the config directory.""" + with ( + TemporaryDirectory() as tempdir, + securetar.SecureTarFile( + restore_content.backup_file_path, + gzip=False, + mode="r", + ) as ostf, + ): + ostf.extractall( + path=Path(tempdir, "extracted"), + members=securetar.secure_path(ostf), + filter="fully_trusted", + ) + backup_meta_file = Path(tempdir, "extracted", "backup.json") + backup_meta = json.loads(backup_meta_file.read_text(encoding="utf8")) + + if ( + backup_meta_version := AwesomeVersion( + backup_meta["homeassistant"]["version"] + ) + ) > HA_VERSION: + raise ValueError( + f"You need at least Home Assistant version {backup_meta_version} to restore this backup" + ) + + with securetar.SecureTarFile( + Path( + tempdir, + "extracted", + f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}", + ), + gzip=backup_meta["compressed"], + key=password_to_key(restore_content.password) + if restore_content.password is not None + else None, + mode="r", + ) as istf: + istf.extractall( + path=Path(tempdir, "homeassistant"), + members=securetar.secure_path(istf), + filter="fully_trusted", + ) + if restore_content.restore_homeassistant: + keep = list(KEEP_BACKUPS) + if not restore_content.restore_database: + keep.extend(KEEP_DATABASE) + _clear_configuration_directory(config_dir, keep) + shutil.copytree( + Path(tempdir, "homeassistant", "data"), + config_dir, + dirs_exist_ok=True, + ignore=shutil.ignore_patterns(*(keep)), + ) + elif restore_content.restore_database: + for entry in KEEP_DATABASE: + entrypath = config_dir / entry + + if entrypath.is_file(): + entrypath.unlink() + elif entrypath.is_dir(): + shutil.rmtree(entrypath) + + for entry in KEEP_DATABASE: + shutil.copy( + Path(tempdir, "homeassistant", "data", entry), + config_dir, + ) + + +def restore_backup(config_dir_path: str) -> bool: + """Restore the backup file if any. + + Returns True if a restore backup file was found and restored, False otherwise. 
+ """ + config_dir = Path(config_dir_path) + if not (restore_content := restore_backup_file_content(config_dir)): + return False + + logging.basicConfig(stream=sys.stdout, level=logging.INFO) + backup_file_path = restore_content.backup_file_path + _LOGGER.info("Restoring %s", backup_file_path) + try: + _extract_backup( + config_dir=config_dir, + restore_content=restore_content, + ) + except FileNotFoundError as err: + raise ValueError(f"Backup file {backup_file_path} does not exist") from err + if restore_content.remove_after_restore: + backup_file_path.unlink(missing_ok=True) + _LOGGER.info("Restore complete, restarting") + return True diff --git a/homeassistant/block_async_io.py b/homeassistant/block_async_io.py index 7a68b2515e9..767716dbe27 100644 --- a/homeassistant/block_async_io.py +++ b/homeassistant/block_async_io.py @@ -50,6 +50,12 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool: return False +def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool: + # If only cadata is passed, we can ignore it + kwargs = mapped_args.get("kwargs") + return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs) + + @dataclass(slots=True, frozen=True) class BlockingCall: """Class to hold information about a blocking call.""" @@ -158,7 +164,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = ( original_func=SSLContext.load_verify_locations, object=SSLContext, function="load_verify_locations", - check_allowed=None, + check_allowed=_check_load_verify_locations_call_allowed, strict=False, strict_core=False, skip_for_tests=True, diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 742a293e4c4..1034223051c 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -70,6 +70,7 @@ from .const import ( REQUIRED_NEXT_PYTHON_VER, SIGNAL_BOOTSTRAP_INTEGRATIONS, ) +from .core_config import async_process_ha_core_config from .exceptions import HomeAssistantError from .helpers import ( area_registry, @@ -479,7 +480,7 @@ async def async_from_config_dict( core_config = config.get(core.DOMAIN, {}) try: - await conf_util.async_process_ha_core_config(hass, core_config) + await async_process_ha_core_config(hass, core_config) except vol.Invalid as config_err: conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass) async_notify_setup_error(hass, core.DOMAIN) @@ -514,7 +515,7 @@ async def async_from_config_dict( issue_registry.async_create_issue( hass, core.DOMAIN, - "python_version", + f"python_version_{required_python_version}", is_fixable=False, severity=issue_registry.IssueSeverity.WARNING, breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE, diff --git a/homeassistant/brands/aqara.json b/homeassistant/brands/aqara.json new file mode 100644 index 00000000000..672a8350c63 --- /dev/null +++ b/homeassistant/brands/aqara.json @@ -0,0 +1,5 @@ +{ + "domain": "aqara", + "name": "Aqara", + "iot_standards": ["matter", "zigbee"] +} diff --git a/homeassistant/brands/google.json b/homeassistant/brands/google.json index 7c6ebc044e9..028fa544a5f 100644 --- a/homeassistant/brands/google.json +++ b/homeassistant/brands/google.json @@ -5,10 +5,10 @@ "google_assistant", "google_assistant_sdk", "google_cloud", - "google_domains", "google_generative_ai_conversation", "google_mail", "google_maps", + "google_photos", "google_pubsub", "google_sheets", "google_tasks", diff --git a/homeassistant/brands/husqvarna.json b/homeassistant/brands/husqvarna.json new file mode 100644 index 00000000000..a01eba75232 --- /dev/null +++ 
b/homeassistant/brands/husqvarna.json @@ -0,0 +1,5 @@ +{ + "domain": "husqvarna", + "name": "Husqvarna", + "integrations": ["husqvarna_automower", "husqvarna_automower_ble"] +} diff --git a/homeassistant/brands/lg.json b/homeassistant/brands/lg.json index 350db80b5f3..02bd58c0d1c 100644 --- a/homeassistant/brands/lg.json +++ b/homeassistant/brands/lg.json @@ -1,5 +1,5 @@ { "domain": "lg", "name": "LG", - "integrations": ["lg_netcast", "lg_soundbar", "webostv"] + "integrations": ["lg_netcast", "lg_soundbar", "lg_thinq", "webostv"] } diff --git a/homeassistant/brands/roth.json b/homeassistant/brands/roth.json new file mode 100644 index 00000000000..21542b5b641 --- /dev/null +++ b/homeassistant/brands/roth.json @@ -0,0 +1,5 @@ +{ + "domain": "roth", + "name": "Roth", + "integrations": ["touchline", "touchline_sl"] +} diff --git a/homeassistant/brands/sky.json b/homeassistant/brands/sky.json new file mode 100644 index 00000000000..3ab0cbbe5bd --- /dev/null +++ b/homeassistant/brands/sky.json @@ -0,0 +1,5 @@ +{ + "domain": "sky", + "name": "Sky", + "integrations": ["sky_hub", "sky_remote"] +} diff --git a/homeassistant/brands/slide.json b/homeassistant/brands/slide.json new file mode 100644 index 00000000000..808a54affc3 --- /dev/null +++ b/homeassistant/brands/slide.json @@ -0,0 +1,5 @@ +{ + "domain": "slide", + "name": "Slide", + "integrations": ["slide", "slide_local"] +} diff --git a/homeassistant/brands/yale.json b/homeassistant/brands/yale.json index 53dc9b43569..a0e7c6bd453 100644 --- a/homeassistant/brands/yale.json +++ b/homeassistant/brands/yale.json @@ -1,5 +1,11 @@ { "domain": "yale", "name": "Yale", - "integrations": ["august", "yale_smart_alarm", "yalexs_ble", "yale_home"] + "integrations": [ + "august", + "yale_smart_alarm", + "yalexs_ble", + "yale_home", + "yale" + ] } diff --git a/homeassistant/components/__init__.py b/homeassistant/components/__init__.py index 030e23628d6..d01f51c3951 100644 --- a/homeassistant/components/__init__.py +++ b/homeassistant/components/__init__.py @@ -6,52 +6,3 @@ Component design guidelines: format ".". - Each component should publish services only under its own domain. """ - -from __future__ import annotations - -import logging - -from homeassistant.core import HomeAssistant, split_entity_id -from homeassistant.helpers.frame import report -from homeassistant.helpers.group import expand_entity_ids - -_LOGGER = logging.getLogger(__name__) - - -def is_on(hass: HomeAssistant, entity_id: str | None = None) -> bool: - """Load up the module to call the is_on method. - - If there is no entity id given we will check all. - """ - report( - ( - "uses homeassistant.components.is_on." - " This is deprecated and will stop working in Home Assistant 2024.9, it" - " should be updated to use the function of the platform directly." 
- ), - error_if_core=True, - ) - - if entity_id: - entity_ids = expand_entity_ids(hass, [entity_id]) - else: - entity_ids = hass.states.entity_ids() - - for ent_id in entity_ids: - domain = split_entity_id(ent_id)[0] - - try: - component = getattr(hass.components, domain) - - except ImportError: - _LOGGER.error("Failed to call %s.is_on: component not found", domain) - continue - - if not hasattr(component, "is_on"): - _LOGGER.warning("Integration %s has no is_on method", domain) - continue - - if component.is_on(ent_id): - return True - - return False diff --git a/homeassistant/components/abode/__init__.py b/homeassistant/components/abode/__init__.py index a27eda2cf12..0542e362268 100644 --- a/homeassistant/components/abode/__init__.py +++ b/homeassistant/components/abode/__init__.py @@ -4,8 +4,10 @@ from __future__ import annotations from dataclasses import dataclass, field from functools import partial +from pathlib import Path from jaraco.abode.client import Client as Abode +import jaraco.abode.config from jaraco.abode.exceptions import ( AuthenticationException as AbodeAuthenticationException, Exception as AbodeException, @@ -93,6 +95,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: password = entry.data[CONF_PASSWORD] polling = entry.data[CONF_POLLING] + # Configure abode library to use config directory for storing data + jaraco.abode.config.paths.override(user_data=Path(hass.config.path("Abode"))) + # For previous config entries where unique_id is None if entry.unique_id is None: hass.config_entries.async_update_entry( diff --git a/homeassistant/components/abode/alarm_control_panel.py b/homeassistant/components/abode/alarm_control_panel.py index b58a4757785..4ec59ca4c39 100644 --- a/homeassistant/components/abode/alarm_control_panel.py +++ b/homeassistant/components/abode/alarm_control_panel.py @@ -7,13 +7,9 @@ from jaraco.abode.devices.alarm import Alarm from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -44,14 +40,14 @@ class AbodeAlarm(AbodeDevice, AlarmControlPanelEntity): _device: Alarm @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" if self._device.is_standby: - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED if self._device.is_away: - return STATE_ALARM_ARMED_AWAY + return AlarmControlPanelState.ARMED_AWAY if self._device.is_home: - return STATE_ALARM_ARMED_HOME + return AlarmControlPanelState.ARMED_HOME return None def alarm_disarm(self, code: str | None = None) -> None: diff --git a/homeassistant/components/abode/binary_sensor.py b/homeassistant/components/abode/binary_sensor.py index 0f1372dc8be..ca9679a5aaa 100644 --- a/homeassistant/components/abode/binary_sensor.py +++ b/homeassistant/components/abode/binary_sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import cast -from jaraco.abode.devices.sensor import BinarySensor +from jaraco.abode.devices.binary_sensor import BinarySensor from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, diff --git a/homeassistant/components/abode/config_flow.py 
b/homeassistant/components/abode/config_flow.py index 57cad604274..01b6c7f568f 100644 --- a/homeassistant/components/abode/config_flow.py +++ b/homeassistant/components/abode/config_flow.py @@ -102,15 +102,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN): existing_entry = await self.async_set_unique_id(self._username) if existing_entry: - self.hass.config_entries.async_update_entry( - existing_entry, data=config_data - ) - # Reload the Abode config entry otherwise devices will remain unavailable - self.hass.async_create_task( - self.hass.config_entries.async_reload(existing_entry.entry_id) - ) - - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(existing_entry, data=config_data) return self.async_create_entry( title=cast(str, self._username), data=config_data @@ -120,9 +112,6 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is None: return self.async_show_form( step_id="user", data_schema=vol.Schema(self.data_schema) diff --git a/homeassistant/components/abode/icons.json b/homeassistant/components/abode/icons.json index 00175628d9a..4ce4e55cab6 100644 --- a/homeassistant/components/abode/icons.json +++ b/homeassistant/components/abode/icons.json @@ -7,8 +7,14 @@ } }, "services": { - "capture_image": "mdi:camera", - "change_setting": "mdi:cog", - "trigger_automation": "mdi:play" + "capture_image": { + "service": "mdi:camera" + }, + "change_setting": { + "service": "mdi:cog" + }, + "trigger_automation": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/abode/light.py b/homeassistant/components/abode/light.py index d69aad80875..e2d0a331f0a 100644 --- a/homeassistant/components/abode/light.py +++ b/homeassistant/components/abode/light.py @@ -9,18 +9,16 @@ from jaraco.abode.devices.light import Light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, -) from . 
import AbodeSystem from .const import DOMAIN @@ -44,13 +42,13 @@ class AbodeLight(AbodeDevice, LightEntity): _device: Light _attr_name = None + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN def turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" - if ATTR_COLOR_TEMP in kwargs and self._device.is_color_capable: - self._device.set_color_temp( - int(color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP])) - ) + if ATTR_COLOR_TEMP_KELVIN in kwargs and self._device.is_color_capable: + self._device.set_color_temp(kwargs[ATTR_COLOR_TEMP_KELVIN]) return if ATTR_HS_COLOR in kwargs and self._device.is_color_capable: @@ -85,10 +83,10 @@ class AbodeLight(AbodeDevice, LightEntity): return None @property - def color_temp(self) -> int | None: + def color_temp_kelvin(self) -> int | None: """Return the color temp of the light.""" if self._device.has_color: - return color_temperature_kelvin_to_mired(self._device.color_temp) + return int(self._device.color_temp) return None @property diff --git a/homeassistant/components/abode/manifest.json b/homeassistant/components/abode/manifest.json index 225edea40ca..c1ffb9f699b 100644 --- a/homeassistant/components/abode/manifest.json +++ b/homeassistant/components/abode/manifest.json @@ -9,5 +9,6 @@ }, "iot_class": "cloud_push", "loggers": ["jaraco.abode", "lomond"], - "requirements": ["jaraco.abode==5.2.1"] + "requirements": ["jaraco.abode==6.2.1"], + "single_config_entry": true } diff --git a/homeassistant/components/abode/strings.json b/homeassistant/components/abode/strings.json index 4b98b69eb19..b3d57042754 100644 --- a/homeassistant/components/abode/strings.json +++ b/homeassistant/components/abode/strings.json @@ -28,7 +28,6 @@ "invalid_mfa_code": "Invalid MFA code" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, diff --git a/homeassistant/components/acaia/__init__.py b/homeassistant/components/acaia/__init__.py new file mode 100644 index 00000000000..44f21533e98 --- /dev/null +++ b/homeassistant/components/acaia/__init__.py @@ -0,0 +1,31 @@ +"""Initialize the Acaia component.""" + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import AcaiaConfigEntry, AcaiaCoordinator + +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.SENSOR, +] + + +async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool: + """Set up acaia as config entry.""" + + coordinator = AcaiaCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool: + """Unload a config entry.""" + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/acaia/binary_sensor.py b/homeassistant/components/acaia/binary_sensor.py new file mode 100644 index 00000000000..ecb7ac06eb5 --- /dev/null +++ b/homeassistant/components/acaia/binary_sensor.py @@ -0,0 +1,61 @@ +"""Binary sensor platform for Acaia scales.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from aioacaia.acaiascale import AcaiaScale + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + 
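The Abode light change above moves the entity from mired-based `ATTR_COLOR_TEMP` to kelvin-based `ATTR_COLOR_TEMP_KELVIN`, which is why the mired conversion helpers could be dropped. For reference, the two scales are simple reciprocals; a small, self-contained sketch of the conversion the old code had to perform (plain arithmetic, no Home Assistant imports, example values only):

# Mireds (micro reciprocal degrees) and kelvin are reciprocal scales:
# mired = 1_000_000 / kelvin, and vice versa. The old turn_on() converted the
# incoming mired value to kelvin before calling set_color_temp(); with the
# kelvin attribute the value can be passed straight through.
def mired_to_kelvin(mired: float) -> int:
    """Convert a color temperature in mireds to kelvin."""
    return int(1_000_000 / mired)


def kelvin_to_mired(kelvin: float) -> int:
    """Convert a color temperature in kelvin to mireds."""
    return int(1_000_000 / kelvin)


assert mired_to_kelvin(370) == 2702  # warm white, roughly 2700 K
assert kelvin_to_mired(6500) == 153  # cool daylight, roughly 153 mired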
BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import AcaiaConfigEntry +from .entity import AcaiaEntity + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +@dataclass(kw_only=True, frozen=True) +class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription): + """Description for Acaia binary sensor entities.""" + + is_on_fn: Callable[[AcaiaScale], bool] + + +BINARY_SENSORS: tuple[AcaiaBinarySensorEntityDescription, ...] = ( + AcaiaBinarySensorEntityDescription( + key="timer_running", + translation_key="timer_running", + device_class=BinarySensorDeviceClass.RUNNING, + is_on_fn=lambda scale: scale.timer_running, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AcaiaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up binary sensors.""" + + coordinator = entry.runtime_data + async_add_entities( + AcaiaBinarySensor(coordinator, description) for description in BINARY_SENSORS + ) + + +class AcaiaBinarySensor(AcaiaEntity, BinarySensorEntity): + """Representation of an Acaia binary sensor.""" + + entity_description: AcaiaBinarySensorEntityDescription + + @property + def is_on(self) -> bool: + """Return true if the binary sensor is on.""" + return self.entity_description.is_on_fn(self._scale) diff --git a/homeassistant/components/acaia/button.py b/homeassistant/components/acaia/button.py new file mode 100644 index 00000000000..a41233bfc17 --- /dev/null +++ b/homeassistant/components/acaia/button.py @@ -0,0 +1,63 @@ +"""Button entities for Acaia scales.""" + +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +from typing import Any + +from aioacaia.acaiascale import AcaiaScale + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import AcaiaConfigEntry +from .entity import AcaiaEntity + +PARALLEL_UPDATES = 0 + + +@dataclass(kw_only=True, frozen=True) +class AcaiaButtonEntityDescription(ButtonEntityDescription): + """Description for acaia button entities.""" + + press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]] + + +BUTTONS: tuple[AcaiaButtonEntityDescription, ...] 
= ( + AcaiaButtonEntityDescription( + key="tare", + translation_key="tare", + press_fn=lambda scale: scale.tare(), + ), + AcaiaButtonEntityDescription( + key="reset_timer", + translation_key="reset_timer", + press_fn=lambda scale: scale.reset_timer(), + ), + AcaiaButtonEntityDescription( + key="start_stop", + translation_key="start_stop", + press_fn=lambda scale: scale.start_stop_timer(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AcaiaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up button entities and services.""" + + coordinator = entry.runtime_data + async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS) + + +class AcaiaButton(AcaiaEntity, ButtonEntity): + """Representation of an Acaia button.""" + + entity_description: AcaiaButtonEntityDescription + + async def async_press(self) -> None: + """Handle the button press.""" + await self.entity_description.press_fn(self._scale) diff --git a/homeassistant/components/acaia/config_flow.py b/homeassistant/components/acaia/config_flow.py new file mode 100644 index 00000000000..fb2639fc886 --- /dev/null +++ b/homeassistant/components/acaia/config_flow.py @@ -0,0 +1,149 @@ +"""Config flow for Acaia integration.""" + +import logging +from typing import Any + +from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice +from aioacaia.helpers import is_new_scale +import voluptuous as vol + +from homeassistant.components.bluetooth import ( + BluetoothServiceInfoBleak, + async_discovered_service_info, +) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ADDRESS, CONF_NAME +from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) + +from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for acaia.""" + + def __init__(self) -> None: + """Initialize the config flow.""" + self._discovered: dict[str, Any] = {} + self._discovered_devices: dict[str, str] = {} + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + + errors: dict[str, str] = {} + + if user_input is not None: + mac = user_input[CONF_ADDRESS] + try: + is_new_style_scale = await is_new_scale(mac) + except AcaiaDeviceNotFound: + errors["base"] = "device_not_found" + except AcaiaError: + _LOGGER.exception("Error occurred while connecting to the scale") + errors["base"] = "unknown" + except AcaiaUnknownDevice: + return self.async_abort(reason="unsupported_device") + else: + await self.async_set_unique_id(format_mac(mac)) + self._abort_if_unique_id_configured() + + if not errors: + return self.async_create_entry( + title=self._discovered_devices[mac], + data={ + CONF_ADDRESS: mac, + CONF_IS_NEW_STYLE_SCALE: is_new_style_scale, + }, + ) + + for device in async_discovered_service_info(self.hass): + self._discovered_devices[device.address] = device.name + + if not self._discovered_devices: + return self.async_abort(reason="no_devices_found") + + options = [ + SelectOptionDict( + value=device_mac, + label=f"{device_name} ({device_mac})", + ) + for device_mac, device_name in self._discovered_devices.items() + ] + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( 
+ { + vol.Required(CONF_ADDRESS): SelectSelector( + SelectSelectorConfig( + options=options, + mode=SelectSelectorMode.DROPDOWN, + ) + ) + } + ), + errors=errors, + ) + + async def async_step_bluetooth( + self, discovery_info: BluetoothServiceInfoBleak + ) -> ConfigFlowResult: + """Handle a discovered Bluetooth device.""" + + self._discovered[CONF_ADDRESS] = discovery_info.address + self._discovered[CONF_NAME] = discovery_info.name + + await self.async_set_unique_id(format_mac(discovery_info.address)) + self._abort_if_unique_id_configured() + + try: + self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale( + discovery_info.address + ) + except AcaiaDeviceNotFound: + _LOGGER.debug("Device not found during discovery") + return self.async_abort(reason="device_not_found") + except AcaiaError: + _LOGGER.debug( + "Error occurred while connecting to the scale during discovery", + exc_info=True, + ) + return self.async_abort(reason="unknown") + except AcaiaUnknownDevice: + _LOGGER.debug("Unsupported device during discovery") + return self.async_abort(reason="unsupported_device") + + return await self.async_step_bluetooth_confirm() + + async def async_step_bluetooth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle confirmation of Bluetooth discovery.""" + + if user_input is not None: + return self.async_create_entry( + title=self._discovered[CONF_NAME], + data={ + CONF_ADDRESS: self._discovered[CONF_ADDRESS], + CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE], + }, + ) + + self.context["title_placeholders"] = placeholders = { + CONF_NAME: self._discovered[CONF_NAME] + } + + self._set_confirm_only() + return self.async_show_form( + step_id="bluetooth_confirm", + description_placeholders=placeholders, + ) diff --git a/homeassistant/components/acaia/const.py b/homeassistant/components/acaia/const.py new file mode 100644 index 00000000000..c603578763d --- /dev/null +++ b/homeassistant/components/acaia/const.py @@ -0,0 +1,4 @@ +"""Constants for component.""" + +DOMAIN = "acaia" +CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale" diff --git a/homeassistant/components/acaia/coordinator.py b/homeassistant/components/acaia/coordinator.py new file mode 100644 index 00000000000..bd915b42408 --- /dev/null +++ b/homeassistant/components/acaia/coordinator.py @@ -0,0 +1,86 @@ +"""Coordinator for Acaia integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from aioacaia.acaiascale import AcaiaScale +from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ADDRESS +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import CONF_IS_NEW_STYLE_SCALE + +SCAN_INTERVAL = timedelta(seconds=15) + +_LOGGER = logging.getLogger(__name__) + +type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator] + + +class AcaiaCoordinator(DataUpdateCoordinator[None]): + """Class to handle fetching data from the scale.""" + + config_entry: AcaiaConfigEntry + + def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + _LOGGER, + name="acaia coordinator", + update_interval=SCAN_INTERVAL, + config_entry=entry, + ) + + self._scale = AcaiaScale( + address_or_ble_device=entry.data[CONF_ADDRESS], + name=entry.title, + is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE], + 
notify_callback=self.async_update_listeners, + ) + + @property + def scale(self) -> AcaiaScale: + """Return the scale object.""" + return self._scale + + async def _async_update_data(self) -> None: + """Fetch data.""" + + # scale is already connected, return + if self._scale.connected: + return + + # scale is not connected, try to connect + try: + await self._scale.connect(setup_tasks=False) + except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex: + _LOGGER.debug( + "Could not connect to scale: %s, Error: %s", + self.config_entry.data[CONF_ADDRESS], + ex, + ) + self._scale.device_disconnected_handler(notify=False) + return + + # connected, set up background tasks + if not self._scale.heartbeat_task or self._scale.heartbeat_task.done(): + self._scale.heartbeat_task = self.config_entry.async_create_background_task( + hass=self.hass, + target=self._scale.send_heartbeats(), + name="acaia_heartbeat_task", + ) + + if not self._scale.process_queue_task or self._scale.process_queue_task.done(): + self._scale.process_queue_task = ( + self.config_entry.async_create_background_task( + hass=self.hass, + target=self._scale.process_queue(), + name="acaia_process_queue_task", + ) + ) diff --git a/homeassistant/components/acaia/diagnostics.py b/homeassistant/components/acaia/diagnostics.py new file mode 100644 index 00000000000..2d9f4511804 --- /dev/null +++ b/homeassistant/components/acaia/diagnostics.py @@ -0,0 +1,31 @@ +"""Diagnostics support for Acaia.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import AcaiaConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, + entry: AcaiaConfigEntry, +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + scale = coordinator.scale + + # collect all data sources + return { + "model": scale.model, + "device_state": ( + asdict(scale.device_state) if scale.device_state is not None else "" + ), + "mac": scale.mac, + "last_disconnect_time": scale.last_disconnect_time, + "timer": scale.timer, + "weight": scale.weight, + } diff --git a/homeassistant/components/acaia/entity.py b/homeassistant/components/acaia/entity.py new file mode 100644 index 00000000000..bef1ac313ca --- /dev/null +++ b/homeassistant/components/acaia/entity.py @@ -0,0 +1,46 @@ +"""Base class for Acaia entities.""" + +from dataclasses import dataclass + +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + DeviceInfo, + format_mac, +) +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import AcaiaCoordinator + + +@dataclass +class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]): + """Common elements for all entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: AcaiaCoordinator, + entity_description: EntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._scale = coordinator.scale + formatted_mac = format_mac(self._scale.mac) + self._attr_unique_id = f"{formatted_mac}_{entity_description.key}" + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, formatted_mac)}, + manufacturer="Acaia", + model=self._scale.model, + suggested_area="Kitchen", + connections={(CONNECTION_BLUETOOTH, self._scale.mac)}, + ) + + @property + def 
available(self) -> bool: + """Returns whether entity is available.""" + return super().available and self._scale.connected diff --git a/homeassistant/components/acaia/icons.json b/homeassistant/components/acaia/icons.json new file mode 100644 index 00000000000..59b316a36ce --- /dev/null +++ b/homeassistant/components/acaia/icons.json @@ -0,0 +1,24 @@ +{ + "entity": { + "binary_sensor": { + "timer_running": { + "default": "mdi:timer", + "state": { + "on": "mdi:timer-play", + "off": "mdi:timer-off" + } + } + }, + "button": { + "tare": { + "default": "mdi:scale-balance" + }, + "reset_timer": { + "default": "mdi:timer-refresh" + }, + "start_stop": { + "default": "mdi:timer-play" + } + } + } +} diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json new file mode 100644 index 00000000000..36551e9c695 --- /dev/null +++ b/homeassistant/components/acaia/manifest.json @@ -0,0 +1,30 @@ +{ + "domain": "acaia", + "name": "Acaia", + "bluetooth": [ + { + "manufacturer_id": 16962 + }, + { + "local_name": "ACAIA*" + }, + { + "local_name": "PYXIS-*" + }, + { + "local_name": "LUNAR-*" + }, + { + "local_name": "PROCHBT001" + } + ], + "codeowners": ["@zweckj"], + "config_flow": true, + "dependencies": ["bluetooth_adapters"], + "documentation": "https://www.home-assistant.io/integrations/acaia", + "integration_type": "device", + "iot_class": "local_push", + "loggers": ["aioacaia"], + "quality_scale": "platinum", + "requirements": ["aioacaia==0.1.11"] +} diff --git a/homeassistant/components/acaia/quality_scale.yaml b/homeassistant/components/acaia/quality_scale.yaml new file mode 100644 index 00000000000..62573e38799 --- /dev/null +++ b/homeassistant/components/acaia/quality_scale.yaml @@ -0,0 +1,106 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions are defined. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions are defined. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + No explicit event subscriptions. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: + status: exempt + comment: | + Device is expected to be offline most of the time, but needs to connect quickly once available. + unique-config-entry: done + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions are defined. + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: done + comment: | + Handled by coordinator. + parallel-updates: done + reauthentication-flow: + status: exempt + comment: | + No authentication required. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + No IP discovery. + discovery: + status: done + comment: | + Bluetooth discovery. + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + Device type integration. 
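The Acaia coordinator and entity base above follow a connection-centric pattern: the push-based scale delivers data via callbacks, so the coordinator's refresh only reconnects and keeps background tasks alive, and entities report unavailable whenever the Bluetooth link drops. A condensed sketch of that pattern; `BleClient` and its methods are hypothetical stand-ins, not the aioacaia API:

import asyncio


class BleClient:
    """Hypothetical push-based client (stand-in for the real scale object)."""

    def __init__(self) -> None:
        self.connected = False

    async def connect(self) -> None:
        self.connected = True

    async def send_heartbeats(self) -> None:
        while self.connected:
            await asyncio.sleep(1)


class ConnectionCoordinator:
    """Refresh cycle that only maintains the connection, as in the hunk above."""

    def __init__(self, client: BleClient) -> None:
        self._client = client
        self._heartbeat_task: asyncio.Task | None = None
        self.last_update_success = True  # what CoordinatorEntity.available checks

    async def _async_update_data(self) -> None:
        if self._client.connected:
            return  # already connected, nothing to fetch
        await self._client.connect()
        # (Re)start the background task if it is missing or has finished.
        if self._heartbeat_task is None or self._heartbeat_task.done():
            self._heartbeat_task = asyncio.create_task(self._client.send_heartbeats())

    @property
    def entity_available(self) -> bool:
        # Mirrors `super().available and self._scale.connected` in the entity.
        return self.last_update_success and self._client.connected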
+ entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: | + No noisy/non-essential entities. + entity-translations: done + exception-translations: + status: exempt + comment: | + No custom exceptions. + icon-translations: done + reconfiguration-flow: + status: exempt + comment: | + Only parameter that could be changed (MAC = unique_id) would force a new config entry. + repair-issues: + status: exempt + comment: | + No repairs/issues. + stale-devices: + status: exempt + comment: | + Device type integration. + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + Bluetooth connection. + strict-typing: done diff --git a/homeassistant/components/acaia/sensor.py b/homeassistant/components/acaia/sensor.py new file mode 100644 index 00000000000..7ba44958eca --- /dev/null +++ b/homeassistant/components/acaia/sensor.py @@ -0,0 +1,146 @@ +"""Sensor platform for Acaia.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from aioacaia.acaiascale import AcaiaDeviceState, AcaiaScale +from aioacaia.const import UnitMass as AcaiaUnitOfMass + +from homeassistant.components.sensor import ( + RestoreSensor, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorExtraStoredData, + SensorStateClass, +) +from homeassistant.const import PERCENTAGE, UnitOfMass, UnitOfVolumeFlowRate +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import AcaiaConfigEntry +from .entity import AcaiaEntity + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +@dataclass(kw_only=True, frozen=True) +class AcaiaSensorEntityDescription(SensorEntityDescription): + """Description for Acaia sensor entities.""" + + value_fn: Callable[[AcaiaScale], int | float | None] + + +@dataclass(kw_only=True, frozen=True) +class AcaiaDynamicUnitSensorEntityDescription(AcaiaSensorEntityDescription): + """Description for Acaia sensor entities with dynamic units.""" + + unit_fn: Callable[[AcaiaDeviceState], str] | None = None + + +SENSORS: tuple[AcaiaSensorEntityDescription, ...] = ( + AcaiaDynamicUnitSensorEntityDescription( + key="weight", + device_class=SensorDeviceClass.WEIGHT, + native_unit_of_measurement=UnitOfMass.GRAMS, + state_class=SensorStateClass.MEASUREMENT, + unit_fn=lambda data: ( + UnitOfMass.OUNCES + if data.units == AcaiaUnitOfMass.OUNCES + else UnitOfMass.GRAMS + ), + value_fn=lambda scale: scale.weight, + ), + AcaiaDynamicUnitSensorEntityDescription( + key="flow_rate", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + native_unit_of_measurement=UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND, + suggested_display_precision=1, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda scale: scale.flow_rate, + ), +) +RESTORE_SENSORS: tuple[AcaiaSensorEntityDescription, ...] 
= ( + AcaiaSensorEntityDescription( + key="battery", + device_class=SensorDeviceClass.BATTERY, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda scale: ( + scale.device_state.battery_level if scale.device_state else None + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AcaiaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up sensors.""" + + coordinator = entry.runtime_data + entities: list[SensorEntity] = [ + AcaiaSensor(coordinator, entity_description) for entity_description in SENSORS + ] + entities.extend( + AcaiaRestoreSensor(coordinator, entity_description) + for entity_description in RESTORE_SENSORS + ) + async_add_entities(entities) + + +class AcaiaSensor(AcaiaEntity, SensorEntity): + """Representation of an Acaia sensor.""" + + entity_description: AcaiaDynamicUnitSensorEntityDescription + + @property + def native_unit_of_measurement(self) -> str | None: + """Return the unit of measurement of this entity.""" + if ( + self._scale.device_state is not None + and self.entity_description.unit_fn is not None + ): + return self.entity_description.unit_fn(self._scale.device_state) + return self.entity_description.native_unit_of_measurement + + @property + def native_value(self) -> int | float | None: + """Return the state of the entity.""" + return self.entity_description.value_fn(self._scale) + + +class AcaiaRestoreSensor(AcaiaEntity, RestoreSensor): + """Representation of an Acaia sensor with restore capabilities.""" + + entity_description: AcaiaSensorEntityDescription + _restored_data: SensorExtraStoredData | None = None + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + + self._restored_data = await self.async_get_last_sensor_data() + if self._restored_data is not None: + self._attr_native_value = self._restored_data.native_value + self._attr_native_unit_of_measurement = ( + self._restored_data.native_unit_of_measurement + ) + + if self._scale.device_state is not None: + self._attr_native_value = self.entity_description.value_fn(self._scale) + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if self._scale.device_state is not None: + self._attr_native_value = self.entity_description.value_fn(self._scale) + self._async_write_ha_state() + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return super().available or self._restored_data is not None diff --git a/homeassistant/components/acaia/strings.json b/homeassistant/components/acaia/strings.json new file mode 100644 index 00000000000..e0e97b7c2ff --- /dev/null +++ b/homeassistant/components/acaia/strings.json @@ -0,0 +1,46 @@ +{ + "config": { + "flow_title": "{name}", + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "unsupported_device": "This device is not supported." 
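The battery sensor above is built on `RestoreSensor` so the last known level survives restarts, since the scale is expected to be disconnected most of the time (see the `test-before-setup` note in the quality scale). A trimmed sketch of that restore pattern; `async_get_last_sensor_data()` is the standard helper, while the `_live_value` attribute here is an illustrative stand-in for the coordinator data:

from homeassistant.components.sensor import RestoreSensor


class RestoringBatterySensor(RestoreSensor):
    """Keep showing the pre-restart battery level until live data arrives."""

    _live_value: int | None = None  # stand-in for data from the coordinator/scale

    async def async_added_to_hass(self) -> None:
        await super().async_added_to_hass()
        if (restored := await self.async_get_last_sensor_data()) is not None:
            # Fall back to the value stored before the restart.
            self._attr_native_value = restored.native_value
            self._attr_native_unit_of_measurement = restored.native_unit_of_measurement
        if self._live_value is not None:
            # Prefer fresh device data whenever it is already available.
            self._attr_native_value = self._live_value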
+ }, + "error": { + "device_not_found": "Device could not be found.", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "step": { + "bluetooth_confirm": { + "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]" + }, + "user": { + "description": "[%key:component::bluetooth::config::step::user::description%]", + "data": { + "address": "[%key:common::config_flow::data::device%]" + }, + "data_description": { + "address": "Select Acaia scale you want to set up" + } + } + } + }, + "entity": { + "binary_sensor": { + "timer_running": { + "name": "Timer running" + } + }, + "button": { + "tare": { + "name": "Tare" + }, + "reset_timer": { + "name": "Reset timer" + }, + "start_stop": { + "name": "Start/stop timer" + } + } + } +} diff --git a/homeassistant/components/accuweather/__init__.py b/homeassistant/components/accuweather/__init__.py index 3d52df765e6..c046933d5d5 100644 --- a/homeassistant/components/accuweather/__init__.py +++ b/homeassistant/components/accuweather/__init__.py @@ -2,13 +2,11 @@ from __future__ import annotations -from dataclasses import dataclass import logging from accuweather import AccuWeather from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -16,7 +14,9 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION from .coordinator import ( + AccuWeatherConfigEntry, AccuWeatherDailyForecastDataUpdateCoordinator, + AccuWeatherData, AccuWeatherObservationDataUpdateCoordinator, ) @@ -25,17 +25,6 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.SENSOR, Platform.WEATHER] -@dataclass -class AccuWeatherData: - """Data for AccuWeather integration.""" - - coordinator_observation: AccuWeatherObservationDataUpdateCoordinator - coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator - - -type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData] - - async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool: """Set up AccuWeather as config entry.""" api_key: str = entry.data[CONF_API_KEY] @@ -50,6 +39,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) coordinator_observation = AccuWeatherObservationDataUpdateCoordinator( hass, + entry, accuweather, name, "observation", @@ -58,6 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator( hass, + entry, accuweather, name, "daily forecast", diff --git a/homeassistant/components/accuweather/coordinator.py b/homeassistant/components/accuweather/coordinator.py index 26fadd6806c..40ff3ad2c87 100644 --- a/homeassistant/components/accuweather/coordinator.py +++ b/homeassistant/components/accuweather/coordinator.py @@ -1,6 +1,9 @@ """The AccuWeather coordinator.""" +from __future__ import annotations + from asyncio import timeout +from dataclasses import dataclass from datetime import timedelta import logging from typing import TYPE_CHECKING, Any @@ -8,6 +11,7 @@ from typing import TYPE_CHECKING, Any from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError from aiohttp.client_exceptions import ClientConnectorError +from 
homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.update_coordinator import ( @@ -23,6 +27,17 @@ EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceed _LOGGER = logging.getLogger(__name__) +@dataclass +class AccuWeatherData: + """Data for AccuWeather integration.""" + + coordinator_observation: AccuWeatherObservationDataUpdateCoordinator + coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator + + +type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData] + + class AccuWeatherObservationDataUpdateCoordinator( DataUpdateCoordinator[dict[str, Any]] ): @@ -31,6 +46,7 @@ class AccuWeatherObservationDataUpdateCoordinator( def __init__( self, hass: HomeAssistant, + config_entry: AccuWeatherConfigEntry, accuweather: AccuWeather, name: str, coordinator_type: str, @@ -48,6 +64,7 @@ class AccuWeatherObservationDataUpdateCoordinator( super().__init__( hass, _LOGGER, + config_entry=config_entry, name=f"{name} ({coordinator_type})", update_interval=update_interval, ) @@ -73,6 +90,7 @@ class AccuWeatherDailyForecastDataUpdateCoordinator( def __init__( self, hass: HomeAssistant, + config_entry: AccuWeatherConfigEntry, accuweather: AccuWeather, name: str, coordinator_type: str, @@ -90,6 +108,7 @@ class AccuWeatherDailyForecastDataUpdateCoordinator( super().__init__( hass, _LOGGER, + config_entry=config_entry, name=f"{name} ({coordinator_type})", update_interval=update_interval, ) diff --git a/homeassistant/components/accuweather/diagnostics.py b/homeassistant/components/accuweather/diagnostics.py index 85c06a6140a..9f35c47b886 100644 --- a/homeassistant/components/accuweather/diagnostics.py +++ b/homeassistant/components/accuweather/diagnostics.py @@ -8,7 +8,7 @@ from homeassistant.components.diagnostics import async_redact_data from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant -from . import AccuWeatherConfigEntry, AccuWeatherData +from .coordinator import AccuWeatherConfigEntry, AccuWeatherData TO_REDACT = {CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE} diff --git a/homeassistant/components/accuweather/manifest.json b/homeassistant/components/accuweather/manifest.json index 24a8180eef8..75f4a265b5f 100644 --- a/homeassistant/components/accuweather/manifest.json +++ b/homeassistant/components/accuweather/manifest.json @@ -7,7 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["accuweather"], - "quality_scale": "platinum", - "requirements": ["accuweather==3.0.0"], + "requirements": ["accuweather==4.0.0"], "single_config_entry": true } diff --git a/homeassistant/components/accuweather/sensor.py b/homeassistant/components/accuweather/sensor.py index fac3a2a4ba3..001edc5f197 100644 --- a/homeassistant/components/accuweather/sensor.py +++ b/homeassistant/components/accuweather/sensor.py @@ -18,6 +18,7 @@ from homeassistant.const import ( UV_INDEX, UnitOfIrradiance, UnitOfLength, + UnitOfPressure, UnitOfSpeed, UnitOfTemperature, UnitOfTime, @@ -27,7 +28,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . 
import AccuWeatherConfigEntry from .const import ( API_METRIC, ATTR_CATEGORY, @@ -40,6 +40,7 @@ from .const import ( MAX_FORECAST_DAYS, ) from .coordinator import ( + AccuWeatherConfigEntry, AccuWeatherDailyForecastDataUpdateCoordinator, AccuWeatherObservationDataUpdateCoordinator, ) @@ -279,6 +280,15 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]), translation_key="realfeel_temperature_shade", ), + AccuWeatherSensorDescription( + key="RelativeHumidity", + device_class=SensorDeviceClass.HUMIDITY, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: cast(int, data), + translation_key="humidity", + ), AccuWeatherSensorDescription( key="Precipitation", device_class=SensorDeviceClass.PRECIPITATION_INTENSITY, @@ -288,6 +298,16 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( attr_fn=lambda data: {"type": data["PrecipitationType"]}, translation_key="precipitation", ), + AccuWeatherSensorDescription( + key="Pressure", + device_class=SensorDeviceClass.PRESSURE, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + native_unit_of_measurement=UnitOfPressure.HPA, + value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]), + translation_key="pressure", + ), AccuWeatherSensorDescription( key="PressureTendency", device_class=SensorDeviceClass.ENUM, @@ -295,9 +315,19 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( value_fn=lambda data: cast(str, data["LocalizedText"]).lower(), translation_key="pressure_tendency", ), + AccuWeatherSensorDescription( + key="Temperature", + device_class=SensorDeviceClass.TEMPERATURE, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]), + translation_key="temperature", + ), AccuWeatherSensorDescription( key="UVIndex", state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, native_unit_of_measurement=UV_INDEX, value_fn=lambda data: cast(int, data), attr_fn=lambda data: {ATTR_LEVEL: data["UVIndexText"]}, @@ -324,6 +354,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( AccuWeatherSensorDescription( key="Wind", device_class=SensorDeviceClass.WIND_SPEED, + entity_registry_enabled_default=False, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR, value_fn=lambda data: cast(float, data[ATTR_SPEED][API_METRIC][ATTR_VALUE]), diff --git a/homeassistant/components/accuweather/system_health.py b/homeassistant/components/accuweather/system_health.py index eab16498248..f5efaf3079f 100644 --- a/homeassistant/components/accuweather/system_health.py +++ b/homeassistant/components/accuweather/system_health.py @@ -9,8 +9,8 @@ from accuweather.const import ENDPOINT from homeassistant.components import system_health from homeassistant.core import HomeAssistant, callback -from . 
import AccuWeatherConfigEntry from .const import DOMAIN +from .coordinator import AccuWeatherConfigEntry @callback diff --git a/homeassistant/components/accuweather/weather.py b/homeassistant/components/accuweather/weather.py index 72d717f2703..7d754278d91 100644 --- a/homeassistant/components/accuweather/weather.py +++ b/homeassistant/components/accuweather/weather.py @@ -33,7 +33,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import utc_from_timestamp -from . import AccuWeatherConfigEntry, AccuWeatherData from .const import ( API_METRIC, ATTR_DIRECTION, @@ -43,7 +42,9 @@ from .const import ( CONDITION_MAP, ) from .coordinator import ( + AccuWeatherConfigEntry, AccuWeatherDailyForecastDataUpdateCoordinator, + AccuWeatherData, AccuWeatherObservationDataUpdateCoordinator, ) diff --git a/homeassistant/components/acer_projector/manifest.json b/homeassistant/components/acer_projector/manifest.json index 58a2372e42a..026374bf53d 100644 --- a/homeassistant/components/acer_projector/manifest.json +++ b/homeassistant/components/acer_projector/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/acer_projector", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pyserial==3.5"] } diff --git a/homeassistant/components/acmeda/__init__.py b/homeassistant/components/acmeda/__init__.py index d6491767dcc..62a62795a05 100644 --- a/homeassistant/components/acmeda/__init__.py +++ b/homeassistant/components/acmeda/__init__.py @@ -3,6 +3,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er from .hub import PulseHub @@ -17,6 +18,9 @@ async def async_setup_entry( hass: HomeAssistant, config_entry: AcmedaConfigEntry ) -> bool: """Set up Rollease Acmeda Automate hub from a config entry.""" + + await _migrate_unique_ids(hass, config_entry) + hub = PulseHub(hass, config_entry) if not await hub.async_setup(): @@ -28,6 +32,19 @@ async def async_setup_entry( return True +async def _migrate_unique_ids(hass: HomeAssistant, entry: AcmedaConfigEntry) -> None: + """Migrate pre-config flow unique ids.""" + entity_registry = er.async_get(hass) + registry_entries = er.async_entries_for_config_entry( + entity_registry, entry.entry_id + ) + for reg_entry in registry_entries: + if isinstance(reg_entry.unique_id, int): # type: ignore[unreachable] + entity_registry.async_update_entity( # type: ignore[unreachable] + reg_entry.entity_id, new_unique_id=str(reg_entry.unique_id) + ) + + async def async_unload_entry( hass: HomeAssistant, config_entry: AcmedaConfigEntry ) -> bool: diff --git a/homeassistant/components/acmeda/cover.py b/homeassistant/components/acmeda/cover.py index d96675de10c..77099e86adc 100644 --- a/homeassistant/components/acmeda/cover.py +++ b/homeassistant/components/acmeda/cover.py @@ -14,8 +14,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AcmedaConfigEntry -from .base import AcmedaBase from .const import ACMEDA_HUB_UPDATE +from .entity import AcmedaEntity from .helpers import async_add_acmeda_entities @@ -44,7 +44,7 @@ async def async_setup_entry( ) -class AcmedaCover(AcmedaBase, CoverEntity): +class AcmedaCover(AcmedaEntity, CoverEntity): """Representation of an Acmeda cover device.""" _attr_name = None diff --git a/homeassistant/components/acmeda/base.py b/homeassistant/components/acmeda/entity.py similarity index 96% rename from homeassistant/components/acmeda/base.py rename to homeassistant/components/acmeda/entity.py index 7596374684d..63432886b4d 100644 --- a/homeassistant/components/acmeda/base.py +++ b/homeassistant/components/acmeda/entity.py @@ -11,7 +11,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from .const import ACMEDA_ENTITY_REMOVE, DOMAIN, LOGGER -class AcmedaBase(entity.Entity): +class AcmedaEntity(entity.Entity): """Base representation of an Acmeda roller.""" _attr_should_poll = False @@ -67,7 +67,7 @@ class AcmedaBase(entity.Entity): @property def unique_id(self) -> str: """Return the unique ID of this roller.""" - return self.roller.id # type: ignore[no-any-return] + return str(self.roller.id) @property def device_id(self) -> str: diff --git a/homeassistant/components/acmeda/manifest.json b/homeassistant/components/acmeda/manifest.json index a8b3c7c829f..0c35904cac6 100644 --- a/homeassistant/components/acmeda/manifest.json +++ b/homeassistant/components/acmeda/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/acmeda", "iot_class": "local_push", "loggers": ["aiopulse"], - "requirements": ["aiopulse==0.4.4"] + "requirements": ["aiopulse==0.4.6"] } diff --git a/homeassistant/components/acmeda/sensor.py b/homeassistant/components/acmeda/sensor.py index be9f37b03dc..f5df1bf013d 100644 --- a/homeassistant/components/acmeda/sensor.py +++ b/homeassistant/components/acmeda/sensor.py @@ -9,8 +9,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AcmedaConfigEntry -from .base import AcmedaBase from .const import ACMEDA_HUB_UPDATE +from .entity import AcmedaEntity from .helpers import async_add_acmeda_entities @@ -39,7 +39,7 @@ async def async_setup_entry( ) -class AcmedaBattery(AcmedaBase, SensorEntity): +class AcmedaBattery(AcmedaEntity, SensorEntity): """Representation of an Acmeda cover sensor.""" _attr_device_class = SensorDeviceClass.BATTERY diff --git a/homeassistant/components/actiontec/device_tracker.py b/homeassistant/components/actiontec/device_tracker.py index 8cab6552857..b1b9c81c674 100644 --- a/homeassistant/components/actiontec/device_tracker.py +++ b/homeassistant/components/actiontec/device_tracker.py @@ -9,7 +9,7 @@ from typing import Final import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -36,7 +36,7 @@ def get_scanner( hass: HomeAssistant, config: ConfigType ) -> ActiontecDeviceScanner | None: """Validate the configuration and return an Actiontec scanner.""" - scanner = ActiontecDeviceScanner(config[DOMAIN]) + scanner = ActiontecDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None @@ -51,7 +51,6 @@ class ActiontecDeviceScanner(DeviceScanner): self.last_results: list[Device] = [] data = self.get_actiontec_data() self.success_init = data is not None - _LOGGER.info("Scanner initialized") def scan_devices(self) -> list[str]: """Scan for new devices and return a list with found device IDs.""" @@ -70,7 +69,7 @@ class ActiontecDeviceScanner(DeviceScanner): Return boolean if scanning successful. """ - _LOGGER.info("Scanning") + _LOGGER.debug("Scanning") if not self.success_init: return False @@ -79,7 +78,7 @@ class ActiontecDeviceScanner(DeviceScanner): self.last_results = [ device for device in actiontec_data if device.timevalid > -60 ] - _LOGGER.info("Scan successful") + _LOGGER.debug("Scan successful") return True def get_actiontec_data(self) -> list[Device] | None: diff --git a/homeassistant/components/actiontec/manifest.json b/homeassistant/components/actiontec/manifest.json index ff9cf85614f..e7aa33f1baf 100644 --- a/homeassistant/components/actiontec/manifest.json +++ b/homeassistant/components/actiontec/manifest.json @@ -3,5 +3,6 @@ "name": "Actiontec", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/actiontec", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/adax/climate.py b/homeassistant/components/adax/climate.py index ac381ff46d5..15022ba3c9f 100644 --- a/homeassistant/components/adax/climate.py +++ b/homeassistant/components/adax/climate.py @@ -75,7 +75,6 @@ class AdaxDevice(ClimateEntity): ) _attr_target_temperature_step = PRECISION_WHOLE _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None: """Initialize the heater.""" diff --git a/homeassistant/components/adax/config_flow.py b/homeassistant/components/adax/config_flow.py index 3e8ca646cad..0a995fc6b85 100644 --- a/homeassistant/components/adax/config_flow.py +++ b/homeassistant/components/adax/config_flow.py @@ -130,7 +130,7 @@ class AdaxConfigFlow(ConfigFlow, domain=DOMAIN): async_get_clientsession(self.hass), account_id, password ) if token is None: - _LOGGER.info("Adax: Failed to login to retrieve token") + 
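The Acmeda hunks a little further up add a one-time `_migrate_unique_ids` pass because pre-config-flow registry entries stored integer unique IDs, while the entity now returns `str(self.roller.id)`. A minimal sketch of that registry-migration idea; the entity-registry helpers used are public APIs, but the wrapper function itself is illustrative:

# Walk every registry entry owned by a config entry and rewrite unique IDs
# that were stored with the wrong type, mirroring the acmeda migration above.
import homeassistant.helpers.entity_registry as er
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def migrate_int_unique_ids(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Rewrite integer unique IDs to strings for one config entry."""
    registry = er.async_get(hass)
    for reg_entry in er.async_entries_for_config_entry(registry, entry.entry_id):
        if isinstance(reg_entry.unique_id, int):
            registry.async_update_entity(
                reg_entry.entity_id, new_unique_id=str(reg_entry.unique_id)
            )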
_LOGGER.debug("Adax: Failed to login to retrieve token") errors["base"] = "cannot_connect" return self.async_show_form( step_id="cloud", diff --git a/homeassistant/components/adguard/config_flow.py b/homeassistant/components/adguard/config_flow.py index c07967ec2c5..6fd50967c22 100644 --- a/homeassistant/components/adguard/config_flow.py +++ b/homeassistant/components/adguard/config_flow.py @@ -7,7 +7,6 @@ from typing import Any from adguardhome import AdGuardHome, AdGuardHomeConnectionError import voluptuous as vol -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_HOST, @@ -18,6 +17,7 @@ from homeassistant.const import ( CONF_VERIFY_SSL, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import DOMAIN diff --git a/homeassistant/components/adguard/icons.json b/homeassistant/components/adguard/icons.json index 9c5df8a4a45..18527c0ed98 100644 --- a/homeassistant/components/adguard/icons.json +++ b/homeassistant/components/adguard/icons.json @@ -66,10 +66,20 @@ } }, "services": { - "add_url": "mdi:link-plus", - "remove_url": "mdi:link-off", - "enable_url": "mdi:link-variant", - "disable_url": "mdi:link-variant-off", - "refresh": "mdi:refresh" + "add_url": { + "service": "mdi:link-plus" + }, + "remove_url": { + "service": "mdi:link-off" + }, + "enable_url": { + "service": "mdi:link-variant" + }, + "disable_url": { + "service": "mdi:link-variant-off" + }, + "refresh": { + "service": "mdi:refresh" + } } } diff --git a/homeassistant/components/ads/__init__.py b/homeassistant/components/ads/__init__.py index f5742718b12..892390a91eb 100644 --- a/homeassistant/components/ads/__init__.py +++ b/homeassistant/components/ads/__init__.py @@ -1,12 +1,6 @@ """Support for Automation Device Specification (ADS).""" -import asyncio -from asyncio import timeout -from collections import namedtuple -import ctypes import logging -import struct -import threading import pyads import voluptuous as vol @@ -19,42 +13,38 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType +from .const import CONF_ADS_VAR, DATA_ADS, DOMAIN, AdsType +from .hub import AdsHub + _LOGGER = logging.getLogger(__name__) -DATA_ADS = "data_ads" - -# Supported Types -ADSTYPE_BOOL = "bool" -ADSTYPE_BYTE = "byte" -ADSTYPE_DINT = "dint" -ADSTYPE_INT = "int" -ADSTYPE_UDINT = "udint" -ADSTYPE_UINT = "uint" ADS_TYPEMAP = { - ADSTYPE_BOOL: pyads.PLCTYPE_BOOL, - ADSTYPE_BYTE: pyads.PLCTYPE_BYTE, - ADSTYPE_DINT: pyads.PLCTYPE_DINT, - ADSTYPE_INT: pyads.PLCTYPE_INT, - ADSTYPE_UDINT: pyads.PLCTYPE_UDINT, - ADSTYPE_UINT: pyads.PLCTYPE_UINT, + AdsType.BOOL: pyads.PLCTYPE_BOOL, + AdsType.BYTE: pyads.PLCTYPE_BYTE, + AdsType.INT: pyads.PLCTYPE_INT, + AdsType.UINT: pyads.PLCTYPE_UINT, + AdsType.SINT: pyads.PLCTYPE_SINT, + AdsType.USINT: pyads.PLCTYPE_USINT, + AdsType.DINT: pyads.PLCTYPE_DINT, + AdsType.UDINT: pyads.PLCTYPE_UDINT, + AdsType.WORD: pyads.PLCTYPE_WORD, + AdsType.DWORD: pyads.PLCTYPE_DWORD, + AdsType.REAL: pyads.PLCTYPE_REAL, + AdsType.LREAL: pyads.PLCTYPE_LREAL, + AdsType.STRING: pyads.PLCTYPE_STRING, + AdsType.TIME: pyads.PLCTYPE_TIME, + AdsType.DATE: pyads.PLCTYPE_DATE, + AdsType.DATE_AND_TIME: pyads.PLCTYPE_DT, + AdsType.TOD: 
pyads.PLCTYPE_TOD, } CONF_ADS_FACTOR = "factor" CONF_ADS_TYPE = "adstype" CONF_ADS_VALUE = "value" -CONF_ADS_VAR = "adsvar" -CONF_ADS_VAR_BRIGHTNESS = "adsvar_brightness" -CONF_ADS_VAR_POSITION = "adsvar_position" -STATE_KEY_STATE = "state" -STATE_KEY_BRIGHTNESS = "brightness" -STATE_KEY_POSITION = "position" - -DOMAIN = "ads" SERVICE_WRITE_DATA_BY_NAME = "write_data_by_name" @@ -73,16 +63,7 @@ CONFIG_SCHEMA = vol.Schema( SCHEMA_SERVICE_WRITE_DATA_BY_NAME = vol.Schema( { - vol.Required(CONF_ADS_TYPE): vol.In( - [ - ADSTYPE_INT, - ADSTYPE_UINT, - ADSTYPE_BYTE, - ADSTYPE_BOOL, - ADSTYPE_DINT, - ADSTYPE_UDINT, - ] - ), + vol.Required(CONF_ADS_TYPE): vol.Coerce(AdsType), vol.Required(CONF_ADS_VALUE): vol.Coerce(int), vol.Required(CONF_ADS_VAR): cv.string, } @@ -116,9 +97,9 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: def handle_write_data_by_name(call: ServiceCall) -> None: """Write a value to the connected ADS device.""" - ads_var = call.data[CONF_ADS_VAR] - ads_type = call.data[CONF_ADS_TYPE] - value = call.data[CONF_ADS_VALUE] + ads_var: str = call.data[CONF_ADS_VAR] + ads_type: AdsType = call.data[CONF_ADS_TYPE] + value: int = call.data[CONF_ADS_VALUE] try: ads.write_by_name(ads_var, value, ADS_TYPEMAP[ads_type]) @@ -133,181 +114,3 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: ) return True - - -# Tuple to hold data needed for notification -NotificationItem = namedtuple( # noqa: PYI024 - "NotificationItem", "hnotify huser name plc_datatype callback" -) - - -class AdsHub: - """Representation of an ADS connection.""" - - def __init__(self, ads_client): - """Initialize the ADS hub.""" - self._client = ads_client - self._client.open() - - # All ADS devices are registered here - self._devices = [] - self._notification_items = {} - self._lock = threading.Lock() - - def shutdown(self, *args, **kwargs): - """Shutdown ADS connection.""" - - _LOGGER.debug("Shutting down ADS") - for notification_item in self._notification_items.values(): - _LOGGER.debug( - "Deleting device notification %d, %d", - notification_item.hnotify, - notification_item.huser, - ) - try: - self._client.del_device_notification( - notification_item.hnotify, notification_item.huser - ) - except pyads.ADSError as err: - _LOGGER.error(err) - try: - self._client.close() - except pyads.ADSError as err: - _LOGGER.error(err) - - def register_device(self, device): - """Register a new device.""" - self._devices.append(device) - - def write_by_name(self, name, value, plc_datatype): - """Write a value to the device.""" - - with self._lock: - try: - return self._client.write_by_name(name, value, plc_datatype) - except pyads.ADSError as err: - _LOGGER.error("Error writing %s: %s", name, err) - - def read_by_name(self, name, plc_datatype): - """Read a value from the device.""" - - with self._lock: - try: - return self._client.read_by_name(name, plc_datatype) - except pyads.ADSError as err: - _LOGGER.error("Error reading %s: %s", name, err) - - def add_device_notification(self, name, plc_datatype, callback): - """Add a notification to the ADS devices.""" - - attr = pyads.NotificationAttrib(ctypes.sizeof(plc_datatype)) - - with self._lock: - try: - hnotify, huser = self._client.add_device_notification( - name, attr, self._device_notification_callback - ) - except pyads.ADSError as err: - _LOGGER.error("Error subscribing to %s: %s", name, err) - else: - hnotify = int(hnotify) - self._notification_items[hnotify] = NotificationItem( - hnotify, huser, name, plc_datatype, callback - ) - - _LOGGER.debug( - "Added 
device notification %d for variable %s", hnotify, name - ) - - def _device_notification_callback(self, notification, name): - """Handle device notifications.""" - contents = notification.contents - - hnotify = int(contents.hNotification) - _LOGGER.debug("Received notification %d", hnotify) - - # get dynamically sized data array - data_size = contents.cbSampleSize - data = (ctypes.c_ubyte * data_size).from_address( - ctypes.addressof(contents) - + pyads.structs.SAdsNotificationHeader.data.offset - ) - - try: - with self._lock: - notification_item = self._notification_items[hnotify] - except KeyError: - _LOGGER.error("Unknown device notification handle: %d", hnotify) - return - - # Parse data to desired datatype - if notification_item.plc_datatype == pyads.PLCTYPE_BOOL: - value = bool(struct.unpack(" bool: - """Return False if state has not been updated yet.""" - return self._state_dict[STATE_KEY_STATE] is not None diff --git a/homeassistant/components/ads/binary_sensor.py b/homeassistant/components/ads/binary_sensor.py index 6ee17e07f0f..72a12506dc1 100644 --- a/homeassistant/components/ads/binary_sensor.py +++ b/homeassistant/components/ads/binary_sensor.py @@ -17,7 +17,9 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE, AdsEntity +from .const import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE +from .entity import AdsEntity +from .hub import AdsHub DEFAULT_NAME = "ADS binary sensor" PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( @@ -36,11 +38,11 @@ def setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Binary Sensor platform for ADS.""" - ads_hub = hass.data.get(DATA_ADS) + ads_hub = hass.data[DATA_ADS] - ads_var = config[CONF_ADS_VAR] - name = config[CONF_NAME] - device_class = config.get(CONF_DEVICE_CLASS) + ads_var: str = config[CONF_ADS_VAR] + name: str = config[CONF_NAME] + device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS) ads_sensor = AdsBinarySensor(ads_hub, name, ads_var, device_class) add_entities([ads_sensor]) @@ -49,7 +51,13 @@ def setup_platform( class AdsBinarySensor(AdsEntity, BinarySensorEntity): """Representation of ADS binary sensors.""" - def __init__(self, ads_hub, name, ads_var, device_class): + def __init__( + self, + ads_hub: AdsHub, + name: str, + ads_var: str, + device_class: BinarySensorDeviceClass | None, + ) -> None: """Initialize ADS binary sensor.""" super().__init__(ads_hub, name, ads_var) self._attr_device_class = device_class or BinarySensorDeviceClass.MOVING diff --git a/homeassistant/components/ads/const.py b/homeassistant/components/ads/const.py new file mode 100644 index 00000000000..ea78fb41785 --- /dev/null +++ b/homeassistant/components/ads/const.py @@ -0,0 +1,41 @@ +"""Support for Automation Device Specification (ADS).""" + +from __future__ import annotations + +from enum import StrEnum +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from .hub import AdsHub + +DOMAIN = "ads" + +DATA_ADS: HassKey[AdsHub] = HassKey(DOMAIN) + +CONF_ADS_VAR = "adsvar" + +STATE_KEY_STATE = "state" + + +class AdsType(StrEnum): + """Supported Types.""" + + BOOL = "bool" + BYTE = "byte" + INT = "int" + UINT = "uint" + SINT = "sint" + USINT = "usint" + DINT = "dint" + UDINT = "udint" + WORD = "word" + DWORD = "dword" + LREAL = "lreal" + REAL = 
"real" + STRING = "string" + TIME = "time" + DATE = "date" + DATE_AND_TIME = "dt" + TOD = "tod" diff --git a/homeassistant/components/ads/cover.py b/homeassistant/components/ads/cover.py index b0dded8d4d5..c7b0f4f2f8a 100644 --- a/homeassistant/components/ads/cover.py +++ b/homeassistant/components/ads/cover.py @@ -11,6 +11,7 @@ from homeassistant.components.cover import ( ATTR_POSITION, DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA as COVER_PLATFORM_SCHEMA, + CoverDeviceClass, CoverEntity, CoverEntityFeature, ) @@ -20,14 +21,9 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ( - CONF_ADS_VAR, - CONF_ADS_VAR_POSITION, - DATA_ADS, - STATE_KEY_POSITION, - STATE_KEY_STATE, - AdsEntity, -) +from .const import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE +from .entity import AdsEntity +from .hub import AdsHub DEFAULT_NAME = "ADS Cover" @@ -35,10 +31,13 @@ CONF_ADS_VAR_SET_POS = "adsvar_set_position" CONF_ADS_VAR_OPEN = "adsvar_open" CONF_ADS_VAR_CLOSE = "adsvar_close" CONF_ADS_VAR_STOP = "adsvar_stop" +CONF_ADS_VAR_POSITION = "adsvar_position" + +STATE_KEY_POSITION = "position" PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend( { - vol.Optional(CONF_ADS_VAR): cv.string, + vol.Required(CONF_ADS_VAR): cv.string, vol.Optional(CONF_ADS_VAR_POSITION): cv.string, vol.Optional(CONF_ADS_VAR_SET_POS): cv.string, vol.Optional(CONF_ADS_VAR_CLOSE): cv.string, @@ -59,14 +58,14 @@ def setup_platform( """Set up the cover platform for ADS.""" ads_hub = hass.data[DATA_ADS] - ads_var_is_closed = config.get(CONF_ADS_VAR) - ads_var_position = config.get(CONF_ADS_VAR_POSITION) - ads_var_pos_set = config.get(CONF_ADS_VAR_SET_POS) - ads_var_open = config.get(CONF_ADS_VAR_OPEN) - ads_var_close = config.get(CONF_ADS_VAR_CLOSE) - ads_var_stop = config.get(CONF_ADS_VAR_STOP) - name = config[CONF_NAME] - device_class = config.get(CONF_DEVICE_CLASS) + ads_var_is_closed: str = config[CONF_ADS_VAR] + ads_var_position: str | None = config.get(CONF_ADS_VAR_POSITION) + ads_var_pos_set: str | None = config.get(CONF_ADS_VAR_SET_POS) + ads_var_open: str | None = config.get(CONF_ADS_VAR_OPEN) + ads_var_close: str | None = config.get(CONF_ADS_VAR_CLOSE) + ads_var_stop: str | None = config.get(CONF_ADS_VAR_STOP) + name: str = config[CONF_NAME] + device_class: CoverDeviceClass | None = config.get(CONF_DEVICE_CLASS) add_entities( [ @@ -90,16 +89,16 @@ class AdsCover(AdsEntity, CoverEntity): def __init__( self, - ads_hub, - ads_var_is_closed, - ads_var_position, - ads_var_pos_set, - ads_var_open, - ads_var_close, - ads_var_stop, - name, - device_class, - ): + ads_hub: AdsHub, + ads_var_is_closed: str, + ads_var_position: str | None, + ads_var_pos_set: str | None, + ads_var_open: str | None, + ads_var_close: str | None, + ads_var_stop: str | None, + name: str, + device_class: CoverDeviceClass | None, + ) -> None: """Initialize AdsCover entity.""" super().__init__(ads_hub, name, ads_var_is_closed) if self._attr_unique_id is None: diff --git a/homeassistant/components/ads/entity.py b/homeassistant/components/ads/entity.py new file mode 100644 index 00000000000..f51ede2bbc8 --- /dev/null +++ b/homeassistant/components/ads/entity.py @@ -0,0 +1,70 @@ +"""Support for Automation Device Specification (ADS).""" + +import asyncio +from asyncio import timeout +import logging +from typing import Any + +from homeassistant.helpers.entity import Entity + +from .const import STATE_KEY_STATE +from .hub import 
AdsHub + +_LOGGER = logging.getLogger(__name__) + + +class AdsEntity(Entity): + """Representation of ADS entity.""" + + _attr_should_poll = False + + def __init__(self, ads_hub: AdsHub, name: str, ads_var: str) -> None: + """Initialize ADS binary sensor.""" + self._state_dict: dict[str, Any] = {} + self._state_dict[STATE_KEY_STATE] = None + self._ads_hub = ads_hub + self._ads_var = ads_var + self._event: asyncio.Event | None = None + self._attr_unique_id = ads_var + self._attr_name = name + + async def async_initialize_device( + self, + ads_var: str, + plctype: type, + state_key: str = STATE_KEY_STATE, + factor: int | None = None, + ) -> None: + """Register device notification.""" + + def update(name, value): + """Handle device notifications.""" + _LOGGER.debug("Variable %s changed its value to %d", name, value) + + if factor is None: + self._state_dict[state_key] = value + else: + self._state_dict[state_key] = value / factor + + asyncio.run_coroutine_threadsafe(async_event_set(), self.hass.loop) + self.schedule_update_ha_state() + + async def async_event_set(): + """Set event in async context.""" + self._event.set() + + self._event = asyncio.Event() + + await self.hass.async_add_executor_job( + self._ads_hub.add_device_notification, ads_var, plctype, update + ) + try: + async with timeout(10): + await self._event.wait() + except TimeoutError: + _LOGGER.debug("Variable %s: Timeout during first update", ads_var) + + @property + def available(self) -> bool: + """Return False if state has not been updated yet.""" + return self._state_dict[STATE_KEY_STATE] is not None diff --git a/homeassistant/components/ads/hub.py b/homeassistant/components/ads/hub.py new file mode 100644 index 00000000000..9eb35ab6243 --- /dev/null +++ b/homeassistant/components/ads/hub.py @@ -0,0 +1,151 @@ +"""Support for Automation Device Specification (ADS).""" + +from collections import namedtuple +import ctypes +import logging +import struct +import threading + +import pyads + +_LOGGER = logging.getLogger(__name__) + +# Tuple to hold data needed for notification +NotificationItem = namedtuple( # noqa: PYI024 + "NotificationItem", "hnotify huser name plc_datatype callback" +) + + +class AdsHub: + """Representation of an ADS connection.""" + + def __init__(self, ads_client): + """Initialize the ADS hub.""" + self._client = ads_client + self._client.open() + + # All ADS devices are registered here + self._devices = [] + self._notification_items = {} + self._lock = threading.Lock() + + def shutdown(self, *args, **kwargs): + """Shutdown ADS connection.""" + + _LOGGER.debug("Shutting down ADS") + for notification_item in self._notification_items.values(): + _LOGGER.debug( + "Deleting device notification %d, %d", + notification_item.hnotify, + notification_item.huser, + ) + try: + self._client.del_device_notification( + notification_item.hnotify, notification_item.huser + ) + except pyads.ADSError as err: + _LOGGER.error(err) + try: + self._client.close() + except pyads.ADSError as err: + _LOGGER.error(err) + + def register_device(self, device): + """Register a new device.""" + self._devices.append(device) + + def write_by_name(self, name, value, plc_datatype): + """Write a value to the device.""" + + with self._lock: + try: + return self._client.write_by_name(name, value, plc_datatype) + except pyads.ADSError as err: + _LOGGER.error("Error writing %s: %s", name, err) + + def read_by_name(self, name, plc_datatype): + """Read a value from the device.""" + + with self._lock: + try: + return self._client.read_by_name(name, 
plc_datatype) + except pyads.ADSError as err: + _LOGGER.error("Error reading %s: %s", name, err) + + def add_device_notification(self, name, plc_datatype, callback): + """Add a notification to the ADS devices.""" + + attr = pyads.NotificationAttrib(ctypes.sizeof(plc_datatype)) + + with self._lock: + try: + hnotify, huser = self._client.add_device_notification( + name, attr, self._device_notification_callback + ) + except pyads.ADSError as err: + _LOGGER.error("Error subscribing to %s: %s", name, err) + else: + hnotify = int(hnotify) + self._notification_items[hnotify] = NotificationItem( + hnotify, huser, name, plc_datatype, callback + ) + + _LOGGER.debug( + "Added device notification %d for variable %s", hnotify, name + ) + + def _device_notification_callback(self, notification, name): + """Handle device notifications.""" + contents = notification.contents + hnotify = int(contents.hNotification) + _LOGGER.debug("Received notification %d", hnotify) + + # Get dynamically sized data array + data_size = contents.cbSampleSize + data_address = ( + ctypes.addressof(contents) + + pyads.structs.SAdsNotificationHeader.data.offset + ) + data = (ctypes.c_ubyte * data_size).from_address(data_address) + + # Acquire notification item + with self._lock: + notification_item = self._notification_items.get(hnotify) + + if not notification_item: + _LOGGER.error("Unknown device notification handle: %d", hnotify) + return + + # Data parsing based on PLC data type + plc_datatype = notification_item.plc_datatype + unpack_formats = { + pyads.PLCTYPE_BYTE: " None: """Set up the light platform for ADS.""" - ads_hub = hass.data.get(DATA_ADS) + ads_hub = hass.data[DATA_ADS] - ads_var_enable = config[CONF_ADS_VAR] - ads_var_brightness = config.get(CONF_ADS_VAR_BRIGHTNESS) - name = config[CONF_NAME] + ads_var_enable: str = config[CONF_ADS_VAR] + ads_var_brightness: str | None = config.get(CONF_ADS_VAR_BRIGHTNESS) + name: str = config[CONF_NAME] add_entities([AdsLight(ads_hub, ads_var_enable, ads_var_brightness, name)]) @@ -57,7 +55,13 @@ def setup_platform( class AdsLight(AdsEntity, LightEntity): """Representation of ADS light.""" - def __init__(self, ads_hub, ads_var_enable, ads_var_brightness, name): + def __init__( + self, + ads_hub: AdsHub, + ads_var_enable: str, + ads_var_brightness: str | None, + name: str, + ) -> None: """Initialize AdsLight entity.""" super().__init__(ads_hub, name, ads_var_enable) self._state_dict[STATE_KEY_BRIGHTNESS] = None diff --git a/homeassistant/components/ads/manifest.json b/homeassistant/components/ads/manifest.json index 0a2cd118a19..683c3cb619f 100644 --- a/homeassistant/components/ads/manifest.json +++ b/homeassistant/components/ads/manifest.json @@ -1,9 +1,10 @@ { "domain": "ads", "name": "ADS", - "codeowners": [], + "codeowners": ["@mrpasztoradam"], "documentation": "https://www.home-assistant.io/integrations/ads", "iot_class": "local_push", "loggers": ["pyads"], + "quality_scale": "legacy", "requirements": ["pyads==3.4.0"] } diff --git a/homeassistant/components/ads/select.py b/homeassistant/components/ads/select.py new file mode 100644 index 00000000000..39f813dec27 --- /dev/null +++ b/homeassistant/components/ads/select.py @@ -0,0 +1,86 @@ +"""Support for ADS select entities.""" + +from __future__ import annotations + +import pyads +import voluptuous as vol + +from homeassistant.components.select import ( + PLATFORM_SCHEMA as SELECT_PLATFORM_SCHEMA, + SelectEntity, +) +from homeassistant.const import CONF_NAME +from homeassistant.core import HomeAssistant +import 
homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType + +from .const import CONF_ADS_VAR, DATA_ADS +from .entity import AdsEntity +from .hub import AdsHub + +DEFAULT_NAME = "ADS select" + +CONF_OPTIONS = "options" + +PLATFORM_SCHEMA = SELECT_PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_ADS_VAR): cv.string, + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Required(CONF_OPTIONS): vol.All(cv.ensure_list, [cv.string]), + } +) + + +def setup_platform( + hass: HomeAssistant, + config: ConfigType, + add_entities: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +) -> None: + """Set up an ADS select device.""" + ads_hub = hass.data[DATA_ADS] + + ads_var: str = config[CONF_ADS_VAR] + name: str = config[CONF_NAME] + options: list[str] = config[CONF_OPTIONS] + + entity = AdsSelect(ads_hub, ads_var, name, options) + + add_entities([entity]) + + +class AdsSelect(AdsEntity, SelectEntity): + """Representation of an ADS select entity.""" + + def __init__( + self, + ads_hub: AdsHub, + ads_var: str, + name: str, + options: list[str], + ) -> None: + """Initialize the AdsSelect entity.""" + super().__init__(ads_hub, name, ads_var) + self._attr_options = options + self._attr_current_option = None + + async def async_added_to_hass(self) -> None: + """Register device notification.""" + await self.async_initialize_device(self._ads_var, pyads.PLCTYPE_INT) + self._ads_hub.add_device_notification( + self._ads_var, pyads.PLCTYPE_INT, self._handle_ads_value + ) + + def select_option(self, option: str) -> None: + """Change the selected option.""" + if option in self._attr_options: + index = self._attr_options.index(option) + self._ads_hub.write_by_name(self._ads_var, index, pyads.PLCTYPE_INT) + self._attr_current_option = option + + def _handle_ads_value(self, name: str, value: int) -> None: + """Handle the value update from ADS.""" + if 0 <= value < len(self._attr_options): + self._attr_current_option = self._attr_options[value] + self.schedule_update_ha_state() diff --git a/homeassistant/components/ads/sensor.py b/homeassistant/components/ads/sensor.py index 483fe2cd725..09579161a94 100644 --- a/homeassistant/components/ads/sensor.py +++ b/homeassistant/components/ads/sensor.py @@ -5,41 +5,54 @@ from __future__ import annotations import voluptuous as vol from homeassistant.components.sensor import ( + CONF_STATE_CLASS, + DEVICE_CLASSES_SCHEMA as SENSOR_DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + STATE_CLASSES_SCHEMA as SENSOR_STATE_CLASSES_SCHEMA, + SensorDeviceClass, SensorEntity, + SensorStateClass, ) -from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT +from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME, CONF_UNIT_OF_MEASUREMENT from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType -from .. import ads -from . import ( - ADS_TYPEMAP, - CONF_ADS_FACTOR, - CONF_ADS_TYPE, - CONF_ADS_VAR, - STATE_KEY_STATE, - AdsEntity, -) +from . 
import ADS_TYPEMAP, CONF_ADS_FACTOR, CONF_ADS_TYPE +from .const import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE, AdsType +from .entity import AdsEntity +from .hub import AdsHub DEFAULT_NAME = "ADS sensor" + PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_ADS_VAR): cv.string, vol.Optional(CONF_ADS_FACTOR): cv.positive_int, - vol.Optional(CONF_ADS_TYPE, default=ads.ADSTYPE_INT): vol.In( - [ - ads.ADSTYPE_INT, - ads.ADSTYPE_UINT, - ads.ADSTYPE_BYTE, - ads.ADSTYPE_DINT, - ads.ADSTYPE_UDINT, - ] + vol.Optional(CONF_ADS_TYPE, default=AdsType.INT): vol.All( + vol.Coerce(AdsType), + vol.In( + [ + AdsType.BOOL, + AdsType.BYTE, + AdsType.INT, + AdsType.UINT, + AdsType.SINT, + AdsType.USINT, + AdsType.DINT, + AdsType.UDINT, + AdsType.WORD, + AdsType.DWORD, + AdsType.LREAL, + AdsType.REAL, + ] + ), ), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=""): cv.string, + vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA, + vol.Optional(CONF_STATE_CLASS): SENSOR_STATE_CLASSES_SCHEMA, + vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, } ) @@ -51,15 +64,26 @@ def setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up an ADS sensor device.""" - ads_hub = hass.data.get(ads.DATA_ADS) + ads_hub = hass.data[DATA_ADS] - ads_var = config[CONF_ADS_VAR] - ads_type = config[CONF_ADS_TYPE] - name = config[CONF_NAME] - unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT) - factor = config.get(CONF_ADS_FACTOR) + ads_var: str = config[CONF_ADS_VAR] + ads_type: AdsType = config[CONF_ADS_TYPE] + name: str = config[CONF_NAME] + factor: int | None = config.get(CONF_ADS_FACTOR) + device_class: SensorDeviceClass | None = config.get(CONF_DEVICE_CLASS) + state_class: SensorStateClass | None = config.get(CONF_STATE_CLASS) + unit_of_measurement: str | None = config.get(CONF_UNIT_OF_MEASUREMENT) - entity = AdsSensor(ads_hub, ads_var, ads_type, name, unit_of_measurement, factor) + entity = AdsSensor( + ads_hub, + ads_var, + ads_type, + name, + factor, + device_class, + state_class, + unit_of_measurement, + ) add_entities([entity]) @@ -67,12 +91,24 @@ def setup_platform( class AdsSensor(AdsEntity, SensorEntity): """Representation of an ADS sensor entity.""" - def __init__(self, ads_hub, ads_var, ads_type, name, unit_of_measurement, factor): + def __init__( + self, + ads_hub: AdsHub, + ads_var: str, + ads_type: AdsType, + name: str, + factor: int | None, + device_class: SensorDeviceClass | None, + state_class: SensorStateClass | None, + unit_of_measurement: str | None, + ) -> None: """Initialize AdsSensor entity.""" super().__init__(ads_hub, name, ads_var) - self._attr_native_unit_of_measurement = unit_of_measurement self._ads_type = ads_type self._factor = factor + self._attr_device_class = device_class + self._attr_state_class = state_class + self._attr_native_unit_of_measurement = unit_of_measurement async def async_added_to_hass(self) -> None: """Register device notification.""" diff --git a/homeassistant/components/ads/switch.py b/homeassistant/components/ads/switch.py index 803b95a7d8a..0412a127c95 100644 --- a/homeassistant/components/ads/switch.py +++ b/homeassistant/components/ads/switch.py @@ -17,7 +17,8 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE, AdsEntity +from .const import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE +from .entity import AdsEntity DEFAULT_NAME = "ADS Switch" @@ -36,10 +37,10 @@ def setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up switch platform for ADS.""" - ads_hub = hass.data.get(DATA_ADS) + ads_hub = hass.data[DATA_ADS] - name = config[CONF_NAME] - ads_var = config[CONF_ADS_VAR] + name: str = config[CONF_NAME] + ads_var: str = config[CONF_ADS_VAR] add_entities([AdsSwitch(ads_hub, name, ads_var)]) diff --git a/homeassistant/components/ads/valve.py b/homeassistant/components/ads/valve.py new file mode 100644 index 00000000000..b94215ec9ea --- /dev/null +++ b/homeassistant/components/ads/valve.py @@ -0,0 +1,84 @@ +"""Support for ADS valves.""" + +from __future__ import annotations + +import pyads +import voluptuous as vol + +from homeassistant.components.valve import ( + DEVICE_CLASSES_SCHEMA as VALVE_DEVICE_CLASSES_SCHEMA, + PLATFORM_SCHEMA as VALVE_PLATFORM_SCHEMA, + ValveDeviceClass, + ValveEntity, + ValveEntityFeature, +) +from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME +from homeassistant.core import HomeAssistant +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType + +from .const import CONF_ADS_VAR, DATA_ADS +from .entity import AdsEntity +from .hub import AdsHub + +DEFAULT_NAME = "ADS valve" + +PLATFORM_SCHEMA = VALVE_PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_ADS_VAR): cv.string, + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_DEVICE_CLASS): VALVE_DEVICE_CLASSES_SCHEMA, + } +) + + +def setup_platform( + hass: HomeAssistant, + config: ConfigType, + add_entities: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +) -> None: + """Set up an ADS valve device.""" + ads_hub = hass.data[DATA_ADS] + + ads_var: str = config[CONF_ADS_VAR] + name: str = config[CONF_NAME] + device_class: ValveDeviceClass | None = config.get(CONF_DEVICE_CLASS) + + entity = AdsValve(ads_hub, ads_var, name, device_class) + + add_entities([entity]) + + +class AdsValve(AdsEntity, ValveEntity): + """Representation of an ADS valve entity.""" + + _attr_supported_features = ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE + + def __init__( + self, + ads_hub: AdsHub, + ads_var: str, + name: str, + device_class: ValveDeviceClass | None, + ) -> None: + """Initialize AdsValve entity.""" + super().__init__(ads_hub, name, ads_var) + self._attr_device_class = device_class + self._attr_reports_position = False + self._attr_is_closed = True + + async def async_added_to_hass(self) -> None: + """Register device notification.""" + await self.async_initialize_device(self._ads_var, pyads.PLCTYPE_BOOL) + + def open_valve(self, **kwargs) -> None: + """Open the valve.""" + self._ads_hub.write_by_name(self._ads_var, True, pyads.PLCTYPE_BOOL) + self._attr_is_closed = False + + def close_valve(self, **kwargs) -> None: + """Close the valve.""" + self._ads_hub.write_by_name(self._ads_var, False, pyads.PLCTYPE_BOOL) + self._attr_is_closed = True diff --git a/homeassistant/components/advantage_air/__init__.py b/homeassistant/components/advantage_air/__init__.py index 752c1ec26fc..8be1b719993 100644 --- a/homeassistant/components/advantage_air/__init__.py +++ b/homeassistant/components/advantage_air/__init__.py @@ -55,6 +55,7 @@ async def async_setup_entry( coordinator = 
DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="Advantage Air", update_method=async_get, update_interval=timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL), diff --git a/homeassistant/components/advantage_air/climate.py b/homeassistant/components/advantage_air/climate.py index 8da46cc7463..d07a3182ed7 100644 --- a/homeassistant/components/advantage_air/climate.py +++ b/homeassistant/components/advantage_air/climate.py @@ -102,7 +102,6 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity): _attr_max_temp = 32 _attr_min_temp = 16 _attr_name = None - _enable_turn_on_off_backwards_compatibility = False _support_preset = ClimateEntityFeature(0) def __init__(self, instance: AdvantageAirData, ac_key: str) -> None: @@ -261,7 +260,6 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity): _attr_target_temperature_step = PRECISION_WHOLE _attr_max_temp = 32 _attr_min_temp = 16 - _enable_turn_on_off_backwards_compatibility = False def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None: """Initialize an AdvantageAir Zone control.""" diff --git a/homeassistant/components/advantage_air/icons.json b/homeassistant/components/advantage_air/icons.json index a4168f440cf..8651c9d9eaf 100644 --- a/homeassistant/components/advantage_air/icons.json +++ b/homeassistant/components/advantage_air/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_time_to": "mdi:timer-cog" + "set_time_to": { + "service": "mdi:timer-cog" + } } } diff --git a/homeassistant/components/advantage_air/manifest.json b/homeassistant/components/advantage_air/manifest.json index a07d14896eb..553a641b603 100644 --- a/homeassistant/components/advantage_air/manifest.json +++ b/homeassistant/components/advantage_air/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/advantage_air", "iot_class": "local_polling", "loggers": ["advantage_air"], - "quality_scale": "platinum", "requirements": ["advantage-air==0.4.4"] } diff --git a/homeassistant/components/aemet/__init__.py b/homeassistant/components/aemet/__init__.py index e242d62a580..9ec52faec00 100644 --- a/homeassistant/components/aemet/__init__.py +++ b/homeassistant/components/aemet/__init__.py @@ -1,10 +1,9 @@ """The AEMET OpenData component.""" -from dataclasses import dataclass import logging from aemet_opendata.exceptions import AemetError, TownNotFound -from aemet_opendata.interface import AEMET, ConnectionOptions +from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME @@ -13,20 +12,10 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client from .const import CONF_STATION_UPDATES, PLATFORMS -from .coordinator import WeatherUpdateCoordinator +from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator _LOGGER = logging.getLogger(__name__) -type AemetConfigEntry = ConfigEntry[AemetData] - - -@dataclass -class AemetData: - """Aemet runtime data.""" - - name: str - coordinator: WeatherUpdateCoordinator - async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool: """Set up AEMET OpenData as config entry.""" @@ -34,9 +23,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo api_key = entry.data[CONF_API_KEY] latitude = entry.data[CONF_LATITUDE] longitude = entry.data[CONF_LONGITUDE] - station_updates = 
entry.options.get(CONF_STATION_UPDATES, True) + update_features: int = UpdateFeature.FORECAST + if entry.options.get(CONF_STATION_UPDATES, True): + update_features |= UpdateFeature.STATION - options = ConnectionOptions(api_key, station_updates) + options = ConnectionOptions(api_key, update_features) aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options) try: await aemet.select_coordinates(latitude, longitude) @@ -46,7 +37,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo except AemetError as err: raise ConfigEntryNotReady(err) from err - weather_coordinator = WeatherUpdateCoordinator(hass, aemet) + weather_coordinator = WeatherUpdateCoordinator(hass, entry, aemet) await weather_coordinator.async_config_entry_first_refresh() entry.runtime_data = AemetData(name=name, coordinator=weather_coordinator) diff --git a/homeassistant/components/aemet/config_flow.py b/homeassistant/components/aemet/config_flow.py index 6b2eca3f5c9..e2b0b436c8c 100644 --- a/homeassistant/components/aemet/config_flow.py +++ b/homeassistant/components/aemet/config_flow.py @@ -45,7 +45,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(f"{latitude}-{longitude}") self._abort_if_unique_id_configured() - options = ConnectionOptions(user_input[CONF_API_KEY], False) + options = ConnectionOptions(user_input[CONF_API_KEY]) aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options) try: await aemet.select_coordinates(latitude, longitude) diff --git a/homeassistant/components/aemet/coordinator.py b/homeassistant/components/aemet/coordinator.py index 8d179ccdb02..2e8534c7466 100644 --- a/homeassistant/components/aemet/coordinator.py +++ b/homeassistant/components/aemet/coordinator.py @@ -3,6 +3,7 @@ from __future__ import annotations from asyncio import timeout +from dataclasses import dataclass from datetime import timedelta import logging from typing import Any, Final, cast @@ -19,6 +20,7 @@ from aemet_opendata.helpers import dict_nested_value from aemet_opendata.interface import AEMET from homeassistant.components.weather import Forecast +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -29,6 +31,16 @@ _LOGGER = logging.getLogger(__name__) API_TIMEOUT: Final[int] = 120 WEATHER_UPDATE_INTERVAL = timedelta(minutes=10) +type AemetConfigEntry = ConfigEntry[AemetData] + + +@dataclass +class AemetData: + """Aemet runtime data.""" + + name: str + coordinator: WeatherUpdateCoordinator + class WeatherUpdateCoordinator(DataUpdateCoordinator): """Weather data update coordinator.""" @@ -36,6 +48,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): def __init__( self, hass: HomeAssistant, + entry: AemetConfigEntry, aemet: AEMET, ) -> None: """Initialize coordinator.""" @@ -44,6 +57,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): super().__init__( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_interval=WEATHER_UPDATE_INTERVAL, ) diff --git a/homeassistant/components/aemet/diagnostics.py b/homeassistant/components/aemet/diagnostics.py index cc39d1adc32..bc366fc6d44 100644 --- a/homeassistant/components/aemet/diagnostics.py +++ b/homeassistant/components/aemet/diagnostics.py @@ -6,7 +6,7 @@ from typing import Any from aemet_opendata.const import AOD_COORDS -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import 
async_redact_data from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, @@ -15,7 +15,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from . import AemetConfigEntry +from .coordinator import AemetConfigEntry TO_REDACT_CONFIG = [ CONF_API_KEY, diff --git a/homeassistant/components/aemet/manifest.json b/homeassistant/components/aemet/manifest.json index 3696e16b437..5c9d1ff7e5a 100644 --- a/homeassistant/components/aemet/manifest.json +++ b/homeassistant/components/aemet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aemet", "iot_class": "cloud_polling", "loggers": ["aemet_opendata"], - "requirements": ["AEMET-OpenData==0.5.4"] + "requirements": ["AEMET-OpenData==0.6.3"] } diff --git a/homeassistant/components/aemet/sensor.py b/homeassistant/components/aemet/sensor.py index 83d490f7fe2..88eb34b6f84 100644 --- a/homeassistant/components/aemet/sensor.py +++ b/homeassistant/components/aemet/sensor.py @@ -55,7 +55,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . import AemetConfigEntry from .const import ( ATTR_API_CONDITION, ATTR_API_FORECAST_CONDITION, @@ -87,7 +86,7 @@ from .const import ( ATTR_API_WIND_SPEED, CONDITIONS_MAP, ) -from .coordinator import WeatherUpdateCoordinator +from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator from .entity import AemetEntity @@ -249,6 +248,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = ( name="Rain", native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, device_class=SensorDeviceClass.PRECIPITATION_INTENSITY, + state_class=SensorStateClass.MEASUREMENT, ), AemetSensorEntityDescription( key=ATTR_API_RAIN_PROB, @@ -263,6 +263,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = ( name="Snow", native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, device_class=SensorDeviceClass.PRECIPITATION_INTENSITY, + state_class=SensorStateClass.MEASUREMENT, ), AemetSensorEntityDescription( key=ATTR_API_SNOW_PROB, diff --git a/homeassistant/components/aemet/weather.py b/homeassistant/components/aemet/weather.py index 341b81d71c4..a156652eadd 100644 --- a/homeassistant/components/aemet/weather.py +++ b/homeassistant/components/aemet/weather.py @@ -27,9 +27,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import AemetConfigEntry from .const import CONDITIONS_MAP -from .coordinator import WeatherUpdateCoordinator +from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator from .entity import AemetEntity diff --git a/homeassistant/components/aftership/icons.json b/homeassistant/components/aftership/icons.json index 1222ab0873d..105d3cef3ec 100644 --- a/homeassistant/components/aftership/icons.json +++ b/homeassistant/components/aftership/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "add_tracking": "mdi:package-variant-plus", - "remove_tracking": "mdi:package-variant-minus" + "add_tracking": { + "service": "mdi:package-variant-plus" + }, + "remove_tracking": { + "service": "mdi:package-variant-minus" + } } } diff --git a/homeassistant/components/agent_dvr/alarm_control_panel.py b/homeassistant/components/agent_dvr/alarm_control_panel.py index f098184321f..23328315e42 100644 --- a/homeassistant/components/agent_dvr/alarm_control_panel.py +++ b/homeassistant/components/agent_dvr/alarm_control_panel.py @@ -5,12 +5,7 @@ from __future__ import annotations from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, -) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, + AlarmControlPanelState, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo @@ -65,37 +60,37 @@ class AgentBaseStation(AlarmControlPanelEntity): self._attr_available = self._client.is_available armed = self._client.is_armed if armed is None: - self._attr_state = None + self._attr_alarm_state = None return if armed: prof = (await self._client.get_active_profile()).lower() - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY if prof == CONF_HOME_MODE_NAME: - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME elif prof == CONF_NIGHT_MODE_NAME: - self._attr_state = STATE_ALARM_ARMED_NIGHT + self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT else: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" await self._client.disarm() - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command. Uses custom mode.""" await self._client.arm() await self._client.set_active_profile(CONF_AWAY_MODE_NAME) - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command. Uses custom mode.""" await self._client.arm() await self._client.set_active_profile(CONF_HOME_MODE_NAME) - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command. 
Uses custom mode.""" await self._client.arm() await self._client.set_active_profile(CONF_NIGHT_MODE_NAME) - self._attr_state = STATE_ALARM_ARMED_NIGHT + self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT diff --git a/homeassistant/components/agent_dvr/icons.json b/homeassistant/components/agent_dvr/icons.json index 6550d01641e..7dfb4a847f6 100644 --- a/homeassistant/components/agent_dvr/icons.json +++ b/homeassistant/components/agent_dvr/icons.json @@ -1,9 +1,19 @@ { "services": { - "start_recording": "mdi:record-rec", - "stop_recording": "mdi:stop", - "enable_alerts": "mdi:bell-alert", - "disable_alerts": "mdi:bell-off", - "snapshot": "mdi:camera" + "start_recording": { + "service": "mdi:record-rec" + }, + "stop_recording": { + "service": "mdi:stop" + }, + "enable_alerts": { + "service": "mdi:bell-alert" + }, + "disable_alerts": { + "service": "mdi:bell-off" + }, + "snapshot": { + "service": "mdi:camera" + } } } diff --git a/homeassistant/components/agent_dvr/manifest.json b/homeassistant/components/agent_dvr/manifest.json index 9a6c528c336..4ec14296363 100644 --- a/homeassistant/components/agent_dvr/manifest.json +++ b/homeassistant/components/agent_dvr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/agent_dvr", "iot_class": "local_polling", "loggers": ["agent"], - "requirements": ["agent-py==0.0.23"] + "requirements": ["agent-py==0.0.24"] } diff --git a/homeassistant/components/air_quality/__init__.py b/homeassistant/components/air_quality/__init__.py index 9a80ee39e86..1e2a0525f29 100644 --- a/homeassistant/components/air_quality/__init__.py +++ b/homeassistant/components/air_quality/__init__.py @@ -13,11 +13,13 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType, StateType +from homeassistant.util.hass_dict import HassKey from .const import DOMAIN _LOGGER: Final = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[AirQualityEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT: Final = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -54,7 +56,7 @@ PROP_TO_ATTR: Final[dict[str, str]] = { async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the air quality component.""" - component = hass.data[DOMAIN] = EntityComponent[AirQualityEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[AirQualityEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -63,14 +65,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[AirQualityEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[AirQualityEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class AirQualityEntity(Entity): diff --git a/homeassistant/components/airgradient/__init__.py b/homeassistant/components/airgradient/__init__.py index 7ee8ac6a3c7..3b27d6cda5e 100644 --- 
a/homeassistant/components/airgradient/__init__.py +++ b/homeassistant/components/airgradient/__init__.py @@ -2,18 +2,14 @@ from __future__ import annotations -from dataclasses import dataclass - -from airgradient import AirGradientClient, get_model_name +from airgradient import AirGradientClient from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN -from .coordinator import AirGradientConfigCoordinator, AirGradientMeasurementCoordinator +from .coordinator import AirGradientCoordinator PLATFORMS: list[Platform] = [ Platform.BUTTON, @@ -25,15 +21,7 @@ PLATFORMS: list[Platform] = [ ] -@dataclass -class AirGradientData: - """AirGradient data class.""" - - measurement: AirGradientMeasurementCoordinator - config: AirGradientConfigCoordinator - - -type AirGradientConfigEntry = ConfigEntry[AirGradientData] +type AirGradientConfigEntry = ConfigEntry[AirGradientCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry) -> bool: @@ -43,27 +31,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry) entry.data[CONF_HOST], session=async_get_clientsession(hass) ) - measurement_coordinator = AirGradientMeasurementCoordinator(hass, client) - config_coordinator = AirGradientConfigCoordinator(hass, client) + coordinator = AirGradientCoordinator(hass, client) - await measurement_coordinator.async_config_entry_first_refresh() - await config_coordinator.async_config_entry_first_refresh() + await coordinator.async_config_entry_first_refresh() - device_registry = dr.async_get(hass) - device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - identifiers={(DOMAIN, measurement_coordinator.serial_number)}, - manufacturer="AirGradient", - model=get_model_name(measurement_coordinator.data.model), - model_id=measurement_coordinator.data.model, - serial_number=measurement_coordinator.data.serial_number, - sw_version=measurement_coordinator.data.firmware_version, - ) - - entry.runtime_data = AirGradientData( - measurement=measurement_coordinator, - config=config_coordinator, - ) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/airgradient/button.py b/homeassistant/components/airgradient/button.py index b59188ebdd4..32a9b5adedf 100644 --- a/homeassistant/components/airgradient/button.py +++ b/homeassistant/components/airgradient/button.py @@ -15,8 +15,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN, AirGradientConfigEntry -from .coordinator import AirGradientConfigCoordinator +from . 
import AirGradientConfigEntry +from .const import DOMAIN +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -47,8 +48,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up AirGradient button entities based on a config entry.""" - model = entry.runtime_data.measurement.data.model - coordinator = entry.runtime_data.config + coordinator = entry.runtime_data + model = coordinator.data.measures.model added_entities = False @@ -57,7 +58,7 @@ async def async_setup_entry( nonlocal added_entities if ( - coordinator.data.configuration_control is ConfigurationControl.LOCAL + coordinator.data.config.configuration_control is ConfigurationControl.LOCAL and not added_entities ): entities = [AirGradientButton(coordinator, CO2_CALIBRATION)] @@ -67,7 +68,8 @@ async def async_setup_entry( async_add_entities(entities) added_entities = True elif ( - coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL and added_entities ): entity_registry = er.async_get(hass) @@ -87,11 +89,10 @@ class AirGradientButton(AirGradientEntity, ButtonEntity): """Defines an AirGradient button.""" entity_description: AirGradientButtonEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientButtonEntityDescription, ) -> None: """Initialize airgradient button.""" diff --git a/homeassistant/components/airgradient/coordinator.py b/homeassistant/components/airgradient/coordinator.py index c3def0b1f33..03d58645853 100644 --- a/homeassistant/components/airgradient/coordinator.py +++ b/homeassistant/components/airgradient/coordinator.py @@ -2,24 +2,35 @@ from __future__ import annotations +from dataclasses import dataclass from datetime import timedelta from typing import TYPE_CHECKING from airgradient import AirGradientClient, AirGradientError, Config, Measures from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import LOGGER +from .const import DOMAIN, LOGGER if TYPE_CHECKING: from . 
import AirGradientConfigEntry -class AirGradientCoordinator[_DataT](DataUpdateCoordinator[_DataT]): +@dataclass +class AirGradientData: + """Class for AirGradient data.""" + + measures: Measures + config: Config + + +class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]): """Class to manage fetching AirGradient data.""" config_entry: AirGradientConfigEntry + _current_version: str def __init__(self, hass: HomeAssistant, client: AirGradientClient) -> None: """Initialize coordinator.""" @@ -33,25 +44,27 @@ class AirGradientCoordinator[_DataT](DataUpdateCoordinator[_DataT]): assert self.config_entry.unique_id self.serial_number = self.config_entry.unique_id - async def _async_update_data(self) -> _DataT: + async def _async_setup(self) -> None: + """Set up the coordinator.""" + self._current_version = ( + await self.client.get_current_measures() + ).firmware_version + + async def _async_update_data(self) -> AirGradientData: try: - return await self._update_data() + measures = await self.client.get_current_measures() + config = await self.client.get_config() except AirGradientError as error: raise UpdateFailed(error) from error - - async def _update_data(self) -> _DataT: - raise NotImplementedError - - -class AirGradientMeasurementCoordinator(AirGradientCoordinator[Measures]): - """Class to manage fetching AirGradient data.""" - - async def _update_data(self) -> Measures: - return await self.client.get_current_measures() - - -class AirGradientConfigCoordinator(AirGradientCoordinator[Config]): - """Class to manage fetching AirGradient data.""" - - async def _update_data(self) -> Config: - return await self.client.get_config() + if measures.firmware_version != self._current_version: + device_registry = dr.async_get(self.hass) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, self.serial_number)} + ) + assert device_entry + device_registry.async_update_device( + device_entry.id, + sw_version=measures.firmware_version, + ) + self._current_version = measures.firmware_version + return AirGradientData(measures, config) diff --git a/homeassistant/components/airgradient/diagnostics.py b/homeassistant/components/airgradient/diagnostics.py new file mode 100644 index 00000000000..dfc3262193a --- /dev/null +++ b/homeassistant/components/airgradient/diagnostics.py @@ -0,0 +1,18 @@ +"""Diagnostics support for Airgradient.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
import AirGradientConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: AirGradientConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + return asdict(entry.runtime_data.data) diff --git a/homeassistant/components/airgradient/entity.py b/homeassistant/components/airgradient/entity.py index 4de07904bba..588a799610b 100644 --- a/homeassistant/components/airgradient/entity.py +++ b/homeassistant/components/airgradient/entity.py @@ -1,5 +1,7 @@ """Base class for AirGradient entities.""" +from airgradient import get_model_name + from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -15,6 +17,12 @@ class AirGradientEntity(CoordinatorEntity[AirGradientCoordinator]): def __init__(self, coordinator: AirGradientCoordinator) -> None: """Initialize airgradient entity.""" super().__init__(coordinator) + measures = coordinator.data.measures self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, coordinator.serial_number)}, + manufacturer="AirGradient", + model=get_model_name(measures.model), + model_id=measures.model, + serial_number=coordinator.serial_number, + sw_version=measures.firmware_version, ) diff --git a/homeassistant/components/airgradient/manifest.json b/homeassistant/components/airgradient/manifest.json index fed4fafdc74..13764142697 100644 --- a/homeassistant/components/airgradient/manifest.json +++ b/homeassistant/components/airgradient/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/airgradient", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["airgradient==0.8.0"], + "requirements": ["airgradient==0.9.1"], "zeroconf": ["_airgradient._tcp.local."] } diff --git a/homeassistant/components/airgradient/number.py b/homeassistant/components/airgradient/number.py index 139357f3753..7fd282ddd8b 100644 --- a/homeassistant/components/airgradient/number.py +++ b/homeassistant/components/airgradient/number.py @@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AirGradientConfigEntry from .const import DOMAIN -from .coordinator import AirGradientConfigCoordinator +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -62,8 +62,8 @@ async def async_setup_entry( ) -> None: """Set up AirGradient number entities based on a config entry.""" - model = entry.runtime_data.measurement.data.model - coordinator = entry.runtime_data.config + coordinator = entry.runtime_data + model = coordinator.data.measures.model added_entities = False @@ -72,7 +72,7 @@ async def async_setup_entry( nonlocal added_entities if ( - coordinator.data.configuration_control is ConfigurationControl.LOCAL + coordinator.data.config.configuration_control is ConfigurationControl.LOCAL and not added_entities ): entities = [] @@ -84,7 +84,8 @@ async def async_setup_entry( async_add_entities(entities) added_entities = True elif ( - coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL and added_entities ): entity_registry = er.async_get(hass) @@ -104,11 +105,10 @@ class AirGradientNumber(AirGradientEntity, NumberEntity): """Defines an AirGradient number entity.""" entity_description: AirGradientNumberEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientNumberEntityDescription, ) -> None: """Initialize AirGradient number.""" @@ -119,7 +119,7 @@ class AirGradientNumber(AirGradientEntity, NumberEntity): @property def native_value(self) -> int | None: """Return the state of the number.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.config) async def async_set_native_value(self, value: float) -> None: """Set the selected value.""" diff --git a/homeassistant/components/airgradient/quality_scale.yaml b/homeassistant/components/airgradient/quality_scale.yaml new file mode 100644 index 00000000000..8d62e8515fc --- /dev/null +++ b/homeassistant/components/airgradient/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. 
+ test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration has a fixed single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: exempt + comment: | + This integration has a fixed single device. + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/airgradient/select.py b/homeassistant/components/airgradient/select.py index 532f7167dff..af56802d842 100644 --- a/homeassistant/components/airgradient/select.py +++ b/homeassistant/components/airgradient/select.py @@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import AirGradientConfigEntry from .const import DOMAIN, PM_STANDARD, PM_STANDARD_REVERSE -from .coordinator import AirGradientConfigCoordinator +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -144,13 +144,11 @@ async def async_setup_entry( ) -> None: """Set up AirGradient select entities based on a config entry.""" - coordinator = entry.runtime_data.config - measurement_coordinator = entry.runtime_data.measurement + coordinator = entry.runtime_data + model = coordinator.data.measures.model async_add_entities([AirGradientSelect(coordinator, CONFIG_CONTROL_ENTITY)]) - model = measurement_coordinator.data.model - added_entities = False @callback @@ -158,7 +156,7 @@ async def async_setup_entry( nonlocal added_entities if ( - coordinator.data.configuration_control is ConfigurationControl.LOCAL + coordinator.data.config.configuration_control is ConfigurationControl.LOCAL and not added_entities ): entities: list[AirGradientSelect] = [ @@ -179,7 +177,8 @@ async def async_setup_entry( async_add_entities(entities) added_entities = True elif ( - coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL and added_entities ): entity_registry = er.async_get(hass) @@ -201,11 +200,10 @@ class AirGradientSelect(AirGradientEntity, SelectEntity): """Defines an AirGradient select entity.""" entity_description: AirGradientSelectEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientSelectEntityDescription, ) -> None: """Initialize AirGradient select.""" @@ -216,7 +214,7 @@ class AirGradientSelect(AirGradientEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the state of the select.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.config) async def async_select_option(self, option: str) -> None: """Change the selected option.""" diff --git a/homeassistant/components/airgradient/sensor.py b/homeassistant/components/airgradient/sensor.py index f431c49ed2a..497d4cc0488 100644 --- 
a/homeassistant/components/airgradient/sensor.py +++ b/homeassistant/components/airgradient/sensor.py @@ -32,7 +32,7 @@ from homeassistant.helpers.typing import StateType from . import AirGradientConfigEntry from .const import PM_STANDARD, PM_STANDARD_REVERSE -from .coordinator import AirGradientConfigCoordinator, AirGradientMeasurementCoordinator +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -218,7 +218,7 @@ async def async_setup_entry( ) -> None: """Set up AirGradient sensor entities based on a config entry.""" - coordinator = entry.runtime_data.measurement + coordinator = entry.runtime_data listener: Callable[[], None] | None = None not_setup: set[AirGradientMeasurementSensorEntityDescription] = set( MEASUREMENT_SENSOR_TYPES @@ -232,7 +232,7 @@ async def async_setup_entry( not_setup = set() sensors = [] for description in sensor_descriptions: - if description.value_fn(coordinator.data) is None: + if description.value_fn(coordinator.data.measures) is None: not_setup.add(description) else: sensors.append(AirGradientMeasurementSensor(coordinator, description)) @@ -248,64 +248,65 @@ async def async_setup_entry( add_entities() entities = [ - AirGradientConfigSensor(entry.runtime_data.config, description) + AirGradientConfigSensor(coordinator, description) for description in CONFIG_SENSOR_TYPES ] - if "L" in coordinator.data.model: + if "L" in coordinator.data.measures.model: entities.extend( - AirGradientConfigSensor(entry.runtime_data.config, description) + AirGradientConfigSensor(coordinator, description) for description in CONFIG_LED_BAR_SENSOR_TYPES ) - if "I" in coordinator.data.model: + if "I" in coordinator.data.measures.model: entities.extend( - AirGradientConfigSensor(entry.runtime_data.config, description) + AirGradientConfigSensor(coordinator, description) for description in CONFIG_DISPLAY_SENSOR_TYPES ) async_add_entities(entities) -class AirGradientMeasurementSensor(AirGradientEntity, SensorEntity): +class AirGradientSensor(AirGradientEntity, SensorEntity): """Defines an AirGradient sensor.""" - entity_description: AirGradientMeasurementSensorEntityDescription - coordinator: AirGradientMeasurementCoordinator - def __init__( self, - coordinator: AirGradientMeasurementCoordinator, - description: AirGradientMeasurementSensorEntityDescription, + coordinator: AirGradientCoordinator, + description: SensorEntityDescription, ) -> None: """Initialize airgradient sensor.""" super().__init__(coordinator) self.entity_description = description self._attr_unique_id = f"{coordinator.serial_number}-{description.key}" + +class AirGradientMeasurementSensor(AirGradientSensor): + """Defines an AirGradient sensor.""" + + entity_description: AirGradientMeasurementSensorEntityDescription + @property def native_value(self) -> StateType: """Return the state of the sensor.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.measures) -class AirGradientConfigSensor(AirGradientEntity, SensorEntity): +class AirGradientConfigSensor(AirGradientSensor): """Defines an AirGradient sensor.""" entity_description: AirGradientConfigSensorEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientConfigSensorEntityDescription, ) -> None: """Initialize airgradient sensor.""" - super().__init__(coordinator) - self.entity_description = description - self._attr_unique_id 
= f"{coordinator.serial_number}-{description.key}" + super().__init__(coordinator, description) self._attr_entity_registry_enabled_default = ( - coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL ) @property def native_value(self) -> StateType: """Return the state of the sensor.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.config) diff --git a/homeassistant/components/airgradient/switch.py b/homeassistant/components/airgradient/switch.py index 60c3f83ae5e..329f704e755 100644 --- a/homeassistant/components/airgradient/switch.py +++ b/homeassistant/components/airgradient/switch.py @@ -19,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import AirGradientConfigEntry from .const import DOMAIN -from .coordinator import AirGradientConfigCoordinator +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -46,7 +46,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up AirGradient switch entities based on a config entry.""" - coordinator = entry.runtime_data.config + coordinator = entry.runtime_data added_entities = False @@ -55,7 +55,7 @@ async def async_setup_entry( nonlocal added_entities if ( - coordinator.data.configuration_control is ConfigurationControl.LOCAL + coordinator.data.config.configuration_control is ConfigurationControl.LOCAL and not added_entities ): async_add_entities( @@ -63,7 +63,8 @@ async def async_setup_entry( ) added_entities = True elif ( - coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL and added_entities ): entity_registry = er.async_get(hass) @@ -82,11 +83,10 @@ class AirGradientSwitch(AirGradientEntity, SwitchEntity): """Defines an AirGradient switch entity.""" entity_description: AirGradientSwitchEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientSwitchEntityDescription, ) -> None: """Initialize AirGradient switch.""" @@ -97,7 +97,7 @@ class AirGradientSwitch(AirGradientEntity, SwitchEntity): @property def is_on(self) -> bool: """Return the state of the switch.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.config) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" diff --git a/homeassistant/components/airgradient/update.py b/homeassistant/components/airgradient/update.py index 95e64930ea6..47e71cb4e65 100644 --- a/homeassistant/components/airgradient/update.py +++ b/homeassistant/components/airgradient/update.py @@ -1,13 +1,14 @@ """Airgradient Update platform.""" from datetime import timedelta -from functools import cached_property + +from propcache import cached_property from homeassistant.components.update import UpdateDeviceClass, UpdateEntity from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AirGradientConfigEntry, AirGradientMeasurementCoordinator +from . 
import AirGradientConfigEntry, AirGradientCoordinator from .entity import AirGradientEntity SCAN_INTERVAL = timedelta(hours=1) @@ -20,18 +21,17 @@ async def async_setup_entry( ) -> None: """Set up Airgradient update platform.""" - data = config_entry.runtime_data + coordinator = config_entry.runtime_data - async_add_entities([AirGradientUpdate(data.measurement)], True) + async_add_entities([AirGradientUpdate(coordinator)], True) class AirGradientUpdate(AirGradientEntity, UpdateEntity): """Representation of Airgradient Update.""" _attr_device_class = UpdateDeviceClass.FIRMWARE - coordinator: AirGradientMeasurementCoordinator - def __init__(self, coordinator: AirGradientMeasurementCoordinator) -> None: + def __init__(self, coordinator: AirGradientCoordinator) -> None: """Initialize the entity.""" super().__init__(coordinator) self._attr_unique_id = f"{coordinator.serial_number}-update" @@ -44,7 +44,7 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity): @property def installed_version(self) -> str: """Return the installed version of the entity.""" - return self.coordinator.data.firmware_version + return self.coordinator.data.measures.firmware_version async def async_update(self) -> None: """Update the entity.""" diff --git a/homeassistant/components/airly/manifest.json b/homeassistant/components/airly/manifest.json index 233625ab04a..ccd37589e8c 100644 --- a/homeassistant/components/airly/manifest.json +++ b/homeassistant/components/airly/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["airly"], - "quality_scale": "platinum", "requirements": ["airly==1.1.0"] } diff --git a/homeassistant/components/airnow/__init__.py b/homeassistant/components/airnow/__init__.py index cff6b8c2795..2047a9d41bc 100644 --- a/homeassistant/components/airnow/__init__.py +++ b/homeassistant/components/airnow/__init__.py @@ -15,7 +15,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN # noqa: F401 from .coordinator import AirNowDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/airnow/config_flow.py b/homeassistant/components/airnow/config_flow.py index e839acdcb7b..d0ab16e9758 100644 --- a/homeassistant/components/airnow/config_flow.py +++ b/homeassistant/components/airnow/config_flow.py @@ -1,5 +1,7 @@ """Config flow for AirNow integration.""" +from __future__ import annotations + import logging from typing import Any @@ -12,7 +14,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS from homeassistant.core import HomeAssistant, callback @@ -120,12 +121,12 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> AirNowOptionsFlowHandler: """Return the options flow.""" - return AirNowOptionsFlowHandler(config_entry) + return AirNowOptionsFlowHandler() -class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry): +class AirNowOptionsFlowHandler(OptionsFlow): """Handle an options flow for AirNow.""" async def async_step_init( @@ -136,12 +137,7 @@ class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry): return self.async_create_entry(data=user_input) options_schema = vol.Schema( - { - 
vol.Optional(CONF_RADIUS): vol.All( - int, - vol.Range(min=5), - ), - } + {vol.Optional(CONF_RADIUS): vol.All(int, vol.Range(min=5))} ) return self.async_show_form( diff --git a/homeassistant/components/airnow/const.py b/homeassistant/components/airnow/const.py index 054a5cbfea7..1198f68128d 100644 --- a/homeassistant/components/airnow/const.py +++ b/homeassistant/components/airnow/const.py @@ -14,10 +14,32 @@ ATTR_API_POLLUTANT = "Pollutant" ATTR_API_REPORT_DATE = "DateObserved" ATTR_API_REPORT_HOUR = "HourObserved" ATTR_API_REPORT_TZ = "LocalTimeZone" -ATTR_API_REPORT_TZINFO = "LocalTimeZoneInfo" ATTR_API_STATE = "StateCode" ATTR_API_STATION = "ReportingArea" ATTR_API_STATION_LATITUDE = "Latitude" ATTR_API_STATION_LONGITUDE = "Longitude" DEFAULT_NAME = "AirNow" DOMAIN = "airnow" + +SECONDS_PER_HOUR = 3600 + +# AirNow seems to only use standard time zones, +# but we include daylight savings for completeness/futureproofing. +US_TZ_OFFSETS = { + "HST": -10 * SECONDS_PER_HOUR, + "HDT": -9 * SECONDS_PER_HOUR, + # AirNow returns AKT instead of AKST or AKDT, use standard + "AKT": -9 * SECONDS_PER_HOUR, + "AKST": -9 * SECONDS_PER_HOUR, + "AKDT": -8 * SECONDS_PER_HOUR, + "PST": -8 * SECONDS_PER_HOUR, + "PDT": -7 * SECONDS_PER_HOUR, + "MST": -7 * SECONDS_PER_HOUR, + "MDT": -6 * SECONDS_PER_HOUR, + "CST": -6 * SECONDS_PER_HOUR, + "CDT": -5 * SECONDS_PER_HOUR, + "EST": -5 * SECONDS_PER_HOUR, + "EDT": -4 * SECONDS_PER_HOUR, + "AST": -4 * SECONDS_PER_HOUR, + "ADT": -3 * SECONDS_PER_HOUR, +} diff --git a/homeassistant/components/airnow/coordinator.py b/homeassistant/components/airnow/coordinator.py index 35f8a0e0abf..32185080d25 100644 --- a/homeassistant/components/airnow/coordinator.py +++ b/homeassistant/components/airnow/coordinator.py @@ -12,7 +12,6 @@ from pyairnow.errors import AirNowError from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from homeassistant.util import dt as dt_util from .const import ( ATTR_API_AQI, @@ -27,7 +26,6 @@ from .const import ( ATTR_API_REPORT_DATE, ATTR_API_REPORT_HOUR, ATTR_API_REPORT_TZ, - ATTR_API_REPORT_TZINFO, ATTR_API_STATE, ATTR_API_STATION, ATTR_API_STATION_LATITUDE, @@ -98,9 +96,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # Copy Report Details data[ATTR_API_REPORT_DATE] = obv[ATTR_API_REPORT_DATE] data[ATTR_API_REPORT_HOUR] = obv[ATTR_API_REPORT_HOUR] - data[ATTR_API_REPORT_TZINFO] = await dt_util.async_get_time_zone( - obv[ATTR_API_REPORT_TZ] - ) + data[ATTR_API_REPORT_TZ] = obv[ATTR_API_REPORT_TZ] # Copy Station Details data[ATTR_API_STATE] = obv[ATTR_API_STATE] diff --git a/homeassistant/components/airnow/sensor.py b/homeassistant/components/airnow/sensor.py index 722c0d6f4a9..1abf93514a5 100644 --- a/homeassistant/components/airnow/sensor.py +++ b/homeassistant/components/airnow/sensor.py @@ -4,9 +4,10 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from datetime import datetime from typing import Any +from dateutil import parser + from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -34,12 +35,13 @@ from .const import ( ATTR_API_PM25, ATTR_API_REPORT_DATE, ATTR_API_REPORT_HOUR, - ATTR_API_REPORT_TZINFO, + ATTR_API_REPORT_TZ, ATTR_API_STATION, ATTR_API_STATION_LATITUDE, ATTR_API_STATION_LONGITUDE, DEFAULT_NAME, DOMAIN, + US_TZ_OFFSETS, ) ATTRIBUTION = "Data provided by AirNow" @@ -69,6 +71,18 @@ def station_extra_attrs(data: dict[str, Any]) -> 
dict[str, Any]: return {} +def aqi_extra_attrs(data: dict[str, Any]) -> dict[str, Any]: + """Process extra attributes for main AQI sensor.""" + return { + ATTR_DESCR: data[ATTR_API_AQI_DESCRIPTION], + ATTR_LEVEL: data[ATTR_API_AQI_LEVEL], + ATTR_TIME: parser.parse( + f"{data[ATTR_API_REPORT_DATE]} {data[ATTR_API_REPORT_HOUR]}:00 {data[ATTR_API_REPORT_TZ]}", + tzinfos=US_TZ_OFFSETS, + ).isoformat(), + } + + SENSOR_TYPES: tuple[AirNowEntityDescription, ...] = ( AirNowEntityDescription( key=ATTR_API_AQI, @@ -76,16 +90,7 @@ SENSOR_TYPES: tuple[AirNowEntityDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.AQI, value_fn=lambda data: data.get(ATTR_API_AQI), - extra_state_attributes_fn=lambda data: { - ATTR_DESCR: data[ATTR_API_AQI_DESCRIPTION], - ATTR_LEVEL: data[ATTR_API_AQI_LEVEL], - ATTR_TIME: datetime.strptime( - f"{data[ATTR_API_REPORT_DATE]} {data[ATTR_API_REPORT_HOUR]}", - "%Y-%m-%d %H", - ) - .replace(tzinfo=data[ATTR_API_REPORT_TZINFO]) - .isoformat(), - }, + extra_state_attributes_fn=aqi_extra_attrs, ), AirNowEntityDescription( key=ATTR_API_PM10, diff --git a/homeassistant/components/airq/manifest.json b/homeassistant/components/airq/manifest.json index 2b23928aba8..1ae7da14875 100644 --- a/homeassistant/components/airq/manifest.json +++ b/homeassistant/components/airq/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aioairq"], - "requirements": ["aioairq==0.3.2"] + "requirements": ["aioairq==0.4.3"] } diff --git a/homeassistant/components/airthings/__init__.py b/homeassistant/components/airthings/__init__.py index 22138c7d4fc..14e2f28370f 100644 --- a/homeassistant/components/airthings/__init__.py +++ b/homeassistant/components/airthings/__init__.py @@ -42,6 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=_update_method, update_interval=SCAN_INTERVAL, diff --git a/homeassistant/components/airthings_ble/__init__.py b/homeassistant/components/airthings_ble/__init__.py index 79384eed4ef..1c3c6084739 100644 --- a/homeassistant/components/airthings_ble/__init__.py +++ b/homeassistant/components/airthings_ble/__init__.py @@ -2,75 +2,27 @@ from __future__ import annotations -from datetime import timedelta -import logging - -from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice -from bleak_retry_connector import close_stale_connections_by_address - -from homeassistant.components import bluetooth -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from homeassistant.util.unit_system import METRIC_SYSTEM -from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, MAX_RETRIES_AFTER_STARTUP +from .const import MAX_RETRIES_AFTER_STARTUP +from .coordinator import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] -_LOGGER = logging.getLogger(__name__) - -AirthingsBLEDataUpdateCoordinator = DataUpdateCoordinator[AirthingsDevice] -AirthingsBLEConfigEntry = ConfigEntry[AirthingsBLEDataUpdateCoordinator] - async def async_setup_entry( hass: HomeAssistant, entry: AirthingsBLEConfigEntry ) -> bool: """Set up Airthings BLE device from a config entry.""" - 
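The airnow change above drops the precomputed ZoneInfo attribute and instead parses the report time with dateutil, resolving AirNow's timezone abbreviations through the new US_TZ_OFFSETS map of second offsets. A minimal standalone sketch of that parsing call, using an illustrative timestamp rather than real API data:

from dateutil import parser

SECONDS_PER_HOUR = 3600
# Subset of the offsets added in airnow/const.py above.
TZ_OFFSETS = {"PST": -8 * SECONDS_PER_HOUR, "EDT": -4 * SECONDS_PER_HOUR}

# AirNow reports date, hour and timezone abbreviation as separate fields;
# tzinfos lets dateutil turn the abbreviation into a fixed UTC offset.
report_time = parser.parse("2024-05-01 14:00 PST", tzinfos=TZ_OFFSETS)
print(report_time.isoformat())  # 2024-05-01T14:00:00-08:00

Passing integer second offsets (rather than tzinfo objects) matches how the updated sensor builds ATTR_TIME from DateObserved, HourObserved and LocalTimeZone.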
hass.data.setdefault(DOMAIN, {}) - address = entry.unique_id - - is_metric = hass.config.units is METRIC_SYSTEM - assert address is not None - - await close_stale_connections_by_address(address) - - ble_device = bluetooth.async_ble_device_from_address(hass, address) - - if not ble_device: - raise ConfigEntryNotReady( - f"Could not find Airthings device with address {address}" - ) - - airthings = AirthingsBluetoothDeviceData(_LOGGER, is_metric) - - async def _async_update_method() -> AirthingsDevice: - """Get data from Airthings BLE.""" - try: - data = await airthings.update_device(ble_device) - except Exception as err: - raise UpdateFailed(f"Unable to fetch data: {err}") from err - - return data - - coordinator: AirthingsBLEDataUpdateCoordinator = DataUpdateCoordinator( - hass, - _LOGGER, - name=DOMAIN, - update_method=_async_update_method, - update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), - ) - + coordinator = AirthingsBLEDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() # Once its setup and we know we are not going to delay # the startup of Home Assistant, we can set the max attempts # to a higher value. If the first connection attempt fails, # Home Assistant's built-in retry logic will take over. - airthings.set_max_attempts(MAX_RETRIES_AFTER_STARTUP) + coordinator.airthings.set_max_attempts(MAX_RETRIES_AFTER_STARTUP) entry.runtime_data = coordinator diff --git a/homeassistant/components/airthings_ble/coordinator.py b/homeassistant/components/airthings_ble/coordinator.py new file mode 100644 index 00000000000..81009dcea81 --- /dev/null +++ b/homeassistant/components/airthings_ble/coordinator.py @@ -0,0 +1,68 @@ +"""The Airthings BLE integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice +from bleak.backends.device import BLEDevice +from bleak_retry_connector import close_stale_connections_by_address + +from homeassistant.components import bluetooth +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util.unit_system import METRIC_SYSTEM + +from .const import DEFAULT_SCAN_INTERVAL, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +type AirthingsBLEConfigEntry = ConfigEntry[AirthingsBLEDataUpdateCoordinator] + + +class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]): + """Class to manage fetching Airthings BLE data.""" + + ble_device: BLEDevice + config_entry: AirthingsBLEConfigEntry + + def __init__(self, hass: HomeAssistant, entry: AirthingsBLEConfigEntry) -> None: + """Initialize the coordinator.""" + self.airthings = AirthingsBluetoothDeviceData( + _LOGGER, hass.config.units is METRIC_SYSTEM + ) + super().__init__( + hass, + _LOGGER, + config_entry=entry, + name=DOMAIN, + update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), + ) + + async def _async_setup(self) -> None: + """Set up the coordinator.""" + address = self.config_entry.unique_id + + assert address is not None + + await close_stale_connections_by_address(address) + + ble_device = bluetooth.async_ble_device_from_address(self.hass, address) + + if not ble_device: + raise ConfigEntryNotReady( + f"Could not find Airthings device with address {address}" + ) + self.ble_device = ble_device + + async def 
_async_update_data(self) -> AirthingsDevice: + """Get data from Airthings BLE.""" + try: + data = await self.airthings.update_device(self.ble_device) + except Exception as err: + raise UpdateFailed(f"Unable to fetch data: {err}") from err + + return data diff --git a/homeassistant/components/airthings_ble/manifest.json b/homeassistant/components/airthings_ble/manifest.json index b86bc314819..fe2cc0eeb36 100644 --- a/homeassistant/components/airthings_ble/manifest.json +++ b/homeassistant/components/airthings_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/airthings_ble", "iot_class": "local_polling", - "requirements": ["airthings-ble==0.9.0"] + "requirements": ["airthings-ble==0.9.2"] } diff --git a/homeassistant/components/airthings_ble/sensor.py b/homeassistant/components/airthings_ble/sensor.py index b1ae7d533d8..0dfd82a38c4 100644 --- a/homeassistant/components/airthings_ble/sensor.py +++ b/homeassistant/components/airthings_ble/sensor.py @@ -34,8 +34,8 @@ from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.unit_system import METRIC_SYSTEM -from . import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator from .const import DOMAIN, VOLUME_BECQUEREL, VOLUME_PICOCURIE +from .coordinator import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/airtouch4/climate.py b/homeassistant/components/airtouch4/climate.py index dbb6f02859b..0af920bd7a9 100644 --- a/homeassistant/components/airtouch4/climate.py +++ b/homeassistant/components/airtouch4/climate.py @@ -95,7 +95,6 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, ac_number, info): """Initialize the climate device.""" @@ -205,7 +204,6 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = AT_GROUP_MODES - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, group_number, info): """Initialize the climate device.""" diff --git a/homeassistant/components/airtouch5/__init__.py b/homeassistant/components/airtouch5/__init__.py index 8aab41d72cb..f0c7ba8123c 100644 --- a/homeassistant/components/airtouch5/__init__.py +++ b/homeassistant/components/airtouch5/__init__.py @@ -9,8 +9,6 @@ from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DOMAIN - PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.COVER] type Airtouch5ConfigEntry = ConfigEntry[Airtouch5SimpleClient] @@ -19,8 +17,6 @@ type Airtouch5ConfigEntry = ConfigEntry[Airtouch5SimpleClient] async def async_setup_entry(hass: HomeAssistant, entry: Airtouch5ConfigEntry) -> bool: """Set up Airtouch 5 from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - # Create API instance host = entry.data[CONF_HOST] client = Airtouch5SimpleClient(host) diff --git a/homeassistant/components/airtouch5/climate.py b/homeassistant/components/airtouch5/climate.py index 2d5740b1837..16566f5d664 100644 --- a/homeassistant/components/airtouch5/climate.py +++ b/homeassistant/components/airtouch5/climate.py @@ -124,7 +124,6 @@ class 
Airtouch5ClimateEntity(ClimateEntity, Airtouch5Entity): _attr_translation_key = DOMAIN _attr_target_temperature_step = 1 _attr_name = None - _enable_turn_on_off_backwards_compatibility = False class Airtouch5AC(Airtouch5ClimateEntity): @@ -262,7 +261,7 @@ class Airtouch5AC(Airtouch5ClimateEntity): _LOGGER.debug("Argument `temperature` is missing in set_temperature") return - await self._control(temp=temp) + await self._control(setpoint=SetpointControl.CHANGE_SETPOINT, temp=temp) class Airtouch5Zone(Airtouch5ClimateEntity): diff --git a/homeassistant/components/airtouch5/manifest.json b/homeassistant/components/airtouch5/manifest.json index 312a627d0e8..58ef8668ebe 100644 --- a/homeassistant/components/airtouch5/manifest.json +++ b/homeassistant/components/airtouch5/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airtouch5", "iot_class": "local_push", "loggers": ["airtouch5py"], - "requirements": ["airtouch5py==0.2.10"] + "requirements": ["airtouch5py==0.2.11"] } diff --git a/homeassistant/components/airvisual/__init__.py b/homeassistant/components/airvisual/__init__.py index 60fdbf12ca1..d2e5e7169b9 100644 --- a/homeassistant/components/airvisual/__init__.py +++ b/homeassistant/components/airvisual/__init__.py @@ -34,13 +34,8 @@ from homeassistant.helpers import ( device_registry as dr, entity_registry as er, ) -from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( CONF_CITY, @@ -53,6 +48,8 @@ from .const import ( LOGGER, ) +type AirVisualConfigEntry = ConfigEntry[DataUpdateCoordinator] + # We use a raw string for the airvisual_pro domain (instead of importing the actual # constant) so that we can avoid listing it as a dependency: DOMAIN_AIRVISUAL_PRO = "airvisual_pro" @@ -91,10 +88,9 @@ def async_get_cloud_coordinators_by_api_key( ) -> list[DataUpdateCoordinator]: """Get all DataUpdateCoordinator objects related to a particular API key.""" return [ - coordinator - for entry_id, coordinator in hass.data[DOMAIN].items() - if (entry := hass.config_entries.async_get_entry(entry_id)) - and entry.data.get(CONF_API_KEY) == api_key + entry.runtime_data + for entry in hass.config_entries.async_entries(DOMAIN) + if entry.data.get(CONF_API_KEY) == api_key and hasattr(entry, "runtime_data") ] @@ -172,7 +168,7 @@ def _standardize_geography_config_entry( hass.config_entries.async_update_entry(entry, **entry_updates) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -> bool: """Set up AirVisual as config entry.""" if CONF_API_KEY not in entry.data: # If this is a migrated AirVisual Pro entry, there's no actual setup to do; @@ -208,6 +204,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=async_get_geography_id(entry.data), # We give a placeholder update interval in order to create the coordinator; # then, below, we use the coordinator's presence (along with any other @@ -220,8 +217,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_unload(entry.add_update_listener(async_reload_entry)) 
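The airvisual hunks here apply the same storage pattern as airthings_ble above: the coordinator moves off hass.data[DOMAIN] onto a typed entry.runtime_data, so platforms and diagnostics can read it without a registry lookup. A hedged sketch of the general shape, with a hypothetical domain name and a trivial update method that are not part of this patch:

from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

type ExampleConfigEntry = ConfigEntry[DataUpdateCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Store the coordinator on the entry instead of hass.data[DOMAIN]."""

    async def _async_update() -> dict[str, int]:
        # Placeholder fetch; a real integration calls its API client here.
        return {"example": 1}

    coordinator = DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        config_entry=entry,  # keyword the hunks above now pass explicitly
        name="example",
        update_method=_async_update,
        update_interval=timedelta(minutes=10),
    )
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator  # platforms read entry.runtime_data
    return True

With the coordinator on the entry, unload only needs async_unload_platforms, which is why the airvisual async_unload_entry above no longer pops anything from hass.data.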
await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = coordinator + entry.runtime_data = coordinator # Reassess the interval between 2 server requests async_sync_geo_coordinator_update_intervals(hass, entry.data[CONF_API_KEY]) @@ -231,7 +227,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -> bool: """Migrate an old config entry.""" version = entry.version @@ -388,56 +384,18 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -> bool: """Unload an AirVisual config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - if CONF_API_KEY in entry.data: - # Re-calculate the update interval period for any remaining consumers of - # this API key: - async_sync_geo_coordinator_update_intervals(hass, entry.data[CONF_API_KEY]) + if unload_ok and CONF_API_KEY in entry.data: + # Re-calculate the update interval period for any remaining consumers of + # this API key: + async_sync_geo_coordinator_update_intervals(hass, entry.data[CONF_API_KEY]) return unload_ok -async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_reload_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -> None: """Handle an options update.""" await hass.config_entries.async_reload(entry.entry_id) - - -class AirVisualEntity(CoordinatorEntity): - """Define a generic AirVisual entity.""" - - def __init__( - self, - coordinator: DataUpdateCoordinator, - entry: ConfigEntry, - description: EntityDescription, - ) -> None: - """Initialize.""" - super().__init__(coordinator) - - self._attr_extra_state_attributes = {} - self._entry = entry - self.entity_description = description - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - await super().async_added_to_hass() - - @callback - def update() -> None: - """Update the state.""" - self.update_from_latest_data() - self.async_write_ha_state() - - self.async_on_remove(self.coordinator.async_add_listener(update)) - - self.update_from_latest_data() - - @callback - def update_from_latest_data(self) -> None: - """Update the entity from the latest data.""" - raise NotImplementedError diff --git a/homeassistant/components/airvisual/config_flow.py b/homeassistant/components/airvisual/config_flow.py index 2d7a0d8886e..7643d541070 100644 --- a/homeassistant/components/airvisual/config_flow.py +++ b/homeassistant/components/airvisual/config_flow.py @@ -16,7 +16,12 @@ from pyairvisual.cloud_api import ( from pyairvisual.errors import AirVisualError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_REAUTH, + ConfigEntry, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import ( CONF_API_KEY, CONF_COUNTRY, @@ -140,12 +145,11 @@ class AirVisualFlowHandler(ConfigFlow, domain=DOMAIN): valid_keys.add(user_input[CONF_API_KEY]) - if existing_entry := await self.async_set_unique_id(self._geo_id): - self.hass.config_entries.async_update_entry(existing_entry, 
data=user_input) - self.hass.async_create_task( - self.hass.config_entries.async_reload(existing_entry.entry_id) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_API_KEY: user_input[CONF_API_KEY]}, ) - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=f"Cloud API ({self._geo_id})", diff --git a/homeassistant/components/airvisual/diagnostics.py b/homeassistant/components/airvisual/diagnostics.py index 348bb249b0f..2e7c60364f9 100644 --- a/homeassistant/components/airvisual/diagnostics.py +++ b/homeassistant/components/airvisual/diagnostics.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, CONF_COUNTRY, @@ -15,9 +14,9 @@ from homeassistant.const import ( CONF_UNIQUE_ID, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import CONF_CITY, DOMAIN +from . import AirVisualConfigEntry +from .const import CONF_CITY CONF_COORDINATES = "coordinates" CONF_TITLE = "title" @@ -37,10 +36,10 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: AirVisualConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: DataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data return { "entry": async_redact_data(entry.as_dict(), TO_REDACT), diff --git a/homeassistant/components/airvisual/entity.py b/homeassistant/components/airvisual/entity.py new file mode 100644 index 00000000000..db480e560c7 --- /dev/null +++ b/homeassistant/components/airvisual/entity.py @@ -0,0 +1,47 @@ +"""The AirVisual component.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import callback +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + + +class AirVisualEntity(CoordinatorEntity): + """Define a generic AirVisual entity.""" + + def __init__( + self, + coordinator: DataUpdateCoordinator, + entry: ConfigEntry, + description: EntityDescription, + ) -> None: + """Initialize.""" + super().__init__(coordinator) + + self._attr_extra_state_attributes = {} + self._entry = entry + self.entity_description = description + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await super().async_added_to_hass() + + @callback + def update() -> None: + """Update the state.""" + self.update_from_latest_data() + self.async_write_ha_state() + + self.async_on_remove(self.coordinator.async_add_listener(update)) + + self.update_from_latest_data() + + @callback + def update_from_latest_data(self) -> None: + """Update the entity from the latest data.""" + raise NotImplementedError diff --git a/homeassistant/components/airvisual/sensor.py b/homeassistant/components/airvisual/sensor.py index df0e3da1f45..88a670edb82 100644 --- a/homeassistant/components/airvisual/sensor.py +++ b/homeassistant/components/airvisual/sensor.py @@ -26,8 +26,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator 
import DataUpdateCoordinator -from . import AirVisualEntity -from .const import CONF_CITY, DOMAIN +from . import AirVisualConfigEntry +from .const import CONF_CITY +from .entity import AirVisualEntity ATTR_CITY = "city" ATTR_COUNTRY = "country" @@ -105,10 +106,12 @@ POLLUTANT_UNITS = { async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: AirVisualConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up AirVisual sensors based on a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( AirVisualGeographySensor(coordinator, entry, description, locale) for locale in GEOGRAPHY_SENSOR_LOCALES diff --git a/homeassistant/components/airvisual/strings.json b/homeassistant/components/airvisual/strings.json index 397a41bf24b..148b1368a19 100644 --- a/homeassistant/components/airvisual/strings.json +++ b/homeassistant/components/airvisual/strings.json @@ -32,7 +32,7 @@ } }, "error": { - "general_error": "[%key:common::config_flow::error::unknown%]", + "unknown": "[%key:common::config_flow::error::unknown%]", "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]", "location_not_found": "Location not found", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" diff --git a/homeassistant/components/airvisual_pro/__init__.py b/homeassistant/components/airvisual_pro/__init__.py index 7397f279021..3b3ac6df232 100644 --- a/homeassistant/components/airvisual_pro/__init__.py +++ b/homeassistant/components/airvisual_pro/__init__.py @@ -24,15 +24,9 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DOMAIN, LOGGER +from .const import LOGGER PLATFORMS = [Platform.SENSOR] @@ -87,6 +81,7 @@ async def async_setup_entry( coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name="Node/Pro data", update_interval=UPDATE_INTERVAL, update_method=async_get_data, @@ -120,28 +115,3 @@ async def async_unload_entry( await entry.runtime_data.node.async_disconnect() return unload_ok - - -class AirVisualProEntity(CoordinatorEntity): - """Define a generic AirVisual Pro entity.""" - - def __init__( - self, coordinator: DataUpdateCoordinator, description: EntityDescription - ) -> None: - """Initialize.""" - super().__init__(coordinator) - - self._attr_unique_id = f"{coordinator.data['serial_number']}_{description.key}" - self.entity_description = description - - @property - def device_info(self) -> DeviceInfo: - """Return device registry information for this entity.""" - return DeviceInfo( - identifiers={(DOMAIN, self.coordinator.data["serial_number"])}, - manufacturer="AirVisual", - model=self.coordinator.data["status"]["model"], - name=self.coordinator.data["settings"]["node_name"], - hw_version=self.coordinator.data["status"]["system_version"], - sw_version=self.coordinator.data["status"]["app_version"], - ) diff --git a/homeassistant/components/airvisual_pro/config_flow.py b/homeassistant/components/airvisual_pro/config_flow.py index 
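Both AirVisual config flows in this patch converge on the same reauth finish: validate the new credentials, then hand the existing entry to async_update_reload_and_abort instead of manually updating, reloading and aborting. A minimal sketch of that shape, with a hypothetical domain and credential validation elided:

from typing import Any

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult


class ExampleFlowHandler(ConfigFlow, domain="example"):
    """Hypothetical flow handler; only the reauth finish is of interest."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Create the entry, or update the existing one on reauth."""
        assert user_input is not None  # credential validation elided here
        if self.source == SOURCE_REAUTH:
            # One helper call replaces the async_update_entry + async_reload +
            # async_abort(reason="reauth_successful") sequence removed above.
            return self.async_update_reload_and_abort(
                self._get_reauth_entry(), data_updates=user_input
            )
        return self.async_create_entry(title="Example", data=user_input)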
ebdbc807b18..c2d136f3102 100644 --- a/homeassistant/components/airvisual_pro/config_flow.py +++ b/homeassistant/components/airvisual_pro/config_flow.py @@ -14,7 +14,7 @@ from pyairvisual.node import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD from .const import DOMAIN, LOGGER @@ -76,23 +76,17 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self._reauth_entry: ConfigEntry | None = None + _reauth_entry_data: Mapping[str, Any] - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + """Import a config entry from `airvisual` integration (see #83882).""" + return await self.async_step_user(import_data) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) + self._reauth_entry_data = entry_data return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -104,10 +98,8 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN): step_id="reauth_confirm", data_schema=STEP_REAUTH_SCHEMA ) - assert self._reauth_entry - validation_result = await async_validate_credentials( - self._reauth_entry.data[CONF_IP_ADDRESS], user_input[CONF_PASSWORD] + self._reauth_entry_data[CONF_IP_ADDRESS], user_input[CONF_PASSWORD] ) if validation_result.errors: @@ -117,13 +109,9 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN): errors=validation_result.errors, ) - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=self._reauth_entry.data | user_input + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") async def async_step_user( self, user_input: dict[str, str] | None = None diff --git a/homeassistant/components/airvisual_pro/entity.py b/homeassistant/components/airvisual_pro/entity.py new file mode 100644 index 00000000000..bc28fa36e52 --- /dev/null +++ b/homeassistant/components/airvisual_pro/entity.py @@ -0,0 +1,37 @@ +"""The AirVisual Pro integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import DOMAIN + + +class AirVisualProEntity(CoordinatorEntity): + """Define a generic AirVisual Pro entity.""" + + def __init__( + self, coordinator: DataUpdateCoordinator, description: EntityDescription + ) -> None: + """Initialize.""" + super().__init__(coordinator) + + self._attr_unique_id = f"{coordinator.data['serial_number']}_{description.key}" + self.entity_description = description + + @property + def device_info(self) -> DeviceInfo: + """Return device registry information for this entity.""" + return DeviceInfo( + identifiers={(DOMAIN, 
self.coordinator.data["serial_number"])}, + manufacturer="AirVisual", + model=self.coordinator.data["status"]["model"], + name=self.coordinator.data["settings"]["node_name"], + hw_version=self.coordinator.data["status"]["system_version"], + sw_version=self.coordinator.data["status"]["app_version"], + ) diff --git a/homeassistant/components/airvisual_pro/sensor.py b/homeassistant/components/airvisual_pro/sensor.py index 895ba7d3244..66726832843 100644 --- a/homeassistant/components/airvisual_pro/sensor.py +++ b/homeassistant/components/airvisual_pro/sensor.py @@ -22,7 +22,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AirVisualProConfigEntry, AirVisualProEntity +from . import AirVisualProConfigEntry +from .entity import AirVisualProEntity @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/airzone/__init__.py b/homeassistant/components/airzone/__init__.py index 754dfe90dce..5d1f9f051a3 100644 --- a/homeassistant/components/airzone/__init__.py +++ b/homeassistant/components/airzone/__init__.py @@ -24,6 +24,7 @@ PLATFORMS: list[Platform] = [ Platform.CLIMATE, Platform.SELECT, Platform.SENSOR, + Platform.SWITCH, Platform.WATER_HEATER, ] diff --git a/homeassistant/components/airzone/climate.py b/homeassistant/components/airzone/climate.py index 5e5e1c126de..4ed54286cff 100644 --- a/homeassistant/components/airzone/climate.py +++ b/homeassistant/components/airzone/climate.py @@ -85,6 +85,7 @@ HVAC_MODE_LIB_TO_HASS: Final[dict[OperationMode, HVACMode]] = { OperationMode.HEATING: HVACMode.HEAT, OperationMode.FAN: HVACMode.FAN_ONLY, OperationMode.DRY: HVACMode.DRY, + OperationMode.AUX_HEATING: HVACMode.HEAT, OperationMode.AUTO: HVACMode.HEAT_COOL, } HVAC_MODE_HASS_TO_LIB: Final[dict[HVACMode, OperationMode]] = { @@ -135,7 +136,6 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity): _attr_name = None _speeds: dict[int, str] = {} _speeds_reverse: dict[str, int] = {} - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -157,9 +157,10 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity): self._attr_temperature_unit = TEMP_UNIT_LIB_TO_HASS[ self.get_airzone_value(AZD_TEMP_UNIT) ] - self._attr_hvac_modes = [ + _attr_hvac_modes = [ HVAC_MODE_LIB_TO_HASS[mode] for mode in self.get_airzone_value(AZD_MODES) ] + self._attr_hvac_modes = list(dict.fromkeys(_attr_hvac_modes)) if ( self.get_airzone_value(AZD_SPEED) is not None and self.get_airzone_value(AZD_SPEEDS) is not None @@ -273,12 +274,18 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity): self._attr_min_temp = self.get_airzone_value(AZD_TEMP_MIN) if self.supported_features & ClimateEntityFeature.FAN_MODE: self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED)) - if self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE: + if ( + self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + and self._attr_hvac_mode == HVACMode.HEAT_COOL + ): self._attr_target_temperature_high = self.get_airzone_value( AZD_COOL_TEMP_SET ) self._attr_target_temperature_low = self.get_airzone_value( AZD_HEAT_TEMP_SET ) + self._attr_target_temperature = None else: + self._attr_target_temperature_high = None + self._attr_target_temperature_low = None self._attr_target_temperature = self.get_airzone_value(AZD_TEMP_SET) diff --git a/homeassistant/components/airzone/diagnostics.py b/homeassistant/components/airzone/diagnostics.py index 
6c75b750eaf..2945df7b6fb 100644 --- a/homeassistant/components/airzone/diagnostics.py +++ b/homeassistant/components/airzone/diagnostics.py @@ -6,7 +6,7 @@ from typing import Any from aioairzone.const import API_MAC, AZD_MAC -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.const import CONF_UNIQUE_ID from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/airzone/manifest.json b/homeassistant/components/airzone/manifest.json index 31ff7423ad6..01fde7eb2fb 100644 --- a/homeassistant/components/airzone/manifest.json +++ b/homeassistant/components/airzone/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone", "iot_class": "local_polling", "loggers": ["aioairzone"], - "requirements": ["aioairzone==0.8.2"] + "requirements": ["aioairzone==0.9.7"] } diff --git a/homeassistant/components/airzone/switch.py b/homeassistant/components/airzone/switch.py new file mode 100644 index 00000000000..93136810604 --- /dev/null +++ b/homeassistant/components/airzone/switch.py @@ -0,0 +1,122 @@ +"""Support for the Airzone switch.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any, Final + +from aioairzone.const import API_ON, AZD_ON, AZD_ZONES + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import AirzoneConfigEntry +from .coordinator import AirzoneUpdateCoordinator +from .entity import AirzoneEntity, AirzoneZoneEntity + + +@dataclass(frozen=True, kw_only=True) +class AirzoneSwitchDescription(SwitchEntityDescription): + """Class to describe an Airzone switch entity.""" + + api_param: str + + +ZONE_SWITCH_TYPES: Final[tuple[AirzoneSwitchDescription, ...]] = ( + AirzoneSwitchDescription( + api_param=API_ON, + device_class=SwitchDeviceClass.SWITCH, + key=AZD_ON, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AirzoneConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Add Airzone switch from a config_entry.""" + coordinator = entry.runtime_data + + added_zones: set[str] = set() + + def _async_entity_listener() -> None: + """Handle additions of switch.""" + + zones_data = coordinator.data.get(AZD_ZONES, {}) + received_zones = set(zones_data) + new_zones = received_zones - added_zones + if new_zones: + async_add_entities( + AirzoneZoneSwitch( + coordinator, + description, + entry, + system_zone_id, + zones_data.get(system_zone_id), + ) + for system_zone_id in new_zones + for description in ZONE_SWITCH_TYPES + if description.key in zones_data.get(system_zone_id) + ) + added_zones.update(new_zones) + + entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener)) + _async_entity_listener() + + +class AirzoneBaseSwitch(AirzoneEntity, SwitchEntity): + """Define an Airzone switch.""" + + entity_description: AirzoneSwitchDescription + + @callback + def _handle_coordinator_update(self) -> None: + """Update attributes when the coordinator updates.""" + self._async_update_attrs() + super()._handle_coordinator_update() + + @callback + def _async_update_attrs(self) -> None: + """Update switch attributes.""" + self._attr_is_on = self.get_airzone_value(self.entity_description.key) + 
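The new airzone switch platform above only creates a zone switch when the coordinator's zone dict actually contains the description's key, and it remembers which zones were already added so later coordinator updates only add new ones. A standalone sketch of that filtering step, with made-up zone data and plain string keys standing in for the AZD_/API_ constants:

from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class SwitchDescription:
    """Trimmed-down stand-in for AirzoneSwitchDescription."""

    key: str
    api_param: str


ZONE_SWITCH_TYPES = (SwitchDescription(key="on", api_param="on"),)

added_zones: set[str] = set()


def new_switches(zones_data: dict[str, dict]) -> list[tuple[str, SwitchDescription]]:
    """Return (zone_id, description) pairs for zones not yet added."""
    new_zones = set(zones_data) - added_zones
    added_zones.update(new_zones)
    return [
        (zone_id, description)
        for zone_id in new_zones
        for description in ZONE_SWITCH_TYPES
        if description.key in zones_data[zone_id]
    ]


# Zone "1:1" exposes the key, zone "1:2" does not, so only one switch is created.
print(new_switches({"1:1": {"on": True}, "1:2": {}}))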
+ +class AirzoneZoneSwitch(AirzoneZoneEntity, AirzoneBaseSwitch): + """Define an Airzone Zone switch.""" + + def __init__( + self, + coordinator: AirzoneUpdateCoordinator, + description: AirzoneSwitchDescription, + entry: ConfigEntry, + system_zone_id: str, + zone_data: dict[str, Any], + ) -> None: + """Initialize.""" + super().__init__(coordinator, entry, system_zone_id, zone_data) + + self._attr_name = None + self._attr_unique_id = ( + f"{self._attr_unique_id}_{system_zone_id}_{description.key}" + ) + self.entity_description = description + + self._async_update_attrs() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + param = self.entity_description.api_param + await self._async_update_hvac_params({param: True}) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + param = self.entity_description.api_param + await self._async_update_hvac_params({param: False}) diff --git a/homeassistant/components/airzone_cloud/__init__.py b/homeassistant/components/airzone_cloud/__init__.py index b1d7900f2e8..5baa0bcea10 100644 --- a/homeassistant/components/airzone_cloud/__init__.py +++ b/homeassistant/components/airzone_cloud/__init__.py @@ -17,6 +17,7 @@ PLATFORMS: list[Platform] = [ Platform.CLIMATE, Platform.SELECT, Platform.SENSOR, + Platform.SWITCH, Platform.WATER_HEATER, ] diff --git a/homeassistant/components/airzone_cloud/climate.py b/homeassistant/components/airzone_cloud/climate.py index 3658c073795..b98473072e4 100644 --- a/homeassistant/components/airzone_cloud/climate.py +++ b/homeassistant/components/airzone_cloud/climate.py @@ -177,7 +177,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity): _attr_name = None _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def _init_attributes(self) -> None: """Init common climate device attributes.""" @@ -194,12 +193,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE_RANGE ) - if ( - self.get_airzone_value(AZD_SPEED) is not None - and self.get_airzone_value(AZD_SPEEDS) is not None - ): - self._initialize_fan_speeds() - @callback def _handle_coordinator_update(self) -> None: """Update attributes when the coordinator updates.""" @@ -214,8 +207,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity): self._attr_hvac_action = HVAC_ACTION_LIB_TO_HASS[ self.get_airzone_value(AZD_ACTION) ] - if self.supported_features & ClimateEntityFeature.FAN_MODE: - self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED)) if self.get_airzone_value(AZD_POWER): self._attr_hvac_mode = HVAC_MODE_LIB_TO_HASS[ self.get_airzone_value(AZD_MODE) @@ -224,14 +215,20 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity): self._attr_hvac_mode = HVACMode.OFF self._attr_max_temp = self.get_airzone_value(AZD_TEMP_SET_MAX) self._attr_min_temp = self.get_airzone_value(AZD_TEMP_SET_MIN) - if self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE: + if ( + self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + and self._attr_hvac_mode == HVACMode.HEAT_COOL + ): self._attr_target_temperature_high = self.get_airzone_value( AZD_TEMP_SET_COOL_AIR ) self._attr_target_temperature_low = self.get_airzone_value( AZD_TEMP_SET_HOT_AIR ) + self._attr_target_temperature = None else: + self._attr_target_temperature_high = None + self._attr_target_temperature_low = None self._attr_target_temperature = self.get_airzone_value(AZD_TEMP_SET) @@ -246,6 +243,22 @@ class 
AirzoneDeviceClimate(AirzoneClimate): _speeds: dict[int, str] _speeds_reverse: dict[str, int] + def _init_attributes(self) -> None: + """Init common climate device attributes.""" + super()._init_attributes() + if ( + self.get_airzone_value(AZD_SPEED) is not None + and self.get_airzone_value(AZD_SPEEDS) is not None + ): + self._initialize_fan_speeds() + + @callback + def _async_update_attrs(self) -> None: + """Update climate attributes.""" + super()._async_update_attrs() + if self.supported_features & ClimateEntityFeature.FAN_MODE: + self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED)) + def _initialize_fan_speeds(self) -> None: """Initialize fan speeds.""" azd_speeds: dict[int, int] = self.get_airzone_value(AZD_SPEEDS) @@ -304,6 +317,10 @@ class AirzoneDeviceClimate(AirzoneClimate): async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" + hvac_mode = kwargs.get(ATTR_HVAC_MODE) + if hvac_mode is not None: + await self.async_set_hvac_mode(hvac_mode) + params: dict[str, Any] = {} if ATTR_TEMPERATURE in kwargs: params[API_SETPOINT] = { @@ -327,9 +344,6 @@ class AirzoneDeviceClimate(AirzoneClimate): } await self._async_update_params(params) - if ATTR_HVAC_MODE in kwargs: - await self.async_set_hvac_mode(kwargs[ATTR_HVAC_MODE]) - class AirzoneDeviceGroupClimate(AirzoneClimate): """Define an Airzone Cloud DeviceGroup base class.""" @@ -360,6 +374,10 @@ class AirzoneDeviceGroupClimate(AirzoneClimate): async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" + hvac_mode = kwargs.get(ATTR_HVAC_MODE) + if hvac_mode is not None: + await self.async_set_hvac_mode(hvac_mode) + params: dict[str, Any] = {} if ATTR_TEMPERATURE in kwargs: params[API_PARAMS] = { @@ -370,9 +388,6 @@ class AirzoneDeviceGroupClimate(AirzoneClimate): } await self._async_update_params(params) - if ATTR_HVAC_MODE in kwargs: - await self.async_set_hvac_mode(kwargs[ATTR_HVAC_MODE]) - async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set hvac mode.""" params: dict[str, Any] = { diff --git a/homeassistant/components/airzone_cloud/diagnostics.py b/homeassistant/components/airzone_cloud/diagnostics.py index 516a8fcb165..b6744e36d8c 100644 --- a/homeassistant/components/airzone_cloud/diagnostics.py +++ b/homeassistant/components/airzone_cloud/diagnostics.py @@ -21,7 +21,7 @@ from aioairzone_cloud.const import ( RAW_WEBSERVERS, ) -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index b691770e934..0e21e57ec52 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.6.2"] + "requirements": ["aioairzone-cloud==0.6.10"] } diff --git a/homeassistant/components/airzone_cloud/select.py b/homeassistant/components/airzone_cloud/select.py index 9bc0bdd1f5b..895796a1073 100644 --- a/homeassistant/components/airzone_cloud/select.py +++ b/homeassistant/components/airzone_cloud/select.py @@ -2,14 +2,19 @@ from __future__ import annotations +from collections.abc 
import Callable from dataclasses import dataclass from typing import Any, Final -from aioairzone_cloud.common import AirQualityMode +from aioairzone_cloud.common import AirQualityMode, OperationMode from aioairzone_cloud.const import ( API_AQ_MODE_CONF, + API_MODE, API_VALUE, AZD_AQ_MODE_CONF, + AZD_MASTER, + AZD_MODE, + AZD_MODES, AZD_ZONES, ) @@ -28,7 +33,10 @@ class AirzoneSelectDescription(SelectEntityDescription): """Class to describe an Airzone select entity.""" api_param: str - options_dict: dict[str, str] + options_dict: dict[str, Any] + options_fn: Callable[[dict[str, Any], dict[str, Any]], list[str]] = ( + lambda zone_data, value: list(value) + ) AIR_QUALITY_MAP: Final[dict[str, str]] = { @@ -37,6 +45,35 @@ AIR_QUALITY_MAP: Final[dict[str, str]] = { "auto": AirQualityMode.AUTO, } +MODE_MAP: Final[dict[str, int]] = { + "cool": OperationMode.COOLING, + "dry": OperationMode.DRY, + "fan": OperationMode.VENTILATION, + "heat": OperationMode.HEATING, + "heat_cool": OperationMode.AUTO, + "stop": OperationMode.STOP, +} + + +def main_zone_options( + zone_data: dict[str, Any], + options: dict[str, int], +) -> list[str]: + """Filter available modes.""" + modes = zone_data.get(AZD_MODES, []) + return [k for k, v in options.items() if v in modes] + + +MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( + AirzoneSelectDescription( + api_param=API_MODE, + key=AZD_MODE, + options_dict=MODE_MAP, + options_fn=main_zone_options, + translation_key="modes", + ), +) + ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( AirzoneSelectDescription( @@ -59,7 +96,19 @@ async def async_setup_entry( coordinator = entry.runtime_data # Zones - async_add_entities( + entities: list[AirzoneZoneSelect] = [ + AirzoneZoneSelect( + coordinator, + description, + zone_id, + zone_data, + ) + for description in MAIN_ZONE_SELECT_TYPES + for zone_id, zone_data in coordinator.data.get(AZD_ZONES, {}).items() + if description.key in zone_data and zone_data.get(AZD_MASTER) + ] + + entities.extend( AirzoneZoneSelect( coordinator, description, @@ -71,6 +120,8 @@ async def async_setup_entry( if description.key in zone_data ) + async_add_entities(entities) + class AirzoneBaseSelect(AirzoneEntity, SelectEntity): """Define an Airzone Cloud select.""" @@ -110,6 +161,11 @@ class AirzoneZoneSelect(AirzoneZoneEntity, AirzoneBaseSelect): self._attr_unique_id = f"{zone_id}_{description.key}" self.entity_description = description + + self._attr_options = self.entity_description.options_fn( + zone_data, description.options_dict + ) + self.values_dict = {v: k for k, v in description.options_dict.items()} self._async_update_attrs() diff --git a/homeassistant/components/airzone_cloud/sensor.py b/homeassistant/components/airzone_cloud/sensor.py index 9f0ee01aca2..70d2fd079d4 100644 --- a/homeassistant/components/airzone_cloud/sensor.py +++ b/homeassistant/components/airzone_cloud/sensor.py @@ -12,7 +12,16 @@ from aioairzone_cloud.const import ( AZD_AQ_PM_10, AZD_CPU_USAGE, AZD_HUMIDITY, + AZD_INDOOR_EXCHANGER_TEMP, + AZD_INDOOR_RETURN_TEMP, + AZD_INDOOR_WORK_TEMP, AZD_MEMORY_FREE, + AZD_OUTDOOR_CONDENSER_PRESS, + AZD_OUTDOOR_DISCHARGE_TEMP, + AZD_OUTDOOR_ELECTRIC_CURRENT, + AZD_OUTDOOR_EVAPORATOR_PRESS, + AZD_OUTDOOR_EXCHANGER_TEMP, + AZD_OUTDOOR_TEMP, AZD_TEMP, AZD_THERMOSTAT_BATTERY, AZD_THERMOSTAT_COVERAGE, @@ -32,7 +41,9 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfElectricCurrent, UnitOfInformation, + UnitOfPressure, UnitOfTemperature, ) from 
homeassistant.core import HomeAssistant, callback @@ -48,6 +59,78 @@ from .entity import ( ) AIDOO_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = ( + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_INDOOR_EXCHANGER_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="indoor_exchanger_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_INDOOR_RETURN_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="indoor_return_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_INDOOR_WORK_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="indoor_work_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_CONDENSER_PRESS, + native_unit_of_measurement=UnitOfPressure.KPA, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_condenser_press", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_DISCHARGE_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_discharge_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_ELECTRIC_CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_electric_current", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_EVAPORATOR_PRESS, + native_unit_of_measurement=UnitOfPressure.KPA, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_evaporator_press", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_EXCHANGER_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_exchanger_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_temp", + ), SensorEntityDescription( device_class=SensorDeviceClass.TEMPERATURE, key=AZD_TEMP, diff --git a/homeassistant/components/airzone_cloud/strings.json b/homeassistant/components/airzone_cloud/strings.json index eb9529c7ca5..6e0f9adcd66 100644 --- a/homeassistant/components/airzone_cloud/strings.json +++ b/homeassistant/components/airzone_cloud/strings.json @@ -36,6 +36,17 @@ "on": "On", "auto": "Auto" } + }, + "modes": { + "name": "Mode", + "state": { + "cool": "[%key:component::climate::entity_component::_::state::cool%]", + "dry": "[%key:component::climate::entity_component::_::state::dry%]", + "fan": "[%key:component::climate::entity_component::_::state::fan_only%]", + "heat": "[%key:component::climate::entity_component::_::state::heat%]", + "heat_cool": 
"[%key:component::climate::entity_component::_::state::heat_cool%]", + "stop": "Stop" + } } }, "sensor": { @@ -45,6 +56,33 @@ "free_memory": { "name": "Free memory" }, + "indoor_exchanger_temp": { + "name": "Indoor exchanger temperature" + }, + "indoor_return_temp": { + "name": "Indoor return temperature" + }, + "indoor_work_temp": { + "name": "Indoor working temperature" + }, + "outdoor_condenser_press": { + "name": "Outdoor condenser pressure" + }, + "outdoor_discharge_temp": { + "name": "Outdoor discharge temperature" + }, + "outdoor_electric_current": { + "name": "Outdoor electric current" + }, + "outdoor_evaporator_press": { + "name": "Outdoor evaporator pressure" + }, + "outdoor_exchanger_temp": { + "name": "Outdoor exchanger temperature" + }, + "outdoor_temp": { + "name": "Outdoor temperature" + }, "thermostat_coverage": { "name": "Signal percentage" } diff --git a/homeassistant/components/airzone_cloud/switch.py b/homeassistant/components/airzone_cloud/switch.py new file mode 100644 index 00000000000..0eb907ff792 --- /dev/null +++ b/homeassistant/components/airzone_cloud/switch.py @@ -0,0 +1,115 @@ +"""Support for the Airzone Cloud switch.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any, Final + +from aioairzone_cloud.const import API_POWER, API_VALUE, AZD_POWER, AZD_ZONES + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import AirzoneCloudConfigEntry +from .coordinator import AirzoneUpdateCoordinator +from .entity import AirzoneEntity, AirzoneZoneEntity + + +@dataclass(frozen=True, kw_only=True) +class AirzoneSwitchDescription(SwitchEntityDescription): + """Class to describe an Airzone switch entity.""" + + api_param: str + + +ZONE_SWITCH_TYPES: Final[tuple[AirzoneSwitchDescription, ...]] = ( + AirzoneSwitchDescription( + api_param=API_POWER, + device_class=SwitchDeviceClass.SWITCH, + key=AZD_POWER, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AirzoneCloudConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Add Airzone Cloud switch from a config_entry.""" + coordinator = entry.runtime_data + + # Zones + async_add_entities( + AirzoneZoneSwitch( + coordinator, + description, + zone_id, + zone_data, + ) + for description in ZONE_SWITCH_TYPES + for zone_id, zone_data in coordinator.data.get(AZD_ZONES, {}).items() + if description.key in zone_data + ) + + +class AirzoneBaseSwitch(AirzoneEntity, SwitchEntity): + """Define an Airzone Cloud switch.""" + + entity_description: AirzoneSwitchDescription + + @callback + def _handle_coordinator_update(self) -> None: + """Update attributes when the coordinator updates.""" + self._async_update_attrs() + super()._handle_coordinator_update() + + @callback + def _async_update_attrs(self) -> None: + """Update switch attributes.""" + self._attr_is_on = self.get_airzone_value(self.entity_description.key) + + +class AirzoneZoneSwitch(AirzoneZoneEntity, AirzoneBaseSwitch): + """Define an Airzone Cloud Zone switch.""" + + def __init__( + self, + coordinator: AirzoneUpdateCoordinator, + description: AirzoneSwitchDescription, + zone_id: str, + zone_data: dict[str, Any], + ) -> None: + """Initialize.""" + super().__init__(coordinator, zone_id, zone_data) + + self._attr_name = None + self._attr_unique_id = f"{zone_id}_{description.key}" + 
self.entity_description = description + + self._async_update_attrs() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + param = self.entity_description.api_param + params: dict[str, Any] = { + param: { + API_VALUE: True, + } + } + await self._async_update_params(params) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + param = self.entity_description.api_param + params: dict[str, Any] = { + param: { + API_VALUE: False, + } + } + await self._async_update_params(params) diff --git a/homeassistant/components/alarm_control_panel/__init__.py b/homeassistant/components/alarm_control_panel/__init__.py index b09d5867d26..4c5e201df8f 100644 --- a/homeassistant/components/alarm_control_panel/__init__.py +++ b/homeassistant/components/alarm_control_panel/__init__.py @@ -2,11 +2,12 @@ from __future__ import annotations +import asyncio from datetime import timedelta -from functools import cached_property, partial import logging -from typing import Any, Final, final +from typing import TYPE_CHECKING, Any, Final, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -25,33 +26,25 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import make_entity_service_schema -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey -from .const import ( # noqa: F401 - _DEPRECATED_FORMAT_NUMBER, - _DEPRECATED_FORMAT_TEXT, - _DEPRECATED_SUPPORT_ALARM_ARM_AWAY, - _DEPRECATED_SUPPORT_ALARM_ARM_CUSTOM_BYPASS, - _DEPRECATED_SUPPORT_ALARM_ARM_HOME, - _DEPRECATED_SUPPORT_ALARM_ARM_NIGHT, - _DEPRECATED_SUPPORT_ALARM_ARM_VACATION, - _DEPRECATED_SUPPORT_ALARM_TRIGGER, +from .const import ( ATTR_CHANGED_BY, ATTR_CODE_ARM_REQUIRED, DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) _LOGGER: Final = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[AlarmControlPanelEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT: Final = DOMAIN + ".{}" PLATFORM_SCHEMA: Final = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE: Final = cv.PLATFORM_SCHEMA_BASE @@ -69,7 +62,7 @@ ALARM_SERVICE_SCHEMA: Final = make_entity_service_schema( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for sensors.""" - component = hass.data[DOMAIN] = EntityComponent[AlarmControlPanelEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[AlarmControlPanelEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -122,14 +115,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[AlarmControlPanelEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def 
async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[AlarmControlPanelEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class AlarmControlPanelEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -141,6 +132,7 @@ CACHED_PROPERTIES_WITH_ATTR_ = { "changed_by", "code_arm_required", "supported_features", + "alarm_state", } @@ -148,6 +140,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A """An abstract class for alarm control entities.""" entity_description: AlarmControlPanelEntityDescription + _attr_alarm_state: AlarmControlPanelState | None = None _attr_changed_by: str | None = None _attr_code_arm_required: bool = True _attr_code_format: CodeFormat | None = None @@ -156,6 +149,78 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A ) _alarm_control_panel_option_default_code: str | None = None + __alarm_legacy_state: bool = False + + def __init_subclass__(cls, **kwargs: Any) -> None: + """Post initialisation processing.""" + super().__init_subclass__(**kwargs) + if any(method in cls.__dict__ for method in ("_attr_state", "state")): + # Integrations should use the 'alarm_state' property instead of + # setting the state directly. + cls.__alarm_legacy_state = True + + def __setattr__(self, name: str, value: Any, /) -> None: + """Set attribute. + + Deprecation warning if setting '_attr_state' directly + unless already reported. + """ + if name == "_attr_state": + self._report_deprecated_alarm_state_handling() + return super().__setattr__(name, value) + + @callback + def add_to_platform_start( + self, + hass: HomeAssistant, + platform: EntityPlatform, + parallel_updates: asyncio.Semaphore | None, + ) -> None: + """Start adding an entity to a platform.""" + super().add_to_platform_start(hass, platform, parallel_updates) + if self.__alarm_legacy_state: + self._report_deprecated_alarm_state_handling() + + @callback + def _report_deprecated_alarm_state_handling(self) -> None: + """Report on deprecated handling of alarm state. + + Integrations should implement alarm_state instead of using state directly. + """ + report_usage( + "is setting state directly." + f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'" + " property and return its state using the AlarmControlPanelState enum", + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.11", + integration_domain=self.platform.platform_name if self.platform else None, + exclude_integrations={DOMAIN}, + ) + + @final + @property + def state(self) -> str | None: + """Return the current state.""" + if (alarm_state := self.alarm_state) is not None: + return alarm_state + if self._attr_state is not None: + # Backwards compatibility for integrations that set state directly + # Should be removed in 2025.11 + if TYPE_CHECKING: + assert isinstance(self._attr_state, str) + return self._attr_state + return None + + @cached_property + def alarm_state(self) -> AlarmControlPanelState | None: + """Return the current alarm control panel entity state. + + Integrations should overwrite this or use the '_attr_alarm_state' + attribute to set the alarm status using the 'AlarmControlPanelState' enum. 
+ """ + return self._attr_alarm_state + @final @callback def code_or_default_code(self, code: str | None) -> str | None: @@ -191,7 +256,6 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A """Check if arm code is required, raise if no code is given.""" if not (_code := self.code_or_default_code(code)) and self.code_arm_required: raise ServiceValidationError( - f"Arming requires a code but none was given for {self.entity_id}", translation_domain=DOMAIN, translation_key="code_arm_required", translation_placeholders={ @@ -291,12 +355,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A @cached_property def supported_features(self) -> AlarmControlPanelEntityFeature: """Return the list of supported features.""" - features = self._attr_supported_features - if type(features) is int: # noqa: E721 - new_features = AlarmControlPanelEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features + return self._attr_supported_features @final @property @@ -334,13 +393,3 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A self._alarm_control_panel_option_default_code = default_code return self._alarm_control_panel_option_default_code = None - - -# As we import constants of the const module here, we need to add the following -# functions to check for deprecated constants again -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/alarm_control_panel/const.py b/homeassistant/components/alarm_control_panel/const.py index 2e8fe98da3b..f9a5887513c 100644 --- a/homeassistant/components/alarm_control_panel/const.py +++ b/homeassistant/components/alarm_control_panel/const.py @@ -1,22 +1,29 @@ """Provides the constants needed for component.""" from enum import IntFlag, StrEnum -from functools import partial from typing import Final -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) - DOMAIN: Final = "alarm_control_panel" ATTR_CHANGED_BY: Final = "changed_by" ATTR_CODE_ARM_REQUIRED: Final = "code_arm_required" +class AlarmControlPanelState(StrEnum): + """Alarm control panel entity states.""" + + DISARMED = "disarmed" + ARMED_HOME = "armed_home" + ARMED_AWAY = "armed_away" + ARMED_NIGHT = "armed_night" + ARMED_VACATION = "armed_vacation" + ARMED_CUSTOM_BYPASS = "armed_custom_bypass" + PENDING = "pending" + ARMING = "arming" + DISARMING = "disarming" + TRIGGERED = "triggered" + + class CodeFormat(StrEnum): """Code formats for the Alarm Control Panel.""" @@ -24,12 +31,6 @@ class CodeFormat(StrEnum): NUMBER = "number" -# These constants are deprecated as of Home Assistant 2022.5, can be removed in 2025.1 -# Please use the CodeFormat enum instead. 
-_DEPRECATED_FORMAT_TEXT: Final = DeprecatedConstantEnum(CodeFormat.TEXT, "2025.1") -_DEPRECATED_FORMAT_NUMBER: Final = DeprecatedConstantEnum(CodeFormat.NUMBER, "2025.1") - - class AlarmControlPanelEntityFeature(IntFlag): """Supported features of the alarm control panel entity.""" @@ -41,27 +42,6 @@ class AlarmControlPanelEntityFeature(IntFlag): ARM_VACATION = 32 -# These constants are deprecated as of Home Assistant 2022.5 -# Please use the AlarmControlPanelEntityFeature enum instead. -_DEPRECATED_SUPPORT_ALARM_ARM_HOME: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_HOME, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_ARM_AWAY: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_AWAY, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_ARM_NIGHT: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_NIGHT, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_TRIGGER: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.TRIGGER, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_ARM_CUSTOM_BYPASS: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, "2025.1" -) -_DEPRECATED_SUPPORT_ALARM_ARM_VACATION: Final = DeprecatedConstantEnum( - AlarmControlPanelEntityFeature.ARM_VACATION, "2025.1" -) - CONDITION_TRIGGERED: Final = "is_triggered" CONDITION_DISARMED: Final = "is_disarmed" CONDITION_ARMED_HOME: Final = "is_armed_home" @@ -69,10 +49,3 @@ CONDITION_ARMED_AWAY: Final = "is_armed_away" CONDITION_ARMED_NIGHT: Final = "is_armed_night" CONDITION_ARMED_VACATION: Final = "is_armed_vacation" CONDITION_ARMED_CUSTOM_BYPASS: Final = "is_armed_custom_bypass" - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/alarm_control_panel/device_condition.py b/homeassistant/components/alarm_control_panel/device_condition.py index 227fc31413e..6d343bbe605 100644 --- a/homeassistant/components/alarm_control_panel/device_condition.py +++ b/homeassistant/components/alarm_control_panel/device_condition.py @@ -13,13 +13,6 @@ from homeassistant.const import ( CONF_DOMAIN, CONF_ENTITY_ID, CONF_TYPE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -31,7 +24,7 @@ from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA from homeassistant.helpers.entity import get_supported_features from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from . import DOMAIN +from . 
import DOMAIN, AlarmControlPanelState from .const import ( CONDITION_ARMED_AWAY, CONDITION_ARMED_CUSTOM_BYPASS, @@ -109,19 +102,19 @@ def async_condition_from_config( ) -> condition.ConditionCheckerType: """Create a function to test a device condition.""" if config[CONF_TYPE] == CONDITION_TRIGGERED: - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED elif config[CONF_TYPE] == CONDITION_DISARMED: - state = STATE_ALARM_DISARMED + state = AlarmControlPanelState.DISARMED elif config[CONF_TYPE] == CONDITION_ARMED_HOME: - state = STATE_ALARM_ARMED_HOME + state = AlarmControlPanelState.ARMED_HOME elif config[CONF_TYPE] == CONDITION_ARMED_AWAY: - state = STATE_ALARM_ARMED_AWAY + state = AlarmControlPanelState.ARMED_AWAY elif config[CONF_TYPE] == CONDITION_ARMED_NIGHT: - state = STATE_ALARM_ARMED_NIGHT + state = AlarmControlPanelState.ARMED_NIGHT elif config[CONF_TYPE] == CONDITION_ARMED_VACATION: - state = STATE_ALARM_ARMED_VACATION + state = AlarmControlPanelState.ARMED_VACATION elif config[CONF_TYPE] == CONDITION_ARMED_CUSTOM_BYPASS: - state = STATE_ALARM_ARMED_CUSTOM_BYPASS + state = AlarmControlPanelState.ARMED_CUSTOM_BYPASS registry = er.async_get(hass) entity_id = er.async_resolve_entity_id(registry, config[ATTR_ENTITY_ID]) diff --git a/homeassistant/components/alarm_control_panel/device_trigger.py b/homeassistant/components/alarm_control_panel/device_trigger.py index 557666720e8..a488cf10870 100644 --- a/homeassistant/components/alarm_control_panel/device_trigger.py +++ b/homeassistant/components/alarm_control_panel/device_trigger.py @@ -15,13 +15,6 @@ from homeassistant.const import ( CONF_FOR, CONF_PLATFORM, CONF_TYPE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers import config_validation as cv, entity_registry as er @@ -29,7 +22,7 @@ from homeassistant.helpers.entity import get_supported_features from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN +from . 
import DOMAIN, AlarmControlPanelState from .const import AlarmControlPanelEntityFeature BASIC_TRIGGER_TYPES: Final[set[str]] = {"triggered", "disarmed", "arming"} @@ -129,19 +122,19 @@ async def async_attach_trigger( ) -> CALLBACK_TYPE: """Attach a trigger.""" if config[CONF_TYPE] == "triggered": - to_state = STATE_ALARM_TRIGGERED + to_state = AlarmControlPanelState.TRIGGERED elif config[CONF_TYPE] == "disarmed": - to_state = STATE_ALARM_DISARMED + to_state = AlarmControlPanelState.DISARMED elif config[CONF_TYPE] == "arming": - to_state = STATE_ALARM_ARMING + to_state = AlarmControlPanelState.ARMING elif config[CONF_TYPE] == "armed_home": - to_state = STATE_ALARM_ARMED_HOME + to_state = AlarmControlPanelState.ARMED_HOME elif config[CONF_TYPE] == "armed_away": - to_state = STATE_ALARM_ARMED_AWAY + to_state = AlarmControlPanelState.ARMED_AWAY elif config[CONF_TYPE] == "armed_night": - to_state = STATE_ALARM_ARMED_NIGHT + to_state = AlarmControlPanelState.ARMED_NIGHT elif config[CONF_TYPE] == "armed_vacation": - to_state = STATE_ALARM_ARMED_VACATION + to_state = AlarmControlPanelState.ARMED_VACATION state_config = { state_trigger.CONF_PLATFORM: "state", diff --git a/homeassistant/components/alarm_control_panel/icons.json b/homeassistant/components/alarm_control_panel/icons.json index 915448a9962..0295699bae9 100644 --- a/homeassistant/components/alarm_control_panel/icons.json +++ b/homeassistant/components/alarm_control_panel/icons.json @@ -15,12 +15,26 @@ } }, "services": { - "alarm_arm_away": "mdi:shield-lock", - "alarm_arm_home": "mdi:shield-home", - "alarm_arm_night": "mdi:shield-moon", - "alarm_arm_custom_bypass": "mdi:security", - "alarm_disarm": "mdi:shield-off", - "alarm_trigger": "mdi:bell-ring", - "alarm_arm_vacation": "mdi:shield-airplane" + "alarm_arm_away": { + "service": "mdi:shield-lock" + }, + "alarm_arm_home": { + "service": "mdi:shield-home" + }, + "alarm_arm_night": { + "service": "mdi:shield-moon" + }, + "alarm_arm_custom_bypass": { + "service": "mdi:security" + }, + "alarm_disarm": { + "service": "mdi:shield-off" + }, + "alarm_trigger": { + "service": "mdi:bell-ring" + }, + "alarm_arm_vacation": { + "service": "mdi:shield-airplane" + } } } diff --git a/homeassistant/components/alarm_control_panel/reproduce_state.py b/homeassistant/components/alarm_control_panel/reproduce_state.py index 5a3d79fe2ed..765514e98ec 100644 --- a/homeassistant/components/alarm_control_panel/reproduce_state.py +++ b/homeassistant/components/alarm_control_panel/reproduce_state.py @@ -16,28 +16,21 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import Context, HomeAssistant, State -from . import DOMAIN +from . 
import DOMAIN, AlarmControlPanelState _LOGGER: Final = logging.getLogger(__name__) VALID_STATES: Final[set[str]] = { - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.TRIGGERED, } @@ -65,19 +58,19 @@ async def _async_reproduce_state( service_data = {ATTR_ENTITY_ID: state.entity_id} - if state.state == STATE_ALARM_ARMED_AWAY: + if state.state == AlarmControlPanelState.ARMED_AWAY: service = SERVICE_ALARM_ARM_AWAY - elif state.state == STATE_ALARM_ARMED_CUSTOM_BYPASS: + elif state.state == AlarmControlPanelState.ARMED_CUSTOM_BYPASS: service = SERVICE_ALARM_ARM_CUSTOM_BYPASS - elif state.state == STATE_ALARM_ARMED_HOME: + elif state.state == AlarmControlPanelState.ARMED_HOME: service = SERVICE_ALARM_ARM_HOME - elif state.state == STATE_ALARM_ARMED_NIGHT: + elif state.state == AlarmControlPanelState.ARMED_NIGHT: service = SERVICE_ALARM_ARM_NIGHT - elif state.state == STATE_ALARM_ARMED_VACATION: + elif state.state == AlarmControlPanelState.ARMED_VACATION: service = SERVICE_ALARM_ARM_VACATION - elif state.state == STATE_ALARM_DISARMED: + elif state.state == AlarmControlPanelState.DISARMED: service = SERVICE_ALARM_DISARM - elif state.state == STATE_ALARM_TRIGGERED: + elif state.state == AlarmControlPanelState.TRIGGERED: service = SERVICE_ALARM_TRIGGER await hass.services.async_call( diff --git a/homeassistant/components/alarm_control_panel/strings.json b/homeassistant/components/alarm_control_panel/strings.json index 6dac4d069a1..5f718280566 100644 --- a/homeassistant/components/alarm_control_panel/strings.json +++ b/homeassistant/components/alarm_control_panel/strings.json @@ -130,7 +130,7 @@ }, "alarm_trigger": { "name": "Trigger", - "description": "Enables an external alarm trigger.", + "description": "Trigger the alarm manually.", "fields": { "code": { "name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]", @@ -138,5 +138,10 @@ } } } + }, + "exceptions": { + "code_arm_required": { + "message": "Arming requires a code but none was given for {entity_id}." 
+ } } } diff --git a/homeassistant/components/alarmdecoder/alarm_control_panel.py b/homeassistant/components/alarmdecoder/alarm_control_panel.py index 7375320f800..cf72133ea12 100644 --- a/homeassistant/components/alarmdecoder/alarm_control_panel.py +++ b/homeassistant/components/alarmdecoder/alarm_control_panel.py @@ -7,16 +7,10 @@ import voluptuous as vol from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - ATTR_CODE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.const import ATTR_CODE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv @@ -106,15 +100,15 @@ class AlarmDecoderAlarmPanel(AlarmDecoderEntity, AlarmControlPanelEntity): def _message_callback(self, message): """Handle received messages.""" if message.alarm_sounding or message.fire_alarm: - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED elif message.armed_away: - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY elif message.armed_home and (message.entry_delay_off or message.perimeter_only): - self._attr_state = STATE_ALARM_ARMED_NIGHT + self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT elif message.armed_home: - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME else: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED self._attr_extra_state_attributes = { "ac_power": message.ac_power, diff --git a/homeassistant/components/alarmdecoder/config_flow.py b/homeassistant/components/alarmdecoder/config_flow.py index 779951dd0b0..093ed220973 100644 --- a/homeassistant/components/alarmdecoder/config_flow.py +++ b/homeassistant/components/alarmdecoder/config_flow.py @@ -157,7 +157,7 @@ class AlarmDecoderFlowHandler(ConfigFlow, domain=DOMAIN): class AlarmDecoderOptionsFlowHandler(OptionsFlow): """Handle AlarmDecoder options.""" - selected_zone: str | None = None + selected_zone: str def __init__(self, config_entry: ConfigEntry) -> None: """Initialize AlarmDecoder options flow.""" diff --git a/homeassistant/components/alarmdecoder/icons.json b/homeassistant/components/alarmdecoder/icons.json index 80835a049c8..ccb89749d2d 100644 --- a/homeassistant/components/alarmdecoder/icons.json +++ b/homeassistant/components/alarmdecoder/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "alarm_keypress": "mdi:dialpad", - "alarm_toggle_chime": "mdi:abc" + "alarm_keypress": { + "service": "mdi:dialpad" + }, + "alarm_toggle_chime": { + "service": "mdi:abc" + } } } diff --git a/homeassistant/components/alarmdecoder/strings.json b/homeassistant/components/alarmdecoder/strings.json index dd698201b09..ccf1d965855 100644 --- a/homeassistant/components/alarmdecoder/strings.json +++ b/homeassistant/components/alarmdecoder/strings.json @@ -22,7 +22,8 @@ } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "create_entry": { "default": "Successfully connected to AlarmDecoder." 
@@ -37,7 +38,7 @@ "title": "Configure AlarmDecoder", "description": "What would you like to edit?", "data": { - "edit_select": "Edit" + "edit_selection": "Edit" } }, "arm_settings": { diff --git a/homeassistant/components/alert/__init__.py b/homeassistant/components/alert/__init__.py index c49e14f2c6f..12341c158c0 100644 --- a/homeassistant/components/alert/__init__.py +++ b/homeassistant/components/alert/__init__.py @@ -2,18 +2,8 @@ from __future__ import annotations -from collections.abc import Callable -from datetime import timedelta -from typing import Any - import voluptuous as vol -from homeassistant.components.notify import ( - ATTR_DATA, - ATTR_MESSAGE, - ATTR_TITLE, - DOMAIN as DOMAIN_NOTIFY, -) from homeassistant.const import ( CONF_ENTITY_ID, CONF_NAME, @@ -22,22 +12,12 @@ from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_IDLE, - STATE_OFF, STATE_ON, ) -from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant -from homeassistant.exceptions import ServiceNotFound +from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_component import EntityComponent -from homeassistant.helpers.event import ( - async_track_point_in_time, - async_track_state_change_event, -) -from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType -from homeassistant.util.dt import now from .const import ( CONF_ALERT_MESSAGE, @@ -52,6 +32,7 @@ from .const import ( DOMAIN, LOGGER, ) +from .entity import AlertEntity ALERT_SCHEMA = vol.Schema( { @@ -83,9 +64,9 @@ CONFIG_SCHEMA = vol.Schema( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Alert component.""" - component = EntityComponent[Alert](LOGGER, DOMAIN, hass) + component = EntityComponent[AlertEntity](LOGGER, DOMAIN, hass) - entities: list[Alert] = [] + entities: list[AlertEntity] = [] for object_id, cfg in config[DOMAIN].items(): if not cfg: @@ -104,7 +85,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: data = cfg.get(CONF_DATA) entities.append( - Alert( + AlertEntity( hass, object_id, name, @@ -131,183 +112,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_add_entities(entities) return True - - -class Alert(Entity): - """Representation of an alert.""" - - _attr_should_poll = False - - def __init__( - self, - hass: HomeAssistant, - entity_id: str, - name: str, - watched_entity_id: str, - state: str, - repeat: list[float], - skip_first: bool, - message_template: Template | None, - done_message_template: Template | None, - notifiers: list[str], - can_ack: bool, - title_template: Template | None, - data: dict[Any, Any], - ) -> None: - """Initialize the alert.""" - self.hass = hass - self._attr_name = name - self._alert_state = state - self._skip_first = skip_first - self._data = data - - self._message_template = message_template - self._done_message_template = done_message_template - self._title_template = title_template - - self._notifiers = notifiers - self._can_ack = can_ack - - self._delay = [timedelta(minutes=val) for val in repeat] - self._next_delay = 0 - - self._firing = False - self._ack = False - self._cancel: Callable[[], None] | None = None - self._send_done_message = False - self.entity_id = f"{DOMAIN}.{entity_id}" - - async_track_state_change_event( - hass, [watched_entity_id], 
self.watched_entity_change - ) - - @property - def state(self) -> str: - """Return the alert status.""" - if self._firing: - if self._ack: - return STATE_OFF - return STATE_ON - return STATE_IDLE - - async def watched_entity_change(self, event: Event[EventStateChangedData]) -> None: - """Determine if the alert should start or stop.""" - if (to_state := event.data["new_state"]) is None: - return - LOGGER.debug("Watched entity (%s) has changed", event.data["entity_id"]) - if to_state.state == self._alert_state and not self._firing: - await self.begin_alerting() - if to_state.state != self._alert_state and self._firing: - await self.end_alerting() - - async def begin_alerting(self) -> None: - """Begin the alert procedures.""" - LOGGER.debug("Beginning Alert: %s", self._attr_name) - self._ack = False - self._firing = True - self._next_delay = 0 - - if not self._skip_first: - await self._notify() - else: - await self._schedule_notify() - - self.async_write_ha_state() - - async def end_alerting(self) -> None: - """End the alert procedures.""" - LOGGER.debug("Ending Alert: %s", self._attr_name) - if self._cancel is not None: - self._cancel() - self._cancel = None - - self._ack = False - self._firing = False - if self._send_done_message: - await self._notify_done_message() - self.async_write_ha_state() - - async def _schedule_notify(self) -> None: - """Schedule a notification.""" - delay = self._delay[self._next_delay] - next_msg = now() + delay - self._cancel = async_track_point_in_time( - self.hass, - HassJob( - self._notify, name="Schedule notify alert", cancel_on_shutdown=True - ), - next_msg, - ) - self._next_delay = min(self._next_delay + 1, len(self._delay) - 1) - - async def _notify(self, *args: Any) -> None: - """Send the alert notification.""" - if not self._firing: - return - - if not self._ack: - LOGGER.info("Alerting: %s", self._attr_name) - self._send_done_message = True - - if self._message_template is not None: - message = self._message_template.async_render(parse_result=False) - else: - message = self._attr_name - - await self._send_notification_message(message) - await self._schedule_notify() - - async def _notify_done_message(self) -> None: - """Send notification of complete alert.""" - LOGGER.info("Alerting: %s", self._done_message_template) - self._send_done_message = False - - if self._done_message_template is None: - return - - message = self._done_message_template.async_render(parse_result=False) - - await self._send_notification_message(message) - - async def _send_notification_message(self, message: Any) -> None: - if not self._notifiers: - return - - msg_payload = {ATTR_MESSAGE: message} - - if self._title_template is not None: - title = self._title_template.async_render(parse_result=False) - msg_payload[ATTR_TITLE] = title - if self._data: - msg_payload[ATTR_DATA] = self._data - - LOGGER.debug(msg_payload) - - for target in self._notifiers: - try: - await self.hass.services.async_call( - DOMAIN_NOTIFY, target, msg_payload, context=self._context - ) - except ServiceNotFound: - LOGGER.error( - "Failed to call notify.%s, retrying at next notification interval", - target, - ) - - async def async_turn_on(self, **kwargs: Any) -> None: - """Async Unacknowledge alert.""" - LOGGER.debug("Reset Alert: %s", self._attr_name) - self._ack = False - self.async_write_ha_state() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Async Acknowledge alert.""" - LOGGER.debug("Acknowledged Alert: %s", self._attr_name) - self._ack = True - self.async_write_ha_state() - - async 
def async_toggle(self, **kwargs: Any) -> None: - """Async toggle alert.""" - if self._ack: - return await self.async_turn_on() - return await self.async_turn_off() diff --git a/homeassistant/components/alert/entity.py b/homeassistant/components/alert/entity.py new file mode 100644 index 00000000000..629047b15ba --- /dev/null +++ b/homeassistant/components/alert/entity.py @@ -0,0 +1,206 @@ +"""Support for repeating alerts when conditions are met.""" + +from __future__ import annotations + +from collections.abc import Callable +from datetime import timedelta +from typing import Any + +from homeassistant.components.notify import ( + ATTR_DATA, + ATTR_MESSAGE, + ATTR_TITLE, + DOMAIN as DOMAIN_NOTIFY, +) +from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_ON +from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant +from homeassistant.exceptions import ServiceNotFound +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.event import ( + async_track_point_in_time, + async_track_state_change_event, +) +from homeassistant.helpers.template import Template +from homeassistant.util.dt import now + +from .const import DOMAIN, LOGGER + + +class AlertEntity(Entity): + """Representation of an alert.""" + + _attr_should_poll = False + + def __init__( + self, + hass: HomeAssistant, + entity_id: str, + name: str, + watched_entity_id: str, + state: str, + repeat: list[float], + skip_first: bool, + message_template: Template | None, + done_message_template: Template | None, + notifiers: list[str], + can_ack: bool, + title_template: Template | None, + data: dict[Any, Any], + ) -> None: + """Initialize the alert.""" + self.hass = hass + self._attr_name = name + self._alert_state = state + self._skip_first = skip_first + self._data = data + + self._message_template = message_template + self._done_message_template = done_message_template + self._title_template = title_template + + self._notifiers = notifiers + self._can_ack = can_ack + + self._delay = [timedelta(minutes=val) for val in repeat] + self._next_delay = 0 + + self._firing = False + self._ack = False + self._cancel: Callable[[], None] | None = None + self._send_done_message = False + self.entity_id = f"{DOMAIN}.{entity_id}" + + async_track_state_change_event( + hass, [watched_entity_id], self.watched_entity_change + ) + + @property + def state(self) -> str: + """Return the alert status.""" + if self._firing: + if self._ack: + return STATE_OFF + return STATE_ON + return STATE_IDLE + + async def watched_entity_change(self, event: Event[EventStateChangedData]) -> None: + """Determine if the alert should start or stop.""" + if (to_state := event.data["new_state"]) is None: + return + LOGGER.debug("Watched entity (%s) has changed", event.data["entity_id"]) + if to_state.state == self._alert_state and not self._firing: + await self.begin_alerting() + if to_state.state != self._alert_state and self._firing: + await self.end_alerting() + + async def begin_alerting(self) -> None: + """Begin the alert procedures.""" + LOGGER.debug("Beginning Alert: %s", self._attr_name) + self._ack = False + self._firing = True + self._next_delay = 0 + + if not self._skip_first: + await self._notify() + else: + await self._schedule_notify() + + self.async_write_ha_state() + + async def end_alerting(self) -> None: + """End the alert procedures.""" + LOGGER.debug("Ending Alert: %s", self._attr_name) + if self._cancel is not None: + self._cancel() + self._cancel = None + + self._ack = False + self._firing = False + if 
self._send_done_message: + await self._notify_done_message() + self.async_write_ha_state() + + async def _schedule_notify(self) -> None: + """Schedule a notification.""" + delay = self._delay[self._next_delay] + next_msg = now() + delay + self._cancel = async_track_point_in_time( + self.hass, + HassJob( + self._notify, name="Schedule notify alert", cancel_on_shutdown=True + ), + next_msg, + ) + self._next_delay = min(self._next_delay + 1, len(self._delay) - 1) + + async def _notify(self, *args: Any) -> None: + """Send the alert notification.""" + if not self._firing: + return + + if not self._ack: + LOGGER.info("Alerting: %s", self._attr_name) + self._send_done_message = True + + if self._message_template is not None: + message = self._message_template.async_render(parse_result=False) + else: + message = self._attr_name + + await self._send_notification_message(message) + await self._schedule_notify() + + async def _notify_done_message(self) -> None: + """Send notification of complete alert.""" + LOGGER.info("Alerting: %s", self._done_message_template) + self._send_done_message = False + + if self._done_message_template is None: + return + + message = self._done_message_template.async_render(parse_result=False) + + await self._send_notification_message(message) + + async def _send_notification_message(self, message: Any) -> None: + if not self._notifiers: + return + + msg_payload = {ATTR_MESSAGE: message} + + if self._title_template is not None: + title = self._title_template.async_render(parse_result=False) + msg_payload[ATTR_TITLE] = title + if self._data: + msg_payload[ATTR_DATA] = self._data + + LOGGER.debug(msg_payload) + + for target in self._notifiers: + try: + await self.hass.services.async_call( + DOMAIN_NOTIFY, target, msg_payload, context=self._context + ) + except ServiceNotFound: + LOGGER.error( + "Failed to call notify.%s, retrying at next notification interval", + target, + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Async Unacknowledge alert.""" + LOGGER.debug("Reset Alert: %s", self._attr_name) + self._ack = False + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Async Acknowledge alert.""" + LOGGER.debug("Acknowledged Alert: %s", self._attr_name) + self._ack = True + self.async_write_ha_state() + + async def async_toggle(self, **kwargs: Any) -> None: + """Async toggle alert.""" + if self._ack: + return await self.async_turn_on() + return await self.async_turn_off() diff --git a/homeassistant/components/alert/icons.json b/homeassistant/components/alert/icons.json index 7f5258706d2..5d8613ec592 100644 --- a/homeassistant/components/alert/icons.json +++ b/homeassistant/components/alert/icons.json @@ -1,7 +1,13 @@ { "services": { - "toggle": "mdi:bell-ring", - "turn_off": "mdi:bell-off", - "turn_on": "mdi:bell-alert" + "toggle": { + "service": "mdi:bell-ring" + }, + "turn_off": { + "service": "mdi:bell-off" + }, + "turn_on": { + "service": "mdi:bell-alert" + } } } diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index 03ba353bb5b..c5b4ad15904 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -26,30 +26,24 @@ from homeassistant.components import ( ) from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.components.climate import HVACMode +from homeassistant.components.lock import LockState from 
homeassistant.const import ( ATTR_CODE_FORMAT, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, STATE_IDLE, - STATE_LOCKED, - STATE_LOCKING, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_UNAVAILABLE, STATE_UNKNOWN, - STATE_UNLOCKED, - STATE_UNLOCKING, UnitOfLength, UnitOfMass, UnitOfTemperature, @@ -323,6 +317,7 @@ class Alexa(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -409,6 +404,7 @@ class AlexaPowerController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -442,7 +438,7 @@ class AlexaPowerController(AlexaCapability): elif self.entity.domain == remote.DOMAIN: is_on = self.entity.state not in (STATE_OFF, STATE_UNKNOWN) elif self.entity.domain == vacuum.DOMAIN: - is_on = self.entity.state == vacuum.STATE_CLEANING + is_on = self.entity.state == vacuum.VacuumActivity.CLEANING elif self.entity.domain == timer.DOMAIN: is_on = self.entity.state != STATE_IDLE elif self.entity.domain == water_heater.DOMAIN: @@ -475,6 +471,7 @@ class AlexaLockController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -500,10 +497,10 @@ class AlexaLockController(AlexaCapability): raise UnsupportedProperty(name) # If its unlocking its still locked and not unlocked yet - if self.entity.state in (STATE_UNLOCKING, STATE_LOCKED): + if self.entity.state in (LockState.UNLOCKING, LockState.LOCKED): return "LOCKED" # If its locking its still unlocked and not locked yet - if self.entity.state in (STATE_LOCKING, STATE_UNLOCKED): + if self.entity.state in (LockState.LOCKING, LockState.UNLOCKED): return "UNLOCKED" return "JAMMED" @@ -529,6 +526,7 @@ class AlexaSceneController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -568,6 +566,7 @@ class AlexaBrightnessController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -617,6 +616,7 @@ class AlexaColorController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -675,6 +675,7 @@ class AlexaColorTemperatureController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -721,6 +722,7 @@ class AlexaSpeaker(AlexaCapability): "fr-FR", # Not documented as of 2021-12-04, see PR #60489 "it-IT", "ja-JP", + "nl-NL", } def name(self) -> str: @@ -778,6 +780,7 @@ class AlexaStepSpeaker(AlexaCapability): "es-ES", "fr-FR", # Not documented as of 2021-12-04, see PR #60489 "it-IT", + "nl-NL", } def name(self) -> str: @@ -807,6 +810,7 @@ class AlexaPlaybackController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -822,13 +826,19 @@ class AlexaPlaybackController(AlexaCapability): """ supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0) - operations = { - media_player.MediaPlayerEntityFeature.NEXT_TRACK: "Next", - media_player.MediaPlayerEntityFeature.PAUSE: "Pause", - media_player.MediaPlayerEntityFeature.PLAY: "Play", - media_player.MediaPlayerEntityFeature.PREVIOUS_TRACK: "Previous", - media_player.MediaPlayerEntityFeature.STOP: "Stop", - } + operations: dict[ + cover.CoverEntityFeature | media_player.MediaPlayerEntityFeature, str + ] + if self.entity.domain == cover.DOMAIN: + operations = {cover.CoverEntityFeature.STOP: "Stop"} + else: + operations = { + media_player.MediaPlayerEntityFeature.NEXT_TRACK: "Next", + media_player.MediaPlayerEntityFeature.PAUSE: "Pause", + media_player.MediaPlayerEntityFeature.PLAY: "Play", + 
media_player.MediaPlayerEntityFeature.PREVIOUS_TRACK: "Previous", + media_player.MediaPlayerEntityFeature.STOP: "Stop", + } return [ value @@ -859,6 +869,7 @@ class AlexaInputController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -1104,6 +1115,7 @@ class AlexaThermostatController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -1245,6 +1257,7 @@ class AlexaPowerLevelController(AlexaCapability): "fr-CA", "fr-FR", "it-IT", + "nl-NL", "ja-JP", } @@ -1320,13 +1333,13 @@ class AlexaSecurityPanelController(AlexaCapability): raise UnsupportedProperty(name) arm_state = self.entity.state - if arm_state == STATE_ALARM_ARMED_HOME: + if arm_state == AlarmControlPanelState.ARMED_HOME: return "ARMED_STAY" - if arm_state == STATE_ALARM_ARMED_AWAY: + if arm_state == AlarmControlPanelState.ARMED_AWAY: return "ARMED_AWAY" - if arm_state == STATE_ALARM_ARMED_NIGHT: + if arm_state == AlarmControlPanelState.ARMED_NIGHT: return "ARMED_NIGHT" - if arm_state == STATE_ALARM_ARMED_CUSTOM_BYPASS: + if arm_state == AlarmControlPanelState.ARMED_CUSTOM_BYPASS: return "ARMED_STAY" return "DISARMED" @@ -1723,6 +1736,7 @@ class AlexaRangeController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2066,6 +2080,7 @@ class AlexaToggleController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2212,6 +2227,7 @@ class AlexaPlaybackStateReporter(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2267,6 +2283,7 @@ class AlexaSeekController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2360,6 +2377,7 @@ class AlexaEqualizerController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2470,6 +2488,7 @@ class AlexaCameraStreamController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } diff --git a/homeassistant/components/alexa/const.py b/homeassistant/components/alexa/const.py index 4862e4d8a8c..27e9bbd5b67 100644 --- a/homeassistant/components/alexa/const.py +++ b/homeassistant/components/alexa/const.py @@ -59,6 +59,7 @@ CONF_SUPPORTED_LOCALES = ( "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", ) diff --git a/homeassistant/components/alexa/entities.py b/homeassistant/components/alexa/entities.py index ca7b389a0f1..8c139d66369 100644 --- a/homeassistant/components/alexa/entities.py +++ b/homeassistant/components/alexa/entities.py @@ -559,6 +559,10 @@ class CoverCapabilities(AlexaEntity): ) if supported & cover.CoverEntityFeature.SET_TILT_POSITION: yield AlexaRangeController(self.entity, instance=f"{cover.DOMAIN}.tilt") + if supported & ( + cover.CoverEntityFeature.STOP | cover.CoverEntityFeature.STOP_TILT + ): + yield AlexaPlaybackController(self.entity, instance=f"{cover.DOMAIN}.stop") yield AlexaEndpointHealth(self.hass, self.entity) yield Alexa(self.entity) diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 3571f436ff6..04bef105546 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from collections.abc import Callable, Coroutine import logging import math @@ -9,6 +10,7 @@ from typing import Any from homeassistant import core as ha from homeassistant.components import ( + alarm_control_panel, button, camera, climate, @@ -51,7 +53,6 @@ from homeassistant.const import ( SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, - STATE_ALARM_DISARMED, UnitOfTemperature, ) from 
homeassistant.helpers import network @@ -358,7 +359,7 @@ async def async_api_set_color_temperature( await hass.services.async_call( entity.domain, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_KELVIN: kelvin}, + {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: kelvin}, blocking=False, context=context, ) @@ -375,14 +376,14 @@ async def async_api_decrease_color_temp( ) -> AlexaResponse: """Process a decrease color temperature request.""" entity = directive.entity - current = int(entity.attributes[light.ATTR_COLOR_TEMP]) - max_mireds = int(entity.attributes[light.ATTR_MAX_MIREDS]) + current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN]) + min_kelvin = int(entity.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN]) - value = min(max_mireds, current + 50) + value = max(min_kelvin, current - 500) await hass.services.async_call( entity.domain, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value}, + {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value}, blocking=False, context=context, ) @@ -399,14 +400,14 @@ async def async_api_increase_color_temp( ) -> AlexaResponse: """Process an increase color temperature request.""" entity = directive.entity - current = int(entity.attributes[light.ATTR_COLOR_TEMP]) - min_mireds = int(entity.attributes[light.ATTR_MIN_MIREDS]) + current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN]) + max_kelvin = int(entity.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN]) - value = max(min_mireds, current - 50) + value = min(max_kelvin, current + 500) await hass.services.async_call( entity.domain, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value}, + {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value}, blocking=False, context=context, ) @@ -526,6 +527,7 @@ async def async_api_unlock( "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", }: msg = ( @@ -764,9 +766,25 @@ async def async_api_stop( entity = directive.entity data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id} - await hass.services.async_call( - entity.domain, SERVICE_MEDIA_STOP, data, blocking=False, context=context - ) + if entity.domain == cover.DOMAIN: + supported: int = entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0) + feature_services: dict[int, str] = { + cover.CoverEntityFeature.STOP.value: cover.SERVICE_STOP_COVER, + cover.CoverEntityFeature.STOP_TILT.value: cover.SERVICE_STOP_COVER_TILT, + } + await asyncio.gather( + *( + hass.services.async_call( + entity.domain, service, data, blocking=False, context=context + ) + for feature, service in feature_services.items() + if feature & supported + ) + ) + else: + await hass.services.async_call( + entity.domain, SERVICE_MEDIA_STOP, data, blocking=False, context=context + ) return directive.response() @@ -1083,7 +1101,13 @@ async def async_api_arm( arm_state = directive.payload["armState"] data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id} - if entity.state != STATE_ALARM_DISARMED: + # Per Alexa Documentation: users are not allowed to switch from armed_away + # directly to another armed state without first disarming the system. + # https://developer.amazon.com/en-US/docs/alexa/device-apis/alexa-securitypanelcontroller.html#arming + if ( + entity.state == alarm_control_panel.AlarmControlPanelState.ARMED_AWAY + and arm_state != "ARMED_AWAY" + ): msg = "You must disarm the system before you can set the requested arm state." 
raise AlexaSecurityPanelAuthorizationRequired(msg) @@ -1133,7 +1157,7 @@ async def async_api_disarm( # Per Alexa Documentation: If you receive a Disarm directive, and the # system is already disarmed, respond with a success response, # not an error response. - if entity.state == STATE_ALARM_DISARMED: + if entity.state == alarm_control_panel.AlarmControlPanelState.DISARMED: return response payload = directive.payload diff --git a/homeassistant/components/alpha_vantage/manifest.json b/homeassistant/components/alpha_vantage/manifest.json index c94da6bf487..cdfa847d115 100644 --- a/homeassistant/components/alpha_vantage/manifest.json +++ b/homeassistant/components/alpha_vantage/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/alpha_vantage", "iot_class": "cloud_polling", "loggers": ["alpha_vantage"], + "quality_scale": "legacy", "requirements": ["alpha-vantage==2.3.1"] } diff --git a/homeassistant/components/amazon_polly/manifest.json b/homeassistant/components/amazon_polly/manifest.json index b057967d1e2..e7fbf8edc74 100644 --- a/homeassistant/components/amazon_polly/manifest.json +++ b/homeassistant/components/amazon_polly/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/amazon_polly", "iot_class": "cloud_push", "loggers": ["boto3", "botocore", "s3transfer"], + "quality_scale": "legacy", "requirements": ["boto3==1.34.131"] } diff --git a/homeassistant/components/amazon_polly/tts.py b/homeassistant/components/amazon_polly/tts.py index 1fc972fa3a1..62852848a9c 100644 --- a/homeassistant/components/amazon_polly/tts.py +++ b/homeassistant/components/amazon_polly/tts.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections import defaultdict import logging from typing import Any, Final @@ -114,6 +115,8 @@ def get_engine( all_voices: dict[str, dict[str, str]] = {} + all_engines: dict[str, set[str]] = defaultdict(set) + all_voices_req = polly_client.describe_voices() for voice in all_voices_req.get("Voices", []): @@ -124,8 +127,12 @@ def get_engine( language_code: str | None = voice.get("LanguageCode") if language_code is not None and language_code not in supported_languages: supported_languages.append(language_code) + for engine in voice.get("SupportedEngines"): + all_engines[engine].add(voice_id) - return AmazonPollyProvider(polly_client, config, supported_languages, all_voices) + return AmazonPollyProvider( + polly_client, config, supported_languages, all_voices, all_engines + ) class AmazonPollyProvider(Provider): @@ -137,13 +144,16 @@ class AmazonPollyProvider(Provider): config: ConfigType, supported_languages: list[str], all_voices: dict[str, dict[str, str]], + all_engines: dict[str, set[str]], ) -> None: """Initialize Amazon Polly provider for TTS.""" self.client = polly_client self.config = config self.supported_langs = supported_languages self.all_voices = all_voices + self.all_engines = all_engines self.default_voice: str = self.config[CONF_VOICE] + self.default_engine: str = self.config[CONF_ENGINE] self.name = "Amazon Polly" @property @@ -159,12 +169,12 @@ class AmazonPollyProvider(Provider): @property def default_options(self) -> dict[str, str]: """Return dict include default options.""" - return {CONF_VOICE: self.default_voice} + return {CONF_VOICE: self.default_voice, CONF_ENGINE: self.default_engine} @property def supported_options(self) -> list[str]: """Return a list of supported options.""" - return [CONF_VOICE] + return [CONF_VOICE, CONF_ENGINE] def get_tts_audio( self, @@ -179,9 
+189,14 @@ class AmazonPollyProvider(Provider): _LOGGER.error("%s does not support the %s language", voice_id, language) return None, None + engine = options.get(CONF_ENGINE, self.default_engine) + if voice_id not in self.all_engines[engine]: + _LOGGER.error("%s does not support the %s engine", voice_id, engine) + return None, None + _LOGGER.debug("Requesting TTS file for text: %s", message) resp = self.client.synthesize_speech( - Engine=self.config[CONF_ENGINE], + Engine=engine, OutputFormat=self.config[CONF_OUTPUT_FORMAT], SampleRate=self.config[CONF_SAMPLE_RATE], Text=message, diff --git a/homeassistant/components/amberelectric/__init__.py b/homeassistant/components/amberelectric/__init__.py index 9d9eef49b36..29d8f166f2a 100644 --- a/homeassistant/components/amberelectric/__init__.py +++ b/homeassistant/components/amberelectric/__init__.py @@ -1,33 +1,31 @@ """Support for Amber Electric.""" -from amberelectric import Configuration -from amberelectric.api import amber_api +import amberelectric from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_TOKEN from homeassistant.core import HomeAssistant -from .const import CONF_SITE_ID, DOMAIN, PLATFORMS +from .const import CONF_SITE_ID, PLATFORMS from .coordinator import AmberUpdateCoordinator +type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool: """Set up Amber Electric from a config entry.""" - configuration = Configuration(access_token=entry.data[CONF_API_TOKEN]) - api_instance = amber_api.AmberApi.create(configuration) + configuration = amberelectric.Configuration(access_token=entry.data[CONF_API_TOKEN]) + api_client = amberelectric.ApiClient(configuration) + api_instance = amberelectric.AmberApi(api_client) site_id = entry.data[CONF_SITE_ID] coordinator = AmberUpdateCoordinator(hass, api_instance, site_id) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/amberelectric/binary_sensor.py b/homeassistant/components/amberelectric/binary_sensor.py index cd06fb04f39..a9fa00d0129 100644 --- a/homeassistant/components/amberelectric/binary_sensor.py +++ b/homeassistant/components/amberelectric/binary_sensor.py @@ -8,12 +8,12 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import ATTRIBUTION, DOMAIN +from . 
import AmberConfigEntry +from .const import ATTRIBUTION from .coordinator import AmberUpdateCoordinator PRICE_SPIKE_ICONS = { @@ -85,11 +85,11 @@ class AmberDemandWindowBinarySensor(AmberPriceGridSensor): async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: AmberConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a config entry.""" - coordinator: AmberUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data price_spike_description = BinarySensorEntityDescription( key="price_spike", diff --git a/homeassistant/components/amberelectric/config_flow.py b/homeassistant/components/amberelectric/config_flow.py index a94700c27d1..c25258e2e33 100644 --- a/homeassistant/components/amberelectric/config_flow.py +++ b/homeassistant/components/amberelectric/config_flow.py @@ -3,8 +3,8 @@ from __future__ import annotations import amberelectric -from amberelectric.api import amber_api -from amberelectric.model.site import Site, SiteStatus +from amberelectric.models.site import Site +from amberelectric.models.site_status import SiteStatus import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -23,11 +23,15 @@ API_URL = "https://app.amber.com.au/developers" def generate_site_selector_name(site: Site) -> str: """Generate the name to show in the site drop down in the configuration flow.""" + # The generated API client returns the NMI as Any rather than str, so coerce it to a string + nmi = str(site.nmi) if site.status == SiteStatus.CLOSED: - return site.nmi + " (Closed: " + site.closed_on.isoformat() + ")" # type: ignore[no-any-return] + if site.closed_on is None: + return f"{nmi} (Closed)" + return f"{nmi} (Closed: {site.closed_on.isoformat()})" if site.status == SiteStatus.PENDING: - return site.nmi + " (Pending)" # type: ignore[no-any-return] - return site.nmi # type: ignore[no-any-return] + return f"{nmi} (Pending)" + return nmi def filter_sites(sites: list[Site]) -> list[Site]: @@ -35,7 +39,7 @@ def filter_sites(sites: list[Site]) -> list[Site]: filtered: list[Site] = [] filtered_nmi: set[str] = set() - for site in sorted(sites, key=lambda site: site.status.value): + for site in sorted(sites, key=lambda site: site.status): if site.status == SiteStatus.ACTIVE or site.nmi not in filtered_nmi: filtered.append(site) filtered_nmi.add(site.nmi) @@ -56,7 +60,8 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN): def _fetch_sites(self, token: str) -> list[Site] | None: configuration = amberelectric.Configuration(access_token=token) - api: amber_api.AmberApi = amber_api.AmberApi.create(configuration) + api_client = amberelectric.ApiClient(configuration) + api = amberelectric.AmberApi(api_client) try: sites: list[Site] = filter_sites(api.get_sites()) diff --git a/homeassistant/components/amberelectric/coordinator.py b/homeassistant/components/amberelectric/coordinator.py index a95aa3fa529..57028e07d21 100644 --- a/homeassistant/components/amberelectric/coordinator.py +++ b/homeassistant/components/amberelectric/coordinator.py @@ -5,13 +5,13 @@ from __future__ import annotations from datetime import timedelta from typing import Any -from amberelectric import ApiException -from amberelectric.api import amber_api -from amberelectric.model.actual_interval import ActualInterval -from amberelectric.model.channel import ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.forecast_interval import ForecastInterval -from
amberelectric.model.interval import Descriptor +import amberelectric +from amberelectric.models.actual_interval import ActualInterval +from amberelectric.models.channel import ChannelType +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.forecast_interval import ForecastInterval +from amberelectric.models.price_descriptor import PriceDescriptor +from amberelectric.rest import ApiException from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -31,22 +31,22 @@ def is_forecast(interval: ActualInterval | CurrentInterval | ForecastInterval) - def is_general(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool: """Return true if the supplied interval is on the general channel.""" - return interval.channel_type == ChannelType.GENERAL # type: ignore[no-any-return] + return interval.channel_type == ChannelType.GENERAL def is_controlled_load( interval: ActualInterval | CurrentInterval | ForecastInterval, ) -> bool: """Return true if the supplied interval is on the controlled load channel.""" - return interval.channel_type == ChannelType.CONTROLLED_LOAD # type: ignore[no-any-return] + return interval.channel_type == ChannelType.CONTROLLEDLOAD def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool: """Return true if the supplied interval is on the feed in channel.""" - return interval.channel_type == ChannelType.FEED_IN # type: ignore[no-any-return] + return interval.channel_type == ChannelType.FEEDIN -def normalize_descriptor(descriptor: Descriptor) -> str | None: +def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None: """Return the snake case versions of descriptor names. 
Returns None if the name is not recognized.""" if descriptor is None: return None @@ -71,7 +71,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator): """AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read.""" def __init__( - self, hass: HomeAssistant, api: amber_api.AmberApi, site_id: str + self, hass: HomeAssistant, api: amberelectric.AmberApi, site_id: str ) -> None: """Initialise the data service.""" super().__init__( @@ -93,12 +93,13 @@ class AmberUpdateCoordinator(DataUpdateCoordinator): "grid": {}, } try: - data = self._api.get_current_price(self.site_id, next=48) + data = self._api.get_current_prices(self.site_id, next=48) + intervals = [interval.actual_instance for interval in data] except ApiException as api_exception: raise UpdateFailed("Missing price data, skipping update") from api_exception - current = [interval for interval in data if is_current(interval)] - forecasts = [interval for interval in data if is_forecast(interval)] + current = [interval for interval in intervals if is_current(interval)] + forecasts = [interval for interval in intervals if is_forecast(interval)] general = [interval for interval in current if is_general(interval)] if len(general) == 0: @@ -137,7 +138,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator): interval for interval in forecasts if is_feed_in(interval) ] - LOGGER.debug("Fetched new Amber data: %s", data) + LOGGER.debug("Fetched new Amber data: %s", intervals) return result async def _async_update_data(self) -> dict[str, Any]: diff --git a/homeassistant/components/amberelectric/manifest.json b/homeassistant/components/amberelectric/manifest.json index 51be42cfa68..401eb1629a1 100644 --- a/homeassistant/components/amberelectric/manifest.json +++ b/homeassistant/components/amberelectric/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/amberelectric", "iot_class": "cloud_polling", "loggers": ["amberelectric"], - "requirements": ["amberelectric==1.1.1"] + "requirements": ["amberelectric==2.0.12"] } diff --git a/homeassistant/components/amberelectric/sensor.py b/homeassistant/components/amberelectric/sensor.py index aafdd730a0c..cdf40e5804d 100644 --- a/homeassistant/components/amberelectric/sensor.py +++ b/homeassistant/components/amberelectric/sensor.py @@ -8,22 +8,22 @@ from __future__ import annotations from typing import Any -from amberelectric.model.channel import ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.forecast_interval import ForecastInterval +from amberelectric.models.channel import ChannelType +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.forecast_interval import ForecastInterval from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CURRENCY_DOLLAR, PERCENTAGE, UnitOfEnergy from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import ATTRIBUTION, DOMAIN +from . 
import AmberConfigEntry +from .const import ATTRIBUTION from .coordinator import AmberUpdateCoordinator, normalize_descriptor UNIT = f"{CURRENCY_DOLLAR}/{UnitOfEnergy.KILO_WATT_HOUR}" @@ -52,7 +52,7 @@ class AmberSensor(CoordinatorEntity[AmberUpdateCoordinator], SensorEntity): self, coordinator: AmberUpdateCoordinator, description: SensorEntityDescription, - channel_type: ChannelType, + channel_type: str, ) -> None: """Initialize the Sensor.""" super().__init__(coordinator) @@ -73,7 +73,7 @@ class AmberPriceSensor(AmberSensor): """Return the current price in $/kWh.""" interval = self.coordinator.data[self.entity_description.key][self.channel_type] - if interval.channel_type == ChannelType.FEED_IN: + if interval.channel_type == ChannelType.FEEDIN: return format_cents_to_dollars(interval.per_kwh) * -1 return format_cents_to_dollars(interval.per_kwh) @@ -87,9 +87,9 @@ class AmberPriceSensor(AmberSensor): return data data["duration"] = interval.duration - data["date"] = interval.date.isoformat() + data["date"] = interval.var_date.isoformat() data["per_kwh"] = format_cents_to_dollars(interval.per_kwh) - if interval.channel_type == ChannelType.FEED_IN: + if interval.channel_type == ChannelType.FEEDIN: data["per_kwh"] = data["per_kwh"] * -1 data["nem_date"] = interval.nem_time.isoformat() data["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh) @@ -120,7 +120,7 @@ class AmberForecastSensor(AmberSensor): return None interval = intervals[0] - if interval.channel_type == ChannelType.FEED_IN: + if interval.channel_type == ChannelType.FEEDIN: return format_cents_to_dollars(interval.per_kwh) * -1 return format_cents_to_dollars(interval.per_kwh) @@ -142,10 +142,10 @@ class AmberForecastSensor(AmberSensor): for interval in intervals: datum = {} datum["duration"] = interval.duration - datum["date"] = interval.date.isoformat() + datum["date"] = interval.var_date.isoformat() datum["nem_date"] = interval.nem_time.isoformat() datum["per_kwh"] = format_cents_to_dollars(interval.per_kwh) - if interval.channel_type == ChannelType.FEED_IN: + if interval.channel_type == ChannelType.FEEDIN: datum["per_kwh"] = datum["per_kwh"] * -1 datum["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh) datum["start_time"] = interval.start_time.isoformat() @@ -196,11 +196,11 @@ class AmberGridSensor(CoordinatorEntity[AmberUpdateCoordinator], SensorEntity): async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: AmberConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a config entry.""" - coordinator: AmberUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data current: dict[str, CurrentInterval] = coordinator.data["current"] forecasts: dict[str, list[ForecastInterval]] = coordinator.data["forecasts"] diff --git a/homeassistant/components/amberelectric/strings.json b/homeassistant/components/amberelectric/strings.json index ccdc2374142..684a5a2a0cc 100644 --- a/homeassistant/components/amberelectric/strings.json +++ b/homeassistant/components/amberelectric/strings.json @@ -10,7 +10,7 @@ }, "site": { "data": { - "site_nmi": "Site NMI", + "site_id": "Site NMI", "site_name": "Site Name" }, "description": "Select the NMI of the site you would like to add" diff --git a/homeassistant/components/ambient_network/__init__.py b/homeassistant/components/ambient_network/__init__.py index b286fb7fbc9..e9443a676b5 100644 --- a/homeassistant/components/ambient_network/__init__.py +++ b/homeassistant/components/ambient_network/__init__.py @@ 
-8,28 +8,30 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN from .coordinator import AmbientNetworkDataUpdateCoordinator +type AmbientNetworkConfigEntry = ConfigEntry[AmbientNetworkDataUpdateCoordinator] + PLATFORMS: list[Platform] = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: AmbientNetworkConfigEntry +) -> bool: """Set up the Ambient Weather Network from a config entry.""" api = OpenAPI() coordinator = AmbientNetworkDataUpdateCoordinator(hass, api) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: AmbientNetworkConfigEntry +) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/ambient_network/manifest.json b/homeassistant/components/ambient_network/manifest.json index 553adb240b0..4800ffcb29d 100644 --- a/homeassistant/components/ambient_network/manifest.json +++ b/homeassistant/components/ambient_network/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aioambient"], - "requirements": ["aioambient==2024.01.0"] + "requirements": ["aioambient==2024.08.0"] } diff --git a/homeassistant/components/ambient_network/sensor.py b/homeassistant/components/ambient_network/sensor.py index 132fc7dbd0d..336745f88ff 100644 --- a/homeassistant/components/ambient_network/sensor.py +++ b/homeassistant/components/ambient_network/sensor.py @@ -10,7 +10,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONCENTRATION_PARTS_PER_MILLION, @@ -29,7 +28,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from .const import DOMAIN +from . 
import AmbientNetworkConfigEntry from .coordinator import AmbientNetworkDataUpdateCoordinator from .entity import AmbientNetworkEntity @@ -271,12 +270,12 @@ SENSOR_DESCRIPTIONS = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: AmbientNetworkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Ambient Network sensor entities.""" - coordinator: AmbientNetworkDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data if coordinator.config_entry is not None: async_add_entities( AmbientNetworkSensor( diff --git a/homeassistant/components/ambient_station/manifest.json b/homeassistant/components/ambient_station/manifest.json index 046ab9f73e9..a14de5f37c5 100644 --- a/homeassistant/components/ambient_station/manifest.json +++ b/homeassistant/components/ambient_station/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["aioambient"], - "requirements": ["aioambient==2024.01.0"] + "requirements": ["aioambient==2024.08.0"] } diff --git a/homeassistant/components/amcrest/icons.json b/homeassistant/components/amcrest/icons.json index efba49d6b56..e284bc15259 100644 --- a/homeassistant/components/amcrest/icons.json +++ b/homeassistant/components/amcrest/icons.json @@ -1,15 +1,37 @@ { "services": { - "enable_recording": "mdi:record-rec", - "disable_recording": "mdi:stop", - "enable_audio": "mdi:volume-high", - "disable_audio": "mdi:volume-off", - "enable_motion_recording": "mdi:motion-sensor", - "disable_motion_recording": "mdi:motion-sensor-off", - "goto_preset": "mdi:pan", - "set_color_bw": "mdi:palette", - "start_tour": "mdi:panorama", - "stop_tour": "mdi:panorama-outline", - "ptz_control": "mdi:pan" + "enable_recording": { + "service": "mdi:record-rec" + }, + "disable_recording": { + "service": "mdi:stop" + }, + "enable_audio": { + "service": "mdi:volume-high" + }, + "disable_audio": { + "service": "mdi:volume-off" + }, + "enable_motion_recording": { + "service": "mdi:motion-sensor" + }, + "disable_motion_recording": { + "service": "mdi:motion-sensor-off" + }, + "goto_preset": { + "service": "mdi:pan" + }, + "set_color_bw": { + "service": "mdi:palette" + }, + "start_tour": { + "service": "mdi:panorama" + }, + "stop_tour": { + "service": "mdi:panorama-outline" + }, + "ptz_control": { + "service": "mdi:pan" + } } } diff --git a/homeassistant/components/amcrest/manifest.json b/homeassistant/components/amcrest/manifest.json index 8b8d87092c4..7d8f8f9e6c8 100644 --- a/homeassistant/components/amcrest/manifest.json +++ b/homeassistant/components/amcrest/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/amcrest", "iot_class": "local_polling", "loggers": ["amcrest"], + "quality_scale": "legacy", "requirements": ["amcrest==1.9.8"] } diff --git a/homeassistant/components/ampio/manifest.json b/homeassistant/components/ampio/manifest.json index bc9c09d817a..17fc3eb3d96 100644 --- a/homeassistant/components/ampio/manifest.json +++ b/homeassistant/components/ampio/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ampio", "iot_class": "cloud_polling", "loggers": ["asmog"], + "quality_scale": "legacy", "requirements": ["asmog==0.0.6"] } diff --git a/homeassistant/components/analytics/__init__.py b/homeassistant/components/analytics/__init__.py index a49fe15b41f..9bcddcb868f 100644 --- a/homeassistant/components/analytics/__init__.py +++ b/homeassistant/components/analytics/__init__.py @@ -10,12 
+10,15 @@ from homeassistant.core import Event, HassJob, HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_call_later, async_track_time_interval from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .analytics import Analytics from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN) + async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool: """Set up the analytics integration.""" @@ -52,7 +55,7 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool: websocket_api.async_register_command(hass, websocket_analytics) websocket_api.async_register_command(hass, websocket_analytics_preferences) - hass.data[DOMAIN] = analytics + hass.data[DATA_COMPONENT] = analytics return True @@ -65,7 +68,7 @@ def websocket_analytics( msg: dict[str, Any], ) -> None: """Return analytics preferences.""" - analytics: Analytics = hass.data[DOMAIN] + analytics = hass.data[DATA_COMPONENT] connection.send_result( msg["id"], {ATTR_PREFERENCES: analytics.preferences, ATTR_ONBOARDED: analytics.onboarded}, @@ -87,7 +90,7 @@ async def websocket_analytics_preferences( ) -> None: """Update analytics preferences.""" preferences = msg[ATTR_PREFERENCES] - analytics: Analytics = hass.data[DOMAIN] + analytics = hass.data[DATA_COMPONENT] await analytics.save_preferences(preferences) await analytics.send_analytics() diff --git a/homeassistant/components/analytics/analytics.py b/homeassistant/components/analytics/analytics.py index 01c8bf22787..b63475c80a4 100644 --- a/homeassistant/components/analytics/analytics.py +++ b/homeassistant/components/analytics/analytics.py @@ -29,6 +29,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.entity_registry as er +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.storage import Store from homeassistant.helpers.system_info import async_get_system_info from homeassistant.loader import ( @@ -136,7 +137,7 @@ class Analytics: @property def supervisor(self) -> bool: """Return bool if a supervisor is present.""" - return hassio.is_hassio(self.hass) + return is_hassio(self.hass) async def load(self) -> None: """Load preferences.""" @@ -261,18 +262,19 @@ class Analytics: integrations.append(integration.domain) if supervisor_info is not None: + supervisor_client = hassio.get_supervisor_client(hass) installed_addons = await asyncio.gather( *( - hassio.async_get_addon_info(hass, addon[ATTR_SLUG]) + supervisor_client.addons.addon_info(addon[ATTR_SLUG]) for addon in supervisor_info[ATTR_ADDONS] ) ) addons.extend( { - ATTR_SLUG: addon[ATTR_SLUG], - ATTR_PROTECTED: addon[ATTR_PROTECTED], - ATTR_VERSION: addon[ATTR_VERSION], - ATTR_AUTO_UPDATE: addon[ATTR_AUTO_UPDATE], + ATTR_SLUG: addon.slug, + ATTR_PROTECTED: addon.protected, + ATTR_VERSION: addon.version, + ATTR_AUTO_UPDATE: addon.auto_update, } for addon in installed_addons ) diff --git a/homeassistant/components/analytics/manifest.json b/homeassistant/components/analytics/manifest.json index 955c4a813f4..5142a86ad97 100644 --- a/homeassistant/components/analytics/manifest.json +++ b/homeassistant/components/analytics/manifest.json @@ -1,7 +1,7 @@ { "domain": "analytics", "name": 
"Analytics", - "after_dependencies": ["energy", "recorder"], + "after_dependencies": ["energy", "hassio", "recorder"], "codeowners": ["@home-assistant/core", "@ludeeus"], "dependencies": ["api", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/analytics", diff --git a/homeassistant/components/analytics_insights/config_flow.py b/homeassistant/components/analytics_insights/config_flow.py index 909290b1035..c36755f5403 100644 --- a/homeassistant/components/analytics_insights/config_flow.py +++ b/homeassistant/components/analytics_insights/config_flow.py @@ -16,7 +16,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -27,6 +26,7 @@ from homeassistant.helpers.selector import ( ) from .const import ( + CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -45,9 +45,11 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> HomeassistantAnalyticsOptionsFlowHandler: """Get the options flow for this handler.""" - return HomeassistantAnalyticsOptionsFlowHandler(config_entry) + return HomeassistantAnalyticsOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -55,8 +57,12 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors: dict[str, str] = {} if user_input is not None: - if not user_input.get(CONF_TRACKED_INTEGRATIONS) and not user_input.get( - CONF_TRACKED_CUSTOM_INTEGRATIONS + if all( + [ + not user_input.get(CONF_TRACKED_ADDONS), + not user_input.get(CONF_TRACKED_INTEGRATIONS), + not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS), + ] ): errors["base"] = "no_integrations_selected" else: @@ -64,6 +70,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): title="Home Assistant Analytics Insights", data={}, options={ + CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []), CONF_TRACKED_INTEGRATIONS: user_input.get( CONF_TRACKED_INTEGRATIONS, [] ), @@ -77,6 +84,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): session=async_get_clientsession(self.hass) ) try: + addons = await client.get_addons() integrations = await client.get_integrations() custom_integrations = await client.get_custom_integrations() except HomeassistantAnalyticsConnectionError: @@ -99,6 +107,13 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, data_schema=vol.Schema( { + vol.Optional(CONF_TRACKED_ADDONS): SelectSelector( + SelectSelectorConfig( + options=list(addons), + multiple=True, + sort=True, + ) + ), vol.Optional(CONF_TRACKED_INTEGRATIONS): SelectSelector( SelectSelectorConfig( options=options, @@ -118,7 +133,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): ) -class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry): +class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow): """Handle Homeassistant Analytics options.""" async def async_step_init( @@ -127,14 +142,19 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry): """Manage the options.""" errors: dict[str, str] = {} if user_input is not None: - if not user_input.get(CONF_TRACKED_INTEGRATIONS) and 
not user_input.get( - CONF_TRACKED_CUSTOM_INTEGRATIONS + if all( + [ + not user_input.get(CONF_TRACKED_ADDONS), + not user_input.get(CONF_TRACKED_INTEGRATIONS), + not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS), + ] ): errors["base"] = "no_integrations_selected" else: return self.async_create_entry( title="", data={ + CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []), CONF_TRACKED_INTEGRATIONS: user_input.get( CONF_TRACKED_INTEGRATIONS, [] ), @@ -148,6 +168,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry): session=async_get_clientsession(self.hass) ) try: + addons = await client.get_addons() integrations = await client.get_integrations() custom_integrations = await client.get_custom_integrations() except HomeassistantAnalyticsConnectionError: @@ -168,6 +189,13 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry): data_schema=self.add_suggested_values_to_schema( vol.Schema( { + vol.Optional(CONF_TRACKED_ADDONS): SelectSelector( + SelectSelectorConfig( + options=list(addons), + multiple=True, + sort=True, + ) + ), vol.Optional(CONF_TRACKED_INTEGRATIONS): SelectSelector( SelectSelectorConfig( options=options, @@ -184,6 +212,6 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry): ), }, ), - self.options, + self.config_entry.options, ), ) diff --git a/homeassistant/components/analytics_insights/const.py b/homeassistant/components/analytics_insights/const.py index 56ea3f59794..1a01755f9ed 100644 --- a/homeassistant/components/analytics_insights/const.py +++ b/homeassistant/components/analytics_insights/const.py @@ -4,6 +4,7 @@ import logging DOMAIN = "analytics_insights" +CONF_TRACKED_ADDONS = "tracked_addons" CONF_TRACKED_INTEGRATIONS = "tracked_integrations" CONF_TRACKED_CUSTOM_INTEGRATIONS = "tracked_custom_integrations" diff --git a/homeassistant/components/analytics_insights/coordinator.py b/homeassistant/components/analytics_insights/coordinator.py index 2f863bf7771..701f1a8dbd4 100644 --- a/homeassistant/components/analytics_insights/coordinator.py +++ b/homeassistant/components/analytics_insights/coordinator.py @@ -12,11 +12,13 @@ from python_homeassistant_analytics import ( HomeassistantAnalyticsConnectionError, HomeassistantAnalyticsNotModifiedError, ) +from python_homeassistant_analytics.models import Addon from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( + CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -31,6 +33,9 @@ if TYPE_CHECKING: class AnalyticsData: """Analytics data class.""" + active_installations: int + reports_integrations: int + addons: dict[str, int] core_integrations: dict[str, int] custom_integrations: dict[str, int] @@ -51,6 +56,7 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic update_interval=timedelta(hours=12), ) self._client = client + self._tracked_addons = self.config_entry.options.get(CONF_TRACKED_ADDONS, []) self._tracked_integrations = self.config_entry.options[ CONF_TRACKED_INTEGRATIONS ] @@ -60,6 +66,7 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic async def _async_update_data(self) -> AnalyticsData: try: + addons_data = await self._client.get_addons() data = await self._client.get_current_analytics() custom_data = await self._client.get_custom_integrations() except HomeassistantAnalyticsConnectionError as err: @@ -68,6 +75,9 @@ class 
HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic ) from err except HomeassistantAnalyticsNotModifiedError: return self.data + addons = { + addon: get_addon_value(addons_data, addon) for addon in self._tracked_addons + } core_integrations = { integration: data.integrations.get(integration, 0) for integration in self._tracked_integrations @@ -76,7 +86,20 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic integration: get_custom_integration_value(custom_data, integration) for integration in self._tracked_custom_integrations } - return AnalyticsData(core_integrations, custom_integrations) + return AnalyticsData( + data.active_installations, + data.reports_integrations, + addons, + core_integrations, + custom_integrations, + ) + + +def get_addon_value(data: dict[str, Addon], name_slug: str) -> int: + """Get addon value.""" + if name_slug in data: + return data[name_slug].total + return 0 def get_custom_integration_value( diff --git a/homeassistant/components/analytics_insights/icons.json b/homeassistant/components/analytics_insights/icons.json index 705578dbc6b..8c52e5e944f 100644 --- a/homeassistant/components/analytics_insights/icons.json +++ b/homeassistant/components/analytics_insights/icons.json @@ -6,6 +6,12 @@ }, "custom_integrations": { "default": "mdi:puzzle-edit" + }, + "total_active_installations": { + "default": "mdi:puzzle" + }, + "total_reports_integrations": { + "default": "mdi:puzzle" } } } diff --git a/homeassistant/components/analytics_insights/manifest.json b/homeassistant/components/analytics_insights/manifest.json index 3c484d36df7..841cf1caf42 100644 --- a/homeassistant/components/analytics_insights/manifest.json +++ b/homeassistant/components/analytics_insights/manifest.json @@ -7,6 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["python_homeassistant_analytics"], - "requirements": ["python-homeassistant-analytics==0.7.0"], + "requirements": ["python-homeassistant-analytics==0.8.0"], "single_config_entry": true } diff --git a/homeassistant/components/analytics_insights/sensor.py b/homeassistant/components/analytics_insights/sensor.py index f7a77743b94..324ca6991d2 100644 --- a/homeassistant/components/analytics_insights/sensor.py +++ b/homeassistant/components/analytics_insights/sensor.py @@ -29,6 +29,20 @@ class AnalyticsSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[AnalyticsData], StateType] +def get_addon_entity_description( + name_slug: str, +) -> AnalyticsSensorEntityDescription: + """Get addon entity description.""" + return AnalyticsSensorEntityDescription( + key=f"addon_{name_slug}_active_installations", + translation_key="addons", + name=name_slug, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="active installations", + value_fn=lambda data: data.addons.get(name_slug), + ) + + def get_core_integration_entity_description( domain: str, name: str ) -> AnalyticsSensorEntityDescription: @@ -57,6 +71,26 @@ def get_custom_integration_entity_description( ) +GENERAL_SENSORS = [ + AnalyticsSensorEntityDescription( + key="total_active_installations", + translation_key="total_active_installations", + entity_registry_enabled_default=False, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="active installations", + value_fn=lambda data: data.active_installations, + ), + AnalyticsSensorEntityDescription( + key="total_reports_integrations", + translation_key="total_reports_integrations", + entity_registry_enabled_default=False, 
+ state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="active installations", + value_fn=lambda data: data.reports_integrations, + ), +] + + async def async_setup_entry( hass: HomeAssistant, entry: AnalyticsInsightsConfigEntry, @@ -69,6 +103,13 @@ async def async_setup_entry( analytics_data.coordinator ) entities: list[HomeassistantAnalyticsSensor] = [] + entities.extend( + HomeassistantAnalyticsSensor( + coordinator, + get_addon_entity_description(addon_name_slug), + ) + for addon_name_slug in coordinator.data.addons + ) entities.extend( HomeassistantAnalyticsSensor( coordinator, @@ -85,6 +126,12 @@ async def async_setup_entry( ) for integration_domain in coordinator.data.custom_integrations ) + + entities.extend( + HomeassistantAnalyticsSensor(coordinator, entity_description) + for entity_description in GENERAL_SENSORS + ) + async_add_entities(entities) diff --git a/homeassistant/components/analytics_insights/strings.json b/homeassistant/components/analytics_insights/strings.json index 3b770f189a4..10d3c19a2f6 100644 --- a/homeassistant/components/analytics_insights/strings.json +++ b/homeassistant/components/analytics_insights/strings.json @@ -3,10 +3,12 @@ "step": { "user": { "data": { + "tracked_addons": "Add-ons", "tracked_integrations": "Integrations", "tracked_custom_integrations": "Custom integrations" }, "data_description": { + "tracked_addons": "Select the add-ons you want to track", "tracked_integrations": "Select the integrations you want to track", "tracked_custom_integrations": "Select the custom integrations you want to track" } @@ -17,17 +19,19 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "error": { - "no_integration_selected": "You must select at least one integration to track" + "no_integrations_selected": "You must select at least one add-on or integration to track" } }, "options": { "step": { "init": { "data": { + "tracked_addons": "[%key:component::analytics_insights::config::step::user::data::tracked_addons%]", "tracked_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_integrations%]", "tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_custom_integrations%]" }, "data_description": { + "tracked_addons": "[%key:component::analytics_insights::config::step::user::data_description::tracked_addons%]", "tracked_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_integrations%]", "tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_custom_integrations%]" } @@ -37,13 +41,19 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "error": { - "no_integration_selected": "[%key:component::analytics_insights::config::error::no_integration_selected%]" + "no_integrations_selected": "[%key:component::analytics_insights::config::error::no_integrations_selected%]" } }, "entity": { "sensor": { "custom_integrations": { "name": "{custom_integration_domain} (custom)" + }, + "total_active_installations": { + "name": "Total active installations" + }, + "total_reports_integrations": { + "name": "Total reported integrations" } } } diff --git a/homeassistant/components/android_ip_webcam/__init__.py b/homeassistant/components/android_ip_webcam/__init__.py index 3772fe4642b..92bb0add445 100644 --- a/homeassistant/components/android_ip_webcam/__init__.py +++ b/homeassistant/components/android_ip_webcam/__init__.py @@ -4,7 +4,6 @@ from __future__ import
annotations from pydroid_ipcam import PyDroidIPCam -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -15,8 +14,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN -from .coordinator import AndroidIPCamDataUpdateCoordinator +from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, @@ -26,7 +24,9 @@ PLATFORMS: list[Platform] = [ ] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: AndroidIPCamConfigEntry +) -> bool: """Set up Android IP Webcam from a config entry.""" websession = async_get_clientsession(hass) cam = PyDroidIPCam( @@ -40,16 +40,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = AndroidIPCamDataUpdateCoordinator(hass, entry, cam) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: AndroidIPCamConfigEntry +) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/android_ip_webcam/binary_sensor.py b/homeassistant/components/android_ip_webcam/binary_sensor.py index 3ec03a59342..1846889bfda 100644 --- a/homeassistant/components/android_ip_webcam/binary_sensor.py +++ b/homeassistant/components/android_ip_webcam/binary_sensor.py @@ -7,12 +7,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, MOTION_ACTIVE -from .coordinator import AndroidIPCamDataUpdateCoordinator +from .const import MOTION_ACTIVE +from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator from .entity import AndroidIPCamBaseEntity BINARY_SENSOR_DESCRIPTION = BinarySensorEntityDescription( @@ -24,16 +23,12 @@ BINARY_SENSOR_DESCRIPTION = BinarySensorEntityDescription( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AndroidIPCamConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the IP Webcam sensors from config entry.""" - coordinator: AndroidIPCamDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] - - async_add_entities([IPWebcamBinarySensor(coordinator)]) + async_add_entities([IPWebcamBinarySensor(config_entry.runtime_data)]) class IPWebcamBinarySensor(AndroidIPCamBaseEntity, BinarySensorEntity): diff --git a/homeassistant/components/android_ip_webcam/camera.py b/homeassistant/components/android_ip_webcam/camera.py index 2149e40b6e1..95d4fb9f67a 100644 --- a/homeassistant/components/android_ip_webcam/camera.py +++ b/homeassistant/components/android_ip_webcam/camera.py @@ -3,7 +3,6 @@ 
from __future__ import annotations from homeassistant.components.mjpeg import MjpegCamera, filter_urllib3_logging -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -15,21 +14,17 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN -from .coordinator import AndroidIPCamDataUpdateCoordinator +from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AndroidIPCamConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the IP Webcam camera from config entry.""" filter_urllib3_logging() - coordinator: AndroidIPCamDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] - - async_add_entities([IPWebcamCamera(coordinator)]) + async_add_entities([IPWebcamCamera(config_entry.runtime_data)]) class IPWebcamCamera(MjpegCamera): diff --git a/homeassistant/components/android_ip_webcam/coordinator.py b/homeassistant/components/android_ip_webcam/coordinator.py index 1647b6890c1..fd6e1fcc4b9 100644 --- a/homeassistant/components/android_ip_webcam/coordinator.py +++ b/homeassistant/components/android_ip_webcam/coordinator.py @@ -15,19 +15,22 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) +type AndroidIPCamConfigEntry = ConfigEntry[AndroidIPCamDataUpdateCoordinator] + class AndroidIPCamDataUpdateCoordinator(DataUpdateCoordinator[None]): """Coordinator class for the Android IP Webcam.""" + config_entry: AndroidIPCamConfigEntry + def __init__( self, hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AndroidIPCamConfigEntry, cam: PyDroidIPCam, ) -> None: """Initialize the Android IP Webcam.""" self.hass = hass - self.config_entry: ConfigEntry = config_entry self.cam = cam super().__init__( self.hass, diff --git a/homeassistant/components/android_ip_webcam/sensor.py b/homeassistant/components/android_ip_webcam/sensor.py index 7ccb0661a6c..9b2454d6c09 100644 --- a/homeassistant/components/android_ip_webcam/sensor.py +++ b/homeassistant/components/android_ip_webcam/sensor.py @@ -13,14 +13,12 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import DOMAIN -from .coordinator import AndroidIPCamDataUpdateCoordinator +from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator from .entity import AndroidIPCamBaseEntity @@ -120,19 +118,21 @@ SENSOR_TYPES: tuple[AndroidIPWebcamSensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AndroidIPCamConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the IP Webcam sensors from config entry.""" - coordinator: AndroidIPCamDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data sensor_types = [ sensor for sensor in SENSOR_TYPES if sensor.key - in [*coordinator.cam.enabled_sensors, "audio_connections", "video_connections"] + in [ + *coordinator.cam.enabled_sensors, + "audio_connections", + "video_connections", + ] ] async_add_entities( IPWebcamSensor(coordinator, description) for description in sensor_types diff --git a/homeassistant/components/android_ip_webcam/switch.py b/homeassistant/components/android_ip_webcam/switch.py index 038c3330d82..f813415df0b 100644 --- a/homeassistant/components/android_ip_webcam/switch.py +++ b/homeassistant/components/android_ip_webcam/switch.py @@ -9,13 +9,11 @@ from typing import Any from pydroid_ipcam import PyDroidIPCam from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import AndroidIPCamDataUpdateCoordinator +from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator from .entity import AndroidIPCamBaseEntity @@ -113,14 +111,12 @@ SWITCH_TYPES: tuple[AndroidIPWebcamSwitchEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AndroidIPCamConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the IP Webcam switches from config entry.""" - coordinator: AndroidIPCamDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data switch_types = [ switch for switch in SWITCH_TYPES diff --git a/homeassistant/components/androidtv/__init__.py b/homeassistant/components/androidtv/__init__.py index 34b324db169..4ffa0e24777 100644 --- a/homeassistant/components/androidtv/__init__.py +++ b/homeassistant/components/androidtv/__init__.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Mapping from dataclasses import dataclass +import logging import os from typing import Any @@ -40,6 +41,7 @@ from .const import ( CONF_ADB_SERVER_IP, CONF_ADB_SERVER_PORT, CONF_ADBKEY, + CONF_SCREENCAP_INTERVAL, CONF_STATE_DETECTION_RULES, DEFAULT_ADB_SERVER_PORT, DEVICE_ANDROIDTV, @@ -66,6 +68,8 @@ RELOAD_OPTIONS = [CONF_STATE_DETECTION_RULES] _INVALID_MACS = {"ff:ff:ff:ff:ff:ff"} +_LOGGER = logging.getLogger(__name__) + @dataclass class AndroidTVRuntimeData: @@ -106,7 +110,7 @@ def _setup_androidtv( adb_log = f"using Python ADB implementation with adbkey='{adbkey}'" else: - # Use "pure-python-adb" (communicate with ADB server) + # Communicate via ADB server signer = None adb_log = ( "using ADB server at" @@ -131,15 +135,16 @@ async def async_connect_androidtv( ) aftv = await async_androidtv_setup( - config[CONF_HOST], - config[CONF_PORT], - adbkey, - config.get(CONF_ADB_SERVER_IP), - config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT), - state_detection_rules, - config[CONF_DEVICE_CLASS], - timeout, - signer, + host=config[CONF_HOST], + port=config[CONF_PORT], + adbkey=adbkey, + 
adb_server_ip=config.get(CONF_ADB_SERVER_IP), + adb_server_port=config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT), + state_detection_rules=state_detection_rules, + device_class=config[CONF_DEVICE_CLASS], + auth_timeout_s=timeout, + signer=signer, + log_errors=False, ) if not aftv.available: @@ -157,6 +162,32 @@ async def async_connect_androidtv( return aftv, None +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate old entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", entry.version, entry.minor_version + ) + + if entry.version == 1: + new_options = {**entry.options} + + # Migrate MinorVersion 1 -> MinorVersion 2: New option + if entry.minor_version < 2: + new_options = {**new_options, CONF_SCREENCAP_INTERVAL: 0} + + hass.config_entries.async_update_entry( + entry, options=new_options, minor_version=2, version=1 + ) + + _LOGGER.debug( + "Migration to configuration version %s.%s successful", + entry.version, + entry.minor_version, + ) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: AndroidTVConfigEntry) -> bool: """Set up Android Debug Bridge platform.""" diff --git a/homeassistant/components/androidtv/config_flow.py b/homeassistant/components/androidtv/config_flow.py index 1ed4b0f6782..afaba5175da 100644 --- a/homeassistant/components/androidtv/config_flow.py +++ b/homeassistant/components/androidtv/config_flow.py @@ -13,7 +13,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_HOST, CONF_PORT from homeassistant.core import callback @@ -34,7 +34,7 @@ from .const import ( CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, CONF_GET_SOURCES, - CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, CONF_STATE_DETECTION_RULES, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, @@ -43,7 +43,7 @@ from .const import ( DEFAULT_EXCLUDE_UNNAMED_APPS, DEFAULT_GET_SOURCES, DEFAULT_PORT, - DEFAULT_SCREENCAP, + DEFAULT_SCREENCAP_INTERVAL, DEVICE_CLASSES, DOMAIN, PROP_ETHMAC, @@ -76,6 +76,7 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 + MINOR_VERSION = 2 @callback def _show_setup_form( @@ -131,7 +132,7 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN): return RESULT_CONN_ERROR, None dev_prop = aftv.device_properties - _LOGGER.info( + _LOGGER.debug( "Android device at %s: %s = %r, %s = %r", user_input[CONF_HOST], PROP_ETHMAC, @@ -185,16 +186,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN): return OptionsFlowHandler(config_entry) -class OptionsFlowHandler(OptionsFlowWithConfigEntry): +class OptionsFlowHandler(OptionsFlow): """Handle an option flow for Android Debug Bridge.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - - self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {}) - self._state_det_rules: dict[str, Any] = self.options.setdefault( - CONF_STATE_DETECTION_RULES, {} + self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {})) + self._state_det_rules: dict[str, Any] = dict( + config_entry.options.get(CONF_STATE_DETECTION_RULES, {}) ) self._conf_app_id: str | None = None self._conf_rule_id: str | None = None @@ -236,7 +235,7 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry): SelectOptionDict(value=k, label=v) for k, v in apps_list.items() ] rules = [RULES_NEW_ID, *self._state_det_rules] - options = self.options + 
options = self.config_entry.options data_schema = vol.Schema( { @@ -253,10 +252,12 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry): CONF_EXCLUDE_UNNAMED_APPS, DEFAULT_EXCLUDE_UNNAMED_APPS ), ): bool, - vol.Optional( - CONF_SCREENCAP, - default=options.get(CONF_SCREENCAP, DEFAULT_SCREENCAP), - ): bool, + vol.Required( + CONF_SCREENCAP_INTERVAL, + default=options.get( + CONF_SCREENCAP_INTERVAL, DEFAULT_SCREENCAP_INTERVAL + ), + ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=15)), vol.Optional( CONF_TURN_OFF_COMMAND, description={ diff --git a/homeassistant/components/androidtv/const.py b/homeassistant/components/androidtv/const.py index ee279c0fb3a..0d9bdc8f6c0 100644 --- a/homeassistant/components/androidtv/const.py +++ b/homeassistant/components/androidtv/const.py @@ -9,6 +9,7 @@ CONF_APPS = "apps" CONF_EXCLUDE_UNNAMED_APPS = "exclude_unnamed_apps" CONF_GET_SOURCES = "get_sources" CONF_SCREENCAP = "screencap" +CONF_SCREENCAP_INTERVAL = "screencap_interval" CONF_STATE_DETECTION_RULES = "state_detection_rules" CONF_TURN_OFF_COMMAND = "turn_off_command" CONF_TURN_ON_COMMAND = "turn_on_command" @@ -18,7 +19,7 @@ DEFAULT_DEVICE_CLASS = "auto" DEFAULT_EXCLUDE_UNNAMED_APPS = False DEFAULT_GET_SOURCES = True DEFAULT_PORT = 5555 -DEFAULT_SCREENCAP = True +DEFAULT_SCREENCAP_INTERVAL = 5 DEVICE_ANDROIDTV = "androidtv" DEVICE_FIRETV = "firetv" diff --git a/homeassistant/components/androidtv/entity.py b/homeassistant/components/androidtv/entity.py index 470a4950ebc..fa583bb2777 100644 --- a/homeassistant/components/androidtv/entity.py +++ b/homeassistant/components/androidtv/entity.py @@ -67,7 +67,7 @@ def adb_decorator[_ADBDeviceT: AndroidTVEntity, **_P, _R]( return await func(self, *args, **kwargs) except LockNotAcquiredException: # If the ADB lock could not be acquired, skip this command - _LOGGER.info( + _LOGGER.debug( ( "ADB command %s not executed because the connection is" " currently in use" @@ -151,5 +151,5 @@ class AndroidTVEntity(Entity): # Using "adb_shell" (Python ADB implementation) self.exceptions = ADB_PYTHON_EXCEPTIONS else: - # Using "pure-python-adb" (communicate with ADB server) + # Communicate via ADB server self.exceptions = ADB_TCP_EXCEPTIONS diff --git a/homeassistant/components/androidtv/icons.json b/homeassistant/components/androidtv/icons.json index 0127d60a72e..d7c646dfdfc 100644 --- a/homeassistant/components/androidtv/icons.json +++ b/homeassistant/components/androidtv/icons.json @@ -1,8 +1,16 @@ { "services": { - "adb_command": "mdi:console", - "download": "mdi:download", - "upload": "mdi:upload", - "learn_sendevent": "mdi:remote" + "adb_command": { + "service": "mdi:console" + }, + "download": { + "service": "mdi:download" + }, + "upload": { + "service": "mdi:upload" + }, + "learn_sendevent": { + "service": "mdi:remote" + } } } diff --git a/homeassistant/components/androidtv/manifest.json b/homeassistant/components/androidtv/manifest.json index 2d0b062c750..e30d03fc2d5 100644 --- a/homeassistant/components/androidtv/manifest.json +++ b/homeassistant/components/androidtv/manifest.json @@ -6,10 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/androidtv", "integration_type": "device", "iot_class": "local_polling", - "loggers": ["adb_shell", "androidtv", "pure_python_adb"], - "requirements": [ - "adb-shell[async]==0.4.4", - "androidtv[async]==0.0.73", - "pure-python-adb[async]==0.3.0.dev0" - ] + "loggers": ["adb_shell", "androidtv"], + "requirements": ["adb-shell[async]==0.4.4", "androidtv[async]==0.0.75"] } diff --git 
a/homeassistant/components/androidtv/media_player.py b/homeassistant/components/androidtv/media_player.py index 75cf6ead6c3..728411ddf42 100644 --- a/homeassistant/components/androidtv/media_player.py +++ b/homeassistant/components/androidtv/media_player.py @@ -2,10 +2,9 @@ from __future__ import annotations -from datetime import timedelta +from datetime import datetime, timedelta import hashlib import logging -from typing import Any from androidtv.constants import APPS, KEYS from androidtv.setup_async import AndroidTVAsync, FireTVAsync @@ -23,19 +22,19 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util import Throttle +from homeassistant.util.dt import utcnow from . import AndroidTVConfigEntry from .const import ( CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, CONF_GET_SOURCES, - CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, DEFAULT_EXCLUDE_UNNAMED_APPS, DEFAULT_GET_SOURCES, - DEFAULT_SCREENCAP, + DEFAULT_SCREENCAP_INTERVAL, DEVICE_ANDROIDTV, SIGNAL_CONFIG_ENTITY, ) @@ -48,8 +47,6 @@ ATTR_DEVICE_PATH = "device_path" ATTR_HDMI_INPUT = "hdmi_input" ATTR_LOCAL_PATH = "local_path" -MIN_TIME_BETWEEN_SCREENCAPS = timedelta(seconds=60) SERVICE_ADB_COMMAND = "adb_command" SERVICE_DOWNLOAD = "download" SERVICE_LEARN_SENDEVENT = "learn_sendevent" @@ -125,7 +122,8 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity): self._app_name_to_id: dict[str, str] = {} self._get_sources = DEFAULT_GET_SOURCES self._exclude_unnamed_apps = DEFAULT_EXCLUDE_UNNAMED_APPS - self._screencap = DEFAULT_SCREENCAP + self._screencap_delta: timedelta | None = None + self._last_screencap: datetime | None = None self.turn_on_command: str | None = None self.turn_off_command: str | None = None @@ -159,7 +157,13 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity): self._exclude_unnamed_apps = options.get( CONF_EXCLUDE_UNNAMED_APPS, DEFAULT_EXCLUDE_UNNAMED_APPS ) - self._screencap = options.get(CONF_SCREENCAP, DEFAULT_SCREENCAP) + screencap_interval: int = options.get( + CONF_SCREENCAP_INTERVAL, DEFAULT_SCREENCAP_INTERVAL + ) + if screencap_interval > 0: + self._screencap_delta = timedelta(minutes=screencap_interval) + else: + self._screencap_delta = None self.turn_off_command = options.get(CONF_TURN_OFF_COMMAND) self.turn_on_command = options.get(CONF_TURN_ON_COMMAND) @@ -183,7 +187,7 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity): async def _async_get_screencap(self, prev_app_id: str | None = None) -> None: """Take a screen capture from the device when enabled.""" if ( - not self._screencap + not self._screencap_delta or self.state in {MediaPlayerState.OFF, None} or not self.available ): @@ -193,11 +197,18 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity): force: bool = prev_app_id is not None if force: force = prev_app_id != self._attr_app_id - await self._adb_get_screencap(no_throttle=force) + await self._adb_get_screencap(force) - @Throttle(MIN_TIME_BETWEEN_SCREENCAPS) - async def _adb_get_screencap(self, **kwargs: Any) -> None: - """Take a screen capture from the device every 60 seconds.""" + async def _adb_get_screencap(self, force: bool = False) -> None: + """Take a screen capture from the device at the configured interval.""" + time_elapsed = self._screencap_delta is not None and ( + self._last_screencap is None + or (utcnow() -
self._last_screencap) >= self._screencap_delta + ) + if not (force or time_elapsed): + return + + self._last_screencap = utcnow() if media_data := await self._adb_screencap(): self._media_image = media_data, "image/png" self._attr_media_image_hash = hashlib.sha256(media_data).hexdigest()[:16] @@ -306,7 +317,7 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity): msg, title="Android Debug Bridge", ) - _LOGGER.info("%s", msg) + _LOGGER.debug("%s", msg) @adb_decorator() async def service_download(self, device_path: str, local_path: str) -> None: diff --git a/homeassistant/components/androidtv/strings.json b/homeassistant/components/androidtv/strings.json index 3032e9ac6ef..b6f5d494d0f 100644 --- a/homeassistant/components/androidtv/strings.json +++ b/homeassistant/components/androidtv/strings.json @@ -31,7 +31,7 @@ "apps": "Configure applications list", "get_sources": "Retrieve the running apps as the list of sources", "exclude_unnamed_apps": "Exclude apps with unknown name from the sources list", - "screencap": "Use screen capture for album art", + "screencap_interval": "Interval in minutes between screen capture for album art (set 0 to disable)", "state_detection_rules": "Configure state detection rules", "turn_off_command": "ADB shell turn off command (leave empty for default)", "turn_on_command": "ADB shell turn on command (leave empty for default)" diff --git a/homeassistant/components/androidtv_remote/config_flow.py b/homeassistant/components/androidtv_remote/config_flow.py index 813c0eda14b..3500e4ff47b 100644 --- a/homeassistant/components/androidtv_remote/config_flow.py +++ b/homeassistant/components/androidtv_remote/config_flow.py @@ -16,10 +16,11 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME from homeassistant.core import callback @@ -58,13 +59,10 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize a new AndroidTVRemoteConfigFlow.""" - self.api: AndroidTVRemote | None = None - self.reauth_entry: ConfigEntry | None = None - self.host: str | None = None - self.name: str | None = None - self.mac: str | None = None + api: AndroidTVRemote + host: str + name: str + mac: str async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -72,13 +70,11 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors: dict[str, str] = {} if user_input is not None: - self.host = user_input["host"] - assert self.host + self.host = user_input[CONF_HOST] api = create_api(self.hass, self.host, enable_ime=False) try: await api.async_generate_cert_if_missing() self.name, self.mac = await api.async_get_name_and_mac() - assert self.mac await self.async_set_unique_id(format_mac(self.mac)) self._abort_if_unique_id_configured(updates={CONF_HOST: self.host}) return await self._async_start_pair() @@ -94,7 +90,6 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): async def _async_start_pair(self) -> ConfigFlowResult: """Start pairing with the Android TV. 
Navigate to the pair flow to enter the PIN shown on screen.""" - assert self.host self.api = create_api(self.hass, self.host, enable_ime=False) await self.api.async_generate_cert_if_missing() await self.api.async_start_pairing() @@ -108,14 +103,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: try: pin = user_input["pin"] - assert self.api await self.api.async_finish_pairing(pin) - if self.reauth_entry: + if self.source == SOURCE_REAUTH: await self.hass.config_entries.async_reload( - self.reauth_entry.entry_id + self._get_reauth_entry().entry_id ) return self.async_abort(reason="reauth_successful") - assert self.name return self.async_create_entry( title=self.name, data={ @@ -155,10 +148,21 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.debug("Android TV device found via zeroconf: %s", discovery_info) self.host = discovery_info.host self.name = discovery_info.name.removesuffix("._androidtvremote2._tcp.local.") - self.mac = discovery_info.properties.get("bt") - if not self.mac: + if not (mac := discovery_info.properties.get("bt")): return self.async_abort(reason="cannot_connect") - await self.async_set_unique_id(format_mac(self.mac)) + self.mac = mac + existing_config_entry = await self.async_set_unique_id(format_mac(mac)) + # Sometimes, devices send an invalid zeroconf message with multiple addresses + # and one of them, which could end up being in discovery_info.host, is from a + # different device. If any of the discovery_info.ip_addresses matches the + # existing host, don't update the host. + if existing_config_entry and len(discovery_info.ip_addresses) > 1: + existing_host = existing_config_entry.data[CONF_HOST] + if existing_host != self.host: + if existing_host in [ + str(ip_address) for ip_address in discovery_info.ip_addresses + ]: + self.host = existing_host self._abort_if_unique_id_configured( updates={CONF_HOST: self.host, CONF_NAME: self.name} ) @@ -189,9 +193,6 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): self.host = entry_data[CONF_HOST] self.name = entry_data[CONF_NAME] self.mac = entry_data[CONF_MAC] - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -220,13 +221,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): return AndroidTVRemoteOptionsFlowHandler(config_entry) -class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithConfigEntry): +class AndroidTVRemoteOptionsFlowHandler(OptionsFlow): """Android TV Remote options flow.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {}) + self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {})) self._conf_app_id: str | None = None @callback diff --git a/homeassistant/components/androidtv_remote/manifest.json b/homeassistant/components/androidtv_remote/manifest.json index e24fcc5d653..d9c2dd05c44 100644 --- a/homeassistant/components/androidtv_remote/manifest.json +++ b/homeassistant/components/androidtv_remote/manifest.json @@ -7,7 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["androidtvremote2"], - "quality_scale": "platinum", - "requirements": ["androidtvremote2==0.1.1"], + "requirements": ["androidtvremote2==0.1.2"], "zeroconf": ["_androidtvremote2._tcp.local."] } diff --git 
a/homeassistant/components/androidtv_remote/media_player.py b/homeassistant/components/androidtv_remote/media_player.py index 554aa2f2946..cdc307a0472 100644 --- a/homeassistant/components/androidtv_remote/media_player.py +++ b/homeassistant/components/androidtv_remote/media_player.py @@ -8,6 +8,7 @@ from typing import Any from androidtvremote2 import AndroidTVRemote, ConnectionClosed from homeassistant.components.media_player import ( + BrowseMedia, MediaClass, MediaPlayerDeviceClass, MediaPlayerEntity, @@ -15,7 +16,6 @@ from homeassistant.components.media_player import ( MediaPlayerState, MediaType, ) -from homeassistant.components.media_player.browse_media import BrowseMedia from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback diff --git a/homeassistant/components/anel_pwrctrl/manifest.json b/homeassistant/components/anel_pwrctrl/manifest.json index 48cc3b96ec0..67c881a3db2 100644 --- a/homeassistant/components/anel_pwrctrl/manifest.json +++ b/homeassistant/components/anel_pwrctrl/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/anel_pwrctrl", "iot_class": "local_polling", "loggers": ["anel_pwrctrl"], + "quality_scale": "legacy", "requirements": ["anel-pwrctrl-homeassistant==0.0.1.dev2"] } diff --git a/homeassistant/components/anova/__init__.py b/homeassistant/components/anova/__init__.py index 7503de8ea10..4ae4750b9a9 100644 --- a/homeassistant/components/anova/__init__.py +++ b/homeassistant/components/anova/__init__.py @@ -13,22 +13,20 @@ from anova_wifi import ( WebsocketFailure, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client -from .const import DOMAIN from .coordinator import AnovaCoordinator -from .models import AnovaData +from .models import AnovaConfigEntry, AnovaData PLATFORMS = [Platform.SENSOR] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AnovaConfigEntry) -> bool: """Set up Anova from a config entry.""" api = AnovaApi( aiohttp_client.async_get_clientsession(hass), @@ -62,17 +60,36 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: assert api.websocket_handler is not None devices = list(api.websocket_handler.devices.values()) coordinators = [AnovaCoordinator(hass, device) for device in devices] - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = AnovaData( - api_jwt=api.jwt, coordinators=coordinators, api=api - ) + entry.runtime_data = AnovaData(api_jwt=api.jwt, coordinators=coordinators, api=api) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AnovaConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - anova_data: AnovaData = hass.data[DOMAIN].pop(entry.entry_id) # Disconnect from WS - await anova_data.api.disconnect_websocket() + await entry.runtime_data.api.disconnect_websocket() return 
unload_ok + + +async def async_migrate_entry(hass: HomeAssistant, entry: AnovaConfigEntry) -> bool: + """Migrate entry.""" + _LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version) + + if entry.version > 1: + # This means the user has downgraded from a future version + return False + + if entry.version == 1 and entry.minor_version == 1: + new_data = {**entry.data} + if CONF_DEVICES in new_data: + new_data.pop(CONF_DEVICES) + + hass.config_entries.async_update_entry(entry, data=new_data, minor_version=2) + + _LOGGER.debug( + "Migration to version %s:%s successful", entry.version, entry.minor_version + ) + + return True diff --git a/homeassistant/components/anova/config_flow.py b/homeassistant/components/anova/config_flow.py index 6e331ccf4a2..bc4723b1dba 100644 --- a/homeassistant/components/anova/config_flow.py +++ b/homeassistant/components/anova/config_flow.py @@ -6,7 +6,7 @@ from anova_wifi import AnovaApi, InvalidLogin import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN @@ -16,6 +16,7 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN): """Sets up a config flow for Anova.""" VERSION = 1 + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, str] | None = None @@ -42,8 +43,6 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN): data={ CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], - # this can be removed in a migration to 1.2 in 2024.11 - CONF_DEVICES: [], }, ) diff --git a/homeassistant/components/anova/models.py b/homeassistant/components/anova/models.py index 8caf16eeae1..eef8180cf88 100644 --- a/homeassistant/components/anova/models.py +++ b/homeassistant/components/anova/models.py @@ -4,8 +4,12 @@ from dataclasses import dataclass from anova_wifi import AnovaApi +from homeassistant.config_entries import ConfigEntry + from .coordinator import AnovaCoordinator +type AnovaConfigEntry = ConfigEntry[AnovaData] + @dataclass class AnovaData: diff --git a/homeassistant/components/anova/sensor.py b/homeassistant/components/anova/sensor.py index e5fe9ededfd..aa572a0ee9b 100644 --- a/homeassistant/components/anova/sensor.py +++ b/homeassistant/components/anova/sensor.py @@ -7,7 +7,6 @@ from dataclasses import dataclass from anova_wifi import AnovaMode, AnovaState, APCUpdateSensor -from homeassistant import config_entries from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -19,10 +18,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import DOMAIN from .coordinator import AnovaCoordinator from .entity import AnovaDescriptionEntity -from .models import AnovaData +from .models import AnovaConfigEntry @dataclass(frozen=True, kw_only=True) @@ -99,11 +97,11 @@ SENSOR_DESCRIPTIONS: list[AnovaSensorEntityDescription] = [ async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: AnovaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Anova device.""" - anova_data: AnovaData = hass.data[DOMAIN][entry.entry_id] + anova_data = entry.runtime_data for coordinator in anova_data.coordinators: setup_coordinator(coordinator, 
async_add_entities) diff --git a/homeassistant/components/anthemav/__init__.py b/homeassistant/components/anthemav/__init__.py index 4efeb9245c8..9616d554424 100644 --- a/homeassistant/components/anthemav/__init__.py +++ b/homeassistant/components/anthemav/__init__.py @@ -13,14 +13,16 @@ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import ANTHEMAV_UPDATE_SIGNAL, DEVICE_TIMEOUT_SECONDS, DOMAIN +from .const import ANTHEMAV_UPDATE_SIGNAL, DEVICE_TIMEOUT_SECONDS + +type AnthemavConfigEntry = ConfigEntry[anthemav.Connection] PLATFORMS = [Platform.MEDIA_PLAYER] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AnthemavConfigEntry) -> bool: """Set up Anthem A/V Receivers from a config entry.""" @callback @@ -41,7 +43,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except (OSError, DeviceError) as err: raise ConfigEntryNotReady from err - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = avr + entry.runtime_data = avr await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -56,16 +58,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AnthemavConfigEntry) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - avr = hass.data[DOMAIN][entry.entry_id] + avr = entry.runtime_data + _LOGGER.debug("Close avr connection") + avr.close() - if avr is not None: - _LOGGER.debug("Close avr connection") - avr.close() - - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) return unload_ok diff --git a/homeassistant/components/anthemav/media_player.py b/homeassistant/components/anthemav/media_player.py index 1dbfdf275f2..be5a6ad2258 100644 --- a/homeassistant/components/anthemav/media_player.py +++ b/homeassistant/components/anthemav/media_player.py @@ -4,7 +4,6 @@ from __future__ import annotations import logging -from anthemav.connection import Connection from anthemav.protocol import AVR from homeassistant.components.media_player import ( @@ -13,13 +12,13 @@ from homeassistant.components.media_player import ( MediaPlayerEntityFeature, MediaPlayerState, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MAC, CONF_MODEL from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import AnthemavConfigEntry from .const import ANTHEMAV_UPDATE_SIGNAL, DOMAIN, MANUFACTURER _LOGGER = logging.getLogger(__name__) @@ -27,7 +26,7 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AnthemavConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry.""" @@ -35,7 +34,7 @@ async def async_setup_entry( mac_address = config_entry.data[CONF_MAC] model = config_entry.data[CONF_MODEL] - avr: Connection = hass.data[DOMAIN][config_entry.entry_id] + avr = config_entry.runtime_data _LOGGER.debug("Connection data dump: %s", avr.dump_conndata) diff --git a/homeassistant/components/anthropic/config_flow.py b/homeassistant/components/anthropic/config_flow.py index 01e16ec5350..fa43a3c4bcc 100644 --- a/homeassistant/components/anthropic/config_flow.py +++ b/homeassistant/components/anthropic/config_flow.py @@ -87,10 +87,13 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN): except anthropic.APIConnectionError: errors["base"] = "cannot_connect" except anthropic.APIStatusError as e: - if isinstance(e.body, dict): - errors["base"] = e.body.get("error", {}).get("type", "unknown") - else: - errors["base"] = "unknown" + errors["base"] = "unknown" + if ( + isinstance(e.body, dict) + and (error := e.body.get("error")) + and error.get("type") == "authentication_error" + ): + errors["base"] = "authentication_error" except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" @@ -118,7 +121,6 @@ class AnthropicOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.last_rendered_recommended = config_entry.options.get( CONF_RECOMMENDED, False ) diff --git a/homeassistant/components/anthropic/const.py b/homeassistant/components/anthropic/const.py index 4ccf2c88faa..0dbf9c51ac1 100644 --- a/homeassistant/components/anthropic/const.py +++ b/homeassistant/components/anthropic/const.py @@ -8,7 +8,7 @@ LOGGER = logging.getLogger(__package__) CONF_RECOMMENDED = "recommended" CONF_PROMPT = "prompt" CONF_CHAT_MODEL = "chat_model" -RECOMMENDED_CHAT_MODEL = "claude-3-5-sonnet-20240620" +RECOMMENDED_CHAT_MODEL = "claude-3-haiku-20240307" CONF_MAX_TOKENS = "max_tokens" RECOMMENDED_MAX_TOKENS = 1024 CONF_TEMPERATURE = "temperature" diff --git a/homeassistant/components/aosmith/__init__.py b/homeassistant/components/aosmith/__init__.py index c42096cd3a7..dd60f69c4b9 100644 --- a/homeassistant/components/aosmith/__init__.py +++ b/homeassistant/components/aosmith/__init__.py @@ -16,6 +16,8 @@ from .coordinator import AOSmithEnergyCoordinator, AOSmithStatusCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WATER_HEATER] +type AOSmithConfigEntry = ConfigEntry[AOSmithData] + @dataclass class AOSmithData: @@ -26,7 +28,7 @@ class AOSmithData: energy_coordinator: AOSmithEnergyCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AOSmithConfigEntry) -> bool: """Set up A. O. 
Smith from a config entry.""" email = entry.data[CONF_EMAIL] password = entry.data[CONF_PASSWORD] @@ -55,7 +57,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) await energy_coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = AOSmithData( + entry.runtime_data = AOSmithData( client, status_coordinator, energy_coordinator, @@ -66,9 +68,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AOSmithConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/aosmith/config_flow.py b/homeassistant/components/aosmith/config_flow.py index 6d74a9936ae..a6a0712c4f7 100644 --- a/homeassistant/components/aosmith/config_flow.py +++ b/homeassistant/components/aosmith/config_flow.py @@ -23,7 +23,7 @@ class AOSmithConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_email: str | None = None + _reauth_email: str async def _async_validate_credentials( self, email: str, password: str @@ -85,21 +85,16 @@ class AOSmithConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle user's reauth credentials.""" errors: dict[str, str] = {} - if user_input is not None and self._reauth_email is not None: - email = self._reauth_email + if user_input: password = user_input[CONF_PASSWORD] - entry_id = self.context["entry_id"] - if entry := self.hass.config_entries.async_get_entry(entry_id): - error = await self._async_validate_credentials(email, password) - if error is None: - self.hass.config_entries.async_update_entry( - entry, - data=entry.data | user_input, - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") - errors["base"] = error + error = await self._async_validate_credentials(self._reauth_email, password) + if error is None: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates=user_input, + ) + errors["base"] = error return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/aosmith/diagnostics.py b/homeassistant/components/aosmith/diagnostics.py index 96b049b904f..94726731f75 100644 --- a/homeassistant/components/aosmith/diagnostics.py +++ b/homeassistant/components/aosmith/diagnostics.py @@ -5,11 +5,9 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import AOSmithData -from .const import DOMAIN +from . 
import AOSmithConfigEntry TO_REDACT = { "address", @@ -31,10 +29,10 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: AOSmithConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data: AOSmithData = hass.data[DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data all_device_info = await data.client.get_all_device_info() return async_redact_data(all_device_info, TO_REDACT) diff --git a/homeassistant/components/aosmith/manifest.json b/homeassistant/components/aosmith/manifest.json index 21580b87286..a928a6677cb 100644 --- a/homeassistant/components/aosmith/manifest.json +++ b/homeassistant/components/aosmith/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/aosmith", "iot_class": "cloud_polling", - "requirements": ["py-aosmith==1.0.8"] + "requirements": ["py-aosmith==1.0.12"] } diff --git a/homeassistant/components/aosmith/sensor.py b/homeassistant/components/aosmith/sensor.py index e33c388af8b..b1c9852f647 100644 --- a/homeassistant/components/aosmith/sensor.py +++ b/homeassistant/components/aosmith/sensor.py @@ -3,7 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass -from py_aosmith.models import Device as AOSmithDevice, HotWaterStatus +from py_aosmith.models import Device as AOSmithDevice from homeassistant.components.sensor import ( SensorDeviceClass, @@ -11,13 +11,11 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfEnergy +from homeassistant.const import PERCENTAGE, UnitOfEnergy from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AOSmithData -from .const import DOMAIN +from . import AOSmithConfigEntry from .coordinator import AOSmithEnergyCoordinator, AOSmithStatusCoordinator from .entity import AOSmithEnergyEntity, AOSmithStatusEntity @@ -33,26 +31,19 @@ STATUS_ENTITY_DESCRIPTIONS: tuple[AOSmithStatusSensorEntityDescription, ...] = ( AOSmithStatusSensorEntityDescription( key="hot_water_availability", translation_key="hot_water_availability", - device_class=SensorDeviceClass.ENUM, - options=["low", "medium", "high"], - value_fn=lambda device: HOT_WATER_STATUS_MAP.get( - device.status.hot_water_status - ), + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda device: device.status.hot_water_status, ), ) -HOT_WATER_STATUS_MAP: dict[HotWaterStatus, str] = { - HotWaterStatus.LOW: "low", - HotWaterStatus.MEDIUM: "medium", - HotWaterStatus.HIGH: "high", -} - async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: AOSmithConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up A. O. 
Smith sensor platform.""" - data: AOSmithData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( AOSmithStatusSensorEntity(data.status_coordinator, description, junction_id) diff --git a/homeassistant/components/aosmith/strings.json b/homeassistant/components/aosmith/strings.json index 0ca4e2e9094..c88b9cab783 100644 --- a/homeassistant/components/aosmith/strings.json +++ b/homeassistant/components/aosmith/strings.json @@ -28,12 +28,7 @@ "entity": { "sensor": { "hot_water_availability": { - "name": "Hot water availability", - "state": { - "low": "Low", - "medium": "Medium", - "high": "High" - } + "name": "Hot water availability" }, "energy_usage": { "name": "Energy usage" diff --git a/homeassistant/components/aosmith/water_heater.py b/homeassistant/components/aosmith/water_heater.py index dceba13ba34..f3dc8b3413f 100644 --- a/homeassistant/components/aosmith/water_heater.py +++ b/homeassistant/components/aosmith/water_heater.py @@ -12,14 +12,12 @@ from homeassistant.components.water_heater import ( WaterHeaterEntity, WaterHeaterEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AOSmithData -from .const import DOMAIN +from . import AOSmithConfigEntry from .coordinator import AOSmithStatusCoordinator from .entity import AOSmithStatusEntity @@ -46,10 +44,12 @@ DEFAULT_OPERATION_MODE_PRIORITY = [ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: AOSmithConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up A. O. 
Smith water heater platform.""" - data: AOSmithData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( AOSmithWaterHeaterEntity(data.status_coordinator, junction_id) diff --git a/homeassistant/components/apache_kafka/__init__.py b/homeassistant/components/apache_kafka/__init__.py index 5d458262e28..68d3f58a63a 100644 --- a/homeassistant/components/apache_kafka/__init__.py +++ b/homeassistant/components/apache_kafka/__init__.py @@ -38,7 +38,7 @@ CONFIG_SCHEMA = vol.Schema( vol.Required(CONF_TOPIC): cv.string, vol.Optional(CONF_FILTER, default={}): FILTER_SCHEMA, vol.Optional(CONF_SECURITY_PROTOCOL, default="PLAINTEXT"): vol.In( - ["PLAINTEXT", "SASL_SSL"] + ["PLAINTEXT", "SSL", "SASL_SSL"] ), vol.Optional(CONF_USERNAME): cv.string, vol.Optional(CONF_PASSWORD): cv.string, @@ -53,7 +53,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Activate the Apache Kafka integration.""" conf = config[DOMAIN] - kafka = hass.data[DOMAIN] = KafkaManager( + kafka = KafkaManager( hass, conf[CONF_IP_ADDRESS], conf[CONF_PORT], @@ -94,7 +94,7 @@ class KafkaManager: port: int, topic: str, entities_filter: EntityFilter, - security_protocol: Literal["PLAINTEXT", "SASL_SSL"], + security_protocol: Literal["PLAINTEXT", "SSL", "SASL_SSL"], username: str | None, password: str | None, ) -> None: diff --git a/homeassistant/components/apache_kafka/manifest.json b/homeassistant/components/apache_kafka/manifest.json index f6593631bc0..05baaac32a2 100644 --- a/homeassistant/components/apache_kafka/manifest.json +++ b/homeassistant/components/apache_kafka/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/apache_kafka", "iot_class": "local_push", "loggers": ["aiokafka", "kafka_python"], + "quality_scale": "legacy", "requirements": ["aiokafka==0.10.0"] } diff --git a/homeassistant/components/apcupsd/__init__.py b/homeassistant/components/apcupsd/__init__.py index 7293a42f7e7..44edc5c151f 100644 --- a/homeassistant/components/apcupsd/__init__.py +++ b/homeassistant/components/apcupsd/__init__.py @@ -2,22 +2,22 @@ from __future__ import annotations -import logging from typing import Final from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN from .coordinator import APCUPSdCoordinator -_LOGGER = logging.getLogger(__name__) +type APCUPSdConfigEntry = ConfigEntry[APCUPSdCoordinator] PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR) -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: APCUPSdConfigEntry +) -> bool: """Use config values to set up a function enabling status retrieval.""" host, port = config_entry.data[CONF_HOST], config_entry.data[CONF_PORT] coordinator = APCUPSdCoordinator(hass, host, port) @@ -25,17 +25,13 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b await coordinator.async_config_entry_first_refresh() # Store the coordinator for later uses. - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][config_entry.entry_id] = coordinator + config_entry.runtime_data = coordinator # Forward the config entries to the supported platforms. 
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: APCUPSdConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok and DOMAIN in hass.data: - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/apcupsd/binary_sensor.py b/homeassistant/components/apcupsd/binary_sensor.py index 5f86ceb6eec..cd9e60f7ae4 100644 --- a/homeassistant/components/apcupsd/binary_sensor.py +++ b/homeassistant/components/apcupsd/binary_sensor.py @@ -2,24 +2,21 @@ from __future__ import annotations -import logging from typing import Final from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN +from . import APCUPSdConfigEntry from .coordinator import APCUPSdCoordinator PARALLEL_UPDATES = 0 -_LOGGER = logging.getLogger(__name__) _DESCRIPTION = BinarySensorEntityDescription( key="statflag", translation_key="online_status", @@ -30,11 +27,11 @@ _VALUE_ONLINE_MASK: Final = 0b1000 async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: APCUPSdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up an APCUPSd Online Status binary sensor.""" - coordinator: APCUPSdCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data # Do not create the binary sensor if APCUPSd does not provide STATFLAG field for us # to determine the online status. diff --git a/homeassistant/components/apcupsd/const.py b/homeassistant/components/apcupsd/const.py index 56bf229579d..974c860afb8 100644 --- a/homeassistant/components/apcupsd/const.py +++ b/homeassistant/components/apcupsd/const.py @@ -6,4 +6,4 @@ DOMAIN: Final = "apcupsd" CONNECTION_TIMEOUT: int = 10 # Field name of last self test retrieved from apcupsd. -LASTSTEST: Final = "laststest" +LAST_S_TEST: Final = "laststest" diff --git a/homeassistant/components/apcupsd/diagnostics.py b/homeassistant/components/apcupsd/diagnostics.py index d375a8bc248..fa0908f3144 100644 --- a/homeassistant/components/apcupsd/diagnostics.py +++ b/homeassistant/components/apcupsd/diagnostics.py @@ -5,19 +5,17 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import APCUPSdCoordinator, APCUPSdData +from . 
import APCUPSdConfigEntry TO_REDACT = {"SERIALNO", "HOSTNAME"} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: APCUPSdConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: APCUPSdCoordinator = hass.data[DOMAIN][entry.entry_id] - data: APCUPSdData = coordinator.data + coordinator = entry.runtime_data + data = coordinator.data return async_redact_data(data, TO_REDACT) diff --git a/homeassistant/components/apcupsd/manifest.json b/homeassistant/components/apcupsd/manifest.json index b20e0c8aacf..3713b74fff7 100644 --- a/homeassistant/components/apcupsd/manifest.json +++ b/homeassistant/components/apcupsd/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/apcupsd", "iot_class": "local_polling", "loggers": ["apcaccess"], - "quality_scale": "silver", "requirements": ["aioapcaccess==0.4.2"] } diff --git a/homeassistant/components/apcupsd/sensor.py b/homeassistant/components/apcupsd/sensor.py index f2b5943bdf0..9e0abcb1dd9 100644 --- a/homeassistant/components/apcupsd/sensor.py +++ b/homeassistant/components/apcupsd/sensor.py @@ -10,7 +10,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, UnitOfApparentPower, @@ -25,7 +24,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN, LASTSTEST +from . import APCUPSdConfigEntry +from .const import LAST_S_TEST from .coordinator import APCUPSdCoordinator PARALLEL_UPDATES = 0 @@ -156,8 +156,8 @@ SENSORS: dict[str, SensorEntityDescription] = { device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, ), - LASTSTEST: SensorEntityDescription( - key=LASTSTEST, + LAST_S_TEST: SensorEntityDescription( + key=LAST_S_TEST, translation_key="last_self_test", ), "lastxfer": SensorEntityDescription( @@ -406,11 +406,11 @@ INFERRED_UNITS = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: APCUPSdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the APCUPSd sensors from config entries.""" - coordinator: APCUPSdCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data # The resource keys in the data dict collected in the coordinator is in upper-case # by default, but we use lower cases throughout this integration. @@ -422,7 +422,7 @@ async def async_setup_entry( # periodical (or manual) self test since last daemon restart. It might not be available # when we set up the integration, and we do not know if it would ever be available. Here we # add it anyway and mark it as unknown initially. 
- for resource in available_resources | {LASTSTEST}: + for resource in available_resources | {LAST_S_TEST}: if resource not in SENSORS: _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper()) continue diff --git a/homeassistant/components/apple_tv/__init__.py b/homeassistant/components/apple_tv/__init__.py index 08372aa79ae..f4417134b37 100644 --- a/homeassistant/components/apple_tv/__init__.py +++ b/homeassistant/components/apple_tv/__init__.py @@ -32,14 +32,16 @@ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import CONF_CREDENTIALS, CONF_IDENTIFIERS, CONF_START_OFF, DOMAIN +from .const import ( + CONF_CREDENTIALS, + CONF_IDENTIFIERS, + CONF_START_OFF, + DOMAIN, + SIGNAL_CONNECTED, + SIGNAL_DISCONNECTED, +) _LOGGER = logging.getLogger(__name__) @@ -49,9 +51,6 @@ DEFAULT_NAME_HP = "HomePod" BACKOFF_TIME_LOWER_LIMIT = 15 # seconds BACKOFF_TIME_UPPER_LIMIT = 300 # Five minutes -SIGNAL_CONNECTED = "apple_tv_connected" -SIGNAL_DISCONNECTED = "apple_tv_disconnected" - PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE] AUTH_EXCEPTIONS = ( @@ -120,64 +119,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -class AppleTVEntity(Entity): - """Device that sends commands to an Apple TV.""" - - _attr_should_poll = False - _attr_has_entity_name = True - _attr_name = None - atv: AppleTVInterface | None = None - - def __init__(self, name: str, identifier: str, manager: AppleTVManager) -> None: - """Initialize device.""" - self.manager = manager - self._attr_unique_id = identifier - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, identifier)}, - name=name, - ) - - async def async_added_to_hass(self) -> None: - """Handle when an entity is about to be added to Home Assistant.""" - - @callback - def _async_connected(atv: AppleTVInterface) -> None: - """Handle that a connection was made to a device.""" - self.atv = atv - self.async_device_connected(atv) - self.async_write_ha_state() - - @callback - def _async_disconnected() -> None: - """Handle that a connection to a device was lost.""" - self.async_device_disconnected() - self.atv = None - self.async_write_ha_state() - - if self.manager.atv: - # ATV is already connected - _async_connected(self.manager.atv) - - self.async_on_remove( - async_dispatcher_connect( - self.hass, f"{SIGNAL_CONNECTED}_{self.unique_id}", _async_connected - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SIGNAL_DISCONNECTED}_{self.unique_id}", - _async_disconnected, - ) - ) - - def async_device_connected(self, atv: AppleTVInterface) -> None: - """Handle when connection is made to device.""" - - def async_device_disconnected(self) -> None: - """Handle when connection was lost to device.""" - - class AppleTVManager(DeviceListener): """Connection and power manager for an Apple TV. 
@@ -375,7 +316,7 @@ class AppleTVManager(DeviceListener): f"Protocol(s) {missing_protocols_str} not yet found for {name}," " waiting for discovery." ) - _LOGGER.info( + _LOGGER.debug( "Protocol(s) %s not yet found for %s, trying later", missing_protocols_str, name, @@ -394,7 +335,7 @@ class AppleTVManager(DeviceListener): self._connection_attempts = 0 if self._connection_was_lost: - _LOGGER.info( + _LOGGER.warning( 'Connection was re-established to device "%s"', self.config_entry.data[CONF_NAME], ) diff --git a/homeassistant/components/apple_tv/config_flow.py b/homeassistant/components/apple_tv/config_flow.py index 71c26244203..b0741cc9c61 100644 --- a/homeassistant/components/apple_tv/config_flow.py +++ b/homeassistant/components/apple_tv/config_flow.py @@ -8,7 +8,7 @@ from collections.abc import Awaitable, Callable, Mapping from ipaddress import ip_address import logging from random import randrange -from typing import Any +from typing import Any, Self from pyatv import exceptions, pair, scan from pyatv.const import DeviceModel, PairingRequirement, Protocol @@ -98,8 +98,11 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 scan_filter: str | None = None + all_identifiers: set[str] atv: BaseConfig | None = None atv_identifiers: list[str] | None = None + _host: str # host in zeroconf discovery info, should not be accessed by other flows + host: str | None = None # set by _async_aggregate_discoveries, for other flows protocol: Protocol | None = None pairing: PairingHandler | None = None protocols_to_pair: deque[Protocol] | None = None @@ -157,7 +160,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): "type": "Apple TV", } self.scan_filter = self.unique_id - self.context["identifier"] = self.unique_id return await self.async_step_restore_device() async def async_step_restore_device( @@ -192,7 +194,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): self.device_identifier, raise_on_progress=False ) assert self.atv - self.context["all_identifiers"] = self.atv.all_identifiers + self.all_identifiers = set(self.atv.all_identifiers) return await self.async_step_confirm() return self.async_show_form( @@ -207,7 +209,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): """Handle device found via zeroconf.""" if discovery_info.ip_address.version == 6: return self.async_abort(reason="ipv6_not_supported") - host = discovery_info.host + self._host = host = discovery_info.host service_type = discovery_info.type[:-1] # Remove leading . name = discovery_info.name.replace(f".{service_type}.", "") properties = discovery_info.properties @@ -255,7 +257,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): # as two separate flows. # # To solve this, all identifiers are stored as - # "all_identifiers" in the flow context. When a new service is discovered, the + # "all_identifiers" in the flow. When a new service is discovered, the # code below will check these identifiers for all active flows and abort if a # match is found. Before aborting, the original flow is updated with any # potentially new identifiers. In the example above, when service C is @@ -277,32 +279,32 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): self._async_check_and_update_in_progress(host, unique_id) # Host must only be set AFTER checking and updating in progress # flows or we will have a race condition where no flows move forward. 
- self.context[CONF_ADDRESS] = host + self.host = host @callback def _async_check_and_update_in_progress(self, host: str, unique_id: str) -> None: """Check for in-progress flows and update them with identifiers if needed.""" - for flow in self._async_in_progress(include_uninitialized=True): - context = flow["context"] - if ( - context.get("source") != SOURCE_ZEROCONF - or context.get(CONF_ADDRESS) != host - ): - continue - if ( - "all_identifiers" in context - and unique_id not in context["all_identifiers"] - ): - # Add potentially new identifiers from this device to the existing flow - context["all_identifiers"].append(unique_id) + if self.hass.config_entries.flow.async_has_matching_flow(self): raise AbortFlow("already_in_progress") + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + if ( + other_flow.context.get("source") != SOURCE_ZEROCONF + or other_flow.host != self._host + ): + return False + if self.unique_id is not None: + # Add potentially new identifiers from this device to the existing flow + other_flow.all_identifiers.add(self.unique_id) + return True + async def async_found_zeroconf_device( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Handle device found after Zeroconf discovery.""" assert self.atv - self.context["all_identifiers"] = self.atv.all_identifiers + self.all_identifiers = set(self.atv.all_identifiers) # Also abort if an integration with this identifier already exists await self.async_set_unique_id(self.device_identifier) # but be sure to update the address if its changed so the scanner @@ -310,7 +312,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured( updates={CONF_ADDRESS: str(self.atv.address)} ) - self.context["identifier"] = self.unique_id return await self.async_step_confirm() async def async_find_device_wrapper( @@ -390,7 +391,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN): """Handle user-confirmation of discovered node.""" assert self.atv if user_input is not None: - expected_identifier_count = len(self.context["all_identifiers"]) + expected_identifier_count = len(self.all_identifiers) # If number of services found during device scan mismatch number of # identifiers collected during Zeroconf discovery, then trigger a new scan # with hopes of finding all services. diff --git a/homeassistant/components/apple_tv/const.py b/homeassistant/components/apple_tv/const.py index 5fb169ec259..dd215337f1c 100644 --- a/homeassistant/components/apple_tv/const.py +++ b/homeassistant/components/apple_tv/const.py @@ -6,3 +6,6 @@ CONF_CREDENTIALS = "credentials" CONF_IDENTIFIERS = "identifiers" CONF_START_OFF = "start_off" + +SIGNAL_CONNECTED = "apple_tv_connected" +SIGNAL_DISCONNECTED = "apple_tv_disconnected" diff --git a/homeassistant/components/apple_tv/entity.py b/homeassistant/components/apple_tv/entity.py new file mode 100644 index 00000000000..ad8364e2927 --- /dev/null +++ b/homeassistant/components/apple_tv/entity.py @@ -0,0 +1,71 @@ +"""The Apple TV integration.""" + +from __future__ import annotations + +from pyatv.interface import AppleTV as AppleTVInterface + +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from . 
import AppleTVManager +from .const import DOMAIN, SIGNAL_CONNECTED, SIGNAL_DISCONNECTED + + +class AppleTVEntity(Entity): + """Device that sends commands to an Apple TV.""" + + _attr_should_poll = False + _attr_has_entity_name = True + _attr_name = None + atv: AppleTVInterface | None = None + + def __init__(self, name: str, identifier: str, manager: AppleTVManager) -> None: + """Initialize device.""" + self.manager = manager + self._attr_unique_id = identifier + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, identifier)}, + name=name, + ) + + async def async_added_to_hass(self) -> None: + """Handle when an entity is about to be added to Home Assistant.""" + + @callback + def _async_connected(atv: AppleTVInterface) -> None: + """Handle that a connection was made to a device.""" + self.atv = atv + self.async_device_connected(atv) + self.async_write_ha_state() + + @callback + def _async_disconnected() -> None: + """Handle that a connection to a device was lost.""" + self.async_device_disconnected() + self.atv = None + self.async_write_ha_state() + + if self.manager.atv: + # ATV is already connected + _async_connected(self.manager.atv) + + self.async_on_remove( + async_dispatcher_connect( + self.hass, f"{SIGNAL_CONNECTED}_{self.unique_id}", _async_connected + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{SIGNAL_DISCONNECTED}_{self.unique_id}", + _async_disconnected, + ) + ) + + def async_device_connected(self, atv: AppleTVInterface) -> None: + """Handle when connection is made to device.""" + + def async_device_disconnected(self) -> None: + """Handle when connection was lost to device.""" diff --git a/homeassistant/components/apple_tv/manifest.json b/homeassistant/components/apple_tv/manifest.json index 9a053829516..b10a14af32b 100644 --- a/homeassistant/components/apple_tv/manifest.json +++ b/homeassistant/components/apple_tv/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/apple_tv", "iot_class": "local_push", "loggers": ["pyatv", "srptools"], - "requirements": ["pyatv==0.15.0"], + "requirements": ["pyatv==0.16.0"], "zeroconf": [ "_mediaremotetv._tcp.local.", "_companion-link._tcp.local.", diff --git a/homeassistant/components/apple_tv/media_player.py b/homeassistant/components/apple_tv/media_player.py index 9fb9dee46e1..c6b71c64b4f 100644 --- a/homeassistant/components/apple_tv/media_player.py +++ b/homeassistant/components/apple_tv/media_player.py @@ -42,8 +42,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . import AppleTvConfigEntry, AppleTVEntity, AppleTVManager +from . import AppleTvConfigEntry, AppleTVManager from .browse_media import build_app_list +from .entity import AppleTVEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/apple_tv/remote.py b/homeassistant/components/apple_tv/remote.py index 8950a46388d..7f2c9f1b591 100644 --- a/homeassistant/components/apple_tv/remote.py +++ b/homeassistant/components/apple_tv/remote.py @@ -19,7 +19,8 @@ from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AppleTvConfigEntry, AppleTVEntity +from . 
import AppleTvConfigEntry +from .entity import AppleTVEntity _LOGGER = logging.getLogger(__name__) @@ -85,7 +86,7 @@ class AppleTVRemote(AppleTVEntity, RemoteEntity): if not attr_value: raise ValueError("Command not found. Exiting sequence") - _LOGGER.info("Sending command %s", single_command) + _LOGGER.debug("Sending command %s", single_command) if hold_secs >= 1: await attr_value(action=InputAction.Hold) diff --git a/homeassistant/components/application_credentials/__init__.py b/homeassistant/components/application_credentials/__init__.py index 22deb124859..50b272cc1fa 100644 --- a/homeassistant/components/application_credentials/__init__.py +++ b/homeassistant/components/application_credentials/__init__.py @@ -15,7 +15,7 @@ from typing import Any, Protocol import voluptuous as vol from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_CLIENT_ID, @@ -36,6 +36,7 @@ from homeassistant.loader import ( async_get_integration, ) from homeassistant.util import slugify +from homeassistant.util.hass_dict import HassKey __all__ = ["ClientCredential", "AuthorizationServer", "async_import_client_credential"] @@ -45,7 +46,7 @@ DOMAIN = "application_credentials" STORAGE_KEY = DOMAIN STORAGE_VERSION = 1 -DATA_STORAGE = "storage" +DATA_COMPONENT: HassKey[ApplicationCredentialsStorageCollection] = HassKey(DOMAIN) CONF_AUTH_DOMAIN = "auth_domain" DEFAULT_IMPORT_NAME = "Import from configuration.yaml" @@ -150,7 +151,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: id_manager, ) await storage_collection.async_load() - hass.data[DOMAIN][DATA_STORAGE] = storage_collection + hass.data[DATA_COMPONENT] = storage_collection collection.DictStorageCollectionWebsocket( storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS @@ -175,7 +176,6 @@ async def async_import_client_credential( """Import an existing credential from configuration.yaml.""" if DOMAIN not in hass.data: raise ValueError("Integration 'application_credentials' not setup") - storage_collection = hass.data[DOMAIN][DATA_STORAGE] item = { CONF_DOMAIN: domain, CONF_CLIENT_ID: credential.client_id, @@ -183,7 +183,7 @@ async def async_import_client_credential( CONF_AUTH_DOMAIN: auth_domain if auth_domain else domain, } item[CONF_NAME] = credential.name if credential.name else DEFAULT_IMPORT_NAME - await storage_collection.async_import_item(item) + await hass.data[DATA_COMPONENT].async_import_item(item) class AuthImplementation(config_entry_oauth2_flow.LocalOAuth2Implementation): @@ -222,8 +222,7 @@ async def _async_provide_implementation( if not platform: return [] - storage_collection = hass.data[DOMAIN][DATA_STORAGE] - credentials = storage_collection.async_client_credentials(domain) + credentials = hass.data[DATA_COMPONENT].async_client_credentials(domain) if hasattr(platform, "async_get_auth_implementation"): return [ await platform.async_get_auth_implementation(hass, auth_domain, credential) @@ -246,8 +245,7 @@ async def _async_config_entry_app_credentials( ): return None - storage_collection = hass.data[DOMAIN][DATA_STORAGE] - for item in storage_collection.async_items(): + for item in hass.data[DATA_COMPONENT].async_items(): item_id = item[CONF_ID] if ( item[CONF_DOMAIN] == config_entry.domain diff --git a/homeassistant/components/apprise/manifest.json 
b/homeassistant/components/apprise/manifest.json index 4e838a5e25b..4f3c4d7ef4e 100644 --- a/homeassistant/components/apprise/manifest.json +++ b/homeassistant/components/apprise/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/apprise", "iot_class": "cloud_push", "loggers": ["apprise"], - "requirements": ["apprise==1.8.0"] + "quality_scale": "legacy", + "requirements": ["apprise==1.9.0"] } diff --git a/homeassistant/components/aprilaire/__init__.py b/homeassistant/components/aprilaire/__init__.py index fd7fd745c5d..90293798ed3 100644 --- a/homeassistant/components/aprilaire/__init__.py +++ b/homeassistant/components/aprilaire/__init__.py @@ -6,14 +6,12 @@ import logging from pyaprilaire.const import Attribute -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.device_registry import format_mac -from .const import DOMAIN -from .coordinator import AprilaireCoordinator +from .coordinator import AprilaireConfigEntry, AprilaireCoordinator PLATFORMS: list[Platform] = [ Platform.CLIMATE, @@ -25,7 +23,7 @@ PLATFORMS: list[Platform] = [ _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AprilaireConfigEntry) -> bool: """Set up a config entry for Aprilaire.""" host = entry.data[CONF_HOST] @@ -34,15 +32,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = AprilaireCoordinator(hass, entry.unique_id, host, port) await coordinator.start_listen() - hass.data.setdefault(DOMAIN, {})[entry.unique_id] = coordinator - - async def ready_callback(ready: bool): + async def ready_callback(ready: bool) -> None: if ready: mac_address = format_mac(coordinator.data[Attribute.MAC_ADDRESS]) if mac_address != entry.unique_id: raise ConfigEntryAuthFailed("Invalid MAC address") + entry.runtime_data = coordinator + entry.async_on_unload(coordinator.stop_listen) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) async def _async_close(_: Event) -> None: @@ -63,12 +62,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AprilaireConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - if unload_ok: - coordinator: AprilaireCoordinator = hass.data[DOMAIN].pop(entry.unique_id) - coordinator.stop_listen() - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/aprilaire/climate.py b/homeassistant/components/aprilaire/climate.py index 2876d621aef..194453046e6 100644 --- a/homeassistant/components/aprilaire/climate.py +++ b/homeassistant/components/aprilaire/climate.py @@ -16,19 +16,17 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import 
( - DOMAIN, FAN_CIRCULATE, PRESET_PERMANENT_HOLD, PRESET_TEMPORARY_HOLD, PRESET_VACATION, ) -from .coordinator import AprilaireCoordinator +from .coordinator import AprilaireConfigEntry from .entity import BaseAprilaireEntity HVAC_MODE_MAP = { @@ -64,14 +62,14 @@ FAN_MODE_MAP = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AprilaireConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Add climates for passed config_entry in HA.""" - coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id] - - async_add_entities([AprilaireClimate(coordinator, config_entry.unique_id)]) + async_add_entities( + [AprilaireClimate(config_entry.runtime_data, config_entry.unique_id)] + ) class AprilaireClimate(BaseAprilaireEntity, ClimateEntity): diff --git a/homeassistant/components/aprilaire/coordinator.py b/homeassistant/components/aprilaire/coordinator.py index 7674ff070a6..737fd768140 100644 --- a/homeassistant/components/aprilaire/coordinator.py +++ b/homeassistant/components/aprilaire/coordinator.py @@ -9,6 +9,7 @@ from typing import Any import pyaprilaire.client from pyaprilaire.const import MODELS, Attribute, FunctionalDomain +from homeassistant.config_entries import ConfigEntry from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback import homeassistant.helpers.device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo @@ -22,6 +23,8 @@ WAIT_TIMEOUT = 30 _LOGGER = logging.getLogger(__name__) +type AprilaireConfigEntry = ConfigEntry[AprilaireCoordinator] + class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol): """Coordinator for interacting with the thermostat.""" @@ -112,7 +115,7 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol): self.client.stop_listen() async def wait_for_ready( - self, ready_callback: Callable[[bool], Awaitable[bool]] + self, ready_callback: Callable[[bool], Awaitable[None]] ) -> bool: """Wait for the client to be ready.""" diff --git a/homeassistant/components/aprilaire/humidifier.py b/homeassistant/components/aprilaire/humidifier.py index 62c8a184be2..254cc0ac789 100644 --- a/homeassistant/components/aprilaire/humidifier.py +++ b/homeassistant/components/aprilaire/humidifier.py @@ -14,13 +14,11 @@ from homeassistant.components.humidifier import ( HumidifierEntity, HumidifierEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import DOMAIN -from .coordinator import AprilaireCoordinator +from .coordinator import AprilaireConfigEntry, AprilaireCoordinator from .entity import BaseAprilaireEntity HUMIDIFIER_ACTION_MAP: dict[StateType, HumidifierAction] = { @@ -41,12 +39,12 @@ DEHUMIDIFIER_ACTION_MAP: dict[StateType, HumidifierAction] = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AprilaireConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Aprilaire humidifier devices.""" - coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id] + coordinator = config_entry.runtime_data assert config_entry.unique_id is not None diff --git a/homeassistant/components/aprilaire/select.py b/homeassistant/components/aprilaire/select.py index 504453f7463..d8f6137f53d 100644 --- a/homeassistant/components/aprilaire/select.py +++ 
b/homeassistant/components/aprilaire/select.py @@ -9,12 +9,10 @@ from typing import cast from pyaprilaire.const import Attribute from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import AprilaireCoordinator +from .coordinator import AprilaireConfigEntry, AprilaireCoordinator from .entity import BaseAprilaireEntity AIR_CLEANING_EVENT_MAP = {0: "off", 3: "event_clean", 4: "allergies"} @@ -25,12 +23,12 @@ FRESH_AIR_MODE_MAP = {0: "off", 1: "automatic"} async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AprilaireConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Aprilaire select devices.""" - coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id] + coordinator = config_entry.runtime_data assert config_entry.unique_id is not None diff --git a/homeassistant/components/aprilaire/sensor.py b/homeassistant/components/aprilaire/sensor.py index 249c1b3850f..e1909746364 100644 --- a/homeassistant/components/aprilaire/sensor.py +++ b/homeassistant/components/aprilaire/sensor.py @@ -13,14 +13,12 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import DOMAIN -from .coordinator import AprilaireCoordinator +from .coordinator import AprilaireConfigEntry, AprilaireCoordinator from .entity import BaseAprilaireEntity DEHUMIDIFICATION_STATUS_MAP: dict[StateType, str] = { @@ -76,12 +74,12 @@ def get_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AprilaireConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Aprilaire sensor devices.""" - coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id] + coordinator = config_entry.runtime_data assert config_entry.unique_id is not None diff --git a/homeassistant/components/aprs/device_tracker.py b/homeassistant/components/aprs/device_tracker.py index 67d0736e526..fc23fc5e436 100644 --- a/homeassistant/components/aprs/device_tracker.py +++ b/homeassistant/components/aprs/device_tracker.py @@ -159,7 +159,7 @@ class AprsListenerThread(threading.Thread): self.ais.set_filter(self.server_filter) try: - _LOGGER.info( + _LOGGER.debug( "Opening connection to %s with callsign %s", self.host, self.callsign ) self.ais.connect() @@ -170,7 +170,7 @@ class AprsListenerThread(threading.Thread): except (AprsConnectionError, LoginError) as err: self.start_complete(False, str(err)) except OSError: - _LOGGER.info( + _LOGGER.debug( "Closing connection to %s with callsign %s", self.host, self.callsign ) diff --git a/homeassistant/components/aprs/manifest.json b/homeassistant/components/aprs/manifest.json index 63826f5a385..7518405f1ec 100644 --- a/homeassistant/components/aprs/manifest.json +++ b/homeassistant/components/aprs/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aprs", "iot_class": "cloud_push", "loggers": ["aprslib", "geographiclib", "geopy"], + "quality_scale": "legacy", 
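The aprilaire hunks above are one instance of a pattern repeated throughout this changeset (aranet, arve, aseko_pool_live): the coordinator moves out of hass.data[DOMAIN] into entry.runtime_data behind a typed ConfigEntry alias, and async_unload_entry loses its manual cleanup. A minimal, self-contained sketch of that shape follows; the Example* names and the one-minute update interval are illustrative, not taken from the patch.

```python
"""Sketch of the entry.runtime_data pattern; Example* names are placeholders."""

from __future__ import annotations

import logging
from datetime import timedelta

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.SENSOR]


class ExampleCoordinator(DataUpdateCoordinator[dict]):
    """Placeholder coordinator standing in for AprilaireCoordinator and friends."""

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize with an arbitrary one-minute poll interval."""
        super().__init__(
            hass, _LOGGER, name="example", update_interval=timedelta(minutes=1)
        )

    async def _async_update_data(self) -> dict:
        """Return fresh data; a real coordinator would call its client library here."""
        return {}


# The alias lets setup and platform code annotate entries so runtime_data is typed.
type ExampleConfigEntry = ConfigEntry[ExampleCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Store the coordinator on the entry instead of in hass.data[DOMAIN]."""
    coordinator = ExampleCoordinator(hass)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Unload platforms; runtime_data is released with the entry, so no pop() is needed."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
```

A platform's async_setup_entry then simply reads entry.runtime_data, as the aprilaire climate, humidifier, select, and sensor hunks do.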
"requirements": ["aprslib==0.7.2", "geopy==2.3.0"] } diff --git a/homeassistant/components/aps/__init__.py b/homeassistant/components/aps/__init__.py new file mode 100644 index 00000000000..7af88840958 --- /dev/null +++ b/homeassistant/components/aps/__init__.py @@ -0,0 +1 @@ +"""Virtual integration: Arizona Public Service (APS).""" diff --git a/homeassistant/components/aps/manifest.json b/homeassistant/components/aps/manifest.json new file mode 100644 index 00000000000..347fd74a7bf --- /dev/null +++ b/homeassistant/components/aps/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "aps", + "name": "Arizona Public Service (APS)", + "integration_type": "virtual", + "supported_by": "opower" +} diff --git a/homeassistant/components/apsystems/__init__.py b/homeassistant/components/apsystems/__init__.py index 372ce52e049..c437f5584db 100644 --- a/homeassistant/components/apsystems/__init__.py +++ b/homeassistant/components/apsystems/__init__.py @@ -38,6 +38,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ApSystemsConfigEntry) -> ip_address=entry.data[CONF_IP_ADDRESS], port=entry.data.get(CONF_PORT, DEFAULT_PORT), timeout=8, + enable_debounce=True, ) coordinator = ApSystemsDataCoordinator(hass, api) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/apsystems/coordinator.py b/homeassistant/components/apsystems/coordinator.py index 6ba4f01dbc8..e56cb826840 100644 --- a/homeassistant/components/apsystems/coordinator.py +++ b/homeassistant/components/apsystems/coordinator.py @@ -5,12 +5,17 @@ from __future__ import annotations from dataclasses import dataclass from datetime import timedelta -from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData +from APsystemsEZ1 import ( + APsystemsEZ1M, + InverterReturnedError, + ReturnAlarmInfo, + ReturnOutputData, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import LOGGER +from .const import DOMAIN, LOGGER @dataclass @@ -36,12 +41,18 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]): async def _async_setup(self) -> None: try: - max_power = (await self.api.get_device_info()).maxPower + device_info = await self.api.get_device_info() except (ConnectionError, TimeoutError): raise UpdateFailed from None - self.api.max_power = max_power + self.api.max_power = device_info.maxPower + self.api.min_power = device_info.minPower async def _async_update_data(self) -> ApSystemsSensorData: - output_data = await self.api.get_output_data() - alarm_info = await self.api.get_alarm_info() + try: + output_data = await self.api.get_output_data() + alarm_info = await self.api.get_alarm_info() + except InverterReturnedError: + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="inverter_error" + ) from None return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info) diff --git a/homeassistant/components/apsystems/manifest.json b/homeassistant/components/apsystems/manifest.json index 9376d21ba28..a58530b05e2 100644 --- a/homeassistant/components/apsystems/manifest.json +++ b/homeassistant/components/apsystems/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/apsystems", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["apsystems-ez1==2.2.1"] + "requirements": ["apsystems-ez1==2.4.0"] } diff --git a/homeassistant/components/apsystems/number.py b/homeassistant/components/apsystems/number.py 
index 51e7130587f..01e991f5188 100644 --- a/homeassistant/components/apsystems/number.py +++ b/homeassistant/components/apsystems/number.py @@ -26,7 +26,6 @@ async def async_setup_entry( class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): """Base sensor to be used with description.""" - _attr_native_min_value = 30 _attr_native_step = 1 _attr_device_class = NumberDeviceClass.POWER _attr_mode = NumberMode.BOX @@ -42,6 +41,7 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): self._api = data.coordinator.api self._attr_unique_id = f"{data.device_id}_output_limit" self._attr_native_max_value = data.coordinator.api.max_power + self._attr_native_min_value = data.coordinator.api.min_power async def async_update(self) -> None: """Set the state with the value fetched from the inverter.""" diff --git a/homeassistant/components/apsystems/sensor.py b/homeassistant/components/apsystems/sensor.py index afeb9d071ab..f87bc0f3f26 100644 --- a/homeassistant/components/apsystems/sensor.py +++ b/homeassistant/components/apsystems/sensor.py @@ -12,12 +12,11 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, SensorStateClass, - StateType, ) from homeassistant.const import UnitOfEnergy, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import DiscoveryInfoType +from homeassistant.helpers.typing import DiscoveryInfoType, StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import ApSystemsConfigEntry, ApSystemsData diff --git a/homeassistant/components/apsystems/strings.json b/homeassistant/components/apsystems/strings.json index e02f86c2730..b3a10ca49a7 100644 --- a/homeassistant/components/apsystems/strings.json +++ b/homeassistant/components/apsystems/strings.json @@ -72,5 +72,10 @@ "name": "Inverter status" } } + }, + "exceptions": { + "inverter_error": { + "message": "Inverter returned an error" + } } } diff --git a/homeassistant/components/apsystems/switch.py b/homeassistant/components/apsystems/switch.py index 93a21ec9f05..73914845445 100644 --- a/homeassistant/components/apsystems/switch.py +++ b/homeassistant/components/apsystems/switch.py @@ -5,6 +5,7 @@ from __future__ import annotations from typing import Any from aiohttp.client_exceptions import ClientConnectionError +from APsystemsEZ1 import InverterReturnedError from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity from homeassistant.core import HomeAssistant @@ -40,7 +41,7 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity): """Update switch status and availability.""" try: status = await self._api.get_device_power_status() - except (TimeoutError, ClientConnectionError): + except (TimeoutError, ClientConnectionError, InverterReturnedError): self._attr_available = False else: self._attr_available = True diff --git a/homeassistant/components/aquacell/config_flow.py b/homeassistant/components/aquacell/config_flow.py index 332cd16e749..1ee89035d93 100644 --- a/homeassistant/components/aquacell/config_flow.py +++ b/homeassistant/components/aquacell/config_flow.py @@ -56,7 +56,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): refresh_token = await api.authenticate( user_input[CONF_EMAIL], user_input[CONF_PASSWORD] ) - except ApiException: + except (ApiException, TimeoutError): errors["base"] = "cannot_connect" except AuthenticationFailed: errors["base"] = "invalid_auth" diff --git 
a/homeassistant/components/aquacell/coordinator.py b/homeassistant/components/aquacell/coordinator.py index dd5dfcd2d0d..ee4afb451b9 100644 --- a/homeassistant/components/aquacell/coordinator.py +++ b/homeassistant/components/aquacell/coordinator.py @@ -56,7 +56,7 @@ class AquacellCoordinator(DataUpdateCoordinator[dict[str, Softener]]): so entities can quickly look up their data. """ - async with asyncio.timeout(10): + async with asyncio.timeout(30): # Check if the refresh token is expired expiry_time = ( self.refresh_token_creation_time @@ -72,7 +72,7 @@ class AquacellCoordinator(DataUpdateCoordinator[dict[str, Softener]]): softeners = await self.aquacell_api.get_all_softeners() except AuthenticationFailed as err: raise ConfigEntryError from err - except AquacellApiException as err: + except (AquacellApiException, TimeoutError) as err: raise UpdateFailed(f"Error communicating with API: {err}") from err return {softener.dsn: softener for softener in softeners} diff --git a/homeassistant/components/aqualogic/manifest.json b/homeassistant/components/aqualogic/manifest.json index 783e4c8c204..cc807e4bb19 100644 --- a/homeassistant/components/aqualogic/manifest.json +++ b/homeassistant/components/aqualogic/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aqualogic", "iot_class": "local_push", "loggers": ["aqualogic"], + "quality_scale": "legacy", "requirements": ["aqualogic==2.6"] } diff --git a/homeassistant/components/aquostv/manifest.json b/homeassistant/components/aquostv/manifest.json index 1bac2bdfb5f..6fc1092d33c 100644 --- a/homeassistant/components/aquostv/manifest.json +++ b/homeassistant/components/aquostv/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aquostv", "iot_class": "local_polling", "loggers": ["sharp_aquos_rc"], + "quality_scale": "legacy", "requirements": ["sharp_aquos_rc==0.3.2"] } diff --git a/homeassistant/components/aranet/__init__.py b/homeassistant/components/aranet/__init__.py index 3a2bc266653..81b3dae04de 100644 --- a/homeassistant/components/aranet/__init__.py +++ b/homeassistant/components/aranet/__init__.py @@ -15,12 +15,14 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN - PLATFORMS: list[Platform] = [Platform.SENSOR] _LOGGER = logging.getLogger(__name__) +type AranetConfigEntry = ConfigEntry[ + PassiveBluetoothProcessorCoordinator[Aranet4Advertisement] +] + def _service_info_to_adv( service_info: BluetoothServiceInfoBleak, @@ -28,30 +30,25 @@ def _service_info_to_adv( return Aranet4Advertisement(service_info.device, service_info.advertisement) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AranetConfigEntry) -> bool: """Set up Aranet from a config entry.""" address = entry.unique_id assert address is not None - coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = ( - PassiveBluetoothProcessorCoordinator( - hass, - _LOGGER, - address=address, - mode=BluetoothScanningMode.PASSIVE, - update_method=_service_info_to_adv, - ) + coordinator = PassiveBluetoothProcessorCoordinator( + hass, + _LOGGER, + address=address, + mode=BluetoothScanningMode.PASSIVE, + update_method=_service_info_to_adv, ) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload( - coordinator.async_start() - ) # only 
start after all platforms have had a chance to subscribe + # only start after all platforms have had a chance to subscribe + entry.async_on_unload(coordinator.async_start()) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AranetConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/aranet/sensor.py b/homeassistant/components/aranet/sensor.py index 1dc4b9f956e..d7fbd0e4b3b 100644 --- a/homeassistant/components/aranet/sensor.py +++ b/homeassistant/components/aranet/sensor.py @@ -8,12 +8,10 @@ from typing import Any from aranet4.client import Aranet4Advertisement from bleak.backends.device import BLEDevice -from homeassistant import config_entries from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataProcessor, PassiveBluetoothDataUpdate, PassiveBluetoothEntityKey, - PassiveBluetoothProcessorCoordinator, PassiveBluetoothProcessorEntity, ) from homeassistant.components.sensor import ( @@ -38,7 +36,8 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import ARANET_MANUFACTURER_NAME, DOMAIN +from . import AranetConfigEntry +from .const import ARANET_MANUFACTURER_NAME @dataclass(frozen=True) @@ -174,20 +173,17 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: AranetConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Aranet sensors.""" - coordinator: PassiveBluetoothProcessorCoordinator[Aranet4Advertisement] = hass.data[ - DOMAIN - ][entry.entry_id] processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update) entry.async_on_unload( processor.async_add_entities_listener( Aranet4BluetoothSensorEntity, async_add_entities ) ) - entry.async_on_unload(coordinator.async_register_processor(processor)) + entry.async_on_unload(entry.runtime_data.async_register_processor(processor)) class Aranet4BluetoothSensorEntity( diff --git a/homeassistant/components/arcam_fmj/config_flow.py b/homeassistant/components/arcam_fmj/config_flow.py index 514445ea604..6c037591688 100644 --- a/homeassistant/components/arcam_fmj/config_flow.py +++ b/homeassistant/components/arcam_fmj/config_flow.py @@ -22,6 +22,9 @@ class ArcamFmjFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + host: str + port: int + async def _async_set_unique_id_and_update( self, host: str, port: int, uuid: str ) -> None: @@ -74,16 +77,11 @@ class ArcamFmjFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle user-confirmation of discovered node.""" - context = self.context - placeholders = { - "host": context[CONF_HOST], - } - context["title_placeholders"] = placeholders + placeholders = {"host": self.host} + self.context["title_placeholders"] = placeholders if user_input is not None: - return await self._async_check_and_create( - context[CONF_HOST], context[CONF_PORT] - ) + return await self._async_check_and_create(self.host, self.port) return self.async_show_form( step_id="confirm", 
description_placeholders=placeholders @@ -101,7 +99,6 @@ class ArcamFmjFlowHandler(ConfigFlow, domain=DOMAIN): await self._async_set_unique_id_and_update(host, port, uuid) - context = self.context - context[CONF_HOST] = host - context[CONF_PORT] = DEFAULT_PORT + self.host = host + self.port = DEFAULT_PORT return await self.async_step_confirm() diff --git a/homeassistant/components/arcam_fmj/media_player.py b/homeassistant/components/arcam_fmj/media_player.py index 00b46a7024a..7a133777a0a 100644 --- a/homeassistant/components/arcam_fmj/media_player.py +++ b/homeassistant/components/arcam_fmj/media_player.py @@ -11,6 +11,7 @@ from arcam.fmj import ConnectionFailed, SourceCodes from arcam.fmj.state import State from homeassistant.components.media_player import ( + BrowseError, BrowseMedia, MediaClass, MediaPlayerEntity, @@ -18,7 +19,6 @@ from homeassistant.components.media_player import ( MediaPlayerState, MediaType, ) -from homeassistant.components.media_player.errors import BrowseError from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError diff --git a/homeassistant/components/arest/manifest.json b/homeassistant/components/arest/manifest.json index 53732d15064..be43b3aafc9 100644 --- a/homeassistant/components/arest/manifest.json +++ b/homeassistant/components/arest/manifest.json @@ -3,5 +3,6 @@ "name": "aREST", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/arest", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/arris_tg2492lg/device_tracker.py b/homeassistant/components/arris_tg2492lg/device_tracker.py index 58daead34f2..c3650587690 100644 --- a/homeassistant/components/arris_tg2492lg/device_tracker.py +++ b/homeassistant/components/arris_tg2492lg/device_tracker.py @@ -7,7 +7,7 @@ from arris_tg2492lg import ConnectBox, Device import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -31,7 +31,7 @@ async def async_get_scanner( hass: HomeAssistant, config: ConfigType ) -> ArrisDeviceScanner | None: """Return the Arris device scanner if successful.""" - conf = config[DOMAIN] + conf = config[DEVICE_TRACKER_DOMAIN] url = f"http://{conf[CONF_HOST]}" websession = async_get_clientsession(hass) connect_box = ConnectBox(websession, url, conf[CONF_PASSWORD]) diff --git a/homeassistant/components/arris_tg2492lg/manifest.json b/homeassistant/components/arris_tg2492lg/manifest.json index fa7673b4276..98778de5f2a 100644 --- a/homeassistant/components/arris_tg2492lg/manifest.json +++ b/homeassistant/components/arris_tg2492lg/manifest.json @@ -2,10 +2,10 @@ "domain": "arris_tg2492lg", "name": "Arris TG2492LG", "codeowners": ["@vanbalken"], - "dependencies": [], "documentation": "https://www.home-assistant.io/integrations/arris_tg2492lg", "integration_type": "hub", "iot_class": "local_polling", "loggers": ["arris_tg2492lg"], + "quality_scale": "legacy", "requirements": ["arris-tg2492lg==2.2.0"] } diff --git a/homeassistant/components/aruba/device_tracker.py b/homeassistant/components/aruba/device_tracker.py index 4959ff7ef03..ef622ef9826 100644 --- a/homeassistant/components/aruba/device_tracker.py +++ b/homeassistant/components/aruba/device_tracker.py @@ -10,7 +10,7 @@ import pexpect import voluptuous as vol from homeassistant.components.device_tracker 
import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -38,7 +38,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> ArubaDeviceScanner | None: """Validate the configuration and return a Aruba scanner.""" - scanner = ArubaDeviceScanner(config[DOMAIN]) + scanner = ArubaDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None diff --git a/homeassistant/components/aruba/manifest.json b/homeassistant/components/aruba/manifest.json index 0d1fabf51b8..c98dda754cd 100644 --- a/homeassistant/components/aruba/manifest.json +++ b/homeassistant/components/aruba/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aruba", "iot_class": "local_polling", "loggers": ["pexpect", "ptyprocess"], + "quality_scale": "legacy", "requirements": ["pexpect==4.6.0"] } diff --git a/homeassistant/components/arve/__init__.py b/homeassistant/components/arve/__init__.py index 91e38da4c60..a1b4aa7042e 100644 --- a/homeassistant/components/arve/__init__.py +++ b/homeassistant/components/arve/__init__.py @@ -2,33 +2,28 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import ArveCoordinator +from .coordinator import ArveConfigEntry, ArveCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool: """Set up Arve from a config entry.""" coordinator = ArveCoordinator(hass) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/arve/coordinator.py b/homeassistant/components/arve/coordinator.py index b053e30336b..f02220e28e2 100644 --- a/homeassistant/components/arve/coordinator.py +++ b/homeassistant/components/arve/coordinator.py @@ -21,11 +21,13 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN, LOGGER +type ArveConfigEntry = ConfigEntry[ArveCoordinator] + class ArveCoordinator(DataUpdateCoordinator[ArveSensProData]): """Arve coordinator.""" - config_entry: ConfigEntry + config_entry: ArveConfigEntry devices: ArveDevices def __init__(self, hass: HomeAssistant) -> None: diff --git a/homeassistant/components/arve/sensor.py b/homeassistant/components/arve/sensor.py index f95b26b0451..64d9f6f8874 100644 --- a/homeassistant/components/arve/sensor.py +++ b/homeassistant/components/arve/sensor.py @@ -11,7 +11,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( 
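The arcam_fmj config-flow hunks a few hunks back stop storing the discovered host and port in self.context and keep them as typed attributes on the flow instead, which is what the new host: str and port: int annotations are for. A condensed sketch of that flow shape, assuming a generic discovery step and illustrative DOMAIN and DEFAULT_PORT values:

```python
"""Sketch of a discovery-confirm flow using typed flow attributes; names are illustrative."""

from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PORT

DOMAIN = "example"
DEFAULT_PORT = 50000


class ExampleFlowHandler(ConfigFlow, domain=DOMAIN):
    """Handle a discovery-then-confirm flow."""

    VERSION = 1

    # Declared as class-level annotations so mypy knows the types once discovery sets them.
    host: str
    port: int

    async def async_step_discovery_example(self, host: str) -> ConfigFlowResult:
        """Stand-in for the real SSDP step; it only records what the confirm step needs."""
        self.host = host
        self.port = DEFAULT_PORT
        return await self.async_step_confirm()

    async def async_step_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Ask the user to confirm the discovered device."""
        self.context["title_placeholders"] = {"host": self.host}
        if user_input is not None:
            return self.async_create_entry(
                title=self.host, data={CONF_HOST: self.host, CONF_PORT: self.port}
            )
        return self.async_show_form(
            step_id="confirm", description_placeholders={"host": self.host}
        )
```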
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONCENTRATION_PARTS_PER_MILLION, @@ -21,8 +20,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import ArveCoordinator +from .coordinator import ArveConfigEntry from .entity import ArveDeviceEntity @@ -85,10 +83,10 @@ SENSORS: tuple[ArveDeviceEntityDescription, ...] = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: ArveConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up Arve device based on a config entry.""" - coordinator: ArveCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( ArveDevice(coordinator, description, sn) diff --git a/homeassistant/components/arwn/manifest.json b/homeassistant/components/arwn/manifest.json index 15eb656e974..8cabb045b64 100644 --- a/homeassistant/components/arwn/manifest.json +++ b/homeassistant/components/arwn/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/arwn", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/aseko_pool_live/__init__.py b/homeassistant/components/aseko_pool_live/__init__.py index 5773b3eb5b9..52d74398818 100644 --- a/homeassistant/components/aseko_pool_live/__init__.py +++ b/homeassistant/components/aseko_pool_live/__init__.py @@ -4,58 +4,43 @@ from __future__ import annotations import logging -from aioaseko import APIUnavailable, InvalidAuthCredentials, MobileAccount +from aioaseko import Aseko, AsekoNotLoggedIn -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.exceptions import ConfigEntryAuthFailed -from .const import DOMAIN -from .coordinator import AsekoDataUpdateCoordinator +from .coordinator import AsekoConfigEntry, AsekoDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) PLATFORMS: list[str] = [Platform.BINARY_SENSOR, Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AsekoConfigEntry) -> bool: """Set up Aseko Pool Live from a config entry.""" - account = MobileAccount( - async_get_clientsession(hass), - username=entry.data[CONF_EMAIL], - password=entry.data[CONF_PASSWORD], - ) + aseko = Aseko(entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD]) try: - units = await account.get_units() - except InvalidAuthCredentials as err: + await aseko.login() + except AsekoNotLoggedIn as err: raise ConfigEntryAuthFailed from err - except APIUnavailable as err: - raise ConfigEntryNotReady from err - - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = [] - - for unit in units: - coordinator = AsekoDataUpdateCoordinator(hass, unit) - await coordinator.async_config_entry_first_refresh() - hass.data[DOMAIN][entry.entry_id].append((unit, coordinator)) + coordinator = AsekoDataUpdateCoordinator(hass, aseko) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator await 
hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AsekoConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, config_entry: AsekoConfigEntry +) -> bool: """Migrate old entry.""" _LOGGER.debug("Migrating from version %s", config_entry.version) diff --git a/homeassistant/components/aseko_pool_live/binary_sensor.py b/homeassistant/components/aseko_pool_live/binary_sensor.py index 79953565769..c8cc31dc795 100644 --- a/homeassistant/components/aseko_pool_live/binary_sensor.py +++ b/homeassistant/components/aseko_pool_live/binary_sensor.py @@ -8,16 +8,13 @@ from dataclasses import dataclass from aioaseko import Unit from homeassistant.components.binary_sensor import ( - BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import AsekoDataUpdateCoordinator +from .coordinator import AsekoConfigEntry from .entity import AsekoEntity @@ -25,63 +22,40 @@ from .entity import AsekoEntity class AsekoBinarySensorEntityDescription(BinarySensorEntityDescription): """Describes an Aseko binary sensor entity.""" - value_fn: Callable[[Unit], bool] + value_fn: Callable[[Unit], bool | None] -UNIT_BINARY_SENSORS: tuple[AsekoBinarySensorEntityDescription, ...] = ( +BINARY_SENSORS: tuple[AsekoBinarySensorEntityDescription, ...] 
= ( AsekoBinarySensorEntityDescription( key="water_flow", - translation_key="water_flow", - value_fn=lambda unit: unit.water_flow, - ), - AsekoBinarySensorEntityDescription( - key="has_alarm", - translation_key="alarm", - value_fn=lambda unit: unit.has_alarm, - device_class=BinarySensorDeviceClass.SAFETY, - ), - AsekoBinarySensorEntityDescription( - key="has_error", - translation_key="error", - value_fn=lambda unit: unit.has_error, - device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="water_flow_to_probes", + value_fn=lambda unit: unit.water_flow_to_probes, ), ) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AsekoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Aseko Pool Live binary sensors.""" - data: list[tuple[Unit, AsekoDataUpdateCoordinator]] = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data + units = coordinator.data.values() async_add_entities( - AsekoUnitBinarySensorEntity(unit, coordinator, description) - for unit, coordinator in data - for description in UNIT_BINARY_SENSORS + AsekoBinarySensorEntity(unit, coordinator, description) + for description in BINARY_SENSORS + for unit in units + if description.value_fn(unit) is not None ) -class AsekoUnitBinarySensorEntity(AsekoEntity, BinarySensorEntity): - """Representation of a unit water flow binary sensor entity.""" +class AsekoBinarySensorEntity(AsekoEntity, BinarySensorEntity): + """Representation of an Aseko binary sensor entity.""" entity_description: AsekoBinarySensorEntityDescription - def __init__( - self, - unit: Unit, - coordinator: AsekoDataUpdateCoordinator, - entity_description: AsekoBinarySensorEntityDescription, - ) -> None: - """Initialize the unit binary sensor.""" - super().__init__(unit, coordinator) - self.entity_description = entity_description - self._attr_unique_id = f"{self._unit.serial_number}_{entity_description.key}" - @property - def is_on(self) -> bool: + def is_on(self) -> bool | None: """Return the state of the sensor.""" - return self.entity_description.value_fn(self._unit) + return self.entity_description.value_fn(self.unit) diff --git a/homeassistant/components/aseko_pool_live/config_flow.py b/homeassistant/components/aseko_pool_live/config_flow.py index cd2f0e4ac7f..e93eb803d62 100644 --- a/homeassistant/components/aseko_pool_live/config_flow.py +++ b/homeassistant/components/aseko_pool_live/config_flow.py @@ -6,12 +6,11 @@ from collections.abc import Mapping import logging from typing import Any -from aioaseko import APIUnavailable, InvalidAuthCredentials, WebAccount +from aioaseko import Aseko, AsekoAPIError, AsekoInvalidCredentials import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_UNIQUE_ID -from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN @@ -30,19 +29,14 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): } ) - reauth_entry: ConfigEntry | None = None - - async def get_account_info(self, email: str, password: str) -> dict: + async def get_account_info(self, email: str, password: str) -> dict[str, Any]: """Get account info from the mobile API and the web API.""" - session = async_get_clientsession(self.hass) - - web_account = WebAccount(session, email, password) - web_account_info = await web_account.login() - 
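The aseko_pool_live binary-sensor rewrite above, and the sensor rewrite further down, both hinge on frozen entity descriptions that carry a value_fn, with entities created only for units that actually report that value. A condensed sketch, where ExampleUnit stands in for aioaseko.Unit:

```python
"""Sketch of the value_fn entity-description pattern; ExampleUnit is a placeholder."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from homeassistant.components.binary_sensor import BinarySensorEntityDescription


@dataclass
class ExampleUnit:
    """Stand-in for the library object (e.g. aioaseko.Unit)."""

    water_flow_to_probes: bool | None = None


@dataclass(frozen=True, kw_only=True)
class ExampleBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Entity description whose value_fn pulls the reading off the unit."""

    value_fn: Callable[[ExampleUnit], bool | None]


BINARY_SENSORS: tuple[ExampleBinarySensorEntityDescription, ...] = (
    ExampleBinarySensorEntityDescription(
        key="water_flow",
        translation_key="water_flow_to_probes",
        value_fn=lambda unit: unit.water_flow_to_probes,
    ),
)

# At setup time, entities are only created for units that actually report the value:
#   async_add_entities(
#       ExampleBinarySensorEntity(unit, coordinator, description)
#       for description in BINARY_SENSORS
#       for unit in coordinator.data.values()
#       if description.value_fn(unit) is not None
#   )
```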
+ aseko = Aseko(email, password) + user = await aseko.login() return { CONF_EMAIL: email, CONF_PASSWORD: password, - CONF_UNIQUE_ID: web_account_info.user_id, + CONF_UNIQUE_ID: user.user_id, } async def async_step_user( @@ -50,7 +44,6 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle the initial step.""" - self.reauth_entry = None errors = {} if user_input is not None: @@ -58,9 +51,9 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): info = await self.get_account_info( user_input[CONF_EMAIL], user_input[CONF_PASSWORD] ) - except APIUnavailable: + except AsekoAPIError: errors["base"] = "cannot_connect" - except InvalidAuthCredentials: + except AsekoInvalidCredentials: errors["base"] = "invalid_auth" except Exception: _LOGGER.exception("Unexpected exception") @@ -77,19 +70,18 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): async def async_store_credentials(self, info: dict[str, Any]) -> ConfigFlowResult: """Store validated credentials.""" - if self.reauth_entry: - self.hass.config_entries.async_update_entry( - self.reauth_entry, + await self.async_set_unique_id(info[CONF_UNIQUE_ID]) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + self._get_reauth_entry(), title=info[CONF_EMAIL], data={ CONF_EMAIL: info[CONF_EMAIL], CONF_PASSWORD: info[CONF_PASSWORD], }, ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - await self.async_set_unique_id(info[CONF_UNIQUE_ID]) self._abort_if_unique_id_configured() return self.async_create_entry( @@ -101,18 +93,13 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - - return await self.async_step_reauth_confirm(user_input) + return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( - self, user_input: Mapping | None = None + self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" @@ -122,9 +109,9 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): info = await self.get_account_info( user_input[CONF_EMAIL], user_input[CONF_PASSWORD] ) - except APIUnavailable: + except AsekoAPIError: errors["base"] = "cannot_connect" - except InvalidAuthCredentials: + except AsekoInvalidCredentials: errors["base"] = "invalid_auth" except Exception: _LOGGER.exception("Unexpected exception") diff --git a/homeassistant/components/aseko_pool_live/coordinator.py b/homeassistant/components/aseko_pool_live/coordinator.py index a7f2d5ad5ac..96893912361 100644 --- a/homeassistant/components/aseko_pool_live/coordinator.py +++ b/homeassistant/components/aseko_pool_live/coordinator.py @@ -5,34 +5,34 @@ from __future__ import annotations from datetime import timedelta import logging -from aioaseko import Unit, Variable +from aioaseko import Aseko, Unit +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from .const import DOMAIN + _LOGGER = logging.getLogger(__name__) +type AsekoConfigEntry = ConfigEntry[AsekoDataUpdateCoordinator] -class AsekoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, 
Variable]]): + +class AsekoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Unit]]): """Class to manage fetching Aseko unit data from single endpoint.""" - def __init__(self, hass: HomeAssistant, unit: Unit) -> None: + def __init__(self, hass: HomeAssistant, aseko: Aseko) -> None: """Initialize global Aseko unit data updater.""" - self._unit = unit - - if self._unit.name: - name = self._unit.name - else: - name = f"{self._unit.type}-{self._unit.serial_number}" + self._aseko = aseko super().__init__( hass, _LOGGER, - name=name, + name=DOMAIN, update_interval=timedelta(minutes=2), ) - async def _async_update_data(self) -> dict[str, Variable]: + async def _async_update_data(self) -> dict[str, Unit]: """Fetch unit data.""" - await self._unit.get_state() - return {variable.type: variable for variable in self._unit.variables} + units = await self._aseko.get_units() + return {unit.serial_number: unit for unit in units} diff --git a/homeassistant/components/aseko_pool_live/entity.py b/homeassistant/components/aseko_pool_live/entity.py index 6f0979da2e7..038e0a175d3 100644 --- a/homeassistant/components/aseko_pool_live/entity.py +++ b/homeassistant/components/aseko_pool_live/entity.py @@ -3,6 +3,7 @@ from aioaseko import Unit from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN @@ -14,20 +15,44 @@ class AsekoEntity(CoordinatorEntity[AsekoDataUpdateCoordinator]): _attr_has_entity_name = True - def __init__(self, unit: Unit, coordinator: AsekoDataUpdateCoordinator) -> None: + def __init__( + self, + unit: Unit, + coordinator: AsekoDataUpdateCoordinator, + description: EntityDescription, + ) -> None: """Initialize the aseko entity.""" super().__init__(coordinator) + self.entity_description = description self._unit = unit - - if self._unit.type == "Remote": - self._device_model = "ASIN Pool" - else: - self._device_model = f"ASIN AQUA {self._unit.type}" - self._device_name = self._unit.name if self._unit.name else self._device_model - + self._attr_unique_id = f"{self.unit.serial_number}{self.entity_description.key}" self._attr_device_info = DeviceInfo( - name=self._device_name, - identifiers={(DOMAIN, str(self._unit.serial_number))}, - manufacturer="Aseko", - model=self._device_model, + identifiers={(DOMAIN, self.unit.serial_number)}, + serial_number=self.unit.serial_number, + name=unit.name or unit.serial_number, + manufacturer=( + self.unit.brand_name.primary + if self.unit.brand_name is not None + else None + ), + model=( + self.unit.brand_name.secondary + if self.unit.brand_name is not None + else None + ), + configuration_url=f"https://aseko.cloud/unit/{self.unit.serial_number}", + ) + + @property + def unit(self) -> Unit: + """Return the aseko unit.""" + return self.coordinator.data[self._unit.serial_number] + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return ( + super().available + and self.unit.serial_number in self.coordinator.data + and self.unit.online ) diff --git a/homeassistant/components/aseko_pool_live/icons.json b/homeassistant/components/aseko_pool_live/icons.json index 2f8a77fc417..f7672734cee 100644 --- a/homeassistant/components/aseko_pool_live/icons.json +++ b/homeassistant/components/aseko_pool_live/icons.json @@ -1,16 +1,28 @@ { "entity": { "binary_sensor": { - "water_flow": { + "water_flow_to_probes": { "default": "mdi:waves-arrow-right" } }, "sensor": { + 
"air_temperature": { + "default": "mdi:thermometer-lines" + }, + "electrolyzer": { + "default": "mdi:lightning-bolt" + }, "free_chlorine": { - "default": "mdi:flask" + "default": "mdi:pool" + }, + "redox": { + "default": "mdi:pool" + }, + "salinity": { + "default": "mdi:pool" }, "water_temperature": { - "default": "mdi:coolant-temperature" + "default": "mdi:pool-thermometer" } } } diff --git a/homeassistant/components/aseko_pool_live/manifest.json b/homeassistant/components/aseko_pool_live/manifest.json index a340408ad71..628a9732188 100644 --- a/homeassistant/components/aseko_pool_live/manifest.json +++ b/homeassistant/components/aseko_pool_live/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aseko_pool_live", "iot_class": "cloud_polling", "loggers": ["aioaseko"], - "requirements": ["aioaseko==0.2.0"] + "requirements": ["aioaseko==1.0.0"] } diff --git a/homeassistant/components/aseko_pool_live/sensor.py b/homeassistant/components/aseko_pool_live/sensor.py index a4ddea9ad89..3fe7cdd5272 100644 --- a/homeassistant/components/aseko_pool_live/sensor.py +++ b/homeassistant/components/aseko_pool_live/sensor.py @@ -2,77 +2,109 @@ from __future__ import annotations -from aioaseko import Unit, Variable +from collections.abc import Callable +from dataclasses import dataclass + +from aioaseko import Unit from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, + SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry +from homeassistant.const import UnitOfElectricPotential, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType -from .const import DOMAIN -from .coordinator import AsekoDataUpdateCoordinator +from .coordinator import AsekoConfigEntry from .entity import AsekoEntity +@dataclass(frozen=True, kw_only=True) +class AsekoSensorEntityDescription(SensorEntityDescription): + """Describes an Aseko sensor entity.""" + + value_fn: Callable[[Unit], StateType] + + +SENSORS: list[AsekoSensorEntityDescription] = [ + AsekoSensorEntityDescription( + key="airTemp", + translation_key="air_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda unit: unit.air_temperature, + ), + AsekoSensorEntityDescription( + key="electrolyzer", + translation_key="electrolyzer", + native_unit_of_measurement="g/h", + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda unit: unit.electrolyzer, + ), + AsekoSensorEntityDescription( + key="free_chlorine", + translation_key="free_chlorine", + native_unit_of_measurement="mg/l", + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda unit: unit.cl_free, + ), + AsekoSensorEntityDescription( + key="ph", + device_class=SensorDeviceClass.PH, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda unit: unit.ph, + ), + AsekoSensorEntityDescription( + key="rx", + translation_key="redox", + native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda unit: unit.redox, + ), + AsekoSensorEntityDescription( + key="salinity", + translation_key="salinity", + native_unit_of_measurement="kg/m³", + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda unit: unit.salinity, + ), + AsekoSensorEntityDescription( + key="waterTemp", + 
translation_key="water_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda unit: unit.water_temperature, + ), +] + + async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AsekoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Aseko Pool Live sensors.""" - data: list[tuple[Unit, AsekoDataUpdateCoordinator]] = hass.data[DOMAIN][ - config_entry.entry_id - ] - + coordinator = config_entry.runtime_data + units = coordinator.data.values() async_add_entities( - VariableSensorEntity(unit, variable, coordinator) - for unit, coordinator in data - for variable in unit.variables + AsekoSensorEntity(unit, coordinator, description) + for description in SENSORS + for unit in units + if description.value_fn(unit) is not None ) -class VariableSensorEntity(AsekoEntity, SensorEntity): - """Representation of a unit variable sensor entity.""" +class AsekoSensorEntity(AsekoEntity, SensorEntity): + """Representation of an Aseko unit sensor entity.""" - _attr_state_class = SensorStateClass.MEASUREMENT - - def __init__( - self, unit: Unit, variable: Variable, coordinator: AsekoDataUpdateCoordinator - ) -> None: - """Initialize the variable sensor.""" - super().__init__(unit, coordinator) - self._variable = variable - - translation_key = { - "Air temp.": "air_temperature", - "Cl free": "free_chlorine", - "Water temp.": "water_temperature", - }.get(self._variable.name) - if translation_key is not None: - self._attr_translation_key = translation_key - else: - self._attr_name = self._variable.name - - self._attr_unique_id = f"{self._unit.serial_number}{self._variable.type}" - self._attr_native_unit_of_measurement = self._variable.unit - - self._attr_icon = { - "rx": "mdi:test-tube", - "waterLevel": "mdi:waves", - }.get(self._variable.type) - - self._attr_device_class = { - "airTemp": SensorDeviceClass.TEMPERATURE, - "waterTemp": SensorDeviceClass.TEMPERATURE, - "ph": SensorDeviceClass.PH, - }.get(self._variable.type) + entity_description: AsekoSensorEntityDescription @property - def native_value(self) -> int | None: + def native_value(self) -> StateType: """Return the state of the sensor.""" - variable = self.coordinator.data[self._variable.type] - return variable.current_value + return self.entity_description.value_fn(self.unit) diff --git a/homeassistant/components/aseko_pool_live/strings.json b/homeassistant/components/aseko_pool_live/strings.json index 7f77b9ec69b..2805b60cdfd 100644 --- a/homeassistant/components/aseko_pool_live/strings.json +++ b/homeassistant/components/aseko_pool_live/strings.json @@ -21,25 +21,32 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unique_id_mismatch": "The user identifier does not match the previous identifier" } }, "entity": { "binary_sensor": { - "water_flow": { - "name": "Water flow" - }, - "alarm": { - "name": "Alarm" + "water_flow_to_probes": { + "name": "Water flow to probes" } }, "sensor": { "air_temperature": { "name": "Air temperature" }, + "electrolyzer": { + "name": "Electrolyzer" + }, "free_chlorine": { "name": "Free chlorine" }, + "redox": { + "name": "Redox potential" + }, + "salinity": { + "name": "Salinity" + }, "water_temperature": { "name": "Water 
temperature" } diff --git a/homeassistant/components/assist_pipeline/__init__.py b/homeassistant/components/assist_pipeline/__init__.py index 8ee053162b0..ec6d8a646b6 100644 --- a/homeassistant/components/assist_pipeline/__init__.py +++ b/homeassistant/components/assist_pipeline/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import AsyncIterable +from typing import Any import voluptuous as vol @@ -16,6 +17,7 @@ from .const import ( DATA_LAST_WAKE_UP, DOMAIN, EVENT_RECORDING, + OPTION_PREFERRED, SAMPLE_CHANNELS, SAMPLE_RATE, SAMPLE_WIDTH, @@ -57,6 +59,7 @@ __all__ = ( "PipelineNotFound", "WakeWordSettings", "EVENT_RECORDING", + "OPTION_PREFERRED", "SAMPLES_PER_CHUNK", "SAMPLE_RATE", "SAMPLE_WIDTH", @@ -99,7 +102,7 @@ async def async_pipeline_from_audio_stream( wake_word_phrase: str | None = None, pipeline_id: str | None = None, conversation_id: str | None = None, - tts_audio_output: str | None = None, + tts_audio_output: str | dict[str, Any] | None = None, wake_word_settings: WakeWordSettings | None = None, audio_settings: AudioSettings | None = None, device_id: str | None = None, diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py index ff2b122187a..1fabc7790e7 100644 --- a/homeassistant/components/assist_pipeline/audio_enhancer.py +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -22,8 +22,8 @@ class EnhancedAudioChunk: timestamp_ms: int """Timestamp relative to start of audio stream (milliseconds)""" - is_speech: bool | None - """True if audio chunk likely contains speech, False if not, None if unknown""" + speech_probability: float | None + """Probability that audio chunk contains speech (0-1), None if unknown""" class AudioEnhancer(ABC): @@ -70,27 +70,27 @@ class MicroVadSpeexEnhancer(AudioEnhancer): ) self.vad: MicroVad | None = None - self.threshold = 0.5 if self.is_vad_enabled: self.vad = MicroVad() - _LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold) + _LOGGER.debug("Initialized microVAD") def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk: """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples.""" - is_speech: bool | None = None + speech_probability: float | None = None assert len(audio) == BYTES_PER_CHUNK if self.vad is not None: # Run VAD - speech_prob = self.vad.Process10ms(audio) - is_speech = speech_prob > self.threshold + speech_probability = self.vad.Process10ms(audio) if self.audio_processor is not None: # Run noise suppression and auto gain audio = self.audio_processor.Process10ms(audio).audio return EnhancedAudioChunk( - audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech + audio=audio, + timestamp_ms=timestamp_ms, + speech_probability=speech_probability, ) diff --git a/homeassistant/components/assist_pipeline/const.py b/homeassistant/components/assist_pipeline/const.py index f7306b89a54..300cb5aad2a 100644 --- a/homeassistant/components/assist_pipeline/const.py +++ b/homeassistant/components/assist_pipeline/const.py @@ -22,3 +22,5 @@ SAMPLE_CHANNELS = 1 # mono MS_PER_CHUNK = 10 SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK) # 10 ms @ 16Khz BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS # 16-bit + +OPTION_PREFERRED = "preferred" diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index c22b7391d33..3a59d8f87f1 100644 --- 
a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -1,11 +1,12 @@ { "domain": "assist_pipeline", "name": "Assist pipeline", + "after_dependencies": ["repairs"], "codeowners": ["@balloob", "@synesthesiam"], "dependencies": ["conversation", "stt", "tts", "wake_word"], "documentation": "https://www.home-assistant.io/integrations/assist_pipeline", "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.0"] + "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"] } diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 342f811c99b..7dda24c4023 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -16,6 +16,7 @@ import time from typing import Any, Literal, cast import wave +import hass_nabucasa import voluptuous as vol from homeassistant.components import ( @@ -26,11 +27,13 @@ from homeassistant.components import ( wake_word, websocket_api, ) -from homeassistant.components.tts.media_source import ( +from homeassistant.components.tts import ( generate_media_source_id as tts_generate_media_source_id, ) +from homeassistant.const import MATCH_ALL from homeassistant.core import Context, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import intent from homeassistant.helpers.collection import ( CHANGE_UPDATED, CollectionError, @@ -109,6 +112,7 @@ PIPELINE_FIELDS: VolDictType = { vol.Required("tts_voice"): vol.Any(str, None), vol.Required("wake_word_entity"): vol.Any(str, None), vol.Required("wake_word_id"): vol.Any(str, None), + vol.Optional("prefer_local_intents"): bool, } STORED_PIPELINE_RUNS = 10 @@ -322,6 +326,7 @@ async def async_update_pipeline( tts_voice: str | None | UndefinedType = UNDEFINED, wake_word_entity: str | None | UndefinedType = UNDEFINED, wake_word_id: str | None | UndefinedType = UNDEFINED, + prefer_local_intents: bool | UndefinedType = UNDEFINED, ) -> None: """Update a pipeline.""" pipeline_data: PipelineData = hass.data[DOMAIN] @@ -345,6 +350,7 @@ async def async_update_pipeline( ("tts_voice", tts_voice), ("wake_word_entity", wake_word_entity), ("wake_word_id", wake_word_id), + ("prefer_local_intents", prefer_local_intents), ) if val is not UNDEFINED } @@ -398,6 +404,7 @@ class Pipeline: tts_voice: str | None wake_word_entity: str | None wake_word_id: str | None + prefer_local_intents: bool = False id: str = field(default_factory=ulid_util.ulid_now) @@ -421,6 +428,7 @@ class Pipeline: tts_voice=data["tts_voice"], wake_word_entity=data["wake_word_entity"], wake_word_id=data["wake_word_id"], + prefer_local_intents=data.get("prefer_local_intents", False), ) def to_json(self) -> dict[str, Any]: @@ -438,6 +446,7 @@ class Pipeline: "tts_voice": self.tts_voice, "wake_word_entity": self.wake_word_entity, "wake_word_id": self.wake_word_id, + "prefer_local_intents": self.prefer_local_intents, } @@ -504,7 +513,7 @@ class AudioSettings: is_vad_enabled: bool = True """True if VAD is used to determine the end of the voice command.""" - silence_seconds: float = 0.5 + silence_seconds: float = 0.7 """Seconds of silence after voice command has ended.""" def __post_init__(self) -> None: @@ -538,7 +547,7 @@ class PipelineRun: language: str = None # type: ignore[assignment] runner_data: Any | None = None intent_agent: str | None = 
None - tts_audio_output: str | None = None + tts_audio_output: str | dict[str, Any] | None = None wake_word_settings: WakeWordSettings | None = None audio_settings: AudioSettings = field(default_factory=AudioSettings) @@ -780,7 +789,9 @@ class PipelineRun: # speaking the voice command. audio_chunks_for_stt.extend( EnhancedAudioChunk( - audio=chunk_ts[0], timestamp_ms=chunk_ts[1], is_speech=False + audio=chunk_ts[0], + timestamp_ms=chunk_ts[1], + speech_probability=None, ) for chunk_ts in result.queued_audio ) @@ -827,7 +838,7 @@ class PipelineRun: if wake_word_vad is not None: chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate - if not wake_word_vad.process(chunk_seconds, chunk.is_speech): + if not wake_word_vad.process(chunk_seconds, chunk.speech_probability): raise WakeWordTimeoutError( code="wake-word-timeout", message="Wake word was not detected" ) @@ -906,6 +917,13 @@ class PipelineRun: metadata, self._speech_to_text_stream(audio_stream=stream, stt_vad=stt_vad), ) + except (asyncio.CancelledError, TimeoutError): + raise # expected + except hass_nabucasa.auth.Unauthenticated as src_error: + raise SpeechToTextError( + code="cloud-auth-failed", + message="Home Assistant Cloud authentication failed", + ) from src_error except Exception as src_error: _LOGGER.exception("Unexpected error during speech-to-text") raise SpeechToTextError( @@ -953,7 +971,7 @@ class PipelineRun: if stt_vad is not None: chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate - if not stt_vad.process(chunk_seconds, chunk.is_speech): + if not stt_vad.process(chunk_seconds, chunk.speech_probability): # Silence detected at the end of voice command self.process_event( PipelineEvent( @@ -998,29 +1016,80 @@ class PipelineRun: if self.intent_agent is None: raise RuntimeError("Recognize intent was not prepared") + if self.pipeline.conversation_language == MATCH_ALL: + # LLMs support all languages ('*') so use pipeline language for + # intent fallback. + input_language = self.pipeline.language + else: + input_language = self.pipeline.conversation_language + self.process_event( PipelineEvent( PipelineEventType.INTENT_START, { "engine": self.intent_agent, - "language": self.pipeline.conversation_language, + "language": input_language, "intent_input": intent_input, "conversation_id": conversation_id, "device_id": device_id, + "prefer_local_intents": self.pipeline.prefer_local_intents, }, ) ) try: - conversation_result = await conversation.async_converse( - hass=self.hass, + user_input = conversation.ConversationInput( text=intent_input, + context=self.context, conversation_id=conversation_id, device_id=device_id, - context=self.context, - language=self.pipeline.conversation_language, + language=input_language, agent_id=self.intent_agent, ) + processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT + + conversation_result: conversation.ConversationResult | None = None + if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT: + # Sentence triggers override conversation agent + if ( + trigger_response_text + := await conversation.async_handle_sentence_triggers( + self.hass, user_input + ) + ) is not None: + # Sentence trigger matched + trigger_response = intent.IntentResponse( + self.pipeline.conversation_language + ) + trigger_response.async_set_speech(trigger_response_text) + conversation_result = conversation.ConversationResult( + response=trigger_response, + conversation_id=user_input.conversation_id, + ) + # Try local intents first, if preferred. 
+ elif self.pipeline.prefer_local_intents and ( + intent_response := await conversation.async_handle_intents( + self.hass, user_input + ) + ): + # Local intent matched + conversation_result = conversation.ConversationResult( + response=intent_response, + conversation_id=user_input.conversation_id, + ) + processed_locally = True + + if conversation_result is None: + # Fall back to pipeline conversation agent + conversation_result = await conversation.async_converse( + hass=self.hass, + text=user_input.text, + conversation_id=user_input.conversation_id, + device_id=user_input.device_id, + context=user_input.context, + language=user_input.language, + agent_id=user_input.agent_id, + ) except Exception as src_error: _LOGGER.exception("Unexpected error during intent recognition") raise IntentRecognitionError( @@ -1033,7 +1102,10 @@ class PipelineRun: self.process_event( PipelineEvent( PipelineEventType.INTENT_END, - {"intent_output": conversation_result.as_dict()}, + { + "processed_locally": processed_locally, + "intent_output": conversation_result.as_dict(), + }, ) ) @@ -1052,12 +1124,15 @@ class PipelineRun: if self.pipeline.tts_voice is not None: tts_options[tts.ATTR_VOICE] = self.pipeline.tts_voice - if self.tts_audio_output is not None: + if isinstance(self.tts_audio_output, dict): + tts_options.update(self.tts_audio_output) + elif isinstance(self.tts_audio_output, str): tts_options[tts.ATTR_PREFERRED_FORMAT] = self.tts_audio_output if self.tts_audio_output == "wav": # 16 Khz, 16-bit mono tts_options[tts.ATTR_PREFERRED_SAMPLE_RATE] = SAMPLE_RATE tts_options[tts.ATTR_PREFERRED_SAMPLE_CHANNELS] = SAMPLE_CHANNELS + tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = SAMPLE_WIDTH try: options_supported = await tts.async_support_options( @@ -1216,7 +1291,7 @@ class PipelineRun: yield EnhancedAudioChunk( audio=sub_chunk, timestamp_ms=timestamp_ms, - is_speech=None, # no VAD + speech_probability=None, # no VAD ) timestamp_ms += MS_PER_CHUNK diff --git a/homeassistant/components/assist_pipeline/repair_flows.py b/homeassistant/components/assist_pipeline/repair_flows.py new file mode 100644 index 00000000000..d3d9633bd06 --- /dev/null +++ b/homeassistant/components/assist_pipeline/repair_flows.py @@ -0,0 +1,55 @@ +"""Repairs implementation for the cloud integration.""" + +from __future__ import annotations + +from typing import cast + +import voluptuous as vol + +from homeassistant.components.assist_satellite import DOMAIN as ASSIST_SATELLITE_DOMAIN +from homeassistant.components.repairs import RepairsFlow +from homeassistant.data_entry_flow import FlowResult +from homeassistant.helpers import entity_registry as er + +REQUIRED_KEYS = ("entity_id", "entity_uuid", "integration_name") + + +class AssistInProgressDeprecatedRepairFlow(RepairsFlow): + """Handler for an issue fixing flow.""" + + def __init__(self, data: dict[str, str | int | float | None] | None) -> None: + """Initialize.""" + if not data or any(key not in data for key in REQUIRED_KEYS): + raise ValueError("Missing data") + self._data = data + + async def async_step_init(self, _: None = None) -> FlowResult: + """Handle the first step of a fix flow.""" + return await self.async_step_confirm_disable_entity() + + async def async_step_confirm_disable_entity( + self, + user_input: dict[str, str] | None = None, + ) -> FlowResult: + """Handle the confirm step of a fix flow.""" + if user_input is not None: + entity_registry = er.async_get(self.hass) + entity_entry = entity_registry.async_get( + cast(str, self._data["entity_uuid"]) + ) + if entity_entry: 
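# Disable rather than remove: the deprecated assist_in_progress entity stays
# in the registry (disabled_by=USER), so the user can re-enable it later from
# the entity registry if they still need it.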
+ entity_registry.async_update_entity( + entity_entry.entity_id, disabled_by=er.RegistryEntryDisabler.USER + ) + return self.async_create_entry(data={}) + + description_placeholders: dict[str, str] = { + "assist_satellite_domain": ASSIST_SATELLITE_DOMAIN, + "entity_id": cast(str, self._data["entity_id"]), + "integration_name": cast(str, self._data["integration_name"]), + } + return self.async_show_form( + step_id="confirm_disable_entity", + data_schema=vol.Schema({}), + description_placeholders=description_placeholders, + ) diff --git a/homeassistant/components/assist_pipeline/select.py b/homeassistant/components/assist_pipeline/select.py index 5d011424e6e..c7e4846aad7 100644 --- a/homeassistant/components/assist_pipeline/select.py +++ b/homeassistant/components/assist_pipeline/select.py @@ -9,12 +9,10 @@ from homeassistant.const import EntityCategory, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import collection, entity_registry as er, restore_state -from .const import DOMAIN +from .const import DOMAIN, OPTION_PREFERRED from .pipeline import AssistDevice, PipelineData, PipelineStorageCollection from .vad import VadSensitivity -OPTION_PREFERRED = "preferred" - @callback def get_chosen_pipeline( diff --git a/homeassistant/components/assist_pipeline/strings.json b/homeassistant/components/assist_pipeline/strings.json index 8fa67879fc3..804d43c3a0a 100644 --- a/homeassistant/components/assist_pipeline/strings.json +++ b/homeassistant/components/assist_pipeline/strings.json @@ -7,7 +7,7 @@ }, "select": { "pipeline": { - "name": "Assist pipeline", + "name": "Assistant", "state": { "preferred": "Preferred" } @@ -21,5 +21,17 @@ } } } + }, + "issues": { + "assist_in_progress_deprecated": { + "title": "{integration_name} in progress binary sensors are deprecated", + "fix_flow": { + "step": { + "confirm_disable_entity": { + "description": "The {integration_name} in progress binary sensor `{entity_id}` is deprecated.\n\nMigrate your configuration to use the corresponding `{assist_satellite_domain}` entity and then click SUBMIT to disable the in progress binary sensor and fix this issue." + } + } + } + } } } diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index 4782d14dee4..c7fe1bc10c7 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -75,7 +75,7 @@ class AudioBuffer: class VoiceCommandSegmenter: """Segments an audio stream into voice commands.""" - speech_seconds: float = 0.3 + speech_seconds: float = 0.1 """Seconds of speech before voice command has started.""" command_seconds: float = 1.0 @@ -96,6 +96,12 @@ class VoiceCommandSegmenter: timed_out: bool = False """True a timeout occurred during voice command.""" + before_command_speech_threshold: float = 0.2 + """Probability threshold for speech before voice command.""" + + in_command_speech_threshold: float = 0.5 + """Probability threshold for speech during voice command.""" + _speech_seconds_left: float = 0.0 """Seconds left before considering voice command as started.""" @@ -124,7 +130,7 @@ class VoiceCommandSegmenter: self._reset_seconds_left = self.reset_seconds self.in_command = False - def process(self, chunk_seconds: float, is_speech: bool | None) -> bool: + def process(self, chunk_seconds: float, speech_probability: float | None) -> bool: """Process samples using external VAD. Returns False when command is done. 
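speech_probability is the external VAD's speech probability in [0, 1]; None
is treated as 0.0. While waiting for the command to start,
before_command_speech_threshold decides what counts as speech; once
in_command is set, in_command_speech_threshold is used instead.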
@@ -134,7 +140,7 @@ class VoiceCommandSegmenter: self._timeout_seconds_left -= chunk_seconds if self._timeout_seconds_left <= 0: - _LOGGER.warning( + _LOGGER.debug( "VAD end of speech detection timed out after %s seconds", self.timeout_seconds, ) @@ -142,7 +148,12 @@ class VoiceCommandSegmenter: self.timed_out = True return False + if speech_probability is None: + speech_probability = 0.0 + if not self.in_command: + # Before command + is_speech = speech_probability > self.before_command_speech_threshold if is_speech: self._reset_seconds_left = self.reset_seconds self._speech_seconds_left -= chunk_seconds @@ -160,24 +171,29 @@ class VoiceCommandSegmenter: if self._reset_seconds_left <= 0: self._speech_seconds_left = self.speech_seconds self._reset_seconds_left = self.reset_seconds - elif not is_speech: - # Silence in command - self._reset_seconds_left = self.reset_seconds - self._silence_seconds_left -= chunk_seconds - self._command_seconds_left -= chunk_seconds - if (self._silence_seconds_left <= 0) and (self._command_seconds_left <= 0): - # Command finished successfully - self.reset() - _LOGGER.debug("Voice command finished") - return False else: - # Speech in command. - # Reset silence counter if enough speech. - self._reset_seconds_left -= chunk_seconds - self._command_seconds_left -= chunk_seconds - if self._reset_seconds_left <= 0: - self._silence_seconds_left = self.silence_seconds + # In command + is_speech = speech_probability > self.in_command_speech_threshold + if not is_speech: + # Silence in command self._reset_seconds_left = self.reset_seconds + self._silence_seconds_left -= chunk_seconds + self._command_seconds_left -= chunk_seconds + if (self._silence_seconds_left <= 0) and ( + self._command_seconds_left <= 0 + ): + # Command finished successfully + self.reset() + _LOGGER.debug("Voice command finished") + return False + else: + # Speech in command. + # Reset silence counter if enough speech. + self._reset_seconds_left -= chunk_seconds + self._command_seconds_left -= chunk_seconds + if self._reset_seconds_left <= 0: + self._silence_seconds_left = self.silence_seconds + self._reset_seconds_left = self.reset_seconds return True @@ -226,6 +242,9 @@ class VoiceActivityTimeout: reset_seconds: float = 0.5 """Seconds of speech before resetting timeout.""" + speech_threshold: float = 0.5 + """Threshold for speech.""" + _silence_seconds_left: float = 0.0 """Seconds left before considering voice command as stopped.""" @@ -241,12 +260,15 @@ class VoiceActivityTimeout: self._silence_seconds_left = self.silence_seconds self._reset_seconds_left = self.reset_seconds - def process(self, chunk_seconds: float, is_speech: bool | None) -> bool: + def process(self, chunk_seconds: float, speech_probability: float | None) -> bool: """Process samples using external VAD. Returns False when timeout is reached. 
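A chunk counts as speech when speech_probability (None is treated as 0.0)
exceeds speech_threshold; sustained speech for reset_seconds resets the
timeout.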
""" - if is_speech: + if speech_probability is None: + speech_probability = 0.0 + + if speech_probability > self.speech_threshold: # Speech self._reset_seconds_left -= chunk_seconds if self._reset_seconds_left <= 0: diff --git a/homeassistant/components/assist_satellite/__init__.py b/homeassistant/components/assist_satellite/__init__.py new file mode 100644 index 00000000000..dd940e8cdbe --- /dev/null +++ b/homeassistant/components/assist_satellite/__init__.py @@ -0,0 +1,80 @@ +"""Base class for assist satellite entities.""" + +import logging + +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.typing import ConfigType + +from .connection_test import ConnectionTestView +from .const import ( + CONNECTION_TEST_DATA, + DATA_COMPONENT, + DOMAIN, + AssistSatelliteEntityFeature, +) +from .entity import ( + AssistSatelliteAnnouncement, + AssistSatelliteConfiguration, + AssistSatelliteEntity, + AssistSatelliteEntityDescription, + AssistSatelliteWakeWord, +) +from .errors import SatelliteBusyError +from .websocket_api import async_register_websocket_api + +__all__ = [ + "DOMAIN", + "AssistSatelliteAnnouncement", + "AssistSatelliteEntity", + "AssistSatelliteConfiguration", + "AssistSatelliteEntityDescription", + "AssistSatelliteEntityFeature", + "AssistSatelliteWakeWord", + "SatelliteBusyError", +] + +_LOGGER = logging.getLogger(__name__) + +PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + component = hass.data[DATA_COMPONENT] = EntityComponent[AssistSatelliteEntity]( + _LOGGER, DOMAIN, hass + ) + await component.async_setup(config) + + component.async_register_entity_service( + "announce", + vol.All( + cv.make_entity_service_schema( + { + vol.Optional("message"): str, + vol.Optional("media_id"): str, + } + ), + cv.has_at_least_one_key("message", "media_id"), + ), + "async_internal_announce", + [AssistSatelliteEntityFeature.ANNOUNCE], + ) + hass.data[CONNECTION_TEST_DATA] = {} + async_register_websocket_api(hass) + hass.http.register_view(ConnectionTestView()) + + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up a config entry.""" + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) diff --git a/homeassistant/components/assist_satellite/connection_test.mp3 b/homeassistant/components/assist_satellite/connection_test.mp3 new file mode 100644 index 00000000000..ced3bedc684 Binary files /dev/null and b/homeassistant/components/assist_satellite/connection_test.mp3 differ diff --git a/homeassistant/components/assist_satellite/connection_test.py b/homeassistant/components/assist_satellite/connection_test.py new file mode 100644 index 00000000000..956542dacf3 --- /dev/null +++ b/homeassistant/components/assist_satellite/connection_test.py @@ -0,0 +1,43 @@ +"""Assist satellite connection test.""" + +import logging +from pathlib import Path + +from aiohttp import web + +from homeassistant.components.http import KEY_HASS, HomeAssistantView + +from .const import CONNECTION_TEST_DATA + +_LOGGER = logging.getLogger(__name__) + +CONNECTION_TEST_CONTENT_TYPE = 
"audio/mpeg" +CONNECTION_TEST_FILENAME = "connection_test.mp3" +CONNECTION_TEST_URL_BASE = "/api/assist_satellite/connection_test" + + +class ConnectionTestView(HomeAssistantView): + """View to serve an audio sample for connection test.""" + + requires_auth = False + url = f"{CONNECTION_TEST_URL_BASE}/{{connection_id}}" + name = "api:assist_satellite_connection_test" + + async def get(self, request: web.Request, connection_id: str) -> web.Response: + """Start a get request.""" + _LOGGER.debug("Request for connection test with id %s", connection_id) + + hass = request.app[KEY_HASS] + connection_test_data = hass.data[CONNECTION_TEST_DATA] + + connection_test_event = connection_test_data.pop(connection_id, None) + + if connection_test_event is None: + return web.Response(status=404) + + connection_test_event.set() + + audio_path = Path(__file__).parent / CONNECTION_TEST_FILENAME + audio_data = await hass.async_add_executor_job(audio_path.read_bytes) + + return web.Response(body=audio_data, content_type=CONNECTION_TEST_CONTENT_TYPE) diff --git a/homeassistant/components/assist_satellite/const.py b/homeassistant/components/assist_satellite/const.py new file mode 100644 index 00000000000..61ac7ecb39d --- /dev/null +++ b/homeassistant/components/assist_satellite/const.py @@ -0,0 +1,28 @@ +"""Constants for assist satellite.""" + +from __future__ import annotations + +import asyncio +from enum import IntFlag +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from .entity import AssistSatelliteEntity + +DOMAIN = "assist_satellite" + +DATA_COMPONENT: HassKey[EntityComponent[AssistSatelliteEntity]] = HassKey(DOMAIN) +CONNECTION_TEST_DATA: HassKey[dict[str, asyncio.Event]] = HassKey( + f"{DOMAIN}_connection_tests" +) + + +class AssistSatelliteEntityFeature(IntFlag): + """Supported features of Assist satellite entity.""" + + ANNOUNCE = 1 + """Device supports remotely triggered announcements.""" diff --git a/homeassistant/components/assist_satellite/entity.py b/homeassistant/components/assist_satellite/entity.py new file mode 100644 index 00000000000..ba8b54f7da2 --- /dev/null +++ b/homeassistant/components/assist_satellite/entity.py @@ -0,0 +1,430 @@ +"""Assist satellite entity.""" + +from abc import abstractmethod +import asyncio +from collections.abc import AsyncIterable +import contextlib +from dataclasses import dataclass +from enum import StrEnum +import logging +import time +from typing import Any, Final, Literal, final + +from homeassistant.components import media_source, stt, tts +from homeassistant.components.assist_pipeline import ( + OPTION_PREFERRED, + AudioSettings, + PipelineEvent, + PipelineEventType, + PipelineStage, + async_get_pipeline, + async_get_pipelines, + async_pipeline_from_audio_stream, + vad, +) +from homeassistant.components.media_player import async_process_play_media_url +from homeassistant.components.tts import ( + generate_media_source_id as tts_generate_media_source_id, +) +from homeassistant.core import Context, callback +from homeassistant.helpers import entity +from homeassistant.helpers.entity import EntityDescription + +from .const import AssistSatelliteEntityFeature +from .errors import AssistSatelliteError, SatelliteBusyError + +_CONVERSATION_TIMEOUT_SEC: Final = 5 * 60 # 5 minutes + +_LOGGER = logging.getLogger(__name__) + + +class AssistSatelliteState(StrEnum): + """Valid states of an Assist satellite entity.""" + + IDLE = "idle" + 
"""Device is waiting for user input, such as a wake word or a button press.""" + + LISTENING = "listening" + """Device is streaming audio with the voice command to Home Assistant.""" + + PROCESSING = "processing" + """Home Assistant is processing the voice command.""" + + RESPONDING = "responding" + """Device is speaking the response.""" + + +class AssistSatelliteEntityDescription(EntityDescription, frozen_or_thawed=True): + """A class that describes Assist satellite entities.""" + + +@dataclass(frozen=True) +class AssistSatelliteWakeWord: + """Available wake word model.""" + + id: str + """Unique id for wake word model.""" + + wake_word: str + """Wake word phrase.""" + + trained_languages: list[str] + """List of languages that the wake word was trained on.""" + + +@dataclass +class AssistSatelliteConfiguration: + """Satellite configuration.""" + + available_wake_words: list[AssistSatelliteWakeWord] + """List of available available wake word models.""" + + active_wake_words: list[str] + """List of active wake word ids.""" + + max_active_wake_words: int + """Maximum number of simultaneous wake words allowed (0 for no limit).""" + + +@dataclass +class AssistSatelliteAnnouncement: + """Announcement to be made.""" + + message: str + """Message to be spoken.""" + + media_id: str + """Media ID to be played.""" + + media_id_source: Literal["url", "media_id", "tts"] + + +class AssistSatelliteEntity(entity.Entity): + """Entity encapsulating the state and functionality of an Assist satellite.""" + + entity_description: AssistSatelliteEntityDescription + _attr_should_poll = False + _attr_supported_features = AssistSatelliteEntityFeature(0) + _attr_pipeline_entity_id: str | None = None + _attr_vad_sensitivity_entity_id: str | None = None + + _conversation_id: str | None = None + _conversation_id_time: float | None = None + + _run_has_tts: bool = False + _is_announcing = False + _wake_word_intercept_future: asyncio.Future[str | None] | None = None + _attr_tts_options: dict[str, Any] | None = None + _pipeline_task: asyncio.Task | None = None + + __assist_satellite_state = AssistSatelliteState.IDLE + + @final + @property + def state(self) -> str | None: + """Return state of the entity.""" + return self.__assist_satellite_state + + @property + def pipeline_entity_id(self) -> str | None: + """Entity ID of the pipeline to use for the next conversation.""" + return self._attr_pipeline_entity_id + + @property + def vad_sensitivity_entity_id(self) -> str | None: + """Entity ID of the VAD sensitivity to use for the next conversation.""" + return self._attr_vad_sensitivity_entity_id + + @property + def tts_options(self) -> dict[str, Any] | None: + """Options passed for text-to-speech.""" + return self._attr_tts_options + + @callback + @abstractmethod + def async_get_configuration(self) -> AssistSatelliteConfiguration: + """Get the current satellite configuration.""" + + @abstractmethod + async def async_set_configuration( + self, config: AssistSatelliteConfiguration + ) -> None: + """Set the current satellite configuration.""" + + async def async_intercept_wake_word(self) -> str | None: + """Intercept the next wake word from the satellite. + + Returns the detected wake word phrase or None. 
+ """ + if self._wake_word_intercept_future is not None: + raise SatelliteBusyError("Wake word interception already in progress") + + # Will cause next wake word to be intercepted in + # async_accept_pipeline_from_satellite + self._wake_word_intercept_future = asyncio.Future() + + _LOGGER.debug("Next wake word will be intercepted: %s", self.entity_id) + + try: + return await self._wake_word_intercept_future + finally: + self._wake_word_intercept_future = None + + async def async_internal_announce( + self, + message: str | None = None, + media_id: str | None = None, + ) -> None: + """Play and show an announcement on the satellite. + + If media_id is not provided, message is synthesized to + audio with the selected pipeline. + + If media_id is provided, it is played directly. It is possible + to omit the message and the satellite will not show any text. + + Calls async_announce with message and media id. + """ + await self._cancel_running_pipeline() + + media_id_source: Literal["url", "media_id", "tts"] | None = None + + if message is None: + message = "" + + if not media_id: + media_id_source = "tts" + # Synthesize audio and get URL + pipeline_id = self._resolve_pipeline() + pipeline = async_get_pipeline(self.hass, pipeline_id) + + tts_options: dict[str, Any] = {} + if pipeline.tts_voice is not None: + tts_options[tts.ATTR_VOICE] = pipeline.tts_voice + + if self.tts_options is not None: + tts_options.update(self.tts_options) + + media_id = tts_generate_media_source_id( + self.hass, + message, + engine=pipeline.tts_engine, + language=pipeline.tts_language, + options=tts_options, + ) + + if media_source.is_media_source_id(media_id): + if not media_id_source: + media_id_source = "media_id" + media = await media_source.async_resolve_media( + self.hass, + media_id, + None, + ) + media_id = media.url + + if not media_id_source: + media_id_source = "url" + + # Resolve to full URL + media_id = async_process_play_media_url(self.hass, media_id) + + if self._is_announcing: + raise SatelliteBusyError + + self._is_announcing = True + self._set_state(AssistSatelliteState.RESPONDING) + + try: + # Block until announcement is finished + await self.async_announce( + AssistSatelliteAnnouncement(message, media_id, media_id_source) + ) + finally: + self._is_announcing = False + self._set_state(AssistSatelliteState.IDLE) + + async def async_announce(self, announcement: AssistSatelliteAnnouncement) -> None: + """Announce media on the satellite. + + Should block until the announcement is done playing. 
+ """ + raise NotImplementedError + + async def async_accept_pipeline_from_satellite( + self, + audio_stream: AsyncIterable[bytes], + start_stage: PipelineStage = PipelineStage.STT, + end_stage: PipelineStage = PipelineStage.TTS, + wake_word_phrase: str | None = None, + ) -> None: + """Triggers an Assist pipeline in Home Assistant from a satellite.""" + await self._cancel_running_pipeline() + + if self._wake_word_intercept_future and start_stage in ( + PipelineStage.WAKE_WORD, + PipelineStage.STT, + ): + if start_stage == PipelineStage.WAKE_WORD: + self._wake_word_intercept_future.set_exception( + AssistSatelliteError( + "Only on-device wake words currently supported" + ) + ) + return + + # Intercepting wake word and immediately end pipeline + _LOGGER.debug( + "Intercepted wake word: %s (entity_id=%s)", + wake_word_phrase, + self.entity_id, + ) + + if wake_word_phrase is None: + self._wake_word_intercept_future.set_exception( + AssistSatelliteError("No wake word phrase provided") + ) + else: + self._wake_word_intercept_future.set_result(wake_word_phrase) + self._internal_on_pipeline_event(PipelineEvent(PipelineEventType.RUN_END)) + return + + device_id = self.registry_entry.device_id if self.registry_entry else None + + # Refresh context if necessary + if ( + (self._context is None) + or (self._context_set is None) + or ((time.time() - self._context_set) > entity.CONTEXT_RECENT_TIME_SECONDS) + ): + self.async_set_context(Context()) + + assert self._context is not None + + # Reset conversation id if necessary + if self._conversation_id_time and ( + (time.monotonic() - self._conversation_id_time) > _CONVERSATION_TIMEOUT_SEC + ): + self._conversation_id = None + self._conversation_id_time = None + + # Set entity state based on pipeline events + self._run_has_tts = False + + assert self.platform.config_entry is not None + self._pipeline_task = self.platform.config_entry.async_create_background_task( + self.hass, + async_pipeline_from_audio_stream( + self.hass, + context=self._context, + event_callback=self._internal_on_pipeline_event, + stt_metadata=stt.SpeechMetadata( + language="", # set in async_pipeline_from_audio_stream + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_stream, + pipeline_id=self._resolve_pipeline(), + conversation_id=self._conversation_id, + device_id=device_id, + tts_audio_output=self.tts_options, + wake_word_phrase=wake_word_phrase, + audio_settings=AudioSettings( + silence_seconds=self._resolve_vad_sensitivity() + ), + start_stage=start_stage, + end_stage=end_stage, + ), + f"{self.entity_id}_pipeline", + ) + + try: + await self._pipeline_task + finally: + self._pipeline_task = None + + async def _cancel_running_pipeline(self) -> None: + """Cancel the current pipeline if it's running.""" + if self._pipeline_task is not None: + self._pipeline_task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await self._pipeline_task + + self._pipeline_task = None + + @abstractmethod + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Handle pipeline events.""" + + @callback + def _internal_on_pipeline_event(self, event: PipelineEvent) -> None: + """Set state based on pipeline stage.""" + if event.type is PipelineEventType.WAKE_WORD_START: + self._set_state(AssistSatelliteState.IDLE) + elif event.type is PipelineEventType.STT_START: + self._set_state(AssistSatelliteState.LISTENING) + elif 
event.type is PipelineEventType.INTENT_START: + self._set_state(AssistSatelliteState.PROCESSING) + elif event.type is PipelineEventType.INTENT_END: + assert event.data is not None + # Update timeout + self._conversation_id_time = time.monotonic() + self._conversation_id = event.data["intent_output"]["conversation_id"] + elif event.type is PipelineEventType.TTS_START: + # Wait until tts_response_finished is called to return to waiting state + self._run_has_tts = True + self._set_state(AssistSatelliteState.RESPONDING) + elif event.type is PipelineEventType.RUN_END: + if not self._run_has_tts: + self._set_state(AssistSatelliteState.IDLE) + + self.on_pipeline_event(event) + + @callback + def _set_state(self, state: AssistSatelliteState) -> None: + """Set the entity's state.""" + self.__assist_satellite_state = state + self.async_write_ha_state() + + @callback + def tts_response_finished(self) -> None: + """Tell entity that the text-to-speech response has finished playing.""" + self._set_state(AssistSatelliteState.IDLE) + + @callback + def _resolve_pipeline(self) -> str | None: + """Resolve pipeline from select entity to id. + + Return None to make async_get_pipeline look up the preferred pipeline. + """ + if not (pipeline_entity_id := self.pipeline_entity_id): + return None + + if (pipeline_entity_state := self.hass.states.get(pipeline_entity_id)) is None: + raise RuntimeError("Pipeline entity not found") + + if pipeline_entity_state.state != OPTION_PREFERRED: + # Resolve pipeline by name + for pipeline in async_get_pipelines(self.hass): + if pipeline.name == pipeline_entity_state.state: + return pipeline.id + + return None + + @callback + def _resolve_vad_sensitivity(self) -> float: + """Resolve VAD sensitivity from select entity to enum.""" + vad_sensitivity = vad.VadSensitivity.DEFAULT + + if vad_sensitivity_entity_id := self.vad_sensitivity_entity_id: + if ( + vad_sensitivity_state := self.hass.states.get(vad_sensitivity_entity_id) + ) is None: + raise RuntimeError("VAD sensitivity entity not found") + + vad_sensitivity = vad.VadSensitivity(vad_sensitivity_state.state) + + return vad.VadSensitivity.to_seconds(vad_sensitivity) diff --git a/homeassistant/components/assist_satellite/errors.py b/homeassistant/components/assist_satellite/errors.py new file mode 100644 index 00000000000..cd05f374521 --- /dev/null +++ b/homeassistant/components/assist_satellite/errors.py @@ -0,0 +1,11 @@ +"""Errors for assist satellite.""" + +from homeassistant.exceptions import HomeAssistantError + + +class AssistSatelliteError(HomeAssistantError): + """Base class for assist satellite errors.""" + + +class SatelliteBusyError(AssistSatelliteError): + """Satellite is busy and cannot handle the request.""" diff --git a/homeassistant/components/assist_satellite/icons.json b/homeassistant/components/assist_satellite/icons.json new file mode 100644 index 00000000000..a98c3aefc5b --- /dev/null +++ b/homeassistant/components/assist_satellite/icons.json @@ -0,0 +1,12 @@ +{ + "entity_component": { + "_": { + "default": "mdi:account-voice" + } + }, + "services": { + "announce": { + "service": "mdi:bullhorn" + } + } +} diff --git a/homeassistant/components/assist_satellite/manifest.json b/homeassistant/components/assist_satellite/manifest.json new file mode 100644 index 00000000000..68a3ceafd4f --- /dev/null +++ b/homeassistant/components/assist_satellite/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "assist_satellite", + "name": "Assist Satellite", + "codeowners": ["@home-assistant/core", "@synesthesiam"], + "dependencies": 
["assist_pipeline", "http", "stt", "tts"], + "documentation": "https://www.home-assistant.io/integrations/assist_satellite", + "integration_type": "entity", + "quality_scale": "internal" +} diff --git a/homeassistant/components/assist_satellite/services.yaml b/homeassistant/components/assist_satellite/services.yaml new file mode 100644 index 00000000000..e7fefc4705f --- /dev/null +++ b/homeassistant/components/assist_satellite/services.yaml @@ -0,0 +1,16 @@ +announce: + target: + entity: + domain: assist_satellite + supported_features: + - assist_satellite.AssistSatelliteEntityFeature.ANNOUNCE + fields: + message: + required: false + example: "Time to wake up!" + selector: + text: + media_id: + required: false + selector: + text: diff --git a/homeassistant/components/assist_satellite/strings.json b/homeassistant/components/assist_satellite/strings.json new file mode 100644 index 00000000000..7f1426ef529 --- /dev/null +++ b/homeassistant/components/assist_satellite/strings.json @@ -0,0 +1,30 @@ +{ + "title": "Assist satellite", + "entity_component": { + "_": { + "name": "Assist satellite", + "state": { + "idle": "[%key:common::state::idle%]", + "listening": "Listening", + "responding": "Responding", + "processing": "Processing" + } + } + }, + "services": { + "announce": { + "name": "Announce", + "description": "Let the satellite announce a message.", + "fields": { + "message": { + "name": "Message", + "description": "The message to announce." + }, + "media_id": { + "name": "Media ID", + "description": "The media ID to announce instead of using text-to-speech." + } + } + } + } +} diff --git a/homeassistant/components/assist_satellite/websocket_api.py b/homeassistant/components/assist_satellite/websocket_api.py new file mode 100644 index 00000000000..c81648c6ee3 --- /dev/null +++ b/homeassistant/components/assist_satellite/websocket_api.py @@ -0,0 +1,205 @@ +"""Assist satellite Websocket API.""" + +import asyncio +from dataclasses import asdict, replace +from typing import Any + +import voluptuous as vol + +from homeassistant.components import websocket_api +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.util import uuid as uuid_util + +from .connection_test import CONNECTION_TEST_URL_BASE +from .const import ( + CONNECTION_TEST_DATA, + DATA_COMPONENT, + DOMAIN, + AssistSatelliteEntityFeature, +) +from .entity import AssistSatelliteEntity + +CONNECTION_TEST_TIMEOUT = 30 + + +@callback +def async_register_websocket_api(hass: HomeAssistant) -> None: + """Register the websocket API.""" + websocket_api.async_register_command(hass, websocket_intercept_wake_word) + websocket_api.async_register_command(hass, websocket_get_configuration) + websocket_api.async_register_command(hass, websocket_set_wake_words) + websocket_api.async_register_command(hass, websocket_test_connection) + + +@websocket_api.websocket_command( + { + vol.Required("type"): "assist_satellite/intercept_wake_word", + vol.Required("entity_id"): cv.entity_domain(DOMAIN), + } +) +@websocket_api.require_admin +@websocket_api.async_response +async def websocket_intercept_wake_word( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Intercept the next wake word from a satellite.""" + satellite = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"]) + if 
satellite is None: + connection.send_error( + msg["id"], websocket_api.ERR_NOT_FOUND, "Entity not found" + ) + return + + async def intercept_wake_word() -> None: + """Push an intercepted wake word to websocket.""" + try: + wake_word_phrase = await satellite.async_intercept_wake_word() + connection.send_message( + websocket_api.event_message( + msg["id"], + {"wake_word_phrase": wake_word_phrase}, + ) + ) + except HomeAssistantError as err: + connection.send_error(msg["id"], "home_assistant_error", str(err)) + + task = hass.async_create_task(intercept_wake_word(), "intercept_wake_word") + connection.subscriptions[msg["id"]] = task.cancel + connection.send_message(websocket_api.result_message(msg["id"])) + + +@callback +@websocket_api.websocket_command( + { + vol.Required("type"): "assist_satellite/get_configuration", + vol.Required("entity_id"): cv.entity_domain(DOMAIN), + } +) +def websocket_get_configuration( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get the current satellite configuration.""" + satellite = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"]) + if satellite is None: + connection.send_error( + msg["id"], websocket_api.ERR_NOT_FOUND, "Entity not found" + ) + return + + config_dict = asdict(satellite.async_get_configuration()) + config_dict["pipeline_entity_id"] = satellite.pipeline_entity_id + config_dict["vad_entity_id"] = satellite.vad_sensitivity_entity_id + + connection.send_result(msg["id"], config_dict) + + +@websocket_api.websocket_command( + { + vol.Required("type"): "assist_satellite/set_wake_words", + vol.Required("entity_id"): cv.entity_domain(DOMAIN), + vol.Required("wake_word_ids"): [str], + } +) +@websocket_api.require_admin +@websocket_api.async_response +async def websocket_set_wake_words( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Set the active wake words for the satellite.""" + satellite = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"]) + if satellite is None: + connection.send_error( + msg["id"], websocket_api.ERR_NOT_FOUND, "Entity not found" + ) + return + + config = satellite.async_get_configuration() + + # Don't set too many active wake words + actual_ids = msg["wake_word_ids"] + if len(actual_ids) > config.max_active_wake_words: + connection.send_error( + msg["id"], + websocket_api.ERR_NOT_SUPPORTED, + f"Maximum number of active wake words is {config.max_active_wake_words}", + ) + return + + # Verify all ids are available + available_ids = {ww.id for ww in config.available_wake_words} + for ww_id in actual_ids: + if ww_id not in available_ids: + connection.send_error( + msg["id"], + websocket_api.ERR_NOT_SUPPORTED, + f"Wake word id is not supported: {ww_id}", + ) + return + + await satellite.async_set_configuration( + replace(config, active_wake_words=actual_ids) + ) + connection.send_result(msg["id"]) + + +@websocket_api.websocket_command( + { + vol.Required("type"): "assist_satellite/test_connection", + vol.Required("entity_id"): cv.entity_domain(DOMAIN), + } +) +@websocket_api.async_response +async def websocket_test_connection( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Test the connection between the device and Home Assistant. + + Send an announcement to the device with a special media id. 
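The media id points at ConnectionTestView under CONNECTION_TEST_URL_BASE;
when the device fetches it, the matching asyncio.Event in
CONNECTION_TEST_DATA is set and the command returns {"status": "success"},
otherwise {"status": "timeout"} is returned after CONNECTION_TEST_TIMEOUT
seconds.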
+ """ + component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN] + satellite = component.get_entity(msg["entity_id"]) + if satellite is None: + connection.send_error( + msg["id"], websocket_api.ERR_NOT_FOUND, "Entity not found" + ) + return + if not (satellite.supported_features or 0) & AssistSatelliteEntityFeature.ANNOUNCE: + connection.send_error( + msg["id"], + websocket_api.ERR_NOT_SUPPORTED, + "Entity does not support announce", + ) + return + + # Announce and wait for event + connection_test_data = hass.data[CONNECTION_TEST_DATA] + connection_id = uuid_util.random_uuid_hex() + connection_test_event = asyncio.Event() + connection_test_data[connection_id] = connection_test_event + + hass.async_create_background_task( + satellite.async_internal_announce( + media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}" + ), + f"assist_satellite_connection_test_{msg['entity_id']}", + ) + + try: + async with asyncio.timeout(CONNECTION_TEST_TIMEOUT): + await connection_test_event.wait() + connection.send_result(msg["id"], {"status": "success"}) + except TimeoutError: + connection.send_result(msg["id"], {"status": "timeout"}) + finally: + connection_test_data.pop(connection_id, None) diff --git a/homeassistant/components/asuswrt/bridge.py b/homeassistant/components/asuswrt/bridge.py index 4e928d63666..bc6f0fe6fd2 100644 --- a/homeassistant/components/asuswrt/bridge.py +++ b/homeassistant/components/asuswrt/bridge.py @@ -5,6 +5,7 @@ from __future__ import annotations from abc import ABC, abstractmethod from collections import namedtuple from collections.abc import Awaitable, Callable, Coroutine +from datetime import datetime import functools import logging from typing import Any, cast @@ -40,17 +41,23 @@ from .const import ( PROTOCOL_HTTPS, PROTOCOL_TELNET, SENSORS_BYTES, + SENSORS_CPU, SENSORS_LOAD_AVG, + SENSORS_MEMORY, SENSORS_RATES, SENSORS_TEMPERATURES, SENSORS_TEMPERATURES_LEGACY, + SENSORS_UPTIME, ) SENSORS_TYPE_BYTES = "sensors_bytes" SENSORS_TYPE_COUNT = "sensors_count" +SENSORS_TYPE_CPU = "sensors_cpu" SENSORS_TYPE_LOAD_AVG = "sensors_load_avg" +SENSORS_TYPE_MEMORY = "sensors_memory" SENSORS_TYPE_RATES = "sensors_rates" SENSORS_TYPE_TEMPERATURES = "sensors_temperatures" +SENSORS_TYPE_UPTIME = "sensors_uptime" WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"]) # noqa: PYI024 @@ -346,6 +353,7 @@ class AsusWrtHttpBridge(AsusWrtBridge): async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]: """Return a dictionary of available sensors for this bridge.""" + sensors_cpu = await self._get_available_cpu_sensors() sensors_temperatures = await self._get_available_temperature_sensors() sensors_loadavg = await self._get_loadavg_sensors_availability() return { @@ -353,20 +361,49 @@ class AsusWrtHttpBridge(AsusWrtBridge): KEY_SENSORS: SENSORS_BYTES, KEY_METHOD: self._get_bytes, }, + SENSORS_TYPE_CPU: { + KEY_SENSORS: sensors_cpu, + KEY_METHOD: self._get_cpu_usage, + }, SENSORS_TYPE_LOAD_AVG: { KEY_SENSORS: sensors_loadavg, KEY_METHOD: self._get_load_avg, }, + SENSORS_TYPE_MEMORY: { + KEY_SENSORS: SENSORS_MEMORY, + KEY_METHOD: self._get_memory_usage, + }, SENSORS_TYPE_RATES: { KEY_SENSORS: SENSORS_RATES, KEY_METHOD: self._get_rates, }, + SENSORS_TYPE_UPTIME: { + KEY_SENSORS: SENSORS_UPTIME, + KEY_METHOD: self._get_uptime, + }, SENSORS_TYPE_TEMPERATURES: { KEY_SENSORS: sensors_temperatures, KEY_METHOD: self._get_temperatures, }, } + async def _get_available_cpu_sensors(self) -> list[str]: + """Check which cpu information is available on the router.""" + try: 
+ available_cpu = await self._api.async_get_cpu_usage() + available_sensors = [t for t in SENSORS_CPU if t in available_cpu] + except AsusWrtError as exc: + _LOGGER.warning( + ( + "Failed checking cpu sensor availability for ASUS router" + " %s. Exception: %s" + ), + self.host, + exc, + ) + return [] + return available_sensors + async def _get_available_temperature_sensors(self) -> list[str]: """Check which temperature information is available on the router.""" try: @@ -415,3 +452,25 @@ class AsusWrtHttpBridge(AsusWrtBridge): async def _get_temperatures(self) -> Any: """Fetch temperatures information from the router.""" return await self._api.async_get_temperatures() + + @handle_errors_and_zip(AsusWrtError, None) + async def _get_cpu_usage(self) -> Any: + """Fetch cpu information from the router.""" + return await self._api.async_get_cpu_usage() + + @handle_errors_and_zip(AsusWrtError, None) + async def _get_memory_usage(self) -> Any: + """Fetch memory information from the router.""" + return await self._api.async_get_memory_usage() + + async def _get_uptime(self) -> dict[str, Any]: + """Fetch uptime from the router.""" + try: + uptimes = await self._api.async_get_uptime() + except AsusWrtError as exc: + raise UpdateFailed(exc) from exc + + last_boot = datetime.fromisoformat(uptimes["last_boot"]) + uptime = uptimes["uptime"] + + return dict(zip(SENSORS_UPTIME, [last_boot, uptime], strict=False)) diff --git a/homeassistant/components/asuswrt/const.py b/homeassistant/components/asuswrt/const.py index 5ce37207145..7790750538e 100644 --- a/homeassistant/components/asuswrt/const.py +++ b/homeassistant/components/asuswrt/const.py @@ -27,7 +27,20 @@ PROTOCOL_TELNET = "telnet" # Sensors SENSORS_BYTES = ["sensor_rx_bytes", "sensor_tx_bytes"] SENSORS_CONNECTED_DEVICE = ["sensor_connected_device"] +SENSORS_CPU = [ + "cpu_total_usage", + "cpu1_usage", + "cpu2_usage", + "cpu3_usage", + "cpu4_usage", + "cpu5_usage", + "cpu6_usage", + "cpu7_usage", + "cpu8_usage", +] SENSORS_LOAD_AVG = ["sensor_load_avg1", "sensor_load_avg5", "sensor_load_avg15"] +SENSORS_MEMORY = ["mem_usage_perc", "mem_free", "mem_used"] SENSORS_RATES = ["sensor_rx_rates", "sensor_tx_rates"] SENSORS_TEMPERATURES_LEGACY = ["2.4GHz", "5.0GHz", "CPU"] SENSORS_TEMPERATURES = [*SENSORS_TEMPERATURES_LEGACY, "5.0GHz_2", "6.0GHz"] +SENSORS_UPTIME = ["sensor_last_boot", "sensor_uptime"] diff --git a/homeassistant/components/asuswrt/device_tracker.py b/homeassistant/components/asuswrt/device_tracker.py index d2330801bd5..95d2e4c8000 100644 --- a/homeassistant/components/asuswrt/device_tracker.py +++ b/homeassistant/components/asuswrt/device_tracker.py @@ -2,7 +2,7 @@ from __future__ import annotations -from homeassistant.components.device_tracker import ScannerEntity, SourceType +from homeassistant.components.device_tracker import ScannerEntity from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -71,11 +71,6 @@ class AsusWrtDevice(ScannerEntity): """Return true if the device is connected to the network.""" return self._device.is_connected - @property - def source_type(self) -> SourceType: - """Return the source type.""" - return SourceType.ROUTER - @property def hostname(self) -> str | None: """Return the hostname of device.""" diff --git a/homeassistant/components/asuswrt/icons.json b/homeassistant/components/asuswrt/icons.json index a4e44496a2f..b5b2c35f742 100644 --- 
a/homeassistant/components/asuswrt/icons.json +++ b/homeassistant/components/asuswrt/icons.json @@ -24,6 +24,21 @@ }, "load_avg_15m": { "default": "mdi:cpu-32-bit" + }, + "cpu_usage": { + "default": "mdi:cpu-32-bit" + }, + "cpu_core_usage": { + "default": "mdi:cpu-32-bit" + }, + "memory_usage": { + "default": "mdi:memory" + }, + "memory_free": { + "default": "mdi:memory" + }, + "memory_used": { + "default": "mdi:memory" } } } diff --git a/homeassistant/components/asuswrt/router.py b/homeassistant/components/asuswrt/router.py index 1244db34ed5..330c4bcfb67 100644 --- a/homeassistant/components/asuswrt/router.py +++ b/homeassistant/components/asuswrt/router.py @@ -290,7 +290,7 @@ class AsusWrtRouter: if self._connect_error: self._connect_error = False - _LOGGER.info("Reconnected to ASUS router %s", self.host) + _LOGGER.warning("Reconnected to ASUS router %s", self.host) self._connected_devices = len(wrt_devices) consider_home: int = self._options.get( diff --git a/homeassistant/components/asuswrt/sensor.py b/homeassistant/components/asuswrt/sensor.py index 69470882153..fb43e574379 100644 --- a/homeassistant/components/asuswrt/sensor.py +++ b/homeassistant/components/asuswrt/sensor.py @@ -11,10 +11,12 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( + PERCENTAGE, EntityCategory, UnitOfDataRate, UnitOfInformation, UnitOfTemperature, + UnitOfTime, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -30,9 +32,12 @@ from .const import ( KEY_SENSORS, SENSORS_BYTES, SENSORS_CONNECTED_DEVICE, + SENSORS_CPU, SENSORS_LOAD_AVG, + SENSORS_MEMORY, SENSORS_RATES, SENSORS_TEMPERATURES, + SENSORS_UPTIME, ) from .router import AsusWrtRouter @@ -46,6 +51,19 @@ class AsusWrtSensorEntityDescription(SensorEntityDescription): UNIT_DEVICES = "Devices" +CPU_CORE_SENSORS: tuple[AsusWrtSensorEntityDescription, ...] = tuple( + AsusWrtSensorEntityDescription( + key=sens_key, + translation_key="cpu_core_usage", + translation_placeholders={"core_id": str(core_id)}, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=1, + ) + for core_id, sens_key in enumerate(SENSORS_CPU[1:], start=1) +) CONNECTION_SENSORS: tuple[AsusWrtSensorEntityDescription, ...] = ( AsusWrtSensorEntityDescription( key=SENSORS_CONNECTED_DEVICE[0], @@ -167,6 +185,61 @@ CONNECTION_SENSORS: tuple[AsusWrtSensorEntityDescription, ...] 
= ( entity_registry_enabled_default=False, suggested_display_precision=1, ), + AsusWrtSensorEntityDescription( + key=SENSORS_MEMORY[0], + translation_key="memory_usage", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=1, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_MEMORY[1], + translation_key="memory_free", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.DATA_SIZE, + native_unit_of_measurement=UnitOfInformation.MEGABYTES, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=2, + factor=1024, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_MEMORY[2], + translation_key="memory_used", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.DATA_SIZE, + native_unit_of_measurement=UnitOfInformation.MEGABYTES, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=2, + factor=1024, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_UPTIME[0], + translation_key="last_boot", + device_class=SensorDeviceClass.TIMESTAMP, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_UPTIME[1], + translation_key="uptime", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.SECONDS, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_CPU[0], + translation_key="cpu_usage", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=1, + ), + *CPU_CORE_SENSORS, ) diff --git a/homeassistant/components/asuswrt/strings.json b/homeassistant/components/asuswrt/strings.json index 4c8386dcd00..bab40f281f5 100644 --- a/homeassistant/components/asuswrt/strings.json +++ b/homeassistant/components/asuswrt/strings.json @@ -88,6 +88,27 @@ }, "6ghz_temperature": { "name": "6GHz Temperature" + }, + "cpu_usage": { + "name": "CPU usage" + }, + "cpu_core_usage": { + "name": "CPU core {core_id} usage" + }, + "memory_usage": { + "name": "Memory usage" + }, + "memory_free": { + "name": "Memory free" + }, + "memory_used": { + "name": "Memory used" + }, + "last_boot": { + "name": "Last boot" + }, + "uptime": { + "name": "Uptime" } } }, diff --git a/homeassistant/components/atag/__init__.py b/homeassistant/components/atag/__init__.py index 85732485165..89f95f77870 100644 --- a/homeassistant/components/atag/__init__.py +++ b/homeassistant/components/atag/__init__.py @@ -1,91 +1,29 @@ """The ATAG Integration.""" -from asyncio import timeout -from datetime import timedelta -import logging - -from pyatag import AtagException, AtagOne - -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) -_LOGGER = logging.getLogger(__name__) +from .coordinator import AtagConfigEntry, AtagDataUpdateCoordinator DOMAIN = "atag" PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, 
Platform.WATER_HEATER] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AtagConfigEntry) -> bool: """Set up Atag integration from a config entry.""" - async def _async_update_data(): - """Update data via library.""" - async with timeout(20): - try: - await atag.update() - except AtagException as err: - raise UpdateFailed(err) from err - return atag - - atag = AtagOne( - session=async_get_clientsession(hass), **entry.data, device=entry.unique_id - ) - coordinator = DataUpdateCoordinator[AtagOne]( - hass, - _LOGGER, - name=DOMAIN.title(), - update_method=_async_update_data, - update_interval=timedelta(seconds=60), - ) - + coordinator = AtagDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator if entry.unique_id is None: - hass.config_entries.async_update_entry(entry, unique_id=atag.id) + hass.config_entries.async_update_entry(entry, unique_id=coordinator.atag.id) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AtagConfigEntry) -> bool: """Unload Atag config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok - - -class AtagEntity(CoordinatorEntity[DataUpdateCoordinator[AtagOne]]): - """Defines a base Atag entity.""" - - def __init__( - self, coordinator: DataUpdateCoordinator[AtagOne], atag_id: str - ) -> None: - """Initialize the Atag entity.""" - super().__init__(coordinator) - - self._id = atag_id - self._attr_name = DOMAIN.title() - self._attr_unique_id = f"{coordinator.data.id}-{atag_id}" - - @property - def device_info(self) -> DeviceInfo: - """Return info for device registry.""" - return DeviceInfo( - identifiers={(DOMAIN, self.coordinator.data.id)}, - manufacturer="Atag", - model="Atag One", - name="Atag Thermostat", - sw_version=self.coordinator.data.apiversion, - ) + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/atag/climate.py b/homeassistant/components/atag/climate.py index ff66839926f..a362b71fbc8 100644 --- a/homeassistant/components/atag/climate.py +++ b/homeassistant/components/atag/climate.py @@ -12,13 +12,13 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_TEMPERATURE, Platform +from homeassistant.const import ATTR_TEMPERATURE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.enum import try_parse_enum -from . 
import DOMAIN, AtagEntity +from .coordinator import AtagConfigEntry, AtagDataUpdateCoordinator +from .entity import AtagEntity PRESET_MAP = { "Manual": "manual", @@ -32,11 +32,10 @@ HVAC_MODES = [HVACMode.AUTO, HVACMode.HEAT] async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: AtagConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Load a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities([AtagThermostat(coordinator, Platform.CLIMATE)]) + async_add_entities([AtagThermostat(entry.runtime_data, "climate")]) class AtagThermostat(AtagEntity, ClimateEntity): @@ -47,51 +46,50 @@ class AtagThermostat(AtagEntity, ClimateEntity): _attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) - _enable_turn_on_off_backwards_compatibility = False - def __init__(self, coordinator, atag_id): + def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None: """Initialize an Atag climate device.""" super().__init__(coordinator, atag_id) - self._attr_temperature_unit = coordinator.data.climate.temp_unit + self._attr_temperature_unit = coordinator.atag.climate.temp_unit @property def hvac_mode(self) -> HVACMode | None: """Return hvac operation ie. heat, cool mode.""" - return try_parse_enum(HVACMode, self.coordinator.data.climate.hvac_mode) + return try_parse_enum(HVACMode, self.coordinator.atag.climate.hvac_mode) @property def hvac_action(self) -> HVACAction | None: """Return the current running hvac operation.""" - is_active = self.coordinator.data.climate.status + is_active = self.coordinator.atag.climate.status return HVACAction.HEATING if is_active else HVACAction.IDLE @property def current_temperature(self) -> float | None: """Return the current temperature.""" - return self.coordinator.data.climate.temperature + return self.coordinator.atag.climate.temperature @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - return self.coordinator.data.climate.target_temperature + return self.coordinator.atag.climate.target_temperature @property def preset_mode(self) -> str | None: """Return the current preset mode, e.g., auto, manual, fireplace, extend, etc.""" - preset = self.coordinator.data.climate.preset_mode + preset = self.coordinator.atag.climate.preset_mode return PRESET_INVERTED.get(preset) async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" - await self.coordinator.data.climate.set_temp(kwargs.get(ATTR_TEMPERATURE)) + await self.coordinator.atag.climate.set_temp(kwargs.get(ATTR_TEMPERATURE)) self.async_write_ha_state() async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target hvac mode.""" - await self.coordinator.data.climate.set_hvac_mode(hvac_mode) + await self.coordinator.atag.climate.set_hvac_mode(hvac_mode) self.async_write_ha_state() async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - await self.coordinator.data.climate.set_preset_mode(PRESET_MAP[preset_mode]) + await self.coordinator.atag.climate.set_preset_mode(PRESET_MAP[preset_mode]) self.async_write_ha_state() diff --git a/homeassistant/components/atag/coordinator.py b/homeassistant/components/atag/coordinator.py new file mode 100644 index 00000000000..6d542471384 --- /dev/null +++ b/homeassistant/components/atag/coordinator.py @@ -0,0 +1,41 @@ +"""The ATAG Integration.""" + 
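# The coordinator owns the AtagOne client (exposed as `self.atag`) and is
# stored on the config entry as `entry.runtime_data` (typed via
# AtagConfigEntry), replacing the old hass.data[DOMAIN][entry_id] lookup.
# Illustrative access from a platform (names as used in this change):
#     coordinator = config_entry.runtime_data
#     temperature = coordinator.atag.climate.temperature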
+from asyncio import timeout +from datetime import timedelta +import logging + +from pyatag import AtagException, AtagOne + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + +type AtagConfigEntry = ConfigEntry[AtagDataUpdateCoordinator] + + +class AtagDataUpdateCoordinator(DataUpdateCoordinator[None]): + """Atag data update coordinator.""" + + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + """Initialize Atag coordinator.""" + super().__init__( + hass, + _LOGGER, + name="Atag", + update_interval=timedelta(seconds=60), + ) + + self.atag = AtagOne( + session=async_get_clientsession(hass), **entry.data, device=entry.unique_id + ) + + async def _async_update_data(self) -> None: + """Update data via library.""" + async with timeout(20): + try: + await self.atag.update() + except AtagException as err: + raise UpdateFailed(err) from err diff --git a/homeassistant/components/atag/entity.py b/homeassistant/components/atag/entity.py new file mode 100644 index 00000000000..895c869cf64 --- /dev/null +++ b/homeassistant/components/atag/entity.py @@ -0,0 +1,30 @@ +"""The ATAG Integration.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import DOMAIN +from .coordinator import AtagDataUpdateCoordinator + + +class AtagEntity(CoordinatorEntity[AtagDataUpdateCoordinator]): + """Defines a base Atag entity.""" + + def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None: + """Initialize the Atag entity.""" + super().__init__(coordinator) + + self._id = atag_id + self._attr_name = DOMAIN.title() + self._attr_unique_id = f"{coordinator.atag.id}-{atag_id}" + + @property + def device_info(self) -> DeviceInfo: + """Return info for device registry.""" + return DeviceInfo( + identifiers={(DOMAIN, self.coordinator.atag.id)}, + manufacturer="Atag", + model="Atag One", + name="Atag Thermostat", + sw_version=self.coordinator.atag.apiversion, + ) diff --git a/homeassistant/components/atag/sensor.py b/homeassistant/components/atag/sensor.py index 25a3de34556..bd39f0b3458 100644 --- a/homeassistant/components/atag/sensor.py +++ b/homeassistant/components/atag/sensor.py @@ -1,7 +1,6 @@ """Initialization of ATAG One sensor platform.""" from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, UnitOfPressure, @@ -11,7 +10,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN, AtagEntity +from .coordinator import AtagConfigEntry, AtagDataUpdateCoordinator +from .entity import AtagEntity SENSORS = { "Outside Temperature": "outside_temp", @@ -27,43 +27,43 @@ SENSORS = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AtagConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Initialize sensor platform from config entry.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data async_add_entities([AtagSensor(coordinator, sensor) for sensor in SENSORS]) class AtagSensor(AtagEntity, SensorEntity): """Representation of a AtagOne Sensor.""" - def __init__(self, coordinator, sensor): + def __init__(self, coordinator: AtagDataUpdateCoordinator, sensor: str) -> None: """Initialize Atag sensor.""" super().__init__(coordinator, SENSORS[sensor]) self._attr_name = sensor - if coordinator.data.report[self._id].sensorclass in ( + if coordinator.atag.report[self._id].sensorclass in ( SensorDeviceClass.PRESSURE, SensorDeviceClass.TEMPERATURE, ): - self._attr_device_class = coordinator.data.report[self._id].sensorclass - if coordinator.data.report[self._id].measure in ( + self._attr_device_class = coordinator.atag.report[self._id].sensorclass + if coordinator.atag.report[self._id].measure in ( UnitOfPressure.BAR, UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT, PERCENTAGE, UnitOfTime.HOURS, ): - self._attr_native_unit_of_measurement = coordinator.data.report[ + self._attr_native_unit_of_measurement = coordinator.atag.report[ self._id ].measure @property def native_value(self): """Return the state of the sensor.""" - return self.coordinator.data.report[self._id].state + return self.coordinator.atag.report[self._id].state @property def icon(self): """Return icon.""" - return self.coordinator.data.report[self._id].icon + return self.coordinator.atag.report[self._id].icon diff --git a/homeassistant/components/atag/water_heater.py b/homeassistant/components/atag/water_heater.py index 8bae3df7436..6b013b36885 100644 --- a/homeassistant/components/atag/water_heater.py +++ b/homeassistant/components/atag/water_heater.py @@ -7,24 +7,25 @@ from homeassistant.components.water_heater import ( STATE_PERFORMANCE, WaterHeaterEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, STATE_OFF, Platform, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN, AtagEntity +from .coordinator import AtagConfigEntry +from .entity import AtagEntity OPERATION_LIST = [STATE_OFF, STATE_ECO, STATE_PERFORMANCE] async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AtagConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Initialize DHW device from config entry.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities([AtagWaterHeater(coordinator, Platform.WATER_HEATER)]) + async_add_entities( + [AtagWaterHeater(config_entry.runtime_data, Platform.WATER_HEATER)] + ) class AtagWaterHeater(AtagEntity, WaterHeaterEntity): @@ -36,30 +37,30 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity): @property def current_temperature(self): """Return the current temperature.""" - return self.coordinator.data.dhw.temperature + return self.coordinator.atag.dhw.temperature @property def current_operation(self): """Return current operation.""" - operation = self.coordinator.data.dhw.current_operation + operation = self.coordinator.atag.dhw.current_operation return operation if operation in self.operation_list else STATE_OFF async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" - if await self.coordinator.data.dhw.set_temp(kwargs.get(ATTR_TEMPERATURE)): + if await self.coordinator.atag.dhw.set_temp(kwargs.get(ATTR_TEMPERATURE)): self.async_write_ha_state() @property def target_temperature(self): """Return the setpoint if water demand, otherwise return base temp (comfort level).""" - return self.coordinator.data.dhw.target_temperature + return self.coordinator.atag.dhw.target_temperature @property def max_temp(self) -> float: """Return the maximum temperature.""" - return self.coordinator.data.dhw.max_temp + return self.coordinator.atag.dhw.max_temp @property def min_temp(self) -> float: """Return the minimum temperature.""" - return self.coordinator.data.dhw.min_temp + return self.coordinator.atag.dhw.min_temp diff --git a/homeassistant/components/aten_pe/manifest.json b/homeassistant/components/aten_pe/manifest.json index 3b4ade637cb..1e2c74f2636 100644 --- a/homeassistant/components/aten_pe/manifest.json +++ b/homeassistant/components/aten_pe/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@mtdcr"], "documentation": "https://www.home-assistant.io/integrations/aten_pe", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["atenpdu==0.3.2"] } diff --git a/homeassistant/components/atome/manifest.json b/homeassistant/components/atome/manifest.json index cafe24e2e13..f00dd5ea757 100644 --- a/homeassistant/components/atome/manifest.json +++ b/homeassistant/components/atome/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/atome", "iot_class": "cloud_polling", "loggers": ["pyatome"], + "quality_scale": "legacy", "requirements": ["pyAtome==0.1.1"] } diff --git a/homeassistant/components/august/__init__.py b/homeassistant/components/august/__init__.py index 53aa3cdffd8..434db46384b 100644 --- a/homeassistant/components/august/__init__.py +++ b/homeassistant/components/august/__init__.py @@ -6,15 +6,16 @@ from pathlib import Path from typing import cast from aiohttp import ClientResponseError +from yalexs.const import Brand from yalexs.exceptions import AugustApiAIOHTTPError from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation from yalexs.manager.gateway import Config as YaleXSConfig from homeassistant.config_entries import ConfigEntry from 
homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, issue_registry as ir from .const import DOMAIN, PLATFORMS from .data import AugustData @@ -24,7 +25,27 @@ from .util import async_create_august_clientsession type AugustConfigEntry = ConfigEntry[AugustData] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +@callback +def _async_create_yale_brand_migration_issue( + hass: HomeAssistant, entry: AugustConfigEntry +) -> None: + """Create an issue for a brand migration.""" + ir.async_create_issue( + hass, + DOMAIN, + "yale_brand_migration", + breaks_in_ha_version="2024.9", + learn_more_url="https://www.home-assistant.io/integrations/yale", + translation_key="yale_brand_migration", + is_fixable=False, + severity=ir.IssueSeverity.CRITICAL, + translation_placeholders={ + "migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale" + }, + ) + + +async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool: """Set up August from a config entry.""" session = async_create_august_clientsession(hass) august_gateway = AugustGateway(Path(hass.config.config_dir), session) @@ -40,6 +61,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True +async def async_remove_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> None: + """Remove an August config entry.""" + ir.async_delete_issue(hass, DOMAIN, "yale_brand_migration") + + async def async_unload_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @@ -51,6 +77,8 @@ async def async_setup_august( """Set up the August component.""" config = cast(YaleXSConfig, entry.data) await august_gateway.async_setup(config) + if august_gateway.api.brand == Brand.YALE_HOME: + _async_create_yale_brand_migration_issue(hass, entry) await august_gateway.async_authenticate() await august_gateway.async_refresh_access_token_if_needed() data = entry.runtime_data = AugustData(hass, august_gateway) diff --git a/homeassistant/components/august/binary_sensor.py b/homeassistant/components/august/binary_sensor.py index 6a56692bcd6..fb877252010 100644 --- a/homeassistant/components/august/binary_sensor.py +++ b/homeassistant/components/august/binary_sensor.py @@ -109,12 +109,11 @@ async def async_setup_entry( for description in SENSOR_TYPES_DOORBELL ) - for doorbell in data.doorbells: - entities.extend( - AugustDoorbellBinarySensor(data, doorbell, description) - for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL - ) - + entities.extend( + AugustDoorbellBinarySensor(data, doorbell, description) + for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) async_add_entities(entities) diff --git a/homeassistant/components/august/button.py b/homeassistant/components/august/button.py index 406475db601..79f2b67888a 100644 --- a/homeassistant/components/august/button.py +++ b/homeassistant/components/august/button.py @@ -5,7 +5,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AugustConfigEntry -from .entity import AugustEntityMixin +from .entity import AugustEntity async def async_setup_entry( @@ -18,7 +18,7 @@ async def async_setup_entry( async_add_entities(AugustWakeLockButton(data, lock, "wake") for lock in data.locks) -class AugustWakeLockButton(AugustEntityMixin, ButtonEntity): +class AugustWakeLockButton(AugustEntity, ButtonEntity): """Representation of an August lock wake button.""" _attr_translation_key = "wake" diff --git a/homeassistant/components/august/camera.py b/homeassistant/components/august/camera.py index 4e569e2a91e..f4398455256 100644 --- a/homeassistant/components/august/camera.py +++ b/homeassistant/components/august/camera.py @@ -16,7 +16,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import AugustConfigEntry, AugustData from .const import DEFAULT_NAME, DEFAULT_TIMEOUT -from .entity import AugustEntityMixin +from .entity import AugustEntity _LOGGER = logging.getLogger(__name__) @@ -38,7 +38,7 @@ async def async_setup_entry( ) -class AugustCamera(AugustEntityMixin, Camera): +class AugustCamera(AugustEntity, Camera): """An implementation of an August security camera.""" _attr_translation_key = "camera" diff --git a/homeassistant/components/august/config_flow.py b/homeassistant/components/august/config_flow.py index 2a1a20a9dc4..640b04b384f 100644 --- a/homeassistant/components/august/config_flow.py +++ b/homeassistant/components/august/config_flow.py @@ -9,10 +9,10 @@ from typing import Any import aiohttp import voluptuous as vol from yalexs.authenticator_common import ValidationResult -from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND +from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND, Brand from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback @@ -28,6 +28,12 @@ from .const import ( from .gateway import AugustGateway from .util import async_create_august_clientsession +# The Yale Home Brand is not supported by the August integration +# anymore and should migrate to the Yale integration +AVAILABLE_BRANDS = BRANDS_WITHOUT_OAUTH.copy() +del AVAILABLE_BRANDS[Brand.YALE_HOME] + + _LOGGER = logging.getLogger(__name__) @@ -87,7 +93,6 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN): self._aiohttp_session: aiohttp.ClientSession | None = None self._user_auth_details: dict[str, Any] = {} self._needs_reset = True - self._mode: str | None = None super().__init__() async def async_step_user( @@ -118,7 +123,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN): vol.Required( CONF_BRAND, default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND), - ): vol.In(BRANDS_WITHOUT_OAUTH), + ): vol.In(AVAILABLE_BRANDS), vol.Required( CONF_LOGIN_METHOD, default=self._user_auth_details.get( @@ -141,7 +146,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle validation (2fa) step.""" if user_input: - if self._mode == "reauth": + if self.source == SOURCE_REAUTH: return await self.async_step_reauth_validate(user_input) return await self.async_step_user_validate(user_input) @@ -182,8 +187,6 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self._user_auth_details = dict(entry_data) - self._mode = "reauth" - 
self._needs_reset = True return await self.async_step_reauth_validate() async def async_step_reauth_validate( diff --git a/homeassistant/components/august/entity.py b/homeassistant/components/august/entity.py index babf5c587fb..28c722354ba 100644 --- a/homeassistant/components/august/entity.py +++ b/homeassistant/components/august/entity.py @@ -20,7 +20,7 @@ from .const import MANUFACTURER DEVICE_TYPES = ["keypad", "lock", "camera", "doorbell", "door", "bell"] -class AugustEntityMixin(Entity): +class AugustEntity(Entity): """Base implementation for August device.""" _attr_should_poll = False @@ -87,7 +87,7 @@ class AugustEntityMixin(Entity): self._update_from_data() -class AugustDescriptionEntity(AugustEntityMixin): +class AugustDescriptionEntity(AugustEntity): """An August entity with a description.""" def __init__( diff --git a/homeassistant/components/august/event.py b/homeassistant/components/august/event.py index b65f72272a3..49b14630337 100644 --- a/homeassistant/components/august/event.py +++ b/homeassistant/components/august/event.py @@ -63,22 +63,17 @@ async def async_setup_entry( ) -> None: """Set up the august event platform.""" data = config_entry.runtime_data - entities: list[AugustEventEntity] = [] - - for lock in data.locks: - detail = data.get_device_detail(lock.device_id) - if detail.doorbell: - entities.extend( - AugustEventEntity(data, lock, description) - for description in TYPES_DOORBELL - ) - - for doorbell in data.doorbells: - entities.extend( - AugustEventEntity(data, doorbell, description) - for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL - ) - + entities: list[AugustEventEntity] = [ + AugustEventEntity(data, lock, description) + for description in TYPES_DOORBELL + for lock in data.locks + if (detail := data.get_device_detail(lock.device_id)) and detail.doorbell + ] + entities.extend( + AugustEventEntity(data, doorbell, description) + for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) async_add_entities(entities) @@ -86,7 +81,6 @@ class AugustEventEntity(AugustDescriptionEntity, EventEntity): """An august event entity.""" entity_description: AugustEventEntityDescription - _attr_has_entity_name = True _last_activity: Activity | None = None @callback diff --git a/homeassistant/components/august/lock.py b/homeassistant/components/august/lock.py index 5382c710229..fe5d90371ad 100644 --- a/homeassistant/components/august/lock.py +++ b/homeassistant/components/august/lock.py @@ -19,7 +19,7 @@ from homeassistant.helpers.restore_state import RestoreEntity import homeassistant.util.dt as dt_util from . 
import AugustConfigEntry, AugustData -from .entity import AugustEntityMixin +from .entity import AugustEntity _LOGGER = logging.getLogger(__name__) @@ -36,7 +36,7 @@ async def async_setup_entry( async_add_entities(AugustLock(data, lock) for lock in data.locks) -class AugustLock(AugustEntityMixin, RestoreEntity, LockEntity): +class AugustLock(AugustEntity, RestoreEntity, LockEntity): """Representation of an August lock.""" _attr_name = None diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 42f5dfd8409..d0b41411c96 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -4,10 +4,6 @@ "codeowners": ["@bdraco"], "config_flow": true, "dhcp": [ - { - "hostname": "yale-connect-plus", - "macaddress": "00177A*" - }, { "hostname": "connect", "macaddress": "D86162*" @@ -20,6 +16,10 @@ "hostname": "connect", "macaddress": "2C9FFB*" }, + { + "hostname": "connect", + "macaddress": "789C85*" + }, { "hostname": "august*", "macaddress": "E076D0*" @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.4.2", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] } diff --git a/homeassistant/components/august/sensor.py b/homeassistant/components/august/sensor.py index 7a4c1a92358..b7c0d618492 100644 --- a/homeassistant/components/august/sensor.py +++ b/homeassistant/components/august/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Any, Generic, TypeVar, cast +from typing import Any, cast from yalexs.activity import ActivityType, LockOperationActivity from yalexs.doorbell import Doorbell @@ -42,7 +42,7 @@ from .const import ( OPERATION_METHOD_REMOTE, OPERATION_METHOD_TAG, ) -from .entity import AugustDescriptionEntity, AugustEntityMixin +from .entity import AugustDescriptionEntity, AugustEntity def _retrieve_device_battery_state(detail: LockDetail) -> int: @@ -55,14 +55,13 @@ def _retrieve_linked_keypad_battery_state(detail: KeypadDetail) -> int | None: return detail.battery_percentage -_T = TypeVar("_T", LockDetail, KeypadDetail) - - @dataclass(frozen=True, kw_only=True) -class AugustSensorEntityDescription(SensorEntityDescription, Generic[_T]): +class AugustSensorEntityDescription[T: LockDetail | KeypadDetail]( + SensorEntityDescription +): """Mixin for required keys.""" - value_fn: Callable[[_T], int | None] + value_fn: Callable[[T], int | None] SENSOR_TYPE_DEVICE_BATTERY = AugustSensorEntityDescription[LockDetail]( @@ -114,7 +113,7 @@ async def async_setup_entry( async_add_entities(entities) -class AugustOperatorSensor(AugustEntityMixin, RestoreSensor): +class AugustOperatorSensor(AugustEntity, RestoreSensor): """Representation of an August lock operation sensor.""" _attr_translation_key = "operator" @@ -198,10 +197,12 @@ class AugustOperatorSensor(AugustEntityMixin, RestoreSensor): self._operated_autorelock = last_attrs[ATTR_OPERATION_AUTORELOCK] -class AugustBatterySensor(AugustDescriptionEntity, SensorEntity, Generic[_T]): +class AugustBatterySensor[T: LockDetail | KeypadDetail]( + AugustDescriptionEntity, SensorEntity +): """Representation of an August sensor.""" - entity_description: AugustSensorEntityDescription[_T] + entity_description: AugustSensorEntityDescription[T] _attr_device_class = SensorDeviceClass.BATTERY 
_attr_native_unit_of_measurement = PERCENTAGE diff --git a/homeassistant/components/august/strings.json b/homeassistant/components/august/strings.json index 772a8dca479..e3c97535a55 100644 --- a/homeassistant/components/august/strings.json +++ b/homeassistant/components/august/strings.json @@ -1,4 +1,10 @@ { + "issues": { + "yale_brand_migration": { + "title": "Yale Home has a new integration", + "description": "Add the [Yale integration]({migrate_url}), and remove the August integration as soon as possible to avoid an interruption in service. The Yale Home brand will stop working with the August integration soon and will be removed in a future release." + } + }, "config": { "error": { "unhandled": "Unhandled error: {error}", @@ -14,7 +20,7 @@ "validation": { "title": "Two factor authentication", "data": { - "code": "Verification code" + "verification_code": "Verification code" }, "description": "Please check your {login_method} ({username}) and enter the verification code below. Codes may take a few minutes to arrive." }, diff --git a/homeassistant/components/august/util.py b/homeassistant/components/august/util.py index 6972913ba22..5449d048613 100644 --- a/homeassistant/components/august/util.py +++ b/homeassistant/components/august/util.py @@ -63,16 +63,11 @@ def _activity_time_based(latest: Activity) -> Activity | None: """Get the latest state of the sensor.""" start = latest.activity_start_time end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION - if start <= _native_datetime() <= end: + if start <= datetime.now() <= end: return latest return None -def _native_datetime() -> datetime: - """Return time in the format august uses without timezone.""" - return datetime.now() - - def retrieve_online_state( data: AugustData, detail: DoorbellDetail | LockDetail ) -> bool: diff --git a/homeassistant/components/aurora/__init__.py b/homeassistant/components/aurora/__init__.py index 273f6c6fec2..b6c47cf36b2 100644 --- a/homeassistant/components/aurora/__init__.py +++ b/homeassistant/components/aurora/__init__.py @@ -4,6 +4,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from .const import CONF_THRESHOLD, DEFAULT_THRESHOLD from .coordinator import AuroraDataUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] @@ -21,9 +22,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: AuroraConfigEntry) -> bo await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) return True +async def update_listener(hass: HomeAssistant, entry: AuroraConfigEntry) -> None: + """Handle options update.""" + entry.runtime_data.threshold = int( + entry.options.get(CONF_THRESHOLD, DEFAULT_THRESHOLD) + ) + # refresh the state of the visibility alert binary sensor + await entry.runtime_data.async_request_refresh() + + async def async_unload_entry(hass: HomeAssistant, entry: AuroraConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/aurora/coordinator.py b/homeassistant/components/aurora/coordinator.py index 422dff83922..9771cc53652 100644 --- a/homeassistant/components/aurora/coordinator.py +++ b/homeassistant/components/aurora/coordinator.py @@ -38,8 +38,8 @@ class AuroraDataUpdateCoordinator(DataUpdateCoordinator[int]): ) self.api = AuroraForecast(async_get_clientsession(hass)) - self.latitude = 
int(self.config_entry.data[CONF_LATITUDE]) - self.longitude = int(self.config_entry.data[CONF_LONGITUDE]) + self.latitude = round(self.config_entry.data[CONF_LATITUDE]) + self.longitude = round(self.config_entry.data[CONF_LONGITUDE]) self.threshold = int( self.config_entry.options.get(CONF_THRESHOLD, DEFAULT_THRESHOLD) ) diff --git a/homeassistant/components/aurora/manifest.json b/homeassistant/components/aurora/manifest.json index 018e8ab8135..d94707bfa81 100644 --- a/homeassistant/components/aurora/manifest.json +++ b/homeassistant/components/aurora/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aurora", "iot_class": "cloud_polling", "loggers": ["auroranoaa"], - "requirements": ["auroranoaa==0.0.3"] + "requirements": ["auroranoaa==0.0.5"] } diff --git a/homeassistant/components/aurora/strings.json b/homeassistant/components/aurora/strings.json index 09ec86bdf4d..5ba3a1273fd 100644 --- a/homeassistant/components/aurora/strings.json +++ b/homeassistant/components/aurora/strings.json @@ -14,14 +14,15 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "options": { "step": { "init": { "data": { - "threshold": "Threshold (%)" + "forecast_threshold": "Threshold (%)" } } } diff --git a/homeassistant/components/aurora_abb_powerone/__init__.py b/homeassistant/components/aurora_abb_powerone/__init__.py index 8d236b30d97..749d40aeb5c 100644 --- a/homeassistant/components/aurora_abb_powerone/__init__.py +++ b/homeassistant/components/aurora_abb_powerone/__init__.py @@ -10,21 +10,15 @@ # and add the following to the end of script/bootstrap: # sudo chmod 777 /dev/ttyUSB0 -import logging - -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ADDRESS, CONF_PORT, Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import AuroraAbbDataUpdateCoordinator +from .coordinator import AuroraAbbConfigEntry, AuroraAbbDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] -_LOGGER = logging.getLogger(__name__) - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AuroraAbbConfigEntry) -> bool: """Set up Aurora ABB PowerOne from a config entry.""" comport = entry.data[CONF_PORT] @@ -32,19 +26,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = AuroraAbbDataUpdateCoordinator(hass, comport, address) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AuroraAbbConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - # It should not be necessary to close the serial port because we close - # it after every use in sensor.py, i.e. 
no need to do entry["client"].close() - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/aurora_abb_powerone/config_flow.py b/homeassistant/components/aurora_abb_powerone/config_flow.py index 47c349ab48a..0b6e41257fc 100644 --- a/homeassistant/components/aurora_abb_powerone/config_flow.py +++ b/homeassistant/components/aurora_abb_powerone/config_flow.py @@ -45,7 +45,7 @@ def validate_and_connect( ret[ATTR_SERIAL_NUMBER] = client.serial_number() ret[ATTR_MODEL] = f"{client.version()} ({client.pn()})" ret[ATTR_FIRMWARE] = client.firmware(1) - _LOGGER.info("Returning device info=%s", ret) + _LOGGER.debug("Returning device info=%s", ret) except AuroraError: _LOGGER.warning("Could not connect to device=%s", comport) raise diff --git a/homeassistant/components/aurora_abb_powerone/coordinator.py b/homeassistant/components/aurora_abb_powerone/coordinator.py index 6a84869b2e5..c3d05da95f3 100644 --- a/homeassistant/components/aurora_abb_powerone/coordinator.py +++ b/homeassistant/components/aurora_abb_powerone/coordinator.py @@ -6,6 +6,7 @@ from time import sleep from aurorapy.client import AuroraError, AuroraSerialClient, AuroraTimeoutError from serial import SerialException +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -14,6 +15,9 @@ from .const import DOMAIN, SCAN_INTERVAL _LOGGER = logging.getLogger(__name__) +type AuroraAbbConfigEntry = ConfigEntry[AuroraAbbDataUpdateCoordinator] + + class AuroraAbbDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float]]): """Class to manage fetching AuroraAbbPowerone data.""" @@ -78,9 +82,9 @@ class AuroraAbbDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float]]): finally: if self.available != self.available_prev: if self.available: - _LOGGER.info("Communication with %s back online", self.name) + _LOGGER.warning("Communication with %s back online", self.name) else: - _LOGGER.info( + _LOGGER.warning( "Communication with %s lost", self.name, ) diff --git a/homeassistant/components/aurora_abb_powerone/sensor.py b/homeassistant/components/aurora_abb_powerone/sensor.py index 6e3ebb5f5c9..29d5cab2667 100644 --- a/homeassistant/components/aurora_abb_powerone/sensor.py +++ b/homeassistant/components/aurora_abb_powerone/sensor.py @@ -14,7 +14,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_SERIAL_NUMBER, EntityCategory, @@ -31,7 +30,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . 
import AuroraAbbDataUpdateCoordinator from .const import ( ATTR_DEVICE_NAME, ATTR_FIRMWARE, @@ -40,6 +38,7 @@ from .const import ( DOMAIN, MANUFACTURER, ) +from .coordinator import AuroraAbbConfigEntry, AuroraAbbDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) ALARM_STATES = list(AuroraMapping.ALARM_STATES.values()) @@ -130,12 +129,12 @@ SENSOR_TYPES = [ async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AuroraAbbConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up aurora_abb_powerone sensor based on a config entry.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data data = config_entry.data entities = [AuroraSensor(coordinator, data, sens) for sens in SENSOR_TYPES] diff --git a/homeassistant/components/aussie_broadband/__init__.py b/homeassistant/components/aussie_broadband/__init__.py index 1fc7e47ebde..52b48b1d0d6 100644 --- a/homeassistant/components/aussie_broadband/__init__.py +++ b/homeassistant/components/aussie_broadband/__init__.py @@ -2,28 +2,27 @@ from __future__ import annotations -from datetime import timedelta -import logging - from aiohttp import ClientError from aussiebb.asyncio import AussieBB from aussiebb.const import FETCH_TYPES -from aussiebb.exceptions import AuthenticationException, UnrecognisedServiceType +from aussiebb.exceptions import AuthenticationException -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DEFAULT_UPDATE_INTERVAL, DOMAIN, SERVICE_ID +from .coordinator import ( + AussieBroadbandConfigEntry, + AussieBroadbandDataUpdateCoordinator, +) -_LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: AussieBroadbandConfigEntry +) -> bool: """Set up Aussie Broadband from a config entry.""" # Login to the Aussie Broadband API and retrieve the current service list client = AussieBB( @@ -43,41 +42,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except ClientError as exc: raise ConfigEntryNotReady from exc - # Create an appropriate refresh function - def update_data_factory(service_id): - async def async_update_data(): - try: - return await client.get_usage(service_id) - except UnrecognisedServiceType as err: - raise UpdateFailed(f"Service {service_id} was unrecognised") from err - - return async_update_data - # Initiate a Data Update Coordinator for each service for service in services: - service["coordinator"] = DataUpdateCoordinator( - hass, - _LOGGER, - name=service["service_id"], - update_interval=timedelta(minutes=DEFAULT_UPDATE_INTERVAL), - update_method=update_data_factory(service[SERVICE_ID]), + service["coordinator"] = AussieBroadbandDataUpdateCoordinator( + hass, client, service["service_id"] ) await service["coordinator"].async_config_entry_first_refresh() # Setup the integration - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { - "client": client, - "services": services, - } + entry.runtime_data = services await 
hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: AussieBroadbandConfigEntry +) -> bool: """Unload the config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/aussie_broadband/config_flow.py b/homeassistant/components/aussie_broadband/config_flow.py index b434ab69ae5..72ff0b3b2b2 100644 --- a/homeassistant/components/aussie_broadband/config_flow.py +++ b/homeassistant/components/aussie_broadband/config_flow.py @@ -22,13 +22,14 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + _reauth_username: str + def __init__(self) -> None: """Initialize the config flow.""" self.data: dict = {} self.options: dict = {CONF_SERVICES: []} self.services: list[dict[str, Any]] = [] self.client: AussieBB | None = None - self._reauth_username: str | None = None async def async_auth(self, user_input: dict[str, str]) -> dict[str, str] | None: """Reusable Auth Helper.""" @@ -92,22 +93,16 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] | None = None - if user_input and self._reauth_username: + if user_input: data = { CONF_USERNAME: self._reauth_username, CONF_PASSWORD: user_input[CONF_PASSWORD], } if not (errors := await self.async_auth(data)): - entry = await self.async_set_unique_id(self._reauth_username.lower()) - if entry: - self.hass.config_entries.async_update_entry( - entry, - data=data, - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") - return self.async_create_entry(title=self._reauth_username, data=data) + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/aussie_broadband/const.py b/homeassistant/components/aussie_broadband/const.py index ad19b7d8a27..ecc0bb89de4 100644 --- a/homeassistant/components/aussie_broadband/const.py +++ b/homeassistant/components/aussie_broadband/const.py @@ -1,6 +1,8 @@ """Constants for the Aussie Broadband integration.""" +from typing import Final + DEFAULT_UPDATE_INTERVAL = 30 DOMAIN = "aussie_broadband" -SERVICE_ID = "service_id" +SERVICE_ID: Final = "service_id" CONF_SERVICES = "services" diff --git a/homeassistant/components/aussie_broadband/coordinator.py b/homeassistant/components/aussie_broadband/coordinator.py new file mode 100644 index 00000000000..844442985c0 --- /dev/null +++ b/homeassistant/components/aussie_broadband/coordinator.py @@ -0,0 +1,53 @@ +"""Coordinator for the Aussie Broadband integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import Any, TypedDict + +from aussiebb.asyncio import AussieBB +from aussiebb.exceptions import UnrecognisedServiceType + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DEFAULT_UPDATE_INTERVAL + +_LOGGER = logging.getLogger(__name__) + + +class AussieBroadbandServiceData(TypedDict, total=False): + """Aussie Broadband service information, extended with 
the coordinator.""" + + coordinator: AussieBroadbandDataUpdateCoordinator + description: str + name: str + service_id: str + type: str + + +type AussieBroadbandConfigEntry = ConfigEntry[list[AussieBroadbandServiceData]] + + +class AussieBroadbandDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Aussie Broadband data update coordinator.""" + + def __init__(self, hass: HomeAssistant, client: AussieBB, service_id: str) -> None: + """Initialize Aussie Broadband coordinator.""" + super().__init__( + hass, + _LOGGER, + name=f"Aussie Broadband {service_id}", + update_interval=timedelta(minutes=DEFAULT_UPDATE_INTERVAL), + ) + self._client = client + self._service_id = service_id + + async def _async_update_data(self) -> dict[str, Any]: + """Update data via library.""" + try: + return await self._client.get_usage(self._service_id) + except UnrecognisedServiceType as err: + raise UpdateFailed(f"Service {self._service_id} was unrecognised") from err diff --git a/homeassistant/components/aussie_broadband/diagnostics.py b/homeassistant/components/aussie_broadband/diagnostics.py index c71cfd090da..9c68c068bb0 100644 --- a/homeassistant/components/aussie_broadband/diagnostics.py +++ b/homeassistant/components/aussie_broadband/diagnostics.py @@ -5,16 +5,15 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN +from .coordinator import AussieBroadbandConfigEntry TO_REDACT = ["address", "ipAddresses", "description", "discounts", "coordinator"] async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: AussieBroadbandConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" return { @@ -23,6 +22,6 @@ async def async_get_config_entry_diagnostics( "service": async_redact_data(service, TO_REDACT), "usage": async_redact_data(service["coordinator"].data, ["historical"]), } - for service in hass.data[DOMAIN][config_entry.entry_id]["services"] + for service in config_entry.runtime_data ] } diff --git a/homeassistant/components/aussie_broadband/manifest.json b/homeassistant/components/aussie_broadband/manifest.json index 877a46a3650..456b8962461 100644 --- a/homeassistant/components/aussie_broadband/manifest.json +++ b/homeassistant/components/aussie_broadband/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aussie_broadband", "iot_class": "cloud_polling", "loggers": ["aussiebb"], - "requirements": ["pyaussiebb==0.0.15"] + "requirements": ["pyaussiebb==0.1.4"] } diff --git a/homeassistant/components/aussie_broadband/sensor.py b/homeassistant/components/aussie_broadband/sensor.py index 49796b3f6cd..49da78da8de 100644 --- a/homeassistant/components/aussie_broadband/sensor.py +++ b/homeassistant/components/aussie_broadband/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass import re -from typing import Any, cast +from typing import cast from homeassistant.components.sensor import ( SensorDeviceClass, @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfInformation, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry
import DeviceEntryType, DeviceInfo @@ -22,6 +21,11 @@ from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, SERVICE_ID +from .coordinator import ( + AussieBroadbandConfigEntry, + AussieBroadbandDataUpdateCoordinator, + AussieBroadbandServiceData, +) @dataclass(frozen=True) @@ -117,28 +121,34 @@ SENSOR_DESCRIPTIONS: tuple[SensorValueEntityDescription, ...] = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: AussieBroadbandConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Aussie Broadband sensor platform from a config entry.""" async_add_entities( [ AussieBroadandSensorEntity(service, description) - for service in hass.data[DOMAIN][entry.entry_id]["services"] + for service in entry.runtime_data for description in SENSOR_DESCRIPTIONS if description.key in service["coordinator"].data ] ) -class AussieBroadandSensorEntity(CoordinatorEntity, SensorEntity): +class AussieBroadandSensorEntity( + CoordinatorEntity[AussieBroadbandDataUpdateCoordinator], SensorEntity +): """Base class for Aussie Broadband metric sensors.""" _attr_has_entity_name = True entity_description: SensorValueEntityDescription def __init__( - self, service: dict[str, Any], description: SensorValueEntityDescription + self, + service: AussieBroadbandServiceData, + description: SensorValueEntityDescription, ) -> None: """Initialize the sensor.""" super().__init__(service["coordinator"]) diff --git a/homeassistant/components/autarco/__init__.py b/homeassistant/components/autarco/__init__.py index 0e29b25ad80..f42bfdf4a0e 100644 --- a/homeassistant/components/autarco/__init__.py +++ b/homeassistant/components/autarco/__init__.py @@ -4,11 +4,12 @@ from __future__ import annotations import asyncio -from autarco import Autarco +from autarco import Autarco, AutarcoConnectionError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .coordinator import AutarcoDataUpdateCoordinator @@ -25,7 +26,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutarcoConfigEntry) -> b password=entry.data[CONF_PASSWORD], session=async_get_clientsession(hass), ) - account_sites = await client.get_account() + + try: + account_sites = await client.get_account() + except AutarcoConnectionError as err: + await client.close() + raise ConfigEntryNotReady from err coordinators: list[AutarcoDataUpdateCoordinator] = [ AutarcoDataUpdateCoordinator(hass, client, site) for site in account_sites diff --git a/homeassistant/components/autarco/config_flow.py b/homeassistant/components/autarco/config_flow.py index a66f14047a7..294fa685fb8 100644 --- a/homeassistant/components/autarco/config_flow.py +++ b/homeassistant/components/autarco/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any from autarco import Autarco, AutarcoAuthenticationError, AutarcoConnectionError @@ -20,6 +21,12 @@ DATA_SCHEMA = vol.Schema( } ) +STEP_REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } +) + class AutarcoConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Autarco.""" @@ -55,3 +62,40 @@ class 
AutarcoConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, data_schema=DATA_SCHEMA, ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication request from Autarco.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication confirmation.""" + errors = {} + + reauth_entry = self._get_reauth_entry() + if user_input is not None: + client = Autarco( + email=reauth_entry.data[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.get_account() + except AutarcoAuthenticationError: + errors["base"] = "invalid_auth" + except AutarcoConnectionError: + errors["base"] = "cannot_connect" + else: + return self.async_update_reload_and_abort( + reauth_entry, + data_updates=user_input, + ) + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={"email": reauth_entry.data[CONF_EMAIL]}, + data_schema=STEP_REAUTH_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/autarco/coordinator.py b/homeassistant/components/autarco/coordinator.py index 82eb4439a86..dd8786bca25 100644 --- a/homeassistant/components/autarco/coordinator.py +++ b/homeassistant/components/autarco/coordinator.py @@ -4,11 +4,21 @@ from __future__ import annotations from typing import NamedTuple -from autarco import AccountSite, Autarco, Inverter, Solar +from autarco import ( + AccountSite, + Autarco, + AutarcoAuthenticationError, + AutarcoConnectionError, + Battery, + Inverter, + Site, + Solar, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, LOGGER, SCAN_INTERVAL @@ -18,6 +28,8 @@ class AutarcoData(NamedTuple): solar: Solar inverters: dict[str, Inverter] + site: Site + battery: Battery | None class AutarcoDataUpdateCoordinator(DataUpdateCoordinator[AutarcoData]): @@ -29,7 +41,7 @@ class AutarcoDataUpdateCoordinator(DataUpdateCoordinator[AutarcoData]): self, hass: HomeAssistant, client: Autarco, - site: AccountSite, + account_site: AccountSite, ) -> None: """Initialize global Autarco data updater.""" super().__init__( @@ -39,11 +51,24 @@ class AutarcoDataUpdateCoordinator(DataUpdateCoordinator[AutarcoData]): update_interval=SCAN_INTERVAL, ) self.client = client - self.site = site + self.account_site = account_site async def _async_update_data(self) -> AutarcoData: """Fetch data from Autarco API.""" + battery = None + try: + site = await self.client.get_site(self.account_site.public_key) + solar = await self.client.get_solar(self.account_site.public_key) + inverters = await self.client.get_inverters(self.account_site.public_key) + if site.has_battery: + battery = await self.client.get_battery(self.account_site.public_key) + except AutarcoAuthenticationError as err: + raise ConfigEntryAuthFailed(err) from err + except AutarcoConnectionError as err: + raise UpdateFailed(err) from err return AutarcoData( - solar=await self.client.get_solar(self.site.public_key), - inverters=await self.client.get_inverters(self.site.public_key), + solar=solar, + inverters=inverters, + site=site, + battery=battery, ) diff --git 
a/homeassistant/components/autarco/diagnostics.py b/homeassistant/components/autarco/diagnostics.py index d1b082fd307..c865a38ffd8 100644 --- a/homeassistant/components/autarco/diagnostics.py +++ b/homeassistant/components/autarco/diagnostics.py @@ -18,9 +18,9 @@ async def async_get_config_entry_diagnostics( return { "sites_data": [ { - "id": coordinator.site.site_id, - "name": coordinator.site.system_name, - "health": coordinator.site.health, + "id": coordinator.account_site.site_id, + "name": coordinator.account_site.system_name, + "health": coordinator.account_site.health, "solar": { "power_production": coordinator.data.solar.power_production, "energy_production_today": coordinator.data.solar.energy_production_today, @@ -37,6 +37,23 @@ async def async_get_config_entry_diagnostics( } for inverter in coordinator.data.inverters.values() ], + **( + { + "battery": { + "flow_now": coordinator.data.battery.flow_now, + "net_charged_now": coordinator.data.battery.net_charged_now, + "state_of_charge": coordinator.data.battery.state_of_charge, + "discharged_today": coordinator.data.battery.discharged_today, + "discharged_month": coordinator.data.battery.discharged_month, + "discharged_total": coordinator.data.battery.discharged_total, + "charged_today": coordinator.data.battery.charged_today, + "charged_month": coordinator.data.battery.charged_month, + "charged_total": coordinator.data.battery.charged_total, + } + } + if coordinator.data.battery is not None + else {} + ), } for coordinator in autarco_data ], diff --git a/homeassistant/components/autarco/icons.json b/homeassistant/components/autarco/icons.json new file mode 100644 index 00000000000..782e8b604bb --- /dev/null +++ b/homeassistant/components/autarco/icons.json @@ -0,0 +1,48 @@ +{ + "entity": { + "sensor": { + "power_production": { + "default": "mdi:flash" + }, + "energy_production_today": { + "default": "mdi:solar-power" + }, + "energy_production_month": { + "default": "mdi:solar-power" + }, + "energy_production_total": { + "default": "mdi:solar-power" + }, + "out_ac_power": { + "default": "mdi:flash" + }, + "out_ac_energy_total": { + "default": "mdi:solar-power" + }, + "flow_now": { + "default": "mdi:flash" + }, + "state_of_charge": { + "default": "mdi:home-battery" + }, + "discharged_today": { + "default": "mdi:battery-arrow-down" + }, + "discharged_month": { + "default": "mdi:battery-arrow-down" + }, + "discharged_total": { + "default": "mdi:battery-arrow-down" + }, + "charged_today": { + "default": "mdi:battery-arrow-up" + }, + "charged_month": { + "default": "mdi:battery-arrow-up" + }, + "charged_total": { + "default": "mdi:battery-arrow-up" + } + } + } +} diff --git a/homeassistant/components/autarco/manifest.json b/homeassistant/components/autarco/manifest.json index f0900472b1e..0567aeba722 100644 --- a/homeassistant/components/autarco/manifest.json +++ b/homeassistant/components/autarco/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/autarco", "iot_class": "cloud_polling", - "requirements": ["autarco==2.0.0"] + "requirements": ["autarco==3.1.0"] } diff --git a/homeassistant/components/autarco/quality_scale.yaml b/homeassistant/components/autarco/quality_scale.yaml new file mode 100644 index 00000000000..d2e1455af7e --- /dev/null +++ b/homeassistant/components/autarco/quality_scale.yaml @@ -0,0 +1,99 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. 
+ appropriate-polling: done + brands: done + common-modules: + status: todo + comment: | + The entity.py file is not used in this integration. + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: exempt + comment: | + This integration only polls data using a coordinator. + Since the integration is read-only and poll-only (it only provides sensor + data), there is no need to implement parallel updates. + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + This integration cannot be discovered; it connects to a service + provider, which uses the user's home address to get the data. + discovery: + status: exempt + comment: | + This integration cannot be discovered; it connects to a service + provider, which uses the user's home address to get the data. + docs-data-update: done + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: + status: exempt + comment: | + This is a service, which doesn't integrate with any devices. + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: done + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: | + This integration does not have any entities that should be disabled by default. + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed.
+ stale-devices: todo + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/autarco/sensor.py b/homeassistant/components/autarco/sensor.py index 2352cdee060..c870197a504 100644 --- a/homeassistant/components/autarco/sensor.py +++ b/homeassistant/components/autarco/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from autarco import Inverter, Solar +from autarco import Battery, Inverter, Solar from homeassistant.components.sensor import ( SensorDeviceClass, @@ -13,7 +13,7 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import UnitOfEnergy, UnitOfPower +from homeassistant.const import PERCENTAGE, UnitOfEnergy, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -25,6 +25,81 @@ from .const import DOMAIN from .coordinator import AutarcoDataUpdateCoordinator +@dataclass(frozen=True, kw_only=True) +class AutarcoBatterySensorEntityDescription(SensorEntityDescription): + """Describes an Autarco sensor entity.""" + + value_fn: Callable[[Battery], StateType] + + +SENSORS_BATTERY: tuple[AutarcoBatterySensorEntityDescription, ...] = ( + AutarcoBatterySensorEntityDescription( + key="flow_now", + translation_key="flow_now", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda battery: battery.flow_now, + ), + AutarcoBatterySensorEntityDescription( + key="state_of_charge", + translation_key="state_of_charge", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.BATTERY, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda battery: battery.state_of_charge, + ), + AutarcoBatterySensorEntityDescription( + key="discharged_today", + translation_key="discharged_today", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL, + value_fn=lambda battery: battery.discharged_today, + ), + AutarcoBatterySensorEntityDescription( + key="discharged_month", + translation_key="discharged_month", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL, + value_fn=lambda battery: battery.discharged_month, + ), + AutarcoBatterySensorEntityDescription( + key="discharged_total", + translation_key="discharged_total", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda battery: battery.discharged_total, + ), + AutarcoBatterySensorEntityDescription( + key="charged_today", + translation_key="charged_today", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL, + value_fn=lambda battery: battery.charged_today, + ), + AutarcoBatterySensorEntityDescription( + key="charged_month", + translation_key="charged_month", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL, + value_fn=lambda battery: battery.charged_month, + ), + AutarcoBatterySensorEntityDescription( + key="charged_total", + 
translation_key="charged_total", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda battery: battery.charged_total, + ), +) + + @dataclass(frozen=True, kw_only=True) class AutarcoSolarSensorEntityDescription(SensorEntityDescription): """Describes an Autarco sensor entity.""" @@ -46,6 +121,7 @@ SENSORS_SOLAR: tuple[AutarcoSolarSensorEntityDescription, ...] = ( translation_key="energy_production_today", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL, value_fn=lambda solar: solar.energy_production_today, ), AutarcoSolarSensorEntityDescription( @@ -53,6 +129,7 @@ SENSORS_SOLAR: tuple[AutarcoSolarSensorEntityDescription, ...] = ( translation_key="energy_production_month", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL, value_fn=lambda solar: solar.energy_production_month, ), AutarcoSolarSensorEntityDescription( @@ -117,9 +194,52 @@ async def async_setup_entry( for description in SENSORS_INVERTER for inverter in coordinator.data.inverters ) + if coordinator.data.battery: + entities.extend( + AutarcoBatterySensorEntity( + coordinator=coordinator, + description=description, + ) + for description in SENSORS_BATTERY + ) async_add_entities(entities) +class AutarcoBatterySensorEntity( + CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity +): + """Defines an Autarco battery sensor.""" + + entity_description: AutarcoBatterySensorEntityDescription + _attr_has_entity_name = True + + def __init__( + self, + *, + coordinator: AutarcoDataUpdateCoordinator, + description: AutarcoBatterySensorEntityDescription, + ) -> None: + """Initialize Autarco sensor.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = ( + f"{coordinator.account_site.site_id}_battery_{description.key}" + ) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{coordinator.account_site.site_id}_battery")}, + entry_type=DeviceEntryType.SERVICE, + manufacturer="Autarco", + name="Battery", + ) + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + assert self.coordinator.data.battery is not None + return self.entity_description.value_fn(self.coordinator.data.battery) + + class AutarcoSolarSensorEntity( CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity ): @@ -138,9 +258,11 @@ class AutarcoSolarSensorEntity( super().__init__(coordinator) self.entity_description = description - self._attr_unique_id = f"{coordinator.site.site_id}_solar_{description.key}" + self._attr_unique_id = ( + f"{coordinator.account_site.site_id}_solar_{description.key}" + ) self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, f"{coordinator.site.site_id}_solar")}, + identifiers={(DOMAIN, f"{coordinator.account_site.site_id}_solar")}, entry_type=DeviceEntryType.SERVICE, manufacturer="Autarco", name="Solar", diff --git a/homeassistant/components/autarco/strings.json b/homeassistant/components/autarco/strings.json index 2eff962a13a..a053cd36e09 100644 --- a/homeassistant/components/autarco/strings.json +++ b/homeassistant/components/autarco/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "user": { - "description": "Connect to your Autarco account to get information about your solar panels.", + "description": "Connect to your Autarco account, to get information about 
your sites.", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" @@ -11,6 +11,16 @@ "email": "The email address of your Autarco account.", "password": "The password of your Autarco account." } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The password for {email} is no longer valid.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::autarco::config::step::user::data_description::password%]" + } } }, "error": { @@ -18,11 +28,36 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { "sensor": { + "flow_now": { + "name": "Flow now" + }, + "state_of_charge": { + "name": "State of charge" + }, + "discharged_today": { + "name": "Discharged today" + }, + "discharged_month": { + "name": "Discharged month" + }, + "discharged_total": { + "name": "Discharged total" + }, + "charged_today": { + "name": "Charged today" + }, + "charged_month": { + "name": "Charged month" + }, + "charged_total": { + "name": "Charged total" + }, "power_production": { "name": "Power production" }, diff --git a/homeassistant/components/auth/__init__.py b/homeassistant/components/auth/__init__.py index cef7af4df92..27eed49e5ca 100644 --- a/homeassistant/components/auth/__init__.py +++ b/homeassistant/components/auth/__init__.py @@ -159,6 +159,7 @@ from homeassistant.helpers.config_entry_oauth2_flow import OAuth2AuthorizeCallba from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util import dt as dt_util +from homeassistant.util.hass_dict import HassKey from . 
import indieauth, login_flow, mfa_setup_flow @@ -166,7 +167,7 @@ DOMAIN = "auth" type StoreResultType = Callable[[str, Credentials], str] type RetrieveResultType = Callable[[str, str], Credentials | None] - +DATA_STORE: HassKey[StoreResultType] = HassKey(DOMAIN) CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) DELETE_CURRENT_TOKEN_DELAY = 2 @@ -177,14 +178,14 @@ def create_auth_code( hass: HomeAssistant, client_id: str, credential: Credentials ) -> str: """Create an authorization code to fetch tokens.""" - return cast(StoreResultType, hass.data[DOMAIN])(client_id, credential) + return hass.data[DATA_STORE](client_id, credential) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Component to allow users to login.""" store_result, retrieve_result = _create_auth_code_store() - hass.data[DOMAIN] = store_result + hass.data[DATA_STORE] = store_result hass.http.register_view(TokenView(retrieve_result)) hass.http.register_view(RevokeTokenView()) diff --git a/homeassistant/components/auth/login_flow.py b/homeassistant/components/auth/login_flow.py index 3664c3ca5c9..d27235123b9 100644 --- a/homeassistant/components/auth/login_flow.py +++ b/homeassistant/components/auth/login_flow.py @@ -80,7 +80,7 @@ import voluptuous_serialize from homeassistant import data_entry_flow from homeassistant.auth import AuthManagerFlowManager, InvalidAuthError -from homeassistant.auth.models import AuthFlowResult, Credentials +from homeassistant.auth.models import AuthFlowContext, AuthFlowResult, Credentials from homeassistant.components import onboarding from homeassistant.components.http import KEY_HASS from homeassistant.components.http.auth import async_user_not_allowed_do_auth @@ -322,11 +322,11 @@ class LoginFlowIndexView(LoginFlowBaseView): try: result = await self._flow_mgr.async_init( handler, - context={ - "ip_address": ip_address(request.remote), # type: ignore[arg-type] - "credential_only": data.get("type") == "link_user", - "redirect_uri": redirect_uri, - }, + context=AuthFlowContext( + ip_address=ip_address(request.remote), # type: ignore[arg-type] + credential_only=data.get("type") == "link_user", + redirect_uri=redirect_uri, + ), ) except data_entry_flow.UnknownHandler: return self.json_message("Invalid handler specified", HTTPStatus.NOT_FOUND) diff --git a/homeassistant/components/auth/mfa_setup_flow.py b/homeassistant/components/auth/mfa_setup_flow.py index 8ae55396fa9..c9efb081a01 100644 --- a/homeassistant/components/auth/mfa_setup_flow.py +++ b/homeassistant/components/auth/mfa_setup_flow.py @@ -11,7 +11,9 @@ import voluptuous_serialize from homeassistant import data_entry_flow from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback +from homeassistant.data_entry_flow import FlowContext import homeassistant.helpers.config_validation as cv +from homeassistant.util.hass_dict import HassKey WS_TYPE_SETUP_MFA = "auth/setup_mfa" SCHEMA_WS_SETUP_MFA = vol.All( @@ -31,7 +33,7 @@ SCHEMA_WS_DEPOSE_MFA = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend( {vol.Required("type"): WS_TYPE_DEPOSE_MFA, vol.Required("mfa_module_id"): str} ) -DATA_SETUP_FLOW_MGR = "auth_mfa_setup_flow_manager" +DATA_SETUP_FLOW_MGR: HassKey[MfaFlowManager] = HassKey("auth_mfa_setup_flow_manager") _LOGGER = logging.getLogger(__name__) @@ -43,7 +45,7 @@ class MfaFlowManager(data_entry_flow.FlowManager): self, handler_key: str, *, - context: dict[str, Any], + context: FlowContext | None, data: dict[str, Any], ) -> data_entry_flow.FlowHandler: """Create a setup flow. 
handler is a mfa module.""" @@ -57,7 +59,11 @@ class MfaFlowManager(data_entry_flow.FlowManager): async def async_finish_flow( self, flow: data_entry_flow.FlowHandler, result: data_entry_flow.FlowResult ) -> data_entry_flow.FlowResult: - """Complete an mfs setup flow.""" + """Complete an mfa setup flow. + + This method is called when a flow step returns FlowResultType.ABORT or + FlowResultType.CREATE_ENTRY. + """ _LOGGER.debug("flow_result: %s", result) return result @@ -85,7 +91,7 @@ def websocket_setup_mfa( async def async_setup_flow(msg: dict[str, Any]) -> None: """Return a setup flow for mfa auth module.""" - flow_manager: MfaFlowManager = hass.data[DATA_SETUP_FLOW_MGR] + flow_manager = hass.data[DATA_SETUP_FLOW_MGR] if (flow_id := msg.get("flow_id")) is not None: result = await flow_manager.async_configure(flow_id, msg.get("user_input")) diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index 8ab9c478bc4..bd8af526d75 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -6,10 +6,10 @@ from abc import ABC, abstractmethod import asyncio from collections.abc import Callable, Mapping from dataclasses import dataclass -from functools import cached_property, partial import logging from typing import Any, Protocol, cast +from propcache import cached_property import voluptuous as vol from homeassistant.components import websocket_api @@ -19,7 +19,7 @@ from homeassistant.const import ( ATTR_MODE, ATTR_NAME, CONF_ALIAS, - CONF_CONDITION, + CONF_CONDITIONS, CONF_DEVICE_ID, CONF_ENTITY_ID, CONF_EVENT_DATA, @@ -47,22 +47,9 @@ from homeassistant.core import ( split_entity_id, valid_entity_id, ) -from homeassistant.exceptions import ( - ConditionError, - ConditionErrorContainer, - ConditionErrorIndex, - HomeAssistantError, - ServiceNotFound, - TemplateError, -) +from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError from homeassistant.helpers import condition import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstant, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.issue_registry import ( @@ -92,23 +79,19 @@ from homeassistant.helpers.trace import ( trace_get, trace_path, ) -from homeassistant.helpers.trigger import ( - TriggerActionType, - TriggerData, - TriggerInfo, - async_initialize_triggers, -) +from homeassistant.helpers.trigger import async_initialize_triggers from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util.dt import parse_datetime +from homeassistant.util.hass_dict import HassKey from .config import AutomationConfig, ValidationStatus from .const import ( - CONF_ACTION, + CONF_ACTIONS, CONF_INITIAL_STATE, CONF_TRACE, - CONF_TRIGGER, CONF_TRIGGER_VARIABLES, + CONF_TRIGGERS, DEFAULT_INITIAL_STATE, DOMAIN, LOGGER, @@ -116,6 +99,7 @@ from .const import ( from .helpers import async_get_blueprints from .trace import trace_automation +DATA_COMPONENT: HassKey[EntityComponent[BaseAutomationEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" @@ -141,20 +125,6 @@ class IfAction(Protocol): """AND all conditions.""" -# AutomationActionType, AutomationTriggerData, -# and AutomationTriggerInfo are deprecated as of 
2022.9. -# Can be removed in 2025.1 -_DEPRECATED_AutomationActionType = DeprecatedConstant( - TriggerActionType, "TriggerActionType", "2025.1" -) -_DEPRECATED_AutomationTriggerData = DeprecatedConstant( - TriggerData, "TriggerData", "2025.1" -) -_DEPRECATED_AutomationTriggerInfo = DeprecatedConstant( - TriggerInfo, "TriggerInfo", "2025.1" -) - - @bind_hass def is_on(hass: HomeAssistant, entity_id: str) -> bool: """Return true if specified automation entity_id is on. @@ -168,14 +138,12 @@ def _automations_with_x( hass: HomeAssistant, referenced_id: str, property_name: str ) -> list[str]: """Return all automations that reference the x.""" - if DOMAIN not in hass.data: + if DATA_COMPONENT not in hass.data: return [] - component: EntityComponent[BaseAutomationEntity] = hass.data[DOMAIN] - return [ automation_entity.entity_id - for automation_entity in component.entities + for automation_entity in hass.data[DATA_COMPONENT].entities if referenced_id in getattr(automation_entity, property_name) ] @@ -184,12 +152,10 @@ def _x_in_automation( hass: HomeAssistant, entity_id: str, property_name: str ) -> list[str]: """Return all x in an automation.""" - if DOMAIN not in hass.data: + if DATA_COMPONENT not in hass.data: return [] - component: EntityComponent[BaseAutomationEntity] = hass.data[DOMAIN] - - if (automation_entity := component.get_entity(entity_id)) is None: + if (automation_entity := hass.data[DATA_COMPONENT].get_entity(entity_id)) is None: return [] return list(getattr(automation_entity, property_name)) @@ -261,11 +227,9 @@ def automations_with_blueprint(hass: HomeAssistant, blueprint_path: str) -> list if DOMAIN not in hass.data: return [] - component: EntityComponent[BaseAutomationEntity] = hass.data[DOMAIN] - return [ automation_entity.entity_id - for automation_entity in component.entities + for automation_entity in hass.data[DATA_COMPONENT].entities if automation_entity.referenced_blueprint == blueprint_path ] @@ -273,12 +237,10 @@ def automations_with_blueprint(hass: HomeAssistant, blueprint_path: str) -> list @callback def blueprint_in_automation(hass: HomeAssistant, entity_id: str) -> str | None: """Return the blueprint the automation is based on or None.""" - if DOMAIN not in hass.data: + if DATA_COMPONENT not in hass.data: return None - component: EntityComponent[BaseAutomationEntity] = hass.data[DOMAIN] - - if (automation_entity := component.get_entity(entity_id)) is None: + if (automation_entity := hass.data[DATA_COMPONENT].get_entity(entity_id)) is None: return None return automation_entity.referenced_blueprint @@ -286,7 +248,7 @@ def blueprint_in_automation(hass: HomeAssistant, entity_id: str) -> str | None: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up all automations.""" - hass.data[DOMAIN] = component = EntityComponent[BaseAutomationEntity]( + hass.data[DATA_COMPONENT] = component = EntityComponent[BaseAutomationEntity]( LOGGER, DOMAIN, hass ) @@ -489,6 +451,7 @@ class UnavailableAutomationEntity(BaseAutomationEntity): ) async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() async_delete_issue( self.hass, DOMAIN, f"{self.entity_id}_validation_{self._validation_status}" @@ -968,7 +931,7 @@ async def _create_automation_entities( action_script = Script( hass, - config_block[CONF_ACTION], + config_block[CONF_ACTIONS], name, DOMAIN, running_description="automation actions", @@ -981,7 +944,7 @@ async def _create_automation_entities( # and so will pass 
them on to the script. ) - if CONF_CONDITION in config_block: + if CONF_CONDITIONS in config_block: cond_func = await _async_process_if(hass, name, config_block) if cond_func is None: @@ -991,20 +954,20 @@ async def _create_automation_entities( # Add trigger variables to variables variables = None - if CONF_TRIGGER_VARIABLES in config_block: + if CONF_TRIGGER_VARIABLES in config_block and CONF_VARIABLES in config_block: variables = ScriptVariables( dict(config_block[CONF_TRIGGER_VARIABLES].as_dict()) ) - if CONF_VARIABLES in config_block: - if variables: - variables.variables.update(config_block[CONF_VARIABLES].as_dict()) - else: - variables = config_block[CONF_VARIABLES] + variables.variables.update(config_block[CONF_VARIABLES].as_dict()) + elif CONF_TRIGGER_VARIABLES in config_block: + variables = config_block[CONF_TRIGGER_VARIABLES] + elif CONF_VARIABLES in config_block: + variables = config_block[CONF_VARIABLES] entity = AutomationEntity( automation_id, name, - config_block[CONF_TRIGGER], + config_block[CONF_TRIGGERS], cond_func, action_script, initial_state, @@ -1144,40 +1107,15 @@ async def _async_process_if( hass: HomeAssistant, name: str, config: dict[str, Any] ) -> IfAction | None: """Process if checks.""" - if_configs = config[CONF_CONDITION] + if_configs = config[CONF_CONDITIONS] - checks: list[condition.ConditionCheckerType] = [] - for if_config in if_configs: - try: - checks.append(await condition.async_from_config(hass, if_config)) - except HomeAssistantError as ex: - LOGGER.warning("Invalid condition: %s", ex) - return None - - def if_action(variables: Mapping[str, Any] | None = None) -> bool: - """AND all conditions.""" - errors: list[ConditionErrorIndex] = [] - for index, check in enumerate(checks): - try: - with trace_path(["condition", str(index)]): - if check(hass, variables) is False: - return False - except ConditionError as ex: - errors.append( - ConditionErrorIndex( - "condition", index=index, total=len(checks), error=ex - ) - ) - - if errors: - LOGGER.warning( - "Error evaluating condition in '%s':\n%s", - name, - ConditionErrorContainer("condition", errors=errors), - ) - return False - - return True + try: + if_action = await condition.async_conditions_from_config( + hass, if_configs, LOGGER, name + ) + except HomeAssistantError as ex: + LOGGER.warning("Invalid condition: %s", ex) + return None result: IfAction = if_action # type: ignore[assignment] result.config = if_configs @@ -1242,9 +1180,7 @@ def websocket_config( msg: dict[str, Any], ) -> None: """Get automation config.""" - component: EntityComponent[BaseAutomationEntity] = hass.data[DOMAIN] - - automation = component.get_entity(msg["entity_id"]) + automation = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"]) if automation is None: connection.send_error( @@ -1258,11 +1194,3 @@ def websocket_config( "config": automation.raw_config, }, ) - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/automation/blueprints/motion_light.yaml b/homeassistant/components/automation/blueprints/motion_light.yaml index ad9c6f0286b..11900708b19 100644 --- a/homeassistant/components/automation/blueprints/motion_light.yaml +++ b/homeassistant/components/automation/blueprints/motion_light.yaml @@ -35,24 +35,24 @@ blueprint: mode: restart 
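The typed HassKey entries introduced above (DATA_STORE in auth, DATA_SETUP_FLOW_MGR in the MFA setup flow, DATA_COMPONENT in automation) replace bare string keys plus cast() calls: because the key itself carries the stored type, hass.data lookups are type-checked with no annotation at the call site. A minimal sketch of that pattern as these modules use it; the domain name and stored type below are illustrative only, not part of the patch:

from homeassistant.core import HomeAssistant
from homeassistant.util.hass_dict import HassKey

# At runtime this is still just a string key; the type parameter exists for mypy only.
DATA_EXAMPLE: HassKey[dict[str, int]] = HassKey("example_domain")


def store_counts(hass: HomeAssistant) -> None:
    """Store typed data under the key."""
    hass.data[DATA_EXAMPLE] = {"sensors": 3}


def read_sensor_count(hass: HomeAssistant) -> int:
    """Read it back; hass.data[DATA_EXAMPLE] is inferred as dict[str, int], no cast() needed."""
    return hass.data[DATA_EXAMPLE]["sensors"]
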
max_exceeded: silent -trigger: - platform: state +triggers: + trigger: state entity_id: !input motion_entity from: "off" to: "on" -action: +actions: - alias: "Turn on the light" - service: light.turn_on + action: light.turn_on target: !input light_target - alias: "Wait until there is no motion from device" wait_for_trigger: - platform: state + trigger: state entity_id: !input motion_entity from: "on" to: "off" - alias: "Wait the number of seconds that has been set" delay: !input no_motion_wait - alias: "Turn off the light" - service: light.turn_off + action: light.turn_off target: !input light_target diff --git a/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml b/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml index e1e3bd5b2f6..e072aad2565 100644 --- a/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml +++ b/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml @@ -25,8 +25,8 @@ blueprint: filter: integration: mobile_app -trigger: - platform: state +triggers: + trigger: state entity_id: !input person_entity variables: @@ -36,13 +36,13 @@ variables: person_entity: !input person_entity person_name: "{{ states[person_entity].name }}" -condition: +conditions: condition: template # The first case handles leaving the Home zone which has a special state when zoning called 'home'. # The second case handles leaving all other zones. value_template: "{{ zone_entity == 'zone.home' and trigger.from_state.state == 'home' and trigger.to_state.state != 'home' or trigger.from_state.state == zone_state and trigger.to_state.state != zone_state }}" -action: +actions: - alias: "Notify that a person has left the zone" domain: mobile_app type: notify diff --git a/homeassistant/components/automation/config.py b/homeassistant/components/automation/config.py index cc4e9aba7fb..fe74865ca92 100644 --- a/homeassistant/components/automation/config.py +++ b/homeassistant/components/automation/config.py @@ -16,6 +16,7 @@ from homeassistant.config import config_per_platform, config_without_domain from homeassistant.const import ( CONF_ALIAS, CONF_CONDITION, + CONF_CONDITIONS, CONF_DESCRIPTION, CONF_ID, CONF_VARIABLES, @@ -30,11 +31,13 @@ from homeassistant.util.yaml.input import UndefinedSubstitution from .const import ( CONF_ACTION, + CONF_ACTIONS, CONF_HIDE_ENTITY, CONF_INITIAL_STATE, CONF_TRACE, CONF_TRIGGER, CONF_TRIGGER_VARIABLES, + CONF_TRIGGERS, DOMAIN, LOGGER, ) @@ -52,7 +55,41 @@ _MINIMAL_PLATFORM_SCHEMA = vol.Schema( ) +def _backward_compat_schema(value: Any | None) -> Any: + """Backward compatibility for automations.""" + + if not isinstance(value, dict): + return value + + # `trigger` has been renamed to `triggers` + if CONF_TRIGGER in value: + if CONF_TRIGGERS in value: + raise vol.Invalid( + "Cannot specify both 'trigger' and 'triggers'. Please use 'triggers' only." + ) + value[CONF_TRIGGERS] = value.pop(CONF_TRIGGER) + + # `condition` has been renamed to `conditions` + if CONF_CONDITION in value: + if CONF_CONDITIONS in value: + raise vol.Invalid( + "Cannot specify both 'condition' and 'conditions'. Please use 'conditions' only." + ) + value[CONF_CONDITIONS] = value.pop(CONF_CONDITION) + + # `action` has been renamed to `actions` + if CONF_ACTION in value: + if CONF_ACTIONS in value: + raise vol.Invalid( + "Cannot specify both 'action' and 'actions'. Please use 'actions' only." 
+ ) + value[CONF_ACTIONS] = value.pop(CONF_ACTION) + + return value + + PLATFORM_SCHEMA = vol.All( + _backward_compat_schema, cv.deprecated(CONF_HIDE_ENTITY), script.make_script_schema( { @@ -63,16 +100,20 @@ PLATFORM_SCHEMA = vol.All( vol.Optional(CONF_TRACE, default={}): TRACE_CONFIG_SCHEMA, vol.Optional(CONF_INITIAL_STATE): cv.boolean, vol.Optional(CONF_HIDE_ENTITY): cv.boolean, - vol.Required(CONF_TRIGGER): cv.TRIGGER_SCHEMA, - vol.Optional(CONF_CONDITION): cv.CONDITIONS_SCHEMA, + vol.Required(CONF_TRIGGERS): cv.TRIGGER_SCHEMA, + vol.Optional(CONF_CONDITIONS): cv.CONDITIONS_SCHEMA, vol.Optional(CONF_VARIABLES): cv.SCRIPT_VARIABLES_SCHEMA, vol.Optional(CONF_TRIGGER_VARIABLES): cv.SCRIPT_VARIABLES_SCHEMA, - vol.Required(CONF_ACTION): cv.SCRIPT_SCHEMA, + vol.Required(CONF_ACTIONS): cv.SCRIPT_SCHEMA, }, script.SCRIPT_MODE_SINGLE, ), ) +AUTOMATION_BLUEPRINT_SCHEMA = vol.All( + _backward_compat_schema, blueprint.schemas.BLUEPRINT_SCHEMA +) + async def _async_validate_config_item( # noqa: C901 hass: HomeAssistant, @@ -151,7 +192,9 @@ async def _async_validate_config_item( # noqa: C901 uses_blueprint = True blueprints = async_get_blueprints(hass) try: - blueprint_inputs = await blueprints.async_inputs_from_config(config) + blueprint_inputs = await blueprints.async_inputs_from_config( + _backward_compat_schema(config) + ) except blueprint.BlueprintException as err: if warn_on_errors: LOGGER.error( @@ -199,8 +242,8 @@ async def _async_validate_config_item( # noqa: C901 automation_config.raw_config = raw_config try: - automation_config[CONF_TRIGGER] = await async_validate_trigger_config( - hass, validated_config[CONF_TRIGGER] + automation_config[CONF_TRIGGERS] = await async_validate_trigger_config( + hass, validated_config[CONF_TRIGGERS] ) except ( vol.Invalid, @@ -216,10 +259,10 @@ async def _async_validate_config_item( # noqa: C901 ) return automation_config - if CONF_CONDITION in validated_config: + if CONF_CONDITIONS in validated_config: try: - automation_config[CONF_CONDITION] = await async_validate_conditions_config( - hass, validated_config[CONF_CONDITION] + automation_config[CONF_CONDITIONS] = await async_validate_conditions_config( + hass, validated_config[CONF_CONDITIONS] ) except ( vol.Invalid, @@ -239,8 +282,8 @@ async def _async_validate_config_item( # noqa: C901 return automation_config try: - automation_config[CONF_ACTION] = await script.async_validate_actions_config( - hass, validated_config[CONF_ACTION] + automation_config[CONF_ACTIONS] = await script.async_validate_actions_config( + hass, validated_config[CONF_ACTIONS] ) except ( vol.Invalid, diff --git a/homeassistant/components/automation/const.py b/homeassistant/components/automation/const.py index e6be35494d7..c4ac636282e 100644 --- a/homeassistant/components/automation/const.py +++ b/homeassistant/components/automation/const.py @@ -3,7 +3,9 @@ import logging CONF_ACTION = "action" +CONF_ACTIONS = "actions" CONF_TRIGGER = "trigger" +CONF_TRIGGERS = "triggers" CONF_TRIGGER_VARIABLES = "trigger_variables" DOMAIN = "automation" diff --git a/homeassistant/components/automation/helpers.py b/homeassistant/components/automation/helpers.py index 6aefa2b150a..c529fbd504e 100644 --- a/homeassistant/components/automation/helpers.py +++ b/homeassistant/components/automation/helpers.py @@ -28,6 +28,14 @@ async def _reload_blueprint_automations( @callback def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints: """Get automation blueprints.""" + # pylint: disable-next=import-outside-toplevel + from .config import 
AUTOMATION_BLUEPRINT_SCHEMA + return blueprint.DomainBlueprints( - hass, DOMAIN, LOGGER, _blueprint_in_use, _reload_blueprint_automations + hass, + DOMAIN, + LOGGER, + _blueprint_in_use, + _reload_blueprint_automations, + AUTOMATION_BLUEPRINT_SCHEMA, ) diff --git a/homeassistant/components/automation/icons.json b/homeassistant/components/automation/icons.json index 9b68825ffd1..f1e0f26ef65 100644 --- a/homeassistant/components/automation/icons.json +++ b/homeassistant/components/automation/icons.json @@ -9,10 +9,20 @@ } }, "services": { - "turn_on": "mdi:robot", - "turn_off": "mdi:robot-off", - "toggle": "mdi:robot", - "trigger": "mdi:robot", - "reload": "mdi:reload" + "turn_on": { + "service": "mdi:robot" + }, + "turn_off": { + "service": "mdi:robot-off" + }, + "toggle": { + "service": "mdi:robot" + }, + "trigger": { + "service": "mdi:robot" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/automation/strings.json b/homeassistant/components/automation/strings.json index d8a3fa14f40..88410658afc 100644 --- a/homeassistant/components/automation/strings.json +++ b/homeassistant/components/automation/strings.json @@ -42,7 +42,7 @@ "step": { "confirm": { "title": "[%key:component::automation::issues::service_not_found::title%]", - "description": "The automation \"{name}\" (`{entity_id}`) has an unknown action: `{service}`.\n\nThis error prevents the automation from running correctly. Maybe this action is no longer available, or perhaps a typo caused it.\n\nTo fix this error, [edit the automation]({edit}) and remove this action.\n\nClick on SUBMIT below to confirm you have fixed this automation." + "description": "The automation \"{name}\" (`{entity_id}`) has an unknown action: `{service}`.\n\nThis error prevents the automation from running correctly. Maybe this action is no longer available, or perhaps a typo caused it.\n\nTo fix this error, [edit the automation]({edit}) and remove this action.\n\nSelect **Submit** below to confirm you have fixed this automation." 
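The _backward_compat_schema validator shown above lets existing automations keep using the legacy singular trigger / condition / action keys: it renames them to the new plural forms before the platform schema runs and rejects configurations that mix both spellings. A simplified, standalone sketch of that normalization (the real implementation in homeassistant/components/automation/config.py uses the shared CONF_* constants; the helper name and demo dict here are illustrative):

import voluptuous as vol

_RENAMED_KEYS = {"trigger": "triggers", "condition": "conditions", "action": "actions"}


def normalize_automation_keys(value: dict) -> dict:
    """Rewrite legacy singular keys to their plural forms, rejecting mixed usage."""
    for old_key, new_key in _RENAMED_KEYS.items():
        if old_key in value:
            if new_key in value:
                raise vol.Invalid(
                    f"Cannot specify both '{old_key}' and '{new_key}'. "
                    f"Please use '{new_key}' only."
                )
            value[new_key] = value.pop(old_key)
    return value


# An old-style automation dict is rewritten in place to the new key names.
config = {"trigger": [{"trigger": "state"}], "action": [{"action": "light.turn_on"}]}
assert "triggers" in normalize_automation_keys(config) and "actions" in config
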
} } } diff --git a/homeassistant/components/avea/manifest.json b/homeassistant/components/avea/manifest.json index 43c46c96e66..7e6c080481e 100644 --- a/homeassistant/components/avea/manifest.json +++ b/homeassistant/components/avea/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/avea", "iot_class": "local_polling", "loggers": ["avea"], + "quality_scale": "legacy", "requirements": ["avea==1.5.1"] } diff --git a/homeassistant/components/avion/manifest.json b/homeassistant/components/avion/manifest.json index 505dca870a7..8488e949af3 100644 --- a/homeassistant/components/avion/manifest.json +++ b/homeassistant/components/avion/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/avion", "iot_class": "assumed_state", + "quality_scale": "legacy", "requirements": ["avion==0.10"] } diff --git a/homeassistant/components/awair/__init__.py b/homeassistant/components/awair/__init__.py index aa810bf532b..528c658eff1 100644 --- a/homeassistant/components/awair/__init__.py +++ b/homeassistant/components/awair/__init__.py @@ -2,14 +2,13 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN from .coordinator import ( AwairCloudDataUpdateCoordinator, + AwairConfigEntry, AwairDataUpdateCoordinator, AwairLocalDataUpdateCoordinator, ) @@ -17,7 +16,9 @@ from .coordinator import ( PLATFORMS = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: AwairConfigEntry +) -> bool: """Set up Awair integration from a config entry.""" session = async_get_clientsession(hass) @@ -33,28 +34,21 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][config_entry.entry_id] = coordinator + config_entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) return True -async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_update_listener(hass: HomeAssistant, entry: AwairConfigEntry) -> None: """Handle options update.""" - coordinator: AwairLocalDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - if entry.title != coordinator.title: + if entry.title != entry.runtime_data.title: await hass.config_entries.async_reload(entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: AwairConfigEntry +) -> bool: """Unload Awair configuration.""" - unload_ok = await hass.config_entries.async_unload_platforms( - config_entry, PLATFORMS - ) - - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) diff --git a/homeassistant/components/awair/config_flow.py b/homeassistant/components/awair/config_flow.py index a6efc3640f9..88985b0db10 100644 --- a/homeassistant/components/awair/config_flow.py +++ b/homeassistant/components/awair/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing 
import Any +from typing import Any, Self, cast from aiohttp.client_exceptions import ClientError from python_awair import Awair, AwairLocal, AwairLocalDevice @@ -26,16 +26,17 @@ class AwairFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 _device: AwairLocalDevice + host: str async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle zeroconf discovery.""" - host = discovery_info.host - LOGGER.debug("Discovered device: %s", host) + self.host = discovery_info.host + LOGGER.debug("Discovered device: %s", self.host) - self._device, _ = await self._check_local_connection(host) + self._device, _ = await self._check_local_connection(self.host) if self._device is not None: await self.async_set_unique_id(self._device.mac_address) @@ -45,7 +46,6 @@ class AwairFlowHandler(ConfigFlow, domain=DOMAIN): ) self.context.update( { - "host": host, "title_placeholders": { "model": self._device.model, "device_id": self._device.device_id, @@ -119,12 +119,16 @@ class AwairFlowHandler(ConfigFlow, domain=DOMAIN): def _get_discovered_entries(self) -> dict[str, str]: """Get discovered entries.""" entries: dict[str, str] = {} - for flow in self._async_in_progress(): - if flow["context"]["source"] == SOURCE_ZEROCONF: - info = flow["context"]["title_placeholders"] - entries[flow["context"]["host"]] = ( - f"{info['model']} ({info['device_id']})" - ) + + flows = cast( + set[Self], + self.hass.config_entries.flow._handler_progress_index.get(DOMAIN) or set(), # noqa: SLF001 + ) + for flow in flows: + if flow.source != SOURCE_ZEROCONF: + continue + info = flow.context["title_placeholders"] + entries[flow.host] = f"{info['model']} ({info['device_id']})" return entries async def async_step_local( @@ -205,10 +209,9 @@ class AwairFlowHandler(ConfigFlow, domain=DOMAIN): _, error = await self._check_cloud_connection(access_token) if error is None: - entry = await self.async_set_unique_id(self.unique_id) - assert entry - self.hass.config_entries.async_update_entry(entry, data=user_input) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) if error != "invalid_access_token": return self.async_abort(reason=error) diff --git a/homeassistant/components/awair/coordinator.py b/homeassistant/components/awair/coordinator.py index b63efff7733..78f0d9d65f2 100644 --- a/homeassistant/components/awair/coordinator.py +++ b/homeassistant/components/awair/coordinator.py @@ -26,6 +26,8 @@ from .const import ( UPDATE_INTERVAL_LOCAL, ) +type AwairConfigEntry = ConfigEntry[AwairDataUpdateCoordinator] + @dataclass class AwairResult: diff --git a/homeassistant/components/awair/manifest.json b/homeassistant/components/awair/manifest.json index 25257bc3e1c..a0fbd350dab 100644 --- a/homeassistant/components/awair/manifest.json +++ b/homeassistant/components/awair/manifest.json @@ -3,11 +3,6 @@ "name": "Awair", "codeowners": ["@ahayworth", "@danielsjf"], "config_flow": true, - "dhcp": [ - { - "macaddress": "70886B1*" - } - ], "documentation": "https://www.home-assistant.io/integrations/awair", "iot_class": "local_polling", "loggers": ["python_awair"], diff --git a/homeassistant/components/awair/sensor.py b/homeassistant/components/awair/sensor.py index b9a226e9c2c..c92009d9b1b 100644 --- a/homeassistant/components/awair/sensor.py +++ b/homeassistant/components/awair/sensor.py @@ -46,7 +46,7 @@ from .const import ( ATTRIBUTION, DOMAIN, ) -from .coordinator import AwairDataUpdateCoordinator, 
AwairResult +from .coordinator import AwairConfigEntry, AwairDataUpdateCoordinator DUST_ALIASES = [API_PM25, API_PM10] @@ -132,15 +132,14 @@ SENSOR_TYPES_DUST: tuple[AwairSensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AwairConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Awair sensor entity based on a config entry.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data entities = [] - data: list[AwairResult] = coordinator.data.values() - for result in data: + for result in coordinator.data.values(): if result.air_data: entities.append(AwairSensor(result.device, coordinator, SENSOR_TYPE_SCORE)) device_sensors = result.air_data.sensors.keys() @@ -293,6 +292,7 @@ class AwairSensor(CoordinatorEntity[AwairDataUpdateCoordinator], SensorEntity): identifiers={(DOMAIN, self._device.uuid)}, manufacturer="Awair", model=self._device.model, + model_id=self._device.device_type, name=( self._device.name or cast(ConfigEntry, self.coordinator.config_entry).title diff --git a/homeassistant/components/awair/strings.json b/homeassistant/components/awair/strings.json index 731cd5db8dd..a7c5c647af8 100644 --- a/homeassistant/components/awair/strings.json +++ b/homeassistant/components/awair/strings.json @@ -9,7 +9,7 @@ } }, "local": { - "description": "Follow [these instructions]({url}) on how to enable the Awair Local API.\n\nClick submit when done." + "description": "Follow [these instructions]({url}) on how to enable the Awair Local API.\n\nSelect **Submit** when done." }, "local_pick": { "data": { @@ -45,6 +45,7 @@ "already_configured_device": "[%key:common::config_flow::abort::already_configured_device%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unknown": "[%key:common::config_flow::error::unknown%]", "unreachable": "[%key:common::config_flow::error::cannot_connect%]" }, "flow_title": "{model} ({device_id})" diff --git a/homeassistant/components/aws/config_flow.py b/homeassistant/components/aws/config_flow.py index 8c80b0d487d..090d9747a64 100644 --- a/homeassistant/components/aws/config_flow.py +++ b/homeassistant/components/aws/config_flow.py @@ -1,6 +1,5 @@ """Config flow for AWS component.""" -from collections.abc import Mapping from typing import Any from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -13,11 +12,6 @@ class AWSFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, user_input: Mapping[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - - return self.async_create_entry(title="configuration.yaml", data=user_input) + return self.async_create_entry(title="configuration.yaml", data=import_data) diff --git a/homeassistant/components/aws/manifest.json b/homeassistant/components/aws/manifest.json index 6238bffce36..12149e4388a 100644 --- a/homeassistant/components/aws/manifest.json +++ b/homeassistant/components/aws/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aws", "iot_class": "cloud_push", "loggers": ["aiobotocore", "botocore"], + "quality_scale": "legacy", "requirements": ["aiobotocore==2.13.1", "botocore==1.34.131"] } diff --git 
a/homeassistant/components/axis/__init__.py b/homeassistant/components/axis/__init__.py index f1d8d1d4b63..e6c6fab47a1 100644 --- a/homeassistant/components/axis/__init__.py +++ b/homeassistant/components/axis/__init__.py @@ -52,6 +52,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> # Home Assistant 2023.2 hass.config_entries.async_update_entry(config_entry, version=3) - _LOGGER.info("Migration to version %s successful", config_entry.version) + _LOGGER.debug("Migration to version %s successful", config_entry.version) return True diff --git a/homeassistant/components/axis/config_flow.py b/homeassistant/components/axis/config_flow.py index 63cac941423..592b1e2d41f 100644 --- a/homeassistant/components/axis/config_flow.py +++ b/homeassistant/components/axis/config_flow.py @@ -13,10 +13,12 @@ import voluptuous as vol from homeassistant.components import dhcp, ssdp, zeroconf from homeassistant.config_entries import ( SOURCE_IGNORE, + SOURCE_REAUTH, + SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_HOST, @@ -57,9 +59,11 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> AxisOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> AxisOptionsFlowHandler: """Get the options flow for this handler.""" - return AxisOptionsFlowHandler(config_entry) + return AxisOptionsFlowHandler() def __init__(self) -> None: """Initialize the Axis config flow.""" @@ -87,27 +91,30 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN): else: serial = api.vapix.serial_number - await self.async_set_unique_id(format_mac(serial)) - - self._abort_if_unique_id_configured( - updates={ - CONF_PROTOCOL: user_input[CONF_PROTOCOL], - CONF_HOST: user_input[CONF_HOST], - CONF_PORT: user_input[CONF_PORT], - CONF_USERNAME: user_input[CONF_USERNAME], - CONF_PASSWORD: user_input[CONF_PASSWORD], - } - ) - - self.config = { + config = { CONF_PROTOCOL: user_input[CONF_PROTOCOL], CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT], CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], - CONF_MODEL: api.vapix.product_number, } + await self.async_set_unique_id(format_mac(serial)) + + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=config + ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), data_updates=config + ) + self._abort_if_unique_id_configured() + + self.config = config | {CONF_MODEL: api.vapix.product_number} + return await self._create_entry(serial) data = self.discovery_schema or { @@ -149,12 +156,12 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN): return self.async_create_entry(title=title, data=self.config) async def async_step_reconfigure( - self, user_input: Mapping[str, Any] | None = None + self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Trigger a reconfiguration flow.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - return await self._redo_configuration(entry.data, keep_password=True) + return await self._redo_configuration( + self._get_reconfigure_entry().data, keep_password=True + ) async def async_step_reauth( self, entry_data: 
Mapping[str, Any] @@ -259,7 +266,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN): return await self.async_step_user() -class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry): +class AxisOptionsFlowHandler(OptionsFlow): """Handle Axis device options.""" config_entry: AxisConfigEntry @@ -277,8 +284,7 @@ class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry): ) -> ConfigFlowResult: """Manage the Axis device stream options.""" if user_input is not None: - self.options.update(user_input) - return self.async_create_entry(title="", data=self.options) + return self.async_create_entry(data=self.config_entry.options | user_input) schema = {} diff --git a/homeassistant/components/axis/hub/__init__.py b/homeassistant/components/axis/hub/__init__.py index e68f902b628..8fd80989ca2 100644 --- a/homeassistant/components/axis/hub/__init__.py +++ b/homeassistant/components/axis/hub/__init__.py @@ -1,4 +1,6 @@ """Internal functionality not part of HA infrastructure.""" -from .api import get_axis_api # noqa: F401 -from .hub import AxisHub # noqa: F401 +from .api import get_axis_api +from .hub import AxisHub + +__all__ = ["AxisHub", "get_axis_api"] diff --git a/homeassistant/components/axis/hub/event_source.py b/homeassistant/components/axis/hub/event_source.py index 7f2bfe7c982..d295639d1a6 100644 --- a/homeassistant/components/axis/hub/event_source.py +++ b/homeassistant/components/axis/hub/event_source.py @@ -9,8 +9,7 @@ from axis.models.mqtt import ClientState from axis.stream_manager import Signal, State from homeassistant.components import mqtt -from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN -from homeassistant.components.mqtt.models import ReceiveMessage +from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN, ReceiveMessage from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_send diff --git a/homeassistant/components/axis/manifest.json b/homeassistant/components/axis/manifest.json index e028736f4ca..9758af60178 100644 --- a/homeassistant/components/axis/manifest.json +++ b/homeassistant/components/axis/manifest.json @@ -29,8 +29,7 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["axis"], - "quality_scale": "platinum", - "requirements": ["axis==62"], + "requirements": ["axis==64"], "ssdp": [ { "manufacturer": "AXIS" diff --git a/homeassistant/components/axis/strings.json b/homeassistant/components/axis/strings.json index 8c302dba201..da1963deacd 100644 --- a/homeassistant/components/axis/strings.json +++ b/homeassistant/components/axis/strings.json @@ -8,7 +8,8 @@ "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "port": "[%key:common::config_flow::data::port%]" + "port": "[%key:common::config_flow::data::port%]", + "protocol": "Protocol" }, "data_description": { "host": "The hostname or IP address of the Axis device.", @@ -25,7 +26,10 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "link_local_address": "Link local addresses are not supported", - "not_axis_device": "Discovered device not an Axis device" + "not_axis_device": "Discovered device not an Axis device", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": 
"The serial number of the device does not match the previous serial number" } }, "options": { diff --git a/homeassistant/components/azure_data_explorer/__init__.py b/homeassistant/components/azure_data_explorer/__init__.py index 34f2c438d14..c416fc1cba9 100644 --- a/homeassistant/components/azure_data_explorer/__init__.py +++ b/homeassistant/components/azure_data_explorer/__init__.py @@ -16,19 +16,18 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import MATCH_ALL from homeassistant.core import Event, HomeAssistant, State from homeassistant.exceptions import ConfigEntryError -from homeassistant.helpers.entityfilter import FILTER_SCHEMA +from homeassistant.helpers.entityfilter import FILTER_SCHEMA, EntityFilter from homeassistant.helpers.event import async_call_later from homeassistant.helpers.json import ExtendedJSONEncoder from homeassistant.helpers.typing import ConfigType from homeassistant.util.dt import utcnow +from homeassistant.util.hass_dict import HassKey from .client import AzureDataExplorerClient from .const import ( CONF_APP_REG_SECRET, CONF_FILTER, CONF_SEND_INTERVAL, - DATA_FILTER, - DATA_HUB, DEFAULT_MAX_DELAY, DOMAIN, FILTER_STATES, @@ -46,6 +45,7 @@ CONFIG_SCHEMA = vol.Schema( }, extra=vol.ALLOW_EXTRA, ) +DATA_COMPONENT: HassKey[EntityFilter] = HassKey(DOMAIN) # fixtures for both init and config flow tests @@ -63,10 +63,10 @@ async def async_setup(hass: HomeAssistant, yaml_config: ConfigType) -> bool: Adds an empty filter to hass data. Tries to get a filter from yaml, if present set to hass data. """ - - hass.data.setdefault(DOMAIN, {DATA_FILTER: FILTER_SCHEMA({})}) if DOMAIN in yaml_config: - hass.data[DOMAIN][DATA_FILTER] = yaml_config[DOMAIN].pop(CONF_FILTER) + hass.data[DATA_COMPONENT] = yaml_config[DOMAIN].pop(CONF_FILTER) + else: + hass.data[DATA_COMPONENT] = FILTER_SCHEMA({}) return True @@ -83,15 +83,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except KustoAuthenticationError: return False - hass.data[DOMAIN][DATA_HUB] = adx + entry.async_on_unload(adx.async_stop) await adx.async_start() return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - adx = hass.data[DOMAIN].pop(DATA_HUB) - await adx.async_stop() return True @@ -107,7 +105,7 @@ class AzureDataExplorer: self.hass = hass self._entry = entry - self._entities_filter = hass.data[DOMAIN][DATA_FILTER] + self._entities_filter = hass.data[DATA_COMPONENT] self._client = AzureDataExplorerClient(entry.data) diff --git a/homeassistant/components/azure_data_explorer/const.py b/homeassistant/components/azure_data_explorer/const.py index a88a6b8b94f..d6ab0bb499c 100644 --- a/homeassistant/components/azure_data_explorer/const.py +++ b/homeassistant/components/azure_data_explorer/const.py @@ -16,9 +16,8 @@ CONF_APP_REG_SECRET = "client_secret" CONF_AUTHORITY_ID = "authority_id" CONF_SEND_INTERVAL = "send_interval" CONF_MAX_DELAY = "max_delay" -CONF_FILTER = DATA_FILTER = "filter" +CONF_FILTER = "filter" CONF_USE_QUEUED_CLIENT = "use_queued_ingestion" -DATA_HUB = "hub" STEP_USER = "user" diff --git a/homeassistant/components/azure_devops/config_flow.py b/homeassistant/components/azure_devops/config_flow.py index ffb0abf609a..13666343e1d 100644 --- a/homeassistant/components/azure_devops/config_flow.py +++ b/homeassistant/components/azure_devops/config_flow.py @@ -42,17 +42,6 @@ class AzureDevOpsFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors or {}, ) - async def 
_show_reauth_form(self, errors: dict[str, str]) -> ConfigFlowResult: - """Show the reauth form to the user.""" - return self.async_show_form( - step_id="reauth", - description_placeholders={ - "project_url": f"{self._organization}/{self._project}" - }, - data_schema=vol.Schema({vol.Required(CONF_PAT): str}), - errors=errors or {}, - ) - async def _check_setup(self) -> dict[str, str] | None: """Check the setup of the flow.""" errors: dict[str, str] = {} @@ -106,24 +95,33 @@ class AzureDevOpsFlowHandler(ConfigFlow, domain=DOMAIN): self.context["title_placeholders"] = { "project_url": f"{self._organization}/{self._project}", } + return await self.async_step_reauth_confirm() - await self.async_set_unique_id(f"{self._organization}_{self._project}") - - errors = await self._check_setup() - if errors is not None: - return await self._show_reauth_form(errors) - - entry = await self.async_set_unique_id(self.unique_id) - assert entry - self.hass.config_entries.async_update_entry( - entry, - data={ - CONF_ORG: self._organization, - CONF_PROJECT: self._project, - CONF_PAT: self._pat, + async def async_step_reauth_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + errors: dict[str, str] | None = None + if user_input is not None: + errors = await self._check_setup() + if errors is None: + self.hass.config_entries.async_update_entry( + self._get_reauth_entry(), + data={ + CONF_ORG: self._organization, + CONF_PROJECT: self._project, + CONF_PAT: self._pat, + }, + ) + return self.async_abort(reason="reauth_successful") + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={ + "project_url": f"{self._organization}/{self._project}" }, + data_schema=vol.Schema({vol.Required(CONF_PAT): str}), + errors=errors or {}, ) - return self.async_abort(reason="reauth_successful") def _async_create_entry(self) -> ConfigFlowResult: """Handle create entry.""" diff --git a/homeassistant/components/azure_devops/coordinator.py b/homeassistant/components/azure_devops/coordinator.py index 22dbe32c103..21fb76560c3 100644 --- a/homeassistant/components/azure_devops/coordinator.py +++ b/homeassistant/components/azure_devops/coordinator.py @@ -6,8 +6,14 @@ import logging from typing import Final from aioazuredevops.client import DevOpsClient -from aioazuredevops.models.builds import Build +from aioazuredevops.helper import ( + WorkItemTypeAndState, + work_item_types_states_filter, + work_items_by_type_and_state, +) +from aioazuredevops.models.build import Build from aioazuredevops.models.core import Project +from aioazuredevops.models.work_item_type import Category import aiohttp from homeassistant.config_entries import ConfigEntry @@ -20,6 +26,7 @@ from .const import CONF_ORG, DOMAIN from .data import AzureDevOpsData BUILDS_QUERY: Final = "?queryOrder=queueTimeDescending&maxBuildsPerDefinition=1" +IGNORED_CATEGORIES: Final[list[Category]] = [Category.COMPLETED, Category.REMOVED] def ado_exception_none_handler(func: Callable) -> Callable: @@ -105,13 +112,60 @@ class AzureDevOpsDataUpdateCoordinator(DataUpdateCoordinator[AzureDevOpsData]): BUILDS_QUERY, ) + @ado_exception_none_handler + async def _get_work_items( + self, project_name: str + ) -> list[WorkItemTypeAndState] | None: + """Get the work items.""" + + if ( + work_item_types := await self.client.get_work_item_types( + self.organization, + project_name, + ) + ) is None: + # If no work item types are returned, return an empty list + return [] + + if ( + work_item_ids := 
await self.client.get_work_item_ids( + self.organization, + project_name, + # Filter out completed and removed work items so we only get active work items + states=work_item_types_states_filter( + work_item_types, + ignored_categories=IGNORED_CATEGORIES, + ), + ) + ) is None: + # If no work item ids are returned, return an empty list + return [] + + if ( + work_items := await self.client.get_work_items( + self.organization, + project_name, + work_item_ids, + ) + ) is None: + # If no work items are returned, return an empty list + return [] + + return work_items_by_type_and_state( + work_item_types, + work_items, + ignored_categories=IGNORED_CATEGORIES, + ) + async def _async_update_data(self) -> AzureDevOpsData: """Fetch data from Azure DevOps.""" # Get the builds from the project builds = await self._get_builds(self.project.name) + work_items = await self._get_work_items(self.project.name) return AzureDevOpsData( organization=self.organization, project=self.project, builds=builds, + work_items=work_items, ) diff --git a/homeassistant/components/azure_devops/data.py b/homeassistant/components/azure_devops/data.py index 6d9e2069b67..ff34bc90c24 100644 --- a/homeassistant/components/azure_devops/data.py +++ b/homeassistant/components/azure_devops/data.py @@ -2,7 +2,8 @@ from dataclasses import dataclass -from aioazuredevops.models.builds import Build +from aioazuredevops.helper import WorkItemTypeAndState +from aioazuredevops.models.build import Build from aioazuredevops.models.core import Project @@ -13,3 +14,4 @@ class AzureDevOpsData: organization: str project: Project builds: list[Build] + work_items: list[WorkItemTypeAndState] diff --git a/homeassistant/components/azure_devops/icons.json b/homeassistant/components/azure_devops/icons.json index de720b46106..ea6b4c632ea 100644 --- a/homeassistant/components/azure_devops/icons.json +++ b/homeassistant/components/azure_devops/icons.json @@ -3,6 +3,9 @@ "sensor": { "latest_build": { "default": "mdi:pipe" + }, + "work_item_count": { + "default": "mdi:ticket" } } } diff --git a/homeassistant/components/azure_devops/manifest.json b/homeassistant/components/azure_devops/manifest.json index 48ceee5f9d8..5086e44ab0f 100644 --- a/homeassistant/components/azure_devops/manifest.json +++ b/homeassistant/components/azure_devops/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/azure_devops", "iot_class": "cloud_polling", "loggers": ["aioazuredevops"], - "requirements": ["aioazuredevops==2.1.1"] + "requirements": ["aioazuredevops==2.2.1"] } diff --git a/homeassistant/components/azure_devops/sensor.py b/homeassistant/components/azure_devops/sensor.py index 029d3d875dc..fd47115214a 100644 --- a/homeassistant/components/azure_devops/sensor.py +++ b/homeassistant/components/azure_devops/sensor.py @@ -8,7 +8,8 @@ from datetime import datetime import logging from typing import Any -from aioazuredevops.models.builds import Build +from aioazuredevops.helper import WorkItemState, WorkItemTypeAndState +from aioazuredevops.models.build import Build from homeassistant.components.sensor import ( SensorDeviceClass, @@ -29,12 +30,19 @@ _LOGGER = logging.getLogger(__name__) @dataclass(frozen=True, kw_only=True) class AzureDevOpsBuildSensorEntityDescription(SensorEntityDescription): - """Class describing Azure DevOps base build sensor entities.""" + """Class describing Azure DevOps build sensor entities.""" attr_fn: Callable[[Build], dict[str, Any] | None] = lambda _: None value_fn: Callable[[Build], datetime | StateType] 
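Both the existing build sensors and the new work-item sensors are description-driven: each frozen entity-description dataclass carries a value_fn, and the entity's native_value simply applies that callable to the latest coordinator data. The same shape, sketched below without the Home Assistant base classes so the flow is visible in isolation; every name in this sketch is illustrative, not the integration's actual API:

from collections.abc import Callable
from dataclasses import dataclass, field


@dataclass
class WorkItemState:
    """A bucket of work items sharing one state, e.g. 'Active'."""

    name: str
    work_items: list[str] = field(default_factory=list)


@dataclass(frozen=True, kw_only=True)
class WorkItemSensorDescription:
    """Describes one sensor; the description owns the value logic."""

    key: str
    value_fn: Callable[[WorkItemState], int]


DESCRIPTIONS = (
    WorkItemSensorDescription(
        key="work_item_count",
        value_fn=lambda state: len(state.work_items),
    ),
)


class WorkItemSensor:
    """Minimal stand-in for a coordinator-backed sensor entity."""

    def __init__(self, description: WorkItemSensorDescription, state: WorkItemState) -> None:
        self.entity_description = description
        self._state = state

    @property
    def native_value(self) -> int:
        # The entity never computes the value itself; it delegates to its description.
        return self.entity_description.value_fn(self._state)


sensor = WorkItemSensor(DESCRIPTIONS[0], WorkItemState("Active", ["bug 1", "bug 2"]))
assert sensor.native_value == 2
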
+@dataclass(frozen=True, kw_only=True) +class AzureDevOpsWorkItemSensorEntityDescription(SensorEntityDescription): + """Class describing Azure DevOps work item sensor entities.""" + + value_fn: Callable[[WorkItemState], datetime | StateType] + + BASE_BUILD_SENSOR_DESCRIPTIONS: tuple[AzureDevOpsBuildSensorEntityDescription, ...] = ( # Attributes are deprecated in 2024.7 and can be removed in 2025.1 AzureDevOpsBuildSensorEntityDescription( @@ -116,6 +124,16 @@ BASE_BUILD_SENSOR_DESCRIPTIONS: tuple[AzureDevOpsBuildSensorEntityDescription, . ), ) +BASE_WORK_ITEM_SENSOR_DESCRIPTIONS: tuple[ + AzureDevOpsWorkItemSensorEntityDescription, ... +] = ( + AzureDevOpsWorkItemSensorEntityDescription( + key="work_item_count", + translation_key="work_item_count", + value_fn=lambda work_item_state: len(work_item_state.work_items), + ), +) + def parse_datetime(value: str | None) -> datetime | None: """Parse datetime string.""" @@ -134,7 +152,7 @@ async def async_setup_entry( coordinator = entry.runtime_data initial_builds: list[Build] = coordinator.data.builds - async_add_entities( + entities: list[SensorEntity] = [ AzureDevOpsBuildSensor( coordinator, description, @@ -143,8 +161,22 @@ async def async_setup_entry( for description in BASE_BUILD_SENSOR_DESCRIPTIONS for key, build in enumerate(initial_builds) if build.project and build.definition + ] + + entities.extend( + AzureDevOpsWorkItemSensor( + coordinator, + description, + key, + state_key, + ) + for description in BASE_WORK_ITEM_SENSOR_DESCRIPTIONS + for key, work_item_type_state in enumerate(coordinator.data.work_items) + for state_key, _ in enumerate(work_item_type_state.state_items) ) + async_add_entities(entities) + class AzureDevOpsBuildSensor(AzureDevOpsEntity, SensorEntity): """Define a Azure DevOps build sensor.""" @@ -162,8 +194,8 @@ class AzureDevOpsBuildSensor(AzureDevOpsEntity, SensorEntity): self.entity_description = description self.item_key = item_key self._attr_unique_id = ( - f"{self.coordinator.data.organization}_" - f"{self.build.project.id}_" + f"{coordinator.data.organization}_" + f"{coordinator.data.project.id}_" f"{self.build.definition.build_id}_" f"{description.key}" ) @@ -185,3 +217,48 @@ class AzureDevOpsBuildSensor(AzureDevOpsEntity, SensorEntity): def extra_state_attributes(self) -> Mapping[str, Any] | None: """Return the state attributes of the entity.""" return self.entity_description.attr_fn(self.build) + + +class AzureDevOpsWorkItemSensor(AzureDevOpsEntity, SensorEntity): + """Define a Azure DevOps work item sensor.""" + + entity_description: AzureDevOpsWorkItemSensorEntityDescription + + def __init__( + self, + coordinator: AzureDevOpsDataUpdateCoordinator, + description: AzureDevOpsWorkItemSensorEntityDescription, + wits_key: int, + state_key: int, + ) -> None: + """Initialize.""" + super().__init__(coordinator) + self.entity_description = description + self.wits_key = wits_key + self.state_key = state_key + self._attr_unique_id = ( + f"{coordinator.data.organization}_" + f"{coordinator.data.project.id}_" + f"{self.work_item_type.name}_" + f"{self.work_item_state.name}_" + f"{description.key}" + ) + self._attr_translation_placeholders = { + "item_type": self.work_item_type.name, + "item_state": self.work_item_state.name, + } + + @property + def work_item_type(self) -> WorkItemTypeAndState: + """Return the work item.""" + return self.coordinator.data.work_items[self.wits_key] + + @property + def work_item_state(self) -> WorkItemState: + """Return the work item state.""" + return 
self.work_item_type.state_items[self.state_key] + + @property + def native_value(self) -> datetime | StateType: + """Return the state.""" + return self.entity_description.value_fn(self.work_item_state) diff --git a/homeassistant/components/azure_devops/strings.json b/homeassistant/components/azure_devops/strings.json index 8a17169fb6b..f5fe5cd06a7 100644 --- a/homeassistant/components/azure_devops/strings.json +++ b/homeassistant/components/azure_devops/strings.json @@ -16,7 +16,7 @@ "description": "Set up an Azure DevOps instance to access your project. A Personal Access Token is only required for a private project.", "title": "Add Azure DevOps Project" }, - "reauth": { + "reauth_confirm": { "data": { "personal_access_token": "[%key:component::azure_devops::config::step::user::data::personal_access_token%]" }, @@ -60,6 +60,9 @@ }, "url": { "name": "{definition_name} latest build url" + }, + "work_item_count": { + "name": "{item_type} {item_state} work items" } } }, diff --git a/homeassistant/components/azure_event_hub/__init__.py b/homeassistant/components/azure_event_hub/__init__.py index 668444f9990..bc9d34e728e 100644 --- a/homeassistant/components/azure_event_hub/__init__.py +++ b/homeassistant/components/azure_event_hub/__init__.py @@ -7,6 +7,7 @@ from collections.abc import Callable from datetime import datetime import json import logging +from types import MappingProxyType from typing import Any from azure.eventhub import EventData, EventDataBatch @@ -19,11 +20,12 @@ from homeassistant.const import MATCH_ALL from homeassistant.core import Event, HomeAssistant, State from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entityfilter import FILTER_SCHEMA +from homeassistant.helpers.entityfilter import FILTER_SCHEMA, EntityFilter from homeassistant.helpers.event import async_call_later from homeassistant.helpers.json import JSONEncoder from homeassistant.helpers.typing import ConfigType from homeassistant.util.dt import utcnow +from homeassistant.util.hass_dict import HassKey from .client import AzureEventHubClient from .const import ( @@ -35,13 +37,13 @@ from .const import ( CONF_FILTER, CONF_MAX_DELAY, CONF_SEND_INTERVAL, - DATA_FILTER, - DATA_HUB, DEFAULT_MAX_DELAY, DOMAIN, FILTER_STATES, ) +type AzureEventHubConfigEntry = ConfigEntry[AzureEventHub] + _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( @@ -61,6 +63,7 @@ CONFIG_SCHEMA = vol.Schema( }, extra=vol.ALLOW_EXTRA, ) +DATA_COMPONENT: HassKey[EntityFilter] = HassKey(DOMAIN) async def async_setup(hass: HomeAssistant, yaml_config: ConfigType) -> bool: @@ -71,10 +74,10 @@ async def async_setup(hass: HomeAssistant, yaml_config: ConfigType) -> bool: If config is empty after getting the filter, return, otherwise emit deprecated warning and pass the rest to the config flow. 
""" - hass.data.setdefault(DOMAIN, {DATA_FILTER: FILTER_SCHEMA({})}) if DOMAIN not in yaml_config: + hass.data[DATA_COMPONENT] = FILTER_SCHEMA({}) return True - hass.data[DOMAIN][DATA_FILTER] = yaml_config[DOMAIN].pop(CONF_FILTER) + hass.data[DATA_COMPONENT] = yaml_config[DOMAIN].pop(CONF_FILTER) if not yaml_config[DOMAIN]: return True @@ -92,33 +95,37 @@ async def async_setup(hass: HomeAssistant, yaml_config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: AzureEventHubConfigEntry +) -> bool: """Do the setup based on the config entry and the filter from yaml.""" - hass.data.setdefault(DOMAIN, {DATA_FILTER: FILTER_SCHEMA({})}) hub = AzureEventHub( hass, entry, - hass.data[DOMAIN][DATA_FILTER], + hass.data[DATA_COMPONENT], ) try: await hub.async_test_connection() except EventHubError as err: raise ConfigEntryNotReady("Could not connect to Azure Event Hub") from err - hass.data[DOMAIN][DATA_HUB] = hub + entry.runtime_data = hub + entry.async_on_unload(hub.async_stop) entry.async_on_unload(entry.add_update_listener(async_update_listener)) await hub.async_start() return True -async def async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_update_listener( + hass: HomeAssistant, entry: AzureEventHubConfigEntry +) -> None: """Update listener for options.""" - hass.data[DOMAIN][DATA_HUB].update_options(entry.options) + entry.runtime_data.update_options(entry.options) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: AzureEventHubConfigEntry +) -> bool: """Unload a config entry.""" - hub = hass.data[DOMAIN].pop(DATA_HUB) - await hub.async_stop() return True @@ -129,7 +136,7 @@ class AzureEventHub: self, hass: HomeAssistant, entry: ConfigEntry, - entities_filter: vol.Schema, + entities_filter: EntityFilter, ) -> None: """Initialize the listener.""" self.hass = hass @@ -172,7 +179,7 @@ class AzureEventHub: await self.async_send(None) await self._queue.join() - def update_options(self, new_options: dict[str, Any]) -> None: + def update_options(self, new_options: MappingProxyType[str, Any]) -> None: """Update options.""" self._send_interval = new_options[CONF_SEND_INTERVAL] diff --git a/homeassistant/components/azure_event_hub/config_flow.py b/homeassistant/components/azure_event_hub/config_flow.py index 264daa683bc..baed866042e 100644 --- a/homeassistant/components/azure_event_hub/config_flow.py +++ b/homeassistant/components/azure_event_hub/config_flow.py @@ -102,8 +102,6 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial user step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") if user_input is None: return self.async_show_form(step_id=STEP_USER, data_schema=BASE_SCHEMA) @@ -124,7 +122,9 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN): step_id=STEP_CONN_STRING, data_schema=CONN_STRING_SCHEMA, errors=errors, - description_placeholders=self._data[CONF_EVENT_HUB_INSTANCE_NAME], + description_placeholders={ + "event_hub_instance_name": self._data[CONF_EVENT_HUB_INSTANCE_NAME] + }, last_step=True, ) @@ -144,7 +144,9 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN): step_id=STEP_SAS, data_schema=SAS_SCHEMA, errors=errors, - description_placeholders=self._data[CONF_EVENT_HUB_INSTANCE_NAME], + 
description_placeholders={ + "event_hub_instance_name": self._data[CONF_EVENT_HUB_INSTANCE_NAME] + }, last_step=True, ) @@ -154,17 +156,13 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN): options=self._options, ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from configuration.yaml.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if CONF_SEND_INTERVAL in import_config: - self._options[CONF_SEND_INTERVAL] = import_config.pop(CONF_SEND_INTERVAL) - if CONF_MAX_DELAY in import_config: - self._options[CONF_MAX_DELAY] = import_config.pop(CONF_MAX_DELAY) - self._data = import_config + if CONF_SEND_INTERVAL in import_data: + self._options[CONF_SEND_INTERVAL] = import_data.pop(CONF_SEND_INTERVAL) + if CONF_MAX_DELAY in import_data: + self._options[CONF_MAX_DELAY] = import_data.pop(CONF_MAX_DELAY) + self._data = import_data errors = await validate_data(self._data) if errors: return self.async_abort(reason=errors["base"]) diff --git a/homeassistant/components/azure_event_hub/const.py b/homeassistant/components/azure_event_hub/const.py index 174fdddc6a1..59a287ac6ca 100644 --- a/homeassistant/components/azure_event_hub/const.py +++ b/homeassistant/components/azure_event_hub/const.py @@ -16,8 +16,7 @@ CONF_EVENT_HUB_SAS_KEY = "event_hub_sas_key" CONF_EVENT_HUB_CON_STRING = "event_hub_connection_string" CONF_SEND_INTERVAL = "send_interval" CONF_MAX_DELAY = "max_delay" -CONF_FILTER = DATA_FILTER = "filter" -DATA_HUB = "hub" +CONF_FILTER = "filter" STEP_USER = "user" STEP_SAS = "sas" diff --git a/homeassistant/components/azure_event_hub/manifest.json b/homeassistant/components/azure_event_hub/manifest.json index c6d5835fd1d..45fbf8c4a56 100644 --- a/homeassistant/components/azure_event_hub/manifest.json +++ b/homeassistant/components/azure_event_hub/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/azure_event_hub", "iot_class": "cloud_push", "loggers": ["azure"], - "requirements": ["azure-eventhub==5.11.1"] + "requirements": ["azure-eventhub==5.11.1"], + "single_config_entry": true } diff --git a/homeassistant/components/azure_event_hub/strings.json b/homeassistant/components/azure_event_hub/strings.json index 3f05e4b8e35..d17c4a385c0 100644 --- a/homeassistant/components/azure_event_hub/strings.json +++ b/homeassistant/components/azure_event_hub/strings.json @@ -31,14 +31,13 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "cannot_connect": "Connecting with the credentials from the configuration.yaml failed, please remove from yaml and use the config flow.", "unknown": "Connecting with the credentials from the configuration.yaml failed with an unknown error, please remove from yaml and use the config flow." } }, "options": { "step": { - "options": { + "init": { "title": "Options for the Azure Event Hub.", "data": { "send_interval": "Interval between sending batches to the hub." 
diff --git a/homeassistant/components/azure_service_bus/manifest.json b/homeassistant/components/azure_service_bus/manifest.json index 059f6300aec..31c1edac686 100644 --- a/homeassistant/components/azure_service_bus/manifest.json +++ b/homeassistant/components/azure_service_bus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/azure_service_bus", "iot_class": "cloud_push", "loggers": ["azure"], + "quality_scale": "legacy", "requirements": ["azure-servicebus==7.10.0"] } diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index 2f9019300db..f1a6f3be196 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -1,40 +1,85 @@ """The Backup integration.""" -from homeassistant.components.hassio import is_hassio from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN, LOGGER +from .agent import ( + BackupAgent, + BackupAgentError, + BackupAgentPlatformProtocol, + LocalBackupAgent, +) +from .const import DATA_MANAGER, DOMAIN from .http import async_register_http_views -from .manager import BackupManager +from .manager import ( + BackupManager, + BackupPlatformProtocol, + BackupReaderWriter, + CoreBackupReaderWriter, + CreateBackupEvent, + ManagerBackup, + NewBackup, + WrittenBackup, +) +from .models import AddonInfo, AgentBackup, Folder from .websocket import async_register_websocket_handlers +__all__ = [ + "AddonInfo", + "AgentBackup", + "ManagerBackup", + "BackupAgent", + "BackupAgentError", + "BackupAgentPlatformProtocol", + "BackupPlatformProtocol", + "BackupReaderWriter", + "CreateBackupEvent", + "Folder", + "LocalBackupAgent", + "NewBackup", + "WrittenBackup", +] + CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Backup integration.""" - backup_manager = BackupManager(hass) - hass.data[DOMAIN] = backup_manager - with_hassio = is_hassio(hass) + reader_writer: BackupReaderWriter + if not with_hassio: + reader_writer = CoreBackupReaderWriter(hass) + else: + # pylint: disable-next=import-outside-toplevel, hass-component-root-import + from homeassistant.components.hassio.backup import SupervisorBackupReaderWriter + + reader_writer = SupervisorBackupReaderWriter(hass) + + backup_manager = BackupManager(hass, reader_writer) + hass.data[DATA_MANAGER] = backup_manager + await backup_manager.async_setup() + async_register_websocket_handlers(hass, with_hassio) - if with_hassio: - if DOMAIN in config: - LOGGER.error( - "The backup integration is not supported on this installation method, " - "please remove it from your configuration" - ) - return True - async def async_handle_create_service(call: ServiceCall) -> None: """Service handler for creating backups.""" - await backup_manager.generate_backup() + agent_id = list(backup_manager.local_backup_agents)[0] + await backup_manager.async_create_backup( + agent_ids=[agent_id], + include_addons=None, + include_all_addons=False, + include_database=True, + include_folders=None, + include_homeassistant=True, + name=None, + password=None, + ) - hass.services.async_register(DOMAIN, "create", async_handle_create_service) + if not with_hassio: + hass.services.async_register(DOMAIN, "create", async_handle_create_service) async_register_http_views(hass) 
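For reference, a minimal sketch of how the manager-based API wired up in the rewritten backup async_setup can be invoked, mirroring the create-service handler shown above. The helper name create_local_backup is hypothetical and not part of the patch; everything else uses only names this diff introduces (DATA_MANAGER, BackupManager.local_backup_agents, BackupManager.async_create_backup), so treat it as an illustration under those assumptions rather than as the integration's own code.

# Illustrative sketch only -- mirrors async_handle_create_service in the hunk above.
from homeassistant.components.backup.const import DATA_MANAGER
from homeassistant.core import HomeAssistant


async def create_local_backup(hass: HomeAssistant) -> None:
    """Trigger a backup through the new BackupManager (hypothetical helper)."""
    manager = hass.data[DATA_MANAGER]  # BackupManager stored by async_setup
    agent_id = list(manager.local_backup_agents)[0]  # first local agent, as the service handler does
    await manager.async_create_backup(
        agent_ids=[agent_id],
        include_addons=None,
        include_all_addons=False,
        include_database=True,
        include_folders=None,
        include_homeassistant=True,
        name=None,
        password=None,
    )

Note that, per the hunk above, the backup.create service itself is only registered when Supervisor is not present (if not with_hassio); Supervisor-managed installations go through SupervisorBackupReaderWriter instead.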
diff --git a/homeassistant/components/backup/agent.py b/homeassistant/components/backup/agent.py new file mode 100644 index 00000000000..44bc9b298e8 --- /dev/null +++ b/homeassistant/components/backup/agent.py @@ -0,0 +1,121 @@ +"""Backup agents for the Backup integration.""" + +from __future__ import annotations + +import abc +from collections.abc import AsyncIterator, Callable, Coroutine +from pathlib import Path +from typing import Any, Protocol + +from propcache import cached_property + +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError + +from .models import AgentBackup + + +class BackupAgentError(HomeAssistantError): + """Base class for backup agent errors.""" + + +class BackupAgentUnreachableError(BackupAgentError): + """Raised when the agent can't reach its API.""" + + _message = "The backup agent is unreachable." + + +class BackupAgent(abc.ABC): + """Backup agent interface.""" + + domain: str + name: str + + @cached_property + def agent_id(self) -> str: + """Return the agent_id.""" + return f"{self.domain}.{self.name}" + + @abc.abstractmethod + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + :return: An async iterator that yields bytes. + """ + + @abc.abstractmethod + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. + + :param open_stream: A function returning an async iterator that yields bytes. + :param backup: Metadata about the backup that should be uploaded. + """ + + @abc.abstractmethod + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + """ + + @abc.abstractmethod + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + + @abc.abstractmethod + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + + +class LocalBackupAgent(BackupAgent): + """Local backup agent.""" + + @abc.abstractmethod + def get_backup_path(self, backup_id: str) -> Path: + """Return the local path to a backup. + + The method should return the path to the backup file with the specified id. + """ + + +class BackupAgentPlatformProtocol(Protocol): + """Define the format of backup platforms which implement backup agents.""" + + async def async_get_backup_agents( + self, + hass: HomeAssistant, + **kwargs: Any, + ) -> list[BackupAgent]: + """Return a list of backup agents.""" + + @callback + def async_register_backup_agents_listener( + self, + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, + ) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed. + + :return: A function to unregister the listener. 
+ """ diff --git a/homeassistant/components/backup/backup.py b/homeassistant/components/backup/backup.py new file mode 100644 index 00000000000..ef4924161c2 --- /dev/null +++ b/homeassistant/components/backup/backup.py @@ -0,0 +1,125 @@ +"""Local backup support for Core and Container installations.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator, Callable, Coroutine +import json +from pathlib import Path +from tarfile import TarError +from typing import Any + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.hassio import is_hassio + +from .agent import BackupAgent, LocalBackupAgent +from .const import DOMAIN, LOGGER +from .models import AgentBackup +from .util import read_backup + + +async def async_get_backup_agents( + hass: HomeAssistant, + **kwargs: Any, +) -> list[BackupAgent]: + """Return the local backup agent.""" + if is_hassio(hass): + return [] + return [CoreLocalBackupAgent(hass)] + + +class CoreLocalBackupAgent(LocalBackupAgent): + """Local backup agent for Core and Container installations.""" + + domain = DOMAIN + name = "local" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the backup agent.""" + super().__init__() + self._hass = hass + self._backup_dir = Path(hass.config.path("backups")) + self._backups: dict[str, AgentBackup] = {} + self._loaded_backups = False + + async def _load_backups(self) -> None: + """Load data of stored backup files.""" + backups = await self._hass.async_add_executor_job(self._read_backups) + LOGGER.debug("Loaded %s local backups", len(backups)) + self._backups = backups + self._loaded_backups = True + + def _read_backups(self) -> dict[str, AgentBackup]: + """Read backups from disk.""" + backups: dict[str, AgentBackup] = {} + for backup_path in self._backup_dir.glob("*.tar"): + try: + backup = read_backup(backup_path) + backups[backup.backup_id] = backup + except (OSError, TarError, json.JSONDecodeError, KeyError) as err: + LOGGER.warning("Unable to read backup %s: %s", backup_path, err) + return backups + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + raise NotImplementedError + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + self._backups[backup.backup_id] = backup + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + if not self._loaded_backups: + await self._load_backups() + return list(self._backups.values()) + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + if not self._loaded_backups: + await self._load_backups() + + if not (backup := self._backups.get(backup_id)): + return None + + backup_path = self.get_backup_path(backup_id) + if not await self._hass.async_add_executor_job(backup_path.exists): + LOGGER.debug( + ( + "Removing tracked backup (%s) that does not exists on the expected" + " path %s" + ), + backup.backup_id, + backup_path, + ) + self._backups.pop(backup_id) + return None + + return backup + + def get_backup_path(self, backup_id: str) -> Path: + """Return the local path to a backup.""" + return self._backup_dir / f"{backup_id}.tar" + + async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: + """Delete a backup file.""" + if await self.async_get_backup(backup_id) is 
None: + return + + backup_path = self.get_backup_path(backup_id) + await self._hass.async_add_executor_job(backup_path.unlink, True) + LOGGER.debug("Deleted backup located at %s", backup_path) + self._backups.pop(backup_id) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py new file mode 100644 index 00000000000..26ce691a4cc --- /dev/null +++ b/homeassistant/components/backup/config.py @@ -0,0 +1,471 @@ +"""Provide persistent configuration for the backup integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +from dataclasses import dataclass, field, replace +from datetime import datetime, timedelta +from enum import StrEnum +from typing import TYPE_CHECKING, Self, TypedDict + +from cronsim import CronSim + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.event import async_call_later, async_track_point_in_time +from homeassistant.helpers.typing import UNDEFINED, UndefinedType +from homeassistant.util import dt as dt_util + +from .const import LOGGER +from .models import Folder + +if TYPE_CHECKING: + from .manager import BackupManager, ManagerBackup + +# The time of the automatic backup event should be compatible with +# the time of the recorder's nightly job which runs at 04:12. +# Run the backup at 04:45. +CRON_PATTERN_DAILY = "45 4 * * *" +CRON_PATTERN_WEEKLY = "45 4 * * {}" + + +class StoredBackupConfig(TypedDict): + """Represent the stored backup config.""" + + create_backup: StoredCreateBackupConfig + last_attempted_strategy_backup: str | None + last_completed_strategy_backup: str | None + retention: StoredRetentionConfig + schedule: StoredBackupSchedule + + +@dataclass(kw_only=True) +class BackupConfigData: + """Represent loaded backup config data.""" + + create_backup: CreateBackupConfig + last_attempted_strategy_backup: datetime | None = None + last_completed_strategy_backup: datetime | None = None + retention: RetentionConfig + schedule: BackupSchedule + + @classmethod + def from_dict(cls, data: StoredBackupConfig) -> Self: + """Initialize backup config data from a dict.""" + include_folders_data = data["create_backup"]["include_folders"] + if include_folders_data: + include_folders = [Folder(folder) for folder in include_folders_data] + else: + include_folders = None + retention = data["retention"] + + if last_attempted_str := data["last_attempted_strategy_backup"]: + last_attempted = dt_util.parse_datetime(last_attempted_str) + else: + last_attempted = None + + if last_attempted_str := data["last_completed_strategy_backup"]: + last_completed = dt_util.parse_datetime(last_attempted_str) + else: + last_completed = None + + return cls( + create_backup=CreateBackupConfig( + agent_ids=data["create_backup"]["agent_ids"], + include_addons=data["create_backup"]["include_addons"], + include_all_addons=data["create_backup"]["include_all_addons"], + include_database=data["create_backup"]["include_database"], + include_folders=include_folders, + name=data["create_backup"]["name"], + password=data["create_backup"]["password"], + ), + last_attempted_strategy_backup=last_attempted, + last_completed_strategy_backup=last_completed, + retention=RetentionConfig( + copies=retention["copies"], + days=retention["days"], + ), + schedule=BackupSchedule(state=ScheduleState(data["schedule"]["state"])), + ) + + def to_dict(self) -> StoredBackupConfig: + """Convert backup config data to a dict.""" + if self.last_attempted_strategy_backup: + last_attempted = 
self.last_attempted_strategy_backup.isoformat() + else: + last_attempted = None + + if self.last_completed_strategy_backup: + last_completed = self.last_completed_strategy_backup.isoformat() + else: + last_completed = None + + return StoredBackupConfig( + create_backup=self.create_backup.to_dict(), + last_attempted_strategy_backup=last_attempted, + last_completed_strategy_backup=last_completed, + retention=self.retention.to_dict(), + schedule=self.schedule.to_dict(), + ) + + +class BackupConfig: + """Handle backup config.""" + + def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None: + """Initialize backup config.""" + self.data = BackupConfigData( + create_backup=CreateBackupConfig(), + retention=RetentionConfig(), + schedule=BackupSchedule(), + ) + self._manager = manager + + def load(self, stored_config: StoredBackupConfig) -> None: + """Load config.""" + self.data = BackupConfigData.from_dict(stored_config) + self.data.schedule.apply(self._manager) + + async def update( + self, + *, + create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED, + retention: RetentionParametersDict | UndefinedType = UNDEFINED, + schedule: ScheduleState | UndefinedType = UNDEFINED, + ) -> None: + """Update config.""" + if create_backup is not UNDEFINED: + self.data.create_backup = replace(self.data.create_backup, **create_backup) + if retention is not UNDEFINED: + new_retention = RetentionConfig(**retention) + if new_retention != self.data.retention: + self.data.retention = new_retention + self.data.retention.apply(self._manager) + if schedule is not UNDEFINED: + new_schedule = BackupSchedule(state=schedule) + if new_schedule.to_dict() != self.data.schedule.to_dict(): + self.data.schedule = new_schedule + self.data.schedule.apply(self._manager) + + self._manager.store.save() + + +@dataclass(kw_only=True) +class RetentionConfig: + """Represent the backup retention configuration.""" + + copies: int | None = None + days: int | None = None + + def apply(self, manager: BackupManager) -> None: + """Apply backup retention configuration.""" + if self.days is not None: + self._schedule_next(manager) + else: + self._unschedule_next(manager) + + def to_dict(self) -> StoredRetentionConfig: + """Convert backup retention configuration to a dict.""" + return StoredRetentionConfig( + copies=self.copies, + days=self.days, + ) + + @callback + def _schedule_next( + self, + manager: BackupManager, + ) -> None: + """Schedule the next delete after days.""" + self._unschedule_next(manager) + + async def _delete_backups(now: datetime) -> None: + """Delete backups older than days.""" + self._schedule_next(manager) + + def _backups_filter( + backups: dict[str, ManagerBackup], + ) -> dict[str, ManagerBackup]: + """Return backups older than days to delete.""" + # we need to check here since we await before + # this filter is applied + if self.days is None: + return {} + now = dt_util.utcnow() + return { + backup_id: backup + for backup_id, backup in backups.items() + if dt_util.parse_datetime(backup.date, raise_on_error=True) + + timedelta(days=self.days) + < now + } + + await _delete_filtered_backups(manager, _backups_filter) + + manager.remove_next_delete_event = async_call_later( + manager.hass, timedelta(days=1), _delete_backups + ) + + @callback + def _unschedule_next(self, manager: BackupManager) -> None: + """Unschedule the next delete after days.""" + if (remove_next_event := manager.remove_next_delete_event) is not None: + remove_next_event() + manager.remove_next_delete_event = None + + +class 
StoredRetentionConfig(TypedDict): + """Represent the stored backup retention configuration.""" + + copies: int | None + days: int | None + + +class RetentionParametersDict(TypedDict, total=False): + """Represent the parameters for retention.""" + + copies: int | None + days: int | None + + +class StoredBackupSchedule(TypedDict): + """Represent the stored backup schedule configuration.""" + + state: ScheduleState + + +class ScheduleState(StrEnum): + """Represent the schedule state.""" + + NEVER = "never" + DAILY = "daily" + MONDAY = "mon" + TUESDAY = "tue" + WEDNESDAY = "wed" + THURSDAY = "thu" + FRIDAY = "fri" + SATURDAY = "sat" + SUNDAY = "sun" + + +@dataclass(kw_only=True) +class BackupSchedule: + """Represent the backup schedule.""" + + state: ScheduleState = ScheduleState.NEVER + cron_event: CronSim | None = field(init=False, default=None) + + @callback + def apply( + self, + manager: BackupManager, + ) -> None: + """Apply a new schedule. + + There are only three possible state types: never, daily, or weekly. + """ + if self.state is ScheduleState.NEVER: + self._unschedule_next(manager) + return + + if self.state is ScheduleState.DAILY: + self._schedule_next(CRON_PATTERN_DAILY, manager) + else: + self._schedule_next( + CRON_PATTERN_WEEKLY.format(self.state.value), + manager, + ) + + @callback + def _schedule_next( + self, + cron_pattern: str, + manager: BackupManager, + ) -> None: + """Schedule the next backup.""" + self._unschedule_next(manager) + now = dt_util.now() + if (cron_event := self.cron_event) is None: + seed_time = manager.config.data.last_completed_strategy_backup or now + cron_event = self.cron_event = CronSim(cron_pattern, seed_time) + next_time = next(cron_event) + + if next_time < now: + # schedule a backup at next daily time once + # if we missed the last scheduled backup + cron_event = CronSim(CRON_PATTERN_DAILY, now) + next_time = next(cron_event) + # reseed the cron event attribute + # add a day to the next time to avoid scheduling at the same time again + self.cron_event = CronSim(cron_pattern, now + timedelta(days=1)) + + async def _create_backup(now: datetime) -> None: + """Create backup.""" + manager.remove_next_backup_event = None + config_data = manager.config.data + self._schedule_next(cron_pattern, manager) + + # create the backup + try: + await manager.async_create_backup( + agent_ids=config_data.create_backup.agent_ids, + include_addons=config_data.create_backup.include_addons, + include_all_addons=config_data.create_backup.include_all_addons, + include_database=config_data.create_backup.include_database, + include_folders=config_data.create_backup.include_folders, + include_homeassistant=True, # always include HA + name=config_data.create_backup.name, + password=config_data.create_backup.password, + with_strategy_settings=True, + ) + except Exception: # noqa: BLE001 + # another more specific exception will be added + # and handled in the future + LOGGER.exception("Unexpected error creating automatic backup") + + # delete old backups more numerous than copies + + def _backups_filter( + backups: dict[str, ManagerBackup], + ) -> dict[str, ManagerBackup]: + """Return oldest backups more numerous than copies to delete.""" + # we need to check here since we await before + # this filter is applied + if config_data.retention.copies is None: + return {} + return dict( + sorted( + backups.items(), + key=lambda backup_item: backup_item[1].date, + )[: len(backups) - config_data.retention.copies] + ) + + await _delete_filtered_backups(manager, _backups_filter) + + 
manager.remove_next_backup_event = async_track_point_in_time( + manager.hass, _create_backup, next_time + ) + + def to_dict(self) -> StoredBackupSchedule: + """Convert backup schedule to a dict.""" + return StoredBackupSchedule(state=self.state) + + @callback + def _unschedule_next(self, manager: BackupManager) -> None: + """Unschedule the next backup.""" + if (remove_next_event := manager.remove_next_backup_event) is not None: + remove_next_event() + manager.remove_next_backup_event = None + + +@dataclass(kw_only=True) +class CreateBackupConfig: + """Represent the config for async_create_backup.""" + + agent_ids: list[str] = field(default_factory=list) + include_addons: list[str] | None = None + include_all_addons: bool = False + include_database: bool = True + include_folders: list[Folder] | None = None + name: str | None = None + password: str | None = None + + def to_dict(self) -> StoredCreateBackupConfig: + """Convert create backup config to a dict.""" + return { + "agent_ids": self.agent_ids, + "include_addons": self.include_addons, + "include_all_addons": self.include_all_addons, + "include_database": self.include_database, + "include_folders": self.include_folders, + "name": self.name, + "password": self.password, + } + + +class StoredCreateBackupConfig(TypedDict): + """Represent the stored config for async_create_backup.""" + + agent_ids: list[str] + include_addons: list[str] | None + include_all_addons: bool + include_database: bool + include_folders: list[Folder] | None + name: str | None + password: str | None + + +class CreateBackupParametersDict(TypedDict, total=False): + """Represent the parameters for async_create_backup.""" + + agent_ids: list[str] + include_addons: list[str] | None + include_all_addons: bool + include_database: bool + include_folders: list[Folder] | None + name: str | None + password: str | None + + +async def _delete_filtered_backups( + manager: BackupManager, + backup_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]], +) -> None: + """Delete backups parsed with a filter. + + :param manager: The backup manager. + :param backup_filter: A filter that should return the backups to delete. + """ + backups, get_agent_errors = await manager.async_get_backups() + if get_agent_errors: + LOGGER.debug( + "Error getting backups; continuing anyway: %s", + get_agent_errors, + ) + + # only delete backups that are created by the backup strategy + backups = { + backup_id: backup + for backup_id, backup in backups.items() + if backup.with_strategy_settings + } + + LOGGER.debug("Total strategy backups: %s", backups) + + filtered_backups = backup_filter(backups) + + if not filtered_backups: + return + + # always delete oldest backup first + filtered_backups = dict( + sorted( + filtered_backups.items(), + key=lambda backup_item: backup_item[1].date, + ) + ) + + if len(filtered_backups) >= len(backups): + # Never delete the last backup. 
+ last_backup = filtered_backups.popitem() + LOGGER.debug("Keeping the last backup: %s", last_backup) + + LOGGER.debug("Backups to delete: %s", filtered_backups) + + if not filtered_backups: + return + + backup_ids = list(filtered_backups) + delete_results = await asyncio.gather( + *(manager.async_delete_backup(backup_id) for backup_id in filtered_backups) + ) + agent_errors = { + backup_id: error + for backup_id, error in zip(backup_ids, delete_results, strict=True) + if error + } + if agent_errors: + LOGGER.error( + "Error deleting old copies: %s", + agent_errors, + ) diff --git a/homeassistant/components/backup/const.py b/homeassistant/components/backup/const.py index 9573d522b56..c2070a37b2d 100644 --- a/homeassistant/components/backup/const.py +++ b/homeassistant/components/backup/const.py @@ -1,16 +1,34 @@ """Constants for the Backup integration.""" -from logging import getLogger +from __future__ import annotations +from logging import getLogger +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from .manager import BackupManager + +BUF_SIZE = 2**20 * 4 # 4MB DOMAIN = "backup" +DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN) LOGGER = getLogger(__package__) EXCLUDE_FROM_BACKUP = [ "__pycache__/*", ".DS_Store", + ".HA_RESTORE", "*.db-shm", "*.log.*", "*.log", "backups/*.tar", + "tmp_backups/*.tar", "OZW_Log.txt", + "tts/*", +] + +EXCLUDE_DATABASE_FROM_BACKUP = [ + "home-assistant_v2.db", + "home-assistant_v2.db-wal", ] diff --git a/homeassistant/components/backup/http.py b/homeassistant/components/backup/http.py index 793192aa623..73a8c8eb602 100644 --- a/homeassistant/components/backup/http.py +++ b/homeassistant/components/backup/http.py @@ -2,49 +2,105 @@ from __future__ import annotations +import asyncio from http import HTTPStatus +from typing import cast +from aiohttp import BodyPartReader from aiohttp.hdrs import CONTENT_DISPOSITION -from aiohttp.web import FileResponse, Request, Response +from aiohttp.web import FileResponse, Request, Response, StreamResponse -from homeassistant.components.http import KEY_HASS, HomeAssistantView +from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.util import slugify -from .const import DOMAIN -from .manager import BackupManager +from .const import DATA_MANAGER @callback def async_register_http_views(hass: HomeAssistant) -> None: """Register the http views.""" hass.http.register_view(DownloadBackupView) + hass.http.register_view(UploadBackupView) class DownloadBackupView(HomeAssistantView): """Generate backup view.""" - url = "/api/backup/download/{slug}" + url = "/api/backup/download/{backup_id}" name = "api:backup:download" async def get( self, request: Request, - slug: str, - ) -> FileResponse | Response: + backup_id: str, + ) -> StreamResponse | FileResponse | Response: """Download a backup file.""" if not request["hass_user"].is_admin: return Response(status=HTTPStatus.UNAUTHORIZED) + try: + agent_id = request.query.getone("agent_id") + except KeyError: + return Response(status=HTTPStatus.BAD_REQUEST) - manager: BackupManager = request.app[KEY_HASS].data[DOMAIN] - backup = await manager.get_backup(slug) + manager = request.app[KEY_HASS].data[DATA_MANAGER] + if agent_id not in manager.backup_agents: + return Response(status=HTTPStatus.BAD_REQUEST) + agent = manager.backup_agents[agent_id] + backup = await 
agent.async_get_backup(backup_id) - if backup is None or not backup.path.exists(): + # We don't need to check if the path exists, aiohttp.FileResponse will handle + # that + if backup is None: return Response(status=HTTPStatus.NOT_FOUND) - return FileResponse( - path=backup.path.as_posix(), - headers={ - CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" - }, - ) + headers = { + CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" + } + if agent_id in manager.local_backup_agents: + local_agent = manager.local_backup_agents[agent_id] + path = local_agent.get_backup_path(backup_id) + return FileResponse(path=path.as_posix(), headers=headers) + + stream = await agent.async_download_backup(backup_id) + response = StreamResponse(status=HTTPStatus.OK, headers=headers) + await response.prepare(request) + async for chunk in stream: + await response.write(chunk) + return response + + +class UploadBackupView(HomeAssistantView): + """Generate backup view.""" + + url = "/api/backup/upload" + name = "api:backup:upload" + + @require_admin + async def post(self, request: Request) -> Response: + """Upload a backup file.""" + try: + agent_ids = request.query.getall("agent_id") + except KeyError: + return Response(status=HTTPStatus.BAD_REQUEST) + manager = request.app[KEY_HASS].data[DATA_MANAGER] + reader = await request.multipart() + contents = cast(BodyPartReader, await reader.next()) + + try: + await manager.async_receive_backup(contents=contents, agent_ids=agent_ids) + except OSError as err: + return Response( + body=f"Can't write backup file: {err}", + status=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + except HomeAssistantError as err: + return Response( + body=f"Can't upload backup file: {err}", + status=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + except asyncio.CancelledError: + return Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) + + return Response(status=HTTPStatus.CREATED) diff --git a/homeassistant/components/backup/icons.json b/homeassistant/components/backup/icons.json index cba4fb22831..bd5ff4a81ee 100644 --- a/homeassistant/components/backup/icons.json +++ b/homeassistant/components/backup/icons.json @@ -1,5 +1,7 @@ { "services": { - "create": "mdi:cloud-upload" + "create": { + "service": "mdi:cloud-upload" + } } } diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index e3331836202..66977e568e4 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -2,45 +2,183 @@ from __future__ import annotations +import abc import asyncio -from dataclasses import asdict, dataclass +from collections.abc import AsyncIterator, Callable, Coroutine +from dataclasses import dataclass +from enum import StrEnum import hashlib import io import json from pathlib import Path +import shutil import tarfile -from tarfile import TarError import time -from typing import Any, Protocol, cast +from typing import TYPE_CHECKING, Any, Protocol, TypedDict +import aiohttp from securetar import SecureTarFile, atomic_contents_add +from homeassistant.backup_restore import RESTORE_BACKUP_FILE, password_to_key from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import integration_platform from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util -from homeassistant.util.json import json_loads_object -from .const import 
DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER - -BUF_SIZE = 2**20 * 4 # 4MB +from .agent import ( + BackupAgent, + BackupAgentError, + BackupAgentPlatformProtocol, + LocalBackupAgent, +) +from .config import BackupConfig +from .const import ( + BUF_SIZE, + DATA_MANAGER, + DOMAIN, + EXCLUDE_DATABASE_FROM_BACKUP, + EXCLUDE_FROM_BACKUP, + LOGGER, +) +from .models import AgentBackup, Folder +from .store import BackupStore +from .util import make_backup_dir, read_backup -@dataclass(slots=True) -class Backup: +@dataclass(frozen=True, kw_only=True, slots=True) +class NewBackup: + """New backup class.""" + + backup_job_id: str + + +@dataclass(frozen=True, kw_only=True, slots=True) +class ManagerBackup(AgentBackup): """Backup class.""" - slug: str - name: str - date: str - path: Path - size: float + agent_ids: list[str] + failed_agent_ids: list[str] + with_strategy_settings: bool - def as_dict(self) -> dict: - """Return a dict representation of this backup.""" - return {**asdict(self), "path": self.path.as_posix()} + +@dataclass(frozen=True, kw_only=True, slots=True) +class WrittenBackup: + """Written backup class.""" + + backup: AgentBackup + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]] + release_stream: Callable[[], Coroutine[Any, Any, None]] + + +class BackupManagerState(StrEnum): + """Backup state type.""" + + IDLE = "idle" + CREATE_BACKUP = "create_backup" + RECEIVE_BACKUP = "receive_backup" + RESTORE_BACKUP = "restore_backup" + + +class CreateBackupStage(StrEnum): + """Create backup stage enum.""" + + ADDON_REPOSITORIES = "addon_repositories" + ADDONS = "addons" + AWAIT_ADDON_RESTARTS = "await_addon_restarts" + DOCKER_CONFIG = "docker_config" + FINISHING_FILE = "finishing_file" + FOLDERS = "folders" + HOME_ASSISTANT = "home_assistant" + UPLOAD_TO_AGENTS = "upload_to_agents" + + +class CreateBackupState(StrEnum): + """Create backup state enum.""" + + COMPLETED = "completed" + FAILED = "failed" + IN_PROGRESS = "in_progress" + + +class ReceiveBackupStage(StrEnum): + """Receive backup stage enum.""" + + RECEIVE_FILE = "receive_file" + UPLOAD_TO_AGENTS = "upload_to_agents" + + +class ReceiveBackupState(StrEnum): + """Receive backup state enum.""" + + COMPLETED = "completed" + FAILED = "failed" + IN_PROGRESS = "in_progress" + + +class RestoreBackupStage(StrEnum): + """Restore backup stage enum.""" + + ADDON_REPOSITORIES = "addon_repositories" + ADDONS = "addons" + AWAIT_ADDON_RESTARTS = "await_addon_restarts" + AWAIT_HOME_ASSISTANT_RESTART = "await_home_assistant_restart" + CHECK_HOME_ASSISTANT = "check_home_assistant" + DOCKER_CONFIG = "docker_config" + DOWNLOAD_FROM_AGENT = "download_from_agent" + FOLDERS = "folders" + HOME_ASSISTANT = "home_assistant" + REMOVE_DELTA_ADDONS = "remove_delta_addons" + + +class RestoreBackupState(StrEnum): + """Receive backup state enum.""" + + COMPLETED = "completed" + FAILED = "failed" + IN_PROGRESS = "in_progress" + + +@dataclass(frozen=True, kw_only=True, slots=True) +class ManagerStateEvent: + """Backup state class.""" + + manager_state: BackupManagerState + + +@dataclass(frozen=True, kw_only=True, slots=True) +class IdleEvent(ManagerStateEvent): + """Backup manager idle.""" + + manager_state: BackupManagerState = BackupManagerState.IDLE + + +@dataclass(frozen=True, kw_only=True, slots=True) +class CreateBackupEvent(ManagerStateEvent): + """Backup in progress.""" + + manager_state: BackupManagerState = BackupManagerState.CREATE_BACKUP + stage: CreateBackupStage | None + state: CreateBackupState + + +@dataclass(frozen=True, kw_only=True, 
slots=True) +class ReceiveBackupEvent(ManagerStateEvent): + """Backup receive.""" + + manager_state: BackupManagerState = BackupManagerState.RECEIVE_BACKUP + stage: ReceiveBackupStage | None + state: ReceiveBackupState + + +@dataclass(frozen=True, kw_only=True, slots=True) +class RestoreBackupEvent(ManagerStateEvent): + """Backup restore.""" + + manager_state: BackupManagerState = BackupManagerState.RESTORE_BACKUP + stage: RestoreBackupStage | None + state: RestoreBackupState class BackupPlatformProtocol(Protocol): @@ -53,42 +191,170 @@ class BackupPlatformProtocol(Protocol): """Perform operations after a backup finishes.""" -class BackupManager: - """Backup manager for the Backup integration.""" +class BackupReaderWriter(abc.ABC): + """Abstract class for reading and writing backups.""" - def __init__(self, hass: HomeAssistant) -> None: + @abc.abstractmethod + async def async_create_backup( + self, + *, + agent_ids: list[str], + backup_name: str, + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + on_progress: Callable[[ManagerStateEvent], None], + password: str | None, + ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]: + """Create a backup.""" + + @abc.abstractmethod + async def async_receive_backup( + self, + *, + agent_ids: list[str], + stream: AsyncIterator[bytes], + suggested_filename: str, + ) -> WrittenBackup: + """Receive a backup.""" + + @abc.abstractmethod + async def async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Restore a backup.""" + + +class BackupManager: + """Define the format that backup managers can have.""" + + def __init__(self, hass: HomeAssistant, reader_writer: BackupReaderWriter) -> None: """Initialize the backup manager.""" self.hass = hass - self.backup_dir = Path(hass.config.path("backups")) - self.backing_up = False - self.backups: dict[str, Backup] = {} self.platforms: dict[str, BackupPlatformProtocol] = {} - self.loaded_backups = False - self.loaded_platforms = False + self.backup_agent_platforms: dict[str, BackupAgentPlatformProtocol] = {} + self.backup_agents: dict[str, BackupAgent] = {} + self.local_backup_agents: dict[str, LocalBackupAgent] = {} + + self.config = BackupConfig(hass, self) + self._reader_writer = reader_writer + self.known_backups = KnownBackups(self) + self.store = BackupStore(hass, self) + + # Tasks and flags tracking backup and restore progress + self._backup_task: asyncio.Task[WrittenBackup] | None = None + self._backup_finish_task: asyncio.Task[None] | None = None + + # Backup schedule and retention listeners + self.remove_next_backup_event: Callable[[], None] | None = None + self.remove_next_delete_event: Callable[[], None] | None = None + + # Latest backup event and backup event subscribers + self.last_event: ManagerStateEvent = IdleEvent() + self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = [] + + async def async_setup(self) -> None: + """Set up the backup manager.""" + stored = await self.store.load() + if stored: + self.config.load(stored["config"]) + self.known_backups.load(stored["backups"]) + + await self.load_platforms() + + @property + def state(self) -> BackupManagerState: + """Return the state of the backup manager.""" + 
return self.last_event.manager_state @callback - def _add_platform( + def _add_platform_pre_post_handler( self, - hass: HomeAssistant, integration_domain: str, platform: BackupPlatformProtocol, ) -> None: - """Add a platform to the backup manager.""" + """Add a backup platform.""" if not hasattr(platform, "async_pre_backup") or not hasattr( platform, "async_post_backup" ): - LOGGER.warning( - "%s does not implement required functions for the backup platform", - integration_domain, - ) return + self.platforms[integration_domain] = platform - async def pre_backup_actions(self) -> None: - """Perform pre backup actions.""" - if not self.loaded_platforms: - await self.load_platforms() + @callback + def _async_add_backup_agent_platform( + self, + integration_domain: str, + platform: BackupAgentPlatformProtocol, + ) -> None: + """Add backup agent platform to the backup manager.""" + if not hasattr(platform, "async_get_backup_agents"): + return + self.backup_agent_platforms[integration_domain] = platform + + @callback + def listener() -> None: + LOGGER.debug("Loading backup agents for %s", integration_domain) + self.hass.async_create_task( + self._async_reload_backup_agents(integration_domain) + ) + + if hasattr(platform, "async_register_backup_agents_listener"): + platform.async_register_backup_agents_listener(self.hass, listener=listener) + + listener() + + async def _async_reload_backup_agents(self, domain: str) -> None: + """Add backup agent platform to the backup manager.""" + platform = self.backup_agent_platforms[domain] + + # Remove all agents for the domain + for agent_id in list(self.backup_agents): + if self.backup_agents[agent_id].domain == domain: + del self.backup_agents[agent_id] + for agent_id in list(self.local_backup_agents): + if self.local_backup_agents[agent_id].domain == domain: + del self.local_backup_agents[agent_id] + + # Add new agents + agents = await platform.async_get_backup_agents(self.hass) + self.backup_agents.update({agent.agent_id: agent for agent in agents}) + self.local_backup_agents.update( + { + agent.agent_id: agent + for agent in agents + if isinstance(agent, LocalBackupAgent) + } + ) + + async def _add_platform( + self, + hass: HomeAssistant, + integration_domain: str, + platform: Any, + ) -> None: + """Add a backup platform manager.""" + self._add_platform_pre_post_handler(integration_domain, platform) + self._async_add_backup_agent_platform(integration_domain, platform) + LOGGER.debug("Backup platform %s loaded", integration_domain) + LOGGER.debug("%s platforms loaded in total", len(self.platforms)) + LOGGER.debug("%s agents loaded in total", len(self.backup_agents)) + LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents)) + + async def async_pre_backup_actions(self) -> None: + """Perform pre backup actions.""" pre_backup_results = await asyncio.gather( *( platform.async_pre_backup(self.hass) @@ -100,11 +366,8 @@ class BackupManager: if isinstance(result, Exception): raise result - async def post_backup_actions(self) -> None: + async def async_post_backup_actions(self) -> None: """Perform post backup actions.""" - if not self.loaded_platforms: - await self.load_platforms() - post_backup_results = await asyncio.gather( *( platform.async_post_backup(self.hass) @@ -116,130 +379,706 @@ class BackupManager: if isinstance(result, Exception): raise result - async def load_backups(self) -> None: - """Load data of stored backup files.""" - backups = await self.hass.async_add_executor_job(self._read_backups) - LOGGER.debug("Loaded %s 
backups", len(backups)) - self.backups = backups - self.loaded_backups = True - async def load_platforms(self) -> None: """Load backup platforms.""" await integration_platform.async_process_integration_platforms( - self.hass, DOMAIN, self._add_platform, wait_for_platforms=True + self.hass, + DOMAIN, + self._add_platform, + wait_for_platforms=True, ) LOGGER.debug("Loaded %s platforms", len(self.platforms)) - self.loaded_platforms = True + LOGGER.debug("Loaded %s agents", len(self.backup_agents)) - def _read_backups(self) -> dict[str, Backup]: - """Read backups from disk.""" - backups: dict[str, Backup] = {} - for backup_path in self.backup_dir.glob("*.tar"): - try: - with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file: - if data_file := backup_file.extractfile("./backup.json"): - data = json_loads_object(data_file.read()) - backup = Backup( - slug=cast(str, data["slug"]), - name=cast(str, data["name"]), - date=cast(str, data["date"]), - path=backup_path, - size=round(backup_path.stat().st_size / 1_048_576, 2), - ) - backups[backup.slug] = backup - except (OSError, TarError, json.JSONDecodeError, KeyError) as err: - LOGGER.warning("Unable to read backup %s: %s", backup_path, err) - return backups + async def _async_upload_backup( + self, + *, + backup: AgentBackup, + agent_ids: list[str], + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + ) -> dict[str, Exception]: + """Upload a backup to selected agents.""" + agent_errors: dict[str, Exception] = {} - async def get_backups(self) -> dict[str, Backup]: - """Return backups.""" - if not self.loaded_backups: - await self.load_backups() + LOGGER.debug("Uploading backup %s to agents %s", backup.backup_id, agent_ids) - return self.backups + sync_backup_results = await asyncio.gather( + *( + self.backup_agents[agent_id].async_upload_backup( + open_stream=open_stream, + backup=backup, + ) + for agent_id in agent_ids + ), + return_exceptions=True, + ) + for idx, result in enumerate(sync_backup_results): + if isinstance(result, Exception): + agent_errors[agent_ids[idx]] = result + LOGGER.exception( + "Error during backup upload - %s", result, exc_info=result + ) + return agent_errors - async def get_backup(self, slug: str) -> Backup | None: - """Return a backup.""" - if not self.loaded_backups: - await self.load_backups() + async def async_get_backups( + self, + ) -> tuple[dict[str, ManagerBackup], dict[str, Exception]]: + """Get backups. - if not (backup := self.backups.get(slug)): - return None + Return a dictionary of Backup instances keyed by their ID. 
+ """ + backups: dict[str, ManagerBackup] = {} + agent_errors: dict[str, Exception] = {} + agent_ids = list(self.backup_agents) - if not backup.path.exists(): - LOGGER.debug( - ( - "Removing tracked backup (%s) that does not exists on the expected" - " path %s" - ), - backup.slug, - backup.path, - ) - self.backups.pop(slug) - return None + list_backups_results = await asyncio.gather( + *(agent.async_list_backups() for agent in self.backup_agents.values()), + return_exceptions=True, + ) + for idx, result in enumerate(list_backups_results): + if isinstance(result, BackupAgentError): + agent_errors[agent_ids[idx]] = result + continue + if isinstance(result, BaseException): + raise result + for agent_backup in result: + if (backup_id := agent_backup.backup_id) not in backups: + if known_backup := self.known_backups.get(backup_id): + failed_agent_ids = known_backup.failed_agent_ids + with_strategy_settings = known_backup.with_strategy_settings + else: + failed_agent_ids = [] + with_strategy_settings = False + backups[backup_id] = ManagerBackup( + agent_ids=[], + addons=agent_backup.addons, + backup_id=backup_id, + date=agent_backup.date, + database_included=agent_backup.database_included, + failed_agent_ids=failed_agent_ids, + folders=agent_backup.folders, + homeassistant_included=agent_backup.homeassistant_included, + homeassistant_version=agent_backup.homeassistant_version, + name=agent_backup.name, + protected=agent_backup.protected, + size=agent_backup.size, + with_strategy_settings=with_strategy_settings, + ) + backups[backup_id].agent_ids.append(agent_ids[idx]) - return backup + return (backups, agent_errors) - async def remove_backup(self, slug: str) -> None: - """Remove a backup.""" - if (backup := await self.get_backup(slug)) is None: - return + async def async_get_backup( + self, backup_id: str + ) -> tuple[ManagerBackup | None, dict[str, Exception]]: + """Get a backup.""" + backup: ManagerBackup | None = None + agent_errors: dict[str, Exception] = {} + agent_ids = list(self.backup_agents) - await self.hass.async_add_executor_job(backup.path.unlink, True) - LOGGER.debug("Removed backup located at %s", backup.path) - self.backups.pop(slug) + get_backup_results = await asyncio.gather( + *( + agent.async_get_backup(backup_id) + for agent in self.backup_agents.values() + ), + return_exceptions=True, + ) + for idx, result in enumerate(get_backup_results): + if isinstance(result, BackupAgentError): + agent_errors[agent_ids[idx]] = result + continue + if isinstance(result, BaseException): + raise result + if not result: + continue + if backup is None: + if known_backup := self.known_backups.get(backup_id): + failed_agent_ids = known_backup.failed_agent_ids + with_strategy_settings = known_backup.with_strategy_settings + else: + failed_agent_ids = [] + with_strategy_settings = False + backup = ManagerBackup( + agent_ids=[], + addons=result.addons, + backup_id=result.backup_id, + date=result.date, + database_included=result.database_included, + failed_agent_ids=failed_agent_ids, + folders=result.folders, + homeassistant_included=result.homeassistant_included, + homeassistant_version=result.homeassistant_version, + name=result.name, + protected=result.protected, + size=result.size, + with_strategy_settings=with_strategy_settings, + ) + backup.agent_ids.append(agent_ids[idx]) - async def generate_backup(self) -> Backup: - """Generate a backup.""" - if self.backing_up: - raise HomeAssistantError("Backup already in progress") + return (backup, agent_errors) + async def async_delete_backup(self, 
backup_id: str) -> dict[str, Exception]: + """Delete a backup.""" + agent_errors: dict[str, Exception] = {} + agent_ids = list(self.backup_agents) + + delete_backup_results = await asyncio.gather( + *( + agent.async_delete_backup(backup_id) + for agent in self.backup_agents.values() + ), + return_exceptions=True, + ) + for idx, result in enumerate(delete_backup_results): + if isinstance(result, BackupAgentError): + agent_errors[agent_ids[idx]] = result + continue + if isinstance(result, BaseException): + raise result + + if not agent_errors: + self.known_backups.remove(backup_id) + + return agent_errors + + async def async_receive_backup( + self, + *, + agent_ids: list[str], + contents: aiohttp.BodyPartReader, + ) -> None: + """Receive and store a backup file from upload.""" + if self.state is not BackupManagerState.IDLE: + raise HomeAssistantError(f"Backup manager busy: {self.state}") + self.async_on_backup_event( + ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS) + ) try: - self.backing_up = True - await self.pre_backup_actions() - backup_name = f"Core {HAVERSION}" - date_str = dt_util.now().isoformat() - slug = _generate_slug(date_str, backup_name) + await self._async_receive_backup(agent_ids=agent_ids, contents=contents) + except Exception: + self.async_on_backup_event( + ReceiveBackupEvent(stage=None, state=ReceiveBackupState.FAILED) + ) + raise + else: + self.async_on_backup_event( + ReceiveBackupEvent(stage=None, state=ReceiveBackupState.COMPLETED) + ) + finally: + self.async_on_backup_event(IdleEvent()) + + async def _async_receive_backup( + self, + *, + agent_ids: list[str], + contents: aiohttp.BodyPartReader, + ) -> None: + """Receive and store a backup file from upload.""" + contents.chunk_size = BUF_SIZE + self.async_on_backup_event( + ReceiveBackupEvent( + stage=ReceiveBackupStage.RECEIVE_FILE, + state=ReceiveBackupState.IN_PROGRESS, + ) + ) + written_backup = await self._reader_writer.async_receive_backup( + agent_ids=agent_ids, + stream=contents, + suggested_filename=contents.filename or "backup.tar", + ) + self.async_on_backup_event( + ReceiveBackupEvent( + stage=ReceiveBackupStage.UPLOAD_TO_AGENTS, + state=ReceiveBackupState.IN_PROGRESS, + ) + ) + agent_errors = await self._async_upload_backup( + backup=written_backup.backup, + agent_ids=agent_ids, + open_stream=written_backup.open_stream, + ) + await written_backup.release_stream() + self.known_backups.add(written_backup.backup, agent_errors, False) + + async def async_create_backup( + self, + *, + agent_ids: list[str], + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + name: str | None, + password: str | None, + with_strategy_settings: bool = False, + ) -> NewBackup: + """Create a backup.""" + new_backup = await self.async_initiate_backup( + agent_ids=agent_ids, + include_addons=include_addons, + include_all_addons=include_all_addons, + include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + name=name, + password=password, + with_strategy_settings=with_strategy_settings, + ) + assert self._backup_finish_task + await self._backup_finish_task + return new_backup + + async def async_initiate_backup( + self, + *, + agent_ids: list[str], + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + name: str | None, + password: str | 
None, + with_strategy_settings: bool = False, + ) -> NewBackup: + """Initiate generating a backup.""" + if self.state is not BackupManagerState.IDLE: + raise HomeAssistantError(f"Backup manager busy: {self.state}") + + if with_strategy_settings: + self.config.data.last_attempted_strategy_backup = dt_util.now() + self.store.save() + + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.IN_PROGRESS) + ) + try: + return await self._async_create_backup( + agent_ids=agent_ids, + include_addons=include_addons, + include_all_addons=include_all_addons, + include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + name=name, + password=password, + with_strategy_settings=with_strategy_settings, + ) + except Exception: + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) + ) + self.async_on_backup_event(IdleEvent()) + raise + + async def _async_create_backup( + self, + *, + agent_ids: list[str], + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + name: str | None, + password: str | None, + with_strategy_settings: bool, + ) -> NewBackup: + """Initiate generating a backup.""" + if not agent_ids: + raise HomeAssistantError("At least one agent must be selected") + if any(agent_id not in self.backup_agents for agent_id in agent_ids): + raise HomeAssistantError("Invalid agent selected") + if include_all_addons and include_addons: + raise HomeAssistantError( + "Cannot include all addons and specify specific addons" + ) + + backup_name = name or f"Core {HAVERSION}" + new_backup, self._backup_task = await self._reader_writer.async_create_backup( + agent_ids=agent_ids, + backup_name=backup_name, + include_addons=include_addons, + include_all_addons=include_all_addons, + include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + on_progress=self.async_on_backup_event, + password=password, + ) + self._backup_finish_task = self.hass.async_create_task( + self._async_finish_backup(agent_ids, with_strategy_settings), + name="backup_manager_finish_backup", + ) + return new_backup + + async def _async_finish_backup( + self, agent_ids: list[str], with_strategy_settings: bool + ) -> None: + if TYPE_CHECKING: + assert self._backup_task is not None + try: + written_backup = await self._backup_task + except Exception as err: # noqa: BLE001 + LOGGER.debug("Generating backup failed", exc_info=err) + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) + ) + else: + LOGGER.debug( + "Generated new backup with backup_id %s, uploading to agents %s", + written_backup.backup.backup_id, + agent_ids, + ) + self.async_on_backup_event( + CreateBackupEvent( + stage=CreateBackupStage.UPLOAD_TO_AGENTS, + state=CreateBackupState.IN_PROGRESS, + ) + ) + agent_errors = await self._async_upload_backup( + backup=written_backup.backup, + agent_ids=agent_ids, + open_stream=written_backup.open_stream, + ) + await written_backup.release_stream() + if with_strategy_settings: + # create backup was successful, update last_completed_strategy_backup + self.config.data.last_completed_strategy_backup = dt_util.now() + self.store.save() + self.known_backups.add( + written_backup.backup, agent_errors, with_strategy_settings + ) + self.async_on_backup_event( + CreateBackupEvent(stage=None, 
state=CreateBackupState.COMPLETED) + ) + finally: + self._backup_task = None + self._backup_finish_task = None + self.async_on_backup_event(IdleEvent()) + + async def async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Initiate restoring a backup.""" + if self.state is not BackupManagerState.IDLE: + raise HomeAssistantError(f"Backup manager busy: {self.state}") + + self.async_on_backup_event( + RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS) + ) + try: + await self._async_restore_backup( + backup_id=backup_id, + agent_id=agent_id, + password=password, + restore_addons=restore_addons, + restore_database=restore_database, + restore_folders=restore_folders, + restore_homeassistant=restore_homeassistant, + ) + except Exception: + self.async_on_backup_event( + RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED) + ) + raise + finally: + self.async_on_backup_event(IdleEvent()) + + async def _async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Initiate restoring a backup.""" + agent = self.backup_agents[agent_id] + if not await agent.async_get_backup(backup_id): + raise HomeAssistantError( + f"Backup {backup_id} not found in agent {agent_id}" + ) + + async def open_backup() -> AsyncIterator[bytes]: + return await agent.async_download_backup(backup_id) + + await self._reader_writer.async_restore_backup( + backup_id=backup_id, + open_stream=open_backup, + agent_id=agent_id, + password=password, + restore_addons=restore_addons, + restore_database=restore_database, + restore_folders=restore_folders, + restore_homeassistant=restore_homeassistant, + ) + + @callback + def async_on_backup_event( + self, + event: ManagerStateEvent, + ) -> None: + """Forward event to subscribers.""" + if (current_state := self.state) != (new_state := event.manager_state): + LOGGER.debug("Backup state: %s -> %s", current_state, new_state) + self.last_event = event + for subscription in self._backup_event_subscriptions: + subscription(event) + + @callback + def async_subscribe_events( + self, + on_event: Callable[[ManagerStateEvent], None], + ) -> Callable[[], None]: + """Subscribe events.""" + + def remove_subscription() -> None: + self._backup_event_subscriptions.remove(on_event) + + self._backup_event_subscriptions.append(on_event) + return remove_subscription + + +class KnownBackups: + """Track known backups.""" + + def __init__(self, manager: BackupManager) -> None: + """Initialize.""" + self._backups: dict[str, KnownBackup] = {} + self._manager = manager + + def load(self, stored_backups: list[StoredKnownBackup]) -> None: + """Load backups.""" + self._backups = { + backup["backup_id"]: KnownBackup( + backup_id=backup["backup_id"], + failed_agent_ids=backup["failed_agent_ids"], + with_strategy_settings=backup["with_strategy_settings"], + ) + for backup in stored_backups + } + + def to_list(self) -> list[StoredKnownBackup]: + """Convert known backups to a dict.""" + return [backup.to_dict() for backup in self._backups.values()] + + def add( + self, + backup: AgentBackup, + agent_errors: dict[str, Exception], + with_strategy_settings: bool, + ) -> None: + """Add a backup.""" + self._backups[backup.backup_id] = 
KnownBackup( + backup_id=backup.backup_id, + failed_agent_ids=list(agent_errors), + with_strategy_settings=with_strategy_settings, + ) + self._manager.store.save() + + def get(self, backup_id: str) -> KnownBackup | None: + """Get a backup.""" + return self._backups.get(backup_id) + + def remove(self, backup_id: str) -> None: + """Remove a backup.""" + if backup_id not in self._backups: + return + self._backups.pop(backup_id) + self._manager.store.save() + + +@dataclass(kw_only=True) +class KnownBackup: + """Persistent backup data.""" + + backup_id: str + failed_agent_ids: list[str] + with_strategy_settings: bool + + def to_dict(self) -> StoredKnownBackup: + """Convert known backup to a dict.""" + return { + "backup_id": self.backup_id, + "failed_agent_ids": self.failed_agent_ids, + "with_strategy_settings": self.with_strategy_settings, + } + + +class StoredKnownBackup(TypedDict): + """Stored persistent backup data.""" + + backup_id: str + failed_agent_ids: list[str] + with_strategy_settings: bool + + +class CoreBackupReaderWriter(BackupReaderWriter): + """Class for reading and writing backups in core and container installations.""" + + _local_agent_id = f"{DOMAIN}.local" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the backup reader/writer.""" + self._hass = hass + self.temp_backup_dir = Path(hass.config.path("tmp_backups")) + + async def async_create_backup( + self, + *, + agent_ids: list[str], + backup_name: str, + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + on_progress: Callable[[ManagerStateEvent], None], + password: str | None, + ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]: + """Initiate generating a backup.""" + date_str = dt_util.now().isoformat() + backup_id = _generate_backup_id(date_str, backup_name) + + if include_addons or include_all_addons or include_folders: + raise HomeAssistantError( + "Addons and folders are not supported by core backup" + ) + if not include_homeassistant: + raise HomeAssistantError("Home Assistant must be included in backup") + + backup_task = self._hass.async_create_task( + self._async_create_backup( + agent_ids=agent_ids, + backup_id=backup_id, + backup_name=backup_name, + include_database=include_database, + date_str=date_str, + on_progress=on_progress, + password=password, + ), + name="backup_manager_create_backup", + eager_start=False, # To ensure the task is not started before we return + ) + + return (NewBackup(backup_job_id=backup_id), backup_task) + + async def _async_create_backup( + self, + *, + agent_ids: list[str], + backup_id: str, + backup_name: str, + date_str: str, + include_database: bool, + on_progress: Callable[[ManagerStateEvent], None], + password: str | None, + ) -> WrittenBackup: + """Generate a backup.""" + manager = self._hass.data[DATA_MANAGER] + + local_agent_tar_file_path = None + if self._local_agent_id in agent_ids: + local_agent = manager.local_backup_agents[self._local_agent_id] + local_agent_tar_file_path = local_agent.get_backup_path(backup_id) + + on_progress( + CreateBackupEvent( + stage=CreateBackupStage.HOME_ASSISTANT, + state=CreateBackupState.IN_PROGRESS, + ) + ) + try: + # Inform integrations a backup is about to be made + await manager.async_pre_backup_actions() backup_data = { - "slug": slug, - "name": backup_name, - "date": date_str, - "type": "partial", - "folders": ["homeassistant"], - "homeassistant": {"version": HAVERSION}, "compressed": True, + "date": 
date_str, + "homeassistant": { + "exclude_database": not include_database, + "version": HAVERSION, + }, + "name": backup_name, + "protected": password is not None, + "slug": backup_id, + "type": "partial", + "version": 2, } - tar_file_path = Path(self.backup_dir, f"{backup_data['slug']}.tar") - size_in_bytes = await self.hass.async_add_executor_job( + + tar_file_path, size_in_bytes = await self._hass.async_add_executor_job( self._mkdir_and_generate_backup_contents, - tar_file_path, backup_data, + include_database, + password, + local_agent_tar_file_path, ) - backup = Backup( - slug=slug, - name=backup_name, + backup = AgentBackup( + addons=[], + backup_id=backup_id, + database_included=include_database, date=date_str, - path=tar_file_path, - size=round(size_in_bytes / 1_048_576, 2), + folders=[], + homeassistant_included=True, + homeassistant_version=HAVERSION, + name=backup_name, + protected=password is not None, + size=size_in_bytes, + ) + + async_add_executor_job = self._hass.async_add_executor_job + + async def send_backup() -> AsyncIterator[bytes]: + f = await async_add_executor_job(tar_file_path.open, "rb") + try: + while chunk := await async_add_executor_job(f.read, 2**20): + yield chunk + finally: + await async_add_executor_job(f.close) + + async def open_backup() -> AsyncIterator[bytes]: + return send_backup() + + async def remove_backup() -> None: + if local_agent_tar_file_path: + return + await async_add_executor_job(tar_file_path.unlink, True) + + return WrittenBackup( + backup=backup, open_stream=open_backup, release_stream=remove_backup ) - if self.loaded_backups: - self.backups[slug] = backup - LOGGER.debug("Generated new backup with slug %s", slug) - return backup finally: - self.backing_up = False - await self.post_backup_actions() + # Inform integrations the backup is done + await manager.async_post_backup_actions() def _mkdir_and_generate_backup_contents( self, - tar_file_path: Path, backup_data: dict[str, Any], - ) -> int: + database_included: bool, + password: str | None, + tar_file_path: Path | None, + ) -> tuple[Path, int]: """Generate backup contents and return the size.""" - if not self.backup_dir.exists(): - LOGGER.debug("Creating backup directory") - self.backup_dir.mkdir() + if not tar_file_path: + tar_file_path = self.temp_backup_dir / f"{backup_data['slug']}.tar" + make_backup_dir(tar_file_path.parent) + + excludes = EXCLUDE_FROM_BACKUP + if not database_included: + excludes = excludes + EXCLUDE_DATABASE_FROM_BACKUP outer_secure_tarfile = SecureTarFile( tar_file_path, "w", gzip=False, bufsize=BUF_SIZE @@ -252,18 +1091,136 @@ class BackupManager: tar_info.mtime = int(time.time()) outer_secure_tarfile_tarfile.addfile(tar_info, fileobj=fileobj) with outer_secure_tarfile.create_inner_tar( - "./homeassistant.tar.gz", gzip=True + "./homeassistant.tar.gz", + gzip=True, + key=password_to_key(password) if password is not None else None, ) as core_tar: atomic_contents_add( tar_file=core_tar, - origin_path=Path(self.hass.config.path()), - excludes=EXCLUDE_FROM_BACKUP, + origin_path=Path(self._hass.config.path()), + excludes=excludes, arcname="data", ) + return (tar_file_path, tar_file_path.stat().st_size) - return tar_file_path.stat().st_size + async def async_receive_backup( + self, + *, + agent_ids: list[str], + stream: AsyncIterator[bytes], + suggested_filename: str, + ) -> WrittenBackup: + """Receive a backup.""" + temp_file = Path(self.temp_backup_dir, suggested_filename) + + async_add_executor_job = self._hass.async_add_executor_job + await 
async_add_executor_job(make_backup_dir, self.temp_backup_dir) + f = await async_add_executor_job(temp_file.open, "wb") + try: + async for chunk in stream: + await async_add_executor_job(f.write, chunk) + finally: + await async_add_executor_job(f.close) + + try: + backup = await async_add_executor_job(read_backup, temp_file) + except (OSError, tarfile.TarError, json.JSONDecodeError, KeyError) as err: + LOGGER.warning("Unable to parse backup %s: %s", temp_file, err) + raise + + manager = self._hass.data[DATA_MANAGER] + if self._local_agent_id in agent_ids: + local_agent = manager.local_backup_agents[self._local_agent_id] + tar_file_path = local_agent.get_backup_path(backup.backup_id) + await async_add_executor_job(shutil.move, temp_file, tar_file_path) + else: + tar_file_path = temp_file + + async def send_backup() -> AsyncIterator[bytes]: + f = await async_add_executor_job(tar_file_path.open, "rb") + try: + while chunk := await async_add_executor_job(f.read, 2**20): + yield chunk + finally: + await async_add_executor_job(f.close) + + async def open_backup() -> AsyncIterator[bytes]: + return send_backup() + + async def remove_backup() -> None: + if self._local_agent_id in agent_ids: + return + await async_add_executor_job(temp_file.unlink, True) + + return WrittenBackup( + backup=backup, open_stream=open_backup, release_stream=remove_backup + ) + + async def async_restore_backup( + self, + backup_id: str, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + *, + agent_id: str, + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Restore a backup. + + This will write the restore information to .HA_RESTORE which + will be handled during startup by the restore_backup module. 
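The restore information mentioned here ends up as a small JSON document; roughly, the dict below is what _write_restore_file (further down in this hunk) serializes for a backup staged in the temporary backup directory. The path, backup ID and concrete values are illustrative placeholders only; the key names match the code in this diff.

# Sketch of the data written to the restore marker file (illustrative values):
restore_info = {
    "path": "/config/tmp_backups/abc12345.tar",  # local agents point at their own backup path instead
    "password": None,  # serialized as null when no password was supplied
    "remove_after_restore": True,  # False when restoring a local agent's file in place
    "restore_database": True,
    "restore_homeassistant": True,
}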
+ """ + + if restore_addons or restore_folders: + raise HomeAssistantError( + "Addons and folders are not supported in core restore" + ) + if not restore_homeassistant and not restore_database: + raise HomeAssistantError( + "Home Assistant or database must be included in restore" + ) + + manager = self._hass.data[DATA_MANAGER] + if agent_id in manager.local_backup_agents: + local_agent = manager.local_backup_agents[agent_id] + path = local_agent.get_backup_path(backup_id) + remove_after_restore = False + else: + async_add_executor_job = self._hass.async_add_executor_job + path = self.temp_backup_dir / f"{backup_id}.tar" + stream = await open_stream() + await async_add_executor_job(make_backup_dir, self.temp_backup_dir) + f = await async_add_executor_job(path.open, "wb") + try: + async for chunk in stream: + await async_add_executor_job(f.write, chunk) + finally: + await async_add_executor_job(f.close) + + remove_after_restore = True + + def _write_restore_file() -> None: + """Write the restore file.""" + Path(self._hass.config.path(RESTORE_BACKUP_FILE)).write_text( + json.dumps( + { + "path": path.as_posix(), + "password": password, + "remove_after_restore": remove_after_restore, + "restore_database": restore_database, + "restore_homeassistant": restore_homeassistant, + } + ), + encoding="utf-8", + ) + + await self._hass.async_add_executor_job(_write_restore_file) + await self._hass.services.async_call("homeassistant", "restart", {}) -def _generate_slug(date: str, name: str) -> str: - """Generate a backup slug.""" +def _generate_backup_id(date: str, name: str) -> str: + """Generate a backup ID.""" return hashlib.sha1(f"{date} - {name}".lower().encode()).hexdigest()[:8] diff --git a/homeassistant/components/backup/manifest.json b/homeassistant/components/backup/manifest.json index 1ec9b748cda..b399043e013 100644 --- a/homeassistant/components/backup/manifest.json +++ b/homeassistant/components/backup/manifest.json @@ -1,11 +1,12 @@ { "domain": "backup", "name": "Backup", + "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/core"], "dependencies": ["http", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/backup", "integration_type": "system", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["securetar==2024.2.1"] + "requirements": ["cronsim==2.6", "securetar==2024.11.0"] } diff --git a/homeassistant/components/backup/models.py b/homeassistant/components/backup/models.py new file mode 100644 index 00000000000..6306d9f1fec --- /dev/null +++ b/homeassistant/components/backup/models.py @@ -0,0 +1,61 @@ +"""Models for the backup integration.""" + +from __future__ import annotations + +from dataclasses import asdict, dataclass +from enum import StrEnum +from typing import Any, Self + + +@dataclass(frozen=True, kw_only=True) +class AddonInfo: + """Addon information.""" + + name: str + slug: str + version: str + + +class Folder(StrEnum): + """Folder type.""" + + SHARE = "share" + ADDONS = "addons/local" + SSL = "ssl" + MEDIA = "media" + + +@dataclass(frozen=True, kw_only=True) +class AgentBackup: + """Base backup class.""" + + addons: list[AddonInfo] + backup_id: str + date: str + database_included: bool + folders: list[Folder] + homeassistant_included: bool + homeassistant_version: str | None # None if homeassistant_included is False + name: str + protected: bool + size: int + + def as_dict(self) -> dict: + """Return a dict representation of this backup.""" + return asdict(self) + + @classmethod + def from_dict(cls, data: 
dict[str, Any]) -> Self: + """Create an instance from a JSON serialization.""" + return cls( + addons=[AddonInfo(**addon) for addon in data["addons"]], + backup_id=data["backup_id"], + date=data["date"], + database_included=data["database_included"], + folders=[Folder(folder) for folder in data["folders"]], + homeassistant_included=data["homeassistant_included"], + homeassistant_version=data["homeassistant_version"], + name=data["name"], + protected=data["protected"], + size=data["size"], + ) diff --git a/homeassistant/components/backup/store.py b/homeassistant/components/backup/store.py new file mode 100644 index 00000000000..ddabead24f9 --- /dev/null +++ b/homeassistant/components/backup/store.py @@ -0,0 +1,52 @@ +"""Store backup configuration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, TypedDict + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.storage import Store + +from .const import DOMAIN + +if TYPE_CHECKING: + from .config import StoredBackupConfig + from .manager import BackupManager, StoredKnownBackup + +STORE_DELAY_SAVE = 30 +STORAGE_KEY = DOMAIN +STORAGE_VERSION = 1 + + +class StoredBackupData(TypedDict): + """Represent the stored backup config.""" + + backups: list[StoredKnownBackup] + config: StoredBackupConfig + + +class BackupStore: + """Store backup config.""" + + def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None: + """Initialize the backup manager.""" + self._hass = hass + self._manager = manager + self._store: Store[StoredBackupData] = Store(hass, STORAGE_VERSION, STORAGE_KEY) + + async def load(self) -> StoredBackupData | None: + """Load the store.""" + return await self._store.async_load() + + @callback + def save(self) -> None: + """Save config.""" + self._store.async_delay_save(self._data_to_save, STORE_DELAY_SAVE) + + @callback + def _data_to_save(self) -> StoredBackupData: + """Return data to save.""" + return { + "backups": self._manager.known_backups.to_list(), + "config": self._manager.config.data.to_dict(), + } diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py new file mode 100644 index 00000000000..1d8252cc30b --- /dev/null +++ b/homeassistant/components/backup/util.py @@ -0,0 +1,111 @@ +"""Local backup support for Core and Container installations.""" + +from __future__ import annotations + +import asyncio +from pathlib import Path +from queue import SimpleQueue +import tarfile +from typing import cast + +import aiohttp + +from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonObjectType, json_loads_object + +from .const import BUF_SIZE +from .models import AddonInfo, AgentBackup, Folder + + +def make_backup_dir(path: Path) -> None: + """Create a backup directory if it does not exist.""" + path.mkdir(exist_ok=True) + + +def read_backup(backup_path: Path) -> AgentBackup: + """Read a backup from disk.""" + + with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file: + if not (data_file := backup_file.extractfile("./backup.json")): + raise KeyError("backup.json not found in tar file") + data = json_loads_object(data_file.read()) + addons = [ + AddonInfo( + name=cast(str, addon["name"]), + slug=cast(str, addon["slug"]), + version=cast(str, addon["version"]), + ) + for addon in cast(list[JsonObjectType], data.get("addons", [])) + ] + + folders = [ + Folder(folder) + for folder in cast(list[str], data.get("folders", [])) + if folder != "homeassistant" + ] + + homeassistant_included = 
False + homeassistant_version: str | None = None + database_included = False + if ( + homeassistant := cast(JsonObjectType, data.get("homeassistant")) + ) and "version" in homeassistant: + homeassistant_version = cast(str, homeassistant["version"]) + database_included = not cast( + bool, homeassistant.get("exclude_database", False) + ) + + return AgentBackup( + addons=addons, + backup_id=cast(str, data["slug"]), + database_included=database_included, + date=cast(str, data["date"]), + folders=folders, + homeassistant_included=homeassistant_included, + homeassistant_version=homeassistant_version, + name=cast(str, data["name"]), + protected=cast(bool, data.get("protected", False)), + size=backup_path.stat().st_size, + ) + + +async def receive_file( + hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path +) -> None: + """Receive a file from a stream and write it to a file.""" + queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = SimpleQueue() + + def _sync_queue_consumer() -> None: + with path.open("wb") as file_handle: + while True: + if (_chunk_future := queue.get()) is None: + break + _chunk, _future = _chunk_future + if _future is not None: + hass.loop.call_soon_threadsafe(_future.set_result, None) + file_handle.write(_chunk) + + fut: asyncio.Future[None] | None = None + try: + fut = hass.async_add_executor_job(_sync_queue_consumer) + megabytes_sending = 0 + while chunk := await contents.read_chunk(BUF_SIZE): + megabytes_sending += 1 + if megabytes_sending % 5 != 0: + queue.put_nowait((chunk, None)) + continue + + chunk_future = hass.loop.create_future() + queue.put_nowait((chunk, chunk_future)) + await asyncio.wait( + (fut, chunk_future), + return_when=asyncio.FIRST_COMPLETED, + ) + if fut.done(): + # The executor job failed + break + + queue.put_nowait(None) # terminate queue consumer + finally: + if fut is not None: + await fut diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 8deba33c8ba..7dacc39f9ba 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -7,21 +7,31 @@ import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback -from .const import DOMAIN, LOGGER -from .manager import BackupManager +from .config import ScheduleState +from .const import DATA_MANAGER, LOGGER +from .manager import ManagerStateEvent +from .models import Folder @callback def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> None: """Register websocket commands.""" + websocket_api.async_register_command(hass, backup_agents_info) + if with_hassio: websocket_api.async_register_command(hass, handle_backup_end) websocket_api.async_register_command(hass, handle_backup_start) - return + websocket_api.async_register_command(hass, handle_details) websocket_api.async_register_command(hass, handle_info) websocket_api.async_register_command(hass, handle_create) - websocket_api.async_register_command(hass, handle_remove) + websocket_api.async_register_command(hass, handle_create_with_strategy_settings) + websocket_api.async_register_command(hass, handle_delete) + websocket_api.async_register_command(hass, handle_restore) + websocket_api.async_register_command(hass, handle_subscribe_events) + + websocket_api.async_register_command(hass, handle_config_info) + websocket_api.async_register_command(hass, handle_config_update) @websocket_api.require_admin @@ -33,13 +43,17 @@ async def 
handle_info( msg: dict[str, Any], ) -> None: """List all stored backups.""" - manager: BackupManager = hass.data[DOMAIN] - backups = await manager.get_backups() + manager = hass.data[DATA_MANAGER] + backups, agent_errors = await manager.async_get_backups() connection.send_result( msg["id"], { + "agent_errors": { + agent_id: str(err) for agent_id, err in agent_errors.items() + }, "backups": list(backups.values()), - "backing_up": manager.backing_up, + "last_attempted_strategy_backup": manager.config.data.last_attempted_strategy_backup, + "last_completed_strategy_backup": manager.config.data.last_completed_strategy_backup, }, ) @@ -47,24 +61,102 @@ async def handle_info( @websocket_api.require_admin @websocket_api.websocket_command( { - vol.Required("type"): "backup/remove", - vol.Required("slug"): str, + vol.Required("type"): "backup/details", + vol.Required("backup_id"): str, } ) @websocket_api.async_response -async def handle_remove( +async def handle_details( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any], ) -> None: - """Remove a backup.""" - manager: BackupManager = hass.data[DOMAIN] - await manager.remove_backup(msg["slug"]) + """Get backup details for a specific backup.""" + backup, agent_errors = await hass.data[DATA_MANAGER].async_get_backup( + msg["backup_id"] + ) + connection.send_result( + msg["id"], + { + "agent_errors": { + agent_id: str(err) for agent_id, err in agent_errors.items() + }, + "backup": backup, + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/delete", + vol.Required("backup_id"): str, + } +) +@websocket_api.async_response +async def handle_delete( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Delete a backup.""" + agent_errors = await hass.data[DATA_MANAGER].async_delete_backup(msg["backup_id"]) + connection.send_result( + msg["id"], + { + "agent_errors": { + agent_id: str(err) for agent_id, err in agent_errors.items() + } + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/restore", + vol.Required("backup_id"): str, + vol.Required("agent_id"): str, + vol.Optional("password"): str, + vol.Optional("restore_addons"): [str], + vol.Optional("restore_database", default=True): bool, + vol.Optional("restore_folders"): [vol.Coerce(Folder)], + vol.Optional("restore_homeassistant", default=True): bool, + } +) +@websocket_api.async_response +async def handle_restore( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Restore a backup.""" + await hass.data[DATA_MANAGER].async_restore_backup( + msg["backup_id"], + agent_id=msg["agent_id"], + password=msg.get("password"), + restore_addons=msg.get("restore_addons"), + restore_database=msg["restore_database"], + restore_folders=msg.get("restore_folders"), + restore_homeassistant=msg["restore_homeassistant"], + ) connection.send_result(msg["id"]) @websocket_api.require_admin -@websocket_api.websocket_command({vol.Required("type"): "backup/generate"}) +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/generate", + vol.Required("agent_ids"): [str], + vol.Optional("include_addons"): [str], + vol.Optional("include_all_addons", default=False): bool, + vol.Optional("include_database", default=True): bool, + vol.Optional("include_folders"): [vol.Coerce(Folder)], + vol.Optional("include_homeassistant", default=True): bool, + 
vol.Optional("name"): str, + vol.Optional("password"): str, + } +) @websocket_api.async_response async def handle_create( hass: HomeAssistant, @@ -72,8 +164,46 @@ async def handle_create( msg: dict[str, Any], ) -> None: """Generate a backup.""" - manager: BackupManager = hass.data[DOMAIN] - backup = await manager.generate_backup() + + backup = await hass.data[DATA_MANAGER].async_initiate_backup( + agent_ids=msg["agent_ids"], + include_addons=msg.get("include_addons"), + include_all_addons=msg["include_all_addons"], + include_database=msg["include_database"], + include_folders=msg.get("include_folders"), + include_homeassistant=msg["include_homeassistant"], + name=msg.get("name"), + password=msg.get("password"), + ) + connection.send_result(msg["id"], backup) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/generate_with_strategy_settings", + } +) +@websocket_api.async_response +async def handle_create_with_strategy_settings( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Generate a backup with stored settings.""" + + config_data = hass.data[DATA_MANAGER].config.data + backup = await hass.data[DATA_MANAGER].async_initiate_backup( + agent_ids=config_data.create_backup.agent_ids, + include_addons=config_data.create_backup.include_addons, + include_all_addons=config_data.create_backup.include_all_addons, + include_database=config_data.create_backup.include_database, + include_folders=config_data.create_backup.include_folders, + include_homeassistant=True, # always include HA + name=config_data.create_backup.name, + password=config_data.create_backup.password, + with_strategy_settings=True, + ) connection.send_result(msg["id"], backup) @@ -86,12 +216,11 @@ async def handle_backup_start( msg: dict[str, Any], ) -> None: """Backup start notification.""" - manager: BackupManager = hass.data[DOMAIN] - manager.backing_up = True + manager = hass.data[DATA_MANAGER] LOGGER.debug("Backup start notification") try: - await manager.pre_backup_actions() + await manager.async_pre_backup_actions() except Exception as err: # noqa: BLE001 connection.send_error(msg["id"], "pre_backup_actions_failed", str(err)) return @@ -108,14 +237,107 @@ async def handle_backup_end( msg: dict[str, Any], ) -> None: """Backup end notification.""" - manager: BackupManager = hass.data[DOMAIN] - manager.backing_up = False + manager = hass.data[DATA_MANAGER] LOGGER.debug("Backup end notification") try: - await manager.post_backup_actions() + await manager.async_post_backup_actions() except Exception as err: # noqa: BLE001 connection.send_error(msg["id"], "post_backup_actions_failed", str(err)) return connection.send_result(msg["id"]) + + +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "backup/agents/info"}) +@websocket_api.async_response +async def backup_agents_info( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Return backup agents info.""" + manager = hass.data[DATA_MANAGER] + connection.send_result( + msg["id"], + { + "agents": [{"agent_id": agent_id} for agent_id in manager.backup_agents], + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "backup/config/info"}) +@websocket_api.async_response +async def handle_config_info( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Send the stored backup config.""" + 
manager = hass.data[DATA_MANAGER] + connection.send_result( + msg["id"], + { + "config": manager.config.data.to_dict(), + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/config/update", + vol.Optional("create_backup"): vol.Schema( + { + vol.Optional("agent_ids"): vol.All(list[str]), + vol.Optional("include_addons"): vol.Any(list[str], None), + vol.Optional("include_all_addons"): bool, + vol.Optional("include_database"): bool, + vol.Optional("include_folders"): vol.Any([vol.Coerce(Folder)], None), + vol.Optional("name"): vol.Any(str, None), + vol.Optional("password"): vol.Any(str, None), + }, + ), + vol.Optional("retention"): vol.Schema( + { + vol.Optional("copies"): vol.Any(int, None), + vol.Optional("days"): vol.Any(int, None), + }, + ), + vol.Optional("schedule"): vol.All(str, vol.Coerce(ScheduleState)), + } +) +@websocket_api.async_response +async def handle_config_update( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Update the stored backup config.""" + manager = hass.data[DATA_MANAGER] + changes = dict(msg) + changes.pop("id") + changes.pop("type") + await manager.config.update(**changes) + connection.send_result(msg["id"]) + + +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"}) +@websocket_api.async_response +async def handle_subscribe_events( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Subscribe to backup events.""" + + def on_event(event: ManagerStateEvent) -> None: + connection.send_message(websocket_api.event_message(msg["id"], event)) + + manager = hass.data[DATA_MANAGER] + on_event(manager.last_event) + connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event) + connection.send_result(msg["id"]) diff --git a/homeassistant/components/baf/climate.py b/homeassistant/components/baf/climate.py index 38407813d37..c30d49e8c9d 100644 --- a/homeassistant/components/baf/climate.py +++ b/homeassistant/components/baf/climate.py @@ -40,7 +40,6 @@ class BAFAutoComfort(BAFEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = [HVACMode.OFF, HVACMode.FAN_ONLY] _attr_translation_key = "auto_comfort" - _enable_turn_on_off_backwards_compatibility = False @callback def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/baf/fan.py b/homeassistant/components/baf/fan.py index d0ba668373a..8f7aab40b79 100644 --- a/homeassistant/components/baf/fan.py +++ b/homeassistant/components/baf/fan.py @@ -46,7 +46,7 @@ class BAFFan(BAFEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False + _attr_preset_modes = [PRESET_MODE_AUTO] _attr_speed_count = SPEED_COUNT _attr_name = None diff --git a/homeassistant/components/baf/light.py b/homeassistant/components/baf/light.py index 2fb36ed874f..4c0b1e353fe 100644 --- a/homeassistant/components/baf/light.py +++ b/homeassistant/components/baf/light.py @@ -8,16 +8,12 @@ from aiobafi6 import Device, OffOnAuto from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ColorMode, LightEntity, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - 
color_temperature_mired_to_kelvin, -) from . import BAFConfigEntry from .entity import BAFEntity @@ -77,25 +73,17 @@ class BAFStandaloneLight(BAFLight): def __init__(self, device: Device) -> None: """Init a standalone light.""" super().__init__(device) - self._attr_min_mireds = color_temperature_kelvin_to_mired( - device.light_warmest_color_temperature - ) - self._attr_max_mireds = color_temperature_kelvin_to_mired( - device.light_coolest_color_temperature - ) + self._attr_max_color_temp_kelvin = device.light_warmest_color_temperature + self._attr_min_color_temp_kelvin = device.light_coolest_color_temperature @callback def _async_update_attrs(self) -> None: """Update attrs from device.""" super()._async_update_attrs() - self._attr_color_temp = color_temperature_kelvin_to_mired( - self._device.light_color_temperature - ) + self._attr_color_temp_kelvin = self._device.light_color_temperature async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" - if (color_temp := kwargs.get(ATTR_COLOR_TEMP)) is not None: - self._device.light_color_temperature = color_temperature_mired_to_kelvin( - color_temp - ) + if (color_temp := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) is not None: + self._device.light_color_temperature = color_temp await super().async_turn_on(**kwargs) diff --git a/homeassistant/components/baidu/manifest.json b/homeassistant/components/baidu/manifest.json index 8213b7cbe5e..32f14100b81 100644 --- a/homeassistant/components/baidu/manifest.json +++ b/homeassistant/components/baidu/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/baidu", "iot_class": "cloud_push", "loggers": ["aip"], + "quality_scale": "legacy", "requirements": ["baidu-aip==1.6.6"] } diff --git a/homeassistant/components/balboa/__init__.py b/homeassistant/components/balboa/__init__.py index 7e220bd46f8..7838db16820 100644 --- a/homeassistant/components/balboa/__init__.py +++ b/homeassistant/components/balboa/__init__.py @@ -14,7 +14,7 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.event import async_track_time_interval import homeassistant.util.dt as dt_util -from .const import CONF_SYNC_TIME, DEFAULT_SYNC_TIME, DOMAIN +from .const import CONF_SYNC_TIME, DEFAULT_SYNC_TIME _LOGGER = logging.getLogger(__name__) @@ -30,8 +30,10 @@ PLATFORMS = [ KEEP_ALIVE_INTERVAL = timedelta(minutes=1) SYNC_TIME_INTERVAL = timedelta(hours=1) +type BalboaConfigEntry = ConfigEntry[SpaClient] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: BalboaConfigEntry) -> bool: """Set up Balboa Spa from a config entry.""" host = entry.data[CONF_HOST] @@ -44,41 +46,34 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.error("Failed to get spa info at %s", host) raise ConfigEntryNotReady("Unable to configure") - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = spa + entry.runtime_data = spa await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) await async_setup_time_sync(hass, entry) entry.async_on_unload(entry.add_update_listener(update_listener)) + entry.async_on_unload(spa.disconnect) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BalboaConfigEntry) -> bool: """Unload a config entry.""" - _LOGGER.debug("Disconnecting from spa") - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] - - if unload_ok := await 
hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - await spa.disconnect() - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: BalboaConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) -async def async_setup_time_sync(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_setup_time_sync(hass: HomeAssistant, entry: BalboaConfigEntry) -> None: """Set up the time sync.""" if not entry.options.get(CONF_SYNC_TIME, DEFAULT_SYNC_TIME): return _LOGGER.debug("Setting up daily time sync") - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data async def sync_time(now: datetime) -> None: now = dt_util.as_local(now) diff --git a/homeassistant/components/balboa/binary_sensor.py b/homeassistant/components/balboa/binary_sensor.py index d3352208cd9..b8c62ce8abf 100644 --- a/homeassistant/components/balboa/binary_sensor.py +++ b/homeassistant/components/balboa/binary_sensor.py @@ -12,19 +12,20 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import BalboaConfigEntry from .entity import BalboaEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa's binary sensors.""" - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data entities = [ BalboaBinarySensorEntity(spa, description) for description in BINARY_SENSOR_DESCRIPTIONS diff --git a/homeassistant/components/balboa/climate.py b/homeassistant/components/balboa/climate.py index 8cd9e93e539..76b02f0e165 100644 --- a/homeassistant/components/balboa/climate.py +++ b/homeassistant/components/balboa/climate.py @@ -14,7 +14,6 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_TEMPERATURE, PRECISION_HALVES, @@ -24,6 +23,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import BalboaConfigEntry from .const import DOMAIN from .entity import BalboaEntity @@ -45,10 +45,12 @@ TEMPERATURE_UNIT_MAP = { async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa climate entity.""" - async_add_entities([BalboaClimateEntity(hass.data[DOMAIN][entry.entry_id])]) + async_add_entities([BalboaClimateEntity(entry.runtime_data)]) class BalboaClimateEntity(BalboaEntity, ClimateEntity): @@ -63,7 +65,6 @@ class BalboaClimateEntity(BalboaEntity, ClimateEntity): ) _attr_translation_key = DOMAIN _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, client: SpaClient) -> None: """Initialize the climate entity.""" diff --git a/homeassistant/components/balboa/fan.py b/homeassistant/components/balboa/fan.py index bf7425f0e64..3ecfec53a1e 100644 --- a/homeassistant/components/balboa/fan.py +++ b/homeassistant/components/balboa/fan.py @@ -5,11 +5,10 @@ from __future__ import annotations import math from typing import Any, cast -from pybalboa import SpaClient, SpaControl +from pybalboa import SpaControl from pybalboa.enums import OffOnState, UnknownState from homeassistant.components.fan import FanEntity, FanEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.percentage import ( @@ -17,15 +16,17 @@ from homeassistant.util.percentage import ( ranged_value_to_percentage, ) -from .const import DOMAIN +from . import BalboaConfigEntry from .entity import BalboaEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa's pumps.""" - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data async_add_entities(BalboaPumpFanEntity(control) for control in spa.pumps) @@ -37,7 +38,7 @@ class BalboaPumpFanEntity(BalboaEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False + _attr_translation_key = "pump" def __init__(self, control: SpaControl) -> None: diff --git a/homeassistant/components/balboa/light.py b/homeassistant/components/balboa/light.py index 5dc8d48ef9d..21e4dfc5e08 100644 --- a/homeassistant/components/balboa/light.py +++ b/homeassistant/components/balboa/light.py @@ -4,23 +4,24 @@ from __future__ import annotations from typing import Any, cast -from pybalboa import SpaClient, SpaControl +from pybalboa import SpaControl from pybalboa.enums import OffOnState, UnknownState from homeassistant.components.light import ColorMode, LightEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import BalboaConfigEntry from .entity import BalboaEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa's lights.""" - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data async_add_entities(BalboaLightEntity(control) for control in spa.lights) diff --git a/homeassistant/components/balboa/select.py b/homeassistant/components/balboa/select.py index 9c3074350c5..e88e40ab063 100644 --- a/homeassistant/components/balboa/select.py +++ b/homeassistant/components/balboa/select.py @@ -1,22 +1,23 @@ """Support for Spa Client selects.""" -from pybalboa import SpaClient, SpaControl +from pybalboa import SpaControl from pybalboa.enums import LowHighRange from homeassistant.components.select import SelectEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import BalboaConfigEntry from .entity import BalboaEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa select entity.""" - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data async_add_entities([BalboaTempRangeSelectEntity(spa.temperature_range)]) diff --git a/homeassistant/components/bang_olufsen/__init__.py b/homeassistant/components/bang_olufsen/__init__.py index 07b9d0befe1..be99f8b5b7d 100644 --- a/homeassistant/components/bang_olufsen/__init__.py +++ b/homeassistant/components/bang_olufsen/__init__.py @@ -8,6 +8,7 @@ from aiohttp.client_exceptions import ( ClientConnectorError, ClientOSError, ServerTimeoutError, + WSMessageTypeError, ) from mozart_api.exceptions import ApiException from mozart_api.mozart_client import MozartClient @@ -17,6 +18,7 @@ from homeassistant.const import CONF_HOST, CONF_MODEL, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.device_registry as dr +from homeassistant.util.ssl import get_default_context from .const import DOMAIN from .websocket import BangOlufsenWebsocket @@ -30,10 +32,12 @@ class BangOlufsenData: client: MozartClient +type BangOlufsenConfigEntry = ConfigEntry[BangOlufsenData] + PLATFORMS = [Platform.MEDIA_PLAYER] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) -> bool: """Set up from a config entry.""" # Remove casts to str @@ -48,7 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: model=entry.data[CONF_MODEL], ) - client = MozartClient(host=entry.data[CONF_HOST]) + client = MozartClient(host=entry.data[CONF_HOST], ssl_context=get_default_context()) # Check API and WebSocket connection try: @@ -59,6 +63,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ServerTimeoutError, ApiException, TimeoutError, + WSMessageTypeError, ) as error: await client.close_api_client() raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error @@ -66,10 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: websocket = 
BangOlufsenWebsocket(hass, entry, client) # Add the websocket and API client - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = BangOlufsenData( - websocket, - client, - ) + entry.runtime_data = BangOlufsenData(websocket, client) # Start WebSocket connection await client.connect_notifications(remote_control=True, reconnect=True) @@ -79,15 +81,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: BangOlufsenConfigEntry +) -> bool: """Unload a config entry.""" # Close the API client and WebSocket notification listener - hass.data[DOMAIN][entry.entry_id].client.disconnect_notifications() - await hass.data[DOMAIN][entry.entry_id].client.close_api_client() + entry.runtime_data.client.disconnect_notifications() + await entry.runtime_data.client.close_api_client() - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/bang_olufsen/config_flow.py b/homeassistant/components/bang_olufsen/config_flow.py index 76e4656129e..e1c1c7ab538 100644 --- a/homeassistant/components/bang_olufsen/config_flow.py +++ b/homeassistant/components/bang_olufsen/config_flow.py @@ -14,6 +14,7 @@ from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_MODEL from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig +from homeassistant.util.ssl import get_default_context from .const import ( ATTR_FRIENDLY_NAME, @@ -25,6 +26,7 @@ from .const import ( DEFAULT_MODEL, DOMAIN, ) +from .util import get_serial_number_from_jid class EntryData(TypedDict, total=False): @@ -87,7 +89,9 @@ class BangOlufsenConfigFlowHandler(ConfigFlow, domain=DOMAIN): errors={"base": _exception_map[type(error)]}, ) - self._client = MozartClient(self._host) + self._client = MozartClient( + host=self._host, ssl_context=get_default_context() + ) # Try to get information from Beolink self method. 
async with self._client: @@ -107,7 +111,7 @@ class BangOlufsenConfigFlowHandler(ConfigFlow, domain=DOMAIN): ) self._beolink_jid = beolink_self.jid - self._serial_number = beolink_self.jid.split(".")[2].split("@")[0] + self._serial_number = get_serial_number_from_jid(beolink_self.jid) await self.async_set_unique_id(self._serial_number) self._abort_if_unique_id_configured() @@ -136,7 +140,7 @@ class BangOlufsenConfigFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="ipv6_address") # Check connection to ensure valid address is received - self._client = MozartClient(self._host) + self._client = MozartClient(self._host, ssl_context=get_default_context()) async with self._client: try: diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 748b4baf621..9f0649e610b 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -7,20 +7,19 @@ from typing import Final from mozart_api.models import Source, SourceArray, SourceTypeEnum -from homeassistant.components.media_player import MediaPlayerState, MediaType +from homeassistant.components.media_player import ( + MediaPlayerState, + MediaType, + RepeatMode, +) class BangOlufsenSource: """Class used for associating device source ids with friendly names. May not include all sources.""" - URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer") - BLUETOOTH: Final[Source] = Source(name="Bluetooth", id="bluetooth") - CHROMECAST: Final[Source] = Source(name="Chromecast built-in", id="chromeCast") LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn") SPDIF: Final[Source] = Source(name="Optical", id="spdif") - NET_RADIO: Final[Source] = Source(name="B&O Radio", id="netRadio") - DEEZER: Final[Source] = Source(name="Deezer", id="deezer") - TIDAL: Final[Source] = Source(name="Tidal", id="tidal") + URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer") BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = { @@ -36,6 +35,17 @@ BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = { "unknown": MediaPlayerState.IDLE, } +# Dict used for translating Home Assistant settings to device repeat settings. +BANG_OLUFSEN_REPEAT_FROM_HA: dict[RepeatMode, str] = { + RepeatMode.ALL: "all", + RepeatMode.ONE: "track", + RepeatMode.OFF: "none", +} +# Dict used for translating device repeat settings to Home Assistant settings. 
+BANG_OLUFSEN_REPEAT_TO_HA: dict[str, RepeatMode] = { + value: key for key, value in BANG_OLUFSEN_REPEAT_FROM_HA.items() +} + # Media types for play_media class BangOlufsenMediaType(StrEnum): @@ -68,6 +78,7 @@ class BangOlufsenModel(StrEnum): class WebsocketNotification(StrEnum): """Enum for WebSocket notification types.""" + ACTIVE_LISTENING_MODE = "active_listening_mode" PLAYBACK_ERROR = "playback_error" PLAYBACK_METADATA = "playback_metadata" PLAYBACK_PROGRESS = "playback_progress" @@ -78,6 +89,11 @@ class WebsocketNotification(StrEnum): VOLUME = "volume" # Sub-notifications + BEOLINK = "beolink" + BEOLINK_PEERS = "beolinkPeers" + BEOLINK_LISTENERS = "beolinkListeners" + BEOLINK_AVAILABLE_LISTENERS = "beolinkAvailableListeners" + CONFIGURATION = "configuration" NOTIFICATION = "notification" REMOTE_MENU_CHANGED = "remoteMenuChanged" @@ -117,20 +133,6 @@ VALID_MEDIA_TYPES: Final[tuple] = ( MediaType.CHANNEL, ) -# Sources on the device that should not be selectable by the user -HIDDEN_SOURCE_IDS: Final[tuple] = ( - "airPlay", - "bluetooth", - "chromeCast", - "generator", - "local", - "dlna", - "qplay", - "wpl", - "pl", - "beolink", - "usbIn", -) # Fallback sources to use in case of API failure. FALLBACK_SOURCES: Final[SourceArray] = SourceArray( @@ -138,23 +140,26 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( Source( id="uriStreamer", is_enabled=True, - is_playable=False, + is_playable=True, name="Audio Streamer", type=SourceTypeEnum(value="uriStreamer"), + is_seekable=False, ), Source( id="bluetooth", is_enabled=True, - is_playable=False, + is_playable=True, name="Bluetooth", type=SourceTypeEnum(value="bluetooth"), + is_seekable=False, ), Source( id="spotify", is_enabled=True, - is_playable=False, + is_playable=True, name="Spotify Connect", type=SourceTypeEnum(value="spotify"), + is_seekable=True, ), Source( id="lineIn", @@ -162,6 +167,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( is_playable=True, name="Line-In", type=SourceTypeEnum(value="lineIn"), + is_seekable=False, ), Source( id="spdif", @@ -169,6 +175,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( is_playable=True, name="Optical", type=SourceTypeEnum(value="spdif"), + is_seekable=False, ), Source( id="netRadio", @@ -176,6 +183,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( is_playable=True, name="B&O Radio", type=SourceTypeEnum(value="netRadio"), + is_seekable=False, ), Source( id="deezer", @@ -183,6 +191,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( is_playable=True, name="Deezer", type=SourceTypeEnum(value="deezer"), + is_seekable=True, ), Source( id="tidalConnect", @@ -190,6 +199,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( is_playable=True, name="Tidal Connect", type=SourceTypeEnum(value="tidalConnect"), + is_seekable=True, ), ] ) @@ -200,3 +210,20 @@ BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event" CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS" + +# Beolink Converter NL/ML sources need to be transformed to upper case +BEOLINK_JOIN_SOURCES_TO_UPPER = ( + "aux_a", + "cd", + "ph", + "radio", + "tp1", + "tp2", +) +BEOLINK_JOIN_SOURCES = ( + *BEOLINK_JOIN_SOURCES_TO_UPPER, + "beoradio", + "deezer", + "spotify", + "tidal", +) diff --git a/homeassistant/components/bang_olufsen/diagnostics.py b/homeassistant/components/bang_olufsen/diagnostics.py new file mode 100644 index 00000000000..cab7eae5e25 --- /dev/null +++ b/homeassistant/components/bang_olufsen/diagnostics.py @@ -0,0 +1,40 @@ +"""Support for Bang & Olufsen diagnostics.""" + +from 
__future__ import annotations + +from typing import TYPE_CHECKING, Any + +from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er + +from . import BangOlufsenConfigEntry +from .const import DOMAIN + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: BangOlufsenConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + data: dict = { + "config_entry": config_entry.as_dict(), + "websocket_connected": config_entry.runtime_data.client.websocket_connected, + } + + if TYPE_CHECKING: + assert config_entry.unique_id + + # Add media_player entity's state + entity_registry = er.async_get(hass) + if entity_id := entity_registry.async_get_entity_id( + MEDIA_PLAYER_DOMAIN, DOMAIN, config_entry.unique_id + ): + if state := hass.states.get(entity_id): + state_dict = dict(state.as_dict()) + + # Remove context as it is not relevant + state_dict.pop("context") + data["media_player"] = state_dict + + return data diff --git a/homeassistant/components/bang_olufsen/icons.json b/homeassistant/components/bang_olufsen/icons.json new file mode 100644 index 00000000000..fec0bf20937 --- /dev/null +++ b/homeassistant/components/bang_olufsen/icons.json @@ -0,0 +1,9 @@ +{ + "services": { + "beolink_join": { "service": "mdi:location-enter" }, + "beolink_expand": { "service": "mdi:location-enter" }, + "beolink_unexpand": { "service": "mdi:location-exit" }, + "beolink_leave": { "service": "mdi:close-circle-outline" }, + "beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" } + } +} diff --git a/homeassistant/components/bang_olufsen/manifest.json b/homeassistant/components/bang_olufsen/manifest.json index 3cc9fdb5cd1..b29fe9731de 100644 --- a/homeassistant/components/bang_olufsen/manifest.json +++ b/homeassistant/components/bang_olufsen/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/bang_olufsen", "integration_type": "device", "iot_class": "local_push", - "requirements": ["mozart-api==3.4.1.8.6"], + "requirements": ["mozart-api==4.1.1.116.4"], "zeroconf": ["_bangolufsen._tcp.local."] } diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index 8bc97858d0d..282ecdd2ae5 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -3,15 +3,21 @@ from __future__ import annotations from collections.abc import Callable +import contextlib +from datetime import timedelta import json import logging -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast +from aiohttp import ClientConnectorError from mozart_api import __version__ as MOZART_API_VERSION -from mozart_api.exceptions import ApiException +from mozart_api.exceptions import ApiException, NotFoundException from mozart_api.models import ( Action, Art, + BeolinkLeader, + ListeningModeProps, + ListeningModeRef, OverlayPlayRequest, OverlayPlayRequestTextToSpeechTextToSpeech, PlaybackContentMetadata, @@ -19,6 +25,7 @@ from mozart_api.models import ( PlaybackProgress, PlayQueueItem, PlayQueueItemType, + PlayQueueSettings, RenderingState, SceneProperties, SoftwareUpdateState, @@ -31,6 +38,7 @@ from mozart_api.models import ( VolumeState, ) from mozart_api.mozart_client import MozartClient, get_highest_resolution_artwork +import voluptuous as vol from homeassistant.components import 
media_source from homeassistant.components.media_player import ( @@ -41,71 +49,146 @@ from homeassistant.components.media_player import ( MediaPlayerEntityFeature, MediaPlayerState, MediaType, + RepeatMode, async_process_play_media_url, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MODEL +from homeassistant.const import CONF_MODEL, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_platform import ( + AddEntitiesCallback, + async_get_current_platform, +) from homeassistant.util.dt import utcnow -from . import BangOlufsenData +from . import BangOlufsenConfigEntry from .const import ( + BANG_OLUFSEN_REPEAT_FROM_HA, + BANG_OLUFSEN_REPEAT_TO_HA, BANG_OLUFSEN_STATES, + BEOLINK_JOIN_SOURCES, + BEOLINK_JOIN_SOURCES_TO_UPPER, CONF_BEOLINK_JID, CONNECTION_STATUS, DOMAIN, FALLBACK_SOURCES, - HIDDEN_SOURCE_IDS, VALID_MEDIA_TYPES, BangOlufsenMediaType, BangOlufsenSource, WebsocketNotification, ) from .entity import BangOlufsenEntity +from .util import get_serial_number_from_jid + +PARALLEL_UPDATES = 0 + +SCAN_INTERVAL = timedelta(seconds=30) _LOGGER = logging.getLogger(__name__) BANG_OLUFSEN_FEATURES = ( MediaPlayerEntityFeature.BROWSE_MEDIA | MediaPlayerEntityFeature.CLEAR_PLAYLIST + | MediaPlayerEntityFeature.GROUPING | MediaPlayerEntityFeature.MEDIA_ANNOUNCE | MediaPlayerEntityFeature.NEXT_TRACK | MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.PLAY_MEDIA | MediaPlayerEntityFeature.PREVIOUS_TRACK - | MediaPlayerEntityFeature.SEEK + | MediaPlayerEntityFeature.REPEAT_SET | MediaPlayerEntityFeature.SELECT_SOURCE + | MediaPlayerEntityFeature.SHUFFLE_SET | MediaPlayerEntityFeature.STOP | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.VOLUME_MUTE | MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.SELECT_SOUND_MODE ) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BangOlufsenConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Media Player entity from config entry.""" - data: BangOlufsenData = hass.data[DOMAIN][config_entry.entry_id] - # Add MediaPlayer entity - async_add_entities(new_entities=[BangOlufsenMediaPlayer(config_entry, data.client)]) + async_add_entities( + new_entities=[ + BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client) + ] + ) + + # Register actions. 
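# --- Editorial note (not part of this patch) ------------------------------------
# The jid_regex defined just below only accepts Beolink JIDs of the form
# "<4 digits>.<7 digits>.<8 digits>@products.bang-olufsen.com"; a quick check with
# a hypothetical JID:
import re

assert re.match(
    r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$",
    "1111.2222222.33333333@products.bang-olufsen.com",
)
# --- End editorial note -----------------------------------------------------------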
+ platform = async_get_current_platform() + + jid_regex = vol.Match( + r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$" + ) + + platform.async_register_entity_service( + name="beolink_join", + schema={ + vol.Optional("beolink_jid"): jid_regex, + vol.Optional("source_id"): vol.In(BEOLINK_JOIN_SOURCES), + }, + func="async_beolink_join", + ) + + platform.async_register_entity_service( + name="beolink_expand", + schema={ + vol.Exclusive("all_discovered", "devices", ""): cv.boolean, + vol.Exclusive( + "beolink_jids", + "devices", + "Define either specific Beolink JIDs or all discovered", + ): vol.All( + cv.ensure_list, + [jid_regex], + ), + }, + func="async_beolink_expand", + ) + + platform.async_register_entity_service( + name="beolink_unexpand", + schema={ + vol.Required("beolink_jids"): vol.All( + cv.ensure_list, + [jid_regex], + ), + }, + func="async_beolink_unexpand", + ) + + platform.async_register_entity_service( + name="beolink_leave", + schema=None, + func="async_beolink_leave", + ) + + platform.async_register_entity_service( + name="beolink_allstandby", + schema=None, + func="async_beolink_allstandby", + ) class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): """Representation of a media player.""" - _attr_icon = "mdi:speaker-wireless" _attr_name = None _attr_device_class = MediaPlayerDeviceClass.SPEAKER - _attr_supported_features = BANG_OLUFSEN_FEATURES def __init__(self, entry: ConfigEntry, client: MozartClient) -> None: """Initialize the media player.""" @@ -122,6 +205,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): serial_number=self._unique_id, ) self._attr_unique_id = self._unique_id + self._attr_should_poll = True # Misc. variables. self._audio_sources: dict[str, str] = {} @@ -133,6 +217,13 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): self._sources: dict[str, str] = {} self._state: str = MediaPlayerState.IDLE self._video_sources: dict[str, str] = {} + self._sound_modes: dict[str, int] = {} + + # Beolink compatible sources + self._beolink_sources: dict[str, bool] = {} + self._remote_leader: BeolinkLeader | None = None + # Extra state attributes for showing Beolink: peer(s), listener(s), leader and self + self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {} async def async_added_to_hass(self) -> None: """Turn on the dispatchers.""" @@ -140,9 +231,13 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): signal_handlers: dict[str, Callable] = { CONNECTION_STATUS: self._async_update_connection_state, + WebsocketNotification.ACTIVE_LISTENING_MODE: self._async_update_sound_modes, + WebsocketNotification.BEOLINK: self._async_update_beolink, + WebsocketNotification.CONFIGURATION: self._async_update_name_and_beolink, WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error, - WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata, + WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata_and_beolink, WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress, + WebsocketNotification.PLAYBACK_SOURCE: self._async_update_sources, WebsocketNotification.PLAYBACK_STATE: self._async_update_playback_state, WebsocketNotification.REMOTE_MENU_CHANGED: self._async_update_sources, WebsocketNotification.SOURCE_CHANGE: self._async_update_source_change, @@ -183,6 +278,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): if product_state.playback: if product_state.playback.metadata: 
self._playback_metadata = product_state.playback.metadata + self._remote_leader = product_state.playback.metadata.remote_leader if product_state.playback.progress: self._playback_progress = product_state.playback.progress if product_state.playback.source: @@ -201,10 +297,25 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # If the device has been updated with new sources, then the API will fail here. await self._async_update_sources() - # Set the static entity attributes that needed more information. - self._attr_source_list = list(self._sources.values()) + await self._async_update_sound_modes() - async def _async_update_sources(self) -> None: + # Update beolink attributes and device name. + await self._async_update_name_and_beolink() + + async def async_update(self) -> None: + """Update queue settings.""" + # The WebSocket event listener is the main handler for connection state. + # The polling updates do therefore not set the device as available or unavailable + with contextlib.suppress(ApiException, ClientConnectorError, TimeoutError): + queue_settings = await self._client.get_settings_queue(_request_timeout=5) + + if queue_settings.repeat is not None: + self._attr_repeat = BANG_OLUFSEN_REPEAT_TO_HA[queue_settings.repeat] + + if queue_settings.shuffle is not None: + self._attr_shuffle = queue_settings.shuffle + + async def _async_update_sources(self, _: Source | None = None) -> None: """Get sources for the specific product.""" # Audio sources @@ -231,10 +342,22 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): self._audio_sources = { source.id: source.name for source in cast(list[Source], sources.items) - if source.is_enabled - and source.id - and source.name - and source.id not in HIDDEN_SOURCE_IDS + if source.is_enabled and source.id and source.name and source.is_playable + } + + # Some sources are not Beolink expandable, meaning that they can't be joined by + # or expand to other Bang & Olufsen devices for a multi-room experience. + # _source_change, which is used throughout the entity for current source + # information, lacks this information, so source ID's and their expandability is + # stored in the self._beolink_sources variable. + self._beolink_sources = { + source.id: ( + source.is_multiroom_available + if source.is_multiroom_available is not None + else False + ) + for source in cast(list[Source], sources.items) + if source.id } # Video sources from remote menu @@ -260,19 +383,21 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Combine the source dicts self._sources = self._audio_sources | self._video_sources + self._attr_source_list = list(self._sources.values()) + # HASS won't necessarily be running the first time this method is run if self.hass.is_running: self.async_write_ha_state() - @callback - def _async_update_playback_metadata(self, data: PlaybackContentMetadata) -> None: + async def _async_update_playback_metadata_and_beolink( + self, data: PlaybackContentMetadata + ) -> None: """Update _playback_metadata and related.""" self._playback_metadata = data - # Update current artwork. + # Update current artwork and remote_leader. 
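# --- Editorial note (not part of this patch) ------------------------------------
# The polled queue settings above are translated with BANG_OLUFSEN_REPEAT_TO_HA,
# the inverse of BANG_OLUFSEN_REPEAT_FROM_HA (both added in const.py), so a repeat
# mode survives the round trip through the device's "all"/"track"/"none" values:
assert (
    BANG_OLUFSEN_REPEAT_TO_HA[BANG_OLUFSEN_REPEAT_FROM_HA[RepeatMode.ONE]]
    == RepeatMode.ONE
)
# --- End editorial note -----------------------------------------------------------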
self._media_image = get_highest_resolution_artwork(self._playback_metadata) - - self.async_write_ha_state() + await self._async_update_beolink() @callback def _async_update_playback_error(self, data: PlaybackError) -> None: @@ -319,6 +444,181 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): self.async_write_ha_state() + async def _async_update_name_and_beolink(self) -> None: + """Update the device friendly name.""" + beolink_self = await self._client.get_beolink_self() + + # Update device name + device_registry = dr.async_get(self.hass) + assert self.device_entry is not None + + device_registry.async_update_device( + device_id=self.device_entry.id, + name=beolink_self.friendly_name, + ) + + await self._async_update_beolink() + + async def _async_update_beolink(self) -> None: + """Update the current Beolink leader, listeners, peers and self.""" + + self._beolink_attributes = {} + + assert self.device_entry is not None + assert self.device_entry.name is not None + + # Add Beolink self + self._beolink_attributes = { + "beolink": {"self": {self.device_entry.name: self._beolink_jid}} + } + + # Add Beolink peers + peers = await self._client.get_beolink_peers() + + if len(peers) > 0: + self._beolink_attributes["beolink"]["peers"] = {} + for peer in peers: + self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = ( + peer.jid + ) + + # Add Beolink listeners / leader + self._remote_leader = self._playback_metadata.remote_leader + + # Create group members list + group_members = [] + + # If the device is a listener. + if self._remote_leader is not None: + # Add leader if available in Home Assistant + leader = self._get_entity_id_from_jid(self._remote_leader.jid) + group_members.append( + leader + if leader is not None + else f"leader_not_in_hass-{self._remote_leader.friendly_name}" + ) + + # Add self + group_members.append(self.entity_id) + + self._beolink_attributes["beolink"]["leader"] = { + self._remote_leader.friendly_name: self._remote_leader.jid, + } + + # If not listener, check if leader. + else: + beolink_listeners = await self._client.get_beolink_listeners() + beolink_listeners_attribute = {} + + # Check if the device is a leader. 
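# --- Editorial note (not part of this patch) ------------------------------------
# Rough shape of the "beolink" attribute assembled by _async_update_beolink() above
# and below (friendly names and JIDs are hypothetical); "leader" and "listeners"
# are mutually exclusive, depending on the device's role in the session:
_example_beolink_attributes = {
    "beolink": {
        "self": {"Living room": "1111.2222222.33333333@products.bang-olufsen.com"},
        "peers": {"Kitchen": "4444.5555555.66666666@products.bang-olufsen.com"},
        "listeners": {"Kitchen": "4444.5555555.66666666@products.bang-olufsen.com"},
    }
}
# --- End editorial note -----------------------------------------------------------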
+ if len(beolink_listeners) > 0: + # Add self + group_members.append(self.entity_id) + + # Get the entity_ids of the listeners if available in Home Assistant + group_members.extend( + [ + listener + if ( + listener := self._get_entity_id_from_jid( + beolink_listener.jid + ) + ) + is not None + else f"listener_not_in_hass-{beolink_listener.jid}" + for beolink_listener in beolink_listeners + ] + ) + # Update Beolink attributes + for beolink_listener in beolink_listeners: + for peer in peers: + if peer.jid == beolink_listener.jid: + # Get the friendly names for the listeners from the peers + beolink_listeners_attribute[peer.friendly_name] = ( + beolink_listener.jid + ) + break + self._beolink_attributes["beolink"]["listeners"] = ( + beolink_listeners_attribute + ) + + self._attr_group_members = group_members + + self.async_write_ha_state() + + def _get_entity_id_from_jid(self, jid: str) -> str | None: + """Get entity_id from Beolink JID (if available).""" + + unique_id = get_serial_number_from_jid(jid) + + entity_registry = er.async_get(self.hass) + return entity_registry.async_get_entity_id( + Platform.MEDIA_PLAYER, DOMAIN, unique_id + ) + + def _get_beolink_jid(self, entity_id: str) -> str: + """Get beolink JID from entity_id.""" + + entity_registry = er.async_get(self.hass) + + # Check for valid bang_olufsen media_player entity + entity_entry = entity_registry.async_get(entity_id) + + if ( + entity_entry is None + or entity_entry.domain != Platform.MEDIA_PLAYER + or entity_entry.platform != DOMAIN + or entity_entry.config_entry_id is None + ): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_grouping_entity", + translation_placeholders={"entity_id": entity_id}, + ) + + config_entry = self.hass.config_entries.async_get_entry( + entity_entry.config_entry_id + ) + if TYPE_CHECKING: + assert config_entry + + # Return JID + return cast(str, config_entry.data[CONF_BEOLINK_JID]) + + async def _async_update_sound_modes( + self, active_sound_mode: ListeningModeProps | ListeningModeRef | None = None + ) -> None: + """Update the available sound modes.""" + sound_modes = await self._client.get_listening_mode_set() + + if active_sound_mode is None: + active_sound_mode = await self._client.get_active_listening_mode() + + # Add the key to make the labels unique (As labels are not required to be unique on B&O devices) + for sound_mode in sound_modes: + label = f"{sound_mode.name} ({sound_mode.id})" + + self._sound_modes[label] = sound_mode.id + + if sound_mode.id == active_sound_mode.id: + self._attr_sound_mode = label + + # Set available options + self._attr_sound_mode_list = list(self._sound_modes) + + self.async_write_ha_state() + + @property + def supported_features(self) -> MediaPlayerEntityFeature: + """Flag media player features that are supported.""" + features = BANG_OLUFSEN_FEATURES + + # Add seeking if supported by the current source + if self._source_change.is_seekable is True: + features |= MediaPlayerEntityFeature.SEEK + + return features + @property def state(self) -> MediaPlayerState: """Return the current state of the media player.""" @@ -394,38 +694,19 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): @property def source(self) -> str | None: """Return the current audio source.""" - - # Try to fix some of the source_change chromecast weirdness. - if hasattr(self._playback_metadata, "title"): - # source_change is chromecast but line in is selected. 
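# --- Editorial note (not part of this patch) ------------------------------------
# Listening-mode names are not guaranteed to be unique on B&O devices, so the
# labels built in _async_update_sound_modes() above append the mode id; two modes
# both named "Custom" (ids assumed for illustration) therefore remain selectable:
_example_sound_mode_list = ["Custom (1)", "Custom (2)", "Speech (3)"]
# --- End editorial note -----------------------------------------------------------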
- if self._playback_metadata.title == BangOlufsenSource.LINE_IN.name: - return BangOlufsenSource.LINE_IN.name - - # source_change is chromecast but bluetooth is selected. - if self._playback_metadata.title == BangOlufsenSource.BLUETOOTH.name: - return BangOlufsenSource.BLUETOOTH.name - - # source_change is line in, bluetooth or optical but stale metadata is sent through the WebSocket, - # And the source has not changed. - if self._source_change.id in ( - BangOlufsenSource.BLUETOOTH.id, - BangOlufsenSource.LINE_IN.id, - BangOlufsenSource.SPDIF.id, - ): - return BangOlufsenSource.CHROMECAST.name - - # source_change is chromecast and there is metadata but no artwork. Bluetooth does support metadata but not artwork - # So i assume that it is bluetooth and not chromecast - if ( - hasattr(self._playback_metadata, "art") - and self._playback_metadata.art is not None - and len(self._playback_metadata.art) == 0 - and self._source_change.id == BangOlufsenSource.CHROMECAST.id - ): - return BangOlufsenSource.BLUETOOTH.name - return self._source_change.name + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return information that is not returned anywhere else.""" + attributes: dict[str, Any] = {} + + # Add Beolink attributes + if self._beolink_attributes: + attributes.update(self._beolink_attributes) + + return attributes + async def async_turn_off(self) -> None: """Set the device to "networkStandby".""" await self._client.post_standby() @@ -465,17 +746,12 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): async def async_media_seek(self, position: float) -> None: """Seek to position in ms.""" - if self._source_change.id == BangOlufsenSource.DEEZER.id: - await self._client.seek_to_position(position_ms=int(position * 1000)) - # Try to prevent the playback progress from bouncing in the UI. - self._attr_media_position_updated_at = utcnow() - self._playback_progress = PlaybackProgress(progress=int(position)) + await self._client.seek_to_position(position_ms=int(position * 1000)) + # Try to prevent the playback progress from bouncing in the UI. + self._attr_media_position_updated_at = utcnow() + self._playback_progress = PlaybackProgress(progress=int(position)) - self.async_write_ha_state() - else: - raise HomeAssistantError( - translation_domain=DOMAIN, translation_key="non_deezer_seeking" - ) + self.async_write_ha_state() async def async_media_previous_track(self) -> None: """Send the previous track command.""" @@ -485,6 +761,20 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): """Clear the current playback queue.""" await self._client.post_clear_queue() + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set playback queues to repeat.""" + await self._client.set_settings_queue( + play_queue_settings=PlayQueueSettings( + repeat=BANG_OLUFSEN_REPEAT_FROM_HA[repeat] + ) + ) + + async def async_set_shuffle(self, shuffle: bool) -> None: + """Set playback queues to shuffle.""" + await self._client.set_settings_queue( + play_queue_settings=PlayQueueSettings(shuffle=shuffle), + ) + async def async_select_source(self, source: str) -> None: """Select an input source.""" if source not in self._sources.values(): @@ -507,6 +797,21 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Video await self._client.post_remote_trigger(id=key) + async def async_select_sound_mode(self, sound_mode: str) -> None: + """Select a sound mode.""" + # Ensure only known sound modes known by the integration can be activated. 
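# --- Editorial note (not part of this patch) ------------------------------------
# async_media_seek() above receives the target position in seconds from Home
# Assistant, while the Mozart API expects milliseconds, hence int(position * 1000);
# e.g. a 90-second seek:
assert int(90.0 * 1000) == 90_000  # passed as seek_to_position(position_ms=90000)
# --- End editorial note -----------------------------------------------------------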
+ if sound_mode not in self._sound_modes: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_sound_mode", + translation_placeholders={ + "invalid_sound_mode": sound_mode, + "valid_sound_modes": ", ".join(list(self._sound_modes)), + }, + ) + + await self._client.activate_listening_mode(id=self._sound_modes[sound_mode]) + async def async_play_media( self, media_type: MediaType | str, @@ -664,3 +969,92 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): media_content_id, content_filter=lambda item: item.media_content_type.startswith("audio/"), ) + + async def async_join_players(self, group_members: list[str]) -> None: + """Create a Beolink session with defined group members.""" + + # Use the touch to join if no entities have been defined + # Touch to join will make the device connect to any other currently-playing + # Beolink compatible B&O device. + # Repeated presses / calls will cycle between compatible playing devices. + if len(group_members) == 0: + await self.async_beolink_join() + return + + # Get JID for each group member + jids = [self._get_beolink_jid(group_member) for group_member in group_members] + await self.async_beolink_expand(jids) + + async def async_unjoin_player(self) -> None: + """Unjoin Beolink session. End session if leader.""" + await self.async_beolink_leave() + + # Custom actions: + async def async_beolink_join( + self, beolink_jid: str | None = None, source_id: str | None = None + ) -> None: + """Join a Beolink multi-room experience.""" + # Touch to join + if beolink_jid is None: + await self._client.join_latest_beolink_experience() + # Join a peer + elif beolink_jid and source_id is None: + await self._client.join_beolink_peer(jid=beolink_jid) + # Join a peer and select specific source + elif beolink_jid and source_id: + # Beolink Converter NL/ML sources need to be in upper case + if source_id in BEOLINK_JOIN_SOURCES_TO_UPPER: + source_id = source_id.upper() + + await self._client.join_beolink_peer(jid=beolink_jid, source=source_id) + + async def async_beolink_expand( + self, beolink_jids: list[str] | None = None, all_discovered: bool = False + ) -> None: + """Expand a Beolink multi-room experience with a device or devices.""" + + # Ensure that the current source is expandable + if not self._beolink_sources[cast(str, self._source_change.id)]: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_source", + translation_placeholders={ + "invalid_source": cast(str, self._source_change.id), + "valid_sources": ", ".join(list(self._beolink_sources)), + }, + ) + + # Expand to all discovered devices + if all_discovered: + peers = await self._client.get_beolink_peers() + + for peer in peers: + try: + await self._client.post_beolink_expand(jid=peer.jid) + except NotFoundException: + _LOGGER.warning("Unable to expand to %s", peer.jid) + + # Try to expand to all defined devices + elif beolink_jids: + for beolink_jid in beolink_jids: + try: + await self._client.post_beolink_expand(jid=beolink_jid) + except NotFoundException: + _LOGGER.warning( + "Unable to expand to %s. 
Is the device available on the network?", + beolink_jid, + ) + + async def async_beolink_unexpand(self, beolink_jids: list[str]) -> None: + """Unexpand a Beolink multi-room experience with a device or devices.""" + # Unexpand all defined devices + for beolink_jid in beolink_jids: + await self._client.post_beolink_unexpand(jid=beolink_jid) + + async def async_beolink_leave(self) -> None: + """Leave the current Beolink experience.""" + await self._client.post_beolink_leave() + + async def async_beolink_allstandby(self) -> None: + """Set all connected Beolink devices to standby.""" + await self._client.post_beolink_allstandby() diff --git a/homeassistant/components/bang_olufsen/services.yaml b/homeassistant/components/bang_olufsen/services.yaml new file mode 100644 index 00000000000..7c3a2d659bd --- /dev/null +++ b/homeassistant/components/bang_olufsen/services.yaml @@ -0,0 +1,96 @@ +beolink_allstandby: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + +beolink_expand: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + fields: + all_discovered: + required: false + example: false + selector: + boolean: + jid_options: + collapsed: false + fields: + beolink_jids: + required: false + example: >- + [ + 1111.2222222.33333333@products.bang-olufsen.com, + 4444.5555555.66666666@products.bang-olufsen.com + ] + selector: + object: + +beolink_join: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + fields: + jid_options: + collapsed: false + fields: + beolink_jid: + required: false + example: 1111.2222222.33333333@products.bang-olufsen.com + selector: + text: + source_id: + required: false + example: tidal + selector: + select: + translation_key: "source_ids" + options: + - beoradio + - deezer + - spotify + - tidal + - radio + - tp1 + - tp2 + - cd + - aux_a + - ph + +beolink_leave: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + +beolink_unexpand: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + fields: + jid_options: + collapsed: false + fields: + beolink_jids: + required: true + example: >- + [ + 1111.2222222.33333333@products.bang-olufsen.com, + 4444.5555555.66666666@products.bang-olufsen.com + ] + selector: + object: diff --git a/homeassistant/components/bang_olufsen/strings.json b/homeassistant/components/bang_olufsen/strings.json index cf5b212d424..b4aac78756c 100644 --- a/homeassistant/components/bang_olufsen/strings.json +++ b/homeassistant/components/bang_olufsen/strings.json @@ -1,4 +1,8 @@ { + "common": { + "jid_options_name": "JID options", + "jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity." 
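# --- Editorial note (not part of this patch) ------------------------------------
# Example of invoking one of the actions defined in services.yaml above from Python
# (must run inside an async context; the entity_id and JID are hypothetical). Field
# sections such as jid_options only group fields in the frontend, so the call data
# stays flat:
await hass.services.async_call(
    "bang_olufsen",
    "beolink_join",
    {
        "entity_id": "media_player.beosound_balance",
        "beolink_jid": "1111.2222222.33333333@products.bang-olufsen.com",
    },
    blocking=True,
)
# --- End editorial note -----------------------------------------------------------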
+ }, "config": { "error": { "api_exception": "[%key:common::config_flow::error::cannot_connect%]", @@ -7,7 +11,7 @@ "invalid_ip": "Invalid IPv4 address" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::single_instance_allowed%]", + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" }, "flow_title": "{name}", @@ -25,13 +29,92 @@ } } }, + "selector": { + "source_ids": { + "options": { + "beoradio": "ASE Beoradio", + "deezer": "ASE / Mozart Deezer", + "spotify": "ASE / Mozart Spotify", + "tidal": "Mozart Tidal", + "aux_a": "Beolink Converter NL/ML AUX_A", + "cd": "Beolink Converter NL/ML CD", + "ph": "Beolink Converter NL/ML PH", + "radio": "Beolink Converter NL/ML RADIO", + "tp1": "Beolink Converter NL/ML TP1", + "tp2": "Beolink Converter NL/ML TP2" + } + } + }, + "services": { + "beolink_allstandby": { + "name": "Beolink all standby", + "description": "Set all Connected Beolink devices to standby." + }, + "beolink_expand": { + "name": "Beolink expand", + "description": "Expand current Beolink experience.", + "fields": { + "all_discovered": { + "name": "All discovered", + "description": "Expand Beolink experience to all discovered devices." + }, + "beolink_jids": { + "name": "Beolink JIDs", + "description": "Specify which Beolink JIDs will join current Beolink experience." + } + }, + "sections": { + "jid_options": { + "name": "[%key:component::bang_olufsen::common::jid_options_name%]", + "description": "[%key:component::bang_olufsen::common::jid_options_description%]" + } + } + }, + "beolink_join": { + "name": "Beolink join", + "description": "Join a Beolink experience.", + "fields": { + "beolink_jid": { + "name": "Beolink JID", + "description": "Manually specify Beolink JID to join." + }, + "source_id": { + "name": "Source", + "description": "Specify which source to join, behavior varies between hardware platforms. Source names prefaced by a platform name can only be used when connecting to that platform. For example \"ASE Beoradio\" can only be used when joining an ASE device, while ”ASE / Mozart Deezer” can be used with ASE or Mozart devices. A defined Beolink JID is required." + } + }, + "sections": { + "jid_options": { + "name": "[%key:component::bang_olufsen::common::jid_options_name%]", + "description": "[%key:component::bang_olufsen::common::jid_options_description%]" + } + } + }, + "beolink_leave": { + "name": "Beolink leave", + "description": "Leave a Beolink experience." + }, + "beolink_unexpand": { + "name": "Beolink unexpand", + "description": "Unexpand from current Beolink experience.", + "fields": { + "beolink_jids": { + "name": "Beolink JIDs", + "description": "Specify which Beolink JIDs will leave from current Beolink experience." + } + }, + "sections": { + "jid_options": { + "name": "[%key:component::bang_olufsen::common::jid_options_name%]", + "description": "[%key:component::bang_olufsen::common::jid_options_description%]" + } + } + } + }, "exceptions": { "m3u_invalid_format": { "message": "Media sources with the .m3u extension are not supported." }, - "non_deezer_seeking": { - "message": "Seeking is currently only supported when using Deezer" - }, "invalid_source": { "message": "Invalid source: {invalid_source}. Valid sources are: {valid_sources}" }, @@ -40,6 +123,12 @@ }, "play_media_error": { "message": "An error occurred while attempting to play {media_type}: {error_message}." 
+ }, + "invalid_grouping_entity": { + "message": "Entity with id: {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?" + }, + "invalid_sound_mode": { + "message": "{invalid_sound_mode} is an invalid sound mode. Valid values are: {valid_sound_modes}." } } } diff --git a/homeassistant/components/bang_olufsen/util.py b/homeassistant/components/bang_olufsen/util.py index c54b3059ee4..e375b58e8ac 100644 --- a/homeassistant/components/bang_olufsen/util.py +++ b/homeassistant/components/bang_olufsen/util.py @@ -16,3 +16,8 @@ def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry: assert device return device + + +def get_serial_number_from_jid(jid: str) -> str: + """Get serial number from Beolink JID.""" + return jid.split(".")[2].split("@")[0] diff --git a/homeassistant/components/bang_olufsen/websocket.py b/homeassistant/components/bang_olufsen/websocket.py index 0c0a5096d91..bc817226b61 100644 --- a/homeassistant/components/bang_olufsen/websocket.py +++ b/homeassistant/components/bang_olufsen/websocket.py @@ -5,6 +5,7 @@ from __future__ import annotations import logging from mozart_api.models import ( + ListeningModeProps, PlaybackContentMetadata, PlaybackError, PlaybackProgress, @@ -14,7 +15,7 @@ from mozart_api.models import ( VolumeState, WebsocketNotificationTag, ) -from mozart_api.mozart_client import MozartClient +from mozart_api.mozart_client import BaseWebSocketResponse, MozartClient from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -50,6 +51,9 @@ class BangOlufsenWebsocket(BangOlufsenBase): self._client.get_notification_notifications(self.on_notification_notification) self._client.get_on_connection_lost(self.on_connection_lost) self._client.get_on_connection(self.on_connection) + self._client.get_active_listening_mode_notifications( + self.on_active_listening_mode + ) self._client.get_playback_error_notifications( self.on_playback_error_notification ) @@ -59,6 +63,9 @@ class BangOlufsenWebsocket(BangOlufsenBase): self._client.get_playback_progress_notifications( self.on_playback_progress_notification ) + self._client.get_playback_source_notifications( + self.on_playback_source_notification + ) self._client.get_playback_state_notifications( self.on_playback_state_notification ) @@ -89,6 +96,14 @@ class BangOlufsenWebsocket(BangOlufsenBase): _LOGGER.error("Lost connection to the %s", self.entry.title) self._update_connection_status() + def on_active_listening_mode(self, notification: ListeningModeProps) -> None: + """Send active_listening_mode dispatch.""" + async_dispatcher_send( + self.hass, + f"{self._unique_id}_{WebsocketNotification.ACTIVE_LISTENING_MODE}", + notification, + ) + def on_notification_notification( self, notification: WebsocketNotificationTag ) -> None: @@ -96,7 +111,21 @@ class BangOlufsenWebsocket(BangOlufsenBase): # Try to match the notification type with available WebsocketNotification members notification_type = try_parse_enum(WebsocketNotification, notification.value) - if notification_type is WebsocketNotification.REMOTE_MENU_CHANGED: + if notification_type in ( + WebsocketNotification.BEOLINK_PEERS, + WebsocketNotification.BEOLINK_LISTENERS, + WebsocketNotification.BEOLINK_AVAILABLE_LISTENERS, + ): + async_dispatcher_send( + self.hass, + f"{self._unique_id}_{WebsocketNotification.BEOLINK}", + ) + elif notification_type is WebsocketNotification.CONFIGURATION: + async_dispatcher_send( + self.hass, + f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}", 
+ ) + elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED: async_dispatcher_send( self.hass, f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}", @@ -136,6 +165,14 @@ class BangOlufsenWebsocket(BangOlufsenBase): notification, ) + def on_playback_source_notification(self, notification: Source) -> None: + """Send playback_source dispatch.""" + async_dispatcher_send( + self.hass, + f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}", + notification, + ) + def on_source_change_notification(self, notification: Source) -> None: """Send source_change dispatch.""" async_dispatcher_send( @@ -165,12 +202,13 @@ class BangOlufsenWebsocket(BangOlufsenBase): sw_version=software_status.software_version, ) - def on_all_notifications_raw(self, notification: dict) -> None: + def on_all_notifications_raw(self, notification: BaseWebSocketResponse) -> None: """Receive all notifications.""" + debug_notification = { + "device_id": self._device.id, + "serial_number": int(self._unique_id), + **notification, + } - # Add the device_id and serial_number to the notification - notification["device_id"] = self._device.id - notification["serial_number"] = int(self._unique_id) - - _LOGGER.debug("%s", notification) - self.hass.bus.async_fire(BANG_OLUFSEN_WEBSOCKET_EVENT, notification) + _LOGGER.debug("%s", debug_notification) + self.hass.bus.async_fire(BANG_OLUFSEN_WEBSOCKET_EVENT, debug_notification) diff --git a/homeassistant/components/bayesian/binary_sensor.py b/homeassistant/components/bayesian/binary_sensor.py index 192d7987311..6d203c344f2 100644 --- a/homeassistant/components/bayesian/binary_sensor.py +++ b/homeassistant/components/bayesian/binary_sensor.py @@ -5,7 +5,8 @@ from __future__ import annotations from collections import OrderedDict from collections.abc import Callable import logging -from typing import Any +import math +from typing import TYPE_CHECKING, Any, NamedTuple from uuid import UUID import voluptuous as vol @@ -50,6 +51,7 @@ from .const import ( ATTR_OCCURRED_OBSERVATION_ENTITIES, ATTR_PROBABILITY, ATTR_PROBABILITY_THRESHOLD, + CONF_NUMERIC_STATE, CONF_OBSERVATIONS, CONF_P_GIVEN_F, CONF_P_GIVEN_T, @@ -66,18 +68,74 @@ from .issues import raise_mirrored_entries, raise_no_prob_given_false _LOGGER = logging.getLogger(__name__) -NUMERIC_STATE_SCHEMA = vol.Schema( - { - CONF_PLATFORM: "numeric_state", - vol.Required(CONF_ENTITY_ID): cv.entity_id, - vol.Optional(CONF_ABOVE): vol.Coerce(float), - vol.Optional(CONF_BELOW): vol.Coerce(float), - vol.Required(CONF_P_GIVEN_T): vol.Coerce(float), - vol.Optional(CONF_P_GIVEN_F): vol.Coerce(float), - }, - required=True, +def _above_greater_than_below(config: dict[str, Any]) -> dict[str, Any]: + if config[CONF_PLATFORM] == CONF_NUMERIC_STATE: + above = config.get(CONF_ABOVE) + below = config.get(CONF_BELOW) + if above is None and below is None: + _LOGGER.error( + "For bayesian numeric state for entity: %s at least one of 'above' or 'below' must be specified", + config[CONF_ENTITY_ID], + ) + raise vol.Invalid( + "For bayesian numeric state at least one of 'above' or 'below' must be specified." 
+ ) + if above is not None and below is not None: + if above > below: + _LOGGER.error( + "For bayesian numeric state 'above' (%s) must be less than 'below' (%s)", + above, + below, + ) + raise vol.Invalid("'above' is greater than 'below'") + return config + + +NUMERIC_STATE_SCHEMA = vol.All( + vol.Schema( + { + CONF_PLATFORM: CONF_NUMERIC_STATE, + vol.Required(CONF_ENTITY_ID): cv.entity_id, + vol.Optional(CONF_ABOVE): vol.Coerce(float), + vol.Optional(CONF_BELOW): vol.Coerce(float), + vol.Required(CONF_P_GIVEN_T): vol.Coerce(float), + vol.Optional(CONF_P_GIVEN_F): vol.Coerce(float), + }, + required=True, + ), + _above_greater_than_below, ) + +def _no_overlapping(configs: list[dict]) -> list[dict]: + numeric_configs = [ + config for config in configs if config[CONF_PLATFORM] == CONF_NUMERIC_STATE + ] + if len(numeric_configs) < 2: + return configs + + class NumericConfig(NamedTuple): + above: float + below: float + + d: dict[str, list[NumericConfig]] = {} + for _, config in enumerate(numeric_configs): + above = config.get(CONF_ABOVE, -math.inf) + below = config.get(CONF_BELOW, math.inf) + entity_id: str = str(config[CONF_ENTITY_ID]) + d.setdefault(entity_id, []).append(NumericConfig(above, below)) + + for ent_id, intervals in d.items(): + intervals = sorted(intervals, key=lambda tup: tup.above) + + for i, tup in enumerate(intervals): + if len(intervals) > i + 1 and tup.below > intervals[i + 1].above: + raise vol.Invalid( + f"Ranges for bayesian numeric state entities must not overlap, but {ent_id} has overlapping ranges, above:{tup.above}, below:{tup.below} overlaps with above:{intervals[i+1].above}, below:{intervals[i+1].below}." + ) + return configs + + STATE_SCHEMA = vol.Schema( { CONF_PLATFORM: CONF_STATE, @@ -107,7 +165,8 @@ PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( vol.Required(CONF_OBSERVATIONS): vol.Schema( vol.All( cv.ensure_list, - [vol.Any(NUMERIC_STATE_SCHEMA, STATE_SCHEMA, TEMPLATE_SCHEMA)], + [vol.Any(TEMPLATE_SCHEMA, STATE_SCHEMA, NUMERIC_STATE_SCHEMA)], + _no_overlapping, ) ), vol.Required(CONF_PRIOR): vol.Coerce(float), @@ -211,10 +270,11 @@ class BayesianBinarySensor(BinarySensorEntity): self.observations_by_entity = self._build_observations_by_entity() self.observations_by_template = self._build_observations_by_template() - self.observation_handlers: dict[str, Callable[[Observation], bool | None]] = { + self.observation_handlers: dict[ + str, Callable[[Observation, bool], bool | None] + ] = { "numeric_state": self._process_numeric_state, "state": self._process_state, - "multi_state": self._process_multi_state, } async def async_added_to_hass(self) -> None: @@ -342,8 +402,9 @@ class BayesianBinarySensor(BinarySensorEntity): for observation in self.observations_by_entity[entity]: platform = observation.platform - observation.observed = self.observation_handlers[platform](observation) - + observation.observed = self.observation_handlers[platform]( + observation, observation.multi + ) local_observations[observation.id] = observation return local_observations @@ -408,9 +469,7 @@ class BayesianBinarySensor(BinarySensorEntity): if len(entity_observations) == 1: continue for observation in entity_observations: - if observation.platform != "state": - continue - observation.platform = "multi_state" + observation.multi = True return observations_by_entity @@ -437,14 +496,23 @@ class BayesianBinarySensor(BinarySensorEntity): return observations_by_template - def _process_numeric_state(self, entity_observation: Observation) -> bool | None: + def _process_numeric_state( + 
self, entity_observation: Observation, multi: bool = False + ) -> bool | None: """Return True if numeric condition is met, return False if not, return None otherwise.""" - entity = entity_observation.entity_id + entity_id = entity_observation.entity_id + # if we are dealing with numeric_state observations entity_id cannot be None + if TYPE_CHECKING: + assert entity_id is not None + + entity = self.hass.states.get(entity_id) + if entity is None: + return None try: if condition.state(self.hass, entity, [STATE_UNKNOWN, STATE_UNAVAILABLE]): return None - return condition.async_numeric_state( + result = condition.async_numeric_state( self.hass, entity, entity_observation.below, @@ -452,10 +520,24 @@ class BayesianBinarySensor(BinarySensorEntity): None, entity_observation.to_dict(), ) + if result: + return True + if multi: + state = float(entity.state) + if ( + entity_observation.below is not None + and state == entity_observation.below + ): + return True + return None except ConditionError: return None + else: + return False - def _process_state(self, entity_observation: Observation) -> bool | None: + def _process_state( + self, entity_observation: Observation, multi: bool = False + ) -> bool | None: """Return True if state conditions are met, return False if they are not. Returns None if the state is unavailable. @@ -467,24 +549,13 @@ class BayesianBinarySensor(BinarySensorEntity): if condition.state(self.hass, entity, [STATE_UNKNOWN, STATE_UNAVAILABLE]): return None - return condition.state(self.hass, entity, entity_observation.to_state) + result = condition.state(self.hass, entity, entity_observation.to_state) + if multi and not result: + return None except ConditionError: return None - - def _process_multi_state(self, entity_observation: Observation) -> bool | None: - """Return True if state conditions are met, otherwise return None. - - Never return False as all other states should have their own probabilities configured. 
- """ - - entity = entity_observation.entity_id - - try: - if condition.state(self.hass, entity, entity_observation.to_state): - return True - except ConditionError: - return None - return None + else: + return result @property def extra_state_attributes(self) -> dict[str, Any]: diff --git a/homeassistant/components/bayesian/const.py b/homeassistant/components/bayesian/const.py index 5d3f978cedc..cac4237b4ec 100644 --- a/homeassistant/components/bayesian/const.py +++ b/homeassistant/components/bayesian/const.py @@ -8,6 +8,7 @@ ATTR_PROBABILITY_THRESHOLD = "probability_threshold" CONF_OBSERVATIONS = "observations" CONF_PRIOR = "prior" CONF_TEMPLATE = "template" +CONF_NUMERIC_STATE = "numeric_state" CONF_PROBABILITY_THRESHOLD = "probability_threshold" CONF_P_GIVEN_F = "prob_given_false" CONF_P_GIVEN_T = "prob_given_true" diff --git a/homeassistant/components/bayesian/helpers.py b/homeassistant/components/bayesian/helpers.py index cc8966a90b6..2af3a331775 100644 --- a/homeassistant/components/bayesian/helpers.py +++ b/homeassistant/components/bayesian/helpers.py @@ -33,6 +33,7 @@ class Observation: below: float | None value_template: Template | None observed: bool | None = None + multi: bool = False id: uuid.UUID = field(default_factory=uuid.uuid4) def to_dict(self) -> dict[str, str | float | bool | None]: diff --git a/homeassistant/components/bayesian/icons.json b/homeassistant/components/bayesian/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/bayesian/icons.json +++ b/homeassistant/components/bayesian/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/bbox/device_tracker.py b/homeassistant/components/bbox/device_tracker.py index 7157c47830c..12174d395f7 100644 --- a/homeassistant/components/bbox/device_tracker.py +++ b/homeassistant/components/bbox/device_tracker.py @@ -10,7 +10,7 @@ import pybbox import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -34,7 +34,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> BboxDeviceScanner | None: """Validate the configuration and return a Bbox scanner.""" - scanner = BboxDeviceScanner(config[DOMAIN]) + scanner = BboxDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None @@ -54,7 +54,6 @@ class BboxDeviceScanner(DeviceScanner): self.last_results: list[Device] = [] self.success_init = self._update_info() - _LOGGER.info("Scanner initialized") def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" @@ -78,7 +77,7 @@ class BboxDeviceScanner(DeviceScanner): Returns boolean if scanning successful. 
""" - _LOGGER.info("Scanning") + _LOGGER.debug("Scanning") box = pybbox.Bbox(ip=self.host) result = box.get_all_connected_devices() @@ -96,5 +95,5 @@ class BboxDeviceScanner(DeviceScanner): self.last_results = last_results - _LOGGER.info("Scan successful") + _LOGGER.debug("Scan successful") return True diff --git a/homeassistant/components/bbox/manifest.json b/homeassistant/components/bbox/manifest.json index 9035bea74bc..67e54ae2359 100644 --- a/homeassistant/components/bbox/manifest.json +++ b/homeassistant/components/bbox/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bbox", "iot_class": "local_polling", "loggers": ["pybbox"], + "quality_scale": "legacy", "requirements": ["pybbox==0.0.5-alpha"] } diff --git a/homeassistant/components/beewi_smartclim/manifest.json b/homeassistant/components/beewi_smartclim/manifest.json index 3555f9181bb..baf41be4345 100644 --- a/homeassistant/components/beewi_smartclim/manifest.json +++ b/homeassistant/components/beewi_smartclim/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/beewi_smartclim", "iot_class": "local_polling", "loggers": ["beewi_smartclim"], + "quality_scale": "legacy", "requirements": ["beewi-smartclim==0.0.10"] } diff --git a/homeassistant/components/binary_sensor/__init__.py b/homeassistant/components/binary_sensor/__init__.py index 0b3e423e339..f31c3d102b0 100644 --- a/homeassistant/components/binary_sensor/__init__.py +++ b/homeassistant/components/binary_sensor/__init__.py @@ -4,10 +4,10 @@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import cached_property, partial import logging from typing import Literal, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -15,19 +15,15 @@ from homeassistant.const import STATE_OFF, STATE_ON, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey _LOGGER = logging.getLogger(__name__) DOMAIN = "binary_sensor" +DATA_COMPONENT: HassKey[EntityComponent[BinarySensorEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -123,101 +119,14 @@ class BinarySensorDeviceClass(StrEnum): DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.Coerce(BinarySensorDeviceClass)) - -# DEVICE_CLASS* below are deprecated as of 2021.12 -# use the BinarySensorDeviceClass enum instead. 
DEVICE_CLASSES = [cls.value for cls in BinarySensorDeviceClass] -_DEPRECATED_DEVICE_CLASS_BATTERY = DeprecatedConstantEnum( - BinarySensorDeviceClass.BATTERY, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_BATTERY_CHARGING = DeprecatedConstantEnum( - BinarySensorDeviceClass.BATTERY_CHARGING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_CO = DeprecatedConstantEnum( - BinarySensorDeviceClass.CO, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_COLD = DeprecatedConstantEnum( - BinarySensorDeviceClass.COLD, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_CONNECTIVITY = DeprecatedConstantEnum( - BinarySensorDeviceClass.CONNECTIVITY, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_DOOR = DeprecatedConstantEnum( - BinarySensorDeviceClass.DOOR, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_GARAGE_DOOR = DeprecatedConstantEnum( - BinarySensorDeviceClass.GARAGE_DOOR, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_GAS = DeprecatedConstantEnum( - BinarySensorDeviceClass.GAS, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_HEAT = DeprecatedConstantEnum( - BinarySensorDeviceClass.HEAT, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_LIGHT = DeprecatedConstantEnum( - BinarySensorDeviceClass.LIGHT, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_LOCK = DeprecatedConstantEnum( - BinarySensorDeviceClass.LOCK, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_MOISTURE = DeprecatedConstantEnum( - BinarySensorDeviceClass.MOISTURE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_MOTION = DeprecatedConstantEnum( - BinarySensorDeviceClass.MOTION, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_MOVING = DeprecatedConstantEnum( - BinarySensorDeviceClass.MOVING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_OCCUPANCY = DeprecatedConstantEnum( - BinarySensorDeviceClass.OCCUPANCY, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_OPENING = DeprecatedConstantEnum( - BinarySensorDeviceClass.OPENING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PLUG = DeprecatedConstantEnum( - BinarySensorDeviceClass.PLUG, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_POWER = DeprecatedConstantEnum( - BinarySensorDeviceClass.POWER, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PRESENCE = DeprecatedConstantEnum( - BinarySensorDeviceClass.PRESENCE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PROBLEM = DeprecatedConstantEnum( - BinarySensorDeviceClass.PROBLEM, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_RUNNING = DeprecatedConstantEnum( - BinarySensorDeviceClass.RUNNING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SAFETY = DeprecatedConstantEnum( - BinarySensorDeviceClass.SAFETY, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SMOKE = DeprecatedConstantEnum( - BinarySensorDeviceClass.SMOKE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SOUND = DeprecatedConstantEnum( - BinarySensorDeviceClass.SOUND, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_TAMPER = DeprecatedConstantEnum( - BinarySensorDeviceClass.TAMPER, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_UPDATE = DeprecatedConstantEnum( - BinarySensorDeviceClass.UPDATE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_VIBRATION = DeprecatedConstantEnum( - BinarySensorDeviceClass.VIBRATION, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_WINDOW = DeprecatedConstantEnum( - BinarySensorDeviceClass.WINDOW, "2025.1" -) # mypy: disallow-any-generics async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for binary sensors.""" - component = hass.data[DOMAIN] = EntityComponent[BinarySensorEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[BinarySensorEntity]( logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL ) @@ -227,14 +136,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: 
ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[BinarySensorEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[BinarySensorEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class BinarySensorEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -293,11 +200,3 @@ class BinarySensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) if (is_on := self.is_on) is None: return None return STATE_ON if is_on else STATE_OFF - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/bitcoin/manifest.json b/homeassistant/components/bitcoin/manifest.json index 6f5fd678009..b208e904cab 100644 --- a/homeassistant/components/bitcoin/manifest.json +++ b/homeassistant/components/bitcoin/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bitcoin", "iot_class": "cloud_polling", "loggers": ["blockchain"], + "quality_scale": "legacy", "requirements": ["blockchain==1.4.4"] } diff --git a/homeassistant/components/bizkaibus/manifest.json b/homeassistant/components/bizkaibus/manifest.json index b47df75bbe5..5a333546401 100644 --- a/homeassistant/components/bizkaibus/manifest.json +++ b/homeassistant/components/bizkaibus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bizkaibus", "iot_class": "cloud_polling", "loggers": ["bizkaibus"], + "quality_scale": "legacy", "requirements": ["bizkaibus==0.1.1"] } diff --git a/homeassistant/components/blackbird/icons.json b/homeassistant/components/blackbird/icons.json index f080fb5f857..815a45ba174 100644 --- a/homeassistant/components/blackbird/icons.json +++ b/homeassistant/components/blackbird/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_all_zones": "mdi:home-sound-in" + "set_all_zones": { + "service": "mdi:home-sound-in" + } } } diff --git a/homeassistant/components/blackbird/manifest.json b/homeassistant/components/blackbird/manifest.json index d75b69dfaf8..a0f4b0c383c 100644 --- a/homeassistant/components/blackbird/manifest.json +++ b/homeassistant/components/blackbird/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/blackbird", "iot_class": "local_polling", "loggers": ["pyblackbird"], + "quality_scale": "legacy", "requirements": ["pyblackbird==0.6"] } diff --git a/homeassistant/components/blackbird/media_player.py b/homeassistant/components/blackbird/media_player.py index 46cabaf4099..37672e98e0b 100644 --- a/homeassistant/components/blackbird/media_player.py +++ b/homeassistant/components/blackbird/media_player.py @@ -103,7 +103,7 @@ def setup_platform( devices = [] for zone_id, extra in config[CONF_ZONES].items(): - _LOGGER.info("Adding zone %d - %s", zone_id, extra[CONF_NAME]) + _LOGGER.debug("Adding zone %d - %s", zone_id, extra[CONF_NAME]) unique_id = f"{connection}-{zone_id}" device = BlackbirdZone(blackbird, sources, zone_id, extra[CONF_NAME]) hass.data[DATA_BLACKBIRD][unique_id] = 
device diff --git a/homeassistant/components/blebox/__init__.py b/homeassistant/components/blebox/__init__.py index 77b9618a5e3..983f5750036 100644 --- a/homeassistant/components/blebox/__init__.py +++ b/homeassistant/components/blebox/__init__.py @@ -4,7 +4,6 @@ import logging from blebox_uniapi.box import Box from blebox_uniapi.error import Error -from blebox_uniapi.feature import Feature from blebox_uniapi.session import ApiHost from homeassistant.config_entries import ConfigEntry @@ -17,12 +16,12 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity -from .const import DEFAULT_SETUP_TIMEOUT, DOMAIN, PRODUCT +from .const import DEFAULT_SETUP_TIMEOUT from .helpers import get_maybe_authenticated_session +type BleBoxConfigEntry = ConfigEntry[Box] + _LOGGER = logging.getLogger(__name__) PLATFORMS = [ @@ -38,7 +37,7 @@ PLATFORMS = [ PARALLEL_UPDATES = 0 -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BleBoxConfigEntry) -> bool: """Set up BleBox devices from a config entry.""" host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] @@ -58,46 +57,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.error("Identify failed at %s:%d (%s)", api_host.host, api_host.port, ex) raise ConfigEntryNotReady from ex - domain = hass.data.setdefault(DOMAIN, {}) - domain_entry = domain.setdefault(entry.entry_id, {}) - product = domain_entry.setdefault(PRODUCT, product) + entry.runtime_data = product await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BleBoxConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok - - -class BleBoxEntity[_FeatureT: Feature](Entity): - """Implements a common class for entities representing a BleBox feature.""" - - def __init__(self, feature: _FeatureT) -> None: - """Initialize a BleBox entity.""" - self._feature = feature - self._attr_name = feature.full_name - self._attr_unique_id = feature.unique_id - product = feature.product - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, product.unique_id)}, - manufacturer=product.brand, - model=product.model, - name=product.name, - sw_version=product.firmware_version, - configuration_url=f"http://{product.address}", - ) - - async def async_update(self) -> None: - """Update the entity state.""" - try: - await self._feature.async_update() - except Error as ex: - _LOGGER.error("Updating '%s' failed: %s", self.name, ex) + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/blebox/binary_sensor.py b/homeassistant/components/blebox/binary_sensor.py index 7eb6fd1e5a2..2aa86059ee2 100644 --- a/homeassistant/components/blebox/binary_sensor.py +++ b/homeassistant/components/blebox/binary_sensor.py @@ -1,18 +1,17 @@ """BleBox binary sensor entities.""" from blebox_uniapi.binary_sensor import BinarySensor as BinarySensorFeature -from blebox_uniapi.box import Box from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, 
BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN, PRODUCT, BleBoxEntity +from . import BleBoxConfigEntry +from .entity import BleBoxEntity BINARY_SENSOR_TYPES = ( BinarySensorEntityDescription( @@ -24,15 +23,13 @@ BINARY_SENSOR_TYPES = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox entry.""" - - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ BleBoxBinarySensorEntity(feature, description) - for feature in product.features.get("binary_sensors", []) + for feature in config_entry.runtime_data.features.get("binary_sensors", []) for description in BINARY_SENSOR_TYPES if description.key == feature.device_class ] diff --git a/homeassistant/components/blebox/button.py b/homeassistant/components/blebox/button.py index 940fe7f8f6f..90356c8ae14 100644 --- a/homeassistant/components/blebox/button.py +++ b/homeassistant/components/blebox/button.py @@ -2,28 +2,25 @@ from __future__ import annotations -from blebox_uniapi.box import Box import blebox_uniapi.button from homeassistant.components.button import ButtonEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BleBoxEntity -from .const import DOMAIN, PRODUCT +from . import BleBoxConfigEntry +from .entity import BleBoxEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox button entry.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] - entities = [ - BleBoxButtonEntity(feature) for feature in product.features.get("buttons", []) + BleBoxButtonEntity(feature) + for feature in config_entry.runtime_data.features.get("buttons", []) ] async_add_entities(entities, True) diff --git a/homeassistant/components/blebox/climate.py b/homeassistant/components/blebox/climate.py index 24f036dcd49..2c528d50e3e 100644 --- a/homeassistant/components/blebox/climate.py +++ b/homeassistant/components/blebox/climate.py @@ -3,7 +3,6 @@ from datetime import timedelta from typing import Any -from blebox_uniapi.box import Box import blebox_uniapi.climate from homeassistant.components.climate import ( @@ -12,13 +11,12 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BleBoxEntity -from .const import DOMAIN, PRODUCT +from . 
import BleBoxConfigEntry +from .entity import BleBoxEntity SCAN_INTERVAL = timedelta(seconds=5) @@ -39,14 +37,13 @@ BLEBOX_TO_HVACACTION = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox climate entity.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] - entities = [ - BleBoxClimateEntity(feature) for feature in product.features.get("climates", []) + BleBoxClimateEntity(feature) + for feature in config_entry.runtime_data.features.get("climates", []) ] async_add_entities(entities, True) @@ -60,7 +57,6 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False @property def hvac_modes(self): diff --git a/homeassistant/components/blebox/const.py b/homeassistant/components/blebox/const.py index ff6a6b33af6..e9ea1922302 100644 --- a/homeassistant/components/blebox/const.py +++ b/homeassistant/components/blebox/const.py @@ -1,7 +1,6 @@ """Constants for the BleBox devices integration.""" DOMAIN = "blebox" -PRODUCT = "product" DEFAULT_SETUP_TIMEOUT = 10 diff --git a/homeassistant/components/blebox/cover.py b/homeassistant/components/blebox/cover.py index bb75c88ca2a..4f2a7eeef11 100644 --- a/homeassistant/components/blebox/cover.py +++ b/homeassistant/components/blebox/cover.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any -from blebox_uniapi.box import Box import blebox_uniapi.cover from blebox_uniapi.cover import BleboxCoverState @@ -14,14 +13,13 @@ from homeassistant.components.cover import ( CoverDeviceClass, CoverEntity, CoverEntityFeature, + CoverState, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BleBoxEntity -from .const import DOMAIN, PRODUCT +from . 
import BleBoxConfigEntry +from .entity import BleBoxEntity BLEBOX_TO_COVER_DEVICE_CLASSES = { "gate": CoverDeviceClass.GATE, @@ -32,27 +30,27 @@ BLEBOX_TO_COVER_DEVICE_CLASSES = { BLEBOX_TO_HASS_COVER_STATES = { None: None, # all blebox covers - BleboxCoverState.MOVING_DOWN: STATE_CLOSING, - BleboxCoverState.MOVING_UP: STATE_OPENING, - BleboxCoverState.MANUALLY_STOPPED: STATE_OPEN, - BleboxCoverState.LOWER_LIMIT_REACHED: STATE_CLOSED, - BleboxCoverState.UPPER_LIMIT_REACHED: STATE_OPEN, + BleboxCoverState.MOVING_DOWN: CoverState.CLOSING, + BleboxCoverState.MOVING_UP: CoverState.OPENING, + BleboxCoverState.MANUALLY_STOPPED: CoverState.OPEN, + BleboxCoverState.LOWER_LIMIT_REACHED: CoverState.CLOSED, + BleboxCoverState.UPPER_LIMIT_REACHED: CoverState.OPEN, # extra states of gateController product - BleboxCoverState.OVERLOAD: STATE_OPEN, - BleboxCoverState.MOTOR_FAILURE: STATE_OPEN, - BleboxCoverState.SAFETY_STOP: STATE_OPEN, + BleboxCoverState.OVERLOAD: CoverState.OPEN, + BleboxCoverState.MOTOR_FAILURE: CoverState.OPEN, + BleboxCoverState.SAFETY_STOP: CoverState.OPEN, } async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox entry.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ - BleBoxCoverEntity(feature) for feature in product.features.get("covers", []) + BleBoxCoverEntity(feature) + for feature in config_entry.runtime_data.features.get("covers", []) ] async_add_entities(entities, True) @@ -98,17 +96,17 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity): @property def is_opening(self) -> bool | None: """Return whether cover is opening.""" - return self._is_state(STATE_OPENING) + return self._is_state(CoverState.OPENING) @property def is_closing(self) -> bool | None: """Return whether cover is closing.""" - return self._is_state(STATE_CLOSING) + return self._is_state(CoverState.CLOSING) @property def is_closed(self) -> bool | None: """Return whether cover is closed.""" - return self._is_state(STATE_CLOSED) + return self._is_state(CoverState.CLOSED) async def async_open_cover(self, **kwargs: Any) -> None: """Fully open the cover position.""" diff --git a/homeassistant/components/blebox/entity.py b/homeassistant/components/blebox/entity.py new file mode 100644 index 00000000000..14e87349a62 --- /dev/null +++ b/homeassistant/components/blebox/entity.py @@ -0,0 +1,39 @@ +"""Base entity for the BleBox devices integration.""" + +import logging + +from blebox_uniapi.error import Error +from blebox_uniapi.feature import Feature + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class BleBoxEntity[_FeatureT: Feature](Entity): + """Implements a common class for entities representing a BleBox feature.""" + + def __init__(self, feature: _FeatureT) -> None: + """Initialize a BleBox entity.""" + self._feature = feature + self._attr_name = feature.full_name + self._attr_unique_id = feature.unique_id + product = feature.product + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, product.unique_id)}, + manufacturer=product.brand, + model=product.model, + name=product.name, + sw_version=product.firmware_version, + configuration_url=f"http://{product.address}", + ) + + async def async_update(self) -> None: + """Update the entity state.""" + try: + await 
self._feature.async_update() + except Error as ex: + _LOGGER.error("Updating '%s' failed: %s", self.name, ex) diff --git a/homeassistant/components/blebox/light.py b/homeassistant/components/blebox/light.py index 1f994db7243..c3c9de8be51 100644 --- a/homeassistant/components/blebox/light.py +++ b/homeassistant/components/blebox/light.py @@ -6,13 +6,12 @@ from datetime import timedelta import logging from typing import Any -from blebox_uniapi.box import Box import blebox_uniapi.light from blebox_uniapi.light import BleboxColorMode from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -21,12 +20,12 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util -from . import BleBoxEntity -from .const import DOMAIN, PRODUCT +from . import BleBoxConfigEntry +from .entity import BleBoxEntity _LOGGER = logging.getLogger(__name__) @@ -35,13 +34,13 @@ SCAN_INTERVAL = timedelta(seconds=5) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox entry.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ - BleBoxLightEntity(feature) for feature in product.features.get("lights", []) + BleBoxLightEntity(feature) + for feature in config_entry.runtime_data.features.get("lights", []) ] async_add_entities(entities, True) @@ -60,6 +59,9 @@ COLOR_MODE_MAP = { class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): """Representation of BleBox lights.""" + _attr_min_color_temp_kelvin = 2700 # 370 Mireds + _attr_max_color_temp_kelvin = 6500 # 154 Mireds + def __init__(self, feature: blebox_uniapi.light.Light) -> None: """Initialize a BleBox light.""" super().__init__(feature) @@ -77,9 +79,9 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): return self._feature.brightness @property - def color_temp(self): - """Return color temperature.""" - return self._feature.color_temp + def color_temp_kelvin(self) -> int: + """Return the color temperature value in Kelvin.""" + return color_util.color_temperature_mired_to_kelvin(self._feature.color_temp) @property def color_mode(self): @@ -87,12 +89,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): Set values to _attr_ibutes if needed. 
""" - color_mode_tmp = COLOR_MODE_MAP.get(self._feature.color_mode, ColorMode.ONOFF) - if color_mode_tmp == ColorMode.COLOR_TEMP: - self._attr_min_mireds = 1 - self._attr_max_mireds = 255 - - return color_mode_tmp + return COLOR_MODE_MAP.get(self._feature.color_mode, ColorMode.ONOFF) @property def supported_color_modes(self): @@ -140,7 +137,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): rgbw = kwargs.get(ATTR_RGBW_COLOR) brightness = kwargs.get(ATTR_BRIGHTNESS) effect = kwargs.get(ATTR_EFFECT) - color_temp = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) rgbww = kwargs.get(ATTR_RGBWW_COLOR) feature = self._feature value = feature.sensible_on_value @@ -148,9 +145,10 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): if rgbw is not None: value = list(rgbw) - if color_temp is not None: + if color_temp_kelvin is not None: value = feature.return_color_temp_with_brightness( - int(color_temp), self.brightness + int(color_util.color_temperature_kelvin_to_mired(color_temp_kelvin)), + self.brightness, ) if rgbww is not None: @@ -162,9 +160,12 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): value = list(rgb) if brightness is not None: - if self.color_mode == ATTR_COLOR_TEMP: + if self.color_mode == ColorMode.COLOR_TEMP: value = feature.return_color_temp_with_brightness( - self.color_temp, brightness + color_util.color_temperature_kelvin_to_mired( + self.color_temp_kelvin + ), + brightness, ) else: value = feature.apply_brightness(value, brightness) diff --git a/homeassistant/components/blebox/manifest.json b/homeassistant/components/blebox/manifest.json index a2c6495cc56..83ec27f6eef 100644 --- a/homeassistant/components/blebox/manifest.json +++ b/homeassistant/components/blebox/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/blebox", "iot_class": "local_polling", "loggers": ["blebox_uniapi"], - "requirements": ["blebox-uniapi==2.4.2"], + "requirements": ["blebox-uniapi==2.5.0"], "zeroconf": ["_bbxsrv._tcp.local."] } diff --git a/homeassistant/components/blebox/sensor.py b/homeassistant/components/blebox/sensor.py index fa11f6d6680..c0abff31257 100644 --- a/homeassistant/components/blebox/sensor.py +++ b/homeassistant/components/blebox/sensor.py @@ -1,6 +1,5 @@ """BleBox sensor entities.""" -from blebox_uniapi.box import Box import blebox_uniapi.sensor from homeassistant.components.sensor import ( @@ -9,7 +8,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, LIGHT_LUX, @@ -27,8 +25,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BleBoxEntity -from .const import DOMAIN, PRODUCT +from . 
import BleBoxConfigEntry +from .entity import BleBoxEntity SENSOR_TYPES = ( SensorEntityDescription( @@ -117,14 +115,13 @@ SENSOR_TYPES = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox entry.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ BleBoxSensorEntity(feature, description) - for feature in product.features.get("sensors", []) + for feature in config_entry.runtime_data.features.get("sensors", []) for description in SENSOR_TYPES if description.key == feature.device_class ] diff --git a/homeassistant/components/blebox/strings.json b/homeassistant/components/blebox/strings.json index b179f0d097b..18c689e093d 100644 --- a/homeassistant/components/blebox/strings.json +++ b/homeassistant/components/blebox/strings.json @@ -15,7 +15,9 @@ "description": "Set up your BleBox to integrate with Home Assistant.", "data": { "host": "[%key:common::config_flow::data::ip%]", - "port": "[%key:common::config_flow::data::port%]" + "password": "[%key:common::config_flow::data::password%]", + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]" }, "title": "Set up your BleBox device" } diff --git a/homeassistant/components/blebox/switch.py b/homeassistant/components/blebox/switch.py index a68b9f01cf2..c6f439e27c5 100644 --- a/homeassistant/components/blebox/switch.py +++ b/homeassistant/components/blebox/switch.py @@ -3,29 +3,27 @@ from datetime import timedelta from typing import Any -from blebox_uniapi.box import Box import blebox_uniapi.switch from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BleBoxEntity -from .const import DOMAIN, PRODUCT +from . 
import BleBoxConfigEntry +from .entity import BleBoxEntity SCAN_INTERVAL = timedelta(seconds=5) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox switch entity.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ - BleBoxSwitchEntity(feature) for feature in product.features.get("switches", []) + BleBoxSwitchEntity(feature) + for feature in config_entry.runtime_data.features.get("switches", []) ] async_add_entities(entities, True) diff --git a/homeassistant/components/blink/__init__.py b/homeassistant/components/blink/__init__.py index d21994ecc8f..f6516434cd2 100644 --- a/homeassistant/components/blink/__init__.py +++ b/homeassistant/components/blink/__init__.py @@ -2,6 +2,7 @@ from copy import deepcopy import logging +from typing import Any from aiohttp import ClientError from blinkpy.auth import Auth @@ -9,7 +10,6 @@ from blinkpy.blinkpy import Blink import voluptuous as vol from homeassistant.components import persistent_notification -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry from homeassistant.const import ( CONF_FILE_PATH, CONF_FILENAME, @@ -24,7 +24,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, PLATFORMS -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator from .services import setup_services _LOGGER = logging.getLogger(__name__) @@ -40,13 +40,11 @@ SERVICE_SAVE_RECENT_CLIPS_SCHEMA = vol.Schema( CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -async def _reauth_flow_wrapper(hass, data): +async def _reauth_flow_wrapper( + hass: HomeAssistant, entry: BlinkConfigEntry, data: dict[str, Any] +) -> None: """Reauth flow wrapper.""" - hass.add_job( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=data - ) - ) + entry.async_start_reauth(hass, data=data) persistent_notification.async_create( hass, ( @@ -57,16 +55,16 @@ async def _reauth_flow_wrapper(hass, data): ) -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> bool: """Handle migration of a previous version config entry.""" _LOGGER.debug("Migrating from version %s", entry.version) data = {**entry.data} if entry.version == 1: data.pop("login_response", None) - await _reauth_flow_wrapper(hass, data) + await _reauth_flow_wrapper(hass, entry, data) return False if entry.version == 2: - await _reauth_flow_wrapper(hass, data) + await _reauth_flow_wrapper(hass, entry, data) return False return True @@ -79,10 +77,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> bool: """Set up Blink via config entry.""" - hass.data.setdefault(DOMAIN, {}) - _async_import_options_from_data_if_missing(hass, entry) session = async_get_clientsession(hass) blink = Blink(session=session) @@ -104,7 +100,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: raise ConfigEntryNotReady await coordinator.async_config_entry_first_refresh() - hass.data[DOMAIN][entry.entry_id] = coordinator + + entry.runtime_data = 
coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -113,7 +110,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @callback def _async_import_options_from_data_if_missing( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: BlinkConfigEntry ) -> None: options = dict(entry.options) if CONF_SCAN_INTERVAL not in entry.options: @@ -123,8 +120,6 @@ def _async_import_options_from_data_if_missing( hass.config_entries.async_update_entry(entry, options=options) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> bool: """Unload Blink entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/blink/alarm_control_panel.py b/homeassistant/components/blink/alarm_control_panel.py index 0ad15cf0d31..bfb8aa9a3a0 100644 --- a/homeassistant/components/blink/alarm_control_panel.py +++ b/homeassistant/components/blink/alarm_control_panel.py @@ -9,13 +9,9 @@ from blinkpy.blinkpy import Blink, BlinkSyncModule from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_ATTRIBUTION, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_DISARMED, -) +from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo @@ -23,16 +19,18 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DEFAULT_ATTRIBUTION, DEFAULT_BRAND, DOMAIN -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator _LOGGER = logging.getLogger(__name__) async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + config_entry: BlinkConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Blink Alarm Control Panels.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data sync_modules = [] for sync_name, sync_module in coordinator.api.sync.items(): @@ -80,8 +78,10 @@ class BlinkSyncModuleHA( self.sync.attributes["associated_cameras"] = list(self.sync.cameras) self.sync.attributes[ATTR_ATTRIBUTION] = DEFAULT_ATTRIBUTION self._attr_extra_state_attributes = self.sync.attributes - self._attr_state = ( - STATE_ALARM_ARMED_AWAY if self.sync.arm else STATE_ALARM_DISARMED + self._attr_alarm_state = ( + AlarmControlPanelState.ARMED_AWAY + if self.sync.arm + else AlarmControlPanelState.DISARMED ) async def async_alarm_disarm(self, code: str | None = None) -> None: diff --git a/homeassistant/components/blink/binary_sensor.py b/homeassistant/components/blink/binary_sensor.py index 2f0a56a901c..c11d4cfea23 100644 --- a/homeassistant/components/blink/binary_sensor.py +++ b/homeassistant/components/blink/binary_sensor.py @@ -9,7 +9,6 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, 
BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo @@ -23,7 +22,7 @@ from .const import ( TYPE_CAMERA_ARMED, TYPE_MOTION_DETECTED, ) -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -47,11 +46,13 @@ BINARY_SENSORS_TYPES: tuple[BinarySensorEntityDescription, ...] = ( async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + config_entry: BlinkConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the blink binary sensors.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data entities = [ BlinkBinarySensor(coordinator, camera, description) diff --git a/homeassistant/components/blink/camera.py b/homeassistant/components/blink/camera.py index cce9100a0bd..56a84135a9b 100644 --- a/homeassistant/components/blink/camera.py +++ b/homeassistant/components/blink/camera.py @@ -10,7 +10,6 @@ from requests.exceptions import ChunkedEncodingError import voluptuous as vol from homeassistant.components.camera import Camera -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError @@ -28,7 +27,7 @@ from .const import ( SERVICE_SAVE_VIDEO, SERVICE_TRIGGER, ) -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -38,11 +37,13 @@ PARALLEL_UPDATES = 1 async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + config_entry: BlinkConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Blink Camera.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data entities = [ BlinkCamera(coordinator, name, camera) for name, camera in coordinator.api.cameras.items() diff --git a/homeassistant/components/blink/config_flow.py b/homeassistant/components/blink/config_flow.py index 62f15bd6e10..e37df26aaa8 100644 --- a/homeassistant/components/blink/config_flow.py +++ b/homeassistant/components/blink/config_flow.py @@ -10,7 +10,7 @@ from blinkpy.auth import Auth, LoginError, TokenRefreshFailed from blinkpy.blinkpy import Blink, BlinkSetupError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_PIN, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -61,6 +61,8 @@ class BlinkConfigFlow(ConfigFlow, domain=DOMAIN): session=async_get_clientsession(self.hass), ) await self.async_set_unique_id(user_input[CONF_USERNAME]) + if self.source != SOURCE_REAUTH: + self._abort_if_unique_id_configured() try: await validate_input(self.auth) diff --git a/homeassistant/components/blink/coordinator.py b/homeassistant/components/blink/coordinator.py index e71ff4e449e..7278dabe083 
100644 --- a/homeassistant/components/blink/coordinator.py +++ b/homeassistant/components/blink/coordinator.py @@ -8,6 +8,7 @@ from typing import Any from blinkpy.blinkpy import Blink +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -16,6 +17,8 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = 300 +type BlinkConfigEntry = ConfigEntry[BlinkUpdateCoordinator] + class BlinkUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """BlinkUpdateCoordinator - In charge of downloading the data for a site.""" diff --git a/homeassistant/components/blink/diagnostics.py b/homeassistant/components/blink/diagnostics.py index 88ff2aff928..255f58fc369 100644 --- a/homeassistant/components/blink/diagnostics.py +++ b/homeassistant/components/blink/diagnostics.py @@ -4,24 +4,21 @@ from __future__ import annotations from typing import Any -from blinkpy.blinkpy import Blink - from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN +from .coordinator import BlinkConfigEntry TO_REDACT = {"serial", "macaddress", "username", "password", "token", "unique_id"} async def async_get_config_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BlinkConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - api: Blink = hass.data[DOMAIN][config_entry.entry_id].api + api = config_entry.runtime_data.api data = { camera.name: dict(camera.attributes.items()) diff --git a/homeassistant/components/blink/icons.json b/homeassistant/components/blink/icons.json index 615a3c4c6dc..bea67b25f6d 100644 --- a/homeassistant/components/blink/icons.json +++ b/homeassistant/components/blink/icons.json @@ -12,10 +12,20 @@ } }, "services": { - "record": "mdi:video-box", - "trigger_camera": "mdi:image-refresh", - "save_video": "mdi:file-video", - "save_recent_clips": "mdi:file-video", - "send_pin": "mdi:two-factor-authentication" + "record": { + "service": "mdi:video-box" + }, + "trigger_camera": { + "service": "mdi:image-refresh" + }, + "save_video": { + "service": "mdi:file-video" + }, + "save_recent_clips": { + "service": "mdi:file-video" + }, + "send_pin": { + "service": "mdi:two-factor-authentication" + } } } diff --git a/homeassistant/components/blink/sensor.py b/homeassistant/components/blink/sensor.py index 8a807b9303e..e0b5989cc80 100644 --- a/homeassistant/components/blink/sensor.py +++ b/homeassistant/components/blink/sensor.py @@ -10,15 +10,18 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory, UnitOfTemperature +from homeassistant.const import ( + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DEFAULT_BRAND, DOMAIN, TYPE_TEMPERATURE, TYPE_WIFI_STRENGTH -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -33,6 +36,8 @@ SENSOR_TYPES: 
tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key=TYPE_WIFI_STRENGTH, translation_key="wifi_strength", + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + device_class=SensorDeviceClass.SIGNAL_STRENGTH, entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, ), @@ -40,11 +45,13 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + config_entry: BlinkConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Initialize a Blink sensor.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data entities = [ BlinkSensor(coordinator, camera, description) for camera in coordinator.api.cameras diff --git a/homeassistant/components/blink/services.py b/homeassistant/components/blink/services.py index bb2cbf575dd..dd5d1e37627 100644 --- a/homeassistant/components/blink/services.py +++ b/homeassistant/components/blink/services.py @@ -5,18 +5,14 @@ from __future__ import annotations import voluptuous as vol from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_DEVICE_ID, CONF_PIN +from homeassistant.const import CONF_PIN from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_SEND_PIN +from .coordinator import BlinkConfigEntry -SERVICE_UPDATE_SCHEMA = vol.Schema( - { - vol.Required(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]), - } -) SERVICE_SEND_PIN_SCHEMA = vol.Schema( { vol.Required(ATTR_CONFIG_ENTRY_ID): vol.All(cv.ensure_list, [cv.string]), @@ -30,6 +26,7 @@ def setup_services(hass: HomeAssistant) -> None: async def send_pin(call: ServiceCall): """Call blink to send new pin.""" + config_entry: BlinkConfigEntry | None for entry_id in call.data[ATTR_CONFIG_ENTRY_ID]: if not (config_entry := hass.config_entries.async_get_entry(entry_id)): raise ServiceValidationError( @@ -43,7 +40,7 @@ def setup_services(hass: HomeAssistant) -> None: translation_key="not_loaded", translation_placeholders={"target": config_entry.title}, ) - coordinator = hass.data[DOMAIN][entry_id] + coordinator = config_entry.runtime_data await coordinator.api.auth.send_auth_key( coordinator.api, call.data[CONF_PIN], diff --git a/homeassistant/components/blink/strings.json b/homeassistant/components/blink/strings.json index bd0e7789816..6e2384e5d5b 100644 --- a/homeassistant/components/blink/strings.json +++ b/homeassistant/components/blink/strings.json @@ -11,7 +11,7 @@ "2fa": { "title": "Two-factor authentication", "data": { - "2fa": "Two-factor code" + "pin": "Two-factor code" }, "description": "Enter the PIN sent via email or SMS" } diff --git a/homeassistant/components/blink/switch.py b/homeassistant/components/blink/switch.py index ab9b825ded1..8eabd5c0e59 100644 --- a/homeassistant/components/blink/switch.py +++ b/homeassistant/components/blink/switch.py @@ -9,7 +9,6 @@ from homeassistant.components.switch import ( SwitchEntity, SwitchEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo @@ -17,7 +16,7 @@ from 
homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DEFAULT_BRAND, DOMAIN, TYPE_CAMERA_ARMED -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator SWITCH_TYPES: tuple[SwitchEntityDescription, ...] = ( SwitchEntityDescription( @@ -30,11 +29,11 @@ SWITCH_TYPES: tuple[SwitchEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config: ConfigEntry, + config_entry: BlinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Blink switches.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data async_add_entities( BlinkSwitch(coordinator, camera, description) diff --git a/homeassistant/components/blinksticklight/manifest.json b/homeassistant/components/blinksticklight/manifest.json index 70fac896ff2..d3592b6af6e 100644 --- a/homeassistant/components/blinksticklight/manifest.json +++ b/homeassistant/components/blinksticklight/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/blinksticklight", "iot_class": "local_polling", "loggers": ["blinkstick"], + "quality_scale": "legacy", "requirements": ["BlinkStick==1.2.0"] } diff --git a/homeassistant/components/blockchain/manifest.json b/homeassistant/components/blockchain/manifest.json index 2e58dc5aa03..6c9182ee0c4 100644 --- a/homeassistant/components/blockchain/manifest.json +++ b/homeassistant/components/blockchain/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/blockchain", "iot_class": "cloud_polling", "loggers": ["pyblockchain"], + "quality_scale": "legacy", "requirements": ["python-blockchain-api==0.0.2"] } diff --git a/homeassistant/components/bloomsky/__init__.py b/homeassistant/components/bloomsky/__init__.py deleted file mode 100644 index c2a46baaeb3..00000000000 --- a/homeassistant/components/bloomsky/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -"""Support for BloomSky weather station.""" - -from datetime import timedelta -from http import HTTPStatus -import logging - -import requests -import voluptuous as vol - -from homeassistant.const import CONF_API_KEY, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import discovery -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType -from homeassistant.util import Throttle -from homeassistant.util.unit_system import METRIC_SYSTEM - -_LOGGER = logging.getLogger(__name__) - -PLATFORMS = [Platform.BINARY_SENSOR, Platform.CAMERA, Platform.SENSOR] - -DOMAIN = "bloomsky" - -# The BloomSky only updates every 5-8 minutes as per the API spec so there's -# no point in polling the API more frequently -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=300) - -CONFIG_SCHEMA = vol.Schema( - {DOMAIN: vol.Schema({vol.Required(CONF_API_KEY): cv.string})}, extra=vol.ALLOW_EXTRA -) - - -def setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the BloomSky integration.""" - api_key = config[DOMAIN][CONF_API_KEY] - - try: - bloomsky = BloomSky(api_key, hass.config.units is METRIC_SYSTEM) - except RuntimeError: - return False - - hass.data[DOMAIN] = bloomsky - - for platform in PLATFORMS: - discovery.load_platform(hass, platform, DOMAIN, {}, config) - - return True - - -class BloomSky: - """Handle all communication with the BloomSky API.""" - - # API documentation at 
http://weatherlution.com/bloomsky-api/ - API_URL = "http://api.bloomsky.com/api/skydata" - - def __init__(self, api_key, is_metric): - """Initialize the BookSky.""" - self._api_key = api_key - self._endpoint_argument = "unit=intl" if is_metric else "" - self.devices = {} - self.is_metric = is_metric - _LOGGER.debug("Initial BloomSky device load") - self.refresh_devices() - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - def refresh_devices(self): - """Use the API to retrieve a list of devices.""" - _LOGGER.debug("Fetching BloomSky update") - response = requests.get( - f"{self.API_URL}?{self._endpoint_argument}", - headers={"Authorization": self._api_key}, - timeout=10, - ) - if response.status_code == HTTPStatus.UNAUTHORIZED: - raise RuntimeError("Invalid API_KEY") - if response.status_code == HTTPStatus.METHOD_NOT_ALLOWED: - _LOGGER.error("You have no bloomsky devices configured") - return - if response.status_code != HTTPStatus.OK: - _LOGGER.error("Invalid HTTP response: %s", response.status_code) - return - # Create dictionary keyed off of the device unique id - self.devices.update({device["DeviceID"]: device for device in response.json()}) diff --git a/homeassistant/components/bloomsky/binary_sensor.py b/homeassistant/components/bloomsky/binary_sensor.py deleted file mode 100644 index 12d55f971e1..00000000000 --- a/homeassistant/components/bloomsky/binary_sensor.py +++ /dev/null @@ -1,68 +0,0 @@ -"""Support the binary sensors of a BloomSky weather station.""" - -from __future__ import annotations - -import voluptuous as vol - -from homeassistant.components.binary_sensor import ( - PLATFORM_SCHEMA as BINARY_SENSOR_PLATFORM_SCHEMA, - BinarySensorDeviceClass, - BinarySensorEntity, -) -from homeassistant.const import CONF_MONITORED_CONDITIONS -from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from . 
import DOMAIN - -SENSOR_TYPES = {"Rain": BinarySensorDeviceClass.MOISTURE, "Night": None} - -PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All( - cv.ensure_list, [vol.In(SENSOR_TYPES)] - ) - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the available BloomSky weather binary sensors.""" - # Default needed in case of discovery - if discovery_info is not None: - return - - sensors = config[CONF_MONITORED_CONDITIONS] - bloomsky = hass.data[DOMAIN] - - for device in bloomsky.devices.values(): - for variable in sensors: - add_entities([BloomSkySensor(bloomsky, device, variable)], True) - - -class BloomSkySensor(BinarySensorEntity): - """Representation of a single binary sensor in a BloomSky device.""" - - def __init__(self, bs, device, sensor_name): - """Initialize a BloomSky binary sensor.""" - self._bloomsky = bs - self._device_id = device["DeviceID"] - self._sensor_name = sensor_name - self._attr_name = f"{device['DeviceName']} {sensor_name}" - self._attr_unique_id = f"{self._device_id}-{sensor_name}" - self._attr_device_class = SENSOR_TYPES.get(sensor_name) - - def update(self) -> None: - """Request an update from the BloomSky API.""" - self._bloomsky.refresh_devices() - - self._attr_is_on = self._bloomsky.devices[self._device_id]["Data"][ - self._sensor_name - ] diff --git a/homeassistant/components/bloomsky/camera.py b/homeassistant/components/bloomsky/camera.py deleted file mode 100644 index f07dd1e9d14..00000000000 --- a/homeassistant/components/bloomsky/camera.py +++ /dev/null @@ -1,67 +0,0 @@ -"""Support for a camera of a BloomSky weather station.""" - -from __future__ import annotations - -import logging - -import requests - -from homeassistant.components.camera import Camera -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from . import DOMAIN - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up access to BloomSky cameras.""" - if discovery_info is not None: - return - - bloomsky = hass.data[DOMAIN] - - for device in bloomsky.devices.values(): - add_entities([BloomSkyCamera(bloomsky, device)]) - - -class BloomSkyCamera(Camera): - """Representation of the images published from the BloomSky's camera.""" - - def __init__(self, bs, device): - """Initialize access to the BloomSky camera images.""" - super().__init__() - self._attr_name = device["DeviceName"] - self._id = device["DeviceID"] - self._bloomsky = bs - self._url = "" - self._last_url = "" - # last_image will store images as they are downloaded so that the - # frequent updates in home-assistant don't keep poking the server - # to download the same image over and over. - self._last_image = "" - self._logger = logging.getLogger(__name__) - self._attr_unique_id = self._id - - def camera_image( - self, width: int | None = None, height: int | None = None - ) -> bytes | None: - """Update the camera's image if it has changed.""" - try: - self._url = self._bloomsky.devices[self._id]["Data"]["ImageURL"] - self._bloomsky.refresh_devices() - # If the URL hasn't changed then the image hasn't changed. 
- if self._url != self._last_url: - response = requests.get(self._url, timeout=10) - self._last_url = self._url - self._last_image = response.content - except requests.exceptions.RequestException as error: - self._logger.error("Error getting bloomsky image: %s", error) - return None - - return self._last_image diff --git a/homeassistant/components/bloomsky/manifest.json b/homeassistant/components/bloomsky/manifest.json deleted file mode 100644 index 65d302df239..00000000000 --- a/homeassistant/components/bloomsky/manifest.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "domain": "bloomsky", - "name": "BloomSky", - "codeowners": [], - "documentation": "https://www.home-assistant.io/integrations/bloomsky", - "iot_class": "cloud_polling" -} diff --git a/homeassistant/components/bloomsky/sensor.py b/homeassistant/components/bloomsky/sensor.py deleted file mode 100644 index 6d99506bd44..00000000000 --- a/homeassistant/components/bloomsky/sensor.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Support the sensor of a BloomSky weather station.""" - -from __future__ import annotations - -import voluptuous as vol - -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorDeviceClass, - SensorEntity, -) -from homeassistant.const import ( - AREA_SQUARE_METERS, - CONF_MONITORED_CONDITIONS, - PERCENTAGE, - UnitOfElectricPotential, - UnitOfPressure, - UnitOfTemperature, -) -from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from . import DOMAIN - -# These are the available sensors -SENSOR_TYPES = [ - "Temperature", - "Humidity", - "Pressure", - "Luminance", - "UVIndex", - "Voltage", -] - -# Sensor units - these do not currently align with the API documentation -SENSOR_UNITS_IMPERIAL = { - "Temperature": UnitOfTemperature.FAHRENHEIT, - "Humidity": PERCENTAGE, - "Pressure": UnitOfPressure.INHG, - "Luminance": f"cd/{AREA_SQUARE_METERS}", - "Voltage": UnitOfElectricPotential.MILLIVOLT, -} - -# Metric units -SENSOR_UNITS_METRIC = { - "Temperature": UnitOfTemperature.CELSIUS, - "Humidity": PERCENTAGE, - "Pressure": UnitOfPressure.MBAR, - "Luminance": f"cd/{AREA_SQUARE_METERS}", - "Voltage": UnitOfElectricPotential.MILLIVOLT, -} - -# Device class -SENSOR_DEVICE_CLASS = { - "Temperature": SensorDeviceClass.TEMPERATURE, - "Humidity": SensorDeviceClass.HUMIDITY, - "Pressure": SensorDeviceClass.PRESSURE, - "Voltage": SensorDeviceClass.VOLTAGE, -} - -# Which sensors to format numerically -FORMAT_NUMBERS = ["Temperature", "Pressure", "Voltage"] - -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_TYPES): vol.All( - cv.ensure_list, [vol.In(SENSOR_TYPES)] - ) - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the available BloomSky weather sensors.""" - # Default needed in case of discovery - if discovery_info is not None: - return - - sensors = config[CONF_MONITORED_CONDITIONS] - bloomsky = hass.data[DOMAIN] - - for device in bloomsky.devices.values(): - for variable in sensors: - add_entities([BloomSkySensor(bloomsky, device, variable)], True) - - -class BloomSkySensor(SensorEntity): - """Representation of a single sensor in a BloomSky device.""" - - def __init__(self, bs, device, sensor_name): - 
"""Initialize a BloomSky sensor.""" - self._bloomsky = bs - self._device_id = device["DeviceID"] - self._sensor_name = sensor_name - self._attr_name = f"{device['DeviceName']} {sensor_name}" - self._attr_unique_id = f"{self._device_id}-{sensor_name}" - self._attr_device_class = SENSOR_DEVICE_CLASS.get(sensor_name) - self._attr_native_unit_of_measurement = SENSOR_UNITS_IMPERIAL.get(sensor_name) - if self._bloomsky.is_metric: - self._attr_native_unit_of_measurement = SENSOR_UNITS_METRIC.get(sensor_name) - - def update(self) -> None: - """Request an update from the BloomSky API.""" - self._bloomsky.refresh_devices() - state = self._bloomsky.devices[self._device_id]["Data"][self._sensor_name] - self._attr_native_value = ( - f"{state:.2f}" if self._sensor_name in FORMAT_NUMBERS else state - ) diff --git a/homeassistant/components/blue_current/__init__.py b/homeassistant/components/blue_current/__init__.py index e852dfc8c6e..6d0ccd7b6db 100644 --- a/homeassistant/components/blue_current/__init__.py +++ b/homeassistant/components/blue_current/__init__.py @@ -22,6 +22,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import DOMAIN, EVSE_ID, LOGGER, MODEL_TYPE +type BlueCurrentConfigEntry = ConfigEntry[Connector] + PLATFORMS = [Platform.SENSOR] CHARGE_POINTS = "CHARGE_POINTS" DATA = "data" @@ -32,9 +34,10 @@ OBJECT = "object" VALUE_TYPES = ["CH_STATUS"] -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: BlueCurrentConfigEntry +) -> bool: """Set up Blue Current as a config entry.""" - hass.data.setdefault(DOMAIN, {}) client = Client() api_token = config_entry.data[CONF_API_TOKEN] connector = Connector(hass, config_entry, client) @@ -50,29 +53,25 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ) await client.wait_for_charge_points() - hass.data[DOMAIN][config_entry.entry_id] = connector + config_entry.runtime_data = connector await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: BlueCurrentConfigEntry +) -> bool: """Unload the Blue Current config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms( - config_entry, PLATFORMS - ) - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) class Connector: """Define a class that connects to the Blue Current websocket API.""" def __init__( - self, hass: HomeAssistant, config: ConfigEntry, client: Client + self, hass: HomeAssistant, config: BlueCurrentConfigEntry, client: Client ) -> None: """Initialize.""" self.config = config diff --git a/homeassistant/components/blue_current/config_flow.py b/homeassistant/components/blue_current/config_flow.py index a3aaf60cc39..c8593b7d51c 100644 --- a/homeassistant/components/blue_current/config_flow.py +++ b/homeassistant/components/blue_current/config_flow.py @@ -14,7 +14,7 @@ from bluecurrent_api.exceptions import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_TOKEN from .const import DOMAIN, LOGGER @@ -26,7 +26,6 @@ class 
BlueCurrentConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the config flow for Blue Current.""" VERSION = 1 - _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -53,19 +52,16 @@ class BlueCurrentConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" else: - if not self._reauth_entry: + if self.source != SOURCE_REAUTH: await self.async_set_unique_id(customer_id) self._abort_if_unique_id_configured() return self.async_create_entry(title=email, data=user_input) - if self._reauth_entry.unique_id == customer_id: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input + reauth_entry = self._get_reauth_entry() + if reauth_entry.unique_id == customer_id: + return self.async_update_reload_and_abort( + reauth_entry, data=user_input ) - await self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) - return self.async_abort(reason="reauth_successful") return self.async_abort( reason="wrong_account", @@ -79,7 +75,4 @@ class BlueCurrentConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a reauthorization flow request.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/blue_current/sensor.py b/homeassistant/components/blue_current/sensor.py index 4c590544984..be39e9571ec 100644 --- a/homeassistant/components/blue_current/sensor.py +++ b/homeassistant/components/blue_current/sensor.py @@ -8,7 +8,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CURRENCY_EURO, UnitOfElectricCurrent, @@ -19,7 +18,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import Connector +from . 
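The config flow hunk above drops the stored _reauth_entry attribute in favour of the built-in reauth helpers. Reduced to its shape, and with a made-up validate_token helper standing in for the integration's real validation, the flow looks roughly like this:

from collections.abc import Mapping
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN

STEP_USER_SCHEMA = vol.Schema({vol.Required(CONF_API_TOKEN): str})


async def validate_token(token: str) -> str:
    """Stand-in for the integration's real check; returns an account id."""
    return "customer-1"


class ExampleConfigFlow(ConfigFlow, domain="my_domain"):
    """Hypothetical flow using the modern reauth helpers."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle initial setup and reauth in a single step."""
        if user_input is not None:
            unique_id = await validate_token(user_input[CONF_API_TOKEN])
            if self.source == SOURCE_REAUTH:
                reauth_entry = self._get_reauth_entry()
                if reauth_entry.unique_id != unique_id:
                    return self.async_abort(reason="wrong_account")
                # Updates the entry, reloads it and aborts the flow in one call.
                return self.async_update_reload_and_abort(
                    reauth_entry, data=user_input
                )
            await self.async_set_unique_id(unique_id)
            self._abort_if_unique_id_configured()
            return self.async_create_entry(title="My account", data=user_input)
        return self.async_show_form(step_id="user", data_schema=STEP_USER_SCHEMA)

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Reauth no longer needs to look the entry up manually."""
        return await self.async_step_user()

The same source check (self.source == SOURCE_REAUTH) appears again further down in the bmw_connected_drive config flow.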
import BlueCurrentConfigEntry, Connector from .const import DOMAIN from .entity import BlueCurrentEntity, ChargepointEntity @@ -211,10 +210,12 @@ PARALLEL_UPDATES = 1 async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BlueCurrentConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up Blue Current sensors.""" - connector: Connector = hass.data[DOMAIN][entry.entry_id] + connector = entry.runtime_data sensor_list: list[SensorEntity] = [ ChargePointSensor(connector, sensor, evse_id) for evse_id in connector.charge_points diff --git a/homeassistant/components/bluemaestro/__init__.py b/homeassistant/components/bluemaestro/__init__.py index c25ceb44759..3d358148fab 100644 --- a/homeassistant/components/bluemaestro/__init__.py +++ b/homeassistant/components/bluemaestro/__init__.py @@ -14,27 +14,26 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN - PLATFORMS: list[Platform] = [Platform.SENSOR] _LOGGER = logging.getLogger(__name__) +type BlueMaestroConfigEntry = ConfigEntry[PassiveBluetoothProcessorCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: BlueMaestroConfigEntry) -> bool: """Set up BlueMaestro BLE device from a config entry.""" address = entry.unique_id assert address is not None data = BlueMaestroBluetoothDeviceData() - coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = ( - PassiveBluetoothProcessorCoordinator( - hass, - _LOGGER, - address=address, - mode=BluetoothScanningMode.PASSIVE, - update_method=data.update, - ) + coordinator = PassiveBluetoothProcessorCoordinator( + hass, + _LOGGER, + address=address, + mode=BluetoothScanningMode.PASSIVE, + update_method=data.update, ) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload( coordinator.async_start() @@ -42,9 +41,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: BlueMaestroConfigEntry +) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/bluemaestro/sensor.py b/homeassistant/components/bluemaestro/sensor.py index 75d448c9b9d..57702d4ff31 100644 --- a/homeassistant/components/bluemaestro/sensor.py +++ b/homeassistant/components/bluemaestro/sensor.py @@ -8,11 +8,9 @@ from bluemaestro_ble import ( Units, ) -from homeassistant import config_entries from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataProcessor, PassiveBluetoothDataUpdate, - PassiveBluetoothProcessorCoordinator, PassiveBluetoothProcessorEntity, ) from homeassistant.components.sensor import ( @@ -32,7 +30,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info -from .const import DOMAIN +from . 
import BlueMaestroConfigEntry from .device import device_key_to_bluetooth_entity_key SENSOR_DESCRIPTIONS = { @@ -117,13 +115,11 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: BlueMaestroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the BlueMaestro BLE sensors.""" - coordinator: PassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] + coordinator = entry.runtime_data processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update) entry.async_on_unload( processor.async_add_entities_listener( diff --git a/homeassistant/components/blueprint/__init__.py b/homeassistant/components/blueprint/__init__.py index 92d94708e0f..913f1ca517c 100644 --- a/homeassistant/components/blueprint/__init__.py +++ b/homeassistant/components/blueprint/__init__.py @@ -8,6 +8,7 @@ from . import websocket_api from .const import CONF_USE_BLUEPRINT, DOMAIN # noqa: F401 from .errors import ( # noqa: F401 BlueprintException, + BlueprintInUse, BlueprintWithNameException, FailedToLoad, InvalidBlueprint, @@ -15,7 +16,11 @@ from .errors import ( # noqa: F401 MissingInput, ) from .models import Blueprint, BlueprintInputs, DomainBlueprints # noqa: F401 -from .schemas import is_blueprint_instance_config # noqa: F401 +from .schemas import ( # noqa: F401 + BLUEPRINT_INSTANCE_FIELDS, + BLUEPRINT_SCHEMA, + is_blueprint_instance_config, +) CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) diff --git a/homeassistant/components/blueprint/importer.py b/homeassistant/components/blueprint/importer.py index c231a33991a..c10da532324 100644 --- a/homeassistant/components/blueprint/importer.py +++ b/homeassistant/components/blueprint/importer.py @@ -16,7 +16,7 @@ from homeassistant.helpers import aiohttp_client, config_validation as cv from homeassistant.util import yaml from .models import Blueprint -from .schemas import is_blueprint_config +from .schemas import BLUEPRINT_SCHEMA, is_blueprint_config COMMUNITY_TOPIC_PATTERN = re.compile( r"^https://community.home-assistant.io/t/[a-z0-9-]+/(?P\d+)(?:/(?P\d+)|)$" @@ -126,7 +126,7 @@ def _extract_blueprint_from_community_topic( continue assert isinstance(data, dict) - blueprint = Blueprint(data) + blueprint = Blueprint(data, schema=BLUEPRINT_SCHEMA) break if blueprint is None: @@ -169,7 +169,7 @@ async def fetch_blueprint_from_github_url( raw_yaml = await resp.text() data = yaml.parse_yaml(raw_yaml) assert isinstance(data, dict) - blueprint = Blueprint(data) + blueprint = Blueprint(data, schema=BLUEPRINT_SCHEMA) parsed_import_url = yarl.URL(import_url) suggested_filename = f"{parsed_import_url.parts[1]}/{parsed_import_url.parts[-1]}" @@ -211,7 +211,7 @@ async def fetch_blueprint_from_github_gist_url( continue assert isinstance(data, dict) - blueprint = Blueprint(data) + blueprint = Blueprint(data, schema=BLUEPRINT_SCHEMA) break if blueprint is None: @@ -238,7 +238,7 @@ async def fetch_blueprint_from_website_url( raw_yaml = await resp.text() data = yaml.parse_yaml(raw_yaml) assert isinstance(data, dict) - blueprint = Blueprint(data) + blueprint = Blueprint(data, schema=BLUEPRINT_SCHEMA) parsed_import_url = yarl.URL(url) suggested_filename = f"homeassistant/{parsed_import_url.parts[-1][:-5]}" @@ -256,7 +256,7 @@ async def fetch_blueprint_from_generic_url( data = yaml.parse_yaml(raw_yaml) assert isinstance(data, dict) - blueprint = Blueprint(data) + blueprint = Blueprint(data, schema=BLUEPRINT_SCHEMA) parsed_import_url = yarl.URL(url) 
suggested_filename = f"{parsed_import_url.host}/{parsed_import_url.parts[-1][:-5]}" @@ -273,7 +273,11 @@ FETCH_FUNCTIONS = ( async def fetch_blueprint_from_url(hass: HomeAssistant, url: str) -> ImportedBlueprint: - """Get a blueprint from a url.""" + """Get a blueprint from a url. + + The returned blueprint will only be validated with BLUEPRINT_SCHEMA, not the domain + specific schema. + """ for func in FETCH_FUNCTIONS: with suppress(UnsupportedUrl): imported_bp = await func(hass, url) diff --git a/homeassistant/components/blueprint/models.py b/homeassistant/components/blueprint/models.py index 02a215ca103..f32c3f04989 100644 --- a/homeassistant/components/blueprint/models.py +++ b/homeassistant/components/blueprint/models.py @@ -44,7 +44,7 @@ from .errors import ( InvalidBlueprintInputs, MissingInput, ) -from .schemas import BLUEPRINT_INSTANCE_FIELDS, BLUEPRINT_SCHEMA +from .schemas import BLUEPRINT_INSTANCE_FIELDS class Blueprint: @@ -56,10 +56,11 @@ class Blueprint: *, path: str | None = None, expected_domain: str | None = None, + schema: Callable[[Any], Any], ) -> None: """Initialize a blueprint.""" try: - data = self.data = BLUEPRINT_SCHEMA(data) + data = self.data = schema(data) except vol.Invalid as err: raise InvalidBlueprint(expected_domain, path, data, err) from err @@ -197,6 +198,7 @@ class DomainBlueprints: logger: logging.Logger, blueprint_in_use: Callable[[HomeAssistant, str], bool], reload_blueprint_consumers: Callable[[HomeAssistant, str], Awaitable[None]], + blueprint_schema: Callable[[Any], Any], ) -> None: """Initialize a domain blueprints instance.""" self.hass = hass @@ -206,6 +208,7 @@ class DomainBlueprints: self._reload_blueprint_consumers = reload_blueprint_consumers self._blueprints: dict[str, Blueprint | None] = {} self._load_lock = asyncio.Lock() + self._blueprint_schema = blueprint_schema hass.data.setdefault(DOMAIN, {})[domain] = self @@ -233,7 +236,10 @@ class DomainBlueprints: raise FailedToLoad(self.domain, blueprint_path, err) from err return Blueprint( - blueprint_data, expected_domain=self.domain, path=blueprint_path + blueprint_data, + expected_domain=self.domain, + path=blueprint_path, + schema=self._blueprint_schema, ) def _load_blueprints(self) -> dict[str, Blueprint | BlueprintException | None]: diff --git a/homeassistant/components/blueprint/websocket_api.py b/homeassistant/components/blueprint/websocket_api.py index 9d3329d8195..3be925c7c8f 100644 --- a/homeassistant/components/blueprint/websocket_api.py +++ b/homeassistant/components/blueprint/websocket_api.py @@ -18,6 +18,7 @@ from homeassistant.util import yaml from . 
import importer, models from .const import DOMAIN from .errors import BlueprintException, FailedToLoad, FileAlreadyExists +from .schemas import BLUEPRINT_SCHEMA @callback @@ -174,7 +175,9 @@ async def ws_save_blueprint( try: yaml_data = cast(dict[str, Any], yaml.parse_yaml(msg["yaml"])) - blueprint = models.Blueprint(yaml_data, expected_domain=domain) + blueprint = models.Blueprint( + yaml_data, expected_domain=domain, schema=BLUEPRINT_SCHEMA + ) if "source_url" in msg: blueprint.update_metadata(source_url=msg["source_url"]) except HomeAssistantError as err: diff --git a/homeassistant/components/bluesound/__init__.py b/homeassistant/components/bluesound/__init__.py index cbe95fc3abf..82fe9b00d57 100644 --- a/homeassistant/components/bluesound/__init__.py +++ b/homeassistant/components/bluesound/__init__.py @@ -2,8 +2,8 @@ from dataclasses import dataclass -import aiohttp from pyblu import Player, SyncStatus +from pyblu.errors import PlayerUnreachableError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform @@ -14,7 +14,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .media_player import setup_services +from .services import setup_services CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -22,14 +22,14 @@ PLATFORMS = [Platform.MEDIA_PLAYER] @dataclass -class BluesoundData: +class BluesoundRuntimeData: """Bluesound data class.""" player: Player sync_status: SyncStatus -type BluesoundConfigEntry = ConfigEntry[BluesoundData] +type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @@ -51,14 +51,10 @@ async def async_setup_entry( async with Player(host, port, session=session, default_timeout=10) as player: try: sync_status = await player.sync_status(timeout=1) - except TimeoutError as ex: - raise ConfigEntryNotReady( - f"Timeout while connecting to {host}:{port}" - ) from ex - except aiohttp.ClientError as ex: + except PlayerUnreachableError as ex: raise ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex - config_entry.runtime_data = BluesoundData(player, sync_status) + config_entry.runtime_data = BluesoundRuntimeData(player, sync_status) await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) diff --git a/homeassistant/components/bluesound/config_flow.py b/homeassistant/components/bluesound/config_flow.py index aae527187d2..050b3ee4eac 100644 --- a/homeassistant/components/bluesound/config_flow.py +++ b/homeassistant/components/bluesound/config_flow.py @@ -3,8 +3,8 @@ import logging from typing import Any -import aiohttp from pyblu import Player, SyncStatus +from pyblu.errors import PlayerUnreachableError import voluptuous as vol from homeassistant.components import zeroconf @@ -43,7 +43,7 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN): ) as player: try: sync_status = await player.sync_status(timeout=1) - except (TimeoutError, aiohttp.ClientError): + except PlayerUnreachableError: errors["base"] = "cannot_connect" else: await self.async_set_unique_id( @@ -79,7 +79,7 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN): ) as player: try: sync_status = await player.sync_status(timeout=1) - except (TimeoutError, aiohttp.ClientError): + except PlayerUnreachableError: return self.async_abort(reason="cannot_connect") await self.async_set_unique_id( @@ -105,7 +105,7 @@ class 
BluesoundConfigFlow(ConfigFlow, domain=DOMAIN): discovery_info.host, self._port, session=session ) as player: sync_status = await player.sync_status(timeout=1) - except (TimeoutError, aiohttp.ClientError): + except PlayerUnreachableError: return self.async_abort(reason="cannot_connect") await self.async_set_unique_id(format_unique_id(sync_status.mac, self._port)) @@ -127,7 +127,9 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN): ) return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None) -> ConfigFlowResult: + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm the zeroconf setup.""" assert self._sync_status is not None assert self._host is not None diff --git a/homeassistant/components/bluesound/const.py b/homeassistant/components/bluesound/const.py index b7da4e31702..b1be33f6770 100644 --- a/homeassistant/components/bluesound/const.py +++ b/homeassistant/components/bluesound/const.py @@ -2,9 +2,5 @@ DOMAIN = "bluesound" INTEGRATION_TITLE = "Bluesound" -SERVICE_CLEAR_TIMER = "clear_sleep_timer" -SERVICE_JOIN = "join" -SERVICE_SET_TIMER = "set_sleep_timer" -SERVICE_UNJOIN = "unjoin" ATTR_BLUESOUND_GROUP = "bluesound_group" ATTR_MASTER = "master" diff --git a/homeassistant/components/bluesound/icons.json b/homeassistant/components/bluesound/icons.json index 8c886f12dfd..2c5e95291c1 100644 --- a/homeassistant/components/bluesound/icons.json +++ b/homeassistant/components/bluesound/icons.json @@ -1,8 +1,16 @@ { "services": { - "join": "mdi:link-variant", - "unjoin": "mdi:link-variant-off", - "set_sleep_timer": "mdi:sleep", - "clear_sleep_timer": "mdi:sleep-off" + "join": { + "service": "mdi:link-variant" + }, + "unjoin": { + "service": "mdi:link-variant-off" + }, + "set_sleep_timer": { + "service": "mdi:sleep" + }, + "clear_sleep_timer": { + "service": "mdi:sleep-off" + } } } diff --git a/homeassistant/components/bluesound/manifest.json b/homeassistant/components/bluesound/manifest.json index 64b8e8abffc..462112a8b78 100644 --- a/homeassistant/components/bluesound/manifest.json +++ b/homeassistant/components/bluesound/manifest.json @@ -6,7 +6,7 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/bluesound", "iot_class": "local_polling", - "requirements": ["pyblu==0.4.0"], + "requirements": ["pyblu==1.0.4"], "zeroconf": [ { "type": "_musc._tcp.local." 
diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 92f47977ee5..38ef78fad3a 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -7,10 +7,10 @@ from asyncio import CancelledError, Task from contextlib import suppress from datetime import datetime, timedelta import logging -from typing import TYPE_CHECKING, Any, NamedTuple +from typing import TYPE_CHECKING, Any -from aiohttp.client_exceptions import ClientError from pyblu import Input, Player, Preset, Status, SyncStatus +from pyblu.errors import PlayerUnreachableError import voluptuous as vol from homeassistant.components import media_source @@ -24,18 +24,8 @@ from homeassistant.components.media_player import ( async_process_play_media_url, ) from homeassistant.config_entries import SOURCE_IMPORT -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_HOST, - CONF_HOSTS, - CONF_NAME, - CONF_PORT, -) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - HomeAssistant, - ServiceCall, -) +from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, issue_registry as ir @@ -46,19 +36,9 @@ from homeassistant.helpers.device_registry import ( ) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import Throttle import homeassistant.util.dt as dt_util -from .const import ( - ATTR_BLUESOUND_GROUP, - ATTR_MASTER, - DOMAIN, - INTEGRATION_TITLE, - SERVICE_CLEAR_TIMER, - SERVICE_JOIN, - SERVICE_SET_TIMER, - SERVICE_UNJOIN, -) +from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE from .utils import format_unique_id if TYPE_CHECKING: @@ -66,6 +46,8 @@ if TYPE_CHECKING: _LOGGER = logging.getLogger(__name__) +SCAN_INTERVAL = timedelta(minutes=15) + DATA_BLUESOUND = DOMAIN DEFAULT_PORT = 11000 @@ -74,9 +56,7 @@ NODE_RETRY_INITIATION = timedelta(minutes=3) SYNC_STATUS_INTERVAL = timedelta(minutes=5) -UPDATE_CAPTURE_INTERVAL = timedelta(minutes=30) -UPDATE_PRESETS_INTERVAL = timedelta(minutes=30) -UPDATE_SERVICES_INTERVAL = timedelta(minutes=30) +POLL_TIMEOUT = 120 PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( { @@ -93,29 +73,6 @@ PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( } ) -BS_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids}) - -BS_JOIN_SCHEMA = BS_SCHEMA.extend({vol.Required(ATTR_MASTER): cv.entity_id}) - - -class ServiceMethodDetails(NamedTuple): - """Details for SERVICE_TO_METHOD mapping.""" - - method: str - schema: vol.Schema - - -SERVICE_TO_METHOD = { - SERVICE_JOIN: ServiceMethodDetails(method="async_join", schema=BS_JOIN_SCHEMA), - SERVICE_UNJOIN: ServiceMethodDetails(method="async_unjoin", schema=BS_SCHEMA), - SERVICE_SET_TIMER: ServiceMethodDetails( - method="async_increase_timer", schema=BS_SCHEMA - ), - SERVICE_CLEAR_TIMER: ServiceMethodDetails( - method="async_clear_timer", schema=BS_SCHEMA - ), -} - async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: """Import config entry from configuration.yaml.""" @@ -160,33 +117,6 @@ async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: ) -def setup_services(hass: 
HomeAssistant) -> None: - """Set up services for Bluesound component.""" - - async def async_service_handler(service: ServiceCall) -> None: - """Map services to method of Bluesound devices.""" - if not (method := SERVICE_TO_METHOD.get(service.service)): - return - - params = { - key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID - } - if entity_ids := service.data.get(ATTR_ENTITY_ID): - target_players = [ - player for player in hass.data[DOMAIN] if player.entity_id in entity_ids - ] - else: - target_players = hass.data[DOMAIN] - - for player in target_players: - await getattr(player, method.method)(**params) - - for service, method in SERVICE_TO_METHOD.items(): - hass.services.async_register( - DOMAIN, service, async_service_handler, schema=method.schema - ) - - async def async_setup_entry( hass: HomeAssistant, config_entry: BluesoundConfigEntry, @@ -201,7 +131,7 @@ async def async_setup_entry( ) hass.data[DATA_BLUESOUND].append(bluesound_player) - async_add_entities([bluesound_player]) + async_add_entities([bluesound_player], update_before_add=True) async def async_setup_platform( @@ -237,9 +167,10 @@ class BluesoundPlayer(MediaPlayerEntity): """Initialize the media player.""" self.host = host self.port = port - self._polling_task: Task[None] | None = None # The actual polling task. + self._poll_status_loop_task: Task[None] | None = None + self._poll_sync_status_loop_task: Task[None] | None = None self._id = sync_status.id - self._last_status_update = None + self._last_status_update: datetime | None = None self._sync_status = sync_status self._status: Status | None = None self._inputs: list[Input] = [] @@ -247,7 +178,7 @@ class BluesoundPlayer(MediaPlayerEntity): self._muted = False self._master: BluesoundPlayer | None = None self._is_master = False - self._group_name = None + self._group_name: str | None = None self._group_list: list[str] = [] self._bluesound_device_name = sync_status.name self._player = player @@ -273,20 +204,119 @@ class BluesoundPlayer(MediaPlayerEntity): via_device=(DOMAIN, format_mac(sync_status.mac)), ) - @staticmethod - def _try_get_index(string, search_string): - """Get the index.""" - try: - return string.index(search_string) - except ValueError: - return -1 + async def _poll_status_loop(self) -> None: + """Loop which polls the status of the player.""" + while True: + try: + await self.async_update_status() + except PlayerUnreachableError: + _LOGGER.error( + "Node %s:%s is offline, retrying later", self.host, self.port + ) + await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) + except CancelledError: + _LOGGER.debug( + "Stopping the polling of node %s:%s", self.host, self.port + ) + return + except: # noqa: E722 - this loop should never stop + _LOGGER.exception( + "Unexpected error for %s:%s, retrying later", self.host, self.port + ) + await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) - async def force_update_sync_status(self) -> bool: + async def _poll_sync_status_loop(self) -> None: + """Loop which polls the sync status of the player.""" + while True: + try: + await self.update_sync_status() + except PlayerUnreachableError: + await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) + except CancelledError: + raise + except: # noqa: E722 - all errors must be caught for this loop + await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) + + async def async_added_to_hass(self) -> None: + """Start the polling task.""" + await super().async_added_to_hass() + + self._poll_status_loop_task = self.hass.async_create_background_task( + self._poll_status_loop(), + 
name=f"bluesound.poll_status_loop_{self.host}:{self.port}", + ) + self._poll_sync_status_loop_task = self.hass.async_create_background_task( + self._poll_sync_status_loop(), + name=f"bluesound.poll_sync_status_loop_{self.host}:{self.port}", + ) + + async def async_will_remove_from_hass(self) -> None: + """Stop the polling task.""" + await super().async_will_remove_from_hass() + + assert self._poll_status_loop_task is not None + if self._poll_status_loop_task.cancel(): + # the sleeps in _poll_loop will raise CancelledError + with suppress(CancelledError): + await self._poll_status_loop_task + + assert self._poll_sync_status_loop_task is not None + if self._poll_sync_status_loop_task.cancel(): + # the sleeps in _poll_sync_status_loop will raise CancelledError + with suppress(CancelledError): + await self._poll_sync_status_loop_task + + self.hass.data[DATA_BLUESOUND].remove(self) + + async def async_update(self) -> None: + """Update internal status of the entity.""" + if not self.available: + return + + with suppress(PlayerUnreachableError): + await self.async_update_presets() + await self.async_update_captures() + + async def async_update_status(self) -> None: + """Use the poll session to always get the status of the player.""" + etag = None + if self._status is not None: + etag = self._status.etag + + try: + status = await self._player.status( + etag=etag, poll_timeout=POLL_TIMEOUT, timeout=POLL_TIMEOUT + 5 + ) + + self._attr_available = True + self._last_status_update = dt_util.utcnow() + self._status = status + + self.async_write_ha_state() + except PlayerUnreachableError: + self._attr_available = False + self._last_status_update = None + self._status = None + self.async_write_ha_state() + _LOGGER.error( + "Client connection error, marking %s as offline", + self._bluesound_device_name, + ) + raise + + async def update_sync_status(self) -> None: """Update the internal status.""" - sync_status = await self._player.sync_status() + etag = None + if self._sync_status: + etag = self._sync_status.etag + sync_status = await self._player.sync_status( + etag=etag, poll_timeout=POLL_TIMEOUT, timeout=POLL_TIMEOUT + 5 + ) self._sync_status = sync_status + self._group_list = self.rebuild_bluesound_group() + if sync_status.master is not None: self._is_master = False master_id = f"{sync_status.master.ip}:{sync_status.master.port}" @@ -307,134 +337,18 @@ class BluesoundPlayer(MediaPlayerEntity): slaves = self._sync_status.slaves self._is_master = slaves is not None - return True + self.async_write_ha_state() - async def _start_poll_command(self): - """Loop which polls the status of the player.""" - while True: - try: - await self.async_update_status() - except (TimeoutError, ClientError): - _LOGGER.error( - "Node %s:%s is offline, retrying later", self.host, self.port - ) - await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) - except CancelledError: - _LOGGER.debug( - "Stopping the polling of node %s:%s", self.host, self.port - ) - return - except Exception: - _LOGGER.exception( - "Unexpected error in %s:%s, retrying later", self.host, self.port - ) - await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) - - async def async_added_to_hass(self) -> None: - """Start the polling task.""" - await super().async_added_to_hass() - - self._polling_task = self.hass.async_create_background_task( - self._start_poll_command(), - name=f"bluesound.polling_{self.host}:{self.port}", - ) - - async def async_will_remove_from_hass(self) -> None: - """Stop the polling task.""" - await super().async_will_remove_from_hass() - - assert 
self._polling_task is not None - if self._polling_task.cancel(): - await self._polling_task - - self.hass.data[DATA_BLUESOUND].remove(self) - - async def async_update(self) -> None: - """Update internal status of the entity.""" - if not self.available: - return - - with suppress(TimeoutError): - await self.async_update_sync_status() - await self.async_update_presets() - await self.async_update_captures() - - async def async_update_status(self): - """Use the poll session to always get the status of the player.""" - etag = None - if self._status is not None: - etag = self._status.etag - - try: - status = await self._player.status(etag=etag, poll_timeout=120, timeout=125) - - self._attr_available = True - self._last_status_update = dt_util.utcnow() - self._status = status - - group_name = status.group_name - if group_name != self._group_name: - _LOGGER.debug("Group name change detected on device: %s", self.id) - self._group_name = group_name - - # rebuild ordered list of entity_ids that are in the group, master is first - self._group_list = self.rebuild_bluesound_group() - - # the sleep is needed to make sure that the - # devices is synced - await asyncio.sleep(1) - await self.async_trigger_sync_on_all() - elif self.is_grouped: - # when player is grouped we need to fetch volume from - # sync_status. We will force an update if the player is - # grouped this isn't a foolproof solution. A better - # solution would be to fetch sync_status more often when - # the device is playing. This would solve a lot of - # problems. This change will be done when the - # communication is moved to a separate library - with suppress(TimeoutError): - await self.force_update_sync_status() - - self.async_write_ha_state() - except (TimeoutError, ClientError): - self._attr_available = False - self._last_status_update = None - self._status = None - self.async_write_ha_state() - _LOGGER.error( - "Client connection error, marking %s as offline", - self._bluesound_device_name, - ) - raise - - async def async_trigger_sync_on_all(self): - """Trigger sync status update on all devices.""" - _LOGGER.debug("Trigger sync status on all devices") - - for player in self.hass.data[DATA_BLUESOUND]: - await player.force_update_sync_status() - - @Throttle(SYNC_STATUS_INTERVAL) - async def async_update_sync_status(self): - """Update sync status.""" - await self.force_update_sync_status() - - @Throttle(UPDATE_CAPTURE_INTERVAL) - async def async_update_captures(self) -> list[Input] | None: + async def async_update_captures(self) -> None: """Update Capture sources.""" inputs = await self._player.inputs() self._inputs = inputs - return inputs - - @Throttle(UPDATE_PRESETS_INTERVAL) - async def async_update_presets(self) -> list[Preset] | None: + async def async_update_presets(self) -> None: """Update Presets.""" presets = await self._player.presets() self._presets = presets - return presets - @property def state(self) -> MediaPlayerState: """Return the state of the device.""" @@ -444,12 +358,13 @@ class BluesoundPlayer(MediaPlayerEntity): if self.is_grouped and not self.is_master: return MediaPlayerState.IDLE - status = self._status.state - if status in ("pause", "stop"): - return MediaPlayerState.PAUSED - if status in ("stream", "play"): - return MediaPlayerState.PLAYING - return MediaPlayerState.IDLE + match self._status.state: + case "pause": + return MediaPlayerState.PAUSED + case "stream" | "play": + return MediaPlayerState.PLAYING + case _: + return MediaPlayerState.IDLE @property def media_title(self) -> str | None: @@ -522,7 +437,7 
@@ class BluesoundPlayer(MediaPlayerEntity): if duration is None: return None - return duration + return int(duration) @property def media_position_updated_at(self) -> datetime | None: @@ -566,6 +481,11 @@ class BluesoundPlayer(MediaPlayerEntity): """Return the device name as returned by the device.""" return self._bluesound_device_name + @property + def sync_status(self) -> SyncStatus: + """Return the sync status.""" + return self._sync_status + @property def source_list(self) -> list[str] | None: """List of available input sources.""" @@ -658,7 +578,7 @@ class BluesoundPlayer(MediaPlayerEntity): return shuffle - async def async_join(self, master): + async def async_join(self, master: str) -> None: """Join the player to a group.""" master_device = [ device @@ -693,23 +613,34 @@ class BluesoundPlayer(MediaPlayerEntity): def rebuild_bluesound_group(self) -> list[str]: """Rebuild the list of entities in speaker group.""" - if self._group_name is None: + if self.sync_status.master is None and self.sync_status.slaves is None: return [] - device_group = self._group_name.split("+") + player_entities: list[BluesoundPlayer] = self.hass.data[DATA_BLUESOUND] - sorted_entities = sorted( - self.hass.data[DATA_BLUESOUND], - key=lambda entity: entity.is_master, - reverse=True, - ) - return [ - entity.name - for entity in sorted_entities - if entity.bluesound_device_name in device_group + leader_sync_status: SyncStatus | None = None + if self.sync_status.master is None: + leader_sync_status = self.sync_status + else: + required_id = f"{self.sync_status.master.ip}:{self.sync_status.master.port}" + for x in player_entities: + if x.sync_status.id == required_id: + leader_sync_status = x.sync_status + break + + if leader_sync_status is None or leader_sync_status.slaves is None: + return [] + + follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.slaves] + follower_names = [ + x.sync_status.name + for x in player_entities + if x.sync_status.id in follower_ids ] + follower_names.insert(0, leader_sync_status.name) + return follower_names - async def async_unjoin(self): + async def async_unjoin(self) -> None: """Unjoin the player from a group.""" if self._master is None: return @@ -717,11 +648,11 @@ class BluesoundPlayer(MediaPlayerEntity): _LOGGER.debug("Trying to unjoin player: %s", self.id) await self._master.async_remove_slave(self) - async def async_add_slave(self, slave_device: BluesoundPlayer): + async def async_add_slave(self, slave_device: BluesoundPlayer) -> None: """Add slave to master.""" await self._player.add_slave(slave_device.host, slave_device.port) - async def async_remove_slave(self, slave_device: BluesoundPlayer): + async def async_remove_slave(self, slave_device: BluesoundPlayer) -> None: """Remove slave to master.""" await self._player.remove_slave(slave_device.host, slave_device.port) @@ -729,7 +660,7 @@ class BluesoundPlayer(MediaPlayerEntity): """Increase sleep time on player.""" return await self._player.sleep_timer() - async def async_clear_timer(self): + async def async_clear_timer(self) -> None: """Clear sleep timer on player.""" sleep = 1 while sleep > 0: @@ -753,6 +684,9 @@ class BluesoundPlayer(MediaPlayerEntity): if preset.name == source: url = preset.url + if url is None: + raise ServiceValidationError(f"Source {source} not found") + await self._player.play_url(url) async def async_clear_playlist(self) -> None: @@ -824,24 +758,24 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_volume_up(self) -> None: """Volume up the media player.""" if self.volume_level 
is None: - return None + return new_volume = self.volume_level + 0.01 new_volume = min(1, new_volume) - return await self.async_set_volume_level(new_volume) + await self.async_set_volume_level(new_volume) async def async_volume_down(self) -> None: """Volume down the media player.""" if self.volume_level is None: - return None + return new_volume = self.volume_level - 0.01 new_volume = max(0, new_volume) - return await self.async_set_volume_level(new_volume) + await self.async_set_volume_level(new_volume) async def async_set_volume_level(self, volume: float) -> None: """Send volume_up command to media player.""" - volume = int(volume * 100) + volume = int(round(volume * 100)) volume = min(100, volume) volume = max(0, volume) diff --git a/homeassistant/components/bluesound/services.py b/homeassistant/components/bluesound/services.py new file mode 100644 index 00000000000..06a507420f8 --- /dev/null +++ b/homeassistant/components/bluesound/services.py @@ -0,0 +1,68 @@ +"""Support for Bluesound devices.""" + +from __future__ import annotations + +from typing import NamedTuple + +import voluptuous as vol + +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import config_validation as cv + +from .const import ATTR_MASTER, DOMAIN + +SERVICE_CLEAR_TIMER = "clear_sleep_timer" +SERVICE_JOIN = "join" +SERVICE_SET_TIMER = "set_sleep_timer" +SERVICE_UNJOIN = "unjoin" + +BS_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids}) + +BS_JOIN_SCHEMA = BS_SCHEMA.extend({vol.Required(ATTR_MASTER): cv.entity_id}) + + +class ServiceMethodDetails(NamedTuple): + """Details for SERVICE_TO_METHOD mapping.""" + + method: str + schema: vol.Schema + + +SERVICE_TO_METHOD = { + SERVICE_JOIN: ServiceMethodDetails(method="async_join", schema=BS_JOIN_SCHEMA), + SERVICE_UNJOIN: ServiceMethodDetails(method="async_unjoin", schema=BS_SCHEMA), + SERVICE_SET_TIMER: ServiceMethodDetails( + method="async_increase_timer", schema=BS_SCHEMA + ), + SERVICE_CLEAR_TIMER: ServiceMethodDetails( + method="async_clear_timer", schema=BS_SCHEMA + ), +} + + +def setup_services(hass: HomeAssistant) -> None: + """Set up services for Bluesound component.""" + + async def async_service_handler(service: ServiceCall) -> None: + """Map services to method of Bluesound devices.""" + if not (method := SERVICE_TO_METHOD.get(service.service)): + return + + params = { + key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID + } + if entity_ids := service.data.get(ATTR_ENTITY_ID): + target_players = [ + player for player in hass.data[DOMAIN] if player.entity_id in entity_ids + ] + else: + target_players = hass.data[DOMAIN] + + for player in target_players: + await getattr(player, method.method)(**params) + + for service, method in SERVICE_TO_METHOD.items(): + hass.services.async_register( + DOMAIN, service, async_service_handler, schema=method.schema + ) diff --git a/homeassistant/components/bluetooth/manager.py b/homeassistant/components/bluetooth/manager.py index 9355fca6cdc..e192423484c 100644 --- a/homeassistant/components/bluetooth/manager.py +++ b/homeassistant/components/bluetooth/manager.py @@ -20,7 +20,9 @@ from homeassistant.core import ( callback as hass_callback, ) from homeassistant.helpers import discovery_flow +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from .const import DOMAIN from .match import ( ADDRESS, CALLBACK, @@ -75,12 +77,18 @@ class HomeAssistantBluetoothManager(BluetoothManager): self, 
service_info: BluetoothServiceInfoBleak ) -> None: """Trigger discovery for matching domains.""" + discovery_key = discovery_flow.DiscoveryKey( + domain=DOMAIN, + key=service_info.address, + version=1, + ) for domain in self._integration_matcher.match_domains(service_info): discovery_flow.async_create_flow( self.hass, domain, {"source": config_entries.SOURCE_BLUETOOTH}, service_info, + discovery_key=discovery_key, ) @hass_callback @@ -110,12 +118,21 @@ class HomeAssistantBluetoothManager(BluetoothManager): except Exception: _LOGGER.exception("Error in bluetooth callback") + if not matched_domains: + return # avoid creating DiscoveryKey if there are no matches + + discovery_key = discovery_flow.DiscoveryKey( + domain=DOMAIN, + key=service_info.address, + version=1, + ) for domain in matched_domains: discovery_flow.async_create_flow( self.hass, domain, {"source": config_entries.SOURCE_BLUETOOTH}, service_info, + discovery_key=discovery_key, ) def _address_disappeared(self, address: str) -> None: @@ -145,6 +162,11 @@ class HomeAssistantBluetoothManager(BluetoothManager): continue seen.add(address) self._async_trigger_matching_discovery(service_info) + async_dispatcher_connect( + self.hass, + config_entries.signal_discovered_config_entry_removed(DOMAIN), + self._handle_config_entry_removed, + ) def async_register_callback( self, @@ -230,3 +252,16 @@ class HomeAssistantBluetoothManager(BluetoothManager): unregister = super().async_register_scanner(scanner, connection_slots) return partial(self._async_unregister_scanner, scanner, unregister) + + @hass_callback + def _handle_config_entry_removed( + self, + entry: config_entries.ConfigEntry, + ) -> None: + """Handle config entry changes.""" + for discovery_key in entry.discovery_keys[DOMAIN]: + if discovery_key.version != 1 or not isinstance(discovery_key.key, str): + continue + address = discovery_key.key + _LOGGER.debug("Rediscover address %s", address) + self.async_rediscover_address(address) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 93b07cb120c..e25c077b57f 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -14,12 +14,12 @@ ], "quality_scale": "internal", "requirements": [ - "bleak==0.22.2", - "bleak-retry-connector==3.5.0", - "bluetooth-adapters==0.19.4", + "bleak==0.22.3", + "bleak-retry-connector==3.6.0", + "bluetooth-adapters==0.20.2", "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.20.0", - "dbus-fast==2.23.0", - "habluetooth==3.3.2" + "dbus-fast==2.24.3", + "habluetooth==3.6.0" ] } diff --git a/homeassistant/components/bluetooth/passive_update_coordinator.py b/homeassistant/components/bluetooth/passive_update_coordinator.py index df06a7c534b..be232f87b24 100644 --- a/homeassistant/components/bluetooth/passive_update_coordinator.py +++ b/homeassistant/components/bluetooth/passive_update_coordinator.py @@ -98,7 +98,7 @@ class PassiveBluetoothDataUpdateCoordinator( self.async_update_listeners() -class PassiveBluetoothCoordinatorEntity( +class PassiveBluetoothCoordinatorEntity( # pylint: disable=hass-enforce-class-module BaseCoordinatorEntity[_PassiveBluetoothDataUpdateCoordinatorT] ): """A class for entities using DataUpdateCoordinator.""" diff --git a/homeassistant/components/bluetooth/passive_update_processor.py b/homeassistant/components/bluetooth/passive_update_processor.py index 3e7e4e96659..8f66a3582ea 100644 --- a/homeassistant/components/bluetooth/passive_update_processor.py +++ 
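The bluetooth manager hunk above starts tagging discovery flows with a DiscoveryKey and, via signal_discovered_config_entry_removed, rediscovers the address when an entry created from such a flow is deleted. The two sides of that boil down to something like the helpers below; trigger_discovery and addresses_to_rediscover are illustrative wrappers, while the DiscoveryKey fields mirror the diff:

from homeassistant import config_entries
from homeassistant.components.bluetooth import BluetoothServiceInfoBleak
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import discovery_flow


@callback
def trigger_discovery(
    hass: HomeAssistant, domain: str, service_info: BluetoothServiceInfoBleak
) -> None:
    """Create a discovery flow carrying a versioned DiscoveryKey."""
    key = discovery_flow.DiscoveryKey(
        domain="bluetooth", key=service_info.address, version=1
    )
    discovery_flow.async_create_flow(
        hass,
        domain,
        {"source": config_entries.SOURCE_BLUETOOTH},
        service_info,
        discovery_key=key,
    )


@callback
def addresses_to_rediscover(entry: config_entries.ConfigEntry) -> list[str]:
    """Collect addresses from an entry's bluetooth discovery keys on removal."""
    return [
        discovery_key.key
        for discovery_key in entry.discovery_keys.get("bluetooth", ())
        if discovery_key.version == 1 and isinstance(discovery_key.key, str)
    ]

In the manager each collected address is then handed to async_rediscover_address so the device can be offered for setup again.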
b/homeassistant/components/bluetooth/passive_update_processor.py @@ -597,6 +597,7 @@ class PassiveBluetoothDataProcessor[_T, _DataT]: self.async_update_listeners(new_data, was_available, changed_entity_keys) +# pylint: disable-next=hass-enforce-class-module class PassiveBluetoothProcessorEntity[ _PassiveBluetoothDataProcessorT: PassiveBluetoothDataProcessor[Any, Any] ](Entity): diff --git a/homeassistant/components/bluetooth_le_tracker/device_tracker.py b/homeassistant/components/bluetooth_le_tracker/device_tracker.py index 24b03b2f566..25e620ff15d 100644 --- a/homeassistant/components/bluetooth_le_tracker/device_tracker.py +++ b/homeassistant/components/bluetooth_le_tracker/device_tracker.py @@ -194,7 +194,7 @@ async def async_setup_scanner( # noqa: C901 if track_new: if mac not in devs_to_track and mac not in devs_no_track: - _LOGGER.info("Discovered Bluetooth LE device %s", mac) + _LOGGER.debug("Discovered Bluetooth LE device %s", mac) hass.async_create_task( async_see_device(mac, service_info.name, new_device=True) ) diff --git a/homeassistant/components/bluetooth_le_tracker/manifest.json b/homeassistant/components/bluetooth_le_tracker/manifest.json index 79f885cad18..4abf5f7607e 100644 --- a/homeassistant/components/bluetooth_le_tracker/manifest.json +++ b/homeassistant/components/bluetooth_le_tracker/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/bluetooth_le_tracker", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/bluetooth_tracker/icons.json b/homeassistant/components/bluetooth_tracker/icons.json index 650bf0b6d19..217f1240893 100644 --- a/homeassistant/components/bluetooth_tracker/icons.json +++ b/homeassistant/components/bluetooth_tracker/icons.json @@ -1,5 +1,7 @@ { "services": { - "update": "mdi:update" + "update": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/bluetooth_tracker/manifest.json b/homeassistant/components/bluetooth_tracker/manifest.json index 0a0356e6669..8fb35b311c9 100644 --- a/homeassistant/components/bluetooth_tracker/manifest.json +++ b/homeassistant/components/bluetooth_tracker/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bluetooth_tracker", "iot_class": "local_polling", "loggers": ["bluetooth", "bt_proximity"], + "quality_scale": "legacy", "requirements": ["bt-proximity==0.2.1", "PyBluez==0.22"] } diff --git a/homeassistant/components/bmw_connected_drive/__init__.py b/homeassistant/components/bmw_connected_drive/__init__.py index 9e43cfc4187..7b6fb4119db 100644 --- a/homeassistant/components/bmw_connected_drive/__init__.py +++ b/homeassistant/components/bmw_connected_drive/__init__.py @@ -2,12 +2,10 @@ from __future__ import annotations -from dataclasses import dataclass import logging import voluptuous as vol -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE_ID, CONF_ENTITY_ID, CONF_NAME, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -18,7 +16,7 @@ from homeassistant.helpers import ( import homeassistant.helpers.config_validation as cv from .const import ATTR_VIN, CONF_READ_ONLY, DOMAIN -from .coordinator import BMWDataUpdateCoordinator +from .coordinator import BMWConfigEntry, BMWDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -49,19 +47,9 @@ PLATFORMS = [ SERVICE_UPDATE_STATE = 
"update_state" -type BMWConfigEntry = ConfigEntry[BMWData] - - -@dataclass -class BMWData: - """Class to store BMW runtime data.""" - - coordinator: BMWDataUpdateCoordinator - - @callback def _async_migrate_options_from_data_if_missing( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: BMWConfigEntry ) -> None: data = dict(entry.data) options = dict(entry.options) @@ -127,7 +115,7 @@ async def _async_migrate_entries( return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool: """Set up BMW Connected Drive from a config entry.""" _async_migrate_options_from_data_if_missing(hass, entry) @@ -137,11 +125,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Set up one data coordinator per account/config entry coordinator = BMWDataUpdateCoordinator( hass, - entry=entry, + config_entry=entry, ) await coordinator.async_config_entry_first_refresh() - entry.runtime_data = BMWData(coordinator) + entry.runtime_data = coordinator # Set up all platforms except notify await hass.config_entries.async_forward_entry_setups( @@ -175,7 +163,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms( diff --git a/homeassistant/components/bmw_connected_drive/binary_sensor.py b/homeassistant/components/bmw_connected_drive/binary_sensor.py index 65bdfca997b..5a58c707d6a 100644 --- a/homeassistant/components/bmw_connected_drive/binary_sensor.py +++ b/homeassistant/components/bmw_connected_drive/binary_sensor.py @@ -26,6 +26,8 @@ from .const import UNIT_MAP from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 0 + _LOGGER = logging.getLogger(__name__) @@ -201,7 +203,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the BMW binary sensors from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities = [ BMWBinarySensor(coordinator, vehicle, description, hass.config.units) diff --git a/homeassistant/components/bmw_connected_drive/button.py b/homeassistant/components/bmw_connected_drive/button.py index e6bd92b92d7..a7c31d0ef79 100644 --- a/homeassistant/components/bmw_connected_drive/button.py +++ b/homeassistant/components/bmw_connected_drive/button.py @@ -16,12 +16,14 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .entity import BMWBaseEntity if TYPE_CHECKING: from .coordinator import BMWDataUpdateCoordinator +PARALLEL_UPDATES = 1 + _LOGGER = logging.getLogger(__name__) @@ -53,7 +55,6 @@ BUTTON_TYPES: tuple[BMWButtonEntityDescription, ...] 
= ( BMWButtonEntityDescription( key="deactivate_air_conditioning", translation_key="deactivate_air_conditioning", - name="Deactivate air conditioning", remote_function=lambda vehicle: vehicle.remote_services.trigger_remote_air_conditioning_stop(), is_available=lambda vehicle: vehicle.is_remote_climate_stop_enabled, ), @@ -71,7 +72,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the BMW buttons from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWButton] = [] @@ -109,6 +110,10 @@ class BMWButton(BMWBaseEntity, ButtonEntity): try: await self.entity_description.remote_function(self.vehicle) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/config_flow.py b/homeassistant/components/bmw_connected_drive/config_flow.py index 636274a01ad..04fb3842dfa 100644 --- a/homeassistant/components/bmw_connected_drive/config_flow.py +++ b/homeassistant/components/bmw_connected_drive/config_flow.py @@ -7,23 +7,38 @@ from typing import Any from bimmer_connected.api.authentication import MyBMWAuthentication from bimmer_connected.api.regions import get_region_from_name -from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError +from bimmer_connected.models import ( + MyBMWAPIError, + MyBMWAuthError, + MyBMWCaptchaMissingError, +) from httpx import RequestError import voluptuous as vol from homeassistant.config_entries import ( - ConfigEntry, + SOURCE_REAUTH, + SOURCE_RECONFIGURE, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_SOURCE, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig +from homeassistant.util.ssl import get_default_context from . 
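The button hunk above turns a bare HomeAssistantError into a translatable one. The general shape, with do_remote_call standing in for the vendor library and RuntimeError for its exception type (neither is from this diff):

from typing import Any

from homeassistant.exceptions import HomeAssistantError

DOMAIN = "my_domain"  # illustrative


async def do_remote_call(vehicle: Any) -> None:
    """Stand-in for the vendor library call."""
    raise RuntimeError("remote service refused")


async def press(vehicle: Any) -> None:
    """Surface library failures as a translated Home Assistant error."""
    try:
        await do_remote_call(vehicle)
    except RuntimeError as ex:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="remote_service_error",
            translation_placeholders={"exception": str(ex)},
        ) from ex

For the message to render, the key has to be defined under the exceptions section of the integration's strings.json, with {exception} available as a placeholder.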
import DOMAIN -from .const import CONF_ALLOWED_REGIONS, CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN +from .const import ( + CONF_ALLOWED_REGIONS, + CONF_CAPTCHA_REGIONS, + CONF_CAPTCHA_TOKEN, + CONF_CAPTCHA_URL, + CONF_GCID, + CONF_READ_ONLY, + CONF_REFRESH_TOKEN, +) +from .coordinator import BMWConfigEntry DATA_SCHEMA = vol.Schema( { @@ -35,7 +50,20 @@ DATA_SCHEMA = vol.Schema( translation_key="regions", ) ), - } + }, + extra=vol.REMOVE_EXTRA, +) +RECONFIGURE_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + }, + extra=vol.REMOVE_EXTRA, +) +CAPTCHA_SCHEMA = vol.Schema( + { + vol.Required(CONF_CAPTCHA_TOKEN): str, + }, + extra=vol.REMOVE_EXTRA, ) @@ -48,10 +76,14 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, data[CONF_USERNAME], data[CONF_PASSWORD], get_region_from_name(data[CONF_REGION]), + hcaptcha_token=data.get(CONF_CAPTCHA_TOKEN), + verify=get_default_context(), ) try: await auth.login() + except MyBMWCaptchaMissingError as ex: + raise MissingCaptcha from ex except MyBMWAuthError as ex: raise InvalidAuth from ex except (MyBMWAPIError, RequestError) as ex: @@ -71,76 +103,134 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None + data: dict[str, Any] = {} + + _existing_entry_data: Mapping[str, Any] | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - errors: dict[str, str] = {} + errors: dict[str, str] = self.data.pop("errors", {}) - if user_input is not None: + if user_input is not None and not errors: unique_id = f"{user_input[CONF_REGION]}-{user_input[CONF_USERNAME]}" + await self.async_set_unique_id(unique_id) - if not self._reauth_entry: - await self.async_set_unique_id(unique_id) + # Unique ID cannot change for reauth/reconfigure + if self.source not in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: self._abort_if_unique_id_configured() + # Store user input for later use + self.data.update(user_input) + + # North America and Rest of World require captcha token + if ( + self.data.get(CONF_REGION) in CONF_CAPTCHA_REGIONS + and CONF_CAPTCHA_TOKEN not in self.data + ): + return await self.async_step_captcha() + info = None try: - info = await validate_input(self.hass, user_input) - entry_data = { - **user_input, - CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN), - CONF_GCID: info.get(CONF_GCID), - } + info = await validate_input(self.hass, self.data) + except MissingCaptcha: + errors["base"] = "missing_captcha" except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" + finally: + self.data.pop(CONF_CAPTCHA_TOKEN, None) if info: - if self._reauth_entry: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=entry_data - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) - ) - return self.async_abort(reason="reauth_successful") + entry_data = { + **self.data, + CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN), + CONF_GCID: info.get(CONF_GCID), + } + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=entry_data + ) + if self.source == SOURCE_RECONFIGURE: + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data=entry_data, + ) return self.async_create_entry( title=info["title"], data=entry_data, ) schema = self.add_suggested_values_to_schema( - DATA_SCHEMA, self._reauth_entry.data if 
self._reauth_entry else {} + DATA_SCHEMA, + self._existing_entry_data or self.data, ) return self.async_show_form(step_id="user", data_schema=schema, errors=errors) + async def async_step_change_password( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Show the change password step.""" + existing_data = ( + dict(self._existing_entry_data) if self._existing_entry_data else {} + ) + + if user_input is not None: + return await self.async_step_user(existing_data | user_input) + + return self.async_show_form( + step_id="change_password", + data_schema=RECONFIGURE_SCHEMA, + description_placeholders={ + CONF_USERNAME: existing_data[CONF_USERNAME], + CONF_REGION: existing_data[CONF_REGION], + }, + ) + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] + self._existing_entry_data = entry_data + return await self.async_step_change_password() + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + self._existing_entry_data = self._get_reconfigure_entry().data + return await self.async_step_change_password() + + async def async_step_captcha( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Show captcha form.""" + if user_input and user_input.get(CONF_CAPTCHA_TOKEN): + self.data[CONF_CAPTCHA_TOKEN] = user_input[CONF_CAPTCHA_TOKEN].strip() + return await self.async_step_user(self.data) + + return self.async_show_form( + step_id="captcha", + data_schema=CAPTCHA_SCHEMA, + description_placeholders={ + "captcha_url": CONF_CAPTCHA_URL.format(region=self.data[CONF_REGION]) + }, ) - return await self.async_step_user() @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: BMWConfigEntry, ) -> BMWOptionsFlow: """Return a MyBMW option flow.""" - return BMWOptionsFlow(config_entry) + return BMWOptionsFlow() -class BMWOptionsFlow(OptionsFlowWithConfigEntry): +class BMWOptionsFlow(OptionsFlow): """Handle a option flow for MyBMW.""" async def async_step_init( @@ -184,3 +274,7 @@ class CannotConnect(HomeAssistantError): class InvalidAuth(HomeAssistantError): """Error to indicate there is invalid auth.""" + + +class MissingCaptcha(HomeAssistantError): + """Error to indicate the captcha token is missing.""" diff --git a/homeassistant/components/bmw_connected_drive/const.py b/homeassistant/components/bmw_connected_drive/const.py index 98d4acbfc91..750289e9d0a 100644 --- a/homeassistant/components/bmw_connected_drive/const.py +++ b/homeassistant/components/bmw_connected_drive/const.py @@ -8,10 +8,15 @@ ATTR_DIRECTION = "direction" ATTR_VIN = "vin" CONF_ALLOWED_REGIONS = ["china", "north_america", "rest_of_world"] +CONF_CAPTCHA_REGIONS = ["north_america", "rest_of_world"] CONF_READ_ONLY = "read_only" CONF_ACCOUNT = "account" CONF_REFRESH_TOKEN = "refresh_token" CONF_GCID = "gcid" +CONF_CAPTCHA_TOKEN = "captcha_token" +CONF_CAPTCHA_URL = ( + "https://bimmer-connected.readthedocs.io/en/stable/captcha/{region}.html" +) DATA_HASS_CONFIG = "hass_config" diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index 6e0ed2ab670..b54d9245bbd 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ 
b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -7,7 +7,12 @@ import logging from bimmer_connected.account import MyBMWAccount from bimmer_connected.api.regions import get_region_from_name -from bimmer_connected.models import GPSPosition, MyBMWAPIError, MyBMWAuthError +from bimmer_connected.models import ( + GPSPosition, + MyBMWAPIError, + MyBMWAuthError, + MyBMWCaptchaMissingError, +) from httpx import RequestError from homeassistant.config_entries import ConfigEntry @@ -15,39 +20,53 @@ from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util.ssl import get_default_context -from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN, SCAN_INTERVALS +from .const import ( + CONF_GCID, + CONF_READ_ONLY, + CONF_REFRESH_TOKEN, + DOMAIN as BMW_DOMAIN, + SCAN_INTERVALS, +) _LOGGER = logging.getLogger(__name__) +type BMWConfigEntry = ConfigEntry[BMWDataUpdateCoordinator] + + class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): """Class to manage fetching BMW data.""" account: MyBMWAccount + config_entry: BMWConfigEntry - def __init__(self, hass: HomeAssistant, *, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, *, config_entry: BMWConfigEntry) -> None: """Initialize account-wide BMW data updater.""" self.account = MyBMWAccount( - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - get_region_from_name(entry.data[CONF_REGION]), + config_entry.data[CONF_USERNAME], + config_entry.data[CONF_PASSWORD], + get_region_from_name(config_entry.data[CONF_REGION]), observer_position=GPSPosition(hass.config.latitude, hass.config.longitude), + verify=get_default_context(), ) - self.read_only = entry.options[CONF_READ_ONLY] - self._entry = entry + self.read_only: bool = config_entry.options[CONF_READ_ONLY] - if CONF_REFRESH_TOKEN in entry.data: + if CONF_REFRESH_TOKEN in config_entry.data: self.account.set_refresh_token( - refresh_token=entry.data[CONF_REFRESH_TOKEN], - gcid=entry.data.get(CONF_GCID), + refresh_token=config_entry.data[CONF_REFRESH_TOKEN], + gcid=config_entry.data.get(CONF_GCID), ) super().__init__( hass, _LOGGER, - name=f"{DOMAIN}-{entry.data['username']}", - update_interval=timedelta(seconds=SCAN_INTERVALS[entry.data[CONF_REGION]]), + config_entry=config_entry, + name=f"{BMW_DOMAIN}-{config_entry.data[CONF_USERNAME]}", + update_interval=timedelta( + seconds=SCAN_INTERVALS[config_entry.data[CONF_REGION]] + ), ) # Default to false on init so _async_update_data logic works @@ -59,30 +78,42 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): try: await self.account.get_vehicles() + except MyBMWCaptchaMissingError as err: + # If a captcha is required (user/password login flow), always trigger the reauth flow + raise ConfigEntryAuthFailed( + translation_domain=BMW_DOMAIN, + translation_key="missing_captcha", + ) from err except MyBMWAuthError as err: # Allow one retry interval before raising AuthFailed to avoid flaky API issues if self.last_update_success: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=BMW_DOMAIN, + translation_key="update_failed", + translation_placeholders={"exception": str(err)}, + ) from err # Clear refresh token and trigger reauth if previous update failed as well self._update_config_entry_refresh_token(None) - raise 
ConfigEntryAuthFailed(err) from err + raise ConfigEntryAuthFailed( + translation_domain=BMW_DOMAIN, + translation_key="invalid_auth", + ) from err except (MyBMWAPIError, RequestError) as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=BMW_DOMAIN, + translation_key="update_failed", + translation_placeholders={"exception": str(err)}, + ) from err if self.account.refresh_token != old_refresh_token: self._update_config_entry_refresh_token(self.account.refresh_token) - _LOGGER.debug( - "bimmer_connected: refresh token %s > %s", - old_refresh_token, - self.account.refresh_token, - ) def _update_config_entry_refresh_token(self, refresh_token: str | None) -> None: """Update or delete the refresh_token in the Config Entry.""" data = { - **self._entry.data, + **self.config_entry.data, CONF_REFRESH_TOKEN: refresh_token, } if not refresh_token: data.pop(CONF_REFRESH_TOKEN) - self.hass.config_entries.async_update_entry(self._entry, data=data) + self.hass.config_entries.async_update_entry(self.config_entry, data=data) diff --git a/homeassistant/components/bmw_connected_drive/device_tracker.py b/homeassistant/components/bmw_connected_drive/device_tracker.py index 8266576e1d5..74df8693f7a 100644 --- a/homeassistant/components/bmw_connected_drive/device_tracker.py +++ b/homeassistant/components/bmw_connected_drive/device_tracker.py @@ -7,7 +7,7 @@ from typing import Any from bimmer_connected.vehicle import MyBMWVehicle -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import TrackerEntity from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -16,6 +16,8 @@ from .const import ATTR_DIRECTION from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 0 + _LOGGER = logging.getLogger(__name__) @@ -25,7 +27,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW tracker from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWDeviceTracker] = [] for vehicle in coordinator.account.vehicles: @@ -47,7 +49,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity): _attr_force_update = False _attr_translation_key = "car" - _attr_icon = "mdi:car" + _attr_name = None def __init__( self, @@ -56,9 +58,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity): ) -> None: """Initialize the Tracker.""" super().__init__(coordinator, vehicle) - self._attr_unique_id = vehicle.vin - self._attr_name = None @property def extra_state_attributes(self) -> dict[str, Any]: @@ -84,8 +84,3 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity): and self.vehicle.vehicle_location.location else None ) - - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS diff --git a/homeassistant/components/bmw_connected_drive/diagnostics.py b/homeassistant/components/bmw_connected_drive/diagnostics.py index a3a8f5f942e..3f357c3ae79 100644 --- a/homeassistant/components/bmw_connected_drive/diagnostics.py +++ b/homeassistant/components/bmw_connected_drive/diagnostics.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, Any from bimmer_connected.utils import MyBMWJSONEncoder -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from 
homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry @@ -16,6 +16,8 @@ from homeassistant.helpers.device_registry import DeviceEntry from . import BMWConfigEntry from .const import CONF_REFRESH_TOKEN +PARALLEL_UPDATES = 1 + if TYPE_CHECKING: from bimmer_connected.vehicle import MyBMWVehicle @@ -49,7 +51,7 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, config_entry: BMWConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data coordinator.account.config.log_responses = True await coordinator.account.get_vehicles(force_init=True) @@ -75,7 +77,7 @@ async def async_get_device_diagnostics( hass: HomeAssistant, config_entry: BMWConfigEntry, device: DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data coordinator.account.config.log_responses = True await coordinator.account.get_vehicles(force_init=True) diff --git a/homeassistant/components/bmw_connected_drive/lock.py b/homeassistant/components/bmw_connected_drive/lock.py index 3dfc0b1c4d4..4bec12e796b 100644 --- a/homeassistant/components/bmw_connected_drive/lock.py +++ b/homeassistant/components/bmw_connected_drive/lock.py @@ -14,11 +14,14 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 1 + DOOR_LOCK_STATE = "door_lock_state" + _LOGGER = logging.getLogger(__name__) @@ -28,7 +31,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW lock from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data if not coordinator.read_only: async_add_entities( @@ -67,7 +70,11 @@ class BMWLock(BMWBaseEntity, LockEntity): # Set the state to unknown if the command fails self._attr_is_locked = None self.async_write_ha_state() - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex finally: # Always update the listeners to get the latest state self.coordinator.async_update_listeners() @@ -87,7 +94,11 @@ class BMWLock(BMWBaseEntity, LockEntity): # Set the state to unknown if the command fails self._attr_is_locked = None self.async_write_ha_state() - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex finally: # Always update the listeners to get the latest state self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index 304973b816f..81928a59a52 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -6,6 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/bmw_connected_drive", "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], - "quality_scale": "platinum", - "requirements": ["bimmer-connected[china]==0.16.1"] + "requirements": ["bimmer-connected[china]==0.17.2"] } diff --git a/homeassistant/components/bmw_connected_drive/notify.py b/homeassistant/components/bmw_connected_drive/notify.py index 56523351e66..dfa0939e81f 100644 --- a/homeassistant/components/bmw_connected_drive/notify.py +++ b/homeassistant/components/bmw_connected_drive/notify.py @@ -20,7 +20,9 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN, BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry + +PARALLEL_UPDATES = 1 ATTR_LOCATION_ATTRIBUTES = ["street", "city", "postal_code", "country"] @@ -51,7 +53,7 @@ def get_service( targets = {} if ( config_entry - and (coordinator := config_entry.runtime_data.coordinator) + and (coordinator := config_entry.runtime_data) and not coordinator.read_only ): targets.update({v.name: v for v in coordinator.account.vehicles}) @@ -90,7 +92,7 @@ class BMWNotificationService(BaseNotificationService): except (vol.Invalid, TypeError, ValueError) as ex: raise ServiceValidationError( - translation_domain=DOMAIN, + translation_domain=BMW_DOMAIN, translation_key="invalid_poi", translation_placeholders={ "poi_exception": str(ex), @@ -104,4 +106,8 @@ class BMWNotificationService(BaseNotificationService): try: await vehicle.remote_services.trigger_send_poi(poi) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex diff --git a/homeassistant/components/bmw_connected_drive/number.py b/homeassistant/components/bmw_connected_drive/number.py index 54519ff9e6b..c6a328ecc20 100644 --- a/homeassistant/components/bmw_connected_drive/number.py +++ b/homeassistant/components/bmw_connected_drive/number.py @@ -18,10 +18,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . 
import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 1 + _LOGGER = logging.getLogger(__name__) @@ -59,7 +61,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW number from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWNumber] = [] @@ -107,6 +109,10 @@ class BMWNumber(BMWBaseEntity, NumberEntity): try: await self.entity_description.remote_service(self.vehicle, value) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/select.py b/homeassistant/components/bmw_connected_drive/select.py index 323768ad9eb..385b45fd9fa 100644 --- a/homeassistant/components/bmw_connected_drive/select.py +++ b/homeassistant/components/bmw_connected_drive/select.py @@ -15,10 +15,12 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 1 + _LOGGER = logging.getLogger(__name__) @@ -66,7 +68,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW lock from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWSelect] = [] @@ -121,6 +123,10 @@ class BMWSelect(BMWBaseEntity, SelectEntity): try: await self.entity_description.remote_service(self.vehicle, option) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/sensor.py b/homeassistant/components/bmw_connected_drive/sensor.py index fe0e835622b..b7be367d57d 100644 --- a/homeassistant/components/bmw_connected_drive/sensor.py +++ b/homeassistant/components/bmw_connected_drive/sensor.py @@ -34,6 +34,8 @@ from . 
import BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 0 + _LOGGER = logging.getLogger(__name__) @@ -80,7 +82,6 @@ SENSOR_TYPES: list[BMWSensorEntityDescription] = [ BMWSensorEntityDescription( key="fuel_and_battery.charging_target", translation_key="charging_target", - device_class=SensorDeviceClass.BATTERY, native_unit_of_measurement=PERCENTAGE, suggested_display_precision=0, is_available=lambda v: v.is_lsc_enabled and v.has_electric_drivetrain, @@ -192,7 +193,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW sensors from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities = [ BMWSensor(coordinator, vehicle, description) diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index 8121ab6f65f..edb0d5cfb12 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -2,27 +2,57 @@ "config": { "step": { "user": { + "description": "Connect to your MyBMW/MINI Connected account to retrieve vehicle data.", "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", "region": "ConnectedDrive Region" + }, + "data_description": { + "username": "The email address of your MyBMW/MINI Connected account.", + "password": "The password of your MyBMW/MINI Connected account.", + "region": "The region of your MyBMW/MINI Connected account." + } + }, + "captcha": { + "title": "Are you a robot?", + "description": "A captcha is required for BMW login. Visit the external website to complete the challenge and submit the form. Copy the resulting token into the field below.\n\n{captcha_url}\n\nNo data will be exposed outside of your Home Assistant instance.", + "data": { + "captcha_token": "Captcha token" + }, + "data_description": { + "captcha_token": "One-time token retrieved from the captcha challenge." + } + }, + "change_password": { + "description": "Update your MyBMW/MINI Connected password for account `{username}` in region `{region}`.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::bmw_connected_drive::config::step::user::data_description::password%]" } } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "missing_captcha": "Captcha validation missing" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "options": { "step": { "account_options": { "data": { - "read_only": "Read-only (only sensors and notify, no execution of services, no lock)" + "read_only": "Read-only mode" + }, + "data_description": { + "read_only": "Only retrieve values and send POI data, but don't offer any services that can change the vehicle state." 
} } } @@ -64,6 +94,9 @@ "activate_air_conditioning": { "name": "Activate air conditioning" }, + "deactivate_air_conditioning": { + "name": "Deactivate air conditioning" + }, "find_vehicle": { "name": "Find vehicle" } @@ -148,7 +181,8 @@ "cooling": "Cooling", "heating": "Heating", "inactive": "Inactive", - "standby": "Standby" + "standby": "Standby", + "ventilation": "Ventilation" } }, "front_left_current_pressure": { @@ -197,6 +231,18 @@ "exceptions": { "invalid_poi": { "message": "Invalid data for point of interest: {poi_exception}" + }, + "missing_captcha": { + "message": "Login requires captcha validation" + }, + "invalid_auth": { + "message": "[%key:common::config_flow::error::invalid_auth%]" + }, + "remote_service_error": { + "message": "Error executing remote service on vehicle. {exception}" + }, + "update_failed": { + "message": "Error updating vehicle data. {exception}" } } } diff --git a/homeassistant/components/bmw_connected_drive/switch.py b/homeassistant/components/bmw_connected_drive/switch.py index e8a02efdcfc..600ad41165a 100644 --- a/homeassistant/components/bmw_connected_drive/switch.py +++ b/homeassistant/components/bmw_connected_drive/switch.py @@ -14,10 +14,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity +PARALLEL_UPDATES = 1 + _LOGGER = logging.getLogger(__name__) @@ -67,7 +69,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the MyBMW switch from config entry.""" - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data entities: list[BMWSwitch] = [] @@ -109,8 +111,11 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity): try: await self.entity_description.remote_service_on(self.vehicle) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex - + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() async def async_turn_off(self, **kwargs: Any) -> None: @@ -118,6 +123,9 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity): try: await self.entity_description.remote_service_off(self.vehicle) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex - + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bond/button.py b/homeassistant/components/bond/button.py index a2d88bc6f6a..42915c7dc0b 100644 --- a/homeassistant/components/bond/button.py +++ b/homeassistant/components/bond/button.py @@ -237,6 +237,20 @@ BUTTONS: tuple[BondButtonEntityDescription, ...] 
= ( mutually_exclusive=Action.SET_POSITION, argument=STEP_SIZE, ), + BondButtonEntityDescription( + key=Action.OPEN_NEXT, + name="Open Next", + translation_key="open_next", + mutually_exclusive=None, + argument=None, + ), + BondButtonEntityDescription( + key=Action.CLOSE_NEXT, + name="Close Next", + translation_key="close_next", + mutually_exclusive=None, + argument=None, + ), ) diff --git a/homeassistant/components/bond/icons.json b/homeassistant/components/bond/icons.json index 35743d20e65..b150d1c1fa3 100644 --- a/homeassistant/components/bond/icons.json +++ b/homeassistant/components/bond/icons.json @@ -84,6 +84,12 @@ }, "decrease_position": { "default": "mdi:minus-box" + }, + "open_next": { + "default": "mdi:plus-box" + }, + "close_next": { + "default": "mdi:minus-box" } }, "light": { @@ -96,12 +102,26 @@ } }, "services": { - "set_fan_speed_tracked_state": "mdi:fan", - "set_switch_power_tracked_state": "mdi:toggle-switch-variant", - "set_light_power_tracked_state": "mdi:lightbulb", - "set_light_brightness_tracked_state": "mdi:lightbulb-on", - "start_increasing_brightness": "mdi:brightness-7", - "start_decreasing_brightness": "mdi:brightness-1", - "stop": "mdi:stop" + "set_fan_speed_tracked_state": { + "service": "mdi:fan" + }, + "set_switch_power_tracked_state": { + "service": "mdi:toggle-switch-variant" + }, + "set_light_power_tracked_state": { + "service": "mdi:lightbulb" + }, + "set_light_brightness_tracked_state": { + "service": "mdi:lightbulb-on" + }, + "start_increasing_brightness": { + "service": "mdi:brightness-7" + }, + "start_decreasing_brightness": { + "service": "mdi:brightness-1" + }, + "stop": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/bond/manifest.json b/homeassistant/components/bond/manifest.json index 08e4fb007b7..1d4c110f4fd 100644 --- a/homeassistant/components/bond/manifest.json +++ b/homeassistant/components/bond/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/bond", "iot_class": "local_push", "loggers": ["bond_async"], - "quality_scale": "platinum", "requirements": ["bond-async==0.2.1"], "zeroconf": ["_bond._tcp.local."] } diff --git a/homeassistant/components/bosch_shc/config_flow.py b/homeassistant/components/bosch_shc/config_flow.py index 6279f3ca932..58601152da5 100644 --- a/homeassistant/components/bosch_shc/config_flow.py +++ b/homeassistant/components/bosch_shc/config_flow.py @@ -39,16 +39,21 @@ HOST_SCHEMA = vol.Schema( ) -def write_tls_asset(hass: HomeAssistant, filename: str, asset: bytes) -> None: +def write_tls_asset( + hass: HomeAssistant, folder: str, filename: str, asset: bytes +) -> None: """Write the tls assets to disk.""" - makedirs(hass.config.path(DOMAIN), exist_ok=True) - with open(hass.config.path(DOMAIN, filename), "w", encoding="utf8") as file_handle: + makedirs(hass.config.path(DOMAIN, folder), exist_ok=True) + with open( + hass.config.path(DOMAIN, folder, filename), "w", encoding="utf8" + ) as file_handle: file_handle.write(asset.decode("utf-8")) def create_credentials_and_validate( hass: HomeAssistant, host: str, + unique_id: str, user_input: dict[str, Any], zeroconf_instance: zeroconf.HaZeroconf, ) -> dict[str, Any] | None: @@ -57,13 +62,15 @@ def create_credentials_and_validate( result = helper.register(host, "HomeAssistant") if result is not None: - write_tls_asset(hass, CONF_SHC_CERT, result["cert"]) - write_tls_asset(hass, CONF_SHC_KEY, result["key"]) + # Save key/certificate pair for each registered host separately + # otherwise only the last registered host 
is accessible. + write_tls_asset(hass, unique_id, CONF_SHC_CERT, result["cert"]) + write_tls_asset(hass, unique_id, CONF_SHC_KEY, result["key"]) session = SHCSession( host, - hass.config.path(DOMAIN, CONF_SHC_CERT), - hass.config.path(DOMAIN, CONF_SHC_KEY), + hass.config.path(DOMAIN, unique_id, CONF_SHC_CERT), + hass.config.path(DOMAIN, unique_id, CONF_SHC_KEY), True, zeroconf_instance, ) @@ -143,11 +150,16 @@ class BoschSHCConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: zeroconf_instance = await zeroconf.async_get_instance(self.hass) + # unique_id uniquely identifies the registered controller and is used + # to save the key/certificate pair for each controller separately + unique_id = self.info["unique_id"] + assert unique_id try: result = await self.hass.async_add_executor_job( create_credentials_and_validate, self.hass, self.host, + unique_id, user_input, zeroconf_instance, ) @@ -167,20 +179,23 @@ class BoschSHCConfigFlow(ConfigFlow, domain=DOMAIN): else: assert result entry_data = { - CONF_SSL_CERTIFICATE: self.hass.config.path(DOMAIN, CONF_SHC_CERT), - CONF_SSL_KEY: self.hass.config.path(DOMAIN, CONF_SHC_KEY), + # Each host has its own key/certificate pair + CONF_SSL_CERTIFICATE: self.hass.config.path( + DOMAIN, unique_id, CONF_SHC_CERT + ), + CONF_SSL_KEY: self.hass.config.path( + DOMAIN, unique_id, CONF_SHC_KEY + ), CONF_HOST: self.host, CONF_TOKEN: result["token"], CONF_HOSTNAME: result["token"].split(":", 1)[1], } - existing_entry = await self.async_set_unique_id(self.info["unique_id"]) + existing_entry = await self.async_set_unique_id(unique_id) if existing_entry: - self.hass.config_entries.async_update_entry( + return self.async_update_reload_and_abort( existing_entry, data=entry_data, ) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=cast(str, self.info["title"]), diff --git a/homeassistant/components/braviatv/config_flow.py b/homeassistant/components/braviatv/config_flow.py index b3ad55dbb7d..db5c72d7932 100644 --- a/homeassistant/components/braviatv/config_flow.py +++ b/homeassistant/components/braviatv/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any +from typing import Any, cast from urllib.parse import urlparse from aiohttp import CookieJar @@ -11,7 +11,7 @@ from pybravia import BraviaAuthError, BraviaClient, BraviaError, BraviaNotSuppor import voluptuous as vol from homeassistant.components import ssdp -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_CLIENT_ID, CONF_HOST, CONF_MAC, CONF_NAME, CONF_PIN from homeassistant.helpers import instance_id from homeassistant.helpers.aiohttp_client import async_create_clientsession @@ -37,7 +37,6 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize config flow.""" self.client: BraviaClient | None = None self.device_config: dict[str, Any] = {} - self.entry: ConfigEntry | None = None def create_client(self) -> None: """Create Bravia TV client from config.""" @@ -86,13 +85,12 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN): async def async_reauth_device(self) -> ConfigFlowResult: """Reauthorize Bravia TV device from config.""" - assert self.entry assert self.client await self.async_connect_device() - 
self.hass.config_entries.async_update_entry(self.entry, data=self.device_config) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self.device_config + ) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -147,7 +145,7 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN): self.device_config[CONF_CLIENT_ID] = client_id self.device_config[CONF_NICKNAME] = nickname try: - if self.entry: + if self.source == SOURCE_REAUTH: return await self.async_reauth_device() return await self.async_create_device() except BraviaAuthError: @@ -183,7 +181,7 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: self.device_config[CONF_PIN] = user_input[CONF_PIN] try: - if self.entry: + if self.source == SOURCE_REAUTH: return await self.async_reauth_device() return await self.async_create_device() except BraviaAuthError: @@ -207,8 +205,9 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN): self, discovery_info: ssdp.SsdpServiceInfo ) -> ConfigFlowResult: """Handle a discovered device.""" - parsed_url = urlparse(discovery_info.ssdp_location) - host = parsed_url.hostname + # We can cast the hostname to str because the ssdp_location is not bytes and + # not a relative url + host = cast(str, urlparse(discovery_info.ssdp_location).hostname) await self.async_set_unique_id(discovery_info.upnp[ssdp.ATTR_UPNP_UDN]) self._abort_if_unique_id_configured(updates={CONF_HOST: host}) @@ -246,6 +245,5 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) self.device_config = {**entry_data} return await self.async_step_authorize() diff --git a/homeassistant/components/braviatv/entity.py b/homeassistant/components/braviatv/entity.py index ac08543b875..75540b316a7 100644 --- a/homeassistant/components/braviatv/entity.py +++ b/homeassistant/components/braviatv/entity.py @@ -1,6 +1,6 @@ """A entity class for Bravia TV integration.""" -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import BraviaTVCoordinator @@ -28,3 +28,7 @@ class BraviaTVEntity(CoordinatorEntity[BraviaTVCoordinator]): model=model, name=f"{ATTR_MANUFACTURER} {model}", ) + if coordinator.client.mac is not None: + self._attr_device_info["connections"] = { + (CONNECTION_NETWORK_MAC, coordinator.client.mac) + } diff --git a/homeassistant/components/braviatv/media_player.py b/homeassistant/components/braviatv/media_player.py index 8d45cf4a439..4de167a6def 100644 --- a/homeassistant/components/braviatv/media_player.py +++ b/homeassistant/components/braviatv/media_player.py @@ -7,6 +7,7 @@ from typing import Any from homeassistant.components.media_player import ( BrowseError, + BrowseMedia, MediaClass, MediaPlayerDeviceClass, MediaPlayerEntity, @@ -14,7 +15,6 @@ from homeassistant.components.media_player import ( MediaPlayerState, MediaType, ) -from homeassistant.components.media_player.browse_media import BrowseMedia from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback diff --git a/homeassistant/components/bring/__init__.py b/homeassistant/components/bring/__init__.py index f55e75c70bf..80b7a843cc0 100644 --- a/homeassistant/components/bring/__init__.py +++ b/homeassistant/components/bring/__init__.py @@ -20,7 +20,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN from .coordinator import BringDataUpdateCoordinator -PLATFORMS: list[Platform] = [Platform.TODO] +PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.TODO] _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/bring/config_flow.py b/homeassistant/components/bring/config_flow.py index c675eda3cd2..b8ee9d1e6ae 100644 --- a/homeassistant/components/bring/config_flow.py +++ b/homeassistant/components/bring/config_flow.py @@ -33,11 +33,13 @@ STEP_USER_DATA_SCHEMA = vol.Schema( vol.Required(CONF_EMAIL): TextSelector( TextSelectorConfig( type=TextSelectorType.EMAIL, + autocomplete="email", ), ), vol.Required(CONF_PASSWORD): TextSelector( TextSelectorConfig( type=TextSelectorType.PASSWORD, + autocomplete="current-password", ), ), } @@ -48,7 +50,7 @@ class BringConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Bring!.""" VERSION = 1 - reauth_entry: BringConfigEntry | None = None + reauth_entry: BringConfigEntry info: BringAuthResponse async def async_step_user( @@ -72,9 +74,7 @@ class BringConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) + self.reauth_entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -83,10 +83,9 @@ class BringConfigFlow(ConfigFlow, domain=DOMAIN): """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - assert self.reauth_entry - if user_input is not None: if not (errors := await self.validate_input(user_input)): + self._abort_if_unique_id_mismatch() return self.async_update_reload_and_abort( self.reauth_entry, data=user_input ) diff --git a/homeassistant/components/bring/coordinator.py b/homeassistant/components/bring/coordinator.py index 439eb552de4..7678213f117 100644 --- a/homeassistant/components/bring/coordinator.py +++ b/homeassistant/components/bring/coordinator.py @@ -11,7 +11,7 @@ from bring_api import ( BringParseException, BringRequestException, ) -from bring_api.types import 
BringItemsResponse, BringList +from bring_api.types import BringItemsResponse, BringList, BringUserSettingsResponse from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL @@ -32,6 +32,7 @@ class BringDataUpdateCoordinator(DataUpdateCoordinator[dict[str, BringData]]): """A Bring Data Update Coordinator.""" config_entry: ConfigEntry + user_settings: BringUserSettingsResponse def __init__(self, hass: HomeAssistant, bring: Bring) -> None: """Initialize the Bring data coordinator.""" @@ -81,3 +82,17 @@ class BringDataUpdateCoordinator(DataUpdateCoordinator[dict[str, BringData]]): list_dict[lst["listUuid"]] = BringData(**lst, **items) return list_dict + + async def _async_setup(self) -> None: + """Set up coordinator.""" + + await self.async_refresh_user_settings() + + async def async_refresh_user_settings(self) -> None: + """Refresh user settings.""" + try: + self.user_settings = await self.bring.get_all_user_settings() + except (BringAuthException, BringRequestException, BringParseException) as e: + raise UpdateFailed( + "Unable to connect and retrieve user settings from bring" + ) from e diff --git a/homeassistant/components/bring/diagnostics.py b/homeassistant/components/bring/diagnostics.py new file mode 100644 index 00000000000..f4193a9993c --- /dev/null +++ b/homeassistant/components/bring/diagnostics.py @@ -0,0 +1,16 @@ +"""Diagnostics support for Bring.""" + +from __future__ import annotations + +from homeassistant.core import HomeAssistant + +from . import BringConfigEntry +from .coordinator import BringData + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: BringConfigEntry +) -> dict[str, BringData]: + """Return diagnostics for a config entry.""" + + return config_entry.runtime_data.data diff --git a/homeassistant/components/bring/entity.py b/homeassistant/components/bring/entity.py new file mode 100644 index 00000000000..5b6bf975764 --- /dev/null +++ b/homeassistant/components/bring/entity.py @@ -0,0 +1,36 @@ +"""Base entity for the Bring! integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import BringData, BringDataUpdateCoordinator + + +class BringBaseEntity(CoordinatorEntity[BringDataUpdateCoordinator]): + """Bring base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: BringDataUpdateCoordinator, + bring_list: BringData, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self._list_uuid = bring_list["listUuid"] + + self.device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + name=bring_list["name"], + identifiers={ + (DOMAIN, f"{coordinator.config_entry.unique_id}_{self._list_uuid}") + }, + manufacturer="Bring! Labs AG", + model="Bring! 
Grocery Shopping List", + configuration_url=f"https://web.getbring.com/app/lists/{list(self.coordinator.data.keys()).index(self._list_uuid)}", + ) diff --git a/homeassistant/components/bring/icons.json b/homeassistant/components/bring/icons.json index 1c6c3bdeca0..c670ef87700 100644 --- a/homeassistant/components/bring/icons.json +++ b/homeassistant/components/bring/icons.json @@ -1,5 +1,26 @@ { "entity": { + "sensor": { + "urgent": { + "default": "mdi:run-fast" + }, + "discounted": { + "default": "mdi:brightness-percent" + }, + "convenient": { + "default": "mdi:fridge-outline" + }, + "list_language": { + "default": "mdi:earth" + }, + "list_access": { + "default": "mdi:account-lock", + "state": { + "shared": "mdi:account-group", + "invitation": "mdi:account-multiple-plus" + } + } + }, "todo": { "shopping_list": { "default": "mdi:cart" @@ -7,6 +28,8 @@ } }, "services": { - "send_message": "mdi:cellphone-message" + "send_message": { + "service": "mdi:cellphone-message" + } } } diff --git a/homeassistant/components/bring/manifest.json b/homeassistant/components/bring/manifest.json index 17c742415ff..ff24a991350 100644 --- a/homeassistant/components/bring/manifest.json +++ b/homeassistant/components/bring/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/bring", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["bring-api==0.8.1"] + "requirements": ["bring-api==0.9.1"] } diff --git a/homeassistant/components/bring/quality_scale.yaml b/homeassistant/components/bring/quality_scale.yaml new file mode 100644 index 00000000000..1fdb3f13f1b --- /dev/null +++ b/homeassistant/components/bring/quality_scale.yaml @@ -0,0 +1,72 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Only entity services + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: todo + dependency-transparency: done + docs-actions: done + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: The integration registers no events + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: done + comment: handled by coordinator + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: Integration is a service and has no devices. + discovery: + status: exempt + comment: Integration is a service and has no devices. 
+ docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + no repairs + stale-devices: todo + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/homeassistant/components/bring/sensor.py b/homeassistant/components/bring/sensor.py new file mode 100644 index 00000000000..bd33ce9bf88 --- /dev/null +++ b/homeassistant/components/bring/sensor.py @@ -0,0 +1,128 @@ +"""Sensor platform for the Bring! integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from enum import StrEnum + +from bring_api import BringUserSettingsResponse +from bring_api.const import BRING_SUPPORTED_LOCALES + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import BringConfigEntry +from .coordinator import BringData, BringDataUpdateCoordinator +from .entity import BringBaseEntity +from .util import list_language, sum_attributes + +PARALLEL_UPDATES = 0 + + +@dataclass(kw_only=True, frozen=True) +class BringSensorEntityDescription(SensorEntityDescription): + """Bring Sensor Description.""" + + value_fn: Callable[[BringData, BringUserSettingsResponse], StateType] + + +class BringSensor(StrEnum): + """Bring sensors.""" + + URGENT = "urgent" + CONVENIENT = "convenient" + DISCOUNTED = "discounted" + LIST_LANGUAGE = "list_language" + LIST_ACCESS = "list_access" + + +SENSOR_DESCRIPTIONS: tuple[BringSensorEntityDescription, ...] 
= ( + BringSensorEntityDescription( + key=BringSensor.URGENT, + translation_key=BringSensor.URGENT, + value_fn=lambda lst, _: sum_attributes(lst, "urgent"), + ), + BringSensorEntityDescription( + key=BringSensor.CONVENIENT, + translation_key=BringSensor.CONVENIENT, + value_fn=lambda lst, _: sum_attributes(lst, "convenient"), + ), + BringSensorEntityDescription( + key=BringSensor.DISCOUNTED, + translation_key=BringSensor.DISCOUNTED, + value_fn=lambda lst, _: sum_attributes(lst, "discounted"), + ), + BringSensorEntityDescription( + key=BringSensor.LIST_LANGUAGE, + translation_key=BringSensor.LIST_LANGUAGE, + value_fn=( + lambda lst, settings: x.lower() + if (x := list_language(lst["listUuid"], settings)) + else None + ), + entity_category=EntityCategory.DIAGNOSTIC, + options=[x.lower() for x in BRING_SUPPORTED_LOCALES], + device_class=SensorDeviceClass.ENUM, + ), + BringSensorEntityDescription( + key=BringSensor.LIST_ACCESS, + translation_key=BringSensor.LIST_ACCESS, + value_fn=lambda lst, _: lst["status"].lower(), + entity_category=EntityCategory.DIAGNOSTIC, + options=["registered", "shared", "invitation"], + device_class=SensorDeviceClass.ENUM, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: BringConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the sensor platform.""" + coordinator = config_entry.runtime_data + + async_add_entities( + BringSensorEntity( + coordinator, + bring_list, + description, + ) + for description in SENSOR_DESCRIPTIONS + for bring_list in coordinator.data.values() + ) + + +class BringSensorEntity(BringBaseEntity, SensorEntity): + """A sensor entity.""" + + entity_description: BringSensorEntityDescription + + def __init__( + self, + coordinator: BringDataUpdateCoordinator, + bring_list: BringData, + entity_description: BringSensorEntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator, bring_list) + self.entity_description = entity_description + self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{self._list_uuid}_{self.entity_description.key}" + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + + return self.entity_description.value_fn( + self.coordinator.data[self._list_uuid], + self.coordinator.user_settings, + ) diff --git a/homeassistant/components/bring/strings.json b/homeassistant/components/bring/strings.json index e3e700d75f9..7331f68a161 100644 --- a/homeassistant/components/bring/strings.json +++ b/homeassistant/components/bring/strings.json @@ -1,4 +1,7 @@ { + "common": { + "shopping_list_items": "items" + }, "config": { "step": { "user": { @@ -23,7 +26,57 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unique_id_mismatch": "The login details correspond to a different account. Please re-authenticate to the previously configured account." 
+ } + }, + "entity": { + "sensor": { + "urgent": { + "name": "Urgent", + "unit_of_measurement": "[%key:component::bring::common::shopping_list_items%]" + }, + "convenient": { + "name": "On occasion", + "unit_of_measurement": "[%key:component::bring::common::shopping_list_items%]" + }, + "discounted": { + "name": "Discount only", + "unit_of_measurement": "[%key:component::bring::common::shopping_list_items%]" + }, + "list_language": { + "name": "Region & language", + "state": { + "de-at": "Austria", + "de-ch": "Switzerland (German)", + "de-de": "Germany", + "en-au": "Australia", + "en-ca": "Canada", + "en-gb": "United Kingdom", + "en-us": "United States", + "es-es": "Spain", + "fr-ch": "Switzerland (French)", + "fr-fr": "France", + "hu-hu": "Hungary", + "it-ch": "Switzerland (Italian)", + "it-it": "Italy", + "nb-no": "Norway", + "nl-nl": "Netherlands", + "pl-pl": "Poland", + "pt-br": "Portugal", + "ru-ru": "Russia", + "sv-se": "Sweden", + "tr-tr": "Türkiye" + } + }, + "list_access": { + "name": "List access", + "state": { + "registered": "Private", + "shared": "Shared", + "invitation": "Invitation pending" + } + } } }, "exceptions": { diff --git a/homeassistant/components/bring/todo.py b/homeassistant/components/bring/todo.py index 4fb90860899..c53b5788b68 100644 --- a/homeassistant/components/bring/todo.py +++ b/homeassistant/components/bring/todo.py @@ -23,7 +23,6 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import BringConfigEntry from .const import ( @@ -33,6 +32,9 @@ from .const import ( SERVICE_PUSH_NOTIFICATION, ) from .coordinator import BringData, BringDataUpdateCoordinator +from .entity import BringBaseEntity + +PARALLEL_UPDATES = 0 async def async_setup_entry( @@ -43,16 +45,10 @@ async def async_setup_entry( """Set up the sensor from a config entry created in the integrations UI.""" coordinator = config_entry.runtime_data - unique_id = config_entry.unique_id - - if TYPE_CHECKING: - assert unique_id - async_add_entities( BringTodoListEntity( coordinator, bring_list=bring_list, - unique_id=unique_id, ) for bring_list in coordinator.data.values() ) @@ -71,13 +67,11 @@ async def async_setup_entry( ) -class BringTodoListEntity( - CoordinatorEntity[BringDataUpdateCoordinator], TodoListEntity -): +class BringTodoListEntity(BringBaseEntity, TodoListEntity): """A To-do List representation of the Bring! 
Shopping List.""" _attr_translation_key = "shopping_list" - _attr_has_entity_name = True + _attr_name = None _attr_supported_features = ( TodoListEntityFeature.CREATE_TODO_ITEM | TodoListEntityFeature.UPDATE_TODO_ITEM @@ -86,16 +80,11 @@ class BringTodoListEntity( ) def __init__( - self, - coordinator: BringDataUpdateCoordinator, - bring_list: BringData, - unique_id: str, + self, coordinator: BringDataUpdateCoordinator, bring_list: BringData ) -> None: - """Initialize BringTodoListEntity.""" - super().__init__(coordinator) - self._list_uuid = bring_list["listUuid"] - self._attr_name = bring_list["name"] - self._attr_unique_id = f"{unique_id}_{self._list_uuid}" + """Initialize the entity.""" + super().__init__(coordinator, bring_list) + self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{self._list_uuid}" @property def todo_items(self) -> list[TodoItem]: diff --git a/homeassistant/components/bring/util.py b/homeassistant/components/bring/util.py new file mode 100644 index 00000000000..b706156a3d3 --- /dev/null +++ b/homeassistant/components/bring/util.py @@ -0,0 +1,40 @@ +"""Utility functions for Bring.""" + +from __future__ import annotations + +from bring_api import BringUserSettingsResponse + +from .coordinator import BringData + + +def list_language( + list_uuid: str, + user_settings: BringUserSettingsResponse, +) -> str | None: + """Get the lists language setting.""" + try: + list_settings = next( + filter( + lambda x: x["listUuid"] == list_uuid, + user_settings["userlistsettings"], + ) + ) + + return next( + filter( + lambda x: x["key"] == "listArticleLanguage", + list_settings["usersettings"], + ) + )["value"] + + except (StopIteration, KeyError): + return None + + +def sum_attributes(bring_list: BringData, attribute: str) -> int: + """Count items with given attribute set.""" + return sum( + item["attributes"][0]["content"][attribute] + for item in bring_list["purchase"] + if len(item.get("attributes", [])) + ) diff --git a/homeassistant/components/broadlink/climate.py b/homeassistant/components/broadlink/climate.py index dbfd982795c..25a6bbd60a5 100644 --- a/homeassistant/components/broadlink/climate.py +++ b/homeassistant/components/broadlink/climate.py @@ -52,7 +52,6 @@ class BroadlinkThermostat(BroadlinkEntity, ClimateEntity): ) _attr_target_temperature_step = PRECISION_HALVES _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: BroadlinkDevice) -> None: """Initialize the climate entity.""" diff --git a/homeassistant/components/broadlink/config_flow.py b/homeassistant/components/broadlink/config_flow.py index 2d79ba4bea1..c9b2fb46608 100644 --- a/homeassistant/components/broadlink/config_flow.py +++ b/homeassistant/components/broadlink/config_flow.py @@ -5,7 +5,7 @@ import errno from functools import partial import logging import socket -from typing import TYPE_CHECKING, Any +from typing import Any import broadlink as blk from broadlink.exceptions import ( @@ -37,9 +37,7 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the Broadlink flow.""" - self.device: blk.Device | None = None + device: blk.Device async def async_set_device( self, device: blk.Device, raise_on_progress: bool = True @@ -131,8 +129,6 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): ) return await self.async_step_auth() - if TYPE_CHECKING: - assert self.device if device.mac == self.device.mac: await self.async_set_device(device, 
raise_on_progress=False) return await self.async_step_auth() @@ -158,10 +154,10 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_auth(self): + async def async_step_auth(self) -> ConfigFlowResult: """Authenticate to the device.""" device = self.device - errors = {} + errors: dict[str, str] = {} try: await self.hass.async_add_executor_job(device.auth) @@ -211,7 +207,11 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): ) return self.async_show_form(step_id="auth", errors=errors) - async def async_step_reset(self, user_input=None, errors=None): + async def async_step_reset( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Guide the user to unlock the device manually. We are unable to authenticate because the device is locked. @@ -234,7 +234,9 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): {CONF_HOST: device.host[0], CONF_TIMEOUT: device.timeout} ) - async def async_step_unlock(self, user_input=None): + async def async_step_unlock( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Unlock the device. The authentication succeeded, but the device is locked. @@ -288,10 +290,12 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_finish(self, user_input=None): + async def async_step_finish( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Choose a name for the device and create config entry.""" device = self.device - errors = {} + errors: dict[str, str] = {} # Abort reauthentication flow. self._abort_if_unique_id_configured( @@ -314,10 +318,10 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): step_id="finish", data_schema=vol.Schema(data_schema), errors=errors ) - async def async_step_import(self, import_info): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a device.""" - self._async_abort_entries_match({CONF_HOST: import_info[CONF_HOST]}) - return await self.async_step_user(import_info) + self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) + return await self.async_step_user(import_data) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/broadlink/device.py b/homeassistant/components/broadlink/device.py index 2518cd65bd3..75b6236a473 100644 --- a/homeassistant/components/broadlink/device.py +++ b/homeassistant/components/broadlink/device.py @@ -15,7 +15,7 @@ from broadlink.exceptions import ( ) from typing_extensions import TypeVar -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -200,10 +200,4 @@ class BroadlinkDevice(Generic[_ApiT]): self.api.host[0], ) - self.hass.async_create_task( - self.hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data={CONF_NAME: self.name, **self.config.data}, - ) - ) + self.config.async_start_reauth(self.hass, data={CONF_NAME: self.name}) diff --git a/homeassistant/components/broadlink/remote.py b/homeassistant/components/broadlink/remote.py index 710b4a34a11..18a3a82017c 100644 --- a/homeassistant/components/broadlink/remote.py +++ b/homeassistant/components/broadlink/remote.py @@ -377,7 +377,7 @@ class BroadlinkRemote(BroadlinkEntity, RemoteEntity, RestoreEntity): device.api.check_frequency ) if is_found: - _LOGGER.info("Radiofrequency detected: %s MHz", 
frequency) + _LOGGER.debug("Radiofrequency detected: %s MHz", frequency) break else: await device.async_request(device.api.cancel_sweep_frequency) diff --git a/homeassistant/components/broadlink/strings.json b/homeassistant/components/broadlink/strings.json index 5150a521363..17c98f0182f 100644 --- a/homeassistant/components/broadlink/strings.json +++ b/homeassistant/components/broadlink/strings.json @@ -43,6 +43,7 @@ }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "invalid_host": "[%key:common::config_flow::error::invalid_host%]", "unknown": "[%key:common::config_flow::error::unknown%]" } diff --git a/homeassistant/components/brother/config_flow.py b/homeassistant/components/brother/config_flow.py index 4536cb9c4d5..d9130b96300 100644 --- a/homeassistant/components/brother/config_flow.py +++ b/homeassistant/components/brother/config_flow.py @@ -2,14 +2,14 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import Any from brother import Brother, SnmpError, UnsupportedModelError import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.components.snmp import async_get_snmp_engine -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_TYPE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -53,7 +53,6 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize.""" self.brother: Brother self.host: str | None = None - self.entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -141,30 +140,15 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reconfigure( - self, _: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - - if TYPE_CHECKING: - assert entry is not None - - self.entry = entry - - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" + entry = self._get_reconfigure_entry() errors = {} - if TYPE_CHECKING: - assert self.entry is not None - if user_input is not None: try: - await validate_input(self.hass, user_input, self.entry.unique_id) + await validate_input(self.hass, user_input, entry.unique_id) except InvalidHost: errors[CONF_HOST] = "wrong_host" except (ConnectionError, TimeoutError): @@ -174,20 +158,18 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN): except AnotherDevice: errors["base"] = "another_device" else: - self.hass.config_entries.async_update_entry( - self.entry, - data=self.entry.data | {CONF_HOST: user_input[CONF_HOST]}, + return self.async_update_reload_and_abort( + entry, + data_updates={CONF_HOST: user_input[CONF_HOST]}, ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reconfigure_successful") return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( data_schema=RECONFIGURE_SCHEMA, - suggested_values=self.entry.data | (user_input or {}), + 
suggested_values=entry.data | (user_input or {}), ), - description_placeholders={"printer_name": self.entry.title}, + description_placeholders={"printer_name": entry.title}, errors=errors, ) diff --git a/homeassistant/components/brother/manifest.json b/homeassistant/components/brother/manifest.json index d9c8e36aa1d..fa70f3a5dc5 100644 --- a/homeassistant/components/brother/manifest.json +++ b/homeassistant/components/brother/manifest.json @@ -8,8 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"], - "quality_scale": "platinum", - "requirements": ["brother==4.3.0"], + "requirements": ["brother==4.3.1"], "zeroconf": [ { "type": "_printer._tcp.local.", diff --git a/homeassistant/components/brother/sensor.py b/homeassistant/components/brother/sensor.py index e86eb59d6bc..d49ebdf07ca 100644 --- a/homeassistant/components/brother/sensor.py +++ b/homeassistant/components/brother/sensor.py @@ -30,8 +30,6 @@ from .const import DOMAIN ATTR_COUNTER = "counter" ATTR_REMAINING_PAGES = "remaining_pages" -UNIT_PAGES = "p" - _LOGGER = logging.getLogger(__name__) @@ -52,7 +50,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="page_counter", translation_key="page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.page_counter, @@ -60,7 +57,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="bw_counter", translation_key="bw_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.bw_counter, @@ -68,7 +64,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="color_counter", translation_key="color_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.color_counter, @@ -76,7 +71,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="duplex_unit_pages_counter", translation_key="duplex_unit_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.duplex_unit_pages_counter, @@ -92,7 +86,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="drum_remaining_pages", translation_key="drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.drum_remaining_pages, @@ -100,7 +93,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="drum_counter", translation_key="drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.drum_counter, @@ -116,7 +108,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] 
= ( BrotherSensorEntityDescription( key="black_drum_remaining_pages", translation_key="black_drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.black_drum_remaining_pages, @@ -124,7 +115,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="black_drum_counter", translation_key="black_drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.black_drum_counter, @@ -140,7 +130,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="cyan_drum_remaining_pages", translation_key="cyan_drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.cyan_drum_remaining_pages, @@ -148,7 +137,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="cyan_drum_counter", translation_key="cyan_drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.cyan_drum_counter, @@ -164,7 +152,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="magenta_drum_remaining_pages", translation_key="magenta_drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.magenta_drum_remaining_pages, @@ -172,7 +159,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="magenta_drum_counter", translation_key="magenta_drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.magenta_drum_counter, @@ -188,7 +174,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="yellow_drum_remaining_pages", translation_key="yellow_drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.yellow_drum_remaining_pages, @@ -196,7 +181,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] 
= ( BrotherSensorEntityDescription( key="yellow_drum_counter", translation_key="yellow_drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.yellow_drum_counter, diff --git a/homeassistant/components/brother/strings.json b/homeassistant/components/brother/strings.json index d7f8f4a1b89..b502ed7e3b9 100644 --- a/homeassistant/components/brother/strings.json +++ b/homeassistant/components/brother/strings.json @@ -18,7 +18,7 @@ "type": "[%key:component::brother::config::step::user::data::type%]" } }, - "reconfigure_confirm": { + "reconfigure": { "description": "Update configuration for {printer_name}.", "data": { "host": "[%key:common::config_flow::data::host%]" @@ -46,61 +46,75 @@ "name": "Status" }, "page_counter": { - "name": "Page counter" + "name": "Page counter", + "unit_of_measurement": "pages" }, "bw_pages": { - "name": "B/W pages" + "name": "B/W pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "color_pages": { - "name": "Color pages" + "name": "Color pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "duplex_unit_page_counter": { - "name": "Duplex unit page counter" + "name": "Duplex unit page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "drum_remaining_life": { "name": "Drum remaining lifetime" }, "drum_remaining_pages": { - "name": "Drum remaining pages" + "name": "Drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "drum_page_counter": { - "name": "Drum page counter" + "name": "Drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "black_drum_remaining_life": { "name": "Black drum remaining lifetime" }, "black_drum_remaining_pages": { - "name": "Black drum remaining pages" + "name": "Black drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "black_drum_page_counter": { - "name": "Black drum page counter" + "name": "Black drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "cyan_drum_remaining_life": { "name": "Cyan drum remaining lifetime" }, "cyan_drum_remaining_pages": { - "name": "Cyan drum remaining pages" + "name": "Cyan drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "cyan_drum_page_counter": { - "name": "Cyan drum page counter" + "name": "Cyan drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "magenta_drum_remaining_life": { "name": "Magenta drum remaining lifetime" }, "magenta_drum_remaining_pages": { - "name": "Magenta drum remaining pages" + "name": "Magenta drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "magenta_drum_page_counter": { - "name": "Magenta drum page counter" + "name": "Magenta drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "yellow_drum_remaining_life": { "name": "Yellow drum remaining lifetime" }, 
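The brother sensor.py and strings.json hunks above belong together: with the shared `UNIT_PAGES = "p"` constant and each `native_unit_of_measurement=UNIT_PAGES` removed, the unit shown for every counter is expected to come from the translatable `unit_of_measurement` entries being added to strings.json, and the other counters reference the `page_counter` unit via `[%key:...]` so the string only exists once. A rough sketch of what a counter description looks like under this pattern is below; the description class and its `value` field are illustrative stand-ins mirroring the hunks above, not code copied from the integration.

```python
# Illustrative sketch only (assumed names, not the integration's actual code):
# a page-counter description declares a translation_key and no native unit; the
# displayed unit is expected to come from the matching "unit_of_measurement"
# entry under entity.sensor.<translation_key> in strings.json.
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

from homeassistant.components.sensor import (
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import EntityCategory


@dataclass(frozen=True, kw_only=True)
class PageCounterDescription(SensorEntityDescription):
    """Hypothetical description with a value extractor, as in the hunks above."""

    value: Callable[[Any], int]


PAGE_COUNTER = PageCounterDescription(
    key="page_counter",
    translation_key="page_counter",  # resolves entity.sensor.page_counter in strings.json
    state_class=SensorStateClass.MEASUREMENT,
    entity_category=EntityCategory.DIAGNOSTIC,
    # native_unit_of_measurement is deliberately omitted; "pages" is translated
    value=lambda data: data.page_counter,
)
```

Because the remaining counters point at `component::brother::entity::sensor::page_counter::unit_of_measurement`, translators provide the "pages" string once and every counter entity picks it up.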
"yellow_drum_remaining_pages": { - "name": "Yellow drum remaining pages" + "name": "Yellow drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "yellow_drum_page_counter": { - "name": "Yellow drum page counter" + "name": "Yellow drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "belt_unit_remaining_life": { "name": "Belt unit remaining lifetime" diff --git a/homeassistant/components/browser/icons.json b/homeassistant/components/browser/icons.json index 7c971009fd7..680aaf14b86 100644 --- a/homeassistant/components/browser/icons.json +++ b/homeassistant/components/browser/icons.json @@ -1,5 +1,7 @@ { "services": { - "browse_url": "mdi:web" + "browse_url": { + "service": "mdi:web" + } } } diff --git a/homeassistant/components/brunt/__init__.py b/homeassistant/components/brunt/__init__.py index bec281d1902..c488c813b3b 100644 --- a/homeassistant/components/brunt/__init__.py +++ b/homeassistant/components/brunt/__init__.py @@ -2,79 +2,22 @@ from __future__ import annotations -from asyncio import timeout -import logging - -from aiohttp.client_exceptions import ClientResponseError, ServerDisconnectedError -from brunt import BruntClientAsync, Thing - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DATA_BAPI, DATA_COOR, DOMAIN, PLATFORMS, REGULAR_INTERVAL - -_LOGGER = logging.getLogger(__name__) +from .const import PLATFORMS +from .coordinator import BruntConfigEntry, BruntCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BruntConfigEntry) -> bool: """Set up Brunt using config flow.""" - session = async_get_clientsession(hass) - bapi = BruntClientAsync( - username=entry.data[CONF_USERNAME], - password=entry.data[CONF_PASSWORD], - session=session, - ) - try: - await bapi.async_login() - except ServerDisconnectedError as exc: - raise ConfigEntryNotReady("Brunt not ready to connect.") from exc - except ClientResponseError as exc: - raise ConfigEntryAuthFailed( - f"Brunt could not connect with username: {entry.data[CONF_USERNAME]}." - ) from exc - - async def async_update_data() -> dict[str | None, Thing]: - """Fetch data from the Brunt endpoint for all Things. - - Error 403 is the API response for any kind of authentication error (failed password or email) - Error 401 is the API response for things that are not part of the account, could happen when a device is deleted from the account. 
- """ - try: - async with timeout(10): - things = await bapi.async_get_things(force=True) - return {thing.serial: thing for thing in things} - except ServerDisconnectedError as err: - raise UpdateFailed(f"Error communicating with API: {err}") from err - except ClientResponseError as err: - if err.status == 403: - raise ConfigEntryAuthFailed from err - if err.status == 401: - _LOGGER.warning("Device not found, will reload Brunt integration") - await hass.config_entries.async_reload(entry.entry_id) - raise UpdateFailed from err - - coordinator = DataUpdateCoordinator( - hass, - _LOGGER, - name="brunt", - update_method=async_update_data, - update_interval=REGULAR_INTERVAL, - ) + coordinator = BruntCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = {DATA_BAPI: bapi, DATA_COOR: coordinator} + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BruntConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/brunt/config_flow.py b/homeassistant/components/brunt/config_flow.py index ecb2dd41d6f..3baea9b98cc 100644 --- a/homeassistant/components/brunt/config_flow.py +++ b/homeassistant/components/brunt/config_flow.py @@ -11,8 +11,8 @@ from aiohttp.client_exceptions import ServerDisconnectedError from brunt import BruntClientAsync import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME from .const import DOMAIN @@ -56,8 +56,6 @@ class BruntConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -82,22 +80,22 @@ class BruntConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self._reauth_entry - username = self._reauth_entry.data[CONF_USERNAME] + reauth_entry = self._get_reauth_entry() + username = reauth_entry.data[CONF_USERNAME] if user_input is None: return self.async_show_form( step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, - description_placeholders={"username": username}, + description_placeholders={ + CONF_USERNAME: username, + CONF_NAME: reauth_entry.title, + }, ) user_input[CONF_USERNAME] = username errors = await validate_input(user_input) @@ -106,9 +104,10 @@ class BruntConfigFlow(ConfigFlow, domain=DOMAIN): step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, errors=errors, - description_placeholders={"username": username}, + 
description_placeholders={ + CONF_USERNAME: username, + CONF_NAME: reauth_entry.title, + }, ) - self.hass.config_entries.async_update_entry(self._reauth_entry, data=user_input) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=user_input) diff --git a/homeassistant/components/brunt/const.py b/homeassistant/components/brunt/const.py index 4c246d28d64..0d9323cbf07 100644 --- a/homeassistant/components/brunt/const.py +++ b/homeassistant/components/brunt/const.py @@ -10,8 +10,6 @@ NOTIFICATION_ID = "brunt_notification" NOTIFICATION_TITLE = "Brunt Cover Setup" ATTRIBUTION = "Based on an unofficial Brunt SDK." PLATFORMS = [Platform.COVER] -DATA_BAPI = "bapi" -DATA_COOR = "coordinator" CLOSED_POSITION = 0 OPEN_POSITION = 100 diff --git a/homeassistant/components/brunt/coordinator.py b/homeassistant/components/brunt/coordinator.py new file mode 100644 index 00000000000..b07ec2c0c88 --- /dev/null +++ b/homeassistant/components/brunt/coordinator.py @@ -0,0 +1,80 @@ +"""The brunt component.""" + +from __future__ import annotations + +from asyncio import timeout +import logging + +from aiohttp.client_exceptions import ClientResponseError, ServerDisconnectedError +from brunt import BruntClientAsync, Thing + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import REGULAR_INTERVAL + +_LOGGER = logging.getLogger(__name__) + +type BruntConfigEntry = ConfigEntry[BruntCoordinator] + + +class BruntCoordinator(DataUpdateCoordinator[dict[str | None, Thing]]): + """Config entry data.""" + + bapi: BruntClientAsync + config_entry: BruntConfigEntry + + def __init__( + self, + hass: HomeAssistant, + config_entry: BruntConfigEntry, + ) -> None: + """Initialize the Brunt coordinator.""" + super().__init__( + hass, + _LOGGER, + config_entry=config_entry, + name="brunt", + update_interval=REGULAR_INTERVAL, + ) + + async def _async_setup(self) -> None: + session = async_get_clientsession(self.hass) + + self.bapi = BruntClientAsync( + username=self.config_entry.data[CONF_USERNAME], + password=self.config_entry.data[CONF_PASSWORD], + session=session, + ) + try: + await self.bapi.async_login() + except ServerDisconnectedError as exc: + raise ConfigEntryNotReady("Brunt not ready to connect.") from exc + except ClientResponseError as exc: + raise ConfigEntryAuthFailed( + f"Brunt could not connect with username: {self.config_entry.data[CONF_USERNAME]}." + ) from exc + + async def _async_update_data(self) -> dict[str | None, Thing]: + """Fetch data from the Brunt endpoint for all Things. + + Error 403 is the API response for any kind of authentication error (failed password or email) + Error 401 is the API response for things that are not part of the account, could happen when a device is deleted from the account. 
+ """ + try: + async with timeout(10): + things = await self.bapi.async_get_things(force=True) + return {thing.serial: thing for thing in things} + except ServerDisconnectedError as err: + raise UpdateFailed(f"Error communicating with API: {err}") from err + except ClientResponseError as err: + if err.status == 403: + raise ConfigEntryAuthFailed from err + if err.status == 401: + _LOGGER.warning("Device not found, will reload Brunt integration") + await self.hass.config_entries.async_reload(self.config_entry.entry_id) + raise UpdateFailed from err diff --git a/homeassistant/components/brunt/cover.py b/homeassistant/components/brunt/cover.py index 519885fe542..bb97f42bd36 100644 --- a/homeassistant/components/brunt/cover.py +++ b/homeassistant/components/brunt/cover.py @@ -5,7 +5,7 @@ from __future__ import annotations from typing import Any from aiohttp.client_exceptions import ClientResponseError -from brunt import BruntClientAsync, Thing +from brunt import Thing from homeassistant.components.cover import ( ATTR_POSITION, @@ -13,49 +13,39 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( ATTR_REQUEST_POSITION, ATTRIBUTION, CLOSED_POSITION, - DATA_BAPI, - DATA_COOR, DOMAIN, FAST_INTERVAL, OPEN_POSITION, REGULAR_INTERVAL, ) +from .coordinator import BruntConfigEntry, BruntCoordinator async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: BruntConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the brunt platform.""" - bapi: BruntClientAsync = hass.data[DOMAIN][entry.entry_id][DATA_BAPI] - coordinator: DataUpdateCoordinator[dict[str | None, Thing]] = hass.data[DOMAIN][ - entry.entry_id - ][DATA_COOR] + coordinator = entry.runtime_data async_add_entities( - BruntDevice(coordinator, serial, thing, bapi, entry.entry_id) + BruntDevice(coordinator, serial, thing, entry.entry_id) for serial, thing in coordinator.data.items() ) -class BruntDevice( - CoordinatorEntity[DataUpdateCoordinator[dict[str | None, Thing]]], CoverEntity -): +class BruntDevice(CoordinatorEntity[BruntCoordinator], CoverEntity): """Representation of a Brunt cover device. Contains the common logic for all Brunt devices. 
@@ -73,16 +63,14 @@ class BruntDevice( def __init__( self, - coordinator: DataUpdateCoordinator[dict[str | None, Thing]], + coordinator: BruntCoordinator, serial: str | None, thing: Thing, - bapi: BruntClientAsync, entry_id: str, ) -> None: """Init the Brunt device.""" super().__init__(coordinator) self._attr_unique_id = serial - self._bapi = bapi self._thing = thing self._entry_id = entry_id @@ -167,7 +155,7 @@ class BruntDevice( async def _async_update_cover(self, position: int) -> None: """Set the cover to the new position and wait for the update to be reflected.""" try: - await self._bapi.async_change_request_position( + await self.coordinator.bapi.async_change_request_position( position, thing_uri=self._thing.thing_uri ) except ClientResponseError as exc: @@ -182,7 +170,7 @@ class BruntDevice( """Update the update interval after each refresh.""" if ( self.request_cover_position - == self._bapi.last_requested_positions[self._thing.thing_uri] + == self.coordinator.bapi.last_requested_positions[self._thing.thing_uri] and self.move_state == 0 ): self.coordinator.update_interval = REGULAR_INTERVAL diff --git a/homeassistant/components/bryant_evolution/climate.py b/homeassistant/components/bryant_evolution/climate.py index dd31097a1ee..2d54ced8217 100644 --- a/homeassistant/components/bryant_evolution/climate.py +++ b/homeassistant/components/bryant_evolution/climate.py @@ -77,7 +77,6 @@ class BryantEvolutionClimate(ClimateEntity): HVACMode.OFF, ] _attr_fan_modes = ["auto", "low", "med", "high"] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/bryant_evolution/config_flow.py b/homeassistant/components/bryant_evolution/config_flow.py index a6b07daf96b..2e5a094948d 100644 --- a/homeassistant/components/bryant_evolution/config_flow.py +++ b/homeassistant/components/bryant_evolution/config_flow.py @@ -10,7 +10,6 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_FILENAME -from homeassistant.helpers.typing import UNDEFINED from .const import CONF_SYSTEM_ZONE, DOMAIN @@ -68,20 +67,16 @@ class BryantConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: system_zone = await _enumerate_sz(user_input[CONF_FILENAME]) if len(system_zone) != 0: - our_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert our_entry is not None, "Could not find own entry" return self.async_update_reload_and_abort( - entry=our_entry, + self._get_reconfigure_entry(), data={ CONF_FILENAME: user_input[CONF_FILENAME], CONF_SYSTEM_ZONE: system_zone, }, - unique_id=UNDEFINED, - reason="reconfigured", ) errors["base"] = "cannot_connect" return self.async_show_form( - step_id="reconfigure", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + step_id="reconfigure", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, ) diff --git a/homeassistant/components/bryant_evolution/strings.json b/homeassistant/components/bryant_evolution/strings.json index 1ce9d58bb10..ec816d3d961 100644 --- a/homeassistant/components/bryant_evolution/strings.json +++ b/homeassistant/components/bryant_evolution/strings.json @@ -1,6 +1,11 @@ { "config": { "step": { + "reconfigure": { + "data": { + "filename": "[%key:component::bryant_evolution::config::step::user::data::filename%]" + } + }, "user": { "data": { "filename": "Serial port filename" @@ -13,7 +18,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": 
"[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "exceptions": { diff --git a/homeassistant/components/bsblan/__init__.py b/homeassistant/components/bsblan/__init__.py index 5ce90db5043..623bfbfef56 100644 --- a/homeassistant/components/bsblan/__init__.py +++ b/homeassistant/components/bsblan/__init__.py @@ -15,10 +15,12 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import CONF_PASSKEY, DOMAIN +from .const import CONF_PASSKEY from .coordinator import BSBLanUpdateCoordinator -PLATFORMS = [Platform.CLIMATE] +PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER] + +type BSBLanConfigEntry = ConfigEntry[BSBLanData] @dataclasses.dataclass @@ -32,7 +34,7 @@ class BSBLanData: static: StaticState -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bool: """Set up BSB-Lan from a config entry.""" # create config using BSBLANConfig @@ -57,7 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: info = await bsblan.info() static = await bsblan.static_values() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = BSBLanData( + entry.runtime_data = BSBLanData( client=bsblan, coordinator=coordinator, device=device, @@ -70,11 +72,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bool: """Unload BSBLAN config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - # Cleanup - del hass.data[DOMAIN][entry.entry_id] - if not hass.data[DOMAIN]: - del hass.data[DOMAIN] - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/bsblan/climate.py b/homeassistant/components/bsblan/climate.py index ae7116143df..2833d6549b4 100644 --- a/homeassistant/components/bsblan/climate.py +++ b/homeassistant/components/bsblan/climate.py @@ -15,15 +15,14 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.const import ATTR_TEMPERATURE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.enum import try_parse_enum -from . import BSBLanData +from . 
import BSBLanConfigEntry, BSBLanData from .const import ATTR_TARGET_TEMPERATURE, DOMAIN from .entity import BSBLanEntity @@ -43,18 +42,12 @@ PRESET_MODES = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: BSBLanConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up BSBLAN device based on a config entry.""" - data: BSBLanData = hass.data[DOMAIN][entry.entry_id] - async_add_entities( - [ - BSBLANClimate( - data, - ) - ] - ) + data = entry.runtime_data + async_add_entities([BSBLANClimate(data)]) class BSBLANClimate(BSBLanEntity, ClimateEntity): @@ -72,7 +65,6 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity): _attr_preset_modes = PRESET_MODES _attr_hvac_modes = HVAC_MODES - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -82,26 +74,19 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity): super().__init__(data.coordinator, data) self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate" - self._attr_min_temp = float(data.static.min_temp.value) - self._attr_max_temp = float(data.static.max_temp.value) - if data.static.min_temp.unit in ("°C", "°C"): - self._attr_temperature_unit = UnitOfTemperature.CELSIUS - else: - self._attr_temperature_unit = UnitOfTemperature.FAHRENHEIT + self._attr_min_temp = data.static.min_temp.value + self._attr_max_temp = data.static.max_temp.value + self._attr_temperature_unit = data.coordinator.client.get_temperature_unit @property def current_temperature(self) -> float | None: """Return the current temperature.""" - if self.coordinator.data.state.current_temperature.value == "---": - # device returns no current temperature - return None - - return float(self.coordinator.data.state.current_temperature.value) + return self.coordinator.data.state.current_temperature.value @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - return float(self.coordinator.data.state.target_temperature.value) + return self.coordinator.data.state.target_temperature.value @property def hvac_mode(self) -> HVACMode | None: @@ -126,15 +111,14 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set preset mode.""" - # only allow preset mode when hvac mode is auto - if self.hvac_mode == HVACMode.AUTO: - await self.async_set_data(preset_mode=preset_mode) - else: + if self.hvac_mode != HVACMode.AUTO and preset_mode != PRESET_NONE: raise ServiceValidationError( + "Preset mode can only be set when HVAC mode is set to 'auto'", translation_domain=DOMAIN, translation_key="set_preset_mode_error", translation_placeholders={"preset_mode": preset_mode}, ) + await self.async_set_data(preset_mode=preset_mode) async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperatures.""" @@ -148,11 +132,11 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity): if ATTR_HVAC_MODE in kwargs: data[ATTR_HVAC_MODE] = kwargs[ATTR_HVAC_MODE] if ATTR_PRESET_MODE in kwargs: - # If preset mode is None, set hvac to auto - if kwargs[ATTR_PRESET_MODE] == PRESET_NONE: - data[ATTR_HVAC_MODE] = HVACMode.AUTO - else: - data[ATTR_HVAC_MODE] = kwargs[ATTR_PRESET_MODE] + if kwargs[ATTR_PRESET_MODE] == PRESET_ECO: + data[ATTR_HVAC_MODE] = PRESET_ECO + elif kwargs[ATTR_PRESET_MODE] == PRESET_NONE: + data[ATTR_HVAC_MODE] = PRESET_NONE + try: await self.coordinator.client.thermostat(**data) except BSBLANError as err: diff --git a/homeassistant/components/bsblan/coordinator.py 
b/homeassistant/components/bsblan/coordinator.py index 3320c0f7500..be9030d95b0 100644 --- a/homeassistant/components/bsblan/coordinator.py +++ b/homeassistant/components/bsblan/coordinator.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from datetime import timedelta from random import randint -from bsblan import BSBLAN, BSBLANConnectionError, State +from bsblan import BSBLAN, BSBLANConnectionError, HotWaterState, Sensor, State from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -19,6 +19,8 @@ class BSBLanCoordinatorData: """BSBLan data stored in the Home Assistant data object.""" state: State + sensor: Sensor + dhw: HotWaterState class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]): @@ -53,7 +55,12 @@ class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]): async def _async_update_data(self) -> BSBLanCoordinatorData: """Get state and sensor data from BSB-Lan device.""" try: + # initialize the client, this is cached and will only be called once + await self.client.initialize() + state = await self.client.state() + sensor = await self.client.sensor() + dhw = await self.client.hot_water_state() except BSBLANConnectionError as err: host = self.config_entry.data[CONF_HOST] if self.config_entry else "unknown" raise UpdateFailed( @@ -61,4 +68,4 @@ class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]): ) from err self.update_interval = self._get_update_interval() - return BSBLanCoordinatorData(state=state) + return BSBLanCoordinatorData(state=state, sensor=sensor, dhw=dhw) diff --git a/homeassistant/components/bsblan/diagnostics.py b/homeassistant/components/bsblan/diagnostics.py index 3b42d47e1d3..5a8e5c1c4c5 100644 --- a/homeassistant/components/bsblan/diagnostics.py +++ b/homeassistant/components/bsblan/diagnostics.py @@ -4,21 +4,23 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import BSBLanData -from .const import DOMAIN +from . 
import BSBLanConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: BSBLanConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data: BSBLanData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data return { "info": data.info.to_dict(), "device": data.device.to_dict(), - "state": data.coordinator.data.state.to_dict(), + "coordinator_data": { + "state": data.coordinator.data.state.to_dict(), + "sensor": data.coordinator.data.sensor.to_dict(), + }, + "static": data.static.to_dict(), } diff --git a/homeassistant/components/bsblan/entity.py b/homeassistant/components/bsblan/entity.py index 0c507938794..252c397f4f2 100644 --- a/homeassistant/components/bsblan/entity.py +++ b/homeassistant/components/bsblan/entity.py @@ -22,10 +22,10 @@ class BSBLanEntity(CoordinatorEntity[BSBLanUpdateCoordinator]): def __init__(self, coordinator: BSBLanUpdateCoordinator, data: BSBLanData) -> None: """Initialize BSBLan entity.""" super().__init__(coordinator, data) - host = self.coordinator.config_entry.data["host"] - mac = self.coordinator.config_entry.data["mac"] + host = coordinator.config_entry.data["host"] + mac = data.device.MAC self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, data.device.MAC)}, + identifiers={(DOMAIN, mac)}, connections={(CONNECTION_NETWORK_MAC, format_mac(mac))}, name=data.device.name, manufacturer="BSBLAN Inc.", diff --git a/homeassistant/components/bsblan/manifest.json b/homeassistant/components/bsblan/manifest.json index 6cd8608c42d..aa9c03abf4a 100644 --- a/homeassistant/components/bsblan/manifest.json +++ b/homeassistant/components/bsblan/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["bsblan"], - "requirements": ["python-bsblan==0.6.2"] + "requirements": ["python-bsblan==1.2.1"] } diff --git a/homeassistant/components/bsblan/sensor.py b/homeassistant/components/bsblan/sensor.py new file mode 100644 index 00000000000..c13b4ad7650 --- /dev/null +++ b/homeassistant/components/bsblan/sensor.py @@ -0,0 +1,80 @@ +"""Support for BSB-Lan sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import BSBLanConfigEntry, BSBLanData +from .coordinator import BSBLanCoordinatorData +from .entity import BSBLanEntity + + +@dataclass(frozen=True, kw_only=True) +class BSBLanSensorEntityDescription(SensorEntityDescription): + """Describes BSB-Lan sensor entity.""" + + value_fn: Callable[[BSBLanCoordinatorData], StateType] + + +SENSOR_TYPES: tuple[BSBLanSensorEntityDescription, ...] 
= ( + BSBLanSensorEntityDescription( + key="current_temperature", + translation_key="current_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.sensor.current_temperature.value, + ), + BSBLanSensorEntityDescription( + key="outside_temperature", + translation_key="outside_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.sensor.outside_temperature.value, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: BSBLanConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up BSB-Lan sensor based on a config entry.""" + data = entry.runtime_data + async_add_entities(BSBLanSensor(data, description) for description in SENSOR_TYPES) + + +class BSBLanSensor(BSBLanEntity, SensorEntity): + """Defines a BSB-Lan sensor.""" + + entity_description: BSBLanSensorEntityDescription + + def __init__( + self, + data: BSBLanData, + description: BSBLanSensorEntityDescription, + ) -> None: + """Initialize BSB-Lan sensor.""" + super().__init__(data.coordinator, data) + self.entity_description = description + self._attr_unique_id = f"{data.device.MAC}-{description.key}" + self._attr_temperature_unit = data.coordinator.client.get_temperature_unit + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/bsblan/strings.json b/homeassistant/components/bsblan/strings.json index 7a67d353803..a73a89ca1cc 100644 --- a/homeassistant/components/bsblan/strings.json +++ b/homeassistant/components/bsblan/strings.json @@ -31,6 +31,22 @@ }, "set_data_error": { "message": "An error occurred while sending the data to the BSBLAN device" + }, + "set_temperature_error": { + "message": "An error occurred while setting the temperature" + }, + "set_operation_mode_error": { + "message": "An error occurred while setting the operation mode" + } + }, + "entity": { + "sensor": { + "current_temperature": { + "name": "Current Temperature" + }, + "outside_temperature": { + "name": "Outside Temperature" + } } } } diff --git a/homeassistant/components/bsblan/water_heater.py b/homeassistant/components/bsblan/water_heater.py new file mode 100644 index 00000000000..318408a9124 --- /dev/null +++ b/homeassistant/components/bsblan/water_heater.py @@ -0,0 +1,107 @@ +"""BSBLAN platform to control a compatible Water Heater Device.""" + +from __future__ import annotations + +from typing import Any + +from bsblan import BSBLANError + +from homeassistant.components.water_heater import ( + STATE_ECO, + STATE_OFF, + WaterHeaterEntity, + WaterHeaterEntityFeature, +) +from homeassistant.const import ATTR_TEMPERATURE, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import BSBLanConfigEntry, BSBLanData +from .const import DOMAIN +from .entity import BSBLanEntity + +PARALLEL_UPDATES = 1 + +# Mapping between BSBLan and HA operation modes +OPERATION_MODES = { + "Eco": STATE_ECO, # Energy saving mode + "Off": STATE_OFF, # Protection mode + "On": STATE_ON, # Continuous comfort mode +} + +OPERATION_MODES_REVERSE = {v: k for k, v in OPERATION_MODES.items()} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: BSBLanConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up BSBLAN water heater based on a config entry.""" + data = entry.runtime_data + async_add_entities([BSBLANWaterHeater(data)]) + + +class BSBLANWaterHeater(BSBLanEntity, WaterHeaterEntity): + """Defines a BSBLAN water heater entity.""" + + _attr_name = None + _attr_supported_features = ( + WaterHeaterEntityFeature.TARGET_TEMPERATURE + | WaterHeaterEntityFeature.OPERATION_MODE + ) + + def __init__(self, data: BSBLanData) -> None: + """Initialize BSBLAN water heater.""" + super().__init__(data.coordinator, data) + self._attr_unique_id = format_mac(data.device.MAC) + self._attr_operation_list = list(OPERATION_MODES_REVERSE.keys()) + + # Set temperature limits based on device capabilities + self._attr_temperature_unit = data.coordinator.client.get_temperature_unit + self._attr_min_temp = data.coordinator.data.dhw.reduced_setpoint.value + self._attr_max_temp = data.coordinator.data.dhw.nominal_setpoint_max.value + + @property + def current_operation(self) -> str | None: + """Return current operation.""" + current_mode = self.coordinator.data.dhw.operating_mode.desc + return OPERATION_MODES.get(current_mode) + + @property + def current_temperature(self) -> float | None: + """Return the current temperature.""" + return self.coordinator.data.dhw.dhw_actual_value_top_temperature.value + + @property + def target_temperature(self) -> float | None: + """Return the temperature we try to reach.""" + return self.coordinator.data.dhw.nominal_setpoint.value + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + temperature = kwargs.get(ATTR_TEMPERATURE) + try: + await self.coordinator.client.set_hot_water(nominal_setpoint=temperature) + except BSBLANError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_temperature_error", + ) from err + + await self.coordinator.async_request_refresh() + + async def async_set_operation_mode(self, operation_mode: str) -> None: + """Set new operation mode.""" + bsblan_mode = OPERATION_MODES_REVERSE.get(operation_mode) + try: + await self.coordinator.client.set_hot_water(operating_mode=bsblan_mode) + except BSBLANError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_operation_mode_error", + ) from err + + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/bt_home_hub_5/device_tracker.py b/homeassistant/components/bt_home_hub_5/device_tracker.py index 60ded009d5f..cbd06381578 100644 --- a/homeassistant/components/bt_home_hub_5/device_tracker.py +++ b/homeassistant/components/bt_home_hub_5/device_tracker.py @@ -8,7 +8,7 @@ import bthomehub5_devicelist import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -30,7 +30,7 @@ def get_scanner( hass: HomeAssistant, config: ConfigType ) -> BTHomeHub5DeviceScanner | None: """Return a BT Home Hub 5 scanner if 
successful.""" - scanner = BTHomeHub5DeviceScanner(config[DOMAIN]) + scanner = BTHomeHub5DeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None @@ -41,7 +41,6 @@ class BTHomeHub5DeviceScanner(DeviceScanner): def __init__(self, config): """Initialise the scanner.""" - _LOGGER.info("Initialising BT Home Hub 5") self.host = config[CONF_HOST] self.last_results = {} @@ -69,7 +68,7 @@ class BTHomeHub5DeviceScanner(DeviceScanner): def update_info(self): """Ensure the information from the BT Home Hub 5 is up to date.""" - _LOGGER.info("Scanning") + _LOGGER.debug("Scanning") data = bthomehub5_devicelist.get_devicelist(self.host) diff --git a/homeassistant/components/bt_home_hub_5/manifest.json b/homeassistant/components/bt_home_hub_5/manifest.json index c2d708d9a02..e260d443dc7 100644 --- a/homeassistant/components/bt_home_hub_5/manifest.json +++ b/homeassistant/components/bt_home_hub_5/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bt_home_hub_5", "iot_class": "local_polling", "loggers": ["bthomehub5_devicelist"], + "quality_scale": "legacy", "requirements": ["bthomehub5-devicelist==0.1.1"] } diff --git a/homeassistant/components/bt_smarthub/device_tracker.py b/homeassistant/components/bt_smarthub/device_tracker.py index 4b52f38ff31..29f60bd317f 100644 --- a/homeassistant/components/bt_smarthub/device_tracker.py +++ b/homeassistant/components/bt_smarthub/device_tracker.py @@ -9,7 +9,7 @@ from btsmarthub_devicelist import BTSmartHub import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -33,7 +33,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> BTSmartHubScanner | None: """Return a BT Smart Hub scanner if successful.""" - info = config[DOMAIN] + info = config[DEVICE_TRACKER_DOMAIN] smarthub_client = BTSmartHub( router_ip=info[CONF_HOST], smarthub_model=info.get(CONF_SMARTHUB_MODEL) ) @@ -67,7 +67,7 @@ class BTSmartHubScanner(DeviceScanner): if self.get_bt_smarthub_data(): self.success_init = True else: - _LOGGER.info("Failed to connect to %s", self.smarthub.router_ip) + _LOGGER.warning("Failed to connect to %s", self.smarthub.router_ip) def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" @@ -88,7 +88,7 @@ class BTSmartHubScanner(DeviceScanner): if not self.success_init: return - _LOGGER.info("Scanning") + _LOGGER.debug("Scanning") if not (data := self.get_bt_smarthub_data()): _LOGGER.warning("Error scanning devices") return diff --git a/homeassistant/components/bt_smarthub/manifest.json b/homeassistant/components/bt_smarthub/manifest.json index 8f2dc631e80..31dd99a493f 100644 --- a/homeassistant/components/bt_smarthub/manifest.json +++ b/homeassistant/components/bt_smarthub/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bt_smarthub", "iot_class": "local_polling", "loggers": ["btsmarthub_devicelist"], + "quality_scale": "legacy", "requirements": ["btsmarthub-devicelist==0.2.3"] } diff --git a/homeassistant/components/bthome/config_flow.py b/homeassistant/components/bthome/config_flow.py index 5a3d90f1355..24fdddf2cc7 100644 --- a/homeassistant/components/bthome/config_flow.py +++ b/homeassistant/components/bthome/config_flow.py @@ -15,7 +15,7 @@ from homeassistant.components.bluetooth import ( BluetoothServiceInfoBleak, 
async_discovered_service_info, ) -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ADDRESS from .const import DOMAIN @@ -161,9 +161,6 @@ class BTHomeConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow initialized by a reauth event.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry is not None - device: DeviceData = entry_data["device"] self._discovered_device = device @@ -182,10 +179,10 @@ class BTHomeConfigFlow(ConfigFlow, domain=DOMAIN): if bindkey: data["bindkey"] = bindkey - if entry_id := self.context.get("entry_id"): - entry = self.hass.config_entries.async_get_entry(entry_id) - assert entry is not None - return self.async_update_reload_and_abort(entry, data=data) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) return self.async_create_entry( title=self.context["title_placeholders"]["name"], diff --git a/homeassistant/components/bthome/device_trigger.py b/homeassistant/components/bthome/device_trigger.py index c49664b1146..d60089a9bf5 100644 --- a/homeassistant/components/bthome/device_trigger.py +++ b/homeassistant/components/bthome/device_trigger.py @@ -2,11 +2,14 @@ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, + InvalidDeviceAutomationConfig, +) from homeassistant.components.homeassistant.triggers import event as event_trigger from homeassistant.const import ( CONF_DEVICE_ID, @@ -31,7 +34,7 @@ from .const import ( EVENT_TYPE, ) -TRIGGERS_BY_EVENT_CLASS = { +EVENT_TYPES_BY_EVENT_CLASS = { EVENT_CLASS_BUTTON: { "press", "double_press", @@ -43,54 +46,71 @@ TRIGGERS_BY_EVENT_CLASS = { EVENT_CLASS_DIMMER: {"rotate_left", "rotate_right"}, } -SCHEMA_BY_EVENT_CLASS = { - EVENT_CLASS_BUTTON: DEVICE_TRIGGER_BASE_SCHEMA.extend( - { - vol.Required(CONF_TYPE): vol.In([EVENT_CLASS_BUTTON]), - vol.Required(CONF_SUBTYPE): vol.In( - TRIGGERS_BY_EVENT_CLASS[EVENT_CLASS_BUTTON] - ), - } - ), - EVENT_CLASS_DIMMER: DEVICE_TRIGGER_BASE_SCHEMA.extend( - { - vol.Required(CONF_TYPE): vol.In([EVENT_CLASS_DIMMER]), - vol.Required(CONF_SUBTYPE): vol.In( - TRIGGERS_BY_EVENT_CLASS[EVENT_CLASS_DIMMER] - ), - } - ), -} +TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend( + {vol.Required(CONF_TYPE): str, vol.Required(CONF_SUBTYPE): str} +) + + +def get_event_classes_by_device_id(hass: HomeAssistant, device_id: str) -> list[str]: + """Get the supported event classes for a device. + + Events for BTHome BLE devices are dynamically discovered + and stored in the device config entry when they are first seen. 
+ """ + device_registry = dr.async_get(hass) + device = device_registry.async_get(device_id) + if TYPE_CHECKING: + assert device is not None + + config_entries = [ + hass.config_entries.async_get_entry(entry_id) + for entry_id in device.config_entries + ] + bthome_config_entry = next( + entry for entry in config_entries if entry and entry.domain == DOMAIN + ) + return bthome_config_entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, []) + + +def get_event_types_by_event_class(event_class: str) -> set[str]: + """Get the supported event types for an event class. + + If the device has multiple buttons they will have + event classes like button_1 button_2, button_3, etc + but if there is only one button then it will be + button without a number postfix. + """ + return EVENT_TYPES_BY_EVENT_CLASS.get(event_class.split("_")[0], set()) async def async_validate_trigger_config( hass: HomeAssistant, config: ConfigType ) -> ConfigType: """Validate trigger config.""" - return SCHEMA_BY_EVENT_CLASS.get(config[CONF_TYPE], DEVICE_TRIGGER_BASE_SCHEMA)( # type: ignore[no-any-return] - config - ) + config = TRIGGER_SCHEMA(config) + event_class = config[CONF_TYPE] + event_type = config[CONF_SUBTYPE] + device_id = config[CONF_DEVICE_ID] + event_classes = get_event_classes_by_device_id(hass, device_id) + + if event_class not in event_classes: + raise InvalidDeviceAutomationConfig( + f"BTHome trigger {event_class} is not valid for device_id '{device_id}'" + ) + + if event_type not in get_event_types_by_event_class(event_class): + raise InvalidDeviceAutomationConfig( + f"BTHome trigger {event_type} is not valid for device_id '{device_id}'" + ) + + return config async def async_get_triggers( hass: HomeAssistant, device_id: str ) -> list[dict[str, Any]]: """Return a list of triggers for BTHome BLE devices.""" - device_registry = dr.async_get(hass) - device = device_registry.async_get(device_id) - assert device is not None - config_entries = [ - hass.config_entries.async_get_entry(entry_id) - for entry_id in device.config_entries - ] - bthome_config_entry = next( - iter(entry for entry in config_entries if entry and entry.domain == DOMAIN), - None, - ) - assert bthome_config_entry is not None - event_classes: list[str] = bthome_config_entry.data.get( - CONF_DISCOVERED_EVENT_CLASSES, [] - ) + event_classes = get_event_classes_by_device_id(hass, device_id) return [ { # Required fields of TRIGGER_BASE_SCHEMA @@ -102,14 +122,7 @@ async def async_get_triggers( CONF_SUBTYPE: event_type, } for event_class in event_classes - for event_type in TRIGGERS_BY_EVENT_CLASS.get( - event_class.split("_")[0], - # If the device has multiple buttons they will have - # event classes like button_1 button_2, button_3, etc - # but if there is only one button then it will be - # button without a number postfix. 
- (), - ) + for event_type in get_event_types_by_event_class(event_class) ] diff --git a/homeassistant/components/bthome/manifest.json b/homeassistant/components/bthome/manifest.json index 42fbe794918..ad06f648d14 100644 --- a/homeassistant/components/bthome/manifest.json +++ b/homeassistant/components/bthome/manifest.json @@ -15,7 +15,7 @@ "service_data_uuid": "0000fcd2-0000-1000-8000-00805f9b34fb" } ], - "codeowners": ["@Ernst79"], + "codeowners": ["@Ernst79", "@thecode"], "config_flow": true, "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/bthome", diff --git a/homeassistant/components/bthome/sensor.py b/homeassistant/components/bthome/sensor.py index 64e6d61cefb..417df9f5068 100644 --- a/homeassistant/components/bthome/sensor.py +++ b/homeassistant/components/bthome/sensor.py @@ -364,7 +364,7 @@ SENSOR_DESCRIPTIONS = { ): SensorEntityDescription( key=f"{BTHomeSensorDeviceClass.CONDUCTIVITY}_{Units.CONDUCTIVITY}", device_class=SensorDeviceClass.CONDUCTIVITY, - native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS, + native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM, state_class=SensorStateClass.MEASUREMENT, ), } diff --git a/homeassistant/components/buienradar/__init__.py b/homeassistant/components/buienradar/__init__.py index 3bf593b2dab..bea0102be40 100644 --- a/homeassistant/components/buienradar/__init__.py +++ b/homeassistant/components/buienradar/__init__.py @@ -6,25 +6,26 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN +from .util import BrData PLATFORMS = [Platform.CAMERA, Platform.SENSOR, Platform.WEATHER] +type BuienRadarConfigEntry = ConfigEntry[dict[Platform, BrData]] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: BuienRadarConfigEntry) -> bool: """Set up buienradar from a config entry.""" - hass.data.setdefault(DOMAIN, {}).setdefault(entry.entry_id, {}) + entry.runtime_data = {} await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(async_update_options)) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BuienRadarConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - entry_data = hass.data[DOMAIN].pop(entry.entry_id) for platform in PLATFORMS: - if (data := entry_data.get(platform)) and ( + if (data := entry.runtime_data.get(platform)) and ( unsub := data.unsub_schedule_update ): unsub() @@ -32,6 +33,8 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -async def async_update_options(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def async_update_options( + hass: HomeAssistant, config_entry: BuienRadarConfigEntry +) -> None: """Update options.""" await hass.config_entries.async_reload(config_entry.entry_id) diff --git a/homeassistant/components/buienradar/camera.py b/homeassistant/components/buienradar/camera.py index e9a7d2517cb..45ff2d6de52 100644 --- a/homeassistant/components/buienradar/camera.py +++ b/homeassistant/components/buienradar/camera.py @@ -10,13 +10,13 @@ import aiohttp import voluptuous as vol from homeassistant.components.camera import Camera -from 
homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_COUNTRY_CODE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util +from . import BuienRadarConfigEntry from .const import CONF_DELTA, DEFAULT_COUNTRY, DEFAULT_DELTA, DEFAULT_DIMENSION _LOGGER = logging.getLogger(__name__) @@ -29,7 +29,9 @@ SUPPORTED_COUNTRY_CODES = ["NL", "BE"] async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BuienRadarConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up buienradar radar-loop camera component.""" config = entry.data diff --git a/homeassistant/components/buienradar/const.py b/homeassistant/components/buienradar/const.py index c82970ed318..fd92afd59b0 100644 --- a/homeassistant/components/buienradar/const.py +++ b/homeassistant/components/buienradar/const.py @@ -2,6 +2,7 @@ DOMAIN = "buienradar" +DEFAULT_TIMEOUT = 60 DEFAULT_TIMEFRAME = 60 DEFAULT_DIMENSION = 700 diff --git a/homeassistant/components/buienradar/sensor.py b/homeassistant/components/buienradar/sensor.py index 69c762c1bc1..712f765237e 100644 --- a/homeassistant/components/buienradar/sensor.py +++ b/homeassistant/components/buienradar/sensor.py @@ -28,7 +28,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_LATITUDE, @@ -49,10 +48,10 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util +from . import BuienRadarConfigEntry from .const import ( CONF_TIMEFRAME, DEFAULT_TIMEFRAME, - DOMAIN, STATE_CONDITION_CODES, STATE_CONDITIONS, STATE_DETAILED_CONDITIONS, @@ -690,7 +689,9 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BuienRadarConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Create the buienradar sensor.""" config = entry.data @@ -723,7 +724,7 @@ async def async_setup_entry( # create weather data: data = BrData(hass, coordinates, timeframe, entities) - hass.data[DOMAIN][entry.entry_id][Platform.SENSOR] = data + entry.runtime_data[Platform.SENSOR] = data await data.async_update() async_add_entities(entities) @@ -741,6 +742,7 @@ class BrSensor(SensorEntity): ) -> None: """Initialize the sensor.""" self.entity_description = description + self._data: BrData | None = None self._measured = None self._attr_unique_id = ( f"{coordinates[CONF_LATITUDE]:2.6f}{coordinates[CONF_LONGITUDE]:2.6f}" @@ -755,17 +757,29 @@ class BrSensor(SensorEntity): if description.key.startswith(PRECIPITATION_FORECAST): self._timeframe = None + async def async_added_to_hass(self) -> None: + """Handle entity being added to hass.""" + if self._data is None: + return + self._update() + @callback def data_updated(self, data: BrData): - """Update data.""" - if self._load_data(data.data) and self.hass: + """Handle data update.""" + self._data = data + if not self.hass: + return + self._update() + + def _update(self): + """Update sensor data.""" + _LOGGER.debug("Updating sensor %s", self.entity_id) + if self._load_data(self._data.data): self.async_write_ha_state() @callback def _load_data(self, data): # noqa: C901 """Load the sensor with relevant data.""" - # Find sensor - # Check if we have a new measurement, # otherwise we do not have to update the sensor if self._measured == data.get(MEASURED): @@ -888,7 +902,7 @@ class BrSensor(SensorEntity): if sensor_type.startswith(PRECIPITATION_FORECAST): result = {ATTR_ATTRIBUTION: data.get(ATTRIBUTION)} if self._timeframe is not None: - result[TIMEFRAME_LABEL] = "%d min" % (self._timeframe) + result[TIMEFRAME_LABEL] = f"{self._timeframe} min" self._attr_extra_state_attributes = result diff --git a/homeassistant/components/buienradar/util.py b/homeassistant/components/buienradar/util.py index b641644cebe..a7267320de3 100644 --- a/homeassistant/components/buienradar/util.py +++ b/homeassistant/components/buienradar/util.py @@ -1,9 +1,9 @@ """Shared utilities for different supported platforms.""" -from asyncio import timeout from datetime import datetime, timedelta from http import HTTPStatus import logging +from typing import Any import aiohttp from buienradar.buienradar import parse_data @@ -27,12 +27,12 @@ from buienradar.constants import ( from buienradar.urls import JSON_FEED_URL, json_precipitation_forecast_url from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE -from homeassistant.core import CALLBACK_TYPE, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util import dt as dt_util -from .const import SCHEDULE_NOK, SCHEDULE_OK +from .const import DEFAULT_TIMEOUT, SCHEDULE_NOK, SCHEDULE_OK __all__ = ["BrData"] _LOGGER = logging.getLogger(__name__) @@ -59,10 +59,10 @@ class BrData: load_error_count: int = WARN_THRESHOLD rain_error_count: int = WARN_THRESHOLD - def __init__(self, hass, coordinates, timeframe, devices): + def __init__(self, hass: HomeAssistant, coordinates, timeframe, devices) -> None: """Initialize the data 
object.""" self.devices = devices - self.data = {} + self.data: dict[str, Any] | None = {} self.hass = hass self.coordinates = coordinates self.timeframe = timeframe @@ -93,15 +93,15 @@ class BrData: resp = None try: websession = async_get_clientsession(self.hass) - async with timeout(10): - resp = await websession.get(url) - + async with websession.get( + url, timeout=aiohttp.ClientTimeout(total=DEFAULT_TIMEOUT) + ) as resp: result[STATUS_CODE] = resp.status result[CONTENT] = await resp.text() if resp.status == HTTPStatus.OK: result[SUCCESS] = True else: - result[MESSAGE] = "Got http statuscode: %d" % (resp.status) + result[MESSAGE] = f"Got http statuscode: {resp.status}" return result except (TimeoutError, aiohttp.ClientError) as err: diff --git a/homeassistant/components/buienradar/weather.py b/homeassistant/components/buienradar/weather.py index 02e1f444c9c..8b71032bace 100644 --- a/homeassistant/components/buienradar/weather.py +++ b/homeassistant/components/buienradar/weather.py @@ -39,7 +39,6 @@ from homeassistant.components.weather import ( WeatherEntity, WeatherEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -54,8 +53,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -# Reuse data and API logic from the sensor implementation -from .const import DEFAULT_TIMEFRAME, DOMAIN +from . import BuienRadarConfigEntry +from .const import DEFAULT_TIMEFRAME from .util import BrData _LOGGER = logging.getLogger(__name__) @@ -93,7 +92,9 @@ CONDITION_MAP = { async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BuienRadarConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the buienradar platform.""" config = entry.data @@ -113,7 +114,7 @@ async def async_setup_entry( # create weather data: data = BrData(hass, coordinates, DEFAULT_TIMEFRAME, entities) - hass.data[DOMAIN][entry.entry_id][Platform.WEATHER] = data + entry.runtime_data[Platform.WEATHER] = data await data.async_update() async_add_entities(entities) @@ -130,7 +131,7 @@ class BrWeather(WeatherEntity): _attr_should_poll = False _attr_supported_features = WeatherEntityFeature.FORECAST_DAILY - def __init__(self, config, coordinates): + def __init__(self, config, coordinates) -> None: """Initialize the platform with a data instance and station name.""" self._stationname = config.get(CONF_NAME, "Buienradar") self._attr_name = self._stationname or f"BR {'(unknown station)'}" diff --git a/homeassistant/components/button/__init__.py b/homeassistant/components/button/__init__.py index 3955fabdf00..14dc09ca33e 100644 --- a/homeassistant/components/button/__init__.py +++ b/homeassistant/components/button/__init__.py @@ -4,10 +4,10 @@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import cached_property import logging from typing import final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -19,11 +19,13 @@ from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util +from homeassistant.util.hass_dict import HassKey from .const import DOMAIN, 
SERVICE_PRESS _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[ButtonEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -47,7 +49,7 @@ DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.Coerce(ButtonDeviceClass)) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Button entities.""" - component = hass.data[DOMAIN] = EntityComponent[ButtonEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[ButtonEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -63,14 +65,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[ButtonEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[ButtonEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class ButtonEntityDescription(EntityDescription, frozen_or_thawed=True): diff --git a/homeassistant/components/button/icons.json b/homeassistant/components/button/icons.json index 71956124d7f..1364fb2d056 100644 --- a/homeassistant/components/button/icons.json +++ b/homeassistant/components/button/icons.json @@ -14,6 +14,8 @@ } }, "services": { - "press": "mdi:gesture-tap-button" + "press": { + "service": "mdi:gesture-tap-button" + } } } diff --git a/homeassistant/components/caldav/__init__.py b/homeassistant/components/caldav/__init__.py index 3111460e968..1d50e6d309a 100644 --- a/homeassistant/components/caldav/__init__.py +++ b/homeassistant/components/caldav/__init__.py @@ -17,7 +17,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from .const import DOMAIN +type CalDavConfigEntry = ConfigEntry[caldav.DAVClient] _LOGGER = logging.getLogger(__name__) @@ -25,16 +25,14 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS: list[Platform] = [Platform.CALENDAR, Platform.TODO] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: CalDavConfigEntry) -> bool: """Set up CalDAV from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - client = caldav.DAVClient( entry.data[CONF_URL], username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], ssl_verify_cert=entry.data[CONF_VERIFY_SSL], - timeout=10, + timeout=30, ) try: await hass.async_add_executor_job(client.principal) @@ -50,7 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except DAVError as err: raise ConfigEntryNotReady("CalDAV client error") from err - hass.data[DOMAIN][entry.entry_id] = client + entry.runtime_data = client await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/caldav/calendar.py b/homeassistant/components/caldav/calendar.py index 7591722b1ab..fb53947a723 100644 --- a/homeassistant/components/caldav/calendar.py +++ b/homeassistant/components/caldav/calendar.py @@ -15,7 +15,6 @@ from homeassistant.components.calendar import ( 
CalendarEvent, is_offset_reached, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_NAME, CONF_PASSWORD, @@ -30,8 +29,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . import CalDavConfigEntry from .api import async_get_calendars -from .const import DOMAIN from .coordinator import CalDavUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -110,6 +109,7 @@ async def async_setup_platform( entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass) coordinator = CalDavUpdateCoordinator( hass, + None, calendar=calendar, days=days, include_all_day=True, @@ -127,6 +127,7 @@ async def async_setup_platform( entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass) coordinator = CalDavUpdateCoordinator( hass, + None, calendar=calendar, days=days, include_all_day=False, @@ -141,12 +142,11 @@ async def async_setup_platform( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CalDavConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the CalDav calendar platform for a config entry.""" - client: caldav.DAVClient = hass.data[DOMAIN][entry.entry_id] - calendars = await async_get_calendars(hass, client, SUPPORTED_COMPONENT) + calendars = await async_get_calendars(hass, entry.runtime_data, SUPPORTED_COMPONENT) async_add_entities( ( WebDavCalendarEntity( @@ -154,6 +154,7 @@ async def async_setup_entry( async_generate_entity_id(ENTITY_ID_FORMAT, calendar.name, hass=hass), CalDavUpdateCoordinator( hass, + entry, calendar=calendar, days=CONFIG_ENTRY_DEFAULT_DAYS, include_all_day=True, @@ -206,7 +207,8 @@ class WebDavCalendarEntity(CoordinatorEntity[CalDavUpdateCoordinator], CalendarE if self._supports_offset: self._attr_extra_state_attributes = { "offset_reached": is_offset_reached( - self._event.start_datetime_local, self.coordinator.offset + self._event.start_datetime_local, + self.coordinator.offset, # type: ignore[arg-type] ) if self._event else False diff --git a/homeassistant/components/caldav/config_flow.py b/homeassistant/components/caldav/config_flow.py index 9e1d1098f45..26f758953f2 100644 --- a/homeassistant/components/caldav/config_flow.py +++ b/homeassistant/components/caldav/config_flow.py @@ -9,7 +9,7 @@ from caldav.lib.error import AuthorizationError, DAVError import requests import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL from homeassistant.helpers import config_validation as cv @@ -32,7 +32,6 @@ class CalDavConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for caldav.""" VERSION = 1 - _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -91,9 +90,6 @@ class CalDavConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -101,22 +97,18 @@ class CalDavConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm 
reauth dialog.""" errors = {} - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input = {**self._reauth_entry.data, **user_input} + user_input = {**reauth_entry.data, **user_input} if error := await self._test_connection(user_input): errors["base"] = error else: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input - ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=user_input) return self.async_show_form( description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME], + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], }, step_id="reauth_confirm", data_schema=vol.Schema( diff --git a/homeassistant/components/caldav/const.py b/homeassistant/components/caldav/const.py index 7a94a74c7a1..2efbff8b5a0 100644 --- a/homeassistant/components/caldav/const.py +++ b/homeassistant/components/caldav/const.py @@ -1,4 +1,4 @@ -"""Constands for CalDAV.""" +"""Constants for CalDAV.""" from typing import Final diff --git a/homeassistant/components/caldav/coordinator.py b/homeassistant/components/caldav/coordinator.py index 3a10b567167..eb09e3f5452 100644 --- a/homeassistant/components/caldav/coordinator.py +++ b/homeassistant/components/caldav/coordinator.py @@ -6,6 +6,9 @@ from datetime import date, datetime, time, timedelta from functools import partial import logging import re +from typing import TYPE_CHECKING + +import caldav from homeassistant.components.calendar import CalendarEvent, extract_offset from homeassistant.core import HomeAssistant @@ -14,6 +17,9 @@ from homeassistant.util import dt as dt_util from .api import get_attr_value +if TYPE_CHECKING: + from . import CalDavConfigEntry + _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15) @@ -23,11 +29,20 @@ OFFSET = "!!" 
class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]): """Class to utilize the calendar dav client object to get next event.""" - def __init__(self, hass, calendar, days, include_all_day, search): + def __init__( + self, + hass: HomeAssistant, + entry: CalDavConfigEntry | None, + calendar: caldav.Calendar, + days: int, + include_all_day: bool, + search: str | None, + ) -> None: """Set up how we are going to search the WebDav calendar.""" super().__init__( hass, _LOGGER, + config_entry=entry, name=f"CalDAV {calendar.name}", update_interval=MIN_TIME_BETWEEN_UPDATES, ) @@ -35,7 +50,7 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]): self.days = days self.include_all_day = include_all_day self.search = search - self.offset = None + self.offset: timedelta | None = None async def async_get_events( self, hass: HomeAssistant, start_date: datetime, end_date: datetime @@ -109,7 +124,7 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]): _start_of_tomorrow = start_of_tomorrow if _start_of_today <= start_dt < _start_of_tomorrow: new_event = event.copy() - new_vevent = new_event.instance.vevent + new_vevent = new_event.instance.vevent # type: ignore[attr-defined] if hasattr(new_vevent, "dtend"): dur = new_vevent.dtend.value - new_vevent.dtstart.value new_vevent.dtend.value = start_dt + dur diff --git a/homeassistant/components/caldav/todo.py b/homeassistant/components/caldav/todo.py index e8cd4fc9334..cbd7963b595 100644 --- a/homeassistant/components/caldav/todo.py +++ b/homeassistant/components/caldav/todo.py @@ -18,14 +18,13 @@ from homeassistant.components.todo import ( TodoListEntity, TodoListEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util +from . 
import CalDavConfigEntry from .api import async_get_calendars, get_attr_value -from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -46,12 +45,11 @@ TODO_STATUS_MAP_INV: dict[TodoItemStatus, str] = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CalDavConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the CalDav todo platform for a config entry.""" - client: caldav.DAVClient = hass.data[DOMAIN][entry.entry_id] - calendars = await async_get_calendars(hass, client, SUPPORTED_COMPONENT) + calendars = await async_get_calendars(hass, entry.runtime_data, SUPPORTED_COMPONENT) async_add_entities( ( WebDavTodoListEntity( diff --git a/homeassistant/components/calendar/__init__.py b/homeassistant/components/calendar/__init__.py index b94a6eb935f..40d6952fa64 100644 --- a/homeassistant/components/calendar/__init__.py +++ b/homeassistant/components/calendar/__init__.py @@ -16,8 +16,11 @@ from dateutil.rrule import rrulestr import voluptuous as vol from homeassistant.components import frontend, http, websocket_api -from homeassistant.components.websocket_api import ERR_NOT_FOUND, ERR_NOT_SUPPORTED -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ( + ERR_NOT_FOUND, + ERR_NOT_SUPPORTED, + ActiveConnection, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import ( @@ -30,7 +33,7 @@ from homeassistant.core import ( ) from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.event import async_track_point_in_time from homeassistant.helpers.template import DATE_STR_FORMAT @@ -40,6 +43,8 @@ from homeassistant.util.json import JsonValueType from .const import ( CONF_EVENT, + DATA_COMPONENT, + DOMAIN, EVENT_DESCRIPTION, EVENT_DURATION, EVENT_END, @@ -67,7 +72,6 @@ from .const import ( _LOGGER = logging.getLogger(__name__) -DOMAIN = "calendar" ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -282,7 +286,7 @@ SERVICE_GET_EVENTS_SCHEMA: Final = vol.All( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for calendars.""" - component = hass.data[DOMAIN] = EntityComponent[CalendarEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[CalendarEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -315,14 +319,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[CalendarEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[CalendarEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) def get_date(date: dict[str, Any]) -> datetime.datetime: @@ -481,9 +483,15 @@ def is_offset_reached( return start + offset_time <= 
dt_util.now(start.tzinfo) +class CalendarEntityDescription(EntityDescription, frozen_or_thawed=True): + """A class that describes calendar entities.""" + + class CalendarEntity(Entity): """Base class for calendar event entities.""" + entity_description: CalendarEntityDescription + _entity_component_unrecorded_attributes = frozenset({"description"}) _alarm_unsubs: list[CALLBACK_TYPE] | None = None @@ -699,8 +707,7 @@ async def handle_calendar_event_create( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Handle creation of a calendar event.""" - component: EntityComponent[CalendarEntity] = hass.data[DOMAIN] - if not (entity := component.get_entity(msg["entity_id"])): + if not (entity := hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])): connection.send_error(msg["id"], ERR_NOT_FOUND, "Entity not found") return @@ -740,8 +747,7 @@ async def handle_calendar_event_delete( ) -> None: """Handle delete of a calendar event.""" - component: EntityComponent[CalendarEntity] = hass.data[DOMAIN] - if not (entity := component.get_entity(msg["entity_id"])): + if not (entity := hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])): connection.send_error(msg["id"], ERR_NOT_FOUND, "Entity not found") return @@ -786,8 +792,7 @@ async def handle_calendar_event_update( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Handle creation of a calendar event.""" - component: EntityComponent[CalendarEntity] = hass.data[DOMAIN] - if not (entity := component.get_entity(msg["entity_id"])): + if not (entity := hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])): connection.send_error(msg["id"], ERR_NOT_FOUND, "Entity not found") return diff --git a/homeassistant/components/calendar/const.py b/homeassistant/components/calendar/const.py index e667510325b..821fe24c383 100644 --- a/homeassistant/components/calendar/const.py +++ b/homeassistant/components/calendar/const.py @@ -1,6 +1,19 @@ """Constants for calendar components.""" +from __future__ import annotations + from enum import IntFlag +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from . import CalendarEntity + +DOMAIN = "calendar" +DATA_COMPONENT: HassKey[EntityComponent[CalendarEntity]] = HassKey(DOMAIN) CONF_EVENT = "event" diff --git a/homeassistant/components/calendar/icons.json b/homeassistant/components/calendar/icons.json index e4e526fe75c..a28adcf317e 100644 --- a/homeassistant/components/calendar/icons.json +++ b/homeassistant/components/calendar/icons.json @@ -9,8 +9,11 @@ } }, "services": { - "create_event": "mdi:calendar-plus", - "get_events": "mdi:calendar-month", - "list_events": "mdi:calendar-month" + "create_event": { + "service": "mdi:calendar-plus" + }, + "get_events": { + "service": "mdi:calendar-month" + } } } diff --git a/homeassistant/components/calendar/services.yaml b/homeassistant/components/calendar/services.yaml index 2e926fbdeed..9701293c0be 100644 --- a/homeassistant/components/calendar/services.yaml +++ b/homeassistant/components/calendar/services.yaml @@ -36,22 +36,6 @@ create_event: example: "Conference Room - F123, Bldg. 
002" selector: text: -list_events: - target: - entity: - domain: calendar - fields: - start_date_time: - example: "2022-03-22 20:00:00" - selector: - datetime: - end_date_time: - example: "2022-03-22 22:00:00" - selector: - datetime: - duration: - selector: - duration: get_events: target: entity: diff --git a/homeassistant/components/calendar/strings.json b/homeassistant/components/calendar/strings.json index 83a7d01d8ae..c0127c20d05 100644 --- a/homeassistant/components/calendar/strings.json +++ b/homeassistant/components/calendar/strings.json @@ -82,41 +82,23 @@ }, "end_date_time": { "name": "End time", - "description": "Returns active events before this time (exclusive). Cannot be used with 'duration'." + "description": "Returns active events before this time (exclusive). Cannot be used with Duration." }, "duration": { "name": "Duration", - "description": "Returns active events from start_date_time until the specified duration." - } - } - }, - "list_events": { - "name": "List event", - "description": "Lists events on a calendar within a time range.", - "fields": { - "start_date_time": { - "name": "[%key:component::calendar::services::get_events::fields::start_date_time::name%]", - "description": "[%key:component::calendar::services::get_events::fields::start_date_time::description%]" - }, - "end_date_time": { - "name": "[%key:component::calendar::services::get_events::fields::end_date_time::name%]", - "description": "[%key:component::calendar::services::get_events::fields::end_date_time::description%]" - }, - "duration": { - "name": "[%key:component::calendar::services::get_events::fields::duration::name%]", - "description": "[%key:component::calendar::services::get_events::fields::duration::description%]" + "description": "Returns active events from Start time for the specified duration." } } } }, "issues": { "deprecated_service_calendar_list_events": { - "title": "Detected use of deprecated action `calendar.list_events`", + "title": "Detected use of deprecated action calendar.list_events", "fix_flow": { "step": { "confirm": { "title": "[%key:component::calendar::issues::deprecated_service_calendar_list_events::title%]", - "description": "Use `calendar.get_events` instead which supports multiple entities.\n\nPlease replace this action and adjust your automations and scripts and select **submit** to close this issue." + "description": "Use `calendar.get_events` instead which supports multiple entities.\n\nPlease replace this action and adjust your automations and scripts and select **Submit** to close this issue." } } } diff --git a/homeassistant/components/calendar/trigger.py b/homeassistant/components/calendar/trigger.py index 523a634704c..ca69a4b662f 100644 --- a/homeassistant/components/calendar/trigger.py +++ b/homeassistant/components/calendar/trigger.py @@ -23,7 +23,8 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util -from . import DOMAIN, CalendarEntity, CalendarEvent +from . 
import CalendarEntity, CalendarEvent +from .const import DATA_COMPONENT, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -94,7 +95,7 @@ type QueuedEventFetcher = Callable[[Timespan], Awaitable[list[QueuedCalendarEven def get_entity(hass: HomeAssistant, entity_id: str) -> CalendarEntity: """Get the calendar entity for the provided entity_id.""" - component: EntityComponent[CalendarEntity] = hass.data[DOMAIN] + component: EntityComponent[CalendarEntity] = hass.data[DATA_COMPONENT] if not (entity := component.get_entity(entity_id)) or not isinstance( entity, CalendarEntity ): diff --git a/homeassistant/components/cambridge_audio/__init__.py b/homeassistant/components/cambridge_audio/__init__.py new file mode 100644 index 00000000000..8b910bb81bb --- /dev/null +++ b/homeassistant/components/cambridge_audio/__init__.py @@ -0,0 +1,69 @@ +"""The Cambridge Audio integration.""" + +from __future__ import annotations + +import asyncio +import logging + +from aiostreammagic import StreamMagicClient +from aiostreammagic.models import CallbackType + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import CONNECT_TIMEOUT, DOMAIN, STREAM_MAGIC_EXCEPTIONS + +PLATFORMS: list[Platform] = [Platform.MEDIA_PLAYER, Platform.SELECT, Platform.SWITCH] + +_LOGGER = logging.getLogger(__name__) + +type CambridgeAudioConfigEntry = ConfigEntry[StreamMagicClient] + + +async def async_setup_entry( + hass: HomeAssistant, entry: CambridgeAudioConfigEntry +) -> bool: + """Set up Cambridge Audio integration from a config entry.""" + + client = StreamMagicClient(entry.data[CONF_HOST], async_get_clientsession(hass)) + + async def _connection_update_callback( + _client: StreamMagicClient, _callback_type: CallbackType + ) -> None: + """Call when the device is notified of changes.""" + if _callback_type == CallbackType.CONNECTION: + if _client.is_connected(): + _LOGGER.warning("Reconnected to device at %s", entry.data[CONF_HOST]) + else: + _LOGGER.warning("Disconnected from device at %s", entry.data[CONF_HOST]) + + await client.register_state_update_callbacks(_connection_update_callback) + + try: + async with asyncio.timeout(CONNECT_TIMEOUT): + await client.connect() + except STREAM_MAGIC_EXCEPTIONS as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="entry_cannot_connect", + translation_placeholders={ + "host": client.host, + }, + ) from err + entry.runtime_data = client + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: CambridgeAudioConfigEntry +) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + await entry.runtime_data.disconnect() + return unload_ok diff --git a/homeassistant/components/cambridge_audio/config_flow.py b/homeassistant/components/cambridge_audio/config_flow.py new file mode 100644 index 00000000000..6f5a92feac0 --- /dev/null +++ b/homeassistant/components/cambridge_audio/config_flow.py @@ -0,0 +1,119 @@ +"""Config flow for Cambridge Audio.""" + +import asyncio +from typing import Any + +from aiostreammagic import StreamMagicClient +import voluptuous as vol + +from homeassistant.components import zeroconf +from homeassistant.config_entries import ( + 
SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import CONNECT_TIMEOUT, DOMAIN, STREAM_MAGIC_EXCEPTIONS + +DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) + + +class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN): + """Cambridge Audio configuration flow.""" + + VERSION = 1 + + def __init__(self) -> None: + """Initialize the config flow.""" + self.data: dict[str, Any] = {} + + async def async_step_zeroconf( + self, discovery_info: zeroconf.ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery.""" + self.data[CONF_HOST] = host = discovery_info.host + + await self.async_set_unique_id(discovery_info.properties["serial"]) + self._abort_if_unique_id_configured(updates={CONF_HOST: host}) + client = StreamMagicClient(host, async_get_clientsession(self.hass)) + try: + async with asyncio.timeout(CONNECT_TIMEOUT): + await client.connect() + except STREAM_MAGIC_EXCEPTIONS: + return self.async_abort(reason="cannot_connect") + + self.data[CONF_NAME] = client.info.name + + self.context["title_placeholders"] = { + "name": self.data[CONF_NAME], + } + await client.disconnect() + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + if user_input is not None: + return self.async_create_entry( + title=self.data[CONF_NAME], + data={CONF_HOST: self.data[CONF_HOST]}, + ) + + self._set_confirm_only() + return self.async_show_form( + step_id="discovery_confirm", + description_placeholders={ + "name": self.data[CONF_NAME], + }, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + if not user_input: + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + ) + return await self.async_step_user(user_input) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + errors: dict[str, str] = {} + if user_input: + client = StreamMagicClient( + user_input[CONF_HOST], async_get_clientsession(self.hass) + ) + try: + async with asyncio.timeout(CONNECT_TIMEOUT): + await client.connect() + except STREAM_MAGIC_EXCEPTIONS: + errors["base"] = "cannot_connect" + else: + await self.async_set_unique_id( + client.info.unit_id, raise_on_progress=False + ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="wrong_device") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates={CONF_HOST: user_input[CONF_HOST]}, + ) + self._abort_if_unique_id_configured() + return self.async_create_entry( + title=client.info.name, + data={CONF_HOST: user_input[CONF_HOST]}, + ) + finally: + await client.disconnect() + return self.async_show_form( + step_id="user", + data_schema=DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/cambridge_audio/const.py b/homeassistant/components/cambridge_audio/const.py new file mode 100644 index 00000000000..eae417ffe39 --- /dev/null +++ b/homeassistant/components/cambridge_audio/const.py @@ -0,0 +1,23 @@ +"""Constants for the Cambridge Audio integration.""" + +import asyncio +import logging + +from aiostreammagic import StreamMagicConnectionError, StreamMagicError + 
+DOMAIN = "cambridge_audio" + +LOGGER = logging.getLogger(__package__) + +STREAM_MAGIC_EXCEPTIONS = ( + StreamMagicConnectionError, + StreamMagicError, + asyncio.CancelledError, + TimeoutError, +) + +CONNECT_TIMEOUT = 5 + +CAMBRIDGE_MEDIA_TYPE_PRESET = "preset" +CAMBRIDGE_MEDIA_TYPE_AIRABLE = "airable" +CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO = "internet_radio" diff --git a/homeassistant/components/cambridge_audio/diagnostics.py b/homeassistant/components/cambridge_audio/diagnostics.py new file mode 100644 index 00000000000..a670b1f32eb --- /dev/null +++ b/homeassistant/components/cambridge_audio/diagnostics.py @@ -0,0 +1,23 @@ +"""Diagnostics platform for Cambridge Audio.""" + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import CambridgeAudioConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: CambridgeAudioConfigEntry +) -> dict[str, Any]: + """Return diagnostics for the provided config entry.""" + client = entry.runtime_data + return { + "display": client.display.to_dict(), + "info": client.info.to_dict(), + "now_playing": client.now_playing.to_dict(), + "play_state": client.play_state.to_dict(), + "presets_list": client.preset_list.to_dict(), + "sources": [s.to_dict() for s in client.sources], + "update": client.update.to_dict(), + } diff --git a/homeassistant/components/cambridge_audio/entity.py b/homeassistant/components/cambridge_audio/entity.py new file mode 100644 index 00000000000..de7a3e31765 --- /dev/null +++ b/homeassistant/components/cambridge_audio/entity.py @@ -0,0 +1,70 @@ +"""Base class for Cambridge Audio entities.""" + +from collections.abc import Awaitable, Callable, Coroutine +from functools import wraps +from typing import Any, Concatenate + +from aiostreammagic import StreamMagicClient +from aiostreammagic.models import CallbackType + +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN, STREAM_MAGIC_EXCEPTIONS + + +def command[_EntityT: CambridgeAudioEntity, **_P]( + func: Callable[Concatenate[_EntityT, _P], Awaitable[None]], +) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: + """Wrap async calls to raise on request error.""" + + @wraps(func) + async def decorator(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None: + """Wrap all command methods.""" + try: + await func(self, *args, **kwargs) + except STREAM_MAGIC_EXCEPTIONS as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_error", + translation_placeholders={ + "function_name": func.__name__, + "entity_id": self.entity_id, + }, + ) from exc + + return decorator + + +class CambridgeAudioEntity(Entity): + """Defines a base Cambridge Audio entity.""" + + _attr_has_entity_name = True + + def __init__(self, client: StreamMagicClient) -> None: + """Initialize Cambridge Audio entity.""" + self.client = client + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, client.info.unit_id)}, + name=client.info.name, + manufacturer="Cambridge Audio", + model=client.info.model, + serial_number=client.info.unit_id, + configuration_url=f"http://{client.host}", + ) + + async def _state_update_callback( + self, _client: StreamMagicClient, _callback_type: CallbackType + ) -> None: + """Call when the device is notified of changes.""" + self._attr_available = _client.is_connected() + self.async_write_ha_state() + + async def 
async_added_to_hass(self) -> None: + """Register callback handlers.""" + await self.client.register_state_update_callbacks(self._state_update_callback) + + async def async_will_remove_from_hass(self) -> None: + """Remove callbacks.""" + self.client.unregister_state_update_callbacks(self._state_update_callback) diff --git a/homeassistant/components/cambridge_audio/icons.json b/homeassistant/components/cambridge_audio/icons.json new file mode 100644 index 00000000000..b4346a7fe8e --- /dev/null +++ b/homeassistant/components/cambridge_audio/icons.json @@ -0,0 +1,28 @@ +{ + "entity": { + "select": { + "display_brightness": { + "default": "mdi:brightness-7", + "state": { + "bright": "mdi:brightness-7", + "dim": "mdi:brightness-6", + "off": "mdi:brightness-3" + } + }, + "audio_output": { + "default": "mdi:audio-input-stereo-minijack" + } + }, + "switch": { + "pre_amp": { + "default": "mdi:volume-high", + "state": { + "off": "mdi:volume-low" + } + }, + "early_update": { + "default": "mdi:update" + } + } + } +} diff --git a/homeassistant/components/cambridge_audio/manifest.json b/homeassistant/components/cambridge_audio/manifest.json new file mode 100644 index 00000000000..14a389587d2 --- /dev/null +++ b/homeassistant/components/cambridge_audio/manifest.json @@ -0,0 +1,13 @@ +{ + "domain": "cambridge_audio", + "name": "Cambridge Audio", + "codeowners": ["@noahhusby"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/cambridge_audio", + "integration_type": "device", + "iot_class": "local_push", + "loggers": ["aiostreammagic"], + "quality_scale": "platinum", + "requirements": ["aiostreammagic==2.10.0"], + "zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."] +} diff --git a/homeassistant/components/cambridge_audio/media_player.py b/homeassistant/components/cambridge_audio/media_player.py new file mode 100644 index 00000000000..9896effb07d --- /dev/null +++ b/homeassistant/components/cambridge_audio/media_player.py @@ -0,0 +1,340 @@ +"""Support for Cambridge Audio AV Receiver.""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any + +from aiostreammagic import ( + RepeatMode as CambridgeRepeatMode, + ShuffleMode, + StreamMagicClient, + TransportControl, +) + +from homeassistant.components.media_player import ( + MediaPlayerDeviceClass, + MediaPlayerEntity, + MediaPlayerEntityFeature, + MediaPlayerState, + MediaType, + RepeatMode, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import CambridgeAudioConfigEntry +from .const import ( + CAMBRIDGE_MEDIA_TYPE_AIRABLE, + CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO, + CAMBRIDGE_MEDIA_TYPE_PRESET, + DOMAIN, +) +from .entity import CambridgeAudioEntity, command + +BASE_FEATURES = ( + MediaPlayerEntityFeature.SELECT_SOURCE + | MediaPlayerEntityFeature.TURN_OFF + | MediaPlayerEntityFeature.TURN_ON + | MediaPlayerEntityFeature.PLAY_MEDIA +) + +PREAMP_FEATURES = ( + MediaPlayerEntityFeature.VOLUME_MUTE + | MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_STEP +) + +TRANSPORT_FEATURES: dict[TransportControl, MediaPlayerEntityFeature] = { + TransportControl.PLAY: MediaPlayerEntityFeature.PLAY, + TransportControl.PAUSE: MediaPlayerEntityFeature.PAUSE, + TransportControl.TRACK_NEXT: MediaPlayerEntityFeature.NEXT_TRACK, + TransportControl.TRACK_PREVIOUS: MediaPlayerEntityFeature.PREVIOUS_TRACK, + TransportControl.TOGGLE_REPEAT: MediaPlayerEntityFeature.REPEAT_SET, + TransportControl.TOGGLE_SHUFFLE: MediaPlayerEntityFeature.SHUFFLE_SET, + TransportControl.SEEK: MediaPlayerEntityFeature.SEEK, + TransportControl.STOP: MediaPlayerEntityFeature.STOP, +} + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: CambridgeAudioConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Cambridge Audio device based on a config entry.""" + client: StreamMagicClient = entry.runtime_data + async_add_entities([CambridgeAudioDevice(client)]) + + +class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity): + """Representation of a Cambridge Audio Media Player Device.""" + + _attr_name = None + _attr_media_content_type = MediaType.MUSIC + _attr_device_class = MediaPlayerDeviceClass.RECEIVER + + def __init__(self, client: StreamMagicClient) -> None: + """Initialize an Cambridge Audio entity.""" + super().__init__(client) + self._attr_unique_id = client.info.unit_id + + @property + def supported_features(self) -> MediaPlayerEntityFeature: + """Supported features for the media player.""" + controls = self.client.now_playing.controls + features = BASE_FEATURES + if self.client.state.pre_amp_mode: + features |= PREAMP_FEATURES + if TransportControl.PLAY_PAUSE in controls: + features |= MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.PAUSE + for control in controls: + feature = TRANSPORT_FEATURES.get(control) + if feature: + features |= feature + return features + + @property + def state(self) -> MediaPlayerState: + """Return the state of the device.""" + media_state = self.client.play_state.state + if media_state == "NETWORK": + return MediaPlayerState.STANDBY + if self.client.state.power: + if media_state == "play": + return MediaPlayerState.PLAYING + if media_state == "pause": + return MediaPlayerState.PAUSED + if media_state == "connecting": + return MediaPlayerState.BUFFERING + if media_state in ("stop", "ready"): + return MediaPlayerState.IDLE + return MediaPlayerState.ON + return MediaPlayerState.OFF + + @property + def source_list(self) -> list[str]: + """Return a list of available input sources.""" + return [item.name for item in self.client.sources] + + @property + def source(self) -> str | None: + """Return the current input source.""" + return next( + ( + item.name + for item in self.client.sources + if item.id == self.client.state.source + ), + None, + ) + + @property + def media_title(self) -> str | None: + """Title of current playing media.""" + return self.client.play_state.metadata.title + + @property + def media_artist(self) -> str | None: + """Artist 
of current playing media, music track only.""" + return self.client.play_state.metadata.artist + + @property + def media_album_name(self) -> str | None: + """Album name of current playing media, music track only.""" + return self.client.play_state.metadata.album + + @property + def media_image_url(self) -> str | None: + """Image url of current playing media.""" + return self.client.play_state.metadata.art_url + + @property + def media_duration(self) -> int | None: + """Duration of the current media.""" + return self.client.play_state.metadata.duration + + @property + def media_position(self) -> int | None: + """Position of the current media.""" + return self.client.play_state.position + + @property + def media_position_updated_at(self) -> datetime: + """Last time the media position was updated.""" + return self.client.position_last_updated + + @property + def is_volume_muted(self) -> bool | None: + """Volume mute status.""" + return self.client.state.mute + + @property + def volume_level(self) -> float | None: + """Current pre-amp volume level.""" + volume = self.client.state.volume_percent or 0 + return volume / 100 + + @property + def shuffle(self) -> bool: + """Current shuffle configuration.""" + return self.client.play_state.mode_shuffle != ShuffleMode.OFF + + @property + def repeat(self) -> RepeatMode | None: + """Current repeat configuration.""" + mode_repeat = RepeatMode.OFF + if self.client.play_state.mode_repeat == CambridgeRepeatMode.ALL: + mode_repeat = RepeatMode.ALL + return mode_repeat + + @command + async def async_media_play_pause(self) -> None: + """Toggle play/pause the current media.""" + await self.client.play_pause() + + @command + async def async_media_pause(self) -> None: + """Pause the current media.""" + controls = self.client.now_playing.controls + if ( + TransportControl.PAUSE not in controls + and TransportControl.PLAY_PAUSE in controls + ): + await self.client.play_pause() + else: + await self.client.pause() + + @command + async def async_media_stop(self) -> None: + """Stop the current media.""" + await self.client.stop() + + @command + async def async_media_play(self) -> None: + """Play the current media.""" + controls = self.client.now_playing.controls + if ( + TransportControl.PLAY not in controls + and TransportControl.PLAY_PAUSE in controls + ): + await self.client.play_pause() + else: + await self.client.play() + + @command + async def async_media_next_track(self) -> None: + """Skip to the next track.""" + await self.client.next_track() + + @command + async def async_media_previous_track(self) -> None: + """Skip to the previous track.""" + await self.client.previous_track() + + @command + async def async_select_source(self, source: str) -> None: + """Select the source.""" + for src in self.client.sources: + if src.name == source: + await self.client.set_source_by_id(src.id) + break + + @command + async def async_turn_on(self) -> None: + """Power on the device.""" + await self.client.power_on() + + @command + async def async_turn_off(self) -> None: + """Power off the device.""" + await self.client.power_off() + + @command + async def async_volume_up(self) -> None: + """Step the volume up.""" + await self.client.volume_up() + + @command + async def async_volume_down(self) -> None: + """Step the volume down.""" + await self.client.volume_down() + + @command + async def async_set_volume_level(self, volume: float) -> None: + """Set the volume level.""" + await self.client.set_volume(int(volume * 100)) + + @command + async def async_mute_volume(self, mute: 
bool) -> None: + """Set the mute state.""" + await self.client.set_mute(mute) + + @command + async def async_media_seek(self, position: float) -> None: + """Seek to a position in the current media.""" + await self.client.media_seek(int(position)) + + @command + async def async_set_shuffle(self, shuffle: bool) -> None: + """Set the shuffle mode for the current queue.""" + shuffle_mode = ShuffleMode.OFF + if shuffle: + shuffle_mode = ShuffleMode.ALL + await self.client.set_shuffle(shuffle_mode) + + @command + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set the repeat mode for the current queue.""" + repeat_mode = CambridgeRepeatMode.OFF + if repeat in {RepeatMode.ALL, RepeatMode.ONE}: + repeat_mode = CambridgeRepeatMode.ALL + await self.client.set_repeat(repeat_mode) + + @command + async def async_play_media( + self, media_type: MediaType | str, media_id: str, **kwargs: Any + ) -> None: + """Play media on the Cambridge Audio device.""" + + if media_type not in { + CAMBRIDGE_MEDIA_TYPE_PRESET, + CAMBRIDGE_MEDIA_TYPE_AIRABLE, + CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO, + }: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unsupported_media_type", + translation_placeholders={"media_type": media_type}, + ) + + if media_type == CAMBRIDGE_MEDIA_TYPE_PRESET: + try: + preset_id = int(media_id) + except ValueError as ve: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="preset_non_integer", + translation_placeholders={"preset_id": media_id}, + ) from ve + preset = None + for _preset in self.client.preset_list.presets: + if _preset.preset_id == preset_id: + preset = _preset + if not preset: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="missing_preset", + translation_placeholders={"preset_id": media_id}, + ) + await self.client.recall_preset(preset.preset_id) + + if media_type == CAMBRIDGE_MEDIA_TYPE_AIRABLE: + preset_id = int(media_id) + await self.client.play_radio_airable("Radio", preset_id) + + if media_type == CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO: + await self.client.play_radio_url("Radio", media_id) diff --git a/homeassistant/components/cambridge_audio/quality_scale.yaml b/homeassistant/components/cambridge_audio/quality_scale.yaml new file mode 100644 index 00000000000..e5cafdd6368 --- /dev/null +++ b/homeassistant/components/cambridge_audio/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions beyond play media which is setup by the media player entity. + appropriate-polling: + status: exempt + comment: | + This integration uses a push API. No polling required. + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: done + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. 
+ parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: + status: exempt + comment: | + This integration is not a hub and as such only represents a single device. + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + dynamic-devices: + status: exempt + comment: | + This integration is not a hub and only represents a single device. + discovery-update-info: done + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/cambridge_audio/select.py b/homeassistant/components/cambridge_audio/select.py new file mode 100644 index 00000000000..6bfe83c2539 --- /dev/null +++ b/homeassistant/components/cambridge_audio/select.py @@ -0,0 +1,124 @@ +"""Support for Cambridge Audio select entities.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass, field + +from aiostreammagic import StreamMagicClient +from aiostreammagic.models import DisplayBrightness + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import CambridgeAudioConfigEntry +from .entity import CambridgeAudioEntity, command + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class CambridgeAudioSelectEntityDescription(SelectEntityDescription): + """Describes Cambridge Audio select entity.""" + + options_fn: Callable[[StreamMagicClient], list[str]] = field(default=lambda _: []) + load_fn: Callable[[StreamMagicClient], bool] = field(default=lambda _: True) + value_fn: Callable[[StreamMagicClient], str | None] + set_value_fn: Callable[[StreamMagicClient, str], Awaitable[None]] + + +async def _audio_output_set_value_fn(client: StreamMagicClient, value: str) -> None: + """Set the audio output using the display name.""" + audio_output_id = next( + (output.id for output in client.audio_output.outputs if value == output.name), + None, + ) + assert audio_output_id is not None + await client.set_audio_output(audio_output_id) + + +def _audio_output_value_fn(client: StreamMagicClient) -> str | None: + """Convert the current audio output id to name.""" + return next( + ( + output.name + for output in client.audio_output.outputs + if client.state.audio_output == output.id + ), + None, + ) + + +CONTROL_ENTITIES: tuple[CambridgeAudioSelectEntityDescription, ...] 
= ( + CambridgeAudioSelectEntityDescription( + key="display_brightness", + translation_key="display_brightness", + options=[ + DisplayBrightness.BRIGHT.value, + DisplayBrightness.DIM.value, + DisplayBrightness.OFF.value, + ], + entity_category=EntityCategory.CONFIG, + load_fn=lambda client: client.display.brightness != DisplayBrightness.NONE, + value_fn=lambda client: client.display.brightness, + set_value_fn=lambda client, value: client.set_display_brightness( + DisplayBrightness(value) + ), + ), + CambridgeAudioSelectEntityDescription( + key="audio_output", + translation_key="audio_output", + entity_category=EntityCategory.CONFIG, + options_fn=lambda client: [ + output.name for output in client.audio_output.outputs + ], + load_fn=lambda client: len(client.audio_output.outputs) > 0, + value_fn=_audio_output_value_fn, + set_value_fn=_audio_output_set_value_fn, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: CambridgeAudioConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Cambridge Audio select entities based on a config entry.""" + + client: StreamMagicClient = entry.runtime_data + entities: list[CambridgeAudioSelect] = [ + CambridgeAudioSelect(client, description) + for description in CONTROL_ENTITIES + if description.load_fn(client) + ] + async_add_entities(entities) + + +class CambridgeAudioSelect(CambridgeAudioEntity, SelectEntity): + """Defines a Cambridge Audio select entity.""" + + entity_description: CambridgeAudioSelectEntityDescription + + def __init__( + self, + client: StreamMagicClient, + description: CambridgeAudioSelectEntityDescription, + ) -> None: + """Initialize Cambridge Audio select.""" + super().__init__(client) + self.entity_description = description + self._attr_unique_id = f"{client.info.unit_id}-{description.key}" + options_fn = description.options_fn(client) + if options_fn: + self._attr_options = options_fn + + @property + def current_option(self) -> str | None: + """Return the state of the select.""" + return self.entity_description.value_fn(self.client) + + @command + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.entity_description.set_value_fn(self.client, option) diff --git a/homeassistant/components/cambridge_audio/strings.json b/homeassistant/components/cambridge_audio/strings.json new file mode 100644 index 00000000000..9f5e031815b --- /dev/null +++ b/homeassistant/components/cambridge_audio/strings.json @@ -0,0 +1,77 @@ +{ + "config": { + "flow_title": "{name}", + "step": { + "user": { + "description": "Set up your Cambridge Audio Streamer to integrate with Home Assistant.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the Cambridge Audio Streamer." + } + }, + "discovery_confirm": { + "description": "Do you want to setup {name}?" + }, + "reconfigure": { + "description": "Reconfigure your Cambridge Audio Streamer.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "[%key:component::cambridge_audio::config::step::user::data_description::host%]" + } + } + }, + "error": { + "cannot_connect": "Failed to connect to Cambridge Audio device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect." + }, + "abort": { + "wrong_device": "This Cambridge Audio device does not match the existing device id. 
Please make sure you entered the correct IP address.", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "select": { + "display_brightness": { + "name": "Display brightness", + "state": { + "bright": "Bright", + "dim": "Dim", + "off": "[%key:common::state::off%]" + } + }, + "audio_output": { + "name": "Audio output" + } + }, + "switch": { + "pre_amp": { + "name": "Pre-Amp" + }, + "early_update": { + "name": "Early update" + } + } + }, + "exceptions": { + "unsupported_media_type": { + "message": "Unsupported media type for Cambridge Audio device: {media_type}" + }, + "missing_preset": { + "message": "Missing preset for media_id: {preset_id}" + }, + "preset_non_integer": { + "message": "Preset must be an integer, got: {preset_id}" + }, + "entry_cannot_connect": { + "message": "Error while connecting to {host}" + }, + "command_error": { + "message": "Error executing {function_name} on entity {entity_id}" + } + } +} diff --git a/homeassistant/components/cambridge_audio/switch.py b/homeassistant/components/cambridge_audio/switch.py new file mode 100644 index 00000000000..065a1da4f94 --- /dev/null +++ b/homeassistant/components/cambridge_audio/switch.py @@ -0,0 +1,86 @@ +"""Support for Cambridge Audio switch entities.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from aiostreammagic import StreamMagicClient + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import CambridgeAudioConfigEntry +from .entity import CambridgeAudioEntity, command + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class CambridgeAudioSwitchEntityDescription(SwitchEntityDescription): + """Describes Cambridge Audio switch entity.""" + + value_fn: Callable[[StreamMagicClient], bool] + set_value_fn: Callable[[StreamMagicClient, bool], Awaitable[None]] + + +CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] 
= ( + CambridgeAudioSwitchEntityDescription( + key="pre_amp", + translation_key="pre_amp", + entity_category=EntityCategory.CONFIG, + value_fn=lambda client: client.state.pre_amp_mode, + set_value_fn=lambda client, value: client.set_pre_amp_mode(value), + ), + CambridgeAudioSwitchEntityDescription( + key="early_update", + translation_key="early_update", + entity_category=EntityCategory.CONFIG, + value_fn=lambda client: client.update.early_update, + set_value_fn=lambda client, value: client.set_early_update(value), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: CambridgeAudioConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Cambridge Audio switch entities based on a config entry.""" + async_add_entities( + CambridgeAudioSwitch(entry.runtime_data, description) + for description in CONTROL_ENTITIES + ) + + +class CambridgeAudioSwitch(CambridgeAudioEntity, SwitchEntity): + """Defines a Cambridge Audio switch entity.""" + + entity_description: CambridgeAudioSwitchEntityDescription + + def __init__( + self, + client: StreamMagicClient, + description: CambridgeAudioSwitchEntityDescription, + ) -> None: + """Initialize Cambridge Audio switch.""" + super().__init__(client) + self.entity_description = description + self._attr_unique_id = f"{client.info.unit_id}-{description.key}" + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + return self.entity_description.value_fn(self.client) + + @command + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.entity_description.set_value_fn(self.client, True) + + @command + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.entity_description.set_value_fn(self.client, False) diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index 859ced1ba86..725fc84adc3 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -4,21 +4,23 @@ from __future__ import annotations import asyncio import collections -from collections.abc import Awaitable, Callable, Iterable +from collections.abc import Awaitable, Callable, Coroutine from contextlib import suppress -from dataclasses import asdict +from dataclasses import asdict, dataclass from datetime import datetime, timedelta from enum import IntFlag -from functools import cached_property, partial +from functools import partial import logging import os from random import SystemRandom import time -from typing import Any, Final, cast, final +from typing import Any, Final, final from aiohttp import hdrs, web import attr +from propcache import cached_property, under_cached_property import voluptuous as vol +from webrtc_models import RTCIceCandidateInit, RTCIceServer from homeassistant.components import websocket_api from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView @@ -48,41 +50,61 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.deprecation import ( DeprecatedConstantEnum, all_with_deprecated_constants, check_if_deprecated_constant, + deprecated_function, dir_with_deprecated_constants, ) from homeassistant.helpers.entity import Entity, 
EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.event import async_track_time_interval +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.network import get_url from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.loader import bind_hass -from .const import ( # noqa: F401 - _DEPRECATED_STREAM_TYPE_HLS, - _DEPRECATED_STREAM_TYPE_WEB_RTC, +from .const import ( CAMERA_IMAGE_TIMEOUT, CAMERA_STREAM_SOURCE_TIMEOUT, CONF_DURATION, CONF_LOOKBACK, DATA_CAMERA_PREFS, - DATA_RTSP_TO_WEB_RTC, + DATA_COMPONENT, DOMAIN, PREF_ORIENTATION, PREF_PRELOAD_STREAM, SERVICE_RECORD, + CameraState, StreamType, ) +from .helper import get_camera_from_entity_id from .img_util import scale_jpeg_camera_image from .prefs import CameraPreferences, DynamicStreamSettings # noqa: F401 +from .webrtc import ( + DATA_ICE_SERVERS, + CameraWebRTCLegacyProvider, + CameraWebRTCProvider, + WebRTCAnswer, + WebRTCCandidate, # noqa: F401 + WebRTCClientConfiguration, + WebRTCError, + WebRTCMessage, # noqa: F401 + WebRTCSendMessage, + async_get_supported_legacy_provider, + async_get_supported_provider, + async_register_ice_servers, + async_register_rtsp_to_web_rtc_provider, # noqa: F401 + async_register_webrtc_provider, # noqa: F401 + async_register_ws, +) _LOGGER = logging.getLogger(__name__) + ENTITY_ID_FORMAT: Final = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -97,9 +119,11 @@ ATTR_FILENAME: Final = "filename" ATTR_MEDIA_PLAYER: Final = "media_player" ATTR_FORMAT: Final = "format" -STATE_RECORDING: Final = "recording" -STATE_STREAMING: Final = "streaming" -STATE_IDLE: Final = "idle" +# These constants are deprecated as of Home Assistant 2024.10 +# Please use the StreamType enum instead. +_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10") +_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10") +_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10") class CameraEntityFeature(IntFlag): @@ -109,17 +133,6 @@ class CameraEntityFeature(IntFlag): STREAM = 2 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Pleease use the CameraEntityFeature enum instead. -_DEPRECATED_SUPPORT_ON_OFF: Final = DeprecatedConstantEnum( - CameraEntityFeature.ON_OFF, "2025.1" -) -_DEPRECATED_SUPPORT_STREAM: Final = DeprecatedConstantEnum( - CameraEntityFeature.STREAM, "2025.1" -) - -RTSP_PREFIXES = {"rtsp://", "rtsps://", "rtmp://"} - DEFAULT_CONTENT_TYPE: Final = "image/jpeg" ENTITY_IMAGE_URL: Final = "/api/camera_proxy/{0}?token={1}" @@ -154,10 +167,17 @@ class Image: content: bytes = attr.ib() +@dataclass(frozen=True) +class CameraCapabilities: + """Camera capabilities.""" + + frontend_stream_types: set[StreamType] + + @bind_hass async def async_request_stream(hass: HomeAssistant, entity_id: str, fmt: str) -> str: """Request a stream for a camera entity.""" - camera = _get_camera_from_entity_id(hass, entity_id) + camera = get_camera_from_entity_id(hass, entity_id) return await _async_stream_endpoint_url(hass, camera, fmt) @@ -215,7 +235,7 @@ async def async_get_image( width and height will be passed to the underlying camera. 
""" - camera = _get_camera_from_entity_id(hass, entity_id) + camera = get_camera_from_entity_id(hass, entity_id) return await _async_get_image(camera, timeout, width, height) @@ -237,7 +257,7 @@ async def _async_get_stream_image( @bind_hass async def async_get_stream_source(hass: HomeAssistant, entity_id: str) -> str | None: """Fetch the stream source for a camera entity.""" - camera = _get_camera_from_entity_id(hass, entity_id) + camera = get_camera_from_entity_id(hass, entity_id) return await camera.stream_source() @@ -246,7 +266,7 @@ async def async_get_mjpeg_stream( hass: HomeAssistant, request: web.Request, entity_id: str ) -> web.StreamResponse | None: """Fetch an mjpeg stream from a camera entity.""" - camera = _get_camera_from_entity_id(hass, entity_id) + camera = get_camera_from_entity_id(hass, entity_id) try: stream = await camera.handle_async_mjpeg_stream(request) @@ -313,74 +333,9 @@ async def async_get_still_stream( return response -def _get_camera_from_entity_id(hass: HomeAssistant, entity_id: str) -> Camera: - """Get camera component from entity_id.""" - if (component := hass.data.get(DOMAIN)) is None: - raise HomeAssistantError("Camera integration not set up") - - if (camera := component.get_entity(entity_id)) is None: - raise HomeAssistantError("Camera not found") - - if not camera.is_on: - raise HomeAssistantError("Camera is off") - - return cast(Camera, camera) - - -# An RtspToWebRtcProvider accepts these inputs: -# stream_source: The RTSP url -# offer_sdp: The WebRTC SDP offer -# stream_id: A unique id for the stream, used to update an existing source -# The output is the SDP answer, or None if the source or offer is not eligible. -# The Callable may throw HomeAssistantError on failure. -type RtspToWebRtcProviderType = Callable[[str, str, str], Awaitable[str | None]] - - -def async_register_rtsp_to_web_rtc_provider( - hass: HomeAssistant, - domain: str, - provider: RtspToWebRtcProviderType, -) -> Callable[[], None]: - """Register an RTSP to WebRTC provider. - - The first provider to satisfy the offer will be used. 
- """ - if DOMAIN not in hass.data: - raise ValueError("Unexpected state, camera not loaded") - - def remove_provider() -> None: - if domain in hass.data[DATA_RTSP_TO_WEB_RTC]: - del hass.data[DATA_RTSP_TO_WEB_RTC] - hass.async_create_task(_async_refresh_providers(hass)) - - hass.data.setdefault(DATA_RTSP_TO_WEB_RTC, {}) - hass.data[DATA_RTSP_TO_WEB_RTC][domain] = provider - hass.async_create_task(_async_refresh_providers(hass)) - return remove_provider - - -async def _async_refresh_providers(hass: HomeAssistant) -> None: - """Check all cameras for any state changes for registered providers.""" - - component: EntityComponent[Camera] = hass.data[DOMAIN] - await asyncio.gather( - *(camera.async_refresh_providers() for camera in component.entities) - ) - - -def _async_get_rtsp_to_web_rtc_providers( - hass: HomeAssistant, -) -> Iterable[RtspToWebRtcProviderType]: - """Return registered RTSP to WebRTC providers.""" - providers: dict[str, RtspToWebRtcProviderType] = hass.data.get( - DATA_RTSP_TO_WEB_RTC, {} - ) - return providers.values() - - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the camera component.""" - component = hass.data[DOMAIN] = EntityComponent[Camera]( + component = hass.data[DATA_COMPONENT] = EntityComponent[Camera]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -392,9 +347,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass.http.register_view(CameraMjpegStream(component)) websocket_api.async_register_command(hass, ws_camera_stream) - websocket_api.async_register_command(hass, ws_camera_web_rtc_offer) websocket_api.async_register_command(hass, websocket_get_prefs) websocket_api.async_register_command(hass, websocket_update_prefs) + websocket_api.async_register_command(hass, ws_camera_capabilities) + async_register_ws(hass) await component.async_setup(config) @@ -450,19 +406,31 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: SERVICE_RECORD, CAMERA_SERVICE_RECORD, async_handle_record_service ) + @callback + def get_ice_servers() -> list[RTCIceServer]: + if hass.config.webrtc.ice_servers: + return hass.config.webrtc.ice_servers + return [ + RTCIceServer( + urls=[ + "stun:stun.home-assistant.io:80", + "stun:stun.home-assistant.io:3478", + ] + ), + ] + + async_register_ice_servers(hass, get_ice_servers) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[Camera] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[Camera] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) CACHED_PROPERTIES_WITH_ATTR_ = { @@ -488,6 +456,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): # Entity Properties _attr_brand: str | None = None _attr_frame_interval: float = MIN_STREAM_INTERVAL + # Deprecated in 2024.12. 
Remove in 2025.6 _attr_frontend_stream_type: StreamType | None _attr_is_on: bool = True _attr_is_recording: bool = False @@ -498,8 +467,11 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): _attr_state: None = None # State is determined by is_on _attr_supported_features: CameraEntityFeature = CameraEntityFeature(0) + __supports_stream: CameraEntityFeature | None = None + def __init__(self) -> None: """Initialize a camera.""" + self._cache: dict[str, Any] = {} self.stream: Stream | None = None self.stream_options: dict[str, str | bool | float] = {} self.content_type: str = DEFAULT_CONTENT_TYPE @@ -507,7 +479,25 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): self._warned_old_signature = False self.async_update_token() self._create_stream_lock: asyncio.Lock | None = None - self._rtsp_to_webrtc = False + self._webrtc_provider: CameraWebRTCProvider | None = None + self._legacy_webrtc_provider: CameraWebRTCLegacyProvider | None = None + self._supports_native_sync_webrtc = ( + type(self).async_handle_web_rtc_offer != Camera.async_handle_web_rtc_offer + ) + self._supports_native_async_webrtc = ( + type(self).async_handle_async_webrtc_offer + != Camera.async_handle_async_webrtc_offer + ) + self._deprecate_attr_frontend_stream_type_logged = False + if type(self).frontend_stream_type != Camera.frontend_stream_type: + report_usage( + ( + f"is overwriting the 'frontend_stream_type' property in the {type(self).__name__} class," + " which is deprecated and will be removed in Home Assistant 2025.6, " + ), + core_integration_behavior=ReportBehavior.ERROR, + exclude_integrations={DOMAIN}, + ) @cached_property def entity_picture(self) -> str: @@ -526,19 +516,6 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Flag supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> CameraEntityFeature: - """Return the supported features as CameraEntityFeature. - - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = CameraEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - @cached_property def is_recording(self) -> bool: """Return true if the device is recording.""" @@ -577,11 +554,29 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): frontend which camera attributes and player to use. The default type is to use HLS, and components can override to change the type. """ + # Deprecated in 2024.12. 
Remove in 2025.6 + # Use the camera_capabilities instead if hasattr(self, "_attr_frontend_stream_type"): + if not self._deprecate_attr_frontend_stream_type_logged: + report_usage( + ( + f"is setting the '_attr_frontend_stream_type' attribute in the {type(self).__name__} class," + " which is deprecated and will be removed in Home Assistant 2025.6, " + ), + core_integration_behavior=ReportBehavior.ERROR, + exclude_integrations={DOMAIN}, + ) + + self._deprecate_attr_frontend_stream_type_logged = True return self._attr_frontend_stream_type - if CameraEntityFeature.STREAM not in self.supported_features_compat: + if CameraEntityFeature.STREAM not in self.supported_features: return None - if self._rtsp_to_webrtc: + if ( + self._webrtc_provider + or self._legacy_webrtc_provider + or self._supports_native_sync_webrtc + or self._supports_native_async_webrtc + ): return StreamType.WEB_RTC return StreamType.HLS @@ -631,14 +626,69 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): Integrations can override with a native WebRTC implementation. """ - stream_source = await self.stream_source() - if not stream_source: - return None - for provider in _async_get_rtsp_to_web_rtc_providers(self.hass): - answer_sdp = await provider(stream_source, offer_sdp, self.entity_id) - if answer_sdp: - return answer_sdp - raise HomeAssistantError("WebRTC offer was not accepted by any providers") + + async def async_handle_async_webrtc_offer( + self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: + """Handle the async WebRTC offer. + + Async means that it could take some time to process the offer and responses/message + will be sent with the send_message callback. + This method is used by cameras with CameraEntityFeature.STREAM. + An integration overriding this method must also implement async_on_webrtc_candidate. + + Integrations can override with a native WebRTC implementation. 
+ """ + if self._supports_native_sync_webrtc: + try: + answer = await deprecated_function( + "async_handle_async_webrtc_offer", + breaks_in_ha_version="2025.6", + )(self.async_handle_web_rtc_offer)(offer_sdp) + except ValueError as ex: + _LOGGER.error("Error handling WebRTC offer: %s", ex) + send_message( + WebRTCError( + "webrtc_offer_failed", + str(ex), + ) + ) + except TimeoutError: + # This catch was already here and should stay through the deprecation + _LOGGER.error("Timeout handling WebRTC offer") + send_message( + WebRTCError( + "webrtc_offer_failed", + "Timeout handling WebRTC offer", + ) + ) + else: + if answer: + send_message(WebRTCAnswer(answer)) + else: + _LOGGER.error("Error handling WebRTC offer: No answer") + send_message( + WebRTCError( + "webrtc_offer_failed", + "No answer on WebRTC offer", + ) + ) + return + + if self._webrtc_provider: + await self._webrtc_provider.async_handle_async_webrtc_offer( + self, offer_sdp, session_id, send_message + ) + return + + if self._legacy_webrtc_provider and ( + answer := await self._legacy_webrtc_provider.async_handle_web_rtc_offer( + self, offer_sdp + ) + ): + send_message(WebRTCAnswer(answer)) + else: + raise HomeAssistantError("Camera does not support WebRTC") def camera_image( self, width: int | None = None, height: int | None = None @@ -677,10 +727,10 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def state(self) -> str: """Return the camera state.""" if self.is_recording: - return STATE_RECORDING + return CameraState.RECORDING if self.is_streaming: - return STATE_STREAMING - return STATE_IDLE + return CameraState.STREAMING + return CameraState.IDLE @cached_property def is_on(self) -> bool: @@ -748,36 +798,130 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): async def async_internal_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" await super().async_internal_added_to_hass() - # Avoid calling async_refresh_providers() in here because it - # it will write state a second time since state is always - # written when an entity is added to hass. - self._rtsp_to_webrtc = await self._async_use_rtsp_to_webrtc() + self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM + await self.async_refresh_providers(write_state=False) - async def async_refresh_providers(self) -> None: + async def async_refresh_providers(self, *, write_state: bool = True) -> None: """Determine if any of the registered providers are suitable for this entity. This affects state attributes, so it should be invoked any time the registered providers or inputs to the state attributes change. 
- - Returns True if any state was updated (and needs to be written) """ - old_state = self._rtsp_to_webrtc - self._rtsp_to_webrtc = await self._async_use_rtsp_to_webrtc() - if old_state != self._rtsp_to_webrtc: - self.async_write_ha_state() + old_provider = self._webrtc_provider + old_legacy_provider = self._legacy_webrtc_provider + new_provider = None + new_legacy_provider = None - async def _async_use_rtsp_to_webrtc(self) -> bool: - """Determine if a WebRTC provider can be used for the camera.""" - if CameraEntityFeature.STREAM not in self.supported_features_compat: - return False - if DATA_RTSP_TO_WEB_RTC not in self.hass.data: - return False - stream_source = await self.stream_source() - return any( - stream_source and stream_source.startswith(prefix) - for prefix in RTSP_PREFIXES + # Skip all providers if the camera has a native WebRTC implementation + if not ( + self._supports_native_sync_webrtc or self._supports_native_async_webrtc + ): + # Camera doesn't have a native WebRTC implementation + new_provider = await self._async_get_supported_webrtc_provider( + async_get_supported_provider + ) + + if new_provider is None: + # Only add the legacy provider if the new provider is not available + new_legacy_provider = await self._async_get_supported_webrtc_provider( + async_get_supported_legacy_provider + ) + + if old_provider != new_provider or old_legacy_provider != new_legacy_provider: + self._webrtc_provider = new_provider + self._legacy_webrtc_provider = new_legacy_provider + self._invalidate_camera_capabilities_cache() + if write_state: + self.async_write_ha_state() + + async def _async_get_supported_webrtc_provider[_T]( + self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]] + ) -> _T | None: + """Get first provider that supports this camera.""" + if CameraEntityFeature.STREAM not in self.supported_features: + return None + + return await fn(self.hass, self) + + @callback + def _async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration: + """Return the WebRTC client configuration adjustable per integration.""" + return WebRTCClientConfiguration() + + @final + @callback + def async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration: + """Return the WebRTC client configuration and extend it with the registered ice servers.""" + config = self._async_get_webrtc_client_configuration() + + if not self._supports_native_sync_webrtc: + # Until 2024.11, the frontend was not resolving any ice servers + # The async approach was added 2024.11 and new integrations need to use it + ice_servers = [ + server + for servers in self.hass.data.get(DATA_ICE_SERVERS, []) + for server in servers() + ] + config.configuration.ice_servers.extend(ice_servers) + + config.get_candidates_upfront = ( + self._supports_native_sync_webrtc + or self._legacy_webrtc_provider is not None ) + return config + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle a WebRTC candidate.""" + if self._webrtc_provider: + await self._webrtc_provider.async_on_webrtc_candidate(session_id, candidate) + else: + raise HomeAssistantError("Cannot handle WebRTC candidate") + + @callback + def close_webrtc_session(self, session_id: str) -> None: + """Close a WebRTC session.""" + if self._webrtc_provider: + self._webrtc_provider.async_close_session(session_id) + + @callback + def _invalidate_camera_capabilities_cache(self) -> None: + """Invalidate the camera capabilities cache.""" + self._cache.pop("camera_capabilities", 
None) + + @final + @under_cached_property + def camera_capabilities(self) -> CameraCapabilities: + """Return the camera capabilities.""" + frontend_stream_types = set() + if CameraEntityFeature.STREAM in self.supported_features: + if self._supports_native_sync_webrtc or self._supports_native_async_webrtc: + # The camera has a native WebRTC implementation + frontend_stream_types.add(StreamType.WEB_RTC) + else: + frontend_stream_types.add(StreamType.HLS) + + if self._webrtc_provider or self._legacy_webrtc_provider: + frontend_stream_types.add(StreamType.WEB_RTC) + + return CameraCapabilities(frontend_stream_types) + + @callback + def async_write_ha_state(self) -> None: + """Write the state to the state machine. + + Schedules async_refresh_providers if support of streams have changed. + """ + super().async_write_ha_state() + if self.__supports_stream != ( + supports_stream := self.supported_features & CameraEntityFeature.STREAM + ): + self.__supports_stream = supports_stream + self._invalidate_camera_capabilities_cache() + self.hass.async_create_task(self.async_refresh_providers()) + class CameraView(HomeAssistantView): """Base CameraView.""" @@ -868,6 +1012,24 @@ class CameraMjpegStream(CameraView): raise web.HTTPBadRequest from err +@websocket_api.websocket_command( + { + vol.Required("type"): "camera/capabilities", + vol.Required("entity_id"): cv.entity_id, + } +) +@websocket_api.async_response +async def ws_camera_capabilities( + hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] +) -> None: + """Handle get camera capabilities websocket command. + + Async friendly. + """ + camera = get_camera_from_entity_id(hass, msg["entity_id"]) + connection.send_result(msg["id"], asdict(camera.camera_capabilities)) + + @websocket_api.websocket_command( { vol.Required("type"): "camera/stream", @@ -885,7 +1047,7 @@ async def ws_camera_stream( """ try: entity_id = msg["entity_id"] - camera = _get_camera_from_entity_id(hass, entity_id) + camera = get_camera_from_entity_id(hass, entity_id) url = await _async_stream_endpoint_url(hass, camera, fmt=msg["format"]) connection.send_result(msg["id"], {"url": url}) except HomeAssistantError as ex: @@ -898,53 +1060,6 @@ async def ws_camera_stream( ) -@websocket_api.websocket_command( - { - vol.Required("type"): "camera/web_rtc_offer", - vol.Required("entity_id"): cv.entity_id, - vol.Required("offer"): str, - } -) -@websocket_api.async_response -async def ws_camera_web_rtc_offer( - hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] -) -> None: - """Handle the signal path for a WebRTC stream. - - This signal path is used to route the offer created by the client to the - camera device through the integration for negotiation on initial setup, - which returns an answer. The actual streaming is handled entirely between - the client and camera device. - - Async friendly. 
- """ - entity_id = msg["entity_id"] - offer = msg["offer"] - camera = _get_camera_from_entity_id(hass, entity_id) - if camera.frontend_stream_type != StreamType.WEB_RTC: - connection.send_error( - msg["id"], - "web_rtc_offer_failed", - ( - "Camera does not support WebRTC," - f" frontend_stream_type={camera.frontend_stream_type}" - ), - ) - return - try: - answer = await camera.async_handle_web_rtc_offer(offer) - except (HomeAssistantError, ValueError) as ex: - _LOGGER.error("Error handling WebRTC offer: %s", ex) - connection.send_error(msg["id"], "web_rtc_offer_failed", str(ex)) - except TimeoutError: - _LOGGER.error("Timeout handling WebRTC offer") - connection.send_error( - msg["id"], "web_rtc_offer_failed", "Timeout handling WebRTC offer" - ) - else: - connection.send_result(msg["id"], {"answer": answer}) - - @websocket_api.websocket_command( {vol.Required("type"): "camera/get_prefs", vol.Required("entity_id"): cv.entity_id} ) @@ -953,8 +1068,9 @@ async def websocket_get_prefs( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Handle request for account info.""" - prefs: CameraPreferences = hass.data[DATA_CAMERA_PREFS] - stream_prefs = await prefs.get_dynamic_stream_settings(msg["entity_id"]) + stream_prefs = await hass.data[DATA_CAMERA_PREFS].get_dynamic_stream_settings( + msg["entity_id"] + ) connection.send_result(msg["id"], asdict(stream_prefs)) @@ -971,14 +1087,14 @@ async def websocket_update_prefs( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Handle request for account info.""" - prefs: CameraPreferences = hass.data[DATA_CAMERA_PREFS] - changes = dict(msg) changes.pop("id") changes.pop("type") entity_id = changes.pop("entity_id") try: - entity_prefs = await prefs.async_update(entity_id, **changes) + entity_prefs = await hass.data[DATA_CAMERA_PREFS].async_update( + entity_id, **changes + ) except HomeAssistantError as ex: _LOGGER.error("Error setting camera preferences: %s", ex) connection.send_error(msg["id"], "update_failed", str(ex)) @@ -986,6 +1102,46 @@ async def websocket_update_prefs( connection.send_result(msg["id"], entity_prefs) +class _TemplateCameraEntity: + """Class to warn when the `entity_id` template variable is accessed. + + Can be removed in HA Core 2025.6. 
+ """ + + def __init__(self, camera: Camera, service: str) -> None: + """Initialize.""" + self._camera = camera + self._entity_id = camera.entity_id + self._hass = camera.hass + self._service = service + + def _report_issue(self) -> None: + """Create a repair issue.""" + ir.async_create_issue( + self._hass, + DOMAIN, + f"deprecated_filename_template_{self._entity_id}_{self._service}", + breaks_in_ha_version="2025.6.0", + is_fixable=True, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_filename_template", + translation_placeholders={ + "entity_id": self._entity_id, + "service": f"{DOMAIN}.{self._service}", + }, + ) + + def __getattr__(self, name: str) -> Any: + """Forward to the camera entity.""" + self._report_issue() + return getattr(self._camera, name) + + def __str__(self) -> str: + """Forward to the camera entity.""" + self._report_issue() + return str(self._camera) + + async def async_handle_snapshot_service( camera: Camera, service_call: ServiceCall ) -> None: @@ -993,7 +1149,9 @@ async def async_handle_snapshot_service( hass = camera.hass filename: Template = service_call.data[ATTR_FILENAME] - snapshot_file = filename.async_render(variables={ATTR_ENTITY_ID: camera}) + snapshot_file = filename.async_render( + variables={ATTR_ENTITY_ID: _TemplateCameraEntity(camera, SERVICE_SNAPSHOT)} + ) # check if we allow to access to that file if not hass.config.is_allowed_path(snapshot_file): @@ -1069,7 +1227,9 @@ async def async_handle_record_service( raise HomeAssistantError(f"{camera.entity_id} does not support record service") filename = service_call.data[CONF_FILENAME] - video_path = filename.async_render(variables={ATTR_ENTITY_ID: camera}) + video_path = filename.async_render( + variables={ATTR_ENTITY_ID: _TemplateCameraEntity(camera, SERVICE_RECORD)} + ) await stream.async_record( video_path, diff --git a/homeassistant/components/camera/const.py b/homeassistant/components/camera/const.py index ad863f374d1..65862e66dab 100644 --- a/homeassistant/components/camera/const.py +++ b/homeassistant/components/camera/const.py @@ -1,20 +1,22 @@ """Constants for Camera component.""" -from enum import StrEnum -from functools import partial -from typing import Final +from __future__ import annotations -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) +from enum import StrEnum +from typing import TYPE_CHECKING, Final + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from . import Camera + from .prefs import CameraPreferences DOMAIN: Final = "camera" +DATA_COMPONENT: HassKey[EntityComponent[Camera]] = HassKey(DOMAIN) -DATA_CAMERA_PREFS: Final = "camera_prefs" -DATA_RTSP_TO_WEB_RTC: Final = "rtsp_to_web_rtc" +DATA_CAMERA_PREFS: HassKey[CameraPreferences] = HassKey("camera_prefs") PREF_PRELOAD_STREAM: Final = "preload_stream" PREF_ORIENTATION: Final = "orientation" @@ -28,6 +30,14 @@ CAMERA_STREAM_SOURCE_TIMEOUT: Final = 10 CAMERA_IMAGE_TIMEOUT: Final = 10 +class CameraState(StrEnum): + """Camera entity states.""" + + RECORDING = "recording" + STREAMING = "streaming" + IDLE = "idle" + + class StreamType(StrEnum): """Camera stream type. @@ -41,17 +51,3 @@ class StreamType(StrEnum): HLS = "hls" WEB_RTC = "web_rtc" - - -# These constants are deprecated as of Home Assistant 2022.5 -# Please use the StreamType enum instead. 
-_DEPRECATED_STREAM_TYPE_HLS = DeprecatedConstantEnum(StreamType.HLS, "2025.1") -_DEPRECATED_STREAM_TYPE_WEB_RTC = DeprecatedConstantEnum(StreamType.WEB_RTC, "2025.1") - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/camera/diagnostics.py b/homeassistant/components/camera/diagnostics.py index 1edda5079b4..3408ab3a0af 100644 --- a/homeassistant/components/camera/diagnostics.py +++ b/homeassistant/components/camera/diagnostics.py @@ -7,8 +7,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import _get_camera_from_entity_id from .const import DOMAIN +from .helper import get_camera_from_entity_id async def async_get_config_entry_diagnostics( @@ -22,7 +22,7 @@ async def async_get_config_entry_diagnostics( if entity.domain != DOMAIN: continue try: - camera = _get_camera_from_entity_id(hass, entity.entity_id) + camera = get_camera_from_entity_id(hass, entity.entity_id) except HomeAssistantError: continue diagnostics[entity.entity_id] = ( diff --git a/homeassistant/components/camera/helper.py b/homeassistant/components/camera/helper.py new file mode 100644 index 00000000000..5e84b18dda8 --- /dev/null +++ b/homeassistant/components/camera/helper.py @@ -0,0 +1,28 @@ +"""Camera helper functions.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .const import DATA_COMPONENT + +if TYPE_CHECKING: + from . 
import Camera + + +def get_camera_from_entity_id(hass: HomeAssistant, entity_id: str) -> Camera: + """Get camera component from entity_id.""" + component = hass.data.get(DATA_COMPONENT) + if component is None: + raise HomeAssistantError("Camera integration not set up") + + if (camera := component.get_entity(entity_id)) is None: + raise HomeAssistantError("Camera not found") + + if not camera.is_on: + raise HomeAssistantError("Camera is off") + + return camera diff --git a/homeassistant/components/camera/icons.json b/homeassistant/components/camera/icons.json index 37e71c80a67..982074cd553 100644 --- a/homeassistant/components/camera/icons.json +++ b/homeassistant/components/camera/icons.json @@ -8,12 +8,26 @@ } }, "services": { - "disable_motion_detection": "mdi:motion-sensor-off", - "enable_motion_detection": "mdi:motion-sensor", - "play_stream": "mdi:play", - "record": "mdi:record-rec", - "snapshot": "mdi:camera", - "turn_off": "mdi:video-off", - "turn_on": "mdi:video" + "disable_motion_detection": { + "service": "mdi:motion-sensor-off" + }, + "enable_motion_detection": { + "service": "mdi:motion-sensor" + }, + "play_stream": { + "service": "mdi:play" + }, + "record": { + "service": "mdi:record-rec" + }, + "snapshot": { + "service": "mdi:camera" + }, + "turn_off": { + "service": "mdi:video-off" + }, + "turn_on": { + "service": "mdi:video" + } } } diff --git a/homeassistant/components/camera/manifest.json b/homeassistant/components/camera/manifest.json index b1df158a260..9c56d97f910 100644 --- a/homeassistant/components/camera/manifest.json +++ b/homeassistant/components/camera/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/camera", "integration_type": "entity", "quality_scale": "internal", - "requirements": ["PyTurboJPEG==1.7.1"] + "requirements": ["PyTurboJPEG==1.7.5"] } diff --git a/homeassistant/components/camera/media_source.py b/homeassistant/components/camera/media_source.py index 4bb6ed5f921..701457afc3e 100644 --- a/homeassistant/components/camera/media_source.py +++ b/homeassistant/components/camera/media_source.py @@ -5,21 +5,20 @@ from __future__ import annotations import asyncio from homeassistant.components.media_player import BrowseError, MediaClass -from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia, + Unresolvable, ) from homeassistant.components.stream import FORMAT_CONTENT_TYPE, HLS_PROVIDER from homeassistant.const import ATTR_FRIENDLY_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.entity_component import EntityComponent from . 
import Camera, _async_stream_endpoint_url -from .const import DOMAIN, StreamType +from .const import DATA_COMPONENT, DOMAIN, StreamType async def async_get_media_source(hass: HomeAssistant) -> CameraMediaSource: @@ -59,13 +58,13 @@ class CameraMediaSource(MediaSource): async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: """Resolve media to a url.""" - component: EntityComponent[Camera] = self.hass.data[DOMAIN] + component = self.hass.data[DATA_COMPONENT] camera = component.get_entity(item.identifier) if not camera: raise Unresolvable(f"Could not resolve media item: {item.identifier}") - if (stream_type := camera.frontend_stream_type) is None: + if not (stream_types := camera.camera_capabilities.frontend_stream_types): return PlayMedia( f"/api/camera_proxy_stream/{camera.entity_id}", camera.content_type ) @@ -77,7 +76,7 @@ class CameraMediaSource(MediaSource): url = await _async_stream_endpoint_url(self.hass, camera, HLS_PROVIDER) except HomeAssistantError as err: # Handle known error - if stream_type != StreamType.HLS: + if StreamType.HLS not in stream_types: raise Unresolvable( "Camera does not support MJPEG or HLS streaming." ) from err @@ -96,19 +95,21 @@ class CameraMediaSource(MediaSource): can_stream_hls = "stream" in self.hass.config.components async def _filter_browsable_camera(camera: Camera) -> BrowseMediaSource | None: - stream_type = camera.frontend_stream_type - if stream_type is None: + stream_types = camera.camera_capabilities.frontend_stream_types + if not stream_types: return _media_source_for_camera(self.hass, camera, camera.content_type) if not can_stream_hls: return None content_type = FORMAT_CONTENT_TYPE[HLS_PROVIDER] - if stream_type != StreamType.HLS and not (await camera.stream_source()): + if StreamType.HLS not in stream_types and not ( + await camera.stream_source() + ): return None return _media_source_for_camera(self.hass, camera, content_type) - component: EntityComponent[Camera] = self.hass.data[DOMAIN] + component = self.hass.data[DATA_COMPONENT] results = await asyncio.gather( *(_filter_browsable_camera(camera) for camera in component.entities), return_exceptions=True, diff --git a/homeassistant/components/camera/strings.json b/homeassistant/components/camera/strings.json index 90b053ec087..4a7e9aafc6e 100644 --- a/homeassistant/components/camera/strings.json +++ b/homeassistant/components/camera/strings.json @@ -35,6 +35,23 @@ } } }, + "issues": { + "deprecated_filename_template": { + "title": "Detected use of deprecated template variable", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::camera::issues::deprecated_filename_template::title%]", + "description": "The pre-defined template variable `entity_id` was used when performing action `{service}` targeting camera entity `{entity_id}`. The pre-defined template variable `entity_id` is being removed from the `filename` parameter of `{service}`.\n\nPlease update your automations and scripts to use a manually defined variable instead and select **Submit** to close this issue." + } + } + } + }, + "legacy_webrtc_provider": { + "title": "Detected use of legacy WebRTC provider registered by {legacy_integration}", + "description": "The {legacy_integration} integration has registered a legacy WebRTC provider. 
Home Assistant prefers using the built-in modern WebRTC provider registered by the {builtin_integration} integration.\n\nBenefits of the built-in integration are:\n\n- The camera stream is started faster.\n- More camera devices are supported.\n\nTo fix this issue, you can either keep using the built-in modern WebRTC provider and remove the {legacy_integration} integration or remove the {builtin_integration} integration to use the legacy provider, and then restart Home Assistant." + } + }, "services": { "turn_off": { "name": "[%key:common::action::turn_off%]", @@ -58,7 +75,7 @@ "fields": { "filename": { "name": "Filename", - "description": "Template of a filename. Variable available is `entity_id`." + "description": "Full path to filename." } } }, @@ -82,7 +99,7 @@ "fields": { "filename": { "name": "[%key:component::camera::services::snapshot::fields::filename::name%]", - "description": "Template of a filename. Variable available is `entity_id`. Must be mp4." + "description": "Full path to filename. Must be mp4." }, "duration": { "name": "Duration", diff --git a/homeassistant/components/camera/webrtc.py b/homeassistant/components/camera/webrtc.py new file mode 100644 index 00000000000..3630acf1cfe --- /dev/null +++ b/homeassistant/components/camera/webrtc.py @@ -0,0 +1,504 @@ +"""Helper for WebRTC support.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +import asyncio +from collections.abc import Awaitable, Callable, Iterable +from dataclasses import asdict, dataclass, field +from functools import cache, partial, wraps +import logging +from typing import TYPE_CHECKING, Any, Protocol + +from mashumaro import MissingField +import voluptuous as vol +from webrtc_models import ( + RTCConfiguration, + RTCIceCandidate, + RTCIceCandidateInit, + RTCIceServer, +) + +from homeassistant.components import websocket_api +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv, issue_registry as ir +from homeassistant.helpers.deprecation import deprecated_function +from homeassistant.util.hass_dict import HassKey +from homeassistant.util.ulid import ulid + +from .const import DATA_COMPONENT, DOMAIN, StreamType +from .helper import get_camera_from_entity_id + +if TYPE_CHECKING: + from . 
import Camera + +_LOGGER = logging.getLogger(__name__) + + +DATA_WEBRTC_PROVIDERS: HassKey[set[CameraWebRTCProvider]] = HassKey( + "camera_webrtc_providers" +) +DATA_WEBRTC_LEGACY_PROVIDERS: HassKey[dict[str, CameraWebRTCLegacyProvider]] = HassKey( + "camera_webrtc_legacy_providers" +) +DATA_ICE_SERVERS: HassKey[list[Callable[[], Iterable[RTCIceServer]]]] = HassKey( + "camera_webrtc_ice_servers" +) + + +_WEBRTC = "WebRTC" + + +@dataclass(frozen=True) +class WebRTCMessage: + """Base class for WebRTC messages.""" + + @classmethod + @cache + def _get_type(cls) -> str: + _, _, name = cls.__name__.partition(_WEBRTC) + return name.lower() + + def as_dict(self) -> dict[str, Any]: + """Return a dict representation of the message.""" + data = asdict(self) + data["type"] = self._get_type() + return data + + +@dataclass(frozen=True) +class WebRTCSession(WebRTCMessage): + """WebRTC session.""" + + session_id: str + + +@dataclass(frozen=True) +class WebRTCAnswer(WebRTCMessage): + """WebRTC answer.""" + + answer: str + + +@dataclass(frozen=True) +class WebRTCCandidate(WebRTCMessage): + """WebRTC candidate.""" + + candidate: RTCIceCandidate | RTCIceCandidateInit + + def as_dict(self) -> dict[str, Any]: + """Return a dict representation of the message.""" + return { + "type": self._get_type(), + "candidate": self.candidate.to_dict(), + } + + +@dataclass(frozen=True) +class WebRTCError(WebRTCMessage): + """WebRTC error.""" + + code: str + message: str + + +type WebRTCSendMessage = Callable[[WebRTCMessage], None] + + +@dataclass(kw_only=True) +class WebRTCClientConfiguration: + """WebRTC configuration for the client. + + Not part of the spec, but required to configure client. + """ + + configuration: RTCConfiguration = field(default_factory=RTCConfiguration) + data_channel: str | None = None + get_candidates_upfront: bool = False + + def to_frontend_dict(self) -> dict[str, Any]: + """Return a dict that can be used by the frontend.""" + data: dict[str, Any] = { + "configuration": self.configuration.to_dict(), + "getCandidatesUpfront": self.get_candidates_upfront, + } + if self.data_channel is not None: + data["dataChannel"] = self.data_channel + return data + + +class CameraWebRTCProvider(ABC): + """WebRTC provider.""" + + @property + @abstractmethod + def domain(self) -> str: + """Return the integration domain of the provider.""" + + @callback + @abstractmethod + def async_is_supported(self, stream_source: str) -> bool: + """Determine if the provider supports the stream source.""" + + @abstractmethod + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback.""" + + @abstractmethod + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle the WebRTC candidate.""" + + @callback + def async_close_session(self, session_id: str) -> None: + """Close the session.""" + return ## This is an optional method so we need a default here. 
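
Editor's note: the hunk above replaces the legacy RTSP-to-WebRTC callback registry with the new `CameraWebRTCProvider` ABC and `async_register_webrtc_provider`. For orientation only, here is a minimal, hypothetical sketch of what an integration-side provider could look like against the interface shown above. `ExampleWebRTCProvider`, its `_negotiate` helper, and `register_example_provider` are illustrative names and are not part of this PR; a real provider would negotiate the SDP answer with its own backend.

```python
# Hypothetical sketch (not part of this PR): a custom provider implementing the
# CameraWebRTCProvider interface introduced in homeassistant/components/camera/webrtc.py.
from __future__ import annotations

from collections.abc import Callable

from webrtc_models import RTCIceCandidateInit

from homeassistant.components.camera import (
    Camera,
    CameraWebRTCProvider,
    WebRTCAnswer,
    WebRTCSendMessage,
    async_register_webrtc_provider,
)
from homeassistant.core import HomeAssistant, callback


class ExampleWebRTCProvider(CameraWebRTCProvider):
    """Illustrative provider that only claims RTSP-style stream sources."""

    @property
    def domain(self) -> str:
        """Return the integration domain owning this provider (hypothetical)."""
        return "example"

    @callback
    def async_is_supported(self, stream_source: str) -> bool:
        """Claim only rtsp:// sources; other providers may handle the rest."""
        return stream_source.startswith("rtsp://")

    async def async_handle_async_webrtc_offer(
        self,
        camera: Camera,
        offer_sdp: str,
        session_id: str,
        send_message: WebRTCSendMessage,
    ) -> None:
        """Negotiate an answer with the backend and push it via send_message."""
        answer_sdp = await self._negotiate(offer_sdp)  # hypothetical helper
        send_message(WebRTCAnswer(answer_sdp))

    async def async_on_webrtc_candidate(
        self, session_id: str, candidate: RTCIceCandidateInit
    ) -> None:
        """Forward trickle ICE candidates to the backend (omitted in this sketch)."""

    async def _negotiate(self, offer_sdp: str) -> str:
        """Placeholder for the provider's actual SDP negotiation."""
        raise NotImplementedError


@callback
def register_example_provider(hass: HomeAssistant) -> Callable[[], None]:
    """Register the provider; the returned callable removes it again."""
    # async_register_webrtc_provider raises ValueError if the camera
    # integration is not loaded yet, so call this after it has been set up.
    return async_register_webrtc_provider(hass, ExampleWebRTCProvider())
```

Per the diff above, the camera core then picks the first registered provider whose `async_is_supported` accepts the camera's stream source, and falls back to a legacy `CameraWebRTCLegacyProvider` only when no modern provider matches.
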
+ + +class CameraWebRTCLegacyProvider(Protocol): + """WebRTC provider.""" + + async def async_is_supported(self, stream_source: str) -> bool: + """Determine if the provider supports the stream source.""" + + async def async_handle_web_rtc_offer( + self, camera: Camera, offer_sdp: str + ) -> str | None: + """Handle the WebRTC offer and return an answer.""" + + +@callback +def async_register_webrtc_provider( + hass: HomeAssistant, + provider: CameraWebRTCProvider, +) -> Callable[[], None]: + """Register a WebRTC provider. + + The first provider to satisfy the offer will be used. + """ + if DOMAIN not in hass.data: + raise ValueError("Unexpected state, camera not loaded") + + providers = hass.data.setdefault(DATA_WEBRTC_PROVIDERS, set()) + + @callback + def remove_provider() -> None: + providers.remove(provider) + hass.async_create_task(_async_refresh_providers(hass)) + + if provider in providers: + raise ValueError("Provider already registered") + + providers.add(provider) + hass.async_create_task(_async_refresh_providers(hass)) + return remove_provider + + +async def _async_refresh_providers(hass: HomeAssistant) -> None: + """Check all cameras for any state changes for registered providers.""" + _async_check_conflicting_legacy_provider(hass) + + component = hass.data[DATA_COMPONENT] + await asyncio.gather( + *(camera.async_refresh_providers() for camera in component.entities) + ) + + +type WsCommandWithCamera = Callable[ + [websocket_api.ActiveConnection, dict[str, Any], Camera], + Awaitable[None], +] + + +def require_webrtc_support( + error_code: str, +) -> Callable[[WsCommandWithCamera], websocket_api.AsyncWebSocketCommandHandler]: + """Validate that the camera supports WebRTC.""" + + def decorate( + func: WsCommandWithCamera, + ) -> websocket_api.AsyncWebSocketCommandHandler: + """Decorate func.""" + + @wraps(func) + async def validate( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: + """Validate that the camera supports WebRTC.""" + entity_id = msg["entity_id"] + camera = get_camera_from_entity_id(hass, entity_id) + if StreamType.WEB_RTC not in ( + stream_types := camera.camera_capabilities.frontend_stream_types + ): + connection.send_error( + msg["id"], + error_code, + ( + "Camera does not support WebRTC," + f" frontend_stream_types={stream_types}" + ), + ) + return + + await func(connection, msg, camera) + + return validate + + return decorate + + +@websocket_api.websocket_command( + { + vol.Required("type"): "camera/webrtc/offer", + vol.Required("entity_id"): cv.entity_id, + vol.Required("offer"): str, + } +) +@websocket_api.async_response +@require_webrtc_support("webrtc_offer_failed") +async def ws_webrtc_offer( + connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera +) -> None: + """Handle the signal path for a WebRTC stream. + + This signal path is used to route the offer created by the client to the + camera device through the integration for negotiation on initial setup. + The ws endpoint returns a subscription id, where ice candidates and the + final answer will be returned. + The actual streaming is handled entirely between the client and camera device. + + Async friendly. 
+ """ + offer = msg["offer"] + session_id = ulid() + connection.subscriptions[msg["id"]] = partial( + camera.close_webrtc_session, session_id + ) + + connection.send_message(websocket_api.result_message(msg["id"])) + + @callback + def send_message(message: WebRTCMessage) -> None: + """Push a value to websocket.""" + connection.send_message( + websocket_api.event_message( + msg["id"], + message.as_dict(), + ) + ) + + send_message(WebRTCSession(session_id)) + + try: + await camera.async_handle_async_webrtc_offer(offer, session_id, send_message) + except HomeAssistantError as ex: + _LOGGER.error("Error handling WebRTC offer: %s", ex) + send_message( + WebRTCError( + "webrtc_offer_failed", + str(ex), + ) + ) + + +@websocket_api.websocket_command( + { + vol.Required("type"): "camera/webrtc/get_client_config", + vol.Required("entity_id"): cv.entity_id, + } +) +@websocket_api.async_response +@require_webrtc_support("webrtc_get_client_config_failed") +async def ws_get_client_config( + connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera +) -> None: + """Handle get WebRTC client config websocket command.""" + config = camera.async_get_webrtc_client_configuration().to_frontend_dict() + connection.send_result( + msg["id"], + config, + ) + + +def _parse_webrtc_candidate_init(value: Any) -> RTCIceCandidateInit: + """Validate and parse a WebRTCCandidateInit dict.""" + try: + return RTCIceCandidateInit.from_dict(value) + except (MissingField, ValueError) as ex: + raise vol.Invalid(str(ex)) from ex + + +@websocket_api.websocket_command( + { + vol.Required("type"): "camera/webrtc/candidate", + vol.Required("entity_id"): cv.entity_id, + vol.Required("session_id"): str, + vol.Required("candidate"): _parse_webrtc_candidate_init, + } +) +@websocket_api.async_response +@require_webrtc_support("webrtc_candidate_failed") +async def ws_candidate( + connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera +) -> None: + """Handle WebRTC candidate websocket command.""" + await camera.async_on_webrtc_candidate(msg["session_id"], msg["candidate"]) + connection.send_message(websocket_api.result_message(msg["id"])) + + +@callback +def async_register_ws(hass: HomeAssistant) -> None: + """Register camera webrtc ws endpoints.""" + + websocket_api.async_register_command(hass, ws_webrtc_offer) + websocket_api.async_register_command(hass, ws_get_client_config) + websocket_api.async_register_command(hass, ws_candidate) + + +async def async_get_supported_provider( + hass: HomeAssistant, camera: Camera +) -> CameraWebRTCProvider | None: + """Return the first supported provider for the camera.""" + providers = hass.data.get(DATA_WEBRTC_PROVIDERS) + if not providers or not (stream_source := await camera.stream_source()): + return None + + for provider in providers: + if provider.async_is_supported(stream_source): + return provider + + return None + + +async def async_get_supported_legacy_provider( + hass: HomeAssistant, camera: Camera +) -> CameraWebRTCLegacyProvider | None: + """Return the first supported provider for the camera.""" + providers = hass.data.get(DATA_WEBRTC_LEGACY_PROVIDERS) + if not providers or not (stream_source := await camera.stream_source()): + return None + + for provider in providers.values(): + if await provider.async_is_supported(stream_source): + return provider + + return None + + +@callback +def async_register_ice_servers( + hass: HomeAssistant, + get_ice_server_fn: Callable[[], Iterable[RTCIceServer]], +) -> Callable[[], None]: + """Register a ICE 
server. + + The registering integration is responsible to implement caching if needed. + """ + servers = hass.data.setdefault(DATA_ICE_SERVERS, []) + + def remove() -> None: + servers.remove(get_ice_server_fn) + + servers.append(get_ice_server_fn) + return remove + + +# The following code is legacy code that was introduced with rtsp_to_webrtc and will be deprecated/removed in the future. +# Left it so custom integrations can still use it. + +_RTSP_PREFIXES = {"rtsp://", "rtsps://", "rtmp://"} + +# An RtspToWebRtcProvider accepts these inputs: +# stream_source: The RTSP url +# offer_sdp: The WebRTC SDP offer +# stream_id: A unique id for the stream, used to update an existing source +# The output is the SDP answer, or None if the source or offer is not eligible. +# The Callable may throw HomeAssistantError on failure. +type RtspToWebRtcProviderType = Callable[[str, str, str], Awaitable[str | None]] + + +class _CameraRtspToWebRTCProvider(CameraWebRTCLegacyProvider): + def __init__(self, fn: RtspToWebRtcProviderType) -> None: + """Initialize the RTSP to WebRTC provider.""" + self._fn = fn + + async def async_is_supported(self, stream_source: str) -> bool: + """Return if this provider is supports the Camera as source.""" + return any(stream_source.startswith(prefix) for prefix in _RTSP_PREFIXES) + + async def async_handle_web_rtc_offer( + self, camera: Camera, offer_sdp: str + ) -> str | None: + """Handle the WebRTC offer and return an answer.""" + if not (stream_source := await camera.stream_source()): + return None + + return await self._fn(stream_source, offer_sdp, camera.entity_id) + + +@deprecated_function("async_register_webrtc_provider", breaks_in_ha_version="2025.6") +def async_register_rtsp_to_web_rtc_provider( + hass: HomeAssistant, + domain: str, + provider: RtspToWebRtcProviderType, +) -> Callable[[], None]: + """Register an RTSP to WebRTC provider. + + The first provider to satisfy the offer will be used. 
+ """ + if DOMAIN not in hass.data: + raise ValueError("Unexpected state, camera not loaded") + + legacy_providers = hass.data.setdefault(DATA_WEBRTC_LEGACY_PROVIDERS, {}) + + if domain in legacy_providers: + raise ValueError("Provider already registered") + + provider_instance = _CameraRtspToWebRTCProvider(provider) + + @callback + def remove_provider() -> None: + legacy_providers.pop(domain) + hass.async_create_task(_async_refresh_providers(hass)) + + legacy_providers[domain] = provider_instance + hass.async_create_task(_async_refresh_providers(hass)) + + return remove_provider + + +@callback +def _async_check_conflicting_legacy_provider(hass: HomeAssistant) -> None: + """Check if a legacy provider is registered together with the builtin provider.""" + builtin_provider_domain = "go2rtc" + if ( + (legacy_providers := hass.data.get(DATA_WEBRTC_LEGACY_PROVIDERS)) + and (providers := hass.data.get(DATA_WEBRTC_PROVIDERS)) + and any(provider.domain == builtin_provider_domain for provider in providers) + ): + for domain in legacy_providers: + ir.async_create_issue( + hass, + DOMAIN, + f"legacy_webrtc_provider_{domain}", + is_fixable=False, + is_persistent=False, + issue_domain=domain, + learn_more_url="https://www.home-assistant.io/integrations/go2rtc/", + severity=ir.IssueSeverity.WARNING, + translation_key="legacy_webrtc_provider", + translation_placeholders={ + "legacy_integration": domain, + "builtin_integration": builtin_provider_domain, + }, + ) diff --git a/homeassistant/components/canary/alarm_control_panel.py b/homeassistant/components/canary/alarm_control_panel.py index a7d5dc8ab98..69600e4bbc7 100644 --- a/homeassistant/components/canary/alarm_control_panel.py +++ b/homeassistant/components/canary/alarm_control_panel.py @@ -10,14 +10,9 @@ from canary.model import Location from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -70,18 +65,18 @@ class CanaryAlarm( return self.coordinator.data["locations"][self._location_id] @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" if self.location.is_private: - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED mode = self.location.mode if mode.name == LOCATION_MODE_AWAY: - return STATE_ALARM_ARMED_AWAY + return AlarmControlPanelState.ARMED_AWAY if mode.name == LOCATION_MODE_HOME: - return STATE_ALARM_ARMED_HOME + return AlarmControlPanelState.ARMED_HOME if mode.name == LOCATION_MODE_NIGHT: - return STATE_ALARM_ARMED_NIGHT + return AlarmControlPanelState.ARMED_NIGHT return None diff --git a/homeassistant/components/canary/config_flow.py b/homeassistant/components/canary/config_flow.py index 6ae7632a7e2..17e660e96ac 100644 --- a/homeassistant/components/canary/config_flow.py +++ b/homeassistant/components/canary/config_flow.py @@ -52,21 +52,16 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return CanaryOptionsFlowHandler(config_entry) + return 
CanaryOptionsFlowHandler() - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initiated by configuration file.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} default_username = "" @@ -106,10 +101,6 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN): class CanaryOptionsFlowHandler(OptionsFlow): """Handle Canary client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/canary/manifest.json b/homeassistant/components/canary/manifest.json index 4d5adf4a32b..9383bc91556 100644 --- a/homeassistant/components/canary/manifest.json +++ b/homeassistant/components/canary/manifest.json @@ -7,5 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/canary", "iot_class": "cloud_polling", "loggers": ["canary"], - "requirements": ["py-canary==0.5.4"] + "requirements": ["py-canary==0.5.4"], + "single_config_entry": true } diff --git a/homeassistant/components/canary/strings.json b/homeassistant/components/canary/strings.json index 9555756deff..699e8b25e11 100644 --- a/homeassistant/components/canary/strings.json +++ b/homeassistant/components/canary/strings.json @@ -14,7 +14,6 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "unknown": "[%key:common::config_flow::error::unknown%]" } }, diff --git a/homeassistant/components/cast/config_flow.py b/homeassistant/components/cast/config_flow.py index 22351f5d2f7..03a3f2ea1f8 100644 --- a/homeassistant/components/cast/config_flow.py +++ b/homeassistant/components/cast/config_flow.py @@ -41,29 +41,25 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> CastOptionsFlowHandler: """Get the options flow for this handler.""" - return CastOptionsFlowHandler(config_entry) + return CastOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return await self.async_step_config() async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle a flow initialized by zeroconf discovery.""" - if self._async_in_progress() or self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - await self.async_set_unique_id(DOMAIN) return await self.async_step_confirm() - async def async_step_config(self, user_input=None): + async def async_step_config( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm the setup.""" errors = {} data = {CONF_KNOWN_HOSTS: self._known_hosts} @@ -90,7 +86,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): step_id="config", data_schema=vol.Schema(fields), errors=errors ) - async def 
async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm the setup.""" data = self._get_data() @@ -111,18 +109,19 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): class CastOptionsFlowHandler(OptionsFlow): """Handle Google Cast options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize Google Cast options flow.""" - self.config_entry = config_entry self.updated_config: dict[str, Any] = {} - async def async_step_init(self, user_input=None): + async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the Google Cast options.""" return await self.async_step_basic_options() - async def async_step_basic_options(self, user_input=None): + async def async_step_basic_options( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the Google Cast options.""" - errors = {} + errors: dict[str, str] = {} current_config = self.config_entry.data if user_input is not None: bad_hosts, known_hosts = _string_to_list( @@ -139,9 +138,9 @@ class CastOptionsFlowHandler(OptionsFlow): self.hass.config_entries.async_update_entry( self.config_entry, data=self.updated_config ) - return self.async_create_entry(title="", data=None) + return self.async_create_entry(title="", data={}) - fields = {} + fields: dict[vol.Marker, type[str]] = {} suggested_value = _list_to_string(current_config.get(CONF_KNOWN_HOSTS)) _add_with_suggestion(fields, CONF_KNOWN_HOSTS, suggested_value) @@ -152,9 +151,11 @@ class CastOptionsFlowHandler(OptionsFlow): last_step=not self.show_advanced_options, ) - async def async_step_advanced_options(self, user_input=None): + async def async_step_advanced_options( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the Google Cast options.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: bad_cec, ignore_cec = _string_to_list( user_input.get(CONF_IGNORE_CEC, ""), IGNORE_CEC_SCHEMA @@ -169,9 +170,9 @@ class CastOptionsFlowHandler(OptionsFlow): self.hass.config_entries.async_update_entry( self.config_entry, data=self.updated_config ) - return self.async_create_entry(title="", data=None) + return self.async_create_entry(title="", data={}) - fields = {} + fields: dict[vol.Marker, type[str]] = {} current_config = self.config_entry.data suggested_value = _list_to_string(current_config.get(CONF_UUID)) _add_with_suggestion(fields, CONF_UUID, suggested_value) @@ -204,5 +205,7 @@ def _string_to_list(string, schema): return invalid, items -def _add_with_suggestion(fields, key, suggested_value): +def _add_with_suggestion( + fields: dict[vol.Marker, type[str]], key: str, suggested_value: str +) -> None: fields[vol.Optional(key, description={"suggested_value": suggested_value})] = str diff --git a/homeassistant/components/cast/helpers.py b/homeassistant/components/cast/helpers.py index 865ea1ac3f6..228c69b65ec 100644 --- a/homeassistant/components/cast/helpers.py +++ b/homeassistant/components/cast/helpers.py @@ -80,7 +80,7 @@ class ChromecastInfo: "+label%3A%22integration%3A+cast%22" ) - _LOGGER.info( + _LOGGER.debug( ( "Fetched cast details for unknown model '%s' manufacturer:" " '%s', type: '%s'. 
Please %s" diff --git a/homeassistant/components/cast/icons.json b/homeassistant/components/cast/icons.json index e19ea0b07b2..a43411eaad3 100644 --- a/homeassistant/components/cast/icons.json +++ b/homeassistant/components/cast/icons.json @@ -1,5 +1,7 @@ { "services": { - "show_lovelace_view": "mdi:view-dashboard" + "show_lovelace_view": { + "service": "mdi:view-dashboard" + } } } diff --git a/homeassistant/components/cast/manifest.json b/homeassistant/components/cast/manifest.json index 1d06ae23ca2..0650f267544 100644 --- a/homeassistant/components/cast/manifest.json +++ b/homeassistant/components/cast/manifest.json @@ -14,6 +14,7 @@ "documentation": "https://www.home-assistant.io/integrations/cast", "iot_class": "local_polling", "loggers": ["casttube", "pychromecast"], - "requirements": ["PyChromecast==14.0.1"], + "requirements": ["PyChromecast==14.0.5"], + "single_config_entry": true, "zeroconf": ["_googlecast._tcp.local."] } diff --git a/homeassistant/components/cast/media_player.py b/homeassistant/components/cast/media_player.py index 028a01e6f22..28db97a857d 100644 --- a/homeassistant/components/cast/media_player.py +++ b/homeassistant/components/cast/media_player.py @@ -693,7 +693,7 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity): # an arbitrary cast app, generally for UX. if "app_id" in app_data: app_id = app_data.pop("app_id") - _LOGGER.info("Starting Cast app by ID %s", app_id) + _LOGGER.debug("Starting Cast app by ID %s", app_id) await self.hass.async_add_executor_job(self._start_app, app_id) if app_data: _LOGGER.warning( diff --git a/homeassistant/components/cast/strings.json b/homeassistant/components/cast/strings.json index ce622e48aae..9c49813bd83 100644 --- a/homeassistant/components/cast/strings.json +++ b/homeassistant/components/cast/strings.json @@ -12,9 +12,6 @@ } } }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" - }, "error": { "invalid_known_hosts": "Known hosts must be a comma separated list of hosts." } @@ -56,7 +53,7 @@ }, "view_path": { "name": "View path", - "description": "The path of the dashboard view to show." + "description": "The URL path of the dashboard view to show." 
} } } diff --git a/homeassistant/components/ccm15/climate.py b/homeassistant/components/ccm15/climate.py index a6e5d2cab61..3db8c3e1016 100644 --- a/homeassistant/components/ccm15/climate.py +++ b/homeassistant/components/ccm15/climate.py @@ -70,7 +70,6 @@ class CCM15Climate(CoordinatorEntity[CCM15Coordinator], ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, ac_host: str, ac_index: int, coordinator: CCM15Coordinator diff --git a/homeassistant/components/cert_expiry/config_flow.py b/homeassistant/components/cert_expiry/config_flow.py index 8f937ef61ea..c351435a73e 100644 --- a/homeassistant/components/cert_expiry/config_flow.py +++ b/homeassistant/components/cert_expiry/config_flow.py @@ -74,7 +74,7 @@ class CertexpiryConfigFlow(ConfigFlow, domain=DOMAIN): title=title, data={CONF_HOST: host, CONF_PORT: port}, ) - if self.context["source"] == SOURCE_IMPORT: + if self.source == SOURCE_IMPORT: _LOGGER.error("Config import failed for %s", user_input[CONF_HOST]) return self.async_abort(reason="import_failed") else: @@ -94,13 +94,3 @@ class CertexpiryConfigFlow(ConfigFlow, domain=DOMAIN): ), errors=self._errors, ) - - async def async_step_import( - self, - user_input: Mapping[str, Any] | None = None, - ) -> ConfigFlowResult: - """Import a config entry. - - Only host was required in the yaml file all other fields are optional - """ - return await self.async_step_user(user_input) diff --git a/homeassistant/components/cert_expiry/entity.py b/homeassistant/components/cert_expiry/entity.py new file mode 100644 index 00000000000..f412f16fba8 --- /dev/null +++ b/homeassistant/components/cert_expiry/entity.py @@ -0,0 +1,23 @@ +"""Counter for the days until an HTTPS (TLS) certificate will expire.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import CertExpiryDataUpdateCoordinator + + +class CertExpiryEntity(CoordinatorEntity[CertExpiryDataUpdateCoordinator]): + """Defines a base Cert Expiry entity.""" + + _attr_has_entity_name = True + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return additional sensor state attributes.""" + return { + "is_valid": self.coordinator.is_cert_valid, + "error": str(self.coordinator.cert_error), + } diff --git a/homeassistant/components/cert_expiry/sensor.py b/homeassistant/components/cert_expiry/sensor.py index f52ff8a40d8..4fd0846f0f3 100644 --- a/homeassistant/components/cert_expiry/sensor.py +++ b/homeassistant/components/cert_expiry/sensor.py @@ -2,62 +2,17 @@ from __future__ import annotations -from datetime import datetime, timedelta -from typing import Any +from datetime import datetime -import voluptuous as vol - -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorDeviceClass, - SensorEntity, -) -from homeassistant.config_entries import SOURCE_IMPORT -from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_START -from homeassistant.core import Event, HomeAssistant, callback -import homeassistant.helpers.config_validation as cv +from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later -from 
homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import CertExpiryConfigEntry, CertExpiryDataUpdateCoordinator -from .const import DEFAULT_PORT, DOMAIN - -SCAN_INTERVAL = timedelta(hours=12) - -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_HOST): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - } -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up certificate expiry sensor.""" - - @callback - def schedule_import(_: Event) -> None: - """Schedule delayed import after HA is fully started.""" - async_call_later(hass, 10, do_import) - - @callback - def do_import(_: datetime) -> None: - """Process YAML import.""" - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=dict(config) - ) - ) - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, schedule_import) +from . import CertExpiryConfigEntry +from .const import DOMAIN +from .coordinator import CertExpiryDataUpdateCoordinator +from .entity import CertExpiryEntity async def async_setup_entry( @@ -73,20 +28,6 @@ async def async_setup_entry( async_add_entities(sensors, True) -class CertExpiryEntity(CoordinatorEntity[CertExpiryDataUpdateCoordinator]): - """Defines a base Cert Expiry entity.""" - - _attr_has_entity_name = True - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return additional sensor state attributes.""" - return { - "is_valid": self.coordinator.is_cert_valid, - "error": str(self.coordinator.cert_error), - } - - class SSLCertificateTimestamp(CertExpiryEntity, SensorEntity): """Implementation of the Cert Expiry timestamp sensor.""" diff --git a/homeassistant/components/channels/icons.json b/homeassistant/components/channels/icons.json index cbbda1ef623..ad5504a5422 100644 --- a/homeassistant/components/channels/icons.json +++ b/homeassistant/components/channels/icons.json @@ -1,7 +1,13 @@ { "services": { - "seek_forward": "mdi:skip-forward", - "seek_backward": "mdi:skip-backward", - "seek_by": "mdi:timer-check-outline" + "seek_forward": { + "service": "mdi:skip-forward" + }, + "seek_backward": { + "service": "mdi:skip-backward" + }, + "seek_by": { + "service": "mdi:timer-check-outline" + } } } diff --git a/homeassistant/components/channels/manifest.json b/homeassistant/components/channels/manifest.json index 0455ca2e8ad..9476e006eda 100644 --- a/homeassistant/components/channels/manifest.json +++ b/homeassistant/components/channels/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/channels", "iot_class": "local_polling", "loggers": ["pychannels"], + "quality_scale": "legacy", "requirements": ["pychannels==1.2.3"] } diff --git a/homeassistant/components/cisco_ios/device_tracker.py b/homeassistant/components/cisco_ios/device_tracker.py index 485a825b51f..1f78f95c259 100644 --- a/homeassistant/components/cisco_ios/device_tracker.py +++ b/homeassistant/components/cisco_ios/device_tracker.py @@ -9,7 +9,7 @@ from pexpect import pxssh import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -34,7 +34,7 @@ PLATFORM_SCHEMA = vol.All( def get_scanner(hass: HomeAssistant, config: ConfigType) 
-> CiscoDeviceScanner | None: """Validate the configuration and return a Cisco scanner.""" - scanner = CiscoDeviceScanner(config[DOMAIN]) + scanner = CiscoDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None @@ -52,7 +52,6 @@ class CiscoDeviceScanner(DeviceScanner): self.last_results = {} self.success_init = self._update_info() - _LOGGER.info("Initialized cisco_ios scanner") async def async_get_device_name(self, device: str) -> str | None: """Get the firmware doesn't save the name of the wireless device.""" diff --git a/homeassistant/components/cisco_ios/manifest.json b/homeassistant/components/cisco_ios/manifest.json index dd0d4213973..ba0678c167f 100644 --- a/homeassistant/components/cisco_ios/manifest.json +++ b/homeassistant/components/cisco_ios/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/cisco_ios", "iot_class": "local_polling", "loggers": ["pexpect", "ptyprocess"], + "quality_scale": "legacy", "requirements": ["pexpect==4.6.0"] } diff --git a/homeassistant/components/cisco_mobility_express/device_tracker.py b/homeassistant/components/cisco_mobility_express/device_tracker.py index 38d2c78c66a..2c7398ae172 100644 --- a/homeassistant/components/cisco_mobility_express/device_tracker.py +++ b/homeassistant/components/cisco_mobility_express/device_tracker.py @@ -8,7 +8,7 @@ from ciscomobilityexpress.ciscome import CiscoMobilityExpress import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -42,7 +42,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> CiscoMEDeviceScanner | None: """Validate the configuration and return a Cisco ME scanner.""" - config = config[DOMAIN] + config = config[DEVICE_TRACKER_DOMAIN] controller = CiscoMobilityExpress( config[CONF_HOST], diff --git a/homeassistant/components/cisco_mobility_express/manifest.json b/homeassistant/components/cisco_mobility_express/manifest.json index 02786e80cd8..f9ee1c92ed1 100644 --- a/homeassistant/components/cisco_mobility_express/manifest.json +++ b/homeassistant/components/cisco_mobility_express/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/cisco_mobility_express", "iot_class": "local_polling", "loggers": ["ciscomobilityexpress"], + "quality_scale": "legacy", "requirements": ["ciscomobilityexpress==0.3.9"] } diff --git a/homeassistant/components/cisco_webex_teams/__init__.py b/homeassistant/components/cisco_webex_teams/__init__.py index 0a8714806a1..5932f2ed680 100644 --- a/homeassistant/components/cisco_webex_teams/__init__.py +++ b/homeassistant/components/cisco_webex_teams/__init__.py @@ -1 +1 @@ -"""Component to integrate the Cisco Webex Teams cloud.""" +"""Component to integrate the Cisco Webex cloud.""" diff --git a/homeassistant/components/cisco_webex_teams/manifest.json b/homeassistant/components/cisco_webex_teams/manifest.json index 822919213c2..85cfeb7eddf 100644 --- a/homeassistant/components/cisco_webex_teams/manifest.json +++ b/homeassistant/components/cisco_webex_teams/manifest.json @@ -2,9 +2,9 @@ "domain": "cisco_webex_teams", "name": "Cisco Webex Teams", "codeowners": ["@fbradyirl"], - "disabled": "Integration library not compatible with Python 3.12", "documentation": "https://www.home-assistant.io/integrations/cisco_webex_teams", "iot_class": "cloud_push", - "loggers": 
["webexteamssdk"], - "requirements": ["webexteamssdk==1.1.1;python_version<'3.12'"] + "loggers": ["webexpythonsdk"], + "quality_scale": "legacy", + "requirements": ["webexpythonsdk==2.0.1"] } diff --git a/homeassistant/components/cisco_webex_teams/notify.py b/homeassistant/components/cisco_webex_teams/notify.py index b93ebb273dd..74d033c62d4 100644 --- a/homeassistant/components/cisco_webex_teams/notify.py +++ b/homeassistant/components/cisco_webex_teams/notify.py @@ -1,11 +1,11 @@ -"""Cisco Webex Teams notify component.""" +"""Cisco Webex notify component.""" from __future__ import annotations import logging import voluptuous as vol -from webexteamssdk import ApiError, WebexTeamsAPI, exceptions +from webexpythonsdk import ApiError, WebexAPI, exceptions from homeassistant.components.notify import ( ATTR_TITLE, @@ -30,9 +30,9 @@ def get_service( hass: HomeAssistant, config: ConfigType, discovery_info: DiscoveryInfoType | None = None, -) -> CiscoWebexTeamsNotificationService | None: - """Get the CiscoWebexTeams notification service.""" - client = WebexTeamsAPI(access_token=config[CONF_TOKEN]) +) -> CiscoWebexNotificationService | None: + """Get the Cisco Webex notification service.""" + client = WebexAPI(access_token=config[CONF_TOKEN]) try: # Validate the token & room_id client.rooms.get(config[CONF_ROOM_ID]) @@ -40,11 +40,11 @@ def get_service( _LOGGER.error(error) return None - return CiscoWebexTeamsNotificationService(client, config[CONF_ROOM_ID]) + return CiscoWebexNotificationService(client, config[CONF_ROOM_ID]) -class CiscoWebexTeamsNotificationService(BaseNotificationService): - """The Cisco Webex Teams Notification Service.""" +class CiscoWebexNotificationService(BaseNotificationService): + """The Cisco Webex Notification Service.""" def __init__(self, client, room): """Initialize the service.""" @@ -62,5 +62,5 @@ class CiscoWebexTeamsNotificationService(BaseNotificationService): self.client.messages.create(roomId=self.room, html=f"{title}{message}") except ApiError as api_error: _LOGGER.error( - "Could not send CiscoWebexTeams notification. Error: %s", api_error + "Could not send Cisco Webex notification. 
Error: %s", api_error ) diff --git a/homeassistant/components/citybikes/manifest.json b/homeassistant/components/citybikes/manifest.json index e163b85ec08..8dac7def832 100644 --- a/homeassistant/components/citybikes/manifest.json +++ b/homeassistant/components/citybikes/manifest.json @@ -3,5 +3,6 @@ "name": "CityBikes", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/citybikes", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/clementine/manifest.json b/homeassistant/components/clementine/manifest.json index 88e7f35f49a..42fe81d0e9b 100644 --- a/homeassistant/components/clementine/manifest.json +++ b/homeassistant/components/clementine/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/clementine", "iot_class": "local_polling", "loggers": ["clementineremote"], + "quality_scale": "legacy", "requirements": ["python-clementine-remote==1.0.1"] } diff --git a/homeassistant/components/clickatell/manifest.json b/homeassistant/components/clickatell/manifest.json index 31456b25c64..3c5ee8b0053 100644 --- a/homeassistant/components/clickatell/manifest.json +++ b/homeassistant/components/clickatell/manifest.json @@ -3,5 +3,6 @@ "name": "Clickatell", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/clickatell", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/clicksend/manifest.json b/homeassistant/components/clicksend/manifest.json index 41bd10108f4..8a43428026b 100644 --- a/homeassistant/components/clicksend/manifest.json +++ b/homeassistant/components/clicksend/manifest.json @@ -3,5 +3,6 @@ "name": "ClickSend SMS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/clicksend", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/clicksend_tts/manifest.json b/homeassistant/components/clicksend_tts/manifest.json index ffa35fd070f..eb884e41203 100644 --- a/homeassistant/components/clicksend_tts/manifest.json +++ b/homeassistant/components/clicksend_tts/manifest.json @@ -3,5 +3,6 @@ "name": "ClickSend TTS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/clicksend_tts", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index 6097e4f1346..ca85979f19a 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -2,13 +2,12 @@ from __future__ import annotations -import asyncio from datetime import timedelta import functools as ft -from functools import cached_property import logging from typing import Any, Literal, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -26,34 +25,15 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, issue_registry as ir -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from 
homeassistant.helpers.entity_component import EntityComponent -from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.temperature import display_temp as show_temp from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_issue_tracker, async_suggest_report_issue +from homeassistant.util.hass_dict import HassKey from homeassistant.util.unit_conversion import TemperatureConverter from .const import ( # noqa: F401 - _DEPRECATED_HVAC_MODE_AUTO, - _DEPRECATED_HVAC_MODE_COOL, - _DEPRECATED_HVAC_MODE_DRY, - _DEPRECATED_HVAC_MODE_FAN_ONLY, - _DEPRECATED_HVAC_MODE_HEAT, - _DEPRECATED_HVAC_MODE_HEAT_COOL, - _DEPRECATED_HVAC_MODE_OFF, - _DEPRECATED_SUPPORT_AUX_HEAT, - _DEPRECATED_SUPPORT_FAN_MODE, - _DEPRECATED_SUPPORT_PRESET_MODE, - _DEPRECATED_SUPPORT_SWING_MODE, - _DEPRECATED_SUPPORT_TARGET_HUMIDITY, - _DEPRECATED_SUPPORT_TARGET_TEMPERATURE, - _DEPRECATED_SUPPORT_TARGET_TEMPERATURE_RANGE, ATTR_AUX_HEAT, ATTR_CURRENT_HUMIDITY, ATTR_CURRENT_TEMPERATURE, @@ -69,6 +49,8 @@ from .const import ( # noqa: F401 ATTR_MIN_TEMP, ATTR_PRESET_MODE, ATTR_PRESET_MODES, + ATTR_SWING_HORIZONTAL_MODE, + ATTR_SWING_HORIZONTAL_MODES, ATTR_SWING_MODE, ATTR_SWING_MODES, ATTR_TARGET_TEMP_HIGH, @@ -86,6 +68,7 @@ from .const import ( # noqa: F401 FAN_ON, FAN_TOP, HVAC_MODES, + INTENT_GET_TEMPERATURE, PRESET_ACTIVITY, PRESET_AWAY, PRESET_BOOST, @@ -99,6 +82,7 @@ from .const import ( # noqa: F401 SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, + SERVICE_SET_SWING_HORIZONTAL_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, SWING_BOTH, @@ -113,6 +97,7 @@ from .const import ( # noqa: F401 _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[ClimateEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -149,7 +134,7 @@ SET_TEMPERATURE_SCHEMA = vol.All( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up climate entities.""" - component = hass.data[DOMAIN] = EntityComponent[ClimateEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[ClimateEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -175,7 +160,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_SET_HVAC_MODE, {vol.Required(ATTR_HVAC_MODE): vol.Coerce(HVACMode)}, - "async_set_hvac_mode", + "async_handle_set_hvac_mode_service", ) component.async_register_entity_service( SERVICE_SET_PRESET_MODE, @@ -201,7 +186,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_SET_HUMIDITY, {vol.Required(ATTR_HUMIDITY): vol.Coerce(int)}, - "async_set_humidity", + async_service_humidity_set, [ClimateEntityFeature.TARGET_HUMIDITY], ) component.async_register_entity_service( @@ -216,20 +201,24 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: "async_handle_set_swing_mode_service", [ClimateEntityFeature.SWING_MODE], ) + component.async_register_entity_service( + SERVICE_SET_SWING_HORIZONTAL_MODE, + {vol.Required(ATTR_SWING_HORIZONTAL_MODE): cv.string}, + "async_handle_set_swing_horizontal_mode_service", + [ClimateEntityFeature.SWING_HORIZONTAL_MODE], + ) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[ClimateEntity] = 
hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[ClimateEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class ClimateEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -255,6 +244,8 @@ CACHED_PROPERTIES_WITH_ATTR_ = { "fan_modes", "swing_mode", "swing_modes", + "swing_horizontal_mode", + "swing_horizontal_modes", "supported_features", "min_temp", "max_temp", @@ -299,6 +290,8 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): _attr_supported_features: ClimateEntityFeature = ClimateEntityFeature(0) _attr_swing_mode: str | None _attr_swing_modes: list[str] | None + _attr_swing_horizontal_mode: str | None + _attr_swing_horizontal_modes: list[str] | None _attr_target_humidity: float | None = None _attr_target_temperature_high: float | None _attr_target_temperature_low: float | None @@ -308,115 +301,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): __climate_reported_legacy_aux = False - __mod_supported_features: ClimateEntityFeature = ClimateEntityFeature(0) - # Integrations should set `_enable_turn_on_off_backwards_compatibility` to False - # once migrated and set the feature flags TURN_ON/TURN_OFF as needed. - _enable_turn_on_off_backwards_compatibility: bool = True - - def __getattribute__(self, __name: str) -> Any: - """Get attribute. - - Modify return of `supported_features` to - include `_mod_supported_features` if attribute is set. - """ - if __name != "supported_features": - return super().__getattribute__(__name) - - # Convert the supported features to ClimateEntityFeature. - # Remove this compatibility shim in 2025.1 or later. - _supported_features: ClimateEntityFeature = super().__getattribute__( - "supported_features" - ) - _mod_supported_features: ClimateEntityFeature = super().__getattribute__( - "_ClimateEntity__mod_supported_features" - ) - if type(_supported_features) is int: # noqa: E721 - _features = ClimateEntityFeature(_supported_features) - self._report_deprecated_supported_features_values(_features) - else: - _features = _supported_features - - if not _mod_supported_features: - return _features - - # Add automatically calculated ClimateEntityFeature.TURN_OFF/TURN_ON to - # supported features and return it - return _features | _mod_supported_features - - @callback - def add_to_platform_start( - self, - hass: HomeAssistant, - platform: EntityPlatform, - parallel_updates: asyncio.Semaphore | None, - ) -> None: - """Start adding an entity to a platform.""" - super().add_to_platform_start(hass, platform, parallel_updates) - - def _report_turn_on_off(feature: str, method: str) -> None: - """Log warning not implemented turn on/off feature.""" - report_issue = self._suggest_report_issue() - if feature.startswith("TURN"): - message = ( - "Entity %s (%s) does not set ClimateEntityFeature.%s" - " but implements the %s method. Please %s" - ) - else: - message = ( - "Entity %s (%s) implements HVACMode(s): %s and therefore implicitly" - " supports the %s methods without setting the proper" - " ClimateEntityFeature. 
Please %s" - ) - _LOGGER.warning( - message, - self.entity_id, - type(self), - feature, - method, - report_issue, - ) - - # Adds ClimateEntityFeature.TURN_OFF/TURN_ON depending on service calls implemented - # This should be removed in 2025.1. - if self._enable_turn_on_off_backwards_compatibility is False: - # Return if integration has migrated already - return - - supported_features = self.supported_features - if supported_features & CHECK_TURN_ON_OFF_FEATURE_FLAG: - # The entity supports both turn_on and turn_off, the backwards compatibility - # checks are not needed - return - - if not supported_features & ClimateEntityFeature.TURN_OFF and ( - type(self).async_turn_off is not ClimateEntity.async_turn_off - or type(self).turn_off is not ClimateEntity.turn_off - ): - # turn_off implicitly supported by implementing turn_off method - _report_turn_on_off("TURN_OFF", "turn_off") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - ClimateEntityFeature.TURN_OFF - ) - - if not supported_features & ClimateEntityFeature.TURN_ON and ( - type(self).async_turn_on is not ClimateEntity.async_turn_on - or type(self).turn_on is not ClimateEntity.turn_on - ): - # turn_on implicitly supported by implementing turn_on method - _report_turn_on_off("TURN_ON", "turn_on") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - ClimateEntityFeature.TURN_ON - ) - - if (modes := self.hvac_modes) and len(modes) >= 2 and HVACMode.OFF in modes: - # turn_on/off implicitly supported by including more modes than 1 and one of these - # are HVACMode.OFF - _modes = [_mode for _mode in modes if _mode is not None] - _report_turn_on_off(", ".join(_modes or []), "turn_on/turn_off") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF - ) - def _report_legacy_aux(self) -> None: """Log warning and create an issue if the entity implements legacy auxiliary heater.""" @@ -429,7 +313,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ( "%s::%s implements the `is_aux_heat` property or uses the auxiliary " "heater methods in a subclass of ClimateEntity which is " - "deprecated and will be unsupported from Home Assistant 2024.10." + "deprecated and will be unsupported from Home Assistant 2025.4." 
" Please %s" ), self.platform.platform_name, @@ -451,7 +335,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): self.hass, DOMAIN, f"deprecated_climate_aux_{self.platform.platform_name}", - breaks_in_ha_version="2024.10.0", + breaks_in_ha_version="2025.4.0", is_fixable=False, is_persistent=False, issue_domain=self.platform.platform_name, @@ -512,6 +396,9 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if ClimateEntityFeature.SWING_MODE in supported_features: data[ATTR_SWING_MODES] = self.swing_modes + if ClimateEntityFeature.SWING_HORIZONTAL_MODE in supported_features: + data[ATTR_SWING_HORIZONTAL_MODES] = self.swing_horizontal_modes + return data @final @@ -563,6 +450,9 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if ClimateEntityFeature.SWING_MODE in supported_features: data[ATTR_SWING_MODE] = self.swing_mode + if ClimateEntityFeature.SWING_HORIZONTAL_MODE in supported_features: + data[ATTR_SWING_HORIZONTAL_MODE] = self.swing_horizontal_mode + if ClimateEntityFeature.AUX_HEAT in supported_features: data[ATTR_AUX_HEAT] = STATE_ON if self.is_aux_heat else STATE_OFF if ( @@ -690,24 +580,55 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """ return self._attr_swing_modes + @cached_property + def swing_horizontal_mode(self) -> str | None: + """Return the horizontal swing setting. + + Requires ClimateEntityFeature.SWING_HORIZONTAL_MODE. + """ + return self._attr_swing_horizontal_mode + + @cached_property + def swing_horizontal_modes(self) -> list[str] | None: + """Return the list of available horizontal swing modes. + + Requires ClimateEntityFeature.SWING_HORIZONTAL_MODE. + """ + return self._attr_swing_horizontal_modes + @final @callback def _valid_mode_or_raise( self, - mode_type: Literal["preset", "swing", "fan"], - mode: str, - modes: list[str] | None, + mode_type: Literal["preset", "horizontal_swing", "swing", "fan", "hvac"], + mode: str | HVACMode, + modes: list[str] | list[HVACMode] | None, ) -> None: """Raise ServiceValidationError on invalid modes.""" if modes and mode in modes: return modes_str: str = ", ".join(modes) if modes else "" - if mode_type == "preset": - translation_key = "not_valid_preset_mode" - elif mode_type == "swing": - translation_key = "not_valid_swing_mode" - elif mode_type == "fan": - translation_key = "not_valid_fan_mode" + translation_key = f"not_valid_{mode_type}_mode" + if mode_type == "hvac": + report_issue = async_suggest_report_issue( + self.hass, + integration_domain=self.platform.platform_name, + module=type(self).__module__, + ) + _LOGGER.warning( + ( + "%s::%s sets the hvac_mode %s which is not " + "valid for this entity with modes: %s. " + "This will stop working in 2025.4 and raise an error instead. 
" + "Please %s" + ), + self.platform.platform_name, + self.__class__.__name__, + mode, + modes_str, + report_issue, + ) + return raise ServiceValidationError( translation_domain=DOMAIN, translation_key=translation_key, @@ -749,6 +670,12 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Set new target fan mode.""" await self.hass.async_add_executor_job(self.set_fan_mode, fan_mode) + @final + async def async_handle_set_hvac_mode_service(self, hvac_mode: HVACMode) -> None: + """Validate and set new preset mode.""" + self._valid_mode_or_raise("hvac", hvac_mode, self.hvac_modes) + await self.async_set_hvac_mode(hvac_mode) + def set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target hvac mode.""" raise NotImplementedError @@ -771,6 +698,26 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Set new target swing operation.""" await self.hass.async_add_executor_job(self.set_swing_mode, swing_mode) + @final + async def async_handle_set_swing_horizontal_mode_service( + self, swing_horizontal_mode: str + ) -> None: + """Validate and set new horizontal swing mode.""" + self._valid_mode_or_raise( + "horizontal_swing", swing_horizontal_mode, self.swing_horizontal_modes + ) + await self.async_set_swing_horizontal_mode(swing_horizontal_mode) + + def set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None: + """Set new target horizontal swing operation.""" + raise NotImplementedError + + async def async_set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None: + """Set new target horizontal swing operation.""" + await self.hass.async_add_executor_job( + self.set_swing_horizontal_mode, swing_horizontal_mode + ) + @final async def async_handle_set_preset_mode_service(self, preset_mode: str) -> None: """Validate and set new preset mode.""" @@ -908,16 +855,72 @@ async def async_service_aux_heat( await entity.async_turn_aux_heat_off() +async def async_service_humidity_set( + entity: ClimateEntity, service_call: ServiceCall +) -> None: + """Handle set humidity service.""" + humidity = service_call.data[ATTR_HUMIDITY] + min_humidity = entity.min_humidity + max_humidity = entity.max_humidity + _LOGGER.debug( + "Check valid humidity %d in range %d - %d", + humidity, + min_humidity, + max_humidity, + ) + if humidity < min_humidity or humidity > max_humidity: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="humidity_out_of_range", + translation_placeholders={ + "humidity": str(humidity), + "min_humidity": str(min_humidity), + "max_humidity": str(max_humidity), + }, + ) + + await entity.async_set_humidity(humidity) + + async def async_service_temperature_set( entity: ClimateEntity, service_call: ServiceCall ) -> None: """Handle set temperature service.""" + if ( + ATTR_TEMPERATURE in service_call.data + and not entity.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE + ): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="missing_target_temperature_entity_feature", + ) + if ( + ATTR_TARGET_TEMP_LOW in service_call.data + and not entity.supported_features + & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + ): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="missing_target_temperature_range_entity_feature", + ) + hass = entity.hass - kwargs = {} + kwargs: dict[str, Any] = {} min_temp = entity.min_temp max_temp = entity.max_temp temp_unit = entity.temperature_unit + if ( + (target_low_temp := 
service_call.data.get(ATTR_TARGET_TEMP_LOW)) + and (target_high_temp := service_call.data.get(ATTR_TARGET_TEMP_HIGH)) + and target_low_temp > target_high_temp + ): + # Ensure target_low_temp is not higher than target_high_temp. + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="low_temp_higher_than_high_temp", + ) + for value, temp in service_call.data.items(): if value in CONVERTIBLE_ATTRIBUTE: kwargs[value] = check_temp = TemperatureConverter.convert( @@ -949,13 +952,3 @@ async def async_service_temperature_set( kwargs[value] = temp await entity.async_set_temperature(**kwargs) - - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = ft.partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/climate/const.py b/homeassistant/components/climate/const.py index b74169430d4..111401a2251 100644 --- a/homeassistant/components/climate/const.py +++ b/homeassistant/components/climate/const.py @@ -1,14 +1,6 @@ """Provides the constants needed for component.""" from enum import IntFlag, StrEnum -from functools import partial - -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) class HVACMode(StrEnum): @@ -37,15 +29,6 @@ class HVACMode(StrEnum): FAN_ONLY = "fan_only" -# These HVAC_MODE_* constants are deprecated as of Home Assistant 2022.5. -# Please use the HVACMode enum instead. -_DEPRECATED_HVAC_MODE_OFF = DeprecatedConstantEnum(HVACMode.OFF, "2025.1") -_DEPRECATED_HVAC_MODE_HEAT = DeprecatedConstantEnum(HVACMode.HEAT, "2025.1") -_DEPRECATED_HVAC_MODE_COOL = DeprecatedConstantEnum(HVACMode.COOL, "2025.1") -_DEPRECATED_HVAC_MODE_HEAT_COOL = DeprecatedConstantEnum(HVACMode.HEAT_COOL, "2025.1") -_DEPRECATED_HVAC_MODE_AUTO = DeprecatedConstantEnum(HVACMode.AUTO, "2025.1") -_DEPRECATED_HVAC_MODE_DRY = DeprecatedConstantEnum(HVACMode.DRY, "2025.1") -_DEPRECATED_HVAC_MODE_FAN_ONLY = DeprecatedConstantEnum(HVACMode.FAN_ONLY, "2025.1") HVAC_MODES = [cls.value for cls in HVACMode] # No preset is active @@ -92,6 +75,10 @@ SWING_BOTH = "both" SWING_VERTICAL = "vertical" SWING_HORIZONTAL = "horizontal" +# Possible horizontal swing state +SWING_HORIZONTAL_ON = "on" +SWING_HORIZONTAL_OFF = "off" + class HVACAction(StrEnum): """HVAC action for climate devices.""" @@ -106,14 +93,6 @@ class HVACAction(StrEnum): PREHEATING = "preheating" -# These CURRENT_HVAC_* constants are deprecated as of Home Assistant 2022.5. -# Please use the HVACAction enum instead. 
-_DEPRECATED_CURRENT_HVAC_OFF = DeprecatedConstantEnum(HVACAction.OFF, "2025.1") -_DEPRECATED_CURRENT_HVAC_HEAT = DeprecatedConstantEnum(HVACAction.HEATING, "2025.1") -_DEPRECATED_CURRENT_HVAC_COOL = DeprecatedConstantEnum(HVACAction.COOLING, "2025.1") -_DEPRECATED_CURRENT_HVAC_DRY = DeprecatedConstantEnum(HVACAction.DRYING, "2025.1") -_DEPRECATED_CURRENT_HVAC_IDLE = DeprecatedConstantEnum(HVACAction.IDLE, "2025.1") -_DEPRECATED_CURRENT_HVAC_FAN = DeprecatedConstantEnum(HVACAction.FAN, "2025.1") CURRENT_HVAC_ACTIONS = [cls.value for cls in HVACAction] @@ -134,6 +113,8 @@ ATTR_HVAC_MODES = "hvac_modes" ATTR_HVAC_MODE = "hvac_mode" ATTR_SWING_MODES = "swing_modes" ATTR_SWING_MODE = "swing_mode" +ATTR_SWING_HORIZONTAL_MODE = "swing_horizontal_mode" +ATTR_SWING_HORIZONTAL_MODES = "swing_horizontal_modes" ATTR_TARGET_TEMP_HIGH = "target_temp_high" ATTR_TARGET_TEMP_LOW = "target_temp_low" ATTR_TARGET_TEMP_STEP = "target_temp_step" @@ -145,12 +126,15 @@ DEFAULT_MAX_HUMIDITY = 99 DOMAIN = "climate" +INTENT_GET_TEMPERATURE = "HassClimateGetTemperature" + SERVICE_SET_AUX_HEAT = "set_aux_heat" SERVICE_SET_FAN_MODE = "set_fan_mode" SERVICE_SET_PRESET_MODE = "set_preset_mode" SERVICE_SET_HUMIDITY = "set_humidity" SERVICE_SET_HVAC_MODE = "set_hvac_mode" SERVICE_SET_SWING_MODE = "set_swing_mode" +SERVICE_SET_SWING_HORIZONTAL_MODE = "set_swing_horizontal_mode" SERVICE_SET_TEMPERATURE = "set_temperature" @@ -166,35 +150,4 @@ class ClimateEntityFeature(IntFlag): AUX_HEAT = 64 TURN_OFF = 128 TURN_ON = 256 - - -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the ClimateEntityFeature enum instead. -_DEPRECATED_SUPPORT_TARGET_TEMPERATURE = DeprecatedConstantEnum( - ClimateEntityFeature.TARGET_TEMPERATURE, "2025.1" -) -_DEPRECATED_SUPPORT_TARGET_TEMPERATURE_RANGE = DeprecatedConstantEnum( - ClimateEntityFeature.TARGET_TEMPERATURE_RANGE, "2025.1" -) -_DEPRECATED_SUPPORT_TARGET_HUMIDITY = DeprecatedConstantEnum( - ClimateEntityFeature.TARGET_HUMIDITY, "2025.1" -) -_DEPRECATED_SUPPORT_FAN_MODE = DeprecatedConstantEnum( - ClimateEntityFeature.FAN_MODE, "2025.1" -) -_DEPRECATED_SUPPORT_PRESET_MODE = DeprecatedConstantEnum( - ClimateEntityFeature.PRESET_MODE, "2025.1" -) -_DEPRECATED_SUPPORT_SWING_MODE = DeprecatedConstantEnum( - ClimateEntityFeature.SWING_MODE, "2025.1" -) -_DEPRECATED_SUPPORT_AUX_HEAT = DeprecatedConstantEnum( - ClimateEntityFeature.AUX_HEAT, "2025.1" -) - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) + SWING_HORIZONTAL_MODE = 512 diff --git a/homeassistant/components/climate/icons.json b/homeassistant/components/climate/icons.json index ea6c504ce25..8f4ffa6b19f 100644 --- a/homeassistant/components/climate/icons.json +++ b/homeassistant/components/climate/icons.json @@ -51,20 +51,50 @@ "on": "mdi:arrow-oscillating", "vertical": "mdi:arrow-up-down" } + }, + "swing_horizontal_mode": { + "default": "mdi:circle-medium", + "state": { + "off": "mdi:arrow-oscillating-off", + "on": "mdi:arrow-expand-horizontal" + } } } } }, "services": { - "set_fan_mode": "mdi:fan", - "set_humidity": "mdi:water-percent", - "set_swing_mode": "mdi:arrow-oscillating", - "set_temperature": "mdi:thermometer", - "set_aux_heat": "mdi:radiator", - "set_preset_mode": "mdi:sofa", - "set_hvac_mode": "mdi:hvac", - "turn_on": "mdi:power-on", - 
"turn_off": "mdi:power-off", - "toggle": "mdi:toggle-switch" + "set_fan_mode": { + "service": "mdi:fan" + }, + "set_humidity": { + "service": "mdi:water-percent" + }, + "set_swing_mode": { + "service": "mdi:arrow-oscillating" + }, + "set_swing_horizontal_mode": { + "service": "mdi:arrow-expand-horizontal" + }, + "set_temperature": { + "service": "mdi:thermometer" + }, + "set_aux_heat": { + "service": "mdi:radiator" + }, + "set_preset_mode": { + "service": "mdi:sofa" + }, + "set_hvac_mode": { + "service": "mdi:hvac" + }, + "turn_on": { + "service": "mdi:power-on" + }, + "turn_off": { + "service": "mdi:power-off" + }, + "toggle": { + "service": "mdi:toggle-switch" + } } } diff --git a/homeassistant/components/climate/intent.py b/homeassistant/components/climate/intent.py index 53d0891fcda..9a8dfdda4ec 100644 --- a/homeassistant/components/climate/intent.py +++ b/homeassistant/components/climate/intent.py @@ -7,9 +7,7 @@ import voluptuous as vol from homeassistant.core import HomeAssistant from homeassistant.helpers import intent -from . import DOMAIN - -INTENT_GET_TEMPERATURE = "HassClimateGetTemperature" +from . import DOMAIN, INTENT_GET_TEMPERATURE async def async_setup_intents(hass: HomeAssistant) -> None: diff --git a/homeassistant/components/climate/reproduce_state.py b/homeassistant/components/climate/reproduce_state.py index 99357777fba..d38e243cb62 100644 --- a/homeassistant/components/climate/reproduce_state.py +++ b/homeassistant/components/climate/reproduce_state.py @@ -14,6 +14,7 @@ from .const import ( ATTR_HUMIDITY, ATTR_HVAC_MODE, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, @@ -23,6 +24,7 @@ from .const import ( SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, + SERVICE_SET_SWING_HORIZONTAL_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, ) @@ -76,6 +78,14 @@ async def _async_reproduce_states( ): await call_service(SERVICE_SET_SWING_MODE, [ATTR_SWING_MODE]) + if ( + ATTR_SWING_HORIZONTAL_MODE in state.attributes + and state.attributes[ATTR_SWING_HORIZONTAL_MODE] is not None + ): + await call_service( + SERVICE_SET_SWING_HORIZONTAL_MODE, [ATTR_SWING_HORIZONTAL_MODE] + ) + if ( ATTR_FAN_MODE in state.attributes and state.attributes[ATTR_FAN_MODE] is not None diff --git a/homeassistant/components/climate/services.yaml b/homeassistant/components/climate/services.yaml index 12a8e6f001f..68421bf2386 100644 --- a/homeassistant/components/climate/services.yaml +++ b/homeassistant/components/climate/services.yaml @@ -131,7 +131,20 @@ set_swing_mode: fields: swing_mode: required: true - example: "horizontal" + example: "on" + selector: + text: + +set_swing_horizontal_mode: + target: + entity: + domain: climate + supported_features: + - climate.ClimateEntityFeature.SWING_HORIZONTAL_MODE + fields: + swing_horizontal_mode: + required: true + example: "on" selector: text: diff --git a/homeassistant/components/climate/significant_change.py b/homeassistant/components/climate/significant_change.py index 0c4cdd4ac6a..2b7e2c5d8b1 100644 --- a/homeassistant/components/climate/significant_change.py +++ b/homeassistant/components/climate/significant_change.py @@ -19,6 +19,7 @@ from . 
import ( ATTR_HUMIDITY, ATTR_HVAC_ACTION, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, @@ -34,6 +35,7 @@ SIGNIFICANT_ATTRIBUTES: set[str] = { ATTR_HVAC_ACTION, ATTR_PRESET_MODE, ATTR_SWING_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, ATTR_TEMPERATURE, @@ -70,6 +72,7 @@ def async_check_significant_change( ATTR_HVAC_ACTION, ATTR_PRESET_MODE, ATTR_SWING_MODE, + ATTR_SWING_HORIZONTAL_MODE, ]: return True diff --git a/homeassistant/components/climate/strings.json b/homeassistant/components/climate/strings.json index 1af21815b9f..6d8b2c5449d 100644 --- a/homeassistant/components/climate/strings.json +++ b/homeassistant/components/climate/strings.json @@ -123,6 +123,16 @@ "swing_modes": { "name": "Swing modes" }, + "swing_horizontal_mode": { + "name": "Horizontal swing mode", + "state": { + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]" + } + }, + "swing_horizontal_modes": { + "name": "Horizontal swing modes" + }, "target_temp_high": { "name": "Upper target temperature" }, @@ -161,19 +171,19 @@ }, "set_temperature": { "name": "Set target temperature", - "description": "Sets target temperature.", + "description": "Sets the temperature setpoint.", "fields": { "temperature": { - "name": "Temperature", - "description": "Target temperature." + "name": "Target temperature", + "description": "The temperature setpoint." }, "target_temp_high": { - "name": "Target temperature high", - "description": "High target temperature." + "name": "Upper target temperature", + "description": "The max temperature setpoint." }, "target_temp_low": { - "name": "Target temperature low", - "description": "Low target temperature." + "name": "Lower target temperature", + "description": "The min temperature setpoint." }, "hvac_mode": { "name": "HVAC mode", @@ -221,6 +231,16 @@ } } }, + "set_swing_horizontal_mode": { + "name": "Set horizontal swing mode", + "description": "Sets horizontal swing operation mode.", + "fields": { + "swing_horizontal_mode": { + "name": "Horizontal swing mode", + "description": "Horizontal swing operation mode." + } + } + }, "turn_on": { "name": "[%key:common::action::turn_on%]", "description": "Turns climate device on." @@ -264,11 +284,26 @@ "not_valid_swing_mode": { "message": "Swing mode {mode} is not valid. Valid swing modes are: {modes}." }, + "not_valid_horizontal_swing_mode": { + "message": "Horizontal swing mode {mode} is not valid. Valid horizontal swing modes are: {modes}." + }, "not_valid_fan_mode": { "message": "Fan mode {mode} is not valid. Valid fan modes are: {modes}." }, "temp_out_of_range": { "message": "Provided temperature {check_temp} is not valid. Accepted range is {min_temp} to {max_temp}." + }, + "low_temp_higher_than_high_temp": { + "message": "Target temperature low can not be higher than Target temperature high." + }, + "humidity_out_of_range": { + "message": "Provided humidity {humidity} is not valid. Accepted range is {min_humidity} to {max_humidity}." + }, + "missing_target_temperature_entity_feature": { + "message": "Set temperature action was used with the target temperature parameter but the entity does not support it." + }, + "missing_target_temperature_range_entity_feature": { + "message": "Set temperature action was used with the target temperature low/high parameter but the entity does not support it." 
} } } diff --git a/homeassistant/components/cloud/assist_pipeline.py b/homeassistant/components/cloud/assist_pipeline.py index f3a591d6eda..c97e5bdc0a2 100644 --- a/homeassistant/components/cloud/assist_pipeline.py +++ b/homeassistant/components/cloud/assist_pipeline.py @@ -1,6 +1,7 @@ """Handle Cloud assist pipelines.""" import asyncio +from typing import Any from homeassistant.components.assist_pipeline import ( async_create_default_pipeline, @@ -98,7 +99,7 @@ async def async_migrate_cloud_pipeline_engine( # is an after dependency of cloud await async_setup_pipeline_store(hass) - kwargs: dict[str, str] = {pipeline_attribute: engine_id} + kwargs: dict[str, Any] = {pipeline_attribute: engine_id} pipelines = async_get_pipelines(hass) for pipeline in pipelines: if getattr(pipeline, pipeline_attribute) == DOMAIN: diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py new file mode 100644 index 00000000000..e826c229321 --- /dev/null +++ b/homeassistant/components/cloud/backup.py @@ -0,0 +1,202 @@ +"""Backup platform for the cloud integration.""" + +from __future__ import annotations + +import base64 +from collections.abc import AsyncIterator, Callable, Coroutine +import hashlib +from typing import Any, Self + +from aiohttp import ClientError, ClientTimeout, StreamReader +from hass_nabucasa import Cloud, CloudError +from hass_nabucasa.cloud_api import ( + async_files_delete_file, + async_files_download_details, + async_files_list, + async_files_upload_details, +) + +from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError +from homeassistant.core import HomeAssistant, callback + +from .client import CloudClient +from .const import DATA_CLOUD, DOMAIN + +_STORAGE_BACKUP = "backup" + + +async def _b64md5(stream: AsyncIterator[bytes]) -> str: + """Calculate the MD5 hash of a file.""" + file_hash = hashlib.md5() + async for chunk in stream: + file_hash.update(chunk) + return base64.b64encode(file_hash.digest()).decode() + + +async def async_get_backup_agents( + hass: HomeAssistant, + **kwargs: Any, +) -> list[BackupAgent]: + """Return the cloud backup agent.""" + cloud = hass.data[DATA_CLOUD] + if not cloud.is_logged_in: + return [] + + return [CloudBackupAgent(hass=hass, cloud=cloud)] + + +class ChunkAsyncStreamIterator: + """Async iterator for chunked streams. + + Based on aiohttp.streams.ChunkTupleAsyncStreamIterator, but yields + bytes instead of tuple[bytes, bool]. + """ + + __slots__ = ("_stream",) + + def __init__(self, stream: StreamReader) -> None: + """Initialize.""" + self._stream = stream + + def __aiter__(self) -> Self: + """Iterate.""" + return self + + async def __anext__(self) -> bytes: + """Yield next chunk.""" + rv = await self._stream.readchunk() + if rv == (b"", False): + raise StopAsyncIteration + return rv[0] + + +class CloudBackupAgent(BackupAgent): + """Cloud backup agent.""" + + domain = DOMAIN + name = DOMAIN + + def __init__(self, hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None: + """Initialize the cloud backup sync agent.""" + super().__init__() + self._cloud = cloud + self._hass = hass + + @callback + def _get_backup_filename(self) -> str: + """Return the backup filename.""" + return f"{self._cloud.client.prefs.instance_id}.tar" + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. 
+ :return: An async iterator that yields bytes. + """ + if not await self.async_get_backup(backup_id): + raise BackupAgentError("Backup not found") + + try: + details = await async_files_download_details( + self._cloud, + storage_type=_STORAGE_BACKUP, + filename=self._get_backup_filename(), + ) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to get download details") from err + + try: + resp = await self._cloud.websession.get(details["url"]) + resp.raise_for_status() + except ClientError as err: + raise BackupAgentError("Failed to download backup") from err + + return ChunkAsyncStreamIterator(resp.content) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. + + :param open_stream: A function returning an async iterator that yields bytes. + :param backup: Metadata about the backup that should be uploaded. + """ + if not backup.protected: + raise BackupAgentError("Cloud backups must be protected") + + base64md5hash = await _b64md5(await open_stream()) + + try: + details = await async_files_upload_details( + self._cloud, + storage_type=_STORAGE_BACKUP, + filename=self._get_backup_filename(), + metadata=backup.as_dict(), + size=backup.size, + base64md5hash=base64md5hash, + ) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to get upload details") from err + + try: + upload_status = await self._cloud.websession.put( + details["url"], + data=await open_stream(), + headers=details["headers"] | {"content-length": str(backup.size)}, + timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h + ) + upload_status.raise_for_status() + except (TimeoutError, ClientError) as err: + raise BackupAgentError("Failed to upload backup") from err + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. 
+ """ + if not await self.async_get_backup(backup_id): + return + + try: + await async_files_delete_file( + self._cloud, + storage_type=_STORAGE_BACKUP, + filename=self._get_backup_filename(), + ) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to delete backup") from err + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + try: + backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to list backups") from err + + return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups] + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + backups = await self.async_list_backups() + + for backup in backups: + if backup.backup_id == backup_id: + return backup + + return None diff --git a/homeassistant/components/cloud/client.py b/homeassistant/components/cloud/client.py index 01c8de77156..ee46fa42125 100644 --- a/homeassistant/components/cloud/client.py +++ b/homeassistant/components/cloud/client.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from collections.abc import Callable from datetime import datetime from http import HTTPStatus import logging @@ -11,12 +12,14 @@ from typing import Any, Literal import aiohttp from hass_nabucasa.client import CloudClient as Interface, RemoteActivationNotAllowed +from webrtc_models import RTCIceServer from homeassistant.components import google_assistant, persistent_notification, webhook from homeassistant.components.alexa import ( errors as alexa_errors, smart_home as alexa_smart_home, ) +from homeassistant.components.camera.webrtc import async_register_ice_servers from homeassistant.components.google_assistant import smart_home as ga from homeassistant.const import __version__ as HA_VERSION from homeassistant.core import Context, HassJob, HomeAssistant, callback @@ -27,7 +30,7 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_iss from homeassistant.util.aiohttp import MockRequest, serialize_response from . 
import alexa_config, google_config -from .const import DISPATCHER_REMOTE_UPDATE, DOMAIN +from .const import DISPATCHER_REMOTE_UPDATE, DOMAIN, PREF_ENABLE_CLOUD_ICE_SERVERS from .prefs import CloudPreferences _LOGGER = logging.getLogger(__name__) @@ -60,6 +63,7 @@ class CloudClient(Interface): self._alexa_config_init_lock = asyncio.Lock() self._google_config_init_lock = asyncio.Lock() self._relayer_region: str | None = None + self._cloud_ice_servers_listener: Callable[[], None] | None = None @property def base_path(self) -> Path: @@ -187,6 +191,49 @@ class CloudClient(Interface): if is_new_user: await gconf.async_sync_entities(gconf.agent_user_id) + async def setup_cloud_ice_servers(_: datetime) -> None: + async def register_cloud_ice_server( + ice_servers: list[RTCIceServer], + ) -> Callable[[], None]: + """Register cloud ice server.""" + + def get_ice_servers() -> list[RTCIceServer]: + return ice_servers + + return async_register_ice_servers(self._hass, get_ice_servers) + + async def async_register_cloud_ice_servers_listener( + prefs: CloudPreferences, + ) -> None: + is_cloud_ice_servers_enabled = ( + self.cloud.is_logged_in + and not self.cloud.subscription_expired + and prefs.cloud_ice_servers_enabled + ) + if is_cloud_ice_servers_enabled: + if self._cloud_ice_servers_listener is None: + self._cloud_ice_servers_listener = await self.cloud.ice_servers.async_register_ice_servers_listener( + register_cloud_ice_server + ) + elif self._cloud_ice_servers_listener: + self._cloud_ice_servers_listener() + self._cloud_ice_servers_listener = None + + async def async_prefs_updated(prefs: CloudPreferences) -> None: + updated_prefs = prefs.last_updated + + if ( + updated_prefs is None + or PREF_ENABLE_CLOUD_ICE_SERVERS not in updated_prefs + ): + return + + await async_register_cloud_ice_servers_listener(prefs) + + await async_register_cloud_ice_servers_listener(self._prefs) + + self._prefs.async_listen_updates(async_prefs_updated) + tasks = [] if self._prefs.alexa_enabled and self._prefs.alexa_report_state: @@ -195,6 +242,8 @@ class CloudClient(Interface): if self._prefs.google_enabled: tasks.append(enable_google) + tasks.append(setup_cloud_ice_servers) + if tasks: await asyncio.gather(*(task(None) for task in tasks)) @@ -222,6 +271,10 @@ class CloudClient(Interface): self._google_config.async_deinitialize() self._google_config = None + if self._cloud_ice_servers_listener: + self._cloud_ice_servers_listener() + self._cloud_ice_servers_listener = None + @callback def user_message(self, identifier: str, title: str, message: str) -> None: """Create a message for user to UI.""" diff --git a/homeassistant/components/cloud/config_flow.py b/homeassistant/components/cloud/config_flow.py index 932291c2bfa..92fbf78378b 100644 --- a/homeassistant/components/cloud/config_flow.py +++ b/homeassistant/components/cloud/config_flow.py @@ -18,6 +18,4 @@ class CloudConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the system step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="Home Assistant Cloud", data={}) diff --git a/homeassistant/components/cloud/const.py b/homeassistant/components/cloud/const.py index 5e9fb2e9dc7..65d239f2b10 100644 --- a/homeassistant/components/cloud/const.py +++ b/homeassistant/components/cloud/const.py @@ -43,6 +43,7 @@ PREF_GOOGLE_SETTINGS_VERSION = "google_settings_version" PREF_TTS_DEFAULT_VOICE = "tts_default_voice" 
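The client.py changes above register a WebRTC ICE-server provider via async_register_ice_servers only while the user is logged in, the subscription is active, and the new cloud_ice_servers_enabled preference is on, keeping the returned unregister callback so the provider can be removed when any of that changes. A minimal synchronous sketch of that register/unregister pattern (the names and the STUN URL are illustrative stand-ins, not Home Assistant's or Nabu Casa's API):

from collections.abc import Callable

_ICE_PROVIDERS: list[Callable[[], list[str]]] = []

def register_ice_servers(get_servers: Callable[[], list[str]]) -> Callable[[], None]:
    """Register a provider and return a callable that removes it again."""
    _ICE_PROVIDERS.append(get_servers)

    def unregister() -> None:
        _ICE_PROVIDERS.remove(get_servers)

    return unregister

class Prefs:
    cloud_ice_servers_enabled = True

_unregister: Callable[[], None] | None = None

def apply_ice_server_prefs(prefs: Prefs, logged_in: bool) -> None:
    """Keep the provider registered only while logged in and the pref is on."""
    global _unregister
    enabled = logged_in and prefs.cloud_ice_servers_enabled
    if enabled and _unregister is None:
        _unregister = register_ice_servers(lambda: ["stun:stun.example.invalid:3478"])
    elif not enabled and _unregister is not None:
        _unregister()
        _unregister = None

apply_ice_server_prefs(Prefs(), logged_in=True)   # registers the provider
apply_ice_server_prefs(Prefs(), logged_in=False)  # tears the listener down again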
PREF_GOOGLE_CONNECTED = "google_connected" PREF_REMOTE_ALLOW_REMOTE_ENABLE = "remote_allow_remote_enable" +PREF_ENABLE_CLOUD_ICE_SERVERS = "cloud_ice_servers_enabled" DEFAULT_TTS_DEFAULT_VOICE = ("en-US", "JennyNeural") DEFAULT_DISABLE_2FA = False DEFAULT_ALEXA_REPORT_STATE = True @@ -87,3 +88,5 @@ DISPATCHER_REMOTE_UPDATE: SignalType[Any] = SignalType("cloud_remote_update") STT_ENTITY_UNIQUE_ID = "cloud-speech-to-text" TTS_ENTITY_UNIQUE_ID = "cloud-text-to-speech" + +LOGIN_MFA_TIMEOUT = 60 diff --git a/homeassistant/components/cloud/google_config.py b/homeassistant/components/cloud/google_config.py index 3586823ca11..43dd5279d35 100644 --- a/homeassistant/components/cloud/google_config.py +++ b/homeassistant/components/cloud/google_config.py @@ -478,7 +478,7 @@ class CloudGoogleConfig(AbstractConfig): self.async_schedule_google_sync_all() @callback - async def _handle_device_registry_updated( + def _handle_device_registry_updated( self, event: Event[dr.EventDeviceRegistryUpdatedData] ) -> None: """Handle when device registry updated.""" diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index b1931515745..2f49d261792 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -9,6 +9,7 @@ import dataclasses from functools import wraps from http import HTTPStatus import logging +import time from typing import Any, Concatenate import aiohttp @@ -31,6 +32,7 @@ from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.util.location import async_detect_location_info @@ -39,9 +41,11 @@ from .assist_pipeline import async_create_cloud_pipeline from .client import CloudClient from .const import ( DATA_CLOUD, + LOGIN_MFA_TIMEOUT, PREF_ALEXA_REPORT_STATE, PREF_DISABLE_2FA, PREF_ENABLE_ALEXA, + PREF_ENABLE_CLOUD_ICE_SERVERS, PREF_ENABLE_GOOGLE, PREF_GOOGLE_REPORT_STATE, PREF_GOOGLE_SECURE_DEVICES_PIN, @@ -68,6 +72,10 @@ _CLOUD_ERRORS: dict[type[Exception], tuple[HTTPStatus, str]] = { } +class MFAExpiredOrNotStarted(auth.CloudError): + """Multi-factor authentication expired, or not started.""" + + @callback def async_setup(hass: HomeAssistant) -> None: """Initialize the HTTP API.""" @@ -100,6 +108,11 @@ def async_setup(hass: HomeAssistant) -> None: _CLOUD_ERRORS.update( { + auth.InvalidTotpCode: (HTTPStatus.BAD_REQUEST, "Invalid TOTP code."), + auth.MFARequired: ( + HTTPStatus.UNAUTHORIZED, + "Multi-factor authentication required.", + ), auth.UserNotFound: (HTTPStatus.BAD_REQUEST, "User does not exist."), auth.UserNotConfirmed: (HTTPStatus.BAD_REQUEST, "Email not confirmed."), auth.UserExists: ( @@ -111,6 +124,10 @@ def async_setup(hass: HomeAssistant) -> None: HTTPStatus.BAD_REQUEST, "Password change required.", ), + MFAExpiredOrNotStarted: ( + HTTPStatus.BAD_REQUEST, + "Multi-factor authentication expired, or not started. 
Please try again.", + ), } ) @@ -205,19 +222,57 @@ class GoogleActionsSyncView(HomeAssistantView): class CloudLoginView(HomeAssistantView): """Login to Home Assistant cloud.""" + _mfa_tokens: dict[str, str] = {} + _mfa_tokens_set_time: float = 0 + url = "/api/cloud/login" name = "api:cloud:login" @require_admin @_handle_cloud_errors @RequestDataValidator( - vol.Schema({vol.Required("email"): str, vol.Required("password"): str}) + vol.Schema( + vol.All( + { + vol.Required("email"): str, + vol.Exclusive("password", "login"): str, + vol.Exclusive("code", "login"): str, + }, + cv.has_at_least_one_key("password", "code"), + ) + ) ) async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Handle login request.""" hass = request.app[KEY_HASS] cloud = hass.data[DATA_CLOUD] - await cloud.login(data["email"], data["password"]) + + try: + email = data["email"] + password = data.get("password") + code = data.get("code") + + if email and password: + await cloud.login(email, password) + + else: + if ( + not self._mfa_tokens + or time.time() - self._mfa_tokens_set_time > LOGIN_MFA_TIMEOUT + ): + raise MFAExpiredOrNotStarted + + # Voluptuous should ensure that code is not None because password is + assert code is not None + + await cloud.login_verify_totp(email, code, self._mfa_tokens) + self._mfa_tokens = {} + self._mfa_tokens_set_time = 0 + + except auth.MFARequired as mfa_err: + self._mfa_tokens = mfa_err.mfa_tokens + self._mfa_tokens_set_time = time.time() + raise if "assist_pipeline" in hass.config.components: new_cloud_pipeline_id = await async_create_cloud_pipeline(hass) @@ -439,15 +494,16 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]: @websocket_api.websocket_command( { vol.Required("type"): "cloud/update_prefs", - vol.Optional(PREF_ENABLE_GOOGLE): bool, - vol.Optional(PREF_ENABLE_ALEXA): bool, vol.Optional(PREF_ALEXA_REPORT_STATE): bool, + vol.Optional(PREF_ENABLE_ALEXA): bool, + vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool, + vol.Optional(PREF_ENABLE_GOOGLE): bool, vol.Optional(PREF_GOOGLE_REPORT_STATE): bool, vol.Optional(PREF_GOOGLE_SECURE_DEVICES_PIN): vol.Any(None, str), + vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, vol.Optional(PREF_TTS_DEFAULT_VOICE): vol.All( vol.Coerce(tuple), validate_language_voice ), - vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, } ) @websocket_api.async_response diff --git a/homeassistant/components/cloud/icons.json b/homeassistant/components/cloud/icons.json index 06ee7eb2f19..32888fa75c7 100644 --- a/homeassistant/components/cloud/icons.json +++ b/homeassistant/components/cloud/icons.json @@ -1,6 +1,10 @@ { "services": { - "remote_connect": "mdi:cloud", - "remote_disconnect": "mdi:cloud-off" + "remote_connect": { + "service": "mdi:cloud" + }, + "remote_disconnect": { + "service": "mdi:cloud-off" + } } } diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 529f4fb9be9..7ee8cf46b86 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -1,12 +1,18 @@ { "domain": "cloud", "name": "Home Assistant Cloud", - "after_dependencies": ["assist_pipeline", "google_assistant", "alexa"], + "after_dependencies": [ + "alexa", + "assist_pipeline", + "backup", + "google_assistant" + ], "codeowners": ["@home-assistant/cloud"], "dependencies": ["auth", "http", "repairs", "webhook"], "documentation": "https://www.home-assistant.io/integrations/cloud", "integration_type": "system", "iot_class": 
"cloud_push", "loggers": ["hass_nabucasa"], - "requirements": ["hass-nabucasa==0.81.1"] + "requirements": ["hass-nabucasa==0.87.0"], + "single_config_entry": true } diff --git a/homeassistant/components/cloud/prefs.py b/homeassistant/components/cloud/prefs.py index 9f76c16a113..ae4b2794e1b 100644 --- a/homeassistant/components/cloud/prefs.py +++ b/homeassistant/components/cloud/prefs.py @@ -32,6 +32,7 @@ from .const import ( PREF_CLOUD_USER, PREF_CLOUDHOOKS, PREF_ENABLE_ALEXA, + PREF_ENABLE_CLOUD_ICE_SERVERS, PREF_ENABLE_GOOGLE, PREF_ENABLE_REMOTE, PREF_GOOGLE_CONNECTED, @@ -162,20 +163,21 @@ class CloudPreferences: async def async_update( self, *, - google_enabled: bool | UndefinedType = UNDEFINED, alexa_enabled: bool | UndefinedType = UNDEFINED, - remote_enabled: bool | UndefinedType = UNDEFINED, - google_secure_devices_pin: str | None | UndefinedType = UNDEFINED, - cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED, - cloud_user: str | UndefinedType = UNDEFINED, alexa_report_state: bool | UndefinedType = UNDEFINED, - google_report_state: bool | UndefinedType = UNDEFINED, - tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED, - remote_domain: str | None | UndefinedType = UNDEFINED, alexa_settings_version: int | UndefinedType = UNDEFINED, - google_settings_version: int | UndefinedType = UNDEFINED, + cloud_ice_servers_enabled: bool | UndefinedType = UNDEFINED, + cloud_user: str | UndefinedType = UNDEFINED, + cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED, google_connected: bool | UndefinedType = UNDEFINED, + google_enabled: bool | UndefinedType = UNDEFINED, + google_report_state: bool | UndefinedType = UNDEFINED, + google_secure_devices_pin: str | None | UndefinedType = UNDEFINED, + google_settings_version: int | UndefinedType = UNDEFINED, remote_allow_remote_enable: bool | UndefinedType = UNDEFINED, + remote_domain: str | None | UndefinedType = UNDEFINED, + remote_enabled: bool | UndefinedType = UNDEFINED, + tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED, ) -> None: """Update user preferences.""" prefs = {**self._prefs} @@ -184,20 +186,21 @@ class CloudPreferences: { key: value for key, value in ( - (PREF_ENABLE_GOOGLE, google_enabled), - (PREF_ENABLE_ALEXA, alexa_enabled), - (PREF_ENABLE_REMOTE, remote_enabled), - (PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin), - (PREF_CLOUDHOOKS, cloudhooks), - (PREF_CLOUD_USER, cloud_user), (PREF_ALEXA_REPORT_STATE, alexa_report_state), - (PREF_GOOGLE_REPORT_STATE, google_report_state), (PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version), - (PREF_GOOGLE_SETTINGS_VERSION, google_settings_version), - (PREF_TTS_DEFAULT_VOICE, tts_default_voice), - (PREF_REMOTE_DOMAIN, remote_domain), + (PREF_CLOUD_USER, cloud_user), + (PREF_CLOUDHOOKS, cloudhooks), + (PREF_ENABLE_ALEXA, alexa_enabled), + (PREF_ENABLE_CLOUD_ICE_SERVERS, cloud_ice_servers_enabled), + (PREF_ENABLE_GOOGLE, google_enabled), + (PREF_ENABLE_REMOTE, remote_enabled), (PREF_GOOGLE_CONNECTED, google_connected), + (PREF_GOOGLE_REPORT_STATE, google_report_state), + (PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin), + (PREF_GOOGLE_SETTINGS_VERSION, google_settings_version), (PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable), + (PREF_REMOTE_DOMAIN, remote_domain), + (PREF_TTS_DEFAULT_VOICE, tts_default_voice), ) if value is not UNDEFINED } @@ -239,6 +242,7 @@ class CloudPreferences: PREF_ALEXA_REPORT_STATE: self.alexa_report_state, PREF_CLOUDHOOKS: self.cloudhooks, PREF_ENABLE_ALEXA: 
self.alexa_enabled, + PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled, PREF_ENABLE_GOOGLE: self.google_enabled, PREF_ENABLE_REMOTE: self.remote_enabled, PREF_GOOGLE_DEFAULT_EXPOSE: self.google_default_expose, @@ -362,6 +366,14 @@ class CloudPreferences: """ return self._prefs.get(PREF_TTS_DEFAULT_VOICE, DEFAULT_TTS_DEFAULT_VOICE) # type: ignore[no-any-return] + @property + def cloud_ice_servers_enabled(self) -> bool: + """Return if cloud ICE servers are enabled.""" + cloud_ice_servers_enabled: bool = self._prefs.get( + PREF_ENABLE_CLOUD_ICE_SERVERS, True + ) + return cloud_ice_servers_enabled + async def get_cloud_user(self) -> str: """Return ID of Home Assistant Cloud system user.""" user = await self._load_cloud_user() @@ -409,6 +421,7 @@ class CloudPreferences: PREF_ENABLE_ALEXA: True, PREF_ENABLE_GOOGLE: True, PREF_ENABLE_REMOTE: False, + PREF_ENABLE_CLOUD_ICE_SERVERS: True, PREF_GOOGLE_CONNECTED: False, PREF_GOOGLE_DEFAULT_EXPOSE: DEFAULT_EXPOSED_DOMAINS, PREF_GOOGLE_ENTITY_CONFIGS: {}, diff --git a/homeassistant/components/cloud/strings.json b/homeassistant/components/cloud/strings.json index b71ccc0dfa0..1da91f67813 100644 --- a/homeassistant/components/cloud/strings.json +++ b/homeassistant/components/cloud/strings.json @@ -1,10 +1,4 @@ { - "config": { - "step": {}, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" - } - }, "system_health": { "info": { "can_reach_cert_server": "Reach certificate server", @@ -25,7 +19,7 @@ }, "issues": { "deprecated_gender": { - "title": "The `{deprecated_option}` text-to-speech option is deprecated", + "title": "The {deprecated_option} text-to-speech option is deprecated", "fix_flow": { "step": { "confirm": { @@ -74,12 +68,12 @@ }, "services": { "remote_connect": { - "name": "Remote connect", - "description": "Makes the instance UI accessible from outside of the local network by using Home Assistant Cloud." + "name": "Enable remote access", + "description": "Makes the instance UI accessible from outside of the local network by enabling your Home Assistant Cloud connection." }, "remote_disconnect": { - "name": "Remote disconnect", - "description": "Disconnects the Home Assistant UI from the Home Assistant Cloud. You will no longer be able to access your Home Assistant instance from outside your local network." + "name": "Disable remote access", + "description": "Disconnects the instance UI from Home Assistant Cloud. This disables access to it from outside your local network." 
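The prefs.py hunks above thread cloud_ice_servers_enabled through CloudPreferences.async_update (defaulting to enabled) using the same UNDEFINED-sentinel pattern as the existing options, so only keywords the caller explicitly passes are persisted. A small standalone sketch of that pattern, with a stand-in sentinel rather than Home Assistant's UndefinedType:

from typing import Any

UNDEFINED = object()  # stand-in sentinel, not homeassistant's UNDEFINED

_prefs: dict[str, Any] = {"alexa_enabled": True, "cloud_ice_servers_enabled": True}

def update_prefs(
    *,
    alexa_enabled: Any = UNDEFINED,
    cloud_ice_servers_enabled: Any = UNDEFINED,
) -> dict[str, Any]:
    """Store only the options the caller explicitly passed."""
    _prefs.update(
        {
            key: value
            for key, value in (
                ("alexa_enabled", alexa_enabled),
                ("cloud_ice_servers_enabled", cloud_ice_servers_enabled),
            )
            if value is not UNDEFINED
        }
    )
    return _prefs

# Only the ICE-server preference changes; alexa_enabled keeps its stored value.
assert update_prefs(cloud_ice_servers_enabled=False) == {
    "alexa_enabled": True,
    "cloud_ice_servers_enabled": False,
}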
} } } diff --git a/homeassistant/components/cloud/system_health.py b/homeassistant/components/cloud/system_health.py index 0e65aa93eaf..ac50c2fb49b 100644 --- a/homeassistant/components/cloud/system_health.py +++ b/homeassistant/components/cloud/system_health.py @@ -33,6 +33,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]: data["remote_connected"] = cloud.remote.is_connected data["alexa_enabled"] = client.prefs.alexa_enabled data["google_enabled"] = client.prefs.google_enabled + data["cloud_ice_servers_enabled"] = client.prefs.cloud_ice_servers_enabled data["remote_server"] = cloud.remote.snitun_server data["certificate_status"] = cloud.remote.certificate_status data["instance_id"] = client.prefs.instance_id diff --git a/homeassistant/components/cloud/tts.py b/homeassistant/components/cloud/tts.py index 8cf18c08314..4dbee10fbaf 100644 --- a/homeassistant/components/cloud/tts.py +++ b/homeassistant/components/cloud/tts.py @@ -221,7 +221,7 @@ class CloudProvider(Provider): def __init__(self, cloud: Cloud[CloudClient]) -> None: """Initialize cloud provider.""" self.cloud = cloud - self.name = "Cloud" + self.name = "Home Assistant Cloud" self._language, self._voice = cloud.client.prefs.tts_default_voice cloud.client.prefs.async_listen_updates(self._sync_prefs) diff --git a/homeassistant/components/cloudflare/config_flow.py b/homeassistant/components/cloudflare/config_flow.py index 704e4c0fd47..c3845a447e4 100644 --- a/homeassistant/components/cloudflare/config_flow.py +++ b/homeassistant/components/cloudflare/config_flow.py @@ -10,7 +10,7 @@ import pycfdns import voluptuous as vol from homeassistant.components import persistent_notification -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_TOKEN, CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -77,8 +77,6 @@ class CloudflareConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry | None = None - def __init__(self) -> None: """Initialize the Cloudflare config flow.""" self.cloudflare_config: dict[str, Any] = {} @@ -89,7 +87,6 @@ class CloudflareConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Cloudflare.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -98,24 +95,19 @@ class CloudflareConfigFlow(ConfigFlow, domain=DOMAIN): """Handle re-authentication with Cloudflare.""" errors: dict[str, str] = {} - if user_input is not None and self.entry: + if user_input is not None: _, errors = await self._async_validate_or_error(user_input) if not errors: - self.hass.config_entries.async_update_entry( - self.entry, + reauth_entry = self._get_reauth_entry() + return self.async_update_reload_and_abort( + reauth_entry, data={ - **self.entry.data, + **reauth_entry.data, CONF_API_TOKEN: user_input[CONF_API_TOKEN], }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - - return self.async_abort(reason="reauth_successful") - return self.async_show_form( step_id="reauth_confirm", data_schema=DATA_SCHEMA, @@ -126,9 +118,6 @@ class CloudflareConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle 
a flow initiated by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - persistent_notification.async_dismiss(self.hass, "cloudflare_setup") errors: dict[str, str] = {} diff --git a/homeassistant/components/cloudflare/icons.json b/homeassistant/components/cloudflare/icons.json index 6bf6d773fc3..2d452716c94 100644 --- a/homeassistant/components/cloudflare/icons.json +++ b/homeassistant/components/cloudflare/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_records": "mdi:dns" + "update_records": { + "service": "mdi:dns" + } } } diff --git a/homeassistant/components/cloudflare/manifest.json b/homeassistant/components/cloudflare/manifest.json index 0f689aa3e03..8529a0b9bad 100644 --- a/homeassistant/components/cloudflare/manifest.json +++ b/homeassistant/components/cloudflare/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/cloudflare", "iot_class": "cloud_push", "loggers": ["pycfdns"], - "requirements": ["pycfdns==3.0.0"] + "requirements": ["pycfdns==3.0.0"], + "single_config_entry": true } diff --git a/homeassistant/components/cloudflare/strings.json b/homeassistant/components/cloudflare/strings.json index 75dc8f079c7..8c8ec57b074 100644 --- a/homeassistant/components/cloudflare/strings.json +++ b/homeassistant/components/cloudflare/strings.json @@ -30,12 +30,11 @@ }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "services": { diff --git a/homeassistant/components/cmus/manifest.json b/homeassistant/components/cmus/manifest.json index f7591599022..9678dc52a68 100644 --- a/homeassistant/components/cmus/manifest.json +++ b/homeassistant/components/cmus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/cmus", "iot_class": "local_polling", "loggers": ["pbr", "pycmus"], + "quality_scale": "legacy", "requirements": ["pycmus==0.1.1"] } diff --git a/homeassistant/components/co2signal/__init__.py b/homeassistant/components/co2signal/__init__.py index 1b69a06d12d..e84ba387194 100644 --- a/homeassistant/components/co2signal/__init__.py +++ b/homeassistant/components/co2signal/__init__.py @@ -9,7 +9,6 @@ from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN # noqa: F401 from .coordinator import CO2SignalCoordinator PLATFORMS = [Platform.SENSOR] diff --git a/homeassistant/components/co2signal/config_flow.py b/homeassistant/components/co2signal/config_flow.py index bf5d645638f..0d357cce199 100644 --- a/homeassistant/components/co2signal/config_flow.py +++ b/homeassistant/components/co2signal/config_flow.py @@ -13,7 +13,7 @@ from aioelectricitymaps import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from 
homeassistant.const import ( CONF_API_KEY, CONF_COUNTRY_CODE, @@ -42,7 +42,6 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 _data: dict | None - _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -128,19 +127,23 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle the reauth step.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) + return await self.async_step_reauth_confirm() + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the reauth step.""" data_schema = vol.Schema( { vol.Required(CONF_API_KEY): cv.string, } ) - return await self._validate_and_create("reauth", data_schema, entry_data) + return await self._validate_and_create( + "reauth_confirm", data_schema, user_input + ) async def _validate_and_create( - self, step_id: str, data_schema: vol.Schema, data: Mapping[str, Any] + self, step_id: str, data_schema: vol.Schema, data: Mapping[str, Any] | None ) -> ConfigFlowResult: """Validate data and show form if it is invalid.""" errors: dict[str, str] = {} @@ -158,16 +161,14 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN): except ElectricityMapsError: errors["base"] = "unknown" else: - if self._reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self._reauth_entry, - data={ - CONF_API_KEY: data[CONF_API_KEY], - }, + self._get_reauth_entry(), + data_updates={CONF_API_KEY: data[CONF_API_KEY]}, ) return self.async_create_entry( - title=get_extra_name(data) or "CO2 Signal", + title=get_extra_name(data) or "Electricity Maps", data=data, ) diff --git a/homeassistant/components/co2signal/strings.json b/homeassistant/components/co2signal/strings.json index 7444cde73d7..a4ec916bd42 100644 --- a/homeassistant/components/co2signal/strings.json +++ b/homeassistant/components/co2signal/strings.json @@ -19,7 +19,7 @@ "country_code": "Country code" } }, - "reauth": { + "reauth_confirm": { "data": { "api_key": "[%key:common::config_flow::data::access_token%]" } diff --git a/homeassistant/components/coinbase/config_flow.py b/homeassistant/components/coinbase/config_flow.py index 616fdaf8f7a..8b7b4b9e313 100644 --- a/homeassistant/components/coinbase/config_flow.py +++ b/homeassistant/components/coinbase/config_flow.py @@ -158,16 +158,12 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Coinbase.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/color_extractor/icons.json b/homeassistant/components/color_extractor/icons.json index 07b449ffc54..9dab17a9f3b 100644 --- a/homeassistant/components/color_extractor/icons.json +++ b/homeassistant/components/color_extractor/icons.json @@ -1,5 +1,7 @@ { "services": { - "turn_on": "mdi:lightbulb-on" + "turn_on": { + "service": "mdi:lightbulb-on" + } } } diff --git a/homeassistant/components/comed_hourly_pricing/manifest.json 
b/homeassistant/components/comed_hourly_pricing/manifest.json index 791a824af8f..a3a29903ac7 100644 --- a/homeassistant/components/comed_hourly_pricing/manifest.json +++ b/homeassistant/components/comed_hourly_pricing/manifest.json @@ -3,5 +3,6 @@ "name": "ComEd Hourly Pricing", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/comed_hourly_pricing", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/comelit/__init__.py b/homeassistant/components/comelit/__init__.py index 478be85c1d4..12f28ef206d 100644 --- a/homeassistant/components/comelit/__init__.py +++ b/homeassistant/components/comelit/__init__.py @@ -19,6 +19,7 @@ BRIDGE_PLATFORMS = [ ] VEDO_PLATFORMS = [ Platform.ALARM_CONTROL_PANEL, + Platform.BINARY_SENSOR, Platform.SENSOR, ] diff --git a/homeassistant/components/comelit/alarm_control_panel.py b/homeassistant/components/comelit/alarm_control_panel.py index b325de25e97..b3bd6664bf8 100644 --- a/homeassistant/components/comelit/alarm_control_panel.py +++ b/homeassistant/components/comelit/alarm_control_panel.py @@ -10,21 +10,12 @@ from aiocomelit.const import ALARM_AREAS, AlarmAreaState from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN @@ -112,7 +103,7 @@ class ComelitAlarmEntity(CoordinatorEntity[ComelitVedoSystem], AlarmControlPanel return super().available @property - def state(self) -> StateType: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the alarm.""" _LOGGER.debug( @@ -123,16 +114,16 @@ class ComelitAlarmEntity(CoordinatorEntity[ComelitVedoSystem], AlarmControlPanel ) if self._area.human_status == AlarmAreaState.ARMED: if self._area.armed == ALARM_AREA_ARMED_STATUS[AWAY]: - return STATE_ALARM_ARMED_AWAY + return AlarmControlPanelState.ARMED_AWAY if self._area.armed == ALARM_AREA_ARMED_STATUS[NIGHT]: - return STATE_ALARM_ARMED_NIGHT - return STATE_ALARM_ARMED_HOME + return AlarmControlPanelState.ARMED_NIGHT + return AlarmControlPanelState.ARMED_HOME return { - AlarmAreaState.DISARMED: STATE_ALARM_DISARMED, - AlarmAreaState.ENTRY_DELAY: STATE_ALARM_DISARMING, - AlarmAreaState.EXIT_DELAY: STATE_ALARM_ARMING, - AlarmAreaState.TRIGGERED: STATE_ALARM_TRIGGERED, + AlarmAreaState.DISARMED: AlarmControlPanelState.DISARMED, + AlarmAreaState.ENTRY_DELAY: AlarmControlPanelState.DISARMING, + AlarmAreaState.EXIT_DELAY: AlarmControlPanelState.ARMING, + AlarmAreaState.TRIGGERED: AlarmControlPanelState.TRIGGERED, }.get(self._area.human_status) async def async_alarm_disarm(self, code: str | None = None) -> None: diff --git a/homeassistant/components/comelit/binary_sensor.py b/homeassistant/components/comelit/binary_sensor.py new file mode 100644 index 00000000000..30b642584f8 --- /dev/null +++ b/homeassistant/components/comelit/binary_sensor.py @@ -0,0 +1,62 @@ +"""Support for sensors.""" + +from __future__ import annotations 
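The Comelit alarm_control_panel hunk above replaces the STATE_ALARM_* constants with the AlarmControlPanelState enum while keeping the dict-based translation from AlarmAreaState, and the new binary_sensor.py that continues below exposes each VEDO zone as a motion sensor. A standalone sketch of that enum-to-enum mapping style (both enums here are invented stand-ins, not the aiocomelit or Home Assistant definitions):

from enum import StrEnum  # Python 3.11+

class AreaState(StrEnum):
    DISARMED = "disarmed"
    ENTRY_DELAY = "entry_delay"
    EXIT_DELAY = "exit_delay"
    TRIGGERED = "triggered"

class PanelState(StrEnum):
    DISARMED = "disarmed"
    DISARMING = "disarming"
    ARMING = "arming"
    TRIGGERED = "triggered"

_AREA_TO_PANEL = {
    AreaState.DISARMED: PanelState.DISARMED,
    AreaState.ENTRY_DELAY: PanelState.DISARMING,
    AreaState.EXIT_DELAY: PanelState.ARMING,
    AreaState.TRIGGERED: PanelState.TRIGGERED,
}

def alarm_state(area_state: AreaState) -> PanelState | None:
    """Translate an area state, returning None for anything unmapped."""
    return _AREA_TO_PANEL.get(area_state)

assert alarm_state(AreaState.EXIT_DELAY) is PanelState.ARMING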
+ +from aiocomelit import ComelitVedoZoneObject +from aiocomelit.const import ALARM_ZONES + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import ComelitVedoSystem + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Comelit VEDO presence sensors.""" + + coordinator: ComelitVedoSystem = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id) + for device in coordinator.data[ALARM_ZONES].values() + ) + + +class ComelitVedoBinarySensorEntity( + CoordinatorEntity[ComelitVedoSystem], BinarySensorEntity +): + """Sensor device.""" + + _attr_has_entity_name = True + _attr_device_class = BinarySensorDeviceClass.MOTION + + def __init__( + self, + coordinator: ComelitVedoSystem, + zone: ComelitVedoZoneObject, + config_entry_entry_id: str, + ) -> None: + """Init sensor entity.""" + self._api = coordinator.api + self._zone = zone + super().__init__(coordinator) + # Use config_entry.entry_id as base for unique_id + # because no serial number or mac is available + self._attr_unique_id = f"{config_entry_entry_id}-presence-{zone.index}" + self._attr_device_info = coordinator.platform_device_info(zone, "zone") + + @property + def is_on(self) -> bool: + """Presence detected.""" + return self.coordinator.data[ALARM_ZONES][self._zone.index].status_api == "0001" diff --git a/homeassistant/components/comelit/climate.py b/homeassistant/components/comelit/climate.py index 0b88367c0fa..6dc7c7e26d9 100644 --- a/homeassistant/components/comelit/climate.py +++ b/homeassistant/components/comelit/climate.py @@ -100,7 +100,6 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/comelit/config_flow.py b/homeassistant/components/comelit/config_flow.py index 4cd8b749031..46fc13796a0 100644 --- a/homeassistant/components/comelit/config_flow.py +++ b/homeassistant/components/comelit/config_flow.py @@ -14,7 +14,7 @@ from aiocomelit.api import ComelitCommonApi from aiocomelit.const import BRIDGE import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -68,10 +68,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Comelit.""" VERSION = 1 - _reauth_entry: ConfigEntry | None - _reauth_host: str - _reauth_port: int - _reauth_type: str async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -106,31 +102,26 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth flow.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - 
self.context["entry_id"] - ) - self._reauth_host = entry_data[CONF_HOST] - self._reauth_port = entry_data.get(CONF_PORT, DEFAULT_PORT) - self._reauth_type = entry_data.get(CONF_TYPE, BRIDGE) - - self.context["title_placeholders"] = {"host": self._reauth_host} + self.context["title_placeholders"] = {"host": entry_data[CONF_HOST]} return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauth confirm.""" - assert self._reauth_entry errors = {} + reauth_entry = self._get_reauth_entry() + entry_data = reauth_entry.data + if user_input is not None: try: await validate_input( self.hass, { - CONF_HOST: self._reauth_host, - CONF_PORT: self._reauth_port, - CONF_TYPE: self._reauth_type, + CONF_HOST: entry_data[CONF_HOST], + CONF_PORT: entry_data.get(CONF_PORT, DEFAULT_PORT), + CONF_TYPE: entry_data.get(CONF_TYPE, BRIDGE), } | user_input, ) @@ -142,23 +133,19 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - self.hass.config_entries.async_update_entry( - self._reauth_entry, + return self.async_update_reload_and_abort( + reauth_entry, data={ - CONF_HOST: self._reauth_host, - CONF_PORT: self._reauth_port, + CONF_HOST: entry_data[CONF_HOST], + CONF_PORT: entry_data.get(CONF_PORT, DEFAULT_PORT), CONF_PIN: user_input[CONF_PIN], - CONF_TYPE: self._reauth_type, + CONF_TYPE: entry_data.get(CONF_TYPE, BRIDGE), }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", - description_placeholders={CONF_HOST: self._reauth_entry.data[CONF_HOST]}, + description_placeholders={CONF_HOST: entry_data[CONF_HOST]}, data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/comelit/cover.py b/homeassistant/components/comelit/cover.py index 011ed81b5cb..5169217ebc5 100644 --- a/homeassistant/components/comelit/cover.py +++ b/homeassistant/components/comelit/cover.py @@ -7,7 +7,7 @@ from typing import Any from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON -from homeassistant.components.cover import STATE_CLOSED, CoverDeviceClass, CoverEntity +from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -85,7 +85,7 @@ class ComelitCoverEntity( if self._last_action: return self._last_action == STATE_COVER.index("closing") - return self._last_state == STATE_CLOSED + return self._last_state == CoverState.CLOSED @property def is_closing(self) -> bool: diff --git a/homeassistant/components/comelit/diagnostics.py b/homeassistant/components/comelit/diagnostics.py new file mode 100644 index 00000000000..afa57831eae --- /dev/null +++ b/homeassistant/components/comelit/diagnostics.py @@ -0,0 +1,93 @@ +"""Diagnostics support for Comelit integration.""" + +from __future__ import annotations + +from typing import Any + +from aiocomelit import ( + ComelitSerialBridgeObject, + ComelitVedoAreaObject, + ComelitVedoZoneObject, +) +from aiocomelit.const import BRIDGE + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from 
homeassistant.const import CONF_PIN, CONF_TYPE +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import ComelitBaseCoordinator + +TO_REDACT = {CONF_PIN} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + coordinator: ComelitBaseCoordinator = hass.data[DOMAIN][entry.entry_id] + + dev_list: list[dict[str, Any]] = [] + dev_type_list: list[dict[int, Any]] = [] + + for dev_type in coordinator.data: + dev_type_list = [] + for sensor_data in coordinator.data[dev_type].values(): + if isinstance(sensor_data, ComelitSerialBridgeObject): + dev_type_list.append( + { + sensor_data.index: { + "name": sensor_data.name, + "status": sensor_data.status, + "human_status": sensor_data.human_status, + "protected": sensor_data.protected, + "val": sensor_data.val, + "zone": sensor_data.zone, + "power": sensor_data.power, + "power_unit": sensor_data.power_unit, + } + } + ) + if isinstance(sensor_data, ComelitVedoAreaObject): + dev_type_list.append( + { + sensor_data.index: { + "name": sensor_data.name, + "human_status": sensor_data.human_status.value, + "p1": sensor_data.p1, + "p2": sensor_data.p2, + "ready": sensor_data.ready, + "armed": sensor_data.armed, + "alarm": sensor_data.alarm, + "alarm_memory": sensor_data.alarm_memory, + "sabotage": sensor_data.sabotage, + "anomaly": sensor_data.anomaly, + "in_time": sensor_data.in_time, + "out_time": sensor_data.out_time, + } + } + ) + if isinstance(sensor_data, ComelitVedoZoneObject): + dev_type_list.append( + { + sensor_data.index: { + "name": sensor_data.name, + "human_status": sensor_data.human_status.value, + "status": sensor_data.status, + "status_api": sensor_data.status_api, + } + } + ) + dev_list.append({dev_type: dev_type_list}) + + return { + "entry": async_redact_data(entry.as_dict(), TO_REDACT), + "type": entry.data.get(CONF_TYPE, BRIDGE), + "device_info": { + "last_update success": coordinator.last_update_success, + "last_exception": repr(coordinator.last_exception), + "devices": dev_list, + }, + } diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index b9264d16f69..d7417ad4aad 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -7,6 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiocomelit"], - "quality_scale": "silver", - "requirements": ["aiocomelit==0.9.0"] + "requirements": ["aiocomelit==0.9.1"] } diff --git a/homeassistant/components/comfoconnect/__init__.py b/homeassistant/components/comfoconnect/__init__.py index 8a54c863083..4e0671fd134 100644 --- a/homeassistant/components/comfoconnect/__init__.py +++ b/homeassistant/components/comfoconnect/__init__.py @@ -66,7 +66,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: _LOGGER.error("Could not connect to ComfoConnect bridge on %s", host) return False bridge = bridges[0] - _LOGGER.info("Bridge found: %s (%s)", bridge.uuid.hex(), bridge.host) + _LOGGER.debug("Bridge found: %s (%s)", bridge.uuid.hex(), bridge.host) # Setup ComfoConnect Bridge ccb = ComfoConnectBridge(hass, bridge, name, token, user_agent, pin) diff --git a/homeassistant/components/comfoconnect/fan.py b/homeassistant/components/comfoconnect/fan.py index 4e30b3ee3dc..2295fdb4e8e 100644 --- a/homeassistant/components/comfoconnect/fan.py +++ b/homeassistant/components/comfoconnect/fan.py @@ -68,7 +68,7 @@ class 
ComfoConnectFan(FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False + _attr_preset_modes = PRESET_MODES current_speed: float | None = None diff --git a/homeassistant/components/comfoconnect/manifest.json b/homeassistant/components/comfoconnect/manifest.json index ae9a092f5d9..4157cb6c311 100644 --- a/homeassistant/components/comfoconnect/manifest.json +++ b/homeassistant/components/comfoconnect/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/comfoconnect", "iot_class": "local_push", "loggers": ["pycomfoconnect"], + "quality_scale": "legacy", "requirements": ["pycomfoconnect==0.5.1"] } diff --git a/homeassistant/components/command_line/icons.json b/homeassistant/components/command_line/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/command_line/icons.json +++ b/homeassistant/components/command_line/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/command_line/manifest.json b/homeassistant/components/command_line/manifest.json index 3e76cf4a6a6..2a54f500504 100644 --- a/homeassistant/components/command_line/manifest.json +++ b/homeassistant/components/command_line/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@gjohansson-ST"], "documentation": "https://www.home-assistant.io/integrations/command_line", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["jsonpath==0.82.2"] } diff --git a/homeassistant/components/command_line/sensor.py b/homeassistant/components/command_line/sensor.py index 7c31af165f9..e4c1370d5f7 100644 --- a/homeassistant/components/command_line/sensor.py +++ b/homeassistant/components/command_line/sensor.py @@ -187,13 +187,11 @@ class CommandSensor(ManualTriggerSensorEntity): SensorDeviceClass.TIMESTAMP, }: self._attr_native_value = value - self._process_manual_data(value) - return - - if value is not None: + elif value is not None: self._attr_native_value = async_parse_date_datetime( value, self.entity_id, self.device_class ) + self._process_manual_data(value) self.async_write_ha_state() diff --git a/homeassistant/components/compensation/manifest.json b/homeassistant/components/compensation/manifest.json index e166ca716cb..ac82938b97b 100644 --- a/homeassistant/components/compensation/manifest.json +++ b/homeassistant/components/compensation/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@Petro31"], "documentation": "https://www.home-assistant.io/integrations/compensation", "iot_class": "calculated", - "requirements": ["numpy==1.26.0"] + "quality_scale": "legacy", + "requirements": ["numpy==2.2.0"] } diff --git a/homeassistant/components/concord232/alarm_control_panel.py b/homeassistant/components/concord232/alarm_control_panel.py index 661a2beacc0..02453b56376 100644 --- a/homeassistant/components/concord232/alarm_control_panel.py +++ b/homeassistant/components/concord232/alarm_control_panel.py @@ -13,18 +13,10 @@ from homeassistant.components.alarm_control_panel import ( PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - CONF_CODE, - CONF_HOST, - CONF_MODE, - CONF_NAME, - CONF_PORT, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, -) +from homeassistant.const import CONF_CODE, CONF_HOST, CONF_MODE, CONF_NAME, CONF_PORT from 
homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -75,7 +67,6 @@ class Concord232Alarm(AlarmControlPanelEntity): """Representation of the Concord232-based alarm panel.""" _attr_code_format = CodeFormat.NUMBER - _attr_state: str | None _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME | AlarmControlPanelEntityFeature.ARM_AWAY @@ -107,21 +98,21 @@ class Concord232Alarm(AlarmControlPanelEntity): return if part["arming_level"] == "Off": - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED elif "Home" in part["arming_level"]: - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME else: - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY def alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - if not self._validate_code(code, STATE_ALARM_DISARMED): + if not self._validate_code(code, AlarmControlPanelState.DISARMED): return self._alarm.disarm(code) def alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - if not self._validate_code(code, STATE_ALARM_ARMED_HOME): + if not self._validate_code(code, AlarmControlPanelState.ARMED_HOME): return if self._mode == "silent": self._alarm.arm("stay", "silent") @@ -130,7 +121,7 @@ class Concord232Alarm(AlarmControlPanelEntity): def alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - if not self._validate_code(code, STATE_ALARM_ARMED_AWAY): + if not self._validate_code(code, AlarmControlPanelState.ARMED_AWAY): return self._alarm.arm("away") @@ -138,10 +129,7 @@ class Concord232Alarm(AlarmControlPanelEntity): """Validate given code.""" if self._code is None: return True - if isinstance(self._code, str): - alarm_code = self._code - else: - alarm_code = self._code.render(from_state=self._attr_state, to_state=state) + alarm_code = self._code check = not alarm_code or code == alarm_code if not check: _LOGGER.warning("Invalid code given for %s", state) diff --git a/homeassistant/components/concord232/binary_sensor.py b/homeassistant/components/concord232/binary_sensor.py index a1dcbc222f7..2b86e72e63c 100644 --- a/homeassistant/components/concord232/binary_sensor.py +++ b/homeassistant/components/concord232/binary_sensor.py @@ -80,7 +80,7 @@ def setup_platform( client.zones.sort(key=lambda zone: zone["number"]) for zone in client.zones: - _LOGGER.info("Loading Zone found: %s", zone["name"]) + _LOGGER.debug("Loading Zone found: %s", zone["name"]) if zone["number"] not in exclude: sensors.append( Concord232ZoneSensor( diff --git a/homeassistant/components/concord232/manifest.json b/homeassistant/components/concord232/manifest.json index e0aea5d64d9..ebd1d68064b 100644 --- a/homeassistant/components/concord232/manifest.json +++ b/homeassistant/components/concord232/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/concord232", "iot_class": "local_polling", "loggers": ["concord232", "stevedore"], + "quality_scale": "legacy", "requirements": ["concord232==0.15.1"] } diff --git a/homeassistant/components/config/automation.py b/homeassistant/components/config/automation.py index ccc36dc4430..f2646aa5451 100644 --- a/homeassistant/components/config/automation.py +++ b/homeassistant/components/config/automation.py @@ -5,11 +5,8 @@ from __future__ import 
annotations from typing import Any import uuid -from homeassistant.components.automation.config import ( - DOMAIN, - PLATFORM_SCHEMA, - async_validate_config_item, -) +from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN +from homeassistant.components.automation.config import async_validate_config_item from homeassistant.config import AUTOMATION_CONFIG_PATH from homeassistant.const import CONF_ID, SERVICE_RELOAD from homeassistant.core import HomeAssistant, callback @@ -27,13 +24,15 @@ def async_setup(hass: HomeAssistant) -> bool: """post_write_hook for Config View that reloads automations.""" if action != ACTION_DELETE: await hass.services.async_call( - DOMAIN, SERVICE_RELOAD, {CONF_ID: config_key} + AUTOMATION_DOMAIN, SERVICE_RELOAD, {CONF_ID: config_key} ) return ent_reg = er.async_get(hass) - entity_id = ent_reg.async_get_entity_id(DOMAIN, DOMAIN, config_key) + entity_id = ent_reg.async_get_entity_id( + AUTOMATION_DOMAIN, AUTOMATION_DOMAIN, config_key + ) if entity_id is None: return @@ -42,11 +41,10 @@ def async_setup(hass: HomeAssistant) -> bool: hass.http.register_view( EditAutomationConfigView( - DOMAIN, + AUTOMATION_DOMAIN, "config", AUTOMATION_CONFIG_PATH, cv.string, - PLATFORM_SCHEMA, post_write_hook=hook, data_validator=async_validate_config_item, ) @@ -68,7 +66,16 @@ class EditAutomationConfigView(EditIdBasedConfigView): updated_value = {CONF_ID: config_key} # Iterate through some keys that we want to have ordered in the output - for key in ("alias", "description", "trigger", "condition", "action"): + for key in ( + "alias", + "description", + "triggers", + "trigger", + "conditions", + "condition", + "actions", + "action", + ): if key in new_value: updated_value[key] = new_value[key] diff --git a/homeassistant/components/config/category_registry.py b/homeassistant/components/config/category_registry.py index ade35fddadc..27268928823 100644 --- a/homeassistant/components/config/category_registry.py +++ b/homeassistant/components/config/category_registry.py @@ -5,7 +5,7 @@ from typing import Any import voluptuous as vol from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import category_registry as cr, config_validation as cv diff --git a/homeassistant/components/config/config_entries.py b/homeassistant/components/config/config_entries.py index b16701f8bd0..5794819995d 100644 --- a/homeassistant/components/config/config_entries.py +++ b/homeassistant/components/config/config_entries.py @@ -46,6 +46,13 @@ def async_setup(hass: HomeAssistant) -> bool: hass.http.register_view(OptionManagerFlowIndexView(hass.config_entries.options)) hass.http.register_view(OptionManagerFlowResourceView(hass.config_entries.options)) + hass.http.register_view( + SubentryManagerFlowIndexView(hass.config_entries.subentries) + ) + hass.http.register_view( + SubentryManagerFlowResourceView(hass.config_entries.subentries) + ) + websocket_api.async_register_command(hass, config_entries_get) websocket_api.async_register_command(hass, config_entry_disable) websocket_api.async_register_command(hass, config_entry_get_single) @@ -54,6 +61,9 @@ def async_setup(hass: HomeAssistant) -> bool: websocket_api.async_register_command(hass, config_entries_progress) websocket_api.async_register_command(hass, ignore_config_flow) + websocket_api.async_register_command(hass, 
config_subentry_delete) + websocket_api.async_register_command(hass, config_subentry_list) + return True @@ -285,6 +295,63 @@ class OptionManagerFlowResourceView( return await super().post(request, flow_id) +class SubentryManagerFlowIndexView( + FlowManagerIndexView[config_entries.ConfigSubentryFlowManager] +): + """View to create subentry flows.""" + + url = "/api/config/config_entries/subentries/flow" + name = "api:config:config_entries:subentries:flow" + + @require_admin( + error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) + ) + @RequestDataValidator( + vol.Schema( + { + vol.Required("handler"): vol.All(vol.Coerce(tuple), (str, str)), + vol.Optional("show_advanced_options", default=False): cv.boolean, + }, + extra=vol.ALLOW_EXTRA, + ) + ) + async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: + """Handle a POST request. + + handler in request is [entry_id, subentry_type]. + """ + return await super()._post_impl(request, data) + + def get_context(self, data: dict[str, Any]) -> dict[str, Any]: + """Return context.""" + context = super().get_context(data) + context["source"] = config_entries.SOURCE_USER + return context + + +class SubentryManagerFlowResourceView( + FlowManagerResourceView[config_entries.ConfigSubentryFlowManager] +): + """View to interact with the subentry flow manager.""" + + url = "/api/config/config_entries/subentries/flow/{flow_id}" + name = "api:config:config_entries:subentries:flow:resource" + + @require_admin( + error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) + ) + async def get(self, request: web.Request, /, flow_id: str) -> web.Response: + """Get the current state of a data_entry_flow.""" + return await super().get(request, flow_id) + + @require_admin( + error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) + ) + async def post(self, request: web.Request, flow_id: str) -> web.Response: + """Handle a POST request.""" + return await super().post(request, flow_id) + + @websocket_api.require_admin @websocket_api.websocket_command({"type": "config_entries/flow/progress"}) def config_entries_progress( @@ -463,9 +530,12 @@ async def ignore_config_flow( ) return + context = config_entries.ConfigFlowContext(source=config_entries.SOURCE_IGNORE) + if "discovery_key" in flow["context"]: + context["discovery_key"] = flow["context"]["discovery_key"] await hass.config_entries.flow.async_init( flow["handler"], - context={"source": config_entries.SOURCE_IGNORE}, + context=context, data={"unique_id": flow["context"]["unique_id"], "title": msg["title"]}, ) connection.send_result(msg["id"]) @@ -585,3 +655,62 @@ async def _async_matching_config_entries_json_fragments( ) or (filter_is_not_helper and entry.domain not in integrations) ] + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + "type": "config_entries/subentries/list", + "entry_id": str, + } +) +@websocket_api.async_response +async def config_subentry_list( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """List subentries of a config entry.""" + entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) + if entry is None: + return + + result = [ + { + "subentry_id": subentry.subentry_id, + "title": subentry.title, + "unique_id": subentry.unique_id, + } + for subentry_id, subentry in entry.subentries.items() + ] + connection.send_result(msg["id"], result) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + "type": 
"config_entries/subentries/delete", + "entry_id": str, + "subentry_id": str, + } +) +@websocket_api.async_response +async def config_subentry_delete( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Delete a subentry of a config entry.""" + entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) + if entry is None: + return + + try: + hass.config_entries.async_remove_subentry(entry, msg["subentry_id"]) + except config_entries.UnknownSubEntry: + connection.send_error( + msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config subentry not found" + ) + return + + connection.send_result(msg["id"]) diff --git a/homeassistant/components/config/device_registry.py b/homeassistant/components/config/device_registry.py index 8bc9133b0df..8b114041672 100644 --- a/homeassistant/components/config/device_registry.py +++ b/homeassistant/components/config/device_registry.py @@ -8,7 +8,7 @@ import voluptuous as vol from homeassistant import loader from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.decorators import require_admin +from homeassistant.components.websocket_api import require_admin from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr diff --git a/homeassistant/components/config/entity_registry.py b/homeassistant/components/config/entity_registry.py index bf7a9087d56..aed04943975 100644 --- a/homeassistant/components/config/entity_registry.py +++ b/homeassistant/components/config/entity_registry.py @@ -8,8 +8,7 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components import websocket_api -from homeassistant.components.websocket_api import ERR_NOT_FOUND -from homeassistant.components.websocket_api.decorators import require_admin +from homeassistant.components.websocket_api import ERR_NOT_FOUND, require_admin from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( config_validation as cv, diff --git a/homeassistant/components/config/floor_registry.py b/homeassistant/components/config/floor_registry.py index f3c9793d25e..afa74e7f9b8 100644 --- a/homeassistant/components/config/floor_registry.py +++ b/homeassistant/components/config/floor_registry.py @@ -5,7 +5,7 @@ from typing import Any import voluptuous as vol from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import floor_registry as fr from homeassistant.helpers.floor_registry import FloorEntry diff --git a/homeassistant/components/config/label_registry.py b/homeassistant/components/config/label_registry.py index d02b9849d46..f60a3fca245 100644 --- a/homeassistant/components/config/label_registry.py +++ b/homeassistant/components/config/label_registry.py @@ -5,7 +5,7 @@ from typing import Any import voluptuous as vol from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv, label_registry as lr from homeassistant.helpers.label_registry import LabelEntry diff --git 
a/homeassistant/components/config/scene.py b/homeassistant/components/config/scene.py index d44c2bb87b4..2f0fc180c0b 100644 --- a/homeassistant/components/config/scene.py +++ b/homeassistant/components/config/scene.py @@ -6,7 +6,7 @@ from typing import Any import uuid from homeassistant.components.scene import ( - DOMAIN, + DOMAIN as SCENE_DOMAIN, PLATFORM_SCHEMA as SCENE_PLATFORM_SCHEMA, ) from homeassistant.config import SCENE_CONFIG_PATH @@ -27,13 +27,13 @@ def async_setup(hass: HomeAssistant) -> bool: async def hook(action: str, config_key: str) -> None: """post_write_hook for Config View that reloads scenes.""" if action != ACTION_DELETE: - await hass.services.async_call(DOMAIN, SERVICE_RELOAD) + await hass.services.async_call(SCENE_DOMAIN, SERVICE_RELOAD) return ent_reg = er.async_get(hass) entity_id = ent_reg.async_get_entity_id( - DOMAIN, HOMEASSISTANT_DOMAIN, config_key + SCENE_DOMAIN, HOMEASSISTANT_DOMAIN, config_key ) if entity_id is None: @@ -43,11 +43,11 @@ def async_setup(hass: HomeAssistant) -> bool: hass.http.register_view( EditSceneConfigView( - DOMAIN, + SCENE_DOMAIN, "config", SCENE_CONFIG_PATH, cv.string, - PLATFORM_SCHEMA, + data_schema=PLATFORM_SCHEMA, post_write_hook=hook, ) ) diff --git a/homeassistant/components/config/script.py b/homeassistant/components/config/script.py index c39aad4fcdb..aa83329d124 100644 --- a/homeassistant/components/config/script.py +++ b/homeassistant/components/config/script.py @@ -4,11 +4,8 @@ from __future__ import annotations from typing import Any -from homeassistant.components.script import DOMAIN -from homeassistant.components.script.config import ( - SCRIPT_ENTITY_SCHEMA, - async_validate_config_item, -) +from homeassistant.components.script import DOMAIN as SCRIPT_DOMAIN +from homeassistant.components.script.config import async_validate_config_item from homeassistant.config import SCRIPT_CONFIG_PATH from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant, callback @@ -25,12 +22,14 @@ def async_setup(hass: HomeAssistant) -> bool: async def hook(action: str, config_key: str) -> None: """post_write_hook for Config View that reloads scripts.""" if action != ACTION_DELETE: - await hass.services.async_call(DOMAIN, SERVICE_RELOAD) + await hass.services.async_call(SCRIPT_DOMAIN, SERVICE_RELOAD) return ent_reg = er.async_get(hass) - entity_id = ent_reg.async_get_entity_id(DOMAIN, DOMAIN, config_key) + entity_id = ent_reg.async_get_entity_id( + SCRIPT_DOMAIN, SCRIPT_DOMAIN, config_key + ) if entity_id is None: return @@ -39,11 +38,10 @@ def async_setup(hass: HomeAssistant) -> bool: hass.http.register_view( EditScriptConfigView( - DOMAIN, + SCRIPT_DOMAIN, "config", SCRIPT_CONFIG_PATH, cv.slug, - SCRIPT_ENTITY_SCHEMA, post_write_hook=hook, data_validator=async_validate_config_item, ) diff --git a/homeassistant/components/config/view.py b/homeassistant/components/config/view.py index 980c0f82dd1..14d89356c92 100644 --- a/homeassistant/components/config/view.py +++ b/homeassistant/components/config/view.py @@ -33,9 +33,9 @@ class BaseEditConfigView[_DataT: (dict[str, dict[str, Any]], list[dict[str, Any] config_type: str, path: str, key_schema: Callable[[Any], str], - data_schema: Callable[[dict[str, Any]], Any], *, post_write_hook: Callable[[str, str], Coroutine[Any, Any, None]] | None = None, + data_schema: Callable[[dict[str, Any]], Any] | None = None, data_validator: Callable[ [HomeAssistant, str, dict[str, Any]], Coroutine[Any, Any, dict[str, Any] | None], @@ -51,6 +51,12 @@ class 
BaseEditConfigView[_DataT: (dict[str, dict[str, Any]], list[dict[str, Any] self.post_write_hook = post_write_hook self.data_validator = data_validator self.mutation_lock = asyncio.Lock() + if (self.data_schema is None and self.data_validator is None) or ( + self.data_schema is not None and self.data_validator is not None + ): + raise ValueError( + "Must specify exactly one of data_schema or data_validator" + ) def _empty_config(self) -> _DataT: """Empty config if file not found.""" @@ -112,7 +118,8 @@ class BaseEditConfigView[_DataT: (dict[str, dict[str, Any]], list[dict[str, Any] if self.data_validator: await self.data_validator(hass, config_key, data) else: - self.data_schema(data) + # We either have a data_schema or a data_validator, ignore mypy + self.data_schema(data) # type: ignore[misc] except (vol.Invalid, HomeAssistantError) as err: return self.json_message( f"Message malformed: {err}", HTTPStatus.BAD_REQUEST diff --git a/homeassistant/components/control4/__init__.py b/homeassistant/components/control4/__init__.py index a3d0cebd1fc..8d0eb72a73b 100644 --- a/homeassistant/components/control4/__init__.py +++ b/homeassistant/components/control4/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations import json import logging -from typing import Any from aiohttp import client_exceptions from pyControl4.account import C4Account @@ -23,11 +22,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client, device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) from .const import ( API_RETRY_TIMES, @@ -166,41 +160,3 @@ async def get_items_of_category(hass: HomeAssistant, entry: ConfigEntry, categor for item in director_all_items if "categories" in item and category in item["categories"] ] - - -class Control4Entity(CoordinatorEntity[Any]): - """Base entity for Control4.""" - - def __init__( - self, - entry_data: dict, - coordinator: DataUpdateCoordinator[Any], - name: str | None, - idx: int, - device_name: str | None, - device_manufacturer: str | None, - device_model: str | None, - device_id: int, - ) -> None: - """Initialize a Control4 entity.""" - super().__init__(coordinator) - self.entry_data = entry_data - self._attr_name = name - self._attr_unique_id = str(idx) - self._idx = idx - self._controller_unique_id = entry_data[CONF_CONTROLLER_UNIQUE_ID] - self._device_name = device_name - self._device_manufacturer = device_manufacturer - self._device_model = device_model - self._device_id = device_id - - @property - def device_info(self) -> DeviceInfo: - """Return info of parent Control4 device of entity.""" - return DeviceInfo( - identifiers={(DOMAIN, str(self._device_id))}, - manufacturer=self._device_manufacturer, - model=self._device_model, - name=self._device_name, - via_device=(DOMAIN, self._controller_unique_id), - ) diff --git a/homeassistant/components/control4/config_flow.py b/homeassistant/components/control4/config_flow.py index aa7839b4383..19fae1ef7ca 100644 --- a/homeassistant/components/control4/config_flow.py +++ b/homeassistant/components/control4/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp.client_exceptions import ClientError from pyControl4.account import C4Account @@ -23,7 +23,7 @@ from 
homeassistant.const import ( CONF_SCAN_INTERVAL, CONF_USERNAME, ) -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import aiohttp_client, config_validation as cv from homeassistant.helpers.device_registry import format_mac @@ -49,7 +49,9 @@ DATA_SCHEMA = vol.Schema( class Control4Validator: """Validates that config details can be used to authenticate and communicate with Control4.""" - def __init__(self, host, username, password, hass): + def __init__( + self, host: str, username: str, password: str, hass: HomeAssistant + ) -> None: """Initialize.""" self.host = host self.username = username @@ -126,6 +128,8 @@ class Control4ConfigFlow(ConfigFlow, domain=DOMAIN): if not errors: controller_unique_id = hub.controller_unique_id + if TYPE_CHECKING: + assert hub.controller_unique_id mac = (controller_unique_id.split("_", 3))[2] formatted_mac = format_mac(mac) await self.async_set_unique_id(formatted_mac) @@ -150,17 +154,15 @@ class Control4ConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Control4.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/control4/director_utils.py b/homeassistant/components/control4/director_utils.py index 10e9486ee89..5e57237337c 100644 --- a/homeassistant/components/control4/director_utils.py +++ b/homeassistant/components/control4/director_utils.py @@ -37,7 +37,7 @@ async def update_variables_for_config_entry( try: return await _update_variables_for_config_entry(hass, entry, variable_names) except BadToken: - _LOGGER.info("Updating Control4 director token") + _LOGGER.debug("Updating Control4 director token") await refresh_tokens(hass, entry) return await _update_variables_for_config_entry(hass, entry, variable_names) diff --git a/homeassistant/components/control4/entity.py b/homeassistant/components/control4/entity.py new file mode 100644 index 00000000000..fdb22e6578d --- /dev/null +++ b/homeassistant/components/control4/entity.py @@ -0,0 +1,51 @@ +"""The Control4 integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import CONF_CONTROLLER_UNIQUE_ID, DOMAIN + + +class Control4Entity(CoordinatorEntity[Any]): + """Base entity for Control4.""" + + def __init__( + self, + entry_data: dict, + coordinator: DataUpdateCoordinator[Any], + name: str | None, + idx: int, + device_name: str | None, + device_manufacturer: str | None, + device_model: str | None, + device_id: int, + ) -> None: + """Initialize a Control4 entity.""" + super().__init__(coordinator) + self.entry_data = entry_data + self._attr_name = name + self._attr_unique_id = str(idx) + self._idx = idx + self._controller_unique_id = entry_data[CONF_CONTROLLER_UNIQUE_ID] + 
self._device_name = device_name + self._device_manufacturer = device_manufacturer + self._device_model = device_model + self._device_id = device_id + + @property + def device_info(self) -> DeviceInfo: + """Return info of parent Control4 device of entity.""" + return DeviceInfo( + identifiers={(DOMAIN, str(self._device_id))}, + manufacturer=self._device_manufacturer, + model=self._device_model, + name=self._device_name, + via_device=(DOMAIN, self._controller_unique_id), + ) diff --git a/homeassistant/components/control4/light.py b/homeassistant/components/control4/light.py index d7cfd44dc43..927f4643619 100644 --- a/homeassistant/components/control4/light.py +++ b/homeassistant/components/control4/light.py @@ -23,9 +23,10 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from . import Control4Entity, get_items_of_category +from . import get_items_of_category from .const import CONF_DIRECTOR, CONTROL4_ENTITY_TYPE, DOMAIN from .director_utils import update_variables_for_config_entry +from .entity import Control4Entity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/control4/manifest.json b/homeassistant/components/control4/manifest.json index 765f0dce78c..3088ebf8672 100644 --- a/homeassistant/components/control4/manifest.json +++ b/homeassistant/components/control4/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/control4", "iot_class": "local_polling", "loggers": ["pyControl4"], - "requirements": ["pyControl4==1.1.0"], + "requirements": ["pyControl4==1.2.0"], "ssdp": [ { "st": "c4:director" diff --git a/homeassistant/components/control4/media_player.py b/homeassistant/components/control4/media_player.py index 72aa44faaed..9e3421817a3 100644 --- a/homeassistant/components/control4/media_player.py +++ b/homeassistant/components/control4/media_player.py @@ -24,9 +24,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from . 
import Control4Entity from .const import CONF_DIRECTOR, CONF_DIRECTOR_ALL_ITEMS, CONF_UI_CONFIGURATION, DOMAIN from .director_utils import update_variables_for_config_entry +from .entity import Control4Entity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/conversation/__init__.py b/homeassistant/components/conversation/__init__.py index a7b163d69bd..898b7b2cf4f 100644 --- a/homeassistant/components/conversation/__init__.py +++ b/homeassistant/components/conversation/__init__.py @@ -35,6 +35,8 @@ from .const import ( ATTR_CONVERSATION_ID, ATTR_LANGUAGE, ATTR_TEXT, + DATA_COMPONENT, + DATA_DEFAULT_ENTITY, DOMAIN, HOME_ASSISTANT_AGENT, OLD_HOME_ASSISTANT_AGENT, @@ -42,24 +44,27 @@ from .const import ( SERVICE_RELOAD, ConversationEntityFeature, ) -from .default_agent import async_get_default_agent, async_setup_default_agent +from .default_agent import DefaultAgent, async_setup_default_agent from .entity import ConversationEntity from .http import async_setup as async_setup_conversation_http from .models import AbstractConversationAgent, ConversationInput, ConversationResult +from .trace import ConversationTraceEventType, async_conversation_trace_append __all__ = [ "DOMAIN", "HOME_ASSISTANT_AGENT", "OLD_HOME_ASSISTANT_AGENT", + "ConversationEntity", + "ConversationEntityFeature", + "ConversationInput", + "ConversationResult", + "ConversationTraceEventType", + "async_conversation_trace_append", "async_converse", "async_get_agent_info", "async_set_agent", "async_setup", "async_unset_agent", - "ConversationEntity", - "ConversationInput", - "ConversationResult", - "ConversationEntityFeature", ] _LOGGER = logging.getLogger(__name__) @@ -129,7 +134,6 @@ def async_get_conversation_languages( all conversation agents. """ agent_manager = get_agent_manager(hass) - entity_component: EntityComponent[ConversationEntity] = hass.data[DOMAIN] agents: list[ConversationEntity | AbstractConversationAgent] if agent_id: @@ -145,7 +149,7 @@ def async_get_conversation_languages( agents = [agent] else: - agents = list(entity_component.entities) + agents = list(hass.data[DATA_COMPONENT].entities) for info in agent_manager.async_get_agent_info(): agent = agent_manager.async_get_agent(info.id) assert agent is not None @@ -203,12 +207,36 @@ async def async_prepare_agent( await agent.async_prepare(language) +async def async_handle_sentence_triggers( + hass: HomeAssistant, user_input: ConversationInput +) -> str | None: + """Try to match input against sentence triggers and return response text. + + Returns None if no match occurred. + """ + default_agent = async_get_agent(hass) + assert isinstance(default_agent, DefaultAgent) + + return await default_agent.async_handle_sentence_triggers(user_input) + + +async def async_handle_intents( + hass: HomeAssistant, user_input: ConversationInput +) -> intent.IntentResponse | None: + """Try to match input against registered intents and return response. + + Returns None if no match occurred. 
+ """ + default_agent = async_get_agent(hass) + assert isinstance(default_agent, DefaultAgent) + + return await default_agent.async_handle_intents(user_input) + + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Register the process service.""" - entity_component: EntityComponent[ConversationEntity] = EntityComponent( - _LOGGER, DOMAIN, hass - ) - hass.data[DOMAIN] = entity_component + entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass) + hass.data[DATA_COMPONENT] = entity_component await async_setup_default_agent( hass, entity_component, config.get(DOMAIN, {}).get("intents", {}) @@ -246,8 +274,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def handle_reload(service: ServiceCall) -> None: """Reload intents.""" - agent = async_get_default_agent(hass) - await agent.async_reload(language=service.data.get(ATTR_LANGUAGE)) + await hass.data[DATA_DEFAULT_ENTITY].async_reload( + language=service.data.get(ATTR_LANGUAGE) + ) hass.services.async_register( DOMAIN, @@ -266,11 +295,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[ConversationEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[ConversationEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) diff --git a/homeassistant/components/conversation/agent_manager.py b/homeassistant/components/conversation/agent_manager.py index 8202b9a0ed4..7516d9d22ef 100644 --- a/homeassistant/components/conversation/agent_manager.py +++ b/homeassistant/components/conversation/agent_manager.py @@ -10,10 +10,13 @@ import voluptuous as vol from homeassistant.core import Context, HomeAssistant, async_get_hass, callback from homeassistant.helpers import config_validation as cv, singleton -from homeassistant.helpers.entity_component import EntityComponent -from .const import DOMAIN, HOME_ASSISTANT_AGENT, OLD_HOME_ASSISTANT_AGENT -from .default_agent import async_get_default_agent +from .const import ( + DATA_COMPONENT, + DATA_DEFAULT_ENTITY, + HOME_ASSISTANT_AGENT, + OLD_HOME_ASSISTANT_AGENT, +) from .entity import ConversationEntity from .models import ( AbstractConversationAgent, @@ -51,11 +54,10 @@ def async_get_agent( ) -> AbstractConversationAgent | ConversationEntity | None: """Get specified agent.""" if agent_id is None or agent_id in (HOME_ASSISTANT_AGENT, OLD_HOME_ASSISTANT_AGENT): - return async_get_default_agent(hass) + return hass.data[DATA_DEFAULT_ENTITY] if "." 
in agent_id: - entity_component: EntityComponent[ConversationEntity] = hass.data[DOMAIN] - return entity_component.get_entity(agent_id) + return hass.data[DATA_COMPONENT].get_entity(agent_id) manager = get_agent_manager(hass) diff --git a/homeassistant/components/conversation/const.py b/homeassistant/components/conversation/const.py index 14b2d1d4955..619a41fd002 100644 --- a/homeassistant/components/conversation/const.py +++ b/homeassistant/components/conversation/const.py @@ -1,6 +1,17 @@ """Const for conversation integration.""" +from __future__ import annotations + from enum import IntFlag +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from .default_agent import DefaultAgent + from .entity import ConversationEntity DOMAIN = "conversation" DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"} @@ -15,6 +26,9 @@ ATTR_CONVERSATION_ID = "conversation_id" SERVICE_PROCESS = "process" SERVICE_RELOAD = "reload" +DATA_COMPONENT: HassKey[EntityComponent[ConversationEntity]] = HassKey(DOMAIN) +DATA_DEFAULT_ENTITY: HassKey[DefaultAgent] = HassKey(f"{DOMAIN}_default_entity") + class ConversationEntityFeature(IntFlag): """Supported features of the conversation entity.""" diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 05b4d194d33..66ffb25fa1a 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -3,8 +3,10 @@ from __future__ import annotations import asyncio +from collections import OrderedDict from collections.abc import Awaitable, Callable, Iterable from dataclasses import dataclass +from enum import Enum, auto import functools import logging from pathlib import Path @@ -12,14 +14,22 @@ import re import time from typing import IO, Any, cast -from hassil.expression import Expression, ListReference, Sequence -from hassil.intents import Intents, SlotList, TextSlotList, WildcardSlotList +from hassil.expression import Expression, ListReference, Sequence, TextChunk +from hassil.intents import ( + Intents, + SlotList, + TextSlotList, + TextSlotValue, + WildcardSlotList, +) from hassil.recognize import ( MISSING_ENTITY, RecognizeResult, - UnmatchedTextEntity, recognize_all, + recognize_best, ) +from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity +from hassil.trie import Trie from hassil.util import merge_dict from home_assistant_intents import ErrorKey, get_intents, get_languages import yaml @@ -44,7 +54,12 @@ from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.event import async_track_state_added_domain from homeassistant.util.json import JsonObjectType, json_loads_object -from .const import DEFAULT_EXPOSED_ATTRIBUTES, DOMAIN, ConversationEntityFeature +from .const import ( + DATA_DEFAULT_ENTITY, + DEFAULT_EXPOSED_ATTRIBUTES, + DOMAIN, + ConversationEntityFeature, +) from .entity import ConversationEntity from .models import ConversationInput, ConversationResult from .trace import ConversationTraceEventType, async_conversation_trace_append @@ -55,21 +70,14 @@ _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"] REGEX_TYPE = type(re.compile("")) TRIGGER_CALLBACK_TYPE = Callable[ - [str, RecognizeResult, str | None], Awaitable[str | None] + [ConversationInput, RecognizeResult], Awaitable[str | None] ] METADATA_CUSTOM_SENTENCE = 
"hass_custom_sentence" METADATA_CUSTOM_FILE = "hass_custom_file" -DATA_DEFAULT_ENTITY = "conversation_default_entity" ERROR_SENTINEL = object() -@core.callback -def async_get_default_agent(hass: core.HomeAssistant) -> DefaultAgent: - """Get the default agent.""" - return hass.data[DATA_DEFAULT_ENTITY] - - def json_load(fp: IO[str]) -> JsonObjectType: """Wrap json_loads for get_intents.""" return json_loads_object(fp.read()) @@ -103,6 +111,77 @@ class SentenceTriggerResult: matched_triggers: dict[int, RecognizeResult] +class IntentMatchingStage(Enum): + """Stages of intent matching.""" + + EXPOSED_ENTITIES_ONLY = auto() + """Match against exposed entities only.""" + + UNEXPOSED_ENTITIES = auto() + """Match against unexposed entities in Home Assistant.""" + + FUZZY = auto() + """Capture names that are not known to Home Assistant.""" + + +@dataclass(frozen=True) +class IntentCacheKey: + """Key for IntentCache.""" + + text: str + """User input text.""" + + language: str + """Language of text.""" + + device_id: str | None + """Device id from user input.""" + + +@dataclass(frozen=True) +class IntentCacheValue: + """Value for IntentCache.""" + + result: RecognizeResult | None + """Result of intent recognition.""" + + stage: IntentMatchingStage + """Stage where result was found.""" + + +class IntentCache: + """LRU cache for intent recognition results.""" + + def __init__(self, capacity: int) -> None: + """Initialize cache.""" + self.cache: OrderedDict[IntentCacheKey, IntentCacheValue] = OrderedDict() + self.capacity = capacity + + def get(self, key: IntentCacheKey) -> IntentCacheValue | None: + """Get value for cache or None.""" + if key not in self.cache: + return None + + # Move the key to the end to show it was recently used + self.cache.move_to_end(key) + return self.cache[key] + + def put(self, key: IntentCacheKey, value: IntentCacheValue) -> None: + """Put a value in the cache, evicting the least recently used item if necessary.""" + if key in self.cache: + # Update value and mark as recently used + self.cache.move_to_end(key) + elif len(self.cache) >= self.capacity: + # Evict the oldest item + self.cache.popitem(last=False) + + self.cache[key] = value + + def clear(self) -> None: + """Clear the cache.""" + self.cache.clear() + + def _get_language_variations(language: str) -> Iterable[str]: """Generate language codes with and without region.""" yield language @@ -162,12 +241,19 @@ class DefaultAgent(ConversationEntity): self._config_intents: dict[str, Any] = config_intents self._slot_lists: dict[str, SlotList] | None = None + # Used to filter slot lists before intent matching + self._exposed_names_trie: Trie | None = None + self._unexposed_names_trie: Trie | None = None + # Sentences that will trigger a callback (skipping intent recognition) - self._trigger_sentences: list[TriggerData] = [] + self.trigger_sentences: list[TriggerData] = [] self._trigger_intents: Intents | None = None self._unsub_clear_slot_list: list[Callable[[], None]] | None = None self._load_intents_lock = asyncio.Lock() + # LRU cache to avoid unnecessary intent matching + self._intent_cache = IntentCache(capacity=128) + @property def supported_languages(self) -> list[str]: """Return a list of supported languages.""" @@ -214,13 +300,10 @@ class DefaultAgent(ConversationEntity): async_listen_entity_updates(self.hass, DOMAIN, self._async_clear_slot_list), ] - async def async_recognize( - self, user_input: ConversationInput - ) -> RecognizeResult | SentenceTriggerResult | None: + async def async_recognize_intent( + self, 
user_input: ConversationInput, strict_intents_only: bool = False + ) -> RecognizeResult | None: """Recognize intent from user input.""" - if trigger_result := await self._match_triggers(user_input.text): - return trigger_result - language = user_input.language or self.hass.config.language lang_intents = await self.async_get_or_load_intents(language) @@ -232,6 +315,16 @@ class DefaultAgent(ConversationEntity): slot_lists = self._make_slot_lists() intent_context = self._make_intent_context(user_input) + if self._exposed_names_trie is not None: + # Filter by input string + text_lower = user_input.text.strip().lower() + slot_lists["name"] = TextSlotList( + name="name", + values=[ + result[2] for result in self._exposed_names_trie.find(text_lower) + ], + ) + start = time.monotonic() result = await self.hass.async_add_executor_job( @@ -241,6 +334,7 @@ class DefaultAgent(ConversationEntity): slot_lists, intent_context, language, + strict_intents_only, ) _LOGGER.debug( @@ -252,56 +346,36 @@ class DefaultAgent(ConversationEntity): async def async_process(self, user_input: ConversationInput) -> ConversationResult: """Process a sentence.""" - language = user_input.language or self.hass.config.language - conversation_id = None # Not supported - - result = await self.async_recognize(user_input) # Check if a trigger matched - if isinstance(result, SentenceTriggerResult): - # Gather callback responses in parallel - trigger_callbacks = [ - self._trigger_sentences[trigger_id].callback( - result.sentence, trigger_result, user_input.device_id - ) - for trigger_id, trigger_result in result.matched_triggers.items() - ] - - # Use first non-empty result as response. - # - # There may be multiple copies of a trigger running when editing in - # the UI, so it's critical that we filter out empty responses here. 
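
The refactor above also changes the sentence-trigger callback contract: TRIGGER_CALLBACK_TYPE now receives the full ConversationInput together with the RecognizeResult, instead of the old (sentence, result, device_id) arguments. A minimal sketch of a callback written against the new signature; the function name, device check and response text are illustrative only:

from hassil.recognize import RecognizeResult

from homeassistant.components.conversation.models import ConversationInput


async def example_trigger_callback(
    user_input: ConversationInput, result: RecognizeResult
) -> str | None:
    """Sentence-trigger callback using the new (ConversationInput, RecognizeResult) signature."""
    # The device id is no longer a separate argument; it travels with the input.
    if user_input.device_id is None:
        # Returning None lets another copy of the trigger, or the translated
        # "Done" acknowledgment, supply the response text instead.
        return None
    return f"Triggered from device {user_input.device_id}"
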
- response_text: str | None = None - response_set_by_trigger = False - for trigger_future in asyncio.as_completed(trigger_callbacks): - trigger_response = await trigger_future - if trigger_response is None: - continue - - response_text = trigger_response - response_set_by_trigger = True - break + if trigger_result := await self.async_recognize_sentence_trigger(user_input): + # Process callbacks and get response + response_text = await self._handle_trigger_result( + trigger_result, user_input + ) # Convert to conversation result - response = intent.IntentResponse(language=language) + response = intent.IntentResponse( + language=user_input.language or self.hass.config.language + ) response.response_type = intent.IntentResponseType.ACTION_DONE - - if response_set_by_trigger: - # Response was explicitly set to empty - response_text = response_text or "" - elif not response_text: - # Use translated acknowledgment for pipeline language - translations = await translation.async_get_translations( - self.hass, language, DOMAIN, [DOMAIN] - ) - response_text = translations.get( - f"component.{DOMAIN}.agent.done", "Done" - ) - response.async_set_speech(response_text) return ConversationResult(response=response) + # Match intents + intent_result = await self.async_recognize_intent(user_input) + return await self._async_process_intent_result(intent_result, user_input) + + async def _async_process_intent_result( + self, + result: RecognizeResult | None, + user_input: ConversationInput, + ) -> ConversationResult: + """Process user input with intents.""" + language = user_input.language or self.hass.config.language + conversation_id = None # Not supported + # Intent match or failure lang_intents = await self.async_get_or_load_intents(language) @@ -437,113 +511,274 @@ class DefaultAgent(ConversationEntity): slot_lists: dict[str, SlotList], intent_context: dict[str, Any] | None, language: str, + strict_intents_only: bool, ) -> RecognizeResult | None: """Search intents for a match to user input.""" - custom_result: RecognizeResult | None = None - name_result: RecognizeResult | None = None - best_results: list[RecognizeResult] = [] - best_text_chunks_matched: int | None = None - for result in recognize_all( + skip_exposed_match = False + + # Try cache first + cache_key = IntentCacheKey( + text=user_input.text, language=language, device_id=user_input.device_id + ) + cache_value = self._intent_cache.get(cache_key) + if cache_value is not None: + if (cache_value.result is not None) and ( + cache_value.stage == IntentMatchingStage.EXPOSED_ENTITIES_ONLY + ): + _LOGGER.debug("Got cached result for exposed entities") + return cache_value.result + + # Continue with matching, but we know we won't succeed for exposed + # entities only. 
+ skip_exposed_match = True + + if not skip_exposed_match: + start_time = time.monotonic() + strict_result = self._recognize_strict( + user_input, lang_intents, slot_lists, intent_context, language + ) + _LOGGER.debug( + "Checked exposed entities in %s second(s)", + time.monotonic() - start_time, + ) + + # Update cache + self._intent_cache.put( + cache_key, + IntentCacheValue( + result=strict_result, + stage=IntentMatchingStage.EXPOSED_ENTITIES_ONLY, + ), + ) + + if strict_result is not None: + # Successful strict match with exposed entities + return strict_result + + if strict_intents_only: + # Don't try matching against all entities or doing a fuzzy match + return None + + # Try again with all entities (including unexposed) + skip_unexposed_entities_match = False + if cache_value is not None: + if (cache_value.result is not None) and ( + cache_value.stage == IntentMatchingStage.UNEXPOSED_ENTITIES + ): + _LOGGER.debug("Got cached result for all entities") + return cache_value.result + + # Continue with matching, but we know we won't succeed for all + # entities. + skip_unexposed_entities_match = True + + if not skip_unexposed_entities_match: + unexposed_entities_slot_lists = { + **slot_lists, + "name": self._get_unexposed_entity_names(user_input.text), + } + + start_time = time.monotonic() + strict_result = self._recognize_strict( + user_input, + lang_intents, + unexposed_entities_slot_lists, + intent_context, + language, + ) + + _LOGGER.debug( + "Checked all entities in %s second(s)", time.monotonic() - start_time + ) + + # Update cache + self._intent_cache.put( + cache_key, + IntentCacheValue( + result=strict_result, stage=IntentMatchingStage.UNEXPOSED_ENTITIES + ), + ) + + if strict_result is not None: + # Not a successful match, but useful for an error message. + # This should fail the intent handling phase (async_match_targets). + return strict_result + + # Try again with missing entities enabled + skip_fuzzy_match = False + if cache_value is not None: + if (cache_value.result is not None) and ( + cache_value.stage == IntentMatchingStage.FUZZY + ): + _LOGGER.debug("Got cached result for fuzzy match") + return cache_value.result + + # We know we won't succeed for fuzzy matching. 
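
For reference, a small usage sketch of the IntentCache added above, assuming IntentCache, IntentCacheKey, IntentCacheValue and IntentMatchingStage can be imported from this module; the key text is made up:

from homeassistant.components.conversation.default_agent import (
    IntentCache,
    IntentCacheKey,
    IntentCacheValue,
    IntentMatchingStage,
)

cache = IntentCache(capacity=128)
key = IntentCacheKey(text="turn on the light", language="en", device_id=None)

# A miss returns None; a stored value remembers which matching stage produced it.
assert cache.get(key) is None
cache.put(
    key,
    IntentCacheValue(result=None, stage=IntentMatchingStage.EXPOSED_ENTITIES_ONLY),
)
cached = cache.get(key)
assert cached is not None
assert cached.stage is IntentMatchingStage.EXPOSED_ENTITIES_ONLY

# The cache must be cleared whenever intents or slot lists change.
cache.clear()
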
+ skip_fuzzy_match = True + + maybe_result: RecognizeResult | None = None + if not skip_fuzzy_match: + start_time = time.monotonic() + best_num_matched_entities = 0 + best_num_unmatched_entities = 0 + best_num_unmatched_ranges = 0 + for result in recognize_all( + user_input.text, + lang_intents.intents, + slot_lists=slot_lists, + intent_context=intent_context, + allow_unmatched_entities=True, + ): + if result.text_chunks_matched < 1: + # Skip results that don't match any literal text + continue + + # Don't count missing entities that couldn't be filled from context + num_matched_entities = 0 + for matched_entity in result.entities_list: + if matched_entity.name not in result.unmatched_entities: + num_matched_entities += 1 + + num_unmatched_entities = 0 + num_unmatched_ranges = 0 + for unmatched_entity in result.unmatched_entities_list: + if isinstance(unmatched_entity, UnmatchedTextEntity): + if unmatched_entity.text != MISSING_ENTITY: + num_unmatched_entities += 1 + elif isinstance(unmatched_entity, UnmatchedRangeEntity): + num_unmatched_ranges += 1 + num_unmatched_entities += 1 + else: + num_unmatched_entities += 1 + + if ( + (maybe_result is None) # first result + or (num_matched_entities > best_num_matched_entities) + or ( + # Fewer unmatched entities + (num_matched_entities == best_num_matched_entities) + and (num_unmatched_entities < best_num_unmatched_entities) + ) + or ( + # Prefer unmatched ranges + (num_matched_entities == best_num_matched_entities) + and (num_unmatched_entities == best_num_unmatched_entities) + and (num_unmatched_ranges > best_num_unmatched_ranges) + ) + or ( + # More literal text matched + (num_matched_entities == best_num_matched_entities) + and (num_unmatched_entities == best_num_unmatched_entities) + and (num_unmatched_ranges == best_num_unmatched_ranges) + and ( + result.text_chunks_matched + > maybe_result.text_chunks_matched + ) + ) + or ( + # Prefer match failures with entities + (result.text_chunks_matched == maybe_result.text_chunks_matched) + and (num_unmatched_entities == best_num_unmatched_entities) + and (num_unmatched_ranges == best_num_unmatched_ranges) + and ( + ("name" in result.entities) + or ("name" in result.unmatched_entities) + ) + ) + ): + maybe_result = result + best_num_matched_entities = num_matched_entities + best_num_unmatched_entities = num_unmatched_entities + best_num_unmatched_ranges = num_unmatched_ranges + + # Update cache + self._intent_cache.put( + cache_key, + IntentCacheValue(result=maybe_result, stage=IntentMatchingStage.FUZZY), + ) + + _LOGGER.debug( + "Did fuzzy match in %s second(s)", time.monotonic() - start_time + ) + + return maybe_result + + def _get_unexposed_entity_names(self, text: str) -> TextSlotList: + """Get filtered slot list with unexposed entity names in Home Assistant.""" + if self._unexposed_names_trie is None: + # Build trie + self._unexposed_names_trie = Trie() + for name_tuple in self._get_entity_name_tuples(exposed=False): + self._unexposed_names_trie.insert( + name_tuple[0].lower(), + TextSlotValue.from_tuple(name_tuple, allow_template=False), + ) + + # Build filtered slot list + text_lower = text.strip().lower() + return TextSlotList( + name="name", + values=[ + result[2] for result in self._unexposed_names_trie.find(text_lower) + ], + ) + + def _get_entity_name_tuples( + self, exposed: bool + ) -> Iterable[tuple[str, str, dict[str, Any]]]: + """Yield (input name, output name, context) tuples for entities.""" + entity_registry = er.async_get(self.hass) + + for state in self.hass.states.async_all(): 
+ entity_exposed = async_should_expose(self.hass, DOMAIN, state.entity_id) + if exposed and (not entity_exposed): + # Required exposed, entity is not + continue + + if (not exposed) and entity_exposed: + # Required not exposed, entity is + continue + + # Checked against "requires_context" and "excludes_context" in hassil + context = {"domain": state.domain} + if state.attributes: + # Include some attributes + for attr in DEFAULT_EXPOSED_ATTRIBUTES: + if attr not in state.attributes: + continue + context[attr] = state.attributes[attr] + + if ( + entity := entity_registry.async_get(state.entity_id) + ) and entity.aliases: + for alias in entity.aliases: + alias = alias.strip() + if not alias: + continue + + yield (alias, alias, context) + + # Default name + yield (state.name, state.name, context) + + def _recognize_strict( + self, + user_input: ConversationInput, + lang_intents: LanguageIntents, + slot_lists: dict[str, SlotList], + intent_context: dict[str, Any] | None, + language: str, + ) -> RecognizeResult | None: + """Search intents for a strict match to user input.""" + return recognize_best( user_input.text, lang_intents.intents, slot_lists=slot_lists, intent_context=intent_context, language=language, - ): - # User intents have highest priority - if (result.intent_metadata is not None) and result.intent_metadata.get( - METADATA_CUSTOM_SENTENCE - ): - if (custom_result is None) or ( - result.text_chunks_matched > custom_result.text_chunks_matched - ): - custom_result = result - - # Clear builtin results - best_results = [] - name_result = None - continue - - # Prioritize results with a "name" slot, but still prefer ones with - # more literal text matched. - if ( - ("name" in result.entities) - and (not result.entities["name"].is_wildcard) - and ( - (name_result is None) - or (result.text_chunks_matched > name_result.text_chunks_matched) - ) - ): - name_result = result - - if (best_text_chunks_matched is None) or ( - result.text_chunks_matched > best_text_chunks_matched - ): - # Only overwrite if more literal text was matched. - # This causes wildcards to match last. - best_results = [result] - best_text_chunks_matched = result.text_chunks_matched - elif result.text_chunks_matched == best_text_chunks_matched: - # Accumulate results with the same number of literal text matched. - # We will resolve the ambiguity below. 
- best_results.append(result) - - if custom_result is not None: - # Prioritize user intents - return custom_result - - if name_result is not None: - # Prioritize matches with entity names above area names - return name_result - - if best_results: - # Successful strict match - return best_results[0] - - # Try again with missing entities enabled - maybe_result: RecognizeResult | None = None - for result in recognize_all( - user_input.text, - lang_intents.intents, - slot_lists=slot_lists, - intent_context=intent_context, - allow_unmatched_entities=True, - ): - if result.text_chunks_matched < 1: - # Skip results that don't match any literal text - continue - - # Don't count missing entities that couldn't be filled from context - num_unmatched_entities = 0 - for entity in result.unmatched_entities_list: - if isinstance(entity, UnmatchedTextEntity): - if entity.text != MISSING_ENTITY: - num_unmatched_entities += 1 - else: - num_unmatched_entities += 1 - - if maybe_result is None: - # First result - maybe_result = result - best_num_unmatched_entities = num_unmatched_entities - elif num_unmatched_entities < best_num_unmatched_entities: - # Fewer unmatched entities - maybe_result = result - best_num_unmatched_entities = num_unmatched_entities - elif num_unmatched_entities == best_num_unmatched_entities: - if (result.text_chunks_matched > maybe_result.text_chunks_matched) or ( - (result.text_chunks_matched == maybe_result.text_chunks_matched) - and ("name" in result.unmatched_entities) # prefer entities - ): - # More literal text chunks matched, but prefer entities to areas, etc. - maybe_result = result - - if (maybe_result is not None) and maybe_result.unmatched_entities: - # Failed to match, but we have more information about why in unmatched_entities - return maybe_result - - # Complete match failure - return None + best_metadata_key=METADATA_CUSTOM_SENTENCE, + best_slot_name="name", + ) async def _build_speech( self, @@ -629,6 +864,9 @@ class DefaultAgent(ConversationEntity): self._lang_intents.pop(language, None) _LOGGER.debug("Cleared intents for language: %s", language) + # Intents have changed, so we must clear the cache + self._intent_cache.clear() + async def async_prepare(self, language: str | None = None) -> None: """Load intents for a language.""" if language is None: @@ -813,10 +1051,15 @@ class DefaultAgent(ConversationEntity): if self._unsub_clear_slot_list is None: return self._slot_lists = None + self._exposed_names_trie = None + self._unexposed_names_trie = None for unsub in self._unsub_clear_slot_list: unsub() self._unsub_clear_slot_list = None + # Slot lists have changed, so we must clear the cache + self._intent_cache.clear() + @core.callback def _make_slot_lists(self) -> dict[str, SlotList]: """Create slot lists with areas and entity names/aliases.""" @@ -825,48 +1068,15 @@ class DefaultAgent(ConversationEntity): start = time.monotonic() - entity_registry = er.async_get(self.hass) - states = [ - state - for state in self.hass.states.async_all() - if async_should_expose(self.hass, DOMAIN, state.entity_id) - ] - - # Gather exposed entity names. + # Gather entity names, keeping track of exposed names. + # We try intent recognition with only exposed names first, then all names. # # NOTE: We do not pass entity ids in here because multiple entities may # have the same name. The intent matcher doesn't gather all matching # values for a list, just the first. So we will need to match by name no # matter what. 
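
The NOTE above is also why the exposed names can be filtered by plain text: each name is inserted into a hassil Trie keyed on its lowercased text, and only names that literally occur in the user's sentence are kept in the "name" slot list (the same pattern used by async_recognize_intent and _get_unexposed_entity_names above). A sketch of that filtering with made-up entity names:

from hassil.intents import TextSlotList
from hassil.trie import Trie

# Hypothetical exposed names; in the agent these come from
# _get_entity_name_tuples(exposed=True).
name_tuples = [
    ("kitchen light", "kitchen light", {"domain": "light"}),
    ("ceiling fan", "ceiling fan", {"domain": "fan"}),
]
name_list = TextSlotList.from_tuples(name_tuples, allow_template=False)

names_trie = Trie()
for name_value in name_list.values:
    # text_in is a TextChunk; index the trie by its lowercased text.
    names_trie.insert(name_value.text_in.text.strip().lower(), name_value)

text_lower = "turn on the kitchen light".strip().lower()
filtered_name_list = TextSlotList(
    name="name",
    # find() yields tuples whose third element is the stored slot value.
    values=[result[2] for result in names_trie.find(text_lower)],
)
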
- entity_names = [] - for state in states: - # Checked against "requires_context" and "excludes_context" in hassil - context = {"domain": state.domain} - if state.attributes: - # Include some attributes - for attr in DEFAULT_EXPOSED_ATTRIBUTES: - if attr not in state.attributes: - continue - context[attr] = state.attributes[attr] - - entity = entity_registry.async_get(state.entity_id) - - if not entity: - # Default name - entity_names.append((state.name, state.name, context)) - continue - - if entity.aliases: - for alias in entity.aliases: - if not alias.strip(): - continue - - entity_names.append((alias, alias, context)) - - # Default name - entity_names.append((state.name, state.name, context)) - - _LOGGER.debug("Exposed entities: %s", entity_names) + exposed_entity_names = list(self._get_entity_name_tuples(exposed=True)) + _LOGGER.debug("Exposed entities: %s", exposed_entity_names) # Expose all areas. areas = ar.async_get(self.hass) @@ -898,9 +1108,17 @@ class DefaultAgent(ConversationEntity): floor_names.append((alias, floor.name)) + # Build trie + self._exposed_names_trie = Trie() + name_list = TextSlotList.from_tuples(exposed_entity_names, allow_template=False) + for name_value in name_list.values: + assert isinstance(name_value.text_in, TextChunk) + name_text = name_value.text_in.text.strip().lower() + self._exposed_names_trie.insert(name_text, name_value) + self._slot_lists = { "area": TextSlotList.from_tuples(area_names, allow_template=False), - "name": TextSlotList.from_tuples(entity_names, allow_template=False), + "name": name_list, "floor": TextSlotList.from_tuples(floor_names, allow_template=False), } @@ -970,7 +1188,7 @@ class DefaultAgent(ConversationEntity): ) -> core.CALLBACK_TYPE: """Register a list of sentences that will trigger a callback when recognized.""" trigger_data = TriggerData(sentences=sentences, callback=callback) - self._trigger_sentences.append(trigger_data) + self.trigger_sentences.append(trigger_data) # Force rebuild on next use self._trigger_intents = None @@ -987,7 +1205,7 @@ class DefaultAgent(ConversationEntity): # This works because the intents are rebuilt on every # register/unregister. str(trigger_id): {"data": [{"sentences": trigger_data.sentences}]} - for trigger_id, trigger_data in enumerate(self._trigger_sentences) + for trigger_id, trigger_data in enumerate(self.trigger_sentences) }, } @@ -1010,18 +1228,20 @@ class DefaultAgent(ConversationEntity): @core.callback def _unregister_trigger(self, trigger_data: TriggerData) -> None: """Unregister a set of trigger sentences.""" - self._trigger_sentences.remove(trigger_data) + self.trigger_sentences.remove(trigger_data) # Force rebuild on next use self._trigger_intents = None - async def _match_triggers(self, sentence: str) -> SentenceTriggerResult | None: + async def async_recognize_sentence_trigger( + self, user_input: ConversationInput + ) -> SentenceTriggerResult | None: """Try to match sentence against registered trigger sentences. Calls the registered callbacks if there's a match and returns a sentence trigger result. 
""" - if not self._trigger_sentences: + if not self.trigger_sentences: # No triggers registered return None @@ -1033,7 +1253,7 @@ class DefaultAgent(ConversationEntity): matched_triggers: dict[int, RecognizeResult] = {} matched_template: str | None = None - for result in recognize_all(sentence, self._trigger_intents): + for result in recognize_all(user_input.text, self._trigger_intents): if result.intent_sentence is not None: matched_template = result.intent_sentence.text @@ -1050,12 +1270,86 @@ class DefaultAgent(ConversationEntity): _LOGGER.debug( "'%s' matched %s trigger(s): %s", - sentence, + user_input.text, len(matched_triggers), list(matched_triggers), ) - return SentenceTriggerResult(sentence, matched_template, matched_triggers) + return SentenceTriggerResult( + user_input.text, matched_template, matched_triggers + ) + + async def _handle_trigger_result( + self, result: SentenceTriggerResult, user_input: ConversationInput + ) -> str: + """Run sentence trigger callbacks and return response text.""" + + # Gather callback responses in parallel + trigger_callbacks = [ + self.trigger_sentences[trigger_id].callback(user_input, trigger_result) + for trigger_id, trigger_result in result.matched_triggers.items() + ] + + # Use first non-empty result as response. + # + # There may be multiple copies of a trigger running when editing in + # the UI, so it's critical that we filter out empty responses here. + response_text = "" + response_set_by_trigger = False + for trigger_future in asyncio.as_completed(trigger_callbacks): + trigger_response = await trigger_future + if trigger_response is None: + continue + + response_text = trigger_response + response_set_by_trigger = True + break + + if response_set_by_trigger: + # Response was explicitly set to empty + response_text = response_text or "" + elif not response_text: + # Use translated acknowledgment for pipeline language + language = user_input.language or self.hass.config.language + translations = await translation.async_get_translations( + self.hass, language, DOMAIN, [DOMAIN] + ) + response_text = translations.get( + f"component.{DOMAIN}.conversation.agent.done", "Done" + ) + + return response_text + + async def async_handle_sentence_triggers( + self, user_input: ConversationInput + ) -> str | None: + """Try to input sentence against sentence triggers and return response text. + + Returns None if no match occurred. + """ + if trigger_result := await self.async_recognize_sentence_trigger(user_input): + return await self._handle_trigger_result(trigger_result, user_input) + + return None + + async def async_handle_intents( + self, + user_input: ConversationInput, + ) -> intent.IntentResponse | None: + """Try to match sentence against registered intents and return response. + + Only performs strict matching with exposed entities and exact wording. + Returns None if no match occurred. 
+ """ + result = await self.async_recognize_intent(user_input, strict_intents_only=True) + if not isinstance(result, RecognizeResult): + # No error message on failed match + return None + + conversation_result = await self._async_process_intent_result( + result, user_input + ) + return conversation_result.response def _make_error_result( @@ -1067,7 +1361,6 @@ def _make_error_result( """Create conversation result with error code and text.""" response = intent.IntentResponse(language=language) response.async_set_error(error_code, response_text) - return ConversationResult(response, conversation_id) @@ -1094,6 +1387,10 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str if matched_area_entity := result.entities.get("area"): matched_area = matched_area_entity.text.strip() + matched_floor: str | None = None + if matched_floor_entity := result.entities.get("floor"): + matched_floor = matched_floor_entity.text.strip() + if unmatched_name := unmatched_text.get("name"): if matched_area: # device in area @@ -1101,6 +1398,12 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str "entity": unmatched_name, "area": matched_area, } + if matched_floor: + # device on floor + return ErrorKey.NO_ENTITY_IN_FLOOR, { + "entity": unmatched_name, + "floor": matched_floor, + } # device only return ErrorKey.NO_ENTITY, {"entity": unmatched_name} @@ -1183,17 +1486,62 @@ def _get_match_error_response( if reason == intent.MatchFailedReason.STATE: # Entity is not in correct state - assert match_error.constraints.states - state = next(iter(match_error.constraints.states)) - if match_error.constraints.domains: + assert constraints.states + state = next(iter(constraints.states)) + if constraints.domains: # Translate if domain is available - domain = next(iter(match_error.constraints.domains)) + domain = next(iter(constraints.domains)) state = translation.async_translate_state( hass, state, domain, None, None, None ) return ErrorKey.ENTITY_WRONG_STATE, {"state": state} + if reason == intent.MatchFailedReason.ASSISTANT: + # Not exposed + if constraints.name: + if constraints.area_name: + return ErrorKey.NO_ENTITY_IN_AREA_EXPOSED, { + "entity": constraints.name, + "area": constraints.area_name, + } + if constraints.floor_name: + return ErrorKey.NO_ENTITY_IN_FLOOR_EXPOSED, { + "entity": constraints.name, + "floor": constraints.floor_name, + } + return ErrorKey.NO_ENTITY_EXPOSED, {"entity": constraints.name} + + if constraints.device_classes: + device_class = next(iter(constraints.device_classes)) + + if constraints.area_name: + return ErrorKey.NO_DEVICE_CLASS_IN_AREA_EXPOSED, { + "device_class": device_class, + "area": constraints.area_name, + } + if constraints.floor_name: + return ErrorKey.NO_DEVICE_CLASS_IN_FLOOR_EXPOSED, { + "device_class": device_class, + "floor": constraints.floor_name, + } + return ErrorKey.NO_DEVICE_CLASS_EXPOSED, {"device_class": device_class} + + if constraints.domains: + domain = next(iter(constraints.domains)) + + if constraints.area_name: + return ErrorKey.NO_DOMAIN_IN_AREA_EXPOSED, { + "domain": domain, + "area": constraints.area_name, + } + if constraints.floor_name: + return ErrorKey.NO_DOMAIN_IN_FLOOR_EXPOSED, { + "domain": domain, + "floor": constraints.floor_name, + } + return ErrorKey.NO_DOMAIN_EXPOSED, {"domain": domain} + # Default error return ErrorKey.NO_INTENT, {} diff --git a/homeassistant/components/conversation/http.py b/homeassistant/components/conversation/http.py index 591298cbac1..8134ecb0eee 100644 --- 
a/homeassistant/components/conversation/http.py +++ b/homeassistant/components/conversation/http.py @@ -6,12 +6,8 @@ from collections.abc import Iterable from typing import Any from aiohttp import web -from hassil.recognize import ( - MISSING_ENTITY, - RecognizeResult, - UnmatchedRangeEntity, - UnmatchedTextEntity, -) +from hassil.recognize import MISSING_ENTITY, RecognizeResult +from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity import voluptuous as vol from homeassistant.components import http, websocket_api @@ -19,7 +15,6 @@ from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.const import MATCH_ALL from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers import config_validation as cv, intent -from homeassistant.helpers.entity_component import EntityComponent from homeassistant.util import language as language_util from .agent_manager import ( @@ -28,14 +23,8 @@ from .agent_manager import ( async_get_agent, get_agent_manager, ) -from .const import DOMAIN -from .default_agent import ( - METADATA_CUSTOM_FILE, - METADATA_CUSTOM_SENTENCE, - DefaultAgent, - SentenceTriggerResult, - async_get_default_agent, -) +from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY +from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE from .entity import ConversationEntity from .models import ConversationInput @@ -47,6 +36,7 @@ def async_setup(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_process) websocket_api.async_register_command(hass, websocket_prepare) websocket_api.async_register_command(hass, websocket_list_agents) + websocket_api.async_register_command(hass, websocket_list_sentences) websocket_api.async_register_command(hass, websocket_hass_agent_debug) @@ -113,13 +103,11 @@ async def websocket_list_agents( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """List conversation agents and, optionally, if they support a given language.""" - entity_component: EntityComponent[ConversationEntity] = hass.data[DOMAIN] - country = msg.get("country") language = msg.get("language") agents = [] - for entity in entity_component.entities: + for entity in hass.data[DATA_COMPONENT].entities: supported_languages = entity.supported_languages if language and supported_languages != MATCH_ALL: supported_languages = language_util.matches( @@ -163,6 +151,26 @@ async def websocket_list_agents( connection.send_message(websocket_api.result_message(msg["id"], {"agents": agents})) +@websocket_api.websocket_command( + { + vol.Required("type"): "conversation/sentences/list", + } +) +@websocket_api.require_admin +@websocket_api.async_response +async def websocket_list_sentences( + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict +) -> None: + """List custom registered sentences.""" + agent = hass.data[DATA_DEFAULT_ENTITY] + + sentences = [] + for trigger_data in agent.trigger_sentences: + sentences.extend(trigger_data.sentences) + + connection.send_result(msg["id"], {"trigger_sentences": sentences}) + + @websocket_api.websocket_command( { vol.Required("type"): "conversation/agent/homeassistant/debug", @@ -176,46 +184,41 @@ async def websocket_hass_agent_debug( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Return intents that would be matched by the default agent for a list of sentences.""" - agent = async_get_default_agent(hass) - assert isinstance(agent, DefaultAgent) 
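
The rewritten websocket_hass_agent_debug handler resolves the default agent from hass.data[DATA_DEFAULT_ENTITY] and checks each sentence first for a sentence trigger, then for an intent match. A condensed, illustrative sketch of that per-sentence flow (the helper name and the returned keys are simplified):

from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY
from homeassistant.components.conversation.models import ConversationInput
from homeassistant.core import Context, HomeAssistant


async def debug_one_sentence(hass: HomeAssistant, sentence: str) -> dict | None:
    """Condensed version of the per-sentence debug handling."""
    agent = hass.data[DATA_DEFAULT_ENTITY]
    user_input = ConversationInput(
        text=sentence,
        context=Context(),
        conversation_id=None,
        device_id=None,
        language=hass.config.language,
        agent_id=None,
    )
    if trigger_result := await agent.async_recognize_sentence_trigger(user_input):
        # Matched a user-defined sentence trigger; the response would only be
        # known after actually running the trigger.
        return {
            "match": True,
            "source": "trigger",
            "sentence_template": trigger_result.sentence_template or "",
        }
    if intent_result := await agent.async_recognize_intent(user_input):
        return {
            "match": not intent_result.unmatched_entities,
            "intent": intent_result.intent.name,
        }
    return None
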
- results = [ - await agent.async_recognize( - ConversationInput( - text=sentence, - context=connection.context(msg), - conversation_id=None, - device_id=msg.get("device_id"), - language=msg.get("language", hass.config.language), - agent_id=None, - ) - ) - for sentence in msg["sentences"] - ] + agent = hass.data[DATA_DEFAULT_ENTITY] # Return results for each sentence in the same order as the input. result_dicts: list[dict[str, Any] | None] = [] - for result in results: + for sentence in msg["sentences"]: + user_input = ConversationInput( + text=sentence, + context=connection.context(msg), + conversation_id=None, + device_id=msg.get("device_id"), + language=msg.get("language", hass.config.language), + agent_id=None, + ) result_dict: dict[str, Any] | None = None - if isinstance(result, SentenceTriggerResult): + + if trigger_result := await agent.async_recognize_sentence_trigger(user_input): result_dict = { # Matched a user-defined sentence trigger. # We can't provide the response here without executing the # trigger. "match": True, "source": "trigger", - "sentence_template": result.sentence_template or "", + "sentence_template": trigger_result.sentence_template or "", } - elif isinstance(result, RecognizeResult): - successful_match = not result.unmatched_entities + elif intent_result := await agent.async_recognize_intent(user_input): + successful_match = not intent_result.unmatched_entities result_dict = { # Name of the matching intent (or the closest) "intent": { - "name": result.intent.name, + "name": intent_result.intent.name, }, # Slot values that would be received by the intent "slots": { # direct access to values entity_key: entity.text or entity.value - for entity_key, entity in result.entities.items() + for entity_key, entity in intent_result.entities.items() }, # Extra slot details, such as the originally matched text "details": { @@ -224,7 +227,7 @@ async def websocket_hass_agent_debug( "value": entity.value, "text": entity.text, } - for entity_key, entity in result.entities.items() + for entity_key, entity in intent_result.entities.items() }, # Entities/areas/etc. 
that would be targeted "targets": {}, @@ -233,24 +236,26 @@ async def websocket_hass_agent_debug( # Text of the sentence template that matched (or was closest) "sentence_template": "", # When match is incomplete, this will contain the best slot guesses - "unmatched_slots": _get_unmatched_slots(result), + "unmatched_slots": _get_unmatched_slots(intent_result), } if successful_match: result_dict["targets"] = { state.entity_id: {"matched": is_matched} - for state, is_matched in _get_debug_targets(hass, result) + for state, is_matched in _get_debug_targets(hass, intent_result) } - if result.intent_sentence is not None: - result_dict["sentence_template"] = result.intent_sentence.text + if intent_result.intent_sentence is not None: + result_dict["sentence_template"] = intent_result.intent_sentence.text # Inspect metadata to determine if this matched a custom sentence - if result.intent_metadata and result.intent_metadata.get( + if intent_result.intent_metadata and intent_result.intent_metadata.get( METADATA_CUSTOM_SENTENCE ): result_dict["source"] = "custom" - result_dict["file"] = result.intent_metadata.get(METADATA_CUSTOM_FILE) + result_dict["file"] = intent_result.intent_metadata.get( + METADATA_CUSTOM_FILE + ) else: result_dict["source"] = "builtin" diff --git a/homeassistant/components/conversation/icons.json b/homeassistant/components/conversation/icons.json index b39a1603b15..658783f9ae2 100644 --- a/homeassistant/components/conversation/icons.json +++ b/homeassistant/components/conversation/icons.json @@ -1,6 +1,10 @@ { "services": { - "process": "mdi:message-processing", - "reload": "mdi:reload" + "process": { + "service": "mdi:message-processing" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index d7a308b8b2b..41c9a2d2691 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.8.7"] + "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.9"] } diff --git a/homeassistant/components/conversation/models.py b/homeassistant/components/conversation/models.py index 902b52483e0..10218e76751 100644 --- a/homeassistant/components/conversation/models.py +++ b/homeassistant/components/conversation/models.py @@ -23,11 +23,33 @@ class ConversationInput: """User input to be processed.""" text: str + """User spoken text.""" + context: Context + """Context of the request.""" + conversation_id: str | None + """Unique identifier for the conversation.""" + device_id: str | None + """Unique identifier for the device.""" + language: str + """Language of the request.""" + agent_id: str | None = None + """Agent to use for processing.""" + + def as_dict(self) -> dict[str, Any]: + """Return input as a dict.""" + return { + "text": self.text, + "context": self.context.as_dict(), + "conversation_id": self.conversation_id, + "device_id": self.device_id, + "language": self.language, + "agent_id": self.agent_id, + } @dataclass(slots=True) diff --git a/homeassistant/components/conversation/trigger.py b/homeassistant/components/conversation/trigger.py index 0a4cbfcb7e5..24eb54c5694 100644 --- a/homeassistant/components/conversation/trigger.py +++ b/homeassistant/components/conversation/trigger.py @@ 
-4,7 +4,8 @@ from __future__ import annotations from typing import Any -from hassil.recognize import PUNCTUATION, RecognizeResult +from hassil.recognize import RecognizeResult +from hassil.util import PUNCTUATION_ALL import voluptuous as vol from homeassistant.const import CONF_COMMAND, CONF_PLATFORM @@ -14,14 +15,14 @@ from homeassistant.helpers.script import ScriptRunResult from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import UNDEFINED, ConfigType -from .const import DOMAIN -from .default_agent import DefaultAgent, async_get_default_agent +from .const import DATA_DEFAULT_ENTITY, DOMAIN +from .models import ConversationInput def has_no_punctuation(value: list[str]) -> list[str]: """Validate result does not contain punctuation.""" for sentence in value: - if PUNCTUATION.search(sentence): + if PUNCTUATION_ALL.search(sentence): raise vol.Invalid("sentence should not contain punctuation") return value @@ -62,7 +63,7 @@ async def async_attach_trigger( job = HassJob(action) async def call_action( - sentence: str, result: RecognizeResult, device_id: str | None + user_input: ConversationInput, result: RecognizeResult ) -> str | None: """Call action with right context.""" @@ -83,12 +84,13 @@ async def async_attach_trigger( trigger_input: dict[str, Any] = { # Satisfy type checker **trigger_data, "platform": DOMAIN, - "sentence": sentence, + "sentence": user_input.text, "details": details, "slots": { # direct access to values entity_name: entity["value"] for entity_name, entity in details.items() }, - "device_id": device_id, + "device_id": user_input.device_id, + "user_input": user_input.as_dict(), } # Wait for the automation to complete @@ -110,7 +112,4 @@ async def async_attach_trigger( # two trigger copies for who will provide a response. 
return None - default_agent = async_get_default_agent(hass) - assert isinstance(default_agent, DefaultAgent) - - return default_agent.register_trigger(sentences, call_action) + return hass.data[DATA_DEFAULT_ENTITY].register_trigger(sentences, call_action) diff --git a/homeassistant/components/cookidoo/__init__.py b/homeassistant/components/cookidoo/__init__.py new file mode 100644 index 00000000000..bb78f2a569d --- /dev/null +++ b/homeassistant/components/cookidoo/__init__.py @@ -0,0 +1,49 @@ +"""The Cookidoo integration.""" + +from __future__ import annotations + +from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig + +from homeassistant.const import ( + CONF_COUNTRY, + CONF_EMAIL, + CONF_LANGUAGE, + CONF_PASSWORD, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.TODO] + + +async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool: + """Set up Cookidoo from a config entry.""" + + cookidoo = Cookidoo( + async_get_clientsession(hass), + CookidooConfig( + email=entry.data[CONF_EMAIL], + password=entry.data[CONF_PASSWORD], + localization=CookidooLocalizationConfig( + country_code=entry.data[CONF_COUNTRY].lower(), + language=entry.data[CONF_LANGUAGE], + ), + ), + ) + + coordinator = CookidooDataUpdateCoordinator(hass, cookidoo, entry) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/cookidoo/config_flow.py b/homeassistant/components/cookidoo/config_flow.py new file mode 100644 index 00000000000..120ab162a6c --- /dev/null +++ b/homeassistant/components/cookidoo/config_flow.py @@ -0,0 +1,247 @@ +"""Config flow for Cookidoo integration.""" + +from __future__ import annotations + +from collections.abc import Mapping +import logging +from typing import Any + +from cookidoo_api import ( + Cookidoo, + CookidooAuthException, + CookidooConfig, + CookidooLocalizationConfig, + CookidooRequestException, + get_country_options, + get_localization_options, +) +import voluptuous as vol + +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + SOURCE_USER, + ConfigFlow, + ConfigFlowResult, +) +from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import ( + CountrySelector, + CountrySelectorConfig, + LanguageSelector, + LanguageSelectorConfig, + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +AUTH_DATA_SCHEMA = { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig( + type=TextSelectorType.EMAIL, + autocomplete="email", + ), + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ), + ), +} + + +class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Cookidoo.""" + + COUNTRY_DATA_SCHEMA: dict + LANGUAGE_DATA_SCHEMA: dict + + user_input: 
dict[str, Any] + + async def async_step_reconfigure( + self, user_input: dict[str, Any] + ) -> ConfigFlowResult: + """Perform reconfigure upon a user action.""" + return await self.async_step_user(user_input) + + async def async_step_user( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle the user step as well as serve for reconfiguration.""" + errors: dict[str, str] = {} + + if user_input is not None and not ( + errors := await self.validate_input(user_input) + ): + if self.source == SOURCE_USER: + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + self.user_input = user_input + return await self.async_step_language() + await self.generate_country_schema() + suggested_values: dict = {} + if self.source == SOURCE_RECONFIGURE: + reconfigure_entry = self._get_reconfigure_entry() + suggested_values = { + **suggested_values, + **reconfigure_entry.data, + } + if user_input is not None: + suggested_values = {**suggested_values, **user_input} + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + data_schema=vol.Schema( + {**AUTH_DATA_SCHEMA, **self.COUNTRY_DATA_SCHEMA} + ), + suggested_values=suggested_values, + ), + description_placeholders={"cookidoo": "Cookidoo"}, + errors=errors, + ) + + async def async_step_language( + self, + language_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Async language step to set up the connection.""" + errors: dict[str, str] = {} + if language_input is not None and not ( + errors := await self.validate_input(self.user_input, language_input) + ): + if self.source == SOURCE_USER: + return self.async_create_entry( + title="Cookidoo", data={**self.user_input, **language_input} + ) + reconfigure_entry = self._get_reconfigure_entry() + return self.async_update_reload_and_abort( + reconfigure_entry, + data={ + **reconfigure_entry.data, + **self.user_input, + **language_input, + }, + ) + + await self.generate_language_schema() + return self.async_show_form( + step_id="language", + data_schema=vol.Schema(self.LANGUAGE_DATA_SCHEMA), + description_placeholders={"cookidoo": "Cookidoo"}, + errors=errors, + ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + errors: dict[str, str] = {} + + reauth_entry = self._get_reauth_entry() + + if user_input is not None: + if not ( + errors := await self.validate_input({**reauth_entry.data, **user_input}) + ): + if user_input[CONF_EMAIL] != reauth_entry.data[CONF_EMAIL]: + self._async_abort_entries_match( + {CONF_EMAIL: user_input[CONF_EMAIL]} + ) + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=self.add_suggested_values_to_schema( + data_schema=vol.Schema(AUTH_DATA_SCHEMA), + suggested_values={CONF_EMAIL: reauth_entry.data[CONF_EMAIL]}, + ), + description_placeholders={"cookidoo": "Cookidoo"}, + errors=errors, + ) + + async def generate_country_schema(self) -> None: + """Generate country schema.""" + self.COUNTRY_DATA_SCHEMA = { + vol.Required(CONF_COUNTRY): CountrySelector( + CountrySelectorConfig( + countries=[ + country.upper() for country in await get_country_options() +
], + ) + ) + } + + async def generate_language_schema(self) -> None: + """Generate language schema.""" + self.LANGUAGE_DATA_SCHEMA = { + vol.Required(CONF_LANGUAGE): LanguageSelector( + LanguageSelectorConfig( + languages=[ + option.language + for option in await get_localization_options( + country=self.user_input[CONF_COUNTRY].lower() + ) + ], + native_name=True, + ), + ), + } + + async def validate_input( + self, + user_input: dict[str, Any], + language_input: dict[str, Any] | None = None, + ) -> dict[str, str]: + """Input Helper.""" + + errors: dict[str, str] = {} + + data_input: dict[str, Any] = {} + + if self.source == SOURCE_RECONFIGURE: + reconfigure_entry = self._get_reconfigure_entry() + data_input = {**data_input, **reconfigure_entry.data} + data_input = {**data_input, **user_input} + if language_input: + data_input = {**data_input, **language_input} + else: + data_input[CONF_LANGUAGE] = ( + await get_localization_options(country=data_input[CONF_COUNTRY].lower()) + )[0] # Pick any language to test login + + session = async_get_clientsession(self.hass) + cookidoo = Cookidoo( + session, + CookidooConfig( + email=data_input[CONF_EMAIL], + password=data_input[CONF_PASSWORD], + localization=CookidooLocalizationConfig( + country_code=data_input[CONF_COUNTRY].lower(), + language=data_input[CONF_LANGUAGE], + ), + ), + ) + try: + await cookidoo.login() + if language_input: + await cookidoo.get_additional_items() + except CookidooRequestException: + errors["base"] = "cannot_connect" + except CookidooAuthException: + errors["base"] = "invalid_auth" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + return errors diff --git a/homeassistant/components/cookidoo/const.py b/homeassistant/components/cookidoo/const.py new file mode 100644 index 00000000000..37c584404a0 --- /dev/null +++ b/homeassistant/components/cookidoo/const.py @@ -0,0 +1,3 @@ +"""Constants for the Cookidoo integration.""" + +DOMAIN = "cookidoo" diff --git a/homeassistant/components/cookidoo/coordinator.py b/homeassistant/components/cookidoo/coordinator.py new file mode 100644 index 00000000000..ad86d1fb9f1 --- /dev/null +++ b/homeassistant/components/cookidoo/coordinator.py @@ -0,0 +1,101 @@ +"""DataUpdateCoordinator for the Cookidoo integration.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import timedelta +import logging + +from cookidoo_api import ( + Cookidoo, + CookidooAdditionalItem, + CookidooAuthException, + CookidooException, + CookidooIngredientItem, + CookidooRequestException, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +type CookidooConfigEntry = ConfigEntry[CookidooDataUpdateCoordinator] + + +@dataclass +class CookidooData: + """Cookidoo data type.""" + + ingredient_items: list[CookidooIngredientItem] + additional_items: list[CookidooAdditionalItem] + + +class CookidooDataUpdateCoordinator(DataUpdateCoordinator[CookidooData]): + """A Cookidoo Data Update Coordinator.""" + + config_entry: CookidooConfigEntry + + def __init__( + self, hass: HomeAssistant, cookidoo: Cookidoo, entry: CookidooConfigEntry + ) -> None: + """Initialize the Cookidoo data coordinator.""" + super().__init__( + hass, + _LOGGER, + 
name=DOMAIN, + update_interval=timedelta(seconds=90), + config_entry=entry, + ) + self.cookidoo = cookidoo + + async def _async_setup(self) -> None: + try: + await self.cookidoo.login() + except CookidooRequestException as e: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="setup_request_exception", + ) from e + except CookidooAuthException as e: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="setup_authentication_exception", + translation_placeholders={ + CONF_EMAIL: self.config_entry.data[CONF_EMAIL] + }, + ) from e + + async def _async_update_data(self) -> CookidooData: + try: + ingredient_items = await self.cookidoo.get_ingredient_items() + additional_items = await self.cookidoo.get_additional_items() + except CookidooAuthException: + try: + await self.cookidoo.refresh_token() + except CookidooAuthException as exc: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="setup_authentication_exception", + translation_placeholders={ + CONF_EMAIL: self.config_entry.data[CONF_EMAIL] + }, + ) from exc + _LOGGER.debug( + "Authentication failed but re-authentication was successful, trying again later" + ) + return self.data + except CookidooException as e: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_exception", + ) from e + + return CookidooData( + ingredient_items=ingredient_items, additional_items=additional_items + ) diff --git a/homeassistant/components/cookidoo/entity.py b/homeassistant/components/cookidoo/entity.py new file mode 100644 index 00000000000..5c8f3ec8441 --- /dev/null +++ b/homeassistant/components/cookidoo/entity.py @@ -0,0 +1,30 @@ +"""Base entity for the Cookidoo integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import CookidooDataUpdateCoordinator + + +class CookidooBaseEntity(CoordinatorEntity[CookidooDataUpdateCoordinator]): + """Cookidoo base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: CookidooDataUpdateCoordinator, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + name="Cookidoo", + identifiers={(DOMAIN, coordinator.config_entry.entry_id)}, + manufacturer="Vorwerk International & Co. 
KmG", + model="Cookidoo - Thermomix® recipe portal", + ) diff --git a/homeassistant/components/cookidoo/icons.json b/homeassistant/components/cookidoo/icons.json new file mode 100644 index 00000000000..36c0724331a --- /dev/null +++ b/homeassistant/components/cookidoo/icons.json @@ -0,0 +1,12 @@ +{ + "entity": { + "todo": { + "ingredient_list": { + "default": "mdi:cart-plus" + }, + "additional_item_list": { + "default": "mdi:cart-plus" + } + } + } +} diff --git a/homeassistant/components/cookidoo/manifest.json b/homeassistant/components/cookidoo/manifest.json new file mode 100644 index 00000000000..59d58200fdf --- /dev/null +++ b/homeassistant/components/cookidoo/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "cookidoo", + "name": "Cookidoo", + "codeowners": ["@miaucl"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/cookidoo", + "integration_type": "service", + "iot_class": "cloud_polling", + "quality_scale": "silver", + "requirements": ["cookidoo-api==0.10.0"] +} diff --git a/homeassistant/components/cookidoo/quality_scale.yaml b/homeassistant/components/cookidoo/quality_scale.yaml new file mode 100644 index 00000000000..95a35829079 --- /dev/null +++ b/homeassistant/components/cookidoo/quality_scale.yaml @@ -0,0 +1,90 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: No service actions implemented + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: No service actions implemented + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: + status: exempt + comment: No special external action required + entity-event-setup: + status: exempt + comment: No callbacks are implemented + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: + status: done + comment: Offloaded to coordinator + entity-unavailable: + status: done + comment: Offloaded to coordinator + action-exceptions: + status: done + comment: Only providing todo actions + reauthentication-flow: done + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: No options flow + + # Gold + entity-translations: done + entity-device-class: + status: exempt + comment: currently no platform with device classes + devices: done + entity-category: done + entity-disabled-by-default: + status: exempt + comment: No disabled entities implemented + discovery: + status: exempt + comment: Nothing to discover + stale-devices: + status: exempt + comment: No stale entities possible + diagnostics: todo + exception-translations: done + icon-translations: done + reconfiguration-flow: done + dynamic-devices: + status: exempt + comment: No dynamic entities available + discovery-update-info: + status: exempt + comment: No discoverable entities implemented + repair-issues: + status: exempt + comment: No issues/repairs + docs-use-cases: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git 
a/homeassistant/components/cookidoo/strings.json b/homeassistant/components/cookidoo/strings.json new file mode 100644 index 00000000000..14344bed13d --- /dev/null +++ b/homeassistant/components/cookidoo/strings.json @@ -0,0 +1,80 @@ +{ + "config": { + "step": { + "user": { + "title": "Setup {cookidoo}", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]", + "country": "Country" + }, + "data_description": { + "email": "Email used to access your {cookidoo} account.", + "password": "Password used to access your {cookidoo} account.", + "country": "Pick your country for the {cookidoo} content." + } + }, + "language": { + "title": "Setup {cookidoo}", + "data": { + "language": "[%key:common::config_flow::data::language%]" + }, + "data_description": { + "language": "Pick your language for the {cookidoo} content." + } + }, + "reauth_confirm": { + "title": "Login again to {cookidoo}", + "description": "Please log in to {cookidoo} again to continue using this integration.", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::cookidoo::config::step::user::data_description::email%]", + "password": "[%key:component::cookidoo::config::step::user::data_description::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + } + }, + "entity": { + "todo": { + "ingredient_list": { + "name": "Shopping list" + }, + "additional_item_list": { + "name": "Additional purchases" + } + } + }, + "exceptions": { + "todo_save_item_failed": { + "message": "Failed to save {name} to Cookidoo shopping list" + }, + "todo_update_item_failed": { + "message": "Failed to update {name} in Cookidoo shopping list" + }, + "todo_delete_item_failed": { + "message": "Failed to delete {count} item(s) from Cookidoo shopping list" + }, + "setup_request_exception": { + "message": "Failed to connect to server, try again later" + }, + "setup_authentication_exception": { + "message": "Authentication failed for {email}, check your email and password" + }, + "update_exception": { + "message": "Unable to connect and retrieve data from cookidoo" + } + } +} diff --git a/homeassistant/components/cookidoo/todo.py b/homeassistant/components/cookidoo/todo.py new file mode 100644 index 00000000000..4a70dadc65a --- /dev/null +++ b/homeassistant/components/cookidoo/todo.py @@ -0,0 +1,185 @@ +"""Todo platform for the Cookidoo integration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from cookidoo_api import ( + CookidooAdditionalItem, + CookidooException, + CookidooIngredientItem, +) + +from homeassistant.components.todo import ( + TodoItem, + TodoItemStatus, + TodoListEntity, + TodoListEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import CookidooConfigEntry, 
CookidooDataUpdateCoordinator +from .entity import CookidooBaseEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: CookidooConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the todo list from a config entry created in the integrations UI.""" + coordinator = config_entry.runtime_data + + async_add_entities( + [ + CookidooIngredientsTodoListEntity(coordinator), + CookidooAdditionalItemTodoListEntity(coordinator), + ] + ) + + +class CookidooIngredientsTodoListEntity(CookidooBaseEntity, TodoListEntity): + """A To-do List representation of the ingredients in the Cookidoo Shopping List.""" + + _attr_translation_key = "ingredient_list" + _attr_supported_features = TodoListEntityFeature.UPDATE_TODO_ITEM + + def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_ingredients" + + @property + def todo_items(self) -> list[TodoItem]: + """Return the todo ingredients.""" + return [ + TodoItem( + uid=item.id, + summary=item.name, + description=item.description or "", + status=( + TodoItemStatus.COMPLETED + if item.is_owned + else TodoItemStatus.NEEDS_ACTION + ), + ) + for item in self.coordinator.data.ingredient_items + ] + + async def async_update_todo_item(self, item: TodoItem) -> None: + """Update an ingredient in the To-do list. + + Cookidoo ingredients can be changed in state, but not in summary or description. This is currently not possible to distinguish in Home Assistant, so such changes fail silently. + """ + try: + if TYPE_CHECKING: + assert item.uid + await self.coordinator.cookidoo.edit_ingredient_items_ownership( + [ + CookidooIngredientItem( + id=item.uid, + name="", + description="", + is_owned=item.status == TodoItemStatus.COMPLETED, + ) + ] + ) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_update_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e + + await self.coordinator.async_refresh() + + +class CookidooAdditionalItemTodoListEntity(CookidooBaseEntity, TodoListEntity): + """A To-do List representation of the additional items in the Cookidoo Shopping List.""" + + _attr_translation_key = "additional_item_list" + _attr_supported_features = ( + TodoListEntityFeature.CREATE_TODO_ITEM + | TodoListEntityFeature.UPDATE_TODO_ITEM + | TodoListEntityFeature.DELETE_TODO_ITEM + ) + + def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_additional_items" + + @property + def todo_items(self) -> list[TodoItem]: + """Return the todo items.""" + + return [ + TodoItem( + uid=item.id, + summary=item.name, + status=( + TodoItemStatus.COMPLETED + if item.is_owned + else TodoItemStatus.NEEDS_ACTION + ), + ) + for item in self.coordinator.data.additional_items + ] + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Add an item to the To-do list.""" + + try: + if TYPE_CHECKING: + assert item.summary + await self.coordinator.cookidoo.add_additional_items([item.summary]) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_save_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e + + await self.coordinator.async_refresh() + + async def
async_update_todo_item(self, item: TodoItem) -> None: + """Update an item in the To-do list.""" + + try: + if TYPE_CHECKING: + assert item.uid + assert item.summary + new_item = CookidooAdditionalItem( + id=item.uid, + name=item.summary, + is_owned=item.status == TodoItemStatus.COMPLETED, + ) + await self.coordinator.cookidoo.edit_additional_items_ownership([new_item]) + await self.coordinator.cookidoo.edit_additional_items([new_item]) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_update_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e + + await self.coordinator.async_refresh() + + async def async_delete_todo_items(self, uids: list[str]) -> None: + """Delete an item from the To-do list.""" + + try: + await self.coordinator.cookidoo.remove_additional_items(uids) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_delete_item_failed", + translation_placeholders={"count": str(len(uids))}, + ) from e + + await self.coordinator.async_refresh() diff --git a/homeassistant/components/coolmaster/climate.py b/homeassistant/components/coolmaster/climate.py index d3cb7122109..29be416d57e 100644 --- a/homeassistant/components/coolmaster/climate.py +++ b/homeassistant/components/coolmaster/climate.py @@ -55,7 +55,6 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity): """Representation of a coolmaster climate device.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, unit_id, info, supported_modes): """Initialize the climate device.""" diff --git a/homeassistant/components/counter/icons.json b/homeassistant/components/counter/icons.json index 1e0ef54bbb7..59cd0bb7121 100644 --- a/homeassistant/components/counter/icons.json +++ b/homeassistant/components/counter/icons.json @@ -1,8 +1,16 @@ { "services": { - "decrement": "mdi:numeric-negative-1", - "increment": "mdi:numeric-positive-1", - "reset": "mdi:refresh", - "set_value": "mdi:counter" + "decrement": { + "service": "mdi:numeric-negative-1" + }, + "increment": { + "service": "mdi:numeric-positive-1" + }, + "reset": { + "service": "mdi:refresh" + }, + "set_value": { + "service": "mdi:counter" + } } } diff --git a/homeassistant/components/counter/strings.json b/homeassistant/components/counter/strings.json index fb1f6467f4a..2c52fb43b9f 100644 --- a/homeassistant/components/counter/strings.json +++ b/homeassistant/components/counter/strings.json @@ -29,19 +29,19 @@ "services": { "decrement": { "name": "Decrement", - "description": "Decrements a counter." + "description": "Decrements a counter by its step size." }, "increment": { "name": "Increment", - "description": "Increments a counter." + "description": "Increments a counter by its step size." }, "reset": { "name": "Reset", - "description": "Resets a counter." + "description": "Resets a counter to its initial value."
}, "set_value": { "name": "Set", - "description": "Sets the counter value.", + "description": "Sets the counter to a specific value.", "fields": { "value": { "name": "Value", diff --git a/homeassistant/components/cover/__init__.py b/homeassistant/components/cover/__init__.py index 90d2b644810..9ce526712f0 100644 --- a/homeassistant/components/cover/__init__.py +++ b/homeassistant/components/cover/__init__.py @@ -6,14 +6,14 @@ from collections.abc import Callable from datetime import timedelta from enum import IntFlag, StrEnum import functools as ft -from functools import cached_property import logging from typing import Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( +from homeassistant.const import ( # noqa: F401 SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, @@ -41,17 +41,37 @@ from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass +from homeassistant.util.hass_dict import HassKey -from .const import DOMAIN +from .const import DOMAIN, INTENT_CLOSE_COVER, INTENT_OPEN_COVER # noqa: F401 _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[CoverEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE SCAN_INTERVAL = timedelta(seconds=15) +class CoverState(StrEnum): + """State of Cover entities.""" + + CLOSED = "closed" + CLOSING = "closing" + OPEN = "open" + OPENING = "opening" + + +# STATE_* below are deprecated as of 2024.11 +# when imported from homeassistant.components.cover +# use the CoverState enum instead. +_DEPRECATED_STATE_CLOSED = DeprecatedConstantEnum(CoverState.CLOSED, "2025.11") +_DEPRECATED_STATE_CLOSING = DeprecatedConstantEnum(CoverState.CLOSING, "2025.11") +_DEPRECATED_STATE_OPEN = DeprecatedConstantEnum(CoverState.OPEN, "2025.11") +_DEPRECATED_STATE_OPENING = DeprecatedConstantEnum(CoverState.OPENING, "2025.11") + + class CoverDeviceClass(StrEnum): """Device class for cover.""" @@ -69,36 +89,8 @@ class CoverDeviceClass(StrEnum): DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.Coerce(CoverDeviceClass)) - -# DEVICE_CLASS* below are deprecated as of 2021.12 -# use the CoverDeviceClass enum instead. 
DEVICE_CLASSES = [cls.value for cls in CoverDeviceClass] -_DEPRECATED_DEVICE_CLASS_AWNING = DeprecatedConstantEnum( - CoverDeviceClass.AWNING, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_BLIND = DeprecatedConstantEnum( - CoverDeviceClass.BLIND, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_CURTAIN = DeprecatedConstantEnum( - CoverDeviceClass.CURTAIN, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_DAMPER = DeprecatedConstantEnum( - CoverDeviceClass.DAMPER, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_DOOR = DeprecatedConstantEnum(CoverDeviceClass.DOOR, "2025.1") -_DEPRECATED_DEVICE_CLASS_GARAGE = DeprecatedConstantEnum( - CoverDeviceClass.GARAGE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_GATE = DeprecatedConstantEnum(CoverDeviceClass.GATE, "2025.1") -_DEPRECATED_DEVICE_CLASS_SHADE = DeprecatedConstantEnum( - CoverDeviceClass.SHADE, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SHUTTER = DeprecatedConstantEnum( - CoverDeviceClass.SHUTTER, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_WINDOW = DeprecatedConstantEnum( - CoverDeviceClass.WINDOW, "2025.1" -) + # mypy: disallow-any-generics @@ -116,27 +108,6 @@ class CoverEntityFeature(IntFlag): SET_TILT_POSITION = 128 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the CoverEntityFeature enum instead. -_DEPRECATED_SUPPORT_OPEN = DeprecatedConstantEnum(CoverEntityFeature.OPEN, "2025.1") -_DEPRECATED_SUPPORT_CLOSE = DeprecatedConstantEnum(CoverEntityFeature.CLOSE, "2025.1") -_DEPRECATED_SUPPORT_SET_POSITION = DeprecatedConstantEnum( - CoverEntityFeature.SET_POSITION, "2025.1" -) -_DEPRECATED_SUPPORT_STOP = DeprecatedConstantEnum(CoverEntityFeature.STOP, "2025.1") -_DEPRECATED_SUPPORT_OPEN_TILT = DeprecatedConstantEnum( - CoverEntityFeature.OPEN_TILT, "2025.1" -) -_DEPRECATED_SUPPORT_CLOSE_TILT = DeprecatedConstantEnum( - CoverEntityFeature.CLOSE_TILT, "2025.1" -) -_DEPRECATED_SUPPORT_STOP_TILT = DeprecatedConstantEnum( - CoverEntityFeature.STOP_TILT, "2025.1" -) -_DEPRECATED_SUPPORT_SET_TILT_POSITION = DeprecatedConstantEnum( - CoverEntityFeature.SET_TILT_POSITION, "2025.1" -) - ATTR_CURRENT_POSITION = "current_position" ATTR_CURRENT_TILT_POSITION = "current_tilt_position" ATTR_POSITION = "position" @@ -146,12 +117,12 @@ ATTR_TILT_POSITION = "tilt_position" @bind_hass def is_closed(hass: HomeAssistant, entity_id: str) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(entity_id, STATE_CLOSED) + return hass.states.is_state(entity_id, CoverState.CLOSED) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for covers.""" - component = hass.data[DOMAIN] = EntityComponent[CoverEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[CoverEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -231,14 +202,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[CoverEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[CoverEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class CoverEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -303,15 +272,15 @@ class 
CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Return the state of the cover.""" if self.is_opening: self._cover_is_last_toggle_direction_open = True - return STATE_OPENING + return CoverState.OPENING if self.is_closing: self._cover_is_last_toggle_direction_open = False - return STATE_CLOSING + return CoverState.CLOSING if (closed := self.is_closed) is None: return None - return STATE_CLOSED if closed else STATE_OPEN + return CoverState.CLOSED if closed else CoverState.OPEN @final @property @@ -331,10 +300,6 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def supported_features(self) -> CoverEntityFeature: """Flag supported features.""" if (features := self._attr_supported_features) is not None: - if type(features) is int: # noqa: E721 - new_features = CoverEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features return features supported_features = ( diff --git a/homeassistant/components/cover/const.py b/homeassistant/components/cover/const.py index dd3e8b435c9..e9bbf81e5f5 100644 --- a/homeassistant/components/cover/const.py +++ b/homeassistant/components/cover/const.py @@ -1,3 +1,6 @@ """Constants for cover entity platform.""" DOMAIN = "cover" + +INTENT_OPEN_COVER = "HassOpenCover" +INTENT_CLOSE_COVER = "HassCloseCover" diff --git a/homeassistant/components/cover/device_condition.py b/homeassistant/components/cover/device_condition.py index 9c746284fe5..f1d89a0e1eb 100644 --- a/homeassistant/components/cover/device_condition.py +++ b/homeassistant/components/cover/device_condition.py @@ -12,10 +12,6 @@ from homeassistant.const import ( CONF_DOMAIN, CONF_ENTITY_ID, CONF_TYPE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -27,7 +23,7 @@ from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA from homeassistant.helpers.entity import get_supported_features from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from . import DOMAIN, CoverEntityFeature +from . 
import DOMAIN, CoverEntityFeature, CoverState # mypy: disallow-any-generics @@ -128,13 +124,13 @@ def async_condition_from_config( if config[CONF_TYPE] in STATE_CONDITION_TYPES: if config[CONF_TYPE] == "is_open": - state = STATE_OPEN + state = CoverState.OPEN elif config[CONF_TYPE] == "is_closed": - state = STATE_CLOSED + state = CoverState.CLOSED elif config[CONF_TYPE] == "is_opening": - state = STATE_OPENING + state = CoverState.OPENING elif config[CONF_TYPE] == "is_closing": - state = STATE_CLOSING + state = CoverState.CLOSING def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool: """Test if an entity is a certain state.""" diff --git a/homeassistant/components/cover/device_trigger.py b/homeassistant/components/cover/device_trigger.py index 302b1d4340a..0f65ef80a7f 100644 --- a/homeassistant/components/cover/device_trigger.py +++ b/homeassistant/components/cover/device_trigger.py @@ -19,10 +19,6 @@ from homeassistant.const import ( CONF_PLATFORM, CONF_TYPE, CONF_VALUE_TEMPLATE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers import config_validation as cv, entity_registry as er @@ -30,7 +26,7 @@ from homeassistant.helpers.entity import get_supported_features from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN, CoverEntityFeature +from . import DOMAIN, CoverEntityFeature, CoverState POSITION_TRIGGER_TYPES = {"position", "tilt_position"} STATE_TRIGGER_TYPES = {"opened", "closed", "opening", "closing"} @@ -147,13 +143,13 @@ async def async_attach_trigger( """Attach a trigger.""" if config[CONF_TYPE] in STATE_TRIGGER_TYPES: if config[CONF_TYPE] == "opened": - to_state = STATE_OPEN + to_state = CoverState.OPEN elif config[CONF_TYPE] == "closed": - to_state = STATE_CLOSED + to_state = CoverState.CLOSED elif config[CONF_TYPE] == "opening": - to_state = STATE_OPENING + to_state = CoverState.OPENING elif config[CONF_TYPE] == "closing": - to_state = STATE_CLOSING + to_state = CoverState.CLOSING state_config = { CONF_PLATFORM: "state", diff --git a/homeassistant/components/cover/icons.json b/homeassistant/components/cover/icons.json index f2edaaa0893..91775fe634d 100644 --- a/homeassistant/components/cover/icons.json +++ b/homeassistant/components/cover/icons.json @@ -78,15 +78,35 @@ } }, "services": { - "close_cover": "mdi:arrow-down-box", - "close_cover_tilt": "mdi:arrow-bottom-left", - "open_cover": "mdi:arrow-up-box", - "open_cover_tilt": "mdi:arrow-top-right", - "set_cover_position": "mdi:arrow-down-box", - "set_cover_tilt_position": "mdi:arrow-top-right", - "stop_cover": "mdi:stop", - "stop_cover_tilt": "mdi:stop", - "toggle": "mdi:arrow-up-down", - "toggle_cover_tilt": "mdi:arrow-top-right-bottom-left" + "close_cover": { + "service": "mdi:arrow-down-box" + }, + "close_cover_tilt": { + "service": "mdi:arrow-bottom-left" + }, + "open_cover": { + "service": "mdi:arrow-up-box" + }, + "open_cover_tilt": { + "service": "mdi:arrow-top-right" + }, + "set_cover_position": { + "service": "mdi:arrow-down-box" + }, + "set_cover_tilt_position": { + "service": "mdi:arrow-top-right" + }, + "stop_cover": { + "service": "mdi:stop" + }, + "stop_cover_tilt": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:arrow-up-down" + }, + "toggle_cover_tilt": { + "service": "mdi:arrow-top-right-bottom-left" + } } } diff --git a/homeassistant/components/cover/intent.py 
b/homeassistant/components/cover/intent.py index 7580cff063a..dfc7d0f69a0 100644 --- a/homeassistant/components/cover/intent.py +++ b/homeassistant/components/cover/intent.py @@ -4,10 +4,7 @@ from homeassistant.const import SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER from homeassistant.core import HomeAssistant from homeassistant.helpers import intent -from . import DOMAIN, CoverDeviceClass - -INTENT_OPEN_COVER = "HassOpenCover" -INTENT_CLOSE_COVER = "HassCloseCover" +from . import DOMAIN, INTENT_CLOSE_COVER, INTENT_OPEN_COVER, CoverDeviceClass async def async_setup_intents(hass: HomeAssistant) -> None: diff --git a/homeassistant/components/cover/reproduce_state.py b/homeassistant/components/cover/reproduce_state.py index 59f3df61795..307fe5f11bd 100644 --- a/homeassistant/components/cover/reproduce_state.py +++ b/homeassistant/components/cover/reproduce_state.py @@ -15,10 +15,6 @@ from homeassistant.const import ( SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import Context, HomeAssistant, State @@ -28,11 +24,17 @@ from . import ( ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN, + CoverState, ) _LOGGER = logging.getLogger(__name__) -VALID_STATES = {STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING} +VALID_STATES = { + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, + CoverState.OPENING, +} async def _async_reproduce_state( @@ -72,9 +74,9 @@ async def _async_reproduce_state( == state.attributes.get(ATTR_CURRENT_POSITION) ): # Open/Close - if state.state in [STATE_CLOSED, STATE_CLOSING]: + if state.state in [CoverState.CLOSED, CoverState.CLOSING]: service = SERVICE_CLOSE_COVER - elif state.state in [STATE_OPEN, STATE_OPENING]: + elif state.state in [CoverState.OPEN, CoverState.OPENING]: if ( ATTR_CURRENT_POSITION in cur_state.attributes and ATTR_CURRENT_POSITION in state.attributes diff --git a/homeassistant/components/cppm_tracker/device_tracker.py b/homeassistant/components/cppm_tracker/device_tracker.py index a7a1a1b99e8..b6fdc0a8889 100644 --- a/homeassistant/components/cppm_tracker/device_tracker.py +++ b/homeassistant/components/cppm_tracker/device_tracker.py @@ -9,7 +9,7 @@ from clearpasspy import ClearPass import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -36,11 +36,13 @@ _LOGGER = logging.getLogger(__name__) def get_scanner(hass: HomeAssistant, config: ConfigType) -> CPPMDeviceScanner | None: """Initialize Scanner.""" + config = config[DEVICE_TRACKER_DOMAIN] + data = { - "server": config[DOMAIN][CONF_HOST], + "server": config[CONF_HOST], "grant_type": GRANT_TYPE, - "secret": config[DOMAIN][CONF_API_KEY], - "client": config[DOMAIN][CONF_CLIENT_ID], + "secret": config[CONF_API_KEY], + "client": config[CONF_CLIENT_ID], } cppm = ClearPass(data) if cppm.access_token is None: diff --git a/homeassistant/components/cppm_tracker/manifest.json b/homeassistant/components/cppm_tracker/manifest.json index d8c387cdbf4..ca2fdf71a45 100644 --- a/homeassistant/components/cppm_tracker/manifest.json +++ b/homeassistant/components/cppm_tracker/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/cppm_tracker", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["clearpasspy==1.0.2"] } diff --git a/homeassistant/components/cpuspeed/config_flow.py 
b/homeassistant/components/cpuspeed/config_flow.py index ac35cc0fc4f..21dc577b5bf 100644 --- a/homeassistant/components/cpuspeed/config_flow.py +++ b/homeassistant/components/cpuspeed/config_flow.py @@ -23,7 +23,6 @@ class CPUSpeedFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" await self.async_set_unique_id(DOMAIN) - self._abort_if_unique_id_configured() if user_input is None: return self.async_show_form(step_id="user") diff --git a/homeassistant/components/cpuspeed/manifest.json b/homeassistant/components/cpuspeed/manifest.json index ff3a41d9c09..0c7f549a1b9 100644 --- a/homeassistant/components/cpuspeed/manifest.json +++ b/homeassistant/components/cpuspeed/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/cpuspeed", "integration_type": "device", "iot_class": "local_push", - "requirements": ["py-cpuinfo==9.0.0"] + "requirements": ["py-cpuinfo==9.0.0"], + "single_config_entry": true } diff --git a/homeassistant/components/cpuspeed/strings.json b/homeassistant/components/cpuspeed/strings.json index e82c6a0db12..6f4b3133b1b 100644 --- a/homeassistant/components/cpuspeed/strings.json +++ b/homeassistant/components/cpuspeed/strings.json @@ -8,7 +8,6 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::single_instance_allowed%]", "not_compatible": "Unable to get CPU information, this integration is not compatible with your system" } } diff --git a/homeassistant/components/crownstone/config_flow.py b/homeassistant/components/crownstone/config_flow.py index 0e707c0805a..2a96098421a 100644 --- a/homeassistant/components/crownstone/config_flow.py +++ b/homeassistant/components/crownstone/config_flow.py @@ -49,7 +49,7 @@ class BaseCrownstoneFlowHandler(ConfigEntryBaseFlow): cloud: CrownstoneCloud def __init__( - self, flow_type: str, create_entry_cb: Callable[..., ConfigFlowResult] + self, flow_type: str, create_entry_cb: Callable[[], ConfigFlowResult] ) -> None: """Set up flow instance.""" self.flow_type = flow_type @@ -177,7 +177,7 @@ class CrownstoneConfigFlowHandler(BaseCrownstoneFlowHandler, ConfigFlow, domain= elif auth_error.type == "LOGIN_FAILED_EMAIL_NOT_VERIFIED": errors["base"] = "account_not_verified" except CrownstoneUnknownError: - errors["base"] = "unknown_error" + errors["base"] = "unknown" # show form again, with the errors if errors: @@ -213,18 +213,19 @@ class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Crownstone options.""" super().__init__(OPTIONS_FLOW, self.async_create_new_entry) - self.entry = config_entry - self.updated_options = config_entry.options.copy() + self.options = config_entry.options.copy() async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage Crownstone options.""" - self.cloud: CrownstoneCloud = self.hass.data[DOMAIN][self.entry.entry_id].cloud + self.cloud: CrownstoneCloud = self.hass.data[DOMAIN][ + self.config_entry.entry_id + ].cloud spheres = {sphere.name: sphere.cloud_id for sphere in self.cloud.cloud_data} - usb_path = self.entry.options.get(CONF_USB_PATH) - usb_sphere = self.entry.options.get(CONF_USB_SPHERE) + usb_path = self.config_entry.options.get(CONF_USB_PATH) + usb_sphere = self.config_entry.options.get(CONF_USB_SPHERE) options_schema = vol.Schema( {vol.Optional(CONF_USE_USB_OPTION, default=usb_path is not None): bool} @@ -243,14 +244,14 @@ class 
CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow): if user_input[CONF_USE_USB_OPTION] and usb_path is None: return await self.async_step_usb_config() if not user_input[CONF_USE_USB_OPTION] and usb_path is not None: - self.updated_options[CONF_USB_PATH] = None - self.updated_options[CONF_USB_SPHERE] = None + self.options[CONF_USB_PATH] = None + self.options[CONF_USB_SPHERE] = None elif ( CONF_USB_SPHERE_OPTION in user_input and spheres[user_input[CONF_USB_SPHERE_OPTION]] != usb_sphere ): sphere_id = spheres[user_input[CONF_USB_SPHERE_OPTION]] - self.updated_options[CONF_USB_SPHERE] = sphere_id + self.options[CONF_USB_SPHERE] = sphere_id return self.async_create_new_entry() @@ -260,7 +261,7 @@ class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow): """Create a new entry.""" # these attributes will only change when a usb was configured if self.usb_path is not None and self.usb_sphere_id is not None: - self.updated_options[CONF_USB_PATH] = self.usb_path - self.updated_options[CONF_USB_SPHERE] = self.usb_sphere_id + self.options[CONF_USB_PATH] = self.usb_path + self.options[CONF_USB_SPHERE] = self.usb_sphere_id - return super().async_create_entry(title="", data=self.updated_options) + return super().async_create_entry(title="", data=self.options) diff --git a/homeassistant/components/crownstone/devices.py b/homeassistant/components/crownstone/entity.py similarity index 96% rename from homeassistant/components/crownstone/devices.py rename to homeassistant/components/crownstone/entity.py index 4995702701d..cb06a5fb00d 100644 --- a/homeassistant/components/crownstone/devices.py +++ b/homeassistant/components/crownstone/entity.py @@ -10,7 +10,7 @@ from homeassistant.helpers.entity import Entity from .const import CROWNSTONE_INCLUDE_TYPES, DOMAIN -class CrownstoneBaseEntity(Entity): +class CrownstoneEntity(Entity): """Base entity class for Crownstone devices.""" _attr_should_poll = False diff --git a/homeassistant/components/crownstone/light.py b/homeassistant/components/crownstone/light.py index 37904408606..16faa3a36d2 100644 --- a/homeassistant/components/crownstone/light.py +++ b/homeassistant/components/crownstone/light.py @@ -24,7 +24,7 @@ from .const import ( SIG_CROWNSTONE_STATE_UPDATE, SIG_UART_STATE_CHANGE, ) -from .devices import CrownstoneBaseEntity +from .entity import CrownstoneEntity from .helpers import map_from_to if TYPE_CHECKING: @@ -39,7 +39,7 @@ async def async_setup_entry( """Set up crownstones from a config entry.""" manager: CrownstoneEntryManager = hass.data[DOMAIN][config_entry.entry_id] - entities: list[CrownstoneEntity] = [] + entities: list[CrownstoneLightEntity] = [] # Add Crownstone entities that support switching/dimming for sphere in manager.cloud.cloud_data: @@ -47,10 +47,10 @@ async def async_setup_entry( if crownstone.type in CROWNSTONE_INCLUDE_TYPES: # Crownstone can communicate with Crownstone USB if manager.uart and sphere.cloud_id == manager.usb_sphere_id: - entities.append(CrownstoneEntity(crownstone, manager.uart)) + entities.append(CrownstoneLightEntity(crownstone, manager.uart)) # Crownstone can't communicate with Crownstone USB else: - entities.append(CrownstoneEntity(crownstone)) + entities.append(CrownstoneLightEntity(crownstone)) async_add_entities(entities) @@ -65,7 +65,7 @@ def hass_to_crownstone_state(value: int) -> int: return map_from_to(value, 0, 255, 0, 100) -class CrownstoneEntity(CrownstoneBaseEntity, LightEntity): +class CrownstoneLightEntity(CrownstoneEntity, LightEntity): """Representation of a 
crownstone. Light platform is used to support dimming. diff --git a/homeassistant/components/cups/manifest.json b/homeassistant/components/cups/manifest.json index 3e5b46770fb..c8f19236ce7 100644 --- a/homeassistant/components/cups/manifest.json +++ b/homeassistant/components/cups/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@fabaff"], "documentation": "https://www.home-assistant.io/integrations/cups", "iot_class": "local_polling", - "requirements": ["pycups==1.9.73"] + "quality_scale": "legacy", + "requirements": ["pycups==2.0.4"] } diff --git a/homeassistant/components/currencylayer/manifest.json b/homeassistant/components/currencylayer/manifest.json index d66331c4ab0..82d9d4050d4 100644 --- a/homeassistant/components/currencylayer/manifest.json +++ b/homeassistant/components/currencylayer/manifest.json @@ -3,5 +3,6 @@ "name": "currencylayer", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/currencylayer", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/daikin/__init__.py b/homeassistant/components/daikin/__init__.py index 4da6bcee50b..c58578071ee 100644 --- a/homeassistant/components/daikin/__init__.py +++ b/homeassistant/components/daikin/__init__.py @@ -3,9 +3,7 @@ from __future__ import annotations import asyncio -from datetime import timedelta import logging -from typing import Any from aiohttp import ClientConnectionError from pydaikin.daikin_base import Appliance @@ -23,15 +21,13 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo -from homeassistant.util import Throttle +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC from .const import DOMAIN, KEY_MAC, TIMEOUT +from .coordinator import DaikinCoordinator _LOGGER = logging.getLogger(__name__) -PARALLEL_UPDATES = 0 -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] @@ -43,19 +39,32 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if entry.unique_id is None or ".local" in entry.unique_id: hass.config_entries.async_update_entry(entry, unique_id=conf[KEY_MAC]) - daikin_api = await daikin_api_setup( - hass, - conf[CONF_HOST], - conf.get(CONF_API_KEY), - conf.get(CONF_UUID), - conf.get(CONF_PASSWORD), - ) - if not daikin_api: - return False + session = async_get_clientsession(hass) + host = conf[CONF_HOST] + try: + async with asyncio.timeout(TIMEOUT): + device: Appliance = await DaikinFactory( + host, + session, + key=entry.data.get(CONF_API_KEY), + uuid=entry.data.get(CONF_UUID), + password=entry.data.get(CONF_PASSWORD), + ) + _LOGGER.debug("Connection to %s successful", host) + except TimeoutError as err: + _LOGGER.debug("Connection to %s timed out in 60 seconds", host) + raise ConfigEntryNotReady from err + except ClientConnectionError as err: + _LOGGER.debug("ClientConnectionError to %s", host) + raise ConfigEntryNotReady from err - await async_migrate_unique_id(hass, entry, daikin_api) + coordinator = DaikinCoordinator(hass, device) - hass.data.setdefault(DOMAIN, {}).update({entry.entry_id: daikin_api}) + await coordinator.async_config_entry_first_refresh() + + await async_migrate_unique_id(hass, entry, 
device) + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -70,83 +79,16 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -async def daikin_api_setup( - hass: HomeAssistant, - host: str, - key: str | None, - uuid: str | None, - password: str | None, -) -> DaikinApi | None: - """Create a Daikin instance only once.""" - - session = async_get_clientsession(hass) - try: - async with asyncio.timeout(TIMEOUT): - device: Appliance = await DaikinFactory( - host, session, key=key, uuid=uuid, password=password - ) - _LOGGER.debug("Connection to %s successful", host) - except TimeoutError as err: - _LOGGER.debug("Connection to %s timed out", host) - raise ConfigEntryNotReady from err - except ClientConnectionError as err: - _LOGGER.debug("ClientConnectionError to %s", host) - raise ConfigEntryNotReady from err - except Exception: # noqa: BLE001 - _LOGGER.error("Unexpected error creating device %s", host) - return None - - return DaikinApi(device) - - -class DaikinApi: - """Keep the Daikin instance in one place and centralize the update.""" - - def __init__(self, device: Appliance) -> None: - """Initialize the Daikin Handle.""" - self.device = device - self.name = device.values.get("name", "Daikin AC") - self.ip_address = device.device_ip - self._available = True - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - async def async_update(self, **kwargs: Any) -> None: - """Pull the latest data from Daikin.""" - try: - await self.device.update_status() - self._available = True - except ClientConnectionError: - _LOGGER.warning("Connection failed for %s", self.ip_address) - self._available = False - - @property - def available(self) -> bool: - """Return True if entity is available.""" - return self._available - - @property - def device_info(self) -> DeviceInfo: - """Return a device description for device registry.""" - info = self.device.values - return DeviceInfo( - connections={(CONNECTION_NETWORK_MAC, self.device.mac)}, - manufacturer="Daikin", - model=info.get("model"), - name=info.get("name"), - sw_version=info.get("ver", "").replace("_", "."), - ) - - async def async_migrate_unique_id( - hass: HomeAssistant, config_entry: ConfigEntry, api: DaikinApi + hass: HomeAssistant, config_entry: ConfigEntry, device: Appliance ) -> None: """Migrate old entry.""" dev_reg = dr.async_get(hass) ent_reg = er.async_get(hass) old_unique_id = config_entry.unique_id - new_unique_id = api.device.mac + new_unique_id = device.mac new_mac = dr.format_mac(new_unique_id) - new_name = api.name + new_name = device.values.get("name", "Daikin AC") @callback def _update_unique_id(entity_entry: er.RegistryEntry) -> dict[str, str] | None: diff --git a/homeassistant/components/daikin/climate.py b/homeassistant/components/daikin/climate.py index fc54d4b0427..751683656f2 100644 --- a/homeassistant/components/daikin/climate.py +++ b/homeassistant/components/daikin/climate.py @@ -5,14 +5,11 @@ from __future__ import annotations import logging from typing import Any -import voluptuous as vol - from homeassistant.components.climate import ( ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, ATTR_SWING_MODE, - PLATFORM_SCHEMA as CLIMATE_PLATFORM_SCHEMA, PRESET_AWAY, PRESET_BOOST, PRESET_ECO, @@ -23,18 +20,11 @@ from homeassistant.components.climate import ( HVACMode, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_TEMPERATURE, - CONF_HOST, - CONF_NAME, - 
UnitOfTemperature, -) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN as DAIKIN_DOMAIN, DaikinApi +from . import DOMAIN as DAIKIN_DOMAIN from .const import ( ATTR_INSIDE_TEMPERATURE, ATTR_OUTSIDE_TEMPERATURE, @@ -42,12 +32,11 @@ from .const import ( ATTR_STATE_ON, ATTR_TARGET_TEMPERATURE, ) +from .coordinator import DaikinCoordinator +from .entity import DaikinEntity _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = CLIMATE_PLATFORM_SCHEMA.extend( - {vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string} -) HA_STATE_TO_DAIKIN = { HVACMode.FAN_ONLY: "fan", @@ -93,25 +82,12 @@ HA_ATTR_TO_DAIKIN = { DAIKIN_ATTR_ADVANCED = "adv" -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Old way of setting up the Daikin HVAC platform. - - Can only be called when a user accidentally mentions the platform in their - config. But even in that case it would have been ignored. - """ - - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up Daikin climate based on config_entry.""" daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id) - async_add_entities([DaikinClimate(daikin_api)], update_before_add=True) + async_add_entities([DaikinClimate(daikin_api)]) def format_target_temperature(target_temperature: float) -> str: @@ -119,25 +95,21 @@ def format_target_temperature(target_temperature: float) -> str: return str(round(float(target_temperature) * 2, 0) / 2).rstrip("0").rstrip(".") -class DaikinClimate(ClimateEntity): +class DaikinClimate(DaikinEntity, ClimateEntity): """Representation of a Daikin HVAC.""" _attr_name = None - _attr_has_entity_name = True _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = list(HA_STATE_TO_DAIKIN) _attr_target_temperature_step = 1 _attr_fan_modes: list[str] _attr_swing_modes: list[str] - _enable_turn_on_off_backwards_compatibility = False - def __init__(self, api: DaikinApi) -> None: + def __init__(self, coordinator: DaikinCoordinator) -> None: """Initialize the climate device.""" - - self._api = api - self._attr_fan_modes = api.device.fan_rate - self._attr_swing_modes = api.device.swing_modes - self._attr_device_info = api.device_info + super().__init__(coordinator) + self._attr_fan_modes = self.device.fan_rate + self._attr_swing_modes = self.device.swing_modes self._list: dict[str, list[Any]] = { ATTR_HVAC_MODE: self._attr_hvac_modes, ATTR_FAN_MODE: self._attr_fan_modes, @@ -150,13 +122,13 @@ class DaikinClimate(ClimateEntity): | ClimateEntityFeature.TARGET_TEMPERATURE ) - if api.device.support_away_mode or api.device.support_advanced_modes: + if self.device.support_away_mode or self.device.support_advanced_modes: self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE - if api.device.support_fan_rate: + if self.device.support_fan_rate: self._attr_supported_features |= ClimateEntityFeature.FAN_MODE - if api.device.support_swing_mode: + if self.device.support_swing_mode: self._attr_supported_features |= ClimateEntityFeature.SWING_MODE async def _set(self, settings: dict[str, Any]) -> None: @@ -185,22 +157,23 @@ class 
DaikinClimate(ClimateEntity): _LOGGER.error("Invalid temperature %s", value) if values: - await self._api.device.set(values) + await self.device.set(values) + await self.coordinator.async_refresh() @property def unique_id(self) -> str: """Return a unique ID.""" - return self._api.device.mac + return self.device.mac @property def current_temperature(self) -> float | None: """Return the current temperature.""" - return self._api.device.inside_temperature + return self.device.inside_temperature @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - return self._api.device.target_temperature + return self.device.target_temperature async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" @@ -212,8 +185,8 @@ class DaikinClimate(ClimateEntity): ret = HA_STATE_TO_CURRENT_HVAC.get(self.hvac_mode) if ( ret in (HVACAction.COOLING, HVACAction.HEATING) - and self._api.device.support_compressor_frequency - and self._api.device.compressor_frequency == 0 + and self.device.support_compressor_frequency + and self.device.compressor_frequency == 0 ): return HVACAction.IDLE return ret @@ -221,7 +194,7 @@ class DaikinClimate(ClimateEntity): @property def hvac_mode(self) -> HVACMode: """Return current operation ie. heat, cool, idle.""" - daikin_mode = self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE])[1] + daikin_mode = self.device.represent(HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE])[1] return DAIKIN_TO_HA_STATE.get(daikin_mode, HVACMode.HEAT_COOL) async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: @@ -231,7 +204,7 @@ class DaikinClimate(ClimateEntity): @property def fan_mode(self) -> str: """Return the fan setting.""" - return self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_FAN_MODE])[1].title() + return self.device.represent(HA_ATTR_TO_DAIKIN[ATTR_FAN_MODE])[1].title() async def async_set_fan_mode(self, fan_mode: str) -> None: """Set fan mode.""" @@ -240,7 +213,7 @@ class DaikinClimate(ClimateEntity): @property def swing_mode(self) -> str: """Return the fan setting.""" - return self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_SWING_MODE])[1].title() + return self.device.represent(HA_ATTR_TO_DAIKIN[ATTR_SWING_MODE])[1].title() async def async_set_swing_mode(self, swing_mode: str) -> None: """Set new target temperature.""" @@ -250,18 +223,18 @@ class DaikinClimate(ClimateEntity): def preset_mode(self) -> str: """Return the preset_mode.""" if ( - self._api.device.represent(HA_ATTR_TO_DAIKIN[ATTR_PRESET_MODE])[1] + self.device.represent(HA_ATTR_TO_DAIKIN[ATTR_PRESET_MODE])[1] == HA_PRESET_TO_DAIKIN[PRESET_AWAY] ): return PRESET_AWAY if ( HA_PRESET_TO_DAIKIN[PRESET_BOOST] - in self._api.device.represent(DAIKIN_ATTR_ADVANCED)[1] + in self.device.represent(DAIKIN_ATTR_ADVANCED)[1] ): return PRESET_BOOST if ( HA_PRESET_TO_DAIKIN[PRESET_ECO] - in self._api.device.represent(DAIKIN_ATTR_ADVANCED)[1] + in self.device.represent(DAIKIN_ATTR_ADVANCED)[1] ): return PRESET_ECO return PRESET_NONE @@ -269,46 +242,45 @@ class DaikinClimate(ClimateEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set preset mode.""" if preset_mode == PRESET_AWAY: - await self._api.device.set_holiday(ATTR_STATE_ON) + await self.device.set_holiday(ATTR_STATE_ON) elif preset_mode == PRESET_BOOST: - await self._api.device.set_advanced_mode( + await self.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_BOOST], ATTR_STATE_ON ) elif preset_mode == PRESET_ECO: - await self._api.device.set_advanced_mode( + await 
self.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_ECO], ATTR_STATE_ON ) elif self.preset_mode == PRESET_AWAY: - await self._api.device.set_holiday(ATTR_STATE_OFF) + await self.device.set_holiday(ATTR_STATE_OFF) elif self.preset_mode == PRESET_BOOST: - await self._api.device.set_advanced_mode( + await self.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_BOOST], ATTR_STATE_OFF ) elif self.preset_mode == PRESET_ECO: - await self._api.device.set_advanced_mode( + await self.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_ECO], ATTR_STATE_OFF ) + await self.coordinator.async_refresh() @property def preset_modes(self) -> list[str]: """List of available preset modes.""" ret = [PRESET_NONE] - if self._api.device.support_away_mode: + if self.device.support_away_mode: ret.append(PRESET_AWAY) - if self._api.device.support_advanced_modes: + if self.device.support_advanced_modes: ret += [PRESET_ECO, PRESET_BOOST] return ret - async def async_update(self) -> None: - """Retrieve latest state.""" - await self._api.async_update() - async def async_turn_on(self) -> None: """Turn device on.""" - await self._api.device.set({}) + await self.device.set({}) + await self.coordinator.async_refresh() async def async_turn_off(self) -> None: """Turn device off.""" - await self._api.device.set( + await self.device.set( {HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE]: HA_STATE_TO_DAIKIN[HVACMode.OFF]} ) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/daikin/coordinator.py b/homeassistant/components/daikin/coordinator.py new file mode 100644 index 00000000000..35d998b4ba2 --- /dev/null +++ b/homeassistant/components/daikin/coordinator.py @@ -0,0 +1,30 @@ +"""Coordinator for Daikin integration.""" + +from datetime import timedelta +import logging + +from pydaikin.daikin_base import Appliance + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class DaikinCoordinator(DataUpdateCoordinator[None]): + """Class to manage fetching Daikin data.""" + + def __init__(self, hass: HomeAssistant, device: Appliance) -> None: + """Initialize global Daikin data updater.""" + super().__init__( + hass, + _LOGGER, + name=device.values.get("name", DOMAIN), + update_interval=timedelta(seconds=60), + ) + self.device = device + + async def _async_update_data(self) -> None: + await self.device.update_status() diff --git a/homeassistant/components/daikin/entity.py b/homeassistant/components/daikin/entity.py new file mode 100644 index 00000000000..704ce226416 --- /dev/null +++ b/homeassistant/components/daikin/entity.py @@ -0,0 +1,25 @@ +"""Base entity for Daikin.""" + +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import DaikinCoordinator + + +class DaikinEntity(CoordinatorEntity[DaikinCoordinator]): + """Base entity for Daikin.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: DaikinCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.device = coordinator.device + info = self.device.values + self._attr_device_info = DeviceInfo( + connections={(CONNECTION_NETWORK_MAC, self.device.mac)}, + manufacturer="Daikin", + model=info.get("model"), + name=info.get("name"), + sw_version=info.get("ver", "").replace("_", "."), + ) diff --git a/homeassistant/components/daikin/manifest.json 
b/homeassistant/components/daikin/manifest.json index 0d93c0e25ad..f794d97a9ba 100644 --- a/homeassistant/components/daikin/manifest.json +++ b/homeassistant/components/daikin/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/daikin", "iot_class": "local_polling", "loggers": ["pydaikin"], - "requirements": ["pydaikin==2.13.4"], + "requirements": ["pydaikin==2.13.8"], "zeroconf": ["_dkapi._tcp.local."] } diff --git a/homeassistant/components/daikin/sensor.py b/homeassistant/components/daikin/sensor.py index a17a80f2065..d2d6ef02fc3 100644 --- a/homeassistant/components/daikin/sensor.py +++ b/homeassistant/components/daikin/sensor.py @@ -23,9 +23,8 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN as DAIKIN_DOMAIN, DaikinApi +from . import DOMAIN as DAIKIN_DOMAIN from .const import ( ATTR_COMPRESSOR_FREQUENCY, ATTR_COOL_ENERGY, @@ -38,6 +37,8 @@ from .const import ( ATTR_TOTAL_ENERGY_TODAY, ATTR_TOTAL_POWER, ) +from .coordinator import DaikinCoordinator +from .entity import DaikinEntity @dataclass(frozen=True, kw_only=True) @@ -132,19 +133,6 @@ SENSOR_TYPES: tuple[DaikinSensorEntityDescription, ...] = ( ) -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Old way of setting up the Daikin sensors. - - Can only be called when a user accidentally mentions the platform in their - config. But even in that case it would have been ignored. - """ - - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: @@ -173,26 +161,20 @@ async def async_setup_entry( async_add_entities(entities) -class DaikinSensor(SensorEntity): +class DaikinSensor(DaikinEntity, SensorEntity): """Representation of a Sensor.""" - _attr_has_entity_name = True entity_description: DaikinSensorEntityDescription def __init__( - self, api: DaikinApi, description: DaikinSensorEntityDescription + self, coordinator: DaikinCoordinator, description: DaikinSensorEntityDescription ) -> None: """Initialize the sensor.""" + super().__init__(coordinator) self.entity_description = description - self._attr_device_info = api.device_info - self._attr_unique_id = f"{api.device.mac}-{description.key}" - self._api = api + self._attr_unique_id = f"{self.device.mac}-{description.key}" @property def native_value(self) -> float | None: """Return the state of the sensor.""" - return self.entity_description.value_func(self._api.device) - - async def async_update(self) -> None: - """Retrieve latest state.""" - await self._api.async_update() + return self.entity_description.value_func(self.device) diff --git a/homeassistant/components/daikin/switch.py b/homeassistant/components/daikin/switch.py index af94e98a337..669048ac45e 100644 --- a/homeassistant/components/daikin/switch.py +++ b/homeassistant/components/daikin/switch.py @@ -8,41 +8,27 @@ from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN as DAIKIN_DOMAIN, DaikinApi +from . 
import DOMAIN +from .coordinator import DaikinCoordinator +from .entity import DaikinEntity DAIKIN_ATTR_ADVANCED = "adv" DAIKIN_ATTR_STREAMER = "streamer" DAIKIN_ATTR_MODE = "mode" -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Old way of setting up the platform. - - Can only be called when a user accidentally mentions the platform in their - config. But even in that case it would have been ignored. - """ - - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up Daikin climate based on config_entry.""" - daikin_api: DaikinApi = hass.data[DAIKIN_DOMAIN][entry.entry_id] - switches: list[DaikinZoneSwitch | DaikinStreamerSwitch | DaikinToggleSwitch] = [] + daikin_api: DaikinCoordinator = hass.data[DOMAIN][entry.entry_id] + switches: list[SwitchEntity] = [] if zones := daikin_api.device.zones: switches.extend( - [ - DaikinZoneSwitch(daikin_api, zone_id) - for zone_id, zone in enumerate(zones) - if zone[0] != "-" - ] + DaikinZoneSwitch(daikin_api, zone_id) + for zone_id, zone in enumerate(zones) + if zone[0] != "-" ) if daikin_api.device.support_advanced_modes: # It isn't possible to find out from the API responses if a specific @@ -53,100 +39,86 @@ async def async_setup_entry( async_add_entities(switches) -class DaikinZoneSwitch(SwitchEntity): +class DaikinZoneSwitch(DaikinEntity, SwitchEntity): """Representation of a zone.""" - _attr_has_entity_name = True _attr_translation_key = "zone" - def __init__(self, api: DaikinApi, zone_id: int) -> None: + def __init__(self, coordinator: DaikinCoordinator, zone_id: int) -> None: """Initialize the zone.""" - self._api = api + super().__init__(coordinator) self._zone_id = zone_id - self._attr_device_info = api.device_info - self._attr_unique_id = f"{api.device.mac}-zone{zone_id}" + self._attr_unique_id = f"{self.device.mac}-zone{zone_id}" @property def name(self) -> str: """Return the name of the sensor.""" - return self._api.device.zones[self._zone_id][0] + return self.device.zones[self._zone_id][0] @property def is_on(self) -> bool: """Return the state of the sensor.""" - return self._api.device.zones[self._zone_id][1] == "1" - - async def async_update(self) -> None: - """Retrieve latest state.""" - await self._api.async_update() + return self.device.zones[self._zone_id][1] == "1" async def async_turn_on(self, **kwargs: Any) -> None: """Turn the zone on.""" - await self._api.device.set_zone(self._zone_id, "zone_onoff", "1") + await self.device.set_zone(self._zone_id, "zone_onoff", "1") + await self.coordinator.async_refresh() async def async_turn_off(self, **kwargs: Any) -> None: """Turn the zone off.""" - await self._api.device.set_zone(self._zone_id, "zone_onoff", "0") + await self.device.set_zone(self._zone_id, "zone_onoff", "0") + await self.coordinator.async_refresh() -class DaikinStreamerSwitch(SwitchEntity): +class DaikinStreamerSwitch(DaikinEntity, SwitchEntity): """Streamer state.""" _attr_name = "Streamer" - _attr_has_entity_name = True _attr_translation_key = "streamer" - def __init__(self, api: DaikinApi) -> None: - """Initialize streamer switch.""" - self._api = api - self._attr_device_info = api.device_info - self._attr_unique_id = f"{api.device.mac}-streamer" + def __init__(self, coordinator: DaikinCoordinator) -> None: + """Initialize switch.""" + super().__init__(coordinator) + self._attr_unique_id = 
f"{self.device.mac}-streamer" @property def is_on(self) -> bool: """Return the state of the sensor.""" - return ( - DAIKIN_ATTR_STREAMER in self._api.device.represent(DAIKIN_ATTR_ADVANCED)[1] - ) - - async def async_update(self) -> None: - """Retrieve latest state.""" - await self._api.async_update() + return DAIKIN_ATTR_STREAMER in self.device.represent(DAIKIN_ATTR_ADVANCED)[1] async def async_turn_on(self, **kwargs: Any) -> None: """Turn the zone on.""" - await self._api.device.set_streamer("on") + await self.device.set_streamer("on") + await self.coordinator.async_refresh() async def async_turn_off(self, **kwargs: Any) -> None: """Turn the zone off.""" - await self._api.device.set_streamer("off") + await self.device.set_streamer("off") + await self.coordinator.async_refresh() -class DaikinToggleSwitch(SwitchEntity): +class DaikinToggleSwitch(DaikinEntity, SwitchEntity): """Switch state.""" - _attr_has_entity_name = True _attr_translation_key = "toggle" - def __init__(self, api: DaikinApi) -> None: + def __init__(self, coordinator: DaikinCoordinator) -> None: """Initialize switch.""" - self._api = api - self._attr_device_info = api.device_info - self._attr_unique_id = f"{self._api.device.mac}-toggle" + super().__init__(coordinator) + self._attr_unique_id = f"{self.device.mac}-toggle" @property def is_on(self) -> bool: """Return the state of the sensor.""" - return "off" not in self._api.device.represent(DAIKIN_ATTR_MODE) - - async def async_update(self) -> None: - """Retrieve latest state.""" - await self._api.async_update() + return "off" not in self.device.represent(DAIKIN_ATTR_MODE) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the zone on.""" - await self._api.device.set({}) + await self.device.set({}) + await self.coordinator.async_refresh() async def async_turn_off(self, **kwargs: Any) -> None: """Turn the zone off.""" - await self._api.device.set({DAIKIN_ATTR_MODE: "off"}) + await self.device.set({DAIKIN_ATTR_MODE: "off"}) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/danfoss_air/manifest.json b/homeassistant/components/danfoss_air/manifest.json index 9eea3221bbe..57cb1aa7218 100644 --- a/homeassistant/components/danfoss_air/manifest.json +++ b/homeassistant/components/danfoss_air/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/danfoss_air", "iot_class": "local_polling", "loggers": ["pydanfossair"], + "quality_scale": "legacy", "requirements": ["pydanfossair==0.1.0"] } diff --git a/homeassistant/components/datadog/manifest.json b/homeassistant/components/datadog/manifest.json index 4ae24a80c6c..ca9681effca 100644 --- a/homeassistant/components/datadog/manifest.json +++ b/homeassistant/components/datadog/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/datadog", "iot_class": "local_push", "loggers": ["datadog"], + "quality_scale": "legacy", "requirements": ["datadog==0.15.0"] } diff --git a/homeassistant/components/date/__init__.py b/homeassistant/components/date/__init__.py index 7914c6d2984..622ec574542 100644 --- a/homeassistant/components/date/__init__.py +++ b/homeassistant/components/date/__init__.py @@ -3,10 +3,10 @@ from __future__ import annotations from datetime import date, timedelta -from functools import cached_property import logging from typing import final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -16,11 +16,13 @@ from homeassistant.helpers import 
config_validation as cv from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import DOMAIN, SERVICE_SET_VALUE _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[DateEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -37,7 +39,7 @@ async def _async_set_value(entity: DateEntity, service_call: ServiceCall) -> Non async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Date entities.""" - component = hass.data[DOMAIN] = EntityComponent[DateEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[DateEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -51,14 +53,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[DateEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[DateEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class DateEntityDescription(EntityDescription, frozen_or_thawed=True): diff --git a/homeassistant/components/date/icons.json b/homeassistant/components/date/icons.json index 80ec2691285..b139b897210 100644 --- a/homeassistant/components/date/icons.json +++ b/homeassistant/components/date/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": "mdi:calendar-edit" + "set_value": { + "service": "mdi:calendar-edit" + } } } diff --git a/homeassistant/components/datetime/__init__.py b/homeassistant/components/datetime/__init__.py index f418f81da03..8aef34ddcbd 100644 --- a/homeassistant/components/datetime/__init__.py +++ b/homeassistant/components/datetime/__init__.py @@ -3,10 +3,10 @@ from __future__ import annotations from datetime import UTC, datetime, timedelta -from functools import cached_property import logging from typing import final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -16,11 +16,13 @@ from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util +from homeassistant.util.hass_dict import HassKey from .const import ATTR_DATETIME, DOMAIN, SERVICE_SET_VALUE _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[DateTimeEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -40,7 +42,7 @@ async def _async_set_value(entity: DateTimeEntity, service_call: ServiceCall) -> async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Date/Time entities.""" - component = hass.data[DOMAIN] = EntityComponent[DateTimeEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[DateTimeEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) 
await component.async_setup(config) @@ -58,14 +60,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[DateTimeEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[DateTimeEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class DateTimeEntityDescription(EntityDescription, frozen_or_thawed=True): diff --git a/homeassistant/components/datetime/icons.json b/homeassistant/components/datetime/icons.json index 563d03e2a8f..d7e9fca8e5c 100644 --- a/homeassistant/components/datetime/icons.json +++ b/homeassistant/components/datetime/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": "mdi:calendar-edit" + "set_value": { + "service": "mdi:calendar-edit" + } } } diff --git a/homeassistant/components/ddwrt/device_tracker.py b/homeassistant/components/ddwrt/device_tracker.py index 5d31d16a530..d72496e4d1e 100644 --- a/homeassistant/components/ddwrt/device_tracker.py +++ b/homeassistant/components/ddwrt/device_tracker.py @@ -10,7 +10,7 @@ import requests import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -50,7 +50,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> DdWrtDeviceScanner | None: """Validate the configuration and return a DD-WRT scanner.""" try: - return DdWrtDeviceScanner(config[DOMAIN]) + return DdWrtDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) except ConnectionError: return None diff --git a/homeassistant/components/ddwrt/manifest.json b/homeassistant/components/ddwrt/manifest.json index 98ea17b0659..9a2b2470131 100644 --- a/homeassistant/components/ddwrt/manifest.json +++ b/homeassistant/components/ddwrt/manifest.json @@ -3,5 +3,6 @@ "name": "DD-WRT", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ddwrt", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/deako/__init__.py b/homeassistant/components/deako/__init__.py new file mode 100644 index 00000000000..7a169defe01 --- /dev/null +++ b/homeassistant/components/deako/__init__.py @@ -0,0 +1,54 @@ +"""The deako integration.""" + +from __future__ import annotations + +import logging + +from pydeako import Deako, DeakoDiscoverer, FindDevicesError + +from homeassistant.components import zeroconf +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +_LOGGER: logging.Logger = logging.getLogger(__name__) + +PLATFORMS: list[Platform] = [Platform.LIGHT] + +type DeakoConfigEntry = ConfigEntry[Deako] + + +async def async_setup_entry(hass: HomeAssistant, entry: DeakoConfigEntry) -> bool: + """Set up deako.""" + _zc = await zeroconf.async_get_instance(hass) + discoverer = DeakoDiscoverer(_zc) + + connection = Deako(discoverer.get_address) + + await connection.connect() + try: + await 
connection.find_devices() + except FindDevicesError as exc: + _LOGGER.warning("Error finding devices: %s", exc) + await connection.disconnect() + raise ConfigEntryNotReady(exc) from exc + + # If deako devices are advertising on mdns, we should be able to get at least one device + devices = connection.get_devices() + if len(devices) == 0: + await connection.disconnect() + raise ConfigEntryNotReady(devices) + + entry.runtime_data = connection + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: DeakoConfigEntry) -> bool: + """Unload a config entry.""" + await entry.runtime_data.disconnect() + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/deako/config_flow.py b/homeassistant/components/deako/config_flow.py new file mode 100644 index 00000000000..273cbf2795e --- /dev/null +++ b/homeassistant/components/deako/config_flow.py @@ -0,0 +1,26 @@ +"""Config flow for deako.""" + +from pydeako import DeakoDiscoverer, DevicesNotFoundException + +from homeassistant.components import zeroconf +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_flow + +from .const import DOMAIN, NAME + + +async def _async_has_devices(hass: HomeAssistant) -> bool: + """Return if there are devices that can be discovered.""" + _zc = await zeroconf.async_get_instance(hass) + discoverer = DeakoDiscoverer(_zc) + + try: + await discoverer.get_address() + except DevicesNotFoundException: + return False + else: + # address exists, there's at least one device + return True + + +config_entry_flow.register_discovery_flow(DOMAIN, NAME, _async_has_devices) diff --git a/homeassistant/components/deako/const.py b/homeassistant/components/deako/const.py new file mode 100644 index 00000000000..f6b688b9b07 --- /dev/null +++ b/homeassistant/components/deako/const.py @@ -0,0 +1,5 @@ +"""Constants for Deako.""" + +# Base component constants +NAME = "Deako" +DOMAIN = "deako" diff --git a/homeassistant/components/deako/light.py b/homeassistant/components/deako/light.py new file mode 100644 index 00000000000..75b01935c9a --- /dev/null +++ b/homeassistant/components/deako/light.py @@ -0,0 +1,96 @@ +"""Binary sensor platform for integration_blueprint.""" + +from typing import Any + +from pydeako import Deako + +from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import DeakoConfigEntry +from .const import DOMAIN + +# Model names +MODEL_SMART = "smart" +MODEL_DIMMER = "dimmer" + + +async def async_setup_entry( + hass: HomeAssistant, + config: DeakoConfigEntry, + add_entities: AddEntitiesCallback, +) -> None: + """Configure the platform.""" + client = config.runtime_data + + add_entities([DeakoLightEntity(client, uuid) for uuid in client.get_devices()]) + + +class DeakoLightEntity(LightEntity): + """Deako LightEntity class.""" + + _attr_has_entity_name = True + _attr_name = None + _attr_is_on = False + _attr_available = True + + client: Deako + + def __init__(self, client: Deako, uuid: str) -> None: + """Save connection reference.""" + self.client = client + self._attr_unique_id = uuid + + dimmable = client.is_dimmable(uuid) + + model = MODEL_SMART + self._attr_color_mode = ColorMode.ONOFF + if dimmable: + model = MODEL_DIMMER + self._attr_color_mode = ColorMode.BRIGHTNESS + + self._attr_supported_color_modes = {self._attr_color_mode} + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, uuid)}, + name=client.get_name(uuid), + manufacturer="Deako", + model=model, + ) + + client.set_state_callback(uuid, self.on_update) + self.update() # set initial state + + def on_update(self) -> None: + """State update callback.""" + self.update() + self.schedule_update_ha_state() + + async def control_device(self, power: bool, dim: int | None = None) -> None: + """Control entity state via client.""" + assert self._attr_unique_id is not None + await self.client.control_device(self._attr_unique_id, power, dim) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the light.""" + dim = None + if ATTR_BRIGHTNESS in kwargs: + dim = round(kwargs[ATTR_BRIGHTNESS] / 2.55, 0) + await self.control_device(True, dim) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the device.""" + await self.control_device(False) + + def update(self) -> None: + """Call to update state.""" + assert self._attr_unique_id is not None + state = self.client.get_state(self._attr_unique_id) or {} + self._attr_is_on = bool(state.get("power", False)) + if ( + self._attr_supported_color_modes is not None + and ColorMode.BRIGHTNESS in self._attr_supported_color_modes + ): + self._attr_brightness = int(round(state.get("dim", 0) * 2.55)) diff --git a/homeassistant/components/deako/manifest.json b/homeassistant/components/deako/manifest.json new file mode 100644 index 00000000000..f4f4782530b --- /dev/null +++ b/homeassistant/components/deako/manifest.json @@ -0,0 +1,13 @@ +{ + "domain": "deako", + "name": "Deako", + "codeowners": ["@sebirdman", "@balake", "@deakolights"], + "config_flow": true, + "dependencies": ["zeroconf"], + "documentation": "https://www.home-assistant.io/integrations/deako", + "iot_class": "local_polling", + "loggers": ["pydeako"], + "requirements": ["pydeako==0.6.0"], + "single_config_entry": true, + "zeroconf": ["_deako._tcp.local."] +} diff --git a/homeassistant/components/deako/strings.json b/homeassistant/components/deako/strings.json new file mode 100644 index 00000000000..6bb292d74a9 --- /dev/null +++ b/homeassistant/components/deako/strings.json @@ -0,0 +1,13 @@ +{ + "config": { + "step": { + "confirm": { + "description": "Please confirm setting up the Deako integration" + } + }, + "abort": { + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + } + } +} diff --git a/homeassistant/components/debugpy/icons.json 
b/homeassistant/components/debugpy/icons.json index b3bb4dde23a..88086382059 100644 --- a/homeassistant/components/debugpy/icons.json +++ b/homeassistant/components/debugpy/icons.json @@ -1,5 +1,7 @@ { "services": { - "start": "mdi:play" + "start": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/debugpy/manifest.json b/homeassistant/components/debugpy/manifest.json index fc52557fa5a..078af8c67a5 100644 --- a/homeassistant/components/debugpy/manifest.json +++ b/homeassistant/components/debugpy/manifest.json @@ -6,5 +6,5 @@ "integration_type": "service", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["debugpy==1.8.1"] + "requirements": ["debugpy==1.8.11"] } diff --git a/homeassistant/components/deconz/alarm_control_panel.py b/homeassistant/components/deconz/alarm_control_panel.py index ae230c783f9..678e441a7a9 100644 --- a/homeassistant/components/deconz/alarm_control_panel.py +++ b/homeassistant/components/deconz/alarm_control_panel.py @@ -10,38 +10,30 @@ from pydeconz.models.sensor.ancillary_control import ( ) from homeassistant.components.alarm_control_panel import ( - DOMAIN, + DOMAIN as ALARM_CONTROl_PANEL_DOMAIN, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub DECONZ_TO_ALARM_STATE = { - AncillaryControlPanel.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - AncillaryControlPanel.ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT, - AncillaryControlPanel.ARMED_STAY: STATE_ALARM_ARMED_HOME, - AncillaryControlPanel.ARMING_AWAY: STATE_ALARM_ARMING, - AncillaryControlPanel.ARMING_NIGHT: STATE_ALARM_ARMING, - AncillaryControlPanel.ARMING_STAY: STATE_ALARM_ARMING, - AncillaryControlPanel.DISARMED: STATE_ALARM_DISARMED, - AncillaryControlPanel.ENTRY_DELAY: STATE_ALARM_PENDING, - AncillaryControlPanel.EXIT_DELAY: STATE_ALARM_PENDING, - AncillaryControlPanel.IN_ALARM: STATE_ALARM_TRIGGERED, + AncillaryControlPanel.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + AncillaryControlPanel.ARMED_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + AncillaryControlPanel.ARMED_STAY: AlarmControlPanelState.ARMED_HOME, + AncillaryControlPanel.ARMING_AWAY: AlarmControlPanelState.ARMING, + AncillaryControlPanel.ARMING_NIGHT: AlarmControlPanelState.ARMING, + AncillaryControlPanel.ARMING_STAY: AlarmControlPanelState.ARMING, + AncillaryControlPanel.DISARMED: AlarmControlPanelState.DISARMED, + AncillaryControlPanel.ENTRY_DELAY: AlarmControlPanelState.PENDING, + AncillaryControlPanel.EXIT_DELAY: AlarmControlPanelState.PENDING, + AncillaryControlPanel.IN_ALARM: AlarmControlPanelState.TRIGGERED, } @@ -60,7 +52,7 @@ async def async_setup_entry( ) -> None: """Set up the deCONZ alarm control panel devices.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[ALARM_CONTROl_PANEL_DOMAIN] = set() @callback def async_add_sensor(_: EventType, sensor_id: str) -> None: @@ -79,7 +71,7 @@ class DeconzAlarmControlPanel(DeconzDevice[AncillaryControl], AlarmControlPanelE """Representation of a deCONZ alarm control panel.""" _update_key = 
"panel" - TYPE = DOMAIN + TYPE = ALARM_CONTROl_PANEL_DOMAIN _attr_code_format = CodeFormat.NUMBER _attr_supported_features = ( @@ -105,7 +97,7 @@ class DeconzAlarmControlPanel(DeconzDevice[AncillaryControl], AlarmControlPanelE super().async_update_callback() @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the control panel.""" if self._device.panel in DECONZ_TO_ALARM_STATE: return DECONZ_TO_ALARM_STATE[self._device.panel] diff --git a/homeassistant/components/deconz/binary_sensor.py b/homeassistant/components/deconz/binary_sensor.py index 0b3461b7a12..a5496d3bc10 100644 --- a/homeassistant/components/deconz/binary_sensor.py +++ b/homeassistant/components/deconz/binary_sensor.py @@ -18,7 +18,7 @@ from pydeconz.models.sensor.vibration import Vibration from pydeconz.models.sensor.water import Water from homeassistant.components.binary_sensor import ( - DOMAIN, + DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, @@ -29,7 +29,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ATTR_DARK, ATTR_ON -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub ATTR_ORIENTATION = "orientation" @@ -165,7 +165,7 @@ async def async_setup_entry( ) -> None: """Set up the deCONZ binary sensor.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[BINARY_SENSOR_DOMAIN] = set() @callback def async_add_sensor(_: EventType, sensor_id: str) -> None: @@ -189,7 +189,7 @@ async def async_setup_entry( class DeconzBinarySensor(DeconzDevice[SensorResources], BinarySensorEntity): """Representation of a deCONZ binary sensor.""" - TYPE = DOMAIN + TYPE = BINARY_SENSOR_DOMAIN entity_description: DeconzBinarySensorDescription def __init__( diff --git a/homeassistant/components/deconz/button.py b/homeassistant/components/deconz/button.py index a915ca56a33..ecf28b5e22c 100644 --- a/homeassistant/components/deconz/button.py +++ b/homeassistant/components/deconz/button.py @@ -9,7 +9,7 @@ from pydeconz.models.scene import Scene as PydeconzScene from pydeconz.models.sensor.presence import Presence from homeassistant.components.button import ( - DOMAIN, + DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass, ButtonEntity, ButtonEntityDescription, @@ -19,7 +19,7 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .deconz_device import DeconzDevice, DeconzSceneMixin +from .entity import DeconzDevice, DeconzSceneMixin from .hub import DeconzHub @@ -51,7 +51,7 @@ async def async_setup_entry( ) -> None: """Set up the deCONZ button entity.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[BUTTON_DOMAIN] = set() @callback def async_add_scene(_: EventType, scene_id: str) -> None: @@ -83,7 +83,7 @@ async def async_setup_entry( class DeconzSceneButton(DeconzSceneMixin, ButtonEntity): """Representation of a deCONZ button entity.""" - TYPE = DOMAIN + TYPE = BUTTON_DOMAIN def __init__( self, @@ -119,7 +119,7 @@ class DeconzPresenceResetButton(DeconzDevice[Presence], ButtonEntity): _attr_entity_category = EntityCategory.CONFIG _attr_device_class = ButtonDeviceClass.RESTART - TYPE = DOMAIN + TYPE = BUTTON_DOMAIN async def async_press(self) -> None: """Store reset presence 
state.""" diff --git a/homeassistant/components/deconz/climate.py b/homeassistant/components/deconz/climate.py index 45a50d44e36..690f943379d 100644 --- a/homeassistant/components/deconz/climate.py +++ b/homeassistant/components/deconz/climate.py @@ -13,7 +13,7 @@ from pydeconz.models.sensor.thermostat import ( ) from homeassistant.components.climate import ( - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, FAN_AUTO, FAN_HIGH, FAN_LOW, @@ -34,7 +34,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ATTR_LOCKED, ATTR_OFFSET, ATTR_VALVE -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub DECONZ_FAN_SMART = "smart" @@ -81,7 +81,7 @@ async def async_setup_entry( ) -> None: """Set up the deCONZ climate devices.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[CLIMATE_DOMAIN] = set() @callback def async_add_climate(_: EventType, climate_id: str) -> None: @@ -98,10 +98,9 @@ async def async_setup_entry( class DeconzThermostat(DeconzDevice[Thermostat], ClimateEntity): """Representation of a deCONZ thermostat.""" - TYPE = DOMAIN + TYPE = CLIMATE_DOMAIN _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: Thermostat, hub: DeconzHub) -> None: """Set up thermostat device.""" diff --git a/homeassistant/components/deconz/config_flow.py b/homeassistant/components/deconz/config_flow.py index d017e2c5c65..ed54701f656 100644 --- a/homeassistant/components/deconz/config_flow.py +++ b/homeassistant/components/deconz/config_flow.py @@ -20,7 +20,6 @@ from pydeconz.utils import ( import voluptuous as vol from homeassistant.components import ssdp -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ( SOURCE_HASSIO, ConfigEntry, @@ -31,6 +30,7 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import ( CONF_ALLOW_CLIP_SENSOR, @@ -74,9 +74,11 @@ class DeconzFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> DeconzOptionsFlowHandler: """Get the options flow for this handler.""" - return DeconzOptionsFlowHandler(config_entry) + return DeconzOptionsFlowHandler() def __init__(self) -> None: """Initialize the deCONZ config flow.""" @@ -299,11 +301,6 @@ class DeconzOptionsFlowHandler(OptionsFlow): gateway: DeconzHub - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize deCONZ options flow.""" - self.config_entry = config_entry - self.options = dict(config_entry.options) - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -315,8 +312,7 @@ class DeconzOptionsFlowHandler(OptionsFlow): ) -> ConfigFlowResult: """Manage the deconz devices options.""" if user_input is not None: - self.options.update(user_input) - return self.async_create_entry(title="", data=self.options) + return self.async_create_entry(data=self.config_entry.options | user_input) schema_options = {} for option, default in ( diff --git a/homeassistant/components/deconz/cover.py 
b/homeassistant/components/deconz/cover.py index b83c62c3367..030c4b12709 100644 --- a/homeassistant/components/deconz/cover.py +++ b/homeassistant/components/deconz/cover.py @@ -12,7 +12,7 @@ from pydeconz.models.light.cover import Cover from homeassistant.components.cover import ( ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, CoverDeviceClass, CoverEntity, CoverEntityFeature, @@ -21,7 +21,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub DECONZ_TYPE_TO_DEVICE_CLASS = { @@ -38,7 +38,7 @@ async def async_setup_entry( ) -> None: """Set up covers for deCONZ component.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[COVER_DOMAIN] = set() @callback def async_add_cover(_: EventType, cover_id: str) -> None: @@ -54,7 +54,7 @@ async def async_setup_entry( class DeconzCover(DeconzDevice[Cover], CoverEntity): """Representation of a deCONZ cover.""" - TYPE = DOMAIN + TYPE = COVER_DOMAIN def __init__(self, cover_id: str, hub: DeconzHub) -> None: """Set up cover device.""" diff --git a/homeassistant/components/deconz/deconz_event.py b/homeassistant/components/deconz/deconz_event.py index 56cbf47b4e3..d6d2ddf1373 100644 --- a/homeassistant/components/deconz/deconz_event.py +++ b/homeassistant/components/deconz/deconz_event.py @@ -25,7 +25,7 @@ from homeassistant.helpers import device_registry as dr from homeassistant.util import slugify from .const import ATTR_DURATION, ATTR_ROTATION, CONF_ANGLE, CONF_GESTURE, LOGGER -from .deconz_device import DeconzBase +from .entity import DeconzBase from .hub import DeconzHub CONF_DECONZ_EVENT = "deconz_event" diff --git a/homeassistant/components/deconz/device_trigger.py b/homeassistant/components/deconz/device_trigger.py index ec988feb3cf..2aeeece3ac5 100644 --- a/homeassistant/components/deconz/device_trigger.py +++ b/homeassistant/components/deconz/device_trigger.py @@ -4,8 +4,8 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event as event_trigger @@ -169,6 +169,30 @@ FRIENDS_OF_HUE_SWITCH = { (CONF_LONG_RELEASE, CONF_BOTTOM_BUTTONS): {CONF_EVENT: 6003}, } +RODRET_REMOTE_MODEL = "RODRET Dimmer" +RODRET_REMOTE = { + (CONF_SHORT_RELEASE, CONF_TURN_ON): {CONF_EVENT: 1002}, + (CONF_LONG_PRESS, CONF_TURN_ON): {CONF_EVENT: 1001}, + (CONF_LONG_RELEASE, CONF_TURN_ON): {CONF_EVENT: 1003}, + (CONF_SHORT_RELEASE, CONF_TURN_OFF): {CONF_EVENT: 2002}, + (CONF_LONG_PRESS, CONF_TURN_OFF): {CONF_EVENT: 2001}, + (CONF_LONG_RELEASE, CONF_TURN_OFF): {CONF_EVENT: 2003}, +} + +SOMRIG_REMOTE_MODEL = "SOMRIG shortcut button" +SOMRIG_REMOTE = { + (CONF_SHORT_PRESS, CONF_BUTTON_1): {CONF_EVENT: 1000}, + (CONF_SHORT_RELEASE, CONF_BUTTON_1): {CONF_EVENT: 1002}, + (CONF_LONG_PRESS, CONF_BUTTON_1): {CONF_EVENT: 1001}, + (CONF_LONG_RELEASE, CONF_BUTTON_1): {CONF_EVENT: 1003}, + (CONF_DOUBLE_PRESS, CONF_BUTTON_1): {CONF_EVENT: 1004}, + (CONF_SHORT_PRESS, CONF_BUTTON_2): {CONF_EVENT: 2000}, + (CONF_SHORT_RELEASE, CONF_BUTTON_2): {CONF_EVENT: 2002}, + 
(CONF_LONG_PRESS, CONF_BUTTON_2): {CONF_EVENT: 2001}, + (CONF_LONG_RELEASE, CONF_BUTTON_2): {CONF_EVENT: 2003}, + (CONF_DOUBLE_PRESS, CONF_BUTTON_2): {CONF_EVENT: 2004}, +} + STYRBAR_REMOTE_MODEL = "Remote Control N2" STYRBAR_REMOTE = { (CONF_SHORT_RELEASE, CONF_DIM_UP): {CONF_EVENT: 1002}, @@ -600,6 +624,8 @@ REMOTES = { HUE_TAP_REMOTE_MODEL: HUE_TAP_REMOTE, HUE_WALL_REMOTE_MODEL: HUE_WALL_REMOTE, FRIENDS_OF_HUE_SWITCH_MODEL: FRIENDS_OF_HUE_SWITCH, + RODRET_REMOTE_MODEL: RODRET_REMOTE, + SOMRIG_REMOTE_MODEL: SOMRIG_REMOTE, STYRBAR_REMOTE_MODEL: STYRBAR_REMOTE, SYMFONISK_SOUND_CONTROLLER_MODEL: SYMFONISK_SOUND_CONTROLLER, TRADFRI_ON_OFF_SWITCH_MODEL: TRADFRI_ON_OFF_SWITCH, diff --git a/homeassistant/components/deconz/deconz_device.py b/homeassistant/components/deconz/entity.py similarity index 98% rename from homeassistant/components/deconz/deconz_device.py rename to homeassistant/components/deconz/entity.py index 8551ad33cf5..f45c35ada44 100644 --- a/homeassistant/components/deconz/deconz_device.py +++ b/homeassistant/components/deconz/entity.py @@ -138,7 +138,7 @@ class DeconzDevice[_DeviceT: _DeviceType](DeconzBase[_DeviceT], Entity): """Return True if device is available.""" if isinstance(self._device, PydeconzScene): return self.hub.available - return self.hub.available and self._device.reachable # type: ignore[union-attr] + return self.hub.available and self._device.reachable class DeconzSceneMixin(DeconzDevice[PydeconzScene]): diff --git a/homeassistant/components/deconz/fan.py b/homeassistant/components/deconz/fan.py index 67c759afeda..26e4d3328b8 100644 --- a/homeassistant/components/deconz/fan.py +++ b/homeassistant/components/deconz/fan.py @@ -7,7 +7,11 @@ from typing import Any from pydeconz.models.event import EventType from pydeconz.models.light.light import Light, LightFanSpeed -from homeassistant.components.fan import DOMAIN, FanEntity, FanEntityFeature +from homeassistant.components.fan import ( + DOMAIN as FAN_DOMAIN, + FanEntity, + FanEntityFeature, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -16,7 +20,7 @@ from homeassistant.util.percentage import ( percentage_to_ordered_list_item, ) -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub ORDERED_NAMED_FAN_SPEEDS: list[LightFanSpeed] = [ @@ -34,7 +38,7 @@ async def async_setup_entry( ) -> None: """Set up fans for deCONZ component.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[FAN_DOMAIN] = set() @callback def async_add_fan(_: EventType, fan_id: str) -> None: @@ -53,7 +57,7 @@ async def async_setup_entry( class DeconzFan(DeconzDevice[Light], FanEntity): """Representation of a deCONZ fan.""" - TYPE = DOMAIN + TYPE = FAN_DOMAIN _default_on_speed = LightFanSpeed.PERCENT_50 _attr_supported_features = ( @@ -61,7 +65,6 @@ class DeconzFan(DeconzDevice[Light], FanEntity): | FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: Light, hub: DeconzHub) -> None: """Set up fan.""" diff --git a/homeassistant/components/deconz/hub/__init__.py b/homeassistant/components/deconz/hub/__init__.py index e484bd5bb59..b816ceafad7 100644 --- a/homeassistant/components/deconz/hub/__init__.py +++ b/homeassistant/components/deconz/hub/__init__.py @@ -1,4 +1,6 @@ """Internal functionality not part of HA infrastructure.""" -from .api import 
get_deconz_api # noqa: F401 -from .hub import DeconzHub # noqa: F401 +from .api import get_deconz_api +from .hub import DeconzHub + +__all__ = ["DeconzHub", "get_deconz_api"] diff --git a/homeassistant/components/deconz/icons.json b/homeassistant/components/deconz/icons.json index 5b22daee53f..a7fb0859eec 100644 --- a/homeassistant/components/deconz/icons.json +++ b/homeassistant/components/deconz/icons.json @@ -1,7 +1,13 @@ { "services": { - "configure": "mdi:cog", - "device_refresh": "mdi:refresh", - "remove_orphaned_entries": "mdi:bookmark-remove" + "configure": { + "service": "mdi:cog" + }, + "device_refresh": { + "service": "mdi:refresh" + }, + "remove_orphaned_entries": { + "service": "mdi:bookmark-remove" + } } } diff --git a/homeassistant/components/deconz/light.py b/homeassistant/components/deconz/light.py index cb834f9eee7..b1df32efc31 100644 --- a/homeassistant/components/deconz/light.py +++ b/homeassistant/components/deconz/light.py @@ -12,13 +12,15 @@ from pydeconz.models.light.light import Light, LightAlert, LightColorMode, Light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, - DOMAIN, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, + DOMAIN as LIGHT_DOMAIN, EFFECT_COLORLOOP, FLASH_LONG, FLASH_SHORT, @@ -30,16 +32,35 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import color_hs_to_xy +from homeassistant.util.color import ( + color_hs_to_xy, + color_temperature_kelvin_to_mired, + color_temperature_mired_to_kelvin, +) from .const import DOMAIN as DECONZ_DOMAIN, POWER_PLUGS -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub DECONZ_GROUP = "is_deconz_group" EFFECT_TO_DECONZ = { EFFECT_COLORLOOP: LightEffect.COLOR_LOOP, - "None": LightEffect.NONE, + "none": LightEffect.NONE, + # Specific to Philips Hue + "candle": LightEffect.CANDLE, + "cosmos": LightEffect.COSMOS, + "enchant": LightEffect.ENCHANT, + "fire": LightEffect.FIRE, + "fireplace": LightEffect.FIREPLACE, + "glisten": LightEffect.GLISTEN, + "loop": LightEffect.LOOP, + "opal": LightEffect.OPAL, + "prism": LightEffect.PRISM, + "sparkle": LightEffect.SPARKLE, + "sunbeam": LightEffect.SUNBEAM, + "sunrise": LightEffect.SUNRISE, + "sunset": LightEffect.SUNSET, + "underwater": LightEffect.UNDERWATER, # Specific to Lidl christmas light "carnival": LightEffect.CARNIVAL, "collide": LightEffect.COLLIDE, @@ -125,7 +146,7 @@ async def async_setup_entry( ) -> None: """Set up the deCONZ lights and groups from a config entry.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[LIGHT_DOMAIN] = set() @callback def async_add_light(_: EventType, light_id: str) -> None: @@ -170,8 +191,10 @@ class DeconzBaseLight[_LightDeviceT: Group | Light]( ): """Representation of a deCONZ light.""" - TYPE = DOMAIN + TYPE = LIGHT_DOMAIN _attr_color_mode = ColorMode.UNKNOWN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN def __init__(self, device: _LightDeviceT, hub: DeconzHub) -> None: """Set up light.""" @@ -208,8 +231,17 @@ class DeconzBaseLight[_LightDeviceT: Group | Light]( if device.effect is not None: self._attr_supported_features |= LightEntityFeature.EFFECT 
self._attr_effect_list = [EFFECT_COLORLOOP] - if device.model_id in ("HG06467", "TS0601"): - self._attr_effect_list = XMAS_LIGHT_EFFECTS + + # For lights that report supported effects. + if isinstance(device, Light): + if device.supported_effects is not None: + self._attr_effect_list = [ + EFFECT_TO_DECONZ[el] + for el in device.supported_effects + if el in EFFECT_TO_DECONZ + ] + if device.model_id in ("HG06467", "TS0601"): + self._attr_effect_list = XMAS_LIGHT_EFFECTS @property def color_mode(self) -> str | None: @@ -232,9 +264,11 @@ class DeconzBaseLight[_LightDeviceT: Group | Light]( return self._device.brightness @property - def color_temp(self) -> int | None: + def color_temp_kelvin(self) -> int | None: """Return the CT color value.""" - return self._device.color_temp + if self._device.color_temp is None: + return None + return color_temperature_mired_to_kelvin(self._device.color_temp) @property def hs_color(self) -> tuple[float, float] | None: @@ -260,8 +294,10 @@ class DeconzBaseLight[_LightDeviceT: Group | Light]( if ATTR_BRIGHTNESS in kwargs: data["brightness"] = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs: - data["color_temperature"] = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + data["color_temperature"] = color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if ATTR_HS_COLOR in kwargs: if ColorMode.XY in self._attr_supported_color_modes: @@ -314,14 +350,18 @@ class DeconzLight(DeconzBaseLight[Light]): """Representation of a deCONZ light.""" @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self._device.max_color_temp or super().max_mireds + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + if max_color_temp_mireds := self._device.max_color_temp: + return color_temperature_mired_to_kelvin(max_color_temp_mireds) + return super().min_color_temp_kelvin @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self._device.min_color_temp or super().min_mireds + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + if min_color_temp_mireds := self._device.min_color_temp: + return color_temperature_mired_to_kelvin(min_color_temp_mireds) + return super().max_color_temp_kelvin @callback def async_update_callback(self) -> None: diff --git a/homeassistant/components/deconz/lock.py b/homeassistant/components/deconz/lock.py index 8729d7de793..50375e99778 100644 --- a/homeassistant/components/deconz/lock.py +++ b/homeassistant/components/deconz/lock.py @@ -8,12 +8,12 @@ from pydeconz.models.event import EventType from pydeconz.models.light.lock import Lock from pydeconz.models.sensor.door_lock import DoorLock -from homeassistant.components.lock import DOMAIN, LockEntity +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub @@ -24,7 +24,7 @@ async def async_setup_entry( ) -> None: """Set up locks for deCONZ component.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[LOCK_DOMAIN] = set() @callback def async_add_lock_from_light(_: EventType, lock_id: str) -> None: @@ 
-53,7 +53,7 @@ async def async_setup_entry( class DeconzLock(DeconzDevice[DoorLock | Lock], LockEntity): """Representation of a deCONZ lock.""" - TYPE = DOMAIN + TYPE = LOCK_DOMAIN @property def is_locked(self) -> bool: diff --git a/homeassistant/components/deconz/manifest.json b/homeassistant/components/deconz/manifest.json index 2f58cacfa2c..93ae8e392c8 100644 --- a/homeassistant/components/deconz/manifest.json +++ b/homeassistant/components/deconz/manifest.json @@ -7,8 +7,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pydeconz"], - "quality_scale": "platinum", - "requirements": ["pydeconz==116"], + "requirements": ["pydeconz==118"], "ssdp": [ { "manufacturer": "Royal Philips Electronics", diff --git a/homeassistant/components/deconz/number.py b/homeassistant/components/deconz/number.py index f29caf97b52..53461960573 100644 --- a/homeassistant/components/deconz/number.py +++ b/homeassistant/components/deconz/number.py @@ -13,7 +13,7 @@ from pydeconz.models.sensor import SensorBase as PydeconzSensorBase from pydeconz.models.sensor.presence import Presence from homeassistant.components.number import ( - DOMAIN, + DOMAIN as NUMBER_DOMAIN, NumberEntity, NumberEntityDescription, ) @@ -22,7 +22,7 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub @@ -74,7 +74,7 @@ async def async_setup_entry( ) -> None: """Set up the deCONZ number entity.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[NUMBER_DOMAIN] = set() @callback def async_add_sensor(_: EventType, sensor_id: str) -> None: @@ -99,7 +99,7 @@ async def async_setup_entry( class DeconzNumber(DeconzDevice[SensorResources], NumberEntity): """Representation of a deCONZ number entity.""" - TYPE = DOMAIN + TYPE = NUMBER_DOMAIN entity_description: DeconzNumberDescription def __init__( diff --git a/homeassistant/components/deconz/scene.py b/homeassistant/components/deconz/scene.py index f121c3107b0..70b9f3f21b5 100644 --- a/homeassistant/components/deconz/scene.py +++ b/homeassistant/components/deconz/scene.py @@ -6,12 +6,12 @@ from typing import Any from pydeconz.models.event import EventType -from homeassistant.components.scene import DOMAIN, Scene +from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN, Scene from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .deconz_device import DeconzSceneMixin +from .entity import DeconzSceneMixin from .hub import DeconzHub @@ -22,7 +22,7 @@ async def async_setup_entry( ) -> None: """Set up scenes for deCONZ integration.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[SCENE_DOMAIN] = set() @callback def async_add_scene(_: EventType, scene_id: str) -> None: @@ -39,7 +39,7 @@ async def async_setup_entry( class DeconzScene(DeconzSceneMixin, Scene): """Representation of a deCONZ scene.""" - TYPE = DOMAIN + TYPE = SCENE_DOMAIN async def async_activate(self, **kwargs: Any) -> None: """Activate the scene.""" diff --git a/homeassistant/components/deconz/select.py b/homeassistant/components/deconz/select.py index 7f3f8cca060..cbd96a4faf9 100644 --- a/homeassistant/components/deconz/select.py +++ b/homeassistant/components/deconz/select.py @@ 
-11,13 +11,13 @@ from pydeconz.models.sensor.presence import ( PresenceConfigTriggerDistance, ) -from homeassistant.components.select import DOMAIN, SelectEntity +from homeassistant.components.select import DOMAIN as SELECT_DOMAIN, SelectEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub SENSITIVITY_TO_DECONZ = { @@ -35,7 +35,7 @@ async def async_setup_entry( ) -> None: """Set up the deCONZ button entity.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[SELECT_DOMAIN] = set() @callback def async_add_air_purifier_sensor(_: EventType, sensor_id: str) -> None: @@ -85,7 +85,7 @@ class DeconzAirPurifierFanMode(DeconzDevice[AirPurifier], SelectEntity): AirPurifierFanMode.SPEED_5.value, ] - TYPE = DOMAIN + TYPE = SELECT_DOMAIN @property def current_option(self) -> str: @@ -113,7 +113,7 @@ class DeconzPresenceDeviceModeSelect(DeconzDevice[Presence], SelectEntity): PresenceConfigDeviceMode.UNDIRECTED.value, ] - TYPE = DOMAIN + TYPE = SELECT_DOMAIN @property def current_option(self) -> str | None: @@ -140,7 +140,7 @@ class DeconzPresenceSensitivitySelect(DeconzDevice[Presence], SelectEntity): _attr_entity_category = EntityCategory.CONFIG _attr_options = list(SENSITIVITY_TO_DECONZ) - TYPE = DOMAIN + TYPE = SELECT_DOMAIN @property def current_option(self) -> str | None: @@ -171,7 +171,7 @@ class DeconzPresenceTriggerDistanceSelect(DeconzDevice[Presence], SelectEntity): PresenceConfigTriggerDistance.NEAR.value, ] - TYPE = DOMAIN + TYPE = SELECT_DOMAIN @property def current_option(self) -> str | None: diff --git a/homeassistant/components/deconz/sensor.py b/homeassistant/components/deconz/sensor.py index e67c0129147..241ba015c67 100644 --- a/homeassistant/components/deconz/sensor.py +++ b/homeassistant/components/deconz/sensor.py @@ -10,6 +10,7 @@ from typing import Generic, TypeVar from pydeconz.interfaces.sensors import SensorResources from pydeconz.models.event import EventType from pydeconz.models.sensor import SensorBase as PydeconzSensorBase +from pydeconz.models.sensor.air_purifier import AirPurifier from pydeconz.models.sensor.air_quality import AirQuality from pydeconz.models.sensor.carbon_dioxide import CarbonDioxide from pydeconz.models.sensor.consumption import Consumption @@ -27,7 +28,7 @@ from pydeconz.models.sensor.temperature import Temperature from pydeconz.models.sensor.time import Time from homeassistant.components.sensor import ( - DOMAIN, + DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -47,6 +48,7 @@ from homeassistant.const import ( UnitOfPower, UnitOfPressure, UnitOfTemperature, + UnitOfTime, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -54,7 +56,7 @@ from homeassistant.helpers.typing import StateType import homeassistant.util.dt as dt_util from .const import ATTR_DARK, ATTR_ON -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub PROVIDES_EXTRA_ATTRIBUTES = ( @@ -77,6 +79,7 @@ ATTR_EVENT_ID = "event_id" T = TypeVar( "T", + AirPurifier, AirQuality, CarbonDioxide, Consumption, @@ -108,6 +111,19 @@ class DeconzSensorDescription(Generic[T], SensorEntityDescription): ENTITY_DESCRIPTIONS: 
tuple[DeconzSensorDescription, ...] = ( + DeconzSensorDescription[AirPurifier]( + key="air_purifier_filter_run_time", + supported_fn=lambda device: True, + update_key="filterruntime", + name_suffix="Filter time", + value_fn=lambda device: device.filter_run_time, + instance_check=AirPurifier, + device_class=SensorDeviceClass.DURATION, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.DAYS, + suggested_display_precision=1, + ), DeconzSensorDescription[AirQuality]( key="air_quality", supported_fn=lambda device: device.supports_air_quality, @@ -320,7 +336,7 @@ async def async_setup_entry( ) -> None: """Set up the deCONZ sensors.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[SENSOR_DOMAIN] = set() known_device_entities: dict[str, set[str]] = { description.key: set() @@ -377,7 +393,7 @@ async def async_setup_entry( class DeconzSensor(DeconzDevice[SensorResources], SensorEntity): """Representation of a deCONZ sensor.""" - TYPE = DOMAIN + TYPE = SENSOR_DOMAIN entity_description: DeconzSensorDescription def __init__( diff --git a/homeassistant/components/deconz/siren.py b/homeassistant/components/deconz/siren.py index deb1c98f151..982a0bd1b9e 100644 --- a/homeassistant/components/deconz/siren.py +++ b/homeassistant/components/deconz/siren.py @@ -9,7 +9,7 @@ from pydeconz.models.light.siren import Siren from homeassistant.components.siren import ( ATTR_DURATION, - DOMAIN, + DOMAIN as SIREN_DOMAIN, SirenEntity, SirenEntityFeature, ) @@ -17,7 +17,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub @@ -28,7 +28,7 @@ async def async_setup_entry( ) -> None: """Set up sirens for deCONZ component.""" hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[SIREN_DOMAIN] = set() @callback def async_add_siren(_: EventType, siren_id: str) -> None: @@ -45,7 +45,7 @@ async def async_setup_entry( class DeconzSiren(DeconzDevice[Siren], SirenEntity): """Representation of a deCONZ siren.""" - TYPE = DOMAIN + TYPE = SIREN_DOMAIN _attr_supported_features = ( SirenEntityFeature.TURN_ON | SirenEntityFeature.TURN_OFF diff --git a/homeassistant/components/deconz/strings.json b/homeassistant/components/deconz/strings.json index c06a07e6ce5..52059aa8785 100644 --- a/homeassistant/components/deconz/strings.json +++ b/homeassistant/components/deconz/strings.json @@ -18,7 +18,7 @@ }, "link": { "title": "Link with deCONZ", - "description": "Unlock your deCONZ gateway to register with Home Assistant.\n\n1. Go to deCONZ Settings > Gateway > Advanced\n2. Press \"Authenticate app\" button" + "description": "Unlock your deCONZ gateway to register with Home Assistant.\n\n1. Go to deCONZ Settings > Gateway > Advanced\n2. 
Select the **Authenticate app** button" }, "hassio_confirm": { "title": "deCONZ Zigbee gateway via Home Assistant add-on", diff --git a/homeassistant/components/deconz/switch.py b/homeassistant/components/deconz/switch.py index e176d9c7710..c79cd7b28db 100644 --- a/homeassistant/components/deconz/switch.py +++ b/homeassistant/components/deconz/switch.py @@ -7,13 +7,13 @@ from typing import Any from pydeconz.models.event import EventType from pydeconz.models.light.light import Light -from homeassistant.components.switch import DOMAIN, SwitchEntity +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import POWER_PLUGS -from .deconz_device import DeconzDevice +from .entity import DeconzDevice from .hub import DeconzHub @@ -27,7 +27,7 @@ async def async_setup_entry( Switches are based on the same device class as lights in deCONZ. """ hub = DeconzHub.get_hub(hass, config_entry) - hub.entities[DOMAIN] = set() + hub.entities[SWITCH_DOMAIN] = set() @callback def async_add_switch(_: EventType, switch_id: str) -> None: @@ -46,7 +46,7 @@ async def async_setup_entry( class DeconzPowerPlug(DeconzDevice[Light], SwitchEntity): """Representation of a deCONZ power plug.""" - TYPE = DOMAIN + TYPE = SWITCH_DOMAIN @property def is_on(self) -> bool: diff --git a/homeassistant/components/decora/manifest.json b/homeassistant/components/decora/manifest.json index bef42f8b4ab..64dc01d09a1 100644 --- a/homeassistant/components/decora/manifest.json +++ b/homeassistant/components/decora/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/decora", "iot_class": "local_polling", "loggers": ["bluepy", "decora"], + "quality_scale": "legacy", "requirements": ["bluepy==1.3.0", "decora==0.6"] } diff --git a/homeassistant/components/decora_wifi/manifest.json b/homeassistant/components/decora_wifi/manifest.json index 0bead527e78..25892dc3e64 100644 --- a/homeassistant/components/decora_wifi/manifest.json +++ b/homeassistant/components/decora_wifi/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/decora_wifi", "iot_class": "cloud_polling", "loggers": ["decora_wifi"], + "quality_scale": "legacy", "requirements": ["decora-wifi==1.4"] } diff --git a/homeassistant/components/default_config/manifest.json b/homeassistant/components/default_config/manifest.json index cbadb704a42..8299fe43f09 100644 --- a/homeassistant/components/default_config/manifest.json +++ b/homeassistant/components/default_config/manifest.json @@ -9,10 +9,10 @@ "conversation", "dhcp", "energy", + "go2rtc", "history", "homeassistant_alerts", "logbook", - "map", "media_source", "mobile_app", "my", diff --git a/homeassistant/components/delijn/manifest.json b/homeassistant/components/delijn/manifest.json index d25dab4234e..b87242d6e94 100644 --- a/homeassistant/components/delijn/manifest.json +++ b/homeassistant/components/delijn/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/delijn", "iot_class": "cloud_polling", "loggers": ["pydelijn"], + "quality_scale": "legacy", "requirements": ["pydelijn==1.1.0"] } diff --git a/homeassistant/components/deluge/__init__.py b/homeassistant/components/deluge/__init__.py index 62367e81af4..f4608b37006 100644 --- a/homeassistant/components/deluge/__init__.py +++ 
b/homeassistant/components/deluge/__init__.py @@ -17,10 +17,8 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CONF_WEB_PORT, DEFAULT_NAME, DOMAIN +from .const import CONF_WEB_PORT from .coordinator import DelugeDataUpdateCoordinator PLATFORMS = [Platform.SENSOR, Platform.SWITCH] @@ -61,24 +59,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: DelugeConfigEntry) -> bo async def async_unload_entry(hass: HomeAssistant, entry: DelugeConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -class DelugeEntity(CoordinatorEntity[DelugeDataUpdateCoordinator]): - """Representation of a Deluge entity.""" - - _attr_has_entity_name = True - - def __init__(self, coordinator: DelugeDataUpdateCoordinator) -> None: - """Initialize a Deluge entity.""" - super().__init__(coordinator) - self._server_unique_id = coordinator.config_entry.entry_id - self._attr_device_info = DeviceInfo( - configuration_url=( - f"http://{coordinator.api.host}:{coordinator.api.web_port}" - ), - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, coordinator.config_entry.entry_id)}, - manufacturer=DEFAULT_NAME, - name=DEFAULT_NAME, - sw_version=coordinator.api.deluge_version, - ) diff --git a/homeassistant/components/deluge/config_flow.py b/homeassistant/components/deluge/config_flow.py index 0a04a17a991..d58f23464d1 100644 --- a/homeassistant/components/deluge/config_flow.py +++ b/homeassistant/components/deluge/config_flow.py @@ -10,13 +10,7 @@ from deluge_client.client import DelugeRPCClient import voluptuous as vol from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult -from homeassistant.const import ( - CONF_HOST, - CONF_PASSWORD, - CONF_PORT, - CONF_SOURCE, - CONF_USERNAME, -) +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME import homeassistant.helpers.config_validation as cv from .const import ( @@ -44,12 +38,10 @@ class DelugeFlowHandler(ConfigFlow, domain=DOMAIN): user_input[CONF_HOST] == entry.data[CONF_HOST] and user_input[CONF_PORT] == entry.data[CONF_PORT] ): - if self.context.get(CONF_SOURCE) == SOURCE_REAUTH: - self.hass.config_entries.async_update_entry( + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( entry, data=user_input ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_abort(reason="already_configured") return self.async_create_entry( title=DEFAULT_NAME, diff --git a/homeassistant/components/deluge/const.py b/homeassistant/components/deluge/const.py index 91e08da3470..a76817519da 100644 --- a/homeassistant/components/deluge/const.py +++ b/homeassistant/components/deluge/const.py @@ -1,17 +1,45 @@ """Constants for the Deluge integration.""" +import enum import logging from typing import Final CONF_WEB_PORT = "web_port" -CURRENT_STATUS = "current_status" -DATA_KEYS = ["upload_rate", "download_rate", "dht_upload_rate", "dht_download_rate"] DEFAULT_NAME = "Deluge" DEFAULT_RPC_PORT = 58846 DEFAULT_WEB_PORT = 8112 DOMAIN: Final = "deluge" -DOWNLOAD_SPEED = "download_speed" - LOGGER = logging.getLogger(__package__) -UPLOAD_SPEED = "upload_speed" + +class 
DelugeGetSessionStatusKeys(enum.Enum): + """Enum representing the keys that get passed into the Deluge RPC `core.get_session_status` xml rpc method. + + You can call `core.get_session_status` with no keys (so an empty list in deluge-client.DelugeRPCClient.call) + to get the full list of possible keys, but it seems to basically be all of the session statistics + listed on this page: https://www.rasterbar.com/products/libtorrent/manual-ref.html#session-statistics + and a few others. + + There is also a list of deprecated keys that deluge will translate for you and issue a warning in the log: + https://github.com/deluge-torrent/deluge/blob/7f3f7f69ee78610e95bea07d99f699e9310c4e08/deluge/core/core.py#L58 + + """ + + DHT_DOWNLOAD_RATE = "dht_download_rate" + DHT_UPLOAD_RATE = "dht_upload_rate" + DOWNLOAD_RATE = "download_rate" + UPLOAD_RATE = "upload_rate" + + +class DelugeSensorType(enum.StrEnum): + """Enum that distinguishes the different sensor types that the Deluge integration has. + + This is mainly used to avoid passing strings around and to distinguish between similarly + named strings in `DelugeGetSessionStatusKeys`. + """ + + CURRENT_STATUS_SENSOR = "current_status" + DOWNLOAD_SPEED_SENSOR = "download_speed" + UPLOAD_SPEED_SENSOR = "upload_speed" + PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR = "protocol_traffic_upload_speed" + PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR = "protocol_traffic_download_speed" diff --git a/homeassistant/components/deluge/coordinator.py b/homeassistant/components/deluge/coordinator.py index 11557561be8..7f4bf9e884e 100644 --- a/homeassistant/components/deluge/coordinator.py +++ b/homeassistant/components/deluge/coordinator.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DATA_KEYS, LOGGER +from .const import LOGGER, DelugeGetSessionStatusKeys if TYPE_CHECKING: from .
import DelugeConfigEntry @@ -46,7 +46,7 @@ class DelugeDataUpdateCoordinator( _data = await self.hass.async_add_executor_job( self.api.call, "core.get_session_status", - DATA_KEYS, + [iter_member.value for iter_member in list(DelugeGetSessionStatusKeys)], ) data[Platform.SENSOR] = {k.decode(): v for k, v in _data.items()} data[Platform.SWITCH] = await self.hass.async_add_executor_job( diff --git a/homeassistant/components/deluge/entity.py b/homeassistant/components/deluge/entity.py new file mode 100644 index 00000000000..5873abb3199 --- /dev/null +++ b/homeassistant/components/deluge/entity.py @@ -0,0 +1,30 @@ +"""The Deluge integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DEFAULT_NAME, DOMAIN +from .coordinator import DelugeDataUpdateCoordinator + + +class DelugeEntity(CoordinatorEntity[DelugeDataUpdateCoordinator]): + """Representation of a Deluge entity.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: DelugeDataUpdateCoordinator) -> None: + """Initialize a Deluge entity.""" + super().__init__(coordinator) + self._server_unique_id = coordinator.config_entry.entry_id + self._attr_device_info = DeviceInfo( + configuration_url=( + f"http://{coordinator.api.host}:{coordinator.api.web_port}" + ), + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, coordinator.config_entry.entry_id)}, + manufacturer=DEFAULT_NAME, + name=DEFAULT_NAME, + sw_version=coordinator.api.deluge_version, + ) diff --git a/homeassistant/components/deluge/sensor.py b/homeassistant/components/deluge/sensor.py index fd4bf36889c..5ebf3d01eeb 100644 --- a/homeassistant/components/deluge/sensor.py +++ b/homeassistant/components/deluge/sensor.py @@ -17,16 +17,21 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import DelugeConfigEntry, DelugeEntity -from .const import CURRENT_STATUS, DATA_KEYS, DOWNLOAD_SPEED, UPLOAD_SPEED +from . 
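The coordinator change above passes the `DelugeGetSessionStatusKeys` values straight to `core.get_session_status`, and the enum docstring notes that an empty key list returns every available statistic. A minimal stand-alone sketch of that RPC call (host, port and credentials are placeholders, not values from this changeset):

from deluge_client.client import DelugeRPCClient

from homeassistant.components.deluge.const import DelugeGetSessionStatusKeys

# Placeholder connection details, for illustration only.
client = DelugeRPCClient("192.168.1.10", 58846, "localclient", "secret")
client.connect()

# An empty key list asks the daemon for every available session-status key.
all_stats = client.call("core.get_session_status", [])

# The coordinator only requests the keys named in DelugeGetSessionStatusKeys.
wanted = [member.value for member in DelugeGetSessionStatusKeys]
stats = client.call("core.get_session_status", wanted)

# deluge-client returns bytes keys; the coordinator decodes them with k.decode().
print({key.decode(): value for key, value in stats.items()})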
import DelugeConfigEntry +from .const import DelugeGetSessionStatusKeys, DelugeSensorType from .coordinator import DelugeDataUpdateCoordinator +from .entity import DelugeEntity def get_state(data: dict[str, float], key: str) -> str | float: """Get current download/upload state.""" - upload = data[DATA_KEYS[0]] - data[DATA_KEYS[2]] - download = data[DATA_KEYS[1]] - data[DATA_KEYS[3]] - if key == CURRENT_STATUS: + upload = data[DelugeGetSessionStatusKeys.UPLOAD_RATE.value] + download = data[DelugeGetSessionStatusKeys.DOWNLOAD_RATE.value] + protocol_upload = data[DelugeGetSessionStatusKeys.DHT_UPLOAD_RATE.value] + protocol_download = data[DelugeGetSessionStatusKeys.DHT_DOWNLOAD_RATE.value] + + # if key is CURRENT_STATUS, we just return whether we are uploading / downloading / idle + if key == DelugeSensorType.CURRENT_STATUS_SENSOR: if upload > 0 and download > 0: return "seeding_and_downloading" if upload > 0 and download == 0: @@ -34,7 +39,20 @@ def get_state(data: dict[str, float], key: str) -> str | float: if upload == 0 and download > 0: return "downloading" return STATE_IDLE - kb_spd = float(upload if key == UPLOAD_SPEED else download) / 1024 + + # if not, return the transfer rate for the given key + rate = 0.0 + if key == DelugeSensorType.DOWNLOAD_SPEED_SENSOR: + rate = download + elif key == DelugeSensorType.UPLOAD_SPEED_SENSOR: + rate = upload + elif key == DelugeSensorType.PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR: + rate = protocol_download + else: + rate = protocol_upload + + # convert to KiB/s and round + kb_spd = rate / 1024 return round(kb_spd, 2 if kb_spd < 0.1 else 1) @@ -47,27 +65,51 @@ class DelugeSensorEntityDescription(SensorEntityDescription): SENSOR_TYPES: tuple[DelugeSensorEntityDescription, ...] = ( DelugeSensorEntityDescription( - key=CURRENT_STATUS, + key=DelugeSensorType.CURRENT_STATUS_SENSOR.value, translation_key="status", - value=lambda data: get_state(data, CURRENT_STATUS), + value=lambda data: get_state( + data, DelugeSensorType.CURRENT_STATUS_SENSOR.value + ), device_class=SensorDeviceClass.ENUM, options=["seeding_and_downloading", "seeding", "downloading", "idle"], ), DelugeSensorEntityDescription( - key=DOWNLOAD_SPEED, - translation_key="download_speed", + key=DelugeSensorType.DOWNLOAD_SPEED_SENSOR.value, + translation_key=DelugeSensorType.DOWNLOAD_SPEED_SENSOR.value, device_class=SensorDeviceClass.DATA_RATE, native_unit_of_measurement=UnitOfDataRate.KILOBYTES_PER_SECOND, state_class=SensorStateClass.MEASUREMENT, - value=lambda data: get_state(data, DOWNLOAD_SPEED), + value=lambda data: get_state( + data, DelugeSensorType.DOWNLOAD_SPEED_SENSOR.value + ), ), DelugeSensorEntityDescription( - key=UPLOAD_SPEED, - translation_key="upload_speed", + key=DelugeSensorType.UPLOAD_SPEED_SENSOR.value, + translation_key=DelugeSensorType.UPLOAD_SPEED_SENSOR.value, device_class=SensorDeviceClass.DATA_RATE, native_unit_of_measurement=UnitOfDataRate.KILOBYTES_PER_SECOND, state_class=SensorStateClass.MEASUREMENT, - value=lambda data: get_state(data, UPLOAD_SPEED), + value=lambda data: get_state(data, DelugeSensorType.UPLOAD_SPEED_SENSOR.value), + ), + DelugeSensorEntityDescription( + key=DelugeSensorType.PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR.value, + translation_key=DelugeSensorType.PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR.value, + device_class=SensorDeviceClass.DATA_RATE, + native_unit_of_measurement=UnitOfDataRate.KILOBYTES_PER_SECOND, + state_class=SensorStateClass.MEASUREMENT, + value=lambda data: get_state( + data, DelugeSensorType.PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR.value + ), 
+ ), + DelugeSensorEntityDescription( + key=DelugeSensorType.PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR.value, + translation_key=DelugeSensorType.PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR.value, + device_class=SensorDeviceClass.DATA_RATE, + native_unit_of_measurement=UnitOfDataRate.KILOBYTES_PER_SECOND, + state_class=SensorStateClass.MEASUREMENT, + value=lambda data: get_state( + data, DelugeSensorType.PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR.value + ), ), ) diff --git a/homeassistant/components/deluge/strings.json b/homeassistant/components/deluge/strings.json index 52706f39894..6adde8ef7df 100644 --- a/homeassistant/components/deluge/strings.json +++ b/homeassistant/components/deluge/strings.json @@ -17,10 +17,12 @@ }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { @@ -37,6 +39,12 @@ "download_speed": { "name": "Download speed" }, + "protocol_traffic_download_speed": { + "name": "Protocol traffic download speed" + }, + "protocol_traffic_upload_speed": { + "name": "Protocol traffic upload speed" + }, "upload_speed": { "name": "Upload speed" } diff --git a/homeassistant/components/deluge/switch.py b/homeassistant/components/deluge/switch.py index cfae0244ebd..d81f02eee29 100644 --- a/homeassistant/components/deluge/switch.py +++ b/homeassistant/components/deluge/switch.py @@ -9,8 +9,9 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DelugeConfigEntry, DelugeEntity +from . 
import DelugeConfigEntry from .coordinator import DelugeDataUpdateCoordinator +from .entity import DelugeEntity async def async_setup_entry( diff --git a/homeassistant/components/demo/alarm_control_panel.py b/homeassistant/components/demo/alarm_control_panel.py index f9b791668e8..d34830042d7 100644 --- a/homeassistant/components/demo/alarm_control_panel.py +++ b/homeassistant/components/demo/alarm_control_panel.py @@ -4,20 +4,10 @@ from __future__ import annotations import datetime +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.manual.alarm_control_panel import ManualAlarm from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_ARMING_TIME, - CONF_DELAY_TIME, - CONF_TRIGGER_TIME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.const import CONF_ARMING_TIME, CONF_DELAY_TIME, CONF_TRIGGER_TIME from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -39,36 +29,36 @@ async def async_setup_entry( True, False, { - STATE_ALARM_ARMED_AWAY: { + AlarmControlPanelState.ARMED_AWAY: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_ARMED_HOME: { + AlarmControlPanelState.ARMED_HOME: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_ARMED_NIGHT: { + AlarmControlPanelState.ARMED_NIGHT: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_ARMED_VACATION: { + AlarmControlPanelState.ARMED_VACATION: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_DISARMED: { + AlarmControlPanelState.DISARMED: { CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_ARMED_CUSTOM_BYPASS: { + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_TRIGGERED: { + AlarmControlPanelState.TRIGGERED: { CONF_ARMING_TIME: datetime.timedelta(seconds=5) }, }, diff --git a/homeassistant/components/demo/climate.py b/homeassistant/components/demo/climate.py index ff0ed5746ca..d5b763caa5a 100644 --- a/homeassistant/components/demo/climate.py +++ b/homeassistant/components/demo/climate.py @@ -43,6 +43,7 @@ async def async_setup_entry( target_humidity=None, current_humidity=None, swing_mode=None, + swing_horizontal_mode=None, hvac_mode=HVACMode.HEAT, hvac_action=HVACAction.HEATING, target_temp_high=None, @@ -60,6 +61,7 @@ async def async_setup_entry( target_humidity=67.4, current_humidity=54.2, swing_mode="off", + swing_horizontal_mode="auto", hvac_mode=HVACMode.COOL, hvac_action=HVACAction.COOLING, target_temp_high=None, @@ -78,6 +80,7 @@ async def async_setup_entry( target_humidity=None, current_humidity=None, swing_mode="auto", + swing_horizontal_mode=None, hvac_mode=HVACMode.HEAT_COOL, hvac_action=None, target_temp_high=24, @@ -95,7 +98,6 @@ 
class DemoClimate(ClimateEntity): _attr_name = None _attr_should_poll = False _attr_translation_key = "ubercool" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -109,6 +111,7 @@ class DemoClimate(ClimateEntity): target_humidity: float | None, current_humidity: float | None, swing_mode: str | None, + swing_horizontal_mode: str | None, hvac_mode: HVACMode, hvac_action: HVACAction | None, target_temp_high: float | None, @@ -129,6 +132,8 @@ class DemoClimate(ClimateEntity): self._attr_supported_features |= ClimateEntityFeature.TARGET_HUMIDITY if swing_mode is not None: self._attr_supported_features |= ClimateEntityFeature.SWING_MODE + if swing_horizontal_mode is not None: + self._attr_supported_features |= ClimateEntityFeature.SWING_HORIZONTAL_MODE if HVACMode.HEAT_COOL in hvac_modes or HVACMode.AUTO in hvac_modes: self._attr_supported_features |= ( ClimateEntityFeature.TARGET_TEMPERATURE_RANGE @@ -147,9 +152,11 @@ class DemoClimate(ClimateEntity): self._hvac_action = hvac_action self._hvac_mode = hvac_mode self._current_swing_mode = swing_mode + self._current_swing_horizontal_mode = swing_horizontal_mode self._fan_modes = ["on_low", "on_high", "auto_low", "auto_high", "off"] self._hvac_modes = hvac_modes self._swing_modes = ["auto", "1", "2", "3", "off"] + self._swing_horizontal_modes = ["auto", "rangefull", "off"] self._target_temperature_high = target_temp_high self._target_temperature_low = target_temp_low self._attr_device_info = DeviceInfo( @@ -242,6 +249,16 @@ class DemoClimate(ClimateEntity): """List of available swing modes.""" return self._swing_modes + @property + def swing_horizontal_mode(self) -> str | None: + """Return the swing setting.""" + return self._current_swing_horizontal_mode + + @property + def swing_horizontal_modes(self) -> list[str]: + """List of available swing modes.""" + return self._swing_horizontal_modes + async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperatures.""" if kwargs.get(ATTR_TEMPERATURE) is not None: @@ -266,6 +283,11 @@ class DemoClimate(ClimateEntity): self._current_swing_mode = swing_mode self.async_write_ha_state() + async def async_set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None: + """Set new swing mode.""" + self._current_swing_horizontal_mode = swing_horizontal_mode + self.async_write_ha_state() + async def async_set_fan_mode(self, fan_mode: str) -> None: """Set new fan mode.""" self._current_fan_mode = fan_mode diff --git a/homeassistant/components/demo/config_flow.py b/homeassistant/components/demo/config_flow.py index 468d9cb042b..53c1678aa81 100644 --- a/homeassistant/components/demo/config_flow.py +++ b/homeassistant/components/demo/config_flow.py @@ -37,12 +37,9 @@ class DemoConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - - return self.async_create_entry(title="Demo", data=import_info) + return self.async_create_entry(title="Demo", data=import_data) class OptionsFlowHandler(OptionsFlow): @@ -50,7 +47,6 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.options = 
dict(config_entry.options) async def async_step_init( diff --git a/homeassistant/components/demo/fan.py b/homeassistant/components/demo/fan.py index 064ee3bb4f7..42e7f9e2434 100644 --- a/homeassistant/components/demo/fan.py +++ b/homeassistant/components/demo/fan.py @@ -100,7 +100,6 @@ class BaseDemoFan(FanEntity): _attr_should_poll = False _attr_translation_key = "demo" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/demo/icons.json b/homeassistant/components/demo/icons.json index d9e1d405490..eafcbb9161a 100644 --- a/homeassistant/components/demo/icons.json +++ b/homeassistant/components/demo/icons.json @@ -19,6 +19,13 @@ "auto": "mdi:arrow-oscillating", "off": "mdi:arrow-oscillating-off" } + }, + "swing_horizontal_mode": { + "state": { + "rangefull": "mdi:pan-horizontal", + "auto": "mdi:compare-horizontal", + "off": "mdi:arrow-oscillating-off" + } } } } @@ -75,6 +82,8 @@ } }, "services": { - "randomize_device_tracker_data": "mdi:dice-multiple" + "randomize_device_tracker_data": { + "service": "mdi:dice-multiple" + } } } diff --git a/homeassistant/components/demo/light.py b/homeassistant/components/demo/light.py index c859fef3b76..ec98a056b3e 100644 --- a/homeassistant/components/demo/light.py +++ b/homeassistant/components/demo/light.py @@ -7,12 +7,14 @@ from typing import Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, ATTR_WHITE, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, LightEntityFeature, @@ -28,7 +30,7 @@ LIGHT_COLORS = [(56, 86), (345, 75)] LIGHT_EFFECT_LIST = ["rainbow", "none"] -LIGHT_TEMPS = [240, 380] +LIGHT_TEMPS = [4166, 2631] SUPPORT_DEMO = {ColorMode.HS, ColorMode.COLOR_TEMP} SUPPORT_DEMO_HS_WHITE = {ColorMode.HS, ColorMode.WHITE} @@ -100,6 +102,9 @@ class DemoLight(LightEntity): _attr_name = None _attr_should_poll = False + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + def __init__( self, unique_id: str, @@ -185,8 +190,8 @@ class DemoLight(LightEntity): return self._rgbww_color @property - def color_temp(self) -> int: - """Return the CT color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" return self._ct @property @@ -216,9 +221,9 @@ class DemoLight(LightEntity): if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs: + if ATTR_COLOR_TEMP_KELVIN in kwargs: self._color_mode = ColorMode.COLOR_TEMP - self._ct = kwargs[ATTR_COLOR_TEMP] + self._ct = kwargs[ATTR_COLOR_TEMP_KELVIN] if ATTR_EFFECT in kwargs: self._effect = kwargs[ATTR_EFFECT] diff --git a/homeassistant/components/demo/lock.py b/homeassistant/components/demo/lock.py index c17e10edd85..1f25445af7f 100644 --- a/homeassistant/components/demo/lock.py +++ b/homeassistant/components/demo/lock.py @@ -5,17 +5,8 @@ from __future__ import annotations import asyncio from typing import Any -from homeassistant.components.lock import LockEntity, LockEntityFeature +from homeassistant.components.lock import LockEntity, LockEntityFeature, LockState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import 
AddEntitiesCallback @@ -30,10 +21,10 @@ async def async_setup_entry( """Set up the Demo config entry.""" async_add_entities( [ - DemoLock("Front Door", STATE_LOCKED), - DemoLock("Kitchen Door", STATE_UNLOCKED), - DemoLock("Poorly Installed Door", STATE_UNLOCKED, False, True), - DemoLock("Openable Lock", STATE_LOCKED, True), + DemoLock("Front Door", LockState.LOCKED), + DemoLock("Kitchen Door", LockState.UNLOCKED), + DemoLock("Poorly Installed Door", LockState.UNLOCKED, False, True), + DemoLock("Openable Lock", LockState.LOCKED, True), ] ) @@ -61,56 +52,56 @@ class DemoLock(LockEntity): @property def is_locking(self) -> bool: """Return true if lock is locking.""" - return self._state == STATE_LOCKING + return self._state == LockState.LOCKING @property def is_unlocking(self) -> bool: """Return true if lock is unlocking.""" - return self._state == STATE_UNLOCKING + return self._state == LockState.UNLOCKING @property def is_jammed(self) -> bool: """Return true if lock is jammed.""" - return self._state == STATE_JAMMED + return self._state == LockState.JAMMED @property def is_locked(self) -> bool: """Return true if lock is locked.""" - return self._state == STATE_LOCKED + return self._state == LockState.LOCKED @property def is_open(self) -> bool: """Return true if lock is open.""" - return self._state == STATE_OPEN + return self._state == LockState.OPEN @property def is_opening(self) -> bool: """Return true if lock is opening.""" - return self._state == STATE_OPENING + return self._state == LockState.OPENING async def async_lock(self, **kwargs: Any) -> None: """Lock the device.""" - self._state = STATE_LOCKING + self._state = LockState.LOCKING self.async_write_ha_state() await asyncio.sleep(LOCK_UNLOCK_DELAY) if self._jam_on_operation: - self._state = STATE_JAMMED + self._state = LockState.JAMMED else: - self._state = STATE_LOCKED + self._state = LockState.LOCKED self.async_write_ha_state() async def async_unlock(self, **kwargs: Any) -> None: """Unlock the device.""" - self._state = STATE_UNLOCKING + self._state = LockState.UNLOCKING self.async_write_ha_state() await asyncio.sleep(LOCK_UNLOCK_DELAY) - self._state = STATE_UNLOCKED + self._state = LockState.UNLOCKED self.async_write_ha_state() async def async_open(self, **kwargs: Any) -> None: """Open the door latch.""" - self._state = STATE_OPENING + self._state = LockState.OPENING self.async_write_ha_state() await asyncio.sleep(LOCK_UNLOCK_DELAY) - self._state = STATE_OPEN + self._state = LockState.OPEN self.async_write_ha_state() diff --git a/homeassistant/components/demo/manifest.json b/homeassistant/components/demo/manifest.json index 887a82a0078..be3456b5619 100644 --- a/homeassistant/components/demo/manifest.json +++ b/homeassistant/components/demo/manifest.json @@ -5,5 +5,6 @@ "dependencies": ["conversation", "group", "zone"], "documentation": "https://www.home-assistant.io/integrations/demo", "iot_class": "calculated", - "quality_scale": "internal" + "quality_scale": "internal", + "single_config_entry": true } diff --git a/homeassistant/components/demo/notify.py b/homeassistant/components/demo/notify.py index 9aab2572957..7524517e6e8 100644 --- a/homeassistant/components/demo/notify.py +++ b/homeassistant/components/demo/notify.py @@ -2,7 +2,11 @@ from __future__ import annotations -from homeassistant.components.notify import DOMAIN, NotifyEntity, NotifyEntityFeature +from homeassistant.components.notify import ( + DOMAIN as NOTIFY_DOMAIN, + NotifyEntity, + NotifyEntityFeature, +) from homeassistant.config_entries import ConfigEntry from 
homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo @@ -35,7 +39,7 @@ class DemoNotifyEntity(NotifyEntity): self._attr_unique_id = unique_id self._attr_supported_features = NotifyEntityFeature.TITLE self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, unique_id)}, + identifiers={(NOTIFY_DOMAIN, unique_id)}, name=device_name, ) diff --git a/homeassistant/components/demo/strings.json b/homeassistant/components/demo/strings.json index aa5554e9fcc..da72b33d3ca 100644 --- a/homeassistant/components/demo/strings.json +++ b/homeassistant/components/demo/strings.json @@ -42,6 +42,13 @@ "auto": "Auto", "off": "[%key:common::state::off%]" } + }, + "swing_horizontal_mode": { + "state": { + "rangefull": "Full range", + "auto": "Auto", + "off": "[%key:common::state::off%]" + } } } } diff --git a/homeassistant/components/demo/update.py b/homeassistant/components/demo/update.py index 7e53f5ce8ca..3fa037f6b02 100644 --- a/homeassistant/components/demo/update.py +++ b/homeassistant/components/demo/update.py @@ -75,6 +75,21 @@ async def async_setup_entry( support_release_notes=True, release_url="https://www.example.com/release/1.93.3", device_class=UpdateDeviceClass.FIRMWARE, + update_steps=10, + ), + DemoUpdate( + unique_id="update_support_decimal_progress", + device_name="Demo Update with Decimal Progress", + title="Philips Lamps Firmware", + installed_version="1.93.3", + latest_version="1.94.2", + support_progress=True, + release_summary="Added support for effects", + support_release_notes=True, + release_url="https://www.example.com/release/1.93.3", + device_class=UpdateDeviceClass.FIRMWARE, + display_precision=2, + update_steps=1000, ), ] ) @@ -106,10 +121,13 @@ class DemoUpdate(UpdateEntity): support_install: bool = True, support_release_notes: bool = False, device_class: UpdateDeviceClass | None = None, + display_precision: int = 0, + update_steps: int = 100, ) -> None: """Initialize the Demo select entity.""" self._attr_installed_version = installed_version self._attr_device_class = device_class + self._attr_display_precision = display_precision self._attr_latest_version = latest_version self._attr_release_summary = release_summary self._attr_release_url = release_url @@ -119,6 +137,7 @@ class DemoUpdate(UpdateEntity): identifiers={(DOMAIN, unique_id)}, name=device_name, ) + self._update_steps = update_steps if support_install: self._attr_supported_features |= ( UpdateEntityFeature.INSTALL @@ -136,12 +155,14 @@ class DemoUpdate(UpdateEntity): ) -> None: """Install an update.""" if self.supported_features & UpdateEntityFeature.PROGRESS: - for progress in range(0, 100, 10): - self._attr_in_progress = progress + self._attr_in_progress = True + for progress in range(0, self._update_steps, 1): + self._attr_update_percentage = progress / (self._update_steps / 100) self.async_write_ha_state() await _fake_install() self._attr_in_progress = False + self._attr_update_percentage = None self._attr_installed_version = ( version if version is not None else self.latest_version ) diff --git a/homeassistant/components/demo/vacuum.py b/homeassistant/components/demo/vacuum.py index d4c3820d29e..3dd945ab82e 100644 --- a/homeassistant/components/demo/vacuum.py +++ b/homeassistant/components/demo/vacuum.py @@ -7,12 +7,8 @@ from typing import Any from homeassistant.components.vacuum import ( ATTR_CLEANED_AREA, - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from 
homeassistant.config_entries import ConfigEntry @@ -91,16 +87,11 @@ class StateDemoVacuum(StateVacuumEntity): """Initialize the vacuum.""" self._attr_name = name self._attr_supported_features = supported_features - self._state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self._fan_speed = FAN_SPEEDS[1] self._cleaned_area: float = 0 self._battery_level = 100 - @property - def state(self) -> str: - """Return the current state of the vacuum.""" - return self._state - @property def battery_level(self) -> int: """Return the current battery level of the vacuum.""" @@ -123,33 +114,33 @@ class StateDemoVacuum(StateVacuumEntity): def start(self) -> None: """Start or resume the cleaning task.""" - if self._state != STATE_CLEANING: - self._state = STATE_CLEANING + if self._attr_activity != VacuumActivity.CLEANING: + self._attr_activity = VacuumActivity.CLEANING self._cleaned_area += 1.32 self._battery_level -= 1 self.schedule_update_ha_state() def pause(self) -> None: """Pause the cleaning task.""" - if self._state == STATE_CLEANING: - self._state = STATE_PAUSED + if self._attr_activity == VacuumActivity.CLEANING: + self._attr_activity = VacuumActivity.PAUSED self.schedule_update_ha_state() def stop(self, **kwargs: Any) -> None: """Stop the cleaning task, do not return to dock.""" - self._state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE self.schedule_update_ha_state() def return_to_base(self, **kwargs: Any) -> None: """Return dock to charging base.""" - self._state = STATE_RETURNING + self._attr_activity = VacuumActivity.RETURNING self.schedule_update_ha_state() event.call_later(self.hass, 30, self.__set_state_to_dock) def clean_spot(self, **kwargs: Any) -> None: """Perform a spot clean-up.""" - self._state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING self._cleaned_area += 1.32 self._battery_level -= 1 self.schedule_update_ha_state() @@ -167,12 +158,12 @@ class StateDemoVacuum(StateVacuumEntity): "persistent_notification", service_data={"message": "I'm here!", "title": "Locate request"}, ) - self._state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE self.async_write_ha_state() async def async_clean_spot(self, **kwargs: Any) -> None: """Locate the vacuum's position.""" - self._state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING self.async_write_ha_state() async def async_send_command( @@ -182,9 +173,9 @@ class StateDemoVacuum(StateVacuumEntity): **kwargs: Any, ) -> None: """Send a command to the vacuum.""" - self._state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE self.async_write_ha_state() def __set_state_to_dock(self, _: datetime) -> None: - self._state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self.schedule_update_ha_state() diff --git a/homeassistant/components/denon/manifest.json b/homeassistant/components/denon/manifest.json index d94e8a264e3..9e840b43fcf 100644 --- a/homeassistant/components/denon/manifest.json +++ b/homeassistant/components/denon/manifest.json @@ -3,5 +3,6 @@ "name": "Denon Network Receivers", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/denon", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/denonavr/config_flow.py b/homeassistant/components/denonavr/config_flow.py index 9a7d2a30438..9ff05411588 100644 --- a/homeassistant/components/denonavr/config_flow.py +++ b/homeassistant/components/denonavr/config_flow.py @@ -52,10 +52,6 @@ CONFIG_SCHEMA = 
vol.Schema({vol.Optional(CONF_HOST): str}) class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -119,7 +115,7 @@ class DenonAvrFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/denonavr/icons.json b/homeassistant/components/denonavr/icons.json index ec6bc0854f9..33d7f1bd3d9 100644 --- a/homeassistant/components/denonavr/icons.json +++ b/homeassistant/components/denonavr/icons.json @@ -1,7 +1,13 @@ { "services": { - "get_command": "mdi:console", - "set_dynamic_eq": "mdi:tune", - "update_audyssey": "mdi:waveform" + "get_command": { + "service": "mdi:console" + }, + "set_dynamic_eq": { + "service": "mdi:tune" + }, + "update_audyssey": { + "service": "mdi:waveform" + } } } diff --git a/homeassistant/components/denonavr/manifest.json b/homeassistant/components/denonavr/manifest.json index 9188009bde5..328ab504bd1 100644 --- a/homeassistant/components/denonavr/manifest.json +++ b/homeassistant/components/denonavr/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/denonavr", "iot_class": "local_push", "loggers": ["denonavr"], - "requirements": ["denonavr==0.11.6"], + "requirements": ["denonavr==1.0.1"], "ssdp": [ { "manufacturer": "Denon", diff --git a/homeassistant/components/denonavr/media_player.py b/homeassistant/components/denonavr/media_player.py index a7d8565d6a4..03d1b00cfaf 100644 --- a/homeassistant/components/denonavr/media_player.py +++ b/homeassistant/components/denonavr/media_player.py @@ -125,7 +125,6 @@ async def async_setup_entry( unique_id = f"{config_entry.unique_id}-{receiver_zone.zone}" else: unique_id = f"{config_entry.entry_id}-{receiver_zone.zone}" - await receiver_zone.async_setup() entities.append( DenonDevice( receiver_zone, @@ -233,7 +232,7 @@ def async_log_errors[_DenonDeviceT: DenonDevice, **_P, _R]( ) finally: if available and not self.available: - _LOGGER.info( + _LOGGER.warning( "Denon AVR receiver at host %s is available again", self._receiver.host, ) @@ -301,6 +300,8 @@ class DenonDevice(MediaPlayerEntity): async def async_will_remove_from_hass(self) -> None: """Clean up the entity.""" + if self._receiver.telnet_connected: + await self._receiver.async_telnet_disconnect() self._receiver.unregister_callback(ALL_TELNET_EVENTS, self._telnet_callback) @async_log_errors diff --git a/homeassistant/components/denonavr/receiver.py b/homeassistant/components/denonavr/receiver.py index abee5ed74d2..cbafe35cfc5 100644 --- a/homeassistant/components/denonavr/receiver.py +++ b/homeassistant/components/denonavr/receiver.py @@ -3,9 +3,11 @@ from __future__ import annotations from collections.abc import Callable +import contextlib import logging from denonavr import DenonAVR +from denonavr.exceptions import AvrProcessingError import httpx _LOGGER = logging.getLogger(__name__) @@ -93,9 +95,11 @@ class ConnectDenonAVR: await receiver.async_setup() # Do an initial update if telnet is used. 
if self._use_telnet: - await receiver.async_update() - if self._update_audyssey: - await receiver.async_update_audyssey() + for zone in receiver.zones.values(): + with contextlib.suppress(AvrProcessingError): + await zone.async_update() + if self._update_audyssey: + await zone.async_update_audyssey() await receiver.async_telnet_connect() self._receiver = receiver diff --git a/homeassistant/components/derivative/sensor.py b/homeassistant/components/derivative/sensor.py index 36719b43ccb..77ce5169d8d 100644 --- a/homeassistant/components/derivative/sensor.py +++ b/homeassistant/components/derivative/sensor.py @@ -5,14 +5,15 @@ from __future__ import annotations from datetime import datetime, timedelta from decimal import Decimal, DecimalException import logging -from typing import TYPE_CHECKING import voluptuous as vol from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, RestoreSensor, SensorEntity, + SensorStateClass, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -160,7 +161,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity): self._attr_device_info = device_info self._sensor_source_id = source_entity self._round_digits = round_digits - self._state: float | int | Decimal = 0 + self._attr_native_value = round(Decimal(0), round_digits) # List of tuples with (timestamp_start, timestamp_end, derivative) self._state_list: list[tuple[datetime, datetime, Decimal]] = [] @@ -188,7 +189,10 @@ class DerivativeSensor(RestoreSensor, SensorEntity): restored_data.native_unit_of_measurement ) try: - self._state = Decimal(restored_data.native_value) # type: ignore[arg-type] + self._attr_native_value = round( + Decimal(restored_data.native_value), # type: ignore[arg-type] + self._round_digits, + ) except SyntaxError as err: _LOGGER.warning("Could not restore last state: %s", err) @@ -238,6 +242,16 @@ class DerivativeSensor(RestoreSensor, SensorEntity): except AssertionError as err: _LOGGER.error("Could not calculate derivative: %s", err) + # For total increasing sensors, the value is expected to continuously increase. + # A negative derivative for a total increasing sensor likely indicates the + # sensor has been reset. To prevent inaccurate data, discard this sample.
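To illustrate the reset case with made-up numbers (not part of this change): a total_increasing counter that falls from 1000 back to 0 yields a negative raw derivative, which the guard added just below simply drops.

from decimal import Decimal

# Hypothetical sample pair around a meter reset: 1000 -> 0 over 60 seconds.
old_value, new_value, elapsed_s = Decimal(1000), Decimal(0), Decimal(60)
new_derivative = (new_value - old_value) / elapsed_s  # about -16.7 per second

# Mirrors the added check: a negative derivative on a total_increasing source
# is treated as a counter reset, so the sample is skipped rather than recorded.
if new_derivative < 0:
    pass  # skip this sample instead of writing a large negative spike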
+ if ( + new_state.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + and new_derivative < 0 + ): + return + # add latest derivative to the window list self._state_list.append( (old_state.last_updated, new_state.last_updated, new_derivative) @@ -258,12 +272,11 @@ class DerivativeSensor(RestoreSensor, SensorEntity): if elapsed_time > self._time_window: derivative = new_derivative else: - derivative = Decimal(0) + derivative = Decimal(0.00) for start, end, value in self._state_list: weight = calculate_weight(start, end, new_state.last_updated) derivative = derivative + (value * Decimal(weight)) - - self._state = derivative + self._attr_native_value = round(derivative, self._round_digits) self.async_write_ha_state() self.async_on_remove( @@ -271,11 +284,3 @@ class DerivativeSensor(RestoreSensor, SensorEntity): self.hass, self._sensor_source_id, calc_derivative ) ) - - @property - def native_value(self) -> float | int | Decimal: - """Return the state of the sensor.""" - value = round(self._state, self._round_digits) - if TYPE_CHECKING: - assert isinstance(value, (float, int, Decimal)) - return value diff --git a/homeassistant/components/derivative/strings.json b/homeassistant/components/derivative/strings.json index 4b66c893d57..bfdf861a019 100644 --- a/homeassistant/components/derivative/strings.json +++ b/homeassistant/components/derivative/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Derivative sensor", + "title": "Create Derivative sensor", "description": "Create a sensor that estimates the derivative of a sensor.", "data": { "name": "[%key:common::config_flow::data::name%]", diff --git a/homeassistant/components/devialet/config_flow.py b/homeassistant/components/devialet/config_flow.py index 4c097ae6f86..41acfa4b5a7 100644 --- a/homeassistant/components/devialet/config_flow.py +++ b/homeassistant/components/devialet/config_flow.py @@ -23,12 +23,13 @@ class DevialetFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _host: str + _model: str + _name: str + _serial: str + def __init__(self) -> None: """Initialize flow.""" - self._host: str | None = None - self._name: str | None = None - self._model: str | None = None - self._serial: str | None = None self._errors: dict[str, str] = {} async def async_validate_input(self) -> ConfigFlowResult | None: @@ -72,7 +73,7 @@ class DevialetFlowHandler(ConfigFlow, domain=DOMAIN): self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle a flow initialized by zeroconf discovery.""" - LOGGER.info("Devialet device found via ZEROCONF: %s", discovery_info) + LOGGER.debug("Devialet device found via ZEROCONF: %s", discovery_info) self._host = discovery_info.host self._name = discovery_info.name.split(".", 1)[0] diff --git a/homeassistant/components/device_automation/__init__.py b/homeassistant/components/device_automation/__init__.py index 5e196f40aa1..a75a4216475 100644 --- a/homeassistant/components/device_automation/__init__.py +++ b/homeassistant/components/device_automation/__init__.py @@ -15,7 +15,7 @@ import voluptuous as vol import voluptuous_serialize from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.const import ( ATTR_ENTITY_ID, CONF_DEVICE_ID, @@ -481,8 +481,11 @@ async def websocket_device_automation_get_condition_capabilities( @websocket_api.websocket_command( { vol.Required("type"): 
"device_automation/trigger/capabilities", - vol.Required("trigger"): DEVICE_TRIGGER_BASE_SCHEMA.extend( - {}, extra=vol.ALLOW_EXTRA + # The frontend responds with `trigger` as key, while the + # `DEVICE_TRIGGER_BASE_SCHEMA` expects `platform1` as key. + vol.Required("trigger"): vol.All( + cv._trigger_pre_validator, # noqa: SLF001 + DEVICE_TRIGGER_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), ), } ) diff --git a/homeassistant/components/device_tracker/__init__.py b/homeassistant/components/device_tracker/__init__.py index 92c961eb148..313373e3181 100644 --- a/homeassistant/components/device_tracker/__init__.py +++ b/homeassistant/components/device_tracker/__init__.py @@ -2,29 +2,20 @@ from __future__ import annotations -from functools import partial - from homeassistant.const import ATTR_GPS_ACCURACY, STATE_HOME # noqa: F401 from homeassistant.core import HomeAssistant -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from .config_entry import ( # noqa: F401 ScannerEntity, + ScannerEntityDescription, TrackerEntity, + TrackerEntityDescription, async_setup_entry, async_unload_entry, ) from .const import ( # noqa: F401 - _DEPRECATED_SOURCE_TYPE_BLUETOOTH, - _DEPRECATED_SOURCE_TYPE_BLUETOOTH_LE, - _DEPRECATED_SOURCE_TYPE_GPS, - _DEPRECATED_SOURCE_TYPE_ROUTER, ATTR_ATTRIBUTES, ATTR_BATTERY, ATTR_DEV_ID, @@ -70,13 +61,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the device tracker.""" async_setup_legacy_integration(hass, config) return True - - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/device_tracker/config_entry.py b/homeassistant/components/device_tracker/config_entry.py index 14b2d02b5f4..50fc3d2d936 100644 --- a/homeassistant/components/device_tracker/config_entry.py +++ b/homeassistant/components/device_tracker/config_entry.py @@ -3,9 +3,10 @@ from __future__ import annotations import asyncio -from functools import cached_property from typing import final +from propcache import cached_property + from homeassistant.components import zone from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -24,10 +25,11 @@ from homeassistant.helpers.device_registry import ( EventDeviceRegistryUpdatedData, ) from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.typing import StateType +from homeassistant.util.hass_dict import HassKey from .const import ( ATTR_HOST_NAME, @@ -40,6 +42,9 @@ from .const import ( SourceType, ) +DATA_COMPONENT: HassKey[EntityComponent[BaseTrackerEntity]] = HassKey(DOMAIN) +DATA_KEY: HassKey[dict[str, tuple[str, str]]] = HassKey(f"{DOMAIN}_mac") 
+ # mypy: disallow-any-generics @@ -50,7 +55,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if component is not None: return await component.async_setup_entry(entry) - component = hass.data[DOMAIN] = EntityComponent[BaseTrackerEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[BaseTrackerEntity]( LOGGER, DOMAIN, hass ) component.register_shutdown() @@ -60,8 +65,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload an entry.""" - component: EntityComponent[BaseTrackerEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) @callback @@ -93,16 +97,15 @@ def _async_register_mac( unique_id: str, ) -> None: """Register a mac address with a unique ID.""" - data_key = "device_tracker_mac" mac = dr.format_mac(mac) - if data_key in hass.data: - hass.data[data_key][mac] = (domain, unique_id) + if DATA_KEY in hass.data: + hass.data[DATA_KEY][mac] = (domain, unique_id) return # Setup listening. # dict mapping mac -> partial unique ID - data = hass.data[data_key] = {mac: (domain, unique_id)} + data = hass.data[DATA_KEY] = {mac: (domain, unique_id)} @callback def handle_device_event(ev: Event[EventDeviceRegistryUpdatedData]) -> None: @@ -168,6 +171,7 @@ class BaseTrackerEntity(Entity): _attr_device_info: None = None _attr_entity_category = EntityCategory.DIAGNOSTIC + _attr_source_type: SourceType @cached_property def battery_level(self) -> int | None: @@ -178,8 +182,10 @@ class BaseTrackerEntity(Entity): return None @property - def source_type(self) -> SourceType | str: + def source_type(self) -> SourceType: """Return the source type, eg gps or router, of the device.""" + if hasattr(self, "_attr_source_type"): + return self._attr_source_type raise NotImplementedError @property @@ -193,9 +199,30 @@ class BaseTrackerEntity(Entity): return attr -class TrackerEntity(BaseTrackerEntity): +class TrackerEntityDescription(EntityDescription, frozen_or_thawed=True): + """A class that describes tracker entities.""" + + +CACHED_TRACKER_PROPERTIES_WITH_ATTR_ = { + "latitude", + "location_accuracy", + "location_name", + "longitude", +} + + +class TrackerEntity( + BaseTrackerEntity, cached_properties=CACHED_TRACKER_PROPERTIES_WITH_ATTR_ +): """Base class for a tracked device.""" + entity_description: TrackerEntityDescription + _attr_latitude: float | None = None + _attr_location_accuracy: int = 0 + _attr_location_name: str | None = None + _attr_longitude: float | None = None + _attr_source_type: SourceType = SourceType.GPS + @cached_property def should_poll(self) -> bool: """No polling for entities that have location pushed.""" @@ -212,22 +239,22 @@ class TrackerEntity(BaseTrackerEntity): Value in meters. 
""" - return 0 + return self._attr_location_accuracy @cached_property def location_name(self) -> str | None: """Return a location name for the current location of the device.""" - return None + return self._attr_location_name @cached_property def latitude(self) -> float | None: """Return latitude value of the device.""" - return None + return self._attr_latitude @cached_property def longitude(self) -> float | None: """Return longitude value of the device.""" - return None + return self._attr_longitude @property def state(self) -> str | None: @@ -264,23 +291,42 @@ class TrackerEntity(BaseTrackerEntity): return attr -class ScannerEntity(BaseTrackerEntity): +class ScannerEntityDescription(EntityDescription, frozen_or_thawed=True): + """A class that describes tracker entities.""" + + +CACHED_SCANNER_PROPERTIES_WITH_ATTR_ = { + "ip_address", + "mac_address", + "hostname", +} + + +class ScannerEntity( + BaseTrackerEntity, cached_properties=CACHED_SCANNER_PROPERTIES_WITH_ATTR_ +): """Base class for a tracked device that is on a scanned network.""" + entity_description: ScannerEntityDescription + _attr_hostname: str | None = None + _attr_ip_address: str | None = None + _attr_mac_address: str | None = None + _attr_source_type: SourceType = SourceType.ROUTER + @cached_property def ip_address(self) -> str | None: """Return the primary ip address of the device.""" - return None + return self._attr_ip_address @cached_property def mac_address(self) -> str | None: """Return the mac address of the device.""" - return None + return self._attr_mac_address @cached_property def hostname(self) -> str | None: """Return hostname of the device.""" - return None + return self._attr_hostname @property def state(self) -> str: diff --git a/homeassistant/components/device_tracker/const.py b/homeassistant/components/device_tracker/const.py index 964b7faab9b..c9e4d4e910a 100644 --- a/homeassistant/components/device_tracker/const.py +++ b/homeassistant/components/device_tracker/const.py @@ -4,16 +4,9 @@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import partial import logging from typing import Final -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.util.signal_type import SignalType LOGGER: Final = logging.getLogger(__package__) @@ -34,19 +27,6 @@ class SourceType(StrEnum): BLUETOOTH_LE = "bluetooth_le" -# SOURCE_TYPE_* below are deprecated as of 2022.9 -# use the SourceType enum instead. 
-_DEPRECATED_SOURCE_TYPE_GPS: Final = DeprecatedConstantEnum(SourceType.GPS, "2025.1") -_DEPRECATED_SOURCE_TYPE_ROUTER: Final = DeprecatedConstantEnum( - SourceType.ROUTER, "2025.1" -) -_DEPRECATED_SOURCE_TYPE_BLUETOOTH: Final = DeprecatedConstantEnum( - SourceType.BLUETOOTH, "2025.1" -) -_DEPRECATED_SOURCE_TYPE_BLUETOOTH_LE: Final = DeprecatedConstantEnum( - SourceType.BLUETOOTH_LE, "2025.1" -) - CONF_SCAN_INTERVAL: Final = "interval_seconds" SCAN_INTERVAL: Final = timedelta(seconds=12) @@ -72,10 +52,3 @@ ATTR_IP: Final = "ip" CONNECTED_DEVICE_REGISTERED = SignalType[dict[str, str | None]]( "device_tracker_connected_device_registered" ) - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/device_tracker/icons.json b/homeassistant/components/device_tracker/icons.json index c89053701ba..4e5b82576cf 100644 --- a/homeassistant/components/device_tracker/icons.json +++ b/homeassistant/components/device_tracker/icons.json @@ -8,6 +8,8 @@ } }, "services": { - "see": "mdi:account-eye" + "see": { + "service": "mdi:account-eye" + } } } diff --git a/homeassistant/components/device_tracker/legacy.py b/homeassistant/components/device_tracker/legacy.py index 15cb67f5ee8..5dff5837b4b 100644 --- a/homeassistant/components/device_tracker/legacy.py +++ b/homeassistant/components/device_tracker/legacy.py @@ -5,12 +5,12 @@ from __future__ import annotations import asyncio from collections.abc import Callable, Coroutine, Sequence from datetime import datetime, timedelta -from functools import cached_property import hashlib from types import ModuleType from typing import Any, Final, Protocol, final import attr +from propcache import cached_property import voluptuous as vol from homeassistant import util diff --git a/homeassistant/components/device_tracker/strings.json b/homeassistant/components/device_tracker/strings.json index d6e36d92300..294333a5d80 100644 --- a/homeassistant/components/device_tracker/strings.json +++ b/homeassistant/components/device_tracker/strings.json @@ -48,7 +48,7 @@ "services": { "see": { "name": "See", - "description": "Records a seen tracked device.", + "description": "Manually update the records of a seen legacy device tracker in the known_devices.yaml file.", "fields": { "mac": { "name": "MAC address", diff --git a/homeassistant/components/devolo_home_control/__init__.py b/homeassistant/components/devolo_home_control/__init__.py index 7755e0f22b4..e86b7b753c8 100644 --- a/homeassistant/components/devolo_home_control/__init__.py +++ b/homeassistant/components/devolo_home_control/__init__.py @@ -18,7 +18,7 @@ from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.device_registry import DeviceEntry -from .const import CONF_MYDEVOLO, DEFAULT_MYDEVOLO, GATEWAY_SERIAL_PATTERN, PLATFORMS +from .const import GATEWAY_SERIAL_PATTERN, PLATFORMS type DevoloHomeControlConfigEntry = ConfigEntry[list[HomeControl]] @@ -102,5 +102,4 @@ def configure_mydevolo(conf: dict[str, Any] | MappingProxyType[str, Any]) -> Myd mydevolo = Mydevolo() mydevolo.user = conf[CONF_USERNAME] mydevolo.password = conf[CONF_PASSWORD] - mydevolo.url = conf.get(CONF_MYDEVOLO, DEFAULT_MYDEVOLO) return mydevolo diff --git 
a/homeassistant/components/devolo_home_control/binary_sensor.py b/homeassistant/components/devolo_home_control/binary_sensor.py index 349780304c6..449b1c7659f 100644 --- a/homeassistant/components/devolo_home_control/binary_sensor.py +++ b/homeassistant/components/devolo_home_control/binary_sensor.py @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import DevoloHomeControlConfigEntry -from .devolo_device import DevoloDeviceEntity +from .entity import DevoloDeviceEntity DEVICE_CLASS_MAPPING = { "Water alarm": BinarySensorDeviceClass.MOISTURE, diff --git a/homeassistant/components/devolo_home_control/climate.py b/homeassistant/components/devolo_home_control/climate.py index 29177ae2437..1f407eb6804 100644 --- a/homeassistant/components/devolo_home_control/climate.py +++ b/homeassistant/components/devolo_home_control/climate.py @@ -56,7 +56,6 @@ class DevoloClimateDeviceEntity(DevoloMultiLevelSwitchDeviceEntity, ClimateEntit _attr_precision = PRECISION_TENTHS _attr_hvac_mode = HVACMode.HEAT _attr_hvac_modes = [HVACMode.HEAT] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, homecontrol: HomeControl, device_instance: Zwave, element_uid: str diff --git a/homeassistant/components/devolo_home_control/config_flow.py b/homeassistant/components/devolo_home_control/config_flow.py index 0687a4a907f..e15204af7c2 100644 --- a/homeassistant/components/devolo_home_control/config_flow.py +++ b/homeassistant/components/devolo_home_control/config_flow.py @@ -8,12 +8,17 @@ from typing import Any import voluptuous as vol from homeassistant.components import zeroconf -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_REAUTH, + ConfigEntry, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback from . 
import configure_mydevolo -from .const import CONF_MYDEVOLO, DEFAULT_MYDEVOLO, DOMAIN, SUPPORTED_MODEL_TYPES +from .const import DOMAIN, SUPPORTED_MODEL_TYPES from .exceptions import CredentialsInvalid, UuidChanged @@ -22,21 +27,19 @@ class DevoloHomeControlFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _reauth_entry: ConfigEntry + def __init__(self) -> None: """Initialize devolo Home Control flow.""" self.data_schema = { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, } - self._reauth_entry: ConfigEntry | None = None - self._url = DEFAULT_MYDEVOLO async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - if self.show_advanced_options: - self.data_schema[vol.Required(CONF_MYDEVOLO, default=self._url)] = str if user_input is None: return self._show_form(step_id="user") try: @@ -71,10 +74,7 @@ class DevoloHomeControlFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauthentication.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - self._url = entry_data[CONF_MYDEVOLO] + self._reauth_entry = self._get_reauth_entry() self.data_schema = { vol.Required(CONF_USERNAME, default=entry_data[CONF_USERNAME]): str, vol.Required(CONF_PASSWORD): str, @@ -100,7 +100,6 @@ class DevoloHomeControlFlowHandler(ConfigFlow, domain=DOMAIN): async def _connect_mydevolo(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Connect to mydevolo.""" - user_input[CONF_MYDEVOLO] = user_input.get(CONF_MYDEVOLO, self._url) mydevolo = configure_mydevolo(conf=user_input) credentials_valid = await self.hass.async_add_executor_job( mydevolo.credentials_valid @@ -109,7 +108,7 @@ class DevoloHomeControlFlowHandler(ConfigFlow, domain=DOMAIN): raise CredentialsInvalid uuid = await self.hass.async_add_executor_job(mydevolo.uuid) - if not self._reauth_entry: + if self.source != SOURCE_REAUTH: await self.async_set_unique_id(uuid) self._abort_if_unique_id_configured() return self.async_create_entry( @@ -117,7 +116,6 @@ class DevoloHomeControlFlowHandler(ConfigFlow, domain=DOMAIN): data={ CONF_PASSWORD: mydevolo.password, CONF_USERNAME: mydevolo.user, - CONF_MYDEVOLO: mydevolo.url, }, ) diff --git a/homeassistant/components/devolo_home_control/const.py b/homeassistant/components/devolo_home_control/const.py index eb48a6d269e..bd2282ad99f 100644 --- a/homeassistant/components/devolo_home_control/const.py +++ b/homeassistant/components/devolo_home_control/const.py @@ -5,7 +5,6 @@ import re from homeassistant.const import Platform DOMAIN = "devolo_home_control" -DEFAULT_MYDEVOLO = "https://www.mydevolo.com" PLATFORMS = [ Platform.BINARY_SENSOR, Platform.CLIMATE, @@ -15,6 +14,5 @@ PLATFORMS = [ Platform.SIREN, Platform.SWITCH, ] -CONF_MYDEVOLO = "mydevolo_url" GATEWAY_SERIAL_PATTERN = re.compile(r"\d{16}") SUPPORTED_MODEL_TYPES = ["2600", "2601"] diff --git a/homeassistant/components/devolo_home_control/devolo_multi_level_switch.py b/homeassistant/components/devolo_home_control/devolo_multi_level_switch.py index 3072cb01f2e..3e2d551d1f8 100644 --- a/homeassistant/components/devolo_home_control/devolo_multi_level_switch.py +++ b/homeassistant/components/devolo_home_control/devolo_multi_level_switch.py @@ -3,7 +3,7 @@ from devolo_home_control_api.devices.zwave import Zwave from devolo_home_control_api.homecontrol import HomeControl -from .devolo_device import DevoloDeviceEntity +from .entity import DevoloDeviceEntity 
class DevoloMultiLevelSwitchDeviceEntity(DevoloDeviceEntity): diff --git a/homeassistant/components/devolo_home_control/devolo_device.py b/homeassistant/components/devolo_home_control/entity.py similarity index 100% rename from homeassistant/components/devolo_home_control/devolo_device.py rename to homeassistant/components/devolo_home_control/entity.py diff --git a/homeassistant/components/devolo_home_control/manifest.json b/homeassistant/components/devolo_home_control/manifest.json index eb85e827551..a9715fffa84 100644 --- a/homeassistant/components/devolo_home_control/manifest.json +++ b/homeassistant/components/devolo_home_control/manifest.json @@ -8,7 +8,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["devolo_home_control_api"], - "quality_scale": "gold", "requirements": ["devolo-home-control-api==0.18.3"], "zeroconf": ["_dvl-deviceapi._tcp.local."] } diff --git a/homeassistant/components/devolo_home_control/sensor.py b/homeassistant/components/devolo_home_control/sensor.py index 134e45a137e..61a63419732 100644 --- a/homeassistant/components/devolo_home_control/sensor.py +++ b/homeassistant/components/devolo_home_control/sensor.py @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import DevoloHomeControlConfigEntry -from .devolo_device import DevoloDeviceEntity +from .entity import DevoloDeviceEntity DEVICE_CLASS_MAPPING = { "battery": SensorDeviceClass.BATTERY, diff --git a/homeassistant/components/devolo_home_control/strings.json b/homeassistant/components/devolo_home_control/strings.json index eeae9aa2e2f..1eaf64564c2 100644 --- a/homeassistant/components/devolo_home_control/strings.json +++ b/homeassistant/components/devolo_home_control/strings.json @@ -12,15 +12,21 @@ "user": { "data": { "username": "Email / devolo ID", - "password": "[%key:common::config_flow::data::password%]", - "mydevolo_url": "mydevolo URL" + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "Email address you used to register the central unit at mydevolo.", + "password": "Password of your mydevolo account." } }, "zeroconf_confirm": { "data": { "username": "[%key:component::devolo_home_control::config::step::user::data::username%]", - "password": "[%key:common::config_flow::data::password%]", - "mydevolo_url": "[%key:component::devolo_home_control::config::step::user::data::mydevolo_url%]" + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::devolo_home_control::config::step::user::data_description::username%]", + "password": "[%key:component::devolo_home_control::config::step::user::data_description::password%]" } } } diff --git a/homeassistant/components/devolo_home_control/switch.py b/homeassistant/components/devolo_home_control/switch.py index dd3248be315..a6f16229046 100644 --- a/homeassistant/components/devolo_home_control/switch.py +++ b/homeassistant/components/devolo_home_control/switch.py @@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import DevoloHomeControlConfigEntry -from .devolo_device import DevoloDeviceEntity +from .entity import DevoloDeviceEntity async def async_setup_entry( diff --git a/homeassistant/components/devolo_home_network/__init__.py b/homeassistant/components/devolo_home_network/__init__.py index 59aafb1eb9c..7f6784f2404 100644 --- a/homeassistant/components/devolo_home_network/__init__.py +++ b/homeassistant/components/devolo_home_network/__init__.py @@ -2,6 +2,7 @@ from __future__ import annotations +from asyncio import Semaphore from dataclasses import dataclass import logging from typing import Any @@ -32,13 +33,14 @@ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.httpx_client import get_async_client -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.helpers.update_coordinator import UpdateFailed from .const import ( CONNECTED_PLC_DEVICES, CONNECTED_WIFI_CLIENTS, DOMAIN, FIRMWARE_UPDATE_INTERVAL, + LAST_RESTART, LONG_UPDATE_INTERVAL, NEIGHBORING_WIFI_NETWORKS, REGULAR_FIRMWARE, @@ -46,6 +48,7 @@ from .const import ( SWITCH_GUEST_WIFI, SWITCH_LEDS, ) +from .coordinator import DevoloDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -57,7 +60,7 @@ class DevoloHomeNetworkData: """The devolo Home Network data.""" device: Device - coordinators: dict[str, DataUpdateCoordinator[Any]] + coordinators: dict[str, DevoloDataUpdateCoordinator[Any]] async def async_setup_entry( @@ -67,6 +70,7 @@ async def async_setup_entry( zeroconf_instance = await zeroconf.async_get_async_instance(hass) async_client = get_async_client(hass) device_registry = dr.async_get(hass) + semaphore = Semaphore(1) try: device = Device( @@ -79,7 +83,6 @@ async def async_setup_entry( ) except DeviceNotFound as err: raise ConfigEntryNotReady( - f"Unable to connect to {entry.data[CONF_IP_ADDRESS]}", translation_domain=DOMAIN, translation_key="connection_failed", translation_placeholders={"ip_address": entry.data[CONF_IP_ADDRESS]}, @@ -94,7 +97,11 @@ async def async_setup_entry( try: return await device.device.async_check_firmware_available() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err async def async_update_connected_plc_devices() -> LogicalNetwork: """Fetch data from API endpoint.""" @@ -103,7 +110,11 @@ async def async_setup_entry( try: return await device.plcnet.async_get_network_overview() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err async def async_update_guest_wifi_status() -> WifiGuestAccessGet: """Fetch data from API endpoint.""" @@ -112,10 +123,14 @@ async def async_setup_entry( try: return await device.device.async_get_wifi_guest_access() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err except DevicePasswordProtected as err: raise ConfigEntryAuthFailed( - err, translation_domain=DOMAIN, translation_key="password_wrong" + translation_domain=DOMAIN, translation_key="password_wrong" ) from 
err async def async_update_led_status() -> bool: @@ -125,7 +140,28 @@ async def async_setup_entry( try: return await device.device.async_get_led_setting() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err + + async def async_update_last_restart() -> int: + """Fetch data from API endpoint.""" + assert device.device + update_sw_version(device_registry, device) + try: + return await device.device.async_uptime() + except DeviceUnavailable as err: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err + except DevicePasswordProtected as err: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, translation_key="password_wrong" + ) from err async def async_update_wifi_connected_station() -> list[ConnectedStationInfo]: """Fetch data from API endpoint.""" @@ -134,7 +170,11 @@ async def async_setup_entry( try: return await device.device.async_get_wifi_connected_station() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err async def async_update_wifi_neighbor_access_points() -> list[NeighborAPInfo]: """Fetch data from API endpoint.""" @@ -143,56 +183,82 @@ async def async_setup_entry( try: return await device.device.async_get_wifi_neighbor_access_points() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err async def disconnect(event: Event) -> None: """Disconnect from device.""" await device.async_disconnect() - coordinators: dict[str, DataUpdateCoordinator[Any]] = {} + coordinators: dict[str, DevoloDataUpdateCoordinator[Any]] = {} if device.plcnet: - coordinators[CONNECTED_PLC_DEVICES] = DataUpdateCoordinator( + coordinators[CONNECTED_PLC_DEVICES] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=CONNECTED_PLC_DEVICES, + semaphore=semaphore, update_method=async_update_connected_plc_devices, update_interval=LONG_UPDATE_INTERVAL, ) if device.device and "led" in device.device.features: - coordinators[SWITCH_LEDS] = DataUpdateCoordinator( + coordinators[SWITCH_LEDS] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=SWITCH_LEDS, + semaphore=semaphore, update_method=async_update_led_status, update_interval=SHORT_UPDATE_INTERVAL, ) - if device.device and "update" in device.device.features: - coordinators[REGULAR_FIRMWARE] = DataUpdateCoordinator( + if device.device and "restart" in device.device.features: + coordinators[LAST_RESTART] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, + name=LAST_RESTART, + semaphore=semaphore, + update_method=async_update_last_restart, + update_interval=SHORT_UPDATE_INTERVAL, + ) + if device.device and "update" in device.device.features: + coordinators[REGULAR_FIRMWARE] = DevoloDataUpdateCoordinator( + hass, + _LOGGER, + config_entry=entry, name=REGULAR_FIRMWARE, + semaphore=semaphore, update_method=async_update_firmware_available, update_interval=FIRMWARE_UPDATE_INTERVAL, ) if device.device and "wifi1" in device.device.features: - coordinators[CONNECTED_WIFI_CLIENTS] = DataUpdateCoordinator( + coordinators[CONNECTED_WIFI_CLIENTS] = 
DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=CONNECTED_WIFI_CLIENTS, + semaphore=semaphore, update_method=async_update_wifi_connected_station, update_interval=SHORT_UPDATE_INTERVAL, ) - coordinators[NEIGHBORING_WIFI_NETWORKS] = DataUpdateCoordinator( + coordinators[NEIGHBORING_WIFI_NETWORKS] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=NEIGHBORING_WIFI_NETWORKS, + semaphore=semaphore, update_method=async_update_wifi_neighbor_access_points, update_interval=LONG_UPDATE_INTERVAL, ) - coordinators[SWITCH_GUEST_WIFI] = DataUpdateCoordinator( + coordinators[SWITCH_GUEST_WIFI] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=SWITCH_GUEST_WIFI, + semaphore=semaphore, update_method=async_update_guest_wifi_status, update_interval=SHORT_UPDATE_INTERVAL, ) diff --git a/homeassistant/components/devolo_home_network/binary_sensor.py b/homeassistant/components/devolo_home_network/binary_sensor.py index c96d0273a50..5752956ffb5 100644 --- a/homeassistant/components/devolo_home_network/binary_sensor.py +++ b/homeassistant/components/devolo_home_network/binary_sensor.py @@ -15,13 +15,13 @@ from homeassistant.components.binary_sensor import ( from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . import DevoloHomeNetworkConfigEntry from .const import CONNECTED_PLC_DEVICES, CONNECTED_TO_ROUTER +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 def _is_connected_to_router(entity: DevoloBinarySensorEntity) -> bool: @@ -78,7 +78,7 @@ class DevoloBinarySensorEntity( def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[LogicalNetwork], + coordinator: DevoloDataUpdateCoordinator[LogicalNetwork], description: DevoloBinarySensorEntityDescription, ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/devolo_home_network/button.py b/homeassistant/components/devolo_home_network/button.py index ca17b572522..06822ff199e 100644 --- a/homeassistant/components/devolo_home_network/button.py +++ b/homeassistant/components/devolo_home_network/button.py @@ -22,7 +22,7 @@ from . import DevoloHomeNetworkConfigEntry from .const import DOMAIN, IDENTIFY, PAIRING, RESTART, START_WPS from .entity import DevoloEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/devolo_home_network/config_flow.py b/homeassistant/components/devolo_home_network/config_flow.py index 63d86d46e8a..7c8dccd1a7b 100644 --- a/homeassistant/components/devolo_home_network/config_flow.py +++ b/homeassistant/components/devolo_home_network/config_flow.py @@ -12,10 +12,11 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_PASSWORD +from homeassistant.const import CONF_IP_ADDRESS, CONF_NAME, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.helpers.httpx_client import get_async_client +from . 
import DevoloHomeNetworkConfigEntry from .const import DOMAIN, PRODUCT, SERIAL_NUMBER, TITLE _LOGGER = logging.getLogger(__name__) @@ -48,6 +49,9 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + host: str + _reauth_entry: DevoloHomeNetworkConfigEntry + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -88,7 +92,7 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN): updates={CONF_IP_ADDRESS: discovery_info.host} ) - self.context[CONF_HOST] = discovery_info.host + self.host = discovery_info.host self.context["title_placeholders"] = { PRODUCT: discovery_info.properties["Product"], CONF_NAME: discovery_info.hostname.split(".")[0], @@ -103,7 +107,7 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN): title = self.context["title_placeholders"][CONF_NAME] if user_input is not None: data = { - CONF_IP_ADDRESS: self.context[CONF_HOST], + CONF_IP_ADDRESS: self.host, CONF_PASSWORD: "", } return self.async_create_entry(title=title, data=data) @@ -112,13 +116,17 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={"host_name": title}, ) - async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle reauthentication.""" - if entry := self.hass.config_entries.async_get_entry(self.context["entry_id"]): - self.context[CONF_HOST] = data[CONF_IP_ADDRESS] - self.context["title_placeholders"][PRODUCT] = ( - entry.runtime_data.device.product - ) + self._reauth_entry = self._get_reauth_entry() + self.host = entry_data[CONF_IP_ADDRESS] + placeholders = { + **self.context["title_placeholders"], + PRODUCT: self._reauth_entry.runtime_data.device.product, + } + self.context["title_placeholders"] = placeholders return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -131,13 +139,8 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=STEP_REAUTH_DATA_SCHEMA, ) - reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert reauth_entry is not None - data = { - CONF_IP_ADDRESS: self.context[CONF_HOST], + CONF_IP_ADDRESS: self.host, CONF_PASSWORD: user_input[CONF_PASSWORD], } - return self.async_update_reload_and_abort(reauth_entry, data=data) + return self.async_update_reload_and_abort(self._reauth_entry, data=data) diff --git a/homeassistant/components/devolo_home_network/const.py b/homeassistant/components/devolo_home_network/const.py index 4caa4f5b60b..92b97d59423 100644 --- a/homeassistant/components/devolo_home_network/const.py +++ b/homeassistant/components/devolo_home_network/const.py @@ -23,6 +23,7 @@ CONNECTED_TO_ROUTER = "connected_to_router" CONNECTED_WIFI_CLIENTS = "connected_wifi_clients" IDENTIFY = "identify" IMAGE_GUEST_WIFI = "image_guest_wifi" +LAST_RESTART = "last_restart" NEIGHBORING_WIFI_NETWORKS = "neighboring_wifi_networks" PAIRING = "pairing" PLC_RX_RATE = "plc_rx_rate" diff --git a/homeassistant/components/devolo_home_network/coordinator.py b/homeassistant/components/devolo_home_network/coordinator.py new file mode 100644 index 00000000000..c0af9668279 --- /dev/null +++ b/homeassistant/components/devolo_home_network/coordinator.py @@ -0,0 +1,41 @@ +"""Base coordinator.""" + +from asyncio import Semaphore +from collections.abc import Awaitable, Callable +from datetime import timedelta +from logging import Logger + +from homeassistant.config_entries 
import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + + +class DevoloDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): + """Class to manage fetching data from devolo Home Network devices.""" + + def __init__( + self, + hass: HomeAssistant, + logger: Logger, + *, + config_entry: ConfigEntry, + name: str, + semaphore: Semaphore, + update_interval: timedelta, + update_method: Callable[[], Awaitable[_DataT]], + ) -> None: + """Initialize global data updater.""" + super().__init__( + hass, + logger, + config_entry=config_entry, + name=name, + update_interval=update_interval, + update_method=update_method, + ) + self._semaphore = semaphore + + async def _async_update_data(self) -> _DataT: + """Fetch the latest data from the source.""" + async with self._semaphore: + return await super()._async_update_data() diff --git a/homeassistant/components/devolo_home_network/device_tracker.py b/homeassistant/components/devolo_home_network/device_tracker.py index 960069191ee..583f022df84 100644 --- a/homeassistant/components/devolo_home_network/device_tracker.py +++ b/homeassistant/components/devolo_home_network/device_tracker.py @@ -8,21 +8,18 @@ from devolo_plc_api.device_api import ConnectedStationInfo from homeassistant.components.device_tracker import ( DOMAIN as DEVICE_TRACKER_DOMAIN, ScannerEntity, - SourceType, ) from homeassistant.const import STATE_UNKNOWN, UnitOfFrequency from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import DevoloHomeNetworkConfigEntry from .const import CONNECTED_WIFI_CLIENTS, DOMAIN, WIFI_APTYPE, WIFI_BANDS +from .coordinator import DevoloDataUpdateCoordinator -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 async def async_setup_entry( @@ -32,7 +29,7 @@ async def async_setup_entry( ) -> None: """Get all devices and sensors and setup them via config entry.""" device = entry.runtime_data.device - coordinators: dict[str, DataUpdateCoordinator[list[ConnectedStationInfo]]] = ( + coordinators: dict[str, DevoloDataUpdateCoordinator[list[ConnectedStationInfo]]] = ( entry.runtime_data.coordinators ) registry = er.async_get(hass) @@ -52,7 +49,7 @@ async def async_setup_entry( ) ) tracked.add(station.mac_address) - async_add_entities(new_entities) + async_add_entities(new_entities) @callback def restore_entities() -> None: @@ -84,21 +81,23 @@ async def async_setup_entry( ) -class DevoloScannerEntity( - CoordinatorEntity[DataUpdateCoordinator[list[ConnectedStationInfo]]], ScannerEntity +# The pylint disable is needed because of https://github.com/pylint-dev/pylint/issues/9138 +class DevoloScannerEntity( # pylint: disable=hass-enforce-class-module + CoordinatorEntity[DevoloDataUpdateCoordinator[list[ConnectedStationInfo]]], + ScannerEntity, ): """Representation of a devolo device tracker.""" def __init__( self, - coordinator: DataUpdateCoordinator[list[ConnectedStationInfo]], + coordinator: DevoloDataUpdateCoordinator[list[ConnectedStationInfo]], device: Device, mac: str, ) -> None: """Initialize entity.""" super().__init__(coordinator) self._device = device - self._mac = mac + self._attr_mac_address = mac @property def extra_state_attributes(self) -> dict[str, str]: @@ -140,17 +139,7 @@ class DevoloScannerEntity( if station.mac_address == self.mac_address ) - @property - def mac_address(self) -> str: - """Return mac_address.""" - return self._mac - - @property - def source_type(self) -> SourceType: - """Return tracker source type.""" - return SourceType.ROUTER - @property def unique_id(self) -> str: """Return unique ID of the entity.""" - return f"{self._device.serial_number}_{self._mac}" + return f"{self._device.serial_number}_{self.mac_address}" diff --git a/homeassistant/components/devolo_home_network/entity.py b/homeassistant/components/devolo_home_network/entity.py index 9d469ccfb16..93ec1b9a3a2 100644 --- a/homeassistant/components/devolo_home_network/entity.py +++ b/homeassistant/components/devolo_home_network/entity.py @@ -9,15 +9,14 @@ from devolo_plc_api.device_api import ( ) from devolo_plc_api.plcnet_api import DataRate, LogicalNetwork +from homeassistant.const import ATTR_CONNECTIONS from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import Entity -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import DevoloHomeNetworkConfigEntry from .const import DOMAIN +from .coordinator import DevoloDataUpdateCoordinator type _DataType = ( LogicalNetwork @@ -26,6 +25,7 @@ type _DataType = ( | list[NeighborAPInfo] | WifiGuestAccessGet | bool + | int ) @@ -44,7 +44,6 @@ class DevoloEntity(Entity): self._attr_device_info = DeviceInfo( configuration_url=f"http://{self.device.ip}", - connections={(CONNECTION_NETWORK_MAC, self.device.mac)}, identifiers={(DOMAIN, str(self.device.serial_number))}, manufacturer="devolo", model=self.device.product, @@ -52,6 +51,10 @@ class DevoloEntity(Entity): serial_number=self.device.serial_number, sw_version=self.device.firmware_version, ) + if self.device.mac: + self._attr_device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, self.device.mac) + } self._attr_translation_key = self.entity_description.key self._attr_unique_id = ( f"{self.device.serial_number}_{self.entity_description.key}" @@ -59,14 +62,14 @@ class DevoloEntity(Entity): class DevoloCoordinatorEntity[_DataT: _DataType]( - CoordinatorEntity[DataUpdateCoordinator[_DataT]], DevoloEntity + CoordinatorEntity[DevoloDataUpdateCoordinator[_DataT]], DevoloEntity ): """Representation of a coordinated devolo home network device.""" def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[_DataT], + coordinator: DevoloDataUpdateCoordinator[_DataT], ) -> None: """Initialize a devolo home network device.""" super().__init__(coordinator) diff --git a/homeassistant/components/devolo_home_network/image.py b/homeassistant/components/devolo_home_network/image.py index 58052d3021e..240686ed3bb 100644 --- a/homeassistant/components/devolo_home_network/image.py +++ b/homeassistant/components/devolo_home_network/image.py @@ -13,14 +13,14 @@ from homeassistant.components.image import ImageEntity, ImageEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator import homeassistant.util.dt as dt_util from . 
import DevoloHomeNetworkConfigEntry from .const import IMAGE_GUEST_WIFI, SWITCH_GUEST_WIFI +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -66,7 +66,7 @@ class DevoloImageEntity(DevoloCoordinatorEntity[WifiGuestAccessGet], ImageEntity def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[WifiGuestAccessGet], + coordinator: DevoloDataUpdateCoordinator[WifiGuestAccessGet], description: DevoloImageEntityDescription, ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/devolo_home_network/manifest.json b/homeassistant/components/devolo_home_network/manifest.json index 27fd08898c0..d10e14f9081 100644 --- a/homeassistant/components/devolo_home_network/manifest.json +++ b/homeassistant/components/devolo_home_network/manifest.json @@ -7,7 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["devolo_plc_api"], - "quality_scale": "platinum", "requirements": ["devolo-plc-api==1.4.1"], "zeroconf": [ { diff --git a/homeassistant/components/devolo_home_network/sensor.py b/homeassistant/components/devolo_home_network/sensor.py index 2fd8ab9220c..220ab66312a 100644 --- a/homeassistant/components/devolo_home_network/sensor.py +++ b/homeassistant/components/devolo_home_network/sensor.py @@ -4,8 +4,9 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from datetime import datetime, timedelta from enum import StrEnum -from typing import Any, Generic, TypeVar +from typing import Any from devolo_plc_api.device_api import ConnectedStationInfo, NeighborAPInfo from devolo_plc_api.plcnet_api import REMOTE, DataRate, LogicalNetwork @@ -19,28 +20,37 @@ from homeassistant.components.sensor import ( from homeassistant.const import EntityCategory, UnitOfDataRate from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.util.dt import utcnow from . import DevoloHomeNetworkConfigEntry from .const import ( CONNECTED_PLC_DEVICES, CONNECTED_WIFI_CLIENTS, + LAST_RESTART, NEIGHBORING_WIFI_NETWORKS, PLC_RX_RATE, PLC_TX_RATE, ) +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 -_CoordinatorDataT = TypeVar( - "_CoordinatorDataT", - bound=LogicalNetwork | DataRate | list[ConnectedStationInfo] | list[NeighborAPInfo], -) -_ValueDataT = TypeVar( - "_ValueDataT", - bound=LogicalNetwork | DataRate | list[ConnectedStationInfo] | list[NeighborAPInfo], + +def _last_restart(runtime: int) -> datetime: + """Calculate uptime. 
As fetching the data might also take some time, let's floor to the nearest 5 seconds.""" + now = utcnow() + return ( + now + - timedelta(seconds=runtime) + - timedelta(seconds=(now.timestamp() - runtime) % 5) + ) + + +type _CoordinatorDataType = ( + LogicalNetwork | DataRate | list[ConnectedStationInfo] | list[NeighborAPInfo] | int ) +type _SensorDataType = int | float | datetime class DataRateDirection(StrEnum): @@ -51,16 +61,17 @@ class DataRateDirection(StrEnum): @dataclass(frozen=True, kw_only=True) -class DevoloSensorEntityDescription( - SensorEntityDescription, Generic[_CoordinatorDataT] -): +class DevoloSensorEntityDescription[ + _CoordinatorDataT: _CoordinatorDataType, + _SensorDataT: _SensorDataType, +](SensorEntityDescription): """Describes devolo sensor entity.""" - value_func: Callable[[_CoordinatorDataT], float] + value_func: Callable[[_CoordinatorDataT], _SensorDataT] -SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any]] = { - CONNECTED_PLC_DEVICES: DevoloSensorEntityDescription[LogicalNetwork]( +SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any, Any]] = { + CONNECTED_PLC_DEVICES: DevoloSensorEntityDescription[LogicalNetwork, int]( key=CONNECTED_PLC_DEVICES, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, @@ -68,18 +79,20 @@ SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any]] = { {device.mac_address_from for device in data.data_rates} ), ), - CONNECTED_WIFI_CLIENTS: DevoloSensorEntityDescription[list[ConnectedStationInfo]]( + CONNECTED_WIFI_CLIENTS: DevoloSensorEntityDescription[ + list[ConnectedStationInfo], int + ]( key=CONNECTED_WIFI_CLIENTS, state_class=SensorStateClass.MEASUREMENT, value_func=len, ), - NEIGHBORING_WIFI_NETWORKS: DevoloSensorEntityDescription[list[NeighborAPInfo]]( + NEIGHBORING_WIFI_NETWORKS: DevoloSensorEntityDescription[list[NeighborAPInfo], int]( key=NEIGHBORING_WIFI_NETWORKS, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, value_func=len, ), - PLC_RX_RATE: DevoloSensorEntityDescription[DataRate]( + PLC_RX_RATE: DevoloSensorEntityDescription[DataRate, float]( key=PLC_RX_RATE, entity_category=EntityCategory.DIAGNOSTIC, name="PLC downlink PHY rate", @@ -88,7 +101,7 @@ SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any]] = { value_func=lambda data: getattr(data, DataRateDirection.RX, 0), suggested_display_precision=0, ), - PLC_TX_RATE: DevoloSensorEntityDescription[DataRate]( + PLC_TX_RATE: DevoloSensorEntityDescription[DataRate, float]( key=PLC_TX_RATE, entity_category=EntityCategory.DIAGNOSTIC, name="PLC uplink PHY rate", @@ -97,6 +110,13 @@ SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any]] = { value_func=lambda data: getattr(data, DataRateDirection.TX, 0), suggested_display_precision=0, ), + LAST_RESTART: DevoloSensorEntityDescription[int, datetime]( + key=LAST_RESTART, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + value_func=_last_restart, + ), } @@ -109,7 +129,7 @@ async def async_setup_entry( device = entry.runtime_data.device coordinators = entry.runtime_data.coordinators - entities: list[BaseDevoloSensorEntity[Any, Any]] = [] + entities: list[BaseDevoloSensorEntity[Any, Any, Any]] = [] if device.plcnet: entities.append( DevoloSensorEntity( @@ -139,6 +159,14 @@ async def async_setup_entry( peer, ) ) + if device.device and "restart" in device.device.features: + entities.append( + DevoloSensorEntity( + entry, + coordinators[LAST_RESTART], + 
SENSOR_TYPES[LAST_RESTART], + ) + ) if device.device and "wifi1" in device.device.features: entities.append( DevoloSensorEntity( @@ -157,8 +185,11 @@ async def async_setup_entry( async_add_entities(entities) -class BaseDevoloSensorEntity( - Generic[_CoordinatorDataT, _ValueDataT], +class BaseDevoloSensorEntity[ + _CoordinatorDataT: _CoordinatorDataType, + _ValueDataT: _CoordinatorDataType, + _SensorDataT: _SensorDataType, +]( DevoloCoordinatorEntity[_CoordinatorDataT], SensorEntity, ): @@ -167,35 +198,41 @@ class BaseDevoloSensorEntity( def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[_CoordinatorDataT], - description: DevoloSensorEntityDescription[_ValueDataT], + coordinator: DevoloDataUpdateCoordinator[_CoordinatorDataT], + description: DevoloSensorEntityDescription[_ValueDataT, _SensorDataT], ) -> None: """Initialize entity.""" self.entity_description = description super().__init__(entry, coordinator) -class DevoloSensorEntity(BaseDevoloSensorEntity[_CoordinatorDataT, _CoordinatorDataT]): +class DevoloSensorEntity[ + _CoordinatorDataT: _CoordinatorDataType, + _ValueDataT: _CoordinatorDataType, + _SensorDataT: _SensorDataType, +](BaseDevoloSensorEntity[_CoordinatorDataT, _ValueDataT, _SensorDataT]): """Representation of a generic devolo sensor.""" - entity_description: DevoloSensorEntityDescription[_CoordinatorDataT] + entity_description: DevoloSensorEntityDescription[_CoordinatorDataT, _SensorDataT] @property - def native_value(self) -> float: + def native_value(self) -> int | float | datetime: """State of the sensor.""" return self.entity_description.value_func(self.coordinator.data) -class DevoloPlcDataRateSensorEntity(BaseDevoloSensorEntity[LogicalNetwork, DataRate]): +class DevoloPlcDataRateSensorEntity( + BaseDevoloSensorEntity[LogicalNetwork, DataRate, float] +): """Representation of a devolo PLC data rate sensor.""" - entity_description: DevoloSensorEntityDescription[DataRate] + entity_description: DevoloSensorEntityDescription[DataRate, float] def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[LogicalNetwork], - description: DevoloSensorEntityDescription[DataRate], + coordinator: DevoloDataUpdateCoordinator[LogicalNetwork], + description: DevoloSensorEntityDescription[DataRate, float], peer: str, ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/devolo_home_network/strings.json b/homeassistant/components/devolo_home_network/strings.json index 97348c5c43c..4b683b5d2fa 100644 --- a/homeassistant/components/devolo_home_network/strings.json +++ b/homeassistant/components/devolo_home_network/strings.json @@ -6,11 +6,17 @@ "description": "[%key:common::config_flow::description::confirm_setup%]", "data": { "ip_address": "[%key:common::config_flow::data::ip%]" + }, + "data_description": { + "ip_address": "IP address of your devolo Home Network device. This can be found in the devolo Home Network App on the device dashboard." } }, "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "Password you protected the device with." 
} }, "zeroconf_confirm": { @@ -60,6 +66,9 @@ "connected_wifi_clients": { "name": "Connected Wi-Fi clients" }, + "last_restart": { + "name": "Last restart of the device" + }, "neighboring_wifi_networks": { "name": "Neighboring Wi-Fi networks" }, @@ -91,6 +100,9 @@ }, "password_wrong": { "message": "The used password is wrong" + }, + "update_failed": { + "message": "Error while updating the data: {error}" } } } diff --git a/homeassistant/components/devolo_home_network/switch.py b/homeassistant/components/devolo_home_network/switch.py index c3400916d78..8ff35dcc4b6 100644 --- a/homeassistant/components/devolo_home_network/switch.py +++ b/homeassistant/components/devolo_home_network/switch.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable from dataclasses import dataclass -from typing import Any, Generic, TypeVar +from typing import Any from devolo_plc_api.device import Device from devolo_plc_api.device_api import WifiGuestAccessGet @@ -15,19 +15,19 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . import DevoloHomeNetworkConfigEntry from .const import DOMAIN, SWITCH_GUEST_WIFI, SWITCH_LEDS +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 -_DataT = TypeVar("_DataT", bound=WifiGuestAccessGet | bool) +type _DataType = WifiGuestAccessGet | bool @dataclass(frozen=True, kw_only=True) -class DevoloSwitchEntityDescription(SwitchEntityDescription, Generic[_DataT]): +class DevoloSwitchEntityDescription[_DataT: _DataType](SwitchEntityDescription): """Describes devolo switch entity.""" is_on_func: Callable[[_DataT], bool] @@ -81,7 +81,9 @@ async def async_setup_entry( async_add_entities(entities) -class DevoloSwitchEntity(DevoloCoordinatorEntity[_DataT], SwitchEntity): +class DevoloSwitchEntity[_DataT: _DataType]( + DevoloCoordinatorEntity[_DataT], SwitchEntity +): """Representation of a devolo switch.""" entity_description: DevoloSwitchEntityDescription[_DataT] @@ -89,7 +91,7 @@ class DevoloSwitchEntity(DevoloCoordinatorEntity[_DataT], SwitchEntity): def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[_DataT], + coordinator: DevoloDataUpdateCoordinator[_DataT], description: DevoloSwitchEntityDescription[_DataT], ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/devolo_home_network/update.py b/homeassistant/components/devolo_home_network/update.py index 29c0c8762b9..5091ce8e1e7 100644 --- a/homeassistant/components/devolo_home_network/update.py +++ b/homeassistant/components/devolo_home_network/update.py @@ -20,13 +20,13 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . 
import DevoloHomeNetworkConfigEntry from .const import DOMAIN, REGULAR_FIRMWARE +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -79,7 +79,7 @@ class DevoloUpdateEntity(DevoloCoordinatorEntity, UpdateEntity): def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator, + coordinator: DevoloDataUpdateCoordinator, description: DevoloUpdateEntityDescription, ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/dexcom/__init__.py b/homeassistant/components/dexcom/__init__.py index 5ff95fae47e..e93e8e66358 100644 --- a/homeassistant/components/dexcom/__init__.py +++ b/homeassistant/components/dexcom/__init__.py @@ -6,12 +6,12 @@ import logging from pydexcom import AccountError, Dexcom, GlucoseReading, SessionError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_SERVER, DOMAIN, MG_DL, PLATFORMS, SERVER_OUS +from .const import CONF_SERVER, DOMAIN, PLATFORMS, SERVER_OUS _LOGGER = logging.getLogger(__name__) @@ -32,11 +32,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except SessionError as error: raise ConfigEntryNotReady from error - if not entry.options: - hass.config_entries.async_update_entry( - entry, options={CONF_UNIT_OF_MEASUREMENT: MG_DL} - ) - async def async_update_data(): try: return await hass.async_add_executor_job(dexcom.get_current_glucose_reading) @@ -46,6 +41,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator[GlucoseReading]( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update_data, update_interval=SCAN_INTERVAL, @@ -54,8 +50,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator - entry.async_on_unload(entry.add_update_listener(update_listener)) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -66,8 +60,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Handle options update.""" - await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/dexcom/config_flow.py b/homeassistant/components/dexcom/config_flow.py index 17bd1b3f7a8..90917e0ce2c 100644 --- a/homeassistant/components/dexcom/config_flow.py +++ b/homeassistant/components/dexcom/config_flow.py @@ -7,16 +7,10 @@ from typing import Any from pydexcom import AccountError, Dexcom, SessionError import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) -from homeassistant.const import CONF_PASSWORD, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME -from homeassistant.core import callback +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from 
homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from .const import CONF_SERVER, DOMAIN, MG_DL, MMOL_L, SERVER_OUS, SERVER_US +from .const import CONF_SERVER, DOMAIN, SERVER_OUS, SERVER_US DATA_SCHEMA = vol.Schema( { @@ -62,36 +56,3 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - - @staticmethod - @callback - def async_get_options_flow( - config_entry: ConfigEntry, - ) -> DexcomOptionsFlowHandler: - """Get the options flow for this handler.""" - return DexcomOptionsFlowHandler(config_entry) - - -class DexcomOptionsFlowHandler(OptionsFlow): - """Handle a option flow for Dexcom.""" - - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): - """Handle options flow.""" - if user_input is not None: - return self.async_create_entry(title="", data=user_input) - - data_schema = vol.Schema( - { - vol.Optional( - CONF_UNIT_OF_MEASUREMENT, - default=self.config_entry.options.get( - CONF_UNIT_OF_MEASUREMENT, MG_DL - ), - ): vol.In({MG_DL, MMOL_L}), - } - ) - return self.async_show_form(step_id="init", data_schema=data_schema) diff --git a/homeassistant/components/dexcom/const.py b/homeassistant/components/dexcom/const.py index 487a844eb2b..66999e51e4b 100644 --- a/homeassistant/components/dexcom/const.py +++ b/homeassistant/components/dexcom/const.py @@ -5,9 +5,6 @@ from homeassistant.const import Platform DOMAIN = "dexcom" PLATFORMS = [Platform.SENSOR] -MMOL_L = "mmol/L" -MG_DL = "mg/dL" - CONF_SERVER = "server" SERVER_OUS = "EU" diff --git a/homeassistant/components/dexcom/sensor.py b/homeassistant/components/dexcom/sensor.py index 10b30f39fcb..850678e7ac9 100644 --- a/homeassistant/components/dexcom/sensor.py +++ b/homeassistant/components/dexcom/sensor.py @@ -6,7 +6,7 @@ from pydexcom import GlucoseReading from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME +from homeassistant.const import CONF_USERNAME, UnitOfBloodGlucoseConcentration from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -15,7 +15,7 @@ from homeassistant.helpers.update_coordinator import ( DataUpdateCoordinator, ) -from .const import DOMAIN, MG_DL +from .const import DOMAIN TRENDS = { 1: "rising_quickly", @@ -36,13 +36,10 @@ async def async_setup_entry( """Set up the Dexcom sensors.""" coordinator = hass.data[DOMAIN][config_entry.entry_id] username = config_entry.data[CONF_USERNAME] - unit_of_measurement = config_entry.options[CONF_UNIT_OF_MEASUREMENT] async_add_entities( [ DexcomGlucoseTrendSensor(coordinator, username, config_entry.entry_id), - DexcomGlucoseValueSensor( - coordinator, username, config_entry.entry_id, unit_of_measurement - ), + DexcomGlucoseValueSensor(coordinator, username, config_entry.entry_id), ], ) @@ -73,6 +70,10 @@ class DexcomSensorEntity( class DexcomGlucoseValueSensor(DexcomSensorEntity): """Representation of a Dexcom glucose value sensor.""" + _attr_device_class = SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION + _attr_native_unit_of_measurement = ( + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER + ) _attr_translation_key = "glucose_value" def __init__( @@ -80,18 +81,15 @@ class 
DexcomGlucoseValueSensor(DexcomSensorEntity): coordinator: DataUpdateCoordinator, username: str, entry_id: str, - unit_of_measurement: str, ) -> None: """Initialize the sensor.""" super().__init__(coordinator, username, entry_id, "value") - self._attr_native_unit_of_measurement = unit_of_measurement - self._key = "mg_dl" if unit_of_measurement == MG_DL else "mmol_l" @property def native_value(self): """Return the state of the sensor.""" if self.coordinator.data: - return getattr(self.coordinator.data, self._key) + return self.coordinator.data.mg_dl return None diff --git a/homeassistant/components/dhcp/__init__.py b/homeassistant/components/dhcp/__init__.py index 0897729ec72..2de676ef52a 100644 --- a/homeassistant/components/dhcp/__init__.py +++ b/homeassistant/components/dhcp/__init__.py @@ -51,6 +51,7 @@ from homeassistant.helpers import ( discovery_flow, ) from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac +from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.event import ( async_track_state_added_domain, @@ -155,6 +156,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await dhcp_watcher.async_start() watchers.append(dhcp_watcher) + rediscovery_watcher = RediscoveryWatcher( + hass, address_data, integration_matchers + ) + rediscovery_watcher.async_start() + watchers.append(rediscovery_watcher) + @callback def _async_stop(event: Event) -> None: for watcher in watchers: @@ -192,7 +199,11 @@ class WatcherBase: @callback def async_process_client( - self, ip_address: str, hostname: str, unformatted_mac_address: str + self, + ip_address: str, + hostname: str, + unformatted_mac_address: str, + force: bool = False, ) -> None: """Process a client.""" if (made_ip_address := cached_ip_addresses(ip_address)) is None: @@ -213,19 +224,21 @@ class WatcherBase: # and since all consumers of this data are expecting it to be # formatted without colons we will continue to do so mac_address = formatted_mac.replace(":", "") + compressed_ip_address = made_ip_address.compressed - data = self._address_data.get(ip_address) + data = self._address_data.get(mac_address) if ( - data - and data[MAC_ADDRESS] == mac_address + not force + and data + and data[IP_ADDRESS] == compressed_ip_address and data[HOSTNAME].startswith(hostname) ): # If the address data is the same no need # to process it return - data = {MAC_ADDRESS: mac_address, HOSTNAME: hostname} - self._address_data[ip_address] = data + data = {IP_ADDRESS: compressed_ip_address, HOSTNAME: hostname} + self._address_data[mac_address] = data lowercase_hostname = hostname.lower() uppercase_mac = mac_address.upper() @@ -270,6 +283,14 @@ class WatcherBase: _LOGGER.debug("Matched %s against %s", data, matcher) matched_domains.add(domain) + if not matched_domains: + return # avoid creating DiscoveryKey if there are no matches + + discovery_key = DiscoveryKey( + domain=DOMAIN, + key=mac_address, + version=1, + ) for domain in matched_domains: discovery_flow.async_create_flow( self.hass, @@ -280,6 +301,7 @@ class WatcherBase: hostname=lowercase_hostname, macaddress=mac_address, ), + discovery_key=discovery_key, ) @@ -413,6 +435,38 @@ class DHCPWatcher(WatcherBase): self._unsub = await aiodhcpwatcher.async_start(self._async_process_dhcp_request) +class RediscoveryWatcher(WatcherBase): + """Class to trigger rediscovery on config entry removal.""" + + @callback + def _handle_config_entry_removed( + self, + 
entry: config_entries.ConfigEntry, + ) -> None: + """Handle config entry changes.""" + for discovery_key in entry.discovery_keys[DOMAIN]: + if discovery_key.version != 1 or not isinstance(discovery_key.key, str): + continue + mac_address = discovery_key.key + _LOGGER.debug("Rediscover service %s", mac_address) + if data := self._address_data.get(mac_address): + self.async_process_client( + data[IP_ADDRESS], + data[HOSTNAME], + mac_address, + True, # Force rediscovery + ) + + @callback + def async_start(self) -> None: + """Start watching for config entry removals.""" + self._unsub = async_dispatcher_connect( + self.hass, + config_entries.signal_discovered_config_entry_removed(DOMAIN), + self._handle_config_entry_removed, + ) + + @lru_cache(maxsize=4096, typed=True) def _compile_fnmatch(pattern: str) -> re.Pattern: """Compile a fnmatch pattern.""" diff --git a/homeassistant/components/dhcp/manifest.json b/homeassistant/components/dhcp/manifest.json index 6023e55faf3..ba773782e1c 100644 --- a/homeassistant/components/dhcp/manifest.json +++ b/homeassistant/components/dhcp/manifest.json @@ -16,6 +16,6 @@ "requirements": [ "aiodhcpwatcher==1.0.2", "aiodiscover==2.1.0", - "cached-ipaddress==0.5.0" + "cached-ipaddress==0.8.0" ] } diff --git a/homeassistant/components/digital_ocean/manifest.json b/homeassistant/components/digital_ocean/manifest.json index 7fee8ca5b2b..819a557491a 100644 --- a/homeassistant/components/digital_ocean/manifest.json +++ b/homeassistant/components/digital_ocean/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/digital_ocean", "iot_class": "local_polling", "loggers": ["digitalocean"], + "quality_scale": "legacy", "requirements": ["python-digitalocean==1.13.2"] } diff --git a/homeassistant/components/directv/config_flow.py b/homeassistant/components/directv/config_flow.py index 56d8f262d1c..1e0577b4f7c 100644 --- a/homeassistant/components/directv/config_flow.py +++ b/homeassistant/components/directv/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import Any, cast from urllib.parse import urlparse from directv import DIRECTV, DIRECTVError @@ -70,7 +70,9 @@ class DirecTVConfigFlow(ConfigFlow, domain=DOMAIN): self, discovery_info: ssdp.SsdpServiceInfo ) -> ConfigFlowResult: """Handle SSDP discovery.""" - host = urlparse(discovery_info.ssdp_location).hostname + # We can cast the hostname to str because the ssdp_location is not bytes and + # not a relative url + host = cast(str, urlparse(discovery_info.ssdp_location).hostname) receiver_id = None if discovery_info.upnp.get(ssdp.ATTR_UPNP_SERIAL): diff --git a/homeassistant/components/directv/manifest.json b/homeassistant/components/directv/manifest.json index 957bbff0acc..bee2c297635 100644 --- a/homeassistant/components/directv/manifest.json +++ b/homeassistant/components/directv/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/directv", "iot_class": "local_polling", "loggers": ["directv"], - "quality_scale": "silver", "requirements": ["directv==0.4.0"], "ssdp": [ { diff --git a/homeassistant/components/discogs/manifest.json b/homeassistant/components/discogs/manifest.json index fceb214aded..f724b4bc6fd 100644 --- a/homeassistant/components/discogs/manifest.json +++ b/homeassistant/components/discogs/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/discogs", "iot_class": "cloud_polling", "loggers": ["discogs_client"], + 
"quality_scale": "legacy", "requirements": ["discogs-client==2.3.0"] } diff --git a/homeassistant/components/discovergy/__init__.py b/homeassistant/components/discovergy/__init__.py index 72aa6c19a21..81c33adc052 100644 --- a/homeassistant/components/discovergy/__init__.py +++ b/homeassistant/components/discovergy/__init__.py @@ -60,11 +60,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: DiscovergyConfigEntry) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: DiscovergyConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_reload_entry(hass: HomeAssistant, entry: DiscovergyConfigEntry) -> None: """Handle an options update.""" await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/discovergy/config_flow.py b/homeassistant/components/discovergy/config_flow.py index 5e17f0764b7..f24fdd1e43d 100644 --- a/homeassistant/components/discovergy/config_flow.py +++ b/homeassistant/components/discovergy/config_flow.py @@ -11,7 +11,7 @@ from pydiscovergy.authentication import BasicAuth import pydiscovergy.error as discovergyError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.selector import ( @@ -52,29 +52,14 @@ class DiscovergyConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _existing_entry: ConfigEntry | None = None - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle the initial step.""" - if user_input is None: - return self.async_show_form( - step_id="user", - data_schema=CONFIG_SCHEMA, - ) - - return await self._validate_and_save(user_input) - async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle the initial step.""" - self._existing_entry = await self.async_set_unique_id(self.context["unique_id"]) - return await self._validate_and_save(entry_data, step_id="reauth") + return await self.async_step_user() - async def _validate_and_save( - self, user_input: Mapping[str, Any] | None = None, step_id: str = "user" + async def async_step_user( + self, user_input: Mapping[str, Any] | None = None ) -> ConfigFlowResult: """Validate user input and create config entry.""" errors = {} @@ -95,17 +80,17 @@ class DiscovergyConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected error occurred while getting meters") errors["base"] = "unknown" else: - if self._existing_entry: + await self.async_set_unique_id(user_input[CONF_EMAIL].lower()) + + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="account_mismatch") return self.async_update_reload_and_abort( - entry=self._existing_entry, - data={ - CONF_EMAIL: user_input[CONF_EMAIL], + entry=self._get_reauth_entry(), + data_updates={ CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) - # set unique id to title which is the account email - await self.async_set_unique_id(user_input[CONF_EMAIL].lower()) self._abort_if_unique_id_configured() return self.async_create_entry( @@ -113,10 +98,12 @@ class 
DiscovergyConfigFlow(ConfigFlow, domain=DOMAIN):
             )
 
         return self.async_show_form(
-            step_id=step_id,
+            step_id="user",
             data_schema=self.add_suggested_values_to_schema(
                 CONFIG_SCHEMA,
-                self._existing_entry.data if self._existing_entry else user_input,
+                self._get_reauth_entry().data
+                if self.source == SOURCE_REAUTH
+                else user_input,
             ),
             errors=errors,
         )
diff --git a/homeassistant/components/discovergy/manifest.json b/homeassistant/components/discovergy/manifest.json
index 1061766a64c..b82f28a5d11 100644
--- a/homeassistant/components/discovergy/manifest.json
+++ b/homeassistant/components/discovergy/manifest.json
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/discovergy",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["pydiscovergy==3.0.1"]
+  "requirements": ["pydiscovergy==3.0.2"]
 }
diff --git a/homeassistant/components/discovergy/quality_scale.yaml b/homeassistant/components/discovergy/quality_scale.yaml
new file mode 100644
index 00000000000..3caeaa6bbe0
--- /dev/null
+++ b/homeassistant/components/discovergy/quality_scale.yaml
@@ -0,0 +1,96 @@
+rules:
+  # Bronze
+  action-setup:
+    status: exempt
+    comment: |
+      The integration does not provide any additional actions.
+  appropriate-polling: done
+  brands: done
+  common-modules: done
+  config-flow-test-coverage: done
+  config-flow:
+    status: todo
+    comment: |
+      The data_descriptions are missing.
+  dependency-transparency: done
+  docs-actions:
+    status: exempt
+    comment: |
+      The integration does not provide any additional actions.
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: todo
+  entity-event-setup:
+    status: exempt
+    comment: |
+      Entities of this integration do not explicitly subscribe to events.
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+
+  # Silver
+  action-exceptions:
+    status: exempt
+    comment: |
+      The integration does not provide any additional actions.
+  config-entry-unloading: done
+  docs-configuration-parameters:
+    status: exempt
+    comment: |
+      The integration does not provide any additional options.
+  docs-installation-parameters: todo
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: done
+  parallel-updates: todo
+  reauthentication-flow: done
+  test-coverage: done
+
+  # Gold
+  devices: done
+  diagnostics: done
+  discovery-update-info:
+    status: exempt
+    comment: |
+      This integration cannot be discovered, as it connects to a cloud service.
+  discovery:
+    status: exempt
+    comment: |
+      This integration cannot be discovered, as it connects to a cloud service.
+  docs-data-update: todo
+  docs-examples: todo
+  docs-known-limitations: todo
+  docs-supported-devices: todo
+  docs-supported-functions: todo
+  docs-troubleshooting: todo
+  docs-use-cases: todo
+  dynamic-devices:
+    status: exempt
+    comment: |
+      The integration connects to a single device per configuration entry.
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default: done
+  entity-translations: done
+  exception-translations: todo
+  icon-translations:
+    status: exempt
+    comment: |
+      The integration does not provide any additional icons.
+  reconfiguration-flow: todo
+  repair-issues:
+    status: exempt
+    comment: |
+      This integration does not raise any repairable issues.
+  stale-devices:
+    status: exempt
+    comment: |
+      This integration connects to a single device per configuration entry.
+
+  # Platinum
+  async-dependency: done
+  inject-websession: done
+  strict-typing: done
diff --git a/homeassistant/components/discovergy/strings.json b/homeassistant/components/discovergy/strings.json
index 34c21bc1cfe..b626a11ea1e 100644
--- a/homeassistant/components/discovergy/strings.json
+++ b/homeassistant/components/discovergy/strings.json
@@ -6,12 +6,6 @@
           "email": "[%key:common::config_flow::data::email%]",
           "password": "[%key:common::config_flow::data::password%]"
         }
-      },
-      "reauth": {
-        "data": {
-          "email": "[%key:common::config_flow::data::email%]",
-          "password": "[%key:common::config_flow::data::password%]"
-        }
       }
     },
     "error": {
@@ -21,6 +15,7 @@
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
+      "account_mismatch": "The inexogy account authenticated with does not match the account that needs re-authentication.",
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   },
diff --git a/homeassistant/components/dlib_face_detect/manifest.json b/homeassistant/components/dlib_face_detect/manifest.json
index e395a84f206..e8476583081 100644
--- a/homeassistant/components/dlib_face_detect/manifest.json
+++ b/homeassistant/components/dlib_face_detect/manifest.json
@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/dlib_face_detect",
   "iot_class": "local_push",
   "loggers": ["face_recognition"],
+  "quality_scale": "legacy",
   "requirements": ["face-recognition==1.2.3"]
 }
diff --git a/homeassistant/components/dlib_face_identify/manifest.json b/homeassistant/components/dlib_face_identify/manifest.json
index 60c0ef3c766..2a764e4a3e8 100644
--- a/homeassistant/components/dlib_face_identify/manifest.json
+++ b/homeassistant/components/dlib_face_identify/manifest.json
@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/dlib_face_identify",
   "iot_class": "local_push",
   "loggers": ["face_recognition"],
+  "quality_scale": "legacy",
   "requirements": ["face-recognition==1.2.3"]
 }
diff --git a/homeassistant/components/dlna_dmr/config_flow.py b/homeassistant/components/dlna_dmr/config_flow.py
index 265c78fd9a9..75f50192500 100644
--- a/homeassistant/components/dlna_dmr/config_flow.py
+++ b/homeassistant/components/dlna_dmr/config_flow.py
@@ -7,7 +7,7 @@ from functools import partial
 from ipaddress import IPv6Address, ip_address
 import logging
 from pprint import pformat
-from typing import Any, cast
+from typing import TYPE_CHECKING, Any, cast
 from urllib.parse import urlparse
 
 from async_upnp_client.client import UpnpError
@@ -74,7 +74,7 @@ class DlnaDmrFlowHandler(ConfigFlow, domain=DOMAIN):
         config_entry: ConfigEntry,
     ) -> OptionsFlow:
         """Define the config flow to handle options."""
-        return DlnaDmrOptionsFlowHandler(config_entry)
+        return DlnaDmrOptionsFlowHandler()
 
     async def async_step_user(self, user_input: FlowInput = None) -> ConfigFlowResult:
         """Handle a flow initialized by the user.
@@ -138,6 +138,9 @@ class DlnaDmrFlowHandler(ConfigFlow, domain=DOMAIN): LOGGER.debug("async_step_ssdp: discovery_info %s", pformat(discovery_info)) await self._async_set_info_from_discovery(discovery_info) + if TYPE_CHECKING: + # _async_set_info_from_discovery unconditionally sets self._name + assert self._name is not None if _is_ignored_device(discovery_info): return self.async_abort(reason="alternative_integration") @@ -195,31 +198,6 @@ class DlnaDmrFlowHandler(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_unignore( - self, user_input: Mapping[str, Any] - ) -> ConfigFlowResult: - """Rediscover previously ignored devices by their unique_id.""" - LOGGER.debug("async_step_unignore: user_input: %s", user_input) - self._udn = user_input["unique_id"] - assert self._udn - await self.async_set_unique_id(self._udn) - - # Find a discovery matching the unignored unique_id for a DMR device - for dev_type in DmrDevice.DEVICE_TYPES: - discovery = await ssdp.async_get_discovery_info_by_udn_st( - self.hass, self._udn, dev_type - ) - if discovery: - break - else: - return self.async_abort(reason="discovery_error") - - await self._async_set_info_from_discovery(discovery, abort_if_configured=False) - - self.context["title_placeholders"] = {"name": self._name} - - return await self.async_step_confirm() - async def async_step_confirm( self, user_input: FlowInput = None ) -> ConfigFlowResult: @@ -349,10 +327,6 @@ class DlnaDmrOptionsFlowHandler(OptionsFlow): Configures the single instance and updates the existing config entry. """ - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/dlna_dmr/manifest.json b/homeassistant/components/dlna_dmr/manifest.json index 1120ec3a2f1..84024d5bde1 100644 --- a/homeassistant/components/dlna_dmr/manifest.json +++ b/homeassistant/components/dlna_dmr/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/dlna_dmr", "iot_class": "local_push", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.40.0", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.41.0", "getmac==0.9.4"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", diff --git a/homeassistant/components/dlna_dmr/strings.json b/homeassistant/components/dlna_dmr/strings.json index 48f347a0908..be4336ea8a5 100644 --- a/homeassistant/components/dlna_dmr/strings.json +++ b/homeassistant/components/dlna_dmr/strings.json @@ -17,7 +17,7 @@ } }, "import_turn_on": { - "description": "Please turn on the device and click submit to continue migration" + "description": "Please turn on the device and select **Submit** to continue migration" }, "confirm": { "description": "[%key:common::config_flow::description::confirm_setup%]" @@ -27,7 +27,6 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "alternative_integration": "Device is better supported by another integration", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "discovery_error": "Failed to discover a matching DLNA device", "incomplete_config": "Configuration is missing a required variable", "non_unique_id": "Multiple devices found with the same unique ID", "not_dmr": "Device is not a supported Digital Media Renderer" diff --git a/homeassistant/components/dlna_dms/config_flow.py 
b/homeassistant/components/dlna_dms/config_flow.py index b50dc7ff227..ad959ece3b6 100644 --- a/homeassistant/components/dlna_dms/config_flow.py +++ b/homeassistant/components/dlna_dms/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging from pprint import pformat -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from urllib.parse import urlparse from async_upnp_client.profiles.dlna import DmsDevice @@ -74,6 +74,9 @@ class DlnaDmsFlowHandler(ConfigFlow, domain=DOMAIN): LOGGER.debug("async_step_ssdp: discovery_info %s", pformat(discovery_info)) await self._async_parse_discovery(discovery_info) + if TYPE_CHECKING: + # _async_parse_discovery unconditionally sets self._name + assert self._name is not None # Abort if the device doesn't support all services required for a DmsDevice. # Use the discovery_info instead of DmsDevice.is_profile_device to avoid diff --git a/homeassistant/components/dlna_dms/dms.py b/homeassistant/components/dlna_dms/dms.py index afff1152cca..8f475d53280 100644 --- a/homeassistant/components/dlna_dms/dms.py +++ b/homeassistant/components/dlna_dms/dms.py @@ -7,7 +7,6 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from enum import StrEnum import functools -from functools import cached_property from typing import Any, cast from async_upnp_client.aiohttp import AiohttpSessionRequester @@ -17,11 +16,15 @@ from async_upnp_client.const import NotificationSubType from async_upnp_client.exceptions import UpnpActionError, UpnpConnectionError, UpnpError from async_upnp_client.profiles.dlna import ContentDirectoryErrorCode, DmsDevice from didl_lite import didl_lite +from propcache import cached_property from homeassistant.components import ssdp from homeassistant.components.media_player import BrowseError, MediaClass -from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.media_source.models import BrowseMediaSource, PlayMedia +from homeassistant.components.media_source import ( + BrowseMediaSource, + PlayMedia, + Unresolvable, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE_ID, CONF_URL from homeassistant.core import HomeAssistant, callback diff --git a/homeassistant/components/dlna_dms/manifest.json b/homeassistant/components/dlna_dms/manifest.json index 62defe0e2e3..1913bb9d5d7 100644 --- a/homeassistant/components/dlna_dms/manifest.json +++ b/homeassistant/components/dlna_dms/manifest.json @@ -7,8 +7,7 @@ "dependencies": ["ssdp"], "documentation": "https://www.home-assistant.io/integrations/dlna_dms", "iot_class": "local_polling", - "quality_scale": "platinum", - "requirements": ["async-upnp-client==0.40.0"], + "requirements": ["async-upnp-client==0.41.0"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaServer:1", diff --git a/homeassistant/components/dlna_dms/media_source.py b/homeassistant/components/dlna_dms/media_source.py index 399398fa5b9..f5bb440f978 100644 --- a/homeassistant/components/dlna_dms/media_source.py +++ b/homeassistant/components/dlna_dms/media_source.py @@ -13,11 +13,11 @@ Media identifiers can look like: from __future__ import annotations from homeassistant.components.media_player import BrowseError, MediaClass, MediaType -from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, MediaSourceItem, + 
Unresolvable, ) from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/dnsip/config_flow.py b/homeassistant/components/dnsip/config_flow.py index 6dda0c03910..8c2cfa5e556 100644 --- a/homeassistant/components/dnsip/config_flow.py +++ b/homeassistant/components/dnsip/config_flow.py @@ -14,7 +14,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_NAME, CONF_PORT from homeassistant.core import callback @@ -101,7 +101,7 @@ class DnsIPConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> DnsIPOptionsFlowHandler: """Return Option handler.""" - return DnsIPOptionsFlowHandler(config_entry) + return DnsIPOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -165,7 +165,7 @@ class DnsIPConfigFlow(ConfigFlow, domain=DOMAIN): ) -class DnsIPOptionsFlowHandler(OptionsFlowWithConfigEntry): +class DnsIPOptionsFlowHandler(OptionsFlow): """Handle a option config flow for dnsip integration.""" async def async_step_init( diff --git a/homeassistant/components/dnsip/strings.json b/homeassistant/components/dnsip/strings.json index bc502776cc6..39a0fbf7cd3 100644 --- a/homeassistant/components/dnsip/strings.json +++ b/homeassistant/components/dnsip/strings.json @@ -11,6 +11,9 @@ } } }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, "error": { "invalid_hostname": "Invalid hostname" } diff --git a/homeassistant/components/dominos/icons.json b/homeassistant/components/dominos/icons.json index d88bfb2542f..ca33ac91dfd 100644 --- a/homeassistant/components/dominos/icons.json +++ b/homeassistant/components/dominos/icons.json @@ -1,5 +1,7 @@ { "services": { - "order": "mdi:pizza" + "order": { + "service": "mdi:pizza" + } } } diff --git a/homeassistant/components/dominos/manifest.json b/homeassistant/components/dominos/manifest.json index 442f433db7c..5618c6f0d87 100644 --- a/homeassistant/components/dominos/manifest.json +++ b/homeassistant/components/dominos/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/dominos", "iot_class": "cloud_polling", "loggers": ["pizzapi"], + "quality_scale": "legacy", "requirements": ["pizzapi==0.0.6"] } diff --git a/homeassistant/components/doods/image_processing.py b/homeassistant/components/doods/image_processing.py index acd9d7fe71b..51633d0e05d 100644 --- a/homeassistant/components/doods/image_processing.py +++ b/homeassistant/components/doods/image_processing.py @@ -278,7 +278,7 @@ class Doods(ImageProcessingEntity): ) for path in paths: - _LOGGER.info("Saving results image to %s", path) + _LOGGER.debug("Saving results image to %s", path) os.makedirs(os.path.dirname(path), exist_ok=True) img.save(path) diff --git a/homeassistant/components/doods/manifest.json b/homeassistant/components/doods/manifest.json index fabb2c30190..ae307bb4962 100644 --- a/homeassistant/components/doods/manifest.json +++ b/homeassistant/components/doods/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/doods", "iot_class": "local_polling", "loggers": ["pydoods"], - "requirements": ["pydoods==1.0.2", "Pillow==10.4.0"] + "quality_scale": "legacy", + "requirements": ["pydoods==1.0.2", "Pillow==11.0.0"] } diff --git a/homeassistant/components/doorbird/config_flow.py b/homeassistant/components/doorbird/config_flow.py index 31204a6663b..ebb1d6fc126 100644 --- 
a/homeassistant/components/doorbird/config_flow.py +++ b/homeassistant/components/doorbird/config_flow.py @@ -97,17 +97,17 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + reauth_entry: ConfigEntry + def __init__(self) -> None: """Initialize the DoorBird config flow.""" self.discovery_schema: vol.Schema | None = None - self.reauth_entry: ConfigEntry | None = None async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth.""" - entry_id = self.context["entry_id"] - self.reauth_entry = self.hass.config_entries.async_get_entry(entry_id) + self.reauth_entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -115,9 +115,7 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle reauth input.""" errors: dict[str, str] = {} - existing_entry = self.reauth_entry - assert existing_entry - existing_data = existing_entry.data + existing_data = self.reauth_entry.data placeholders: dict[str, str] = { CONF_NAME: existing_data[CONF_NAME], CONF_HOST: existing_data[CONF_HOST], @@ -132,7 +130,7 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN): _, errors = await self._async_validate_or_error(new_config) if not errors: return self.async_update_reload_and_abort( - existing_entry, data=new_config + self.reauth_entry, data=new_config ) return self.async_show_form( @@ -215,16 +213,12 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for doorbird.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/doorbird/device.py b/homeassistant/components/doorbird/device.py index adcb441f458..eae5bb6804f 100644 --- a/homeassistant/components/doorbird/device.py +++ b/homeassistant/components/doorbird/device.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections import defaultdict from dataclasses import dataclass -from functools import cached_property from http import HTTPStatus import logging from typing import Any @@ -16,6 +15,7 @@ from doorbirdpy import ( DoorBirdScheduleEntryOutput, DoorBirdScheduleEntrySchedule, ) +from propcache import cached_property from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -240,7 +240,7 @@ class ConfiguredDoorBird: ) return False - _LOGGER.info("Successfully registered URL for %s on %s", event, self.name) + _LOGGER.debug("Successfully registered URL for %s on %s", event, self.name) return True def get_event_data(self, event: str) -> dict[str, str | None]: diff --git a/homeassistant/components/doorbird/manifest.json b/homeassistant/components/doorbird/manifest.json index 0e9f03c8ef8..8480a496762 100644 --- a/homeassistant/components/doorbird/manifest.json +++ b/homeassistant/components/doorbird/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/doorbird", "iot_class": "local_push", "loggers": ["doorbirdpy"], - "requirements": ["DoorBirdPy==3.0.2"], + "requirements": ["DoorBirdPy==3.0.8"], "zeroconf": [ { "type": "_axis-video._tcp.local.", diff --git 
a/homeassistant/components/dormakaba_dkey/__init__.py b/homeassistant/components/dormakaba_dkey/__init__.py index a8868e8563c..b4304e75aab 100644 --- a/homeassistant/components/dormakaba_dkey/__init__.py +++ b/homeassistant/components/dormakaba_dkey/__init__.py @@ -69,6 +69,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=lock.name, update_method=_async_update, update_interval=timedelta(seconds=UPDATE_SECONDS), diff --git a/homeassistant/components/dormakaba_dkey/config_flow.py b/homeassistant/components/dormakaba_dkey/config_flow.py index 5f90e7e663a..0d23b822231 100644 --- a/homeassistant/components/dormakaba_dkey/config_flow.py +++ b/homeassistant/components/dormakaba_dkey/config_flow.py @@ -15,7 +15,7 @@ from homeassistant.components.bluetooth import ( async_discovered_service_info, async_last_service_info, ) -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ADDRESS from .const import CONF_ASSOCIATION_DATA, DOMAIN @@ -34,8 +34,6 @@ class DormkabaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None - def __init__(self) -> None: """Initialize the config flow.""" self._lock: DKEYLock | None = None @@ -121,9 +119,6 @@ class DormkabaConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauthorization request.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -131,13 +126,11 @@ class DormkabaConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle reauthorization flow.""" errors = {} - reauth_entry = self._reauth_entry - assert reauth_entry is not None if user_input is not None: if ( discovery_info := async_last_service_info( - self.hass, reauth_entry.data[CONF_ADDRESS], True + self.hass, self._get_reauth_entry().data[CONF_ADDRESS], True ) ) is None: errors = {"base": "no_longer_in_range"} @@ -183,10 +176,10 @@ class DormkabaConfigFlow(ConfigFlow, domain=DOMAIN): CONF_ADDRESS: self._discovery_info.device.address, CONF_ASSOCIATION_DATA: association_data.to_json(), } - if reauth_entry := self._reauth_entry: - self.hass.config_entries.async_update_entry(reauth_entry, data=data) - await self.hass.config_entries.async_reload(reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) return self.async_create_entry( title=lock.device_info.device_name diff --git a/homeassistant/components/dormakaba_dkey/strings.json b/homeassistant/components/dormakaba_dkey/strings.json index 1fdc7cb359f..eb8cbc1d676 100644 --- a/homeassistant/components/dormakaba_dkey/strings.json +++ b/homeassistant/components/dormakaba_dkey/strings.json @@ -12,7 +12,7 @@ "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]" }, "reauth_confirm": { - "description": "The activation code is no longer valid, a new unused activation code is needed.\n\n" + "description": "The activation code is no longer valid, a new unused activation code is needed." 
}, "associate": { "description": "Provide an unused activation code.\n\nTo create an activation code, create a new key in the dKey admin app, then choose to share the key and share an activation code.\n\nMake sure to close the dKey admin app before proceeding.", diff --git a/homeassistant/components/dovado/manifest.json b/homeassistant/components/dovado/manifest.json index 9a0fc46ad16..78b1e0c6719 100644 --- a/homeassistant/components/dovado/manifest.json +++ b/homeassistant/components/dovado/manifest.json @@ -5,5 +5,6 @@ "disabled": "This integration is disabled because it uses non-open source code to operate.", "documentation": "https://www.home-assistant.io/integrations/dovado", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["dovado==0.4.1"] } diff --git a/homeassistant/components/downloader/__init__.py b/homeassistant/components/downloader/__init__.py index 3fded1215c4..75e1103a712 100644 --- a/homeassistant/components/downloader/__init__.py +++ b/homeassistant/components/downloader/__init__.py @@ -10,17 +10,10 @@ import threading import requests import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - HomeAssistant, - ServiceCall, -) -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.service import async_register_admin_service -from homeassistant.helpers.typing import ConfigType from homeassistant.util import raise_if_invalid_filename, raise_if_invalid_path from .const import ( @@ -36,67 +29,6 @@ from .const import ( SERVICE_DOWNLOAD_FILE, ) -CONFIG_SCHEMA = vol.Schema( - {DOMAIN: vol.Schema({vol.Required(CONF_DOWNLOAD_DIR): cv.string})}, - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Downloader component, via the YAML file.""" - if DOMAIN not in config: - return True - - hass.async_create_task(_async_import_config(hass, config)) - return True - - -async def _async_import_config(hass: HomeAssistant, config: ConfigType) -> None: - """Import the Downloader component from the YAML file.""" - - import_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_DOWNLOAD_DIR: config[DOMAIN][CONF_DOWNLOAD_DIR], - }, - ) - - if ( - import_result["type"] == FlowResultType.ABORT - and import_result["reason"] != "single_instance_allowed" - ): - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.10.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="directory_does_not_exist", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Downloader", - "url": "/config/integrations/dashboard/add?domain=downloader", - }, - ) - else: - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.10.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Downloader", - }, - ) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Listen for download 
events to download files.""" diff --git a/homeassistant/components/downloader/config_flow.py b/homeassistant/components/downloader/config_flow.py index e7191e055a6..3c3d6189f8a 100644 --- a/homeassistant/components/downloader/config_flow.py +++ b/homeassistant/components/downloader/config_flow.py @@ -43,14 +43,6 @@ class DownloaderConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Handle a flow initiated by configuration file.""" - try: - await self._validate_input(user_input) - except DirectoryDoesNotExist: - return self.async_abort(reason="directory_does_not_exist") - return self.async_create_entry(title=DEFAULT_NAME, data=user_input) - async def _validate_input(self, user_input: dict[str, Any]) -> None: """Validate the user input if the directory exists.""" download_path = user_input[CONF_DOWNLOAD_DIR] diff --git a/homeassistant/components/downloader/icons.json b/homeassistant/components/downloader/icons.json index 2a78df93ca7..8f8b5bb2688 100644 --- a/homeassistant/components/downloader/icons.json +++ b/homeassistant/components/downloader/icons.json @@ -1,5 +1,7 @@ { "services": { - "download_file": "mdi:download" + "download_file": { + "service": "mdi:download" + } } } diff --git a/homeassistant/components/downloader/strings.json b/homeassistant/components/downloader/strings.json index cf962bd9713..11a2bda8fce 100644 --- a/homeassistant/components/downloader/strings.json +++ b/homeassistant/components/downloader/strings.json @@ -35,11 +35,5 @@ } } } - }, - "issues": { - "directory_does_not_exist": { - "title": "The {integration_title} failed to import", - "description": "The {integration_title} integration failed to import because the configured directory does not exist.\n\nEnsure the directory exists and restart Home Assistant to try again or remove the {integration_title} configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
- } } } diff --git a/homeassistant/components/dsmr/config_flow.py b/homeassistant/components/dsmr/config_flow.py index 49e1818edcc..7d6a641b006 100644 --- a/homeassistant/components/dsmr/config_flow.py +++ b/homeassistant/components/dsmr/config_flow.py @@ -171,9 +171,11 @@ class DSMRFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> DSMROptionFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> DSMROptionFlowHandler: """Get the options flow for this handler.""" - return DSMROptionFlowHandler(config_entry) + return DSMROptionFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -311,10 +313,6 @@ class DSMRFlowHandler(ConfigFlow, domain=DOMAIN): class DSMROptionFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -328,7 +326,7 @@ class DSMROptionFlowHandler(OptionsFlow): { vol.Optional( CONF_TIME_BETWEEN_UPDATE, - default=self.entry.options.get( + default=self.config_entry.options.get( CONF_TIME_BETWEEN_UPDATE, DEFAULT_TIME_BETWEEN_UPDATE ), ): vol.All(vol.Coerce(int), vol.Range(min=0)), diff --git a/homeassistant/components/dsmr/const.py b/homeassistant/components/dsmr/const.py index 7f5813cda7f..4c6cb31ca4d 100644 --- a/homeassistant/components/dsmr/const.py +++ b/homeassistant/components/dsmr/const.py @@ -26,6 +26,7 @@ DEFAULT_TIME_BETWEEN_UPDATE = 30 DEVICE_NAME_ELECTRICITY = "Electricity Meter" DEVICE_NAME_GAS = "Gas Meter" DEVICE_NAME_WATER = "Water Meter" +DEVICE_NAME_HEAT = "Heat Meter" DSMR_VERSIONS = {"2.2", "4", "5", "5B", "5L", "5S", "Q3D"} diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index 77c40c5c292..213e948bafb 100644 --- a/homeassistant/components/dsmr/sensor.py +++ b/homeassistant/components/dsmr/sensor.py @@ -57,6 +57,7 @@ from .const import ( DEFAULT_TIME_BETWEEN_UPDATE, DEVICE_NAME_ELECTRICITY, DEVICE_NAME_GAS, + DEVICE_NAME_HEAT, DEVICE_NAME_WATER, DOMAIN, DSMR_PROTOCOL, @@ -75,6 +76,7 @@ class DSMRSensorEntityDescription(SensorEntityDescription): dsmr_versions: set[str] | None = None is_gas: bool = False is_water: bool = False + is_heat: bool = False obis_reference: str @@ -82,6 +84,7 @@ class MbusDeviceType(IntEnum): """Types of mbus devices (13757-3:2013).""" GAS = 3 + HEAT = 4 WATER = 7 @@ -396,6 +399,16 @@ SENSORS_MBUS_DEVICE_TYPE: dict[int, tuple[DSMRSensorEntityDescription, ...]] = { state_class=SensorStateClass.TOTAL_INCREASING, ), ), + MbusDeviceType.HEAT: ( + DSMRSensorEntityDescription( + key="heat_reading", + translation_key="heat_meter_reading", + obis_reference="MBUS_METER_READING", + is_heat=True, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + ), MbusDeviceType.WATER: ( DSMRSensorEntityDescription( key="water_reading", @@ -490,6 +503,10 @@ def create_mbus_entities( continue type_ = int(device_type.value) + if type_ not in SENSORS_MBUS_DEVICE_TYPE: + LOGGER.warning("Unsupported MBUS_DEVICE_TYPE (%d)", type_) + continue + if identifier := getattr(device, "MBUS_EQUIPMENT_IDENTIFIER", None): serial_ = identifier.value rename_old_gas_to_mbus(hass, entry, serial_) @@ -532,7 +549,7 @@ async def async_setup_entry( dsmr_version = entry.data[CONF_DSMR_VERSION] entities: list[DSMREntity] = [] initialized: bool = False - 
add_entities_handler: Callable[..., None] | None + add_entities_handler: Callable[[], None] | None @callback def init_async_add_entities(telegram: Telegram) -> None: @@ -554,7 +571,10 @@ async def async_setup_entry( ) for description in SENSORS if is_supported_description(telegram, description, dsmr_version) - and (not description.is_gas or CONF_SERIAL_ID_GAS in entry.data) + and ( + (not description.is_gas and not description.is_heat) + or CONF_SERIAL_ID_GAS in entry.data + ) ] ) async_add_entities(entities) @@ -693,7 +713,7 @@ async def async_setup_entry( task = asyncio.create_task(connect_and_reconnect()) @callback - async def _async_stop(_: Event) -> None: + def _async_stop(_: Event) -> None: if add_entities_handler is not None: add_entities_handler() task.cancel() @@ -743,6 +763,10 @@ class DSMREntity(SensorEntity): if serial_id: device_serial = serial_id device_name = DEVICE_NAME_WATER + if entity_description.is_heat: + if serial_id: + device_serial = serial_id + device_name = DEVICE_NAME_HEAT if device_serial is None: device_serial = entry.entry_id diff --git a/homeassistant/components/dsmr_reader/definitions.py b/homeassistant/components/dsmr_reader/definitions.py index 9003c4d4334..62d095aa993 100644 --- a/homeassistant/components/dsmr_reader/definitions.py +++ b/homeassistant/components/dsmr_reader/definitions.py @@ -40,6 +40,7 @@ def tariff_transform(value: str) -> str: @dataclass(frozen=True) +# pylint: disable-next=hass-enforce-class-module class DSMRReaderSensorEntityDescription(SensorEntityDescription): """Sensor entity description for DSMR Reader.""" diff --git a/homeassistant/components/dte_energy_bridge/__init__.py b/homeassistant/components/dte_energy_bridge/__init__.py deleted file mode 100644 index 2525d047bce..00000000000 --- a/homeassistant/components/dte_energy_bridge/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The dte_energy_bridge component.""" diff --git a/homeassistant/components/dte_energy_bridge/manifest.json b/homeassistant/components/dte_energy_bridge/manifest.json deleted file mode 100644 index f5b57d82869..00000000000 --- a/homeassistant/components/dte_energy_bridge/manifest.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "domain": "dte_energy_bridge", - "name": "DTE Energy Bridge", - "codeowners": [], - "documentation": "https://www.home-assistant.io/integrations/dte_energy_bridge", - "iot_class": "local_polling" -} diff --git a/homeassistant/components/dte_energy_bridge/sensor.py b/homeassistant/components/dte_energy_bridge/sensor.py deleted file mode 100644 index a0b9253034e..00000000000 --- a/homeassistant/components/dte_energy_bridge/sensor.py +++ /dev/null @@ -1,127 +0,0 @@ -"""Support for monitoring energy usage using the DTE energy bridge.""" - -from __future__ import annotations - -from http import HTTPStatus -import logging - -import requests -import voluptuous as vol - -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorDeviceClass, - SensorEntity, - SensorStateClass, -) -from homeassistant.const import CONF_NAME, UnitOfPower -from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -_LOGGER = logging.getLogger(__name__) - -CONF_IP_ADDRESS = "ip" -CONF_VERSION = "version" - -DEFAULT_NAME = "Current Energy Usage" -DEFAULT_VERSION = 1 -DOMAIN = 
"dte_energy_bridge" - -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_IP_ADDRESS): cv.string, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.All( - vol.Coerce(int), vol.Any(1, 2) - ), - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the DTE energy bridge sensor.""" - create_issue( - hass, - DOMAIN, - "deprecated_integration", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_integration", - translation_placeholders={"domain": DOMAIN}, - ) - - name = config[CONF_NAME] - ip_address = config[CONF_IP_ADDRESS] - version = config[CONF_VERSION] - - add_entities([DteEnergyBridgeSensor(ip_address, name, version)], True) - - -class DteEnergyBridgeSensor(SensorEntity): - """Implementation of the DTE Energy Bridge sensors.""" - - _attr_device_class = SensorDeviceClass.POWER - _attr_native_unit_of_measurement = UnitOfPower.KILO_WATT - _attr_state_class = SensorStateClass.MEASUREMENT - - def __init__(self, ip_address, name, version): - """Initialize the sensor.""" - self._version = version - - if self._version == 1: - self._url = f"http://{ip_address}/instantaneousdemand" - elif self._version == 2: - self._url = f"http://{ip_address}:8888/zigbee/se/instantaneousdemand" - - self._attr_name = name - - def update(self) -> None: - """Get the energy usage data from the DTE energy bridge.""" - try: - response = requests.get(self._url, timeout=5) - except (requests.exceptions.RequestException, ValueError): - _LOGGER.warning( - "Could not update status for DTE Energy Bridge (%s)", self._attr_name - ) - return - - if response.status_code != HTTPStatus.OK: - _LOGGER.warning( - "Invalid status_code from DTE Energy Bridge: %s (%s)", - response.status_code, - self._attr_name, - ) - return - - response_split = response.text.split() - - if len(response_split) != 2: - _LOGGER.warning( - 'Invalid response from DTE Energy Bridge: "%s" (%s)', - response.text, - self._attr_name, - ) - return - - val = float(response_split[0]) - - # A workaround for a bug in the DTE energy bridge. - # The returned value can randomly be in W or kW. Checking for a - # a decimal seems to be a reliable way to determine the units. - # Limiting to version 1 because version 2 apparently always returns - # values in the format 000000.000 kW, but the scaling is Watts - # NOT kWatts - if self._version == 1 and "." in response_split[0]: - self._attr_native_value = val - else: - self._attr_native_value = val / 1000 diff --git a/homeassistant/components/dte_energy_bridge/strings.json b/homeassistant/components/dte_energy_bridge/strings.json deleted file mode 100644 index f75867b8faa..00000000000 --- a/homeassistant/components/dte_energy_bridge/strings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "issues": { - "deprecated_integration": { - "title": "The DTE Energy Bridge integration will be removed", - "description": "The DTE Energy Bridge integration will be removed as new users can't get any supported devices, and the integration will fail as soon as a current device gets internet access.\n\n Please remove all `{domain}`platform sensors from your configuration and restart Home Assistant." 
- } - } -} diff --git a/homeassistant/components/dublin_bus_transport/manifest.json b/homeassistant/components/dublin_bus_transport/manifest.json index 1866da8ed8d..3df22b0da00 100644 --- a/homeassistant/components/dublin_bus_transport/manifest.json +++ b/homeassistant/components/dublin_bus_transport/manifest.json @@ -3,5 +3,6 @@ "name": "Dublin Bus", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/dublin_bus_transport", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/duckdns/icons.json b/homeassistant/components/duckdns/icons.json index 79ec18d13ff..c5d0b5329dc 100644 --- a/homeassistant/components/duckdns/icons.json +++ b/homeassistant/components/duckdns/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_txt": "mdi:text-box-edit-outline" + "set_txt": { + "service": "mdi:text-box-edit-outline" + } } } diff --git a/homeassistant/components/duckdns/manifest.json b/homeassistant/components/duckdns/manifest.json index b14da053450..b48ed0b2394 100644 --- a/homeassistant/components/duckdns/manifest.json +++ b/homeassistant/components/duckdns/manifest.json @@ -3,5 +3,6 @@ "name": "Duck DNS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/duckdns", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/duke_energy/__init__.py b/homeassistant/components/duke_energy/__init__.py new file mode 100644 index 00000000000..6eacc15880f --- /dev/null +++ b/homeassistant/components/duke_energy/__init__.py @@ -0,0 +1,22 @@ +"""The Duke Energy integration.""" + +from __future__ import annotations + +from homeassistant.core import HomeAssistant + +from .coordinator import DukeEnergyConfigEntry, DukeEnergyCoordinator + + +async def async_setup_entry(hass: HomeAssistant, entry: DukeEnergyConfigEntry) -> bool: + """Set up Duke Energy from a config entry.""" + + coordinator = DukeEnergyCoordinator(hass, entry.data) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: DukeEnergyConfigEntry) -> bool: + """Unload a config entry.""" + return True diff --git a/homeassistant/components/duke_energy/config_flow.py b/homeassistant/components/duke_energy/config_flow.py new file mode 100644 index 00000000000..e06940b0fba --- /dev/null +++ b/homeassistant/components/duke_energy/config_flow.py @@ -0,0 +1,67 @@ +"""Config flow for Duke Energy integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from aiodukeenergy import DukeEnergy +from aiohttp import ClientError, ClientResponseError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class DukeEnergyConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Duke Energy.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + session = 
async_get_clientsession(self.hass) + api = DukeEnergy( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD], session + ) + try: + auth = await api.authenticate() + except ClientResponseError as e: + errors["base"] = "invalid_auth" if e.status == 404 else "cannot_connect" + except (ClientError, TimeoutError): + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + username = auth["cdp_internal_user_id"].lower() + await self.async_set_unique_id(username) + self._abort_if_unique_id_configured() + email = auth["email"].lower() + data = { + CONF_EMAIL: email, + CONF_USERNAME: username, + CONF_PASSWORD: user_input[CONF_PASSWORD], + } + self._async_abort_entries_match(data) + return self.async_create_entry(title=email, data=data) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/duke_energy/const.py b/homeassistant/components/duke_energy/const.py new file mode 100644 index 00000000000..98c973fa2fc --- /dev/null +++ b/homeassistant/components/duke_energy/const.py @@ -0,0 +1,3 @@ +"""Constants for the Duke Energy integration.""" + +DOMAIN = "duke_energy" diff --git a/homeassistant/components/duke_energy/coordinator.py b/homeassistant/components/duke_energy/coordinator.py new file mode 100644 index 00000000000..68b7db12d45 --- /dev/null +++ b/homeassistant/components/duke_energy/coordinator.py @@ -0,0 +1,222 @@ +"""Coordinator to handle Duke Energy connections.""" + +from datetime import datetime, timedelta +import logging +from types import MappingProxyType +from typing import Any, cast + +from aiodukeenergy import DukeEnergy +from aiohttp import ClientError + +from homeassistant.components.recorder import get_instance +from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.statistics import ( + async_add_external_statistics, + get_last_statistics, + statistics_during_period, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, UnitOfEnergy, UnitOfVolume +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.util import dt as dt_util + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +_SUPPORTED_METER_TYPES = ("ELECTRIC",) + +type DukeEnergyConfigEntry = ConfigEntry[DukeEnergyCoordinator] + + +class DukeEnergyCoordinator(DataUpdateCoordinator[None]): + """Handle inserting statistics.""" + + config_entry: DukeEnergyConfigEntry + + def __init__( + self, + hass: HomeAssistant, + entry_data: MappingProxyType[str, Any], + ) -> None: + """Initialize the data handler.""" + super().__init__( + hass, + _LOGGER, + name="Duke Energy", + # Data is updated daily on Duke Energy. + # Refresh every 12h to be at most 12h behind. + update_interval=timedelta(hours=12), + ) + self.api = DukeEnergy( + entry_data[CONF_USERNAME], + entry_data[CONF_PASSWORD], + async_get_clientsession(hass), + ) + self._statistic_ids: set = set() + + @callback + def _dummy_listener() -> None: + pass + + # Force the coordinator to periodically update by registering at least one listener. + # Duke Energy does not provide forecast data, so all information is historical. 
+ # This makes _async_update_data get periodically called so we can insert statistics. + self.async_add_listener(_dummy_listener) + + self.config_entry.async_on_unload(self._clear_statistics) + + def _clear_statistics(self) -> None: + """Clear statistics.""" + get_instance(self.hass).async_clear_statistics(list(self._statistic_ids)) + + async def _async_update_data(self) -> None: + """Insert Duke Energy statistics.""" + meters: dict[str, dict[str, Any]] = await self.api.get_meters() + for serial_number, meter in meters.items(): + if ( + not isinstance(meter["serviceType"], str) + or meter["serviceType"] not in _SUPPORTED_METER_TYPES + ): + _LOGGER.debug( + "Skipping unsupported meter type %s", meter["serviceType"] + ) + continue + + id_prefix = f"{meter["serviceType"].lower()}_{serial_number}" + consumption_statistic_id = f"{DOMAIN}:{id_prefix}_energy_consumption" + self._statistic_ids.add(consumption_statistic_id) + _LOGGER.debug( + "Updating Statistics for %s", + consumption_statistic_id, + ) + + last_stat = await get_instance(self.hass).async_add_executor_job( + get_last_statistics, self.hass, 1, consumption_statistic_id, True, set() + ) + if not last_stat: + _LOGGER.debug("Updating statistic for the first time") + usage = await self._async_get_energy_usage(meter) + consumption_sum = 0.0 + last_stats_time = None + else: + usage = await self._async_get_energy_usage( + meter, + last_stat[consumption_statistic_id][0]["start"], + ) + if not usage: + _LOGGER.debug("No recent usage data. Skipping update") + continue + stats = await get_instance(self.hass).async_add_executor_job( + statistics_during_period, + self.hass, + min(usage.keys()), + None, + {consumption_statistic_id}, + "hour", + None, + {"sum"}, + ) + consumption_sum = cast(float, stats[consumption_statistic_id][0]["sum"]) + last_stats_time = stats[consumption_statistic_id][0]["start"] + + consumption_statistics = [] + + for start, data in usage.items(): + if last_stats_time is not None and start.timestamp() <= last_stats_time: + continue + consumption_sum += data["energy"] + + consumption_statistics.append( + StatisticData( + start=start, state=data["energy"], sum=consumption_sum + ) + ) + + name_prefix = ( + f"Duke Energy " f"{meter["serviceType"].capitalize()} {serial_number}" + ) + consumption_metadata = StatisticMetaData( + has_mean=False, + has_sum=True, + name=f"{name_prefix} Consumption", + source=DOMAIN, + statistic_id=consumption_statistic_id, + unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR + if meter["serviceType"] == "ELECTRIC" + else UnitOfVolume.CENTUM_CUBIC_FEET, + ) + + _LOGGER.debug( + "Adding %s statistics for %s", + len(consumption_statistics), + consumption_statistic_id, + ) + async_add_external_statistics( + self.hass, consumption_metadata, consumption_statistics + ) + + async def _async_get_energy_usage( + self, meter: dict[str, Any], start_time: float | None = None + ) -> dict[datetime, dict[str, float | int]]: + """Get energy usage. + + If start_time is None, get usage since account activation (or as far back as possible), + otherwise since start_time - 30 days to allow corrections in data. + + Duke Energy provides hourly data all the way back to ~3 years. + """ + + # All of Duke Energy Service Areas are currently in America/New_York timezone + # May need to re-think this if that ever changes and determine timezone based + # on the service address somehow. 
+ tz = await dt_util.async_get_time_zone("America/New_York") + lookback = timedelta(days=30) + one = timedelta(days=1) + if start_time is None: + # Max 3 years of data + agreement_date = dt_util.parse_datetime(meter["agreementActiveDate"]) + if agreement_date is None: + start = dt_util.now(tz) - timedelta(days=3 * 365) + else: + start = max( + agreement_date.replace(tzinfo=tz), + dt_util.now(tz) - timedelta(days=3 * 365), + ) + else: + start = datetime.fromtimestamp(start_time, tz=tz) - lookback + + start = start.replace(hour=0, minute=0, second=0, microsecond=0) + end = dt_util.now(tz).replace(hour=0, minute=0, second=0, microsecond=0) - one + _LOGGER.debug("Data lookup range: %s - %s", start, end) + + start_step = end - lookback + end_step = end + usage: dict[datetime, dict[str, float | int]] = {} + while True: + _LOGGER.debug("Getting hourly usage: %s - %s", start_step, end_step) + try: + # Get data + results = await self.api.get_energy_usage( + meter["serialNum"], "HOURLY", "DAY", start_step, end_step + ) + usage = {**results["data"], **usage} + + for missing in results["missing"]: + _LOGGER.debug("Missing data: %s", missing) + + # Set next range + end_step = start_step - one + start_step = max(start_step - lookback, start) + + # Make sure we don't go back too far + if end_step < start: + break + except (TimeoutError, ClientError): + # ClientError is raised when there is no more data for the range + break + + _LOGGER.debug("Got %s meter usage reads", len(usage)) + return usage diff --git a/homeassistant/components/duke_energy/manifest.json b/homeassistant/components/duke_energy/manifest.json new file mode 100644 index 00000000000..ece18d7ad2a --- /dev/null +++ b/homeassistant/components/duke_energy/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "duke_energy", + "name": "Duke Energy", + "codeowners": ["@hunterjm"], + "config_flow": true, + "dependencies": ["recorder"], + "documentation": "https://www.home-assistant.io/integrations/duke_energy", + "iot_class": "cloud_polling", + "requirements": ["aiodukeenergy==0.2.2"] +} diff --git a/homeassistant/components/duke_energy/strings.json b/homeassistant/components/duke_energy/strings.json new file mode 100644 index 00000000000..96dc8b371d1 --- /dev/null +++ b/homeassistant/components/duke_energy/strings.json @@ -0,0 +1,20 @@ +{ + "config": { + "step": { + "user": { + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/components/duotecno/climate.py b/homeassistant/components/duotecno/climate.py index 77b602c8716..0355d2855d3 100644 --- a/homeassistant/components/duotecno/climate.py +++ b/homeassistant/components/duotecno/climate.py @@ -57,7 +57,6 @@ class DuotecnoClimate(DuotecnoEntity, ClimateEntity): _attr_hvac_modes = list(HVACMODE_REVERSE) _attr_preset_modes = list(PRESETMODES) _attr_translation_key = "duotecno" - _enable_turn_on_off_backwards_compatibility = False @property def current_temperature(self) -> float | None: diff --git a/homeassistant/components/duotecno/config_flow.py b/homeassistant/components/duotecno/config_flow.py index ca95726542f..51b92d4673a 100644 --- 
a/homeassistant/components/duotecno/config_flow.py +++ b/homeassistant/components/duotecno/config_flow.py @@ -34,9 +34,6 @@ class DuoTecnoConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors: dict[str, str] = {} if user_input is not None: try: diff --git a/homeassistant/components/duotecno/manifest.json b/homeassistant/components/duotecno/manifest.json index 1adb9e874e5..7a79902eae3 100644 --- a/homeassistant/components/duotecno/manifest.json +++ b/homeassistant/components/duotecno/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/duotecno", "iot_class": "local_push", "loggers": ["pyduotecno", "pyduotecno-node", "pyduotecno-unit"], - "quality_scale": "silver", - "requirements": ["pyDuotecno==2024.5.1"] + "requirements": ["pyDuotecno==2024.10.1"], + "single_config_entry": true } diff --git a/homeassistant/components/duotecno/strings.json b/homeassistant/components/duotecno/strings.json index a5585c3dd2c..7f7c156768d 100644 --- a/homeassistant/components/duotecno/strings.json +++ b/homeassistant/components/duotecno/strings.json @@ -5,7 +5,8 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]" + "password": "[%key:common::config_flow::data::password%]", + "port": "[%key:common::config_flow::data::port%]" }, "data_description": { "host": "The hostname or IP address of your Duotecno device." @@ -15,8 +16,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "entity": { diff --git a/homeassistant/components/dwd_weather_warnings/coordinator.py b/homeassistant/components/dwd_weather_warnings/coordinator.py index 55705625685..8cf3813a85d 100644 --- a/homeassistant/components/dwd_weather_warnings/coordinator.py +++ b/homeassistant/components/dwd_weather_warnings/coordinator.py @@ -37,8 +37,8 @@ class DwdWeatherWarningsCoordinator(DataUpdateCoordinator[None]): self._device_tracker = None self._previous_position = None - async def async_config_entry_first_refresh(self) -> None: - """Perform first refresh.""" + async def _async_setup(self) -> None: + """Set up coordinator.""" if region_identifier := self.config_entry.data.get(CONF_REGION_IDENTIFIER): self.api = await self.hass.async_add_executor_job( DwdWeatherWarningsAPI, region_identifier @@ -48,8 +48,6 @@ class DwdWeatherWarningsCoordinator(DataUpdateCoordinator[None]): CONF_REGION_DEVICE_TRACKER ) - await super().async_config_entry_first_refresh() - async def _async_update_data(self) -> None: """Get the latest data from the DWD Weather Warnings API.""" if self._device_tracker: diff --git a/homeassistant/components/dweet/manifest.json b/homeassistant/components/dweet/manifest.json index 4badf76f2e9..b4efd0744fb 100644 --- a/homeassistant/components/dweet/manifest.json +++ b/homeassistant/components/dweet/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/dweet", "iot_class": "cloud_polling", "loggers": ["dweepy"], + "quality_scale": "legacy", 
"requirements": ["dweepy==0.3.0"] } diff --git a/homeassistant/components/dynalite/__init__.py b/homeassistant/components/dynalite/__init__.py index 59b8e464bb0..7388c43cb89 100644 --- a/homeassistant/components/dynalite/__init__.py +++ b/homeassistant/components/dynalite/__init__.py @@ -4,21 +4,17 @@ from __future__ import annotations import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType -# Loading the config flow file will register the flow from .bridge import DynaliteBridge from .const import ( ATTR_AREA, ATTR_CHANNEL, ATTR_HOST, - CONF_BRIDGES, DOMAIN, LOGGER, PLATFORMS, @@ -27,41 +23,14 @@ from .const import ( ) from .convert_config import convert_config from .panel import async_register_dynalite_frontend -from .schema import BRIDGE_SCHEMA -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - {vol.Optional(CONF_BRIDGES): vol.All(cv.ensure_list, [BRIDGE_SCHEMA])} - ), - }, - ), - extra=vol.ALLOW_EXTRA, -) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Dynalite platform.""" - conf = config.get(DOMAIN, {}) - LOGGER.debug("Setting up dynalite component config = %s", conf) hass.data[DOMAIN] = {} - bridges = conf.get(CONF_BRIDGES, []) - - for bridge_conf in bridges: - host = bridge_conf[CONF_HOST] - LOGGER.debug("Starting config entry flow host=%s conf=%s", host, bridge_conf) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=bridge_conf, - ) - ) - async def dynalite_service(service_call: ServiceCall) -> None: data = service_call.data host = data.get(ATTR_HOST, "") diff --git a/homeassistant/components/dynalite/bridge.py b/homeassistant/components/dynalite/bridge.py index 2245364b0b7..6f090371eee 100644 --- a/homeassistant/components/dynalite/bridge.py +++ b/homeassistant/components/dynalite/bridge.py @@ -68,7 +68,7 @@ class DynaliteBridge: log_string = ( "Connected" if self.dynalite_devices.connected else "Disconnected" ) - LOGGER.info("%s to dynalite host", log_string) + LOGGER.debug("%s to dynalite host", log_string) async_dispatcher_send(self.hass, self.update_signal()) else: async_dispatcher_send(self.hass, self.update_signal(device)) diff --git a/homeassistant/components/dynalite/config_flow.py b/homeassistant/components/dynalite/config_flow.py index 3ae4828b668..4b111c25cc9 100644 --- a/homeassistant/components/dynalite/config_flow.py +++ b/homeassistant/components/dynalite/config_flow.py @@ -8,9 +8,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .bridge import DynaliteBridge from .const import DEFAULT_PORT, DOMAIN, LOGGER @@ -26,38 +24,6 @@ class DynaliteFlowHandler(ConfigFlow, domain=DOMAIN): """Initialize the Dynalite flow.""" self.host = None - async def async_step_import(self, import_info: dict[str, Any]) -> 
ConfigFlowResult: - """Import a new bridge as a config entry.""" - LOGGER.debug("Starting async_step_import (deprecated) - %s", import_info) - # Raise an issue that this is deprecated and has been imported - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2023.12.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Dynalite", - }, - ) - - host = import_info[CONF_HOST] - # Check if host already exists - for entry in self._async_current_entries(): - if entry.data[CONF_HOST] == host: - self.hass.config_entries.async_update_entry( - entry, data=dict(import_info) - ) - return self.async_abort(reason="already_configured") - - # New entry - return await self._try_create(import_info) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/dynalite/const.py b/homeassistant/components/dynalite/const.py index c1cb1a0fb1b..4712b14bea3 100644 --- a/homeassistant/components/dynalite/const.py +++ b/homeassistant/components/dynalite/const.py @@ -16,7 +16,6 @@ ACTIVE_OFF = "off" ACTIVE_ON = "on" CONF_AREA = "area" CONF_AUTO_DISCOVER = "autodiscover" -CONF_BRIDGES = "bridges" CONF_CHANNEL = "channel" CONF_CHANNEL_COVER = "channel_cover" CONF_CLOSE_PRESET = "close" diff --git a/homeassistant/components/dynalite/cover.py b/homeassistant/components/dynalite/cover.py index 2bac51e0b8b..d7f366d919c 100644 --- a/homeassistant/components/dynalite/cover.py +++ b/homeassistant/components/dynalite/cover.py @@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.enum import try_parse_enum from .bridge import DynaliteBridge -from .dynalitebase import DynaliteBase, async_setup_entry_base +from .entity import DynaliteBase, async_setup_entry_base async def async_setup_entry( diff --git a/homeassistant/components/dynalite/dynalitebase.py b/homeassistant/components/dynalite/entity.py similarity index 97% rename from homeassistant/components/dynalite/dynalitebase.py rename to homeassistant/components/dynalite/entity.py index bfc62609101..62667dc19c3 100644 --- a/homeassistant/components/dynalite/dynalitebase.py +++ b/homeassistant/components/dynalite/entity.py @@ -77,7 +77,7 @@ class DynaliteBase(RestoreEntity, ABC): if cur_state: self.initialize_state(cur_state) else: - LOGGER.info("Restore state not available for %s", self.entity_id) + LOGGER.warning("Restore state not available for %s", self.entity_id) self._unsub_dispatchers.append( async_dispatcher_connect( diff --git a/homeassistant/components/dynalite/icons.json b/homeassistant/components/dynalite/icons.json index dedbb1be3ac..27949197b53 100644 --- a/homeassistant/components/dynalite/icons.json +++ b/homeassistant/components/dynalite/icons.json @@ -1,6 +1,10 @@ { "services": { - "request_area_preset": "mdi:texture-box", - "request_channel_level": "mdi:satellite-uplink" + "request_area_preset": { + "service": "mdi:texture-box" + }, + "request_channel_level": { + "service": "mdi:satellite-uplink" + } } } diff --git a/homeassistant/components/dynalite/light.py b/homeassistant/components/dynalite/light.py index ffb97da49c1..e0dd8b147aa 100644 --- a/homeassistant/components/dynalite/light.py +++ b/homeassistant/components/dynalite/light.py @@ -7,7 +7,7 @@ from homeassistant.config_entries import ConfigEntry from 
homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .dynalitebase import DynaliteBase, async_setup_entry_base +from .entity import DynaliteBase, async_setup_entry_base async def async_setup_entry( diff --git a/homeassistant/components/dynalite/panel.py b/homeassistant/components/dynalite/panel.py index b62944f63fe..623736cf02a 100644 --- a/homeassistant/components/dynalite/panel.py +++ b/homeassistant/components/dynalite/panel.py @@ -90,7 +90,7 @@ def save_dynalite_config( message_data = { conf: message_conf[conf] for conf in RELEVANT_CONFS if conf in message_conf } - LOGGER.info("Updating Dynalite config entry") + LOGGER.debug("Updating Dynalite config entry") hass.config_entries.async_update_entry(entry, data=message_data) connection.send_result(msg["id"], {}) diff --git a/homeassistant/components/dynalite/switch.py b/homeassistant/components/dynalite/switch.py index 54e9b919b89..d24a098056a 100644 --- a/homeassistant/components/dynalite/switch.py +++ b/homeassistant/components/dynalite/switch.py @@ -8,7 +8,7 @@ from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .dynalitebase import DynaliteBase, async_setup_entry_base +from .entity import DynaliteBase, async_setup_entry_base async def async_setup_entry( diff --git a/homeassistant/components/eafm/__init__.py b/homeassistant/components/eafm/__init__.py index 1f95437484f..dc618a983f3 100644 --- a/homeassistant/components/eafm/__init__.py +++ b/homeassistant/components/eafm/__init__.py @@ -48,6 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator[dict[str, dict[str, Any]]]( hass, _LOGGER, + config_entry=entry, name="sensor", update_method=_async_update_data, update_interval=timedelta(seconds=15 * 60), diff --git a/homeassistant/components/easyenergy/__init__.py b/homeassistant/components/easyenergy/__init__.py index e520631158a..0548431f09d 100644 --- a/homeassistant/components/easyenergy/__init__.py +++ b/homeassistant/components/easyenergy/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -10,10 +9,10 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .coordinator import EasyEnergyDataUpdateCoordinator +from .coordinator import EasyEnergyConfigEntry, EasyEnergyDataUpdateCoordinator from .services import async_setup_services -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -25,25 +24,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: EasyEnergyConfigEntry) -> bool: """Set up easyEnergy from a config entry.""" - coordinator = EasyEnergyDataUpdateCoordinator(hass) + coordinator = EasyEnergyDataUpdateCoordinator(hass, entry) try: await coordinator.async_config_entry_first_refresh() except ConfigEntryNotReady: await coordinator.easyenergy.close() raise - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = 
coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: EasyEnergyConfigEntry) -> bool: """Unload easyEnergy config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/easyenergy/coordinator.py b/homeassistant/components/easyenergy/coordinator.py index 8c1c593af93..e36bdf188ee 100644 --- a/homeassistant/components/easyenergy/coordinator.py +++ b/homeassistant/components/easyenergy/coordinator.py @@ -21,6 +21,8 @@ from homeassistant.util import dt as dt_util from .const import DOMAIN, LOGGER, SCAN_INTERVAL, THRESHOLD_HOUR +type EasyEnergyConfigEntry = ConfigEntry[EasyEnergyDataUpdateCoordinator] + class EasyEnergyData(NamedTuple): """Class for defining data in dict.""" @@ -33,15 +35,16 @@ class EasyEnergyData(NamedTuple): class EasyEnergyDataUpdateCoordinator(DataUpdateCoordinator[EasyEnergyData]): """Class to manage fetching easyEnergy data from single endpoint.""" - config_entry: ConfigEntry + config_entry: EasyEnergyConfigEntry - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, entry: EasyEnergyConfigEntry) -> None: """Initialize global easyEnergy data updater.""" super().__init__( hass, LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL, + config_entry=entry, ) self.easyenergy = EasyEnergy(session=async_get_clientsession(hass)) diff --git a/homeassistant/components/easyenergy/diagnostics.py b/homeassistant/components/easyenergy/diagnostics.py index d6912e1c926..64f30ba61fd 100644 --- a/homeassistant/components/easyenergy/diagnostics.py +++ b/homeassistant/components/easyenergy/diagnostics.py @@ -5,12 +5,9 @@ from __future__ import annotations from datetime import timedelta from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . 
import EasyEnergyDataUpdateCoordinator -from .const import DOMAIN -from .coordinator import EasyEnergyData +from .coordinator import EasyEnergyConfigEntry, EasyEnergyData def get_gas_price(data: EasyEnergyData, hours: int) -> float | None: @@ -32,41 +29,42 @@ def get_gas_price(data: EasyEnergyData, hours: int) -> float | None: async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: EasyEnergyConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: EasyEnergyDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator_data = entry.runtime_data.data + energy_today = coordinator_data.energy_today return { "entry": { "title": entry.title, }, "energy_usage": { - "current_hour_price": coordinator.data.energy_today.current_usage_price, - "next_hour_price": coordinator.data.energy_today.price_at_time( - coordinator.data.energy_today.utcnow() + timedelta(hours=1) + "current_hour_price": energy_today.current_usage_price, + "next_hour_price": energy_today.price_at_time( + energy_today.utcnow() + timedelta(hours=1) ), - "average_price": coordinator.data.energy_today.average_usage_price, - "max_price": coordinator.data.energy_today.extreme_usage_prices[1], - "min_price": coordinator.data.energy_today.extreme_usage_prices[0], - "highest_price_time": coordinator.data.energy_today.highest_usage_price_time, - "lowest_price_time": coordinator.data.energy_today.lowest_usage_price_time, - "percentage_of_max": coordinator.data.energy_today.pct_of_max_usage, + "average_price": energy_today.average_usage_price, + "max_price": energy_today.extreme_usage_prices[1], + "min_price": energy_today.extreme_usage_prices[0], + "highest_price_time": energy_today.highest_usage_price_time, + "lowest_price_time": energy_today.lowest_usage_price_time, + "percentage_of_max": energy_today.pct_of_max_usage, }, "energy_return": { - "current_hour_price": coordinator.data.energy_today.current_return_price, - "next_hour_price": coordinator.data.energy_today.price_at_time( - coordinator.data.energy_today.utcnow() + timedelta(hours=1), "return" + "current_hour_price": energy_today.current_return_price, + "next_hour_price": energy_today.price_at_time( + energy_today.utcnow() + timedelta(hours=1), "return" ), - "average_price": coordinator.data.energy_today.average_return_price, - "max_price": coordinator.data.energy_today.extreme_return_prices[1], - "min_price": coordinator.data.energy_today.extreme_return_prices[0], - "highest_price_time": coordinator.data.energy_today.highest_return_price_time, - "lowest_price_time": coordinator.data.energy_today.lowest_return_price_time, - "percentage_of_max": coordinator.data.energy_today.pct_of_max_return, + "average_price": energy_today.average_return_price, + "max_price": energy_today.extreme_return_prices[1], + "min_price": energy_today.extreme_return_prices[0], + "highest_price_time": energy_today.highest_return_price_time, + "lowest_price_time": energy_today.lowest_return_price_time, + "percentage_of_max": energy_today.pct_of_max_return, }, "gas": { - "current_hour_price": get_gas_price(coordinator.data, 0), - "next_hour_price": get_gas_price(coordinator.data, 1), + "current_hour_price": get_gas_price(coordinator_data, 0), + "next_hour_price": get_gas_price(coordinator_data, 1), }, } diff --git a/homeassistant/components/easyenergy/icons.json b/homeassistant/components/easyenergy/icons.json index 90cbec17a65..501483eb932 100644 --- a/homeassistant/components/easyenergy/icons.json +++ 
b/homeassistant/components/easyenergy/icons.json @@ -13,8 +13,14 @@ } }, "services": { - "get_gas_prices": "mdi:gas-station", - "get_energy_usage_prices": "mdi:transmission-tower-import", - "get_energy_return_prices": "mdi:transmission-tower-export" + "get_gas_prices": { + "service": "mdi:gas-station" + }, + "get_energy_usage_prices": { + "service": "mdi:transmission-tower-import" + }, + "get_energy_return_prices": { + "service": "mdi:transmission-tower-export" + } } } diff --git a/homeassistant/components/easyenergy/manifest.json b/homeassistant/components/easyenergy/manifest.json index 4d45dc2d399..25432196169 100644 --- a/homeassistant/components/easyenergy/manifest.json +++ b/homeassistant/components/easyenergy/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/easyenergy", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["easyenergy==2.1.2"] } diff --git a/homeassistant/components/easyenergy/sensor.py b/homeassistant/components/easyenergy/sensor.py index 65fe2558d46..6976a38da49 100644 --- a/homeassistant/components/easyenergy/sensor.py +++ b/homeassistant/components/easyenergy/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CURRENCY_EURO, PERCENTAGE, @@ -27,7 +26,11 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, SERVICE_TYPE_DEVICE_NAMES -from .coordinator import EasyEnergyData, EasyEnergyDataUpdateCoordinator +from .coordinator import ( + EasyEnergyConfigEntry, + EasyEnergyData, + EasyEnergyDataUpdateCoordinator, +) @dataclass(frozen=True, kw_only=True) @@ -208,10 +211,12 @@ def get_gas_price(data: EasyEnergyData, hours: int) -> float | None: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: EasyEnergyConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up easyEnergy sensors based on a config entry.""" - coordinator: EasyEnergyDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( EasyEnergySensorEntity(coordinator=coordinator, description=description) for description in SENSORS diff --git a/homeassistant/components/easyenergy/services.py b/homeassistant/components/easyenergy/services.py index 5b80cfafd08..f5ee89d5325 100644 --- a/homeassistant/components/easyenergy/services.py +++ b/homeassistant/components/easyenergy/services.py @@ -10,7 +10,7 @@ from typing import Final from easyenergy import Electricity, Gas, VatOption import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -23,7 +23,7 @@ from homeassistant.helpers import selector from homeassistant.util import dt as dt_util from .const import DOMAIN -from .coordinator import EasyEnergyDataUpdateCoordinator +from .coordinator import EasyEnergyConfigEntry, EasyEnergyDataUpdateCoordinator ATTR_CONFIG_ENTRY: Final = "config_entry" ATTR_START: Final = "start" @@ -86,12 +86,12 @@ def __serialize_prices(prices: list[dict[str, float | datetime]]) -> ServiceResp } -def __get_coordinator( - hass: HomeAssistant, call: ServiceCall -) -> 
EasyEnergyDataUpdateCoordinator: +def __get_coordinator(call: ServiceCall) -> EasyEnergyDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] - entry: ConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EasyEnergyConfigEntry | None = call.hass.config_entries.async_get_entry( + entry_id + ) if not entry: raise ServiceValidationError( @@ -110,18 +110,16 @@ def __get_coordinator( }, ) - coordinator: EasyEnergyDataUpdateCoordinator = hass.data[DOMAIN][entry_id] - return coordinator + return entry.runtime_data async def __get_prices( call: ServiceCall, *, - hass: HomeAssistant, price_type: PriceType, ) -> ServiceResponse: """Get prices from easyEnergy.""" - coordinator = __get_coordinator(hass, call) + coordinator = __get_coordinator(call) start = __get_date(call.data.get(ATTR_START)) end = __get_date(call.data.get(ATTR_END)) @@ -157,21 +155,21 @@ def async_setup_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, GAS_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.GAS), + partial(__get_prices, price_type=PriceType.GAS), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) hass.services.async_register( DOMAIN, ENERGY_USAGE_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.ENERGY_USAGE), + partial(__get_prices, price_type=PriceType.ENERGY_USAGE), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) hass.services.async_register( DOMAIN, ENERGY_RETURN_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.ENERGY_RETURN), + partial(__get_prices, price_type=PriceType.ENERGY_RETURN), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) diff --git a/homeassistant/components/ebox/manifest.json b/homeassistant/components/ebox/manifest.json index 952f9dc133d..d87c85b6612 100644 --- a/homeassistant/components/ebox/manifest.json +++ b/homeassistant/components/ebox/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ebox", "iot_class": "cloud_polling", "loggers": ["pyebox"], + "quality_scale": "legacy", "requirements": ["pyebox==1.1.4"] } diff --git a/homeassistant/components/ebusd/icons.json b/homeassistant/components/ebusd/icons.json index 642be37a43b..ebfa3673a0c 100644 --- a/homeassistant/components/ebusd/icons.json +++ b/homeassistant/components/ebusd/icons.json @@ -1,5 +1,7 @@ { "services": { - "write": "mdi:pencil" + "write": { + "service": "mdi:pencil" + } } } diff --git a/homeassistant/components/ebusd/manifest.json b/homeassistant/components/ebusd/manifest.json index 3ce18d6e8d3..b82e8f1b910 100644 --- a/homeassistant/components/ebusd/manifest.json +++ b/homeassistant/components/ebusd/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ebusd", "iot_class": "local_polling", "loggers": ["ebusdpy"], + "quality_scale": "legacy", "requirements": ["ebusdpy==0.0.17"] } diff --git a/homeassistant/components/ecoal_boiler/manifest.json b/homeassistant/components/ecoal_boiler/manifest.json index 75dc95ae121..4d8202f8fde 100644 --- a/homeassistant/components/ecoal_boiler/manifest.json +++ b/homeassistant/components/ecoal_boiler/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ecoal_boiler", "iot_class": "local_polling", "loggers": ["ecoaliface"], + "quality_scale": "legacy", "requirements": ["ecoaliface==0.4.0"] } diff --git a/homeassistant/components/ecobee/__init__.py 
b/homeassistant/components/ecobee/__init__.py index 6f032fbaae9..54af6c0f801 100644 --- a/homeassistant/components/ecobee/__init__.py +++ b/homeassistant/components/ecobee/__init__.py @@ -6,15 +6,14 @@ from pyecobee import ECOBEE_API_KEY, ECOBEE_REFRESH_TOKEN, Ecobee, ExpiredTokenE import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform +from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv, discovery +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from homeassistant.util import Throttle from .const import ( _LOGGER, - ATTR_CONFIG_ENTRY_ID, CONF_REFRESH_TOKEN, DATA_ECOBEE_CONFIG, DATA_HASS_CONFIG, @@ -73,18 +72,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - # The legacy Ecobee notify.notify service is deprecated - # was with HA Core 2024.5.0 and will be removed with HA core 2024.11.0 - hass.async_create_task( - discovery.async_load_platform( - hass, - Platform.NOTIFY, - DOMAIN, - {CONF_NAME: entry.title, ATTR_CONFIG_ENTRY_ID: entry.entry_id}, - hass.data[DATA_HASS_CONFIG], - ) - ) - return True diff --git a/homeassistant/components/ecobee/climate.py b/homeassistant/components/ecobee/climate.py index 8dcc7285590..709926d8496 100644 --- a/homeassistant/components/ecobee/climate.py +++ b/homeassistant/components/ecobee/climate.py @@ -32,16 +32,18 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import entity_platform +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr, entity_platform import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.util.unit_conversion import TemperatureConverter from . 
import EcobeeData from .const import ( _LOGGER, + ATTR_ACTIVE_SENSORS, + ATTR_AVAILABLE_SENSORS, DOMAIN, ECOBEE_AUX_HEAT_ONLY, ECOBEE_MODEL_TO_NAME, @@ -63,6 +65,8 @@ ATTR_DST_ENABLED = "dst_enabled" ATTR_MIC_ENABLED = "mic_enabled" ATTR_AUTO_AWAY = "auto_away" ATTR_FOLLOW_ME = "follow_me" +ATTR_SENSOR_LIST = "device_ids" +ATTR_PRESET_MODE = "preset_mode" DEFAULT_RESUME_ALL = False PRESET_AWAY_INDEFINITELY = "away_indefinitely" @@ -130,6 +134,7 @@ SERVICE_SET_FAN_MIN_ON_TIME = "set_fan_min_on_time" SERVICE_SET_DST_MODE = "set_dst_mode" SERVICE_SET_MIC_MODE = "set_mic_mode" SERVICE_SET_OCCUPANCY_MODES = "set_occupancy_modes" +SERVICE_SET_SENSORS_USED_IN_CLIMATE = "set_sensors_used_in_climate" DTGROUP_START_INCLUSIVE_MSG = ( f"{ATTR_START_DATE} and {ATTR_START_TIME} must be specified together" @@ -218,7 +223,7 @@ async def async_setup_entry( thermostat["name"], thermostat["modelNumber"], ) - entities.append(Thermostat(data, index, thermostat)) + entities.append(Thermostat(data, index, thermostat, hass)) async_add_entities(entities, True) @@ -328,6 +333,15 @@ async def async_setup_entry( "set_occupancy_modes", ) + platform.async_register_entity_service( + SERVICE_SET_SENSORS_USED_IN_CLIMATE, + { + vol.Optional(ATTR_PRESET_MODE): cv.string, + vol.Required(ATTR_SENSOR_LIST): cv.ensure_list, + }, + "set_sensors_used_in_climate", + ) + class Thermostat(ClimateEntity): """A thermostat class for Ecobee.""" @@ -339,11 +353,14 @@ class Thermostat(ClimateEntity): _attr_fan_modes = [FAN_AUTO, FAN_ON] _attr_name = None _attr_has_entity_name = True - _enable_turn_on_off_backwards_compatibility = False _attr_translation_key = "ecobee" def __init__( - self, data: EcobeeData, thermostat_index: int, thermostat: dict + self, + data: EcobeeData, + thermostat_index: int, + thermostat: dict, + hass: HomeAssistant, ) -> None: """Initialize the thermostat.""" self.data = data @@ -353,6 +370,7 @@ class Thermostat(ClimateEntity): self.vacation = None self._last_active_hvac_mode = HVACMode.HEAT_COOL self._last_hvac_mode_before_aux_heat = HVACMode.HEAT_COOL + self._hass = hass self._attr_hvac_modes = [] if self.settings["heatStages"] or self.settings["hasHeatPump"]: @@ -362,7 +380,11 @@ class Thermostat(ClimateEntity): if len(self._attr_hvac_modes) == 2: self._attr_hvac_modes.insert(0, HVACMode.HEAT_COOL) self._attr_hvac_modes.append(HVACMode.OFF) - + self._sensors = self.remote_sensors + self._preset_modes = { + comfort["climateRef"]: comfort["name"] + for comfort in self.thermostat["program"]["climates"] + } self.update_without_throttle = False async def async_update(self) -> None: @@ -387,8 +409,6 @@ class Thermostat(ClimateEntity): supported = SUPPORT_FLAGS if self.has_humidifier_control: supported = supported | ClimateEntityFeature.TARGET_HUMIDITY - if self.has_aux_heat: - supported = supported | ClimateEntityFeature.AUX_HEAT if len(self.hvac_modes) > 1 and HVACMode.OFF in self.hvac_modes: supported = ( supported | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON @@ -449,11 +469,6 @@ class Thermostat(ClimateEntity): and self.settings.get("humidifierMode") == HUMIDIFIER_MANUAL_MODE ) - @property - def has_aux_heat(self) -> bool: - """Return true if the ecobee has a heat pump.""" - return bool(self.settings.get(HAS_HEAT_PUMP)) - @property def target_humidity(self) -> int | None: """Return the desired humidity set point.""" @@ -560,6 +575,8 @@ class Thermostat(ClimateEntity): return HVACAction.IDLE + _unrecorded_attributes = frozenset({ATTR_AVAILABLE_SENSORS, ATTR_ACTIVE_SENSORS}) + @property def 
extra_state_attributes(self) -> dict[str, Any] | None: """Return device specific state attributes.""" @@ -571,47 +588,61 @@ class Thermostat(ClimateEntity): ), "equipment_running": status, "fan_min_on_time": self.settings["fanMinOnTime"], + ATTR_AVAILABLE_SENSORS: self.remote_sensor_devices, + ATTR_ACTIVE_SENSORS: self.active_sensor_devices_in_preset_mode, } @property - def is_aux_heat(self) -> bool: - """Return true if aux heater.""" - return self.settings["hvacMode"] == ECOBEE_AUX_HEAT_ONLY + def remote_sensors(self) -> list: + """Return the remote sensor names of the thermostat.""" + sensors_info = self.thermostat.get("remoteSensors", []) + return [sensor["name"] for sensor in sensors_info if sensor.get("name")] - async def async_turn_aux_heat_on(self) -> None: - """Turn auxiliary heater on.""" - async_create_issue( - self.hass, - DOMAIN, - "migrate_aux_heat", - breaks_in_ha_version="2024.10.0", - is_fixable=True, - is_persistent=True, - translation_key="migrate_aux_heat", - severity=IssueSeverity.WARNING, + @property + def remote_sensor_devices(self) -> list: + """Return the remote sensor device name_by_user or name for the thermostat.""" + return sorted( + [ + f'{item["name_by_user"]} ({item["id"]})' + for item in self.remote_sensor_ids_names + ] ) - _LOGGER.debug("Setting HVAC mode to auxHeatOnly to turn on aux heat") - self._last_hvac_mode_before_aux_heat = self.hvac_mode - await self.hass.async_add_executor_job( - self.data.ecobee.set_hvac_mode, self.thermostat_index, ECOBEE_AUX_HEAT_ONLY - ) - self.update_without_throttle = True - async def async_turn_aux_heat_off(self) -> None: - """Turn auxiliary heater off.""" - async_create_issue( - self.hass, - DOMAIN, - "migrate_aux_heat", - breaks_in_ha_version="2024.10.0", - is_fixable=True, - is_persistent=True, - translation_key="migrate_aux_heat", - severity=IssueSeverity.WARNING, - ) - _LOGGER.debug("Setting HVAC mode to last mode to disable aux heat") - await self.async_set_hvac_mode(self._last_hvac_mode_before_aux_heat) - self.update_without_throttle = True + @property + def remote_sensor_ids_names(self) -> list: + """Return the remote sensor device id and name_by_user for the thermostat.""" + sensors_info = self.thermostat.get("remoteSensors", []) + device_registry = dr.async_get(self._hass) + + return [ + { + "id": device.id, + "name_by_user": device.name_by_user + if device.name_by_user + else device.name, + } + for device in device_registry.devices.values() + for sensor_info in sensors_info + if device.name == sensor_info["name"] + ] + + @property + def active_sensors_in_preset_mode(self) -> list: + """Return the currently active/participating sensors.""" + # https://support.ecobee.com/s/articles/SmartSensors-Sensor-Participation + # During a manual hold, the ecobee will follow the Sensor Participation + # rules for the Home Comfort Settings + mode = self._preset_modes.get(self.preset_mode, "Home") + return self._sensors_in_preset_mode(mode) + + @property + def active_sensor_devices_in_preset_mode(self) -> list: + """Return the currently active/participating sensor devices.""" + # https://support.ecobee.com/s/articles/SmartSensors-Sensor-Participation + # During a manual hold, the ecobee will follow the Sensor Participation + # rules for the Home Comfort Settings + mode = self._preset_modes.get(self.preset_mode, "Home") + return self._sensor_devices_in_preset_mode(mode) def set_preset_mode(self, preset_mode: str) -> None: """Activate a preset.""" @@ -721,7 +752,7 @@ class Thermostat(ClimateEntity): holdHours=self.hold_hours(), ) 
- _LOGGER.info("Setting fan mode to: %s", fan_mode) + _LOGGER.debug("Setting fan mode to: %s", fan_mode) def set_temp_hold(self, temp): """Set temperature hold in modes other than auto. @@ -789,6 +820,115 @@ class Thermostat(ClimateEntity): ) self.update_without_throttle = True + def set_sensors_used_in_climate( + self, device_ids: list[str], preset_mode: str | None = None + ) -> None: + """Set the sensors used on a climate for a thermostat.""" + if preset_mode is None: + preset_mode = self.preset_mode + + # Check if climate is an available preset option. + elif preset_mode not in self._preset_modes.values(): + if self.preset_modes: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_preset", + translation_placeholders={ + "options": ", ".join(self._preset_modes.values()) + }, + ) + + # Get device name from device id. + device_registry = dr.async_get(self.hass) + sensor_names: list[str] = [] + sensor_ids: list[str] = [] + for device_id in device_ids: + device = device_registry.async_get(device_id) + if device and device.name: + r_sensors = self.thermostat.get("remoteSensors", []) + ecobee_identifier = next( + ( + identifier + for identifier in device.identifiers + if identifier[0] == "ecobee" + ), + None, + ) + if ecobee_identifier: + code = ecobee_identifier[1] + for r_sensor in r_sensors: + if ( # occurs if remote sensor + len(code) == 4 and r_sensor.get("code") == code + ) or ( # occurs if thermostat + len(code) != 4 and r_sensor.get("type") == "thermostat" + ): + sensor_ids.append(r_sensor.get("id")) # noqa: PERF401 + sensor_names.append(device.name) + + # Ensure sensors provided are available for thermostat or not empty. + if not set(sensor_names).issubset(set(self._sensors)) or not sensor_names: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_sensor", + translation_placeholders={ + "options": ", ".join( + [ + f'{item["name_by_user"]} ({item["id"]})' + for item in self.remote_sensor_ids_names + ] + ) + }, + ) + + # Check that an id was found for each sensor + if len(device_ids) != len(sensor_ids): + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="sensor_lookup_failed" + ) + + # Check if sensors are currently used on the climate for the thermostat. 
+ current_sensors_in_climate = self._sensors_in_preset_mode(preset_mode) + if set(sensor_names) == set(current_sensors_in_climate): + _LOGGER.debug( + "This action would not be an update, current sensors on climate (%s) are: %s", + preset_mode, + ", ".join(current_sensors_in_climate), + ) + return + + _LOGGER.debug( + "Setting sensors %s to be used on thermostat %s for program %s", + sensor_names, + self.device_info.get("name"), + preset_mode, + ) + self.data.ecobee.update_climate_sensors( + self.thermostat_index, preset_mode, sensor_ids=sensor_ids + ) + self.update_without_throttle = True + + def _sensors_in_preset_mode(self, preset_mode: str | None) -> list[str]: + """Return current sensors used in climate.""" + climates = self.thermostat["program"]["climates"] + for climate in climates: + if climate.get("name") == preset_mode: + return [sensor["name"] for sensor in climate["sensors"]] + + return [] + + def _sensor_devices_in_preset_mode(self, preset_mode: str | None) -> list[str]: + """Return current sensor device name_by_user or name used in climate.""" + device_registry = dr.async_get(self._hass) + sensor_names = self._sensors_in_preset_mode(preset_mode) + return sorted( + [ + device.name_by_user if device.name_by_user else device.name + for device in device_registry.devices.values() + for sensor_name in sensor_names + if device.name == sensor_name + ] + ) + def hold_preference(self): """Return user preference setting for hold time.""" # Values returned from thermostat are: diff --git a/homeassistant/components/ecobee/config_flow.py b/homeassistant/components/ecobee/config_flow.py index e9a89e0fba5..687d9173a66 100644 --- a/homeassistant/components/ecobee/config_flow.py +++ b/homeassistant/components/ecobee/config_flow.py @@ -23,18 +23,12 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the ecobee flow.""" - self._ecobee: Ecobee | None = None + _ecobee: Ecobee async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - if self._async_current_entries(): - # Config entry already exists, only one allowed. - return self.async_abort(reason="single_instance_allowed") - errors = {} stored_api_key = ( self.hass.data[DATA_ECOBEE_CONFIG].get(CONF_API_KEY) @@ -59,7 +53,9 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_authorize(self, user_input=None): + async def async_step_authorize( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Present the user with the PIN so that the app can be authorized on ecobee.com.""" errors = {} @@ -80,7 +76,7 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN): description_placeholders={"pin": self._ecobee.pin}, ) - async def async_step_import(self, import_data): + async def async_step_import(self, import_data: None) -> ConfigFlowResult: """Import ecobee config from configuration.yaml. Triggered by async_setup only if a config entry doesn't already exist. 
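(Editorial aside, not part of the patch: the climate.py hunks above register a new `ecobee.set_sensors_used_in_climate` entity service whose schema takes a required `device_ids` list and an optional `preset_mode`, maps Home Assistant device-registry IDs onto the thermostat's remote sensor IDs, and pushes them to the selected comfort setting. Below is a minimal, hypothetical sketch of how that service could be invoked from custom Python code; the entity ID, device-registry IDs, and the "Sleep" climate name are placeholder assumptions, and a live HomeAssistant instance (`hass`) is assumed. It illustrates the call shape only and is not taken from the diff.)

# Hypothetical usage sketch for the new ecobee.set_sensors_used_in_climate service.
# "device_ids" and "preset_mode" correspond to ATTR_SENSOR_LIST and ATTR_PRESET_MODE
# in the diff; the entity and device IDs below are made-up placeholders.
from homeassistant.core import HomeAssistant


async def use_only_bedroom_sensor(hass: HomeAssistant) -> None:
    """Restrict the 'Sleep' comfort setting to one remote sensor (illustrative only)."""
    await hass.services.async_call(
        "ecobee",
        "set_sensors_used_in_climate",
        {
            "entity_id": "climate.my_ecobee",          # placeholder thermostat entity
            "device_ids": ["abc123deviceregistryid"],  # placeholder device-registry IDs
            "preset_mode": "Sleep",                    # must match one of the thermostat's climates
        },
        blocking=True,
    )

(If `preset_mode` is omitted, the service falls back to the thermostat's current preset; unknown climate names, sensors not attached to the thermostat, or device IDs that cannot be resolved raise ServiceValidationError, as implemented in set_sensors_used_in_climate above.)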
diff --git a/homeassistant/components/ecobee/const.py b/homeassistant/components/ecobee/const.py index 85a332f3c87..d0e9ba8e8e9 100644 --- a/homeassistant/components/ecobee/const.py +++ b/homeassistant/components/ecobee/const.py @@ -23,6 +23,8 @@ DOMAIN = "ecobee" DATA_ECOBEE_CONFIG = "ecobee_config" DATA_HASS_CONFIG = "ecobee_hass_config" ATTR_CONFIG_ENTRY_ID = "entry_id" +ATTR_AVAILABLE_SENSORS = "available_sensors" +ATTR_ACTIVE_SENSORS = "active_sensors" CONF_REFRESH_TOKEN = "refresh_token" diff --git a/homeassistant/components/ecobee/icons.json b/homeassistant/components/ecobee/icons.json index 3e736d0dc68..647a14dc5d5 100644 --- a/homeassistant/components/ecobee/icons.json +++ b/homeassistant/components/ecobee/icons.json @@ -1,11 +1,28 @@ { "services": { - "create_vacation": "mdi:umbrella-beach", - "delete_vacation": "mdi:umbrella-beach-outline", - "resume_program": "mdi:play", - "set_fan_min_on_time": "mdi:fan-clock", - "set_dst_mode": "mdi:sun-clock", - "set_mic_mode": "mdi:microphone", - "set_occupancy_modes": "mdi:eye-settings" + "create_vacation": { + "service": "mdi:umbrella-beach" + }, + "delete_vacation": { + "service": "mdi:umbrella-beach-outline" + }, + "resume_program": { + "service": "mdi:play" + }, + "set_fan_min_on_time": { + "service": "mdi:fan-clock" + }, + "set_dst_mode": { + "service": "mdi:sun-clock" + }, + "set_mic_mode": { + "service": "mdi:microphone" + }, + "set_occupancy_modes": { + "service": "mdi:eye-settings" + }, + "set_sensors_used_in_climate": { + "service": "mdi:home-thermometer" + } } } diff --git a/homeassistant/components/ecobee/manifest.json b/homeassistant/components/ecobee/manifest.json index 22dfcb2a428..20b346b776b 100644 --- a/homeassistant/components/ecobee/manifest.json +++ b/homeassistant/components/ecobee/manifest.json @@ -9,7 +9,8 @@ }, "iot_class": "cloud_polling", "loggers": ["pyecobee"], - "requirements": ["python-ecobee-api==0.2.18"], + "requirements": ["python-ecobee-api==0.2.20"], + "single_config_entry": true, "zeroconf": [ { "type": "_ecobee._tcp.local." diff --git a/homeassistant/components/ecobee/notify.py b/homeassistant/components/ecobee/notify.py index 167233e4071..28cfbebe506 100644 --- a/homeassistant/components/ecobee/notify.py +++ b/homeassistant/components/ecobee/notify.py @@ -2,66 +2,16 @@ from __future__ import annotations -from functools import partial -from typing import Any - -from homeassistant.components.notify import ( - ATTR_TARGET, - BaseNotificationService, - NotifyEntity, - migrate_notify_issue, -) +from homeassistant.components.notify import NotifyEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import Ecobee, EcobeeData +from . 
import EcobeeData from .const import DOMAIN from .entity import EcobeeBaseEntity -def get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> EcobeeNotificationService | None: - """Get the Ecobee notification service.""" - if discovery_info is None: - return None - - data: EcobeeData = hass.data[DOMAIN] - return EcobeeNotificationService(data.ecobee) - - -class EcobeeNotificationService(BaseNotificationService): - """Implement the notification service for the Ecobee thermostat.""" - - def __init__(self, ecobee: Ecobee) -> None: - """Initialize the service.""" - self.ecobee = ecobee - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message and raise issue.""" - migrate_notify_issue( - self.hass, DOMAIN, "Ecobee", "2024.11.0", service_name=self._service_name - ) - await self.hass.async_add_executor_job( - partial(self.send_message, message, **kwargs) - ) - - def send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message.""" - targets = kwargs.get(ATTR_TARGET) - - if not targets: - raise ValueError("Missing required argument: target") - - for target in targets: - thermostat_index = int(target) - self.ecobee.send_message(thermostat_index, message) - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, diff --git a/homeassistant/components/ecobee/number.py b/homeassistant/components/ecobee/number.py index ab09407903d..ed3744bf11e 100644 --- a/homeassistant/components/ecobee/number.py +++ b/homeassistant/components/ecobee/number.py @@ -6,9 +6,14 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass import logging -from homeassistant.components.number import NumberEntity, NumberEntityDescription +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, + NumberMode, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfTime +from homeassistant.const import UnitOfTemperature, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -54,21 +59,30 @@ async def async_setup_entry( ) -> None: """Set up the ecobee thermostat number entity.""" data: EcobeeData = hass.data[DOMAIN] - _LOGGER.debug("Adding min time ventilators numbers (if present)") - async_add_entities( + assert data is not None + + entities: list[NumberEntity] = [ + EcobeeVentilatorMinTime(data, index, numbers) + for index, thermostat in enumerate(data.ecobee.thermostats) + if thermostat["settings"]["ventilatorType"] != "none" + for numbers in VENTILATOR_NUMBERS + ] + + _LOGGER.debug("Adding compressor min temp number (if present)") + entities.extend( ( - EcobeeVentilatorMinTime(data, index, numbers) + EcobeeCompressorMinTemp(data, index) for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["ventilatorType"] != "none" - for numbers in VENTILATOR_NUMBERS - ), - True, + if thermostat["settings"]["hasHeatPump"] + ) ) + async_add_entities(entities, True) + class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity): - """A number class, representing min time for an ecobee thermostat with ventilator attached.""" + """A number class, representing min time for an ecobee thermostat with ventilator attached.""" entity_description: EcobeeNumberEntityDescription @@ -105,3 +119,53 @@ class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity): """Set new ventilator Min 
On Time value.""" self.entity_description.set_fn(self.data, self.thermostat_index, int(value)) self.update_without_throttle = True + + +class EcobeeCompressorMinTemp(EcobeeBaseEntity, NumberEntity): + """Minimum outdoor temperature at which the compressor will operate. + + This applies more to air source heat pumps than geothermal. This serves as a safety + feature (compressors have a minimum operating temperature) as well as + providing the ability to choose fuel in a dual-fuel system (i.e. choose between + electrical heat pump and fossil auxiliary heat depending on Time of Use, Solar, + etc.). + Note that python-ecobee-api refers to this as Aux Cutover Threshold, but Ecobee + uses Compressor Protection Min Temp. + """ + + _attr_device_class = NumberDeviceClass.TEMPERATURE + _attr_has_entity_name = True + _attr_icon = "mdi:thermometer-off" + _attr_mode = NumberMode.BOX + _attr_native_min_value = -25 + _attr_native_max_value = 66 + _attr_native_step = 5 + _attr_native_unit_of_measurement = UnitOfTemperature.FAHRENHEIT + _attr_translation_key = "compressor_protection_min_temp" + + def __init__( + self, + data: EcobeeData, + thermostat_index: int, + ) -> None: + """Initialize ecobee compressor min temperature.""" + super().__init__(data, thermostat_index) + self._attr_unique_id = f"{self.base_unique_id}_compressor_protection_min_temp" + self.update_without_throttle = False + + async def async_update(self) -> None: + """Get the latest state from the thermostat.""" + if self.update_without_throttle: + await self.data.update(no_throttle=True) + self.update_without_throttle = False + else: + await self.data.update() + + self._attr_native_value = ( + (self.thermostat["settings"]["compressorProtectionMinTemp"]) / 10 + ) + + def set_native_value(self, value: float) -> None: + """Set new compressor minimum temperature.""" + self.data.ecobee.set_aux_cutover_threshold(self.thermostat_index, value) + self.update_without_throttle = True diff --git a/homeassistant/components/ecobee/services.yaml b/homeassistant/components/ecobee/services.yaml index a184f422725..d58ae81d552 100644 --- a/homeassistant/components/ecobee/services.yaml +++ b/homeassistant/components/ecobee/services.yaml @@ -134,3 +134,23 @@ set_occupancy_modes: follow_me: selector: boolean: + +set_sensors_used_in_climate: + target: + entity: + integration: ecobee + domain: climate + fields: + preset_mode: + example: "Home" + selector: + text: + device_ids: + required: true + selector: + device: + multiple: true + integration: ecobee + entity: + - domain: climate + - domain: sensor diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index 5483ca2299d..8c636bd9b04 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -8,7 +8,7 @@ } }, "authorize": { - "description": "Please authorize this app at https://www.ecobee.com/consumerportal/index.html with PIN code:\n\n{pin}\n\nThen, press Submit." + "description": "Please authorize this app at https://www.ecobee.com/consumerportal/index.html with PIN code:\n\n{pin}\n\nThen, select **Submit**." 
} }, "error": { @@ -33,15 +33,18 @@ }, "number": { "ventilator_min_type_home": { - "name": "Ventilator min time home" + "name": "Ventilator minimum time home" }, "ventilator_min_type_away": { - "name": "Ventilator min time away" + "name": "Ventilator minimum time away" + }, + "compressor_protection_min_temp": { + "name": "Compressor minimum temperature" } }, "switch": { "aux_heat_only": { - "name": "Aux heat only" + "name": "Auxiliary heat only" } } }, @@ -167,6 +170,35 @@ "description": "Enable Follow Me mode." } } + }, + "set_sensors_used_in_climate": { + "name": "Set Sensors Used in Climate", + "description": "Sets the participating sensors for a climate.", + "fields": { + "entity_id": { + "name": "Entity", + "description": "Ecobee thermostat on which to set active sensors." + }, + "preset_mode": { + "name": "Climate Name", + "description": "Name of the climate program to set the sensors active on.\nDefaults to currently active program." + }, + "device_ids": { + "name": "Sensors", + "description": "Sensors to set as participating sensors." + } + } + } + }, + "exceptions": { + "invalid_preset": { + "message": "Invalid climate name, available options are: {options}" + }, + "invalid_sensor": { + "message": "Invalid sensor for thermostat, available options are: {options}" + }, + "sensor_lookup_failed": { + "message": "There was an error getting the sensor ids from sensor names. Try reloading the ecobee integration." } }, "issues": { diff --git a/homeassistant/components/ecobee/switch.py b/homeassistant/components/ecobee/switch.py index 67be78fb21d..89ee433c072 100644 --- a/homeassistant/components/ecobee/switch.py +++ b/homeassistant/components/ecobee/switch.py @@ -31,25 +31,26 @@ async def async_setup_entry( """Set up the ecobee thermostat switch entity.""" data: EcobeeData = hass.data[DOMAIN] - async_add_entities( - [ - EcobeeVentilator20MinSwitch( - data, - index, - (await dt_util.async_get_time_zone(thermostat["location"]["timeZone"])) - or dt_util.get_default_time_zone(), - ) + entities: list[SwitchEntity] = [ + EcobeeVentilator20MinSwitch( + data, + index, + (await dt_util.async_get_time_zone(thermostat["location"]["timeZone"])) + or dt_util.get_default_time_zone(), + ) + for index, thermostat in enumerate(data.ecobee.thermostats) + if thermostat["settings"]["ventilatorType"] != "none" + ] + + entities.extend( + ( + EcobeeSwitchAuxHeatOnly(data, index) for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["ventilatorType"] != "none" - ], - update_before_add=True, + if thermostat["settings"]["hasHeatPump"] + ) ) - async_add_entities( - EcobeeSwitchAuxHeatOnly(data, index) - for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["hasHeatPump"] - ) + async_add_entities(entities, update_before_add=True) class EcobeeVentilator20MinSwitch(EcobeeBaseEntity, SwitchEntity): diff --git a/homeassistant/components/econet/__init__.py b/homeassistant/components/econet/__init__.py index 84e636e660b..4fd920a5ecc 100644 --- a/homeassistant/components/econet/__init__.py +++ b/homeassistant/components/econet/__init__.py @@ -16,14 +16,12 @@ from pyeconet.errors import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.device_registry import DeviceInfo -from 
homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.event import async_track_time_interval -from .const import API_CLIENT, DOMAIN, EQUIPMENT +from .const import API_CLIENT, DOMAIN, EQUIPMENT, PUSH_UPDATE _LOGGER = logging.getLogger(__name__) @@ -31,9 +29,9 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.SENSOR, + Platform.SWITCH, Platform.WATER_HEATER, ] -PUSH_UPDATE = "econet.push_update" INTERVAL = timedelta(minutes=60) @@ -98,41 +96,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN][API_CLIENT].pop(entry.entry_id) hass.data[DOMAIN][EQUIPMENT].pop(entry.entry_id) return unload_ok - - -class EcoNetEntity(Entity): - """Define a base EcoNet entity.""" - - _attr_should_poll = False - - def __init__(self, econet): - """Initialize.""" - self._econet = econet - self._attr_name = econet.device_name - self._attr_unique_id = f"{econet.device_id}_{econet.device_name}" - - async def async_added_to_hass(self): - """Subscribe to device events.""" - await super().async_added_to_hass() - self.async_on_remove( - async_dispatcher_connect(self.hass, PUSH_UPDATE, self.on_update_received) - ) - - @callback - def on_update_received(self): - """Update was pushed from the ecoent API.""" - self.async_write_ha_state() - - @property - def available(self): - """Return if the device is online or not.""" - return self._econet.connected - - @property - def device_info(self) -> DeviceInfo: - """Return device registry information for this entity.""" - return DeviceInfo( - identifiers={(DOMAIN, self._econet.device_id)}, - manufacturer="Rheem", - name=self._econet.device_name, - ) diff --git a/homeassistant/components/econet/binary_sensor.py b/homeassistant/components/econet/binary_sensor.py index 3f8e17a5fbe..0f5cb6f92af 100644 --- a/homeassistant/components/econet/binary_sensor.py +++ b/homeassistant/components/econet/binary_sensor.py @@ -13,8 +13,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import EcoNetEntity from .const import DOMAIN, EQUIPMENT +from .entity import EcoNetEntity BINARY_SENSOR_TYPES: tuple[BinarySensorEntityDescription, ...] = ( BinarySensorEntityDescription( diff --git a/homeassistant/components/econet/climate.py b/homeassistant/components/econet/climate.py index f6bd52c9702..cdf82f6817f 100644 --- a/homeassistant/components/econet/climate.py +++ b/homeassistant/components/econet/climate.py @@ -20,9 +20,10 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from . 
import EcoNetEntity from .const import DOMAIN, EQUIPMENT +from .entity import EcoNetEntity ECONET_STATE_TO_HA = { ThermostatOperationMode.HEATING: HVACMode.HEAT, @@ -67,7 +68,6 @@ class EcoNetThermostat(EcoNetEntity, ClimateEntity): _attr_should_poll = True _attr_temperature_unit = UnitOfTemperature.FAHRENHEIT - _enable_turn_on_off_backwards_compatibility = False def __init__(self, thermostat): """Initialize.""" @@ -203,10 +203,30 @@ class EcoNetThermostat(EcoNetEntity, ClimateEntity): def turn_aux_heat_on(self) -> None: """Turn auxiliary heater on.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) self._econet.set_mode(ThermostatOperationMode.EMERGENCY_HEAT) def turn_aux_heat_off(self) -> None: """Turn auxiliary heater off.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) self._econet.set_mode(ThermostatOperationMode.HEATING) @property diff --git a/homeassistant/components/econet/const.py b/homeassistant/components/econet/const.py index 46c70021048..ee8d4fc8a46 100644 --- a/homeassistant/components/econet/const.py +++ b/homeassistant/components/econet/const.py @@ -3,3 +3,5 @@ DOMAIN = "econet" API_CLIENT = "api_client" EQUIPMENT = "equipment" + +PUSH_UPDATE = "econet.push_update" diff --git a/homeassistant/components/econet/entity.py b/homeassistant/components/econet/entity.py new file mode 100644 index 00000000000..44488f0b133 --- /dev/null +++ b/homeassistant/components/econet/entity.py @@ -0,0 +1,46 @@ +"""Support for EcoNet products.""" + +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN, PUSH_UPDATE + + +class EcoNetEntity(Entity): + """Define a base EcoNet entity.""" + + _attr_should_poll = False + + def __init__(self, econet): + """Initialize.""" + self._econet = econet + self._attr_name = econet.device_name + self._attr_unique_id = f"{econet.device_id}_{econet.device_name}" + + async def async_added_to_hass(self): + """Subscribe to device events.""" + await super().async_added_to_hass() + self.async_on_remove( + async_dispatcher_connect(self.hass, PUSH_UPDATE, self.on_update_received) + ) + + @callback + def on_update_received(self): + """Update was pushed from the ecoent API.""" + self.async_write_ha_state() + + @property + def available(self): + """Return if the device is online or not.""" + return self._econet.connected + + @property + def device_info(self) -> DeviceInfo: + """Return device registry information for this entity.""" + return DeviceInfo( + identifiers={(DOMAIN, self._econet.device_id)}, + manufacturer="Rheem", + name=self._econet.device_name, + ) diff --git a/homeassistant/components/econet/manifest.json b/homeassistant/components/econet/manifest.json index c96867b489b..6586af92d1f 100644 --- a/homeassistant/components/econet/manifest.json +++ b/homeassistant/components/econet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/econet", "iot_class": "cloud_push", "loggers": ["paho_mqtt", "pyeconet"], - "requirements": ["pyeconet==0.1.22"] + "requirements": ["pyeconet==0.1.23"] } diff --git 
a/homeassistant/components/econet/sensor.py b/homeassistant/components/econet/sensor.py index f2d4ab304a5..19bac8c9e1f 100644 --- a/homeassistant/components/econet/sensor.py +++ b/homeassistant/components/econet/sensor.py @@ -21,8 +21,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import EcoNetEntity from .const import DOMAIN, EQUIPMENT +from .entity import EcoNetEntity SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( diff --git a/homeassistant/components/econet/strings.json b/homeassistant/components/econet/strings.json index 6e81085a9bf..212ff83007b 100644 --- a/homeassistant/components/econet/strings.json +++ b/homeassistant/components/econet/strings.json @@ -18,5 +18,18 @@ } } } + }, + "issues": { + "migrate_aux_heat": { + "title": "Migration of EcoNet set_aux_heat action", + "fix_flow": { + "step": { + "confirm": { + "description": "The EcoNet `set_aux_heat` action has been migrated. A new `aux_heat_only` switch entity is available for each thermostat.\n\nUpdate any automations to use the new `aux_heat_only` switch entity. When this is done, select **Submit** to fix this issue.", + "title": "[%key:component::econet::issues::migrate_aux_heat::title%]" + } + } + } + } } } diff --git a/homeassistant/components/econet/switch.py b/homeassistant/components/econet/switch.py new file mode 100644 index 00000000000..e36f6c834b1 --- /dev/null +++ b/homeassistant/components/econet/switch.py @@ -0,0 +1,57 @@ +"""Support for switches on ecoNet thermostats.""" + +from __future__ import annotations + +import logging +from typing import Any + +from pyeconet.equipment import EquipmentType +from pyeconet.equipment.thermostat import ThermostatOperationMode + +from homeassistant.components.switch import SwitchEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN, EQUIPMENT +from .entity import EcoNetEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the EcoNet thermostat switch entity.""" + equipment = hass.data[DOMAIN][EQUIPMENT][entry.entry_id] + async_add_entities( + EcoNetSwitchAuxHeatOnly(thermostat) + for thermostat in equipment[EquipmentType.THERMOSTAT] + ) + + +class EcoNetSwitchAuxHeatOnly(EcoNetEntity, SwitchEntity): + """Representation of an aux_heat_only EcoNet switch.""" + + def __init__(self, thermostat) -> None: + """Initialize the EcoNet auxiliary heat switch.""" + super().__init__(thermostat) + self._attr_name = f"{thermostat.device_name} emergency heat" + self._attr_unique_id = ( + f"{thermostat.device_id}_{thermostat.device_name}_auxheat" + ) + + def turn_on(self, **kwargs: Any) -> None: + """Set the hvacMode to auxHeatOnly.""" + self._econet.set_mode(ThermostatOperationMode.EMERGENCY_HEAT) + + def turn_off(self, **kwargs: Any) -> None: + """Set the hvacMode back to heating.""" + self._econet.set_mode(ThermostatOperationMode.HEATING) + + @property + def is_on(self) -> bool: + """Return true if auxHeatOnly mode is active.""" + return self._econet.mode == ThermostatOperationMode.EMERGENCY_HEAT diff --git a/homeassistant/components/econet/water_heater.py b/homeassistant/components/econet/water_heater.py index 5db339b4411..efe4196993c 100644 ---
a/homeassistant/components/econet/water_heater.py +++ b/homeassistant/components/econet/water_heater.py @@ -22,8 +22,8 @@ from homeassistant.const import ATTR_TEMPERATURE, STATE_OFF, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import EcoNetEntity from .const import DOMAIN, EQUIPMENT +from .entity import EcoNetEntity SCAN_INTERVAL = timedelta(hours=1) diff --git a/homeassistant/components/ecovacs/controller.py b/homeassistant/components/ecovacs/controller.py index ec67845cf9f..69dd0f0813f 100644 --- a/homeassistant/components/ecovacs/controller.py +++ b/homeassistant/components/ecovacs/controller.py @@ -13,7 +13,6 @@ from deebot_client.authentication import Authenticator, create_rest_config from deebot_client.const import UNDEFINED, UndefinedType from deebot_client.device import Device from deebot_client.exceptions import DeebotError, InvalidAuthenticationError -from deebot_client.models import DeviceInfo from deebot_client.mqtt_client import MqttClient, create_mqtt_config from deebot_client.util import md5 from deebot_client.util.continents import get_continent @@ -81,25 +80,32 @@ class EcovacsController: try: devices = await self._api_client.get_devices() credentials = await self._authenticator.authenticate() - for device_config in devices: - if isinstance(device_config, DeviceInfo): - # MQTT device - device = Device(device_config, self._authenticator) - mqtt = await self._get_mqtt_client() - await device.initialize(mqtt) - self._devices.append(device) - else: - # Legacy device - bot = VacBot( - credentials.user_id, - EcoVacsAPI.REALM, - self._device_id[0:8], - credentials.token, - device_config, - self._continent, - monitor=True, - ) - self._legacy_devices.append(bot) + for device_info in devices.mqtt: + device = Device(device_info, self._authenticator) + mqtt = await self._get_mqtt_client() + await device.initialize(mqtt) + self._devices.append(device) + for device_config in devices.xmpp: + bot = VacBot( + credentials.user_id, + EcoVacsAPI.REALM, + self._device_id[0:8], + credentials.token, + device_config, + self._continent, + monitor=True, + ) + self._legacy_devices.append(bot) + for device_config in devices.not_supported: + _LOGGER.warning( + ( + 'Device "%s" not supported. 
More information at ' + "https://github.com/DeebotUniverse/client.py/issues/612: %s" + ), + device_config["deviceName"], + device_config, + ) + except InvalidAuthenticationError as ex: raise ConfigEntryError("Invalid credentials") from ex except DeebotError as ex: diff --git a/homeassistant/components/ecovacs/icons.json b/homeassistant/components/ecovacs/icons.json index 0c7178ced84..6097f43a4e4 100644 --- a/homeassistant/components/ecovacs/icons.json +++ b/homeassistant/components/ecovacs/icons.json @@ -145,6 +145,8 @@ } }, "services": { - "raw_get_positions": "mdi:map-marker-radius-outline" + "raw_get_positions": { + "service": "mdi:map-marker-radius-outline" + } } } diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 560ee4d599c..271f9ee8dcd 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.3.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==9.4.0"] } diff --git a/homeassistant/components/ecovacs/sensor.py b/homeassistant/components/ecovacs/sensor.py index 28c4efbd0c6..7c190d27775 100644 --- a/homeassistant/components/ecovacs/sensor.py +++ b/homeassistant/components/ecovacs/sensor.py @@ -26,11 +26,11 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( - AREA_SQUARE_METERS, ATTR_BATTERY_LEVEL, CONF_DESCRIPTION, PERCENTAGE, EntityCategory, + UnitOfArea, UnitOfTime, ) from homeassistant.core import HomeAssistant @@ -67,7 +67,7 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = ( capability_fn=lambda caps: caps.stats.clean, value_fn=lambda e: e.area, translation_key="stats_area", - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, ), EcovacsSensorEntityDescription[StatsEvent]( key="stats_time", @@ -84,7 +84,7 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = ( value_fn=lambda e: e.area, key="total_stats_area", translation_key="total_stats_area", - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, state_class=SensorStateClass.TOTAL_INCREASING, ), EcovacsSensorEntityDescription[TotalStatsEvent]( diff --git a/homeassistant/components/ecovacs/strings.json b/homeassistant/components/ecovacs/strings.json index 8222cabed07..c9de461ad5b 100644 --- a/homeassistant/components/ecovacs/strings.json +++ b/homeassistant/components/ecovacs/strings.json @@ -31,7 +31,7 @@ "mode": "[%key:common::config_flow::data::mode%]" }, "data_description": { - "mode": "Select the mode you want to use to connect to Ecovacs. If you are unsure, select 'Cloud'.\n\nSelect 'Self-hosted' only if you have a working self-hosted instance." + "mode": "Select the mode you want to use to connect to Ecovacs. If you are unsure, select **Cloud**.\n\nSelect **Self-hosted** only if you have a working self-hosted instance." 
} } } diff --git a/homeassistant/components/ecovacs/vacuum.py b/homeassistant/components/ecovacs/vacuum.py index 0d14267e08d..dde4fd64b56 100644 --- a/homeassistant/components/ecovacs/vacuum.py +++ b/homeassistant/components/ecovacs/vacuum.py @@ -13,14 +13,9 @@ from deebot_client.models import CleanAction, CleanMode, Room, State import sucks from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, StateVacuumEntityDescription, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.core import HomeAssistant, SupportsResponse @@ -123,22 +118,22 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity): self.schedule_update_ha_state() @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return the state of the vacuum cleaner.""" if self.error is not None: - return STATE_ERROR + return VacuumActivity.ERROR if self.device.is_cleaning: - return STATE_CLEANING + return VacuumActivity.CLEANING if self.device.is_charging: - return STATE_DOCKED + return VacuumActivity.DOCKED if self.device.vacuum_status == sucks.CLEAN_MODE_STOP: - return STATE_IDLE + return VacuumActivity.IDLE if self.device.vacuum_status == sucks.CHARGE_MODE_RETURNING: - return STATE_RETURNING + return VacuumActivity.RETURNING return None @@ -202,7 +197,7 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity): def set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None: """Set fan speed.""" - if self.state == STATE_CLEANING: + if self.state == VacuumActivity.CLEANING: self.device.run(sucks.Clean(mode=self.device.clean_status, speed=fan_speed)) def send_command( @@ -225,12 +220,12 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity): _STATE_TO_VACUUM_STATE = { - State.IDLE: STATE_IDLE, - State.CLEANING: STATE_CLEANING, - State.RETURNING: STATE_RETURNING, - State.DOCKED: STATE_DOCKED, - State.ERROR: STATE_ERROR, - State.PAUSED: STATE_PAUSED, + State.IDLE: VacuumActivity.IDLE, + State.CLEANING: VacuumActivity.CLEANING, + State.RETURNING: VacuumActivity.RETURNING, + State.DOCKED: VacuumActivity.DOCKED, + State.ERROR: VacuumActivity.ERROR, + State.PAUSED: VacuumActivity.PAUSED, } _ATTR_ROOMS = "rooms" @@ -284,7 +279,7 @@ class EcovacsVacuum( self.async_write_ha_state() async def on_status(event: StateEvent) -> None: - self._attr_state = _STATE_TO_VACUUM_STATE[event.state] + self._attr_activity = _STATE_TO_VACUUM_STATE[event.state] self.async_write_ha_state() self._subscribe(self._capability.battery.event, on_battery) diff --git a/homeassistant/components/ecowitt/strings.json b/homeassistant/components/ecowitt/strings.json index cca51c1129e..aaacb5e03dd 100644 --- a/homeassistant/components/ecowitt/strings.json +++ b/homeassistant/components/ecowitt/strings.json @@ -6,7 +6,7 @@ } }, "create_entry": { - "default": "To finish setting up the integration, use the Ecowitt App (on your phone) or access the Ecowitt WebUI in a browser at the station IP address.\n\nPick your station -> Menu Others -> DIY Upload Servers. Hit next and select 'Customized'\n\n- Server IP: `{server}`\n- Path: `{path}`\n- Port: `{port}`\n\nClick on 'Save'." + "default": "To finish setting up the integration, you need to tell the Ecowitt station to send data to Home Assistant at the following address:\n\n- Server IP / Host Name: `{server}`\n- Path: `{path}`\n- Port: `{port}`\n\nYou can access the Ecowitt configuration in one of two ways:\n\n1. 
Use the Ecowitt App (on your phone):\n - Select the Menu Icon (☰) on the upper left, then **My Devices** → **Pick your station**\n - Select the Ellipsis Icon (⋯) → **Others**\n - Select **DIY Upload Servers** → **Customized**\n - Make sure to choose 'Protocol Type Same As: Ecowitt'\n - Enter the Server IP / Host Name, Path, and Port (printed above). _Note: The path has to match! Remove the first forward slash from the path, as the app will prepend one._\n - Save\n1. Navigate to the Ecowitt web UI in a browser at the station IP address:\n - Select **Weather Services** then scroll down to 'Customized'\n - Make sure to select 'Customized: 🔘 Enable' and 'Protocol Type Same As: 🔘 Ecowitt'\n - Enter the Server IP / Host Name, Path, and Port (printed above).\n - Save" } } } diff --git a/homeassistant/components/eddystone_temperature/manifest.json b/homeassistant/components/eddystone_temperature/manifest.json index b15a88d099f..18e67f55667 100644 --- a/homeassistant/components/eddystone_temperature/manifest.json +++ b/homeassistant/components/eddystone_temperature/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/eddystone_temperature", "iot_class": "local_polling", "loggers": ["beacontools"], + "quality_scale": "legacy", "requirements": ["beacontools[scan]==2.1.0"] } diff --git a/homeassistant/components/eddystone_temperature/sensor.py b/homeassistant/components/eddystone_temperature/sensor.py index 637beffcf94..5dc30a575d7 100644 --- a/homeassistant/components/eddystone_temperature/sensor.py +++ b/homeassistant/components/eddystone_temperature/sensor.py @@ -79,12 +79,12 @@ def setup_platform( def monitor_stop(event: Event) -> None: """Stop the monitor thread.""" - _LOGGER.info("Stopping scanner for Eddystone beacons") + _LOGGER.debug("Stopping scanner for Eddystone beacons") mon.stop() def monitor_start(event: Event) -> None: """Start the monitor thread.""" - _LOGGER.info("Starting scanner for Eddystone beacons") + _LOGGER.debug("Starting scanner for Eddystone beacons") mon.start() add_entities(devices) diff --git a/homeassistant/components/edimax/manifest.json b/homeassistant/components/edimax/manifest.json index f104ec40e64..a226ef3bbe8 100644 --- a/homeassistant/components/edimax/manifest.json +++ b/homeassistant/components/edimax/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/edimax", "iot_class": "local_polling", "loggers": ["pyedimax"], + "quality_scale": "legacy", "requirements": ["pyedimax==0.2.1"] } diff --git a/homeassistant/components/efergy/__init__.py b/homeassistant/components/efergy/__init__.py index 52979e50552..fd5aa930027 100644 --- a/homeassistant/components/efergy/__init__.py +++ b/homeassistant/components/efergy/__init__.py @@ -8,12 +8,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity - -from .const import DEFAULT_NAME, DOMAIN PLATFORMS = [Platform.SENSOR] type EfergyConfigEntry = ConfigEntry[Efergy] @@ -47,22 +42,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: EfergyConfigEntry) -> bo async def async_unload_entry(hass: HomeAssistant, entry: 
EfergyConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -class EfergyEntity(Entity): - """Representation of a Efergy entity.""" - - _attr_attribution = "Data provided by Efergy" - - def __init__(self, api: Efergy, server_unique_id: str) -> None: - """Initialize an Efergy entity.""" - self.api = api - self._attr_device_info = DeviceInfo( - configuration_url="https://engage.efergy.com/user/login", - connections={(dr.CONNECTION_NETWORK_MAC, api.info["mac"])}, - identifiers={(DOMAIN, server_unique_id)}, - manufacturer=DEFAULT_NAME, - name=DEFAULT_NAME, - model=api.info["type"], - sw_version=api.info["version"], - ) diff --git a/homeassistant/components/efergy/config_flow.py b/homeassistant/components/efergy/config_flow.py index b17c19693d6..5b132211587 100644 --- a/homeassistant/components/efergy/config_flow.py +++ b/homeassistant/components/efergy/config_flow.py @@ -33,9 +33,7 @@ class EfergyFlowHandler(ConfigFlow, domain=DOMAIN): if error is None: entry = await self.async_set_unique_id(hid) if entry: - self.hass.config_entries.async_update_entry(entry, data=user_input) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(entry, data=user_input) self._abort_if_unique_id_configured() return self.async_create_entry( title=DEFAULT_NAME, diff --git a/homeassistant/components/efergy/entity.py b/homeassistant/components/efergy/entity.py new file mode 100644 index 00000000000..4cbe44d1c10 --- /dev/null +++ b/homeassistant/components/efergy/entity.py @@ -0,0 +1,30 @@ +"""The Efergy integration.""" + +from __future__ import annotations + +from pyefergy import Efergy + +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DEFAULT_NAME, DOMAIN + + +class EfergyEntity(Entity): + """Representation of a Efergy entity.""" + + _attr_attribution = "Data provided by Efergy" + + def __init__(self, api: Efergy, server_unique_id: str) -> None: + """Initialize an Efergy entity.""" + self.api = api + self._attr_device_info = DeviceInfo( + configuration_url="https://engage.efergy.com/user/login", + connections={(dr.CONNECTION_NETWORK_MAC, api.info["mac"])}, + identifiers={(DOMAIN, server_unique_id)}, + manufacturer=DEFAULT_NAME, + name=DEFAULT_NAME, + model=api.info["type"], + sw_version=api.info["version"], + ) diff --git a/homeassistant/components/efergy/sensor.py b/homeassistant/components/efergy/sensor.py index a03f8f7d012..419c4da591d 100644 --- a/homeassistant/components/efergy/sensor.py +++ b/homeassistant/components/efergy/sensor.py @@ -20,8 +20,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import EfergyConfigEntry, EfergyEntity +from . import EfergyConfigEntry from .const import CONF_CURRENT_VALUES, LOGGER +from .entity import EfergyEntity SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( SensorEntityDescription( @@ -182,4 +183,4 @@ class EfergySensor(EfergyEntity, SensorEntity): return if not self._attr_available: self._attr_available = True - LOGGER.info("Connection has resumed") + LOGGER.debug("Connection has resumed") diff --git a/homeassistant/components/egardia/alarm_control_panel.py b/homeassistant/components/egardia/alarm_control_panel.py index 706ba0db719..5a18a23541a 100644 --- a/homeassistant/components/egardia/alarm_control_panel.py +++ b/homeassistant/components/egardia/alarm_control_panel.py @@ -9,13 +9,7 @@ import requests from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, -) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -33,13 +27,13 @@ from . import ( _LOGGER = logging.getLogger(__name__) STATES = { - "ARM": STATE_ALARM_ARMED_AWAY, - "DAY HOME": STATE_ALARM_ARMED_HOME, - "DISARM": STATE_ALARM_DISARMED, - "ARMHOME": STATE_ALARM_ARMED_HOME, - "HOME": STATE_ALARM_ARMED_HOME, - "NIGHT HOME": STATE_ALARM_ARMED_NIGHT, - "TRIGGERED": STATE_ALARM_TRIGGERED, + "ARM": AlarmControlPanelState.ARMED_AWAY, + "DAY HOME": AlarmControlPanelState.ARMED_HOME, + "DISARM": AlarmControlPanelState.DISARMED, + "ARMHOME": AlarmControlPanelState.ARMED_HOME, + "HOME": AlarmControlPanelState.ARMED_HOME, + "NIGHT HOME": AlarmControlPanelState.ARMED_NIGHT, + "TRIGGERED": AlarmControlPanelState.TRIGGERED, } @@ -66,7 +60,6 @@ def setup_platform( class EgardiaAlarm(AlarmControlPanelEntity): """Representation of a Egardia alarm.""" - _attr_state: str | None _attr_code_arm_required = False _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME @@ -123,7 +116,7 @@ class EgardiaAlarm(AlarmControlPanelEntity): _LOGGER.debug("Not ignoring status %s", status) newstatus = STATES.get(status.upper()) _LOGGER.debug("newstatus %s", newstatus) - self._attr_state = newstatus + self._attr_alarm_state = newstatus else: _LOGGER.error("Ignoring status") diff --git a/homeassistant/components/egardia/manifest.json b/homeassistant/components/egardia/manifest.json index 99f39c99cbc..08eb82df0e7 100644 --- a/homeassistant/components/egardia/manifest.json +++ b/homeassistant/components/egardia/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/egardia", "iot_class": "local_polling", "loggers": ["pythonegardia"], + "quality_scale": "legacy", "requirements": ["pythonegardia==1.0.52"] } diff --git a/homeassistant/components/eheimdigital/__init__.py b/homeassistant/components/eheimdigital/__init__.py new file mode 100644 index 00000000000..cf08f45bed5 --- /dev/null +++ b/homeassistant/components/eheimdigital/__init__.py @@ -0,0 +1,51 @@ +"""The EHEIM Digital integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntry + +from .const import DOMAIN +from .coordinator import EheimDigitalUpdateCoordinator + +PLATFORMS = [Platform.LIGHT] + +type EheimDigitalConfigEntry = ConfigEntry[EheimDigitalUpdateCoordinator] + + +async def async_setup_entry( + hass: HomeAssistant, entry: EheimDigitalConfigEntry +) -> bool: + """Set up EHEIM Digital from a 
config entry.""" + + coordinator = EheimDigitalUpdateCoordinator(hass) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: EheimDigitalConfigEntry +) -> bool: + """Unload a config entry.""" + await entry.runtime_data.hub.close() + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_remove_config_entry_device( + hass: HomeAssistant, + config_entry: EheimDigitalConfigEntry, + device_entry: DeviceEntry, +) -> bool: + """Remove a config entry from a device.""" + return not any( + identifier + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + and identifier[1] in config_entry.runtime_data.hub.devices + ) diff --git a/homeassistant/components/eheimdigital/config_flow.py b/homeassistant/components/eheimdigital/config_flow.py new file mode 100644 index 00000000000..6994c6f65b5 --- /dev/null +++ b/homeassistant/components/eheimdigital/config_flow.py @@ -0,0 +1,127 @@ +"""Config flow for EHEIM Digital.""" + +from __future__ import annotations + +import asyncio +from typing import TYPE_CHECKING, Any + +from aiohttp import ClientError +from eheimdigital.device import EheimDigitalDevice +from eheimdigital.hub import EheimDigitalHub +import voluptuous as vol + +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST +from homeassistant.helpers import selector +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN, LOGGER + +CONFIG_SCHEMA = vol.Schema( + {vol.Required(CONF_HOST, default="eheimdigital.local"): selector.TextSelector()} +) + + +class EheimDigitalConfigFlow(ConfigFlow, domain=DOMAIN): + """The EHEIM Digital config flow.""" + + def __init__(self) -> None: + """Initialize the config flow.""" + super().__init__() + self.data: dict[str, Any] = {} + self.main_device_added_event = asyncio.Event() + + async def async_step_zeroconf( + self, discovery_info: ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery.""" + self.data[CONF_HOST] = host = discovery_info.host + + self._async_abort_entries_match(self.data) + + hub = EheimDigitalHub( + host=host, + session=async_get_clientsession(self.hass), + loop=self.hass.loop, + main_device_added_event=self.main_device_added_event, + ) + try: + await hub.connect() + + async with asyncio.timeout(2): + # This event gets triggered when the first message is received from + # the device, it contains the data necessary to create the main device. + # This removes the race condition where the main device is accessed + # before the response from the device is parsed. 
+ await self.main_device_added_event.wait() + if TYPE_CHECKING: + # At this point the main device is always set + assert isinstance(hub.main, EheimDigitalDevice) + await hub.close() + except (ClientError, TimeoutError): + return self.async_abort(reason="cannot_connect") + except Exception: # noqa: BLE001 + return self.async_abort(reason="unknown") + await self.async_set_unique_id(hub.main.mac_address) + self._abort_if_unique_id_configured(updates={CONF_HOST: host}) + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + if user_input is not None: + return self.async_create_entry( + title=self.data[CONF_HOST], + data={CONF_HOST: self.data[CONF_HOST]}, + ) + + self._set_confirm_only() + return self.async_show_form(step_id="discovery_confirm") + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + if user_input is None: + return self.async_show_form(step_id=SOURCE_USER, data_schema=CONFIG_SCHEMA) + + self._async_abort_entries_match(user_input) + errors: dict[str, str] = {} + hub = EheimDigitalHub( + host=user_input[CONF_HOST], + session=async_get_clientsession(self.hass), + loop=self.hass.loop, + main_device_added_event=self.main_device_added_event, + ) + + try: + await hub.connect() + + async with asyncio.timeout(2): + # This event gets triggered when the first message is received from + # the device, it contains the data necessary to create the main device. + # This removes the race condition where the main device is accessed + # before the response from the device is parsed. + await self.main_device_added_event.wait() + if TYPE_CHECKING: + # At this point the main device is always set + assert isinstance(hub.main, EheimDigitalDevice) + await self.async_set_unique_id( + hub.main.mac_address, raise_on_progress=False + ) + await hub.close() + except (ClientError, TimeoutError): + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + errors["base"] = "unknown" + LOGGER.exception("Unknown exception occurred") + else: + self._abort_if_unique_id_configured() + return self.async_create_entry(data=user_input, title=user_input[CONF_HOST]) + return self.async_show_form( + step_id=SOURCE_USER, + data_schema=CONFIG_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/eheimdigital/const.py b/homeassistant/components/eheimdigital/const.py new file mode 100644 index 00000000000..5ed9303be40 --- /dev/null +++ b/homeassistant/components/eheimdigital/const.py @@ -0,0 +1,17 @@ +"""Constants for the EHEIM Digital integration.""" + +from logging import Logger, getLogger + +from eheimdigital.types import LightMode + +from homeassistant.components.light import EFFECT_OFF + +LOGGER: Logger = getLogger(__package__) +DOMAIN = "eheimdigital" + +EFFECT_DAYCL_MODE = "daycl_mode" + +EFFECT_TO_LIGHT_MODE = { + EFFECT_DAYCL_MODE: LightMode.DAYCL_MODE, + EFFECT_OFF: LightMode.MAN_MODE, +} diff --git a/homeassistant/components/eheimdigital/coordinator.py b/homeassistant/components/eheimdigital/coordinator.py new file mode 100644 index 00000000000..f122a1227c5 --- /dev/null +++ b/homeassistant/components/eheimdigital/coordinator.py @@ -0,0 +1,78 @@ +"""Data update coordinator for the EHEIM Digital integration.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +from typing import Any + +from aiohttp import ClientError +from 
eheimdigital.device import EheimDigitalDevice +from eheimdigital.hub import EheimDigitalHub +from eheimdigital.types import EheimDeviceType + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER + +type AsyncSetupDeviceEntitiesCallback = Callable[[str], Coroutine[Any, Any, None]] + + +class EheimDigitalUpdateCoordinator( + DataUpdateCoordinator[dict[str, EheimDigitalDevice]] +): + """The EHEIM Digital data update coordinator.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the EHEIM Digital data update coordinator.""" + super().__init__( + hass, LOGGER, name=DOMAIN, update_interval=DEFAULT_SCAN_INTERVAL + ) + self.hub = EheimDigitalHub( + host=self.config_entry.data[CONF_HOST], + session=async_get_clientsession(hass), + loop=hass.loop, + receive_callback=self._async_receive_callback, + device_found_callback=self._async_device_found, + ) + self.known_devices: set[str] = set() + self.platform_callbacks: set[AsyncSetupDeviceEntitiesCallback] = set() + + def add_platform_callback( + self, + async_setup_device_entities: AsyncSetupDeviceEntitiesCallback, + ) -> None: + """Add the setup callbacks from a specific platform.""" + self.platform_callbacks.add(async_setup_device_entities) + + async def _async_device_found( + self, device_address: str, device_type: EheimDeviceType + ) -> None: + """Set up a new device found. + + This function is called from the library whenever a new device is added. 
+ """ + + if device_address not in self.known_devices: + for platform_callback in self.platform_callbacks: + await platform_callback(device_address) + + async def _async_receive_callback(self) -> None: + self.async_set_updated_data(self.hub.devices) + + async def _async_setup(self) -> None: + await self.hub.connect() + await self.hub.update() + + async def _async_update_data(self) -> dict[str, EheimDigitalDevice]: + try: + await self.hub.update() + except ClientError as ex: + raise UpdateFailed from ex + return self.data diff --git a/homeassistant/components/eheimdigital/entity.py b/homeassistant/components/eheimdigital/entity.py new file mode 100644 index 00000000000..c0f91a4b798 --- /dev/null +++ b/homeassistant/components/eheimdigital/entity.py @@ -0,0 +1,53 @@ +"""Base entity for EHEIM Digital.""" + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +from eheimdigital.device import EheimDigitalDevice + +from homeassistant.const import CONF_HOST +from homeassistant.core import callback +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import EheimDigitalUpdateCoordinator + + +class EheimDigitalEntity[_DeviceT: EheimDigitalDevice]( + CoordinatorEntity[EheimDigitalUpdateCoordinator], ABC +): + """Represent a EHEIM Digital entity.""" + + _attr_has_entity_name = True + + def __init__( + self, coordinator: EheimDigitalUpdateCoordinator, device: _DeviceT + ) -> None: + """Initialize a EHEIM Digital entity.""" + super().__init__(coordinator) + if TYPE_CHECKING: + # At this point at least one device is found and so there is always a main device set + assert isinstance(coordinator.hub.main, EheimDigitalDevice) + self._attr_device_info = DeviceInfo( + configuration_url=f"http://{coordinator.config_entry.data[CONF_HOST]}", + name=device.name, + connections={(CONNECTION_NETWORK_MAC, device.mac_address)}, + manufacturer="EHEIM", + model=device.device_type.model_name, + identifiers={(DOMAIN, device.mac_address)}, + suggested_area=device.aquarium_name, + sw_version=device.sw_version, + via_device=(DOMAIN, coordinator.hub.main.mac_address), + ) + self._device = device + self._device_address = device.mac_address + + @abstractmethod + def _async_update_attrs(self) -> None: ... + + @callback + def _handle_coordinator_update(self) -> None: + """Update attributes when the coordinator updates.""" + self._async_update_attrs() + super()._handle_coordinator_update() diff --git a/homeassistant/components/eheimdigital/light.py b/homeassistant/components/eheimdigital/light.py new file mode 100644 index 00000000000..a119e0bda8d --- /dev/null +++ b/homeassistant/components/eheimdigital/light.py @@ -0,0 +1,127 @@ +"""EHEIM Digital lights.""" + +from typing import Any + +from eheimdigital.classic_led_ctrl import EheimDigitalClassicLEDControl +from eheimdigital.types import EheimDigitalClientError, LightMode + +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_EFFECT, + EFFECT_OFF, + ColorMode, + LightEntity, + LightEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.color import brightness_to_value, value_to_brightness + +from . 
import EheimDigitalConfigEntry +from .const import EFFECT_DAYCL_MODE, EFFECT_TO_LIGHT_MODE +from .coordinator import EheimDigitalUpdateCoordinator +from .entity import EheimDigitalEntity + +BRIGHTNESS_SCALE = (1, 100) + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: EheimDigitalConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the callbacks for the coordinator so lights can be added as devices are found.""" + coordinator = entry.runtime_data + + async def async_setup_device_entities(device_address: str) -> None: + """Set up the light entities for a device.""" + device = coordinator.hub.devices[device_address] + entities: list[EheimDigitalClassicLEDControlLight] = [] + + if isinstance(device, EheimDigitalClassicLEDControl): + for channel in range(2): + if len(device.tankconfig[channel]) > 0: + entities.append( + EheimDigitalClassicLEDControlLight(coordinator, device, channel) + ) + coordinator.known_devices.add(device.mac_address) + async_add_entities(entities) + + coordinator.add_platform_callback(async_setup_device_entities) + + for device_address in entry.runtime_data.hub.devices: + await async_setup_device_entities(device_address) + + +class EheimDigitalClassicLEDControlLight( + EheimDigitalEntity[EheimDigitalClassicLEDControl], LightEntity +): + """Represent a EHEIM Digital classicLEDcontrol light.""" + + _attr_supported_color_modes = {ColorMode.BRIGHTNESS} + _attr_color_mode = ColorMode.BRIGHTNESS + _attr_effect_list = [EFFECT_DAYCL_MODE] + _attr_supported_features = LightEntityFeature.EFFECT + _attr_translation_key = "channel" + + def __init__( + self, + coordinator: EheimDigitalUpdateCoordinator, + device: EheimDigitalClassicLEDControl, + channel: int, + ) -> None: + """Initialize an EHEIM Digital classicLEDcontrol light entity.""" + super().__init__(coordinator, device) + self._channel = channel + self._attr_translation_placeholders = {"channel_id": str(channel)} + self._attr_unique_id = f"{self._device_address}_{channel}" + self._async_update_attrs() + + @property + def available(self) -> bool: + """Return whether the entity is available.""" + return super().available and self._device.light_level[self._channel] is not None + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the light.""" + if ATTR_EFFECT in kwargs: + await self._device.set_light_mode(EFFECT_TO_LIGHT_MODE[kwargs[ATTR_EFFECT]]) + return + if ATTR_BRIGHTNESS in kwargs: + if self._device.light_mode == LightMode.DAYCL_MODE: + await self._device.set_light_mode(LightMode.MAN_MODE) + try: + await self._device.turn_on( + int(brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS])), + self._channel, + ) + except EheimDigitalClientError as err: + raise HomeAssistantError from err + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the light.""" + if self._device.light_mode == LightMode.DAYCL_MODE: + await self._device.set_light_mode(LightMode.MAN_MODE) + try: + await self._device.turn_off(self._channel) + except EheimDigitalClientError as err: + raise HomeAssistantError from err + + def _async_update_attrs(self) -> None: + light_level = self._device.light_level[self._channel] + + self._attr_is_on = light_level > 0 if light_level is not None else None + self._attr_brightness = ( + value_to_brightness(BRIGHTNESS_SCALE, light_level) + if light_level is not None + else None + ) + self._attr_effect = ( + EFFECT_DAYCL_MODE + if self._device.light_mode == 
LightMode.DAYCL_MODE + else EFFECT_OFF + ) diff --git a/homeassistant/components/eheimdigital/manifest.json b/homeassistant/components/eheimdigital/manifest.json new file mode 100644 index 00000000000..159aecd6b6c --- /dev/null +++ b/homeassistant/components/eheimdigital/manifest.json @@ -0,0 +1,15 @@ +{ + "domain": "eheimdigital", + "name": "EHEIM Digital", + "codeowners": ["@autinerd"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/eheimdigital", + "integration_type": "hub", + "iot_class": "local_polling", + "loggers": ["eheimdigital"], + "quality_scale": "bronze", + "requirements": ["eheimdigital==1.0.3"], + "zeroconf": [ + { "type": "_http._tcp.local.", "name": "eheimdigital._http._tcp.local." } + ] +} diff --git a/homeassistant/components/eheimdigital/quality_scale.yaml b/homeassistant/components/eheimdigital/quality_scale.yaml new file mode 100644 index 00000000000..a56551a14f6 --- /dev/null +++ b/homeassistant/components/eheimdigital/quality_scale.yaml @@ -0,0 +1,70 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: No service actions implemented. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: No service actions implemented. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: No service actions implemented. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: This integration doesn't have an options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: + status: exempt + comment: This integration requires no authentication. + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: todo + stale-devices: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/eheimdigital/strings.json b/homeassistant/components/eheimdigital/strings.json new file mode 100644 index 00000000000..0e6fa6a0814 --- /dev/null +++ b/homeassistant/components/eheimdigital/strings.json @@ -0,0 +1,39 @@ +{ + "config": { + "step": { + "discovery_confirm": { + "description": "[%key:common::config_flow::description::confirm_setup%]" + }, + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The host or IP address of your main device. Only needed to change if 'eheimdigital' doesn't work." 
+ } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "entity": { + "light": { + "channel": { + "name": "Channel {channel_id}", + "state_attributes": { + "effect": { + "state": { + "daycl_mode": "Daycycle mode" + } + } + } + } + } + } +} diff --git a/homeassistant/components/eight_sleep/manifest.json b/homeassistant/components/eight_sleep/manifest.json index a4f7482c920..59de546824f 100644 --- a/homeassistant/components/eight_sleep/manifest.json +++ b/homeassistant/components/eight_sleep/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/eight_sleep", "integration_type": "system", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": [] } diff --git a/homeassistant/components/electrasmart/climate.py b/homeassistant/components/electrasmart/climate.py index 9f6e7cbddf5..04e4742554b 100644 --- a/homeassistant/components/electrasmart/climate.py +++ b/homeassistant/components/electrasmart/climate.py @@ -111,7 +111,6 @@ class ElectraClimateEntity(ClimateEntity): _attr_hvac_modes = ELECTRA_MODES _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: ElectraAirConditioner, api: ElectraAPI) -> None: """Initialize Electra climate entity.""" @@ -203,7 +202,7 @@ class ElectraClimateEntity(ClimateEntity): return if not self._was_available: - _LOGGER.info( + _LOGGER.debug( "%s (%s) is now available", self._electra_ac_device.mac, self.name, diff --git a/homeassistant/components/electric_kiwi/api.py b/homeassistant/components/electric_kiwi/api.py index 89109f01948..dead8a6a3c0 100644 --- a/homeassistant/components/electric_kiwi/api.py +++ b/homeassistant/components/electric_kiwi/api.py @@ -27,7 +27,6 @@ class AsyncConfigEntryAuth(AbstractAuth): async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) diff --git a/homeassistant/components/electric_kiwi/config_flow.py b/homeassistant/components/electric_kiwi/config_flow.py index 5be3edeaa66..b74ab4268e2 100644 --- a/homeassistant/components/electric_kiwi/config_flow.py +++ b/homeassistant/components/electric_kiwi/config_flow.py @@ -6,7 +6,7 @@ from collections.abc import Mapping import logging from typing import Any -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN, SCOPE_VALUES @@ -19,11 +19,6 @@ class ElectricKiwiOauth2FlowHandler( DOMAIN = DOMAIN - def __init__(self) -> None: - """Set up instance.""" - super().__init__() - self._reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -38,9 +33,6 @@ class ElectricKiwiOauth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await 
self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -55,7 +47,5 @@ class ElectricKiwiOauth2FlowHandler( """Create an entry for Electric Kiwi.""" existing_entry = await self.async_set_unique_id(DOMAIN) if existing_entry: - self.hass.config_entries.async_update_entry(existing_entry, data=data) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(existing_entry, data=data) return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/electric_kiwi/strings.json b/homeassistant/components/electric_kiwi/strings.json index 359ca8e367d..410d32909ba 100644 --- a/homeassistant/components/electric_kiwi/strings.json +++ b/homeassistant/components/electric_kiwi/strings.json @@ -14,6 +14,7 @@ "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", diff --git a/homeassistant/components/elevenlabs/__init__.py b/homeassistant/components/elevenlabs/__init__.py index 99cddd783e2..84b2b61b8ed 100644 --- a/homeassistant/components/elevenlabs/__init__.py +++ b/homeassistant/components/elevenlabs/__init__.py @@ -4,14 +4,14 @@ from __future__ import annotations from dataclasses import dataclass -from elevenlabs import Model -from elevenlabs.client import AsyncElevenLabs +from elevenlabs import AsyncElevenLabs, Model from elevenlabs.core import ApiError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError +from homeassistant.helpers.httpx_client import get_async_client from .const import CONF_MODEL @@ -41,12 +41,15 @@ type EleventLabsConfigEntry = ConfigEntry[ElevenLabsData] async def async_setup_entry(hass: HomeAssistant, entry: EleventLabsConfigEntry) -> bool: """Set up ElevenLabs text-to-speech from a config entry.""" entry.add_update_listener(update_listener) - client = AsyncElevenLabs(api_key=entry.data[CONF_API_KEY]) + httpx_client = get_async_client(hass) + client = AsyncElevenLabs( + api_key=entry.data[CONF_API_KEY], httpx_client=httpx_client + ) model_id = entry.options[CONF_MODEL] try: model = await get_model_by_id(client, model_id) except ApiError as err: - raise ConfigEntryError("Auth failed") from err + raise ConfigEntryAuthFailed("Auth failed") from err if model is None or (not model.languages): raise ConfigEntryError("Model could not be resolved") diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index cf04304510a..60df79d6eaa 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -5,25 +5,38 @@ from __future__ import annotations import logging from typing import Any -from elevenlabs.client import AsyncElevenLabs +from elevenlabs import 
AsyncElevenLabs from elevenlabs.core import ApiError import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, - OptionsFlowWithConfigEntry, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant +from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.selector import ( SelectOptionDict, SelectSelector, SelectSelectorConfig, ) -from .const import CONF_MODEL, CONF_VOICE, DEFAULT_MODEL, DOMAIN +from . import EleventLabsConfigEntry +from .const import ( + CONF_CONFIGURE_VOICE, + CONF_MODEL, + CONF_OPTIMIZE_LATENCY, + CONF_SIMILARITY, + CONF_STABILITY, + CONF_STYLE, + CONF_USE_SPEAKER_BOOST, + CONF_VOICE, + DEFAULT_MODEL, + DEFAULT_OPTIMIZE_LATENCY, + DEFAULT_SIMILARITY, + DEFAULT_STABILITY, + DEFAULT_STYLE, + DEFAULT_USE_SPEAKER_BOOST, + DOMAIN, +) USER_STEP_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) @@ -31,9 +44,12 @@ USER_STEP_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) _LOGGER = logging.getLogger(__name__) -async def get_voices_models(api_key: str) -> tuple[dict[str, str], dict[str, str]]: +async def get_voices_models( + hass: HomeAssistant, api_key: str +) -> tuple[dict[str, str], dict[str, str]]: """Get available voices and models as dicts.""" - client = AsyncElevenLabs(api_key=api_key) + httpx_client = get_async_client(hass) + client = AsyncElevenLabs(api_key=api_key, httpx_client=httpx_client) voices = (await client.voices.get_all()).voices models = await client.models.get_all() voices_dict = { @@ -61,7 +77,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: try: - voices, _ = await get_voices_models(user_input[CONF_API_KEY]) + voices, _ = await get_voices_models(self.hass, user_input[CONF_API_KEY]) except ApiError: errors["base"] = "invalid_api_key" else: @@ -76,33 +92,39 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: EleventLabsConfigEntry, ) -> OptionsFlow: """Create the options flow.""" return ElevenLabsOptionsFlow(config_entry) -class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry): +class ElevenLabsOptionsFlow(OptionsFlow): """ElevenLabs options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self, config_entry: EleventLabsConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - self.api_key: str = self.config_entry.data[CONF_API_KEY] + self.api_key: str = config_entry.data[CONF_API_KEY] # id -> name self.voices: dict[str, str] = {} self.models: dict[str, str] = {} + self.model: str | None = None + self.voice: str | None = None async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage the options.""" if not self.voices or not self.models: - self.voices, self.models = await get_voices_models(self.api_key) + self.voices, self.models = await get_voices_models(self.hass, self.api_key) assert self.models and self.voices if user_input is not None: + self.model = user_input[CONF_MODEL] + self.voice = user_input[CONF_VOICE] + configure_voice = user_input.pop(CONF_CONFIGURE_VOICE) + if configure_voice: + return await self.async_step_voice_settings() return self.async_create_entry( title="ElevenLabs", data=user_input, @@ -139,7 +161,69 @@ class 
ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry): ] ) ), + vol.Required(CONF_CONFIGURE_VOICE, default=False): bool, } ), - self.options, + self.config_entry.options, + ) + + async def async_step_voice_settings( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle voice settings.""" + assert self.voices and self.models + if user_input is not None: + user_input[CONF_MODEL] = self.model + user_input[CONF_VOICE] = self.voice + return self.async_create_entry( + title="ElevenLabs", + data=user_input, + ) + return self.async_show_form( + step_id="voice_settings", + data_schema=self.elevenlabs_config_options_voice_schema(), + ) + + def elevenlabs_config_options_voice_schema(self) -> vol.Schema: + """Elevenlabs options voice schema.""" + return vol.Schema( + { + vol.Optional( + CONF_STABILITY, + default=self.config_entry.options.get( + CONF_STABILITY, DEFAULT_STABILITY + ), + ): vol.All( + vol.Coerce(float), + vol.Range(min=0, max=1), + ), + vol.Optional( + CONF_SIMILARITY, + default=self.config_entry.options.get( + CONF_SIMILARITY, DEFAULT_SIMILARITY + ), + ): vol.All( + vol.Coerce(float), + vol.Range(min=0, max=1), + ), + vol.Optional( + CONF_OPTIMIZE_LATENCY, + default=self.config_entry.options.get( + CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY + ), + ): vol.All(int, vol.Range(min=0, max=4)), + vol.Optional( + CONF_STYLE, + default=self.config_entry.options.get(CONF_STYLE, DEFAULT_STYLE), + ): vol.All( + vol.Coerce(float), + vol.Range(min=0, max=1), + ), + vol.Optional( + CONF_USE_SPEAKER_BOOST, + default=self.config_entry.options.get( + CONF_USE_SPEAKER_BOOST, DEFAULT_USE_SPEAKER_BOOST + ), + ): bool, + } ) diff --git a/homeassistant/components/elevenlabs/const.py b/homeassistant/components/elevenlabs/const.py index c0fc3c7b1b0..040d38d272c 100644 --- a/homeassistant/components/elevenlabs/const.py +++ b/homeassistant/components/elevenlabs/const.py @@ -2,6 +2,17 @@ CONF_VOICE = "voice" CONF_MODEL = "model" +CONF_CONFIGURE_VOICE = "configure_voice" +CONF_STABILITY = "stability" +CONF_SIMILARITY = "similarity" +CONF_OPTIMIZE_LATENCY = "optimize_streaming_latency" +CONF_STYLE = "style" +CONF_USE_SPEAKER_BOOST = "use_speaker_boost" DOMAIN = "elevenlabs" DEFAULT_MODEL = "eleven_multilingual_v2" +DEFAULT_STABILITY = 0.5 +DEFAULT_SIMILARITY = 0.75 +DEFAULT_OPTIMIZE_LATENCY = 0 +DEFAULT_STYLE = 0 +DEFAULT_USE_SPEAKER_BOOST = True diff --git a/homeassistant/components/elevenlabs/manifest.json b/homeassistant/components/elevenlabs/manifest.json index 968ea7b688a..eb6df09149a 100644 --- a/homeassistant/components/elevenlabs/manifest.json +++ b/homeassistant/components/elevenlabs/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["elevenlabs"], - "requirements": ["elevenlabs==1.6.1"] + "requirements": ["elevenlabs==1.9.0"] } diff --git a/homeassistant/components/elevenlabs/quality_scale.yaml b/homeassistant/components/elevenlabs/quality_scale.yaml new file mode 100644 index 00000000000..49f0d7518f5 --- /dev/null +++ b/homeassistant/components/elevenlabs/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: done + comment: > + Only entity services + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: > + We should have every test end in either ABORT or CREATE_ENTRY. + test_invalid_api_key should assert the kind of error that is raised. 
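
Editor's note on the new voice_settings options step above: stability, similarity and style are coerced to floats in [0, 1], and optimize_streaming_latency is an int in [0, 4]. The snippet below is a self-contained sketch of just that validation behaviour; the keys and defaults are copied from the diff, but this is not the integration's schema builder, which also pre-fills values from the existing config_entry.options.

# Self-contained sketch of the validators used by the new voice_settings step.
# Keys and defaults mirror the diff above; the rest is a plain voluptuous
# demonstration, not ElevenLabs- or Home Assistant-specific code.
import voluptuous as vol

VOICE_SETTINGS_SCHEMA = vol.Schema(
    {
        vol.Optional("stability", default=0.5): vol.All(
            vol.Coerce(float), vol.Range(min=0, max=1)
        ),
        vol.Optional("similarity", default=0.75): vol.All(
            vol.Coerce(float), vol.Range(min=0, max=1)
        ),
        vol.Optional("style", default=0): vol.All(
            vol.Coerce(float), vol.Range(min=0, max=1)
        ),
        vol.Optional("optimize_streaming_latency", default=0): vol.All(
            int, vol.Range(min=0, max=4)
        ),
        vol.Optional("use_speaker_boost", default=True): bool,
    }
)

# Coerce(float) accepts numeric or string input ("0.3" becomes 0.3), and
# missing keys are filled from the declared defaults.
print(VOICE_SETTINGS_SCHEMA({"stability": "0.3"}))

# Out-of-range values raise vol.MultipleInvalid, which an options flow
# would surface to the user as a form error.
try:
    VOICE_SETTINGS_SCHEMA({"optimize_streaming_latency": 9})
except vol.MultipleInvalid as err:
    print(f"rejected: {err}")
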
+ config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: > + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: todo + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: todo + # Silver + config-entry-unloading: done + log-when-unavailable: todo + entity-unavailable: + status: exempt + comment: > + There is no state in the TTS platform and we can't check poll if the TTS service is available. + action-exceptions: done + reauthentication-flow: todo + parallel-updates: done + test-coverage: todo + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: todo + + # Gold + entity-translations: todo + entity-device-class: + status: exempt + comment: There is no device class for Text To Speech entities. + devices: done + entity-category: done + entity-disabled-by-default: todo + discovery: + status: exempt + comment: > + This is not possible because there is no physical device. + stale-devices: + status: exempt + comment: > + This is not possible because there is no physical device. + diagnostics: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: + status: todo + comment: > + I imagine this could be useful if the default voice is deleted from voice lab. + dynamic-devices: + status: exempt + comment: | + This is not possible because there is no physical device. + discovery-update-info: + status: exempt + comment: > + This is not needed because there are no physical devices. + repair-issues: todo + docs-use-cases: done + docs-supported-devices: + status: exempt + comment: > + This integration does not support any devices. + docs-supported-functions: todo + docs-data-update: todo + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/elevenlabs/strings.json b/homeassistant/components/elevenlabs/strings.json index 16b40137090..b346f94a963 100644 --- a/homeassistant/components/elevenlabs/strings.json +++ b/homeassistant/components/elevenlabs/strings.json @@ -19,11 +19,29 @@ "init": { "data": { "voice": "Voice", - "model": "Model" + "model": "Model", + "configure_voice": "Configure advanced voice settings" }, "data_description": { "voice": "Voice to use for the TTS.", - "model": "ElevenLabs model to use. Please note that not all models support all languages equally well." + "model": "ElevenLabs model to use. Please note that not all models support all languages equally well.", + "configure_voice": "Configure advanced voice settings. Find more information in the ElevenLabs documentation." + } + }, + "voice_settings": { + "data": { + "stability": "Stability", + "similarity": "Similarity", + "optimize_streaming_latency": "Latency", + "style": "Style", + "use_speaker_boost": "Speaker boost" + }, + "data_description": { + "stability": "Stability of the generated audio. Higher values lead to less emotional audio.", + "similarity": "Similarity of the generated audio to the original voice. Higher values may result in more similar audio, but may also introduce background noise.", + "optimize_streaming_latency": "Optimize the model for streaming. 
This may reduce the quality of the generated audio.", + "style": "Style of the generated audio. Recommended to keep at 0 for most almost all use cases.", + "use_speaker_boost": "Use speaker boost to increase the similarity of the generated audio to the original voice." } } } diff --git a/homeassistant/components/elevenlabs/tts.py b/homeassistant/components/elevenlabs/tts.py index 35ba6053cd8..c96a7161b72 100644 --- a/homeassistant/components/elevenlabs/tts.py +++ b/homeassistant/components/elevenlabs/tts.py @@ -3,11 +3,12 @@ from __future__ import annotations import logging +from types import MappingProxyType from typing import Any -from elevenlabs.client import AsyncElevenLabs +from elevenlabs import AsyncElevenLabs from elevenlabs.core import ApiError -from elevenlabs.types import Model, Voice as ElevenLabsVoice +from elevenlabs.types import Model, Voice as ElevenLabsVoice, VoiceSettings from homeassistant.components.tts import ( ATTR_VOICE, @@ -15,15 +16,42 @@ from homeassistant.components.tts import ( TtsAudioType, Voice, ) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import EleventLabsConfigEntry -from .const import CONF_VOICE, DOMAIN +from .const import ( + CONF_OPTIMIZE_LATENCY, + CONF_SIMILARITY, + CONF_STABILITY, + CONF_STYLE, + CONF_USE_SPEAKER_BOOST, + CONF_VOICE, + DEFAULT_OPTIMIZE_LATENCY, + DEFAULT_SIMILARITY, + DEFAULT_STABILITY, + DEFAULT_STYLE, + DEFAULT_USE_SPEAKER_BOOST, + DOMAIN, +) _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + + +def to_voice_settings(options: MappingProxyType[str, Any]) -> VoiceSettings: + """Return voice settings.""" + return VoiceSettings( + stability=options.get(CONF_STABILITY, DEFAULT_STABILITY), + similarity_boost=options.get(CONF_SIMILARITY, DEFAULT_SIMILARITY), + style=options.get(CONF_STYLE, DEFAULT_STYLE), + use_speaker_boost=options.get( + CONF_USE_SPEAKER_BOOST, DEFAULT_USE_SPEAKER_BOOST + ), + ) async def async_setup_entry( @@ -35,6 +63,7 @@ async def async_setup_entry( client = config_entry.runtime_data.client voices = (await client.voices.get_all()).voices default_voice_id = config_entry.options[CONF_VOICE] + voice_settings = to_voice_settings(config_entry.options) async_add_entities( [ ElevenLabsTTSEntity( @@ -44,6 +73,10 @@ async def async_setup_entry( default_voice_id, config_entry.entry_id, config_entry.title, + voice_settings, + config_entry.options.get( + CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY + ), ) ] ) @@ -53,6 +86,7 @@ class ElevenLabsTTSEntity(TextToSpeechEntity): """The ElevenLabs API entity.""" _attr_supported_options = [ATTR_VOICE] + _attr_entity_category = EntityCategory.CONFIG def __init__( self, @@ -62,6 +96,8 @@ class ElevenLabsTTSEntity(TextToSpeechEntity): default_voice_id: str, entry_id: str, title: str, + voice_settings: VoiceSettings, + latency: int = 0, ) -> None: """Init ElevenLabs TTS service.""" self._client = client @@ -77,6 +113,10 @@ class ElevenLabsTTSEntity(TextToSpeechEntity): ] if voice_indices: self._voices.insert(0, self._voices.pop(voice_indices[0])) + self._voice_settings = voice_settings + self._latency = latency + + # Entity attributes self._attr_unique_id = entry_id self._attr_name = title self._attr_device_info = DeviceInfo( @@ -100,11 +140,13 @@ class ElevenLabsTTSEntity(TextToSpeechEntity): """Load tts audio file 
from the engine.""" _LOGGER.debug("Getting TTS audio for %s", message) _LOGGER.debug("Options: %s", options) - voice_id = options[ATTR_VOICE] + voice_id = options.get(ATTR_VOICE, self._default_voice_id) try: audio = await self._client.generate( text=message, voice=voice_id, + optimize_streaming_latency=self._latency, + voice_settings=self._voice_settings, model=self._model.model_id, ) bytes_combined = b"".join([byte_seg async for byte_seg in audio]) diff --git a/homeassistant/components/elgato/__init__.py b/homeassistant/components/elgato/__init__.py index 2d8446c3b76..1b1ff9948c9 100644 --- a/homeassistant/components/elgato/__init__.py +++ b/homeassistant/components/elgato/__init__.py @@ -1,17 +1,14 @@ """Support for Elgato Lights.""" -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .coordinator import ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator PLATFORMS = [Platform.BUTTON, Platform.LIGHT, Platform.SENSOR, Platform.SWITCH] -type ElgatorConfigEntry = ConfigEntry[ElgatoDataUpdateCoordinator] - -async def async_setup_entry(hass: HomeAssistant, entry: ElgatorConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ElgatoConfigEntry) -> bool: """Set up Elgato Light from a config entry.""" coordinator = ElgatoDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() @@ -22,6 +19,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElgatorConfigEntry) -> b return True -async def async_unload_entry(hass: HomeAssistant, entry: ElgatorConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ElgatoConfigEntry) -> bool: """Unload Elgato Light config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/elgato/button.py b/homeassistant/components/elgato/button.py index aefff0b750b..505eff36b44 100644 --- a/homeassistant/components/elgato/button.py +++ b/homeassistant/components/elgato/button.py @@ -18,10 +18,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ElgatorConfigEntry -from .coordinator import ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class ElgatoButtonEntityDescription(ButtonEntityDescription): @@ -48,7 +49,7 @@ BUTTONS = [ async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato button based on a config entry.""" diff --git a/homeassistant/components/elgato/config_flow.py b/homeassistant/components/elgato/config_flow.py index 5329fcee90a..e20afc73a2d 100644 --- a/homeassistant/components/elgato/config_flow.py +++ b/homeassistant/components/elgato/config_flow.py @@ -9,7 +9,7 @@ import voluptuous as vol from homeassistant.components import onboarding, zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -34,7 +34,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): return self._async_show_setup_form() self.host = user_input[CONF_HOST] - self.port = user_input[CONF_PORT] try: await self._get_elgato_serial_number(raise_on_progress=False) @@ -49,7 +48,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): """Handle zeroconf discovery.""" self.host = discovery_info.host self.mac = discovery_info.properties.get("id") - self.port = discovery_info.port or 9123 try: await self._get_elgato_serial_number() @@ -81,7 +79,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema( { vol.Required(CONF_HOST): str, - vol.Optional(CONF_PORT, default=9123): int, } ), errors=errors or {}, @@ -93,7 +90,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): title=self.serial_number, data={ CONF_HOST: self.host, - CONF_PORT: self.port, CONF_MAC: self.mac, }, ) @@ -103,7 +99,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): session = async_get_clientsession(self.hass) elgato = Elgato( host=self.host, - port=self.port, session=session, ) info = await elgato.info() @@ -113,7 +108,7 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): info.serial_number, raise_on_progress=raise_on_progress ) self._abort_if_unique_id_configured( - updates={CONF_HOST: self.host, CONF_PORT: self.port, CONF_MAC: self.mac} + updates={CONF_HOST: self.host, CONF_MAC: self.mac} ) self.serial_number = info.serial_number diff --git a/homeassistant/components/elgato/coordinator.py b/homeassistant/components/elgato/coordinator.py index c2bc79491a1..5e1ba0a6494 100644 --- a/homeassistant/components/elgato/coordinator.py +++ b/homeassistant/components/elgato/coordinator.py @@ -5,13 +5,15 @@ from dataclasses import dataclass from elgato import BatteryInfo, Elgato, ElgatoConnectionError, Info, Settings, State from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, LOGGER, SCAN_INTERVAL +type ElgatoConfigEntry = ConfigEntry[ElgatoDataUpdateCoordinator] + @dataclass class ElgatoData: @@ -26,20 +28,20 @@ 
class ElgatoData: class ElgatoDataUpdateCoordinator(DataUpdateCoordinator[ElgatoData]): """Class to manage fetching Elgato data.""" - config_entry: ConfigEntry + config_entry: ElgatoConfigEntry has_battery: bool | None = None - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: ElgatoConfigEntry) -> None: """Initialize the coordinator.""" self.config_entry = entry self.client = Elgato( entry.data[CONF_HOST], - port=entry.data[CONF_PORT], session=async_get_clientsession(hass), ) super().__init__( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_{entry.data[CONF_HOST]}", update_interval=SCAN_INTERVAL, ) diff --git a/homeassistant/components/elgato/diagnostics.py b/homeassistant/components/elgato/diagnostics.py index ac3ea0a155d..4e1b9d4cfdd 100644 --- a/homeassistant/components/elgato/diagnostics.py +++ b/homeassistant/components/elgato/diagnostics.py @@ -6,11 +6,11 @@ from typing import Any from homeassistant.core import HomeAssistant -from . import ElgatorConfigEntry +from .coordinator import ElgatoConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ElgatorConfigEntry + hass: HomeAssistant, entry: ElgatoConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" coordinator = entry.runtime_data diff --git a/homeassistant/components/elgato/icons.json b/homeassistant/components/elgato/icons.json index 1b5eaf3763a..d2c286594c7 100644 --- a/homeassistant/components/elgato/icons.json +++ b/homeassistant/components/elgato/icons.json @@ -10,6 +10,8 @@ } }, "services": { - "identify": "mdi:crosshairs-question" + "identify": { + "service": "mdi:crosshairs-question" + } } } diff --git a/homeassistant/components/elgato/light.py b/homeassistant/components/elgato/light.py index a62a26f21d3..990a0606fce 100644 --- a/homeassistant/components/elgato/light.py +++ b/homeassistant/components/elgato/light.py @@ -8,7 +8,7 @@ from elgato import ElgatoError from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -19,10 +19,10 @@ from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, async_get_current_platform, ) +from homeassistant.util import color as color_util -from . 
import ElgatorConfigEntry from .const import SERVICE_IDENTIFY -from .coordinator import ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity PARALLEL_UPDATES = 1 @@ -30,7 +30,7 @@ PARALLEL_UPDATES = 1 async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato Light based on a config entry.""" @@ -49,8 +49,8 @@ class ElgatoLight(ElgatoEntity, LightEntity): """Defines an Elgato Light.""" _attr_name = None - _attr_min_mireds = 143 - _attr_max_mireds = 344 + _attr_min_color_temp_kelvin = 2900 # 344 Mireds + _attr_max_color_temp_kelvin = 7000 # 143 Mireds def __init__(self, coordinator: ElgatoDataUpdateCoordinator) -> None: """Initialize Elgato Light.""" @@ -69,8 +69,8 @@ class ElgatoLight(ElgatoEntity, LightEntity): or self.coordinator.data.state.hue is not None ): self._attr_supported_color_modes = {ColorMode.COLOR_TEMP, ColorMode.HS} - self._attr_min_mireds = 153 - self._attr_max_mireds = 285 + self._attr_min_color_temp_kelvin = 3500 # 285 Mireds + self._attr_max_color_temp_kelvin = 6500 # 153 Mireds @property def brightness(self) -> int | None: @@ -78,9 +78,11 @@ class ElgatoLight(ElgatoEntity, LightEntity): return round((self.coordinator.data.state.brightness * 255) / 100) @property - def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" - return self.coordinator.data.state.temperature + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + if (mired_temperature := self.coordinator.data.state.temperature) is None: + return None + return color_util.color_temperature_mired_to_kelvin(mired_temperature) @property def color_mode(self) -> str | None: @@ -116,7 +118,7 @@ class ElgatoLight(ElgatoEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" - temperature = kwargs.get(ATTR_COLOR_TEMP) + temperature_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) hue = None saturation = None @@ -133,12 +135,18 @@ class ElgatoLight(ElgatoEntity, LightEntity): if ( brightness and ATTR_HS_COLOR not in kwargs - and ATTR_COLOR_TEMP not in kwargs + and ATTR_COLOR_TEMP_KELVIN not in kwargs and self.supported_color_modes and ColorMode.HS in self.supported_color_modes and self.color_mode == ColorMode.COLOR_TEMP ): - temperature = self.color_temp + temperature_kelvin = self.color_temp_kelvin + + temperature = ( + None + if temperature_kelvin is None + else color_util.color_temperature_kelvin_to_mired(temperature_kelvin) + ) try: await self.coordinator.client.light( diff --git a/homeassistant/components/elgato/manifest.json b/homeassistant/components/elgato/manifest.json index c68902560b9..734ad5ec930 100644 --- a/homeassistant/components/elgato/manifest.json +++ b/homeassistant/components/elgato/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/elgato", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "platinum", "requirements": ["elgato==5.1.2"], "zeroconf": ["_elg._tcp.local."] } diff --git a/homeassistant/components/elgato/quality_scale.yaml b/homeassistant/components/elgato/quality_scale.yaml new file mode 100644 index 00000000000..531f0447f70 --- /dev/null +++ b/homeassistant/components/elgato/quality_scale.yaml @@ -0,0 +1,82 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + 
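
Editor's note on the Elgato light change above, which moves from mired-based to kelvin-based color temperature attributes while keeping the old mired limits as comments (344/143 and 285/153 mireds). The standalone check below uses the standard reciprocal relation, not Home Assistant's color utilities, to confirm those numbers line up.

# Illustration of the mired <-> kelvin relation behind the Elgato light change
# above (kelvin ~= 1_000_000 / mired). This is the textbook formula, not the
# homeassistant.util.color implementation.

def mired_to_kelvin(mired: float) -> int:
    """Approximate color temperature in kelvin for a mired value."""
    return round(1_000_000 / mired)


def kelvin_to_mired(kelvin: float) -> int:
    """Approximate mired value for a color temperature in kelvin."""
    return round(1_000_000 / kelvin)


# 344 and 143 mireds come out near 2907 K and 6993 K, which the integration
# advertises as 2900 K and 7000 K; 285 and 153 mireds (~3509 K and ~6536 K)
# become the 3500 K .. 6500 K range used for the HS-capable lights.
for mired in (344, 143, 285, 153):
    print(f"{mired} mired -> {mired_to_kelvin(mired)} K")
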
config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: todo + comment: | + The integration doesn't update the device info based on DHCP discovery + of known existing devices. + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: + status: todo + comment: | + Device are documented, but some are missing. For example, the their pro + strip is supported as well. + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/elgato/sensor.py b/homeassistant/components/elgato/sensor.py index f794d26cf7f..529d2f7c76e 100644 --- a/homeassistant/components/elgato/sensor.py +++ b/homeassistant/components/elgato/sensor.py @@ -21,10 +21,12 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ElgatorConfigEntry -from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class ElgatoSensorEntityDescription(SensorEntityDescription): @@ -101,7 +103,7 @@ SENSORS = [ async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato sensor based on a config entry.""" diff --git a/homeassistant/components/elgato/strings.json b/homeassistant/components/elgato/strings.json index 6e1031c8ddf..727b8ee7024 100644 --- a/homeassistant/components/elgato/strings.json +++ b/homeassistant/components/elgato/strings.json @@ -5,8 +5,7 @@ "user": { "description": "Set up your Elgato Light to integrate with Home Assistant.", "data": { - "host": "[%key:common::config_flow::data::host%]", - "port": "[%key:common::config_flow::data::port%]" + "host": "[%key:common::config_flow::data::host%]" }, "data_description": { "host": "The hostname or IP address of your Elgato device." diff --git a/homeassistant/components/elgato/switch.py b/homeassistant/components/elgato/switch.py index fe177616034..3b2420b0ace 100644 --- a/homeassistant/components/elgato/switch.py +++ b/homeassistant/components/elgato/switch.py @@ -14,10 +14,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ElgatorConfigEntry -from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class ElgatoSwitchEntityDescription(SwitchEntityDescription): @@ -52,7 +53,7 @@ SWITCHES = [ async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato switches based on a config entry.""" diff --git a/homeassistant/components/eliqonline/manifest.json b/homeassistant/components/eliqonline/manifest.json index 78fd62fbd33..70f2cd8a675 100644 --- a/homeassistant/components/eliqonline/manifest.json +++ b/homeassistant/components/eliqonline/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/eliqonline", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["eliqonline==1.2.2"] } diff --git a/homeassistant/components/elkm1/__init__.py b/homeassistant/components/elkm1/__init__.py index b66a4ce2ed8..34a35fbeb09 100644 --- a/homeassistant/components/elkm1/__init__.py +++ b/homeassistant/components/elkm1/__init__.py @@ -3,8 +3,6 @@ from __future__ import annotations import asyncio -from collections.abc import Iterable -from enum import Enum import logging import re from types import MappingProxyType @@ -17,7 +15,6 @@ import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( - ATTR_CONNECTIONS, CONF_ENABLED, CONF_EXCLUDE, CONF_HOST, @@ -33,8 +30,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from 
homeassistant.helpers import config_validation as cv -from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo -from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util @@ -430,126 +425,3 @@ def _create_elk_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, "set_time", _set_time_service, SET_TIME_SERVICE_SCHEMA ) - - -def create_elk_entities( - elk_data: ELKM1Data, - elk_elements: Iterable[Element], - element_type: str, - class_: Any, - entities: list[ElkEntity], -) -> list[ElkEntity] | None: - """Create the ElkM1 devices of a particular class.""" - auto_configure = elk_data.auto_configure - - if not auto_configure and not elk_data.config[element_type]["enabled"]: - return None - - elk = elk_data.elk - _LOGGER.debug("Creating elk entities for %s", elk) - - for element in elk_elements: - if auto_configure: - if not element.configured: - continue - # Only check the included list if auto configure is not - elif not elk_data.config[element_type]["included"][element.index]: - continue - - entities.append(class_(element, elk, elk_data)) - return entities - - -class ElkEntity(Entity): - """Base class for all Elk entities.""" - - _attr_has_entity_name = True - _attr_should_poll = False - - def __init__(self, element: Element, elk: Elk, elk_data: ELKM1Data) -> None: - """Initialize the base of all Elk devices.""" - self._elk = elk - self._element = element - self._mac = elk_data.mac - self._prefix = elk_data.prefix - self._temperature_unit: str = elk_data.config["temperature_unit"] - # unique_id starts with elkm1_ iff there is no prefix - # it starts with elkm1m_{prefix} iff there is a prefix - # this is to avoid a conflict between - # prefix=foo, name=bar (which would be elkm1_foo_bar) - # - and - - # prefix="", name="foo bar" (which would be elkm1_foo_bar also) - # we could have used elkm1__foo_bar for the latter, but that - # would have been a breaking change - if self._prefix != "": - uid_start = f"elkm1m_{self._prefix}" - else: - uid_start = "elkm1" - self._unique_id = f"{uid_start}_{self._element.default_name('_')}".lower() - self._attr_name = element.name - - @property - def unique_id(self) -> str: - """Return unique id of the element.""" - return self._unique_id - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return the default attributes of the element.""" - dict_as_str = {} - for key, val in self._element.as_dict().items(): - dict_as_str[key] = val.value if isinstance(val, Enum) else val - return {**dict_as_str, **self.initial_attrs()} - - @property - def available(self) -> bool: - """Is the entity available to be updated.""" - return self._elk.is_connected() - - def initial_attrs(self) -> dict[str, Any]: - """Return the underlying element's attributes as a dict.""" - return {"index": self._element.index + 1} - - def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None: - pass - - @callback - def _element_callback(self, element: Element, changeset: dict[str, Any]) -> None: - """Handle callback from an Elk element that has changed.""" - self._element_changed(element, changeset) - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Register callback for ElkM1 changes and update entity state.""" - self._element.add_callback(self._element_callback) - self._element_callback(self._element, {}) - - @property - def 
device_info(self) -> DeviceInfo: - """Device info connecting via the ElkM1 system.""" - return DeviceInfo( - name=self._element.name, - identifiers={(DOMAIN, self._unique_id)}, - via_device=(DOMAIN, f"{self._prefix}_system"), - ) - - -class ElkAttachedEntity(ElkEntity): - """An elk entity that is attached to the elk system.""" - - @property - def device_info(self) -> DeviceInfo: - """Device info for the underlying ElkM1 system.""" - device_name = "ElkM1" - if self._prefix: - device_name += f" {self._prefix}" - device_info = DeviceInfo( - identifiers={(DOMAIN, f"{self._prefix}_system")}, - manufacturer="ELK Products, Inc.", - model="M1", - name=device_name, - sw_version=self._elk.panel.elkm1_version, - ) - if self._mac: - device_info[ATTR_CONNECTIONS] = {(CONNECTION_NETWORK_MAC, self._mac)} - return device_info diff --git a/homeassistant/components/elkm1/alarm_control_panel.py b/homeassistant/components/elkm1/alarm_control_panel.py index b24d0f869c6..f1ecf626263 100644 --- a/homeassistant/components/elkm1/alarm_control_panel.py +++ b/homeassistant/components/elkm1/alarm_control_panel.py @@ -15,17 +15,9 @@ from homeassistant.components.alarm_control_panel import ( ATTR_CHANGED_BY, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv @@ -33,13 +25,14 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import VolDictType -from . import ElkAttachedEntity, ElkEntity, ElkM1ConfigEntry, create_elk_entities +from . 
import ElkM1ConfigEntry from .const import ( ATTR_CHANGED_BY_ID, ATTR_CHANGED_BY_KEYPAD, ATTR_CHANGED_BY_TIME, ELK_USER_CODE_SERVICE_SCHEMA, ) +from .entity import ElkAttachedEntity, ElkEntity, create_elk_entities from .models import ELKM1Data DISPLAY_MESSAGE_SERVICE_SCHEMA: VolDictType = { @@ -124,7 +117,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity): self._changed_by_time: str | None = None self._changed_by_id: int | None = None self._changed_by: str | None = None - self._state: str | None = None + self._state: AlarmControlPanelState | None = None async def async_added_to_hass(self) -> None: """Register callback for ElkM1 changes.""" @@ -176,7 +169,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity): return CodeFormat.NUMBER @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the element.""" return self._state @@ -206,23 +199,25 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity): def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None: elk_state_to_hass_state = { - ArmedStatus.DISARMED: STATE_ALARM_DISARMED, - ArmedStatus.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - ArmedStatus.ARMED_STAY: STATE_ALARM_ARMED_HOME, - ArmedStatus.ARMED_STAY_INSTANT: STATE_ALARM_ARMED_HOME, - ArmedStatus.ARMED_TO_NIGHT: STATE_ALARM_ARMED_NIGHT, - ArmedStatus.ARMED_TO_NIGHT_INSTANT: STATE_ALARM_ARMED_NIGHT, - ArmedStatus.ARMED_TO_VACATION: STATE_ALARM_ARMED_AWAY, + ArmedStatus.DISARMED: AlarmControlPanelState.DISARMED, + ArmedStatus.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + ArmedStatus.ARMED_STAY: AlarmControlPanelState.ARMED_HOME, + ArmedStatus.ARMED_STAY_INSTANT: AlarmControlPanelState.ARMED_HOME, + ArmedStatus.ARMED_TO_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + ArmedStatus.ARMED_TO_NIGHT_INSTANT: AlarmControlPanelState.ARMED_NIGHT, + ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_AWAY, } if self._element.alarm_state is None: self._state = None elif self._element.in_alarm_state(): # Area is in alarm state - self._state = STATE_ALARM_TRIGGERED + self._state = AlarmControlPanelState.TRIGGERED elif self._entry_exit_timer_is_running(): self._state = ( - STATE_ALARM_ARMING if self._element.is_exit else STATE_ALARM_PENDING + AlarmControlPanelState.ARMING + if self._element.is_exit + else AlarmControlPanelState.PENDING ) elif self._element.armed_status is not None: self._state = elk_state_to_hass_state[self._element.armed_status] diff --git a/homeassistant/components/elkm1/binary_sensor.py b/homeassistant/components/elkm1/binary_sensor.py index 171e9968ce6..854f8c56fb8 100644 --- a/homeassistant/components/elkm1/binary_sensor.py +++ b/homeassistant/components/elkm1/binary_sensor.py @@ -12,7 +12,8 @@ from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ElkAttachedEntity, ElkEntity, ElkM1ConfigEntry +from . 
import ElkM1ConfigEntry +from .entity import ElkAttachedEntity, ElkEntity async def async_setup_entry( diff --git a/homeassistant/components/elkm1/climate.py b/homeassistant/components/elkm1/climate.py index 6281cca8592..1448acc6079 100644 --- a/homeassistant/components/elkm1/climate.py +++ b/homeassistant/components/elkm1/climate.py @@ -20,8 +20,11 @@ from homeassistant.components.climate import ( from homeassistant.const import PRECISION_WHOLE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from . import ElkEntity, ElkM1ConfigEntry, create_elk_entities +from . import ElkM1ConfigEntry +from .const import DOMAIN +from .entity import ElkEntity, create_elk_entities SUPPORT_HVAC = [ HVACMode.OFF, @@ -87,7 +90,6 @@ class ElkThermostat(ElkEntity, ClimateEntity): _attr_target_temperature_step = 1 _attr_fan_modes = [FAN_AUTO, FAN_ON] _element: Thermostat - _enable_turn_on_off_backwards_compatibility = False @property def temperature_unit(self) -> str: @@ -151,10 +153,30 @@ class ElkThermostat(ElkEntity, ClimateEntity): async def async_turn_aux_heat_on(self) -> None: """Turn auxiliary heater on.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) self._elk_set(ThermostatMode.EMERGENCY_HEAT, None) async def async_turn_aux_heat_off(self) -> None: """Turn auxiliary heater off.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) self._elk_set(ThermostatMode.HEAT, None) async def async_set_fan_mode(self, fan_mode: str) -> None: diff --git a/homeassistant/components/elkm1/config_flow.py b/homeassistant/components/elkm1/config_flow.py index 4ab8d1fe181..a3dd1d46f8b 100644 --- a/homeassistant/components/elkm1/config_flow.py +++ b/homeassistant/components/elkm1/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import Any, Self from elkm1_lib.discovery import ElkSystem from elkm1_lib.elk import Elk @@ -132,6 +132,8 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + host: str | None = None + def __init__(self) -> None: """Initialize the elkm1 config flow.""" self._discovered_device: ElkSystem | None = None @@ -176,10 +178,9 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN): if async_update_entry_from_discovery(self.hass, entry, device): self.hass.config_entries.async_schedule_reload(entry.entry_id) return self.async_abort(reason="already_configured") - self.context[CONF_HOST] = host - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == host: - return self.async_abort(reason="already_in_progress") + self.host = host + if self.hass.config_entries.flow.async_has_matching_flow(self): + return self.async_abort(reason="already_in_progress") # Handled ignored case since _async_current_entries # is called with include_ignore=False self._abort_if_unique_id_configured() @@ -190,6 +191,10 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="cannot_connect") return await self.async_step_discovery_confirm() + def is_matching(self, other_flow: Self) -> bool: + """Return True if 
other_flow is matching this flow.""" + return other_flow.host == self.host + async def async_step_discovery_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -335,10 +340,10 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" _LOGGER.debug("Elk is importing from yaml") - url = _make_url_from_data(user_input) + url = _make_url_from_data(import_data) if self._url_already_configured(url): return self.async_abort(reason="address_already_configured") @@ -357,7 +362,7 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN): ) self._abort_if_unique_id_configured() - errors, result = await self._async_create_or_error(user_input, True) + errors, result = await self._async_create_or_error(import_data, True) if errors: return self.async_abort(reason=list(errors.values())[0]) assert result is not None diff --git a/homeassistant/components/elkm1/entity.py b/homeassistant/components/elkm1/entity.py new file mode 100644 index 00000000000..d9967d93967 --- /dev/null +++ b/homeassistant/components/elkm1/entity.py @@ -0,0 +1,144 @@ +"""Support the ElkM1 Gold and ElkM1 EZ8 alarm/integration panels.""" + +from __future__ import annotations + +from collections.abc import Iterable +from enum import Enum +import logging +from typing import Any + +from elkm1_lib.elements import Element +from elkm1_lib.elk import Elk + +from homeassistant.const import ATTR_CONNECTIONS +from homeassistant.core import callback +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN +from .models import ELKM1Data + +_LOGGER = logging.getLogger(__name__) + + +def create_elk_entities( + elk_data: ELKM1Data, + elk_elements: Iterable[Element], + element_type: str, + class_: Any, + entities: list[ElkEntity], +) -> list[ElkEntity] | None: + """Create the ElkM1 devices of a particular class.""" + auto_configure = elk_data.auto_configure + + if not auto_configure and not elk_data.config[element_type]["enabled"]: + return None + + elk = elk_data.elk + _LOGGER.debug("Creating elk entities for %s", elk) + + for element in elk_elements: + if auto_configure: + if not element.configured: + continue + # Only check the included list if auto configure is not + elif not elk_data.config[element_type]["included"][element.index]: + continue + + entities.append(class_(element, elk, elk_data)) + return entities + + +class ElkEntity(Entity): + """Base class for all Elk entities.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__(self, element: Element, elk: Elk, elk_data: ELKM1Data) -> None: + """Initialize the base of all Elk devices.""" + self._elk = elk + self._element = element + self._mac = elk_data.mac + self._prefix = elk_data.prefix + self._temperature_unit: str = elk_data.config["temperature_unit"] + # unique_id starts with elkm1_ iff there is no prefix + # it starts with elkm1m_{prefix} iff there is a prefix + # this is to avoid a conflict between + # prefix=foo, name=bar (which would be elkm1_foo_bar) + # - and - + # prefix="", name="foo bar" (which would be elkm1_foo_bar also) + # we could have used elkm1__foo_bar for the latter, but that + # would have been a breaking change + if self._prefix != "": + uid_start = f"elkm1m_{self._prefix}" + else: + uid_start = "elkm1" + 
self._unique_id = f"{uid_start}_{self._element.default_name('_')}".lower() + self._attr_name = element.name + + @property + def unique_id(self) -> str: + """Return unique id of the element.""" + return self._unique_id + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the default attributes of the element.""" + dict_as_str = {} + for key, val in self._element.as_dict().items(): + dict_as_str[key] = val.value if isinstance(val, Enum) else val + return {**dict_as_str, **self.initial_attrs()} + + @property + def available(self) -> bool: + """Is the entity available to be updated.""" + return self._elk.is_connected() + + def initial_attrs(self) -> dict[str, Any]: + """Return the underlying element's attributes as a dict.""" + return {"index": self._element.index + 1} + + def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None: + pass + + @callback + def _element_callback(self, element: Element, changeset: dict[str, Any]) -> None: + """Handle callback from an Elk element that has changed.""" + self._element_changed(element, changeset) + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Register callback for ElkM1 changes and update entity state.""" + self._element.add_callback(self._element_callback) + self._element_callback(self._element, {}) + + @property + def device_info(self) -> DeviceInfo: + """Device info connecting via the ElkM1 system.""" + return DeviceInfo( + name=self._element.name, + identifiers={(DOMAIN, self._unique_id)}, + via_device=(DOMAIN, f"{self._prefix}_system"), + ) + + +class ElkAttachedEntity(ElkEntity): + """An elk entity that is attached to the elk system.""" + + @property + def device_info(self) -> DeviceInfo: + """Device info for the underlying ElkM1 system.""" + device_name = "ElkM1" + if self._prefix: + device_name += f" {self._prefix}" + device_info = DeviceInfo( + identifiers={(DOMAIN, f"{self._prefix}_system")}, + manufacturer="ELK Products, Inc.", + model="M1", + name=device_name, + sw_version=self._elk.panel.elkm1_version, + ) + if self._mac: + device_info[ATTR_CONNECTIONS] = {(CONNECTION_NETWORK_MAC, self._mac)} + return device_info diff --git a/homeassistant/components/elkm1/icons.json b/homeassistant/components/elkm1/icons.json index 3bb9ea8c87d..54827e4b6ef 100644 --- a/homeassistant/components/elkm1/icons.json +++ b/homeassistant/components/elkm1/icons.json @@ -10,18 +10,44 @@ } }, "services": { - "alarm_bypass": "mdi:shield-off", - "alarm_clear_bypass": "mdi:shield", - "alarm_arm_home_instant": "mdi:shield-lock", - "alarm_arm_night_instant": "mdi:shield-moon", - "alarm_arm_vacation": "mdi:beach", - "alarm_display_message": "mdi:message-alert", - "set_time": "mdi:clock-edit", - "speak_phrase": "mdi:message-processing", - "speak_word": "mdi:message-minus", - "sensor_counter_refresh": "mdi:refresh", - "sensor_counter_set": "mdi:counter", - "sensor_zone_bypass": "mdi:shield-off", - "sensor_zone_trigger": "mdi:shield" + "alarm_bypass": { + "service": "mdi:shield-off" + }, + "alarm_clear_bypass": { + "service": "mdi:shield" + }, + "alarm_arm_home_instant": { + "service": "mdi:shield-lock" + }, + "alarm_arm_night_instant": { + "service": "mdi:shield-moon" + }, + "alarm_arm_vacation": { + "service": "mdi:beach" + }, + "alarm_display_message": { + "service": "mdi:message-alert" + }, + "set_time": { + "service": "mdi:clock-edit" + }, + "speak_phrase": { + "service": "mdi:message-processing" + }, + "speak_word": { + "service": "mdi:message-minus" + }, + "sensor_counter_refresh": { 
+ "service": "mdi:refresh" + }, + "sensor_counter_set": { + "service": "mdi:counter" + }, + "sensor_zone_bypass": { + "service": "mdi:shield-off" + }, + "sensor_zone_trigger": { + "service": "mdi:shield" + } } } diff --git a/homeassistant/components/elkm1/light.py b/homeassistant/components/elkm1/light.py index 17d525f6ddc..c041c9c9d65 100644 --- a/homeassistant/components/elkm1/light.py +++ b/homeassistant/components/elkm1/light.py @@ -12,7 +12,8 @@ from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEnti from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ElkEntity, ElkM1ConfigEntry, create_elk_entities +from . import ElkM1ConfigEntry +from .entity import ElkEntity, create_elk_entities from .models import ELKM1Data diff --git a/homeassistant/components/elkm1/manifest.json b/homeassistant/components/elkm1/manifest.json index 5edab8463f7..7822307e12e 100644 --- a/homeassistant/components/elkm1/manifest.json +++ b/homeassistant/components/elkm1/manifest.json @@ -15,5 +15,5 @@ "documentation": "https://www.home-assistant.io/integrations/elkm1", "iot_class": "local_push", "loggers": ["elkm1_lib"], - "requirements": ["elkm1-lib==2.2.7"] + "requirements": ["elkm1-lib==2.2.10"] } diff --git a/homeassistant/components/elkm1/scene.py b/homeassistant/components/elkm1/scene.py index e4b738c9dbd..d8a1d83f326 100644 --- a/homeassistant/components/elkm1/scene.py +++ b/homeassistant/components/elkm1/scene.py @@ -10,7 +10,8 @@ from homeassistant.components.scene import Scene from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ElkAttachedEntity, ElkEntity, ElkM1ConfigEntry, create_elk_entities +from . import ElkM1ConfigEntry +from .entity import ElkAttachedEntity, ElkEntity, create_elk_entities async def async_setup_entry( diff --git a/homeassistant/components/elkm1/sensor.py b/homeassistant/components/elkm1/sensor.py index 16f877719a7..e0231c86699 100644 --- a/homeassistant/components/elkm1/sensor.py +++ b/homeassistant/components/elkm1/sensor.py @@ -22,8 +22,9 @@ from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType -from . import ElkAttachedEntity, ElkEntity, ElkM1ConfigEntry, create_elk_entities +from . import ElkM1ConfigEntry from .const import ATTR_VALUE, ELK_USER_CODE_SERVICE_SCHEMA +from .entity import ElkAttachedEntity, ElkEntity, create_elk_entities SERVICE_SENSOR_COUNTER_REFRESH = "sensor_counter_refresh" SERVICE_SENSOR_COUNTER_SET = "sensor_counter_set" diff --git a/homeassistant/components/elkm1/strings.json b/homeassistant/components/elkm1/strings.json index c854307dd92..bf02d727280 100644 --- a/homeassistant/components/elkm1/strings.json +++ b/homeassistant/components/elkm1/strings.json @@ -68,7 +68,7 @@ } }, "alarm_arm_home_instant": { - "name": "Alarm are home instant", + "name": "Alarm arm home instant", "description": "Arms the ElkM1 in home instant mode.", "fields": { "code": { @@ -189,5 +189,18 @@ "name": "Sensor zone trigger", "description": "Triggers zone." } + }, + "issues": { + "migrate_aux_heat": { + "title": "Migration of Elk-M1 set_aux_heat action", + "fix_flow": { + "step": { + "confirm": { + "description": "The Elk-M1 `set_aux_heat` action has been migrated. 
A new emergency heat switch entity is available for each thermostat.\n\nUpdate any automations to use the new emergency heat switch entity. When this is done, select **Submit** to fix this issue.", + "title": "[%key:component::elkm1::issues::migrate_aux_heat::title%]" + } + } + } + } } } diff --git a/homeassistant/components/elkm1/switch.py b/homeassistant/components/elkm1/switch.py index f4820f57b3d..3e0f4849518 100644 --- a/homeassistant/components/elkm1/switch.py +++ b/homeassistant/components/elkm1/switch.py @@ -4,13 +4,19 @@ from __future__ import annotations from typing import Any +from elkm1_lib.const import ThermostatMode, ThermostatSetting +from elkm1_lib.elements import Element +from elkm1_lib.elk import Elk from elkm1_lib.outputs import Output +from elkm1_lib.thermostats import Thermostat from homeassistant.components.switch import SwitchEntity from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ElkAttachedEntity, ElkEntity, ElkM1ConfigEntry, create_elk_entities +from . import ElkM1ConfigEntry +from .entity import ElkAttachedEntity, ElkEntity, create_elk_entities +from .models import ELKM1Data async def async_setup_entry( @@ -23,6 +29,9 @@ async def async_setup_entry( elk = elk_data.elk entities: list[ElkEntity] = [] create_elk_entities(elk_data, elk.outputs, "output", ElkOutput, entities) + create_elk_entities( + elk_data, elk.thermostats, "thermostat", ElkThermostatEMHeat, entities + ) async_add_entities(entities) @@ -43,3 +52,32 @@ class ElkOutput(ElkAttachedEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the output.""" self._element.turn_off() + + +class ElkThermostatEMHeat(ElkEntity, SwitchEntity): + """Elk Thermostat emergency heat as switch.""" + + _element: Thermostat + + def __init__(self, element: Element, elk: Elk, elk_data: ELKM1Data) -> None: + """Initialize the emergency heat switch.""" + super().__init__(element, elk, elk_data) + self._unique_id = f"{self._unique_id}emheat" + self._attr_name = f"{element.name} emergency heat" + + @property + def is_on(self) -> bool: + """Get the current emergency heat status.""" + return self._element.mode == ThermostatMode.EMERGENCY_HEAT + + def _elk_set(self, mode: ThermostatMode) -> None: + """Set the thermostat mode.""" + self._element.set(ThermostatSetting.MODE, mode) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on emergency heat.""" + self._elk_set(ThermostatMode.EMERGENCY_HEAT) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off emergency heat by returning to normal heat mode.""" + self._elk_set(ThermostatMode.HEAT) diff --git a/homeassistant/components/elmax/alarm_control_panel.py b/homeassistant/components/elmax/alarm_control_panel.py index 61d13704641..841b94a3d72 100644 --- a/homeassistant/components/elmax/alarm_control_panel.py +++ b/homeassistant/components/elmax/alarm_control_panel.py @@ -10,24 +10,17 @@ from elmax_api.model.panel import PanelStatus from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError, InvalidStateError from homeassistant.helpers.entity_platform 
import AddEntitiesCallback -from homeassistant.helpers.typing import StateType -from .common import ElmaxEntity from .const import DOMAIN from .coordinator import ElmaxCoordinator +from .entity import ElmaxEntity async def async_setup_entry( @@ -74,16 +67,16 @@ class ElmaxArea(ElmaxEntity, AlarmControlPanelEntity): _attr_code_arm_required = False _attr_has_entity_name = True _attr_supported_features = AlarmControlPanelEntityFeature.ARM_AWAY - _pending_state: str | None = None + _pending_state: AlarmControlPanelState | None = None async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - if self._attr_state == AlarmStatus.NOT_ARMED_NOT_ARMABLE: + if self._attr_alarm_state == AlarmStatus.NOT_ARMED_NOT_ARMABLE: raise InvalidStateError( f"Cannot arm {self.name}: please check for open windows/doors first" ) - self._pending_state = STATE_ALARM_ARMING + self._pending_state = AlarmControlPanelState.ARMING self.async_write_ha_state() try: @@ -107,7 +100,7 @@ class ElmaxArea(ElmaxEntity, AlarmControlPanelEntity): if code is None or code == "": raise ValueError("Please input the disarm code.") - self._pending_state = STATE_ALARM_DISARMING + self._pending_state = AlarmControlPanelState.DISARMING self.async_write_ha_state() try: @@ -130,7 +123,7 @@ class ElmaxArea(ElmaxEntity, AlarmControlPanelEntity): await self.coordinator.async_refresh() @property - def state(self) -> StateType: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the entity.""" if self._pending_state is not None: return self._pending_state @@ -151,10 +144,10 @@ class ElmaxArea(ElmaxEntity, AlarmControlPanelEntity): ALARM_STATE_TO_HA = { - AlarmArmStatus.ARMED_TOTALLY: STATE_ALARM_ARMED_AWAY, - AlarmArmStatus.ARMED_P1_P2: STATE_ALARM_ARMED_AWAY, - AlarmArmStatus.ARMED_P2: STATE_ALARM_ARMED_AWAY, - AlarmArmStatus.ARMED_P1: STATE_ALARM_ARMED_AWAY, - AlarmArmStatus.NOT_ARMED: STATE_ALARM_DISARMED, - AlarmStatus.TRIGGERED: STATE_ALARM_TRIGGERED, + AlarmArmStatus.ARMED_TOTALLY: AlarmControlPanelState.ARMED_AWAY, + AlarmArmStatus.ARMED_P1_P2: AlarmControlPanelState.ARMED_AWAY, + AlarmArmStatus.ARMED_P2: AlarmControlPanelState.ARMED_AWAY, + AlarmArmStatus.ARMED_P1: AlarmControlPanelState.ARMED_AWAY, + AlarmArmStatus.NOT_ARMED: AlarmControlPanelState.DISARMED, + AlarmStatus.TRIGGERED: AlarmControlPanelState.TRIGGERED, } diff --git a/homeassistant/components/elmax/binary_sensor.py b/homeassistant/components/elmax/binary_sensor.py index e477ab6c2a4..ec51f861819 100644 --- a/homeassistant/components/elmax/binary_sensor.py +++ b/homeassistant/components/elmax/binary_sensor.py @@ -12,9 +12,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .common import ElmaxEntity from .const import DOMAIN from .coordinator import ElmaxCoordinator +from .entity import ElmaxEntity async def async_setup_entry( diff --git a/homeassistant/components/elmax/common.py b/homeassistant/components/elmax/common.py index 965e30235ff..18350e45efe 100644 --- a/homeassistant/components/elmax/common.py +++ b/homeassistant/components/elmax/common.py @@ -4,15 +4,10 @@ from __future__ import annotations import ssl -from elmax_api.model.endpoint import DeviceEndpoint from elmax_api.model.panel import PanelEntry from packaging import version -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from 
.const import DOMAIN, ELMAX_LOCAL_API_PATH, MIN_APIV2_SUPPORTED_VERSION -from .coordinator import ElmaxCoordinator +from .const import ELMAX_LOCAL_API_PATH, MIN_APIV2_SUPPORTED_VERSION def get_direct_api_url(host: str, port: int, use_ssl: bool) -> str: @@ -40,40 +35,10 @@ def check_local_version_supported(api_version: str | None) -> bool: class DirectPanel(PanelEntry): """Helper class for wrapping a directly accessed Elmax Panel.""" - def __init__(self, panel_uri): + def __init__(self, panel_uri) -> None: """Construct the object.""" super().__init__(panel_uri, True, {}) def get_name_by_user(self, username: str) -> str: """Return the panel name.""" return f"Direct Panel {self.hash}" - - -class ElmaxEntity(CoordinatorEntity[ElmaxCoordinator]): - """Wrapper for Elmax entities.""" - - def __init__( - self, - elmax_device: DeviceEndpoint, - panel_version: str, - coordinator: ElmaxCoordinator, - ) -> None: - """Construct the object.""" - super().__init__(coordinator=coordinator) - self._device = elmax_device - self._attr_unique_id = elmax_device.endpoint_id - self._attr_name = elmax_device.name - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.panel_entry.hash)}, - name=coordinator.panel_entry.get_name_by_user( - coordinator.http_client.get_authenticated_username() - ), - manufacturer="Elmax", - model=panel_version, - sw_version=panel_version, - ) - - @property - def available(self) -> bool: - """Return if entity is available.""" - return super().available and self.coordinator.panel_entry.online diff --git a/homeassistant/components/elmax/config_flow.py b/homeassistant/components/elmax/config_flow.py index 69f69a5fd31..3bb01efd3d5 100644 --- a/homeassistant/components/elmax/config_flow.py +++ b/homeassistant/components/elmax/config_flow.py @@ -13,7 +13,7 @@ import httpx import voluptuous as vol from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.exceptions import HomeAssistantError from .common import ( @@ -114,7 +114,6 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): # Panel selection variables _panels_schema: vol.Schema _panel_names: dict - _entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -204,7 +203,7 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_direct(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Handle the direct setup step.""" - self._selected_mode = CONF_ELMAX_MODE_CLOUD + self._selected_mode = CONF_ELMAX_MODE_DIRECT if user_input is None: return self.async_show_form( step_id=CONF_ELMAX_MODE_DIRECT, @@ -395,7 +394,6 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) self._reauth_cloud_username = entry_data.get(CONF_ELMAX_USERNAME) self._reauth_cloud_panelid = entry_data.get(CONF_ELMAX_PANEL_ID) return await self.async_step_reauth_confirm() @@ -413,7 +411,7 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): # Handle authentication, make sure the panel we are re-authenticating against is listed among results # and verify its pin is correct. - assert self._entry is not None + reauth_entry = self._get_reauth_entry() try: # Test login. 
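            # _get_reauth_entry() returns the config entry that triggered this reauth
            # flow, so the flow no longer needs to keep its own self._entry reference;
            # the async_update_reload_and_abort() call further below then writes the new
            # credentials to that entry, reloads it, and aborts with "reauth_successful".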
client = await self._async_login(username=username, password=password) @@ -421,14 +419,14 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): panels = [ p for p in await client.list_control_panels() - if p.hash == self._entry.data[CONF_ELMAX_PANEL_ID] + if p.hash == reauth_entry.data[CONF_ELMAX_PANEL_ID] ] if len(panels) < 1: raise NoOnlinePanelsError # noqa: TRY301 # Verify the pin is still valid. await client.get_panel_status( - control_panel_id=self._entry.data[CONF_ELMAX_PANEL_ID], + control_panel_id=reauth_entry.data[CONF_ELMAX_PANEL_ID], pin=panel_pin, ) @@ -440,18 +438,16 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_pin" # If all went right, update the config entry - if not errors: - self.hass.config_entries.async_update_entry( - self._entry, + else: + return self.async_update_reload_and_abort( + reauth_entry, data={ - CONF_ELMAX_PANEL_ID: self._entry.data[CONF_ELMAX_PANEL_ID], + CONF_ELMAX_PANEL_ID: reauth_entry.data[CONF_ELMAX_PANEL_ID], CONF_ELMAX_PANEL_PIN: panel_pin, CONF_ELMAX_USERNAME: username, CONF_ELMAX_PASSWORD: password, }, ) - await self.hass.config_entries.async_reload(self._entry.entry_id) - return self.async_abort(reason="reauth_successful") # Otherwise start over and show the relative error message return self.async_show_form( diff --git a/homeassistant/components/elmax/cover.py b/homeassistant/components/elmax/cover.py index 528b2e6dead..403bc51dbff 100644 --- a/homeassistant/components/elmax/cover.py +++ b/homeassistant/components/elmax/cover.py @@ -13,9 +13,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .common import ElmaxEntity from .const import DOMAIN from .coordinator import ElmaxCoordinator +from .entity import ElmaxEntity _LOGGER = logging.getLogger(__name__) @@ -121,13 +121,13 @@ class ElmaxCover(ElmaxEntity, CoverEntity): else: _LOGGER.debug("Ignoring stop request as the cover is IDLE") - async def async_open_cover(self, **kwargs): + async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" await self.coordinator.http_client.execute_command( endpoint_id=self._device.endpoint_id, command=CoverCommand.UP ) - async def async_close_cover(self, **kwargs): + async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self.coordinator.http_client.execute_command( endpoint_id=self._device.endpoint_id, command=CoverCommand.DOWN diff --git a/homeassistant/components/elmax/entity.py b/homeassistant/components/elmax/entity.py new file mode 100644 index 00000000000..a49fdc14c3e --- /dev/null +++ b/homeassistant/components/elmax/entity.py @@ -0,0 +1,41 @@ +"""Elmax integration common classes and utilities.""" + +from __future__ import annotations + +from elmax_api.model.endpoint import DeviceEndpoint + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import ElmaxCoordinator + + +class ElmaxEntity(CoordinatorEntity[ElmaxCoordinator]): + """Wrapper for Elmax entities.""" + + def __init__( + self, + elmax_device: DeviceEndpoint, + panel_version: str, + coordinator: ElmaxCoordinator, + ) -> None: + """Construct the object.""" + super().__init__(coordinator=coordinator) + self._device = elmax_device + self._attr_unique_id = elmax_device.endpoint_id + self._attr_name = elmax_device.name + self._attr_device_info = DeviceInfo( + 
identifiers={(DOMAIN, coordinator.panel_entry.hash)}, + name=coordinator.panel_entry.get_name_by_user( + coordinator.http_client.get_authenticated_username() + ), + manufacturer="Elmax", + model=panel_version, + sw_version=panel_version, + ) + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.coordinator.panel_entry.online diff --git a/homeassistant/components/elmax/manifest.json b/homeassistant/components/elmax/manifest.json index c57b707906b..dfa20326d0c 100644 --- a/homeassistant/components/elmax/manifest.json +++ b/homeassistant/components/elmax/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/elmax", "iot_class": "cloud_polling", "loggers": ["elmax_api"], - "requirements": ["elmax-api==0.0.5"], + "requirements": ["elmax-api==0.0.6.3"], "zeroconf": [ { "type": "_elmax-ssl._tcp.local." diff --git a/homeassistant/components/elmax/switch.py b/homeassistant/components/elmax/switch.py index 6ecbc70a8c5..d0e52c556f6 100644 --- a/homeassistant/components/elmax/switch.py +++ b/homeassistant/components/elmax/switch.py @@ -12,9 +12,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .common import ElmaxEntity from .const import DOMAIN from .coordinator import ElmaxCoordinator +from .entity import ElmaxEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/elv/manifest.json b/homeassistant/components/elv/manifest.json index 9b71595e58f..5757aeb5e52 100644 --- a/homeassistant/components/elv/manifest.json +++ b/homeassistant/components/elv/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/elv", "iot_class": "local_polling", "loggers": ["pypca"], + "quality_scale": "legacy", "requirements": ["pypca==0.0.7"] } diff --git a/homeassistant/components/emby/manifest.json b/homeassistant/components/emby/manifest.json index 3f57f62eb0b..856cdaf189f 100644 --- a/homeassistant/components/emby/manifest.json +++ b/homeassistant/components/emby/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/emby", "iot_class": "local_push", "loggers": ["pyemby"], + "quality_scale": "legacy", "requirements": ["pyEmby==1.10"] } diff --git a/homeassistant/components/emoncms/__init__.py b/homeassistant/components/emoncms/__init__.py index 5e7adbcd6e7..0cd686b5b56 100644 --- a/homeassistant/components/emoncms/__init__.py +++ b/homeassistant/components/emoncms/__init__.py @@ -1 +1,87 @@ """The emoncms component.""" + +from pyemoncms import EmoncmsClient + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, CONF_URL, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue + +from .const import DOMAIN, EMONCMS_UUID_DOC_URL, LOGGER +from .coordinator import EmoncmsCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type EmonCMSConfigEntry = ConfigEntry[EmoncmsCoordinator] + + +def _migrate_unique_id( + hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_unique_id: str +) -> None: + """Migrate to emoncms unique id if needed.""" + ent_reg = er.async_get(hass) + entry_entities = 
ent_reg.entities.get_entries_for_config_entry_id(entry.entry_id) + for entity in entry_entities: + if entity.unique_id.split("-")[0] == entry.entry_id: + feed_id = entity.unique_id.split("-")[-1] + LOGGER.debug(f"moving feed {feed_id} to hardware uuid") + ent_reg.async_update_entity( + entity.entity_id, new_unique_id=f"{emoncms_unique_id}-{feed_id}" + ) + hass.config_entries.async_update_entry( + entry, + unique_id=emoncms_unique_id, + ) + + +async def _check_unique_id_migration( + hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_client: EmoncmsClient +) -> None: + """Check if we can migrate to the emoncms uuid.""" + emoncms_unique_id = await emoncms_client.async_get_uuid() + if emoncms_unique_id: + if entry.unique_id != emoncms_unique_id: + _migrate_unique_id(hass, entry, emoncms_unique_id) + else: + async_create_issue( + hass, + DOMAIN, + "migrate database", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="migrate_database", + translation_placeholders={ + "url": entry.data[CONF_URL], + "doc_url": EMONCMS_UUID_DOC_URL, + }, + ) + + +async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> bool: + """Load a config entry.""" + emoncms_client = EmoncmsClient( + entry.data[CONF_URL], + entry.data[CONF_API_KEY], + session=async_get_clientsession(hass), + ) + await _check_unique_id_migration(hass, entry, emoncms_client) + coordinator = EmoncmsCoordinator(hass, emoncms_client) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + entry.async_on_unload(entry.add_update_listener(update_listener)) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry): + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/emoncms/config_flow.py b/homeassistant/components/emoncms/config_flow.py new file mode 100644 index 00000000000..e0d4d0d03e9 --- /dev/null +++ b/homeassistant/components/emoncms/config_flow.py @@ -0,0 +1,225 @@ +"""Configflow for the emoncms integration.""" + +from __future__ import annotations + +from typing import Any + +from pyemoncms import EmoncmsClient +import voluptuous as vol + +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import callback +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import selector +from homeassistant.helpers.typing import ConfigType + +from .const import ( + CONF_MESSAGE, + CONF_ONLY_INCLUDE_FEEDID, + CONF_SUCCESS, + DOMAIN, + FEED_ID, + FEED_NAME, + FEED_TAG, + LOGGER, +) + + +def get_options(feeds: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Build the selector options with the feed list.""" + return [ + { + "value": feed[FEED_ID], + "label": f"{feed[FEED_ID]}|{feed[FEED_TAG]}|{feed[FEED_NAME]}", + } + for feed in feeds + ] + + +def sensor_name(url: str) -> str: + """Return sensor name.""" + sensorip = url.rsplit("//", maxsplit=1)[-1] + return f"emoncms@{sensorip}" + + +async def get_feed_list( + emoncms_client: EmoncmsClient, +) -> dict[str, Any]: + """Check connection to emoncms 
and return feed list if successful.""" + return await emoncms_client.async_request("/feed/list.json") + + +class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN): + """emoncms integration UI config flow.""" + + url: str + api_key: str + include_only_feeds: list | None = None + dropdown: dict = {} + + @staticmethod + @callback + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> EmoncmsOptionsFlow: + """Get the options flow for this handler.""" + return EmoncmsOptionsFlow(config_entry) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Initiate a flow via the UI.""" + errors: dict[str, str] = {} + description_placeholders = {} + + if user_input is not None: + self.url = user_input[CONF_URL] + self.api_key = user_input[CONF_API_KEY] + self._async_abort_entries_match( + { + CONF_API_KEY: self.api_key, + CONF_URL: self.url, + } + ) + emoncms_client = EmoncmsClient( + self.url, self.api_key, session=async_get_clientsession(self.hass) + ) + result = await get_feed_list(emoncms_client) + if not result[CONF_SUCCESS]: + errors["base"] = "api_error" + description_placeholders = {"details": result[CONF_MESSAGE]} + else: + self.include_only_feeds = user_input.get(CONF_ONLY_INCLUDE_FEEDID) + await self.async_set_unique_id(await emoncms_client.async_get_uuid()) + self._abort_if_unique_id_configured() + options = get_options(result[CONF_MESSAGE]) + self.dropdown = { + "options": options, + "mode": "dropdown", + "multiple": True, + } + return await self.async_step_choose_feeds() + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_URL): str, + vol.Required(CONF_API_KEY): str, + } + ), + user_input, + ), + errors=errors, + description_placeholders=description_placeholders, + ) + + async def async_step_choose_feeds( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Choose feeds to import.""" + errors: dict[str, str] = {} + include_only_feeds: list = [] + if user_input or self.include_only_feeds is not None: + if self.include_only_feeds is not None: + include_only_feeds = self.include_only_feeds + elif user_input: + include_only_feeds = user_input[CONF_ONLY_INCLUDE_FEEDID] + return self.async_create_entry( + title=sensor_name(self.url), + data={ + CONF_URL: self.url, + CONF_API_KEY: self.api_key, + CONF_ONLY_INCLUDE_FEEDID: include_only_feeds, + }, + ) + return self.async_show_form( + step_id="choose_feeds", + data_schema=vol.Schema( + { + vol.Required( + CONF_ONLY_INCLUDE_FEEDID, + default=include_only_feeds, + ): selector({"select": self.dropdown}), + } + ), + errors=errors, + ) + + async def async_step_import(self, import_info: ConfigType) -> ConfigFlowResult: + """Import config from yaml.""" + url = import_info[CONF_URL] + api_key = import_info[CONF_API_KEY] + include_only_feeds = None + if import_info.get(CONF_ONLY_INCLUDE_FEEDID) is not None: + include_only_feeds = list(map(str, import_info[CONF_ONLY_INCLUDE_FEEDID])) + config = { + CONF_API_KEY: api_key, + CONF_ONLY_INCLUDE_FEEDID: include_only_feeds, + CONF_URL: url, + } + LOGGER.debug(config) + result = await self.async_step_user(config) + if errors := result.get("errors"): + return self.async_abort(reason=errors["base"]) + return result + + +class EmoncmsOptionsFlow(OptionsFlow): + """Emoncms Options flow handler.""" + + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize emoncms options flow.""" + self._url = config_entry.data[CONF_URL] + 
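        # Only the server URL and API key are kept on the options flow; the feed
        # selection is re-read in async_step_init from config_entry.options, falling
        # back to the list stored in config_entry.data when no options exist yet.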
self._api_key = config_entry.data[CONF_API_KEY] + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + errors: dict[str, str] = {} + description_placeholders = {} + include_only_feeds = self.config_entry.options.get( + CONF_ONLY_INCLUDE_FEEDID, + self.config_entry.data.get(CONF_ONLY_INCLUDE_FEEDID, []), + ) + options: list = include_only_feeds + emoncms_client = EmoncmsClient( + self._url, + self._api_key, + session=async_get_clientsession(self.hass), + ) + result = await get_feed_list(emoncms_client) + if not result[CONF_SUCCESS]: + errors["base"] = "api_error" + description_placeholders = {"details": result[CONF_MESSAGE]} + else: + options = get_options(result[CONF_MESSAGE]) + dropdown = {"options": options, "mode": "dropdown", "multiple": True} + if user_input: + include_only_feeds = user_input[CONF_ONLY_INCLUDE_FEEDID] + return self.async_create_entry( + data={ + CONF_ONLY_INCLUDE_FEEDID: include_only_feeds, + }, + ) + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema( + { + vol.Required( + CONF_ONLY_INCLUDE_FEEDID, default=include_only_feeds + ): selector({"select": dropdown}), + } + ), + errors=errors, + description_placeholders=description_placeholders, + ) diff --git a/homeassistant/components/emoncms/const.py b/homeassistant/components/emoncms/const.py index 96269218316..c53f7cc8a9f 100644 --- a/homeassistant/components/emoncms/const.py +++ b/homeassistant/components/emoncms/const.py @@ -7,6 +7,13 @@ CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id" CONF_MESSAGE = "message" CONF_SUCCESS = "success" DOMAIN = "emoncms" +EMONCMS_UUID_DOC_URL = ( + "https://docs.openenergymonitor.org/emoncms/update.html" + "#upgrading-to-a-version-producing-a-unique-identifier" +) +FEED_ID = "id" +FEED_NAME = "name" +FEED_TAG = "tag" LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/emoncms/coordinator.py b/homeassistant/components/emoncms/coordinator.py index d1f6a2858c7..c6fda5ed7c8 100644 --- a/homeassistant/components/emoncms/coordinator.py +++ b/homeassistant/components/emoncms/coordinator.py @@ -18,14 +18,13 @@ class EmoncmsCoordinator(DataUpdateCoordinator[list[dict[str, Any]] | None]): self, hass: HomeAssistant, emoncms_client: EmoncmsClient, - scan_interval: timedelta, ) -> None: """Initialize the emoncms data coordinator.""" super().__init__( hass, LOGGER, name="emoncms_coordinator", - update_interval=scan_interval, + update_interval=timedelta(seconds=60), ) self.emoncms_client = emoncms_client diff --git a/homeassistant/components/emoncms/manifest.json b/homeassistant/components/emoncms/manifest.json index 09229d0419a..c7f18cb205e 100644 --- a/homeassistant/components/emoncms/manifest.json +++ b/homeassistant/components/emoncms/manifest.json @@ -2,7 +2,8 @@ "domain": "emoncms", "name": "Emoncms", "codeowners": ["@borpin", "@alexandrecuer"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/emoncms", "iot_class": "local_polling", - "requirements": ["pyemoncms==0.0.7"] + "requirements": ["pyemoncms==0.1.1"] } diff --git a/homeassistant/components/emoncms/sensor.py b/homeassistant/components/emoncms/sensor.py index 3c448391974..291ecad0bd3 100644 --- a/homeassistant/components/emoncms/sensor.py +++ b/homeassistant/components/emoncms/sensor.py @@ -2,39 +2,200 @@ from __future__ import annotations -from datetime import timedelta from typing import Any -from pyemoncms import EmoncmsClient import voluptuous as vol from 
homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, + SensorEntityDescription, SensorStateClass, ) +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( + CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + CONCENTRATION_PARTS_PER_MILLION, CONF_API_KEY, CONF_ID, - CONF_SCAN_INTERVAL, CONF_UNIT_OF_MEASUREMENT, CONF_URL, CONF_VALUE_TEMPLATE, - STATE_UNKNOWN, + PERCENTAGE, + UnitOfApparentPower, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfFrequency, UnitOfPower, + UnitOfPressure, + UnitOfSoundPressure, + UnitOfSpeed, + UnitOfTemperature, + UnitOfVolume, + UnitOfVolumeFlowRate, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import template -from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CONF_EXCLUDE_FEEDID, CONF_ONLY_INCLUDE_FEEDID +from .config_flow import sensor_name +from .const import ( + CONF_EXCLUDE_FEEDID, + CONF_ONLY_INCLUDE_FEEDID, + DOMAIN, + FEED_ID, + FEED_NAME, + FEED_TAG, +) from .coordinator import EmoncmsCoordinator +SENSORS: dict[str | None, SensorEntityDescription] = { + "kWh": SensorEntityDescription( + key="energy|kWh", + translation_key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + "Wh": SensorEntityDescription( + key="energy|Wh", + translation_key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + "kW": SensorEntityDescription( + key="power|kW", + translation_key="power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + state_class=SensorStateClass.MEASUREMENT, + ), + "W": SensorEntityDescription( + key="power|W", + translation_key="power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + ), + "V": SensorEntityDescription( + key="voltage", + translation_key="voltage", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + ), + "A": SensorEntityDescription( + key="current", + translation_key="current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + ), + "VA": SensorEntityDescription( + key="apparent_power", + translation_key="apparent_power", + device_class=SensorDeviceClass.APPARENT_POWER, + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + state_class=SensorStateClass.MEASUREMENT, + ), + "°C": SensorEntityDescription( + key="temperature|celsius", + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + 
state_class=SensorStateClass.MEASUREMENT, + ), + "°F": SensorEntityDescription( + key="temperature|fahrenheit", + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT, + state_class=SensorStateClass.MEASUREMENT, + ), + "K": SensorEntityDescription( + key="temperature|kelvin", + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.KELVIN, + state_class=SensorStateClass.MEASUREMENT, + ), + "Hz": SensorEntityDescription( + key="frequency", + translation_key="frequency", + device_class=SensorDeviceClass.FREQUENCY, + native_unit_of_measurement=UnitOfFrequency.HERTZ, + state_class=SensorStateClass.MEASUREMENT, + ), + "hPa": SensorEntityDescription( + key="pressure", + translation_key="pressure", + device_class=SensorDeviceClass.PRESSURE, + native_unit_of_measurement=UnitOfPressure.HPA, + state_class=SensorStateClass.MEASUREMENT, + ), + "dB": SensorEntityDescription( + key="decibel", + translation_key="decibel", + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + native_unit_of_measurement=UnitOfSoundPressure.DECIBEL, + state_class=SensorStateClass.MEASUREMENT, + ), + "m³": SensorEntityDescription( + key="volume|cubic_meter", + translation_key="volume", + device_class=SensorDeviceClass.VOLUME_STORAGE, + native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, + state_class=SensorStateClass.MEASUREMENT, + ), + "m³/h": SensorEntityDescription( + key="flow|cubic_meters_per_hour", + translation_key="flow", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, + state_class=SensorStateClass.MEASUREMENT, + ), + "l/m": SensorEntityDescription( + key="flow|liters_per_minute", + translation_key="flow", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + state_class=SensorStateClass.MEASUREMENT, + ), + "m/s": SensorEntityDescription( + key="speed|meters_per_second", + translation_key="speed", + device_class=SensorDeviceClass.SPEED, + native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, + state_class=SensorStateClass.MEASUREMENT, + ), + "µg/m³": SensorEntityDescription( + key="concentration|microgram_per_cubic_meter", + translation_key="concentration", + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + state_class=SensorStateClass.MEASUREMENT, + ), + "ppm": SensorEntityDescription( + key="concentration|microgram_parts_per_million", + translation_key="concentration", + native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, + state_class=SensorStateClass.MEASUREMENT, + ), + "%": SensorEntityDescription( + key="percent", + translation_key="percent", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), +} + ATTR_FEEDID = "FeedId" ATTR_FEEDNAME = "FeedName" ATTR_LASTUPDATETIME = "LastUpdated" @@ -42,9 +203,7 @@ ATTR_LASTUPDATETIMESTR = "LastUpdatedStr" ATTR_SIZE = "Size" ATTR_TAG = "Tag" ATTR_USERID = "UserId" - CONF_SENSOR_NAMES = "sensor_names" - DECIMALS = 2 DEFAULT_UNIT = UnitOfPower.WATT @@ -76,49 +235,90 @@ async def async_setup_platform( async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: - """Set up the Emoncms sensor.""" - apikey = config[CONF_API_KEY] - url = config[CONF_URL] - sensorid = config[CONF_ID] - value_template = config.get(CONF_VALUE_TEMPLATE) - config_unit = 
config.get(CONF_UNIT_OF_MEASUREMENT) - exclude_feeds = config.get(CONF_EXCLUDE_FEEDID) - include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID) - sensor_names = config.get(CONF_SENSOR_NAMES) - scan_interval = config.get(CONF_SCAN_INTERVAL, timedelta(seconds=30)) + """Import config from yaml.""" + if CONF_VALUE_TEMPLATE in config: + async_create_issue( + hass, + DOMAIN, + f"remove_{CONF_VALUE_TEMPLATE}_{DOMAIN}", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.ERROR, + translation_key=f"remove_{CONF_VALUE_TEMPLATE}", + translation_placeholders={ + "domain": DOMAIN, + "parameter": CONF_VALUE_TEMPLATE, + }, + ) + return + if CONF_ONLY_INCLUDE_FEEDID not in config: + async_create_issue( + hass, + DOMAIN, + f"missing_{CONF_ONLY_INCLUDE_FEEDID}_{DOMAIN}", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key=f"missing_{CONF_ONLY_INCLUDE_FEEDID}", + translation_placeholders={ + "domain": DOMAIN, + }, + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + if ( + result.get("type") == FlowResultType.CREATE_ENTRY + or result.get("reason") == "already_configured" + ): + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + is_fixable=False, + issue_domain=DOMAIN, + breaks_in_ha_version="2025.3.0", + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "emoncms", + }, + ) - emoncms_client = EmoncmsClient(url, apikey, session=async_get_clientsession(hass)) - coordinator = EmoncmsCoordinator(hass, emoncms_client, scan_interval) - await coordinator.async_refresh() + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the emoncms sensors.""" + name = sensor_name(entry.data[CONF_URL]) + exclude_feeds = entry.data.get(CONF_EXCLUDE_FEEDID) + include_only_feeds = entry.options.get( + CONF_ONLY_INCLUDE_FEEDID, entry.data.get(CONF_ONLY_INCLUDE_FEEDID) + ) + + if exclude_feeds is None and include_only_feeds is None: + return + + coordinator = entry.runtime_data + # uuid was added in emoncms database 11.5.7 + unique_id = entry.unique_id if entry.unique_id else entry.entry_id elems = coordinator.data if not elems: return - sensors: list[EmonCmsSensor] = [] for idx, elem in enumerate(elems): - if exclude_feeds is not None and int(elem["id"]) in exclude_feeds: + if include_only_feeds is not None and elem[FEED_ID] not in include_only_feeds: continue - - if include_only_feeds is not None and int(elem["id"]) not in include_only_feeds: - continue - - name = None - if sensor_names is not None: - name = sensor_names.get(int(elem["id"]), None) - - if unit := elem.get("unit"): - unit_of_measurement = unit - else: - unit_of_measurement = config_unit - sensors.append( EmonCmsSensor( coordinator, + unique_id, + elem.get("unit"), name, - value_template, - unit_of_measurement, - str(sensorid), idx, ) ) @@ -128,13 +328,14 @@ async def async_setup_platform( class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): """Implementation of an Emoncms sensor.""" + _attr_has_entity_name = True + def __init__( self, coordinator: EmoncmsCoordinator, - name: str | None, - value_template: template.Template | None, + unique_id: str, unit_of_measurement: str | None, - sensorid: str, + name: str, idx: int, ) -> None: """Initialize the sensor.""" @@ -143,52 +344,24 @@ class 
EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): elem = {} if self.coordinator.data: elem = self.coordinator.data[self.idx] - if name is None: - # Suppress ID in sensor name if it's 1, since most people won't - # have more than one EmonCMS source and it's redundant to show the - # ID if there's only one. - id_for_name = "" if str(sensorid) == "1" else sensorid - # Use the feed name assigned in EmonCMS or fall back to the feed ID - feed_name = elem.get("name", f"Feed {elem.get('id')}") - self._attr_name = f"EmonCMS{id_for_name} {feed_name}" + self._attr_translation_placeholders = { + "emoncms_details": f"{elem[FEED_TAG]} {elem[FEED_NAME]}", + } + self._attr_unique_id = f"{unique_id}-{elem[FEED_ID]}" + description = SENSORS.get(unit_of_measurement) + if description is not None: + self.entity_description = description else: - self._attr_name = name - self._value_template = value_template - self._attr_native_unit_of_measurement = unit_of_measurement - self._sensorid = sensorid - - if unit_of_measurement in ("kWh", "Wh"): - self._attr_device_class = SensorDeviceClass.ENERGY - self._attr_state_class = SensorStateClass.TOTAL_INCREASING - elif unit_of_measurement == "W": - self._attr_device_class = SensorDeviceClass.POWER - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == "V": - self._attr_device_class = SensorDeviceClass.VOLTAGE - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == "A": - self._attr_device_class = SensorDeviceClass.CURRENT - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == "VA": - self._attr_device_class = SensorDeviceClass.APPARENT_POWER - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement in ("°C", "°F", "K"): - self._attr_device_class = SensorDeviceClass.TEMPERATURE - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == "Hz": - self._attr_device_class = SensorDeviceClass.FREQUENCY - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == "hPa": - self._attr_device_class = SensorDeviceClass.PRESSURE - self._attr_state_class = SensorStateClass.MEASUREMENT + self._attr_native_unit_of_measurement = unit_of_measurement + self._attr_name = f"{name} {elem[FEED_NAME]}" self._update_attributes(elem) def _update_attributes(self, elem: dict[str, Any]) -> None: """Update entity attributes.""" self._attr_extra_state_attributes = { - ATTR_FEEDID: elem["id"], - ATTR_TAG: elem["tag"], - ATTR_FEEDNAME: elem["name"], + ATTR_FEEDID: elem[FEED_ID], + ATTR_TAG: elem[FEED_TAG], + ATTR_FEEDNAME: elem[FEED_NAME], } if elem["value"] is not None: self._attr_extra_state_attributes[ATTR_SIZE] = elem["size"] @@ -199,13 +372,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): ) self._attr_native_value = None - if self._value_template is not None: - self._attr_native_value = ( - self._value_template.async_render_with_possible_json_value( - elem["value"], STATE_UNKNOWN - ) - ) - elif elem["value"] is not None: + if elem["value"] is not None: self._attr_native_value = round(float(elem["value"]), DECIMALS) @callback diff --git a/homeassistant/components/emoncms/strings.json b/homeassistant/components/emoncms/strings.json new file mode 100644 index 00000000000..5769e825944 --- /dev/null +++ b/homeassistant/components/emoncms/strings.json @@ -0,0 +1,99 @@ +{ + "config": { + "error": { + "api_error": "An error occurred in the pyemoncms API: {details}" + }, + "step": { + "user": { 
+ "data": { + "url": "[%key:common::config_flow::data::url%]", + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "url": "Server url starting with the protocol (http or https)", + "api_key": "Your 32 bits api key" + } + }, + "choose_feeds": { + "data": { + "include_only_feed_id": "Choose feeds to include" + } + } + }, + "abort": { + "already_configured": "This server is already configured" + } + }, + "entity": { + "sensor": { + "energy": { + "name": "Energy {emoncms_details}" + }, + "power": { + "name": "Power {emoncms_details}" + }, + "percent": { + "name": "Percentage {emoncms_details}" + }, + "voltage": { + "name": "Voltage {emoncms_details}" + }, + "current": { + "name": "Current {emoncms_details}" + }, + "apparent_power": { + "name": "Apparent power {emoncms_details}" + }, + "temperature": { + "name": "Temperature {emoncms_details}" + }, + "frequency": { + "name": "Frequency {emoncms_details}" + }, + "pressure": { + "name": "Pressure {emoncms_details}" + }, + "decibel": { + "name": "Decibel {emoncms_details}" + }, + "volume": { + "name": "Volume {emoncms_details}" + }, + "flow": { + "name": "Flow rate {emoncms_details}" + }, + "speed": { + "name": "Speed {emoncms_details}" + }, + "concentration": { + "name": "Concentration {emoncms_details}" + } + } + }, + "options": { + "error": { + "api_error": "[%key:component::emoncms::config::error::api_error%]" + }, + "step": { + "init": { + "data": { + "include_only_feed_id": "[%key:component::emoncms::config::step::choose_feeds::data::include_only_feed_id%]" + } + } + } + }, + "issues": { + "remove_value_template": { + "title": "The {domain} integration cannot start", + "description": "Configuring {domain} using YAML is being removed and the `{parameter}` parameter cannot be imported.\n\nPlease remove `{parameter}` from your `{domain}` yaml configuration and restart Home Assistant\n\nAlternatively, you may entirely remove the `{domain}` configuration from your configuration.yaml, restart Home Assistant, and add the {domain} integration manually." + }, + "missing_include_only_feed_id": { + "title": "No feed synchronized with the {domain} sensor", + "description": "Configuring {domain} using YAML is being removed.\n\nPlease add manually the feeds you want to synchronize with the `configure` button of the integration." 
+ }, + "migrate_database": { + "title": "Upgrade your emoncms version", + "description": "Your [emoncms]({url}) installation does not provide a unique identifier.\n\nPlease upgrade to at least version 11.5.7 and migrate your emoncms database.\n\nMore info in the [emoncms documentation]({doc_url})." + } + } +} diff --git a/homeassistant/components/emoncms_history/__init__.py b/homeassistant/components/emoncms_history/__init__.py index 7de3a4f2ef8..00af1fec6c6 100644 --- a/homeassistant/components/emoncms_history/__init__.py +++ b/homeassistant/components/emoncms_history/__init__.py @@ -86,15 +86,13 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: continue if payload_dict: - payload = "{{{}}}".format( - ",".join(f"{key}:{val}" for key, val in payload_dict.items()) - ) + payload = ",".join(f"{key}:{val}" for key, val in payload_dict.items()) send_data( conf.get(CONF_URL), conf.get(CONF_API_KEY), str(conf.get(CONF_INPUTNODE)), - payload, + f"{{{payload}}}", ) track_point_in_time( diff --git a/homeassistant/components/emoncms_history/manifest.json b/homeassistant/components/emoncms_history/manifest.json index faa91e64017..e73f76f7528 100644 --- a/homeassistant/components/emoncms_history/manifest.json +++ b/homeassistant/components/emoncms_history/manifest.json @@ -3,5 +3,6 @@ "name": "Emoncms History", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/emoncms_history", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/emonitor/__init__.py b/homeassistant/components/emonitor/__init__.py index 7506edae1d3..4316487352b 100644 --- a/homeassistant/components/emonitor/__init__.py +++ b/homeassistant/components/emonitor/__init__.py @@ -31,6 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: EmonitorConfigEntry) -> coordinator = DataUpdateCoordinator[EmonitorStatus]( hass, _LOGGER, + config_entry=entry, name=entry.title, update_method=emonitor.async_get_status, update_interval=timedelta(seconds=DEFAULT_UPDATE_RATE), diff --git a/homeassistant/components/emonitor/config_flow.py b/homeassistant/components/emonitor/config_flow.py index b90b1477f87..833b80f9d47 100644 --- a/homeassistant/components/emonitor/config_flow.py +++ b/homeassistant/components/emonitor/config_flow.py @@ -34,10 +34,11 @@ class EmonitorConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + discovered_info: dict[str, str] + def __init__(self) -> None: """Initialize Emonitor ConfigFlow.""" self.discovered_ip: str | None = None - self.discovered_info: dict[str, str] | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -87,8 +88,11 @@ class EmonitorConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user() return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Attempt to confirm.""" + assert self.discovered_ip is not None if user_input is not None: return self.async_create_entry( title=self.discovered_info["title"], diff --git a/homeassistant/components/emulated_hue/hue_api.py b/homeassistant/components/emulated_hue/hue_api.py index 8194d31823d..e13112f20bb 100644 --- a/homeassistant/components/emulated_hue/hue_api.py +++ b/homeassistant/components/emulated_hue/hue_api.py @@ -39,7 +39,7 @@ from homeassistant.components.http import KEY_HASS, HomeAssistantView from homeassistant.components.humidifier import ATTR_HUMIDITY, 
SERVICE_SET_HUMIDITY from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -67,6 +67,7 @@ from homeassistant.const import ( ) from homeassistant.core import Event, EventStateChangedData, State from homeassistant.helpers.event import async_track_state_change_event +from homeassistant.util import color as color_util from homeassistant.util.json import json_loads from homeassistant.util.network import is_local @@ -500,7 +501,11 @@ class HueOneLightChangeView(HomeAssistantView): light.color_temp_supported(color_modes) and parsed[STATE_COLOR_TEMP] is not None ): - data[ATTR_COLOR_TEMP] = parsed[STATE_COLOR_TEMP] + data[ATTR_COLOR_TEMP_KELVIN] = ( + color_util.color_temperature_mired_to_kelvin( + parsed[STATE_COLOR_TEMP] + ) + ) if ( entity_features & LightEntityFeature.TRANSITION @@ -702,7 +707,12 @@ def _build_entity_state_dict(entity: State) -> dict[str, Any]: else: data[STATE_HUE] = HUE_API_STATE_HUE_MIN data[STATE_SATURATION] = HUE_API_STATE_SAT_MIN - data[STATE_COLOR_TEMP] = attributes.get(ATTR_COLOR_TEMP) or 0 + kelvin = attributes.get(ATTR_COLOR_TEMP_KELVIN) + data[STATE_COLOR_TEMP] = ( + color_util.color_temperature_kelvin_to_mired(kelvin) + if kelvin is not None + else 0 + ) else: data[STATE_BRIGHTNESS] = 0 diff --git a/homeassistant/components/emulated_kasa/manifest.json b/homeassistant/components/emulated_kasa/manifest.json index 640a2113d6f..da3912a9d25 100644 --- a/homeassistant/components/emulated_kasa/manifest.json +++ b/homeassistant/components/emulated_kasa/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_push", "loggers": ["sense_energy"], "quality_scale": "internal", - "requirements": ["sense-energy==0.12.4"] + "requirements": ["sense-energy==0.13.4"] } diff --git a/homeassistant/components/emulated_roku/config_flow.py b/homeassistant/components/emulated_roku/config_flow.py index 0e5cc1ba55a..725987418da 100644 --- a/homeassistant/components/emulated_roku/config_flow.py +++ b/homeassistant/components/emulated_roku/config_flow.py @@ -6,13 +6,13 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from .const import CONF_LISTEN_PORT, DEFAULT_NAME, DEFAULT_PORT, DOMAIN @callback -def configured_servers(hass): +def configured_servers(hass: HomeAssistant) -> set[str]: """Return a set of the configured servers.""" return { entry.data[CONF_NAME] for entry in hass.config_entries.async_entries(DOMAIN) @@ -56,6 +56,6 @@ class EmulatedRokuFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow import.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) diff --git a/homeassistant/components/energy/data.py b/homeassistant/components/energy/data.py index 9c5a9fbacd1..ff86177cf41 100644 --- a/homeassistant/components/energy/data.py +++ b/homeassistant/components/energy/data.py @@ -331,7 +331,7 @@ class EnergyManager: "device_consumption", ): if key in update: - data[key] = update[key] # type: ignore[literal-required] + data[key] = update[key] self.data = data self._store.async_delay_save(lambda: data, 60) diff --git a/homeassistant/components/energy/strings.json 
b/homeassistant/components/energy/strings.json index 4a9c1b4aacf..e9d72247319 100644 --- a/homeassistant/components/energy/strings.json +++ b/homeassistant/components/energy/strings.json @@ -56,6 +56,10 @@ "entity_state_class_measurement_no_last_reset": { "title": "Last reset missing", "description": "The following entities have state class 'measurement' but 'last_reset' is missing:" + }, + "statistics_not_defined": { + "title": "Statistics not defined", + "description": "Some entities currently have no statistics metadata. If these are newly created, it may take up to 5 minutes for this to be generated for the following entities:" } } } diff --git a/homeassistant/components/energyzero/__init__.py b/homeassistant/components/energyzero/__init__.py index 3e1bb830cce..fc2855374dd 100644 --- a/homeassistant/components/energyzero/__init__.py +++ b/homeassistant/components/energyzero/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -10,10 +9,10 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .coordinator import EnergyZeroDataUpdateCoordinator +from .coordinator import EnergyZeroConfigEntry, EnergyZeroDataUpdateCoordinator from .services import async_setup_services -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -25,25 +24,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> bool: """Set up EnergyZero from a config entry.""" - coordinator = EnergyZeroDataUpdateCoordinator(hass) + coordinator = EnergyZeroDataUpdateCoordinator(hass, entry) try: await coordinator.async_config_entry_first_refresh() except ConfigEntryNotReady: await coordinator.energyzero.close() raise - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> bool: """Unload EnergyZero config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/energyzero/coordinator.py b/homeassistant/components/energyzero/coordinator.py index 65955b2ebe6..35054f7b3b7 100644 --- a/homeassistant/components/energyzero/coordinator.py +++ b/homeassistant/components/energyzero/coordinator.py @@ -21,6 +21,8 @@ from homeassistant.util import dt as dt_util from .const import DOMAIN, LOGGER, SCAN_INTERVAL, THRESHOLD_HOUR +type EnergyZeroConfigEntry = ConfigEntry[EnergyZeroDataUpdateCoordinator] + class EnergyZeroData(NamedTuple): """Class for defining data in dict.""" @@ -35,13 +37,14 @@ class EnergyZeroDataUpdateCoordinator(DataUpdateCoordinator[EnergyZeroData]): config_entry: ConfigEntry - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: 
HomeAssistant, entry: EnergyZeroConfigEntry) -> None: """Initialize global EnergyZero data updater.""" super().__init__( hass, LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL, + config_entry=entry, ) self.energyzero = EnergyZero(session=async_get_clientsession(hass)) diff --git a/homeassistant/components/energyzero/diagnostics.py b/homeassistant/components/energyzero/diagnostics.py index 35d20fee929..0a45d87fee5 100644 --- a/homeassistant/components/energyzero/diagnostics.py +++ b/homeassistant/components/energyzero/diagnostics.py @@ -5,12 +5,9 @@ from __future__ import annotations from datetime import timedelta from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import EnergyZeroDataUpdateCoordinator -from .const import DOMAIN -from .coordinator import EnergyZeroData +from .coordinator import EnergyZeroConfigEntry, EnergyZeroData def get_gas_price(data: EnergyZeroData, hours: int) -> float | None: @@ -32,30 +29,31 @@ def get_gas_price(data: EnergyZeroData, hours: int) -> float | None: async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: EnergyZeroConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator_data = entry.runtime_data.data + energy_today = coordinator_data.energy_today return { "entry": { "title": entry.title, }, "energy": { - "current_hour_price": coordinator.data.energy_today.current_price, - "next_hour_price": coordinator.data.energy_today.price_at_time( - coordinator.data.energy_today.utcnow() + timedelta(hours=1) + "current_hour_price": energy_today.current_price, + "next_hour_price": energy_today.price_at_time( + energy_today.utcnow() + timedelta(hours=1) ), - "average_price": coordinator.data.energy_today.average_price, - "max_price": coordinator.data.energy_today.extreme_prices[1], - "min_price": coordinator.data.energy_today.extreme_prices[0], - "highest_price_time": coordinator.data.energy_today.highest_price_time, - "lowest_price_time": coordinator.data.energy_today.lowest_price_time, - "percentage_of_max": coordinator.data.energy_today.pct_of_max_price, - "hours_priced_equal_or_lower": coordinator.data.energy_today.hours_priced_equal_or_lower, + "average_price": energy_today.average_price, + "max_price": energy_today.extreme_prices[1], + "min_price": energy_today.extreme_prices[0], + "highest_price_time": energy_today.highest_price_time, + "lowest_price_time": energy_today.lowest_price_time, + "percentage_of_max": energy_today.pct_of_max_price, + "hours_priced_equal_or_lower": energy_today.hours_priced_equal_or_lower, }, "gas": { - "current_hour_price": get_gas_price(coordinator.data, 0), - "next_hour_price": get_gas_price(coordinator.data, 1), + "current_hour_price": get_gas_price(coordinator_data, 0), + "next_hour_price": get_gas_price(coordinator_data, 1), }, } diff --git a/homeassistant/components/energyzero/icons.json b/homeassistant/components/energyzero/icons.json index bac061dd318..802f8ef6916 100644 --- a/homeassistant/components/energyzero/icons.json +++ b/homeassistant/components/energyzero/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "get_gas_prices": "mdi:gas-station", - "get_energy_prices": "mdi:lightning-bolt" + "get_gas_prices": { + "service": "mdi:gas-station" + }, + "get_energy_prices": { + "service": "mdi:lightning-bolt" + } } } diff --git 
a/homeassistant/components/energyzero/manifest.json b/homeassistant/components/energyzero/manifest.json index 807a0419967..bb867e88d85 100644 --- a/homeassistant/components/energyzero/manifest.json +++ b/homeassistant/components/energyzero/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/energyzero", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["energyzero==2.1.1"] } diff --git a/homeassistant/components/energyzero/sensor.py b/homeassistant/components/energyzero/sensor.py index f65f7bd559c..141ac793fba 100644 --- a/homeassistant/components/energyzero/sensor.py +++ b/homeassistant/components/energyzero/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CURRENCY_EURO, PERCENTAGE, @@ -27,7 +26,11 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, SERVICE_TYPE_DEVICE_NAMES -from .coordinator import EnergyZeroData, EnergyZeroDataUpdateCoordinator +from .coordinator import ( + EnergyZeroConfigEntry, + EnergyZeroData, + EnergyZeroDataUpdateCoordinator, +) @dataclass(frozen=True, kw_only=True) @@ -142,10 +145,12 @@ def get_gas_price(data: EnergyZeroData, hours: int) -> float | None: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: EnergyZeroConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up EnergyZero Sensors based on a config entry.""" - coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( EnergyZeroSensorEntity( coordinator=coordinator, diff --git a/homeassistant/components/energyzero/services.py b/homeassistant/components/energyzero/services.py index d98699c5c08..c47958b670f 100644 --- a/homeassistant/components/energyzero/services.py +++ b/homeassistant/components/energyzero/services.py @@ -10,7 +10,7 @@ from typing import Final from energyzero import Electricity, Gas, VatOption import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -23,7 +23,7 @@ from homeassistant.helpers import selector from homeassistant.util import dt as dt_util from .const import DOMAIN -from .coordinator import EnergyZeroDataUpdateCoordinator +from .coordinator import EnergyZeroConfigEntry, EnergyZeroDataUpdateCoordinator ATTR_CONFIG_ENTRY: Final = "config_entry" ATTR_START: Final = "start" @@ -83,12 +83,12 @@ def __serialize_prices(prices: Electricity | Gas) -> ServiceResponse: } -def __get_coordinator( - hass: HomeAssistant, call: ServiceCall -) -> EnergyZeroDataUpdateCoordinator: +def __get_coordinator(call: ServiceCall) -> EnergyZeroDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] - entry: ConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EnergyZeroConfigEntry | None = call.hass.config_entries.async_get_entry( + entry_id + ) if not entry: raise ServiceValidationError( @@ -107,17 +107,15 @@ def __get_coordinator( }, ) - coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry_id] - return 
coordinator + return entry.runtime_data async def __get_prices( call: ServiceCall, *, - hass: HomeAssistant, price_type: PriceType, ) -> ServiceResponse: - coordinator = __get_coordinator(hass, call) + coordinator = __get_coordinator(call) start = __get_date(call.data.get(ATTR_START)) end = __get_date(call.data.get(ATTR_END)) @@ -152,14 +150,14 @@ def async_setup_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, GAS_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.GAS), + partial(__get_prices, price_type=PriceType.GAS), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) hass.services.async_register( DOMAIN, ENERGY_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.ENERGY), + partial(__get_prices, price_type=PriceType.ENERGY), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) diff --git a/homeassistant/components/enigma2/config_flow.py b/homeassistant/components/enigma2/config_flow.py index 71c5830d550..b0649a8368d 100644 --- a/homeassistant/components/enigma2/config_flow.py +++ b/homeassistant/components/enigma2/config_flow.py @@ -22,10 +22,9 @@ from homeassistant.const import ( CONF_USERNAME, CONF_VERIFY_SSL, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, callback +from homeassistant.core import callback from homeassistant.helpers import selector from homeassistant.helpers.aiohttp_client import async_create_clientsession -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.schema_config_entry_flow import ( SchemaCommonFlowHandler, SchemaFlowFormStep, @@ -134,7 +133,8 @@ class Enigma2ConfigFlowHandler(ConfigFlow, domain=DOMAIN): except Exception: # noqa: BLE001 errors = {"base": "unknown"} else: - await self.async_set_unique_id(about["info"]["ifaces"][0]["mac"]) + unique_id = about["info"]["ifaces"][0]["mac"] or self.unique_id + await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() return errors @@ -152,54 +152,6 @@ class Enigma2ConfigFlowHandler(ConfigFlow, domain=DOMAIN): ) return self.async_create_entry(data=user_input, title=user_input[CONF_HOST]) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Handle the import step.""" - if CONF_PORT not in user_input: - user_input[CONF_PORT] = DEFAULT_PORT - if CONF_SSL not in user_input: - user_input[CONF_SSL] = DEFAULT_SSL - user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL - - data = {key: user_input[key] for key in user_input if key in self.DATA_KEYS} - options = { - key: user_input[key] for key in user_input if key in self.OPTIONS_KEYS - } - - if errors := await self.validate_user_input(user_input): - async_create_issue( - self.hass, - DOMAIN, - f"deprecated_yaml_{DOMAIN}_import_issue_{errors["base"]}", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{errors["base"]}", - translation_placeholders={ - "url": "/config/integrations/dashboard/add?domain=enigma2" - }, - ) - return self.async_abort(reason=errors["base"]) - - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Enigma2", - }, - ) - return 
self.async_create_entry( - data=data, title=data[CONF_HOST], options=options - ) - @staticmethod @callback def async_get_options_flow(config_entry: ConfigEntry) -> SchemaOptionsFlowHandler: diff --git a/homeassistant/components/enigma2/coordinator.py b/homeassistant/components/enigma2/coordinator.py index a35e74f582f..d5bbf2c0ce5 100644 --- a/homeassistant/components/enigma2/coordinator.py +++ b/homeassistant/components/enigma2/coordinator.py @@ -35,6 +35,7 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): """The Enigma2 data update coordinator.""" device: OpenWebIfDevice + unique_id: str | None def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize the Enigma2 data update coordinator.""" @@ -64,6 +65,10 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): name=config_entry.data[CONF_HOST], ) + # set the unique ID for the entities to the config entry unique ID + # for devices that don't report a MAC address + self.unique_id = config_entry.unique_id + async def _async_setup(self) -> None: """Provide needed data to the device info.""" @@ -71,16 +76,20 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): self.device.mac_address = about["info"]["ifaces"][0]["mac"] self.device_info["model"] = about["info"]["model"] self.device_info["manufacturer"] = about["info"]["brand"] - self.device_info[ATTR_IDENTIFIERS] = { - (DOMAIN, format_mac(iface["mac"])) - for iface in about["info"]["ifaces"] - if "mac" in iface and iface["mac"] is not None - } - self.device_info[ATTR_CONNECTIONS] = { - (CONNECTION_NETWORK_MAC, format_mac(iface["mac"])) - for iface in about["info"]["ifaces"] - if "mac" in iface and iface["mac"] is not None - } + if self.device.mac_address is not None: + self.device_info[ATTR_IDENTIFIERS] = { + (DOMAIN, format_mac(iface["mac"])) + for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None + } + self.device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, format_mac(iface["mac"])) + for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None + } + self.unique_id = self.device.mac_address + elif self.unique_id is not None: + self.device_info[ATTR_IDENTIFIERS] = {(DOMAIN, self.unique_id)} async def _async_update_data(self) -> OpenWebIfStatus: await self.device.update() diff --git a/homeassistant/components/enigma2/manifest.json b/homeassistant/components/enigma2/manifest.json index 1a0875b04c0..7d6887ad14c 100644 --- a/homeassistant/components/enigma2/manifest.json +++ b/homeassistant/components/enigma2/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["openwebif"], - "requirements": ["openwebifpy==4.2.7"] + "requirements": ["openwebifpy==4.3.0"] } diff --git a/homeassistant/components/enigma2/media_player.py b/homeassistant/components/enigma2/media_player.py index 927e35706ed..ee0de15c3fb 100644 --- a/homeassistant/components/enigma2/media_player.py +++ b/homeassistant/components/enigma2/media_player.py @@ -4,51 +4,21 @@ from __future__ import annotations import contextlib from logging import getLogger -from typing import cast from aiohttp.client_exceptions import ServerDisconnectedError from openwebif.enums import PowerState, RemoteControlCodes, SetVolumeOption -import voluptuous as vol from homeassistant.components.media_player import ( - PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, MediaType, ) -from 
homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PASSWORD, - CONF_PORT, - CONF_SSL, - CONF_USERNAME, -) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import Enigma2ConfigEntry -from .const import ( - CONF_DEEP_STANDBY, - CONF_MAC_ADDRESS, - CONF_SOURCE_BOUQUET, - CONF_USE_CHANNEL_ICON, - DEFAULT_DEEP_STANDBY, - DEFAULT_MAC_ADDRESS, - DEFAULT_NAME, - DEFAULT_PASSWORD, - DEFAULT_PORT, - DEFAULT_SOURCE_BOUQUET, - DEFAULT_SSL, - DEFAULT_USE_CHANNEL_ICON, - DEFAULT_USERNAME, - DOMAIN, -) from .coordinator import Enigma2UpdateCoordinator ATTR_MEDIA_CURRENTLY_RECORDING = "media_currently_recording" @@ -58,49 +28,6 @@ ATTR_MEDIA_START_TIME = "media_start_time" _LOGGER = getLogger(__name__) -PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_HOST): cv.string, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string, - vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string, - vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, - vol.Optional( - CONF_USE_CHANNEL_ICON, default=DEFAULT_USE_CHANNEL_ICON - ): cv.boolean, - vol.Optional(CONF_DEEP_STANDBY, default=DEFAULT_DEEP_STANDBY): cv.boolean, - vol.Optional(CONF_MAC_ADDRESS, default=DEFAULT_MAC_ADDRESS): cv.string, - vol.Optional(CONF_SOURCE_BOUQUET, default=DEFAULT_SOURCE_BOUQUET): cv.string, - } -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up of an enigma2 media player.""" - - entry_data = { - CONF_HOST: config[CONF_HOST], - CONF_PORT: config[CONF_PORT], - CONF_USERNAME: config[CONF_USERNAME], - CONF_PASSWORD: config[CONF_PASSWORD], - CONF_SSL: config[CONF_SSL], - CONF_USE_CHANNEL_ICON: config[CONF_USE_CHANNEL_ICON], - CONF_DEEP_STANDBY: config[CONF_DEEP_STANDBY], - CONF_SOURCE_BOUQUET: config[CONF_SOURCE_BOUQUET], - } - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=entry_data - ) - ) - async def async_setup_entry( hass: HomeAssistant, @@ -136,10 +63,7 @@ class Enigma2Device(CoordinatorEntity[Enigma2UpdateCoordinator], MediaPlayerEnti super().__init__(coordinator) - self._attr_unique_id = ( - coordinator.device.mac_address - or cast(ConfigEntry, coordinator.config_entry).entry_id - ) + self._attr_unique_id = coordinator.unique_id self._attr_device_info = coordinator.device_info diff --git a/homeassistant/components/enigma2/strings.json b/homeassistant/components/enigma2/strings.json index f74806b60a2..7a75136bdc2 100644 --- a/homeassistant/components/enigma2/strings.json +++ b/homeassistant/components/enigma2/strings.json @@ -39,19 +39,5 @@ } } } - }, - "issues": { - "deprecated_yaml_import_issue_unknown": { - "title": "The Enigma2 YAML configuration import failed", - "description": "Configuring Enigma2 using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure connection to the device works, the authentication details are correct and restart Home Assistant to 
try again or remove the Enigma2 YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_invalid_auth": { - "title": "The Enigma2 YAML configuration import failed", - "description": "Configuring Enigma2 using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure the authentication details are correct and restart Home Assistant to try again or remove the Enigma2 YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Enigma2 YAML configuration import failed", - "description": "Configuring Enigma2 using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure connection to the device works and restart Home Assistant to try again or remove the Enigma2 YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - } } } diff --git a/homeassistant/components/enocean/binary_sensor.py b/homeassistant/components/enocean/binary_sensor.py index 3ecf1ba4ba2..01e39f96510 100644 --- a/homeassistant/components/enocean/binary_sensor.py +++ b/homeassistant/components/enocean/binary_sensor.py @@ -17,7 +17,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .device import EnOceanEntity +from .entity import EnOceanEntity DEFAULT_NAME = "EnOcean binary sensor" DEPENDENCIES = ["enocean"] diff --git a/homeassistant/components/enocean/config_flow.py b/homeassistant/components/enocean/config_flow.py index b68026a34ba..2452d27b168 100644 --- a/homeassistant/components/enocean/config_flow.py +++ b/homeassistant/components/enocean/config_flow.py @@ -22,33 +22,32 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN): self.dongle_path = None self.discovery_info = None - async def async_step_import(self, data=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a yaml configuration.""" - if not await self.validate_enocean_conf(data): + if not await self.validate_enocean_conf(import_data): LOGGER.warning( "Cannot import yaml configuration: %s is not a valid dongle path", - data[CONF_DEVICE], + import_data[CONF_DEVICE], ) return self.async_abort(reason="invalid_dongle_path") - return self.create_enocean_entry(data) + return self.create_enocean_entry(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle an EnOcean config flow start.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return await self.async_step_detect() - async def async_step_detect(self, user_input=None): + async def async_step_detect( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Propose a list of detected dongles.""" errors = {} if user_input is not None: if user_input[CONF_DEVICE] == self.MANUAL_PATH_VALUE: - return await self.async_step_manual(None) + return await self.async_step_manual() if await self.validate_enocean_conf(user_input): return self.create_enocean_entry(user_input) errors = {CONF_DEVICE: ERROR_INVALID_DONGLE_PATH} @@ -64,7 +63,9 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_manual(self, user_input=None): + 
async def async_step_manual( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Request manual USB dongle path.""" default_value = None errors = {} diff --git a/homeassistant/components/enocean/device.py b/homeassistant/components/enocean/entity.py similarity index 100% rename from homeassistant/components/enocean/device.py rename to homeassistant/components/enocean/entity.py diff --git a/homeassistant/components/enocean/light.py b/homeassistant/components/enocean/light.py index 1e81e3cd089..aae84e73848 100644 --- a/homeassistant/components/enocean/light.py +++ b/homeassistant/components/enocean/light.py @@ -20,7 +20,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .device import EnOceanEntity +from .entity import EnOceanEntity CONF_SENDER_ID = "sender_id" diff --git a/homeassistant/components/enocean/manifest.json b/homeassistant/components/enocean/manifest.json index 495ab6618e3..2faba47e126 100644 --- a/homeassistant/components/enocean/manifest.json +++ b/homeassistant/components/enocean/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/enocean", "iot_class": "local_push", "loggers": ["enocean"], - "requirements": ["enocean==0.50"] + "requirements": ["enocean==0.50"], + "single_config_entry": true } diff --git a/homeassistant/components/enocean/sensor.py b/homeassistant/components/enocean/sensor.py index 177c95c2832..98e32ce1a4f 100644 --- a/homeassistant/components/enocean/sensor.py +++ b/homeassistant/components/enocean/sensor.py @@ -30,7 +30,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .device import EnOceanEntity +from .entity import EnOceanEntity CONF_MAX_TEMP = "max_temp" CONF_MIN_TEMP = "min_temp" diff --git a/homeassistant/components/enocean/strings.json b/homeassistant/components/enocean/strings.json index 97da526185f..9d9699481b1 100644 --- a/homeassistant/components/enocean/strings.json +++ b/homeassistant/components/enocean/strings.json @@ -18,8 +18,7 @@ "invalid_dongle_path": "No valid dongle found for this path" }, "abort": { - "invalid_dongle_path": "Invalid dongle path", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "invalid_dongle_path": "Invalid dongle path" } } } diff --git a/homeassistant/components/enocean/switch.py b/homeassistant/components/enocean/switch.py index 9bf8b8e775c..0259a60982f 100644 --- a/homeassistant/components/enocean/switch.py +++ b/homeassistant/components/enocean/switch.py @@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DOMAIN, LOGGER -from .device import EnOceanEntity +from .entity import EnOceanEntity CONF_CHANNEL = "channel" DEFAULT_NAME = "EnOcean Switch" diff --git a/homeassistant/components/enphase_envoy/__init__.py b/homeassistant/components/enphase_envoy/__init__.py index f6438230789..db36cab1288 100644 --- a/homeassistant/components/enphase_envoy/__init__.py +++ b/homeassistant/components/enphase_envoy/__init__.py @@ -2,15 +2,22 @@ from __future__ import annotations +import httpx from pyenphase import Envoy +from homeassistant.config_entries import ConfigEntry from homeassistant.const 
import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.httpx_client import get_async_client -from .const import DOMAIN, PLATFORMS +from .const import ( + DOMAIN, + OPTION_DISABLE_KEEP_ALIVE, + OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE, + PLATFORMS, +) from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator @@ -18,7 +25,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> b """Set up Enphase Envoy from a config entry.""" host = entry.data[CONF_HOST] - envoy = Envoy(host, get_async_client(hass, verify_ssl=False)) + options = entry.options + envoy = ( + Envoy( + host, + httpx.AsyncClient( + verify=False, limits=httpx.Limits(max_keepalive_connections=0) + ), + ) + if options.get( + OPTION_DISABLE_KEEP_ALIVE, OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE + ) + else Envoy(host, get_async_client(hass, verify_ssl=False)) + ) coordinator = EnphaseUpdateCoordinator(hass, envoy, entry) await coordinator.async_config_entry_first_refresh() @@ -40,9 +59,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> b await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + # Reload entry when it is updated. + entry.async_on_unload(entry.add_update_listener(async_reload_entry)) + return True +async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Reload the config entry when it changed.""" + await hass.config_entries.async_reload(entry.entry_id) + + async def async_unload_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> bool: """Unload a config entry.""" coordinator: EnphaseUpdateCoordinator = entry.runtime_data @@ -60,8 +87,16 @@ async def async_remove_config_entry_device( envoy_serial_num = config_entry.unique_id if envoy_serial_num in dev_ids: return False - if envoy_data and envoy_data.inverters: - for inverter in envoy_data.inverters: - if str(inverter) in dev_ids: + if envoy_data: + if envoy_data.inverters: + for inverter in envoy_data.inverters: + if str(inverter) in dev_ids: + return False + if envoy_data.encharge_inventory: + for encharge in envoy_data.encharge_inventory: + if str(encharge) in dev_ids: + return False + if envoy_data.enpower: + if str(envoy_data.enpower.serial_number) in dev_ids: return False return True diff --git a/homeassistant/components/enphase_envoy/binary_sensor.py b/homeassistant/components/enphase_envoy/binary_sensor.py index 6be29d19ecb..1ad6f259de1 100644 --- a/homeassistant/components/enphase_envoy/binary_sensor.py +++ b/homeassistant/components/enphase_envoy/binary_sensor.py @@ -22,6 +22,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class EnvoyEnchargeBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/enphase_envoy/config_flow.py b/homeassistant/components/enphase_envoy/config_flow.py index c18401859de..70ba3570e91 100644 --- a/homeassistant/components/enphase_envoy/config_flow.py +++ b/homeassistant/components/enphase_envoy/config_flow.py @@ -4,8 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from types import MappingProxyType -from typing import Any +from typing import TYPE_CHECKING, Any from awesomeversion import AwesomeVersion from pyenphase import 
AUTH_TOKEN_MIN_VERSION, Envoy, EnvoyError @@ -13,10 +12,11 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback @@ -28,7 +28,10 @@ from .const import ( INVALID_AUTH_ERRORS, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, + OPTION_DISABLE_KEEP_ALIVE, + OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE, ) +from .coordinator import EnphaseConfigEntry _LOGGER = logging.getLogger(__name__) @@ -54,18 +57,21 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + _reauth_entry: ConfigEntry + def __init__(self) -> None: """Initialize an envoy flow.""" self.ip_address: str | None = None self.username = None self.protovers: str | None = None - self._reauth_entry: ConfigEntry | None = None @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> EnvoyOptionsFlowHandler: + def async_get_options_flow( + config_entry: EnphaseConfigEntry, + ) -> EnvoyOptionsFlowHandler: """Options flow handler for Enphase_Envoy.""" - return EnvoyOptionsFlowHandler(config_entry) + return EnvoyOptionsFlowHandler() @callback def _async_generate_schema(self) -> vol.Schema: @@ -76,7 +82,7 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): schema[vol.Required(CONF_HOST, default=self.ip_address)] = vol.In( [self.ip_address] ) - elif not self._reauth_entry: + elif self.source != SOURCE_REAUTH: schema[vol.Required(CONF_HOST)] = str default_username = "" @@ -149,10 +155,7 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert self._reauth_entry is not None + self._reauth_entry = self._get_reauth_entry() if unique_id := self._reauth_entry.unique_id: await self.async_set_unique_id(unique_id, raise_on_progress=False) return await self.async_step_user() @@ -168,7 +171,7 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} description_placeholders: dict[str, str] = {} - if self._reauth_entry: + if self.source == SOURCE_REAUTH: host = self._reauth_entry.data[CONF_HOST] else: host = (user_input or {}).get(CONF_HOST) or self.ip_address or "" @@ -193,7 +196,7 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): else: name = self._async_envoy_name() - if self._reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( self._reauth_entry, data=self._reauth_entry.data | user_input, @@ -236,21 +239,14 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Add reconfigure step to allow to manually reconfigure a config entry.""" + reconfigure_entry = self._get_reconfigure_entry() errors: dict[str, str] = {} description_placeholders: dict[str, str] = {} - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - - suggested_values: dict[str, Any] | MappingProxyType[str, Any] = ( - user_input or entry.data - ) - - host: Any = suggested_values.get(CONF_HOST) - username: Any = suggested_values.get(CONF_USERNAME) - password: Any = suggested_values.get(CONF_PASSWORD) - if user_input is not None: + host: str = 
user_input[CONF_HOST] + username: str = user_input[CONF_USERNAME] + password: str = user_input[CONF_PASSWORD] try: envoy = await validate_input( self.hass, @@ -268,29 +264,23 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self.unique_id != envoy.serial_number: - errors["base"] = "unexpected_envoy" - description_placeholders = { - "reason": f"target: {self.unique_id}, actual: {envoy.serial_number}" - } - else: - # If envoy exists in configuration update fields and exit - self._abort_if_unique_id_configured( - { - CONF_HOST: host, - CONF_USERNAME: username, - CONF_PASSWORD: password, - }, - error="reconfigure_successful", - ) - if not self.unique_id: - await self.async_set_unique_id(entry.unique_id) + await self.async_set_unique_id(envoy.serial_number) + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={ + CONF_HOST: host, + CONF_USERNAME: username, + CONF_PASSWORD: password, + }, + ) self.context["title_placeholders"] = { - CONF_SERIAL: self.unique_id, - CONF_HOST: host, + CONF_SERIAL: reconfigure_entry.unique_id or "-", + CONF_HOST: reconfigure_entry.data[CONF_HOST], } + suggested_values: Mapping[str, Any] = user_input or reconfigure_entry.data return self.async_show_form( step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( @@ -301,7 +291,7 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): ) -class EnvoyOptionsFlowHandler(OptionsFlowWithConfigEntry): +class EnvoyOptionsFlowHandler(OptionsFlow): """Envoy config flow options handler.""" async def async_step_init( @@ -311,6 +301,9 @@ class EnvoyOptionsFlowHandler(OptionsFlowWithConfigEntry): if user_input is not None: return self.async_create_entry(title="", data=user_input) + if TYPE_CHECKING: + assert self.config_entry.unique_id is not None + return self.async_show_form( step_id="init", data_schema=vol.Schema( @@ -322,10 +315,17 @@ class EnvoyOptionsFlowHandler(OptionsFlowWithConfigEntry): OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, ), ): bool, + vol.Required( + OPTION_DISABLE_KEEP_ALIVE, + default=self.config_entry.options.get( + OPTION_DISABLE_KEEP_ALIVE, + OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE, + ), + ): bool, } ), description_placeholders={ CONF_SERIAL: self.config_entry.unique_id, - CONF_HOST: self.config_entry.data.get("host"), + CONF_HOST: self.config_entry.data[CONF_HOST], }, ) diff --git a/homeassistant/components/enphase_envoy/const.py b/homeassistant/components/enphase_envoy/const.py index 80ce8604f24..465b2f9d587 100644 --- a/homeassistant/components/enphase_envoy/const.py +++ b/homeassistant/components/enphase_envoy/const.py @@ -18,3 +18,6 @@ INVALID_AUTH_ERRORS = (EnvoyAuthenticationError, EnvoyAuthenticationRequired) OPTION_DIAGNOSTICS_INCLUDE_FIXTURES = "diagnostics_include_fixtures" OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE = False + +OPTION_DISABLE_KEEP_ALIVE = "disable_keep_alive" +OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE = False diff --git a/homeassistant/components/enphase_envoy/coordinator.py b/homeassistant/components/enphase_envoy/coordinator.py index e91e245658c..00bc7666f78 100644 --- a/homeassistant/components/enphase_envoy/coordinator.py +++ b/homeassistant/components/enphase_envoy/coordinator.py @@ -24,6 +24,7 @@ SCAN_INTERVAL = timedelta(seconds=60) TOKEN_REFRESH_CHECK_INTERVAL = timedelta(days=1) STALE_TOKEN_THRESHOLD = timedelta(days=30).total_seconds() +NOTIFICATION_ID = "enphase_envoy_notification" _LOGGER = 
logging.getLogger(__name__) @@ -35,6 +36,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """DataUpdateCoordinator to gather data from any envoy.""" envoy_serial_number: str + envoy_firmware: str def __init__( self, hass: HomeAssistant, envoy: Envoy, entry: EnphaseConfigEntry @@ -46,6 +48,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): self.username = entry_data[CONF_USERNAME] self.password = entry_data[CONF_PASSWORD] self._setup_complete = False + self.envoy_firmware = "" self._cancel_token_refresh: CALLBACK_TYPE | None = None super().__init__( hass, @@ -158,6 +161,24 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): raise ConfigEntryAuthFailed from err except EnvoyError as err: raise UpdateFailed(f"Error communicating with API: {err}") from err + + # if we have a firmware version from previous setup, compare to current one + # when envoy gets new firmware there will be an authentication failure + # which results in getting fw version again, if so reload the integration. + if (current_firmware := self.envoy_firmware) and current_firmware != ( + new_firmware := envoy.firmware + ): + _LOGGER.warning( + "Envoy firmware changed from: %s to: %s, reloading enphase envoy integration", + current_firmware, + new_firmware, + ) + # reload the integration to get all established again + self.hass.async_create_task( + self.hass.config_entries.async_reload(self.entry.entry_id) + ) + # remember firmware version for next time + self.envoy_firmware = envoy.firmware _LOGGER.debug("Envoy data: %s", envoy_data) return envoy_data.raw diff --git a/homeassistant/components/enphase_envoy/diagnostics.py b/homeassistant/components/enphase_envoy/diagnostics.py index b3323687e7c..d5b3880cf24 100644 --- a/homeassistant/components/enphase_envoy/diagnostics.py +++ b/homeassistant/components/enphase_envoy/diagnostics.py @@ -104,8 +104,12 @@ async def async_get_config_entry_diagnostics( if state := hass.states.get(entity.entity_id): state_dict = dict(state.as_dict()) state_dict.pop("context", None) - entities.append({"entity": asdict(entity), "state": state_dict}) - device_entities.append({"device": asdict(device), "entities": entities}) + entity_dict = asdict(entity) + entity_dict.pop("_cache", None) + entities.append({"entity": entity_dict, "state": state_dict}) + device_dict = asdict(device) + device_dict.pop("_cache", None) + device_entities.append({"device": device_dict, "entities": entities}) # remove envoy serial old_serial = coordinator.envoy_serial_number diff --git a/homeassistant/components/enphase_envoy/manifest.json b/homeassistant/components/enphase_envoy/manifest.json index aa06a1ff79f..bdc90e6c634 100644 --- a/homeassistant/components/enphase_envoy/manifest.json +++ b/homeassistant/components/enphase_envoy/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/enphase_envoy", "iot_class": "local_polling", "loggers": ["pyenphase"], - "requirements": ["pyenphase==1.22.0"], + "requirements": ["pyenphase==1.23.0"], "zeroconf": [ { "type": "_enphase-envoy._tcp.local." 
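
The coordinator hunk above reloads the config entry whenever the Envoy reports a different firmware version than the one remembered from the previous update cycle (a new firmware first surfaces as an authentication failure, which triggers a fresh probe). What follows is a minimal, standalone sketch of that check only, not the PR's code: schedule_reload is a stand-in for hass.async_create_task(hass.config_entries.async_reload(entry_id)), and the version strings are made up for illustration.

    from collections.abc import Callable

    def remember_firmware(
        stored_firmware: str,
        reported_firmware: str,
        schedule_reload: Callable[[], None],
    ) -> str:
        """Sketch of the firmware-change check from the coordinator diff above.

        If a version was stored on a previous update and the Envoy now reports
        a different one, schedule a reload of the config entry so everything is
        re-established against the new firmware. Always return the currently
        reported version so it can be compared on the next cycle.
        """
        if stored_firmware and stored_firmware != reported_firmware:
            # The real coordinator logs a warning here and wraps the reload in
            # an async task; this sketch just calls the injected callback.
            schedule_reload()
        return reported_firmware

    # First cycle stores the version; a later cycle detects a change and reloads.
    current = remember_firmware("", "7.6.175", lambda: None)
    current = remember_firmware(current, "8.2.127", lambda: print("reload scheduled"))

In the PR itself the comparison runs inside the coordinator's update method, after a successful update, and the stored version is refreshed on every cycle, so a single firmware change results in a single reload.
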
diff --git a/homeassistant/components/enphase_envoy/number.py b/homeassistant/components/enphase_envoy/number.py index 2c0708d9215..a62913a4c0b 100644 --- a/homeassistant/components/enphase_envoy/number.py +++ b/homeassistant/components/enphase_envoy/number.py @@ -25,6 +25,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class EnvoyRelayNumberEntityDescription(NumberEntityDescription): @@ -88,7 +90,6 @@ async def async_setup_entry( envoy_data.tariff and envoy_data.tariff.storage_settings and coordinator.envoy.supported_features & SupportedFeatures.ENCHARGE - and coordinator.envoy.supported_features & SupportedFeatures.ENPOWER ): entities.append( EnvoyStorageSettingsNumberEntity(coordinator, STORAGE_RESERVE_SOC_ENTITY) @@ -152,18 +153,30 @@ class EnvoyStorageSettingsNumberEntity(EnvoyBaseEntity, NumberEntity): """Initialize the Enphase relay number entity.""" super().__init__(coordinator, description) self.envoy = coordinator.envoy - assert self.data.enpower is not None - enpower = self.data.enpower - self._serial_number = enpower.serial_number - self._attr_unique_id = f"{self._serial_number}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._serial_number)}, - manufacturer="Enphase", - model="Enpower", - name=f"Enpower {self._serial_number}", - sw_version=str(enpower.firmware_version), - via_device=(DOMAIN, self.envoy_serial_num), - ) + assert self.data is not None + if enpower := self.data.enpower: + self._serial_number = enpower.serial_number + self._attr_unique_id = f"{self._serial_number}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._serial_number)}, + manufacturer="Enphase", + model="Enpower", + name=f"Enpower {self._serial_number}", + sw_version=str(enpower.firmware_version), + via_device=(DOMAIN, self.envoy_serial_num), + ) + else: + # If no enpower device assign numbers to Envoy itself + self._attr_unique_id = f"{self.envoy_serial_num}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.envoy_serial_num)}, + manufacturer="Enphase", + model=coordinator.envoy.envoy_model, + name=coordinator.name, + sw_version=str(coordinator.envoy.firmware), + hw_version=coordinator.envoy.part_number, + serial_number=self.envoy_serial_num, + ) @property def native_value(self) -> float: diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml new file mode 100644 index 00000000000..c4077b8df67 --- /dev/null +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -0,0 +1,124 @@ +rules: + # Bronze + action-setup: + status: done + comment: only actions implemented are platform native ones. 
+ appropriate-polling: + status: done + comment: fixed 1 minute cycle based on Enphase Envoy device characteristics + brands: done + common-modules: + status: done + comment: | + In coordinator.py, you set self.entry = entry, while after the super constructor, + you can access the entry via self.config_entry (you would have to overwrite the + type to make sure you don't have to assert not None every time)done + config-flow-test-coverage: + status: todo + comment: | + - test_form is missing an assertion for the unique id of the resulting entry + - Let's also have test_user_no_serial_number assert the unique_id (as in, it can't be set to the serial_number since we dont have one, so let's assert what it will result in) + - Let's have every test result in either CREATE_ENTRY or ABORT (like test_form_invalid_auth or test_form_cannot_connect, they can be parametrized) + - test_zeroconf_token_firmware and test_zeroconf_pre_token_firmware can also be parametrized I think + - test_zero_conf_malformed_serial_property - with pytest.raises(KeyError) as ex:: + I don't believe this should be able to raise a KeyError Shouldn't we abort the flow? + test_reauth -> Let's also assert result before we start with the async_configure part + config-flow: + status: todo + comment: | + - async_step_zeroconf -> a config entry title is considered userland, + so if someone renamed their entry, it will be reverted back with the code at L146. + - async_step_reaut L160: I believe that the unique is already set when starting a reauth flow + - The config flow is missing data descriptions for the other fields + dependency-transparency: done + docs-actions: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy/#actions + docs-high-level-description: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy + docs-installation-instructions: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#prerequisites + docs-removal-instructions: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#removing-the-integration + entity-event-setup: + status: done + comment: no events used. + entity-unique-id: done + has-entity-name: done + runtime-data: + status: done + comment: | + async_unload_entry- coordinator: EnphaseUpdateCoordinator = entry.runtime_data + You can remove the EnphaseUpdateCoordinator as the type can now be inferred thanks to the typed config entry + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: todo + comment: | + needs to raise appropriate error when exception occurs. 
+ Pending https://github.com/pyenphase/pyenphase/pull/194 + config-entry-unloading: done + docs-configuration-parameters: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#configuration + docs-installation-parameters: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#required-manual-input + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: done + comment: pending https://github.com/home-assistant/core/pull/132373 + reauthentication-flow: done + test-coverage: + status: todo + comment: | + - test_config_different_unique_id -> unique_id set to the mock config entry is an int, not a str + - Apart from the coverage, test_option_change_reload does not verify that the config entry is reloaded + + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#data-updates + docs-examples: + status: todo + comment: add blue-print examples, if any + docs-known-limitations: todo + docs-supported-devices: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#supported-devices + docs-supported-functions: todo + docs-troubleshooting: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#troubleshooting + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: + status: todo + comment: pending https://github.com/home-assistant/core/pull/132483 + icon-translations: todo + reconfiguration-flow: done + repair-issues: + status: exempt + comment: no general issues or repair.py + stale-devices: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/enphase_envoy/select.py b/homeassistant/components/enphase_envoy/select.py index 78ebaa26d13..d9729a16683 100644 --- a/homeassistant/components/enphase_envoy/select.py +++ b/homeassistant/components/enphase_envoy/select.py @@ -20,6 +20,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class EnvoyRelaySelectEntityDescription(SelectEntityDescription): @@ -143,7 +145,6 @@ async def async_setup_entry( envoy_data.tariff and envoy_data.tariff.storage_settings and coordinator.envoy.supported_features & SupportedFeatures.ENCHARGE - and coordinator.envoy.supported_features & SupportedFeatures.ENPOWER ): entities.append( EnvoyStorageSettingsSelectEntity(coordinator, STORAGE_MODE_ENTITY) @@ -209,18 +210,29 @@ class EnvoyStorageSettingsSelectEntity(EnvoyBaseEntity, SelectEntity): super().__init__(coordinator, description) self.envoy = coordinator.envoy assert coordinator.envoy.data is not None - assert coordinator.envoy.data.enpower is not None - enpower = coordinator.envoy.data.enpower - self._serial_number = enpower.serial_number - self._attr_unique_id = f"{self._serial_number}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._serial_number)}, - manufacturer="Enphase", - model="Enpower", - name=f"Enpower {self._serial_number}", - sw_version=str(enpower.firmware_version), - via_device=(DOMAIN, self.envoy_serial_num), - ) + if enpower := coordinator.envoy.data.enpower: + 
self._serial_number = enpower.serial_number + self._attr_unique_id = f"{self._serial_number}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._serial_number)}, + manufacturer="Enphase", + model="Enpower", + name=f"Enpower {self._serial_number}", + sw_version=str(enpower.firmware_version), + via_device=(DOMAIN, self.envoy_serial_num), + ) + else: + # If no enpower device assign selects to Envoy itself + self._attr_unique_id = f"{self.envoy_serial_num}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.envoy_serial_num)}, + manufacturer="Enphase", + model=coordinator.envoy.envoy_model, + name=coordinator.name, + sw_version=str(coordinator.envoy.firmware), + hw_version=coordinator.envoy.part_number, + serial_number=self.envoy_serial_num, + ) @property def current_option(self) -> str: diff --git a/homeassistant/components/enphase_envoy/sensor.py b/homeassistant/components/enphase_envoy/sensor.py index e6c7a585eb7..fadbf191840 100644 --- a/homeassistant/components/enphase_envoy/sensor.py +++ b/homeassistant/components/enphase_envoy/sensor.py @@ -36,6 +36,7 @@ from homeassistant.components.sensor import ( from homeassistant.const import ( PERCENTAGE, UnitOfApparentPower, + UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, UnitOfFrequency, @@ -58,6 +59,8 @@ _LOGGER = logging.getLogger(__name__) INVERTERS_KEY = "inverters" LAST_REPORTED_KEY = "last_reported" +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class EnvoyInverterSensorEntityDescription(SensorEntityDescription): @@ -227,6 +230,50 @@ CONSUMPTION_PHASE_SENSORS = { } +NET_CONSUMPTION_SENSORS = ( + EnvoyConsumptionSensorEntityDescription( + key="balanced_net_consumption", + translation_key="balanced_net_consumption", + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.POWER, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + suggested_display_precision=3, + value_fn=attrgetter("watts_now"), + on_phase=None, + ), + EnvoyConsumptionSensorEntityDescription( + key="lifetime_balanced_net_consumption", + translation_key="lifetime_balanced_net_consumption", + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.ENERGY, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + suggested_display_precision=3, + value_fn=attrgetter("watt_hours_lifetime"), + on_phase=None, + ), +) + + +NET_CONSUMPTION_PHASE_SENSORS = { + (on_phase := PHASENAMES[phase]): [ + replace( + sensor, + key=f"{sensor.key}_l{phase + 1}", + translation_key=f"{sensor.translation_key}_phase", + entity_registry_enabled_default=False, + on_phase=on_phase, + translation_placeholders={"phase_name": f"l{phase + 1}"}, + ) + for sensor in list(NET_CONSUMPTION_SENSORS) + ] + for phase in range(3) +} + + @dataclass(frozen=True, kw_only=True) class EnvoyCTSensorEntityDescription(SensorEntityDescription): """Describes an Envoy CT sensor entity.""" @@ -295,6 +342,28 @@ CT_NET_CONSUMPTION_SENSORS = ( value_fn=attrgetter("voltage"), on_phase=None, ), + EnvoyCTSensorEntityDescription( + key="net_ct_current", + translation_key="net_ct_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CURRENT, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + 
suggested_display_precision=3, + entity_registry_enabled_default=False, + value_fn=attrgetter("current"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + key="net_ct_powerfactor", + translation_key="net_ct_powerfactor", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + value_fn=attrgetter("power_factor"), + on_phase=None, + ), EnvoyCTSensorEntityDescription( key="net_consumption_ct_metering_status", translation_key="net_ct_metering_status", @@ -331,6 +400,51 @@ CT_NET_CONSUMPTION_PHASE_SENSORS = { } CT_PRODUCTION_SENSORS = ( + EnvoyCTSensorEntityDescription( + key="production_ct_frequency", + translation_key="production_ct_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.FREQUENCY, + suggested_display_precision=1, + entity_registry_enabled_default=False, + value_fn=attrgetter("frequency"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + key="production_ct_voltage", + translation_key="production_ct_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.VOLTAGE, + suggested_unit_of_measurement=UnitOfElectricPotential.VOLT, + suggested_display_precision=1, + entity_registry_enabled_default=False, + value_fn=attrgetter("voltage"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + key="production_ct_current", + translation_key="production_ct_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CURRENT, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + suggested_display_precision=3, + entity_registry_enabled_default=False, + value_fn=attrgetter("current"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + key="production_ct_powerfactor", + translation_key="production_ct_powerfactor", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + value_fn=attrgetter("power_factor"), + on_phase=None, + ), EnvoyCTSensorEntityDescription( key="production_ct_metering_status", translation_key="production_ct_metering_status", @@ -399,6 +513,17 @@ CT_STORAGE_SENSORS = ( value_fn=attrgetter("active_power"), on_phase=None, ), + EnvoyCTSensorEntityDescription( + key="storage_ct_frequency", + translation_key="storage_ct_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.FREQUENCY, + suggested_display_precision=1, + entity_registry_enabled_default=False, + value_fn=attrgetter("frequency"), + on_phase=None, + ), EnvoyCTSensorEntityDescription( key="storage_voltage", translation_key="storage_ct_voltage", @@ -411,6 +536,28 @@ CT_STORAGE_SENSORS = ( value_fn=attrgetter("voltage"), on_phase=None, ), + EnvoyCTSensorEntityDescription( + key="storage_ct_current", + translation_key="storage_ct_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CURRENT, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + suggested_display_precision=3, + entity_registry_enabled_default=False, + value_fn=attrgetter("current"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + 
key="storage_ct_powerfactor", + translation_key="storage_ct_powerfactor", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + value_fn=attrgetter("power_factor"), + on_phase=None, + ), EnvoyCTSensorEntityDescription( key="storage_ct_metering_status", translation_key="storage_ct_metering_status", @@ -596,6 +743,11 @@ async def async_setup_entry( EnvoyConsumptionEntity(coordinator, description) for description in CONSUMPTION_SENSORS ) + if envoy_data.system_net_consumption: + entities.extend( + EnvoyNetConsumptionEntity(coordinator, description) + for description in NET_CONSUMPTION_SENSORS + ) # For each production phase reported add production entities if envoy_data.system_production_phases: entities.extend( @@ -612,6 +764,14 @@ async def async_setup_entry( for description in CONSUMPTION_PHASE_SENSORS[use_phase] if phase is not None ) + # For each net_consumption phase reported add consumption entities + if envoy_data.system_net_consumption_phases: + entities.extend( + EnvoyNetConsumptionPhaseEntity(coordinator, description) + for use_phase, phase in envoy_data.system_net_consumption_phases.items() + for description in NET_CONSUMPTION_PHASE_SENSORS[use_phase] + if phase is not None + ) # Add net consumption CT entities if ctmeter := envoy_data.ctmeter_consumption: entities.extend( @@ -745,6 +905,19 @@ class EnvoyConsumptionEntity(EnvoySystemSensorEntity): return self.entity_description.value_fn(system_consumption) +class EnvoyNetConsumptionEntity(EnvoySystemSensorEntity): + """Envoy consumption entity.""" + + entity_description: EnvoyConsumptionSensorEntityDescription + + @property + def native_value(self) -> int | None: + """Return the state of the sensor.""" + system_net_consumption = self.data.system_net_consumption + assert system_net_consumption is not None + return self.entity_description.value_fn(system_net_consumption) + + class EnvoyProductionPhaseEntity(EnvoySystemSensorEntity): """Envoy phase production entity.""" @@ -787,6 +960,27 @@ class EnvoyConsumptionPhaseEntity(EnvoySystemSensorEntity): return self.entity_description.value_fn(system_consumption) +class EnvoyNetConsumptionPhaseEntity(EnvoySystemSensorEntity): + """Envoy phase consumption entity.""" + + entity_description: EnvoyConsumptionSensorEntityDescription + + @property + def native_value(self) -> int | None: + """Return the state of the sensor.""" + if TYPE_CHECKING: + assert self.entity_description.on_phase + assert self.data.system_net_consumption_phases + + if ( + system_net_consumption := self.data.system_net_consumption_phases[ + self.entity_description.on_phase + ] + ) is None: + return None + return self.entity_description.value_fn(system_net_consumption) + + class EnvoyConsumptionCTEntity(EnvoySystemSensorEntity): """Envoy net consumption CT entity.""" diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index f7964bf2f45..2d91b3b0960 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -28,12 +28,13 @@ "error": { "cannot_connect": "Cannot connect: {reason}", "invalid_auth": "Invalid authentication: {reason}", - "unexpected_envoy": "Unexpected Envoy: {reason}", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reauth_successful": 
"[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The serial number of the device does not match the previous serial number" } }, "options": { @@ -41,7 +42,8 @@ "init": { "title": "Envoy {serial} {host} options", "data": { - "diagnostics_include_fixtures": "Include test fixture data in diagnostic report. Use when requested to provide test data for troubleshooting or development activies. With this option enabled the diagnostic report may take more time to download. When report is created best disable this option again." + "diagnostics_include_fixtures": "Include test fixture data in diagnostic report. Use when requested to provide test data for troubleshooting or development activies. With this option enabled the diagnostic report may take more time to download. When report is created best disable this option again.", + "disable_keep_alive": "Always use a new connection when requesting data from the Envoy. May resolve communication issues with some Envoy firmwares." } } } @@ -165,6 +167,18 @@ "lifetime_consumption_phase": { "name": "Lifetime energy consumption {phase_name}" }, + "balanced_net_consumption": { + "name": "balanced net power consumption" + }, + "lifetime_balanced_net_consumption": { + "name": "Lifetime balanced net energy consumption" + }, + "balanced_net_consumption_phase": { + "name": "balanced net power consumption {phase_name}" + }, + "lifetime_balanced_net_consumption_phase": { + "name": "Lifetime balanced net energy consumption {phase_name}" + }, "lifetime_net_consumption": { "name": "Lifetime net energy consumption" }, @@ -180,12 +194,30 @@ "net_ct_voltage": { "name": "Voltage net consumption CT" }, + "net_ct_current": { + "name": "Net consumption CT current" + }, + "net_ct_powerfactor": { + "name": "Powerfactor net consumption CT" + }, "net_ct_metering_status": { "name": "Metering status net consumption CT" }, "net_ct_status_flags": { "name": "Meter status flags active net consumption CT" }, + "production_ct_frequency": { + "name": "Frequency production CT" + }, + "production_ct_voltage": { + "name": "Voltage production CT" + }, + "production_ct_current": { + "name": "Production CT current" + }, + "production_ct_powerfactor": { + "name": "powerfactor production CT" + }, "production_ct_metering_status": { "name": "Metering status production CT" }, @@ -201,9 +233,18 @@ "battery_discharge": { "name": "Current battery discharge" }, + "storage_ct_frequency": { + "name": "Frequency storage CT" + }, "storage_ct_voltage": { "name": "Voltage storage CT" }, + "storage_ct_current": { + "name": "Storage CT current" + }, + "storage_ct_powerfactor": { + "name": "Powerfactor storage CT" + }, "storage_ct_metering_status": { "name": "Metering status storage CT" }, @@ -225,12 +266,30 @@ "net_ct_voltage_phase": { "name": "Voltage net consumption CT {phase_name}" }, + "net_ct_current_phase": { + "name": "Net consumption CT current {phase_name}" + }, + "net_ct_powerfactor_phase": { + "name": "Powerfactor net consumption CT {phase_name}" + }, "net_ct_metering_status_phase": { "name": "Metering status net consumption CT {phase_name}" }, "net_ct_status_flags_phase": { "name": "Meter status flags active net consumption CT {phase_name}" }, + "production_ct_frequency_phase": { + "name": "Frequency production CT {phase_name}" + }, + "production_ct_voltage_phase": { + "name": "Voltage production CT 
{phase_name}" + }, + "production_ct_current_phase": { + "name": "Production CT current {phase_name}" + }, + "production_ct_powerfactor_phase": { + "name": "Powerfactor production CT {phase_name}" + }, "production_ct_metering_status_phase": { "name": "Metering status production CT {phase_name}" }, @@ -246,9 +305,18 @@ "battery_discharge_phase": { "name": "Current battery discharge {phase_name}" }, + "storage_ct_frequency_phase": { + "name": "Frequency storage CT {phase_name}" + }, "storage_ct_voltage_phase": { "name": "Voltage storage CT {phase_name}" }, + "storage_ct_current_phase": { + "name": "Storage CT current {phase_name}" + }, + "storage_ct_powerfactor_phase": { + "name": "Powerfactor storage CT {phase_name}" + }, "storage_ct_metering_status_phase": { "name": "Metering status storage CT {phase_name}" }, diff --git a/homeassistant/components/enphase_envoy/switch.py b/homeassistant/components/enphase_envoy/switch.py index 09711cd5908..5170b694587 100644 --- a/homeassistant/components/enphase_envoy/switch.py +++ b/homeassistant/components/enphase_envoy/switch.py @@ -20,6 +20,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class EnvoyEnpowerSwitchEntityDescription(SwitchEntityDescription): @@ -98,8 +100,7 @@ async def async_setup_entry( ) if ( - envoy_data.enpower - and envoy_data.tariff + envoy_data.tariff and envoy_data.tariff.storage_settings and (coordinator.envoy.supported_features & SupportedFeatures.ENCHARGE) ): @@ -213,22 +214,35 @@ class EnvoyStorageSettingsSwitchEntity(EnvoyBaseEntity, SwitchEntity): self, coordinator: EnphaseUpdateCoordinator, description: EnvoyStorageSettingsSwitchEntityDescription, - enpower: EnvoyEnpower, + enpower: EnvoyEnpower | None, ) -> None: """Initialize the Enphase storage settings switch entity.""" super().__init__(coordinator, description) self.envoy = coordinator.envoy self.enpower = enpower - self._serial_number = enpower.serial_number - self._attr_unique_id = f"{self._serial_number}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._serial_number)}, - manufacturer="Enphase", - model="Enpower", - name=f"Enpower {self._serial_number}", - sw_version=str(enpower.firmware_version), - via_device=(DOMAIN, self.envoy_serial_num), - ) + if enpower: + self._serial_number = enpower.serial_number + self._attr_unique_id = f"{self._serial_number}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._serial_number)}, + manufacturer="Enphase", + model="Enpower", + name=f"Enpower {self._serial_number}", + sw_version=str(enpower.firmware_version), + via_device=(DOMAIN, self.envoy_serial_num), + ) + else: + # If no enpower device assign switches to Envoy itself + self._attr_unique_id = f"{self.envoy_serial_num}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.envoy_serial_num)}, + manufacturer="Enphase", + model=coordinator.envoy.envoy_model, + name=coordinator.name, + sw_version=str(coordinator.envoy.firmware), + hw_version=coordinator.envoy.part_number, + serial_number=self.envoy_serial_num, + ) @property def is_on(self) -> bool: diff --git a/homeassistant/components/entur_public_transport/manifest.json b/homeassistant/components/entur_public_transport/manifest.json index f75099c2c27..5e25eb4b4a7 100644 --- a/homeassistant/components/entur_public_transport/manifest.json +++ 
b/homeassistant/components/entur_public_transport/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/entur_public_transport", "iot_class": "cloud_polling", "loggers": ["enturclient"], + "quality_scale": "legacy", "requirements": ["enturclient==0.2.4"] } diff --git a/homeassistant/components/environment_canada/icons.json b/homeassistant/components/environment_canada/icons.json index 5e23a96bcfb..c3562ce1840 100644 --- a/homeassistant/components/environment_canada/icons.json +++ b/homeassistant/components/environment_canada/icons.json @@ -19,6 +19,8 @@ } }, "services": { - "set_radar_type": "mdi:radar" + "set_radar_type": { + "service": "mdi:radar" + } } } diff --git a/homeassistant/components/envisalink/__init__.py b/homeassistant/components/envisalink/__init__.py index 65fdc1b5c63..0146b650c22 100644 --- a/homeassistant/components/envisalink/__init__.py +++ b/homeassistant/components/envisalink/__init__.py @@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType _LOGGER = logging.getLogger(__name__) @@ -160,7 +159,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @callback def async_connection_success_callback(data): """Handle a successful connection.""" - _LOGGER.info("Established a connection with the Envisalink") + _LOGGER.debug("Established a connection with the Envisalink") if not sync_connect.done(): hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_envisalink) sync_connect.set_result(True) @@ -186,7 +185,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @callback def stop_envisalink(event): """Shutdown envisalink connection and thread on exit.""" - _LOGGER.info("Shutting down Envisalink") + _LOGGER.debug("Shutting down Envisalink") controller.stop() async def handle_custom_function(call: ServiceCall) -> None: @@ -203,7 +202,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: controller.callback_login_timeout = async_connection_fail_callback controller.callback_login_success = async_connection_success_callback - _LOGGER.info("Start envisalink") + _LOGGER.debug("Start envisalink") controller.start() if not await sync_connect: @@ -244,20 +243,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) return True - - -class EnvisalinkDevice(Entity): - """Representation of an Envisalink device.""" - - _attr_should_poll = False - - def __init__(self, name, info, controller): - """Initialize the device.""" - self._controller = controller - self._info = info - self._name = name - - @property - def name(self): - """Return the name of the device.""" - return self._name diff --git a/homeassistant/components/envisalink/alarm_control_panel.py b/homeassistant/components/envisalink/alarm_control_panel.py index d4bbe174f20..ce65178b8d8 100644 --- a/homeassistant/components/envisalink/alarm_control_panel.py +++ b/homeassistant/components/envisalink/alarm_control_panel.py @@ -9,20 +9,10 @@ import voluptuous as vol from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_CODE, - 
STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, - STATE_UNKNOWN, -) +from homeassistant.const import ATTR_ENTITY_ID, CONF_CODE from homeassistant.core import HomeAssistant, ServiceCall, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -37,8 +27,8 @@ from . import ( PARTITION_SCHEMA, SIGNAL_KEYPAD_UPDATE, SIGNAL_PARTITION_UPDATE, - EnvisalinkDevice, ) +from .entity import EnvisalinkEntity _LOGGER = logging.getLogger(__name__) @@ -102,7 +92,7 @@ async def async_setup_platform( ) -class EnvisalinkAlarm(EnvisalinkDevice, AlarmControlPanelEntity): +class EnvisalinkAlarm(EnvisalinkEntity, AlarmControlPanelEntity): """Representation of an Envisalink-based alarm panel.""" _attr_supported_features = ( @@ -119,7 +109,7 @@ class EnvisalinkAlarm(EnvisalinkDevice, AlarmControlPanelEntity): self._partition_number = partition_number self._panic_type = panic_type self._alarm_control_panel_option_default_code = code - self._attr_code_format = CodeFormat.NUMBER + self._attr_code_format = CodeFormat.NUMBER if not code else None _LOGGER.debug("Setting up alarm: %s", alarm_name) super().__init__(alarm_name, info, controller) @@ -144,24 +134,24 @@ class EnvisalinkAlarm(EnvisalinkDevice, AlarmControlPanelEntity): self.async_write_ha_state() @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" - state = STATE_UNKNOWN + state = None if self._info["status"]["alarm"]: - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED elif self._info["status"]["armed_zero_entry_delay"]: - state = STATE_ALARM_ARMED_NIGHT + state = AlarmControlPanelState.ARMED_NIGHT elif self._info["status"]["armed_away"]: - state = STATE_ALARM_ARMED_AWAY + state = AlarmControlPanelState.ARMED_AWAY elif self._info["status"]["armed_stay"]: - state = STATE_ALARM_ARMED_HOME + state = AlarmControlPanelState.ARMED_HOME elif self._info["status"]["exit_delay"]: - state = STATE_ALARM_ARMING + state = AlarmControlPanelState.ARMING elif self._info["status"]["entry_delay"]: - state = STATE_ALARM_PENDING + state = AlarmControlPanelState.PENDING elif self._info["status"]["alpha"]: - state = STATE_ALARM_DISARMED + state = AlarmControlPanelState.DISARMED return state async def async_alarm_disarm(self, code: str | None = None) -> None: diff --git a/homeassistant/components/envisalink/binary_sensor.py b/homeassistant/components/envisalink/binary_sensor.py index 9c0909539bb..6c4e2b528e9 100644 --- a/homeassistant/components/envisalink/binary_sensor.py +++ b/homeassistant/components/envisalink/binary_sensor.py @@ -13,14 +13,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import dt as dt_util -from . import ( - CONF_ZONENAME, - CONF_ZONETYPE, - DATA_EVL, - SIGNAL_ZONE_UPDATE, - ZONE_SCHEMA, - EnvisalinkDevice, -) +from . 
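The alarm_state property above replaces the removed STATE_ALARM_* constants with a single enum-valued state derived from the partition's status flags, checked in priority order. Below is a minimal standalone sketch of that flag-to-enum mapping; PanelState and the flag names are stand-ins chosen to mirror the hunk, and Home Assistant's AlarmControlPanelState plays that role in the real code.

# Standalone sketch of mapping Envisalink-style status flags to one alarm state.
# PanelState is a stand-in enum, not Home Assistant's AlarmControlPanelState.
from enum import StrEnum


class PanelState(StrEnum):
    TRIGGERED = "triggered"
    ARMED_NIGHT = "armed_night"
    ARMED_AWAY = "armed_away"
    ARMED_HOME = "armed_home"
    ARMING = "arming"
    PENDING = "pending"
    DISARMED = "disarmed"


def panel_state(status: dict[str, bool]) -> PanelState | None:
    """Return the first state whose flag is set, mirroring the priority order above."""
    checks = (
        ("alarm", PanelState.TRIGGERED),
        ("armed_zero_entry_delay", PanelState.ARMED_NIGHT),
        ("armed_away", PanelState.ARMED_AWAY),
        ("armed_stay", PanelState.ARMED_HOME),
        ("exit_delay", PanelState.ARMING),
        ("entry_delay", PanelState.PENDING),
        ("alpha", PanelState.DISARMED),
    )
    return next((state for flag, state in checks if status.get(flag)), None)


print(panel_state({"armed_away": True}))  # prints: armed_away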
import CONF_ZONENAME, CONF_ZONETYPE, DATA_EVL, SIGNAL_ZONE_UPDATE, ZONE_SCHEMA +from .entity import EnvisalinkEntity _LOGGER = logging.getLogger(__name__) @@ -52,7 +46,7 @@ async def async_setup_platform( async_add_entities(entities) -class EnvisalinkBinarySensor(EnvisalinkDevice, BinarySensorEntity): +class EnvisalinkBinarySensor(EnvisalinkEntity, BinarySensorEntity): """Representation of an Envisalink binary sensor.""" def __init__(self, hass, zone_number, zone_name, zone_type, info, controller): diff --git a/homeassistant/components/envisalink/entity.py b/homeassistant/components/envisalink/entity.py new file mode 100644 index 00000000000..a686ed2e3cb --- /dev/null +++ b/homeassistant/components/envisalink/entity.py @@ -0,0 +1,20 @@ +"""Support for Envisalink devices.""" + +from homeassistant.helpers.entity import Entity + + +class EnvisalinkEntity(Entity): + """Representation of an Envisalink device.""" + + _attr_should_poll = False + + def __init__(self, name, info, controller): + """Initialize the device.""" + self._controller = controller + self._info = info + self._name = name + + @property + def name(self): + """Return the name of the device.""" + return self._name diff --git a/homeassistant/components/envisalink/icons.json b/homeassistant/components/envisalink/icons.json index 20696067f76..b25e988f478 100644 --- a/homeassistant/components/envisalink/icons.json +++ b/homeassistant/components/envisalink/icons.json @@ -1,6 +1,10 @@ { "services": { - "alarm_keypress": "mdi:alarm-panel", - "invoke_custom_function": "mdi:console" + "alarm_keypress": { + "service": "mdi:alarm-panel" + }, + "invoke_custom_function": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/envisalink/manifest.json b/homeassistant/components/envisalink/manifest.json index 0cf9f165aa2..42587aa7c2f 100644 --- a/homeassistant/components/envisalink/manifest.json +++ b/homeassistant/components/envisalink/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/envisalink", "iot_class": "local_push", "loggers": ["pyenvisalink"], + "quality_scale": "legacy", "requirements": ["pyenvisalink==4.7"] } diff --git a/homeassistant/components/envisalink/sensor.py b/homeassistant/components/envisalink/sensor.py index fcafc23dd37..70d471a685c 100644 --- a/homeassistant/components/envisalink/sensor.py +++ b/homeassistant/components/envisalink/sensor.py @@ -16,8 +16,8 @@ from . import ( PARTITION_SCHEMA, SIGNAL_KEYPAD_UPDATE, SIGNAL_PARTITION_UPDATE, - EnvisalinkDevice, ) +from .entity import EnvisalinkEntity _LOGGER = logging.getLogger(__name__) @@ -49,7 +49,7 @@ async def async_setup_platform( async_add_entities(entities) -class EnvisalinkSensor(EnvisalinkDevice, SensorEntity): +class EnvisalinkSensor(EnvisalinkEntity, SensorEntity): """Representation of an Envisalink keypad.""" def __init__(self, hass, partition_name, partition_number, info, controller): diff --git a/homeassistant/components/envisalink/switch.py b/homeassistant/components/envisalink/switch.py index 36ad3d5bf81..e4f37bf328d 100644 --- a/homeassistant/components/envisalink/switch.py +++ b/homeassistant/components/envisalink/switch.py @@ -11,13 +11,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ( - CONF_ZONENAME, - DATA_EVL, - SIGNAL_ZONE_BYPASS_UPDATE, - ZONE_SCHEMA, - EnvisalinkDevice, -) +from . 
import CONF_ZONENAME, DATA_EVL, SIGNAL_ZONE_BYPASS_UPDATE, ZONE_SCHEMA +from .entity import EnvisalinkEntity _LOGGER = logging.getLogger(__name__) @@ -51,7 +46,7 @@ async def async_setup_platform( async_add_entities(entities) -class EnvisalinkSwitch(EnvisalinkDevice, SwitchEntity): +class EnvisalinkSwitch(EnvisalinkEntity, SwitchEntity): """Representation of an Envisalink switch.""" def __init__(self, hass, zone_number, zone_name, info, controller): diff --git a/homeassistant/components/ephember/climate.py b/homeassistant/components/ephember/climate.py index 44e5986970d..cedad8b76e2 100644 --- a/homeassistant/components/ephember/climate.py +++ b/homeassistant/components/ephember/climate.py @@ -84,7 +84,6 @@ class EphEmberThermostat(ClimateEntity): _attr_hvac_modes = OPERATION_LIST _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, ember, zone): """Initialize the thermostat.""" diff --git a/homeassistant/components/ephember/manifest.json b/homeassistant/components/ephember/manifest.json index dd7938ccbd2..547ab2918f5 100644 --- a/homeassistant/components/ephember/manifest.json +++ b/homeassistant/components/ephember/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ephember", "iot_class": "local_polling", "loggers": ["pyephember"], + "quality_scale": "legacy", "requirements": ["pyephember==0.3.1"] } diff --git a/homeassistant/components/epson/__init__.py b/homeassistant/components/epson/__init__.py index 5171865594d..715b55824b4 100644 --- a/homeassistant/components/epson/__init__.py +++ b/homeassistant/components/epson/__init__.py @@ -13,7 +13,7 @@ from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN, HTTP +from .const import CONF_CONNECTION_TYPE, DOMAIN, HTTP from .exceptions import CannotConnect, PoweredOff PLATFORMS = [Platform.MEDIA_PLAYER] @@ -22,13 +22,17 @@ _LOGGER = logging.getLogger(__name__) async def validate_projector( - hass: HomeAssistant, host, check_power=True, check_powered_on=True + hass: HomeAssistant, + host: str, + conn_type: str, + check_power: bool = True, + check_powered_on: bool = True, ): """Validate the given projector host allows us to connect.""" epson_proj = Projector( host=host, websession=async_get_clientsession(hass, verify_ssl=False), - type=HTTP, + type=conn_type, ) if check_power: _power = await epson_proj.get_power() @@ -46,6 +50,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: projector = await validate_projector( hass=hass, host=entry.data[CONF_HOST], + conn_type=entry.data[CONF_CONNECTION_TYPE], check_power=False, check_powered_on=False, ) @@ -60,5 +65,33 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) + projector = hass.data[DOMAIN].pop(entry.entry_id) + projector.close() return unload_ok + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + if config_entry.version > 1 or config_entry.minor_version > 1: + # This means the user has downgraded from a future version + return False + + 
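The async_migrate_entry hook added above (its 1.1 to 1.2 branch continues directly below) follows the usual shape: refuse to run on data written by a newer release, otherwise upgrade the stored data step by step and bump the minor version. A standalone sketch of that shape, assuming a MockEntry stand-in rather than Home Assistant's ConfigEntry:

# Standalone sketch of the version/minor_version migration pattern used here.
from dataclasses import dataclass, field
from typing import Any


@dataclass
class MockEntry:
    """Stand-in for a stored config entry."""

    version: int = 1
    minor_version: int = 1
    data: dict[str, Any] = field(default_factory=dict)


def migrate(entry: MockEntry) -> bool:
    """Return False on downgraded data, otherwise upgrade it in steps."""
    if entry.version > 1 or entry.minor_version > 1:
        # Data written by a newer release than this handler knows; leave it alone.
        return False
    if entry.version == 1 and entry.minor_version == 1:
        # 1.1 -> 1.2: older entries had no connection type, default to HTTP.
        entry.data = {**entry.data, "connection_type": "http"}
        entry.minor_version = 2
    return True


entry = MockEntry(data={"host": "192.168.1.2"})
assert migrate(entry) and entry.minor_version == 2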
if config_entry.version == 1 and config_entry.minor_version == 1: + new_data = {**config_entry.data} + new_data[CONF_CONNECTION_TYPE] = HTTP + + hass.config_entries.async_update_entry( + config_entry, data=new_data, version=1, minor_version=2 + ) + + _LOGGER.debug( + "Migration to configuration version %s successful", config_entry.version + ) + + return True diff --git a/homeassistant/components/epson/config_flow.py b/homeassistant/components/epson/config_flow.py index 1e3b006a984..c54bff2eea9 100644 --- a/homeassistant/components/epson/config_flow.py +++ b/homeassistant/components/epson/config_flow.py @@ -7,13 +7,21 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig from . import validate_projector -from .const import DOMAIN +from .const import CONF_CONNECTION_TYPE, DOMAIN, HTTP, SERIAL from .exceptions import CannotConnect, PoweredOff +ALLOWED_CONNECTION_TYPE = [HTTP, SERIAL] + DATA_SCHEMA = vol.Schema( { + vol.Required(CONF_CONNECTION_TYPE, default=HTTP): SelectSelector( + SelectSelectorConfig( + options=ALLOWED_CONNECTION_TYPE, translation_key="connection_type" + ) + ), vol.Required(CONF_HOST): str, vol.Required(CONF_NAME, default=DOMAIN): str, } @@ -26,6 +34,7 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for epson.""" VERSION = 1 + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -33,12 +42,16 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors = {} if user_input is not None: + # Epson projector doesn't appear to need to be on for serial + check_power = user_input[CONF_CONNECTION_TYPE] != SERIAL + projector = None try: projector = await validate_projector( hass=self.hass, + conn_type=user_input[CONF_CONNECTION_TYPE], host=user_input[CONF_HOST], check_power=True, - check_powered_on=True, + check_powered_on=check_power, ) except CannotConnect: errors["base"] = "cannot_connect" @@ -55,6 +68,9 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry( title=user_input.pop(CONF_NAME), data=user_input ) + finally: + if projector: + projector.close() return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) diff --git a/homeassistant/components/epson/const.py b/homeassistant/components/epson/const.py index 06ef9f25e35..5bc5f57cb3f 100644 --- a/homeassistant/components/epson/const.py +++ b/homeassistant/components/epson/const.py @@ -2,6 +2,8 @@ DOMAIN = "epson" SERVICE_SELECT_CMODE = "select_cmode" +CONF_CONNECTION_TYPE = "connection_type" ATTR_CMODE = "cmode" HTTP = "http" +SERIAL = "serial" diff --git a/homeassistant/components/epson/icons.json b/homeassistant/components/epson/icons.json index a9237edcfd1..d41ddebcdce 100644 --- a/homeassistant/components/epson/icons.json +++ b/homeassistant/components/epson/icons.json @@ -1,5 +1,7 @@ { "services": { - "select_cmode": "mdi:palette" + "select_cmode": { + "service": "mdi:palette" + } } } diff --git a/homeassistant/components/epson/strings.json b/homeassistant/components/epson/strings.json index 94544c32d1d..fb8d7ab5fdd 100644 --- a/homeassistant/components/epson/strings.json +++ b/homeassistant/components/epson/strings.json @@ -3,11 +3,12 @@ "step": { "user": { "data": { + "connection_type": "Connection type", "host": "[%key:common::config_flow::data::host%]", "name": 
"[%key:common::config_flow::data::name%]" }, "data_description": { - "host": "The hostname or IP address of your Epson projector." + "host": "The hostname, IP address or serial port of your Epson projector." } } }, @@ -30,5 +31,13 @@ } } } + }, + "selector": { + "connection_type": { + "options": { + "http": "HTTP", + "serial": "Serial" + } + } } } diff --git a/homeassistant/components/eq3btsmart/__init__.py b/homeassistant/components/eq3btsmart/__init__.py index f63e627ea7d..4493f944db3 100644 --- a/homeassistant/components/eq3btsmart/__init__.py +++ b/homeassistant/components/eq3btsmart/__init__.py @@ -15,17 +15,24 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import DOMAIN, SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED +from .const import SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED from .models import Eq3Config, Eq3ConfigEntryData PLATFORMS = [ + Platform.BINARY_SENSOR, Platform.CLIMATE, + Platform.NUMBER, + Platform.SENSOR, + Platform.SWITCH, ] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type Eq3ConfigEntry = ConfigEntry[Eq3ConfigEntryData] + + +async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: """Handle config entry setup.""" mac_address: str | None = entry.unique_id @@ -53,12 +60,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ble_device=device, ) - eq3_config_entry = Eq3ConfigEntryData(eq3_config=eq3_config, thermostat=thermostat) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = eq3_config_entry - + entry.runtime_data = Eq3ConfigEntryData( + eq3_config=eq3_config, thermostat=thermostat + ) entry.async_on_unload(entry.add_update_listener(update_listener)) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_create_background_task( hass, _async_run_thermostat(hass, entry), entry.entry_id ) @@ -66,29 +72,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: """Handle config entry unload.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN].pop(entry.entry_id) - await eq3_config_entry.thermostat.async_disconnect() + await entry.runtime_data.thermostat.async_disconnect() return unload_ok -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None: """Handle config entry update.""" await hass.config_entries.async_reload(entry.entry_id) -async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_run_thermostat(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None: """Run the thermostat.""" - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id] - thermostat = eq3_config_entry.thermostat - mac_address = eq3_config_entry.eq3_config.mac_address - scan_interval = eq3_config_entry.eq3_config.scan_interval + thermostat = entry.runtime_data.thermostat + mac_address = entry.runtime_data.eq3_config.mac_address + scan_interval = entry.runtime_data.eq3_config.scan_interval await _async_reconnect_thermostat(hass, entry) 
@@ -117,13 +121,14 @@ async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None await asyncio.sleep(scan_interval) -async def _async_reconnect_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_reconnect_thermostat( + hass: HomeAssistant, entry: Eq3ConfigEntry +) -> None: """Reconnect the thermostat.""" - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id] - thermostat = eq3_config_entry.thermostat - mac_address = eq3_config_entry.eq3_config.mac_address - scan_interval = eq3_config_entry.eq3_config.scan_interval + thermostat = entry.runtime_data.thermostat + mac_address = entry.runtime_data.eq3_config.mac_address + scan_interval = entry.runtime_data.eq3_config.scan_interval while True: try: diff --git a/homeassistant/components/eq3btsmart/binary_sensor.py b/homeassistant/components/eq3btsmart/binary_sensor.py new file mode 100644 index 00000000000..27525d47972 --- /dev/null +++ b/homeassistant/components/eq3btsmart/binary_sensor.py @@ -0,0 +1,86 @@ +"""Platform for eq3 binary sensor entities.""" + +from collections.abc import Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from eq3btsmart.models import Status + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import Eq3ConfigEntry +from .const import ENTITY_KEY_BATTERY, ENTITY_KEY_DST, ENTITY_KEY_WINDOW +from .entity import Eq3Entity + + +@dataclass(frozen=True, kw_only=True) +class Eq3BinarySensorEntityDescription(BinarySensorEntityDescription): + """Entity description for eq3 binary sensors.""" + + value_func: Callable[[Status], bool] + + +BINARY_SENSOR_ENTITY_DESCRIPTIONS = [ + Eq3BinarySensorEntityDescription( + value_func=lambda status: status.is_low_battery, + key=ENTITY_KEY_BATTERY, + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + Eq3BinarySensorEntityDescription( + value_func=lambda status: status.is_window_open, + key=ENTITY_KEY_WINDOW, + device_class=BinarySensorDeviceClass.WINDOW, + ), + Eq3BinarySensorEntityDescription( + value_func=lambda status: status.is_dst, + key=ENTITY_KEY_DST, + translation_key=ENTITY_KEY_DST, + entity_category=EntityCategory.DIAGNOSTIC, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: Eq3ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the entry.""" + + async_add_entities( + Eq3BinarySensorEntity(entry, entity_description) + for entity_description in BINARY_SENSOR_ENTITY_DESCRIPTIONS + ) + + +class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity): + """Base class for eQ-3 binary sensor entities.""" + + entity_description: Eq3BinarySensorEntityDescription + + def __init__( + self, + entry: Eq3ConfigEntry, + entity_description: Eq3BinarySensorEntityDescription, + ) -> None: + """Initialize the entity.""" + + super().__init__(entry, entity_description.key) + self.entity_description = entity_description + + @property + def is_on(self) -> bool: + """Return the state of the binary sensor.""" + + if TYPE_CHECKING: + assert self._thermostat.status is not None + + return self.entity_description.value_func(self._thermostat.status) diff --git a/homeassistant/components/eq3btsmart/climate.py b/homeassistant/components/eq3btsmart/climate.py 
index 7b8ccb6c990..ae01d0fc9a7 100644 --- a/homeassistant/components/eq3btsmart/climate.py +++ b/homeassistant/components/eq3btsmart/climate.py @@ -3,7 +3,6 @@ import logging from typing import Any -from eq3btsmart import Thermostat from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode from eq3btsmart.exceptions import Eq3Exception @@ -15,45 +14,35 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util import slugify +from . import Eq3ConfigEntry from .const import ( - DEVICE_MODEL, - DOMAIN, EQ_TO_HA_HVAC, HA_TO_EQ_HVAC, - MANUFACTURER, - SIGNAL_THERMOSTAT_CONNECTED, - SIGNAL_THERMOSTAT_DISCONNECTED, CurrentTemperatureSelector, Preset, TargetTemperatureSelector, ) from .entity import Eq3Entity -from .models import Eq3Config, Eq3ConfigEntryData _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: Eq3ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Handle config entry setup.""" - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - [Eq3Climate(eq3_config_entry.eq3_config, eq3_config_entry.thermostat)], + [Eq3Climate(entry)], ) @@ -80,53 +69,6 @@ class Eq3Climate(Eq3Entity, ClimateEntity): _attr_preset_mode: str | None = None _target_temperature: float | None = None - def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None: - """Initialize the climate entity.""" - - super().__init__(eq3_config, thermostat) - self._attr_unique_id = dr.format_mac(eq3_config.mac_address) - self._attr_device_info = DeviceInfo( - name=slugify(self._eq3_config.mac_address), - manufacturer=MANUFACTURER, - model=DEVICE_MODEL, - connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, - ) - - async def async_added_to_hass(self) -> None: - """Run when entity about to be added to hass.""" - - self._thermostat.register_update_callback(self._async_on_updated) - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}", - self._async_on_disconnected, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}", - self._async_on_connected, - ) - ) - - async def async_will_remove_from_hass(self) -> None: - """Run when entity will be removed from hass.""" - - self._thermostat.unregister_update_callback(self._async_on_updated) - - @callback - def _async_on_disconnected(self) -> None: - self._attr_available = False - self.async_write_ha_state() - - @callback - def _async_on_connected(self) -> None: - self._attr_available = True - self.async_write_ha_state() - @callback def _async_on_updated(self) -> None: """Handle updated data from the thermostat.""" @@ -137,12 +79,15 @@ class Eq3Climate(Eq3Entity, ClimateEntity): if 
self._thermostat.device_data is not None: self._async_on_device_updated() - self.async_write_ha_state() + super()._async_on_updated() @callback def _async_on_status_updated(self) -> None: """Handle updated status from the thermostat.""" + if self._thermostat.status is None: + return + self._target_temperature = self._thermostat.status.target_temperature.value self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode] self._attr_current_temperature = self._get_current_temperature() @@ -154,13 +99,16 @@ class Eq3Climate(Eq3Entity, ClimateEntity): def _async_on_device_updated(self) -> None: """Handle updated device data from the thermostat.""" + if self._thermostat.device_data is None: + return + device_registry = dr.async_get(self.hass) if device := device_registry.async_get_device( connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, ): device_registry.async_update_device( device.id, - sw_version=self._thermostat.device_data.firmware_version, + sw_version=str(self._thermostat.device_data.firmware_version), serial_number=self._thermostat.device_data.device_serial.value, ) @@ -265,7 +213,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity): self.async_write_ha_state() try: - await self._thermostat.async_set_temperature(self._target_temperature) + await self._thermostat.async_set_temperature(temperature) except Eq3Exception: _LOGGER.error( "[%s] Failed setting temperature", self._eq3_config.mac_address diff --git a/homeassistant/components/eq3btsmart/const.py b/homeassistant/components/eq3btsmart/const.py index 111c4d0eba4..a5f7ea2ff95 100644 --- a/homeassistant/components/eq3btsmart/const.py +++ b/homeassistant/components/eq3btsmart/const.py @@ -18,8 +18,21 @@ DOMAIN = "eq3btsmart" MANUFACTURER = "eQ-3 AG" DEVICE_MODEL = "CC-RT-BLE-EQ" -GET_DEVICE_TIMEOUT = 5 # seconds +ENTITY_KEY_DST = "dst" +ENTITY_KEY_BATTERY = "battery" +ENTITY_KEY_WINDOW = "window" +ENTITY_KEY_LOCK = "lock" +ENTITY_KEY_BOOST = "boost" +ENTITY_KEY_AWAY = "away" +ENTITY_KEY_COMFORT = "comfort" +ENTITY_KEY_ECO = "eco" +ENTITY_KEY_OFFSET = "offset" +ENTITY_KEY_WINDOW_OPEN_TEMPERATURE = "window_open_temperature" +ENTITY_KEY_WINDOW_OPEN_TIMEOUT = "window_open_timeout" +ENTITY_KEY_VALVE = "valve" +ENTITY_KEY_AWAY_UNTIL = "away_until" +GET_DEVICE_TIMEOUT = 5 # seconds EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = { OperationMode.OFF: HVACMode.OFF, @@ -71,3 +84,5 @@ DEFAULT_SCAN_INTERVAL = 10 # seconds SIGNAL_THERMOSTAT_DISCONNECTED = f"{DOMAIN}.thermostat_disconnected" SIGNAL_THERMOSTAT_CONNECTED = f"{DOMAIN}.thermostat_connected" + +EQ3BT_STEP = 0.5 diff --git a/homeassistant/components/eq3btsmart/entity.py b/homeassistant/components/eq3btsmart/entity.py index e8c00d4e3cf..e68545c08c7 100644 --- a/homeassistant/components/eq3btsmart/entity.py +++ b/homeassistant/components/eq3btsmart/entity.py @@ -1,10 +1,22 @@ """Base class for all eQ-3 entities.""" -from eq3btsmart.thermostat import Thermostat - +from homeassistant.core import callback +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + DeviceInfo, + format_mac, +) +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity +from homeassistant.util import slugify -from .models import Eq3Config +from . 
import Eq3ConfigEntry +from .const import ( + DEVICE_MODEL, + MANUFACTURER, + SIGNAL_THERMOSTAT_CONNECTED, + SIGNAL_THERMOSTAT_DISCONNECTED, +) class Eq3Entity(Entity): @@ -12,8 +24,70 @@ class Eq3Entity(Entity): _attr_has_entity_name = True - def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None: + def __init__( + self, + entry: Eq3ConfigEntry, + unique_id_key: str | None = None, + ) -> None: """Initialize the eq3 entity.""" - self._eq3_config = eq3_config - self._thermostat = thermostat + self._eq3_config = entry.runtime_data.eq3_config + self._thermostat = entry.runtime_data.thermostat + self._attr_device_info = DeviceInfo( + name=slugify(self._eq3_config.mac_address), + manufacturer=MANUFACTURER, + model=DEVICE_MODEL, + connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, + ) + suffix = f"_{unique_id_key}" if unique_id_key else "" + self._attr_unique_id = f"{format_mac(self._eq3_config.mac_address)}{suffix}" + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + + self._thermostat.register_update_callback(self._async_on_updated) + + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}", + self._async_on_disconnected, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}", + self._async_on_connected, + ) + ) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + + self._thermostat.unregister_update_callback(self._async_on_updated) + + def _async_on_updated(self) -> None: + """Handle updated data from the thermostat.""" + + self.async_write_ha_state() + + @callback + def _async_on_disconnected(self) -> None: + """Handle disconnection from the thermostat.""" + + self._attr_available = False + self.async_write_ha_state() + + @callback + def _async_on_connected(self) -> None: + """Handle connection to the thermostat.""" + + self._attr_available = True + self.async_write_ha_state() + + @property + def available(self) -> bool: + """Whether the entity is available.""" + + return self._thermostat.status is not None and self._attr_available diff --git a/homeassistant/components/eq3btsmart/icons.json b/homeassistant/components/eq3btsmart/icons.json new file mode 100644 index 00000000000..892352c2ea4 --- /dev/null +++ b/homeassistant/components/eq3btsmart/icons.json @@ -0,0 +1,57 @@ +{ + "entity": { + "binary_sensor": { + "dst": { + "default": "mdi:sun-clock", + "state": { + "off": "mdi:sun-clock-outline" + } + } + }, + "number": { + "comfort": { + "default": "mdi:sun-thermometer" + }, + "eco": { + "default": "mdi:snowflake-thermometer" + }, + "offset": { + "default": "mdi:thermometer-plus" + }, + "window_open_temperature": { + "default": "mdi:window-open-variant" + }, + "window_open_timeout": { + "default": "mdi:timer-refresh" + } + }, + "sensor": { + "away_until": { + "default": "mdi:home-export-outline" + }, + "valve": { + "default": "mdi:pipe-valve" + } + }, + "switch": { + "away": { + "default": "mdi:home-account", + "state": { + "on": "mdi:home-export-outline" + } + }, + "lock": { + "default": "mdi:lock", + "state": { + "off": "mdi:lock-off" + } + }, + "boost": { + "default": "mdi:fire", + "state": { + "off": "mdi:fire-off" + } + } + } + } +} diff --git a/homeassistant/components/eq3btsmart/manifest.json b/homeassistant/components/eq3btsmart/manifest.json index d308d02027d..ed80ad9aabf 100644 --- 
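The Eq3Entity base class above derives availability from two things: a connection flag flipped by the connected/disconnected dispatcher signals, and whether the thermostat has reported a status yet. A minimal standalone sketch of that logic, with Status and BaseEntity as stand-ins for the eq3btsmart and Home Assistant classes:

# Standalone sketch of the availability logic in the shared base entity above.
from dataclasses import dataclass


@dataclass
class Status:
    valve: int


class BaseEntity:
    def __init__(self) -> None:
        self._connected = True  # flipped by connected/disconnected signals
        self.status: Status | None = None  # filled in by the first update callback

    def on_disconnected(self) -> None:
        self._connected = False

    def on_connected(self) -> None:
        self._connected = True

    @property
    def available(self) -> bool:
        # Unavailable until the device both connects and reports a status.
        return self._connected and self.status is not None


entity = BaseEntity()
assert entity.available is False
entity.status = Status(valve=40)
assert entity.available is True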
a/homeassistant/components/eq3btsmart/manifest.json +++ b/homeassistant/components/eq3btsmart/manifest.json @@ -22,6 +22,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["eq3btsmart"], - "quality_scale": "silver", - "requirements": ["eq3btsmart==1.1.9", "bleak-esphome==1.0.0"] + "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"] } diff --git a/homeassistant/components/eq3btsmart/models.py b/homeassistant/components/eq3btsmart/models.py index 8ea0955dbdd..858465effa8 100644 --- a/homeassistant/components/eq3btsmart/models.py +++ b/homeassistant/components/eq3btsmart/models.py @@ -2,7 +2,6 @@ from dataclasses import dataclass -from eq3btsmart.const import DEFAULT_AWAY_HOURS, DEFAULT_AWAY_TEMP from eq3btsmart.thermostat import Thermostat from .const import ( @@ -23,8 +22,6 @@ class Eq3Config: target_temp_selector: TargetTemperatureSelector = DEFAULT_TARGET_TEMP_SELECTOR external_temp_sensor: str = "" scan_interval: int = DEFAULT_SCAN_INTERVAL - default_away_hours: float = DEFAULT_AWAY_HOURS - default_away_temperature: float = DEFAULT_AWAY_TEMP @dataclass(slots=True) diff --git a/homeassistant/components/eq3btsmart/number.py b/homeassistant/components/eq3btsmart/number.py new file mode 100644 index 00000000000..2e069180fa3 --- /dev/null +++ b/homeassistant/components/eq3btsmart/number.py @@ -0,0 +1,158 @@ +"""Platform for eq3 number entities.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from eq3btsmart import Thermostat +from eq3btsmart.const import ( + EQ3BT_MAX_OFFSET, + EQ3BT_MAX_TEMP, + EQ3BT_MIN_OFFSET, + EQ3BT_MIN_TEMP, +) +from eq3btsmart.models import Presets + +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, + NumberMode, +) +from homeassistant.const import EntityCategory, UnitOfTemperature, UnitOfTime +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import Eq3ConfigEntry +from .const import ( + ENTITY_KEY_COMFORT, + ENTITY_KEY_ECO, + ENTITY_KEY_OFFSET, + ENTITY_KEY_WINDOW_OPEN_TEMPERATURE, + ENTITY_KEY_WINDOW_OPEN_TIMEOUT, + EQ3BT_STEP, +) +from .entity import Eq3Entity + + +@dataclass(frozen=True, kw_only=True) +class Eq3NumberEntityDescription(NumberEntityDescription): + """Entity description for eq3 number entities.""" + + value_func: Callable[[Presets], float] + value_set_func: Callable[ + [Thermostat], + Callable[[float], Awaitable[None]], + ] + mode: NumberMode = NumberMode.BOX + entity_category: EntityCategory | None = EntityCategory.CONFIG + + +NUMBER_ENTITY_DESCRIPTIONS = [ + Eq3NumberEntityDescription( + key=ENTITY_KEY_COMFORT, + value_func=lambda presets: presets.comfort_temperature.value, + value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature, + translation_key=ENTITY_KEY_COMFORT, + native_min_value=EQ3BT_MIN_TEMP, + native_max_value=EQ3BT_MAX_TEMP, + native_step=EQ3BT_STEP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + ), + Eq3NumberEntityDescription( + key=ENTITY_KEY_ECO, + value_func=lambda presets: presets.eco_temperature.value, + value_set_func=lambda thermostat: thermostat.async_configure_eco_temperature, + translation_key=ENTITY_KEY_ECO, + native_min_value=EQ3BT_MIN_TEMP, + native_max_value=EQ3BT_MAX_TEMP, + native_step=EQ3BT_STEP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + ), + Eq3NumberEntityDescription( + key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE, + value_func=lambda presets: presets.window_open_temperature.value, + value_set_func=lambda thermostat: thermostat.async_configure_window_open_temperature, + translation_key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE, + native_min_value=EQ3BT_MIN_TEMP, + native_max_value=EQ3BT_MAX_TEMP, + native_step=EQ3BT_STEP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + ), + Eq3NumberEntityDescription( + key=ENTITY_KEY_OFFSET, + value_func=lambda presets: presets.offset_temperature.value, + value_set_func=lambda thermostat: thermostat.async_configure_temperature_offset, + translation_key=ENTITY_KEY_OFFSET, + native_min_value=EQ3BT_MIN_OFFSET, + native_max_value=EQ3BT_MAX_OFFSET, + native_step=EQ3BT_STEP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + ), + Eq3NumberEntityDescription( + key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT, + value_set_func=lambda thermostat: thermostat.async_configure_window_open_duration, + value_func=lambda presets: presets.window_open_time.value.total_seconds() / 60, + translation_key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT, + native_min_value=0, + native_max_value=60, + native_step=5, + native_unit_of_measurement=UnitOfTime.MINUTES, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: Eq3ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the entry.""" + + async_add_entities( + Eq3NumberEntity(entry, entity_description) + for entity_description in NUMBER_ENTITY_DESCRIPTIONS + ) + + +class Eq3NumberEntity(Eq3Entity, NumberEntity): + """Base class for all eq3 number entities.""" + + entity_description: Eq3NumberEntityDescription + + def __init__( + self, entry: Eq3ConfigEntry, entity_description: Eq3NumberEntityDescription + ) -> None: + """Initialize the entity.""" + + super().__init__(entry, entity_description.key) + self.entity_description = entity_description + + @property + 
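The number descriptions above (their native_value and async_set_native_value counterparts continue directly below) pair a read lambda over the thermostat presets with the bound configure coroutine used for writes. A standalone, runnable sketch of that pairing; Presets, Thermostat and NumberDescription are illustrative stand-ins, not the eq3btsmart classes.

# Standalone sketch of the value_func / value_set_func pairing used above.
import asyncio
from collections.abc import Awaitable, Callable
from dataclasses import dataclass


@dataclass
class Presets:
    comfort_temperature: float


class Thermostat:
    def __init__(self) -> None:
        self.presets = Presets(comfort_temperature=21.0)

    async def async_configure_comfort_temperature(self, value: float) -> None:
        self.presets.comfort_temperature = value


@dataclass(frozen=True, kw_only=True)
class NumberDescription:
    key: str
    value_func: Callable[[Presets], float]
    value_set_func: Callable[[Thermostat], Callable[[float], Awaitable[None]]]


COMFORT = NumberDescription(
    key="comfort",
    value_func=lambda presets: presets.comfort_temperature,
    value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature,
)


async def main() -> None:
    thermostat = Thermostat()
    # native_value reads through value_func; async_set_native_value awaits the
    # coroutine returned by value_set_func.
    assert COMFORT.value_func(thermostat.presets) == 21.0
    await COMFORT.value_set_func(thermostat)(19.5)
    assert COMFORT.value_func(thermostat.presets) == 19.5


asyncio.run(main())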
def native_value(self) -> float: + """Return the state of the entity.""" + + if TYPE_CHECKING: + assert self._thermostat.status is not None + assert self._thermostat.status.presets is not None + + return self.entity_description.value_func(self._thermostat.status.presets) + + async def async_set_native_value(self, value: float) -> None: + """Set the state of the entity.""" + + await self.entity_description.value_set_func(self._thermostat)(value) + + @property + def available(self) -> bool: + """Return whether the entity is available.""" + + return ( + self._thermostat.status is not None + and self._thermostat.status.presets is not None + and self._attr_available + ) diff --git a/homeassistant/components/eq3btsmart/sensor.py b/homeassistant/components/eq3btsmart/sensor.py new file mode 100644 index 00000000000..bd2605042f4 --- /dev/null +++ b/homeassistant/components/eq3btsmart/sensor.py @@ -0,0 +1,84 @@ +"""Platform for eq3 sensor entities.""" + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime +from typing import TYPE_CHECKING + +from eq3btsmart.models import Status + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.components.sensor.const import SensorStateClass +from homeassistant.const import PERCENTAGE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import Eq3ConfigEntry +from .const import ENTITY_KEY_AWAY_UNTIL, ENTITY_KEY_VALVE +from .entity import Eq3Entity + + +@dataclass(frozen=True, kw_only=True) +class Eq3SensorEntityDescription(SensorEntityDescription): + """Entity description for eq3 sensor entities.""" + + value_func: Callable[[Status], int | datetime | None] + + +SENSOR_ENTITY_DESCRIPTIONS = [ + Eq3SensorEntityDescription( + key=ENTITY_KEY_VALVE, + translation_key=ENTITY_KEY_VALVE, + value_func=lambda status: status.valve, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + Eq3SensorEntityDescription( + key=ENTITY_KEY_AWAY_UNTIL, + translation_key=ENTITY_KEY_AWAY_UNTIL, + value_func=lambda status: ( + status.away_until.value if status.away_until else None + ), + device_class=SensorDeviceClass.DATE, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: Eq3ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the entry.""" + + async_add_entities( + Eq3SensorEntity(entry, entity_description) + for entity_description in SENSOR_ENTITY_DESCRIPTIONS + ) + + +class Eq3SensorEntity(Eq3Entity, SensorEntity): + """Base class for eq3 sensor entities.""" + + entity_description: Eq3SensorEntityDescription + + def __init__( + self, entry: Eq3ConfigEntry, entity_description: Eq3SensorEntityDescription + ) -> None: + """Initialize the entity.""" + + super().__init__(entry, entity_description.key) + self.entity_description = entity_description + + @property + def native_value(self) -> int | datetime | None: + """Return the value reported by the sensor.""" + + if TYPE_CHECKING: + assert self._thermostat.status is not None + + return self.entity_description.value_func(self._thermostat.status) diff --git a/homeassistant/components/eq3btsmart/strings.json b/homeassistant/components/eq3btsmart/strings.json index 7477aab4cfb..ab363f4d752 100644 --- a/homeassistant/components/eq3btsmart/strings.json +++ b/homeassistant/components/eq3btsmart/strings.json @@ -14,6 +14,52 @@ "init": { "title": "Configure 
new eQ-3 device" } + }, + "error": { + "invalid_mac_address": "Invalid MAC address" + } + }, + "entity": { + "binary_sensor": { + "dst": { + "name": "Daylight saving time" + } + }, + "number": { + "comfort": { + "name": "Comfort temperature" + }, + "eco": { + "name": "Eco temperature" + }, + "offset": { + "name": "Offset temperature" + }, + "window_open_temperature": { + "name": "Window open temperature" + }, + "window_open_timeout": { + "name": "Window open timeout" + } + }, + "sensor": { + "away_until": { + "name": "Away until" + }, + "valve": { + "name": "Valve" + } + }, + "switch": { + "lock": { + "name": "Lock" + }, + "boost": { + "name": "Boost" + }, + "away": { + "name": "Away" + } } } } diff --git a/homeassistant/components/eq3btsmart/switch.py b/homeassistant/components/eq3btsmart/switch.py new file mode 100644 index 00000000000..7525d8ca494 --- /dev/null +++ b/homeassistant/components/eq3btsmart/switch.py @@ -0,0 +1,94 @@ +"""Platform for eq3 switch entities.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any + +from eq3btsmart import Thermostat +from eq3btsmart.models import Status + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import Eq3ConfigEntry +from .const import ENTITY_KEY_AWAY, ENTITY_KEY_BOOST, ENTITY_KEY_LOCK +from .entity import Eq3Entity + + +@dataclass(frozen=True, kw_only=True) +class Eq3SwitchEntityDescription(SwitchEntityDescription): + """Entity description for eq3 switch entities.""" + + toggle_func: Callable[[Thermostat], Callable[[bool], Awaitable[None]]] + value_func: Callable[[Status], bool] + + +SWITCH_ENTITY_DESCRIPTIONS = [ + Eq3SwitchEntityDescription( + key=ENTITY_KEY_LOCK, + translation_key=ENTITY_KEY_LOCK, + toggle_func=lambda thermostat: thermostat.async_set_locked, + value_func=lambda status: status.is_locked, + ), + Eq3SwitchEntityDescription( + key=ENTITY_KEY_BOOST, + translation_key=ENTITY_KEY_BOOST, + toggle_func=lambda thermostat: thermostat.async_set_boost, + value_func=lambda status: status.is_boost, + ), + Eq3SwitchEntityDescription( + key=ENTITY_KEY_AWAY, + translation_key=ENTITY_KEY_AWAY, + toggle_func=lambda thermostat: thermostat.async_set_away, + value_func=lambda status: status.is_away, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: Eq3ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the entry.""" + + async_add_entities( + Eq3SwitchEntity(entry, entity_description) + for entity_description in SWITCH_ENTITY_DESCRIPTIONS + ) + + +class Eq3SwitchEntity(Eq3Entity, SwitchEntity): + """Base class for eq3 switch entities.""" + + entity_description: Eq3SwitchEntityDescription + + def __init__( + self, + entry: Eq3ConfigEntry, + entity_description: Eq3SwitchEntityDescription, + ) -> None: + """Initialize the entity.""" + + super().__init__(entry, entity_description.key) + self.entity_description = entity_description + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the switch.""" + + await self.entity_description.toggle_func(self._thermostat)(True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the switch.""" + + await self.entity_description.toggle_func(self._thermostat)(False) + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + + if TYPE_CHECKING: + assert 
self._thermostat.status is not None + + return self.entity_description.value_func(self._thermostat.status) diff --git a/homeassistant/components/escea/climate.py b/homeassistant/components/escea/climate.py index 555da1494d7..c3fb0015e68 100644 --- a/homeassistant/components/escea/climate.py +++ b/homeassistant/components/escea/climate.py @@ -89,7 +89,6 @@ class ControllerEntity(ClimateEntity): ) _attr_target_temperature_step = PRECISION_WHOLE _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, controller: Controller) -> None: """Initialise ControllerDevice.""" diff --git a/homeassistant/components/esphome/__init__.py b/homeassistant/components/esphome/__init__.py index b06fcd4bab0..13e9496a9fd 100644 --- a/homeassistant/components/esphome/__init__.py +++ b/homeassistant/components/esphome/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations from aioesphomeapi import APIClient -from homeassistant.components import zeroconf +from homeassistant.components import ffmpeg, zeroconf from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -15,12 +15,13 @@ from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType -from .const import CONF_NOISE_PSK, DOMAIN +from .const import CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN from .dashboard import async_setup as async_setup_dashboard from .domain_data import DomainData # Import config flow so that it's added to the registry from .entry_data import ESPHomeConfigEntry, RuntimeEntryData +from .ffmpeg_proxy import FFmpegProxyData, FFmpegProxyView from .manager import ESPHomeManager, cleanup_instance CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -30,7 +31,12 @@ CLIENT_INFO = f"Home Assistant {ha_version}" async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the esphome component.""" + proxy_data = hass.data[DATA_FFMPEG_PROXY] = FFmpegProxyData() + await async_setup_dashboard(hass) + hass.http.register_view( + FFmpegProxyView(ffmpeg.get_ffmpeg_manager(hass), proxy_data) + ) return True diff --git a/homeassistant/components/esphome/alarm_control_panel.py b/homeassistant/components/esphome/alarm_control_panel.py index 64a0210f0f7..8f1b5ae8b1a 100644 --- a/homeassistant/components/esphome/alarm_control_panel.py +++ b/homeassistant/components/esphome/alarm_control_panel.py @@ -6,9 +6,9 @@ from functools import partial from aioesphomeapi import ( AlarmControlPanelCommand, - AlarmControlPanelEntityState, + AlarmControlPanelEntityState as ESPHomeAlarmControlPanelEntityState, AlarmControlPanelInfo, - AlarmControlPanelState, + AlarmControlPanelState as ESPHomeAlarmControlPanelState, APIIntEnum, EntityInfo, ) @@ -16,20 +16,9 @@ from aioesphomeapi import ( from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import callback from .entity import ( @@ -40,21 +29,21 @@ from .entity import ( ) from .enum_mapper import EsphomeEnumMapper -_ESPHOME_ACP_STATE_TO_HASS_STATE: EsphomeEnumMapper[AlarmControlPanelState, str] = ( - 
EsphomeEnumMapper( - { - AlarmControlPanelState.DISARMED: STATE_ALARM_DISARMED, - AlarmControlPanelState.ARMED_HOME: STATE_ALARM_ARMED_HOME, - AlarmControlPanelState.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - AlarmControlPanelState.ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT, - AlarmControlPanelState.ARMED_VACATION: STATE_ALARM_ARMED_VACATION, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS: STATE_ALARM_ARMED_CUSTOM_BYPASS, - AlarmControlPanelState.PENDING: STATE_ALARM_PENDING, - AlarmControlPanelState.ARMING: STATE_ALARM_ARMING, - AlarmControlPanelState.DISARMING: STATE_ALARM_DISARMING, - AlarmControlPanelState.TRIGGERED: STATE_ALARM_TRIGGERED, - } - ) +_ESPHOME_ACP_STATE_TO_HASS_STATE: EsphomeEnumMapper[ + ESPHomeAlarmControlPanelState, AlarmControlPanelState +] = EsphomeEnumMapper( + { + ESPHomeAlarmControlPanelState.DISARMED: AlarmControlPanelState.DISARMED, + ESPHomeAlarmControlPanelState.ARMED_HOME: AlarmControlPanelState.ARMED_HOME, + ESPHomeAlarmControlPanelState.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + ESPHomeAlarmControlPanelState.ARMED_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + ESPHomeAlarmControlPanelState.ARMED_VACATION: AlarmControlPanelState.ARMED_VACATION, + ESPHomeAlarmControlPanelState.ARMED_CUSTOM_BYPASS: AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ESPHomeAlarmControlPanelState.PENDING: AlarmControlPanelState.PENDING, + ESPHomeAlarmControlPanelState.ARMING: AlarmControlPanelState.ARMING, + ESPHomeAlarmControlPanelState.DISARMING: AlarmControlPanelState.DISARMING, + ESPHomeAlarmControlPanelState.TRIGGERED: AlarmControlPanelState.TRIGGERED, + } ) @@ -70,7 +59,7 @@ class EspHomeACPFeatures(APIIntEnum): class EsphomeAlarmControlPanel( - EsphomeEntity[AlarmControlPanelInfo, AlarmControlPanelEntityState], + EsphomeEntity[AlarmControlPanelInfo, ESPHomeAlarmControlPanelEntityState], AlarmControlPanelEntity, ): """An Alarm Control Panel implementation for ESPHome.""" @@ -101,7 +90,7 @@ class EsphomeAlarmControlPanel( @property @esphome_state_property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" return _ESPHOME_ACP_STATE_TO_HASS_STATE.from_esphome(self._state.state) @@ -159,5 +148,5 @@ async_setup_entry = partial( platform_async_setup_entry, info_type=AlarmControlPanelInfo, entity_type=EsphomeAlarmControlPanel, - state_type=AlarmControlPanelEntityState, + state_type=ESPHomeAlarmControlPanelEntityState, ) diff --git a/homeassistant/components/esphome/assist_satellite.py b/homeassistant/components/esphome/assist_satellite.py new file mode 100644 index 00000000000..f60668b0a06 --- /dev/null +++ b/homeassistant/components/esphome/assist_satellite.py @@ -0,0 +1,693 @@ +"""Support for assist satellites in ESPHome.""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncIterable +from functools import partial +import io +from itertools import chain +import logging +import socket +from typing import Any, cast +import wave + +from aioesphomeapi import ( + MediaPlayerFormatPurpose, + MediaPlayerSupportedFormat, + VoiceAssistantAnnounceFinished, + VoiceAssistantAudioSettings, + VoiceAssistantCommandFlag, + VoiceAssistantEventType, + VoiceAssistantFeature, + VoiceAssistantTimerEventType, +) + +from homeassistant.components import assist_satellite, tts +from homeassistant.components.assist_pipeline import ( + PipelineEvent, + PipelineEventType, + PipelineStage, +) +from homeassistant.components.intent import ( + TimerEventType, + TimerInfo, + async_register_timer_handler, +) 
+from homeassistant.components.media_player import async_process_play_media_url +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .entity import EsphomeAssistEntity +from .entry_data import ESPHomeConfigEntry, RuntimeEntryData +from .enum_mapper import EsphomeEnumMapper +from .ffmpeg_proxy import async_create_proxy_url + +_LOGGER = logging.getLogger(__name__) + +_VOICE_ASSISTANT_EVENT_TYPES: EsphomeEnumMapper[ + VoiceAssistantEventType, PipelineEventType +] = EsphomeEnumMapper( + { + VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: PipelineEventType.ERROR, + VoiceAssistantEventType.VOICE_ASSISTANT_RUN_START: PipelineEventType.RUN_START, + VoiceAssistantEventType.VOICE_ASSISTANT_RUN_END: PipelineEventType.RUN_END, + VoiceAssistantEventType.VOICE_ASSISTANT_STT_START: PipelineEventType.STT_START, + VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: PipelineEventType.STT_END, + VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START: PipelineEventType.INTENT_START, + VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: PipelineEventType.INTENT_END, + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: PipelineEventType.TTS_START, + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: PipelineEventType.TTS_END, + VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_START: PipelineEventType.WAKE_WORD_START, + VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: PipelineEventType.WAKE_WORD_END, + VoiceAssistantEventType.VOICE_ASSISTANT_STT_VAD_START: PipelineEventType.STT_VAD_START, + VoiceAssistantEventType.VOICE_ASSISTANT_STT_VAD_END: PipelineEventType.STT_VAD_END, + } +) + +_TIMER_EVENT_TYPES: EsphomeEnumMapper[VoiceAssistantTimerEventType, TimerEventType] = ( + EsphomeEnumMapper( + { + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED: TimerEventType.STARTED, + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED: TimerEventType.UPDATED, + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_CANCELLED: TimerEventType.CANCELLED, + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_FINISHED: TimerEventType.FINISHED, + } + ) +) + +_ANNOUNCEMENT_TIMEOUT_SEC = 5 * 60 # 5 minutes +_CONFIG_TIMEOUT_SEC = 5 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ESPHomeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Assist satellite entity.""" + entry_data = entry.runtime_data + assert entry_data.device_info is not None + if entry_data.device_info.voice_assistant_feature_flags_compat( + entry_data.api_version + ): + async_add_entities([EsphomeAssistSatellite(entry, entry_data)]) + + +class EsphomeAssistSatellite( + EsphomeAssistEntity, assist_satellite.AssistSatelliteEntity +): + """Satellite running ESPHome.""" + + entity_description = assist_satellite.AssistSatelliteEntityDescription( + key="assist_satellite", translation_key="assist_satellite" + ) + + def __init__( + self, + config_entry: ConfigEntry, + entry_data: RuntimeEntryData, + ) -> None: + """Initialize satellite.""" + super().__init__(entry_data) + + self.config_entry = config_entry + self.entry_data = entry_data + self.cli = self.entry_data.client + + self._is_running: bool = True + self._pipeline_task: asyncio.Task | None = None + self._audio_queue: asyncio.Queue[bytes | None] = asyncio.Queue() + self._tts_streaming_task: asyncio.Task | 
None = None + self._udp_server: VoiceAssistantUDPServer | None = None + + # Empty config. Updated when added to HA. + self._satellite_config = assist_satellite.AssistSatelliteConfiguration( + available_wake_words=[], active_wake_words=[], max_active_wake_words=1 + ) + + @property + def pipeline_entity_id(self) -> str | None: + """Return the entity ID of the pipeline to use for the next conversation.""" + assert self.entry_data.device_info is not None + ent_reg = er.async_get(self.hass) + return ent_reg.async_get_entity_id( + Platform.SELECT, + DOMAIN, + f"{self.entry_data.device_info.mac_address}-pipeline", + ) + + @property + def vad_sensitivity_entity_id(self) -> str | None: + """Return the entity ID of the VAD sensitivity to use for the next conversation.""" + assert self.entry_data.device_info is not None + ent_reg = er.async_get(self.hass) + return ent_reg.async_get_entity_id( + Platform.SELECT, + DOMAIN, + f"{self.entry_data.device_info.mac_address}-vad_sensitivity", + ) + + @callback + def async_get_configuration( + self, + ) -> assist_satellite.AssistSatelliteConfiguration: + """Get the current satellite configuration.""" + return self._satellite_config + + async def async_set_configuration( + self, config: assist_satellite.AssistSatelliteConfiguration + ) -> None: + """Set the current satellite configuration.""" + await self.cli.set_voice_assistant_configuration( + active_wake_words=config.active_wake_words + ) + _LOGGER.debug("Set active wake words: %s", config.active_wake_words) + + # Ensure configuration is updated + await self._update_satellite_config() + + async def _update_satellite_config(self) -> None: + """Get the latest satellite configuration from the device.""" + try: + config = await self.cli.get_voice_assistant_configuration( + _CONFIG_TIMEOUT_SEC + ) + except TimeoutError: + # Placeholder config will be used + return + + # Update available/active wake words + self._satellite_config.available_wake_words = [ + assist_satellite.AssistSatelliteWakeWord( + id=model.id, + wake_word=model.wake_word, + trained_languages=list(model.trained_languages), + ) + for model in config.available_wake_words + ] + self._satellite_config.active_wake_words = list(config.active_wake_words) + self._satellite_config.max_active_wake_words = config.max_active_wake_words + _LOGGER.debug("Received satellite configuration: %s", self._satellite_config) + + # Inform listeners that config has been updated + self.entry_data.async_assist_satellite_config_updated(self._satellite_config) + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + + assert self.entry_data.device_info is not None + feature_flags = ( + self.entry_data.device_info.voice_assistant_feature_flags_compat( + self.entry_data.api_version + ) + ) + if feature_flags & VoiceAssistantFeature.API_AUDIO: + # TCP audio + self.async_on_remove( + self.cli.subscribe_voice_assistant( + handle_start=self.handle_pipeline_start, + handle_stop=self.handle_pipeline_stop, + handle_audio=self.handle_audio, + handle_announcement_finished=self.handle_announcement_finished, + ) + ) + else: + # UDP audio + self.async_on_remove( + self.cli.subscribe_voice_assistant( + handle_start=self.handle_pipeline_start, + handle_stop=self.handle_pipeline_stop, + handle_announcement_finished=self.handle_announcement_finished, + ) + ) + + if feature_flags & VoiceAssistantFeature.TIMERS: + # Device supports timers + assert (self.registry_entry is not None) and ( + 
self.registry_entry.device_id is not None + ) + self.async_on_remove( + async_register_timer_handler( + self.hass, self.registry_entry.device_id, self.handle_timer_event + ) + ) + + if feature_flags & VoiceAssistantFeature.ANNOUNCE: + # Device supports announcements + self._attr_supported_features |= ( + assist_satellite.AssistSatelliteEntityFeature.ANNOUNCE + ) + + # Block until config is retrieved. + # If the device supports announcements, it will return a config. + _LOGGER.debug("Waiting for satellite configuration") + await self._update_satellite_config() + + if not (feature_flags & VoiceAssistantFeature.SPEAKER): + # Will use media player for TTS/announcements + self._update_tts_format() + + # Update wake word select when config is updated + self.async_on_remove( + self.entry_data.async_register_assist_satellite_set_wake_word_callback( + self.async_set_wake_word + ) + ) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + + self._is_running = False + self._stop_pipeline() + + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Handle pipeline events.""" + try: + event_type = _VOICE_ASSISTANT_EVENT_TYPES.from_hass(event.type) + except KeyError: + _LOGGER.debug("Received unknown pipeline event type: %s", event.type) + return + + data_to_send: dict[str, Any] = {} + if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_START: + self.entry_data.async_set_assist_pipeline_state(True) + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: + assert event.data is not None + data_to_send = {"text": event.data["stt_output"]["text"]} + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: + assert event.data is not None + data_to_send = { + "conversation_id": event.data["intent_output"]["conversation_id"] or "", + } + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: + assert event.data is not None + data_to_send = {"text": event.data["tts_input"]} + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: + assert event.data is not None + if tts_output := event.data["tts_output"]: + path = tts_output["url"] + url = async_process_play_media_url(self.hass, path) + data_to_send = {"url": url} + + assert self.entry_data.device_info is not None + feature_flags = ( + self.entry_data.device_info.voice_assistant_feature_flags_compat( + self.entry_data.api_version + ) + ) + if feature_flags & VoiceAssistantFeature.SPEAKER: + media_id = tts_output["media_id"] + self._tts_streaming_task = ( + self.config_entry.async_create_background_task( + self.hass, + self._stream_tts_audio(media_id), + "esphome_voice_assistant_tts", + ) + ) + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: + assert event.data is not None + if not event.data["wake_word_output"]: + event_type = VoiceAssistantEventType.VOICE_ASSISTANT_ERROR + data_to_send = { + "code": "no_wake_word", + "message": "No wake word detected", + } + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: + assert event.data is not None + data_to_send = { + "code": event.data["code"], + "message": event.data["message"], + } + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_RUN_END: + if self._tts_streaming_task is None: + # No TTS + self.entry_data.async_set_assist_pipeline_state(False) + + self.cli.send_voice_assistant_event(event_type, data_to_send) + + async def async_announce( + self, announcement: 
assist_satellite.AssistSatelliteAnnouncement
+    ) -> None:
+        """Announce media on the satellite.
+
+        Should block until the announcement is done playing.
+        """
+        _LOGGER.debug(
+            "Waiting for announcement to finish (message=%s, media_id=%s)",
+            announcement.message,
+            announcement.media_id,
+        )
+        media_id = announcement.media_id
+        if announcement.media_id_source != "tts":
+            # Route non-TTS media through the proxy
+            format_to_use: MediaPlayerSupportedFormat | None = None
+            for supported_format in chain(
+                *self.entry_data.media_player_formats.values()
+            ):
+                if supported_format.purpose == MediaPlayerFormatPurpose.ANNOUNCEMENT:
+                    format_to_use = supported_format
+                    break
+
+            if format_to_use is not None:
+                assert (self.registry_entry is not None) and (
+                    self.registry_entry.device_id is not None
+                )
+                proxy_url = async_create_proxy_url(
+                    self.hass,
+                    self.registry_entry.device_id,
+                    media_id,
+                    media_format=format_to_use.format,
+                    rate=format_to_use.sample_rate or None,
+                    channels=format_to_use.num_channels or None,
+                    width=format_to_use.sample_bytes or None,
+                )
+                media_id = async_process_play_media_url(self.hass, proxy_url)
+
+        await self.cli.send_voice_assistant_announcement_await_response(
+            media_id, _ANNOUNCEMENT_TIMEOUT_SEC, announcement.message
+        )
+
+    async def handle_pipeline_start(
+        self,
+        conversation_id: str,
+        flags: int,
+        audio_settings: VoiceAssistantAudioSettings,
+        wake_word_phrase: str | None,
+    ) -> int | None:
+        """Handle pipeline run request."""
+        # Clear audio queue
+        while not self._audio_queue.empty():
+            await self._audio_queue.get()
+
+        if self._tts_streaming_task is not None:
+            # Cancel current TTS response
+            self._tts_streaming_task.cancel()
+            self._tts_streaming_task = None
+
+        # API or UDP output audio
+        port: int = 0
+        assert self.entry_data.device_info is not None
+        feature_flags = (
+            self.entry_data.device_info.voice_assistant_feature_flags_compat(
+                self.entry_data.api_version
+            )
+        )
+        if (feature_flags & VoiceAssistantFeature.SPEAKER) and not (
+            feature_flags & VoiceAssistantFeature.API_AUDIO
+        ):
+            port = await self._start_udp_server()
+            _LOGGER.debug("Started UDP server on port %s", port)
+
+        # Device triggered pipeline (wake word, etc.)
+        if flags & VoiceAssistantCommandFlag.USE_WAKE_WORD:
+            start_stage = PipelineStage.WAKE_WORD
+        else:
+            start_stage = PipelineStage.STT
+
+        end_stage = PipelineStage.TTS
+
+        if feature_flags & VoiceAssistantFeature.SPEAKER:
+            # Stream WAV audio
+            self._attr_tts_options = {
+                tts.ATTR_PREFERRED_FORMAT: "wav",
+                tts.ATTR_PREFERRED_SAMPLE_RATE: 16000,
+                tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1,
+                tts.ATTR_PREFERRED_SAMPLE_BYTES: 2,
+            }
+        else:
+            # ANNOUNCEMENT format from media player
+            self._update_tts_format()
+
+        # Run the pipeline
+        _LOGGER.debug("Running pipeline from %s to %s", start_stage, end_stage)
+        self._pipeline_task = self.config_entry.async_create_background_task(
+            self.hass,
+            self.async_accept_pipeline_from_satellite(
+                audio_stream=self._wrap_audio_stream(),
+                start_stage=start_stage,
+                end_stage=end_stage,
+                wake_word_phrase=wake_word_phrase,
+            ),
+            "esphome_assist_satellite_pipeline",
+        )
+        self._pipeline_task.add_done_callback(
+            lambda _future: self.handle_pipeline_finished()
+        )
+
+        return port
+
+    async def handle_audio(self, data: bytes) -> None:
+        """Handle incoming audio chunk from API."""
+        self._audio_queue.put_nowait(data)
+
+    async def handle_pipeline_stop(self, abort: bool) -> None:
+        """Handle request for pipeline to stop."""
+        if abort:
+            self._abort_pipeline()
+        else:
+            self._stop_pipeline()
+
+    def handle_pipeline_finished(self) -> None:
+        """Handle when pipeline has finished running."""
+        self._stop_udp_server()
+        _LOGGER.debug("Pipeline finished")
+
+    def handle_timer_event(
+        self, event_type: TimerEventType, timer_info: TimerInfo
+    ) -> None:
+        """Handle timer events."""
+        try:
+            native_event_type = _TIMER_EVENT_TYPES.from_hass(event_type)
+        except KeyError:
+            _LOGGER.debug("Received unknown timer event type: %s", event_type)
+            return
+
+        self.cli.send_voice_assistant_timer_event(
+            native_event_type,
+            timer_info.id,
+            timer_info.name,
+            timer_info.created_seconds,
+            timer_info.seconds_left,
+            timer_info.is_active,
+        )
+
+    async def handle_announcement_finished(
+        self, announce_finished: VoiceAssistantAnnounceFinished
+    ) -> None:
+        """Handle announcement finished message (also sent for TTS)."""
+        self.tts_response_finished()
+
+    @callback
+    def async_set_wake_word(self, wake_word_id: str) -> None:
+        """Set active wake word and update config on satellite."""
+        self._satellite_config.active_wake_words = [wake_word_id]
+        self.config_entry.async_create_background_task(
+            self.hass,
+            self.async_set_configuration(self._satellite_config),
+            "esphome_voice_assistant_set_config",
+        )
+        _LOGGER.debug("Setting active wake word: %s", wake_word_id)
+
+    def _update_tts_format(self) -> None:
+        """Update the TTS format from the first media player."""
+        for supported_format in chain(*self.entry_data.media_player_formats.values()):
+            # Find first announcement format
+            if supported_format.purpose == MediaPlayerFormatPurpose.ANNOUNCEMENT:
+                self._attr_tts_options = {
+                    tts.ATTR_PREFERRED_FORMAT: supported_format.format,
+                }
+
+                if supported_format.sample_rate > 0:
+                    self._attr_tts_options[tts.ATTR_PREFERRED_SAMPLE_RATE] = (
+                        supported_format.sample_rate
+                    )
+
+                if supported_format.num_channels > 0:
+                    self._attr_tts_options[tts.ATTR_PREFERRED_SAMPLE_CHANNELS] = (
+                        supported_format.num_channels
+                    )
+
+                if supported_format.sample_bytes > 0:
+                    self._attr_tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = (
+                        supported_format.sample_bytes
+                    )
+
+                break
+
+    async def _stream_tts_audio(
+        self,
+        media_id: str,
+        sample_rate: int = 16000,
+        sample_width: int = 2,
+        sample_channels:
int = 1, + samples_per_chunk: int = 512, + ) -> None: + """Stream TTS audio chunks to device via API or UDP.""" + self.cli.send_voice_assistant_event( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, {} + ) + + try: + if not self._is_running: + return + + extension, data = await tts.async_get_media_source_audio( + self.hass, + media_id, + ) + + if extension != "wav": + _LOGGER.error("Only WAV audio can be streamed, got %s", extension) + return + + with io.BytesIO(data) as wav_io, wave.open(wav_io, "rb") as wav_file: + if ( + (wav_file.getframerate() != sample_rate) + or (wav_file.getsampwidth() != sample_width) + or (wav_file.getnchannels() != sample_channels) + ): + _LOGGER.error("Can only stream 16Khz 16-bit mono WAV") + return + + _LOGGER.debug("Streaming %s audio samples", wav_file.getnframes()) + + while self._is_running: + chunk = wav_file.readframes(samples_per_chunk) + if not chunk: + break + + if self._udp_server is not None: + self._udp_server.send_audio_bytes(chunk) + else: + self.cli.send_voice_assistant_audio(chunk) + + # Wait for 90% of the duration of the audio that was + # sent for it to be played. This will overrun the + # device's buffer for very long audio, so using a media + # player is preferred. + samples_in_chunk = len(chunk) // (sample_width * sample_channels) + seconds_in_chunk = samples_in_chunk / sample_rate + await asyncio.sleep(seconds_in_chunk * 0.9) + except asyncio.CancelledError: + return # Don't trigger state change + finally: + self.cli.send_voice_assistant_event( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, {} + ) + + # State change + self.tts_response_finished() + self.entry_data.async_set_assist_pipeline_state(False) + + async def _wrap_audio_stream(self) -> AsyncIterable[bytes]: + """Yield audio chunks from the queue until None.""" + while True: + chunk = await self._audio_queue.get() + if not chunk: + break + + yield chunk + + def _stop_pipeline(self) -> None: + """Request pipeline to be stopped by ending the audio stream and continue processing.""" + self._audio_queue.put_nowait(None) + _LOGGER.debug("Requested pipeline stop") + + def _abort_pipeline(self) -> None: + """Request pipeline to be aborted (no further processing).""" + _LOGGER.debug("Requested pipeline abort") + self._audio_queue.put_nowait(None) + if self._pipeline_task is not None: + self._pipeline_task.cancel() + + async def _start_udp_server(self) -> int: + """Start a UDP server on a random free port.""" + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(False) + sock.bind(("", 0)) # random free port + + ( + _transport, + protocol, + ) = await asyncio.get_running_loop().create_datagram_endpoint( + partial(VoiceAssistantUDPServer, self._audio_queue), sock=sock + ) + + assert isinstance(protocol, VoiceAssistantUDPServer) + self._udp_server = protocol + + # Return port + return cast(int, sock.getsockname()[1]) + + def _stop_udp_server(self) -> None: + """Stop the UDP server if it's running.""" + if self._udp_server is None: + return + + try: + self._udp_server.close() + finally: + self._udp_server = None + + _LOGGER.debug("Stopped UDP server") + + +class VoiceAssistantUDPServer(asyncio.DatagramProtocol): + """Receive UDP packets and forward them to the audio queue.""" + + transport: asyncio.DatagramTransport | None = None + remote_addr: tuple[str, int] | None = None + + def __init__( + self, audio_queue: asyncio.Queue[bytes | None], *args: Any, **kwargs: Any + ) -> None: + """Initialize protocol.""" + super().__init__(*args, **kwargs) + 
self._audio_queue = audio_queue + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Store transport for later use.""" + self.transport = cast(asyncio.DatagramTransport, transport) + + def datagram_received(self, data: bytes, addr: tuple[str, int]) -> None: + """Handle incoming UDP packet.""" + if self.remote_addr is None: + self.remote_addr = addr + + self._audio_queue.put_nowait(data) + + def error_received(self, exc: Exception) -> None: + """Handle when a send or receive operation raises an OSError. + + (Other than BlockingIOError or InterruptedError.) + """ + _LOGGER.error("ESPHome Voice Assistant UDP server error received: %s", exc) + + # Stop pipeline + self._audio_queue.put_nowait(None) + + def close(self) -> None: + """Close the receiver.""" + if self.transport is not None: + self.transport.close() + + self.remote_addr = None + + def send_audio_bytes(self, data: bytes) -> None: + """Send bytes to the device via UDP.""" + if self.transport is None: + _LOGGER.error("No transport to send audio to") + return + + if self.remote_addr is None: + _LOGGER.error("No address to send audio to") + return + + self.transport.sendto(data, self.remote_addr) diff --git a/homeassistant/components/esphome/binary_sensor.py b/homeassistant/components/esphome/binary_sensor.py index 32d96785601..ac759aa7b17 100644 --- a/homeassistant/components/esphome/binary_sensor.py +++ b/homeassistant/components/esphome/binary_sensor.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import TYPE_CHECKING + from aioesphomeapi import BinarySensorInfo, BinarySensorState, EntityInfo from homeassistant.components.binary_sensor import ( @@ -10,9 +12,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntityDescription, ) from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.enum import try_parse_enum +from .const import DOMAIN from .entity import EsphomeAssistEntity, EsphomeEntity, platform_async_setup_entry from .entry_data import ESPHomeConfigEntry @@ -74,10 +78,45 @@ class EsphomeAssistInProgressBinarySensor(EsphomeAssistEntity, BinarySensorEntit """A binary sensor implementation for ESPHome for use with assist_pipeline.""" entity_description = BinarySensorEntityDescription( + entity_registry_enabled_default=False, key="assist_in_progress", translation_key="assist_in_progress", ) + async def async_added_to_hass(self) -> None: + """Create issue.""" + await super().async_added_to_hass() + if TYPE_CHECKING: + assert self.registry_entry is not None + ir.async_create_issue( + self.hass, + DOMAIN, + f"assist_in_progress_deprecated_{self.registry_entry.id}", + breaks_in_ha_version="2025.4", + data={ + "entity_id": self.entity_id, + "entity_uuid": self.registry_entry.id, + "integration_name": "ESPHome", + }, + is_fixable=True, + severity=ir.IssueSeverity.WARNING, + translation_key="assist_in_progress_deprecated", + translation_placeholders={ + "integration_name": "ESPHome", + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Remove issue.""" + await super().async_will_remove_from_hass() + if TYPE_CHECKING: + assert self.registry_entry is not None + ir.async_delete_issue( + self.hass, + DOMAIN, + f"assist_in_progress_deprecated_{self.registry_entry.id}", + ) + @property def is_on(self) -> bool | None: """Return true if the binary sensor is on.""" diff --git a/homeassistant/components/esphome/climate.py 
b/homeassistant/components/esphome/climate.py index 1b9b53f24cd..8089fc4712a 100644 --- a/homeassistant/components/esphome/climate.py +++ b/homeassistant/components/esphome/climate.py @@ -129,7 +129,6 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = "climate" - _enable_turn_on_off_backwards_compatibility = False @callback def _on_static_info_update(self, static_info: EntityInfo) -> None: diff --git a/homeassistant/components/esphome/config_flow.py b/homeassistant/components/esphome/config_flow.py index d1948df0690..cb892b314cd 100644 --- a/homeassistant/components/esphome/config_flow.py +++ b/homeassistant/components/esphome/config_flow.py @@ -21,16 +21,17 @@ import aiohttp import voluptuous as vol from homeassistant.components import dhcp, zeroconf -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import callback from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.service_info.mqtt import MqttServiceInfo from homeassistant.util.json import json_loads_object @@ -57,15 +58,17 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _reauth_entry: ConfigEntry + def __init__(self) -> None: """Initialize flow.""" self._host: str | None = None + self.__name: str | None = None self._port: int | None = None self._password: str | None = None self._noise_required: bool | None = None self._noise_psk: str | None = None self._device_info: DeviceInfo | None = None - self._reauth_entry: ConfigEntry | None = None # The ESPHome name as per its config self._device_name: str | None = None @@ -102,14 +105,12 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow initialized by a reauth event.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry is not None - self._reauth_entry = entry - self._host = entry.data[CONF_HOST] - self._port = entry.data[CONF_PORT] - self._password = entry.data[CONF_PASSWORD] - self._name = entry.title - self._device_name = entry.data.get(CONF_DEVICE_NAME) + self._reauth_entry = self._get_reauth_entry() + self._host = entry_data[CONF_HOST] + self._port = entry_data[CONF_PORT] + self._password = entry_data[CONF_PASSWORD] + self._name = self._reauth_entry.title + self._device_name = entry_data.get(CONF_DEVICE_NAME) # Device without encryption allows fetching device info. We can then check # if the device is no longer using a password. 
If we did try with a password, @@ -152,12 +153,12 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): ) @property - def _name(self) -> str | None: - return self.context.get(CONF_NAME) + def _name(self) -> str: + return self.__name or "ESPHome" @_name.setter def _name(self, value: str) -> None: - self.context[CONF_NAME] = value + self.__name = value self.context["title_placeholders"] = {"name": self._name} async def _async_try_fetch_device_info(self) -> ConfigFlowResult: @@ -256,6 +257,9 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): self, discovery_info: MqttServiceInfo ) -> ConfigFlowResult: """Handle MQTT discovery.""" + if not discovery_info.payload: + return self.async_abort(reason="mqtt_missing_payload") + device_info = json_loads_object(discovery_info.payload) if "mac" not in device_info: return self.async_abort(reason="mqtt_missing_mac") @@ -323,7 +327,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): config_options = { CONF_ALLOW_SERVICE_CALLS: DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS, } - if self._reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( self._reauth_entry, data=self._reauth_entry.data | config_data ) @@ -410,7 +414,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): self._device_name = self._device_info.name mac_address = format_mac(self._device_info.mac_address) await self.async_set_unique_id(mac_address, raise_on_progress=False) - if not self._reauth_entry: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured( updates={CONF_HOST: self._host, CONF_PORT: self._port} ) @@ -481,16 +485,12 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for esphome.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/esphome/const.py b/homeassistant/components/esphome/const.py index 9c09591f6ea..143aaa6342a 100644 --- a/homeassistant/components/esphome/const.py +++ b/homeassistant/components/esphome/const.py @@ -18,3 +18,5 @@ PROJECT_URLS = { "esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/", } DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_VERSION_STR}.html" + +DATA_FFMPEG_PROXY = f"{DOMAIN}.ffmpeg_proxy" diff --git a/homeassistant/components/esphome/coordinator.py b/homeassistant/components/esphome/coordinator.py index 284e17fd183..b31a74dcf3f 100644 --- a/homeassistant/components/esphome/coordinator.py +++ b/homeassistant/components/esphome/coordinator.py @@ -31,6 +31,7 @@ class ESPHomeDashboardCoordinator(DataUpdateCoordinator[dict[str, ConfiguredDevi super().__init__( hass, _LOGGER, + config_entry=None, name="ESPHome Dashboard", update_interval=timedelta(minutes=5), always_update=False, diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index 6fc40612c48..fc41ee99a00 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -31,6 +31,7 @@ from aioesphomeapi import ( LightInfo, LockInfo, MediaPlayerInfo, + MediaPlayerSupportedFormat, NumberInfo, SelectInfo, SensorInfo, @@ -47,6 +48,7 @@ from aioesphomeapi 
import ( from aioesphomeapi.model import ButtonInfo from bleak_esphome.backend.device import ESPHomeBluetoothDevice +from homeassistant.components.assist_satellite import AssistSatelliteConfiguration from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback @@ -148,6 +150,15 @@ class RuntimeEntryData: tuple[type[EntityInfo], int], list[Callable[[EntityInfo], None]] ] = field(default_factory=dict) original_options: dict[str, Any] = field(default_factory=dict) + media_player_formats: dict[str, list[MediaPlayerSupportedFormat]] = field( + default_factory=lambda: defaultdict(list) + ) + assist_satellite_config_update_callbacks: list[ + Callable[[AssistSatelliteConfiguration], None] + ] = field(default_factory=list) + assist_satellite_set_wake_word_callbacks: list[Callable[[str], None]] = field( + default_factory=list + ) @property def name(self) -> str: @@ -500,3 +511,35 @@ class RuntimeEntryData: # We use this to determine if a deep sleep device should # be marked as unavailable or not. self.expected_disconnect = True + + @callback + def async_register_assist_satellite_config_updated_callback( + self, + callback_: Callable[[AssistSatelliteConfiguration], None], + ) -> CALLBACK_TYPE: + """Register to receive callbacks when the Assist satellite's configuration is updated.""" + self.assist_satellite_config_update_callbacks.append(callback_) + return lambda: self.assist_satellite_config_update_callbacks.remove(callback_) + + @callback + def async_assist_satellite_config_updated( + self, config: AssistSatelliteConfiguration + ) -> None: + """Notify listeners that the Assist satellite configuration has been updated.""" + for callback_ in self.assist_satellite_config_update_callbacks.copy(): + callback_(config) + + @callback + def async_register_assist_satellite_set_wake_word_callback( + self, + callback_: Callable[[str], None], + ) -> CALLBACK_TYPE: + """Register to receive callbacks when the Assist satellite's wake word is set.""" + self.assist_satellite_set_wake_word_callbacks.append(callback_) + return lambda: self.assist_satellite_set_wake_word_callbacks.remove(callback_) + + @callback + def async_assist_satellite_set_wake_word(self, wake_word_id: str) -> None: + """Notify listeners that the Assist satellite wake word has been set.""" + for callback_ in self.assist_satellite_set_wake_word_callbacks.copy(): + callback_(wake_word_id) diff --git a/homeassistant/components/esphome/fan.py b/homeassistant/components/esphome/fan.py index 454c5edf030..c09145c17b5 100644 --- a/homeassistant/components/esphome/fan.py +++ b/homeassistant/components/esphome/fan.py @@ -45,7 +45,6 @@ class EsphomeFan(EsphomeEntity[FanInfo, FanState], FanEntity): """A fan implementation for ESPHome.""" _supports_speed_levels: bool = True - _enable_turn_on_off_backwards_compatibility = False async def async_set_percentage(self, percentage: int) -> None: """Set the speed percentage of the fan.""" diff --git a/homeassistant/components/esphome/ffmpeg_proxy.py b/homeassistant/components/esphome/ffmpeg_proxy.py new file mode 100644 index 00000000000..9484d1e7593 --- /dev/null +++ b/homeassistant/components/esphome/ffmpeg_proxy.py @@ -0,0 +1,315 @@ +"""HTTP view that converts audio from a URL to a preferred format.""" + +import asyncio +from collections import defaultdict +from dataclasses import dataclass, field +from http import HTTPStatus +import logging +import secrets +from typing import Final + +from aiohttp import web 
+from aiohttp.abc import AbstractStreamWriter, BaseRequest + +from homeassistant.components.ffmpeg import FFmpegManager +from homeassistant.components.http import HomeAssistantView +from homeassistant.core import HomeAssistant + +from .const import DATA_FFMPEG_PROXY + +_LOGGER = logging.getLogger(__name__) + +_MAX_CONVERSIONS_PER_DEVICE: Final[int] = 2 + + +def async_create_proxy_url( + hass: HomeAssistant, + device_id: str, + media_url: str, + media_format: str, + rate: int | None = None, + channels: int | None = None, + width: int | None = None, +) -> str: + """Create a use proxy URL that automatically converts the media.""" + data: FFmpegProxyData = hass.data[DATA_FFMPEG_PROXY] + return data.async_create_proxy_url( + device_id, media_url, media_format, rate, channels, width + ) + + +@dataclass +class FFmpegConversionInfo: + """Information for ffmpeg conversion.""" + + convert_id: str + """Unique id for media conversion.""" + + media_url: str + """Source URL of media to convert.""" + + media_format: str + """Target format for media (mp3, flac, etc.)""" + + rate: int | None + """Target sample rate (None to keep source rate).""" + + channels: int | None + """Target number of channels (None to keep source channels).""" + + width: int | None + """Target sample width in bytes (None to keep source width).""" + + proc: asyncio.subprocess.Process | None = None + """Subprocess doing ffmpeg conversion.""" + + is_finished: bool = False + """True if conversion has finished.""" + + +@dataclass +class FFmpegProxyData: + """Data for ffmpeg proxy conversion.""" + + # device_id -> [info] + conversions: dict[str, list[FFmpegConversionInfo]] = field( + default_factory=lambda: defaultdict(list) + ) + + def async_create_proxy_url( + self, + device_id: str, + media_url: str, + media_format: str, + rate: int | None, + channels: int | None, + width: int | None, + ) -> str: + """Create a one-time use proxy URL that automatically converts the media.""" + + # Remove completed conversions + device_conversions = [ + info for info in self.conversions[device_id] if not info.is_finished + ] + + while len(device_conversions) >= _MAX_CONVERSIONS_PER_DEVICE: + # Stop oldest conversion before adding a new one + convert_info = device_conversions[0] + if (convert_info.proc is not None) and ( + convert_info.proc.returncode is None + ): + _LOGGER.debug( + "Stopping existing ffmpeg process for device: %s", device_id + ) + convert_info.proc.kill() + + device_conversions = device_conversions[1:] + + convert_id = secrets.token_urlsafe(16) + device_conversions.append( + FFmpegConversionInfo( + convert_id, media_url, media_format, rate, channels, width + ) + ) + _LOGGER.debug("Media URL allowed by proxy: %s", media_url) + + self.conversions[device_id] = device_conversions + + return f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.{media_format}" + + +class FFmpegConvertResponse(web.StreamResponse): + """HTTP streaming response that uses ffmpeg to convert audio from a URL.""" + + def __init__( + self, + manager: FFmpegManager, + convert_info: FFmpegConversionInfo, + device_id: str, + proxy_data: FFmpegProxyData, + chunk_size: int = 2048, + ) -> None: + """Initialize response. 
+ + Parameters + ---------- + manager: FFmpegManager + ffmpeg manager + convert_info: FFmpegConversionInfo + Information necessary to do the conversion + device_id: str + ESPHome device id + proxy_data: FFmpegProxyData + Data object to store ffmpeg process + chunk_size: int + Number of bytes to read from ffmpeg process at a time + + """ + super().__init__(status=200) + self.hass = manager.hass + self.manager = manager + self.convert_info = convert_info + self.device_id = device_id + self.proxy_data = proxy_data + self.chunk_size = chunk_size + + async def transcode( + self, request: BaseRequest, writer: AbstractStreamWriter + ) -> None: + """Stream url through ffmpeg conversion and out to HTTP client.""" + command_args = [ + "-i", + self.convert_info.media_url, + "-f", + self.convert_info.media_format, + ] + + if self.convert_info.rate is not None: + # Sample rate + command_args.extend(["-ar", str(self.convert_info.rate)]) + + if self.convert_info.channels is not None: + # Number of channels + command_args.extend(["-ac", str(self.convert_info.channels)]) + + if self.convert_info.width == 2: + # 16-bit samples + command_args.extend(["-sample_fmt", "s16"]) + + # Remove metadata and cover art + command_args.extend(["-map_metadata", "-1", "-vn"]) + + # disable progress stats on stderr + command_args.append("-nostats") + + # Output to stdout + command_args.append("pipe:") + + _LOGGER.debug("%s %s", self.manager.binary, " ".join(command_args)) + proc = await asyncio.create_subprocess_exec( + self.manager.binary, + *command_args, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + close_fds=False, # use posix_spawn in CPython < 3.13 + ) + + # Only one conversion process per device is allowed + self.convert_info.proc = proc + + # Create background task which will be cancelled when home assistant shuts down + write_task = self.hass.async_create_background_task( + self._write_ffmpeg_data(request, writer, proc), "ESPHome media proxy" + ) + await write_task + + async def _write_ffmpeg_data( + self, + request: BaseRequest, + writer: AbstractStreamWriter, + proc: asyncio.subprocess.Process, + ) -> None: + assert proc.stdout is not None + assert proc.stderr is not None + + stderr_task = self.hass.async_create_background_task( + self._dump_ffmpeg_stderr(proc), "ESPHome media proxy dump stderr" + ) + + try: + # Pull audio chunks from ffmpeg and pass them to the HTTP client + while ( + self.hass.is_running + and (request.transport is not None) + and (not request.transport.is_closing()) + and (chunk := await proc.stdout.read(self.chunk_size)) + ): + await self.write(chunk) + except asyncio.CancelledError: + _LOGGER.debug("ffmpeg transcoding cancelled") + # Abort the transport, we don't wait for ESPHome to drain the write buffer; + # it may need a very long time or never finish if the player is paused. 
+ if request.transport: + request.transport.abort() + raise # don't log error + except: + _LOGGER.exception("Unexpected error during ffmpeg conversion") + raise + finally: + # Allow conversion info to be removed + self.convert_info.is_finished = True + + # stop dumping ffmpeg stderr task + stderr_task.cancel() + + # Terminate hangs, so kill is used + if proc.returncode is None: + proc.kill() + + # Close connection by writing EOF unless already closing + if request.transport and not request.transport.is_closing(): + await writer.write_eof() + + async def _dump_ffmpeg_stderr( + self, + proc: asyncio.subprocess.Process, + ) -> None: + assert proc.stdout is not None + assert proc.stderr is not None + + while self.hass.is_running and (chunk := await proc.stderr.readline()): + _LOGGER.debug("ffmpeg[%s] output: %s", proc.pid, chunk.decode().rstrip()) + + +class FFmpegProxyView(HomeAssistantView): + """FFmpeg web view to convert audio and stream back to client.""" + + requires_auth = False + url = "/api/esphome/ffmpeg_proxy/{device_id}/{filename}" + name = "api:esphome:ffmpeg_proxy" + + def __init__(self, manager: FFmpegManager, proxy_data: FFmpegProxyData) -> None: + """Initialize an ffmpeg view.""" + self.manager = manager + self.proxy_data = proxy_data + + async def get( + self, request: web.Request, device_id: str, filename: str + ) -> web.StreamResponse: + """Start a get request.""" + device_conversions = self.proxy_data.conversions[device_id] + if not device_conversions: + return web.Response( + body="No proxy URL for device", status=HTTPStatus.NOT_FOUND + ) + + # {id}.mp3 -> id, mp3 + convert_id, media_format = filename.rsplit(".") + + # Look up conversion info + convert_info: FFmpegConversionInfo | None = None + for maybe_convert_info in device_conversions: + if (maybe_convert_info.convert_id == convert_id) and ( + maybe_convert_info.media_format == media_format + ): + convert_info = maybe_convert_info + break + + if convert_info is None: + return web.Response(body="Invalid proxy URL", status=HTTPStatus.BAD_REQUEST) + + # Stop previous process if the URL is being reused. + # We could continue from where the previous connection left off, but + # there would be no media header. 
+ if (convert_info.proc is not None) and (convert_info.proc.returncode is None): + convert_info.proc.kill() + convert_info.proc = None + + # Stream converted audio back to client + resp = FFmpegConvertResponse( + self.manager, convert_info, device_id, self.proxy_data + ) + writer = await resp.prepare(request) + assert writer is not None + await resp.transcode(request, writer) + return resp diff --git a/homeassistant/components/esphome/light.py b/homeassistant/components/esphome/light.py index 52f999afe4f..8fecf34862b 100644 --- a/homeassistant/components/esphome/light.py +++ b/homeassistant/components/esphome/light.py @@ -414,11 +414,8 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity): self._attr_supported_color_modes = supported self._attr_effect_list = static_info.effects - self._attr_min_mireds = round(static_info.min_mireds) - self._attr_max_mireds = round(static_info.max_mireds) - if ColorMode.COLOR_TEMP in supported: - self._attr_min_color_temp_kelvin = _mired_to_kelvin(static_info.max_mireds) - self._attr_max_color_temp_kelvin = _mired_to_kelvin(static_info.min_mireds) + self._attr_min_color_temp_kelvin = _mired_to_kelvin(static_info.max_mireds) + self._attr_max_color_temp_kelvin = _mired_to_kelvin(static_info.min_mireds) async_setup_entry = partial( diff --git a/homeassistant/components/esphome/lock.py b/homeassistant/components/esphome/lock.py index 15a402ccb91..502cd361277 100644 --- a/homeassistant/components/esphome/lock.py +++ b/homeassistant/components/esphome/lock.py @@ -68,7 +68,7 @@ class EsphomeLock(EsphomeEntity[LockInfo, LockEntityState], LockEntity): @convert_api_error_ha_error async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - code = kwargs.get(ATTR_CODE, None) + code = kwargs.get(ATTR_CODE) self._client.lock_command(self._key, LockCommand.UNLOCK, code) @convert_api_error_ha_error diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index 7629d1fa9cd..007b4e791e1 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -20,19 +20,17 @@ from aioesphomeapi import ( RequiresEncryptionAPIError, UserService, UserServiceArgType, - VoiceAssistantAudioSettings, - VoiceAssistantFeature, ) from awesomeversion import AwesomeVersion import voluptuous as vol from homeassistant.components import tag, zeroconf -from homeassistant.components.intent import async_register_timer_handler from homeassistant.const import ( ATTR_DEVICE_ID, CONF_MODE, EVENT_HOMEASSISTANT_CLOSE, EVENT_LOGGING_CHANGED, + Platform, ) from homeassistant.core import ( Event, @@ -73,12 +71,6 @@ from .domain_data import DomainData # Import config flow so that it's added to the registry from .entry_data import ESPHomeConfigEntry, RuntimeEntryData -from .voice_assistant import ( - VoiceAssistantAPIPipeline, - VoiceAssistantPipeline, - VoiceAssistantUDPPipeline, - handle_timer_event, -) _LOGGER = logging.getLogger(__name__) @@ -149,7 +141,6 @@ class ESPHomeManager: "cli", "device_id", "domain_data", - "voice_assistant_pipeline", "reconnect_logic", "zeroconf_instance", "entry_data", @@ -173,7 +164,6 @@ class ESPHomeManager: self.cli = cli self.device_id: str | None = None self.domain_data = domain_data - self.voice_assistant_pipeline: VoiceAssistantPipeline | None = None self.reconnect_logic: ReconnectLogic | None = None self.zeroconf_instance = zeroconf_instance self.entry_data = entry.runtime_data @@ -329,77 +319,15 @@ class ESPHomeManager: entity_id, attribute, 
hass.states.get(entity_id) ) - def _handle_pipeline_finished(self) -> None: - self.entry_data.async_set_assist_pipeline_state(False) - - if self.voice_assistant_pipeline is not None: - if isinstance(self.voice_assistant_pipeline, VoiceAssistantUDPPipeline): - self.voice_assistant_pipeline.close() - self.voice_assistant_pipeline = None - - async def _handle_pipeline_start( - self, - conversation_id: str, - flags: int, - audio_settings: VoiceAssistantAudioSettings, - wake_word_phrase: str | None, - ) -> int | None: - """Start a voice assistant pipeline.""" - if self.voice_assistant_pipeline is not None: - _LOGGER.warning("Previous Voice assistant pipeline was not stopped") - self.voice_assistant_pipeline.stop() - self.voice_assistant_pipeline = None - - hass = self.hass - assert self.entry_data.device_info is not None - if ( - self.entry_data.device_info.voice_assistant_feature_flags_compat( - self.entry_data.api_version - ) - & VoiceAssistantFeature.API_AUDIO - ): - self.voice_assistant_pipeline = VoiceAssistantAPIPipeline( - hass, - self.entry_data, - self.cli.send_voice_assistant_event, - self._handle_pipeline_finished, - self.cli, - ) - port = 0 - else: - self.voice_assistant_pipeline = VoiceAssistantUDPPipeline( - hass, - self.entry_data, - self.cli.send_voice_assistant_event, - self._handle_pipeline_finished, - ) - port = await self.voice_assistant_pipeline.start_server() - - assert self.device_id is not None, "Device ID must be set" - hass.async_create_background_task( - self.voice_assistant_pipeline.run_pipeline( - device_id=self.device_id, - conversation_id=conversation_id or None, - flags=flags, - audio_settings=audio_settings, - wake_word_phrase=wake_word_phrase, - ), - "esphome.voice_assistant_pipeline.run_pipeline", + @callback + def async_on_state_request( + self, entity_id: str, attribute: str | None = None + ) -> None: + """Forward state for requested entity.""" + self._send_home_assistant_state( + entity_id, attribute, self.hass.states.get(entity_id) ) - return port - - async def _handle_pipeline_stop(self) -> None: - """Stop a voice assistant pipeline.""" - if self.voice_assistant_pipeline is not None: - self.voice_assistant_pipeline.stop() - - async def _handle_audio(self, data: bytes) -> None: - if self.voice_assistant_pipeline is None: - return - assert isinstance(self.voice_assistant_pipeline, VoiceAssistantAPIPipeline) - self.voice_assistant_pipeline.receive_audio_bytes(data) - async def on_connect(self) -> None: """Subscribe to states and list entities on successful API login.""" try: @@ -500,33 +428,21 @@ class ESPHomeManager: ) ) - flags = device_info.voice_assistant_feature_flags_compat(api_version) - if flags: - if flags & VoiceAssistantFeature.API_AUDIO: - entry_data.disconnect_callbacks.add( - cli.subscribe_voice_assistant( - handle_start=self._handle_pipeline_start, - handle_stop=self._handle_pipeline_stop, - handle_audio=self._handle_audio, - ) - ) - else: - entry_data.disconnect_callbacks.add( - cli.subscribe_voice_assistant( - handle_start=self._handle_pipeline_start, - handle_stop=self._handle_pipeline_stop, - ) - ) - if flags & VoiceAssistantFeature.TIMERS: - entry_data.disconnect_callbacks.add( - async_register_timer_handler( - hass, self.device_id, partial(handle_timer_event, cli) - ) - ) + if device_info.voice_assistant_feature_flags_compat(api_version) and ( + Platform.ASSIST_SATELLITE not in entry_data.loaded_platforms + ): + # Create assist satellite entity + await self.hass.config_entries.async_forward_entry_setups( + self.entry, 
[Platform.ASSIST_SATELLITE] + ) + entry_data.loaded_platforms.add(Platform.ASSIST_SATELLITE) cli.subscribe_states(entry_data.async_update_state) cli.subscribe_service_calls(self.async_on_service_call) - cli.subscribe_home_assistant_states(self.async_on_state_subscription) + cli.subscribe_home_assistant_states( + self.async_on_state_subscription, + self.async_on_state_request, + ) entry_data.async_save_to_store() _async_check_firmware_version(hass, device_info, api_version) @@ -560,6 +476,13 @@ class ESPHomeManager: # will be cleared anyway. entry_data.async_update_device_state() + if Platform.ASSIST_SATELLITE in self.entry_data.loaded_platforms: + await self.hass.config_entries.async_unload_platforms( + self.entry, [Platform.ASSIST_SATELLITE] + ) + + self.entry_data.loaded_platforms.remove(Platform.ASSIST_SATELLITE) + async def on_connect_error(self, err: Exception) -> None: """Start reauth flow if appropriate connect error type.""" if isinstance( @@ -647,7 +570,11 @@ def _async_setup_device_registry( configuration_url = None if device_info.webserver_port > 0: configuration_url = f"http://{entry.data['host']}:{device_info.webserver_port}" - elif dashboard := async_get_dashboard(hass): + elif ( + (dashboard := async_get_dashboard(hass)) + and dashboard.data + and dashboard.data.get(device_info.name) + ): configuration_url = f"homeassistant://hassio/ingress/{dashboard.addon_slug}" manufacturer = "espressif" diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 454b547cdf4..775ffbff4c8 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -4,7 +4,7 @@ "after_dependencies": ["zeroconf", "tag"], "codeowners": ["@OttoWinter", "@jesserockz", "@kbx81", "@bdraco"], "config_flow": true, - "dependencies": ["assist_pipeline", "bluetooth", "intent"], + "dependencies": ["assist_pipeline", "bluetooth", "intent", "ffmpeg", "http"], "dhcp": [ { "registered_devices": true @@ -15,11 +15,10 @@ "iot_class": "local_push", "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], "mqtt": ["esphome/discover/#"], - "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==25.2.1", + "aioesphomeapi==28.0.0", "esphome-dashboard-api==1.2.3", - "bleak-esphome==1.0.0" + "bleak-esphome==1.1.0" ], "zeroconf": ["_esphomelib._tcp.local."] } diff --git a/homeassistant/components/esphome/media_player.py b/homeassistant/components/esphome/media_player.py index f7c5d7011f8..8a30814aa2c 100644 --- a/homeassistant/components/esphome/media_player.py +++ b/homeassistant/components/esphome/media_player.py @@ -3,19 +3,24 @@ from __future__ import annotations from functools import partial -from typing import Any +import logging +from typing import Any, cast +from urllib.parse import urlparse from aioesphomeapi import ( EntityInfo, MediaPlayerCommand, MediaPlayerEntityState, + MediaPlayerFormatPurpose, MediaPlayerInfo, MediaPlayerState as EspMediaPlayerState, + MediaPlayerSupportedFormat, ) from homeassistant.components import media_source from homeassistant.components.media_player import ( ATTR_MEDIA_ANNOUNCE, + ATTR_MEDIA_EXTRA, BrowseMedia, MediaPlayerDeviceClass, MediaPlayerEntity, @@ -34,6 +39,9 @@ from .entity import ( platform_async_setup_entry, ) from .enum_mapper import EsphomeEnumMapper +from .ffmpeg_proxy import async_create_proxy_url + +_LOGGER = logging.getLogger(__name__) _STATES: EsphomeEnumMapper[EspMediaPlayerState, MediaPlayerState] = EsphomeEnumMapper( { @@ -43,6 +51,8 @@ _STATES: 
EsphomeEnumMapper[EspMediaPlayerState, MediaPlayerState] = EsphomeEnumM } ) +ATTR_BYPASS_PROXY = "bypass_proxy" + class EsphomeMediaPlayer( EsphomeEntity[MediaPlayerInfo, MediaPlayerEntityState], MediaPlayerEntity @@ -66,6 +76,9 @@ class EsphomeMediaPlayer( if self._static_info.supports_pause: flags |= MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY self._attr_supported_features = flags + self._entry_data.media_player_formats[static_info.unique_id] = cast( + MediaPlayerInfo, static_info + ).supported_formats @property @esphome_state_property @@ -98,11 +111,97 @@ class EsphomeMediaPlayer( media_id = async_process_play_media_url(self.hass, media_id) announcement = kwargs.get(ATTR_MEDIA_ANNOUNCE) + bypass_proxy = kwargs.get(ATTR_MEDIA_EXTRA, {}).get(ATTR_BYPASS_PROXY) + + supported_formats: list[MediaPlayerSupportedFormat] | None = ( + self._entry_data.media_player_formats.get(self._static_info.unique_id) + ) + + if ( + not bypass_proxy + and supported_formats + and _is_url(media_id) + and ( + proxy_url := self._get_proxy_url( + supported_formats, media_id, announcement is True + ) + ) + ): + # Substitute proxy URL + media_id = proxy_url self._client.media_player_command( self._key, media_url=media_id, announcement=announcement ) + async def async_will_remove_from_hass(self) -> None: + """Handle entity being removed.""" + await super().async_will_remove_from_hass() + self._entry_data.media_player_formats.pop(self.entity_id, None) + + def _get_proxy_url( + self, + supported_formats: list[MediaPlayerSupportedFormat], + url: str, + announcement: bool, + ) -> str | None: + """Get URL for ffmpeg proxy.""" + if self.device_entry is None: + # Device id is required + return None + + # Choose the first default or announcement supported format + format_to_use: MediaPlayerSupportedFormat | None = None + for supported_format in supported_formats: + if (format_to_use is None) and ( + supported_format.purpose == MediaPlayerFormatPurpose.DEFAULT + ): + # First default format + format_to_use = supported_format + elif announcement and ( + supported_format.purpose == MediaPlayerFormatPurpose.ANNOUNCEMENT + ): + # First announcement format + format_to_use = supported_format + break + + if format_to_use is None: + # No format for conversion + return None + + # Replace the media URL with a proxy URL pointing to Home + # Assistant. When requested, Home Assistant will use ffmpeg to + # convert the source URL to the supported format. 
+ _LOGGER.debug("Proxying media url %s with format %s", url, format_to_use) + device_id = self.device_entry.id + media_format = format_to_use.format + + # 0 = None + rate: int | None = None + channels: int | None = None + width: int | None = None + if format_to_use.sample_rate > 0: + rate = format_to_use.sample_rate + + if format_to_use.num_channels > 0: + channels = format_to_use.num_channels + + if format_to_use.sample_bytes > 0: + width = format_to_use.sample_bytes + + proxy_url = async_create_proxy_url( + self.hass, + device_id, + url, + media_format=media_format, + rate=rate, + channels=channels, + width=width, + ) + + # Resolve URL + return async_process_play_media_url(self.hass, proxy_url) + async def async_browse_media( self, media_content_type: MediaType | str | None = None, @@ -144,6 +243,12 @@ class EsphomeMediaPlayer( ) +def _is_url(url: str) -> bool: + """Validate the URL can be parsed and at least has scheme + netloc.""" + result = urlparse(url) + return all([result.scheme, result.netloc]) + + async_setup_entry = partial( platform_async_setup_entry, info_type=MediaPlayerInfo, diff --git a/homeassistant/components/esphome/repairs.py b/homeassistant/components/esphome/repairs.py new file mode 100644 index 00000000000..31e4b88c689 --- /dev/null +++ b/homeassistant/components/esphome/repairs.py @@ -0,0 +1,22 @@ +"""Repairs implementation for the esphome integration.""" + +from __future__ import annotations + +from homeassistant.components.assist_pipeline.repair_flows import ( + AssistInProgressDeprecatedRepairFlow, +) +from homeassistant.components.repairs import RepairsFlow +from homeassistant.core import HomeAssistant + + +async def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, +) -> RepairsFlow: + """Create flow.""" + if issue_id.startswith("assist_in_progress_deprecated"): + return AssistInProgressDeprecatedRepairFlow(data) + # If ESPHome adds confirm-only repairs in the future, this should be changed + # to return a ConfirmRepairFlow instead of raising a ValueError + raise ValueError(f"unknown repair {issue_id}") diff --git a/homeassistant/components/esphome/select.py b/homeassistant/components/esphome/select.py index 623946503eb..71a21186d3d 100644 --- a/homeassistant/components/esphome/select.py +++ b/homeassistant/components/esphome/select.py @@ -8,8 +8,11 @@ from homeassistant.components.assist_pipeline.select import ( AssistPipelineSelect, VadSensitivitySelect, ) -from homeassistant.components.select import SelectEntity +from homeassistant.components.assist_satellite import AssistSatelliteConfiguration +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import restore_state from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -47,6 +50,7 @@ async def async_setup_entry( [ EsphomeAssistPipelineSelect(hass, entry_data), EsphomeVadSensitivitySelect(hass, entry_data), + EsphomeAssistSatelliteWakeWordSelect(hass, entry_data), ] ) @@ -89,3 +93,77 @@ class EsphomeVadSensitivitySelect(EsphomeAssistEntity, VadSensitivitySelect): """Initialize a VAD sensitivity selector.""" EsphomeAssistEntity.__init__(self, entry_data) VadSensitivitySelect.__init__(self, hass, self._device_info.mac_address) + + +class EsphomeAssistSatelliteWakeWordSelect( + EsphomeAssistEntity, SelectEntity, restore_state.RestoreEntity +): + 
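The new `repairs.py` above only serves fix flows whose issue IDs start with `assist_in_progress_deprecated`; the issue itself is raised elsewhere (by the assist_pipeline integration, per the imported flow). A hedged sketch of how such a fixable issue could be registered so that `async_create_fix_flow` is invoked for it; the exact issue ID suffix and `data` payload here are illustrative:

```python
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir


def _async_register_deprecation_issue(hass: HomeAssistant, entity_id: str) -> None:
    """Register a fixable issue routed through the esphome repairs platform."""
    ir.async_create_issue(
        hass,
        "esphome",
        # Must start with "assist_in_progress_deprecated" to match the
        # startswith() dispatch in async_create_fix_flow above.
        f"assist_in_progress_deprecated_{entity_id}",
        is_fixable=True,  # fixable issues get a repair flow in the UI
        severity=ir.IssueSeverity.WARNING,
        translation_key="assist_in_progress_deprecated",
        data={"entity_id": entity_id},
    )
```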
"""Wake word selector for esphome devices.""" + + entity_description = SelectEntityDescription( + key="wake_word", + translation_key="wake_word", + entity_category=EntityCategory.CONFIG, + ) + _attr_should_poll = False + _attr_current_option: str | None = None + _attr_options: list[str] = [] + + def __init__(self, hass: HomeAssistant, entry_data: RuntimeEntryData) -> None: + """Initialize a wake word selector.""" + EsphomeAssistEntity.__init__(self, entry_data) + + unique_id_prefix = self._device_info.mac_address + self._attr_unique_id = f"{unique_id_prefix}-wake_word" + + # name -> id + self._wake_words: dict[str, str] = {} + + @property + def available(self) -> bool: + """Return if entity is available.""" + return bool(self._attr_options) + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + + # Update options when config is updated + self.async_on_remove( + self._entry_data.async_register_assist_satellite_config_updated_callback( + self.async_satellite_config_updated + ) + ) + + async def async_select_option(self, option: str) -> None: + """Select an option.""" + if wake_word_id := self._wake_words.get(option): + # _attr_current_option will be updated on + # async_satellite_config_updated after the device sets the wake + # word. + self._entry_data.async_assist_satellite_set_wake_word(wake_word_id) + + def async_satellite_config_updated( + self, config: AssistSatelliteConfiguration + ) -> None: + """Update options with available wake words.""" + if (not config.available_wake_words) or (config.max_active_wake_words < 1): + self._attr_current_option = None + self._wake_words.clear() + self.async_write_ha_state() + return + + self._wake_words = {w.wake_word: w.id for w in config.available_wake_words} + self._attr_options = sorted(self._wake_words) + + if config.active_wake_words: + # Select first active wake word + wake_word_id = config.active_wake_words[0] + for wake_word in config.available_wake_words: + if wake_word.id == wake_word_id: + self._attr_current_option = wake_word.wake_word + else: + # Select first available wake word + self._attr_current_option = config.available_wake_words[0].wake_word + + self.async_write_ha_state() diff --git a/homeassistant/components/esphome/strings.json b/homeassistant/components/esphome/strings.json index eb2e8f65b78..81b58de8df2 100644 --- a/homeassistant/components/esphome/strings.json +++ b/homeassistant/components/esphome/strings.json @@ -8,7 +8,8 @@ "service_received": "Action received", "mqtt_missing_mac": "Missing MAC address in MQTT properties.", "mqtt_missing_api": "Missing API port in MQTT properties.", - "mqtt_missing_ip": "Missing IP address in MQTT properties." + "mqtt_missing_ip": "Missing IP address in MQTT properties.", + "mqtt_missing_payload": "Missing MQTT Payload." }, "error": { "resolve_error": "Can't resolve address of the ESP. 
If this error persists, please set a static IP address", @@ -59,6 +60,11 @@ } }, "entity": { + "assist_satellite": { + "assist_satellite": { + "name": "[%key:component::assist_satellite::entity_component::_::name%]" + } + }, "binary_sensor": { "assist_in_progress": { "name": "[%key:component::assist_pipeline::entity::binary_sensor::assist_in_progress::name%]" @@ -78,6 +84,12 @@ "aggressive": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::aggressive%]", "relaxed": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::relaxed%]" } + }, + "wake_word": { + "name": "Wake word", + "state": { + "okay_nabu": "Okay Nabu" + } } }, "climate": { @@ -93,6 +105,16 @@ } }, "issues": { + "assist_in_progress_deprecated": { + "title": "[%key:component::assist_pipeline::issues::assist_in_progress_deprecated::title%]", + "fix_flow": { + "step": { + "confirm_disable_entity": { + "description": "[%key:component::assist_pipeline::issues::assist_in_progress_deprecated::fix_flow::step::confirm_disable_entity::description%]" + } + } + } + }, "ble_firmware_outdated": { "title": "Update {name} with ESPHome {version} or later", "description": "To improve Bluetooth reliability and performance, we highly recommend updating {name} with ESPHome {version} or later. When updating the device from ESPHome earlier than 2022.12.0, it is recommended to use a serial cable instead of an over-the-air update to take advantage of the new partition scheme." @@ -103,7 +125,7 @@ }, "service_calls_not_allowed": { "title": "{name} is not permitted to perform Home Assistant actions", - "description": "The ESPHome device attempted to perform a Home Assistant action, but this functionality is not enabled.\n\nIf you trust this device and want to allow it to perfom Home Assistant action, you can enable this functionality in the options flow." + "description": "The ESPHome device attempted to perform a Home Assistant action, but this functionality is not enabled.\n\nIf you trust this device and want to allow it to perform Home Assistant action, you can enable this functionality in the options flow." } } } diff --git a/homeassistant/components/esphome/update.py b/homeassistant/components/esphome/update.py index b7905fb4fdb..2b593051742 100644 --- a/homeassistant/components/esphome/update.py +++ b/homeassistant/components/esphome/update.py @@ -61,6 +61,8 @@ async def async_setup_entry( if (dashboard := async_get_dashboard(hass)) is None: return entry_data = DomainData.get(hass).get_entry_data(entry) + assert entry_data.device_info is not None + device_name = entry_data.device_info.name unsubs: list[CALLBACK_TYPE] = [] @callback @@ -72,13 +74,22 @@ async def async_setup_entry( if not entry_data.available or not dashboard.last_update_success: return + # Do not add Dashboard Entity if this device is not known to the ESPHome dashboard. 
+ if dashboard.data is None or dashboard.data.get(device_name) is None: + return + for unsub in unsubs: unsub() unsubs.clear() async_add_entities([ESPHomeDashboardUpdateEntity(entry_data, dashboard)]) - if entry_data.available and dashboard.last_update_success: + if ( + entry_data.available + and dashboard.last_update_success + and dashboard.data is not None + and dashboard.data.get(device_name) + ): _async_setup_update_entity() return @@ -133,10 +144,8 @@ class ESPHomeDashboardUpdateEntity( self._attr_supported_features = NO_FEATURES self._attr_installed_version = device_info.esphome_version device = coordinator.data.get(device_info.name) - if device is None: - self._attr_latest_version = None - else: - self._attr_latest_version = device["current_version"] + assert device is not None + self._attr_latest_version = device["current_version"] @callback def _handle_coordinator_update(self) -> None: @@ -230,10 +239,8 @@ class ESPHomeUpdateEntity(EsphomeEntity[UpdateInfo, UpdateState], UpdateEntity): @property @esphome_state_property - def in_progress(self) -> bool | int | None: + def in_progress(self) -> bool: """Return if the update is in progress.""" - if self._state.has_progress: - return int(self._state.progress) return self._state.in_progress @property @@ -260,6 +267,14 @@ class ESPHomeUpdateEntity(EsphomeEntity[UpdateInfo, UpdateState], UpdateEntity): """Return the title of the update.""" return self._state.title + @property + @esphome_state_property + def update_percentage(self) -> int | None: + """Return if the update is in progress.""" + if self._state.has_progress: + return int(self._state.progress) + return None + @convert_api_error_ha_error async def async_update(self) -> None: """Command device to check for update.""" diff --git a/homeassistant/components/esphome/voice_assistant.py b/homeassistant/components/esphome/voice_assistant.py deleted file mode 100644 index eb55be2ced6..00000000000 --- a/homeassistant/components/esphome/voice_assistant.py +++ /dev/null @@ -1,479 +0,0 @@ -"""ESPHome voice assistant support.""" - -from __future__ import annotations - -import asyncio -from collections.abc import AsyncIterable, Callable -import io -import logging -import socket -from typing import cast -import wave - -from aioesphomeapi import ( - APIClient, - VoiceAssistantAudioSettings, - VoiceAssistantCommandFlag, - VoiceAssistantEventType, - VoiceAssistantFeature, - VoiceAssistantTimerEventType, -) - -from homeassistant.components import stt, tts -from homeassistant.components.assist_pipeline import ( - AudioSettings, - PipelineEvent, - PipelineEventType, - PipelineNotFound, - PipelineStage, - WakeWordSettings, - async_pipeline_from_audio_stream, - select as pipeline_select, -) -from homeassistant.components.assist_pipeline.error import ( - WakeWordDetectionAborted, - WakeWordDetectionError, -) -from homeassistant.components.assist_pipeline.vad import VadSensitivity -from homeassistant.components.intent.timers import TimerEventType, TimerInfo -from homeassistant.components.media_player import async_process_play_media_url -from homeassistant.core import Context, HomeAssistant, callback - -from .const import DOMAIN -from .entry_data import RuntimeEntryData -from .enum_mapper import EsphomeEnumMapper - -_LOGGER = logging.getLogger(__name__) - -UDP_PORT = 0 # Set to 0 to let the OS pick a free random port -UDP_MAX_PACKET_SIZE = 1024 - -_VOICE_ASSISTANT_EVENT_TYPES: EsphomeEnumMapper[ - VoiceAssistantEventType, PipelineEventType -] = EsphomeEnumMapper( - { - 
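The `update.py` hunks above follow the newer UpdateEntity contract: `in_progress` reports only whether an install is running, while the numeric progress moves to the separate `update_percentage` property (returning None yields an indeterminate progress bar). A minimal sketch of the split, with a small stand-in object for the device state since the real entity reads it from the ESPHome native API:

```python
from dataclasses import dataclass

from homeassistant.components.update import UpdateEntity


@dataclass
class _DeviceState:
    """Illustrative stand-in for the device-reported update state."""

    in_progress: bool = False
    has_progress: bool = False
    progress: float = 0.0


class ExampleUpdateEntity(UpdateEntity):
    """Update entity reporting progress via update_percentage."""

    _state = _DeviceState()

    @property
    def in_progress(self) -> bool:
        """Return True while an update install is running."""
        return self._state.in_progress

    @property
    def update_percentage(self) -> int | None:
        """Return install progress, or None for an indeterminate progress bar."""
        if self._state.has_progress:
            return int(self._state.progress)
        return None
```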
VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: PipelineEventType.ERROR, - VoiceAssistantEventType.VOICE_ASSISTANT_RUN_START: PipelineEventType.RUN_START, - VoiceAssistantEventType.VOICE_ASSISTANT_RUN_END: PipelineEventType.RUN_END, - VoiceAssistantEventType.VOICE_ASSISTANT_STT_START: PipelineEventType.STT_START, - VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: PipelineEventType.STT_END, - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START: PipelineEventType.INTENT_START, - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: PipelineEventType.INTENT_END, - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: PipelineEventType.TTS_START, - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: PipelineEventType.TTS_END, - VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_START: PipelineEventType.WAKE_WORD_START, - VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: PipelineEventType.WAKE_WORD_END, - VoiceAssistantEventType.VOICE_ASSISTANT_STT_VAD_START: PipelineEventType.STT_VAD_START, - VoiceAssistantEventType.VOICE_ASSISTANT_STT_VAD_END: PipelineEventType.STT_VAD_END, - } -) - -_TIMER_EVENT_TYPES: EsphomeEnumMapper[VoiceAssistantTimerEventType, TimerEventType] = ( - EsphomeEnumMapper( - { - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED: TimerEventType.STARTED, - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED: TimerEventType.UPDATED, - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_CANCELLED: TimerEventType.CANCELLED, - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_FINISHED: TimerEventType.FINISHED, - } - ) -) - - -class VoiceAssistantPipeline: - """Base abstract pipeline class.""" - - started = False - stop_requested = False - - def __init__( - self, - hass: HomeAssistant, - entry_data: RuntimeEntryData, - handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], - handle_finished: Callable[[], None], - ) -> None: - """Initialize the pipeline.""" - self.context = Context() - self.hass = hass - self.entry_data = entry_data - assert entry_data.device_info is not None - self.device_info = entry_data.device_info - - self.queue: asyncio.Queue[bytes] = asyncio.Queue() - self.handle_event = handle_event - self.handle_finished = handle_finished - self._tts_done = asyncio.Event() - self._tts_task: asyncio.Task | None = None - - @property - def is_running(self) -> bool: - """True if the pipeline is started and hasn't been asked to stop.""" - return self.started and (not self.stop_requested) - - async def _iterate_packets(self) -> AsyncIterable[bytes]: - """Iterate over incoming packets.""" - while data := await self.queue.get(): - if not self.is_running: - break - - yield data - - def _event_callback(self, event: PipelineEvent) -> None: - """Handle pipeline events.""" - - try: - event_type = _VOICE_ASSISTANT_EVENT_TYPES.from_hass(event.type) - except KeyError: - _LOGGER.debug("Received unknown pipeline event type: %s", event.type) - return - - data_to_send = None - error = False - if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_START: - self.entry_data.async_set_assist_pipeline_state(True) - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: - assert event.data is not None - data_to_send = {"text": event.data["stt_output"]["text"]} - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: - assert event.data is not None - data_to_send = { - "conversation_id": event.data["intent_output"]["conversation_id"] or "", - } - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: - assert 
event.data is not None - data_to_send = {"text": event.data["tts_input"]} - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: - assert event.data is not None - tts_output = event.data["tts_output"] - if tts_output: - path = tts_output["url"] - url = async_process_play_media_url(self.hass, path) - data_to_send = {"url": url} - - if ( - self.device_info.voice_assistant_feature_flags_compat( - self.entry_data.api_version - ) - & VoiceAssistantFeature.SPEAKER - ): - media_id = tts_output["media_id"] - self._tts_task = self.hass.async_create_background_task( - self._send_tts(media_id), "esphome_voice_assistant_tts" - ) - else: - self._tts_done.set() - else: - # Empty TTS response - data_to_send = {} - self._tts_done.set() - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: - assert event.data is not None - if not event.data["wake_word_output"]: - event_type = VoiceAssistantEventType.VOICE_ASSISTANT_ERROR - data_to_send = { - "code": "no_wake_word", - "message": "No wake word detected", - } - error = True - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: - assert event.data is not None - data_to_send = { - "code": event.data["code"], - "message": event.data["message"], - } - error = True - - self.handle_event(event_type, data_to_send) - if error: - self._tts_done.set() - self.handle_finished() - - async def run_pipeline( - self, - device_id: str, - conversation_id: str | None, - flags: int = 0, - audio_settings: VoiceAssistantAudioSettings | None = None, - wake_word_phrase: str | None = None, - ) -> None: - """Run the Voice Assistant pipeline.""" - if audio_settings is None or audio_settings.volume_multiplier == 0: - audio_settings = VoiceAssistantAudioSettings() - - if ( - self.device_info.voice_assistant_feature_flags_compat( - self.entry_data.api_version - ) - & VoiceAssistantFeature.SPEAKER - ): - tts_audio_output = "wav" - else: - tts_audio_output = "mp3" - - _LOGGER.debug("Starting pipeline") - if flags & VoiceAssistantCommandFlag.USE_WAKE_WORD: - start_stage = PipelineStage.WAKE_WORD - else: - start_stage = PipelineStage.STT - try: - await async_pipeline_from_audio_stream( - self.hass, - context=self.context, - event_callback=self._event_callback, - stt_metadata=stt.SpeechMetadata( - language="", # set in async_pipeline_from_audio_stream - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=self._iterate_packets(), - pipeline_id=pipeline_select.get_chosen_pipeline( - self.hass, DOMAIN, self.device_info.mac_address - ), - conversation_id=conversation_id, - device_id=device_id, - tts_audio_output=tts_audio_output, - start_stage=start_stage, - wake_word_settings=WakeWordSettings(timeout=5), - wake_word_phrase=wake_word_phrase, - audio_settings=AudioSettings( - noise_suppression_level=audio_settings.noise_suppression_level, - auto_gain_dbfs=audio_settings.auto_gain, - volume_multiplier=audio_settings.volume_multiplier, - is_vad_enabled=bool(flags & VoiceAssistantCommandFlag.USE_VAD), - silence_seconds=VadSensitivity.to_seconds( - pipeline_select.get_vad_sensitivity( - self.hass, DOMAIN, self.device_info.mac_address - ) - ), - ), - ) - - # Block until TTS is done sending - await self._tts_done.wait() - - _LOGGER.debug("Pipeline finished") - except PipelineNotFound as e: - self.handle_event( - VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, - { - "code": e.code, - "message": 
e.message, - }, - ) - _LOGGER.warning("Pipeline not found") - except WakeWordDetectionAborted: - pass # Wake word detection was aborted and `handle_finished` is enough. - except WakeWordDetectionError as e: - self.handle_event( - VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, - { - "code": e.code, - "message": e.message, - }, - ) - finally: - self.handle_finished() - - async def _send_tts(self, media_id: str) -> None: - """Send TTS audio to device via UDP.""" - # Always send stream start/end events - self.handle_event(VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, {}) - - try: - if not self.is_running: - return - - extension, data = await tts.async_get_media_source_audio( - self.hass, - media_id, - ) - - if extension != "wav": - raise ValueError(f"Only WAV audio can be streamed, got {extension}") - - with io.BytesIO(data) as wav_io: - with wave.open(wav_io, "rb") as wav_file: - sample_rate = wav_file.getframerate() - sample_width = wav_file.getsampwidth() - sample_channels = wav_file.getnchannels() - - if ( - (sample_rate != 16000) - or (sample_width != 2) - or (sample_channels != 1) - ): - raise ValueError( - "Expected rate/width/channels as 16000/2/1," - " got {sample_rate}/{sample_width}/{sample_channels}}" - ) - - audio_bytes = wav_file.readframes(wav_file.getnframes()) - - audio_bytes_size = len(audio_bytes) - - _LOGGER.debug("Sending %d bytes of audio", audio_bytes_size) - - bytes_per_sample = stt.AudioBitRates.BITRATE_16 // 8 - sample_offset = 0 - samples_left = audio_bytes_size // bytes_per_sample - - while (samples_left > 0) and self.is_running: - bytes_offset = sample_offset * bytes_per_sample - chunk: bytes = audio_bytes[bytes_offset : bytes_offset + 1024] - samples_in_chunk = len(chunk) // bytes_per_sample - samples_left -= samples_in_chunk - - self.send_audio_bytes(chunk) - await asyncio.sleep( - samples_in_chunk / stt.AudioSampleRates.SAMPLERATE_16000 * 0.9 - ) - - sample_offset += samples_in_chunk - finally: - self.handle_event( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, {} - ) - self._tts_task = None - self._tts_done.set() - - def send_audio_bytes(self, data: bytes) -> None: - """Send bytes to the device.""" - raise NotImplementedError - - def stop(self) -> None: - """Stop the pipeline.""" - self.queue.put_nowait(b"") - - -class VoiceAssistantUDPPipeline(asyncio.DatagramProtocol, VoiceAssistantPipeline): - """Receive UDP packets and forward them to the voice assistant.""" - - transport: asyncio.DatagramTransport | None = None - remote_addr: tuple[str, int] | None = None - - async def start_server(self) -> int: - """Start accepting connections.""" - - def accept_connection() -> VoiceAssistantUDPPipeline: - """Accept connection.""" - if self.started: - raise RuntimeError("Can only start once") - if self.stop_requested: - raise RuntimeError("No longer accepting connections") - - self.started = True - return self - - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.setblocking(False) - - sock.bind(("", UDP_PORT)) - - await asyncio.get_running_loop().create_datagram_endpoint( - accept_connection, sock=sock - ) - - return cast(int, sock.getsockname()[1]) - - @callback - def connection_made(self, transport: asyncio.BaseTransport) -> None: - """Store transport for later use.""" - self.transport = cast(asyncio.DatagramTransport, transport) - - @callback - def datagram_received(self, data: bytes, addr: tuple[str, int]) -> None: - """Handle incoming UDP packet.""" - if not self.is_running: - return - if self.remote_addr is None: - 
self.remote_addr = addr - self.queue.put_nowait(data) - - def error_received(self, exc: Exception) -> None: - """Handle when a send or receive operation raises an OSError. - - (Other than BlockingIOError or InterruptedError.) - """ - _LOGGER.error("ESPHome Voice Assistant UDP server error received: %s", exc) - self.handle_finished() - - @callback - def stop(self) -> None: - """Stop the receiver.""" - super().stop() - self.close() - - def close(self) -> None: - """Close the receiver.""" - self.started = False - self.stop_requested = True - - if self.transport is not None: - self.transport.close() - - def send_audio_bytes(self, data: bytes) -> None: - """Send bytes to the device via UDP.""" - if self.transport is None: - _LOGGER.error("No transport to send audio to") - return - self.transport.sendto(data, self.remote_addr) - - -class VoiceAssistantAPIPipeline(VoiceAssistantPipeline): - """Send audio to the voice assistant via the API.""" - - def __init__( - self, - hass: HomeAssistant, - entry_data: RuntimeEntryData, - handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], - handle_finished: Callable[[], None], - api_client: APIClient, - ) -> None: - """Initialize the pipeline.""" - super().__init__(hass, entry_data, handle_event, handle_finished) - self.api_client = api_client - self.started = True - - def send_audio_bytes(self, data: bytes) -> None: - """Send bytes to the device via the API.""" - self.api_client.send_voice_assistant_audio(data) - - @callback - def receive_audio_bytes(self, data: bytes) -> None: - """Receive audio bytes from the device.""" - if not self.is_running: - return - self.queue.put_nowait(data) - - @callback - def stop(self) -> None: - """Stop the pipeline.""" - super().stop() - - self.started = False - self.stop_requested = True - - -def handle_timer_event( - api_client: APIClient, event_type: TimerEventType, timer_info: TimerInfo -) -> None: - """Handle timer events.""" - try: - native_event_type = _TIMER_EVENT_TYPES.from_hass(event_type) - except KeyError: - _LOGGER.debug("Received unknown timer event type: %s", event_type) - return - - api_client.send_voice_assistant_timer_event( - native_event_type, - timer_info.id, - timer_info.name, - timer_info.created_seconds, - timer_info.seconds_left, - timer_info.is_active, - ) diff --git a/homeassistant/components/etherscan/manifest.json b/homeassistant/components/etherscan/manifest.json index 1b296e4e4be..e5099ffaf9c 100644 --- a/homeassistant/components/etherscan/manifest.json +++ b/homeassistant/components/etherscan/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/etherscan", "iot_class": "cloud_polling", "loggers": ["pyetherscan"], + "quality_scale": "legacy", "requirements": ["python-etherscan-api==0.0.3"] } diff --git a/homeassistant/components/eufy/light.py b/homeassistant/components/eufy/light.py index c1506c00cdc..95ad8a15d1c 100644 --- a/homeassistant/components/eufy/light.py +++ b/homeassistant/components/eufy/light.py @@ -8,7 +8,7 @@ import lakeside from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -17,10 +17,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.color as color_util -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired as kelvin_to_mired, - 
color_temperature_mired_to_kelvin as mired_to_kelvin, -) EUFYHOME_MAX_KELVIN = 6500 EUFYHOME_MIN_KELVIN = 2700 @@ -41,6 +37,9 @@ def setup_platform( class EufyHomeLight(LightEntity): """Representation of a EufyHome light.""" + _attr_min_color_temp_kelvin = EUFYHOME_MIN_KELVIN + _attr_max_color_temp_kelvin = EUFYHOME_MAX_KELVIN + def __init__(self, device): """Initialize the light.""" @@ -96,23 +95,12 @@ class EufyHomeLight(LightEntity): return int(self._brightness * 255 / 100) @property - def min_mireds(self) -> int: - """Return minimum supported color temperature.""" - return kelvin_to_mired(EUFYHOME_MAX_KELVIN) - - @property - def max_mireds(self) -> int: - """Return maximum supported color temperature.""" - return kelvin_to_mired(EUFYHOME_MIN_KELVIN) - - @property - def color_temp(self): - """Return the color temperature of this light.""" - temp_in_k = int( + def color_temp_kelvin(self) -> int: + """Return the color temperature value in Kelvin.""" + return int( EUFYHOME_MIN_KELVIN + (self._temp * (EUFYHOME_MAX_KELVIN - EUFYHOME_MIN_KELVIN) / 100) ) - return kelvin_to_mired(temp_in_k) @property def hs_color(self): @@ -134,7 +122,7 @@ class EufyHomeLight(LightEntity): def turn_on(self, **kwargs: Any) -> None: """Turn the specified light on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) - colortemp = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) hs = kwargs.get(ATTR_HS_COLOR) if brightness is not None: @@ -144,10 +132,9 @@ class EufyHomeLight(LightEntity): self._brightness = 100 brightness = self._brightness - if colortemp is not None: + if color_temp_kelvin is not None: self._colormode = False - temp_in_k = mired_to_kelvin(colortemp) - relative_temp = temp_in_k - EUFYHOME_MIN_KELVIN + relative_temp = color_temp_kelvin - EUFYHOME_MIN_KELVIN temp = int( relative_temp * 100 / (EUFYHOME_MAX_KELVIN - EUFYHOME_MIN_KELVIN) ) diff --git a/homeassistant/components/eufy/manifest.json b/homeassistant/components/eufy/manifest.json index ccf15144f9e..6ad1b7de81b 100644 --- a/homeassistant/components/eufy/manifest.json +++ b/homeassistant/components/eufy/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/eufy", "iot_class": "local_polling", "loggers": ["lakeside"], + "quality_scale": "legacy", "requirements": ["lakeside==0.13"] } diff --git a/homeassistant/components/event/__init__.py b/homeassistant/components/event/__init__.py index 4ca000f6a40..c4a8fb2d0af 100644 --- a/homeassistant/components/event/__init__.py +++ b/homeassistant/components/event/__init__.py @@ -5,10 +5,11 @@ from __future__ import annotations from dataclasses import asdict, dataclass from datetime import datetime, timedelta from enum import StrEnum -from functools import cached_property import logging from typing import Any, Self, final +from propcache import cached_property + from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv @@ -17,10 +18,12 @@ from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import ExtraStoredData, RestoreEntity from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util +from homeassistant.util.hass_dict import HassKey from .const import ATTR_EVENT_TYPE, ATTR_EVENT_TYPES, DOMAIN _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[EventEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA 
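The eufy light changes above drop the mired helpers entirely and expose color temperature natively in Kelvin. A sketch of that migration pattern: declare the supported range with `_attr_min/max_color_temp_kelvin`, report `color_temp_kelvin`, and read `ATTR_COLOR_TEMP_KELVIN` in `turn_on`. The 0–100 internal device scale below mirrors the EufyHome API and is otherwise an assumption:

```python
from typing import Any

from homeassistant.components.light import (
    ATTR_COLOR_TEMP_KELVIN,
    ColorMode,
    LightEntity,
)

MIN_KELVIN = 2700
MAX_KELVIN = 6500


class ExampleKelvinLight(LightEntity):
    """Light that handles color temperature natively in Kelvin."""

    _attr_color_mode = ColorMode.COLOR_TEMP
    _attr_supported_color_modes = {ColorMode.COLOR_TEMP}
    _attr_min_color_temp_kelvin = MIN_KELVIN
    _attr_max_color_temp_kelvin = MAX_KELVIN

    _device_temp = 0  # 0..100 device scale (illustrative)

    @property
    def color_temp_kelvin(self) -> int:
        """Return the current color temperature in Kelvin."""
        return int(MIN_KELVIN + self._device_temp * (MAX_KELVIN - MIN_KELVIN) / 100)

    def turn_on(self, **kwargs: Any) -> None:
        """Map an incoming Kelvin value back to the 0..100 device scale."""
        if (kelvin := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) is not None:
            self._device_temp = int(
                (kelvin - MIN_KELVIN) * 100 / (MAX_KELVIN - MIN_KELVIN)
            )
```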
= cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -51,7 +54,7 @@ __all__ = [ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Event entities.""" - component = hass.data[DOMAIN] = EntityComponent[EventEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[EventEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -60,14 +63,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[EventEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[EventEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class EventEntityDescription(EntityDescription, frozen_or_thawed=True): diff --git a/homeassistant/components/everlights/manifest.json b/homeassistant/components/everlights/manifest.json index 6f856b26087..a2deeab2666 100644 --- a/homeassistant/components/everlights/manifest.json +++ b/homeassistant/components/everlights/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/everlights", "iot_class": "local_polling", "loggers": ["pyeverlights"], + "quality_scale": "legacy", "requirements": ["pyeverlights==0.1.0"] } diff --git a/homeassistant/components/evil_genius_labs/__init__.py b/homeassistant/components/evil_genius_labs/__init__.py index afc6fecd9a4..d5bc3a564a2 100644 --- a/homeassistant/components/evil_genius_labs/__init__.py +++ b/homeassistant/components/evil_genius_labs/__init__.py @@ -7,9 +7,7 @@ import pyevilgenius from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import aiohttp_client, device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.helpers import aiohttp_client from .const import DOMAIN from .coordinator import EvilGeniusUpdateCoordinator @@ -41,23 +39,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class EvilGeniusEntity(CoordinatorEntity[EvilGeniusUpdateCoordinator]): - """Base entity for Evil Genius.""" - - _attr_has_entity_name = True - - @property - def device_info(self) -> DeviceInfo: - """Return device info.""" - info = self.coordinator.info - return DeviceInfo( - identifiers={(DOMAIN, info["wiFiChipId"])}, - connections={(dr.CONNECTION_NETWORK_MAC, info["macAddress"])}, - name=self.coordinator.device_name, - model=self.coordinator.product_name, - manufacturer="Evil Genius Labs", - sw_version=info["coreVersion"].replace("_", "."), - configuration_url=self.coordinator.client.url, - ) diff --git a/homeassistant/components/evil_genius_labs/entity.py b/homeassistant/components/evil_genius_labs/entity.py new file mode 100644 index 00000000000..a690b385c56 --- /dev/null +++ b/homeassistant/components/evil_genius_labs/entity.py @@ -0,0 +1,30 @@ +"""The Evil Genius Labs integration.""" + +from __future__ import annotations + +from homeassistant.helpers 
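The event component changes above replace untyped `hass.data[DOMAIN]` lookups with a typed `HassKey`, removing the per-call annotations. A short sketch of the pattern, assuming an entity component like the one the diff stores; the `"example"` domain is a placeholder:

```python
from homeassistant.components.event import EventEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.util.hass_dict import HassKey

DOMAIN = "example"
# The key carries the value type, so hass.data[DATA_COMPONENT] is typed
# without casts or local annotations.
DATA_COMPONENT: HassKey[EntityComponent[EventEntity]] = HassKey(DOMAIN)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry via the typed component reference."""
    return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
```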
import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import EvilGeniusUpdateCoordinator + + +class EvilGeniusEntity(CoordinatorEntity[EvilGeniusUpdateCoordinator]): + """Base entity for Evil Genius.""" + + _attr_has_entity_name = True + + @property + def device_info(self) -> DeviceInfo: + """Return device info.""" + info = self.coordinator.info + return DeviceInfo( + identifiers={(DOMAIN, info["wiFiChipId"])}, + connections={(dr.CONNECTION_NETWORK_MAC, info["macAddress"])}, + name=self.coordinator.device_name, + model=self.coordinator.product_name, + manufacturer="Evil Genius Labs", + sw_version=info["coreVersion"].replace("_", "."), + configuration_url=self.coordinator.client.url, + ) diff --git a/homeassistant/components/evil_genius_labs/light.py b/homeassistant/components/evil_genius_labs/light.py index 89bdcae9ef7..3556672dcce 100644 --- a/homeassistant/components/evil_genius_labs/light.py +++ b/homeassistant/components/evil_genius_labs/light.py @@ -11,9 +11,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import EvilGeniusEntity from .const import DOMAIN from .coordinator import EvilGeniusUpdateCoordinator +from .entity import EvilGeniusEntity from .util import update_when_done HA_NO_EFFECT = "None" diff --git a/homeassistant/components/evil_genius_labs/util.py b/homeassistant/components/evil_genius_labs/util.py index f3c86f2666f..1182cab3e8b 100644 --- a/homeassistant/components/evil_genius_labs/util.py +++ b/homeassistant/components/evil_genius_labs/util.py @@ -6,7 +6,7 @@ from collections.abc import Awaitable, Callable, Coroutine from functools import wraps from typing import Any, Concatenate -from . 
import EvilGeniusEntity +from .entity import EvilGeniusEntity def update_when_done[_EvilGeniusEntityT: EvilGeniusEntity, **_P, _R]( diff --git a/homeassistant/components/evohome/__init__.py b/homeassistant/components/evohome/__init__.py index 5a5d9d09521..612131919d4 100644 --- a/homeassistant/components/evohome/__init__.py +++ b/homeassistant/components/evohome/__init__.py @@ -79,7 +79,8 @@ CONFIG_SCHEMA: Final = vol.Schema( extra=vol.ALLOW_EXTRA, ) -# system mode schemas are built dynamically when the services are regiatered +# system mode schemas are built dynamically when the services are registered +# because supported modes can vary for edge-case systems RESET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema( {vol.Required(ATTR_ENTITY_ID): cv.entity_id} @@ -175,7 +176,7 @@ class EvoSession: ): app_storage[ACCESS_TOKEN_EXPIRES] = dt_aware_to_naive(expires) - user_data: dict[str, str] = app_storage.pop(USER_DATA, {}) + user_data: dict[str, str] = app_storage.pop(USER_DATA, {}) or {} self.session_id = user_data.get(SZ_SESSION_ID) self._tokens = app_storage @@ -222,7 +223,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: config[DOMAIN][CONF_PASSWORD], ) - except evo.AuthenticationFailed as err: + except (evo.AuthenticationFailed, evo.RequestFailed) as err: handle_evo_exception(err) return False @@ -239,6 +240,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name=f"{DOMAIN}_coordinator", update_interval=config[DOMAIN][CONF_SCAN_INTERVAL], update_method=broker.async_update, diff --git a/homeassistant/components/evohome/climate.py b/homeassistant/components/evohome/climate.py index 07601474062..c71831fa4bc 100644 --- a/homeassistant/components/evohome/climate.py +++ b/homeassistant/components/evohome/climate.py @@ -66,8 +66,6 @@ _LOGGER = logging.getLogger(__name__) PRESET_RESET = "Reset" # reset all child zones to EVO_FOLLOW PRESET_CUSTOM = "Custom" -HA_HVAC_TO_TCS = {HVACMode.OFF: EVO_HEATOFF, HVACMode.HEAT: EVO_AUTO} - TCS_PRESET_TO_HA = { EVO_AWAY: PRESET_AWAY, EVO_CUSTOM: PRESET_CUSTOM, @@ -148,26 +146,21 @@ async def async_setup_platform( class EvoClimateEntity(EvoDevice, ClimateEntity): - """Base for an evohome Climate device.""" + """Base for any evohome-compatible climate entity (controller, zone).""" + _attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT] _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return a list of available hvac operation modes.""" - return list(HA_HVAC_TO_TCS) class EvoZone(EvoChild, EvoClimateEntity): - """Base for a Honeywell TCC Zone.""" + """Base for any evohome-compatible heating zone.""" _attr_preset_modes = list(HA_PRESET_TO_EVO) _evo_device: evo.Zone # mypy hint def __init__(self, evo_broker: EvoBroker, evo_device: evo.Zone) -> None: - """Initialize a Honeywell TCC Zone.""" + """Initialize an evohome-compatible heating zone.""" super().__init__(evo_broker, evo_device) self._evo_id = evo_device.zoneId @@ -342,7 +335,7 @@ class EvoZone(EvoChild, EvoClimateEntity): class EvoController(EvoClimateEntity): - """Base for a Honeywell TCC Controller/Location. + """Base for any evohome-compatible controller. The Controller (aka TCS, temperature control system) is the parent of all the child (CH/DHW) devices. 
It is implemented as a Climate entity to expose the controller's @@ -357,7 +350,7 @@ class EvoController(EvoClimateEntity): _evo_device: evo.ControlSystem # mypy hint def __init__(self, evo_broker: EvoBroker, evo_device: evo.ControlSystem) -> None: - """Initialize a Honeywell TCC Controller/Location.""" + """Initialize an evohome-compatible controller.""" super().__init__(evo_broker, evo_device) self._evo_id = evo_device.systemId @@ -365,9 +358,9 @@ class EvoController(EvoClimateEntity): self._attr_unique_id = evo_device.systemId self._attr_name = evo_device.location.name - modes = [m[SZ_SYSTEM_MODE] for m in evo_broker.tcs.allowedSystemModes] + self._evo_modes = [m[SZ_SYSTEM_MODE] for m in evo_device.allowedSystemModes] self._attr_preset_modes = [ - TCS_PRESET_TO_HA[m] for m in modes if m in list(TCS_PRESET_TO_HA) + TCS_PRESET_TO_HA[m] for m in self._evo_modes if m in list(TCS_PRESET_TO_HA) ] if self._attr_preset_modes: self._attr_supported_features = ClimateEntityFeature.PRESET_MODE @@ -401,14 +394,14 @@ class EvoController(EvoClimateEntity): """Set a Controller to any of its native EVO_* operating modes.""" until = dt_util.as_utc(until) if until else None await self._evo_broker.call_client_api( - self._evo_tcs.set_mode(mode, until=until) # type: ignore[arg-type] + self._evo_device.set_mode(mode, until=until) # type: ignore[arg-type] ) @property def hvac_mode(self) -> HVACMode: """Return the current operating mode of a Controller.""" - tcs_mode = self._evo_tcs.system_mode - return HVACMode.OFF if tcs_mode == EVO_HEATOFF else HVACMode.HEAT + evo_mode = self._evo_device.system_mode + return HVACMode.OFF if evo_mode in (EVO_HEATOFF, "Off") else HVACMode.HEAT @property def current_temperature(self) -> float | None: @@ -418,7 +411,7 @@ class EvoController(EvoClimateEntity): """ temps = [ z.temperature - for z in self._evo_tcs.zones.values() + for z in self._evo_device.zones.values() if z.temperature is not None ] return round(sum(temps) / len(temps), 1) if temps else None @@ -426,9 +419,9 @@ class EvoController(EvoClimateEntity): @property def preset_mode(self) -> str | None: """Return the current preset mode, e.g., home, away, temp.""" - if not self._evo_tcs.system_mode: + if not self._evo_device.system_mode: return None - return TCS_PRESET_TO_HA.get(self._evo_tcs.system_mode) + return TCS_PRESET_TO_HA.get(self._evo_device.system_mode) async def async_set_temperature(self, **kwargs: Any) -> None: """Raise exception as Controllers don't have a target temperature.""" @@ -436,9 +429,13 @@ class EvoController(EvoClimateEntity): async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set an operating mode for a Controller.""" - if not (tcs_mode := HA_HVAC_TO_TCS.get(hvac_mode)): + if hvac_mode == HVACMode.HEAT: + evo_mode = EVO_AUTO if EVO_AUTO in self._evo_modes else "Heat" + elif hvac_mode == HVACMode.OFF: + evo_mode = EVO_HEATOFF if EVO_HEATOFF in self._evo_modes else "Off" + else: raise HomeAssistantError(f"Invalid hvac_mode: {hvac_mode}") - await self._set_tcs_mode(tcs_mode) + await self._set_tcs_mode(evo_mode) async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode; if None, then revert to 'Auto' mode.""" @@ -451,6 +448,6 @@ class EvoController(EvoClimateEntity): attrs = self._device_state_attrs for attr in STATE_ATTRS_TCS: if attr == SZ_ACTIVE_FAULTS: - attrs["activeSystemFaults"] = getattr(self._evo_tcs, attr) + attrs["activeSystemFaults"] = getattr(self._evo_device, attr) else: - attrs[attr] = getattr(self._evo_tcs, attr) + attrs[attr] = 
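The `async_set_hvac_mode` hunk above stops relying on a fixed HA→TCS mapping and instead prefers the controller's native modes, falling back to plain "Heat"/"Off" when the system does not advertise them. A standalone sketch of that fallback rule; the `EVO_AUTO`/`EVO_HEATOFF` string values below are assumptions about the evohome client constants, included only to make the example self-contained:

```python
from homeassistant.components.climate import HVACMode
from homeassistant.exceptions import HomeAssistantError

EVO_AUTO = "Auto"  # assumed native mode names
EVO_HEATOFF = "HeatingOff"


def ha_to_native_mode(hvac_mode: HVACMode, supported_modes: list[str]) -> str:
    """Translate an HA HVAC mode to a mode the controller actually supports."""
    if hvac_mode == HVACMode.HEAT:
        return EVO_AUTO if EVO_AUTO in supported_modes else "Heat"
    if hvac_mode == HVACMode.OFF:
        return EVO_HEATOFF if EVO_HEATOFF in supported_modes else "Off"
    raise HomeAssistantError(f"Invalid hvac_mode: {hvac_mode}")
```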
getattr(self._evo_device, attr) diff --git a/homeassistant/components/evohome/const.py b/homeassistant/components/evohome/const.py index 15949bc3c37..3ebe6954fea 100644 --- a/homeassistant/components/evohome/const.py +++ b/homeassistant/components/evohome/const.py @@ -53,8 +53,8 @@ ATTR_DURATION_UNTIL: Final = "duration" class EvoService(StrEnum): """The Evohome services.""" - REFRESH_SYSTEM: Final = "refresh_system" - SET_SYSTEM_MODE: Final = "set_system_mode" - RESET_SYSTEM: Final = "reset_system" - SET_ZONE_OVERRIDE: Final = "set_zone_override" - RESET_ZONE_OVERRIDE: Final = "clear_zone_override" + REFRESH_SYSTEM = "refresh_system" + SET_SYSTEM_MODE = "set_system_mode" + RESET_SYSTEM = "reset_system" + SET_ZONE_OVERRIDE = "set_zone_override" + RESET_ZONE_OVERRIDE = "clear_zone_override" diff --git a/homeassistant/components/evohome/entity.py b/homeassistant/components/evohome/entity.py index 4f85791572c..b5842c1073a 100644 --- a/homeassistant/components/evohome/entity.py +++ b/homeassistant/components/evohome/entity.py @@ -26,9 +26,9 @@ _LOGGER = logging.getLogger(__name__) class EvoDevice(Entity): - """Base for any evohome device. + """Base for any evohome-compatible entity (controller, DHW, zone). - This includes the Controller, (up to 12) Heating Zones and (optionally) a + This includes the controller, (1 to 12) heating zones and (optionally) a DHW controller. """ @@ -39,10 +39,9 @@ class EvoDevice(Entity): evo_broker: EvoBroker, evo_device: evo.ControlSystem | evo.HotWater | evo.Zone, ) -> None: - """Initialize the evohome entity.""" + """Initialize an evohome-compatible entity (TCS, DHW, zone).""" self._evo_device = evo_device self._evo_broker = evo_broker - self._evo_tcs = evo_broker.tcs self._device_state_attrs: dict[str, Any] = {} @@ -88,9 +87,9 @@ class EvoDevice(Entity): class EvoChild(EvoDevice): - """Base for any evohome child. + """Base for any evohome-compatible child entity (DHW, zone). - This includes (up to 12) Heating Zones and (optionally) a DHW controller. + This includes (1 to 12) heating zones and (optionally) a DHW controller. 
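The `const.py` hunk above drops the `: Final` annotations from the `EvoService` members: StrEnum members are already constants, so the extra annotation adds nothing and recent type checkers flag it. A tiny illustration with placeholder service names:

```python
from enum import StrEnum


class ExampleService(StrEnum):
    """Service names as plain StrEnum members (no `: Final` annotation)."""

    REFRESH = "refresh"
    SET_MODE = "set_mode"


# StrEnum members compare equal to their string value.
assert ExampleService.REFRESH == "refresh"
```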
""" _evo_id: str # mypy hint @@ -98,9 +97,11 @@ class EvoChild(EvoDevice): def __init__( self, evo_broker: EvoBroker, evo_device: evo.HotWater | evo.Zone ) -> None: - """Initialize a evohome Controller (hub).""" + """Initialize an evohome-compatible child entity (DHW, zone).""" super().__init__(evo_broker, evo_device) + self._evo_tcs = evo_device.tcs + self._schedule: dict[str, Any] = {} self._setpoints: dict[str, Any] = {} diff --git a/homeassistant/components/evohome/icons.json b/homeassistant/components/evohome/icons.json index cd0005e2546..54488440e60 100644 --- a/homeassistant/components/evohome/icons.json +++ b/homeassistant/components/evohome/icons.json @@ -1,9 +1,19 @@ { "services": { - "set_system_mode": "mdi:pencil", - "reset_system": "mdi:refresh", - "refresh_system": "mdi:refresh", - "set_zone_override": "mdi:motion-sensor", - "clear_zone_override": "mdi:motion-sensor-off" + "set_system_mode": { + "service": "mdi:pencil" + }, + "reset_system": { + "service": "mdi:refresh" + }, + "refresh_system": { + "service": "mdi:refresh" + }, + "set_zone_override": { + "service": "mdi:motion-sensor" + }, + "clear_zone_override": { + "service": "mdi:motion-sensor-off" + } } } diff --git a/homeassistant/components/evohome/manifest.json b/homeassistant/components/evohome/manifest.json index e81e71c5b07..22edadad7f4 100644 --- a/homeassistant/components/evohome/manifest.json +++ b/homeassistant/components/evohome/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/evohome", "iot_class": "cloud_polling", "loggers": ["evohomeasync", "evohomeasync2"], - "requirements": ["evohome-async==0.4.20"] + "quality_scale": "legacy", + "requirements": ["evohome-async==0.4.21"] } diff --git a/homeassistant/components/evohome/water_heater.py b/homeassistant/components/evohome/water_heater.py index abf3e2f3926..a50e16b5dda 100644 --- a/homeassistant/components/evohome/water_heater.py +++ b/homeassistant/components/evohome/water_heater.py @@ -74,7 +74,7 @@ async def async_setup_platform( class EvoDHW(EvoChild, WaterHeaterEntity): - """Base for a Honeywell TCC DHW controller (aka boiler).""" + """Base for any evohome-compatible DHW controller.""" _attr_name = "DHW controller" _attr_icon = "mdi:thermometer-lines" @@ -84,7 +84,7 @@ class EvoDHW(EvoChild, WaterHeaterEntity): _evo_device: evo.HotWater # mypy hint def __init__(self, evo_broker: EvoBroker, evo_device: evo.HotWater) -> None: - """Initialize an evohome DHW controller.""" + """Initialize an evohome-compatible DHW controller.""" super().__init__(evo_broker, evo_device) self._evo_id = evo_device.dhwId diff --git a/homeassistant/components/ezviz/__init__.py b/homeassistant/components/ezviz/__init__.py index c453060b472..6885304e0de 100644 --- a/homeassistant/components/ezviz/__init__.py +++ b/homeassistant/components/ezviz/__init__.py @@ -105,7 +105,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if sensor_type == ATTR_TYPE_CAMERA and hass.data[DOMAIN]: for item in hass.config_entries.async_entries(domain=DOMAIN): if item.data.get(CONF_TYPE) == ATTR_TYPE_CLOUD: - _LOGGER.info("Reload Ezviz main account with camera entry") + _LOGGER.debug("Reload Ezviz main account with camera entry") await hass.config_entries.async_reload(item.entry_id) return True diff --git a/homeassistant/components/ezviz/alarm_control_panel.py b/homeassistant/components/ezviz/alarm_control_panel.py index 21e9f2d0422..f30a7852b4e 100644 --- a/homeassistant/components/ezviz/alarm_control_panel.py +++ 
b/homeassistant/components/ezviz/alarm_control_panel.py @@ -13,13 +13,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityDescription, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, -) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo @@ -45,9 +41,9 @@ ALARM_TYPE = EzvizAlarmControlPanelEntityDescription( key="ezviz_alarm", ezviz_alarm_states=[ None, - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_HOME, ], ) @@ -96,7 +92,7 @@ class EzvizAlarm(AlarmControlPanelEntity): self._attr_device_info = device_info self.entity_description = entity_description self.coordinator = coordinator - self._attr_state = None + self._attr_alarm_state = None async def async_added_to_hass(self) -> None: """Entity added to hass.""" @@ -108,7 +104,7 @@ class EzvizAlarm(AlarmControlPanelEntity): if self.coordinator.ezviz_client.api_set_defence_mode( DefenseModeType.HOME_MODE.value ): - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED except PyEzvizError as err: raise HomeAssistantError("Cannot disarm EZVIZ alarm") from err @@ -119,7 +115,7 @@ class EzvizAlarm(AlarmControlPanelEntity): if self.coordinator.ezviz_client.api_set_defence_mode( DefenseModeType.AWAY_MODE.value ): - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY except PyEzvizError as err: raise HomeAssistantError("Cannot arm EZVIZ alarm") from err @@ -130,7 +126,7 @@ class EzvizAlarm(AlarmControlPanelEntity): if self.coordinator.ezviz_client.api_set_defence_mode( DefenseModeType.SLEEP_MODE.value ): - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME except PyEzvizError as err: raise HomeAssistantError("Cannot arm EZVIZ alarm") from err @@ -145,7 +141,7 @@ class EzvizAlarm(AlarmControlPanelEntity): _LOGGER.debug( "Updating EZVIZ alarm with response %s", ezviz_alarm_state_number ) - self._attr_state = self.entity_description.ezviz_alarm_states[ + self._attr_alarm_state = self.entity_description.ezviz_alarm_states[ int(ezviz_alarm_state_number) ] diff --git a/homeassistant/components/ezviz/config_flow.py b/homeassistant/components/ezviz/config_flow.py index 2b47b120cf8..a7551737c10 100644 --- a/homeassistant/components/ezviz/config_flow.py +++ b/homeassistant/components/ezviz/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from typing import Any +from typing import TYPE_CHECKING, Any from pyezviz.client import EzvizClient from pyezviz.exceptions import ( @@ -93,6 +93,11 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + ip_address: str + username: str | None + password: str | None + unique_id: str + async def _validate_and_create_camera_rtsp(self, data: dict) -> ConfigFlowResult: """Try DESCRIBE on RTSP camera with credentials.""" @@ -145,7 +150,7 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> EzvizOptionsFlowHandler: """Get the options flow for this handler.""" - return 
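The alarm panel changes above migrate from the removed `STATE_ALARM_*` constants and `_attr_state` to the `AlarmControlPanelState` enum and `_attr_alarm_state`. A minimal sketch of an entity written against the new attribute; the backend calls are omitted and the feature set is illustrative:

```python
from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelEntityFeature,
    AlarmControlPanelState,
)


class ExampleAlarm(AlarmControlPanelEntity):
    """Alarm panel using the AlarmControlPanelState enum."""

    _attr_supported_features = (
        AlarmControlPanelEntityFeature.ARM_AWAY
        | AlarmControlPanelEntityFeature.ARM_HOME
    )
    _attr_code_arm_required = False

    def alarm_disarm(self, code: str | None = None) -> None:
        """Mark the panel disarmed after the backend confirms the change."""
        self._attr_alarm_state = AlarmControlPanelState.DISARMED

    def alarm_arm_away(self, code: str | None = None) -> None:
        """Mark the panel armed-away."""
        self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY
```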
EzvizOptionsFlowHandler(config_entry) + return EzvizOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -166,10 +171,8 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() if user_input[CONF_URL] == CONF_CUSTOMIZE: - self.context["data"] = { - CONF_USERNAME: user_input[CONF_USERNAME], - CONF_PASSWORD: user_input[CONF_PASSWORD], - } + self.username = user_input[CONF_USERNAME] + self.password = user_input[CONF_PASSWORD] return await self.async_step_user_custom_url() try: @@ -222,8 +225,8 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): auth_data = {} if user_input is not None: - user_input[CONF_USERNAME] = self.context["data"][CONF_USERNAME] - user_input[CONF_PASSWORD] = self.context["data"][CONF_PASSWORD] + user_input[CONF_USERNAME] = self.username + user_input[CONF_PASSWORD] = self.password try: auth_data = await self.hass.async_add_executor_job( @@ -271,8 +274,11 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(discovery_info[ATTR_SERIAL]) self._abort_if_unique_id_configured() + if TYPE_CHECKING: + # A unique ID is passed in via the discovery info + assert self.unique_id is not None self.context["title_placeholders"] = {ATTR_SERIAL: self.unique_id} - self.context["data"] = {CONF_IP_ADDRESS: discovery_info[CONF_IP_ADDRESS]} + self.ip_address = discovery_info[CONF_IP_ADDRESS] return await self.async_step_confirm() @@ -284,7 +290,7 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: user_input[ATTR_SERIAL] = self.unique_id - user_input[CONF_IP_ADDRESS] = self.context["data"][CONF_IP_ADDRESS] + user_input[CONF_IP_ADDRESS] = self.ip_address try: return await self._validate_and_create_camera_rtsp(user_input) @@ -314,12 +320,12 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, description_placeholders={ ATTR_SERIAL: self.unique_id, - CONF_IP_ADDRESS: self.context["data"][CONF_IP_ADDRESS], + CONF_IP_ADDRESS: self.ip_address, }, ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow for reauthentication with password.""" @@ -363,15 +369,11 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="unknown") else: - self.hass.config_entries.async_update_entry( + return self.async_update_reload_and_abort( entry, data=auth_data, ) - await self.hass.config_entries.async_reload(entry.entry_id) - - return self.async_abort(reason="reauth_successful") - data_schema = vol.Schema( { vol.Required(CONF_USERNAME, default=entry.title): vol.In([entry.title]), @@ -389,10 +391,6 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): class EzvizOptionsFlowHandler(OptionsFlow): """Handle EZVIZ client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/ezviz/icons.json b/homeassistant/components/ezviz/icons.json index 89b4747ed69..e4a2e49a22c 100644 --- a/homeassistant/components/ezviz/icons.json +++ b/homeassistant/components/ezviz/icons.json @@ -26,7 +26,11 @@ } }, "services": { - "set_alarm_detection_sensibility": "mdi:motion-sensor", - "wake_device": "mdi:sleep-off" + "set_alarm_detection_sensibility": { + "service": "mdi:motion-sensor" + }, + "wake_device": { + "service": "mdi:sleep-off" + } } } diff --git 
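The reauth hunk above collapses the manual update/reload/abort sequence into a single `async_update_reload_and_abort` call, which updates the entry, reloads it, and aborts with `reauth_successful`. A hedged sketch of a reauth confirm step using it; `_async_validate` is a hypothetical helper, and `_get_reauth_entry` assumes a recent Core version that provides that accessor:

```python
from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Config flow with a minimal reauth confirm step."""

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Re-validate credentials and update the existing entry."""
        if user_input is not None:
            auth_data = await self._async_validate(user_input)  # hypothetical helper
            reauth_entry = self._get_reauth_entry()  # entry being reauthenticated
            # Updates the entry data, reloads it and aborts the flow in one call.
            return self.async_update_reload_and_abort(reauth_entry, data=auth_data)
        return self.async_show_form(step_id="reauth_confirm")
```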
a/homeassistant/components/ezviz/update.py b/homeassistant/components/ezviz/update.py index 05735d152cf..25a506a0052 100644 --- a/homeassistant/components/ezviz/update.py +++ b/homeassistant/components/ezviz/update.py @@ -73,11 +73,9 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity): return self.data["version"] @property - def in_progress(self) -> bool | int | None: + def in_progress(self) -> bool: """Update installation progress.""" - if self.data["upgrade_in_progress"]: - return self.data["upgrade_percent"] - return False + return bool(self.data["upgrade_in_progress"]) @property def latest_version(self) -> str | None: @@ -93,6 +91,13 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity): return self.data["latest_firmware_info"].get("desc") return None + @property + def update_percentage(self) -> int | None: + """Update installation progress.""" + if self.data["upgrade_in_progress"]: + return self.data["upgrade_percent"] + return None + async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: diff --git a/homeassistant/components/facebook/manifest.json b/homeassistant/components/facebook/manifest.json index 5074489852e..5a7eb216ccc 100644 --- a/homeassistant/components/facebook/manifest.json +++ b/homeassistant/components/facebook/manifest.json @@ -3,5 +3,6 @@ "name": "Facebook Messenger", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/facebook", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/fail2ban/manifest.json b/homeassistant/components/fail2ban/manifest.json index e348db1c695..1570afda6eb 100644 --- a/homeassistant/components/fail2ban/manifest.json +++ b/homeassistant/components/fail2ban/manifest.json @@ -3,5 +3,6 @@ "name": "Fail2Ban", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/fail2ban", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/familyhub/manifest.json b/homeassistant/components/familyhub/manifest.json index f57030efb27..cf4bf0ba68f 100644 --- a/homeassistant/components/familyhub/manifest.json +++ b/homeassistant/components/familyhub/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/familyhub", "iot_class": "local_polling", "loggers": ["pyfamilyhublocal"], + "quality_scale": "legacy", "requirements": ["python-family-hub-local==0.0.2"] } diff --git a/homeassistant/components/fan/__init__.py b/homeassistant/components/fan/__init__.py index 5a15ece665a..863ae705603 100644 --- a/homeassistant/components/fan/__init__.py +++ b/homeassistant/components/fan/__init__.py @@ -2,15 +2,14 @@ from __future__ import annotations -import asyncio from datetime import timedelta from enum import IntFlag import functools as ft -from functools import cached_property import logging import math from typing import Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -23,17 +22,11 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, 
ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent -from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass +from homeassistant.util.hass_dict import HassKey from homeassistant.util.percentage import ( percentage_to_ranged_value, ranged_value_to_percentage, @@ -42,6 +35,7 @@ from homeassistant.util.percentage import ( _LOGGER = logging.getLogger(__name__) DOMAIN = "fan" +DATA_COMPONENT: HassKey[EntityComponent[FanEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -59,21 +53,6 @@ class FanEntityFeature(IntFlag): TURN_ON = 32 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the FanEntityFeature enum instead. -_DEPRECATED_SUPPORT_SET_SPEED = DeprecatedConstantEnum( - FanEntityFeature.SET_SPEED, "2025.1" -) -_DEPRECATED_SUPPORT_OSCILLATE = DeprecatedConstantEnum( - FanEntityFeature.OSCILLATE, "2025.1" -) -_DEPRECATED_SUPPORT_DIRECTION = DeprecatedConstantEnum( - FanEntityFeature.DIRECTION, "2025.1" -) -_DEPRECATED_SUPPORT_PRESET_MODE = DeprecatedConstantEnum( - FanEntityFeature.PRESET_MODE, "2025.1" -) - SERVICE_INCREASE_SPEED = "increase_speed" SERVICE_DECREASE_SPEED = "decrease_speed" SERVICE_OSCILLATE = "oscillate" @@ -119,7 +98,7 @@ def is_on(hass: HomeAssistant, entity_id: str) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Expose fan control via statemachine and services.""" - component = hass.data[DOMAIN] = EntityComponent[FanEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[FanEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -201,14 +180,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[FanEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[FanEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class FanEntityDescription(ToggleEntityDescription, frozen_or_thawed=True): @@ -234,105 +211,12 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): entity_description: FanEntityDescription _attr_current_direction: str | None = None _attr_oscillating: bool | None = None - _attr_percentage: int | None - _attr_preset_mode: str | None - _attr_preset_modes: list[str] | None - _attr_speed_count: int + _attr_percentage: int | None = 0 + _attr_preset_mode: str | None = None + _attr_preset_modes: list[str] | None = None + _attr_speed_count: int = 100 _attr_supported_features: FanEntityFeature = FanEntityFeature(0) - __mod_supported_features: FanEntityFeature = FanEntityFeature(0) - # Integrations should set `_enable_turn_on_off_backwards_compatibility` to False - # once migrated and set the feature flags TURN_ON/TURN_OFF as needed. - _enable_turn_on_off_backwards_compatibility: bool = True - - def __getattribute__(self, __name: str) -> Any: - """Get attribute. - - Modify return of `supported_features` to - include `_mod_supported_features` if attribute is set. 
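For reference, the fan hunks above replace the untyped `hass.data[DOMAIN]` slot with a `HassKey`-typed `DATA_COMPONENT`, so the stored `EntityComponent[FanEntity]` is known to the type checker and the per-call `component: EntityComponent[FanEntity] = hass.data[DOMAIN]` annotations can go away. A minimal sketch of the pattern, using a hypothetical `example` domain and placeholder entity class rather than the real fan module:

```python
# Minimal sketch of the HassKey-typed hass.data pattern; the "example" domain
# and ExampleEntity are placeholders, not part of this diff.
import logging
from datetime import timedelta

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey

_LOGGER = logging.getLogger(__name__)

DOMAIN = "example"
SCAN_INTERVAL = timedelta(seconds=30)


class ExampleEntity(Entity):
    """Placeholder entity class for the sketch."""


# The key carries the value type, so hass.data lookups need no cast.
DATA_COMPONENT: HassKey[EntityComponent[ExampleEntity]] = HassKey(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Store the component once under the typed key."""
    component = hass.data[DATA_COMPONENT] = EntityComponent[ExampleEntity](
        _LOGGER, DOMAIN, hass, SCAN_INTERVAL
    )
    await component.async_setup(config)
    return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """The lookup is already typed as EntityComponent[ExampleEntity]."""
    return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
```

Because the key itself carries the value type, every `hass.data[DATA_COMPONENT]` lookup is checked statically without local annotations or casts.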
- """ - if __name != "supported_features": - return super().__getattribute__(__name) - - # Convert the supported features to ClimateEntityFeature. - # Remove this compatibility shim in 2025.1 or later. - _supported_features: FanEntityFeature = super().__getattribute__( - "supported_features" - ) - _mod_supported_features: FanEntityFeature = super().__getattribute__( - "_FanEntity__mod_supported_features" - ) - if type(_supported_features) is int: # noqa: E721 - _features = FanEntityFeature(_supported_features) - self._report_deprecated_supported_features_values(_features) - else: - _features = _supported_features - - if not _mod_supported_features: - return _features - - # Add automatically calculated FanEntityFeature.TURN_OFF/TURN_ON to - # supported features and return it - return _features | _mod_supported_features - - @callback - def add_to_platform_start( - self, - hass: HomeAssistant, - platform: EntityPlatform, - parallel_updates: asyncio.Semaphore | None, - ) -> None: - """Start adding an entity to a platform.""" - super().add_to_platform_start(hass, platform, parallel_updates) - - def _report_turn_on_off(feature: str, method: str) -> None: - """Log warning not implemented turn on/off feature.""" - report_issue = self._suggest_report_issue() - message = ( - "Entity %s (%s) does not set FanEntityFeature.%s" - " but implements the %s method. Please %s" - ) - _LOGGER.warning( - message, - self.entity_id, - type(self), - feature, - method, - report_issue, - ) - - # Adds FanEntityFeature.TURN_OFF/TURN_ON depending on service calls implemented - # This should be removed in 2025.2. - if self._enable_turn_on_off_backwards_compatibility is False: - # Return if integration has migrated already - return - - supported_features = self.supported_features - if supported_features & (FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF): - # The entity supports both turn_on and turn_off, the backwards compatibility - # checks are not needed - return - - if not supported_features & FanEntityFeature.TURN_OFF and ( - type(self).async_turn_off is not ToggleEntity.async_turn_off - or type(self).turn_off is not ToggleEntity.turn_off - ): - # turn_off implicitly supported by implementing turn_off method - _report_turn_on_off("TURN_OFF", "turn_off") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - FanEntityFeature.TURN_OFF - ) - - if not supported_features & FanEntityFeature.TURN_ON and ( - type(self).async_turn_on is not FanEntity.async_turn_on - or type(self).turn_on is not FanEntity.turn_on - ): - # turn_on implicitly supported by implementing turn_on method - _report_turn_on_off("TURN_ON", "turn_on") - self.__mod_supported_features |= ( # pylint: disable=unused-private-member - FanEntityFeature.TURN_ON - ) - def set_percentage(self, percentage: int) -> None: """Set the speed of the fan, as a percentage.""" raise NotImplementedError @@ -463,16 +347,12 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): @cached_property def percentage(self) -> int | None: """Return the current speed as a percentage.""" - if hasattr(self, "_attr_percentage"): - return self._attr_percentage - return 0 + return self._attr_percentage @cached_property def speed_count(self) -> int: """Return the number of speeds the fan supports.""" - if hasattr(self, "_attr_speed_count"): - return self._attr_speed_count - return 100 + return self._attr_speed_count @property def percentage_step(self) -> float: @@ -538,9 +418,7 @@ class FanEntity(ToggleEntity, 
cached_properties=CACHED_PROPERTIES_WITH_ATTR_): Requires FanEntityFeature.SET_SPEED. """ - if hasattr(self, "_attr_preset_mode"): - return self._attr_preset_mode - return None + return self._attr_preset_mode @cached_property def preset_modes(self) -> list[str] | None: @@ -548,14 +426,4 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): Requires FanEntityFeature.SET_SPEED. """ - if hasattr(self, "_attr_preset_modes"): - return self._attr_preset_modes - return None - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = ft.partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) + return self._attr_preset_modes diff --git a/homeassistant/components/fan/icons.json b/homeassistant/components/fan/icons.json index 60edbce5f01..caf80775f80 100644 --- a/homeassistant/components/fan/icons.json +++ b/homeassistant/components/fan/icons.json @@ -20,14 +20,32 @@ } }, "services": { - "decrease_speed": "mdi:fan-minus", - "increase_speed": "mdi:fan-plus", - "oscillate": "mdi:arrow-oscillating", - "set_direction": "mdi:rotate-3d-variant", - "set_percentage": "mdi:fan", - "set_preset_mode": "mdi:fan-auto", - "toggle": "mdi:fan", - "turn_off": "mdi:fan-off", - "turn_on": "mdi:fan" + "decrease_speed": { + "service": "mdi:fan-minus" + }, + "increase_speed": { + "service": "mdi:fan-plus" + }, + "oscillate": { + "service": "mdi:arrow-oscillating" + }, + "set_direction": { + "service": "mdi:rotate-3d-variant" + }, + "set_percentage": { + "service": "mdi:fan" + }, + "set_preset_mode": { + "service": "mdi:fan-auto" + }, + "toggle": { + "service": "mdi:fan" + }, + "turn_off": { + "service": "mdi:fan-off" + }, + "turn_on": { + "service": "mdi:fan" + } } } diff --git a/homeassistant/components/fan/strings.json b/homeassistant/components/fan/strings.json index aab714d3e07..c4951e88c91 100644 --- a/homeassistant/components/fan/strings.json +++ b/homeassistant/components/fan/strings.json @@ -56,17 +56,17 @@ "services": { "set_preset_mode": { "name": "Set preset mode", - "description": "Sets preset mode.", + "description": "Sets preset fan mode.", "fields": { "preset_mode": { "name": "Preset mode", - "description": "Preset mode." + "description": "Preset fan mode." } } }, "set_percentage": { "name": "Set speed", - "description": "Sets the fan speed.", + "description": "Sets the speed of a fan.", "fields": { "percentage": { "name": "Percentage", @@ -94,45 +94,45 @@ }, "oscillate": { "name": "Oscillate", - "description": "Controls oscillatation of the fan.", + "description": "Controls the oscillation of a fan.", "fields": { "oscillating": { "name": "Oscillating", - "description": "Turn on/off oscillation." + "description": "Turns oscillation on/off." } } }, "toggle": { "name": "[%key:common::action::toggle%]", - "description": "Toggles the fan on/off." + "description": "Toggles a fan on/off." }, "set_direction": { "name": "Set direction", - "description": "Sets the fan rotation direction.", + "description": "Sets a fan's rotation direction.", "fields": { "direction": { "name": "Direction", - "description": "Direction to rotate." + "description": "Direction of the fan rotation." 
} } }, "increase_speed": { "name": "Increase speed", - "description": "Increases the speed of the fan.", + "description": "Increases the speed of a fan.", "fields": { "percentage_step": { "name": "Increment", - "description": "Increases the speed by a percentage step." + "description": "Percentage step by which the speed should be increased." } } }, "decrease_speed": { "name": "Decrease speed", - "description": "Decreases the speed of the fan.", + "description": "Decreases the speed of a fan.", "fields": { "percentage_step": { "name": "Decrement", - "description": "Decreases the speed by a percentage step." + "description": "Percentage step by which the speed should be decreased." } } } diff --git a/homeassistant/components/fastdotcom/__init__.py b/homeassistant/components/fastdotcom/__init__.py index b9593ec907f..967e7ef8e35 100644 --- a/homeassistant/components/fastdotcom/__init__.py +++ b/homeassistant/components/fastdotcom/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers.start import async_at_started @@ -26,7 +26,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def _async_finish_startup(hass: HomeAssistant) -> None: """Run this only when HA has finished its startup.""" - await coordinator.async_config_entry_first_refresh() + if entry.state == ConfigEntryState.LOADED: + await coordinator.async_refresh() + else: + await coordinator.async_config_entry_first_refresh() # Don't start a speedtest during startup, this will slow down the overall startup dramatically async_at_started(hass, _async_finish_startup) diff --git a/homeassistant/components/fastdotcom/manifest.json b/homeassistant/components/fastdotcom/manifest.json index 9e2e077858c..10b6fdb5b5d 100644 --- a/homeassistant/components/fastdotcom/manifest.json +++ b/homeassistant/components/fastdotcom/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/fastdotcom", "iot_class": "cloud_polling", "loggers": ["fastdotcom"], - "quality_scale": "gold", "requirements": ["fastdotcom==0.0.3"], "single_config_entry": true } diff --git a/homeassistant/components/feedreader/__init__.py b/homeassistant/components/feedreader/__init__.py index b9f0b006e2a..9faed54c041 100644 --- a/homeassistant/components/feedreader/__init__.py +++ b/homeassistant/components/feedreader/__init__.py @@ -2,17 +2,12 @@ from __future__ import annotations -import voluptuous as vol - -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_SCAN_INTERVAL, CONF_URL, Platform -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_URL, Platform +from homeassistant.core import HomeAssistant from homeassistant.util.hass_dict import HassKey -from .const import CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES, DEFAULT_SCAN_INTERVAL, DOMAIN +from .const import CONF_MAX_ENTRIES, DOMAIN from .coordinator import FeedReaderCoordinator, StoredData type FeedReaderConfigEntry = ConfigEntry[FeedReaderCoordinator] @@ -21,60 +16,6 @@ CONF_URLS = "urls" 
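The fastdotcom hunk above keeps deferring the first speed test until Home Assistant has started, and now only calls `async_config_entry_first_refresh` while the entry is still being set up; if the entry has already reached the loaded state by then, a plain `async_refresh` is used instead. A trimmed sketch of that shape, with a stand-in coordinator class in place of the integration's real one:

```python
# Trimmed sketch of deferring the first refresh until startup has finished;
# ExampleCoordinator stands in for the integration's real coordinator.
import logging
from datetime import timedelta

from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.helpers.start import async_at_started
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


class ExampleCoordinator(DataUpdateCoordinator[None]):
    """Stand-in coordinator whose refresh is assumed to be slow."""

    def __init__(self, hass: HomeAssistant) -> None:
        super().__init__(
            hass, _LOGGER, name="example", update_interval=timedelta(hours=1)
        )

    async def _async_update_data(self) -> None:
        """A real integration would run its measurement here."""


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up the entry but postpone the first, slow refresh."""
    coordinator = ExampleCoordinator(hass)

    async def _async_finish_startup(hass: HomeAssistant) -> None:
        """Refresh only once Home Assistant has finished starting."""
        if entry.state == ConfigEntryState.LOADED:
            # The entry finished loading while we waited; a plain refresh suffices.
            await coordinator.async_refresh()
        else:
            await coordinator.async_config_entry_first_refresh()

    # Defer the work so it does not slow down overall startup.
    async_at_started(hass, _async_finish_startup)
    return True
```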
MY_KEY: HassKey[StoredData] = HassKey(DOMAIN) -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_URLS): vol.All(cv.ensure_list, [cv.url]), - vol.Optional( - CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL - ): cv.time_period, - vol.Optional( - CONF_MAX_ENTRIES, default=DEFAULT_MAX_ENTRIES - ): cv.positive_int, - } - ) - }, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Feedreader component.""" - if DOMAIN in config: - for url in config[DOMAIN][CONF_URLS]: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_URL: url, - CONF_MAX_ENTRIES: config[DOMAIN][CONF_MAX_ENTRIES], - }, - ) - ) - - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Feedreader", - }, - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry) -> bool: """Set up Feedreader from a config entry.""" diff --git a/homeassistant/components/feedreader/config_flow.py b/homeassistant/components/feedreader/config_flow.py index d367432ff8c..f3e56ad1778 100644 --- a/homeassistant/components/feedreader/config_flow.py +++ b/homeassistant/components/feedreader/config_flow.py @@ -2,31 +2,28 @@ from __future__ import annotations +import html import logging -from typing import TYPE_CHECKING, Any +from typing import Any import urllib.error import feedparser import voluptuous as vol from homeassistant.config_entries import ( - SOURCE_IMPORT, ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.selector import ( TextSelector, TextSelectorConfig, TextSelectorType, ) -from homeassistant.util import slugify from .const import CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES, DOMAIN @@ -42,14 +39,14 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 - _config_entry: ConfigEntry - _max_entries: int | None = None @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: """Get the options flow for this handler.""" - return FeedReaderOptionsFlowHandler(config_entry) + return FeedReaderOptionsFlowHandler() def show_user_form( self, @@ -74,21 +71,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - def abort_on_import_error(self, url: str, error: str) -> ConfigFlowResult: - """Abort import flow on error.""" - async_create_issue( - self.hass, - DOMAIN, - f"import_yaml_error_{DOMAIN}_{error}_{slugify(url)}", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"import_yaml_error_{error}", - translation_placeholders={"url": url}, - ) - return self.async_abort(reason=error) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -103,44 
+85,26 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): if feed.bozo: LOGGER.debug("feed bozo_exception: %s", feed.bozo_exception) if isinstance(feed.bozo_exception, urllib.error.URLError): - if self.context["source"] == SOURCE_IMPORT: - return self.abort_on_import_error(user_input[CONF_URL], "url_error") return self.show_user_form(user_input, {"base": "url_error"}) - feed_title = feed["feed"]["title"] + feed_title = html.unescape(feed["feed"]["title"]) return self.async_create_entry( title=feed_title, data=user_input, - options={CONF_MAX_ENTRIES: self._max_entries or DEFAULT_MAX_ENTRIES}, + options={CONF_MAX_ENTRIES: DEFAULT_MAX_ENTRIES}, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Handle an import flow.""" - self._max_entries = user_input[CONF_MAX_ENTRIES] - return await self.async_step_user({CONF_URL: user_input[CONF_URL]}) - async def async_step_reconfigure( - self, _: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - if TYPE_CHECKING: - assert config_entry is not None - self._config_entry = config_entry - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" + reconfigure_entry = self._get_reconfigure_entry() if not user_input: return self.show_user_form( - user_input={**self._config_entry.data}, - description_placeholders={"name": self._config_entry.title}, - step_id="reconfigure_confirm", + user_input={**reconfigure_entry.data}, + description_placeholders={"name": reconfigure_entry.title}, + step_id="reconfigure", ) feed = await async_fetch_feed(self.hass, user_input[CONF_URL]) @@ -150,16 +114,16 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): if isinstance(feed.bozo_exception, urllib.error.URLError): return self.show_user_form( user_input=user_input, - description_placeholders={"name": self._config_entry.title}, - step_id="reconfigure_confirm", + description_placeholders={"name": reconfigure_entry.title}, + step_id="reconfigure", errors={"base": "url_error"}, ) - self.hass.config_entries.async_update_entry(self._config_entry, data=user_input) + self.hass.config_entries.async_update_entry(reconfigure_entry, data=user_input) return self.async_abort(reason="reconfigure_successful") -class FeedReaderOptionsFlowHandler(OptionsFlowWithConfigEntry): +class FeedReaderOptionsFlowHandler(OptionsFlow): """Handle an options flow.""" async def async_step_init( @@ -174,7 +138,9 @@ class FeedReaderOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONF_MAX_ENTRIES, - default=self.options.get(CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES), + default=self.config_entry.options.get( + CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES + ), ): cv.positive_int, } ) diff --git a/homeassistant/components/feedreader/coordinator.py b/homeassistant/components/feedreader/coordinator.py index 6608c4312fe..f45b303946a 100644 --- a/homeassistant/components/feedreader/coordinator.py +++ b/homeassistant/components/feedreader/coordinator.py @@ -4,6 +4,7 @@ from __future__ import annotations from calendar import timegm from datetime import datetime +import html from logging import getLogger from time import gmtime, struct_time from typing import TYPE_CHECKING @@ -102,7 +103,8 @@ class FeedReaderCoordinator( """Set up the feed 
manager.""" feed = await self._async_fetch_feed() self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"]) - self.feed_author = feed["feed"].get("author") + if feed_author := feed["feed"].get("author"): + self.feed_author = html.unescape(feed_author) self.feed_version = feedparser.api.SUPPORTED_VERSIONS.get(feed["version"]) self._feed = feed diff --git a/homeassistant/components/feedreader/event.py b/homeassistant/components/feedreader/event.py index 48c18c4e70d..ad6aed0fc76 100644 --- a/homeassistant/components/feedreader/event.py +++ b/homeassistant/components/feedreader/event.py @@ -2,6 +2,7 @@ from __future__ import annotations +import html import logging from feedparser import FeedParserDict @@ -19,6 +20,7 @@ from .coordinator import FeedReaderCoordinator LOGGER = logging.getLogger(__name__) ATTR_CONTENT = "content" +ATTR_DESCRIPTION = "description" ATTR_LINK = "link" ATTR_TITLE = "title" @@ -40,7 +42,9 @@ class FeedReaderEvent(CoordinatorEntity[FeedReaderCoordinator], EventEntity): _attr_event_types = [EVENT_FEEDREADER] _attr_name = None _attr_has_entity_name = True - _unrecorded_attributes = frozenset({ATTR_CONTENT, ATTR_TITLE, ATTR_LINK}) + _unrecorded_attributes = frozenset( + {ATTR_CONTENT, ATTR_DESCRIPTION, ATTR_TITLE, ATTR_LINK} + ) coordinator: FeedReaderCoordinator def __init__(self, coordinator: FeedReaderCoordinator) -> None: @@ -73,14 +77,22 @@ class FeedReaderEvent(CoordinatorEntity[FeedReaderCoordinator], EventEntity): # so we always take the first entry in list, since we only care about the latest entry feed_data: FeedParserDict = data[0] + if description := feed_data.get("description"): + description = html.unescape(description) + + if title := feed_data.get("title"): + title = html.unescape(title) + if content := feed_data.get("content"): if isinstance(content, list) and isinstance(content[0], dict): content = content[0].get("value") + content = html.unescape(content) self._trigger_event( EVENT_FEEDREADER, { - ATTR_TITLE: feed_data.get("title"), + ATTR_DESCRIPTION: description, + ATTR_TITLE: title, ATTR_LINK: feed_data.get("link"), ATTR_CONTENT: content, }, diff --git a/homeassistant/components/feedreader/strings.json b/homeassistant/components/feedreader/strings.json index da66333fa5b..0f0492eb6c9 100644 --- a/homeassistant/components/feedreader/strings.json +++ b/homeassistant/components/feedreader/strings.json @@ -6,7 +6,7 @@ "url": "[%key:common::config_flow::data::url%]" } }, - "reconfigure_confirm": { + "reconfigure": { "description": "Update your configuration information for {name}.", "data": { "url": "[%key:common::config_flow::data::url%]" diff --git a/homeassistant/components/ffmpeg/__init__.py b/homeassistant/components/ffmpeg/__init__.py index 5e1be36f398..9a88317027e 100644 --- a/homeassistant/components/ffmpeg/__init__.py +++ b/homeassistant/components/ffmpeg/__init__.py @@ -3,11 +3,11 @@ from __future__ import annotations import asyncio -from functools import cached_property import re from haffmpeg.core import HAFFmpeg from haffmpeg.tools import IMAGE_JPEG, FFVersion, ImageFrame +from propcache import cached_property import voluptuous as vol from homeassistant.const import ( @@ -176,7 +176,7 @@ class FFmpegManager: return CONTENT_TYPE_MULTIPART.format("ffserver") -class FFmpegBase[_HAFFmpegT: HAFFmpeg](Entity): +class FFmpegBase[_HAFFmpegT: HAFFmpeg](Entity): # pylint: disable=hass-enforce-class-module """Interface object for FFmpeg.""" _attr_should_poll = False diff --git a/homeassistant/components/ffmpeg/icons.json 
b/homeassistant/components/ffmpeg/icons.json index a23f024599c..780eb071af1 100644 --- a/homeassistant/components/ffmpeg/icons.json +++ b/homeassistant/components/ffmpeg/icons.json @@ -1,7 +1,13 @@ { "services": { - "restart": "mdi:restart", - "start": "mdi:play", - "stop": "mdi:stop" + "restart": { + "service": "mdi:restart" + }, + "start": { + "service": "mdi:play" + }, + "stop": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/ffmpeg/manifest.json b/homeassistant/components/ffmpeg/manifest.json index ab9f3ed65c1..085db6791b3 100644 --- a/homeassistant/components/ffmpeg/manifest.json +++ b/homeassistant/components/ffmpeg/manifest.json @@ -4,5 +4,5 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ffmpeg", "integration_type": "system", - "requirements": ["ha-ffmpeg==3.2.0"] + "requirements": ["ha-ffmpeg==3.2.2"] } diff --git a/homeassistant/components/ffmpeg_motion/manifest.json b/homeassistant/components/ffmpeg_motion/manifest.json index 0115ed712e3..f51a6206e2b 100644 --- a/homeassistant/components/ffmpeg_motion/manifest.json +++ b/homeassistant/components/ffmpeg_motion/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["ffmpeg"], "documentation": "https://www.home-assistant.io/integrations/ffmpeg_motion", - "iot_class": "calculated" + "iot_class": "calculated", + "quality_scale": "legacy" } diff --git a/homeassistant/components/ffmpeg_noise/manifest.json b/homeassistant/components/ffmpeg_noise/manifest.json index 6352fed88c4..f1c0cc9f673 100644 --- a/homeassistant/components/ffmpeg_noise/manifest.json +++ b/homeassistant/components/ffmpeg_noise/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["ffmpeg"], "documentation": "https://www.home-assistant.io/integrations/ffmpeg_noise", - "iot_class": "calculated" + "iot_class": "calculated", + "quality_scale": "legacy" } diff --git a/homeassistant/components/fibaro/__init__.py b/homeassistant/components/fibaro/__init__.py index d6118aa3655..8ede0169482 100644 --- a/homeassistant/components/fibaro/__init__.py +++ b/homeassistant/components/fibaro/__init__.py @@ -15,14 +15,7 @@ from pyfibaro.fibaro_state_resolver import FibaroEvent, FibaroStateResolver from requests.exceptions import HTTPError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_ARMED, - ATTR_BATTERY_LEVEL, - CONF_PASSWORD, - CONF_URL, - CONF_USERNAME, - Platform, -) +from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ( ConfigEntryAuthFailed, @@ -31,13 +24,13 @@ from homeassistant.exceptions import ( ) from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceEntry, DeviceInfo -from homeassistant.helpers.entity import Entity from homeassistant.util import slugify from .const import CONF_IMPORT_PLUGINS, DOMAIN -_LOGGER = logging.getLogger(__name__) +type FibaroConfigEntry = ConfigEntry[FibaroController] +_LOGGER = logging.getLogger(__name__) PLATFORMS = [ Platform.BINARY_SENSOR, @@ -249,11 +242,14 @@ class FibaroController: platform = Platform.LOCK elif device.has_central_scene_event: platform = Platform.EVENT - elif device.value.has_value: - if device.value.is_bool_value: - platform = Platform.BINARY_SENSOR - else: - platform = Platform.SENSOR + elif device.value.has_value and device.value.is_bool_value: + platform = Platform.BINARY_SENSOR + elif ( + device.value.has_value + or "power" in 
device.properties + or "energy" in device.properties + ): + platform = Platform.SENSOR # Switches that control lights should show up as lights if platform == Platform.SWITCH and device.properties.get("isLight", False): @@ -386,7 +382,7 @@ def init_controller(data: Mapping[str, Any]) -> FibaroController: return controller -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool: """Set up the Fibaro Component. The unique id of the config entry is the serial number of the home center. @@ -400,7 +396,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except FibaroAuthFailed as auth_ex: raise ConfigEntryAuthFailed from auth_ex - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = controller + entry.runtime_data = controller # register the hub device info separately as the hub has sometimes no entities device_registry = dr.async_get(hass) @@ -422,25 +418,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool: """Unload a config entry.""" _LOGGER.debug("Shutting down Fibaro connection") unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - hass.data[DOMAIN][entry.entry_id].disable_state_handler() - hass.data[DOMAIN].pop(entry.entry_id) + entry.runtime_data.disable_state_handler() return unload_ok async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry + hass: HomeAssistant, config_entry: FibaroConfigEntry, device_entry: DeviceEntry ) -> bool: """Remove a device entry from fibaro integration. Only removing devices which are not present anymore are eligible to be removed. 
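The fibaro hunks above move the controller from `hass.data[DOMAIN][entry.entry_id]` to the entry's `runtime_data`, typed through `type FibaroConfigEntry = ConfigEntry[FibaroController]`, so setup and unload code can read it back without dictionary bookkeeping or casts. A minimal sketch of the same pattern with a hypothetical controller class:

```python
# Sketch of the entry.runtime_data pattern; ExampleController is hypothetical.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


class ExampleController:
    """Stand-in for the integration's controller object."""

    def disconnect(self) -> None:
        """Hypothetical teardown hook."""


# The generic parameter declares what runtime_data holds for this integration.
type ExampleConfigEntry = ConfigEntry[ExampleController]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Create the controller and attach it to the entry for its lifetime."""
    entry.runtime_data = ExampleController()
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Unload reads the controller back without hass.data bookkeeping."""
    entry.runtime_data.disconnect()
    return True
```

Platform modules do the same: `controller = entry.runtime_data` is already typed, which is exactly what the fibaro binary_sensor, climate, cover, light, lock, scene, sensor and switch hunks below switch to.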
""" - controller: FibaroController = hass.data[DOMAIN][config_entry.entry_id] + controller = config_entry.runtime_data for identifiers in controller.get_all_device_identifiers(): if device_entry.identifiers == identifiers: # Fibaro device is still served by the controller, @@ -450,118 +445,6 @@ async def async_remove_config_entry_device( return True -class FibaroDevice(Entity): - """Representation of a Fibaro device entity.""" - - _attr_should_poll = False - - def __init__(self, fibaro_device: DeviceModel) -> None: - """Initialize the device.""" - self.fibaro_device = fibaro_device - self.controller = fibaro_device.fibaro_controller - self.ha_id = fibaro_device.ha_id - self._attr_name = fibaro_device.friendly_name - self._attr_unique_id = fibaro_device.unique_id_str - - self._attr_device_info = self.controller.get_device_info(fibaro_device) - # propagate hidden attribute set in fibaro home center to HA - if not fibaro_device.visible: - self._attr_entity_registry_visible_default = False - - async def async_added_to_hass(self) -> None: - """Call when entity is added to hass.""" - self.controller.register(self.fibaro_device.fibaro_id, self._update_callback) - - def _update_callback(self) -> None: - """Update the state.""" - self.schedule_update_ha_state(True) - - @property - def level(self) -> int | None: - """Get the level of Fibaro device.""" - if self.fibaro_device.value.has_value: - return self.fibaro_device.value.int_value() - return None - - @property - def level2(self) -> int | None: - """Get the tilt level of Fibaro device.""" - if self.fibaro_device.value_2.has_value: - return self.fibaro_device.value_2.int_value() - return None - - def dont_know_message(self, cmd: str) -> None: - """Make a warning in case we don't know how to perform an action.""" - _LOGGER.warning( - "Not sure how to %s: %s (available actions: %s)", - cmd, - str(self.ha_id), - str(self.fibaro_device.actions), - ) - - def set_level(self, level: int) -> None: - """Set the level of Fibaro device.""" - self.action("setValue", level) - if self.fibaro_device.value.has_value: - self.fibaro_device.properties["value"] = level - if self.fibaro_device.has_brightness: - self.fibaro_device.properties["brightness"] = level - - def set_level2(self, level: int) -> None: - """Set the level2 of Fibaro device.""" - self.action("setValue2", level) - if self.fibaro_device.value_2.has_value: - self.fibaro_device.properties["value2"] = level - - def call_turn_on(self) -> None: - """Turn on the Fibaro device.""" - self.action("turnOn") - - def call_turn_off(self) -> None: - """Turn off the Fibaro device.""" - self.action("turnOff") - - def call_set_color(self, red: int, green: int, blue: int, white: int) -> None: - """Set the color of Fibaro device.""" - red = int(max(0, min(255, red))) - green = int(max(0, min(255, green))) - blue = int(max(0, min(255, blue))) - white = int(max(0, min(255, white))) - color_str = f"{red},{green},{blue},{white}" - self.fibaro_device.properties["color"] = color_str - self.action("setColor", str(red), str(green), str(blue), str(white)) - - def action(self, cmd: str, *args: Any) -> None: - """Perform an action on the Fibaro HC.""" - if cmd in self.fibaro_device.actions: - self.fibaro_device.execute_action(cmd, args) - _LOGGER.debug("-> %s.%s%s called", str(self.ha_id), str(cmd), str(args)) - else: - self.dont_know_message(cmd) - - @property - def current_binary_state(self) -> bool: - """Return the current binary state.""" - return self.fibaro_device.value.bool_value(False) - - @property - def 
extra_state_attributes(self) -> Mapping[str, Any]: - """Return the state attributes of the device.""" - attr = {"fibaro_id": self.fibaro_device.fibaro_id} - - if self.fibaro_device.has_battery_level: - attr[ATTR_BATTERY_LEVEL] = self.fibaro_device.battery_level - if self.fibaro_device.has_armed: - attr[ATTR_ARMED] = self.fibaro_device.armed - - return attr - - def update(self) -> None: - """Update the available state of the entity.""" - if self.fibaro_device.has_dead: - self._attr_available = not self.fibaro_device.dead - - class FibaroConnectFailed(HomeAssistantError): """Error to indicate we cannot connect to fibaro home center.""" diff --git a/homeassistant/components/fibaro/binary_sensor.py b/homeassistant/components/fibaro/binary_sensor.py index 3c965c11b34..16e79c0c1d0 100644 --- a/homeassistant/components/fibaro/binary_sensor.py +++ b/homeassistant/components/fibaro/binary_sensor.py @@ -12,13 +12,12 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController, FibaroDevice -from .const import DOMAIN +from . import FibaroConfigEntry +from .entity import FibaroEntity SENSOR_TYPES = { "com.fibaro.floodSensor": ["Flood", "mdi:water", BinarySensorDeviceClass.MOISTURE], @@ -42,11 +41,11 @@ SENSOR_TYPES = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [ FibaroBinarySensor(device) @@ -56,7 +55,7 @@ async def async_setup_entry( ) -class FibaroBinarySensor(FibaroDevice, BinarySensorEntity): +class FibaroBinarySensor(FibaroEntity, BinarySensorEntity): """Representation of a Fibaro Binary Sensor.""" def __init__(self, fibaro_device: DeviceModel) -> None: diff --git a/homeassistant/components/fibaro/climate.py b/homeassistant/components/fibaro/climate.py index cf08d52d36e..45f700026a0 100644 --- a/homeassistant/components/fibaro/climate.py +++ b/homeassistant/components/fibaro/climate.py @@ -17,13 +17,12 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, Platform, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController, FibaroDevice -from .const import DOMAIN +from . 
import FibaroConfigEntry +from .entity import FibaroEntity PRESET_RESUME = "resume" PRESET_MOIST = "moist" @@ -110,11 +109,11 @@ OP_MODE_ACTIONS = ("setMode", "setOperatingMode", "setThermostatMode") async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [ FibaroThermostat(device) @@ -124,18 +123,16 @@ async def async_setup_entry( ) -class FibaroThermostat(FibaroDevice, ClimateEntity): +class FibaroThermostat(FibaroEntity, ClimateEntity): """Representation of a Fibaro Thermostat.""" - _enable_turn_on_off_backwards_compatibility = False - def __init__(self, fibaro_device: DeviceModel) -> None: """Initialize the Fibaro device.""" super().__init__(fibaro_device) - self._temp_sensor_device: FibaroDevice | None = None - self._target_temp_device: FibaroDevice | None = None - self._op_mode_device: FibaroDevice | None = None - self._fan_mode_device: FibaroDevice | None = None + self._temp_sensor_device: FibaroEntity | None = None + self._target_temp_device: FibaroEntity | None = None + self._op_mode_device: FibaroEntity | None = None + self._fan_mode_device: FibaroEntity | None = None self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id) siblings = fibaro_device.fibaro_controller.get_siblings(fibaro_device) @@ -150,23 +147,23 @@ class FibaroThermostat(FibaroDevice, ClimateEntity): and (device.value.has_value or device.has_heating_thermostat_setpoint) and device.unit in ("C", "F") ): - self._temp_sensor_device = FibaroDevice(device) + self._temp_sensor_device = FibaroEntity(device) tempunit = device.unit if any( action for action in TARGET_TEMP_ACTIONS if action in device.actions ): - self._target_temp_device = FibaroDevice(device) + self._target_temp_device = FibaroEntity(device) self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE if device.has_unit: tempunit = device.unit if any(action for action in OP_MODE_ACTIONS if action in device.actions): - self._op_mode_device = FibaroDevice(device) + self._op_mode_device = FibaroEntity(device) self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE if "setFanMode" in device.actions: - self._fan_mode_device = FibaroDevice(device) + self._fan_mode_device = FibaroEntity(device) self._attr_supported_features |= ClimateEntityFeature.FAN_MODE if tempunit == "F": @@ -273,7 +270,9 @@ class FibaroThermostat(FibaroDevice, ClimateEntity): if isinstance(fibaro_operation_mode, str): with suppress(ValueError): return HVACMode(fibaro_operation_mode.lower()) - elif fibaro_operation_mode in OPMODES_HVAC: + # when the mode cannot be instantiated a preset_mode is selected + return HVACMode.AUTO + if fibaro_operation_mode in OPMODES_HVAC: return OPMODES_HVAC[fibaro_operation_mode] return None @@ -281,8 +280,6 @@ class FibaroThermostat(FibaroDevice, ClimateEntity): """Set new target operation mode.""" if not self._op_mode_device: return - if self.preset_mode: - return if "setOperatingMode" in self._op_mode_device.fibaro_device.actions: self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode]) diff --git a/homeassistant/components/fibaro/config_flow.py b/homeassistant/components/fibaro/config_flow.py index 9003704348d..0ffd9aaa48f 100644 --- a/homeassistant/components/fibaro/config_flow.py +++ b/homeassistant/components/fibaro/config_flow.py @@ -9,8 +9,8 @@ from 
typing import Any from slugify import slugify import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import HomeAssistant from . import FibaroAuthFailed, FibaroConnectFailed, init_controller @@ -63,10 +63,6 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self._reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -94,9 +90,6 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauthentication.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -105,9 +98,10 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initiated by reauthentication.""" errors = {} - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() + if user_input is not None: - new_data = self._reauth_entry.data | user_input + new_data = reauth_entry.data | user_input try: await _validate_input(self.hass, new_data) except FibaroConnectFailed: @@ -115,19 +109,16 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN): except FibaroAuthFailed: errors["base"] = "invalid_auth" else: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=new_data + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}), errors=errors, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], + CONF_NAME: reauth_entry.title, }, ) diff --git a/homeassistant/components/fibaro/cover.py b/homeassistant/components/fibaro/cover.py index e71ae8982e7..bfebbf87bd2 100644 --- a/homeassistant/components/fibaro/cover.py +++ b/homeassistant/components/fibaro/cover.py @@ -13,29 +13,28 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController, FibaroDevice -from .const import DOMAIN +from . 
import FibaroConfigEntry +from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro covers.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroCover(device) for device in controller.fibaro_devices[Platform.COVER]], True, ) -class FibaroCover(FibaroDevice, CoverEntity): +class FibaroCover(FibaroEntity, CoverEntity): """Representation a Fibaro Cover.""" def __init__(self, fibaro_device: DeviceModel) -> None: @@ -68,15 +67,29 @@ class FibaroCover(FibaroDevice, CoverEntity): # so if it is missing we have a device which supports open / close only return not self.fibaro_device.value.has_value - @property - def current_cover_position(self) -> int | None: - """Return current position of cover. 0 is closed, 100 is open.""" - return self.bound(self.level) + def update(self) -> None: + """Update the state.""" + super().update() - @property - def current_cover_tilt_position(self) -> int | None: - """Return the current tilt position for venetian blinds.""" - return self.bound(self.level2) + self._attr_current_cover_position = self.bound(self.level) + self._attr_current_cover_tilt_position = self.bound(self.level2) + + device_state = self.fibaro_device.state + + # Be aware that opening and closing is only available for some modern + # devices. + # For example the Fibaro Roller Shutter 4 reports this correctly. + if device_state.has_value: + self._attr_is_opening = device_state.str_value().lower() == "opening" + self._attr_is_closing = device_state.str_value().lower() == "closing" + + closed: bool | None = None + if self._is_open_close_only(): + if device_state.has_value and device_state.str_value().lower() != "unknown": + closed = device_state.str_value().lower() == "closed" + elif self.current_cover_position is not None: + closed = self.current_cover_position == 0 + self._attr_is_closed = closed def set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" @@ -86,19 +99,6 @@ class FibaroCover(FibaroDevice, CoverEntity): """Move the cover to a specific position.""" self.set_level2(cast(int, kwargs.get(ATTR_TILT_POSITION))) - @property - def is_closed(self) -> bool | None: - """Return if the cover is closed.""" - if self._is_open_close_only(): - state = self.fibaro_device.state - if not state.has_value or state.str_value().lower() == "unknown": - return None - return state.str_value().lower() == "closed" - - if self.current_cover_position is None: - return None - return self.current_cover_position == 0 - def open_cover(self, **kwargs: Any) -> None: """Open the cover.""" self.action("open") diff --git a/homeassistant/components/fibaro/entity.py b/homeassistant/components/fibaro/entity.py new file mode 100644 index 00000000000..6a8e12136c8 --- /dev/null +++ b/homeassistant/components/fibaro/entity.py @@ -0,0 +1,126 @@ +"""Support for the Fibaro devices.""" + +from __future__ import annotations + +from collections.abc import Mapping +import logging +from typing import Any + +from pyfibaro.fibaro_device import DeviceModel + +from homeassistant.const import ATTR_ARMED, ATTR_BATTERY_LEVEL +from homeassistant.helpers.entity import Entity + +_LOGGER = logging.getLogger(__name__) + + +class FibaroEntity(Entity): + """Representation of a Fibaro device entity.""" + + _attr_should_poll = False + + def __init__(self, fibaro_device: DeviceModel) -> None: + 
"""Initialize the device.""" + self.fibaro_device = fibaro_device + self.controller = fibaro_device.fibaro_controller + self.ha_id = fibaro_device.ha_id + self._attr_name = fibaro_device.friendly_name + self._attr_unique_id = fibaro_device.unique_id_str + + self._attr_device_info = self.controller.get_device_info(fibaro_device) + # propagate hidden attribute set in fibaro home center to HA + if not fibaro_device.visible: + self._attr_entity_registry_visible_default = False + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + self.controller.register(self.fibaro_device.fibaro_id, self._update_callback) + + def _update_callback(self) -> None: + """Update the state.""" + self.schedule_update_ha_state(True) + + @property + def level(self) -> int | None: + """Get the level of Fibaro device.""" + if self.fibaro_device.value.has_value: + return self.fibaro_device.value.int_value() + return None + + @property + def level2(self) -> int | None: + """Get the tilt level of Fibaro device.""" + if self.fibaro_device.value_2.has_value: + return self.fibaro_device.value_2.int_value() + return None + + def dont_know_message(self, cmd: str) -> None: + """Make a warning in case we don't know how to perform an action.""" + _LOGGER.warning( + "Not sure how to %s: %s (available actions: %s)", + cmd, + str(self.ha_id), + str(self.fibaro_device.actions), + ) + + def set_level(self, level: int) -> None: + """Set the level of Fibaro device.""" + self.action("setValue", level) + if self.fibaro_device.value.has_value: + self.fibaro_device.properties["value"] = level + if self.fibaro_device.has_brightness: + self.fibaro_device.properties["brightness"] = level + + def set_level2(self, level: int) -> None: + """Set the level2 of Fibaro device.""" + self.action("setValue2", level) + if self.fibaro_device.value_2.has_value: + self.fibaro_device.properties["value2"] = level + + def call_turn_on(self) -> None: + """Turn on the Fibaro device.""" + self.action("turnOn") + + def call_turn_off(self) -> None: + """Turn off the Fibaro device.""" + self.action("turnOff") + + def call_set_color(self, red: int, green: int, blue: int, white: int) -> None: + """Set the color of Fibaro device.""" + red = int(max(0, min(255, red))) + green = int(max(0, min(255, green))) + blue = int(max(0, min(255, blue))) + white = int(max(0, min(255, white))) + color_str = f"{red},{green},{blue},{white}" + self.fibaro_device.properties["color"] = color_str + self.action("setColor", str(red), str(green), str(blue), str(white)) + + def action(self, cmd: str, *args: Any) -> None: + """Perform an action on the Fibaro HC.""" + if cmd in self.fibaro_device.actions: + self.fibaro_device.execute_action(cmd, args) + _LOGGER.debug("-> %s.%s%s called", str(self.ha_id), str(cmd), str(args)) + else: + self.dont_know_message(cmd) + + @property + def current_binary_state(self) -> bool: + """Return the current binary state.""" + return self.fibaro_device.value.bool_value(False) + + @property + def extra_state_attributes(self) -> Mapping[str, Any]: + """Return the state attributes of the device.""" + attr = {"fibaro_id": self.fibaro_device.fibaro_id} + + if self.fibaro_device.has_battery_level: + attr[ATTR_BATTERY_LEVEL] = self.fibaro_device.battery_level + if self.fibaro_device.has_armed: + attr[ATTR_ARMED] = self.fibaro_device.armed + + return attr + + def update(self) -> None: + """Update the available state of the entity.""" + if self.fibaro_device.has_dead: + self._attr_available = not self.fibaro_device.dead diff --git 
a/homeassistant/components/fibaro/event.py b/homeassistant/components/fibaro/event.py index c65e8f143c6..a2d5da7f877 100644 --- a/homeassistant/components/fibaro/event.py +++ b/homeassistant/components/fibaro/event.py @@ -10,22 +10,21 @@ from homeassistant.components.event import ( EventDeviceClass, EventEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController, FibaroDevice -from .const import DOMAIN +from . import FibaroConfigEntry +from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro event entities.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data # Each scene event represents a button on a device async_add_entities( @@ -38,7 +37,7 @@ async def async_setup_entry( ) -class FibaroEventEntity(FibaroDevice, EventEntity): +class FibaroEventEntity(FibaroEntity, EventEntity): """Representation of a Fibaro Event Entity.""" def __init__(self, fibaro_device: DeviceModel, scene_event: SceneEvent) -> None: diff --git a/homeassistant/components/fibaro/light.py b/homeassistant/components/fibaro/light.py index 2f2182c53cd..d40e26244f3 100644 --- a/homeassistant/components/fibaro/light.py +++ b/homeassistant/components/fibaro/light.py @@ -17,13 +17,12 @@ from homeassistant.components.light import ( brightness_supported, color_supported, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController, FibaroDevice -from .const import DOMAIN +from . import FibaroConfigEntry +from .entity import FibaroEntity PARALLEL_UPDATES = 2 @@ -51,18 +50,18 @@ def scaleto99(value: int | None) -> int: async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroLight(device) for device in controller.fibaro_devices[Platform.LIGHT]], True, ) -class FibaroLight(FibaroDevice, LightEntity): +class FibaroLight(FibaroEntity, LightEntity): """Representation of a Fibaro Light, including dimmable.""" def __init__(self, fibaro_device: DeviceModel) -> None: @@ -131,32 +130,25 @@ class FibaroLight(FibaroDevice, LightEntity): """Turn the light off.""" self.call_turn_off() - @property - def is_on(self) -> bool | None: - """Return true if device is on. - - Dimmable and RGB lights can be on based on different - properties, so we need to check here several values. - - JSON for HC2 uses always string, HC3 uses int for integers. 
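The fibaro cover hunk above, and the light hunk that continues below, both move state derivation out of read-only properties and into `update()`, caching the results in the corresponding `_attr_*` members that the base entity classes already expose. A minimal sketch of that shape for a cover, using a hypothetical device snapshot object:

```python
# Sketch of the "compute in update(), cache in _attr_*" shape used by the
# fibaro cover and light refactoring; ExampleDevice is hypothetical.
from __future__ import annotations

from dataclasses import dataclass

from homeassistant.components.cover import CoverEntity


@dataclass
class ExampleDevice:
    """Hypothetical upstream device snapshot."""

    position: int | None = None
    state: str = "unknown"


class ExampleCover(CoverEntity):
    """Cover that derives all of its state during update()."""

    def __init__(self, device: ExampleDevice) -> None:
        self._device = device

    def update(self) -> None:
        """Derive state once; the base class serves it from the _attr_* members."""
        self._attr_current_cover_position = self._device.position
        self._attr_is_opening = self._device.state == "opening"
        self._attr_is_closing = self._device.state == "closing"
        if self._device.position is not None:
            self._attr_is_closed = self._device.position == 0
        else:
            self._attr_is_closed = None
```

The diff applies the same idea to `is_on` for lights and to `current_cover_position`/`is_closed` for covers, so a single `update()` pass keeps all derived attributes consistent.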
- """ - if self.current_binary_state: - return True - with suppress(TypeError): - if self.fibaro_device.brightness != 0: - return True - with suppress(TypeError): - if self.fibaro_device.current_program != 0: - return True - with suppress(TypeError): - if self.fibaro_device.current_program_id != 0: - return True - - return False - def update(self) -> None: """Update the state.""" super().update() + + # Dimmable and RGB lights can be on based on different + # properties, so we need to check here several values + # to see if the light is on. + light_is_on = self.current_binary_state + with suppress(TypeError): + if self.fibaro_device.brightness != 0: + light_is_on = True + with suppress(TypeError): + if self.fibaro_device.current_program != 0: + light_is_on = True + with suppress(TypeError): + if self.fibaro_device.current_program_id != 0: + light_is_on = True + self._attr_is_on = light_is_on + # Brightness handling if brightness_supported(self.supported_color_modes): self._attr_brightness = scaleto255(self.fibaro_device.value.int_value()) @@ -171,7 +163,7 @@ class FibaroLight(FibaroDevice, LightEntity): if rgbw == (0, 0, 0, 0) and self.fibaro_device.last_color_set.has_color: rgbw = self.fibaro_device.last_color_set.rgbw_color - if self._attr_color_mode == ColorMode.RGB: + if self.color_mode == ColorMode.RGB: self._attr_rgb_color = rgbw[:3] else: self._attr_rgbw_color = rgbw diff --git a/homeassistant/components/fibaro/lock.py b/homeassistant/components/fibaro/lock.py index faa82815b8d..62a9dfa43b1 100644 --- a/homeassistant/components/fibaro/lock.py +++ b/homeassistant/components/fibaro/lock.py @@ -7,29 +7,28 @@ from typing import Any from pyfibaro.fibaro_device import DeviceModel from homeassistant.components.lock import ENTITY_ID_FORMAT, LockEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController, FibaroDevice -from .const import DOMAIN +from . 
import FibaroConfigEntry +from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro locks.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroLock(device) for device in controller.fibaro_devices[Platform.LOCK]], True, ) -class FibaroLock(FibaroDevice, LockEntity): +class FibaroLock(FibaroEntity, LockEntity): """Representation of a Fibaro Lock.""" def __init__(self, fibaro_device: DeviceModel) -> None: diff --git a/homeassistant/components/fibaro/manifest.json b/homeassistant/components/fibaro/manifest.json index 39850672d06..d2a1186b05b 100644 --- a/homeassistant/components/fibaro/manifest.json +++ b/homeassistant/components/fibaro/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pyfibaro"], - "requirements": ["pyfibaro==0.7.8"] + "requirements": ["pyfibaro==0.8.0"] } diff --git a/homeassistant/components/fibaro/scene.py b/homeassistant/components/fibaro/scene.py index a40a1ef5b57..a4c0f1bd7f1 100644 --- a/homeassistant/components/fibaro/scene.py +++ b/homeassistant/components/fibaro/scene.py @@ -7,23 +7,22 @@ from typing import Any from pyfibaro.fibaro_scene import SceneModel from homeassistant.components.scene import Scene -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import slugify -from . import FibaroController +from . import FibaroConfigEntry, FibaroController from .const import DOMAIN async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro scenes.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroScene(scene, controller) for scene in controller.read_scenes()], True, diff --git a/homeassistant/components/fibaro/sensor.py b/homeassistant/components/fibaro/sensor.py index fd6ec74050d..245a0d087d8 100644 --- a/homeassistant/components/fibaro/sensor.py +++ b/homeassistant/components/fibaro/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, @@ -27,8 +26,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import convert -from . import FibaroController, FibaroDevice -from .const import DOMAIN +from . 
import FibaroConfigEntry +from .entity import FibaroEntity # List of known sensors which represents a fibaro device MAIN_SENSOR_TYPES: dict[str, SensorEntityDescription] = { @@ -102,15 +101,20 @@ FIBARO_TO_HASS_UNIT: dict[str, str] = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data entities: list[SensorEntity] = [ FibaroSensor(device, MAIN_SENSOR_TYPES.get(device.type)) for device in controller.fibaro_devices[Platform.SENSOR] + # Some sensor devices do not have a value but report power or energy. + # These sensors are added to the sensor list but need to be excluded + # here as the FibaroSensor expects a value. One example is the + # Qubino 3 phase power meter. + if device.value.has_value ] entities.extend( @@ -132,7 +136,7 @@ async def async_setup_entry( async_add_entities(entities, True) -class FibaroSensor(FibaroDevice, SensorEntity): +class FibaroSensor(FibaroEntity, SensorEntity): """Representation of a Fibaro Sensor.""" def __init__( @@ -161,7 +165,7 @@ class FibaroSensor(FibaroDevice, SensorEntity): self._attr_native_value = self.fibaro_device.value.float_value() -class FibaroAdditionalSensor(FibaroDevice, SensorEntity): +class FibaroAdditionalSensor(FibaroEntity, SensorEntity): """Representation of a Fibaro Additional Sensor.""" def __init__( diff --git a/homeassistant/components/fibaro/strings.json b/homeassistant/components/fibaro/strings.json index de875176cdb..99f718d545c 100644 --- a/homeassistant/components/fibaro/strings.json +++ b/homeassistant/components/fibaro/strings.json @@ -3,16 +3,25 @@ "step": { "user": { "data": { - "url": "URL in the format http://HOST/api/", + "url": "[%key:common::config_flow::data::url%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "import_plugins": "Import entities from fibaro plugins?" + "import_plugins": "Import entities from fibaro plugins / quickapps" + }, + "data_description": { + "url": "The URL of the Fibaro hub in the format `http(s)://IP`.", + "username": "The username of the Fibaro hub user.", + "password": "The password of the Fibaro hub user.", + "import_plugins": "Select if entities from Fibaro plugins / quickapps should be imported." } }, "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" }, + "data_description": { + "password": "[%key:component::fibaro::config::step::user::data_description::password%]" + }, "title": "[%key:common::config_flow::title::reauth%]", "description": "Please update your password for {username}" } diff --git a/homeassistant/components/fibaro/switch.py b/homeassistant/components/fibaro/switch.py index f6ceed972f7..f67683dff6a 100644 --- a/homeassistant/components/fibaro/switch.py +++ b/homeassistant/components/fibaro/switch.py @@ -7,29 +7,28 @@ from typing import Any from pyfibaro.fibaro_device import DeviceModel from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController, FibaroDevice -from .const import DOMAIN +from . 
import FibaroConfigEntry +from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro switches.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroSwitch(device) for device in controller.fibaro_devices[Platform.SWITCH]], True, ) -class FibaroSwitch(FibaroDevice, SwitchEntity): +class FibaroSwitch(FibaroEntity, SwitchEntity): """Representation of a Fibaro Switch.""" def __init__(self, fibaro_device: DeviceModel) -> None: diff --git a/homeassistant/components/fido/manifest.json b/homeassistant/components/fido/manifest.json index dc440304646..23949a56ee2 100644 --- a/homeassistant/components/fido/manifest.json +++ b/homeassistant/components/fido/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/fido", "iot_class": "cloud_polling", "loggers": ["pyfido"], + "quality_scale": "legacy", "requirements": ["pyfido==2.1.2"] } diff --git a/homeassistant/components/file/__init__.py b/homeassistant/components/file/__init__.py index 0c9cfee5f4d..7bc206057c8 100644 --- a/homeassistant/components/file/__init__.py +++ b/homeassistant/components/file/__init__.py @@ -3,88 +3,16 @@ from copy import deepcopy from typing import Any -from homeassistant.components.notify import migrate_notify_issue -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_FILE_PATH, - CONF_NAME, - CONF_PLATFORM, - CONF_SCAN_INTERVAL, - Platform, -) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_FILE_PATH, CONF_NAME, CONF_PLATFORM, Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import ( - config_validation as cv, - discovery, - issue_registry as ir, -) -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .notify import PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA -from .sensor import PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA - -IMPORT_SCHEMA = { - Platform.SENSOR: SENSOR_PLATFORM_SCHEMA, - Platform.NOTIFY: NOTIFY_PLATFORM_SCHEMA, -} - -CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [Platform.NOTIFY, Platform.SENSOR] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the file integration.""" - - hass.data[DOMAIN] = config - if hass.config_entries.async_entries(DOMAIN): - # We skip import in case we already have config entries - return True - # The use of the legacy notify service was deprecated with HA Core 2024.6.0 - # and will be removed with HA Core 2024.12 - migrate_notify_issue(hass, DOMAIN, "File", "2024.12.0") - # The YAML config was imported with HA Core 2024.6.0 and will be removed with - # HA Core 2024.12 - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - learn_more_url="https://www.home-assistant.io/integrations/file/", - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "File", - }, - ) - - # Import the YAML config into separate config entries - platforms_config: dict[Platform, list[ConfigType]] = { - 
domain: config[domain] for domain in PLATFORMS if domain in config - } - for domain, items in platforms_config.items(): - for item in items: - if item[CONF_PLATFORM] == DOMAIN: - file_config_item = IMPORT_SCHEMA[domain](item) - file_config_item[CONF_PLATFORM] = domain - if CONF_SCAN_INTERVAL in file_config_item: - del file_config_item[CONF_SCAN_INTERVAL] - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=file_config_item, - ) - ) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a file component entry.""" config = {**entry.data, **entry.options} @@ -102,20 +30,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, [Platform(entry.data[CONF_PLATFORM])] ) entry.async_on_unload(entry.add_update_listener(update_listener)) - if entry.data[CONF_PLATFORM] == Platform.NOTIFY and CONF_NAME in entry.data: - # New notify entities are being setup through the config entry, - # but during the deprecation period we want to keep the legacy notify platform, - # so we forward the setup config through discovery. - # Only the entities from yaml will still be available as legacy service. - hass.async_create_task( - discovery.async_load_platform( - hass, - Platform.NOTIFY, - DOMAIN, - config, - hass.data[DOMAIN], - ) - ) return True diff --git a/homeassistant/components/file/config_flow.py b/homeassistant/components/file/config_flow.py index 8cb58ec1f47..992635d05fd 100644 --- a/homeassistant/components/file/config_flow.py +++ b/homeassistant/components/file/config_flow.py @@ -1,7 +1,8 @@ """Config flow for file integration.""" +from __future__ import annotations + from copy import deepcopy -import os from typing import Any import voluptuous as vol @@ -11,11 +12,9 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_FILE_PATH, - CONF_FILENAME, CONF_NAME, CONF_PLATFORM, CONF_UNIT_OF_MEASUREMENT, @@ -74,9 +73,11 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> FileOptionsFlowHandler: """Get the options flow for this handler.""" - return FileOptionsFlowHandler(config_entry) + return FileOptionsFlowHandler() async def validate_file_path(self, file_path: str) -> bool: """Ensure the file path is valid.""" @@ -129,32 +130,8 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Handle file sensor config flow.""" return await self._async_handle_step(Platform.SENSOR.value, user_input) - async def async_step_import( - self, import_data: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Import `file`` config from configuration.yaml.""" - assert import_data is not None - self._async_abort_entries_match(import_data) - platform = import_data[CONF_PLATFORM] - name: str = import_data.get(CONF_NAME, DEFAULT_NAME) - file_name: str - if platform == Platform.NOTIFY: - file_name = import_data.pop(CONF_FILENAME) - file_path: str = os.path.join(self.hass.config.config_dir, file_name) - import_data[CONF_FILE_PATH] = file_path - else: - file_path = import_data[CONF_FILE_PATH] - title = f"{name} [{file_path}]" - data = deepcopy(import_data) - options = {} - for key, value in import_data.items(): - if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): - data.pop(key) - options[key] = 
value - return self.async_create_entry(title=title, data=data, options=options) - -class FileOptionsFlowHandler(OptionsFlowWithConfigEntry): +class FileOptionsFlowHandler(OptionsFlow): """Handle File options.""" async def async_step_init( diff --git a/homeassistant/components/file/notify.py b/homeassistant/components/file/notify.py index 9411b7cf1a8..10e3d4a4ac6 100644 --- a/homeassistant/components/file/notify.py +++ b/homeassistant/components/file/notify.py @@ -2,104 +2,23 @@ from __future__ import annotations -from functools import partial -import logging import os from typing import Any, TextIO -import voluptuous as vol - from homeassistant.components.notify import ( - ATTR_TITLE, ATTR_TITLE_DEFAULT, - PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA, - BaseNotificationService, NotifyEntity, NotifyEntityFeature, - migrate_notify_issue, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME, CONF_NAME +from homeassistant.const import CONF_FILE_PATH, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util from .const import CONF_TIMESTAMP, DEFAULT_NAME, DOMAIN, FILE_ICON -_LOGGER = logging.getLogger(__name__) - -# The legacy platform schema uses a filename, after import -# The full file path is stored in the config entry -PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_FILENAME): cv.string, - vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean, - } -) - - -async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> FileNotificationService | None: - """Get the file notification service.""" - if discovery_info is None: - # We only set up through discovery - return None - file_path: str = discovery_info[CONF_FILE_PATH] - timestamp: bool = discovery_info[CONF_TIMESTAMP] - - return FileNotificationService(file_path, timestamp) - - -class FileNotificationService(BaseNotificationService): - """Implement the notification service for the File service.""" - - def __init__(self, file_path: str, add_timestamp: bool) -> None: - """Initialize the service.""" - self._file_path = file_path - self.add_timestamp = add_timestamp - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message to a file.""" - # The use of the legacy notify service was deprecated with HA Core 2024.6.0 - # and will be removed with HA Core 2024.12 - migrate_notify_issue( - self.hass, DOMAIN, "File", "2024.12.0", service_name=self._service_name - ) - await self.hass.async_add_executor_job( - partial(self.send_message, message, **kwargs) - ) - - def send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message to a file.""" - file: TextIO - filepath = self._file_path - try: - with open(filepath, "a", encoding="utf8") as file: - if os.stat(filepath).st_size == 0: - title = ( - f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log" - f" started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" - ) - file.write(title) - - if self.add_timestamp: - text = f"{dt_util.utcnow().isoformat()} {message}\n" - else: - text = f"{message}\n" - file.write(text) - except OSError as exc: - raise ServiceValidationError( - 
translation_domain=DOMAIN, - translation_key="write_access_failed", - translation_placeholders={"filename": filepath, "exc": f"{exc!r}"}, - ) from exc - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/file/sensor.py b/homeassistant/components/file/sensor.py index e37a3df86a6..879c06e29f3 100644 --- a/homeassistant/components/file/sensor.py +++ b/homeassistant/components/file/sensor.py @@ -6,12 +6,8 @@ import logging import os from file_read_backwards import FileReadBackwards -import voluptuous as vol -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorEntity, -) +from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_FILE_PATH, @@ -20,38 +16,13 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.template import Template -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DEFAULT_NAME, FILE_ICON _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_FILE_PATH): cv.isfile, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_VALUE_TEMPLATE): cv.string, - vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, - } -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the file sensor from YAML. - - The YAML platform config is automatically - imported to a config entry, this method can be removed - when YAML support is removed. 
- """ - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/file/strings.json b/homeassistant/components/file/strings.json index 60ebf451f78..bd8f23602e3 100644 --- a/homeassistant/components/file/strings.json +++ b/homeassistant/components/file/strings.json @@ -18,7 +18,7 @@ }, "data_description": { "file_path": "The local file path to retrieve the sensor value from", - "value_template": "A template to render the the sensors value based on the file content", + "value_template": "A template to render the sensor's value based on the file content", "unit_of_measurement": "Unit of measurement for the sensor" } }, diff --git a/homeassistant/components/filesize/config_flow.py b/homeassistant/components/filesize/config_flow.py index 51eff46bdb3..8ffe3f94353 100644 --- a/homeassistant/components/filesize/config_flow.py +++ b/homeassistant/components/filesize/config_flow.py @@ -11,7 +11,6 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_FILE_PATH from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from .const import DOMAIN @@ -20,20 +19,20 @@ DATA_SCHEMA = vol.Schema({vol.Required(CONF_FILE_PATH): str}) _LOGGER = logging.getLogger(__name__) -def validate_path(hass: HomeAssistant, path: str) -> str: +def validate_path(hass: HomeAssistant, path: str) -> tuple[str | None, dict[str, str]]: """Validate path.""" get_path = pathlib.Path(path) if not get_path.exists() or not get_path.is_file(): _LOGGER.error("Can not access file %s", path) - raise NotValidError + return (None, {"base": "not_valid"}) if not hass.config.is_allowed_path(path): _LOGGER.error("Filepath %s is not allowed", path) - raise NotAllowedError + return (None, {"base": "not_allowed"}) full_path = get_path.absolute() - return str(full_path) + return (str(full_path), {}) class FilesizeConfigFlow(ConfigFlow, domain=DOMAIN): @@ -45,18 +44,13 @@ class FilesizeConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - errors: dict[str, Any] = {} + errors: dict[str, str] = {} if user_input is not None: - try: - full_path = await self.hass.async_add_executor_job( - validate_path, self.hass, user_input[CONF_FILE_PATH] - ) - except NotValidError: - errors["base"] = "not_valid" - except NotAllowedError: - errors["base"] = "not_allowed" - else: + full_path, errors = await self.hass.async_add_executor_job( + validate_path, self.hass, user_input[CONF_FILE_PATH] + ) + if not errors: await self.async_set_unique_id(full_path) self._abort_if_unique_id_configured() @@ -70,10 +64,29 @@ class FilesizeConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfigure flow initialized by the user.""" + errors: dict[str, str] = {} -class NotValidError(HomeAssistantError): - """Path is not valid error.""" + if user_input is not None: + reconfigure_entry = self._get_reconfigure_entry() + full_path, errors = await self.hass.async_add_executor_job( + validate_path, self.hass, user_input[CONF_FILE_PATH] + ) + if not errors: + await self.async_set_unique_id(full_path) + self._abort_if_unique_id_configured() + name = str(user_input[CONF_FILE_PATH]).rsplit("/", maxsplit=1)[-1] + return self.async_update_reload_and_abort( + reconfigure_entry, 
+ title=name, + unique_id=self.unique_id, + data_updates={CONF_FILE_PATH: user_input[CONF_FILE_PATH]}, + ) -class NotAllowedError(HomeAssistantError): - """Path is not allowed error.""" + return self.async_show_form( + step_id="reconfigure", data_schema=DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/filesize/coordinator.py b/homeassistant/components/filesize/coordinator.py index c0dbb14555e..8350cee91bf 100644 --- a/homeassistant/components/filesize/coordinator.py +++ b/homeassistant/components/filesize/coordinator.py @@ -60,12 +60,14 @@ class FileSizeCoordinator(DataUpdateCoordinator[dict[str, int | float | datetime statinfo = await self.hass.async_add_executor_job(self._update) size = statinfo.st_size last_updated = dt_util.utc_from_timestamp(statinfo.st_mtime) + created = dt_util.utc_from_timestamp(statinfo.st_ctime) _LOGGER.debug("size %s, last updated %s", size, last_updated) data: dict[str, int | float | datetime] = { "file": round(size / 1e6, 2), "bytes": size, "last_updated": last_updated, + "created": created, } return data diff --git a/homeassistant/components/filesize/icons.json b/homeassistant/components/filesize/icons.json index 15829589853..059a51a9e34 100644 --- a/homeassistant/components/filesize/icons.json +++ b/homeassistant/components/filesize/icons.json @@ -9,6 +9,9 @@ }, "last_updated": { "default": "mdi:file" + }, + "created": { + "default": "mdi:file" } } } diff --git a/homeassistant/components/filesize/sensor.py b/homeassistant/components/filesize/sensor.py index 71a4e50edfe..2eb170af99d 100644 --- a/homeassistant/components/filesize/sensor.py +++ b/homeassistant/components/filesize/sensor.py @@ -47,6 +47,13 @@ SENSOR_TYPES = ( device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, ), + SensorEntityDescription( + key="created", + translation_key="created", + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + entity_category=EntityCategory.DIAGNOSTIC, + ), ) @@ -75,7 +82,6 @@ class FilesizeEntity(CoordinatorEntity[FileSizeCoordinator], SensorEntity): ) -> None: """Initialize the Filesize sensor.""" super().__init__(coordinator) - base_name = str(coordinator.path.absolute()).rsplit("/", maxsplit=1)[-1] self._attr_unique_id = ( entry_id if description.key == "file" else f"{entry_id}-{description.key}" ) @@ -83,7 +89,6 @@ class FilesizeEntity(CoordinatorEntity[FileSizeCoordinator], SensorEntity): self._attr_device_info = DeviceInfo( entry_type=DeviceEntryType.SERVICE, identifiers={(DOMAIN, entry_id)}, - name=base_name, ) @property diff --git a/homeassistant/components/filesize/strings.json b/homeassistant/components/filesize/strings.json index 3323c3411b2..6623cf9c375 100644 --- a/homeassistant/components/filesize/strings.json +++ b/homeassistant/components/filesize/strings.json @@ -5,6 +5,11 @@ "data": { "file_path": "Path to file" } + }, + "reconfigure": { + "data": { + "file_path": "[%key:component::filesize::config::step::user::data::file_path%]" + } } }, "error": { @@ -12,7 +17,8 @@ "not_allowed": "Path is not allowed" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "title": "Filesize", @@ -26,6 +32,9 @@ }, "last_updated": { "name": "Last updated" + }, + "created": { + "name": "Created" } } } diff --git 
a/homeassistant/components/filter/icons.json b/homeassistant/components/filter/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/filter/icons.json +++ b/homeassistant/components/filter/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/fints/manifest.json b/homeassistant/components/fints/manifest.json index 063e612d35d..0a9c5389cd9 100644 --- a/homeassistant/components/fints/manifest.json +++ b/homeassistant/components/fints/manifest.json @@ -6,5 +6,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["fints", "mt_940", "sepaxml"], + "quality_scale": "legacy", "requirements": ["fints==3.1.0"] } diff --git a/homeassistant/components/fints/sensor.py b/homeassistant/components/fints/sensor.py index 8a92850ad47..a1cd565153f 100644 --- a/homeassistant/components/fints/sensor.py +++ b/homeassistant/components/fints/sensor.py @@ -4,12 +4,12 @@ from __future__ import annotations from collections import namedtuple from datetime import timedelta -from functools import cached_property import logging from typing import Any from fints.client import FinTS3PinTanClient from fints.models import SEPAAccount +from propcache import cached_property import voluptuous as vol from homeassistant.components.sensor import ( @@ -89,7 +89,7 @@ def setup_platform( for account in balance_accounts: if config[CONF_ACCOUNTS] and account.iban not in account_config: - _LOGGER.info("Skipping account %s for bank %s", account.iban, fints_name) + _LOGGER.debug("Skipping account %s for bank %s", account.iban, fints_name) continue if not (account_name := account_config.get(account.iban)): @@ -99,7 +99,7 @@ def setup_platform( for account in holdings_accounts: if config[CONF_HOLDINGS] and account.accountnumber not in holdings_config: - _LOGGER.info( + _LOGGER.debug( "Skipping holdings %s for bank %s", account.accountnumber, fints_name ) continue diff --git a/homeassistant/components/fireservicerota/__init__.py b/homeassistant/components/fireservicerota/__init__.py index 9173a2b3392..aa303a08795 100644 --- a/homeassistant/components/fireservicerota/__init__.py +++ b/homeassistant/components/fireservicerota/__init__.py @@ -46,6 +46,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="duty binary sensor", update_method=async_update_data, update_interval=MIN_TIME_BETWEEN_UPDATES, diff --git a/homeassistant/components/firmata/config_flow.py b/homeassistant/components/firmata/config_flow.py index 571df351b25..4c0f800fff4 100644 --- a/homeassistant/components/firmata/config_flow.py +++ b/homeassistant/components/firmata/config_flow.py @@ -19,9 +19,7 @@ class FirmataFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a firmata board as a config entry. This flow is triggered by `async_setup` for configured boards. @@ -30,14 +28,14 @@ class FirmataFlowHandler(ConfigFlow, domain=DOMAIN): config entry yet (based on entry_id). It validates a connection and then adds the entry. 
""" - name = f"serial-{import_config[CONF_SERIAL_PORT]}" - import_config[CONF_NAME] = name + name = f"serial-{import_data[CONF_SERIAL_PORT]}" + import_data[CONF_NAME] = name # Connect to the board to verify connection and then shutdown # If either fail then we cannot continue _LOGGER.debug("Connecting to Firmata board %s to test connection", name) try: - api = await get_board(import_config) + api = await get_board(import_data) await api.shutdown() except RuntimeError as err: _LOGGER.error("Error connecting to PyMata board %s: %s", name, err) @@ -54,6 +52,4 @@ class FirmataFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="cannot_connect") _LOGGER.debug("Connection test to Firmata board %s successful", name) - return self.async_create_entry( - title=import_config[CONF_NAME], data=import_config - ) + return self.async_create_entry(title=import_data[CONF_NAME], data=import_data) diff --git a/homeassistant/components/firmata/manifest.json b/homeassistant/components/firmata/manifest.json index a35b6f179ce..363b5bd60c6 100644 --- a/homeassistant/components/firmata/manifest.json +++ b/homeassistant/components/firmata/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/firmata", "iot_class": "local_push", "loggers": ["pymata_express"], + "quality_scale": "legacy", "requirements": ["pymata-express==1.19"] } diff --git a/homeassistant/components/fitbit/api.py b/homeassistant/components/fitbit/api.py index 1eed5acbcca..e5ae88c5420 100644 --- a/homeassistant/components/fitbit/api.py +++ b/homeassistant/components/fitbit/api.py @@ -156,8 +156,7 @@ class OAuthFitbitApi(FitbitApi): async def async_get_access_token(self) -> dict[str, Any]: """Return a valid access token for the Fitbit API.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token diff --git a/homeassistant/components/fitbit/config_flow.py b/homeassistant/components/fitbit/config_flow.py index 0ae1973b5fb..d5b33a731e3 100644 --- a/homeassistant/components/fitbit/config_flow.py +++ b/homeassistant/components/fitbit/config_flow.py @@ -4,7 +4,7 @@ from collections.abc import Mapping import logging from typing import Any -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow @@ -22,8 +22,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -34,16 +32,13 @@ class OAuth2FlowHandler( """Extra data that needs to be appended to the authorize url.""" return { "scope": " ".join(OAUTH_SCOPES), - "prompt": "consent" if not self.reauth_entry else "none", + "prompt": "consent" if self.source != SOURCE_REAUTH else "none", } async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -82,17 +77,12 @@ class OAuth2FlowHandler( _LOGGER.error("Failed to fetch user profile for Fitbit API: %s", err) return self.async_abort(reason="cannot_connect") - if self.reauth_entry: - if self.reauth_entry.unique_id != 
profile.encoded_id:
-                return self.async_abort(reason="wrong_account")
-            self.hass.config_entries.async_update_entry(self.reauth_entry, data=data)
-            await self.hass.config_entries.async_reload(self.reauth_entry.entry_id)
-            return self.async_abort(reason="reauth_successful")
-
         await self.async_set_unique_id(profile.encoded_id)
+        if self.source == SOURCE_REAUTH:
+            self._abort_if_unique_id_mismatch(reason="wrong_account")
+            return self.async_update_reload_and_abort(
+                self._get_reauth_entry(), data=data
+            )
+
         self._abort_if_unique_id_configured()
         return self.async_create_entry(title=profile.display_name, data=data)
-
-    async def async_step_import(self, data: dict[str, Any]) -> ConfigFlowResult:
-        """Handle import from YAML."""
-        return await self.async_oauth_create_entry(data)
diff --git a/homeassistant/components/fitbit/quality_scale.yaml b/homeassistant/components/fitbit/quality_scale.yaml
new file mode 100644
index 00000000000..abf127cdb98
--- /dev/null
+++ b/homeassistant/components/fitbit/quality_scale.yaml
@@ -0,0 +1,70 @@
+rules:
+  # Bronze
+  action-setup:
+    status: exempt
+    comment: The integration has no actions.
+  appropriate-polling: done
+  brands: done
+  common-modules: done
+  config-flow: done
+  config-flow-test-coverage: done
+  dependency-transparency: todo
+  docs-actions:
+    status: exempt
+    comment: There are no actions in the Fitbit integration.
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: todo
+  entity-event-setup:
+    status: exempt
+    comment: Fitbit is a polling integration that does not use async events.
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data:
+    status: todo
+    comment: |
+      The integration uses `hass.data` for data associated with a configuration
+      entry and needs to be updated to use `runtime_data`.
+ test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: todo + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: todo + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: todo + diagnostics: todo + discovery: todo + discovery-update-info: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/fitbit/sensor.py b/homeassistant/components/fitbit/sensor.py index ab9a593e195..d58dad4ca67 100644 --- a/homeassistant/components/fitbit/sensor.py +++ b/homeassistant/components/fitbit/sensor.py @@ -6,30 +6,16 @@ from collections.abc import Callable from dataclasses import dataclass import datetime import logging -import os from typing import Any, Final, cast -from fitbit import Fitbit -from oauthlib.oauth2.rfc6749.errors import OAuth2Error -import voluptuous as vol - -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_TOKEN, - CONF_UNIT_SYSTEM, PERCENTAGE, EntityCategory, UnitOfLength, @@ -38,33 +24,13 @@ from homeassistant.const import ( UnitOfVolume, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.data_entry_flow import FlowResultType -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.icon import icon_for_battery_level -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from homeassistant.util.json import load_json_object from .api import FitbitApi -from .const import ( - ATTR_ACCESS_TOKEN, - ATTR_LAST_SAVED_AT, - ATTR_REFRESH_TOKEN, - ATTRIBUTION, - BATTERY_LEVELS, - CONF_CLOCK_FORMAT, - CONF_MONITORED_RESOURCES, - DEFAULT_CLOCK_FORMAT, - DEFAULT_CONFIG, - DOMAIN, - FITBIT_CONFIG_FILE, - FITBIT_DEFAULT_RESOURCES, - FitbitScope, - FitbitUnitSystem, -) +from .const import ATTRIBUTION, BATTERY_LEVELS, DOMAIN, FitbitScope, FitbitUnitSystem from .coordinator import FitbitData, FitbitDeviceCoordinator from .exceptions import FitbitApiException, FitbitAuthException from .model import FitbitDevice, config_from_entry_data @@ -75,6 +41,8 @@ _CONFIGURING: dict[str, str] = {} 
SCAN_INTERVAL: Final = datetime.timedelta(minutes=30) +FITBIT_TRACKER_SUBSTRING = "/tracker/" + def _default_value_fn(result: dict[str, Any]) -> str: """Parse a Fitbit timeseries API responses.""" @@ -156,11 +124,34 @@ class FitbitSensorEntityDescription(SensorEntityDescription): unit_fn: Callable[[FitbitUnitSystem], str | None] = lambda x: None scope: FitbitScope | None = None + @property + def is_tracker(self) -> bool: + """Return if the entity is a tracker.""" + return FITBIT_TRACKER_SUBSTRING in self.key + + +def _build_device_info( + config_entry: ConfigEntry, entity_description: FitbitSensorEntityDescription +) -> DeviceInfo: + """Build device info for sensor entities info across devices.""" + unique_id = cast(str, config_entry.unique_id) + if entity_description.is_tracker: + return DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, f"{unique_id}_tracker")}, + translation_key="tracker", + translation_placeholders={"display_name": config_entry.title}, + ) + return DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, unique_id)}, + ) + FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( FitbitSensorEntityDescription( key="activities/activityCalories", - name="Activity Calories", + translation_key="activity_calories", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -169,7 +160,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/calories", - name="Calories", + translation_key="calories", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -177,7 +168,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/caloriesBMR", - name="Calories BMR", + translation_key="calories_bmr", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -187,7 +178,6 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/distance", - name="Distance", icon="mdi:map-marker", device_class=SensorDeviceClass.DISTANCE, value_fn=_distance_value_fn, @@ -197,7 +187,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/elevation", - name="Elevation", + translation_key="elevation", icon="mdi:walk", device_class=SensorDeviceClass.DISTANCE, unit_fn=_elevation_unit, @@ -207,7 +197,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/floors", - name="Floors", + translation_key="floors", native_unit_of_measurement="floors", icon="mdi:walk", scope=FitbitScope.ACTIVITY, @@ -216,7 +206,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/heart", - name="Resting Heart Rate", + translation_key="resting_heart_rate", native_unit_of_measurement="bpm", icon="mdi:heart-pulse", value_fn=_int_value_or_none("restingHeartRate"), @@ -225,7 +215,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/minutesFairlyActive", - name="Minutes Fairly Active", + translation_key="minutes_fairly_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:walk", device_class=SensorDeviceClass.DURATION, @@ -235,7 +225,7 @@ FITBIT_RESOURCES_LIST: 
Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/minutesLightlyActive", - name="Minutes Lightly Active", + translation_key="minutes_lightly_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:walk", device_class=SensorDeviceClass.DURATION, @@ -245,7 +235,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/minutesSedentary", - name="Minutes Sedentary", + translation_key="minutes_sedentary", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:seat-recline-normal", device_class=SensorDeviceClass.DURATION, @@ -255,7 +245,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/minutesVeryActive", - name="Minutes Very Active", + translation_key="minutes_very_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:run", device_class=SensorDeviceClass.DURATION, @@ -265,7 +255,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/steps", - name="Steps", + translation_key="steps", native_unit_of_measurement="steps", icon="mdi:walk", scope=FitbitScope.ACTIVITY, @@ -273,7 +263,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/activityCalories", - name="Tracker Activity Calories", + translation_key="activity_calories", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -283,7 +273,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/calories", - name="Tracker Calories", + translation_key="calories", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -293,7 +283,6 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/distance", - name="Tracker Distance", icon="mdi:map-marker", device_class=SensorDeviceClass.DISTANCE, value_fn=_distance_value_fn, @@ -305,7 +294,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/elevation", - name="Tracker Elevation", + translation_key="elevation", icon="mdi:walk", device_class=SensorDeviceClass.DISTANCE, unit_fn=_elevation_unit, @@ -316,7 +305,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/floors", - name="Tracker Floors", + translation_key="floors", native_unit_of_measurement="floors", icon="mdi:walk", scope=FitbitScope.ACTIVITY, @@ -326,7 +315,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/minutesFairlyActive", - name="Tracker Minutes Fairly Active", + translation_key="minutes_fairly_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:walk", device_class=SensorDeviceClass.DURATION, @@ -337,7 +326,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/minutesLightlyActive", - name="Tracker Minutes Lightly Active", + translation_key="minutes_lightly_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:walk", device_class=SensorDeviceClass.DURATION, @@ -348,7 +337,7 @@ 
FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/minutesSedentary", - name="Tracker Minutes Sedentary", + translation_key="minutes_sedentary", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:seat-recline-normal", device_class=SensorDeviceClass.DURATION, @@ -359,7 +348,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/minutesVeryActive", - name="Tracker Minutes Very Active", + translation_key="minutes_very_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:run", device_class=SensorDeviceClass.DURATION, @@ -370,7 +359,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/steps", - name="Tracker Steps", + translation_key="steps", native_unit_of_measurement="steps", icon="mdi:walk", scope=FitbitScope.ACTIVITY, @@ -380,7 +369,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="body/bmi", - name="BMI", + translation_key="bmi", native_unit_of_measurement="BMI", icon="mdi:human", state_class=SensorStateClass.MEASUREMENT, @@ -391,7 +380,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="body/fat", - name="Body Fat", + translation_key="body_fat", native_unit_of_measurement=PERCENTAGE, icon="mdi:human", state_class=SensorStateClass.MEASUREMENT, @@ -402,7 +391,6 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="body/weight", - name="Weight", icon="mdi:human", state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.WEIGHT, @@ -412,7 +400,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/awakeningsCount", - name="Awakenings Count", + translation_key="awakenings_count", native_unit_of_measurement="times awaken", icon="mdi:sleep", scope=FitbitScope.SLEEP, @@ -421,7 +409,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/efficiency", - name="Sleep Efficiency", + translation_key="sleep_efficiency", native_unit_of_measurement=PERCENTAGE, icon="mdi:sleep", state_class=SensorStateClass.MEASUREMENT, @@ -430,7 +418,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/minutesAfterWakeup", - name="Minutes After Wakeup", + translation_key="minutes_after_wakeup", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:sleep", device_class=SensorDeviceClass.DURATION, @@ -440,7 +428,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/minutesAsleep", - name="Sleep Minutes Asleep", + translation_key="sleep_minutes_asleep", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:sleep", device_class=SensorDeviceClass.DURATION, @@ -450,7 +438,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/minutesAwake", - name="Sleep Minutes Awake", + translation_key="sleep_minutes_awake", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:sleep", device_class=SensorDeviceClass.DURATION, @@ -460,7 +448,7 @@ FITBIT_RESOURCES_LIST: 
Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/minutesToFallAsleep", - name="Sleep Minutes to Fall Asleep", + translation_key="sleep_minutes_to_fall_asleep", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:sleep", device_class=SensorDeviceClass.DURATION, @@ -470,7 +458,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/timeInBed", - name="Sleep Time in Bed", + translation_key="sleep_time_in_bed", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:hotel", device_class=SensorDeviceClass.DURATION, @@ -480,7 +468,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="foods/log/caloriesIn", - name="Calories In", + translation_key="calories_in", native_unit_of_measurement="cal", icon="mdi:food-apple", state_class=SensorStateClass.TOTAL_INCREASING, @@ -489,7 +477,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="foods/log/water", - name="Water", + translation_key="water", icon="mdi:cup-water", unit_fn=_water_unit, state_class=SensorStateClass.TOTAL_INCREASING, @@ -501,14 +489,14 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( # Different description depending on clock format SLEEP_START_TIME = FitbitSensorEntityDescription( key="sleep/startTime", - name="Sleep Start Time", + translation_key="sleep_start_time", icon="mdi:clock", scope=FitbitScope.SLEEP, entity_category=EntityCategory.DIAGNOSTIC, ) SLEEP_START_TIME_12HR = FitbitSensorEntityDescription( key="sleep/startTime", - name="Sleep Start Time", + translation_key="sleep_start_time", icon="mdi:clock", value_fn=_clock_format_12h, scope=FitbitScope.SLEEP, @@ -533,126 +521,6 @@ FITBIT_RESOURCE_BATTERY_LEVEL = FitbitSensorEntityDescription( native_unit_of_measurement=PERCENTAGE, ) -FITBIT_RESOURCES_KEYS: Final[list[str]] = [ - desc.key - for desc in (*FITBIT_RESOURCES_LIST, FITBIT_RESOURCE_BATTERY, SLEEP_START_TIME) -] - -PLATFORM_SCHEMA: Final = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional( - CONF_MONITORED_RESOURCES, default=FITBIT_DEFAULT_RESOURCES - ): vol.All(cv.ensure_list, [vol.In(FITBIT_RESOURCES_KEYS)]), - vol.Optional(CONF_CLOCK_FORMAT, default=DEFAULT_CLOCK_FORMAT): vol.In( - ["12H", "24H"] - ), - vol.Optional(CONF_UNIT_SYSTEM, default=FitbitUnitSystem.LEGACY_DEFAULT): vol.In( - [ - FitbitUnitSystem.EN_GB, - FitbitUnitSystem.EN_US, - FitbitUnitSystem.METRIC, - FitbitUnitSystem.LEGACY_DEFAULT, - ] - ), - } -) - -# Only import configuration if it was previously created successfully with all -# of the following fields. 
-FITBIT_CONF_KEYS = [ - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - ATTR_ACCESS_TOKEN, - ATTR_REFRESH_TOKEN, - ATTR_LAST_SAVED_AT, -] - - -def load_config_file(config_path: str) -> dict[str, Any] | None: - """Load existing valid fitbit.conf from disk for import.""" - if os.path.isfile(config_path): - config_file = load_json_object(config_path) - if config_file != DEFAULT_CONFIG and all( - key in config_file for key in FITBIT_CONF_KEYS - ): - return config_file - return None - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the Fitbit sensor.""" - config_path = hass.config.path(FITBIT_CONFIG_FILE) - config_file = await hass.async_add_executor_job(load_config_file, config_path) - _LOGGER.debug("loaded config file: %s", config_file) - - if config_file is not None: - _LOGGER.debug("Importing existing fitbit.conf application credentials") - - # Refresh the token before importing to ensure it is working and not - # expired on first initialization. - authd_client = Fitbit( - config_file[CONF_CLIENT_ID], - config_file[CONF_CLIENT_SECRET], - access_token=config_file[ATTR_ACCESS_TOKEN], - refresh_token=config_file[ATTR_REFRESH_TOKEN], - expires_at=config_file[ATTR_LAST_SAVED_AT], - refresh_cb=lambda x: None, - ) - try: - updated_token = await hass.async_add_executor_job( - authd_client.client.refresh_token - ) - except OAuth2Error as err: - _LOGGER.debug("Unable to import fitbit OAuth2 credentials: %s", err) - translation_key = "deprecated_yaml_import_issue_cannot_connect" - else: - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential( - config_file[CONF_CLIENT_ID], config_file[CONF_CLIENT_SECRET] - ), - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - "auth_implementation": DOMAIN, - CONF_TOKEN: { - ATTR_ACCESS_TOKEN: updated_token[ATTR_ACCESS_TOKEN], - ATTR_REFRESH_TOKEN: updated_token[ATTR_REFRESH_TOKEN], - "expires_at": updated_token["expires_at"], - "scope": " ".join(updated_token.get("scope", [])), - }, - CONF_CLOCK_FORMAT: config[CONF_CLOCK_FORMAT], - CONF_UNIT_SYSTEM: config[CONF_UNIT_SYSTEM], - CONF_MONITORED_RESOURCES: config[CONF_MONITORED_RESOURCES], - }, - ) - translation_key = "deprecated_yaml_import" - if ( - result.get("type") == FlowResultType.ABORT - and result.get("reason") == "cannot_connect" - ): - translation_key = "deprecated_yaml_import_issue_cannot_connect" - else: - translation_key = "deprecated_yaml_no_import" - - async_create_issue( - hass, - DOMAIN, - "deprecated_yaml", - breaks_in_ha_version="2024.5.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key=translation_key, - ) - async def async_setup_entry( hass: HomeAssistant, @@ -694,6 +562,7 @@ async def async_setup_entry( description, units=description.unit_fn(unit_system), enable_default_override=is_explicit_enable(description), + device_info=_build_device_info(entry, description), ) for description in resource_list if is_allowed_resource(description) @@ -728,6 +597,7 @@ class FitbitSensor(SensorEntity): entity_description: FitbitSensorEntityDescription _attr_attribution = ATTRIBUTION + _attr_has_entity_name = True def __init__( self, @@ -737,6 +607,7 @@ class FitbitSensor(SensorEntity): description: FitbitSensorEntityDescription, units: str | None, enable_default_override: bool, + device_info: DeviceInfo, ) -> None: """Initialize the Fitbit sensor.""" self.config_entry = 
config_entry @@ -744,6 +615,7 @@ class FitbitSensor(SensorEntity): self.api = api self._attr_unique_id = f"{user_profile_id}_{description.key}" + self._attr_device_info = device_info if units is not None: self._attr_native_unit_of_measurement = units diff --git a/homeassistant/components/fitbit/strings.json b/homeassistant/components/fitbit/strings.json index e1ca1b01f7a..9029a8265bb 100644 --- a/homeassistant/components/fitbit/strings.json +++ b/homeassistant/components/fitbit/strings.json @@ -38,21 +38,82 @@ }, "battery_level": { "name": "Battery level" + }, + "activity_calories": { + "name": "Activity calories" + }, + "calories": { + "name": "Calories" + }, + "calories_bmr": { + "name": "Calories BMR" + }, + "elevation": { + "name": "Elevation" + }, + "floors": { + "name": "Floors" + }, + "resting_heart_rate": { + "name": "Resting heart rate" + }, + "minutes_fairly_active": { + "name": "Minutes fairly active" + }, + "minutes_lightly_active": { + "name": "Minutes lightly active" + }, + "minutes_sedentary": { + "name": "Minutes sedentary" + }, + "minutes_very_active": { + "name": "Minutes very active" + }, + "sleep_start_time": { + "name": "Sleep start time" + }, + "steps": { + "name": "Steps" + }, + "bmi": { + "name": "BMI" + }, + "body_fat": { + "name": "Body fat" + }, + "awakenings_count": { + "name": "Awakenings count" + }, + "sleep_efficiency": { + "name": "Sleep efficiency" + }, + "minutes_after_wakeup": { + "name": "Minutes after wakeup" + }, + "sleep_minutes_asleep": { + "name": "Sleep minutes asleep" + }, + "sleep_minutes_awake": { + "name": "Sleep minutes awake" + }, + "sleep_minutes_to_fall_asleep": { + "name": "Sleep minutes to fall asleep" + }, + "sleep_time_in_bed": { + "name": "Sleep time in bed" + }, + "calories_in": { + "name": "Calories in" + }, + "water": { + "name": "Water" } } }, - "issues": { - "deprecated_yaml_no_import": { - "title": "Fitbit YAML configuration is being removed", - "description": "Configuring Fitbit using YAML is being removed.\n\nRemove the `fitbit` configuration from your configuration.yaml file and remove fitbit.conf if it exists and restart Home Assistant and [set up the integration](/config/integrations/dashboard/add?domain=fitbit) manually." - }, - "deprecated_yaml_import": { - "title": "Fitbit YAML configuration is being removed", - "description": "Configuring Fitbit using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically, including OAuth Application Credentials.\n\nRemove the `fitbit` configuration from your configuration.yaml file and remove fitbit.conf and restart Home Assistant to fix this issue." - }, - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Fitbit YAML configuration import failed", - "description": "Configuring Fitbit using YAML is being removed but there was a connection error importing your YAML configuration.\n\nRestart Home Assistant to try again or remove the Fitbit YAML configuration from your configuration.yaml file and remove the fitbit.conf and continue to [set up the integration](/config/integrations/dashboard/add?domain=fitbit) manually." + + "device": { + "tracker": { + "name": "{display_name} tracker" } } } diff --git a/homeassistant/components/fivem/strings.json b/homeassistant/components/fivem/strings.json index abdef61fb28..fd58922a481 100644 --- a/homeassistant/components/fivem/strings.json +++ b/homeassistant/components/fivem/strings.json @@ -15,7 +15,7 @@ "error": { "cannot_connect": "Failed to connect. 
Please check the host and port and try again. Also ensure that you are running the latest FiveM server.",
       "invalid_game_name": "The api of the game you are trying to connect to is not a FiveM game.",
-      "unknown_error": "[%key:common::config_flow::error::unknown%]"
+      "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
diff --git a/homeassistant/components/fixer/manifest.json b/homeassistant/components/fixer/manifest.json
index 052a594b745..3c457919ac3 100644
--- a/homeassistant/components/fixer/manifest.json
+++ b/homeassistant/components/fixer/manifest.json
@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/fixer",
   "iot_class": "cloud_polling",
   "loggers": ["fixerio"],
+  "quality_scale": "legacy",
   "requirements": ["fixerio==1.0.0a0"]
 }
diff --git a/homeassistant/components/fjaraskupan/coordinator.py b/homeassistant/components/fjaraskupan/coordinator.py
index 22811ce534b..90b2c617239 100644
--- a/homeassistant/components/fjaraskupan/coordinator.py
+++ b/homeassistant/components/fjaraskupan/coordinator.py
@@ -3,11 +3,18 @@
 from __future__ import annotations
 
 from collections.abc import AsyncIterator
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager, contextmanager
 from datetime import timedelta
 import logging
 
-from fjaraskupan import Device, State
+from fjaraskupan import (
+    Device,
+    FjaraskupanConnectionError,
+    FjaraskupanError,
+    FjaraskupanReadError,
+    FjaraskupanWriteError,
+    State,
+)
 
 from homeassistant.components.bluetooth import (
     BluetoothServiceInfoBleak,
@@ -19,9 +26,37 @@ from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
+from .const import DOMAIN
+
 _LOGGER = logging.getLogger(__name__)
 
 
+@contextmanager
+def exception_converter():
+    """Convert exceptions to Home Assistant translated ones."""
+
+    try:
+        yield
+    except FjaraskupanWriteError as exception:
+        raise HomeAssistantError(
+            translation_domain=DOMAIN, translation_key="write_error"
+        ) from exception
+    except FjaraskupanReadError as exception:
+        raise HomeAssistantError(
+            translation_domain=DOMAIN, translation_key="read_error"
+        ) from exception
+    except FjaraskupanConnectionError as exception:
+        raise HomeAssistantError(
+            translation_domain=DOMAIN, translation_key="connection_error"
+        ) from exception
+    except FjaraskupanError as exception:
+        raise HomeAssistantError(
+            translation_domain=DOMAIN,
+            translation_key="unexpected_error",
+            translation_placeholders={"msg": str(exception)},
+        ) from exception
+
+
 class UnableToConnect(HomeAssistantError):
     """Exception to indicate that we cannot connect to device."""
 
@@ -71,8 +106,11 @@ class FjaraskupanCoordinator(DataUpdateCoordinator[State]):
             )
         ) is None:
             raise UpdateFailed("No connectable path to device")
-        async with self.device.connect(ble_device) as device:
-            await device.update()
+
+        with exception_converter():
+            async with self.device.connect(ble_device) as device:
+                await device.update()
+
         return self.device.state
 
     def detection_callback(self, service_info: BluetoothServiceInfoBleak) -> None:
@@ -90,7 +128,8 @@ class FjaraskupanCoordinator(DataUpdateCoordinator[State]):
         ) is None:
             raise UnableToConnect("No connectable path to device")
 
-        async with self.device.connect(ble_device) as device:
-            yield device
+        with exception_converter():
+            async with 
self.device.connect(ble_device) as device: + yield device self.async_set_updated_data(self.device.state) diff --git a/homeassistant/components/fjaraskupan/fan.py b/homeassistant/components/fjaraskupan/fan.py index 864160cb464..540a7dd410d 100644 --- a/homeassistant/components/fjaraskupan/fan.py +++ b/homeassistant/components/fjaraskupan/fan.py @@ -71,7 +71,7 @@ class Fan(CoordinatorEntity[FjaraskupanCoordinator], FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False + _attr_has_entity_name = True _attr_name = None diff --git a/homeassistant/components/fjaraskupan/strings.json b/homeassistant/components/fjaraskupan/strings.json index d91cc47dea1..024152a0a00 100644 --- a/homeassistant/components/fjaraskupan/strings.json +++ b/homeassistant/components/fjaraskupan/strings.json @@ -24,5 +24,19 @@ "name": "Periodic venting" } } + }, + "exceptions": { + "write_error": { + "message": "Failed to write data to device" + }, + "read_error": { + "message": "Failed to read data from device" + }, + "connection_error": { + "message": "Failed to connect to device" + }, + "unexpected_error": { + "message": "Unexpected error occurred: {msg}" + } } } diff --git a/homeassistant/components/fleetgo/manifest.json b/homeassistant/components/fleetgo/manifest.json index 9e916bd7fcd..ad00ca3b7b1 100644 --- a/homeassistant/components/fleetgo/manifest.json +++ b/homeassistant/components/fleetgo/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/fleetgo", "iot_class": "cloud_polling", "loggers": ["geopy", "ritassist"], + "quality_scale": "legacy", "requirements": ["ritassist==0.9.2"] } diff --git a/homeassistant/components/flexit/climate.py b/homeassistant/components/flexit/climate.py index d456fbef6fc..8be5df4eca7 100644 --- a/homeassistant/components/flexit/climate.py +++ b/homeassistant/components/flexit/climate.py @@ -70,7 +70,6 @@ class Flexit(ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.FAN_MODE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, hub: ModbusHub, modbus_slave: int | None, name: str | None diff --git a/homeassistant/components/flexit/manifest.json b/homeassistant/components/flexit/manifest.json index 98e5a3734a8..b3b66fb871e 100644 --- a/homeassistant/components/flexit/manifest.json +++ b/homeassistant/components/flexit/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["modbus"], "documentation": "https://www.home-assistant.io/integrations/flexit", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/flexit_bacnet/climate.py b/homeassistant/components/flexit_bacnet/climate.py index 0526a0d6bd3..a2291dea9d6 100644 --- a/homeassistant/components/flexit_bacnet/climate.py +++ b/homeassistant/components/flexit_bacnet/climate.py @@ -74,7 +74,6 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_max_temp = MAX_TEMP _attr_min_temp = MIN_TEMP - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: FlexitCoordinator) -> None: """Initialize the Flexit unit.""" diff --git a/homeassistant/components/flexit_bacnet/entity.py b/homeassistant/components/flexit_bacnet/entity.py index bd92550db19..38efa838c93 100644 --- a/homeassistant/components/flexit_bacnet/entity.py +++ 
b/homeassistant/components/flexit_bacnet/entity.py @@ -26,6 +26,7 @@ class FlexitEntity(CoordinatorEntity[FlexitCoordinator]): name=coordinator.device.device_name, manufacturer="Flexit", model="Nordic", + model_id=coordinator.device.model, serial_number=coordinator.device.serial_number, ) diff --git a/homeassistant/components/flexit_bacnet/number.py b/homeassistant/components/flexit_bacnet/number.py index 6e6e2eea980..029ce896445 100644 --- a/homeassistant/components/flexit_bacnet/number.py +++ b/homeassistant/components/flexit_bacnet/number.py @@ -29,6 +29,8 @@ class FlexitNumberEntityDescription(NumberEntityDescription): """Describes a Flexit number entity.""" native_value_fn: Callable[[FlexitBACnet], float] + native_max_value_fn: Callable[[FlexitBACnet], int] + native_min_value_fn: Callable[[FlexitBACnet], int] set_native_value_fn: Callable[[FlexitBACnet], Callable[[int], Awaitable[None]]] @@ -37,121 +39,121 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( key="away_extract_fan_setpoint", translation_key="away_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_away, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_away, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="away_supply_fan_setpoint", translation_key="away_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_away, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_away, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="cooker_hood_extract_fan_setpoint", translation_key="cooker_hood_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_cooker, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="cooker_hood_supply_fan_setpoint", translation_key="cooker_hood_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_cooker, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="fireplace_extract_fan_setpoint", translation_key="fireplace_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_fire, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + 
native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="fireplace_supply_fan_setpoint", translation_key="fireplace_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_fire, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="high_extract_fan_setpoint", translation_key="high_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_high, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), ), FlexitNumberEntityDescription( key="high_supply_fan_setpoint", translation_key="high_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_high, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), ), FlexitNumberEntityDescription( key="home_extract_fan_setpoint", translation_key="home_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_home, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_away), ), FlexitNumberEntityDescription( key="home_supply_fan_setpoint", translation_key="home_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_home, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_away), ), ) @@ -192,6 +194,16 @@ class FlexitNumber(FlexitEntity, NumberEntity): """Return the state of the number.""" return self.entity_description.native_value_fn(self.coordinator.device) + @property + def native_max_value(self) -> float: + """Return the native max value of the number.""" + return self.entity_description.native_max_value_fn(self.coordinator.device) + + @property + def native_min_value(self) -> float: + """Return the native min value of the number.""" + return self.entity_description.native_min_value_fn(self.coordinator.device) + async def async_set_native_value(self, value: float) -> None: """Update the current value.""" set_native_value_fn = self.entity_description.set_native_value_fn( diff --git a/homeassistant/components/flexit_bacnet/sensor.py b/homeassistant/components/flexit_bacnet/sensor.py index 
2453acb90be..be5f12e480e 100644 --- a/homeassistant/components/flexit_bacnet/sensor.py +++ b/homeassistant/components/flexit_bacnet/sensor.py @@ -10,7 +10,6 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, SensorStateClass, - StateType, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -22,6 +21,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from . import FlexitCoordinator from .const import DOMAIN diff --git a/homeassistant/components/flic/binary_sensor.py b/homeassistant/components/flic/binary_sensor.py index fcfe4b6604f..cd160480674 100644 --- a/homeassistant/components/flic/binary_sensor.py +++ b/homeassistant/components/flic/binary_sensor.py @@ -108,7 +108,7 @@ def start_scanning(config, add_entities, client): def scan_completed_callback(scan_wizard, result, address, name): """Restart scan wizard to constantly check for new buttons.""" if result == pyflic.ScanWizardResult.WizardSuccess: - _LOGGER.info("Found new button %s", address) + _LOGGER.debug("Found new button %s", address) elif result != pyflic.ScanWizardResult.WizardFailedTimeout: _LOGGER.warning( "Failed to connect to button %s. Reason: %s", address, result @@ -132,7 +132,7 @@ def setup_button( timeout: int = config[CONF_TIMEOUT] ignored_click_types: list[str] | None = config.get(CONF_IGNORED_CLICK_TYPES) button = FlicButton(hass, client, address, timeout, ignored_click_types) - _LOGGER.info("Connected to button %s", address) + _LOGGER.debug("Connected to button %s", address) add_entities([button]) @@ -203,7 +203,7 @@ class FlicButton(BinarySensorEntity): time_string, ) return True - _LOGGER.info( + _LOGGER.debug( "Queued %s allowed for %s. 
Time in queue was %s", click_type, self._address, diff --git a/homeassistant/components/flic/manifest.json b/homeassistant/components/flic/manifest.json index 0442e4a7b7b..67a9a2e901c 100644 --- a/homeassistant/components/flic/manifest.json +++ b/homeassistant/components/flic/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/flic", "iot_class": "local_push", "loggers": ["pyflic"], + "quality_scale": "legacy", "requirements": ["pyflic==2.0.4"] } diff --git a/homeassistant/components/flipr/__init__.py b/homeassistant/components/flipr/__init__.py index 28515dd386f..99bddb5a0d0 100644 --- a/homeassistant/components/flipr/__init__.py +++ b/homeassistant/components/flipr/__init__.py @@ -1,22 +1,66 @@ """The Flipr integration.""" +from collections import Counter +from dataclasses import dataclass +import logging + +from flipr_api import FliprAPIRestClient + from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers import issue_registry as ir from .const import DOMAIN -from .coordinator import FliprDataUpdateCoordinator +from .coordinator import FliprDataUpdateCoordinator, FliprHubDataUpdateCoordinator -PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.SELECT, Platform.SENSOR, Platform.SWITCH] + +_LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Flipr from a config entry.""" - hass.data.setdefault(DOMAIN, {}) +@dataclass +class FliprData: + """The Flipr data class.""" - coordinator = FliprDataUpdateCoordinator(hass, entry) - await coordinator.async_config_entry_first_refresh() - hass.data[DOMAIN][entry.entry_id] = coordinator + flipr_coordinators: list[FliprDataUpdateCoordinator] + hub_coordinators: list[FliprHubDataUpdateCoordinator] + + +type FliprConfigEntry = ConfigEntry[FliprData] + + +async def async_setup_entry(hass: HomeAssistant, entry: FliprConfigEntry) -> bool: + """Set up flipr from a config entry.""" + + # Detect invalid old config entry and raise error if found + detect_invalid_old_configuration(hass, entry) + + config = entry.data + + username = config[CONF_EMAIL] + password = config[CONF_PASSWORD] + + _LOGGER.debug("Initializing Flipr client %s", username) + client = FliprAPIRestClient(username, password) + ids = await hass.async_add_executor_job(client.search_all_ids) + + _LOGGER.debug("List of devices ids : %s", ids) + + flipr_coordinators = [] + for flipr_id in ids["flipr"]: + flipr_coordinator = FliprDataUpdateCoordinator(hass, client, flipr_id) + await flipr_coordinator.async_config_entry_first_refresh() + flipr_coordinators.append(flipr_coordinator) + + hub_coordinators = [] + for hub_id in ids["hub"]: + hub_coordinator = FliprHubDataUpdateCoordinator(hass, client, hub_id) + await hub_coordinator.async_config_entry_first_refresh() + hub_coordinators.append(hub_coordinator) + + entry.runtime_data = FliprData(flipr_coordinators, hub_coordinators) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -25,9 +69,49 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await 
hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - return unload_ok + +def detect_invalid_old_configuration(hass: HomeAssistant, entry: ConfigEntry): + """Detect invalid old configuration and raise error if found.""" + + def find_duplicate_entries(entries): + values = [e.data["email"] for e in entries] + _LOGGER.debug("Detecting duplicates in values: %s", values) + return any(count > 1 for count in Counter(values).values()) + + entries = hass.config_entries.async_entries(DOMAIN) + + if find_duplicate_entries(entries): + ir.async_create_issue( + hass, + DOMAIN, + "duplicate_config", + breaks_in_ha_version="2025.4.0", + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="duplicate_config", + ) + + raise ConfigEntryError( + "Duplicate entries found for flipr with the same user email. Please remove one of them manually. Multiple fliprs will be automatically detected after restart." + ) + + +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate config entry.""" + _LOGGER.debug("Migration of flipr config from version %s", entry.version) + + if entry.version == 1: + # In version 1, we have flipr device as config entry unique id + # and one device per config entry. + # We need to migrate to a new config entry that may contain multiple devices. + # So we change the entry data to match config_flow evolution. + login = entry.data[CONF_EMAIL] + + hass.config_entries.async_update_entry(entry, version=2, unique_id=login) + + _LOGGER.debug("Migration of flipr config to version 2 successful") + + return True diff --git a/homeassistant/components/flipr/binary_sensor.py b/homeassistant/components/flipr/binary_sensor.py index a3c3e4dc8a1..cc6a9d36abc 100644 --- a/homeassistant/components/flipr/binary_sensor.py +++ b/homeassistant/components/flipr/binary_sensor.py @@ -7,11 +7,10 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FliprConfigEntry from .entity import FliprEntity BINARY_SENSORS_TYPES: tuple[BinarySensorEntityDescription, ...]
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FliprConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Defer sensor setup of flipr binary sensors.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + + coordinators = config_entry.runtime_data.flipr_coordinators async_add_entities( FliprBinarySensor(coordinator, description) for description in BINARY_SENSORS_TYPES + for coordinator in coordinators ) diff --git a/homeassistant/components/flipr/config_flow.py b/homeassistant/components/flipr/config_flow.py index 3d616feb37f..287c7108b3f 100644 --- a/homeassistant/components/flipr/config_flow.py +++ b/homeassistant/components/flipr/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from flipr_api import FliprAPIRestClient from requests.exceptions import HTTPError, Timeout @@ -11,35 +12,37 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from .const import CONF_FLIPR_ID, DOMAIN +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): str, + vol.Required(CONF_PASSWORD): str, + } +) + class FliprConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Flipr.""" - VERSION = 1 - - _username: str - _password: str - _flipr_id: str = "" - _possible_flipr_ids: list[str] + VERSION = 2 async def async_step_user( - self, user_input: dict[str, str] | None = None + self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if user_input is None: - return self._show_setup_form() - self._username = user_input[CONF_EMAIL] - self._password = user_input[CONF_PASSWORD] + errors: dict[str, str] = {} + + if user_input is not None: + client = FliprAPIRestClient( + user_input[CONF_EMAIL], user_input[CONF_PASSWORD] + ) - errors = {} - if not self._flipr_id: try: - flipr_ids = await self._authenticate_and_search_flipr() + ids = await self.hass.async_add_executor_job(client.search_all_ids) except HTTPError: errors["base"] = "invalid_auth" except (Timeout, ConnectionError): @@ -48,79 +51,25 @@ class FliprConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" _LOGGER.exception("Unexpected exception") - if not errors and not flipr_ids: - # No flipr_id found. Tell the user with an error message. + else: + _LOGGER.debug("Found flipr or hub ids : %s", ids) + + if len(ids["flipr"]) > 0 or len(ids["hub"]) > 0: + # If there is a flipr or hub, we can create a config entry. + + await self.async_set_unique_id(user_input[CONF_EMAIL]) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=f"Flipr {user_input[CONF_EMAIL]}", + data=user_input, + ) + + # if no flipr or hub found. Tell the user with an error message. errors["base"] = "no_flipr_id_found" - if errors: - return self._show_setup_form(errors) - - if len(flipr_ids) == 1: - self._flipr_id = flipr_ids[0] - else: - # If multiple flipr found (rare case), we ask the user to choose one in a select box. - # The user will have to run config_flow as many times as many fliprs he has. 
- self._possible_flipr_ids = flipr_ids - return await self.async_step_flipr_id() - - # Check if already configured - await self.async_set_unique_id(self._flipr_id) - self._abort_if_unique_id_configured() - - return self.async_create_entry( - title=self._flipr_id, - data={ - CONF_EMAIL: self._username, - CONF_PASSWORD: self._password, - CONF_FLIPR_ID: self._flipr_id, - }, - ) - - def _show_setup_form(self, errors=None): - """Show the setup form to the user.""" return self.async_show_form( step_id="user", - data_schema=vol.Schema( - {vol.Required(CONF_EMAIL): str, vol.Required(CONF_PASSWORD): str} - ), + data_schema=DATA_SCHEMA, errors=errors, ) - - async def _authenticate_and_search_flipr(self) -> list[str]: - """Validate the username and password provided and searches for a flipr id.""" - # Instantiates the flipr API that does not require async since it is has no network access. - client = FliprAPIRestClient(self._username, self._password) - - return await self.hass.async_add_executor_job(client.search_flipr_ids) - - async def async_step_flipr_id( - self, user_input: dict[str, str] | None = None - ) -> ConfigFlowResult: - """Handle the initial step.""" - if not user_input: - # Creation of a select with the proposal of flipr ids values found by API. - flipr_ids_for_form = {} - for flipr_id in self._possible_flipr_ids: - flipr_ids_for_form[flipr_id] = f"{flipr_id}" - - return self.async_show_form( - step_id="flipr_id", - data_schema=vol.Schema( - { - vol.Required(CONF_FLIPR_ID): vol.All( - vol.Coerce(str), vol.In(flipr_ids_for_form) - ) - } - ), - ) - - # Get chosen flipr_id. - self._flipr_id = user_input[CONF_FLIPR_ID] - - return await self.async_step_user( - { - CONF_EMAIL: self._username, - CONF_PASSWORD: self._password, - CONF_FLIPR_ID: self._flipr_id, - } - ) diff --git a/homeassistant/components/flipr/const.py b/homeassistant/components/flipr/const.py index d28353f4776..256426ae97a 100644 --- a/homeassistant/components/flipr/const.py +++ b/homeassistant/components/flipr/const.py @@ -2,8 +2,6 @@ DOMAIN = "flipr" -CONF_FLIPR_ID = "flipr_id" - ATTRIBUTION = "Flipr Data" MANUFACTURER = "CTAC-TECH" diff --git a/homeassistant/components/flipr/coordinator.py b/homeassistant/components/flipr/coordinator.py index afc7465498f..12fd174fe7d 100644 --- a/homeassistant/components/flipr/coordinator.py +++ b/homeassistant/components/flipr/coordinator.py @@ -2,43 +2,61 @@ from datetime import timedelta import logging +from typing import Any from flipr_api import FliprAPIRestClient from flipr_api.exceptions import FliprError -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_FLIPR_ID - _LOGGER = logging.getLogger(__name__) -class FliprDataUpdateCoordinator(DataUpdateCoordinator): - """Class to hold Flipr data retrieval.""" +class BaseDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): + """Parent class to hold Flipr and Hub data retrieval.""" - def __init__(self, hass, entry): + config_entry: ConfigEntry + + def __init__( + self, hass: HomeAssistant, client: FliprAPIRestClient, flipr_or_hub_id: str + ) -> None: """Initialize.""" - username = entry.data[CONF_EMAIL] - password = entry.data[CONF_PASSWORD] - self.flipr_id = entry.data[CONF_FLIPR_ID] - - # Establishes the connection. 
- self.client = FliprAPIRestClient(username, password) - self.entry = entry + self.device_id = flipr_or_hub_id + self.client = client super().__init__( hass, _LOGGER, - name=f"Flipr data measure for {self.flipr_id}", - update_interval=timedelta(minutes=60), + name=f"Flipr or Hub data measure for {self.device_id}", + update_interval=timedelta(minutes=15), ) - async def _async_update_data(self): + +class FliprDataUpdateCoordinator(BaseDataUpdateCoordinator[dict[str, Any]]): + """Class to hold Flipr data retrieval.""" + + async def _async_update_data(self) -> dict[str, Any]: """Fetch data from API endpoint.""" try: data = await self.hass.async_add_executor_job( - self.client.get_pool_measure_latest, self.flipr_id + self.client.get_pool_measure_latest, self.device_id + ) + except FliprError as error: + raise UpdateFailed(error) from error + + return data + + +class FliprHubDataUpdateCoordinator(BaseDataUpdateCoordinator[dict[str, Any]]): + """Class to hold Flipr hub data retrieval.""" + + async def _async_update_data(self) -> dict[str, Any]: + """Fetch data from API endpoint.""" + try: + data = await self.hass.async_add_executor_job( + self.client.get_hub_state, self.device_id ) except FliprError as error: raise UpdateFailed(error) from error diff --git a/homeassistant/components/flipr/entity.py b/homeassistant/components/flipr/entity.py index 859ffc9390b..7db60ebc890 100644 --- a/homeassistant/components/flipr/entity.py +++ b/homeassistant/components/flipr/entity.py @@ -2,32 +2,34 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import ATTRIBUTION, CONF_FLIPR_ID, DOMAIN, MANUFACTURER +from .const import ATTRIBUTION, DOMAIN, MANUFACTURER +from .coordinator import BaseDataUpdateCoordinator -class FliprEntity(CoordinatorEntity): +class FliprEntity(CoordinatorEntity[BaseDataUpdateCoordinator]): """Implements a common class elements representing the Flipr component.""" _attr_attribution = ATTRIBUTION _attr_has_entity_name = True def __init__( - self, coordinator: DataUpdateCoordinator, description: EntityDescription + self, + coordinator: BaseDataUpdateCoordinator, + description: EntityDescription, + is_flipr_hub: bool = False, ) -> None: """Initialize Flipr sensor.""" super().__init__(coordinator) + self.device_id = coordinator.device_id self.entity_description = description - if coordinator.config_entry: - flipr_id = coordinator.config_entry.data[CONF_FLIPR_ID] - self._attr_unique_id = f"{flipr_id}-{description.key}" + self._attr_unique_id = f"{self.device_id}-{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, flipr_id)}, - manufacturer=MANUFACTURER, - name=f"Flipr {flipr_id}", - ) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.device_id)}, + manufacturer=MANUFACTURER, + name=f"Flipr hub {self.device_id}" + if is_flipr_hub + else f"Flipr {self.device_id}", + ) diff --git a/homeassistant/components/flipr/manifest.json b/homeassistant/components/flipr/manifest.json index 1f9b04e3d57..cdd03770bab 100644 --- a/homeassistant/components/flipr/manifest.json +++ b/homeassistant/components/flipr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/flipr", "iot_class": "cloud_polling", "loggers": ["flipr_api"], - "requirements": ["flipr-api==1.6.0"] + 
"requirements": ["flipr-api==1.6.1"] } diff --git a/homeassistant/components/flipr/select.py b/homeassistant/components/flipr/select.py new file mode 100644 index 00000000000..b8a8f0db60a --- /dev/null +++ b/homeassistant/components/flipr/select.py @@ -0,0 +1,56 @@ +"""Select platform for the Flipr's Hub.""" + +import logging + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import FliprConfigEntry +from .entity import FliprEntity + +_LOGGER = logging.getLogger(__name__) + +SELECT_TYPES: tuple[SelectEntityDescription, ...] = ( + SelectEntityDescription( + key="hubMode", + translation_key="hub_mode", + options=["auto", "manual", "planning"], + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: FliprConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up select for Flipr hub mode.""" + coordinators = config_entry.runtime_data.hub_coordinators + + async_add_entities( + FliprHubSelect(coordinator, description, True) + for description in SELECT_TYPES + for coordinator in coordinators + ) + + +class FliprHubSelect(FliprEntity, SelectEntity): + """Select representing Hub mode.""" + + @property + def current_option(self) -> str | None: + """Return current select option.""" + _LOGGER.debug("coordinator data = %s", self.coordinator.data) + return self.coordinator.data["mode"] + + async def async_select_option(self, option: str) -> None: + """Select new mode for Hub.""" + _LOGGER.debug("Changing mode of %s to %s", self.device_id, option) + data = await self.hass.async_add_executor_job( + self.coordinator.client.set_hub_mode, + self.device_id, + option, + ) + _LOGGER.debug("New hub infos are %s", data) + self.coordinator.async_set_updated_data(data) diff --git a/homeassistant/components/flipr/sensor.py b/homeassistant/components/flipr/sensor.py index 7a1c64dc766..ba863718182 100644 --- a/homeassistant/components/flipr/sensor.py +++ b/homeassistant/components/flipr/sensor.py @@ -8,12 +8,11 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, UnitOfElectricPotential, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FliprConfigEntry from .entity import FliprEntity SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( @@ -57,14 +56,17 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FliprConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Defer sensor setup to the shared sensor module.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinators = config_entry.runtime_data.flipr_coordinators - sensors = [FliprSensor(coordinator, description) for description in SENSOR_TYPES] - async_add_entities(sensors) + async_add_entities( + FliprSensor(coordinator, description) + for description in SENSOR_TYPES + for coordinator in coordinators + ) class FliprSensor(FliprEntity, SensorEntity): diff --git a/homeassistant/components/flipr/strings.json b/homeassistant/components/flipr/strings.json index 235117afbd4..631b0ce5488 100644 --- a/homeassistant/components/flipr/strings.json +++ b/homeassistant/components/flipr/strings.json @@ -8,23 +8,13 @@ "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" } - }, - "flipr_id": { - "title": "Choose your Flipr", - "description": "Choose your Flipr ID in the list", - "data": { - "flipr_id": "Flipr ID" - } } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]", - "no_flipr_id_found": "No flipr id associated to your account for now. You should verify it is working with the Flipr's mobile app first." - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "no_flipr_id_found": "No flipr or hub is associated with your account for now. You should verify it is working with the Flipr mobile app first." } }, "entity": { @@ -49,6 +39,22 @@ "red_ox": { "name": "Red OX" } + }, + "select": { + "hub_mode": { + "name": "Mode", + "state": { + "auto": "Automatic", + "manual": "Manual", + "planning": "Planning" + } + } + } + }, + "issues": { + "duplicate_config": { + "title": "Multiple flipr configurations with the same account", + "description": "The Flipr integration has been updated to be account-based rather than device-based. This means that if you have 2 devices, you only need one configuration. For every account you have, please delete all but one configuration and restart Home Assistant for it to set up the devices linked to your account." } } } diff --git a/homeassistant/components/flipr/switch.py b/homeassistant/components/flipr/switch.py new file mode 100644 index 00000000000..65e729ec280 --- /dev/null +++ b/homeassistant/components/flipr/switch.py @@ -0,0 +1,67 @@ +"""Switch platform for the Flipr's Hub.""" + +import logging +from typing import Any + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import FliprConfigEntry +from .entity import FliprEntity + +_LOGGER = logging.getLogger(__name__) + +SWITCH_TYPES: tuple[SwitchEntityDescription, ...]
= ( + SwitchEntityDescription( + key="hubState", + name=None, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: FliprConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up switch for Flipr hub.""" + coordinators = config_entry.runtime_data.hub_coordinators + + async_add_entities( + FliprHubSwitch(coordinator, description, True) + for description in SWITCH_TYPES + for coordinator in coordinators + ) + + +class FliprHubSwitch(FliprEntity, SwitchEntity): + """Switch representing Hub state.""" + + @property + def is_on(self) -> bool: + """Return state of the switch.""" + _LOGGER.debug("coordinator data = %s", self.coordinator.data) + return self.coordinator.data["state"] + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the switch.""" + _LOGGER.debug("Switching off %s", self.device_id) + data = await self.hass.async_add_executor_job( + self.coordinator.client.set_hub_state, + self.device_id, + False, + ) + _LOGGER.debug("New hub infos are %s", data) + self.coordinator.async_set_updated_data(data) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the switch.""" + _LOGGER.debug("Switching on %s", self.device_id) + data = await self.hass.async_add_executor_job( + self.coordinator.client.set_hub_state, + self.device_id, + True, + ) + _LOGGER.debug("New hub infos are %s", data) + self.coordinator.async_set_updated_data(data) diff --git a/homeassistant/components/flo/icons.json b/homeassistant/components/flo/icons.json index 3164781c1b4..4bd0380c56c 100644 --- a/homeassistant/components/flo/icons.json +++ b/homeassistant/components/flo/icons.json @@ -10,9 +10,17 @@ } }, "services": { - "set_sleep_mode": "mdi:sleep", - "set_away_mode": "mdi:home-off", - "set_home_mode": "mdi:home", - "run_health_test": "mdi:heart-flash" + "set_sleep_mode": { + "service": "mdi:sleep" + }, + "set_away_mode": { + "service": "mdi:home-off" + }, + "set_home_mode": { + "service": "mdi:home" + }, + "run_health_test": { + "service": "mdi:heart-flash" + } } } diff --git a/homeassistant/components/flock/manifest.json b/homeassistant/components/flock/manifest.json index 29c3e1c881f..c4cd5cdadb3 100644 --- a/homeassistant/components/flock/manifest.json +++ b/homeassistant/components/flock/manifest.json @@ -3,5 +3,6 @@ "name": "Flock", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/flock", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/flume/icons.json b/homeassistant/components/flume/icons.json index 631c0645ed3..90830943689 100644 --- a/homeassistant/components/flume/icons.json +++ b/homeassistant/components/flume/icons.json @@ -10,6 +10,8 @@ } }, "services": { - "list_notifications": "mdi:bell" + "list_notifications": { + "service": "mdi:bell" + } } } diff --git a/homeassistant/components/flux/switch.py b/homeassistant/components/flux/switch.py index fac31d445cc..f7cf5b2c03a 100644 --- a/homeassistant/components/flux/switch.py +++ b/homeassistant/components/flux/switch.py @@ -13,7 +13,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -21,7 +21,7 @@ from homeassistant.components.light import ( VALID_TRANSITION, is_on, ) -from homeassistant.components.switch import DOMAIN, SwitchEntity +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SwitchEntity from 
homeassistant.const import ( ATTR_ENTITY_ID, CONF_BRIGHTNESS, @@ -43,7 +43,6 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import slugify from homeassistant.util.color import ( color_RGB_to_xy_brightness, - color_temperature_kelvin_to_mired, color_temperature_to_rgb, ) from homeassistant.util.dt import as_local, utcnow as dt_utcnow @@ -109,13 +108,13 @@ async def async_set_lights_xy(hass, lights, x_val, y_val, brightness, transition await hass.services.async_call(LIGHT_DOMAIN, SERVICE_TURN_ON, service_data) -async def async_set_lights_temp(hass, lights, mired, brightness, transition): +async def async_set_lights_temp(hass, lights, kelvin, brightness, transition): """Set color of array of lights.""" for light in lights: if is_on(hass, light): service_data = {ATTR_ENTITY_ID: light} - if mired is not None: - service_data[ATTR_COLOR_TEMP] = int(mired) + if kelvin is not None: + service_data[ATTR_COLOR_TEMP_KELVIN] = kelvin if brightness is not None: service_data[ATTR_BRIGHTNESS] = brightness if transition is not None: @@ -178,7 +177,7 @@ async def async_setup_platform( await flux.async_flux_update() service_name = slugify(f"{name} update") - hass.services.async_register(DOMAIN, service_name, async_update) + hass.services.async_register(SWITCH_DOMAIN, service_name, async_update) class FluxSwitch(SwitchEntity, RestoreEntity): @@ -350,17 +349,15 @@ class FluxSwitch(SwitchEntity, RestoreEntity): now, ) else: - # Convert to mired and clamp to allowed values - mired = color_temperature_kelvin_to_mired(temp) await async_set_lights_temp( - self.hass, self._lights, mired, brightness, self._transition + self.hass, self._lights, int(temp), brightness, self._transition ) _LOGGER.debug( ( - "Lights updated to mired:%s brightness:%s, %s%% " + "Lights updated to kelvin:%s brightness:%s, %s%% " "of %s cycle complete at %s" ), - mired, + temp, brightness, round(percentage_complete * 100), time_state, diff --git a/homeassistant/components/flux_led/__init__.py b/homeassistant/components/flux_led/__init__.py index b3e17a65a5c..1472dfa4bf1 100644 --- a/homeassistant/components/flux_led/__init__.py +++ b/homeassistant/components/flux_led/__init__.py @@ -136,7 +136,7 @@ async def _async_migrate_unique_ids(hass: HomeAssistant, entry: ConfigEntry) -> new_unique_id = f"{unique_id}{entity_unique_id[len(unique_id):]}" else: return None - _LOGGER.info( + _LOGGER.debug( "Migrating unique_id from [%s] to [%s]", entity_unique_id, new_unique_id, diff --git a/homeassistant/components/flux_led/config_flow.py b/homeassistant/components/flux_led/config_flow.py index 469c67deb22..9a02120f33a 100644 --- a/homeassistant/components/flux_led/config_flow.py +++ b/homeassistant/components/flux_led/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import contextlib -from typing import Any, cast +from typing import Any, Self, cast from flux_led.const import ( ATTR_ID, @@ -61,6 +61,8 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + host: str | None = None + def __init__(self) -> None: """Initialize the config flow.""" self._discovered_devices: dict[str, FluxLEDDiscovery] = {} @@ -69,9 +71,11 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> FluxLedOptionsFlow: """Get the options flow for the Flux LED component.""" - return FluxLedOptionsFlow(config_entry) + return FluxLedOptionsFlow() 
async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo @@ -149,10 +153,9 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN): assert device is not None await self._async_set_discovered_mac(device, self._allow_update_mac) host = device[ATTR_IPADDR] - self.context[CONF_HOST] = host - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == host: - return self.async_abort(reason="already_in_progress") + self.host = host + if self.hass.config_entries.flow.async_has_matching_flow(self): + return self.async_abort(reason="already_in_progress") if not device[ATTR_MODEL_DESCRIPTION]: mac_address = device[ATTR_ID] assert mac_address is not None @@ -173,6 +176,10 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN): await self._async_set_discovered_mac(device, True) return await self.async_step_discovery_confirm() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow.host == self.host + async def async_step_discovery_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -186,7 +193,9 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN): self._set_confirm_only() placeholders = { - "model": device[ATTR_MODEL_DESCRIPTION] or device[ATTR_MODEL], + "model": device[ATTR_MODEL_DESCRIPTION] + or device[ATTR_MODEL] + or "Magic Home", "id": mac_address[-6:], "ipaddr": device[ATTR_IPADDR], } @@ -313,10 +322,6 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN): class FluxLedOptionsFlow(OptionsFlow): """Handle flux_led options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the flux_led options flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -325,7 +330,7 @@ class FluxLedOptionsFlow(OptionsFlow): if user_input is not None: return self.async_create_entry(title="", data=user_input) - options = self._config_entry.options + options = self.config_entry.options options_schema = vol.Schema( { vol.Optional( diff --git a/homeassistant/components/flux_led/icons.json b/homeassistant/components/flux_led/icons.json index 873fcd7c441..07c27869ff7 100644 --- a/homeassistant/components/flux_led/icons.json +++ b/homeassistant/components/flux_led/icons.json @@ -54,8 +54,14 @@ } }, "services": { - "set_custom_effect": "mdi:creation", - "set_zones": "mdi:texture-box", - "set_music_mode": "mdi:music" + "set_custom_effect": { + "service": "mdi:creation" + }, + "set_zones": { + "service": "mdi:texture-box" + }, + "set_music_mode": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/flux_led/light.py b/homeassistant/components/flux_led/light.py index f4982a13c3a..ca7fb7aeea2 100644 --- a/homeassistant/components/flux_led/light.py +++ b/homeassistant/components/flux_led/light.py @@ -14,7 +14,7 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -30,10 +30,6 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, -) from .const import ( CONF_COLORS, @@ -67,7 
+63,7 @@ _LOGGER = logging.getLogger(__name__) MODE_ATTRS = { ATTR_EFFECT, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -205,8 +201,8 @@ class FluxLight( ) -> None: """Initialize the light.""" super().__init__(coordinator, base_unique_id, None) - self._attr_min_mireds = color_temperature_kelvin_to_mired(self._device.max_temp) - self._attr_max_mireds = color_temperature_kelvin_to_mired(self._device.min_temp) + self._attr_min_color_temp_kelvin = self._device.min_temp + self._attr_max_color_temp_kelvin = self._device.max_temp self._attr_supported_color_modes = _hass_color_modes(self._device) custom_effects: list[str] = [] if custom_effect_colors: @@ -222,9 +218,9 @@ class FluxLight( return self._device.brightness @property - def color_temp(self) -> int: - """Return the kelvin value of this light in mired.""" - return color_temperature_kelvin_to_mired(self._device.color_temp) + def color_temp_kelvin(self) -> int: + """Return the kelvin value of this light.""" + return self._device.color_temp @property def rgb_color(self) -> tuple[int, int, int]: @@ -304,8 +300,7 @@ class FluxLight( await self._async_set_effect(effect, brightness) return # Handle switch to CCT Color Mode - if color_temp_mired := kwargs.get(ATTR_COLOR_TEMP): - color_temp_kelvin = color_temperature_mired_to_kelvin(color_temp_mired) + if color_temp_kelvin := kwargs.get(ATTR_COLOR_TEMP_KELVIN): if ( ATTR_BRIGHTNESS not in kwargs and self.color_mode in MULTI_BRIGHTNESS_COLOR_MODES diff --git a/homeassistant/components/folder/manifest.json b/homeassistant/components/folder/manifest.json index 2436d5dbe9a..984b287c2c0 100644 --- a/homeassistant/components/folder/manifest.json +++ b/homeassistant/components/folder/manifest.json @@ -3,5 +3,6 @@ "name": "Folder", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/folder", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/folder_watcher/__init__.py b/homeassistant/components/folder_watcher/__init__.py index 800a95509c2..dd56b3aad72 100644 --- a/homeassistant/components/folder_watcher/__init__.py +++ b/homeassistant/components/folder_watcher/__init__.py @@ -4,10 +4,13 @@ from __future__ import annotations import logging import os -from typing import Any, cast +from typing import cast -import voluptuous as vol from watchdog.events import ( + DirCreatedEvent, + DirDeletedEvent, + DirModifiedEvent, + DirMovedEvent, FileClosedEvent, FileCreatedEvent, FileDeletedEvent, @@ -19,69 +22,17 @@ from watchdog.events import ( ) from watchdog.observers import Observer -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType -from .const import CONF_FOLDER, CONF_PATTERNS, DEFAULT_PATTERN, DOMAIN, PLATFORMS +from .const import CONF_FOLDER, CONF_PATTERNS, DOMAIN, PLATFORMS _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.All( - cv.ensure_list, - [ - vol.Schema( - { - vol.Required(CONF_FOLDER): cv.isdir, - vol.Optional(CONF_PATTERNS, default=[DEFAULT_PATTERN]): 
vol.All( - cv.ensure_list, [cv.string] - ), - } - ) - ], - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the folder watcher.""" - if DOMAIN in config: - conf: list[dict[str, Any]] = config[DOMAIN] - for watcher in conf: - path: str = watcher[CONF_FOLDER] - if not hass.config.is_allowed_path(path): - async_create_issue( - hass, - DOMAIN, - f"import_failed_not_allowed_path_{path}", - is_fixable=False, - is_persistent=False, - severity=IssueSeverity.ERROR, - translation_key="import_failed_not_allowed_path", - translation_placeholders={ - "path": path, - "config_variable": "allowlist_external_dirs", - }, - ) - continue - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=watcher - ) - ) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Folder watcher from a config entry.""" @@ -121,7 +72,7 @@ class EventHandler(PatternMatchingEventHandler): def __init__(self, patterns: list[str], hass: HomeAssistant, entry_id: str) -> None: """Initialise the EventHandler.""" - super().__init__(patterns) + super().__init__(patterns=patterns) self.hass = hass self.entry_id = entry_id @@ -154,19 +105,19 @@ class EventHandler(PatternMatchingEventHandler): signal = f"folder_watcher-{self.entry_id}" dispatcher_send(self.hass, signal, event.event_type, fireable) - def on_modified(self, event: FileModifiedEvent) -> None: + def on_modified(self, event: DirModifiedEvent | FileModifiedEvent) -> None: """File modified.""" self.process(event) - def on_moved(self, event: FileMovedEvent) -> None: + def on_moved(self, event: DirMovedEvent | FileMovedEvent) -> None: """File moved.""" self.process(event, moved=True) - def on_created(self, event: FileCreatedEvent) -> None: + def on_created(self, event: DirCreatedEvent | FileCreatedEvent) -> None: """File created.""" self.process(event) - def on_deleted(self, event: FileDeletedEvent) -> None: + def on_deleted(self, event: DirDeletedEvent | FileDeletedEvent) -> None: """File deleted.""" self.process(event) diff --git a/homeassistant/components/folder_watcher/config_flow.py b/homeassistant/components/folder_watcher/config_flow.py index fe43cd1c725..eb176cfaf24 100644 --- a/homeassistant/components/folder_watcher/config_flow.py +++ b/homeassistant/components/folder_watcher/config_flow.py @@ -8,10 +8,8 @@ from typing import Any import voluptuous as vol -from homeassistant.components.homeassistant import DOMAIN as HOMEASSISTANT_DOMAIN from homeassistant.config_entries import ConfigFlowResult from homeassistant.core import callback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.schema_config_entry_flow import ( SchemaCommonFlowHandler, SchemaConfigFlowHandler, @@ -46,28 +44,6 @@ async def validate_setup( return user_input -async def validate_import_setup( - handler: SchemaCommonFlowHandler, user_input: dict[str, Any] -) -> dict[str, Any]: - """Create issue on successful import.""" - async_create_issue( - handler.parent_handler.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Folder Watcher", - }, - ) - return user_input - - OPTIONS_SCHEMA = vol.Schema( { 
vol.Optional(CONF_PATTERNS, default=[DEFAULT_PATTERN]): SelectSelector( @@ -88,9 +64,6 @@ DATA_SCHEMA = vol.Schema( CONFIG_FLOW = { "user": SchemaFlowFormStep(schema=DATA_SCHEMA, validate_user_input=validate_setup), - "import": SchemaFlowFormStep( - schema=DATA_SCHEMA, validate_user_input=validate_import_setup - ), } OPTIONS_FLOW = { "init": SchemaFlowFormStep(schema=OPTIONS_SCHEMA), diff --git a/homeassistant/components/folder_watcher/manifest.json b/homeassistant/components/folder_watcher/manifest.json index 7b471e08fcc..1f0d9c595ee 100644 --- a/homeassistant/components/folder_watcher/manifest.json +++ b/homeassistant/components/folder_watcher/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["watchdog"], "quality_scale": "internal", - "requirements": ["watchdog==2.3.1"] + "requirements": ["watchdog==6.0.0"] } diff --git a/homeassistant/components/foobot/manifest.json b/homeassistant/components/foobot/manifest.json index a517f1fea6f..147a0037a18 100644 --- a/homeassistant/components/foobot/manifest.json +++ b/homeassistant/components/foobot/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/foobot", "iot_class": "cloud_polling", "loggers": ["foobot_async"], + "quality_scale": "legacy", "requirements": ["foobot_async==1.0.0"] } diff --git a/homeassistant/components/forecast_solar/config_flow.py b/homeassistant/components/forecast_solar/config_flow.py index 982f32eb07b..9a64ce6e1fb 100644 --- a/homeassistant/components/forecast_solar/config_flow.py +++ b/homeassistant/components/forecast_solar/config_flow.py @@ -41,7 +41,7 @@ class ForecastSolarFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> ForecastSolarOptionFlowHandler: """Get the options flow for this handler.""" - return ForecastSolarOptionFlowHandler(config_entry) + return ForecastSolarOptionFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -91,10 +91,6 @@ class ForecastSolarFlowHandler(ConfigFlow, domain=DOMAIN): class ForecastSolarOptionFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/forecast_solar/coordinator.py b/homeassistant/components/forecast_solar/coordinator.py index 1de5edddbef..c9c062a0c88 100644 --- a/homeassistant/components/forecast_solar/coordinator.py +++ b/homeassistant/components/forecast_solar/coordinator.py @@ -4,13 +4,13 @@ from __future__ import annotations from datetime import timedelta -from forecast_solar import Estimate, ForecastSolar +from forecast_solar import Estimate, ForecastSolar, ForecastSolarConnectionError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( CONF_AZIMUTH, @@ -65,4 +65,7 @@ class ForecastSolarDataUpdateCoordinator(DataUpdateCoordinator[Estimate]): async def _async_update_data(self) -> Estimate: """Fetch Forecast.Solar estimates.""" - return await self.forecast.estimate() + try: + return await self.forecast.estimate() + except 
ForecastSolarConnectionError as error: + raise UpdateFailed(error) from error diff --git a/homeassistant/components/forecast_solar/manifest.json b/homeassistant/components/forecast_solar/manifest.json index f5dd79281e6..1eb9c98701d 100644 --- a/homeassistant/components/forecast_solar/manifest.json +++ b/homeassistant/components/forecast_solar/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/forecast_solar", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "platinum", - "requirements": ["forecast-solar==3.1.0"] + "requirements": ["forecast-solar==4.0.0"] } diff --git a/homeassistant/components/forked_daapd/browse_media.py b/homeassistant/components/forked_daapd/browse_media.py index f2c62b80234..35ad0ed49b0 100644 --- a/homeassistant/components/forked_daapd/browse_media.py +++ b/homeassistant/components/forked_daapd/browse_media.py @@ -7,8 +7,12 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Any, cast from urllib.parse import quote, unquote -from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType -from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_player import ( + BrowseError, + BrowseMedia, + MediaClass, + MediaType, +) from homeassistant.helpers.network import is_internal_request from .const import CAN_PLAY_TYPE, URI_SCHEMA diff --git a/homeassistant/components/forked_daapd/config_flow.py b/homeassistant/components/forked_daapd/config_flow.py index 1f76fe21bad..5fb9f08f1c0 100644 --- a/homeassistant/components/forked_daapd/config_flow.py +++ b/homeassistant/components/forked_daapd/config_flow.py @@ -52,11 +52,9 @@ TEST_CONNECTION_ERROR_DICT = { class ForkedDaapdOptionsFlowHandler(OptionsFlow): """Handle a forked-daapd options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="options", data=user_input) @@ -120,7 +118,7 @@ class ForkedDaapdFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> ForkedDaapdOptionsFlowHandler: """Return options flow handler.""" - return ForkedDaapdOptionsFlowHandler(config_entry) + return ForkedDaapdOptionsFlowHandler() async def validate_input(self, user_input): """Validate the user input.""" diff --git a/homeassistant/components/fortios/device_tracker.py b/homeassistant/components/fortios/device_tracker.py index 192c1e4bc69..af2bc92a065 100644 --- a/homeassistant/components/fortios/device_tracker.py +++ b/homeassistant/components/fortios/device_tracker.py @@ -13,7 +13,7 @@ from fortiosapi import FortiOSAPI import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -37,9 +37,11 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> FortiOSDeviceScanner | None: """Validate the configuration and return a FortiOSDeviceScanner.""" - host = config[DOMAIN][CONF_HOST] - verify_ssl = config[DOMAIN][CONF_VERIFY_SSL] - token = config[DOMAIN][CONF_TOKEN] + config = config[DEVICE_TRACKER_DOMAIN] + + host = config[CONF_HOST] + verify_ssl = config[CONF_VERIFY_SSL] + 
token = config[CONF_TOKEN] fgt = FortiOSAPI() diff --git a/homeassistant/components/fortios/manifest.json b/homeassistant/components/fortios/manifest.json index 93e55071178..22c44acfd82 100644 --- a/homeassistant/components/fortios/manifest.json +++ b/homeassistant/components/fortios/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/fortios", "iot_class": "local_polling", "loggers": ["fortiosapi", "paramiko"], + "quality_scale": "legacy", "requirements": ["fortiosapi==1.0.5"] } diff --git a/homeassistant/components/foscam/__init__.py b/homeassistant/components/foscam/__init__.py index f8708a589ce..b4d64464972 100644 --- a/homeassistant/components/foscam/__init__.py +++ b/homeassistant/components/foscam/__init__.py @@ -89,6 +89,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: unique_id=None, ) - LOGGER.info("Migration to version %s successful", entry.version) + LOGGER.debug("Migration to version %s successful", entry.version) return True diff --git a/homeassistant/components/foscam/camera.py b/homeassistant/components/foscam/camera.py index 45704515422..075848f6ffb 100644 --- a/homeassistant/components/foscam/camera.py +++ b/homeassistant/components/foscam/camera.py @@ -129,7 +129,7 @@ class HassFoscamCamera(FoscamEntity, Camera): ) if ret == -3: - LOGGER.info( + LOGGER.warning( ( "Can't get motion detection status, camera %s configured with" " non-admin user" @@ -171,7 +171,7 @@ class HassFoscamCamera(FoscamEntity, Camera): if ret != 0: if ret == -3: - LOGGER.info( + LOGGER.warning( ( "Can't set motion detection status, camera %s configured" " with non-admin user" @@ -197,7 +197,7 @@ class HassFoscamCamera(FoscamEntity, Camera): if ret != 0: if ret == -3: - LOGGER.info( + LOGGER.warning( ( "Can't set motion detection status, camera %s configured" " with non-admin user" diff --git a/homeassistant/components/foscam/icons.json b/homeassistant/components/foscam/icons.json index 0c7dba9a4df..437575024d1 100644 --- a/homeassistant/components/foscam/icons.json +++ b/homeassistant/components/foscam/icons.json @@ -1,6 +1,10 @@ { "services": { - "ptz": "mdi:pan", - "ptz_preset": "mdi:target-variant" + "ptz": { + "service": "mdi:pan" + }, + "ptz_preset": { + "service": "mdi:target-variant" + } } } diff --git a/homeassistant/components/foursquare/icons.json b/homeassistant/components/foursquare/icons.json index cf60ed9f247..8e2b4e91d5f 100644 --- a/homeassistant/components/foursquare/icons.json +++ b/homeassistant/components/foursquare/icons.json @@ -1,5 +1,7 @@ { "services": { - "checkin": "mdi:map-marker" + "checkin": { + "service": "mdi:map-marker" + } } } diff --git a/homeassistant/components/foursquare/manifest.json b/homeassistant/components/foursquare/manifest.json index ce1c87814d7..0503ea4abb5 100644 --- a/homeassistant/components/foursquare/manifest.json +++ b/homeassistant/components/foursquare/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/foursquare", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/free_mobile/manifest.json b/homeassistant/components/free_mobile/manifest.json index 61a1f94c19d..9ce9bc72c76 100644 --- a/homeassistant/components/free_mobile/manifest.json +++ b/homeassistant/components/free_mobile/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/free_mobile", "iot_class": "cloud_push", "loggers": 
["freesms"], + "quality_scale": "legacy", "requirements": ["freesms==0.2.0"] } diff --git a/homeassistant/components/freebox/alarm_control_panel.py b/homeassistant/components/freebox/alarm_control_panel.py index da5983f9374..9d8e85a14ca 100644 --- a/homeassistant/components/freebox/alarm_control_panel.py +++ b/homeassistant/components/freebox/alarm_control_panel.py @@ -5,31 +5,25 @@ from typing import Any from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, FreeboxHomeCategory -from .home_base import FreeboxHomeEntity +from .entity import FreeboxHomeEntity from .router import FreeboxRouter FREEBOX_TO_STATUS = { - "alarm1_arming": STATE_ALARM_ARMING, - "alarm2_arming": STATE_ALARM_ARMING, - "alarm1_armed": STATE_ALARM_ARMED_AWAY, - "alarm2_armed": STATE_ALARM_ARMED_HOME, - "alarm1_alert_timer": STATE_ALARM_TRIGGERED, - "alarm2_alert_timer": STATE_ALARM_TRIGGERED, - "alert": STATE_ALARM_TRIGGERED, - "idle": STATE_ALARM_DISARMED, + "alarm1_arming": AlarmControlPanelState.ARMING, + "alarm2_arming": AlarmControlPanelState.ARMING, + "alarm1_armed": AlarmControlPanelState.ARMED_AWAY, + "alarm2_armed": AlarmControlPanelState.ARMED_HOME, + "alarm1_alert_timer": AlarmControlPanelState.TRIGGERED, + "alarm2_alert_timer": AlarmControlPanelState.TRIGGERED, + "alert": AlarmControlPanelState.TRIGGERED, + "idle": AlarmControlPanelState.DISARMED, } @@ -103,6 +97,6 @@ class FreeboxAlarm(FreeboxHomeEntity, AlarmControlPanelEntity): """Update state.""" state: str | None = await self.get_home_endpoint_value(self._command_state) if state: - self._attr_state = FREEBOX_TO_STATUS.get(state) + self._attr_alarm_state = FREEBOX_TO_STATUS.get(state) else: - self._attr_state = None + self._attr_alarm_state = None diff --git a/homeassistant/components/freebox/binary_sensor.py b/homeassistant/components/freebox/binary_sensor.py index a54930753a0..20c124efea6 100644 --- a/homeassistant/components/freebox/binary_sensor.py +++ b/homeassistant/components/freebox/binary_sensor.py @@ -17,7 +17,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, FreeboxHomeCategory -from .home_base import FreeboxHomeEntity +from .entity import FreeboxHomeEntity from .router import FreeboxRouter _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/freebox/camera.py b/homeassistant/components/freebox/camera.py index 879941af040..33919df74f6 100644 --- a/homeassistant/components/freebox/camera.py +++ b/homeassistant/components/freebox/camera.py @@ -20,7 +20,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ATTR_DETECTION, DOMAIN, FreeboxHomeCategory -from .home_base import FreeboxHomeEntity +from .entity import FreeboxHomeEntity from .router import FreeboxRouter _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/freebox/device_tracker.py b/homeassistant/components/freebox/device_tracker.py index 0f5b7eb4837..1fa37ebc270 100644 --- 
a/homeassistant/components/freebox/device_tracker.py +++ b/homeassistant/components/freebox/device_tracker.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import datetime from typing import Any -from homeassistant.components.device_tracker import ScannerEntity, SourceType +from homeassistant.components.device_tracker import ScannerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -98,11 +98,6 @@ class FreeboxDevice(ScannerEntity): """Return true if the device is connected to the network.""" return self._active - @property - def source_type(self) -> SourceType: - """Return the source type.""" - return SourceType.ROUTER - @callback def async_on_demand_update(self) -> None: """Update state.""" diff --git a/homeassistant/components/freebox/home_base.py b/homeassistant/components/freebox/entity.py similarity index 100% rename from homeassistant/components/freebox/home_base.py rename to homeassistant/components/freebox/entity.py diff --git a/homeassistant/components/freebox/icons.json b/homeassistant/components/freebox/icons.json index 81361d2c990..f4184f0673e 100644 --- a/homeassistant/components/freebox/icons.json +++ b/homeassistant/components/freebox/icons.json @@ -1,5 +1,7 @@ { "services": { - "reboot": "mdi:restart" + "reboot": { + "service": "mdi:restart" + } } } diff --git a/homeassistant/components/freebox/router.py b/homeassistant/components/freebox/router.py index ed2fbcf1e83..efa96eca5a7 100644 --- a/homeassistant/components/freebox/router.py +++ b/homeassistant/components/freebox/router.py @@ -225,7 +225,7 @@ class FreeboxRouter: fbx_raids: list[dict[str, Any]] = await self._api.storage.get_raids() or [] except HttpRequestError: self.supports_raid = False - _LOGGER.info( + _LOGGER.warning( "Router %s API does not support RAID", self.name, ) diff --git a/homeassistant/components/freebox/sensor.py b/homeassistant/components/freebox/sensor.py index e5a0b8223a9..097c8c138ee 100644 --- a/homeassistant/components/freebox/sensor.py +++ b/homeassistant/components/freebox/sensor.py @@ -19,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util from .const import DOMAIN -from .home_base import FreeboxHomeEntity +from .entity import FreeboxHomeEntity from .router import FreeboxRouter _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/freebox/strings.json b/homeassistant/components/freebox/strings.json index eaa56a38da1..0d91daaa290 100644 --- a/homeassistant/components/freebox/strings.json +++ b/homeassistant/components/freebox/strings.json @@ -12,7 +12,7 @@ }, "link": { "title": "Link Freebox router", - "description": "Click \"Submit\", then touch the right arrow on the router to register Freebox with Home Assistant.\n\n![Location of button on the router](/static/images/config_freebox.png)" + "description": "Select **Submit**, then touch the right arrow on the router to register Freebox with Home Assistant.\n\n![Location of button on the router](/static/images/config_freebox.png)" } }, "error": { diff --git a/homeassistant/components/freedns/manifest.json b/homeassistant/components/freedns/manifest.json index ac320a51d93..7c6bceb11a6 100644 --- a/homeassistant/components/freedns/manifest.json +++ b/homeassistant/components/freedns/manifest.json @@ -3,5 +3,6 @@ "name": "FreeDNS", "codeowners": [], "documentation": 
"https://www.home-assistant.io/integrations/freedns", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/freedompro/climate.py b/homeassistant/components/freedompro/climate.py index d534db7e858..a5b0144ce0c 100644 --- a/homeassistant/components/freedompro/climate.py +++ b/homeassistant/components/freedompro/climate.py @@ -73,7 +73,6 @@ class Device(CoordinatorEntity[FreedomproDataUpdateCoordinator], ClimateEntity): _attr_current_temperature = 0 _attr_target_temperature = 0 _attr_hvac_mode = HVACMode.OFF - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/freedompro/config_flow.py b/homeassistant/components/freedompro/config_flow.py index f986cd05904..48d075f8a87 100644 --- a/homeassistant/components/freedompro/config_flow.py +++ b/homeassistant/components/freedompro/config_flow.py @@ -19,19 +19,19 @@ STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) class Hub: """Freedompro Hub class.""" - def __init__(self, hass, api_key): + def __init__(self, hass: HomeAssistant, api_key: str) -> None: """Freedompro Hub class init.""" self._hass = hass self._api_key = api_key - async def authenticate(self): + async def authenticate(self) -> dict[str, Any]: """Freedompro Hub class authenticate.""" return await get_list( aiohttp_client.async_get_clientsession(self._hass), self._api_key ) -async def validate_input(hass: HomeAssistant, api_key): +async def validate_input(hass: HomeAssistant, api_key: str) -> None: """Validate api key.""" hub = Hub(hass, api_key) result = await hub.authenticate() diff --git a/homeassistant/components/freedompro/fan.py b/homeassistant/components/freedompro/fan.py index 698d57d1001..d21ede9bad3 100644 --- a/homeassistant/components/freedompro/fan.py +++ b/homeassistant/components/freedompro/fan.py @@ -40,7 +40,6 @@ class FreedomproFan(CoordinatorEntity[FreedomproDataUpdateCoordinator], FanEntit _attr_name = None _attr_is_on = False _attr_percentage = 0 - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/fritz/config_flow.py b/homeassistant/components/fritz/config_flow.py index fbc324fde2b..920ecda1c52 100644 --- a/homeassistant/components/fritz/config_flow.py +++ b/homeassistant/components/fritz/config_flow.py @@ -6,7 +6,7 @@ from collections.abc import Mapping import ipaddress import logging import socket -from typing import Any +from typing import Any, Self from urllib.parse import ParseResult, urlparse from fritzconnection import FritzConnection @@ -23,7 +23,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_HOST, @@ -58,16 +57,18 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _host: str + @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> FritzBoxToolsOptionsFlowHandler: """Get the options flow for this handler.""" - return FritzBoxToolsOptionsFlowHandler(config_entry) + return FritzBoxToolsOptionsFlowHandler() def __init__(self) -> None: """Initialize FRITZ!Box Tools flow.""" - self._host: str | None = None - self._entry: ConfigEntry | None = None self._name: str = "" self._password: str = "" self._use_tls: bool = False @@ -75,6 +76,10 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._username: str 
= "" self._model: str = "" + async def async_fritz_tools_init(self) -> str | None: + """Initialize FRITZ!Box Tools class.""" + return await self.hass.async_add_executor_job(self.fritz_tools_init) + def fritz_tools_init(self) -> str | None: """Initialize FRITZ!Box Tools class.""" @@ -108,7 +113,6 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): async def async_check_configured_entry(self) -> ConfigEntry | None: """Check if entry is configured.""" - assert self._host current_host = await self.hass.async_add_executor_job( socket.gethostbyname, self._host ) @@ -150,25 +154,25 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle a flow initialized by discovery.""" ssdp_location: ParseResult = urlparse(discovery_info.ssdp_location or "") - self._host = ssdp_location.hostname + host = ssdp_location.hostname + if not host or ipaddress.ip_address(host).is_link_local: + return self.async_abort(reason="ignore_ip6_link_local") + + self._host = host self._name = ( discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME) or discovery_info.upnp[ssdp.ATTR_UPNP_MODEL_NAME] ) - self.context[CONF_HOST] = self._host - - if not self._host or ipaddress.ip_address(self._host).is_link_local: - return self.async_abort(reason="ignore_ip6_link_local") + uuid: str | None if uuid := discovery_info.upnp.get(ssdp.ATTR_UPNP_UDN): if uuid.startswith("uuid:"): uuid = uuid[5:] await self.async_set_unique_id(uuid) self._abort_if_unique_id_configured({CONF_HOST: self._host}) - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == self._host: - return self.async_abort(reason="already_in_progress") + if self.hass.config_entries.flow.async_has_matching_flow(self): + return self.async_abort(reason="already_in_progress") if entry := await self.async_check_configured_entry(): if uuid and not entry.unique_id: @@ -184,6 +188,10 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_confirm() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow._host == self._host # noqa: SLF001 + async def async_step_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -198,7 +206,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._use_tls = user_input[CONF_SSL] self._port = self._determine_port(user_input) - error = await self.hass.async_add_executor_job(self.fritz_tools_init) + error = await self.async_fritz_tools_init() if error: errors["base"] = error @@ -261,7 +269,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._port = self._determine_port(user_input) - if not (error := await self.hass.async_add_executor_job(self.fritz_tools_init)): + if not (error := await self.async_fritz_tools_init()): self._name = self._model if await self.async_check_configured_entry(): @@ -276,7 +284,6 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle flow upon an API authentication error.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) self._host = entry_data[CONF_HOST] self._port = entry_data[CONF_PORT] self._username = entry_data[CONF_USERNAME] @@ -314,14 +321,13 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] - if error := await self.hass.async_add_executor_job(self.fritz_tools_init): + 
if error := await self.async_fritz_tools_init(): return self._show_setup_form_reauth_confirm( user_input=user_input, errors={"base": error} ) - assert isinstance(self._entry, ConfigEntry) - self.hass.config_entries.async_update_entry( - self._entry, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data={ CONF_HOST: self._host, CONF_PASSWORD: self._password, @@ -330,22 +336,8 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): CONF_SSL: self._use_tls, }, ) - await self.hass.config_entries.async_reload(self._entry.entry_id) - return self.async_abort(reason="reauth_successful") - async def async_step_reconfigure(self, _: Mapping[str, Any]) -> ConfigFlowResult: - """Handle reconfigure flow .""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert self._entry - self._host = self._entry.data[CONF_HOST] - self._port = self._entry.data[CONF_PORT] - self._username = self._entry.data[CONF_USERNAME] - self._password = self._entry.data[CONF_PASSWORD] - self._use_tls = self._entry.data.get(CONF_SSL, DEFAULT_SSL) - - return await self.async_step_reconfigure_confirm() - - def _show_setup_form_reconfigure_confirm( + def _show_setup_form_reconfigure( self, user_input: dict[str, Any], errors: dict[str, str] | None = None ) -> ConfigFlowResult: """Show the reconfigure form to the user.""" @@ -356,7 +348,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): } return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str, @@ -364,20 +356,21 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): vol.Required(CONF_SSL, default=user_input[CONF_SSL]): bool, } ), - description_placeholders={"host": self._host}, + description_placeholders={"host": user_input[CONF_HOST]}, errors=errors or {}, ) - async def async_step_reconfigure_confirm( + async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfigure flow.""" if user_input is None: - return self._show_setup_form_reconfigure_confirm( + reconfigure_entry_data = self._get_reconfigure_entry().data + return self._show_setup_form_reconfigure( { - CONF_HOST: self._host, - CONF_PORT: self._port, - CONF_SSL: self._use_tls, + CONF_HOST: reconfigure_entry_data[CONF_HOST], + CONF_PORT: reconfigure_entry_data[CONF_PORT], + CONF_SSL: reconfigure_entry_data.get(CONF_SSL, DEFAULT_SSL), } ) @@ -385,27 +378,25 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._use_tls = user_input[CONF_SSL] self._port = self._determine_port(user_input) - if error := await self.hass.async_add_executor_job(self.fritz_tools_init): - return self._show_setup_form_reconfigure_confirm( + reconfigure_entry = self._get_reconfigure_entry() + self._username = reconfigure_entry.data[CONF_USERNAME] + self._password = reconfigure_entry.data[CONF_PASSWORD] + if error := await self.async_fritz_tools_init(): + return self._show_setup_form_reconfigure( user_input={**user_input, CONF_PORT: self._port}, errors={"base": error} ) - assert isinstance(self._entry, ConfigEntry) - self.hass.config_entries.async_update_entry( - self._entry, - data={ + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={ CONF_HOST: self._host, - CONF_PASSWORD: self._password, CONF_PORT: self._port, - CONF_USERNAME: self._username, CONF_SSL: self._use_tls, }, ) - await self.hass.config_entries.async_reload(self._entry.entry_id) - return 
self.async_abort(reason="reconfigure_successful") -class FritzBoxToolsOptionsFlowHandler(OptionsFlowWithConfigEntry): +class FritzBoxToolsOptionsFlowHandler(OptionsFlow): """Handle an options flow.""" async def async_step_init( @@ -416,19 +407,18 @@ class FritzBoxToolsOptionsFlowHandler(OptionsFlowWithConfigEntry): if user_input is not None: return self.async_create_entry(title="", data=user_input) + options = self.config_entry.options data_schema = vol.Schema( { vol.Optional( CONF_CONSIDER_HOME, - default=self.options.get( + default=options.get( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds() ), ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)), vol.Optional( CONF_OLD_DISCOVERY, - default=self.options.get( - CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY - ), + default=options.get(CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY), ): bool, } ) diff --git a/homeassistant/components/fritz/coordinator.py b/homeassistant/components/fritz/coordinator.py index 13c442a1ace..90bd6068ecb 100644 --- a/homeassistant/components/fritz/coordinator.py +++ b/homeassistant/components/fritz/coordinator.py @@ -79,7 +79,7 @@ def device_filter_out_from_trackers( def _ha_is_stopping(activity: str) -> None: """Inform that HA is stopping.""" - _LOGGER.info("Cannot execute %s: HomeAssistant is shutting down", activity) + _LOGGER.warning("Cannot execute %s: HomeAssistant is shutting down", activity) class ClassSetupMissing(Exception): @@ -326,7 +326,11 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]): "call_deflections" ] = await self.async_update_call_deflections() except FRITZ_EXCEPTIONS as ex: - raise UpdateFailed(ex) from ex + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(ex)}, + ) from ex _LOGGER.debug("enity_data: %s", entity_data) return entity_data @@ -606,6 +610,9 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]): dev_info: Device = hosts[dev_mac] for link in interf["node_links"]: + if link.get("state") != "CONNECTED": + continue # ignore orphan node links + intf = mesh_intf.get(link["node_interface_1_uid"]) if intf is not None: if intf["op_mode"] == "AP_GUEST": @@ -658,7 +665,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]): entity.domain == DEVICE_TRACKER_DOMAIN or "_internet_access" in entity.unique_id ) and entry_mac not in device_hosts: - _LOGGER.info("Removing orphan entity entry %s", entity.entity_id) + _LOGGER.debug("Removing orphan entity entry %s", entity.entity_id) entity_reg.async_remove(entity.entity_id) device_reg = dr.async_get(self.hass) diff --git a/homeassistant/components/fritz/device_tracker.py b/homeassistant/components/fritz/device_tracker.py index 6bf182458e0..d1270a0510c 100644 --- a/homeassistant/components/fritz/device_tracker.py +++ b/homeassistant/components/fritz/device_tracker.py @@ -5,7 +5,7 @@ from __future__ import annotations import datetime import logging -from homeassistant.components.device_tracker import ScannerEntity, SourceType +from homeassistant.components.device_tracker import ScannerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -112,8 +112,3 @@ class FritzBoxTracker(FritzDeviceBase, ScannerEntity): if device.ssid: attrs["ssid"] = device.ssid return attrs - - @property - def source_type(self) -> SourceType: - """Return tracker source type.""" - return 
SourceType.ROUTER diff --git a/homeassistant/components/fritz/icons.json b/homeassistant/components/fritz/icons.json index d2154dc7232..481568a4c2c 100644 --- a/homeassistant/components/fritz/icons.json +++ b/homeassistant/components/fritz/icons.json @@ -51,9 +51,17 @@ } }, "services": { - "reconnect": "mdi:connection", - "reboot": "mdi:refresh", - "cleanup": "mdi:broom", - "set_guest_wifi_password": "mdi:form-textbox-password" + "reconnect": { + "service": "mdi:connection" + }, + "reboot": { + "service": "mdi:refresh" + }, + "cleanup": { + "service": "mdi:broom" + }, + "set_guest_wifi_password": { + "service": "mdi:form-textbox-password" + } } } diff --git a/homeassistant/components/fritz/manifest.json b/homeassistant/components/fritz/manifest.json index d8d8f6b94bf..27aa42d9b2c 100644 --- a/homeassistant/components/fritz/manifest.json +++ b/homeassistant/components/fritz/manifest.json @@ -1,13 +1,13 @@ { "domain": "fritz", "name": "AVM FRITZ!Box Tools", - "codeowners": ["@mammuth", "@AaronDavidSchneider", "@chemelli74", "@mib1185"], + "codeowners": ["@AaronDavidSchneider", "@chemelli74", "@mib1185"], "config_flow": true, "dependencies": ["network"], "documentation": "https://www.home-assistant.io/integrations/fritz", "iot_class": "local_polling", "loggers": ["fritzconnection"], - "requirements": ["fritzconnection[qr]==1.13.2", "xmltodict==0.13.0"], + "requirements": ["fritzconnection[qr]==1.14.0", "xmltodict==0.13.0"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:fritzbox:1" diff --git a/homeassistant/components/fritz/quality_scale.yaml b/homeassistant/components/fritz/quality_scale.yaml new file mode 100644 index 00000000000..06c572f93a6 --- /dev/null +++ b/homeassistant/components/fritz/quality_scale.yaml @@ -0,0 +1,101 @@ +rules: + # Bronze + action-setup: + status: todo + comment: still in async_setup_entry, needs to be moved to async_setup + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: one coverage miss in line 110 + config-flow: + status: todo + comment: data_description are missing + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: + status: todo + comment: include the proper docs snippet + entity-event-setup: done + entity-unique-id: done + has-entity-name: + status: todo + comment: partially done + runtime-data: + status: todo + comment: still uses hass.data + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: + status: todo + comment: add the proper configuration_basic block + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: todo + comment: not set at the moment, we use a coordinator + reauthentication-flow: done + test-coverage: + status: todo + comment: we are close to the goal of 95% + + # Gold + devices: done + diagnostics: done + discovery-update-info: todo + discovery: done + docs-data-update: todo + docs-examples: done + docs-known-limitations: + status: exempt + comment: no known limitations, yet + docs-supported-devices: + status: todo + comment: add the known supported devices + docs-supported-functions: + status: todo + comment: need to be overhauled + docs-troubleshooting: done + docs-use-cases: + status: todo + comment: need to be overhauled + 
dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: no known use cases for repair issues or flows, yet + stale-devices: + status: todo + comment: automate the current cleanup process and deprecate the corresponding button + + # Platinum + async-dependency: + status: todo + comment: | + the fritzconnection lib is not async + changing this might need a bit more effort to be spent + inject-websession: + status: todo + comment: | + the fritzconnection lib is not async and relies on requests + changing this might need a bit more effort to be spent + strict-typing: + status: todo + comment: | + Requirements 'fritzconnection==1.14.0' and 'xmltodict==0.13.0' appear untyped diff --git a/homeassistant/components/fritz/strings.json b/homeassistant/components/fritz/strings.json index 3b6c60ed48f..06a07cba79e 100644 --- a/homeassistant/components/fritz/strings.json +++ b/homeassistant/components/fritz/strings.json @@ -7,7 +7,8 @@ "description": "Discovered FRITZ!Box: {name}\n\nSet up FRITZ!Box Tools to control your {name}", "data": { "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]" + "password": "[%key:common::config_flow::data::password%]", + "ssl": "[%key:common::config_flow::data::ssl%]" } }, "reauth_confirm": { @@ -18,7 +19,7 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Updating FRITZ!Box Tools - configuration", "description": "Update FRITZ!Box Tools configuration for: {host}.", "data": { @@ -55,6 +56,7 @@ "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { + "unknown_error": "[%key:common::config_flow::error::unknown%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "upnp_not_configured": "Missing UPnP settings on device.", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", @@ -174,6 +176,9 @@ }, "unable_to_connect": { "message": "Unable to establish a connection" + }, + "update_failed": { + "message": "Error while updating the data: {error}" } } } diff --git a/homeassistant/components/fritz/switch.py b/homeassistant/components/fritz/switch.py index ce89cfc736d..372af89cc9e 100644 --- a/homeassistant/components/fritz/switch.py +++ b/homeassistant/components/fritz/switch.py @@ -9,7 +9,7 @@ from homeassistant.components.network import async_get_source_ip from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity @@ -46,9 +46,7 @@ async def _async_deflection_entities_list( _LOGGER.debug("Setting up %s switches", SWITCH_TYPE_DEFLECTION) - if ( - call_deflections := avm_wrapper.data.get("call_deflections") - ) is None or not isinstance(call_deflections, dict): + if not (call_deflections := avm_wrapper.data["call_deflections"]): _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION) return [] @@ -72,7
+70,7 @@ async def _async_port_entities_list( # Query port forwardings and setup a switch for each forward for the current device resp = await avm_wrapper.async_get_num_port_mapping(avm_wrapper.device_conn_type) if not resp: - _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_DEFLECTION) + _LOGGER.debug("The FRITZ!Box has no %s options", SWITCH_TYPE_PORTFORWARD) return [] port_forwards_count: int = resp["NewPortMappingNumberOfEntries"] @@ -242,7 +240,6 @@ async def async_setup_entry( async_add_entities(entities_list) - @callback async def async_update_avm_device() -> None: """Update the values of the AVM device.""" async_add_entities(await _async_profile_entities_list(avm_wrapper, data_fritz)) diff --git a/homeassistant/components/fritzbox/__init__.py b/homeassistant/components/fritzbox/__init__.py index 460e1edd851..07bc8fb15f2 100644 --- a/homeassistant/components/fritzbox/__init__.py +++ b/homeassistant/components/fritzbox/__init__.py @@ -2,18 +2,11 @@ from __future__ import annotations -from abc import ABC, abstractmethod - -from pyfritzhome import FritzhomeDevice -from pyfritzhome.devicetypes.fritzhomeentitybase import FritzhomeEntityBase - from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.const import EVENT_HOMEASSISTANT_STOP, UnitOfTemperature from homeassistant.core import Event, HomeAssistant -from homeassistant.helpers.device_registry import DeviceEntry, DeviceInfo -from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.device_registry import DeviceEntry from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, LOGGER, PLATFORMS from .coordinator import FritzboxConfigEntry, FritzboxDataUpdateCoordinator @@ -29,14 +22,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: FritzboxConfigEntry) -> and "_temperature" not in entry.unique_id ): new_unique_id = f"{entry.unique_id}_temperature" - LOGGER.info( + LOGGER.debug( "Migrating unique_id [%s] to [%s]", entry.unique_id, new_unique_id ) return {"new_unique_id": new_unique_id} if entry.domain == BINARY_SENSOR_DOMAIN and "_" not in entry.unique_id: new_unique_id = f"{entry.unique_id}_alarm" - LOGGER.info( + LOGGER.debug( "Migrating unique_id [%s] to [%s]", entry.unique_id, new_unique_id ) return {"new_unique_id": new_unique_id} @@ -83,56 +76,3 @@ async def async_remove_config_entry_device( return False return True - - -class FritzBoxEntity(CoordinatorEntity[FritzboxDataUpdateCoordinator], ABC): - """Basis FritzBox entity.""" - - def __init__( - self, - coordinator: FritzboxDataUpdateCoordinator, - ain: str, - entity_description: EntityDescription | None = None, - ) -> None: - """Initialize the FritzBox entity.""" - super().__init__(coordinator) - - self.ain = ain - if entity_description is not None: - self._attr_has_entity_name = True - self.entity_description = entity_description - self._attr_unique_id = f"{ain}_{entity_description.key}" - else: - self._attr_name = self.data.name - self._attr_unique_id = ain - - @property - @abstractmethod - def data(self) -> FritzhomeEntityBase: - """Return data object from coordinator.""" - - -class FritzBoxDeviceEntity(FritzBoxEntity): - """Reflects FritzhomeDevice and uses its attributes to construct FritzBoxDeviceEntity.""" - - @property - def available(self) -> bool: - """Return if entity is available.""" - return super().available and self.data.present - - @property - 
def data(self) -> FritzhomeDevice: - """Return device data object from coordinator.""" - return self.coordinator.data.devices[self.ain] - - @property - def device_info(self) -> DeviceInfo: - """Return device specific attributes.""" - return DeviceInfo( - name=self.data.name, - identifiers={(DOMAIN, self.ain)}, - manufacturer=self.data.manufacturer, - model=self.data.productname, - sw_version=self.data.fw_version, - configuration_url=self.coordinator.configuration_url, - ) diff --git a/homeassistant/components/fritzbox/binary_sensor.py b/homeassistant/components/fritzbox/binary_sensor.py index 89394d35fe5..3c9cb6ada5c 100644 --- a/homeassistant/components/fritzbox/binary_sensor.py +++ b/homeassistant/components/fritzbox/binary_sensor.py @@ -17,8 +17,8 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FritzBoxDeviceEntity from .coordinator import FritzboxConfigEntry +from .entity import FritzBoxDeviceEntity from .model import FritzEntityDescriptionMixinBase diff --git a/homeassistant/components/fritzbox/button.py b/homeassistant/components/fritzbox/button.py index 7ef91a74252..44a6697e1c0 100644 --- a/homeassistant/components/fritzbox/button.py +++ b/homeassistant/components/fritzbox/button.py @@ -7,9 +7,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FritzBoxEntity from .const import DOMAIN from .coordinator import FritzboxConfigEntry +from .entity import FritzBoxEntity async def async_setup_entry( diff --git a/homeassistant/components/fritzbox/climate.py b/homeassistant/components/fritzbox/climate.py index 5288682c388..d5a81fdef1a 100644 --- a/homeassistant/components/fritzbox/climate.py +++ b/homeassistant/components/fritzbox/climate.py @@ -22,7 +22,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import FritzBoxDeviceEntity from .const import ( ATTR_STATE_BATTERY_LOW, ATTR_STATE_HOLIDAY_MODE, @@ -32,7 +31,9 @@ from .const import ( LOGGER, ) from .coordinator import FritzboxConfigEntry, FritzboxDataUpdateCoordinator +from .entity import FritzBoxDeviceEntity from .model import ClimateExtraAttributes +from .sensor import value_scheduled_preset HVAC_MODES = [HVACMode.HEAT, HVACMode.OFF] PRESET_HOLIDAY = "holiday" @@ -87,7 +88,6 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity): _attr_precision = PRECISION_HALVES _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = "thermostat" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -135,14 +135,16 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity): async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" - if kwargs.get(ATTR_HVAC_MODE) is not None: - hvac_mode = kwargs[ATTR_HVAC_MODE] + target_temp = kwargs.get(ATTR_TEMPERATURE) + hvac_mode = kwargs.get(ATTR_HVAC_MODE) + if hvac_mode == HVACMode.OFF: await self.async_set_hvac_mode(hvac_mode) - elif kwargs.get(ATTR_TEMPERATURE) is not None: - temperature = kwargs[ATTR_TEMPERATURE] + elif target_temp is not None: await self.hass.async_add_executor_job( - self.data.set_target_temperature, temperature + self.data.set_target_temperature, target_temp ) + else: + return await self.coordinator.async_refresh() @property @@ -175,7 +177,11 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity): if hvac_mode == HVACMode.OFF: await self.async_set_temperature(temperature=OFF_REPORT_SET_TEMPERATURE) else: - await self.async_set_temperature(temperature=self.data.comfort_temperature) + if value_scheduled_preset(self.data) == PRESET_ECO: + target_temp = self.data.eco_temperature + else: + target_temp = self.data.comfort_temperature + await self.async_set_temperature(temperature=target_temp) @property def preset_mode(self) -> str | None: diff --git a/homeassistant/components/fritzbox/config_flow.py b/homeassistant/components/fritzbox/config_flow.py index 62f189b542f..ffec4a9ea29 100644 --- a/homeassistant/components/fritzbox/config_flow.py +++ b/homeassistant/components/fritzbox/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import ipaddress -from typing import Any +from typing import Any, Self from urllib.parse import urlparse from pyfritzhome import Fritzhome, LoginError @@ -12,7 +12,7 @@ from requests.exceptions import HTTPError import voluptuous as vol from homeassistant.components import ssdp -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from .const import DEFAULT_HOST, DEFAULT_USERNAME, DOMAIN @@ -43,11 +43,11 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + _name: str + def __init__(self) -> None: """Initialize flow.""" - self._entry: ConfigEntry | None = None self._host: str | None = None - self._name: str | None = None self._password: str | None = None self._username: str | None = None @@ -61,17 +61,9 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): }, ) - async def _update_entry(self) -> None: - assert self._entry is not None - self.hass.config_entries.async_update_entry( - self._entry, - data={ - CONF_HOST: self._host, - CONF_PASSWORD: self._password, - CONF_USERNAME: self._username, - }, - ) - await 
self.hass.config_entries.async_reload(self._entry.entry_id) + async def async_try_connect(self) -> str: + """Try to connect and check auth.""" + return await self.hass.async_add_executor_job(self._try_connect) def _try_connect(self) -> str: """Try to connect and check auth.""" @@ -104,7 +96,7 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): self._password = user_input[CONF_PASSWORD] self._username = user_input[CONF_USERNAME] - result = await self.hass.async_add_executor_job(self._try_connect) + result = await self.async_try_connect() if result == RESULT_SUCCESS: return self._get_entry(self._name) @@ -122,7 +114,6 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by discovery.""" host = urlparse(discovery_info.ssdp_location).hostname assert isinstance(host, str) - self.context[CONF_HOST] = host if ( ipaddress.ip_address(host).version == 6 @@ -136,9 +127,9 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(uuid) self._abort_if_unique_id_configured({CONF_HOST: host}) - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == host: - return self.async_abort(reason="already_in_progress") + self._host = host + if self.hass.config_entries.flow.async_has_matching_flow(self): + return self.async_abort(reason="already_in_progress") # update old and user-configured config entries for entry in self._async_current_entries(include_ignore=False): @@ -147,12 +138,15 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): self.hass.config_entries.async_update_entry(entry, unique_id=uuid) return self.async_abort(reason="already_configured") - self._host = host self._name = str(discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME) or host) self.context["title_placeholders"] = {"name": self._name} return await self.async_step_confirm() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow._host == self._host # noqa: SLF001 + async def async_step_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -162,10 +156,9 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: self._password = user_input[CONF_PASSWORD] self._username = user_input[CONF_USERNAME] - result = await self.hass.async_add_executor_job(self._try_connect) + result = await self.async_try_connect() if result == RESULT_SUCCESS: - assert self._name is not None return self._get_entry(self._name) if result != RESULT_INVALID_AUTH: return self.async_abort(reason=result) @@ -182,9 +175,6 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Trigger a reauthentication flow.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry is not None - self._entry = entry self._host = entry_data[CONF_HOST] self._name = str(entry_data[CONF_HOST]) self._username = entry_data[CONF_USERNAME] @@ -201,11 +191,17 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): self._password = user_input[CONF_PASSWORD] self._username = user_input[CONF_USERNAME] - result = await self.hass.async_add_executor_job(self._try_connect) + result = await self.async_try_connect() if result == RESULT_SUCCESS: - await self._update_entry() - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data={ + CONF_HOST: self._host, + CONF_PASSWORD: self._password, + CONF_USERNAME: 
self._username, + }, + ) if result != RESULT_INVALID_AUTH: return self.async_abort(reason=result) errors["base"] = result @@ -223,20 +219,6 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reconfigure( - self, _: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry is not None - self._entry = entry - self._name = self._entry.data[CONF_HOST] - self._host = self._entry.data[CONF_HOST] - self._username = self._entry.data[CONF_USERNAME] - self._password = self._entry.data[CONF_PASSWORD] - - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" @@ -245,20 +227,27 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: self._host = user_input[CONF_HOST] - result = await self.hass.async_add_executor_job(self._try_connect) + reconfigure_entry = self._get_reconfigure_entry() + self._username = reconfigure_entry.data[CONF_USERNAME] + self._password = reconfigure_entry.data[CONF_PASSWORD] + + result = await self.async_try_connect() if result == RESULT_SUCCESS: - await self._update_entry() - return self.async_abort(reason="reconfigure_successful") + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={CONF_HOST: self._host}, + ) errors["base"] = result + host = self._get_reconfigure_entry().data[CONF_HOST] return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { - vol.Required(CONF_HOST, default=self._host): str, + vol.Required(CONF_HOST, default=host): str, } ), - description_placeholders={"name": self._name}, + description_placeholders={"name": host}, errors=errors, ) diff --git a/homeassistant/components/fritzbox/cover.py b/homeassistant/components/fritzbox/cover.py index 7a74d0b8184..de87d6f8852 100644 --- a/homeassistant/components/fritzbox/cover.py +++ b/homeassistant/components/fritzbox/cover.py @@ -13,8 +13,8 @@ from homeassistant.components.cover import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import FritzBoxDeviceEntity from .coordinator import FritzboxConfigEntry +from .entity import FritzBoxDeviceEntity async def async_setup_entry( diff --git a/homeassistant/components/fritzbox/entity.py b/homeassistant/components/fritzbox/entity.py new file mode 100644 index 00000000000..cd619588bc1 --- /dev/null +++ b/homeassistant/components/fritzbox/entity.py @@ -0,0 +1,68 @@ +"""Support for AVM FRITZ!SmartHome devices.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod + +from pyfritzhome import FritzhomeDevice +from pyfritzhome.devicetypes.fritzhomeentitybase import FritzhomeEntityBase + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import FritzboxDataUpdateCoordinator + + +class FritzBoxEntity(CoordinatorEntity[FritzboxDataUpdateCoordinator], ABC): + """Basis FritzBox entity.""" + + def __init__( + self, + coordinator: FritzboxDataUpdateCoordinator, + ain: str, + entity_description: EntityDescription | None = None, + ) -> None: + """Initialize the FritzBox entity.""" + super().__init__(coordinator) + + self.ain = ain + if entity_description is not None: + self._attr_has_entity_name = True + self.entity_description = entity_description + self._attr_unique_id = f"{ain}_{entity_description.key}" + else: + self._attr_name = self.data.name + self._attr_unique_id = ain + + @property + @abstractmethod + def data(self) -> FritzhomeEntityBase: + """Return data object from coordinator.""" + + +class FritzBoxDeviceEntity(FritzBoxEntity): + """Reflects FritzhomeDevice and uses its attributes to construct FritzBoxDeviceEntity.""" + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.data.present + + @property + def data(self) -> FritzhomeDevice: + """Return device data object from coordinator.""" + return self.coordinator.data.devices[self.ain] + + @property + def device_info(self) -> DeviceInfo: + """Return device specific attributes.""" + return DeviceInfo( + name=self.data.name, + identifiers={(DOMAIN, self.ain)}, + manufacturer=self.data.manufacturer, + model=self.data.productname, + sw_version=self.data.fw_version, + configuration_url=self.coordinator.configuration_url, + ) diff --git a/homeassistant/components/fritzbox/light.py b/homeassistant/components/fritzbox/light.py index c19d7a8600d..d347f6898c0 100644 --- a/homeassistant/components/fritzbox/light.py +++ b/homeassistant/components/fritzbox/light.py @@ -16,9 +16,9 @@ from homeassistant.components.light import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import FritzboxDataUpdateCoordinator, FritzBoxDeviceEntity from .const import COLOR_MODE, LOGGER -from .coordinator import FritzboxConfigEntry +from .coordinator import FritzboxConfigEntry, FritzboxDataUpdateCoordinator +from .entity import FritzBoxDeviceEntity async def async_setup_entry( diff --git a/homeassistant/components/fritzbox/manifest.json b/homeassistant/components/fritzbox/manifest.json index 3735c16571e..1a127597b81 100644 --- a/homeassistant/components/fritzbox/manifest.json +++ b/homeassistant/components/fritzbox/manifest.json @@ -7,7 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["pyfritzhome"], - "quality_scale": "gold", "requirements": ["pyfritzhome==0.6.12"], "ssdp": [ { diff --git a/homeassistant/components/fritzbox/sensor.py b/homeassistant/components/fritzbox/sensor.py index d28727c01f5..e610fd80f3e 100644 --- a/homeassistant/components/fritzbox/sensor.py +++ b/homeassistant/components/fritzbox/sensor.py @@ -30,8 +30,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.util.dt import utc_from_timestamp -from . import FritzBoxDeviceEntity from .coordinator import FritzboxConfigEntry +from .entity import FritzBoxDeviceEntity from .model import FritzEntityDescriptionMixinBase @@ -83,20 +83,38 @@ def entity_category_temperature(device: FritzhomeDevice) -> EntityCategory | Non return None -def value_nextchange_preset(device: FritzhomeDevice) -> str: +def value_nextchange_preset(device: FritzhomeDevice) -> str | None: """Return native value for next scheduled preset sensor.""" + if not device.nextchange_endperiod: + return None if device.nextchange_temperature == device.eco_temperature: return PRESET_ECO return PRESET_COMFORT -def value_scheduled_preset(device: FritzhomeDevice) -> str: +def value_scheduled_preset(device: FritzhomeDevice) -> str | None: """Return native value for current scheduled preset sensor.""" + if not device.nextchange_endperiod: + return None if device.nextchange_temperature == device.eco_temperature: return PRESET_COMFORT return PRESET_ECO +def value_nextchange_temperature(device: FritzhomeDevice) -> float | None: + """Return native value for next scheduled temperature time sensor.""" + if device.nextchange_endperiod and isinstance(device.nextchange_temperature, float): + return device.nextchange_temperature + return None + + +def value_nextchange_time(device: FritzhomeDevice) -> datetime | None: + """Return native value for next scheduled changed time sensor.""" + if device.nextchange_endperiod: + return utc_from_timestamp(device.nextchange_endperiod) + return None + + SENSOR_TYPES: Final[tuple[FritzSensorEntityDescription, ...]] = ( FritzSensorEntityDescription( key="temperature", @@ -181,7 +199,7 @@ SENSOR_TYPES: Final[tuple[FritzSensorEntityDescription, ...]] = ( device_class=SensorDeviceClass.TEMPERATURE, entity_category=EntityCategory.DIAGNOSTIC, suitable=suitable_nextchange_temperature, - native_value=lambda device: device.nextchange_temperature, + native_value=value_nextchange_temperature, ), FritzSensorEntityDescription( key="nextchange_time", @@ -189,7 +207,7 @@ SENSOR_TYPES: Final[tuple[FritzSensorEntityDescription, ...]] = ( device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, suitable=suitable_nextchange_time, - native_value=lambda device: utc_from_timestamp(device.nextchange_endperiod), + native_value=value_nextchange_time, ), FritzSensorEntityDescription( key="nextchange_preset", diff 
--git a/homeassistant/components/fritzbox/strings.json b/homeassistant/components/fritzbox/strings.json index d4f59fd1c08..c7c2439b566 100644 --- a/homeassistant/components/fritzbox/strings.json +++ b/homeassistant/components/fritzbox/strings.json @@ -27,7 +27,7 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reconfigure_confirm": { + "reconfigure": { "description": "Update your configuration information for {name}.", "data": { "host": "[%key:common::config_flow::data::host%]" @@ -47,6 +47,7 @@ "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" } }, diff --git a/homeassistant/components/fritzbox/switch.py b/homeassistant/components/fritzbox/switch.py index d13f21e1c14..18b676d449e 100644 --- a/homeassistant/components/fritzbox/switch.py +++ b/homeassistant/components/fritzbox/switch.py @@ -9,9 +9,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FritzBoxDeviceEntity from .const import DOMAIN from .coordinator import FritzboxConfigEntry +from .entity import FritzBoxDeviceEntity async def async_setup_entry( diff --git a/homeassistant/components/fritzbox_callmonitor/__init__.py b/homeassistant/components/fritzbox_callmonitor/__init__.py index b33ba94cf16..b1b5db48216 100644 --- a/homeassistant/components/fritzbox_callmonitor/__init__.py +++ b/homeassistant/components/fritzbox_callmonitor/__init__.py @@ -8,7 +8,7 @@ from requests.exceptions import ConnectionError as RequestsConnectionError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from .base import FritzBoxPhonebook from .const import CONF_PHONEBOOK, CONF_PREFIXES, PLATFORMS @@ -42,8 +42,7 @@ async def async_setup_entry( ) return False except FritzConnectionException as ex: - _LOGGER.error("Invalid authentication: %s", ex) - return False + raise ConfigEntryAuthFailed from ex except RequestsConnectionError as ex: _LOGGER.error("Unable to connect to AVM FRITZ!Box call monitor: %s", ex) raise ConfigEntryNotReady from ex diff --git a/homeassistant/components/fritzbox_callmonitor/base.py b/homeassistant/components/fritzbox_callmonitor/base.py index 72d17b57abc..3c8714624e7 100644 --- a/homeassistant/components/fritzbox_callmonitor/base.py +++ b/homeassistant/components/fritzbox_callmonitor/base.py @@ -3,6 +3,7 @@ from __future__ import annotations from contextlib import suppress +from dataclasses import dataclass from datetime import timedelta import logging import re @@ -19,12 +20,33 @@ _LOGGER = logging.getLogger(__name__) MIN_TIME_PHONEBOOK_UPDATE = timedelta(hours=6) +@dataclass +class Contact: + """Store details for one phonebook contact.""" + + name: str + numbers: list[str] + vip: bool + + def __init__( + self, name: str, numbers: list[str] | None = None, category: str | None = None + ) -> None: + """Initialize the class.""" + self.name = name + self.numbers = [re.sub(REGEX_NUMBER, "", nr) for nr in numbers or ()] + self.vip = category == "1" + + +unknown_contact = Contact(UNKNOWN_NAME) + + class FritzBoxPhonebook: 
"""Connects to a FritzBox router and downloads its phone book.""" fph: FritzPhonebook phonebook_dict: dict[str, list[str]] - number_dict: dict[str, str] + contacts: list[Contact] + number_dict: dict[str, Contact] def __init__( self, @@ -56,27 +78,27 @@ class FritzBoxPhonebook: if self.phonebook_id is None: return - self.phonebook_dict = self.fph.get_all_names(self.phonebook_id) - self.number_dict = { - re.sub(REGEX_NUMBER, "", nr): name - for name, nrs in self.phonebook_dict.items() - for nr in nrs - } - _LOGGER.info("Fritz!Box phone book successfully updated") + self.fph.get_all_name_numbers(self.phonebook_id) + self.contacts = [ + Contact(c.name, c.numbers, getattr(c, "category", None)) + for c in self.fph.phonebook.contacts + ] + self.number_dict = {nr: c for c in self.contacts for nr in c.numbers} + _LOGGER.debug("Fritz!Box phone book successfully updated") def get_phonebook_ids(self) -> list[int]: """Return list of phonebook ids.""" return self.fph.phonebook_ids # type: ignore[no-any-return] - def get_name(self, number: str) -> str: - """Return a name for a given phone number.""" + def get_contact(self, number: str) -> Contact: + """Return a contact for a given phone number.""" number = re.sub(REGEX_NUMBER, "", str(number)) with suppress(KeyError): return self.number_dict[number] if not self.prefixes: - return UNKNOWN_NAME + return unknown_contact for prefix in self.prefixes: with suppress(KeyError): @@ -84,4 +106,4 @@ class FritzBoxPhonebook: with suppress(KeyError): return self.number_dict[prefix + number.lstrip("0")] - return UNKNOWN_NAME + return unknown_contact diff --git a/homeassistant/components/fritzbox_callmonitor/config_flow.py b/homeassistant/components/fritzbox_callmonitor/config_flow.py index 019326d840c..8435eff3e18 100644 --- a/homeassistant/components/fritzbox_callmonitor/config_flow.py +++ b/homeassistant/components/fritzbox_callmonitor/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from enum import StrEnum from typing import Any, cast @@ -11,19 +12,12 @@ from requests.exceptions import ConnectionError as RequestsConnectionError import voluptuous as vol from homeassistant.config_entries import ( - SOURCE_IMPORT, ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PASSWORD, - CONF_PORT, - CONF_USERNAME, -) +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME from homeassistant.core import callback from .base import FritzBoxPhonebook @@ -65,6 +59,7 @@ class FritzBoxCallMonitorConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + _entry: ConfigEntry _host: str _port: int _username: str @@ -139,7 +134,7 @@ class FritzBoxCallMonitorConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> FritzBoxCallMonitorOptionsFlowHandler: """Get the options flow for this handler.""" - return FritzBoxCallMonitorOptionsFlowHandler(config_entry) + return FritzBoxCallMonitorOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -168,16 +163,11 @@ class FritzBoxCallMonitorConfigFlow(ConfigFlow, domain=DOMAIN): if result != ConnectResult.SUCCESS: return self.async_abort(reason=result) - if self.context["source"] == SOURCE_IMPORT: - self._phonebook_id = user_input[CONF_PHONEBOOK] - self._phonebook_name = user_input[CONF_NAME] - - elif len(self._phonebook_ids) > 1: + if len(self._phonebook_ids) > 1: return await self.async_step_phonebook() - else: - self._phonebook_id = 
DEFAULT_PHONEBOOK - self._phonebook_name = await self._get_name_of_phonebook(self._phonebook_id) + self._phonebook_id = DEFAULT_PHONEBOOK + self._phonebook_name = await self._get_name_of_phonebook(self._phonebook_id) await self.async_set_unique_id(f"{self._serial_number}-{self._phonebook_id}") self._abort_if_unique_id_configured() @@ -209,14 +199,73 @@ class FritzBoxCallMonitorConfigFlow(ConfigFlow, domain=DOMAIN): return self._get_config_entry() + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle flow upon an API authentication error.""" + self._entry = self._get_reauth_entry() + self._host = entry_data[CONF_HOST] + self._port = entry_data[CONF_PORT] + self._username = entry_data[CONF_USERNAME] + self._password = entry_data[CONF_PASSWORD] + self._phonebook_id = entry_data[CONF_PHONEBOOK] + + return await self.async_step_reauth_confirm() + + def _show_setup_form_reauth_confirm( + self, user_input: dict[str, Any], errors: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Show the reauth form to the user.""" + default_username = user_input.get(CONF_USERNAME) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_USERNAME, default=default_username): str, + vol.Required(CONF_PASSWORD): str, + } + ), + description_placeholders={"host": self._host}, + errors=errors or {}, + ) + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + if user_input is None: + return self._show_setup_form_reauth_confirm( + user_input={CONF_USERNAME: self._username} + ) + + self._username = user_input[CONF_USERNAME] + self._password = user_input[CONF_PASSWORD] + + if ( + error := await self.hass.async_add_executor_job(self._try_connect) + ) is not ConnectResult.SUCCESS: + return self._show_setup_form_reauth_confirm( + user_input=user_input, errors={"base": error} + ) + + self.hass.config_entries.async_update_entry( + self._entry, + data={ + CONF_HOST: self._host, + CONF_PORT: self._port, + CONF_USERNAME: self._username, + CONF_PASSWORD: self._password, + CONF_PHONEBOOK: self._phonebook_id, + SERIAL_NUMBER: self._serial_number, + }, + ) + await self.hass.config_entries.async_reload(self._entry.entry_id) + return self.async_abort(reason="reauth_successful") + class FritzBoxCallMonitorOptionsFlowHandler(OptionsFlow): """Handle a fritzbox_callmonitor options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - @classmethod def _are_prefixes_valid(cls, prefixes: str | None) -> bool: """Check if prefixes are valid.""" diff --git a/homeassistant/components/fritzbox_callmonitor/manifest.json b/homeassistant/components/fritzbox_callmonitor/manifest.json index 4e5c60091c9..06492647c30 100644 --- a/homeassistant/components/fritzbox_callmonitor/manifest.json +++ b/homeassistant/components/fritzbox_callmonitor/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["fritzconnection"], - "requirements": ["fritzconnection[qr]==1.13.2"] + "requirements": ["fritzconnection[qr]==1.14.0"] } diff --git a/homeassistant/components/fritzbox_callmonitor/sensor.py b/homeassistant/components/fritzbox_callmonitor/sensor.py index 668369c35a7..df18ae5702a 100644 --- a/homeassistant/components/fritzbox_callmonitor/sensor.py +++ b/homeassistant/components/fritzbox_callmonitor/sensor.py @@ -20,7 
+20,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import FritzBoxCallMonitorConfigEntry -from .base import FritzBoxPhonebook +from .base import Contact, FritzBoxPhonebook from .const import ( ATTR_PREFIXES, CONF_PHONEBOOK, @@ -96,7 +96,7 @@ class FritzBoxCallSensor(SensorEntity): self._host = host self._port = port self._monitor: FritzBoxCallMonitor | None = None - self._attributes: dict[str, str | list[str]] = {} + self._attributes: dict[str, str | list[str] | bool] = {} self._attr_translation_placeholders = {"phonebook_name": phonebook_name} self._attr_unique_id = unique_id @@ -152,20 +152,20 @@ class FritzBoxCallSensor(SensorEntity): """Set the state.""" self._attr_native_value = state - def set_attributes(self, attributes: Mapping[str, str]) -> None: + def set_attributes(self, attributes: Mapping[str, str | bool]) -> None: """Set the state attributes.""" self._attributes = {**attributes} @property - def extra_state_attributes(self) -> dict[str, str | list[str]]: + def extra_state_attributes(self) -> dict[str, str | list[str] | bool]: """Return the state attributes.""" if self._prefixes: self._attributes[ATTR_PREFIXES] = self._prefixes return self._attributes - def number_to_name(self, number: str) -> str: - """Return a name for a given phone number.""" - return self._fritzbox_phonebook.get_name(number) + def number_to_contact(self, number: str) -> Contact: + """Return a contact for a given phone number.""" + return self._fritzbox_phonebook.get_contact(number) def update(self) -> None: """Update the phonebook if it is defined.""" @@ -225,35 +225,42 @@ class FritzBoxCallMonitor: df_in = "%d.%m.%y %H:%M:%S" df_out = "%Y-%m-%dT%H:%M:%S" isotime = datetime.strptime(line[0], df_in).strftime(df_out) + att: dict[str, str | bool] if line[1] == FritzState.RING: self._sensor.set_state(CallState.RINGING) + contact = self._sensor.number_to_contact(line[3]) att = { "type": "incoming", "from": line[3], "to": line[4], "device": line[5], "initiated": isotime, - "from_name": self._sensor.number_to_name(line[3]), + "from_name": contact.name, + "vip": contact.vip, } self._sensor.set_attributes(att) elif line[1] == FritzState.CALL: self._sensor.set_state(CallState.DIALING) + contact = self._sensor.number_to_contact(line[5]) att = { "type": "outgoing", "from": line[4], "to": line[5], "device": line[6], "initiated": isotime, - "to_name": self._sensor.number_to_name(line[5]), + "to_name": contact.name, + "vip": contact.vip, } self._sensor.set_attributes(att) elif line[1] == FritzState.CONNECT: self._sensor.set_state(CallState.TALKING) + contact = self._sensor.number_to_contact(line[4]) att = { "with": line[4], "device": line[3], "accepted": isotime, - "with_name": self._sensor.number_to_name(line[4]), + "with_name": contact.name, + "vip": contact.vip, } self._sensor.set_attributes(att) elif line[1] == FritzState.DISCONNECT: diff --git a/homeassistant/components/fritzbox_callmonitor/strings.json b/homeassistant/components/fritzbox_callmonitor/strings.json index bcfa945e1df..437b218a8e2 100644 --- a/homeassistant/components/fritzbox_callmonitor/strings.json +++ b/homeassistant/components/fritzbox_callmonitor/strings.json @@ -17,14 +17,22 @@ "data": { "phonebook": "Phonebook" } + }, + "reauth_confirm": { + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } } }, "abort": { "already_configured": 
"[%key:common::config_flow::abort::already_configured_device%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", - "insufficient_permissions": "User has insufficient permissions to access AVM FRITZ!Box settings and its phonebooks." + "insufficient_permissions": "User has insufficient permissions to access AVM FRITZ!Box settings and its phonebooks.", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { + "insufficient_permissions": "[%key:component::fritzbox_callmonitor::config::abort::insufficient_permissions%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" } }, @@ -70,7 +78,8 @@ "accepted": { "name": "Accepted" }, "with_name": { "name": "With name" }, "duration": { "name": "Duration" }, - "closed": { "name": "Closed" } + "closed": { "name": "Closed" }, + "vip": { "name": "Important" } } } } diff --git a/homeassistant/components/fronius/__init__.py b/homeassistant/components/fronius/__init__.py index 07271b91f28..4ba893df85c 100644 --- a/homeassistant/components/fronius/__init__.py +++ b/homeassistant/components/fronius/__init__.py @@ -60,7 +60,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: FroniusConfigEntry) -> async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry + hass: HomeAssistant, config_entry: FroniusConfigEntry, device_entry: dr.DeviceEntry ) -> bool: """Remove a config entry from a device.""" return True @@ -199,7 +199,10 @@ class FroniusSolarNet: name=_inverter_name, inverter_info=_inverter_info, ) - await _coordinator.async_config_entry_first_refresh() + if self.config_entry.state == ConfigEntryState.LOADED: + await _coordinator.async_refresh() + else: + await _coordinator.async_config_entry_first_refresh() self.inverter_coordinators.append(_coordinator) # Only for re-scans. 
Initial setup adds entities through sensor.async_setup_entry @@ -223,7 +226,14 @@ class FroniusSolarNet: _LOGGER.debug("Re-scan failed for %s", self.host) return inverter_infos - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="entry_cannot_connect", + translation_placeholders={ + "host": self.host, + "fronius_error": str(err), + }, + ) from err for inverter in _inverter_info["inverters"]: solar_net_id = inverter["device_id"]["value"] diff --git a/homeassistant/components/fronius/config_flow.py b/homeassistant/components/fronius/config_flow.py index b16f43d58e8..ccc15d80401 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -10,7 +10,7 @@ from pyfronius import Fronius, FroniusError import voluptuous as vol from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -52,11 +52,9 @@ async def validate_host( try: inverter_info = await fronius.inverter_info() first_inverter = next(inverter for inverter in inverter_info["inverters"]) - except FroniusError as err: + except (FroniusError, StopIteration) as err: _LOGGER.debug(err) raise CannotConnect from err - except StopIteration as err: - raise CannotConnect("No supported Fronius SolarNet device found.") from err first_inverter_uid: str = first_inverter["unique_id"]["value"] return first_inverter_uid, FroniusConfigEntryData( host=host, @@ -72,7 +70,6 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize flow.""" self.info: FroniusConfigEntryData - self._entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -90,7 +87,7 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" else: await self.async_set_unique_id(unique_id, raise_on_progress=False) - self._abort_if_unique_id_configured(updates=dict(info)) + self._abort_if_unique_id_configured() return self.async_create_entry(title=create_title(info), data=info) @@ -145,6 +142,7 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Add reconfigure step to allow to reconfigure a config entry.""" errors = {} + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: try: @@ -155,33 +153,16 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - # Config didn't change or is already configured in another entry - self._async_abort_entries_match(dict(info)) + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_mismatch() - existing_entry = await self.async_set_unique_id( - unique_id, raise_on_progress=False - ) - assert self._entry is not None - if existing_entry and existing_entry.entry_id != self._entry.entry_id: - # Uid of device is already configured in another entry (but with different host) - self._abort_if_unique_id_configured() + return self.async_update_reload_and_abort(reconfigure_entry, data=info) - return self.async_update_reload_and_abort( - self._entry, - data=info, - reason="reconfigure_successful", - ) - - if self._entry is None: - self._entry = self.hass.config_entries.async_get_entry( - 
self.context["entry_id"] - ) - assert self._entry is not None - host = self._entry.data[CONF_HOST] + host = reconfigure_entry.data[CONF_HOST] return self.async_show_form( step_id="reconfigure", data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): str}), - description_placeholders={"device": self._entry.title}, + description_placeholders={"device": reconfigure_entry.title}, errors=errors, ) diff --git a/homeassistant/components/fronius/const.py b/homeassistant/components/fronius/const.py index 083085270e0..273f1acab41 100644 --- a/homeassistant/components/fronius/const.py +++ b/homeassistant/components/fronius/const.py @@ -42,8 +42,6 @@ class InverterStatusCodeOption(StrEnum): IDLE = "idle" READY = "ready" SLEEPING = "sleeping" - UNKNOWN = "unknown" - INVALID = "invalid" _INVERTER_STATUS_CODES: Final[dict[int, InverterStatusCodeOption]] = { @@ -61,13 +59,13 @@ _INVERTER_STATUS_CODES: Final[dict[int, InverterStatusCodeOption]] = { 11: InverterStatusCodeOption.IDLE, 12: InverterStatusCodeOption.READY, 13: InverterStatusCodeOption.SLEEPING, - 255: InverterStatusCodeOption.UNKNOWN, + # 255: "Unknown" is handled by `None` state - same as the invalid codes. } -def get_inverter_status_message(code: StateType) -> InverterStatusCodeOption: +def get_inverter_status_message(code: StateType) -> InverterStatusCodeOption | None: """Return a status message for a given status code.""" - return _INVERTER_STATUS_CODES.get(code, InverterStatusCodeOption.INVALID) # type: ignore[arg-type] + return _INVERTER_STATUS_CODES.get(code) # type: ignore[arg-type] class MeterLocationCodeOption(StrEnum): diff --git a/homeassistant/components/fronius/coordinator.py b/homeassistant/components/fronius/coordinator.py index c3dea123a77..d4f1fc6c230 100644 --- a/homeassistant/components/fronius/coordinator.py +++ b/homeassistant/components/fronius/coordinator.py @@ -13,6 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( + DOMAIN, SOLAR_NET_ID_POWER_FLOW, SOLAR_NET_ID_SYSTEM, FroniusDeviceInfo, @@ -67,7 +68,11 @@ class FroniusCoordinatorBase( self._failed_update_count += 1 if self._failed_update_count == self.MAX_FAILED_UPDATES: self.update_interval = self.error_interval - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"fronius_error": str(err)}, + ) from err if self._failed_update_count != 0: self._failed_update_count = 0 diff --git a/homeassistant/components/fronius/manifest.json b/homeassistant/components/fronius/manifest.json index c2f635119aa..227234f9937 100644 --- a/homeassistant/components/fronius/manifest.json +++ b/homeassistant/components/fronius/manifest.json @@ -11,6 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/fronius", "iot_class": "local_polling", "loggers": ["pyfronius"], - "quality_scale": "platinum", "requirements": ["PyFronius==0.7.3"] } diff --git a/homeassistant/components/fronius/quality_scale.yaml b/homeassistant/components/fronius/quality_scale.yaml new file mode 100644 index 00000000000..2c4b892475b --- /dev/null +++ b/homeassistant/components/fronius/quality_scale.yaml @@ -0,0 +1,89 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. 
+ appropriate-polling: done + brands: done + common-modules: + status: done + comment: | + Single platform only, so no entity.py file. + CoordinatorEntity is used. + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + This integration does not subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not provide configuration options. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: done + comment: | + Coordinators are used and an asyncio.Lock mutex across them ensures proper + rate limiting. Platforms are read-only. + reauthentication-flow: + status: exempt + comment: | + This integration doesn't require authentication. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + This integration doesn't have any known user-repairable issues. + stale-devices: done + # Platinum + async-dependency: done + inject-websession: done + strict-typing: + status: todo + comment: | + The pyfronius library isn't strictly typed and doesn't export type information. diff --git a/homeassistant/components/fronius/sensor.py b/homeassistant/components/fronius/sensor.py index c8a840b1c2c..95c5df269e4 100644 --- a/homeassistant/components/fronius/sensor.py +++ b/homeassistant/components/fronius/sensor.py @@ -54,6 +54,9 @@ if TYPE_CHECKING: FroniusStorageUpdateCoordinator, ) + +PARALLEL_UPDATES = 0 + ENERGY_VOLT_AMPERE_REACTIVE_HOUR: Final = "varh" diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index ccfb88852a8..e2740c76696 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -3,10 +3,12 @@ "flow_title": "{device}", "step": { "user": { - "title": "Fronius SolarNet", - "description": "Configure the IP address or local hostname of your Fronius device.", + "description": "Configure your Fronius SolarAPI device.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The IP address or hostname of your Fronius device."
} }, "confirm_discovery": { @@ -16,6 +18,9 @@ "description": "Update your configuration information for {device}.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "[%key:component::fronius::config::step::user::data_description::host%]" } } }, @@ -26,7 +31,8 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "invalid_host": "[%key:common::config_flow::error::invalid_host%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The identifier does not match the previous identifier" } }, "entity": { @@ -40,9 +46,6 @@ "energy_total": { "name": "Total energy" }, - "frequency_ac": { - "name": "[%key:component::sensor::entity_component::frequency::name%]" - }, "current_ac": { "name": "AC current" }, @@ -83,9 +86,7 @@ "error": "Error", "idle": "Idle", "ready": "Ready", - "sleeping": "Sleeping", - "unknown": "Unknown", - "invalid": "Invalid" + "sleeping": "Sleeping" } }, "led_state": { @@ -155,9 +156,6 @@ "power_apparent_phase_3": { "name": "Apparent power phase 3" }, - "power_apparent": { - "name": "[%key:component::sensor::entity_component::apparent_power::name%]" - }, "power_factor_phase_1": { "name": "Power factor phase 1" }, @@ -167,9 +165,6 @@ "power_factor_phase_3": { "name": "Power factor phase 3" }, - "power_factor": { - "name": "[%key:component::sensor::entity_component::power_factor::name%]" - }, "power_reactive_phase_1": { "name": "Reactive power phase 1" }, @@ -215,12 +210,6 @@ "energy_real_ac_consumed": { "name": "Energy consumed" }, - "power_real_ac": { - "name": "[%key:component::sensor::entity_component::power::name%]" - }, - "temperature_channel_1": { - "name": "[%key:component::sensor::entity_component::temperature::name%]" - }, "state_code": { "name": "State code" }, @@ -275,7 +264,7 @@ "name": "Relative self consumption" }, "capacity_maximum": { - "name": "Maximum capacity " + "name": "Maximum capacity" }, "capacity_designed": { "name": "Designed capacity" @@ -293,5 +282,13 @@ "name": "[%key:component::sensor::entity_component::temperature::name%]" } } + }, + "exceptions": { + "entry_cannot_connect": { + "message": "Failed to connect to Fronius device at {host}: {fronius_error}" + }, + "update_failed": { + "message": "An error occurred while attempting to fetch data: {fronius_error}" + } } } diff --git a/homeassistant/components/frontend/__init__.py b/homeassistant/components/frontend/__init__.py index c5df84cf549..c1098ac19d3 100644 --- a/homeassistant/components/frontend/__init__.py +++ b/homeassistant/components/frontend/__init__.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Callable, Iterator -from functools import cached_property, lru_cache, partial +from functools import lru_cache, partial import logging import os import pathlib @@ -11,12 +11,13 @@ from typing import Any, TypedDict from aiohttp import hdrs, web, web_urldispatcher import jinja2 +from propcache import cached_property import voluptuous as vol from yarl import URL from homeassistant.components import onboarding, websocket_api from homeassistant.components.http import KEY_HASS, HomeAssistantView, StaticPathConfig -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.config import async_hass_config_yaml from 
homeassistant.const import ( CONF_MODE, diff --git a/homeassistant/components/frontend/icons.json b/homeassistant/components/frontend/icons.json index 9fbe4d5b9b0..b4bcdef6194 100644 --- a/homeassistant/components/frontend/icons.json +++ b/homeassistant/components/frontend/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_theme": "mdi:palette-swatch", - "reload_themes": "mdi:reload" + "set_theme": { + "service": "mdi:palette-swatch" + }, + "reload_themes": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 035b087e481..1f9988dff38 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240809.0"] + "requirements": ["home-assistant-frontend==20241127.8"] } diff --git a/homeassistant/components/frontend/storage.py b/homeassistant/components/frontend/storage.py index d387e14b085..cbcc3024aa7 100644 --- a/homeassistant/components/frontend/storage.py +++ b/homeassistant/components/frontend/storage.py @@ -9,7 +9,7 @@ from typing import Any import voluptuous as vol from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.storage import Store diff --git a/homeassistant/components/frontier_silicon/config_flow.py b/homeassistant/components/frontier_silicon/config_flow.py index 103323ff575..0612419fc33 100644 --- a/homeassistant/components/frontier_silicon/config_flow.py +++ b/homeassistant/components/frontier_silicon/config_flow.py @@ -16,7 +16,7 @@ from afsapi import ( import voluptuous as vol from homeassistant.components import ssdp -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT from .const import ( @@ -58,7 +58,6 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): _name: str _webfsapi_url: str - _reauth_entry: ConfigEntry | None = None # Only used in reauth flows async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -101,8 +100,9 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): if device_hostname == hostname_from_url(entry.data[CONF_WEBFSAPI_URL]): return self.async_abort(reason="already_configured") - speaker_name = discovery_info.ssdp_headers.get(SSDP_ATTR_SPEAKER_NAME) - self.context["title_placeholders"] = {"name": speaker_name} + if speaker_name := discovery_info.ssdp_headers.get(SSDP_ATTR_SPEAKER_NAME): + # If we have a name, use it as flow title + self.context["title_placeholders"] = {"name": speaker_name} try: self._webfsapi_url = await AFSAPI.get_webfsapi_endpoint(device_url) @@ -172,14 +172,11 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): step_id="confirm", description_placeholders={"name": self._name} ) - async def async_step_reauth(self, config: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._webfsapi_url = 
config[CONF_WEBFSAPI_URL] - - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - + self._webfsapi_url = entry_data[CONF_WEBFSAPI_URL] return await self.async_step_device_config() async def async_step_device_config( @@ -210,13 +207,11 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self._reauth_entry: - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data={CONF_PIN: user_input[CONF_PIN]}, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_PIN: user_input[CONF_PIN]}, ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") try: unique_id = await afsapi.get_radio_id() diff --git a/homeassistant/components/frontier_silicon/media_player.py b/homeassistant/components/frontier_silicon/media_player.py index cb02d430230..8407e0a869d 100644 --- a/homeassistant/components/frontier_silicon/media_player.py +++ b/homeassistant/components/frontier_silicon/media_player.py @@ -118,7 +118,7 @@ class AFSAPIDevice(MediaPlayerEntity): return if not self._attr_available: - _LOGGER.info( + _LOGGER.warning( "Reconnected to %s", self.name or afsapi.webfsapi_endpoint, ) diff --git a/homeassistant/components/fujitsu_fglair/__init__.py b/homeassistant/components/fujitsu_fglair/__init__.py index bd891f05b8d..f25e01bcd11 100644 --- a/homeassistant/components/fujitsu_fglair/__init__.py +++ b/homeassistant/components/fujitsu_fglair/__init__.py @@ -5,13 +5,14 @@ from __future__ import annotations from contextlib import suppress from ayla_iot_unofficial import new_ayla_api +from ayla_iot_unofficial.fujitsu_consts import FGLAIR_APP_CREDENTIALS from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import aiohttp_client -from .const import API_TIMEOUT, CONF_EUROPE, FGLAIR_APP_ID, FGLAIR_APP_SECRET +from .const import API_TIMEOUT, CONF_EUROPE, CONF_REGION, REGION_DEFAULT, REGION_EU from .coordinator import FGLairCoordinator PLATFORMS: list[Platform] = [Platform.CLIMATE] @@ -21,12 +22,13 @@ type FGLairConfigEntry = ConfigEntry[FGLairCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: FGLairConfigEntry) -> bool: """Set up Fujitsu HVAC (based on Ayla IOT) from a config entry.""" + app_id, app_secret = FGLAIR_APP_CREDENTIALS[entry.data[CONF_REGION]] api = new_ayla_api( entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], - FGLAIR_APP_ID, - FGLAIR_APP_SECRET, - europe=entry.data[CONF_EUROPE], + app_id, + app_secret, + europe=entry.data[CONF_REGION] == REGION_EU, websession=aiohttp_client.async_get_clientsession(hass), timeout=API_TIMEOUT, ) @@ -47,3 +49,24 @@ async def async_unload_entry(hass: HomeAssistant, entry: FGLairConfigEntry) -> b await entry.runtime_data.api.async_sign_out() return unload_ok + + +async def async_migrate_entry(hass: HomeAssistant, entry: FGLairConfigEntry) -> bool: + """Migrate old entry.""" + if entry.version > 1: + return False + + if entry.version == 1: + new_data = {**entry.data} + if entry.minor_version < 2: + is_europe = new_data.get(CONF_EUROPE, False) + if is_europe: + new_data[CONF_REGION] = REGION_EU + else: + new_data[CONF_REGION] = REGION_DEFAULT + + hass.config_entries.async_update_entry( + entry, data=new_data, minor_version=2, 
version=1 + ) + + return True diff --git a/homeassistant/components/fujitsu_fglair/climate.py b/homeassistant/components/fujitsu_fglair/climate.py index 558f4b73a18..5359075c728 100644 --- a/homeassistant/components/fujitsu_fglair/climate.py +++ b/homeassistant/components/fujitsu_fglair/climate.py @@ -2,9 +2,23 @@ from typing import Any -from ayla_iot_unofficial.fujitsu_hvac import Capability, FujitsuHVAC +from ayla_iot_unofficial.fujitsu_hvac import ( + Capability, + FanSpeed, + FujitsuHVAC, + OpMode, + SwingMode, +) from homeassistant.components.climate import ( + FAN_AUTO, + FAN_HIGH, + FAN_LOW, + FAN_MEDIUM, + SWING_BOTH, + SWING_HORIZONTAL, + SWING_OFF, + SWING_VERTICAL, ClimateEntity, ClimateEntityFeature, HVACMode, @@ -16,17 +30,35 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import FGLairConfigEntry -from .const import ( - DOMAIN, - FUJI_TO_HA_FAN, - FUJI_TO_HA_HVAC, - FUJI_TO_HA_SWING, - HA_TO_FUJI_FAN, - HA_TO_FUJI_HVAC, - HA_TO_FUJI_SWING, -) +from .const import DOMAIN from .coordinator import FGLairCoordinator +HA_TO_FUJI_FAN = { + FAN_LOW: FanSpeed.LOW, + FAN_MEDIUM: FanSpeed.MEDIUM, + FAN_HIGH: FanSpeed.HIGH, + FAN_AUTO: FanSpeed.AUTO, +} +FUJI_TO_HA_FAN = {value: key for key, value in HA_TO_FUJI_FAN.items()} + +HA_TO_FUJI_HVAC = { + HVACMode.OFF: OpMode.OFF, + HVACMode.HEAT: OpMode.HEAT, + HVACMode.COOL: OpMode.COOL, + HVACMode.HEAT_COOL: OpMode.AUTO, + HVACMode.DRY: OpMode.DRY, + HVACMode.FAN_ONLY: OpMode.FAN, +} +FUJI_TO_HA_HVAC = {value: key for key, value in HA_TO_FUJI_HVAC.items()} + +HA_TO_FUJI_SWING = { + SWING_OFF: SwingMode.OFF, + SWING_VERTICAL: SwingMode.SWING_VERTICAL, + SWING_HORIZONTAL: SwingMode.SWING_HORIZONTAL, + SWING_BOTH: SwingMode.SWING_BOTH, +} +FUJI_TO_HA_SWING = {value: key for key, value in HA_TO_FUJI_SWING.items()} + async def async_setup_entry( hass: HomeAssistant, @@ -49,8 +81,6 @@ class FGLairDevice(CoordinatorEntity[FGLairCoordinator], ClimateEntity): _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility: bool = False - def __init__(self, coordinator: FGLairCoordinator, device: FujitsuHVAC) -> None: """Store the representation of the device and set the static attributes.""" super().__init__(coordinator, context=device.device_serial_number) diff --git a/homeassistant/components/fujitsu_fglair/config_flow.py b/homeassistant/components/fujitsu_fglair/config_flow.py index db1975298a8..c4b097ff0de 100644 --- a/homeassistant/components/fujitsu_fglair/config_flow.py +++ b/homeassistant/components/fujitsu_fglair/config_flow.py @@ -5,13 +5,15 @@ import logging from typing import Any from ayla_iot_unofficial import AylaAuthError, new_ayla_api +from ayla_iot_unofficial.fujitsu_consts import FGLAIR_APP_CREDENTIALS import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig -from .const import API_TIMEOUT, CONF_EUROPE, DOMAIN, FGLAIR_APP_ID, FGLAIR_APP_SECRET +from .const import API_TIMEOUT, CONF_REGION, DOMAIN, REGION_DEFAULT, REGION_EU _LOGGER = logging.getLogger(__name__) @@ -20,7 +22,12 @@ STEP_USER_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, - 
vol.Required(CONF_EUROPE): bool, + vol.Required(CONF_REGION, default=REGION_DEFAULT): SelectSelector( + SelectSelectorConfig( + options=[region.lower() for region in FGLAIR_APP_CREDENTIALS], + translation_key=CONF_REGION, + ) + ), } ) STEP_REAUTH_DATA_SCHEMA = vol.Schema( @@ -33,18 +40,19 @@ STEP_REAUTH_DATA_SCHEMA = vol.Schema( class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Fujitsu HVAC (based on Ayla IOT).""" - _reauth_entry: ConfigEntry | None = None + MINOR_VERSION = 2 async def _async_validate_credentials( self, user_input: dict[str, Any] ) -> dict[str, str]: errors: dict[str, str] = {} + app_id, app_secret = FGLAIR_APP_CREDENTIALS[user_input[CONF_REGION]] api = new_ayla_api( user_input[CONF_USERNAME], user_input[CONF_PASSWORD], - FGLAIR_APP_ID, - FGLAIR_APP_SECRET, - europe=user_input[CONF_EUROPE], + app_id, + app_secret, + europe=user_input[CONF_REGION] == REGION_EU, websession=aiohttp_client.async_get_clientsession(self.hass), timeout=API_TIMEOUT, ) @@ -70,7 +78,7 @@ class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() errors = await self._async_validate_credentials(user_input) - if len(errors) == 0: + if not errors: return self.async_create_entry( title=f"FGLair ({user_input[CONF_USERNAME]})", data=user_input, @@ -84,9 +92,6 @@ class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -94,25 +99,23 @@ class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() if user_input: - reauth_data = { - **self._reauth_entry.data, - CONF_PASSWORD: user_input[CONF_PASSWORD], - } - errors = await self._async_validate_credentials(reauth_data) + errors = await self._async_validate_credentials( + reauth_entry.data | user_input + ) - if len(errors) == 0: + if not errors: return self.async_update_reload_and_abort( - self._reauth_entry, data=reauth_data + reauth_entry, data_updates=user_input ) return self.async_show_form( step_id="reauth_confirm", data_schema=STEP_REAUTH_DATA_SCHEMA, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME], + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], **self.context["title_placeholders"], }, errors=errors, diff --git a/homeassistant/components/fujitsu_fglair/const.py b/homeassistant/components/fujitsu_fglair/const.py index 0e93361f20b..73c811a1ed5 100644 --- a/homeassistant/components/fujitsu_fglair/const.py +++ b/homeassistant/components/fujitsu_fglair/const.py @@ -2,53 +2,12 @@ from datetime import timedelta -from ayla_iot_unofficial.fujitsu_consts import ( # noqa: F401 - FGLAIR_APP_ID, - FGLAIR_APP_SECRET, -) -from ayla_iot_unofficial.fujitsu_hvac import FanSpeed, OpMode, SwingMode - -from homeassistant.components.climate import ( - FAN_AUTO, - FAN_HIGH, - FAN_LOW, - FAN_MEDIUM, - SWING_BOTH, - SWING_HORIZONTAL, - SWING_OFF, - SWING_VERTICAL, - HVACMode, -) - API_TIMEOUT = 10 API_REFRESH = timedelta(minutes=5) DOMAIN = "fujitsu_fglair" +CONF_REGION = "region" CONF_EUROPE = "is_europe" - -HA_TO_FUJI_FAN = { - FAN_LOW: FanSpeed.LOW, - FAN_MEDIUM: FanSpeed.MEDIUM, - FAN_HIGH: FanSpeed.HIGH, - FAN_AUTO: 
FanSpeed.AUTO, -} -FUJI_TO_HA_FAN = {value: key for key, value in HA_TO_FUJI_FAN.items()} - -HA_TO_FUJI_HVAC = { - HVACMode.OFF: OpMode.OFF, - HVACMode.HEAT: OpMode.HEAT, - HVACMode.COOL: OpMode.COOL, - HVACMode.HEAT_COOL: OpMode.AUTO, - HVACMode.DRY: OpMode.DRY, - HVACMode.FAN_ONLY: OpMode.FAN, -} -FUJI_TO_HA_HVAC = {value: key for key, value in HA_TO_FUJI_HVAC.items()} - -HA_TO_FUJI_SWING = { - SWING_OFF: SwingMode.OFF, - SWING_VERTICAL: SwingMode.SWING_VERTICAL, - SWING_HORIZONTAL: SwingMode.SWING_HORIZONTAL, - SWING_BOTH: SwingMode.SWING_BOTH, -} -FUJI_TO_HA_SWING = {value: key for key, value in HA_TO_FUJI_SWING.items()} +REGION_EU = "eu" +REGION_DEFAULT = "default" diff --git a/homeassistant/components/fujitsu_fglair/coordinator.py b/homeassistant/components/fujitsu_fglair/coordinator.py index 902464bdd80..eac3cfd6ce5 100644 --- a/homeassistant/components/fujitsu_fglair/coordinator.py +++ b/homeassistant/components/fujitsu_fglair/coordinator.py @@ -47,12 +47,12 @@ class FGLairCoordinator(DataUpdateCoordinator[dict[str, FujitsuHVAC]]): except AylaAuthError as e: raise ConfigEntryAuthFailed("Credentials expired for Ayla IoT API") from e - if len(listening_entities) == 0: - devices = list(filter(lambda x: isinstance(x, FujitsuHVAC), devices)) + if not listening_entities: + devices = [dev for dev in devices if isinstance(dev, FujitsuHVAC)] else: - devices = list( - filter(lambda x: x.device_serial_number in listening_entities, devices) - ) + devices = [ + dev for dev in devices if dev.device_serial_number in listening_entities + ] try: for dev in devices: diff --git a/homeassistant/components/fujitsu_fglair/manifest.json b/homeassistant/components/fujitsu_fglair/manifest.json index 9286f7c24d9..ea08a2cfe02 100644 --- a/homeassistant/components/fujitsu_fglair/manifest.json +++ b/homeassistant/components/fujitsu_fglair/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/fujitsu_fglair", "iot_class": "cloud_polling", - "requirements": ["ayla-iot-unofficial==1.3.1"] + "requirements": ["ayla-iot-unofficial==1.4.4"] } diff --git a/homeassistant/components/fujitsu_fglair/strings.json b/homeassistant/components/fujitsu_fglair/strings.json index 8f7d775d7e4..3ad4e59ec1c 100644 --- a/homeassistant/components/fujitsu_fglair/strings.json +++ b/homeassistant/components/fujitsu_fglair/strings.json @@ -4,12 +4,9 @@ "user": { "title": "Enter your FGLair credentials", "data": { - "is_europe": "Use european servers", + "region": "Region", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" - }, - "data_description": { - "is_europe": "Allows the user to choose whether to use european servers or not since the API uses different endoint URLs for european vs non-european users" } }, "reauth_confirm": { @@ -29,5 +26,14 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } + }, + "selector": { + "region": { + "options": { + "default": "Other", + "eu": "Europe", + "cn": "China" + } + } } } diff --git a/homeassistant/components/fully_kiosk/__init__.py b/homeassistant/components/fully_kiosk/__init__.py index 99b477c2989..074ec3feaa0 100644 --- a/homeassistant/components/fully_kiosk/__init__.py +++ b/homeassistant/components/fully_kiosk/__init__.py @@ -10,6 +10,8 @@ from .const import DOMAIN from .coordinator import FullyKioskDataUpdateCoordinator from .services import 
async_setup_services +type FullyKioskConfigEntry = ConfigEntry[FullyKioskDataUpdateCoordinator] + PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, @@ -33,13 +35,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: FullyKioskConfigEntry) -> bool: """Set up Fully Kiosk Browser from a config entry.""" coordinator = FullyKioskDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) coordinator.async_update_listeners() @@ -47,10 +49,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: FullyKioskConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/fully_kiosk/binary_sensor.py b/homeassistant/components/fully_kiosk/binary_sensor.py index 3cf9adea1d5..c039baa0397 100644 --- a/homeassistant/components/fully_kiosk/binary_sensor.py +++ b/homeassistant/components/fully_kiosk/binary_sensor.py @@ -7,12 +7,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -38,13 +37,11 @@ SENSORS: tuple[BinarySensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser sensor.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data async_add_entities( FullyBinarySensor(coordinator, description) diff --git a/homeassistant/components/fully_kiosk/button.py b/homeassistant/components/fully_kiosk/button.py index 94c34b50de1..4b172d45ae2 100644 --- a/homeassistant/components/fully_kiosk/button.py +++ b/homeassistant/components/fully_kiosk/button.py @@ -13,12 +13,11 @@ from homeassistant.components.button import ( ButtonEntity, ButtonEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -68,13 +67,11 @@ BUTTONS: tuple[FullyButtonEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser button entities.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data async_add_entities( FullyButtonEntity(coordinator, description) for description in BUTTONS diff --git a/homeassistant/components/fully_kiosk/camera.py b/homeassistant/components/fully_kiosk/camera.py index d55875e094f..7dfbe9e9257 100644 --- a/homeassistant/components/fully_kiosk/camera.py +++ b/homeassistant/components/fully_kiosk/camera.py @@ -5,21 +5,22 @@ from __future__ import annotations from fullykiosk import FullyKioskError from homeassistant.components.camera import Camera, CameraEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: FullyKioskConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the cameras.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities([FullyCameraEntity(coordinator)]) diff --git a/homeassistant/components/fully_kiosk/config_flow.py b/homeassistant/components/fully_kiosk/config_flow.py index 98cf96f637e..15771d12b5d 100644 --- a/homeassistant/components/fully_kiosk/config_flow.py +++ b/homeassistant/components/fully_kiosk/config_flow.py @@ -32,6 +32,8 @@ class FullyKioskConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + host: str + def __init__(self) -> None: """Initialize the config flow.""" self._discovered_device_info: dict[str, Any] = {} @@ -135,15 +137,13 @@ class FullyKioskConfigFlow(ConfigFlow, domain=DOMAIN): """Confirm discovery.""" errors: dict[str, str] = {} if user_input is not None: - result = await self._create_entry( - self.context[CONF_HOST], user_input, errors - ) + result = await self._create_entry(self.host, user_input, errors) if result: return result placeholders = { "name": self._discovered_device_info["deviceName"], - CONF_HOST: self.context[CONF_HOST], + CONF_HOST: self.host, } self.context["title_placeholders"] = placeholders return self.async_show_form( @@ -168,6 +168,6 @@ class FullyKioskConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(device_id) self._abort_if_unique_id_configured() - self.context[CONF_HOST] = device_info["hostname4"] + self.host = device_info["hostname4"] self._discovered_device_info = device_info return await self.async_step_discovery_confirm() diff --git a/homeassistant/components/fully_kiosk/diagnostics.py b/homeassistant/components/fully_kiosk/diagnostics.py index df03cb4a7bf..c8364c77753 100644 --- a/homeassistant/components/fully_kiosk/diagnostics.py +++ b/homeassistant/components/fully_kiosk/diagnostics.py @@ -4,12 +4,11 @@ from __future__ import annotations from typing import Any -from homeassistant.components.diagnostics.util import async_redact_data -from homeassistant.config_entries import ConfigEntry +from 
homeassistant.components.diagnostics import async_redact_data from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .const import DOMAIN +from . import FullyKioskConfigEntry DEVICE_INFO_TO_REDACT = { "serial", @@ -57,10 +56,10 @@ SETTINGS_TO_REDACT = { async def async_get_device_diagnostics( - hass: HomeAssistant, entry: ConfigEntry, device: dr.DeviceEntry + hass: HomeAssistant, entry: FullyKioskConfigEntry, device: dr.DeviceEntry ) -> dict[str, Any]: """Return device diagnostics.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data data = coordinator.data data["settings"] = async_redact_data(data["settings"], SETTINGS_TO_REDACT) return async_redact_data(data, DEVICE_INFO_TO_REDACT) diff --git a/homeassistant/components/fully_kiosk/icons.json b/homeassistant/components/fully_kiosk/icons.json index 760698f7ac8..0166679abe2 100644 --- a/homeassistant/components/fully_kiosk/icons.json +++ b/homeassistant/components/fully_kiosk/icons.json @@ -1,7 +1,13 @@ { "services": { - "load_url": "mdi:link", - "set_config": "mdi:cog", - "start_application": "mdi:rocket-launch" + "load_url": { + "service": "mdi:link" + }, + "set_config": { + "service": "mdi:cog" + }, + "start_application": { + "service": "mdi:rocket-launch" + } } } diff --git a/homeassistant/components/fully_kiosk/image.py b/homeassistant/components/fully_kiosk/image.py index fbf3481e38b..00318a77ab5 100644 --- a/homeassistant/components/fully_kiosk/image.py +++ b/homeassistant/components/fully_kiosk/image.py @@ -9,13 +9,12 @@ from typing import Any from fullykiosk import FullyKiosk, FullyKioskError from homeassistant.components.image import ImageEntity, ImageEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -37,10 +36,12 @@ IMAGES: tuple[FullyImageEntityDescription, ...] 
= ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: FullyKioskConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser image entities.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( FullyImageEntity(coordinator, description) for description in IMAGES ) diff --git a/homeassistant/components/fully_kiosk/manifest.json b/homeassistant/components/fully_kiosk/manifest.json index 4d7d1a2d7da..1fbbb6656a2 100644 --- a/homeassistant/components/fully_kiosk/manifest.json +++ b/homeassistant/components/fully_kiosk/manifest.json @@ -12,5 +12,6 @@ "documentation": "https://www.home-assistant.io/integrations/fully_kiosk", "iot_class": "local_polling", "mqtt": ["fully/deviceInfo/+"], + "quality_scale": "bronze", "requirements": ["python-fullykiosk==0.0.14"] } diff --git a/homeassistant/components/fully_kiosk/media_player.py b/homeassistant/components/fully_kiosk/media_player.py index ae61a39bb81..24f002a7544 100644 --- a/homeassistant/components/fully_kiosk/media_player.py +++ b/homeassistant/components/fully_kiosk/media_player.py @@ -12,23 +12,23 @@ from homeassistant.components.media_player import ( MediaType, async_process_play_media_url, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import AUDIOMANAGER_STREAM_MUSIC, DOMAIN, MEDIA_SUPPORT_FULLYKIOSK +from . import FullyKioskConfigEntry +from .const import AUDIOMANAGER_STREAM_MUSIC, MEDIA_SUPPORT_FULLYKIOSK from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser media player entity.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data async_add_entities([FullyMediaPlayer(coordinator)]) diff --git a/homeassistant/components/fully_kiosk/notify.py b/homeassistant/components/fully_kiosk/notify.py index aa47c178f03..bddc07439b3 100644 --- a/homeassistant/components/fully_kiosk/notify.py +++ b/homeassistant/components/fully_kiosk/notify.py @@ -7,12 +7,11 @@ from dataclasses import dataclass from fullykiosk import FullyKioskError from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -39,10 +38,12 @@ NOTIFIERS: tuple[FullyNotifyEntityDescription, ...] 
= ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: FullyKioskConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser notify entities.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( FullyNotifyEntity(coordinator, description) for description in NOTIFIERS ) diff --git a/homeassistant/components/fully_kiosk/number.py b/homeassistant/components/fully_kiosk/number.py index 59c249fd1c2..ef25a69f1ee 100644 --- a/homeassistant/components/fully_kiosk/number.py +++ b/homeassistant/components/fully_kiosk/number.py @@ -5,12 +5,11 @@ from __future__ import annotations from contextlib import suppress from homeassistant.components.number import NumberEntity, NumberEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -54,11 +53,11 @@ ENTITY_TYPES: tuple[NumberEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser number entities.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data async_add_entities( FullyNumberEntity(coordinator, entity) diff --git a/homeassistant/components/fully_kiosk/quality_scale.yaml b/homeassistant/components/fully_kiosk/quality_scale.yaml new file mode 100644 index 00000000000..68fa7b9c3f9 --- /dev/null +++ b/homeassistant/components/fully_kiosk/quality_scale.yaml @@ -0,0 +1,66 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: done + dependency-transparency: done + action-setup: done + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: done + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: todo + reauthentication-flow: todo + parallel-updates: todo + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: This integration does not utilize an options flow. 
+ + # Gold + entity-translations: todo + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: + status: exempt + comment: Each config entry maps to a single device + diagnostics: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: Each config entry maps to a single device + discovery-update-info: done + repair-issues: todo + docs-use-cases: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-data-update: todo + docs-known-limitations: done + docs-troubleshooting: todo + docs-examples: done + + # Platinum + async-dependency: todo + inject-websession: done + strict-typing: todo diff --git a/homeassistant/components/fully_kiosk/sensor.py b/homeassistant/components/fully_kiosk/sensor.py index 48fc8e51425..ed95323547f 100644 --- a/homeassistant/components/fully_kiosk/sensor.py +++ b/homeassistant/components/fully_kiosk/sensor.py @@ -12,13 +12,12 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfInformation from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -114,13 +113,11 @@ SENSORS: tuple[FullySensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser sensor.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data async_add_entities( FullySensor(coordinator, description) for description in SENSORS diff --git a/homeassistant/components/fully_kiosk/services.py b/homeassistant/components/fully_kiosk/services.py index b9369198940..089ae1d4246 100644 --- a/homeassistant/components/fully_kiosk/services.py +++ b/homeassistant/components/fully_kiosk/services.py @@ -53,7 +53,7 @@ async def async_setup_services(hass: HomeAssistant) -> None: for config_entry in config_entries: if config_entry.state != ConfigEntryState.LOADED: raise HomeAssistantError(f"{config_entry.title} is not loaded") - coordinators.append(hass.data[DOMAIN][config_entry.entry_id]) + coordinators.append(config_entry.runtime_data) return coordinators async def async_load_url(call: ServiceCall) -> None: diff --git a/homeassistant/components/fully_kiosk/strings.json b/homeassistant/components/fully_kiosk/strings.json index 9c0049d3e5f..a4b466926f0 100644 --- a/homeassistant/components/fully_kiosk/strings.json +++ b/homeassistant/components/fully_kiosk/strings.json @@ -1,10 +1,22 @@ { + "common": { + "data_description_password": "The Remote Admin Password from the Fully Kiosk Browser app settings.", + "data_description_ssl": "Is the Fully Kiosk app configured to require SSL for the connection?", + "data_description_verify_ssl": "Should SSL certificates be verified? This should be off for self-signed certificates."
+ }, "config": { "step": { "discovery_confirm": { "description": "Do you want to set up {name} ({host})?", "data": { - "password": "[%key:common::config_flow::data::password%]" + "password": "[%key:common::config_flow::data::password%]", + "ssl": "[%key:common::config_flow::data::ssl%]", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "password": "[%key:component::fully_kiosk::common::data_description_password%]", + "ssl": "[%key:component::fully_kiosk::common::data_description_ssl%]", + "verify_ssl": "[%key:component::fully_kiosk::common::data_description_verify_ssl%]" } }, "user": { @@ -15,7 +27,10 @@ "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { - "host": "The hostname or IP address of the device running your Fully Kiosk Browser application." + "host": "The hostname or IP address of the device running your Fully Kiosk Browser application.", + "password": "[%key:component::fully_kiosk::common::data_description_password%]", + "ssl": "[%key:component::fully_kiosk::common::data_description_ssl%]", + "verify_ssl": "[%key:component::fully_kiosk::common::data_description_verify_ssl%]" } } }, diff --git a/homeassistant/components/fully_kiosk/switch.py b/homeassistant/components/fully_kiosk/switch.py index 9d5af87abe9..4adf8e8c924 100644 --- a/homeassistant/components/fully_kiosk/switch.py +++ b/homeassistant/components/fully_kiosk/switch.py @@ -9,12 +9,11 @@ from typing import Any from fullykiosk import FullyKiosk from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -84,13 +83,11 @@ SWITCHES: tuple[FullySwitchEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser switch.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data async_add_entities( FullySwitchEntity(coordinator, description) for description in SWITCHES diff --git a/homeassistant/components/futurenow/manifest.json b/homeassistant/components/futurenow/manifest.json index dbe1b2d06fb..32a8761b1db 100644 --- a/homeassistant/components/futurenow/manifest.json +++ b/homeassistant/components/futurenow/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/futurenow", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pyfnip==0.2"] } diff --git a/homeassistant/components/fyta/__init__.py b/homeassistant/components/fyta/__init__.py index efbb1453456..1969ebfffe9 100644 --- a/homeassistant/components/fyta/__init__.py +++ b/homeassistant/components/fyta/__init__.py @@ -15,6 +15,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.util.dt import async_get_time_zone from .const import CONF_EXPIRATION @@ -39,7 +40,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: FytaConfigEntry) -> bool entry.data[CONF_EXPIRATION] ).astimezone(await async_get_time_zone(tz)) - fyta = FytaConnector(username, password, access_token, expiration, tz) + fyta = FytaConnector( + username, password, access_token, expiration, tz, async_get_clientsession(hass) + ) coordinator = FytaCoordinator(hass, fyta) @@ -52,13 +55,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: FytaConfigEntry) -> bool return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: FytaConfigEntry) -> bool: """Unload Fyta entity.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, config_entry: FytaConfigEntry +) -> bool: """Migrate old entry.""" _LOGGER.debug("Migrating from version %s", config_entry.version) diff --git a/homeassistant/components/fyta/config_flow.py b/homeassistant/components/fyta/config_flow.py index f2b5163c9db..78cb7647785 100644 --- a/homeassistant/components/fyta/config_flow.py +++ b/homeassistant/components/fyta/config_flow.py @@ -23,7 +23,6 @@ from homeassistant.helpers.selector import ( TextSelectorType, ) -from . 
import FytaConfigEntry from .const import CONF_EXPIRATION, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -51,7 +50,6 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Fyta.""" credentials: Credentials - _entry: FytaConfigEntry | None = None VERSION = 1 MINOR_VERSION = 2 @@ -100,7 +98,6 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle flow upon an API authentication error.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -108,20 +105,21 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle reauthorization flow.""" errors = {} - assert self._entry is not None + reauth_entry = self._get_reauth_entry() if user_input and not (errors := await self.async_auth(user_input)): user_input |= { CONF_ACCESS_TOKEN: self.credentials.access_token, CONF_EXPIRATION: self.credentials.expiration.isoformat(), } return self.async_update_reload_and_abort( - self._entry, data={**self._entry.data, **user_input} + reauth_entry, + data_updates=user_input, ) data_schema = self.add_suggested_values_to_schema( DATA_SCHEMA, - {CONF_USERNAME: self._entry.data[CONF_USERNAME], **(user_input or {})}, + {CONF_USERNAME: reauth_entry.data[CONF_USERNAME], **(user_input or {})}, ) return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/fyta/coordinator.py b/homeassistant/components/fyta/coordinator.py index c92a96eed63..553960bdcc6 100644 --- a/homeassistant/components/fyta/coordinator.py +++ b/homeassistant/components/fyta/coordinator.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable from datetime import datetime, timedelta import logging from typing import TYPE_CHECKING @@ -18,9 +19,10 @@ from fyta_cli.fyta_models import Plant from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +import homeassistant.helpers.device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_EXPIRATION +from .const import CONF_EXPIRATION, DOMAIN if TYPE_CHECKING: from . 
import FytaConfigEntry @@ -39,9 +41,11 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, Plant]]): hass, _LOGGER, name="FYTA Coordinator", - update_interval=timedelta(seconds=60), + update_interval=timedelta(minutes=4), ) self.fyta = fyta + self._plants_last_update: set[int] = set() + self.new_device_callbacks: list[Callable[[int], None]] = [] async def _async_update_data( self, @@ -55,9 +59,64 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, Plant]]): await self.renew_authentication() try: - return await self.fyta.update_all_plants() + data = await self.fyta.update_all_plants() except (FytaConnectionError, FytaPlantError) as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="update_error" + ) from err + _LOGGER.debug("Data successfully updated") + + # data must be assigned before _async_add_remove_devices, as it is used to set up possible new devices + self.data = data + self._async_add_remove_devices() + + return data + + def _async_add_remove_devices(self) -> None: + """Add new devices, remove non-existing devices.""" + if not self._plants_last_update: + self._plants_last_update = set(self.fyta.plant_list.keys()) + + if ( + current_plants := set(self.fyta.plant_list.keys()) + ) == self._plants_last_update: + return + + _LOGGER.debug( + "Check for new and removed plant(s): old plants: %s; new plants: %s", + ", ".join(map(str, self._plants_last_update)), + ", ".join(map(str, current_plants)), + ) + + # remove old plants + if removed_plants := self._plants_last_update - current_plants: + _LOGGER.debug("Removed plant(s): %s", ", ".join(map(str, removed_plants))) + + device_registry = dr.async_get(self.hass) + for plant_id in removed_plants: + if device := device_registry.async_get_device( + identifiers={ + ( + DOMAIN, + f"{self.config_entry.entry_id}-{plant_id}", + ) + } + ): + device_registry.async_update_device( + device_id=device.id, + remove_config_entry_id=self.config_entry.entry_id, + ) + _LOGGER.debug("Device removed from device registry: %s", device.id) + + # add new devices + if new_plants := current_plants - self._plants_last_update: + _LOGGER.debug("New plant(s) found: %s", ", ".join(map(str, new_plants))) + for plant_id in new_plants: + for callback in self.new_device_callbacks: + callback(plant_id) + _LOGGER.debug("Device added: %s", plant_id) + + self._plants_last_update = current_plants async def renew_authentication(self) -> bool: """Renew access token for FYTA API.""" @@ -65,9 +124,14 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, Plant]]): try: credentials = await self.fyta.login() except FytaConnectionError as ex: - raise ConfigEntryNotReady from ex + raise ConfigEntryNotReady( + translation_domain=DOMAIN, translation_key="config_entry_not_ready" + ) from ex except (FytaAuthentificationError, FytaPasswordError) as ex: - raise ConfigEntryAuthFailed from ex + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from ex new_config_entry = {**self.config_entry.data} new_config_entry[CONF_ACCESS_TOKEN] = credentials.access_token diff --git a/homeassistant/components/fyta/entity.py b/homeassistant/components/fyta/entity.py index 18c52d74e25..4c078098ec1 100644 --- a/homeassistant/components/fyta/entity.py +++ b/homeassistant/components/fyta/entity.py @@ -3,10 +3,10 @@ from fyta_cli.fyta_models import Plant from homeassistant.components.sensor import SensorEntityDescription -from homeassistant.config_entries import ConfigEntry from
homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . import FytaConfigEntry from .const import DOMAIN from .coordinator import FytaCoordinator @@ -19,7 +19,7 @@ class FytaPlantEntity(CoordinatorEntity[FytaCoordinator]): def __init__( self, coordinator: FytaCoordinator, - entry: ConfigEntry, + entry: FytaConfigEntry, description: SensorEntityDescription, plant_id: int, ) -> None: diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index c07a19a3db0..ea628f55c6c 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -3,9 +3,11 @@ "name": "FYTA", "codeowners": ["@dontinelli"], "config_flow": true, + "dhcp": [{ "hostname": "fyta*" }], "documentation": "https://www.home-assistant.io/integrations/fyta", "integration_type": "hub", "iot_class": "cloud_polling", + "loggers": ["fyta_cli"], "quality_scale": "platinum", - "requirements": ["fyta_cli==0.6.3"] + "requirements": ["fyta_cli==0.7.0"] } diff --git a/homeassistant/components/fyta/quality_scale.yaml b/homeassistant/components/fyta/quality_scale.yaml new file mode 100644 index 00000000000..0fbacd0e12e --- /dev/null +++ b/homeassistant/components/fyta/quality_scale.yaml @@ -0,0 +1,90 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: done + dependency-transparency: done + action-setup: + status: exempt + comment: No custom action. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: + status: exempt + comment: No custom action. + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: No custom action. + reauthentication-flow: done + parallel-updates: + status: exempt + comment: | + Coordinator and only sensor platform. + + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: | + No options flow. + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: + status: exempt + comment: No noisy entities. + discovery: + status: done + comment: DHCP + stale-devices: done + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: + status: exempt + comment: No configuration besides credentials. + dynamic-devices: done + discovery-update-info: + status: exempt + comment: Fyta can be discovered but does not have a local connection. + repair-issues: + status: exempt + comment: | + No issues/repairs. + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: + status: exempt + comment: | + No known issues that could be resolved by the user. + docs-examples: + status: exempt + comment: | + As only sensors are provided, no examples deemed necessary/appropriate. 
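The dynamic-devices and stale-devices entries above correspond to the FytaCoordinator changes elsewhere in this diff, which compare the set of plant IDs on every refresh. A rough, generic sketch of that bookkeeping, assuming placeholder names (ExampleCoordinator, example_domain) rather than FYTA's real identifiers:

from collections.abc import Callable

from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator


class ExampleCoordinator(DataUpdateCoordinator[dict[int, object]]):
    """Coordinator that tracks which device IDs were present on the last refresh."""

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self._known_ids: set[int] = set()
        # Platforms append callbacks here to create entities for newly seen devices.
        self.new_device_callbacks: list[Callable[[int], None]] = []

    def _handle_device_changes(self, current_ids: set[int]) -> None:
        """Remove registry entries for vanished devices, announce new ones."""
        registry = dr.async_get(self.hass)
        for removed_id in self._known_ids - current_ids:
            if device := registry.async_get_device(
                identifiers={("example_domain", str(removed_id))}
            ):
                registry.async_update_device(
                    device_id=device.id,
                    remove_config_entry_id=self.config_entry.entry_id,
                )
        for new_id in current_ids - self._known_ids:
            for callback in self.new_device_callbacks:
                callback(new_id)
        self._known_ids = current_ids

The fyta sensor platform hunk further down shows the consuming side: it appends a small closure to new_device_callbacks during setup so newly discovered plants get entities without a reload.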
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/fyta/sensor.py b/homeassistant/components/fyta/sensor.py index a351d79dd8b..89ee22265cf 100644 --- a/homeassistant/components/fyta/sensor.py +++ b/homeassistant/components/fyta/sensor.py @@ -113,7 +113,7 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [ FytaSensorEntityDescription( key="salinity", translation_key="salinity", - native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS, + native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS_PER_CM, device_class=SensorDeviceClass.CONDUCTIVITY, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda plant: plant.salinity, @@ -150,6 +150,15 @@ async def async_setup_entry( async_add_entities(plant_entities) + def _async_add_new_device(plant_id: int) -> None: + async_add_entities( + FytaPlantSensor(coordinator, entry, sensor, plant_id) + for sensor in SENSORS + if sensor.key in dir(coordinator.data.get(plant_id)) + ) + + coordinator.new_device_callbacks.append(_async_add_new_device) + class FytaPlantSensor(FytaPlantEntity, SensorEntity): """Represents a Fyta sensor.""" diff --git a/homeassistant/components/fyta/strings.json b/homeassistant/components/fyta/strings.json index bacd24555b0..fc9f424d5aa 100644 --- a/homeassistant/components/fyta/strings.json +++ b/homeassistant/components/fyta/strings.json @@ -3,10 +3,14 @@ "step": { "user": { "title": "Credentials for FYTA API", - "description": "Provide email and password to connect to the FYTA server", + "description": "Provide email and password to connect to the FYTA server", "data": { - "username": "[%key:common::config_flow::data::username%]", + "username": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "The email address to log in to your FYTA account.", + "password": "The password to log in to your FYTA account." + } } }, "reauth_confirm": { @@ -14,11 +18,16 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::fyta::config::step::user::data_description::username%]", + "password": "[%key:component::fyta::config::step::user::data_description::password%]" + } } } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { @@ -93,5 +102,16 @@ "name": "Salinity" } } + }, + "exceptions": { + "update_error": { + "message": "Error while updating data from the API." + }, + "config_entry_not_ready": { + "message": "Error while loading the config entry." + }, + "auth_failed": { + "message": "Error while logging in to the API."
+ } } } diff --git a/homeassistant/components/garadget/cover.py b/homeassistant/components/garadget/cover.py index 988c66b679c..82045e91321 100644 --- a/homeassistant/components/garadget/cover.py +++ b/homeassistant/components/garadget/cover.py @@ -12,6 +12,7 @@ from homeassistant.components.cover import ( PLATFORM_SCHEMA as COVER_PLATFORM_SCHEMA, CoverDeviceClass, CoverEntity, + CoverState, ) from homeassistant.const import ( CONF_ACCESS_TOKEN, @@ -20,8 +21,6 @@ from homeassistant.const import ( CONF_NAME, CONF_PASSWORD, CONF_USERNAME, - STATE_CLOSED, - STATE_OPEN, ) from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv @@ -38,16 +37,14 @@ ATTR_TIME_IN_STATE = "time_in_state" DEFAULT_NAME = "Garadget" -STATE_CLOSING = "closing" STATE_OFFLINE = "offline" -STATE_OPENING = "opening" STATE_STOPPED = "stopped" STATES_MAP = { - "open": STATE_OPEN, - "opening": STATE_OPENING, - "closed": STATE_CLOSED, - "closing": STATE_CLOSING, + "open": CoverState.OPEN, + "opening": CoverState.OPENING, + "closed": CoverState.CLOSED, + "closing": CoverState.CLOSING, "stopped": STATE_STOPPED, } @@ -175,7 +172,7 @@ class GaradgetCover(CoverEntity): """Return if the cover is closed.""" if self._state is None: return None - return self._state == STATE_CLOSED + return self._state == CoverState.CLOSED def get_token(self): """Get new token for usage during this session.""" @@ -249,7 +246,7 @@ class GaradgetCover(CoverEntity): self._state = STATE_OFFLINE if ( - self._state not in [STATE_CLOSING, STATE_OPENING] + self._state not in [CoverState.CLOSING, CoverState.OPENING] and self._unsub_listener_cover is not None ): self._unsub_listener_cover() diff --git a/homeassistant/components/garadget/manifest.json b/homeassistant/components/garadget/manifest.json index c7a30a465d2..bd1920a7c4c 100644 --- a/homeassistant/components/garadget/manifest.json +++ b/homeassistant/components/garadget/manifest.json @@ -3,5 +3,6 @@ "name": "Garadget", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/garadget", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/garages_amsterdam/__init__.py b/homeassistant/components/garages_amsterdam/__init__.py index 81ec72d9fbf..99d751cfcc8 100644 --- a/homeassistant/components/garages_amsterdam/__init__.py +++ b/homeassistant/components/garages_amsterdam/__init__.py @@ -1,62 +1,38 @@ """The Garages Amsterdam integration.""" -import asyncio -from datetime import timedelta -import logging +from __future__ import annotations -from odp_amsterdam import ODPAmsterdam, VehicleType +from odp_amsterdam import ODPAmsterdam from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import aiohttp_client -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN +from .coordinator import GaragesAmsterdamDataUpdateCoordinator -PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] + +type GaragesAmsterdamConfigEntry = ConfigEntry[GaragesAmsterdamDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: GaragesAmsterdamConfigEntry +) -> bool: """Set up Garages Amsterdam 
from a config entry.""" - await get_coordinator(hass) + client = ODPAmsterdam(session=async_get_clientsession(hass)) + coordinator = GaragesAmsterdamDataUpdateCoordinator(hass, client) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: GaragesAmsterdamConfigEntry +) -> bool: """Unload Garages Amsterdam config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if len(hass.config_entries.async_entries(DOMAIN)) == 1: - hass.data.pop(DOMAIN) - - return unload_ok - - -async def get_coordinator( - hass: HomeAssistant, -) -> DataUpdateCoordinator: - """Get the data update coordinator.""" - if DOMAIN in hass.data: - return hass.data[DOMAIN] - - async def async_get_garages(): - async with asyncio.timeout(10): - return { - garage.garage_name: garage - for garage in await ODPAmsterdam( - session=aiohttp_client.async_get_clientsession(hass) - ).all_garages(vehicle=VehicleType.CAR) - } - - coordinator = DataUpdateCoordinator( - hass, - logging.getLogger(__name__), - name=DOMAIN, - update_method=async_get_garages, - update_interval=timedelta(minutes=10), - ) - await coordinator.async_config_entry_first_refresh() - - hass.data[DOMAIN] = coordinator - return coordinator + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/garages_amsterdam/binary_sensor.py b/homeassistant/components/garages_amsterdam/binary_sensor.py index 0aebe36baeb..b93b43e1173 100644 --- a/homeassistant/components/garages_amsterdam/binary_sensor.py +++ b/homeassistant/components/garages_amsterdam/binary_sensor.py @@ -2,47 +2,77 @@ from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass + +from odp_amsterdam import Garage + from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, + BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import get_coordinator +from . import GaragesAmsterdamConfigEntry +from .coordinator import GaragesAmsterdamDataUpdateCoordinator from .entity import GaragesAmsterdamEntity -BINARY_SENSORS = { - "state", -} + +@dataclass(frozen=True, kw_only=True) +class GaragesAmsterdamBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class describing Garages Amsterdam binary sensor entity.""" + + is_on: Callable[[Garage], bool] + + +BINARY_SENSORS: tuple[GaragesAmsterdamBinarySensorEntityDescription, ...] 
= ( + GaragesAmsterdamBinarySensorEntityDescription( + key="state", + translation_key="state", + device_class=BinarySensorDeviceClass.PROBLEM, + is_on=lambda garage: garage.state != "ok", + ), +) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: GaragesAmsterdamConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Defer sensor setup to the shared sensor module.""" - coordinator = await get_coordinator(hass) + coordinator = entry.runtime_data async_add_entities( GaragesAmsterdamBinarySensor( - coordinator, config_entry.data["garage_name"], info_type + coordinator=coordinator, + garage_name=entry.data["garage_name"], + description=description, ) - for info_type in BINARY_SENSORS + for description in BINARY_SENSORS ) class GaragesAmsterdamBinarySensor(GaragesAmsterdamEntity, BinarySensorEntity): """Binary Sensor representing garages amsterdam data.""" - _attr_device_class = BinarySensorDeviceClass.PROBLEM - _attr_name = None + entity_description: GaragesAmsterdamBinarySensorEntityDescription + + def __init__( + self, + *, + coordinator: GaragesAmsterdamDataUpdateCoordinator, + garage_name: str, + description: GaragesAmsterdamBinarySensorEntityDescription, + ) -> None: + """Initialize garages amsterdam binary sensor.""" + super().__init__(coordinator, garage_name) + self.entity_description = description + self._attr_unique_id = f"{garage_name}-{description.key}" @property def is_on(self) -> bool: """If the binary sensor is currently on or off.""" - return ( - getattr(self.coordinator.data[self._garage_name], self._info_type) != "ok" - ) + return self.entity_description.is_on(self.coordinator.data[self._garage_name]) diff --git a/homeassistant/components/garages_amsterdam/const.py b/homeassistant/components/garages_amsterdam/const.py index ae7801a9abd..be5e2216a81 100644 --- a/homeassistant/components/garages_amsterdam/const.py +++ b/homeassistant/components/garages_amsterdam/const.py @@ -1,4 +1,13 @@ """Constants for the Garages Amsterdam integration.""" -DOMAIN = "garages_amsterdam" -ATTRIBUTION = f'{"Data provided by municipality of Amsterdam"}' +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import Final + +DOMAIN: Final = "garages_amsterdam" +ATTRIBUTION = "Data provided by municipality of Amsterdam" + +LOGGER = logging.getLogger(__package__) +SCAN_INTERVAL = timedelta(minutes=10) diff --git a/homeassistant/components/garages_amsterdam/coordinator.py b/homeassistant/components/garages_amsterdam/coordinator.py new file mode 100644 index 00000000000..3d06aba79e2 --- /dev/null +++ b/homeassistant/components/garages_amsterdam/coordinator.py @@ -0,0 +1,34 @@ +"""Coordinator for the Garages Amsterdam integration.""" + +from __future__ import annotations + +from odp_amsterdam import Garage, ODPAmsterdam, VehicleType + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN, LOGGER, SCAN_INTERVAL + + +class GaragesAmsterdamDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Garage]]): + """Class to manage fetching Garages Amsterdam data from single endpoint.""" + + def __init__( + self, + hass: HomeAssistant, + client: ODPAmsterdam, + ) -> None: + """Initialize global Garages Amsterdam data updater.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = client + + async def _async_update_data(self) -> dict[str, Garage]: + return { + 
garage.garage_name: garage + for garage in await self.client.all_garages(vehicle=VehicleType.CAR) + } diff --git a/homeassistant/components/garages_amsterdam/entity.py b/homeassistant/components/garages_amsterdam/entity.py index 671405235d4..433bc75b962 100644 --- a/homeassistant/components/garages_amsterdam/entity.py +++ b/homeassistant/components/garages_amsterdam/entity.py @@ -3,28 +3,26 @@ from __future__ import annotations from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ATTRIBUTION, DOMAIN +from .coordinator import GaragesAmsterdamDataUpdateCoordinator -class GaragesAmsterdamEntity(CoordinatorEntity): +class GaragesAmsterdamEntity(CoordinatorEntity[GaragesAmsterdamDataUpdateCoordinator]): """Base Entity for garages amsterdam data.""" _attr_attribution = ATTRIBUTION _attr_has_entity_name = True def __init__( - self, coordinator: DataUpdateCoordinator, garage_name: str, info_type: str + self, + coordinator: GaragesAmsterdamDataUpdateCoordinator, + garage_name: str, ) -> None: """Initialize garages amsterdam entity.""" super().__init__(coordinator) - self._attr_unique_id = f"{garage_name}-{info_type}" self._garage_name = garage_name - self._info_type = info_type self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, garage_name)}, name=garage_name, diff --git a/homeassistant/components/garages_amsterdam/sensor.py b/homeassistant/components/garages_amsterdam/sensor.py index b6fc950a843..b562fff841a 100644 --- a/homeassistant/components/garages_amsterdam/sensor.py +++ b/homeassistant/components/garages_amsterdam/sensor.py @@ -2,49 +2,93 @@ from __future__ import annotations -from homeassistant.components.sensor import SensorEntity -from homeassistant.config_entries import ConfigEntry +from collections.abc import Callable +from dataclasses import dataclass + +from odp_amsterdam import Garage + +from homeassistant.components.sensor import ( + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.typing import StateType -from . import get_coordinator +from . import GaragesAmsterdamConfigEntry +from .coordinator import GaragesAmsterdamDataUpdateCoordinator from .entity import GaragesAmsterdamEntity -SENSORS = { - "free_space_short", - "free_space_long", - "short_capacity", - "long_capacity", -} + +@dataclass(frozen=True, kw_only=True) +class GaragesAmsterdamSensorEntityDescription(SensorEntityDescription): + """Class describing Garages Amsterdam sensor entity.""" + + value_fn: Callable[[Garage], StateType] + + +SENSORS: tuple[GaragesAmsterdamSensorEntityDescription, ...] 
= ( + GaragesAmsterdamSensorEntityDescription( + key="free_space_short", + translation_key="free_space_short", + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda garage: garage.free_space_short, + ), + GaragesAmsterdamSensorEntityDescription( + key="free_space_long", + translation_key="free_space_long", + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda garage: garage.free_space_long, + ), + GaragesAmsterdamSensorEntityDescription( + key="short_capacity", + translation_key="short_capacity", + value_fn=lambda garage: garage.short_capacity, + ), + GaragesAmsterdamSensorEntityDescription( + key="long_capacity", + translation_key="long_capacity", + value_fn=lambda garage: garage.long_capacity, + ), +) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: GaragesAmsterdamConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Defer sensor setup to the shared sensor module.""" - coordinator = await get_coordinator(hass) + coordinator = entry.runtime_data async_add_entities( - GaragesAmsterdamSensor(coordinator, config_entry.data["garage_name"], info_type) - for info_type in SENSORS - if getattr(coordinator.data[config_entry.data["garage_name"]], info_type) != "" + GaragesAmsterdamSensor( + coordinator=coordinator, + garage_name=entry.data["garage_name"], + description=description, + ) + for description in SENSORS + if description.value_fn(coordinator.data[entry.data["garage_name"]]) is not None ) class GaragesAmsterdamSensor(GaragesAmsterdamEntity, SensorEntity): """Sensor representing garages amsterdam data.""" - _attr_native_unit_of_measurement = "cars" + entity_description: GaragesAmsterdamSensorEntityDescription def __init__( - self, coordinator: DataUpdateCoordinator, garage_name: str, info_type: str + self, + *, + coordinator: GaragesAmsterdamDataUpdateCoordinator, + garage_name: str, + description: GaragesAmsterdamSensorEntityDescription, ) -> None: """Initialize garages amsterdam sensor.""" - super().__init__(coordinator, garage_name, info_type) - self._attr_translation_key = info_type + super().__init__(coordinator, garage_name) + self.entity_description = description + self._attr_unique_id = f"{garage_name}-{description.key}" @property def available(self) -> bool: @@ -54,6 +98,8 @@ class GaragesAmsterdamSensor(GaragesAmsterdamEntity, SensorEntity): ) @property - def native_value(self) -> str: + def native_value(self) -> StateType: """Return the state of the sensor.""" - return getattr(self.coordinator.data[self._garage_name], self._info_type) + return self.entity_description.value_fn( + self.coordinator.data[self._garage_name] + ) diff --git a/homeassistant/components/garages_amsterdam/strings.json b/homeassistant/components/garages_amsterdam/strings.json index 89a85f97448..19157afdafb 100644 --- a/homeassistant/components/garages_amsterdam/strings.json +++ b/homeassistant/components/garages_amsterdam/strings.json @@ -3,8 +3,13 @@ "config": { "step": { "user": { - "title": "Pick a garage to monitor", - "data": { "garage_name": "Garage name" } + "description": "Select a garage from the list", + "data": { + "garage_name": "Garage name" + }, + "data_description": { + "garage_name": "The name of the garage you want to monitor." 
+ } } }, "abort": { @@ -16,16 +21,25 @@ "entity": { "sensor": { "free_space_short": { - "name": "Short parking free space" + "name": "Short parking free space", + "unit_of_measurement": "cars" }, "free_space_long": { - "name": "Long parking free space" + "name": "Long parking free space", + "unit_of_measurement": "cars" }, "short_capacity": { - "name": "Short parking capacity" + "name": "Short parking capacity", + "unit_of_measurement": "cars" }, "long_capacity": { - "name": "Long parking capacity" + "name": "Long parking capacity", + "unit_of_measurement": "cars" + } + }, + "binary_sensor": { + "state": { + "name": "State" } } } diff --git a/homeassistant/components/gardena_bluetooth/__init__.py b/homeassistant/components/gardena_bluetooth/__init__.py index ed5b1c14ba3..7aae629974c 100644 --- a/homeassistant/components/gardena_bluetooth/__init__.py +++ b/homeassistant/components/gardena_bluetooth/__init__.py @@ -18,7 +18,7 @@ from homeassistant.helpers.device_registry import DeviceInfo import homeassistant.util.dt as dt_util from .const import DOMAIN -from .coordinator import Coordinator, DeviceUnavailable +from .coordinator import DeviceUnavailable, GardenaBluetoothCoordinator PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, @@ -32,6 +32,8 @@ LOGGER = logging.getLogger(__name__) TIMEOUT = 20.0 DISCONNECT_DELAY = 5 +type GardenaBluetoothConfigEntry = ConfigEntry[GardenaBluetoothCoordinator] + def get_connection(hass: HomeAssistant, address: str) -> CachedConnection: """Set up a cached client that keeps connection after last use.""" @@ -47,7 +49,9 @@ def get_connection(hass: HomeAssistant, address: str) -> CachedConnection: return CachedConnection(DISCONNECT_DELAY, _device_lookup) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: GardenaBluetoothConfigEntry +) -> bool: """Set up Gardena Bluetooth from a config entry.""" address = entry.data[CONF_ADDRESS] @@ -75,19 +79,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: model=model, ) - coordinator = Coordinator(hass, LOGGER, client, uuids, device, address) + coordinator = GardenaBluetoothCoordinator( + hass, LOGGER, client, uuids, device, address + ) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) await coordinator.async_refresh() return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: GardenaBluetoothConfigEntry +) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - coordinator: Coordinator = hass.data[DOMAIN].pop(entry.entry_id) - await coordinator.async_shutdown() + await entry.runtime_data.async_shutdown() return unload_ok diff --git a/homeassistant/components/gardena_bluetooth/binary_sensor.py b/homeassistant/components/gardena_bluetooth/binary_sensor.py index c552beaf878..d3ae096e291 100644 --- a/homeassistant/components/gardena_bluetooth/binary_sensor.py +++ b/homeassistant/components/gardena_bluetooth/binary_sensor.py @@ -12,13 +12,12 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from 
homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import Coordinator, GardenaBluetoothDescriptorEntity +from . import GardenaBluetoothConfigEntry +from .entity import GardenaBluetoothDescriptorEntity @dataclass(frozen=True) @@ -52,10 +51,12 @@ DESCRIPTIONS = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up binary sensor based on a config entry.""" - coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities = [ GardenaBluetoothBinarySensor(coordinator, description, description.context) for description in DESCRIPTIONS diff --git a/homeassistant/components/gardena_bluetooth/button.py b/homeassistant/components/gardena_bluetooth/button.py index bdcf9094f5c..9d87cba2446 100644 --- a/homeassistant/components/gardena_bluetooth/button.py +++ b/homeassistant/components/gardena_bluetooth/button.py @@ -8,13 +8,12 @@ from gardena_bluetooth.const import Reset from gardena_bluetooth.parse import CharacteristicBool from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import Coordinator, GardenaBluetoothDescriptorEntity +from . import GardenaBluetoothConfigEntry +from .entity import GardenaBluetoothDescriptorEntity @dataclass(frozen=True) @@ -41,10 +40,12 @@ DESCRIPTIONS = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up button based on a config entry.""" - coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities = [ GardenaBluetoothButton(coordinator, description, description.context) for description in DESCRIPTIONS diff --git a/homeassistant/components/gardena_bluetooth/coordinator.py b/homeassistant/components/gardena_bluetooth/coordinator.py index 296eff2686e..5caafe0e794 100644 --- a/homeassistant/components/gardena_bluetooth/coordinator.py +++ b/homeassistant/components/gardena_bluetooth/coordinator.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import timedelta import logging -from typing import Any from gardena_bluetooth.client import Client from gardena_bluetooth.exceptions import ( @@ -16,12 +15,7 @@ from gardena_bluetooth.parse import Characteristic, CharacteristicType from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed SCAN_INTERVAL = timedelta(seconds=60) LOGGER = logging.getLogger(__name__) @@ -31,7 +25,7 @@ class DeviceUnavailable(HomeAssistantError): """Raised if device can't be found.""" -class Coordinator(DataUpdateCoordinator[dict[str, bytes]]): +class 
GardenaBluetoothCoordinator(DataUpdateCoordinator[dict[str, bytes]]): """Class to manage fetching data.""" def __init__( @@ -102,34 +96,3 @@ class Coordinator(DataUpdateCoordinator[dict[str, bytes]]): self.data[char.uuid] = char.encode(value) await self.async_refresh() - - -class GardenaBluetoothEntity(CoordinatorEntity[Coordinator]): - """Coordinator entity for Gardena Bluetooth.""" - - _attr_has_entity_name = True - - def __init__(self, coordinator: Coordinator, context: Any = None) -> None: - """Initialize coordinator entity.""" - super().__init__(coordinator, context) - self._attr_device_info = coordinator.device_info - - @property - def available(self) -> bool: - """Return if entity is available.""" - return self.coordinator.last_update_success and self._attr_available - - -class GardenaBluetoothDescriptorEntity(GardenaBluetoothEntity): - """Coordinator entity for entities with entity description.""" - - def __init__( - self, - coordinator: Coordinator, - description: EntityDescription, - context: set[str], - ) -> None: - """Initialize description entity.""" - super().__init__(coordinator, context) - self._attr_unique_id = f"{coordinator.address}-{description.key}" - self.entity_description = description diff --git a/homeassistant/components/gardena_bluetooth/entity.py b/homeassistant/components/gardena_bluetooth/entity.py new file mode 100644 index 00000000000..a0344fc4ca0 --- /dev/null +++ b/homeassistant/components/gardena_bluetooth/entity.py @@ -0,0 +1,43 @@ +"""Provides the DataUpdateCoordinator.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import GardenaBluetoothCoordinator + + +class GardenaBluetoothEntity(CoordinatorEntity[GardenaBluetoothCoordinator]): + """Coordinator entity for Gardena Bluetooth.""" + + _attr_has_entity_name = True + + def __init__( + self, coordinator: GardenaBluetoothCoordinator, context: Any = None + ) -> None: + """Initialize coordinator entity.""" + super().__init__(coordinator, context) + self._attr_device_info = coordinator.device_info + + @property + def available(self) -> bool: + """Return if entity is available.""" + return self.coordinator.last_update_success and self._attr_available + + +class GardenaBluetoothDescriptorEntity(GardenaBluetoothEntity): + """Coordinator entity for entities with entity description.""" + + def __init__( + self, + coordinator: GardenaBluetoothCoordinator, + description: EntityDescription, + context: set[str], + ) -> None: + """Initialize description entity.""" + super().__init__(coordinator, context) + self._attr_unique_id = f"{coordinator.address}-{description.key}" + self.entity_description = description diff --git a/homeassistant/components/gardena_bluetooth/manifest.json b/homeassistant/components/gardena_bluetooth/manifest.json index 4812def7dde..da5c08c38c5 100644 --- a/homeassistant/components/gardena_bluetooth/manifest.json +++ b/homeassistant/components/gardena_bluetooth/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth", "iot_class": "local_polling", "loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"], - "requirements": ["gardena-bluetooth==1.4.2"] + "requirements": ["gardena-bluetooth==1.4.4"] } diff --git a/homeassistant/components/gardena_bluetooth/number.py b/homeassistant/components/gardena_bluetooth/number.py index cbc4866b0ff..b55630fa797 100644 --- 
a/homeassistant/components/gardena_bluetooth/number.py +++ b/homeassistant/components/gardena_bluetooth/number.py @@ -17,17 +17,13 @@ from homeassistant.components.number import ( NumberEntityDescription, NumberMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import ( - Coordinator, - GardenaBluetoothDescriptorEntity, - GardenaBluetoothEntity, -) +from . import GardenaBluetoothConfigEntry +from .coordinator import GardenaBluetoothCoordinator +from .entity import GardenaBluetoothDescriptorEntity, GardenaBluetoothEntity @dataclass(frozen=True) @@ -108,10 +104,12 @@ DESCRIPTIONS = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up entity based on a config entry.""" - coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities: list[NumberEntity] = [ GardenaBluetoothNumber(coordinator, description, description.context) for description in DESCRIPTIONS @@ -159,7 +157,7 @@ class GardenaBluetoothRemainingOpenSetNumber(GardenaBluetoothEntity, NumberEntit def __init__( self, - coordinator: Coordinator, + coordinator: GardenaBluetoothCoordinator, ) -> None: """Initialize the remaining time entity.""" super().__init__(coordinator, {Valve.remaining_open_time.uuid}) diff --git a/homeassistant/components/gardena_bluetooth/sensor.py b/homeassistant/components/gardena_bluetooth/sensor.py index 3e6ddf9a2df..ee8a2663218 100644 --- a/homeassistant/components/gardena_bluetooth/sensor.py +++ b/homeassistant/components/gardena_bluetooth/sensor.py @@ -14,18 +14,14 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from .const import DOMAIN -from .coordinator import ( - Coordinator, - GardenaBluetoothDescriptorEntity, - GardenaBluetoothEntity, -) +from . 
import GardenaBluetoothConfigEntry +from .coordinator import GardenaBluetoothCoordinator +from .entity import GardenaBluetoothDescriptorEntity, GardenaBluetoothEntity @dataclass(frozen=True) @@ -98,10 +94,12 @@ DESCRIPTIONS = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up Gardena Bluetooth sensor based on a config entry.""" - coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities: list[GardenaBluetoothEntity] = [ GardenaBluetoothSensor(coordinator, description, description.context) for description in DESCRIPTIONS @@ -140,7 +138,7 @@ class GardenaBluetoothRemainSensor(GardenaBluetoothEntity, SensorEntity): def __init__( self, - coordinator: Coordinator, + coordinator: GardenaBluetoothCoordinator, ) -> None: """Initialize the sensor.""" super().__init__(coordinator, {Valve.remaining_open_time.uuid}) diff --git a/homeassistant/components/gardena_bluetooth/strings.json b/homeassistant/components/gardena_bluetooth/strings.json index d0c1b878cef..dd50bac0b2a 100644 --- a/homeassistant/components/gardena_bluetooth/strings.json +++ b/homeassistant/components/gardena_bluetooth/strings.json @@ -16,7 +16,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "entity": { diff --git a/homeassistant/components/gardena_bluetooth/switch.py b/homeassistant/components/gardena_bluetooth/switch.py index d010665e427..f82c39025a5 100644 --- a/homeassistant/components/gardena_bluetooth/switch.py +++ b/homeassistant/components/gardena_bluetooth/switch.py @@ -7,20 +7,22 @@ from typing import Any from gardena_bluetooth.const import Valve from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import Coordinator, GardenaBluetoothEntity +from . 
import GardenaBluetoothConfigEntry +from .coordinator import GardenaBluetoothCoordinator +from .entity import GardenaBluetoothEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch based on a config entry.""" - coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities = [] if GardenaBluetoothValveSwitch.characteristics.issubset( coordinator.characteristics @@ -41,7 +43,7 @@ class GardenaBluetoothValveSwitch(GardenaBluetoothEntity, SwitchEntity): def __init__( self, - coordinator: Coordinator, + coordinator: GardenaBluetoothCoordinator, ) -> None: """Initialize the switch.""" super().__init__( diff --git a/homeassistant/components/gardena_bluetooth/valve.py b/homeassistant/components/gardena_bluetooth/valve.py index 3faf758f7e9..ae6bf56a7ff 100644 --- a/homeassistant/components/gardena_bluetooth/valve.py +++ b/homeassistant/components/gardena_bluetooth/valve.py @@ -7,21 +7,23 @@ from typing import Any from gardena_bluetooth.const import Valve from homeassistant.components.valve import ValveEntity, ValveEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import Coordinator, GardenaBluetoothEntity +from . import GardenaBluetoothConfigEntry +from .coordinator import GardenaBluetoothCoordinator +from .entity import GardenaBluetoothEntity FALLBACK_WATERING_TIME_IN_SECONDS = 60 * 60 async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch based on a config entry.""" - coordinator: Coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities = [] if GardenaBluetoothValve.characteristics.issubset(coordinator.characteristics): entities.append(GardenaBluetoothValve(coordinator)) @@ -45,7 +47,7 @@ class GardenaBluetoothValve(GardenaBluetoothEntity, ValveEntity): def __init__( self, - coordinator: Coordinator, + coordinator: GardenaBluetoothCoordinator, ) -> None: """Initialize the switch.""" super().__init__( diff --git a/homeassistant/components/gc100/manifest.json b/homeassistant/components/gc100/manifest.json index b4af14a323b..687e09f5c89 100644 --- a/homeassistant/components/gc100/manifest.json +++ b/homeassistant/components/gc100/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/gc100", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["python-gc100==1.0.3a0"] } diff --git a/homeassistant/components/gdacs/diagnostics.py b/homeassistant/components/gdacs/diagnostics.py new file mode 100644 index 00000000000..435e28ca1ae --- /dev/null +++ b/homeassistant/components/gdacs/diagnostics.py @@ -0,0 +1,39 @@ +"""Diagnostics support for GDACS integration.""" + +from __future__ import annotations + +from typing import Any + +from aio_georss_client.status_update import StatusUpdate + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import 
HomeAssistant + +from . import GdacsFeedEntityManager +from .const import DOMAIN, FEED + +TO_REDACT = {CONF_LATITUDE, CONF_LONGITUDE} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data: dict[str, Any] = { + "info": async_redact_data(config_entry.data, TO_REDACT), + } + + manager: GdacsFeedEntityManager = hass.data[DOMAIN][FEED][config_entry.entry_id] + status_info: StatusUpdate = manager.status_info() + if status_info: + data["service"] = { + "status": status_info.status, + "total": status_info.total, + "last_update": status_info.last_update, + "last_update_successful": status_info.last_update_successful, + "last_timestamp": status_info.last_timestamp, + } + + return data diff --git a/homeassistant/components/gdacs/manifest.json b/homeassistant/components/gdacs/manifest.json index d743dd00424..a40dc8cf91b 100644 --- a/homeassistant/components/gdacs/manifest.json +++ b/homeassistant/components/gdacs/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_georss_gdacs", "aio_georss_client"], - "quality_scale": "platinum", - "requirements": ["aio-georss-gdacs==0.9"] + "requirements": ["aio-georss-gdacs==0.10"] } diff --git a/homeassistant/components/generic/config_flow.py b/homeassistant/components/generic/config_flow.py index 401b49dad4a..84243101bd6 100644 --- a/homeassistant/components/generic/config_flow.py +++ b/homeassistant/components/generic/config_flow.py @@ -9,7 +9,7 @@ from datetime import datetime from errno import EHOSTUNREACH, EIO import io import logging -from typing import Any +from typing import Any, cast from aiohttp import web from httpx import HTTPStatusError, RequestError, TimeoutException @@ -22,7 +22,7 @@ from homeassistant.components.camera import ( DynamicStreamSettings, _async_get_image, ) -from homeassistant.components.http.view import HomeAssistantView +from homeassistant.components.http import HomeAssistantView from homeassistant.components.stream import ( CONF_RTSP_TRANSPORT, CONF_USE_WALLCLOCK_AS_TIMESTAMPS, @@ -47,7 +47,6 @@ from homeassistant.const import ( HTTP_DIGEST_AUTHENTICATION, ) from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import UnknownFlow from homeassistant.exceptions import HomeAssistantError, TemplateError from homeassistant.helpers import config_validation as cv, template as template_helper from homeassistant.helpers.httpx_client import get_async_client @@ -283,7 +282,7 @@ async def async_test_stream( return {CONF_STREAM_SOURCE: "timeout"} await stream.stop() except StreamWorkerError as err: - return {CONF_STREAM_SOURCE: str(err)} + return {CONF_STREAM_SOURCE: "unknown_with_details", "error_details": str(err)} except PermissionError: return {CONF_STREAM_SOURCE: "stream_not_permitted"} except OSError as err: @@ -316,6 +315,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize Generic ConfigFlow.""" + self.preview_cam: dict[str, Any] = {} self.user_input: dict[str, Any] = {} self.title = "" @@ -324,7 +324,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> GenericOptionsFlowHandler: """Get the options flow for this handler.""" - return GenericOptionsFlowHandler(config_entry) + return GenericOptionsFlowHandler() def check_for_existing(self, options: dict[str, Any]) -> bool: """Check whether an existing entry is using the same URLs.""" @@ -339,6 
+339,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle the start of the config flow.""" errors = {} + description_placeholders = {} hass = self.hass if user_input: # Secondary validation because serialised vol can't seem to handle this complexity: @@ -370,8 +371,10 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): title=self.title, data={}, options=self.user_input ) # temporary preview for user to check the image - self.context["preview_cam"] = user_input + self.preview_cam = user_input return await self.async_step_user_confirm_still() + if "error_details" in errors: + description_placeholders["error"] = errors.pop("error_details") elif self.user_input: user_input = self.user_input else: @@ -379,6 +382,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=build_schema(user_input), + description_placeholders=description_placeholders, errors=errors, ) @@ -409,9 +413,9 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): class GenericOptionsFlowHandler(OptionsFlow): """Handle Generic IP Camera options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize Generic IP Camera options flow.""" - self.config_entry = config_entry + self.preview_cam: dict[str, Any] = {} self.user_input: dict[str, Any] = {} async def async_step_init( @@ -443,7 +447,7 @@ class GenericOptionsFlowHandler(OptionsFlow): } self.user_input = data # temporary preview for user to check the image - self.context["preview_cam"] = data + self.preview_cam = data return await self.async_step_confirm_still() return self.async_show_form( step_id="init", @@ -494,15 +498,17 @@ class CameraImagePreview(HomeAssistantView): async def get(self, request: web.Request, flow_id: str) -> web.Response: """Start a GET request.""" _LOGGER.debug("processing GET request for flow_id=%s", flow_id) - try: - flow = self.hass.config_entries.flow.async_get(flow_id) - except UnknownFlow: - try: - flow = self.hass.config_entries.options.async_get(flow_id) - except UnknownFlow as exc: - _LOGGER.warning("Unknown flow while getting image preview") - raise web.HTTPNotFound from exc - user_input = flow["context"]["preview_cam"] + flow = cast( + GenericIPCamConfigFlow, + self.hass.config_entries.flow._progress.get(flow_id), # noqa: SLF001 + ) or cast( + GenericOptionsFlowHandler, + self.hass.config_entries.options._progress.get(flow_id), # noqa: SLF001 + ) + if not flow: + _LOGGER.warning("Unknown flow while getting image preview") + raise web.HTTPNotFound + user_input = flow.preview_cam camera = GenericCamera(self.hass, user_input, flow_id, "preview") if not camera.is_on: _LOGGER.debug("Camera is off") diff --git a/homeassistant/components/generic/diagnostics.py b/homeassistant/components/generic/diagnostics.py index e5bf4294e4a..3150ba0cd4c 100644 --- a/homeassistant/components/generic/diagnostics.py +++ b/homeassistant/components/generic/diagnostics.py @@ -23,12 +23,16 @@ TO_REDACT = { def redact_url(data: str) -> str: """Redact credentials from string url.""" url = url_in = yarl.URL(data) + # https://github.com/pylint-dev/pylint/issues/3484 + # pylint: disable-next=using-constant-test if url_in.user: url = url.with_user("****") + # pylint: disable-next=using-constant-test if url_in.password: url = url.with_password("****") if url_in.path != "/": url = url.with_path("****") + # pylint: disable-next=using-constant-test if url_in.query_string: url = url.with_query("****=****") return 
str(url) diff --git a/homeassistant/components/generic/icons.json b/homeassistant/components/generic/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/generic/icons.json +++ b/homeassistant/components/generic/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/generic/manifest.json b/homeassistant/components/generic/manifest.json index b19d6d6293e..c1fbc16d9be 100644 --- a/homeassistant/components/generic/manifest.json +++ b/homeassistant/components/generic/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/generic", "integration_type": "device", "iot_class": "local_push", - "requirements": ["ha-av==10.1.1", "Pillow==10.4.0"] + "requirements": ["av==13.1.0", "Pillow==11.0.0"] } diff --git a/homeassistant/components/generic/strings.json b/homeassistant/components/generic/strings.json index b05f17efc8d..94360a5b7c2 100644 --- a/homeassistant/components/generic/strings.json +++ b/homeassistant/components/generic/strings.json @@ -3,6 +3,7 @@ "config": { "error": { "unknown": "[%key:common::config_flow::error::unknown%]", + "unknown_with_details": "An unknown error occurred: {error}", "already_exists": "A camera with these URL settings already exists.", "unable_still_load": "Unable to load valid image from still image URL (e.g. invalid host, URL or authentication failure). Review log for more info.", "unable_still_load_auth": "Unable to load valid image from still image URL: The camera may require a user name and password, or they are not correct.", diff --git a/homeassistant/components/generic_hygrostat/humidifier.py b/homeassistant/components/generic_hygrostat/humidifier.py index ab29e587232..69c4fb3cdf4 100644 --- a/homeassistant/components/generic_hygrostat/humidifier.py +++ b/homeassistant/components/generic_hygrostat/humidifier.py @@ -36,6 +36,7 @@ from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, Event, EventStateChangedData, + EventStateReportedData, HomeAssistant, State, callback, @@ -45,6 +46,7 @@ from homeassistant.helpers.device import async_device_info_to_link_from_entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import ( async_track_state_change_event, + async_track_state_report_event, async_track_time_interval, ) from homeassistant.helpers.restore_state import RestoreEntity @@ -72,7 +74,6 @@ _LOGGER = logging.getLogger(__name__) ATTR_SAVED_HUMIDITY = "saved_humidity" - PLATFORM_SCHEMA = HUMIDIFIER_PLATFORM_SCHEMA.extend(HYGROSTAT_SCHEMA.schema) @@ -222,18 +223,21 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): """Run when entity about to be added.""" await super().async_added_to_hass() - # Add listener self.async_on_remove( async_track_state_change_event( - self.hass, self._sensor_entity_id, self._async_sensor_changed_event + self.hass, self._sensor_entity_id, self._async_sensor_event + ) + ) + self.async_on_remove( + async_track_state_report_event( + self.hass, self._sensor_entity_id, self._async_sensor_event ) ) self.async_on_remove( async_track_state_change_event( - self.hass, self._switch_entity_id, self._async_switch_changed_event + self.hass, self._switch_entity_id, self._async_switch_event ) ) - if self._keep_alive: self.async_on_remove( async_track_time_interval( @@ -253,7 +257,8 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): sensor_state.state if sensor_state is not None else "None", ) return - await 
self._async_sensor_changed(self._sensor_entity_id, None, sensor_state) + + await self._async_sensor_update(sensor_state) self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _async_startup) @@ -391,25 +396,23 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): # Get default humidity from super class return super().max_humidity - async def _async_sensor_changed_event( - self, event: Event[EventStateChangedData] - ) -> None: - """Handle ambient humidity changes.""" - data = event.data - await self._async_sensor_changed( - data["entity_id"], data["old_state"], data["new_state"] - ) - - async def _async_sensor_changed( - self, entity_id: str, old_state: State | None, new_state: State | None + async def _async_sensor_event( + self, event: Event[EventStateChangedData] | Event[EventStateReportedData] ) -> None: """Handle ambient humidity changes.""" + new_state = event.data["new_state"] if new_state is None: return + await self._async_sensor_update(new_state) + + async def _async_sensor_update(self, new_state: State) -> None: + """Update state based on humidity sensor.""" + if self._sensor_stale_duration: if self._remove_stale_tracking: self._remove_stale_tracking() + self._remove_stale_tracking = async_track_time_interval( self.hass, self._async_sensor_not_responding, @@ -426,23 +429,18 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): state = self.hass.states.get(self._sensor_entity_id) _LOGGER.debug( "Sensor has not been updated for %s", - now - state.last_updated if now and state else "---", + now - state.last_reported if now and state else "---", ) _LOGGER.warning("Sensor is stalled, call the emergency stop") await self._async_update_humidity("Stalled") @callback - def _async_switch_changed_event(self, event: Event[EventStateChangedData]) -> None: + def _async_switch_event(self, event: Event[EventStateChangedData]) -> None: """Handle humidifier switch state changes.""" - data = event.data - self._async_switch_changed( - data["entity_id"], data["old_state"], data["new_state"] - ) + self._async_switch_changed(event.data["new_state"]) @callback - def _async_switch_changed( - self, entity_id: str, old_state: State | None, new_state: State | None - ) -> None: + def _async_switch_changed(self, new_state: State | None) -> None: """Handle humidifier switch state changes.""" if new_state is None: return @@ -482,7 +480,7 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): ): self._active = True force = True - _LOGGER.info( + _LOGGER.debug( ( "Obtained current and target humidity. " "Generic hygrostat active. 
%s, %s" @@ -532,7 +530,7 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): ) or ( self._device_class == HumidifierDeviceClass.DEHUMIDIFIER and too_dry ): - _LOGGER.info("Turning off humidifier %s", self._switch_entity_id) + _LOGGER.debug("Turning off humidifier %s", self._switch_entity_id) await self._async_device_turn_off() elif time is not None: # The time argument is passed only in keep-alive case @@ -540,7 +538,7 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): elif ( self._device_class == HumidifierDeviceClass.HUMIDIFIER and too_dry ) or (self._device_class == HumidifierDeviceClass.DEHUMIDIFIER and too_wet): - _LOGGER.info("Turning on humidifier %s", self._switch_entity_id) + _LOGGER.debug("Turning on humidifier %s", self._switch_entity_id) await self._async_device_turn_on() elif time is not None: # The time argument is passed only in keep-alive case diff --git a/homeassistant/components/generic_hygrostat/strings.json b/homeassistant/components/generic_hygrostat/strings.json index a21ab68c628..7b8d56dbaa5 100644 --- a/homeassistant/components/generic_hygrostat/strings.json +++ b/homeassistant/components/generic_hygrostat/strings.json @@ -3,8 +3,8 @@ "config": { "step": { "user": { - "title": "Add generic hygrostat", - "description": "Create a entity that control the humidity via a switch and sensor.", + "title": "Create generic hygrostat", + "description": "Create a humidifier entity that controls the humidity via a switch and sensor.", "data": { "device_class": "Device class", "dry_tolerance": "Dry tolerance", @@ -17,7 +17,7 @@ "data_description": { "dry_tolerance": "The minimum amount of difference between the humidity read by the sensor specified in the target sensor option and the target humidity that must change prior to being switched on.", "humidifier": "Humidifier or dehumidifier switch; must be a toggle device.", - "min_cycle_duration": "Set a minimum amount of time that the switch specified in the humidifier option must be in its current state prior to being switched either off or on.", + "min_cycle_duration": "Set a minimum duration for which the specified switch must remain in its current state before it can be toggled off or on.", "target_sensor": "Sensor with current humidity.", "wet_tolerance": "The minimum amount of difference between the humidity read by the sensor specified in the target sensor option and the target humidity that must change prior to being switched off." 
} diff --git a/homeassistant/components/generic_thermostat/climate.py b/homeassistant/components/generic_thermostat/climate.py index 2a118b70879..dd6829eacce 100644 --- a/homeassistant/components/generic_thermostat/climate.py +++ b/homeassistant/components/generic_thermostat/climate.py @@ -63,7 +63,9 @@ from .const import ( CONF_COLD_TOLERANCE, CONF_HEATER, CONF_HOT_TOLERANCE, + CONF_MAX_TEMP, CONF_MIN_DUR, + CONF_MIN_TEMP, CONF_PRESETS, CONF_SENSOR, DEFAULT_TOLERANCE, @@ -77,8 +79,6 @@ DEFAULT_NAME = "Generic Thermostat" CONF_INITIAL_HVAC_MODE = "initial_hvac_mode" CONF_KEEP_ALIVE = "keep_alive" -CONF_MIN_TEMP = "min_temp" -CONF_MAX_TEMP = "max_temp" CONF_PRECISION = "precision" CONF_TARGET_TEMP = "target_temp" CONF_TEMP_STEP = "target_temp_step" @@ -205,7 +205,6 @@ class GenericThermostat(ClimateEntity, RestoreEntity): """Representation of a Generic Thermostat device.""" _attr_should_poll = False - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -500,7 +499,7 @@ class GenericThermostat(ClimateEntity, RestoreEntity): self._target_temp, ): self._active = True - _LOGGER.info( + _LOGGER.debug( ( "Obtained current and target temperature. " "Generic thermostat active. %s, %s" @@ -539,21 +538,21 @@ class GenericThermostat(ClimateEntity, RestoreEntity): too_hot = self._cur_temp >= self._target_temp + self._hot_tolerance if self._is_device_active: if (self.ac_mode and too_cold) or (not self.ac_mode and too_hot): - _LOGGER.info("Turning off heater %s", self.heater_entity_id) + _LOGGER.debug("Turning off heater %s", self.heater_entity_id) await self._async_heater_turn_off() elif time is not None: # The time argument is passed only in keep-alive case - _LOGGER.info( + _LOGGER.debug( "Keep-alive - Turning on heater heater %s", self.heater_entity_id, ) await self._async_heater_turn_on() elif (self.ac_mode and too_hot) or (not self.ac_mode and too_cold): - _LOGGER.info("Turning on heater %s", self.heater_entity_id) + _LOGGER.debug("Turning on heater %s", self.heater_entity_id) await self._async_heater_turn_on() elif time is not None: # The time argument is passed only in keep-alive case - _LOGGER.info( + _LOGGER.debug( "Keep-alive - Turning off heater %s", self.heater_entity_id ) await self._async_heater_turn_off() diff --git a/homeassistant/components/generic_thermostat/config_flow.py b/homeassistant/components/generic_thermostat/config_flow.py index e9079a9f41a..1fbeaefde6b 100644 --- a/homeassistant/components/generic_thermostat/config_flow.py +++ b/homeassistant/components/generic_thermostat/config_flow.py @@ -21,7 +21,9 @@ from .const import ( CONF_COLD_TOLERANCE, CONF_HEATER, CONF_HOT_TOLERANCE, + CONF_MAX_TEMP, CONF_MIN_DUR, + CONF_MIN_TEMP, CONF_PRESETS, CONF_SENSOR, DEFAULT_TOLERANCE, @@ -57,12 +59,22 @@ OPTIONS_SCHEMA = { vol.Optional(CONF_MIN_DUR): selector.DurationSelector( selector.DurationSelectorConfig(allow_negative=False) ), + vol.Optional(CONF_MIN_TEMP): selector.NumberSelector( + selector.NumberSelectorConfig( + mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE, step=0.1 + ) + ), + vol.Optional(CONF_MAX_TEMP): selector.NumberSelector( + selector.NumberSelectorConfig( + mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE, step=0.1 + ) + ), } PRESETS_SCHEMA = { vol.Optional(v): selector.NumberSelector( selector.NumberSelectorConfig( - mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE + mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE, step=0.1 ) ) for v in CONF_PRESETS.values() diff --git 
a/homeassistant/components/generic_thermostat/const.py b/homeassistant/components/generic_thermostat/const.py index 51927297b63..f0e6f1a7d73 100644 --- a/homeassistant/components/generic_thermostat/const.py +++ b/homeassistant/components/generic_thermostat/const.py @@ -18,7 +18,9 @@ CONF_AC_MODE = "ac_mode" CONF_COLD_TOLERANCE = "cold_tolerance" CONF_HEATER = "heater" CONF_HOT_TOLERANCE = "hot_tolerance" +CONF_MAX_TEMP = "max_temp" CONF_MIN_DUR = "min_cycle_duration" +CONF_MIN_TEMP = "min_temp" CONF_PRESETS = { p: f"{p}_temp" for p in ( diff --git a/homeassistant/components/generic_thermostat/icons.json b/homeassistant/components/generic_thermostat/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/generic_thermostat/icons.json +++ b/homeassistant/components/generic_thermostat/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/generic_thermostat/strings.json b/homeassistant/components/generic_thermostat/strings.json index 1ddd41de734..58280e99543 100644 --- a/homeassistant/components/generic_thermostat/strings.json +++ b/homeassistant/components/generic_thermostat/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add generic thermostat helper", + "title": "Create generic thermostat", "description": "Create a climate entity that controls the temperature via a switch and sensor.", "data": { "ac_mode": "Cooling mode", @@ -12,13 +12,15 @@ "min_cycle_duration": "Minimum cycle duration", "name": "[%key:common::config_flow::data::name%]", "cold_tolerance": "Cold tolerance", - "hot_tolerance": "Hot tolerance" + "hot_tolerance": "Hot tolerance", + "min_temp": "Minimum target temperature", + "max_temp": "Maximum target temperature" }, "data_description": { "ac_mode": "Set the actuator specified to be treated as a cooling device instead of a heating device.", "heater": "Switch entity used to cool or heat depending on A/C mode.", - "target_sensor": "Temperature sensor that reflect the current temperature.", - "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on. This option will be ignored if the keep alive option is set.", + "target_sensor": "Temperature sensor that reflects the current temperature.", + "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.", "cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor equals or goes below 24.5.", "hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5." 
} @@ -45,7 +47,9 @@ "target_sensor": "[%key:component::generic_thermostat::config::step::user::data::target_sensor%]", "min_cycle_duration": "[%key:component::generic_thermostat::config::step::user::data::min_cycle_duration%]", "cold_tolerance": "[%key:component::generic_thermostat::config::step::user::data::cold_tolerance%]", - "hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data::hot_tolerance%]" + "hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data::hot_tolerance%]", + "min_temp": "[%key:component::generic_thermostat::config::step::user::data::min_temp%]", + "max_temp": "[%key:component::generic_thermostat::config::step::user::data::max_temp%]" }, "data_description": { "heater": "[%key:component::generic_thermostat::config::step::user::data_description::heater%]", diff --git a/homeassistant/components/geniushub/__init__.py b/homeassistant/components/geniushub/__init__.py index 836add310b6..9ca6ecfcfe0 100644 --- a/homeassistant/components/geniushub/__init__.py +++ b/homeassistant/components/geniushub/__init__.py @@ -9,7 +9,6 @@ import aiohttp from geniushubclient import GeniusHub import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ENTITY_ID, @@ -21,20 +20,12 @@ from homeassistant.const import ( CONF_USERNAME, Platform, ) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - HomeAssistant, - ServiceCall, - callback, -) -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.service import verify_domain_control -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN @@ -45,27 +36,6 @@ SCAN_INTERVAL = timedelta(seconds=60) MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$" -CLOUD_API_SCHEMA = vol.Schema( - { - vol.Required(CONF_TOKEN): cv.string, - vol.Required(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), - } -) - - -LOCAL_API_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), - } -) - -CONFIG_SCHEMA = vol.Schema( - {DOMAIN: vol.Any(LOCAL_API_SCHEMA, CLOUD_API_SCHEMA)}, extra=vol.ALLOW_EXTRA -) - ATTR_ZONE_MODE = "mode" ATTR_DURATION = "duration" @@ -91,63 +61,13 @@ SET_ZONE_OVERRIDE_SCHEMA = vol.Schema( } ) -PLATFORMS = ( - Platform.CLIMATE, - Platform.WATER_HEATER, - Platform.SENSOR, +PLATFORMS = [ Platform.BINARY_SENSOR, + Platform.CLIMATE, + Platform.SENSOR, Platform.SWITCH, -) - - -async def _async_import(hass: HomeAssistant, base_config: ConfigType) -> None: - """Import a config entry from configuration.yaml.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=base_config[DOMAIN], - ) - if ( - result["type"] is FlowResultType.CREATE_ENTRY - or result["reason"] == "already_configured" - ): - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - 
breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Genius Hub", - }, - ) - return - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Genius Hub", - }, - ) - - -async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool: - """Set up a Genius Hub system.""" - if DOMAIN in base_config: - hass.async_create_task(_async_import(hass, base_config)) - return True + Platform.WATER_HEATER, +] type GeniusHubConfigEntry = ConfigEntry[GeniusBroker] @@ -155,6 +75,19 @@ type GeniusHubConfigEntry = ConfigEntry[GeniusBroker] async def async_setup_entry(hass: HomeAssistant, entry: GeniusHubConfigEntry) -> bool: """Create a Genius Hub system.""" + if CONF_TOKEN in entry.data and CONF_MAC in entry.data: + entity_registry = er.async_get(hass) + registry_entries = er.async_entries_for_config_entry( + entity_registry, entry.entry_id + ) + for reg_entry in registry_entries: + if reg_entry.unique_id.startswith(entry.data[CONF_MAC]): + entity_registry.async_update_entity( + reg_entry.entity_id, + new_unique_id=reg_entry.unique_id.replace( + entry.data[CONF_MAC], entry.entry_id + ), + ) session = async_get_clientsession(hass) if CONF_HOST in entry.data: @@ -169,9 +102,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: GeniusHubConfigEntry) -> unique_id = entry.unique_id or entry.entry_id - broker = entry.runtime_data = GeniusBroker( - hass, client, entry.data.get(CONF_MAC, unique_id) - ) + broker = entry.runtime_data = GeniusBroker(hass, client, unique_id) try: await client.update() @@ -239,7 +170,7 @@ class GeniusBroker: await self.client.update() if self._connect_error: self._connect_error = False - _LOGGER.info("Connection to geniushub re-established") + _LOGGER.warning("Connection to geniushub re-established") except ( aiohttp.ClientResponseError, aiohttp.client_exceptions.ClientConnectorError, diff --git a/homeassistant/components/geniushub/climate.py b/homeassistant/components/geniushub/climate.py index 99d1bde8099..e20d649541e 100644 --- a/homeassistant/components/geniushub/climate.py +++ b/homeassistant/components/geniushub/climate.py @@ -51,7 +51,6 @@ class GeniusClimateZone(GeniusHeatingZone, ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, broker, zone) -> None: """Initialize the climate device.""" diff --git a/homeassistant/components/geniushub/config_flow.py b/homeassistant/components/geniushub/config_flow.py index 5f026c91ee1..b106f9907bb 100644 --- a/homeassistant/components/geniushub/config_flow.py +++ b/homeassistant/components/geniushub/config_flow.py @@ -13,7 +13,6 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME -from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN @@ -123,14 +122,3 @@ class GeniusHubConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( 
step_id="cloud_api", errors=errors, data_schema=CLOUD_API_SCHEMA ) - - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Import the yaml config.""" - if CONF_HOST in user_input: - result = await self.async_step_local_api(user_input) - else: - result = await self.async_step_cloud_api(user_input) - if result["type"] is FlowResultType.FORM: - assert result["errors"] - return self.async_abort(reason=result["errors"]["base"]) - return result diff --git a/homeassistant/components/geniushub/icons.json b/homeassistant/components/geniushub/icons.json index 41697b419a8..c8a59dedbbd 100644 --- a/homeassistant/components/geniushub/icons.json +++ b/homeassistant/components/geniushub/icons.json @@ -1,7 +1,13 @@ { "services": { - "set_zone_mode": "mdi:auto-mode", - "set_zone_override": "mdi:thermometer-lines", - "set_switch_override": "mdi:toggle-switch-variant" + "set_zone_mode": { + "service": "mdi:auto-mode" + }, + "set_zone_override": { + "service": "mdi:thermometer-lines" + }, + "set_switch_override": { + "service": "mdi:toggle-switch-variant" + } } } diff --git a/homeassistant/components/geo_json_events/manifest.json b/homeassistant/components/geo_json_events/manifest.json index 8f4b36657dd..c41796514a5 100644 --- a/homeassistant/components/geo_json_events/manifest.json +++ b/homeassistant/components/geo_json_events/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_geojson_generic_client"], - "requirements": ["aio-geojson-generic-client==0.4"] + "requirements": ["aio-geojson-generic-client==0.5"] } diff --git a/homeassistant/components/geo_location/__init__.py b/homeassistant/components/geo_location/__init__.py index e0c8d806fe6..877471f002a 100644 --- a/homeassistant/components/geo_location/__init__.py +++ b/homeassistant/components/geo_location/__init__.py @@ -3,10 +3,11 @@ from __future__ import annotations from datetime import timedelta -from functools import cached_property import logging from typing import Any, final +from propcache import cached_property + from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE from homeassistant.core import HomeAssistant @@ -14,10 +15,12 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey _LOGGER = logging.getLogger(__name__) DOMAIN = "geo_location" +DATA_COMPONENT: HassKey[EntityComponent[GeolocationEvent]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -32,7 +35,7 @@ ATTR_SOURCE = "source" async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Geolocation component.""" - component = hass.data[DOMAIN] = EntityComponent[GeolocationEvent]( + component = hass.data[DATA_COMPONENT] = EntityComponent[GeolocationEvent]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -41,14 +44,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[GeolocationEvent] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) 
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[GeolocationEvent] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) CACHED_PROPERTIES_WITH_ATTR_ = { diff --git a/homeassistant/components/geo_rss_events/manifest.json b/homeassistant/components/geo_rss_events/manifest.json index 17640e37278..7c089bfa4e9 100644 --- a/homeassistant/components/geo_rss_events/manifest.json +++ b/homeassistant/components/geo_rss_events/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/geo_rss_events", "iot_class": "cloud_polling", "loggers": ["georss_client", "georss_generic_client"], + "quality_scale": "legacy", "requirements": ["georss-generic-client==0.8"] } diff --git a/homeassistant/components/geofency/device_tracker.py b/homeassistant/components/geofency/device_tracker.py index b72ad4bc04c..2ad3c1772de 100644 --- a/homeassistant/components/geofency/device_tracker.py +++ b/homeassistant/components/geofency/device_tracker.py @@ -1,6 +1,6 @@ """Support for the Geofency device tracker platform.""" -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE from homeassistant.core import HomeAssistant, callback @@ -57,51 +57,19 @@ class GeofencyEntity(TrackerEntity, RestoreEntity): def __init__(self, device, gps=None, location_name=None, attributes=None): """Set up Geofency entity.""" - self._attributes = attributes or {} + self._attr_extra_state_attributes = attributes or {} self._name = device - self._location_name = location_name - self._gps = gps + self._attr_location_name = location_name + if gps: + self._attr_latitude = gps[0] + self._attr_longitude = gps[1] self._unsub_dispatcher = None - self._unique_id = device - - @property - def extra_state_attributes(self): - """Return device specific attributes.""" - return self._attributes - - @property - def latitude(self): - """Return latitude value of the device.""" - return self._gps[0] - - @property - def longitude(self): - """Return longitude value of the device.""" - return self._gps[1] - - @property - def location_name(self): - """Return a location name for the current location of the device.""" - return self._location_name - - @property - def unique_id(self): - """Return the unique ID.""" - return self._unique_id - - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - return DeviceInfo( - identifiers={(GF_DOMAIN, self._unique_id)}, - name=self._name, + self._attr_unique_id = device + self._attr_device_info = DeviceInfo( + identifiers={(GF_DOMAIN, device)}, + name=device, ) - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS - async def async_added_to_hass(self) -> None: """Register state update callback.""" await super().async_added_to_hass() @@ -109,21 +77,23 @@ class GeofencyEntity(TrackerEntity, RestoreEntity): self.hass, TRACKER_UPDATE, self._async_receive_data ) - if self._attributes: + if self._attr_extra_state_attributes: return if (state := await self.async_get_last_state()) is None: - self._gps = (None, None) + self._attr_latitude = None + self._attr_longitude = None return attr = state.attributes - self._gps = 
(attr.get(ATTR_LATITUDE), attr.get(ATTR_LONGITUDE)) + self._attr_latitude = attr.get(ATTR_LATITUDE) + self._attr_longitude = attr.get(ATTR_LONGITUDE) async def async_will_remove_from_hass(self) -> None: """Clean up after entity before removal.""" await super().async_will_remove_from_hass() self._unsub_dispatcher() - self.hass.data[GF_DOMAIN]["devices"].remove(self._unique_id) + self.hass.data[GF_DOMAIN]["devices"].remove(self.unique_id) @callback def _async_receive_data(self, device, gps, location_name, attributes): @@ -131,7 +101,8 @@ class GeofencyEntity(TrackerEntity, RestoreEntity): if device != self._name: return - self._attributes.update(attributes) - self._location_name = location_name - self._gps = gps + self._attr_extra_state_attributes.update(attributes) + self._attr_location_name = location_name + self._attr_latitude = gps[0] + self._attr_longitude = gps[1] self.async_write_ha_state() diff --git a/homeassistant/components/geonetnz_quakes/config_flow.py b/homeassistant/components/geonetnz_quakes/config_flow.py index ac5a2e8c48e..083ac29b362 100644 --- a/homeassistant/components/geonetnz_quakes/config_flow.py +++ b/homeassistant/components/geonetnz_quakes/config_flow.py @@ -45,9 +45,9 @@ class GeonetnzQuakesFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors or {} ) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/geonetnz_quakes/diagnostics.py b/homeassistant/components/geonetnz_quakes/diagnostics.py new file mode 100644 index 00000000000..fbe9bf511aa --- /dev/null +++ b/homeassistant/components/geonetnz_quakes/diagnostics.py @@ -0,0 +1,39 @@ +"""Diagnostics support for GeoNet NZ Quakes Feeds integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant + +from . 
import GeonetnzQuakesFeedEntityManager +from .const import DOMAIN, FEED + +TO_REDACT = {CONF_LATITUDE, CONF_LONGITUDE} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data: dict[str, Any] = { + "info": async_redact_data(config_entry.data, TO_REDACT), + } + + manager: GeonetnzQuakesFeedEntityManager = hass.data[DOMAIN][FEED][ + config_entry.entry_id + ] + status_info = manager.status_info() + if status_info: + data["service"] = { + "status": status_info.status, + "total": status_info.total, + "last_update": status_info.last_update, + "last_update_successful": status_info.last_update_successful, + "last_timestamp": status_info.last_timestamp, + } + + return data diff --git a/homeassistant/components/geonetnz_quakes/manifest.json b/homeassistant/components/geonetnz_quakes/manifest.json index 2314dabcf0f..e8f4ee1a8c1 100644 --- a/homeassistant/components/geonetnz_quakes/manifest.json +++ b/homeassistant/components/geonetnz_quakes/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_geojson_geonetnz_quakes"], - "quality_scale": "platinum", "requirements": ["aio-geojson-geonetnz-quakes==0.16"] } diff --git a/homeassistant/components/geonetnz_volcano/config_flow.py b/homeassistant/components/geonetnz_volcano/config_flow.py index 12c7157b7e4..cf3d5bc1139 100644 --- a/homeassistant/components/geonetnz_volcano/config_flow.py +++ b/homeassistant/components/geonetnz_volcano/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.const import ( CONF_SCAN_INTERVAL, CONF_UNIT_SYSTEM, ) -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM @@ -26,7 +26,7 @@ from .const import ( @callback -def configured_instances(hass): +def configured_instances(hass: HomeAssistant) -> set[str]: """Return a set of configured GeoNet NZ Volcano instances.""" return { f"{entry.data[CONF_LATITUDE]}, {entry.data[CONF_LONGITUDE]}" @@ -47,9 +47,9 @@ class GeonetnzVolcanoFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=data_schema, errors=errors or {} ) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/geonetnz_volcano/strings.json b/homeassistant/components/geonetnz_volcano/strings.json index 867d2840fb7..f49fb4f9830 100644 --- a/homeassistant/components/geonetnz_volcano/strings.json +++ b/homeassistant/components/geonetnz_volcano/strings.json @@ -6,7 +6,7 @@ "data": { "radius": "Radius" } } }, - "abort": { + "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_location%]" } } diff --git a/homeassistant/components/gios/manifest.json b/homeassistant/components/gios/manifest.json index b509806d07f..3d2e719fab6 100644 --- a/homeassistant/components/gios/manifest.json +++ b/homeassistant/components/gios/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["dacite", "gios"], - "quality_scale": "platinum", - "requirements": ["gios==4.0.0"] + 
"requirements": ["gios==5.0.0"] } diff --git a/homeassistant/components/github/config_flow.py b/homeassistant/components/github/config_flow.py index 25d8782618f..9977f9d84cc 100644 --- a/homeassistant/components/github/config_flow.py +++ b/homeassistant/components/github/config_flow.py @@ -211,16 +211,12 @@ class GitHubConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for GitHub.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None, diff --git a/homeassistant/components/github/manifest.json b/homeassistant/components/github/manifest.json index cae2e7faca9..e202f805ec6 100644 --- a/homeassistant/components/github/manifest.json +++ b/homeassistant/components/github/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/github", "iot_class": "cloud_polling", "loggers": ["aiogithubapi"], - "requirements": ["aiogithubapi==23.11.0"] + "requirements": ["aiogithubapi==24.6.0"] } diff --git a/homeassistant/components/github/sensor.py b/homeassistant/components/github/sensor.py index 9a2b5ef5ac4..614ebe254c4 100644 --- a/homeassistant/components/github/sensor.py +++ b/homeassistant/components/github/sensor.py @@ -37,7 +37,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="discussions_count", translation_key="discussions_count", - native_unit_of_measurement="Discussions", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["discussion"]["total"], @@ -45,7 +44,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="stargazers_count", translation_key="stargazers_count", - native_unit_of_measurement="Stars", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["stargazers_count"], @@ -53,7 +51,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="subscribers_count", translation_key="subscribers_count", - native_unit_of_measurement="Watchers", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["watchers"]["total"], @@ -61,7 +58,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="forks_count", translation_key="forks_count", - native_unit_of_measurement="Forks", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["forks_count"], @@ -69,7 +65,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="issues_count", translation_key="issues_count", - native_unit_of_measurement="Issues", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["issue"]["total"], @@ -77,7 +72,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] 
= ( GitHubSensorEntityDescription( key="pulls_count", translation_key="pulls_count", - native_unit_of_measurement="Pull Requests", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["pull_request"]["total"], diff --git a/homeassistant/components/github/strings.json b/homeassistant/components/github/strings.json index 130b404015c..bcda47d72fb 100644 --- a/homeassistant/components/github/strings.json +++ b/homeassistant/components/github/strings.json @@ -9,7 +9,7 @@ } }, "progress": { - "wait_for_device": "Open {url}, and paste the following code to authorize the integration: \n```\n{code}\n```\n" + "wait_for_device": "Open {url}, and paste the following code to authorize the integration: \n```\n{code}\n```" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", @@ -19,22 +19,28 @@ "entity": { "sensor": { "discussions_count": { - "name": "Discussions" + "name": "Discussions", + "unit_of_measurement": "discussions" }, "stargazers_count": { - "name": "Stars" + "name": "Stars", + "unit_of_measurement": "stars" }, "subscribers_count": { - "name": "Watchers" + "name": "Watchers", + "unit_of_measurement": "watchers" }, "forks_count": { - "name": "Forks" + "name": "Forks", + "unit_of_measurement": "forks" }, "issues_count": { - "name": "Issues" + "name": "Issues", + "unit_of_measurement": "issues" }, "pulls_count": { - "name": "Pull requests" + "name": "Pull requests", + "unit_of_measurement": "pull requests" }, "latest_commit": { "name": "Latest commit" diff --git a/homeassistant/components/gitlab_ci/manifest.json b/homeassistant/components/gitlab_ci/manifest.json index 36fb356dae4..58fd827ff31 100644 --- a/homeassistant/components/gitlab_ci/manifest.json +++ b/homeassistant/components/gitlab_ci/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/gitlab_ci", "iot_class": "cloud_polling", "loggers": ["gitlab"], + "quality_scale": "legacy", "requirements": ["python-gitlab==1.6.0"] } diff --git a/homeassistant/components/gitter/manifest.json b/homeassistant/components/gitter/manifest.json index 009746a06c6..c578f7c2242 100644 --- a/homeassistant/components/gitter/manifest.json +++ b/homeassistant/components/gitter/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/gitter", "iot_class": "cloud_polling", "loggers": ["gitterpy"], + "quality_scale": "legacy", "requirements": ["gitterpy==0.1.7"] } diff --git a/homeassistant/components/glances/__init__.py b/homeassistant/components/glances/__init__.py index 0ddd8a86979..9d09e63606e 100644 --- a/homeassistant/components/glances/__init__.py +++ b/homeassistant/components/glances/__init__.py @@ -28,9 +28,7 @@ from homeassistant.exceptions import ( HomeAssistantError, ) from homeassistant.helpers.httpx_client import get_async_client -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from .const import DOMAIN from .coordinator import GlancesDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] @@ -71,7 +69,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: GlancesConfigEntry) -> async def get_api(hass: HomeAssistant, entry_data: dict[str, Any]) -> Glances: """Return the api from glances_api.""" httpx_client = get_async_client(hass, verify_ssl=entry_data[CONF_VERIFY_SSL]) - for version in (4, 3, 2): + for version in (4, 3): api = Glances( host=entry_data[CONF_HOST], port=entry_data[CONF_PORT], @@ -86,19 +84,9 @@ async def get_api(hass: 
HomeAssistant, entry_data: dict[str, Any]) -> Glances: except GlancesApiNoDataAvailable as err: _LOGGER.debug("Failed to connect to Glances API v%s: %s", version, err) continue - if version == 2: - async_create_issue( - hass, - DOMAIN, - "deprecated_version", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="deprecated_version", - ) _LOGGER.debug("Connected to Glances API v%s", version) return api - raise ServerVersionMismatch("Could not connect to Glances API version 2, 3 or 4") + raise ServerVersionMismatch("Could not connect to Glances API version 3 or 4") class ServerVersionMismatch(HomeAssistantError): diff --git a/homeassistant/components/glances/config_flow.py b/homeassistant/components/glances/config_flow.py index 9208a4b0ebd..1dbc939d532 100644 --- a/homeassistant/components/glances/config_flow.py +++ b/homeassistant/components/glances/config_flow.py @@ -11,7 +11,7 @@ from glances_api.exceptions import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -40,15 +40,11 @@ class GlancesFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a Glances config flow.""" VERSION = 1 - _reauth_entry: ConfigEntry | None async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -56,9 +52,10 @@ class GlancesFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - assert self._reauth_entry + + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input = {**self._reauth_entry.data, **user_input} + user_input = {**reauth_entry.data, **user_input} try: await get_api(self.hass, user_input) except GlancesApiAuthorizationError: @@ -67,15 +64,13 @@ class GlancesFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" else: self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input + reauth_entry, data=user_input ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + await self.hass.config_entries.async_reload(reauth_entry.entry_id) return self.async_abort(reason="reauth_successful") return self.async_show_form( - description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] - }, + description_placeholders={CONF_USERNAME: reauth_entry.data[CONF_USERNAME]}, step_id="reauth_confirm", data_schema=vol.Schema( { diff --git a/homeassistant/components/glances/strings.json b/homeassistant/components/glances/strings.json index 11735601ce9..92aa1b47e31 100644 --- a/homeassistant/components/glances/strings.json +++ b/homeassistant/components/glances/strings.json @@ -123,11 +123,5 @@ "name": "{sensor_label} TX" } } - }, - "issues": { - "deprecated_version": { - "title": "Glances servers with version 2 is deprecated", - "description": "Glances servers with version 2 is deprecated and will not be supported in future versions of HA. It is recommended to update your server to Glances version 3 then reload the integration." 
- } } } diff --git a/homeassistant/components/go2rtc/__init__.py b/homeassistant/components/go2rtc/__init__.py new file mode 100644 index 00000000000..31acdd2de50 --- /dev/null +++ b/homeassistant/components/go2rtc/__init__.py @@ -0,0 +1,293 @@ +"""The go2rtc component.""" + +import logging +import shutil + +from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError +from awesomeversion import AwesomeVersion +from go2rtc_client import Go2RtcRestClient +from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError +from go2rtc_client.ws import ( + Go2RtcWsClient, + ReceiveMessages, + WebRTCAnswer, + WebRTCCandidate, + WebRTCOffer, + WsError, +) +import voluptuous as vol +from webrtc_models import RTCIceCandidateInit + +from homeassistant.components.camera import ( + Camera, + CameraWebRTCProvider, + WebRTCAnswer as HAWebRTCAnswer, + WebRTCCandidate as HAWebRTCCandidate, + WebRTCError, + WebRTCMessage, + WebRTCSendMessage, + async_register_webrtc_provider, +) +from homeassistant.components.default_config import DOMAIN as DEFAULT_CONFIG_DOMAIN +from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry +from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP +from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import ( + config_validation as cv, + discovery_flow, + issue_registry as ir, +) +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey +from homeassistant.util.package import is_docker_env + +from .const import ( + CONF_DEBUG_UI, + DEBUG_UI_URL_MESSAGE, + DOMAIN, + HA_MANAGED_URL, + RECOMMENDED_VERSION, +) +from .server import Server + +_LOGGER = logging.getLogger(__name__) + +_SUPPORTED_STREAMS = frozenset( + ( + "bubble", + "dvrip", + "expr", + "ffmpeg", + "gopro", + "homekit", + "http", + "https", + "httpx", + "isapi", + "ivideon", + "kasa", + "nest", + "onvif", + "roborock", + "rtmp", + "rtmps", + "rtmpx", + "rtsp", + "rtsps", + "rtspx", + "tapo", + "tcp", + "webrtc", + "webtorrent", + ) +) + +CONFIG_SCHEMA = vol.Schema( + { + DOMAIN: vol.Schema( + { + vol.Exclusive(CONF_URL, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.url, + vol.Exclusive(CONF_DEBUG_UI, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.boolean, + } + ) + }, + extra=vol.ALLOW_EXTRA, +) + +_DATA_GO2RTC: HassKey[str] = HassKey(DOMAIN) +_RETRYABLE_ERRORS = (ClientConnectionError, ServerConnectionError) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up WebRTC.""" + url: str | None = None + if DOMAIN not in config and DEFAULT_CONFIG_DOMAIN not in config: + await _remove_go2rtc_entries(hass) + return True + + if not (configured_by_user := DOMAIN in config) or not ( + url := config[DOMAIN].get(CONF_URL) + ): + if not is_docker_env(): + if not configured_by_user: + # Remove config entry if it exists + await _remove_go2rtc_entries(hass) + return True + _LOGGER.warning("Go2rtc URL required in non-docker installs") + return False + if not (binary := await _get_binary(hass)): + _LOGGER.error("Could not find go2rtc docker binary") + return False + + # HA will manage the binary + server = Server( + hass, binary, enable_ui=config.get(DOMAIN, {}).get(CONF_DEBUG_UI, False) + ) + try: + await server.start() + except Exception: # noqa: BLE001 + _LOGGER.warning("Could not start go2rtc server", exc_info=True) + return False + + async def on_stop(event: 
Event) -> None: + await server.stop() + + hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, on_stop) + + url = HA_MANAGED_URL + + hass.data[_DATA_GO2RTC] = url + discovery_flow.async_create_flow( + hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={} + ) + return True + + +async def _remove_go2rtc_entries(hass: HomeAssistant) -> None: + """Remove go2rtc config entries, if any.""" + for entry in hass.config_entries.async_entries(DOMAIN): + await hass.config_entries.async_remove(entry.entry_id) + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up go2rtc from a config entry.""" + url = hass.data[_DATA_GO2RTC] + + # Validate the server URL + try: + client = Go2RtcRestClient(async_get_clientsession(hass), url) + version = await client.validate_server_version() + if version < AwesomeVersion(RECOMMENDED_VERSION): + ir.async_create_issue( + hass, + DOMAIN, + "recommended_version", + is_fixable=False, + is_persistent=False, + severity=ir.IssueSeverity.WARNING, + translation_key="recommended_version", + translation_placeholders={ + "recommended_version": RECOMMENDED_VERSION, + "current_version": str(version), + }, + ) + except Go2RtcClientError as err: + if isinstance(err.__cause__, _RETRYABLE_ERRORS): + raise ConfigEntryNotReady( + f"Could not connect to go2rtc instance on {url}" + ) from err + _LOGGER.warning("Could not connect to go2rtc instance on %s (%s)", url, err) + return False + except Go2RtcVersionError as err: + raise ConfigEntryNotReady( + f"The go2rtc server version is not supported, {err}" + ) from err + except Exception as err: # noqa: BLE001 + _LOGGER.warning("Could not connect to go2rtc instance on %s (%s)", url, err) + return False + + provider = WebRTCProvider(hass, url) + async_register_webrtc_provider(hass, provider) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a go2rtc config entry.""" + return True + + +async def _get_binary(hass: HomeAssistant) -> str | None: + """Return the binary path if found.""" + return await hass.async_add_executor_job(shutil.which, "go2rtc") + + +class WebRTCProvider(CameraWebRTCProvider): + """WebRTC provider.""" + + def __init__(self, hass: HomeAssistant, url: str) -> None: + """Initialize the WebRTC provider.""" + self._hass = hass + self._url = url + self._session = async_get_clientsession(hass) + self._rest_client = Go2RtcRestClient(self._session, url) + self._sessions: dict[str, Go2RtcWsClient] = {} + + @property + def domain(self) -> str: + """Return the integration domain of the provider.""" + return DOMAIN + + @callback + def async_is_supported(self, stream_source: str) -> bool: + """Return if this provider is supports the Camera as source.""" + return stream_source.partition(":")[0] in _SUPPORTED_STREAMS + + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback.""" + self._sessions[session_id] = ws_client = Go2RtcWsClient( + self._session, self._url, source=camera.entity_id + ) + + if not (stream_source := await camera.stream_source()): + send_message( + WebRTCError("go2rtc_webrtc_offer_failed", "Camera has no stream source") + ) + return + + streams = await self._rest_client.streams.list() + + if (stream := streams.get(camera.entity_id)) is None or not any( + stream_source == producer.url for producer in stream.producers + ): + await 
self._rest_client.streams.add( + camera.entity_id, + [ + stream_source, + # We are setting any ffmpeg rtsp related logs to debug + # Connection problems to the camera will be logged by the first stream + # Therefore setting it to debug will not hide any important logs + f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug", + ], + ) + + @callback + def on_messages(message: ReceiveMessages) -> None: + """Handle messages.""" + value: WebRTCMessage + match message: + case WebRTCCandidate(): + value = HAWebRTCCandidate(RTCIceCandidateInit(message.candidate)) + case WebRTCAnswer(): + value = HAWebRTCAnswer(message.sdp) + case WsError(): + value = WebRTCError("go2rtc_webrtc_offer_failed", message.error) + + send_message(value) + + ws_client.subscribe(on_messages) + config = camera.async_get_webrtc_client_configuration() + await ws_client.send(WebRTCOffer(offer_sdp, config.configuration.ice_servers)) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle the WebRTC candidate.""" + + if ws_client := self._sessions.get(session_id): + await ws_client.send(WebRTCCandidate(candidate.candidate)) + else: + _LOGGER.debug("Unknown session %s. Ignoring candidate", session_id) + + @callback + def async_close_session(self, session_id: str) -> None: + """Close the session.""" + ws_client = self._sessions.pop(session_id) + self._hass.async_create_task(ws_client.close()) diff --git a/homeassistant/components/go2rtc/config_flow.py b/homeassistant/components/go2rtc/config_flow.py new file mode 100644 index 00000000000..02fdfb656a6 --- /dev/null +++ b/homeassistant/components/go2rtc/config_flow.py @@ -0,0 +1,21 @@ +"""Config flow for the go2rtc integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult + +from .const import DOMAIN + + +class CloudConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for the go2rtc integration.""" + + VERSION = 1 + + async def async_step_system( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the system step.""" + return self.async_create_entry(title="go2rtc", data={}) diff --git a/homeassistant/components/go2rtc/const.py b/homeassistant/components/go2rtc/const.py new file mode 100644 index 00000000000..3c1c84c42b5 --- /dev/null +++ b/homeassistant/components/go2rtc/const.py @@ -0,0 +1,9 @@ +"""Go2rtc constants.""" + +DOMAIN = "go2rtc" + +CONF_DEBUG_UI = "debug_ui" +DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time." 
+HA_MANAGED_API_PORT = 11984 +HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/" +RECOMMENDED_VERSION = "1.9.7" diff --git a/homeassistant/components/go2rtc/manifest.json b/homeassistant/components/go2rtc/manifest.json new file mode 100644 index 00000000000..07dbd3bd29b --- /dev/null +++ b/homeassistant/components/go2rtc/manifest.json @@ -0,0 +1,13 @@ +{ + "domain": "go2rtc", + "name": "go2rtc", + "codeowners": ["@home-assistant/core"], + "config_flow": false, + "dependencies": ["camera"], + "documentation": "https://www.home-assistant.io/integrations/go2rtc", + "integration_type": "system", + "iot_class": "local_polling", + "quality_scale": "internal", + "requirements": ["go2rtc-client==0.1.2"], + "single_config_entry": true +} diff --git a/homeassistant/components/go2rtc/server.py b/homeassistant/components/go2rtc/server.py new file mode 100644 index 00000000000..6699ee4d8a2 --- /dev/null +++ b/homeassistant/components/go2rtc/server.py @@ -0,0 +1,252 @@ +"""Go2rtc server.""" + +import asyncio +from collections import deque +from contextlib import suppress +import logging +from tempfile import NamedTemporaryFile + +from go2rtc_client import Go2RtcRestClient + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import HA_MANAGED_API_PORT, HA_MANAGED_URL + +_LOGGER = logging.getLogger(__name__) +_TERMINATE_TIMEOUT = 5 +_SETUP_TIMEOUT = 30 +_SUCCESSFUL_BOOT_MESSAGE = "INF [api] listen addr=" +_LOCALHOST_IP = "127.0.0.1" +_LOG_BUFFER_SIZE = 512 +_RESPAWN_COOLDOWN = 1 + +# Default configuration for HA +# - The API listens on localhost only +# - Enable RTSP for localhost only, as ffmpeg needs it +# - Clear the default ICE servers +_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant +# Do not edit it manually + +api: + listen: "{api_ip}:{api_port}" + +rtsp: + listen: "127.0.0.1:18554" + +webrtc: + listen: ":18555/tcp" + ice_servers: [] +""" + +_LOG_LEVEL_MAP = { + "TRC": logging.DEBUG, + "DBG": logging.DEBUG, + "INF": logging.DEBUG, + "WRN": logging.WARNING, + "ERR": logging.WARNING, + "FTL": logging.ERROR, + "PNC": logging.ERROR, +} + + +class Go2RTCServerStartError(HomeAssistantError): + """Raised when the server does not start.""" + + _message = "Go2rtc server didn't start correctly" + + +class Go2RTCWatchdogError(HomeAssistantError): + """Raised on watchdog error.""" + + +def _create_temp_file(api_ip: str) -> str: + """Create a temporary config file.""" + # Set delete=False to prevent the file from being deleted when the file is closed + # Linux clears the tmp folder on reboot, so there is no need to delete it manually + with NamedTemporaryFile(prefix="go2rtc_", suffix=".yaml", delete=False) as file: + file.write( + _GO2RTC_CONFIG_FORMAT.format( + api_ip=api_ip, api_port=HA_MANAGED_API_PORT + ).encode() + ) + return file.name + + +class Server: + """Go2rtc server.""" + + def __init__( + self, hass: HomeAssistant, binary: str, *, enable_ui: bool = False + ) -> None: + """Initialize the server.""" + self._hass = hass + self._binary = binary + self._log_buffer: deque[str] = deque(maxlen=_LOG_BUFFER_SIZE) + self._process: asyncio.subprocess.Process | None = None + self._startup_complete = asyncio.Event() + self._api_ip = _LOCALHOST_IP + if enable_ui: + # Listen on all interfaces to allow access from all IPs + self._api_ip = "" + self._watchdog_task: asyncio.Task | None = None + self._watchdog_tasks: list[asyncio.Task] = [] + + async def 
start(self) -> None: + """Start the server.""" + await self._start() + self._watchdog_task = asyncio.create_task( + self._watchdog(), name="Go2rtc respawn" + ) + + async def _start(self) -> None: + """Start the server.""" + _LOGGER.debug("Starting go2rtc server") + config_file = await self._hass.async_add_executor_job( + _create_temp_file, self._api_ip + ) + + self._startup_complete.clear() + + self._process = await asyncio.create_subprocess_exec( + self._binary, + "-c", + config_file, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT, + close_fds=False, # required for posix_spawn on CPython < 3.13 + ) + + self._hass.async_create_background_task( + self._log_output(self._process), "Go2rtc log output" + ) + + try: + async with asyncio.timeout(_SETUP_TIMEOUT): + await self._startup_complete.wait() + except TimeoutError as err: + msg = "Go2rtc server didn't start correctly" + _LOGGER.exception(msg) + self._log_server_output(logging.WARNING) + await self._stop() + raise Go2RTCServerStartError from err + + # Check the server version + client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL) + await client.validate_server_version() + + async def _log_output(self, process: asyncio.subprocess.Process) -> None: + """Log the output of the process.""" + assert process.stdout is not None + + async for line in process.stdout: + msg = line[:-1].decode().strip() + self._log_buffer.append(msg) + loglevel = logging.WARNING + if len(split_msg := msg.split(" ", 2)) == 3: + loglevel = _LOG_LEVEL_MAP.get(split_msg[1], loglevel) + _LOGGER.log(loglevel, msg) + if not self._startup_complete.is_set() and _SUCCESSFUL_BOOT_MESSAGE in msg: + self._startup_complete.set() + + def _log_server_output(self, loglevel: int) -> None: + """Log captured process output, then clear the log buffer.""" + for line in list(self._log_buffer): # Copy the deque to avoid mutation error + _LOGGER.log(loglevel, line) + self._log_buffer.clear() + + async def _watchdog(self) -> None: + """Keep respawning go2rtc servers. + + A new go2rtc server is spawned if the process terminates or the API + stops responding. 
+ """ + while True: + try: + monitor_process_task = asyncio.create_task(self._monitor_process()) + self._watchdog_tasks.append(monitor_process_task) + monitor_process_task.add_done_callback(self._watchdog_tasks.remove) + monitor_api_task = asyncio.create_task(self._monitor_api()) + self._watchdog_tasks.append(monitor_api_task) + monitor_api_task.add_done_callback(self._watchdog_tasks.remove) + try: + await asyncio.gather(monitor_process_task, monitor_api_task) + except Go2RTCWatchdogError: + _LOGGER.debug("Caught Go2RTCWatchdogError") + for task in self._watchdog_tasks: + if task.done(): + if not task.cancelled(): + task.exception() + continue + task.cancel() + await asyncio.sleep(_RESPAWN_COOLDOWN) + try: + await self._stop() + _LOGGER.warning("Go2rtc unexpectedly stopped, server log:") + self._log_server_output(logging.WARNING) + _LOGGER.debug("Spawning new go2rtc server") + with suppress(Go2RTCServerStartError): + await self._start() + except Exception: + _LOGGER.exception( + "Unexpected error when restarting go2rtc server" + ) + except Exception: + _LOGGER.exception("Unexpected error in go2rtc server watchdog") + + async def _monitor_process(self) -> None: + """Raise if the go2rtc process terminates.""" + _LOGGER.debug("Monitoring go2rtc server process") + if self._process: + await self._process.wait() + _LOGGER.debug("go2rtc server terminated") + raise Go2RTCWatchdogError("Process ended") + + async def _monitor_api(self) -> None: + """Raise if the go2rtc process terminates.""" + client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL) + + _LOGGER.debug("Monitoring go2rtc API") + try: + while True: + await client.validate_server_version() + await asyncio.sleep(10) + except Exception as err: + _LOGGER.debug("go2rtc API did not reply", exc_info=True) + raise Go2RTCWatchdogError("API error") from err + + async def _stop_watchdog(self) -> None: + """Handle watchdog stop request.""" + tasks: list[asyncio.Task] = [] + if watchdog_task := self._watchdog_task: + self._watchdog_task = None + tasks.append(watchdog_task) + watchdog_task.cancel() + for task in self._watchdog_tasks: + tasks.append(task) + task.cancel() + await asyncio.gather(*tasks, return_exceptions=True) + + async def stop(self) -> None: + """Stop the server and abort the watchdog task.""" + _LOGGER.debug("Server stop requested") + await self._stop_watchdog() + await self._stop() + + async def _stop(self) -> None: + """Stop the server.""" + if self._process: + _LOGGER.debug("Stopping go2rtc server") + process = self._process + self._process = None + with suppress(ProcessLookupError): + process.terminate() + try: + await asyncio.wait_for(process.wait(), timeout=_TERMINATE_TIMEOUT) + except TimeoutError: + _LOGGER.warning("Go2rtc server didn't terminate gracefully. Killing it") + with suppress(ProcessLookupError): + process.kill() + else: + _LOGGER.debug("Go2rtc server has been stopped") diff --git a/homeassistant/components/go2rtc/strings.json b/homeassistant/components/go2rtc/strings.json new file mode 100644 index 00000000000..e350c19af96 --- /dev/null +++ b/homeassistant/components/go2rtc/strings.json @@ -0,0 +1,8 @@ +{ + "issues": { + "recommended_version": { + "title": "Outdated go2rtc server detected", + "description": "We detected that you are using an outdated go2rtc server version. For the best experience, we recommend updating the go2rtc server to version `{recommended_version}`.\nCurrently you are using version `{current_version}`." 
+ } + } +} diff --git a/homeassistant/components/goalzero/config_flow.py b/homeassistant/components/goalzero/config_flow.py index eb38e8fa154..dabe642b658 100644 --- a/homeassistant/components/goalzero/config_flow.py +++ b/homeassistant/components/goalzero/config_flow.py @@ -24,22 +24,20 @@ class GoalZeroFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize a Goal Zero Yeti flow.""" - self.ip_address: str | None = None + _discovered_ip: str async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo ) -> ConfigFlowResult: """Handle dhcp discovery.""" - self.ip_address = discovery_info.ip await self.async_set_unique_id(format_mac(discovery_info.macaddress)) - self._abort_if_unique_id_configured(updates={CONF_HOST: self.ip_address}) - self._async_abort_entries_match({CONF_HOST: self.ip_address}) + self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip}) + self._async_abort_entries_match({CONF_HOST: discovery_info.ip}) - _, error = await self._async_try_connect(str(self.ip_address)) + _, error = await self._async_try_connect(discovery_info.ip) if error is None: + self._discovered_ip = discovery_info.ip return await self.async_step_confirm_discovery() return self.async_abort(reason=error) @@ -51,7 +49,7 @@ class GoalZeroFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_create_entry( title=MANUFACTURER, data={ - CONF_HOST: self.ip_address, + CONF_HOST: self._discovered_ip, CONF_NAME: DEFAULT_NAME, }, ) @@ -60,7 +58,7 @@ class GoalZeroFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="confirm_discovery", description_placeholders={ - CONF_HOST: self.ip_address, + CONF_HOST: self._discovered_ip, CONF_NAME: DEFAULT_NAME, }, ) diff --git a/homeassistant/components/goalzero/manifest.json b/homeassistant/components/goalzero/manifest.json index f1bfc7de876..a9fcbf26d36 100644 --- a/homeassistant/components/goalzero/manifest.json +++ b/homeassistant/components/goalzero/manifest.json @@ -15,6 +15,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["goalzero"], - "quality_scale": "silver", "requirements": ["goalzero==0.2.2"] } diff --git a/homeassistant/components/gogogate2/common.py b/homeassistant/components/gogogate2/common.py index 3052e9041ac..52b1788c23e 100644 --- a/homeassistant/components/gogogate2/common.py +++ b/homeassistant/components/gogogate2/common.py @@ -14,7 +14,7 @@ from ismartgate import ( ISmartGateApi, ISmartGateInfoResponse, ) -from ismartgate.common import AbstractDoor, get_door_by_id +from ismartgate.common import AbstractDoor from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -24,11 +24,10 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.httpx_client import get_async_client -from homeassistant.helpers.update_coordinator import CoordinatorEntity, UpdateFailed +from homeassistant.helpers.update_coordinator import UpdateFailed -from .const import DATA_UPDATE_COORDINATOR, DEVICE_TYPE_ISMARTGATE, DOMAIN, MANUFACTURER +from .const import DATA_UPDATE_COORDINATOR, DEVICE_TYPE_ISMARTGATE, DOMAIN from .coordinator import DeviceDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -42,61 +41,6 @@ class StateData(NamedTuple): door: AbstractDoor | None -class GoGoGate2Entity(CoordinatorEntity[DeviceDataUpdateCoordinator]): - """Base class for gogogate2 entities.""" - - def 
__init__( - self, - config_entry: ConfigEntry, - data_update_coordinator: DeviceDataUpdateCoordinator, - door: AbstractDoor, - unique_id: str, - ) -> None: - """Initialize gogogate2 base entity.""" - super().__init__(data_update_coordinator) - self._config_entry = config_entry - self._door = door - self._door_id = door.door_id - self._api = data_update_coordinator.api - self._attr_unique_id = unique_id - - @property - def door(self) -> AbstractDoor: - """Return the door object.""" - door = get_door_by_id(self._door.door_id, self.coordinator.data) - self._door = door or self._door - return self._door - - @property - def door_status(self) -> AbstractDoor: - """Return the door with status.""" - data = self.coordinator.data - door_with_statuses = self._api.async_get_door_statuses_from_info(data) - return door_with_statuses[self._door_id] - - @property - def device_info(self) -> DeviceInfo: - """Device info for the controller.""" - data = self.coordinator.data - if data.remoteaccessenabled: - configuration_url = f"https://{data.remoteaccess}" - else: - configuration_url = f"http://{self._config_entry.data[CONF_IP_ADDRESS]}" - return DeviceInfo( - configuration_url=configuration_url, - identifiers={(DOMAIN, str(self._config_entry.unique_id))}, - name=self._config_entry.title, - manufacturer=MANUFACTURER, - model=data.model, - sw_version=data.firmwareversion, - ) - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - return {"door_id": self._door_id} - - def get_data_update_coordinator( hass: HomeAssistant, config_entry: ConfigEntry ) -> DeviceDataUpdateCoordinator: diff --git a/homeassistant/components/gogogate2/config_flow.py b/homeassistant/components/gogogate2/config_flow.py index cd9ca21b063..837c0454719 100644 --- a/homeassistant/components/gogogate2/config_flow.py +++ b/homeassistant/components/gogogate2/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations import dataclasses import re -from typing import Any +from typing import Any, Self from ismartgate.common import AbstractInfoResponse, ApiError from ismartgate.const import GogoGate2ApiErrorCode, ISmartGateApiErrorCode @@ -57,19 +57,21 @@ class Gogogate2FlowHandler(ConfigFlow, domain=DOMAIN): async def _async_discovery_handler(self, ip_address: str) -> ConfigFlowResult: """Start the user flow from any discovery.""" - self.context[CONF_IP_ADDRESS] = ip_address self._abort_if_unique_id_configured({CONF_IP_ADDRESS: ip_address}) self._async_abort_entries_match({CONF_IP_ADDRESS: ip_address}) self._ip_address = ip_address - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_IP_ADDRESS) == self._ip_address: - raise AbortFlow("already_in_progress") + if self.hass.config_entries.flow.async_has_matching_flow(self): + raise AbortFlow("already_in_progress") self._device_type = DEVICE_TYPE_ISMARTGATE return await self.async_step_user() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow._ip_address == self._ip_address # noqa: SLF001 + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/gogogate2/cover.py b/homeassistant/components/gogogate2/cover.py index e807f1acd3f..6bd38a0bc01 100644 --- a/homeassistant/components/gogogate2/cover.py +++ b/homeassistant/components/gogogate2/cover.py @@ -20,8 +20,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from 
homeassistant.helpers.entity_platform import AddEntitiesCallback -from .common import GoGoGate2Entity, cover_unique_id, get_data_update_coordinator +from .common import cover_unique_id, get_data_update_coordinator from .coordinator import DeviceDataUpdateCoordinator +from .entity import GoGoGate2Entity async def async_setup_entry( diff --git a/homeassistant/components/gogogate2/entity.py b/homeassistant/components/gogogate2/entity.py new file mode 100644 index 00000000000..8a699f6101b --- /dev/null +++ b/homeassistant/components/gogogate2/entity.py @@ -0,0 +1,68 @@ +"""Common code for GogoGate2 component.""" + +from __future__ import annotations + +from ismartgate.common import AbstractDoor, get_door_by_id + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_IP_ADDRESS +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER +from .coordinator import DeviceDataUpdateCoordinator + + +class GoGoGate2Entity(CoordinatorEntity[DeviceDataUpdateCoordinator]): + """Base class for gogogate2 entities.""" + + def __init__( + self, + config_entry: ConfigEntry, + data_update_coordinator: DeviceDataUpdateCoordinator, + door: AbstractDoor, + unique_id: str, + ) -> None: + """Initialize gogogate2 base entity.""" + super().__init__(data_update_coordinator) + self._config_entry = config_entry + self._door = door + self._door_id = door.door_id + self._api = data_update_coordinator.api + self._attr_unique_id = unique_id + + @property + def door(self) -> AbstractDoor: + """Return the door object.""" + door = get_door_by_id(self._door.door_id, self.coordinator.data) + self._door = door or self._door + return self._door + + @property + def door_status(self) -> AbstractDoor: + """Return the door with status.""" + data = self.coordinator.data + door_with_statuses = self._api.async_get_door_statuses_from_info(data) + return door_with_statuses[self._door_id] + + @property + def device_info(self) -> DeviceInfo: + """Device info for the controller.""" + data = self.coordinator.data + if data.remoteaccessenabled: + configuration_url = f"https://{data.remoteaccess}" + else: + configuration_url = f"http://{self._config_entry.data[CONF_IP_ADDRESS]}" + return DeviceInfo( + configuration_url=configuration_url, + identifiers={(DOMAIN, str(self._config_entry.unique_id))}, + name=self._config_entry.title, + manufacturer=MANUFACTURER, + model=data.model, + sw_version=data.firmwareversion, + ) + + @property + def extra_state_attributes(self): + """Return the state attributes.""" + return {"door_id": self._door_id} diff --git a/homeassistant/components/gogogate2/sensor.py b/homeassistant/components/gogogate2/sensor.py index 1dd0a57f7ed..c7740e24825 100644 --- a/homeassistant/components/gogogate2/sensor.py +++ b/homeassistant/components/gogogate2/sensor.py @@ -16,8 +16,9 @@ from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .common import GoGoGate2Entity, get_data_update_coordinator, sensor_unique_id +from .common import get_data_update_coordinator, sensor_unique_id from .coordinator import DeviceDataUpdateCoordinator +from .entity import GoGoGate2Entity SENSOR_ID_WIRED = "WIRE" diff --git a/homeassistant/components/google/__init__.py b/homeassistant/components/google/__init__.py index 9bb6dbd059f..2ad400aabab 100644 --- 
a/homeassistant/components/google/__init__.py +++ b/homeassistant/components/google/__init__.py @@ -175,7 +175,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except aiohttp.ClientError as err: raise ConfigEntryNotReady from err - if not async_entry_has_scopes(hass, entry): + if not async_entry_has_scopes(entry): raise ConfigEntryAuthFailed( "Required scopes are not available, reauth required" ) @@ -198,7 +198,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.config_entries.async_update_entry(entry, unique_id=primary_calendar.id) # Only expose the add event service if we have the correct permissions - if get_feature_access(hass, entry) is FeatureAccess.read_write: + if get_feature_access(entry) is FeatureAccess.read_write: await async_setup_add_event_service(hass, calendar_service) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -208,9 +208,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -def async_entry_has_scopes(hass: HomeAssistant, entry: ConfigEntry) -> bool: +def async_entry_has_scopes(entry: ConfigEntry) -> bool: """Verify that the config entry desired scope is present in the oauth token.""" - access = get_feature_access(hass, entry) + access = get_feature_access(entry) token_scopes = entry.data.get("token", {}).get("scope", []) return access.scope in token_scopes @@ -224,7 +224,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Reload config entry if the access options change.""" - if not async_entry_has_scopes(hass, entry): + if not async_entry_has_scopes(entry): await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/google/api.py b/homeassistant/components/google/api.py index 8ed18cca41c..194c2a0b4a5 100644 --- a/homeassistant/components/google/api.py +++ b/homeassistant/components/google/api.py @@ -26,13 +26,7 @@ from homeassistant.helpers.event import ( ) from homeassistant.util import dt as dt_util -from .const import ( - CONF_CALENDAR_ACCESS, - DATA_CONFIG, - DEFAULT_FEATURE_ACCESS, - DOMAIN, - FeatureAccess, -) +from .const import CONF_CALENDAR_ACCESS, DEFAULT_FEATURE_ACCESS, FeatureAccess _LOGGER = logging.getLogger(__name__) @@ -161,27 +155,11 @@ class DeviceFlow: self._listener() -def get_feature_access( - hass: HomeAssistant, config_entry: ConfigEntry | None = None -) -> FeatureAccess: +def get_feature_access(config_entry: ConfigEntry) -> FeatureAccess: """Return the desired calendar feature access.""" - if ( - config_entry - and config_entry.options - and CONF_CALENDAR_ACCESS in config_entry.options - ): + if config_entry.options and CONF_CALENDAR_ACCESS in config_entry.options: return FeatureAccess[config_entry.options[CONF_CALENDAR_ACCESS]] - - # This may be called during config entry setup without integration setup running when there - # is no google entry in configuration.yaml - return cast( - FeatureAccess, - ( - hass.data.get(DOMAIN, {}) - .get(DATA_CONFIG, {}) - .get(CONF_CALENDAR_ACCESS, DEFAULT_FEATURE_ACCESS) - ), - ) + return DEFAULT_FEATURE_ACCESS async def async_create_device_flow( diff --git a/homeassistant/components/google/calendar.py b/homeassistant/components/google/calendar.py index f51bf64d400..5ac5dae616c 100644 --- a/homeassistant/components/google/calendar.py +++ b/homeassistant/components/google/calendar.py @@ -2,13 +2,22 @@ from __future__ import 
annotations +from collections.abc import Mapping +import dataclasses from datetime import datetime, timedelta import logging from typing import Any, cast from gcal_sync.api import Range, SyncEventsRequest from gcal_sync.exceptions import ApiException -from gcal_sync.model import AccessRole, DateOrDatetime, Event +from gcal_sync.model import ( + AccessRole, + Calendar, + DateOrDatetime, + Event, + EventTypeEnum, + ResponseStatus, +) from gcal_sync.store import ScopedCalendarStore from gcal_sync.sync import CalendarEventSyncManager @@ -22,6 +31,7 @@ from homeassistant.components.calendar import ( EVENT_START, EVENT_SUMMARY, CalendarEntity, + CalendarEntityDescription, CalendarEntityFeature, CalendarEvent, extract_offset, @@ -81,6 +91,104 @@ RRULE_PREFIX = "RRULE:" SERVICE_CREATE_EVENT = "create_event" +@dataclasses.dataclass(frozen=True, kw_only=True) +class GoogleCalendarEntityDescription(CalendarEntityDescription): + """Google calendar entity description.""" + + name: str | None + entity_id: str | None + read_only: bool + ignore_availability: bool + offset: str | None + search: str | None + local_sync: bool + device_id: str + working_location: bool = False + + +def _get_entity_descriptions( + hass: HomeAssistant, + config_entry: ConfigEntry, + calendar_item: Calendar, + calendar_info: Mapping[str, Any], +) -> list[GoogleCalendarEntityDescription]: + """Create entity descriptions for the calendar. + + The entity descriptions are based on the type of Calendar from the API + and optional calendar_info yaml configuration that is the older way to + configure calendars before they supported UI based config. + + The yaml config may map one calendar to multiple entities and they do not + have a unique id. The yaml config also supports additional options like + offsets or search. + """ + calendar_id = calendar_item.id + num_entities = len(calendar_info[CONF_ENTITIES]) + entity_descriptions = [] + for data in calendar_info[CONF_ENTITIES]: + if num_entities > 1: + key = "" + else: + key = calendar_id + entity_enabled = data.get(CONF_TRACK, True) + if not entity_enabled: + _LOGGER.warning( + "The 'track' option in google_calendars.yaml has been deprecated." + " The setting has been imported to the UI, and should now be" + " removed from google_calendars.yaml" + ) + read_only = not ( + calendar_item.access_role.is_writer + and get_feature_access(config_entry) is FeatureAccess.read_write + ) + # Prefer calendar sync down of resources when possible. However, + # sync does not work for search. 
Also free-busy calendars denormalize + # recurring events as individual events which is not efficient for sync + local_sync = True + if ( + search := data.get(CONF_SEARCH) + ) or calendar_item.access_role == AccessRole.FREE_BUSY_READER: + read_only = True + local_sync = False + entity_description = GoogleCalendarEntityDescription( + key=key, + name=data[CONF_NAME].capitalize(), + entity_id=generate_entity_id( + ENTITY_ID_FORMAT, data[CONF_DEVICE_ID], hass=hass + ), + read_only=read_only, + ignore_availability=data.get(CONF_IGNORE_AVAILABILITY, False), + offset=data.get(CONF_OFFSET, DEFAULT_CONF_OFFSET), + search=search, + local_sync=local_sync, + entity_registry_enabled_default=entity_enabled, + device_id=data[CONF_DEVICE_ID], + ) + entity_descriptions.append(entity_description) + _LOGGER.debug( + "calendar_item.primary=%s, search=%s, calendar_item.access_role=%s - %s", + calendar_item.primary, + search, + calendar_item.access_role, + local_sync, + ) + if calendar_item.primary and local_sync: + _LOGGER.debug("work location entity") + # Create an optional disabled by default entity for Work Location + entity_descriptions.append( + dataclasses.replace( + entity_description, + key=f"{key}-work-location", + translation_key="working_location", + working_location=True, + name=None, + entity_id=None, + entity_registry_enabled_default=False, + ) + ) + return entity_descriptions + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -117,30 +225,21 @@ async def async_setup_entry( hass, calendar_item.dict(exclude_unset=True) ) new_calendars.append(calendar_info) - # Yaml calendar config may map one calendar to multiple entities - # with extra options like offsets or search criteria. - num_entities = len(calendar_info[CONF_ENTITIES]) - for data in calendar_info[CONF_ENTITIES]: - entity_enabled = data.get(CONF_TRACK, True) - if not entity_enabled: - _LOGGER.warning( - "The 'track' option in google_calendars.yaml has been deprecated." - " The setting has been imported to the UI, and should now be" - " removed from google_calendars.yaml" - ) - entity_name = data[CONF_DEVICE_ID] - # The unique id is based on the config entry and calendar id since - # multiple accounts can have a common calendar id - # (e.g. `en.usa#holiday@group.v.calendar.google.com`). - # When using google_calendars.yaml with multiple entities for a - # single calendar, we have no way to set a unique id. - if num_entities > 1: - unique_id = None - else: - unique_id = f"{config_entry.unique_id}-{calendar_id}" + + for entity_description in _get_entity_descriptions( + hass, config_entry, calendar_item, calendar_info + ): + unique_id = ( + f"{config_entry.unique_id}-{entity_description.key}" + if entity_description.key + else None + ) # Migrate to new unique_id format which supports # multiple config entries as of 2022.7 - for old_unique_id in (calendar_id, f"{calendar_id}-{entity_name}"): + for old_unique_id in ( + calendar_id, + f"{calendar_id}-{entity_description.device_id}", + ): if not (entity_entry := entity_entry_map.get(old_unique_id)): continue if unique_id: @@ -162,25 +261,16 @@ async def async_setup_entry( entity_registry.async_remove( entity_entry.entity_id, ) + _LOGGER.debug("Creating entity with unique_id=%s", unique_id) coordinator: CalendarSyncUpdateCoordinator | CalendarQueryUpdateCoordinator - # Prefer calendar sync down of resources when possible. However, - # sync does not work for search. 
Also free-busy calendars denormalize - # recurring events as individual events which is not efficient for sync - support_write = ( - calendar_item.access_role.is_writer - and get_feature_access(hass, config_entry) is FeatureAccess.read_write - ) - if ( - search := data.get(CONF_SEARCH) - ) or calendar_item.access_role == AccessRole.FREE_BUSY_READER: + if not entity_description.local_sync: coordinator = CalendarQueryUpdateCoordinator( hass, calendar_service, - data[CONF_NAME], + entity_description.name or entity_description.key, calendar_id, - search, + entity_description.search, ) - support_write = False else: request_template = SyncEventsRequest( calendar_id=calendar_id, @@ -188,23 +278,22 @@ async def async_setup_entry( ) sync = CalendarEventSyncManager( calendar_service, - store=ScopedCalendarStore(store, unique_id or entity_name), + store=ScopedCalendarStore( + store, unique_id or entity_description.device_id + ), request_template=request_template, ) coordinator = CalendarSyncUpdateCoordinator( hass, sync, - data[CONF_NAME], + entity_description.name or entity_description.key, ) entities.append( GoogleCalendarEntity( coordinator, calendar_id, - data, - generate_entity_id(ENTITY_ID_FORMAT, entity_name, hass=hass), + entity_description, unique_id, - entity_enabled, - support_write, ) ) @@ -222,7 +311,7 @@ async def async_setup_entry( platform = entity_platform.async_get_current_platform() if ( any(calendar_item.access_role.is_writer for calendar_item in result.items) - and get_feature_access(hass, config_entry) is FeatureAccess.read_write + and get_feature_access(config_entry) is FeatureAccess.read_write ): platform.async_register_entity_service( SERVICE_CREATE_EVENT, @@ -238,29 +327,29 @@ class GoogleCalendarEntity( ): """A calendar event entity.""" + entity_description: GoogleCalendarEntityDescription _attr_has_entity_name = True def __init__( self, coordinator: CalendarSyncUpdateCoordinator | CalendarQueryUpdateCoordinator, calendar_id: str, - data: dict[str, Any], - entity_id: str, + entity_description: GoogleCalendarEntityDescription, unique_id: str | None, - entity_enabled: bool, - supports_write: bool, ) -> None: """Create the Calendar event device.""" super().__init__(coordinator) + _LOGGER.debug("entity_description.entity_id=%s", entity_description.entity_id) + _LOGGER.debug("entity_description=%s", entity_description) self.calendar_id = calendar_id - self._ignore_availability: bool = data.get(CONF_IGNORE_AVAILABILITY, False) + self.entity_description = entity_description + self._ignore_availability = entity_description.ignore_availability + self._offset = entity_description.offset self._event: CalendarEvent | None = None - self._attr_name = data[CONF_NAME].capitalize() - self._offset = data.get(CONF_OFFSET, DEFAULT_CONF_OFFSET) - self.entity_id = entity_id + if entity_description.entity_id: + self.entity_id = entity_description.entity_id self._attr_unique_id = unique_id - self._attr_entity_registry_enabled_default = entity_enabled - if supports_write: + if not entity_description.read_only: self._attr_supported_features = ( CalendarEntityFeature.CREATE_EVENT | CalendarEntityFeature.DELETE_EVENT ) @@ -285,7 +374,16 @@ class GoogleCalendarEntity( return event def _event_filter(self, event: Event) -> bool: - """Return True if the event is visible.""" + """Return True if the event is visible and not declined.""" + + if any( + attendee.is_self and attendee.response_status == ResponseStatus.DECLINED + for attendee in event.attendees + ): + return False + + if event.event_type == 
EventTypeEnum.WORKING_LOCATION: + return self.entity_description.working_location if self._ignore_availability: return True return event.transparency == OPAQUE diff --git a/homeassistant/components/google/config_flow.py b/homeassistant/components/google/config_flow.py index 6207303c8a6..8ae09b58957 100644 --- a/homeassistant/components/google/config_flow.py +++ b/homeassistant/components/google/config_flow.py @@ -11,7 +11,12 @@ from gcal_sync.api import GoogleCalendarService from gcal_sync.exceptions import ApiException, ApiForbiddenException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow +from homeassistant.config_entries import ( + SOURCE_REAUTH, + ConfigEntry, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.core import callback from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -24,7 +29,6 @@ from .api import ( InvalidCredential, OAuthError, async_create_device_flow, - get_feature_access, ) from .const import ( CONF_CALENDAR_ACCESS, @@ -74,7 +78,6 @@ class OAuth2FlowHandler( def __init__(self) -> None: """Set up instance.""" super().__init__() - self._reauth_config_entry: ConfigEntry | None = None self._device_flow: DeviceFlow | None = None # First attempt is device auth, then fallback to web auth self._web_auth = False @@ -94,18 +97,6 @@ class OAuth2FlowHandler( "prompt": "consent", } - async def async_step_import(self, info: dict[str, Any]) -> ConfigFlowResult: - """Import existing auth into a new config entry.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - implementations = await config_entry_oauth2_flow.async_get_implementations( - self.hass, self.DOMAIN - ) - assert len(implementations) == 1 - self.flow_impl = list(implementations.values())[0] - self.external_data = info - return await super().async_step_creation(info) - async def async_step_auth( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -129,11 +120,11 @@ class OAuth2FlowHandler( self.flow_impl, ) return self.async_abort(reason="oauth_error") - calendar_access = get_feature_access(self.hass) - if self._reauth_config_entry and self._reauth_config_entry.options: - calendar_access = FeatureAccess[ - self._reauth_config_entry.options[CONF_CALENDAR_ACCESS] - ] + calendar_access = DEFAULT_FEATURE_ACCESS + if self.source == SOURCE_REAUTH and ( + reauth_options := self._get_reauth_entry().options + ): + calendar_access = FeatureAccess[reauth_options[CONF_CALENDAR_ACCESS]] try: device_flow = await async_create_device_flow( self.hass, @@ -190,14 +181,10 @@ class OAuth2FlowHandler( data[CONF_CREDENTIAL_TYPE] = ( CredentialType.WEB_AUTH if self._web_auth else CredentialType.DEVICE_AUTH ) - if self._reauth_config_entry: - self.hass.config_entries.async_update_entry( - self._reauth_config_entry, data=data + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data ) - await self.hass.config_entries.async_reload( - self._reauth_config_entry.entry_id - ) - return self.async_abort(reason="reauth_successful") calendar_service = GoogleCalendarService( AccessTokenAuthImpl( async_get_clientsession(self.hass), data["token"]["access_token"] @@ -226,7 +213,7 @@ class OAuth2FlowHandler( title=primary_calendar.id, data=data, options={ - CONF_CALENDAR_ACCESS: get_feature_access(self.hass).name, + CONF_CALENDAR_ACCESS: DEFAULT_FEATURE_ACCESS.name, }, ) @@ -234,9 
+221,6 @@ class OAuth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) self._web_auth = entry_data.get(CONF_CREDENTIAL_TYPE) == CredentialType.WEB_AUTH return await self.async_step_reauth_confirm() @@ -254,16 +238,12 @@ class OAuth2FlowHandler( config_entry: ConfigEntry, ) -> OptionsFlow: """Create an options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Google Calendar options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/google/icons.json b/homeassistant/components/google/icons.json index 6dbad61b43d..e4f25442546 100644 --- a/homeassistant/components/google/icons.json +++ b/homeassistant/components/google/icons.json @@ -1,6 +1,10 @@ { "services": { - "add_event": "mdi:calendar-plus", - "create_event": "mdi:calendar-plus" + "add_event": { + "service": "mdi:calendar-plus" + }, + "create_event": { + "service": "mdi:calendar-plus" + } } } diff --git a/homeassistant/components/google/manifest.json b/homeassistant/components/google/manifest.json index 163ad91fb7c..85c4714432b 100644 --- a/homeassistant/components/google/manifest.json +++ b/homeassistant/components/google/manifest.json @@ -4,8 +4,8 @@ "codeowners": ["@allenporter"], "config_flow": true, "dependencies": ["application_credentials"], - "documentation": "https://www.home-assistant.io/integrations/calendar.google", + "documentation": "https://www.home-assistant.io/integrations/google", "iot_class": "cloud_polling", "loggers": ["googleapiclient"], - "requirements": ["gcal-sync==6.1.4", "oauth2client==4.1.3", "ical==8.1.1"] + "requirements": ["gcal-sync==6.2.0", "oauth2client==4.1.3", "ical==8.2.0"] } diff --git a/homeassistant/components/google/strings.json b/homeassistant/components/google/strings.json index 4e62b134b0e..acc69c3799a 100644 --- a/homeassistant/components/google/strings.json +++ b/homeassistant/components/google/strings.json @@ -19,6 +19,7 @@ "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]", "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]", "code_expired": "Authentication code expired or credential setup is invalid, please try again.", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]", @@ -44,7 +45,7 @@ } }, "application_credentials": { - "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Calendar. You also need to create Application Credentials linked to your Calendar:\n1. Go to [Credentials]({oauth_creds_url}) and click **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. 
Select **TV and Limited Input devices** for the Application Type.\n\n" + "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Calendar. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type." }, "services": { "add_event": { @@ -86,8 +87,8 @@ } }, "create_event": { - "name": "Creates event", - "description": "Add a new calendar event.", + "name": "Create event", + "description": "Adds a new calendar event.", "fields": { "summary": { "name": "Summary", @@ -123,5 +124,12 @@ } } } + }, + "entity": { + "calendar": { + "working_location": { + "name": "Working location" + } + } } } diff --git a/homeassistant/components/google_assistant/config_flow.py b/homeassistant/components/google_assistant/config_flow.py index 9504c623138..5934657f9ae 100644 --- a/homeassistant/components/google_assistant/config_flow.py +++ b/homeassistant/components/google_assistant/config_flow.py @@ -1,6 +1,8 @@ """Config flow for google assistant component.""" -from homeassistant.config_entries import ConfigFlow +from typing import Any + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import CONF_PROJECT_ID, DOMAIN @@ -10,10 +12,10 @@ class GoogleAssistantHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" - await self.async_set_unique_id(unique_id=user_input[CONF_PROJECT_ID]) + await self.async_set_unique_id(unique_id=import_data[CONF_PROJECT_ID]) self._abort_if_unique_id_configured() return self.async_create_entry( - title=user_input[CONF_PROJECT_ID], data=user_input + title=import_data[CONF_PROJECT_ID], data=import_data ) diff --git a/homeassistant/components/google_assistant/const.py b/homeassistant/components/google_assistant/const.py index 04c85639e07..8132ecaae2c 100644 --- a/homeassistant/components/google_assistant/const.py +++ b/homeassistant/components/google_assistant/const.py @@ -78,6 +78,7 @@ TYPE_AWNING = f"{PREFIX_TYPES}AWNING" TYPE_BLINDS = f"{PREFIX_TYPES}BLINDS" TYPE_CAMERA = f"{PREFIX_TYPES}CAMERA" TYPE_CURTAIN = f"{PREFIX_TYPES}CURTAIN" +TYPE_CARBON_MONOXIDE_DETECTOR = f"{PREFIX_TYPES}CARBON_MONOXIDE_DETECTOR" TYPE_DEHUMIDIFIER = f"{PREFIX_TYPES}DEHUMIDIFIER" TYPE_DOOR = f"{PREFIX_TYPES}DOOR" TYPE_DOORBELL = f"{PREFIX_TYPES}DOORBELL" @@ -93,6 +94,7 @@ TYPE_SCENE = f"{PREFIX_TYPES}SCENE" TYPE_SENSOR = f"{PREFIX_TYPES}SENSOR" TYPE_SETTOP = f"{PREFIX_TYPES}SETTOP" TYPE_SHUTTER = f"{PREFIX_TYPES}SHUTTER" +TYPE_SMOKE_DETECTOR = f"{PREFIX_TYPES}SMOKE_DETECTOR" TYPE_SPEAKER = f"{PREFIX_TYPES}SPEAKER" TYPE_SWITCH = f"{PREFIX_TYPES}SWITCH" TYPE_THERMOSTAT = f"{PREFIX_TYPES}THERMOSTAT" @@ -136,6 +138,7 @@ EVENT_SYNC_RECEIVED = "google_assistant_sync" DOMAIN_TO_GOOGLE_TYPES = { alarm_control_panel.DOMAIN: TYPE_ALARM, + binary_sensor.DOMAIN: TYPE_SENSOR, button.DOMAIN: TYPE_SCENE, camera.DOMAIN: TYPE_CAMERA, climate.DOMAIN: TYPE_THERMOSTAT, @@ -168,6 +171,14 @@ DEVICE_CLASS_TO_GOOGLE_TYPES = { binary_sensor.DOMAIN, binary_sensor.BinarySensorDeviceClass.GARAGE_DOOR, ): TYPE_GARAGE, + ( + binary_sensor.DOMAIN, + binary_sensor.BinarySensorDeviceClass.SMOKE, + ): TYPE_SMOKE_DETECTOR, + ( + binary_sensor.DOMAIN, + 
binary_sensor.BinarySensorDeviceClass.CO, + ): TYPE_CARBON_MONOXIDE_DETECTOR, (cover.DOMAIN, cover.CoverDeviceClass.AWNING): TYPE_AWNING, (cover.DOMAIN, cover.CoverDeviceClass.CURTAIN): TYPE_CURTAIN, (cover.DOMAIN, cover.CoverDeviceClass.DOOR): TYPE_DOOR, diff --git a/homeassistant/components/google_assistant/helpers.py b/homeassistant/components/google_assistant/helpers.py index 7f8f7a68ffa..76869487ee3 100644 --- a/homeassistant/components/google_assistant/helpers.py +++ b/homeassistant/components/google_assistant/helpers.py @@ -521,7 +521,7 @@ def supported_traits_for_state(state: State) -> list[type[trait._Trait]]: class GoogleEntity: """Adaptation of Entity expressed in Google's terms.""" - __slots__ = ("hass", "config", "state", "_traits") + __slots__ = ("hass", "config", "state", "entity_id", "_traits") def __init__( self, hass: HomeAssistant, config: AbstractConfig, state: State @@ -530,17 +530,13 @@ class GoogleEntity: self.hass = hass self.config = config self.state = state + self.entity_id = state.entity_id self._traits: list[trait._Trait] | None = None def __repr__(self) -> str: """Return the representation.""" return f"" - @property - def entity_id(self): - """Return entity ID.""" - return self.state.entity_id - @callback def traits(self) -> list[trait._Trait]: """Return traits for entity.""" diff --git a/homeassistant/components/google_assistant/icons.json b/homeassistant/components/google_assistant/icons.json index 3bcab03d2c2..a522103328a 100644 --- a/homeassistant/components/google_assistant/icons.json +++ b/homeassistant/components/google_assistant/icons.json @@ -1,5 +1,7 @@ { "services": { - "request_sync": "mdi:sync" + "request_sync": { + "service": "mdi:sync" + } } } diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index 145eb4b2935..44251a3be04 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -33,14 +33,17 @@ from homeassistant.components import ( valve, water_heater, ) -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) from homeassistant.components.camera import CameraEntityFeature from homeassistant.components.climate import ClimateEntityFeature from homeassistant.components.cover import CoverEntityFeature from homeassistant.components.fan import FanEntityFeature from homeassistant.components.humidifier import HumidifierEntityFeature from homeassistant.components.light import LightEntityFeature -from homeassistant.components.lock import STATE_JAMMED, STATE_UNLOCKING +from homeassistant.components.lock import LockState from homeassistant.components.media_player import MediaPlayerEntityFeature, MediaType from homeassistant.components.vacuum import VacuumEntityFeature from homeassistant.components.valve import ValveEntityFeature @@ -63,15 +66,7 @@ from homeassistant.const import ( SERVICE_ALARM_TRIGGER, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_IDLE, - STATE_LOCKED, STATE_OFF, STATE_ON, STATE_PAUSED, @@ -110,61 +105,42 @@ from .error import ChallengeNeeded, SmartHomeError _LOGGER = logging.getLogger(__name__) PREFIX_TRAITS = "action.devices.traits." 
-TRAIT_CAMERA_STREAM = f"{PREFIX_TRAITS}CameraStream" -TRAIT_ONOFF = f"{PREFIX_TRAITS}OnOff" -TRAIT_DOCK = f"{PREFIX_TRAITS}Dock" -TRAIT_STARTSTOP = f"{PREFIX_TRAITS}StartStop" +TRAIT_ARM_DISARM = f"{PREFIX_TRAITS}ArmDisarm" TRAIT_BRIGHTNESS = f"{PREFIX_TRAITS}Brightness" -TRAIT_COLOR_SETTING = f"{PREFIX_TRAITS}ColorSetting" -TRAIT_SCENE = f"{PREFIX_TRAITS}Scene" -TRAIT_TEMPERATURE_SETTING = f"{PREFIX_TRAITS}TemperatureSetting" -TRAIT_TEMPERATURE_CONTROL = f"{PREFIX_TRAITS}TemperatureControl" -TRAIT_LOCKUNLOCK = f"{PREFIX_TRAITS}LockUnlock" -TRAIT_FANSPEED = f"{PREFIX_TRAITS}FanSpeed" -TRAIT_MODES = f"{PREFIX_TRAITS}Modes" -TRAIT_INPUTSELECTOR = f"{PREFIX_TRAITS}InputSelector" -TRAIT_OBJECTDETECTION = f"{PREFIX_TRAITS}ObjectDetection" -TRAIT_OPENCLOSE = f"{PREFIX_TRAITS}OpenClose" -TRAIT_VOLUME = f"{PREFIX_TRAITS}Volume" -TRAIT_ARMDISARM = f"{PREFIX_TRAITS}ArmDisarm" -TRAIT_HUMIDITY_SETTING = f"{PREFIX_TRAITS}HumiditySetting" -TRAIT_TRANSPORT_CONTROL = f"{PREFIX_TRAITS}TransportControl" -TRAIT_MEDIA_STATE = f"{PREFIX_TRAITS}MediaState" +TRAIT_CAMERA_STREAM = f"{PREFIX_TRAITS}CameraStream" TRAIT_CHANNEL = f"{PREFIX_TRAITS}Channel" +TRAIT_COLOR_SETTING = f"{PREFIX_TRAITS}ColorSetting" +TRAIT_DOCK = f"{PREFIX_TRAITS}Dock" +TRAIT_ENERGY_STORAGE = f"{PREFIX_TRAITS}EnergyStorage" +TRAIT_FAN_SPEED = f"{PREFIX_TRAITS}FanSpeed" +TRAIT_HUMIDITY_SETTING = f"{PREFIX_TRAITS}HumiditySetting" +TRAIT_INPUT_SELECTOR = f"{PREFIX_TRAITS}InputSelector" TRAIT_LOCATOR = f"{PREFIX_TRAITS}Locator" -TRAIT_ENERGYSTORAGE = f"{PREFIX_TRAITS}EnergyStorage" +TRAIT_LOCK_UNLOCK = f"{PREFIX_TRAITS}LockUnlock" +TRAIT_MEDIA_STATE = f"{PREFIX_TRAITS}MediaState" +TRAIT_MODES = f"{PREFIX_TRAITS}Modes" +TRAIT_OBJECT_DETECTION = f"{PREFIX_TRAITS}ObjectDetection" +TRAIT_ON_OFF = f"{PREFIX_TRAITS}OnOff" +TRAIT_OPEN_CLOSE = f"{PREFIX_TRAITS}OpenClose" +TRAIT_SCENE = f"{PREFIX_TRAITS}Scene" TRAIT_SENSOR_STATE = f"{PREFIX_TRAITS}SensorState" +TRAIT_START_STOP = f"{PREFIX_TRAITS}StartStop" +TRAIT_TEMPERATURE_CONTROL = f"{PREFIX_TRAITS}TemperatureControl" +TRAIT_TEMPERATURE_SETTING = f"{PREFIX_TRAITS}TemperatureSetting" +TRAIT_TRANSPORT_CONTROL = f"{PREFIX_TRAITS}TransportControl" +TRAIT_VOLUME = f"{PREFIX_TRAITS}Volume" PREFIX_COMMANDS = "action.devices.commands." 
-COMMAND_ONOFF = f"{PREFIX_COMMANDS}OnOff" -COMMAND_GET_CAMERA_STREAM = f"{PREFIX_COMMANDS}GetCameraStream" -COMMAND_DOCK = f"{PREFIX_COMMANDS}Dock" -COMMAND_STARTSTOP = f"{PREFIX_COMMANDS}StartStop" -COMMAND_PAUSEUNPAUSE = f"{PREFIX_COMMANDS}PauseUnpause" -COMMAND_BRIGHTNESS_ABSOLUTE = f"{PREFIX_COMMANDS}BrightnessAbsolute" -COMMAND_COLOR_ABSOLUTE = f"{PREFIX_COMMANDS}ColorAbsolute" COMMAND_ACTIVATE_SCENE = f"{PREFIX_COMMANDS}ActivateScene" -COMMAND_SET_TEMPERATURE = f"{PREFIX_COMMANDS}SetTemperature" -COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT = ( - f"{PREFIX_COMMANDS}ThermostatTemperatureSetpoint" -) -COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE = ( - f"{PREFIX_COMMANDS}ThermostatTemperatureSetRange" -) -COMMAND_THERMOSTAT_SET_MODE = f"{PREFIX_COMMANDS}ThermostatSetMode" -COMMAND_LOCKUNLOCK = f"{PREFIX_COMMANDS}LockUnlock" -COMMAND_FANSPEED = f"{PREFIX_COMMANDS}SetFanSpeed" -COMMAND_FANSPEEDRELATIVE = f"{PREFIX_COMMANDS}SetFanSpeedRelative" -COMMAND_MODES = f"{PREFIX_COMMANDS}SetModes" -COMMAND_INPUT = f"{PREFIX_COMMANDS}SetInput" +COMMAND_ARM_DISARM = f"{PREFIX_COMMANDS}ArmDisarm" +COMMAND_BRIGHTNESS_ABSOLUTE = f"{PREFIX_COMMANDS}BrightnessAbsolute" +COMMAND_CHARGE = f"{PREFIX_COMMANDS}Charge" +COMMAND_COLOR_ABSOLUTE = f"{PREFIX_COMMANDS}ColorAbsolute" +COMMAND_DOCK = f"{PREFIX_COMMANDS}Dock" +COMMAND_GET_CAMERA_STREAM = f"{PREFIX_COMMANDS}GetCameraStream" +COMMAND_LOCK_UNLOCK = f"{PREFIX_COMMANDS}LockUnlock" +COMMAND_LOCATE = f"{PREFIX_COMMANDS}Locate" COMMAND_NEXT_INPUT = f"{PREFIX_COMMANDS}NextInput" -COMMAND_PREVIOUS_INPUT = f"{PREFIX_COMMANDS}PreviousInput" -COMMAND_OPENCLOSE = f"{PREFIX_COMMANDS}OpenClose" -COMMAND_OPENCLOSE_RELATIVE = f"{PREFIX_COMMANDS}OpenCloseRelative" -COMMAND_SET_VOLUME = f"{PREFIX_COMMANDS}setVolume" -COMMAND_VOLUME_RELATIVE = f"{PREFIX_COMMANDS}volumeRelative" -COMMAND_MUTE = f"{PREFIX_COMMANDS}mute" -COMMAND_ARMDISARM = f"{PREFIX_COMMANDS}ArmDisarm" COMMAND_MEDIA_NEXT = f"{PREFIX_COMMANDS}mediaNext" COMMAND_MEDIA_PAUSE = f"{PREFIX_COMMANDS}mediaPause" COMMAND_MEDIA_PREVIOUS = f"{PREFIX_COMMANDS}mediaPrevious" @@ -173,11 +149,30 @@ COMMAND_MEDIA_SEEK_RELATIVE = f"{PREFIX_COMMANDS}mediaSeekRelative" COMMAND_MEDIA_SEEK_TO_POSITION = f"{PREFIX_COMMANDS}mediaSeekToPosition" COMMAND_MEDIA_SHUFFLE = f"{PREFIX_COMMANDS}mediaShuffle" COMMAND_MEDIA_STOP = f"{PREFIX_COMMANDS}mediaStop" +COMMAND_MUTE = f"{PREFIX_COMMANDS}mute" +COMMAND_OPEN_CLOSE = f"{PREFIX_COMMANDS}OpenClose" +COMMAND_ON_OFF = f"{PREFIX_COMMANDS}OnOff" +COMMAND_OPEN_CLOSE_RELATIVE = f"{PREFIX_COMMANDS}OpenCloseRelative" +COMMAND_PAUSE_UNPAUSE = f"{PREFIX_COMMANDS}PauseUnpause" COMMAND_REVERSE = f"{PREFIX_COMMANDS}Reverse" -COMMAND_SET_HUMIDITY = f"{PREFIX_COMMANDS}SetHumidity" +COMMAND_PREVIOUS_INPUT = f"{PREFIX_COMMANDS}PreviousInput" COMMAND_SELECT_CHANNEL = f"{PREFIX_COMMANDS}selectChannel" -COMMAND_LOCATE = f"{PREFIX_COMMANDS}Locate" -COMMAND_CHARGE = f"{PREFIX_COMMANDS}Charge" +COMMAND_SET_TEMPERATURE = f"{PREFIX_COMMANDS}SetTemperature" +COMMAND_SET_FAN_SPEED = f"{PREFIX_COMMANDS}SetFanSpeed" +COMMAND_SET_FAN_SPEED_RELATIVE = f"{PREFIX_COMMANDS}SetFanSpeedRelative" +COMMAND_SET_HUMIDITY = f"{PREFIX_COMMANDS}SetHumidity" +COMMAND_SET_INPUT = f"{PREFIX_COMMANDS}SetInput" +COMMAND_SET_MODES = f"{PREFIX_COMMANDS}SetModes" +COMMAND_SET_VOLUME = f"{PREFIX_COMMANDS}setVolume" +COMMAND_START_STOP = f"{PREFIX_COMMANDS}StartStop" +COMMAND_THERMOSTAT_SET_MODE = f"{PREFIX_COMMANDS}ThermostatSetMode" +COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT = ( + f"{PREFIX_COMMANDS}ThermostatTemperatureSetpoint" +) 
+COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE = ( + f"{PREFIX_COMMANDS}ThermostatTemperatureSetRange" +) +COMMAND_VOLUME_RELATIVE = f"{PREFIX_COMMANDS}volumeRelative" TRAITS: list[type[_Trait]] = [] @@ -416,7 +411,7 @@ class ObjectDetection(_Trait): https://developers.google.com/actions/smarthome/traits/objectdetection """ - name = TRAIT_OBJECTDETECTION + name = TRAIT_OBJECT_DETECTION commands = [] @staticmethod @@ -474,8 +469,8 @@ class OnOffTrait(_Trait): https://developers.google.com/actions/smarthome/traits/onoff """ - name = TRAIT_ONOFF - commands = [COMMAND_ONOFF] + name = TRAIT_ON_OFF + commands = [COMMAND_ON_OFF] @staticmethod def supported(domain, features, device_class, _): @@ -558,15 +553,9 @@ class ColorSettingTrait(_Trait): response["colorModel"] = "hsv" if light.color_temp_supported(color_modes): - # Max Kelvin is Min Mireds K = 1000000 / mireds - # Min Kelvin is Max Mireds K = 1000000 / mireds response["colorTemperatureRange"] = { - "temperatureMaxK": color_util.color_temperature_mired_to_kelvin( - attrs.get(light.ATTR_MIN_MIREDS) - ), - "temperatureMinK": color_util.color_temperature_mired_to_kelvin( - attrs.get(light.ATTR_MAX_MIREDS) - ), + "temperatureMaxK": int(attrs.get(light.ATTR_MAX_COLOR_TEMP_KELVIN)), + "temperatureMinK": int(attrs.get(light.ATTR_MIN_COLOR_TEMP_KELVIN)), } return response @@ -588,7 +577,7 @@ class ColorSettingTrait(_Trait): } if light.color_temp_supported([color_mode]): - temp = self.state.attributes.get(light.ATTR_COLOR_TEMP) + temp = self.state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) # Some faulty integrations might put 0 in here, raising exception. if temp == 0: _LOGGER.warning( @@ -597,9 +586,7 @@ class ColorSettingTrait(_Trait): temp, ) elif temp is not None: - color["temperatureK"] = color_util.color_temperature_mired_to_kelvin( - temp - ) + color["temperatureK"] = temp response = {} @@ -611,11 +598,9 @@ class ColorSettingTrait(_Trait): async def execute(self, command, data, params, challenge): """Execute a color temperature command.""" if "temperature" in params["color"]: - temp = color_util.color_temperature_kelvin_to_mired( - params["color"]["temperature"] - ) - min_temp = self.state.attributes[light.ATTR_MIN_MIREDS] - max_temp = self.state.attributes[light.ATTR_MAX_MIREDS] + temp = params["color"]["temperature"] + max_temp = self.state.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN] + min_temp = self.state.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN] if temp < min_temp or temp > max_temp: raise SmartHomeError( @@ -626,7 +611,10 @@ class ColorSettingTrait(_Trait): await self.hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_COLOR_TEMP: temp}, + { + ATTR_ENTITY_ID: self.state.entity_id, + light.ATTR_COLOR_TEMP_KELVIN: temp, + }, blocking=not self.config.should_report_state, context=data.context, ) @@ -734,7 +722,7 @@ class DockTrait(_Trait): def query_attributes(self) -> dict[str, Any]: """Return dock query attributes.""" - return {"isDocked": self.state.state == vacuum.STATE_DOCKED} + return {"isDocked": self.state.state == vacuum.VacuumActivity.DOCKED} async def execute(self, command, data, params, challenge): """Execute a dock command.""" @@ -794,7 +782,7 @@ class EnergyStorageTrait(_Trait): https://developers.google.com/actions/smarthome/traits/energystorage """ - name = TRAIT_ENERGYSTORAGE + name = TRAIT_ENERGY_STORAGE commands = [COMMAND_CHARGE] @staticmethod @@ -830,8 +818,8 @@ class EnergyStorageTrait(_Trait): "capacityUntilFull": [ {"rawValue": 100 - battery_level, "unit": 
"PERCENTAGE"} ], - "isCharging": self.state.state == vacuum.STATE_DOCKED, - "isPluggedIn": self.state.state == vacuum.STATE_DOCKED, + "isCharging": self.state.state == vacuum.VacuumActivity.DOCKED, + "isPluggedIn": self.state.state == vacuum.VacuumActivity.DOCKED, } async def execute(self, command, data, params, challenge): @@ -849,8 +837,8 @@ class StartStopTrait(_Trait): https://developers.google.com/actions/smarthome/traits/startstop """ - name = TRAIT_STARTSTOP - commands = [COMMAND_STARTSTOP, COMMAND_PAUSEUNPAUSE] + name = TRAIT_START_STOP + commands = [COMMAND_START_STOP, COMMAND_PAUSE_UNPAUSE] @staticmethod def supported(domain, features, device_class, _): @@ -887,8 +875,8 @@ class StartStopTrait(_Trait): if domain == vacuum.DOMAIN: return { - "isRunning": state == vacuum.STATE_CLEANING, - "isPaused": state == vacuum.STATE_PAUSED, + "isRunning": state == vacuum.VacuumActivity.CLEANING, + "isPaused": state == vacuum.VacuumActivity.PAUSED, } if domain in COVER_VALVE_DOMAINS: @@ -914,7 +902,7 @@ class StartStopTrait(_Trait): async def _execute_vacuum(self, command, data, params, challenge): """Execute a StartStop command.""" - if command == COMMAND_STARTSTOP: + if command == COMMAND_START_STOP: if params["start"]: await self.hass.services.async_call( self.state.domain, @@ -931,7 +919,7 @@ class StartStopTrait(_Trait): blocking=not self.config.should_report_state, context=data.context, ) - elif command == COMMAND_PAUSEUNPAUSE: + elif command == COMMAND_PAUSE_UNPAUSE: if params["pause"]: await self.hass.services.async_call( self.state.domain, @@ -952,7 +940,7 @@ class StartStopTrait(_Trait): async def _execute_cover_or_valve(self, command, data, params, challenge): """Execute a StartStop command.""" domain = self.state.domain - if command == COMMAND_STARTSTOP: + if command == COMMAND_START_STOP: if params["start"] is False: if self.state.state in ( COVER_VALVE_STATES[domain]["closing"], @@ -1505,8 +1493,8 @@ class LockUnlockTrait(_Trait): https://developers.google.com/actions/smarthome/traits/lockunlock """ - name = TRAIT_LOCKUNLOCK - commands = [COMMAND_LOCKUNLOCK] + name = TRAIT_LOCK_UNLOCK + commands = [COMMAND_LOCK_UNLOCK] @staticmethod def supported(domain, features, device_class, _): @@ -1524,11 +1512,11 @@ class LockUnlockTrait(_Trait): def query_attributes(self) -> dict[str, Any]: """Return LockUnlock query attributes.""" - if self.state.state == STATE_JAMMED: + if self.state.state == LockState.JAMMED: return {"isJammed": True} # If its unlocking its not yet unlocked so we consider is locked - return {"isLocked": self.state.state in (STATE_UNLOCKING, STATE_LOCKED)} + return {"isLocked": self.state.state in (LockState.UNLOCKING, LockState.LOCKED)} async def execute(self, command, data, params, challenge): """Execute an LockUnlock command.""" @@ -1554,23 +1542,23 @@ class ArmDisArmTrait(_Trait): https://developers.google.com/actions/smarthome/traits/armdisarm """ - name = TRAIT_ARMDISARM - commands = [COMMAND_ARMDISARM] + name = TRAIT_ARM_DISARM + commands = [COMMAND_ARM_DISARM] state_to_service = { - STATE_ALARM_ARMED_HOME: SERVICE_ALARM_ARM_HOME, - STATE_ALARM_ARMED_NIGHT: SERVICE_ALARM_ARM_NIGHT, - STATE_ALARM_ARMED_AWAY: SERVICE_ALARM_ARM_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS: SERVICE_ALARM_ARM_CUSTOM_BYPASS, - STATE_ALARM_TRIGGERED: SERVICE_ALARM_TRIGGER, + AlarmControlPanelState.ARMED_HOME: SERVICE_ALARM_ARM_HOME, + AlarmControlPanelState.ARMED_NIGHT: SERVICE_ALARM_ARM_NIGHT, + AlarmControlPanelState.ARMED_AWAY: SERVICE_ALARM_ARM_AWAY, + 
AlarmControlPanelState.ARMED_CUSTOM_BYPASS: SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.TRIGGERED: SERVICE_ALARM_TRIGGER, } state_to_support = { - STATE_ALARM_ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, - STATE_ALARM_ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, - STATE_ALARM_ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, - STATE_ALARM_TRIGGERED: AlarmControlPanelEntityFeature.TRIGGER, + AlarmControlPanelState.ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, + AlarmControlPanelState.ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, + AlarmControlPanelState.ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, + AlarmControlPanelState.TRIGGERED: AlarmControlPanelEntityFeature.TRIGGER, } """The list of states to support in increasing security state.""" @@ -1596,8 +1584,8 @@ class ArmDisArmTrait(_Trait): def _default_arm_state(self): states = self._supported_states() - if STATE_ALARM_TRIGGERED in states: - states.remove(STATE_ALARM_TRIGGERED) + if AlarmControlPanelState.TRIGGERED in states: + states.remove(AlarmControlPanelState.TRIGGERED) if not states: raise SmartHomeError(ERR_NOT_SUPPORTED, "ArmLevel missing") @@ -1612,7 +1600,7 @@ class ArmDisArmTrait(_Trait): # level synonyms are generated from state names # 'armed_away' becomes 'armed away' or 'away' level_synonym = [state.replace("_", " ")] - if state != STATE_ALARM_TRIGGERED: + if state != AlarmControlPanelState.TRIGGERED: level_synonym.append(state.split("_")[1]) level = { @@ -1653,11 +1641,11 @@ class ArmDisArmTrait(_Trait): elif ( params["arm"] and params.get("cancel") - and self.state.state == STATE_ALARM_PENDING + and self.state.state == AlarmControlPanelState.PENDING ): service = SERVICE_ALARM_DISARM else: - if self.state.state == STATE_ALARM_DISARMED: + if self.state.state == AlarmControlPanelState.DISARMED: raise SmartHomeError(ERR_ALREADY_DISARMED, "System is already disarmed") _verify_pin_challenge(data, self.state, challenge) service = SERVICE_ALARM_DISARM @@ -1695,8 +1683,8 @@ class FanSpeedTrait(_Trait): https://developers.google.com/actions/smarthome/traits/fanspeed """ - name = TRAIT_FANSPEED - commands = [COMMAND_FANSPEED, COMMAND_REVERSE] + name = TRAIT_FAN_SPEED + commands = [COMMAND_SET_FAN_SPEED, COMMAND_REVERSE] def __init__(self, hass, state, config): """Initialize a trait for a state.""" @@ -1841,7 +1829,7 @@ class FanSpeedTrait(_Trait): async def execute(self, command, data, params, challenge): """Execute a smart home command.""" - if command == COMMAND_FANSPEED: + if command == COMMAND_SET_FAN_SPEED: await self.execute_fanspeed(data, params) elif command == COMMAND_REVERSE: await self.execute_reverse(data, params) @@ -1855,7 +1843,7 @@ class ModesTrait(_Trait): """ name = TRAIT_MODES - commands = [COMMAND_MODES] + commands = [COMMAND_SET_MODES] SYNONYMS = { "preset mode": ["preset mode", "mode", "preset"], @@ -2089,8 +2077,8 @@ class InputSelectorTrait(_Trait): https://developers.google.com/assistant/smarthome/traits/inputselector """ - name = TRAIT_INPUTSELECTOR - commands = [COMMAND_INPUT, COMMAND_NEXT_INPUT, COMMAND_PREVIOUS_INPUT] + name = TRAIT_INPUT_SELECTOR + commands = [COMMAND_SET_INPUT, COMMAND_NEXT_INPUT, COMMAND_PREVIOUS_INPUT] SYNONYMS: dict[str, list[str]] = {} @@ -2125,7 +2113,7 @@ class InputSelectorTrait(_Trait): sources = 
self.state.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST) or [] source = self.state.attributes.get(media_player.ATTR_INPUT_SOURCE) - if command == COMMAND_INPUT: + if command == COMMAND_SET_INPUT: requested_source = params.get("newInput") elif command == COMMAND_NEXT_INPUT: requested_source = _next_selected(sources, source) @@ -2163,8 +2151,8 @@ class OpenCloseTrait(_Trait): cover.CoverDeviceClass.GATE, ) - name = TRAIT_OPENCLOSE - commands = [COMMAND_OPENCLOSE, COMMAND_OPENCLOSE_RELATIVE] + name = TRAIT_OPEN_CLOSE + commands = [COMMAND_OPEN_CLOSE, COMMAND_OPEN_CLOSE_RELATIVE] @staticmethod def supported(domain, features, device_class, _): @@ -2264,7 +2252,7 @@ class OpenCloseTrait(_Trait): if domain in COVER_VALVE_DOMAINS: svc_params = {ATTR_ENTITY_ID: self.state.entity_id} should_verify = False - if command == COMMAND_OPENCLOSE_RELATIVE: + if command == COMMAND_OPEN_CLOSE_RELATIVE: position = self.state.attributes.get( COVER_VALVE_CURRENT_POSITION[domain] ) @@ -2711,6 +2699,21 @@ class SensorStateTrait(_Trait): ), } + binary_sensor_types = { + binary_sensor.BinarySensorDeviceClass.CO: ( + "CarbonMonoxideLevel", + ["carbon monoxide detected", "no carbon monoxide detected", "unknown"], + ), + binary_sensor.BinarySensorDeviceClass.SMOKE: ( + "SmokeLevel", + ["smoke detected", "no smoke detected", "unknown"], + ), + binary_sensor.BinarySensorDeviceClass.MOISTURE: ( + "WaterLeak", + ["leak", "no leak", "unknown"], + ), + } + name = TRAIT_SENSOR_STATE commands: list[str] = [] @@ -2733,24 +2736,37 @@ class SensorStateTrait(_Trait): @classmethod def supported(cls, domain, features, device_class, _): """Test if state is supported.""" - return domain == sensor.DOMAIN and device_class in cls.sensor_types + return (domain == sensor.DOMAIN and device_class in cls.sensor_types) or ( + domain == binary_sensor.DOMAIN and device_class in cls.binary_sensor_types + ) def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) - data = self.sensor_types.get(device_class) - if device_class is None or data is None: - return {} + def create_sensor_state( + name: str, + raw_value_unit: str | None = None, + available_states: list[str] | None = None, + ) -> dict[str, Any]: + sensor_state: dict[str, Any] = { + "name": name, + } + if raw_value_unit: + sensor_state["numericCapabilities"] = {"rawValueUnit": raw_value_unit} + if available_states: + sensor_state["descriptiveCapabilities"] = { + "availableStates": available_states + } + return {"sensorStatesSupported": [sensor_state]} - sensor_state = { - "name": data[0], - "numericCapabilities": {"rawValueUnit": data[1]}, - } - - if device_class == sensor.SensorDeviceClass.AQI: - sensor_state["descriptiveCapabilities"] = { - "availableStates": [ + if self.state.domain == sensor.DOMAIN: + sensor_data = self.sensor_types.get(device_class) + if device_class is None or sensor_data is None: + return {} + available_states: list[str] | None = None + if device_class == sensor.SensorDeviceClass.AQI: + available_states = [ "healthy", "moderate", "unhealthy for sensitive groups", @@ -2758,30 +2774,53 @@ class SensorStateTrait(_Trait): "very unhealthy", "hazardous", "unknown", - ], - } - - return {"sensorStatesSupported": [sensor_state]} + ] + return create_sensor_state(sensor_data[0], sensor_data[1], available_states) + binary_sensor_data = self.binary_sensor_types.get(device_class) + if device_class is None or binary_sensor_data is None: + return {} + return create_sensor_state( + 
binary_sensor_data[0], available_states=binary_sensor_data[1] + ) def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) - data = self.sensor_types.get(device_class) - if device_class is None or data is None: + def create_sensor_state( + name: str, raw_value: float | None = None, current_state: str | None = None + ) -> dict[str, Any]: + sensor_state: dict[str, Any] = { + "name": name, + "rawValue": raw_value, + } + if current_state: + sensor_state["currentSensorState"] = current_state + return {"currentSensorStateData": [sensor_state]} + + if self.state.domain == sensor.DOMAIN: + sensor_data = self.sensor_types.get(device_class) + if device_class is None or sensor_data is None: + return {} + try: + value = float(self.state.state) + except ValueError: + value = None + if self.state.state == STATE_UNKNOWN: + value = None + current_state: str | None = None + if device_class == sensor.SensorDeviceClass.AQI: + current_state = self._air_quality_description_for_aqi(value) + return create_sensor_state(sensor_data[0], value, current_state) + + binary_sensor_data = self.binary_sensor_types.get(device_class) + if device_class is None or binary_sensor_data is None: return {} - - try: - value = float(self.state.state) - except ValueError: - value = None - if self.state.state == STATE_UNKNOWN: - value = None - sensor_data = {"name": data[0], "rawValue": value} - - if device_class == sensor.SensorDeviceClass.AQI: - sensor_data["currentSensorState"] = self._air_quality_description_for_aqi( - value - ) - - return {"currentSensorStateData": [sensor_data]} + value = { + STATE_ON: 0, + STATE_OFF: 1, + STATE_UNKNOWN: 2, + }[self.state.state] + return create_sensor_state( + binary_sensor_data[0], current_state=binary_sensor_data[1][value] + ) diff --git a/homeassistant/components/google_assistant_sdk/config_flow.py b/homeassistant/components/google_assistant_sdk/config_flow.py index 85dfd974b22..48c92832483 100644 --- a/homeassistant/components/google_assistant_sdk/config_flow.py +++ b/homeassistant/components/google_assistant_sdk/config_flow.py @@ -8,7 +8,12 @@ from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow +from homeassistant.config_entries import ( + SOURCE_REAUTH, + ConfigEntry, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.core import callback from homeassistant.helpers import config_entry_oauth2_flow @@ -25,8 +30,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -46,9 +49,6 @@ class OAuth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -61,14 +61,10 @@ class OAuth2FlowHandler( async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow, or update existing entry.""" - if self.reauth_entry: - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - if self._async_current_entries(): - # Config entry already exists, only one allowed. 
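Illustrative sketch (not part of this diff): the binary sensor branch in SensorStateTrait above maps on/off/unknown to an index into the descriptive-state list declared in binary_sensor_types. The helper name and the SMOKE tuple below are made up for the example.

from typing import Any

from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN

# Same shape as an entry in SensorStateTrait.binary_sensor_types.
SMOKE = ("SmokeLevel", ["smoke detected", "no smoke detected", "unknown"])

_STATE_INDEX = {STATE_ON: 0, STATE_OFF: 1, STATE_UNKNOWN: 2}


def describe_binary_sensor_state(state: str) -> dict[str, Any]:
    """Return the currentSensorStateData payload for a smoke binary sensor."""
    name, descriptive_states = SMOKE
    return {
        "currentSensorStateData": [
            {
                "name": name,
                "rawValue": None,  # binary sensors have no numeric reading
                "currentSensorState": descriptive_states[_STATE_INDEX[state]],
            }
        ]
    }

# describe_binary_sensor_state(STATE_ON)["currentSensorStateData"][0]["currentSensorState"]
# == "smoke detected"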
- return self.async_abort(reason="single_instance_allowed") + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) return self.async_create_entry( title=DEFAULT_NAME, @@ -84,16 +80,12 @@ class OAuth2FlowHandler( config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Google Assistant SDK options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/google_assistant_sdk/icons.json b/homeassistant/components/google_assistant_sdk/icons.json index bf1420b2e3f..75747c43f5b 100644 --- a/homeassistant/components/google_assistant_sdk/icons.json +++ b/homeassistant/components/google_assistant_sdk/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_text_command": "mdi:comment-text-outline" + "send_text_command": { + "service": "mdi:comment-text-outline" + } } } diff --git a/homeassistant/components/google_assistant_sdk/manifest.json b/homeassistant/components/google_assistant_sdk/manifest.json index b6281e2a4f0..85469a464b3 100644 --- a/homeassistant/components/google_assistant_sdk/manifest.json +++ b/homeassistant/components/google_assistant_sdk/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "platinum", - "requirements": ["gassist-text==0.0.11"] + "requirements": ["gassist-text==0.0.11"], + "single_config_entry": true } diff --git a/homeassistant/components/google_assistant_sdk/strings.json b/homeassistant/components/google_assistant_sdk/strings.json index d5d1d885427..4fd817aadce 100644 --- a/homeassistant/components/google_assistant_sdk/strings.json +++ b/homeassistant/components/google_assistant_sdk/strings.json @@ -40,7 +40,7 @@ } }, "application_credentials": { - "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Assistant SDK. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and click **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type.\n\n" + "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Assistant SDK. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type." 
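Illustrative sketch (not part of this diff): the reauth pattern these config flows switch to, checking self.source instead of caching a reauth entry so the built-in helpers handle the update, reload, and abort. The Example names are placeholders.

import logging
from typing import Any

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.helpers import config_entry_oauth2_flow

EXAMPLE_DOMAIN = "example"  # placeholder domain


class ExampleOAuth2FlowHandler(
    config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=EXAMPLE_DOMAIN
):
    """OAuth2 flow that relies on the built-in reauth helpers."""

    DOMAIN = EXAMPLE_DOMAIN

    @property
    def logger(self) -> logging.Logger:
        """Return logger."""
        return logging.getLogger(__name__)

    async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
        """Create a new entry, or update the existing one on reauth."""
        if self.source == SOURCE_REAUTH:
            # Updates the entry data, reloads it, and aborts with "reauth_successful".
            return self.async_update_reload_and_abort(
                self._get_reauth_entry(), data=data
            )
        return self.async_create_entry(title="Example", data=data)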
}, "services": { "send_text_command": { diff --git a/homeassistant/components/google_cloud/__init__.py b/homeassistant/components/google_cloud/__init__.py index 97b669245d2..9d1923fd87d 100644 --- a/homeassistant/components/google_cloud/__init__.py +++ b/homeassistant/components/google_cloud/__init__.py @@ -1 +1,26 @@ """The google_cloud component.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +PLATFORMS = [Platform.STT, Platform.TTS] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up a config entry.""" + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(async_update_options)) + return True + + +async def async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/google_cloud/config_flow.py b/homeassistant/components/google_cloud/config_flow.py new file mode 100644 index 00000000000..fa6c952022b --- /dev/null +++ b/homeassistant/components/google_cloud/config_flow.py @@ -0,0 +1,187 @@ +"""Config flow for the Google Cloud integration.""" + +from __future__ import annotations + +import json +import logging +from typing import TYPE_CHECKING, Any, cast + +from google.cloud import texttospeech +import voluptuous as vol + +from homeassistant.components.file_upload import process_uploaded_file +from homeassistant.components.tts import CONF_LANG +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) +from homeassistant.core import callback +from homeassistant.helpers.selector import ( + FileSelector, + FileSelectorConfig, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) + +from .const import ( + CONF_KEY_FILE, + CONF_SERVICE_ACCOUNT_INFO, + CONF_STT_MODEL, + DEFAULT_LANG, + DEFAULT_STT_MODEL, + DOMAIN, + SUPPORTED_STT_MODELS, + TITLE, +) +from .helpers import ( + async_tts_voices, + tts_options_schema, + tts_platform_schema, + validate_service_account_info, +) + +_LOGGER = logging.getLogger(__name__) + +UPLOADED_KEY_FILE = "uploaded_key_file" + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(UPLOADED_KEY_FILE): FileSelector( + FileSelectorConfig(accept=".json,application/json") + ) + } +) + + +class GoogleCloudConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Google Cloud integration.""" + + VERSION = 1 + + _name: str | None = None + entry: ConfigEntry | None = None + abort_reason: str | None = None + + def _parse_uploaded_file(self, uploaded_file_id: str) -> dict[str, Any]: + """Read and parse an uploaded JSON file.""" + with process_uploaded_file(self.hass, uploaded_file_id) as file_path: + contents = file_path.read_text() + return cast(dict[str, Any], json.loads(contents)) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, Any] = {} + if user_input is not None: + try: + service_account_info = await self.hass.async_add_executor_job( + self._parse_uploaded_file, user_input[UPLOADED_KEY_FILE] + ) + 
validate_service_account_info(service_account_info) + except ValueError: + _LOGGER.exception("Reading uploaded JSON file failed") + errors["base"] = "invalid_file" + else: + data = {CONF_SERVICE_ACCOUNT_INFO: service_account_info} + if self.entry: + if TYPE_CHECKING: + assert self.abort_reason + return self.async_update_reload_and_abort( + self.entry, data=data, reason=self.abort_reason + ) + return self.async_create_entry(title=TITLE, data=data) + return self.async_show_form( + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + description_placeholders={ + "url": "https://console.cloud.google.com/apis/credentials/serviceaccountkey" + }, + ) + + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + """Import Google Cloud configuration from YAML.""" + + def _read_key_file() -> dict[str, Any]: + with open( + self.hass.config.path(import_data[CONF_KEY_FILE]), encoding="utf8" + ) as f: + return cast(dict[str, Any], json.load(f)) + + service_account_info = await self.hass.async_add_executor_job(_read_key_file) + try: + validate_service_account_info(service_account_info) + except ValueError: + _LOGGER.exception("Reading credentials JSON file failed") + return self.async_abort(reason="invalid_file") + options = { + k: v for k, v in import_data.items() if k in tts_platform_schema().schema + } + options.pop(CONF_KEY_FILE) + _LOGGER.debug("Creating imported config entry with options: %s", options) + return self.async_create_entry( + title=TITLE, + data={CONF_SERVICE_ACCOUNT_INFO: service_account_info}, + options=options, + ) + + @staticmethod + @callback + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> GoogleCloudOptionsFlowHandler: + """Create the options flow.""" + return GoogleCloudOptionsFlowHandler() + + +class GoogleCloudOptionsFlowHandler(OptionsFlow): + """Google Cloud options flow.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + if user_input is not None: + return self.async_create_entry(data=user_input) + + service_account_info = self.config_entry.data[CONF_SERVICE_ACCOUNT_INFO] + client: texttospeech.TextToSpeechAsyncClient = ( + texttospeech.TextToSpeechAsyncClient.from_service_account_info( + service_account_info + ) + ) + voices = await async_tts_voices(client) + return self.async_show_form( + step_id="init", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Optional( + CONF_LANG, + default=DEFAULT_LANG, + ): SelectSelector( + SelectSelectorConfig( + mode=SelectSelectorMode.DROPDOWN, options=list(voices) + ) + ), + **tts_options_schema( + self.config_entry.options, voices, from_config_flow=True + ).schema, + vol.Optional( + CONF_STT_MODEL, + default=DEFAULT_STT_MODEL, + ): SelectSelector( + SelectSelectorConfig( + mode=SelectSelectorMode.DROPDOWN, + options=SUPPORTED_STT_MODELS, + ) + ), + } + ), + self.config_entry.options, + ), + ) diff --git a/homeassistant/components/google_cloud/const.py b/homeassistant/components/google_cloud/const.py index 0fbd5e78274..f416d36483a 100644 --- a/homeassistant/components/google_cloud/const.py +++ b/homeassistant/components/google_cloud/const.py @@ -2,10 +2,15 @@ from __future__ import annotations +DOMAIN = "google_cloud" +TITLE = "Google Cloud" + +CONF_SERVICE_ACCOUNT_INFO = "service_account_info" CONF_KEY_FILE = "key_file" DEFAULT_LANG = "en-US" +# TTS constants CONF_GENDER = "gender" CONF_VOICE = "voice" CONF_ENCODING = "encoding" @@ -14,3 +19,166 @@ CONF_PITCH = 
"pitch" CONF_GAIN = "gain" CONF_PROFILES = "profiles" CONF_TEXT_TYPE = "text_type" + +# STT constants +CONF_STT_MODEL = "stt_model" + +DEFAULT_STT_MODEL = "latest_short" + +# https://cloud.google.com/speech-to-text/docs/transcription-model +SUPPORTED_STT_MODELS = [ + "latest_long", + "latest_short", + "telephony", + "telephony_short", + "medical_dictation", + "medical_conversation", + "command_and_search", + "default", + "phone_call", + "video", +] + +# https://cloud.google.com/speech-to-text/docs/speech-to-text-supported-languages +STT_LANGUAGES = [ + "af-ZA", + "am-ET", + "ar-AE", + "ar-BH", + "ar-DZ", + "ar-EG", + "ar-IL", + "ar-IQ", + "ar-JO", + "ar-KW", + "ar-LB", + "ar-MA", + "ar-MR", + "ar-OM", + "ar-PS", + "ar-QA", + "ar-SA", + "ar-SY", + "ar-TN", + "ar-YE", + "az-AZ", + "bg-BG", + "bn-BD", + "bn-IN", + "bs-BA", + "ca-ES", + "cmn-Hans-CN", + "cmn-Hans-HK", + "cmn-Hant-TW", + "cs-CZ", + "da-DK", + "de-AT", + "de-CH", + "de-DE", + "el-GR", + "en-AU", + "en-CA", + "en-GB", + "en-GH", + "en-HK", + "en-IE", + "en-IN", + "en-KE", + "en-NG", + "en-NZ", + "en-PH", + "en-PK", + "en-SG", + "en-TZ", + "en-US", + "en-ZA", + "es-AR", + "es-BO", + "es-CL", + "es-CO", + "es-CR", + "es-DO", + "es-EC", + "es-ES", + "es-GT", + "es-HN", + "es-MX", + "es-NI", + "es-PA", + "es-PE", + "es-PR", + "es-PY", + "es-SV", + "es-US", + "es-UY", + "es-VE", + "et-EE", + "eu-ES", + "fa-IR", + "fi-FI", + "fil-PH", + "fr-BE", + "fr-CA", + "fr-CH", + "fr-FR", + "gl-ES", + "gu-IN", + "hi-IN", + "hr-HR", + "hu-HU", + "hy-AM", + "id-ID", + "is-IS", + "it-CH", + "it-IT", + "iw-IL", + "ja-JP", + "jv-ID", + "ka-GE", + "kk-KZ", + "km-KH", + "kn-IN", + "ko-KR", + "lo-LA", + "lt-LT", + "lv-LV", + "mk-MK", + "ml-IN", + "mn-MN", + "mr-IN", + "ms-MY", + "my-MM", + "ne-NP", + "nl-BE", + "nl-NL", + "no-NO", + "pa-Guru-IN", + "pl-PL", + "pt-BR", + "pt-PT", + "ro-RO", + "ru-RU", + "si-LK", + "sk-SK", + "sl-SI", + "sq-AL", + "sr-RS", + "su-ID", + "sv-SE", + "sw-KE", + "sw-TZ", + "ta-IN", + "ta-LK", + "ta-MY", + "ta-SG", + "te-IN", + "th-TH", + "tr-TR", + "uk-UA", + "ur-IN", + "ur-PK", + "uz-UZ", + "vi-VN", + "yue-Hant-HK", + "zu-ZA", +] diff --git a/homeassistant/components/google_cloud/helpers.py b/homeassistant/components/google_cloud/helpers.py index 8ae6a456a4f..f6e89fae7fa 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ b/homeassistant/components/google_cloud/helpers.py @@ -2,12 +2,13 @@ from __future__ import annotations +from collections.abc import Mapping import functools import operator -from types import MappingProxyType from typing import Any from google.cloud import texttospeech +from google.oauth2.service_account import Credentials import voluptuous as vol from homeassistant.components.tts import CONF_LANG @@ -51,15 +52,20 @@ async def async_tts_voices( def tts_options_schema( - config_options: MappingProxyType[str, Any], voices: dict[str, list[str]] -): + config_options: Mapping[str, Any], + voices: dict[str, list[str]], + from_config_flow: bool = False, +) -> vol.Schema: """Return schema for TTS options with default values from config or constants.""" + # If we are called from the config flow we want the defaults to be from constants + # to allow clearing the current value (passed as suggested_value) in the UI. + # If we aren't called from the config flow we want the defaults to be from the config. 
+ defaults = {} if from_config_flow else config_options return vol.Schema( { vol.Optional( CONF_GENDER, - description={"suggested_value": config_options.get(CONF_GENDER)}, - default=config_options.get( + default=defaults.get( CONF_GENDER, texttospeech.SsmlVoiceGender.NEUTRAL.name, # type: ignore[attr-defined] ), @@ -74,8 +80,7 @@ def tts_options_schema( ), vol.Optional( CONF_VOICE, - description={"suggested_value": config_options.get(CONF_VOICE)}, - default=config_options.get(CONF_VOICE, DEFAULT_VOICE), + default=defaults.get(CONF_VOICE, DEFAULT_VOICE), ): SelectSelector( SelectSelectorConfig( mode=SelectSelectorMode.DROPDOWN, @@ -84,8 +89,7 @@ def tts_options_schema( ), vol.Optional( CONF_ENCODING, - description={"suggested_value": config_options.get(CONF_ENCODING)}, - default=config_options.get( + default=defaults.get( CONF_ENCODING, texttospeech.AudioEncoding.MP3.name, # type: ignore[attr-defined] ), @@ -100,23 +104,19 @@ def tts_options_schema( ), vol.Optional( CONF_SPEED, - description={"suggested_value": config_options.get(CONF_SPEED)}, - default=config_options.get(CONF_SPEED, 1.0), + default=defaults.get(CONF_SPEED, 1.0), ): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)), vol.Optional( CONF_PITCH, - description={"suggested_value": config_options.get(CONF_PITCH)}, - default=config_options.get(CONF_PITCH, 0), + default=defaults.get(CONF_PITCH, 0), ): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)), vol.Optional( CONF_GAIN, - description={"suggested_value": config_options.get(CONF_GAIN)}, - default=config_options.get(CONF_GAIN, 0), + default=defaults.get(CONF_GAIN, 0), ): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)), vol.Optional( CONF_PROFILES, - description={"suggested_value": config_options.get(CONF_PROFILES)}, - default=config_options.get(CONF_PROFILES, []), + default=defaults.get(CONF_PROFILES, []), ): SelectSelector( SelectSelectorConfig( mode=SelectSelectorMode.DROPDOWN, @@ -137,8 +137,7 @@ def tts_options_schema( ), vol.Optional( CONF_TEXT_TYPE, - description={"suggested_value": config_options.get(CONF_TEXT_TYPE)}, - default=config_options.get(CONF_TEXT_TYPE, "text"), + default=defaults.get(CONF_TEXT_TYPE, "text"), ): vol.All( vol.Lower, SelectSelector( @@ -152,7 +151,7 @@ def tts_options_schema( ) -def tts_platform_schema(): +def tts_platform_schema() -> vol.Schema: """Return schema for TTS platform.""" return vol.Schema( { @@ -166,3 +165,16 @@ def tts_platform_schema(): ), } ) + + +def validate_service_account_info(info: Mapping[str, str]) -> None: + """Validate service account info. + + Args: + info: The service account info in Google format. + + Raises: + ValueError: If the info is not in the expected format. 
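Illustrative sketch (not part of this diff): the validation added below boils down to google.oauth2.service_account.Credentials.from_service_account_info, which raises ValueError for keys missing required fields; the config flow surfaces that as the invalid_file error. looks_like_service_account is an example name.

from google.oauth2.service_account import Credentials


def looks_like_service_account(info: dict[str, str]) -> bool:
    """Return True if the parsed JSON looks like a service account key."""
    try:
        # Raises ValueError when fields such as "client_email", "token_uri",
        # or "private_key" are missing or malformed.
        Credentials.from_service_account_info(info)
    except ValueError:
        return False
    return True

# looks_like_service_account({"type": "service_account"})  -> False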
+ + """ + Credentials.from_service_account_info(info) # type:ignore[no-untyped-call] diff --git a/homeassistant/components/google_cloud/manifest.json b/homeassistant/components/google_cloud/manifest.json index b4fc3f39b86..3e08b6254db 100644 --- a/homeassistant/components/google_cloud/manifest.json +++ b/homeassistant/components/google_cloud/manifest.json @@ -1,8 +1,14 @@ { "domain": "google_cloud", - "name": "Google Cloud Platform", - "codeowners": ["@lufton"], + "name": "Google Cloud", + "codeowners": ["@lufton", "@tronikos"], + "config_flow": true, + "dependencies": ["file_upload"], "documentation": "https://www.home-assistant.io/integrations/google_cloud", + "integration_type": "service", "iot_class": "cloud_push", - "requirements": ["google-cloud-texttospeech==2.16.3"] + "requirements": [ + "google-cloud-texttospeech==2.17.2", + "google-cloud-speech==2.27.0" + ] } diff --git a/homeassistant/components/google_cloud/strings.json b/homeassistant/components/google_cloud/strings.json new file mode 100644 index 00000000000..3bf9d8c8489 --- /dev/null +++ b/homeassistant/components/google_cloud/strings.json @@ -0,0 +1,33 @@ +{ + "config": { + "step": { + "user": { + "description": "Upload your Google Cloud service account JSON file that you can create at {url}.", + "data": { + "uploaded_key_file": "Upload service account JSON file" + } + } + }, + "error": { + "invalid_file": "Invalid service account JSON file" + } + }, + "options": { + "step": { + "init": { + "data": { + "language": "Default language of the voice", + "gender": "Default gender of the voice", + "voice": "Default voice name (overrides language and gender)", + "encoding": "Default audio encoder", + "speed": "Default rate/speed of the voice", + "pitch": "Default pitch of the voice", + "gain": "Default volume gain (in dB) of the voice", + "profiles": "Default audio profiles", + "text_type": "Default text type", + "stt_model": "STT model" + } + } + } + } +} diff --git a/homeassistant/components/google_cloud/stt.py b/homeassistant/components/google_cloud/stt.py new file mode 100644 index 00000000000..99b7dadbb0e --- /dev/null +++ b/homeassistant/components/google_cloud/stt.py @@ -0,0 +1,147 @@ +"""Support for the Google Cloud STT service.""" + +from __future__ import annotations + +from collections.abc import AsyncGenerator, AsyncIterable +import logging + +from google.api_core.exceptions import GoogleAPIError, Unauthenticated +from google.cloud import speech_v1 + +from homeassistant.components.stt import ( + AudioBitRates, + AudioChannels, + AudioCodecs, + AudioFormats, + AudioSampleRates, + SpeechMetadata, + SpeechResult, + SpeechResultState, + SpeechToTextEntity, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import ( + CONF_SERVICE_ACCOUNT_INFO, + CONF_STT_MODEL, + DEFAULT_STT_MODEL, + DOMAIN, + STT_LANGUAGES, +) + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Google Cloud speech platform via config entry.""" + service_account_info = config_entry.data[CONF_SERVICE_ACCOUNT_INFO] + client = speech_v1.SpeechAsyncClient.from_service_account_info(service_account_info) + async_add_entities([GoogleCloudSpeechToTextEntity(config_entry, client)]) + + +class 
GoogleCloudSpeechToTextEntity(SpeechToTextEntity): + """Google Cloud STT entity.""" + + def __init__( + self, + entry: ConfigEntry, + client: speech_v1.SpeechAsyncClient, + ) -> None: + """Init Google Cloud STT entity.""" + self._attr_unique_id = f"{entry.entry_id}" + self._attr_name = entry.title + self._attr_device_info = dr.DeviceInfo( + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer="Google", + model="Cloud", + entry_type=dr.DeviceEntryType.SERVICE, + ) + self._entry = entry + self._client = client + self._model = entry.options.get(CONF_STT_MODEL, DEFAULT_STT_MODEL) + + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return STT_LANGUAGES + + @property + def supported_formats(self) -> list[AudioFormats]: + """Return a list of supported formats.""" + return [AudioFormats.WAV, AudioFormats.OGG] + + @property + def supported_codecs(self) -> list[AudioCodecs]: + """Return a list of supported codecs.""" + return [AudioCodecs.PCM, AudioCodecs.OPUS] + + @property + def supported_bit_rates(self) -> list[AudioBitRates]: + """Return a list of supported bitrates.""" + return [AudioBitRates.BITRATE_16] + + @property + def supported_sample_rates(self) -> list[AudioSampleRates]: + """Return a list of supported samplerates.""" + return [AudioSampleRates.SAMPLERATE_16000] + + @property + def supported_channels(self) -> list[AudioChannels]: + """Return a list of supported channels.""" + return [AudioChannels.CHANNEL_MONO] + + async def async_process_audio_stream( + self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] + ) -> SpeechResult: + """Process an audio stream to STT service.""" + streaming_config = speech_v1.StreamingRecognitionConfig( + config=speech_v1.RecognitionConfig( + encoding=( + speech_v1.RecognitionConfig.AudioEncoding.OGG_OPUS + if metadata.codec == AudioCodecs.OPUS + else speech_v1.RecognitionConfig.AudioEncoding.LINEAR16 + ), + sample_rate_hertz=metadata.sample_rate, + language_code=metadata.language, + model=self._model, + ) + ) + + async def request_generator() -> ( + AsyncGenerator[speech_v1.StreamingRecognizeRequest] + ): + # The first request must only contain a streaming_config + yield speech_v1.StreamingRecognizeRequest(streaming_config=streaming_config) + # All subsequent requests must only contain audio_content + async for audio_content in stream: + yield speech_v1.StreamingRecognizeRequest(audio_content=audio_content) + + try: + responses = await self._client.streaming_recognize( + requests=request_generator(), + timeout=10, + ) + + transcript = "" + async for response in responses: + _LOGGER.debug("response: %s", response) + if not response.results: + continue + result = response.results[0] + if not result.alternatives: + continue + transcript += response.results[0].alternatives[0].transcript + except GoogleAPIError as err: + _LOGGER.error("Error occurred during Google Cloud STT call: %s", err) + if isinstance(err, Unauthenticated): + self._entry.async_start_reauth(self.hass) + return SpeechResult(None, SpeechResultState.ERROR) + + return SpeechResult(transcript, SpeechResultState.SUCCESS) diff --git a/homeassistant/components/google_cloud/tts.py b/homeassistant/components/google_cloud/tts.py index ee9999fc496..c3a8254ad90 100644 --- a/homeassistant/components/google_cloud/tts.py +++ b/homeassistant/components/google_cloud/tts.py @@ -1,9 +1,12 @@ """Support for the Google Cloud TTS service.""" -import logging -import os +from __future__ import annotations -from google.api_core.exceptions import 
GoogleAPIError +import logging +from pathlib import Path +from typing import Any, cast + +from google.api_core.exceptions import GoogleAPIError, Unauthenticated from google.cloud import texttospeech import voluptuous as vol @@ -11,9 +14,15 @@ from homeassistant.components.tts import ( CONF_LANG, PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA, Provider, + TextToSpeechEntity, + TtsAudioType, Voice, ) +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import ( CONF_ENCODING, @@ -22,10 +31,12 @@ from .const import ( CONF_KEY_FILE, CONF_PITCH, CONF_PROFILES, + CONF_SERVICE_ACCOUNT_INFO, CONF_SPEED, CONF_TEXT_TYPE, CONF_VOICE, DEFAULT_LANG, + DOMAIN, ) from .helpers import async_tts_voices, tts_options_schema, tts_platform_schema @@ -34,17 +45,28 @@ _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = TTS_PLATFORM_SCHEMA.extend(tts_platform_schema().schema) -async def async_get_engine(hass, config, discovery_info=None): +async def async_get_engine( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, +) -> Provider | None: """Set up Google Cloud TTS component.""" if key_file := config.get(CONF_KEY_FILE): key_file = hass.config.path(key_file) - if not os.path.isfile(key_file): + if not Path(key_file).is_file(): _LOGGER.error("File %s doesn't exist", key_file) return None if key_file: - client = texttospeech.TextToSpeechAsyncClient.from_service_account_json( + client = texttospeech.TextToSpeechAsyncClient.from_service_account_file( key_file ) + if not hass.config_entries.async_entries(DOMAIN): + _LOGGER.debug("Creating config entry by importing: %s", config) + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + ) else: client = texttospeech.TextToSpeechAsyncClient() try: @@ -53,7 +75,6 @@ async def async_get_engine(hass, config, discovery_info=None): _LOGGER.error("Error from calling list_voices: %s", err) return None return GoogleCloudTTSProvider( - hass, client, voices, config.get(CONF_LANG, DEFAULT_LANG), @@ -61,44 +82,75 @@ async def async_get_engine(hass, config, discovery_info=None): ) -class GoogleCloudTTSProvider(Provider): - """The Google Cloud TTS API provider.""" +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Google Cloud text-to-speech.""" + service_account_info = config_entry.data[CONF_SERVICE_ACCOUNT_INFO] + client: texttospeech.TextToSpeechAsyncClient = ( + texttospeech.TextToSpeechAsyncClient.from_service_account_info( + service_account_info + ) + ) + try: + voices = await async_tts_voices(client) + except GoogleAPIError as err: + _LOGGER.error("Error from calling list_voices: %s", err) + if isinstance(err, Unauthenticated): + config_entry.async_start_reauth(hass) + return + options_schema = tts_options_schema(dict(config_entry.options), voices) + language = config_entry.options.get(CONF_LANG, DEFAULT_LANG) + async_add_entities( + [ + GoogleCloudTTSEntity( + config_entry, + client, + voices, + language, + options_schema, + ) + ] + ) + + +class BaseGoogleCloudProvider: + """The Google Cloud TTS base provider.""" def __init__( self, - hass: HomeAssistant, client: 
texttospeech.TextToSpeechAsyncClient, voices: dict[str, list[str]], - language, - options_schema, + language: str, + options_schema: vol.Schema, ) -> None: - """Init Google Cloud TTS service.""" - self.hass = hass - self.name = "Google Cloud TTS" + """Init Google Cloud TTS base provider.""" self._client = client self._voices = voices self._language = language self._options_schema = options_schema @property - def supported_languages(self): - """Return list of supported languages.""" + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" return list(self._voices) @property - def default_language(self): + def default_language(self) -> str: """Return the default language.""" return self._language @property - def supported_options(self): + def supported_options(self) -> list[str]: """Return a list of supported options.""" return [option.schema for option in self._options_schema.schema] @property - def default_options(self): + def default_options(self) -> dict[str, Any]: """Return a dict including default options.""" - return self._options_schema({}) + return cast(dict[str, Any], self._options_schema({})) @callback def async_get_supported_voices(self, language: str) -> list[Voice] | None: @@ -107,16 +159,25 @@ class GoogleCloudTTSProvider(Provider): return None return [Voice(voice, voice) for voice in voices] - async def async_get_tts_audio(self, message, language, options): - """Load TTS from google.""" + async def _async_get_tts_audio( + self, + message: str, + language: str, + options: dict[str, Any], + ) -> TtsAudioType: + """Load TTS from Google Cloud.""" try: options = self._options_schema(options) except vol.Invalid as err: _LOGGER.error("Error: %s when validating options: %s", err, options) return None, None - encoding = texttospeech.AudioEncoding[options[CONF_ENCODING]] - gender = texttospeech.SsmlVoiceGender[options[CONF_GENDER]] + encoding: texttospeech.AudioEncoding = texttospeech.AudioEncoding[ + options[CONF_ENCODING] + ] # type: ignore[misc] + gender: texttospeech.SsmlVoiceGender | None = texttospeech.SsmlVoiceGender[ + options[CONF_GENDER] + ] # type: ignore[misc] voice = options[CONF_VOICE] if voice: gender = None @@ -139,11 +200,7 @@ class GoogleCloudTTSProvider(Provider): ), ) - try: - response = await self._client.synthesize_speech(request, timeout=10) - except GoogleAPIError as err: - _LOGGER.error("Error occurred during Google Cloud TTS call: %s", err) - return None, None + response = await self._client.synthesize_speech(request, timeout=10) if encoding == texttospeech.AudioEncoding.MP3: extension = "mp3" @@ -153,3 +210,64 @@ class GoogleCloudTTSProvider(Provider): extension = "wav" return extension, response.audio_content + + +class GoogleCloudTTSEntity(BaseGoogleCloudProvider, TextToSpeechEntity): + """The Google Cloud TTS entity.""" + + def __init__( + self, + entry: ConfigEntry, + client: texttospeech.TextToSpeechAsyncClient, + voices: dict[str, list[str]], + language: str, + options_schema: vol.Schema, + ) -> None: + """Init Google Cloud TTS entity.""" + super().__init__(client, voices, language, options_schema) + self._attr_unique_id = f"{entry.entry_id}" + self._attr_name = entry.title + self._attr_device_info = dr.DeviceInfo( + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer="Google", + model="Cloud", + entry_type=dr.DeviceEntryType.SERVICE, + ) + self._entry = entry + + async def async_get_tts_audio( + self, message: str, language: str, options: dict[str, Any] + ) -> TtsAudioType: + """Load TTS from Google Cloud.""" + 
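Illustrative sketch (not part of this diff): the error handling shared by the new STT entity and the TTS entity, reduced to one helper. call_with_reauth is an example name; async_start_reauth and the google.api_core exception classes are the real APIs used in the changes above.

from collections.abc import Coroutine
import logging
from typing import Any

from google.api_core.exceptions import GoogleAPIError, Unauthenticated

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

_LOGGER = logging.getLogger(__name__)


async def call_with_reauth(
    hass: HomeAssistant, entry: ConfigEntry, call: Coroutine[Any, Any, Any]
) -> Any:
    """Await a Google Cloud API call; start reauth if credentials are rejected."""
    try:
        return await call
    except GoogleAPIError as err:
        _LOGGER.error("Error occurred during Google Cloud call: %s", err)
        if isinstance(err, Unauthenticated):
            # Triggers this entry's reauth config flow in the background.
            entry.async_start_reauth(hass)
        return None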
try: + return await self._async_get_tts_audio(message, language, options) + except GoogleAPIError as err: + _LOGGER.error("Error occurred during Google Cloud TTS call: %s", err) + if isinstance(err, Unauthenticated): + self._entry.async_start_reauth(self.hass) + return None, None + + +class GoogleCloudTTSProvider(BaseGoogleCloudProvider, Provider): + """The Google Cloud TTS API provider.""" + + def __init__( + self, + client: texttospeech.TextToSpeechAsyncClient, + voices: dict[str, list[str]], + language: str, + options_schema: vol.Schema, + ) -> None: + """Init Google Cloud TTS service.""" + super().__init__(client, voices, language, options_schema) + self.name = "Google Cloud TTS" + + async def async_get_tts_audio( + self, message: str, language: str, options: dict[str, Any] + ) -> TtsAudioType: + """Load TTS from Google Cloud.""" + try: + return await self._async_get_tts_audio(message, language, options) + except GoogleAPIError as err: + _LOGGER.error("Error occurred during Google Cloud TTS call: %s", err) + return None, None diff --git a/homeassistant/components/google_domains/__init__.py b/homeassistant/components/google_domains/__init__.py deleted file mode 100644 index a4dcef62964..00000000000 --- a/homeassistant/components/google_domains/__init__.py +++ /dev/null @@ -1,87 +0,0 @@ -"""Support for Google Domains.""" - -import asyncio -from datetime import timedelta -import logging - -import aiohttp -import voluptuous as vol - -from homeassistant.const import CONF_DOMAIN, CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.event import async_track_time_interval -from homeassistant.helpers.typing import ConfigType - -_LOGGER = logging.getLogger(__name__) - -DOMAIN = "google_domains" - -INTERVAL = timedelta(minutes=5) - -DEFAULT_TIMEOUT = 10 - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_DOMAIN): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Initialize the Google Domains component.""" - domain = config[DOMAIN].get(CONF_DOMAIN) - user = config[DOMAIN].get(CONF_USERNAME) - password = config[DOMAIN].get(CONF_PASSWORD) - timeout = config[DOMAIN].get(CONF_TIMEOUT) - - session = async_get_clientsession(hass) - - result = await _update_google_domains( - hass, session, domain, user, password, timeout - ) - - if not result: - return False - - async def update_domain_interval(now): - """Update the Google Domains entry.""" - await _update_google_domains(hass, session, domain, user, password, timeout) - - async_track_time_interval(hass, update_domain_interval, INTERVAL) - - return True - - -async def _update_google_domains(hass, session, domain, user, password, timeout): - """Update Google Domains.""" - url = f"https://{user}:{password}@domains.google.com/nic/update" - - params = {"hostname": domain} - - try: - async with asyncio.timeout(timeout): - resp = await session.get(url, params=params) - body = await resp.text() - - if body.startswith(("good", "nochg")): - return True - - _LOGGER.warning("Updating Google Domains failed: %s => %s", domain, body) - - except aiohttp.ClientError: - _LOGGER.warning("Can't connect to Google Domains 
API") - - except TimeoutError: - _LOGGER.warning("Timeout from Google Domains API for domain: %s", domain) - - return False diff --git a/homeassistant/components/google_domains/manifest.json b/homeassistant/components/google_domains/manifest.json deleted file mode 100644 index 83d9320e818..00000000000 --- a/homeassistant/components/google_domains/manifest.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "domain": "google_domains", - "name": "Google Domains", - "codeowners": [], - "documentation": "https://www.home-assistant.io/integrations/google_domains", - "iot_class": "cloud_polling" -} diff --git a/homeassistant/components/google_generative_ai_conversation/config_flow.py b/homeassistant/components/google_generative_ai_conversation/config_flow.py index ab23ac25f26..83eec25ed15 100644 --- a/homeassistant/components/google_generative_ai_conversation/config_flow.py +++ b/homeassistant/components/google_generative_ai_conversation/config_flow.py @@ -15,6 +15,7 @@ import google.generativeai as genai import voluptuous as vol from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -85,10 +86,6 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize a new GoogleGenerativeAIConfigFlow.""" - self.reauth_entry: ConfigEntry | None = None - async def async_step_api( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -106,9 +103,9 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self.reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self.reauth_entry, + self._get_reauth_entry(), data=user_input, ) return self.async_create_entry( @@ -135,9 +132,6 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -146,12 +140,13 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN): """Dialog that informs the user that reauth is required.""" if user_input is not None: return await self.async_step_api() - assert self.reauth_entry + + reauth_entry = self._get_reauth_entry() return self.async_show_form( step_id="reauth_confirm", description_placeholders={ - CONF_NAME: self.reauth_entry.title, - CONF_API_KEY: self.reauth_entry.data.get(CONF_API_KEY, ""), + CONF_NAME: reauth_entry.title, + CONF_API_KEY: reauth_entry.data.get(CONF_API_KEY, ""), }, ) @@ -168,7 +163,6 @@ class GoogleGenerativeAIOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.last_rendered_recommended = config_entry.options.get( CONF_RECOMMENDED, False ) diff --git a/homeassistant/components/google_generative_ai_conversation/icons.json b/homeassistant/components/google_generative_ai_conversation/icons.json index 6544532783a..6ac3cc3b21c 100644 --- a/homeassistant/components/google_generative_ai_conversation/icons.json +++ b/homeassistant/components/google_generative_ai_conversation/icons.json @@ -1,5 +1,7 @@ { "services": { - "generate_content": "mdi:receipt-text" + "generate_content": { + "service": "mdi:receipt-text" + } } } diff --git 
a/homeassistant/components/google_generative_ai_conversation/manifest.json b/homeassistant/components/google_generative_ai_conversation/manifest.json index 9e0dc1ddeab..7b687b7da6f 100644 --- a/homeassistant/components/google_generative_ai_conversation/manifest.json +++ b/homeassistant/components/google_generative_ai_conversation/manifest.json @@ -8,6 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "platinum", - "requirements": ["google-generativeai==0.6.0"] + "requirements": ["google-generativeai==0.8.2"] } diff --git a/homeassistant/components/google_mail/config_flow.py b/homeassistant/components/google_mail/config_flow.py index 5c81f7d49f5..b3a9a0e5d56 100644 --- a/homeassistant/components/google_mail/config_flow.py +++ b/homeassistant/components/google_mail/config_flow.py @@ -9,11 +9,10 @@ from typing import Any, cast from google.oauth2.credentials import Credentials from googleapiclient.discovery import build -from homeassistant.config_entries import ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow -from . import GoogleMailConfigEntry from .const import DEFAULT_ACCESS, DOMAIN @@ -24,8 +23,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: GoogleMailConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -45,9 +42,6 @@ class OAuth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -69,18 +63,15 @@ class OAuth2FlowHandler( credentials = Credentials(data[CONF_TOKEN][CONF_ACCESS_TOKEN]) email = await self.hass.async_add_executor_job(_get_profile) - if not self.reauth_entry: - await self.async_set_unique_id(email) + await self.async_set_unique_id(email) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry(title=email, data=data) - if self.reauth_entry.unique_id == email: - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - return self.async_abort( + reauth_entry = self._get_reauth_entry() + self._abort_if_unique_id_mismatch( reason="wrong_account", - description_placeholders={"email": cast(str, self.reauth_entry.unique_id)}, + description_placeholders={"email": cast(str, reauth_entry.unique_id)}, ) + return self.async_update_reload_and_abort(reauth_entry, data=data) diff --git a/homeassistant/components/google_mail/icons.json b/homeassistant/components/google_mail/icons.json index 599ccffe3c7..d0a6eb33715 100644 --- a/homeassistant/components/google_mail/icons.json +++ b/homeassistant/components/google_mail/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_vacation": "mdi:beach" + "set_vacation": { + "service": "mdi:beach" + } } } diff --git a/homeassistant/components/google_mail/strings.json b/homeassistant/components/google_mail/strings.json index 142e8f039d2..2c6e24109c3 100644 --- a/homeassistant/components/google_mail/strings.json +++ 
b/homeassistant/components/google_mail/strings.json @@ -32,7 +32,7 @@ } }, "application_credentials": { - "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Mail. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and click **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type.\n\n" + "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Mail. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type." }, "entity": { "sensor": { diff --git a/homeassistant/components/google_maps/device_tracker.py b/homeassistant/components/google_maps/device_tracker.py index d703078d198..31eca8fba01 100644 --- a/homeassistant/components/google_maps/device_tracker.py +++ b/homeassistant/components/google_maps/device_tracker.py @@ -100,7 +100,7 @@ class GoogleMapsScanner: self.max_gps_accuracy is not None and person.accuracy > self.max_gps_accuracy ): - _LOGGER.info( + _LOGGER.debug( ( "Ignoring %s update because expected GPS " "accuracy %s is not met: %s" diff --git a/homeassistant/components/google_maps/manifest.json b/homeassistant/components/google_maps/manifest.json index d7364e834a3..8311f75b732 100644 --- a/homeassistant/components/google_maps/manifest.json +++ b/homeassistant/components/google_maps/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/google_maps", "iot_class": "cloud_polling", "loggers": ["locationsharinglib"], + "quality_scale": "legacy", "requirements": ["locationsharinglib==5.0.1"] } diff --git a/homeassistant/components/google_photos/__init__.py b/homeassistant/components/google_photos/__init__.py new file mode 100644 index 00000000000..2a7109d8189 --- /dev/null +++ b/homeassistant/components/google_photos/__init__.py @@ -0,0 +1,59 @@ +"""The Google Photos integration.""" + +from __future__ import annotations + +from aiohttp import ClientError, ClientResponseError +from google_photos_library_api.api import GooglePhotosLibraryApi + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from . 
import api +from .const import DOMAIN +from .coordinator import GooglePhotosUpdateCoordinator +from .services import async_register_services +from .types import GooglePhotosConfigEntry + +__all__ = [ + "DOMAIN", +] + + +async def async_setup_entry( + hass: HomeAssistant, entry: GooglePhotosConfigEntry +) -> bool: + """Set up Google Photos from a config entry.""" + implementation = ( + await config_entry_oauth2_flow.async_get_config_entry_implementation( + hass, entry + ) + ) + web_session = async_get_clientsession(hass) + oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) + auth = api.AsyncConfigEntryAuth(web_session, oauth_session) + try: + await auth.async_get_access_token() + except ClientResponseError as err: + if 400 <= err.status < 500: + raise ConfigEntryAuthFailed( + "OAuth session is not valid, reauth required" + ) from err + raise ConfigEntryNotReady from err + except ClientError as err: + raise ConfigEntryNotReady from err + coordinator = GooglePhotosUpdateCoordinator(hass, GooglePhotosLibraryApi(auth)) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + async_register_services(hass) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: GooglePhotosConfigEntry +) -> bool: + """Unload a config entry.""" + return True diff --git a/homeassistant/components/google_photos/api.py b/homeassistant/components/google_photos/api.py new file mode 100644 index 00000000000..35878efd792 --- /dev/null +++ b/homeassistant/components/google_photos/api.py @@ -0,0 +1,44 @@ +"""API for Google Photos bound to Home Assistant OAuth.""" + +from typing import cast + +import aiohttp +from google_photos_library_api import api + +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.helpers import config_entry_oauth2_flow + + +class AsyncConfigEntryAuth(api.AbstractAuth): + """Provide Google Photos authentication tied to an OAuth2 based config entry.""" + + def __init__( + self, + websession: aiohttp.ClientSession, + oauth_session: config_entry_oauth2_flow.OAuth2Session, + ) -> None: + """Initialize AsyncConfigEntryAuth.""" + super().__init__(websession) + self._session = oauth_session + + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + await self._session.async_ensure_token_valid() + return cast(str, self._session.token[CONF_ACCESS_TOKEN]) + + +class AsyncConfigFlowAuth(api.AbstractAuth): + """An API client used during the config flow with a fixed token.""" + + def __init__( + self, + websession: aiohttp.ClientSession, + token: str, + ) -> None: + """Initialize ConfigFlowAuth.""" + super().__init__(websession) + self._token = token + + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + return self._token diff --git a/homeassistant/components/google_photos/application_credentials.py b/homeassistant/components/google_photos/application_credentials.py new file mode 100644 index 00000000000..fc6cdbd272d --- /dev/null +++ b/homeassistant/components/google_photos/application_credentials.py @@ -0,0 +1,23 @@ +"""application_credentials platform the Google Photos integration.""" + +from homeassistant.components.application_credentials import AuthorizationServer +from homeassistant.core import HomeAssistant + +from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN + + +async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: + """Return authorization server.""" + return AuthorizationServer( + 
authorize_url=OAUTH2_AUTHORIZE, + token_url=OAUTH2_TOKEN, + ) + + +async def async_get_description_placeholders(hass: HomeAssistant) -> dict[str, str]: + """Return description placeholders for the credentials dialog.""" + return { + "oauth_consent_url": "https://console.cloud.google.com/apis/credentials/consent", + "more_info_url": "https://www.home-assistant.io/integrations/google_photos/", + "oauth_creds_url": "https://console.cloud.google.com/apis/credentials", + } diff --git a/homeassistant/components/google_photos/config_flow.py b/homeassistant/components/google_photos/config_flow.py new file mode 100644 index 00000000000..a336455c9b4 --- /dev/null +++ b/homeassistant/components/google_photos/config_flow.py @@ -0,0 +1,81 @@ +"""Config flow for Google Photos.""" + +from collections.abc import Mapping +import logging +from typing import Any + +from google_photos_library_api.api import GooglePhotosLibraryApi +from google_photos_library_api.exceptions import GooglePhotosApiError + +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN +from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow + +from . import api +from .const import DOMAIN, OAUTH2_SCOPES + + +class OAuth2FlowHandler( + config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN +): + """Config flow to handle Google Photos OAuth2 authentication.""" + + DOMAIN = DOMAIN + + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return logging.getLogger(__name__) + + @property + def extra_authorize_data(self) -> dict[str, Any]: + """Extra data that needs to be appended to the authorize url.""" + return { + "scope": " ".join(OAUTH2_SCOPES), + # Add params to ensure we get back a refresh token + "access_type": "offline", + "prompt": "consent", + } + + async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: + """Create an entry for the flow.""" + session = aiohttp_client.async_get_clientsession(self.hass) + auth = api.AsyncConfigFlowAuth(session, data[CONF_TOKEN][CONF_ACCESS_TOKEN]) + client = GooglePhotosLibraryApi(auth) + + try: + user_resource_info = await client.get_user_info() + await client.list_media_items(page_size=1) + except GooglePhotosApiError as ex: + return self.async_abort( + reason="access_not_configured", + description_placeholders={"message": str(ex)}, + ) + except Exception: + self.logger.exception("Unknown error occurred") + return self.async_abort(reason="unknown") + user_id = user_resource_info.id + + await self.async_set_unique_id(user_id) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) + + self._abort_if_unique_id_configured() + return self.async_create_entry(title=user_resource_info.name, data=data) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: Mapping[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reauth dialog.""" + if user_input is None: + return self.async_show_form(step_id="reauth_confirm") + return await self.async_step_user() diff --git a/homeassistant/components/google_photos/const.py b/homeassistant/components/google_photos/const.py new file mode 100644 index 00000000000..9c623ed7819 --- 
/dev/null +++ b/homeassistant/components/google_photos/const.py @@ -0,0 +1,14 @@ +"""Constants for the Google Photos integration.""" + +DOMAIN = "google_photos" + +OAUTH2_AUTHORIZE = "https://accounts.google.com/o/oauth2/v2/auth" +OAUTH2_TOKEN = "https://oauth2.googleapis.com/token" + +UPLOAD_SCOPE = "https://www.googleapis.com/auth/photoslibrary.appendonly" +READ_SCOPE = "https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" +OAUTH2_SCOPES = [ + READ_SCOPE, + UPLOAD_SCOPE, + "https://www.googleapis.com/auth/userinfo.profile", +] diff --git a/homeassistant/components/google_photos/coordinator.py b/homeassistant/components/google_photos/coordinator.py new file mode 100644 index 00000000000..3ba5a8124d6 --- /dev/null +++ b/homeassistant/components/google_photos/coordinator.py @@ -0,0 +1,71 @@ +"""Coordinator for fetching data from Google Photos API. + +This coordinator fetches the list of Google Photos albums that were created by +Home Assistant, which for large libraries may take some time. The list of album +ids and titles is cached and this provides a method to refresh urls since they +are short lived. +""" + +import asyncio +import datetime +import logging +from typing import Final + +from google_photos_library_api.api import GooglePhotosLibraryApi +from google_photos_library_api.exceptions import GooglePhotosApiError +from google_photos_library_api.model import Album, NewAlbum + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + +UPDATE_INTERVAL: Final = datetime.timedelta(hours=24) +ALBUM_PAGE_SIZE = 50 + + +class GooglePhotosUpdateCoordinator(DataUpdateCoordinator[dict[str, str]]): + """Coordinator for fetching Google Photos albums. + + The `data` object is a dict from Album ID to Album title. 
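A hedged illustration of the cache described above (the album IDs and titles here are made up): the coordinator persists only the id-to-title mapping and re-fetches albums whenever fresh, short-lived URLs are needed.

    # Hypothetical coordinator.data after a refresh:
    # {"album-id-1": "Home Assistant Uploads", "album-id-2": "Doorbell clips"}
    # list_albums() below re-fetches each cached id so callers always get a fresh baseUrl.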
+ """ + + def __init__(self, hass: HomeAssistant, client: GooglePhotosLibraryApi) -> None: + """Initialize TaskUpdateCoordinator.""" + super().__init__( + hass, + _LOGGER, + name="Google Photos", + update_interval=UPDATE_INTERVAL, + ) + self.client = client + + async def _async_update_data(self) -> dict[str, str]: + """Fetch albums from API endpoint.""" + albums: dict[str, str] = {} + try: + async for album_result in await self.client.list_albums( + page_size=ALBUM_PAGE_SIZE + ): + for album in album_result.albums: + albums[album.id] = album.title + except GooglePhotosApiError as err: + _LOGGER.debug("Error listing albums: %s", err) + raise UpdateFailed(f"Error listing albums: {err}") from err + return albums + + async def list_albums(self) -> list[Album]: + """Return Albums with refreshed URLs based on the cached list of album ids.""" + return await asyncio.gather( + *(self.client.get_album(album_id) for album_id in self.data) + ) + + async def get_or_create_album(self, album: str) -> str: + """Return an existing album id or create a new one.""" + for album_id, album_title in self.data.items(): + if album_title == album: + return album_id + new_album = await self.client.create_album(NewAlbum(title=album)) + _LOGGER.debug("Created new album: %s", new_album) + self.data[new_album.id] = new_album.title + return new_album.id diff --git a/homeassistant/components/google_photos/icons.json b/homeassistant/components/google_photos/icons.json new file mode 100644 index 00000000000..5d51ed4370a --- /dev/null +++ b/homeassistant/components/google_photos/icons.json @@ -0,0 +1,7 @@ +{ + "services": { + "upload": { + "service": "mdi:cloud-upload" + } + } +} diff --git a/homeassistant/components/google_photos/manifest.json b/homeassistant/components/google_photos/manifest.json new file mode 100644 index 00000000000..9a2e7bc13f4 --- /dev/null +++ b/homeassistant/components/google_photos/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "google_photos", + "name": "Google Photos", + "codeowners": ["@allenporter"], + "config_flow": true, + "dependencies": ["application_credentials"], + "documentation": "https://www.home-assistant.io/integrations/google_photos", + "iot_class": "cloud_polling", + "loggers": ["google_photos_library_api"], + "requirements": ["google-photos-library-api==0.12.1"] +} diff --git a/homeassistant/components/google_photos/media_source.py b/homeassistant/components/google_photos/media_source.py new file mode 100644 index 00000000000..7ee81b51bc0 --- /dev/null +++ b/homeassistant/components/google_photos/media_source.py @@ -0,0 +1,305 @@ +"""Media source for Google Photos.""" + +from __future__ import annotations + +from dataclasses import dataclass +from enum import StrEnum +import logging +from typing import Self, cast + +from google_photos_library_api.exceptions import GooglePhotosApiError +from google_photos_library_api.model import Album, MediaItem + +from homeassistant.components.media_player import MediaClass, MediaType +from homeassistant.components.media_source import ( + BrowseError, + BrowseMediaSource, + MediaSource, + MediaSourceItem, + PlayMedia, +) +from homeassistant.core import HomeAssistant + +from . 
import GooglePhotosConfigEntry +from .const import DOMAIN, READ_SCOPE + +_LOGGER = logging.getLogger(__name__) + +MEDIA_ITEMS_PAGE_SIZE = 100 +ALBUM_PAGE_SIZE = 50 + +THUMBNAIL_SIZE = 256 +LARGE_IMAGE_SIZE = 2160 + + +# The PhotosIdentifier can be in the following forms: +# config-entry-id +# config-entry-id/a/album-media-id +# config-entry-id/p/photo-media-id +# +# The album-media-id can contain special reserved folder names for use by +# this integration for virtual folders like the `recent` album. + + +class PhotosIdentifierType(StrEnum): + """Type for a PhotosIdentifier.""" + + PHOTO = "p" + ALBUM = "a" + + @classmethod + def of(cls, name: str) -> PhotosIdentifierType: + """Parse a PhotosIdentifierType by string value.""" + for enum in PhotosIdentifierType: + if enum.value == name: + return enum + raise ValueError(f"Invalid PhotosIdentifierType: {name}") + + +@dataclass +class PhotosIdentifier: + """Google Photos item identifier in a media source URL.""" + + config_entry_id: str + """Identifies the account for the media item.""" + + id_type: PhotosIdentifierType | None = None + """Type of identifier""" + + media_id: str | None = None + """Identifies the album or photo contents to show.""" + + def as_string(self) -> str: + """Serialize the identifier as a string.""" + if self.id_type is None: + return self.config_entry_id + return f"{self.config_entry_id}/{self.id_type}/{self.media_id}" + + @classmethod + def of(cls, identifier: str) -> Self: + """Parse a PhotosIdentifier from a string.""" + parts = identifier.split("/") + if len(parts) == 1: + return cls(parts[0]) + if len(parts) != 3: + raise BrowseError(f"Invalid identifier: {identifier}") + return cls(parts[0], PhotosIdentifierType.of(parts[1]), parts[2]) + + @classmethod + def album(cls, config_entry_id: str, media_id: str) -> Self: + """Create an album PhotosIdentifier.""" + return cls(config_entry_id, PhotosIdentifierType.ALBUM, media_id) + + @classmethod + def photo(cls, config_entry_id: str, media_id: str) -> Self: + """Create a photo PhotosIdentifier.""" + return cls(config_entry_id, PhotosIdentifierType.PHOTO, media_id) + + +async def async_get_media_source(hass: HomeAssistant) -> MediaSource: + """Set up Google Photos media source.""" + return GooglePhotosMediaSource(hass) + + +class GooglePhotosMediaSource(MediaSource): + """Provide Google Photos as media sources.""" + + name = "Google Photos" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize Google Photos source.""" + super().__init__(DOMAIN) + self.hass = hass + + async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: + """Resolve media identifier to a url. + + This will resolve a specific media item to a url for the full photo or video contents. 
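A hedged round-trip of the identifier format sketched in the comments above (the entry and album ids are hypothetical):

    ident = PhotosIdentifier.of("entry-id-123/a/album-id-456")
    assert ident.id_type is PhotosIdentifierType.ALBUM
    assert ident.media_id == "album-id-456"
    assert ident.as_string() == "entry-id-123/a/album-id-456"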
+ """ + try: + identifier = PhotosIdentifier.of(item.identifier) + except ValueError as err: + raise BrowseError(f"Could not parse identifier: {item.identifier}") from err + if ( + identifier.media_id is None + or identifier.id_type != PhotosIdentifierType.PHOTO + ): + raise BrowseError( + f"Could not resolve identiifer that is not a Photo: {identifier}" + ) + entry = self._async_config_entry(identifier.config_entry_id) + client = entry.runtime_data.client + media_item = await client.get_media_item(media_item_id=identifier.media_id) + if not media_item.mime_type: + raise BrowseError("Could not determine mime type of media item") + if media_item.media_metadata and (media_item.media_metadata.video is not None): + url = _video_url(media_item) + else: + url = _media_url(media_item, LARGE_IMAGE_SIZE) + return PlayMedia( + url=url, + mime_type=media_item.mime_type, + ) + + async def async_browse_media(self, item: MediaSourceItem) -> BrowseMediaSource: + """Return details about the media source. + + This renders the multi-level album structure for an account, its albums, + or the contents of an album. This will return a BrowseMediaSource with a + single level of children at the next level of the hierarchy. + """ + if not item.identifier: + # Top level view that lists all accounts. + return BrowseMediaSource( + domain=DOMAIN, + identifier=None, + media_class=MediaClass.DIRECTORY, + media_content_type=MediaClass.IMAGE, + title="Google Photos", + can_play=False, + can_expand=True, + children_media_class=MediaClass.DIRECTORY, + children=[ + _build_account(entry, PhotosIdentifier(cast(str, entry.unique_id))) + for entry in self._async_config_entries() + ], + ) + + # Determine the configuration entry for this item + identifier = PhotosIdentifier.of(item.identifier) + entry = self._async_config_entry(identifier.config_entry_id) + coordinator = entry.runtime_data + client = coordinator.client + + source = _build_account(entry, identifier) + if identifier.id_type is None: + albums = await coordinator.list_albums() + source.children = [ + _build_album( + album.title, + PhotosIdentifier.album( + identifier.config_entry_id, + album.id, + ), + _cover_photo_url(album, THUMBNAIL_SIZE), + ) + for album in albums + ] + return source + + if ( + identifier.id_type != PhotosIdentifierType.ALBUM + or identifier.media_id is None + ): + raise BrowseError(f"Unsupported identifier: {identifier}") + + media_items: list[MediaItem] = [] + try: + async for media_item_result in await client.list_media_items( + album_id=identifier.media_id, page_size=MEDIA_ITEMS_PAGE_SIZE + ): + media_items.extend(media_item_result.media_items) + except GooglePhotosApiError as err: + raise BrowseError(f"Error listing media items: {err}") from err + + source.children = [ + _build_media_item( + PhotosIdentifier.photo(identifier.config_entry_id, media_item.id), + media_item, + ) + for media_item in media_items + ] + return source + + def _async_config_entries(self) -> list[GooglePhotosConfigEntry]: + """Return all config entries that support photo library reads.""" + entries = [] + for entry in self.hass.config_entries.async_loaded_entries(DOMAIN): + scopes = entry.data["token"]["scope"].split(" ") + if READ_SCOPE in scopes: + entries.append(entry) + return entries + + def _async_config_entry(self, config_entry_id: str) -> GooglePhotosConfigEntry: + """Return a config entry with the specified id.""" + entry = self.hass.config_entries.async_entry_for_domain_unique_id( + DOMAIN, config_entry_id + ) + if not entry: + raise BrowseError( + f"Could not 
find config entry for identifier: {config_entry_id}" + ) + return entry + + +def _build_account( + config_entry: GooglePhotosConfigEntry, + identifier: PhotosIdentifier, +) -> BrowseMediaSource: + """Build the root node for a Google Photos account for a config entry.""" + return BrowseMediaSource( + domain=DOMAIN, + identifier=identifier.as_string(), + media_class=MediaClass.DIRECTORY, + media_content_type=MediaClass.IMAGE, + title=config_entry.title, + can_play=False, + can_expand=True, + ) + + +def _build_album( + title: str, identifier: PhotosIdentifier, thumbnail_url: str | None = None +) -> BrowseMediaSource: + """Build an album node.""" + return BrowseMediaSource( + domain=DOMAIN, + identifier=identifier.as_string(), + media_class=MediaClass.ALBUM, + media_content_type=MediaClass.ALBUM, + title=title, + can_play=False, + can_expand=True, + thumbnail=thumbnail_url, + ) + + +def _build_media_item( + identifier: PhotosIdentifier, + media_item: MediaItem, +) -> BrowseMediaSource: + """Build the node for an individual photo or video.""" + is_video = media_item.media_metadata and ( + media_item.media_metadata.video is not None + ) + return BrowseMediaSource( + domain=DOMAIN, + identifier=identifier.as_string(), + media_class=MediaClass.IMAGE if not is_video else MediaClass.VIDEO, + media_content_type=MediaType.IMAGE if not is_video else MediaType.VIDEO, + title=media_item.filename, + can_play=is_video, + can_expand=False, + thumbnail=_media_url(media_item, THUMBNAIL_SIZE), + ) + + +def _media_url(media_item: MediaItem, max_size: int) -> str: + """Return a media item url with the specified max thumbnail size on the longest edge. + + See https://developers.google.com/photos/library/guides/access-media-items#base-urls + """ + return f"{media_item.base_url}=h{max_size}" + + +def _video_url(media_item: MediaItem) -> str: + """Return a video url for the item. + + See https://developers.google.com/photos/library/guides/access-media-items#base-urls + """ + return f"{media_item.base_url}=dv" + + +def _cover_photo_url(album: Album, max_size: int) -> str: + """Return a media item url for the cover photo of the album.""" + return f"{album.cover_photo_base_url}=h{max_size}" diff --git a/homeassistant/components/google_photos/quality_scale.yaml b/homeassistant/components/google_photos/quality_scale.yaml new file mode 100644 index 00000000000..ed313e13d6a --- /dev/null +++ b/homeassistant/components/google_photos/quality_scale.yaml @@ -0,0 +1,68 @@ +rules: + # Bronze + config-flow: done + brands: done + dependency-transparency: done + common-modules: done + has-entity-name: + status: exempt + comment: Integration does not have entities + action-setup: + status: todo + comment: | + The integration does action setup in `async_setup_entry` which needs to be + moved to `async_setup`. + appropriate-polling: done + test-before-configure: done + entity-event-setup: + status: exempt + comment: Integration does not subscribe to events. 
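A hedged note on the URL helpers above: per the base-URL documentation they link, the suffix selects the rendition; the values below are illustrative, with a placeholder baseUrl.

    _media_url(media_item, THUMBNAIL_SIZE)   # "<baseUrl>=h256", scaled to 256 px on the longest edge
    _video_url(media_item)                   # "<baseUrl>=dv", the playable video bytes
    _cover_photo_url(album, THUMBNAIL_SIZE)  # "<coverPhotoBaseUrl>=h256"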
+ unique-config-entry: done + entity-unique-id: done + docs-installation-instructions: done + docs-removal-instructions: todo + test-before-setup: done + docs-high-level-description: done + config-flow-test-coverage: done + docs-actions: done + runtime-data: done + + # Silver + log-when-unavailable: todo + config-entry-unloading: todo + reauthentication-flow: done + action-exceptions: todo + docs-installation-parameters: todo + integration-owner: todo + parallel-updates: todo + test-coverage: todo + docs-configuration-parameters: todo + entity-unavailable: todo + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/homeassistant/components/google_photos/services.py b/homeassistant/components/google_photos/services.py new file mode 100644 index 00000000000..f23a706b2e2 --- /dev/null +++ b/homeassistant/components/google_photos/services.py @@ -0,0 +1,164 @@ +"""Google Photos services.""" + +from __future__ import annotations + +import asyncio +import mimetypes +from pathlib import Path + +from google_photos_library_api.exceptions import GooglePhotosApiError +from google_photos_library_api.model import NewMediaItem, SimpleMediaItem +import voluptuous as vol + +from homeassistant.const import CONF_FILENAME +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv + +from .const import DOMAIN, UPLOAD_SCOPE +from .types import GooglePhotosConfigEntry + +CONF_CONFIG_ENTRY_ID = "config_entry_id" +CONF_ALBUM = "album" + +UPLOAD_SERVICE = "upload" +UPLOAD_SERVICE_SCHEMA = vol.Schema( + { + vol.Required(CONF_CONFIG_ENTRY_ID): cv.string, + vol.Required(CONF_FILENAME): vol.All(cv.ensure_list, [cv.string]), + vol.Required(CONF_ALBUM): cv.string, + } +) +CONTENT_SIZE_LIMIT = 20 * 1024 * 1024 + + +def _read_file_contents( + hass: HomeAssistant, filenames: list[str] +) -> list[tuple[str, bytes]]: + """Return the mime types and file contents for each file.""" + results = [] + for filename in filenames: + if not hass.config.is_allowed_path(filename): + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="no_access_to_path", + translation_placeholders={"filename": filename}, + ) + filename_path = Path(filename) + if not filename_path.exists(): + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="filename_does_not_exist", + translation_placeholders={"filename": filename}, + ) + if filename_path.stat().st_size > CONTENT_SIZE_LIMIT: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="file_too_large", + translation_placeholders={ + "filename": filename, + "size": str(filename_path.stat().st_size), + "limit": str(CONTENT_SIZE_LIMIT), + }, + ) + mime_type, _ = mimetypes.guess_type(filename) + if mime_type is None or not (mime_type.startswith(("image", "video"))): + 
raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="filename_is_not_image", + translation_placeholders={"filename": filename}, + ) + results.append((mime_type, filename_path.read_bytes())) + return results + + +def async_register_services(hass: HomeAssistant) -> None: + """Register Google Photos services.""" + + async def async_handle_upload(call: ServiceCall) -> ServiceResponse: + """Upload images or videos to Google Photos.""" + config_entry: GooglePhotosConfigEntry | None = ( + hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID]) + ) + if not config_entry: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="integration_not_found", + translation_placeholders={"target": DOMAIN}, + ) + scopes = config_entry.data["token"]["scope"].split(" ") + if UPLOAD_SCOPE not in scopes: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="missing_upload_permission", + translation_placeholders={"target": DOMAIN}, + ) + coordinator = config_entry.runtime_data + client_api = coordinator.client + upload_tasks = [] + file_results = await hass.async_add_executor_job( + _read_file_contents, hass, call.data[CONF_FILENAME] + ) + + album = call.data[CONF_ALBUM] + try: + album_id = await coordinator.get_or_create_album(album) + except GooglePhotosApiError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="create_album_error", + translation_placeholders={"message": str(err)}, + ) from err + + for mime_type, content in file_results: + upload_tasks.append(client_api.upload_content(content, mime_type)) + try: + upload_results = await asyncio.gather(*upload_tasks) + except GooglePhotosApiError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="upload_error", + translation_placeholders={"message": str(err)}, + ) from err + try: + upload_result = await client_api.create_media_items( + [ + NewMediaItem( + SimpleMediaItem(upload_token=upload_result.upload_token) + ) + for upload_result in upload_results + ], + album_id=album_id, + ) + except GooglePhotosApiError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="api_error", + translation_placeholders={"message": str(err)}, + ) from err + if call.return_response: + return { + "media_items": [ + { + "media_item_id": item_result.media_item.id + for item_result in upload_result.new_media_item_results + if item_result.media_item and item_result.media_item.id + } + ], + "album_id": album_id, + } + return None + + if not hass.services.has_service(DOMAIN, UPLOAD_SERVICE): + hass.services.async_register( + DOMAIN, + UPLOAD_SERVICE, + async_handle_upload, + schema=UPLOAD_SERVICE_SCHEMA, + supports_response=SupportsResponse.OPTIONAL, + ) diff --git a/homeassistant/components/google_photos/services.yaml b/homeassistant/components/google_photos/services.yaml new file mode 100644 index 00000000000..ec3b94c453b --- /dev/null +++ b/homeassistant/components/google_photos/services.yaml @@ -0,0 +1,15 @@ +upload: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: google_photos + filename: + required: false + selector: + object: + album: + required: true + selector: + text: diff --git a/homeassistant/components/google_photos/strings.json b/homeassistant/components/google_photos/strings.json new file mode 100644 index 00000000000..fa3f4669dac --- /dev/null +++ b/homeassistant/components/google_photos/strings.json @@ -0,0 +1,88 @@ +{ + "application_credentials": { 
"description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Photos. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type." + }, + "config": { + "step": { + "pick_implementation": { + "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", + "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "access_not_configured": "Unable to access the Google API:\n\n{message}", + "unknown": "[%key:common::config_flow::error::unknown%]", + "wrong_account": "Wrong account: Please authenticate with the right account.", + "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", + "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + }, + "create_entry": { + "default": "[%key:common::config_flow::create_entry::authenticated%]" + } + }, + "exceptions": { + "integration_not_found": { + "message": "Integration \"{target}\" not found in registry." + }, + "not_loaded": { + "message": "{target} is not loaded." + }, + "no_access_to_path": { + "message": "Cannot read {filename}, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`" + }, + "filename_does_not_exist": { + "message": "`{filename}` does not exist" + }, + "file_too_large": { + "message": "`{filename}` is too large ({size} > {limit})" + }, + "filename_is_not_image": { + "message": "`{filename}` is not an image" + }, + "missing_upload_permission": { + "message": "Home Assistant was not granted permission to upload to Google Photos" + }, + "upload_error": { + "message": "Failed to upload content: {message}" + }, + "create_album_error": { + "message": "Failed to create album: {message}" + }, + "api_error": { + "message": "Google Photos API responded with error: {message}" + }, + "albums_failed": { + "message": "Cannot fetch albums from the Google Photos API" + } + }, + "services": { + "upload": { + "name": "Upload media", + "description": "Upload images or videos to Google Photos.", + "fields": { + "config_entry_id": { + "name": "Integration Id", + "description": "The Google Photos integration id." 
+ }, + "filename": { + "name": "Filename", + "description": "Path to the image or video to upload.", + "example": "/config/www/image.jpg" + }, + "album": { + "name": "Album", + "description": "Album name that is the destination for the uploaded content.", + "example": "Family photos" + } + } + } + } +} diff --git a/homeassistant/components/google_photos/types.py b/homeassistant/components/google_photos/types.py new file mode 100644 index 00000000000..4f4cc1845e4 --- /dev/null +++ b/homeassistant/components/google_photos/types.py @@ -0,0 +1,7 @@ +"""Google Photos types.""" + +from homeassistant.config_entries import ConfigEntry + +from .coordinator import GooglePhotosUpdateCoordinator + +type GooglePhotosConfigEntry = ConfigEntry[GooglePhotosUpdateCoordinator] diff --git a/homeassistant/components/google_pubsub/manifest.json b/homeassistant/components/google_pubsub/manifest.json index f22317404ab..9ea747898b2 100644 --- a/homeassistant/components/google_pubsub/manifest.json +++ b/homeassistant/components/google_pubsub/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/google_pubsub", "iot_class": "cloud_push", - "requirements": ["google-cloud-pubsub==2.13.11"] + "quality_scale": "legacy", + "requirements": ["google-cloud-pubsub==2.23.0"] } diff --git a/homeassistant/components/google_sheets/config_flow.py b/homeassistant/components/google_sheets/config_flow.py index 4008d42f52d..81c82bf1bc4 100644 --- a/homeassistant/components/google_sheets/config_flow.py +++ b/homeassistant/components/google_sheets/config_flow.py @@ -9,11 +9,10 @@ from typing import Any from google.oauth2.credentials import Credentials from gspread import Client, GSpreadException -from homeassistant.config_entries import ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow -from . 
import GoogleSheetsConfigEntry from .const import DEFAULT_ACCESS, DEFAULT_NAME, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -26,8 +25,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: GoogleSheetsConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -47,9 +44,6 @@ class OAuth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -66,24 +60,23 @@ class OAuth2FlowHandler( Credentials(data[CONF_TOKEN][CONF_ACCESS_TOKEN]) # type: ignore[no-untyped-call] ) - if self.reauth_entry: + if self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() _LOGGER.debug("service.open_by_key") try: await self.hass.async_add_executor_job( service.open_by_key, - self.reauth_entry.unique_id, + reauth_entry.unique_id, ) except GSpreadException as err: _LOGGER.error( "Could not find spreadsheet '%s': %s", - self.reauth_entry.unique_id, + reauth_entry.unique_id, str(err), ) return self.async_abort(reason="open_spreadsheet_failure") - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) try: doc = await self.hass.async_add_executor_job( diff --git a/homeassistant/components/google_sheets/icons.json b/homeassistant/components/google_sheets/icons.json index c8010a690be..e2b6ed57579 100644 --- a/homeassistant/components/google_sheets/icons.json +++ b/homeassistant/components/google_sheets/icons.json @@ -1,5 +1,7 @@ { "services": { - "append_sheet": "mdi:google-spreadsheet" + "append_sheet": { + "service": "mdi:google-spreadsheet" + } } } diff --git a/homeassistant/components/google_sheets/strings.json b/homeassistant/components/google_sheets/strings.json index 0723456224f..d8cb06d9bcd 100644 --- a/homeassistant/components/google_sheets/strings.json +++ b/homeassistant/components/google_sheets/strings.json @@ -31,7 +31,7 @@ } }, "application_credentials": { - "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Sheets. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and click **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type.\n\n" + "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Sheets. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type." 
}, "services": { "append_sheet": { diff --git a/homeassistant/components/google_tasks/__init__.py b/homeassistant/components/google_tasks/__init__.py index 29a1b20f2bc..2ff22068ca9 100644 --- a/homeassistant/components/google_tasks/__init__.py +++ b/homeassistant/components/google_tasks/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from aiohttp import ClientError, ClientResponseError -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady @@ -12,11 +11,17 @@ from homeassistant.helpers import config_entry_oauth2_flow from . import api from .const import DOMAIN +from .exceptions import GoogleTasksApiError +from .types import GoogleTasksConfigEntry, GoogleTasksData + +__all__ = [ + "DOMAIN", +] PLATFORMS: list[Platform] = [Platform.TODO] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: GoogleTasksConfigEntry) -> bool: """Set up Google Tasks from a config entry.""" implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( @@ -36,16 +41,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except ClientError as err: raise ConfigEntryNotReady from err - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = auth + try: + task_lists = await auth.list_task_lists() + except GoogleTasksApiError as err: + raise ConfigEntryNotReady from err + + entry.runtime_data = GoogleTasksData(auth, task_lists) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: GoogleTasksConfigEntry +) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/google_tasks/api.py b/homeassistant/components/google_tasks/api.py index c8b30c173eb..2a294b84654 100644 --- a/homeassistant/components/google_tasks/api.py +++ b/homeassistant/components/google_tasks/api.py @@ -46,8 +46,7 @@ class AsyncConfigEntryAuth: async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token[CONF_ACCESS_TOKEN] async def _get_service(self) -> Resource: diff --git a/homeassistant/components/google_tasks/config_flow.py b/homeassistant/components/google_tasks/config_flow.py index 965c215ee4d..795b6e6eff5 100644 --- a/homeassistant/components/google_tasks/config_flow.py +++ b/homeassistant/components/google_tasks/config_flow.py @@ -9,7 +9,7 @@ from googleapiclient.discovery import build from googleapiclient.errors import HttpError from googleapiclient.http import HttpRequest -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow @@ -23,8 +23,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = 
None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -70,25 +68,24 @@ class OAuth2FlowHandler( self.logger.exception("Unknown error occurred") return self.async_abort(reason="unknown") user_id = user_resource_info["id"] - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry(title=user_resource_info["name"], data=data) - if self.reauth_entry.unique_id == user_id or not self.reauth_entry.unique_id: - return self.async_update_reload_and_abort( - self.reauth_entry, unique_id=user_id, data=data - ) + reauth_entry = self._get_reauth_entry() + if reauth_entry.unique_id: + self._abort_if_unique_id_mismatch(reason="wrong_account") - return self.async_abort(reason="wrong_account") + return self.async_update_reload_and_abort( + reauth_entry, unique_id=user_id, data=data + ) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml new file mode 100644 index 00000000000..79d216709e5 --- /dev/null +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -0,0 +1,69 @@ +rules: + # Bronze + config-flow: done + brands: done + dependency-transparency: todo + common-modules: + status: exempt + comment: | + The integration has a coordinator.py and no base entities. + has-entity-name: done + action-setup: + status: exempt + comment: The integration does not register any actions. + appropriate-polling: done + test-before-configure: done + entity-event-setup: + status: exempt + comment: Integration does not subscribe to events. + unique-config-entry: done + entity-unique-id: done + docs-installation-instructions: done + docs-removal-instructions: todo + test-before-setup: done + docs-high-level-description: done + config-flow-test-coverage: done + docs-actions: + status: exempt + comment: The integration does not register any actions. 
+ runtime-data: done + + # Silver + log-when-unavailable: done + config-entry-unloading: done + reauthentication-flow: done + action-exceptions: done + docs-installation-parameters: todo + integration-owner: done + parallel-updates: done + test-coverage: done + docs-configuration-parameters: todo + entity-unavailable: done + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/homeassistant/components/google_tasks/strings.json b/homeassistant/components/google_tasks/strings.json index 4479b34935e..a26cf8c58ec 100644 --- a/homeassistant/components/google_tasks/strings.json +++ b/homeassistant/components/google_tasks/strings.json @@ -1,6 +1,6 @@ { "application_credentials": { - "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Tasks. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and click **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type.\n\n" + "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Tasks. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type." 
}, "config": { "step": { @@ -21,7 +21,8 @@ "wrong_account": "Wrong account: Please authenticate with the right account.", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index 95c5f1c3a16..9a44b91b529 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import date, datetime, timedelta +from datetime import UTC, date, datetime, timedelta from typing import Any, cast from homeassistant.components.todo import ( @@ -11,16 +11,15 @@ from homeassistant.components.todo import ( TodoListEntity, TodoListEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util -from .api import AsyncConfigEntryAuth -from .const import DOMAIN from .coordinator import TaskUpdateCoordinator +from .types import GoogleTasksConfigEntry +PARALLEL_UPDATES = 0 SCAN_INTERVAL = timedelta(minutes=15) TODO_STATUS_MAP = { @@ -39,8 +38,10 @@ def _convert_todo_item(item: TodoItem) -> dict[str, str | None]: else: result["status"] = TodoItemStatus.NEEDS_ACTION if (due := item.due) is not None: - # due API field is a timestamp string, but with only date resolution - result["due"] = dt_util.start_of_local_day(due).isoformat() + # due API field is a timestamp string, but with only date resolution. + # The time portion of the date is always discarded by the API, so we + # always set to UTC. + result["due"] = dt_util.start_of_local_day(due).replace(tzinfo=UTC).isoformat() else: result["due"] = None result["notes"] = item.description @@ -51,6 +52,8 @@ def _convert_api_item(item: dict[str, str]) -> TodoItem: """Convert tasks API items into a TodoItem.""" due: date | None = None if (due_str := item.get("due")) is not None: + # Due dates are returned always in UTC so we only need to + # parse the date portion which will be interpreted as a a local date. 
due = datetime.fromisoformat(due_str).date() return TodoItem( summary=item["title"], @@ -65,20 +68,20 @@ def _convert_api_item(item: dict[str, str]) -> TodoItem: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GoogleTasksConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Google Tasks todo platform.""" - api: AsyncConfigEntryAuth = hass.data[DOMAIN][entry.entry_id] - task_lists = await api.list_task_lists() async_add_entities( ( GoogleTaskTodoListEntity( - TaskUpdateCoordinator(hass, api, task_list["id"]), + TaskUpdateCoordinator(hass, entry.runtime_data.api, task_list["id"]), task_list["title"], entry.entry_id, task_list["id"], ) - for task_list in task_lists + for task_list in entry.runtime_data.task_lists ), True, ) @@ -106,7 +109,7 @@ class GoogleTaskTodoListEntity( config_entry_id: str, task_list_id: str, ) -> None: - """Initialize LocalTodoListEntity.""" + """Initialize GoogleTaskTodoListEntity.""" super().__init__(coordinator) self._attr_name = name.capitalize() self._attr_unique_id = f"{config_entry_id}-{task_list_id}" @@ -153,9 +156,9 @@ class GoogleTaskTodoListEntity( def _order_tasks(tasks: list[dict[str, Any]]) -> list[dict[str, Any]]: """Order the task items response. - All tasks have an order amongst their sibblings based on position. + All tasks have an order amongst their siblings based on position. - Home Assistant To-do items do not support the Google Task parent/sibbling + Home Assistant To-do items do not support the Google Task parent/sibling relationships and the desired behavior is for them to be filtered. """ parents = [task for task in tasks if task.get("parent") is None] diff --git a/homeassistant/components/google_tasks/types.py b/homeassistant/components/google_tasks/types.py new file mode 100644 index 00000000000..eaaec23ddf5 --- /dev/null +++ b/homeassistant/components/google_tasks/types.py @@ -0,0 +1,19 @@ +"""Types for the Google Tasks integration.""" + +from dataclasses import dataclass +from typing import Any + +from homeassistant.config_entries import ConfigEntry + +from .api import AsyncConfigEntryAuth + + +@dataclass +class GoogleTasksData: + """Class to hold Google Tasks data.""" + + api: AsyncConfigEntryAuth + task_lists: list[dict[str, Any]] + + +type GoogleTasksConfigEntry = ConfigEntry[GoogleTasksData] diff --git a/homeassistant/components/google_translate/tts.py b/homeassistant/components/google_translate/tts.py index 221c99e7c20..13e0ca4c273 100644 --- a/homeassistant/components/google_translate/tts.py +++ b/homeassistant/components/google_translate/tts.py @@ -74,7 +74,7 @@ class GoogleTTSEntity(TextToSpeechEntity): else: self._lang = lang self._tld = tld - self._attr_name = f"Google {self._lang} {self._tld}" + self._attr_name = f"Google Translate {self._lang} {self._tld}" self._attr_unique_id = config_entry.entry_id @property @@ -130,7 +130,7 @@ class GoogleProvider(Provider): else: self._lang = lang self._tld = tld - self.name = "Google" + self.name = "Google Translate" @property def default_language(self) -> str: diff --git a/homeassistant/components/google_travel_time/config_flow.py b/homeassistant/components/google_travel_time/config_flow.py index 0b493d7eeeb..08de293bc7d 100644 --- a/homeassistant/components/google_travel_time/config_flow.py +++ b/homeassistant/components/google_travel_time/config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import Any 
import voluptuous as vol @@ -148,10 +148,6 @@ def default_options(hass: HomeAssistant) -> dict[str, str]: class GoogleOptionsFlow(OptionsFlow): """Handle an options flow for Google Travel Time.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize google options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: @@ -213,7 +209,7 @@ class GoogleTravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> GoogleOptionsFlow: """Get the options flow for this handler.""" - return GoogleOptionsFlow(config_entry) + return GoogleOptionsFlow() async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" @@ -238,25 +234,18 @@ class GoogleTravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - if TYPE_CHECKING: - assert entry - errors: dict[str, str] | None = None - user_input = user_input or {} - if user_input: + if user_input is not None: errors = await validate_input(self.hass, user_input) if not errors: return self.async_update_reload_and_abort( - entry, - data=user_input, - reason="reconfigure_successful", + self._get_reconfigure_entry(), data=user_input ) return self.async_show_form( step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( - RECONFIGURE_SCHEMA, entry.data.copy() + RECONFIGURE_SCHEMA, self._get_reconfigure_entry().data ), errors=errors, ) diff --git a/homeassistant/components/google_travel_time/sensor.py b/homeassistant/components/google_travel_time/sensor.py index 618dda50bd4..a764036321b 100644 --- a/homeassistant/components/google_travel_time/sensor.py +++ b/homeassistant/components/google_travel_time/sensor.py @@ -7,6 +7,7 @@ import logging from googlemaps import Client from googlemaps.distance_matrix import distance_matrix +from googlemaps.exceptions import ApiError, Timeout, TransportError from homeassistant.components.sensor import ( SensorDeviceClass, @@ -172,9 +173,13 @@ class GoogleTravelTimeSensor(SensorEntity): self._resolved_destination, ) if self._resolved_destination is not None and self._resolved_origin is not None: - self._matrix = distance_matrix( - self._client, - self._resolved_origin, - self._resolved_destination, - **options_copy, - ) + try: + self._matrix = distance_matrix( + self._client, + self._resolved_origin, + self._resolved_destination, + **options_copy, + ) + except (ApiError, TransportError, Timeout) as ex: + _LOGGER.error("Error getting travel time: %s", ex) + self._matrix = None diff --git a/homeassistant/components/google_wifi/manifest.json b/homeassistant/components/google_wifi/manifest.json index 200684b2e1c..a71558a7d6f 100644 --- a/homeassistant/components/google_wifi/manifest.json +++ b/homeassistant/components/google_wifi/manifest.json @@ -3,5 +3,6 @@ "name": "Google Wifi", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/google_wifi", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/govee_ble/binary_sensor.py b/homeassistant/components/govee_ble/binary_sensor.py index e5966124216..bd92093c29c 100644 --- a/homeassistant/components/govee_ble/binary_sensor.py +++ b/homeassistant/components/govee_ble/binary_sensor.py @@ -44,7 +44,7 @@ 
BINARY_SENSOR_DESCRIPTIONS = { def sensor_update_to_bluetooth_data_update( sensor_update: SensorUpdate, -) -> PassiveBluetoothDataUpdate: +) -> PassiveBluetoothDataUpdate[bool | None]: """Convert a sensor update to a bluetooth data update.""" return PassiveBluetoothDataUpdate( devices={ @@ -95,13 +95,13 @@ class GoveeBluetoothBinarySensorEntity( ): """Representation of a govee-ble binary sensor.""" - processor: GoveeBLEPassiveBluetoothDataProcessor + processor: GoveeBLEPassiveBluetoothDataProcessor[bool | None] @property def available(self) -> bool: """Return False if sensor is in error.""" coordinator = self.processor.coordinator - return self.processor.entity_data.get(self.entity_key) != ERROR and ( + return self.processor.entity_data.get(self.entity_key) != ERROR and ( # type: ignore[comparison-overlap] ((model_info := coordinator.model_info) and model_info.sleepy) or super().available ) diff --git a/homeassistant/components/govee_ble/coordinator.py b/homeassistant/components/govee_ble/coordinator.py index 011a89e565b..4408b7f3199 100644 --- a/homeassistant/components/govee_ble/coordinator.py +++ b/homeassistant/components/govee_ble/coordinator.py @@ -1,5 +1,7 @@ """The govee Bluetooth integration.""" +from __future__ import annotations + from collections.abc import Callable from logging import Logger diff --git a/homeassistant/components/govee_ble/sensor.py b/homeassistant/components/govee_ble/sensor.py index a94610ef0e1..383f50e5c46 100644 --- a/homeassistant/components/govee_ble/sensor.py +++ b/homeassistant/components/govee_ble/sensor.py @@ -2,6 +2,9 @@ from __future__ import annotations +from datetime import date, datetime +from decimal import Decimal + from govee_ble import DeviceClass, SensorUpdate, Units from govee_ble.parser import ERROR @@ -29,6 +32,8 @@ from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info from .coordinator import GoveeBLEConfigEntry, GoveeBLEPassiveBluetoothDataProcessor from .device import device_key_to_bluetooth_entity_key +type _SensorValueType = str | int | float | date | datetime | Decimal | None + SENSOR_DESCRIPTIONS = { (DeviceClass.TEMPERATURE, Units.TEMP_CELSIUS): SensorEntityDescription( key=f"{DeviceClass.TEMPERATURE}_{Units.TEMP_CELSIUS}", @@ -72,7 +77,7 @@ SENSOR_DESCRIPTIONS = { def sensor_update_to_bluetooth_data_update( sensor_update: SensorUpdate, -) -> PassiveBluetoothDataUpdate: +) -> PassiveBluetoothDataUpdate[_SensorValueType]: """Convert a sensor update to a bluetooth data update.""" return PassiveBluetoothDataUpdate( devices={ @@ -117,13 +122,13 @@ async def async_setup_entry( class GoveeBluetoothSensorEntity( PassiveBluetoothProcessorEntity[ - PassiveBluetoothDataProcessor[float | int | str | None, SensorUpdate] + PassiveBluetoothDataProcessor[_SensorValueType, SensorUpdate] ], SensorEntity, ): """Representation of a govee ble sensor.""" - processor: GoveeBLEPassiveBluetoothDataProcessor + processor: GoveeBLEPassiveBluetoothDataProcessor[_SensorValueType] @property def available(self) -> bool: @@ -135,6 +140,6 @@ class GoveeBluetoothSensorEntity( ) @property - def native_value(self) -> float | int | str | None: + def native_value(self) -> _SensorValueType: # pylint: disable=hass-return-type """Return the native value.""" return self.processor.entity_data.get(self.entity_key) diff --git a/homeassistant/components/govee_light_local/__init__.py b/homeassistant/components/govee_light_local/__init__.py index 088f9bae22b..44dbc825665 100644 --- a/homeassistant/components/govee_light_local/__init__.py +++ 
b/homeassistant/components/govee_light_local/__init__.py @@ -9,23 +9,21 @@ import logging from govee_local_api.controller import LISTENING_PORT -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DISCOVERY_TIMEOUT, DOMAIN -from .coordinator import GoveeLocalApiCoordinator +from .const import DISCOVERY_TIMEOUT +from .coordinator import GoveeLocalApiCoordinator, GoveeLocalConfigEntry PLATFORMS: list[Platform] = [Platform.LIGHT] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: GoveeLocalConfigEntry) -> bool: """Set up Govee light local from a config entry.""" - - coordinator: GoveeLocalApiCoordinator = GoveeLocalApiCoordinator(hass=hass) + coordinator = GoveeLocalApiCoordinator(hass=hass) async def await_cleanup(): cleanup_complete: asyncio.Event = coordinator.cleanup() @@ -52,14 +50,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except TimeoutError as ex: raise ConfigEntryNotReady from ex - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: GoveeLocalConfigEntry) -> bool: """Unload a config entry.""" - - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/govee_light_local/coordinator.py b/homeassistant/components/govee_light_local/coordinator.py index 64119f1871c..240313a34b8 100644 --- a/homeassistant/components/govee_light_local/coordinator.py +++ b/homeassistant/components/govee_light_local/coordinator.py @@ -6,6 +6,7 @@ import logging from govee_local_api import GoveeController, GoveeDevice +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -19,6 +20,8 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +type GoveeLocalConfigEntry = ConfigEntry[GoveeLocalApiCoordinator] + class GoveeLocalApiCoordinator(DataUpdateCoordinator[list[GoveeDevice]]): """Govee light local coordinator.""" diff --git a/homeassistant/components/govee_light_local/light.py b/homeassistant/components/govee_light_local/light.py index 60bf07e8e19..cb2e24fa8a6 100644 --- a/homeassistant/components/govee_light_local/light.py +++ b/homeassistant/components/govee_light_local/light.py @@ -15,26 +15,25 @@ from homeassistant.components.light import ( LightEntity, filter_supported_color_modes, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, MANUFACTURER -from .coordinator import GoveeLocalApiCoordinator +from .coordinator import GoveeLocalApiCoordinator, GoveeLocalConfigEntry _LOGGER = logging.getLogger(__name__) async 
def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: GoveeLocalConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Govee light setup.""" - coordinator: GoveeLocalApiCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data def discovery_callback(device: GoveeDevice, is_new: bool) -> bool: if is_new: @@ -93,7 +92,7 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity): }, name=device.sku, manufacturer=MANUFACTURER, - model=device.sku, + model_id=device.sku, serial_number=device.fingerprint, ) diff --git a/homeassistant/components/govee_light_local/manifest.json b/homeassistant/components/govee_light_local/manifest.json index 168a13e2477..a94d4e58e9a 100644 --- a/homeassistant/components/govee_light_local/manifest.json +++ b/homeassistant/components/govee_light_local/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["network"], "documentation": "https://www.home-assistant.io/integrations/govee_light_local", "iot_class": "local_push", - "requirements": ["govee-local-api==1.5.1"] + "requirements": ["govee-local-api==1.5.3"] } diff --git a/homeassistant/components/gpslogger/device_tracker.py b/homeassistant/components/gpslogger/device_tracker.py index b1c7ad9091f..3ed68ed1b06 100644 --- a/homeassistant/components/gpslogger/device_tracker.py +++ b/homeassistant/components/gpslogger/device_tracker.py @@ -1,6 +1,6 @@ """Support for the GPSLogger device tracking.""" -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_BATTERY_LEVEL, @@ -71,57 +71,25 @@ class GPSLoggerEntity(TrackerEntity, RestoreEntity): def __init__(self, device, location, battery, accuracy, attributes): """Set up GPSLogger entity.""" - self._accuracy = accuracy - self._attributes = attributes + self._attr_location_accuracy = accuracy + self._attr_extra_state_attributes = attributes self._name = device self._battery = battery - self._location = location + if location: + self._attr_latitude = location[0] + self._attr_longitude = location[1] self._unsub_dispatcher = None - self._unique_id = device + self._attr_unique_id = device + self._attr_device_info = DeviceInfo( + identifiers={(GPL_DOMAIN, device)}, + name=device, + ) @property def battery_level(self): """Return battery value of the device.""" return self._battery - @property - def extra_state_attributes(self): - """Return device specific attributes.""" - return self._attributes - - @property - def latitude(self): - """Return latitude value of the device.""" - return self._location[0] - - @property - def longitude(self): - """Return longitude value of the device.""" - return self._location[1] - - @property - def location_accuracy(self): - """Return the gps accuracy of the device.""" - return self._accuracy - - @property - def unique_id(self): - """Return the unique ID.""" - return self._unique_id - - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - return DeviceInfo( - identifiers={(GPL_DOMAIN, self._unique_id)}, - name=self._name, - ) - - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS - async def async_added_to_hass(self) -> None: """Register state update callback.""" await super().async_added_to_hass() @@ -130,13 +98,14 @@ class GPSLoggerEntity(TrackerEntity, 
RestoreEntity): ) # don't restore if we got created with data - if self._location is not None: + if self.latitude is not None: return if (state := await self.async_get_last_state()) is None: - self._location = (None, None) - self._accuracy = None - self._attributes = { + self._attr_latitude = None + self._attr_longitude = None + self._attr_location_accuracy = 0 + self._attr_extra_state_attributes = { ATTR_ALTITUDE: None, ATTR_ACTIVITY: None, ATTR_DIRECTION: None, @@ -147,9 +116,10 @@ class GPSLoggerEntity(TrackerEntity, RestoreEntity): return attr = state.attributes - self._location = (attr.get(ATTR_LATITUDE), attr.get(ATTR_LONGITUDE)) - self._accuracy = attr.get(ATTR_GPS_ACCURACY) - self._attributes = { + self._attr_latitude = attr.get(ATTR_LATITUDE) + self._attr_longitude = attr.get(ATTR_LONGITUDE) + self._attr_location_accuracy = attr.get(ATTR_GPS_ACCURACY, 0) + self._attr_extra_state_attributes = { ATTR_ALTITUDE: attr.get(ATTR_ALTITUDE), ATTR_ACTIVITY: attr.get(ATTR_ACTIVITY), ATTR_DIRECTION: attr.get(ATTR_DIRECTION), @@ -169,8 +139,9 @@ class GPSLoggerEntity(TrackerEntity, RestoreEntity): if device != self._name: return - self._location = location + self._attr_latitude = location[0] + self._attr_longitude = location[1] self._battery = battery - self._accuracy = accuracy - self._attributes.update(attributes) + self._attr_location_accuracy = accuracy + self._attr_extra_state_attributes.update(attributes) self.async_write_ha_state() diff --git a/homeassistant/components/graphite/__init__.py b/homeassistant/components/graphite/__init__.py index b0672e1f853..336ca6ba2cb 100644 --- a/homeassistant/components/graphite/__init__.py +++ b/homeassistant/components/graphite/__init__.py @@ -138,8 +138,7 @@ class GraphiteFeeder(threading.Thread): with suppress(ValueError): things["state"] = state.state_as_number(new_state) lines = [ - "%s.%s.%s %f %i" - % (self._prefix, entity_id, key.replace(" ", "_"), value, now) + f"{self._prefix}.{entity_id}.{key.replace(' ', '_')} {value:f} {now}" for key, value in things.items() if isinstance(value, (float, int)) ] diff --git a/homeassistant/components/graphite/manifest.json b/homeassistant/components/graphite/manifest.json index da249a22829..cd50a5933f1 100644 --- a/homeassistant/components/graphite/manifest.json +++ b/homeassistant/components/graphite/manifest.json @@ -3,5 +3,6 @@ "name": "Graphite", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/graphite", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/gree/climate.py b/homeassistant/components/gree/climate.py index 6a8f48780c8..f197f21a4e1 100644 --- a/homeassistant/components/gree/climate.py +++ b/homeassistant/components/gree/climate.py @@ -126,7 +126,6 @@ class GreeClimateEntity(GreeEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_min_temp = TEMP_MIN _attr_max_temp = TEMP_MAX - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: DeviceDataUpdateCoordinator) -> None: """Initialize the Gree device.""" diff --git a/homeassistant/components/gree/coordinator.py b/homeassistant/components/gree/coordinator.py index ae8b22706ef..42d6734a6b2 100644 --- a/homeassistant/components/gree/coordinator.py +++ b/homeassistant/components/gree/coordinator.py @@ -138,7 +138,7 @@ class DiscoveryService(Listener): except DeviceTimeoutError: _LOGGER.error("Timeout trying to bind to gree device: %s", device_info) - _LOGGER.info( + _LOGGER.debug( "Adding 
Gree device %s at %s:%i", device.device_info.name, device.device_info.ip, diff --git a/homeassistant/components/greeneye_monitor/manifest.json b/homeassistant/components/greeneye_monitor/manifest.json index fcf4d004d26..15c4c2123e3 100644 --- a/homeassistant/components/greeneye_monitor/manifest.json +++ b/homeassistant/components/greeneye_monitor/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/greeneye_monitor", "iot_class": "local_push", "loggers": ["greeneye"], + "quality_scale": "legacy", "requirements": ["greeneye_monitor==3.0.3"] } diff --git a/homeassistant/components/greenwave/manifest.json b/homeassistant/components/greenwave/manifest.json index 5cb3255192f..422d3bc512e 100644 --- a/homeassistant/components/greenwave/manifest.json +++ b/homeassistant/components/greenwave/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/greenwave", "iot_class": "local_polling", "loggers": ["greenwavereality"], + "quality_scale": "legacy", "requirements": ["greenwavereality==0.5.1"] } diff --git a/homeassistant/components/group/__init__.py b/homeassistant/components/group/__init__.py index f89bf67861d..c48cd8529a2 100644 --- a/homeassistant/components/group/__init__.py +++ b/homeassistant/components/group/__init__.py @@ -22,7 +22,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv, entity_registry as er -from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.group import ( expand_entity_ids as _expand_entity_ids, get_entity_ids as _get_entity_ids, @@ -49,12 +48,13 @@ from .const import ( # noqa: F401 ATTR_ORDER, ATTR_REMOVE_ENTITIES, CONF_HIDE_MEMBERS, + DATA_COMPONENT, DOMAIN, GROUP_ORDER, REG_KEY, ) from .entity import Group, async_get_component -from .registry import GroupIntegrationRegistry, async_setup as async_setup_registry +from .registry import async_setup as async_setup_registry CONF_ALL = "all" @@ -110,8 +110,7 @@ def is_on(hass: HomeAssistant, entity_id: str) -> bool: return False if (state := hass.states.get(entity_id)) is not None: - registry: GroupIntegrationRegistry = hass.data[REG_KEY] - return state.state in registry.on_off_mapping + return state.state in hass.data[REG_KEY].on_off_mapping return False @@ -132,7 +131,7 @@ def groups_with_entity(hass: HomeAssistant, entity_id: str) -> list[str]: return [ group.entity_id - for group in hass.data[DOMAIN].entities + for group in hass.data[DATA_COMPONENT].entities if entity_id in group.tracking ] @@ -179,10 +178,7 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up all groups found defined in the configuration.""" - if DOMAIN not in hass.data: - hass.data[DOMAIN] = EntityComponent[Group](_LOGGER, DOMAIN, hass) - - component: EntityComponent[Group] = hass.data[DOMAIN] + component = async_get_component(hass) await async_setup_registry(hass) @@ -338,7 +334,7 @@ async def _async_process_config(hass: HomeAssistant, config: ConfigType) -> None entity_ids: Collection[str] = conf.get(CONF_ENTITIES) or [] icon: str | None = conf.get(CONF_ICON) mode = bool(conf.get(CONF_ALL)) - order: int = hass.data[GROUP_ORDER] + order = hass.data[GROUP_ORDER] # We keep track of the order when we are creating the tasks # in the same way that async_create_group does to make diff --git a/homeassistant/components/group/button.py 
b/homeassistant/components/group/button.py index d8481686615..a18e074b775 100644 --- a/homeassistant/components/group/button.py +++ b/homeassistant/components/group/button.py @@ -7,7 +7,7 @@ from typing import Any import voluptuous as vol from homeassistant.components.button import ( - DOMAIN, + DOMAIN as BUTTON_DOMAIN, PLATFORM_SCHEMA as BUTTON_PLATFORM_SCHEMA, SERVICE_PRESS, ButtonEntity, @@ -34,7 +34,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = BUTTON_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(BUTTON_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -113,7 +113,7 @@ class ButtonGroup(GroupEntity, ButtonEntity): async def async_press(self) -> None: """Forward the press to all buttons in the group.""" await self.hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: self._entity_ids}, blocking=True, diff --git a/homeassistant/components/group/const.py b/homeassistant/components/group/const.py index 0fdd429269f..c706247ae01 100644 --- a/homeassistant/components/group/const.py +++ b/homeassistant/components/group/const.py @@ -1,14 +1,24 @@ """Constants for the Group integration.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from .entity import Group + from .registry import GroupIntegrationRegistry + CONF_HIDE_MEMBERS = "hide_members" CONF_IGNORE_NON_NUMERIC = "ignore_non_numeric" DOMAIN = "group" - -REG_KEY = f"{DOMAIN}_registry" - -GROUP_ORDER = "group_order" - +DATA_COMPONENT: HassKey[EntityComponent[Group]] = HassKey(DOMAIN) +REG_KEY: HassKey[GroupIntegrationRegistry] = HassKey(f"{DOMAIN}_registry") +GROUP_ORDER: HassKey[int] = HassKey("group_order") ATTR_ADD_ENTITIES = "add_entities" ATTR_REMOVE_ENTITIES = "remove_entities" diff --git a/homeassistant/components/group/cover.py b/homeassistant/components/group/cover.py index 5d7f99012fd..b2e5c6eef37 100644 --- a/homeassistant/components/group/cover.py +++ b/homeassistant/components/group/cover.py @@ -11,10 +11,11 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, PLATFORM_SCHEMA as COVER_PLATFORM_SCHEMA, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -31,10 +32,6 @@ from homeassistant.const import ( SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -57,7 +54,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(COVER_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -181,21 +178,25 @@ class CoverGroup(GroupEntity, CoverEntity): """Move the covers up.""" data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, data, blocking=True, context=self._context + COVER_DOMAIN, SERVICE_OPEN_COVER, data, blocking=True, context=self._context ) async def async_close_cover(self, **kwargs: Any) -> None: """Move the covers down.""" data 
= {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, data, blocking=True, context=self._context + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + data, + blocking=True, + context=self._context, ) async def async_stop_cover(self, **kwargs: Any) -> None: """Fire the stop action.""" data = {ATTR_ENTITY_ID: self._covers[KEY_STOP]} await self.hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, data, blocking=True, context=self._context + COVER_DOMAIN, SERVICE_STOP_COVER, data, blocking=True, context=self._context ) async def async_set_cover_position(self, **kwargs: Any) -> None: @@ -205,7 +206,7 @@ class CoverGroup(GroupEntity, CoverEntity): ATTR_POSITION: kwargs[ATTR_POSITION], } await self.hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, data, blocking=True, @@ -216,21 +217,33 @@ class CoverGroup(GroupEntity, CoverEntity): """Tilt covers open.""" data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, data, blocking=True, context=self._context + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + data, + blocking=True, + context=self._context, ) async def async_close_cover_tilt(self, **kwargs: Any) -> None: """Tilt covers closed.""" data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER_TILT, data, blocking=True, context=self._context + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + data, + blocking=True, + context=self._context, ) async def async_stop_cover_tilt(self, **kwargs: Any) -> None: """Stop cover tilt.""" data = {ATTR_ENTITY_ID: self._tilts[KEY_STOP]} await self.hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER_TILT, data, blocking=True, context=self._context + COVER_DOMAIN, + SERVICE_STOP_COVER_TILT, + data, + blocking=True, + context=self._context, ) async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: @@ -240,7 +253,7 @@ class CoverGroup(GroupEntity, CoverEntity): ATTR_TILT_POSITION: kwargs[ATTR_TILT_POSITION], } await self.hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, data, blocking=True, @@ -269,15 +282,15 @@ class CoverGroup(GroupEntity, CoverEntity): for entity_id in self._entity_ids: if not (state := self.hass.states.get(entity_id)): continue - if state.state == STATE_OPEN: + if state.state == CoverState.OPEN: self._attr_is_closed = False continue - if state.state == STATE_CLOSED: + if state.state == CoverState.CLOSED: continue - if state.state == STATE_CLOSING: + if state.state == CoverState.CLOSING: self._attr_is_closing = True continue - if state.state == STATE_OPENING: + if state.state == CoverState.OPENING: self._attr_is_opening = True continue if not valid_state: diff --git a/homeassistant/components/group/entity.py b/homeassistant/components/group/entity.py index 1b2db35531f..03a8be4bed5 100644 --- a/homeassistant/components/group/entity.py +++ b/homeassistant/components/group/entity.py @@ -22,7 +22,7 @@ from homeassistant.helpers.entity import Entity, async_generate_entity_id from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.event import async_track_state_change_event -from .const import ATTR_AUTO, ATTR_ORDER, DOMAIN, GROUP_ORDER, REG_KEY +from .const import ATTR_AUTO, ATTR_ORDER, DATA_COMPONENT, DOMAIN, GROUP_ORDER, REG_KEY from .registry import GroupIntegrationRegistry, SingleStateType ENTITY_ID_FORMAT = DOMAIN + ".{}" @@ -478,8 +478,8 @@ class 
Group(Entity): def async_get_component(hass: HomeAssistant) -> EntityComponent[Group]: """Get the group entity component.""" - if (component := hass.data.get(DOMAIN)) is None: - component = hass.data[DOMAIN] = EntityComponent[Group]( + if (component := hass.data.get(DATA_COMPONENT)) is None: + component = hass.data[DATA_COMPONENT] = EntityComponent[Group]( _PACKAGE_LOGGER, DOMAIN, hass ) return component diff --git a/homeassistant/components/group/event.py b/homeassistant/components/group/event.py index 67220b878a1..e7f7938edf3 100644 --- a/homeassistant/components/group/event.py +++ b/homeassistant/components/group/event.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components.event import ( ATTR_EVENT_TYPE, ATTR_EVENT_TYPES, - DOMAIN, + DOMAIN as EVENT_DOMAIN, PLATFORM_SCHEMA as EVENT_PLATFORM_SCHEMA, EventEntity, ) @@ -40,7 +40,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = EVENT_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(EVENT_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } diff --git a/homeassistant/components/group/fan.py b/homeassistant/components/group/fan.py index 93004e8a1b5..87d9cb281f4 100644 --- a/homeassistant/components/group/fan.py +++ b/homeassistant/components/group/fan.py @@ -14,7 +14,7 @@ from homeassistant.components.fan import ( ATTR_OSCILLATING, ATTR_PERCENTAGE, ATTR_PERCENTAGE_STEP, - DOMAIN, + DOMAIN as FAN_DOMAIN, PLATFORM_SCHEMA as FAN_PLATFORM_SCHEMA, SERVICE_OSCILLATE, SERVICE_SET_DIRECTION, @@ -58,7 +58,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = FAN_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(FAN_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -109,7 +109,6 @@ class FanGroup(GroupEntity, FanEntity): """Representation of a FanGroup.""" _attr_available: bool = False - _enable_turn_on_off_backwards_compatibility = False def __init__(self, unique_id: str | None, name: str, entities: list[str]) -> None: """Initialize a FanGroup entity.""" @@ -218,7 +217,7 @@ class FanGroup(GroupEntity, FanEntity): ) -> None: """Call a service with all entities.""" await self.hass.services.async_call( - DOMAIN, + FAN_DOMAIN, service, {**data, ATTR_ENTITY_ID: self._fans[support_flag]}, blocking=True, @@ -228,7 +227,7 @@ class FanGroup(GroupEntity, FanEntity): async def _async_call_all_entities(self, service: str) -> None: """Call a service with all entities.""" await self.hass.services.async_call( - DOMAIN, + FAN_DOMAIN, service, {ATTR_ENTITY_ID: self._entity_ids}, blocking=True, diff --git a/homeassistant/components/group/icons.json b/homeassistant/components/group/icons.json index 8cca94e08e1..577d1effac0 100644 --- a/homeassistant/components/group/icons.json +++ b/homeassistant/components/group/icons.json @@ -1,7 +1,13 @@ { "services": { - "reload": "mdi:reload", - "set": "mdi:home-group-plus", - "remove": "mdi:home-group-remove" + "reload": { + "service": "mdi:reload" + }, + "set": { + "service": "mdi:home-group-plus" + }, + "remove": { + "service": "mdi:home-group-remove" + } } } diff --git a/homeassistant/components/group/lock.py b/homeassistant/components/group/lock.py index 8bb7b18ce29..e22e1ecd85c 100644 --- a/homeassistant/components/group/lock.py +++ b/homeassistant/components/group/lock.py @@ -8,10 +8,11 @@ from typing import Any import voluptuous as 
vol from homeassistant.components.lock import ( - DOMAIN, + DOMAIN as LOCK_DOMAIN, PLATFORM_SCHEMA as LOCK_PLATFORM_SCHEMA, LockEntity, LockEntityFeature, + LockState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -22,14 +23,8 @@ from homeassistant.const import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, - STATE_UNLOCKING, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv, entity_registry as er @@ -45,7 +40,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(LOCK_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -131,7 +126,7 @@ class LockGroup(GroupEntity, LockEntity): _LOGGER.debug("Forwarded lock command: %s", data) await self.hass.services.async_call( - DOMAIN, + LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True, @@ -142,7 +137,7 @@ class LockGroup(GroupEntity, LockEntity): """Forward the unlock command to all locks in the group.""" data = {ATTR_ENTITY_ID: self._entity_ids} await self.hass.services.async_call( - DOMAIN, + LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True, @@ -153,7 +148,7 @@ class LockGroup(GroupEntity, LockEntity): """Forward the open command to all locks in the group.""" data = {ATTR_ENTITY_ID: self._entity_ids} await self.hass.services.async_call( - DOMAIN, + LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True, @@ -183,11 +178,11 @@ class LockGroup(GroupEntity, LockEntity): self._attr_is_locked = None else: # Set attributes based on member states and let the lock entity sort out the correct state - self._attr_is_jammed = STATE_JAMMED in states - self._attr_is_locking = STATE_LOCKING in states - self._attr_is_opening = STATE_OPENING in states - self._attr_is_open = STATE_OPEN in states - self._attr_is_unlocking = STATE_UNLOCKING in states - self._attr_is_locked = all(state == STATE_LOCKED for state in states) + self._attr_is_jammed = LockState.JAMMED in states + self._attr_is_locking = LockState.LOCKING in states + self._attr_is_opening = LockState.OPENING in states + self._attr_is_open = LockState.OPEN in states + self._attr_is_unlocking = LockState.UNLOCKING in states + self._attr_is_locked = all(state == LockState.LOCKED for state in states) self._attr_available = any(state != STATE_UNAVAILABLE for state in states) diff --git a/homeassistant/components/group/media_player.py b/homeassistant/components/group/media_player.py index 7d2ce46b107..ab8ee64b3e1 100644 --- a/homeassistant/components/group/media_player.py +++ b/homeassistant/components/group/media_player.py @@ -15,7 +15,7 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, - DOMAIN, + DOMAIN as MEDIA_PLAYER_DOMAIN, PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, @@ -73,7 +73,7 @@ DEFAULT_NAME = "Media Group" PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(MEDIA_PLAYER_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -274,7 +274,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Clear players playlist.""" data 
= {ATTR_ENTITY_ID: self._features[KEY_CLEAR_PLAYLIST]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_CLEAR_PLAYLIST, data, context=self._context, @@ -284,7 +284,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send next track command.""" data = {ATTR_ENTITY_ID: self._features[KEY_TRACKS]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_NEXT_TRACK, data, context=self._context, @@ -294,7 +294,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send pause command.""" data = {ATTR_ENTITY_ID: self._features[KEY_PAUSE_PLAY_STOP]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PAUSE, data, context=self._context, @@ -304,7 +304,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send play command.""" data = {ATTR_ENTITY_ID: self._features[KEY_PAUSE_PLAY_STOP]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PLAY, data, context=self._context, @@ -314,7 +314,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send previous track command.""" data = {ATTR_ENTITY_ID: self._features[KEY_TRACKS]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, data, context=self._context, @@ -327,7 +327,7 @@ class MediaPlayerGroup(MediaPlayerEntity): ATTR_MEDIA_SEEK_POSITION: position, } await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_SEEK, data, context=self._context, @@ -337,7 +337,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send stop command.""" data = {ATTR_ENTITY_ID: self._features[KEY_PAUSE_PLAY_STOP]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_STOP, data, context=self._context, @@ -350,7 +350,7 @@ class MediaPlayerGroup(MediaPlayerEntity): ATTR_MEDIA_VOLUME_MUTED: mute, } await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, data, context=self._context, @@ -368,7 +368,7 @@ class MediaPlayerGroup(MediaPlayerEntity): if kwargs: data.update(kwargs) await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_PLAY_MEDIA, data, context=self._context, @@ -381,7 +381,7 @@ class MediaPlayerGroup(MediaPlayerEntity): ATTR_MEDIA_SHUFFLE: shuffle, } await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_SHUFFLE_SET, data, context=self._context, @@ -391,7 +391,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Forward the turn_on command to all media in the media group.""" data = {ATTR_ENTITY_ID: self._features[KEY_ON_OFF]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_TURN_ON, data, context=self._context, @@ -404,7 +404,7 @@ class MediaPlayerGroup(MediaPlayerEntity): ATTR_MEDIA_VOLUME_LEVEL: volume, } await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_SET, data, context=self._context, @@ -414,7 +414,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Forward the turn_off command to all media in the media group.""" data = {ATTR_ENTITY_ID: self._features[KEY_ON_OFF]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_TURN_OFF, data, context=self._context, diff --git a/homeassistant/components/group/notify.py b/homeassistant/components/group/notify.py index ecbfec0bdb8..fdef327cb73 100644 --- a/homeassistant/components/group/notify.py +++ b/homeassistant/components/group/notify.py @@ -13,7 +13,7 @@ from homeassistant.components.notify import ( ATTR_DATA, ATTR_MESSAGE, ATTR_TITLE, - DOMAIN, + 
DOMAIN as NOTIFY_DOMAIN, PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA, SERVICE_SEND_MESSAGE, BaseNotificationService, @@ -115,7 +115,10 @@ class GroupNotifyPlatform(BaseNotificationService): tasks.append( asyncio.create_task( self.hass.services.async_call( - DOMAIN, entity[CONF_ACTION], sending_payload, blocking=True + NOTIFY_DOMAIN, + entity[CONF_ACTION], + sending_payload, + blocking=True, ) ) ) @@ -172,7 +175,7 @@ class NotifyGroup(GroupEntity, NotifyEntity): async def async_send_message(self, message: str, title: str | None = None) -> None: """Send a message to all members of the group.""" await self.hass.services.async_call( - DOMAIN, + NOTIFY_DOMAIN, SERVICE_SEND_MESSAGE, { ATTR_MESSAGE: message, diff --git a/homeassistant/components/group/registry.py b/homeassistant/components/group/registry.py index aba1b299ced..2f3c4aa5221 100644 --- a/homeassistant/components/group/registry.py +++ b/homeassistant/components/group/registry.py @@ -8,8 +8,10 @@ from __future__ import annotations from dataclasses import dataclass from typing import Protocol +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.climate import HVACMode -from homeassistant.components.vacuum import STATE_CLEANING, STATE_ERROR, STATE_RETURNING +from homeassistant.components.lock import LockState +from homeassistant.components.vacuum import VacuumActivity from homeassistant.components.water_heater import ( STATE_ECO, STATE_ELECTRIC, @@ -19,28 +21,17 @@ from homeassistant.components.water_heater import ( STATE_PERFORMANCE, ) from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_TRIGGERED, STATE_CLOSED, STATE_HOME, STATE_IDLE, - STATE_LOCKED, - STATE_LOCKING, STATE_NOT_HOME, STATE_OFF, STATE_OK, STATE_ON, STATE_OPEN, - STATE_OPENING, STATE_PAUSED, STATE_PLAYING, STATE_PROBLEM, - STATE_UNLOCKED, - STATE_UNLOCKING, Platform, ) from homeassistant.core import HomeAssistant, callback @@ -64,12 +55,12 @@ ON_OFF_STATES: dict[Platform | str, tuple[set[str], str, str]] = { Platform.ALARM_CONTROL_PANEL: ( { STATE_ON, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.TRIGGERED, }, STATE_ON, STATE_OFF, @@ -90,14 +81,14 @@ ON_OFF_STATES: dict[Platform | str, tuple[set[str], str, str]] = { Platform.DEVICE_TRACKER: ({STATE_HOME}, STATE_HOME, STATE_NOT_HOME), Platform.LOCK: ( { - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, + LockState.LOCKING, + LockState.OPEN, + LockState.OPENING, + LockState.UNLOCKED, + LockState.UNLOCKING, }, - STATE_UNLOCKED, - STATE_LOCKED, + LockState.UNLOCKED, + LockState.LOCKED, ), Platform.MEDIA_PLAYER: ( { @@ -114,9 +105,9 @@ ON_OFF_STATES: dict[Platform | str, tuple[set[str], str, str]] = { Platform.VACUUM: ( { STATE_ON, - STATE_CLEANING, - STATE_RETURNING, - STATE_ERROR, + VacuumActivity.CLEANING, + VacuumActivity.RETURNING, + VacuumActivity.ERROR, }, STATE_ON, STATE_OFF, @@ -160,8 +151,7 @@ def _process_group_platform( hass: HomeAssistant, domain: str, platform: GroupProtocol ) -> None: """Process a group platform.""" - registry: 
GroupIntegrationRegistry = hass.data[REG_KEY] - platform.async_describe_on_off_states(hass, registry) + platform.async_describe_on_off_states(hass, hass.data[REG_KEY]) @dataclass(frozen=True, slots=True) diff --git a/homeassistant/components/group/sensor.py b/homeassistant/components/group/sensor.py index a99ed9dad63..4a3e191e511 100644 --- a/homeassistant/components/group/sensor.py +++ b/homeassistant/components/group/sensor.py @@ -16,7 +16,7 @@ from homeassistant.components.sensor import ( CONF_STATE_CLASS, DEVICE_CLASS_UNITS, DEVICE_CLASSES_SCHEMA, - DOMAIN, + DOMAIN as SENSOR_DOMAIN, PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, STATE_CLASSES_SCHEMA, UNIT_CONVERTERS, @@ -36,14 +36,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import ( - CALLBACK_TYPE, - Event, - EventStateChangedData, - HomeAssistant, - State, - callback, -) +from homeassistant.core import HomeAssistant, State, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.entity import ( @@ -52,7 +45,6 @@ from homeassistant.helpers.entity import ( get_unit_of_measurement, ) from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_track_state_change_event from homeassistant.helpers.issue_registry import ( IssueSeverity, async_create_issue, @@ -96,7 +88,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_ENTITIES): cv.entities_domain( - [DOMAIN, NUMBER_DOMAIN, INPUT_NUMBER_DOMAIN] + [SENSOR_DOMAIN, NUMBER_DOMAIN, INPUT_NUMBER_DOMAIN] ), vol.Required(CONF_TYPE): vol.All(cv.string, vol.In(SENSOR_TYPES.values())), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, @@ -180,6 +172,17 @@ def async_create_preview_sensor( ) +def _has_numeric_state(hass: HomeAssistant, entity_id: str) -> bool: + """Test if state is numeric.""" + if not (state := hass.states.get(entity_id)): + return False + try: + float(state.state) + except ValueError: + return False + return True + + def calc_min( sensor_values: list[tuple[str, float, State]], ) -> tuple[dict[str, str | None], float | None]: @@ -332,12 +335,11 @@ class SensorGroup(GroupEntity, SensorEntity): self.hass = hass self._entity_ids = entity_ids self._sensor_type = sensor_type - self._state_class = state_class - self._device_class = device_class - self._native_unit_of_measurement = unit_of_measurement + self._configured_state_class = state_class + self._configured_device_class = device_class + self._configured_unit_of_measurement = unit_of_measurement self._valid_units: set[str | None] = set() self._can_convert: bool = False - self.calculate_attributes_later: CALLBACK_TYPE | None = None self._attr_name = name if name == DEFAULT_NAME: self._attr_name = f"{DEFAULT_NAME} {sensor_type}".capitalize() @@ -352,39 +354,25 @@ class SensorGroup(GroupEntity, SensorEntity): self._state_incorrect: set[str] = set() self._extra_state_attribute: dict[str, Any] = {} - async def async_added_to_hass(self) -> None: - """When added to hass.""" - for entity_id in self._entity_ids: - if self.hass.states.get(entity_id) is None: - self.calculate_attributes_later = async_track_state_change_event( - self.hass, self._entity_ids, self.calculate_state_attributes - ) - break - if not self.calculate_attributes_later: - await self.calculate_state_attributes() - await super().async_added_to_hass() - - async def calculate_state_attributes( - self, 
event: Event[EventStateChangedData] | None = None - ) -> None: + def calculate_state_attributes(self, valid_state_entities: list[str]) -> None: """Calculate state attributes.""" - for entity_id in self._entity_ids: - if self.hass.states.get(entity_id) is None: - return - if self.calculate_attributes_later: - self.calculate_attributes_later() - self.calculate_attributes_later = None - self._attr_state_class = self._calculate_state_class(self._state_class) - self._attr_device_class = self._calculate_device_class(self._device_class) + self._attr_state_class = self._calculate_state_class( + self._configured_state_class, valid_state_entities + ) + self._attr_device_class = self._calculate_device_class( + self._configured_device_class, valid_state_entities + ) self._attr_native_unit_of_measurement = self._calculate_unit_of_measurement( - self._native_unit_of_measurement + self._configured_unit_of_measurement, valid_state_entities ) self._valid_units = self._get_valid_units() @callback def async_update_group_state(self) -> None: """Query all members and determine the sensor group state.""" + self.calculate_state_attributes(self._get_valid_entities()) states: list[StateType] = [] + valid_units = self._valid_units valid_states: list[bool] = [] sensor_values: list[tuple[str, float, State]] = [] for entity_id in self._entity_ids: @@ -392,20 +380,18 @@ states.append(state.state) try: numeric_state = float(state.state) - if ( - self._valid_units - and (uom := state.attributes["unit_of_measurement"]) - in self._valid_units - and self._can_convert is True - ): + uom = state.attributes.get("unit_of_measurement") + + # Convert the state to the native unit of measurement when we have valid units + # and a correct device class + if valid_units and uom in valid_units and self._can_convert is True: numeric_state = UNIT_CONVERTERS[self.device_class].convert( numeric_state, uom, self.native_unit_of_measurement ) - if ( - self._valid_units - and (uom := state.attributes["unit_of_measurement"]) - not in self._valid_units - ): + + # If we have valid units and the entity's unit does not match, + # we raise, which skips the state and logs a warning once + if valid_units and uom not in valid_units: raise HomeAssistantError("Not a valid unit") # noqa: TRY301 sensor_values.append((entity_id, numeric_state, state)) @@ -480,7 +466,9 @@ return None def _calculate_state_class( - self, state_class: SensorStateClass | None + self, + state_class: SensorStateClass | None, + valid_state_entities: list[str], ) -> SensorStateClass | None: """Calculate state class. 
@@ -491,8 +479,18 @@ class SensorGroup(GroupEntity, SensorEntity): """ if state_class: return state_class + + if not valid_state_entities: + return None + + if not self._ignore_non_numeric and len(valid_state_entities) < len( + self._entity_ids + ): + # Only return state class if all states are valid when not ignoring non numeric + return None + state_classes: list[SensorStateClass] = [] - for entity_id in self._entity_ids: + for entity_id in valid_state_entities: try: _state_class = get_capability(self.hass, entity_id, "state_class") except HomeAssistantError: @@ -503,7 +501,7 @@ class SensorGroup(GroupEntity, SensorEntity): if all(x == state_classes[0] for x in state_classes): async_delete_issue( - self.hass, DOMAIN, f"{self.entity_id}_state_classes_not_matching" + self.hass, SENSOR_DOMAIN, f"{self.entity_id}_state_classes_not_matching" ) return state_classes[0] async_create_issue( @@ -523,7 +521,9 @@ return None def _calculate_device_class( - self, device_class: SensorDeviceClass | None + self, + device_class: SensorDeviceClass | None, + valid_state_entities: list[str], ) -> SensorDeviceClass | None: """Calculate device class. @@ -534,8 +534,18 @@ class SensorGroup(GroupEntity, SensorEntity): """ if device_class: return device_class + + if not valid_state_entities: + return None + + if not self._ignore_non_numeric and len(valid_state_entities) < len( + self._entity_ids + ): + # Only return device class if all states are valid when not ignoring non numeric + return None + device_classes: list[SensorDeviceClass] = [] - for entity_id in self._entity_ids: + for entity_id in valid_state_entities: try: _device_class = get_device_class(self.hass, entity_id) except HomeAssistantError: @@ -546,7 +556,9 @@ class SensorGroup(GroupEntity, SensorEntity): if all(x == device_classes[0] for x in device_classes): async_delete_issue( - self.hass, DOMAIN, f"{self.entity_id}_device_classes_not_matching" + self.hass, + SENSOR_DOMAIN, + f"{self.entity_id}_device_classes_not_matching", ) return device_classes[0] async_create_issue( @@ -566,7 +578,9 @@ return None def _calculate_unit_of_measurement( - self, unit_of_measurement: str | None + self, + unit_of_measurement: str | None, + valid_state_entities: list[str], ) -> str | None: """Calculate the unit of measurement. @@ -577,8 +591,17 @@ if unit_of_measurement: return unit_of_measurement + if not valid_state_entities: + return None + + if not self._ignore_non_numeric and len(valid_state_entities) < len( + self._entity_ids + ): + # Only return unit of measurement if all states are valid when not ignoring non numeric + return None + unit_of_measurements: list[str] = [] - for entity_id in self._entity_ids: + for entity_id in valid_state_entities: try: _unit_of_measurement = get_unit_of_measurement(self.hass, entity_id) except HomeAssistantError: @@ -614,10 +637,14 @@ ) ): async_delete_issue( - self.hass, DOMAIN, f"{self.entity_id}_uoms_not_matching_device_class" + self.hass, + SENSOR_DOMAIN, + f"{self.entity_id}_uoms_not_matching_device_class", ) async_delete_issue( - self.hass, DOMAIN, f"{self.entity_id}_uoms_not_matching_no_device_class" + self.hass, + SENSOR_DOMAIN, + f"{self.entity_id}_uoms_not_matching_no_device_class", ) return unit_of_measurements[0] @@ -659,19 +686,31 @@ class SensorGroup(GroupEntity, SensorEntity): If device class is set and compatible unit of measurements. 
If device class is not set, use one unit of measurement. + Only calculate valid units if there are no valid units set. """ - if ( - device_class := self.device_class - ) in UNIT_CONVERTERS and self.native_unit_of_measurement: + if (valid_units := self._valid_units) and not self._ignore_non_numeric: + # If we have valid units already and not using ignore_non_numeric + # we should not recalculate. + return valid_units + + native_uom = self.native_unit_of_measurement + if (device_class := self.device_class) in UNIT_CONVERTERS and native_uom: self._can_convert = True return UNIT_CONVERTERS[device_class].VALID_UNITS - if ( - device_class - and (device_class) in DEVICE_CLASS_UNITS - and self.native_unit_of_measurement - ): + if device_class and (device_class) in DEVICE_CLASS_UNITS and native_uom: valid_uoms: set = DEVICE_CLASS_UNITS[device_class] return valid_uoms - if device_class is None and self.native_unit_of_measurement: - return {self.native_unit_of_measurement} + if device_class is None and native_uom: + return {native_uom} return set() + + def _get_valid_entities( + self, + ) -> list[str]: + """Return list of valid entities.""" + + return [ + entity_id + for entity_id in self._entity_ids + if _has_numeric_state(self.hass, entity_id) + ] diff --git a/homeassistant/components/group/strings.json b/homeassistant/components/group/strings.json index dbb6fb01f7b..cf694af0d98 100644 --- a/homeassistant/components/group/strings.json +++ b/homeassistant/components/group/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Group", + "title": "Create Group", "description": "Groups allow you to create a new entity that represents multiple entities of the same type.", "menu_options": { "binary_sensor": "Binary sensor group", @@ -283,20 +283,20 @@ }, "issues": { "uoms_not_matching_device_class": { - "title": "Unit of measurements are not correct", - "description": "Unit of measurements `{uoms}` of input sensors `{source_entities}` are not compatible and can't be converted with the device class `{device_class}` of sensor group `{entity_id}`.\n\nPlease correct the unit of measurements on the source entities and reload the group sensor to fix this issue." + "title": "Units of measurement are not correct", + "description": "Units of measurement `{uoms}` of input sensors `{source_entities}` are not compatible and can't be converted with the device class `{device_class}` of sensor group `{entity_id}`.\n\nPlease correct the unit of measurement on the source entities and reload the group sensor to fix this issue." }, "uoms_not_matching_no_device_class": { - "title": "Unit of measurements is not correct", - "description": "Unit of measurements `{uoms}` of input sensors `{source_entities}` are not compatible when not using a device class on sensor group `{entity_id}`.\n\nPlease correct the unit of measurements on the source entities or set a proper device class on the sensor group and reload the group sensor to fix this issue." + "title": "Units of measurement are not correct", + "description": "Units of measurement `{uoms}` of input sensors `{source_entities}` are not compatible when not using a device class on sensor group `{entity_id}`.\n\nPlease correct the unit of measurement on the source entities or set a proper device class on the sensor group and reload the group sensor to fix this issue." 
}, "device_classes_not_matching": { - "title": "Device classes is not correct", - "description": "Device classes `{device_classes}` on source entities `{source_entities}` needs to be same for sensor group `{entity_id}`.\n\nPlease correct the device classes on the source entities and reload the group sensor to fix this issue." + "title": "Device classes are not correct", + "description": "Device classes `{device_classes}` on source entities `{source_entities}` need to be identical for sensor group `{entity_id}`.\n\nPlease correct the device classes on the source entities and reload the group sensor to fix this issue." }, "state_classes_not_matching": { - "title": "State classes is not correct", - "description": "State classes `{state_classes}` on source entities `{source_entities}` needs to be same for sensor group `{entity_id}`.\n\nPlease correct the state classes on the source entities and reload the group sensor to fix this issue." + "title": "State classes are not correct", + "description": "State classes `{state_classes}` on source entities `{source_entities}` need to be identical for sensor group `{entity_id}`.\n\nPlease correct the state classes on the source entities and reload the group sensor to fix this issue." } } } diff --git a/homeassistant/components/group/switch.py b/homeassistant/components/group/switch.py index 9db264c8041..101c42d354f 100644 --- a/homeassistant/components/group/switch.py +++ b/homeassistant/components/group/switch.py @@ -8,7 +8,7 @@ from typing import Any import voluptuous as vol from homeassistant.components.switch import ( - DOMAIN, + DOMAIN as SWITCH_DOMAIN, PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA, SwitchEntity, ) @@ -39,7 +39,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(SWITCH_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, vol.Optional(CONF_ALL, default=False): cv.boolean, @@ -132,7 +132,7 @@ class SwitchGroup(GroupEntity, SwitchEntity): _LOGGER.debug("Forwarded turn_on command: %s", data) await self.hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, data, blocking=True, @@ -143,7 +143,7 @@ class SwitchGroup(GroupEntity, SwitchEntity): """Forward the turn_off command to all switches in the group.""" data = {ATTR_ENTITY_ID: self._entity_ids} await self.hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, data, blocking=True, diff --git a/homeassistant/components/growatt_server/config_flow.py b/homeassistant/components/growatt_server/config_flow.py index 8123d7ff067..e676d8fae32 100644 --- a/homeassistant/components/growatt_server/config_flow.py +++ b/homeassistant/components/growatt_server/config_flow.py @@ -23,9 +23,10 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + api: growattServer.GrowattApi + def __init__(self) -> None: """Initialise growatt server flow.""" - self.api: growattServer.GrowattApi | None = None self.user_id = None self.data: dict[str, Any] = {} @@ -70,7 +71,9 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): self.data = user_input return await self.async_step_plant() - async def async_step_plant(self, user_input=None): + async def async_step_plant( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle adding a "plant" to Home Assistant.""" plant_info = await self.hass.async_add_executor_job( self.api.plant_list, self.user_id @@ 
-86,7 +89,8 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="plant", data_schema=data_schema) - if user_input is None and len(plant_info["data"]) == 1: + if user_input is None: + # single plant => mark it as selected user_input = {CONF_PLANT_ID: plant_info["data"][0]["plantId"]} user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]] diff --git a/homeassistant/components/growatt_server/const.py b/homeassistant/components/growatt_server/const.py index fe8622bea7f..4ad62aa812b 100644 --- a/homeassistant/components/growatt_server/const.py +++ b/homeassistant/components/growatt_server/const.py @@ -12,6 +12,7 @@ SERVER_URLS = [ "https://openapi.growatt.com/", # Other regional server "https://openapi-cn.growatt.com/", # Chinese server "https://openapi-us.growatt.com/", # North American server + "https://openapi-au.growatt.com/", # Australia Server "http://server.smten.com/", # smten server ] diff --git a/homeassistant/components/growatt_server/sensor.py b/homeassistant/components/growatt_server/sensor/__init__.py similarity index 97% rename from homeassistant/components/growatt_server/sensor.py rename to homeassistant/components/growatt_server/sensor/__init__.py index 9c680b5d4f8..e77660e6a3a 100644 --- a/homeassistant/components/growatt_server/sensor.py +++ b/homeassistant/components/growatt_server/sensor/__init__.py @@ -17,7 +17,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import Throttle, dt as dt_util -from .const import ( +from ..const import ( CONF_PLANT_ID, DEFAULT_PLANT_ID, DEFAULT_URL, @@ -25,12 +25,12 @@ from .const import ( DOMAIN, LOGIN_INVALID_AUTH_CODE, ) -from .sensor_types.inverter import INVERTER_SENSOR_TYPES -from .sensor_types.mix import MIX_SENSOR_TYPES -from .sensor_types.sensor_entity_description import GrowattSensorEntityDescription -from .sensor_types.storage import STORAGE_SENSOR_TYPES -from .sensor_types.tlx import TLX_SENSOR_TYPES -from .sensor_types.total import TOTAL_SENSOR_TYPES +from .inverter import INVERTER_SENSOR_TYPES +from .mix import MIX_SENSOR_TYPES +from .sensor_entity_description import GrowattSensorEntityDescription +from .storage import STORAGE_SENSOR_TYPES +from .tlx import TLX_SENSOR_TYPES +from .total import TOTAL_SENSOR_TYPES _LOGGER = logging.getLogger(__name__) @@ -72,7 +72,7 @@ async def async_setup_entry( # If the URL has been deprecated then change to the default instead if url in DEPRECATED_URLS: - _LOGGER.info( + _LOGGER.warning( "URL: %s has been deprecated, migrating to the latest default: %s", url, DEFAULT_URL, diff --git a/homeassistant/components/growatt_server/sensor_types/inverter.py b/homeassistant/components/growatt_server/sensor/inverter.py similarity index 100% rename from homeassistant/components/growatt_server/sensor_types/inverter.py rename to homeassistant/components/growatt_server/sensor/inverter.py diff --git a/homeassistant/components/growatt_server/sensor_types/mix.py b/homeassistant/components/growatt_server/sensor/mix.py similarity index 100% rename from homeassistant/components/growatt_server/sensor_types/mix.py rename to homeassistant/components/growatt_server/sensor/mix.py diff --git a/homeassistant/components/growatt_server/sensor_types/sensor_entity_description.py b/homeassistant/components/growatt_server/sensor/sensor_entity_description.py similarity index 100% rename from homeassistant/components/growatt_server/sensor_types/sensor_entity_description.py 
rename to homeassistant/components/growatt_server/sensor/sensor_entity_description.py diff --git a/homeassistant/components/growatt_server/sensor_types/storage.py b/homeassistant/components/growatt_server/sensor/storage.py similarity index 100% rename from homeassistant/components/growatt_server/sensor_types/storage.py rename to homeassistant/components/growatt_server/sensor/storage.py diff --git a/homeassistant/components/growatt_server/sensor_types/tlx.py b/homeassistant/components/growatt_server/sensor/tlx.py similarity index 100% rename from homeassistant/components/growatt_server/sensor_types/tlx.py rename to homeassistant/components/growatt_server/sensor/tlx.py diff --git a/homeassistant/components/growatt_server/sensor_types/total.py b/homeassistant/components/growatt_server/sensor/total.py similarity index 100% rename from homeassistant/components/growatt_server/sensor_types/total.py rename to homeassistant/components/growatt_server/sensor/total.py diff --git a/homeassistant/components/growatt_server/sensor_types/__init__.py b/homeassistant/components/growatt_server/sensor_types/__init__.py deleted file mode 100644 index 3f5be3be7f5..00000000000 --- a/homeassistant/components/growatt_server/sensor_types/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Sensor types for supported Growatt systems.""" diff --git a/homeassistant/components/gstreamer/manifest.json b/homeassistant/components/gstreamer/manifest.json index 95df94ef834..3ea9010a9d7 100644 --- a/homeassistant/components/gstreamer/manifest.json +++ b/homeassistant/components/gstreamer/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/gstreamer", "iot_class": "local_push", "loggers": ["gsp"], + "quality_scale": "legacy", "requirements": ["gstreamer-player==1.1.2"] } diff --git a/homeassistant/components/gtfs/manifest.json b/homeassistant/components/gtfs/manifest.json index 73a5998ea92..3bf41a1c763 100644 --- a/homeassistant/components/gtfs/manifest.json +++ b/homeassistant/components/gtfs/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/gtfs", "iot_class": "local_polling", "loggers": ["pygtfs"], + "quality_scale": "legacy", "requirements": ["pygtfs==0.1.9"] } diff --git a/homeassistant/components/guardian/__init__.py b/homeassistant/components/guardian/__init__.py index 812c54d76a6..c1cbb4c0e5a 100644 --- a/homeassistant/components/guardian/__init__.py +++ b/homeassistant/components/guardian/__init__.py @@ -24,10 +24,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( API_SENSOR_PAIR_DUMP, @@ -357,70 +354,3 @@ class PairedSensorManager: config_entry_id=self._entry.entry_id, identifiers={(DOMAIN, uid)} ) dev_reg.async_remove_device(device.id) - - -class GuardianEntity(CoordinatorEntity[GuardianDataUpdateCoordinator]): - """Define a base Guardian entity.""" - - _attr_has_entity_name = True - - def __init__( - self, coordinator: GuardianDataUpdateCoordinator, description: EntityDescription - ) -> None: - """Initialize.""" - super().__init__(coordinator) - - self.entity_description = 
description - - -class PairedSensorEntity(GuardianEntity): - """Define a Guardian paired sensor entity.""" - - def __init__( - self, - entry: ConfigEntry, - coordinator: GuardianDataUpdateCoordinator, - description: EntityDescription, - ) -> None: - """Initialize.""" - super().__init__(coordinator, description) - - paired_sensor_uid = coordinator.data["uid"] - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, paired_sensor_uid)}, - manufacturer="Elexa", - model=coordinator.data["codename"], - name=f"Guardian paired sensor {paired_sensor_uid}", - via_device=(DOMAIN, entry.data[CONF_UID]), - ) - self._attr_unique_id = f"{paired_sensor_uid}_{description.key}" - - -@dataclass(frozen=True, kw_only=True) -class ValveControllerEntityDescription(EntityDescription): - """Describe a Guardian valve controller entity.""" - - api_category: str - - -class ValveControllerEntity(GuardianEntity): - """Define a Guardian valve controller entity.""" - - def __init__( - self, - entry: ConfigEntry, - coordinators: dict[str, GuardianDataUpdateCoordinator], - description: ValveControllerEntityDescription, - ) -> None: - """Initialize.""" - super().__init__(coordinators[description.api_category], description) - - self._diagnostics_coordinator = coordinators[API_SYSTEM_DIAGNOSTICS] - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, entry.data[CONF_UID])}, - manufacturer="Elexa", - model=self._diagnostics_coordinator.data["firmware"], - name=f"Guardian valve controller {entry.data[CONF_UID]}", - ) - self._attr_unique_id = f"{entry.data[CONF_UID]}_{description.key}" diff --git a/homeassistant/components/guardian/binary_sensor.py b/homeassistant/components/guardian/binary_sensor.py index c3621ea2d79..84bb61da0e5 100644 --- a/homeassistant/components/guardian/binary_sensor.py +++ b/homeassistant/components/guardian/binary_sensor.py @@ -18,12 +18,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ( - GuardianData, - PairedSensorEntity, - ValveControllerEntity, - ValveControllerEntityDescription, -) +from . import GuardianData from .const import ( API_SYSTEM_ONBOARD_SENSOR_STATUS, CONF_UID, @@ -31,6 +26,11 @@ from .const import ( SIGNAL_PAIRED_SENSOR_COORDINATOR_ADDED, ) from .coordinator import GuardianDataUpdateCoordinator +from .entity import ( + PairedSensorEntity, + ValveControllerEntity, + ValveControllerEntityDescription, +) from .util import ( EntityDomainReplacementStrategy, async_finish_entity_domain_replacements, diff --git a/homeassistant/components/guardian/button.py b/homeassistant/components/guardian/button.py index 8313ad23007..f4881a9d94b 100644 --- a/homeassistant/components/guardian/button.py +++ b/homeassistant/components/guardian/button.py @@ -18,8 +18,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import GuardianData, ValveControllerEntity, ValveControllerEntityDescription +from . 
import GuardianData from .const import API_SYSTEM_DIAGNOSTICS, DOMAIN +from .entity import ValveControllerEntity, ValveControllerEntityDescription from .util import convert_exceptions_to_homeassistant_error diff --git a/homeassistant/components/guardian/config_flow.py b/homeassistant/components/guardian/config_flow.py index e73e6c586ce..c4146d72469 100644 --- a/homeassistant/components/guardian/config_flow.py +++ b/homeassistant/components/guardian/config_flow.py @@ -111,7 +111,7 @@ class GuardianConfigFlow(ConfigFlow, domain=DOMAIN): await self._async_set_unique_id( async_get_pin_from_uid(discovery_info.macaddress.replace(":", "").upper()) ) - return await self._async_handle_discovery() + return await self.async_step_discovery_confirm() async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -123,17 +123,6 @@ class GuardianConfigFlow(ConfigFlow, domain=DOMAIN): } pin = async_get_pin_from_discovery_hostname(discovery_info.hostname) await self._async_set_unique_id(pin) - return await self._async_handle_discovery() - - async def _async_handle_discovery(self) -> ConfigFlowResult: - """Handle any discovery.""" - self.context[CONF_IP_ADDRESS] = self.discovery_info[CONF_IP_ADDRESS] - if any( - self.context[CONF_IP_ADDRESS] == flow["context"][CONF_IP_ADDRESS] - for flow in self._async_in_progress() - ): - return self.async_abort(reason="already_in_progress") - return await self.async_step_discovery_confirm() async def async_step_discovery_confirm( diff --git a/homeassistant/components/guardian/entity.py b/homeassistant/components/guardian/entity.py new file mode 100644 index 00000000000..fca0afeda0e --- /dev/null +++ b/homeassistant/components/guardian/entity.py @@ -0,0 +1,80 @@ +"""The Elexa Guardian integration.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from homeassistant.config_entries import ConfigEntry +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import API_SYSTEM_DIAGNOSTICS, CONF_UID, DOMAIN +from .coordinator import GuardianDataUpdateCoordinator + + +class GuardianEntity(CoordinatorEntity[GuardianDataUpdateCoordinator]): + """Define a base Guardian entity.""" + + _attr_has_entity_name = True + + def __init__( + self, coordinator: GuardianDataUpdateCoordinator, description: EntityDescription + ) -> None: + """Initialize.""" + super().__init__(coordinator) + + self.entity_description = description + + +class PairedSensorEntity(GuardianEntity): + """Define a Guardian paired sensor entity.""" + + def __init__( + self, + entry: ConfigEntry, + coordinator: GuardianDataUpdateCoordinator, + description: EntityDescription, + ) -> None: + """Initialize.""" + super().__init__(coordinator, description) + + paired_sensor_uid = coordinator.data["uid"] + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, paired_sensor_uid)}, + manufacturer="Elexa", + model=coordinator.data["codename"], + name=f"Guardian paired sensor {paired_sensor_uid}", + via_device=(DOMAIN, entry.data[CONF_UID]), + ) + self._attr_unique_id = f"{paired_sensor_uid}_{description.key}" + + +@dataclass(frozen=True, kw_only=True) +class ValveControllerEntityDescription(EntityDescription): + """Describe a Guardian valve controller entity.""" + + api_category: str + + +class ValveControllerEntity(GuardianEntity): + """Define a Guardian valve controller entity.""" + + def __init__( + self, + entry: 
ConfigEntry, + coordinators: dict[str, GuardianDataUpdateCoordinator], + description: ValveControllerEntityDescription, + ) -> None: + """Initialize.""" + super().__init__(coordinators[description.api_category], description) + + self._diagnostics_coordinator = coordinators[API_SYSTEM_DIAGNOSTICS] + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, entry.data[CONF_UID])}, + manufacturer="Elexa", + model=self._diagnostics_coordinator.data["firmware"], + name=f"Guardian valve controller {entry.data[CONF_UID]}", + ) + self._attr_unique_id = f"{entry.data[CONF_UID]}_{description.key}" diff --git a/homeassistant/components/guardian/icons.json b/homeassistant/components/guardian/icons.json index 4740366e993..fe44eb0460b 100644 --- a/homeassistant/components/guardian/icons.json +++ b/homeassistant/components/guardian/icons.json @@ -18,8 +18,14 @@ } }, "services": { - "pair_sensor": "mdi:link-variant", - "unpair_sensor": "mdi:link-variant-remove", - "upgrade_firmware": "mdi:update" + "pair_sensor": { + "service": "mdi:link-variant" + }, + "unpair_sensor": { + "service": "mdi:link-variant-remove" + }, + "upgrade_firmware": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/guardian/sensor.py b/homeassistant/components/guardian/sensor.py index 448a7231df1..3f9547e652a 100644 --- a/homeassistant/components/guardian/sensor.py +++ b/homeassistant/components/guardian/sensor.py @@ -25,12 +25,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import ( - GuardianData, - PairedSensorEntity, - ValveControllerEntity, - ValveControllerEntityDescription, -) +from . import GuardianData from .const import ( API_SYSTEM_DIAGNOSTICS, API_SYSTEM_ONBOARD_SENSOR_STATUS, @@ -39,6 +34,11 @@ from .const import ( DOMAIN, SIGNAL_PAIRED_SENSOR_COORDINATOR_ADDED, ) +from .entity import ( + PairedSensorEntity, + ValveControllerEntity, + ValveControllerEntityDescription, +) SENSOR_KIND_AVG_CURRENT = "average_current" SENSOR_KIND_BATTERY = "battery" diff --git a/homeassistant/components/guardian/strings.json b/homeassistant/components/guardian/strings.json index e8622fe9d03..b1b72b71002 100644 --- a/homeassistant/components/guardian/strings.json +++ b/homeassistant/components/guardian/strings.json @@ -12,6 +12,9 @@ "description": "Do you want to set up this Guardian device?" } }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", diff --git a/homeassistant/components/guardian/switch.py b/homeassistant/components/guardian/switch.py index 25bc8115208..fccf4f55a1f 100644 --- a/homeassistant/components/guardian/switch.py +++ b/homeassistant/components/guardian/switch.py @@ -14,8 +14,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import GuardianData, ValveControllerEntity, ValveControllerEntityDescription +from . 
import GuardianData from .const import API_VALVE_STATUS, API_WIFI_STATUS, DOMAIN +from .entity import ValveControllerEntity, ValveControllerEntityDescription from .util import convert_exceptions_to_homeassistant_error from .valve import GuardianValveState diff --git a/homeassistant/components/guardian/util.py b/homeassistant/components/guardian/util.py index 4b9a2835474..69e79f6627e 100644 --- a/homeassistant/components/guardian/util.py +++ b/homeassistant/components/guardian/util.py @@ -18,7 +18,7 @@ from homeassistant.helpers import entity_registry as er from .const import LOGGER if TYPE_CHECKING: - from . import GuardianEntity + from .entity import GuardianEntity DEFAULT_UPDATE_INTERVAL = timedelta(seconds=30) @@ -55,7 +55,7 @@ def async_finish_entity_domain_replacements( continue old_entity_id = registry_entry.entity_id - LOGGER.info('Removing old entity: "%s"', old_entity_id) + LOGGER.debug('Removing old entity: "%s"', old_entity_id) ent_reg.async_remove(old_entity_id) diff --git a/homeassistant/components/guardian/valve.py b/homeassistant/components/guardian/valve.py index fcedc71f188..8c9749958bf 100644 --- a/homeassistant/components/guardian/valve.py +++ b/homeassistant/components/guardian/valve.py @@ -19,8 +19,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import GuardianData, ValveControllerEntity, ValveControllerEntityDescription +from . import GuardianData from .const import API_VALVE_STATUS, DOMAIN +from .entity import ValveControllerEntity, ValveControllerEntityDescription from .util import convert_exceptions_to_homeassistant_error VALVE_KIND_VALVE = "valve" diff --git a/homeassistant/components/habitica/__init__.py b/homeassistant/components/habitica/__init__.py index 468db8fbc42..5843e14d63e 100644 --- a/homeassistant/components/habitica/__init__.py +++ b/homeassistant/components/habitica/__init__.py @@ -1,112 +1,48 @@ """The habitica integration.""" from http import HTTPStatus -import logging from aiohttp import ClientResponseError from habitipy.aio import HabitipyAsync -import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - ATTR_NAME, + APPLICATION_NAME, CONF_API_KEY, CONF_NAME, - CONF_SENSORS, CONF_URL, + CONF_VERIFY_SSL, Platform, + __version__, ) -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType -from .const import ( - ATTR_ARGS, - ATTR_DATA, - ATTR_PATH, - CONF_API_USER, - DEFAULT_URL, - DOMAIN, - EVENT_API_CALL_SUCCESS, - SERVICE_API_CALL, -) +from .const import CONF_API_USER, DEVELOPER_ID, DOMAIN from .coordinator import HabiticaDataUpdateCoordinator +from .services import async_setup_services +from .types import HabiticaConfigEntry -_LOGGER = logging.getLogger(__name__) - -type HabiticaConfigEntry = ConfigEntry[HabiticaDataUpdateCoordinator] - -SENSORS_TYPES = ["name", "hp", "maxHealth", "mp", "maxMP", "exp", "toNextLevel", "lvl"] - -INSTANCE_SCHEMA = vol.All( - cv.deprecated(CONF_SENSORS), - vol.Schema( - { - vol.Optional(CONF_URL, default=DEFAULT_URL): cv.url, - vol.Optional(CONF_NAME): cv.string, - vol.Required(CONF_API_USER): 
cv.string, - vol.Required(CONF_API_KEY): cv.string, - vol.Optional(CONF_SENSORS, default=list(SENSORS_TYPES)): vol.All( - cv.ensure_list, vol.Unique(), [vol.In(list(SENSORS_TYPES))] - ), - } - ), -) - -has_unique_values = vol.Schema(vol.Unique()) -# because we want a handy alias +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -def has_all_unique_users(value): - """Validate that all API users are unique.""" - api_users = [user[CONF_API_USER] for user in value] - has_unique_values(api_users) - return value - - -def has_all_unique_users_names(value): - """Validate that all user's names are unique and set if any is set.""" - names = [user.get(CONF_NAME) for user in value] - if None in names and any(name is not None for name in names): - raise vol.Invalid("user names of all users must be set if any is set") - if not all(name is None for name in names): - has_unique_values(names) - return value - - -INSTANCE_LIST_SCHEMA = vol.All( - cv.ensure_list, has_all_unique_users, has_all_unique_users_names, [INSTANCE_SCHEMA] -) -CONFIG_SCHEMA = vol.Schema({DOMAIN: INSTANCE_LIST_SCHEMA}, extra=vol.ALLOW_EXTRA) - -PLATFORMS = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH, Platform.TODO] - -SERVICE_API_CALL_SCHEMA = vol.Schema( - { - vol.Required(ATTR_NAME): str, - vol.Required(ATTR_PATH): vol.All(cv.ensure_list, [str]), - vol.Optional(ATTR_ARGS): dict, - } -) +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CALENDAR, + Platform.SENSOR, + Platform.SWITCH, + Platform.TODO, +] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Habitica service.""" - configs = config.get(DOMAIN, []) - - for conf in configs: - if conf.get(CONF_URL) is None: - conf[CONF_URL] = DEFAULT_URL - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=conf - ) - ) + async_setup_services(hass) return True @@ -121,44 +57,23 @@ async def async_setup_entry( def __call__(self, **kwargs): return super().__call__(websession, **kwargs) - async def handle_api_call(call: ServiceCall) -> None: - name = call.data[ATTR_NAME] - path = call.data[ATTR_PATH] - entries = hass.config_entries.async_entries(DOMAIN) - api = None - for entry in entries: - if entry.data[CONF_NAME] == name: - api = entry.runtime_data.api - break - if api is None: - _LOGGER.error("API_CALL: User '%s' not configured", name) - return - try: - for element in path: - api = api[element] - except KeyError: - _LOGGER.error( - "API_CALL: Path %s is invalid for API on '{%s}' element", path, element + def _make_headers(self) -> dict[str, str]: + headers = super()._make_headers() + headers.update( + {"x-client": f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}"} ) - return - kwargs = call.data.get(ATTR_ARGS, {}) - data = await api(**kwargs) - hass.bus.async_fire( - EVENT_API_CALL_SUCCESS, {ATTR_NAME: name, ATTR_PATH: path, ATTR_DATA: data} - ) + return headers - websession = async_get_clientsession(hass) - - url = config_entry.data[CONF_URL] - username = config_entry.data[CONF_API_USER] - password = config_entry.data[CONF_API_KEY] + websession = async_get_clientsession( + hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True) + ) api = await hass.async_add_executor_job( HAHabitipyAsync, { - "url": url, - "login": username, - "password": password, + "url": config_entry.data[CONF_URL], + "login": config_entry.data[CONF_API_USER], + "password": config_entry.data[CONF_API_KEY], }, ) try: @@ -184,16 +99,9 @@ async def async_setup_entry( 
config_entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) - if not hass.services.has_service(DOMAIN, SERVICE_API_CALL): - hass.services.async_register( - DOMAIN, SERVICE_API_CALL, handle_api_call, schema=SERVICE_API_CALL_SCHEMA - ) - return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - if len(hass.config_entries.async_entries(DOMAIN)) == 1: - hass.services.async_remove(DOMAIN, SERVICE_API_CALL) return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/habitica/binary_sensor.py b/homeassistant/components/habitica/binary_sensor.py new file mode 100644 index 00000000000..bc79370ea63 --- /dev/null +++ b/homeassistant/components/habitica/binary_sensor.py @@ -0,0 +1,85 @@ +"""Binary sensor platform for Habitica integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from enum import StrEnum +from typing import Any + +from homeassistant.components.binary_sensor import ( + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import ASSETS_URL +from .entity import HabiticaBase +from .types import HabiticaConfigEntry + + +@dataclass(kw_only=True, frozen=True) +class HabiticaBinarySensorEntityDescription(BinarySensorEntityDescription): + """Habitica Binary Sensor Description.""" + + value_fn: Callable[[dict[str, Any]], bool | None] + entity_picture: Callable[[dict[str, Any]], str | None] + + +class HabiticaBinarySensor(StrEnum): + """Habitica Entities.""" + + PENDING_QUEST = "pending_quest" + + +def get_scroll_image_for_pending_quest_invitation(user: dict[str, Any]) -> str | None: + """Entity picture for pending quest invitation.""" + if user["party"]["quest"].get("key") and user["party"]["quest"]["RSVPNeeded"]: + return f"inventory_quest_scroll_{user["party"]["quest"]["key"]}.png" + return None + + +BINARY_SENSOR_DESCRIPTIONS: tuple[HabiticaBinarySensorEntityDescription, ...] 
= ( + HabiticaBinarySensorEntityDescription( + key=HabiticaBinarySensor.PENDING_QUEST, + translation_key=HabiticaBinarySensor.PENDING_QUEST, + value_fn=lambda user: user["party"]["quest"]["RSVPNeeded"], + entity_picture=get_scroll_image_for_pending_quest_invitation, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HabiticaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the habitica binary sensors.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + HabiticaBinarySensorEntity(coordinator, description) + for description in BINARY_SENSOR_DESCRIPTIONS + ) + + +class HabiticaBinarySensorEntity(HabiticaBase, BinarySensorEntity): + """Representation of a Habitica binary sensor.""" + + entity_description: HabiticaBinarySensorEntityDescription + + @property + def is_on(self) -> bool | None: + """If the binary sensor is on.""" + return self.entity_description.value_fn(self.coordinator.data.user) + + @property + def entity_picture(self) -> str | None: + """Return the entity picture to use in the frontend, if any.""" + if entity_picture := self.entity_description.entity_picture( + self.coordinator.data.user + ): + return f"{ASSETS_URL}{entity_picture}" + return None diff --git a/homeassistant/components/habitica/button.py b/homeassistant/components/habitica/button.py index 276aa4e7fc0..2b9a4199133 100644 --- a/homeassistant/components/habitica/button.py +++ b/homeassistant/components/habitica/button.py @@ -10,15 +10,22 @@ from typing import Any from aiohttp import ClientResponseError -from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.components.button import ( + DOMAIN as BUTTON_DOMAIN, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HabiticaConfigEntry -from .const import DOMAIN +from .const import ASSETS_URL, DOMAIN, HEALER, MAGE, ROGUE, WARRIOR from .coordinator import HabiticaData, HabiticaDataUpdateCoordinator from .entity import HabiticaBase +from .types import HabiticaConfigEntry + +PARALLEL_UPDATES = 1 @dataclass(kw_only=True, frozen=True) @@ -26,7 +33,9 @@ class HabiticaButtonEntityDescription(ButtonEntityDescription): """Describes Habitica button entity.""" press_fn: Callable[[HabiticaDataUpdateCoordinator], Any] - available_fn: Callable[[HabiticaData], bool] | None = None + available_fn: Callable[[HabiticaData], bool] + class_needed: str | None = None + entity_picture: str | None = None class HabitipyButtonEntity(StrEnum): @@ -36,6 +45,18 @@ class HabitipyButtonEntity(StrEnum): BUY_HEALTH_POTION = "buy_health_potion" ALLOCATE_ALL_STAT_POINTS = "allocate_all_stat_points" REVIVE = "revive" + MPHEAL = "mpheal" + EARTH = "earth" + FROST = "frost" + DEFENSIVE_STANCE = "defensive_stance" + VALOROUS_PRESENCE = "valorous_presence" + INTIMIDATE = "intimidate" + TOOLS_OF_TRADE = "tools_of_trade" + STEALTH = "stealth" + HEAL = "heal" + PROTECT_AURA = "protect_aura" + BRIGHTNESS = "brightness" + HEAL_ALL = "heal_all" BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = ( @@ -55,6 +76,7 @@ BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] 
= ( lambda data: data.user["stats"]["gp"] >= 25 and data.user["stats"]["hp"] < 50 ), + entity_picture="shop_potion.png", ), HabiticaButtonEntityDescription( key=HabitipyButtonEntity.ALLOCATE_ALL_STAT_POINTS, @@ -74,6 +96,175 @@ BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = ( ) +CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = ( + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.MPHEAL, + translation_key=HabitipyButtonEntity.MPHEAL, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["mpheal"].post(), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 12 + and data.user["stats"]["mp"] >= 30 + ), + class_needed=MAGE, + entity_picture="shop_mpheal.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.EARTH, + translation_key=HabitipyButtonEntity.EARTH, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["earth"].post(), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 13 + and data.user["stats"]["mp"] >= 35 + ), + class_needed=MAGE, + entity_picture="shop_earth.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.FROST, + translation_key=HabitipyButtonEntity.FROST, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["frost"].post(), + # chilling frost can only be cast once per day (streaks buff is false) + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 14 + and data.user["stats"]["mp"] >= 40 + and not data.user["stats"]["buffs"]["streaks"] + ), + class_needed=MAGE, + entity_picture="shop_frost.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.DEFENSIVE_STANCE, + translation_key=HabitipyButtonEntity.DEFENSIVE_STANCE, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast[ + "defensiveStance" + ].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 12 + and data.user["stats"]["mp"] >= 25 + ), + class_needed=WARRIOR, + entity_picture="shop_defensiveStance.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.VALOROUS_PRESENCE, + translation_key=HabitipyButtonEntity.VALOROUS_PRESENCE, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast[ + "valorousPresence" + ].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 13 + and data.user["stats"]["mp"] >= 20 + ), + class_needed=WARRIOR, + entity_picture="shop_valorousPresence.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.INTIMIDATE, + translation_key=HabitipyButtonEntity.INTIMIDATE, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["intimidate"].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 14 + and data.user["stats"]["mp"] >= 15 + ), + class_needed=WARRIOR, + entity_picture="shop_intimidate.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.TOOLS_OF_TRADE, + translation_key=HabitipyButtonEntity.TOOLS_OF_TRADE, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["toolsOfTrade"].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 13 + and data.user["stats"]["mp"] >= 25 + ), + class_needed=ROGUE, + entity_picture="shop_toolsOfTrade.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.STEALTH, + translation_key=HabitipyButtonEntity.STEALTH, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["stealth"].post() + ), + # Stealth buffs stack and it can only be cast if the amount of + # unfinished dailies is smaller than the amount of 
buffs + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 14 + and data.user["stats"]["mp"] >= 45 + and data.user["stats"]["buffs"]["stealth"] + < len( + [ + r + for r in data.tasks + if r.get("type") == "daily" + and r.get("isDue") is True + and r.get("completed") is False + ] + ) + ), + class_needed=ROGUE, + entity_picture="shop_stealth.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.HEAL, + translation_key=HabitipyButtonEntity.HEAL, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["heal"].post(), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 11 + and data.user["stats"]["mp"] >= 15 + and data.user["stats"]["hp"] < 50 + ), + class_needed=HEALER, + entity_picture="shop_heal.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.BRIGHTNESS, + translation_key=HabitipyButtonEntity.BRIGHTNESS, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["brightness"].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 12 + and data.user["stats"]["mp"] >= 15 + ), + class_needed=HEALER, + entity_picture="shop_brightness.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.PROTECT_AURA, + translation_key=HabitipyButtonEntity.PROTECT_AURA, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["protectAura"].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 13 + and data.user["stats"]["mp"] >= 30 + ), + class_needed=HEALER, + entity_picture="shop_protectAura.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.HEAL_ALL, + translation_key=HabitipyButtonEntity.HEAL_ALL, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["healAll"].post(), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 14 + and data.user["stats"]["mp"] >= 25 + ), + class_needed=HEALER, + entity_picture="shop_healAll.png", + ), +) + + async def async_setup_entry( hass: HomeAssistant, entry: HabiticaConfigEntry, @@ -82,6 +273,40 @@ async def async_setup_entry( """Set up buttons from a config entry.""" coordinator = entry.runtime_data + skills_added: set[str] = set() + + @callback + def add_entities() -> None: + """Add or remove a skillset based on the player's class.""" + + nonlocal skills_added + buttons = [] + entity_registry = er.async_get(hass) + + for description in CLASS_SKILLS: + if ( + coordinator.data.user["stats"]["lvl"] >= 10 + and coordinator.data.user["flags"]["classSelected"] + and not coordinator.data.user["preferences"]["disableClasses"] + and description.class_needed == coordinator.data.user["stats"]["class"] + ): + if description.key not in skills_added: + buttons.append(HabiticaButton(coordinator, description)) + skills_added.add(description.key) + elif description.key in skills_added: + if entity_id := entity_registry.async_get_entity_id( + BUTTON_DOMAIN, + DOMAIN, + f"{coordinator.config_entry.unique_id}_{description.key}", + ): + entity_registry.async_remove(entity_id) + skills_added.remove(description.key) + + if buttons: + async_add_entities(buttons) + + coordinator.async_add_listener(add_entities) + add_entities() async_add_entities( HabiticaButton(coordinator, description) for description in BUTTON_DESCRIPTIONS @@ -108,7 +333,7 @@ class HabiticaButton(HabiticaBase, ButtonEntity): translation_domain=DOMAIN, translation_key="service_call_unallowed", ) from e - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="service_call_exception", ) from e @@ -118,8 
+343,14 @@ class HabiticaButton(HabiticaBase, ButtonEntity): @property def available(self) -> bool: """Is entity available.""" - if not super().available: - return False - if self.entity_description.available_fn: - return self.entity_description.available_fn(self.coordinator.data) - return True + + return super().available and self.entity_description.available_fn( + self.coordinator.data + ) + + @property + def entity_picture(self) -> str | None: + """Return the entity picture to use in the frontend, if any.""" + if entity_picture := self.entity_description.entity_picture: + return f"{ASSETS_URL}{entity_picture}" + return None diff --git a/homeassistant/components/habitica/calendar.py b/homeassistant/components/habitica/calendar.py new file mode 100644 index 00000000000..ff483b71fd8 --- /dev/null +++ b/homeassistant/components/habitica/calendar.py @@ -0,0 +1,369 @@ +"""Calendar platform for Habitica integration.""" + +from __future__ import annotations + +from abc import abstractmethod +from datetime import date, datetime, timedelta +from enum import StrEnum + +from dateutil.rrule import rrule + +from homeassistant.components.calendar import ( + CalendarEntity, + CalendarEntityDescription, + CalendarEvent, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import dt as dt_util + +from . import HabiticaConfigEntry +from .coordinator import HabiticaDataUpdateCoordinator +from .entity import HabiticaBase +from .types import HabiticaTaskType +from .util import build_rrule, get_recurrence_rule + + +class HabiticaCalendar(StrEnum): + """Habitica calendars.""" + + DAILIES = "dailys" + TODOS = "todos" + TODO_REMINDERS = "todo_reminders" + DAILY_REMINDERS = "daily_reminders" + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HabiticaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the calendar platform.""" + coordinator = config_entry.runtime_data + + async_add_entities( + [ + HabiticaTodosCalendarEntity(coordinator), + HabiticaDailiesCalendarEntity(coordinator), + HabiticaTodoRemindersCalendarEntity(coordinator), + HabiticaDailyRemindersCalendarEntity(coordinator), + ] + ) + + +class HabiticaCalendarEntity(HabiticaBase, CalendarEntity): + """Base Habitica calendar entity.""" + + def __init__( + self, + coordinator: HabiticaDataUpdateCoordinator, + ) -> None: + """Initialize calendar entity.""" + super().__init__(coordinator, self.entity_description) + + @abstractmethod + def get_events( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Return events.""" + + @property + def event(self) -> CalendarEvent | None: + """Return the current or next upcoming event.""" + + return next(iter(self.get_events(dt_util.now())), None) + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Return calendar events within a datetime range.""" + + return self.get_events(start_date, end_date) + + @property + def start_of_today(self) -> datetime: + """Habitica daystart.""" + return dt_util.start_of_local_day( + datetime.fromisoformat(self.coordinator.data.user["lastCron"]) + ) + + def get_recurrence_dates( + self, recurrences: rrule, start_date: datetime, end_date: datetime | None = None + ) -> list[datetime]: + """Calculate recurrence dates based on start_date and end_date.""" + if end_date: + return recurrences.between( + start_date, end_date - 
timedelta(days=1), inc=True + ) + # if no end_date is given, return only the next recurrence + return [recurrences.after(start_date, inc=True)] + + +class HabiticaTodosCalendarEntity(HabiticaCalendarEntity): + """Habitica todos calendar entity.""" + + entity_description = CalendarEntityDescription( + key=HabiticaCalendar.TODOS, + translation_key=HabiticaCalendar.TODOS, + ) + + def get_events( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Get all dated todos.""" + + events = [] + for task in self.coordinator.data.tasks: + if not ( + task["type"] == HabiticaTaskType.TODO + and not task["completed"] + and task.get("date") # only if has due date + ): + continue + + start = dt_util.start_of_local_day(datetime.fromisoformat(task["date"])) + end = start + timedelta(days=1) + # return current and upcoming events or events within the requested range + + if end < start_date: + # Event ends before date range + continue + + if end_date and start > end_date: + # Event starts after date range + continue + + events.append( + CalendarEvent( + start=start.date(), + end=end.date(), + summary=task["text"], + description=task["notes"], + uid=task["id"], + ) + ) + return sorted( + events, + key=lambda event: ( + event.start, + self.coordinator.data.user["tasksOrder"]["todos"].index(event.uid), + ), + ) + + +class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity): + """Habitica dailies calendar entity.""" + + entity_description = CalendarEntityDescription( + key=HabiticaCalendar.DAILIES, + translation_key=HabiticaCalendar.DAILIES, + ) + + def end_date(self, recurrence: datetime, end: datetime | None = None) -> date: + """Calculate the end date for a yesterdaily. + + The enddates of events from yesterday move forward to the end + of the current day (until the cron resets the dailies) to show them + as still active events on the calendar state entity (state: on). 
+ + Events in the calendar view will show all-day events on their due day + """ + if end: + return recurrence.date() + timedelta(days=1) + return ( + dt_util.start_of_local_day() + if recurrence == self.start_of_today + else recurrence + ).date() + timedelta(days=1) + + def get_events( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Get dailies and recurrences for a given period or the next upcoming.""" + + # we only have dailies for today and future recurrences + if end_date and end_date < self.start_of_today: + return [] + start_date = max(start_date, self.start_of_today) + + events = [] + for task in self.coordinator.data.tasks: + # only dailies that that are not 'grey dailies' + if not (task["type"] == HabiticaTaskType.DAILY and task["everyX"]): + continue + + recurrences = build_rrule(task) + recurrence_dates = self.get_recurrence_dates( + recurrences, start_date, end_date + ) + for recurrence in recurrence_dates: + is_future_event = recurrence > self.start_of_today + is_current_event = ( + recurrence <= self.start_of_today and not task["completed"] + ) + + if not is_future_event and not is_current_event: + continue + + events.append( + CalendarEvent( + start=recurrence.date(), + end=self.end_date(recurrence, end_date), + summary=task["text"], + description=task["notes"], + uid=task["id"], + rrule=get_recurrence_rule(recurrences), + ) + ) + return sorted( + events, + key=lambda event: ( + event.start, + self.coordinator.data.user["tasksOrder"]["dailys"].index(event.uid), + ), + ) + + @property + def event(self) -> CalendarEvent | None: + """Return the next upcoming event.""" + return next(iter(self.get_events(self.start_of_today)), None) + + @property + def extra_state_attributes(self) -> dict[str, bool | None] | None: + """Return entity specific state attributes.""" + return { + "yesterdaily": self.event.start < self.start_of_today.date() + if self.event + else None + } + + +class HabiticaTodoRemindersCalendarEntity(HabiticaCalendarEntity): + """Habitica to-do reminders calendar entity.""" + + entity_description = CalendarEntityDescription( + key=HabiticaCalendar.TODO_REMINDERS, + translation_key=HabiticaCalendar.TODO_REMINDERS, + ) + + def get_events( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Reminders for todos.""" + + events = [] + + for task in self.coordinator.data.tasks: + if task["type"] != HabiticaTaskType.TODO or task["completed"]: + continue + + for reminder in task.get("reminders", []): + # reminders are returned by the API in local time but with wrong + # timezone (UTC) and arbitrary added seconds/microseconds. When + # creating reminders in Habitica only hours and minutes can be defined. 
+ start = datetime.fromisoformat(reminder["time"]).replace( + tzinfo=dt_util.DEFAULT_TIME_ZONE, second=0, microsecond=0 + ) + end = start + timedelta(hours=1) + + if end < start_date: + # Event ends before date range + continue + + if end_date and start > end_date: + # Event starts after date range + continue + + events.append( + CalendarEvent( + start=start, + end=end, + summary=task["text"], + description=task["notes"], + uid=f"{task["id"]}_{reminder["id"]}", + ) + ) + + return sorted( + events, + key=lambda event: event.start, + ) + + +class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity): + """Habitica daily reminders calendar entity.""" + + entity_description = CalendarEntityDescription( + key=HabiticaCalendar.DAILY_REMINDERS, + translation_key=HabiticaCalendar.DAILY_REMINDERS, + ) + + def start(self, reminder_time: str, reminder_date: date) -> datetime: + """Generate reminder times for dailies. + + Reminders for dailies have a datetime but the date part is arbitrary, + only the time part is evaluated. The dates for the reminders are the + dailies' due dates. + """ + return datetime.combine( + reminder_date, + datetime.fromisoformat(reminder_time) + .replace( + second=0, + microsecond=0, + ) + .time(), + tzinfo=dt_util.DEFAULT_TIME_ZONE, + ) + + def get_events( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Reminders for dailies.""" + + events = [] + if end_date and end_date < self.start_of_today: + return [] + start_date = max(start_date, self.start_of_today) + + for task in self.coordinator.data.tasks: + if not (task["type"] == HabiticaTaskType.DAILY and task["everyX"]): + continue + + recurrences = build_rrule(task) + recurrences_start = self.start_of_today + + recurrence_dates = self.get_recurrence_dates( + recurrences, recurrences_start, end_date + ) + for recurrence in recurrence_dates: + is_future_event = recurrence > self.start_of_today + is_current_event = ( + recurrence <= self.start_of_today and not task["completed"] + ) + + if not is_future_event and not is_current_event: + continue + + for reminder in task.get("reminders", []): + start = self.start(reminder["time"], recurrence) + end = start + timedelta(hours=1) + + if end < start_date: + # Event ends before date range + continue + + events.append( + CalendarEvent( + start=start, + end=end, + summary=task["text"], + description=task["notes"], + uid=f"{task["id"]}_{reminder["id"]}", + ) + ) + + return sorted( + events, + key=lambda event: event.start, + ) diff --git a/homeassistant/components/habitica/config_flow.py b/homeassistant/components/habitica/config_flow.py index 742523751a2..d168a5f57b4 100644 --- a/homeassistant/components/habitica/config_flow.py +++ b/homeassistant/components/habitica/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from http import HTTPStatus import logging from typing import Any @@ -10,48 +11,59 @@ from habitipy.aio import HabitipyAsync import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_URL -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.const import ( + CONF_API_KEY, + CONF_PASSWORD, + CONF_URL, + CONF_USERNAME, + CONF_VERIFY_SSL, +) from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from 
homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) -from .const import CONF_API_USER, DEFAULT_URL, DOMAIN +from .const import ( + CONF_API_USER, + DEFAULT_URL, + DOMAIN, + FORGOT_PASSWORD_URL, + HABITICANS_URL, + SIGN_UP_URL, + SITE_DATA_URL, +) -DATA_SCHEMA = vol.Schema( +STEP_ADVANCED_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_API_USER): str, vol.Required(CONF_API_KEY): str, - vol.Optional(CONF_NAME): str, vol.Optional(CONF_URL, default=DEFAULT_URL): str, + vol.Required(CONF_VERIFY_SSL, default=True): bool, + } +) + +STEP_LOGIN_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): TextSelector( + TextSelectorConfig( + type=TextSelectorType.EMAIL, + autocomplete="email", + ) + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ) + ), } ) _LOGGER = logging.getLogger(__name__) -async def validate_input(hass: HomeAssistant, data: dict[str, str]) -> dict[str, str]: - """Validate the user input allows us to connect.""" - - websession = async_get_clientsession(hass) - api = await hass.async_add_executor_job( - HabitipyAsync, - { - "login": data[CONF_API_USER], - "password": data[CONF_API_KEY], - "url": data[CONF_URL] or DEFAULT_URL, - }, - ) - try: - await api.user.get(session=websession) - return { - "title": f"{data.get('name', 'Default username')}", - CONF_API_USER: data[CONF_API_USER], - } - except ClientResponseError as ex: - raise InvalidAuth from ex - - class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for habitica.""" @@ -62,44 +74,122 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} - if user_input is not None: - try: - info = await validate_input(self.hass, user_input) - except InvalidAuth: - errors = {"base": "invalid_credentials"} - except Exception: - _LOGGER.exception("Unexpected exception") - errors = {"base": "unknown"} - else: - await self.async_set_unique_id(info[CONF_API_USER]) - self._abort_if_unique_id_configured() - return self.async_create_entry(title=info["title"], data=user_input) - return self.async_show_form( + return self.async_show_menu( step_id="user", - data_schema=DATA_SCHEMA, - errors=errors, - description_placeholders={}, - ) - - async def async_step_import(self, import_data): - """Import habitica config from configuration.yaml.""" - - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - is_fixable=False, - breaks_in_ha_version="2024.11.0", - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Habitica", + menu_options=["login", "advanced"], + description_placeholders={ + "signup": SIGN_UP_URL, + "habiticans": HABITICANS_URL, }, ) - return await self.async_step_user(import_data) + async def async_step_login( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Config flow with username/password. -class InvalidAuth(HomeAssistantError): - """Error to indicate there is invalid auth.""" + Simplified configuration setup that retrieves API credentials + from Habitica.com by authenticating with login and password. 
+ """ + errors: dict[str, str] = {} + if user_input is not None: + try: + session = async_get_clientsession(self.hass) + api = await self.hass.async_add_executor_job( + HabitipyAsync, + { + "login": "", + "password": "", + "url": DEFAULT_URL, + }, + ) + login_response = await api.user.auth.local.login.post( + session=session, + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + ) + + except ClientResponseError as ex: + if ex.status == HTTPStatus.UNAUTHORIZED: + errors["base"] = "invalid_auth" + else: + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(login_response["id"]) + self._abort_if_unique_id_configured() + return self.async_create_entry( + title=login_response["username"], + data={ + CONF_API_USER: login_response["id"], + CONF_API_KEY: login_response["apiToken"], + CONF_USERNAME: login_response["username"], + CONF_URL: DEFAULT_URL, + CONF_VERIFY_SSL: True, + }, + ) + + return self.async_show_form( + step_id="login", + data_schema=self.add_suggested_values_to_schema( + data_schema=STEP_LOGIN_DATA_SCHEMA, suggested_values=user_input + ), + errors=errors, + description_placeholders={"forgot_password": FORGOT_PASSWORD_URL}, + ) + + async def async_step_advanced( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Advanced configuration with User Id and API Token. + + Advanced configuration allows connecting to Habitica instances + hosted on different domains or to self-hosted instances. + """ + errors: dict[str, str] = {} + if user_input is not None: + try: + session = async_get_clientsession( + self.hass, verify_ssl=user_input.get(CONF_VERIFY_SSL, True) + ) + api = await self.hass.async_add_executor_job( + HabitipyAsync, + { + "login": user_input[CONF_API_USER], + "password": user_input[CONF_API_KEY], + "url": user_input.get(CONF_URL, DEFAULT_URL), + }, + ) + api_response = await api.user.get( + session=session, + userFields="auth", + ) + except ClientResponseError as ex: + if ex.status == HTTPStatus.UNAUTHORIZED: + errors["base"] = "invalid_auth" + else: + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(user_input[CONF_API_USER]) + self._abort_if_unique_id_configured() + user_input[CONF_USERNAME] = api_response["auth"]["local"]["username"] + return self.async_create_entry( + title=user_input[CONF_USERNAME], data=user_input + ) + + return self.async_show_form( + step_id="advanced", + data_schema=self.add_suggested_values_to_schema( + data_schema=STEP_ADVANCED_DATA_SCHEMA, suggested_values=user_input + ), + errors=errors, + description_placeholders={ + "site_data": SITE_DATA_URL, + "default_url": DEFAULT_URL, + }, + ) diff --git a/homeassistant/components/habitica/const.py b/homeassistant/components/habitica/const.py index 4b10e9a705b..42d64ca7d3f 100644 --- a/homeassistant/components/habitica/const.py +++ b/homeassistant/components/habitica/const.py @@ -6,6 +6,11 @@ CONF_API_USER = "api_user" DEFAULT_URL = "https://habitica.com" ASSETS_URL = "https://habitica-assets.s3.amazonaws.com/mobileApp/images/" +SITE_DATA_URL = "https://habitica.com/user/settings/siteData" +FORGOT_PASSWORD_URL = "https://habitica.com/forgot-password" +SIGN_UP_URL = "https://habitica.com/register" +HABITICANS_URL = "https://habitica.com/static/img/home-main@3x.ffc32b12.png" + DOMAIN = "habitica" # service constants @@ -20,4 
+25,28 @@ ATTR_DATA = "data" MANUFACTURER = "HabitRPG, Inc." NAME = "Habitica" -UNIT_TASKS = "tasks" +ATTR_CONFIG_ENTRY = "config_entry" +ATTR_SKILL = "skill" +ATTR_TASK = "task" +ATTR_DIRECTION = "direction" +ATTR_TARGET = "target" +ATTR_ITEM = "item" +SERVICE_CAST_SKILL = "cast_skill" +SERVICE_START_QUEST = "start_quest" +SERVICE_ACCEPT_QUEST = "accept_quest" +SERVICE_CANCEL_QUEST = "cancel_quest" +SERVICE_ABORT_QUEST = "abort_quest" +SERVICE_REJECT_QUEST = "reject_quest" +SERVICE_LEAVE_QUEST = "leave_quest" +SERVICE_SCORE_HABIT = "score_habit" +SERVICE_SCORE_REWARD = "score_reward" + +SERVICE_TRANSFORMATION = "transformation" + + +WARRIOR = "warrior" +ROGUE = "rogue" +HEALER = "healer" +MAGE = "wizard" + +DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf" diff --git a/homeassistant/components/habitica/coordinator.py b/homeassistant/components/habitica/coordinator.py index 357643593e4..f9ffb1b53bd 100644 --- a/homeassistant/components/habitica/coordinator.py +++ b/homeassistant/components/habitica/coordinator.py @@ -51,24 +51,22 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): ), ) self.api = habitipy + self.content: dict[str, Any] = {} async def _async_update_data(self) -> HabiticaData: try: user_response = await self.api.user.get() tasks_response = await self.api.tasks.user.get() - tasks_response.extend( - [ - {"id": task["_id"], **task} - for task in await self.api.tasks.user.get(type="completedTodos") - if task.get("_id") - ] - ) - + tasks_response.extend(await self.api.tasks.user.get(type="completedTodos")) + if not self.content: + self.content = await self.api.content.get( + language=user_response["preferences"]["language"] + ) except ClientResponseError as error: if error.status == HTTPStatus.TOO_MANY_REQUESTS: - _LOGGER.debug("Currently rate limited, skipping update") + _LOGGER.debug("Rate limit exceeded, will try again later") return self.data - raise UpdateFailed(f"Error communicating with API: {error}") from error + raise UpdateFailed(f"Unable to connect to Habitica: {error}") from error return HabiticaData(user=user_response, tasks=tasks_response) diff --git a/homeassistant/components/habitica/diagnostics.py b/homeassistant/components/habitica/diagnostics.py new file mode 100644 index 00000000000..bca79946503 --- /dev/null +++ b/homeassistant/components/habitica/diagnostics.py @@ -0,0 +1,27 @@ +"""Diagnostics platform for Habitica integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant + +from .const import CONF_API_USER +from .types import HabiticaConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: HabiticaConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + habitica_data = await config_entry.runtime_data.api.user.anonymized.get() + + return { + "config_entry_data": { + CONF_URL: config_entry.data[CONF_URL], + CONF_API_USER: config_entry.data[CONF_API_USER], + }, + "habitica_data": habitica_data, + } diff --git a/homeassistant/components/habitica/icons.json b/homeassistant/components/habitica/icons.json index 710b8c9d25b..d4ca5dba10d 100644 --- a/homeassistant/components/habitica/icons.json +++ b/homeassistant/components/habitica/icons.json @@ -20,6 +20,56 @@ }, "revive": { "default": "mdi:grave-stone" + }, + "mpheal": { + "default": "mdi:broadcast" + }, + "earth": { + "default": "mdi:landslide" + }, + "frost": { + "default": "mdi:snowflake" + }, + 
"defensive_stance": { + "default": "mdi:shield-sword" + }, + "valorous_presence": { + "default": "mdi:shield-sun" + }, + "intimidate": { + "default": "mdi:emoticon-angry" + }, + "tools_of_trade": { + "default": "mdi:domino-mask" + }, + "stealth": { + "default": "mdi:ninja" + }, + "heal": { + "default": "mdi:aurora" + }, + "brightness": { + "default": "mdi:flare" + }, + "protect_aura": { + "default": "mdi:shimmer" + }, + "heal_all": { + "default": "mdi:hand-heart-outline" + } + }, + "calendar": { + "todos": { + "default": "mdi:calendar-check" + }, + "dailys": { + "default": "mdi:calendar-multiple" + }, + "todo_reminders": { + "default": "mdi:reminder" + }, + "daily_reminders": { + "default": "mdi:reminder" } }, "sensor": { @@ -56,6 +106,12 @@ "gold": { "default": "mdi:sack" }, + "gems": { + "default": "mdi:diamond-stone" + }, + "trinkets": { + "default": "mdi:timer-sand" + }, "class": { "default": "mdi:sword", "state": { @@ -76,6 +132,18 @@ }, "rewards": { "default": "mdi:treasure-chest" + }, + "strength": { + "default": "mdi:arm-flex-outline" + }, + "intelligence": { + "default": "mdi:head-snowflake-outline" + }, + "perception": { + "default": "mdi:eye-outline" + }, + "constitution": { + "default": "mdi:run-fast" } }, "switch": { @@ -85,9 +153,49 @@ "on": "mdi:sleep" } } + }, + "binary_sensor": { + "pending_quest": { + "default": "mdi:script-outline", + "state": { + "on": "mdi:script-text-outline" + } + } } }, "services": { - "api_call": "mdi:console" + "api_call": { + "service": "mdi:console" + }, + "cast_skill": { + "service": "mdi:creation-outline" + }, + "accept_quest": { + "service": "mdi:script-text" + }, + "reject_quest": { + "service": "mdi:script-text" + }, + "leave_quest": { + "service": "mdi:script-text" + }, + "abort_quest": { + "service": "mdi:script-text-key" + }, + "cancel_quest": { + "service": "mdi:script-text-key" + }, + "start_quest": { + "service": "mdi:script-text-key" + }, + "score_habit": { + "service": "mdi:counter" + }, + "score_reward": { + "service": "mdi:sack" + }, + "transformation": { + "service": "mdi:flask-round-bottom" + } } } diff --git a/homeassistant/components/habitica/manifest.json b/homeassistant/components/habitica/manifest.json index 16a4ef959a8..a01697c3945 100644 --- a/homeassistant/components/habitica/manifest.json +++ b/homeassistant/components/habitica/manifest.json @@ -1,10 +1,10 @@ { "domain": "habitica", "name": "Habitica", - "codeowners": ["@ASMfreaK", "@leikoilja", "@tr4nt0r"], + "codeowners": ["@tr4nt0r"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/habitica", "iot_class": "cloud_polling", "loggers": ["habitipy", "plumbum"], - "requirements": ["habitipy==0.3.1"] + "requirements": ["habitipy==0.3.3"] } diff --git a/homeassistant/components/habitica/quality_scale.yaml b/homeassistant/components/habitica/quality_scale.yaml new file mode 100644 index 00000000000..9d505b85b8c --- /dev/null +++ b/homeassistant/components/habitica/quality_scale.yaml @@ -0,0 +1,84 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: test already_configured, tests should finish with create_entry or abort, assert unique_id + config-flow: done + dependency-transparency: todo + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: No events are registered by the integration. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: There is no options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: todo + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: Integration represents a service + discovery: + status: exempt + comment: Integration represents a service + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: + status: exempt + comment: No supportable devices. + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + Integration is a service, no devices that could be added at runtime. + Button entities for casting skills are created/removed dynamically if unlocked or on class change + entity-category: + status: done + comment: Default categories are appropriate for currently available entities. + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: + status: todo + comment: translations for UpdateFailed missing + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: done + comment: Used to inform of deprecated entities and actions. + stale-devices: + status: done + comment: Not applicable. Only one device per config entry. Removed together with the config entry. + + # Platinum + async-dependency: todo + inject-websession: done + strict-typing: todo diff --git a/homeassistant/components/habitica/sensor.py b/homeassistant/components/habitica/sensor.py index 8762345b597..bead15d109b 100644 --- a/homeassistant/components/habitica/sensor.py +++ b/homeassistant/components/habitica/sensor.py @@ -24,10 +24,10 @@ from homeassistant.helpers.issue_registry import ( ) from homeassistant.helpers.typing import StateType -from . import HabiticaConfigEntry -from .const import DOMAIN, UNIT_TASKS +from .const import ASSETS_URL, DOMAIN from .entity import HabiticaBase -from .util import entity_used_in +from .types import HabiticaConfigEntry +from .util import entity_used_in, get_attribute_points, get_attributes_total _LOGGER = logging.getLogger(__name__) @@ -36,7 +36,11 @@ _LOGGER = logging.getLogger(__name__) class HabitipySensorEntityDescription(SensorEntityDescription): """Habitipy Sensor Description.""" - value_fn: Callable[[dict[str, Any]], StateType] + value_fn: Callable[[dict[str, Any], dict[str, Any]], StateType] + attributes_fn: ( + Callable[[dict[str, Any], dict[str, Any]], dict[str, Any] | None] | None + ) = None + entity_picture: str | None = None @dataclass(kw_only=True, frozen=True) @@ -63,72 +67,123 @@ class HabitipySensorEntity(StrEnum): DAILIES = "dailys" TODOS = "todos" REWARDS = "rewards" + GEMS = "gems" + TRINKETS = "trinkets" + STRENGTH = "strength" + INTELLIGENCE = "intelligence" + CONSTITUTION = "constitution" + PERCEPTION = "perception" SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] 
= ( HabitipySensorEntityDescription( key=HabitipySensorEntity.DISPLAY_NAME, translation_key=HabitipySensorEntity.DISPLAY_NAME, - value_fn=lambda user: user.get("profile", {}).get("name"), + value_fn=lambda user, _: user.get("profile", {}).get("name"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH, translation_key=HabitipySensorEntity.HEALTH, - native_unit_of_measurement="HP", suggested_display_precision=0, - value_fn=lambda user: user.get("stats", {}).get("hp"), + value_fn=lambda user, _: user.get("stats", {}).get("hp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH_MAX, translation_key=HabitipySensorEntity.HEALTH_MAX, - native_unit_of_measurement="HP", entity_registry_enabled_default=False, - value_fn=lambda user: user.get("stats", {}).get("maxHealth"), + value_fn=lambda user, _: user.get("stats", {}).get("maxHealth"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA, translation_key=HabitipySensorEntity.MANA, - native_unit_of_measurement="MP", suggested_display_precision=0, - value_fn=lambda user: user.get("stats", {}).get("mp"), + value_fn=lambda user, _: user.get("stats", {}).get("mp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA_MAX, translation_key=HabitipySensorEntity.MANA_MAX, - native_unit_of_measurement="MP", - value_fn=lambda user: user.get("stats", {}).get("maxMP"), + value_fn=lambda user, _: user.get("stats", {}).get("maxMP"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE, translation_key=HabitipySensorEntity.EXPERIENCE, - native_unit_of_measurement="XP", - value_fn=lambda user: user.get("stats", {}).get("exp"), + value_fn=lambda user, _: user.get("stats", {}).get("exp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE_MAX, translation_key=HabitipySensorEntity.EXPERIENCE_MAX, - native_unit_of_measurement="XP", - value_fn=lambda user: user.get("stats", {}).get("toNextLevel"), + value_fn=lambda user, _: user.get("stats", {}).get("toNextLevel"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.LEVEL, translation_key=HabitipySensorEntity.LEVEL, - value_fn=lambda user: user.get("stats", {}).get("lvl"), + value_fn=lambda user, _: user.get("stats", {}).get("lvl"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.GOLD, translation_key=HabitipySensorEntity.GOLD, - native_unit_of_measurement="GP", suggested_display_precision=2, - value_fn=lambda user: user.get("stats", {}).get("gp"), + value_fn=lambda user, _: user.get("stats", {}).get("gp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.CLASS, translation_key=HabitipySensorEntity.CLASS, - value_fn=lambda user: user.get("stats", {}).get("class"), + value_fn=lambda user, _: user.get("stats", {}).get("class"), device_class=SensorDeviceClass.ENUM, options=["warrior", "healer", "wizard", "rogue"], ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.GEMS, + translation_key=HabitipySensorEntity.GEMS, + value_fn=lambda user, _: user.get("balance", 0) * 4, + suggested_display_precision=0, + entity_picture="shop_gem.png", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.TRINKETS, + translation_key=HabitipySensorEntity.TRINKETS, + value_fn=( + lambda user, _: user.get("purchased", {}) + .get("plan", {}) + .get("consecutive", {}) + .get("trinkets", 0) + ), + suggested_display_precision=0, + native_unit_of_measurement="⧖", + entity_picture="notif_subscriber_reward.png", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.STRENGTH, + 
translation_key=HabitipySensorEntity.STRENGTH, + value_fn=lambda user, content: get_attributes_total(user, content, "str"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "str"), + suggested_display_precision=0, + native_unit_of_measurement="STR", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.INTELLIGENCE, + translation_key=HabitipySensorEntity.INTELLIGENCE, + value_fn=lambda user, content: get_attributes_total(user, content, "int"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "int"), + suggested_display_precision=0, + native_unit_of_measurement="INT", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.PERCEPTION, + translation_key=HabitipySensorEntity.PERCEPTION, + value_fn=lambda user, content: get_attributes_total(user, content, "per"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "per"), + suggested_display_precision=0, + native_unit_of_measurement="PER", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.CONSTITUTION, + translation_key=HabitipySensorEntity.CONSTITUTION, + value_fn=lambda user, content: get_attributes_total(user, content, "con"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "con"), + suggested_display_precision=0, + native_unit_of_measurement="CON", + ), ) @@ -140,6 +195,8 @@ TASKS_MAP = { "frequency": "frequency", "every_x": "everyX", "streak": "streak", + "up": "up", + "down": "down", "counter_up": "counterUp", "counter_down": "counterDown", "next_due": "nextDue", @@ -164,20 +221,17 @@ TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] = ( HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.HABITS, translation_key=HabitipySensorEntity.HABITS, - native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [r for r in tasks if r.get("type") == "habit"], ), HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.DAILIES, translation_key=HabitipySensorEntity.DAILIES, - native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [r for r in tasks if r.get("type") == "daily"], entity_registry_enabled_default=False, ), HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.TODOS, translation_key=HabitipySensorEntity.TODOS, - native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [ r for r in tasks if r.get("type") == "todo" and not r.get("completed") ], @@ -186,7 +240,6 @@ TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] 
= ( HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.REWARDS, translation_key=HabitipySensorEntity.REWARDS, - native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [r for r in tasks if r.get("type") == "reward"], ), ) @@ -220,7 +273,23 @@ class HabitipySensor(HabiticaBase, SensorEntity): def native_value(self) -> StateType: """Return the state of the device.""" - return self.entity_description.value_fn(self.coordinator.data.user) + return self.entity_description.value_fn( + self.coordinator.data.user, self.coordinator.content + ) + + @property + def extra_state_attributes(self) -> dict[str, float | None] | None: + """Return entity specific state attributes.""" + if func := self.entity_description.attributes_fn: + return func(self.coordinator.data.user, self.coordinator.content) + return None + + @property + def entity_picture(self) -> str | None: + """Return the entity picture to use in the frontend, if any.""" + if entity_picture := self.entity_description.entity_picture: + return f"{ASSETS_URL}{entity_picture}" + return None class HabitipyTaskSensor(HabiticaBase, SensorEntity): diff --git a/homeassistant/components/habitica/services.py b/homeassistant/components/habitica/services.py new file mode 100644 index 00000000000..7f2d66e4690 --- /dev/null +++ b/homeassistant/components/habitica/services.py @@ -0,0 +1,421 @@ +"""Actions for the Habitica integration.""" + +from __future__ import annotations + +from http import HTTPStatus +import logging +from typing import Any + +from aiohttp import ClientResponseError +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_NAME, CONF_NAME +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.selector import ConfigEntrySelector + +from .const import ( + ATTR_ARGS, + ATTR_CONFIG_ENTRY, + ATTR_DATA, + ATTR_DIRECTION, + ATTR_ITEM, + ATTR_PATH, + ATTR_SKILL, + ATTR_TARGET, + ATTR_TASK, + DOMAIN, + EVENT_API_CALL_SUCCESS, + SERVICE_ABORT_QUEST, + SERVICE_ACCEPT_QUEST, + SERVICE_API_CALL, + SERVICE_CANCEL_QUEST, + SERVICE_CAST_SKILL, + SERVICE_LEAVE_QUEST, + SERVICE_REJECT_QUEST, + SERVICE_SCORE_HABIT, + SERVICE_SCORE_REWARD, + SERVICE_START_QUEST, + SERVICE_TRANSFORMATION, +) +from .types import HabiticaConfigEntry + +_LOGGER = logging.getLogger(__name__) + + +SERVICE_API_CALL_SCHEMA = vol.Schema( + { + vol.Required(ATTR_NAME): str, + vol.Required(ATTR_PATH): vol.All(cv.ensure_list, [str]), + vol.Optional(ATTR_ARGS): dict, + } +) + +SERVICE_CAST_SKILL_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + vol.Required(ATTR_SKILL): cv.string, + vol.Optional(ATTR_TASK): cv.string, + } +) + +SERVICE_MANAGE_QUEST_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + } +) +SERVICE_SCORE_TASK_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + vol.Required(ATTR_TASK): cv.string, + vol.Optional(ATTR_DIRECTION): cv.string, + } +) + +SERVICE_TRANSFORMATION_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + vol.Required(ATTR_ITEM): cv.string, + vol.Required(ATTR_TARGET): cv.string, + } +) + + +def get_config_entry(hass: HomeAssistant, entry_id: 
str) -> HabiticaConfigEntry: + """Return config entry or raise if not found or not loaded.""" + if not (entry := hass.config_entries.async_get_entry(entry_id)): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_found", + ) + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_loaded", + ) + return entry + + +def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 + """Set up services for Habitica integration.""" + + async def handle_api_call(call: ServiceCall) -> None: + async_create_issue( + hass, + DOMAIN, + "deprecated_api_call", + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_api_call", + ) + _LOGGER.warning( + "Deprecated action called: 'habitica.api_call' is deprecated and will be removed in Home Assistant version 2025.6.0" + ) + + name = call.data[ATTR_NAME] + path = call.data[ATTR_PATH] + entries = hass.config_entries.async_entries(DOMAIN) + + api = None + for entry in entries: + if entry.data[CONF_NAME] == name: + api = entry.runtime_data.api + break + if api is None: + _LOGGER.error("API_CALL: User '%s' not configured", name) + return + try: + for element in path: + api = api[element] + except KeyError: + _LOGGER.error( + "API_CALL: Path %s is invalid for API on '{%s}' element", path, element + ) + return + kwargs = call.data.get(ATTR_ARGS, {}) + data = await api(**kwargs) + hass.bus.async_fire( + EVENT_API_CALL_SUCCESS, {ATTR_NAME: name, ATTR_PATH: path, ATTR_DATA: data} + ) + + async def cast_skill(call: ServiceCall) -> ServiceResponse: + """Skill action.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + skill = { + "pickpocket": {"spellId": "pickPocket", "cost": "10 MP"}, + "backstab": {"spellId": "backStab", "cost": "15 MP"}, + "smash": {"spellId": "smash", "cost": "10 MP"}, + "fireball": {"spellId": "fireball", "cost": "10 MP"}, + } + try: + task_id = next( + task["id"] + for task in coordinator.data.tasks + if call.data[ATTR_TASK] in (task["id"], task.get("alias")) + or call.data[ATTR_TASK] == task["text"] + ) + except StopIteration as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="task_not_found", + translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"}, + ) from e + + try: + response: dict[str, Any] = await coordinator.api.user.class_.cast[ + skill[call.data[ATTR_SKILL]]["spellId"] + ].post(targetId=task_id) + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.UNAUTHORIZED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="not_enough_mana", + translation_placeholders={ + "cost": skill[call.data[ATTR_SKILL]]["cost"], + "mana": f"{int(coordinator.data.user.get("stats", {}).get("mp", 0))} MP", + }, + ) from e + if e.status == HTTPStatus.NOT_FOUND: + # could also be task not found, but the task is looked up + # before the request, so most likely wrong skill selected + # or the skill hasn't been unlocked yet. 
+ raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="skill_not_found", + translation_placeholders={"skill": call.data[ATTR_SKILL]}, + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + await coordinator.async_request_refresh() + return response + + async def manage_quests(call: ServiceCall) -> ServiceResponse: + """Accept, reject, start, leave or cancel quests.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + + COMMAND_MAP = { + SERVICE_ABORT_QUEST: "abort", + SERVICE_ACCEPT_QUEST: "accept", + SERVICE_CANCEL_QUEST: "cancel", + SERVICE_LEAVE_QUEST: "leave", + SERVICE_REJECT_QUEST: "reject", + SERVICE_START_QUEST: "force-start", + } + try: + return await coordinator.api.groups.party.quests[ + COMMAND_MAP[call.service] + ].post() + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.UNAUTHORIZED: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="quest_action_unallowed" + ) from e + if e.status == HTTPStatus.NOT_FOUND: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="quest_not_found" + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="service_call_exception" + ) from e + + for service in ( + SERVICE_ABORT_QUEST, + SERVICE_ACCEPT_QUEST, + SERVICE_CANCEL_QUEST, + SERVICE_LEAVE_QUEST, + SERVICE_REJECT_QUEST, + SERVICE_START_QUEST, + ): + hass.services.async_register( + DOMAIN, + service, + manage_quests, + schema=SERVICE_MANAGE_QUEST_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + + async def score_task(call: ServiceCall) -> ServiceResponse: + """Score a task action.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + try: + task_id, task_value = next( + (task["id"], task.get("value")) + for task in coordinator.data.tasks + if call.data[ATTR_TASK] in (task["id"], task.get("alias")) + or call.data[ATTR_TASK] == task["text"] + ) + except StopIteration as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="task_not_found", + translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"}, + ) from e + + try: + response: dict[str, Any] = ( + await coordinator.api.tasks[task_id] + .score[call.data.get(ATTR_DIRECTION, "up")] + .post() + ) + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.UNAUTHORIZED and task_value is not None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="not_enough_gold", + translation_placeholders={ + "gold": f"{coordinator.data.user["stats"]["gp"]:.2f} GP", + "cost": f"{task_value} GP", + }, + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + await coordinator.async_request_refresh() + return response + + async def transformation(call: ServiceCall) -> ServiceResponse: + """User a transformation item on a player character.""" + + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + ITEMID_MAP = { + "snowball": {"itemId": "snowball"}, + 
"spooky_sparkles": {"itemId": "spookySparkles"}, + "seafoam": {"itemId": "seafoam"}, + "shiny_seed": {"itemId": "shinySeed"}, + } + # check if target is self + if call.data[ATTR_TARGET] in ( + coordinator.data.user["id"], + coordinator.data.user["profile"]["name"], + coordinator.data.user["auth"]["local"]["username"], + ): + target_id = coordinator.data.user["id"] + else: + # check if target is a party member + try: + party = await coordinator.api.groups.party.members.get() + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.NOT_FOUND: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="party_not_found", + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + try: + target_id = next( + member["id"] + for member in party + if call.data[ATTR_TARGET].lower() + in ( + member["id"], + member["auth"]["local"]["username"].lower(), + member["profile"]["name"].lower(), + ) + ) + except StopIteration as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="target_not_found", + translation_placeholders={"target": f"'{call.data[ATTR_TARGET]}'"}, + ) from e + try: + response: dict[str, Any] = await coordinator.api.user.class_.cast[ + ITEMID_MAP[call.data[ATTR_ITEM]]["itemId"] + ].post(targetId=target_id) + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.UNAUTHORIZED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="item_not_found", + translation_placeholders={"item": call.data[ATTR_ITEM]}, + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + return response + + hass.services.async_register( + DOMAIN, + SERVICE_API_CALL, + handle_api_call, + schema=SERVICE_API_CALL_SCHEMA, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_CAST_SKILL, + cast_skill, + schema=SERVICE_CAST_SKILL_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_SCORE_HABIT, + score_task, + schema=SERVICE_SCORE_TASK_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + hass.services.async_register( + DOMAIN, + SERVICE_SCORE_REWARD, + score_task, + schema=SERVICE_SCORE_TASK_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_TRANSFORMATION, + transformation, + schema=SERVICE_TRANSFORMATION_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/habitica/services.yaml b/homeassistant/components/habitica/services.yaml index a7ef39eb529..a89c935b630 100644 --- a/homeassistant/components/habitica/services.yaml +++ b/homeassistant/components/habitica/services.yaml @@ -15,3 +15,82 @@ api_call: example: '{"text": "Use API from Home Assistant", "type": "todo"}' selector: object: +cast_skill: + fields: + config_entry: &config_entry + required: true + selector: + config_entry: + integration: habitica + skill: + required: true + selector: + select: + options: + - "pickpocket" + - "backstab" + - "smash" + - "fireball" + mode: dropdown + translation_key: "skill_select" + task: &task + required: true + selector: 
+ text: +accept_quest: + fields: + config_entry: *config_entry +reject_quest: + fields: + config_entry: *config_entry +start_quest: + fields: + config_entry: *config_entry +cancel_quest: + fields: + config_entry: *config_entry +abort_quest: + fields: + config_entry: *config_entry +leave_quest: + fields: + config_entry: *config_entry +score_habit: + fields: + config_entry: *config_entry + task: *task + direction: + required: true + selector: + select: + options: + - value: up + label: "➕" + - value: down + label: "➖" +score_reward: + fields: + config_entry: *config_entry + task: *task +transformation: + fields: + config_entry: + required: true + selector: + config_entry: + integration: habitica + item: + required: true + selector: + select: + options: + - "snowball" + - "spooky_sparkles" + - "seafoam" + - "shiny_seed" + mode: dropdown + translation_key: "transformation_item_select" + target: + required: true + selector: + text: diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index 21d2622245c..f1b956fe17e 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -1,25 +1,66 @@ { + "common": { + "todos": "To-Do's", + "dailies": "Dailies", + "config_entry_name": "Select character", + "unit_tasks": "tasks", + "unit_health_points": "HP", + "unit_mana_points": "MP", + "unit_experience_points": "XP" + }, "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" }, "error": { - "invalid_credentials": "[%key:common::config_flow::error::invalid_auth%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]" }, "step": { "user": { + "title": "Habitica - Gamify your life", + "menu_options": { + "login": "Login to Habitica", + "advanced": "Login to other instances" + }, + "description": "![Habiticans]({habiticans}) Connect your Habitica account to keep track of your adventurer's stats, progress, and manage your to-dos and daily tasks.\n\n[Don't have a Habitica account? Sign up here.]({signup})" + }, + "login": { + "title": "[%key:component::habitica::config::step::user::menu_options::login%]", + "data": { + "username": "Email or username (case-sensitive)", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "Email or username (case-sensitive) to connect Home Assistant to your Habitica account", + "password": "Password for the account to connect Home Assistant to Habitica" + }, + "description": "Enter your login details to start using Habitica with Home Assistant\n\n[Forgot your password?]({forgot_password})" + }, + "advanced": { + "title": "[%key:component::habitica::config::step::user::menu_options::advanced%]", "data": { "url": "[%key:common::config_flow::data::url%]", - "name": "Override for Habitica’s username. Will be used for actions", - "api_user": "Habitica’s API user ID", - "api_key": "[%key:common::config_flow::data::api_key%]" + "api_user": "User ID", + "api_key": "API Token", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, - "description": "Connect your Habitica profile to allow monitoring of your user's profile and tasks. Note that api_id and api_key must be gotten from https://habitica.com/user/settings/api" + "data_description": { + "url": "URL of the Habitica installation to connect to. 
Defaults to `{default_url}`", + "api_user": "User ID of your Habitica account", + "api_key": "API Token of the Habitica account" + }, + "description": "You can retrieve your `User ID` and `API Token` from [**Settings -> Site Data**]({site_data}) on Habitica or the instance you want to connect to" } } }, "entity": { + "binary_sensor": { + "pending_quest": { + "name": "Pending quest invitation" + } + }, "button": { "run_cron": { "name": "Start my day" @@ -32,6 +73,65 @@ }, "revive": { "name": "Revive from death" + }, + "mpheal": { + "name": "Ethereal surge" + }, + "earth": { + "name": "Earthquake" + }, + "frost": { + "name": "Chilling frost" + }, + "defensive_stance": { + "name": "Defensive stance" + }, + "valorous_presence": { + "name": "Valorous presence" + }, + "intimidate": { + "name": "Intimidating gaze" + }, + "tools_of_trade": { + "name": "Tools of the trade" + }, + "stealth": { + "name": "Stealth" + }, + "heal": { + "name": "Healing light" + }, + "brightness": { + "name": "Searing brightness" + }, + "protect_aura": { + "name": "Protective aura" + }, + "heal_all": { + "name": "Blessing" + } + }, + "calendar": { + "todos": { + "name": "[%key:component::habitica::common::todos%]" + }, + "dailys": { + "name": "[%key:component::habitica::common::dailies%]", + "state_attributes": { + "yesterdaily": { + "name": "Yester-Daily", + "state": { + "true": "[%key:common::state::yes%]", + "false": "[%key:common::state::no%]" + } + } + } + }, + "todo_reminders": { + "name": "To-do reminders" + }, + "daily_reminders": { + "name": "Daily reminders" } }, "sensor": { @@ -39,28 +139,42 @@ "name": "Display name" }, "health": { - "name": "Health" + "name": "Health", + "unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]" }, "health_max": { - "name": "Max. health" + "name": "Max. health", + "unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]" }, "mana": { - "name": "Mana" + "name": "Mana", + "unit_of_measurement": "[%key:component::habitica::common::unit_mana_points%]" }, "mana_max": { - "name": "Max. mana" + "name": "Max. 
mana", + "unit_of_measurement": "[%key:component::habitica::common::unit_mana_points%]" }, "experience": { - "name": "Experience" + "name": "Experience", + "unit_of_measurement": "[%key:component::habitica::common::unit_experience_points%]" }, "experience_max": { - "name": "Next level" + "name": "Next level", + "unit_of_measurement": "[%key:component::habitica::common::unit_experience_points%]" }, "level": { "name": "Level" }, "gold": { - "name": "Gold" + "name": "Gold", + "unit_of_measurement": "GP" + }, + "gems": { + "name": "Gems", + "unit_of_measurement": "gems" + }, + "trinkets": { + "name": "Mystic hourglasses" }, "class": { "name": "Class", @@ -72,16 +186,100 @@ } }, "todos": { - "name": "To-Do's" + "name": "[%key:component::habitica::common::todos%]", + "unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]" }, "dailys": { - "name": "Dailies" + "name": "[%key:component::habitica::common::dailies%]", + "unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]" }, "habits": { - "name": "Habits" + "name": "Habits", + "unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]" }, "rewards": { - "name": "Rewards" + "name": "Rewards", + "unit_of_measurement": "[%key:component::habitica::common::unit_tasks%]" + }, + "strength": { + "name": "Strength", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "Battle gear" + }, + "class": { + "name": "Class equip bonus" + }, + "allocated": { + "name": "Allocated attribute points" + }, + "buffs": { + "name": "Buffs" + } + } + }, + "intelligence": { + "name": "Intelligence", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" + }, + "class": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" + }, + "allocated": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" + }, + "buffs": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" + } + } + }, + "perception": { + "name": "Perception", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" + }, + "class": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" + }, + "allocated": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" + }, + "buffs": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" + } + } + }, + "constitution": { + "name": "Constitution", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" + }, + "class": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" + }, + "allocated": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" + }, + "buffs": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" + } + } } }, 
"switch": { @@ -91,10 +289,10 @@ }, "todo": { "todos": { - "name": "To-Do's" + "name": "[%key:component::habitica::common::todos%]" }, "dailys": { - "name": "Dailies" + "name": "[%key:component::habitica::common::dailies%]" } } }, @@ -127,19 +325,56 @@ "message": "Unable to create new to-do `{name}` for Habitica, please try again" }, "setup_rate_limit_exception": { - "message": "Currently rate limited, try again later" + "message": "Rate limit exceeded, try again later" }, "service_call_unallowed": { - "message": "Unable to carry out this action, because the required conditions are not met" + "message": "Unable to complete action, the required conditions are not met" }, "service_call_exception": { "message": "Unable to connect to Habitica, try again later" + }, + "not_enough_mana": { + "message": "Unable to cast skill, not enough mana. Your character has {mana}, but the skill costs {cost}." + }, + "not_enough_gold": { + "message": "Unable to buy reward, not enough gold. Your character has {gold}, but the reward costs {cost}." + }, + "skill_not_found": { + "message": "Unable to cast skill, your character does not have the skill or spell {skill}." + }, + "entry_not_found": { + "message": "The selected character is not configured in Home Assistant." + }, + "entry_not_loaded": { + "message": "The selected character is currently not loaded or disabled in Home Assistant." + }, + "task_not_found": { + "message": "Unable to complete action, could not find the task {task}" + }, + "quest_action_unallowed": { + "message": "Action not allowed, only quest leader or group leader can perform this action" + }, + "quest_not_found": { + "message": "Unable to complete action, quest or group not found" + }, + "target_not_found": { + "message": "Unable to find target {target} in your party" + }, + "party_not_found": { + "message": "Unable to find target, you are currently not in a party. You can only target yourself" + }, + "item_not_found": { + "message": "Unable to use {item}, you don't own this item." } }, "issues": { "deprecated_task_entity": { - "title": "The Habitica `{task_name}` sensor is deprecated", + "title": "The Habitica {task_name} sensor is deprecated", "description": "The Habitica entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to replace the sensor entity with the newly added todo entity.\nWhen you are done migrating you can disable `{entity}`." + }, + "deprecated_api_call": { + "title": "The Habitica action habitica.api_call is deprecated", + "description": "The Habitica action `habitica.api_call` is deprecated and will be removed in Home Assistant 2025.5.0.\n\nPlease update your automations and scripts to use other Habitica actions and entities." } }, "services": { @@ -160,6 +395,152 @@ "description": "Any additional JSON or URL parameter arguments. See apidoc mentioned for path. Example uses same API endpoint." } } + }, + "cast_skill": { + "name": "Cast a skill", + "description": "Use a skill or spell from your Habitica character on a specific task to affect its progress or status.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Choose the Habitica character to cast the skill." + }, + "skill": { + "name": "Skill", + "description": "Select the skill or spell you want to cast on the task. Only skills corresponding to your character's class can be used." 
+        },
+        "task": {
+          "name": "Task name",
+          "description": "The name (or task ID) of the task you want to target with the skill or spell."
+        }
+      }
+    },
+    "accept_quest": {
+      "name": "Accept a quest invitation",
+      "description": "Accept a pending invitation to a quest.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "Choose the Habitica character for which to perform the action."
+        }
+      }
+    },
+    "reject_quest": {
+      "name": "Reject a quest invitation",
+      "description": "Reject a pending invitation to a quest.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
+        }
+      }
+    },
+    "leave_quest": {
+      "name": "Leave a quest",
+      "description": "Leave the current quest you are participating in.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
+        }
+      }
+    },
+    "abort_quest": {
+      "name": "Abort an active quest",
+      "description": "Terminate your party's ongoing quest. All progress will be lost and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
+        }
+      }
+    },
+    "cancel_quest": {
+      "name": "Cancel a pending quest",
+      "description": "Cancel a quest that has not yet started. All accepted and pending invitations will be canceled and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
+        }
+      }
+    },
+    "start_quest": {
+      "name": "Force-start a pending quest",
+      "description": "Begin the quest immediately, bypassing any pending invitations that haven't been accepted or rejected. Only quest leader or group leader can perform this action.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
+        }
+      }
+    },
+    "score_habit": {
+      "name": "Track a habit",
+      "description": "Increase the positive or negative streak of a habit to track its progress.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "Select the Habitica character tracking your habit."
+        },
+        "task": {
+          "name": "Habit name",
+          "description": "The name (or task ID) of the Habitica habit."
+        },
+        "direction": {
+          "name": "Reward or loss",
+          "description": "Whether you want to track positive or negative progress for your habit."
+        }
+      }
+    },
+    "score_reward": {
+      "name": "Buy a reward",
+      "description": "Reward yourself and buy one of your custom rewards with gold earned by fulfilling tasks.",
+      "fields": {
+        "config_entry": {
+          "name": "[%key:component::habitica::common::config_entry_name%]",
+          "description": "Select the Habitica character buying the reward."
+        },
+        "task": {
+          "name": "Reward name",
+          "description": "The name (or task ID) of the custom reward."
+        }
+      }
+    },
+    "transformation": {
+      "name": "Use a transformation item",
+      "description": "Use a transformation item from your Habitica character's inventory on a member of your party or yourself.",
+      "fields": {
+        "config_entry": {
+          "name": "Select character",
+          "description": "Choose the Habitica character to use the transformation item."
+        },
+        "item": {
+          "name": "Transformation item",
+          "description": "Select the transformation item you want to use. The item must be in the character's inventory."
+        },
+        "target": {
+          "name": "Target character",
+          "description": "The name of the character you want to use the transformation item on. You can also specify the player's username or user ID."
+        }
+      }
+    }
+  },
+  "selector": {
+    "skill_select": {
+      "options": {
+        "fireball": "Mage: Burst of flames",
+        "pickpocket": "Rogue: Pickpocket",
+        "backstab": "Rogue: Backstab",
+        "smash": "Warrior: Brutal smash"
+      }
+    },
+    "transformation_item_select": {
+      "options": {
+        "snowball": "Snowball",
+        "spooky_sparkles": "Spooky sparkles",
+        "seafoam": "Seafoam",
+        "shiny_seed": "Shiny seed"
+      }
+    }
+  }
 }
diff --git a/homeassistant/components/habitica/switch.py b/homeassistant/components/habitica/switch.py
index c83d2332030..de0cc533050 100644
--- a/homeassistant/components/habitica/switch.py
+++ b/homeassistant/components/habitica/switch.py
@@ -15,9 +15,11 @@ from homeassistant.components.switch import (
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
-from . import HabiticaConfigEntry
 from .coordinator import HabiticaData, HabiticaDataUpdateCoordinator
 from .entity import HabiticaBase
+from .types import HabiticaConfigEntry
+
+PARALLEL_UPDATES = 1
 
 
 @dataclass(kw_only=True, frozen=True)
diff --git a/homeassistant/components/habitica/todo.py b/homeassistant/components/habitica/todo.py
index ae739d47262..0ca5f723c45 100644
--- a/homeassistant/components/habitica/todo.py
+++ b/homeassistant/components/habitica/todo.py
@@ -21,12 +21,14 @@ from homeassistant.helpers.entity import EntityDescription
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.util import dt as dt_util
 
-from .
import HabiticaConfigEntry from .const import ASSETS_URL, DOMAIN from .coordinator import HabiticaDataUpdateCoordinator from .entity import HabiticaBase +from .types import HabiticaConfigEntry, HabiticaTaskType from .util import next_due_date +PARALLEL_UPDATES = 1 + class HabiticaTodoList(StrEnum): """Habitica Entities.""" @@ -37,15 +39,6 @@ class HabiticaTodoList(StrEnum): REWARDS = "rewards" -class HabiticaTaskType(StrEnum): - """Habitica Entities.""" - - HABIT = "habit" - DAILY = "daily" - TODO = "todo" - REWARD = "reward" - - async def async_setup_entry( hass: HomeAssistant, config_entry: HabiticaConfigEntry, diff --git a/homeassistant/components/habitica/types.py b/homeassistant/components/habitica/types.py new file mode 100644 index 00000000000..9789a65dc40 --- /dev/null +++ b/homeassistant/components/habitica/types.py @@ -0,0 +1,18 @@ +"""Types for Habitica integration.""" + +from enum import StrEnum + +from homeassistant.config_entries import ConfigEntry + +from .coordinator import HabiticaDataUpdateCoordinator + +type HabiticaConfigEntry = ConfigEntry[HabiticaDataUpdateCoordinator] + + +class HabiticaTaskType(StrEnum): + """Habitica Entities.""" + + HABIT = "habit" + DAILY = "daily" + TODO = "todo" + REWARD = "reward" diff --git a/homeassistant/components/habitica/util.py b/homeassistant/components/habitica/util.py index b3241aa5787..b2b4430c490 100644 --- a/homeassistant/components/habitica/util.py +++ b/homeassistant/components/habitica/util.py @@ -3,7 +3,23 @@ from __future__ import annotations import datetime -from typing import Any +from math import floor +from typing import TYPE_CHECKING, Any + +from dateutil.rrule import ( + DAILY, + FR, + MO, + MONTHLY, + SA, + SU, + TH, + TU, + WE, + WEEKLY, + YEARLY, + rrule, +) from homeassistant.components.automation import automations_with_entity from homeassistant.components.script import scripts_with_entity @@ -14,25 +30,47 @@ from homeassistant.util import dt as dt_util def next_due_date(task: dict[str, Any], last_cron: str) -> datetime.date | None: """Calculate due date for dailies and yesterdailies.""" + if task["everyX"] == 0 or not task.get("nextDue"): # grey dailies never become due + return None + + today = to_date(last_cron) + startdate = to_date(task["startDate"]) + if TYPE_CHECKING: + assert today + assert startdate + if task["isDue"] and not task["completed"]: - return dt_util.as_local(datetime.datetime.fromisoformat(last_cron)).date() + return to_date(last_cron) + + if startdate > today: + if task["frequency"] == "daily" or ( + task["frequency"] in ("monthly", "yearly") and task["daysOfMonth"] + ): + return startdate + + if ( + task["frequency"] in ("weekly", "monthly") + and (nextdue := to_date(task["nextDue"][0])) + and startdate > nextdue + ): + return to_date(task["nextDue"][1]) + + return to_date(task["nextDue"][0]) + + +def to_date(date: str) -> datetime.date | None: + """Convert an iso date to a datetime.date object.""" try: - return dt_util.as_local( - datetime.datetime.fromisoformat(task["nextDue"][0]) - ).date() + return dt_util.as_local(datetime.datetime.fromisoformat(date)).date() except ValueError: - # sometimes nextDue dates are in this format instead of iso: + # sometimes nextDue dates are JavaScript datetime strings instead of iso: # "Mon May 06 2024 00:00:00 GMT+0200" try: return dt_util.as_local( - datetime.datetime.strptime( - task["nextDue"][0], "%a %b %d %Y %H:%M:%S %Z%z" - ) + datetime.datetime.strptime(date, "%a %b %d %Y %H:%M:%S %Z%z") ).date() except ValueError: return None - except IndexError: - 
return None def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]: @@ -40,3 +78,114 @@ def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]: used_in = automations_with_entity(hass, entity_id) used_in += scripts_with_entity(hass, entity_id) return used_in + + +FREQUENCY_MAP = {"daily": DAILY, "weekly": WEEKLY, "monthly": MONTHLY, "yearly": YEARLY} +WEEKDAY_MAP = {"m": MO, "t": TU, "w": WE, "th": TH, "f": FR, "s": SA, "su": SU} + + +def build_rrule(task: dict[str, Any]) -> rrule: + """Build rrule string.""" + + rrule_frequency = FREQUENCY_MAP.get(task["frequency"], DAILY) + weekdays = [ + WEEKDAY_MAP[day] for day, is_active in task["repeat"].items() if is_active + ] + bymonthday = ( + task["daysOfMonth"] + if rrule_frequency == MONTHLY and task["daysOfMonth"] + else None + ) + + bysetpos = None + if rrule_frequency == MONTHLY and task["weeksOfMonth"]: + bysetpos = task["weeksOfMonth"] + weekdays = weekdays if weekdays else [MO] + + return rrule( + freq=rrule_frequency, + interval=task["everyX"], + dtstart=dt_util.start_of_local_day( + datetime.datetime.fromisoformat(task["startDate"]) + ), + byweekday=weekdays if rrule_frequency in [WEEKLY, MONTHLY] else None, + bymonthday=bymonthday, + bysetpos=bysetpos, + ) + + +def get_recurrence_rule(recurrence: rrule) -> str: + r"""Extract and return the recurrence rule portion of an RRULE. + + This function takes an RRULE representing a task's recurrence pattern, + builds the RRULE string, and extracts the recurrence rule part. + + 'DTSTART:YYYYMMDDTHHMMSS\nRRULE:FREQ=YEARLY;INTERVAL=2' + + Parameters + ---------- + recurrence : rrule + An RRULE object. + + Returns + ------- + str + The recurrence rule portion of the RRULE string, starting with 'FREQ='. + + Example + ------- + >>> rule = get_recurrence_rule(task) + >>> print(rule) + 'FREQ=YEARLY;INTERVAL=2' + + """ + return str(recurrence).split("RRULE:")[1] + + +def get_attribute_points( + user: dict[str, Any], content: dict[str, Any], attribute: str +) -> dict[str, float]: + """Get modifiers contributing to strength attribute.""" + + gear_set = { + "weapon", + "armor", + "head", + "shield", + "back", + "headAccessory", + "eyewear", + "body", + } + + equipment = sum( + stats[attribute] + for gear in gear_set + if (equipped := user["items"]["gear"]["equipped"].get(gear)) + and (stats := content["gear"]["flat"].get(equipped)) + ) + + class_bonus = sum( + stats[attribute] / 2 + for gear in gear_set + if (equipped := user["items"]["gear"]["equipped"].get(gear)) + and (stats := content["gear"]["flat"].get(equipped)) + and stats["klass"] == user["stats"]["class"] + ) + + return { + "level": min(floor(user["stats"]["lvl"] / 2), 50), + "equipment": equipment, + "class": class_bonus, + "allocated": user["stats"][attribute], + "buffs": user["stats"]["buffs"][attribute], + } + + +def get_attributes_total( + user: dict[str, Any], content: dict[str, Any], attribute: str +) -> int: + """Get total attribute points.""" + return floor( + sum(value for value in get_attribute_points(user, content, attribute).values()) + ) diff --git a/homeassistant/components/hardkernel/__init__.py b/homeassistant/components/hardkernel/__init__.py index 5d70f6cbfe0..66d2fa9d154 100644 --- a/homeassistant/components/hardkernel/__init__.py +++ b/homeassistant/components/hardkernel/__init__.py @@ -2,10 +2,11 @@ from __future__ import annotations -from homeassistant.components.hassio import get_os_info, is_hassio +from homeassistant.components.hassio import get_os_info from homeassistant.config_entries import 
ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.hassio import is_hassio async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/hardkernel/config_flow.py b/homeassistant/components/hardkernel/config_flow.py index cf70adae55a..5fa3611aa86 100644 --- a/homeassistant/components/hardkernel/config_flow.py +++ b/homeassistant/components/hardkernel/config_flow.py @@ -18,7 +18,4 @@ class HardkernelConfigFlow(ConfigFlow, domain=DOMAIN): self, data: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Hardkernel", data={}) diff --git a/homeassistant/components/hardkernel/manifest.json b/homeassistant/components/hardkernel/manifest.json index 2a528a5173e..aca1b207f4f 100644 --- a/homeassistant/components/hardkernel/manifest.json +++ b/homeassistant/components/hardkernel/manifest.json @@ -6,5 +6,6 @@ "config_flow": false, "dependencies": ["hardware"], "documentation": "https://www.home-assistant.io/integrations/hardkernel", - "integration_type": "hardware" + "integration_type": "hardware", + "single_config_entry": true } diff --git a/homeassistant/components/harman_kardon_avr/manifest.json b/homeassistant/components/harman_kardon_avr/manifest.json index c28504cf2d8..e56aeebafe4 100644 --- a/homeassistant/components/harman_kardon_avr/manifest.json +++ b/homeassistant/components/harman_kardon_avr/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/harman_kardon_avr", "iot_class": "local_polling", "loggers": ["hkavr"], + "quality_scale": "legacy", "requirements": ["hkavr==0.0.5"] } diff --git a/homeassistant/components/harmony/__init__.py b/homeassistant/components/harmony/__init__.py index 12f7d903f0d..e4b6f1c7c2c 100644 --- a/homeassistant/components/harmony/__init__.py +++ b/homeassistant/components/harmony/__init__.py @@ -8,7 +8,7 @@ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import DOMAIN, HARMONY_OPTIONS_UPDATE, PLATFORMS # noqa: F401 +from .const import HARMONY_OPTIONS_UPDATE, PLATFORMS from .data import HarmonyConfigEntry, HarmonyData _LOGGER = logging.getLogger(__name__) @@ -59,7 +59,7 @@ async def _migrate_old_unique_ids( activity_id = names_to_ids.get(activity_name) if activity_id is not None: - _LOGGER.info( + _LOGGER.debug( "Migrating unique_id from [%s] to [%s]", entity_entry.unique_id, activity_id, diff --git a/homeassistant/components/harmony/config_flow.py b/homeassistant/components/harmony/config_flow.py index 87eb657a0a9..b75ad617b39 100644 --- a/homeassistant/components/harmony/config_flow.py +++ b/homeassistant/components/harmony/config_flow.py @@ -28,7 +28,6 @@ from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from .const import DOMAIN, PREVIOUS_ACTIVE_ACTIVITY, UNIQUE_ID -from .data import HarmonyConfigEntry from .util import ( find_best_name_for_remote, find_unique_id_for_remote, @@ -156,7 +155,7 @@ class HarmonyConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def 
_async_create_entry_from_valid_input( self, validated: dict[str, Any], user_input: dict[str, Any] @@ -186,10 +185,6 @@ def _options_from_user_input(user_input: dict[str, Any]) -> dict[str, Any]: class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Harmony.""" - def __init__(self, config_entry: HarmonyConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/harmony/icons.json b/homeassistant/components/harmony/icons.json index f96fd985323..b6fe0d8c42e 100644 --- a/homeassistant/components/harmony/icons.json +++ b/homeassistant/components/harmony/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "sync": "mdi:sync", - "change_channel": "mdi:remote-tv" + "sync": { + "service": "mdi:sync" + }, + "change_channel": { + "service": "mdi:remote-tv" + } } } diff --git a/homeassistant/components/hassio/__init__.py b/homeassistant/components/hassio/__init__.py index 647c2248d56..a2a9d8ff028 100644 --- a/homeassistant/components/hassio/__init__.py +++ b/homeassistant/components/hassio/__init__.py @@ -5,11 +5,13 @@ from __future__ import annotations import asyncio from contextlib import suppress from datetime import datetime +from functools import partial import logging import os import re from typing import Any, NamedTuple +from aiohasupervisor import SupervisorError import voluptuous as vol from homeassistant.auth.const import GROUP_ID_ADMIN @@ -37,7 +39,22 @@ from homeassistant.helpers import ( discovery_flow, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.deprecation import ( + DeprecatedConstant, + all_with_deprecated_constants, + check_if_deprecated_constant, + deprecated_function, + dir_with_deprecated_constants, +) from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.hassio import ( + get_supervisor_ip as _get_supervisor_ip, + is_hassio as _is_hassio, +) +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.service_info.hassio import ( + HassioServiceInfo as _HassioServiceInfo, +) from homeassistant.helpers.storage import Store from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass @@ -92,34 +109,20 @@ from .coordinator import ( get_info, # noqa: F401 get_issues_info, # noqa: F401 get_os_info, - get_store, # noqa: F401 get_supervisor_info, # noqa: F401 get_supervisor_stats, # noqa: F401 ) -from .discovery import HassioServiceInfo, async_setup_discovery_view # noqa: F401 +from .discovery import async_setup_discovery_view # noqa: F401 from .handler import ( # noqa: F401 HassIO, HassioAPIError, async_create_backup, - async_get_addon_discovery_info, - async_get_addon_info, - async_get_addon_store_info, async_get_green_settings, async_get_yellow_settings, - async_install_addon, - async_reboot_host, - async_restart_addon, - async_set_addon_options, async_set_green_settings, async_set_yellow_settings, - async_start_addon, - async_stop_addon, - async_uninstall_addon, - async_update_addon, - async_update_core, async_update_diagnostics, - async_update_os, - async_update_supervisor, + get_supervisor_client, ) from .http import HassIOView from .ingress import async_setup_ingress_view @@ -128,6 +131,14 @@ from .websocket_api import async_load_websocket_api _LOGGER = logging.getLogger(__name__) +get_supervisor_ip = deprecated_function( + 
"homeassistant.helpers.hassio.get_supervisor_ip", breaks_in_ha_version="2025.11" +)(_get_supervisor_ip) +_DEPRECATED_HassioServiceInfo = DeprecatedConstant( + _HassioServiceInfo, + "homeassistant.helpers.service_info.hassio.HassioServiceInfo", + "2025.11", +) STORAGE_KEY = DOMAIN STORAGE_VERSION = 1 @@ -283,21 +294,16 @@ def hostname_from_addon_slug(addon_slug: str) -> str: @callback +@deprecated_function( + "homeassistant.helpers.hassio.is_hassio", breaks_in_ha_version="2025.11" +) @bind_hass def is_hassio(hass: HomeAssistant) -> bool: """Return true if Hass.io is loaded. Async friendly. """ - return DOMAIN in hass.config.components - - -@callback -def get_supervisor_ip() -> str | None: - """Return the supervisor ip address.""" - if "SUPERVISOR" not in os.environ: - return None - return os.environ["SUPERVISOR"].partition(":")[0] + return _is_hassio(hass) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901 @@ -318,8 +324,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: host = os.environ["SUPERVISOR"] websession = async_get_clientsession(hass) hass.data[DOMAIN] = hassio = HassIO(hass.loop, websession, host) + supervisor_client = get_supervisor_client(hass) - if not await hassio.is_connected(): + try: + await supervisor_client.supervisor.ping() + except SupervisorError: _LOGGER.warning("Not connected with the supervisor / system too busy!") store = Store[dict[str, str]](hass, STORAGE_VERSION, STORAGE_KEY) @@ -399,6 +408,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: async def async_service_handler(service: ServiceCall) -> None: """Handle service calls for Hass.io.""" + if service.service == SERVICE_ADDON_UPDATE: + async_create_issue( + hass, + DOMAIN, + "update_service_deprecated", + breaks_in_ha_version="2025.5", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="update_service_deprecated", + ) api_endpoint = MAP_SERVICE_API[service.service] data = service.data.copy() @@ -428,12 +447,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: async def update_info_data(_: datetime | None = None) -> None: """Update last available supervisor information.""" + supervisor_client = get_supervisor_client(hass) try: ( hass.data[DATA_INFO], hass.data[DATA_HOST_INFO], - hass.data[DATA_STORE], + store_info, hass.data[DATA_CORE_INFO], hass.data[DATA_SUPERVISOR_INFO], hass.data[DATA_OS_INFO], @@ -441,7 +461,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: ) = await asyncio.gather( create_eager_task(hassio.get_info()), create_eager_task(hassio.get_host_info()), - create_eager_task(hassio.get_store()), + create_eager_task(supervisor_client.store.info()), create_eager_task(hassio.get_core_info()), create_eager_task(hassio.get_supervisor_info()), create_eager_task(hassio.get_os_info()), @@ -450,6 +470,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: except HassioAPIError as err: _LOGGER.warning("Can't read Supervisor data: %s", err) + else: + hass.data[DATA_STORE] = store_info.to_dict() async_call_later( hass, @@ -463,9 +485,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: async def _async_stop(hass: HomeAssistant, restart: bool) -> None: """Stop or restart home assistant.""" if restart: - await hassio.restart_homeassistant() + await supervisor_client.homeassistant.restart() else: - await hassio.stop_homeassistant() + await 
supervisor_client.homeassistant.stop() # Set a custom handler for the homeassistant.restart and homeassistant.stop services async_set_stop_handler(hass, _async_stop) @@ -546,3 +568,11 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data.pop(ADDONS_COORDINATOR, None) return unload_ok + + +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/hassio/addon_manager.py b/homeassistant/components/hassio/addon_manager.py index b3c43f16be1..db81e17e48d 100644 --- a/homeassistant/components/hassio/addon_manager.py +++ b/homeassistant/components/hassio/addon_manager.py @@ -10,23 +10,18 @@ from functools import partial, wraps import logging from typing import Any, Concatenate +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ( + AddonsOptions, + AddonState as SupervisorAddonState, + InstalledAddonComplete, + StoreAddonUpdate, +) + from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from .handler import ( - HassioAPIError, - async_create_backup, - async_get_addon_discovery_info, - async_get_addon_info, - async_get_addon_store_info, - async_install_addon, - async_restart_addon, - async_set_addon_options, - async_start_addon, - async_stop_addon, - async_uninstall_addon, - async_update_addon, -) +from .handler import HassioAPIError, async_create_backup, get_supervisor_client type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Awaitable[_R]] type _ReturnFuncType[_T, **_P, _R] = Callable[ @@ -36,10 +31,13 @@ type _ReturnFuncType[_T, **_P, _R] = Callable[ def api_error[_AddonManagerT: AddonManager, **_P, _R]( error_message: str, + *, + expected_error_type: type[HassioAPIError | SupervisorError] | None = None, ) -> Callable[ [_FuncType[_AddonManagerT, _P, _R]], _ReturnFuncType[_AddonManagerT, _P, _R] ]: """Handle HassioAPIError and raise a specific AddonError.""" + error_type = expected_error_type or (HassioAPIError, SupervisorError) def handle_hassio_api_error( func: _FuncType[_AddonManagerT, _P, _R], @@ -53,7 +51,7 @@ def api_error[_AddonManagerT: AddonManager, **_P, _R]( """Wrap an add-on manager method.""" try: return_value = await func(self, *args, **kwargs) - except HassioAPIError as err: + except error_type as err: raise AddonError( f"{error_message.format(addon_name=self.addon_name)}: {err}" ) from err @@ -111,6 +109,7 @@ class AddonManager: self._restart_task: asyncio.Task | None = None self._start_task: asyncio.Task | None = None self._update_task: asyncio.Task | None = None + self._supervisor_client = get_supervisor_client(hass) def task_in_progress(self) -> bool: """Return True if any of the add-on tasks are in progress.""" @@ -124,27 +123,39 @@ class AddonManager: ) ) - @api_error("Failed to get the {addon_name} add-on discovery info") + @api_error( + "Failed to get the {addon_name} add-on discovery info", + expected_error_type=SupervisorError, + ) async def async_get_addon_discovery_info(self) -> dict: """Return add-on discovery info.""" - discovery_info = await async_get_addon_discovery_info( - self._hass, self.addon_slug + discovery_info = next( + ( + msg + for msg in await self._supervisor_client.discovery.list() + if msg.addon == self.addon_slug + ), + None, ) if not 
discovery_info: raise AddonError(f"Failed to get {self.addon_name} add-on discovery info") - discovery_info_config: dict = discovery_info["config"] - return discovery_info_config + return discovery_info.config - @api_error("Failed to get the {addon_name} add-on info") + @api_error( + "Failed to get the {addon_name} add-on info", + expected_error_type=SupervisorError, + ) async def async_get_addon_info(self) -> AddonInfo: """Return and cache manager add-on info.""" - addon_store_info = await async_get_addon_store_info(self._hass, self.addon_slug) - self._logger.debug("Add-on store info: %s", addon_store_info) - if not addon_store_info["installed"]: + addon_store_info = await self._supervisor_client.store.addon_info( + self.addon_slug + ) + self._logger.debug("Add-on store info: %s", addon_store_info.to_dict()) + if not addon_store_info.installed: return AddonInfo( - available=addon_store_info["available"], + available=addon_store_info.available, hostname=None, options={}, state=AddonState.NOT_INSTALLED, @@ -152,23 +163,23 @@ class AddonManager: version=None, ) - addon_info = await async_get_addon_info(self._hass, self.addon_slug) + addon_info = await self._supervisor_client.addons.addon_info(self.addon_slug) addon_state = self.async_get_addon_state(addon_info) return AddonInfo( - available=addon_info["available"], - hostname=addon_info["hostname"], - options=addon_info["options"], + available=addon_info.available, + hostname=addon_info.hostname, + options=addon_info.options, state=addon_state, - update_available=addon_info["update_available"], - version=addon_info["version"], + update_available=addon_info.update_available, + version=addon_info.version, ) @callback - def async_get_addon_state(self, addon_info: dict[str, Any]) -> AddonState: + def async_get_addon_state(self, addon_info: InstalledAddonComplete) -> AddonState: """Return the current state of the managed add-on.""" addon_state = AddonState.NOT_RUNNING - if addon_info["state"] == "started": + if addon_info.state == SupervisorAddonState.STARTED: addon_state = AddonState.RUNNING if self._install_task and not self._install_task.done(): addon_state = AddonState.INSTALLING @@ -177,31 +188,39 @@ class AddonManager: return addon_state - @api_error("Failed to set the {addon_name} add-on options") + @api_error( + "Failed to set the {addon_name} add-on options", + expected_error_type=SupervisorError, + ) async def async_set_addon_options(self, config: dict) -> None: """Set manager add-on options.""" - options = {"options": config} - await async_set_addon_options(self._hass, self.addon_slug, options) + await self._supervisor_client.addons.set_addon_options( + self.addon_slug, AddonsOptions(config=config) + ) def _check_addon_available(self, addon_info: AddonInfo) -> None: """Check if the managed add-on is available.""" - if not addon_info.available: raise AddonError(f"{self.addon_name} add-on is not available") - @api_error("Failed to install the {addon_name} add-on") + @api_error( + "Failed to install the {addon_name} add-on", expected_error_type=SupervisorError + ) async def async_install_addon(self) -> None: """Install the managed add-on.""" addon_info = await self.async_get_addon_info() self._check_addon_available(addon_info) - await async_install_addon(self._hass, self.addon_slug) + await self._supervisor_client.store.install_addon(self.addon_slug) - @api_error("Failed to uninstall the {addon_name} add-on") + @api_error( + "Failed to uninstall the {addon_name} add-on", + expected_error_type=SupervisorError, + ) async def 
async_uninstall_addon(self) -> None: """Uninstall the managed add-on.""" - await async_uninstall_addon(self._hass, self.addon_slug) + await self._supervisor_client.addons.uninstall_addon(self.addon_slug) @api_error("Failed to update the {addon_name} add-on") async def async_update_addon(self) -> None: @@ -217,22 +236,30 @@ class AddonManager: return await self.async_create_backup() - await async_update_addon(self._hass, self.addon_slug) + await self._supervisor_client.store.update_addon( + self.addon_slug, StoreAddonUpdate(backup=False) + ) - @api_error("Failed to start the {addon_name} add-on") + @api_error( + "Failed to start the {addon_name} add-on", expected_error_type=SupervisorError + ) async def async_start_addon(self) -> None: """Start the managed add-on.""" - await async_start_addon(self._hass, self.addon_slug) + await self._supervisor_client.addons.start_addon(self.addon_slug) - @api_error("Failed to restart the {addon_name} add-on") + @api_error( + "Failed to restart the {addon_name} add-on", expected_error_type=SupervisorError + ) async def async_restart_addon(self) -> None: """Restart the managed add-on.""" - await async_restart_addon(self._hass, self.addon_slug) + await self._supervisor_client.addons.restart_addon(self.addon_slug) - @api_error("Failed to stop the {addon_name} add-on") + @api_error( + "Failed to stop the {addon_name} add-on", expected_error_type=SupervisorError + ) async def async_stop_addon(self) -> None: """Stop the managed add-on.""" - await async_stop_addon(self._hass, self.addon_slug) + await self._supervisor_client.addons.stop_addon(self.addon_slug) @api_error("Failed to create a backup of the {addon_name} add-on") async def async_create_backup(self) -> None: diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py new file mode 100644 index 00000000000..4bc6dff44d2 --- /dev/null +++ b/homeassistant/components/hassio/backup.py @@ -0,0 +1,436 @@ +"""Backup functionality for supervised installations.""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncIterator, Callable, Coroutine, Mapping +import logging +from pathlib import Path +from typing import Any, cast + +from aiohasupervisor.exceptions import ( + SupervisorBadRequestError, + SupervisorNotFoundError, +) +from aiohasupervisor.models import ( + backups as supervisor_backups, + mounts as supervisor_mounts, +) + +from homeassistant.components.backup import ( + DATA_MANAGER, + AddonInfo, + AgentBackup, + BackupAgent, + BackupReaderWriter, + CreateBackupEvent, + Folder, + NewBackup, + WrittenBackup, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.dispatcher import async_dispatcher_connect + +from .const import DOMAIN, EVENT_SUPERVISOR_EVENT +from .handler import get_supervisor_client + +LOCATION_CLOUD_BACKUP = ".cloud_backup" +MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount") +_LOGGER = logging.getLogger(__name__) + + +async def async_get_backup_agents( + hass: HomeAssistant, + **kwargs: Any, +) -> list[BackupAgent]: + """Return the hassio backup agents.""" + client = get_supervisor_client(hass) + mounts = await client.mounts.info() + agents: list[BackupAgent] = [SupervisorBackupAgent(hass, "local", None)] + for mount in mounts.mounts: + if mount.usage is not supervisor_mounts.MountUsage.BACKUP: + continue + agents.append(SupervisorBackupAgent(hass, mount.name, mount.name)) + return agents + + +@callback 
+def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed.""" + + @callback + def unsub() -> None: + """Unsubscribe from job events.""" + unsub_signal() + + @callback + def handle_signal(data: Mapping[str, Any]) -> None: + """Handle a job signal.""" + if ( + data.get("event") != "job" + or not (event_data := data.get("data")) + or event_data.get("name") not in MOUNT_JOBS + or event_data.get("done") is not True + ): + return + _LOGGER.debug("Mount added or removed %s, calling listener", data) + listener() + + unsub_signal = async_dispatcher_connect(hass, EVENT_SUPERVISOR_EVENT, handle_signal) + return unsub + + +def _backup_details_to_agent_backup( + details: supervisor_backups.BackupComplete, +) -> AgentBackup: + """Convert a supervisor backup details object to an agent backup.""" + homeassistant_included = details.homeassistant is not None + if not homeassistant_included: + database_included = False + else: + database_included = details.homeassistant_exclude_database is False + addons = [ + AddonInfo(name=addon.name, slug=addon.slug, version=addon.version) + for addon in details.addons + ] + return AgentBackup( + addons=addons, + backup_id=details.slug, + database_included=database_included, + date=details.date.isoformat(), + folders=[Folder(folder) for folder in details.folders], + homeassistant_included=homeassistant_included, + homeassistant_version=details.homeassistant, + name=details.name, + protected=details.protected, + size=details.size_bytes, + ) + + +class SupervisorBackupAgent(BackupAgent): + """Backup agent for supervised installations.""" + + domain = DOMAIN + + def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None: + """Initialize the backup agent.""" + super().__init__() + self._hass = hass + self._backup_dir = Path("/backups") + self._client = get_supervisor_client(hass) + self.name = name + self.location = location + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + return await self._client.backups.download_backup( + backup_id, + options=supervisor_backups.DownloadBackupOptions(location=self.location), + ) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. + + Not required for supervisor, the SupervisorBackupReaderWriter stores files. 
+ """ + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + backup_list = await self._client.backups.list() + result = [] + for backup in backup_list: + if not backup.locations or self.location not in backup.locations: + continue + details = await self._client.backups.backup_info(backup.slug) + result.append(_backup_details_to_agent_backup(details)) + return result + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + details = await self._client.backups.backup_info(backup_id) + if self.location not in details.locations: + return None + return _backup_details_to_agent_backup(details) + + async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: + """Remove a backup.""" + try: + await self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={self.location} + ), + ) + except SupervisorBadRequestError as err: + if err.args[0] != "Backup does not exist": + raise + _LOGGER.debug("Backup %s does not exist", backup_id) + except SupervisorNotFoundError: + _LOGGER.debug("Backup %s does not exist", backup_id) + + +class SupervisorBackupReaderWriter(BackupReaderWriter): + """Class for reading and writing backups in supervised installations.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the backup reader/writer.""" + self._hass = hass + self._client = get_supervisor_client(hass) + + async def async_create_backup( + self, + *, + agent_ids: list[str], + backup_name: str, + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + on_progress: Callable[[CreateBackupEvent], None], + password: str | None, + ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]: + """Create a backup.""" + manager = self._hass.data[DATA_MANAGER] + + include_addons_set: supervisor_backups.AddonSet | set[str] | None = None + if include_all_addons: + include_addons_set = supervisor_backups.AddonSet.ALL + elif include_addons: + include_addons_set = set(include_addons) + include_folders_set = ( + {supervisor_backups.Folder(folder) for folder in include_folders} + if include_folders + else None + ) + + hassio_agents: list[SupervisorBackupAgent] = [ + cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) + for agent_id in agent_ids + if manager.backup_agents[agent_id].domain == DOMAIN + ] + locations = [agent.location for agent in hassio_agents] + + backup = await self._client.backups.partial_backup( + supervisor_backups.PartialBackupOptions( + addons=include_addons_set, + folders=include_folders_set, + homeassistant=include_homeassistant, + name=backup_name, + password=password, + compressed=True, + location=locations or LOCATION_CLOUD_BACKUP, + homeassistant_exclude_database=not include_database, + background=True, + ) + ) + backup_task = self._hass.async_create_task( + self._async_wait_for_backup( + backup, remove_after_upload=not bool(locations) + ), + name="backup_manager_create_backup", + eager_start=False, # To ensure the task is not started before we return + ) + + return (NewBackup(backup_job_id=backup.job_id), backup_task) + + async def _async_wait_for_backup( + self, backup: supervisor_backups.NewBackup, *, remove_after_upload: bool + ) -> WrittenBackup: + """Wait for a backup to complete.""" + backup_complete = asyncio.Event() + backup_id: str | None = None + + @callback + def on_progress(data: 
Mapping[str, Any]) -> None: + """Handle backup progress.""" + nonlocal backup_id + if data.get("done") is True: + backup_id = data.get("reference") + backup_complete.set() + + try: + unsub = self._async_listen_job_events(backup.job_id, on_progress) + await backup_complete.wait() + finally: + unsub() + if not backup_id: + raise HomeAssistantError("Backup failed") + + async def open_backup() -> AsyncIterator[bytes]: + return await self._client.backups.download_backup(backup_id) + + async def remove_backup() -> None: + if not remove_after_upload: + return + await self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={LOCATION_CLOUD_BACKUP} + ), + ) + + details = await self._client.backups.backup_info(backup_id) + + return WrittenBackup( + backup=_backup_details_to_agent_backup(details), + open_stream=open_backup, + release_stream=remove_backup, + ) + + async def async_receive_backup( + self, + *, + agent_ids: list[str], + stream: AsyncIterator[bytes], + suggested_filename: str, + ) -> WrittenBackup: + """Receive a backup.""" + manager = self._hass.data[DATA_MANAGER] + + hassio_agents: list[SupervisorBackupAgent] = [ + cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) + for agent_id in agent_ids + if manager.backup_agents[agent_id].domain == DOMAIN + ] + locations = {agent.location for agent in hassio_agents} + + backup_id = await self._client.backups.upload_backup( + stream, + supervisor_backups.UploadBackupOptions( + location=locations or {LOCATION_CLOUD_BACKUP} + ), + ) + + async def open_backup() -> AsyncIterator[bytes]: + return await self._client.backups.download_backup(backup_id) + + async def remove_backup() -> None: + if locations: + return + await self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={LOCATION_CLOUD_BACKUP} + ), + ) + + details = await self._client.backups.backup_info(backup_id) + + return WrittenBackup( + backup=_backup_details_to_agent_backup(details), + open_stream=open_backup, + release_stream=remove_backup, + ) + + async def async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Restore a backup.""" + if restore_homeassistant and not restore_database: + raise HomeAssistantError("Cannot restore Home Assistant without database") + if not restore_homeassistant and restore_database: + raise HomeAssistantError("Cannot restore database without Home Assistant") + restore_addons_set = set(restore_addons) if restore_addons else None + restore_folders_set = ( + {supervisor_backups.Folder(folder) for folder in restore_folders} + if restore_folders + else None + ) + + manager = self._hass.data[DATA_MANAGER] + restore_location: str | None + if manager.backup_agents[agent_id].domain != DOMAIN: + # Download the backup to the supervisor. Supervisor will clean up the backup + # two days after the restore is done. 
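# Note (grounded in the code above): with an empty agent_ids list,
# async_receive_backup matches no hassio agents, so the uploaded stream is
# stored in the hidden LOCATION_CLOUD_BACKUP (".cloud_backup") location and
# the partial restore below is pointed at that location rather than at a
# user-visible backup location.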
+ await self.async_receive_backup( + agent_ids=[], + stream=await open_stream(), + suggested_filename=f"{backup_id}.tar", + ) + restore_location = LOCATION_CLOUD_BACKUP + else: + agent = cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) + restore_location = agent.location + + job = await self._client.backups.partial_restore( + backup_id, + supervisor_backups.PartialRestoreOptions( + addons=restore_addons_set, + folders=restore_folders_set, + homeassistant=restore_homeassistant, + password=password, + background=True, + location=restore_location, + ), + ) + + restore_complete = asyncio.Event() + + @callback + def on_progress(data: Mapping[str, Any]) -> None: + """Handle backup progress.""" + if data.get("done") is True: + restore_complete.set() + + try: + unsub = self._async_listen_job_events(job.job_id, on_progress) + await restore_complete.wait() + finally: + unsub() + + @callback + def _async_listen_job_events( + self, job_id: str, on_event: Callable[[Mapping[str, Any]], None] + ) -> Callable[[], None]: + """Listen for job events.""" + + @callback + def unsub() -> None: + """Unsubscribe from job events.""" + unsub_signal() + + @callback + def handle_signal(data: Mapping[str, Any]) -> None: + """Handle a job signal.""" + if ( + data.get("event") != "job" + or not (event_data := data.get("data")) + or event_data.get("uuid") != job_id + ): + return + on_event(event_data) + + unsub_signal = async_dispatcher_connect( + self._hass, EVENT_SUPERVISOR_EVENT, handle_signal + ) + return unsub diff --git a/homeassistant/components/hassio/config_flow.py b/homeassistant/components/hassio/config_flow.py index 57be400acc7..e8bed912fd7 100644 --- a/homeassistant/components/hassio/config_flow.py +++ b/homeassistant/components/hassio/config_flow.py @@ -18,7 +18,4 @@ class HassIoConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - # We only need one Hass.io config entry - await self.async_set_unique_id(DOMAIN) - self._abort_if_unique_id_configured() return self.async_create_entry(title="Supervisor", data={}) diff --git a/homeassistant/components/hassio/const.py b/homeassistant/components/hassio/const.py index 6e6c9006fca..82ce74832c2 100644 --- a/homeassistant/components/hassio/const.py +++ b/homeassistant/components/hassio/const.py @@ -103,6 +103,7 @@ PLACEHOLDER_KEY_ADDON_URL = "addon_url" PLACEHOLDER_KEY_REFERENCE = "reference" PLACEHOLDER_KEY_COMPONENTS = "components" +ISSUE_KEY_ADDON_BOOT_FAIL = "issue_addon_boot_fail" ISSUE_KEY_SYSTEM_DOCKER_CONFIG = "issue_system_docker_config" ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING = "issue_addon_detached_addon_missing" ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED = "issue_addon_detached_addon_removed" @@ -136,17 +137,3 @@ class SupervisorEntityModel(StrEnum): CORE = "Home Assistant Core" SUPERVIOSR = "Home Assistant Supervisor" HOST = "Home Assistant Host" - - -class SupervisorIssueContext(StrEnum): - """Context for supervisor issues.""" - - ADDON = "addon" - CORE = "core" - DNS_SERVER = "dns_server" - MOUNT = "mount" - OS = "os" - PLUGIN = "plugin" - SUPERVISOR = "supervisor" - STORE = "store" - SYSTEM = "system" diff --git a/homeassistant/components/hassio/coordinator.py b/homeassistant/components/hassio/coordinator.py index 024128f4ef8..cb1dda8aeed 100644 --- a/homeassistant/components/hassio/coordinator.py +++ b/homeassistant/components/hassio/coordinator.py @@ -7,6 +7,9 @@ from collections import defaultdict import logging from typing import TYPE_CHECKING, Any +from 
aiohasupervisor import SupervisorError +from aiohasupervisor.models import StoreInfo + from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback @@ -53,7 +56,7 @@ from .const import ( SUPERVISOR_CONTAINER, SupervisorEntityModel, ) -from .handler import HassIO, HassioAPIError +from .handler import HassIO, HassioAPIError, get_supervisor_client if TYPE_CHECKING: from .issues import SupervisorIssues @@ -315,6 +318,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict( lambda: defaultdict(set) ) + self.supervisor_client = get_supervisor_client(hass) async def _async_update_data(self) -> dict[str, Any]: """Update data via library.""" @@ -330,12 +334,15 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): addons_info = get_addons_info(self.hass) or {} addons_stats = get_addons_stats(self.hass) addons_changelogs = get_addons_changelogs(self.hass) - store_data = get_store(self.hass) or {} + store_data = get_store(self.hass) - repositories = { - repo[ATTR_SLUG]: repo[ATTR_NAME] - for repo in store_data.get("repositories", []) - } + if store_data: + repositories = { + repo.slug: repo.name + for repo in StoreInfo.from_dict(store_data).repositories + } + else: + repositories = {} new_data[DATA_KEY_ADDONS] = { addon[ATTR_SLUG]: { @@ -496,17 +503,17 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]: """Update single addon stats.""" try: - stats = await self.hassio.get_addon_stats(slug) - except HassioAPIError as err: + stats = await self.supervisor_client.addons.addon_stats(slug) + except SupervisorError as err: _LOGGER.warning("Could not fetch stats for %s: %s", slug, err) return (slug, None) - return (slug, stats) + return (slug, stats.to_dict()) async def _update_addon_changelog(self, slug: str) -> tuple[str, str | None]: """Return the changelog for an add-on.""" try: - changelog = await self.hassio.get_addon_changelog(slug) - except HassioAPIError as err: + changelog = await self.supervisor_client.store.addon_changelog(slug) + except SupervisorError as err: _LOGGER.warning("Could not fetch changelog for %s: %s", slug, err) return (slug, None) return (slug, changelog) @@ -514,11 +521,15 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]: """Return the info for an add-on.""" try: - info = await self.hassio.get_addon_info(slug) - except HassioAPIError as err: + info = await self.supervisor_client.addons.addon_info(slug) + except SupervisorError as err: _LOGGER.warning("Could not fetch info for %s: %s", slug, err) return (slug, None) - return (slug, info) + # Translate to legacy hassio names for compatibility + info_dict = info.to_dict() + info_dict["hassio_api"] = info_dict.pop("supervisor_api") + info_dict["hassio_role"] = info_dict.pop("supervisor_role") + return (slug, info_dict) @callback def async_enable_container_updates( @@ -552,8 +563,8 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): # updates if this is not a scheduled refresh and # we are not doing the first refresh. 
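# Note (grounded in the surrounding change): refresh_updates asks the
# Supervisor to refresh its information about available updates; with the
# aiohasupervisor client it is awaited on the client directly and raises
# SupervisorError on failure, replacing the older HassioAPIError-based
# send_command path.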
try: - await self.hassio.refresh_updates() - except HassioAPIError as err: + await self.supervisor_client.refresh_updates() + except SupervisorError as err: _LOGGER.warning("Error on Supervisor API: %s", err) await super()._async_refresh( diff --git a/homeassistant/components/hassio/discovery.py b/homeassistant/components/hassio/discovery.py index 66be8267d53..b51b8e5a8f2 100644 --- a/homeassistant/components/hassio/discovery.py +++ b/homeassistant/components/hassio/discovery.py @@ -3,54 +3,48 @@ from __future__ import annotations import asyncio -from dataclasses import dataclass import logging from typing import Any +from uuid import UUID +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import Discovery from aiohttp import web from aiohttp.web_exceptions import HTTPServiceUnavailable from homeassistant import config_entries from homeassistant.components.http import HomeAssistantView -from homeassistant.const import ATTR_NAME, ATTR_SERVICE, EVENT_HOMEASSISTANT_START +from homeassistant.const import ATTR_SERVICE, EVENT_HOMEASSISTANT_START from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.data_entry_flow import BaseServiceInfo from homeassistant.helpers import discovery_flow +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.service_info.hassio import HassioServiceInfo -from .const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_UUID -from .handler import HassIO, HassioAPIError +from .const import ATTR_ADDON, ATTR_UUID, DOMAIN +from .handler import HassIO, get_supervisor_client _LOGGER = logging.getLogger(__name__) -@dataclass(slots=True) -class HassioServiceInfo(BaseServiceInfo): - """Prepared info from hassio entries.""" - - config: dict[str, Any] - name: str - slug: str - uuid: str - - @callback def async_setup_discovery_view(hass: HomeAssistant, hassio: HassIO) -> None: """Discovery setup.""" hassio_discovery = HassIODiscovery(hass, hassio) + supervisor_client = get_supervisor_client(hass) hass.http.register_view(hassio_discovery) # Handle exists discovery messages async def _async_discovery_start_handler(event: Event) -> None: """Process all exists discovery on startup.""" try: - data = await hassio.retrieve_discovery_messages() - except HassioAPIError as err: + data = await supervisor_client.discovery.list() + except SupervisorError as err: _LOGGER.error("Can't read discover info: %s", err) return jobs = [ asyncio.create_task(hassio_discovery.async_process_new(discovery)) - for discovery in data[ATTR_DISCOVERY] + for discovery in data ] if jobs: await asyncio.wait(jobs) @@ -59,6 +53,23 @@ def async_setup_discovery_view(hass: HomeAssistant, hassio: HassIO) -> None: EVENT_HOMEASSISTANT_START, _async_discovery_start_handler ) + async def _handle_config_entry_removed( + entry: config_entries.ConfigEntry, + ) -> None: + """Handle config entry changes.""" + for disc_key in entry.discovery_keys[DOMAIN]: + if disc_key.version != 1 or not isinstance(key := disc_key.key, str): + continue + uuid = key + _LOGGER.debug("Rediscover addon %s", uuid) + await hassio_discovery.async_rediscover(uuid) + + async_dispatcher_connect( + hass, + config_entries.signal_discovered_config_entry_removed(DOMAIN), + _handle_config_entry_removed, + ) + class HassIODiscovery(HomeAssistantView): """Hass.io view to handle base part.""" @@ -70,13 +81,14 @@ class HassIODiscovery(HomeAssistantView): """Initialize WebView.""" self.hass = hass self.hassio = hassio + self._supervisor_client = 
get_supervisor_client(hass) async def post(self, request: web.Request, uuid: str) -> web.Response: """Handle new discovery requests.""" # Fetch discovery data and prevent injections try: - data = await self.hassio.get_discovery_message(uuid) - except HassioAPIError as err: + data = await self._supervisor_client.discovery.get(UUID(uuid)) + except SupervisorError as err: _LOGGER.error("Can't read discovery data: %s", err) raise HTTPServiceUnavailable from None @@ -90,40 +102,53 @@ class HassIODiscovery(HomeAssistantView): await self.async_process_del(data) return web.Response() - async def async_process_new(self, data: dict[str, Any]) -> None: - """Process add discovery entry.""" - service: str = data[ATTR_SERVICE] - config_data: dict[str, Any] = data[ATTR_CONFIG] - slug: str = data[ATTR_ADDON] - uuid: str = data[ATTR_UUID] + async def async_rediscover(self, uuid: str) -> None: + """Rediscover add-on when config entry is removed.""" + try: + data = await self._supervisor_client.discovery.get(UUID(uuid)) + except SupervisorError as err: + _LOGGER.debug("Can't read discovery data: %s", err) + else: + await self.async_process_new(data) + async def async_process_new(self, data: Discovery) -> None: + """Process add discovery entry.""" # Read additional Add-on info try: - addon_info = await self.hassio.get_addon_info(slug) - except HassioAPIError as err: + addon_info = await self._supervisor_client.addons.addon_info(data.addon) + except SupervisorError as err: _LOGGER.error("Can't read add-on info: %s", err) return - name: str = addon_info[ATTR_NAME] - config_data[ATTR_ADDON] = name + data.config[ATTR_ADDON] = addon_info.name # Use config flow discovery_flow.async_create_flow( self.hass, - service, + data.service, context={"source": config_entries.SOURCE_HASSIO}, - data=HassioServiceInfo(config=config_data, name=name, slug=slug, uuid=uuid), + data=HassioServiceInfo( + config=data.config, + name=addon_info.name, + slug=data.addon, + uuid=data.uuid.hex, + ), + discovery_key=discovery_flow.DiscoveryKey( + domain=DOMAIN, + key=data.uuid.hex, + version=1, + ), ) async def async_process_del(self, data: dict[str, Any]) -> None: """Process remove discovery entry.""" - service = data[ATTR_SERVICE] - uuid = data[ATTR_UUID] + service: str = data[ATTR_SERVICE] + uuid: str = data[ATTR_UUID] # Check if really deletet / prevent injections try: - data = await self.hassio.get_discovery_message(uuid) - except HassioAPIError: + await self._supervisor_client.discovery.get(UUID(uuid)) + except SupervisorError: pass else: _LOGGER.warning("Retrieve wrong unload for %s", service) diff --git a/homeassistant/components/hassio/handler.py b/homeassistant/components/hassio/handler.py index 305b9d4961b..254c392462c 100644 --- a/homeassistant/components/hassio/handler.py +++ b/homeassistant/components/hassio/handler.py @@ -9,6 +9,7 @@ import logging import os from typing import Any +from aiohasupervisor import SupervisorClient import aiohttp from yarl import URL @@ -20,12 +21,15 @@ from homeassistant.components.http import ( ) from homeassistant.const import SERVER_PORT from homeassistant.core import HomeAssistant +from homeassistant.helpers.singleton import singleton from homeassistant.loader import bind_hass -from .const import ATTR_DISCOVERY, ATTR_MESSAGE, ATTR_RESULT, DOMAIN, X_HASS_SOURCE +from .const import ATTR_MESSAGE, ATTR_RESULT, DOMAIN, X_HASS_SOURCE _LOGGER = logging.getLogger(__name__) +KEY_SUPERVISOR_CLIENT = "supervisor_client" + class HassioAPIError(RuntimeError): """Return if a API trow a error.""" @@ -62,28 
+66,6 @@ def api_data[**_P]( return _wrapper -@bind_hass -async def async_get_addon_info(hass: HomeAssistant, slug: str) -> dict: - """Return add-on info. - - The add-on must be installed. - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - return await hassio.get_addon_info(slug) - - -@api_data -async def async_get_addon_store_info(hass: HomeAssistant, slug: str) -> dict: - """Return add-on store info. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/store/addons/{slug}" - return await hassio.send_command(command, method="get") - - @bind_hass async def async_update_diagnostics(hass: HomeAssistant, diagnostics: bool) -> bool: """Update Supervisor diagnostics toggle. @@ -94,109 +76,6 @@ async def async_update_diagnostics(hass: HomeAssistant, diagnostics: bool) -> bo return await hassio.update_diagnostics(diagnostics) -@bind_hass -@api_data -async def async_install_addon(hass: HomeAssistant, slug: str) -> dict: - """Install add-on. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/install" - return await hassio.send_command(command, timeout=None) - - -@bind_hass -@api_data -async def async_uninstall_addon(hass: HomeAssistant, slug: str) -> dict: - """Uninstall add-on. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/uninstall" - return await hassio.send_command(command, timeout=60) - - -@bind_hass -@api_data -async def async_update_addon( - hass: HomeAssistant, - slug: str, - backup: bool = False, -) -> dict: - """Update add-on. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/update" - return await hassio.send_command( - command, - payload={"backup": backup}, - timeout=None, - ) - - -@bind_hass -@api_data -async def async_start_addon(hass: HomeAssistant, slug: str) -> dict: - """Start add-on. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/start" - return await hassio.send_command(command, timeout=60) - - -@bind_hass -@api_data -async def async_restart_addon(hass: HomeAssistant, slug: str) -> dict: - """Restart add-on. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/restart" - return await hassio.send_command(command, timeout=None) - - -@bind_hass -@api_data -async def async_stop_addon(hass: HomeAssistant, slug: str) -> dict: - """Stop add-on. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/stop" - return await hassio.send_command(command, timeout=60) - - -@bind_hass -@api_data -async def async_set_addon_options( - hass: HomeAssistant, slug: str, options: dict -) -> dict: - """Set add-on options. - - The caller of the function should handle HassioAPIError. 
- """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/options" - return await hassio.send_command(command, payload=options) - - -@bind_hass -async def async_get_addon_discovery_info(hass: HomeAssistant, slug: str) -> dict | None: - """Return discovery data for an add-on.""" - hassio: HassIO = hass.data[DOMAIN] - data = await hassio.retrieve_discovery_messages() - discovered_addons = data[ATTR_DISCOVERY] - return next((addon for addon in discovered_addons if addon["addon"] == slug), None) - - @bind_hass @api_data async def async_create_backup( @@ -212,61 +91,6 @@ async def async_create_backup( return await hassio.send_command(command, payload=payload, timeout=None) -@bind_hass -@api_data -async def async_update_os(hass: HomeAssistant, version: str | None = None) -> dict: - """Update Home Assistant Operating System. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = "/os/update" - return await hassio.send_command( - command, - payload={"version": version}, - timeout=None, - ) - - -@bind_hass -@api_data -async def async_update_supervisor(hass: HomeAssistant) -> dict: - """Update Home Assistant Supervisor. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = "/supervisor/update" - return await hassio.send_command(command, timeout=None) - - -@bind_hass -@api_data -async def async_update_core( - hass: HomeAssistant, version: str | None = None, backup: bool = False -) -> dict: - """Update Home Assistant Core. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = "/core/update" - return await hassio.send_command( - command, - payload={"version": version, "backup": backup}, - timeout=None, - ) - - -@bind_hass -@_api_bool -async def async_apply_suggestion(hass: HomeAssistant, suggestion_uuid: str) -> dict: - """Apply a suggestion from supervisor's resolution center.""" - hassio: HassIO = hass.data[DOMAIN] - command = f"/resolution/suggestion/{suggestion_uuid}" - return await hassio.send_command(command, timeout=None) - - @api_data async def async_get_green_settings(hass: HomeAssistant) -> dict[str, bool]: """Return settings specific to Home Assistant Green.""" @@ -309,16 +133,6 @@ async def async_set_yellow_settings( ) -@api_data -async def async_reboot_host(hass: HomeAssistant) -> dict: - """Reboot the host. - - Returns an empty dict. - """ - hassio: HassIO = hass.data[DOMAIN] - return await hassio.send_command("/host/reboot", method="post", timeout=60) - - class HassIO: """Small API wrapper for Hass.io.""" @@ -332,15 +146,13 @@ class HassIO: self.loop = loop self.websession = websession self._ip = ip - self._base_url = URL(f"http://{ip}") + base_url = f"http://{ip}" + self._base_url = URL(base_url) - @_api_bool - def is_connected(self) -> Coroutine: - """Return true if it connected to Hass.io supervisor. - - This method returns a coroutine. - """ - return self.send_command("/supervisor/ping", method="get", timeout=15) + @property + def base_url(self) -> URL: + """Return base url for Supervisor.""" + return self._base_url @api_data def get_info(self) -> Coroutine: @@ -390,14 +202,6 @@ class HassIO: """ return self.send_command("/network/info", method="get") - @api_data - def get_addon_info(self, addon: str) -> Coroutine: - """Return data for a Add-on. - - This method returns a coroutine. 
- """ - return self.send_command(f"/addons/{addon}/info", method="get") - @api_data def get_core_stats(self) -> Coroutine: """Return stats for the core. @@ -406,14 +210,6 @@ class HassIO: """ return self.send_command("/core/stats", method="get") - @api_data - def get_addon_stats(self, addon: str) -> Coroutine: - """Return stats for an Add-on. - - This method returns a coroutine. - """ - return self.send_command(f"/addons/{addon}/stats", method="get") - @api_data def get_supervisor_stats(self) -> Coroutine: """Return stats for the supervisor. @@ -422,23 +218,6 @@ class HassIO: """ return self.send_command("/supervisor/stats", method="get") - def get_addon_changelog(self, addon: str) -> Coroutine: - """Return changelog for an Add-on. - - This method returns a coroutine. - """ - return self.send_command( - f"/addons/{addon}/changelog", method="get", return_text=True - ) - - @api_data - def get_store(self) -> Coroutine: - """Return data from the store. - - This method returns a coroutine. - """ - return self.send_command("/store", method="get") - @api_data def get_ingress_panels(self) -> Coroutine: """Return data for Add-on ingress panels. @@ -447,66 +226,6 @@ class HassIO: """ return self.send_command("/ingress/panels", method="get") - @_api_bool - def restart_homeassistant(self) -> Coroutine: - """Restart Home-Assistant container. - - This method returns a coroutine. - """ - return self.send_command("/homeassistant/restart") - - @_api_bool - def stop_homeassistant(self) -> Coroutine: - """Stop Home-Assistant container. - - This method returns a coroutine. - """ - return self.send_command("/homeassistant/stop") - - @_api_bool - def refresh_updates(self) -> Coroutine: - """Refresh available updates. - - This method returns a coroutine. - """ - return self.send_command("/refresh_updates", timeout=300) - - @api_data - def retrieve_discovery_messages(self) -> Coroutine: - """Return all discovery data from Hass.io API. - - This method returns a coroutine. - """ - return self.send_command("/discovery", method="get", timeout=60) - - @api_data - def get_discovery_message(self, uuid: str) -> Coroutine: - """Return a single discovery data message. - - This method returns a coroutine. - """ - return self.send_command(f"/discovery/{uuid}", method="get") - - @api_data - def get_resolution_info(self) -> Coroutine: - """Return data for Supervisor resolution center. - - This method returns a coroutine. - """ - return self.send_command("/resolution/info", method="get") - - @api_data - def get_suggestions_for_issue( - self, issue_id: str - ) -> Coroutine[Any, Any, dict[str, Any]]: - """Return suggestions for issue from Supervisor resolution center. - - This method returns a coroutine. - """ - return self.send_command( - f"/resolution/issue/{issue_id}/suggestions", method="get" - ) - @_api_bool async def update_hass_api( self, http_config: dict[str, Any], refresh_token: RefreshToken @@ -546,14 +265,6 @@ class HassIO: "/supervisor/options", payload={"diagnostics": diagnostics} ) - @_api_bool - def apply_suggestion(self, suggestion_uuid: str) -> Coroutine: - """Apply a suggestion from supervisor's resolution center. - - This method returns a coroutine. - """ - return self.send_command(f"/resolution/suggestion/{suggestion_uuid}") - async def send_command( self, command: str, @@ -568,14 +279,13 @@ class HassIO: This method is a coroutine. 
""" - url = f"http://{self._ip}{command}" - joined_url = self._base_url.join(URL(command)) + joined_url = self._base_url.with_path(command) # This check is to make sure the normalized URL string # is the same as the URL string that was passed in. If # they are different, then the passed in command URL # contained characters that were removed by the normalization # such as ../../../../etc/passwd - if url != str(joined_url): + if joined_url.raw_path != command: _LOGGER.error("Invalid request %s", command) raise HassioAPIError @@ -618,3 +328,14 @@ class HassIO: _LOGGER.error("Client error on %s request %s", command, err) raise HassioAPIError + + +@singleton(KEY_SUPERVISOR_CLIENT) +def get_supervisor_client(hass: HomeAssistant) -> SupervisorClient: + """Return supervisor client.""" + hassio: HassIO = hass.data[DOMAIN] + return SupervisorClient( + str(hassio.base_url), + os.environ.get("SUPERVISOR_TOKEN", ""), + session=hassio.websession, + ) diff --git a/homeassistant/components/hassio/http.py b/homeassistant/components/hassio/http.py index 8c1fb11973e..2b34a48149b 100644 --- a/homeassistant/components/hassio/http.py +++ b/homeassistant/components/hassio/http.py @@ -18,6 +18,7 @@ from aiohttp.hdrs import ( CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE, + RANGE, TRANSFER_ENCODING, ) from aiohttp.web_exceptions import HTTPBadGateway @@ -41,6 +42,15 @@ NO_TIMEOUT = re.compile( r"|backups/.+/full" r"|backups/.+/partial" r"|backups/[^/]+/(?:upload|download)" + r"|audio/logs/(follow|boots/-?\d+(/follow)?)" + r"|cli/logs/(follow|boots/-?\d+(/follow)?)" + r"|core/logs/(follow|boots/-?\d+(/follow)?)" + r"|dns/logs/(follow|boots/-?\d+(/follow)?)" + r"|host/logs/(follow|boots/-?\d+(/follow)?)" + r"|multicast/logs/(follow|boots/-?\d+(/follow)?)" + r"|observer/logs/(follow|boots/-?\d+(/follow)?)" + r"|supervisor/logs/(follow|boots/-?\d+(/follow)?)" + r"|addons/[^/]+/logs/(follow|boots/-?\d+(/follow)?)" r")$" ) @@ -58,15 +68,16 @@ PATHS_ADMIN = re.compile( r"^(?:" r"|backups/[a-f0-9]{8}(/info|/download|/restore/full|/restore/partial)?" r"|backups/new/upload" - r"|audio/logs" - r"|cli/logs" - r"|core/logs" - r"|dns/logs" - r"|host/logs" - r"|multicast/logs" - r"|observer/logs" - r"|supervisor/logs" - r"|addons/[^/]+/(changelog|documentation|logs)" + r"|audio/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|cli/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|core/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|dns/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|host/logs(/follow|/boots(/-?\d+(/follow)?)?)?" + r"|multicast/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|observer/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|supervisor/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|addons/[^/]+/(changelog|documentation)" + r"|addons/[^/]+/logs(/follow|/boots/-?\d+(/follow)?)?" r")$" ) @@ -83,8 +94,38 @@ NO_STORE = re.compile( r"|app/entrypoint.js" r")$" ) + +# Follow logs should not be compressed, to be able to get streamed by frontend +NO_COMPRESS = re.compile( + r"^(?:" + r"|audio/logs/(follow|boots/-?\d+(/follow)?)" + r"|cli/logs/(follow|boots/-?\d+(/follow)?)" + r"|core/logs/(follow|boots/-?\d+(/follow)?)" + r"|dns/logs/(follow|boots/-?\d+(/follow)?)" + r"|host/logs/(follow|boots/-?\d+(/follow)?)" + r"|multicast/logs/(follow|boots/-?\d+(/follow)?)" + r"|observer/logs/(follow|boots/-?\d+(/follow)?)" + r"|supervisor/logs/(follow|boots/-?\d+(/follow)?)" + r"|addons/[^/]+/logs/(follow|boots/-?\d+(/follow)?)" + r")$" +) + +PATHS_LOGS = re.compile( + r"^(?:" + r"|audio/logs(/follow|/boots/-?\d+(/follow)?)?" 
+ r"|cli/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|core/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|dns/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|host/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|multicast/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|observer/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|supervisor/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|addons/[^/]+/logs(/follow|/boots/-?\d+(/follow)?)?" + r")$" +) # fmt: on + RESPONSE_HEADERS_FILTER = { TRANSFER_ENCODING, CONTENT_LENGTH, @@ -161,6 +202,10 @@ class HassIOView(HomeAssistantView): assert isinstance(request._stored_content_type, str) # noqa: SLF001 headers[CONTENT_TYPE] = request._stored_content_type # noqa: SLF001 + # forward range headers for logs + if PATHS_LOGS.match(path) and request.headers.get(RANGE): + headers[RANGE] = request.headers[RANGE] + try: client = await self._websession.request( method=request.method, @@ -177,7 +222,7 @@ class HassIOView(HomeAssistantView): ) response.content_type = client.content_type - if should_compress(response.content_type): + if should_compress(response.content_type, path): response.enable_compression() await response.prepare(request) # In testing iter_chunked, iter_any, and iter_chunks: @@ -217,8 +262,10 @@ def _get_timeout(path: str) -> ClientTimeout: return ClientTimeout(connect=10, total=300) -def should_compress(content_type: str) -> bool: +def should_compress(content_type: str, path: str | None = None) -> bool: """Return if we should compress a response.""" + if path is not None and NO_COMPRESS.match(path): + return False if content_type.startswith("image/"): return "svg" in content_type if content_type.startswith("application/"): diff --git a/homeassistant/components/hassio/icons.json b/homeassistant/components/hassio/icons.json index c55820b58f2..64f032d9f80 100644 --- a/homeassistant/components/hassio/icons.json +++ b/homeassistant/components/hassio/icons.json @@ -10,16 +10,38 @@ } }, "services": { - "addon_start": "mdi:play", - "addon_restart": "mdi:restart", - "addon_stdin": "mdi:console", - "addon_stop": "mdi:stop", - "addon_update": "mdi:update", - "host_reboot": "mdi:restart", - "host_shutdown": "mdi:power", - "backup_full": "mdi:content-save", - "backup_partial": "mdi:content-save", - "restore_full": "mdi:backup-restore", - "restore_partial": "mdi:backup-restore" + "addon_start": { + "service": "mdi:play" + }, + "addon_restart": { + "service": "mdi:restart" + }, + "addon_stdin": { + "service": "mdi:console" + }, + "addon_stop": { + "service": "mdi:stop" + }, + "addon_update": { + "service": "mdi:update" + }, + "host_reboot": { + "service": "mdi:restart" + }, + "host_shutdown": { + "service": "mdi:power" + }, + "backup_full": { + "service": "mdi:content-save" + }, + "backup_partial": { + "service": "mdi:content-save" + }, + "restore_full": { + "service": "mdi:backup-restore" + }, + "restore_partial": { + "service": "mdi:backup-restore" + } } } diff --git a/homeassistant/components/hassio/issues.py b/homeassistant/components/hassio/issues.py index 9c2152489d6..16697659077 100644 --- a/homeassistant/components/hassio/issues.py +++ b/homeassistant/components/hassio/issues.py @@ -7,6 +7,10 @@ from dataclasses import dataclass, field from datetime import datetime import logging from typing import Any, NotRequired, TypedDict +from uuid import UUID + +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ContextType, Issue as SupervisorIssue from homeassistant.core import HassJob, HomeAssistant, callback from homeassistant.helpers.dispatcher import 
async_dispatcher_connect @@ -20,12 +24,8 @@ from homeassistant.helpers.issue_registry import ( from .const import ( ATTR_DATA, ATTR_HEALTHY, - ATTR_ISSUES, - ATTR_SUGGESTIONS, ATTR_SUPPORTED, - ATTR_UNHEALTHY, ATTR_UNHEALTHY_REASONS, - ATTR_UNSUPPORTED, ATTR_UNSUPPORTED_REASONS, ATTR_UPDATE_KEY, ATTR_WS_EVENT, @@ -36,6 +36,7 @@ from .const import ( EVENT_SUPERVISOR_EVENT, EVENT_SUPERVISOR_UPDATE, EVENT_SUPPORTED_CHANGED, + ISSUE_KEY_ADDON_BOOT_FAIL, ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING, ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, ISSUE_KEY_SYSTEM_DOCKER_CONFIG, @@ -44,10 +45,9 @@ from .const import ( PLACEHOLDER_KEY_REFERENCE, REQUEST_REFRESH_DELAY, UPDATE_KEY_SUPERVISOR, - SupervisorIssueContext, ) from .coordinator import get_addons_info -from .handler import HassIO, HassioAPIError +from .handler import HassIO, get_supervisor_client ISSUE_KEY_UNHEALTHY = "unhealthy" ISSUE_KEY_UNSUPPORTED = "unsupported" @@ -94,6 +94,7 @@ UNHEALTHY_REASONS = { # Keys (type + context) of issues that when found should be made into a repair ISSUE_KEYS_FOR_REPAIRS = { + ISSUE_KEY_ADDON_BOOT_FAIL, "issue_mount_mount_failed", "issue_system_multiple_data_disks", "issue_system_reboot_required", @@ -118,9 +119,9 @@ class SuggestionDataType(TypedDict): class Suggestion: """Suggestion from Supervisor which resolves an issue.""" - uuid: str + uuid: UUID type: str - context: SupervisorIssueContext + context: ContextType reference: str | None = None @property @@ -132,9 +133,9 @@ class Suggestion: def from_dict(cls, data: SuggestionDataType) -> Suggestion: """Convert from dictionary representation.""" return cls( - uuid=data["uuid"], + uuid=UUID(data["uuid"]), type=data["type"], - context=SupervisorIssueContext(data["context"]), + context=ContextType(data["context"]), reference=data["reference"], ) @@ -153,9 +154,9 @@ class IssueDataType(TypedDict): class Issue: """Issue from Supervisor.""" - uuid: str + uuid: UUID type: str - context: SupervisorIssueContext + context: ContextType reference: str | None = None suggestions: list[Suggestion] = field(default_factory=list, compare=False) @@ -169,9 +170,9 @@ class Issue: """Convert from dictionary representation.""" suggestions: list[SuggestionDataType] = data.get("suggestions", []) return cls( - uuid=data["uuid"], + uuid=UUID(data["uuid"]), type=data["type"], - context=SupervisorIssueContext(data["context"]), + context=ContextType(data["context"]), reference=data["reference"], suggestions=[ Suggestion.from_dict(suggestion) for suggestion in suggestions @@ -188,7 +189,8 @@ class SupervisorIssues: self._client = client self._unsupported_reasons: set[str] = set() self._unhealthy_reasons: set[str] = set() - self._issues: dict[str, Issue] = {} + self._issues: dict[UUID, Issue] = {} + self._supervisor_client = get_supervisor_client(hass) @property def unhealthy_reasons(self) -> set[str]: @@ -281,7 +283,7 @@ class SupervisorIssues: async_create_issue( self._hass, DOMAIN, - issue.uuid, + issue.uuid.hex, is_fixable=bool(issue.suggestions), severity=IssueSeverity.WARNING, translation_key=issue.key, @@ -290,19 +292,37 @@ class SupervisorIssues: self._issues[issue.uuid] = issue - async def add_issue_from_data(self, data: IssueDataType) -> None: + async def add_issue_from_data(self, data: SupervisorIssue) -> None: """Add issue from data to list after getting latest suggestions.""" try: - data["suggestions"] = ( - await self._client.get_suggestions_for_issue(data["uuid"]) - )[ATTR_SUGGESTIONS] - except HassioAPIError: + suggestions = ( + await 
self._supervisor_client.resolution.suggestions_for_issue( + data.uuid + ) + ) + except SupervisorError: _LOGGER.error( "Could not get suggestions for supervisor issue %s, skipping it", - data["uuid"], + data.uuid.hex, ) return - self.add_issue(Issue.from_dict(data)) + self.add_issue( + Issue( + uuid=data.uuid, + type=str(data.type), + context=data.context, + reference=data.reference, + suggestions=[ + Suggestion( + uuid=suggestion.uuid, + type=str(suggestion.type), + context=suggestion.context, + reference=suggestion.reference, + ) + for suggestion in suggestions + ], + ) + ) def remove_issue(self, issue: Issue) -> None: """Remove an issue from the list. Delete a repair if necessary.""" @@ -310,13 +330,13 @@ class SupervisorIssues: return if issue.key in ISSUE_KEYS_FOR_REPAIRS: - async_delete_issue(self._hass, DOMAIN, issue.uuid) + async_delete_issue(self._hass, DOMAIN, issue.uuid.hex) del self._issues[issue.uuid] def get_issue(self, issue_id: str) -> Issue | None: """Get issue from key.""" - return self._issues.get(issue_id) + return self._issues.get(UUID(issue_id)) async def setup(self) -> None: """Create supervisor events listener.""" @@ -329,8 +349,8 @@ class SupervisorIssues: async def _update(self, _: datetime | None = None) -> None: """Update issues from Supervisor resolution center.""" try: - data = await self._client.get_resolution_info() - except HassioAPIError as err: + data = await self._supervisor_client.resolution.info() + except SupervisorError as err: _LOGGER.error("Failed to update supervisor issues: %r", err) async_call_later( self._hass, @@ -338,18 +358,16 @@ class SupervisorIssues: HassJob(self._update, cancel_on_shutdown=True), ) return - self.unhealthy_reasons = set(data[ATTR_UNHEALTHY]) - self.unsupported_reasons = set(data[ATTR_UNSUPPORTED]) + self.unhealthy_reasons = set(data.unhealthy) + self.unsupported_reasons = set(data.unsupported) # Remove any cached issues that weren't returned - for issue_id in set(self._issues.keys()) - { - issue["uuid"] for issue in data[ATTR_ISSUES] - }: + for issue_id in set(self._issues) - {issue.uuid for issue in data.issues}: self.remove_issue(self._issues[issue_id]) # Add/update any issues that came back await asyncio.gather( - *[self.add_issue_from_data(issue) for issue in data[ATTR_ISSUES]] + *[self.add_issue_from_data(issue) for issue in data.issues] ) @callback diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index b32e5ebcd53..70230701965 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -5,5 +5,7 @@ "dependencies": ["http", "repairs"], "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", - "quality_scale": "internal" + "quality_scale": "internal", + "requirements": ["aiohasupervisor==0.2.2b2"], + "single_config_entry": true } diff --git a/homeassistant/components/hassio/repairs.py b/homeassistant/components/hassio/repairs.py index 082dbe38bee..0e8122c08b9 100644 --- a/homeassistant/components/hassio/repairs.py +++ b/homeassistant/components/hassio/repairs.py @@ -6,6 +6,8 @@ from collections.abc import Callable, Coroutine from types import MethodType from typing import Any +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ContextType import voluptuous as vol from homeassistant.components.repairs import RepairsFlow @@ -14,14 +16,14 @@ from homeassistant.data_entry_flow import FlowResult from . 
import get_addons_info, get_issues_info from .const import ( + ISSUE_KEY_ADDON_BOOT_FAIL, ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, ISSUE_KEY_SYSTEM_DOCKER_CONFIG, PLACEHOLDER_KEY_ADDON, PLACEHOLDER_KEY_COMPONENTS, PLACEHOLDER_KEY_REFERENCE, - SupervisorIssueContext, ) -from .handler import async_apply_suggestion +from .handler import get_supervisor_client from .issues import Issue, Suggestion HELP_URLS = { @@ -50,9 +52,10 @@ class SupervisorIssueRepairFlow(RepairsFlow): _data: dict[str, Any] | None = None _issue: Issue | None = None - def __init__(self, issue_id: str) -> None: + def __init__(self, hass: HomeAssistant, issue_id: str) -> None: """Initialize repair flow.""" self._issue_id = issue_id + self._supervisor_client = get_supervisor_client(hass) super().__init__() @property @@ -123,9 +126,12 @@ class SupervisorIssueRepairFlow(RepairsFlow): if not confirmed and suggestion.key in SUGGESTION_CONFIRMATION_REQUIRED: return self._async_form_for_suggestion(suggestion) - if await async_apply_suggestion(self.hass, suggestion.uuid): - return self.async_create_entry(data={}) - return self.async_abort(reason="apply_suggestion_fail") + try: + await self._supervisor_client.resolution.apply_suggestion(suggestion.uuid) + except SupervisorError: + return self.async_abort(reason="apply_suggestion_fail") + + return self.async_create_entry(data={}) @staticmethod def _async_step( @@ -162,9 +168,9 @@ class DockerConfigIssueRepairFlow(SupervisorIssueRepairFlow): if issue.key == self.issue.key or issue.type != self.issue.type: continue - if issue.context == SupervisorIssueContext.CORE: + if issue.context == ContextType.CORE: components.insert(0, "Home Assistant") - elif issue.context == SupervisorIssueContext.ADDON: + elif issue.context == ContextType.ADDON: components.append( next( ( @@ -181,8 +187,8 @@ class DockerConfigIssueRepairFlow(SupervisorIssueRepairFlow): return placeholders -class DetachedAddonIssueRepairFlow(SupervisorIssueRepairFlow): - """Handler for detached addon issue fixing flows.""" +class AddonIssueRepairFlow(SupervisorIssueRepairFlow): + """Handler for addon issue fixing flows.""" @property def description_placeholders(self) -> dict[str, str] | None: @@ -209,8 +215,11 @@ async def async_create_fix_flow( supervisor_issues = get_issues_info(hass) issue = supervisor_issues and supervisor_issues.get_issue(issue_id) if issue and issue.key == ISSUE_KEY_SYSTEM_DOCKER_CONFIG: - return DockerConfigIssueRepairFlow(issue_id) - if issue and issue.key == ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED: - return DetachedAddonIssueRepairFlow(issue_id) + return DockerConfigIssueRepairFlow(hass, issue_id) + if issue and issue.key in { + ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, + ISSUE_KEY_ADDON_BOOT_FAIL, + }: + return AddonIssueRepairFlow(hass, issue_id) - return SupervisorIssueRepairFlow(issue_id) + return SupervisorIssueRepairFlow(hass, issue_id) diff --git a/homeassistant/components/hassio/strings.json b/homeassistant/components/hassio/strings.json index 7c3aa70b559..556a5a13f95 100644 --- a/homeassistant/components/hassio/strings.json +++ b/homeassistant/components/hassio/strings.json @@ -17,6 +17,23 @@ } }, "issues": { + "issue_addon_boot_fail": { + "title": "Add-on failed to start at boot", + "fix_flow": { + "step": { + "fix_menu": { + "description": "Add-on {addon} is set to start at boot but failed to start. Usually this occurs when the configuration is incorrect or the same port is used in multiple add-ons. 
Check the configuration as well as logs for {addon} and Supervisor.\n\nUse Start to try again or Disable to turn off the start at boot option.", + "menu_options": { + "addon_execute_start": "Start", + "addon_disable_boot": "Disable" + } + } + }, + "abort": { + "apply_suggestion_fail": "Could not apply the fix. Check the Supervisor logs for more details." + } + } + }, "issue_addon_detached_addon_missing": { "title": "Missing repository for an installed add-on", "description": "Repository for add-on {addon} is missing. This means it will not get updates, and backups may not be restored correctly as the supervisor may not be able to build/download the resources required.\n\nPlease check the [add-on's documentation]({addon_url}) for installation instructions and add the repository to the store." @@ -26,7 +43,7 @@ "fix_flow": { "step": { "addon_execute_remove": { - "description": "Add-on {addon} has been removed from the repository it was installed from. This means it will not get updates, and backups may not be restored correctly as the supervisor may not be able to build/download the resources required.\n\nClicking submit will uninstall this deprecated add-on. Alternatively, you can check [Home Assistant help]({help_url}) and the [community forum]({community_url}) for alternatives to migrate to." + "description": "Add-on {addon} has been removed from the repository it was installed from. This means it will not get updates, and backups may not be restored correctly as the supervisor may not be able to build/download the resources required.\n\nSelecting **Submit** will uninstall this deprecated add-on. Alternatively, you can check [Home Assistant help]({help_url}) and the [community forum]({community_url}) for alternatives to migrate to." } }, "abort": { @@ -76,7 +93,7 @@ } }, "system_adopt_data_disk": { - "description": "Select submit to make `{reference}` the active data disk. The one and only.\n\nYou won't have access anymore to the current Home Assistant data (will be marked as inactive data disk). After reboot, your system will be in the state of the Home Assistant data on `{reference}`." + "description": "Select **Submit** to make `{reference}` the active data disk. The one and only.\n\nYou won't have access anymore to the current Home Assistant data (will be marked as inactive data disk). After reboot, your system will be in the state of the Home Assistant data on `{reference}`." } }, "abort": { @@ -208,6 +225,10 @@ "unsupported_virtualization_image": { "title": "Unsupported system - Incorrect OS image for virtualization", "description": "System is unsupported because the Home Assistant OS image in use is not intended for use in a virtualized environment. Use the link to learn more and how to fix this." + }, + "update_service_deprecated": { + "title": "Deprecated update add-on action", + "description": "The update add-on action has been deprecated and will be removed in 2025.5. Please use the update entity and the respective action to update the add-on instead." } }, "entity": { @@ -253,60 +274,60 @@ "fields": { "addon": { "name": "Add-on", - "description": "The add-on slug." + "description": "The add-on to start." } } }, "addon_restart": { - "name": "Restart add-on.", + "name": "Restart add-on", "description": "Restarts an add-on.", "fields": { "addon": { "name": "[%key:component::hassio::services::addon_start::fields::addon::name%]", - "description": "[%key:component::hassio::services::addon_start::fields::addon::description%]" + "description": "The add-on to restart." 
} } }, "addon_stdin": { - "name": "Write data to add-on stdin.", - "description": "Writes data to add-on stdin.", + "name": "Write data to add-on stdin", + "description": "Writes data to the add-on's standard input.", "fields": { "addon": { "name": "[%key:component::hassio::services::addon_start::fields::addon::name%]", - "description": "[%key:component::hassio::services::addon_start::fields::addon::description%]" + "description": "The add-on to write to." } } }, "addon_stop": { - "name": "Stop add-on.", + "name": "Stop add-on", "description": "Stops an add-on.", "fields": { "addon": { "name": "[%key:component::hassio::services::addon_start::fields::addon::name%]", - "description": "[%key:component::hassio::services::addon_start::fields::addon::description%]" + "description": "The add-on to stop." } } }, "addon_update": { - "name": "Update add-on.", + "name": "Update add-on", "description": "Updates an add-on. This action should be used with caution since add-on updates can contain breaking changes. It is highly recommended that you review release notes/change logs before updating an add-on.", "fields": { "addon": { "name": "[%key:component::hassio::services::addon_start::fields::addon::name%]", - "description": "[%key:component::hassio::services::addon_start::fields::addon::description%]" + "description": "The add-on to update." } } }, "host_reboot": { - "name": "Reboot the host system.", + "name": "Reboot the host system", "description": "Reboots the host system." }, "host_shutdown": { - "name": "Power off the host system.", + "name": "Power off the host system", "description": "Powers off the host system." }, "backup_full": { - "name": "Create a full backup.", + "name": "Create a full backup", "description": "Creates a full backup.", "fields": { "name": { @@ -332,7 +353,7 @@ } }, "backup_partial": { - "name": "Create a partial backup.", + "name": "Create a partial backup", "description": "Creates a partial backup.", "fields": { "homeassistant": { @@ -370,7 +391,7 @@ } }, "restore_full": { - "name": "Restore from full backup.", + "name": "Restore from full backup", "description": "Restores from full backup.", "fields": { "slug": { @@ -384,7 +405,7 @@ } }, "restore_partial": { - "name": "Restore from partial backup.", + "name": "Restore from partial backup", "description": "Restores from a partial backup.", "fields": { "slug": { diff --git a/homeassistant/components/hassio/update.py b/homeassistant/components/hassio/update.py index 8e7650a9225..fbb3e191f81 100644 --- a/homeassistant/components/hassio/update.py +++ b/homeassistant/components/hassio/update.py @@ -4,6 +4,12 @@ from __future__ import annotations from typing import Any +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ( + HomeAssistantUpdateOptions, + OSUpdate, + StoreAddonUpdate, +) from awesomeversion import AwesomeVersion, AwesomeVersionStrategy from homeassistant.components.update import ( @@ -34,13 +40,6 @@ from .entity import ( HassioOSEntity, HassioSupervisorEntity, ) -from .handler import ( - HassioAPIError, - async_update_addon, - async_update_core, - async_update_os, - async_update_supervisor, -) ENTITY_DESCRIPTION = UpdateEntityDescription( name="Update", @@ -165,8 +164,10 @@ class SupervisorAddonUpdateEntity(HassioAddonEntity, UpdateEntity): ) -> None: """Install an update.""" try: - await async_update_addon(self.hass, slug=self._addon_slug, backup=backup) - except HassioAPIError as err: + await self.coordinator.supervisor_client.store.update_addon( + self._addon_slug, 
StoreAddonUpdate(backup=backup) + ) + except SupervisorError as err: raise HomeAssistantError(f"Error updating {self.title}: {err}") from err await self.coordinator.force_info_update_supervisor() @@ -210,8 +211,10 @@ class SupervisorOSUpdateEntity(HassioOSEntity, UpdateEntity): ) -> None: """Install an update.""" try: - await async_update_os(self.hass, version) - except HassioAPIError as err: + await self.coordinator.supervisor_client.os.update( + OSUpdate(version=version) + ) + except SupervisorError as err: raise HomeAssistantError( f"Error updating Home Assistant Operating System: {err}" ) from err @@ -256,8 +259,8 @@ class SupervisorSupervisorUpdateEntity(HassioSupervisorEntity, UpdateEntity): ) -> None: """Install an update.""" try: - await async_update_supervisor(self.hass) - except HassioAPIError as err: + await self.coordinator.supervisor_client.supervisor.update() + except SupervisorError as err: raise HomeAssistantError( f"Error updating Home Assistant Supervisor: {err}" ) from err @@ -301,8 +304,10 @@ class SupervisorCoreUpdateEntity(HassioCoreEntity, UpdateEntity): ) -> None: """Install an update.""" try: - await async_update_core(self.hass, version=version, backup=backup) - except HassioAPIError as err: + await self.coordinator.supervisor_client.homeassistant.update( + HomeAssistantUpdateOptions(version=version, backup=backup) + ) + except SupervisorError as err: raise HomeAssistantError( - f"Error updating Home Assistant Core {err}" + f"Error updating Home Assistant Core: {err}" ) from err diff --git a/homeassistant/components/hassio/websocket_api.py b/homeassistant/components/hassio/websocket_api.py index 03ca424035c..954d9ee8a02 100644 --- a/homeassistant/components/hassio/websocket_api.py +++ b/homeassistant/components/hassio/websocket_api.py @@ -8,7 +8,7 @@ from typing import Any import voluptuous as vol from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import Unauthorized import homeassistant.helpers.config_validation as cv diff --git a/homeassistant/components/haveibeenpwned/manifest.json b/homeassistant/components/haveibeenpwned/manifest.json index 2451871f0c8..eb9ad4c356f 100644 --- a/homeassistant/components/haveibeenpwned/manifest.json +++ b/homeassistant/components/haveibeenpwned/manifest.json @@ -3,5 +3,6 @@ "name": "HaveIBeenPwned", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/haveibeenpwned", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/hddtemp/manifest.json b/homeassistant/components/hddtemp/manifest.json index 8dd2676596c..4fe23233870 100644 --- a/homeassistant/components/hddtemp/manifest.json +++ b/homeassistant/components/hddtemp/manifest.json @@ -3,5 +3,6 @@ "name": "hddtemp", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/hddtemp", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/hdmi_cec/__init__.py b/homeassistant/components/hdmi_cec/__init__.py index 43a649ba01a..6b4a949c0fc 100644 --- a/homeassistant/components/hdmi_cec/__init__.py +++ b/homeassistant/components/hdmi_cec/__init__.py @@ -35,30 +35,15 @@ from homeassistant.const import ( from homeassistant.core import HassJob, HomeAssistant, 
ServiceCall, callback from homeassistant.helpers import discovery, event import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType -DOMAIN = "hdmi_cec" +from .const import DOMAIN, EVENT_HDMI_CEC_UNAVAILABLE _LOGGER = logging.getLogger(__name__) DEFAULT_DISPLAY_NAME = "HA" CONF_TYPES = "types" -ICON_UNKNOWN = "mdi:help" -ICON_AUDIO = "mdi:speaker" -ICON_PLAYER = "mdi:play" -ICON_TUNER = "mdi:radio" -ICON_RECORDER = "mdi:microphone" -ICON_TV = "mdi:television" -ICONS_BY_TYPE = { - 0: ICON_TV, - 1: ICON_RECORDER, - 3: ICON_TUNER, - 4: ICON_PLAYER, - 5: ICON_AUDIO, -} - CMD_UP = "up" CMD_DOWN = "down" CMD_MUTE = "mute" @@ -70,12 +55,7 @@ CMD_RELEASE = "release" EVENT_CEC_COMMAND_RECEIVED = "cec_command_received" EVENT_CEC_KEYPRESS_RECEIVED = "cec_keypress_received" -ATTR_PHYSICAL_ADDRESS = "physical_address" -ATTR_TYPE_ID = "type_id" -ATTR_VENDOR_NAME = "vendor_name" -ATTR_VENDOR_ID = "vendor_id" ATTR_DEVICE = "device" -ATTR_TYPE = "type" ATTR_KEY = "key" ATTR_DUR = "dur" ATTR_SRC = "src" @@ -156,7 +136,6 @@ CONFIG_SCHEMA = vol.Schema( ) WATCHDOG_INTERVAL = 120 -EVENT_HDMI_CEC_UNAVAILABLE = "hdmi_cec_unavailable" def pad_physical_address(addr): @@ -210,7 +189,7 @@ def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901 _LOGGER.debug("Reached _adapter_watchdog") event.call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job) if not adapter.initialized: - _LOGGER.info("Adapter not initialized; Trying to restart") + _LOGGER.warning("Adapter not initialized; Trying to restart") hass.bus.fire(EVENT_HDMI_CEC_UNAVAILABLE) adapter.init() @@ -240,7 +219,7 @@ def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901 KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM) ) hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM)) - _LOGGER.info("Audio muted") + _LOGGER.debug("Audio muted") else: _LOGGER.warning("Unknown command %s", cmd) @@ -307,7 +286,7 @@ def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901 if not isinstance(addr, (PhysicalAddress,)): addr = PhysicalAddress(addr) hdmi_network.active_source(addr) - _LOGGER.info("Selected %s (%s)", call.data[ATTR_DEVICE], addr) + _LOGGER.debug("Selected %s (%s)", call.data[ATTR_DEVICE], addr) def _update(call: ServiceCall) -> None: """Update if device update is needed. 
@@ -356,85 +335,3 @@ def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901 hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_cec) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown) return True - - -class CecEntity(Entity): - """Representation of a HDMI CEC device entity.""" - - _attr_should_poll = False - - def __init__(self, device, logical) -> None: - """Initialize the device.""" - self._device = device - self._logical_address = logical - self.entity_id = "%s.%d" % (DOMAIN, self._logical_address) - self._set_attr_name() - self._attr_icon = ICONS_BY_TYPE.get(self._device.type, ICON_UNKNOWN) - - def _set_attr_name(self): - """Set name.""" - if ( - self._device.osd_name is not None - and self.vendor_name is not None - and self.vendor_name != "Unknown" - ): - self._attr_name = f"{self.vendor_name} {self._device.osd_name}" - elif self._device.osd_name is None: - self._attr_name = f"{self._device.type_name} {self._logical_address}" - else: - self._attr_name = f"{self._device.type_name} {self._logical_address} ({self._device.osd_name})" - - def _hdmi_cec_unavailable(self, callback_event): - self._attr_available = False - self.schedule_update_ha_state(False) - - async def async_added_to_hass(self): - """Register HDMI callbacks after initialization.""" - self._device.set_update_callback(self._update) - self.hass.bus.async_listen( - EVENT_HDMI_CEC_UNAVAILABLE, self._hdmi_cec_unavailable - ) - - def _update(self, device=None): - """Device status changed, schedule an update.""" - self._attr_available = True - self.schedule_update_ha_state(True) - - @property - def vendor_id(self): - """Return the ID of the device's vendor.""" - return self._device.vendor_id - - @property - def vendor_name(self): - """Return the name of the device's vendor.""" - return self._device.vendor - - @property - def physical_address(self): - """Return the physical address of device in HDMI network.""" - return str(self._device.physical_address) - - @property - def type(self): - """Return a string representation of the device's type.""" - return self._device.type_name - - @property - def type_id(self): - """Return the type ID of device.""" - return self._device.type - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - state_attr = {} - if self.vendor_id is not None: - state_attr[ATTR_VENDOR_ID] = self.vendor_id - state_attr[ATTR_VENDOR_NAME] = self.vendor_name - if self.type_id is not None: - state_attr[ATTR_TYPE_ID] = self.type_id - state_attr[ATTR_TYPE] = self.type - if self.physical_address is not None: - state_attr[ATTR_PHYSICAL_ADDRESS] = self.physical_address - return state_attr diff --git a/homeassistant/components/hdmi_cec/const.py b/homeassistant/components/hdmi_cec/const.py new file mode 100644 index 00000000000..beb95e95676 --- /dev/null +++ b/homeassistant/components/hdmi_cec/const.py @@ -0,0 +1,7 @@ +"""Support for HDMI CEC.""" + +DOMAIN = "hdmi_cec" + +ATTR_NEW = "new" + +EVENT_HDMI_CEC_UNAVAILABLE = "hdmi_cec_unavailable" diff --git a/homeassistant/components/hdmi_cec/entity.py b/homeassistant/components/hdmi_cec/entity.py new file mode 100644 index 00000000000..bdb796e6a36 --- /dev/null +++ b/homeassistant/components/hdmi_cec/entity.py @@ -0,0 +1,109 @@ +"""Support for HDMI CEC.""" + +from __future__ import annotations + +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN, EVENT_HDMI_CEC_UNAVAILABLE + +ATTR_PHYSICAL_ADDRESS = "physical_address" +ATTR_TYPE = "type" +ATTR_TYPE_ID = "type_id" +ATTR_VENDOR_NAME = 
"vendor_name" +ATTR_VENDOR_ID = "vendor_id" + +ICON_UNKNOWN = "mdi:help" +ICON_AUDIO = "mdi:speaker" +ICON_PLAYER = "mdi:play" +ICON_TUNER = "mdi:radio" +ICON_RECORDER = "mdi:microphone" +ICON_TV = "mdi:television" +ICONS_BY_TYPE = { + 0: ICON_TV, + 1: ICON_RECORDER, + 3: ICON_TUNER, + 4: ICON_PLAYER, + 5: ICON_AUDIO, +} + + +class CecEntity(Entity): + """Representation of a HDMI CEC device entity.""" + + _attr_should_poll = False + + def __init__(self, device, logical) -> None: + """Initialize the device.""" + self._device = device + self._logical_address = logical + self.entity_id = f"{DOMAIN}.{self._logical_address}" + self._set_attr_name() + self._attr_icon = ICONS_BY_TYPE.get(self._device.type, ICON_UNKNOWN) + + def _set_attr_name(self): + """Set name.""" + if ( + self._device.osd_name is not None + and self.vendor_name is not None + and self.vendor_name != "Unknown" + ): + self._attr_name = f"{self.vendor_name} {self._device.osd_name}" + elif self._device.osd_name is None: + self._attr_name = f"{self._device.type_name} {self._logical_address}" + else: + self._attr_name = f"{self._device.type_name} {self._logical_address} ({self._device.osd_name})" + + def _hdmi_cec_unavailable(self, callback_event): + self._attr_available = False + self.schedule_update_ha_state(False) + + async def async_added_to_hass(self): + """Register HDMI callbacks after initialization.""" + self._device.set_update_callback(self._update) + self.hass.bus.async_listen( + EVENT_HDMI_CEC_UNAVAILABLE, self._hdmi_cec_unavailable + ) + + def _update(self, device=None): + """Device status changed, schedule an update.""" + self._attr_available = True + self.schedule_update_ha_state(True) + + @property + def vendor_id(self): + """Return the ID of the device's vendor.""" + return self._device.vendor_id + + @property + def vendor_name(self): + """Return the name of the device's vendor.""" + return self._device.vendor + + @property + def physical_address(self): + """Return the physical address of device in HDMI network.""" + return str(self._device.physical_address) + + @property + def type(self): + """Return a string representation of the device's type.""" + return self._device.type_name + + @property + def type_id(self): + """Return the type ID of device.""" + return self._device.type + + @property + def extra_state_attributes(self): + """Return the state attributes.""" + state_attr = {} + if self.vendor_id is not None: + state_attr[ATTR_VENDOR_ID] = self.vendor_id + state_attr[ATTR_VENDOR_NAME] = self.vendor_name + if self.type_id is not None: + state_attr[ATTR_TYPE_ID] = self.type_id + state_attr[ATTR_TYPE] = self.type + if self.physical_address is not None: + state_attr[ATTR_PHYSICAL_ADDRESS] = self.physical_address + return state_attr diff --git a/homeassistant/components/hdmi_cec/icons.json b/homeassistant/components/hdmi_cec/icons.json index 0bfcb98eea2..93647a6bb12 100644 --- a/homeassistant/components/hdmi_cec/icons.json +++ b/homeassistant/components/hdmi_cec/icons.json @@ -1,10 +1,22 @@ { "services": { - "power_on": "mdi:power", - "select_device": "mdi:television", - "send_command": "mdi:console", - "standby": "mdi:power-standby", - "update": "mdi:update", - "volume": "mdi:volume-high" + "power_on": { + "service": "mdi:power" + }, + "select_device": { + "service": "mdi:television" + }, + "send_command": { + "service": "mdi:console" + }, + "standby": { + "service": "mdi:power-standby" + }, + "update": { + "service": "mdi:update" + }, + "volume": { + "service": "mdi:volume-high" + } } } diff --git 
a/homeassistant/components/hdmi_cec/manifest.json b/homeassistant/components/hdmi_cec/manifest.json index fbd9e2304d9..2e37e908e16 100644 --- a/homeassistant/components/hdmi_cec/manifest.json +++ b/homeassistant/components/hdmi_cec/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/hdmi_cec", "iot_class": "local_push", "loggers": ["pycec"], + "quality_scale": "legacy", "requirements": ["pyCEC==0.5.2"] } diff --git a/homeassistant/components/hdmi_cec/media_player.py b/homeassistant/components/hdmi_cec/media_player.py index e86a1f5be70..7ad06f0c45a 100644 --- a/homeassistant/components/hdmi_cec/media_player.py +++ b/homeassistant/components/hdmi_cec/media_player.py @@ -37,7 +37,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ATTR_NEW, DOMAIN, CecEntity +from .const import ATTR_NEW, DOMAIN +from .entity import CecEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/hdmi_cec/strings.json b/homeassistant/components/hdmi_cec/strings.json index 22715907a99..d280cfc1a2b 100644 --- a/homeassistant/components/hdmi_cec/strings.json +++ b/homeassistant/components/hdmi_cec/strings.json @@ -24,11 +24,11 @@ }, "cmd": { "name": "Command", - "description": "Command itself. Could be decimal number or string with hexadeximal notation: \"0x10\"." + "description": "Command itself. Could be decimal number or string with hexadecimal notation: \"0x10\"." }, "dst": { "name": "Destination", - "description": "Destination for command. Could be decimal number or string with hexadeximal notation: \"0x10\"." + "description": "Destination for command. Could be decimal number or string with hexadecimal notation: \"0x10\"." }, "raw": { "name": "Raw", @@ -36,7 +36,7 @@ }, "src": { "name": "Source", - "description": "Source of command. Could be decimal number or string with hexadeximal notation: \"0x10\"." + "description": "Source of command. Could be decimal number or string with hexadecimal notation: \"0x10\"." } } }, diff --git a/homeassistant/components/hdmi_cec/switch.py b/homeassistant/components/hdmi_cec/switch.py index 280ea20413b..d1bb603a938 100644 --- a/homeassistant/components/hdmi_cec/switch.py +++ b/homeassistant/components/hdmi_cec/switch.py @@ -12,7 +12,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import ATTR_NEW, DOMAIN, CecEntity +from .const import ATTR_NEW, DOMAIN +from .entity import CecEntity _LOGGER = logging.getLogger(__name__) @@ -27,7 +28,7 @@ def setup_platform( ) -> None: """Find and return HDMI devices as switches.""" if discovery_info and ATTR_NEW in discovery_info: - _LOGGER.info("Setting up HDMI devices %s", discovery_info[ATTR_NEW]) + _LOGGER.debug("Setting up HDMI devices %s", discovery_info[ATTR_NEW]) entities = [] for device in discovery_info[ATTR_NEW]: hdmi_device = hass.data[DOMAIN][device] diff --git a/homeassistant/components/heatmiser/climate.py b/homeassistant/components/heatmiser/climate.py index f9f0cfacf60..de66315a467 100644 --- a/homeassistant/components/heatmiser/climate.py +++ b/homeassistant/components/heatmiser/climate.py @@ -1,11 +1,11 @@ -"""Support for the PRT Heatmiser themostats using the V3 protocol.""" +"""Support for the PRT Heatmiser thermostats using the V3 protocol.""" from __future__ import annotations import logging from typing import Any -from heatmiserV3 import connection, heatmiser +from heatmiserv3 import connection, heatmiser import voluptuous as vol from homeassistant.components.climate import ( @@ -82,7 +82,6 @@ class HeatmiserV3Thermostat(ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, therm, device, uh1): """Initialize the thermostat.""" diff --git a/homeassistant/components/heatmiser/manifest.json b/homeassistant/components/heatmiser/manifest.json index 7ae9cac1297..c7ffeb237ed 100644 --- a/homeassistant/components/heatmiser/manifest.json +++ b/homeassistant/components/heatmiser/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/heatmiser", "iot_class": "local_polling", "loggers": ["heatmiserV3"], - "requirements": ["heatmiserV3==1.1.18"] + "quality_scale": "legacy", + "requirements": ["heatmiserV3==2.0.3"] } diff --git a/homeassistant/components/heos/__init__.py b/homeassistant/components/heos/__init__.py index 1573ff3f23e..e6a46f5a4ca 100644 --- a/homeassistant/components/heos/__init__.py +++ b/homeassistant/components/heos/__init__.py @@ -3,34 +3,27 @@ from __future__ import annotations import asyncio +from dataclasses import dataclass from datetime import timedelta import logging -from pyheos import Heos, HeosError, const as heos_const -import voluptuous as vol +from pyheos import Heos, HeosError, HeosPlayer, const as heos_const -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) -from homeassistant.helpers.typing import ConfigType from homeassistant.util import Throttle from . 
import services -from .config_flow import format_title from .const import ( COMMAND_RETRY_ATTEMPTS, COMMAND_RETRY_DELAY, - DATA_CONTROLLER_MANAGER, - DATA_ENTITY_ID_MAP, - DATA_GROUP_MANAGER, - DATA_SOURCE_MANAGER, DOMAIN, SIGNAL_HEOS_PLAYER_ADDED, SIGNAL_HEOS_UPDATED, @@ -38,44 +31,25 @@ from .const import ( PLATFORMS = [Platform.MEDIA_PLAYER] -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - {DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, - ), - extra=vol.ALLOW_EXTRA, -) - MIN_UPDATE_SOURCES = timedelta(seconds=1) _LOGGER = logging.getLogger(__name__) -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the HEOS component.""" - if DOMAIN not in config: - return True - host = config[DOMAIN][CONF_HOST] - entries = hass.config_entries.async_entries(DOMAIN) - if not entries: - # Create new entry based on config - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: host} - ) - ) - else: - # Check if host needs to be updated - entry = entries[0] - if entry.data[CONF_HOST] != host: - hass.config_entries.async_update_entry( - entry, title=format_title(host), data={**entry.data, CONF_HOST: host} - ) +@dataclass +class HeosRuntimeData: + """Runtime data and coordinators for HEOS config entries.""" - return True + controller_manager: ControllerManager + group_manager: GroupManager + source_manager: SourceManager + players: dict[int, HeosPlayer] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type HeosConfigEntry = ConfigEntry[HeosRuntimeData] + + +async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool: """Initialize config entry which represents the HEOS controller.""" # For backwards compat if entry.unique_id is None: @@ -128,17 +102,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: source_manager = SourceManager(favorites, inputs) source_manager.connect_update(hass, controller) - group_manager = GroupManager(hass, controller) + group_manager = GroupManager(hass, controller, players) - hass.data[DOMAIN] = { - DATA_CONTROLLER_MANAGER: controller_manager, - DATA_GROUP_MANAGER: group_manager, - DATA_SOURCE_MANAGER: source_manager, - Platform.MEDIA_PLAYER: players, - # Maps player_id to entity_id. Populated by the individual - # HeosMediaPlayer entities. 
- DATA_ENTITY_ID_MAP: {}, - } + entry.runtime_data = HeosRuntimeData( + controller_manager, group_manager, source_manager, players + ) services.register(hass, controller) group_manager.connect_update() @@ -149,11 +117,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool: """Unload a config entry.""" - controller_manager = hass.data[DOMAIN][DATA_CONTROLLER_MANAGER] - await controller_manager.disconnect() - hass.data.pop(DOMAIN) + await entry.runtime_data.controller_manager.disconnect() services.remove(hass) @@ -246,21 +212,25 @@ class ControllerManager: class GroupManager: """Class that manages HEOS groups.""" - def __init__(self, hass, controller): + def __init__( + self, hass: HomeAssistant, controller: Heos, players: dict[int, HeosPlayer] + ) -> None: """Init group manager.""" self._hass = hass - self._group_membership = {} + self._group_membership: dict[str, str] = {} self._disconnect_player_added = None self._initialized = False self.controller = controller + self.players = players + self.entity_id_map: dict[int, str] = {} def _get_entity_id_to_player_id_map(self) -> dict: """Return mapping of all HeosMediaPlayer entity_ids to player_ids.""" - return {v: k for k, v in self._hass.data[DOMAIN][DATA_ENTITY_ID_MAP].items()} + return {v: k for k, v in self.entity_id_map.items()} - async def async_get_group_membership(self): + async def async_get_group_membership(self) -> dict[str, list[str]]: """Return all group members for each player as entity_ids.""" - group_info_by_entity_id = { + group_info_by_entity_id: dict[str, list[str]] = { player_entity_id: [] for player_entity_id in self._get_entity_id_to_player_id_map() } @@ -271,7 +241,7 @@ class GroupManager: _LOGGER.error("Unable to get HEOS group info: %s", err) return group_info_by_entity_id - player_id_to_entity_id_map = self._hass.data[DOMAIN][DATA_ENTITY_ID_MAP] + player_id_to_entity_id_map = self.entity_id_map for group in groups.values(): leader_entity_id = player_id_to_entity_id_map.get(group.leader.player_id) member_entity_ids = [ @@ -282,9 +252,9 @@ class GroupManager: # Make sure the group leader is always the first element group_info = [leader_entity_id, *member_entity_ids] if leader_entity_id: - group_info_by_entity_id[leader_entity_id] = group_info + group_info_by_entity_id[leader_entity_id] = group_info # type: ignore[assignment] for member_entity_id in member_entity_ids: - group_info_by_entity_id[member_entity_id] = group_info + group_info_by_entity_id[member_entity_id] = group_info # type: ignore[assignment] return group_info_by_entity_id @@ -358,13 +328,9 @@ class GroupManager: # When adding a new HEOS player we need to update the groups. async def _async_handle_player_added(): - # Avoid calling async_update_groups when `DATA_ENTITY_ID_MAP` has not been + # Avoid calling async_update_groups when the entity_id map has not been # fully populated yet. This may only happen during early startup. 
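
The HeosRuntimeData and HeosConfigEntry hunk above moves everything that used to live under hass.data[DOMAIN] onto the config entry itself. A minimal sketch of that pattern, with the manager types replaced by Any placeholders (the real dataclass holds ControllerManager, GroupManager, SourceManager and the player map) and the unload body abbreviated:

    from dataclasses import dataclass
    from typing import Any

    from homeassistant.config_entries import ConfigEntry
    from homeassistant.core import HomeAssistant


    @dataclass
    class SketchRuntimeData:
        """Objects formerly stored in hass.data[DOMAIN], now held per entry."""

        controller_manager: Any
        group_manager: Any
        source_manager: Any
        players: dict[int, Any]


    # Python 3.12 type alias: platforms annotate `entry: SketchConfigEntry`
    # and read entry.runtime_data.players instead of doing hass.data lookups.
    type SketchConfigEntry = ConfigEntry[SketchRuntimeData]


    async def async_unload_entry(hass: HomeAssistant, entry: SketchConfigEntry) -> bool:
        # Unload no longer pops hass.data; runtime objects go away with the entry.
        await entry.runtime_data.controller_manager.disconnect()
        return True

The same alias is what lets platform setup functions type their config entry argument against the runtime data rather than a bare ConfigEntry.
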
- if ( - len(self._hass.data[DOMAIN][Platform.MEDIA_PLAYER]) - <= len(self._hass.data[DOMAIN][DATA_ENTITY_ID_MAP]) - and not self._initialized - ): + if len(self.players) <= len(self.entity_id_map) and not self._initialized: self._initialized = True await self.async_update_groups(SIGNAL_HEOS_PLAYER_ADDED) diff --git a/homeassistant/components/heos/config_flow.py b/homeassistant/components/heos/config_flow.py index 968f677df23..f861247d1a9 100644 --- a/homeassistant/components/heos/config_flow.py +++ b/homeassistant/components/heos/config_flow.py @@ -10,12 +10,25 @@ from homeassistant.components import ssdp from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST -from .const import DATA_DISCOVERED_HOSTS, DOMAIN +from .const import DOMAIN def format_title(host: str) -> str: """Format the title for config entries.""" - return f"Controller ({host})" + return f"HEOS System (via {host})" + + +async def _validate_host(host: str, errors: dict[str, str]) -> bool: + """Validate host is reachable, return True, otherwise populate errors and return False.""" + heos = Heos(host) + try: + await heos.connect() + except HeosError: + errors[CONF_HOST] = "cannot_connect" + return False + finally: + await heos.disconnect() + return True class HeosFlowHandler(ConfigFlow, domain=DOMAIN): @@ -34,56 +47,56 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): friendly_name = ( f"{discovery_info.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME]} ({hostname})" ) - self.hass.data.setdefault(DATA_DISCOVERED_HOSTS, {}) - self.hass.data[DATA_DISCOVERED_HOSTS][friendly_name] = hostname - # Abort if other flows in progress or an entry already exists - if self._async_in_progress() or self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") + self.hass.data.setdefault(DOMAIN, {}) + self.hass.data[DOMAIN][friendly_name] = hostname await self.async_set_unique_id(DOMAIN) # Show selection form return self.async_show_form(step_id="user") - async def async_step_import(self, user_input=None): - """Occurs when an entry is setup through config.""" - host = user_input[CONF_HOST] - # raise_on_progress is False here in case ssdp discovers - # heos first which would block the import - await self.async_set_unique_id(DOMAIN, raise_on_progress=False) - return self.async_create_entry(title=format_title(host), data={CONF_HOST: host}) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Obtain host and validate connection.""" - self.hass.data.setdefault(DATA_DISCOVERED_HOSTS, {}) - # Only a single entry is needed for all devices - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") + self.hass.data.setdefault(DOMAIN, {}) + await self.async_set_unique_id(DOMAIN) # Try connecting to host if provided - errors = {} + errors: dict[str, str] = {} host = None if user_input is not None: host = user_input[CONF_HOST] # Map host from friendly name if in discovered hosts - host = self.hass.data[DATA_DISCOVERED_HOSTS].get(host, host) - heos = Heos(host) - try: - await heos.connect() - self.hass.data.pop(DATA_DISCOVERED_HOSTS) - return await self.async_step_import({CONF_HOST: host}) - except HeosError: - errors[CONF_HOST] = "cannot_connect" - finally: - await heos.disconnect() + host = self.hass.data[DOMAIN].get(host, host) + if await _validate_host(host, errors): + self.hass.data.pop(DOMAIN) # Remove discovery data + return self.async_create_entry( + title=format_title(host), data={CONF_HOST: 
host} + ) # Return form host_type = ( - str - if not self.hass.data[DATA_DISCOVERED_HOSTS] - else vol.In(list(self.hass.data[DATA_DISCOVERED_HOSTS])) + str if not self.hass.data[DOMAIN] else vol.In(list(self.hass.data[DOMAIN])) ) return self.async_show_form( step_id="user", data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): host_type}), errors=errors, ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Allow reconfiguration of entry.""" + entry = self._get_reconfigure_entry() + host = entry.data[CONF_HOST] # Get current host value + errors: dict[str, str] = {} + if user_input is not None: + host = user_input[CONF_HOST] + if await _validate_host(host, errors): + return self.async_update_reload_and_abort( + entry, data_updates={CONF_HOST: host} + ) + return self.async_show_form( + step_id="reconfigure", + data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): str}), + errors=errors, + ) diff --git a/homeassistant/components/heos/const.py b/homeassistant/components/heos/const.py index 636751d150b..5b2df2b5ebf 100644 --- a/homeassistant/components/heos/const.py +++ b/homeassistant/components/heos/const.py @@ -4,11 +4,6 @@ ATTR_PASSWORD = "password" ATTR_USERNAME = "username" COMMAND_RETRY_ATTEMPTS = 2 COMMAND_RETRY_DELAY = 1 -DATA_CONTROLLER_MANAGER = "controller" -DATA_ENTITY_ID_MAP = "entity_id_map" -DATA_GROUP_MANAGER = "group_manager" -DATA_SOURCE_MANAGER = "source_manager" -DATA_DISCOVERED_HOSTS = "heos_discovered_hosts" DOMAIN = "heos" SERVICE_SIGN_IN = "sign_in" SERVICE_SIGN_OUT = "sign_out" diff --git a/homeassistant/components/heos/icons.json b/homeassistant/components/heos/icons.json index 69c434c8287..23c2c8faeaf 100644 --- a/homeassistant/components/heos/icons.json +++ b/homeassistant/components/heos/icons.json @@ -1,6 +1,10 @@ { "services": { - "sign_in": "mdi:login", - "sign_out": "mdi:logout" + "sign_in": { + "service": "mdi:login" + }, + "sign_out": { + "service": "mdi:logout" + } } } diff --git a/homeassistant/components/heos/manifest.json b/homeassistant/components/heos/manifest.json index a90f0aebaae..12f10bcd0e3 100644 --- a/homeassistant/components/heos/manifest.json +++ b/homeassistant/components/heos/manifest.json @@ -7,6 +7,7 @@ "iot_class": "local_push", "loggers": ["pyheos"], "requirements": ["pyheos==0.7.2"], + "single_config_entry": true, "ssdp": [ { "st": "urn:schemas-denon-com:device:ACT-Denon:1" diff --git a/homeassistant/components/heos/media_player.py b/homeassistant/components/heos/media_player.py index 858ebd225b7..5255d369c2f 100644 --- a/homeassistant/components/heos/media_player.py +++ b/homeassistant/components/heos/media_player.py @@ -13,7 +13,6 @@ from pyheos import HeosError, const as heos_const from homeassistant.components import media_source from homeassistant.components.media_player import ( ATTR_MEDIA_ENQUEUE, - DOMAIN, BrowseMedia, MediaPlayerEnqueue, MediaPlayerEntity, @@ -22,7 +21,6 @@ from homeassistant.components.media_player import ( MediaType, async_process_play_media_url, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import ( @@ -32,14 +30,8 @@ from homeassistant.helpers.dispatcher import ( from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import utcnow -from .const import ( - DATA_ENTITY_ID_MAP, - DATA_GROUP_MANAGER, - DATA_SOURCE_MANAGER, - DOMAIN as 
HEOS_DOMAIN, - SIGNAL_HEOS_PLAYER_ADDED, - SIGNAL_HEOS_UPDATED, -) +from . import GroupManager, HeosConfigEntry, SourceManager +from .const import DOMAIN as HEOS_DOMAIN, SIGNAL_HEOS_PLAYER_ADDED, SIGNAL_HEOS_UPDATED BASE_SUPPORTED_FEATURES = ( MediaPlayerEntityFeature.VOLUME_MUTE @@ -80,11 +72,16 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: HeosConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Add media players for a config entry.""" - players = hass.data[HEOS_DOMAIN][DOMAIN] - devices = [HeosMediaPlayer(player) for player in players.values()] + players = entry.runtime_data.players + devices = [ + HeosMediaPlayer( + player, entry.runtime_data.source_manager, entry.runtime_data.group_manager + ) + for player in players.values() + ] async_add_entities(devices, True) @@ -120,13 +117,15 @@ class HeosMediaPlayer(MediaPlayerEntity): _attr_has_entity_name = True _attr_name = None - def __init__(self, player): + def __init__( + self, player, source_manager: SourceManager, group_manager: GroupManager + ) -> None: """Initialize.""" self._media_position_updated_at = None self._player = player - self._signals = [] - self._source_manager = None - self._group_manager = None + self._signals: list = [] + self._source_manager = source_manager + self._group_manager = group_manager self._attr_unique_id = str(player.player_id) self._attr_device_info = DeviceInfo( identifiers={(HEOS_DOMAIN, player.player_id)}, @@ -161,9 +160,7 @@ class HeosMediaPlayer(MediaPlayerEntity): async_dispatcher_connect(self.hass, SIGNAL_HEOS_UPDATED, self._heos_updated) ) # Register this player's entity_id so it can be resolved by the group manager - self.hass.data[HEOS_DOMAIN][DATA_ENTITY_ID_MAP][self._player.player_id] = ( - self.entity_id - ) + self._group_manager.entity_id_map[self._player.player_id] = self.entity_id async_dispatcher_send(self.hass, SIGNAL_HEOS_PLAYER_ADDED) @log_command_error("clear playlist") @@ -294,12 +291,6 @@ class HeosMediaPlayer(MediaPlayerEntity): ior, current_support, BASE_SUPPORTED_FEATURES ) - if self._group_manager is None: - self._group_manager = self.hass.data[HEOS_DOMAIN][DATA_GROUP_MANAGER] - - if self._source_manager is None: - self._source_manager = self.hass.data[HEOS_DOMAIN][DATA_SOURCE_MANAGER] - @log_command_error("unjoin_player") async def async_unjoin_player(self) -> None: """Remove this player from any group.""" diff --git a/homeassistant/components/heos/quality_scale.yaml b/homeassistant/components/heos/quality_scale.yaml new file mode 100644 index 00000000000..39c25486e52 --- /dev/null +++ b/homeassistant/components/heos/quality_scale.yaml @@ -0,0 +1,99 @@ +rules: + # Bronze + action-setup: + status: todo + comment: Future enhancement to move custom actions for login/out into an options flow. + appropriate-polling: + status: done + comment: Integration is a local push integration + brands: done + common-modules: todo + config-flow-test-coverage: done + config-flow: + status: done + comment: Consider enhancement to automatically select a host when multiple are discovered. + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: todo + comment: | + Simplify by using async_on_remove instead of keeping track of listeners to remove + later in async_will_remove_from_hass.
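
The entity-event-setup note just above proposes replacing the manually tracked _signals list with async_on_remove. A small sketch of that simplification; the dispatcher signal is an illustrative constant, not the real HEOS one:

    from homeassistant.helpers.dispatcher import async_dispatcher_connect
    from homeassistant.helpers.entity import Entity

    SIGNAL_UPDATED = "example_updated"  # placeholder signal name


    class SketchPlayer(Entity):
        """Entity that cleans up its dispatcher listener automatically."""

        async def async_added_to_hass(self) -> None:
            # async_on_remove runs the returned unsubscribe callback when the
            # entity is removed, so no async_will_remove_from_hass bookkeeping.
            self.async_on_remove(
                async_dispatcher_connect(self.hass, SIGNAL_UPDATED, self._updated)
            )

        async def _updated(self) -> None:
            self.async_write_ha_state()
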
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + action-exceptions: + status: todo + comment: Actions currently only log and instead should raise exceptions. + config-entry-unloading: done + docs-configuration-parameters: + status: done + comment: | + The integration doesn't provide any additional configuration parameters. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: todo + comment: | + The integration currently spams the logs until reconnected + parallel-updates: + status: todo + comment: Needs to be set to 0. The underlying library handles parallel updates. + reauthentication-flow: + status: exempt + comment: | + This integration doesn't require re-authentication. + test-coverage: + status: todo + comment: | + 1. Integration has >95% coverage, however tests need to be updated to not patch internals. + 2. test_async_setup_entry_connect_failure and test_async_setup_entry_player_failure -> Instead of + calling async_setup_entry directly, rather use hass.config_entries.async_setup and then assert + the config_entry.state is what we expect. + 3. test_unload_entry -> We should use hass.config_entries.async_unload and assert the entry state + 4. Recommend using snapshot in test_state_attributes. + 5. Find a way to avoid using internal dispatcher in test_updates_from_connection_event. + # Gold + devices: + status: todo + comment: | + The integration creates devices, but needs to stringify the id for the device identifier and + also migrate the device. + diagnostics: todo + discovery-update-info: + status: todo + comment: Explore if this is possible. + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: + status: todo + comment: Has some troubleshooting steps, but needs to be improved + docs-use-cases: done + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: done + repair-issues: todo + stale-devices: todo + # Platinum + async-dependency: done + inject-websession: + status: done + comment: The integration does not use websession + strict-typing: todo diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json index df18fc7834a..fe4fc63b449 100644 --- a/homeassistant/components/heos/strings.json +++ b/homeassistant/components/heos/strings.json @@ -2,13 +2,23 @@ "config": { "step": { "user": { - "title": "Connect to Heos", - "description": "Please enter the host name or IP address of a Heos device (preferably one connected via wire to the network).", + "title": "Connect to HEOS", + "description": "Please enter the host name or IP address of a HEOS-capable product to access your HEOS System.", "data": { "host": "[%key:common::config_flow::data::host%]" }, "data_description": { - "host": "The hostname or IP address of your HEOS device." + "host": "Host name or IP address of a HEOS-capable product (preferably one connected via wire to the network)."
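
The HEOS strings gain a reconfigure step and a "reconfigure_successful" abort reason just below; they pair with the async_step_reconfigure handler added to config_flow.py earlier in this diff, where async_update_reload_and_abort ends the flow with that abort reason. A trimmed sketch of such a handler under an illustrative domain; _validate_host stands in for the connection check defined in that config flow hunk:

    from typing import Any

    import voluptuous as vol

    from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
    from homeassistant.const import CONF_HOST


    async def _validate_host(host: str, errors: dict[str, str]) -> bool:
        """Stand-in for the helper that connects to the host and fills errors."""
        return True


    class SketchFlowHandler(ConfigFlow, domain="example"):
        """Config flow showing only the reconfigure step."""

        async def async_step_reconfigure(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            entry = self._get_reconfigure_entry()
            host = entry.data[CONF_HOST]  # pre-fill the form with the current host
            errors: dict[str, str] = {}
            if user_input is not None:
                host = user_input[CONF_HOST]
                if await _validate_host(host, errors):
                    # Updates the entry data, reloads it, and aborts with
                    # reason "reconfigure_successful".
                    return self.async_update_reload_and_abort(
                        entry, data_updates={CONF_HOST: host}
                    )
            return self.async_show_form(
                step_id="reconfigure",
                data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): str}),
                errors=errors,
            )
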
+ } + }, + "reconfigure": { + "title": "Reconfigure HEOS", + "description": "Change the host name or IP address of the HEOS-capable product used to access your HEOS System.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "[%key:component::heos::config::step::user::data_description::host%]" } } }, @@ -16,13 +26,15 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, "services": { "sign_in": { "name": "Sign in", - "description": "Signs the controller in to a HEOS account.", + "description": "Signs in to a HEOS account.", "fields": { "username": { "name": "[%key:common::config_flow::data::username%]", @@ -36,7 +48,7 @@ }, "sign_out": { "name": "Sign out", - "description": "Signs the controller out of the HEOS account." + "description": "Signs out of the HEOS account." } } } diff --git a/homeassistant/components/here_travel_time/config_flow.py b/homeassistant/components/here_travel_time/config_flow.py index b708fd9cd3d..c2b70de148c 100644 --- a/homeassistant/components/here_travel_time/config_flow.py +++ b/homeassistant/components/here_travel_time/config_flow.py @@ -2,8 +2,9 @@ from __future__ import annotations +from collections.abc import Mapping import logging -from typing import TYPE_CHECKING, Any +from typing import Any from here_routing import ( HERERoutingApi, @@ -16,6 +17,7 @@ from here_transit import HERETransitError import voluptuous as vol from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -78,7 +80,7 @@ async def async_validate_api_key(api_key: str) -> None: ) -def get_user_step_schema(data: dict[str, Any]) -> vol.Schema: +def get_user_step_schema(data: Mapping[str, Any]) -> vol.Schema: """Get a populated schema or default.""" travel_mode = data.get(CONF_MODE, TRAVEL_MODE_CAR) if travel_mode == "publicTransportTimeTable": @@ -104,8 +106,6 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Init Config Flow.""" self._config: dict[str, Any] = {} - self._entry: ConfigEntry | None = None - self._is_reconfigure_flow: bool = False @staticmethod @callback @@ -113,7 +113,7 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> HERETravelTimeOptionsFlow: """Get the options flow.""" - return HERETravelTimeOptionsFlow(config_entry) + return HERETravelTimeOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -121,35 +121,31 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors = {} user_input = user_input or {} - if not self._is_reconfigure_flow: # Always show form first for reconfiguration - if user_input: - try: - await async_validate_api_key(user_input[CONF_API_KEY]) - except HERERoutingUnauthorizedError: - errors["base"] = "invalid_auth" - except (HERERoutingError, HERETransitError): - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" - if not errors: - self._config[CONF_NAME] = user_input[CONF_NAME] - self._config[CONF_API_KEY] = user_input[CONF_API_KEY] - self._config[CONF_MODE] = user_input[CONF_MODE] - return await self.async_step_origin_menu() - self._is_reconfigure_flow = False + if 
user_input: + try: + await async_validate_api_key(user_input[CONF_API_KEY]) + except HERERoutingUnauthorizedError: + errors["base"] = "invalid_auth" + except (HERERoutingError, HERETransitError): + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + if not errors: + self._config[CONF_NAME] = user_input[CONF_NAME] + self._config[CONF_API_KEY] = user_input[CONF_API_KEY] + self._config[CONF_MODE] = user_input[CONF_MODE] + return await self.async_step_origin_menu() return self.async_show_form( step_id="user", data_schema=get_user_step_schema(user_input), errors=errors ) async def async_step_reconfigure( - self, _: dict[str, Any] | None = None + self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration.""" - self._is_reconfigure_flow = True - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - if TYPE_CHECKING: - assert self._entry - self._config = self._entry.data.copy() - return await self.async_step_user(self._config) + return self.async_show_form( + step_id="user", + data_schema=get_user_step_schema(self._get_reconfigure_entry().data), + ) async def async_step_origin_menu(self, _: None = None) -> ConfigFlowResult: """Show the origin menu.""" @@ -232,12 +228,11 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): ] # Remove possible previous configuration using an entity_id self._config.pop(CONF_DESTINATION_ENTITY_ID, None) - if self._entry: + if self.source == SOURCE_RECONFIGURE: return self.async_update_reload_and_abort( - self._entry, + self._get_reconfigure_entry(), title=self._config[CONF_NAME], data=self._config, - reason="reconfigure_successful", ) return self.async_create_entry( title=self._config[CONF_NAME], @@ -277,9 +272,9 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): # Remove possible previous configuration using coordinates self._config.pop(CONF_DESTINATION_LATITUDE, None) self._config.pop(CONF_DESTINATION_LONGITUDE, None) - if self._entry: + if self.source == SOURCE_RECONFIGURE: return self.async_update_reload_and_abort( - self._entry, data=self._config, reason="reconfigure_successful" + self._get_reconfigure_entry(), data=self._config ) return self.async_create_entry( title=self._config[CONF_NAME], @@ -302,9 +297,8 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): class HERETravelTimeOptionsFlow(OptionsFlow): """Handle HERE Travel Time options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize HERE Travel Time options flow.""" - self.config_entry = config_entry self._config: dict[str, Any] = {} async def async_step_init( diff --git a/homeassistant/components/hikvision/manifest.json b/homeassistant/components/hikvision/manifest.json index e37e149ccda..a0832732105 100644 --- a/homeassistant/components/hikvision/manifest.json +++ b/homeassistant/components/hikvision/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/hikvision", "iot_class": "local_push", "loggers": ["pyhik"], + "quality_scale": "legacy", "requirements": ["pyHik==0.3.2"] } diff --git a/homeassistant/components/hikvisioncam/manifest.json b/homeassistant/components/hikvisioncam/manifest.json index 28f677512b7..badb38a52d5 100644 --- a/homeassistant/components/hikvisioncam/manifest.json +++ b/homeassistant/components/hikvisioncam/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/hikvisioncam", "iot_class": "local_polling", "loggers": ["hikvision"], + 
"quality_scale": "legacy", "requirements": ["hikvision==0.4"] } diff --git a/homeassistant/components/hisense_aehw4a1/climate.py b/homeassistant/components/hisense_aehw4a1/climate.py index 656ba6c68c0..68f79439162 100644 --- a/homeassistant/components/hisense_aehw4a1/climate.py +++ b/homeassistant/components/hisense_aehw4a1/climate.py @@ -155,7 +155,6 @@ class ClimateAehW4a1(ClimateEntity): _attr_target_temperature_step = 1 _previous_state: HVACMode | str | None = None _on: str | None = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device): """Initialize the climate device.""" diff --git a/homeassistant/components/history/websocket_api.py b/homeassistant/components/history/websocket_api.py index 465416607a2..c85d975c3c9 100644 --- a/homeassistant/components/history/websocket_api.py +++ b/homeassistant/components/history/websocket_api.py @@ -13,8 +13,7 @@ import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.components.recorder import get_instance, history -from homeassistant.components.websocket_api import messages -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection, messages from homeassistant.const import ( COMPRESSED_STATE_ATTRIBUTES, COMPRESSED_STATE_LAST_CHANGED, diff --git a/homeassistant/components/history_stats/__init__.py b/homeassistant/components/history_stats/__init__.py index dcca10d73e9..63f32138dba 100644 --- a/homeassistant/components/history_stats/__init__.py +++ b/homeassistant/components/history_stats/__init__.py @@ -41,7 +41,7 @@ async def async_setup_entry( Template(end, hass) if end else None, duration, ) - coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, entry.title) + coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, entry, entry.title) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/history_stats/coordinator.py b/homeassistant/components/history_stats/coordinator.py index 0d613d2bbc0..fafbb5d3ce0 100644 --- a/homeassistant/components/history_stats/coordinator.py +++ b/homeassistant/components/history_stats/coordinator.py @@ -6,6 +6,7 @@ from datetime import timedelta import logging from typing import Any +from homeassistant.config_entries import ConfigEntry from homeassistant.core import ( CALLBACK_TYPE, Event, @@ -33,6 +34,7 @@ class HistoryStatsUpdateCoordinator(DataUpdateCoordinator[HistoryStatsState]): self, hass: HomeAssistant, history_stats: HistoryStats, + config_entry: ConfigEntry | None, name: str, ) -> None: """Initialize DataUpdateCoordinator.""" @@ -43,6 +45,7 @@ class HistoryStatsUpdateCoordinator(DataUpdateCoordinator[HistoryStatsState]): super().__init__( hass, _LOGGER, + config_entry=config_entry, name=name, update_interval=UPDATE_INTERVAL, ) diff --git a/homeassistant/components/history_stats/data.py b/homeassistant/components/history_stats/data.py index 544e1772b01..f9b79d74cb4 100644 --- a/homeassistant/components/history_stats/data.py +++ b/homeassistant/components/history_stats/data.py @@ -4,6 +4,8 @@ from __future__ import annotations from dataclasses import dataclass import datetime +import logging +import math from homeassistant.components.recorder import get_instance, history from homeassistant.core import Event, EventStateChangedData, HomeAssistant, State @@ -14,6 +16,8 @@ from .helpers import async_calculate_period, floored_timestamp MIN_TIME_UTC = 
datetime.datetime.min.replace(tzinfo=dt_util.UTC) +_LOGGER = logging.getLogger(__name__) + @dataclass class HistoryStatsState: @@ -176,26 +180,32 @@ class HistoryStats: # state_changes_during_period is called with include_start_time_state=True # which is the default and always provides the state at the start # of the period - previous_state_matches = ( - self._history_current_period - and self._history_current_period[0].state in self._entity_states - ) - last_state_change_timestamp = start_timestamp + previous_state_matches = False + last_state_change_timestamp = 0.0 elapsed = 0.0 - match_count = 1 if previous_state_matches else 0 + match_count = 0 # Make calculations for history_state in self._history_current_period: current_state_matches = history_state.state in self._entity_states state_change_timestamp = history_state.last_changed + if math.floor(state_change_timestamp) > now_timestamp: + # Shouldn't count states that are in the future + _LOGGER.debug( + "Skipping future timestamp %s (now %s)", + state_change_timestamp, + now_timestamp, + ) + continue + if previous_state_matches: elapsed += state_change_timestamp - last_state_change_timestamp elif current_state_matches: match_count += 1 previous_state_matches = current_state_matches - last_state_change_timestamp = state_change_timestamp + last_state_change_timestamp = max(start_timestamp, state_change_timestamp) # Count time elapsed between last history state and end of measure if previous_state_matches: diff --git a/homeassistant/components/history_stats/icons.json b/homeassistant/components/history_stats/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/history_stats/icons.json +++ b/homeassistant/components/history_stats/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/history_stats/sensor.py b/homeassistant/components/history_stats/sensor.py index 4558da8722c..e1241034aeb 100644 --- a/homeassistant/components/history_stats/sensor.py +++ b/homeassistant/components/history_stats/sensor.py @@ -104,7 +104,7 @@ async def async_setup_platform( unique_id: str | None = config.get(CONF_UNIQUE_ID) history_stats = HistoryStats(hass, entity_id, entity_states, start, end, duration) - coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, name) + coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, None, name) await coordinator.async_refresh() if not coordinator.last_update_success: raise PlatformNotReady from coordinator.last_exception diff --git a/homeassistant/components/history_stats/strings.json b/homeassistant/components/history_stats/strings.json index 603a6b8c4dc..aff2ac50bef 100644 --- a/homeassistant/components/history_stats/strings.json +++ b/homeassistant/components/history_stats/strings.json @@ -1,4 +1,5 @@ { + "title": "History Stats", "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" @@ -8,7 +9,7 @@ }, "step": { "user": { - "description": "Add a history stats sensor", + "description": "Create a history stats sensor", "data": { "name": "[%key:common::config_flow::data::name%]", "entity_id": "Entity", diff --git a/homeassistant/components/hitron_coda/device_tracker.py b/homeassistant/components/hitron_coda/device_tracker.py index 61199e4b2f7..2126f5834ce 100644 --- a/homeassistant/components/hitron_coda/device_tracker.py +++ b/homeassistant/components/hitron_coda/device_tracker.py @@ -10,7 +10,7 @@ import requests 
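
The history_stats change above reworks the matched-time accumulation: states stamped after now are skipped and the first counted transition is clamped to the window start. A standalone toy version of that loop, using (timestamp, matches) pairs instead of recorder State objects, to make the arithmetic easy to follow:

    import math


    def seconds_in_matching_states(
        changes: list[tuple[float, bool]],  # (last_changed timestamp, state matches?)
        start_ts: float,
        now_ts: float,
    ) -> tuple[float, int]:
        """Return (seconds spent in a matching state, number of matches)."""
        previous_matches = False
        last_change = 0.0
        elapsed = 0.0
        match_count = 0
        for change_ts, matches in changes:
            if math.floor(change_ts) > now_ts:
                # States timestamped in the future are ignored, as in the diff.
                continue
            if previous_matches:
                elapsed += change_ts - last_change
            elif matches:
                match_count += 1
            previous_matches = matches
            # Clamp to the window start so time before the window is not counted.
            last_change = max(start_ts, change_ts)
        if previous_matches:
            # Count the tail between the last change and the end of the window.
            elapsed += now_ts - last_change
        return elapsed, match_count

For example, seconds_in_matching_states([(5.0, True), (12.0, False)], start_ts=0.0, now_ts=20.0) returns (7.0, 1): seven matching seconds and one transition into a matching state.
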
import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -37,7 +37,7 @@ def get_scanner( _hass: HomeAssistant, config: ConfigType ) -> HitronCODADeviceScanner | None: """Validate the configuration and return a Hitron CODA-4582U scanner.""" - scanner = HitronCODADeviceScanner(config[DOMAIN]) + scanner = HitronCODADeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None @@ -66,7 +66,6 @@ class HitronCODADeviceScanner(DeviceScanner): self._userid = None self.success_init = self._update_info() - _LOGGER.info("Scanner initialized") def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" @@ -82,7 +81,7 @@ class HitronCODADeviceScanner(DeviceScanner): def _login(self): """Log in to the router. This is required for subsequent api calls.""" - _LOGGER.info("Logging in to CODA") + _LOGGER.debug("Logging in to CODA") try: data = [("user", self._username), (self._type, self._password)] @@ -102,7 +101,7 @@ class HitronCODADeviceScanner(DeviceScanner): def _update_info(self): """Get ARP from router.""" - _LOGGER.info("Fetching") + _LOGGER.debug("Fetching") if self._userid is None and not self._login(): _LOGGER.error("Could not obtain a user ID from the router") @@ -137,5 +136,5 @@ class HitronCODADeviceScanner(DeviceScanner): self.last_results = last_results - _LOGGER.info("Request successful") + _LOGGER.debug("Request successful") return True diff --git a/homeassistant/components/hitron_coda/manifest.json b/homeassistant/components/hitron_coda/manifest.json index 2f18707c95e..15f71b62cf3 100644 --- a/homeassistant/components/hitron_coda/manifest.json +++ b/homeassistant/components/hitron_coda/manifest.json @@ -3,5 +3,6 @@ "name": "Rogers Hitron CODA", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/hitron_coda", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/hive/__init__.py b/homeassistant/components/hive/__init__.py index 4001215d90e..ac008b857af 100644 --- a/homeassistant/components/hive/__init__.py +++ b/homeassistant/components/hive/__init__.py @@ -10,68 +10,24 @@ from typing import Any, Concatenate from aiohttp.web_exceptions import HTTPException from apyhiveapi import Auth, Hive from apyhiveapi.helper.hive_exceptions import HiveReauthRequired -import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME +from homeassistant.const import CONF_SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import aiohttp_client, config_validation as cv -from homeassistant.helpers.device_registry import DeviceEntry, DeviceInfo -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.device_registry import DeviceEntry +from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import DOMAIN, PLATFORM_LOOKUP, PLATFORMS +from .entity import HiveEntity _LOGGER = logging.getLogger(__name__) 
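
hive now pulls its shared base class from a dedicated entity.py (from .entity import HiveEntity), the same common-module split applied to hdmi_cec earlier in this diff. A rough sketch of what such a base class looks like; the dictionary keys follow the HiveEntity body removed from __init__.py further below, but the class here is illustrative rather than the actual module:

    from typing import Any

    from homeassistant.helpers.device_registry import DeviceInfo
    from homeassistant.helpers.entity import Entity

    DOMAIN = "hive"


    class SketchHiveEntity(Entity):
        """Base entity wiring a Hive device dict to registry metadata."""

        def __init__(self, hive: Any, hive_device: dict[str, Any]) -> None:
            self.hive = hive
            self.device = hive_device
            self._attr_name = hive_device["haName"]
            self._attr_unique_id = f"{hive_device['hiveID']}-{hive_device['hiveType']}"
            self._attr_device_info = DeviceInfo(
                identifiers={(DOMAIN, hive_device["device_id"])},
                manufacturer=hive_device["deviceData"]["manufacturer"],
                model=hive_device["deviceData"]["model"],
                name=hive_device["device_name"],
            )
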
-CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Optional(CONF_SCAN_INTERVAL, default=2): cv.positive_int, - }, - ) - }, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Hive configuration setup.""" - hass.data[DOMAIN] = {} - - if DOMAIN not in config: - return True - - conf = config[DOMAIN] - - if not hass.config_entries.async_entries(DOMAIN): - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_USERNAME: conf[CONF_USERNAME], - CONF_PASSWORD: conf[CONF_PASSWORD], - }, - ) - ) - return True - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Hive from a config entry.""" + hass.data.setdefault(DOMAIN, {}) web_session = aiohttp_client.async_get_clientsession(hass) hive_config = dict(entry.data) @@ -139,29 +95,3 @@ def refresh_system[_HiveEntityT: HiveEntity, **_P]( async_dispatcher_send(self.hass, DOMAIN) return wrapper - - -class HiveEntity(Entity): - """Initiate Hive Base Class.""" - - def __init__(self, hive: Hive, hive_device: dict[str, Any]) -> None: - """Initialize the instance.""" - self.hive = hive - self.device = hive_device - self._attr_name = self.device["haName"] - self._attr_unique_id = f'{self.device["hiveID"]}-{self.device["hiveType"]}' - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self.device["device_id"])}, - model=self.device["deviceData"]["model"], - manufacturer=self.device["deviceData"]["manufacturer"], - name=self.device["device_name"], - sw_version=self.device["deviceData"]["version"], - via_device=(DOMAIN, self.device["parentDevice"]), - ) - self.attributes: dict[str, Any] = {} - - async def async_added_to_hass(self) -> None: - """When entity is added to Home Assistant.""" - self.async_on_remove( - async_dispatcher_connect(self.hass, DOMAIN, self.async_write_ha_state) - ) diff --git a/homeassistant/components/hive/alarm_control_panel.py b/homeassistant/components/hive/alarm_control_panel.py index 06383784a3f..2b196ce820b 100644 --- a/homeassistant/components/hive/alarm_control_panel.py +++ b/homeassistant/components/hive/alarm_control_panel.py @@ -7,27 +7,22 @@ from datetime import timedelta from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import HiveEntity from .const import DOMAIN +from .entity import HiveEntity PARALLEL_UPDATES = 0 SCAN_INTERVAL = timedelta(seconds=15) HIVETOHA = { - "home": STATE_ALARM_DISARMED, - "asleep": STATE_ALARM_ARMED_NIGHT, - "away": STATE_ALARM_ARMED_AWAY, - "sos": STATE_ALARM_TRIGGERED, + "home": AlarmControlPanelState.DISARMED, + "asleep": AlarmControlPanelState.ARMED_NIGHT, + "away": AlarmControlPanelState.ARMED_AWAY, + "sos": AlarmControlPanelState.TRIGGERED, } @@ -76,6 +71,6 @@ class HiveAlarmControlPanelEntity(HiveEntity, AlarmControlPanelEntity): self._attr_available = self.device["deviceData"].get("online") if self._attr_available: if self.device["status"]["state"]: - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED else: - self._attr_state = HIVETOHA[self.device["status"]["mode"]] + self._attr_alarm_state = HIVETOHA[self.device["status"]["mode"]] diff --git a/homeassistant/components/hive/binary_sensor.py b/homeassistant/components/hive/binary_sensor.py index 512b06ece6d..d14d98bcf50 100644 --- a/homeassistant/components/hive/binary_sensor.py +++ b/homeassistant/components/hive/binary_sensor.py @@ -14,8 +14,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HiveEntity from .const import DOMAIN +from .entity import HiveEntity PARALLEL_UPDATES = 0 SCAN_INTERVAL = timedelta(seconds=15) diff --git a/homeassistant/components/hive/climate.py b/homeassistant/components/hive/climate.py index 87d93eea95f..c76379cf940 100644 --- a/homeassistant/components/hive/climate.py +++ b/homeassistant/components/hive/climate.py @@ -21,13 +21,14 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HiveEntity, refresh_system +from . 
import refresh_system from .const import ( ATTR_TIME_PERIOD, DOMAIN, SERVICE_BOOST_HEATING_OFF, SERVICE_BOOST_HEATING_ON, ) +from .entity import HiveEntity HIVE_TO_HASS_STATE = { "SCHEDULE": HVACMode.AUTO, @@ -99,7 +100,6 @@ class HiveClimateEntity(HiveEntity, ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, hive: Hive, hive_device: dict[str, Any]) -> None: """Initialize the Climate device.""" diff --git a/homeassistant/components/hive/config_flow.py b/homeassistant/components/hive/config_flow.py index f8cb089834a..e3180dc9734 100644 --- a/homeassistant/components/hive/config_flow.py +++ b/homeassistant/components/hive/config_flow.py @@ -104,7 +104,7 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "no_internet_available" if not errors: - if self.context["source"] == SOURCE_REAUTH: + if self.source == SOURCE_REAUTH: return await self.async_setup_hive_entry() self.device_registration = True return await self.async_step_configuration() @@ -144,7 +144,7 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN): # Setup the config entry self.data["tokens"] = self.tokens - if self.context["source"] == SOURCE_REAUTH: + if self.source == SOURCE_REAUTH: assert self.entry self.hass.config_entries.async_update_entry( self.entry, title=self.data["username"], data=self.data @@ -163,12 +163,6 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN): } return await self.async_step_user(data) - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Import user.""" - return await self.async_step_user(user_input) - @staticmethod @callback def async_get_options_flow( @@ -184,7 +178,6 @@ class HiveOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Hive options flow.""" self.hive = None - self.config_entry = config_entry self.interval = config_entry.options.get(CONF_SCAN_INTERVAL, 120) async def async_step_init( diff --git a/homeassistant/components/hive/entity.py b/homeassistant/components/hive/entity.py new file mode 100644 index 00000000000..1209e8c8f05 --- /dev/null +++ b/homeassistant/components/hive/entity.py @@ -0,0 +1,39 @@ +"""Support for the Hive devices and services.""" + +from __future__ import annotations + +from typing import Any + +from apyhiveapi import Hive + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + + +class HiveEntity(Entity): + """Initiate Hive Base Class.""" + + def __init__(self, hive: Hive, hive_device: dict[str, Any]) -> None: + """Initialize the instance.""" + self.hive = hive + self.device = hive_device + self._attr_name = self.device["haName"] + self._attr_unique_id = f'{self.device["hiveID"]}-{self.device["hiveType"]}' + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.device["device_id"])}, + model=self.device["deviceData"]["model"], + manufacturer=self.device["deviceData"]["manufacturer"], + name=self.device["device_name"], + sw_version=self.device["deviceData"]["version"], + via_device=(DOMAIN, self.device["parentDevice"]), + ) + self.attributes: dict[str, Any] = {} + + async def async_added_to_hass(self) -> None: + """When entity is added to Home Assistant.""" + self.async_on_remove( + async_dispatcher_connect(self.hass, DOMAIN, self.async_write_ha_state) + ) diff --git 
a/homeassistant/components/hive/icons.json b/homeassistant/components/hive/icons.json index 2704317779c..e4c06556906 100644 --- a/homeassistant/components/hive/icons.json +++ b/homeassistant/components/hive/icons.json @@ -18,8 +18,14 @@ } }, "services": { - "boost_heating_on": "mdi:radiator", - "boost_heating_off": "mdi:radiator-off", - "boost_hot_water": "mdi:water-boiler" + "boost_heating_on": { + "service": "mdi:radiator" + }, + "boost_heating_off": { + "service": "mdi:radiator-off" + }, + "boost_hot_water": { + "service": "mdi:water-boiler" + } } } diff --git a/homeassistant/components/hive/light.py b/homeassistant/components/hive/light.py index 1ce49599262..b510569eb47 100644 --- a/homeassistant/components/hive/light.py +++ b/homeassistant/components/hive/light.py @@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -17,8 +17,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util -from . import HiveEntity, refresh_system +from . import refresh_system from .const import ATTR_MODE, DOMAIN +from .entity import HiveEntity if TYPE_CHECKING: from apyhiveapi import Hive @@ -42,6 +43,9 @@ async def async_setup_entry( class HiveDeviceLight(HiveEntity, LightEntity): """Hive Active Light Device.""" + _attr_min_color_temp_kelvin = 2700 # 370 Mireds + _attr_max_color_temp_kelvin = 6500 # 153 Mireds + def __init__(self, hive: Hive, hive_device: dict[str, Any]) -> None: """Initialise hive light.""" super().__init__(hive, hive_device) @@ -55,9 +59,6 @@ class HiveDeviceLight(HiveEntity, LightEntity): self._attr_supported_color_modes = {ColorMode.COLOR_TEMP, ColorMode.HS} self._attr_color_mode = ColorMode.UNKNOWN - self._attr_min_mireds = 153 - self._attr_max_mireds = 370 - @refresh_system async def async_turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" @@ -70,9 +71,8 @@ class HiveDeviceLight(HiveEntity, LightEntity): new_brightness = int(round(percentage_brightness / 5.0) * 5.0) if new_brightness == 0: new_brightness = 5 - if ATTR_COLOR_TEMP in kwargs: - tmp_new_color_temp = kwargs[ATTR_COLOR_TEMP] - new_color_temp = round(1000000 / tmp_new_color_temp) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + new_color_temp = kwargs[ATTR_COLOR_TEMP_KELVIN] if ATTR_HS_COLOR in kwargs: get_new_color = kwargs[ATTR_HS_COLOR] hue = int(get_new_color[0]) @@ -101,12 +101,22 @@ class HiveDeviceLight(HiveEntity, LightEntity): self._attr_is_on = self.device["status"]["state"] self._attr_brightness = self.device["status"]["brightness"] if self.device["hiveType"] == "tuneablelight": - self._attr_color_temp = self.device["status"].get("color_temp") + color_temp = self.device["status"].get("color_temp") + self._attr_color_temp_kelvin = ( + None + if color_temp is None + else color_util.color_temperature_mired_to_kelvin(color_temp) + ) + if self.device["hiveType"] == "colourtuneablelight": if self.device["status"]["mode"] == "COLOUR": rgb = self.device["status"]["hs_color"] self._attr_hs_color = color_util.color_RGB_to_hs(*rgb) self._attr_color_mode = ColorMode.HS else: - self._attr_color_temp = self.device["status"].get("color_temp") + self._attr_color_temp_kelvin = ( + None + if color_temp is None + else color_util.color_temperature_mired_to_kelvin(color_temp) + ) self._attr_color_mode = ColorMode.COLOR_TEMP diff --git 
a/homeassistant/components/hive/sensor.py b/homeassistant/components/hive/sensor.py index d51acecc9f6..00a2116e268 100644 --- a/homeassistant/components/hive/sensor.py +++ b/homeassistant/components/hive/sensor.py @@ -24,8 +24,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import HiveEntity from .const import DOMAIN +from .entity import HiveEntity PARALLEL_UPDATES = 0 SCAN_INTERVAL = timedelta(seconds=15) @@ -127,5 +127,5 @@ class HiveSensorEntity(HiveEntity, SensorEntity): await self.hive.session.updateData(self.device) self.device = await self.hive.sensor.getSensor(self.device) self._attr_native_value = self.entity_description.fn( - self.device["status"]["state"] + self.device.get("status", {}).get("state") ) diff --git a/homeassistant/components/hive/strings.json b/homeassistant/components/hive/strings.json index bd4e95618e4..c8062a64ade 100644 --- a/homeassistant/components/hive/strings.json +++ b/homeassistant/components/hive/strings.json @@ -21,7 +21,7 @@ "data": { "device_name": "Device Name" }, - "description": "Enter your Hive configuration ", + "description": "Enter your Hive configuration", "title": "Hive Configuration." }, "reauth": { diff --git a/homeassistant/components/hive/switch.py b/homeassistant/components/hive/switch.py index 136f03de195..1421616db57 100644 --- a/homeassistant/components/hive/switch.py +++ b/homeassistant/components/hive/switch.py @@ -13,8 +13,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HiveEntity, refresh_system +from . import refresh_system from .const import ATTR_MODE, DOMAIN +from .entity import HiveEntity PARALLEL_UPDATES = 0 SCAN_INTERVAL = timedelta(seconds=15) diff --git a/homeassistant/components/hive/water_heater.py b/homeassistant/components/hive/water_heater.py index 2e582e19567..b038739d2ad 100644 --- a/homeassistant/components/hive/water_heater.py +++ b/homeassistant/components/hive/water_heater.py @@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HiveEntity, refresh_system +from . 
import refresh_system from .const import ( ATTR_ONOFF, ATTR_TIME_PERIOD, @@ -24,6 +24,7 @@ from .const import ( SERVICE_BOOST_HOT_WATER, WATER_HEATER_MODES, ) +from .entity import HiveEntity HOTWATER_NAME = "Hot Water" PARALLEL_UPDATES = 0 diff --git a/homeassistant/components/hlk_sw16/__init__.py b/homeassistant/components/hlk_sw16/__init__.py index 3e6a9f6b0d6..ce37be96dcd 100644 --- a/homeassistant/components/hlk_sw16/__init__.py +++ b/homeassistant/components/hlk_sw16/__init__.py @@ -9,11 +9,7 @@ from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT, CONF_SWITCHES, Platform from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType from .const import ( @@ -131,53 +127,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if not hass.data[DOMAIN]: hass.data.pop(DOMAIN) return unload_ok - - -class SW16Device(Entity): - """Representation of a HLK-SW16 device. - - Contains the common logic for HLK-SW16 entities. - """ - - _attr_should_poll = False - - def __init__(self, device_port, entry_id, client): - """Initialize the device.""" - # HLK-SW16 specific attributes for every component type - self._entry_id = entry_id - self._device_port = device_port - self._is_on = None - self._client = client - self._attr_name = device_port - self._attr_unique_id = f"{self._entry_id}_{self._device_port}" - - @callback - def handle_event_callback(self, event): - """Propagate changes through ha.""" - _LOGGER.debug("Relay %s new state callback: %r", self.unique_id, event) - self._is_on = event - self.async_write_ha_state() - - @property - def available(self): - """Return True if entity is available.""" - return bool(self._client.is_connected) - - @callback - def _availability_callback(self, availability): - """Update availability state.""" - self.async_write_ha_state() - - async def async_added_to_hass(self): - """Register update callback.""" - self._client.register_status_callback( - self.handle_event_callback, self._device_port - ) - self._is_on = await self._client.status(self._device_port) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"hlk_sw16_device_available_{self._entry_id}", - self._availability_callback, - ) - ) diff --git a/homeassistant/components/hlk_sw16/config_flow.py b/homeassistant/components/hlk_sw16/config_flow.py index df2f32088cf..34ee1ebd0e7 100644 --- a/homeassistant/components/hlk_sw16/config_flow.py +++ b/homeassistant/components/hlk_sw16/config_flow.py @@ -4,6 +4,7 @@ import asyncio from typing import Any from hlk_sw16 import create_hlk_sw16_connection +from hlk_sw16.protocol import SW16Client import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -27,7 +28,7 @@ DATA_SCHEMA = vol.Schema( ) -async def connect_client(hass, user_input): +async def connect_client(hass: HomeAssistant, user_input: dict[str, Any]) -> SW16Client: """Connect the HLK-SW16 client.""" client_aw = create_hlk_sw16_connection( host=user_input[CONF_HOST], @@ -41,7 +42,7 @@ async def connect_client(hass, user_input): return await client_aw -async def validate_input(hass: HomeAssistant, user_input): +async def validate_input(hass: 
HomeAssistant, user_input: dict[str, Any]) -> None: """Validate the user input allows us to connect.""" try: client = await connect_client(hass, user_input) @@ -70,9 +71,9 @@ class SW16FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/hlk_sw16/entity.py b/homeassistant/components/hlk_sw16/entity.py new file mode 100644 index 00000000000..fdef5f6764b --- /dev/null +++ b/homeassistant/components/hlk_sw16/entity.py @@ -0,0 +1,59 @@ +"""Support for HLK-SW16 relay switches.""" + +import logging + +from homeassistant.core import callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +_LOGGER = logging.getLogger(__name__) + + +class SW16Entity(Entity): + """Representation of a HLK-SW16 device. + + Contains the common logic for HLK-SW16 entities. + """ + + _attr_should_poll = False + + def __init__(self, device_port, entry_id, client): + """Initialize the device.""" + # HLK-SW16 specific attributes for every component type + self._entry_id = entry_id + self._device_port = device_port + self._is_on = None + self._client = client + self._attr_name = device_port + self._attr_unique_id = f"{self._entry_id}_{self._device_port}" + + @callback + def handle_event_callback(self, event): + """Propagate changes through ha.""" + _LOGGER.debug("Relay %s new state callback: %r", self.unique_id, event) + self._is_on = event + self.async_write_ha_state() + + @property + def available(self): + """Return True if entity is available.""" + return bool(self._client.is_connected) + + @callback + def _availability_callback(self, availability): + """Update availability state.""" + self.async_write_ha_state() + + async def async_added_to_hass(self): + """Register update callback.""" + self._client.register_status_callback( + self.handle_event_callback, self._device_port + ) + self._is_on = await self._client.status(self._device_port) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"hlk_sw16_device_available_{self._entry_id}", + self._availability_callback, + ) + ) diff --git a/homeassistant/components/hlk_sw16/switch.py b/homeassistant/components/hlk_sw16/switch.py index 590ab9c4497..3911dd6eab9 100644 --- a/homeassistant/components/hlk_sw16/switch.py +++ b/homeassistant/components/hlk_sw16/switch.py @@ -7,8 +7,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DATA_DEVICE_REGISTER, SW16Device +from . 
import DATA_DEVICE_REGISTER from .const import DOMAIN +from .entity import SW16Entity PARALLEL_UPDATES = 0 @@ -31,7 +32,7 @@ async def async_setup_entry( async_add_entities(devices_from_entities(hass, entry)) -class SW16Switch(SW16Device, SwitchEntity): +class SW16Switch(SW16Entity, SwitchEntity): """Representation of a HLK-SW16 switch.""" @property diff --git a/homeassistant/components/holiday/config_flow.py b/homeassistant/components/holiday/config_flow.py index a9b2f3e9772..27b13e34851 100644 --- a/homeassistant/components/holiday/config_flow.py +++ b/homeassistant/components/holiday/config_flow.py @@ -8,7 +8,7 @@ from babel import Locale, UnknownLocaleError from holidays import list_supported_countries import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_COUNTRY from homeassistant.helpers.selector import ( CountrySelector, @@ -27,7 +27,6 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Holiday.""" VERSION = 1 - config_entry: ConfigEntry | None def __init__(self) -> None: """Initialize the config flow.""" @@ -115,19 +114,9 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the re-configuration of a province.""" - self.config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle the re-configuration of a province.""" - assert self.config_entry - + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: - combined_input: dict[str, Any] = {**self.config_entry.data, **user_input} + combined_input: dict[str, Any] = {**reconfigure_entry.data, **user_input} country = combined_input[CONF_COUNTRY] province = combined_input.get(CONF_PROVINCE) @@ -149,10 +138,7 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): name = f"{locale.territories[country]}{province_str}" return self.async_update_reload_and_abort( - self.config_entry, - title=name, - data=combined_input, - reason="reconfigure_successful", + reconfigure_entry, title=name, data=combined_input ) province_schema = vol.Schema( @@ -160,7 +146,7 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): vol.Optional(CONF_PROVINCE): SelectSelector( SelectSelectorConfig( options=SUPPORTED_COUNTRIES[ - self.config_entry.data[CONF_COUNTRY] + reconfigure_entry.data[CONF_COUNTRY] ], mode=SelectSelectorMode.DROPDOWN, ) @@ -168,6 +154,4 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): } ) - return self.async_show_form( - step_id="reconfigure_confirm", data_schema=province_schema - ) + return self.async_show_form(step_id="reconfigure", data_schema=province_schema) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index 0a3064450d4..33cae231595 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.55", "babel==2.15.0"] + "requirements": ["holidays==0.63", "babel==2.15.0"] } diff --git a/homeassistant/components/holiday/strings.json 
b/homeassistant/components/holiday/strings.json index de013f44d60..ae4930ecdb4 100644 --- a/homeassistant/components/holiday/strings.json +++ b/homeassistant/components/holiday/strings.json @@ -16,7 +16,7 @@ "province": "Province" } }, - "reconfigure_confirm": { + "reconfigure": { "data": { "province": "[%key:component::holiday::config::step::province::data::province%]" } diff --git a/homeassistant/components/home_connect/__init__.py b/homeassistant/components/home_connect/__init__.py index ebfd6f91c76..818c4e6fe19 100644 --- a/homeassistant/components/home_connect/__init__.py +++ b/homeassistant/components/home_connect/__init__.py @@ -4,18 +4,22 @@ from __future__ import annotations from datetime import timedelta import logging +import re +from typing import Any, cast from requests import HTTPError import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_DEVICE_ID, CONF_DEVICE, Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import ATTR_DEVICE_ID, Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import ( config_entry_oauth2_flow, config_validation as cv, device_registry as dr, ) +from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries from homeassistant.helpers.typing import ConfigType from homeassistant.util import Throttle @@ -28,6 +32,7 @@ from .const import ( BSH_PAUSE, BSH_RESUME, DOMAIN, + OLD_NEW_UNIQUE_ID_SUFFIX_MAP, SERVICE_OPTION_ACTIVE, SERVICE_OPTION_SELECTED, SERVICE_PAUSE_PROGRAM, @@ -35,10 +40,17 @@ from .const import ( SERVICE_SELECT_PROGRAM, SERVICE_SETTING, SERVICE_START_PROGRAM, + SVE_TRANSLATION_PLACEHOLDER_KEY, + SVE_TRANSLATION_PLACEHOLDER_PROGRAM, + SVE_TRANSLATION_PLACEHOLDER_VALUE, ) +type HomeConnectConfigEntry = ConfigEntry[api.ConfigEntryAuth] + _LOGGER = logging.getLogger(__name__) +RE_CAMEL_CASE = re.compile(r"(?<!^)(?=[A-Z])") -def _get_appliance_by_device_id( +def _get_appliance( + hass: HomeAssistant, + device_id: str | None = None, + device_entry: dr.DeviceEntry | None = None, + entry: HomeConnectConfigEntry | None = None, +) ->
api.HomeConnectAppliance: + """Return a Home Connect appliance instance given a device id or a device entry.""" + if device_id is not None and device_entry is None: + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get(device_id) + assert device_entry, "Either a device id or a device entry must be provided" + + ha_id = next( + ( + identifier[1] + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + ), + None, + ) + assert ha_id + + def find_appliance( + entry: HomeConnectConfigEntry, + ) -> api.HomeConnectAppliance | None: + for device in entry.runtime_data.devices: + appliance = device.appliance + if appliance.haId == ha_id: + return appliance + return None + + if entry is None: + for entry_id in device_entry.config_entries: + entry = hass.config_entries.async_get_entry(entry_id) + assert entry + if entry.domain == DOMAIN: + entry = cast(HomeConnectConfigEntry, entry) + if (appliance := find_appliance(entry)) is not None: + return appliance + elif (appliance := find_appliance(entry)) is not None: + return appliance + raise ValueError(f"Appliance for device id {device_entry.id} not found") + + +def _get_appliance_or_raise_service_validation_error( hass: HomeAssistant, device_id: str -) -> api.HomeConnectDevice: - """Return a Home Connect appliance instance given an device_id.""" - for hc_api in hass.data[DOMAIN].values(): - for dev_dict in hc_api.devices: - device = dev_dict[CONF_DEVICE] - if device.device_id == device_id: - return device.appliance - raise ValueError(f"Appliance for device id {device_id} not found") +) -> api.HomeConnectAppliance: + """Return a Home Connect appliance instance or raise a service validation error.""" + try: + return _get_appliance(hass, device_id) + except (ValueError, AssertionError) as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="appliance_not_found", + translation_placeholders={ + "device_id": device_id, + }, + ) from err + + +async def _run_appliance_service[*_Ts]( + hass: HomeAssistant, + appliance: api.HomeConnectAppliance, + method: str, + *args: *_Ts, + error_translation_key: str, + error_translation_placeholders: dict[str, str], +) -> None: + try: + await hass.async_add_executor_job(getattr(appliance, method), args) + except api.HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key=error_translation_key, + translation_placeholders={ + **get_dict_from_home_connect_error(err), + **error_translation_placeholders, + }, + ) from err async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Home Connect component.""" - hass.data[DOMAIN] = {} async def _async_service_program(call, method): """Execute calls to services taking a program.""" @@ -111,16 +199,31 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: option[ATTR_UNIT] = option_unit options.append(option) - - appliance = _get_appliance_by_device_id(hass, device_id) - await hass.async_add_executor_job(getattr(appliance, method), program, options) + await _run_appliance_service( + hass, + _get_appliance_or_raise_service_validation_error(hass, device_id), + method, + program, + options, + error_translation_key=method, + error_translation_placeholders={ + SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program, + }, + ) async def _async_service_command(call, command): """Execute calls to services executing a command.""" device_id = call.data[ATTR_DEVICE_ID] - appliance = _get_appliance_by_device_id(hass, device_id) - await 
hass.async_add_executor_job(appliance.execute_command, command) + appliance = _get_appliance_or_raise_service_validation_error(hass, device_id) + await _run_appliance_service( + hass, + appliance, + "execute_command", + command, + error_translation_key="execute_command", + error_translation_placeholders={"command": command}, + ) async def _async_service_key_value(call, method): """Execute calls to services taking a key and value.""" @@ -129,20 +232,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: unit = call.data.get(ATTR_UNIT) device_id = call.data[ATTR_DEVICE_ID] - appliance = _get_appliance_by_device_id(hass, device_id) - if unit is not None: - await hass.async_add_executor_job( - getattr(appliance, method), - key, - value, - unit, - ) - else: - await hass.async_add_executor_job( - getattr(appliance, method), - key, - value, - ) + await _run_appliance_service( + hass, + _get_appliance_or_raise_service_validation_error(hass, device_id), + method, + *((key, value) if unit is None else (key, value, unit)), + error_translation_key=method, + error_translation_placeholders={ + SVE_TRANSLATION_PLACEHOLDER_KEY: key, + SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + }, + ) async def async_service_option_active(call): """Service for setting an option for an active program.""" @@ -215,7 +315,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: HomeConnectConfigEntry) -> bool: """Set up Home Connect from a config entry.""" implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( @@ -223,9 +323,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) ) - hc_api = api.ConfigEntryAuth(hass, entry, implementation) - - hass.data[DOMAIN][entry.entry_id] = hc_api + entry.runtime_data = api.ConfigEntryAuth(hass, entry, implementation) await update_all_devices(hass, entry) @@ -234,37 +332,75 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: HomeConnectConfigEntry +) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @Throttle(SCAN_INTERVAL) -async def update_all_devices(hass, entry): +async def update_all_devices( + hass: HomeAssistant, entry: HomeConnectConfigEntry +) -> None: """Update all the devices.""" - data = hass.data[DOMAIN] - hc_api = data[entry.entry_id] + hc_api = entry.runtime_data - device_registry = dr.async_get(hass) try: await hass.async_add_executor_job(hc_api.get_devices) - for device_dict in hc_api.devices: - device = device_dict["device"] - - device_entry = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - identifiers={(DOMAIN, device.appliance.haId)}, - name=device.appliance.name, - manufacturer=device.appliance.brand, - model=device.appliance.vib, - ) - - device.device_id = device_entry.id - + for device in hc_api.devices: await hass.async_add_executor_job(device.initialize) except HTTPError as err: _LOGGER.warning("Cannot update devices: %s", err.response.status_code) + + +async def async_migrate_entry( + 
hass: HomeAssistant, entry: HomeConnectConfigEntry +) -> bool: + """Migrate old entry.""" + _LOGGER.debug("Migrating from version %s", entry.version) + + if entry.version == 1 and entry.minor_version == 1: + + @callback + def update_unique_id( + entity_entry: RegistryEntry, + ) -> dict[str, Any] | None: + """Update unique ID of entity entry.""" + for old_id_suffix, new_id_suffix in OLD_NEW_UNIQUE_ID_SUFFIX_MAP.items(): + if entity_entry.unique_id.endswith(f"-{old_id_suffix}"): + return { + "new_unique_id": entity_entry.unique_id.replace( + old_id_suffix, new_id_suffix + ) + } + return None + + await async_migrate_entries(hass, entry.entry_id, update_unique_id) + + hass.config_entries.async_update_entry(entry, minor_version=2) + + _LOGGER.debug("Migration to version %s successful", entry.version) + return True + + +def get_dict_from_home_connect_error(err: api.HomeConnectError) -> dict[str, Any]: + """Return a dict from a Home Connect error.""" + return { + "description": cast(dict[str, Any], err.args[0]).get("description", "?") + if len(err.args) > 0 and isinstance(err.args[0], dict) + else err.args[0] + if len(err.args) > 0 and isinstance(err.args[0], str) + else "?", + } + + +def bsh_key_to_translation_key(bsh_key: str) -> str: + """Convert a BSH key to a translation key format. + + This function takes a BSH key, such as `Dishcare.Dishwasher.Program.Eco50`, + and converts it to a translation key format, such as `dishcare_dishwasher_bsh_key_eco50`. + """ + return "_".join( + RE_CAMEL_CASE.sub("_", split) for split in bsh_key.split(".") + ).lower() diff --git a/homeassistant/components/home_connect/api.py b/homeassistant/components/home_connect/api.py index 10dc2d360fa..453f926c402 100644 --- a/homeassistant/components/home_connect/api.py +++ b/homeassistant/components/home_connect/api.py @@ -2,39 +2,16 @@ from asyncio import run_coroutine_threadsafe import logging -from typing import Any import homeconnect -from homeconnect.api import HomeConnectError +from homeconnect.api import HomeConnectAppliance, HomeConnectError -from homeassistant import config_entries, core -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_ICON, - CONF_DEVICE, - CONF_ENTITIES, - PERCENTAGE, - UnitOfTime, -) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.dispatcher import dispatcher_send -from .const import ( - ATTR_AMBIENT, - ATTR_DESC, - ATTR_DEVICE, - ATTR_KEY, - ATTR_SENSOR_TYPE, - ATTR_SIGN, - ATTR_UNIT, - ATTR_VALUE, - BSH_ACTIVE_PROGRAM, - BSH_OPERATION_STATE, - BSH_POWER_OFF, - BSH_POWER_STANDBY, - SIGNAL_UPDATE_ENTITIES, -) +from .const import ATTR_KEY, ATTR_VALUE, BSH_ACTIVE_PROGRAM, SIGNAL_UPDATE_ENTITIES _LOGGER = logging.getLogger(__name__) @@ -44,8 +21,8 @@ class ConfigEntryAuth(homeconnect.HomeConnectAPI): def __init__( self, - hass: core.HomeAssistant, - config_entry: config_entries.ConfigEntry, + hass: HomeAssistant, + config_entry: ConfigEntry, implementation: config_entry_oauth2_flow.AbstractOAuth2Implementation, ) -> None: """Initialize Home Connect Auth.""" @@ -55,7 +32,7 @@ class ConfigEntryAuth(homeconnect.HomeConnectAPI): hass, config_entry, implementation ) super().__init__(self.session.token) - self.devices: list[dict[str, Any]] = [] + self.devices: list[HomeConnectDevice] = [] def refresh_tokens(self) -> dict: """Refresh and return new Home Connect tokens using Home Assistant 
OAuth2 session.""" @@ -65,58 +42,22 @@ class ConfigEntryAuth(homeconnect.HomeConnectAPI): return self.session.token - def get_devices(self): + def get_devices(self) -> list[HomeConnectAppliance]: """Get a dictionary of devices.""" - appl = self.get_appliances() - devices = [] - for app in appl: - if app.type == "Dryer": - device = Dryer(self.hass, app) - elif app.type == "Washer": - device = Washer(self.hass, app) - elif app.type == "WasherDryer": - device = WasherDryer(self.hass, app) - elif app.type == "Dishwasher": - device = Dishwasher(self.hass, app) - elif app.type == "FridgeFreezer": - device = FridgeFreezer(self.hass, app) - elif app.type == "Refrigerator": - device = Refrigerator(self.hass, app) - elif app.type == "Freezer": - device = Freezer(self.hass, app) - elif app.type == "Oven": - device = Oven(self.hass, app) - elif app.type == "CoffeeMaker": - device = CoffeeMaker(self.hass, app) - elif app.type == "Hood": - device = Hood(self.hass, app) - elif app.type == "Hob": - device = Hob(self.hass, app) - elif app.type == "CookProcessor": - device = CookProcessor(self.hass, app) - else: - _LOGGER.warning("Appliance type %s not implemented", app.type) - continue - devices.append( - {CONF_DEVICE: device, CONF_ENTITIES: device.get_entity_info()} - ) - self.devices = devices - return devices + appl: list[HomeConnectAppliance] = self.get_appliances() + self.devices = [HomeConnectDevice(self.hass, app) for app in appl] + return self.devices class HomeConnectDevice: """Generic Home Connect device.""" - # for some devices, this is instead BSH_POWER_STANDBY - # see https://developer.home-connect.com/docs/settings/power_state - power_off_state = BSH_POWER_OFF - - def __init__(self, hass, appliance): + def __init__(self, hass: HomeAssistant, appliance: HomeConnectAppliance) -> None: """Initialize the device class.""" self.hass = hass self.appliance = appliance - def initialize(self): + def initialize(self) -> None: """Fetch the info needed to initialize the device.""" try: self.appliance.get_status() @@ -137,347 +78,8 @@ class HomeConnectDevice: } self.appliance.listen_events(callback=self.event_callback) - def event_callback(self, appliance): + def event_callback(self, appliance: HomeConnectAppliance) -> None: """Handle event.""" _LOGGER.debug("Update triggered on %s", appliance.name) _LOGGER.debug(self.appliance.status) dispatcher_send(self.hass, SIGNAL_UPDATE_ENTITIES, appliance.haId) - - -class DeviceWithPrograms(HomeConnectDevice): - """Device with programs.""" - - def get_programs_available(self): - """Get the available programs.""" - try: - programs_available = self.appliance.get_programs_available() - except (HomeConnectError, ValueError): - _LOGGER.debug("Unable to fetch available programs. Probably offline") - programs_available = [] - return programs_available - - def get_program_switches(self): - """Get a dictionary with info about program switches. - - There will be one switch for each program. - """ - programs = self.get_programs_available() - return [{ATTR_DEVICE: self, "program_name": p} for p in programs] - - def get_program_sensors(self): - """Get a dictionary with info about program sensors. - - There will be one of the four types of sensors for each - device. 
- """ - sensors = { - "Remaining Program Time": (None, None, SensorDeviceClass.TIMESTAMP, 1), - "Duration": (UnitOfTime.SECONDS, "mdi:update", None, 1), - "Program Progress": (PERCENTAGE, "mdi:progress-clock", None, 1), - } - return [ - { - ATTR_DEVICE: self, - ATTR_DESC: k, - ATTR_UNIT: unit, - ATTR_KEY: "BSH.Common.Option.{}".format(k.replace(" ", "")), - ATTR_ICON: icon, - ATTR_DEVICE_CLASS: device_class, - ATTR_SIGN: sign, - } - for k, (unit, icon, device_class, sign) in sensors.items() - ] - - -class DeviceWithOpState(HomeConnectDevice): - """Device that has an operation state sensor.""" - - def get_opstate_sensor(self): - """Get a list with info about operation state sensors.""" - - return [ - { - ATTR_DEVICE: self, - ATTR_DESC: "Operation State", - ATTR_UNIT: None, - ATTR_KEY: BSH_OPERATION_STATE, - ATTR_ICON: "mdi:state-machine", - ATTR_DEVICE_CLASS: None, - ATTR_SIGN: 1, - } - ] - - -class DeviceWithDoor(HomeConnectDevice): - """Device that has a door sensor.""" - - def get_door_entity(self): - """Get a dictionary with info about the door binary sensor.""" - return { - ATTR_DEVICE: self, - ATTR_DESC: "Door", - ATTR_SENSOR_TYPE: "door", - ATTR_DEVICE_CLASS: "door", - } - - -class DeviceWithLight(HomeConnectDevice): - """Device that has lighting.""" - - def get_light_entity(self): - """Get a dictionary with info about the lighting.""" - return {ATTR_DEVICE: self, ATTR_DESC: "Light", ATTR_AMBIENT: None} - - -class DeviceWithAmbientLight(HomeConnectDevice): - """Device that has ambient lighting.""" - - def get_ambientlight_entity(self): - """Get a dictionary with info about the ambient lighting.""" - return {ATTR_DEVICE: self, ATTR_DESC: "AmbientLight", ATTR_AMBIENT: True} - - -class DeviceWithRemoteControl(HomeConnectDevice): - """Device that has Remote Control binary sensor.""" - - def get_remote_control(self): - """Get a dictionary with info about the remote control sensor.""" - return { - ATTR_DEVICE: self, - ATTR_DESC: "Remote Control", - ATTR_SENSOR_TYPE: "remote_control", - } - - -class DeviceWithRemoteStart(HomeConnectDevice): - """Device that has a Remote Start binary sensor.""" - - def get_remote_start(self): - """Get a dictionary with info about the remote start sensor.""" - return { - ATTR_DEVICE: self, - ATTR_DESC: "Remote Start", - ATTR_SENSOR_TYPE: "remote_start", - } - - -class Dryer( - DeviceWithDoor, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Dryer class.""" - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class Dishwasher( - DeviceWithDoor, - DeviceWithAmbientLight, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Dishwasher class.""" - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = 
self.get_program_switches() - return { - "binary_sensor": [door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class Oven( - DeviceWithDoor, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Oven class.""" - - power_off_state = BSH_POWER_STANDBY - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class Washer( - DeviceWithDoor, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Washer class.""" - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class WasherDryer( - DeviceWithDoor, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """WasherDryer class.""" - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class CoffeeMaker(DeviceWithOpState, DeviceWithPrograms, DeviceWithRemoteStart): - """Coffee maker class.""" - - power_off_state = BSH_POWER_STANDBY - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class Hood( - DeviceWithLight, - DeviceWithAmbientLight, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Hood class.""" - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - light_entity = self.get_light_entity() - ambientlight_entity = self.get_ambientlight_entity() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + 
op_state_sensor, - "light": [light_entity, ambientlight_entity], - } - - -class FridgeFreezer(DeviceWithDoor): - """Fridge/Freezer class.""" - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - return {"binary_sensor": [door_entity]} - - -class Refrigerator(DeviceWithDoor): - """Refrigerator class.""" - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - return {"binary_sensor": [door_entity]} - - -class Freezer(DeviceWithDoor): - """Freezer class.""" - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - return {"binary_sensor": [door_entity]} - - -class Hob(DeviceWithOpState, DeviceWithPrograms, DeviceWithRemoteControl): - """Hob class.""" - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - remote_control = self.get_remote_control() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [remote_control], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class CookProcessor(DeviceWithOpState): - """CookProcessor class.""" - - power_off_state = BSH_POWER_STANDBY - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - op_state_sensor = self.get_opstate_sensor() - return {"sensor": op_state_sensor} diff --git a/homeassistant/components/home_connect/binary_sensor.py b/homeassistant/components/home_connect/binary_sensor.py index 84b02be1cc4..f9775918f16 100644 --- a/homeassistant/components/home_connect/binary_sensor.py +++ b/homeassistant/components/home_connect/binary_sensor.py @@ -1,13 +1,26 @@ """Provides a binary sensor for Home Connect.""" +from dataclasses import dataclass import logging -from homeassistant.components.binary_sensor import BinarySensorEntity -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ENTITIES +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.components.script import scripts_with_entity from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) +from . 
import HomeConnectConfigEntry +from .api import HomeConnectDevice from .const import ( ATTR_VALUE, BSH_DOOR_STATE, @@ -17,25 +30,109 @@ from .const import ( BSH_REMOTE_CONTROL_ACTIVATION_STATE, BSH_REMOTE_START_ALLOWANCE_STATE, DOMAIN, + REFRIGERATION_STATUS_DOOR_CHILLER, + REFRIGERATION_STATUS_DOOR_CLOSED, + REFRIGERATION_STATUS_DOOR_FREEZER, + REFRIGERATION_STATUS_DOOR_OPEN, + REFRIGERATION_STATUS_DOOR_REFRIGERATOR, ) from .entity import HomeConnectEntity _LOGGER = logging.getLogger(__name__) +REFRIGERATION_DOOR_BOOLEAN_MAP = { + REFRIGERATION_STATUS_DOOR_CLOSED: False, + REFRIGERATION_STATUS_DOOR_OPEN: True, +} + + +@dataclass(frozen=True, kw_only=True) +class HomeConnectBinarySensorEntityDescription(BinarySensorEntityDescription): + """Entity Description class for binary sensors.""" + + boolean_map: dict[str, bool] | None = None + + +BINARY_SENSORS = ( + HomeConnectBinarySensorEntityDescription( + key=BSH_REMOTE_CONTROL_ACTIVATION_STATE, + translation_key="remote_control", + ), + HomeConnectBinarySensorEntityDescription( + key=BSH_REMOTE_START_ALLOWANCE_STATE, + translation_key="remote_start", + ), + HomeConnectBinarySensorEntityDescription( + key="BSH.Common.Status.LocalControlActive", + translation_key="local_control", + ), + HomeConnectBinarySensorEntityDescription( + key="BSH.Common.Status.BatteryChargingState", + device_class=BinarySensorDeviceClass.BATTERY_CHARGING, + boolean_map={ + "BSH.Common.EnumType.BatteryChargingState.Charging": True, + "BSH.Common.EnumType.BatteryChargingState.Discharging": False, + }, + translation_key="battery_charging_state", + ), + HomeConnectBinarySensorEntityDescription( + key="BSH.Common.Status.ChargingConnection", + device_class=BinarySensorDeviceClass.PLUG, + boolean_map={ + "BSH.Common.EnumType.ChargingConnection.Connected": True, + "BSH.Common.EnumType.ChargingConnection.Disconnected": False, + }, + translation_key="charging_connection", + ), + HomeConnectBinarySensorEntityDescription( + key="ConsumerProducts.CleaningRobot.Status.DustBoxInserted", + translation_key="dust_box_inserted", + ), + HomeConnectBinarySensorEntityDescription( + key="ConsumerProducts.CleaningRobot.Status.Lifted", + translation_key="lifted", + ), + HomeConnectBinarySensorEntityDescription( + key="ConsumerProducts.CleaningRobot.Status.Lost", + translation_key="lost", + ), + HomeConnectBinarySensorEntityDescription( + key=REFRIGERATION_STATUS_DOOR_CHILLER, + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="chiller_door", + ), + HomeConnectBinarySensorEntityDescription( + key=REFRIGERATION_STATUS_DOOR_FREEZER, + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="freezer_door", + ), + HomeConnectBinarySensorEntityDescription( + key=REFRIGERATION_STATUS_DOOR_REFRIGERATOR, + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="refrigerator_door", + ), +) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect binary sensor.""" - def get_entities(): - entities = [] - hc_api = hass.data[DOMAIN][config_entry.entry_id] - for device_dict in hc_api.devices: - entity_dicts = device_dict.get(CONF_ENTITIES, {}).get("binary_sensor", []) - entities += [HomeConnectBinarySensor(**d) for d in entity_dicts] + def get_entities() -> list[BinarySensorEntity]: + entities: list[BinarySensorEntity] 
= [] + for device in entry.runtime_data.devices: + entities.extend( + HomeConnectBinarySensor(device, description) + for description in BINARY_SENSORS + if description.key in device.appliance.status + ) + if BSH_DOOR_STATE in device.appliance.status: + entities.append(HomeConnectDoorBinarySensor(device)) return entities async_add_entities(await hass.async_add_executor_job(get_entities), True) @@ -44,52 +141,99 @@ async def async_setup_entry( class HomeConnectBinarySensor(HomeConnectEntity, BinarySensorEntity): """Binary sensor for Home Connect.""" - def __init__(self, device, desc, sensor_type, device_class=None): - """Initialize the entity.""" - super().__init__(device, desc) - self._state = None - self._device_class = device_class - self._type = sensor_type - if self._type == "door": - self._update_key = BSH_DOOR_STATE - self._false_value_list = (BSH_DOOR_STATE_CLOSED, BSH_DOOR_STATE_LOCKED) - self._true_value_list = [BSH_DOOR_STATE_OPEN] - elif self._type == "remote_control": - self._update_key = BSH_REMOTE_CONTROL_ACTIVATION_STATE - self._false_value_list = [False] - self._true_value_list = [True] - elif self._type == "remote_start": - self._update_key = BSH_REMOTE_START_ALLOWANCE_STATE - self._false_value_list = [False] - self._true_value_list = [True] - - @property - def is_on(self): - """Return true if the binary sensor is on.""" - return bool(self._state) + entity_description: HomeConnectBinarySensorEntityDescription @property def available(self) -> bool: """Return true if the binary sensor is available.""" - return self._state is not None + return self._attr_is_on is not None async def async_update(self) -> None: """Update the binary sensor's status.""" - state = self.device.appliance.status.get(self._update_key, {}) - if not state: - self._state = None - elif state.get(ATTR_VALUE) in self._false_value_list: - self._state = False - elif state.get(ATTR_VALUE) in self._true_value_list: - self._state = True + if not self.device.appliance.status or not ( + status := self.device.appliance.status.get(self.bsh_key, {}).get(ATTR_VALUE) + ): + self._attr_is_on = None + return + if self.entity_description.boolean_map: + self._attr_is_on = self.entity_description.boolean_map.get(status) + elif status not in [True, False]: + self._attr_is_on = None else: - _LOGGER.warning( - "Unexpected value for HomeConnect %s state: %s", self._type, state - ) - self._state = None - _LOGGER.debug("Updated, new state: %s", self._state) + self._attr_is_on = status + _LOGGER.debug("Updated, new state: %s", self._attr_is_on) - @property - def device_class(self): - """Return the device class.""" - return self._device_class + +class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): + """Binary sensor for Home Connect Generic Door.""" + + _attr_has_entity_name = False + + def __init__( + self, + device: HomeConnectDevice, + ) -> None: + """Initialize the entity.""" + super().__init__( + device, + HomeConnectBinarySensorEntityDescription( + key=BSH_DOOR_STATE, + device_class=BinarySensorDeviceClass.DOOR, + boolean_map={ + BSH_DOOR_STATE_CLOSED: False, + BSH_DOOR_STATE_LOCKED: False, + BSH_DOOR_STATE_OPEN: True, + }, + ), + ) + self._attr_unique_id = f"{device.appliance.haId}-Door" + self._attr_name = f"{device.appliance.name} Door" + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + items = automations + 
scripts + if not items: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + entity_automations = [ + automation_entity + for automation_id in automations + if (automation_entity := entity_reg.async_get(automation_id)) + ] + entity_scripts = [ + script_entity + for script_id in scripts + if (script_entity := entity_reg.async_get(script_id)) + ] + + items_list = [ + f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" + for item in entity_automations + ] + [ + f"- [{item.original_name}](/config/script/edit/{item.unique_id})" + for item in entity_scripts + ] + + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_binary_common_door_sensor_{self.entity_id}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_binary_common_door_sensor", + translation_placeholders={ + "entity": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + async_delete_issue( + self.hass, DOMAIN, f"deprecated_binary_common_door_sensor_{self.entity_id}" + ) diff --git a/homeassistant/components/home_connect/config_flow.py b/homeassistant/components/home_connect/config_flow.py index f6616bf98ca..444ea24cb6b 100644 --- a/homeassistant/components/home_connect/config_flow.py +++ b/homeassistant/components/home_connect/config_flow.py @@ -14,6 +14,8 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN + MINOR_VERSION = 2 + @property def logger(self) -> logging.Logger: """Return logger.""" diff --git a/homeassistant/components/home_connect/const.py b/homeassistant/components/home_connect/const.py index b54637bb524..e20cf3b1fa0 100644 --- a/homeassistant/components/home_connect/const.py +++ b/homeassistant/components/home_connect/const.py @@ -5,15 +5,35 @@ DOMAIN = "home_connect" OAUTH2_AUTHORIZE = "https://api.home-connect.com/security/oauth/authorize" OAUTH2_TOKEN = "https://api.home-connect.com/security/oauth/token" +APPLIANCES_WITH_PROGRAMS = ( + "CleaningRobot", + "CoffeeMaker", + "Dishwasher", + "Dryer", + "Hood", + "Oven", + "WarmingDrawer", + "Washer", + "WasherDryer", +) + BSH_POWER_STATE = "BSH.Common.Setting.PowerState" BSH_POWER_ON = "BSH.Common.EnumType.PowerState.On" BSH_POWER_OFF = "BSH.Common.EnumType.PowerState.Off" BSH_POWER_STANDBY = "BSH.Common.EnumType.PowerState.Standby" +BSH_SELECTED_PROGRAM = "BSH.Common.Root.SelectedProgram" BSH_ACTIVE_PROGRAM = "BSH.Common.Root.ActiveProgram" BSH_REMOTE_CONTROL_ACTIVATION_STATE = "BSH.Common.Status.RemoteControlActive" BSH_REMOTE_START_ALLOWANCE_STATE = "BSH.Common.Status.RemoteControlStartAllowed" BSH_CHILD_LOCK_STATE = "BSH.Common.Setting.ChildLock" +BSH_REMAINING_PROGRAM_TIME = "BSH.Common.Option.RemainingProgramTime" +BSH_COMMON_OPTION_DURATION = "BSH.Common.Option.Duration" +BSH_COMMON_OPTION_PROGRAM_PROGRESS = "BSH.Common.Option.ProgramProgress" + +BSH_EVENT_PRESENT_STATE_PRESENT = "BSH.Common.EnumType.EventPresentState.Present" +BSH_EVENT_PRESENT_STATE_CONFIRMED = "BSH.Common.EnumType.EventPresentState.Confirmed" +BSH_EVENT_PRESENT_STATE_OFF = "BSH.Common.EnumType.EventPresentState.Off" BSH_OPERATION_STATE = "BSH.Common.Status.OperationState" BSH_OPERATION_STATE_RUN = "BSH.Common.EnumType.OperationState.Run" @@ -23,6 +43,50 @@ BSH_OPERATION_STATE_FINISHED = "BSH.Common.EnumType.OperationState.Finished" COOKING_LIGHTING = "Cooking.Common.Setting.Lighting" COOKING_LIGHTING_BRIGHTNESS = "Cooking.Common.Setting.LightingBrightness" 
+COFFEE_EVENT_BEAN_CONTAINER_EMPTY = ( + "ConsumerProducts.CoffeeMaker.Event.BeanContainerEmpty" +) +COFFEE_EVENT_WATER_TANK_EMPTY = "ConsumerProducts.CoffeeMaker.Event.WaterTankEmpty" +COFFEE_EVENT_DRIP_TRAY_FULL = "ConsumerProducts.CoffeeMaker.Event.DripTrayFull" + +DISHWASHER_EVENT_SALT_NEARLY_EMPTY = "Dishcare.Dishwasher.Event.SaltNearlyEmpty" +DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY = ( + "Dishcare.Dishwasher.Event.RinseAidNearlyEmpty" +) + +REFRIGERATION_INTERNAL_LIGHT_POWER = "Refrigeration.Common.Setting.Light.Internal.Power" +REFRIGERATION_INTERNAL_LIGHT_BRIGHTNESS = ( + "Refrigeration.Common.Setting.Light.Internal.Brightness" +) +REFRIGERATION_EXTERNAL_LIGHT_POWER = "Refrigeration.Common.Setting.Light.External.Power" +REFRIGERATION_EXTERNAL_LIGHT_BRIGHTNESS = ( + "Refrigeration.Common.Setting.Light.External.Brightness" +) + +REFRIGERATION_SUPERMODEFREEZER = "Refrigeration.FridgeFreezer.Setting.SuperModeFreezer" +REFRIGERATION_SUPERMODEREFRIGERATOR = ( + "Refrigeration.FridgeFreezer.Setting.SuperModeRefrigerator" +) +REFRIGERATION_DISPENSER = "Refrigeration.Common.Setting.Dispenser.Enabled" + +REFRIGERATION_STATUS_DOOR_CHILLER = "Refrigeration.Common.Status.Door.ChillerCommon" +REFRIGERATION_STATUS_DOOR_FREEZER = "Refrigeration.Common.Status.Door.Freezer" +REFRIGERATION_STATUS_DOOR_REFRIGERATOR = "Refrigeration.Common.Status.Door.Refrigerator" + +REFRIGERATION_STATUS_DOOR_CLOSED = "Refrigeration.Common.EnumType.Door.States.Closed" +REFRIGERATION_STATUS_DOOR_OPEN = "Refrigeration.Common.EnumType.Door.States.Open" + +REFRIGERATION_EVENT_DOOR_ALARM_REFRIGERATOR = ( + "Refrigeration.FridgeFreezer.Event.DoorAlarmRefrigerator" +) +REFRIGERATION_EVENT_DOOR_ALARM_FREEZER = ( + "Refrigeration.FridgeFreezer.Event.DoorAlarmFreezer" +) +REFRIGERATION_EVENT_TEMP_ALARM_FREEZER = ( + "Refrigeration.FridgeFreezer.Event.TemperatureAlarmFreezer" +) + + BSH_AMBIENT_LIGHT_ENABLED = "BSH.Common.Setting.AmbientLightEnabled" BSH_AMBIENT_LIGHT_BRIGHTNESS = "BSH.Common.Setting.AmbientLightBrightness" BSH_AMBIENT_LIGHT_COLOR = "BSH.Common.Setting.AmbientLightColor" @@ -49,12 +113,51 @@ SERVICE_SELECT_PROGRAM = "select_program" SERVICE_SETTING = "change_setting" SERVICE_START_PROGRAM = "start_program" +ATTR_ALLOWED_VALUES = "allowedvalues" ATTR_AMBIENT = "ambient" +ATTR_BSH_KEY = "bsh_key" +ATTR_CONSTRAINTS = "constraints" ATTR_DESC = "desc" ATTR_DEVICE = "device" ATTR_KEY = "key" ATTR_PROGRAM = "program" ATTR_SENSOR_TYPE = "sensor_type" ATTR_SIGN = "sign" +ATTR_STEPSIZE = "stepsize" ATTR_UNIT = "unit" ATTR_VALUE = "value" + +SVE_TRANSLATION_KEY_SET_SETTING = "set_setting_entity" + +SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME = "appliance_name" +SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID = "entity_id" +SVE_TRANSLATION_PLACEHOLDER_PROGRAM = "program" +SVE_TRANSLATION_PLACEHOLDER_KEY = "key" +SVE_TRANSLATION_PLACEHOLDER_VALUE = "value" + +OLD_NEW_UNIQUE_ID_SUFFIX_MAP = { + "ChildLock": BSH_CHILD_LOCK_STATE, + "Operation State": BSH_OPERATION_STATE, + "Light": COOKING_LIGHTING, + "AmbientLight": BSH_AMBIENT_LIGHT_ENABLED, + "Power": BSH_POWER_STATE, + "Remaining Program Time": BSH_REMAINING_PROGRAM_TIME, + "Duration": BSH_COMMON_OPTION_DURATION, + "Program Progress": BSH_COMMON_OPTION_PROGRAM_PROGRESS, + "Remote Control": BSH_REMOTE_CONTROL_ACTIVATION_STATE, + "Remote Start": BSH_REMOTE_START_ALLOWANCE_STATE, + "Supermode Freezer": REFRIGERATION_SUPERMODEFREEZER, + "Supermode Refrigerator": REFRIGERATION_SUPERMODEREFRIGERATOR, + "Dispenser Enabled": REFRIGERATION_DISPENSER, + "Internal Light": 
REFRIGERATION_INTERNAL_LIGHT_POWER, + "External Light": REFRIGERATION_EXTERNAL_LIGHT_POWER, + "Chiller Door": REFRIGERATION_STATUS_DOOR_CHILLER, + "Freezer Door": REFRIGERATION_STATUS_DOOR_FREEZER, + "Refrigerator Door": REFRIGERATION_STATUS_DOOR_REFRIGERATOR, + "Door Alarm Freezer": REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, + "Door Alarm Refrigerator": REFRIGERATION_EVENT_DOOR_ALARM_REFRIGERATOR, + "Temperature Alarm Freezer": REFRIGERATION_EVENT_TEMP_ALARM_FREEZER, + "Bean Container Empty": COFFEE_EVENT_BEAN_CONTAINER_EMPTY, + "Water Tank Empty": COFFEE_EVENT_WATER_TANK_EMPTY, + "Drip Tray Full": COFFEE_EVENT_DRIP_TRAY_FULL, +} diff --git a/homeassistant/components/home_connect/diagnostics.py b/homeassistant/components/home_connect/diagnostics.py new file mode 100644 index 00000000000..d2505853d23 --- /dev/null +++ b/homeassistant/components/home_connect/diagnostics.py @@ -0,0 +1,46 @@ +"""Diagnostics support for Home Connect Diagnostics.""" + +from __future__ import annotations + +from typing import Any + +from homeconnect.api import HomeConnectAppliance + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntry + +from . import HomeConnectConfigEntry, _get_appliance +from .api import HomeConnectDevice + + +def _generate_appliance_diagnostics(appliance: HomeConnectAppliance) -> dict[str, Any]: + return { + "status": appliance.status, + "programs": appliance.get_programs_available(), + } + + +def _generate_entry_diagnostics( + devices: list[HomeConnectDevice], +) -> dict[str, dict[str, Any]]: + return { + device.appliance.haId: _generate_appliance_diagnostics(device.appliance) + for device in devices + } + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: HomeConnectConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + return await hass.async_add_executor_job( + _generate_entry_diagnostics, entry.runtime_data.devices + ) + + +async def async_get_device_diagnostics( + hass: HomeAssistant, entry: HomeConnectConfigEntry, device: DeviceEntry +) -> dict[str, Any]: + """Return diagnostics for a device.""" + appliance = _get_appliance(hass, device_entry=device, entry=entry) + return await hass.async_add_executor_job(_generate_appliance_diagnostics, appliance) diff --git a/homeassistant/components/home_connect/entity.py b/homeassistant/components/home_connect/entity.py index d60f8a96e09..0ae4a28b8d4 100644 --- a/homeassistant/components/home_connect/entity.py +++ b/homeassistant/components/home_connect/entity.py @@ -5,7 +5,7 @@ import logging from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.entity import Entity, EntityDescription from .api import HomeConnectDevice from .const import DOMAIN, SIGNAL_UPDATE_ENTITIES @@ -17,12 +17,13 @@ class HomeConnectEntity(Entity): """Generic Home Connect entity (base class).""" _attr_should_poll = False + _attr_has_entity_name = True - def __init__(self, device: HomeConnectDevice, desc: str) -> None: + def __init__(self, device: HomeConnectDevice, desc: EntityDescription) -> None: """Initialize the entity.""" self.device = device - self._attr_name = f"{device.appliance.name} {desc}" - self._attr_unique_id = f"{device.appliance.haId}-{desc}" + self.entity_description = desc + self._attr_unique_id = f"{device.appliance.haId}-{self.bsh_key}" 
self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, device.appliance.haId)}, manufacturer=device.appliance.brand, @@ -30,7 +31,7 @@ class HomeConnectEntity(Entity): name=device.appliance.name, ) - async def async_added_to_hass(self): + async def async_added_to_hass(self) -> None: """Register callbacks.""" self.async_on_remove( async_dispatcher_connect( @@ -39,13 +40,18 @@ class HomeConnectEntity(Entity): ) @callback - def _update_callback(self, ha_id): + def _update_callback(self, ha_id: str) -> None: """Update data.""" if ha_id == self.device.appliance.haId: self.async_entity_update() @callback - def async_entity_update(self): + def async_entity_update(self) -> None: """Update the entity.""" _LOGGER.debug("Entity update triggered on %s", self) self.async_schedule_update_ha_state(True) + + @property + def bsh_key(self) -> str: + """Return the BSH key.""" + return self.entity_description.key diff --git a/homeassistant/components/home_connect/icons.json b/homeassistant/components/home_connect/icons.json index 48965cc554a..166b2fe2c34 100644 --- a/homeassistant/components/home_connect/icons.json +++ b/homeassistant/components/home_connect/icons.json @@ -1,11 +1,211 @@ { "services": { - "start_program": "mdi:play", - "select_program": "mdi:form-select", - "pause_program": "mdi:pause", - "resume_program": "mdi:play-pause", - "set_option_active": "mdi:gesture-tap", - "set_option_selected": "mdi:gesture-tap", - "change_setting": "mdi:cog" + "start_program": { + "service": "mdi:play" + }, + "select_program": { + "service": "mdi:form-select" + }, + "pause_program": { + "service": "mdi:pause" + }, + "resume_program": { + "service": "mdi:play-pause" + }, + "set_option_active": { + "service": "mdi:gesture-tap" + }, + "set_option_selected": { + "service": "mdi:gesture-tap" + }, + "change_setting": { + "service": "mdi:cog" + } + }, + "entity": { + "binary_sensor": { + "remote_control": { + "default": "mdi:remote", + "state": { + "off": "mdi:remote-off" + } + }, + "remote_start": { + "default": "mdi:remote", + "state": { + "off": "mdi:remote-off" + } + }, + "dust_box_inserted": { + "default": "mdi:download" + }, + "lifted": { + "default": "mdi:arrow-up-right-bold" + }, + "lost": { + "default": "mdi:map-marker-remove-variant" + } + }, + "sensor": { + "operation_state": { + "default": "mdi:state-machine", + "state": { + "inactive": "mdi:stop", + "ready": "mdi:check-circle", + "delayedstart": "mdi:progress-clock", + "run": "mdi:play", + "pause": "mdi:pause", + "actionrequired": "mdi:gesture-tap", + "finished": "mdi:flag-checkered", + "error": "mdi:alert-circle", + "aborting": "mdi:close-circle" + } + }, + "door": { + "default": "mdi:door", + "state": { + "closed": "mdi:door-closed", + "locked": "mdi:door-closed-lock", + "open": "mdi:door-open" + } + }, + "program_progress": { + "default": "mdi:progress-clock" + }, + "coffee_counter": { + "default": "mdi:coffee" + }, + "powder_coffee_counter": { + "default": "mdi:coffee" + }, + "hot_water_counter": { + "default": "mdi:cup-water" + }, + "hot_water_cups_counter": { + "default": "mdi:cup" + }, + "hot_milk_counter": { + "default": "mdi:cup" + }, + "frothy_milk_counter": { + "default": "mdi:cup" + }, + "milk_counter": { + "default": "mdi:cup" + }, + "coffee_and_milk": { + "default": "mdi:coffee" + }, + "ristretto_espresso_counter": { + "default": "mdi:coffee" + }, + "camera_state": { + "default": "mdi:camera", + "state": { + "disabled": "mdi:camera-off", + "sleeping": "mdi:sleep", + "error": "mdi:alert-circle-outline" + } + }, + "last_selected_map": { + 
"default": "mdi:map", + "state": { + "tempmap": "mdi:map-clock-outline", + "map1": "mdi:numeric-1", + "map2": "mdi:numeric-2", + "map3": "mdi:numeric-3" + } + }, + "refrigerator_door_alarm": { + "default": "mdi:fridge", + "state": { + "confirmed": "mdi:fridge-alert-outline", + "present": "mdi:fridge-alert" + } + }, + "freezer_door_alarm": { + "default": "mdi:snowflake", + "state": { + "confirmed": "mdi:snowflake-check", + "present": "mdi:snowflake-alert" + } + }, + "freezer_temperature_alarm": { + "default": "mdi:thermometer", + "state": { + "confirmed": "mdi:thermometer-check", + "present": "mdi:thermometer-alert" + } + }, + "bean_container_empty": { + "default": "mdi:coffee-maker", + "state": { + "confirmed": "mdi:coffee-maker-check", + "present": "mdi:coffee-maker-outline" + } + }, + "water_tank_empty": { + "default": "mdi:water", + "state": { + "confirmed": "mdi:water-check", + "present": "mdi:water-alert" + } + }, + "drip_tray_full": { + "default": "mdi:tray", + "state": { + "confirmed": "mdi:tray-full", + "present": "mdi:tray-alert" + } + } + }, + "switch": { + "power": { + "default": "mdi:power" + }, + "child_lock": { + "default": "mdi:lock", + "state": { + "on": "mdi:lock", + "off": "mdi:lock-off" + } + }, + "cup_warmer": { + "default": "mdi:heat-wave" + }, + "refrigerator_super_mode": { + "default": "mdi:speedometer" + }, + "freezer_super_mode": { + "default": "mdi:speedometer" + }, + "eco_mode": { + "default": "mdi:sprout" + }, + "cooking-oven-setting-sabbath_mode": { + "default": "mdi:volume-mute" + }, + "sabbath_mode": { + "default": "mdi:volume-mute" + }, + "vacation_mode": { + "default": "mdi:beach" + }, + "fresh_mode": { + "default": "mdi:leaf" + }, + "dispenser_enabled": { + "default": "mdi:snowflake", + "state": { + "off": "mdi:snowflake-off" + } + }, + "door-assistant_fridge": { + "default": "mdi:door" + }, + "door-assistant_freezer": { + "default": "mdi:door" + } + } } } diff --git a/homeassistant/components/home_connect/light.py b/homeassistant/components/home_connect/light.py index 3b062fac66c..e33017cd51f 100644 --- a/homeassistant/components/home_connect/light.py +++ b/homeassistant/components/home_connect/light.py @@ -1,5 +1,6 @@ """Provides a light for Home Connect.""" +from dataclasses import dataclass import logging from math import ceil from typing import Any @@ -9,15 +10,18 @@ from homeconnect.api import HomeConnectError from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, + ATTR_RGB_COLOR, ColorMode, LightEntity, + LightEntityDescription, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ENTITIES from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util +from . 
import HomeConnectConfigEntry, get_dict_from_home_connect_error +from .api import HomeConnectDevice from .const import ( ATTR_VALUE, BSH_AMBIENT_LIGHT_BRIGHTNESS, @@ -28,28 +32,74 @@ from .const import ( COOKING_LIGHTING, COOKING_LIGHTING_BRIGHTNESS, DOMAIN, + REFRIGERATION_EXTERNAL_LIGHT_BRIGHTNESS, + REFRIGERATION_EXTERNAL_LIGHT_POWER, + REFRIGERATION_INTERNAL_LIGHT_BRIGHTNESS, + REFRIGERATION_INTERNAL_LIGHT_POWER, + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, ) from .entity import HomeConnectEntity _LOGGER = logging.getLogger(__name__) +@dataclass(frozen=True, kw_only=True) +class HomeConnectLightEntityDescription(LightEntityDescription): + """Light entity description.""" + + brightness_key: str | None = None + color_key: str | None = None + enable_custom_color_value_key: str | None = None + custom_color_key: str | None = None + brightness_scale: tuple[float, float] = (0.0, 100.0) + + +LIGHTS: tuple[HomeConnectLightEntityDescription, ...] = ( + HomeConnectLightEntityDescription( + key=REFRIGERATION_INTERNAL_LIGHT_POWER, + brightness_key=REFRIGERATION_INTERNAL_LIGHT_BRIGHTNESS, + brightness_scale=(1.0, 100.0), + translation_key="internal_light", + ), + HomeConnectLightEntityDescription( + key=REFRIGERATION_EXTERNAL_LIGHT_POWER, + brightness_key=REFRIGERATION_EXTERNAL_LIGHT_BRIGHTNESS, + brightness_scale=(1.0, 100.0), + translation_key="external_light", + ), + HomeConnectLightEntityDescription( + key=COOKING_LIGHTING, + brightness_key=COOKING_LIGHTING_BRIGHTNESS, + brightness_scale=(10.0, 100.0), + translation_key="cooking_lighting", + ), + HomeConnectLightEntityDescription( + key=BSH_AMBIENT_LIGHT_ENABLED, + brightness_key=BSH_AMBIENT_LIGHT_BRIGHTNESS, + color_key=BSH_AMBIENT_LIGHT_COLOR, + enable_custom_color_value_key=BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, + custom_color_key=BSH_AMBIENT_LIGHT_CUSTOM_COLOR, + brightness_scale=(10.0, 100.0), + translation_key="ambient_light", + ), +) + + async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect light.""" - def get_entities(): + def get_entities() -> list[LightEntity]: """Get a list of entities.""" - entities = [] - hc_api = hass.data[DOMAIN][config_entry.entry_id] - for device_dict in hc_api.devices: - entity_dicts = device_dict.get(CONF_ENTITIES, {}).get("light", []) - entity_list = [HomeConnectLight(**d) for d in entity_dicts] - entities += entity_list - return entities + return [ + HomeConnectLight(device, description) + for description in LIGHTS + for device in entry.runtime_data.devices + if description.key in device.appliance.status + ] async_add_entities(await hass.async_add_executor_job(get_entities), True) @@ -57,85 +107,151 @@ async def async_setup_entry( class HomeConnectLight(HomeConnectEntity, LightEntity): """Light for Home Connect.""" - def __init__(self, device, desc, ambient): + entity_description: LightEntityDescription + + def __init__( + self, device: HomeConnectDevice, desc: HomeConnectLightEntityDescription + ) -> None: """Initialize the entity.""" super().__init__(device, desc) - self._ambient = ambient - if ambient: - self._brightness_key = BSH_AMBIENT_LIGHT_BRIGHTNESS - self._key = BSH_AMBIENT_LIGHT_ENABLED - self._custom_color_key = BSH_AMBIENT_LIGHT_CUSTOM_COLOR - self._color_key = BSH_AMBIENT_LIGHT_COLOR - self._attr_color_mode = ColorMode.HS - self._attr_supported_color_modes = {ColorMode.HS} - else: - self._brightness_key = COOKING_LIGHTING_BRIGHTNESS - self._key = COOKING_LIGHTING - 
self._custom_color_key = None - self._color_key = None - self._attr_color_mode = ColorMode.BRIGHTNESS - self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + + def get_setting_key_if_setting_exists(setting_key: str | None) -> str | None: + if setting_key and setting_key in device.appliance.status: + return setting_key + return None + + self._brightness_key = get_setting_key_if_setting_exists(desc.brightness_key) + self._custom_color_key = get_setting_key_if_setting_exists( + desc.custom_color_key + ) + self._color_key = get_setting_key_if_setting_exists(desc.color_key) + self._enable_custom_color_value_key = desc.enable_custom_color_value_key + self._brightness_scale = desc.brightness_scale + + match (self._brightness_key, self._custom_color_key): + case (None, None): + self._attr_color_mode = ColorMode.ONOFF + self._attr_supported_color_modes = {ColorMode.ONOFF} + case (_, None): + self._attr_color_mode = ColorMode.BRIGHTNESS + self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + case (_, _): + self._attr_color_mode = ColorMode.HS + self._attr_supported_color_modes = {ColorMode.HS, ColorMode.RGB} async def async_turn_on(self, **kwargs: Any) -> None: """Switch the light on, change brightness, change color.""" - if self._ambient: - _LOGGER.debug("Switching ambient light on for: %s", self.name) - try: - await self.hass.async_add_executor_job( - self.device.appliance.set_setting, self._key, True - ) - except HomeConnectError as err: - _LOGGER.error("Error while trying to turn on ambient light: %s", err) - return - if ATTR_BRIGHTNESS in kwargs or ATTR_HS_COLOR in kwargs: + _LOGGER.debug("Switching light on for: %s", self.name) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, self.bsh_key, True + ) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="turn_on_light", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err + if self._custom_color_key: + if ( + ATTR_RGB_COLOR in kwargs or ATTR_HS_COLOR in kwargs + ) and self._enable_custom_color_value_key: try: await self.hass.async_add_executor_job( self.device.appliance.set_setting, self._color_key, - BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, + self._enable_custom_color_value_key, ) except HomeConnectError as err: - _LOGGER.error("Error while trying selecting customcolor: %s", err) - if self._attr_brightness is not None: - brightness = 10 + ceil(self._attr_brightness / 255 * 90) - if ATTR_BRIGHTNESS in kwargs: - brightness = 10 + ceil(kwargs[ATTR_BRIGHTNESS] / 255 * 90) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="select_light_custom_color", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err - hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + if ATTR_RGB_COLOR in kwargs: + hex_val = color_util.color_rgb_to_hex(*kwargs[ATTR_RGB_COLOR]) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, + self._custom_color_key, + f"#{hex_val}", + ) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_light_color", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from 
err + elif (ATTR_BRIGHTNESS in kwargs or ATTR_HS_COLOR in kwargs) and ( + self._attr_brightness is not None or ATTR_BRIGHTNESS in kwargs + ): + brightness = 10 + ceil( + color_util.brightness_to_value( + self._brightness_scale, + kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness), + ) + ) - if hs_color is not None: - rgb = color_util.color_hsv_to_RGB( - hs_color[0], hs_color[1], brightness + hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + + if hs_color is not None: + rgb = color_util.color_hsv_to_RGB( + hs_color[0], hs_color[1], brightness + ) + hex_val = color_util.color_rgb_to_hex(*rgb) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, + self._custom_color_key, + f"#{hex_val}", ) - hex_val = color_util.color_rgb_to_hex(rgb[0], rgb[1], rgb[2]) - try: - await self.hass.async_add_executor_job( - self.device.appliance.set_setting, - self._custom_color_key, - f"#{hex_val}", - ) - except HomeConnectError as err: - _LOGGER.error( - "Error while trying setting the color: %s", err - ) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_light_color", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err - elif ATTR_BRIGHTNESS in kwargs: - _LOGGER.debug("Changing brightness for: %s", self.name) - brightness = 10 + ceil(kwargs[ATTR_BRIGHTNESS] / 255 * 90) + elif self._brightness_key and ATTR_BRIGHTNESS in kwargs: + _LOGGER.debug( + "Changing brightness for: %s, to: %s", + self.name, + kwargs[ATTR_BRIGHTNESS], + ) + brightness = ceil( + color_util.brightness_to_value( + self._brightness_scale, kwargs[ATTR_BRIGHTNESS] + ) + ) try: await self.hass.async_add_executor_job( self.device.appliance.set_setting, self._brightness_key, brightness ) except HomeConnectError as err: - _LOGGER.error("Error while trying set the brightness: %s", err) - else: - _LOGGER.debug("Switching light on for: %s", self.name) - try: - await self.hass.async_add_executor_job( - self.device.appliance.set_setting, self._key, True - ) - except HomeConnectError as err: - _LOGGER.error("Error while trying to turn on light: %s", err) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_light_brightness", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err self.async_entity_update() @@ -144,43 +260,59 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): _LOGGER.debug("Switching light off for: %s", self.name) try: await self.hass.async_add_executor_job( - self.device.appliance.set_setting, self._key, False + self.device.appliance.set_setting, self.bsh_key, False ) except HomeConnectError as err: - _LOGGER.error("Error while trying to turn off light: %s", err) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="turn_off_light", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err self.async_entity_update() async def async_update(self) -> None: """Update the light's status.""" - if self.device.appliance.status.get(self._key, {}).get(ATTR_VALUE) is True: + if self.device.appliance.status.get(self.bsh_key, {}).get(ATTR_VALUE) is True: self._attr_is_on = True - elif self.device.appliance.status.get(self._key, {}).get(ATTR_VALUE) is False: + elif ( + self.device.appliance.status.get(self.bsh_key, 
{}).get(ATTR_VALUE) is False + ): self._attr_is_on = False else: self._attr_is_on = None _LOGGER.debug("Updated, new light state: %s", self._attr_is_on) - if self._ambient: + if self._custom_color_key: color = self.device.appliance.status.get(self._custom_color_key, {}) if not color: + self._attr_rgb_color = None self._attr_hs_color = None self._attr_brightness = None else: - colorvalue = color.get(ATTR_VALUE)[1:] - rgb = color_util.rgb_hex_to_rgb_list(colorvalue) - hsv = color_util.color_RGB_to_hsv(rgb[0], rgb[1], rgb[2]) + color_value = color.get(ATTR_VALUE)[1:] + rgb = color_util.rgb_hex_to_rgb_list(color_value) + self._attr_rgb_color = (rgb[0], rgb[1], rgb[2]) + hsv = color_util.color_RGB_to_hsv(*rgb) self._attr_hs_color = (hsv[0], hsv[1]) - self._attr_brightness = ceil((hsv[2] - 10) * 255 / 90) - _LOGGER.debug("Updated, new brightness: %s", self._attr_brightness) - - else: + self._attr_brightness = color_util.value_to_brightness( + self._brightness_scale, hsv[2] + ) + _LOGGER.debug( + "Updated, new color (%s) and new brightness (%s) ", + color_value, + self._attr_brightness, + ) + elif self._brightness_key: brightness = self.device.appliance.status.get(self._brightness_key, {}) if brightness is None: self._attr_brightness = None else: - self._attr_brightness = ceil( - (brightness.get(ATTR_VALUE) - 10) * 255 / 90 + self._attr_brightness = color_util.value_to_brightness( + self._brightness_scale, brightness[ATTR_VALUE] ) _LOGGER.debug("Updated, new brightness: %s", self._attr_brightness) diff --git a/homeassistant/components/home_connect/manifest.json b/homeassistant/components/home_connect/manifest.json index 389386e42af..e041e13d36b 100644 --- a/homeassistant/components/home_connect/manifest.json +++ b/homeassistant/components/home_connect/manifest.json @@ -1,7 +1,7 @@ { "domain": "home_connect", "name": "Home Connect", - "codeowners": ["@DavidMStraub"], + "codeowners": ["@DavidMStraub", "@Diegorro98"], "config_flow": true, "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/home_connect", diff --git a/homeassistant/components/home_connect/number.py b/homeassistant/components/home_connect/number.py new file mode 100644 index 00000000000..0703b4772bb --- /dev/null +++ b/homeassistant/components/home_connect/number.py @@ -0,0 +1,165 @@ +"""Provides number entities for Home Connect.""" + +import logging + +from homeconnect.api import HomeConnectError + +from homeassistant.components.number import ( + ATTR_MAX, + ATTR_MIN, + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, +) + from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import HomeConnectConfigEntry, get_dict_from_home_connect_error +from .const import ( + ATTR_CONSTRAINTS, + ATTR_STEPSIZE, + ATTR_UNIT, + ATTR_VALUE, + DOMAIN, + SVE_TRANSLATION_KEY_SET_SETTING, + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, + SVE_TRANSLATION_PLACEHOLDER_KEY, + SVE_TRANSLATION_PLACEHOLDER_VALUE, +) +from .entity import HomeConnectEntity + +_LOGGER = logging.getLogger(__name__) + + +NUMBERS = ( + NumberEntityDescription( + key="Refrigeration.FridgeFreezer.Setting.SetpointTemperatureRefrigerator", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="refrigerator_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.FridgeFreezer.Setting.SetpointTemperatureFreezer", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="freezer_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.BottleCooler.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="bottle_cooler_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.ChillerLeft.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="chiller_left_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.ChillerCommon.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="chiller_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.ChillerRight.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="chiller_right_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.WineCompartment.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="wine_compartment_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.WineCompartment2.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="wine_compartment_2_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.WineCompartment3.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="wine_compartment_3_setpoint_temperature", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: HomeConnectConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Home Connect number.""" + + def get_entities() -> list[HomeConnectNumberEntity]: + """Get a list of entities.""" + return [ + HomeConnectNumberEntity(device, description) + for description in NUMBERS + for device in entry.runtime_data.devices + if description.key in device.appliance.status + ] + + async_add_entities(await hass.async_add_executor_job(get_entities), True) + + +class HomeConnectNumberEntity(HomeConnectEntity, NumberEntity): + """Number setting class for Home Connect.""" + + async def async_set_native_value(self, value: float) -> None: + """Set the native value of the entity.""" + _LOGGER.debug( + "Tried to set value %s to %s for %s", + value, + self.bsh_key, + self.entity_id, + ) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, + self.bsh_key, + value, + ) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key=SVE_TRANSLATION_KEY_SET_SETTING, + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + 
SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, + SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + }, + ) from err + + async def async_fetch_constraints(self) -> None: + """Fetch the max and min values and step for the number entity.""" + try: + data = await self.hass.async_add_executor_job( + self.device.appliance.get, f"/settings/{self.bsh_key}" + ) + except HomeConnectError as err: + _LOGGER.error("An error occurred: %s", err) + return + if not data or not (constraints := data.get(ATTR_CONSTRAINTS)): + return + self._attr_native_max_value = constraints.get(ATTR_MAX) + self._attr_native_min_value = constraints.get(ATTR_MIN) + self._attr_native_step = constraints.get(ATTR_STEPSIZE) + self._attr_native_unit_of_measurement = data.get(ATTR_UNIT) + + async def async_update(self) -> None: + """Update the number setting status.""" + if not (data := self.device.appliance.status.get(self.bsh_key)): + _LOGGER.error("No value for %s", self.bsh_key) + self._attr_native_value = None + return + self._attr_native_value = data.get(ATTR_VALUE, None) + _LOGGER.debug("Updated, new value: %s", self._attr_native_value) + + if ( + not hasattr(self, "_attr_native_min_value") + or self._attr_native_min_value is None + or not hasattr(self, "_attr_native_max_value") + or self._attr_native_max_value is None + or not hasattr(self, "_attr_native_step") + or self._attr_native_step is None + ): + await self.async_fetch_constraints() diff --git a/homeassistant/components/home_connect/select.py b/homeassistant/components/home_connect/select.py new file mode 100644 index 00000000000..c97b3db28e0 --- /dev/null +++ b/homeassistant/components/home_connect/select.py @@ -0,0 +1,301 @@ +"""Provides a select platform for Home Connect.""" + +import contextlib +import logging + +from homeconnect.api import HomeConnectError + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import ( + HomeConnectConfigEntry, + bsh_key_to_translation_key, + get_dict_from_home_connect_error, +) +from .api import HomeConnectDevice +from .const import ( + APPLIANCES_WITH_PROGRAMS, + ATTR_VALUE, + BSH_ACTIVE_PROGRAM, + BSH_SELECTED_PROGRAM, + DOMAIN, + SVE_TRANSLATION_PLACEHOLDER_PROGRAM, +) +from .entity import HomeConnectEntity + +_LOGGER = logging.getLogger(__name__) + +TRANSLATION_KEYS_PROGRAMS_MAP = { + bsh_key_to_translation_key(program): program + for program in ( + "ConsumerProducts.CleaningRobot.Program.Cleaning.CleanAll", + "ConsumerProducts.CleaningRobot.Program.Cleaning.CleanMap", + "ConsumerProducts.CleaningRobot.Program.Basic.GoHome", + "ConsumerProducts.CoffeeMaker.Program.Beverage.Ristretto", + "ConsumerProducts.CoffeeMaker.Program.Beverage.Espresso", + "ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoDoppio", + "ConsumerProducts.CoffeeMaker.Program.Beverage.Coffee", + "ConsumerProducts.CoffeeMaker.Program.Beverage.XLCoffee", + "ConsumerProducts.CoffeeMaker.Program.Beverage.CaffeGrande", + "ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoMacchiato", + "ConsumerProducts.CoffeeMaker.Program.Beverage.Cappuccino", + "ConsumerProducts.CoffeeMaker.Program.Beverage.LatteMacchiato", + "ConsumerProducts.CoffeeMaker.Program.Beverage.CaffeLatte", + "ConsumerProducts.CoffeeMaker.Program.Beverage.MilkFroth", + "ConsumerProducts.CoffeeMaker.Program.Beverage.WarmMilk", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.KleinerBrauner", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.GrosserBrauner", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Verlaengerter", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.VerlaengerterBraun", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.WienerMelange", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.FlatWhite", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Cortado", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.CafeCortado", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.CafeConLeche", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.CafeAuLait", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Doppio", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Kaapi", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.KoffieVerkeerd", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Galao", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Garoto", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Americano", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.RedEye", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.BlackEye", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.DeadEye", + "ConsumerProducts.CoffeeMaker.Program.Beverage.HotWater", + "Dishcare.Dishwasher.Program.PreRinse", + "Dishcare.Dishwasher.Program.Auto1", + "Dishcare.Dishwasher.Program.Auto2", + "Dishcare.Dishwasher.Program.Auto3", + "Dishcare.Dishwasher.Program.Eco50", + "Dishcare.Dishwasher.Program.Quick45", + "Dishcare.Dishwasher.Program.Intensiv70", + "Dishcare.Dishwasher.Program.Normal65", + "Dishcare.Dishwasher.Program.Glas40", + "Dishcare.Dishwasher.Program.GlassCare", + "Dishcare.Dishwasher.Program.NightWash", + "Dishcare.Dishwasher.Program.Quick65", + "Dishcare.Dishwasher.Program.Normal45", + "Dishcare.Dishwasher.Program.Intensiv45", + "Dishcare.Dishwasher.Program.AutoHalfLoad", + "Dishcare.Dishwasher.Program.IntensivPower", + "Dishcare.Dishwasher.Program.MagicDaily", + "Dishcare.Dishwasher.Program.Super60", + "Dishcare.Dishwasher.Program.Kurz60", + "Dishcare.Dishwasher.Program.ExpressSparkle65", + 
"Dishcare.Dishwasher.Program.MachineCare", + "Dishcare.Dishwasher.Program.SteamFresh", + "Dishcare.Dishwasher.Program.MaximumCleaning", + "Dishcare.Dishwasher.Program.MixedLoad", + "LaundryCare.Dryer.Program.Cotton", + "LaundryCare.Dryer.Program.Synthetic", + "LaundryCare.Dryer.Program.Mix", + "LaundryCare.Dryer.Program.Blankets", + "LaundryCare.Dryer.Program.BusinessShirts", + "LaundryCare.Dryer.Program.DownFeathers", + "LaundryCare.Dryer.Program.Hygiene", + "LaundryCare.Dryer.Program.Jeans", + "LaundryCare.Dryer.Program.Outdoor", + "LaundryCare.Dryer.Program.SyntheticRefresh", + "LaundryCare.Dryer.Program.Towels", + "LaundryCare.Dryer.Program.Delicates", + "LaundryCare.Dryer.Program.Super40", + "LaundryCare.Dryer.Program.Shirts15", + "LaundryCare.Dryer.Program.Pillow", + "LaundryCare.Dryer.Program.AntiShrink", + "LaundryCare.Dryer.Program.MyTime.MyDryingTime", + "LaundryCare.Dryer.Program.TimeCold", + "LaundryCare.Dryer.Program.TimeWarm", + "LaundryCare.Dryer.Program.InBasket", + "LaundryCare.Dryer.Program.TimeColdFix.TimeCold20", + "LaundryCare.Dryer.Program.TimeColdFix.TimeCold30", + "LaundryCare.Dryer.Program.TimeColdFix.TimeCold60", + "LaundryCare.Dryer.Program.TimeWarmFix.TimeWarm30", + "LaundryCare.Dryer.Program.TimeWarmFix.TimeWarm40", + "LaundryCare.Dryer.Program.TimeWarmFix.TimeWarm60", + "LaundryCare.Dryer.Program.Dessous", + "Cooking.Common.Program.Hood.Automatic", + "Cooking.Common.Program.Hood.Venting", + "Cooking.Common.Program.Hood.DelayedShutOff", + "Cooking.Oven.Program.HeatingMode.PreHeating", + "Cooking.Oven.Program.HeatingMode.HotAir", + "Cooking.Oven.Program.HeatingMode.HotAirEco", + "Cooking.Oven.Program.HeatingMode.HotAirGrilling", + "Cooking.Oven.Program.HeatingMode.TopBottomHeating", + "Cooking.Oven.Program.HeatingMode.TopBottomHeatingEco", + "Cooking.Oven.Program.HeatingMode.BottomHeating", + "Cooking.Oven.Program.HeatingMode.PizzaSetting", + "Cooking.Oven.Program.HeatingMode.SlowCook", + "Cooking.Oven.Program.HeatingMode.IntensiveHeat", + "Cooking.Oven.Program.HeatingMode.KeepWarm", + "Cooking.Oven.Program.HeatingMode.PreheatOvenware", + "Cooking.Oven.Program.HeatingMode.FrozenHeatupSpecial", + "Cooking.Oven.Program.HeatingMode.Desiccation", + "Cooking.Oven.Program.HeatingMode.Defrost", + "Cooking.Oven.Program.HeatingMode.Proof", + "Cooking.Oven.Program.HeatingMode.HotAir30Steam", + "Cooking.Oven.Program.HeatingMode.HotAir60Steam", + "Cooking.Oven.Program.HeatingMode.HotAir80Steam", + "Cooking.Oven.Program.HeatingMode.HotAir100Steam", + "Cooking.Oven.Program.HeatingMode.SabbathProgramme", + "Cooking.Oven.Program.Microwave.90Watt", + "Cooking.Oven.Program.Microwave.180Watt", + "Cooking.Oven.Program.Microwave.360Watt", + "Cooking.Oven.Program.Microwave.600Watt", + "Cooking.Oven.Program.Microwave.900Watt", + "Cooking.Oven.Program.Microwave.1000Watt", + "Cooking.Oven.Program.Microwave.Max", + "Cooking.Oven.Program.HeatingMode.WarmingDrawer", + "LaundryCare.Washer.Program.Cotton", + "LaundryCare.Washer.Program.Cotton.CottonEco", + "LaundryCare.Washer.Program.Cotton.Eco4060", + "LaundryCare.Washer.Program.Cotton.Colour", + "LaundryCare.Washer.Program.EasyCare", + "LaundryCare.Washer.Program.Mix", + "LaundryCare.Washer.Program.Mix.NightWash", + "LaundryCare.Washer.Program.DelicatesSilk", + "LaundryCare.Washer.Program.Wool", + "LaundryCare.Washer.Program.Sensitive", + "LaundryCare.Washer.Program.Auto30", + "LaundryCare.Washer.Program.Auto40", + "LaundryCare.Washer.Program.Auto60", + "LaundryCare.Washer.Program.Chiffon", + "LaundryCare.Washer.Program.Curtains", + 
"LaundryCare.Washer.Program.DarkWash", + "LaundryCare.Washer.Program.Dessous", + "LaundryCare.Washer.Program.Monsoon", + "LaundryCare.Washer.Program.Outdoor", + "LaundryCare.Washer.Program.PlushToy", + "LaundryCare.Washer.Program.ShirtsBlouses", + "LaundryCare.Washer.Program.SportFitness", + "LaundryCare.Washer.Program.Towels", + "LaundryCare.Washer.Program.WaterProof", + "LaundryCare.Washer.Program.PowerSpeed59", + "LaundryCare.Washer.Program.Super153045.Super15", + "LaundryCare.Washer.Program.Super153045.Super1530", + "LaundryCare.Washer.Program.DownDuvet.Duvet", + "LaundryCare.Washer.Program.Rinse.RinseSpinDrain", + "LaundryCare.Washer.Program.DrumClean", + "LaundryCare.WasherDryer.Program.Cotton", + "LaundryCare.WasherDryer.Program.Cotton.Eco4060", + "LaundryCare.WasherDryer.Program.Mix", + "LaundryCare.WasherDryer.Program.EasyCare", + "LaundryCare.WasherDryer.Program.WashAndDry60", + "LaundryCare.WasherDryer.Program.WashAndDry90", + ) +} + +PROGRAMS_TRANSLATION_KEYS_MAP = { + value: key for key, value in TRANSLATION_KEYS_PROGRAMS_MAP.items() +} + +PROGRAM_SELECT_ENTITY_DESCRIPTIONS = ( + SelectEntityDescription( + key=BSH_ACTIVE_PROGRAM, + translation_key="active_program", + ), + SelectEntityDescription( + key=BSH_SELECTED_PROGRAM, + translation_key="selected_program", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: HomeConnectConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Home Connect select entities.""" + + def get_entities() -> list[HomeConnectProgramSelectEntity]: + """Get a list of entities.""" + entities: list[HomeConnectProgramSelectEntity] = [] + programs_not_found = set() + for device in entry.runtime_data.devices: + if device.appliance.type in APPLIANCES_WITH_PROGRAMS: + with contextlib.suppress(HomeConnectError): + programs = device.appliance.get_programs_available() + if programs: + for program in programs: + if program not in PROGRAMS_TRANSLATION_KEYS_MAP: + programs.remove(program) + if program not in programs_not_found: + _LOGGER.info( + 'The program "%s" is not part of the official Home Connect API specification', + program, + ) + programs_not_found.add(program) + entities.extend( + HomeConnectProgramSelectEntity(device, programs, desc) + for desc in PROGRAM_SELECT_ENTITY_DESCRIPTIONS + ) + return entities + + async_add_entities(await hass.async_add_executor_job(get_entities), True) + + +class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity): + """Select class for Home Connect programs.""" + + def __init__( + self, + device: HomeConnectDevice, + programs: list[str], + desc: SelectEntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__( + device, + desc, + ) + self._attr_options = [ + PROGRAMS_TRANSLATION_KEYS_MAP[program] for program in programs + ] + self.start_on_select = desc.key == BSH_ACTIVE_PROGRAM + + async def async_update(self) -> None: + """Update the program selection status.""" + program = self.device.appliance.status.get(self.bsh_key, {}).get(ATTR_VALUE) + if not program: + program_translation_key = None + elif not ( + program_translation_key := PROGRAMS_TRANSLATION_KEYS_MAP.get(program) + ): + _LOGGER.debug( + 'The program "%s" is not part of the official Home Connect API specification', + program, + ) + self._attr_current_option = program_translation_key + _LOGGER.debug("Updated, new program: %s", self._attr_current_option) + + async def async_select_option(self, option: str) -> None: + """Select new program.""" + bsh_key = 
TRANSLATION_KEYS_PROGRAMS_MAP[option] + _LOGGER.debug( + "Starting program: %s" if self.start_on_select else "Selecting program: %s", + bsh_key, + ) + if self.start_on_select: + target = self.device.appliance.start_program + else: + target = self.device.appliance.select_program + try: + await self.hass.async_add_executor_job(target, bsh_key) + except HomeConnectError as err: + if self.start_on_select: + translation_key = "start_program" + else: + translation_key = "select_program" + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key=translation_key, + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_PROGRAM: bsh_key, + }, + ) from err + self.async_entity_update() diff --git a/homeassistant/components/home_connect/sensor.py b/homeassistant/components/home_connect/sensor.py index 9bd48617fb3..3ccf55bac6e 100644 --- a/homeassistant/components/home_connect/sensor.py +++ b/homeassistant/components/home_connect/sensor.py @@ -1,43 +1,284 @@ """Provides a sensor for Home Connect.""" +import contextlib +from dataclasses import dataclass from datetime import datetime, timedelta import logging from typing import cast -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ENTITIES +from homeconnect.api import HomeConnectError + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import PERCENTAGE, UnitOfTime, UnitOfVolume from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import slugify import homeassistant.util.dt as dt_util +from . import HomeConnectConfigEntry from .const import ( ATTR_VALUE, + BSH_DOOR_STATE, BSH_OPERATION_STATE, BSH_OPERATION_STATE_FINISHED, BSH_OPERATION_STATE_PAUSE, BSH_OPERATION_STATE_RUN, - DOMAIN, + COFFEE_EVENT_BEAN_CONTAINER_EMPTY, + COFFEE_EVENT_DRIP_TRAY_FULL, + COFFEE_EVENT_WATER_TANK_EMPTY, + DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, + DISHWASHER_EVENT_SALT_NEARLY_EMPTY, + REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, + REFRIGERATION_EVENT_DOOR_ALARM_REFRIGERATOR, + REFRIGERATION_EVENT_TEMP_ALARM_FREEZER, ) from .entity import HomeConnectEntity _LOGGER = logging.getLogger(__name__) +EVENT_OPTIONS = ["confirmed", "off", "present"] + + +@dataclass(frozen=True, kw_only=True) +class HomeConnectSensorEntityDescription(SensorEntityDescription): + """Entity Description class for sensors.""" + + default_value: str | None = None + appliance_types: tuple[str, ...] 
| None = None + sign: int = 1 + + +BSH_PROGRAM_SENSORS = ( + HomeConnectSensorEntityDescription( + key="BSH.Common.Option.RemainingProgramTime", + device_class=SensorDeviceClass.TIMESTAMP, + sign=1, + translation_key="program_finish_time", + ), + HomeConnectSensorEntityDescription( + key="BSH.Common.Option.Duration", + device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.SECONDS, + sign=1, + ), + HomeConnectSensorEntityDescription( + key="BSH.Common.Option.ProgramProgress", + native_unit_of_measurement=PERCENTAGE, + sign=1, + translation_key="program_progress", + ), +) + +SENSORS = ( + HomeConnectSensorEntityDescription( + key=BSH_OPERATION_STATE, + device_class=SensorDeviceClass.ENUM, + options=[ + "inactive", + "ready", + "delayedstart", + "run", + "pause", + "actionrequired", + "finished", + "error", + "aborting", + ], + translation_key="operation_state", + ), + HomeConnectSensorEntityDescription( + key=BSH_DOOR_STATE, + device_class=SensorDeviceClass.ENUM, + options=[ + "closed", + "locked", + "open", + ], + translation_key="door", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterCoffee", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="coffee_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterPowderCoffee", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="powder_coffee_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterHotWater", + native_unit_of_measurement=UnitOfVolume.MILLILITERS, + device_class=SensorDeviceClass.VOLUME, + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="hot_water_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterHotWaterCups", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="hot_water_cups_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterHotMilk", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="hot_milk_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterFrothyMilk", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="frothy_milk_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterMilk", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="milk_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterCoffeeAndMilk", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="coffee_and_milk_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterRistrettoEspresso", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="ristretto_espresso_counter", + ), + HomeConnectSensorEntityDescription( + key="BSH.Common.Status.BatteryLevel", + device_class=SensorDeviceClass.BATTERY, + translation_key="battery_level", + ), + HomeConnectSensorEntityDescription( + key="BSH.Common.Status.Video.CameraState", + device_class=SensorDeviceClass.ENUM, + options=[ + "disabled", + "sleeping", + "ready", + "streaminglocal", + "streamingcloud", + "streaminglocalancloud", + "error", + ], + translation_key="camera_state", + ), + HomeConnectSensorEntityDescription( + 
key="ConsumerProducts.CleaningRobot.Status.LastSelectedMap", + device_class=SensorDeviceClass.ENUM, + options=[ + "tempmap", + "map1", + "map2", + "map3", + ], + translation_key="last_selected_map", + ), +) + +EVENT_SENSORS = ( + HomeConnectSensorEntityDescription( + key=REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="freezer_door_alarm", + appliance_types=("FridgeFreezer", "Freezer"), + ), + HomeConnectSensorEntityDescription( + key=REFRIGERATION_EVENT_DOOR_ALARM_REFRIGERATOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="refrigerator_door_alarm", + appliance_types=("FridgeFreezer", "Refrigerator"), + ), + HomeConnectSensorEntityDescription( + key=REFRIGERATION_EVENT_TEMP_ALARM_FREEZER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="freezer_temperature_alarm", + appliance_types=("FridgeFreezer", "Freezer"), + ), + HomeConnectSensorEntityDescription( + key=COFFEE_EVENT_BEAN_CONTAINER_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="bean_container_empty", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=COFFEE_EVENT_WATER_TANK_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="water_tank_empty", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=COFFEE_EVENT_DRIP_TRAY_FULL, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="drip_tray_full", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=DISHWASHER_EVENT_SALT_NEARLY_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="salt_nearly_empty", + appliance_types=("Dishwasher",), + ), + HomeConnectSensorEntityDescription( + key=DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="rinse_aid_nearly_empty", + appliance_types=("Dishwasher",), + ), +) + + async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect sensor.""" - def get_entities(): + def get_entities() -> list[SensorEntity]: """Get a list of entities.""" - entities = [] - hc_api = hass.data[DOMAIN][config_entry.entry_id] - for device_dict in hc_api.devices: - entity_dicts = device_dict.get(CONF_ENTITIES, {}).get("sensor", []) - entities += [HomeConnectSensor(**d) for d in entity_dicts] + entities: list[SensorEntity] = [] + for device in entry.runtime_data.devices: + entities.extend( + HomeConnectSensor( + device, + description, + ) + for description in EVENT_SENSORS + if description.appliance_types + and device.appliance.type in description.appliance_types + ) + with contextlib.suppress(HomeConnectError): + if device.appliance.get_programs_available(): + entities.extend( + HomeConnectSensor(device, desc) for desc in BSH_PROGRAM_SENSORS + ) + entities.extend( + HomeConnectSensor(device, description) + for description in SENSORS + if description.key in device.appliance.status + ) return entities async_add_entities(await hass.async_add_executor_job(get_entities), True) @@ -46,14 +287,7 @@ async def async_setup_entry( 
class HomeConnectSensor(HomeConnectEntity, SensorEntity): """Sensor class for Home Connect.""" - def __init__(self, device, desc, key, unit, icon, device_class, sign=1): - """Initialize the entity.""" - super().__init__(device, desc) - self._key = key - self._sign = sign - self._attr_native_unit_of_measurement = unit - self._attr_icon = icon - self._attr_device_class = device_class + entity_description: HomeConnectSensorEntityDescription @property def available(self) -> bool: @@ -62,42 +296,52 @@ class HomeConnectSensor(HomeConnectEntity, SensorEntity): async def async_update(self) -> None: """Update the sensor's status.""" - status = self.device.appliance.status - if self._key not in status: - self._attr_native_value = None - elif self.device_class == SensorDeviceClass.TIMESTAMP: - if ATTR_VALUE not in status[self._key]: - self._attr_native_value = None - elif ( - self._attr_native_value is not None - and self._sign == 1 - and isinstance(self._attr_native_value, datetime) - and self._attr_native_value < dt_util.utcnow() - ): - # if the date is supposed to be in the future but we're - # already past it, set state to None. - self._attr_native_value = None - elif ( - BSH_OPERATION_STATE in status - and ATTR_VALUE in status[BSH_OPERATION_STATE] - and status[BSH_OPERATION_STATE][ATTR_VALUE] - in [ - BSH_OPERATION_STATE_RUN, - BSH_OPERATION_STATE_PAUSE, - BSH_OPERATION_STATE_FINISHED, - ] - ): - seconds = self._sign * float(status[self._key][ATTR_VALUE]) - self._attr_native_value = dt_util.utcnow() + timedelta(seconds=seconds) - else: - self._attr_native_value = None - else: - self._attr_native_value = status[self._key].get(ATTR_VALUE) - if self._key == BSH_OPERATION_STATE: + appliance_status = self.device.appliance.status + if ( + self.bsh_key not in appliance_status + or ATTR_VALUE not in appliance_status[self.bsh_key] + ): + self._attr_native_value = self.entity_description.default_value + _LOGGER.debug("Updated, new state: %s", self._attr_native_value) + return + status = appliance_status[self.bsh_key] + match self.device_class: + case SensorDeviceClass.TIMESTAMP: + if ATTR_VALUE not in status: + self._attr_native_value = None + elif ( + self._attr_native_value is not None + and self.entity_description.sign == 1 + and isinstance(self._attr_native_value, datetime) + and self._attr_native_value < dt_util.utcnow() + ): + # if the date is supposed to be in the future but we're + # already past it, set state to None. 
+ self._attr_native_value = None + elif ( + BSH_OPERATION_STATE + in (appliance_status := self.device.appliance.status) + and ATTR_VALUE in appliance_status[BSH_OPERATION_STATE] + and appliance_status[BSH_OPERATION_STATE][ATTR_VALUE] + in [ + BSH_OPERATION_STATE_RUN, + BSH_OPERATION_STATE_PAUSE, + BSH_OPERATION_STATE_FINISHED, + ] + ): + seconds = self.entity_description.sign * float(status[ATTR_VALUE]) + self._attr_native_value = dt_util.utcnow() + timedelta( + seconds=seconds + ) + else: + self._attr_native_value = None + case SensorDeviceClass.ENUM: # Value comes back as an enum, we only really care about the # last part, so split it off # https://developer.home-connect.com/docs/status/operation_state - self._attr_native_value = cast(str, self._attr_native_value).split(".")[ - -1 - ] + self._attr_native_value = slugify( + cast(str, status.get(ATTR_VALUE)).split(".")[-1] + ) + case _: + self._attr_native_value = status.get(ATTR_VALUE) _LOGGER.debug("Updated, new state: %s", self._attr_native_value) diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index 8afd3aaf8ce..e70f2f28c65 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -1,4 +1,8 @@ { + "common": { + "confirmed": "Confirmed", + "present": "Present" + }, "config": { "step": { "pick_implementation": { @@ -17,6 +21,77 @@ "default": "[%key:common::config_flow::create_entry::authenticated%]" } }, + "exceptions": { + "appliance_not_found": { + "message": "Appliance for device id {device_id} not found" + }, + "turn_on_light": { + "message": "Error turning on {entity_id}: {description}" + }, + "turn_off_light": { + "message": "Error turning off {entity_id}: {description}" + }, + "set_light_brightness": { + "message": "Error setting brightness of {entity_id}: {description}" + }, + "select_light_custom_color": { + "message": "Error selecting custom color of {entity_id}: {description}" + }, + "set_light_color": { + "message": "Error setting color of {entity_id}: {description}" + }, + "set_setting_entity": { + "message": "Error assigning the value \"{value}\" to the setting \"{key}\" for {entity_id}: {description}" + }, + "set_setting": { + "message": "Error assigning the value \"{value}\" to the setting \"{key}\": {description}" + }, + "turn_on": { + "message": "Error turning on {entity_id} ({key}): {description}" + }, + "turn_off": { + "message": "Error turning off {entity_id} ({key}): {description}" + }, + "select_program": { + "message": "Error selecting program {program}: {description}" + }, + "start_program": { + "message": "Error starting program {program}: {description}" + }, + "pause_program": { + "message": "Error pausing program: {description}" + }, + "stop_program": { + "message": "Error stopping program: {description}" + }, + "set_options_active_program": { + "message": "Error setting options for the active program: {description}" + }, + "set_options_selected_program": { + "message": "Error setting options for the selected program: {description}" + }, + "execute_command": { + "message": "Error executing command {command}: {description}" + }, + "power_on": { + "message": "Error turning on {appliance_name}: {description}" + }, + "power_off": { + "message": "Error turning off {appliance_name} with value \"{value}\": {description}" + }, + "turn_off_not_supported": { + "message": "{appliance_name} does not support turning off or entering standby mode." 
+ }, + "unable_to_retrieve_turn_off": { + "message": "Unable to turn off {appliance_name} because its support for turning off or entering standby mode could not be determined." + } + }, + "issues": { + "deprecated_binary_common_door_sensor": { + "title": "Deprecated binary door sensor detected in some automations or scripts", + "description": "The binary door sensor `{entity}`, which is deprecated, is used in the following automations or scripts:\n{items}\n\nA sensor entity with additional possible states is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." + } + }, "services": { "start_program": { "name": "Start program", @@ -129,5 +204,600 @@ "value": { "name": "Value", "description": "Value of the setting." } } } + }, + "entity": { + "binary_sensor": { + "remote_control": { + "name": "Remote control" + }, + "remote_start": { + "name": "Remote start" + }, + "local_control": { + "name": "Local control" + }, + "battery_charging_state": { + "name": "Battery charging state" + }, + "charging_connection": { + "name": "Charging connection" + }, + "dust_box_inserted": { + "name": "Dust box", + "state": { + "on": "Inserted", + "off": "Not inserted" + } + }, + "lifted": { + "name": "Lifted" + }, + "lost": { + "name": "Lost" + }, + "chiller_door": { + "name": "Chiller door" + }, + "freezer_door": { + "name": "Freezer door" + }, + "refrigerator_door": { + "name": "Refrigerator door" + } + }, + "light": { + "cooking_lighting": { + "name": "Functional light" + }, + "ambient_light": { + "name": "Ambient light" + }, + "external_light": { + "name": "External light" + }, + "internal_light": { + "name": "Internal light" + } + }, + "number": { + "refrigerator_setpoint_temperature": { + "name": "Refrigerator temperature" + }, + "freezer_setpoint_temperature": { + "name": "Freezer temperature" + }, + "bottle_cooler_setpoint_temperature": { + "name": "Bottle cooler temperature" + }, + "chiller_left_setpoint_temperature": { + "name": "Chiller left temperature" + }, + "chiller_setpoint_temperature": { + "name": "Chiller temperature" + }, + "chiller_right_setpoint_temperature": { + "name": "Chiller right temperature" + }, + "wine_compartment_setpoint_temperature": { + "name": "Wine compartment temperature" + }, + "wine_compartment_2_setpoint_temperature": { + "name": "Wine compartment 2 temperature" + }, + "wine_compartment_3_setpoint_temperature": { + "name": "Wine compartment 3 temperature" + } + }, + "select": { + "selected_program": { + "name": "Selected program", + "state": { + "consumer_products_cleaning_robot_program_cleaning_clean_all": "Clean all", + "consumer_products_cleaning_robot_program_cleaning_clean_map": "Clean map", + "consumer_products_cleaning_robot_program_basic_go_home": "Go home", + "consumer_products_coffee_maker_program_beverage_ristretto": "Ristretto", + "consumer_products_coffee_maker_program_beverage_espresso": "Espresso", + "consumer_products_coffee_maker_program_beverage_espresso_doppio": "Espresso doppio", + "consumer_products_coffee_maker_program_beverage_coffee": "Coffee", + "consumer_products_coffee_maker_program_beverage_x_l_coffee": "XL coffee", + "consumer_products_coffee_maker_program_beverage_caffe_grande": "Caffe grande", + "consumer_products_coffee_maker_program_beverage_espresso_macchiato": "Espresso macchiato", + "consumer_products_coffee_maker_program_beverage_cappuccino": "Cappuccino", + "consumer_products_coffee_maker_program_beverage_latte_macchiato": "Latte macchiato", + 
"consumer_products_coffee_maker_program_beverage_caffe_latte": "Caffe latte", + "consumer_products_coffee_maker_program_beverage_milk_froth": "Milk froth", + "consumer_products_coffee_maker_program_beverage_warm_milk": "Warm milk", + "consumer_products_coffee_maker_program_coffee_world_kleiner_brauner": "Kleiner brauner", + "consumer_products_coffee_maker_program_coffee_world_grosser_brauner": "Grosser brauner", + "consumer_products_coffee_maker_program_coffee_world_verlaengerter": "Verlaengerter", + "consumer_products_coffee_maker_program_coffee_world_verlaengerter_braun": "Verlaengerter braun", + "consumer_products_coffee_maker_program_coffee_world_wiener_melange": "Wiener melange", + "consumer_products_coffee_maker_program_coffee_world_flat_white": "Flat white", + "consumer_products_coffee_maker_program_coffee_world_cortado": "Cortado", + "consumer_products_coffee_maker_program_coffee_world_cafe_cortado": "Cafe cortado", + "consumer_products_coffee_maker_program_coffee_world_cafe_con_leche": "Cafe con leche", + "consumer_products_coffee_maker_program_coffee_world_cafe_au_lait": "Cafe au lait", + "consumer_products_coffee_maker_program_coffee_world_doppio": "Doppio", + "consumer_products_coffee_maker_program_coffee_world_kaapi": "Kaapi", + "consumer_products_coffee_maker_program_coffee_world_koffie_verkeerd": "Koffie verkeerd", + "consumer_products_coffee_maker_program_coffee_world_galao": "Galao", + "consumer_products_coffee_maker_program_coffee_world_garoto": "Garoto", + "consumer_products_coffee_maker_program_coffee_world_americano": "Americano", + "consumer_products_coffee_maker_program_coffee_world_red_eye": "Red eye", + "consumer_products_coffee_maker_program_coffee_world_black_eye": "Black eye", + "consumer_products_coffee_maker_program_coffee_world_dead_eye": "Dead eye", + "consumer_products_coffee_maker_program_beverage_hot_water": "Hot water", + "dishcare_dishwasher_program_pre_rinse": "Pre_rinse", + "dishcare_dishwasher_program_auto_1": "Auto 1", + "dishcare_dishwasher_program_auto_2": "Auto 2", + "dishcare_dishwasher_program_auto_3": "Auto 3", + "dishcare_dishwasher_program_eco_50": "Eco 50ºC", + "dishcare_dishwasher_program_quick_45": "Quick 45ºC", + "dishcare_dishwasher_program_intensiv_70": "Intensive 70ºC", + "dishcare_dishwasher_program_normal_65": "Normal 65ºC", + "dishcare_dishwasher_program_glas_40": "Glass 40ºC", + "dishcare_dishwasher_program_glass_care": "Glass care", + "dishcare_dishwasher_program_night_wash": "Night wash", + "dishcare_dishwasher_program_quick_65": "Quick 65ºC", + "dishcare_dishwasher_program_normal_45": "Normal 45ºC", + "dishcare_dishwasher_program_intensiv_45": "Intensive 45ºC", + "dishcare_dishwasher_program_auto_half_load": "Auto half load", + "dishcare_dishwasher_program_intensiv_power": "Intensive power", + "dishcare_dishwasher_program_magic_daily": "Magic daily", + "dishcare_dishwasher_program_super_60": "Super 60ºC", + "dishcare_dishwasher_program_kurz_60": "Kurz 60ºC", + "dishcare_dishwasher_program_express_sparkle_65": "Express sparkle 65ºC", + "dishcare_dishwasher_program_machine_care": "Machine care", + "dishcare_dishwasher_program_steam_fresh": "Steam fresh", + "dishcare_dishwasher_program_maximum_cleaning": "Maximum cleaning", + "dishcare_dishwasher_program_mixed_load": "Mixed load", + "laundry_care_dryer_program_cotton": "Cotton", + "laundry_care_dryer_program_synthetic": "Synthetic", + "laundry_care_dryer_program_mix": "Mix", + "laundry_care_dryer_program_blankets": "Blankets", + "laundry_care_dryer_program_business_shirts": 
"Business shirts", + "laundry_care_dryer_program_down_feathers": "Down feathers", + "laundry_care_dryer_program_hygiene": "Hygiene", + "laundry_care_dryer_program_jeans": "Jeans", + "laundry_care_dryer_program_outdoor": "Outdoor", + "laundry_care_dryer_program_synthetic_refresh": "Synthetic refresh", + "laundry_care_dryer_program_towels": "Towels", + "laundry_care_dryer_program_delicates": "Delicates", + "laundry_care_dryer_program_super_40": "Super 40ºC", + "laundry_care_dryer_program_shirts_15": "Shirts 15ºC", + "laundry_care_dryer_program_pillow": "Pillow", + "laundry_care_dryer_program_anti_shrink": "Anti shrink", + "laundry_care_dryer_program_my_time_my_drying_time": "My drying time", + "laundry_care_dryer_program_time_cold": "Cold (variable time)", + "laundry_care_dryer_program_time_warm": "Warm (variable time)", + "laundry_care_dryer_program_in_basket": "In basket", + "laundry_care_dryer_program_time_cold_fix_time_cold_20": "Cold (20 min)", + "laundry_care_dryer_program_time_cold_fix_time_cold_30": "Cold (30 min)", + "laundry_care_dryer_program_time_cold_fix_time_cold_60": "Cold (60 min)", + "laundry_care_dryer_program_time_warm_fix_time_warm_30": "Warm (30 min)", + "laundry_care_dryer_program_time_warm_fix_time_warm_40": "Warm (40 min)", + "laundry_care_dryer_program_time_warm_fix_time_warm_60": "Warm (60 min)", + "laundry_care_dryer_program_dessous": "Dessous", + "cooking_common_program_hood_automatic": "Automatic", + "cooking_common_program_hood_venting": "Venting", + "cooking_common_program_hood_delayed_shut_off": "Delayed shut off", + "cooking_oven_program_heating_mode_pre_heating": "Pre-heating", + "cooking_oven_program_heating_mode_hot_air": "Hot air", + "cooking_oven_program_heating_mode_hot_air_eco": "Hot air eco", + "cooking_oven_program_heating_mode_hot_air_grilling": "Hot air grilling", + "cooking_oven_program_heating_mode_top_bottom_heating": "Top bottom heating", + "cooking_oven_program_heating_mode_top_bottom_heating_eco": "Top bottom heating eco", + "cooking_oven_program_heating_mode_bottom_heating": "Bottom heating", + "cooking_oven_program_heating_mode_pizza_setting": "Pizza setting", + "cooking_oven_program_heating_mode_slow_cook": "Slow cook", + "cooking_oven_program_heating_mode_intensive_heat": "Intensive heat", + "cooking_oven_program_heating_mode_keep_warm": "Keep warm", + "cooking_oven_program_heating_mode_preheat_ovenware": "Preheat ovenware", + "cooking_oven_program_heating_mode_frozen_heatup_special": "Special Heat-Up for frozen products", + "cooking_oven_program_heating_mode_desiccation": "Desiccation", + "cooking_oven_program_heating_mode_defrost": "Defrost", + "cooking_oven_program_heating_mode_proof": "Proof", + "cooking_oven_program_heating_mode_hot_air_30_steam": "Hot air + 30 RH", + "cooking_oven_program_heating_mode_hot_air_60_steam": "Hot air + 60 RH", + "cooking_oven_program_heating_mode_hot_air_80_steam": "Hot air + 80 RH", + "cooking_oven_program_heating_mode_hot_air_100_steam": "Hot air + 100 RH", + "cooking_oven_program_heating_mode_sabbath_programme": "Sabbath programme", + "cooking_oven_program_microwave_90_watt": "90 Watt", + "cooking_oven_program_microwave_180_watt": "180 Watt", + "cooking_oven_program_microwave_360_watt": "360 Watt", + "cooking_oven_program_microwave_600_watt": "600 Watt", + "cooking_oven_program_microwave_900_watt": "900 Watt", + "cooking_oven_program_microwave_1000_watt": "1000 Watt", + "cooking_oven_program_microwave_max": "Max", + "cooking_oven_program_heating_mode_warming_drawer": "Warming drawer", + 
"laundry_care_washer_program_cotton": "Cotton", + "laundry_care_washer_program_cotton_cotton_eco": "Cotton eco", + "laundry_care_washer_program_cotton_eco_4060": "Cotton eco 40/60ºC", + "laundry_care_washer_program_cotton_colour": "Cotton color", + "laundry_care_washer_program_easy_care": "Easy care", + "laundry_care_washer_program_mix": "Mix", + "laundry_care_washer_program_mix_night_wash": "Mix night wash", + "laundry_care_washer_program_delicates_silk": "Delicates silk", + "laundry_care_washer_program_wool": "Wool", + "laundry_care_washer_program_sensitive": "Sensitive", + "laundry_care_washer_program_auto_30": "Auto 30ºC", + "laundry_care_washer_program_auto_40": "Auto 40ºC", + "laundry_care_washer_program_auto_60": "Auto 60ºC", + "laundry_care_washer_program_chiffon": "Chiffon", + "laundry_care_washer_program_curtains": "Curtains", + "laundry_care_washer_program_dark_wash": "Dark wash", + "laundry_care_washer_program_dessous": "Dessous", + "laundry_care_washer_program_monsoon": "Monsoon", + "laundry_care_washer_program_outdoor": "Outdoor", + "laundry_care_washer_program_plush_toy": "Plush toy", + "laundry_care_washer_program_shirts_blouses": "Shirts blouses", + "laundry_care_washer_program_sport_fitness": "Sport fitness", + "laundry_care_washer_program_towels": "Towels", + "laundry_care_washer_program_water_proof": "Water proof", + "laundry_care_washer_program_power_speed_59": "Power speed <60 min", + "laundry_care_washer_program_super_153045_super_15": "Super 15 min", + "laundry_care_washer_program_super_153045_super_1530": "Super 15/30 min", + "laundry_care_washer_program_down_duvet_duvet": "Down duvet", + "laundry_care_washer_program_rinse_rinse_spin_drain": "Rinse spin drain", + "laundry_care_washer_program_drum_clean": "Drum clean", + "laundry_care_washer_dryer_program_cotton": "Cotton", + "laundry_care_washer_dryer_program_cotton_eco_4060": "Cotton eco 40/60 ºC", + "laundry_care_washer_dryer_program_mix": "Mix", + "laundry_care_washer_dryer_program_easy_care": "Easy care", + "laundry_care_washer_dryer_program_wash_and_dry_60": "Wash and dry (60 min)", + "laundry_care_washer_dryer_program_wash_and_dry_90": "Wash and dry (90 min)" + } + }, + "active_program": { + "name": "Active program", + "state": { + "consumer_products_cleaning_robot_program_cleaning_clean_all": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_cleaning_robot_program_cleaning_clean_all%]", + "consumer_products_cleaning_robot_program_cleaning_clean_map": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_cleaning_robot_program_cleaning_clean_map%]", + "consumer_products_cleaning_robot_program_basic_go_home": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_cleaning_robot_program_basic_go_home%]", + "consumer_products_coffee_maker_program_beverage_ristretto": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_ristretto%]", + "consumer_products_coffee_maker_program_beverage_espresso": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_espresso%]", + "consumer_products_coffee_maker_program_beverage_espresso_doppio": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_espresso_doppio%]", + "consumer_products_coffee_maker_program_beverage_coffee": 
"[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_coffee%]", + "consumer_products_coffee_maker_program_beverage_x_l_coffee": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_x_l_coffee%]", + "consumer_products_coffee_maker_program_beverage_caffe_grande": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_caffe_grande%]", + "consumer_products_coffee_maker_program_beverage_espresso_macchiato": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_espresso_macchiato%]", + "consumer_products_coffee_maker_program_beverage_cappuccino": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_cappuccino%]", + "consumer_products_coffee_maker_program_beverage_latte_macchiato": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_latte_macchiato%]", + "consumer_products_coffee_maker_program_beverage_caffe_latte": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_caffe_latte%]", + "consumer_products_coffee_maker_program_beverage_milk_froth": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_milk_froth%]", + "consumer_products_coffee_maker_program_beverage_warm_milk": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_warm_milk%]", + "consumer_products_coffee_maker_program_coffee_world_kleiner_brauner": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_kleiner_brauner%]", + "consumer_products_coffee_maker_program_coffee_world_grosser_brauner": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_grosser_brauner%]", + "consumer_products_coffee_maker_program_coffee_world_verlaengerter": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_verlaengerter%]", + "consumer_products_coffee_maker_program_coffee_world_verlaengerter_braun": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_verlaengerter_braun%]", + "consumer_products_coffee_maker_program_coffee_world_wiener_melange": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_wiener_melange%]", + "consumer_products_coffee_maker_program_coffee_world_flat_white": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_flat_white%]", + "consumer_products_coffee_maker_program_coffee_world_cortado": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_cortado%]", + "consumer_products_coffee_maker_program_coffee_world_cafe_cortado": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_cafe_cortado%]", + "consumer_products_coffee_maker_program_coffee_world_cafe_con_leche": 
"[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_cafe_con_leche%]", + "consumer_products_coffee_maker_program_coffee_world_cafe_au_lait": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_cafe_au_lait%]", + "consumer_products_coffee_maker_program_coffee_world_doppio": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_doppio%]", + "consumer_products_coffee_maker_program_coffee_world_kaapi": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_kaapi%]", + "consumer_products_coffee_maker_program_coffee_world_koffie_verkeerd": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_koffie_verkeerd%]", + "consumer_products_coffee_maker_program_coffee_world_galao": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_galao%]", + "consumer_products_coffee_maker_program_coffee_world_garoto": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_garoto%]", + "consumer_products_coffee_maker_program_coffee_world_americano": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_americano%]", + "consumer_products_coffee_maker_program_coffee_world_red_eye": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_red_eye%]", + "consumer_products_coffee_maker_program_coffee_world_black_eye": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_black_eye%]", + "consumer_products_coffee_maker_program_coffee_world_dead_eye": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_dead_eye%]", + "consumer_products_coffee_maker_program_beverage_hot_water": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_hot_water%]", + "dishcare_dishwasher_program_pre_rinse": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_pre_rinse%]", + "dishcare_dishwasher_program_auto_1": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_auto_1%]", + "dishcare_dishwasher_program_auto_2": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_auto_2%]", + "dishcare_dishwasher_program_auto_3": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_auto_3%]", + "dishcare_dishwasher_program_eco_50": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_eco_50%]", + "dishcare_dishwasher_program_quick_45": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_quick_45%]", + "dishcare_dishwasher_program_intensiv_70": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_intensiv_70%]", + "dishcare_dishwasher_program_normal_65": 
"[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_normal_65%]", + "dishcare_dishwasher_program_glas_40": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_glas_40%]", + "dishcare_dishwasher_program_glass_care": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_glass_care%]", + "dishcare_dishwasher_program_night_wash": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_night_wash%]", + "dishcare_dishwasher_program_quick_65": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_quick_65%]", + "dishcare_dishwasher_program_normal_45": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_normal_45%]", + "dishcare_dishwasher_program_intensiv_45": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_intensiv_45%]", + "dishcare_dishwasher_program_auto_half_load": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_auto_half_load%]", + "dishcare_dishwasher_program_intensiv_power": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_intensiv_power%]", + "dishcare_dishwasher_program_magic_daily": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_magic_daily%]", + "dishcare_dishwasher_program_super_60": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_super_60%]", + "dishcare_dishwasher_program_kurz_60": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_kurz_60%]", + "dishcare_dishwasher_program_express_sparkle_65": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_express_sparkle_65%]", + "dishcare_dishwasher_program_machine_care": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_machine_care%]", + "dishcare_dishwasher_program_steam_fresh": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_steam_fresh%]", + "dishcare_dishwasher_program_maximum_cleaning": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_maximum_cleaning%]", + "dishcare_dishwasher_program_mixed_load": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_mixed_load%]", + "laundry_care_dryer_program_cotton": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_cotton%]", + "laundry_care_dryer_program_synthetic": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_synthetic%]", + "laundry_care_dryer_program_mix": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_mix%]", + "laundry_care_dryer_program_blankets": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_blankets%]", + "laundry_care_dryer_program_business_shirts": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_business_shirts%]", + "laundry_care_dryer_program_down_feathers": 
"[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_down_feathers%]", + "laundry_care_dryer_program_hygiene": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_hygiene%]", + "laundry_care_dryer_program_jeans": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_jeans%]", + "laundry_care_dryer_program_outdoor": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_outdoor%]", + "laundry_care_dryer_program_synthetic_refresh": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_synthetic_refresh%]", + "laundry_care_dryer_program_towels": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_towels%]", + "laundry_care_dryer_program_delicates": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_delicates%]", + "laundry_care_dryer_program_super_40": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_super_40%]", + "laundry_care_dryer_program_shirts_15": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_shirts_15%]", + "laundry_care_dryer_program_pillow": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_pillow%]", + "laundry_care_dryer_program_anti_shrink": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_anti_shrink%]", + "laundry_care_dryer_program_my_time_my_drying_time": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_my_time_my_drying_time%]", + "laundry_care_dryer_program_time_cold": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_cold%]", + "laundry_care_dryer_program_time_warm": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_warm%]", + "laundry_care_dryer_program_in_basket": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_in_basket%]", + "laundry_care_dryer_program_time_cold_fix_time_cold_20": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_cold_fix_time_cold_20%]", + "laundry_care_dryer_program_time_cold_fix_time_cold_30": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_cold_fix_time_cold_30%]", + "laundry_care_dryer_program_time_cold_fix_time_cold_60": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_cold_fix_time_cold_60%]", + "laundry_care_dryer_program_time_warm_fix_time_warm_30": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_warm_fix_time_warm_30%]", + "laundry_care_dryer_program_time_warm_fix_time_warm_40": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_warm_fix_time_warm_40%]", + "laundry_care_dryer_program_time_warm_fix_time_warm_60": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_warm_fix_time_warm_60%]", + "laundry_care_dryer_program_dessous": 
"[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_dessous%]", + "cooking_common_program_hood_automatic": "[%key:component::home_connect::entity::select::selected_program::state::cooking_common_program_hood_automatic%]", + "cooking_common_program_hood_venting": "[%key:component::home_connect::entity::select::selected_program::state::cooking_common_program_hood_venting%]", + "cooking_common_program_hood_delayed_shut_off": "[%key:component::home_connect::entity::select::selected_program::state::cooking_common_program_hood_delayed_shut_off%]", + "cooking_oven_program_heating_mode_pre_heating": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_pre_heating%]", + "cooking_oven_program_heating_mode_hot_air": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air%]", + "cooking_oven_program_heating_mode_hot_air_eco": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_eco%]", + "cooking_oven_program_heating_mode_hot_air_grilling": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_grilling%]", + "cooking_oven_program_heating_mode_top_bottom_heating": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_top_bottom_heating%]", + "cooking_oven_program_heating_mode_top_bottom_heating_eco": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_top_bottom_heating_eco%]", + "cooking_oven_program_heating_mode_bottom_heating": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_bottom_heating%]", + "cooking_oven_program_heating_mode_pizza_setting": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_pizza_setting%]", + "cooking_oven_program_heating_mode_slow_cook": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_slow_cook%]", + "cooking_oven_program_heating_mode_intensive_heat": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_intensive_heat%]", + "cooking_oven_program_heating_mode_keep_warm": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_keep_warm%]", + "cooking_oven_program_heating_mode_preheat_ovenware": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_preheat_ovenware%]", + "cooking_oven_program_heating_mode_frozen_heatup_special": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_frozen_heatup_special%]", + "cooking_oven_program_heating_mode_desiccation": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_desiccation%]", + "cooking_oven_program_heating_mode_defrost": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_defrost%]", + "cooking_oven_program_heating_mode_proof": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_proof%]", + "cooking_oven_program_heating_mode_hot_air_30_steam": 
"[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_30_steam%]", + "cooking_oven_program_heating_mode_hot_air_60_steam": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_60_steam%]", + "cooking_oven_program_heating_mode_hot_air_80_steam": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_80_steam%]", + "cooking_oven_program_heating_mode_hot_air_100_steam": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_100_steam%]", + "cooking_oven_program_heating_mode_sabbath_programme": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_sabbath_programme%]", + "cooking_oven_program_microwave_90_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_90_watt%]", + "cooking_oven_program_microwave_180_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_180_watt%]", + "cooking_oven_program_microwave_360_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_360_watt%]", + "cooking_oven_program_microwave_600_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_600_watt%]", + "cooking_oven_program_microwave_900_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_900_watt%]", + "cooking_oven_program_microwave_1000_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_1000_watt%]", + "cooking_oven_program_microwave_max": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_max%]", + "cooking_oven_program_heating_mode_warming_drawer": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_warming_drawer%]", + "laundry_care_washer_program_cotton": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_cotton%]", + "laundry_care_washer_program_cotton_cotton_eco": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_cotton_cotton_eco%]", + "laundry_care_washer_program_cotton_eco_4060": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_cotton_eco_4060%]", + "laundry_care_washer_program_cotton_colour": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_cotton_colour%]", + "laundry_care_washer_program_easy_care": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_easy_care%]", + "laundry_care_washer_program_mix": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_mix%]", + "laundry_care_washer_program_mix_night_wash": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_mix_night_wash%]", + "laundry_care_washer_program_delicates_silk": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_delicates_silk%]", + "laundry_care_washer_program_wool": 
"[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_wool%]", + "laundry_care_washer_program_sensitive": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_sensitive%]", + "laundry_care_washer_program_auto_30": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_auto_30%]", + "laundry_care_washer_program_auto_40": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_auto_40%]", + "laundry_care_washer_program_auto_60": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_auto_60%]", + "laundry_care_washer_program_chiffon": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_chiffon%]", + "laundry_care_washer_program_curtains": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_curtains%]", + "laundry_care_washer_program_dark_wash": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_dark_wash%]", + "laundry_care_washer_program_dessous": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_dessous%]", + "laundry_care_washer_program_monsoon": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_monsoon%]", + "laundry_care_washer_program_outdoor": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_outdoor%]", + "laundry_care_washer_program_plush_toy": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_plush_toy%]", + "laundry_care_washer_program_shirts_blouses": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_shirts_blouses%]", + "laundry_care_washer_program_sport_fitness": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_sport_fitness%]", + "laundry_care_washer_program_towels": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_towels%]", + "laundry_care_washer_program_water_proof": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_water_proof%]", + "laundry_care_washer_program_power_speed_59": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_power_speed_59%]", + "laundry_care_washer_program_super_153045_super_15": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_super_153045_super_15%]", + "laundry_care_washer_program_super_153045_super_1530": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_super_153045_super_1530%]", + "laundry_care_washer_program_down_duvet_duvet": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_down_duvet_duvet%]", + "laundry_care_washer_program_rinse_rinse_spin_drain": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_rinse_rinse_spin_drain%]", + "laundry_care_washer_program_drum_clean": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_drum_clean%]", + 
"laundry_care_washer_dryer_program_cotton": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_cotton%]", + "laundry_care_washer_dryer_program_cotton_eco_4060": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_cotton_eco_4060%]", + "laundry_care_washer_dryer_program_mix": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_mix%]", + "laundry_care_washer_dryer_program_easy_care": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_easy_care%]", + "laundry_care_washer_dryer_program_wash_and_dry_60": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_wash_and_dry_60%]", + "laundry_care_washer_dryer_program_wash_and_dry_90": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_wash_and_dry_90%]" + } + } + }, + "sensor": { + "program_progress": { + "name": "Program progress" + }, + "program_finish_time": { + "name": "Program finish time" + }, + "operation_state": { + "name": "Operation state", + "state": { + "inactive": "Inactive", + "ready": "Ready", + "delayedstart": "Delayed start", + "run": "Run", + "pause": "[%key:common::state::paused%]", + "actionrequired": "Action required", + "finished": "Finished", + "error": "Error", + "aborting": "Aborting" + } + }, + "door": { + "name": "Door", + "state": { + "closed": "[%key:common::state::closed%]", + "locked": "[%key:common::state::locked%]", + "open": "[%key:common::state::open%]" + } + }, + "coffee_counter": { + "name": "Coffees" + }, + "powder_coffee_counter": { + "name": "Powder coffees" + }, + "hot_water_counter": { + "name": "Hot water" + }, + "hot_water_cups_counter": { + "name": "Hot water cups" + }, + "hot_milk_counter": { + "name": "Hot milk cups" + }, + "frothy_milk_counter": { + "name": "Frothy milk cups" + }, + "milk_counter": { + "name": "Milk cups" + }, + "coffee_and_milk_counter": { + "name": "Coffee and milk cups" + }, + "ristretto_espresso_counter": { + "name": "Ristretto espresso cups" + }, + "battery_level": { + "name": "Battery level" + }, + "camera_state": { + "name": "Camera state", + "state": { + "disabled": "[%key:common::state::disabled%]", + "sleeping": "Sleeping", + "ready": "Ready", + "streaminglocal": "Streaming local", + "streamingcloud": "Streaming cloud", + "streaminglocal_and_cloud": "Streaming local and cloud", + "error": "Error" + } + }, + "last_selected_map": { + "name": "Last selected map", + "state": { + "tempmap": "Temporary map", + "map1": "Map 1", + "map2": "Map 2", + "map3": "Map 3" + } + }, + "freezer_door_alarm": { + "name": "Freezer door alarm", + "state": { + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "refrigerator_door_alarm": { + "name": "Refrigerator door alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "freezer_temperature_alarm": { + "name": "Freezer temperature alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "bean_container_empty": { + "name": "Bean container 
empty", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "water_tank_empty": { + "name": "Water tank empty", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "drip_tray_full": { + "name": "Drip tray full", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "salt_nearly_empty": { + "name": "Salt nearly empty", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "rinse_aid_nearly_empty": { + "name": "Rinse aid nearly empty", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + } + }, + "switch": { + "power": { + "name": "Power" + }, + "child_lock": { + "name": "Child lock" + }, + "cup_warmer": { + "name": "Cup warmer" + }, + "refrigerator_super_mode": { + "name": "Refrigerator super mode" + }, + "freezer_super_mode": { + "name": "Freezer super mode" + }, + "eco_mode": { + "name": "Eco mode" + }, + "sabbath_mode": { + "name": "Sabbath mode" + }, + "vacation_mode": { + "name": "Vacation mode" + }, + "fresh_mode": { + "name": "Fresh mode" + }, + "dispenser_enabled": { + "name": "Dispenser", + "state": { + "off": "[%key:common::state::disabled%]", + "on": "[%key:common::state::enabled%]" + } + }, + "door_assistant_fridge": { + "name": "Fridge door assistant" + }, + "door_assistant_freezer": { + "name": "Freezer door assistant" + } + }, + "time": { + "alarm_clock": { + "name": "Alarm clock" + } + } } } diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index 8c7ef2eb11a..acb78e87db1 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -1,63 +1,204 @@ """Provides a switch for Home Connect.""" +import contextlib import logging from typing import Any from homeconnect.api import HomeConnectError -from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_DEVICE, CONF_ENTITIES +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import HomeConnectConfigEntry, get_dict_from_home_connect_error from .const import ( + APPLIANCES_WITH_PROGRAMS, + ATTR_ALLOWED_VALUES, + ATTR_CONSTRAINTS, ATTR_VALUE, BSH_ACTIVE_PROGRAM, BSH_CHILD_LOCK_STATE, BSH_OPERATION_STATE, + BSH_POWER_OFF, BSH_POWER_ON, + BSH_POWER_STANDBY, BSH_POWER_STATE, DOMAIN, + REFRIGERATION_DISPENSER, + REFRIGERATION_SUPERMODEFREEZER, + REFRIGERATION_SUPERMODEREFRIGERATOR, + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME, + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, + SVE_TRANSLATION_PLACEHOLDER_KEY, + SVE_TRANSLATION_PLACEHOLDER_VALUE, ) -from .entity import HomeConnectEntity +from .entity import HomeConnectDevice, HomeConnectEntity _LOGGER = logging.getLogger(__name__) +SWITCHES = ( + SwitchEntityDescription( + key=BSH_CHILD_LOCK_STATE, + translation_key="child_lock", + ), + SwitchEntityDescription( + key="ConsumerProducts.CoffeeMaker.Setting.CupWarmer", + translation_key="cup_warmer", + ), + SwitchEntityDescription( + key=REFRIGERATION_SUPERMODEFREEZER, + translation_key="freezer_super_mode", + ), + SwitchEntityDescription( + key=REFRIGERATION_SUPERMODEREFRIGERATOR, + translation_key="refrigerator_super_mode", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.EcoMode", + translation_key="eco_mode", + ), + SwitchEntityDescription( + key="Cooking.Oven.Setting.SabbathMode", + translation_key="sabbath_mode", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.SabbathMode", + translation_key="sabbath_mode", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.VacationMode", + translation_key="vacation_mode", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.FreshMode", + translation_key="fresh_mode", + ), + SwitchEntityDescription( + key=REFRIGERATION_DISPENSER, + translation_key="dispenser_enabled", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.Door.AssistantFridge", + translation_key="door_assistant_fridge", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.Door.AssistantFreezer", + translation_key="door_assistant_freezer", + ), +) + + async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect switch.""" - def get_entities(): + def get_entities() -> list[SwitchEntity]: """Get a list of entities.""" - entities = [] - hc_api = hass.data[DOMAIN][config_entry.entry_id] - for device_dict in hc_api.devices: - entity_dicts = device_dict.get(CONF_ENTITIES, {}).get("switch", []) - entity_list = [HomeConnectProgramSwitch(**d) for d in entity_dicts] - entity_list += [HomeConnectPowerSwitch(device_dict[CONF_DEVICE])] - entity_list += [HomeConnectChildLockSwitch(device_dict[CONF_DEVICE])] - entities += entity_list + entities: list[SwitchEntity] = [] + for device in entry.runtime_data.devices: + if device.appliance.type in APPLIANCES_WITH_PROGRAMS: + with contextlib.suppress(HomeConnectError): + programs = device.appliance.get_programs_available() + if programs: + entities.extend( + HomeConnectProgramSwitch(device, program) + for program in programs + ) + entities.append(HomeConnectPowerSwitch(device)) + entities.extend( + HomeConnectSwitch(device, description) + for description in SWITCHES + if description.key in device.appliance.status + ) + return entities async_add_entities(await hass.async_add_executor_job(get_entities), True) +class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): + """Generic switch class for Home Connect Binary 
Settings.""" + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on setting.""" + + _LOGGER.debug("Turning on %s", self.entity_description.key) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, self.entity_description.key, True + ) + except HomeConnectError as err: + self._attr_available = False + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="turn_on", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, + }, + ) from err + + self._attr_available = True + self.async_entity_update() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off setting.""" + + _LOGGER.debug("Turning off %s", self.entity_description.key) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, self.entity_description.key, False + ) + except HomeConnectError as err: + _LOGGER.error("Error while trying to turn off: %s", err) + self._attr_available = False + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="turn_off", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, + }, + ) from err + + self._attr_available = True + self.async_entity_update() + + async def async_update(self) -> None: + """Update the switch's status.""" + + self._attr_is_on = self.device.appliance.status.get( + self.entity_description.key, {} + ).get(ATTR_VALUE) + self._attr_available = True + _LOGGER.debug( + "Updated %s, new state: %s", + self.entity_description.key, + self._attr_is_on, + ) + + class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): """Switch class for Home Connect.""" - def __init__(self, device, program_name): + def __init__(self, device: HomeConnectDevice, program_name: str) -> None: """Initialize the entity.""" desc = " ".join(["Program", program_name.split(".")[-1]]) if device.appliance.type == "WasherDryer": desc = " ".join( ["Program", program_name.split(".")[-3], program_name.split(".")[-1]] ) - super().__init__(device, desc) + super().__init__(device, SwitchEntityDescription(key=program_name)) + self._attr_name = f"{device.appliance.name} {desc}" + self._attr_unique_id = f"{device.appliance.haId}-{desc}" + self._attr_has_entity_name = False self.program_name = program_name async def async_turn_on(self, **kwargs: Any) -> None: @@ -68,7 +209,14 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): self.device.appliance.start_program, self.program_name ) except HomeConnectError as err: - _LOGGER.error("Error while trying to start program: %s", err) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="start_program", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "program": self.program_name, + }, + ) from err self.async_entity_update() async def async_turn_off(self, **kwargs: Any) -> None: @@ -77,7 +225,13 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): try: await self.hass.async_add_executor_job(self.device.appliance.stop_program) except HomeConnectError as err: - _LOGGER.error("Error while trying to stop program: %s", err) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="stop_program", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + }, + ) from err self.async_entity_update() async def 
async_update(self) -> None: @@ -93,9 +247,26 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): """Power switch class for Home Connect.""" - def __init__(self, device): + power_off_state: str | None + + def __init__(self, device: HomeConnectDevice) -> None: """Initialize the entity.""" - super().__init__(device, "Power") + super().__init__( + device, + SwitchEntityDescription(key=BSH_POWER_STATE, translation_key="power"), + ) + if ( + power_state := device.appliance.status.get(BSH_POWER_STATE, {}).get( + ATTR_VALUE + ) + ) and power_state in [BSH_POWER_OFF, BSH_POWER_STANDBY]: + self.power_off_state = power_state + + async def async_added_to_hass(self) -> None: + """Add the entity to the hass instance.""" + await super().async_added_to_hass() + if not hasattr(self, "power_off_state"): + await self.async_fetch_power_off_state() async def async_turn_on(self, **kwargs: Any) -> None: """Switch the device on.""" @@ -105,22 +276,54 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): self.device.appliance.set_setting, BSH_POWER_STATE, BSH_POWER_ON ) except HomeConnectError as err: - _LOGGER.error("Error while trying to turn on device: %s", err) self._attr_is_on = False + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="power_on", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.device.appliance.name, + }, + ) from err self.async_entity_update() async def async_turn_off(self, **kwargs: Any) -> None: """Switch the device off.""" + if not hasattr(self, "power_off_state"): + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unable_to_retrieve_turn_off", + translation_placeholders={ + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.device.appliance.name + }, + ) + + if self.power_off_state is None: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="turn_off_not_supported", + translation_placeholders={ + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.device.appliance.name + }, + ) _LOGGER.debug("tried to switch off %s", self.name) try: await self.hass.async_add_executor_job( self.device.appliance.set_setting, BSH_POWER_STATE, - self.device.power_off_state, + self.power_off_state, ) except HomeConnectError as err: - _LOGGER.error("Error while trying to turn off device: %s", err) self._attr_is_on = True + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="power_off", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.device.appliance.name, + SVE_TRANSLATION_PLACEHOLDER_VALUE: self.power_off_state, + }, + ) from err self.async_entity_update() async def async_update(self) -> None: @@ -131,8 +334,9 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): ): self._attr_is_on = True elif ( - self.device.appliance.status.get(BSH_POWER_STATE, {}).get(ATTR_VALUE) - == self.device.power_off_state + hasattr(self, "power_off_state") + and self.device.appliance.status.get(BSH_POWER_STATE, {}).get(ATTR_VALUE) + == self.power_off_state ): self._attr_is_on = False elif self.device.appliance.status.get(BSH_OPERATION_STATE, {}).get( @@ -156,43 +360,23 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): self._attr_is_on = None _LOGGER.debug("Updated, new state: %s", self._attr_is_on) - -class HomeConnectChildLockSwitch(HomeConnectEntity, SwitchEntity): - """Child 
lock switch class for Home Connect.""" - - def __init__(self, device) -> None: - """Initialize the entity.""" - super().__init__(device, "ChildLock") - - async def async_turn_on(self, **kwargs: Any) -> None: - """Switch child lock on.""" - _LOGGER.debug("Tried to switch child lock on device: %s", self.name) + async def async_fetch_power_off_state(self) -> None: + """Fetch the power off state.""" try: - await self.hass.async_add_executor_job( - self.device.appliance.set_setting, BSH_CHILD_LOCK_STATE, True + data = await self.hass.async_add_executor_job( + self.device.appliance.get, f"/settings/{self.bsh_key}" ) except HomeConnectError as err: - _LOGGER.error("Error while trying to turn on child lock on device: %s", err) - self._attr_is_on = False - self.async_entity_update() + _LOGGER.error("An error occurred: %s", err) + return + if not data or not ( + allowed_values := data.get(ATTR_CONSTRAINTS, {}).get(ATTR_ALLOWED_VALUES) + ): + return - async def async_turn_off(self, **kwargs: Any) -> None: - """Switch child lock off.""" - _LOGGER.debug("Tried to switch off child lock on device: %s", self.name) - try: - await self.hass.async_add_executor_job( - self.device.appliance.set_setting, BSH_CHILD_LOCK_STATE, False - ) - except HomeConnectError as err: - _LOGGER.error( - "Error while trying to turn off child lock on device: %s", err - ) - self._attr_is_on = True - self.async_entity_update() - - async def async_update(self) -> None: - """Update the switch's status.""" - self._attr_is_on = False - if self.device.appliance.status.get(BSH_CHILD_LOCK_STATE, {}).get(ATTR_VALUE): - self._attr_is_on = True - _LOGGER.debug("Updated child lock, new state: %s", self._attr_is_on) + if BSH_POWER_OFF in allowed_values: + self.power_off_state = BSH_POWER_OFF + elif BSH_POWER_STANDBY in allowed_values: + self.power_off_state = BSH_POWER_STANDBY + else: + self.power_off_state = None diff --git a/homeassistant/components/home_connect/time.py b/homeassistant/components/home_connect/time.py new file mode 100644 index 00000000000..c1f125cd2f7 --- /dev/null +++ b/homeassistant/components/home_connect/time.py @@ -0,0 +1,107 @@ +"""Provides time entities for Home Connect.""" + +from datetime import time +import logging + +from homeconnect.api import HomeConnectError + +from homeassistant.components.time import TimeEntity, TimeEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .
import HomeConnectConfigEntry, get_dict_from_home_connect_error +from .const import ( + ATTR_VALUE, + DOMAIN, + SVE_TRANSLATION_KEY_SET_SETTING, + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, + SVE_TRANSLATION_PLACEHOLDER_KEY, + SVE_TRANSLATION_PLACEHOLDER_VALUE, +) +from .entity import HomeConnectEntity + +_LOGGER = logging.getLogger(__name__) + + +TIME_ENTITIES = ( + TimeEntityDescription( + key="BSH.Common.Setting.AlarmClock", + translation_key="alarm_clock", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: HomeConnectConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Home Connect switch.""" + + def get_entities() -> list[HomeConnectTimeEntity]: + """Get a list of entities.""" + return [ + HomeConnectTimeEntity(device, description) + for description in TIME_ENTITIES + for device in entry.runtime_data.devices + if description.key in device.appliance.status + ] + + async_add_entities(await hass.async_add_executor_job(get_entities), True) + + +def seconds_to_time(seconds: int) -> time: + """Convert seconds to a time object.""" + minutes, sec = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + return time(hour=hours, minute=minutes, second=sec) + + +def time_to_seconds(t: time) -> int: + """Convert a time object to seconds.""" + return t.hour * 3600 + t.minute * 60 + t.second + + +class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity): + """Time setting class for Home Connect.""" + + async def async_set_value(self, value: time) -> None: + """Set the native value of the entity.""" + _LOGGER.debug( + "Tried to set value %s to %s for %s", + value, + self.bsh_key, + self.entity_id, + ) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, + self.bsh_key, + time_to_seconds(value), + ) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key=SVE_TRANSLATION_KEY_SET_SETTING, + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, + SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + }, + ) from err + + async def async_update(self) -> None: + """Update the Time setting status.""" + data = self.device.appliance.status.get(self.bsh_key) + if data is None: + _LOGGER.error("No value for %s", self.bsh_key) + self._attr_native_value = None + return + seconds = data.get(ATTR_VALUE, None) + if seconds is not None: + self._attr_native_value = seconds_to_time(seconds) + else: + self._attr_native_value = None + _LOGGER.debug("Updated, new value: %s", self._attr_native_value) diff --git a/homeassistant/components/homeassistant/__init__.py b/homeassistant/components/homeassistant/__init__.py index f771923ab2d..dc33b0c63e3 100644 --- a/homeassistant/components/homeassistant/__init__.py +++ b/homeassistant/components/homeassistant/__init__.py @@ -8,9 +8,9 @@ from typing import Any import voluptuous as vol +from homeassistant import config as conf_util, core_config from homeassistant.auth.permissions.const import CAT_ENTITIES, POLICY_CONTROL from homeassistant.components import persistent_notification -import homeassistant.config as conf_util from homeassistant.const import ( ATTR_ELEVATION, ATTR_ENTITY_ID, @@ -54,7 +54,7 @@ from .const import ( SERVICE_HOMEASSISTANT_RESTART, SERVICE_HOMEASSISTANT_STOP, ) -from .exposed_entities import ExposedEntities +from .exposed_entities import ExposedEntities, async_should_expose # noqa: F401 ATTR_ENTRY_ID = 
"entry_id" ATTR_SAFE_MODE = "safe_mode" @@ -269,7 +269,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: return # auth only processed during startup - await conf_util.async_process_ha_core_config(hass, conf.get(DOMAIN) or {}) + await core_config.async_process_ha_core_config(hass, conf.get(DOMAIN) or {}) async_register_admin_service( hass, DOMAIN, SERVICE_RELOAD_CORE_CONFIG, async_handle_reload_config @@ -282,7 +282,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: "longitude": call.data[ATTR_LONGITUDE], } - if elevation := call.data.get(ATTR_ELEVATION): + if (elevation := call.data.get(ATTR_ELEVATION)) is not None: service_data["elevation"] = elevation await hass.config.async_update(**service_data) diff --git a/homeassistant/components/homeassistant/icons.json b/homeassistant/components/homeassistant/icons.json index ec4d5729918..f08fa8d969b 100644 --- a/homeassistant/components/homeassistant/icons.json +++ b/homeassistant/components/homeassistant/icons.json @@ -1,17 +1,43 @@ { "services": { - "check_config": "mdi:receipt-text-check", - "reload_core_config": "mdi:receipt-text-send", - "restart": "mdi:restart", - "set_location": "mdi:map-marker", - "stop": "mdi:stop", - "toggle": "mdi:toggle-switch", - "turn_on": "mdi:power-on", - "turn_off": "mdi:power-off", - "update_entity": "mdi:update", - "reload_custom_templates": "mdi:palette-swatch", - "reload_config_entry": "mdi:reload", - "save_persistent_states": "mdi:content-save", - "reload_all": "mdi:reload" + "check_config": { + "service": "mdi:receipt-text-check" + }, + "reload_core_config": { + "service": "mdi:receipt-text-send" + }, + "restart": { + "service": "mdi:restart" + }, + "set_location": { + "service": "mdi:map-marker" + }, + "stop": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:toggle-switch" + }, + "turn_on": { + "service": "mdi:power-on" + }, + "turn_off": { + "service": "mdi:power-off" + }, + "update_entity": { + "service": "mdi:update" + }, + "reload_custom_templates": { + "service": "mdi:palette-swatch" + }, + "reload_config_entry": { + "service": "mdi:reload" + }, + "save_persistent_states": { + "service": "mdi:content-save" + }, + "reload_all": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 69a3e26ad79..3283d480fdd 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -10,6 +10,10 @@ "title": "The country has not been configured", "description": "No country has been configured, please update the configuration by clicking on the \"learn more\" button below." }, + "imperial_unit_system": { + "title": "The imperial unit system is deprecated", + "description": "The imperial unit system is deprecated and your system is currently using us customary. Please update your configuration to use the us customary unit system and reload the core configuration to fix this issue." + }, "deprecated_yaml": { "title": "The {integration_title} YAML configuration is being removed", "description": "Configuring {integration_title} using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." 
@@ -19,7 +23,7 @@ "description": "The currency {currency} is no longer in use, please reconfigure the currency configuration." }, "legacy_templates_false": { - "title": "`legacy_templates` config key is being removed", + "title": "legacy_templates config key is being removed", "description": "Nothing will change with your templates.\n\nRemove the `legacy_templates` key from the `homeassistant` configuration in your configuration.yaml file and restart Home Assistant to fix this issue." }, "legacy_templates_true": { @@ -40,15 +44,15 @@ }, "no_platform_setup": { "title": "Unused YAML configuration for the {platform} integration", - "description": "It's not possible to configure {platform} {domain} by adding `{platform_key}` to the {domain} configuration. Please check the documentation for more information on how to set up this integration.\n\nTo resolve this:\n1. Remove `{platform_key}` occurences from the `{domain}:` configuration in your YAML configuration file.\n2. Restart Home Assistant.\n\nExample that should be removed:\n{yaml_example}\n" + "description": "It's not possible to configure {platform} {domain} by adding `{platform_key}` to the {domain} configuration. Please check the documentation for more information on how to set up this integration.\n\nTo resolve this:\n1. Remove `{platform_key}` occurences from the `{domain}:` configuration in your YAML configuration file.\n2. Restart Home Assistant.\n\nExample that should be removed:\n{yaml_example}" }, "storage_corruption": { - "title": "Storage corruption detected for `{storage_key}`", + "title": "Storage corruption detected for {storage_key}", "fix_flow": { "step": { "confirm": { "title": "[%key:component::homeassistant::issues::storage_corruption::title%]", - "description": "The `{storage_key}` storage could not be parsed and has been renamed to `{corrupt_path}` to allow Home Assistant to continue.\n\nA default `{storage_key}` may have been created automatically.\n\nIf you made manual edits to the storage file, fix any syntax errors in `{corrupt_path}`, restore the file to the original path `{original_path}`, and restart Home Assistant. Otherwise, restore the system from a backup.\n\nClick SUBMIT below to confirm you have repaired the file or restored from a backup.\n\nThe exact error was: {error}" + "description": "The `{storage_key}` storage could not be parsed and has been renamed to `{corrupt_path}` to allow Home Assistant to continue.\n\nA default `{storage_key}` may have been created automatically.\n\nIf you made manual edits to the storage file, fix any syntax errors in `{corrupt_path}`, restore the file to the original path `{original_path}`, and restart Home Assistant. Otherwise, restore the system from a backup.\n\nSelect **Submit** below to confirm you have repaired the file or restored from a backup.\n\nThe exact error was: {error}" } } } @@ -57,6 +61,14 @@ "title": "[%key:common::config_flow::title::reauth%]", "description": "Reauthentication is needed" }, + "config_entry_unique_id_collision": { + "title": "Multiple {domain} config entries with same unique ID", + "description": "There are multiple {domain} config entries with the same unique ID.\nThe config entries are named {titles}.\n\nTo fix this error, [configure the integration]({configure_url}) and remove all except one of the duplicates.\n\nNote: Another group of duplicates may be revealed after removing these duplicates." 
+ }, + "config_entry_unique_id_collision_many": { + "title": "[%key:component::homeassistant::issues::config_entry_unique_id_collision::title%]", + "description": "There are multiple ({number_of_entries}) {domain} config entries with the same unique ID.\nThe first {title_limit} config entries are named {titles}.\n\nTo fix this error, [configure the integration]({configure_url}) and remove all except one of the duplicates.\n\nNote: Another group of duplicates may be revealed after removing these duplicates." + }, "integration_not_found": { "title": "Integration {domain} not found", "fix_flow": { @@ -126,7 +138,7 @@ }, "elevation": { "name": "[%key:common::config_flow::data::elevation%]", - "description": "Elevation of your location." + "description": "Elevation of your location above sea level." } } }, @@ -216,6 +228,9 @@ "service_not_found": { "message": "Action {domain}.{service} not found." }, + "service_not_supported": { + "message": "Entity {entity_id} does not support action {domain}.{service}." + }, "service_does_not_support_response": { "message": "An action which does not return responses can't be called with {return_response}." }, diff --git a/homeassistant/components/homeassistant/triggers/time.py b/homeassistant/components/homeassistant/triggers/time.py index 5441683b86f..bea6e8a66a7 100644 --- a/homeassistant/components/homeassistant/triggers/time.py +++ b/homeassistant/components/homeassistant/triggers/time.py @@ -1,7 +1,9 @@ """Offer time listening automation rules.""" -from datetime import datetime +from collections.abc import Callable +from datetime import datetime, timedelta from functools import partial +from typing import Any, NamedTuple import voluptuous as vol @@ -9,6 +11,8 @@ from homeassistant.components import sensor from homeassistant.const import ( ATTR_DEVICE_CLASS, CONF_AT, + CONF_ENTITY_ID, + CONF_OFFSET, CONF_PLATFORM, STATE_UNAVAILABLE, STATE_UNKNOWN, @@ -22,7 +26,8 @@ from homeassistant.core import ( State, callback, ) -from homeassistant.helpers import config_validation as cv +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv, template from homeassistant.helpers.event import ( async_track_point_in_time, async_track_state_change_event, @@ -32,14 +37,39 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util +_TIME_TRIGGER_ENTITY = vol.All(str, cv.entity_domain(["input_datetime", "sensor"])) +_TIME_AT_SCHEMA = vol.Any(cv.time, _TIME_TRIGGER_ENTITY) + +_TIME_TRIGGER_ENTITY_WITH_OFFSET = vol.Schema( + { + vol.Required(CONF_ENTITY_ID): cv.entity_domain(["sensor"]), + vol.Optional(CONF_OFFSET): cv.time_period, + } +) + + +def valid_at_template(value: Any) -> template.Template: + """Validate either a jinja2 template, valid time, or valid trigger entity.""" + tpl = cv.template(value) + + if tpl.is_static: + _TIME_AT_SCHEMA(value) + + return tpl + + _TIME_TRIGGER_SCHEMA = vol.Any( cv.time, - vol.All(str, cv.entity_domain(["input_datetime", "sensor"])), + _TIME_TRIGGER_ENTITY, + _TIME_TRIGGER_ENTITY_WITH_OFFSET, + valid_at_template, msg=( - "Expected HH:MM, HH:MM:SS or Entity ID with domain 'input_datetime' or 'sensor'" + "Expected HH:MM, HH:MM:SS, an Entity ID with domain 'input_datetime' or " + "'sensor', a combination of a timestamp sensor entity and an offset, or Limited Template" ), ) + TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend( { vol.Required(CONF_PLATFORM): "time", @@ -48,6 +78,13 
@@ TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend( ) +class TrackEntity(NamedTuple): + """Represents a tracking entity for a time trigger.""" + + entity_id: str + callback: Callable + + async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, @@ -56,7 +93,8 @@ async def async_attach_trigger( ) -> CALLBACK_TYPE: """Listen for state changes based on configuration.""" trigger_data = trigger_info["trigger_data"] - entities: dict[str, CALLBACK_TYPE] = {} + variables = trigger_info["variables"] or {} + entities: dict[tuple[str, timedelta], CALLBACK_TYPE] = {} removes: list[CALLBACK_TYPE] = [] job = HassJob(action, f"time trigger {trigger_info}") @@ -79,15 +117,21 @@ async def async_attach_trigger( ) @callback - def update_entity_trigger_event(event: Event[EventStateChangedData]) -> None: + def update_entity_trigger_event( + event: Event[EventStateChangedData], offset: timedelta = timedelta(0) + ) -> None: """update_entity_trigger from the event.""" - return update_entity_trigger(event.data["entity_id"], event.data["new_state"]) + return update_entity_trigger( + event.data["entity_id"], event.data["new_state"], offset + ) @callback - def update_entity_trigger(entity_id: str, new_state: State | None = None) -> None: + def update_entity_trigger( + entity_id: str, new_state: State | None = None, offset: timedelta = timedelta(0) + ) -> None: """Update the entity trigger for the entity_id.""" # If a listener was already set up for entity, remove it. - if remove := entities.pop(entity_id, None): + if remove := entities.pop((entity_id, offset), None): remove() remove = None @@ -153,6 +197,9 @@ async def async_attach_trigger( ): trigger_dt = dt_util.parse_datetime(new_state.state) + if trigger_dt is not None: + trigger_dt += offset + if trigger_dt is not None and trigger_dt > dt_util.utcnow(): remove = async_track_point_in_time( hass, @@ -166,15 +213,37 @@ async def async_attach_trigger( # Was a listener set up? if remove: - entities[entity_id] = remove + entities[(entity_id, offset)] = remove - to_track: list[str] = [] + to_track: list[TrackEntity] = [] for at_time in config[CONF_AT]: + if isinstance(at_time, template.Template): + render = template.render_complex(at_time, variables, limited=True) + try: + at_time = _TIME_AT_SCHEMA(render) + except vol.Invalid as exc: + raise HomeAssistantError( + f"Limited Template for 'at' rendered a unexpected value '{render}', expected HH:MM, " + f"HH:MM:SS or Entity ID with domain 'input_datetime' or 'sensor'" + ) from exc + if isinstance(at_time, str): # entity - to_track.append(at_time) update_entity_trigger(at_time, new_state=hass.states.get(at_time)) + to_track.append(TrackEntity(at_time, update_entity_trigger_event)) + elif isinstance(at_time, dict) and CONF_OFFSET in at_time: + # entity with offset + entity_id: str = at_time.get(CONF_ENTITY_ID, "") + offset: timedelta = at_time.get(CONF_OFFSET, timedelta(0)) + update_entity_trigger( + entity_id, new_state=hass.states.get(entity_id), offset=offset + ) + to_track.append( + TrackEntity( + entity_id, partial(update_entity_trigger_event, offset=offset) + ) + ) else: # datetime.time removes.append( @@ -187,9 +256,10 @@ async def async_attach_trigger( ) ) - # Track state changes of any entities. - removes.append( - async_track_state_change_event(hass, to_track, update_entity_trigger_event) + # Besides time, we also track state changes of requested entities. 
+ removes.extend( + (async_track_state_change_event(hass, entry.entity_id, entry.callback)) + for entry in to_track ) @callback diff --git a/homeassistant/components/homeassistant_alerts/coordinator.py b/homeassistant/components/homeassistant_alerts/coordinator.py index 5d99e1c980f..a81824d2376 100644 --- a/homeassistant/components/homeassistant_alerts/coordinator.py +++ b/homeassistant/components/homeassistant_alerts/coordinator.py @@ -5,10 +5,11 @@ import logging from awesomeversion import AwesomeVersion, AwesomeVersionStrategy -from homeassistant.components.hassio import get_supervisor_info, is_hassio +from homeassistant.components.hassio import get_supervisor_info from homeassistant.const import __version__ from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN, REQUEST_TIMEOUT, UPDATE_INTERVAL diff --git a/homeassistant/components/homeassistant_alerts/manifest.json b/homeassistant/components/homeassistant_alerts/manifest.json index 96e419ad9a2..0412f43da69 100644 --- a/homeassistant/components/homeassistant_alerts/manifest.json +++ b/homeassistant/components/homeassistant_alerts/manifest.json @@ -1,6 +1,7 @@ { "domain": "homeassistant_alerts", "name": "Home Assistant Alerts", + "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/core"], "config_flow": false, "documentation": "https://www.home-assistant.io/integrations/homeassistant_alerts", diff --git a/homeassistant/components/homeassistant_green/__init__.py b/homeassistant/components/homeassistant_green/__init__.py index 2d35b5bbed3..79688f9d16a 100644 --- a/homeassistant/components/homeassistant_green/__init__.py +++ b/homeassistant/components/homeassistant_green/__init__.py @@ -2,10 +2,11 @@ from __future__ import annotations -from homeassistant.components.hassio import get_os_info, is_hassio +from homeassistant.components.hassio import get_os_info from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.hassio import is_hassio async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/homeassistant_green/config_flow.py b/homeassistant/components/homeassistant_green/config_flow.py index 4b71c7f1056..c9aed577365 100644 --- a/homeassistant/components/homeassistant_green/config_flow.py +++ b/homeassistant/components/homeassistant_green/config_flow.py @@ -13,7 +13,6 @@ from homeassistant.components.hassio import ( HassioAPIError, async_get_green_settings, async_set_green_settings, - is_hassio, ) from homeassistant.config_entries import ( ConfigEntry, @@ -23,6 +22,7 @@ from homeassistant.config_entries import ( ) from homeassistant.core import callback from homeassistant.helpers import selector +from homeassistant.helpers.hassio import is_hassio from .const import DOMAIN @@ -55,9 +55,6 @@ class HomeAssistantGreenConfigFlow(ConfigFlow, domain=DOMAIN): self, data: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Home Assistant Green", data={}) diff --git a/homeassistant/components/homeassistant_green/manifest.json 
b/homeassistant/components/homeassistant_green/manifest.json index d543d562ee3..78da50603df 100644 --- a/homeassistant/components/homeassistant_green/manifest.json +++ b/homeassistant/components/homeassistant_green/manifest.json @@ -6,5 +6,6 @@ "config_flow": false, "dependencies": ["hardware", "homeassistant_hardware"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_green", - "integration_type": "hardware" + "integration_type": "hardware", + "single_config_entry": true } diff --git a/homeassistant/components/homeassistant_green/strings.json b/homeassistant/components/homeassistant_green/strings.json index 9066ca64e5c..13507439e4b 100644 --- a/homeassistant/components/homeassistant_green/strings.json +++ b/homeassistant/components/homeassistant_green/strings.json @@ -21,7 +21,6 @@ "abort": { "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", "read_hw_settings_error": "Failed to read hardware settings", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "write_hw_settings_error": "Failed to write hardware settings" } } diff --git a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py index b8dc4227ece..a91fb00c142 100644 --- a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py +++ b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py @@ -14,7 +14,6 @@ from homeassistant.components.hassio import ( AddonInfo, AddonManager, AddonState, - is_hassio, ) from homeassistant.components.zha.repairs.wrong_silabs_firmware import ( probe_silabs_firmware_type, @@ -25,10 +24,10 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import callback from homeassistant.data_entry_flow import AbortFlow +from homeassistant.helpers.hassio import is_hassio from . 
import silabs_multiprotocol_addon from .const import ZHA_DOMAIN @@ -496,13 +495,15 @@ class BaseFirmwareConfigFlow(BaseFirmwareInstallFlow, ConfigFlow): return await self.async_step_pick_firmware() -class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlowWithConfigEntry): +class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlow): """Zigbee and Thread options flow handlers.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, config_entry: ConfigEntry, *args: Any, **kwargs: Any) -> None: """Instantiate options flow.""" super().__init__(*args, **kwargs) + self._config_entry = config_entry + self._probed_firmware_type = ApplicationType(self.config_entry.data["firmware"]) # Make `context` a regular dictionary diff --git a/homeassistant/components/homeassistant_hardware/manifest.json b/homeassistant/components/homeassistant_hardware/manifest.json index 8898cece75a..f692094bc67 100644 --- a/homeassistant/components/homeassistant_hardware/manifest.json +++ b/homeassistant/components/homeassistant_hardware/manifest.json @@ -1,7 +1,7 @@ { "domain": "homeassistant_hardware", "name": "Home Assistant Hardware", - "after_dependencies": ["zha"], + "after_dependencies": ["hassio", "zha"], "codeowners": ["@home-assistant/core"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware", "integration_type": "system" diff --git a/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py b/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py index 31032ff6a8c..2b08031405f 100644 --- a/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py +++ b/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py @@ -17,7 +17,6 @@ from homeassistant.components.hassio import ( AddonManager, AddonState, hostname_from_addon_slug, - is_hassio, ) from homeassistant.config_entries import ( ConfigEntry, @@ -28,6 +27,7 @@ from homeassistant.config_entries import ( from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import AbortFlow from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) @@ -318,7 +318,6 @@ class OptionsFlowHandler(OptionsFlow, ABC): self.start_task: asyncio.Task | None = None self.stop_task: asyncio.Task | None = None self._zha_migration_mgr: ZhaMultiPANMigrationHelper | None = None - self.config_entry = config_entry self.original_addon_config: dict[str, Any] | None = None self.revert_reason: str | None = None diff --git a/homeassistant/components/homeassistant_hardware/strings.json b/homeassistant/components/homeassistant_hardware/strings.json index dbbb2057323..b483df75d75 100644 --- a/homeassistant/components/homeassistant_hardware/strings.json +++ b/homeassistant/components/homeassistant_hardware/strings.json @@ -51,7 +51,8 @@ "not_hassio_thread": "The OpenThread Border Router addon can only be installed with Home Assistant OS. If you would like to use the {model} as an Thread border router, please flash the firmware manually using the [web flasher]({docs_web_flasher_url}) and set up OpenThread Border Router to communicate with it.", "otbr_addon_already_running": "The OpenThread Border Router add-on is already running, it cannot be installed again.", "zha_still_using_stick": "This {model} is in use by the Zigbee Home Automation integration. 
Please migrate your Zigbee network to another adapter or delete the integration and try again.", - "otbr_still_using_stick": "This {model} is in use by the OpenThread Border Router add-on. If you use the Thread network, make sure you have alternative border routers. Uninstall the add-on and try again." + "otbr_still_using_stick": "This {model} is in use by the OpenThread Border Router add-on. If you use the Thread network, make sure you have alternative border routers. Uninstall the add-on and try again.", + "unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or addon is currently trying to communicate with the device. If you are running Home Assistant OS in a virtual machine or in Docker, please make sure that permissions are set correctly for the device." }, "progress": { "install_zigbee_flasher_addon": "The Silicon Labs Flasher addon is installed, this may take a few minutes.", diff --git a/homeassistant/components/homeassistant_hardware/util.py b/homeassistant/components/homeassistant_hardware/util.py index 90cfee076e3..0c06ff05e5c 100644 --- a/homeassistant/components/homeassistant_hardware/util.py +++ b/homeassistant/components/homeassistant_hardware/util.py @@ -9,9 +9,10 @@ from typing import cast from universal_silabs_flasher.const import ApplicationType -from homeassistant.components.hassio import AddonError, AddonState, is_hassio +from homeassistant.components.hassio import AddonError, AddonState from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.singleton import singleton from .const import ( diff --git a/homeassistant/components/homeassistant_sky_connect/config_flow.py b/homeassistant/components/homeassistant_sky_connect/config_flow.py index b1776624736..5c35732312b 100644 --- a/homeassistant/components/homeassistant_sky_connect/config_flow.py +++ b/homeassistant/components/homeassistant_sky_connect/config_flow.py @@ -12,7 +12,13 @@ from homeassistant.components.homeassistant_hardware import ( firmware_config_flow, silabs_multiprotocol_addon, ) -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigEntryBaseFlow, + ConfigFlowContext, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.core import callback from .const import DOCS_WEB_FLASHER_URL, DOMAIN, HardwareVariant @@ -33,10 +39,10 @@ else: TranslationPlaceholderProtocol = object -class SkyConnectTranslationMixin(TranslationPlaceholderProtocol): +class SkyConnectTranslationMixin(ConfigEntryBaseFlow, TranslationPlaceholderProtocol): """Translation placeholder mixin for Home Assistant SkyConnect.""" - context: dict[str, Any] + context: ConfigFlowContext def _get_translation_placeholders(self) -> dict[str, str]: """Shared translation placeholders.""" diff --git a/homeassistant/components/homeassistant_sky_connect/manifest.json b/homeassistant/components/homeassistant_sky_connect/manifest.json index f56fd24de61..27280c6aac3 100644 --- a/homeassistant/components/homeassistant_sky_connect/manifest.json +++ b/homeassistant/components/homeassistant_sky_connect/manifest.json @@ -1,6 +1,6 @@ { "domain": "homeassistant_sky_connect", - "name": "Home Assistant SkyConnect", + "name": "Home Assistant Connect ZBT-1", "codeowners": ["@home-assistant/core"], "config_flow": true, "dependencies": ["hardware", "usb", 
"homeassistant_hardware"], diff --git a/homeassistant/components/homeassistant_sky_connect/strings.json b/homeassistant/components/homeassistant_sky_connect/strings.json index 20f587c2dbb..a596b9846ce 100644 --- a/homeassistant/components/homeassistant_sky_connect/strings.json +++ b/homeassistant/components/homeassistant_sky_connect/strings.json @@ -113,7 +113,8 @@ "not_hassio_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::not_hassio_thread%]", "otbr_addon_already_running": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_addon_already_running%]", "zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]", - "otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]" + "otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]", + "unsupported_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::unsupported_firmware%]" }, "progress": { "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]", @@ -181,7 +182,10 @@ "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]", "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", "not_hassio_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::not_hassio_thread%]", - "otbr_addon_already_running": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_addon_already_running%]" + "otbr_addon_already_running": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_addon_already_running%]", + "zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]", + "otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]", + "unsupported_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::unsupported_firmware%]" }, "progress": { "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]", diff --git a/homeassistant/components/homeassistant_yellow/__init__.py b/homeassistant/components/homeassistant_yellow/__init__.py index 14c2de2c9a1..dc34cc4cdc9 100644 --- a/homeassistant/components/homeassistant_yellow/__init__.py +++ b/homeassistant/components/homeassistant_yellow/__init__.py @@ -2,18 +2,25 @@ from __future__ import annotations -from homeassistant.components.hassio import get_os_info, is_hassio +import logging + +from homeassistant.components.hassio import get_os_info from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( check_multi_pan_addon, - get_zigbee_socket, - multi_pan_addon_using_device, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + guess_firmware_type, ) from homeassistant.config_entries import SOURCE_HARDWARE, ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import discovery_flow +from homeassistant.helpers.hassio import is_hassio -from .const 
import RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA +from .const import FIRMWARE, RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA + +_LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -27,34 +34,26 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # The hassio integration has not yet fetched data from the supervisor raise ConfigEntryNotReady - board: str | None - if (board := os_info.get("board")) is None or board != "yellow": + if os_info.get("board") != "yellow": # Not running on a Home Assistant Yellow, Home Assistant may have been migrated hass.async_create_task(hass.config_entries.async_remove(entry.entry_id)) return False - try: - await check_multi_pan_addon(hass) - except HomeAssistantError as err: - raise ConfigEntryNotReady from err + firmware = ApplicationType(entry.data[FIRMWARE]) - if not await multi_pan_addon_using_device(hass, RADIO_DEVICE): - hw_discovery_data = ZHA_HW_DISCOVERY_DATA - else: - hw_discovery_data = { - "name": "Yellow Multiprotocol", - "port": { - "path": get_zigbee_socket(), - }, - "radio_type": "ezsp", - } + if firmware is ApplicationType.CPC: + try: + await check_multi_pan_addon(hass) + except HomeAssistantError as err: + raise ConfigEntryNotReady from err - discovery_flow.async_create_flow( - hass, - "zha", - context={"source": SOURCE_HARDWARE}, - data=hw_discovery_data, - ) + if firmware is ApplicationType.EZSP: + discovery_flow.async_create_flow( + hass, + "zha", + context={"source": SOURCE_HARDWARE}, + data=ZHA_HW_DISCOVERY_DATA, + ) return True @@ -62,3 +61,39 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return True + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + + _LOGGER.debug( + "Migrating from version %s.%s", config_entry.version, config_entry.minor_version + ) + + if config_entry.version == 1: + if config_entry.minor_version == 1: + # Add-on startup with type service get started before Core, always (e.g. the + # Multi-Protocol add-on). Probing the firmware would interfere with the add-on, + # so we can't safely probe here. Instead, we must make an educated guess! 
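# A standalone sketch of the version-gating flow that async_migrate_entry in the
# surrounding hunk follows (illustration only, not part of the patch; `Entry` is a
# stand-in for Home Assistant's ConfigEntry, and the firmware value is passed in by
# the caller instead of being guessed from the device):
from dataclasses import dataclass, field


@dataclass
class Entry:
    version: int = 1
    minor_version: int = 1
    data: dict = field(default_factory=dict)


def migrate(entry: Entry, guessed_firmware: str) -> bool:
    if entry.version == 1:
        if entry.minor_version == 1:
            # 1.1 -> 1.2: persist the guessed firmware type in the entry data.
            entry.data = {**entry.data, "firmware": guessed_firmware}
            entry.minor_version = 2
        return True
    # The entry was created by a newer major version (downgrade): refuse to migrate.
    return False


entry = Entry()
assert migrate(entry, "ezsp") and entry.minor_version == 2
assert migrate(Entry(version=2), "ezsp") is False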
+ firmware_guess = await guess_firmware_type(hass, RADIO_DEVICE) + + new_data = {**config_entry.data} + new_data[FIRMWARE] = firmware_guess.firmware_type.value + + hass.config_entries.async_update_entry( + config_entry, + data=new_data, + version=1, + minor_version=2, + ) + + _LOGGER.debug( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + + return True + + # This means the user has downgraded from a future version + return False diff --git a/homeassistant/components/homeassistant_yellow/config_flow.py b/homeassistant/components/homeassistant_yellow/config_flow.py index d2212a968db..2c58ecdfc1c 100644 --- a/homeassistant/components/homeassistant_yellow/config_flow.py +++ b/homeassistant/components/homeassistant_yellow/config_flow.py @@ -2,25 +2,40 @@ from __future__ import annotations +from abc import ABC, abstractmethod import asyncio import logging -from typing import Any +from typing import Any, final import aiohttp +from universal_silabs_flasher.const import ApplicationType import voluptuous as vol from homeassistant.components.hassio import ( HassioAPIError, async_get_yellow_settings, - async_reboot_host, async_set_yellow_settings, + get_supervisor_client, ) -from homeassistant.components.homeassistant_hardware import silabs_multiprotocol_addon -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult -from homeassistant.core import callback -from homeassistant.helpers import selector +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + BaseFirmwareConfigFlow, + BaseFirmwareOptionsFlow, +) +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + OptionsFlowHandler as MultiprotocolOptionsFlowHandler, + SerialPortSettings as MultiprotocolSerialPortSettings, +) +from homeassistant.config_entries import ( + SOURCE_HARDWARE, + ConfigEntry, + ConfigFlowResult, + OptionsFlow, +) +from homeassistant.core import HomeAssistant, async_get_hass, callback +from homeassistant.helpers import discovery_flow, selector -from .const import DOMAIN, ZHA_HW_DISCOVERY_DATA +from .const import DOMAIN, FIRMWARE, RADIO_DEVICE, ZHA_DOMAIN, ZHA_HW_DISCOVERY_DATA +from .hardware import BOARD_NAME _LOGGER = logging.getLogger(__name__) @@ -33,50 +48,89 @@ STEP_HW_SETTINGS_SCHEMA = vol.Schema( ) -class HomeAssistantYellowConfigFlow(ConfigFlow, domain=DOMAIN): +class HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN): """Handle a config flow for Home Assistant Yellow.""" VERSION = 1 + MINOR_VERSION = 2 + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate config flow.""" + super().__init__(*args, **kwargs) + + self._device = RADIO_DEVICE @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> HomeAssistantYellowOptionsFlow: + ) -> OptionsFlow: """Return the options flow.""" - return HomeAssistantYellowOptionsFlow(config_entry) + firmware_type = ApplicationType(config_entry.data[FIRMWARE]) + hass = async_get_hass() + + if firmware_type is ApplicationType.CPC: + return HomeAssistantYellowMultiPanOptionsFlowHandler(hass, config_entry) + + return HomeAssistantYellowOptionsFlowHandler(hass, config_entry) async def async_step_system( self, data: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") + # We do not actually use any portion of `BaseFirmwareConfigFlow` beyond this + await 
self._probe_firmware_type() - return self.async_create_entry(title="Home Assistant Yellow", data={}) + # Kick off ZHA hardware discovery automatically if Zigbee firmware is running + if self._probed_firmware_type is ApplicationType.EZSP: + discovery_flow.async_create_flow( + self.hass, + ZHA_DOMAIN, + context={"source": SOURCE_HARDWARE}, + data=ZHA_HW_DISCOVERY_DATA, + ) + + return self._async_flow_finished() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + return self.async_create_entry( + title=BOARD_NAME, + data={ + # Assume the firmware type is EZSP if we cannot probe it + FIRMWARE: (self._probed_firmware_type or ApplicationType.EZSP).value, + }, + ) -class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandler): - """Handle an option flow for Home Assistant Yellow.""" +class BaseHomeAssistantYellowOptionsFlow(OptionsFlow, ABC): + """Base Home Assistant Yellow options flow shared between firmware and multi-PAN.""" _hw_settings: dict[str, bool] | None = None + def __init__(self, hass: HomeAssistant, *args: Any, **kwargs: Any) -> None: + """Instantiate options flow.""" + super().__init__(*args, **kwargs) + self._supervisor_client = get_supervisor_client(hass) + + @abstractmethod + async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: + """Show the main menu.""" + + @final + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options flow.""" + return await self.async_step_main_menu() + + @final async def async_step_on_supervisor( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle logic when on Supervisor host.""" return await self.async_step_main_menu() - async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: - """Show the main menu.""" - return self.async_show_menu( - step_id="main_menu", - menu_options=[ - "hardware_settings", - "multipan_settings", - ], - ) - async def async_step_hardware_settings( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -124,7 +178,7 @@ class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandl self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Reboot now.""" - await async_reboot_host(self.hass) + await self._supervisor_client.host.reboot() return self.async_create_entry(data={}) async def async_step_reboot_later( @@ -133,18 +187,36 @@ class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandl """Reboot later.""" return self.async_create_entry(data={}) + +class HomeAssistantYellowMultiPanOptionsFlowHandler( + BaseHomeAssistantYellowOptionsFlow, MultiprotocolOptionsFlowHandler +): + """Handle a multi-PAN options flow for Home Assistant Yellow.""" + + async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: + """Show the main menu.""" + return self.async_show_menu( + step_id="main_menu", + menu_options=[ + "hardware_settings", + "multipan_settings", + ], + ) + async def async_step_multipan_settings( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle multipan settings.""" - return await super().async_step_on_supervisor(user_input) + return await MultiprotocolOptionsFlowHandler.async_step_on_supervisor( + self, user_input + ) async def _async_serial_port_settings( self, - ) -> silabs_multiprotocol_addon.SerialPortSettings: + ) -> MultiprotocolSerialPortSettings: """Return the radio serial port settings.""" - return 
silabs_multiprotocol_addon.SerialPortSettings( - device="/dev/ttyAMA1", + return MultiprotocolSerialPortSettings( + device=RADIO_DEVICE, baudrate="115200", flow_control=True, ) @@ -163,4 +235,64 @@ class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandl def _hardware_name(self) -> str: """Return the name of the hardware.""" - return "Home Assistant Yellow" + return BOARD_NAME + + async def async_step_flashing_complete( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Finish flashing and update the config entry.""" + self.hass.config_entries.async_update_entry( + entry=self.config_entry, + data={ + **self.config_entry.data, + FIRMWARE: ApplicationType.EZSP.value, + }, + ) + + return await super().async_step_flashing_complete(user_input) + + +class HomeAssistantYellowOptionsFlowHandler( + BaseHomeAssistantYellowOptionsFlow, BaseFirmwareOptionsFlow +): + """Handle a firmware options flow for Home Assistant Yellow.""" + + def __init__(self, hass: HomeAssistant, *args: Any, **kwargs: Any) -> None: + """Instantiate options flow.""" + super().__init__(hass, *args, **kwargs) + + self._hardware_name = BOARD_NAME + self._device = RADIO_DEVICE + + # Regenerate the translation placeholders + self._get_translation_placeholders() + + async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: + """Show the main menu.""" + return self.async_show_menu( + step_id="main_menu", + menu_options=[ + "hardware_settings", + "firmware_settings", + ], + ) + + async def async_step_firmware_settings( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle firmware configuration settings.""" + return await super().async_step_pick_firmware() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._probed_firmware_type is not None + + self.hass.config_entries.async_update_entry( + entry=self.config_entry, + data={ + **self.config_entry.data, + FIRMWARE: self._probed_firmware_type.value, + }, + ) + + return self.async_create_entry(title="", data={}) diff --git a/homeassistant/components/homeassistant_yellow/const.py b/homeassistant/components/homeassistant_yellow/const.py index 8f1f9a4c2b8..79753ae9b9e 100644 --- a/homeassistant/components/homeassistant_yellow/const.py +++ b/homeassistant/components/homeassistant_yellow/const.py @@ -12,3 +12,6 @@ ZHA_HW_DISCOVERY_DATA = { }, "radio_type": "efr32", } + +FIRMWARE = "firmware" +ZHA_DOMAIN = "zha" diff --git a/homeassistant/components/homeassistant_yellow/manifest.json b/homeassistant/components/homeassistant_yellow/manifest.json index a9715003172..caf4d32c746 100644 --- a/homeassistant/components/homeassistant_yellow/manifest.json +++ b/homeassistant/components/homeassistant_yellow/manifest.json @@ -6,5 +6,6 @@ "config_flow": false, "dependencies": ["hardware", "homeassistant_hardware"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_yellow", - "integration_type": "hardware" + "integration_type": "hardware", + "single_config_entry": true } diff --git a/homeassistant/components/homeassistant_yellow/strings.json b/homeassistant/components/homeassistant_yellow/strings.json index 95442d31500..b089e483899 100644 --- a/homeassistant/components/homeassistant_yellow/strings.json +++ b/homeassistant/components/homeassistant_yellow/strings.json @@ -42,6 +42,7 @@ "main_menu": { "menu_options": { "hardware_settings": "[%key:component::homeassistant_yellow::options::step::hardware_settings::title%]", + 
"firmware_settings": "Switch between Zigbee or Thread firmware.", "multipan_settings": "Configure IEEE 802.15.4 radio multiprotocol support" } }, @@ -79,6 +80,46 @@ "start_flasher_addon": { "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::title%]", "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::description%]" + }, + "pick_firmware": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]", + "menu_options": { + "pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]", + "pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]" + } + }, + "install_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_flasher_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_flasher_addon::description%]" + }, + "run_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::run_zigbee_flasher_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::run_zigbee_flasher_addon::description%]" + }, + "zigbee_flasher_failed": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_flasher_failed::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_flasher_failed::description%]" + }, + "confirm_zigbee": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]" + }, + "install_otbr_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]" + }, + "start_otbr_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]" + }, + "otbr_failed": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::description%]" + }, + "confirm_otbr": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]" } }, "error": { @@ -93,11 +134,20 @@ "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", "read_hw_settings_error": "Failed to read hardware settings", "write_hw_settings_error": "Failed to write hardware settings", - "zha_migration_failed": 
"[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]" + "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]", + "not_hassio_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::not_hassio_thread%]", + "otbr_addon_already_running": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_addon_already_running%]", + "zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]", + "otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]", + "unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or addon is currently trying to communicate with the device." }, "progress": { "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]", - "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]" + "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "install_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_zigbee_flasher_addon%]", + "run_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::run_zigbee_flasher_addon%]", + "uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::uninstall_zigbee_flasher_addon%]" } } } diff --git a/homeassistant/components/homekit/__init__.py b/homeassistant/components/homekit/__init__.py index 3f633c2ec59..97fb17d7db5 100644 --- a/homeassistant/components/homekit/__init__.py +++ b/homeassistant/components/homekit/__init__.py @@ -33,6 +33,7 @@ from homeassistant.components.device_automation.trigger import ( from homeassistant.components.event import DOMAIN as EVENT_DOMAIN, EventDeviceClass from homeassistant.components.http import KEY_HASS, HomeAssistantView from homeassistant.components.humidifier import DOMAIN as HUMIDIFIER_DOMAIN +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN, SensorDeviceClass from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( @@ -167,7 +168,6 @@ BATTERY_SENSOR = (SENSOR_DOMAIN, SensorDeviceClass.BATTERY) MOTION_EVENT_SENSOR = (EVENT_DOMAIN, EventDeviceClass.MOTION) MOTION_SENSOR = (BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass.MOTION) DOORBELL_EVENT_SENSOR = (EVENT_DOMAIN, EventDeviceClass.DOORBELL) -DOORBELL_BINARY_SENSOR = (BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass.OCCUPANCY) HUMIDITY_SENSOR = (SENSOR_DOMAIN, SensorDeviceClass.HUMIDITY) @@ -409,7 +409,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: HomeKitConfigEntry) -> break if not logged_shutdown_wait: - _LOGGER.info("Waiting for the HomeKit server to shutdown") + _LOGGER.debug("Waiting for the HomeKit server to shutdown") logged_shutdown_wait = True await asyncio.sleep(PORT_CLEANUP_CHECK_INTERVAL_SECS) @@ -1134,14 +1134,12 @@ class HomeKit: 
config[entity_id].setdefault( CONF_LINKED_MOTION_SENSOR, motion_binary_sensor_entity_id ) + + if domain in (CAMERA_DOMAIN, LOCK_DOMAIN): if doorbell_event_entity_id := lookup.get(DOORBELL_EVENT_SENSOR): config[entity_id].setdefault( CONF_LINKED_DOORBELL_SENSOR, doorbell_event_entity_id ) - elif doorbell_binary_sensor_entity_id := lookup.get(DOORBELL_BINARY_SENSOR): - config[entity_id].setdefault( - CONF_LINKED_DOORBELL_SENSOR, doorbell_binary_sensor_entity_id - ) if domain == HUMIDIFIER_DOMAIN and ( current_humidity_sensor_entity_id := lookup.get(HUMIDITY_SENSOR) diff --git a/homeassistant/components/homekit/config_flow.py b/homeassistant/components/homekit/config_flow.py index 78979f73490..53db7774821 100644 --- a/homeassistant/components/homekit/config_flow.py +++ b/homeassistant/components/homekit/config_flow.py @@ -39,6 +39,7 @@ from homeassistant.helpers import ( config_validation as cv, device_registry as dr, entity_registry as er, + selector, ) from homeassistant.loader import async_get_integrations @@ -178,12 +179,12 @@ def _async_build_entities_filter( ) -def _async_cameras_from_entities(entities: list[str]) -> dict[str, str]: - return { - entity_id: entity_id +def _async_cameras_from_entities(entities: list[str]) -> list[str]: + return [ + entity_id for entity_id in entities if entity_id.startswith(CAMERA_ENTITY_PREFIX) - } + ] async def _async_name_to_type_map(hass: HomeAssistant) -> dict[str, str]: @@ -311,12 +312,12 @@ class HomeKitConfigFlow(ConfigFlow, domain=DOMAIN): title=f"{name}:{entry_data[CONF_PORT]}", data=entry_data ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from yaml.""" - if not self._async_is_unique_name_port(user_input): + if not self._async_is_unique_name_port(import_data): return self.async_abort(reason="port_name_in_use") return self.async_create_entry( - title=f"{user_input[CONF_NAME]}:{user_input[CONF_PORT]}", data=user_input + title=f"{import_data[CONF_NAME]}:{import_data[CONF_PORT]}", data=import_data ) @callback @@ -361,17 +362,16 @@ class HomeKitConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for homekit.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.hk_options: dict[str, Any] = {} - self.included_cameras: dict[str, str] = {} + self.included_cameras: list[str] = [] async def async_step_yaml( self, user_input: dict[str, Any] | None = None @@ -461,13 +461,21 @@ class OptionsFlowHandler(OptionsFlow): data_schema = vol.Schema( { vol.Optional( - CONF_CAMERA_COPY, - default=cameras_with_copy, - ): cv.multi_select(self.included_cameras), + CONF_CAMERA_COPY, default=cameras_with_copy + ): selector.EntitySelector( + selector.EntitySelectorConfig( + multiple=True, + include_entities=(self.included_cameras), + ) + ), vol.Optional( - CONF_CAMERA_AUDIO, - default=cameras_with_audio, - ): cv.multi_select(self.included_cameras), + CONF_CAMERA_AUDIO, default=cameras_with_audio + ): selector.EntitySelector( + selector.EntitySelectorConfig( + multiple=True, + include_entities=(self.included_cameras), + ) + ), } ) return self.async_show_form(step_id="cameras", data_schema=data_schema) @@ -508,9 
+516,13 @@ class OptionsFlowHandler(OptionsFlow): step_id="accessory", data_schema=vol.Schema( { - vol.Required(CONF_ENTITIES, default=default_value): vol.In( - all_supported_entities - ) + vol.Required( + CONF_ENTITIES, default=default_value + ): selector.EntitySelector( + selector.EntitySelectorConfig( + include_entities=all_supported_entities, + ) + ), } ), ) @@ -546,9 +558,14 @@ class OptionsFlowHandler(OptionsFlow): }, data_schema=vol.Schema( { - vol.Optional(CONF_ENTITIES, default=default_value): cv.multi_select( - all_supported_entities - ) + vol.Optional( + CONF_ENTITIES, default=default_value + ): selector.EntitySelector( + selector.EntitySelectorConfig( + multiple=True, + include_entities=all_supported_entities, + ) + ), } ), ) @@ -561,17 +578,17 @@ class OptionsFlowHandler(OptionsFlow): domains = hk_options[CONF_DOMAINS] if user_input is not None: - self.included_cameras = {} + self.included_cameras = [] entities = cv.ensure_list(user_input[CONF_ENTITIES]) if CAMERA_DOMAIN in domains: camera_entities = _async_get_matching_entities( self.hass, [CAMERA_DOMAIN] ) - self.included_cameras = { - entity_id: entity_id + self.included_cameras = [ + entity_id for entity_id in camera_entities if entity_id not in entities - } + ] hk_options[CONF_FILTER] = _make_entity_filter( include_domains=domains, exclude_entities=entities ) @@ -598,9 +615,14 @@ class OptionsFlowHandler(OptionsFlow): }, data_schema=vol.Schema( { - vol.Optional(CONF_ENTITIES, default=default_value): cv.multi_select( - all_supported_entities - ) + vol.Optional( + CONF_ENTITIES, default=default_value + ): selector.EntitySelector( + selector.EntitySelectorConfig( + multiple=True, + include_entities=all_supported_entities, + ) + ), } ), ) @@ -684,13 +706,11 @@ def _async_get_matching_entities( domains: list[str] | None = None, include_entity_category: bool = False, include_hidden: bool = False, -) -> dict[str, str]: +) -> list[str]: """Fetch all entities or entities in the given domains.""" ent_reg = er.async_get(hass) - return { - state.entity_id: ( - f"{state.attributes.get(ATTR_FRIENDLY_NAME, state.entity_id)} ({state.entity_id})" - ) + return [ + state.entity_id for state in sorted( hass.states.async_all(domains and set(domains)), key=lambda item: item.entity_id, @@ -698,7 +718,7 @@ def _async_get_matching_entities( if not _exclude_by_entity_registry( ent_reg, state.entity_id, include_entity_category, include_hidden ) - } + ] def _domains_set_from_entities(entity_ids: Iterable[str]) -> set[str]: diff --git a/homeassistant/components/homekit/doorbell.py b/homeassistant/components/homekit/doorbell.py new file mode 100644 index 00000000000..45bbb2ea0ca --- /dev/null +++ b/homeassistant/components/homekit/doorbell.py @@ -0,0 +1,121 @@ +"""Extend the doorbell functions.""" + +from __future__ import annotations + +import logging +from typing import Any + +from pyhap.util import callback as pyhap_callback + +from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import ( + Event, + EventStateChangedData, + HassJobType, + State, + callback as ha_callback, +) +from homeassistant.helpers.event import async_track_state_change_event + +from .accessories import HomeAccessory +from .const import ( + CHAR_MUTE, + CHAR_PROGRAMMABLE_SWITCH_EVENT, + CONF_LINKED_DOORBELL_SENSOR, + SERV_DOORBELL, + SERV_SPEAKER, + SERV_STATELESS_PROGRAMMABLE_SWITCH, +) +from .util import state_changed_event_is_same_state + +_LOGGER = logging.getLogger(__name__) + +DOORBELL_SINGLE_PRESS = 0 +DOORBELL_DOUBLE_PRESS = 
1 +DOORBELL_LONG_PRESS = 2 + + +class HomeDoorbellAccessory(HomeAccessory): + """Accessory with optional doorbell.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Initialize an Accessory object with optional attached doorbell.""" + super().__init__(*args, **kwargs) + self._char_doorbell_detected = None + self._char_doorbell_detected_switch = None + linked_doorbell_sensor: str | None + linked_doorbell_sensor = self.config.get(CONF_LINKED_DOORBELL_SENSOR) + self.linked_doorbell_sensor = linked_doorbell_sensor + self.doorbell_is_event = False + if not linked_doorbell_sensor: + return + self.doorbell_is_event = linked_doorbell_sensor.startswith("event.") + if not (state := self.hass.states.get(linked_doorbell_sensor)): + return + serv_doorbell = self.add_preload_service(SERV_DOORBELL) + self.set_primary_service(serv_doorbell) + self._char_doorbell_detected = serv_doorbell.configure_char( + CHAR_PROGRAMMABLE_SWITCH_EVENT, + value=0, + ) + serv_stateless_switch = self.add_preload_service( + SERV_STATELESS_PROGRAMMABLE_SWITCH + ) + self._char_doorbell_detected_switch = serv_stateless_switch.configure_char( + CHAR_PROGRAMMABLE_SWITCH_EVENT, + value=0, + valid_values={"SinglePress": DOORBELL_SINGLE_PRESS}, + ) + serv_speaker = self.add_preload_service(SERV_SPEAKER) + serv_speaker.configure_char(CHAR_MUTE, value=0) + self.async_update_doorbell_state(None, state) + + @ha_callback + @pyhap_callback # type: ignore[misc] + def run(self) -> None: + """Handle doorbell event.""" + if self._char_doorbell_detected: + assert self.linked_doorbell_sensor + self._subscriptions.append( + async_track_state_change_event( + self.hass, + self.linked_doorbell_sensor, + self.async_update_doorbell_state_event, + job_type=HassJobType.Callback, + ) + ) + + super().run() + + @ha_callback + def async_update_doorbell_state_event( + self, event: Event[EventStateChangedData] + ) -> None: + """Handle state change event listener callback.""" + if not state_changed_event_is_same_state(event) and ( + new_state := event.data["new_state"] + ): + self.async_update_doorbell_state(event.data["old_state"], new_state) + + @ha_callback + def async_update_doorbell_state( + self, old_state: State | None, new_state: State + ) -> None: + """Handle link doorbell sensor state change to update HomeKit value.""" + assert self._char_doorbell_detected + assert self._char_doorbell_detected_switch + state = new_state.state + if state == STATE_ON or ( + self.doorbell_is_event + and old_state is not None + and old_state.state != STATE_UNAVAILABLE + and state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) + ): + self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS) + self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS) + _LOGGER.debug( + "%s: Set linked doorbell %s sensor to %d", + self.entity_id, + self.linked_doorbell_sensor, + DOORBELL_SINGLE_PRESS, + ) diff --git a/homeassistant/components/homekit/icons.json b/homeassistant/components/homekit/icons.json index fb0461eb5d8..7d8ddf131ef 100644 --- a/homeassistant/components/homekit/icons.json +++ b/homeassistant/components/homekit/icons.json @@ -1,7 +1,13 @@ { "services": { - "reload": "mdi:reload", - "reset_accessory": "mdi:cog-refresh", - "unpair": "mdi:link-variant-off" + "reload": { + "service": "mdi:reload" + }, + "reset_accessory": { + "service": "mdi:cog-refresh" + }, + "unpair": { + "service": "mdi:link-variant-off" + } } } diff --git a/homeassistant/components/homekit/manifest.json b/homeassistant/components/homekit/manifest.json index 
eebdc0026fd..cf74bcc7d67 100644 --- a/homeassistant/components/homekit/manifest.json +++ b/homeassistant/components/homekit/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["pyhap"], "requirements": [ - "HAP-python==4.9.1", + "HAP-python==4.9.2", "fnv-hash-fast==1.0.2", "PyQRCode==1.2.1", "base36==0.1.1" diff --git a/homeassistant/components/homekit/type_cameras.py b/homeassistant/components/homekit/type_cameras.py index 3851bb43541..0fb2c2e7922 100644 --- a/homeassistant/components/homekit/type_cameras.py +++ b/homeassistant/components/homekit/type_cameras.py @@ -31,15 +31,12 @@ from homeassistant.helpers.event import ( ) from homeassistant.util.async_ import create_eager_task -from .accessories import TYPES, HomeAccessory, HomeDriver +from .accessories import TYPES, HomeDriver from .const import ( CHAR_MOTION_DETECTED, - CHAR_MUTE, - CHAR_PROGRAMMABLE_SWITCH_EVENT, CONF_AUDIO_CODEC, CONF_AUDIO_MAP, CONF_AUDIO_PACKET_SIZE, - CONF_LINKED_DOORBELL_SENSOR, CONF_LINKED_MOTION_SENSOR, CONF_MAX_FPS, CONF_MAX_HEIGHT, @@ -64,18 +61,13 @@ from .const import ( DEFAULT_VIDEO_MAP, DEFAULT_VIDEO_PACKET_SIZE, DEFAULT_VIDEO_PROFILE_NAMES, - SERV_DOORBELL, SERV_MOTION_SENSOR, - SERV_SPEAKER, - SERV_STATELESS_PROGRAMMABLE_SWITCH, ) +from .doorbell import HomeDoorbellAccessory from .util import pid_is_alive, state_changed_event_is_same_state _LOGGER = logging.getLogger(__name__) -DOORBELL_SINGLE_PRESS = 0 -DOORBELL_DOUBLE_PRESS = 1 -DOORBELL_LONG_PRESS = 2 VIDEO_OUTPUT = ( "-map {v_map} -an " @@ -147,7 +139,9 @@ CONFIG_DEFAULTS = { @TYPES.register("Camera") -class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] +# False-positive on pylint, not a CameraEntity +# pylint: disable-next=hass-enforce-class-module +class Camera(HomeDoorbellAccessory, PyhapCamera): # type: ignore[misc] """Generate a Camera accessory.""" def __init__( @@ -235,36 +229,6 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] ) self._async_update_motion_state(None, state) - self._char_doorbell_detected = None - self._char_doorbell_detected_switch = None - linked_doorbell_sensor: str | None = self.config.get( - CONF_LINKED_DOORBELL_SENSOR - ) - self.linked_doorbell_sensor = linked_doorbell_sensor - self.doorbell_is_event = False - if not linked_doorbell_sensor: - return - self.doorbell_is_event = linked_doorbell_sensor.startswith("event.") - if not (state := self.hass.states.get(linked_doorbell_sensor)): - return - serv_doorbell = self.add_preload_service(SERV_DOORBELL) - self.set_primary_service(serv_doorbell) - self._char_doorbell_detected = serv_doorbell.configure_char( - CHAR_PROGRAMMABLE_SWITCH_EVENT, - value=0, - ) - serv_stateless_switch = self.add_preload_service( - SERV_STATELESS_PROGRAMMABLE_SWITCH - ) - self._char_doorbell_detected_switch = serv_stateless_switch.configure_char( - CHAR_PROGRAMMABLE_SWITCH_EVENT, - value=0, - valid_values={"SinglePress": DOORBELL_SINGLE_PRESS}, - ) - serv_speaker = self.add_preload_service(SERV_SPEAKER) - serv_speaker.configure_char(CHAR_MUTE, value=0) - self._async_update_doorbell_state(None, state) - @pyhap_callback # type: ignore[misc] @callback def run(self) -> None: @@ -283,17 +247,6 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] ) ) - if self._char_doorbell_detected: - assert self.linked_doorbell_sensor - self._subscriptions.append( - async_track_state_change_event( - self.hass, - self.linked_doorbell_sensor, - self._async_update_doorbell_state_event, - job_type=HassJobType.Callback, - ) - ) - super().run() @callback @@ -342,39 
+295,6 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] detected, ) - @callback - def _async_update_doorbell_state_event( - self, event: Event[EventStateChangedData] - ) -> None: - """Handle state change event listener callback.""" - if not state_changed_event_is_same_state(event) and ( - new_state := event.data["new_state"] - ): - self._async_update_doorbell_state(event.data["old_state"], new_state) - - @callback - def _async_update_doorbell_state( - self, old_state: State | None, new_state: State - ) -> None: - """Handle link doorbell sensor state change to update HomeKit value.""" - assert self._char_doorbell_detected - assert self._char_doorbell_detected_switch - state = new_state.state - if state == STATE_ON or ( - self.doorbell_is_event - and old_state is not None - and old_state.state != STATE_UNAVAILABLE - and state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) - ): - self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS) - self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS) - _LOGGER.debug( - "%s: Set linked doorbell %s sensor to %d", - self.entity_id, - self.linked_doorbell_sensor, - DOORBELL_SINGLE_PRESS, - ) - @callback def async_update_state(self, new_state: State | None) -> None: """Handle state change to update HomeKit value.""" @@ -453,7 +373,7 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] _LOGGER.error("Failed to open ffmpeg stream") return False - _LOGGER.info( + _LOGGER.debug( "[%s] Started stream process - PID %d", session_info["id"], stream.process.pid, @@ -528,11 +448,11 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] self._async_stop_ffmpeg_watch(session_id) if not pid_is_alive(stream.process.pid): - _LOGGER.info("[%s] Stream already stopped", session_id) + _LOGGER.warning("[%s] Stream already stopped", session_id) return for shutdown_method in ("close", "kill"): - _LOGGER.info("[%s] %s stream", session_id, shutdown_method) + _LOGGER.debug("[%s] %s stream", session_id, shutdown_method) try: await getattr(stream, shutdown_method)() except Exception: diff --git a/homeassistant/components/homekit/type_covers.py b/homeassistant/components/homekit/type_covers.py index 29dda418665..6752633f3d2 100644 --- a/homeassistant/components/homekit/type_covers.py +++ b/homeassistant/components/homekit/type_covers.py @@ -17,8 +17,9 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, CoverEntityFeature, + CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -28,11 +29,7 @@ from homeassistant.const import ( SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, STATE_ON, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import ( Event, @@ -72,10 +69,10 @@ from .const import ( ) DOOR_CURRENT_HASS_TO_HK = { - STATE_OPEN: HK_DOOR_OPEN, - STATE_CLOSED: HK_DOOR_CLOSED, - STATE_OPENING: HK_DOOR_OPENING, - STATE_CLOSING: HK_DOOR_CLOSING, + CoverState.OPEN: HK_DOOR_OPEN, + CoverState.CLOSED: HK_DOOR_CLOSED, + CoverState.OPENING: HK_DOOR_OPENING, + CoverState.CLOSING: HK_DOOR_CLOSING, } # HomeKit only has two states for @@ -85,13 +82,13 @@ DOOR_CURRENT_HASS_TO_HK = { # Opening is mapped to 0 since the target is Open # Closing is mapped to 1 since the target is Closed DOOR_TARGET_HASS_TO_HK = { - STATE_OPEN: HK_DOOR_OPEN, - STATE_CLOSED: HK_DOOR_CLOSED, - STATE_OPENING: HK_DOOR_OPEN, - STATE_CLOSING: HK_DOOR_CLOSED, + CoverState.OPEN: HK_DOOR_OPEN, + 
CoverState.CLOSED: HK_DOOR_CLOSED, + CoverState.OPENING: HK_DOOR_OPEN, + CoverState.CLOSING: HK_DOOR_CLOSED, } -MOVING_STATES = {STATE_OPENING, STATE_CLOSING} +MOVING_STATES = {CoverState.OPENING, CoverState.CLOSING} _LOGGER = logging.getLogger(__name__) @@ -181,16 +178,16 @@ class GarageDoorOpener(HomeAccessory): if value == HK_DOOR_OPEN: if self.char_current_state.value != value: self.char_current_state.set_value(HK_DOOR_OPENING) - self.async_call_service(DOMAIN, SERVICE_OPEN_COVER, params) + self.async_call_service(COVER_DOMAIN, SERVICE_OPEN_COVER, params) elif value == HK_DOOR_CLOSED: if self.char_current_state.value != value: self.char_current_state.set_value(HK_DOOR_CLOSING) - self.async_call_service(DOMAIN, SERVICE_CLOSE_COVER, params) + self.async_call_service(COVER_DOMAIN, SERVICE_CLOSE_COVER, params) @callback def async_update_state(self, new_state: State) -> None: """Update cover state after state changed.""" - hass_state = new_state.state + hass_state: CoverState = new_state.state # type: ignore[assignment] target_door_state = DOOR_TARGET_HASS_TO_HK.get(hass_state) current_door_state = DOOR_CURRENT_HASS_TO_HK.get(hass_state) @@ -248,12 +245,12 @@ class OpeningDeviceBase(HomeAccessory): if value != 1: return self.async_call_service( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: self.entity_id} + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: self.entity_id} ) def set_tilt(self, value: float) -> None: """Set tilt to value if call came from HomeKit.""" - _LOGGER.info("%s: Set tilt to %d", self.entity_id, value) + _LOGGER.debug("%s: Set tilt to %d", self.entity_id, value) # HomeKit sends values between -90 and 90. # We'll have to normalize to [0,100] @@ -261,7 +258,9 @@ class OpeningDeviceBase(HomeAccessory): params = {ATTR_ENTITY_ID: self.entity_id, ATTR_TILT_POSITION: value} - self.async_call_service(DOMAIN, SERVICE_SET_COVER_TILT_POSITION, params, value) + self.async_call_service( + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, params, value + ) @callback def async_update_state(self, new_state: State) -> None: @@ -322,7 +321,7 @@ class OpeningDevice(OpeningDeviceBase, HomeAccessory): """Move cover to value if call came from HomeKit.""" _LOGGER.debug("%s: Set position to %d", self.entity_id, value) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_POSITION: value} - self.async_call_service(DOMAIN, SERVICE_SET_COVER_POSITION, params, value) + self.async_call_service(COVER_DOMAIN, SERVICE_SET_COVER_POSITION, params, value) @callback def async_update_state(self, new_state: State) -> None: @@ -423,7 +422,7 @@ class WindowCoveringBasic(OpeningDeviceBase, HomeAccessory): service, position = (SERVICE_STOP_COVER, 50) params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(COVER_DOMAIN, service, params) # Snap the current/target position to the expected final position. 
self.char_current_position.set_value(position) @@ -432,10 +431,11 @@ class WindowCoveringBasic(OpeningDeviceBase, HomeAccessory): @callback def async_update_state(self, new_state: State) -> None: """Update cover position after state changed.""" - position_mapping = {STATE_OPEN: 100, STATE_CLOSED: 0} - hk_position = position_mapping.get(new_state.state) + position_mapping = {CoverState.OPEN: 100, CoverState.CLOSED: 0} + _state: CoverState = new_state.state # type: ignore[assignment] + hk_position = position_mapping.get(_state) if hk_position is not None: - is_moving = new_state.state in MOVING_STATES + is_moving = _state in MOVING_STATES if self.char_current_position.value != hk_position: self.char_current_position.set_value(hk_position) @@ -450,8 +450,8 @@ class WindowCoveringBasic(OpeningDeviceBase, HomeAccessory): def _hass_state_to_position_start(state: str) -> int: """Convert hass state to homekit position state.""" - if state == STATE_OPENING: + if state == CoverState.OPENING: return HK_POSITION_GOING_TO_MAX - if state == STATE_CLOSING: + if state == CoverState.CLOSING: return HK_POSITION_GOING_TO_MIN return HK_POSITION_STOPPED diff --git a/homeassistant/components/homekit/type_fans.py b/homeassistant/components/homekit/type_fans.py index 64c121878a9..542d4500cbc 100644 --- a/homeassistant/components/homekit/type_fans.py +++ b/homeassistant/components/homekit/type_fans.py @@ -14,7 +14,7 @@ from homeassistant.components.fan import ( ATTR_PRESET_MODES, DIRECTION_FORWARD, DIRECTION_REVERSE, - DOMAIN, + DOMAIN as FAN_DOMAIN, SERVICE_OSCILLATE, SERVICE_SET_DIRECTION, SERVICE_SET_PERCENTAGE, @@ -179,12 +179,12 @@ class Fan(HomeAccessory): "%s: Set auto to 1 (%s)", self.entity_id, self.preset_modes[0] ) params[ATTR_PRESET_MODE] = self.preset_modes[0] - self.async_call_service(DOMAIN, SERVICE_SET_PRESET_MODE, params) + self.async_call_service(FAN_DOMAIN, SERVICE_SET_PRESET_MODE, params) elif current_state := self.hass.states.get(self.entity_id): percentage: float = current_state.attributes.get(ATTR_PERCENTAGE) or 50.0 params[ATTR_PERCENTAGE] = percentage _LOGGER.debug("%s: Set auto to 0", self.entity_id) - self.async_call_service(DOMAIN, SERVICE_TURN_ON, params) + self.async_call_service(FAN_DOMAIN, SERVICE_TURN_ON, params) def set_preset_mode(self, value: int, preset_mode: str) -> None: """Set preset_mode if call came from HomeKit.""" @@ -194,36 +194,36 @@ class Fan(HomeAccessory): params = {ATTR_ENTITY_ID: self.entity_id} if value: params[ATTR_PRESET_MODE] = preset_mode - self.async_call_service(DOMAIN, SERVICE_SET_PRESET_MODE, params) + self.async_call_service(FAN_DOMAIN, SERVICE_SET_PRESET_MODE, params) else: - self.async_call_service(DOMAIN, SERVICE_TURN_ON, params) + self.async_call_service(FAN_DOMAIN, SERVICE_TURN_ON, params) def set_state(self, value: int) -> None: """Set state if call came from HomeKit.""" _LOGGER.debug("%s: Set state to %d", self.entity_id, value) service = SERVICE_TURN_ON if value == 1 else SERVICE_TURN_OFF params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(FAN_DOMAIN, service, params) def set_direction(self, value: int) -> None: """Set state if call came from HomeKit.""" _LOGGER.debug("%s: Set direction to %d", self.entity_id, value) direction = DIRECTION_REVERSE if value == 1 else DIRECTION_FORWARD params = {ATTR_ENTITY_ID: self.entity_id, ATTR_DIRECTION: direction} - self.async_call_service(DOMAIN, SERVICE_SET_DIRECTION, params, direction) + self.async_call_service(FAN_DOMAIN, 
SERVICE_SET_DIRECTION, params, direction) def set_oscillating(self, value: int) -> None: """Set state if call came from HomeKit.""" _LOGGER.debug("%s: Set oscillating to %d", self.entity_id, value) oscillating = value == 1 params = {ATTR_ENTITY_ID: self.entity_id, ATTR_OSCILLATING: oscillating} - self.async_call_service(DOMAIN, SERVICE_OSCILLATE, params, oscillating) + self.async_call_service(FAN_DOMAIN, SERVICE_OSCILLATE, params, oscillating) def set_percentage(self, value: float) -> None: """Set state if call came from HomeKit.""" _LOGGER.debug("%s: Set speed to %d", self.entity_id, value) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_PERCENTAGE: value} - self.async_call_service(DOMAIN, SERVICE_SET_PERCENTAGE, params, value) + self.async_call_service(FAN_DOMAIN, SERVICE_SET_PERCENTAGE, params, value) @callback def async_update_state(self, new_state: State) -> None: diff --git a/homeassistant/components/homekit/type_humidifiers.py b/homeassistant/components/homekit/type_humidifiers.py index 5bdf5950f18..a57a5e00974 100644 --- a/homeassistant/components/homekit/type_humidifiers.py +++ b/homeassistant/components/homekit/type_humidifiers.py @@ -13,7 +13,7 @@ from homeassistant.components.humidifier import ( ATTR_MIN_HUMIDITY, DEFAULT_MAX_HUMIDITY, DEFAULT_MIN_HUMIDITY, - DOMAIN, + DOMAIN as HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, HumidifierDeviceClass, ) @@ -253,7 +253,7 @@ class HumidifierDehumidifier(HomeAccessory): if CHAR_ACTIVE in char_values: self.async_call_service( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON if char_values[CHAR_ACTIVE] else SERVICE_TURN_OFF, {ATTR_ENTITY_ID: self.entity_id}, f"{CHAR_ACTIVE} to {char_values[CHAR_ACTIVE]}", @@ -272,7 +272,7 @@ class HumidifierDehumidifier(HomeAccessory): self.char_target_humidity.set_value(humidity) self.async_call_service( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: self.entity_id, ATTR_HUMIDITY: humidity}, ( diff --git a/homeassistant/components/homekit/type_lights.py b/homeassistant/components/homekit/type_lights.py index cb446ea551c..cde80178c5e 100644 --- a/homeassistant/components/homekit/type_lights.py +++ b/homeassistant/components/homekit/type_lights.py @@ -20,7 +20,7 @@ from homeassistant.components.light import ( ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_WHITE, - DOMAIN, + DOMAIN as LIGHT_DOMAIN, ColorMode, brightness_supported, color_supported, @@ -171,8 +171,9 @@ class Light(HomeAccessory): events = [] service = SERVICE_TURN_ON params: dict[str, Any] = {ATTR_ENTITY_ID: self.entity_id} + has_on = CHAR_ON in char_values - if CHAR_ON in char_values: + if has_on: if not char_values[CHAR_ON]: service = SERVICE_TURN_OFF events.append(f"Set state to {char_values[CHAR_ON]}") @@ -180,7 +181,10 @@ class Light(HomeAccessory): brightness_pct = None if CHAR_BRIGHTNESS in char_values: if char_values[CHAR_BRIGHTNESS] == 0: - events[-1] = "Set state to 0" + if has_on: + events[-1] = "Set state to 0" + else: + events.append("Set state to 0") service = SERVICE_TURN_OFF else: brightness_pct = char_values[CHAR_BRIGHTNESS] @@ -188,7 +192,10 @@ class Light(HomeAccessory): if service == SERVICE_TURN_OFF: self.async_call_service( - DOMAIN, service, {ATTR_ENTITY_ID: self.entity_id}, ", ".join(events) + LIGHT_DOMAIN, + service, + {ATTR_ENTITY_ID: self.entity_id}, + ", ".join(events), ) return @@ -232,7 +239,7 @@ class Light(HomeAccessory): _LOGGER.debug( "Calling light service with params: %s -> %s", char_values, params ) - self.async_call_service(DOMAIN, service, params, ", ".join(events)) + 
self.async_call_service(LIGHT_DOMAIN, service, params, ", ".join(events))

     @callback
     def async_update_state(self, new_state: State) -> None:
diff --git a/homeassistant/components/homekit/type_locks.py b/homeassistant/components/homekit/type_locks.py
index e5b0ad22396..59da802b8b7 100644
--- a/homeassistant/components/homekit/type_locks.py
+++ b/homeassistant/components/homekit/type_locks.py
@@ -5,57 +5,56 @@ from typing import Any

 from pyhap.const import CATEGORY_DOOR_LOCK

-from homeassistant.components.lock import (
-    DOMAIN,
-    STATE_JAMMED,
-    STATE_LOCKED,
-    STATE_LOCKING,
-    STATE_UNLOCKED,
-    STATE_UNLOCKING,
-)
+from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState
 from homeassistant.const import ATTR_CODE, ATTR_ENTITY_ID, STATE_UNKNOWN
 from homeassistant.core import State, callback

-from .accessories import TYPES, HomeAccessory
+from .accessories import TYPES
 from .const import CHAR_LOCK_CURRENT_STATE, CHAR_LOCK_TARGET_STATE, SERV_LOCK
+from .doorbell import HomeDoorbellAccessory

 _LOGGER = logging.getLogger(__name__)

 HASS_TO_HOMEKIT_CURRENT = {
-    STATE_UNLOCKED: 0,
-    STATE_UNLOCKING: 1,
-    STATE_LOCKING: 0,
-    STATE_LOCKED: 1,
-    STATE_JAMMED: 2,
+    LockState.UNLOCKED.value: 0,
+    LockState.UNLOCKING.value: 1,
+    LockState.LOCKING.value: 0,
+    LockState.LOCKED.value: 1,
+    LockState.JAMMED.value: 2,
     STATE_UNKNOWN: 3,
 }

 HASS_TO_HOMEKIT_TARGET = {
-    STATE_UNLOCKED: 0,
-    STATE_UNLOCKING: 0,
-    STATE_LOCKING: 1,
-    STATE_LOCKED: 1,
+    LockState.UNLOCKED.value: 0,
+    LockState.UNLOCKING.value: 0,
+    LockState.LOCKING.value: 1,
+    LockState.LOCKED.value: 1,
 }

-VALID_TARGET_STATES = {STATE_LOCKING, STATE_UNLOCKING, STATE_LOCKED, STATE_UNLOCKED}
+VALID_TARGET_STATES = {
+    LockState.LOCKING.value,
+    LockState.UNLOCKING.value,
+    LockState.LOCKED.value,
+    LockState.UNLOCKED.value,
+}

 HOMEKIT_TO_HASS = {
-    0: STATE_UNLOCKED,
-    1: STATE_LOCKED,
-    2: STATE_JAMMED,
+    0: LockState.UNLOCKED.value,
+    1: LockState.LOCKED.value,
+    2: LockState.JAMMED.value,
     3: STATE_UNKNOWN,
 }

 STATE_TO_SERVICE = {
-    STATE_LOCKING: "unlock",
-    STATE_LOCKED: "lock",
-    STATE_UNLOCKING: "lock",
-    STATE_UNLOCKED: "unlock",
+    LockState.LOCKING.value: "unlock",
+    LockState.LOCKED.value: "lock",
+    LockState.UNLOCKING.value: "lock",
+    LockState.UNLOCKED.value: "unlock",
 }


 @TYPES.register("Lock")
-class Lock(HomeAccessory):
+class Lock(HomeDoorbellAccessory):
     """Generate a Lock accessory for a lock entity.

     The lock entity must support: unlock and lock.
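The hunk above does two things: it re-keys the lock state tables on the string values of the `LockState` enum, and it rebases `Lock` on the new `HomeDoorbellAccessory` from `doorbell.py`, which pairs with the `LOCK_SCHEMA`/`CONF_LINKED_DOORBELL_SENSOR` addition in `util.py` later in this diff. The sketch below is not part of the patch; it only illustrates how the re-keyed tables still resolve the plain state strings that `State.state` returns. The `LockState` class here is a stand-in for `homeassistant.components.lock.LockState`, trimmed to the members this hunk uses, and the dicts mirror the mappings added above.

```python
"""Minimal sketch (assumes Python 3.11+ for enum.StrEnum)."""

from enum import StrEnum


class LockState(StrEnum):
    """Stand-in for homeassistant.components.lock.LockState (subset used above)."""

    LOCKED = "locked"
    LOCKING = "locking"
    UNLOCKED = "unlocked"
    UNLOCKING = "unlocking"
    JAMMED = "jammed"


# HomeKit's lock service only models 0=unsecured, 1=secured, 2=jammed, 3=unknown,
# so the transitional states report the state they are leaving in the current map...
HASS_TO_HOMEKIT_CURRENT = {
    LockState.UNLOCKED.value: 0,
    LockState.UNLOCKING.value: 1,
    LockState.LOCKING.value: 0,
    LockState.LOCKED.value: 1,
    LockState.JAMMED.value: 2,
}
# ...and the state they are heading to in the target map.
HASS_TO_HOMEKIT_TARGET = {
    LockState.UNLOCKED.value: 0,
    LockState.UNLOCKING.value: 0,
    LockState.LOCKING.value: 1,
    LockState.LOCKED.value: 1,
}

# State.state is a plain str, and StrEnum members compare and hash equal to their
# values, so either spelling resolves against the .value-keyed tables.
assert HASS_TO_HOMEKIT_CURRENT["locking"] == 0
assert HASS_TO_HOMEKIT_CURRENT[LockState.LOCKING] == 0
assert HASS_TO_HOMEKIT_TARGET["locking"] == 1
```

Inheriting `HomeDoorbellAccessory` is what gives a lock the same linked-doorbell behaviour that was previously private to `Camera`: the doorbell service, stateless switch, and speaker characteristics are now created by the shared base class whenever a linked doorbell sensor is configured.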
@@ -74,7 +73,7 @@ class Lock(HomeAccessory): ) self.char_target_state = serv_lock_mechanism.configure_char( CHAR_LOCK_TARGET_STATE, - value=HASS_TO_HOMEKIT_CURRENT[STATE_LOCKED], + value=HASS_TO_HOMEKIT_CURRENT[LockState.LOCKED.value], setter_callback=self.set_state, ) self.async_update_state(state) @@ -89,7 +88,7 @@ class Lock(HomeAccessory): params = {ATTR_ENTITY_ID: self.entity_id} if self._code: params[ATTR_CODE] = self._code - self.async_call_service(DOMAIN, service, params) + self.async_call_service(LOCK_DOMAIN, service, params) @callback def async_update_state(self, new_state: State) -> None: diff --git a/homeassistant/components/homekit/type_media_players.py b/homeassistant/components/homekit/type_media_players.py index 4cdb471b4ff..adb16da5a2d 100644 --- a/homeassistant/components/homekit/type_media_players.py +++ b/homeassistant/components/homekit/type_media_players.py @@ -11,7 +11,7 @@ from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE_LIST, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, - DOMAIN, + DOMAIN as MEDIA_PLAYER_DOMAIN, SERVICE_SELECT_SOURCE, MediaPlayerEntityFeature, ) @@ -151,7 +151,7 @@ class MediaPlayer(HomeAccessory): _LOGGER.debug('%s: Set switch state for "on_off" to %s', self.entity_id, value) service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_play_pause(self, value: bool) -> None: """Move switch state to value if call came from HomeKit.""" @@ -160,7 +160,7 @@ class MediaPlayer(HomeAccessory): ) service = SERVICE_MEDIA_PLAY if value else SERVICE_MEDIA_PAUSE params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_play_stop(self, value: bool) -> None: """Move switch state to value if call came from HomeKit.""" @@ -169,7 +169,7 @@ class MediaPlayer(HomeAccessory): ) service = SERVICE_MEDIA_PLAY if value else SERVICE_MEDIA_STOP params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_toggle_mute(self, value: bool) -> None: """Move switch state to value if call came from HomeKit.""" @@ -177,7 +177,7 @@ class MediaPlayer(HomeAccessory): '%s: Set switch state for "toggle_mute" to %s', self.entity_id, value ) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_MUTED: value} - self.async_call_service(DOMAIN, SERVICE_VOLUME_MUTE, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, params) @callback def async_update_state(self, new_state: State) -> None: @@ -286,7 +286,7 @@ class TelevisionMediaPlayer(RemoteInputSelectAccessory): _LOGGER.debug('%s: Set switch state for "on_off" to %s', self.entity_id, value) service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_mute(self, value: bool) -> None: """Move switch state to value if call came from HomeKit.""" @@ -294,27 +294,27 @@ class TelevisionMediaPlayer(RemoteInputSelectAccessory): '%s: Set switch state for "toggle_mute" to %s', self.entity_id, value ) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_MUTED: value} - self.async_call_service(DOMAIN, SERVICE_VOLUME_MUTE, params) + 
self.async_call_service(MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, params) def set_volume(self, value: bool) -> None: """Send volume step value if call came from HomeKit.""" _LOGGER.debug("%s: Set volume to %s", self.entity_id, value) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_LEVEL: value} - self.async_call_service(DOMAIN, SERVICE_VOLUME_SET, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_SET, params) def set_volume_step(self, value: bool) -> None: """Send volume step value if call came from HomeKit.""" _LOGGER.debug("%s: Step volume by %s", self.entity_id, value) service = SERVICE_VOLUME_DOWN if value else SERVICE_VOLUME_UP params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_input_source(self, value: int) -> None: """Send input set value if call came from HomeKit.""" _LOGGER.debug("%s: Set current input to %s", self.entity_id, value) source_name = self._mapped_sources[self.sources[value]] params = {ATTR_ENTITY_ID: self.entity_id, ATTR_INPUT_SOURCE: source_name} - self.async_call_service(DOMAIN, SERVICE_SELECT_SOURCE, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, SERVICE_SELECT_SOURCE, params) def set_remote_key(self, value: int) -> None: """Send remote key value if call came from HomeKit.""" @@ -335,7 +335,7 @@ class TelevisionMediaPlayer(RemoteInputSelectAccessory): else: service = SERVICE_MEDIA_PLAY_PAUSE params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) return # Unhandled keys can be handled by listening to the event bus diff --git a/homeassistant/components/homekit/type_security_systems.py b/homeassistant/components/homekit/type_security_systems.py index 27c479de6ba..8634589cb5f 100644 --- a/homeassistant/components/homekit/type_security_systems.py +++ b/homeassistant/components/homekit/type_security_systems.py @@ -6,8 +6,9 @@ from typing import Any from pyhap.const import CATEGORY_ALARM_SYSTEM from homeassistant.components.alarm_control_panel import ( - DOMAIN, + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.const import ( ATTR_CODE, @@ -17,13 +18,8 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + STATE_UNAVAILABLE, + STATE_UNKNOWN, ) from homeassistant.core import State, callback @@ -43,22 +39,22 @@ HK_ALARM_DISARMED = 3 HK_ALARM_TRIGGERED = 4 HASS_TO_HOMEKIT_CURRENT = { - STATE_ALARM_ARMED_HOME: HK_ALARM_STAY_ARMED, - STATE_ALARM_ARMED_VACATION: HK_ALARM_AWAY_ARMED, - STATE_ALARM_ARMED_AWAY: HK_ALARM_AWAY_ARMED, - STATE_ALARM_ARMED_NIGHT: HK_ALARM_NIGHT_ARMED, - STATE_ALARM_ARMING: HK_ALARM_DISARMED, - STATE_ALARM_DISARMED: HK_ALARM_DISARMED, - STATE_ALARM_TRIGGERED: HK_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_HOME: HK_ALARM_STAY_ARMED, + AlarmControlPanelState.ARMED_VACATION: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.ARMED_AWAY: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.ARMED_NIGHT: HK_ALARM_NIGHT_ARMED, + AlarmControlPanelState.ARMING: HK_ALARM_DISARMED, + AlarmControlPanelState.DISARMED: HK_ALARM_DISARMED, + AlarmControlPanelState.TRIGGERED: HK_ALARM_TRIGGERED, } HASS_TO_HOMEKIT_TARGET = { - 
STATE_ALARM_ARMED_HOME: HK_ALARM_STAY_ARMED, - STATE_ALARM_ARMED_VACATION: HK_ALARM_AWAY_ARMED, - STATE_ALARM_ARMED_AWAY: HK_ALARM_AWAY_ARMED, - STATE_ALARM_ARMED_NIGHT: HK_ALARM_NIGHT_ARMED, - STATE_ALARM_ARMING: HK_ALARM_AWAY_ARMED, - STATE_ALARM_DISARMED: HK_ALARM_DISARMED, + AlarmControlPanelState.ARMED_HOME: HK_ALARM_STAY_ARMED, + AlarmControlPanelState.ARMED_VACATION: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.ARMED_AWAY: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.ARMED_NIGHT: HK_ALARM_NIGHT_ARMED, + AlarmControlPanelState.ARMING: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.DISARMED: HK_ALARM_DISARMED, } HASS_TO_HOMEKIT_SERVICES = { @@ -124,7 +120,7 @@ class SecuritySystem(HomeAccessory): self.char_current_state = serv_alarm.configure_char( CHAR_CURRENT_SECURITY_STATE, - value=HASS_TO_HOMEKIT_CURRENT[STATE_ALARM_DISARMED], + value=HASS_TO_HOMEKIT_CURRENT[AlarmControlPanelState.DISARMED], valid_values={ key: val for key, val in default_current_states.items() @@ -153,13 +149,21 @@ class SecuritySystem(HomeAccessory): params = {ATTR_ENTITY_ID: self.entity_id} if self._alarm_code: params[ATTR_CODE] = self._alarm_code - self.async_call_service(DOMAIN, service, params) + self.async_call_service(ALARM_CONTROL_PANEL_DOMAIN, service, params) @callback def async_update_state(self, new_state: State) -> None: """Update security state after state changed.""" - hass_state = new_state.state - if (current_state := HASS_TO_HOMEKIT_CURRENT.get(hass_state)) is not None: + hass_state: str | AlarmControlPanelState = new_state.state + if hass_state in {"None", STATE_UNKNOWN, STATE_UNAVAILABLE}: + # Bail out early for no state, unknown or unavailable + return + if hass_state is not None: + hass_state = AlarmControlPanelState(hass_state) + if ( + hass_state + and (current_state := HASS_TO_HOMEKIT_CURRENT.get(hass_state)) is not None + ): self.char_current_state.set_value(current_state) _LOGGER.debug( "%s: Updated current state to %s (%d)", @@ -167,5 +171,8 @@ class SecuritySystem(HomeAccessory): hass_state, current_state, ) - if (target_state := HASS_TO_HOMEKIT_TARGET.get(hass_state)) is not None: + if ( + hass_state + and (target_state := HASS_TO_HOMEKIT_TARGET.get(hass_state)) is not None + ): self.char_target_state.set_value(target_state) diff --git a/homeassistant/components/homekit/type_switches.py b/homeassistant/components/homekit/type_switches.py index 45a823882f7..0482a5956ac 100644 --- a/homeassistant/components/homekit/type_switches.py +++ b/homeassistant/components/homekit/type_switches.py @@ -16,12 +16,12 @@ from pyhap.const import ( from homeassistant.components import button, input_button from homeassistant.components.input_select import ATTR_OPTIONS, SERVICE_SELECT_OPTION -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, SERVICE_RETURN_TO_BASE, SERVICE_START, - STATE_CLEANING, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.const import ( @@ -109,7 +109,7 @@ class Outlet(HomeAccessory): _LOGGER.debug("%s: Set switch state to %s", self.entity_id, value) params = {ATTR_ENTITY_ID: self.entity_id} service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF - self.async_call_service(DOMAIN, service, params) + self.async_call_service(SWITCH_DOMAIN, service, params) @callback def async_update_state(self, new_state: State) -> None: @@ -213,7 +213,7 @@ class Vacuum(Switch): @callback def async_update_state(self, new_state: State) -> None: 
"""Update switch state after state changed.""" - current_state = new_state.state in (STATE_CLEANING, STATE_ON) + current_state = new_state.state in (VacuumActivity.CLEANING, STATE_ON) _LOGGER.debug("%s: Set current state to %s", self.entity_id, current_state) self.char_on.set_value(current_state) diff --git a/homeassistant/components/homekit/util.py b/homeassistant/components/homekit/util.py index 4d4620477cb..d339aa6aded 100644 --- a/homeassistant/components/homekit/util.py +++ b/homeassistant/components/homekit/util.py @@ -114,6 +114,7 @@ _LOGGER = logging.getLogger(__name__) NUMBERS_ONLY_RE = re.compile(r"[^\d.]+") VERSION_RE = re.compile(r"([0-9]+)(\.[0-9]+)?(\.[0-9]+)?") +INVALID_END_CHARS = "-_ " MAX_VERSION_PART = 2**32 - 1 @@ -181,7 +182,6 @@ HUMIDIFIER_SCHEMA = BASIC_INFO_SCHEMA.extend( {vol.Optional(CONF_LINKED_HUMIDITY_SENSOR): cv.entity_domain(sensor.DOMAIN)} ) - COVER_SCHEMA = BASIC_INFO_SCHEMA.extend( { vol.Optional(CONF_LINKED_OBSTRUCTION_SENSOR): cv.entity_domain( @@ -194,6 +194,14 @@ CODE_SCHEMA = BASIC_INFO_SCHEMA.extend( {vol.Optional(ATTR_CODE, default=None): vol.Any(None, cv.string)} ) +LOCK_SCHEMA = CODE_SCHEMA.extend( + { + vol.Optional(CONF_LINKED_DOORBELL_SENSOR): cv.entity_domain( + [binary_sensor.DOMAIN, EVENT_DOMAIN] + ), + } +) + MEDIA_PLAYER_SCHEMA = vol.Schema( { vol.Required(CONF_FEATURE): vol.All( @@ -283,7 +291,7 @@ def validate_entity_config(values: dict) -> dict[str, dict]: if not isinstance(config, dict): raise vol.Invalid(f"The configuration for {entity} must be a dictionary.") - if domain in ("alarm_control_panel", "lock"): + if domain == "alarm_control_panel": config = CODE_SCHEMA(config) elif domain == media_player.const.DOMAIN: @@ -300,6 +308,9 @@ def validate_entity_config(values: dict) -> dict[str, dict]: elif domain == "camera": config = CAMERA_SCHEMA(config) + elif domain == "lock": + config = LOCK_SCHEMA(config) + elif domain == "switch": config = SWITCH_TYPE_SCHEMA(config) @@ -414,25 +425,21 @@ def cleanup_name_for_homekit(name: str | None) -> str: # likely isn't a problem if name is None: return "None" # None crashes apple watches - return name.translate(HOMEKIT_CHAR_TRANSLATIONS)[:MAX_NAME_LENGTH] + return ( + name.translate(HOMEKIT_CHAR_TRANSLATIONS) + .lstrip(INVALID_END_CHARS)[:MAX_NAME_LENGTH] + .rstrip(INVALID_END_CHARS) + ) def temperature_to_homekit(temperature: float, unit: str) -> float: """Convert temperature to Celsius for HomeKit.""" - return round( - TemperatureConverter.convert(temperature, unit, UnitOfTemperature.CELSIUS), 1 - ) + return TemperatureConverter.convert(temperature, unit, UnitOfTemperature.CELSIUS) def temperature_to_states(temperature: float, unit: str) -> float: """Convert temperature back from Celsius to Home Assistant unit.""" - return ( - round( - TemperatureConverter.convert(temperature, UnitOfTemperature.CELSIUS, unit) - * 2 - ) - / 2 - ) + return TemperatureConverter.convert(temperature, UnitOfTemperature.CELSIUS, unit) def density_to_air_quality(density: float) -> int: diff --git a/homeassistant/components/homekit_controller/alarm_control_panel.py b/homeassistant/components/homekit_controller/alarm_control_panel.py index 1cb94926e8b..b17f122dfa5 100644 --- a/homeassistant/components/homekit_controller/alarm_control_panel.py +++ b/homeassistant/components/homekit_controller/alarm_control_panel.py @@ -10,17 +10,10 @@ from aiohomekit.model.services import Service, ServicesTypes from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + 
AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_BATTERY_LEVEL, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - Platform, -) +from homeassistant.const import ATTR_BATTERY_LEVEL, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -29,18 +22,18 @@ from .connection import HKDevice from .entity import HomeKitEntity CURRENT_STATE_MAP = { - 0: STATE_ALARM_ARMED_HOME, - 1: STATE_ALARM_ARMED_AWAY, - 2: STATE_ALARM_ARMED_NIGHT, - 3: STATE_ALARM_DISARMED, - 4: STATE_ALARM_TRIGGERED, + 0: AlarmControlPanelState.ARMED_HOME, + 1: AlarmControlPanelState.ARMED_AWAY, + 2: AlarmControlPanelState.ARMED_NIGHT, + 3: AlarmControlPanelState.DISARMED, + 4: AlarmControlPanelState.TRIGGERED, } TARGET_STATE_MAP = { - STATE_ALARM_ARMED_HOME: 0, - STATE_ALARM_ARMED_AWAY: 1, - STATE_ALARM_ARMED_NIGHT: 2, - STATE_ALARM_DISARMED: 3, + AlarmControlPanelState.ARMED_HOME: 0, + AlarmControlPanelState.ARMED_AWAY: 1, + AlarmControlPanelState.ARMED_NIGHT: 2, + AlarmControlPanelState.DISARMED: 3, } @@ -76,6 +69,7 @@ class HomeKitAlarmControlPanelEntity(HomeKitEntity, AlarmControlPanelEntity): | AlarmControlPanelEntityFeature.ARM_AWAY | AlarmControlPanelEntityFeature.ARM_NIGHT ) + _attr_code_arm_required = False def get_characteristic_types(self) -> list[str]: """Define the homekit characteristics the entity cares about.""" @@ -86,7 +80,7 @@ class HomeKitAlarmControlPanelEntity(HomeKitEntity, AlarmControlPanelEntity): ] @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the device.""" return CURRENT_STATE_MAP[ self.service.value(CharacteristicsTypes.SECURITY_SYSTEM_STATE_CURRENT) @@ -94,21 +88,23 @@ class HomeKitAlarmControlPanelEntity(HomeKitEntity, AlarmControlPanelEntity): async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - await self.set_alarm_state(STATE_ALARM_DISARMED, code) + await self.set_alarm_state(AlarmControlPanelState.DISARMED, code) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm command.""" - await self.set_alarm_state(STATE_ALARM_ARMED_AWAY, code) + await self.set_alarm_state(AlarmControlPanelState.ARMED_AWAY, code) async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send stay command.""" - await self.set_alarm_state(STATE_ALARM_ARMED_HOME, code) + await self.set_alarm_state(AlarmControlPanelState.ARMED_HOME, code) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send night command.""" - await self.set_alarm_state(STATE_ALARM_ARMED_NIGHT, code) + await self.set_alarm_state(AlarmControlPanelState.ARMED_NIGHT, code) - async def set_alarm_state(self, state: str, code: str | None = None) -> None: + async def set_alarm_state( + self, state: AlarmControlPanelState, code: str | None = None + ) -> None: """Send state command.""" await self.async_put_characteristics( {CharacteristicsTypes.SECURITY_SYSTEM_STATE_TARGET: TARGET_STATE_MAP[state]} diff --git a/homeassistant/components/homekit_controller/climate.py b/homeassistant/components/homekit_controller/climate.py index 544e23798d0..ba5237e6e2d 100644 --- a/homeassistant/components/homekit_controller/climate.py +++ b/homeassistant/components/homekit_controller/climate.py @@ -2,13 +2,13 @@ from __future__ import annotations -from functools import 
cached_property import logging from typing import Any, Final from aiohomekit.model.characteristics import ( ActivationStateValues, CharacteristicsTypes, + CurrentFanStateValues, CurrentHeaterCoolerStateValues, HeatingCoolingCurrentValues, HeatingCoolingTargetValues, @@ -17,6 +17,7 @@ from aiohomekit.model.characteristics import ( ) from aiohomekit.model.services import Service, ServicesTypes from aiohomekit.utils import clamp_enum_to_char +from propcache import cached_property from homeassistant.components.climate import ( ATTR_HVAC_MODE, @@ -135,7 +136,6 @@ class HomeKitBaseClimateEntity(HomeKitEntity, ClimateEntity): """The base HomeKit Controller climate entity.""" _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False @callback def _async_reconfigure(self) -> None: @@ -484,6 +484,7 @@ class HomeKitClimateEntity(HomeKitBaseClimateEntity): CharacteristicsTypes.TEMPERATURE_TARGET, CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT, CharacteristicsTypes.RELATIVE_HUMIDITY_TARGET, + CharacteristicsTypes.FAN_STATE_CURRENT, ] async def async_set_temperature(self, **kwargs: Any) -> None: @@ -666,7 +667,19 @@ class HomeKitClimateEntity(HomeKitBaseClimateEntity): return HVACAction.IDLE value = self.service.value(CharacteristicsTypes.HEATING_COOLING_CURRENT) - return CURRENT_MODE_HOMEKIT_TO_HASS.get(value) + current_hass_value = CURRENT_MODE_HOMEKIT_TO_HASS.get(value) + + # If a device has a fan state (such as an Ecobee thermostat) + # show the Fan state when the device is otherwise idle. + if ( + current_hass_value == HVACAction.IDLE + and self.service.has(CharacteristicsTypes.FAN_STATE_CURRENT) + and self.service.value(CharacteristicsTypes.FAN_STATE_CURRENT) + == CurrentFanStateValues.ACTIVE + ): + return HVACAction.FAN + + return current_hass_value @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/homekit_controller/config_flow.py b/homeassistant/components/homekit_controller/config_flow.py index 2ca32ccb911..9e67d618079 100644 --- a/homeassistant/components/homekit_controller/config_flow.py +++ b/homeassistant/components/homekit_controller/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import re -from typing import TYPE_CHECKING, Any, cast +from typing import TYPE_CHECKING, Any, Self, cast import aiohomekit from aiohomekit import Controller, const as aiohomekit_const @@ -111,6 +111,8 @@ class HomekitControllerFlowHandler(ConfigFlow, domain=DOMAIN): self.devices: dict[str, AbstractDiscovery] = {} self.controller: Controller | None = None self.finish_pairing: FinishPairing | None = None + self.pairing = False + self._device_paired = False async def _async_setup_controller(self) -> None: """Create the controller.""" @@ -168,28 +170,6 @@ class HomekitControllerFlowHandler(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_unignore(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Rediscover a previously ignored discover.""" - unique_id = user_input["unique_id"] - await self.async_set_unique_id(unique_id) - - if self.controller is None: - await self._async_setup_controller() - - assert self.controller - - try: - discovery = await self.controller.async_find(unique_id) - except aiohomekit.AccessoryNotFoundError: - return self.async_abort(reason="accessory_not_found_error") - - self.name = discovery.description.name - self.model = getattr(discovery.description, "model", BLE_DEFAULT_NAME) - self.category = discovery.description.category - self.hkid = discovery.description.id - - 
return self._async_step_pair_show_form() - @callback def _hkid_is_homekit(self, hkid: str) -> bool: """Determine if the device is a homekit bridge or accessory.""" @@ -322,18 +302,10 @@ class HomekitControllerFlowHandler(ConfigFlow, domain=DOMAIN): # Set unique-id and error out if it's already configured self._abort_if_unique_id_configured(updates=updated_ip_port) - for progress in self._async_in_progress(include_uninitialized=True): - context = progress["context"] - if context.get("unique_id") == normalized_hkid and not context.get( - "pairing" - ): - if paired: - # If the device gets paired, we want to dismiss - # an existing discovery since we can no longer - # pair with it - self.hass.config_entries.flow.async_abort(progress["flow_id"]) - else: - raise AbortFlow("already_in_progress") + self.hkid = normalized_hkid + self._device_paired = paired + if self.hass.config_entries.flow.async_has_matching_flow(self): + raise AbortFlow("already_in_progress") if paired: # Device is paired but not to us - ignore it @@ -354,13 +326,24 @@ class HomekitControllerFlowHandler(ConfigFlow, domain=DOMAIN): self.name = name self.model = model self.category = Categories(int(properties.get("ci", 0))) - self.hkid = normalized_hkid # We want to show the pairing form - but don't call async_step_pair # directly as it has side effects (will ask the device to show a # pairing code) return self._async_step_pair_show_form() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + if other_flow.context.get("unique_id") == self.hkid and not other_flow.pairing: + if self._device_paired: + # If the device gets paired, we want to dismiss + # an existing discovery since we can no longer + # pair with it + self.hass.config_entries.flow.async_abort(other_flow.flow_id) + else: + return True + return False + async def async_step_bluetooth( self, discovery_info: bluetooth.BluetoothServiceInfoBleak ) -> ConfigFlowResult: @@ -441,7 +424,7 @@ class HomekitControllerFlowHandler(ConfigFlow, domain=DOMAIN): assert self.controller if pair_info and self.finish_pairing: - self.context["pairing"] = True + self.pairing = True code = pair_info["pairing_code"] try: code = ensure_pin_format( @@ -552,7 +535,7 @@ class HomekitControllerFlowHandler(ConfigFlow, domain=DOMAIN): assert self.category placeholders = self.context["title_placeholders"] = { - "name": self.name, + "name": self.name or "Homekit Device", "category": formatted_category(self.category), } diff --git a/homeassistant/components/homekit_controller/connection.py b/homeassistant/components/homekit_controller/connection.py index 934e7e883ae..52f22bcc9f4 100644 --- a/homeassistant/components/homekit_controller/connection.py +++ b/homeassistant/components/homekit_controller/connection.py @@ -22,7 +22,7 @@ from aiohomekit.model import Accessories, Accessory, Transport from aiohomekit.model.characteristics import Characteristic, CharacteristicsTypes from aiohomekit.model.services import Service, ServicesTypes -from homeassistant.components.thread.dataset_store import async_get_preferred_dataset +from homeassistant.components.thread import async_get_preferred_dataset from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_VIA_DEVICE, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import CALLBACK_TYPE, CoreState, Event, HomeAssistant, callback @@ -433,7 +433,7 @@ class HKDevice: continue if self.config_entry.entry_id not in device.config_entries: - _LOGGER.info( + _LOGGER.warning( ( "Found 
candidate device for %s:aid:%s, but owned by a different" " config entry, skipping" @@ -443,7 +443,7 @@ class HKDevice: ) continue - _LOGGER.info( + _LOGGER.debug( "Migrating device identifiers for %s:aid:%s", self.unique_id, accessory.aid, @@ -904,7 +904,7 @@ class HKDevice: return if self._polling_lock_warned: - _LOGGER.info( + _LOGGER.warning( ( "HomeKit device no longer detecting back pressure - not" " skipping poll: %s" diff --git a/homeassistant/components/homekit_controller/const.py b/homeassistant/components/homekit_controller/const.py index aea5a6661ee..77deb07b3dd 100644 --- a/homeassistant/components/homekit_controller/const.py +++ b/homeassistant/components/homekit_controller/const.py @@ -50,6 +50,7 @@ HOMEKIT_ACCESSORY_DISPATCH = { ServicesTypes.FAN_V2: "fan", ServicesTypes.OCCUPANCY_SENSOR: "binary_sensor", ServicesTypes.TELEVISION: "media_player", + ServicesTypes.FAUCET: "switch", ServicesTypes.VALVE: "switch", ServicesTypes.CAMERA_RTP_STREAM_MANAGEMENT: "camera", ServicesTypes.DOORBELL: "event", diff --git a/homeassistant/components/homekit_controller/cover.py b/homeassistant/components/homekit_controller/cover.py index 0eebb72c988..d7480a40a93 100644 --- a/homeassistant/components/homekit_controller/cover.py +++ b/homeassistant/components/homekit_controller/cover.py @@ -2,11 +2,11 @@ from __future__ import annotations -from functools import cached_property from typing import Any from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import Service, ServicesTypes +from propcache import cached_property from homeassistant.components.cover import ( ATTR_POSITION, @@ -14,15 +14,10 @@ from homeassistant.components.cover import ( CoverDeviceClass, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - Platform, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -33,16 +28,24 @@ from .entity import HomeKitEntity STATE_STOPPED = "stopped" CURRENT_GARAGE_STATE_MAP = { - 0: STATE_OPEN, - 1: STATE_CLOSED, - 2: STATE_OPENING, - 3: STATE_CLOSING, + 0: CoverState.OPEN, + 1: CoverState.CLOSED, + 2: CoverState.OPENING, + 3: CoverState.CLOSING, 4: STATE_STOPPED, } -TARGET_GARAGE_STATE_MAP = {STATE_OPEN: 0, STATE_CLOSED: 1, STATE_STOPPED: 2} +TARGET_GARAGE_STATE_MAP = { + CoverState.OPEN: 0, + CoverState.CLOSED: 1, + STATE_STOPPED: 2, +} -CURRENT_WINDOW_STATE_MAP = {0: STATE_CLOSING, 1: STATE_OPENING, 2: STATE_STOPPED} +CURRENT_WINDOW_STATE_MAP = { + 0: CoverState.CLOSING, + 1: CoverState.OPENING, + 2: STATE_STOPPED, +} async def async_setup_entry( @@ -92,25 +95,25 @@ class HomeKitGarageDoorCover(HomeKitEntity, CoverEntity): @property def is_closed(self) -> bool: """Return true if cover is closed, else False.""" - return self._state == STATE_CLOSED + return self._state == CoverState.CLOSED @property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._state == STATE_CLOSING + return self._state == CoverState.CLOSING @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._state == STATE_OPENING + return self._state == CoverState.OPENING async def async_open_cover(self, **kwargs: Any) -> None: """Send open command.""" - await self.set_door_state(STATE_OPEN) + await self.set_door_state(CoverState.OPEN) async 
def async_close_cover(self, **kwargs: Any) -> None: """Send close command.""" - await self.set_door_state(STATE_CLOSED) + await self.set_door_state(CoverState.CLOSED) async def set_door_state(self, state: str) -> None: """Send state command.""" @@ -188,14 +191,14 @@ class HomeKitWindowCover(HomeKitEntity, CoverEntity): """Return if the cover is closing or not.""" value = self.service.value(CharacteristicsTypes.POSITION_STATE) state = CURRENT_WINDOW_STATE_MAP[value] - return state == STATE_CLOSING + return state == CoverState.CLOSING @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" value = self.service.value(CharacteristicsTypes.POSITION_STATE) state = CURRENT_WINDOW_STATE_MAP[value] - return state == STATE_OPENING + return state == CoverState.OPENING @property def is_horizontal_tilt(self) -> bool: diff --git a/homeassistant/components/homekit_controller/fan.py b/homeassistant/components/homekit_controller/fan.py index 93ebbba62b1..2ae534099ae 100644 --- a/homeassistant/components/homekit_controller/fan.py +++ b/homeassistant/components/homekit_controller/fan.py @@ -2,11 +2,11 @@ from __future__ import annotations -from functools import cached_property from typing import Any from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import Service, ServicesTypes +from propcache import cached_property from homeassistant.components.fan import ( DIRECTION_FORWARD, @@ -42,7 +42,6 @@ class BaseHomeKitFan(HomeKitEntity, FanEntity): # This must be set in subclasses to the name of a boolean characteristic # that controls whether the fan is on or off. on_characteristic: str - _enable_turn_on_off_backwards_compatibility = False @callback def _async_reconfigure(self) -> None: diff --git a/homeassistant/components/homekit_controller/humidifier.py b/homeassistant/components/homekit_controller/humidifier.py index cbfcfb6d3bb..f82baab5df7 100644 --- a/homeassistant/components/homekit_controller/humidifier.py +++ b/homeassistant/components/homekit_controller/humidifier.py @@ -2,11 +2,11 @@ from __future__ import annotations -from functools import cached_property from typing import Any from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import Service, ServicesTypes +from propcache import cached_property from homeassistant.components.humidifier import ( DEFAULT_MAX_HUMIDITY, diff --git a/homeassistant/components/homekit_controller/light.py b/homeassistant/components/homekit_controller/light.py index d5f20723ff1..26f10768aa0 100644 --- a/homeassistant/components/homekit_controller/light.py +++ b/homeassistant/components/homekit_controller/light.py @@ -2,16 +2,18 @@ from __future__ import annotations -from functools import cached_property from typing import Any from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import Service, ServicesTypes +from propcache import cached_property from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, ) @@ -53,11 +55,19 @@ async def async_setup_entry( class HomeKitLight(HomeKitEntity, LightEntity): """Representation of a Homekit light.""" + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + @callback def _async_reconfigure(self) -> None: """Reconfigure entity.""" self._async_clear_property_cache( - ("supported_features", 
"min_mireds", "max_mireds", "supported_color_modes") + ( + "supported_features", + "min_color_temp_kelvin", + "max_color_temp_kelvin", + "supported_color_modes", + ) ) super()._async_reconfigure() @@ -90,25 +100,35 @@ class HomeKitLight(HomeKitEntity, LightEntity): ) @cached_property - def min_mireds(self) -> int: - """Return minimum supported color temperature.""" + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" if not self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - return super().min_mireds - min_value = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].minValue - return int(min_value) if min_value else super().min_mireds + return DEFAULT_MAX_KELVIN + min_value_mireds = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].minValue + return ( + color_util.color_temperature_mired_to_kelvin(min_value_mireds) + if min_value_mireds + else DEFAULT_MAX_KELVIN + ) @cached_property - def max_mireds(self) -> int: - """Return the maximum color temperature.""" + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" if not self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - return super().max_mireds - max_value = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].maxValue - return int(max_value) if max_value else super().max_mireds + return DEFAULT_MIN_KELVIN + max_value_mireds = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].maxValue + return ( + color_util.color_temperature_mired_to_kelvin(max_value_mireds) + if max_value_mireds + else DEFAULT_MIN_KELVIN + ) @property - def color_temp(self) -> int: - """Return the color temperature.""" - return self.service.value(CharacteristicsTypes.COLOR_TEMPERATURE) + def color_temp_kelvin(self) -> int: + """Return the color temperature value in Kelvin.""" + return color_util.color_temperature_mired_to_kelvin( + self.service.value(CharacteristicsTypes.COLOR_TEMPERATURE) + ) @property def color_mode(self) -> str: @@ -153,7 +173,7 @@ class HomeKitLight(HomeKitEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the specified light on.""" hs_color = kwargs.get(ATTR_HS_COLOR) - temperature = kwargs.get(ATTR_COLOR_TEMP) + temperature_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) brightness = kwargs.get(ATTR_BRIGHTNESS) characteristics: dict[str, Any] = {} @@ -167,19 +187,18 @@ class HomeKitLight(HomeKitEntity, LightEntity): # does not support both, temperature will win. This is not # expected to happen in the UI, but it is possible via a manual # service call. - if temperature is not None: + if temperature_kelvin is not None: if self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - characteristics[CharacteristicsTypes.COLOR_TEMPERATURE] = int( - temperature + characteristics[CharacteristicsTypes.COLOR_TEMPERATURE] = ( + color_util.color_temperature_kelvin_to_mired(temperature_kelvin) ) + elif hs_color is None: # Some HomeKit devices implement color temperature with HS # since the spec "technically" does not permit the COLOR_TEMPERATURE # characteristic and the HUE and SATURATION characteristics to be # present at the same time. 
- hue_sat = color_util.color_temperature_to_hs( - color_util.color_temperature_mired_to_kelvin(temperature) - ) + hue_sat = color_util.color_temperature_to_hs(temperature_kelvin) characteristics[CharacteristicsTypes.HUE] = hue_sat[0] characteristics[CharacteristicsTypes.SATURATION] = hue_sat[1] diff --git a/homeassistant/components/homekit_controller/lock.py b/homeassistant/components/homekit_controller/lock.py index 8e1bcd424d4..98974c4a514 100644 --- a/homeassistant/components/homekit_controller/lock.py +++ b/homeassistant/components/homekit_controller/lock.py @@ -7,15 +7,9 @@ from typing import Any from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import Service, ServicesTypes -from homeassistant.components.lock import STATE_JAMMED, LockEntity +from homeassistant.components.lock import LockEntity, LockState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_BATTERY_LEVEL, - STATE_LOCKED, - STATE_UNKNOWN, - STATE_UNLOCKED, - Platform, -) +from homeassistant.const import ATTR_BATTERY_LEVEL, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -24,13 +18,13 @@ from .connection import HKDevice from .entity import HomeKitEntity CURRENT_STATE_MAP = { - 0: STATE_UNLOCKED, - 1: STATE_LOCKED, - 2: STATE_JAMMED, + 0: LockState.UNLOCKED, + 1: LockState.LOCKED, + 2: LockState.JAMMED, 3: STATE_UNKNOWN, } -TARGET_STATE_MAP = {STATE_UNLOCKED: 0, STATE_LOCKED: 1} +TARGET_STATE_MAP = {LockState.UNLOCKED: 0, LockState.LOCKED: 1} REVERSED_TARGET_STATE_MAP = {v: k for k, v in TARGET_STATE_MAP.items()} @@ -76,7 +70,7 @@ class HomeKitLock(HomeKitEntity, LockEntity): value = self.service.value(CharacteristicsTypes.LOCK_MECHANISM_CURRENT_STATE) if CURRENT_STATE_MAP[value] == STATE_UNKNOWN: return None - return CURRENT_STATE_MAP[value] == STATE_LOCKED + return CURRENT_STATE_MAP[value] == LockState.LOCKED @property def is_locking(self) -> bool: @@ -88,8 +82,8 @@ class HomeKitLock(HomeKitEntity, LockEntity): CharacteristicsTypes.LOCK_MECHANISM_TARGET_STATE ) return ( - CURRENT_STATE_MAP[current_value] == STATE_UNLOCKED - and REVERSED_TARGET_STATE_MAP.get(target_value) == STATE_LOCKED + CURRENT_STATE_MAP[current_value] == LockState.UNLOCKED + and REVERSED_TARGET_STATE_MAP.get(target_value) == LockState.LOCKED ) @property @@ -102,25 +96,25 @@ class HomeKitLock(HomeKitEntity, LockEntity): CharacteristicsTypes.LOCK_MECHANISM_TARGET_STATE ) return ( - CURRENT_STATE_MAP[current_value] == STATE_LOCKED - and REVERSED_TARGET_STATE_MAP.get(target_value) == STATE_UNLOCKED + CURRENT_STATE_MAP[current_value] == LockState.LOCKED + and REVERSED_TARGET_STATE_MAP.get(target_value) == LockState.UNLOCKED ) @property def is_jammed(self) -> bool: """Return true if device is jammed.""" value = self.service.value(CharacteristicsTypes.LOCK_MECHANISM_CURRENT_STATE) - return CURRENT_STATE_MAP[value] == STATE_JAMMED + return CURRENT_STATE_MAP[value] == LockState.JAMMED async def async_lock(self, **kwargs: Any) -> None: """Lock the device.""" - await self._set_lock_state(STATE_LOCKED) + await self._set_lock_state(LockState.LOCKED) async def async_unlock(self, **kwargs: Any) -> None: """Unlock the device.""" - await self._set_lock_state(STATE_UNLOCKED) + await self._set_lock_state(LockState.UNLOCKED) - async def _set_lock_state(self, state: str) -> None: + async def _set_lock_state(self, state: LockState) -> None: """Send state command.""" await 
self.async_put_characteristics( {CharacteristicsTypes.LOCK_MECHANISM_TARGET_STATE: TARGET_STATE_MAP[state]} diff --git a/homeassistant/components/homekit_controller/manifest.json b/homeassistant/components/homekit_controller/manifest.json index b2b215a98b9..b7c82b9fd51 100644 --- a/homeassistant/components/homekit_controller/manifest.json +++ b/homeassistant/components/homekit_controller/manifest.json @@ -14,6 +14,6 @@ "documentation": "https://www.home-assistant.io/integrations/homekit_controller", "iot_class": "local_push", "loggers": ["aiohomekit", "commentjson"], - "requirements": ["aiohomekit==3.2.3"], + "requirements": ["aiohomekit==3.2.7"], "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."] } diff --git a/homeassistant/components/homekit_controller/switch.py b/homeassistant/components/homekit_controller/switch.py index 9fa4782e061..5abed2a5c79 100644 --- a/homeassistant/components/homekit_controller/switch.py +++ b/homeassistant/components/homekit_controller/switch.py @@ -102,6 +102,27 @@ class HomeKitSwitch(HomeKitEntity, SwitchEntity): return None +class HomeKitFaucet(HomeKitEntity, SwitchEntity): + """Representation of a Homekit faucet.""" + + def get_characteristic_types(self) -> list[str]: + """Define the homekit characteristics the entity cares about.""" + return [CharacteristicsTypes.ACTIVE] + + @property + def is_on(self) -> bool: + """Return true if device is on.""" + return self.service.value(CharacteristicsTypes.ACTIVE) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the specified faucet on.""" + await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: True}) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the specified faucet off.""" + await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: False}) + + class HomeKitValve(HomeKitEntity, SwitchEntity): """Represents a valve in an irrigation system.""" @@ -192,9 +213,10 @@ class DeclarativeCharacteristicSwitch(CharacteristicEntity, SwitchEntity): ) -ENTITY_TYPES: dict[str, type[HomeKitSwitch | HomeKitValve]] = { +ENTITY_TYPES: dict[str, type[HomeKitSwitch | HomeKitFaucet | HomeKitValve]] = { ServicesTypes.SWITCH: HomeKitSwitch, ServicesTypes.OUTLET: HomeKitSwitch, + ServicesTypes.FAUCET: HomeKitFaucet, ServicesTypes.VALVE: HomeKitValve, } @@ -213,7 +235,7 @@ async def async_setup_entry( if not (entity_class := ENTITY_TYPES.get(service.type)): return False info = {"aid": service.accessory.aid, "iid": service.iid} - entity: HomeKitSwitch | HomeKitValve = entity_class(conn, info) + entity: HomeKitSwitch | HomeKitFaucet | HomeKitValve = entity_class(conn, info) conn.async_migrate_unique_id( entity.old_unique_id, entity.unique_id, Platform.SWITCH ) diff --git a/homeassistant/components/homematic/climate.py b/homeassistant/components/homematic/climate.py index 2be28487cbb..6e16e16ba99 100644 --- a/homeassistant/components/homematic/climate.py +++ b/homeassistant/components/homematic/climate.py @@ -63,7 +63,6 @@ class HMThermostat(HMDevice, ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/homematic/icons.json b/homeassistant/components/homematic/icons.json index 998c9a385ba..9e58bbe3a90 100644 --- a/homeassistant/components/homematic/icons.json +++ b/homeassistant/components/homematic/icons.json @@ -1,10 +1,22 @@ { "services": { - "virtualkey": "mdi:keyboard", - "set_variable_value": 
"mdi:console", - "set_device_value": "mdi:television", - "reconnect": "mdi:wifi-refresh", - "set_install_mode": "mdi:cog", - "put_paramset": "mdi:cog" + "virtualkey": { + "service": "mdi:keyboard" + }, + "set_variable_value": { + "service": "mdi:console" + }, + "set_device_value": { + "service": "mdi:television" + }, + "reconnect": { + "service": "mdi:wifi-refresh" + }, + "set_install_mode": { + "service": "mdi:cog" + }, + "put_paramset": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/homematic/light.py b/homeassistant/components/homematic/light.py index b05cc6a46d6..838cdc9c3c3 100644 --- a/homeassistant/components/homematic/light.py +++ b/homeassistant/components/homematic/light.py @@ -6,7 +6,7 @@ from typing import Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, @@ -17,10 +17,14 @@ from homeassistant.components.light import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util import color as color_util from .const import ATTR_DISCOVER_DEVICES from .entity import HMDevice +MAX_MIREDS = 500 # 2000 K +MIN_MIREDS = 153 # 6500 K + def setup_platform( hass: HomeAssistant, @@ -43,6 +47,9 @@ def setup_platform( class HMLight(HMDevice, LightEntity): """Representation of a Homematic light.""" + _attr_min_color_temp_kelvin = 2000 # 500 Mireds + _attr_max_color_temp_kelvin = 6500 # 153 Mireds + @property def brightness(self): """Return the brightness of this light between 0..255.""" @@ -99,12 +106,14 @@ class HMLight(HMDevice, LightEntity): return hue * 360.0, sat * 100.0 @property - def color_temp(self): - """Return the color temp in mireds [int].""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" if ColorMode.COLOR_TEMP not in self.supported_color_modes: return None hm_color_temp = self._hmdevice.get_color_temp(self._channel) - return self.max_mireds - (self.max_mireds - self.min_mireds) * hm_color_temp + return color_util.color_temperature_mired_to_kelvin( + MAX_MIREDS - (MAX_MIREDS - MIN_MIREDS) * hm_color_temp + ) @property def effect_list(self): @@ -130,7 +139,7 @@ class HMLight(HMDevice, LightEntity): self._hmdevice.set_level(percent_bright, self._channel) elif ( ATTR_HS_COLOR not in kwargs - and ATTR_COLOR_TEMP not in kwargs + and ATTR_COLOR_TEMP_KELVIN not in kwargs and ATTR_EFFECT not in kwargs ): self._hmdevice.on(self._channel) @@ -141,10 +150,11 @@ class HMLight(HMDevice, LightEntity): saturation=kwargs[ATTR_HS_COLOR][1] / 100.0, channel=self._channel, ) - if ATTR_COLOR_TEMP in kwargs: - hm_temp = (self.max_mireds - kwargs[ATTR_COLOR_TEMP]) / ( - self.max_mireds - self.min_mireds + if ATTR_COLOR_TEMP_KELVIN in kwargs: + mireds = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] ) + hm_temp = (MAX_MIREDS - mireds) / (MAX_MIREDS - MIN_MIREDS) self._hmdevice.set_color_temp(hm_temp) if ATTR_EFFECT in kwargs: self._hmdevice.set_effect(kwargs[ATTR_EFFECT]) diff --git a/homeassistant/components/homematic/manifest.json b/homeassistant/components/homematic/manifest.json index 9c67a5da0b2..749bd7b44e8 100644 --- a/homeassistant/components/homematic/manifest.json +++ b/homeassistant/components/homematic/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/homematic", "iot_class": "local_push", 
"loggers": ["pyhomematic"], + "quality_scale": "legacy", "requirements": ["pyhomematic==0.1.77"] } diff --git a/homeassistant/components/homematicip_cloud/__init__.py b/homeassistant/components/homematicip_cloud/__init__.py index 08002bc551a..c59a9d788b3 100644 --- a/homeassistant/components/homematicip_cloud/__init__.py +++ b/homeassistant/components/homematicip_cloud/__init__.py @@ -21,8 +21,7 @@ from .const import ( HMIPC_HAPID, HMIPC_NAME, ) -from .generic_entity import HomematicipGenericEntity # noqa: F401 -from .hap import HomematicipAuth, HomematicipHAP # noqa: F401 +from .hap import HomematicipHAP from .services import async_setup_services, async_unload_services CONFIG_SCHEMA = vol.Schema( diff --git a/homeassistant/components/homematicip_cloud/alarm_control_panel.py b/homeassistant/components/homematicip_cloud/alarm_control_panel.py index 1f294a8cade..4241316c2a4 100644 --- a/homeassistant/components/homematicip_cloud/alarm_control_panel.py +++ b/homeassistant/components/homematicip_cloud/alarm_control_panel.py @@ -9,19 +9,14 @@ from homematicip.functionalHomes import SecurityAndAlarmHome from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN as HMIPC_DOMAIN +from .const import DOMAIN from .hap import AsyncHome, HomematicipHAP _LOGGER = logging.getLogger(__name__) @@ -35,7 +30,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP alrm control panel from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] async_add_entities([HomematicipAlarmControlPanelEntity(hap)]) @@ -52,35 +47,34 @@ class HomematicipAlarmControlPanelEntity(AlarmControlPanelEntity): def __init__(self, hap: HomematicipHAP) -> None: """Initialize the alarm control panel.""" self._home: AsyncHome = hap.home - _LOGGER.info("Setting up %s", self.name) @property def device_info(self) -> DeviceInfo: """Return device specific attributes.""" return DeviceInfo( - identifiers={(HMIPC_DOMAIN, f"ACP {self._home.id}")}, + identifiers={(DOMAIN, f"ACP {self._home.id}")}, manufacturer="eQ-3", model=CONST_ALARM_CONTROL_PANEL_NAME, name=self.name, - via_device=(HMIPC_DOMAIN, self._home.id), + via_device=(DOMAIN, self._home.id), ) @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the alarm control panel.""" # check for triggered alarm if self._security_and_alarm.alarmActive: - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED activation_state = self._home.get_security_zones_activation() # check arm_away if activation_state == (True, True): - return STATE_ALARM_ARMED_AWAY + return AlarmControlPanelState.ARMED_AWAY # check arm_home if activation_state == (False, True): - return STATE_ALARM_ARMED_HOME + return AlarmControlPanelState.ARMED_HOME - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED @property def _security_and_alarm(self) -> SecurityAndAlarmHome: diff --git a/homeassistant/components/homematicip_cloud/binary_sensor.py 
b/homeassistant/components/homematicip_cloud/binary_sensor.py index 29d8576f060..38590e4505b 100644 --- a/homeassistant/components/homematicip_cloud/binary_sensor.py +++ b/homeassistant/components/homematicip_cloud/binary_sensor.py @@ -39,7 +39,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity +from .const import DOMAIN +from .entity import HomematicipGenericEntity from .hap import HomematicipHAP ATTR_ACCELERATION_SENSOR_MODE = "acceleration_sensor_mode" @@ -78,7 +79,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP Cloud binary sensor from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] entities: list[HomematicipGenericEntity] = [HomematicipCloudConnectionSensor(hap)] for device in hap.home.devices: if isinstance(device, AsyncAccelerationSensor): @@ -168,7 +169,7 @@ class HomematicipCloudConnectionSensor(HomematicipGenericEntity, BinarySensorEnt return DeviceInfo( identifiers={ # Serial numbers of Homematic IP device - (HMIPC_DOMAIN, self._home.id) + (DOMAIN, self._home.id) } ) diff --git a/homeassistant/components/homematicip_cloud/button.py b/homeassistant/components/homematicip_cloud/button.py index c2707f68a89..244be47d7f6 100644 --- a/homeassistant/components/homematicip_cloud/button.py +++ b/homeassistant/components/homematicip_cloud/button.py @@ -9,7 +9,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity +from .const import DOMAIN +from .entity import HomematicipGenericEntity from .hap import HomematicipHAP @@ -19,7 +20,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP button from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] async_add_entities( HomematicipGarageDoorControllerButton(hap, device) diff --git a/homeassistant/components/homematicip_cloud/climate.py b/homeassistant/components/homematicip_cloud/climate.py index dd89efed1c9..e7132fac83c 100644 --- a/homeassistant/components/homematicip_cloud/climate.py +++ b/homeassistant/components/homematicip_cloud/climate.py @@ -31,7 +31,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity +from .const import DOMAIN +from .entity import HomematicipGenericEntity from .hap import HomematicipHAP HEATING_PROFILES = {"PROFILE_1": 0, "PROFILE_2": 1, "PROFILE_3": 2} @@ -59,7 +60,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP climate from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] async_add_entities( HomematicipHeatingGroup(hap, device) @@ -80,7 +81,6 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, hap: HomematicipHAP, device: AsyncHeatingGroup) -> None: """Initialize heating group.""" @@ -94,11 +94,11 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): def device_info(self) -> DeviceInfo: """Return device specific attributes.""" return DeviceInfo( - identifiers={(HMIPC_DOMAIN, self._device.id)}, + identifiers={(DOMAIN, self._device.id)}, manufacturer="eQ-3", model=self._device.modelType, name=self._device.label, - via_device=(HMIPC_DOMAIN, self._device.homeId), + via_device=(DOMAIN, self._device.homeId), ) @property diff --git a/homeassistant/components/homematicip_cloud/config_flow.py b/homeassistant/components/homematicip_cloud/config_flow.py index c2277e16c79..9a9e1cb6778 100644 --- a/homeassistant/components/homematicip_cloud/config_flow.py +++ b/homeassistant/components/homematicip_cloud/config_flow.py @@ -43,10 +43,10 @@ class HomematicipCloudFlowHandler(ConfigFlow, domain=DOMAIN): self.auth = HomematicipAuth(self.hass, user_input) connected = await self.auth.async_setup() if connected: - _LOGGER.info("Connection to HomematicIP Cloud established") + _LOGGER.debug("Connection to HomematicIP Cloud established") return await self.async_step_link() - _LOGGER.info("Connection to HomematicIP Cloud failed") + _LOGGER.debug("Connection to HomematicIP Cloud failed") errors["base"] = "invalid_sgtin_or_pin" return self.async_show_form( @@ -69,7 +69,7 @@ class HomematicipCloudFlowHandler(ConfigFlow, domain=DOMAIN): if pressed: authtoken = await self.auth.async_register() if authtoken: - _LOGGER.info("Write config entry for HomematicIP Cloud") + _LOGGER.debug("Write config entry for HomematicIP Cloud") return self.async_create_entry( title=self.auth.config[HMIPC_HAPID], data={ @@ -83,16 +83,16 @@ class HomematicipCloudFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="link", errors=errors) - async def async_step_import(self, import_info: dict[str, str]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, str]) -> ConfigFlowResult: """Import a new access point as a config entry.""" - hapid = import_info[HMIPC_HAPID].replace("-", "").upper() - authtoken = import_info[HMIPC_AUTHTOKEN] - name = import_info[HMIPC_NAME] + hapid = import_data[HMIPC_HAPID].replace("-", "").upper() + authtoken = import_data[HMIPC_AUTHTOKEN] + name = import_data[HMIPC_NAME] await self.async_set_unique_id(hapid) self._abort_if_unique_id_configured() - _LOGGER.info("Imported authentication for %s", hapid) + _LOGGER.debug("Imported authentication for %s", hapid) return self.async_create_entry( title=hapid, data={HMIPC_AUTHTOKEN: authtoken, HMIPC_HAPID: hapid, HMIPC_NAME: name}, diff --git 
a/homeassistant/components/homematicip_cloud/cover.py b/homeassistant/components/homematicip_cloud/cover.py index b0cff8b6a10..1db536afd4f 100644 --- a/homeassistant/components/homematicip_cloud/cover.py +++ b/homeassistant/components/homematicip_cloud/cover.py @@ -25,7 +25,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity +from .const import DOMAIN +from .entity import HomematicipGenericEntity from .hap import HomematicipHAP HMIP_COVER_OPEN = 0 @@ -40,7 +41,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP cover from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] entities: list[HomematicipGenericEntity] = [ HomematicipCoverShutterGroup(hap, group) for group in hap.home.groups diff --git a/homeassistant/components/homematicip_cloud/generic_entity.py b/homeassistant/components/homematicip_cloud/entity.py similarity index 97% rename from homeassistant/components/homematicip_cloud/generic_entity.py rename to homeassistant/components/homematicip_cloud/entity.py index 163f3eec75e..82d682b9910 100644 --- a/homeassistant/components/homematicip_cloud/generic_entity.py +++ b/homeassistant/components/homematicip_cloud/entity.py @@ -15,7 +15,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity -from .const import DOMAIN as HMIPC_DOMAIN +from .const import DOMAIN from .hap import AsyncHome, HomematicipHAP _LOGGER = logging.getLogger(__name__) @@ -95,7 +95,6 @@ class HomematicipGenericEntity(Entity): self.functional_channel = self.get_current_channel() # Marker showing that the HmIP device has been removed. self.hmip_device_removed = False - _LOGGER.info("Setting up %s (%s)", self.name, self._device.modelType) @property def device_info(self) -> DeviceInfo | None: @@ -105,14 +104,14 @@ return DeviceInfo( identifiers={ # Serial numbers of Homematic IP device - (HMIPC_DOMAIN, self._device.id) + (DOMAIN, self._device.id) }, manufacturer=self._device.oem, model=self._device.modelType, name=self._device.label, sw_version=self._device.firmwareVersion, # Link to the homematic ip access point.
- via_device=(HMIPC_DOMAIN, self._device.homeId), + via_device=(DOMAIN, self._device.homeId), ) return None diff --git a/homeassistant/components/homematicip_cloud/hap.py b/homeassistant/components/homematicip_cloud/hap.py index 2384426dc82..db7fcb348c8 100644 --- a/homeassistant/components/homematicip_cloud/hap.py +++ b/homeassistant/components/homematicip_cloud/hap.py @@ -104,7 +104,7 @@ class HomematicipHAP: _LOGGER.error("Error connecting with HomematicIP Cloud: %s", err) return False - _LOGGER.info( + _LOGGER.debug( "Connected to HomematicIP with HAP %s", self.config_entry.unique_id ) @@ -220,7 +220,7 @@ class HomematicipHAP: if self._retry_task is not None: self._retry_task.cancel() await self.home.disable_events() - _LOGGER.info("Closed connection to HomematicIP cloud server") + _LOGGER.debug("Closed connection to HomematicIP cloud server") await self.hass.config_entries.async_unload_platforms( self.config_entry, PLATFORMS ) diff --git a/homeassistant/components/homematicip_cloud/helpers.py b/homeassistant/components/homematicip_cloud/helpers.py index 5b7f98ad884..9959b993a6c 100644 --- a/homeassistant/components/homematicip_cloud/helpers.py +++ b/homeassistant/components/homematicip_cloud/helpers.py @@ -13,7 +13,7 @@ from homematicip.device import Device from homeassistant.exceptions import HomeAssistantError -from . import HomematicipGenericEntity +from .entity import HomematicipGenericEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/homematicip_cloud/icons.json b/homeassistant/components/homematicip_cloud/icons.json index 73c60ea8cdd..53a39d8213c 100644 --- a/homeassistant/components/homematicip_cloud/icons.json +++ b/homeassistant/components/homematicip_cloud/icons.json @@ -1,13 +1,31 @@ { "services": { - "activate_eco_mode_with_duration": "mdi:leaf", - "activate_eco_mode_with_period": "mdi:leaf", - "activate_vacation": "mdi:compass", - "deactivate_eco_mode": "mdi:leaf-off", - "deactivate_vacation": "mdi:compass-off", - "set_active_climate_profile": "mdi:home-thermometer", - "dump_hap_config": "mdi:database-export", - "reset_energy_counter": "mdi:reload", - "set_home_cooling_mode": "mdi:snowflake" + "activate_eco_mode_with_duration": { + "service": "mdi:leaf" + }, + "activate_eco_mode_with_period": { + "service": "mdi:leaf" + }, + "activate_vacation": { + "service": "mdi:compass" + }, + "deactivate_eco_mode": { + "service": "mdi:leaf-off" + }, + "deactivate_vacation": { + "service": "mdi:compass-off" + }, + "set_active_climate_profile": { + "service": "mdi:home-thermometer" + }, + "dump_hap_config": { + "service": "mdi:database-export" + }, + "reset_energy_counter": { + "service": "mdi:reload" + }, + "set_home_cooling_mode": { + "service": "mdi:snowflake" + } } } diff --git a/homeassistant/components/homematicip_cloud/light.py b/homeassistant/components/homematicip_cloud/light.py index 17daafc5896..cf051103a10 100644 --- a/homeassistant/components/homematicip_cloud/light.py +++ b/homeassistant/components/homematicip_cloud/light.py @@ -14,12 +14,14 @@ from homematicip.aio.device import ( AsyncPluggableDimmer, AsyncWiredDimmer3, ) -from homematicip.base.enums import RGBColorState +from homematicip.base.enums import OpticalSignalBehaviour, RGBColorState from homematicip.base.functionalChannels import NotificationLightChannel +from packaging.version import Version from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_NAME, + ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, ColorMode, @@ -30,7 +32,8 @@ from homeassistant.config_entries 
import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity +from .const import DOMAIN +from .entity import HomematicipGenericEntity from .hap import HomematicipHAP @@ -40,21 +43,28 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP Cloud lights from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] entities: list[HomematicipGenericEntity] = [] for device in hap.home.devices: if isinstance(device, AsyncBrandSwitchMeasuring): entities.append(HomematicipLightMeasuring(hap, device)) elif isinstance(device, AsyncBrandSwitchNotificationLight): + device_version = Version(device.firmwareVersion) entities.append(HomematicipLight(hap, device)) + + entity_class = ( + HomematicipNotificationLightV2 + if device_version > Version("2.0.0") + else HomematicipNotificationLight + ) + entities.append( - HomematicipNotificationLight(hap, device, device.topLightChannelIndex) + entity_class(hap, device, device.topLightChannelIndex, "Top") ) entities.append( - HomematicipNotificationLight( - hap, device, device.bottomLightChannelIndex - ) + entity_class(hap, device, device.bottomLightChannelIndex, "Bottom") ) + elif isinstance(device, (AsyncWiredDimmer3, AsyncDinRailDimmer3)): entities.extend( HomematicipMultiDimmer(hap, device, channel=channel) @@ -157,16 +167,9 @@ class HomematicipNotificationLight(HomematicipGenericEntity, LightEntity): _attr_supported_color_modes = {ColorMode.HS} _attr_supported_features = LightEntityFeature.TRANSITION - def __init__(self, hap: HomematicipHAP, device, channel: int) -> None: + def __init__(self, hap: HomematicipHAP, device, channel: int, post: str) -> None: """Initialize the notification light entity.""" - if channel == 2: - super().__init__( - hap, device, post="Top", channel=channel, is_multi_channel=True - ) - else: - super().__init__( - hap, device, post="Bottom", channel=channel, is_multi_channel=True - ) + super().__init__(hap, device, post=post, channel=channel, is_multi_channel=True) self._color_switcher: dict[str, tuple[float, float]] = { RGBColorState.WHITE: (0.0, 0.0), @@ -258,6 +261,66 @@ class HomematicipNotificationLight(HomematicipGenericEntity, LightEntity): ) +class HomematicipNotificationLightV2(HomematicipNotificationLight, LightEntity): + """Representation of HomematicIP Cloud notification light.""" + + _effect_list = [ + OpticalSignalBehaviour.BILLOW_MIDDLE, + OpticalSignalBehaviour.BLINKING_MIDDLE, + OpticalSignalBehaviour.FLASH_MIDDLE, + OpticalSignalBehaviour.OFF, + OpticalSignalBehaviour.ON, + ] + + def __init__(self, hap: HomematicipHAP, device, channel: int, post: str) -> None: + """Initialize the notification light entity.""" + super().__init__(hap, device, post=post, channel=channel) + self._attr_supported_features |= LightEntityFeature.EFFECT + + @property + def effect_list(self) -> list[str] | None: + """Return the list of supported effects.""" + return self._effect_list + + @property + def effect(self) -> str | None: + """Return the current effect.""" + return self._func_channel.opticalSignalBehaviour + + @property + def is_on(self) -> bool: + """Return true if light is on.""" + return self._func_channel.on + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the light on.""" + # Use hs_color from kwargs, + # if not applicable use current hs_color. 
+ hs_color = kwargs.get(ATTR_HS_COLOR, self.hs_color) + simple_rgb_color = _convert_color(hs_color) + + # If no kwargs, use default value. + brightness = 255 + if ATTR_BRIGHTNESS in kwargs: + brightness = kwargs[ATTR_BRIGHTNESS] + + # Minimum brightness is 10, otherwise the led is disabled + brightness = max(10, brightness) + dim_level = round(brightness / 255.0, 2) + + effect = self.effect + if ATTR_EFFECT in kwargs: + effect = kwargs[ATTR_EFFECT] + + await self._func_channel.async_set_optical_signal( + opticalSignalBehaviour=effect, rgb=simple_rgb_color, dimLevel=dim_level + ) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the light off.""" + await self._func_channel.async_turn_off() + + def _convert_color(color: tuple) -> RGBColorState: """Convert the given color to the reduced RGBColorState color. diff --git a/homeassistant/components/homematicip_cloud/lock.py b/homeassistant/components/homematicip_cloud/lock.py index cf98828598f..b00f42fc844 100644 --- a/homeassistant/components/homematicip_cloud/lock.py +++ b/homeassistant/components/homematicip_cloud/lock.py @@ -13,7 +13,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity +from .const import DOMAIN +from .entity import HomematicipGenericEntity from .helpers import handle_errors _LOGGER = logging.getLogger(__name__) @@ -39,7 +40,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP locks from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] async_add_entities( HomematicipDoorLockDrive(hap, device) diff --git a/homeassistant/components/homematicip_cloud/manifest.json b/homeassistant/components/homematicip_cloud/manifest.json index b3e7eb9a72a..a44d0586952 100644 --- a/homeassistant/components/homematicip_cloud/manifest.json +++ b/homeassistant/components/homematicip_cloud/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud", "iot_class": "cloud_push", "loggers": ["homematicip"], - "quality_scale": "silver", - "requirements": ["homematicip==1.1.2"] + "requirements": ["homematicip==1.1.5"] } diff --git a/homeassistant/components/homematicip_cloud/sensor.py b/homeassistant/components/homematicip_cloud/sensor.py index 1f76c6cce1f..c44d280c190 100644 --- a/homeassistant/components/homematicip_cloud/sensor.py +++ b/homeassistant/components/homematicip_cloud/sensor.py @@ -8,6 +8,9 @@ from typing import Any from homematicip.aio.device import ( AsyncBrandSwitchMeasuring, AsyncEnergySensorsInterface, + AsyncFloorTerminalBlock6, + AsyncFloorTerminalBlock10, + AsyncFloorTerminalBlock12, AsyncFullFlushSwitchMeasuring, AsyncHeatingThermostat, AsyncHeatingThermostatCompact, @@ -28,9 +31,13 @@ from homematicip.aio.device import ( AsyncWeatherSensor, AsyncWeatherSensorPlus, AsyncWeatherSensorPro, + AsyncWiredFloorTerminalBlock12, ) from homematicip.base.enums import FunctionalChannelType, ValveState -from homematicip.base.functionalChannels import FunctionalChannel +from homematicip.base.functionalChannels import ( + FloorTerminalBlockMechanicChannel, + FunctionalChannel, +) from homeassistant.components.sensor import ( SensorDeviceClass, @@ -53,7 +60,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import 
AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity +from .const import DOMAIN +from .entity import HomematicipGenericEntity from .hap import HomematicipHAP from .helpers import get_channels_from_device @@ -85,13 +93,13 @@ ILLUMINATION_DEVICE_ATTRIBUTES = { } -async def async_setup_entry( +async def async_setup_entry( # noqa: C901 hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP Cloud sensors from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] entities: list[HomematicipGenericEntity] = [] for device in hap.home.devices: if isinstance(device, AsyncHomeControlAccessPoint): @@ -183,10 +191,74 @@ async def async_setup_entry( if ch.currentPowerConsumption is not None: entities.append(HmipEsiLedCurrentPowerConsumption(hap, device)) entities.append(HmipEsiLedEnergyCounterHighTariff(hap, device)) + if isinstance( + device, + ( + AsyncFloorTerminalBlock6, + AsyncFloorTerminalBlock10, + AsyncFloorTerminalBlock12, + AsyncWiredFloorTerminalBlock12, + ), + ): + entities.extend( + HomematicipFloorTerminalBlockMechanicChannelValve( + hap, device, channel=channel.index + ) + for channel in device.functionalChannels + if isinstance(channel, FloorTerminalBlockMechanicChannel) + and getattr(channel, "valvePosition", None) is not None + ) async_add_entities(entities) +class HomematicipFloorTerminalBlockMechanicChannelValve( + HomematicipGenericEntity, SensorEntity +): + """Representation of the HomematicIP floor terminal block.""" + + _attr_native_unit_of_measurement = PERCENTAGE + _attr_state_class = SensorStateClass.MEASUREMENT + + def __init__( + self, hap: HomematicipHAP, device, channel, is_multi_channel=True + ) -> None: + """Initialize floor terminal block 12 device.""" + super().__init__( + hap, + device, + channel=channel, + is_multi_channel=is_multi_channel, + post="Valve Position", + ) + + @property + def icon(self) -> str | None: + """Return the icon.""" + if super().icon: + return super().icon + channel = next( + channel + for channel in self._device.functionalChannels + if channel.index == self._channel + ) + if channel.valveState != ValveState.ADAPTION_DONE: + return "mdi:alert" + return "mdi:heating-coil" + + @property + def native_value(self) -> int | None: + """Return the state of the floor terminal block mechanical channel valve position.""" + channel = next( + channel + for channel in self._device.functionalChannels + if channel.index == self._channel + ) + if channel.valveState != ValveState.ADAPTION_DONE: + return None + return round(channel.valvePosition * 100) + + class HomematicipAccesspointDutyCycle(HomematicipGenericEntity, SensorEntity): """Representation of the HomeMaticIP access point.""" @@ -348,6 +420,7 @@ class HomematicipWindspeedSensor(HomematicipGenericEntity, SensorEntity): _attr_device_class = SensorDeviceClass.WIND_SPEED _attr_native_unit_of_measurement = UnitOfSpeed.KILOMETERS_PER_HOUR + _attr_state_class = SensorStateClass.MEASUREMENT def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize the windspeed sensor.""" @@ -379,6 +452,7 @@ class HomematicipTodayRainSensor(HomematicipGenericEntity, SensorEntity): _attr_device_class = SensorDeviceClass.PRECIPITATION _attr_native_unit_of_measurement = UnitOfPrecipitationDepth.MILLIMETERS + _attr_state_class = SensorStateClass.MEASUREMENT def __init__(self, hap: HomematicipHAP,
device) -> None: """Initialize the device.""" diff --git a/homeassistant/components/homematicip_cloud/services.py b/homeassistant/components/homematicip_cloud/services.py index 4c04e4a858b..69765ccc601 100644 --- a/homeassistant/components/homematicip_cloud/services.py +++ b/homeassistant/components/homematicip_cloud/services.py @@ -21,7 +21,7 @@ from homeassistant.helpers.service import ( verify_domain_control, ) -from .const import DOMAIN as HMIPC_DOMAIN +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -122,10 +122,10 @@ SCHEMA_SET_HOME_COOLING_MODE = vol.Schema( async def async_setup_services(hass: HomeAssistant) -> None: """Set up the HomematicIP Cloud services.""" - if hass.services.async_services_for_domain(HMIPC_DOMAIN): + if hass.services.async_services_for_domain(DOMAIN): return - @verify_domain_control(hass, HMIPC_DOMAIN) + @verify_domain_control(hass, DOMAIN) async def async_call_hmipc_service(service: ServiceCall) -> None: """Call correct HomematicIP Cloud service.""" service_name = service.service @@ -150,42 +150,42 @@ async def async_setup_services(hass: HomeAssistant) -> None: await _async_set_home_cooling_mode(hass, service) hass.services.async_register( - domain=HMIPC_DOMAIN, + domain=DOMAIN, service=SERVICE_ACTIVATE_ECO_MODE_WITH_DURATION, service_func=async_call_hmipc_service, schema=SCHEMA_ACTIVATE_ECO_MODE_WITH_DURATION, ) hass.services.async_register( - domain=HMIPC_DOMAIN, + domain=DOMAIN, service=SERVICE_ACTIVATE_ECO_MODE_WITH_PERIOD, service_func=async_call_hmipc_service, schema=SCHEMA_ACTIVATE_ECO_MODE_WITH_PERIOD, ) hass.services.async_register( - domain=HMIPC_DOMAIN, + domain=DOMAIN, service=SERVICE_ACTIVATE_VACATION, service_func=async_call_hmipc_service, schema=SCHEMA_ACTIVATE_VACATION, ) hass.services.async_register( - domain=HMIPC_DOMAIN, + domain=DOMAIN, service=SERVICE_DEACTIVATE_ECO_MODE, service_func=async_call_hmipc_service, schema=SCHEMA_DEACTIVATE_ECO_MODE, ) hass.services.async_register( - domain=HMIPC_DOMAIN, + domain=DOMAIN, service=SERVICE_DEACTIVATE_VACATION, service_func=async_call_hmipc_service, schema=SCHEMA_DEACTIVATE_VACATION, ) hass.services.async_register( - domain=HMIPC_DOMAIN, + domain=DOMAIN, service=SERVICE_SET_ACTIVE_CLIMATE_PROFILE, service_func=async_call_hmipc_service, schema=SCHEMA_SET_ACTIVE_CLIMATE_PROFILE, @@ -193,7 +193,7 @@ async def async_setup_services(hass: HomeAssistant) -> None: async_register_admin_service( hass=hass, - domain=HMIPC_DOMAIN, + domain=DOMAIN, service=SERVICE_DUMP_HAP_CONFIG, service_func=async_call_hmipc_service, schema=SCHEMA_DUMP_HAP_CONFIG, @@ -201,7 +201,7 @@ async def async_setup_services(hass: HomeAssistant) -> None: async_register_admin_service( hass=hass, - domain=HMIPC_DOMAIN, + domain=DOMAIN, service=SERVICE_RESET_ENERGY_COUNTER, service_func=async_call_hmipc_service, schema=SCHEMA_RESET_ENERGY_COUNTER, @@ -209,7 +209,7 @@ async def async_setup_services(hass: HomeAssistant) -> None: async_register_admin_service( hass=hass, - domain=HMIPC_DOMAIN, + domain=DOMAIN, service=SERVICE_SET_HOME_COOLING_MODE, service_func=async_call_hmipc_service, schema=SCHEMA_SET_HOME_COOLING_MODE, @@ -218,11 +218,11 @@ async def async_setup_services(hass: HomeAssistant) -> None: async def async_unload_services(hass: HomeAssistant): """Unload HomematicIP Cloud services.""" - if hass.data[HMIPC_DOMAIN]: + if hass.data[DOMAIN]: return for hmipc_service in HMIPC_SERVICES: - hass.services.async_remove(domain=HMIPC_DOMAIN, service=hmipc_service) + hass.services.async_remove(domain=DOMAIN, 
service=hmipc_service) async def _async_activate_eco_mode_with_duration( @@ -235,7 +235,7 @@ async def _async_activate_eco_mode_with_duration( if home := _get_home(hass, hapid): await home.activate_absence_with_duration(duration) else: - for hap in hass.data[HMIPC_DOMAIN].values(): + for hap in hass.data[DOMAIN].values(): await hap.home.activate_absence_with_duration(duration) @@ -249,7 +249,7 @@ async def _async_activate_eco_mode_with_period( if home := _get_home(hass, hapid): await home.activate_absence_with_period(endtime) else: - for hap in hass.data[HMIPC_DOMAIN].values(): + for hap in hass.data[DOMAIN].values(): await hap.home.activate_absence_with_period(endtime) @@ -262,7 +262,7 @@ async def _async_activate_vacation(hass: HomeAssistant, service: ServiceCall) -> if home := _get_home(hass, hapid): await home.activate_vacation(endtime, temperature) else: - for hap in hass.data[HMIPC_DOMAIN].values(): + for hap in hass.data[DOMAIN].values(): await hap.home.activate_vacation(endtime, temperature) @@ -272,7 +272,7 @@ async def _async_deactivate_eco_mode(hass: HomeAssistant, service: ServiceCall) if home := _get_home(hass, hapid): await home.deactivate_absence() else: - for hap in hass.data[HMIPC_DOMAIN].values(): + for hap in hass.data[DOMAIN].values(): await hap.home.deactivate_absence() @@ -282,7 +282,7 @@ async def _async_deactivate_vacation(hass: HomeAssistant, service: ServiceCall) if home := _get_home(hass, hapid): await home.deactivate_vacation() else: - for hap in hass.data[HMIPC_DOMAIN].values(): + for hap in hass.data[DOMAIN].values(): await hap.home.deactivate_vacation() @@ -293,7 +293,7 @@ async def _set_active_climate_profile( entity_id_list = service.data[ATTR_ENTITY_ID] climate_profile_index = service.data[ATTR_CLIMATE_PROFILE_INDEX] - 1 - for hap in hass.data[HMIPC_DOMAIN].values(): + for hap in hass.data[DOMAIN].values(): if entity_id_list != "all": for entity_id in entity_id_list: group = hap.hmip_device_by_entity_id.get(entity_id) @@ -313,7 +313,7 @@ async def _async_dump_hap_config(hass: HomeAssistant, service: ServiceCall) -> N config_file_prefix = service.data[ATTR_CONFIG_OUTPUT_FILE_PREFIX] anonymize = service.data[ATTR_ANONYMIZE] - for hap in hass.data[HMIPC_DOMAIN].values(): + for hap in hass.data[DOMAIN].values(): hap_sgtin = hap.config_entry.unique_id if anonymize: @@ -333,7 +333,7 @@ async def _async_reset_energy_counter(hass: HomeAssistant, service: ServiceCall) """Service to reset the energy counter.""" entity_id_list = service.data[ATTR_ENTITY_ID] - for hap in hass.data[HMIPC_DOMAIN].values(): + for hap in hass.data[DOMAIN].values(): if entity_id_list != "all": for entity_id in entity_id_list: device = hap.hmip_device_by_entity_id.get(entity_id) @@ -353,17 +353,17 @@ async def _async_set_home_cooling_mode(hass: HomeAssistant, service: ServiceCall if home := _get_home(hass, hapid): await home.set_cooling(cooling) else: - for hap in hass.data[HMIPC_DOMAIN].values(): + for hap in hass.data[DOMAIN].values(): await hap.home.set_cooling(cooling) def _get_home(hass: HomeAssistant, hapid: str) -> AsyncHome | None: """Return a HmIP home.""" - if hap := hass.data[HMIPC_DOMAIN].get(hapid): + if hap := hass.data[DOMAIN].get(hapid): return hap.home raise ServiceValidationError( - translation_domain=HMIPC_DOMAIN, + translation_domain=DOMAIN, translation_key="access_point_not_found", translation_placeholders={"id": hapid}, ) diff --git a/homeassistant/components/homematicip_cloud/strings.json b/homeassistant/components/homematicip_cloud/strings.json index 
a7c795c81f6..ac7b184e513 100644 --- a/homeassistant/components/homematicip_cloud/strings.json +++ b/homeassistant/components/homematicip_cloud/strings.json @@ -11,7 +11,7 @@ }, "link": { "title": "Link Access point", - "description": "Press the blue button on the access point and the submit button to register HomematicIP with Home Assistant.\n\n![Location of button on bridge](/static/images/config_flows/config_homematicip_cloud.png)" + "description": "Press the blue button on the access point and the **Submit** button to register HomematicIP with Home Assistant.\n\n![Location of button on bridge](/static/images/config_flows/config_homematicip_cloud.png)" } }, "error": { diff --git a/homeassistant/components/homematicip_cloud/switch.py b/homeassistant/components/homematicip_cloud/switch.py index 9aa60d45d93..70bf14631cb 100644 --- a/homeassistant/components/homematicip_cloud/switch.py +++ b/homeassistant/components/homematicip_cloud/switch.py @@ -27,8 +27,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity -from .generic_entity import ATTR_GROUP_MEMBER_UNREACHABLE +from .const import DOMAIN +from .entity import ATTR_GROUP_MEMBER_UNREACHABLE, HomematicipGenericEntity from .hap import HomematicipHAP @@ -38,7 +38,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP switch from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] entities: list[HomematicipGenericEntity] = [ HomematicipGroupSwitch(hap, group) for group in hap.home.groups diff --git a/homeassistant/components/homematicip_cloud/weather.py b/homeassistant/components/homematicip_cloud/weather.py index 34e3f58d6ef..cbe7c2845b8 100644 --- a/homeassistant/components/homematicip_cloud/weather.py +++ b/homeassistant/components/homematicip_cloud/weather.py @@ -27,7 +27,8 @@ from homeassistant.const import UnitOfSpeed, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity +from .const import DOMAIN +from .entity import HomematicipGenericEntity from .hap import HomematicipHAP HOME_WEATHER_CONDITION = { @@ -55,7 +56,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the HomematicIP weather sensor from a config entry.""" - hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] + hap = hass.data[DOMAIN][config_entry.unique_id] entities: list[HomematicipGenericEntity] = [] for device in hap.home.devices: if isinstance(device, AsyncWeatherSensorPro): diff --git a/homeassistant/components/homewizard/button.py b/homeassistant/components/homewizard/button.py index a9cc19d72a7..7b05cb95271 100644 --- a/homeassistant/components/homewizard/button.py +++ b/homeassistant/components/homewizard/button.py @@ -10,6 +10,8 @@ from .coordinator import HWEnergyDeviceUpdateCoordinator from .entity import HomeWizardEntity from .helpers import homewizard_exception_handler +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/homewizard/config_flow.py b/homeassistant/components/homewizard/config_flow.py index 06dbb9c8333..a6e4356328e 100644 --- a/homeassistant/components/homewizard/config_flow.py +++ b/homeassistant/components/homewizard/config_flow.py @@ -6,16 +6,18 @@ from collections.abc import Mapping import logging from typing import Any, NamedTuple -from homewizard_energy import HomeWizardEnergy +from homewizard_energy import HomeWizardEnergyV1 from homewizard_energy.errors import DisabledError, RequestError, UnsupportedError -from homewizard_energy.models import Device -from voluptuous import Required, Schema +from homewizard_energy.v1.models import Device +import voluptuous as vol from homeassistant.components import onboarding, zeroconf -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.components.dhcp import DhcpServiceInfo +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_IP_ADDRESS, CONF_PATH from homeassistant.data_entry_flow import AbortFlow from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.selector import TextSelector from .const import ( CONF_API_ENABLED, @@ -43,7 +45,6 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 discovery: DiscoveryData - entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -69,11 +70,11 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): user_input = user_input or {} return self.async_show_form( step_id="user", - data_schema=Schema( + data_schema=vol.Schema( { - Required( + vol.Required( CONF_IP_ADDRESS, default=user_input.get(CONF_IP_ADDRESS) - ): str, + ): TextSelector(), } ), errors=errors, @@ -111,6 +112,32 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_discovery_confirm() + async def async_step_dhcp( + self, discovery_info: DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle dhcp discovery to update existing entries. + + This flow is triggered only by DHCP discovery of known devices. 
+ """ + try: + device = await self._async_try_connect(discovery_info.ip) + except RecoverableError as ex: + _LOGGER.error(ex) + return self.async_abort(reason="unknown") + + await self.async_set_unique_id( + f"{device.product_type}_{discovery_info.macaddress}" + ) + + self._abort_if_unique_id_configured( + updates={CONF_IP_ADDRESS: discovery_info.ip} + ) + + # This situation should never happen, as Home Assistant will only + # send updates for existing entries. In case it does, we'll just + # abort the flow with an unknown error. + return self.async_abort(reason="unknown") + async def async_step_discovery_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -151,7 +178,6 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-auth if API was disabled.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -160,18 +186,52 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): """Confirm reauth dialog.""" errors: dict[str, str] | None = None if user_input is not None: - assert self.entry is not None + reauth_entry = self._get_reauth_entry() try: - await self._async_try_connect(self.entry.data[CONF_IP_ADDRESS]) + await self._async_try_connect(reauth_entry.data[CONF_IP_ADDRESS]) except RecoverableError as ex: _LOGGER.error(ex) errors = {"base": ex.error_code} else: - await self.hass.config_entries.async_reload(self.entry.entry_id) + await self.hass.config_entries.async_reload(reauth_entry.entry_id) return self.async_abort(reason="reauth_successful") + return self.async_show_form(step_id="reauth_confirm", errors=errors) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + errors: dict[str, str] = {} + if user_input: + try: + device_info = await self._async_try_connect(user_input[CONF_IP_ADDRESS]) + except RecoverableError as ex: + _LOGGER.error(ex) + errors = {"base": ex.error_code} + else: + await self.async_set_unique_id( + f"{device_info.product_type}_{device_info.serial}" + ) + self._abort_if_unique_id_mismatch(reason="wrong_device") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates=user_input, + ) + reconfigure_entry = self._get_reconfigure_entry() return self.async_show_form( - step_id="reauth_confirm", + step_id="reconfigure", + data_schema=vol.Schema( + { + vol.Required( + CONF_IP_ADDRESS, + default=reconfigure_entry.data.get(CONF_IP_ADDRESS), + ): TextSelector(), + } + ), + description_placeholders={ + "title": reconfigure_entry.title, + }, errors=errors, ) @@ -182,7 +242,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): Make connection with device to test the connection and to get info for unique_id. 
""" - energy_api = HomeWizardEnergy(ip_address) + energy_api = HomeWizardEnergyV1(ip_address) try: return await energy_api.device() diff --git a/homeassistant/components/homewizard/const.py b/homeassistant/components/homewizard/const.py index 8cee8350268..809ecc1416b 100644 --- a/homeassistant/components/homewizard/const.py +++ b/homeassistant/components/homewizard/const.py @@ -6,7 +6,7 @@ from dataclasses import dataclass from datetime import timedelta import logging -from homewizard_energy.models import Data, Device, State, System +from homewizard_energy.v1.models import Data, Device, State, System from homeassistant.const import Platform diff --git a/homeassistant/components/homewizard/coordinator.py b/homeassistant/components/homewizard/coordinator.py index db41d1dd128..8f5045d3b94 100644 --- a/homeassistant/components/homewizard/coordinator.py +++ b/homeassistant/components/homewizard/coordinator.py @@ -4,10 +4,10 @@ from __future__ import annotations import logging -from homewizard_energy import HomeWizardEnergy -from homewizard_energy.const import SUPPORTS_IDENTIFY, SUPPORTS_STATE +from homewizard_energy import HomeWizardEnergyV1 from homewizard_energy.errors import DisabledError, RequestError, UnsupportedError -from homewizard_energy.models import Device +from homewizard_energy.v1.const import SUPPORTS_IDENTIFY, SUPPORTS_STATE +from homewizard_energy.v1.models import Device from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_IP_ADDRESS @@ -23,7 +23,7 @@ _LOGGER = logging.getLogger(__name__) class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry]): """Gather data for the energy device.""" - api: HomeWizardEnergy + api: HomeWizardEnergyV1 api_disabled: bool = False _unsupported_error: bool = False @@ -36,7 +36,7 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry] ) -> None: """Initialize update coordinator.""" super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=UPDATE_INTERVAL) - self.api = HomeWizardEnergy( + self.api = HomeWizardEnergyV1( self.config_entry.data[CONF_IP_ADDRESS], clientsession=async_get_clientsession(hass), ) @@ -66,7 +66,9 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry] ) except RequestError as ex: - raise UpdateFailed(ex) from ex + raise UpdateFailed( + ex, translation_domain=DOMAIN, translation_key="communication_error" + ) from ex except DisabledError as ex: if not self.api_disabled: @@ -74,11 +76,14 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry] # Do not reload when performing first refresh if self.data is not None: - await self.hass.config_entries.async_reload( + # Reload config entry to let init flow handle retrying and trigger repair flow + self.hass.config_entries.async_schedule_reload( self.config_entry.entry_id ) - raise UpdateFailed(ex) from ex + raise UpdateFailed( + ex, translation_domain=DOMAIN, translation_key="api_disabled" + ) from ex self.api_disabled = False diff --git a/homeassistant/components/homewizard/manifest.json b/homeassistant/components/homewizard/manifest.json index 65672903eb8..13bfc512551 100644 --- a/homeassistant/components/homewizard/manifest.json +++ b/homeassistant/components/homewizard/manifest.json @@ -3,10 +3,15 @@ "name": "HomeWizard Energy", "codeowners": ["@DCSBL"], "config_flow": true, + "dhcp": [ + { + "registered_devices": true + } + ], "documentation": "https://www.home-assistant.io/integrations/homewizard", "iot_class": "local_polling", 
"loggers": ["homewizard_energy"], "quality_scale": "platinum", - "requirements": ["python-homewizard-energy==v6.3.0"], + "requirements": ["python-homewizard-energy==v7.0.0"], "zeroconf": ["_hwenergy._tcp.local."] } diff --git a/homeassistant/components/homewizard/number.py b/homeassistant/components/homewizard/number.py index 1af77859a0f..1ed4c642f6b 100644 --- a/homeassistant/components/homewizard/number.py +++ b/homeassistant/components/homewizard/number.py @@ -13,6 +13,8 @@ from .coordinator import HWEnergyDeviceUpdateCoordinator from .entity import HomeWizardEntity from .helpers import homewizard_exception_handler +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, @@ -62,4 +64,4 @@ class HWEnergyNumberEntity(HomeWizardEntity, NumberEntity): or (brightness := self.coordinator.data.state.brightness) is None ): return None - return brightness_to_value((0, 100), brightness) + return round(brightness_to_value((0, 100), brightness)) diff --git a/homeassistant/components/homewizard/quality_scale.yaml b/homeassistant/components/homewizard/quality_scale.yaml new file mode 100644 index 00000000000..423bc4dea49 --- /dev/null +++ b/homeassistant/components/homewizard/quality_scale.yaml @@ -0,0 +1,81 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + The integration does not provide any additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + The integration does not provide any additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + The integration connects to a single device per configuration entry. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connect to a single device per configuration entry. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/homewizard/sensor.py b/homeassistant/components/homewizard/sensor.py index c5cf0bc64c7..8b822bffc50 100644 --- a/homeassistant/components/homewizard/sensor.py +++ b/homeassistant/components/homewizard/sensor.py @@ -6,7 +6,7 @@ from collections.abc import Callable from dataclasses import dataclass from typing import Final -from homewizard_energy.models import Data, ExternalDevice +from homewizard_energy.v1.models import Data, ExternalDevice from homeassistant.components.sensor import ( DEVICE_CLASS_UNITS, @@ -19,7 +19,6 @@ from homeassistant.const import ( ATTR_VIA_DEVICE, PERCENTAGE, EntityCategory, - Platform, UnitOfApparentPower, UnitOfElectricCurrent, UnitOfElectricPotential, @@ -28,9 +27,9 @@ from homeassistant.const import ( UnitOfPower, UnitOfReactivePower, UnitOfVolume, + UnitOfVolumeFlowRate, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -567,7 +566,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( HomeWizardSensorEntityDescription( key="active_liter_lpm", translation_key="active_liter_lpm", - native_unit_of_measurement="l/min", + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, state_class=SensorStateClass.MEASUREMENT, has_fn=lambda data: data.active_liter_lpm is not None, value_fn=lambda data: data.active_liter_lpm, @@ -625,26 +624,7 @@ async def async_setup_entry( ) -> None: """Initialize sensors.""" - # Migrate original gas meter sensor to ExternalDevice - # This is sensor that was directly linked to the P1 Meter - # Migration can be removed after 2024.8.0 - ent_reg = er.async_get(hass) data = entry.runtime_data.data.data - if ( - entity_id := ent_reg.async_get_entity_id( - Platform.SENSOR, DOMAIN, f"{entry.unique_id}_total_gas_m3" - ) - ) and data.gas_unique_id is not None: - ent_reg.async_update_entity( - entity_id, - new_unique_id=f"{DOMAIN}_gas_meter_{data.gas_unique_id}", - ) - - # Remove old gas_unique_id sensor - if entity_id := ent_reg.async_get_entity_id( - Platform.SENSOR, DOMAIN, f"{entry.unique_id}_gas_unique_id" - ): - ent_reg.async_remove(entity_id) # Initialize default sensors entities: list = [ @@ -657,17 +637,6 @@ async def async_setup_entry( if data.external_devices is not None: for unique_id, device in data.external_devices.items(): if description := EXTERNAL_SENSORS.get(device.meter_type): - # Migrate external devices to new unique_id - # This is to ensure that devices with same id but different type are unique - # Migration can be removed after 2024.11.0 - if entity_id := ent_reg.async_get_entity_id( - Platform.SENSOR, DOMAIN, f"{DOMAIN}_{device.unique_id}" - ): - ent_reg.async_update_entity( - entity_id, - new_unique_id=f"{DOMAIN}_{unique_id}", - ) - # Add external device entities.append( HomeWizardExternalSensorEntity( diff --git a/homeassistant/components/homewizard/strings.json b/homeassistant/components/homewizard/strings.json index ca903330a44..4309664c4c8 100644 --- a/homeassistant/components/homewizard/strings.json +++ b/homeassistant/components/homewizard/strings.json @@ -6,6 +6,9 @@ "description": "Enter the IP address of your HomeWizard Energy device to integrate with Home Assistant.", "data": { "ip_address": 
"[%key:common::config_flow::data::ip%]" + }, + "data_description": { + "ip_address": "The IP address of your HomeWizard Energy device." } }, "discovery_confirm": { @@ -14,18 +17,30 @@ }, "reauth_confirm": { "description": "The local API is disabled. Go to the HomeWizard Energy app and enable the API in the device settings." + }, + "reconfigure": { + "description": "Update configuration for {title}.", + "data": { + "ip_address": "[%key:common::config_flow::data::ip%]" + }, + "data_description": { + "ip_address": "[%key:component::homewizard::config::step::user::data_description::ip_address%]" + } } }, "error": { - "api_not_enabled": "The API is not enabled. Enable API in the HomeWizard Energy App under settings", + "api_not_enabled": "The local API is disabled. Go to the HomeWizard Energy app and enable the API in the device settings.", "network_error": "Device unreachable, make sure that you have entered the correct IP address and that the device is available in your network" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "invalid_discovery_parameters": "Detected unsupported API version", + "invalid_discovery_parameters": "Invalid discovery parameters", "device_not_supported": "This device is not supported", "unknown_error": "[%key:common::config_flow::error::unknown%]", - "reauth_successful": "Enabling API was successful" + "unsupported_api_version": "Detected unsupported API version", + "reauth_successful": "Enabling API was successful", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "wrong_device": "The configured device is not the same found on this IP address." } }, "entity": { @@ -119,7 +134,7 @@ }, "exceptions": { "api_disabled": { - "message": "The local API of the HomeWizard device is disabled" + "message": "The local API is disabled." 
}, "communication_error": { "message": "An error occurred while communicating with HomeWizard device" diff --git a/homeassistant/components/homewizard/switch.py b/homeassistant/components/homewizard/switch.py index 14c6e0778f1..aa0af17f578 100644 --- a/homeassistant/components/homewizard/switch.py +++ b/homeassistant/components/homewizard/switch.py @@ -6,7 +6,7 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import Any -from homewizard_energy import HomeWizardEnergy +from homewizard_energy import HomeWizardEnergyV1 from homeassistant.components.switch import ( SwitchDeviceClass, @@ -23,6 +23,8 @@ from .coordinator import HWEnergyDeviceUpdateCoordinator from .entity import HomeWizardEntity from .helpers import homewizard_exception_handler +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class HomeWizardSwitchEntityDescription(SwitchEntityDescription): @@ -31,7 +33,7 @@ class HomeWizardSwitchEntityDescription(SwitchEntityDescription): available_fn: Callable[[DeviceResponseEntry], bool] create_fn: Callable[[HWEnergyDeviceUpdateCoordinator], bool] is_on_fn: Callable[[DeviceResponseEntry], bool | None] - set_fn: Callable[[HomeWizardEnergy, bool], Awaitable[Any]] + set_fn: Callable[[HomeWizardEnergyV1, bool], Awaitable[Any]] SWITCHES = [ diff --git a/homeassistant/components/homeworks/__init__.py b/homeassistant/components/homeworks/__init__.py index 448487cb8b0..e9e8c969b61 100644 --- a/homeassistant/components/homeworks/__init__.py +++ b/homeassistant/components/homeworks/__init__.py @@ -33,7 +33,6 @@ from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send -from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType from homeassistant.util import slugify @@ -48,8 +47,6 @@ CONF_COMMAND = "command" EVENT_BUTTON_PRESS = "homeworks_button_press" EVENT_BUTTON_RELEASE = "homeworks_button_release" -DEFAULT_FADE_RATE = 1.0 - KEYPAD_LEDSTATE_POLL_COOLDOWN = 1.0 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -204,37 +201,6 @@ async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: await hass.config_entries.async_reload(entry.entry_id) -def calculate_unique_id(controller_id: str, addr: str, idx: int) -> str: - """Calculate entity unique id.""" - return f"homeworks.{controller_id}.{addr}.{idx}" - - -class HomeworksEntity(Entity): - """Base class of a Homeworks device.""" - - _attr_has_entity_name = True - _attr_should_poll = False - - def __init__( - self, - controller: Homeworks, - controller_id: str, - addr: str, - idx: int, - name: str | None, - ) -> None: - """Initialize Homeworks device.""" - self._addr = addr - self._idx = idx - self._controller_id = controller_id - self._attr_name = name - self._attr_unique_id = calculate_unique_id( - self._controller_id, self._addr, self._idx - ) - self._controller = controller - self._attr_extra_state_attributes = {"homeworks_address": self._addr} - - class HomeworksKeypad: """When you want signals instead of entities. 
diff --git a/homeassistant/components/homeworks/binary_sensor.py b/homeassistant/components/homeworks/binary_sensor.py index 9a9f7086ba5..f1ba3c02835 100644 --- a/homeassistant/components/homeworks/binary_sensor.py +++ b/homeassistant/components/homeworks/binary_sensor.py @@ -15,7 +15,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeworksData, HomeworksEntity, HomeworksKeypad +from . import HomeworksData, HomeworksKeypad from .const import ( CONF_ADDR, CONF_BUTTONS, @@ -25,6 +25,7 @@ from .const import ( CONF_NUMBER, DOMAIN, ) +from .entity import HomeworksEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/homeworks/button.py b/homeassistant/components/homeworks/button.py index f071b05b492..6a13573ac88 100644 --- a/homeassistant/components/homeworks/button.py +++ b/homeassistant/components/homeworks/button.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeworksData, HomeworksEntity +from . import HomeworksData from .const import ( CONF_ADDR, CONF_BUTTONS, @@ -23,6 +23,7 @@ from .const import ( CONF_RELEASE_DELAY, DOMAIN, ) +from .entity import HomeworksEntity async def async_setup_entry( diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index 8e9c8e3b29a..d1fa7774ef6 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -39,7 +39,6 @@ from homeassistant.helpers.selector import TextSelector from homeassistant.helpers.typing import VolDictType from homeassistant.util import slugify -from . 
import DEFAULT_FADE_RATE, calculate_unique_id from .const import ( CONF_ADDR, CONF_BUTTONS, @@ -56,9 +55,12 @@ from .const import ( DEFAULT_LIGHT_NAME, DOMAIN, ) +from .util import calculate_unique_id _LOGGER = logging.getLogger(__name__) +DEFAULT_FADE_RATE = 1.0 + CONTROLLER_EDIT = { vol.Required(CONF_HOST): selector.TextSelector(), vol.Required(CONF_PORT): selector.NumberSelector( @@ -556,23 +558,19 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for Lutron Homeworks.""" async def _validate_edit_controller( - self, user_input: dict[str, Any] + self, user_input: dict[str, Any], reconfigure_entry: ConfigEntry ) -> dict[str, Any]: """Validate controller setup.""" _validate_credentials(user_input) user_input[CONF_PORT] = int(user_input[CONF_PORT]) - our_entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert our_entry - other_entries = self._async_current_entries() - for entry in other_entries: - if entry.entry_id == our_entry.entry_id: - continue - if ( - user_input[CONF_HOST] == entry.options[CONF_HOST] - and user_input[CONF_PORT] == entry.options[CONF_PORT] - ): - raise SchemaFlowError("duplicated_host_port") + if any( + entry.entry_id != reconfigure_entry.entry_id + and user_input[CONF_HOST] == entry.options[CONF_HOST] + and user_input[CONF_PORT] == entry.options[CONF_PORT] + for entry in self._async_current_entries() + ): + raise SchemaFlowError("duplicated_host_port") await _try_connection(user_input) return user_input @@ -581,15 +579,13 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfigure flow.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - errors = {} + reconfigure_entry = self._get_reconfigure_entry() suggested_values = { - CONF_HOST: entry.options[CONF_HOST], - CONF_PORT: entry.options[CONF_PORT], - CONF_USERNAME: entry.data.get(CONF_USERNAME), - CONF_PASSWORD: entry.data.get(CONF_PASSWORD), + CONF_HOST: reconfigure_entry.options[CONF_HOST], + CONF_PORT: reconfigure_entry.options[CONF_PORT], + CONF_USERNAME: reconfigure_entry.data.get(CONF_USERNAME), + CONF_PASSWORD: reconfigure_entry.data.get(CONF_PASSWORD), } if user_input: @@ -600,25 +596,24 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): CONF_PASSWORD: user_input.get(CONF_PASSWORD), } try: - await self._validate_edit_controller(user_input) + await self._validate_edit_controller(user_input, reconfigure_entry) except SchemaFlowError as err: errors["base"] = str(err) else: password = user_input.pop(CONF_PASSWORD, None) username = user_input.pop(CONF_USERNAME, None) - new_data = entry.data | { + new_data = reconfigure_entry.data | { CONF_PASSWORD: password, CONF_USERNAME: username, } - new_options = entry.options | { + new_options = reconfigure_entry.options | { CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT], } return self.async_update_reload_and_abort( - entry, + reconfigure_entry, data=new_data, options=new_options, - reason="reconfigure_successful", reload_even_if_entry_is_unchanged=False, ) diff --git a/homeassistant/components/homeworks/entity.py b/homeassistant/components/homeworks/entity.py new file mode 100644 index 00000000000..49abfb9241e --- /dev/null +++ b/homeassistant/components/homeworks/entity.py @@ -0,0 +1,35 @@ +"""Support for Lutron Homeworks Series 4 and 8 systems.""" + +from __future__ import annotations + +from pyhomeworks.pyhomeworks import Homeworks + +from 
homeassistant.helpers.entity import Entity + +from .util import calculate_unique_id + + +class HomeworksEntity(Entity): + """Base class of a Homeworks device.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__( + self, + controller: Homeworks, + controller_id: str, + addr: str, + idx: int, + name: str | None, + ) -> None: + """Initialize Homeworks device.""" + self._addr = addr + self._idx = idx + self._controller_id = controller_id + self._attr_name = name + self._attr_unique_id = calculate_unique_id( + self._controller_id, self._addr, self._idx + ) + self._controller = controller + self._attr_extra_state_attributes = {"homeworks_address": self._addr} diff --git a/homeassistant/components/homeworks/icons.json b/homeassistant/components/homeworks/icons.json index f53b447d96e..fc39b2ef455 100644 --- a/homeassistant/components/homeworks/icons.json +++ b/homeassistant/components/homeworks/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_command": "mdi:console" + "send_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/homeworks/light.py b/homeassistant/components/homeworks/light.py index 20ae08017d3..ac52c1f4974 100644 --- a/homeassistant/components/homeworks/light.py +++ b/homeassistant/components/homeworks/light.py @@ -15,8 +15,9 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeworksData, HomeworksEntity +from . import HomeworksData from .const import CONF_ADDR, CONF_CONTROLLER_ID, CONF_DIMMERS, CONF_RATE, DOMAIN +from .entity import HomeworksEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/homeworks/strings.json b/homeassistant/components/homeworks/strings.json index a9dcab2f1e0..977e6be8afd 100644 --- a/homeassistant/components/homeworks/strings.json +++ b/homeassistant/components/homeworks/strings.json @@ -1,5 +1,8 @@ { "config": { + "abort": { + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + }, "error": { "connection_error": "Could not connect to the controller.", "credentials_needed": "The controller needs credentials.", diff --git a/homeassistant/components/homeworks/util.py b/homeassistant/components/homeworks/util.py new file mode 100644 index 00000000000..0ed295f7bae --- /dev/null +++ b/homeassistant/components/homeworks/util.py @@ -0,0 +1,6 @@ +"""Support for Lutron Homeworks Series 4 and 8 systems.""" + + +def calculate_unique_id(controller_id: str, addr: str, idx: int) -> str: + """Calculate entity unique id.""" + return f"homeworks.{controller_id}.{addr}.{idx}" diff --git a/homeassistant/components/honeywell/__init__.py b/homeassistant/components/honeywell/__init__.py index 5a4d6374304..a8ee5975914 100644 --- a/homeassistant/components/honeywell/__init__.py +++ b/homeassistant/components/honeywell/__init__.py @@ -26,10 +26,12 @@ PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] MIGRATE_OPTIONS_KEYS = {CONF_COOL_AWAY_TEMPERATURE, CONF_HEAT_AWAY_TEMPERATURE} +type HoneywellConfigEntry = ConfigEntry[HoneywellData] + @callback def _async_migrate_data_to_options( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: HoneywellConfigEntry ) -> None: if not MIGRATE_OPTIONS_KEYS.intersection(config_entry.data): return @@ -45,7 +47,9 @@ def _async_migrate_data_to_options( ) -async def async_setup_entry(hass: HomeAssistant, 
config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: HoneywellConfigEntry +) -> bool: """Set up the Honeywell thermostat.""" _async_migrate_data_to_options(hass, config_entry) @@ -84,8 +88,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b if len(devices) == 0: _LOGGER.debug("No devices found") return False - data = HoneywellData(config_entry.entry_id, client, devices) - hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = data + config_entry.runtime_data = HoneywellData(config_entry.entry_id, client, devices) await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) config_entry.async_on_unload(config_entry.add_update_listener(update_listener)) @@ -93,19 +96,18 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b return True -async def update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def update_listener( + hass: HomeAssistant, config_entry: HoneywellConfigEntry +) -> None: """Update listener.""" await hass.config_entries.async_reload(config_entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: HoneywellConfigEntry +) -> bool: """Unload the config and platforms.""" - unload_ok = await hass.config_entries.async_unload_platforms( - config_entry, PLATFORMS - ) - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) @dataclass diff --git a/homeassistant/components/honeywell/climate.py b/homeassistant/components/honeywell/climate.py index 141cb87f117..7398ada23be 100644 --- a/homeassistant/components/honeywell/climate.py +++ b/homeassistant/components/honeywell/climate.py @@ -31,20 +31,15 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import ( - device_registry as dr, - entity_registry as er, - issue_registry as ir, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.unit_conversion import TemperatureConverter -from . import HoneywellData +from . 
import HoneywellConfigEntry, HoneywellData from .const import ( _LOGGER, CONF_COOL_AWAY_TEMPERATURE, @@ -53,6 +48,10 @@ from .const import ( RETRY, ) +MODE_PERMANENT_HOLD = 2 +MODE_TEMPORARY_HOLD = 1 +MODE_HOLD = {MODE_PERMANENT_HOLD, MODE_TEMPORARY_HOLD} + ATTR_FAN_ACTION = "fan_action" ATTR_PERMANENT_HOLD = "permanent_hold" @@ -97,13 +96,15 @@ SCAN_INTERVAL = datetime.timedelta(seconds=30) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: HoneywellConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Honeywell thermostat.""" cool_away_temp = entry.options.get(CONF_COOL_AWAY_TEMPERATURE) heat_away_temp = entry.options.get(CONF_HEAT_AWAY_TEMPERATURE) - data: HoneywellData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data _async_migrate_unique_id(hass, data.devices) async_add_entities( [ @@ -131,7 +132,7 @@ def _async_migrate_unique_id( def remove_stale_devices( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: HoneywellConfigEntry, devices: dict[str, SomeComfortDevice], ) -> None: """Remove stale devices from device registry.""" @@ -164,7 +165,6 @@ class HoneywellUSThermostat(ClimateEntity): _attr_has_entity_name = True _attr_name = None _attr_translation_key = "honeywell" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -179,6 +179,7 @@ class HoneywellUSThermostat(ClimateEntity): self._cool_away_temp = cool_away_temp self._heat_away_temp = heat_away_temp self._away = False + self._away_hold = False self._retry = 0 self._attr_unique_id = str(device.deviceid) @@ -218,9 +219,6 @@ class HoneywellUSThermostat(ClimateEntity): if device._data.get("canControlHumidification"): # noqa: SLF001 self._attr_supported_features |= ClimateEntityFeature.TARGET_HUMIDITY - if device.raw_ui_data.get("SwitchEmergencyHeatAllowed"): - self._attr_supported_features |= ClimateEntityFeature.AUX_HEAT - if not device._data.get("hasFan"): # noqa: SLF001 return @@ -330,27 +328,31 @@ class HoneywellUSThermostat(ClimateEntity): @property def preset_mode(self) -> str | None: """Return the current preset mode, e.g., home, away, temp.""" - if self._away: + if self._away and self._is_hold(): + self._away_hold = True return PRESET_AWAY - if self._is_permanent_hold(): + if self._is_hold(): return PRESET_HOLD - + # Someone has changed the stat manually out of hold in away mode + if self._away and self._away_hold: + self._away = False + self._away_hold = False return PRESET_NONE - @property - def is_aux_heat(self) -> bool | None: - """Return true if aux heater.""" - return self._device.system_mode == "emheat" - @property def fan_mode(self) -> str | None: """Return the fan setting.""" return HW_FAN_MODE_TO_HA.get(self._device.fan_mode) + def _is_hold(self) -> bool: + heat_status = self._device.raw_ui_data.get("StatusHeat", 0) + cool_status = self._device.raw_ui_data.get("StatusCool", 0) + return heat_status in MODE_HOLD or cool_status in MODE_HOLD + def _is_permanent_hold(self) -> bool: heat_status = self._device.raw_ui_data.get("StatusHeat", 0) cool_status = self._device.raw_ui_data.get("StatusCool", 0) - return heat_status == 2 or cool_status == 2 + return MODE_PERMANENT_HOLD in (heat_status, cool_status) async def _set_temperature(self, **kwargs) -> None: """Set new target temperature.""" @@ -396,7 +398,7 @@ class HoneywellUSThermostat(ClimateEntity): raise ServiceValidationError( translation_domain=DOMAIN, translation_key="temp_failed_value", - 
translation_placeholders={"temp": temperature}, + translation_placeholders={"temperature": temperature}, ) from err async def async_set_temperature(self, **kwargs: Any) -> None: @@ -420,7 +422,7 @@ class HoneywellUSThermostat(ClimateEntity): raise ServiceValidationError( translation_domain=DOMAIN, translation_key="temp_failed_value", - translation_placeholders={"temp": str(temperature)}, + translation_placeholders={"temperature": str(temperature)}, ) from err async def async_set_fan_mode(self, fan_mode: str) -> None: @@ -538,53 +540,6 @@ class HoneywellUSThermostat(ClimateEntity): else: await self._turn_away_mode_off() - async def async_turn_aux_heat_on(self) -> None: - """Turn auxiliary heater on.""" - ir.async_create_issue( - self.hass, - DOMAIN, - "service_deprecation", - breaks_in_ha_version="2024.10.0", - is_fixable=True, - is_persistent=True, - severity=ir.IssueSeverity.WARNING, - translation_key="service_deprecation", - ) - try: - await self._device.set_system_mode("emheat") - - except SomeComfortError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="set_aux_failed", - ) from err - - async def async_turn_aux_heat_off(self) -> None: - """Turn auxiliary heater off.""" - - ir.async_create_issue( - self.hass, - DOMAIN, - "service_deprecation", - breaks_in_ha_version="2024.10.0", - is_fixable=True, - is_persistent=True, - severity=ir.IssueSeverity.WARNING, - translation_key="service_deprecation", - ) - - try: - if HVACMode.HEAT in self.hvac_modes: - await self.async_set_hvac_mode(HVACMode.HEAT) - else: - await self.async_set_hvac_mode(HVACMode.OFF) - - except HomeAssistantError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="disable_aux_failed", - ) from err - async def async_update(self) -> None: """Get the latest state from the service.""" diff --git a/homeassistant/components/honeywell/config_flow.py b/homeassistant/components/honeywell/config_flow.py index 7f298aee632..c7cda500692 100644 --- a/homeassistant/components/honeywell/config_flow.py +++ b/homeassistant/components/honeywell/config_flow.py @@ -38,14 +38,11 @@ class HoneywellConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a honeywell config flow.""" VERSION = 1 - entry: ConfigEntry | None async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Honeywell.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -53,8 +50,8 @@ class HoneywellConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm re-authentication with Honeywell.""" errors: dict[str, str] = {} - assert self.entry is not None + reauth_entry = self._get_reauth_entry() if user_input: try: await self.is_valid( @@ -72,18 +69,14 @@ class HoneywellConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( - self.entry, - data={ - **self.entry.data, - **user_input, - }, + reauth_entry, + data_updates=user_input, ) return self.async_show_form( step_id="reauth_confirm", data_schema=self.add_suggested_values_to_schema( - REAUTH_SCHEMA, - self.entry.data, + REAUTH_SCHEMA, reauth_entry.data ), errors=errors, description_placeholders={"name": "Honeywell"}, @@ -136,16 +129,12 @@ class HoneywellConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> HoneywellOptionsFlowHandler: """Options callback for Honeywell.""" - return 
HoneywellOptionsFlowHandler(config_entry) + return HoneywellOptionsFlowHandler() class HoneywellOptionsFlowHandler(OptionsFlow): """Config flow options for Honeywell.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize Honeywell options flow.""" - self.config_entry = entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: diff --git a/homeassistant/components/honeywell/diagnostics.py b/homeassistant/components/honeywell/diagnostics.py index 35624c8fc39..b266e06d110 100644 --- a/homeassistant/components/honeywell/diagnostics.py +++ b/homeassistant/components/honeywell/diagnostics.py @@ -4,19 +4,17 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import HoneywellData -from .const import DOMAIN +from . import HoneywellConfigEntry async def async_get_config_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: HoneywellConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - honeywell: HoneywellData = hass.data[DOMAIN][config_entry.entry_id] + honeywell = config_entry.runtime_data return { f"Device {device}": { diff --git a/homeassistant/components/honeywell/manifest.json b/homeassistant/components/honeywell/manifest.json index d0f0c8281f7..4a50e326965 100644 --- a/homeassistant/components/honeywell/manifest.json +++ b/homeassistant/components/honeywell/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/honeywell", "iot_class": "cloud_polling", "loggers": ["somecomfort"], - "requirements": ["AIOSomecomfort==0.0.25"] + "requirements": ["AIOSomecomfort==0.0.28"] } diff --git a/homeassistant/components/honeywell/sensor.py b/homeassistant/components/honeywell/sensor.py index 31ed8d646c5..a9109d5d557 100644 --- a/homeassistant/components/honeywell/sensor.py +++ b/homeassistant/components/honeywell/sensor.py @@ -14,14 +14,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import HoneywellData +from . import HoneywellConfigEntry from .const import DOMAIN OUTDOOR_TEMPERATURE_STATUS_KEY = "outdoor_temperature" @@ -81,11 +80,11 @@ SENSOR_TYPES: tuple[HoneywellSensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: HoneywellConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Honeywell thermostat.""" - data: HoneywellData = hass.data[DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data async_add_entities( HoneywellSensor(device, description) diff --git a/homeassistant/components/honeywell/strings.json b/homeassistant/components/honeywell/strings.json index d3bc1924e28..a64f1a6fce0 100644 --- a/homeassistant/components/honeywell/strings.json +++ b/homeassistant/components/honeywell/strings.json @@ -16,6 +16,9 @@ } } }, + "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" @@ -88,30 +91,11 @@ "stop_hold_failed": { "message": "Honeywell could not stop hold mode" }, - "set_aux_failed": { - "message": "Honeywell could not set system mode to aux heat" - }, - "disable_aux_failed": { - "message": "Honeywell could turn off aux heat mode" - }, "switch_failed_off": { "message": "Honeywell could turn off emergency heat mode." }, "switch_failed_on": { "message": "Honeywell could not set system mode to emergency heat mode." } - }, - "issues": { - "service_deprecation": { - "title": "Honeywell aux heat is being removed", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::honeywell::issues::service_deprecation::title%]", - "description": "Use `switch.{name}_emergency_heat` instead to change mode.\n\nPlease adjust your automations and scripts and select **submit** to fix this issue." - } - } - } - } } } diff --git a/homeassistant/components/honeywell/switch.py b/homeassistant/components/honeywell/switch.py index b90dd339593..3602dd1ba10 100644 --- a/homeassistant/components/honeywell/switch.py +++ b/homeassistant/components/honeywell/switch.py @@ -12,13 +12,12 @@ from homeassistant.components.switch import ( SwitchEntity, SwitchEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HoneywellData +from . import HoneywellConfigEntry, HoneywellData from .const import DOMAIN EMERGENCY_HEAT_KEY = "emergency_heat" @@ -34,11 +33,11 @@ SWITCH_TYPES: tuple[SwitchEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: HoneywellConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Honeywell switches.""" - data: HoneywellData = hass.data[DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data async_add_entities( HoneywellSwitch(data, device, description) for device in data.devices.values() diff --git a/homeassistant/components/horizon/manifest.json b/homeassistant/components/horizon/manifest.json index d1280a6fe65..d30e2f39e34 100644 --- a/homeassistant/components/horizon/manifest.json +++ b/homeassistant/components/horizon/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/horizon", "iot_class": "local_polling", "loggers": ["horimote"], + "quality_scale": "legacy", "requirements": ["horimote==0.4.1"] } diff --git a/homeassistant/components/horizon/media_player.py b/homeassistant/components/horizon/media_player.py index 9531f9c0ed7..ba3ca5e2e35 100644 --- a/homeassistant/components/horizon/media_player.py +++ b/homeassistant/components/horizon/media_player.py @@ -65,7 +65,7 @@ def setup_platform( _LOGGER.error("Connection to %s at %s failed: %s", name, host, msg) raise PlatformNotReady from msg - _LOGGER.info("Connection to %s at %s established", name, host) + _LOGGER.debug("Connection to %s at %s established", name, host) add_entities([HorizonDevice(client, name, keys)], True) diff --git a/homeassistant/components/hp_ilo/manifest.json b/homeassistant/components/hp_ilo/manifest.json index 378a9ac1865..9f2dfb21783 100644 --- a/homeassistant/components/hp_ilo/manifest.json +++ b/homeassistant/components/hp_ilo/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/hp_ilo", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["python-hpilo==4.4.3"] } diff --git a/homeassistant/components/html5/__init__.py b/homeassistant/components/html5/__init__.py index 88e437ef566..4b85bf8ab8c 100644 --- a/homeassistant/components/html5/__init__.py +++ b/homeassistant/components/html5/__init__.py @@ -1 +1,16 @@ """The html5 component.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import discovery + +from .const import DOMAIN + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up HTML5 from a config entry.""" + await discovery.async_load_platform( + hass, Platform.NOTIFY, DOMAIN, dict(entry.data), {} + ) + return True diff --git a/homeassistant/components/html5/config_flow.py b/homeassistant/components/html5/config_flow.py new file mode 100644 index 00000000000..66c7be6736d --- /dev/null +++ b/homeassistant/components/html5/config_flow.py @@ -0,0 +1,105 @@ +"""Config flow for the html5 component.""" + +from __future__ import annotations + +import binascii +from typing import Any, cast + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ec +from py_vapid import Vapid +from py_vapid.utils import b64urlencode +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_NAME +from homeassistant.core import callback + +from .const import ATTR_VAPID_EMAIL, ATTR_VAPID_PRV_KEY, ATTR_VAPID_PUB_KEY, DOMAIN +from .issues import 
async_create_html5_issue + + +def vapid_generate_private_key() -> str: + """Generate a VAPID private key.""" + private_key = ec.generate_private_key(ec.SECP256R1(), default_backend()) + return b64urlencode( + binascii.unhexlify(f"{private_key.private_numbers().private_value:x}".zfill(64)) + ) + + +def vapid_get_public_key(private_key: str) -> str: + """Get the VAPID public key from a private key.""" + vapid = Vapid.from_string(private_key) + public_key = cast(ec.EllipticCurvePublicKey, vapid.public_key) + return b64urlencode( + public_key.public_bytes( + serialization.Encoding.X962, serialization.PublicFormat.UncompressedPoint + ) + ) + + +class HTML5ConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for HTML5.""" + + @callback + def _async_create_html5_entry( + self: HTML5ConfigFlow, data: dict[str, str] + ) -> tuple[dict[str, str], ConfigFlowResult | None]: + """Create an HTML5 entry.""" + errors = {} + flow_result = None + + if not data.get(ATTR_VAPID_PRV_KEY): + data[ATTR_VAPID_PRV_KEY] = vapid_generate_private_key() + + # we will always generate the corresponding public key + try: + data[ATTR_VAPID_PUB_KEY] = vapid_get_public_key(data[ATTR_VAPID_PRV_KEY]) + except (ValueError, binascii.Error): + errors[ATTR_VAPID_PRV_KEY] = "invalid_prv_key" + + if not errors: + config = { + ATTR_VAPID_EMAIL: data[ATTR_VAPID_EMAIL], + ATTR_VAPID_PRV_KEY: data[ATTR_VAPID_PRV_KEY], + ATTR_VAPID_PUB_KEY: data[ATTR_VAPID_PUB_KEY], + CONF_NAME: DOMAIN, + } + flow_result = self.async_create_entry(title="HTML5", data=config) + return errors, flow_result + + async def async_step_user( + self: HTML5ConfigFlow, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + errors: dict[str, str] = {} + if user_input: + errors, flow_result = self._async_create_html5_entry(user_input) + if flow_result: + return flow_result + else: + user_input = {} + + return self.async_show_form( + data_schema=vol.Schema( + { + vol.Required( + ATTR_VAPID_EMAIL, default=user_input.get(ATTR_VAPID_EMAIL, "") + ): str, + vol.Optional(ATTR_VAPID_PRV_KEY): str, + } + ), + errors=errors, + ) + + async def async_step_import( + self: HTML5ConfigFlow, import_config: dict + ) -> ConfigFlowResult: + """Handle config import from yaml.""" + _, flow_result = self._async_create_html5_entry(import_config) + if not flow_result: + async_create_html5_issue(self.hass, False) + return self.async_abort(reason="invalid_config") + async_create_html5_issue(self.hass, True) + return flow_result diff --git a/homeassistant/components/html5/const.py b/homeassistant/components/html5/const.py index bf7eaca7e24..75826ab90c9 100644 --- a/homeassistant/components/html5/const.py +++ b/homeassistant/components/html5/const.py @@ -1,4 +1,9 @@ """Constants for the HTML5 component.""" DOMAIN = "html5" +DATA_HASS_CONFIG = "html5_hass_config" SERVICE_DISMISS = "dismiss" + +ATTR_VAPID_PUB_KEY = "vapid_pub_key" +ATTR_VAPID_PRV_KEY = "vapid_prv_key" +ATTR_VAPID_EMAIL = "vapid_email" diff --git a/homeassistant/components/html5/icons.json b/homeassistant/components/html5/icons.json index c3d6e27efda..d0a6013dd12 100644 --- a/homeassistant/components/html5/icons.json +++ b/homeassistant/components/html5/icons.json @@ -1,5 +1,7 @@ { "services": { - "dismiss": "mdi:bell-off" + "dismiss": { + "service": "mdi:bell-off" + } } } diff --git a/homeassistant/components/html5/issues.py b/homeassistant/components/html5/issues.py new file mode 100644 index 00000000000..8892562d347 --- /dev/null +++ 
b/homeassistant/components/html5/issues.py @@ -0,0 +1,50 @@ +"""Issues utility for HTML5.""" + +import logging + +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +SUCCESSFUL_IMPORT_TRANSLATION_KEY = "deprecated_yaml" +FAILED_IMPORT_TRANSLATION_KEY = "deprecated_yaml_import_issue" + +INTEGRATION_TITLE = "HTML5 Push Notifications" + + +@callback +def async_create_html5_issue(hass: HomeAssistant, import_success: bool) -> None: + """Create issues for HTML5.""" + if import_success: + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.4.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": INTEGRATION_TITLE, + }, + ) + else: + async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.4.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml_import_issue", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": INTEGRATION_TITLE, + }, + ) diff --git a/homeassistant/components/html5/manifest.json b/homeassistant/components/html5/manifest.json index f480086d153..c6cbd826544 100644 --- a/homeassistant/components/html5/manifest.json +++ b/homeassistant/components/html5/manifest.json @@ -1,10 +1,12 @@ { "domain": "html5", "name": "HTML5 Push Notifications", - "codeowners": [], + "codeowners": ["@alexyao2015"], + "config_flow": true, "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/html5", "iot_class": "cloud_push", "loggers": ["http_ece", "py_vapid", "pywebpush"], - "requirements": ["pywebpush==1.14.1"] + "requirements": ["pywebpush==1.14.1"], + "single_config_entry": true } diff --git a/homeassistant/components/html5/notify.py b/homeassistant/components/html5/notify.py index 8082ca37aa3..48cc0598479 100644 --- a/homeassistant/components/html5/notify.py +++ b/homeassistant/components/html5/notify.py @@ -29,6 +29,7 @@ from homeassistant.components.notify import ( PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA, BaseNotificationService, ) +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ATTR_NAME, URL_ROOT from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError @@ -38,32 +39,23 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import ensure_unique_string from homeassistant.util.json import JsonObjectType, load_json_object -from .const import DOMAIN, SERVICE_DISMISS +from .const import ( + ATTR_VAPID_EMAIL, + ATTR_VAPID_PRV_KEY, + ATTR_VAPID_PUB_KEY, + DOMAIN, + SERVICE_DISMISS, +) +from .issues import async_create_html5_issue _LOGGER = logging.getLogger(__name__) REGISTRATIONS_FILE = "html5_push_registrations.conf" -ATTR_VAPID_PUB_KEY = "vapid_pub_key" -ATTR_VAPID_PRV_KEY = "vapid_prv_key" -ATTR_VAPID_EMAIL = "vapid_email" - - -def gcm_api_deprecated(value): - """Warn user that GCM API config is deprecated.""" - if value: - _LOGGER.warning( - "Configuring html5_push_notifications via the GCM api" - " has been deprecated and stopped working since May 29," - " 2019. Use the VAPID configuration instead. 
For instructions," - " see https://www.home-assistant.io/integrations/html5/" - ) - return value - PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend( { - vol.Optional("gcm_sender_id"): vol.All(cv.string, gcm_api_deprecated), + vol.Optional("gcm_sender_id"): cv.string, vol.Optional("gcm_api_key"): cv.string, vol.Required(ATTR_VAPID_PUB_KEY): cv.string, vol.Required(ATTR_VAPID_PRV_KEY): cv.string, @@ -171,15 +163,30 @@ async def async_get_service( discovery_info: DiscoveryInfoType | None = None, ) -> HTML5NotificationService | None: """Get the HTML5 push notification service.""" + if config: + existing_config_entry = hass.config_entries.async_entries(DOMAIN) + if existing_config_entry: + async_create_html5_issue(hass, True) + return None + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + ) + return None + + if discovery_info is None: + return None + json_path = hass.config.path(REGISTRATIONS_FILE) registrations = await hass.async_add_executor_job(_load_config, json_path) - vapid_pub_key = config[ATTR_VAPID_PUB_KEY] - vapid_prv_key = config[ATTR_VAPID_PRV_KEY] - vapid_email = config[ATTR_VAPID_EMAIL] + vapid_pub_key = discovery_info[ATTR_VAPID_PUB_KEY] + vapid_prv_key = discovery_info[ATTR_VAPID_PRV_KEY] + vapid_email = discovery_info[ATTR_VAPID_EMAIL] - def websocket_appkey(hass, connection, msg): + def websocket_appkey(_hass, connection, msg): connection.send_message(websocket_api.result_message(msg["id"], vapid_pub_key)) websocket_api.async_register_command( diff --git a/homeassistant/components/html5/strings.json b/homeassistant/components/html5/strings.json index fa69025c43c..2c68223581a 100644 --- a/homeassistant/components/html5/strings.json +++ b/homeassistant/components/html5/strings.json @@ -1,4 +1,31 @@ { + "config": { + "step": { + "user": { + "data": { + "vapid_email": "[%key:common::config_flow::data::email%]", + "vapid_prv_key": "VAPID private key" + }, + "data_description": { + "vapid_email": "This contact address will be included in the metadata of each notification.", + "vapid_prv_key": "If not specified, one will be automatically generated." + } + } + }, + "error": { + "unknown": "Unknown error", + "invalid_prv_key": "Invalid private key" + }, + "abort": { + "invalid_config": "Invalid configuration" + } + }, + "issues": { + "deprecated_yaml_import_issue": { + "title": "HTML5 YAML configuration import failed", + "description": "Configuring HTML5 push notification using YAML has been deprecated. An automatic import of your existing configuration was attempted, but it failed.\n\nPlease remove the HTML5 push notification YAML configuration from your configuration.yaml file and reconfigure HTML5 push notification again manually." 
+ } + }, "services": { "dismiss": { "name": "Dismiss", diff --git a/homeassistant/components/http/__init__.py b/homeassistant/components/http/__init__.py index 5b68f91e494..95cdee9ab9e 100644 --- a/homeassistant/components/http/__init__.py +++ b/homeassistant/components/http/__init__.py @@ -30,10 +30,14 @@ import voluptuous as vol from yarl import URL from homeassistant.components.network import async_get_source_ip -from homeassistant.const import EVENT_HOMEASSISTANT_STOP, SERVER_PORT +from homeassistant.const import ( + EVENT_HOMEASSISTANT_START, + EVENT_HOMEASSISTANT_STOP, + SERVER_PORT, +) from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import frame, storage +from homeassistant.helpers import frame, issue_registry as ir, storage import homeassistant.helpers.config_validation as cv from homeassistant.helpers.http import ( KEY_ALLOW_CONFIGURED_CORS, @@ -264,6 +268,32 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: local_ip, host, server_port, ssl_certificate is not None ) + @callback + def _async_check_ssl_issue(_: Event) -> None: + if ( + ssl_certificate is not None + and (hass.config.external_url or hass.config.internal_url) is None + ): + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.cloud import ( + CloudNotAvailable, + async_remote_ui_url, + ) + + try: + async_remote_ui_url(hass) + except CloudNotAvailable: + ir.async_create_issue( + hass, + DOMAIN, + "ssl_configured_without_configured_urls", + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="ssl_configured_without_configured_urls", + ) + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _async_check_ssl_issue) + return True @@ -296,7 +326,8 @@ class HomeAssistantApplication(web.Application): protocol, writer, task, - loop=self._loop, + # loop will never be None when called from aiohttp + loop=self._loop, # type: ignore[arg-type] client_max_size=self._client_max_size, ) @@ -475,15 +506,14 @@ class HomeAssistantHTTP: self, url_path: str, path: str, cache_headers: bool = True ) -> None: """Register a folder or file to serve as a static path.""" - frame.report( + frame.report_usage( "calls hass.http.register_static_path which is deprecated because " "it does blocking I/O in the event loop, instead " "call `await hass.http.async_register_static_paths(" - f'[StaticPathConfig("{url_path}", "{path}", {cache_headers})])`; ' - "This function will be removed in 2025.7", + f'[StaticPathConfig("{url_path}", "{path}", {cache_headers})])`', exclude_integrations={"http"}, - error_if_core=False, - error_if_integration=False, + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.7", ) configs = [StaticPathConfig(url_path, path, cache_headers)] resources = self._make_static_resources(configs) diff --git a/homeassistant/components/http/auth.py b/homeassistant/components/http/auth.py index 0f43aac0115..7e00cc70eaa 100644 --- a/homeassistant/components/http/auth.py +++ b/homeassistant/components/http/auth.py @@ -34,7 +34,7 @@ _LOGGER = logging.getLogger(__name__) DATA_API_PASSWORD: Final = "api_password" DATA_SIGN_SECRET: Final = "http.auth.sign_secret" SIGN_QUERY_PARAM: Final = "authSig" -SAFE_QUERY_PARAMS: Final = ["height", "width"] +SAFE_QUERY_PARAMS: Final = frozenset(("height", "width")) STORAGE_VERSION = 1 STORAGE_KEY = "http.auth" diff --git a/homeassistant/components/http/ban.py b/homeassistant/components/http/ban.py index 
dd5f1ed1b05..c8fc8ffb11b 100644 --- a/homeassistant/components/http/ban.py +++ b/homeassistant/components/http/ban.py @@ -27,6 +27,7 @@ from homeassistant.config import load_yaml_config_file from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.hassio import get_supervisor_ip, is_hassio from homeassistant.util import dt as dt_util, yaml from .const import KEY_HASS @@ -149,12 +150,8 @@ async def process_wrong_login(request: Request) -> None: request.app[KEY_FAILED_LOGIN_ATTEMPTS][remote_addr] += 1 # Supervisor IP should never be banned - if "hassio" in hass.config.components: - # pylint: disable-next=import-outside-toplevel - from homeassistant.components import hassio - - if hassio.get_supervisor_ip() == str(remote_addr): - return + if is_hassio(hass) and str(remote_addr) == get_supervisor_ip(): + return if ( request.app[KEY_FAILED_LOGIN_ATTEMPTS][remote_addr] diff --git a/homeassistant/components/http/static.py b/homeassistant/components/http/static.py index 29c5840a4bf..9ca34af3741 100644 --- a/homeassistant/components/http/static.py +++ b/homeassistant/components/http/static.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Mapping from pathlib import Path +import sys from typing import Final from aiohttp.hdrs import CACHE_CONTROL, CONTENT_TYPE @@ -17,6 +18,15 @@ CACHE_HEADER = f"public, max-age={CACHE_TIME}" CACHE_HEADERS: Mapping[str, str] = {CACHE_CONTROL: CACHE_HEADER} RESPONSE_CACHE: LRU[tuple[str, Path], tuple[Path, str]] = LRU(512) +if sys.version_info >= (3, 13): + # guess_type is soft-deprecated in 3.13 + # for paths and should only be used for + # URLs. guess_file_type should be used + # for paths instead. + _GUESSER = CONTENT_TYPES.guess_file_type +else: + _GUESSER = CONTENT_TYPES.guess_type + class CachingStaticResource(StaticResource): """Static Resource handler that will add cache headers.""" @@ -37,9 +47,7 @@ class CachingStaticResource(StaticResource): # Must be directory index; ignore caching return response file_path = response._path # noqa: SLF001 - response.content_type = ( - CONTENT_TYPES.guess_type(file_path)[0] or FALLBACK_CONTENT_TYPE - ) + response.content_type = _GUESSER(file_path)[0] or FALLBACK_CONTENT_TYPE # Cache actual header after setter construction. content_type = response.headers[CONTENT_TYPE] RESPONSE_CACHE[key] = (file_path, content_type) diff --git a/homeassistant/components/http/strings.json b/homeassistant/components/http/strings.json new file mode 100644 index 00000000000..5dbd8faec20 --- /dev/null +++ b/homeassistant/components/http/strings.json @@ -0,0 +1,8 @@ +{ + "issues": { + "ssl_configured_without_configured_urls": { + "title": "SSL is configured without an external URL or internal URL", + "description": "Home Assistant detected that SSL has been set up on your instance, however, no custom external internet URL has been set.\n\nThis may result in unexpected behavior. Text-to-speech may fail, and integrations may not be able to connect back to your instance correctly.\n\nTo address this issue, go to Settings > System > Network; under the \"Home Assistant URL\" section, configure your new \"Internet\" and \"Local network\" addresses that match your new SSL configuration." 
+ } + } +} diff --git a/homeassistant/components/huawei_lte/__init__.py b/homeassistant/components/huawei_lte/__init__.py index b0c40c71658..a5a60d8406d 100644 --- a/homeassistant/components/huawei_lte/__init__.py +++ b/homeassistant/components/huawei_lte/__init__.py @@ -48,8 +48,7 @@ from homeassistant.helpers import ( entity_registry as er, ) from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.service import async_register_admin_service from homeassistant.helpers.typing import ConfigType @@ -209,7 +208,7 @@ class Router: else: _LOGGER.debug("failed") return - _LOGGER.info( + _LOGGER.warning( "%s requires authorization, excluding from future updates", key ) self.subscriptions.pop(key) @@ -221,7 +220,7 @@ class Router: exc, (ResponseErrorNotSupportedException, ExpatError) ) and exc.code not in (-1, 100006): raise - _LOGGER.info( + _LOGGER.warning( "%s apparently not supported by device, excluding from future updates", key, ) @@ -559,74 +558,13 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> if isinstance(recipient, str): options[CONF_RECIPIENT] = [x.strip() for x in recipient.split(",")] hass.config_entries.async_update_entry(config_entry, options=options, version=2) - _LOGGER.info("Migrated config entry to version %d", config_entry.version) + _LOGGER.debug("Migrated config entry to version %d", config_entry.version) if config_entry.version == 2: data = dict(config_entry.data) data[CONF_MAC] = [] hass.config_entries.async_update_entry(config_entry, data=data, version=3) - _LOGGER.info("Migrated config entry to version %d", config_entry.version) + _LOGGER.debug("Migrated config entry to version %d", config_entry.version) # There can be no longer needed *_from_yaml data and options things left behind # from pre-2022.4ish; they can be removed while at it when/if we eventually bump and # migrate to version > 3 for some other reason. 
return True - - -class HuaweiLteBaseEntity(Entity): - """Huawei LTE entity base class.""" - - _available = True - _attr_has_entity_name = True - _attr_should_poll = False - - def __init__(self, router: Router) -> None: - """Initialize.""" - self.router = router - self._unsub_handlers: list[Callable] = [] - - @property - def _device_unique_id(self) -> str: - """Return unique ID for entity within a router.""" - raise NotImplementedError - - @property - def unique_id(self) -> str: - """Return unique ID for entity.""" - return f"{self.router.config_entry.unique_id}-{self._device_unique_id}" - - @property - def available(self) -> bool: - """Return whether the entity is available.""" - return self._available - - async def async_update(self) -> None: - """Update state.""" - raise NotImplementedError - - async def async_added_to_hass(self) -> None: - """Connect to update signals.""" - self._unsub_handlers.append( - async_dispatcher_connect(self.hass, UPDATE_SIGNAL, self._async_maybe_update) - ) - - async def _async_maybe_update(self, config_entry_unique_id: str) -> None: - """Update state if the update signal comes from our router.""" - if config_entry_unique_id == self.router.config_entry.unique_id: - self.async_schedule_update_ha_state(True) - - async def async_will_remove_from_hass(self) -> None: - """Invoke unsubscription handlers.""" - for unsub in self._unsub_handlers: - unsub() - self._unsub_handlers.clear() - - -class HuaweiLteBaseEntityWithDevice(HuaweiLteBaseEntity): - """Base entity with device info.""" - - @property - def device_info(self) -> DeviceInfo: - """Get info for matching with parent router.""" - return DeviceInfo( - connections=self.router.device_connections, - identifiers=self.router.device_identifiers, - ) diff --git a/homeassistant/components/huawei_lte/binary_sensor.py b/homeassistant/components/huawei_lte/binary_sensor.py index c90a7854a91..06b859cea84 100644 --- a/homeassistant/components/huawei_lte/binary_sensor.py +++ b/homeassistant/components/huawei_lte/binary_sensor.py @@ -16,13 +16,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HuaweiLteBaseEntityWithDevice from .const import ( DOMAIN, KEY_MONITORING_CHECK_NOTIFICATIONS, KEY_MONITORING_STATUS, KEY_WLAN_WIFI_FEATURE_SWITCH, ) +from .entity import HuaweiLteBaseEntityWithDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/huawei_lte/button.py b/homeassistant/components/huawei_lte/button.py index f494836e80d..55b009d25bf 100644 --- a/homeassistant/components/huawei_lte/button.py +++ b/homeassistant/components/huawei_lte/button.py @@ -16,8 +16,8 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform -from . 
import HuaweiLteBaseEntityWithDevice from .const import DOMAIN +from .entity import HuaweiLteBaseEntityWithDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/huawei_lte/config_flow.py b/homeassistant/components/huawei_lte/config_flow.py index ce6131c784f..08fdae50c51 100644 --- a/homeassistant/components/huawei_lte/config_flow.py +++ b/homeassistant/components/huawei_lte/config_flow.py @@ -60,13 +60,16 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 3 + manufacturer: str | None = None + url: str | None = None + @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def _async_show_user_form( self, @@ -81,10 +84,7 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): { vol.Required( CONF_URL, - default=user_input.get( - CONF_URL, - self.context.get(CONF_URL, ""), - ), + default=user_input.get(CONF_URL, self.url or ""), ): str, vol.Optional( CONF_VERIFY_SSL, @@ -241,7 +241,7 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): user_input.update( { CONF_MAC: get_device_macs(info, wlan_settings), - CONF_MANUFACTURER: self.context.get(CONF_MANUFACTURER), + CONF_MANUFACTURER: self.manufacturer, } ) @@ -302,11 +302,12 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): { "title_placeholders": { CONF_NAME: discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME) - }, - CONF_MANUFACTURER: discovery_info.upnp.get(ssdp.ATTR_UPNP_MANUFACTURER), - CONF_URL: url, + or "Huawei LTE" + } } ) + self.manufacturer = discovery_info.upnp.get(ssdp.ATTR_UPNP_MANUFACTURER) + self.url = url return await self._async_show_user_form() async def async_step_reauth( @@ -319,8 +320,7 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry + entry = self._get_reauth_entry() if not user_input: return await self._async_show_reauth_form( user_input={ @@ -339,18 +339,12 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): user_input=user_input, errors=errors ) - self.hass.config_entries.async_update_entry(entry, data=new_data) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(entry, data=new_data) class OptionsFlowHandler(OptionsFlow): """Huawei LTE options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/huawei_lte/device_tracker.py b/homeassistant/components/huawei_lte/device_tracker.py index 0e35208dcce..df849d4f712 100644 --- a/homeassistant/components/huawei_lte/device_tracker.py +++ b/homeassistant/components/huawei_lte/device_tracker.py @@ -11,7 +11,6 @@ from stringcase import snakecase from homeassistant.components.device_tracker import ( DOMAIN as DEVICE_TRACKER_DOMAIN, ScannerEntity, - SourceType, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback @@ -20,7 +19,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from 
homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HuaweiLteBaseEntity, Router +from . import Router from .const import ( CONF_TRACK_WIRED_CLIENTS, DEFAULT_TRACK_WIRED_CLIENTS, @@ -29,6 +28,7 @@ from .const import ( KEY_WLAN_HOST_LIST, UPDATE_SIGNAL, ) +from .entity import HuaweiLteBaseEntity _LOGGER = logging.getLogger(__name__) @@ -194,11 +194,6 @@ class HuaweiLteScannerEntity(HuaweiLteBaseEntity, ScannerEntity): def _device_unique_id(self) -> str: return self.mac_address - @property - def source_type(self) -> SourceType: - """Return SourceType.ROUTER.""" - return SourceType.ROUTER - @property def ip_address(self) -> str | None: """Return the primary ip address of the device.""" diff --git a/homeassistant/components/huawei_lte/entity.py b/homeassistant/components/huawei_lte/entity.py new file mode 100644 index 00000000000..99d7ca112c4 --- /dev/null +++ b/homeassistant/components/huawei_lte/entity.py @@ -0,0 +1,76 @@ +"""Support for Huawei LTE routers.""" + +from __future__ import annotations + +from collections.abc import Callable +from datetime import timedelta + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from . import Router +from .const import UPDATE_SIGNAL + +SCAN_INTERVAL = timedelta(seconds=10) + + +class HuaweiLteBaseEntity(Entity): + """Huawei LTE entity base class.""" + + _available = True + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__(self, router: Router) -> None: + """Initialize.""" + self.router = router + self._unsub_handlers: list[Callable] = [] + + @property + def _device_unique_id(self) -> str: + """Return unique ID for entity within a router.""" + raise NotImplementedError + + @property + def unique_id(self) -> str: + """Return unique ID for entity.""" + return f"{self.router.config_entry.unique_id}-{self._device_unique_id}" + + @property + def available(self) -> bool: + """Return whether the entity is available.""" + return self._available + + async def async_update(self) -> None: + """Update state.""" + raise NotImplementedError + + async def async_added_to_hass(self) -> None: + """Connect to update signals.""" + self._unsub_handlers.append( + async_dispatcher_connect(self.hass, UPDATE_SIGNAL, self._async_maybe_update) + ) + + async def _async_maybe_update(self, config_entry_unique_id: str) -> None: + """Update state if the update signal comes from our router.""" + if config_entry_unique_id == self.router.config_entry.unique_id: + self.async_schedule_update_ha_state(True) + + async def async_will_remove_from_hass(self) -> None: + """Invoke unsubscription handlers.""" + for unsub in self._unsub_handlers: + unsub() + self._unsub_handlers.clear() + + +class HuaweiLteBaseEntityWithDevice(HuaweiLteBaseEntity): + """Base entity with device info.""" + + @property + def device_info(self) -> DeviceInfo: + """Get info for matching with parent router.""" + return DeviceInfo( + connections=self.router.device_connections, + identifiers=self.router.device_identifiers, + ) diff --git a/homeassistant/components/huawei_lte/icons.json b/homeassistant/components/huawei_lte/icons.json index d105702bf51..a338cc65ed4 100644 --- a/homeassistant/components/huawei_lte/icons.json +++ b/homeassistant/components/huawei_lte/icons.json @@ -53,7 +53,11 @@ } }, "services": { - "resume_integration": "mdi:play-pause", - "suspend_integration": "mdi:pause" + "resume_integration": { + "service": 
"mdi:play-pause" + }, + "suspend_integration": { + "service": "mdi:pause" + } } } diff --git a/homeassistant/components/huawei_lte/manifest.json b/homeassistant/components/huawei_lte/manifest.json index 9a44024111c..6720d6718ef 100644 --- a/homeassistant/components/huawei_lte/manifest.json +++ b/homeassistant/components/huawei_lte/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["huawei_lte_api.Session"], "requirements": [ - "huawei-lte-api==1.7.3", + "huawei-lte-api==1.10.0", "stringcase==1.2.0", "url-normalize==1.4.3" ], diff --git a/homeassistant/components/huawei_lte/select.py b/homeassistant/components/huawei_lte/select.py index bf8f65a8ba5..d8a16ae2f79 100644 --- a/homeassistant/components/huawei_lte/select.py +++ b/homeassistant/components/huawei_lte/select.py @@ -21,8 +21,9 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UNDEFINED -from . import HuaweiLteBaseEntityWithDevice, Router +from . import Router from .const import DOMAIN, KEY_NET_NET_MODE +from .entity import HuaweiLteBaseEntityWithDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/huawei_lte/sensor.py b/homeassistant/components/huawei_lte/sensor.py index 2a7fe5c29b2..86965e89dd0 100644 --- a/homeassistant/components/huawei_lte/sensor.py +++ b/homeassistant/components/huawei_lte/sensor.py @@ -30,7 +30,7 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import HuaweiLteBaseEntityWithDevice, Router +from . import Router from .const import ( DOMAIN, KEY_DEVICE_INFORMATION, @@ -44,6 +44,7 @@ from .const import ( KEY_SMS_SMS_COUNT, SENSOR_KEYS, ) +from .entity import HuaweiLteBaseEntityWithDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/huawei_lte/switch.py b/homeassistant/components/huawei_lte/switch.py index 3a499851f9a..07fd89d0b6c 100644 --- a/homeassistant/components/huawei_lte/switch.py +++ b/homeassistant/components/huawei_lte/switch.py @@ -15,12 +15,12 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import HuaweiLteBaseEntityWithDevice from .const import ( DOMAIN, KEY_DIALUP_MOBILE_DATASWITCH, KEY_WLAN_WIFI_GUEST_NETWORK_SWITCH, ) +from .entity import HuaweiLteBaseEntityWithDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/hue/config_flow.py b/homeassistant/components/hue/config_flow.py index fb32f568ee1..8d17f810461 100644 --- a/homeassistant/components/hue/config_flow.py +++ b/homeassistant/components/hue/config_flow.py @@ -57,8 +57,8 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): ) -> HueV1OptionsFlowHandler | HueV2OptionsFlowHandler: """Get the options flow for this handler.""" if config_entry.data.get(CONF_API_VERSION, 1) == 1: - return HueV1OptionsFlowHandler(config_entry) - return HueV2OptionsFlowHandler(config_entry) + return HueV1OptionsFlowHandler() + return HueV2OptionsFlowHandler() def __init__(self) -> None: """Initialize the Hue flow.""" @@ -258,7 +258,7 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): await self._async_handle_discovery_without_unique_id() return await self.async_step_link() - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a new bridge as a config entry. This flow is triggered by `async_setup` for both configured and @@ -268,9 +268,9 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): This flow is also triggered by `async_step_discovery`. """ # Check if host exists, abort if so. - self._async_abort_entries_match({"host": import_info["host"]}) + self._async_abort_entries_match({"host": import_data["host"]}) - bridge = await self._get_bridge(import_info["host"]) + bridge = await self._get_bridge(import_data["host"]) if bridge is None: return self.async_abort(reason="cannot_connect") self.bridge = bridge @@ -280,10 +280,6 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): class HueV1OptionsFlowHandler(OptionsFlow): """Handle Hue options for V1 implementation.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Hue options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -315,10 +311,6 @@ class HueV1OptionsFlowHandler(OptionsFlow): class HueV2OptionsFlowHandler(OptionsFlow): """Handle Hue options for V2 implementation.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Hue options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/hue/device_trigger.py b/homeassistant/components/hue/device_trigger.py index 4104c667d74..dba5aba81da 100644 --- a/homeassistant/components/hue/device_trigger.py +++ b/homeassistant/components/hue/device_trigger.py @@ -4,9 +4,7 @@ from __future__ import annotations from typing import TYPE_CHECKING, Any -from homeassistant.components.device_automation.exceptions import ( - InvalidDeviceAutomationConfig, -) +from homeassistant.components.device_automation import InvalidDeviceAutomationConfig from homeassistant.const import CONF_DEVICE_ID from homeassistant.core import CALLBACK_TYPE from homeassistant.helpers import device_registry as dr diff --git a/homeassistant/components/hue/icons.json b/homeassistant/components/hue/icons.json index 9371ae5843e..31464308b0a 100644 --- a/homeassistant/components/hue/icons.json +++ b/homeassistant/components/hue/icons.json @@ -1,6 +1,10 @@ { "services": { - 
"hue_activate_scene": "mdi:palette", - "activate_scene": "mdi:palette" + "hue_activate_scene": { + "service": "mdi:palette" + }, + "activate_scene": { + "service": "mdi:palette" + } } } diff --git a/homeassistant/components/hue/manifest.json b/homeassistant/components/hue/manifest.json index dbd9b511977..22f1d3991e7 100644 --- a/homeassistant/components/hue/manifest.json +++ b/homeassistant/components/hue/manifest.json @@ -10,7 +10,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["aiohue"], - "quality_scale": "platinum", "requirements": ["aiohue==4.7.3"], "zeroconf": ["_hue._tcp.local."] } diff --git a/homeassistant/components/hue/scene.py b/homeassistant/components/hue/scene.py index 6808ddb5353..1d83804820d 100644 --- a/homeassistant/components/hue/scene.py +++ b/homeassistant/components/hue/scene.py @@ -130,10 +130,15 @@ class HueSceneEntity(HueSceneEntityBase): @property def is_dynamic(self) -> bool: """Return if this scene has a dynamic color palette.""" - if self.resource.palette.color and len(self.resource.palette.color) > 1: + if ( + self.resource.palette + and self.resource.palette.color + and len(self.resource.palette.color) > 1 + ): return True if ( - self.resource.palette.color_temperature + self.resource.palette + and self.resource.palette.color_temperature and len(self.resource.palette.color_temperature) > 1 ): return True diff --git a/homeassistant/components/hue/strings.json b/homeassistant/components/hue/strings.json index ab1d0fb58ad..2f7f2e55561 100644 --- a/homeassistant/components/hue/strings.json +++ b/homeassistant/components/hue/strings.json @@ -137,15 +137,15 @@ "services": { "hue_activate_scene": { "name": "Activate scene", - "description": "Activates a hue scene stored in the hue hub.", + "description": "Activates a Hue scene stored in the Hue hub.", "fields": { "group_name": { "name": "Group", - "description": "Name of hue group/room from the hue app." + "description": "Name of Hue group/room from the Hue app." }, "scene_name": { "name": "Scene", - "description": "Name of hue scene from the hue app." + "description": "Name of Hue scene from the Hue app." 
}, "dynamic": { "name": "Dynamic", diff --git a/homeassistant/components/hue/v1/binary_sensor.py b/homeassistant/components/hue/v1/binary_sensor.py index 01524b48b79..325c4d022fa 100644 --- a/homeassistant/components/hue/v1/binary_sensor.py +++ b/homeassistant/components/hue/v1/binary_sensor.py @@ -25,6 +25,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities): ) +# pylint: disable-next=hass-enforce-class-module class HuePresence(GenericZLLSensor, BinarySensorEntity): """The presence sensor entity for a Hue motion sensor device.""" diff --git a/homeassistant/components/hue/v1/device_trigger.py b/homeassistant/components/hue/v1/device_trigger.py index 554926cdc70..493c668f549 100644 --- a/homeassistant/components/hue/v1/device_trigger.py +++ b/homeassistant/components/hue/v1/device_trigger.py @@ -6,8 +6,8 @@ from typing import TYPE_CHECKING import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event as event_trigger diff --git a/homeassistant/components/hue/v1/light.py b/homeassistant/components/hue/v1/light.py index 68e05932e7a..78a06784b8d 100644 --- a/homeassistant/components/hue/v1/light.py +++ b/homeassistant/components/hue/v1/light.py @@ -12,7 +12,7 @@ import aiohue from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -35,7 +35,7 @@ from homeassistant.helpers.update_coordinator import ( DataUpdateCoordinator, UpdateFailed, ) -from homeassistant.util import color +from homeassistant.util import color as color_util from ..bridge import HueBridge from ..const import ( @@ -305,6 +305,7 @@ def hass_to_hue_brightness(value): return max(1, round((value / 255) * 254)) +# pylint: disable-next=hass-enforce-class-module class HueLight(CoordinatorEntity, LightEntity): """Representation of a Hue light.""" @@ -361,7 +362,7 @@ class HueLight(CoordinatorEntity, LightEntity): "bulb in the Philips Hue App." ) LOGGER.warning(err, self.name) - if self.gamut and not color.check_valid_gamut(self.gamut): + if self.gamut and not color_util.check_valid_gamut(self.gamut): err = "Color gamut of %s: %s, not valid, setting gamut to None." 
LOGGER.debug(err, self.name, str(self.gamut)) self.gamut_typ = GAMUT_TYPE_UNAVAILABLE @@ -426,49 +427,50 @@ class HueLight(CoordinatorEntity, LightEntity): source = self.light.action if self.is_group else self.light.state if mode in ("xy", "hs") and "xy" in source: - return color.color_xy_to_hs(*source["xy"], self.gamut) + return color_util.color_xy_to_hs(*source["xy"], self.gamut) return None @property - def color_temp(self): - """Return the CT color value.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" # Don't return color temperature unless in color temperature mode if self._color_mode != "ct": return None - if self.is_group: - return self.light.action.get("ct") - return self.light.state.get("ct") + ct = ( + self.light.action.get("ct") if self.is_group else self.light.state.get("ct") + ) + return color_util.color_temperature_mired_to_kelvin(ct) if ct else None @property - def min_mireds(self): - """Return the coldest color_temp that this light supports.""" + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" if self.is_group: - return super().min_mireds + return super().max_color_temp_kelvin min_mireds = self.light.controlcapabilities.get("ct", {}).get("min") # We filter out '0' too, which can be incorrectly reported by 3rd party buls if not min_mireds: - return super().min_mireds + return super().max_color_temp_kelvin - return min_mireds + return color_util.color_temperature_mired_to_kelvin(min_mireds) @property - def max_mireds(self): - """Return the warmest color_temp that this light supports.""" + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" if self.is_group: - return super().max_mireds + return super().min_color_temp_kelvin if self.is_livarno: return 500 max_mireds = self.light.controlcapabilities.get("ct", {}).get("max") if not max_mireds: - return super().max_mireds + return super().min_color_temp_kelvin - return max_mireds + return color_util.color_temperature_mired_to_kelvin(max_mireds) @property def is_on(self): @@ -540,11 +542,14 @@ class HueLight(CoordinatorEntity, LightEntity): # Philips hue bulb models respond differently to hue/sat # requests, so we convert to XY first to ensure a consistent # color. 
- xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut) + xy_color = color_util.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut) command["xy"] = xy_color - elif ATTR_COLOR_TEMP in kwargs: - temp = kwargs[ATTR_COLOR_TEMP] - command["ct"] = max(self.min_mireds, min(temp, self.max_mireds)) + elif ATTR_COLOR_TEMP_KELVIN in kwargs: + temp_k = max( + self.min_color_temp_kelvin, + min(self.max_color_temp_kelvin, kwargs[ATTR_COLOR_TEMP_KELVIN]), + ) + command["ct"] = color_util.color_temperature_kelvin_to_mired(temp_k) if ATTR_BRIGHTNESS in kwargs: command["bri"] = hass_to_hue_brightness(kwargs[ATTR_BRIGHTNESS]) diff --git a/homeassistant/components/hue/v1/sensor.py b/homeassistant/components/hue/v1/sensor.py index 9a85f83f3e8..88d494ed44b 100644 --- a/homeassistant/components/hue/v1/sensor.py +++ b/homeassistant/components/hue/v1/sensor.py @@ -32,10 +32,12 @@ async def async_setup_entry(hass, config_entry, async_add_entities): await bridge.sensor_manager.async_register_component("sensor", async_add_entities) +# pylint: disable-next=hass-enforce-class-module class GenericHueGaugeSensorEntity(GenericZLLSensor, SensorEntity): """Parent class for all 'gauge' Hue device sensors.""" +# pylint: disable-next=hass-enforce-class-module class HueLightLevel(GenericHueGaugeSensorEntity): """The light level sensor entity for a Hue motion sensor device.""" @@ -71,6 +73,7 @@ class HueLightLevel(GenericHueGaugeSensorEntity): return attributes +# pylint: disable-next=hass-enforce-class-module class HueTemperature(GenericHueGaugeSensorEntity): """The temperature sensor entity for a Hue motion sensor device.""" @@ -87,6 +90,7 @@ class HueTemperature(GenericHueGaugeSensorEntity): return self.sensor.temperature / 100 +# pylint: disable-next=hass-enforce-class-module class HueBattery(GenericHueSensor, SensorEntity): """Battery class for when a batt-powered device is only represented as an event.""" diff --git a/homeassistant/components/hue/v1/sensor_base.py b/homeassistant/components/hue/v1/sensor_base.py index bac02c45209..393069b0c7c 100644 --- a/homeassistant/components/hue/v1/sensor_base.py +++ b/homeassistant/components/hue/v1/sensor_base.py @@ -165,7 +165,7 @@ class SensorManager: self._component_add_entities[platform](value) -class GenericHueSensor(GenericHueDevice, entity.Entity): +class GenericHueSensor(GenericHueDevice, entity.Entity): # pylint: disable=hass-enforce-class-module """Representation of a Hue sensor.""" should_poll = False diff --git a/homeassistant/components/hue/v1/sensor_device.py b/homeassistant/components/hue/v1/sensor_device.py index 1ff97af2e62..cb0a2721334 100644 --- a/homeassistant/components/hue/v1/sensor_device.py +++ b/homeassistant/components/hue/v1/sensor_device.py @@ -10,7 +10,7 @@ from ..const import ( ) -class GenericHueDevice(entity.Entity): +class GenericHueDevice(entity.Entity): # pylint: disable=hass-enforce-class-module """Representation of a Hue device.""" def __init__(self, sensor, name, bridge, primary_sensor=None): diff --git a/homeassistant/components/hue/v2/binary_sensor.py b/homeassistant/components/hue/v2/binary_sensor.py index 650a9384e35..5054ab6e817 100644 --- a/homeassistant/components/hue/v2/binary_sensor.py +++ b/homeassistant/components/hue/v2/binary_sensor.py @@ -82,6 +82,7 @@ async def async_setup_entry( register_items(api.sensors.tamper, HueTamperSensor) +# pylint: disable-next=hass-enforce-class-module class HueMotionSensor(HueBaseEntity, BinarySensorEntity): """Representation of a Hue Motion sensor.""" @@ -103,6 +104,7 @@ class 
HueMotionSensor(HueBaseEntity, BinarySensorEntity): return self.resource.motion.value +# pylint: disable-next=hass-enforce-class-module class HueEntertainmentActiveSensor(HueBaseEntity, BinarySensorEntity): """Representation of a Hue Entertainment Configuration as binary sensor.""" @@ -126,6 +128,7 @@ class HueEntertainmentActiveSensor(HueBaseEntity, BinarySensorEntity): return self.resource.metadata.name +# pylint: disable-next=hass-enforce-class-module class HueContactSensor(HueBaseEntity, BinarySensorEntity): """Representation of a Hue Contact sensor.""" @@ -147,6 +150,7 @@ class HueContactSensor(HueBaseEntity, BinarySensorEntity): return self.resource.contact_report.state != ContactState.CONTACT +# pylint: disable-next=hass-enforce-class-module class HueTamperSensor(HueBaseEntity, BinarySensorEntity): """Representation of a Hue Tamper sensor.""" diff --git a/homeassistant/components/hue/v2/entity.py b/homeassistant/components/hue/v2/entity.py index 6575d7f4702..e472009286d 100644 --- a/homeassistant/components/hue/v2/entity.py +++ b/homeassistant/components/hue/v2/entity.py @@ -34,7 +34,7 @@ RESOURCE_TYPE_NAMES = { } -class HueBaseEntity(Entity): +class HueBaseEntity(Entity): # pylint: disable=hass-enforce-class-module """Generic Entity Class for a Hue resource.""" _attr_should_poll = False diff --git a/homeassistant/components/hue/v2/group.py b/homeassistant/components/hue/v2/group.py index 34797b0e42c..c7f966ce9f2 100644 --- a/homeassistant/components/hue/v2/group.py +++ b/homeassistant/components/hue/v2/group.py @@ -12,7 +12,7 @@ from aiohue.v2.models.feature import DynamicStatus from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_FLASH, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.helpers.entity_registry as er +from homeassistant.util import color as color_util from ..bridge import HueBridge from ..const import DOMAIN @@ -76,6 +77,7 @@ async def async_setup_entry( ) +# pylint: disable-next=hass-enforce-class-module class GroupedHueLight(HueBaseEntity, LightEntity): """Representation of a Grouped Hue light.""" @@ -156,7 +158,7 @@ class GroupedHueLight(HueBaseEntity, LightEntity): """Turn the grouped_light on.""" transition = normalize_hue_transition(kwargs.get(ATTR_TRANSITION)) xy_color = kwargs.get(ATTR_XY_COLOR) - color_temp = normalize_hue_colortemp(kwargs.get(ATTR_COLOR_TEMP)) + color_temp = normalize_hue_colortemp(kwargs.get(ATTR_COLOR_TEMP_KELVIN)) brightness = normalize_hue_brightness(kwargs.get(ATTR_BRIGHTNESS)) flash = kwargs.get(ATTR_FLASH) @@ -234,9 +236,21 @@ class GroupedHueLight(HueBaseEntity, LightEntity): if color_temp := light.color_temperature: lights_with_color_temp_support += 1 # we assume mired values from the first capable light - self._attr_color_temp = color_temp.mirek - self._attr_max_mireds = color_temp.mirek_schema.mirek_maximum - self._attr_min_mireds = color_temp.mirek_schema.mirek_minimum + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(color_temp.mirek) + if color_temp.mirek + else None + ) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + color_temp.mirek_schema.mirek_maximum + ) + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + color_temp.mirek_schema.mirek_minimum + ) + 
) if color_temp.mirek is not None and color_temp.mirek_valid: lights_in_colortemp_mode += 1 if color := light.color: diff --git a/homeassistant/components/hue/v2/helpers.py b/homeassistant/components/hue/v2/helpers.py index 480296760e7..384d2a30596 100644 --- a/homeassistant/components/hue/v2/helpers.py +++ b/homeassistant/components/hue/v2/helpers.py @@ -2,6 +2,8 @@ from __future__ import annotations +from homeassistant.util import color as color_util + def normalize_hue_brightness(brightness: float | None) -> float | None: """Return calculated brightness values.""" @@ -21,10 +23,11 @@ def normalize_hue_transition(transition: float | None) -> float | None: return transition -def normalize_hue_colortemp(colortemp: int | None) -> int | None: +def normalize_hue_colortemp(colortemp_k: int | None) -> int | None: """Return color temperature within Hue's ranges.""" - if colortemp is not None: - # Hue only accepts a range between 153..500 - colortemp = min(colortemp, 500) - colortemp = max(colortemp, 153) - return colortemp + if colortemp_k is None: + return None + colortemp = color_util.color_temperature_kelvin_to_mired(colortemp_k) + # Hue only accepts a range between 153..500 + colortemp = min(colortemp, 500) + return max(colortemp, 153) diff --git a/homeassistant/components/hue/v2/light.py b/homeassistant/components/hue/v2/light.py index b908ec83877..86d8cc93e54 100644 --- a/homeassistant/components/hue/v2/light.py +++ b/homeassistant/components/hue/v2/light.py @@ -13,7 +13,7 @@ from aiohue.v2.models.light import Light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_TRANSITION, @@ -28,6 +28,7 @@ from homeassistant.components.light import ( from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from ..bridge import HueBridge from ..const import DOMAIN @@ -39,9 +40,9 @@ from .helpers import ( ) EFFECT_NONE = "None" -FALLBACK_MIN_MIREDS = 153 # 6500 K -FALLBACK_MAX_MIREDS = 500 # 2000 K -FALLBACK_MIREDS = 173 # halfway +FALLBACK_MIN_KELVIN = 6500 +FALLBACK_MAX_KELVIN = 2000 +FALLBACK_KELVIN = 5800 # halfway async def async_setup_entry( @@ -68,6 +69,7 @@ async def async_setup_entry( ) +# pylint: disable-next=hass-enforce-class-module class HueLight(HueBaseEntity, LightEntity): """Representation of a Hue light.""" @@ -163,28 +165,32 @@ class HueLight(HueBaseEntity, LightEntity): return None @property - def color_temp(self) -> int: - """Return the color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" if color_temp := self.resource.color_temperature: - return color_temp.mirek + return color_util.color_temperature_mired_to_kelvin(color_temp.mirek) # return a fallback value to prevent issues with mired->kelvin conversions - return FALLBACK_MIREDS + return FALLBACK_KELVIN @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" if color_temp := self.resource.color_temperature: - return color_temp.mirek_schema.mirek_minimum + return color_util.color_temperature_mired_to_kelvin( + color_temp.mirek_schema.mirek_minimum + ) # return a fallback value to prevent issues with mired->kelvin conversions - return 
FALLBACK_MIN_MIREDS + return FALLBACK_MAX_KELVIN @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" if color_temp := self.resource.color_temperature: - return color_temp.mirek_schema.mirek_maximum + return color_util.color_temperature_mired_to_kelvin( + color_temp.mirek_schema.mirek_maximum + ) # return a fallback value to prevent issues with mired->kelvin conversions - return FALLBACK_MAX_MIREDS + return FALLBACK_MIN_KELVIN @property def extra_state_attributes(self) -> dict[str, str] | None: @@ -209,7 +215,7 @@ class HueLight(HueBaseEntity, LightEntity): """Turn the device on.""" transition = normalize_hue_transition(kwargs.get(ATTR_TRANSITION)) xy_color = kwargs.get(ATTR_XY_COLOR) - color_temp = normalize_hue_colortemp(kwargs.get(ATTR_COLOR_TEMP)) + color_temp = normalize_hue_colortemp(kwargs.get(ATTR_COLOR_TEMP_KELVIN)) brightness = normalize_hue_brightness(kwargs.get(ATTR_BRIGHTNESS)) if self._last_brightness and brightness is None: # The Hue bridge sets the brightness to 1% when turning on a bulb @@ -226,7 +232,11 @@ class HueLight(HueBaseEntity, LightEntity): flash = kwargs.get(ATTR_FLASH) effect = effect_str = kwargs.get(ATTR_EFFECT) if effect_str in (EFFECT_NONE, EFFECT_NONE.lower()): - effect = EffectStatus.NO_EFFECT + # ignore effect if set to "None" and we have no effect active + # the special effect "None" is only used to stop an active effect + # but sending it while no effect is active can actually result in issues + # https://github.com/home-assistant/core/issues/122165 + effect = None if self.effect == EFFECT_NONE else EffectStatus.NO_EFFECT elif effect_str is not None: # work out if we got a regular effect or timed effect effect = EffectStatus(effect_str) diff --git a/homeassistant/components/hue/v2/sensor.py b/homeassistant/components/hue/v2/sensor.py index 6e90d3ca775..bdf1db6df2e 100644 --- a/homeassistant/components/hue/v2/sensor.py +++ b/homeassistant/components/hue/v2/sensor.py @@ -79,6 +79,7 @@ async def async_setup_entry( register_items(ctrl_base.zigbee_connectivity, HueZigbeeConnectivitySensor) +# pylint: disable-next=hass-enforce-class-module class HueSensorBase(HueBaseEntity, SensorEntity): """Representation of a Hue sensor.""" @@ -94,6 +95,7 @@ class HueSensorBase(HueBaseEntity, SensorEntity): self.controller = controller +# pylint: disable-next=hass-enforce-class-module class HueTemperatureSensor(HueSensorBase): """Representation of a Hue Temperature sensor.""" @@ -111,6 +113,7 @@ class HueTemperatureSensor(HueSensorBase): return round(self.resource.temperature.value, 1) +# pylint: disable-next=hass-enforce-class-module class HueLightLevelSensor(HueSensorBase): """Representation of a Hue LightLevel (illuminance) sensor.""" @@ -139,6 +142,7 @@ class HueLightLevelSensor(HueSensorBase): } +# pylint: disable-next=hass-enforce-class-module class HueBatterySensor(HueSensorBase): """Representation of a Hue Battery sensor.""" @@ -164,6 +168,7 @@ class HueBatterySensor(HueSensorBase): return {"battery_state": self.resource.power_state.battery_state.value} +# pylint: disable-next=hass-enforce-class-module class HueZigbeeConnectivitySensor(HueSensorBase): """Representation of a Hue ZigbeeConnectivity sensor.""" diff --git a/homeassistant/components/huisbaasje/__init__.py b/homeassistant/components/huisbaasje/__init__.py index 3e0c9845c92..f9703f67df5 100644 --- 
a/homeassistant/components/huisbaasje/__init__.py +++ b/homeassistant/components/huisbaasje/__init__.py @@ -54,6 +54,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="sensor", update_method=async_update_data, update_interval=timedelta(seconds=POLLING_INTERVAL), diff --git a/homeassistant/components/humidifier/__init__.py b/homeassistant/components/humidifier/__init__.py index 37e2bd3e3ba..8c892dca327 100644 --- a/homeassistant/components/humidifier/__init__.py +++ b/homeassistant/components/humidifier/__init__.py @@ -4,10 +4,10 @@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import cached_property, partial import logging from typing import Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -18,22 +18,16 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_ON, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass +from homeassistant.util.hass_dict import HassKey from .const import ( # noqa: F401 - _DEPRECATED_DEVICE_CLASS_DEHUMIDIFIER, - _DEPRECATED_DEVICE_CLASS_HUMIDIFIER, - _DEPRECATED_SUPPORT_MODES, ATTR_ACTION, ATTR_AVAILABLE_MODES, ATTR_CURRENT_HUMIDITY, @@ -45,7 +39,13 @@ from .const import ( # noqa: F401 DOMAIN, MODE_AUTO, MODE_AWAY, + MODE_BABY, + MODE_BOOST, + MODE_COMFORT, + MODE_ECO, + MODE_HOME, MODE_NORMAL, + MODE_SLEEP, SERVICE_SET_HUMIDITY, SERVICE_SET_MODE, HumidifierAction, @@ -54,6 +54,7 @@ from .const import ( # noqa: F401 _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[HumidifierEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -87,7 +88,7 @@ def is_on(hass: HomeAssistant, entity_id: str) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up humidifier devices.""" - component = hass.data[DOMAIN] = EntityComponent[HumidifierEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[HumidifierEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -108,7 +109,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: vol.Coerce(int), vol.Range(min=0, max=100) ) }, - "async_set_humidity", + async_service_humidity_set, ) return True @@ -116,14 +117,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[HumidifierEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: 
EntityComponent[HumidifierEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class HumidifierEntityDescription(ToggleEntityDescription, frozen_or_thawed=True): @@ -171,7 +170,7 @@ class HumidifierEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_AT ATTR_MAX_HUMIDITY: self.max_humidity, } - if HumidifierEntityFeature.MODES in self.supported_features_compat: + if HumidifierEntityFeature.MODES in self.supported_features: data[ATTR_AVAILABLE_MODES] = self.available_modes return data @@ -200,7 +199,7 @@ class HumidifierEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_AT if self.target_humidity is not None: data[ATTR_HUMIDITY] = self.target_humidity - if HumidifierEntityFeature.MODES in self.supported_features_compat: + if HumidifierEntityFeature.MODES in self.supported_features: data[ATTR_MODE] = self.mode return data @@ -267,25 +266,29 @@ class HumidifierEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_AT """Return the list of supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> HumidifierEntityFeature: - """Return the supported features as HumidifierEntityFeature. - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = HumidifierEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features +async def async_service_humidity_set( + entity: HumidifierEntity, service_call: ServiceCall +) -> None: + """Handle set humidity service.""" + humidity = service_call.data[ATTR_HUMIDITY] + min_humidity = entity.min_humidity + max_humidity = entity.max_humidity + _LOGGER.debug( + "Check valid humidity %d in range %d - %d", + humidity, + min_humidity, + max_humidity, + ) + if humidity < min_humidity or humidity > max_humidity: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="humidity_out_of_range", + translation_placeholders={ + "humidity": str(humidity), + "min_humidity": str(min_humidity), + "max_humidity": str(max_humidity), + }, + ) - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) + await entity.async_set_humidity(humidity) diff --git a/homeassistant/components/humidifier/const.py b/homeassistant/components/humidifier/const.py index fc6b0fc14d4..ceef0c5a890 100644 --- a/homeassistant/components/humidifier/const.py +++ b/homeassistant/components/humidifier/const.py @@ -1,15 +1,6 @@ """Provides the constants needed for component.""" from enum import IntFlag, StrEnum -from functools import partial - -from homeassistant.helpers.deprecation import ( - DeprecatedConstant, - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) MODE_NORMAL = "normal" MODE_ECO = "eco" @@ -43,34 +34,11 @@ DEFAULT_MAX_HUMIDITY = 100 DOMAIN = "humidifier" -# DEVICE_CLASS_* below are deprecated as of 2021.12 -# use the 
HumidifierDeviceClass enum instead. -_DEPRECATED_DEVICE_CLASS_HUMIDIFIER = DeprecatedConstant( - "humidifier", "HumidifierDeviceClass.HUMIDIFIER", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_DEHUMIDIFIER = DeprecatedConstant( - "dehumidifier", "HumidifierDeviceClass.DEHUMIDIFIER", "2025.1" -) - SERVICE_SET_MODE = "set_mode" SERVICE_SET_HUMIDITY = "set_humidity" class HumidifierEntityFeature(IntFlag): - """Supported features of the alarm control panel entity.""" + """Supported features of the humidifier entity.""" MODES = 1 - - -# The SUPPORT_MODES constant is deprecated as of Home Assistant 2022.5. -# Please use the HumidifierEntityFeature enum instead. -_DEPRECATED_SUPPORT_MODES = DeprecatedConstantEnum( - HumidifierEntityFeature.MODES, "2025.1" -) - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/humidifier/icons.json b/homeassistant/components/humidifier/icons.json index 2c67f759195..15951df432d 100644 --- a/homeassistant/components/humidifier/icons.json +++ b/homeassistant/components/humidifier/icons.json @@ -33,10 +33,20 @@ } }, "services": { - "set_humidity": "mdi:water-percent", - "set_mode": "mdi:air-humidifier", - "toggle": "mdi:air-humidifier", - "turn_off": "mdi:air-humidifier-off", - "turn_on": "mdi:air-humidifier" + "set_humidity": { + "service": "mdi:water-percent" + }, + "set_mode": { + "service": "mdi:air-humidifier" + }, + "toggle": { + "service": "mdi:air-humidifier" + }, + "turn_off": { + "service": "mdi:air-humidifier-off" + }, + "turn_on": { + "service": "mdi:air-humidifier" + } } } diff --git a/homeassistant/components/humidifier/strings.json b/homeassistant/components/humidifier/strings.json index 0416f4a68a6..753368dc572 100644 --- a/homeassistant/components/humidifier/strings.json +++ b/homeassistant/components/humidifier/strings.json @@ -115,5 +115,10 @@ "name": "[%key:common::action::toggle%]", "description": "Toggles the humidifier on/off." } + }, + "exceptions": { + "humidity_out_of_range": { + "message": "Provided humidity {humidity} is not valid. Accepted range is {min_humidity} to {max_humidity}." 
+ } } } diff --git a/homeassistant/components/hunterdouglas_powerview/__init__.py b/homeassistant/components/hunterdouglas_powerview/__init__.py index f8c7ac43b94..d9358db2753 100644 --- a/homeassistant/components/hunterdouglas_powerview/__init__.py +++ b/homeassistant/components/hunterdouglas_powerview/__init__.py @@ -1,9 +1,8 @@ """The Hunter Douglas PowerView integration.""" import logging +from typing import TYPE_CHECKING -from aiopvapi.helpers.aiorequest import AioRequest -from aiopvapi.hub import Hub from aiopvapi.resources.model import PowerviewData from aiopvapi.rooms import Rooms from aiopvapi.scenes import Scenes @@ -12,12 +11,13 @@ from aiopvapi.shades import Shades from homeassistant.const import CONF_API_VERSION, CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.aiohttp_client import async_get_clientsession +import homeassistant.helpers.entity_registry as er from .const import DOMAIN, HUB_EXCEPTIONS from .coordinator import PowerviewShadeUpdateCoordinator -from .model import PowerviewConfigEntry, PowerviewDeviceInfo, PowerviewEntryData +from .model import PowerviewConfigEntry, PowerviewEntryData from .shade_data import PowerviewShadeData +from .util import async_connect_hub PARALLEL_UPDATES = 1 @@ -35,29 +35,23 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> bool: """Set up Hunter Douglas PowerView from a config entry.""" - config = entry.data - - hub_address = config[CONF_HOST] - api_version = config.get(CONF_API_VERSION, None) + hub_address: str = config[CONF_HOST] + api_version: int | None = config.get(CONF_API_VERSION) _LOGGER.debug("Connecting %s at %s with v%s api", DOMAIN, hub_address, api_version) - websession = async_get_clientsession(hass) - - pv_request = AioRequest( - hub_address, loop=hass.loop, websession=websession, api_version=api_version - ) - # default 15 second timeout for each call in upstream try: - hub = Hub(pv_request) - await hub.query_firmware() - device_info = await async_get_device_info(hub) + api = await async_connect_hub(hass, hub_address, api_version) except HUB_EXCEPTIONS as err: raise ConfigEntryNotReady( f"Connection error to PowerView hub {hub_address}: {err}" ) from err + hub = api.hub + pv_request = api.pv_request + device_info = api.device_info + if hub.role != "Primary": # this should be caught in config_flow, but account for a hub changing roles # this will only happen manually by a user @@ -92,6 +86,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> new_data[CONF_API_VERSION] = hub.api_version hass.config_entries.async_update_entry(entry, data=new_data) + if entry.unique_id is None: + hass.config_entries.async_update_entry( + entry, unique_id=device_info.serial_number + ) + coordinator = PowerviewShadeUpdateCoordinator(hass, shades, hub) coordinator.async_set_updated_data(PowerviewShadeData()) # populate raw shade data into the coordinator for diagnostics @@ -111,18 +110,62 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> return True -async def async_get_device_info(hub: Hub) -> PowerviewDeviceInfo: - """Determine device info.""" - return PowerviewDeviceInfo( - name=hub.name, - mac_address=hub.mac_address, - serial_number=hub.serial_number, - firmware=hub.firmware, - model=hub.model, - hub_address=hub.ip, - ) - - async def async_unload_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> 
bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_migrate_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> bool: + """Migrate entry.""" + + _LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version) + + if entry.version == 1: + # 1 -> 2: Unique ID from integer to string + if entry.minor_version == 1: + if entry.unique_id is None: + await _async_add_missing_entry_unique_id(hass, entry) + await _migrate_unique_ids(hass, entry) + hass.config_entries.async_update_entry(entry, minor_version=2) + + _LOGGER.debug("Migrated to version %s.%s", entry.version, entry.minor_version) + + return True + + +async def _async_add_missing_entry_unique_id( + hass: HomeAssistant, entry: PowerviewConfigEntry +) -> None: + """Add the unique id if its missing.""" + address: str = entry.data[CONF_HOST] + api_version: int | None = entry.data.get(CONF_API_VERSION) + api = await async_connect_hub(hass, address, api_version) + hass.config_entries.async_update_entry( + entry, unique_id=api.device_info.serial_number + ) + + +async def _migrate_unique_ids(hass: HomeAssistant, entry: PowerviewConfigEntry) -> None: + """Migrate int based unique ids to str.""" + entity_registry = er.async_get(hass) + registry_entries = er.async_entries_for_config_entry( + entity_registry, entry.entry_id + ) + if TYPE_CHECKING: + assert entry.unique_id + for reg_entry in registry_entries: + if isinstance(reg_entry.unique_id, int) or ( + isinstance(reg_entry.unique_id, str) + and not reg_entry.unique_id.startswith(entry.unique_id) + ): + _LOGGER.debug( + "Migrating %s: %s to %s_%s", + reg_entry.entity_id, + reg_entry.unique_id, + entry.unique_id, + reg_entry.unique_id, + ) + entity_registry.async_update_entity( + reg_entry.entity_id, + new_unique_id=f"{entry.unique_id}_{reg_entry.unique_id}", + ) diff --git a/homeassistant/components/hunterdouglas_powerview/config_flow.py b/homeassistant/components/hunterdouglas_powerview/config_flow.py index 88ccf890c66..debb9710dbd 100644 --- a/homeassistant/components/hunterdouglas_powerview/config_flow.py +++ b/homeassistant/components/hunterdouglas_powerview/config_flow.py @@ -3,10 +3,8 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Self -from aiopvapi.helpers.aiorequest import AioRequest -from aiopvapi.hub import Hub import voluptuous as vol from homeassistant.components import dhcp, zeroconf @@ -14,16 +12,15 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . import async_get_device_info from .const import DOMAIN, HUB_EXCEPTIONS +from .util import async_connect_hub _LOGGER = logging.getLogger(__name__) HAP_SUFFIX = "._hap._tcp.local." POWERVIEW_G2_SUFFIX = "._powerview._tcp.local." -POWERVIEW_G3_SUFFIX = "._powerview-g3._tcp.local." +POWERVIEW_G3_SUFFIX = "._PowerView-G3._tcp.local." async def validate_input(hass: HomeAssistant, hub_address: str) -> dict[str, str]: @@ -31,18 +28,9 @@ async def validate_input(hass: HomeAssistant, hub_address: str) -> dict[str, str Data has the keys from DATA_SCHEMA with values provided by the user. 
""" - - websession = async_get_clientsession(hass) - - pv_request = AioRequest(hub_address, loop=hass.loop, websession=websession) - - try: - hub = Hub(pv_request) - await hub.query_firmware() - device_info = await async_get_device_info(hub) - except HUB_EXCEPTIONS as err: - raise CannotConnect from err - + api = await async_connect_hub(hass, hub_address) + hub = api.hub + device_info = api.device_info if hub.role != "Primary": raise UnsupportedDevice( f"{hub.name} ({hub.hub_address}) is the {hub.role} Hub. " @@ -63,6 +51,7 @@ class PowerviewConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Hunter Douglas PowerView.""" VERSION = 1 + MINOR_VERSION = 2 def __init__(self) -> None: """Initialize the powerview config flow.""" @@ -110,7 +99,7 @@ class PowerviewConfigFlow(ConfigFlow, domain=DOMAIN): try: info = await validate_input(self.hass, host) - except CannotConnect: + except HUB_EXCEPTIONS: return None, "cannot_connect" except UnsupportedDevice: return None, "unsupported_device" @@ -152,10 +141,8 @@ class PowerviewConfigFlow(ConfigFlow, domain=DOMAIN): # If we already have the host configured do # not open connections to it if we can avoid it. assert self.discovered_ip and self.discovered_name is not None - self.context[CONF_HOST] = self.discovered_ip - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == self.discovered_ip: - return self.async_abort(reason="already_in_progress") + if self.hass.config_entries.flow.async_has_matching_flow(self): + return self.async_abort(reason="already_in_progress") self._async_abort_entries_match({CONF_HOST: self.discovered_ip}) info, error = await self._async_validate_or_error(self.discovered_ip) @@ -177,6 +164,10 @@ class PowerviewConfigFlow(ConfigFlow, domain=DOMAIN): } return await self.async_step_link() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow.discovered_ip == self.discovered_ip + async def async_step_link( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -197,9 +188,5 @@ class PowerviewConfigFlow(ConfigFlow, domain=DOMAIN): ) -class CannotConnect(HomeAssistantError): - """Error to indicate we cannot connect.""" - - class UnsupportedDevice(HomeAssistantError): """Error to indicate the device is not supported.""" diff --git a/homeassistant/components/hunterdouglas_powerview/cover.py b/homeassistant/components/hunterdouglas_powerview/cover.py index 6ee5fc92a41..197fb4e6223 100644 --- a/homeassistant/components/hunterdouglas_powerview/cover.py +++ b/homeassistant/components/hunterdouglas_powerview/cover.py @@ -595,7 +595,7 @@ class PowerViewShadeTDBUBottom(PowerViewShadeDualRailBase): ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, name) - self._attr_unique_id = f"{self._shade.id}_bottom" + self._attr_unique_id = f"{self._attr_unique_id}_bottom" @callback def _clamp_cover_limit(self, target_hass_position: int) -> int: @@ -632,7 +632,7 @@ class PowerViewShadeTDBUTop(PowerViewShadeDualRailBase): ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, name) - self._attr_unique_id = f"{self._shade.id}_top" + self._attr_unique_id = f"{self._attr_unique_id}_top" @property def should_poll(self) -> bool: @@ -740,7 +740,7 @@ class PowerViewShadeDualOverlappedCombined(PowerViewShadeDualOverlappedBase): ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, 
name) - self._attr_unique_id = f"{self._shade.id}_combined" + self._attr_unique_id = f"{self._attr_unique_id}_combined" @property def is_closed(self) -> bool: @@ -806,7 +806,7 @@ class PowerViewShadeDualOverlappedFront(PowerViewShadeDualOverlappedBase): ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, name) - self._attr_unique_id = f"{self._shade.id}_front" + self._attr_unique_id = f"{self._attr_unique_id}_front" @property def should_poll(self) -> bool: @@ -862,7 +862,7 @@ class PowerViewShadeDualOverlappedRear(PowerViewShadeDualOverlappedBase): ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, name) - self._attr_unique_id = f"{self._shade.id}_rear" + self._attr_unique_id = f"{self._attr_unique_id}_rear" @property def should_poll(self) -> bool: diff --git a/homeassistant/components/hunterdouglas_powerview/entity.py b/homeassistant/components/hunterdouglas_powerview/entity.py index 424d314c4b9..ba572ecefce 100644 --- a/homeassistant/components/hunterdouglas_powerview/entity.py +++ b/homeassistant/components/hunterdouglas_powerview/entity.py @@ -26,12 +26,12 @@ class HDEntity(CoordinatorEntity[PowerviewShadeUpdateCoordinator]): coordinator: PowerviewShadeUpdateCoordinator, device_info: PowerviewDeviceInfo, room_name: str, - unique_id: str, + powerview_id: str, ) -> None: """Initialize the entity.""" super().__init__(coordinator) self._room_name = room_name - self._attr_unique_id = unique_id + self._attr_unique_id = f"{device_info.serial_number}_{powerview_id}" self._device_info = device_info self._configuration_url = self.coordinator.hub.url diff --git a/homeassistant/components/hunterdouglas_powerview/manifest.json b/homeassistant/components/hunterdouglas_powerview/manifest.json index 4120c55a7a7..a80708d9a3f 100644 --- a/homeassistant/components/hunterdouglas_powerview/manifest.json +++ b/homeassistant/components/hunterdouglas_powerview/manifest.json @@ -19,5 +19,5 @@ "iot_class": "local_polling", "loggers": ["aiopvapi"], "requirements": ["aiopvapi==3.1.1"], - "zeroconf": ["_powerview._tcp.local.", "_powerview-g3._tcp.local."] + "zeroconf": ["_powerview._tcp.local.", "_PowerView-G3._tcp.local."] } diff --git a/homeassistant/components/hunterdouglas_powerview/model.py b/homeassistant/components/hunterdouglas_powerview/model.py index 86296b949f4..407de86368f 100644 --- a/homeassistant/components/hunterdouglas_powerview/model.py +++ b/homeassistant/components/hunterdouglas_powerview/model.py @@ -3,20 +3,23 @@ from __future__ import annotations from dataclasses import dataclass +from typing import TYPE_CHECKING from aiopvapi.helpers.aiorequest import AioRequest +from aiopvapi.hub import Hub from aiopvapi.resources.room import Room from aiopvapi.resources.scene import Scene from aiopvapi.resources.shade import BaseShade from homeassistant.config_entries import ConfigEntry -from .coordinator import PowerviewShadeUpdateCoordinator +if TYPE_CHECKING: + from .coordinator import PowerviewShadeUpdateCoordinator type PowerviewConfigEntry = ConfigEntry[PowerviewEntryData] -@dataclass +@dataclass(slots=True) class PowerviewEntryData: """Define class for main domain information.""" @@ -28,7 +31,7 @@ class PowerviewEntryData: device_info: PowerviewDeviceInfo -@dataclass +@dataclass(slots=True) class PowerviewDeviceInfo: """Define class for device information.""" @@ -38,3 +41,12 @@ class PowerviewDeviceInfo: firmware: str | None model: str hub_address: str + + +@dataclass(slots=True) +class PowerviewAPI: + 
"""Define class to hold the Powerview Hub API data.""" + + hub: Hub + pv_request: AioRequest + device_info: PowerviewDeviceInfo diff --git a/homeassistant/components/hunterdouglas_powerview/number.py b/homeassistant/components/hunterdouglas_powerview/number.py index f893b04b2d1..fb8c9f76d79 100644 --- a/homeassistant/components/hunterdouglas_powerview/number.py +++ b/homeassistant/components/hunterdouglas_powerview/number.py @@ -95,7 +95,7 @@ class PowerViewNumber(ShadeEntity, RestoreNumber): self.entity_description = description self._attr_unique_id = f"{self._attr_unique_id}_{description.key}" - def set_native_value(self, value: float) -> None: + async def async_set_native_value(self, value: float) -> None: """Update the current value.""" self._attr_native_value = value self.entity_description.store_value_fn(self.coordinator, self._shade.id, value) diff --git a/homeassistant/components/hunterdouglas_powerview/util.py b/homeassistant/components/hunterdouglas_powerview/util.py index 1d670f46429..360bd7f722b 100644 --- a/homeassistant/components/hunterdouglas_powerview/util.py +++ b/homeassistant/components/hunterdouglas_powerview/util.py @@ -5,12 +5,38 @@ from __future__ import annotations from collections.abc import Iterable from typing import Any +from aiopvapi.helpers.aiorequest import AioRequest from aiopvapi.helpers.constants import ATTR_ID +from aiopvapi.hub import Hub -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .model import PowerviewAPI, PowerviewDeviceInfo @callback def async_map_data_by_id(data: Iterable[dict[str | int, Any]]): """Return a dict with the key being the id for a list of entries.""" return {entry[ATTR_ID]: entry for entry in data} + + +async def async_connect_hub( + hass: HomeAssistant, address: str, api_version: int | None = None +) -> PowerviewAPI: + """Create the hub and fetch the device info address.""" + websession = async_get_clientsession(hass) + pv_request = AioRequest( + address, loop=hass.loop, websession=websession, api_version=api_version + ) + hub = Hub(pv_request) + await hub.query_firmware() + info = PowerviewDeviceInfo( + name=hub.name, + mac_address=hub.mac_address, + serial_number=hub.serial_number, + firmware=hub.firmware, + model=hub.model, + hub_address=hub.ip, + ) + return PowerviewAPI(hub, pv_request, info) diff --git a/homeassistant/components/husqvarna_automower/__init__.py b/homeassistant/components/husqvarna_automower/__init__.py index 326a9a010ef..da7965250cd 100644 --- a/homeassistant/components/husqvarna_automower/__init__.py +++ b/homeassistant/components/husqvarna_automower/__init__.py @@ -9,9 +9,16 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow +from homeassistant.helpers import ( + aiohttp_client, + config_entry_oauth2_flow, + device_registry as dr, + entity_registry as er, +) +from homeassistant.util import dt as dt_util from . 
import api +from .const import DOMAIN from .coordinator import AutomowerDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -19,6 +26,7 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.BUTTON, + Platform.CALENDAR, Platform.DEVICE_TRACKER, Platform.LAWN_MOWER, Platform.NUMBER, @@ -42,7 +50,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) -> aiohttp_client.async_get_clientsession(hass), session, ) - automower_api = AutomowerSession(api_api) + time_zone_str = str(dt_util.DEFAULT_TIME_ZONE) + automower_api = AutomowerSession( + api_api, + await dt_util.async_get_time_zone(time_zone_str), + ) try: await api_api.async_get_access_token() except ClientResponseError as err: @@ -50,8 +62,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) -> raise ConfigEntryAuthFailed from err raise ConfigEntryNotReady from err - coordinator = AutomowerDataUpdateCoordinator(hass, automower_api, entry) + if "amc:api" not in entry.data["token"]["scope"]: + # We raise ConfigEntryAuthFailed here because the websocket can't be used + # without the scope. So only polling would be possible. + raise ConfigEntryAuthFailed + + coordinator = AutomowerDataUpdateCoordinator(hass, automower_api) await coordinator.async_config_entry_first_refresh() + available_devices = list(coordinator.data) + cleanup_removed_devices(hass, coordinator.config_entry, available_devices) entry.runtime_data = coordinator entry.async_create_background_task( @@ -60,11 +79,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) -> "websocket_task", ) - if "amc:api" not in entry.data["token"]["scope"]: - # We raise ConfigEntryAuthFailed here because the websocket can't be used - # without the scope. So only polling would be possible. 
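The "websocket_task" created above is kept alive by the coordinator's client_listen, which now doubles its reconnect delay after every failed attempt (from DEFAULT_RECONNECT_TIME up to MAX_WS_RECONNECT_TIME) and resets it once a connection succeeds. A simplified, loop-based sketch of that backoff; the real code re-schedules itself as a Home Assistant background task and catches the aioautomower handshake/timeout exceptions rather than OSError:

import asyncio
from collections.abc import Awaitable, Callable

DEFAULT_RECONNECT_TIME = 2  # seconds, as defined in coordinator.py
MAX_WS_RECONNECT_TIME = 600


async def listen_forever(connect_and_listen: Callable[[], Awaitable[None]]) -> None:
    """Keep a websocket listener alive, doubling the wait after every failure."""
    delay = DEFAULT_RECONNECT_TIME
    while True:
        try:
            await connect_and_listen()
            delay = DEFAULT_RECONNECT_TIME  # a successful connection resets the backoff
        except OSError:
            # Stand-in for the aioautomower handshake/timeout exceptions.
            pass
        await asyncio.sleep(delay)
        delay = min(delay * 2, MAX_WS_RECONNECT_TIME)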
- raise ConfigEntryAuthFailed - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -72,3 +86,36 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) -> async def async_unload_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) -> bool: """Handle unload of an entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +def cleanup_removed_devices( + hass: HomeAssistant, + config_entry: AutomowerConfigEntry, + available_devices: list[str], +) -> None: + """Cleanup entity and device registry from removed devices.""" + device_reg = dr.async_get(hass) + identifiers = {(DOMAIN, mower_id) for mower_id in available_devices} + for device in dr.async_entries_for_config_entry(device_reg, config_entry.entry_id): + if not set(device.identifiers) & identifiers: + _LOGGER.debug("Removing obsolete device entry %s", device.name) + device_reg.async_update_device( + device.id, remove_config_entry_id=config_entry.entry_id + ) + + +def remove_work_area_entities( + hass: HomeAssistant, + config_entry: AutomowerConfigEntry, + removed_work_areas: set[int], + mower_id: str, +) -> None: + """Remove all unused work area entities for the specified mower.""" + entity_reg = er.async_get(hass) + for entity_entry in er.async_entries_for_config_entry( + entity_reg, config_entry.entry_id + ): + for work_area_id in removed_work_areas: + if entity_entry.unique_id.startswith(f"{mower_id}_{work_area_id}_"): + _LOGGER.info("Deleting: %s", entity_entry.entity_id) + entity_reg.async_remove(entity_entry.entity_id) diff --git a/homeassistant/components/husqvarna_automower/api.py b/homeassistant/components/husqvarna_automower/api.py index f1d3e1ef4fa..8a9a31b926a 100644 --- a/homeassistant/components/husqvarna_automower/api.py +++ b/homeassistant/components/husqvarna_automower/api.py @@ -7,6 +7,7 @@ from aioautomower.auth import AbstractAuth from aioautomower.const import API_BASE_URL from aiohttp import ClientSession +from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.helpers import config_entry_oauth2_flow _LOGGER = logging.getLogger(__name__) @@ -28,3 +29,16 @@ class AsyncConfigEntryAuth(AbstractAuth): """Return a valid access token.""" await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) + + +class AsyncConfigFlowAuth(AbstractAuth): + """Provide Automower AbstractAuth for the config flow.""" + + def __init__(self, websession: ClientSession, token: dict) -> None: + """Initialize Husqvarna Automower auth.""" + super().__init__(websession, API_BASE_URL) + self.token: dict = token + + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + return cast(str, self.token[CONF_ACCESS_TOKEN]) diff --git a/homeassistant/components/husqvarna_automower/binary_sensor.py b/homeassistant/components/husqvarna_automower/binary_sensor.py index 922f7deb99b..3c23da76797 100644 --- a/homeassistant/components/husqvarna_automower/binary_sensor.py +++ b/homeassistant/components/husqvarna_automower/binary_sensor.py @@ -3,22 +3,42 @@ from collections.abc import Callable from dataclasses import dataclass import logging +from typing import TYPE_CHECKING from aioautomower.model import MowerActivities, MowerAttributes +from homeassistant.components.automation import automations_with_entity from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) +from 
homeassistant.components.script import scripts_with_entity from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from . import AutomowerConfigEntry +from .const import DOMAIN from .coordinator import AutomowerDataUpdateCoordinator from .entity import AutomowerBaseEntity _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]: + """Get list of related automations and scripts.""" + used_in = automations_with_entity(hass, entity_id) + used_in += scripts_with_entity(hass, entity_id) + return used_in @dataclass(frozen=True, kw_only=True) @@ -28,7 +48,7 @@ class AutomowerBinarySensorEntityDescription(BinarySensorEntityDescription): value_fn: Callable[[MowerAttributes], bool] -BINARY_SENSOR_TYPES: tuple[AutomowerBinarySensorEntityDescription, ...] = ( +MOWER_BINARY_SENSOR_TYPES: tuple[AutomowerBinarySensorEntityDescription, ...] = ( AutomowerBinarySensorEntityDescription( key="battery_charging", value_fn=lambda data: data.mower.activity == MowerActivities.CHARGING, @@ -43,6 +63,7 @@ BINARY_SENSOR_TYPES: tuple[AutomowerBinarySensorEntityDescription, ...] = ( key="returning_to_dock", translation_key="returning_to_dock", value_fn=lambda data: data.mower.activity == MowerActivities.GOING_HOME, + entity_registry_enabled_default=False, ), ) @@ -57,7 +78,7 @@ async def async_setup_entry( async_add_entities( AutomowerBinarySensorEntity(mower_id, coordinator, description) for mower_id in coordinator.data - for description in BINARY_SENSOR_TYPES + for description in MOWER_BINARY_SENSOR_TYPES ) @@ -81,3 +102,39 @@ class AutomowerBinarySensorEntity(AutomowerBaseEntity, BinarySensorEntity): def is_on(self) -> bool: """Return the state of the binary sensor.""" return self.entity_description.value_fn(self.mower_attributes) + + async def async_added_to_hass(self) -> None: + """Raise issue when entity is registered and was not disabled.""" + if TYPE_CHECKING: + assert self.unique_id + if not ( + entity_id := er.async_get(self.hass).async_get_entity_id( + BINARY_SENSOR_DOMAIN, DOMAIN, self.unique_id + ) + ): + return + if ( + self.enabled + and self.entity_description.key == "returning_to_dock" + and entity_used_in(self.hass, entity_id) + ): + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_entity_{self.entity_description.key}", + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_entity", + translation_placeholders={ + "entity_name": str(self.name), + "entity": entity_id, + }, + ) + else: + async_delete_issue( + self.hass, + DOMAIN, + f"deprecated_task_entity_{self.entity_description.key}", + ) + await super().async_added_to_hass() diff --git a/homeassistant/components/husqvarna_automower/button.py b/homeassistant/components/husqvarna_automower/button.py index 810dd4df92d..ce303325496 100644 --- a/homeassistant/components/husqvarna_automower/button.py +++ b/homeassistant/components/husqvarna_automower/button.py @@ -1,21 +1,55 @@ -"""Creates a button entity for Husqvarna Automower integration.""" +"""Creates button entities for the Husqvarna Automower integration.""" +from collections.abc import Awaitable, Callable +from dataclasses import dataclass import logging +from typing 
import Any -from aioautomower.exceptions import ApiException +from aioautomower.model import MowerAttributes +from aioautomower.session import AutomowerSession -from homeassistant.components.button import ButtonEntity +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import AutomowerConfigEntry -from .const import DOMAIN from .coordinator import AutomowerDataUpdateCoordinator -from .entity import AutomowerAvailableEntity +from .entity import ( + AutomowerAvailableEntity, + _check_error_free, + handle_sending_exception, +) _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 1 + + +@dataclass(frozen=True, kw_only=True) +class AutomowerButtonEntityDescription(ButtonEntityDescription): + """Describes Automower button entities.""" + + available_fn: Callable[[MowerAttributes], bool] = lambda _: True + exists_fn: Callable[[MowerAttributes], bool] = lambda _: True + press_fn: Callable[[AutomowerSession, str], Awaitable[Any]] + + +MOWER_BUTTON_TYPES: tuple[AutomowerButtonEntityDescription, ...] = ( + AutomowerButtonEntityDescription( + key="confirm_error", + translation_key="confirm_error", + available_fn=lambda data: data.mower.is_error_confirmable, + exists_fn=lambda data: data.capabilities.can_confirm_error, + press_fn=lambda session, mower_id: session.commands.error_confirm(mower_id), + ), + AutomowerButtonEntityDescription( + key="sync_clock", + translation_key="sync_clock", + available_fn=_check_error_free, + press_fn=lambda session, mower_id: session.commands.set_datetime(mower_id), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -25,38 +59,35 @@ async def async_setup_entry( """Set up button platform.""" coordinator = entry.runtime_data async_add_entities( - AutomowerButtonEntity(mower_id, coordinator) + AutomowerButtonEntity(mower_id, coordinator, description) for mower_id in coordinator.data - if coordinator.data[mower_id].capabilities.can_confirm_error + for description in MOWER_BUTTON_TYPES + if description.exists_fn(coordinator.data[mower_id]) ) class AutomowerButtonEntity(AutomowerAvailableEntity, ButtonEntity): """Defining the AutomowerButtonEntity.""" - _attr_translation_key = "confirm_error" + entity_description: AutomowerButtonEntityDescription def __init__( self, mower_id: str, coordinator: AutomowerDataUpdateCoordinator, + description: AutomowerButtonEntityDescription, ) -> None: - """Set up button platform.""" + """Set up AutomowerButtonEntity.""" super().__init__(mower_id, coordinator) - self._attr_unique_id = f"{mower_id}_confirm_error" + self.entity_description = description + self._attr_unique_id = f"{mower_id}_{description.key}" @property def available(self) -> bool: - """Return True if the device and entity is available.""" - return super().available and self.mower_attributes.mower.is_error_confirmable + """Return the available attribute of the entity.""" + return self.entity_description.available_fn(self.mower_attributes) + @handle_sending_exception() async def async_press(self) -> None: - """Handle the button press.""" - try: - await self.coordinator.api.commands.error_confirm(self.mower_id) - except ApiException as exception: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="command_send_failed", - translation_placeholders={"exception": str(exception)}, - ) from exception + """Send a command to the mower.""" + await 
self.entity_description.press_fn(self.coordinator.api, self.mower_id) diff --git a/homeassistant/components/husqvarna_automower/calendar.py b/homeassistant/components/husqvarna_automower/calendar.py new file mode 100644 index 00000000000..f3e82fde5d4 --- /dev/null +++ b/homeassistant/components/husqvarna_automower/calendar.py @@ -0,0 +1,97 @@ +"""Creates a calendar entity for the mower.""" + +from datetime import datetime +import logging + +from aioautomower.model import make_name_string + +from homeassistant.components.calendar import CalendarEntity, CalendarEvent +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import dt as dt_util + +from . import AutomowerConfigEntry +from .coordinator import AutomowerDataUpdateCoordinator +from .entity import AutomowerBaseEntity + +_LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AutomowerConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up lawn mower platform.""" + coordinator = entry.runtime_data + async_add_entities( + AutomowerCalendarEntity(mower_id, coordinator) for mower_id in coordinator.data + ) + + +class AutomowerCalendarEntity(AutomowerBaseEntity, CalendarEntity): + """Representation of the Automower Calendar element.""" + + _attr_name: str | None = None + + def __init__( + self, + mower_id: str, + coordinator: AutomowerDataUpdateCoordinator, + ) -> None: + """Set up AutomowerCalendarEntity.""" + super().__init__(mower_id, coordinator) + self._attr_unique_id = mower_id + self._event: CalendarEvent | None = None + + @property + def event(self) -> CalendarEvent | None: + """Return the current or next upcoming event.""" + schedule = self.mower_attributes.calendar + cursor = schedule.timeline.active_after(dt_util.now()) + program_event = next(cursor, None) + _LOGGER.debug("program_event %s", program_event) + if not program_event: + return None + work_area_name = None + if self.mower_attributes.work_area_dict and program_event.work_area_id: + work_area_name = self.mower_attributes.work_area_dict[ + program_event.work_area_id + ] + return CalendarEvent( + summary=make_name_string(work_area_name, program_event.schedule_no), + start=program_event.start, + end=program_event.end, + rrule=program_event.rrule_str, + ) + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Return calendar events within a datetime range. + + This is only called when opening the calendar in the UI. 
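The calendar entity's event property asks the schedule timeline for the first program entry still active after "now" and maps it to a CalendarEvent; async_get_events does the same over a whole range. A small standalone sketch of the "current or next event" selection, where ProgramEvent is a hypothetical stand-in for an aioautomower timeline entry and "active after now" is interpreted as "has not ended yet":

from datetime import datetime, timedelta, timezone


class ProgramEvent:
    """Hypothetical stand-in for a timeline entry (sorted by start time)."""

    def __init__(self, start: datetime, end: datetime) -> None:
        self.start = start
        self.end = end


def next_event(events: list[ProgramEvent], now: datetime) -> ProgramEvent | None:
    """Return the first event that has not ended yet, or None if nothing is scheduled."""
    return next((event for event in events if event.end > now), None)


now = datetime(2024, 6, 1, 12, 0, tzinfo=timezone.utc)
events = [
    ProgramEvent(now - timedelta(hours=3), now - timedelta(hours=1)),  # already finished
    ProgramEvent(now + timedelta(hours=2), now + timedelta(hours=4)),  # next mowing slot
]
assert next_event(events, now) is events[1]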
+ """ + schedule = self.mower_attributes.calendar + cursor = schedule.timeline.overlapping( + start_date, + end_date, + ) + calendar_events = [] + for program_event in cursor: + work_area_name = None + if self.mower_attributes.work_area_dict and program_event.work_area_id: + work_area_name = self.mower_attributes.work_area_dict[ + program_event.work_area_id + ] + calendar_events.append( + CalendarEvent( + summary=make_name_string(work_area_name, program_event.schedule_no), + start=program_event.start.replace(tzinfo=start_date.tzinfo), + end=program_event.end.replace(tzinfo=start_date.tzinfo), + rrule=program_event.rrule_str, + ) + ) + return calendar_events diff --git a/homeassistant/components/husqvarna_automower/config_flow.py b/homeassistant/components/husqvarna_automower/config_flow.py index c848f823b13..7efed529453 100644 --- a/homeassistant/components/husqvarna_automower/config_flow.py +++ b/homeassistant/components/husqvarna_automower/config_flow.py @@ -4,12 +4,15 @@ from collections.abc import Mapping import logging from typing import Any +from aioautomower.session import AutomowerSession from aioautomower.utils import structure_token -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN -from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, CONF_TOKEN +from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow +from homeassistant.util import dt as dt_util +from .api import AsyncConfigFlowAuth from .const import DOMAIN, NAME _LOGGER = logging.getLogger(__name__) @@ -26,27 +29,40 @@ class HusqvarnaConfigFlowHandler( VERSION = 1 DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow.""" token = data[CONF_TOKEN] - if "amc:api" not in token["scope"] and not self.reauth_entry: + if "amc:api" not in token["scope"] and self.source != SOURCE_REAUTH: return self.async_abort(reason="missing_amc_scope") user_id = token[CONF_USER_ID] - if self.reauth_entry: + await self.async_set_unique_id(user_id) + + if self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() if "amc:api" not in token["scope"]: return self.async_update_reload_and_abort( - self.reauth_entry, data=data, reason="missing_amc_scope" + reauth_entry, data=data, reason="missing_amc_scope" ) - if self.reauth_entry.unique_id != user_id: - return self.async_abort(reason="wrong_account") - return self.async_update_reload_and_abort(self.reauth_entry, data=data) + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort(reauth_entry, data=data) + + self._abort_if_unique_id_configured() + + websession = aiohttp_client.async_get_clientsession(self.hass) + tz = await dt_util.async_get_time_zone(str(dt_util.DEFAULT_TIME_ZONE)) + automower_api = AutomowerSession(AsyncConfigFlowAuth(websession, token), tz) + try: + status_data = await automower_api.get_status() + except Exception: # noqa: BLE001 + return self.async_abort(reason="unknown") + if status_data == {}: + return self.async_abort(reason="no_mower_connected") + structured_token = structure_token(token[CONF_ACCESS_TOKEN]) first_name = structured_token.user.first_name last_name = structured_token.user.last_name - await self.async_set_unique_id(user_id) - 
self._abort_if_unique_id_configured() + return self.async_create_entry( title=f"{NAME} of {first_name} {last_name}", data=data, @@ -61,12 +77,8 @@ class HusqvarnaConfigFlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - if self.reauth_entry is not None: - if "amc:api" not in self.reauth_entry.data["token"]["scope"]: - return await self.async_step_missing_scope() + if "amc:api" not in entry_data["token"]["scope"]: + return await self.async_step_missing_scope() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -74,16 +86,19 @@ class HusqvarnaConfigFlowHandler( ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - return self.async_show_form(step_id="reauth_confirm") + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={CONF_NAME: self._get_reauth_entry().title}, + ) return await self.async_step_user() async def async_step_missing_scope( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm reauth for missing scope.""" - if user_input is None and self.reauth_entry is not None: + if user_input is None and self.source == SOURCE_REAUTH: token_structured = structure_token( - self.reauth_entry.data["token"]["access_token"] + self._get_reauth_entry().data["token"]["access_token"] ) return self.async_show_form( step_id="missing_scope", diff --git a/homeassistant/components/husqvarna_automower/coordinator.py b/homeassistant/components/husqvarna_automower/coordinator.py index 817789727ca..57be02e7066 100644 --- a/homeassistant/components/husqvarna_automower/coordinator.py +++ b/homeassistant/components/husqvarna_automower/coordinator.py @@ -1,35 +1,42 @@ """Data UpdateCoordinator for the Husqvarna Automower integration.""" +from __future__ import annotations + import asyncio from datetime import timedelta import logging +from typing import TYPE_CHECKING from aioautomower.exceptions import ( ApiException, AuthException, HusqvarnaWSServerHandshakeError, + TimeoutException, ) from aioautomower.model import MowerAttributes from aioautomower.session import AutomowerSession -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN +if TYPE_CHECKING: + from . 
import AutomowerConfigEntry + _LOGGER = logging.getLogger(__name__) MAX_WS_RECONNECT_TIME = 600 SCAN_INTERVAL = timedelta(minutes=8) +DEFAULT_RECONNECT_TIME = 2 # Define a default reconnect time class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttributes]]): """Class to manage fetching Husqvarna data.""" - def __init__( - self, hass: HomeAssistant, api: AutomowerSession, entry: ConfigEntry - ) -> None: + config_entry: AutomowerConfigEntry + + def __init__(self, hass: HomeAssistant, api: AutomowerSession) -> None: """Initialize data updater.""" super().__init__( hass, @@ -38,8 +45,8 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib update_interval=SCAN_INTERVAL, ) self.api = api - self.ws_connected: bool = False + self.reconnect_time = DEFAULT_RECONNECT_TIME async def _async_update_data(self) -> dict[str, MowerAttributes]: """Subscribe for websocket and poll data from the API.""" @@ -62,26 +69,30 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib async def client_listen( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: AutomowerConfigEntry, automower_client: AutomowerSession, - reconnect_time: int = 2, ) -> None: """Listen with the client.""" try: await automower_client.auth.websocket_connect() - reconnect_time = 2 + # Reset reconnect time after successful connection + self.reconnect_time = DEFAULT_RECONNECT_TIME await automower_client.start_listening() except HusqvarnaWSServerHandshakeError as err: _LOGGER.debug( - "Failed to connect to websocket. Trying to reconnect: %s", err + "Failed to connect to websocket. Trying to reconnect: %s", + err, + ) + except TimeoutException as err: + _LOGGER.debug( + "Failed to listen to websocket. Trying to reconnect: %s", + err, ) - if not hass.is_stopping: - await asyncio.sleep(reconnect_time) - reconnect_time = min(reconnect_time * 2, MAX_WS_RECONNECT_TIME) - await self.client_listen( - hass=hass, - entry=entry, - automower_client=automower_client, - reconnect_time=reconnect_time, + await asyncio.sleep(self.reconnect_time) + self.reconnect_time = min(self.reconnect_time * 2, MAX_WS_RECONNECT_TIME) + entry.async_create_background_task( + hass, + self.client_listen(hass, entry, automower_client), + "reconnect_task", ) diff --git a/homeassistant/components/husqvarna_automower/device_tracker.py b/homeassistant/components/husqvarna_automower/device_tracker.py index 74ad624a515..520eaceb1d0 100644 --- a/homeassistant/components/husqvarna_automower/device_tracker.py +++ b/homeassistant/components/husqvarna_automower/device_tracker.py @@ -1,8 +1,6 @@ """Creates the device tracker entity for the mower.""" -from typing import TYPE_CHECKING - -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import TrackerEntity from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -10,6 +8,9 @@ from . 
import AutomowerConfigEntry from .coordinator import AutomowerDataUpdateCoordinator from .entity import AutomowerBaseEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -39,21 +40,12 @@ class AutomowerDeviceTrackerEntity(AutomowerBaseEntity, TrackerEntity): super().__init__(mower_id, coordinator) self._attr_unique_id = mower_id - @property - def source_type(self) -> SourceType: - """Return the source type of the device.""" - return SourceType.GPS - @property def latitude(self) -> float: """Return latitude value of the device.""" - if TYPE_CHECKING: - assert self.mower_attributes.positions is not None return self.mower_attributes.positions[0].latitude @property def longitude(self) -> float: """Return longitude value of the device.""" - if TYPE_CHECKING: - assert self.mower_attributes.positions is not None return self.mower_attributes.positions[0].longitude diff --git a/homeassistant/components/husqvarna_automower/diagnostics.py b/homeassistant/components/husqvarna_automower/diagnostics.py index 658f6f94445..ceeec0f3e0d 100644 --- a/homeassistant/components/husqvarna_automower/diagnostics.py +++ b/homeassistant/components/husqvarna_automower/diagnostics.py @@ -6,7 +6,6 @@ import logging from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry @@ -26,7 +25,7 @@ _LOGGER = logging.getLogger(__name__) async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: AutomowerConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" return async_redact_data(entry.as_dict(), TO_REDACT) diff --git a/homeassistant/components/husqvarna_automower/entity.py b/homeassistant/components/husqvarna_automower/entity.py index 1da49322989..5b5156e5f1d 100644 --- a/homeassistant/components/husqvarna_automower/entity.py +++ b/homeassistant/components/husqvarna_automower/entity.py @@ -1,14 +1,17 @@ """Platform for Husqvarna Automower base entity.""" +from __future__ import annotations + import asyncio -from collections.abc import Awaitable, Callable, Coroutine +from collections.abc import Callable, Coroutine import functools import logging -from typing import Any +from typing import TYPE_CHECKING, Any, Concatenate from aioautomower.exceptions import ApiException -from aioautomower.model import MowerActivities, MowerAttributes, MowerStates +from aioautomower.model import MowerActivities, MowerAttributes, MowerStates, WorkArea +from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -34,18 +37,34 @@ ERROR_STATES = [ ] -def handle_sending_exception( +@callback +def _check_error_free(mower_attributes: MowerAttributes) -> bool: + """Check if the mower has any errors.""" + return ( + mower_attributes.mower.state not in ERROR_STATES + or mower_attributes.mower.activity not in ERROR_ACTIVITIES + ) + + +@callback +def _work_area_translation_key(work_area_id: int, key: str) -> str: + """Return the translation key.""" + if work_area_id == 0: + return f"my_lawn_{key}" + return f"work_area_{key}" + + +type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, 
_P], Coroutine[Any, Any, _R]] + + +def handle_sending_exception[_Entity: AutomowerBaseEntity, **_P]( poll_after_sending: bool = False, -) -> Callable[ - [Callable[..., Awaitable[Any]]], Callable[..., Coroutine[Any, Any, None]] -]: +) -> Callable[[_FuncType[_Entity, _P, Any]], _FuncType[_Entity, _P, None]]: """Handle exceptions while sending a command and optionally refresh coordinator.""" - def decorator( - func: Callable[..., Awaitable[Any]], - ) -> Callable[..., Coroutine[Any, Any, None]]: + def decorator(func: _FuncType[_Entity, _P, Any]) -> _FuncType[_Entity, _P, None]: @functools.wraps(func) - async def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: + async def wrapper(self: _Entity, *args: _P.args, **kwargs: _P.kwargs) -> None: try: await func(self, *args, **kwargs) except ApiException as exception: @@ -82,7 +101,9 @@ class AutomowerBaseEntity(CoordinatorEntity[AutomowerDataUpdateCoordinator]): self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, mower_id)}, manufacturer="Husqvarna", - model=self.mower_attributes.system.model, + model=self.mower_attributes.system.model.removeprefix( + "HUSQVARNA " + ).removeprefix("Husqvarna "), name=self.mower_attributes.system.name, serial_number=self.mower_attributes.system.serial_number, suggested_area="Garden", @@ -109,7 +130,39 @@ class AutomowerControlEntity(AutomowerAvailableEntity): @property def available(self) -> bool: """Return True if the device is available.""" - return super().available and ( - self.mower_attributes.mower.state not in ERROR_STATES - or self.mower_attributes.mower.activity not in ERROR_ACTIVITIES - ) + return super().available and _check_error_free(self.mower_attributes) + + +class WorkAreaAvailableEntity(AutomowerAvailableEntity): + """Base entity for work areas.""" + + def __init__( + self, + mower_id: str, + coordinator: AutomowerDataUpdateCoordinator, + work_area_id: int, + ) -> None: + """Initialize AutomowerEntity.""" + super().__init__(mower_id, coordinator) + self.work_area_id = work_area_id + + @property + def work_areas(self) -> dict[int, WorkArea]: + """Get the work areas from the mower attributes.""" + if TYPE_CHECKING: + assert self.mower_attributes.work_areas is not None + return self.mower_attributes.work_areas + + @property + def work_area_attributes(self) -> WorkArea: + """Get the work area attributes of the current work area.""" + return self.work_areas[self.work_area_id] + + @property + def available(self) -> bool: + """Return True if the work area is available and the mower has no errors.""" + return super().available and self.work_area_id in self.work_areas + + +class WorkAreaControlEntity(WorkAreaAvailableEntity, AutomowerControlEntity): + """Base entity for work areas with control function.""" diff --git a/homeassistant/components/husqvarna_automower/icons.json b/homeassistant/components/husqvarna_automower/icons.json index 9dc1cbeb667..14ac5ce4068 100644 --- a/homeassistant/components/husqvarna_automower/icons.json +++ b/homeassistant/components/husqvarna_automower/icons.json @@ -8,6 +8,11 @@ "default": "mdi:debug-step-into" } }, + "button": { + "sync_clock": { + "default": "mdi:clock-check-outline" + } + }, "number": { "cutting_height": { "default": "mdi:grass" @@ -22,6 +27,12 @@ "error": { "default": "mdi:alert-circle-outline" }, + "my_lawn_last_time_completed": { + "default": "mdi:clock-outline" + }, + "my_lawn_progress": { + "default": "mdi:collage" + }, "number_of_charging_cycles": { "default": "mdi:battery-sync-outline" }, @@ -30,11 +41,21 @@ }, "restricted_reason": { 
"default": "mdi:tooltip-question" + }, + "work_area_last_time_completed": { + "default": "mdi:clock-outline" + }, + "work_area_progress": { + "default": "mdi:collage" } } }, "services": { - "override_schedule": "mdi:debug-step-over", - "override_schedule_work_area": "mdi:land-fields" + "override_schedule": { + "service": "mdi:debug-step-over" + }, + "override_schedule_work_area": { + "service": "mdi:land-fields" + } } } diff --git a/homeassistant/components/husqvarna_automower/lawn_mower.py b/homeassistant/components/husqvarna_automower/lawn_mower.py index ac0f1fd6af2..9b3ce7dab1a 100644 --- a/homeassistant/components/husqvarna_automower/lawn_mower.py +++ b/homeassistant/components/husqvarna_automower/lawn_mower.py @@ -22,11 +22,14 @@ from .const import DOMAIN from .coordinator import AutomowerDataUpdateCoordinator from .entity import AutomowerAvailableEntity, handle_sending_exception +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 1 + DOCKED_ACTIVITIES = (MowerActivities.PARKED_IN_CS, MowerActivities.CHARGING) MOWING_ACTIVITIES = ( MowerActivities.MOWING, MowerActivities.LEAVING, - MowerActivities.GOING_HOME, ) PAUSED_STATES = [ MowerStates.PAUSED, @@ -43,9 +46,6 @@ PARK = "park" OVERRIDE_MODES = [MOW, PARK] -_LOGGER = logging.getLogger(__name__) - - async def async_setup_entry( hass: HomeAssistant, entry: AutomowerConfigEntry, @@ -107,6 +107,8 @@ class AutomowerLawnMowerEntity(AutomowerAvailableEntity, LawnMowerEntity): return LawnMowerActivity.PAUSED if mower_attributes.mower.activity in MOWING_ACTIVITIES: return LawnMowerActivity.MOWING + if mower_attributes.mower.activity == MowerActivities.GOING_HOME: + return LawnMowerActivity.RETURNING if (mower_attributes.mower.state == "RESTRICTED") or ( mower_attributes.mower.activity in DOCKED_ACTIVITIES ): diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index 7326408e403..02e87a3a772 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -7,5 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower", "iot_class": "cloud_push", "loggers": ["aioautomower"], - "requirements": ["aioautomower==2024.8.0"] + "quality_scale": "silver", + "requirements": ["aioautomower==2024.12.0"] } diff --git a/homeassistant/components/husqvarna_automower/number.py b/homeassistant/components/husqvarna_automower/number.py index 540f6aa712e..e69b52fab93 100644 --- a/homeassistant/components/husqvarna_automower/number.py +++ b/homeassistant/components/husqvarna_automower/number.py @@ -9,17 +9,23 @@ from aioautomower.model import MowerAttributes, WorkArea from aioautomower.session import AutomowerSession from homeassistant.components.number import NumberEntity, NumberEntityDescription -from homeassistant.const import PERCENTAGE, EntityCategory, Platform +from homeassistant.const import PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AutomowerConfigEntry +from . 
import AutomowerConfigEntry, remove_work_area_entities from .coordinator import AutomowerDataUpdateCoordinator -from .entity import AutomowerControlEntity, handle_sending_exception +from .entity import ( + AutomowerControlEntity, + WorkAreaControlEntity, + _work_area_translation_key, + handle_sending_exception, +) _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 1 + @callback def _async_get_cutting_height(data: MowerAttributes) -> int: @@ -30,14 +36,6 @@ def _async_get_cutting_height(data: MowerAttributes) -> int: return data.settings.cutting_height -@callback -def _work_area_translation_key(work_area_id: int) -> str: - """Return the translation key.""" - if work_area_id == 0: - return "my_lawn_cutting_height" - return "work_area_cutting_height" - - async def async_set_work_area_cutting_height( coordinator: AutomowerDataUpdateCoordinator, mower_id: str, @@ -45,7 +43,7 @@ async def async_set_work_area_cutting_height( work_area_id: int, ) -> None: """Set cutting height for work area.""" - await coordinator.api.commands.set_cutting_height_workarea( + await coordinator.api.commands.workarea_settings( mower_id, int(cheight), work_area_id ) @@ -68,7 +66,7 @@ class AutomowerNumberEntityDescription(NumberEntityDescription): set_value_fn: Callable[[AutomowerSession, str, float], Awaitable[Any]] -NUMBER_TYPES: tuple[AutomowerNumberEntityDescription, ...] = ( +MOWER_NUMBER_TYPES: tuple[AutomowerNumberEntityDescription, ...] = ( AutomowerNumberEntityDescription( key="cutting_height", translation_key="cutting_height", @@ -84,18 +82,18 @@ NUMBER_TYPES: tuple[AutomowerNumberEntityDescription, ...] = ( @dataclass(frozen=True, kw_only=True) -class AutomowerWorkAreaNumberEntityDescription(NumberEntityDescription): +class WorkAreaNumberEntityDescription(NumberEntityDescription): """Describes Automower work area number entity.""" value_fn: Callable[[WorkArea], int] - translation_key_fn: Callable[[int], str] + translation_key_fn: Callable[[int, str], str] set_value_fn: Callable[ [AutomowerDataUpdateCoordinator, str, float, int], Awaitable[Any] ] -WORK_AREA_NUMBER_TYPES: tuple[AutomowerWorkAreaNumberEntityDescription, ...] = ( - AutomowerWorkAreaNumberEntityDescription( +WORK_AREA_NUMBER_TYPES: tuple[WorkAreaNumberEntityDescription, ...] 
= ( + WorkAreaNumberEntityDescription( key="cutting_height_work_area", translation_key_fn=_work_area_translation_key, entity_category=EntityCategory.CONFIG, @@ -113,26 +111,44 @@ async def async_setup_entry( ) -> None: """Set up number platform.""" coordinator = entry.runtime_data - entities: list[NumberEntity] = [] + current_work_areas: dict[str, set[int]] = {} - for mower_id in coordinator.data: - if coordinator.data[mower_id].capabilities.work_areas: - _work_areas = coordinator.data[mower_id].work_areas - if _work_areas is not None: - entities.extend( - AutomowerWorkAreaNumberEntity( - mower_id, coordinator, description, work_area_id + async_add_entities( + AutomowerNumberEntity(mower_id, coordinator, description) + for mower_id in coordinator.data + for description in MOWER_NUMBER_TYPES + if description.exists_fn(coordinator.data[mower_id]) + ) + + def _async_work_area_listener() -> None: + """Listen for new work areas and add/remove entities as needed.""" + for mower_id in coordinator.data: + if ( + coordinator.data[mower_id].capabilities.work_areas + and (_work_areas := coordinator.data[mower_id].work_areas) is not None + ): + received_work_areas = set(_work_areas.keys()) + current_work_area_set = current_work_areas.setdefault(mower_id, set()) + + new_work_areas = received_work_areas - current_work_area_set + removed_work_areas = current_work_area_set - received_work_areas + + if new_work_areas: + current_work_area_set.update(new_work_areas) + async_add_entities( + WorkAreaNumberEntity( + mower_id, coordinator, description, work_area_id + ) + for description in WORK_AREA_NUMBER_TYPES + for work_area_id in new_work_areas ) - for description in WORK_AREA_NUMBER_TYPES - for work_area_id in _work_areas - ) - async_remove_entities(hass, coordinator, entry, mower_id) - entities.extend( - AutomowerNumberEntity(mower_id, coordinator, description) - for description in NUMBER_TYPES - if description.exists_fn(coordinator.data[mower_id]) - ) - async_add_entities(entities) + + if removed_work_areas: + remove_work_area_entities(hass, entry, removed_work_areas, mower_id) + current_work_area_set.difference_update(removed_work_areas) + + coordinator.async_add_listener(_async_work_area_listener) + _async_work_area_listener() class AutomowerNumberEntity(AutomowerControlEntity, NumberEntity): @@ -164,41 +180,37 @@ class AutomowerNumberEntity(AutomowerControlEntity, NumberEntity): ) -class AutomowerWorkAreaNumberEntity(AutomowerControlEntity, NumberEntity): - """Defining the AutomowerWorkAreaNumberEntity with AutomowerWorkAreaNumberEntityDescription.""" +class WorkAreaNumberEntity(WorkAreaControlEntity, NumberEntity): + """Defining the WorkAreaNumberEntity with WorkAreaNumberEntityDescription.""" - entity_description: AutomowerWorkAreaNumberEntityDescription + entity_description: WorkAreaNumberEntityDescription def __init__( self, mower_id: str, coordinator: AutomowerDataUpdateCoordinator, - description: AutomowerWorkAreaNumberEntityDescription, + description: WorkAreaNumberEntityDescription, work_area_id: int, ) -> None: """Set up AutomowerNumberEntity.""" - super().__init__(mower_id, coordinator) + super().__init__(mower_id, coordinator, work_area_id) self.entity_description = description - self.work_area_id = work_area_id self._attr_unique_id = f"{mower_id}_{work_area_id}_{description.key}" - self._attr_translation_placeholders = {"work_area": self.work_area.name} - - @property - def work_area(self) -> WorkArea: - """Get the mower attributes of the current mower.""" - if TYPE_CHECKING: - assert 
self.mower_attributes.work_areas is not None - return self.mower_attributes.work_areas[self.work_area_id] + self._attr_translation_placeholders = { + "work_area": self.work_area_attributes.name + } @property def translation_key(self) -> str: """Return the translation key of the work area.""" - return self.entity_description.translation_key_fn(self.work_area_id) + return self.entity_description.translation_key_fn( + self.work_area_id, self.entity_description.key + ) @property def native_value(self) -> float: """Return the state of the number.""" - return self.entity_description.value_fn(self.work_area) + return self.entity_description.value_fn(self.work_area_attributes) @handle_sending_exception(poll_after_sending=True) async def async_set_native_value(self, value: float) -> None: @@ -206,28 +218,3 @@ class AutomowerWorkAreaNumberEntity(AutomowerControlEntity, NumberEntity): await self.entity_description.set_value_fn( self.coordinator, self.mower_id, value, self.work_area_id ) - - -@callback -def async_remove_entities( - hass: HomeAssistant, - coordinator: AutomowerDataUpdateCoordinator, - entry: AutomowerConfigEntry, - mower_id: str, -) -> None: - """Remove deleted work areas from Home Assistant.""" - entity_reg = er.async_get(hass) - active_work_areas = set() - _work_areas = coordinator.data[mower_id].work_areas - if _work_areas is not None: - for work_area_id in _work_areas: - uid = f"{mower_id}_{work_area_id}_cutting_height_work_area" - active_work_areas.add(uid) - for entity_entry in er.async_entries_for_config_entry(entity_reg, entry.entry_id): - if ( - entity_entry.domain == Platform.NUMBER - and (split := entity_entry.unique_id.split("_"))[0] == mower_id - and split[-1] == "area" - and entity_entry.unique_id not in active_work_areas - ): - entity_reg.async_remove(entity_entry.entity_id) diff --git a/homeassistant/components/husqvarna_automower/quality_scale.yaml b/homeassistant/components/husqvarna_automower/quality_scale.yaml new file mode 100644 index 00000000000..2287ccb4d4f --- /dev/null +++ b/homeassistant/components/husqvarna_automower/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + dependency-transparency: done + action-setup: + status: done + comment: | + The integration only has an entity service, registered in the platform. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: done + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: done + reauthentication-flow: done + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: no configuration options + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: todo + comment: Discovery not implemented, yet. + discovery: + status: todo + comment: | + Most of the mowers are connected with a SIM card, some of the also have a + Wifi connection. 
Check, if discovery with Wifi is possible + docs-data-update: todo + docs-examples: todo + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: todo + dynamic-devices: + status: todo + comment: Add devices dynamically + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: + status: exempt + comment: no configuration possible + repair-issues: done + stale-devices: + status: todo + comment: We only remove devices on reload + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/husqvarna_automower/select.py b/homeassistant/components/husqvarna_automower/select.py index a9431acaae3..65960e897e4 100644 --- a/homeassistant/components/husqvarna_automower/select.py +++ b/homeassistant/components/husqvarna_automower/select.py @@ -16,6 +16,7 @@ from .entity import AutomowerControlEntity, handle_sending_exception _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 1 HEADLIGHT_MODES: list = [ HeadlightModes.ALWAYS_OFF.lower(), diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index 0e3e6771cec..fb8603623e4 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -4,11 +4,16 @@ from collections.abc import Callable, Mapping from dataclasses import dataclass from datetime import datetime import logging +from operator import attrgetter from typing import TYPE_CHECKING, Any -from zoneinfo import ZoneInfo -from aioautomower.model import MowerAttributes, MowerModes, RestrictedReasons -from aioautomower.utils import naive_to_aware +from aioautomower.model import ( + MowerAttributes, + MowerModes, + MowerStates, + RestrictedReasons, + WorkArea, +) from homeassistant.components.sensor import ( SensorDeviceClass, @@ -20,13 +25,18 @@ from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfLength, UnitOf from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.util import dt as dt_util from . 
import AutomowerConfigEntry from .coordinator import AutomowerDataUpdateCoordinator -from .entity import AutomowerBaseEntity +from .entity import ( + AutomowerBaseEntity, + WorkAreaAvailableEntity, + _work_area_translation_key, +) _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 ATTR_WORK_AREA_ID_ASSIGNMENT = "work_area_id_assignment" @@ -82,6 +92,9 @@ ERROR_KEY_LIST = [ "docking_sensor_defect", "electronic_problem", "empty_battery", + MowerStates.ERROR.lower(), + MowerStates.ERROR_AT_POWER_UP.lower(), + MowerStates.FATAL_ERROR.lower(), "folding_cutting_deck_sensor_defect", "folding_sensor_activated", "geofence_problem", @@ -176,17 +189,23 @@ ERROR_KEY_LIST = [ "zone_generator_problem", ] +ERROR_STATES = { + MowerStates.ERROR, + MowerStates.ERROR_AT_POWER_UP, + MowerStates.FATAL_ERROR, +} + RESTRICTED_REASONS: list = [ - RestrictedReasons.ALL_WORK_AREAS_COMPLETED.lower(), - RestrictedReasons.DAILY_LIMIT.lower(), - RestrictedReasons.EXTERNAL.lower(), - RestrictedReasons.FOTA.lower(), - RestrictedReasons.FROST.lower(), - RestrictedReasons.NONE.lower(), - RestrictedReasons.NOT_APPLICABLE.lower(), - RestrictedReasons.PARK_OVERRIDE.lower(), - RestrictedReasons.SENSOR.lower(), - RestrictedReasons.WEEK_SCHEDULE.lower(), + RestrictedReasons.ALL_WORK_AREAS_COMPLETED, + RestrictedReasons.DAILY_LIMIT, + RestrictedReasons.EXTERNAL, + RestrictedReasons.FOTA, + RestrictedReasons.FROST, + RestrictedReasons.NONE, + RestrictedReasons.NOT_APPLICABLE, + RestrictedReasons.PARK_OVERRIDE, + RestrictedReasons.SENSOR, + RestrictedReasons.WEEK_SCHEDULE, ] STATE_NO_WORK_AREA_ACTIVE = "no_work_area_active" @@ -225,6 +244,16 @@ def _get_current_work_area_dict(data: MowerAttributes) -> Mapping[str, Any]: return {ATTR_WORK_AREA_ID_ASSIGNMENT: data.work_area_dict} +@callback +def _get_error_string(data: MowerAttributes) -> str: + """Return the error key, if not provided the mower state or `no error`.""" + if data.mower.error_key is not None: + return data.mower.error_key + if data.mower.state in ERROR_STATES: + return data.mower.state.lower() + return "no_error" + + @dataclass(frozen=True, kw_only=True) class AutomowerSensorEntityDescription(SensorEntityDescription): """Describes Automower sensor entity.""" @@ -237,21 +266,21 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[MowerAttributes], StateType | datetime] -SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( +MOWER_SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( AutomowerSensorEntityDescription( key="battery_percent", state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.BATTERY, native_unit_of_measurement=PERCENTAGE, - value_fn=lambda data: data.battery.battery_percent, + value_fn=attrgetter("battery.battery_percent"), ), AutomowerSensorEntityDescription( key="mode", translation_key="mode", device_class=SensorDeviceClass.ENUM, - option_fn=lambda data: [option.lower() for option in list(MowerModes)], + option_fn=lambda data: list(MowerModes), value_fn=( - lambda data: data.mower.mode.lower() + lambda data: data.mower.mode if data.mower.mode != MowerModes.UNKNOWN else None ), @@ -264,7 +293,7 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] 
= ( native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.cutting_blade_usage_time is not None, - value_fn=lambda data: data.statistics.cutting_blade_usage_time, + value_fn=attrgetter("statistics.cutting_blade_usage_time"), ), AutomowerSensorEntityDescription( key="total_charging_time", @@ -275,7 +304,7 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.total_charging_time is not None, - value_fn=lambda data: data.statistics.total_charging_time, + value_fn=attrgetter("statistics.total_charging_time"), ), AutomowerSensorEntityDescription( key="total_cutting_time", @@ -286,7 +315,7 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.total_cutting_time is not None, - value_fn=lambda data: data.statistics.total_cutting_time, + value_fn=attrgetter("statistics.total_cutting_time"), ), AutomowerSensorEntityDescription( key="total_running_time", @@ -297,7 +326,7 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.total_running_time is not None, - value_fn=lambda data: data.statistics.total_running_time, + value_fn=attrgetter("statistics.total_running_time"), ), AutomowerSensorEntityDescription( key="total_searching_time", @@ -308,7 +337,7 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.total_searching_time is not None, - value_fn=lambda data: data.statistics.total_searching_time, + value_fn=attrgetter("statistics.total_searching_time"), ), AutomowerSensorEntityDescription( key="number_of_charging_cycles", @@ -316,15 +345,16 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.TOTAL, exists_fn=lambda data: data.statistics.number_of_charging_cycles is not None, - value_fn=lambda data: data.statistics.number_of_charging_cycles, + value_fn=attrgetter("statistics.number_of_charging_cycles"), ), AutomowerSensorEntityDescription( key="number_of_collisions", translation_key="number_of_collisions", entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, state_class=SensorStateClass.TOTAL, exists_fn=lambda data: data.statistics.number_of_collisions is not None, - value_fn=lambda data: data.statistics.number_of_collisions, + value_fn=attrgetter("statistics.number_of_collisions"), ), AutomowerSensorEntityDescription( key="total_drive_distance", @@ -335,32 +365,27 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] 
= ( native_unit_of_measurement=UnitOfLength.METERS, suggested_unit_of_measurement=UnitOfLength.KILOMETERS, exists_fn=lambda data: data.statistics.total_drive_distance is not None, - value_fn=lambda data: data.statistics.total_drive_distance, + value_fn=attrgetter("statistics.total_drive_distance"), ), AutomowerSensorEntityDescription( key="next_start_timestamp", translation_key="next_start_timestamp", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: naive_to_aware( - data.planner.next_start_datetime_naive, - ZoneInfo(str(dt_util.DEFAULT_TIME_ZONE)), - ), + value_fn=attrgetter("planner.next_start_datetime"), ), AutomowerSensorEntityDescription( key="error", translation_key="error", device_class=SensorDeviceClass.ENUM, option_fn=lambda data: ERROR_KEY_LIST, - value_fn=lambda data: ( - "no_error" if data.mower.error_key is None else data.mower.error_key - ), + value_fn=_get_error_string, ), AutomowerSensorEntityDescription( key="restricted_reason", translation_key="restricted_reason", device_class=SensorDeviceClass.ENUM, option_fn=lambda data: RESTRICTED_REASONS, - value_fn=lambda data: data.planner.restricted_reason.lower(), + value_fn=attrgetter("planner.restricted_reason"), ), AutomowerSensorEntityDescription( key="work_area", @@ -374,6 +399,34 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( ) +@dataclass(frozen=True, kw_only=True) +class WorkAreaSensorEntityDescription(SensorEntityDescription): + """Describes the work area sensor entities.""" + + exists_fn: Callable[[WorkArea], bool] = lambda _: True + value_fn: Callable[[WorkArea], StateType | datetime] + translation_key_fn: Callable[[int, str], str] + + +WORK_AREA_SENSOR_TYPES: tuple[WorkAreaSensorEntityDescription, ...] = ( + WorkAreaSensorEntityDescription( + key="progress", + translation_key_fn=_work_area_translation_key, + exists_fn=lambda data: data.progress is not None, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=attrgetter("progress"), + ), + WorkAreaSensorEntityDescription( + key="last_time_completed", + translation_key_fn=_work_area_translation_key, + exists_fn=lambda data: data.last_time_completed is not None, + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=attrgetter("last_time_completed"), + ), +) + + async def async_setup_entry( hass: HomeAssistant, entry: AutomowerConfigEntry, @@ -381,13 +434,45 @@ async def async_setup_entry( ) -> None: """Set up sensor platform.""" coordinator = entry.runtime_data + current_work_areas: dict[str, set[int]] = {} + async_add_entities( AutomowerSensorEntity(mower_id, coordinator, description) - for mower_id in coordinator.data - for description in SENSOR_TYPES - if description.exists_fn(coordinator.data[mower_id]) + for mower_id, data in coordinator.data.items() + for description in MOWER_SENSOR_TYPES + if description.exists_fn(data) ) + def _async_work_area_listener() -> None: + """Listen for new work areas and add sensor entities if they did not exist. + + Listening for deletable work areas is managed in the number platform. 
+ """ + for mower_id in coordinator.data: + if ( + coordinator.data[mower_id].capabilities.work_areas + and (_work_areas := coordinator.data[mower_id].work_areas) is not None + ): + received_work_areas = set(_work_areas.keys()) + new_work_areas = received_work_areas - current_work_areas.get( + mower_id, set() + ) + if new_work_areas: + current_work_areas.setdefault(mower_id, set()).update( + new_work_areas + ) + async_add_entities( + WorkAreaSensorEntity( + mower_id, coordinator, description, work_area_id + ) + for description in WORK_AREA_SENSOR_TYPES + for work_area_id in new_work_areas + if description.exists_fn(_work_areas[work_area_id]) + ) + + coordinator.async_add_listener(_async_work_area_listener) + _async_work_area_listener() + class AutomowerSensorEntity(AutomowerBaseEntity, SensorEntity): """Defining the Automower Sensors with AutomowerSensorEntityDescription.""" @@ -420,3 +505,36 @@ class AutomowerSensorEntity(AutomowerBaseEntity, SensorEntity): def extra_state_attributes(self) -> Mapping[str, Any] | None: """Return the state attributes.""" return self.entity_description.extra_state_attributes_fn(self.mower_attributes) + + +class WorkAreaSensorEntity(WorkAreaAvailableEntity, SensorEntity): + """Defining the Work area sensors with WorkAreaSensorEntityDescription.""" + + entity_description: WorkAreaSensorEntityDescription + + def __init__( + self, + mower_id: str, + coordinator: AutomowerDataUpdateCoordinator, + description: WorkAreaSensorEntityDescription, + work_area_id: int, + ) -> None: + """Set up AutomowerSensors.""" + super().__init__(mower_id, coordinator, work_area_id) + self.entity_description = description + self._attr_unique_id = f"{mower_id}_{work_area_id}_{description.key}" + self._attr_translation_placeholders = { + "work_area": self.work_area_attributes.name + } + + @property + def native_value(self) -> StateType | datetime: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.work_area_attributes) + + @property + def translation_key(self) -> str: + """Return the translation key of the work area.""" + return self.entity_description.translation_key_fn( + self.work_area_id, self.entity_description.key + ) diff --git a/homeassistant/components/husqvarna_automower/strings.json b/homeassistant/components/husqvarna_automower/strings.json index c34a5dd3340..d4c91e29f7d 100644 --- a/homeassistant/components/husqvarna_automower/strings.json +++ b/homeassistant/components/husqvarna_automower/strings.json @@ -27,7 +27,9 @@ "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "wrong_account": "You can only reauthenticate this entry with the same Husqvarna account.", - "missing_amc_scope": "The `Authentication API` and the `Automower Connect API` are not connected to your application in the Husqvarna Developer Portal." 
+ "no_mower_connected": "No mowers connected to this account.", + "missing_amc_scope": "The `Authentication API` and the `Automower Connect API` are not connected to your application in the Husqvarna Developer Portal.", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" @@ -45,16 +47,19 @@ "button": { "confirm_error": { "name": "Confirm error" + }, + "sync_clock": { + "name": "Sync clock" } }, "number": { "cutting_height": { "name": "Cutting height" }, - "my_lawn_cutting_height": { - "name": "My lawn cutting height " + "my_lawn_cutting_height_work_area": { + "name": "My lawn cutting height" }, - "work_area_cutting_height": { + "work_area_cutting_height_work_area": { "name": "{work_area} cutting height" } }, @@ -115,6 +120,9 @@ "docking_sensor_defect": "Docking sensor defect", "electronic_problem": "Electronic problem", "empty_battery": "Empty battery", + "error": "Error", + "error_at_power_up": "Error at power up", + "fatal_error": "Fatal error", "folding_cutting_deck_sensor_defect": "Folding cutting deck sensor defect", "folding_sensor_activated": "Folding sensor activated", "geofence_problem": "Geofence problem", @@ -198,6 +206,12 @@ "zone_generator_problem": "Zone generator problem" } }, + "my_lawn_last_time_completed": { + "name": "My lawn last time completed" + }, + "my_lawn_progress": { + "name": "My lawn progress" + }, "number_of_charging_cycles": { "name": "Number of charging cycles" }, @@ -260,6 +274,12 @@ "name": "Work area ID assignment" } } + }, + "work_area_last_time_completed": { + "name": "{work_area} last time completed" + }, + "work_area_progress": { + "name": "{work_area} progress" } }, "switch": { @@ -268,6 +288,9 @@ }, "stay_out_zones": { "name": "Avoid {stay_out_zone}" + }, + "my_lawn_work_area": { + "name": "My lawn" } } }, @@ -290,6 +313,12 @@ } } }, + "issues": { + "deprecated_entity": { + "title": "The Husqvarna Automower {entity_name} sensor is deprecated", + "description": "The Husqvarna Automower entity `{entity}` is deprecated and will be removed in a future release.\nYou can use the new returning state of the lawn mower entity instead.\nPlease update your automations and scripts to replace the sensor entity with the newly added lawn mower entity.\nWhen you are done migrating you can disable `{entity}`." + } + }, "services": { "override_schedule": { "name": "Override schedule", diff --git a/homeassistant/components/husqvarna_automower/switch.py b/homeassistant/components/husqvarna_automower/switch.py index a4b60054583..352b4c59ba1 100644 --- a/homeassistant/components/husqvarna_automower/switch.py +++ b/homeassistant/components/husqvarna_automower/switch.py @@ -6,14 +6,20 @@ from typing import TYPE_CHECKING, Any from aioautomower.model import MowerModes, StayOutZones, Zone from homeassistant.components.switch import SwitchEntity -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AutomowerConfigEntry from .coordinator import AutomowerDataUpdateCoordinator -from .entity import AutomowerControlEntity, handle_sending_exception +from .entity import ( + AutomowerControlEntity, + WorkAreaControlEntity, + _work_area_translation_key, + handle_sending_exception, +) + +PARALLEL_UPDATES = 1 _LOGGER = logging.getLogger(__name__) @@ -25,23 +31,82 @@ async def async_setup_entry( ) -> None: """Set up switch platform.""" coordinator = entry.runtime_data - entities: list[SwitchEntity] = [] - entities.extend( + current_work_areas: dict[str, set[int]] = {} + current_stay_out_zones: dict[str, set[str]] = {} + + async_add_entities( AutomowerScheduleSwitchEntity(mower_id, coordinator) for mower_id in coordinator.data ) - for mower_id in coordinator.data: - if coordinator.data[mower_id].capabilities.stay_out_zones: - _stay_out_zones = coordinator.data[mower_id].stay_out_zones - if _stay_out_zones is not None: - entities.extend( - AutomowerStayOutZoneSwitchEntity( - coordinator, mower_id, stay_out_zone_uid - ) - for stay_out_zone_uid in _stay_out_zones.zones + + def _async_work_area_listener() -> None: + """Listen for new work areas and add switch entities if they did not exist. + + Listening for deletable work areas is managed in the number platform. + """ + for mower_id in coordinator.data: + if ( + coordinator.data[mower_id].capabilities.work_areas + and (_work_areas := coordinator.data[mower_id].work_areas) is not None + ): + received_work_areas = set(_work_areas.keys()) + new_work_areas = received_work_areas - current_work_areas.get( + mower_id, set() ) - async_remove_entities(hass, coordinator, entry, mower_id) - async_add_entities(entities) + if new_work_areas: + current_work_areas.setdefault(mower_id, set()).update( + new_work_areas + ) + async_add_entities( + WorkAreaSwitchEntity(coordinator, mower_id, work_area_id) + for work_area_id in new_work_areas + ) + + def _remove_stay_out_zone_entities( + removed_stay_out_zones: set, mower_id: str + ) -> None: + """Remove all unused stay-out zones for all platforms.""" + entity_reg = er.async_get(hass) + for entity_entry in er.async_entries_for_config_entry( + entity_reg, entry.entry_id + ): + for stay_out_zone_uid in removed_stay_out_zones: + if entity_entry.unique_id.startswith(f"{mower_id}_{stay_out_zone_uid}"): + entity_reg.async_remove(entity_entry.entity_id) + + def _async_stay_out_zone_listener() -> None: + """Listen for new stay-out zones and add/remove switch entities if they did not exist.""" + for mower_id in coordinator.data: + if ( + coordinator.data[mower_id].capabilities.stay_out_zones + and (_stay_out_zones := coordinator.data[mower_id].stay_out_zones) + is not None + ): + received_stay_out_zones = set(_stay_out_zones.zones) + current_stay_out_zones_set = current_stay_out_zones.get(mower_id, set()) + new_stay_out_zones = ( + received_stay_out_zones - current_stay_out_zones_set + ) + removed_stay_out_zones = ( + current_stay_out_zones_set - received_stay_out_zones + ) + if new_stay_out_zones: + current_stay_out_zones.setdefault(mower_id, set()).update( + new_stay_out_zones + ) + async_add_entities( + StayOutZoneSwitchEntity( + coordinator, mower_id, stay_out_zone_uid + ) + for stay_out_zone_uid in new_stay_out_zones + ) + if removed_stay_out_zones: + _remove_stay_out_zone_entities(removed_stay_out_zones, mower_id) + + coordinator.async_add_listener(_async_work_area_listener) + coordinator.async_add_listener(_async_stay_out_zone_listener) + _async_work_area_listener() + _async_stay_out_zone_listener() class 
AutomowerScheduleSwitchEntity(AutomowerControlEntity, SwitchEntity): @@ -74,7 +139,7 @@ class AutomowerScheduleSwitchEntity(AutomowerControlEntity, SwitchEntity): await self.coordinator.api.commands.resume_schedule(self.mower_id) -class AutomowerStayOutZoneSwitchEntity(AutomowerControlEntity, SwitchEntity): +class StayOutZoneSwitchEntity(AutomowerControlEntity, SwitchEntity): """Defining the Automower stay out zone switch.""" _attr_translation_key = "stay_out_zones" @@ -131,26 +196,42 @@ class AutomowerStayOutZoneSwitchEntity(AutomowerControlEntity, SwitchEntity): ) -@callback -def async_remove_entities( - hass: HomeAssistant, - coordinator: AutomowerDataUpdateCoordinator, - entry: AutomowerConfigEntry, - mower_id: str, -) -> None: - """Remove deleted stay-out-zones from Home Assistant.""" - entity_reg = er.async_get(hass) - active_zones = set() - _zones = coordinator.data[mower_id].stay_out_zones - if _zones is not None: - for zones_uid in _zones.zones: - uid = f"{mower_id}_{zones_uid}_stay_out_zones" - active_zones.add(uid) - for entity_entry in er.async_entries_for_config_entry(entity_reg, entry.entry_id): - if ( - entity_entry.domain == Platform.SWITCH - and (split := entity_entry.unique_id.split("_"))[0] == mower_id - and split[-1] == "zones" - and entity_entry.unique_id not in active_zones - ): - entity_reg.async_remove(entity_entry.entity_id) +class WorkAreaSwitchEntity(WorkAreaControlEntity, SwitchEntity): + """Defining the Automower work area switch.""" + + def __init__( + self, + coordinator: AutomowerDataUpdateCoordinator, + mower_id: str, + work_area_id: int, + ) -> None: + """Set up Automower switch.""" + super().__init__(mower_id, coordinator, work_area_id) + key = "work_area" + self._attr_translation_key = _work_area_translation_key(work_area_id, key) + self._attr_unique_id = f"{mower_id}_{work_area_id}_{key}" + if self.work_area_attributes.name == "my_lawn": + self._attr_translation_placeholders = { + "work_area": self.work_area_attributes.name + } + else: + self._attr_name = self.work_area_attributes.name + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + return self.work_area_attributes.enabled + + @handle_sending_exception(poll_after_sending=True) + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.coordinator.api.commands.workarea_settings( + self.mower_id, self.work_area_id, enabled=False + ) + + @handle_sending_exception(poll_after_sending=True) + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.coordinator.api.commands.workarea_settings( + self.mower_id, self.work_area_id, enabled=True + ) diff --git a/homeassistant/components/husqvarna_automower_ble/__init__.py b/homeassistant/components/husqvarna_automower_ble/__init__.py new file mode 100644 index 00000000000..2025ba64cf1 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/__init__.py @@ -0,0 +1,63 @@ +"""The Husqvarna Autoconnect Bluetooth integration.""" + +from __future__ import annotations + +from automower_ble.mower import Mower +from bleak import BleakError +from bleak_retry_connector import close_stale_connections_by_address, get_device + +from homeassistant.components import bluetooth +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +from .const import LOGGER +from .coordinator import 
HusqvarnaCoordinator + +PLATFORMS = [ + Platform.LAWN_MOWER, +] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Husqvarna Autoconnect Bluetooth from a config entry.""" + address = entry.data[CONF_ADDRESS] + channel_id = entry.data[CONF_CLIENT_ID] + + mower = Mower(channel_id, address) + + await close_stale_connections_by_address(address) + + LOGGER.debug("connecting to %s with channel ID %s", address, str(channel_id)) + try: + device = bluetooth.async_ble_device_from_address( + hass, address, connectable=True + ) or await get_device(address) + if not await mower.connect(device): + raise ConfigEntryNotReady + except (TimeoutError, BleakError) as exception: + raise ConfigEntryNotReady( + f"Unable to connect to device {address} due to {exception}" + ) from exception + LOGGER.debug("connected and paired") + + model = await mower.get_model() + LOGGER.debug("Connected to Automower: %s", model) + + coordinator = HusqvarnaCoordinator(hass, mower, address, channel_id, model) + + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + coordinator: HusqvarnaCoordinator = entry.runtime_data + await coordinator.async_shutdown() + + return unload_ok diff --git a/homeassistant/components/husqvarna_automower_ble/config_flow.py b/homeassistant/components/husqvarna_automower_ble/config_flow.py new file mode 100644 index 00000000000..72835c22334 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/config_flow.py @@ -0,0 +1,121 @@ +"""Config flow for Husqvarna Bluetooth integration.""" + +from __future__ import annotations + +import random +from typing import Any + +from automower_ble.mower import Mower +from bleak import BleakError +import voluptuous as vol + +from homeassistant.components import bluetooth +from homeassistant.components.bluetooth import BluetoothServiceInfo +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID + +from .const import DOMAIN, LOGGER + + +def _is_supported(discovery_info: BluetoothServiceInfo): + """Check if device is supported.""" + + LOGGER.debug( + "%s manufacturer data: %s", + discovery_info.address, + discovery_info.manufacturer_data, + ) + + manufacturer = any(key == 1062 for key in discovery_info.manufacturer_data) + service_husqvarna = any( + service == "98bd0001-0b0e-421a-84e5-ddbf75dc6de4" + for service in discovery_info.service_uuids + ) + service_generic = any( + service == "00001800-0000-1000-8000-00805f9b34fb" + for service in discovery_info.service_uuids + ) + + return manufacturer and service_husqvarna and service_generic + + +class HusqvarnaAutomowerBleConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Husqvarna Bluetooth.""" + + VERSION = 1 + + def __init__(self) -> None: + """Initialize the config flow.""" + self.address: str | None + + async def async_step_bluetooth( + self, discovery_info: BluetoothServiceInfo + ) -> ConfigFlowResult: + """Handle the bluetooth discovery step.""" + + LOGGER.debug("Discovered device: %s", discovery_info) + if not _is_supported(discovery_info): + return self.async_abort(reason="no_devices_found") + + self.address = discovery_info.address + await 
self.async_set_unique_id(self.address) + self._abort_if_unique_id_configured() + return await self.async_step_confirm() + + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + assert self.address + + device = bluetooth.async_ble_device_from_address( + self.hass, self.address, connectable=True + ) + channel_id = random.randint(1, 0xFFFFFFFF) + + try: + (manufacturer, device_type, model) = await Mower( + channel_id, self.address + ).probe_gatts(device) + except (BleakError, TimeoutError) as exception: + LOGGER.exception("Failed to connect to device: %s", exception) + return self.async_abort(reason="cannot_connect") + + title = manufacturer + " " + device_type + + LOGGER.debug("Found device: %s", title) + + if user_input is not None: + return self.async_create_entry( + title=title, + data={CONF_ADDRESS: self.address, CONF_CLIENT_ID: channel_id}, + ) + + self.context["title_placeholders"] = { + "name": title, + } + + self._set_confirm_only() + return self.async_show_form( + step_id="confirm", + description_placeholders=self.context["title_placeholders"], + ) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + if user_input is not None: + self.address = user_input[CONF_ADDRESS] + await self.async_set_unique_id(self.address, raise_on_progress=False) + self._abort_if_unique_id_configured() + return await self.async_step_confirm() + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required(CONF_ADDRESS): str, + }, + ), + ) diff --git a/homeassistant/components/husqvarna_automower_ble/const.py b/homeassistant/components/husqvarna_automower_ble/const.py new file mode 100644 index 00000000000..7117d0c9e29 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/const.py @@ -0,0 +1,8 @@ +"""Constants for the Husqvarna Automower Bluetooth integration.""" + +import logging + +DOMAIN = "husqvarna_automower_ble" +MANUFACTURER = "Husqvarna" + +LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/husqvarna_automower_ble/coordinator.py b/homeassistant/components/husqvarna_automower_ble/coordinator.py new file mode 100644 index 00000000000..c577ccd9196 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/coordinator.py @@ -0,0 +1,100 @@ +"""Provides the DataUpdateCoordinator.""" + +from __future__ import annotations + +from datetime import timedelta + +from automower_ble.mower import Mower +from bleak import BleakError +from bleak_retry_connector import close_stale_connections_by_address + +from homeassistant.components import bluetooth +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER + +SCAN_INTERVAL = timedelta(seconds=60) + + +class HusqvarnaCoordinator(DataUpdateCoordinator[dict[str, bytes]]): + """Class to manage fetching data.""" + + def __init__( + self, + hass: HomeAssistant, + mower: Mower, + address: str, + channel_id: str, + model: str, + ) -> None: + """Initialize global data updater.""" + super().__init__( + hass=hass, + logger=LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.address = address + self.channel_id = channel_id + self.model = model + self.mower = mower + + async def async_shutdown(self) -> None: + """Shutdown coordinator and any connection.""" + LOGGER.debug("Shutdown") + await 
super().async_shutdown() + if self.mower.is_connected(): + await self.mower.disconnect() + + async def _async_find_device(self): + LOGGER.debug("Trying to reconnect") + await close_stale_connections_by_address(self.address) + + device = bluetooth.async_ble_device_from_address( + self.hass, self.address, connectable=True + ) + + try: + if not await self.mower.connect(device): + raise UpdateFailed("Failed to connect") + except BleakError as err: + raise UpdateFailed("Failed to connect") from err + + async def _async_update_data(self) -> dict[str, bytes]: + """Poll the device.""" + LOGGER.debug("Polling device") + + data: dict[str, bytes] = {} + + try: + if not self.mower.is_connected(): + await self._async_find_device() + except BleakError as err: + raise UpdateFailed("Failed to connect") from err + + try: + data["battery_level"] = await self.mower.battery_level() + LOGGER.debug("battery_level" + str(data["battery_level"])) + if data["battery_level"] is None: + await self._async_find_device() + raise UpdateFailed("Error getting data from device") + + data["activity"] = await self.mower.mower_activity() + LOGGER.debug("activity:" + str(data["activity"])) + if data["activity"] is None: + await self._async_find_device() + raise UpdateFailed("Error getting data from device") + + data["state"] = await self.mower.mower_state() + LOGGER.debug("state:" + str(data["state"])) + if data["state"] is None: + await self._async_find_device() + raise UpdateFailed("Error getting data from device") + + except BleakError as err: + LOGGER.error("Error getting data from device") + await self._async_find_device() + raise UpdateFailed("Error getting data from device") from err + + return data diff --git a/homeassistant/components/husqvarna_automower_ble/entity.py b/homeassistant/components/husqvarna_automower_ble/entity.py new file mode 100644 index 00000000000..d2873d933ff --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/entity.py @@ -0,0 +1,30 @@ +"""Provides the HusqvarnaAutomowerBleEntity.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER +from .coordinator import HusqvarnaCoordinator + + +class HusqvarnaAutomowerBleEntity(CoordinatorEntity[HusqvarnaCoordinator]): + """HusqvarnaCoordinator entity for Husqvarna Automower Bluetooth.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: HusqvarnaCoordinator) -> None: + """Initialize coordinator entity.""" + super().__init__(coordinator) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{coordinator.address}_{coordinator.channel_id}")}, + manufacturer=MANUFACTURER, + model_id=coordinator.model, + ) + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.coordinator.mower.is_connected() diff --git a/homeassistant/components/husqvarna_automower_ble/lawn_mower.py b/homeassistant/components/husqvarna_automower_ble/lawn_mower.py new file mode 100644 index 00000000000..980efc6f069 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/lawn_mower.py @@ -0,0 +1,153 @@ +"""The Husqvarna Autoconnect Bluetooth lawn mower platform.""" + +from __future__ import annotations + +from automower_ble.protocol import MowerActivity, MowerState + +from homeassistant.components import bluetooth +from homeassistant.components.lawn_mower import ( + LawnMowerActivity, + LawnMowerEntity, + 
LawnMowerEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import LOGGER +from .coordinator import HusqvarnaCoordinator +from .entity import HusqvarnaAutomowerBleEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up AutomowerLawnMower integration from a config entry.""" + coordinator: HusqvarnaCoordinator = config_entry.runtime_data + address = coordinator.address + + async_add_entities( + [ + AutomowerLawnMower( + coordinator, + address, + ), + ] + ) + + +class AutomowerLawnMower(HusqvarnaAutomowerBleEntity, LawnMowerEntity): + """Husqvarna Automower.""" + + _attr_name = None + _attr_supported_features = ( + LawnMowerEntityFeature.PAUSE + | LawnMowerEntityFeature.START_MOWING + | LawnMowerEntityFeature.DOCK + ) + + def __init__( + self, + coordinator: HusqvarnaCoordinator, + address: str, + ) -> None: + """Initialize the lawn mower.""" + super().__init__(coordinator) + self._attr_unique_id = str(address) + + def _get_activity(self) -> LawnMowerActivity | None: + """Return the current lawn mower activity.""" + if self.coordinator.data is None: + return None + + state = self.coordinator.data["state"] + activity = self.coordinator.data["activity"] + + if state is None or activity is None: + return None + + if state == MowerState.PAUSED: + return LawnMowerActivity.PAUSED + if state in (MowerState.STOPPED, MowerState.OFF, MowerState.WAIT_FOR_SAFETYPIN): + # This is actually stopped, but that isn't an option + return LawnMowerActivity.ERROR + if state in ( + MowerState.RESTRICTED, + MowerState.IN_OPERATION, + MowerState.PENDING_START, + ): + if activity in ( + MowerActivity.CHARGING, + MowerActivity.PARKED, + MowerActivity.NONE, + ): + return LawnMowerActivity.DOCKED + if activity in (MowerActivity.GOING_OUT, MowerActivity.MOWING): + return LawnMowerActivity.MOWING + if activity == MowerActivity.GOING_HOME: + return LawnMowerActivity.RETURNING + return LawnMowerActivity.ERROR + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + LOGGER.debug("AutomowerLawnMower: _handle_coordinator_update") + + self._attr_activity = self._get_activity() + self._attr_available = self._attr_activity is not None + super()._handle_coordinator_update() + + async def async_start_mowing(self) -> None: + """Start mowing.""" + LOGGER.debug("Starting mower") + + if not self.coordinator.mower.is_connected(): + device = bluetooth.async_ble_device_from_address( + self.coordinator.hass, self.coordinator.address, connectable=True + ) + if not await self.coordinator.mower.connect(device): + return + + await self.coordinator.mower.mower_resume() + if self._attr_activity is LawnMowerActivity.DOCKED: + await self.coordinator.mower.mower_override() + await self.coordinator.async_request_refresh() + + self._attr_activity = self._get_activity() + self.async_write_ha_state() + + async def async_dock(self) -> None: + """Start docking.""" + LOGGER.debug("Start docking") + + if not self.coordinator.mower.is_connected(): + device = bluetooth.async_ble_device_from_address( + self.coordinator.hass, self.coordinator.address, connectable=True + ) + if not await self.coordinator.mower.connect(device): + return + + await self.coordinator.mower.mower_park() + await self.coordinator.async_request_refresh() + + 
self._attr_activity = self._get_activity() + self.async_write_ha_state() + + async def async_pause(self) -> None: + """Pause mower.""" + LOGGER.debug("Pausing mower") + + if not self.coordinator.mower.is_connected(): + device = bluetooth.async_ble_device_from_address( + self.coordinator.hass, self.coordinator.address, connectable=True + ) + if not await self.coordinator.mower.connect(device): + return + + await self.coordinator.mower.mower_pause() + await self.coordinator.async_request_refresh() + + self._attr_activity = self._get_activity() + self.async_write_ha_state() diff --git a/homeassistant/components/husqvarna_automower_ble/manifest.json b/homeassistant/components/husqvarna_automower_ble/manifest.json new file mode 100644 index 00000000000..7566b5c9d32 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/manifest.json @@ -0,0 +1,16 @@ +{ + "domain": "husqvarna_automower_ble", + "name": "Husqvarna Automower BLE", + "bluetooth": [ + { + "service_uuid": "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", + "connectable": true + } + ], + "codeowners": ["@alistair23"], + "config_flow": true, + "dependencies": ["bluetooth_adapters"], + "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower_ble", + "iot_class": "local_polling", + "requirements": ["automower-ble==0.2.0"] +} diff --git a/homeassistant/components/husqvarna_automower_ble/strings.json b/homeassistant/components/husqvarna_automower_ble/strings.json new file mode 100644 index 00000000000..de0a140933a --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/strings.json @@ -0,0 +1,21 @@ +{ + "config": { + "flow_title": "{name} ({address})", + "step": { + "user": { + "data": { + "address": "Device BLE address" + } + }, + "confirm": { + "description": "Do you want to set up {name}? Make sure the mower is in pairing mode" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_devices_found": "Ensure the mower is in pairing mode and try again. 
It can take a few attempts.", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + } +} diff --git a/homeassistant/components/huum/climate.py b/homeassistant/components/huum/climate.py index df740aea3d1..7e0e4ce5ef1 100644 --- a/homeassistant/components/huum/climate.py +++ b/homeassistant/components/huum/climate.py @@ -56,7 +56,6 @@ class HuumDevice(ClimateEntity): _target_temperature: int | None = None _status: HuumStatusResponse | None = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, huum_handler: Huum, unique_id: str) -> None: """Initialize the heater.""" diff --git a/homeassistant/components/huum/manifest.json b/homeassistant/components/huum/manifest.json index 7629f529b91..38562e1a072 100644 --- a/homeassistant/components/huum/manifest.json +++ b/homeassistant/components/huum/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/huum", "iot_class": "cloud_polling", - "requirements": ["huum==0.7.10"] + "requirements": ["huum==0.7.12"] } diff --git a/homeassistant/components/hvv_departures/config_flow.py b/homeassistant/components/hvv_departures/config_flow.py index a02796dbffb..536b8f18259 100644 --- a/homeassistant/components/hvv_departures/config_flow.py +++ b/homeassistant/components/hvv_departures/config_flow.py @@ -49,10 +49,11 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + hub: GTIHub + data: dict[str, Any] + def __init__(self) -> None: """Initialize component.""" - self.hub: GTIHub | None = None - self.data: dict[str, Any] | None = None self.stations: dict[str, Any] = {} async def async_step_user( @@ -86,7 +87,9 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=SCHEMA_STEP_USER, errors=errors ) - async def async_step_station(self, user_input=None): + async def async_step_station( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the step where the user inputs his/her station.""" if user_input is not None: errors = {} @@ -116,7 +119,9 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="station", data_schema=SCHEMA_STEP_STATION) - async def async_step_station_select(self, user_input=None): + async def async_step_station_select( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the step where the user inputs his/her station.""" schema = vol.Schema({vol.Required(CONF_STATION): vol.In(list(self.stations))}) @@ -136,19 +141,19 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Options flow handler.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize HVV Departures options flow.""" - self.config_entry = config_entry - self.options = dict(config_entry.options) self.departure_filters: dict[str, Any] = {} - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" errors = {} if not self.departure_filters: @@ -177,7 +182,7 @@ class OptionsFlowHandler(OptionsFlow): if not errors: self.departure_filters = { str(i): departure_filter - for i, departure_filter in 
enumerate(departure_list.get("filter")) + for i, departure_filter in enumerate(departure_list["filter"]) } if user_input is not None and not errors: @@ -195,7 +200,7 @@ class OptionsFlowHandler(OptionsFlow): old_filter = [ i for (i, f) in self.departure_filters.items() - if f in self.config_entry.options.get(CONF_FILTER) + if f in self.config_entry.options[CONF_FILTER] ] else: old_filter = [] diff --git a/homeassistant/components/hvv_departures/strings.json b/homeassistant/components/hvv_departures/strings.json index a9ec58f12ad..f69dcd22047 100644 --- a/homeassistant/components/hvv_departures/strings.json +++ b/homeassistant/components/hvv_departures/strings.json @@ -32,6 +32,10 @@ } }, "options": { + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + }, "step": { "init": { "title": "Options", diff --git a/homeassistant/components/hydrawise/__init__.py b/homeassistant/components/hydrawise/__init__.py index d2af8f37e36..ea5a5801e69 100644 --- a/homeassistant/components/hydrawise/__init__.py +++ b/homeassistant/components/hydrawise/__init__.py @@ -7,8 +7,12 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from .const import DOMAIN, SCAN_INTERVAL -from .coordinator import HydrawiseDataUpdateCoordinator +from .const import APP_ID, DOMAIN +from .coordinator import ( + HydrawiseMainDataUpdateCoordinator, + HydrawiseUpdateCoordinators, + HydrawiseWaterUseDataUpdateCoordinator, +) PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, @@ -26,12 +30,22 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b raise ConfigEntryAuthFailed hydrawise = client.Hydrawise( - auth.Auth(config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD]) + auth.Auth(config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD]), + app_id=APP_ID, ) - coordinator = HydrawiseDataUpdateCoordinator(hass, hydrawise, SCAN_INTERVAL) - await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = coordinator + main_coordinator = HydrawiseMainDataUpdateCoordinator(hass, hydrawise) + await main_coordinator.async_config_entry_first_refresh() + water_use_coordinator = HydrawiseWaterUseDataUpdateCoordinator( + hass, hydrawise, main_coordinator + ) + await water_use_coordinator.async_config_entry_first_refresh() + hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = ( + HydrawiseUpdateCoordinators( + main=main_coordinator, + water_use=water_use_coordinator, + ) + ) await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) return True diff --git a/homeassistant/components/hydrawise/binary_sensor.py b/homeassistant/components/hydrawise/binary_sensor.py index 9b6dcadf95f..34c31d3ad16 100644 --- a/homeassistant/components/hydrawise/binary_sensor.py +++ b/homeassistant/components/hydrawise/binary_sensor.py @@ -21,7 +21,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType from .const import DOMAIN, SERVICE_RESUME, SERVICE_START_WATERING, SERVICE_SUSPEND -from .coordinator import HydrawiseDataUpdateCoordinator +from .coordinator import HydrawiseUpdateCoordinators from .entity import HydrawiseEntity @@ -81,18 +81,16 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Hydrawise 
binary_sensor platform.""" - coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinators: HydrawiseUpdateCoordinators = hass.data[DOMAIN][config_entry.entry_id] entities: list[HydrawiseBinarySensor] = [] - for controller in coordinator.data.controllers.values(): + for controller in coordinators.main.data.controllers.values(): entities.extend( - HydrawiseBinarySensor(coordinator, description, controller) + HydrawiseBinarySensor(coordinators.main, description, controller) for description in CONTROLLER_BINARY_SENSORS ) entities.extend( HydrawiseBinarySensor( - coordinator, + coordinators.main, description, controller, sensor_id=sensor.id, @@ -103,7 +101,7 @@ async def async_setup_entry( ) entities.extend( HydrawiseZoneBinarySensor( - coordinator, description, controller, zone_id=zone.id + coordinators.main, description, controller, zone_id=zone.id ) for zone in controller.zones for description in ZONE_BINARY_SENSORS diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index ab9ebbb065d..5af32af3951 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -6,14 +6,14 @@ from collections.abc import Callable, Mapping from typing import Any from aiohttp import ClientError -from pydrawise import auth, client +from pydrawise import auth as pydrawise_auth, client from pydrawise.exceptions import NotAuthorizedError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from .const import DOMAIN, LOGGER +from .const import APP_ID, DOMAIN, LOGGER class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): @@ -21,10 +21,6 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Construct a ConfigFlow.""" - self.reauth_entry: ConfigEntry | None = None - async def _create_or_update_entry( self, username: str, @@ -33,36 +29,38 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): on_failure: Callable[[str], ConfigFlowResult], ) -> ConfigFlowResult: """Create the config entry.""" - # Verify that the provided credentials work.""" - api = client.Hydrawise(auth.Auth(username, password)) + auth = pydrawise_auth.Auth(username, password) try: - # Don't fetch zones because we don't need them yet. - user = await api.get_user(fetch_zones=False) + await auth.token() except NotAuthorizedError: return on_failure("invalid_auth") except TimeoutError: return on_failure("timeout_connect") + + try: + api = client.Hydrawise(auth, app_id=APP_ID) + # Don't fetch zones because we don't need them yet. 
+ user = await api.get_user(fetch_zones=False) + except TimeoutError: + return on_failure("timeout_connect") except ClientError as ex: LOGGER.error("Unable to connect to Hydrawise cloud service: %s", ex) return on_failure("cannot_connect") await self.async_set_unique_id(f"hydrawise-{user.customer_id}") - if not self.reauth_entry: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( title="Hydrawise", data={CONF_USERNAME: username, CONF_PASSWORD: password}, ) - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data=self.reauth_entry.data - | {CONF_USERNAME: username, CONF_PASSWORD: password}, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_USERNAME: username, CONF_PASSWORD: password}, ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -90,10 +88,7 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth after updating config to username/password.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/hydrawise/const.py b/homeassistant/components/hydrawise/const.py index f731ecf278c..beaf450a586 100644 --- a/homeassistant/components/hydrawise/const.py +++ b/homeassistant/components/hydrawise/const.py @@ -3,14 +3,19 @@ from datetime import timedelta import logging +from homeassistant.const import __version__ as HA_VERSION + LOGGER = logging.getLogger(__package__) +APP_ID = f"homeassistant-{HA_VERSION}" + DOMAIN = "hydrawise" DEFAULT_WATERING_TIME = timedelta(minutes=15) MANUFACTURER = "Hydrawise" -SCAN_INTERVAL = timedelta(seconds=30) +MAIN_SCAN_INTERVAL = timedelta(minutes=5) +WATER_USE_SCAN_INTERVAL = timedelta(minutes=60) SIGNAL_UPDATE_HYDRAWISE = "hydrawise_update" diff --git a/homeassistant/components/hydrawise/coordinator.py b/homeassistant/components/hydrawise/coordinator.py index 6cd233eb1df..e82a4ec1588 100644 --- a/homeassistant/components/hydrawise/coordinator.py +++ b/homeassistant/components/hydrawise/coordinator.py @@ -2,8 +2,7 @@ from __future__ import annotations -from dataclasses import dataclass -from datetime import timedelta +from dataclasses import dataclass, field from pydrawise import Hydrawise from pydrawise.schema import Controller, ControllerWaterUseSummary, Sensor, User, Zone @@ -12,7 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.util.dt import now -from .const import DOMAIN, LOGGER +from .const import DOMAIN, LOGGER, MAIN_SCAN_INTERVAL, WATER_USE_SCAN_INTERVAL @dataclass @@ -20,22 +19,39 @@ class HydrawiseData: """Container for data fetched from the Hydrawise API.""" user: User - controllers: dict[int, Controller] - zones: dict[int, Zone] - sensors: dict[int, Sensor] - daily_water_summary: dict[int, ControllerWaterUseSummary] + controllers: dict[int, Controller] = field(default_factory=dict) + zones: dict[int, Zone] = field(default_factory=dict) + sensors: dict[int, Sensor] = field(default_factory=dict) + daily_water_summary: dict[int, ControllerWaterUseSummary] = field( + default_factory=dict + ) + + +@dataclass +class 
HydrawiseUpdateCoordinators: + """Container for all Hydrawise DataUpdateCoordinator instances.""" + + main: HydrawiseMainDataUpdateCoordinator + water_use: HydrawiseWaterUseDataUpdateCoordinator class HydrawiseDataUpdateCoordinator(DataUpdateCoordinator[HydrawiseData]): - """The Hydrawise Data Update Coordinator.""" + """Base class for Hydrawise Data Update Coordinators.""" api: Hydrawise - def __init__( - self, hass: HomeAssistant, api: Hydrawise, scan_interval: timedelta - ) -> None: + +class HydrawiseMainDataUpdateCoordinator(HydrawiseDataUpdateCoordinator): + """The main Hydrawise Data Update Coordinator. + + This fetches the primary state data for Hydrawise controllers and zones + at a relatively frequent interval so that the primary functions of the + integration are updated in a timely manner. + """ + + def __init__(self, hass: HomeAssistant, api: Hydrawise) -> None: """Initialize HydrawiseDataUpdateCoordinator.""" - super().__init__(hass, LOGGER, name=DOMAIN, update_interval=scan_interval) + super().__init__(hass, LOGGER, name=DOMAIN, update_interval=MAIN_SCAN_INTERVAL) self.api = api async def _async_update_data(self) -> HydrawiseData: @@ -43,28 +59,56 @@ class HydrawiseDataUpdateCoordinator(DataUpdateCoordinator[HydrawiseData]): # Don't fetch zones. We'll fetch them for each controller later. # This is to prevent 502 errors in some cases. # See: https://github.com/home-assistant/core/issues/120128 - user = await self.api.get_user(fetch_zones=False) - controllers = {} - zones = {} - sensors = {} - daily_water_summary: dict[int, ControllerWaterUseSummary] = {} - for controller in user.controllers: - controllers[controller.id] = controller + data = HydrawiseData(user=await self.api.get_user(fetch_zones=False)) + for controller in data.user.controllers: + data.controllers[controller.id] = controller controller.zones = await self.api.get_zones(controller) for zone in controller.zones: - zones[zone.id] = zone + data.zones[zone.id] = zone for sensor in controller.sensors: - sensors[sensor.id] = sensor + data.sensors[sensor.id] = sensor + return data + + +class HydrawiseWaterUseDataUpdateCoordinator(HydrawiseDataUpdateCoordinator): + """Data Update Coordinator for Hydrawise Water Use. + + This fetches data that is more expensive for the Hydrawise API to compute + at a less frequent interval as to not overload the Hydrawise servers. 
+ """ + + _main_coordinator: HydrawiseMainDataUpdateCoordinator + + def __init__( + self, + hass: HomeAssistant, + api: Hydrawise, + main_coordinator: HydrawiseMainDataUpdateCoordinator, + ) -> None: + """Initialize HydrawiseWaterUseDataUpdateCoordinator.""" + super().__init__( + hass, + LOGGER, + name=f"{DOMAIN} water use", + update_interval=WATER_USE_SCAN_INTERVAL, + ) + self.api = api + self._main_coordinator = main_coordinator + + async def _async_update_data(self) -> HydrawiseData: + """Fetch the latest data from Hydrawise.""" + daily_water_summary: dict[int, ControllerWaterUseSummary] = {} + for controller in self._main_coordinator.data.controllers.values(): daily_water_summary[controller.id] = await self.api.get_water_use_summary( controller, now().replace(hour=0, minute=0, second=0, microsecond=0), now(), ) - + main_data = self._main_coordinator.data return HydrawiseData( - user=user, - controllers=controllers, - zones=zones, - sensors=sensors, + user=main_data.user, + controllers=main_data.controllers, + zones=main_data.zones, + sensors=main_data.sensors, daily_water_summary=daily_water_summary, ) diff --git a/homeassistant/components/hydrawise/icons.json b/homeassistant/components/hydrawise/icons.json index 1d1d349dbf9..5baf76454b7 100644 --- a/homeassistant/components/hydrawise/icons.json +++ b/homeassistant/components/hydrawise/icons.json @@ -31,8 +31,14 @@ } }, "services": { - "start_watering": "mdi:sprinkler-variant", - "suspend": "mdi:pause-circle-outline", - "resume": "mdi:play" + "start_watering": { + "service": "mdi:sprinkler-variant" + }, + "suspend": { + "service": "mdi:pause-circle-outline" + }, + "resume": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/hydrawise/manifest.json b/homeassistant/components/hydrawise/manifest.json index 9b733cb73d0..50f803c07dc 100644 --- a/homeassistant/components/hydrawise/manifest.json +++ b/homeassistant/components/hydrawise/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/hydrawise", "iot_class": "cloud_polling", "loggers": ["pydrawise"], - "requirements": ["pydrawise==2024.8.0"] + "requirements": ["pydrawise==2024.12.0"] } diff --git a/homeassistant/components/hydrawise/sensor.py b/homeassistant/components/hydrawise/sensor.py index 563af893700..96cc16832da 100644 --- a/homeassistant/components/hydrawise/sensor.py +++ b/homeassistant/components/hydrawise/sensor.py @@ -4,9 +4,11 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from datetime import datetime, timedelta +from datetime import timedelta from typing import Any +from pydrawise.schema import ControllerWaterUseSummary + from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -19,7 +21,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util from .const import DOMAIN -from .coordinator import HydrawiseDataUpdateCoordinator +from .coordinator import HydrawiseUpdateCoordinators from .entity import HydrawiseEntity @@ -30,100 +32,58 @@ class HydrawiseSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[HydrawiseSensor], Any] -def _get_zone_watering_time(sensor: HydrawiseSensor) -> int: - if (current_run := sensor.zone.scheduled_runs.current_run) is not None: - return int(current_run.remaining_time.total_seconds() / 60) - return 0 +def _get_water_use(sensor: HydrawiseSensor) -> ControllerWaterUseSummary: + return 
sensor.coordinator.data.daily_water_summary[sensor.controller.id] -def _get_zone_next_cycle(sensor: HydrawiseSensor) -> datetime | None: - if (next_run := sensor.zone.scheduled_runs.next_run) is not None: - return dt_util.as_utc(next_run.start_time) - return None - - -def _get_zone_daily_active_water_use(sensor: HydrawiseSensor) -> float: - """Get active water use for the zone.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return float(daily_water_summary.active_use_by_zone_id.get(sensor.zone.id, 0.0)) - - -def _get_zone_daily_active_water_time(sensor: HydrawiseSensor) -> float | None: - """Get active water time for the zone.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.active_time_by_zone_id.get( - sensor.zone.id, timedelta() - ).total_seconds() - - -def _get_controller_daily_active_water_use(sensor: HydrawiseSensor) -> float | None: - """Get active water use for the controller.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.total_active_use - - -def _get_controller_daily_inactive_water_use(sensor: HydrawiseSensor) -> float | None: - """Get inactive water use for the controller.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.total_inactive_use - - -def _get_controller_daily_active_water_time(sensor: HydrawiseSensor) -> float: - """Get active water time for the controller.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.total_active_time.total_seconds() - - -def _get_controller_daily_total_water_use(sensor: HydrawiseSensor) -> float | None: - """Get inactive water use for the controller.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.total_use - - -CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = ( +WATER_USE_CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = ( HydrawiseSensorEntityDescription( key="daily_active_water_time", translation_key="daily_active_water_time", device_class=SensorDeviceClass.DURATION, native_unit_of_measurement=UnitOfTime.SECONDS, - value_fn=_get_controller_daily_active_water_time, + value_fn=lambda sensor: _get_water_use( + sensor + ).total_active_time.total_seconds(), ), ) +WATER_USE_ZONE_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = ( + HydrawiseSensorEntityDescription( + key="daily_active_water_time", + translation_key="daily_active_water_time", + device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.SECONDS, + value_fn=lambda sensor: ( + _get_water_use(sensor) + .active_time_by_zone_id.get(sensor.zone.id, timedelta()) + .total_seconds() + ), + ), +) + FLOW_CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] 
= ( HydrawiseSensorEntityDescription( key="daily_total_water_use", translation_key="daily_total_water_use", device_class=SensorDeviceClass.VOLUME, suggested_display_precision=1, - value_fn=_get_controller_daily_total_water_use, + value_fn=lambda sensor: _get_water_use(sensor).total_use, ), HydrawiseSensorEntityDescription( key="daily_active_water_use", translation_key="daily_active_water_use", device_class=SensorDeviceClass.VOLUME, suggested_display_precision=1, - value_fn=_get_controller_daily_active_water_use, + value_fn=lambda sensor: _get_water_use(sensor).total_active_use, ), HydrawiseSensorEntityDescription( key="daily_inactive_water_use", translation_key="daily_inactive_water_use", device_class=SensorDeviceClass.VOLUME, suggested_display_precision=1, - value_fn=_get_controller_daily_inactive_water_use, + value_fn=lambda sensor: _get_water_use(sensor).total_inactive_use, ), ) @@ -133,7 +93,9 @@ FLOW_ZONE_SENSORS: tuple[SensorEntityDescription, ...] = ( translation_key="daily_active_water_use", device_class=SensorDeviceClass.VOLUME, suggested_display_precision=1, - value_fn=_get_zone_daily_active_water_use, + value_fn=lambda sensor: float( + _get_water_use(sensor).active_use_by_zone_id.get(sensor.zone.id, 0.0) + ), ), ) @@ -142,20 +104,24 @@ ZONE_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = ( key="next_cycle", translation_key="next_cycle", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=_get_zone_next_cycle, + value_fn=lambda sensor: ( + dt_util.as_utc(sensor.zone.scheduled_runs.next_run.start_time) + if sensor.zone.scheduled_runs.next_run is not None + else None + ), ), HydrawiseSensorEntityDescription( key="watering_time", translation_key="watering_time", native_unit_of_measurement=UnitOfTime.MINUTES, - value_fn=_get_zone_watering_time, - ), - HydrawiseSensorEntityDescription( - key="daily_active_water_time", - translation_key="daily_active_water_time", - device_class=SensorDeviceClass.DURATION, - native_unit_of_measurement=UnitOfTime.SECONDS, - value_fn=_get_zone_daily_active_water_time, + value_fn=lambda sensor: ( + int( + sensor.zone.scheduled_runs.current_run.remaining_time.total_seconds() + / 60 + ) + if sensor.zone.scheduled_runs.current_run is not None + else 0 + ), ), ) @@ -168,29 +134,37 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Hydrawise sensor platform.""" - coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinators: HydrawiseUpdateCoordinators = hass.data[DOMAIN][config_entry.entry_id] entities: list[HydrawiseSensor] = [] - for controller in coordinator.data.controllers.values(): + for controller in coordinators.main.data.controllers.values(): entities.extend( - HydrawiseSensor(coordinator, description, controller) - for description in CONTROLLER_SENSORS + HydrawiseSensor(coordinators.water_use, description, controller) + for description in WATER_USE_CONTROLLER_SENSORS ) entities.extend( - HydrawiseSensor(coordinator, description, controller, zone_id=zone.id) + HydrawiseSensor( + coordinators.water_use, description, controller, zone_id=zone.id + ) + for zone in controller.zones + for description in WATER_USE_ZONE_SENSORS + ) + entities.extend( + HydrawiseSensor(coordinators.main, description, controller, zone_id=zone.id) for zone in controller.zones for description in ZONE_SENSORS ) - if coordinator.data.daily_water_summary[controller.id].total_use is not None: + if ( + coordinators.water_use.data.daily_water_summary[controller.id].total_use + is not 
+        ):
             # we have a flow sensor for this controller
             entities.extend(
-                HydrawiseSensor(coordinator, description, controller)
+                HydrawiseSensor(coordinators.water_use, description, controller)
                 for description in FLOW_CONTROLLER_SENSORS
             )
             entities.extend(
                 HydrawiseSensor(
-                    coordinator,
+                    coordinators.water_use,
                     description,
                     controller,
                     zone_id=zone.id,
diff --git a/homeassistant/components/hydrawise/strings.json b/homeassistant/components/hydrawise/strings.json
index b6df36ad4ff..4d50f10bcb2 100644
--- a/homeassistant/components/hydrawise/strings.json
+++ b/homeassistant/components/hydrawise/strings.json
@@ -13,7 +13,8 @@
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
-      "unknown": "[%key:common::config_flow::error::unknown%]"
+      "unknown": "[%key:common::config_flow::error::unknown%]",
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
diff --git a/homeassistant/components/hydrawise/switch.py b/homeassistant/components/hydrawise/switch.py
index 001a8e399ee..1addaf1ec92 100644
--- a/homeassistant/components/hydrawise/switch.py
+++ b/homeassistant/components/hydrawise/switch.py
@@ -20,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.util import dt as dt_util
 
 from .const import DEFAULT_WATERING_TIME, DOMAIN
-from .coordinator import HydrawiseDataUpdateCoordinator
+from .coordinator import HydrawiseUpdateCoordinators
 from .entity import HydrawiseEntity
 
@@ -66,12 +66,10 @@ async def async_setup_entry(
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Set up the Hydrawise switch platform."""
-    coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][
-        config_entry.entry_id
-    ]
+    coordinators: HydrawiseUpdateCoordinators = hass.data[DOMAIN][config_entry.entry_id]
     async_add_entities(
-        HydrawiseSwitch(coordinator, description, controller, zone_id=zone.id)
-        for controller in coordinator.data.controllers.values()
+        HydrawiseSwitch(coordinators.main, description, controller, zone_id=zone.id)
+        for controller in coordinators.main.data.controllers.values()
         for zone in controller.zones
         for description in SWITCH_TYPES
     )
diff --git a/homeassistant/components/hydrawise/valve.py b/homeassistant/components/hydrawise/valve.py
index 6ceb3673c71..37f196bc054 100644
--- a/homeassistant/components/hydrawise/valve.py
+++ b/homeassistant/components/hydrawise/valve.py
@@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
 from .const import DOMAIN
-from .coordinator import HydrawiseDataUpdateCoordinator
+from .coordinator import HydrawiseUpdateCoordinators
 from .entity import HydrawiseEntity
 
 VALVE_TYPES: tuple[ValveEntityDescription, ...] = (
@@ -34,12 +34,10 @@ async def async_setup_entry(
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Set up the Hydrawise valve platform."""
-    coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][
-        config_entry.entry_id
-    ]
+    coordinators: HydrawiseUpdateCoordinators = hass.data[DOMAIN][config_entry.entry_id]
     async_add_entities(
-        HydrawiseValve(coordinator, description, controller, zone_id=zone.id)
-        for controller in coordinator.data.controllers.values()
+        HydrawiseValve(coordinators.main, description, controller, zone_id=zone.id)
+        for controller in coordinators.main.data.controllers.values()
         for zone in controller.zones
         for description in VALVE_TYPES
     )
diff --git a/homeassistant/components/hyperion/config_flow.py b/homeassistant/components/hyperion/config_flow.py
index 64a9831800f..b2b7dbdf531 100644
--- a/homeassistant/components/hyperion/config_flow.py
+++ b/homeassistant/components/hyperion/config_flow.py
@@ -111,6 +111,8 @@ class HyperionConfigFlow(ConfigFlow, domain=DOMAIN):
 
     VERSION = 1
 
+    unique_id: str
+
     def __init__(self) -> None:
         """Instantiate config flow."""
         self._data: dict[str, Any] = {}
@@ -422,24 +424,22 @@ class HyperionConfigFlow(ConfigFlow, domain=DOMAIN):
 
     @staticmethod
     @callback
-    def async_get_options_flow(config_entry: ConfigEntry) -> HyperionOptionsFlow:
+    def async_get_options_flow(
+        config_entry: ConfigEntry,
+    ) -> HyperionOptionsFlow:
         """Get the Hyperion Options flow."""
-        return HyperionOptionsFlow(config_entry)
+        return HyperionOptionsFlow()
 
 
 class HyperionOptionsFlow(OptionsFlow):
     """Hyperion options flow."""
 
-    def __init__(self, config_entry: ConfigEntry) -> None:
-        """Initialize a Hyperion options flow."""
-        self._config_entry = config_entry
-
     def _create_client(self) -> client.HyperionClient:
         """Create and connect a client instance."""
         return create_hyperion_client(
-            self._config_entry.data[CONF_HOST],
-            self._config_entry.data[CONF_PORT],
-            token=self._config_entry.data.get(CONF_TOKEN),
+            self.config_entry.data[CONF_HOST],
+            self.config_entry.data[CONF_PORT],
+            token=self.config_entry.data.get(CONF_TOKEN),
         )
 
     async def async_step_init(
@@ -468,8 +468,7 @@ class HyperionOptionsFlow(OptionsFlow):
             return self.async_create_entry(title="", data=user_input)
 
         default_effect_show_list = list(
-            set(effects)
-            - set(self._config_entry.options.get(CONF_EFFECT_HIDE_LIST, []))
+            set(effects) - set(self.config_entry.options.get(CONF_EFFECT_HIDE_LIST, []))
         )
 
         return self.async_show_form(
@@ -478,7 +477,7 @@
             {
                 vol.Optional(
                     CONF_PRIORITY,
-                    default=self._config_entry.options.get(
+                    default=self.config_entry.options.get(
                         CONF_PRIORITY, DEFAULT_PRIORITY
                     ),
                 ): vol.All(vol.Coerce(int), vol.Range(min=0, max=255)),
diff --git a/homeassistant/components/hyperion/manifest.json b/homeassistant/components/hyperion/manifest.json
index f18491044fa..684fb276f53 100644
--- a/homeassistant/components/hyperion/manifest.json
+++ b/homeassistant/components/hyperion/manifest.json
@@ -6,7 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/hyperion",
   "iot_class": "local_push",
   "loggers": ["hyperion"],
-  "quality_scale": "platinum",
   "requirements": ["hyperion-py==0.7.5"],
   "ssdp": [
     {
diff --git a/homeassistant/components/hyperion/strings.json b/homeassistant/components/hyperion/strings.json
index 79c226b71eb..01682648277 100644
--- a/homeassistant/components/hyperion/strings.json
+++ b/homeassistant/components/hyperion/strings.json
@@ -52,6 +52,9 @@
           "effect_show_list": "Hyperion effects to show"
         }
       }
+    },
+    "abort": {
+ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "entity": { diff --git a/homeassistant/components/ialarm/alarm_control_panel.py b/homeassistant/components/ialarm/alarm_control_panel.py index 912f04a1d1e..4ae3787dc1d 100644 --- a/homeassistant/components/ialarm/alarm_control_panel.py +++ b/homeassistant/components/ialarm/alarm_control_panel.py @@ -5,6 +5,7 @@ from __future__ import annotations from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -50,7 +51,7 @@ class IAlarmPanel( self._attr_unique_id = coordinator.mac @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" return self.coordinator.state diff --git a/homeassistant/components/ialarm/const.py b/homeassistant/components/ialarm/const.py index d1561cc86d5..1b8074c34f0 100644 --- a/homeassistant/components/ialarm/const.py +++ b/homeassistant/components/ialarm/const.py @@ -2,12 +2,7 @@ from pyialarm import IAlarm -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState DATA_COORDINATOR = "ialarm" @@ -16,8 +11,8 @@ DEFAULT_PORT = 18034 DOMAIN = "ialarm" IALARM_TO_HASS = { - IAlarm.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - IAlarm.ARMED_STAY: STATE_ALARM_ARMED_HOME, - IAlarm.DISARMED: STATE_ALARM_DISARMED, - IAlarm.TRIGGERED: STATE_ALARM_TRIGGERED, + IAlarm.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + IAlarm.ARMED_STAY: AlarmControlPanelState.ARMED_HOME, + IAlarm.DISARMED: AlarmControlPanelState.DISARMED, + IAlarm.TRIGGERED: AlarmControlPanelState.TRIGGERED, } diff --git a/homeassistant/components/ialarm/coordinator.py b/homeassistant/components/ialarm/coordinator.py index 2aec99c98c4..ad0f2298a3b 100644 --- a/homeassistant/components/ialarm/coordinator.py +++ b/homeassistant/components/ialarm/coordinator.py @@ -7,7 +7,10 @@ import logging from pyialarm import IAlarm -from homeassistant.components.alarm_control_panel import SCAN_INTERVAL +from homeassistant.components.alarm_control_panel import ( + SCAN_INTERVAL, + AlarmControlPanelState, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -22,7 +25,7 @@ class IAlarmDataUpdateCoordinator(DataUpdateCoordinator[None]): def __init__(self, hass: HomeAssistant, ialarm: IAlarm, mac: str) -> None: """Initialize global iAlarm data updater.""" self.ialarm = ialarm - self.state: str | None = None + self.state: AlarmControlPanelState | None = None self.host: str = ialarm.host self.mac = mac diff --git a/homeassistant/components/iammeter/manifest.json b/homeassistant/components/iammeter/manifest.json index f1ebecab00d..22831767e62 100644 --- a/homeassistant/components/iammeter/manifest.json +++ b/homeassistant/components/iammeter/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/iammeter", "iot_class": "local_polling", "loggers": ["iammeter"], + "quality_scale": "legacy", "requirements": ["iammeter==0.2.1"] } diff --git a/homeassistant/components/iaqualink/__init__.py b/homeassistant/components/iaqualink/__init__.py index 36235d52ed7..26bffc4e982 100644 --- a/homeassistant/components/iaqualink/__init__.py +++ 
@@ -12,7 +12,6 @@ import httpx
 from iaqualink.client import AqualinkClient
 from iaqualink.device import (
     AqualinkBinarySensor,
-    AqualinkDevice,
     AqualinkLight,
     AqualinkSensor,
     AqualinkSwitch,
@@ -29,16 +28,12 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
-from homeassistant.helpers.device_registry import DeviceInfo
-from homeassistant.helpers.dispatcher import (
-    async_dispatcher_connect,
-    async_dispatcher_send,
-)
-from homeassistant.helpers.entity import Entity
+from homeassistant.helpers.dispatcher import async_dispatcher_send
 from homeassistant.helpers.event import async_track_time_interval
 from homeassistant.helpers.httpx_client import get_async_client
 
 from .const import DOMAIN, UPDATE_INTERVAL
+from .entity import AqualinkEntity
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -194,44 +189,3 @@ def refresh_system[_AqualinkEntityT: AqualinkEntity, **_P](
         async_dispatcher_send(self.hass, DOMAIN)
 
     return wrapper
-
-
-class AqualinkEntity(Entity):
-    """Abstract class for all Aqualink platforms.
-
-    Entity state is updated via the interval timer within the integration.
-    Any entity state change via the iaqualink library triggers an internal
-    state refresh which is then propagated to all the entities in the system
-    via the refresh_system decorator above to the _update_callback in this
-    class.
-    """
-
-    _attr_should_poll = False
-
-    def __init__(self, dev: AqualinkDevice) -> None:
-        """Initialize the entity."""
-        self.dev = dev
-        self._attr_unique_id = f"{dev.system.serial}_{dev.name}"
-        self._attr_device_info = DeviceInfo(
-            identifiers={(DOMAIN, self._attr_unique_id)},
-            manufacturer=dev.manufacturer,
-            model=dev.model,
-            name=dev.label,
-            via_device=(DOMAIN, dev.system.serial),
-        )
-
-    async def async_added_to_hass(self) -> None:
-        """Set up a listener when this entity is added to HA."""
-        self.async_on_remove(
-            async_dispatcher_connect(self.hass, DOMAIN, self.async_write_ha_state)
-        )
-
-    @property
-    def assumed_state(self) -> bool:
-        """Return whether the state is based on actual reading from the device."""
-        return self.dev.system.online in [False, None]
-
-    @property
-    def available(self) -> bool:
-        """Return whether the device is available or not."""
-        return self.dev.system.online is True
diff --git a/homeassistant/components/iaqualink/binary_sensor.py b/homeassistant/components/iaqualink/binary_sensor.py
index 06dbcf18e4a..9e173dc36e0 100644
--- a/homeassistant/components/iaqualink/binary_sensor.py
+++ b/homeassistant/components/iaqualink/binary_sensor.py
@@ -5,7 +5,7 @@ from __future__ import annotations
 from iaqualink.device import AqualinkBinarySensor
 
 from homeassistant.components.binary_sensor import (
-    DOMAIN,
+    DOMAIN as BINARY_SENSOR_DOMAIN,
     BinarySensorDeviceClass,
     BinarySensorEntity,
 )
@@ -13,8 +13,8 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
-from . import AqualinkEntity
 from .const import DOMAIN as AQUALINK_DOMAIN
+from .entity import AqualinkEntity
 
 PARALLEL_UPDATES = 0
 
@@ -26,7 +26,10 @@ async def async_setup_entry(
 ) -> None:
     """Set up discovered binary sensors."""
     async_add_entities(
-        (HassAqualinkBinarySensor(dev) for dev in hass.data[AQUALINK_DOMAIN][DOMAIN]),
+        (
+            HassAqualinkBinarySensor(dev)
+            for dev in hass.data[AQUALINK_DOMAIN][BINARY_SENSOR_DOMAIN]
+        ),
         True,
     )
diff --git a/homeassistant/components/iaqualink/climate.py b/homeassistant/components/iaqualink/climate.py
index 8ed3026e72e..53d1bce80de 100644
--- a/homeassistant/components/iaqualink/climate.py
+++ b/homeassistant/components/iaqualink/climate.py
@@ -20,8 +20,9 @@ from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
-from . import AqualinkEntity, refresh_system
+from . import refresh_system
 from .const import DOMAIN as AQUALINK_DOMAIN
+from .entity import AqualinkEntity
 from .utils import await_or_reraise
 
 _LOGGER = logging.getLogger(__name__)
@@ -53,7 +54,6 @@ class HassAqualinkThermostat(AqualinkEntity, ClimateEntity):
         | ClimateEntityFeature.TURN_OFF
         | ClimateEntityFeature.TURN_ON
     )
-    _enable_turn_on_off_backwards_compatibility = False
 
     def __init__(self, dev: AqualinkThermostat) -> None:
         """Initialize AquaLink thermostat."""
diff --git a/homeassistant/components/iaqualink/config_flow.py b/homeassistant/components/iaqualink/config_flow.py
index 3605c328903..2cb1ba4b5d7 100644
--- a/homeassistant/components/iaqualink/config_flow.py
+++ b/homeassistant/components/iaqualink/config_flow.py
@@ -27,11 +27,6 @@ class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle a flow start."""
-        # Supporting a single account.
-        entries = self._async_current_entries()
-        if entries:
-            return self.async_abort(reason="single_instance_allowed")
-
         errors = {}
 
         if user_input is not None:
diff --git a/homeassistant/components/iaqualink/entity.py b/homeassistant/components/iaqualink/entity.py
new file mode 100644
index 00000000000..437611e5a5f
--- /dev/null
+++ b/homeassistant/components/iaqualink/entity.py
@@ -0,0 +1,52 @@
+"""Component to embed Aqualink devices."""
+
+from __future__ import annotations
+
+from iaqualink.device import AqualinkDevice
+
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.dispatcher import async_dispatcher_connect
+from homeassistant.helpers.entity import Entity
+
+from .const import DOMAIN
+
+
+class AqualinkEntity(Entity):
+    """Abstract class for all Aqualink platforms.
+
+    Entity state is updated via the interval timer within the integration.
+    Any entity state change via the iaqualink library triggers an internal
+    state refresh which is then propagated to all the entities in the system
+    via the refresh_system decorator above to the _update_callback in this
+    class.
+ """ + + _attr_should_poll = False + + def __init__(self, dev: AqualinkDevice) -> None: + """Initialize the entity.""" + self.dev = dev + self._attr_unique_id = f"{dev.system.serial}_{dev.name}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._attr_unique_id)}, + manufacturer=dev.manufacturer, + model=dev.model, + name=dev.label, + via_device=(DOMAIN, dev.system.serial), + ) + + async def async_added_to_hass(self) -> None: + """Set up a listener when this entity is added to HA.""" + self.async_on_remove( + async_dispatcher_connect(self.hass, DOMAIN, self.async_write_ha_state) + ) + + @property + def assumed_state(self) -> bool: + """Return whether the state is based on actual reading from the device.""" + return self.dev.system.online in [False, None] + + @property + def available(self) -> bool: + """Return whether the device is available or not.""" + return self.dev.system.online is True diff --git a/homeassistant/components/iaqualink/light.py b/homeassistant/components/iaqualink/light.py index bce4f2c9855..59172c13576 100644 --- a/homeassistant/components/iaqualink/light.py +++ b/homeassistant/components/iaqualink/light.py @@ -9,7 +9,7 @@ from iaqualink.device import AqualinkLight from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_EFFECT, - DOMAIN, + DOMAIN as LIGHT_DOMAIN, ColorMode, LightEntity, LightEntityFeature, @@ -18,8 +18,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AqualinkEntity, refresh_system +from . import refresh_system from .const import DOMAIN as AQUALINK_DOMAIN +from .entity import AqualinkEntity from .utils import await_or_reraise PARALLEL_UPDATES = 0 @@ -32,7 +33,8 @@ async def async_setup_entry( ) -> None: """Set up discovered lights.""" async_add_entities( - (HassAqualinkLight(dev) for dev in hass.data[AQUALINK_DOMAIN][DOMAIN]), True + (HassAqualinkLight(dev) for dev in hass.data[AQUALINK_DOMAIN][LIGHT_DOMAIN]), + True, ) diff --git a/homeassistant/components/iaqualink/manifest.json b/homeassistant/components/iaqualink/manifest.json index 8834a538be9..2531632075c 100644 --- a/homeassistant/components/iaqualink/manifest.json +++ b/homeassistant/components/iaqualink/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/iaqualink", "iot_class": "cloud_polling", "loggers": ["iaqualink"], - "requirements": ["iaqualink==0.5.0", "h2==4.1.0"] + "requirements": ["iaqualink==0.5.0", "h2==4.1.0"], + "single_config_entry": true } diff --git a/homeassistant/components/iaqualink/sensor.py b/homeassistant/components/iaqualink/sensor.py index 8e3983e9c91..881adb420bf 100644 --- a/homeassistant/components/iaqualink/sensor.py +++ b/homeassistant/components/iaqualink/sensor.py @@ -4,14 +4,18 @@ from __future__ import annotations from iaqualink.device import AqualinkSensor -from homeassistant.components.sensor import DOMAIN, SensorDeviceClass, SensorEntity +from homeassistant.components.sensor import ( + DOMAIN as SENSOR_DOMAIN, + SensorDeviceClass, + SensorEntity, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
 from .const import DOMAIN as AQUALINK_DOMAIN
+from .entity import AqualinkEntity
 
 PARALLEL_UPDATES = 0
 
@@ -23,7 +27,8 @@ async def async_setup_entry(
 ) -> None:
     """Set up discovered sensors."""
     async_add_entities(
-        (HassAqualinkSensor(dev) for dev in hass.data[AQUALINK_DOMAIN][DOMAIN]), True
+        (HassAqualinkSensor(dev) for dev in hass.data[AQUALINK_DOMAIN][SENSOR_DOMAIN]),
+        True,
     )
diff --git a/homeassistant/components/iaqualink/strings.json b/homeassistant/components/iaqualink/strings.json
index 85b49996f51..032e1a592d9 100644
--- a/homeassistant/components/iaqualink/strings.json
+++ b/homeassistant/components/iaqualink/strings.json
@@ -13,9 +13,6 @@
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
-    },
-    "abort": {
-      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
     }
   }
 }
diff --git a/homeassistant/components/iaqualink/switch.py b/homeassistant/components/iaqualink/switch.py
index e681879855b..601c5701a4a 100644
--- a/homeassistant/components/iaqualink/switch.py
+++ b/homeassistant/components/iaqualink/switch.py
@@ -6,13 +6,14 @@ from typing import Any
 
 from iaqualink.device import AqualinkSwitch
 
-from homeassistant.components.switch import DOMAIN, SwitchEntity
+from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SwitchEntity
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
-from . import AqualinkEntity, refresh_system
+from . import refresh_system
 from .const import DOMAIN as AQUALINK_DOMAIN
+from .entity import AqualinkEntity
 from .utils import await_or_reraise
 
 PARALLEL_UPDATES = 0
@@ -25,7 +26,8 @@ async def async_setup_entry(
 ) -> None:
     """Set up discovered switches."""
     async_add_entities(
-        (HassAqualinkSwitch(dev) for dev in hass.data[AQUALINK_DOMAIN][DOMAIN]), True
+        (HassAqualinkSwitch(dev) for dev in hass.data[AQUALINK_DOMAIN][SWITCH_DOMAIN]),
+        True,
     )
diff --git a/homeassistant/components/ibeacon/config_flow.py b/homeassistant/components/ibeacon/config_flow.py
index 424befa81ec..c00398e39b0 100644
--- a/homeassistant/components/ibeacon/config_flow.py
+++ b/homeassistant/components/ibeacon/config_flow.py
@@ -30,9 +30,6 @@ class IBeaconConfigFlow(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle the initial step."""
-        if self._async_current_entries():
-            return self.async_abort(reason="single_instance_allowed")
-
         if not bluetooth.async_scanner_count(self.hass, connectable=False):
             return self.async_abort(reason="bluetooth_not_available")
 
@@ -47,16 +44,12 @@ class IBeaconConfigFlow(ConfigFlow, domain=DOMAIN):
         config_entry: ConfigEntry,
     ) -> OptionsFlow:
         """Get the options flow for this handler."""
-        return IBeaconOptionsFlow(config_entry)
+        return IBeaconOptionsFlow()
 
 
 class IBeaconOptionsFlow(OptionsFlow):
     """Handle options."""
 
-    def __init__(self, config_entry: ConfigEntry) -> None:
-        """Initialize options flow."""
-        self.config_entry = config_entry
-
     async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult:
         """Manage the options."""
         errors = {}
diff --git a/homeassistant/components/ibeacon/manifest.json b/homeassistant/components/ibeacon/manifest.json
index 8dbc99c8ada..8bd7e3ab9cc 100644
@@ -13,5 +13,6 @@
   "documentation": "https://www.home-assistant.io/integrations/ibeacon",
   "iot_class": "local_push",
   "loggers": ["bleak"],
-  "requirements": ["ibeacon-ble==1.2.0"]
+  "requirements": ["ibeacon-ble==1.2.0"],
+  "single_config_entry": true
 }
diff --git a/homeassistant/components/ibeacon/strings.json b/homeassistant/components/ibeacon/strings.json
index 440df8292a9..9307f848644 100644
--- a/homeassistant/components/ibeacon/strings.json
+++ b/homeassistant/components/ibeacon/strings.json
@@ -6,8 +6,7 @@
       }
     },
     "abort": {
-      "bluetooth_not_available": "At least one Bluetooth adapter or remote must be configured to use iBeacon Tracker.",
-      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
+      "bluetooth_not_available": "At least one Bluetooth adapter or remote must be configured to use iBeacon Tracker."
     }
   },
   "options": {
diff --git a/homeassistant/components/icloud/config_flow.py b/homeassistant/components/icloud/config_flow.py
index 544f751dc0b..efcef15b4d0 100644
--- a/homeassistant/components/icloud/config_flow.py
+++ b/homeassistant/components/icloud/config_flow.py
@@ -5,7 +5,7 @@ from __future__ import annotations
 from collections.abc import Mapping
 import logging
 import os
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 from pyicloud import PyiCloudService
 from pyicloud.exceptions import (
@@ -200,11 +200,17 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN):
 
         return await self._validate_and_create_entry(user_input, "reauth_confirm")
 
-    async def async_step_trusted_device(self, user_input=None, errors=None):
+    async def async_step_trusted_device(
+        self,
+        user_input: dict[str, Any] | None = None,
+        errors: dict[str, str] | None = None,
+    ) -> ConfigFlowResult:
         """We need a trusted device."""
         if errors is None:
             errors = {}
 
+        if TYPE_CHECKING:
+            assert self.api is not None
         trusted_devices = await self.hass.async_add_executor_job(
             getattr, self.api, "trusted_devices"
         )
@@ -216,7 +222,7 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN):
 
         if user_input is None:
             return await self._show_trusted_device_form(
-                trusted_devices_for_form, user_input, errors
+                trusted_devices_for_form, errors
             )
 
         self._trusted_device = trusted_devices[int(user_input[CONF_TRUSTED_DEVICE])]
@@ -229,18 +235,18 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN):
             errors[CONF_TRUSTED_DEVICE] = "send_verification_code"
 
             return await self._show_trusted_device_form(
-                trusted_devices_for_form, user_input, errors
+                trusted_devices_for_form, errors
             )
 
         return await self.async_step_verification_code()
 
     async def _show_trusted_device_form(
-        self, trusted_devices, user_input=None, errors=None
-    ):
+        self, trusted_devices, errors: dict[str, str] | None = None
+    ) -> ConfigFlowResult:
         """Show the trusted_device form to the user."""
 
         return self.async_show_form(
-            step_id=CONF_TRUSTED_DEVICE,
+            step_id="trusted_device",
             data_schema=vol.Schema(
                 {
                     vol.Required(CONF_TRUSTED_DEVICE): vol.All(
@@ -251,13 +257,20 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN):
             errors=errors or {},
         )
 
-    async def async_step_verification_code(self, user_input=None, errors=None):
+    async def async_step_verification_code(
+        self,
+        user_input: dict[str, Any] | None = None,
+        errors: dict[str, str] | None = None,
+    ) -> ConfigFlowResult:
         """Ask the verification code to the user."""
         if errors is None:
             errors = {}
 
         if user_input is None:
-            return await self._show_verification_code_form(user_input, errors)
+            return await self._show_verification_code_form(errors)
+
+        if TYPE_CHECKING:
+            assert self.api is not None
 
         self._verification_code = user_input[CONF_VERIFICATION_CODE]
 
@@ -310,11 +323,13 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN):
             }
         )
 
-    async def _show_verification_code_form(self, user_input=None, errors=None):
+    async def _show_verification_code_form(
+        self, errors: dict[str, str] | None = None
+    ) -> ConfigFlowResult:
         """Show the verification_code form to the user."""
 
         return self.async_show_form(
-            step_id=CONF_VERIFICATION_CODE,
+            step_id="verification_code",
             data_schema=vol.Schema({vol.Required(CONF_VERIFICATION_CODE): str}),
-            errors=errors or {},
+            errors=errors,
         )
diff --git a/homeassistant/components/icloud/device_tracker.py b/homeassistant/components/icloud/device_tracker.py
index 48070a7f153..11a18a10020 100644
--- a/homeassistant/components/icloud/device_tracker.py
+++ b/homeassistant/components/icloud/device_tracker.py
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 from typing import Any
 
-from homeassistant.components.device_tracker import SourceType, TrackerEntity
+from homeassistant.components.device_tracker import TrackerEntity
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
 from homeassistant.helpers.device_registry import DeviceInfo
@@ -87,11 +87,6 @@ class IcloudTrackerEntity(TrackerEntity):
         """Return the battery level of the device."""
         return self._device.battery_level
 
-    @property
-    def source_type(self) -> SourceType:
-        """Return the source type, eg gps or router, of the device."""
-        return SourceType.GPS
-
     @property
     def icon(self) -> str:
         """Return the icon."""
diff --git a/homeassistant/components/icloud/icons.json b/homeassistant/components/icloud/icons.json
index 4ed856aabc1..16280a063e3 100644
--- a/homeassistant/components/icloud/icons.json
+++ b/homeassistant/components/icloud/icons.json
@@ -1,8 +1,16 @@
 {
   "services": {
-    "update": "mdi:update",
-    "play_sound": "mdi:speaker-wireless",
-    "display_message": "mdi:message-alert",
-    "lost_device": "mdi:devices"
+    "update": {
+      "service": "mdi:update"
+    },
+    "play_sound": {
+      "service": "mdi:speaker-wireless"
+    },
+    "display_message": {
+      "service": "mdi:message-alert"
+    },
+    "lost_device": {
+      "service": "mdi:devices"
+    }
   }
 }
diff --git a/homeassistant/components/idasen_desk/manifest.json b/homeassistant/components/idasen_desk/manifest.json
index 17a5f519274..0f8c9eaafc9 100644
--- a/homeassistant/components/idasen_desk/manifest.json
+++ b/homeassistant/components/idasen_desk/manifest.json
@@ -11,6 +11,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/idasen_desk",
   "iot_class": "local_push",
-  "quality_scale": "silver",
   "requirements": ["idasen-ha==2.6.2"]
 }
diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml
new file mode 100644
index 00000000000..28381f98a3e
--- /dev/null
+++ b/homeassistant/components/idasen_desk/quality_scale.yaml
@@ -0,0 +1,108 @@
+rules:
+  # Bronze
+  action-setup:
+    status: exempt
+    comment: |
+      This integration does not provide additional actions.
+  appropriate-polling:
+    status: exempt
+    comment: |
+      This integration does not use polling.
+  brands: done
+  common-modules:
+    status: todo
+    comment: |
+      The cover and sensor entities could move common initialization to a base entity class.
+  config-flow-test-coverage:
+    status: todo
+    comment: |
+      - use mock_desk_api
+      - merge test_user_step_auth_failed, test_user_step_cannot_connect and test_user_step_unknown_exception.
+  config-flow:
+    status: todo
+    comment: |
+      Missing data description for user step.
+  dependency-transparency: done
+  docs-actions:
+    status: exempt
+    comment: |
+      This integration does not provide additional actions.
+  docs-high-level-description: todo
+  docs-installation-instructions: done
+  docs-removal-instructions: todo
+  entity-event-setup: done
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: todo
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+
+  # Silver
+  action-exceptions: todo
+  config-entry-unloading: done
+  docs-configuration-parameters:
+    status: exempt
+    comment: |
+      This integration does not provide configuration parameters.
+  docs-installation-parameters:
+    status: exempt
+    comment: |
+      This integration does not provide installation parameters.
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: todo
+  parallel-updates: todo
+  reauthentication-flow:
+    status: exempt
+    comment: |
+      This integration does not require authentication.
+  test-coverage:
+    status: todo
+    comment: |
+      - remove the await hass.async_block_till_done() after service calls with blocking=True
+      - use constants (like SERVICE_PRESS and ATTR_ENTITY_ID) in the tests calling services
+      - rename test_buttons.py -> test_button.py
+      - rename test_sensors.py -> test_sensor.py
+  # Gold
+  devices: done
+  diagnostics: todo
+  discovery-update-info:
+    status: exempt
+    comment: |
+      This integration uses Bluetooth and addresses don't change.
+  discovery: done
+  docs-data-update: todo
+  docs-examples: todo
+  docs-known-limitations: todo
+  docs-supported-devices: todo
+  docs-supported-functions: todo
+  docs-troubleshooting: todo
+  docs-use-cases: todo
+  dynamic-devices: todo
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default: done
+  entity-translations: done
+  exception-translations: todo
+  icon-translations: done
+  reconfiguration-flow:
+    status: exempt
+    comment: |
+      This integration doesn't have any cases where a reconfiguration is needed.
+  repair-issues:
+    status: exempt
+    comment: |
+      This integration doesn't have any cases where raising an issue is needed.
+  stale-devices:
+    status: exempt
+    comment: |
+      This integration has a fixed single device.
+
+  # Platinum
+  async-dependency: done
+  inject-websession:
+    status: exempt
+    comment: |
+      This integration doesn't use websession.
+  strict-typing: todo
diff --git a/homeassistant/components/idteck_prox/manifest.json b/homeassistant/components/idteck_prox/manifest.json
index e1d9b8a7ba8..92055908591 100644
--- a/homeassistant/components/idteck_prox/manifest.json
+++ b/homeassistant/components/idteck_prox/manifest.json
@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/idteck_prox",
   "iot_class": "local_push",
   "loggers": ["rfk101py"],
+  "quality_scale": "legacy",
   "requirements": ["rfk101py==0.0.1"]
 }
diff --git a/homeassistant/components/ifttt/alarm_control_panel.py b/homeassistant/components/ifttt/alarm_control_panel.py
index 1af23d716c8..739352485bd 100644
--- a/homeassistant/components/ifttt/alarm_control_panel.py
+++ b/homeassistant/components/ifttt/alarm_control_panel.py
@@ -10,6 +10,7 @@ from homeassistant.components.alarm_control_panel import (
     PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA,
     AlarmControlPanelEntity,
     AlarmControlPanelEntityFeature,
+    AlarmControlPanelState,
     CodeFormat,
 )
 from homeassistant.const import (
@@ -18,10 +19,6 @@ from homeassistant.const import (
     CONF_CODE,
     CONF_NAME,
     CONF_OPTIMISTIC,
-    STATE_ALARM_ARMED_AWAY,
-    STATE_ALARM_ARMED_HOME,
-    STATE_ALARM_ARMED_NIGHT,
-    STATE_ALARM_DISARMED,
 )
 from homeassistant.core import HomeAssistant, ServiceCall
 import homeassistant.helpers.config_validation as cv
@@ -33,10 +30,10 @@ from . import ATTR_EVENT, DOMAIN, SERVICE_PUSH_ALARM_STATE, SERVICE_TRIGGER
 _LOGGER = logging.getLogger(__name__)
 
 ALLOWED_STATES = [
-    STATE_ALARM_DISARMED,
-    STATE_ALARM_ARMED_NIGHT,
-    STATE_ALARM_ARMED_AWAY,
-    STATE_ALARM_ARMED_HOME,
+    AlarmControlPanelState.DISARMED,
+    AlarmControlPanelState.ARMED_NIGHT,
+    AlarmControlPanelState.ARMED_AWAY,
+    AlarmControlPanelState.ARMED_HOME,
 ]
 
 DATA_IFTTT_ALARM = "ifttt_alarm"
@@ -168,40 +165,41 @@ class IFTTTAlarmPanel(AlarmControlPanelEntity):
         """Send disarm command."""
         if not self._check_code(code):
             return
-        self.set_alarm_state(self._event_disarm, STATE_ALARM_DISARMED)
+        self.set_alarm_state(self._event_disarm, AlarmControlPanelState.DISARMED)
 
     def alarm_arm_away(self, code: str | None = None) -> None:
         """Send arm away command."""
         if self._code_arm_required and not self._check_code(code):
             return
-        self.set_alarm_state(self._event_away, STATE_ALARM_ARMED_AWAY)
+        self.set_alarm_state(self._event_away, AlarmControlPanelState.ARMED_AWAY)
 
     def alarm_arm_home(self, code: str | None = None) -> None:
         """Send arm home command."""
         if self._code_arm_required and not self._check_code(code):
             return
-        self.set_alarm_state(self._event_home, STATE_ALARM_ARMED_HOME)
+        self.set_alarm_state(self._event_home, AlarmControlPanelState.ARMED_HOME)
 
     def alarm_arm_night(self, code: str | None = None) -> None:
         """Send arm night command."""
         if self._code_arm_required and not self._check_code(code):
             return
-        self.set_alarm_state(self._event_night, STATE_ALARM_ARMED_NIGHT)
+        self.set_alarm_state(self._event_night, AlarmControlPanelState.ARMED_NIGHT)
 
-    def set_alarm_state(self, event: str, state: str) -> None:
+    def set_alarm_state(self, event: str, state: AlarmControlPanelState) -> None:
         """Call the IFTTT trigger service to change the alarm state."""
         data = {ATTR_EVENT: event}
 
         self.hass.services.call(DOMAIN, SERVICE_TRIGGER, data)
         _LOGGER.debug("Called IFTTT integration to trigger event %s", event)
         if self._optimistic:
-            self._attr_state = state
+            self._attr_alarm_state = state
 
     def push_alarm_state(self, value: str) -> None:
         """Push the alarm state to the given value."""
+        value = AlarmControlPanelState(value)
         if value in ALLOWED_STATES:
             _LOGGER.debug("Pushed the alarm state to %s", value)
-            self._attr_state = value
+            self._attr_alarm_state = value
 
     def _check_code(self, code: str | None) -> bool:
         return self._code is None or self._code == code
diff --git a/homeassistant/components/ifttt/icons.json b/homeassistant/components/ifttt/icons.json
index b943478a70b..a90d76f664a 100644
--- a/homeassistant/components/ifttt/icons.json
+++ b/homeassistant/components/ifttt/icons.json
@@ -1,6 +1,10 @@
 {
   "services": {
-    "push_alarm_state": "mdi:security",
-    "trigger": "mdi:play"
+    "push_alarm_state": {
+      "service": "mdi:security"
+    },
+    "trigger": {
+      "service": "mdi:play"
+    }
   }
 }
diff --git a/homeassistant/components/iglo/light.py b/homeassistant/components/iglo/light.py
index a31183f4489..0d20761c6e5 100644
--- a/homeassistant/components/iglo/light.py
+++ b/homeassistant/components/iglo/light.py
@@ -2,7 +2,6 @@
 
 from __future__ import annotations
 
-import math
 from typing import Any
 
 from iglo import Lamp
@@ -11,7 +10,7 @@ import voluptuous as vol
 
 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
-    ATTR_COLOR_TEMP,
+    ATTR_COLOR_TEMP_KELVIN,
     ATTR_EFFECT,
     ATTR_HS_COLOR,
     PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA,
@@ -83,23 +82,19 @@ class IGloLamp(LightEntity):
         return ColorMode.HS
 
     @property
-    def color_temp(self):
-        """Return the color temperature."""
-        return color_util.color_temperature_kelvin_to_mired(self._lamp.state()["white"])
+    def color_temp_kelvin(self) -> int | None:
+        """Return the color temperature value in Kelvin."""
+        return self._lamp.state()["white"]
 
     @property
-    def min_mireds(self) -> int:
-        """Return the coldest color_temp that this light supports."""
-        return math.ceil(
-            color_util.color_temperature_kelvin_to_mired(self._lamp.max_kelvin)
-        )
+    def max_color_temp_kelvin(self) -> int:
+        """Return the coldest color_temp_kelvin that this light supports."""
+        return self._lamp.max_kelvin
 
     @property
-    def max_mireds(self) -> int:
-        """Return the warmest color_temp that this light supports."""
-        return math.ceil(
-            color_util.color_temperature_kelvin_to_mired(self._lamp.min_kelvin)
-        )
+    def min_color_temp_kelvin(self) -> int:
+        """Return the warmest color_temp_kelvin that this light supports."""
+        return self._lamp.min_kelvin
 
     @property
     def hs_color(self):
@@ -135,11 +130,8 @@ class IGloLamp(LightEntity):
             self._lamp.rgb(*rgb)
             return
 
-        if ATTR_COLOR_TEMP in kwargs:
-            kelvin = int(
-                color_util.color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP])
-            )
-            self._lamp.white(kelvin)
+        if ATTR_COLOR_TEMP_KELVIN in kwargs:
+            self._lamp.white(kwargs[ATTR_COLOR_TEMP_KELVIN])
             return
 
         if ATTR_EFFECT in kwargs:
diff --git a/homeassistant/components/iglo/manifest.json b/homeassistant/components/iglo/manifest.json
index f270d06bcae..7ce4804a516 100644
--- a/homeassistant/components/iglo/manifest.json
+++ b/homeassistant/components/iglo/manifest.json
@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/iglo",
   "iot_class": "local_polling",
   "loggers": ["iglo"],
+  "quality_scale": "legacy",
   "requirements": ["iglo==1.2.7"]
 }
diff --git a/homeassistant/components/ign_sismologia/manifest.json b/homeassistant/components/ign_sismologia/manifest.json
index c76013f6821..d371f0d3614 100644
--- a/homeassistant/components/ign_sismologia/manifest.json
+++ b/homeassistant/components/ign_sismologia/manifest.json
@@ -6,5 +6,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["georss_ign_sismologia_client"],
+  "quality_scale": "legacy",
   "requirements": ["georss-ign-sismologia-client==0.8"]
["georss-ign-sismologia-client==0.8"] } diff --git a/homeassistant/components/ihc/binary_sensor.py b/homeassistant/components/ihc/binary_sensor.py index ed273878cb4..413d89ca027 100644 --- a/homeassistant/components/ihc/binary_sensor.py +++ b/homeassistant/components/ihc/binary_sensor.py @@ -15,7 +15,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util.enum import try_parse_enum from .const import CONF_INVERTING, DOMAIN, IHC_CONTROLLER -from .ihcdevice import IHCDevice +from .entity import IHCEntity def setup_platform( @@ -48,7 +48,7 @@ def setup_platform( add_entities(devices) -class IHCBinarySensor(IHCDevice, BinarySensorEntity): +class IHCBinarySensor(IHCEntity, BinarySensorEntity): """IHC Binary Sensor. The associated IHC resource can be any in or output from a IHC product diff --git a/homeassistant/components/ihc/ihcdevice.py b/homeassistant/components/ihc/entity.py similarity index 97% rename from homeassistant/components/ihc/ihcdevice.py rename to homeassistant/components/ihc/entity.py index 07ff71b812a..f73c3079867 100644 --- a/homeassistant/components/ihc/ihcdevice.py +++ b/homeassistant/components/ihc/entity.py @@ -11,10 +11,10 @@ from .const import CONF_INFO, DOMAIN _LOGGER = logging.getLogger(__name__) -class IHCDevice(Entity): +class IHCEntity(Entity): """Base class for all IHC devices. - All IHC devices have an associated IHC resource. IHCDevice handled the + All IHC devices have an associated IHC resource. IHCEntity handled the registration of the IHC controller callback when the IHC resource changes. Derived classes must implement the on_ihc_change method """ diff --git a/homeassistant/components/ihc/icons.json b/homeassistant/components/ihc/icons.json index 73aab5f80d8..3842d1a48a6 100644 --- a/homeassistant/components/ihc/icons.json +++ b/homeassistant/components/ihc/icons.json @@ -1,8 +1,16 @@ { "services": { - "set_runtime_value_bool": "mdi:toggle-switch", - "set_runtime_value_int": "mdi:numeric", - "set_runtime_value_float": "mdi:numeric", - "pulse": "mdi:pulse" + "set_runtime_value_bool": { + "service": "mdi:toggle-switch" + }, + "set_runtime_value_int": { + "service": "mdi:numeric" + }, + "set_runtime_value_float": { + "service": "mdi:numeric" + }, + "pulse": { + "service": "mdi:pulse" + } } } diff --git a/homeassistant/components/ihc/light.py b/homeassistant/components/ihc/light.py index 98e373daff4..47f343304dc 100644 --- a/homeassistant/components/ihc/light.py +++ b/homeassistant/components/ihc/light.py @@ -12,7 +12,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import CONF_DIMMABLE, CONF_OFF_ID, CONF_ON_ID, DOMAIN, IHC_CONTROLLER -from .ihcdevice import IHCDevice +from .entity import IHCEntity from .util import async_pulse, async_set_bool, async_set_int @@ -50,7 +50,7 @@ def setup_platform( add_entities(devices) -class IhcLight(IHCDevice, LightEntity): +class IhcLight(IHCEntity, LightEntity): """Representation of a IHC light. 
 
     For dimmable lights, the associated IHC resource should be a light
diff --git a/homeassistant/components/ihc/manifest.json b/homeassistant/components/ihc/manifest.json
index 2400206c3a0..68cc1b2c754 100644
--- a/homeassistant/components/ihc/manifest.json
+++ b/homeassistant/components/ihc/manifest.json
@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/ihc",
   "iot_class": "local_push",
   "loggers": ["ihcsdk"],
+  "quality_scale": "legacy",
   "requirements": ["defusedxml==0.7.1", "ihcsdk==2.8.5"]
 }
diff --git a/homeassistant/components/ihc/sensor.py b/homeassistant/components/ihc/sensor.py
index 1ca41ed2666..f3b722b2cdd 100644
--- a/homeassistant/components/ihc/sensor.py
+++ b/homeassistant/components/ihc/sensor.py
@@ -12,7 +12,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util.unit_system import TEMPERATURE_UNITS
 
 from .const import DOMAIN, IHC_CONTROLLER
-from .ihcdevice import IHCDevice
+from .entity import IHCEntity
 
 
 def setup_platform(
@@ -38,7 +38,7 @@ def setup_platform(
     add_entities(devices)
 
 
-class IHCSensor(IHCDevice, SensorEntity):
+class IHCSensor(IHCEntity, SensorEntity):
     """Implementation of the IHC sensor."""
 
     def __init__(
diff --git a/homeassistant/components/ihc/switch.py b/homeassistant/components/ihc/switch.py
index f41f17bc998..b509c2dd10f 100644
--- a/homeassistant/components/ihc/switch.py
+++ b/homeassistant/components/ihc/switch.py
@@ -12,7 +12,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
 from .const import CONF_OFF_ID, CONF_ON_ID, DOMAIN, IHC_CONTROLLER
-from .ihcdevice import IHCDevice
+from .entity import IHCEntity
 from .util import async_pulse, async_set_bool
 
 
@@ -43,7 +43,7 @@ def setup_platform(
     add_entities(devices)
 
 
-class IHCSwitch(IHCDevice, SwitchEntity):
+class IHCSwitch(IHCEntity, SwitchEntity):
     """Representation of an IHC switch."""
 
     def __init__(
diff --git a/homeassistant/components/image/__init__.py b/homeassistant/components/image/__init__.py
index 2307a66d5a1..ea235127894 100644
--- a/homeassistant/components/image/__init__.py
+++ b/homeassistant/components/image/__init__.py
@@ -7,20 +7,28 @@ import collections
 from contextlib import suppress
 from dataclasses import dataclass
 from datetime import datetime, timedelta
-from functools import cached_property
 import logging
+import os
 from random import SystemRandom
 from typing import Final, final
 
 from aiohttp import hdrs, web
 import httpx
+from propcache import cached_property
+import voluptuous as vol
 
 from homeassistant.components.http import KEY_AUTHENTICATED, KEY_HASS, HomeAssistantView
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONTENT_TYPE_MULTIPART, EVENT_HOMEASSISTANT_STOP
-from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
+from homeassistant.core import (
+    Event,
+    EventStateChangedData,
+    HomeAssistant,
+    ServiceCall,
+    callback,
+)
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import config_validation as cv
+import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity import Entity, EntityDescription
 from homeassistant.helpers.entity_component import EntityComponent
 from homeassistant.helpers.event import (
@@ -28,17 +36,26 @@ from homeassistant.helpers.event import (
     async_track_time_interval,
 )
 from homeassistant.helpers.httpx_client import get_async_client
-from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType
+from homeassistant.helpers.typing import (
+    UNDEFINED,
+    ConfigType,
+    UndefinedType,
+    VolDictType,
+)
 
-from .const import DOMAIN, IMAGE_TIMEOUT
+from .const import DATA_COMPONENT, DOMAIN, IMAGE_TIMEOUT
 
 _LOGGER = logging.getLogger(__name__)
 
+SERVICE_SNAPSHOT: Final = "snapshot"
+
 ENTITY_ID_FORMAT: Final = DOMAIN + ".{}"
 PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA
 PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE
 SCAN_INTERVAL: Final = timedelta(seconds=30)
 
+ATTR_FILENAME: Final = "filename"
+
 DEFAULT_CONTENT_TYPE: Final = "image/jpeg"
 ENTITY_IMAGE_URL: Final = "/api/image_proxy/{0}?token={1}"
@@ -51,6 +68,8 @@ FRAME_BOUNDARY = "frame-boundary"
 FRAME_SEPARATOR = bytes(f"\r\n--{FRAME_BOUNDARY}\r\n", "utf-8")
 LAST_FRAME_MARKER = bytes(f"\r\n--{FRAME_BOUNDARY}--\r\n", "utf-8")
 
+IMAGE_SERVICE_SNAPSHOT: VolDictType = {vol.Required(ATTR_FILENAME): cv.string}
+
 
 class ImageEntityDescription(EntityDescription, frozen_or_thawed=True):
     """A class that describes image entities."""
@@ -70,7 +89,7 @@ class ImageContentTypeError(HomeAssistantError):
 
 def valid_image_content_type(content_type: str | None) -> str:
     """Validate the assigned content type is one of an image."""
-    if content_type is None or content_type.split("/", 1)[0] != "image":
+    if content_type is None or content_type.split("/", 1)[0].lower() != "image":
         raise ImageContentTypeError
     return content_type
 
@@ -88,7 +107,7 @@ async def _async_get_image(image_entity: ImageEntity, timeout: int) -> Image:
 
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the image component."""
-    component = hass.data[DOMAIN] = EntityComponent[ImageEntity](
+    component = hass.data[DATA_COMPONENT] = EntityComponent[ImageEntity](
         _LOGGER, DOMAIN, hass, SCAN_INTERVAL
     )
 
@@ -115,19 +134,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 
     hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, unsub_track_time_interval)
 
+    component.async_register_entity_service(
+        SERVICE_SNAPSHOT, IMAGE_SERVICE_SNAPSHOT, async_handle_snapshot_service
+    )
+
     return True
 
 
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up a config entry."""
-    component: EntityComponent[ImageEntity] = hass.data[DOMAIN]
-    return await component.async_setup_entry(entry)
+    return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
 
 
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
-    component: EntityComponent[ImageEntity] = hass.data[DOMAIN]
-    return await component.async_unload_entry(entry)
+    return await hass.data[DATA_COMPONENT].async_unload_entry(entry)
 
 
 CACHED_PROPERTIES_WITH_ATTR_ = {
@@ -327,7 +348,7 @@ async def async_get_still_stream(
             # While this results in additional bandwidth usage,
             # given the low frequency of image updates, it is acceptable.
             frame.extend(frame)
-        await response.write(frame)
+        await response.write(frame)  # type: ignore[arg-type]
         return True
 
     event = asyncio.Event()
@@ -382,3 +403,34 @@ class ImageStreamView(ImageView):
     ) -> web.StreamResponse:
         """Serve image stream."""
         return await async_get_still_stream(request, image_entity)
+
+
+async def async_handle_snapshot_service(
+    image: ImageEntity, service_call: ServiceCall
+) -> None:
+    """Handle snapshot services calls."""
+    hass = image.hass
+    snapshot_file: str = service_call.data[ATTR_FILENAME]
+
+    # check if we allow to access to that file
+    if not hass.config.is_allowed_path(snapshot_file):
+        raise HomeAssistantError(
+            f"Cannot write `{snapshot_file}`, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`"
+        )
+
+    async with asyncio.timeout(IMAGE_TIMEOUT):
+        image_data = await image.async_image()
+
+    if image_data is None:
+        return
+
+    def _write_image(to_file: str, image_data: bytes) -> None:
+        """Executor helper to write image."""
+        os.makedirs(os.path.dirname(to_file), exist_ok=True)
+        with open(to_file, "wb") as img_file:
+            img_file.write(image_data)
+
+    try:
+        await hass.async_add_executor_job(_write_image, snapshot_file, image_data)
+    except OSError as err:
+        raise HomeAssistantError("Can't write image to file") from err
diff --git a/homeassistant/components/image/const.py b/homeassistant/components/image/const.py
index d96f13b4951..a646b0dd3d5 100644
--- a/homeassistant/components/image/const.py
+++ b/homeassistant/components/image/const.py
@@ -1,7 +1,18 @@
 """Constants for the image integration."""
 
-from typing import Final
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Final
+
+from homeassistant.util.hass_dict import HassKey
+
+if TYPE_CHECKING:
+    from homeassistant.helpers.entity_component import EntityComponent
+
+    from . import ImageEntity
+
 DOMAIN: Final = "image"
+DATA_COMPONENT: HassKey[EntityComponent[ImageEntity]] = HassKey(DOMAIN)
 
 IMAGE_TIMEOUT: Final = 10
diff --git a/homeassistant/components/image/icons.json b/homeassistant/components/image/icons.json
index cec9c99d765..4434f3c180c 100644
--- a/homeassistant/components/image/icons.json
+++ b/homeassistant/components/image/icons.json
@@ -3,5 +3,10 @@
     "_": {
       "default": "mdi:image"
     }
+  },
+  "services": {
+    "snapshot": {
+      "service": "mdi:camera"
+    }
   }
 }
diff --git a/homeassistant/components/image/media_source.py b/homeassistant/components/image/media_source.py
index e7f240aef5c..8d06ec3807f 100644
--- a/homeassistant/components/image/media_source.py
+++ b/homeassistant/components/image/media_source.py
@@ -5,19 +5,17 @@ from __future__ import annotations
 from typing import cast
 
 from homeassistant.components.media_player import BrowseError, MediaClass
-from homeassistant.components.media_source.error import Unresolvable
-from homeassistant.components.media_source.models import (
+from homeassistant.components.media_source import (
     BrowseMediaSource,
     MediaSource,
     MediaSourceItem,
     PlayMedia,
+    Unresolvable,
 )
 from homeassistant.const import ATTR_FRIENDLY_NAME
 from homeassistant.core import HomeAssistant, State
-from homeassistant.helpers.entity_component import EntityComponent
 
-from . import ImageEntity
-from .const import DOMAIN
+from .const import DATA_COMPONENT, DOMAIN
 
 
 async def async_get_media_source(hass: HomeAssistant) -> ImageMediaSource:
@@ -37,8 +35,7 @@ class ImageMediaSource(MediaSource):
 
     async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
         """Resolve media to a url."""
-        component: EntityComponent[ImageEntity] = self.hass.data[DOMAIN]
-        image = component.get_entity(item.identifier)
+        image = self.hass.data[DATA_COMPONENT].get_entity(item.identifier)
 
         if not image:
             raise Unresolvable(f"Could not resolve media item: {item.identifier}")
@@ -55,7 +52,6 @@ class ImageMediaSource(MediaSource):
         if item.identifier:
             raise BrowseError("Unknown item")
 
-        component: EntityComponent[ImageEntity] = self.hass.data[DOMAIN]
         children = [
             BrowseMediaSource(
                 domain=DOMAIN,
@@ -69,7 +65,7 @@ class ImageMediaSource(MediaSource):
                 can_play=True,
                 can_expand=False,
             )
-            for image in component.entities
+            for image in self.hass.data[DATA_COMPONENT].entities
         ]
 
         return BrowseMediaSource(
diff --git a/homeassistant/components/image/services.yaml b/homeassistant/components/image/services.yaml
new file mode 100644
index 00000000000..8eef055cd89
--- /dev/null
+++ b/homeassistant/components/image/services.yaml
@@ -0,0 +1,12 @@
+# Describes the format for available image services
+
+snapshot:
+  target:
+    entity:
+      domain: image
+  fields:
+    filename:
+      required: true
+      example: "/tmp/image_snapshot.jpg"
+      selector:
+        text:
diff --git a/homeassistant/components/image/strings.json b/homeassistant/components/image/strings.json
index ea7ecd16956..011102f5b9e 100644
--- a/homeassistant/components/image/strings.json
+++ b/homeassistant/components/image/strings.json
@@ -4,5 +4,17 @@
     "_": {
       "name": "[%key:component::image::title%]"
     }
+  },
+  "services": {
+    "snapshot": {
+      "name": "Take snapshot",
+      "description": "Takes a snapshot from an image.",
+      "fields": {
+        "filename": {
+          "name": "Filename",
+          "description": "Template of a filename. Variable available is `entity_id`."
+        }
+      }
+    }
   }
 }
diff --git a/homeassistant/components/image_processing/__init__.py b/homeassistant/components/image_processing/__init__.py
index 2c1d0f9304c..0ac8d39813b 100644
--- a/homeassistant/components/image_processing/__init__.py
+++ b/homeassistant/components/image_processing/__init__.py
@@ -223,7 +223,7 @@ class ImageProcessingFaceEntity(ImageProcessingEntity):
                 confidence = f_co
                 for attr in (ATTR_NAME, ATTR_MOTION):
                     if attr in face:
-                        state = face[attr]  # type: ignore[literal-required]
+                        state = face[attr]
                         break
 
         return state
diff --git a/homeassistant/components/image_processing/icons.json b/homeassistant/components/image_processing/icons.json
index b19d29c186d..ae95718e381 100644
--- a/homeassistant/components/image_processing/icons.json
+++ b/homeassistant/components/image_processing/icons.json
@@ -1,5 +1,7 @@
 {
   "services": {
-    "scan": "mdi:qrcode-scan"
+    "scan": {
+      "service": "mdi:qrcode-scan"
+    }
   }
 }
diff --git a/homeassistant/components/image_upload/manifest.json b/homeassistant/components/image_upload/manifest.json
index 963721a0476..bb8c33ba749 100644
--- a/homeassistant/components/image_upload/manifest.json
+++ b/homeassistant/components/image_upload/manifest.json
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/image_upload",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["Pillow==10.4.0"]
+  "requirements": ["Pillow==11.0.0"]
 }
diff --git a/homeassistant/components/image_upload/media_source.py b/homeassistant/components/image_upload/media_source.py
new file mode 100644
index 00000000000..ee9511e2c36
--- /dev/null
+++ b/homeassistant/components/image_upload/media_source.py
@@ -0,0 +1,76 @@
+"""Expose image_upload as media sources."""
+
+from __future__ import annotations
+
+from homeassistant.components.media_player import BrowseError, MediaClass
+from homeassistant.components.media_source import (
+    BrowseMediaSource,
+    MediaSource,
+    MediaSourceItem,
+    PlayMedia,
+    Unresolvable,
+)
+from homeassistant.core import HomeAssistant
+
+from .const import DOMAIN
+
+
+async def async_get_media_source(hass: HomeAssistant) -> ImageUploadMediaSource:
+    """Set up image media source."""
+    return ImageUploadMediaSource(hass)
+
+
+class ImageUploadMediaSource(MediaSource):
+    """Provide images as media sources."""
+
+    name: str = "Image Upload"
+
+    def __init__(self, hass: HomeAssistant) -> None:
+        """Initialize ImageMediaSource."""
+        super().__init__(DOMAIN)
+        self.hass = hass
+
+    async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
+        """Resolve media to a url."""
+        image = self.hass.data[DOMAIN].data.get(item.identifier)
+
+        if not image:
+            raise Unresolvable(f"Could not resolve media item: {item.identifier}")
+
+        return PlayMedia(
+            f"/api/image/serve/{image['id']}/original", image["content_type"]
+        )
+
+    async def async_browse_media(
+        self,
+        item: MediaSourceItem,
+    ) -> BrowseMediaSource:
+        """Return media."""
+        if item.identifier:
+            raise BrowseError("Unknown item")
+
+        children = [
+            BrowseMediaSource(
+                domain=DOMAIN,
+                identifier=image["id"],
+                media_class=MediaClass.IMAGE,
+                media_content_type=image["content_type"],
+                title=image["name"],
+                thumbnail=f"/api/image/serve/{image['id']}/256x256",
+                can_play=True,
+                can_expand=False,
+            )
+            for image in self.hass.data[DOMAIN].data.values()
+        ]
+
+        return BrowseMediaSource(
+            domain=DOMAIN,
+            identifier=None,
+            media_class=MediaClass.APP,
+            media_content_type="",
+            title="Image Upload",
+            can_play=False,
+            can_expand=True,
children_media_class=MediaClass.IMAGE, + children=children, + ) diff --git a/homeassistant/components/imap/config_flow.py b/homeassistant/components/imap/config_flow.py index 6f93ce71d84..df0e63e200a 100644 --- a/homeassistant/components/imap/config_flow.py +++ b/homeassistant/components/imap/config_flow.py @@ -9,13 +9,14 @@ from typing import Any from aioimaplib import AioImapException import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlowWithConfigEntry, +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow +from homeassistant.const import ( + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, + CONF_VERIFY_SSL, ) -from homeassistant.const import CONF_PASSWORD, CONF_PORT, CONF_USERNAME, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers import config_validation as cv @@ -29,6 +30,7 @@ from homeassistant.helpers.selector import ( ) from homeassistant.util.ssl import SSLCipherList +from . import ImapConfigEntry from .const import ( CONF_CHARSET, CONF_CUSTOM_EVENT_DATA_TEMPLATE, @@ -144,7 +146,6 @@ class IMAPConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for imap.""" VERSION = 1 - _reauth_entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -177,9 +178,6 @@ class IMAPConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -187,17 +185,16 @@ class IMAPConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input = {**self._reauth_entry.data, **user_input} + user_input = {**reauth_entry.data, **user_input} if not (errors := await validate_input(self.hass, user_input)): - return self.async_update_reload_and_abort( - self._reauth_entry, data=user_input - ) + return self.async_update_reload_and_abort(reauth_entry, data=user_input) return self.async_show_form( description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], + CONF_NAME: reauth_entry.title, }, step_id="reauth_confirm", data_schema=vol.Schema( @@ -211,13 +208,13 @@ class IMAPConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, - ) -> OptionsFlow: + config_entry: ImapConfigEntry, + ) -> ImapOptionsFlow: """Get the options flow for this handler.""" - return OptionsFlow(config_entry) + return ImapOptionsFlow() -class OptionsFlow(OptionsFlowWithConfigEntry): +class ImapOptionsFlow(OptionsFlow): """Option flow handler.""" async def async_step_init( @@ -225,13 +222,13 @@ class OptionsFlow(OptionsFlowWithConfigEntry): ) -> ConfigFlowResult: """Manage the options.""" errors: dict[str, str] | None = None - entry_data: dict[str, Any] = dict(self._config_entry.data) + entry_data: dict[str, Any] = dict(self.config_entry.data) if user_input is not None: try: self._async_abort_entries_match( { - CONF_SERVER: self._config_entry.data[CONF_SERVER], - CONF_USERNAME: self._config_entry.data[CONF_USERNAME], + CONF_SERVER: 
self.config_entry.data[CONF_SERVER], + CONF_USERNAME: self.config_entry.data[CONF_USERNAME], CONF_FOLDER: user_input[CONF_FOLDER], CONF_SEARCH: user_input[CONF_SEARCH], } diff --git a/homeassistant/components/imap/coordinator.py b/homeassistant/components/imap/coordinator.py index a9d0fdfbd48..1df107196ff 100644 --- a/homeassistant/components/imap/coordinator.py +++ b/homeassistant/components/imap/coordinator.py @@ -14,7 +14,6 @@ from typing import TYPE_CHECKING, Any from aioimaplib import AUTH, IMAP4_SSL, NONAUTH, SELECTED, AioImapException -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_PASSWORD, CONF_PORT, @@ -53,6 +52,9 @@ from .const import ( ) from .errors import InvalidAuth, InvalidFolder +if TYPE_CHECKING: + from . import ImapConfigEntry + _LOGGER = logging.getLogger(__name__) BACKOFF_TIME = 10 @@ -210,14 +212,14 @@ class ImapMessage: class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]): """Base class for imap client.""" - config_entry: ConfigEntry + config_entry: ImapConfigEntry custom_event_template: Template | None def __init__( self, hass: HomeAssistant, imap_client: IMAP4_SSL, - entry: ConfigEntry, + entry: ImapConfigEntry, update_interval: timedelta | None, ) -> None: """Initiate imap client.""" @@ -332,7 +334,17 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]): raise UpdateFailed( f"Invalid response for search '{self.config_entry.data[CONF_SEARCH]}': {result} / {lines[0]}" ) - if not (count := len(message_ids := lines[0].split())): + # Check we do have returned items. + # + # In rare cases, when no UID's are returned, + # only the status line is returned, and not an empty line. + # See: https://github.com/home-assistant/core/issues/132042 + # + # Strictly the RfC notes that 0 or more numbers should be returned + # delimited by a space. 
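# Illustration (not part of the original patch): with this search the response normally
# contains a line of space-separated UIDs (possibly empty) followed by the status line;
# in the rare case described above only the status line comes back, so len(lines) == 1
# and there are no UIDs to split.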
+ # + # See: https://datatracker.ietf.org/doc/html/rfc3501#section-7.2.5 + if len(lines) == 1 or not (count := len(message_ids := lines[0].split())): self._last_message_uid = None return 0 last_message_uid = ( @@ -391,7 +403,7 @@ class ImapPollingDataUpdateCoordinator(ImapDataUpdateCoordinator): """Class for imap client.""" def __init__( - self, hass: HomeAssistant, imap_client: IMAP4_SSL, entry: ConfigEntry + self, hass: HomeAssistant, imap_client: IMAP4_SSL, entry: ImapConfigEntry ) -> None: """Initiate imap client.""" _LOGGER.debug( @@ -437,7 +449,7 @@ class ImapPushDataUpdateCoordinator(ImapDataUpdateCoordinator): """Class for imap client.""" def __init__( - self, hass: HomeAssistant, imap_client: IMAP4_SSL, entry: ConfigEntry + self, hass: HomeAssistant, imap_client: IMAP4_SSL, entry: ImapConfigEntry ) -> None: """Initiate imap client.""" _LOGGER.debug("Connected to server %s using IMAP push", entry.data[CONF_SERVER]) diff --git a/homeassistant/components/imap/icons.json b/homeassistant/components/imap/icons.json index 6672f9a4a7f..17a11d0fe22 100644 --- a/homeassistant/components/imap/icons.json +++ b/homeassistant/components/imap/icons.json @@ -10,9 +10,17 @@ } }, "services": { - "seen": "mdi:email-open-outline", - "move": "mdi:email-arrow-right-outline", - "delete": "mdi:trash-can-outline", - "fetch": "mdi:email-sync-outline" + "seen": { + "service": "mdi:email-open-outline" + }, + "move": { + "service": "mdi:email-arrow-right-outline" + }, + "delete": { + "service": "mdi:trash-can-outline" + }, + "fetch": { + "service": "mdi:email-sync-outline" + } } } diff --git a/homeassistant/components/imap/quality_scale.yaml b/homeassistant/components/imap/quality_scale.yaml new file mode 100644 index 00000000000..1c75b527882 --- /dev/null +++ b/homeassistant/components/imap/quality_scale.yaml @@ -0,0 +1,100 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: + status: todo + comment: | + The package is only tested, but not built and published inside a CI pipeline yet. + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: > + Per IMAP service instance there is one numeric sensor entity to reflect + the actual number of emails for a service. There is no event registration. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: + status: done + comment: | + Logs for unavailability are on debug level to avoid flooding the logs. + entity-unavailable: + status: done + comment: > + An entity is available as long as the service is loaded. + An `unknown` value is set if the mail service is temporarily unavailable. + action-exceptions: done + reauthentication-flow: done + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: done + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: + status: done + comment: The only entity supplied returns the primary value for the service. + discovery: + status: exempt + comment: | + Discovery for IMAP services is not desirable. 
+ stale-devices: + status: exempt + comment: > + The device class is a service. When removed, entities are removed as well. + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: + status: todo + comment: | + Options can be set through the options flow; reconfiguration is not supported yet. + dynamic-devices: + status: exempt + comment: | + The device class is a service. + discovery-update-info: + status: exempt + comment: Discovery is not desirable for this integration. + repair-issues: + status: exempt + comment: There are no repairs currently. + docs-use-cases: done + docs-supported-devices: + status: exempt + comment: The device class is a service. + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration does not use web sessions. + strict-typing: + status: todo + comment: | + Requirement 'aioimaplib==1.1.0' appears untyped diff --git a/homeassistant/components/imap/sensor.py b/homeassistant/components/imap/sensor.py index 625af9ce6a1..60892388252 100644 --- a/homeassistant/components/imap/sensor.py +++ b/homeassistant/components/imap/sensor.py @@ -7,7 +7,7 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import CONF_USERNAME +from homeassistant.const import CONF_USERNAME, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -17,12 +17,15 @@ from . import ImapConfigEntry from .const import DOMAIN from .coordinator import ImapDataUpdateCoordinator +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + IMAP_MAIL_COUNT_DESCRIPTION = SensorEntityDescription( key="imap_mail_count", + entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=0, translation_key="imap_mail_count", - name=None, ) diff --git a/homeassistant/components/imap/strings.json b/homeassistant/components/imap/strings.json index 115d46f3d0e..8ff5d838199 100644 --- a/homeassistant/components/imap/strings.json +++ b/homeassistant/components/imap/strings.json @@ -10,8 +10,21 @@ "charset": "Character set", "folder": "Folder", "search": "IMAP search", + "event_message_data": "Message data to be included in the `imap_content` event data:", "ssl_cipher_list": "SSL cipher list (Advanced)", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "username": "The IMAP username.", + "password": "The IMAP password.", + "server": "The IMAP server.", + "port": "The IMAP port supporting SSL, usually this is 993.", + "charset": "The character set used. Common values are `utf-8` or `US-ASCII`.", + "folder": "In general the folder is set to `INBOX`, but for a subfolder named `Test`, for example, this should be `INBOX.Test`.", + "search": "The IMAP search command, which is `UnSeen UnDeleted` by default.", + "event_message_data": "Note that the event size is limited, and not all message text might be sent with the event if the message is too large.", + "ssl_cipher_list": "If the IMAP service only supports legacy encryption, try to change this.", + "verify_ssl": "Recommended, to ensure the server certificate is valid. 
Turn off, if the server certificate is not trusted (e.g. self signed)." } }, "reauth_confirm": { @@ -19,6 +32,9 @@ "title": "[%key:common::config_flow::title::reauth%]", "data": { "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "Correct the IMAP password." } } }, @@ -35,6 +51,14 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, + "entity": { + "sensor": { + "imap_mail_count": { + "name": "Messages", + "unit_of_measurement": "messages" + } + } + }, "exceptions": { "copy_failed": { "message": "Copying the message failed with \"{error}\"." @@ -73,7 +97,15 @@ "custom_event_data_template": "Template to create custom event data", "max_message_size": "Max message size (2048 < size < 30000)", "enable_push": "Enable Push-IMAP if the server supports it. Turn off if Push-IMAP updates are unreliable.", - "event_message_data": "Message data to be included in the `imap_content` event data:" + "event_message_data": "Message data to be included in the `imap_content` event data." + }, + "data_description": { + "folder": "[%key:component::imap::config::step::user::data_description::folder%]", + "search": "[%key:component::imap::config::step::user::data_description::search%]", + "event_message_data": "[%key:component::imap::config::step::user::data_description::event_message_data%]", + "custom_event_data_template": "This template is evaluated when a new message was received, and the result is added to the `custom` attribute of the event data.", + "max_message_size": "Limit the maximum size of the event. Instead of passing the (whole) text message, using a template is a better option.", + "enable_push": "Using Push-IMAP is recommended. Polling will increase the time to respond." } } }, @@ -104,7 +136,7 @@ "services": { "fetch": { "name": "Fetch message", - "description": "Fetch the email message from the server.", + "description": "Fetch an email message from the server.", "fields": { "entry": { "name": "Entry", diff --git a/homeassistant/components/imgw_pib/manifest.json b/homeassistant/components/imgw_pib/manifest.json index 08946a802f1..ce3bc14d37b 100644 --- a/homeassistant/components/imgw_pib/manifest.json +++ b/homeassistant/components/imgw_pib/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/imgw_pib", "iot_class": "cloud_polling", - "quality_scale": "platinum", - "requirements": ["imgw_pib==1.0.5"] + "requirements": ["imgw_pib==1.0.7"] } diff --git a/homeassistant/components/improv_ble/config_flow.py b/homeassistant/components/improv_ble/config_flow.py index f38f4830ace..05dd1de449a 100644 --- a/homeassistant/components/improv_ble/config_flow.py +++ b/homeassistant/components/improv_ble/config_flow.py @@ -120,12 +120,22 @@ class ImprovBLEConfigFlow(ConfigFlow, domain=DOMAIN): assert self._discovery_info is not None service_data = self._discovery_info.service_data - improv_service_data = ImprovServiceData.from_bytes( - service_data[SERVICE_DATA_UUID] - ) + try: + improv_service_data = ImprovServiceData.from_bytes( + service_data[SERVICE_DATA_UUID] + ) + except improv_ble_errors.InvalidCommand as err: + _LOGGER.warning( + "Aborting improv flow, device %s sent invalid improv data: '%s'", + self._discovery_info.address, + service_data[SERVICE_DATA_UUID].hex(), + ) + raise AbortFlow("invalid_improv_data") from err + if improv_service_data.state in (State.PROVISIONING, State.PROVISIONED): _LOGGER.debug( - "Aborting improv flow, device is already provisioned: 
%s", + "Aborting improv flow, device %s is already provisioned: %s", + self._discovery_info.address, improv_service_data.state, ) raise AbortFlow("already_provisioned") diff --git a/homeassistant/components/incomfort/__init__.py b/homeassistant/components/incomfort/__init__.py index 39e471b7614..4b6a6a5fcc3 100644 --- a/homeassistant/components/incomfort/__init__.py +++ b/homeassistant/components/incomfort/__init__.py @@ -4,33 +4,15 @@ from __future__ import annotations from aiohttp import ClientResponseError from incomfortclient import IncomfortError, InvalidHeaterList -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers import config_validation as cv, issue_registry as ir -from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN from .coordinator import InComfortDataCoordinator, async_connect_gateway from .errors import InConfortTimeout, InConfortUnknownError, NoHeaters, NotFound -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Inclusive(CONF_USERNAME, "credentials"): cv.string, - vol.Inclusive(CONF_PASSWORD, "credentials"): cv.string, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - PLATFORMS = ( Platform.WATER_HEATER, Platform.BINARY_SENSOR, @@ -43,53 +25,6 @@ INTEGRATION_TITLE = "Intergas InComfort/Intouch Lan2RF gateway" type InComfortConfigEntry = ConfigEntry[InComfortDataCoordinator] -async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: - """Import config entry from configuration.yaml.""" - if not hass.config_entries.async_entries(DOMAIN): - # Start import flow - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config - ) - if result["type"] == FlowResultType.ABORT: - ir.async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=ir.IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": INTEGRATION_TITLE, - }, - ) - return - - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2025.1.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": INTEGRATION_TITLE, - }, - ) - - -async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool: - """Create an Intergas InComfort/Intouch system.""" - if config := hass_config.get(DOMAIN): - hass.async_create_task(_async_import(hass, config)) - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" try: diff --git a/homeassistant/components/incomfort/climate.py b/homeassistant/components/incomfort/climate.py index dc08ce8a6c0..41470180051 100644 --- a/homeassistant/components/incomfort/climate.py +++ 
b/homeassistant/components/incomfort/climate.py @@ -46,7 +46,6 @@ class InComfortClimate(IncomfortEntity, ClimateEntity): _attr_hvac_modes = [HVACMode.HEAT] _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -90,8 +89,10 @@ class InComfortClimate(IncomfortEntity, ClimateEntity): As we set the override, we report back the override. The actual set point is is returned at a later time. + Some older thermostats return 0.0 as override, in that case we fallback to + the actual setpoint. """ - return self._room.override + return self._room.override or self._room.setpoint async def async_set_temperature(self, **kwargs: Any) -> None: """Set a new target temperature for this zone.""" diff --git a/homeassistant/components/incomfort/config_flow.py b/homeassistant/components/incomfort/config_flow.py index e905f0d743d..f4838a9771d 100644 --- a/homeassistant/components/incomfort/config_flow.py +++ b/homeassistant/components/incomfort/config_flow.py @@ -81,11 +81,3 @@ class InComfortConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=CONFIG_SCHEMA, errors=errors ) - - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import `incomfort` config entry from configuration.yaml.""" - errors: dict[str, str] | None = None - if (errors := await async_try_connect_gateway(self.hass, import_data)) is None: - return self.async_create_entry(title=TITLE, data=import_data) - reason = next(iter(errors.items()))[1] - return self.async_abort(reason=reason) diff --git a/homeassistant/components/incomfort/manifest.json b/homeassistant/components/incomfort/manifest.json index 40c93012eef..f404f33b970 100644 --- a/homeassistant/components/incomfort/manifest.json +++ b/homeassistant/components/incomfort/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/incomfort", "iot_class": "local_polling", "loggers": ["incomfortclient"], - "requirements": ["incomfort-client==0.6.3-1"] + "requirements": ["incomfort-client==0.6.4"] } diff --git a/homeassistant/components/incomfort/water_heater.py b/homeassistant/components/incomfort/water_heater.py index 28424069d1c..e7620ac2a1a 100644 --- a/homeassistant/components/incomfort/water_heater.py +++ b/homeassistant/components/incomfort/water_heater.py @@ -54,12 +54,16 @@ class IncomfortWaterHeater(IncomfortBoilerEntity, WaterHeaterEntity): return {k: v for k, v in self._heater.status.items() if k in HEATER_ATTRS} @property - def current_temperature(self) -> float: + def current_temperature(self) -> float | None: """Return the current temperature.""" if self._heater.is_tapping: return self._heater.tap_temp if self._heater.is_pumping: return self._heater.heater_temp + if self._heater.heater_temp is None: + return self._heater.tap_temp + if self._heater.tap_temp is None: + return self._heater.heater_temp return max(self._heater.heater_temp, self._heater.tap_temp) @property diff --git a/homeassistant/components/influxdb/manifest.json b/homeassistant/components/influxdb/manifest.json index ad3f282eff7..55af2b37fb7 100644 --- a/homeassistant/components/influxdb/manifest.json +++ b/homeassistant/components/influxdb/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/influxdb", "iot_class": "local_push", "loggers": ["influxdb", "influxdb_client"], + "quality_scale": "legacy", "requirements": 
["influxdb==5.3.1", "influxdb-client==1.24.0"] } diff --git a/homeassistant/components/input_boolean/icons.json b/homeassistant/components/input_boolean/icons.json index dc595a60fba..088c9094b3f 100644 --- a/homeassistant/components/input_boolean/icons.json +++ b/homeassistant/components/input_boolean/icons.json @@ -8,9 +8,17 @@ } }, "services": { - "toggle": "mdi:toggle-switch", - "turn_off": "mdi:toggle-switch-off", - "turn_on": "mdi:toggle-switch", - "reload": "mdi:reload" + "toggle": { + "service": "mdi:toggle-switch" + }, + "turn_off": { + "service": "mdi:toggle-switch-off" + }, + "turn_on": { + "service": "mdi:toggle-switch" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_button/__init__.py b/homeassistant/components/input_button/__init__.py index 6584b40fb55..69ff235948d 100644 --- a/homeassistant/components/input_button/__init__.py +++ b/homeassistant/components/input_button/__init__.py @@ -128,6 +128,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True +# pylint: disable-next=hass-enforce-class-module class InputButton(collection.CollectionEntity, ButtonEntity, RestoreEntity): """Representation of a button.""" diff --git a/homeassistant/components/input_button/icons.json b/homeassistant/components/input_button/icons.json index 226b8ede110..20d41b4934a 100644 --- a/homeassistant/components/input_button/icons.json +++ b/homeassistant/components/input_button/icons.json @@ -1,6 +1,10 @@ { "services": { - "press": "mdi:gesture-tap-button", - "reload": "mdi:reload" + "press": { + "service": "mdi:gesture-tap-button" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_datetime/icons.json b/homeassistant/components/input_datetime/icons.json index de899023cf2..f3676f02220 100644 --- a/homeassistant/components/input_datetime/icons.json +++ b/homeassistant/components/input_datetime/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_datetime": "mdi:calendar-clock", - "reload": "mdi:reload" + "set_datetime": { + "service": "mdi:calendar-clock" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_number/icons.json b/homeassistant/components/input_number/icons.json index d1423838491..9f90582308b 100644 --- a/homeassistant/components/input_number/icons.json +++ b/homeassistant/components/input_number/icons.json @@ -1,8 +1,16 @@ { "services": { - "decrement": "mdi:minus", - "increment": "mdi:plus", - "set_value": "mdi:numeric", - "reload": "mdi:reload" + "decrement": { + "service": "mdi:minus" + }, + "increment": { + "service": "mdi:plus" + }, + "set_value": { + "service": "mdi:numeric" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_number/strings.json b/homeassistant/components/input_number/strings.json index 8a2351ebad4..ed6b6fad208 100644 --- a/homeassistant/components/input_number/strings.json +++ b/homeassistant/components/input_number/strings.json @@ -41,7 +41,7 @@ }, "increment": { "name": "Increment", - "description": "Increments the value by 1 step." + "description": "Increments the current value by 1 step." 
}, "set_value": { "name": "Set", diff --git a/homeassistant/components/input_select/__init__.py b/homeassistant/components/input_select/__init__.py index 6efe16240cb..a117cf0a867 100644 --- a/homeassistant/components/input_select/__init__.py +++ b/homeassistant/components/input_select/__init__.py @@ -246,6 +246,7 @@ class InputSelectStorageCollection(collection.DictStorageCollection): return {CONF_ID: item[CONF_ID]} | update_data +# pylint: disable-next=hass-enforce-class-module class InputSelect(collection.CollectionEntity, SelectEntity, RestoreEntity): """Representation of a select input.""" diff --git a/homeassistant/components/input_select/icons.json b/homeassistant/components/input_select/icons.json index 03b477ddb36..6ef5cfaf96a 100644 --- a/homeassistant/components/input_select/icons.json +++ b/homeassistant/components/input_select/icons.json @@ -1,11 +1,25 @@ { "services": { - "select_next": "mdi:skip-next", - "select_option": "mdi:check", - "select_previous": "mdi:skip-previous", - "select_first": "mdi:skip-backward", - "select_last": "mdi:skip-forward", - "set_options": "mdi:cog", - "reload": "mdi:reload" + "select_next": { + "service": "mdi:skip-next" + }, + "select_option": { + "service": "mdi:check" + }, + "select_previous": { + "service": "mdi:skip-previous" + }, + "select_first": { + "service": "mdi:skip-backward" + }, + "select_last": { + "service": "mdi:skip-forward" + }, + "set_options": { + "service": "mdi:cog" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_text/icons.json b/homeassistant/components/input_text/icons.json index 0190e4ffba2..8fca66668bc 100644 --- a/homeassistant/components/input_text/icons.json +++ b/homeassistant/components/input_text/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_value": "mdi:form-textbox", - "reload": "mdi:reload" + "set_value": { + "service": "mdi:form-textbox" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/insteon/api/__init__.py b/homeassistant/components/insteon/api/__init__.py index b19b1912340..d277a4b3caf 100644 --- a/homeassistant/components/insteon/api/__init__.py +++ b/homeassistant/components/insteon/api/__init__.py @@ -14,13 +14,16 @@ from .aldb import ( websocket_get_aldb, websocket_load_aldb, websocket_notify_on_aldb_status, + websocket_notify_on_aldb_status_all, websocket_reset_aldb, websocket_write_aldb, ) from .config import ( websocket_add_device_override, + websocket_get_broken_links, websocket_get_config, websocket_get_modem_schema, + websocket_get_unknown_devices, websocket_remove_device_override, websocket_update_modem_config, ) @@ -70,6 +73,7 @@ def async_load_api(hass): websocket_api.async_register_command(hass, websocket_notify_on_aldb_status) websocket_api.async_register_command(hass, websocket_add_x10_device) websocket_api.async_register_command(hass, websocket_remove_device) + websocket_api.async_register_command(hass, websocket_notify_on_aldb_status_all) websocket_api.async_register_command(hass, websocket_get_properties) websocket_api.async_register_command(hass, websocket_change_properties_record) @@ -82,6 +86,8 @@ def async_load_api(hass): websocket_api.async_register_command(hass, websocket_update_modem_config) websocket_api.async_register_command(hass, websocket_add_device_override) websocket_api.async_register_command(hass, websocket_remove_device_override) + websocket_api.async_register_command(hass, websocket_get_broken_links) + websocket_api.async_register_command(hass, websocket_get_unknown_devices) 
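# A minimal sketch (not part of this patch) of how the newly registered
# "insteon/aldb/notify_all" subscription could be exercised with Home Assistant's
# standard hass_ws_client test fixture; the test name and surrounding setup are
# illustrative and assume the Insteon integration is already loaded.
async def test_notify_all_subscription(hass, hass_ws_client) -> None:
    """Subscribe to ALDB status updates and read the initial push."""
    client = await hass_ws_client(hass)
    await client.send_json({"id": 1, "type": "insteon/aldb/notify_all"})
    result = await client.receive_json()
    assert result["success"]
    # The command pushes a status event right after the result, mirroring
    # websocket_notify_on_aldb_status_all defined below.
    event = await client.receive_json()
    assert event["event"]["type"] == "status"
    assert "is_loading" in event["event"]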
async def async_register_insteon_frontend(hass: HomeAssistant): diff --git a/homeassistant/components/insteon/api/aldb.py b/homeassistant/components/insteon/api/aldb.py index 663dcf4dffd..ffc846fe6c3 100644 --- a/homeassistant/components/insteon/api/aldb.py +++ b/homeassistant/components/insteon/api/aldb.py @@ -11,7 +11,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr from ..const import DEVICE_ADDRESS, ID, INSTEON_DEVICE_NOT_FOUND, TYPE -from .device import async_device_name, notify_device_not_found +from ..utils import async_device_name +from .device import notify_device_not_found ALDB_RECORD = "record" ALDB_RECORD_SCHEMA = vol.Schema( @@ -59,6 +60,13 @@ async def async_reload_and_save_aldb(hass, device): await devices.async_save(workdir=hass.config.config_dir) +def any_aldb_loading() -> bool: + """Identify if any All-Link Databases are loading.""" + return any( + device.aldb.status == ALDBStatus.LOADING for _, device in devices.items() + ) + + @websocket_api.websocket_command( {vol.Required(TYPE): "insteon/aldb/get", vol.Required(DEVICE_ADDRESS): str} ) @@ -293,3 +301,45 @@ async def websocket_notify_on_aldb_status( device.aldb.subscribe_status_changed(aldb_loaded) connection.send_result(msg[ID]) + + +@websocket_api.websocket_command({vol.Required(TYPE): "insteon/aldb/notify_all"}) +@websocket_api.require_admin +@websocket_api.async_response +async def websocket_notify_on_aldb_status_all( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Tell Insteon all ALDBs are loaded.""" + + @callback + def aldb_status_changed(status: ALDBStatus) -> None: + """Forward ALDB loaded event to websocket.""" + + forward_data = { + "type": "status", + "is_loading": any_aldb_loading(), + } + connection.send_message(websocket_api.event_message(msg["id"], forward_data)) + + @callback + def async_cleanup() -> None: + """Remove signal listeners.""" + for device in devices.values(): + device.aldb.unsubscribe_status_changed(aldb_status_changed) + + forward_data = {"type": "unsubscribed"} + connection.send_message(websocket_api.event_message(msg["id"], forward_data)) + + connection.subscriptions[msg["id"]] = async_cleanup + for device in devices.values(): + device.aldb.subscribe_status_changed(aldb_status_changed) + + connection.send_result(msg[ID]) + + forward_data = { + "type": "status", + "is_loading": any_aldb_loading(), + } + connection.send_message(websocket_api.event_message(msg["id"], forward_data)) diff --git a/homeassistant/components/insteon/api/config.py b/homeassistant/components/insteon/api/config.py index 8a617911d1e..70baa4b8ee9 100644 --- a/homeassistant/components/insteon/api/config.py +++ b/homeassistant/components/insteon/api/config.py @@ -6,6 +6,9 @@ from typing import Any, TypedDict from pyinsteon import async_close, async_connect, devices from pyinsteon.address import Address +from pyinsteon.aldb.aldb_record import ALDBRecord +from pyinsteon.constants import LinkStatus +from pyinsteon.managers.link_manager import get_broken_links import voluptuous as vol import voluptuous_serialize @@ -13,6 +16,7 @@ from homeassistant.components import websocket_api from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ADDRESS, CONF_DEVICE from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_send from ..const import ( @@ -34,7 
+38,7 @@ from ..schemas import ( build_plm_manual_schema, build_plm_schema, ) -from ..utils import async_get_usb_ports +from ..utils import async_device_name, async_get_usb_ports HUB_V1_SCHEMA = build_hub_schema(hub_version=1) HUB_V2_SCHEMA = build_hub_schema(hub_version=2) @@ -134,6 +138,30 @@ def remove_device_override(hass: HomeAssistant, address: Address): hass.config_entries.async_update_entry(entry=config_entry, options=new_options) +async def async_link_to_dict( + address: Address, record: ALDBRecord, dev_registry: dr.DeviceRegistry, status=None +) -> dict[str, str | int]: + """Convert a link to a dictionary.""" + link_dict: dict[str, str | int] = {} + device_name = await async_device_name(dev_registry, address) + target_name = await async_device_name(dev_registry, record.target) + link_dict["address"] = str(address) + link_dict["device_name"] = device_name if device_name else str(address) + link_dict["mem_addr"] = record.mem_addr + link_dict["in_use"] = record.is_in_use + link_dict["group"] = record.group + link_dict["is_controller"] = record.is_controller + link_dict["highwater"] = record.is_high_water_mark + link_dict["target"] = str(record.target) + link_dict["target_name"] = target_name if target_name else str(record.target) + link_dict["data1"] = record.data1 + link_dict["data2"] = record.data2 + link_dict["data3"] = record.data3 + if status: + link_dict["status"] = status.name.lower() + return link_dict + + async def _async_connect(**kwargs): """Connect to the Insteon modem.""" if devices.modem: @@ -211,7 +239,7 @@ async def websocket_update_modem_config( """Get the schema for the modem configuration.""" config = msg["config"] config_entry = get_insteon_config_entry(hass) - is_connected = devices.modem.connected + is_connected = devices.modem is not None and devices.modem.connected if not await _async_connect(**config): connection.send_error( @@ -270,3 +298,44 @@ async def websocket_remove_device_override( remove_device_override(hass, address) async_dispatcher_send(hass, SIGNAL_REMOVE_DEVICE_OVERRIDE, address) connection.send_result(msg[ID]) + + +@websocket_api.websocket_command( + {vol.Required(TYPE): "insteon/config/get_broken_links"} +) +@websocket_api.require_admin +@websocket_api.async_response +async def websocket_get_broken_links( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get any broken links between devices.""" + broken_links = get_broken_links(devices=devices) + dev_registry = dr.async_get(hass) + broken_links_list = [ + await async_link_to_dict(address, record, dev_registry, status) + for address, record, status in broken_links + if status != LinkStatus.MISSING_TARGET + ] + connection.send_result(msg[ID], broken_links_list) + + +@websocket_api.websocket_command( + {vol.Required(TYPE): "insteon/config/get_unknown_devices"} +) +@websocket_api.require_admin +@websocket_api.async_response +async def websocket_get_unknown_devices( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get any broken links between devices.""" + broken_links = get_broken_links(devices=devices) + unknown_devices = { + str(record.target) + for _, record, status in broken_links + if status == LinkStatus.MISSING_TARGET + } + connection.send_result(msg[ID], unknown_devices) diff --git a/homeassistant/components/insteon/api/device.py b/homeassistant/components/insteon/api/device.py index ff688eef40c..cd2b992c706 100644 --- 
a/homeassistant/components/insteon/api/device.py +++ b/homeassistant/components/insteon/api/device.py @@ -26,6 +26,7 @@ from ..const import ( TYPE, ) from ..schemas import build_x10_schema +from ..utils import compute_device_name from .config import add_x10_device, remove_device_override, remove_x10_device X10_DEVICE = "x10_device" @@ -33,11 +34,6 @@ X10_DEVICE_SCHEMA = build_x10_schema() REMOVE_ALL_REFS = "remove_all_refs" -def compute_device_name(ha_device): - """Return the HA device name.""" - return ha_device.name_by_user if ha_device.name_by_user else ha_device.name - - async def async_add_devices(address, multiple): """Add one or more Insteon devices.""" async for _ in devices.async_add_device(address=address, multiple=multiple): @@ -52,20 +48,10 @@ def get_insteon_device_from_ha_device(ha_device): return None -async def async_device_name(dev_registry, address): - """Get the Insteon device name from a device registry id.""" - ha_device = dev_registry.async_get_device(identifiers={(DOMAIN, str(address))}) - if not ha_device: - if device := devices[address]: - return f"{device.description} ({device.model})" - return "" - return compute_device_name(ha_device) - - def notify_device_not_found(connection, msg, text): """Notify the caller that the device was not found.""" connection.send_message( - websocket_api.error_message(msg[ID], websocket_api.ERR_NOT_FOUND, text) + websocket_api.error_message(msg[ID], websocket_api.const.ERR_NOT_FOUND, text) ) diff --git a/homeassistant/components/insteon/binary_sensor.py b/homeassistant/components/insteon/binary_sensor.py index fb19d2287cc..abb26b7f8e8 100644 --- a/homeassistant/components/insteon/binary_sensor.py +++ b/homeassistant/components/insteon/binary_sensor.py @@ -25,7 +25,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import SIGNAL_ADD_ENTITIES -from .insteon_entity import InsteonEntity +from .entity import InsteonEntity from .utils import async_add_insteon_devices, async_add_insteon_entities SENSOR_TYPES = { diff --git a/homeassistant/components/insteon/climate.py b/homeassistant/components/insteon/climate.py index ffdd17f3ac0..506841e7efb 100644 --- a/homeassistant/components/insteon/climate.py +++ b/homeassistant/components/insteon/climate.py @@ -23,7 +23,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import SIGNAL_ADD_ENTITIES -from .insteon_entity import InsteonEntity +from .entity import InsteonEntity from .utils import async_add_insteon_devices, async_add_insteon_entities FAN_ONLY = "fan_only" @@ -94,7 +94,6 @@ class InsteonClimateEntity(InsteonEntity, ClimateEntity): _attr_hvac_modes = list(HVAC_MODES.values()) _attr_fan_modes = list(FAN_MODES.values()) _attr_min_humidity = 1 - _enable_turn_on_off_backwards_compatibility = False @property def temperature_unit(self) -> str: diff --git a/homeassistant/components/insteon/config_flow.py b/homeassistant/components/insteon/config_flow.py index 7a701db1b82..143a9e2a5e2 100644 --- a/homeassistant/components/insteon/config_flow.py +++ b/homeassistant/components/insteon/config_flow.py @@ -44,27 +44,27 @@ async def _async_connect(**kwargs): _LOGGER.error("Could not connect to Insteon modem") return False - _LOGGER.info("Connected to Insteon modem") + _LOGGER.debug("Connected to Insteon modem") return True class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): """Insteon config 
flow handler.""" - _device_path: str | None = None - _device_name: str | None = None + _device_path: str + _device_name: str discovered_conf: dict[str, str] = {} async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Init the config flow.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") modem_types = [STEP_PLM, STEP_HUB_V1, STEP_HUB_V2] return self.async_show_menu(step_id="user", menu_options=modem_types) - async def async_step_plm(self, user_input=None): + async def async_step_plm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Set up the PLM modem type.""" errors = {} if user_input is not None: @@ -83,7 +83,9 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): step_id=STEP_PLM, data_schema=data_schema, errors=errors ) - async def async_step_plm_manually(self, user_input=None): + async def async_step_plm_manually( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Set up the PLM modem type manually.""" errors = {} schema_defaults = {} @@ -97,15 +99,21 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): step_id=STEP_PLM_MANUALLY, data_schema=data_schema, errors=errors ) - async def async_step_hubv1(self, user_input=None): + async def async_step_hubv1( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Set up the Hub v1 modem type.""" return await self._async_setup_hub(hub_version=1, user_input=user_input) - async def async_step_hubv2(self, user_input=None): + async def async_step_hubv2( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Set up the Hub v2 modem type.""" return await self._async_setup_hub(hub_version=2, user_input=user_input) - async def _async_setup_hub(self, hub_version, user_input): + async def _async_setup_hub( + self, hub_version: int, user_input: dict[str, Any] | None + ) -> ConfigFlowResult: """Set up the Hub versions 1 and 2.""" errors = {} if user_input is not None: @@ -125,9 +133,6 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): self, discovery_info: usb.UsbServiceInfo ) -> ConfigFlowResult: """Handle USB discovery.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - self._device_path = discovery_info.device self._device_name = usb.human_readable_device_name( discovery_info.device, @@ -144,7 +149,9 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(DEFAULT_DISCOVERY_UNIQUE_ID) return await self.async_step_confirm_usb() - async def async_step_confirm_usb(self, user_input=None) -> ConfigFlowResult: + async def async_step_confirm_usb( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm a USB discovery.""" if user_input is not None: return await self.async_step_plm({CONF_DEVICE: self._device_path}) diff --git a/homeassistant/components/insteon/cover.py b/homeassistant/components/insteon/cover.py index 60c4593f3c5..fe4f484798d 100644 --- a/homeassistant/components/insteon/cover.py +++ b/homeassistant/components/insteon/cover.py @@ -15,7 +15,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import SIGNAL_ADD_ENTITIES -from .insteon_entity import InsteonEntity +from .entity import InsteonEntity from .utils import async_add_insteon_devices, async_add_insteon_entities diff --git a/homeassistant/components/insteon/insteon_entity.py 
b/homeassistant/components/insteon/entity.py similarity index 100% rename from homeassistant/components/insteon/insteon_entity.py rename to homeassistant/components/insteon/entity.py diff --git a/homeassistant/components/insteon/fan.py b/homeassistant/components/insteon/fan.py index 0a31e5915f6..0f1c70b9ea8 100644 --- a/homeassistant/components/insteon/fan.py +++ b/homeassistant/components/insteon/fan.py @@ -17,7 +17,7 @@ from homeassistant.util.percentage import ( ) from .const import SIGNAL_ADD_ENTITIES -from .insteon_entity import InsteonEntity +from .entity import InsteonEntity from .utils import async_add_insteon_devices, async_add_insteon_entities SPEED_RANGE = (1, 255) # off is not included @@ -56,7 +56,6 @@ class InsteonFanEntity(InsteonEntity, FanEntity): | FanEntityFeature.TURN_ON ) _attr_speed_count = 3 - _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int | None: diff --git a/homeassistant/components/insteon/icons.json b/homeassistant/components/insteon/icons.json index 4d015e13b0d..530006ca7d4 100644 --- a/homeassistant/components/insteon/icons.json +++ b/homeassistant/components/insteon/icons.json @@ -1,15 +1,37 @@ { "services": { - "add_all_link": "mdi:link-variant", - "delete_all_link": "mdi:link-variant-remove", - "load_all_link_database": "mdi:database", - "print_all_link_database": "mdi:database-export", - "print_im_all_link_database": "mdi:database-export", - "x10_all_units_off": "mdi:power-off", - "x10_all_lights_on": "mdi:lightbulb-on", - "x10_all_lights_off": "mdi:lightbulb-off", - "scene_on": "mdi:palette", - "scene_off": "mdi:palette-outline", - "add_default_links": "mdi:link-variant-plus" + "add_all_link": { + "service": "mdi:link-variant" + }, + "delete_all_link": { + "service": "mdi:link-variant-remove" + }, + "load_all_link_database": { + "service": "mdi:database" + }, + "print_all_link_database": { + "service": "mdi:database-export" + }, + "print_im_all_link_database": { + "service": "mdi:database-export" + }, + "x10_all_units_off": { + "service": "mdi:power-off" + }, + "x10_all_lights_on": { + "service": "mdi:lightbulb-on" + }, + "x10_all_lights_off": { + "service": "mdi:lightbulb-off" + }, + "scene_on": { + "service": "mdi:palette" + }, + "scene_off": { + "service": "mdi:palette-outline" + }, + "add_default_links": { + "service": "mdi:link-variant-plus" + } } } diff --git a/homeassistant/components/insteon/light.py b/homeassistant/components/insteon/light.py index f6752db3cf1..d19f3cca34a 100644 --- a/homeassistant/components/insteon/light.py +++ b/homeassistant/components/insteon/light.py @@ -13,7 +13,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import SIGNAL_ADD_ENTITIES -from .insteon_entity import InsteonEntity +from .entity import InsteonEntity from .utils import async_add_insteon_devices, async_add_insteon_entities MAX_BRIGHTNESS = 255 diff --git a/homeassistant/components/insteon/lock.py b/homeassistant/components/insteon/lock.py index 27fb0fd42d8..d5f30eacbac 100644 --- a/homeassistant/components/insteon/lock.py +++ b/homeassistant/components/insteon/lock.py @@ -10,7 +10,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import SIGNAL_ADD_ENTITIES -from .insteon_entity import InsteonEntity +from .entity import InsteonEntity from .utils import async_add_insteon_devices, async_add_insteon_entities 
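# The config-flow hunks above drop the manual "single_instance_allowed" aborts from the
# user and USB discovery steps; the "single_config_entry": true flag added to
# manifest.json below (together with the removed abort string) lets Home Assistant core
# block a second config entry before a flow starts. A condensed, illustrative sketch of
# the resulting pattern (simplified from InsteonFlowHandler above, not part of the
# patch; do not register it alongside the real handler):
from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult


class SketchFlowHandler(ConfigFlow, domain="insteon"):
    """Config flow relying on single_config_entry in manifest.json."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show the modem-type menu."""
        # No self._async_current_entries() guard is needed here any more;
        # the menu options map to async_step_plm/hubv1/hubv2 on the real handler.
        return self.async_show_menu(
            step_id="user", menu_options=["plm", "hubv1", "hubv2"]
        )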
diff --git a/homeassistant/components/insteon/manifest.json b/homeassistant/components/insteon/manifest.json index c5791573195..c9127640250 100644 --- a/homeassistant/components/insteon/manifest.json +++ b/homeassistant/components/insteon/manifest.json @@ -20,6 +20,7 @@ "pyinsteon==1.6.3", "insteon-frontend-home-assistant==0.5.0" ], + "single_config_entry": true, "usb": [ { "vid": "10BF" diff --git a/homeassistant/components/insteon/strings.json b/homeassistant/components/insteon/strings.json index 37cdd5c0343..4df997ac939 100644 --- a/homeassistant/components/insteon/strings.json +++ b/homeassistant/components/insteon/strings.json @@ -44,7 +44,6 @@ }, "abort": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "not_insteon_device": "Discovered device not an Insteon device" } }, @@ -113,7 +112,7 @@ "services": { "add_all_link": { "name": "Add all link", - "description": "Tells the Insteom Modem (IM) start All-Linking mode. Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.", + "description": "Tells the Insteon Modem (IM) start All-Linking mode. Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.", "fields": { "group": { "name": "Group", diff --git a/homeassistant/components/insteon/switch.py b/homeassistant/components/insteon/switch.py index b60729232f2..67ce5fa8c0d 100644 --- a/homeassistant/components/insteon/switch.py +++ b/homeassistant/components/insteon/switch.py @@ -10,7 +10,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import SIGNAL_ADD_ENTITIES -from .insteon_entity import InsteonEntity +from .entity import InsteonEntity from .utils import async_add_insteon_devices, async_add_insteon_entities diff --git a/homeassistant/components/insteon/utils.py b/homeassistant/components/insteon/utils.py index 26d1aab4928..5b1d6379328 100644 --- a/homeassistant/components/insteon/utils.py +++ b/homeassistant/components/insteon/utils.py @@ -98,7 +98,7 @@ from .schemas import ( ) if TYPE_CHECKING: - from .insteon_entity import InsteonEntity + from .entity import InsteonEntity _LOGGER = logging.getLogger(__name__) @@ -471,3 +471,18 @@ def get_usb_ports() -> dict[str, str]: async def async_get_usb_ports(hass: HomeAssistant) -> dict[str, str]: """Return a dict of USB ports and their friendly names.""" return await hass.async_add_executor_job(get_usb_ports) + + +def compute_device_name(ha_device) -> str: + """Return the HA device name.""" + return ha_device.name_by_user if ha_device.name_by_user else ha_device.name + + +async def async_device_name(dev_registry: dr.DeviceRegistry, address: Address) -> str: + """Get the Insteon device name from a device registry id.""" + ha_device = dev_registry.async_get_device(identifiers={(DOMAIN, str(address))}) + if not ha_device: + if device := devices[address]: + return f"{device.description} ({device.model})" + return "" + return compute_device_name(ha_device) diff --git a/homeassistant/components/integration/strings.json b/homeassistant/components/integration/strings.json index 6186521aa1b..ed4f5de3ea7 100644 --- a/homeassistant/components/integration/strings.json +++ b/homeassistant/components/integration/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Riemann sum integral sensor", + "title": "Create Riemann sum integral 
sensor", "description": "Create a sensor that calculates a Riemann sum to estimate the integral of a sensor.", "data": { "method": "Integration method", diff --git a/homeassistant/components/intellifire/__init__.py b/homeassistant/components/intellifire/__init__.py index 7af472c8745..7609398673b 100644 --- a/homeassistant/components/intellifire/__init__.py +++ b/homeassistant/components/intellifire/__init__.py @@ -2,15 +2,17 @@ from __future__ import annotations -from aiohttp import ClientConnectionError -from intellifire4py import IntellifireControlAsync -from intellifire4py.exceptions import LoginException -from intellifire4py.intellifire import IntellifireAPICloud, IntellifireAPILocal +import asyncio + +from intellifire4py import UnifiedFireplace +from intellifire4py.cloud_interface import IntelliFireCloudInterface +from intellifire4py.model import IntelliFireCommonFireplaceData from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, CONF_HOST, + CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME, Platform, @@ -18,7 +20,18 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from .const import CONF_USER_ID, DOMAIN, LOGGER +from .const import ( + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_USER_ID, + CONF_WEB_CLIENT_ID, + DOMAIN, + INIT_WAIT_TIME_SECONDS, + LOGGER, + STARTUP_TIMEOUT, +) from .coordinator import IntellifireDataUpdateCoordinator PLATFORMS = [ @@ -32,79 +45,114 @@ PLATFORMS = [ ] +def _construct_common_data(entry: ConfigEntry) -> IntelliFireCommonFireplaceData: + """Convert config entry data into IntelliFireCommonFireplaceData.""" + + return IntelliFireCommonFireplaceData( + auth_cookie=entry.data[CONF_AUTH_COOKIE], + user_id=entry.data[CONF_USER_ID], + web_client_id=entry.data[CONF_WEB_CLIENT_ID], + serial=entry.data[CONF_SERIAL], + api_key=entry.data[CONF_API_KEY], + ip_address=entry.data[CONF_IP_ADDRESS], + read_mode=entry.options[CONF_READ_MODE], + control_mode=entry.options[CONF_CONTROL_MODE], + ) + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate entries.""" + LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + if config_entry.version == 1: + new = {**config_entry.data} + + if config_entry.minor_version < 2: + username = config_entry.data[CONF_USERNAME] + password = config_entry.data[CONF_PASSWORD] + + # Create a Cloud Interface + async with IntelliFireCloudInterface() as cloud_interface: + await cloud_interface.login_with_credentials( + username=username, password=password + ) + + new_data = cloud_interface.user_data.get_data_for_ip(new[CONF_HOST]) + + if not new_data: + raise ConfigEntryAuthFailed + new[CONF_API_KEY] = new_data.api_key + new[CONF_WEB_CLIENT_ID] = new_data.web_client_id + new[CONF_AUTH_COOKIE] = new_data.auth_cookie + + new[CONF_IP_ADDRESS] = new_data.ip_address + new[CONF_SERIAL] = new_data.serial + + hass.config_entries.async_update_entry( + config_entry, + data=new, + options={CONF_READ_MODE: "local", CONF_CONTROL_MODE: "local"}, + unique_id=new[CONF_SERIAL], + version=1, + minor_version=2, + ) + LOGGER.debug("Pseudo Migration %s successful", config_entry.version) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up IntelliFire from a config entry.""" - LOGGER.debug("Setting up config 
entry: %s", entry.unique_id) if CONF_USERNAME not in entry.data: - LOGGER.debug("Old config entry format detected: %s", entry.unique_id) + LOGGER.debug("Config entry without username detected: %s", entry.unique_id) raise ConfigEntryAuthFailed - ift_control = IntellifireControlAsync( - fireplace_ip=entry.data[CONF_HOST], - ) try: - await ift_control.login( - username=entry.data[CONF_USERNAME], - password=entry.data[CONF_PASSWORD], + fireplace: UnifiedFireplace = ( + await UnifiedFireplace.build_fireplace_from_common( + _construct_common_data(entry) + ) ) - except (ConnectionError, ClientConnectionError) as err: - raise ConfigEntryNotReady from err - except LoginException as err: - raise ConfigEntryAuthFailed(err) from err - - finally: - await ift_control.close() - - # Extract API Key and User_ID from ift_control - # Eventually this will migrate to using IntellifireAPICloud - - if CONF_USER_ID not in entry.data or CONF_API_KEY not in entry.data: - LOGGER.info( - "Updating intellifire config entry for %s with api information", - entry.unique_id, - ) - cloud_api = IntellifireAPICloud() - await cloud_api.login( - username=entry.data[CONF_USERNAME], - password=entry.data[CONF_PASSWORD], - ) - api_key = cloud_api.get_fireplace_api_key() - user_id = cloud_api.get_user_id() - # Update data entry - hass.config_entries.async_update_entry( - entry, - data={ - **entry.data, - CONF_API_KEY: api_key, - CONF_USER_ID: user_id, - }, + LOGGER.debug("Waiting for Fireplace to Initialize") + await asyncio.wait_for( + _async_wait_for_initialization(fireplace), timeout=STARTUP_TIMEOUT ) + except TimeoutError as err: + raise ConfigEntryNotReady( + "Initialization of fireplace timed out after 10 minutes" + ) from err - else: - api_key = entry.data[CONF_API_KEY] - user_id = entry.data[CONF_USER_ID] - - # Instantiate local control - api = IntellifireAPILocal( - fireplace_ip=entry.data[CONF_HOST], - api_key=api_key, - user_id=user_id, + # Construct coordinator + data_update_coordinator = IntellifireDataUpdateCoordinator( + hass=hass, fireplace=fireplace ) - # Define the update coordinator - coordinator = IntellifireDataUpdateCoordinator( - hass=hass, - api=api, - ) + LOGGER.debug("Fireplace to Initialized - Awaiting first refresh") + await data_update_coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = data_update_coordinator - await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True +async def _async_wait_for_initialization( + fireplace: UnifiedFireplace, timeout=STARTUP_TIMEOUT +): + """Wait for a fireplace to be initialized.""" + while ( + fireplace.data.ipv4_address == "127.0.0.1" and fireplace.data.serial == "unset" + ): + LOGGER.debug(f"Waiting for fireplace to initialize [{fireplace.read_mode}]") + await asyncio.sleep(INIT_WAIT_TIME_SECONDS) + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): diff --git a/homeassistant/components/intellifire/binary_sensor.py b/homeassistant/components/intellifire/binary_sensor.py index a1b8865c876..7d00bdfc26d 100644 --- a/homeassistant/components/intellifire/binary_sensor.py +++ b/homeassistant/components/intellifire/binary_sensor.py @@ -5,8 +5,6 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import 
dataclass -from intellifire4py import IntellifirePollData - from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, @@ -26,7 +24,7 @@ from .entity import IntellifireEntity class IntellifireBinarySensorRequiredKeysMixin: """Mixin for required keys.""" - value_fn: Callable[[IntellifirePollData], bool] + value_fn: Callable[[IntellifireDataUpdateCoordinator], bool | None] @dataclass(frozen=True) @@ -40,100 +38,114 @@ INTELLIFIRE_BINARY_SENSORS: tuple[IntellifireBinarySensorEntityDescription, ...] IntellifireBinarySensorEntityDescription( key="on_off", # This is the sensor name translation_key="flame", # This is the translation key - value_fn=lambda data: data.is_on, + value_fn=lambda coordinator: coordinator.data.is_on, ), IntellifireBinarySensorEntityDescription( key="timer_on", translation_key="timer_on", - value_fn=lambda data: data.timer_on, + value_fn=lambda coordinator: coordinator.data.timer_on, ), IntellifireBinarySensorEntityDescription( key="pilot_light_on", translation_key="pilot_light_on", - value_fn=lambda data: data.pilot_on, + value_fn=lambda coordinator: coordinator.data.pilot_on, ), IntellifireBinarySensorEntityDescription( key="thermostat_on", translation_key="thermostat_on", - value_fn=lambda data: data.thermostat_on, + value_fn=lambda coordinator: coordinator.data.thermostat_on, ), IntellifireBinarySensorEntityDescription( key="error_pilot_flame", translation_key="pilot_flame_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_pilot_flame, + value_fn=lambda coordinator: coordinator.data.error_pilot_flame, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_flame", translation_key="flame_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_flame, + value_fn=lambda coordinator: coordinator.data.error_flame, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_fan_delay", translation_key="fan_delay_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_fan_delay, + value_fn=lambda coordinator: coordinator.data.error_fan_delay, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_maintenance", translation_key="maintenance_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_maintenance, + value_fn=lambda coordinator: coordinator.data.error_maintenance, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_disabled", translation_key="disabled_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_disabled, + value_fn=lambda coordinator: coordinator.data.error_disabled, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_fan", translation_key="fan_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_fan, + value_fn=lambda coordinator: coordinator.data.error_fan, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_lights", translation_key="lights_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_lights, + value_fn=lambda coordinator: coordinator.data.error_lights, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_accessory", translation_key="accessory_error", 
entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_accessory, + value_fn=lambda coordinator: coordinator.data.error_accessory, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_soft_lock_out", translation_key="soft_lock_out_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_soft_lock_out, + value_fn=lambda coordinator: coordinator.data.error_soft_lock_out, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_ecm_offline", translation_key="ecm_offline_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_ecm_offline, + value_fn=lambda coordinator: coordinator.data.error_ecm_offline, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_offline", translation_key="offline_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_offline, + value_fn=lambda coordinator: coordinator.data.error_offline, device_class=BinarySensorDeviceClass.PROBLEM, ), + IntellifireBinarySensorEntityDescription( + key="local_connectivity", + translation_key="local_connectivity", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + value_fn=lambda coordinator: coordinator.fireplace.local_connectivity, + ), + IntellifireBinarySensorEntityDescription( + key="cloud_connectivity", + translation_key="cloud_connectivity", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + value_fn=lambda coordinator: coordinator.fireplace.cloud_connectivity, + ), ) @@ -157,6 +169,6 @@ class IntellifireBinarySensor(IntellifireEntity, BinarySensorEntity): entity_description: IntellifireBinarySensorEntityDescription @property - def is_on(self) -> bool: + def is_on(self) -> bool | None: """Use this to get the correct value.""" - return self.entity_description.value_fn(self.coordinator.read_api.data) + return self.entity_description.value_fn(self.coordinator) diff --git a/homeassistant/components/intellifire/climate.py b/homeassistant/components/intellifire/climate.py index ed4facffc67..f72df254424 100644 --- a/homeassistant/components/intellifire/climate.py +++ b/homeassistant/components/intellifire/climate.py @@ -58,7 +58,6 @@ class IntellifireClimate(IntellifireEntity, ClimateEntity): _attr_target_temperature_step = 1.0 _attr_temperature_unit = UnitOfTemperature.CELSIUS last_temp = DEFAULT_THERMOSTAT_TEMP - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -69,7 +68,7 @@ class IntellifireClimate(IntellifireEntity, ClimateEntity): super().__init__(coordinator, description) if coordinator.data.thermostat_on: - self.last_temp = coordinator.data.thermostat_setpoint_c + self.last_temp = int(coordinator.data.thermostat_setpoint_c) @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/intellifire/config_flow.py b/homeassistant/components/intellifire/config_flow.py index 268fc6623d3..a6b63f3b3e8 100644 --- a/homeassistant/components/intellifire/config_flow.py +++ b/homeassistant/components/intellifire/config_flow.py @@ -7,16 +7,33 @@ from dataclasses import dataclass from typing import Any from aiohttp import ClientConnectionError -from intellifire4py import AsyncUDPFireplaceFinder -from intellifire4py.exceptions import LoginException -from intellifire4py.intellifire import IntellifireAPICloud, IntellifireAPILocal +from 
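# Generic sketch of the entity-description pattern these binary_sensor changes
# rely on: each description carries a value_fn, and one entity class simply
# calls it. Passing the whole coordinator (instead of coordinator.data) lets a
# description read either polled data or fireplace-level flags such as
# local/cloud connectivity. These classes are simplified stand-ins for the
# Home Assistant ones (the real connectivity descriptions read
# coordinator.fireplace.local_connectivity), not the actual API.
from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass


@dataclass
class FakeData:
    is_on: bool = True
    error_offline: bool = False


@dataclass
class FakeCoordinator:
    data: FakeData
    local_connectivity: bool = True


@dataclass(frozen=True)
class FakeSensorDescription:
    key: str
    value_fn: Callable[[FakeCoordinator], bool | None]


DESCRIPTIONS = (
    FakeSensorDescription(key="on_off", value_fn=lambda c: c.data.is_on),
    FakeSensorDescription(key="offline_error", value_fn=lambda c: c.data.error_offline),
    FakeSensorDescription(key="local_connectivity", value_fn=lambda c: c.local_connectivity),
)


class FakeBinarySensor:
    def __init__(self, coordinator: FakeCoordinator, description: FakeSensorDescription) -> None:
        self.coordinator = coordinator
        self.entity_description = description

    @property
    def is_on(self) -> bool | None:
        return self.entity_description.value_fn(self.coordinator)


if __name__ == "__main__":
    coordinator = FakeCoordinator(data=FakeData())
    for description in DESCRIPTIONS:
        print(description.key, FakeBinarySensor(coordinator, description).is_on)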
intellifire4py.cloud_interface import IntelliFireCloudInterface +from intellifire4py.exceptions import LoginError +from intellifire4py.local_api import IntelliFireAPILocal +from intellifire4py.model import IntelliFireCommonFireplaceData import voluptuous as vol from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + CONF_API_KEY, + CONF_HOST, + CONF_IP_ADDRESS, + CONF_PASSWORD, + CONF_USERNAME, +) -from .const import CONF_USER_ID, DOMAIN, LOGGER +from .const import ( + API_MODE_LOCAL, + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_USER_ID, + CONF_WEB_CLIENT_ID, + DOMAIN, + LOGGER, +) STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) @@ -31,17 +48,20 @@ class DiscoveredHostInfo: serial: str | None -async def validate_host_input(host: str, dhcp_mode: bool = False) -> str: +async def _async_poll_local_fireplace_for_serial( + host: str, dhcp_mode: bool = False +) -> str: """Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ LOGGER.debug("Instantiating IntellifireAPI with host: [%s]", host) - api = IntellifireAPILocal(fireplace_ip=host) + api = IntelliFireAPILocal(fireplace_ip=host) await api.poll(suppress_warnings=dhcp_mode) serial = api.data.serial LOGGER.debug("Found a fireplace: %s", serial) + # Return the serial number which will be used to calculate a unique ID for the device/sensors return serial @@ -50,239 +70,193 @@ class IntelliFireConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for IntelliFire.""" VERSION = 1 + MINOR_VERSION = 2 def __init__(self) -> None: """Initialize the Config Flow Handler.""" - self._host: str = "" - self._serial: str = "" - self._not_configured_hosts: list[DiscoveredHostInfo] = [] + + # DHCP Variables + self._dhcp_discovered_serial: str = "" # used only in discovery mode self._discovered_host: DiscoveredHostInfo + self._dhcp_mode = False + + self._not_configured_hosts: list[DiscoveredHostInfo] = [] self._reauth_needed: DiscoveredHostInfo - async def _find_fireplaces(self): - """Perform UDP discovery.""" - fireplace_finder = AsyncUDPFireplaceFinder() - discovered_hosts = await fireplace_finder.search_fireplace(timeout=12) - configured_hosts = { - entry.data[CONF_HOST] - for entry in self._async_current_entries(include_ignore=False) - if CONF_HOST in entry.data # CONF_HOST will be missing for ignored entries - } + self._configured_serials: list[str] = [] - self._not_configured_hosts = [ - DiscoveredHostInfo(ip, None) - for ip in discovered_hosts - if ip not in configured_hosts - ] - LOGGER.debug("Discovered Hosts: %s", discovered_hosts) - LOGGER.debug("Configured Hosts: %s", configured_hosts) - LOGGER.debug("Not Configured Hosts: %s", self._not_configured_hosts) - - async def validate_api_access_and_create_or_update( - self, *, host: str, username: str, password: str, serial: str - ): - """Validate username/password against api.""" - LOGGER.debug("Attempting login to iftapi with: %s", username) - - ift_cloud = IntellifireAPICloud() - await ift_cloud.login(username=username, password=password) - api_key = ift_cloud.get_fireplace_api_key() - user_id = ift_cloud.get_user_id() - - data = { - CONF_HOST: host, - CONF_PASSWORD: password, - 
CONF_USERNAME: username, - CONF_API_KEY: api_key, - CONF_USER_ID: user_id, - } - - # Update or Create - existing_entry = await self.async_set_unique_id(serial) - if existing_entry: - self.hass.config_entries.async_update_entry(existing_entry, data=data) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") - return self.async_create_entry(title=f"Fireplace {serial}", data=data) - - async def async_step_api_config( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Configure API access.""" - - errors = {} - control_schema = vol.Schema( - { - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, - } - ) - - if user_input is not None: - control_schema = vol.Schema( - { - vol.Required( - CONF_USERNAME, default=user_input.get(CONF_USERNAME, "") - ): str, - vol.Required( - CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "") - ): str, - } - ) - - try: - return await self.validate_api_access_and_create_or_update( - host=self._host, - username=user_input[CONF_USERNAME], - password=user_input[CONF_PASSWORD], - serial=self._serial, - ) - - except (ConnectionError, ClientConnectionError): - errors["base"] = "iftapi_connect" - LOGGER.error( - "Could not connect to iftapi.net over https - verify connectivity" - ) - except LoginException: - errors["base"] = "api_error" - LOGGER.error("Invalid credentials for iftapi.net") - - return self.async_show_form( - step_id="api_config", errors=errors, data_schema=control_schema - ) - - async def _async_validate_ip_and_continue(self, host: str) -> ConfigFlowResult: - """Validate local config and continue.""" - self._async_abort_entries_match({CONF_HOST: host}) - self._serial = await validate_host_input(host) - await self.async_set_unique_id(self._serial, raise_on_progress=False) - self._abort_if_unique_id_configured(updates={CONF_HOST: host}) - # Store current data and jump to next stage - self._host = host - - return await self.async_step_api_config() - - async def async_step_manual_device_entry(self, user_input=None): - """Handle manual input of local IP configuration.""" - LOGGER.debug("STEP: manual_device_entry") - errors = {} - self._host = user_input.get(CONF_HOST) if user_input else None - if user_input is not None: - try: - return await self._async_validate_ip_and_continue(self._host) - except (ConnectionError, ClientConnectionError): - errors["base"] = "cannot_connect" - - return self.async_show_form( - step_id="manual_device_entry", - errors=errors, - data_schema=vol.Schema({vol.Required(CONF_HOST, default=self._host): str}), - ) - - async def async_step_pick_device( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Pick which device to configure.""" - errors = {} - LOGGER.debug("STEP: pick_device") - - if user_input is not None: - if user_input[CONF_HOST] == MANUAL_ENTRY_STRING: - return await self.async_step_manual_device_entry() - - try: - return await self._async_validate_ip_and_continue(user_input[CONF_HOST]) - except (ConnectionError, ClientConnectionError): - errors["base"] = "cannot_connect" - - return self.async_show_form( - step_id="pick_device", - errors=errors, - data_schema=vol.Schema( - { - vol.Required(CONF_HOST): vol.In( - [host.ip for host in self._not_configured_hosts] - + [MANUAL_ENTRY_STRING] - ) - } - ), - ) + # Define a cloud api interface we can use + self.cloud_api_interface = IntelliFireCloudInterface() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> 
ConfigFlowResult: """Start the user flow.""" - # Launch fireplaces discovery - await self._find_fireplaces() - LOGGER.debug("STEP: user") - if self._not_configured_hosts: - LOGGER.debug("Running Step: pick_device") - return await self.async_step_pick_device() - LOGGER.debug("Running Step: manual_device_entry") - return await self.async_step_manual_device_entry() + current_entries = self._async_current_entries(include_ignore=False) + self._configured_serials = [ + entry.data[CONF_SERIAL] for entry in current_entries + ] + + return await self.async_step_cloud_api() + + async def async_step_cloud_api( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Authenticate against IFTAPI Cloud in order to see configured devices. + + Local control of IntelliFire devices requires that the user download the correct API KEY which is only available on the cloud. Cloud control of the devices requires the user has at least once authenticated against the cloud and a set of cookie variables have been stored locally. + + """ + errors: dict[str, str] = {} + LOGGER.debug("STEP: cloud_api") + + if user_input is not None: + try: + async with self.cloud_api_interface as cloud_interface: + await cloud_interface.login_with_credentials( + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + ) + + # If login was successful pass username/password to next step + return await self.async_step_pick_cloud_device() + except LoginError: + errors["base"] = "api_error" + + return self.async_show_form( + step_id="cloud_api", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } + ), + ) + + async def async_step_pick_cloud_device( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Step to select a device from the cloud. + + We can only get here if we have logged in. If there is only one device available it will be auto-configured, + else the user will be given a choice to pick a device. 
+ """ + errors: dict[str, str] = {} + LOGGER.debug( + f"STEP: pick_cloud_device: {user_input} - DHCP_MODE[{self._dhcp_mode}" + ) + + if self._dhcp_mode or user_input is not None: + if self._dhcp_mode: + serial = self._dhcp_discovered_serial + LOGGER.debug(f"DHCP Mode detected for serial [{serial}]") + if user_input is not None: + serial = user_input[CONF_SERIAL] + + # Run a unique ID Check prior to anything else + await self.async_set_unique_id(serial) + self._abort_if_unique_id_configured(updates={CONF_SERIAL: serial}) + + # If Serial is Good obtain fireplace and configure + fireplace = self.cloud_api_interface.user_data.get_data_for_serial(serial) + if fireplace: + return await self._async_create_config_entry_from_common_data( + fireplace=fireplace + ) + + # Parse User Data to see if we auto-configure or prompt for selection: + user_data = self.cloud_api_interface.user_data + + available_fireplaces: list[IntelliFireCommonFireplaceData] = [ + fp + for fp in user_data.fireplaces + if fp.serial not in self._configured_serials + ] + + # Abort if all devices have been configured + if not available_fireplaces: + return self.async_abort(reason="no_available_devices") + + # If there is a single fireplace configure it + if len(available_fireplaces) == 1: + return await self._async_create_config_entry_from_common_data( + fireplace=available_fireplaces[0] + ) + + return self.async_show_form( + step_id="pick_cloud_device", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_SERIAL): vol.In( + [fp.serial for fp in available_fireplaces] + ) + } + ), + ) + + async def _async_create_config_entry_from_common_data( + self, fireplace: IntelliFireCommonFireplaceData + ) -> ConfigFlowResult: + """Construct a config entry based on an object of IntelliFireCommonFireplaceData.""" + + data = { + CONF_IP_ADDRESS: fireplace.ip_address, + CONF_API_KEY: fireplace.api_key, + CONF_SERIAL: fireplace.serial, + CONF_AUTH_COOKIE: fireplace.auth_cookie, + CONF_WEB_CLIENT_ID: fireplace.web_client_id, + CONF_USER_ID: fireplace.user_id, + CONF_USERNAME: self.cloud_api_interface.user_data.username, + CONF_PASSWORD: self.cloud_api_interface.user_data.password, + } + + options = {CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_LOCAL} + + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data, options=options + ) + return self.async_create_entry( + title=f"Fireplace {fireplace.serial}", data=data, options=options + ) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" LOGGER.debug("STEP: reauth") - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - assert entry.unique_id # populate the expected vars - self._serial = entry.unique_id - self._host = entry.data[CONF_HOST] + self._dhcp_discovered_serial = self._get_reauth_entry().data[CONF_SERIAL] - placeholders = {CONF_HOST: self._host, "serial": self._serial} + placeholders = {"serial": self._dhcp_discovered_serial} self.context["title_placeholders"] = placeholders - return await self.async_step_api_config() + + return await self.async_step_cloud_api() async def async_step_dhcp( self, discovery_info: DhcpServiceInfo ) -> ConfigFlowResult: """Handle DHCP Discovery.""" + self._dhcp_mode = True # Run validation logic on ip - host = discovery_info.ip - LOGGER.debug("STEP: dhcp for host %s", host) + ip_address = discovery_info.ip + LOGGER.debug("STEP: dhcp for ip_address 
%s", ip_address) - self._async_abort_entries_match({CONF_HOST: host}) + self._async_abort_entries_match({CONF_IP_ADDRESS: ip_address}) try: - self._serial = await validate_host_input(host, dhcp_mode=True) + self._dhcp_discovered_serial = await _async_poll_local_fireplace_for_serial( + ip_address, dhcp_mode=True + ) except (ConnectionError, ClientConnectionError): LOGGER.debug( - "DHCP Discovery has determined %s is not an IntelliFire device", host + "DHCP Discovery has determined %s is not an IntelliFire device", + ip_address, ) return self.async_abort(reason="not_intellifire_device") - await self.async_set_unique_id(self._serial) - self._abort_if_unique_id_configured(updates={CONF_HOST: host}) - self._discovered_host = DiscoveredHostInfo(ip=host, serial=self._serial) - - placeholders = {CONF_HOST: host, "serial": self._serial} - self.context["title_placeholders"] = placeholders - self._set_confirm_only() - - return await self.async_step_dhcp_confirm() - - async def async_step_dhcp_confirm(self, user_input=None): - """Attempt to confirm.""" - - LOGGER.debug("STEP: dhcp_confirm") - # Add the hosts one by one - host = self._discovered_host.ip - serial = self._discovered_host.serial - - if user_input is None: - # Show the confirmation dialog - return self.async_show_form( - step_id="dhcp_confirm", - description_placeholders={CONF_HOST: host, "serial": serial}, - ) - - return self.async_create_entry( - title=f"Fireplace {serial}", - data={CONF_HOST: host}, - ) + return await self.async_step_cloud_api() diff --git a/homeassistant/components/intellifire/const.py b/homeassistant/components/intellifire/const.py index 5c8af1eefe9..f194eeaf4e2 100644 --- a/homeassistant/components/intellifire/const.py +++ b/homeassistant/components/intellifire/const.py @@ -5,11 +5,22 @@ from __future__ import annotations import logging DOMAIN = "intellifire" - -CONF_USER_ID = "user_id" - LOGGER = logging.getLogger(__package__) +DEFAULT_THERMOSTAT_TEMP = 21 + +CONF_USER_ID = "user_id" # part of the cloud cookie +CONF_WEB_CLIENT_ID = "web_client_id" # part of the cloud cookie +CONF_AUTH_COOKIE = "auth_cookie" # part of the cloud cookie CONF_SERIAL = "serial" +CONF_READ_MODE = "cloud_read" +CONF_CONTROL_MODE = "cloud_control" -DEFAULT_THERMOSTAT_TEMP = 21 + +API_MODE_LOCAL = "local" +API_MODE_CLOUD = "cloud" + + +STARTUP_TIMEOUT = 600 + +INIT_WAIT_TIME_SECONDS = 10 diff --git a/homeassistant/components/intellifire/coordinator.py b/homeassistant/components/intellifire/coordinator.py index 0a46ff61435..b4f03f4b5c8 100644 --- a/homeassistant/components/intellifire/coordinator.py +++ b/homeassistant/components/intellifire/coordinator.py @@ -2,27 +2,27 @@ from __future__ import annotations -import asyncio from datetime import timedelta -from aiohttp import ClientConnectionError -from intellifire4py import IntellifirePollData -from intellifire4py.intellifire import IntellifireAPILocal +from intellifire4py import UnifiedFireplace +from intellifire4py.control import IntelliFireController +from intellifire4py.model import IntelliFirePollData +from intellifire4py.read import IntelliFireDataProvider from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN, LOGGER -class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData]): +class 
IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntelliFirePollData]): """Class to manage the polling of the fireplace API.""" def __init__( self, hass: HomeAssistant, - api: IntellifireAPILocal, + fireplace: UnifiedFireplace, ) -> None: """Initialize the Coordinator.""" super().__init__( @@ -31,36 +31,21 @@ class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData name=DOMAIN, update_interval=timedelta(seconds=15), ) - self._api = api - async def _async_update_data(self) -> IntellifirePollData: - if not self._api.is_polling_in_background: - LOGGER.info("Starting Intellifire Background Polling Loop") - await self._api.start_background_polling() - - # Don't return uninitialized poll data - async with asyncio.timeout(15): - try: - await self._api.poll() - except (ConnectionError, ClientConnectionError) as exception: - raise UpdateFailed from exception - - LOGGER.debug("Failure Count %d", self._api.failed_poll_attempts) - if self._api.failed_poll_attempts > 10: - LOGGER.debug("Too many polling errors - raising exception") - raise UpdateFailed - - return self._api.data + self.fireplace = fireplace @property - def read_api(self) -> IntellifireAPILocal: + def read_api(self) -> IntelliFireDataProvider: """Return the Status API pointer.""" - return self._api + return self.fireplace.read_api @property - def control_api(self) -> IntellifireAPILocal: + def control_api(self) -> IntelliFireController: """Return the control API.""" - return self._api + return self.fireplace.control_api + + async def _async_update_data(self) -> IntelliFirePollData: + return self.fireplace.data @property def device_info(self) -> DeviceInfo: @@ -69,7 +54,6 @@ class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData manufacturer="Hearth and Home", model="IFT-WFM", name="IntelliFire", - identifiers={("IntelliFire", f"{self.read_api.data.serial}]")}, - sw_version=self.read_api.data.fw_ver_str, - configuration_url=f"http://{self._api.fireplace_ip}/poll", + identifiers={("IntelliFire", str(self.fireplace.serial))}, + configuration_url=f"http://{self.fireplace.ip_address}/poll", ) diff --git a/homeassistant/components/intellifire/entity.py b/homeassistant/components/intellifire/entity.py index 3b35c9dabd8..571c4717ac2 100644 --- a/homeassistant/components/intellifire/entity.py +++ b/homeassistant/components/intellifire/entity.py @@ -9,7 +9,7 @@ from . 
import IntellifireDataUpdateCoordinator class IntellifireEntity(CoordinatorEntity[IntellifireDataUpdateCoordinator]): - """Define a generic class for Intellifire entities.""" + """Define a generic class for IntelliFire entities.""" _attr_attribution = "Data provided by unpublished Intellifire API" _attr_has_entity_name = True @@ -22,6 +22,8 @@ class IntellifireEntity(CoordinatorEntity[IntellifireDataUpdateCoordinator]): """Class initializer.""" super().__init__(coordinator=coordinator) self.entity_description = description - self._attr_unique_id = f"{description.key}_{coordinator.read_api.data.serial}" + self._attr_unique_id = f"{description.key}_{coordinator.fireplace.serial}" + self.identifiers = ({("IntelliFire", f"{coordinator.fireplace.serial}]")},) + # Configure the Device Info self._attr_device_info = self.coordinator.device_info diff --git a/homeassistant/components/intellifire/fan.py b/homeassistant/components/intellifire/fan.py index f68827b0a56..c5bec07faaa 100644 --- a/homeassistant/components/intellifire/fan.py +++ b/homeassistant/components/intellifire/fan.py @@ -7,7 +7,8 @@ from dataclasses import dataclass import math from typing import Any -from intellifire4py import IntellifireControlAsync, IntellifirePollData +from intellifire4py.control import IntelliFireController +from intellifire4py.model import IntelliFirePollData from homeassistant.components.fan import ( FanEntity, @@ -31,8 +32,8 @@ from .entity import IntellifireEntity class IntellifireFanRequiredKeysMixin: """Required keys for fan entity.""" - set_fn: Callable[[IntellifireControlAsync, int], Awaitable] - value_fn: Callable[[IntellifirePollData], bool] + set_fn: Callable[[IntelliFireController, int], Awaitable] + value_fn: Callable[[IntelliFirePollData], int] speed_range: tuple[int, int] @@ -80,7 +81,6 @@ class IntellifireFan(IntellifireEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False @property def is_on(self) -> bool: @@ -91,7 +91,8 @@ class IntellifireFan(IntellifireEntity, FanEntity): def percentage(self) -> int | None: """Return fan percentage.""" return ranged_value_to_percentage( - self.entity_description.speed_range, self.coordinator.read_api.data.fanspeed + self.entity_description.speed_range, + self.coordinator.read_api.data.fanspeed, ) @property diff --git a/homeassistant/components/intellifire/icons.json b/homeassistant/components/intellifire/icons.json index 6dca69484b6..fd6a2c149a7 100644 --- a/homeassistant/components/intellifire/icons.json +++ b/homeassistant/components/intellifire/icons.json @@ -18,6 +18,20 @@ }, "fan_error": { "default": "mdi:fan-alert" + }, + "local_connectivity": { + "default": "mdi:lan-pending", + "state": { + "on": "mdi:lan-connect", + "off": "mdi:lan-disconnect" + } + }, + "cloud_connectivity": { + "default": "mdi:cloud-question", + "state": { + "on": "mdi:cloud-check-variant-outline", + "off": "mdi:cloud-alert-outline" + } } }, "number": { diff --git a/homeassistant/components/intellifire/light.py b/homeassistant/components/intellifire/light.py index a7f2befaf33..5f25b5de823 100644 --- a/homeassistant/components/intellifire/light.py +++ b/homeassistant/components/intellifire/light.py @@ -6,7 +6,8 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import Any -from intellifire4py import IntellifireControlAsync, IntellifirePollData +from intellifire4py.control import IntelliFireController +from intellifire4py.model import IntelliFirePollData from 
homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -27,8 +28,8 @@ from .entity import IntellifireEntity class IntellifireLightRequiredKeysMixin: """Required keys for fan entity.""" - set_fn: Callable[[IntellifireControlAsync, int], Awaitable] - value_fn: Callable[[IntellifirePollData], bool] + set_fn: Callable[[IntelliFireController, int], Awaitable] + value_fn: Callable[[IntelliFirePollData], int] @dataclass(frozen=True) @@ -56,7 +57,7 @@ class IntellifireLight(IntellifireEntity, LightEntity): _attr_supported_color_modes = {ColorMode.BRIGHTNESS} @property - def brightness(self): + def brightness(self) -> int: """Return the current brightness 0-255.""" return 85 * self.entity_description.value_fn(self.coordinator.read_api.data) diff --git a/homeassistant/components/intellifire/manifest.json b/homeassistant/components/intellifire/manifest.json index 90d41fcffe7..e3ee663e8fe 100644 --- a/homeassistant/components/intellifire/manifest.json +++ b/homeassistant/components/intellifire/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/intellifire", "iot_class": "local_polling", "loggers": ["intellifire4py"], - "requirements": ["intellifire4py==2.2.2"] + "requirements": ["intellifire4py==4.1.9"] } diff --git a/homeassistant/components/intellifire/sensor.py b/homeassistant/components/intellifire/sensor.py index dd3eef9c9b4..eaff89d08e7 100644 --- a/homeassistant/components/intellifire/sensor.py +++ b/homeassistant/components/intellifire/sensor.py @@ -6,8 +6,6 @@ from collections.abc import Callable from dataclasses import dataclass from datetime import datetime, timedelta -from intellifire4py import IntellifirePollData - from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -29,7 +27,9 @@ from .entity import IntellifireEntity class IntellifireSensorRequiredKeysMixin: """Mixin for required keys.""" - value_fn: Callable[[IntellifirePollData], int | str | datetime | None] + value_fn: Callable[ + [IntellifireDataUpdateCoordinator], int | str | datetime | float | None + ] @dataclass(frozen=True) @@ -40,16 +40,29 @@ class IntellifireSensorEntityDescription( """Describes a sensor entity.""" -def _time_remaining_to_timestamp(data: IntellifirePollData) -> datetime | None: +def _time_remaining_to_timestamp( + coordinator: IntellifireDataUpdateCoordinator, +) -> datetime | None: """Define a sensor that takes into account timezone.""" - if not (seconds_offset := data.timeremaining_s): + if not (seconds_offset := coordinator.data.timeremaining_s): return None return utcnow() + timedelta(seconds=seconds_offset) -def _downtime_to_timestamp(data: IntellifirePollData) -> datetime | None: +def _downtime_to_timestamp( + coordinator: IntellifireDataUpdateCoordinator, +) -> datetime | None: """Define a sensor that takes into account a timezone.""" - if not (seconds_offset := data.downtime): + if not (seconds_offset := coordinator.data.downtime): + return None + return utcnow() - timedelta(seconds=seconds_offset) + + +def _uptime_to_timestamp( + coordinator: IntellifireDataUpdateCoordinator, +) -> datetime | None: + """Return a timestamp of how long the sensor has been up.""" + if not (seconds_offset := coordinator.data.uptime): return None return utcnow() - timedelta(seconds=seconds_offset) @@ -60,14 +73,14 @@ INTELLIFIRE_SENSORS: tuple[IntellifireSensorEntityDescription, ...] 
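# Worked example of the timestamp helpers in the sensor changes above: the
# device reports relative second counters, and the sensors convert them into
# absolute timestamps (in the future for "time remaining", in the past for
# uptime/downtime) so timestamp sensors render naturally. Plain datetime
# stand-in for homeassistant.util.dt.utcnow(); values are illustrative.
from __future__ import annotations

from datetime import datetime, timedelta, timezone


def utcnow() -> datetime:
    return datetime.now(timezone.utc)


def time_remaining_to_timestamp(timeremaining_s: int) -> datetime | None:
    # 0 means "no timer running"; mirrored by the `if not (seconds := ...)` guard.
    if not timeremaining_s:
        return None
    return utcnow() + timedelta(seconds=timeremaining_s)


def uptime_to_timestamp(uptime_s: int) -> datetime | None:
    if not uptime_s:
        return None
    return utcnow() - timedelta(seconds=uptime_s)


if __name__ == "__main__":
    print(time_remaining_to_timestamp(900))   # roughly 15 minutes from now
    print(uptime_to_timestamp(3600))          # booted roughly 1 hour ago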
= ( translation_key="flame_height", state_class=SensorStateClass.MEASUREMENT, # UI uses 1-5 for flame height, backing lib uses 0-4 - value_fn=lambda data: (data.flameheight + 1), + value_fn=lambda coordinator: (coordinator.data.flameheight + 1), ), IntellifireSensorEntityDescription( key="temperature", state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.TEMPERATURE, native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_fn=lambda data: data.temperature_c, + value_fn=lambda coordinator: coordinator.data.temperature_c, ), IntellifireSensorEntityDescription( key="target_temp", @@ -75,13 +88,13 @@ INTELLIFIRE_SENSORS: tuple[IntellifireSensorEntityDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.TEMPERATURE, native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_fn=lambda data: data.thermostat_setpoint_c, + value_fn=lambda coordinator: coordinator.data.thermostat_setpoint_c, ), IntellifireSensorEntityDescription( key="fan_speed", translation_key="fan_speed", state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.fanspeed, + value_fn=lambda coordinator: coordinator.data.fanspeed, ), IntellifireSensorEntityDescription( key="timer_end_timestamp", @@ -102,27 +115,27 @@ INTELLIFIRE_SENSORS: tuple[IntellifireSensorEntityDescription, ...] = ( translation_key="uptime", entity_category=EntityCategory.DIAGNOSTIC, device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: utcnow() - timedelta(seconds=data.uptime), + value_fn=_uptime_to_timestamp, ), IntellifireSensorEntityDescription( key="connection_quality", translation_key="connection_quality", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.connection_quality, + value_fn=lambda coordinator: coordinator.data.connection_quality, entity_registry_enabled_default=False, ), IntellifireSensorEntityDescription( key="ecm_latency", translation_key="ecm_latency", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.ecm_latency, + value_fn=lambda coordinator: coordinator.data.ecm_latency, entity_registry_enabled_default=False, ), IntellifireSensorEntityDescription( key="ipv4_address", translation_key="ipv4_address", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.ipv4_address, + value_fn=lambda coordinator: coordinator.data.ipv4_address, ), ) @@ -134,17 +147,17 @@ async def async_setup_entry( coordinator: IntellifireDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] async_add_entities( - IntellifireSensor(coordinator=coordinator, description=description) + IntelliFireSensor(coordinator=coordinator, description=description) for description in INTELLIFIRE_SENSORS ) -class IntellifireSensor(IntellifireEntity, SensorEntity): - """Extends IntellifireEntity with Sensor specific logic.""" +class IntelliFireSensor(IntellifireEntity, SensorEntity): + """Extends IntelliFireEntity with Sensor specific logic.""" entity_description: IntellifireSensorEntityDescription @property - def native_value(self) -> int | str | datetime | None: + def native_value(self) -> int | str | datetime | float | None: """Return the state.""" - return self.entity_description.value_fn(self.coordinator.read_api.data) + return self.entity_description.value_fn(self.coordinator) diff --git a/homeassistant/components/intellifire/strings.json b/homeassistant/components/intellifire/strings.json index 6393a4e070d..423d2c0788d 100644 --- a/homeassistant/components/intellifire/strings.json +++ 
b/homeassistant/components/intellifire/strings.json @@ -1,39 +1,30 @@ { "config": { - "flow_title": "{serial} ({host})", + "flow_title": "{serial}", "step": { - "manual_device_entry": { - "description": "Local Configuration", - "data": { - "host": "Host (IP Address)" - } + "pick_cloud_device": { + "title": "Configure fireplace", + "description": "Select fireplace by serial number:" }, - "api_config": { + "cloud_api": { + "description": "Authenticate against IntelliFire Cloud", + "data_description": { + "username": "Your IntelliFire app username", + "password": "Your IntelliFire app password" + }, "data": { "username": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" } - }, - "dhcp_confirm": { - "description": "Do you want to set up {host}\nSerial: {serial}?" - }, - "pick_device": { - "title": "Device Selection", - "description": "The following IntelliFire devices were discovered. Please select which you wish to configure.", - "data": { - "host": "[%key:common::config_flow::data::host%]" - } } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "api_error": "Login failed", - "iftapi_connect": "Error conecting to iftapi.net" + "api_error": "Login failed" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "not_intellifire_device": "Not an IntelliFire Device." + "not_intellifire_device": "Not an IntelliFire device.", + "no_available_devices": "All available devices have already been configured." } }, "entity": { @@ -82,6 +73,12 @@ }, "offline_error": { "name": "Offline error" + }, + "cloud_connectivity": { + "name": "Cloud connectivity" + }, + "local_connectivity": { + "name": "Local connectivity" } }, "fan": { diff --git a/homeassistant/components/intellifire/switch.py b/homeassistant/components/intellifire/switch.py index 00de6d74a9c..ac6096497b6 100644 --- a/homeassistant/components/intellifire/switch.py +++ b/homeassistant/components/intellifire/switch.py @@ -6,16 +6,13 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import Any -from intellifire4py import IntellifirePollData -from intellifire4py.intellifire import IntellifireAPILocal - from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import IntellifireDataUpdateCoordinator from .const import DOMAIN -from .coordinator import IntellifireDataUpdateCoordinator from .entity import IntellifireEntity @@ -23,9 +20,9 @@ from .entity import IntellifireEntity class IntellifireSwitchRequiredKeysMixin: """Mixin for required keys.""" - on_fn: Callable[[IntellifireAPILocal], Awaitable] - off_fn: Callable[[IntellifireAPILocal], Awaitable] - value_fn: Callable[[IntellifirePollData], bool] + on_fn: Callable[[IntellifireDataUpdateCoordinator], Awaitable] + off_fn: Callable[[IntellifireDataUpdateCoordinator], Awaitable] + value_fn: Callable[[IntellifireDataUpdateCoordinator], bool] @dataclass(frozen=True) @@ -39,16 +36,16 @@ INTELLIFIRE_SWITCHES: tuple[IntellifireSwitchEntityDescription, ...] 
= ( IntellifireSwitchEntityDescription( key="on_off", translation_key="flame", - on_fn=lambda control_api: control_api.flame_on(), - off_fn=lambda control_api: control_api.flame_off(), - value_fn=lambda data: data.is_on, + on_fn=lambda coordinator: coordinator.control_api.flame_on(), + off_fn=lambda coordinator: coordinator.control_api.flame_off(), + value_fn=lambda coordinator: coordinator.read_api.data.is_on, ), IntellifireSwitchEntityDescription( key="pilot", translation_key="pilot_light", - on_fn=lambda control_api: control_api.pilot_on(), - off_fn=lambda control_api: control_api.pilot_off(), - value_fn=lambda data: data.pilot_on, + on_fn=lambda coordinator: coordinator.control_api.pilot_on(), + off_fn=lambda coordinator: coordinator.control_api.pilot_off(), + value_fn=lambda coordinator: coordinator.read_api.data.pilot_on, ), ) @@ -74,15 +71,15 @@ class IntellifireSwitch(IntellifireEntity, SwitchEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the switch.""" - await self.entity_description.on_fn(self.coordinator.control_api) + await self.entity_description.on_fn(self.coordinator) await self.async_update_ha_state(force_refresh=True) async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the switch.""" - await self.entity_description.off_fn(self.coordinator.control_api) + await self.entity_description.off_fn(self.coordinator) await self.async_update_ha_state(force_refresh=True) @property def is_on(self) -> bool | None: """Return the on state.""" - return self.entity_description.value_fn(self.coordinator.read_api.data) + return self.entity_description.value_fn(self.coordinator) diff --git a/homeassistant/components/intent/__init__.py b/homeassistant/components/intent/__init__.py index 001f2515ebf..71ef40ad369 100644 --- a/homeassistant/components/intent/__init__.py +++ b/homeassistant/components/intent/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from datetime import datetime import logging from typing import Any, Protocol @@ -42,9 +41,11 @@ from homeassistant.const import ( from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.helpers import config_validation as cv, integration_platform, intent from homeassistant.helpers.typing import ConfigType +from homeassistant.util import dt as dt_util from .const import DOMAIN, TIMER_DATA from .timers import ( + CancelAllTimersIntentHandler, CancelTimerIntentHandler, DecreaseTimerIntentHandler, IncreaseTimerIntentHandler, @@ -130,6 +131,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: intent.async_register(hass, SetPositionIntentHandler()) intent.async_register(hass, StartTimerIntentHandler()) intent.async_register(hass, CancelTimerIntentHandler()) + intent.async_register(hass, CancelAllTimersIntentHandler()) intent.async_register(hass, IncreaseTimerIntentHandler()) intent.async_register(hass, DecreaseTimerIntentHandler()) intent.async_register(hass, PauseTimerIntentHandler()) @@ -137,6 +139,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: intent.async_register(hass, TimerStatusIntentHandler()) intent.async_register(hass, GetCurrentDateIntentHandler()) intent.async_register(hass, GetCurrentTimeIntentHandler()) + intent.async_register(hass, RespondIntentHandler()) return True @@ -239,6 +242,8 @@ class GetStateIntentHandler(intent.IntentHandler): vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), 
vol.Optional("state"): vol.All(cv.ensure_list, [cv.string]), + vol.Optional("preferred_area_id"): cv.string, + vol.Optional("preferred_floor_id"): cv.string, } async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: @@ -280,7 +285,13 @@ class GetStateIntentHandler(intent.IntentHandler): device_classes=device_classes, assistant=intent_obj.assistant, ) - match_result = intent.async_match_targets(hass, match_constraints) + match_preferences = intent.MatchTargetsPreferences( + area_id=slots.get("preferred_area_id", {}).get("value"), + floor_id=slots.get("preferred_floor_id", {}).get("value"), + ) + match_result = intent.async_match_targets( + hass, match_constraints, match_preferences + ) if ( (not match_result.is_match) and (match_result.no_match_reason is not None) @@ -356,7 +367,7 @@ class NevermindIntentHandler(intent.IntentHandler): description = "Cancels the current request and does nothing" async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: - """Doe not do anything, and produces an empty response.""" + """Do nothing and produces an empty response.""" return intent_obj.create_response() @@ -396,7 +407,7 @@ class GetCurrentDateIntentHandler(intent.IntentHandler): async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: response = intent_obj.create_response() - response.async_set_speech_slots({"date": datetime.now().date()}) + response.async_set_speech_slots({"date": dt_util.now().date()}) return response @@ -408,7 +419,28 @@ class GetCurrentTimeIntentHandler(intent.IntentHandler): async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: response = intent_obj.create_response() - response.async_set_speech_slots({"time": datetime.now().time()}) + response.async_set_speech_slots({"time": dt_util.now().time()}) + return response + + +class RespondIntentHandler(intent.IntentHandler): + """Responds with no action.""" + + intent_type = intent.INTENT_RESPOND + description = "Returns the provided response with no action." 
+ + slot_schema = { + vol.Optional("response"): cv.string, + } + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Return the provided response, but take no action.""" + slots = self.async_validate_slots(intent_obj.slots) + response = intent_obj.create_response() + + if "response" in slots: + response.async_set_speech(slots["response"]["value"]) + return response diff --git a/homeassistant/components/intent/timers.py b/homeassistant/components/intent/timers.py index a8576509a4b..84b96492241 100644 --- a/homeassistant/components/intent/timers.py +++ b/homeassistant/components/intent/timers.py @@ -6,11 +6,11 @@ import asyncio from collections.abc import Callable from dataclasses import dataclass from enum import StrEnum -from functools import cached_property import logging import time from typing import Any +from propcache import cached_property import voluptuous as vol from homeassistant.const import ATTR_DEVICE_ID, ATTR_ID, ATTR_NAME @@ -887,6 +887,36 @@ class CancelTimerIntentHandler(intent.IntentHandler): return intent_obj.create_response() +class CancelAllTimersIntentHandler(intent.IntentHandler): + """Intent handler for cancelling all timers.""" + + intent_type = intent.INTENT_CANCEL_ALL_TIMERS + description = "Cancels all timers" + slot_schema = { + vol.Optional("area"): cv.string, + } + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Handle the intent.""" + hass = intent_obj.hass + timer_manager: TimerManager = hass.data[TIMER_DATA] + slots = self.async_validate_slots(intent_obj.slots) + canceled = 0 + + for timer in _find_timers(hass, intent_obj.device_id, slots): + timer_manager.cancel_timer(timer.id) + canceled += 1 + + response = intent_obj.create_response() + speech_slots = {"canceled": canceled} + if "area" in slots: + speech_slots["area"] = slots["area"]["value"] + + response.async_set_speech_slots(speech_slots) + + return response + + class IncreaseTimerIntentHandler(intent.IntentHandler): """Intent handler for increasing the time of a timer.""" diff --git a/homeassistant/components/intent_script/__init__.py b/homeassistant/components/intent_script/__init__.py index 6f47cadb04f..a4f84f6ff9e 100644 --- a/homeassistant/components/intent_script/__init__.py +++ b/homeassistant/components/intent_script/__init__.py @@ -148,6 +148,8 @@ class ScriptIntentHandler(intent.IntentHandler): vol.Any("name", "area", "floor"): cv.string, vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), + vol.Optional("preferred_area_id"): cv.string, + vol.Optional("preferred_floor_id"): cv.string, } def __init__(self, intent_type: str, config: ConfigType) -> None: @@ -205,7 +207,14 @@ class ScriptIntentHandler(intent.IntentHandler): ) if match_constraints.has_constraints: - match_result = intent.async_match_targets(hass, match_constraints) + match_preferences = intent.MatchTargetsPreferences( + area_id=slots.get("preferred_area_id"), + floor_id=slots.get("preferred_floor_id"), + ) + + match_result = intent.async_match_targets( + hass, match_constraints, match_preferences + ) if match_result.is_match: targets = {} diff --git a/homeassistant/components/intent_script/icons.json b/homeassistant/components/intent_script/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/intent_script/icons.json +++ b/homeassistant/components/intent_script/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + 
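# Tiny sketch of how the new optional slots are read by the handlers above.
# Validated slots arrive as {"slot_name": {"value": ...}} mappings, which is
# why the code uses slots.get(...).get("value") and slots["response"]["value"].
# The example payload below is invented for illustration.
from __future__ import annotations


def extract_preferences(slots: dict) -> tuple[str | None, str | None]:
    area_id = slots.get("preferred_area_id", {}).get("value")
    floor_id = slots.get("preferred_floor_id", {}).get("value")
    return area_id, floor_id


def respond_speech(slots: dict) -> str | None:
    # Mirrors RespondIntentHandler: speak the provided response, or nothing.
    if "response" in slots:
        return slots["response"]["value"]
    return None


if __name__ == "__main__":
    slots = {
        "preferred_area_id": {"value": "kitchen"},
        "response": {"value": "Okay"},
    }
    print(extract_preferences(slots))  # ('kitchen', None)
    print(respond_speech(slots))       # 'Okay'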
"service": "mdi:reload" + } } } diff --git a/homeassistant/components/intesishome/climate.py b/homeassistant/components/intesishome/climate.py index 82b653a34c7..1a1f58a6b80 100644 --- a/homeassistant/components/intesishome/climate.py +++ b/homeassistant/components/intesishome/climate.py @@ -147,7 +147,6 @@ class IntesisAC(ClimateEntity): _attr_should_poll = False _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, ih_device_id, ih_device, controller): """Initialize the thermostat.""" diff --git a/homeassistant/components/intesishome/manifest.json b/homeassistant/components/intesishome/manifest.json index 6b7a579d99f..ab306fb4773 100644 --- a/homeassistant/components/intesishome/manifest.json +++ b/homeassistant/components/intesishome/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/intesishome", "iot_class": "cloud_push", "loggers": ["pyintesishome"], + "quality_scale": "legacy", "requirements": ["pyintesishome==1.8.0"] } diff --git a/homeassistant/components/ios/__init__.py b/homeassistant/components/ios/__init__.py index 2a821166d8a..ef141a28475 100644 --- a/homeassistant/components/ios/__init__.py +++ b/homeassistant/components/ios/__init__.py @@ -19,6 +19,16 @@ from homeassistant.helpers.typing import ConfigType from homeassistant.util.json import load_json_object from .const import ( + ATTR_BATTERY, + ATTR_BATTERY_LEVEL, + ATTR_BATTERY_STATE, + ATTR_DEVICE, + ATTR_DEVICE_ID, + ATTR_DEVICE_NAME, + ATTR_DEVICE_PERMANENT_ID, + ATTR_DEVICE_SYSTEM_VERSION, + ATTR_DEVICE_TYPE, + BATTERY_STATES, CONF_ACTION_BACKGROUND_COLOR, CONF_ACTION_ICON, CONF_ACTION_ICON_COLOR, @@ -64,21 +74,14 @@ BEHAVIORS = [ATTR_DEFAULT_BEHAVIOR, ATTR_TEXT_INPUT_BEHAVIOR] ATTR_LAST_SEEN_AT = "lastSeenAt" -ATTR_DEVICE = "device" ATTR_PUSH_TOKEN = "pushToken" ATTR_APP = "app" ATTR_PERMISSIONS = "permissions" ATTR_PUSH_ID = "pushId" -ATTR_DEVICE_ID = "deviceId" ATTR_PUSH_SOUNDS = "pushSounds" -ATTR_BATTERY = "battery" -ATTR_DEVICE_NAME = "name" ATTR_DEVICE_LOCALIZED_MODEL = "localizedModel" ATTR_DEVICE_MODEL = "model" -ATTR_DEVICE_PERMANENT_ID = "permanentID" -ATTR_DEVICE_SYSTEM_VERSION = "systemVersion" -ATTR_DEVICE_TYPE = "type" ATTR_DEVICE_SYSTEM_NAME = "systemName" ATTR_APP_BUNDLE_IDENTIFIER = "bundleIdentifier" @@ -90,20 +93,6 @@ ATTR_NOTIFICATIONS_PERMISSION = "notifications" PERMISSIONS = [ATTR_LOCATION_PERMISSION, ATTR_NOTIFICATIONS_PERMISSION] -ATTR_BATTERY_STATE = "state" -ATTR_BATTERY_LEVEL = "level" - -ATTR_BATTERY_STATE_UNPLUGGED = "Not Charging" -ATTR_BATTERY_STATE_CHARGING = "Charging" -ATTR_BATTERY_STATE_FULL = "Full" -ATTR_BATTERY_STATE_UNKNOWN = "Unknown" - -BATTERY_STATES = [ - ATTR_BATTERY_STATE_UNPLUGGED, - ATTR_BATTERY_STATE_CHARGING, - ATTR_BATTERY_STATE_FULL, - ATTR_BATTERY_STATE_UNKNOWN, -] ATTR_DEVICES = "devices" diff --git a/homeassistant/components/ios/const.py b/homeassistant/components/ios/const.py index 181bbebd9a6..c9782aab1c7 100644 --- a/homeassistant/components/ios/const.py +++ b/homeassistant/components/ios/const.py @@ -2,6 +2,28 @@ DOMAIN = "ios" +ATTR_BATTERY = "battery" +ATTR_BATTERY_LEVEL = "level" +ATTR_BATTERY_STATE = "state" +ATTR_BATTERY_STATE_UNPLUGGED = "Not Charging" +ATTR_BATTERY_STATE_CHARGING = "Charging" +ATTR_BATTERY_STATE_FULL = "Full" +ATTR_BATTERY_STATE_UNKNOWN = "Unknown" + +BATTERY_STATES = [ + ATTR_BATTERY_STATE_UNPLUGGED, + ATTR_BATTERY_STATE_CHARGING, + ATTR_BATTERY_STATE_FULL, + ATTR_BATTERY_STATE_UNKNOWN, +] + +ATTR_DEVICE = "device" 
+ATTR_DEVICE_ID = "deviceId" +ATTR_DEVICE_NAME = "name" +ATTR_DEVICE_PERMANENT_ID = "permanentID" +ATTR_DEVICE_SYSTEM_VERSION = "systemVersion" +ATTR_DEVICE_TYPE = "type" + CONF_ACTION_NAME = "name" CONF_ACTION_BACKGROUND_COLOR = "background_color" CONF_ACTION_LABEL = "label" diff --git a/homeassistant/components/ios/notify.py b/homeassistant/components/ios/notify.py index 92a706b3a38..b5bd0aea58f 100644 --- a/homeassistant/components/ios/notify.py +++ b/homeassistant/components/ios/notify.py @@ -20,7 +20,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util -from .. import ios +from . import device_name_for_push_id, devices_with_push, enabled_push_ids _LOGGER = logging.getLogger(__name__) @@ -42,7 +42,7 @@ def log_rate_limits( _LOGGER.log( level, rate_limit_msg, - ios.device_name_for_push_id(hass, target), + device_name_for_push_id(hass, target), rate_limits["successful"], rate_limits["maximum"], rate_limits["errors"], @@ -60,7 +60,7 @@ def get_service( # Need this to enable requirements checking in the app. hass.config.components.add("ios.notify") - if not ios.devices_with_push(hass): + if not devices_with_push(hass): return None return iOSNotificationService() @@ -75,7 +75,7 @@ class iOSNotificationService(BaseNotificationService): @property def targets(self) -> dict[str, str]: """Return a dictionary of registered targets.""" - return ios.devices_with_push(self.hass) + return devices_with_push(self.hass) def send_message(self, message: str = "", **kwargs: Any) -> None: """Send a message to the Lambda APNS gateway.""" @@ -89,13 +89,13 @@ class iOSNotificationService(BaseNotificationService): data[ATTR_TITLE] = kwargs.get(ATTR_TITLE) if not (targets := kwargs.get(ATTR_TARGET)): - targets = ios.enabled_push_ids(self.hass) + targets = enabled_push_ids(self.hass) if kwargs.get(ATTR_DATA) is not None: data[ATTR_DATA] = kwargs.get(ATTR_DATA) for target in targets: - if target not in ios.enabled_push_ids(self.hass): + if target not in enabled_push_ids(self.hass): _LOGGER.error("The target (%s) does not exist in .ios.conf", targets) return diff --git a/homeassistant/components/ios/sensor.py b/homeassistant/components/ios/sensor.py index 4171b8ecd46..a97c2145919 100644 --- a/homeassistant/components/ios/sensor.py +++ b/homeassistant/components/ios/sensor.py @@ -18,8 +18,22 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.icon import icon_for_battery_level from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .. import ios -from .const import DOMAIN +from . import devices +from .const import ( + ATTR_BATTERY, + ATTR_BATTERY_LEVEL, + ATTR_BATTERY_STATE, + ATTR_BATTERY_STATE_FULL, + ATTR_BATTERY_STATE_UNKNOWN, + ATTR_BATTERY_STATE_UNPLUGGED, + ATTR_DEVICE, + ATTR_DEVICE_ID, + ATTR_DEVICE_NAME, + ATTR_DEVICE_PERMANENT_ID, + ATTR_DEVICE_SYSTEM_VERSION, + ATTR_DEVICE_TYPE, + DOMAIN, +) SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( SensorEntityDescription( @@ -55,7 +69,7 @@ async def async_setup_entry( """Set up iOS from a config entry.""" async_add_entities( IOSSensor(device_name, device, description) - for device_name, device in ios.devices(hass).items() + for device_name, device in devices(hass).items() for description in SENSOR_TYPES ) @@ -76,7 +90,7 @@ class IOSSensor(SensorEntity): self.entity_description = description self._device = device - device_id = device[ios.ATTR_DEVICE_ID] + device_id = device[ATTR_DEVICE_ID] self._attr_unique_id = f"{description.key}_{device_id}" @property @@ -85,44 +99,44 @@ class IOSSensor(SensorEntity): return DeviceInfo( identifiers={ ( - ios.DOMAIN, - self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_PERMANENT_ID], + DOMAIN, + self._device[ATTR_DEVICE][ATTR_DEVICE_PERMANENT_ID], ) }, manufacturer="Apple", - model=self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_TYPE], - name=self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_NAME], - sw_version=self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_SYSTEM_VERSION], + model=self._device[ATTR_DEVICE][ATTR_DEVICE_TYPE], + name=self._device[ATTR_DEVICE][ATTR_DEVICE_NAME], + sw_version=self._device[ATTR_DEVICE][ATTR_DEVICE_SYSTEM_VERSION], ) @property def extra_state_attributes(self) -> dict[str, Any]: """Return the device state attributes.""" - device = self._device[ios.ATTR_DEVICE] - device_battery = self._device[ios.ATTR_BATTERY] + device = self._device[ATTR_DEVICE] + device_battery = self._device[ATTR_BATTERY] return { - "Battery State": device_battery[ios.ATTR_BATTERY_STATE], - "Battery Level": device_battery[ios.ATTR_BATTERY_LEVEL], - "Device Type": device[ios.ATTR_DEVICE_TYPE], - "Device Name": device[ios.ATTR_DEVICE_NAME], - "Device Version": device[ios.ATTR_DEVICE_SYSTEM_VERSION], + "Battery State": device_battery[ATTR_BATTERY_STATE], + "Battery Level": device_battery[ATTR_BATTERY_LEVEL], + "Device Type": device[ATTR_DEVICE_TYPE], + "Device Name": device[ATTR_DEVICE_NAME], + "Device Version": device[ATTR_DEVICE_SYSTEM_VERSION], } @property def icon(self) -> str: """Return the icon to use in the frontend, if any.""" - device_battery = self._device[ios.ATTR_BATTERY] - battery_state = device_battery[ios.ATTR_BATTERY_STATE] - battery_level = device_battery[ios.ATTR_BATTERY_LEVEL] + device_battery = self._device[ATTR_BATTERY] + battery_state = device_battery[ATTR_BATTERY_STATE] + battery_level = device_battery[ATTR_BATTERY_LEVEL] charging = True icon_state = DEFAULT_ICON_STATE if battery_state in ( - ios.ATTR_BATTERY_STATE_FULL, - ios.ATTR_BATTERY_STATE_UNPLUGGED, + ATTR_BATTERY_STATE_FULL, + ATTR_BATTERY_STATE_UNPLUGGED, ): charging = False icon_state = f"{DEFAULT_ICON_STATE}-off" - elif battery_state == ios.ATTR_BATTERY_STATE_UNKNOWN: + elif battery_state == ATTR_BATTERY_STATE_UNKNOWN: battery_level = None charging = False icon_state = f"{DEFAULT_ICON_LEVEL}-unknown" @@ -135,17 +149,17 @@ class IOSSensor(SensorEntity): def _update(self, device: dict[str, Any]) -> None: """Get the latest state of the sensor.""" self._device = device - self._attr_native_value = self._device[ios.ATTR_BATTERY][ + self._attr_native_value = self._device[ATTR_BATTERY][ self.entity_description.key ] self.async_write_ha_state() async def async_added_to_hass(self) -> None: """Handle addition to hass: register to dispatch.""" - self._attr_native_value = self._device[ios.ATTR_BATTERY][ + self._attr_native_value = self._device[ATTR_BATTERY][ self.entity_description.key ] - device_id = self._device[ios.ATTR_DEVICE_ID] + device_id = self._device[ATTR_DEVICE_ID] 
self.async_on_remove( async_dispatcher_connect(self.hass, f"{DOMAIN}.{device_id}", self._update) ) diff --git a/homeassistant/components/iotawatt/config_flow.py b/homeassistant/components/iotawatt/config_flow.py index 187423c7d8b..668844a1c5c 100644 --- a/homeassistant/components/iotawatt/config_flow.py +++ b/homeassistant/components/iotawatt/config_flow.py @@ -75,7 +75,9 @@ class IOTaWattConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_auth(self, user_input=None): + async def async_step_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Authenticate user if authentication is enabled on the IoTaWatt device.""" if user_input is None: user_input = {} diff --git a/homeassistant/components/iotawatt/strings.json b/homeassistant/components/iotawatt/strings.json index 266b32c5c31..01a82b721a2 100644 --- a/homeassistant/components/iotawatt/strings.json +++ b/homeassistant/components/iotawatt/strings.json @@ -14,7 +14,7 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" }, - "description": "The IoTawatt device requires authentication. Please enter the username and password and click the Submit button." + "description": "The IoTawatt device requires authentication. Please enter the username and password and select the Submit button." } }, "error": { diff --git a/homeassistant/components/iotty/__init__.py b/homeassistant/components/iotty/__init__.py index b34b8d3840d..804f3f40196 100644 --- a/homeassistant/components/iotty/__init__.py +++ b/homeassistant/components/iotty/__init__.py @@ -19,7 +19,7 @@ from . import coordinator _LOGGER = logging.getLogger(__name__) -PLATFORMS: list[Platform] = [Platform.SWITCH] +PLATFORMS: list[Platform] = [Platform.COVER, Platform.SWITCH] type IottyConfigEntry = ConfigEntry[IottyConfigEntryData] diff --git a/homeassistant/components/iotty/api.py b/homeassistant/components/iotty/api.py index 03e18a02903..d87fda57731 100644 --- a/homeassistant/components/iotty/api.py +++ b/homeassistant/components/iotty/api.py @@ -33,8 +33,6 @@ class IottyProxy(CloudApi): async def async_get_access_token(self) -> Any: """Return a valid access token.""" - - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token["access_token"] diff --git a/homeassistant/components/iotty/coordinator.py b/homeassistant/components/iotty/coordinator.py index f63c4b45112..420248f7724 100644 --- a/homeassistant/components/iotty/coordinator.py +++ b/homeassistant/components/iotty/coordinator.py @@ -7,7 +7,8 @@ from datetime import timedelta import logging from iottycloud.device import Device -from iottycloud.verbs import RESULT, STATUS +from iottycloud.shutter import Shutter +from iottycloud.verbs import OPEN_PERCENTAGE, RESULT, STATUS from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -60,14 +61,12 @@ class IottyDataUpdateCoordinator(DataUpdateCoordinator[IottyData]): ) self._device_registry = dr.async_get(hass) - async def async_config_entry_first_refresh(self) -> None: - """Override the first refresh to also fetch iotty devices list.""" + async def _async_setup(self) -> None: + """Get devices.""" _LOGGER.debug("Fetching devices list from iottyCloud") self._devices = await self.iotty.get_devices() _LOGGER.debug("There are %d devices", 
len(self._devices)) - await super().async_config_entry_first_refresh() - async def _async_update_data(self) -> IottyData: """Fetch data from iottyCloud device.""" _LOGGER.debug("Fetching devices status from iottyCloud") @@ -104,5 +103,9 @@ class IottyDataUpdateCoordinator(DataUpdateCoordinator[IottyData]): "Retrieved status: '%s' for device %s", status, device.device_id ) device.update_status(status) + if isinstance(device, Shutter) and isinstance( + percentage := json.get(OPEN_PERCENTAGE), int + ): + device.update_percentage(percentage) return IottyData(self._devices) diff --git a/homeassistant/components/iotty/cover.py b/homeassistant/components/iotty/cover.py new file mode 100644 index 00000000000..50a4a1deeba --- /dev/null +++ b/homeassistant/components/iotty/cover.py @@ -0,0 +1,193 @@ +"""Implement a iotty Shutter Device.""" + +from __future__ import annotations + +import logging +from typing import Any + +from iottycloud.device import Device +from iottycloud.shutter import Shutter, ShutterState +from iottycloud.verbs import SH_DEVICE_TYPE_UID + +from homeassistant.components.cover import ( + ATTR_POSITION, + CoverDeviceClass, + CoverEntity, + CoverEntityFeature, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import IottyConfigEntry +from .api import IottyProxy +from .coordinator import IottyDataUpdateCoordinator +from .entity import IottyEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: IottyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Activate the iotty Shutter component.""" + _LOGGER.debug("Setup COVER entry id is %s", config_entry.entry_id) + + coordinator = config_entry.runtime_data.coordinator + entities = [ + IottyShutter( + coordinator=coordinator, iotty_cloud=coordinator.iotty, iotty_device=d + ) + for d in coordinator.data.devices + if d.device_type == SH_DEVICE_TYPE_UID + if (isinstance(d, Shutter)) + ] + _LOGGER.debug("Found %d Shutters", len(entities)) + + async_add_entities(entities) + + known_devices: set = config_entry.runtime_data.known_devices + for known_device in coordinator.data.devices: + if known_device.device_type == SH_DEVICE_TYPE_UID: + known_devices.add(known_device) + + @callback + def async_update_data() -> None: + """Handle updated data from the API endpoint.""" + if not coordinator.last_update_success: + return + + devices = coordinator.data.devices + entities = [] + known_devices: set = config_entry.runtime_data.known_devices + + # Add entities for devices which we've not yet seen + for device in devices: + if ( + any(d.device_id == device.device_id for d in known_devices) + or device.device_type != SH_DEVICE_TYPE_UID + ): + continue + + iotty_entity = IottyShutter( + coordinator=coordinator, + iotty_cloud=coordinator.iotty, + iotty_device=Shutter( + device.device_id, + device.serial_number, + device.device_type, + device.device_name, + ), + ) + + entities.extend([iotty_entity]) + known_devices.add(device) + + async_add_entities(entities) + + # Add a subscriber to the coordinator to discover new devices + coordinator.async_add_listener(async_update_data) + + +class IottyShutter(IottyEntity, CoverEntity): + """Haas entity class for iotty Shutter.""" + + _attr_device_class = CoverDeviceClass.SHUTTER + _iotty_device: Shutter + _attr_supported_features: CoverEntityFeature = CoverEntityFeature(0) | ( + CoverEntityFeature.OPEN + | CoverEntityFeature.CLOSE + | 
CoverEntityFeature.STOP + | CoverEntityFeature.SET_POSITION + ) + + def __init__( + self, + coordinator: IottyDataUpdateCoordinator, + iotty_cloud: IottyProxy, + iotty_device: Shutter, + ) -> None: + """Initialize the Shutter device.""" + super().__init__(coordinator, iotty_cloud, iotty_device) + + @property + def current_cover_position(self) -> int | None: + """Return the current position of the shutter. + + None is unknown, 0 is closed, 100 is fully open. + """ + return self._iotty_device.percentage + + @property + def is_closed(self) -> bool: + """Return true if the Shutter is closed.""" + _LOGGER.debug( + "Retrieve device status for %s ? %s : %s", + self._iotty_device.device_id, + self._iotty_device.status, + self._iotty_device.percentage, + ) + return ( + self._iotty_device.status == ShutterState.STATIONARY + and self._iotty_device.percentage == 0 + ) + + @property + def is_opening(self) -> bool: + """Return true if the Shutter is opening.""" + return self._iotty_device.status == ShutterState.OPENING + + @property + def is_closing(self) -> bool: + """Return true if the Shutter is closing.""" + return self._iotty_device.status == ShutterState.CLOSING + + @property + def supported_features(self) -> CoverEntityFeature: + """Flag supported features.""" + return self._attr_supported_features + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + await self._iotty_cloud.command( + self._iotty_device.device_id, self._iotty_device.cmd_open() + ) + await self.coordinator.async_request_refresh() + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close cover.""" + await self._iotty_cloud.command( + self._iotty_device.device_id, self._iotty_device.cmd_close() + ) + await self.coordinator.async_request_refresh() + + async def async_set_cover_position(self, **kwargs: Any) -> None: + """Move the cover to a specific position.""" + percentage = kwargs[ATTR_POSITION] + await self._iotty_cloud.command( + self._iotty_device.device_id, + self._iotty_device.cmd_move_to(), + {"open_percentage": percentage}, + ) + await self.coordinator.async_request_refresh() + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await self._iotty_cloud.command( + self._iotty_device.device_id, self._iotty_device.cmd_stop() + ) + await self.coordinator.async_request_refresh() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + + device: Device = next( + device + for device in self.coordinator.data.devices + if device.device_id == self._iotty_device.device_id + ) + if isinstance(device, Shutter): + self._iotty_device = device + self.async_write_ha_state() diff --git a/homeassistant/components/iotty/entity.py b/homeassistant/components/iotty/entity.py new file mode 100644 index 00000000000..4eb7a421281 --- /dev/null +++ b/homeassistant/components/iotty/entity.py @@ -0,0 +1,49 @@ +"""Base class for iotty entities.""" + +import logging + +from iottycloud.lightswitch import Device + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .api import IottyProxy +from .const import DOMAIN +from .coordinator import IottyDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +class IottyEntity(CoordinatorEntity[IottyDataUpdateCoordinator]): + """Defines a base iotty entity.""" + + _attr_has_entity_name = True + _attr_name = None + _iotty_device_name: str + _iotty_cloud: IottyProxy + _iotty_device: 
Device + + def __init__( + self, + coordinator: IottyDataUpdateCoordinator, + iotty_cloud: IottyProxy, + iotty_device: Device, + ) -> None: + """Initialize iotty entity.""" + super().__init__(coordinator) + + _LOGGER.debug( + "Creating new COVER (%s) %s", + iotty_device.device_type, + iotty_device.device_id, + ) + + self._iotty_cloud = iotty_cloud + self._attr_unique_id = iotty_device.device_id + self._iotty_device_name = iotty_device.name + self._iotty_device = iotty_device + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, iotty_device.device_id)}, + name=iotty_device.name, + manufacturer="iotty", + ) diff --git a/homeassistant/components/iotty/manifest.json b/homeassistant/components/iotty/manifest.json index 87aa49799b2..5425ce3b480 100644 --- a/homeassistant/components/iotty/manifest.json +++ b/homeassistant/components/iotty/manifest.json @@ -1,11 +1,11 @@ { "domain": "iotty", "name": "iotty", - "codeowners": ["@pburgio"], + "codeowners": ["@shapournemati-iotty"], "config_flow": true, "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/iotty", "integration_type": "device", "iot_class": "cloud_polling", - "requirements": ["iottycloud==0.1.3"] + "requirements": ["iottycloud==0.3.0"] } diff --git a/homeassistant/components/iotty/strings.json b/homeassistant/components/iotty/strings.json index 569e148a5a3..cb0dc509d9a 100644 --- a/homeassistant/components/iotty/strings.json +++ b/homeassistant/components/iotty/strings.json @@ -12,7 +12,8 @@ "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", - "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]" + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" diff --git a/homeassistant/components/iotty/switch.py b/homeassistant/components/iotty/switch.py index ee489e88349..1e2bdffcf79 100644 --- a/homeassistant/components/iotty/switch.py +++ b/homeassistant/components/iotty/switch.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any, cast +from typing import Any from iottycloud.device import Device from iottycloud.lightswitch import LightSwitch @@ -11,14 +11,12 @@ from iottycloud.verbs import LS_DEVICE_TYPE_UID from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import IottyConfigEntry from .api import IottyProxy -from .const import DOMAIN from .coordinator import IottyDataUpdateCoordinator +from .entity import IottyEntity _LOGGER = logging.getLogger(__name__) @@ -87,14 +85,10 @@ async def async_setup_entry( coordinator.async_add_listener(async_update_data) -class IottyLightSwitch(SwitchEntity, CoordinatorEntity[IottyDataUpdateCoordinator]): +class IottyLightSwitch(IottyEntity, SwitchEntity): """Haas entity class for iotty LightSwitch.""" - _attr_has_entity_name = True - _attr_name = None - _attr_entity_category = None _attr_device_class = SwitchDeviceClass.SWITCH - _iotty_cloud: IottyProxy _iotty_device: LightSwitch def __init__( @@ -104,26 +98,7 @@ class IottyLightSwitch(SwitchEntity, CoordinatorEntity[IottyDataUpdateCoordinato iotty_device: LightSwitch, ) -> None: """Initialize the LightSwitch device.""" - super().__init__(coordinator=coordinator) - - _LOGGER.debug( - "Creating new SWITCH (%s) %s", - iotty_device.device_type, - iotty_device.device_id, - ) - - self._iotty_cloud = iotty_cloud - self._iotty_device = iotty_device - self._attr_unique_id = iotty_device.device_id - - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - return DeviceInfo( - identifiers={(DOMAIN, cast(str, self._attr_unique_id))}, - name=self._iotty_device.name, - manufacturer="iotty", - ) + super().__init__(coordinator, iotty_cloud, iotty_device) @property def is_on(self) -> bool: diff --git a/homeassistant/components/iperf3/icons.json b/homeassistant/components/iperf3/icons.json index 3ef7e301ed6..f6ebe1aee2f 100644 --- a/homeassistant/components/iperf3/icons.json +++ b/homeassistant/components/iperf3/icons.json @@ -1,5 +1,7 @@ { "services": { - "speedtest": "mdi:speedometer" + "speedtest": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/iperf3/manifest.json b/homeassistant/components/iperf3/manifest.json index a1bb26ddc1a..16e33e47331 100644 --- a/homeassistant/components/iperf3/manifest.json +++ b/homeassistant/components/iperf3/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/iperf3", "iot_class": "local_polling", "loggers": ["iperf3"], + "quality_scale": "legacy", "requirements": ["iperf3==0.1.11"] } diff --git a/homeassistant/components/ipp/manifest.json b/homeassistant/components/ipp/manifest.json index 2ba82b2cfec..54c26b63585 100644 --- a/homeassistant/components/ipp/manifest.json +++ b/homeassistant/components/ipp/manifest.json @@ -7,7 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["deepmerge", "pyipp"], - "quality_scale": "platinum", - "requirements": ["pyipp==0.16.0"], + "requirements": ["pyipp==0.17.0"], "zeroconf": ["_ipps._tcp.local.", "_ipp._tcp.local."] } diff --git a/homeassistant/components/ipp/sensor.py b/homeassistant/components/ipp/sensor.py index e872fc7977f..a2792c7749b 100644 --- a/homeassistant/components/ipp/sensor.py +++ b/homeassistant/components/ipp/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from datetime import datetime, timedelta +from datetime import datetime from typing import Any from pyipp import Marker, Printer @@ -19,7 +19,6 @@ from homeassistant.const import ATTR_LOCATION, PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.util.dt import utcnow from 
. import IPPConfigEntry from .const import ( @@ -80,7 +79,7 @@ PRINTER_SENSORS: tuple[IPPSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, - value_fn=lambda printer: (utcnow() - timedelta(seconds=printer.info.uptime)), + value_fn=lambda printer: printer.booted_at, ), ) diff --git a/homeassistant/components/iqvia/__init__.py b/homeassistant/components/iqvia/__init__.py index ab05ae19d86..3fabb88b041 100644 --- a/homeassistant/components/iqvia/__init__.py +++ b/homeassistant/components/iqvia/__init__.py @@ -13,15 +13,10 @@ from pyiqvia.errors import IQVIAError from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( CONF_ZIP_CODE, @@ -81,6 +76,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = coordinators[sensor_type] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=f"{entry.data[CONF_ZIP_CODE]} {sensor_type}", update_interval=DEFAULT_SCAN_INTERVAL, update_method=partial(async_get_data_from_api, api_coro), @@ -112,50 +108,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class IQVIAEntity(CoordinatorEntity[DataUpdateCoordinator[dict[str, Any]]]): - """Define a base IQVIA entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: DataUpdateCoordinator[dict[str, Any]], - entry: ConfigEntry, - description: EntityDescription, - ) -> None: - """Initialize.""" - super().__init__(coordinator) - - self._attr_extra_state_attributes = {} - self._attr_unique_id = f"{entry.data[CONF_ZIP_CODE]}_{description.key}" - self._entry = entry - self.entity_description = description - - @callback - def _handle_coordinator_update(self) -> None: - """Handle updated data from the coordinator.""" - if not self.coordinator.last_update_success: - return - - self.update_from_latest_data() - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - await super().async_added_to_hass() - - if self.entity_description.key == TYPE_ALLERGY_FORECAST: - self.async_on_remove( - self.hass.data[DOMAIN][self._entry.entry_id][ - TYPE_ALLERGY_OUTLOOK - ].async_add_listener(self._handle_coordinator_update) - ) - - self.update_from_latest_data() - - @callback - def update_from_latest_data(self) -> None: - """Update the entity from the latest data.""" - raise NotImplementedError diff --git a/homeassistant/components/iqvia/entity.py b/homeassistant/components/iqvia/entity.py new file mode 100644 index 00000000000..e77c0f7e32a --- /dev/null +++ b/homeassistant/components/iqvia/entity.py @@ -0,0 +1,62 @@ +"""Support for IQVIA.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import callback +from homeassistant.helpers.entity import EntityDescription +from 
homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import CONF_ZIP_CODE, DOMAIN, TYPE_ALLERGY_FORECAST, TYPE_ALLERGY_OUTLOOK + + +class IQVIAEntity(CoordinatorEntity[DataUpdateCoordinator[dict[str, Any]]]): + """Define a base IQVIA entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: DataUpdateCoordinator[dict[str, Any]], + entry: ConfigEntry, + description: EntityDescription, + ) -> None: + """Initialize.""" + super().__init__(coordinator) + + self._attr_extra_state_attributes = {} + self._attr_unique_id = f"{entry.data[CONF_ZIP_CODE]}_{description.key}" + self._entry = entry + self.entity_description = description + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if not self.coordinator.last_update_success: + return + + self.update_from_latest_data() + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await super().async_added_to_hass() + + if self.entity_description.key == TYPE_ALLERGY_FORECAST: + self.async_on_remove( + self.hass.data[DOMAIN][self._entry.entry_id][ + TYPE_ALLERGY_OUTLOOK + ].async_add_listener(self._handle_coordinator_update) + ) + + self.update_from_latest_data() + + @callback + def update_from_latest_data(self) -> None: + """Update the entity from the latest data.""" + raise NotImplementedError diff --git a/homeassistant/components/iqvia/manifest.json b/homeassistant/components/iqvia/manifest.json index ce519de1b67..0236b72c89d 100644 --- a/homeassistant/components/iqvia/manifest.json +++ b/homeassistant/components/iqvia/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyiqvia"], - "requirements": ["numpy==1.26.0", "pyiqvia==2022.04.0"] + "requirements": ["numpy==2.2.0", "pyiqvia==2022.04.0"] } diff --git a/homeassistant/components/iqvia/sensor.py b/homeassistant/components/iqvia/sensor.py index ba3c288b702..d04e0885454 100644 --- a/homeassistant/components/iqvia/sensor.py +++ b/homeassistant/components/iqvia/sensor.py @@ -17,7 +17,6 @@ from homeassistant.const import ATTR_STATE from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import IQVIAEntity from .const import ( DOMAIN, TYPE_ALLERGY_FORECAST, @@ -33,6 +32,7 @@ from .const import ( TYPE_DISEASE_INDEX, TYPE_DISEASE_TODAY, ) +from .entity import IQVIAEntity ATTR_ALLERGEN_AMOUNT = "allergen_amount" ATTR_ALLERGEN_GENUS = "allergen_genus" @@ -244,8 +244,8 @@ class IndexSensor(IQVIAEntity, SensorEntity): key = self.entity_description.key.split("_")[-1].title() try: - [period] = [p for p in data["periods"] if p["Type"] == key] # type: ignore[index] - except TypeError: + period = next(p for p in data["periods"] if p["Type"] == key) # type: ignore[index] + except StopIteration: return data = cast(dict[str, Any], data) diff --git a/homeassistant/components/irish_rail_transport/manifest.json b/homeassistant/components/irish_rail_transport/manifest.json index bb9b0d59ef0..2a118f17e2a 100644 --- a/homeassistant/components/irish_rail_transport/manifest.json +++ b/homeassistant/components/irish_rail_transport/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/irish_rail_transport", "iot_class": "cloud_polling", "loggers": ["pyirishrail"], + "quality_scale": "legacy", "requirements": ["pyirishrail==0.0.2"] } diff --git a/homeassistant/components/irish_rail_transport/sensor.py b/homeassistant/components/irish_rail_transport/sensor.py index 39bf39bcbe0..2765a14b7a3 100644 --- a/homeassistant/components/irish_rail_transport/sensor.py +++ b/homeassistant/components/irish_rail_transport/sensor.py @@ -194,9 +194,9 @@ class IrishRailTransportData: ATTR_STATION: self.station, ATTR_ORIGIN: "", ATTR_DESTINATION: dest, - ATTR_DUE_IN: "n/a", - ATTR_DUE_AT: "n/a", - ATTR_EXPECT_AT: "n/a", + ATTR_DUE_IN: None, + ATTR_DUE_AT: None, + ATTR_EXPECT_AT: None, ATTR_DIRECTION: direction, ATTR_STOPS_AT: stops_at, ATTR_TRAIN_TYPE: "", diff --git a/homeassistant/components/iron_os/__init__.py b/homeassistant/components/iron_os/__init__.py index 11d99a1558a..0fe5acc2db6 100644 --- a/homeassistant/components/iron_os/__init__.py +++ b/homeassistant/components/iron_os/__init__.py @@ -5,24 +5,55 @@ from __future__ import annotations import logging from typing import TYPE_CHECKING -from pynecil import Pynecil +from pynecil import IronOSUpdate, Pynecil from homeassistant.components import bluetooth from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_NAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import DOMAIN -from .coordinator import IronOSCoordinator +from .coordinator import ( + IronOSCoordinators, + IronOSFirmwareUpdateCoordinator, + IronOSLiveDataCoordinator, + IronOSSettingsCoordinator, +) -PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR] +PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.NUMBER, + Platform.SENSOR, + Platform.UPDATE, +] + + +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +type IronOSConfigEntry = ConfigEntry[IronOSCoordinators] +IRON_OS_KEY: HassKey[IronOSFirmwareUpdateCoordinator] = HassKey(DOMAIN) -type IronOSConfigEntry = ConfigEntry[IronOSCoordinator] _LOGGER = logging.getLogger(__name__) +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up IronOS firmware update coordinator.""" + + session = 
async_get_clientsession(hass) + github = IronOSUpdate(session) + + hass.data[IRON_OS_KEY] = IronOSFirmwareUpdateCoordinator(hass, github) + await hass.data[IRON_OS_KEY].async_request_refresh() + return True + + async def async_setup_entry(hass: HomeAssistant, entry: IronOSConfigEntry) -> bool: """Set up IronOS from a config entry.""" if TYPE_CHECKING: @@ -39,10 +70,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: IronOSConfigEntry) -> bo device = Pynecil(ble_device) - coordinator = IronOSCoordinator(hass, device) - await coordinator.async_config_entry_first_refresh() + live_data = IronOSLiveDataCoordinator(hass, device) + await live_data.async_config_entry_first_refresh() - entry.runtime_data = coordinator + settings = IronOSSettingsCoordinator(hass, device) + await settings.async_config_entry_first_refresh() + + entry.runtime_data = IronOSCoordinators( + live_data=live_data, + settings=settings, + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/iron_os/binary_sensor.py b/homeassistant/components/iron_os/binary_sensor.py new file mode 100644 index 00000000000..81ba0e08c95 --- /dev/null +++ b/homeassistant/components/iron_os/binary_sensor.py @@ -0,0 +1,54 @@ +"""Binary sensor platform for IronOS integration.""" + +from __future__ import annotations + +from enum import StrEnum + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import IronOSConfigEntry +from .coordinator import IronOSLiveDataCoordinator +from .entity import IronOSBaseEntity + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +class PinecilBinarySensor(StrEnum): + """Pinecil Binary Sensors.""" + + TIP_CONNECTED = "tip_connected" + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IronOSConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up binary sensors from a config entry.""" + coordinator = entry.runtime_data.live_data + + entity_description = BinarySensorEntityDescription( + key=PinecilBinarySensor.TIP_CONNECTED, + translation_key=PinecilBinarySensor.TIP_CONNECTED, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + ) + + async_add_entities([IronOSBinarySensorEntity(coordinator, entity_description)]) + + +class IronOSBinarySensorEntity(IronOSBaseEntity, BinarySensorEntity): + """Representation of a IronOS binary sensor entity.""" + + coordinator: IronOSLiveDataCoordinator + + @property + def is_on(self) -> bool | None: + """Return true if the binary sensor is on.""" + return self.coordinator.has_tip diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index aefb14b689b..e8ddef43bd7 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -2,13 +2,24 @@ from __future__ import annotations +from dataclasses import dataclass from datetime import timedelta import logging -from pynecil import CommunicationError, DeviceInfoResponse, LiveDataResponse, Pynecil +from pynecil import ( + CommunicationError, + DeviceInfoResponse, + IronOSUpdate, + LatestRelease, + LiveDataResponse, + Pynecil, + SettingsDataResponse, + UpdateException, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from 
homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -16,38 +27,123 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=5) +SCAN_INTERVAL_GITHUB = timedelta(hours=3) +SCAN_INTERVAL_SETTINGS = timedelta(seconds=60) -class IronOSCoordinator(DataUpdateCoordinator[LiveDataResponse]): - """IronOS coordinator.""" +@dataclass +class IronOSCoordinators: + """IronOS data class holding coordinators.""" + + live_data: IronOSLiveDataCoordinator + settings: IronOSSettingsCoordinator + + +class IronOSBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]): + """IronOS base coordinator.""" device_info: DeviceInfoResponse config_entry: ConfigEntry - def __init__(self, hass: HomeAssistant, device: Pynecil) -> None: + def __init__( + self, + hass: HomeAssistant, + device: Pynecil, + update_interval: timedelta, + ) -> None: """Initialize IronOS coordinator.""" + super().__init__( hass, _LOGGER, name=DOMAIN, - update_interval=SCAN_INTERVAL, + update_interval=update_interval, + request_refresh_debouncer=Debouncer( + hass, _LOGGER, cooldown=3, immediate=False + ), ) self.device = device - async def _async_update_data(self) -> LiveDataResponse: - """Fetch data from Device.""" - - try: - return await self.device.get_live_data() - - except CommunicationError as e: - raise UpdateFailed("Cannot connect to device") from e - async def _async_setup(self) -> None: """Set up the coordinator.""" - try: self.device_info = await self.device.get_device_info() except CommunicationError as e: raise UpdateFailed("Cannot connect to device") from e + + +class IronOSLiveDataCoordinator(IronOSBaseCoordinator[LiveDataResponse]): + """IronOS coordinator.""" + + def __init__(self, hass: HomeAssistant, device: Pynecil) -> None: + """Initialize IronOS coordinator.""" + super().__init__(hass, device=device, update_interval=SCAN_INTERVAL) + + async def _async_update_data(self) -> LiveDataResponse: + """Fetch data from Device.""" + + try: + # device info is cached and won't be refetched on every + # coordinator refresh, only after the device has disconnected + # the device info is refetched + self.device_info = await self.device.get_device_info() + return await self.device.get_live_data() + + except CommunicationError as e: + raise UpdateFailed("Cannot connect to device") from e + + @property + def has_tip(self) -> bool: + """Return True if the tip is connected.""" + if ( + self.data.max_tip_temp_ability is not None + and self.data.live_temp is not None + ): + threshold = self.data.max_tip_temp_ability - 5 + return self.data.live_temp <= threshold + return False + + +class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[LatestRelease]): + """IronOS coordinator for retrieving update information from github.""" + + def __init__(self, hass: HomeAssistant, github: IronOSUpdate) -> None: + """Initialize IronOS coordinator.""" + super().__init__( + hass, + _LOGGER, + config_entry=None, + name=DOMAIN, + update_interval=SCAN_INTERVAL_GITHUB, + ) + self.github = github + + async def _async_update_data(self) -> LatestRelease: + """Fetch data from Github.""" + + try: + return await self.github.latest_release() + except UpdateException as e: + raise UpdateFailed("Failed to check for latest IronOS update") from e + + +class IronOSSettingsCoordinator(IronOSBaseCoordinator[SettingsDataResponse]): + """IronOS coordinator.""" + + def __init__(self, hass: HomeAssistant, device: Pynecil) -> 
None: + """Initialize IronOS coordinator.""" + super().__init__(hass, device=device, update_interval=SCAN_INTERVAL_SETTINGS) + + async def _async_update_data(self) -> SettingsDataResponse: + """Fetch data from Device.""" + + characteristics = set(self.async_contexts()) + + if self.device.is_connected and characteristics: + try: + return await self.device.get_settings(list(characteristics)) + except CommunicationError as e: + _LOGGER.debug("Failed to fetch settings", exc_info=e) + + return self.data or SettingsDataResponse() diff --git a/homeassistant/components/iron_os/entity.py b/homeassistant/components/iron_os/entity.py index 5a24b0a5567..684957a2197 100644 --- a/homeassistant/components/iron_os/entity.py +++ b/homeassistant/components/iron_os/entity.py @@ -2,28 +2,29 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import MANUFACTURER, MODEL -from .coordinator import IronOSCoordinator +from .coordinator import IronOSBaseCoordinator -class IronOSBaseEntity(CoordinatorEntity[IronOSCoordinator]): +class IronOSBaseEntity(CoordinatorEntity[IronOSBaseCoordinator]): """Base IronOS entity.""" _attr_has_entity_name = True def __init__( self, - coordinator: IronOSCoordinator, + coordinator: IronOSBaseCoordinator, entity_description: EntityDescription, + context: Any | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(coordinator) + super().__init__(coordinator, context=context) self.entity_description = entity_description self._attr_unique_id = ( diff --git a/homeassistant/components/iron_os/icons.json b/homeassistant/components/iron_os/icons.json index fa14b8134d0..eadcc17bb37 100644 --- a/homeassistant/components/iron_os/icons.json +++ b/homeassistant/components/iron_os/icons.json @@ -1,8 +1,73 @@ { "entity": { + "binary_sensor": { + "tip_connected": { + "default": "mdi:pencil-outline", + "state": { + "off": "mdi:pencil-off-outline" + } + } + }, "number": { "setpoint_temperature": { "default": "mdi:thermometer" + }, + "sleep_temperature": { + "default": "mdi:thermometer-low" + }, + "sleep_timeout": { + "default": "mdi:timer-sand" + }, + "qc_max_voltage": { + "default": "mdi:flash-alert-outline" + }, + "pd_timeout": { + "default": "mdi:timer-alert-outline" + }, + "boost_temp": { + "default": "mdi:thermometer-high" + }, + "shutdown_timeout": { + "default": "mdi:thermometer-off" + }, + "display_brightness": { + "default": "mdi:brightness-6" + }, + "voltage_div": { + "default": "mdi:call-split" + }, + "temp_increment_short": { + "default": "mdi:gesture-tap-button" + }, + "temp_increment_long": { + "default": "mdi:gesture-tap-button" + }, + "accel_sensitivity": { + "default": "mdi:motion" + }, + "calibration_offset": { + "default": "mdi:contrast" + }, + "hall_sensitivity": { + "default": "mdi:leak" + }, + "keep_awake_pulse_delay": { + "default": "mdi:clock-end" + }, + "keep_awake_pulse_duration": { + "default": "mdi:clock-start" + }, + "keep_awake_pulse_power": { + "default": "mdi:waves-arrow-up" + }, + "min_voltage_per_cell": { + "default": "mdi:fuel-cell" + }, + "min_dc_voltage_cells": { + "default": "mdi:battery-arrow-down" + }, + "power_limit": { + "default": "mdi:flash-alert" } }, "sensor": { diff --git a/homeassistant/components/iron_os/manifest.json 
b/homeassistant/components/iron_os/manifest.json index cfaf36880f2..8556d1e3609 100644 --- a/homeassistant/components/iron_os/manifest.json +++ b/homeassistant/components/iron_os/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", "loggers": ["pynecil"], - "requirements": ["pynecil==0.2.0"] + "requirements": ["pynecil==2.1.0"] } diff --git a/homeassistant/components/iron_os/number.py b/homeassistant/components/iron_os/number.py index 9230faec1f1..a288a61b021 100644 --- a/homeassistant/components/iron_os/number.py +++ b/homeassistant/components/iron_os/number.py @@ -6,37 +6,76 @@ from collections.abc import Callable from dataclasses import dataclass from enum import StrEnum -from pynecil import CharSetting, CommunicationError, LiveDataResponse +from pynecil import ( + CharSetting, + CommunicationError, + LiveDataResponse, + SettingsDataResponse, +) from homeassistant.components.number import ( + DEFAULT_MAX_VALUE, NumberDeviceClass, NumberEntity, NumberEntityDescription, NumberMode, ) -from homeassistant.const import UnitOfTemperature +from homeassistant.const import ( + EntityCategory, + UnitOfElectricPotential, + UnitOfPower, + UnitOfTemperature, + UnitOfTime, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import IronOSConfigEntry from .const import DOMAIN, MAX_TEMP, MIN_TEMP +from .coordinator import IronOSCoordinators from .entity import IronOSBaseEntity +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class IronOSNumberEntityDescription(NumberEntityDescription): """Describes IronOS number entity.""" - value_fn: Callable[[LiveDataResponse], float | int | None] - max_value_fn: Callable[[LiveDataResponse], float | int] - set_key: CharSetting + value_fn: Callable[[LiveDataResponse, SettingsDataResponse], float | int | None] + max_value_fn: Callable[[LiveDataResponse], float | int] | None = None + characteristic: CharSetting + raw_value_fn: Callable[[float], float | int] | None = None class PinecilNumber(StrEnum): """Number controls for Pinecil device.""" SETPOINT_TEMP = "setpoint_temperature" + SLEEP_TEMP = "sleep_temperature" + SLEEP_TIMEOUT = "sleep_timeout" + QC_MAX_VOLTAGE = "qc_max_voltage" + PD_TIMEOUT = "pd_timeout" + BOOST_TEMP = "boost_temp" + SHUTDOWN_TIMEOUT = "shutdown_timeout" + DISPLAY_BRIGHTNESS = "display_brightness" + POWER_LIMIT = "power_limit" + CALIBRATION_OFFSET = "calibration_offset" + HALL_SENSITIVITY = "hall_sensitivity" + MIN_VOLTAGE_PER_CELL = "min_voltage_per_cell" + ACCEL_SENSITIVITY = "accel_sensitivity" + KEEP_AWAKE_PULSE_POWER = "keep_awake_pulse_power" + KEEP_AWAKE_PULSE_DELAY = "keep_awake_pulse_delay" + KEEP_AWAKE_PULSE_DURATION = "keep_awake_pulse_duration" + VOLTAGE_DIV = "voltage_div" + TEMP_INCREMENT_SHORT = "temp_increment_short" + TEMP_INCREMENT_LONG = "temp_increment_long" + + +def multiply(value: float | None, multiplier: float) -> float | None: + """Multiply if not None.""" + return value * multiplier if value is not None else None PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] = ( @@ -45,13 +84,249 @@ PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] 
= ( translation_key=PinecilNumber.SETPOINT_TEMP, native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=NumberDeviceClass.TEMPERATURE, - value_fn=lambda data: data.setpoint_temp, - set_key=CharSetting.SETPOINT_TEMP, + value_fn=lambda data, _: data.setpoint_temp, + characteristic=CharSetting.SETPOINT_TEMP, mode=NumberMode.BOX, native_min_value=MIN_TEMP, native_step=5, max_value_fn=lambda data: min(data.max_tip_temp_ability or MAX_TEMP, MAX_TEMP), ), + IronOSNumberEntityDescription( + key=PinecilNumber.SLEEP_TEMP, + translation_key=PinecilNumber.SLEEP_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + value_fn=lambda _, settings: settings.get("sleep_temp"), + characteristic=CharSetting.SLEEP_TEMP, + mode=NumberMode.BOX, + native_min_value=MIN_TEMP, + native_max_value=MAX_TEMP, + native_step=10, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.BOOST_TEMP, + translation_key=PinecilNumber.BOOST_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + value_fn=lambda _, settings: settings.get("boost_temp"), + characteristic=CharSetting.BOOST_TEMP, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=MAX_TEMP, + native_step=10, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.QC_MAX_VOLTAGE, + translation_key=PinecilNumber.QC_MAX_VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=NumberDeviceClass.VOLTAGE, + value_fn=lambda _, settings: settings.get("qc_ideal_voltage"), + characteristic=CharSetting.QC_IDEAL_VOLTAGE, + mode=NumberMode.BOX, + native_min_value=9.0, + native_max_value=22.0, + native_step=0.1, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.PD_TIMEOUT, + translation_key=PinecilNumber.PD_TIMEOUT, + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=NumberDeviceClass.DURATION, + value_fn=lambda _, settings: settings.get("pd_negotiation_timeout"), + characteristic=CharSetting.PD_NEGOTIATION_TIMEOUT, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=5.0, + native_step=1, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.SHUTDOWN_TIMEOUT, + translation_key=PinecilNumber.SHUTDOWN_TIMEOUT, + native_unit_of_measurement=UnitOfTime.MINUTES, + device_class=NumberDeviceClass.DURATION, + value_fn=lambda _, settings: settings.get("shutdown_time"), + characteristic=CharSetting.SHUTDOWN_TIME, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=60, + native_step=1, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.DISPLAY_BRIGHTNESS, + translation_key=PinecilNumber.DISPLAY_BRIGHTNESS, + value_fn=lambda _, settings: settings.get("display_brightness"), + characteristic=CharSetting.DISPLAY_BRIGHTNESS, + mode=NumberMode.SLIDER, + native_min_value=1, + native_max_value=5, + native_step=1, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.SLEEP_TIMEOUT, + translation_key=PinecilNumber.SLEEP_TIMEOUT, + value_fn=lambda _, settings: settings.get("sleep_timeout"), + characteristic=CharSetting.SLEEP_TIMEOUT, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=15, + native_step=1, + entity_category=EntityCategory.CONFIG, + 
native_unit_of_measurement=UnitOfTime.MINUTES, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.POWER_LIMIT, + translation_key=PinecilNumber.POWER_LIMIT, + value_fn=lambda _, settings: settings.get("power_limit"), + characteristic=CharSetting.POWER_LIMIT, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=12, + native_step=0.1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfPower.WATT, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.CALIBRATION_OFFSET, + translation_key=PinecilNumber.CALIBRATION_OFFSET, + value_fn=lambda _, settings: settings.get("calibration_offset"), + characteristic=CharSetting.CALIBRATION_OFFSET, + mode=NumberMode.BOX, + native_min_value=100, + native_max_value=2500, + native_step=1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricPotential.MICROVOLT, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.HALL_SENSITIVITY, + translation_key=PinecilNumber.HALL_SENSITIVITY, + value_fn=lambda _, settings: settings.get("hall_sensitivity"), + characteristic=CharSetting.HALL_SENSITIVITY, + mode=NumberMode.SLIDER, + native_min_value=0, + native_max_value=9, + native_step=1, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.MIN_VOLTAGE_PER_CELL, + translation_key=PinecilNumber.MIN_VOLTAGE_PER_CELL, + value_fn=lambda _, settings: settings.get("min_voltage_per_cell"), + characteristic=CharSetting.MIN_VOLTAGE_PER_CELL, + mode=NumberMode.BOX, + native_min_value=2.4, + native_max_value=3.8, + native_step=0.1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.ACCEL_SENSITIVITY, + translation_key=PinecilNumber.ACCEL_SENSITIVITY, + value_fn=lambda _, settings: settings.get("accel_sensitivity"), + characteristic=CharSetting.ACCEL_SENSITIVITY, + mode=NumberMode.SLIDER, + native_min_value=0, + native_max_value=9, + native_step=1, + entity_category=EntityCategory.CONFIG, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.KEEP_AWAKE_PULSE_POWER, + translation_key=PinecilNumber.KEEP_AWAKE_PULSE_POWER, + value_fn=lambda _, settings: settings.get("keep_awake_pulse_power"), + characteristic=CharSetting.KEEP_AWAKE_PULSE_POWER, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=9.9, + native_step=0.1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfPower.WATT, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.KEEP_AWAKE_PULSE_DELAY, + translation_key=PinecilNumber.KEEP_AWAKE_PULSE_DELAY, + value_fn=( + lambda _, settings: multiply(settings.get("keep_awake_pulse_delay"), 2.5) + ), + characteristic=CharSetting.KEEP_AWAKE_PULSE_DELAY, + raw_value_fn=lambda value: value / 2.5, + mode=NumberMode.BOX, + native_min_value=2.5, + native_max_value=22.5, + native_step=2.5, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTime.SECONDS, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.KEEP_AWAKE_PULSE_DURATION, + translation_key=PinecilNumber.KEEP_AWAKE_PULSE_DURATION, + value_fn=( + lambda _, settings: multiply(settings.get("keep_awake_pulse_duration"), 250) + ), + characteristic=CharSetting.KEEP_AWAKE_PULSE_DURATION, + 
raw_value_fn=lambda value: value / 250, + mode=NumberMode.BOX, + native_min_value=250, + native_max_value=2250, + native_step=250, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTime.MILLISECONDS, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.VOLTAGE_DIV, + translation_key=PinecilNumber.VOLTAGE_DIV, + value_fn=(lambda _, settings: settings.get("voltage_div")), + characteristic=CharSetting.VOLTAGE_DIV, + raw_value_fn=lambda value: value, + mode=NumberMode.BOX, + native_min_value=360, + native_max_value=900, + native_step=1, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.TEMP_INCREMENT_SHORT, + translation_key=PinecilNumber.TEMP_INCREMENT_SHORT, + value_fn=(lambda _, settings: settings.get("temp_increment_short")), + characteristic=CharSetting.TEMP_INCREMENT_SHORT, + raw_value_fn=lambda value: value, + mode=NumberMode.BOX, + native_min_value=1, + native_max_value=50, + native_step=1, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + IronOSNumberEntityDescription( + key=PinecilNumber.TEMP_INCREMENT_LONG, + translation_key=PinecilNumber.TEMP_INCREMENT_LONG, + value_fn=(lambda _, settings: settings.get("temp_increment_long")), + characteristic=CharSetting.TEMP_INCREMENT_LONG, + raw_value_fn=lambda value: value, + mode=NumberMode.BOX, + native_min_value=5, + native_max_value=90, + native_step=5, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), ) @@ -74,23 +349,56 @@ class IronOSNumberEntity(IronOSBaseEntity, NumberEntity): entity_description: IronOSNumberEntityDescription + def __init__( + self, + coordinator: IronOSCoordinators, + entity_description: IronOSNumberEntityDescription, + ) -> None: + """Initialize the number entity.""" + super().__init__( + coordinator.live_data, entity_description, entity_description.characteristic + ) + + self.settings = coordinator.settings + async def async_set_native_value(self, value: float) -> None: """Update the current value.""" + if raw_value_fn := self.entity_description.raw_value_fn: + value = raw_value_fn(value) try: - await self.coordinator.device.write(self.entity_description.set_key, value) + await self.coordinator.device.write( + self.entity_description.characteristic, value + ) except CommunicationError as e: raise ServiceValidationError( translation_domain=DOMAIN, translation_key="submit_setting_failed", ) from e - self.async_write_ha_state() + await self.settings.async_request_refresh() @property def native_value(self) -> float | int | None: """Return sensor state.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn( + self.coordinator.data, self.settings.data + ) @property def native_max_value(self) -> float: """Return sensor state.""" - return self.entity_description.max_value_fn(self.coordinator.data) + + if self.entity_description.max_value_fn is not None: + return self.entity_description.max_value_fn(self.coordinator.data) + + return self.entity_description.native_max_value or DEFAULT_MAX_VALUE + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + + await super().async_added_to_hass() + self.async_on_remove( + self.settings.async_add_listener( + self._handle_coordinator_update, self.entity_description.characteristic + ) + ) + await 
self.settings.async_request_refresh() diff --git a/homeassistant/components/iron_os/quality_scale.yaml b/homeassistant/components/iron_os/quality_scale.yaml new file mode 100644 index 00000000000..5ede3d6971d --- /dev/null +++ b/homeassistant/components/iron_os/quality_scale.yaml @@ -0,0 +1,84 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Integration does not have actions + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: todo + config-flow: done + dependency-transparency: done + docs-actions: + status: done + comment: Integration does register actions aside from entity actions + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: Integration does not register events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: todo + test-before-setup: todo + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: Integration does not have actions + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: Integration has no options flow + docs-installation-parameters: + status: todo + comment: Needs bluetooth address as parameter + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: + status: exempt + comment: Devices don't require authentication + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: Device is not connected to an ip network. Other information from discovery is immutable and does not require updating. + discovery: done + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: Only one device per config entry. New devices are set up as new entries. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: + status: exempt + comment: Reconfiguration would force a new config entry + repair-issues: + status: exempt + comment: no repairs/issues + stale-devices: todo + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: Device doesn't make http requests. + strict-typing: done diff --git a/homeassistant/components/iron_os/sensor.py b/homeassistant/components/iron_os/sensor.py index 095ffd254df..d178b46723f 100644 --- a/homeassistant/components/iron_os/sensor.py +++ b/homeassistant/components/iron_os/sensor.py @@ -28,8 +28,12 @@ from homeassistant.helpers.typing import StateType from . import IronOSConfigEntry from .const import OHM +from .coordinator import IronOSLiveDataCoordinator from .entity import IronOSBaseEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + class PinecilSensor(StrEnum): """Pinecil Sensors.""" @@ -54,7 +58,7 @@ class PinecilSensor(StrEnum): class IronOSSensorEntityDescription(SensorEntityDescription): """IronOS sensor entity descriptions.""" - value_fn: Callable[[LiveDataResponse], StateType] + value_fn: Callable[[LiveDataResponse, bool], StateType] PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] 
= ( @@ -64,7 +68,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.live_temp, + value_fn=lambda data, has_tip: data.live_temp if has_tip else None, ), IronOSSensorEntityDescription( key=PinecilSensor.DC_VOLTAGE, @@ -72,7 +76,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfElectricPotential.VOLT, device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.dc_voltage, + value_fn=lambda data, _: data.dc_voltage, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -81,7 +85,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.handle_temp, + value_fn=lambda data, _: data.handle_temp, ), IronOSSensorEntityDescription( key=PinecilSensor.PWMLEVEL, @@ -90,7 +94,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( suggested_display_precision=0, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.pwm_level, + value_fn=lambda data, _: data.pwm_level, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -98,15 +102,18 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( translation_key=PinecilSensor.POWER_SRC, device_class=SensorDeviceClass.ENUM, options=[item.name.lower() for item in PowerSource], - value_fn=lambda data: data.power_src.name.lower() if data.power_src else None, + value_fn=( + lambda data, _: data.power_src.name.lower() if data.power_src else None + ), entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( key=PinecilSensor.TIP_RESISTANCE, translation_key=PinecilSensor.TIP_RESISTANCE, native_unit_of_measurement=OHM, - value_fn=lambda data: data.tip_resistance, + value_fn=lambda data, has_tip: data.tip_resistance if has_tip else None, entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, ), IronOSSensorEntityDescription( key=PinecilSensor.UPTIME, @@ -114,7 +121,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda data: data.uptime, + value_fn=lambda data, _: data.uptime, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -123,7 +130,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.movement_time, + value_fn=lambda data, _: data.movement_time, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -131,17 +138,17 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] 
= ( translation_key=PinecilSensor.MAX_TIP_TEMP_ABILITY, native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, - value_fn=lambda data: data.max_tip_temp_ability, + value_fn=lambda data, has_tip: data.max_tip_temp_ability if has_tip else None, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( key=PinecilSensor.TIP_VOLTAGE, translation_key=PinecilSensor.TIP_VOLTAGE, - native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT, + native_unit_of_measurement=UnitOfElectricPotential.MICROVOLT, device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=3, - value_fn=lambda data: data.tip_voltage, + suggested_display_precision=0, + value_fn=lambda data, has_tip: data.tip_voltage if has_tip else None, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -149,7 +156,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( translation_key=PinecilSensor.HALL_SENSOR, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, - value_fn=lambda data: data.hall_sensor, + value_fn=lambda data, _: data.hall_sensor, entity_category=EntityCategory.DIAGNOSTIC, ), IronOSSensorEntityDescription( @@ -158,7 +165,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.ENUM, options=[item.name.lower() for item in OperatingMode], value_fn=( - lambda data: data.operating_mode.name.lower() + lambda data, _: data.operating_mode.name.lower() if data.operating_mode else None ), @@ -169,7 +176,7 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.estimated_power, + value_fn=lambda data, _: data.estimated_power, ), ) @@ -180,7 +187,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensors from a config entry.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.live_data async_add_entities( IronOSSensorEntity(coordinator, description) @@ -192,8 +199,11 @@ class IronOSSensorEntity(IronOSBaseEntity, SensorEntity): """Representation of a IronOS sensor entity.""" entity_description: IronOSSensorEntityDescription + coordinator: IronOSLiveDataCoordinator @property def native_value(self) -> StateType: """Return sensor state.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn( + self.coordinator.data, self.coordinator.has_tip + ) diff --git a/homeassistant/components/iron_os/strings.json b/homeassistant/components/iron_os/strings.json index 75584fe191c..13528104f8c 100644 --- a/homeassistant/components/iron_os/strings.json +++ b/homeassistant/components/iron_os/strings.json @@ -5,10 +5,13 @@ "description": "[%key:component::bluetooth::config::step::user::description%]", "data": { "address": "[%key:common::config_flow::data::device%]" + }, + "data_description": { + "address": "Ensure your device is powered on and within Bluetooth range before continuing" } }, "bluetooth_confirm": { - "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]" + "description": "Do you want to set up {name}?\n\n*Ensure your device is powered on and within Bluetooth range before continuing*" } }, "abort": { @@ -17,9 +20,68 @@ } }, "entity": { + "binary_sensor": { + 
"tip_connected": { + "name": "Soldering tip" + } + }, "number": { "setpoint_temperature": { "name": "Setpoint temperature" + }, + "sleep_temperature": { + "name": "Sleep temperature" + }, + "sleep_timeout": { + "name": "Sleep timeout" + }, + "qc_max_voltage": { + "name": "Quick Charge voltage" + }, + "pd_timeout": { + "name": "Power Delivery timeout" + }, + "boost_temp": { + "name": "Boost temperature" + }, + "shutdown_timeout": { + "name": "Shutdown timeout" + }, + "display_brightness": { + "name": "Display brightness" + }, + "power_limit": { + "name": "Power limit" + }, + "calibration_offset": { + "name": "Calibration offset" + }, + "hall_sensitivity": { + "name": "Hall effect sensitivity" + }, + "min_voltage_per_cell": { + "name": "Min. voltage per cell" + }, + "accel_sensitivity": { + "name": "Motion sensitivity" + }, + "keep_awake_pulse_power": { + "name": "Keep-awake pulse intensity" + }, + "keep_awake_pulse_delay": { + "name": "Keep-awake pulse delay" + }, + "keep_awake_pulse_duration": { + "name": "Keep-awake pulse duration" + }, + "voltage_div": { + "name": "Voltage divider" + }, + "temp_increment_short": { + "name": "Short-press temperature step" + }, + "temp_increment_long": { + "name": "Long-press temperature step" } }, "sensor": { diff --git a/homeassistant/components/iron_os/update.py b/homeassistant/components/iron_os/update.py new file mode 100644 index 00000000000..b431d321f24 --- /dev/null +++ b/homeassistant/components/iron_os/update.py @@ -0,0 +1,100 @@ +"""Update platform for IronOS integration.""" + +from __future__ import annotations + +from homeassistant.components.update import ( + UpdateDeviceClass, + UpdateEntity, + UpdateEntityDescription, + UpdateEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import IRON_OS_KEY, IronOSConfigEntry, IronOSLiveDataCoordinator +from .coordinator import IronOSFirmwareUpdateCoordinator +from .entity import IronOSBaseEntity + +PARALLEL_UPDATES = 0 + +UPDATE_DESCRIPTION = UpdateEntityDescription( + key="firmware", + device_class=UpdateDeviceClass.FIRMWARE, +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IronOSConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up IronOS update platform.""" + + coordinator = entry.runtime_data.live_data + + async_add_entities( + [IronOSUpdate(coordinator, hass.data[IRON_OS_KEY], UPDATE_DESCRIPTION)] + ) + + +class IronOSUpdate(IronOSBaseEntity, UpdateEntity): + """Representation of an IronOS update entity.""" + + _attr_supported_features = UpdateEntityFeature.RELEASE_NOTES + + def __init__( + self, + coordinator: IronOSLiveDataCoordinator, + firmware_update: IronOSFirmwareUpdateCoordinator, + entity_description: UpdateEntityDescription, + ) -> None: + """Initialize the update entity.""" + self.firmware_update = firmware_update + super().__init__(coordinator, entity_description) + + @property + def installed_version(self) -> str | None: + """IronOS version on the device.""" + + return self.coordinator.device_info.build + + @property + def title(self) -> str | None: + """Title of the IronOS release.""" + + return f"IronOS {self.firmware_update.data.name}" + + @property + def release_url(self) -> str | None: + """URL to the full release notes of the latest IronOS version available.""" + + return self.firmware_update.data.html_url + + @property + def latest_version(self) -> str | None: + """Latest IronOS version available for install.""" + + return self.firmware_update.data.tag_name + + async def async_release_notes(self) -> str | None: + """Return the release notes.""" + + return self.firmware_update.data.body + + async def async_added_to_hass(self) -> None: + """When entity is added to hass. + + Register extra update listener for the firmware update coordinator.
+ """ + await super().async_added_to_hass() + self.async_on_remove( + self.firmware_update.async_add_listener(self._handle_coordinator_update) + ) + + @property + def available(self) -> bool: + """Return if entity is available.""" + return ( + self.installed_version is not None + and self.firmware_update.last_update_success + ) diff --git a/homeassistant/components/isal/manifest.json b/homeassistant/components/isal/manifest.json index d367b1c8eb9..1aa5666f410 100644 --- a/homeassistant/components/isal/manifest.json +++ b/homeassistant/components/isal/manifest.json @@ -6,5 +6,5 @@ "integration_type": "system", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["isal==1.6.1"] + "requirements": ["isal==1.7.1"] } diff --git a/homeassistant/components/iskra/__init__.py b/homeassistant/components/iskra/__init__.py new file mode 100644 index 00000000000..b841da9df26 --- /dev/null +++ b/homeassistant/components/iskra/__init__.py @@ -0,0 +1,100 @@ +"""The iskra integration.""" + +from __future__ import annotations + +from pyiskra.adapters import Modbus, RestAPI +from pyiskra.devices import Device +from pyiskra.exceptions import DeviceConnectionError, DeviceNotSupported, NotAuthorised + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_ADDRESS, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PROTOCOL, + CONF_USERNAME, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr + +from .const import DOMAIN, MANUFACTURER +from .coordinator import IskraDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + + +type IskraConfigEntry = ConfigEntry[list[IskraDataUpdateCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: IskraConfigEntry) -> bool: + """Set up iskra device from a config entry.""" + conf = entry.data + adapter = None + + if conf[CONF_PROTOCOL] == "modbus_tcp": + adapter = Modbus( + ip_address=conf[CONF_HOST], + protocol="tcp", + port=conf[CONF_PORT], + modbus_address=conf[CONF_ADDRESS], + ) + elif conf[CONF_PROTOCOL] == "rest_api": + authentication = None + if (username := conf.get(CONF_USERNAME)) is not None and ( + password := conf.get(CONF_PASSWORD) + ) is not None: + authentication = { + "username": username, + "password": password, + } + adapter = RestAPI(ip_address=conf[CONF_HOST], authentication=authentication) + + # Try connecting to the device and create pyiskra device object + try: + base_device = await Device.create_device(adapter) + except DeviceConnectionError as e: + raise ConfigEntryNotReady("Cannot connect to the device") from e + except NotAuthorised as e: + raise ConfigEntryNotReady("Not authorised to connect to the device") from e + except DeviceNotSupported as e: + raise ConfigEntryNotReady("Device not supported") from e + + # Initialize the device + await base_device.init() + + # if the device is a gateway, add all child devices, otherwise add the device itself. 
+ if base_device.is_gateway: + # Add the gateway device to the device registry + device_registry = dr.async_get(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, base_device.serial)}, + manufacturer=MANUFACTURER, + name=base_device.model, + model=base_device.model, + sw_version=base_device.fw_version, + ) + + coordinators = [ + IskraDataUpdateCoordinator(hass, child_device) + for child_device in base_device.get_child_devices() + ] + else: + coordinators = [IskraDataUpdateCoordinator(hass, base_device)] + + for coordinator in coordinators: + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinators + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: IskraConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/iskra/config_flow.py b/homeassistant/components/iskra/config_flow.py new file mode 100644 index 00000000000..b67b9ba3839 --- /dev/null +++ b/homeassistant/components/iskra/config_flow.py @@ -0,0 +1,253 @@ +"""Config flow for iskra integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from pyiskra.adapters import Modbus, RestAPI +from pyiskra.exceptions import ( + DeviceConnectionError, + DeviceTimeoutError, + InvalidResponseCode, + NotAuthorised, +) +from pyiskra.helper import BasicInfo +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + CONF_ADDRESS, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PROTOCOL, + CONF_USERNAME, +) +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_PROTOCOL, default="rest_api"): SelectSelector( + SelectSelectorConfig( + options=["rest_api", "modbus_tcp"], + mode=SelectSelectorMode.LIST, + translation_key="protocol", + ), + ), + } +) + +STEP_AUTHENTICATION_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + +# CONF_ADDRESS range validation is done later in code, because defining the range in voluptuous renders the field as a slider +STEP_MODBUS_TCP_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_PORT, default=10001): vol.All( + vol.Coerce(int), vol.Range(min=0, max=65535) + ), + vol.Required(CONF_ADDRESS, default=33): NumberSelector( + NumberSelectorConfig(min=1, max=255, mode=NumberSelectorMode.BOX) + ), + } +) + + +async def test_rest_api_connection(host: str, user_input: dict[str, Any]) -> BasicInfo: + """Check if the RestAPI requires authentication.""" + + rest_api = RestAPI(ip_address=host, authentication=user_input) + try: + basic_info = await rest_api.get_basic_info() + except NotAuthorised as e: + raise NotAuthorised from e + except (DeviceConnectionError, DeviceTimeoutError, InvalidResponseCode) as e: + raise CannotConnect from e + except Exception as e: + _LOGGER.error("Unexpected exception: %s", e) + raise UnknownException from e + + return basic_info + + +async def test_modbus_connection(host: str, user_input: dict[str, Any]) -> BasicInfo:
+ """Test the Modbus connection.""" + modbus_api = Modbus( + ip_address=host, + protocol="tcp", + port=user_input[CONF_PORT], + modbus_address=user_input[CONF_ADDRESS], + ) + + try: + basic_info = await modbus_api.get_basic_info() + except NotAuthorised as e: + raise NotAuthorised from e + except (DeviceConnectionError, DeviceTimeoutError, InvalidResponseCode) as e: + raise CannotConnect from e + except Exception as e: + _LOGGER.error("Unexpected exception: %s", e) + raise UnknownException from e + + return basic_info + + +class IskraConfigFlowFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for iskra.""" + + VERSION = 1 + host: str + protocol: str + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors: dict[str, str] = {} + if user_input is not None: + self.host = user_input[CONF_HOST] + self.protocol = user_input[CONF_PROTOCOL] + if self.protocol == "rest_api": + # Check if authentication is required. + try: + device_info = await test_rest_api_connection(self.host, user_input) + except CannotConnect: + errors["base"] = "cannot_connect" + except NotAuthorised: + # Proceed to authentication step. + return await self.async_step_authentication() + except UnknownException: + errors["base"] = "unknown" + # If the connection was not successful, show an error. + + # If the connection was successful, create the device. + if not errors: + return await self._create_entry( + host=self.host, + protocol=self.protocol, + device_info=device_info, + user_input=user_input, + ) + + if self.protocol == "modbus_tcp": + # Proceed to modbus step. + return await self.async_step_modbus_tcp() + + return self.async_show_form( + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + ) + + async def async_step_authentication( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the authentication step.""" + errors: dict[str, str] = {} + if user_input is not None: + try: + device_info = await test_rest_api_connection(self.host, user_input) + # If the connection failed, abort. + except CannotConnect: + errors["base"] = "cannot_connect" + # If the authentication failed, show an error and authentication form again. + except NotAuthorised: + errors["base"] = "invalid_auth" + except UnknownException: + errors["base"] = "unknown" + + # if the connection was successful, create the device. + if not errors: + return await self._create_entry( + self.host, + self.protocol, + device_info=device_info, + user_input=user_input, + ) + + # If there's no user_input or there was an error, show the authentication form again. + return self.async_show_form( + step_id="authentication", + data_schema=STEP_AUTHENTICATION_DATA_SCHEMA, + errors=errors, + ) + + async def async_step_modbus_tcp( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the Modbus TCP step.""" + errors: dict[str, str] = {} + + # If there's user_input, check the connection. + if user_input is not None: + # convert to integer + user_input[CONF_ADDRESS] = int(user_input[CONF_ADDRESS]) + + try: + device_info = await test_modbus_connection(self.host, user_input) + + # If the connection failed, show an error. + except CannotConnect: + errors["base"] = "cannot_connect" + except UnknownException: + errors["base"] = "unknown" + + # If the connection was successful, create the device. 
+ if not errors: + return await self._create_entry( + host=self.host, + protocol=self.protocol, + device_info=device_info, + user_input=user_input, + ) + + # If there's no user_input or there was an error, show the modbus form again. + return self.async_show_form( + step_id="modbus_tcp", + data_schema=STEP_MODBUS_TCP_DATA_SCHEMA, + errors=errors, + ) + + async def _create_entry( + self, + host: str, + protocol: str, + device_info: BasicInfo, + user_input: dict[str, Any], + ) -> ConfigFlowResult: + """Create the config entry.""" + + await self.async_set_unique_id(device_info.serial) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=device_info.model, + data={CONF_HOST: host, CONF_PROTOCOL: protocol, **user_input}, + ) + + +class CannotConnect(HomeAssistantError): + """Error to indicate we cannot connect.""" + + +class UnknownException(HomeAssistantError): + """Error to indicate an unknown exception occurred.""" diff --git a/homeassistant/components/iskra/const.py b/homeassistant/components/iskra/const.py new file mode 100644 index 00000000000..a4ed36b50b2 --- /dev/null +++ b/homeassistant/components/iskra/const.py @@ -0,0 +1,29 @@ +"""Constants for the iskra integration.""" + +DOMAIN = "iskra" +MANUFACTURER = "Iskra d.o.o" + +# POWER +ATTR_TOTAL_APPARENT_POWER = "total_apparent_power" +ATTR_TOTAL_REACTIVE_POWER = "total_reactive_power" +ATTR_TOTAL_ACTIVE_POWER = "total_active_power" +ATTR_PHASE1_POWER = "phase1_power" +ATTR_PHASE2_POWER = "phase2_power" +ATTR_PHASE3_POWER = "phase3_power" + +# Voltage +ATTR_PHASE1_VOLTAGE = "phase1_voltage" +ATTR_PHASE2_VOLTAGE = "phase2_voltage" +ATTR_PHASE3_VOLTAGE = "phase3_voltage" + +# Current +ATTR_PHASE1_CURRENT = "phase1_current" +ATTR_PHASE2_CURRENT = "phase2_current" +ATTR_PHASE3_CURRENT = "phase3_current" + +# Counters +ATTR_NON_RESETTABLE_COUNTER = "non_resettable_counter_{}" +ATTR_RESETTABLE_COUNTER = "resettable_counter_{}" + +# Frequency +ATTR_FREQUENCY = "frequency" diff --git a/homeassistant/components/iskra/coordinator.py b/homeassistant/components/iskra/coordinator.py new file mode 100644 index 00000000000..175d8ed4c86 --- /dev/null +++ b/homeassistant/components/iskra/coordinator.py @@ -0,0 +1,57 @@ +"""Coordinator for Iskra integration.""" + +from datetime import timedelta +import logging + +from pyiskra.devices import Device +from pyiskra.exceptions import ( + DeviceConnectionError, + DeviceTimeoutError, + InvalidResponseCode, + NotAuthorised, +) + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class IskraDataUpdateCoordinator(DataUpdateCoordinator[None]): + """Class to manage fetching Iskra data.""" + + def __init__(self, hass: HomeAssistant, device: Device) -> None: + """Initialize.""" + self.device = device + + update_interval = timedelta(seconds=60) + + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=update_interval, + ) + + async def _async_update_data(self) -> None: + """Fetch data from Iskra device.""" + try: + await self.device.update_status() + except DeviceTimeoutError as e: + raise UpdateFailed( + f"Timeout error occurred while updating data for device {self.device.serial}" + ) from e + except DeviceConnectionError as e: + raise UpdateFailed( + f"Connection error occurred while updating data for device {self.device.serial}" + ) from e + except NotAuthorised as e: + raise UpdateFailed( + f"Not authorised to 
fetch data from device {self.device.serial}" + ) from e + except InvalidResponseCode as e: + raise UpdateFailed( + f"Invalid response code from device {self.device.serial}" + ) from e diff --git a/homeassistant/components/iskra/entity.py b/homeassistant/components/iskra/entity.py new file mode 100644 index 00000000000..f1c01d3eaa4 --- /dev/null +++ b/homeassistant/components/iskra/entity.py @@ -0,0 +1,38 @@ +"""Base entity for Iskra devices.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER +from .coordinator import IskraDataUpdateCoordinator + + +class IskraEntity(CoordinatorEntity[IskraDataUpdateCoordinator]): + """Representation of a base Iskra device.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: IskraDataUpdateCoordinator) -> None: + """Initialize the Iskra device.""" + super().__init__(coordinator) + self.device = coordinator.device + gateway = self.device.parent_device + + if gateway is not None: + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.device.serial)}, + manufacturer=MANUFACTURER, + model=self.device.model, + name=self.device.model, + sw_version=self.device.fw_version, + serial_number=self.device.serial, + via_device=(DOMAIN, gateway.serial), + ) + else: + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.device.serial)}, + manufacturer=MANUFACTURER, + model=self.device.model, + sw_version=self.device.fw_version, + serial_number=self.device.serial, + ) diff --git a/homeassistant/components/iskra/manifest.json b/homeassistant/components/iskra/manifest.json new file mode 100644 index 00000000000..94f20b4d93c --- /dev/null +++ b/homeassistant/components/iskra/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "iskra", + "name": "iskra", + "codeowners": ["@iskramis"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/iskra", + "integration_type": "hub", + "iot_class": "local_polling", + "loggers": ["pyiskra"], + "requirements": ["pyiskra==0.1.14"] +} diff --git a/homeassistant/components/iskra/sensor.py b/homeassistant/components/iskra/sensor.py new file mode 100644 index 00000000000..df9e3ec53f9 --- /dev/null +++ b/homeassistant/components/iskra/sensor.py @@ -0,0 +1,284 @@ +"""Support for Iskra.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass, replace + +from pyiskra.devices import Device +from pyiskra.helper import Counter, CounterType + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + UnitOfApparentPower, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfFrequency, + UnitOfPower, + UnitOfReactivePower, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .
import IskraConfigEntry +from .const import ( + ATTR_FREQUENCY, + ATTR_NON_RESETTABLE_COUNTER, + ATTR_PHASE1_CURRENT, + ATTR_PHASE1_POWER, + ATTR_PHASE1_VOLTAGE, + ATTR_PHASE2_CURRENT, + ATTR_PHASE2_POWER, + ATTR_PHASE2_VOLTAGE, + ATTR_PHASE3_CURRENT, + ATTR_PHASE3_POWER, + ATTR_PHASE3_VOLTAGE, + ATTR_RESETTABLE_COUNTER, + ATTR_TOTAL_ACTIVE_POWER, + ATTR_TOTAL_APPARENT_POWER, + ATTR_TOTAL_REACTIVE_POWER, +) +from .coordinator import IskraDataUpdateCoordinator +from .entity import IskraEntity + + +@dataclass(frozen=True, kw_only=True) +class IskraSensorEntityDescription(SensorEntityDescription): + """Describes Iskra sensor entity.""" + + value_func: Callable[[Device], float | None] + + +SENSOR_TYPES: tuple[IskraSensorEntityDescription, ...] = ( + # Power + IskraSensorEntityDescription( + key=ATTR_TOTAL_ACTIVE_POWER, + translation_key="total_active_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.measurements.total.active_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_TOTAL_REACTIVE_POWER, + translation_key="total_reactive_power", + device_class=SensorDeviceClass.REACTIVE_POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, + value_func=lambda device: device.measurements.total.reactive_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_TOTAL_APPARENT_POWER, + translation_key="total_apparent_power", + device_class=SensorDeviceClass.APPARENT_POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + value_func=lambda device: device.measurements.total.apparent_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE1_POWER, + translation_key="phase1_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.measurements.phases[0].active_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE2_POWER, + translation_key="phase2_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.measurements.phases[1].active_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE3_POWER, + translation_key="phase3_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.measurements.phases[2].active_power.value, + ), + # Voltage + IskraSensorEntityDescription( + key=ATTR_PHASE1_VOLTAGE, + translation_key="phase1_voltage", + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_func=lambda device: device.measurements.phases[0].voltage.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE2_VOLTAGE, + translation_key="phase2_voltage", + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_func=lambda device: device.measurements.phases[1].voltage.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE3_VOLTAGE, + translation_key="phase3_voltage", + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + 
native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_func=lambda device: device.measurements.phases[2].voltage.value, + ), + # Current + IskraSensorEntityDescription( + key=ATTR_PHASE1_CURRENT, + translation_key="phase1_current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_func=lambda device: device.measurements.phases[0].current.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE2_CURRENT, + translation_key="phase2_current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_func=lambda device: device.measurements.phases[1].current.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE3_CURRENT, + translation_key="phase3_current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_func=lambda device: device.measurements.phases[2].current.value, + ), + # Frequency + IskraSensorEntityDescription( + key=ATTR_FREQUENCY, + translation_key="frequency", + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfFrequency.HERTZ, + value_func=lambda device: device.measurements.frequency.value, + ), +) + + +def get_counter_entity_description( + counter: Counter, + index: int, + entity_name: str, +) -> IskraSensorEntityDescription: + """Dynamically create IskraSensor object as energy meter's counters are customizable.""" + + key = entity_name.format(index + 1) + + if entity_name == ATTR_NON_RESETTABLE_COUNTER: + entity_description = IskraSensorEntityDescription( + key=key, + translation_key=key, + state_class=SensorStateClass.TOTAL_INCREASING, + value_func=lambda device: device.counters.non_resettable[index].value, + native_unit_of_measurement=counter.units, + ) + else: + entity_description = IskraSensorEntityDescription( + key=key, + translation_key=key, + state_class=SensorStateClass.TOTAL_INCREASING, + value_func=lambda device: device.counters.resettable[index].value, + native_unit_of_measurement=counter.units, + ) + + # Set unit of measurement and device class based on counter type + # HA's Energy device class supports only active energy + if counter.counter_type in [CounterType.ACTIVE_IMPORT, CounterType.ACTIVE_EXPORT]: + entity_description = replace( + entity_description, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + ) + + return entity_description + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IskraConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Iskra sensors based on config_entry.""" + + # Device that uses the config entry. + coordinators = entry.runtime_data + + entities: list[IskraSensor] = [] + + # Add sensors for each device. + for coordinator in coordinators: + device = coordinator.device + sensors = [] + + # Add measurement sensors. 
+ if device.supports_measurements: + sensors.append(ATTR_FREQUENCY) + sensors.append(ATTR_TOTAL_APPARENT_POWER) + sensors.append(ATTR_TOTAL_ACTIVE_POWER) + sensors.append(ATTR_TOTAL_REACTIVE_POWER) + if device.phases >= 1: + sensors.append(ATTR_PHASE1_VOLTAGE) + sensors.append(ATTR_PHASE1_POWER) + sensors.append(ATTR_PHASE1_CURRENT) + if device.phases >= 2: + sensors.append(ATTR_PHASE2_VOLTAGE) + sensors.append(ATTR_PHASE2_POWER) + sensors.append(ATTR_PHASE2_CURRENT) + if device.phases >= 3: + sensors.append(ATTR_PHASE3_VOLTAGE) + sensors.append(ATTR_PHASE3_POWER) + sensors.append(ATTR_PHASE3_CURRENT) + + entities.extend( + IskraSensor(coordinator, description) + for description in SENSOR_TYPES + if description.key in sensors + ) + + if device.supports_counters: + for index, counter in enumerate(device.counters.non_resettable[:4]): + description = get_counter_entity_description( + counter, index, ATTR_NON_RESETTABLE_COUNTER + ) + entities.append(IskraSensor(coordinator, description)) + + for index, counter in enumerate(device.counters.resettable[:8]): + description = get_counter_entity_description( + counter, index, ATTR_RESETTABLE_COUNTER + ) + entities.append(IskraSensor(coordinator, description)) + + async_add_entities(entities) + + +class IskraSensor(IskraEntity, SensorEntity): + """Representation of a Sensor.""" + + entity_description: IskraSensorEntityDescription + + def __init__( + self, + coordinator: IskraDataUpdateCoordinator, + description: IskraSensorEntityDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.device.serial}_{description.key}" + + @property + def native_value(self) -> float | None: + """Return the state of the sensor.""" + return self.entity_description.value_func(self.device) diff --git a/homeassistant/components/iskra/strings.json b/homeassistant/components/iskra/strings.json new file mode 100644 index 00000000000..5818cdfa1db --- /dev/null +++ b/homeassistant/components/iskra/strings.json @@ -0,0 +1,128 @@ +{ + "config": { + "step": { + "user": { + "title": "Configure Iskra Device", + "description": "Enter the IP address of your Iskra Device and select protocol.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "Hostname or IP address of your Iskra device." + } + }, + "authentication": { + "title": "Configure Rest API Credentials", + "description": "Enter username and password", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "modbus_tcp": { + "title": "Configure Modbus TCP", + "description": "Enter Modbus TCP port and device's Modbus address.", + "data": { + "port": "[%key:common::config_flow::data::port%]", + "address": "Modbus address" + }, + "data_description": { + "port": "Port number can be found in the device's settings menu.", + "address": "Modbus address can be found in the device's settings menu." 
+ } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "selector": { + "protocol": { + "options": { + "rest_api": "Rest API", + "modbus_tcp": "Modbus TCP" + } + } + }, + "entity": { + "sensor": { + "total_active_power": { + "name": "Total active power" + }, + "total_apparent_power": { + "name": "Total apparent power" + }, + "total_reactive_power": { + "name": "Total reactive power" + }, + "phase1_power": { + "name": "Phase 1 power" + }, + "phase2_power": { + "name": "Phase 2 power" + }, + "phase3_power": { + "name": "Phase 3 power" + }, + "phase1_voltage": { + "name": "Phase 1 voltage" + }, + "phase2_voltage": { + "name": "Phase 2 voltage" + }, + "phase3_voltage": { + "name": "Phase 3 voltage" + }, + "phase1_current": { + "name": "Phase 1 current" + }, + "phase2_current": { + "name": "Phase 2 current" + }, + "phase3_current": { + "name": "Phase 3 current" + }, + "non_resettable_counter_1": { + "name": "Non Resettable counter 1" + }, + "non_resettable_counter_2": { + "name": "Non Resettable counter 2" + }, + "non_resettable_counter_3": { + "name": "Non Resettable counter 3" + }, + "non_resettable_counter_4": { + "name": "Non Resettable counter 4" + }, + "resettable_counter_1": { + "name": "Resettable counter 1" + }, + "resettable_counter_2": { + "name": "Resettable counter 2" + }, + "resettable_counter_3": { + "name": "Resettable counter 3" + }, + "resettable_counter_4": { + "name": "Resettable counter 4" + }, + "resettable_counter_5": { + "name": "Resettable counter 5" + }, + "resettable_counter_6": { + "name": "Resettable counter 6" + }, + "resettable_counter_7": { + "name": "Resettable counter 7" + }, + "resettable_counter_8": { + "name": "Resettable counter 8" + } + } + } +} diff --git a/homeassistant/components/islamic_prayer_times/config_flow.py b/homeassistant/components/islamic_prayer_times/config_flow.py index 2db89183499..ce911ccc49d 100644 --- a/homeassistant/components/islamic_prayer_times/config_flow.py +++ b/homeassistant/components/islamic_prayer_times/config_flow.py @@ -52,7 +52,7 @@ class IslamicPrayerFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> IslamicPrayerOptionsFlowHandler: """Get the options flow for this handler.""" - return IslamicPrayerOptionsFlowHandler(config_entry) + return IslamicPrayerOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -93,10 +93,6 @@ class IslamicPrayerFlowHandler(ConfigFlow, domain=DOMAIN): class IslamicPrayerOptionsFlowHandler(OptionsFlow): """Handle Islamic Prayer client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/israel_rail/config_flow.py b/homeassistant/components/israel_rail/config_flow.py index 3adecaf428c..0f78c227d0a 100644 --- a/homeassistant/components/israel_rail/config_flow.py +++ b/homeassistant/components/israel_rail/config_flow.py @@ -12,6 +12,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import CONF_DESTINATION, CONF_START, DOMAIN STATIONS_NAMES = [station["Heb"] for station in STATIONS.values()] 
+STATIONS_NAMES.sort() DATA_SCHEMA = vol.Schema( { diff --git a/homeassistant/components/iss/__init__.py b/homeassistant/components/iss/__init__.py index 606263ce769..dbbcc8b6c51 100644 --- a/homeassistant/components/iss/__init__.py +++ b/homeassistant/components/iss/__init__.py @@ -53,6 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update, update_interval=timedelta(seconds=60), diff --git a/homeassistant/components/iss/config_flow.py b/homeassistant/components/iss/config_flow.py index 80644698239..eaf01a6d094 100644 --- a/homeassistant/components/iss/config_flow.py +++ b/homeassistant/components/iss/config_flow.py @@ -1,5 +1,7 @@ """Config flow to configure iss component.""" +from __future__ import annotations + import voluptuous as vol from homeassistant.config_entries import ( @@ -23,16 +25,12 @@ class ISSConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - # Check if already configured - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is not None: return self.async_create_entry( title=DEFAULT_NAME, @@ -46,16 +44,10 @@ class ISSConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Config flow options handler for iss.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - self.options = dict(config_entry.options) - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: - self.options.update(user_input) - return self.async_create_entry(title="", data=self.options) + return self.async_create_entry(data=self.config_entry.options | user_input) return self.async_show_form( step_id="init", diff --git a/homeassistant/components/iss/manifest.json b/homeassistant/components/iss/manifest.json index 1dc885c9df6..bf36a15db46 100644 --- a/homeassistant/components/iss/manifest.json +++ b/homeassistant/components/iss/manifest.json @@ -7,5 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyiss"], - "requirements": ["pyiss==1.0.1"] + "requirements": ["pyiss==1.0.1"], + "single_config_entry": true } diff --git a/homeassistant/components/iss/strings.json b/homeassistant/components/iss/strings.json index e0c7d85efa4..17e86587e85 100644 --- a/homeassistant/components/iss/strings.json +++ b/homeassistant/components/iss/strings.json @@ -6,7 +6,6 @@ } }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "latitude_longitude_not_defined": "Latitude and longitude are not defined in Home Assistant." } }, diff --git a/homeassistant/components/ista_ecotrend/config_flow.py b/homeassistant/components/ista_ecotrend/config_flow.py index 15222995a37..c11c43070df 100644 --- a/homeassistant/components/ista_ecotrend/config_flow.py +++ b/homeassistant/components/ista_ecotrend/config_flow.py @@ -17,7 +17,6 @@ from homeassistant.helpers.selector import ( TextSelectorType, ) -from . 
import IstaConfigEntry from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -43,8 +42,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema( class IstaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for ista EcoTrend.""" - reauth_entry: IstaConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -88,9 +85,6 @@ class IstaConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -98,9 +92,8 @@ class IstaConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - if TYPE_CHECKING: - assert self.reauth_entry + reauth_entry = self._get_reauth_entry() if user_input is not None: ista = PyEcotrendIsta( user_input[CONF_EMAIL], @@ -117,9 +110,7 @@ class IstaConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - return self.async_update_reload_and_abort( - self.reauth_entry, data=user_input - ) + return self.async_update_reload_and_abort(reauth_entry, data=user_input) return self.async_show_form( step_id="reauth_confirm", @@ -128,12 +119,12 @@ class IstaConfigFlow(ConfigFlow, domain=DOMAIN): suggested_values={ CONF_EMAIL: user_input[CONF_EMAIL] if user_input is not None - else self.reauth_entry.data[CONF_EMAIL] + else reauth_entry.data[CONF_EMAIL] }, ), description_placeholders={ - CONF_NAME: self.reauth_entry.title, - CONF_EMAIL: self.reauth_entry.data[CONF_EMAIL], + CONF_NAME: reauth_entry.title, + CONF_EMAIL: reauth_entry.data[CONF_EMAIL], }, errors=errors, ) diff --git a/homeassistant/components/ista_ecotrend/quality_scale.yaml b/homeassistant/components/ista_ecotrend/quality_scale.yaml new file mode 100644 index 00000000000..b942ecba487 --- /dev/null +++ b/homeassistant/components/ista_ecotrend/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: The integration registers no actions. + appropriate-polling: done + brands: done + common-modules: + status: todo + comment: Group the 3 different executor jobs into one executor job + config-flow-test-coverage: + status: todo + comment: test_form/docstrings outdated, test already_configured, test abort conditions in reauth + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: The integration registers no actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: The integration registers no events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: The integration registers no actions.
+ config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: Integration has no configuration parameters + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: The integration is a web service, there are no discoverable devices. + discovery: + status: exempt + comment: The integration is a web service, there are no discoverable devices. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: done + dynamic-devices: todo + entity-category: + status: done + comment: The default category is appropriate. + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/ista_ecotrend/sensor.py b/homeassistant/components/ista_ecotrend/sensor.py index 7aa1adfe4c9..eb06fabe373 100644 --- a/homeassistant/components/ista_ecotrend/sensor.py +++ b/homeassistant/components/ista_ecotrend/sensor.py @@ -40,6 +40,8 @@ from .coordinator import IstaCoordinator from .util import IstaConsumptionType, IstaValueType, get_native_value, get_statistics _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 @dataclass(kw_only=True, frozen=True) @@ -71,7 +73,6 @@ SENSOR_DESCRIPTIONS: tuple[IstaSensorEntityDescription, ...] = ( translation_key=IstaSensorEntity.HEATING, suggested_display_precision=0, consumption_type=IstaConsumptionType.HEATING, - native_unit_of_measurement="units", state_class=SensorStateClass.TOTAL, ), IstaSensorEntityDescription( diff --git a/homeassistant/components/ista_ecotrend/strings.json b/homeassistant/components/ista_ecotrend/strings.json index f76cf5286cb..e7c37461b19 100644 --- a/homeassistant/components/ista_ecotrend/strings.json +++ b/homeassistant/components/ista_ecotrend/strings.json @@ -14,14 +14,23 @@ "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" - } + }, + "data_description": { + "email": "Enter the email address associated with your ista EcoTrend account", + "password": "Enter the password for your ista EcoTrend account" + }, + "description": "Connect your **ista EcoTrend** account to Home Assistant to access your monthly heating and water usage data." 
}, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", - "description": "Please reenter the password for: {email}", + "description": "Re-enter your password for `{email}` to reconnect your ista EcoTrend account to Home Assistant.", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::ista_ecotrend::config::step::user::data_description::email%]", + "password": "[%key:component::ista_ecotrend::config::step::user::data_description::password%]" } } } @@ -29,7 +38,8 @@ "entity": { "sensor": { "heating": { - "name": "Heating" + "name": "Heating", + "unit_of_measurement": "units" }, "heating_cost": { "name": "Heating cost" diff --git a/homeassistant/components/isy994/__init__.py b/homeassistant/components/isy994/__init__.py index 0c238182849..d2862054971 100644 --- a/homeassistant/components/isy994/__init__.py +++ b/homeassistant/components/isy994/__init__.py @@ -144,7 +144,7 @@ async def async_setup_entry( isy_data.net_resources.append(resource) # Dump ISY Clock Information. Future: Add ISY as sensor to Hass with attrs - _LOGGER.info(repr(isy.clock)) + _LOGGER.debug(repr(isy.clock)) isy_data.root = isy _async_get_or_create_isy_device_in_registry(hass, entry, isy) diff --git a/homeassistant/components/isy994/climate.py b/homeassistant/components/isy994/climate.py index d4376b5a3b4..d5deba56284 100644 --- a/homeassistant/components/isy994/climate.py +++ b/homeassistant/components/isy994/climate.py @@ -88,7 +88,6 @@ class ISYThermostatEntity(ISYNodeEntity, ClimateEntity): ) _attr_target_temperature_step = 1.0 _attr_fan_modes = [FAN_AUTO, FAN_ON] - _enable_turn_on_off_backwards_compatibility = False def __init__(self, node: Node, device_info: DeviceInfo | None = None) -> None: """Initialize the ISY Thermostat entity.""" diff --git a/homeassistant/components/isy994/config_flow.py b/homeassistant/components/isy994/config_flow.py index 0239926f5e3..3575fa99a55 100644 --- a/homeassistant/components/isy994/config_flow.py +++ b/homeassistant/components/isy994/config_flow.py @@ -140,7 +140,7 @@ class Isy994ConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -314,10 +314,6 @@ class Isy994ConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle a option flow for ISY/IoX.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/isy994/const.py b/homeassistant/components/isy994/const.py index 57b30c88075..b43385a0e5d 100644 --- a/homeassistant/components/isy994/const.py +++ b/homeassistant/components/isy994/const.py @@ -15,6 +15,7 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) +from homeassistant.components.lock import LockState from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONCENTRATION_PARTS_PER_MILLION, @@ -29,14 +30,12 @@ from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS_MILLIWATT, STATE_CLOSED, STATE_CLOSING, - STATE_LOCKED, STATE_OFF, STATE_ON, STATE_OPEN, STATE_OPENING, STATE_PROBLEM, STATE_UNKNOWN, - STATE_UNLOCKED, UV_INDEX, Platform, 
UnitOfApparentPower, @@ -451,8 +450,8 @@ UOM_FRIENDLY_NAME = { UOM_TO_STATES = { "11": { # Deadbolt Status - 0: STATE_UNLOCKED, - 100: STATE_LOCKED, + 0: LockState.UNLOCKED, + 100: LockState.LOCKED, 101: STATE_UNKNOWN, 102: STATE_PROBLEM, }, diff --git a/homeassistant/components/isy994/fan.py b/homeassistant/components/isy994/fan.py index 1d8af78f83c..fc0406e2d5f 100644 --- a/homeassistant/components/isy994/fan.py +++ b/homeassistant/components/isy994/fan.py @@ -53,7 +53,6 @@ class ISYFanEntity(ISYNodeEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int | None: diff --git a/homeassistant/components/isy994/icons.json b/homeassistant/components/isy994/icons.json index 27b2ea6954e..9c6e7fa78df 100644 --- a/homeassistant/components/isy994/icons.json +++ b/homeassistant/components/isy994/icons.json @@ -1,12 +1,28 @@ { "services": { - "send_raw_node_command": "mdi:console-line", - "send_node_command": "mdi:console", - "get_zwave_parameter": "mdi:download", - "set_zwave_parameter": "mdi:upload", - "set_zwave_lock_user_code": "mdi:upload-lock", - "delete_zwave_lock_user_code": "mdi:lock-remove", - "rename_node": "mdi:pencil", - "send_program_command": "mdi:console" + "send_raw_node_command": { + "service": "mdi:console-line" + }, + "send_node_command": { + "service": "mdi:console" + }, + "get_zwave_parameter": { + "service": "mdi:download" + }, + "set_zwave_parameter": { + "service": "mdi:upload" + }, + "set_zwave_lock_user_code": { + "service": "mdi:upload-lock" + }, + "delete_zwave_lock_user_code": { + "service": "mdi:lock-remove" + }, + "rename_node": { + "service": "mdi:pencil" + }, + "send_program_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/isy994/services.py b/homeassistant/components/isy994/services.py index ffcea5cc8f8..1cd46446ed6 100644 --- a/homeassistant/components/isy994/services.py +++ b/homeassistant/components/isy994/services.py @@ -242,7 +242,7 @@ def async_unload_services(hass: HomeAssistant) -> None: if not existing_services or SERVICE_SEND_PROGRAM_COMMAND not in existing_services: return - _LOGGER.info("Unloading ISY994 Services") + _LOGGER.debug("Unloading ISY994 Services") hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_PROGRAM_COMMAND) hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_RAW_NODE_COMMAND) hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_NODE_COMMAND) diff --git a/homeassistant/components/isy994/strings.json b/homeassistant/components/isy994/strings.json index ec7d78edd53..f0e55881652 100644 --- a/homeassistant/components/isy994/strings.json +++ b/homeassistant/components/isy994/strings.json @@ -29,7 +29,8 @@ "invalid_host": "The host entry was not in full URL format, e.g., http://192.168.10.100:80" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "options": { diff --git a/homeassistant/components/itach/manifest.json b/homeassistant/components/itach/manifest.json index 2928620b952..68b34b4321e 100644 --- a/homeassistant/components/itach/manifest.json +++ b/homeassistant/components/itach/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/itach", "iot_class": "assumed_state", + 
"quality_scale": "legacy", "requirements": ["pyitachip2ir==0.0.7"] } diff --git a/homeassistant/components/itunes/manifest.json b/homeassistant/components/itunes/manifest.json index f1135dbf847..a12271d04d7 100644 --- a/homeassistant/components/itunes/manifest.json +++ b/homeassistant/components/itunes/manifest.json @@ -3,5 +3,6 @@ "name": "Apple iTunes", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/itunes", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/ituran/__init__.py b/homeassistant/components/ituran/__init__.py new file mode 100644 index 00000000000..b0a26cf7db2 --- /dev/null +++ b/homeassistant/components/ituran/__init__.py @@ -0,0 +1,28 @@ +"""The Ituran integration.""" + +from __future__ import annotations + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import IturanConfigEntry, IturanDataUpdateCoordinator + +PLATFORMS: list[Platform] = [ + Platform.DEVICE_TRACKER, +] + + +async def async_setup_entry(hass: HomeAssistant, entry: IturanConfigEntry) -> bool: + """Set up Ituran from a config entry.""" + + coordinator = IturanDataUpdateCoordinator(hass, entry=entry) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: IturanConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/ituran/config_flow.py b/homeassistant/components/ituran/config_flow.py new file mode 100644 index 00000000000..9709e471503 --- /dev/null +++ b/homeassistant/components/ituran/config_flow.py @@ -0,0 +1,137 @@ +"""Config flow for Ituran integration.""" + +from __future__ import annotations + +from collections.abc import Mapping +import logging +from typing import Any + +from pyituran import Ituran +from pyituran.exceptions import IturanApiError, IturanAuthError +import voluptuous as vol + +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult + +from .const import ( + CONF_ID_OR_PASSPORT, + CONF_MOBILE_ID, + CONF_OTP, + CONF_PHONE_NUMBER, + DOMAIN, +) + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_ID_OR_PASSPORT): str, + vol.Required(CONF_PHONE_NUMBER): str, + } +) + +STEP_OTP_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_OTP): str, + } +) + + +class IturanConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Ituran.""" + + _user_info: dict[str, Any] + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + await self.async_set_unique_id(user_input[CONF_ID_OR_PASSPORT]) + if self.source != SOURCE_REAUTH: + self._abort_if_unique_id_configured() + + ituran = Ituran( + user_input[CONF_ID_OR_PASSPORT], + user_input[CONF_PHONE_NUMBER], + ) + user_input[CONF_MOBILE_ID] = ituran.mobile_id + try: + authenticated = await ituran.is_authenticated() + if not authenticated: + await ituran.request_otp() + except IturanApiError: + errors["base"] = "cannot_connect" + except IturanAuthError: + errors["base"] = "invalid_auth" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = 
"unknown" + else: + if authenticated: + return self.async_create_entry( + title=f"Ituran {user_input[CONF_ID_OR_PASSPORT]}", + data=user_input, + ) + self._user_info = user_input + return await self.async_step_otp() + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) + + async def async_step_otp( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the OTP step.""" + errors: dict[str, str] = {} + if user_input is not None: + ituran = Ituran( + self._user_info[CONF_ID_OR_PASSPORT], + self._user_info[CONF_PHONE_NUMBER], + self._user_info[CONF_MOBILE_ID], + ) + try: + await ituran.authenticate(user_input[CONF_OTP]) + except IturanApiError: + errors["base"] = "cannot_connect" + except IturanAuthError: + errors["base"] = "invalid_otp" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self._user_info + ) + return self.async_create_entry( + title=f"Ituran {self._user_info[CONF_ID_OR_PASSPORT]}", + data=self._user_info, + ) + + return self.async_show_form( + step_id="otp", data_schema=STEP_OTP_DATA_SCHEMA, errors=errors + ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + self._user_info = dict(entry_data) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reauth confirmation message.""" + if user_input is not None: + return await self.async_step_user(self._user_info) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=vol.Schema({}), + description_placeholders={ + "phone_number": self._user_info[CONF_PHONE_NUMBER] + }, + ) diff --git a/homeassistant/components/ituran/const.py b/homeassistant/components/ituran/const.py new file mode 100644 index 00000000000..b17271490ee --- /dev/null +++ b/homeassistant/components/ituran/const.py @@ -0,0 +1,13 @@ +"""Constants for the Ituran integration.""" + +from datetime import timedelta +from typing import Final + +DOMAIN = "ituran" + +CONF_ID_OR_PASSPORT: Final = "id_or_passport" +CONF_PHONE_NUMBER: Final = "phone_number" +CONF_MOBILE_ID: Final = "mobile_id" +CONF_OTP: Final = "otp" + +UPDATE_INTERVAL = timedelta(seconds=300) diff --git a/homeassistant/components/ituran/coordinator.py b/homeassistant/components/ituran/coordinator.py new file mode 100644 index 00000000000..cd0949eb4c2 --- /dev/null +++ b/homeassistant/components/ituran/coordinator.py @@ -0,0 +1,76 @@ +"""Coordinator for Ituran.""" + +import logging + +from pyituran import Ituran, Vehicle +from pyituran.exceptions import IturanApiError, IturanAuthError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import ( + CONF_ID_OR_PASSPORT, + CONF_MOBILE_ID, + CONF_PHONE_NUMBER, + DOMAIN, + UPDATE_INTERVAL, +) + +_LOGGER = logging.getLogger(__name__) + +type IturanConfigEntry = ConfigEntry[IturanDataUpdateCoordinator] + + +class IturanDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Vehicle]]): + """Class to manage fetching Ituran data.""" + + 
config_entry: IturanConfigEntry + + def __init__(self, hass: HomeAssistant, entry: IturanConfigEntry) -> None: + """Initialize account-wide Ituran data updater.""" + super().__init__( + hass, + _LOGGER, + name=f"{DOMAIN}-{entry.data[CONF_ID_OR_PASSPORT]}", + update_interval=UPDATE_INTERVAL, + config_entry=entry, + ) + self.ituran = Ituran( + entry.data[CONF_ID_OR_PASSPORT], + entry.data[CONF_PHONE_NUMBER], + entry.data[CONF_MOBILE_ID], + ) + + async def _async_update_data(self) -> dict[str, Vehicle]: + """Fetch data from Ituran.""" + + try: + vehicles = await self.ituran.get_vehicles() + except IturanApiError as e: + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="api_error" + ) from e + except IturanAuthError as e: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, translation_key="auth_error" + ) from e + + updated_data = {vehicle.license_plate: vehicle for vehicle in vehicles} + self._cleanup_removed_vehicles(updated_data) + + return updated_data + + def _cleanup_removed_vehicles(self, data: dict[str, Vehicle]) -> None: + account_vehicles = {(DOMAIN, license_plate) for license_plate in data} + device_registry = dr.async_get(self.hass) + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry_id=self.config_entry.entry_id + ) + for device in device_entries: + if not device.identifiers.intersection(account_vehicles): + device_registry.async_update_device( + device.id, remove_config_entry_id=self.config_entry.entry_id + ) diff --git a/homeassistant/components/ituran/device_tracker.py b/homeassistant/components/ituran/device_tracker.py new file mode 100644 index 00000000000..37796570c61 --- /dev/null +++ b/homeassistant/components/ituran/device_tracker.py @@ -0,0 +1,49 @@ +"""Device tracker for Ituran vehicles.""" + +from __future__ import annotations + +from homeassistant.components.device_tracker import TrackerEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import IturanConfigEntry +from .coordinator import IturanDataUpdateCoordinator +from .entity import IturanBaseEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: IturanConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Ituran tracker from config entry.""" + coordinator = config_entry.runtime_data + async_add_entities( + IturanDeviceTracker(coordinator, license_plate) + for license_plate in coordinator.data + ) + + +class IturanDeviceTracker(IturanBaseEntity, TrackerEntity): + """Ituran device tracker.""" + + _attr_translation_key = "car" + _attr_name = None + + def __init__( + self, + coordinator: IturanDataUpdateCoordinator, + license_plate: str, + ) -> None: + """Initialize the device tracker.""" + super().__init__(coordinator, license_plate, "device_tracker") + + @property + def latitude(self) -> float | None: + """Return latitude value of the device.""" + return self.vehicle.gps_coordinates[0] + + @property + def longitude(self) -> float | None: + """Return longitude value of the device.""" + return self.vehicle.gps_coordinates[1] diff --git a/homeassistant/components/ituran/entity.py b/homeassistant/components/ituran/entity.py new file mode 100644 index 00000000000..597cdac9513 --- /dev/null +++ b/homeassistant/components/ituran/entity.py @@ -0,0 +1,47 @@ +"""Base for all Ituran entities.""" + +from __future__ import annotations + +from pyituran import Vehicle + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import IturanDataUpdateCoordinator + + +class IturanBaseEntity(CoordinatorEntity[IturanDataUpdateCoordinator]): + """Common base for Ituran entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: IturanDataUpdateCoordinator, + license_plate: str, + unique_key: str, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self._license_plate = license_plate + self._attr_unique_id = f"{license_plate}-{unique_key}" + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.vehicle.license_plate)}, + manufacturer=self.vehicle.make, + model=self.vehicle.model, + name=self.vehicle.model, + serial_number=self.vehicle.license_plate, + ) + + @property + def available(self) -> bool: + """Return True if vehicle is still included in the account.""" + return super().available and self._license_plate in self.coordinator.data + + @property + def vehicle(self) -> Vehicle: + """Return the vehicle information associated with this entity.""" + return self.coordinator.data[self._license_plate] diff --git a/homeassistant/components/ituran/icons.json b/homeassistant/components/ituran/icons.json new file mode 100644 index 00000000000..a20ea5b7304 --- /dev/null +++ b/homeassistant/components/ituran/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "device_tracker": { + "car": { + "default": "mdi:car" + } + } + } +} diff --git a/homeassistant/components/ituran/manifest.json b/homeassistant/components/ituran/manifest.json new file mode 100644 index 00000000000..93860427a77 --- /dev/null +++ b/homeassistant/components/ituran/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "ituran", + "name": "Ituran", + "codeowners": ["@shmuelzon"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/ituran", + "integration_type": "hub", + "iot_class": "cloud_polling", + "requirements": ["pyituran==0.1.4"] +} diff --git
a/homeassistant/components/ituran/quality_scale.yaml b/homeassistant/components/ituran/quality_scale.yaml new file mode 100644 index 00000000000..71d0d9698da --- /dev/null +++ b/homeassistant/components/ituran/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + dependency-transparency: done + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + brands: done + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + reauthentication-flow: done + parallel-updates: + status: exempt + comment: | + Read-only platforms and coordinator. + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: | + No options flow. + # Gold + entity-translations: done + entity-device-class: + status: exempt + comment: | + Only device_tracker platform. + devices: done + entity-category: todo + entity-disabled-by-default: + status: exempt + comment: | + No noisy entities. + discovery: + status: exempt + comment: | + This integration cannot be discovered; it connects to a service + provider, using the user's credentials to get the data. + stale-devices: todo + diagnostics: todo + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: done + discovery-update-info: + status: exempt + comment: | + This integration cannot be discovered; it connects to a service + provider, using the user's credentials to get the data. + repair-issues: + status: exempt + comment: | + No repairs/issues. + docs-use-cases: todo + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: todo + docs-examples: todo + # Platinum + async-dependency: done + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/ituran/strings.json b/homeassistant/components/ituran/strings.json new file mode 100644 index 00000000000..212dbd1b86a --- /dev/null +++ b/homeassistant/components/ituran/strings.json @@ -0,0 +1,46 @@ +{ + "config": { + "step": { + "user": { + "data": { + "id_or_passport": "ID or passport number", + "phone_number": "Mobile phone number" + }, + "data_description": { + "id_or_passport": "The government ID or passport number provided when registering with Ituran.", + "phone_number": "The mobile phone number provided when registering with Ituran. A one-time password will be sent to this mobile number." + } + }, + "otp": { + "data": { + "otp": "OTP" + }, + "data_description": { + "otp": "A one-time password sent as a text message to the mobile phone number provided earlier."
+ } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "A new one-time password will be sent to {phone_number}." + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "invalid_otp": "Invalid OTP", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + } + }, + "exceptions": { + "api_error": { + "message": "An error occurred while communicating with the Ituran service." + }, + "auth_error": { + "message": "Failed to authenticate with the Ituran service; please reauthenticate the integration." + } + } +} diff --git a/homeassistant/components/izone/climate.py b/homeassistant/components/izone/climate.py index 617cdc730cc..e61917c825b 100644 --- a/homeassistant/components/izone/climate.py +++ b/homeassistant/components/izone/climate.py @@ -85,9 +85,9 @@ async def async_setup_entry( # Filter out any entities excluded in the config file if conf and ctrl.device_uid in conf[CONF_EXCLUDE]: - _LOGGER.info("Controller UID=%s ignored as excluded", ctrl.device_uid) + _LOGGER.debug("Controller UID=%s ignored as excluded", ctrl.device_uid) return - _LOGGER.info("Controller UID=%s discovered", ctrl.device_uid) + _LOGGER.debug("Controller UID=%s discovered", ctrl.device_uid) device = ControllerDevice(ctrl) async_add_entities([device]) @@ -141,7 +141,6 @@ class ControllerDevice(ClimateEntity): _attr_has_entity_name = True _attr_name = None _attr_target_temperature_step = 0.5 - _enable_turn_on_off_backwards_compatibility = False def __init__(self, controller: Controller) -> None: """Initialise ControllerDevice.""" @@ -245,9 +244,9 @@ class ControllerDevice(ClimateEntity): return if available: - _LOGGER.info("Reconnected controller %s ", self._controller.device_uid) + _LOGGER.warning("Reconnected controller %s ", self._controller.device_uid) else: - _LOGGER.info( + _LOGGER.warning( "Controller %s disconnected due to exception: %s", self._controller.device_uid, ex, diff --git a/homeassistant/components/izone/icons.json b/homeassistant/components/izone/icons.json index e02cd57c141..bb38db27839 100644 --- a/homeassistant/components/izone/icons.json +++ b/homeassistant/components/izone/icons.json @@ -1,6 +1,10 @@ { "services": { - "airflow_min": "mdi:fan-minus", - "airflow_max": "mdi:fan-plus" + "airflow_min": { + "service": "mdi:fan-minus" + }, + "airflow_max": { + "service": "mdi:fan-plus" + } } } diff --git a/homeassistant/components/jellyfin/__init__.py b/homeassistant/components/jellyfin/__init__.py index 0dc51ebd9b3..4f0886dfa22 100644 --- a/homeassistant/components/jellyfin/__init__.py +++ b/homeassistant/components/jellyfin/__init__.py @@ -9,10 +9,9 @@ from homeassistant.helpers import device_registry as dr from .client_wrapper import CannotConnect, InvalidAuth, create_client, validate_input from .const import CONF_CLIENT_DEVICE_ID, DOMAIN, PLATFORMS -from .coordinator import JellyfinDataUpdateCoordinator, SessionsDataUpdateCoordinator -from .models import JellyfinData +from .coordinator import JellyfinDataUpdateCoordinator -type JellyfinConfigEntry = ConfigEntry[JellyfinData] +type JellyfinConfigEntry = ConfigEntry[JellyfinDataUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: JellyfinConfigEntry) -> bool: @@ -36,20 +35,12 @@ async
def async_setup_entry(hass: HomeAssistant, entry: JellyfinConfigEntry) -> server_info: dict[str, Any] = connect_result["Servers"][0] - coordinators: dict[str, JellyfinDataUpdateCoordinator[Any]] = { - "sessions": SessionsDataUpdateCoordinator( - hass, client, server_info, entry.data[CONF_CLIENT_DEVICE_ID], user_id - ), - } + coordinator = JellyfinDataUpdateCoordinator(hass, client, server_info, user_id) - for coordinator in coordinators.values(): - await coordinator.async_config_entry_first_refresh() + await coordinator.async_config_entry_first_refresh() - entry.runtime_data = JellyfinData( - client_device_id=entry.data[CONF_CLIENT_DEVICE_ID], - jellyfin_client=client, - coordinators=coordinators, - ) + entry.runtime_data = coordinator + entry.async_on_unload(client.stop) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -58,19 +49,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: JellyfinConfigEntry) -> async def async_unload_entry(hass: HomeAssistant, entry: JellyfinConfigEntry) -> bool: """Unload a config entry.""" - unloaded = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unloaded: - entry.runtime_data.jellyfin_client.stop() - - return unloaded + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def async_remove_config_entry_device( hass: HomeAssistant, config_entry: JellyfinConfigEntry, device_entry: dr.DeviceEntry ) -> bool: """Remove device from a config entry.""" - data = config_entry.runtime_data - coordinator = data.coordinators["sessions"] + coordinator = config_entry.runtime_data return not device_entry.identifiers.intersection( ( diff --git a/homeassistant/components/jellyfin/browse_media.py b/homeassistant/components/jellyfin/browse_media.py index 2af2bac4875..e5648b0a34f 100644 --- a/homeassistant/components/jellyfin/browse_media.py +++ b/homeassistant/components/jellyfin/browse_media.py @@ -7,8 +7,12 @@ from typing import Any from jellyfin_apiclient_python import JellyfinClient -from homeassistant.components.media_player import BrowseError, MediaClass, MediaType -from homeassistant.components.media_player.browse_media import BrowseMedia +from homeassistant.components.media_player import ( + BrowseError, + BrowseMedia, + MediaClass, + MediaType, +) from homeassistant.core import HomeAssistant from .client_wrapper import get_artwork_url diff --git a/homeassistant/components/jellyfin/config_flow.py b/homeassistant/components/jellyfin/config_flow.py index 7b5426cffde..0c170d2485f 100644 --- a/homeassistant/components/jellyfin/config_flow.py +++ b/homeassistant/components/jellyfin/config_flow.py @@ -8,11 +8,7 @@ from typing import Any import voluptuous as vol -from homeassistant.config_entries import ( - ConfigFlow, - ConfigFlowResult, - OptionsFlowWithConfigEntry, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import callback from homeassistant.util.uuid import random_uuid_hex @@ -56,7 +52,6 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the Jellyfin config flow.""" self.client_device_id: str | None = None - self.entry: JellyfinConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -108,7 +103,6 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication 
error.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -118,8 +112,8 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - assert self.entry is not None - new_input = self.entry.data | user_input + reauth_entry = self._get_reauth_entry() + new_input = reauth_entry.data | user_input if self.client_device_id is None: self.client_device_id = _generate_client_device_id() @@ -135,10 +129,7 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" _LOGGER.exception("Unexpected exception") else: - self.hass.config_entries.async_update_entry(self.entry, data=new_input) - - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=new_input) return self.async_show_form( step_id="reauth_confirm", data_schema=REAUTH_DATA_SCHEMA, errors=errors @@ -148,12 +139,12 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: JellyfinConfigEntry, - ) -> OptionsFlowWithConfigEntry: + ) -> OptionsFlowHandler: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() -class OptionsFlowHandler(OptionsFlowWithConfigEntry): +class OptionsFlowHandler(OptionsFlow): """Handle an option flow for jellyfin.""" async def async_step_init( diff --git a/homeassistant/components/jellyfin/const.py b/homeassistant/components/jellyfin/const.py index 34fb040115f..cdddaa46ad1 100644 --- a/homeassistant/components/jellyfin/const.py +++ b/homeassistant/components/jellyfin/const.py @@ -83,5 +83,5 @@ MEDIA_CLASS_MAP = { "Season": MediaClass.SEASON, } -PLATFORMS = [Platform.MEDIA_PLAYER, Platform.SENSOR] +PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE, Platform.SENSOR] LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/jellyfin/coordinator.py b/homeassistant/components/jellyfin/coordinator.py index bbd0dfe7496..20428250254 100644 --- a/homeassistant/components/jellyfin/coordinator.py +++ b/homeassistant/components/jellyfin/coordinator.py @@ -2,32 +2,28 @@ from __future__ import annotations -from abc import ABC, abstractmethod from datetime import timedelta -from typing import Any, TypeVar +from typing import Any from jellyfin_apiclient_python import JellyfinClient +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DOMAIN, LOGGER, USER_APP_NAME - -JellyfinDataT = TypeVar( - "JellyfinDataT", - bound=dict[str, dict[str, Any]] | dict[str, Any], -) +from .const import CONF_CLIENT_DEVICE_ID, DOMAIN, LOGGER, USER_APP_NAME -class JellyfinDataUpdateCoordinator(DataUpdateCoordinator[JellyfinDataT], ABC): +class JellyfinDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]): """Data update coordinator for the Jellyfin integration.""" + config_entry: ConfigEntry + def __init__( self, hass: HomeAssistant, api_client: JellyfinClient, system_info: dict[str, Any], - client_device_id: str, user_id: str, ) -> None: """Initialize the coordinator.""" @@ -37,32 +33,19 @@ class JellyfinDataUpdateCoordinator(DataUpdateCoordinator[JellyfinDataT], ABC): name=DOMAIN, update_interval=timedelta(seconds=10), ) - self.api_client: JellyfinClient = api_client 
+ self.api_client = api_client self.server_id: str = system_info["Id"] self.server_name: str = system_info["Name"] self.server_version: str | None = system_info.get("Version") - self.client_device_id: str = client_device_id + self.client_device_id: str = self.config_entry.data[CONF_CLIENT_DEVICE_ID] self.user_id: str = user_id self.session_ids: set[str] = set() + self.remote_session_ids: set[str] = set() self.device_ids: set[str] = set() - async def _async_update_data(self) -> JellyfinDataT: + async def _async_update_data(self) -> dict[str, dict[str, Any]]: """Get the latest data from Jellyfin.""" - return await self._fetch_data() - - @abstractmethod - async def _fetch_data(self) -> JellyfinDataT: - """Fetch the actual data.""" - - -class SessionsDataUpdateCoordinator( - JellyfinDataUpdateCoordinator[dict[str, dict[str, Any]]] -): - """Sessions update coordinator for Jellyfin.""" - - async def _fetch_data(self) -> dict[str, dict[str, Any]]: - """Fetch the data.""" sessions = await self.hass.async_add_executor_job( self.api_client.jellyfin.sessions ) diff --git a/homeassistant/components/jellyfin/diagnostics.py b/homeassistant/components/jellyfin/diagnostics.py index 80bbd78c9ad..8042d588d1b 100644 --- a/homeassistant/components/jellyfin/diagnostics.py +++ b/homeassistant/components/jellyfin/diagnostics.py @@ -17,8 +17,7 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: JellyfinConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data = entry.runtime_data - sessions = data.coordinators["sessions"] + coordinator = entry.runtime_data return { "entry": { @@ -26,9 +25,9 @@ async def async_get_config_entry_diagnostics( "data": async_redact_data(entry.data, TO_REDACT), }, "server": { - "id": sessions.server_id, - "name": sessions.server_name, - "version": sessions.server_version, + "id": coordinator.server_id, + "name": coordinator.server_name, + "version": coordinator.server_version, }, "sessions": [ { @@ -42,6 +41,6 @@ async def async_get_config_entry_diagnostics( "now_playing": session_data.get("NowPlayingItem"), "play_state": session_data.get("PlayState"), } - for session_id, session_data in sessions.data.items() + for session_id, session_data in coordinator.data.items() ], } diff --git a/homeassistant/components/jellyfin/entity.py b/homeassistant/components/jellyfin/entity.py index 2204a36dc61..4a3b2b77bb1 100644 --- a/homeassistant/components/jellyfin/entity.py +++ b/homeassistant/components/jellyfin/entity.py @@ -2,33 +2,74 @@ from __future__ import annotations +from typing import Any + from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DEFAULT_NAME, DOMAIN -from .coordinator import JellyfinDataT, JellyfinDataUpdateCoordinator +from .coordinator import JellyfinDataUpdateCoordinator -class JellyfinEntity(CoordinatorEntity[JellyfinDataUpdateCoordinator[JellyfinDataT]]): +class JellyfinEntity(CoordinatorEntity[JellyfinDataUpdateCoordinator]): """Defines a base Jellyfin entity.""" _attr_has_entity_name = True + +class JellyfinServerEntity(JellyfinEntity): + """Defines a base Jellyfin server entity.""" + + def __init__(self, coordinator: JellyfinDataUpdateCoordinator) -> None: + """Initialize the Jellyfin entity.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, 
coordinator.server_id)}, + manufacturer=DEFAULT_NAME, + name=coordinator.server_name, + sw_version=coordinator.server_version, + ) + + +class JellyfinClientEntity(JellyfinEntity): + """Defines a base Jellyfin client entity.""" + def __init__( self, - coordinator: JellyfinDataUpdateCoordinator[JellyfinDataT], - description: EntityDescription, + coordinator: JellyfinDataUpdateCoordinator, + session_id: str, ) -> None: """Initialize the Jellyfin entity.""" super().__init__(coordinator) - self.coordinator = coordinator - self.entity_description = description - self._attr_unique_id = f"{coordinator.server_id}-{description.key}" - self._attr_device_info = DeviceInfo( - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, self.coordinator.server_id)}, - manufacturer=DEFAULT_NAME, - name=self.coordinator.server_name, - sw_version=self.coordinator.server_version, - ) + self.session_id = session_id + self.device_id: str = self.session_data["DeviceId"] + self.device_name: str = self.session_data["DeviceName"] + self.client_name: str = self.session_data["Client"] + self.app_version: str = self.session_data["ApplicationVersion"] + self.capabilities: dict[str, Any] = self.session_data["Capabilities"] + + if self.capabilities.get("SupportsPersistentIdentifier", False): + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.device_id)}, + manufacturer="Jellyfin", + model=self.client_name, + name=self.device_name, + sw_version=self.app_version, + via_device=(DOMAIN, coordinator.server_id), + ) + self._attr_name = None + else: + self._attr_device_info = None + self._attr_has_entity_name = False + self._attr_name = self.device_name + + @property + def session_data(self) -> dict[str, Any]: + """Return the session data.""" + return self.coordinator.data[self.session_id] + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.session_id in self.coordinator.data diff --git a/homeassistant/components/jellyfin/media_player.py b/homeassistant/components/jellyfin/media_player.py index d24d15f1dfa..bf6e95c0c96 100644 --- a/homeassistant/components/jellyfin/media_player.py +++ b/homeassistant/components/jellyfin/media_player.py @@ -5,24 +5,22 @@ from __future__ import annotations from typing import Any from homeassistant.components.media_player import ( + BrowseMedia, MediaPlayerEntity, - MediaPlayerEntityDescription, MediaPlayerEntityFeature, MediaPlayerState, MediaType, ) -from homeassistant.components.media_player.browse_media import BrowseMedia from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import parse_datetime from . 
import JellyfinConfigEntry from .browse_media import build_item_response, build_root_response from .client_wrapper import get_artwork_url -from .const import CONTENT_TYPE_MAP, DOMAIN, LOGGER +from .const import CONTENT_TYPE_MAP, LOGGER from .coordinator import JellyfinDataUpdateCoordinator -from .entity import JellyfinEntity +from .entity import JellyfinClientEntity async def async_setup_entry( @@ -31,18 +29,15 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Jellyfin media_player from a config entry.""" - jellyfin_data = entry.runtime_data - coordinator = jellyfin_data.coordinators["sessions"] + coordinator = entry.runtime_data @callback def handle_coordinator_update() -> None: """Add media player per session.""" entities: list[MediaPlayerEntity] = [] - for session_id, session_data in coordinator.data.items(): + for session_id in coordinator.data: if session_id not in coordinator.session_ids: - entity: MediaPlayerEntity = JellyfinMediaPlayer( - coordinator, session_id, session_data - ) + entity: MediaPlayerEntity = JellyfinMediaPlayer(coordinator, session_id) LOGGER.debug("Creating media player for session: %s", session_id) coordinator.session_ids.add(session_id) entities.append(entity) @@ -53,60 +48,28 @@ async def async_setup_entry( entry.async_on_unload(coordinator.async_add_listener(handle_coordinator_update)) -class JellyfinMediaPlayer(JellyfinEntity, MediaPlayerEntity): +class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity): """Represents a Jellyfin Player device.""" def __init__( self, coordinator: JellyfinDataUpdateCoordinator, session_id: str, - session_data: dict[str, Any], ) -> None: """Initialize the Jellyfin Media Player entity.""" - super().__init__( - coordinator, - MediaPlayerEntityDescription( - key=session_id, - ), + super().__init__(coordinator, session_id) + self._attr_unique_id = f"{coordinator.server_id}-{session_id}" + + self.now_playing: dict[str, Any] | None = self.session_data.get( + "NowPlayingItem" ) - - self.session_id = session_id - self.session_data: dict[str, Any] | None = session_data - self.device_id: str = session_data["DeviceId"] - self.device_name: str = session_data["DeviceName"] - self.client_name: str = session_data["Client"] - self.app_version: str = session_data["ApplicationVersion"] - - self.capabilities: dict[str, Any] = session_data["Capabilities"] - self.now_playing: dict[str, Any] | None = session_data.get("NowPlayingItem") - self.play_state: dict[str, Any] | None = session_data.get("PlayState") - - if self.capabilities.get("SupportsPersistentIdentifier", False): - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self.device_id)}, - manufacturer="Jellyfin", - model=self.client_name, - name=self.device_name, - sw_version=self.app_version, - via_device=(DOMAIN, coordinator.server_id), - ) - self._attr_name = None - else: - self._attr_device_info = None - self._attr_has_entity_name = False - self._attr_name = self.device_name + self.play_state: dict[str, Any] | None = self.session_data.get("PlayState") self._update_from_session_data() @callback def _handle_coordinator_update(self) -> None: - self.session_data = ( - self.coordinator.data.get(self.session_id) - if self.coordinator.data is not None - else None - ) - - if self.session_data is not None: + if self.available: self.now_playing = self.session_data.get("NowPlayingItem") self.play_state = self.session_data.get("PlayState") else: @@ -136,7 +99,7 @@ class JellyfinMediaPlayer(JellyfinEntity, MediaPlayerEntity): volume_muted 
= False volume_level = None - if self.session_data is not None: + if self.available: state = MediaPlayerState.IDLE media_position_updated = ( parse_datetime(self.session_data["LastPlaybackCheckIn"]) @@ -234,11 +197,6 @@ class JellyfinMediaPlayer(JellyfinEntity, MediaPlayerEntity): return features - @property - def available(self) -> bool: - """Return if entity is available.""" - return self.coordinator.last_update_success and self.session_data is not None - def media_seek(self, position: float) -> None: """Send seek command.""" self.coordinator.api_client.jellyfin.remote_seek( diff --git a/homeassistant/components/jellyfin/media_source.py b/homeassistant/components/jellyfin/media_source.py index 4b3e8b0146a..a061118dd0a 100644 --- a/homeassistant/components/jellyfin/media_source.py +++ b/homeassistant/components/jellyfin/media_source.py @@ -11,7 +11,7 @@ from jellyfin_apiclient_python.api import jellyfin_url from jellyfin_apiclient_python.client import JellyfinClient from homeassistant.components.media_player import BrowseError, MediaClass -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, MediaSourceItem, @@ -56,9 +56,9 @@ async def async_get_media_source(hass: HomeAssistant) -> MediaSource: """Set up Jellyfin media source.""" # Currently only a single Jellyfin server is supported entry: JellyfinConfigEntry = hass.config_entries.async_entries(DOMAIN)[0] - jellyfin_data = entry.runtime_data + coordinator = entry.runtime_data - return JellyfinSource(hass, jellyfin_data.jellyfin_client, entry) + return JellyfinSource(hass, coordinator.api_client, entry) class JellyfinSource(MediaSource): diff --git a/homeassistant/components/jellyfin/models.py b/homeassistant/components/jellyfin/models.py deleted file mode 100644 index bfa639a7567..00000000000 --- a/homeassistant/components/jellyfin/models.py +++ /dev/null @@ -1,18 +0,0 @@ -"""Models for the Jellyfin integration.""" - -from __future__ import annotations - -from dataclasses import dataclass - -from jellyfin_apiclient_python import JellyfinClient - -from .coordinator import JellyfinDataUpdateCoordinator - - -@dataclass -class JellyfinData: - """Data for the Jellyfin integration.""" - - client_device_id: str - jellyfin_client: JellyfinClient - coordinators: dict[str, JellyfinDataUpdateCoordinator] diff --git a/homeassistant/components/jellyfin/remote.py b/homeassistant/components/jellyfin/remote.py new file mode 100644 index 00000000000..ae33d58cc0c --- /dev/null +++ b/homeassistant/components/jellyfin/remote.py @@ -0,0 +1,80 @@ +"""Support for Jellyfin remote commands.""" + +from __future__ import annotations + +from collections.abc import Iterable +import time +from typing import Any + +from homeassistant.components.remote import ( + ATTR_DELAY_SECS, + ATTR_NUM_REPEATS, + DEFAULT_DELAY_SECS, + DEFAULT_NUM_REPEATS, + RemoteEntity, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import JellyfinConfigEntry +from .const import LOGGER +from .coordinator import JellyfinDataUpdateCoordinator +from .entity import JellyfinClientEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: JellyfinConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Jellyfin remote from a config entry.""" + coordinator = entry.runtime_data + + @callback + def handle_coordinator_update() -> None: + """Add remote per session.""" + entities: list[RemoteEntity] = [] + for session_id, session_data in coordinator.data.items(): + if ( + session_id not in coordinator.remote_session_ids + and session_data["SupportsRemoteControl"] + ): + entity = JellyfinRemote(coordinator, session_id) + LOGGER.debug("Creating remote for session: %s", session_id) + coordinator.remote_session_ids.add(session_id) + entities.append(entity) + async_add_entities(entities) + + handle_coordinator_update() + + entry.async_on_unload(coordinator.async_add_listener(handle_coordinator_update)) + + +class JellyfinRemote(JellyfinClientEntity, RemoteEntity): + """Defines a Jellyfin remote entity.""" + + def __init__( + self, + coordinator: JellyfinDataUpdateCoordinator, + session_id: str, + ) -> None: + """Initialize the Jellyfin Remote entity.""" + super().__init__(coordinator, session_id) + self._attr_unique_id = f"{coordinator.server_id}-{session_id}" + + @property + def is_on(self) -> bool: + """Return if the client is on.""" + return self.session_data["IsActive"] if self.session_data else False + + def send_command(self, command: Iterable[str], **kwargs: Any) -> None: + """Send a command to the client.""" + num_repeats = kwargs.get(ATTR_NUM_REPEATS, DEFAULT_NUM_REPEATS) + delay = kwargs.get(ATTR_DELAY_SECS, DEFAULT_DELAY_SECS) + + for _ in range(num_repeats): + for single_command in command: + self.coordinator.api_client.jellyfin.command( + self.session_id, single_command + ) + time.sleep(delay) diff --git a/homeassistant/components/jellyfin/sensor.py b/homeassistant/components/jellyfin/sensor.py index 3be4ccf2559..5c519f661ee 100644 --- a/homeassistant/components/jellyfin/sensor.py +++ b/homeassistant/components/jellyfin/sensor.py @@ -4,25 +4,25 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from typing import Any from homeassistant.components.sensor import SensorEntity, SensorEntityDescription from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import JellyfinConfigEntry -from .coordinator import JellyfinDataT -from .entity import JellyfinEntity +from . import JellyfinConfigEntry, JellyfinDataUpdateCoordinator +from .entity import JellyfinServerEntity @dataclass(frozen=True, kw_only=True) class JellyfinSensorEntityDescription(SensorEntityDescription): """Describes Jellyfin sensor entity.""" - value_fn: Callable[[JellyfinDataT], StateType] + value_fn: Callable[[dict[str, dict[str, Any]]], StateType] -def _count_now_playing(data: JellyfinDataT) -> int: +def _count_now_playing(data: dict[str, dict[str, Any]]) -> int: """Count the number of now playing.""" session_ids = [ sid for (sid, session) in data.items() if "NowPlayingItem" in session @@ -31,15 +31,13 @@ def _count_now_playing(data: JellyfinDataT) -> int: return len(session_ids) -SENSOR_TYPES: dict[str, JellyfinSensorEntityDescription] = { - "sessions": JellyfinSensorEntityDescription( +SENSOR_TYPES: tuple[JellyfinSensorEntityDescription, ...] 
= ( + JellyfinSensorEntityDescription( key="watching", translation_key="watching", - name=None, - native_unit_of_measurement="Watching", value_fn=_count_now_playing, - ) -} + ), +) async def async_setup_entry( @@ -48,19 +46,28 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Jellyfin sensor based on a config entry.""" - data = entry.runtime_data + coordinator = entry.runtime_data async_add_entities( - JellyfinSensor(data.coordinators[coordinator_type], description) - for coordinator_type, description in SENSOR_TYPES.items() + JellyfinServerSensor(coordinator, description) for description in SENSOR_TYPES ) -class JellyfinSensor(JellyfinEntity, SensorEntity): +class JellyfinServerSensor(JellyfinServerEntity, SensorEntity): """Defines a Jellyfin sensor entity.""" entity_description: JellyfinSensorEntityDescription + def __init__( + self, + coordinator: JellyfinDataUpdateCoordinator, + description: JellyfinSensorEntityDescription, + ) -> None: + """Initialize Jellyfin sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.server_id}-{description.key}" + @property def native_value(self) -> StateType: """Return the state of the sensor.""" diff --git a/homeassistant/components/jellyfin/strings.json b/homeassistant/components/jellyfin/strings.json index fd11d8fbad2..a9816b1fb78 100644 --- a/homeassistant/components/jellyfin/strings.json +++ b/homeassistant/components/jellyfin/strings.json @@ -26,6 +26,14 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, + "entity": { + "sensor": { + "watching": { + "name": "Active clients", + "unit_of_measurement": "clients" + } + } + }, "options": { "step": { "init": { diff --git a/homeassistant/components/jewish_calendar/__init__.py b/homeassistant/components/jewish_calendar/__init__.py index fd238e8d615..823e9bd59be 100644 --- a/homeassistant/components/jewish_calendar/__init__.py +++ b/homeassistant/components/jewish_calendar/__init__.py @@ -5,26 +5,17 @@ from __future__ import annotations from functools import partial from hdate import Location -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( CONF_ELEVATION, CONF_LANGUAGE, CONF_LATITUDE, - CONF_LOCATION, CONF_LONGITUDE, - CONF_NAME, CONF_TIME_ZONE, Platform, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback -import homeassistant.helpers.config_validation as cv -import homeassistant.helpers.entity_registry as er -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType +from homeassistant.core import HomeAssistant -from .binary_sensor import BINARY_SENSORS from .const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, @@ -33,94 +24,15 @@ from .const import ( DEFAULT_DIASPORA, DEFAULT_HAVDALAH_OFFSET_MINUTES, DEFAULT_LANGUAGE, - DEFAULT_NAME, - DOMAIN, ) -from .sensor import INFO_SENSORS, TIME_SENSORS +from .entity import JewishCalendarConfigEntry, JewishCalendarData PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.All( - cv.deprecated(DOMAIN), - { - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_DIASPORA, default=DEFAULT_DIASPORA): cv.boolean, - vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude, - vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude, - 
vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In( - ["hebrew", "english"] - ), - vol.Optional( - CONF_CANDLE_LIGHT_MINUTES, default=DEFAULT_CANDLE_LIGHT - ): int, - # Default of 0 means use 8.5 degrees / 'three_stars' time. - vol.Optional( - CONF_HAVDALAH_OFFSET_MINUTES, - default=DEFAULT_HAVDALAH_OFFSET_MINUTES, - ): int, - }, - ) - }, - extra=vol.ALLOW_EXTRA, -) - -def get_unique_prefix( - location: Location, - language: str, - candle_lighting_offset: int | None, - havdalah_offset: int | None, -) -> str: - """Create a prefix for unique ids.""" - # location.altitude was unset before 2024.6 when this method - # was used to create the unique id. As such it would always - # use the default altitude of 754. - config_properties = [ - location.latitude, - location.longitude, - location.timezone, - 754, - location.diaspora, - language, - candle_lighting_offset, - havdalah_offset, - ] - prefix = "_".join(map(str, config_properties)) - return f"{prefix}" - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Jewish Calendar component.""" - if DOMAIN not in config: - return True - - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - is_fixable=False, - issue_domain=DOMAIN, - breaks_in_ha_version="2024.12.0", - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": DEFAULT_NAME, - }, - ) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] - ) - ) - - return True - - -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry +) -> bool: """Set up a configuration entry for Jewish calendar.""" language = config_entry.data.get(CONF_LANGUAGE, DEFAULT_LANGUAGE) diaspora = config_entry.data.get(CONF_DIASPORA, DEFAULT_DIASPORA) @@ -143,27 +55,19 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ) ) - hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = { - CONF_LANGUAGE: language, - CONF_DIASPORA: diaspora, - CONF_LOCATION: location, - CONF_CANDLE_LIGHT_MINUTES: candle_lighting_offset, - CONF_HAVDALAH_OFFSET_MINUTES: havdalah_offset, - } - - # Update unique ID to be unrelated to user defined options - old_prefix = get_unique_prefix( - location, language, candle_lighting_offset, havdalah_offset + config_entry.runtime_data = JewishCalendarData( + language, + diaspora, + location, + candle_lighting_offset, + havdalah_offset, ) - ent_reg = er.async_get(hass) - entries = er.async_entries_for_config_entry(ent_reg, config_entry.entry_id) - if not entries or any(entry.unique_id.startswith(old_prefix) for entry in entries): - async_update_unique_ids(ent_reg, config_entry.entry_id, old_prefix) - await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) - async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + async def update_listener( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry + ) -> None: # Trigger update of states for all platforms await hass.config_entries.async_reload(config_entry.entry_id) @@ -171,35 +75,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b return True -async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, 
config_entry: JewishCalendarConfigEntry +) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms( - config_entry, PLATFORMS - ) - - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - - return unload_ok - - -@callback -def async_update_unique_ids( - ent_reg: er.EntityRegistry, new_prefix: str, old_prefix: str -) -> None: - """Update unique ID to be unrelated to user defined options. - - Introduced with release 2024.6 - """ - platform_descriptions = { - Platform.BINARY_SENSOR: BINARY_SENSORS, - Platform.SENSOR: (*INFO_SENSORS, *TIME_SENSORS), - } - for platform, descriptions in platform_descriptions.items(): - for description in descriptions: - new_unique_id = f"{new_prefix}-{description.key}" - old_unique_id = f"{old_prefix}_{description.key}" - if entity_id := ent_reg.async_get_entity_id( - platform, DOMAIN, old_unique_id - ): - ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) diff --git a/homeassistant/components/jewish_calendar/binary_sensor.py b/homeassistant/components/jewish_calendar/binary_sensor.py index 060650ee25c..9fd1371f8a8 100644 --- a/homeassistant/components/jewish_calendar/binary_sensor.py +++ b/homeassistant/components/jewish_calendar/binary_sensor.py @@ -14,15 +14,13 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers import event from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from .const import DOMAIN -from .entity import JewishCalendarEntity +from .entity import JewishCalendarConfigEntry, JewishCalendarEntity @dataclass(frozen=True) @@ -63,14 +61,12 @@ BINARY_SENSORS: tuple[JewishCalendarBinarySensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: JewishCalendarConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Jewish Calendar binary sensors.""" - entry = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - JewishCalendarBinarySensor(config_entry, entry, description) + JewishCalendarBinarySensor(config_entry, description) for description in BINARY_SENSORS ) diff --git a/homeassistant/components/jewish_calendar/config_flow.py b/homeassistant/components/jewish_calendar/config_flow.py index 8f04d73915f..a2eadbf57bd 100644 --- a/homeassistant/components/jewish_calendar/config_flow.py +++ b/homeassistant/components/jewish_calendar/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_ELEVATION, @@ -30,7 +30,6 @@ from homeassistant.helpers.selector import ( SelectSelector, SelectSelectorConfig, ) -from homeassistant.helpers.typing import ConfigType from .const import ( CONF_CANDLE_LIGHT_MINUTES, @@ -91,32 +90,21 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowWithConfigEntry: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> JewishCalendarOptionsFlowHandler: """Get the options flow for this handler.""" - return JewishCalendarOptionsFlowHandler(config_entry) + return JewishCalendarOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: - _options = {} - if CONF_CANDLE_LIGHT_MINUTES in user_input: - _options[CONF_CANDLE_LIGHT_MINUTES] = user_input[ - CONF_CANDLE_LIGHT_MINUTES - ] - del user_input[CONF_CANDLE_LIGHT_MINUTES] - if CONF_HAVDALAH_OFFSET_MINUTES in user_input: - _options[CONF_HAVDALAH_OFFSET_MINUTES] = user_input[ - CONF_HAVDALAH_OFFSET_MINUTES - ] - del user_input[CONF_HAVDALAH_OFFSET_MINUTES] if CONF_LOCATION in user_input: user_input[CONF_LATITUDE] = user_input[CONF_LOCATION][CONF_LATITUDE] user_input[CONF_LONGITUDE] = user_input[CONF_LOCATION][CONF_LONGITUDE] - return self.async_create_entry( - title=DEFAULT_NAME, data=user_input, options=_options - ) + return self.async_create_entry(title=DEFAULT_NAME, data=user_input) return self.async_show_form( step_id="user", @@ -125,14 +113,24 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_import( - self, import_config: ConfigType | None + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + """Handle a reconfiguration flow initialized by the user.""" + reconfigure_entry = self._get_reconfigure_entry() + if not user_input: + return self.async_show_form( + data_schema=self.add_suggested_values_to_schema( + _get_data_schema(self.hass), + reconfigure_entry.data, + ), + step_id="reconfigure", + ) + + return self.async_update_reload_and_abort(reconfigure_entry, data=user_input) -class JewishCalendarOptionsFlowHandler(OptionsFlowWithConfigEntry): +class JewishCalendarOptionsFlowHandler(OptionsFlow): """Handle Jewish Calendar options.""" async def async_step_init( diff --git a/homeassistant/components/jewish_calendar/entity.py 
b/homeassistant/components/jewish_calendar/entity.py index c11925df954..1d2a6e45c0a 100644 --- a/homeassistant/components/jewish_calendar/entity.py +++ b/homeassistant/components/jewish_calendar/entity.py @@ -1,18 +1,27 @@ """Entity representing a Jewish Calendar sensor.""" -from typing import Any +from dataclasses import dataclass + +from hdate import Location from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_LANGUAGE, CONF_LOCATION from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import Entity, EntityDescription -from .const import ( - CONF_CANDLE_LIGHT_MINUTES, - CONF_DIASPORA, - CONF_HAVDALAH_OFFSET_MINUTES, - DOMAIN, -) +from .const import DOMAIN + +type JewishCalendarConfigEntry = ConfigEntry[JewishCalendarData] + + +@dataclass +class JewishCalendarData: + """Jewish Calendar runtime dataclass.""" + + language: str + diaspora: bool + location: Location + candle_lighting_offset: int + havdalah_offset: int class JewishCalendarEntity(Entity): @@ -22,8 +31,7 @@ class JewishCalendarEntity(Entity): def __init__( self, - config_entry: ConfigEntry, - data: dict[str, Any], + config_entry: JewishCalendarConfigEntry, description: EntityDescription, ) -> None: """Initialize a Jewish Calendar entity.""" @@ -32,10 +40,11 @@ class JewishCalendarEntity(Entity): self._attr_device_info = DeviceInfo( entry_type=DeviceEntryType.SERVICE, identifiers={(DOMAIN, config_entry.entry_id)}, - name=config_entry.title, ) - self._location = data[CONF_LOCATION] - self._hebrew = data[CONF_LANGUAGE] == "hebrew" - self._candle_lighting_offset = data[CONF_CANDLE_LIGHT_MINUTES] - self._havdalah_offset = data[CONF_HAVDALAH_OFFSET_MINUTES] - self._diaspora = data[CONF_DIASPORA] + data = config_entry.runtime_data + self._location = data.location + self._hebrew = data.language == "hebrew" + self._language = data.language + self._candle_lighting_offset = data.candle_lighting_offset + self._havdalah_offset = data.havdalah_offset + self._diaspora = data.diaspora diff --git a/homeassistant/components/jewish_calendar/manifest.json b/homeassistant/components/jewish_calendar/manifest.json index 2642f6c81e9..aca45320002 100644 --- a/homeassistant/components/jewish_calendar/manifest.json +++ b/homeassistant/components/jewish_calendar/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/jewish_calendar", "iot_class": "calculated", "loggers": ["hdate"], - "quality_scale": "silver", - "requirements": ["hdate==0.10.9"], + "requirements": ["hdate==0.11.1"], "single_config_entry": true } diff --git a/homeassistant/components/jewish_calendar/sensor.py b/homeassistant/components/jewish_calendar/sensor.py index 87b4375b8b2..d3e70eb411c 100644 --- a/homeassistant/components/jewish_calendar/sensor.py +++ b/homeassistant/components/jewish_calendar/sensor.py @@ -14,15 +14,13 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import SUN_EVENT_SUNSET, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sun import get_astral_event_date import homeassistant.util.dt as dt_util -from .const import DOMAIN -from .entity import JewishCalendarEntity +from .entity import JewishCalendarConfigEntry, JewishCalendarEntity _LOGGER = logging.getLogger(__name__) @@ -169,17 +167,15 @@ TIME_SENSORS: 
tuple[SensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: JewishCalendarConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Jewish calendar sensors .""" - entry = hass.data[DOMAIN][config_entry.entry_id] sensors = [ - JewishCalendarSensor(config_entry, entry, description) - for description in INFO_SENSORS + JewishCalendarSensor(config_entry, description) for description in INFO_SENSORS ] sensors.extend( - JewishCalendarTimeSensor(config_entry, entry, description) + JewishCalendarTimeSensor(config_entry, description) for description in TIME_SENSORS ) @@ -193,12 +189,11 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): def __init__( self, - config_entry: ConfigEntry, - data: dict[str, Any], + config_entry: JewishCalendarConfigEntry, description: SensorEntityDescription, ) -> None: """Initialize the Jewish calendar sensor.""" - super().__init__(config_entry, data, description) + super().__init__(config_entry, description) self._attrs: dict[str, str] = {} async def async_update(self) -> None: @@ -280,15 +275,18 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): # Compute the weekly portion based on the upcoming shabbat. return after_tzais_date.upcoming_shabbat.parasha if self.entity_description.key == "holiday": - self._attrs = { - "id": after_shkia_date.holiday_name, - "type": after_shkia_date.holiday_type.name, - "type_id": after_shkia_date.holiday_type.value, - } - self._attr_options = [ - h.description.hebrew.long if self._hebrew else h.description.english - for h in htables.HOLIDAYS - ] + _id = _type = _type_id = "" + _holiday_type = after_shkia_date.holiday_type + if isinstance(_holiday_type, list): + _id = ", ".join(after_shkia_date.holiday_name) + _type = ", ".join([_htype.name for _htype in _holiday_type]) + _type_id = ", ".join([str(_htype.value) for _htype in _holiday_type]) + else: + _id = after_shkia_date.holiday_name + _type = _holiday_type.name + _type_id = _holiday_type.value + self._attrs = {"id": _id, "type": _type, "type_id": _type_id} + self._attr_options = htables.get_all_holidays(self._language) return after_shkia_date.holiday_description if self.entity_description.key == "omer_count": diff --git a/homeassistant/components/jewish_calendar/strings.json b/homeassistant/components/jewish_calendar/strings.json index e5367b5819e..1b7b86c0056 100644 --- a/homeassistant/components/jewish_calendar/strings.json +++ b/homeassistant/components/jewish_calendar/strings.json @@ -27,7 +27,8 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "options": { diff --git a/homeassistant/components/joaoapps_join/manifest.json b/homeassistant/components/joaoapps_join/manifest.json index 36d54ec6d55..55a908bf090 100644 --- a/homeassistant/components/joaoapps_join/manifest.json +++ b/homeassistant/components/joaoapps_join/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/joaoapps_join", "iot_class": "cloud_push", "loggers": ["pyjoin"], + "quality_scale": "legacy", "requirements": ["python-join-api==0.0.9"] } diff --git a/homeassistant/components/juicenet/__init__.py b/homeassistant/components/juicenet/__init__.py index 5c32caab36f..fcfca7f2492 100644 --- 
a/homeassistant/components/juicenet/__init__.py +++ b/homeassistant/components/juicenet/__init__.py @@ -72,7 +72,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if not juicenet.devices: _LOGGER.error("No JuiceNet devices found for this account") return False - _LOGGER.info("%d JuiceNet device(s) found", len(juicenet.devices)) + _LOGGER.debug("%d JuiceNet device(s) found", len(juicenet.devices)) async def async_update_data(): """Update all device states from the JuiceNet API.""" @@ -83,6 +83,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="JuiceNet", update_method=async_update_data, update_interval=timedelta(seconds=30), diff --git a/homeassistant/components/juicenet/config_flow.py b/homeassistant/components/juicenet/config_flow.py index 393e6842274..8bcee5677e6 100644 --- a/homeassistant/components/juicenet/config_flow.py +++ b/homeassistant/components/juicenet/config_flow.py @@ -69,9 +69,9 @@ class JuiceNetConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) class CannotConnect(exceptions.HomeAssistantError): diff --git a/homeassistant/components/justnimbus/config_flow.py b/homeassistant/components/justnimbus/config_flow.py index 0520c558266..7b0d3f8e5db 100644 --- a/homeassistant/components/justnimbus/config_flow.py +++ b/homeassistant/components/justnimbus/config_flow.py @@ -9,7 +9,7 @@ from typing import Any import justnimbus import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_CLIENT_ID from homeassistant.helpers import config_validation as cv @@ -29,7 +29,6 @@ class JustNimbusConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for JustNimbus.""" VERSION = 1 - reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -44,7 +43,7 @@ class JustNimbusConfigFlow(ConfigFlow, domain=DOMAIN): unique_id = f"{user_input[CONF_CLIENT_ID]}{user_input[CONF_ZIP_CODE]}" await self.async_set_unique_id(unique_id=unique_id) - if not self.reauth_entry: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() client = justnimbus.JustNimbusClient( @@ -60,27 +59,18 @@ class JustNimbusConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if not self.reauth_entry: + if self.source != SOURCE_REAUTH: return self.async_create_entry(title="JustNimbus", data=user_input) - self.hass.config_entries.async_update_entry( - self.reauth_entry, data=user_input, unique_id=unique_id + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input, unique_id=unique_id ) - # Reload the config entry otherwise devices will remain unavailable - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") - return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors ) async def async_step_reauth( - self, user_input: 
Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/jvc_projector/config_flow.py b/homeassistant/components/jvc_projector/config_flow.py index 7fbfb17a976..5d9bedd7591 100644 --- a/homeassistant/components/jvc_projector/config_flow.py +++ b/homeassistant/components/jvc_projector/config_flow.py @@ -9,7 +9,7 @@ from jvcprojector import JvcProjector, JvcProjectorAuthError, JvcProjectorConnec from jvcprojector.projector import DEFAULT_PORT import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.helpers.device_registry import format_mac from homeassistant.util.network import is_host_valid @@ -22,8 +22,6 @@ class JvcProjectorConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -74,25 +72,21 @@ class JvcProjectorConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth on password authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: Mapping[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self._reauth_entry - errors = {} if user_input is not None: - host = self._reauth_entry.data[CONF_HOST] - port = self._reauth_entry.data[CONF_PORT] + reauth_entry = self._get_reauth_entry() + host = reauth_entry.data[CONF_HOST] + port = reauth_entry.data[CONF_PORT] password = user_input[CONF_PASSWORD] try: @@ -102,12 +96,9 @@ class JvcProjectorConfigFlow(ConfigFlow, domain=DOMAIN): except JvcProjectorAuthError: errors["base"] = "invalid_auth" else: - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data={CONF_HOST: host, CONF_PORT: port, CONF_PASSWORD: password}, + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/jvc_projector/coordinator.py b/homeassistant/components/jvc_projector/coordinator.py index 874253b3324..a2ecfa8eb52 100644 --- a/homeassistant/components/jvc_projector/coordinator.py +++ b/homeassistant/components/jvc_projector/coordinator.py @@ -4,6 +4,7 @@ from __future__ import annotations from datetime import timedelta import logging +from typing import Any from jvcprojector import ( JvcProjector, @@ -40,7 +41,7 @@ class JvcProjectorDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str]]): self.device = device self.unique_id = format_mac(device.mac) - async def _async_update_data(self) -> dict[str, str]: + async def _async_update_data(self) -> dict[str, Any]: """Get the latest state data.""" try: state = await self.device.get_state() diff --git 
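
The justnimbus and jvc_projector flows above switch to the reauth helpers on ConfigFlow. A minimal sketch of that shape, assuming a hypothetical "example" domain and omitting the form schema:

from collections.abc import Mapping
from typing import Any

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Hypothetical flow showing the reauth helpers used in the hunks above."""

    VERSION = 1

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        # No manual entry lookup: the base class tracks the entry via the flow context.
        return await self.async_step_user()

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            if self.source == SOURCE_REAUTH:
                # Updates the entry data, reloads it and aborts with "reauth_successful".
                return self.async_update_reload_and_abort(
                    self._get_reauth_entry(), data_updates=user_input
                )
            return self.async_create_entry(title="Example", data=user_input)
        # Schema omitted for brevity.
        return self.async_show_form(step_id="user")
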
a/homeassistant/components/jvc_projector/manifest.json b/homeassistant/components/jvc_projector/manifest.json index 5d83e937494..b8c670277c8 100644 --- a/homeassistant/components/jvc_projector/manifest.json +++ b/homeassistant/components/jvc_projector/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["jvcprojector"], - "requirements": ["pyjvcprojector==1.0.12"] + "requirements": ["pyjvcprojector==1.1.2"] } diff --git a/homeassistant/components/jvc_projector/strings.json b/homeassistant/components/jvc_projector/strings.json index b89139cbab3..b517bf064e1 100644 --- a/homeassistant/components/jvc_projector/strings.json +++ b/homeassistant/components/jvc_projector/strings.json @@ -24,6 +24,7 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "unknown": "[%key:common::config_flow::error::unknown%]" }, "error": { diff --git a/homeassistant/components/kaiterra/manifest.json b/homeassistant/components/kaiterra/manifest.json index 12ac1559fd7..88651565cd0 100644 --- a/homeassistant/components/kaiterra/manifest.json +++ b/homeassistant/components/kaiterra/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kaiterra", "iot_class": "cloud_polling", "loggers": ["kaiterra_async_client"], + "quality_scale": "legacy", "requirements": ["kaiterra-async-client==1.0.0"] } diff --git a/homeassistant/components/kankun/manifest.json b/homeassistant/components/kankun/manifest.json index c15a87eacaa..473209508ac 100644 --- a/homeassistant/components/kankun/manifest.json +++ b/homeassistant/components/kankun/manifest.json @@ -3,5 +3,6 @@ "name": "Kankun", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/kankun", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/kankun/switch.py b/homeassistant/components/kankun/switch.py index a86bed5eb9a..cd91b7660c8 100644 --- a/homeassistant/components/kankun/switch.py +++ b/homeassistant/components/kankun/switch.py @@ -89,7 +89,7 @@ class KankunSwitch(SwitchEntity): def _switch(self, newstate): """Switch on or off.""" - _LOGGER.info("Switching to state: %s", newstate) + _LOGGER.debug("Switching to state: %s", newstate) try: req = requests.get( @@ -101,7 +101,7 @@ class KankunSwitch(SwitchEntity): def _query_state(self): """Query switch state.""" - _LOGGER.info("Querying state from: %s", self._url) + _LOGGER.debug("Querying state from: %s", self._url) try: req = requests.get(f"{self._url}?get=state", auth=self._auth, timeout=5) diff --git a/homeassistant/components/keba/icons.json b/homeassistant/components/keba/icons.json index 7f64bf7fb34..6de43a84cf6 100644 --- a/homeassistant/components/keba/icons.json +++ b/homeassistant/components/keba/icons.json @@ -1,12 +1,28 @@ { "services": { - "request_data": "mdi:database-arrow-down", - "authorize": "mdi:lock", - "deauthorize": "mdi:lock-open", - "set_energy": "mdi:flash", - "set_current": "mdi:flash", - "enable": "mdi:flash", - "disable": "mdi:fash-off", - "set_failsafe": "mdi:message-alert" + "request_data": { + "service": "mdi:database-arrow-down" + }, + "authorize": { + "service": "mdi:lock" + }, + "deauthorize": { + "service": "mdi:lock-open" + }, + "set_energy": { + "service": "mdi:flash" + }, + "set_current": { + 
"service": "mdi:flash" + }, + "enable": { + "service": "mdi:flash" + }, + "disable": { + "service": "mdi:fash-off" + }, + "set_failsafe": { + "service": "mdi:message-alert" + } } } diff --git a/homeassistant/components/keba/manifest.json b/homeassistant/components/keba/manifest.json index 42f2762ef3d..d86ce053187 100644 --- a/homeassistant/components/keba/manifest.json +++ b/homeassistant/components/keba/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/keba", "iot_class": "local_polling", "loggers": ["keba_kecontact"], + "quality_scale": "legacy", "requirements": ["keba-kecontact==1.1.0"] } diff --git a/homeassistant/components/keenetic_ndms2/config_flow.py b/homeassistant/components/keenetic_ndms2/config_flow.py index 9e3c6728338..d11fedac385 100644 --- a/homeassistant/components/keenetic_ndms2/config_flow.py +++ b/homeassistant/components/keenetic_ndms2/config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from urllib.parse import urlparse from ndms2_client import Client, ConnectionException, InterfaceInfo, TelnetConnection @@ -47,13 +47,15 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + host: str | bytes | None = None + @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, ) -> KeeneticOptionsFlowHandler: """Get the options flow for this handler.""" - return KeeneticOptionsFlowHandler(config_entry) + return KeeneticOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -61,7 +63,7 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors = {} if user_input is not None: - host = self.context.get(CONF_HOST) or user_input[CONF_HOST] + host = self.host or user_input[CONF_HOST] self._async_abort_entries_match({CONF_HOST: host}) _client = Client( @@ -86,7 +88,7 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN): ) host_schema: VolDictType = ( - {vol.Required(CONF_HOST): str} if CONF_HOST not in self.context else {} + {vol.Required(CONF_HOST): str} if not self.host else {} ) return self.async_show_form( @@ -116,13 +118,15 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN): if not discovery_info.upnp.get(ssdp.ATTR_UPNP_UDN): return self.async_abort(reason="no_udn") - host = urlparse(discovery_info.ssdp_location).hostname + # We can cast the hostname to str because the ssdp_location is not bytes and + # not a relative url + host = cast(str, urlparse(discovery_info.ssdp_location).hostname) await self.async_set_unique_id(discovery_info.upnp[ssdp.ATTR_UPNP_UDN]) self._abort_if_unique_id_configured(updates={CONF_HOST: host}) self._async_abort_entries_match({CONF_HOST: host}) - self.context[CONF_HOST] = host + self.host = host self.context["title_placeholders"] = { "name": friendly_name, "host": host, @@ -134,9 +138,8 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN): class KeeneticOptionsFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self._interface_options: dict[str, str] = {} async def async_step_init( diff --git a/homeassistant/components/keenetic_ndms2/device_tracker.py b/homeassistant/components/keenetic_ndms2/device_tracker.py index 34c5cb502c6..efd2a88b1f8 100644 --- a/homeassistant/components/keenetic_ndms2/device_tracker.py +++ 
b/homeassistant/components/keenetic_ndms2/device_tracker.py @@ -9,7 +9,6 @@ from ndms2_client import Device from homeassistant.components.device_tracker import ( DOMAIN as DEVICE_TRACKER_DOMAIN, ScannerEntity, - SourceType, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback @@ -103,11 +102,6 @@ class KeeneticTracker(ScannerEntity): < self._router.consider_home_interval ) - @property - def source_type(self) -> SourceType: - """Return the source type of the client.""" - return SourceType.ROUTER - @property def name(self) -> str: """Return the name of the device.""" diff --git a/homeassistant/components/kef/icons.json b/homeassistant/components/kef/icons.json index eeb6dd099ce..e259e91eb1b 100644 --- a/homeassistant/components/kef/icons.json +++ b/homeassistant/components/kef/icons.json @@ -1,12 +1,28 @@ { "services": { - "update_dsp": "mdi:update", - "set_mode": "mdi:cog", - "set_desk_db": "mdi:volume-high", - "set_wall_db": "mdi:volume-high", - "set_treble_db": "mdi:volume-high", - "set_high_hz": "mdi:sine-wave", - "set_low_hz": "mdi:cosine-wave", - "set_sub_db": "mdi:volume-high" + "update_dsp": { + "service": "mdi:update" + }, + "set_mode": { + "service": "mdi:cog" + }, + "set_desk_db": { + "service": "mdi:volume-high" + }, + "set_wall_db": { + "service": "mdi:volume-high" + }, + "set_treble_db": { + "service": "mdi:volume-high" + }, + "set_high_hz": { + "service": "mdi:sine-wave" + }, + "set_low_hz": { + "service": "mdi:cosine-wave" + }, + "set_sub_db": { + "service": "mdi:volume-high" + } } } diff --git a/homeassistant/components/kef/manifest.json b/homeassistant/components/kef/manifest.json index 29e398994f4..1bbce2ff35d 100644 --- a/homeassistant/components/kef/manifest.json +++ b/homeassistant/components/kef/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kef", "iot_class": "local_polling", "loggers": ["aiokef", "tenacity"], + "quality_scale": "legacy", "requirements": ["aiokef==0.2.16", "getmac==0.9.4"] } diff --git a/homeassistant/components/kef/strings.json b/homeassistant/components/kef/strings.json index e5ffff68162..c8aa644333a 100644 --- a/homeassistant/components/kef/strings.json +++ b/homeassistant/components/kef/strings.json @@ -22,14 +22,14 @@ }, "high_pass": { "name": "High pass", - "description": "High-pass mode\"." + "description": "High-pass mode." }, "sub_polarity": { "name": "Subwoofer polarity", "description": "Sub polarity." }, "bass_extension": { - "name": "Base extension", + "name": "Bass extension", "description": "Bass extension." 
} } diff --git a/homeassistant/components/keyboard/icons.json b/homeassistant/components/keyboard/icons.json index 8186b2684dd..03b6210bf41 100644 --- a/homeassistant/components/keyboard/icons.json +++ b/homeassistant/components/keyboard/icons.json @@ -1,10 +1,22 @@ { "services": { - "volume_up": "mdi:volume-high", - "volume_down": "mdi:volume-low", - "volume_mute": "mdi:volume-off", - "media_play_pause": "mdi:play-pause", - "media_next_track": "mdi:skip-next", - "media_prev_track": "mdi:skip-previous" + "volume_up": { + "service": "mdi:volume-high" + }, + "volume_down": { + "service": "mdi:volume-low" + }, + "volume_mute": { + "service": "mdi:volume-off" + }, + "media_play_pause": { + "service": "mdi:play-pause" + }, + "media_next_track": { + "service": "mdi:skip-next" + }, + "media_prev_track": { + "service": "mdi:skip-previous" + } } } diff --git a/homeassistant/components/keyboard/manifest.json b/homeassistant/components/keyboard/manifest.json index ea6d0aa20c2..e4a6606fb80 100644 --- a/homeassistant/components/keyboard/manifest.json +++ b/homeassistant/components/keyboard/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/keyboard", "iot_class": "local_push", "loggers": ["pykeyboard"], + "quality_scale": "legacy", "requirements": ["pyuserinput==0.1.11"] } diff --git a/homeassistant/components/keyboard_remote/manifest.json b/homeassistant/components/keyboard_remote/manifest.json index bb84b32defc..b405f36bb23 100644 --- a/homeassistant/components/keyboard_remote/manifest.json +++ b/homeassistant/components/keyboard_remote/manifest.json @@ -6,5 +6,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["aionotify", "evdev"], + "quality_scale": "legacy", "requirements": ["evdev==1.6.1", "asyncinotify==4.0.2"] } diff --git a/homeassistant/components/keymitt_ble/icons.json b/homeassistant/components/keymitt_ble/icons.json index 77450fbf026..d265d96b395 100644 --- a/homeassistant/components/keymitt_ble/icons.json +++ b/homeassistant/components/keymitt_ble/icons.json @@ -1,5 +1,7 @@ { "services": { - "calibrate": "mdi:wrench" + "calibrate": { + "service": "mdi:wrench" + } } } diff --git a/homeassistant/components/kira/__init__.py b/homeassistant/components/kira/__init__.py index b0305bc0643..52618a125b6 100644 --- a/homeassistant/components/kira/__init__.py +++ b/homeassistant/components/kira/__init__.py @@ -111,7 +111,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the KIRA module and load platform.""" # note: module_name is not the HA device name. 
it's just a unique name # to ensure the component and platform can share information - module_name = ("%s_%d" % (DOMAIN, idx)) if idx else DOMAIN + module_name = f"{DOMAIN}_{idx}" if idx else DOMAIN device_name = module_conf.get(CONF_NAME, DOMAIN) port = module_conf.get(CONF_PORT, DEFAULT_PORT) host = module_conf.get(CONF_HOST, DEFAULT_HOST) @@ -141,7 +141,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Stop the KIRA receiver.""" for receiver in hass.data[DOMAIN][CONF_SENSOR].values(): receiver.stop() - _LOGGER.info("Terminated receivers") + _LOGGER.debug("Terminated receivers") hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_kira) diff --git a/homeassistant/components/kira/manifest.json b/homeassistant/components/kira/manifest.json index c8a476b07c9..60901d13f4e 100644 --- a/homeassistant/components/kira/manifest.json +++ b/homeassistant/components/kira/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kira", "iot_class": "local_push", "loggers": ["pykira"], + "quality_scale": "legacy", "requirements": ["pykira==0.1.1"] } diff --git a/homeassistant/components/kira/remote.py b/homeassistant/components/kira/remote.py index f6ee4af75ef..c1d28f8b077 100644 --- a/homeassistant/components/kira/remote.py +++ b/homeassistant/components/kira/remote.py @@ -45,5 +45,5 @@ class KiraRemote(remote.RemoteEntity): """Send a command to one device.""" for single_command in command: code_tuple = (single_command, kwargs.get(remote.ATTR_DEVICE)) - _LOGGER.info("Sending Command: %s to %s", *code_tuple) + _LOGGER.debug("Sending Command: %s to %s", *code_tuple) self._kira.sendCode(code_tuple) diff --git a/homeassistant/components/kitchen_sink/__init__.py b/homeassistant/components/kitchen_sink/__init__.py index 94dfca77410..88d0c868636 100644 --- a/homeassistant/components/kitchen_sink/__init__.py +++ b/homeassistant/components/kitchen_sink/__init__.py @@ -9,6 +9,8 @@ from __future__ import annotations import datetime from random import random +import voluptuous as vol + from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance from homeassistant.components.recorder.models import StatisticData, StatisticMetaData from homeassistant.components.recorder.statistics import ( @@ -18,14 +20,13 @@ from homeassistant.components.recorder.statistics import ( ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import Platform, UnitOfEnergy, UnitOfTemperature, UnitOfVolume -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util -DOMAIN = "kitchen_sink" - +from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN COMPONENTS_WITH_DEMO_PLATFORM = [ Platform.BUTTON, @@ -40,6 +41,15 @@ COMPONENTS_WITH_DEMO_PLATFORM = [ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +SCHEMA_SERVICE_TEST_SERVICE_1 = vol.Schema( + { + vol.Required("field_1"): vol.Coerce(int), + vol.Required("field_2"): vol.In(["off", "auto", "cool"]), + vol.Optional("field_3"): vol.Coerce(int), + vol.Optional("field_4"): vol.In(["forwards", "reverse"]), + } +) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the demo environment.""" @@ -48,6 +58,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) 
-> bool: DOMAIN, context={"source": SOURCE_IMPORT}, data={} ) ) + + @callback + def service_handler(call: ServiceCall | None = None) -> None: + """Do nothing.""" + + hass.services.async_register( + DOMAIN, "test_service_1", service_handler, SCHEMA_SERVICE_TEST_SERVICE_1 + ) + return True @@ -68,9 +87,27 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b # Start a reauth flow config_entry.async_start_reauth(hass) + # Notify backup listeners + hass.async_create_task(_notify_backup_listeners(hass), eager_start=False) + return True +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload config entry.""" + # Notify backup listeners + hass.async_create_task(_notify_backup_listeners(hass), eager_start=False) + + return await hass.config_entries.async_unload_platforms( + entry, COMPONENTS_WITH_DEMO_PLATFORM + ) + + +async def _notify_backup_listeners(hass: HomeAssistant) -> None: + for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []): + listener() + + def _create_issues(hass: HomeAssistant) -> None: """Create some issue registry issues.""" async_create_issue( diff --git a/homeassistant/components/kitchen_sink/backup.py b/homeassistant/components/kitchen_sink/backup.py new file mode 100644 index 00000000000..615364f55ee --- /dev/null +++ b/homeassistant/components/kitchen_sink/backup.py @@ -0,0 +1,117 @@ +"""Backup platform for the kitchen_sink integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncIterator, Callable, Coroutine +import logging +from typing import Any + +from homeassistant.components.backup import AddonInfo, AgentBackup, BackupAgent, Folder +from homeassistant.core import HomeAssistant, callback + +from . import DATA_BACKUP_AGENT_LISTENERS, DOMAIN + +LOGGER = logging.getLogger(__name__) + + +async def async_get_backup_agents( + hass: HomeAssistant, +) -> list[BackupAgent]: + """Register the backup agents.""" + if not hass.config_entries.async_loaded_entries(DOMAIN): + LOGGER.info("No config entry found or entry is not loaded") + return [] + return [KitchenSinkBackupAgent("syncer")] + + +@callback +def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed.""" + hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener) + + @callback + def remove_listener() -> None: + """Remove the listener.""" + hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener) + + return remove_listener + + +class KitchenSinkBackupAgent(BackupAgent): + """Kitchen sink backup agent.""" + + domain = DOMAIN + + def __init__(self, name: str) -> None: + """Initialize the kitchen sink backup sync agent.""" + super().__init__() + self.name = name + self._uploads = [ + AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id="abc123", + database_included=False, + date="1970-01-01T00:00:00Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Kitchen sink syncer", + protected=False, + size=1234, + ) + ] + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + LOGGER.info("Downloading backup %s", backup_id) + reader = asyncio.StreamReader() + reader.feed_data(b"backup data") + reader.feed_eof() + return reader + + async def async_upload_backup( 
+ self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + LOGGER.info("Uploading backup %s %s", backup.backup_id, backup) + self._uploads.append(backup) + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file.""" + self._uploads = [ + upload for upload in self._uploads if upload.backup_id != backup_id + ] + LOGGER.info("Deleted backup %s", backup_id) + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List synced backups.""" + return self._uploads + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + for backup in self._uploads: + if backup.backup_id == backup_id: + return backup + return None diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index c561ca29b8a..019d1dddcad 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any import voluptuous as vol @@ -11,7 +12,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.core import callback @@ -32,27 +33,28 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") + return self.async_create_entry(title="Kitchen Sink", data=import_data) - return self.async_create_entry(title="Kitchen Sink", data=import_info) - - async def async_step_reauth(self, data): + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Reauth step.""" return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Reauth confirm step.""" if user_input is None: return self.async_show_form(step_id="reauth_confirm") return self.async_abort(reason="reauth_successful") -class OptionsFlowHandler(OptionsFlowWithConfigEntry): +class OptionsFlowHandler(OptionsFlow): """Handle options.""" async def async_step_init( @@ -66,8 +68,7 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry): ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: - self.options.update(user_input) - return await self._update_options() + return self.async_create_entry(data=self.config_entry.options | user_input) return self.async_show_form( step_id="options_1", @@ -93,7 +94,3 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry): } ), ) - - async def _update_options(self) -> ConfigFlowResult: - """Update config entry options.""" - return self.async_create_entry(title="", data=self.options) diff --git a/homeassistant/components/kitchen_sink/const.py b/homeassistant/components/kitchen_sink/const.py new 
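
The kitchen_sink backup platform above keeps its listener list under a typed HassKey. A minimal sketch of that register/unregister bookkeeping, using a hypothetical key:

from collections.abc import Callable

from homeassistant.core import HomeAssistant, callback
from homeassistant.util.hass_dict import HassKey

EXAMPLE_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    "example.backup_agent_listeners"
)


@callback
def async_register_listener(
    hass: HomeAssistant, listener: Callable[[], None]
) -> Callable[[], None]:
    """Register a listener and return a callable that removes it again."""
    hass.data.setdefault(EXAMPLE_LISTENERS, []).append(listener)

    @callback
    def remove_listener() -> None:
        hass.data[EXAMPLE_LISTENERS].remove(listener)

    return remove_listener
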
file mode 100644 index 00000000000..e6edaca46ce --- /dev/null +++ b/homeassistant/components/kitchen_sink/const.py @@ -0,0 +1,12 @@ +"""Constants for the Kitchen Sink integration.""" + +from __future__ import annotations + +from collections.abc import Callable + +from homeassistant.util.hass_dict import HassKey + +DOMAIN = "kitchen_sink" +DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey( + f"{DOMAIN}.backup_agent_listeners" +) diff --git a/homeassistant/components/kitchen_sink/icons.json b/homeassistant/components/kitchen_sink/icons.json index 2947cfa7ec5..565d595d9c7 100644 --- a/homeassistant/components/kitchen_sink/icons.json +++ b/homeassistant/components/kitchen_sink/icons.json @@ -7,5 +7,13 @@ } } } + }, + "services": { + "test_service_1": { + "service": "mdi:flask", + "sections": { + "advanced_fields": "mdi:test-tube" + } + } } } diff --git a/homeassistant/components/kitchen_sink/lock.py b/homeassistant/components/kitchen_sink/lock.py index 9b8093c2f0b..80ecc57d0d9 100644 --- a/homeassistant/components/kitchen_sink/lock.py +++ b/homeassistant/components/kitchen_sink/lock.py @@ -4,9 +4,8 @@ from __future__ import annotations from typing import Any -from homeassistant.components.lock import LockEntity, LockEntityFeature +from homeassistant.components.lock import LockEntity, LockEntityFeature, LockState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_LOCKED, STATE_OPEN, STATE_UNLOCKED from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -24,24 +23,24 @@ async def async_setup_platform( DemoLock( "kitchen_sink_lock_001", "Openable lock", - STATE_LOCKED, + LockState.LOCKED, LockEntityFeature.OPEN, ), DemoLock( "kitchen_sink_lock_002", "Another openable lock", - STATE_UNLOCKED, + LockState.UNLOCKED, LockEntityFeature.OPEN, ), DemoLock( "kitchen_sink_lock_003", "Basic lock", - STATE_LOCKED, + LockState.LOCKED, ), DemoLock( "kitchen_sink_lock_004", "Another basic lock", - STATE_UNLOCKED, + LockState.UNLOCKED, ), ] ) @@ -77,19 +76,19 @@ class DemoLock(LockEntity): @property def is_locked(self) -> bool: """Return true if lock is locked.""" - return self._state == STATE_LOCKED + return self._state == LockState.LOCKED @property def is_open(self) -> bool: """Return true if lock is open.""" - return self._state == STATE_OPEN + return self._state == LockState.OPEN async def async_lock(self, **kwargs: Any) -> None: """Lock the device.""" self._attr_is_locking = True self.async_write_ha_state() self._attr_is_locking = False - self._state = STATE_LOCKED + self._state = LockState.LOCKED self.async_write_ha_state() async def async_unlock(self, **kwargs: Any) -> None: @@ -97,10 +96,10 @@ class DemoLock(LockEntity): self._attr_is_unlocking = True self.async_write_ha_state() self._attr_is_unlocking = False - self._state = STATE_UNLOCKED + self._state = LockState.UNLOCKED self.async_write_ha_state() async def async_open(self, **kwargs: Any) -> None: """Open the door latch.""" - self._state = STATE_OPEN + self._state = LockState.OPEN self.async_write_ha_state() diff --git a/homeassistant/components/kitchen_sink/manifest.json b/homeassistant/components/kitchen_sink/manifest.json index e2f9468f7e0..ae2462afbbd 100644 --- a/homeassistant/components/kitchen_sink/manifest.json +++ b/homeassistant/components/kitchen_sink/manifest.json @@ -5,5 +5,6 @@ "codeowners": ["@home-assistant/core"], "documentation": 
"https://www.home-assistant.io/integrations/kitchen_sink", "iot_class": "calculated", - "quality_scale": "internal" + "quality_scale": "internal", + "single_config_entry": true } diff --git a/homeassistant/components/kitchen_sink/services.yaml b/homeassistant/components/kitchen_sink/services.yaml new file mode 100644 index 00000000000..c65495095dc --- /dev/null +++ b/homeassistant/components/kitchen_sink/services.yaml @@ -0,0 +1,32 @@ +test_service_1: + fields: + field_1: + required: true + selector: + number: + min: 0 + max: 60 + unit_of_measurement: seconds + field_2: + required: true + selector: + select: + options: + - "off" + - "auto" + - "cool" + advanced_fields: + collapsed: true + fields: + field_3: + selector: + number: + min: 0 + max: 24 + unit_of_measurement: hours + field_4: + selector: + select: + options: + - "forward" + - "reverse" diff --git a/homeassistant/components/kitchen_sink/strings.json b/homeassistant/components/kitchen_sink/strings.json index c25964ab2ab..63e27e04637 100644 --- a/homeassistant/components/kitchen_sink/strings.json +++ b/homeassistant/components/kitchen_sink/strings.json @@ -1,8 +1,11 @@ { "config": { + "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + }, "step": { "reauth_confirm": { - "description": "Press SUBMIT to reauthenticate" + "description": "Select **Submit** to reauthenticate" } } }, @@ -38,7 +41,7 @@ "step": { "confirm": { "title": "The power supply needs to be replaced", - "description": "Press SUBMIT to confirm the power supply has been replaced" + "description": "Select **Submit** to confirm the power supply has been replaced" } } } @@ -49,7 +52,7 @@ "step": { "confirm": { "title": "Blinker fluid needs to be refilled", - "description": "Press SUBMIT when blinker fluid has been refilled" + "description": "Select **Submit** when blinker fluid has been refilled" } } } @@ -71,5 +74,35 @@ "title": "This is not a fixable problem", "description": "This issue is never going to give up." } + }, + "services": { + "test_service_1": { + "name": "Test service 1", + "description": "Fake service for testing", + "fields": { + "field_1": { + "name": "Field 1", + "description": "Number of seconds" + }, + "field_2": { + "name": "Field 2", + "description": "Mode" + }, + "field_3": { + "name": "Field 3", + "description": "Number of hours" + }, + "field_4": { + "name": "Field 4", + "description": "Direction" + } + }, + "sections": { + "advanced_fields": { + "name": "Advanced options", + "description": "Some very advanced things" + } + } + } } } diff --git a/homeassistant/components/kiwi/lock.py b/homeassistant/components/kiwi/lock.py index bde9a77f748..887747d4ca4 100644 --- a/homeassistant/components/kiwi/lock.py +++ b/homeassistant/components/kiwi/lock.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.components.lock import ( PLATFORM_SCHEMA as LOCK_PLATFORM_SCHEMA, LockEntity, + LockState, ) from homeassistant.const import ( ATTR_ID, @@ -18,8 +19,6 @@ from homeassistant.const import ( ATTR_LONGITUDE, CONF_PASSWORD, CONF_USERNAME, - STATE_LOCKED, - STATE_UNLOCKED, ) from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv @@ -55,7 +54,7 @@ def setup_platform( return if not (available_locks := kiwi.get_locks()): # No locks found; abort setup routine. 
- _LOGGER.info("No KIWI locks found in your account") + _LOGGER.debug("No KIWI locks found in your account") return add_entities([KiwiLock(lock, kiwi) for lock in available_locks], True) @@ -68,7 +67,7 @@ class KiwiLock(LockEntity): self._sensor = kiwi_lock self._client = client self.lock_id = kiwi_lock["sensor_id"] - self._state = STATE_LOCKED + self._state = LockState.LOCKED address = kiwi_lock.get("address") address.update( @@ -96,7 +95,7 @@ class KiwiLock(LockEntity): @property def is_locked(self) -> bool: """Return true if lock is locked.""" - return self._state == STATE_LOCKED + return self._state == LockState.LOCKED @property def extra_state_attributes(self) -> dict[str, Any]: @@ -106,7 +105,7 @@ class KiwiLock(LockEntity): @callback def clear_unlock_state(self, _): """Clear unlock state automatically.""" - self._state = STATE_LOCKED + self._state = LockState.LOCKED self.async_write_ha_state() def unlock(self, **kwargs: Any) -> None: @@ -117,7 +116,7 @@ class KiwiLock(LockEntity): except KiwiException: _LOGGER.error("Failed to open door") else: - self._state = STATE_UNLOCKED + self._state = LockState.UNLOCKED self.hass.add_job( async_call_later, self.hass, diff --git a/homeassistant/components/kiwi/manifest.json b/homeassistant/components/kiwi/manifest.json index 60b0d1fd28b..74a27776128 100644 --- a/homeassistant/components/kiwi/manifest.json +++ b/homeassistant/components/kiwi/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kiwi", "iot_class": "cloud_polling", "loggers": ["kiwiki"], + "quality_scale": "legacy", "requirements": ["kiwiki-client==0.1.1"] } diff --git a/homeassistant/components/kmtronic/__init__.py b/homeassistant/components/kmtronic/__init__.py index 5f93de3c60e..edec0b32af2 100644 --- a/homeassistant/components/kmtronic/__init__.py +++ b/homeassistant/components/kmtronic/__init__.py @@ -44,6 +44,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{MANUFACTURER} {hub.name}", update_method=async_update_data, update_interval=timedelta(seconds=30), diff --git a/homeassistant/components/kmtronic/config_flow.py b/homeassistant/components/kmtronic/config_flow.py index f83d102ac05..56b1d4675bc 100644 --- a/homeassistant/components/kmtronic/config_flow.py +++ b/homeassistant/components/kmtronic/config_flow.py @@ -66,7 +66,7 @@ class KmtronicConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> KMTronicOptionsFlow: """Get the options flow for this handler.""" - return KMTronicOptionsFlow(config_entry) + return KMTronicOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -102,11 +102,9 @@ class InvalidAuth(HomeAssistantError): class KMTronicOptionsFlow(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/knocki/__init__.py b/homeassistant/components/knocki/__init__.py index 42c3956bd68..dfdf060e3b5 100644 --- a/homeassistant/components/knocki/__init__.py +++ b/homeassistant/components/knocki/__init__.py @@ -41,13 +41,12 @@ async def async_setup_entry(hass: 
HomeAssistant, entry: KnockiConfigEntry) -> bo await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_create_background_task( - hass, client.start_websocket(), "knocki-websocket" - ) + await client.start_websocket() return True async def async_unload_entry(hass: HomeAssistant, entry: KnockiConfigEntry) -> bool: """Unload a config entry.""" + await entry.runtime_data.client.close() return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/knocki/manifest.json b/homeassistant/components/knocki/manifest.json index 4195320f382..a91119ca831 100644 --- a/homeassistant/components/knocki/manifest.json +++ b/homeassistant/components/knocki/manifest.json @@ -4,8 +4,8 @@ "codeowners": ["@joostlek", "@jgatto1", "@JakeBosh"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/knocki", - "integration_type": "device", + "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["knocki"], - "requirements": ["knocki==0.3.1"] + "requirements": ["knocki==0.4.2"] } diff --git a/homeassistant/components/knocki/strings.json b/homeassistant/components/knocki/strings.json index b7a7daad1fc..8e6fb722281 100644 --- a/homeassistant/components/knocki/strings.json +++ b/homeassistant/components/knocki/strings.json @@ -10,7 +10,11 @@ }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" } }, "entity": { diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index a401ee2ccac..ea654c358e7 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -5,6 +5,7 @@ from __future__ import annotations import contextlib import logging from pathlib import Path +from typing import Final import voluptuous as vol from xknx import XKNX @@ -28,8 +29,6 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import discovery -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceEntry from homeassistant.helpers.reload import async_integration_yaml_config from homeassistant.helpers.storage import STORAGE_DIR @@ -55,13 +54,14 @@ from .const import ( CONF_KNX_SECURE_USER_PASSWORD, CONF_KNX_STATE_UPDATER, CONF_KNX_TELEGRAM_LOG_SIZE, + CONF_KNX_TUNNEL_ENDPOINT_IA, CONF_KNX_TUNNELING, CONF_KNX_TUNNELING_TCP, CONF_KNX_TUNNELING_TCP_SECURE, DATA_HASS_CONFIG, - DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, SUPPORTED_PLATFORMS_UI, SUPPORTED_PLATFORMS_YAML, TELEGRAM_LOG_DEFAULT, @@ -97,24 +97,11 @@ from .websocket import register_panel _LOGGER = logging.getLogger(__name__) +_KNX_YAML_CONFIG: Final = "knx_yaml_config" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( - # deprecated since 2021.12 - cv.deprecated(CONF_KNX_STATE_UPDATER), - cv.deprecated(CONF_KNX_RATE_LIMIT), - cv.deprecated(CONF_KNX_ROUTING), - cv.deprecated(CONF_KNX_TUNNELING), - cv.deprecated(CONF_KNX_INDIVIDUAL_ADDRESS), - cv.deprecated(CONF_KNX_MCAST_GRP), - cv.deprecated(CONF_KNX_MCAST_PORT), - cv.deprecated("event_filter"), - # deprecated since 2021.4 - cv.deprecated("config_file"), - # deprecated since 2021.2 - 
cv.deprecated("fire_event"), - cv.deprecated("fire_event_filter"), vol.Schema( { **EventSchema.SCHEMA, @@ -148,7 +135,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Start the KNX integration.""" hass.data[DATA_HASS_CONFIG] = config if (conf := config.get(DOMAIN)) is not None: - hass.data[DATA_KNX_CONFIG] = dict(conf) + hass.data[_KNX_YAML_CONFIG] = dict(conf) register_knx_services(hass) return True @@ -156,16 +143,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Load a config entry.""" - # `config` is None when reloading the integration - # or no `knx` key in configuration.yaml - if (config := hass.data.get(DATA_KNX_CONFIG)) is None: + # `_KNX_YAML_CONFIG` is only set in async_setup. + # It's None when reloading the integration or no `knx` key in configuration.yaml + config = hass.data.pop(_KNX_YAML_CONFIG, None) + if config is None: _conf = await async_integration_yaml_config(hass, DOMAIN) if not _conf or DOMAIN not in _conf: - _LOGGER.warning( - "No `knx:` key found in configuration.yaml. See " - "https://www.home-assistant.io/integrations/knx/ " - "for KNX entity configuration documentation" - ) # generate defaults config = CONFIG_SCHEMA({DOMAIN: {}})[DOMAIN] else: @@ -176,33 +159,25 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except XKNXException as ex: raise ConfigEntryNotReady from ex - hass.data[DATA_KNX_CONFIG] = config - hass.data[DOMAIN] = knx_module + hass.data[KNX_MODULE_KEY] = knx_module if CONF_KNX_EXPOSE in config: for expose_config in config[CONF_KNX_EXPOSE]: knx_module.exposures.append( create_knx_exposure(hass, knx_module.xknx, expose_config) ) + configured_platforms_yaml = { + platform for platform in SUPPORTED_PLATFORMS_YAML if platform in config + } await hass.config_entries.async_forward_entry_setups( entry, { Platform.SENSOR, # always forward sensor for system entities (telegram counter, etc.) *SUPPORTED_PLATFORMS_UI, # forward all platforms that support UI entity management - *{ # forward yaml-only managed platforms on demand - platform for platform in SUPPORTED_PLATFORMS_YAML if platform in config - }, + *configured_platforms_yaml, # forward yaml-only managed platforms on demand, }, ) - # set up notify service for backwards compatibility - remove 2024.11 - if NotifySchema.PLATFORM in config: - hass.async_create_task( - discovery.async_load_platform( - hass, Platform.NOTIFY, DOMAIN, {}, hass.data[DATA_HASS_CONFIG] - ) - ) - await register_panel(hass) return True @@ -210,30 +185,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unloading the KNX platforms.""" - # if not loaded directly return - if not hass.data.get(DOMAIN): + knx_module = hass.data.get(KNX_MODULE_KEY) + if not knx_module: + # if not loaded directly return return True - knx_module: KNXModule = hass.data[DOMAIN] for exposure in knx_module.exposures: exposure.async_remove() + configured_platforms_yaml = { + platform + for platform in SUPPORTED_PLATFORMS_YAML + if platform in knx_module.config_yaml + } unload_ok = await hass.config_entries.async_unload_platforms( entry, { Platform.SENSOR, # always unload system entities (telegram counter, etc.) 
*SUPPORTED_PLATFORMS_UI, # unload all platforms that support UI entity management - *{ # unload yaml-only managed platforms if configured - platform - for platform in SUPPORTED_PLATFORMS_YAML - if platform in hass.data[DATA_KNX_CONFIG] - }, + *configured_platforms_yaml, # unload yaml-only managed platforms if configured, }, ) if unload_ok: await knx_module.stop() hass.data.pop(DOMAIN) - hass.data.pop(DATA_KNX_CONFIG) return unload_ok @@ -267,7 +242,7 @@ async def async_remove_config_entry_device( hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry ) -> bool: """Remove a config entry from a device.""" - knx_module: KNXModule = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] if not device_entry.identifiers.isdisjoint( knx_module.interface_device.device_info["identifiers"] ): @@ -287,7 +262,7 @@ class KNXModule: ) -> None: """Initialize KNX module.""" self.hass = hass - self.config = config + self.config_yaml = config self.connected = False self.exposures: list[KNXExposeSensor | KNXExposeTime] = [] self.service_exposures: dict[str, KNXExposeSensor | KNXExposeTime] = {} @@ -297,6 +272,7 @@ class KNXModule: self.config_store = KNXConfigStore(hass=hass, config_entry=entry) self.xknx = XKNX( + address_format=self.project.get_address_format(), connection_config=self.connection_config(), rate_limit=self.entry.data[CONF_KNX_RATE_LIMIT], state_updater=self.entry.data[CONF_KNX_STATE_UPDATER], @@ -377,6 +353,7 @@ class KNXModule: if _conn_type == CONF_KNX_TUNNELING_TCP: return ConnectionConfig( connection_type=ConnectionType.TUNNELING_TCP, + individual_address=self.entry.data.get(CONF_KNX_TUNNEL_ENDPOINT_IA), gateway_ip=self.entry.data[CONF_HOST], gateway_port=self.entry.data[CONF_PORT], auto_reconnect=True, @@ -389,6 +366,7 @@ class KNXModule: if _conn_type == CONF_KNX_TUNNELING_TCP_SECURE: return ConnectionConfig( connection_type=ConnectionType.TUNNELING_TCP_SECURE, + individual_address=self.entry.data.get(CONF_KNX_TUNNEL_ENDPOINT_IA), gateway_ip=self.entry.data[CONF_HOST], gateway_port=self.entry.data[CONF_PORT], secure_config=SecureConfig( @@ -488,7 +466,7 @@ class KNXModule: def register_event_callback(self) -> TelegramQueue.Callback: """Register callback for knx_event within XKNX TelegramQueue.""" address_filters = [] - for filter_set in self.config[CONF_EVENT]: + for filter_set in self.config_yaml[CONF_EVENT]: _filters = list(map(AddressFilter, filter_set[KNX_ADDRESS])) address_filters.extend(_filters) if (dpt := filter_set.get(CONF_TYPE)) and ( diff --git a/homeassistant/components/knx/binary_sensor.py b/homeassistant/components/knx/binary_sensor.py index 7d80ca55bf6..96438df96d7 100644 --- a/homeassistant/components/knx/binary_sensor.py +++ b/homeassistant/components/knx/binary_sensor.py @@ -23,8 +23,8 @@ from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import ATTR_COUNTER, ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxYamlEntity +from .const import ATTR_COUNTER, ATTR_SOURCE, KNX_MODULE_KEY +from .entity import KnxYamlEntity from .schema import BinarySensorSchema @@ -34,12 +34,11 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the KNX binary sensor platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: ConfigType = hass.data[DATA_KNX_CONFIG] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.BINARY_SENSOR] async_add_entities( - KNXBinarySensor(knx_module, entity_config) - for entity_config in config[Platform.BINARY_SENSOR] + KNXBinarySensor(knx_module, entity_config) for entity_config in config ) diff --git a/homeassistant/components/knx/button.py b/homeassistant/components/knx/button.py index f6627fc527b..5a2add5dcd7 100644 --- a/homeassistant/components/knx/button.py +++ b/homeassistant/components/knx/button.py @@ -12,8 +12,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import CONF_PAYLOAD_LENGTH, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxYamlEntity +from .const import CONF_PAYLOAD_LENGTH, KNX_ADDRESS, KNX_MODULE_KEY +from .entity import KnxYamlEntity async def async_setup_entry( @@ -22,13 +22,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the KNX binary sensor platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: ConfigType = hass.data[DATA_KNX_CONFIG] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.BUTTON] - async_add_entities( - KNXButton(knx_module, entity_config) - for entity_config in config[Platform.BUTTON] - ) + async_add_entities(KNXButton(knx_module, entity_config) for entity_config in config) class KNXButton(KnxYamlEntity, ButtonEntity): diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index 4932df55087..af58dd6ef4d 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -10,10 +10,15 @@ from xknx.devices import ( ClimateMode as XknxClimateMode, Device as XknxDevice, ) +from xknx.devices.fan import FanSpeedMode from xknx.dpt.dpt_20 import HVACControllerMode, HVACOperationMode from homeassistant import config_entries from homeassistant.components.climate import ( + FAN_HIGH, + FAN_LOW, + FAN_MEDIUM, + FAN_ON, ClimateEntity, ClimateEntityFeature, HVACAction, @@ -31,8 +36,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import CONTROLLER_MODES, CURRENT_HVAC_ACTIONS, DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxYamlEntity +from .const import CONTROLLER_MODES, CURRENT_HVAC_ACTIONS, KNX_MODULE_KEY +from .entity import KnxYamlEntity from .schema import ClimateSchema ATTR_COMMAND_VALUE = "command_value" @@ -45,8 +50,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up climate(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.CLIMATE] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.CLIMATE] async_add_entities( KNXClimate(knx_module, entity_config) for entity_config in config @@ -126,6 +131,14 @@ def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate: min_temp=config.get(ClimateSchema.CONF_MIN_TEMP), max_temp=config.get(ClimateSchema.CONF_MAX_TEMP), mode=climate_mode, + group_address_fan_speed=config.get(ClimateSchema.CONF_FAN_SPEED_ADDRESS), + group_address_fan_speed_state=config.get( + ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS + ), + fan_speed_mode=config[ClimateSchema.CONF_FAN_SPEED_MODE], + group_address_humidity_state=config.get( + ClimateSchema.CONF_HUMIDITY_STATE_ADDRESS + ), ) @@ -135,7 +148,6 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): _device: XknxClimate _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = "knx_climate" - _enable_turn_on_off_backwards_compatibility = False def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of a KNX climate device.""" @@ -166,6 +178,36 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): self._attr_preset_modes = [ mode.name.lower() for mode in self._device.mode.operation_modes ] + + fan_max_step = config[ClimateSchema.CONF_FAN_MAX_STEP] + self._fan_modes_percentages = [ + int(100 * i / fan_max_step) for i in range(fan_max_step + 1) + ] + self.fan_zero_mode: str = config[ClimateSchema.CONF_FAN_ZERO_MODE] + + if self._device.fan_speed is not None and self._device.fan_speed.initialized: + self._attr_supported_features |= ClimateEntityFeature.FAN_MODE + + if fan_max_step == 3: + self._attr_fan_modes = [ + self.fan_zero_mode, + FAN_LOW, + FAN_MEDIUM, + FAN_HIGH, + ] + elif fan_max_step == 2: + self._attr_fan_modes = [self.fan_zero_mode, FAN_LOW, FAN_HIGH] + elif fan_max_step == 1: + self._attr_fan_modes = [self.fan_zero_mode, FAN_ON] + elif self._device.fan_speed_mode == FanSpeedMode.STEP: + self._attr_fan_modes = [self.fan_zero_mode] + [ + str(i) for i in range(1, fan_max_step + 1) + ] + else: + self._attr_fan_modes = [self.fan_zero_mode] + [ + f"{percentage}%" for percentage in self._fan_modes_percentages[1:] + ] + self._attr_target_temperature_step = self._device.temperature_step self._attr_unique_id = ( f"{self._device.temperature.group_address_state}_" @@ -322,6 +364,46 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): ) self.async_write_ha_state() + @property + def fan_mode(self) -> str: + """Return the fan setting.""" + + fan_speed = self._device.current_fan_speed + + if not fan_speed or self._attr_fan_modes is None: + return self.fan_zero_mode + + if self._device.fan_speed_mode == FanSpeedMode.STEP: + return self._attr_fan_modes[fan_speed] + + # Find the closest fan mode percentage + closest_percentage = min( + self._fan_modes_percentages[1:], # fan_speed == 0 is handled above + key=lambda x: abs(x - fan_speed), + ) + return self._attr_fan_modes[ + 
self._fan_modes_percentages.index(closest_percentage) + ] + + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set fan mode.""" + + if self._attr_fan_modes is None: + return + + fan_mode_index = self._attr_fan_modes.index(fan_mode) + + if self._device.fan_speed_mode == FanSpeedMode.STEP: + await self._device.set_fan_speed(fan_mode_index) + return + + await self._device.set_fan_speed(self._fan_modes_percentages[fan_mode_index]) + + @property + def current_humidity(self) -> float | None: + """Return the current humidity.""" + return self._device.humidity.value + @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return device specific state attributes.""" diff --git a/homeassistant/components/knx/config_flow.py b/homeassistant/components/knx/config_flow.py index 7e4db1f889b..feeb7626577 100644 --- a/homeassistant/components/knx/config_flow.py +++ b/homeassistant/components/knx/config_flow.py @@ -58,6 +58,7 @@ from .const import ( CONF_KNX_TUNNELING_TCP_SECURE, DEFAULT_ROUTING_IA, DOMAIN, + KNX_MODULE_KEY, TELEGRAM_LOG_DEFAULT, TELEGRAM_LOG_MAX, KNXConfigEntryData, @@ -182,7 +183,9 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): CONF_KNX_ROUTING: CONF_KNX_ROUTING.capitalize(), } - if isinstance(self, OptionsFlow) and (knx_module := self.hass.data.get(DOMAIN)): + if isinstance(self, OptionsFlow) and ( + knx_module := self.hass.data.get(KNX_MODULE_KEY) + ): xknx = knx_module.xknx else: xknx = XKNX() @@ -767,7 +770,6 @@ class KNXOptionsFlow(KNXCommonFlow, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize KNX options flow.""" - self.config_entry = config_entry super().__init__(initial_data=config_entry.data) # type: ignore[arg-type] @callback diff --git a/homeassistant/components/knx/const.py b/homeassistant/components/knx/const.py index 9ceb18385cb..a946ded0359 100644 --- a/homeassistant/components/knx/const.py +++ b/homeassistant/components/knx/const.py @@ -3,16 +3,21 @@ from __future__ import annotations from collections.abc import Awaitable, Callable -from enum import Enum -from typing import Final, TypedDict +from enum import Enum, StrEnum +from typing import TYPE_CHECKING, Final, TypedDict from xknx.dpt.dpt_20 import HVACControllerMode from xknx.telegram import Telegram -from homeassistant.components.climate import HVACAction, HVACMode +from homeassistant.components.climate import FAN_AUTO, FAN_OFF, HVACAction, HVACMode from homeassistant.const import Platform +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from . 
import KNXModule DOMAIN: Final = "knx" +KNX_MODULE_KEY: HassKey[KNXModule] = HassKey(DOMAIN) # Address is used for configuration and services by the same functions so the key has to match KNX_ADDRESS: Final = "address" @@ -47,8 +52,8 @@ CONF_KNX_DEFAULT_RATE_LIMIT: Final = 0 DEFAULT_ROUTING_IA: Final = "0.0.240" CONF_KNX_TELEGRAM_LOG_SIZE: Final = "telegram_log_size" -TELEGRAM_LOG_DEFAULT: Final = 200 -TELEGRAM_LOG_MAX: Final = 5000 # ~2 MB or ~5 hours of reasonable bus load +TELEGRAM_LOG_DEFAULT: Final = 1000 +TELEGRAM_LOG_MAX: Final = 25000 # ~10 MB or ~25 hours of reasonable bus load ## # Secure constants @@ -68,8 +73,6 @@ CONF_RESPOND_TO_READ: Final = "respond_to_read" CONF_STATE_ADDRESS: Final = "state_address" CONF_SYNC_STATE: Final = "sync_state" -# yaml config merged with config entry data -DATA_KNX_CONFIG: Final = "knx_config" # original hass yaml config DATA_HASS_CONFIG: Final = "knx_hass_config" @@ -101,7 +104,7 @@ class KNXConfigEntryData(TypedDict, total=False): route_back: bool # not required host: str # only required for tunnelling port: int # only required for tunnelling - tunnel_endpoint_ia: str | None + tunnel_endpoint_ia: str | None # tunnelling only - not required (use get()) # KNX secure user_id: int | None # not required user_password: str | None # not required @@ -126,6 +129,13 @@ class ColorTempModes(Enum): RELATIVE = "5.001" +class FanZeroMode(StrEnum): + """Enum for setting the fan zero mode.""" + + OFF = FAN_OFF + AUTO = FAN_AUTO + + SUPPORTED_PLATFORMS_YAML: Final = { Platform.BINARY_SENSOR, Platform.BUTTON, diff --git a/homeassistant/components/knx/cover.py b/homeassistant/components/knx/cover.py index 408f746e094..2d38426a687 100644 --- a/homeassistant/components/knx/cover.py +++ b/homeassistant/components/knx/cover.py @@ -26,8 +26,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxYamlEntity +from .const import KNX_MODULE_KEY +from .entity import KnxYamlEntity from .schema import CoverSchema @@ -37,8 +37,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up cover(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.COVER] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.COVER] async_add_entities(KNXCover(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/date.py b/homeassistant/components/knx/date.py index 9f04a4acd7e..8f65ac8a952 100644 --- a/homeassistant/components/knx/date.py +++ b/homeassistant/components/knx/date.py @@ -27,11 +27,10 @@ from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, CONF_SYNC_STATE, - DATA_KNX_CONFIG, - DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) -from .knx_entity import KnxYamlEntity +from .entity import KnxYamlEntity async def async_setup_entry( @@ -40,8 +39,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATE] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.DATE] async_add_entities( KNXDateEntity(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/datetime.py b/homeassistant/components/knx/datetime.py index 8f1a25e6e3c..caeaed6da93 100644 --- a/homeassistant/components/knx/datetime.py +++ b/homeassistant/components/knx/datetime.py @@ -28,11 +28,10 @@ from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, CONF_SYNC_STATE, - DATA_KNX_CONFIG, - DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) -from .knx_entity import KnxYamlEntity +from .entity import KnxYamlEntity async def async_setup_entry( @@ -41,8 +40,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATETIME] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.DATETIME] async_add_entities( KNXDateTimeEntity(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/device_trigger.py b/homeassistant/components/knx/device_trigger.py index ea3cc5faad4..2eb1f86e7fc 100644 --- a/homeassistant/components/knx/device_trigger.py +++ b/homeassistant/components/knx/device_trigger.py @@ -6,8 +6,8 @@ from typing import Any, Final import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE @@ -16,9 +16,8 @@ from homeassistant.helpers import selector from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import KNXModule, trigger -from .const import DOMAIN -from .project import KNXProject +from . 
import trigger +from .const import DOMAIN, KNX_MODULE_KEY from .trigger import ( CONF_KNX_DESTINATION, CONF_KNX_GROUP_VALUE_READ, @@ -47,7 +46,7 @@ async def async_get_triggers( """List device triggers for KNX devices.""" triggers = [] - knx: KNXModule = hass.data[DOMAIN] + knx = hass.data[KNX_MODULE_KEY] if knx.interface_device.device.id == device_id: # Add trigger for KNX telegrams to interface device triggers.append( @@ -67,7 +66,7 @@ async def async_get_trigger_capabilities( hass: HomeAssistant, config: ConfigType ) -> dict[str, vol.Schema]: """List trigger capabilities.""" - project: KNXProject = hass.data[DOMAIN].project + project = hass.data[KNX_MODULE_KEY].project options = [ selector.SelectOptionDict(value=ga.address, label=f"{ga.address} - {ga.name}") for ga in project.group_addresses.values() diff --git a/homeassistant/components/knx/diagnostics.py b/homeassistant/components/knx/diagnostics.py index 1907539fc61..974a6b3b448 100644 --- a/homeassistant/components/knx/diagnostics.py +++ b/homeassistant/components/knx/diagnostics.py @@ -18,6 +18,7 @@ from .const import ( CONF_KNX_SECURE_DEVICE_AUTHENTICATION, CONF_KNX_SECURE_USER_PASSWORD, DOMAIN, + KNX_MODULE_KEY, ) TO_REDACT = { @@ -33,7 +34,7 @@ async def async_get_config_entry_diagnostics( ) -> dict[str, Any]: """Return diagnostics for a config entry.""" diag: dict[str, Any] = {} - knx_module = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] diag["xknx"] = { "version": knx_module.xknx.version, "current_address": str(knx_module.xknx.current_address), diff --git a/homeassistant/components/knx/knx_entity.py b/homeassistant/components/knx/entity.py similarity index 82% rename from homeassistant/components/knx/knx_entity.py rename to homeassistant/components/knx/entity.py index c81a6ee06db..6574e5d5860 100644 --- a/homeassistant/components/knx/knx_entity.py +++ b/homeassistant/components/knx/entity.py @@ -2,20 +2,23 @@ from __future__ import annotations -from abc import ABC, abstractmethod from typing import TYPE_CHECKING, Any from xknx.devices import Device as XknxDevice +from homeassistant.const import CONF_ENTITY_CATEGORY, EntityCategory +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.entity_registry import RegistryEntry +from .const import DOMAIN +from .storage.config_store import PlatformControllerBase +from .storage.const import CONF_DEVICE_INFO + if TYPE_CHECKING: from . 
import KNXModule -from .storage.config_store import PlatformControllerBase - class KnxUiEntityPlatformController(PlatformControllerBase): """Class to manage dynamic adding and reloading of UI entities.""" @@ -93,13 +96,19 @@ class KnxYamlEntity(_KnxEntityBase): self._device = device -class KnxUiEntity(_KnxEntityBase, ABC): +class KnxUiEntity(_KnxEntityBase): """Representation of a KNX UI entity.""" _attr_unique_id: str + _attr_has_entity_name = True - @abstractmethod def __init__( - self, knx_module: KNXModule, unique_id: str, config: dict[str, Any] + self, knx_module: KNXModule, unique_id: str, entity_config: dict[str, Any] ) -> None: """Initialize the UI entity.""" + self._knx_module = knx_module + self._attr_unique_id = unique_id + if entity_category := entity_config.get(CONF_ENTITY_CATEGORY): + self._attr_entity_category = EntityCategory(entity_category) + if device_info := entity_config.get(CONF_DEVICE_INFO): + self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_info)}) diff --git a/homeassistant/components/knx/expose.py b/homeassistant/components/knx/expose.py index 921af6ba4a9..82bee48ba69 100644 --- a/homeassistant/components/knx/expose.py +++ b/homeassistant/components/knx/expose.py @@ -125,6 +125,8 @@ class KNXExposeSensor: def _get_expose_value(self, state: State | None) -> bool | int | float | str | None: """Extract value from state.""" if state is None or state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE): + if self.expose_default is None: + return None value = self.expose_default elif self.expose_attribute is not None: _attr = state.attributes.get(self.expose_attribute) @@ -154,12 +156,22 @@ class KNXExposeSensor: if value is not None and ( isinstance(self.device.sensor_value, RemoteValueSensor) ): - if issubclass(self.device.sensor_value.dpt_class, DPTNumeric): - return float(value) - if issubclass(self.device.sensor_value.dpt_class, DPTString): - # DPT 16.000 only allows up to 14 Bytes - return str(value)[:14] - return value + try: + if issubclass(self.device.sensor_value.dpt_class, DPTNumeric): + return float(value) + if issubclass(self.device.sensor_value.dpt_class, DPTString): + # DPT 16.000 only allows up to 14 Bytes + return str(value)[:14] + except (ValueError, TypeError) as err: + _LOGGER.warning( + 'Could not expose %s %s value "%s" to KNX: Conversion failed: %s', + self.entity_id, + self.expose_attribute or "state", + value, + err, + ) + return None + return value # type: ignore[no-any-return] async def _async_entity_changed(self, event: Event[EventStateChangedData]) -> None: """Handle entity change.""" diff --git a/homeassistant/components/knx/fan.py b/homeassistant/components/knx/fan.py index 6fd87be97d1..75d91e48048 100644 --- a/homeassistant/components/knx/fan.py +++ b/homeassistant/components/knx/fan.py @@ -20,8 +20,8 @@ from homeassistant.util.percentage import ( from homeassistant.util.scaling import int_states_in_range from . 
import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxYamlEntity +from .const import KNX_ADDRESS, KNX_MODULE_KEY +from .entity import KnxYamlEntity from .schema import FanSchema DEFAULT_PERCENTAGE: Final = 50 @@ -33,8 +33,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up fan(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.FAN] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.FAN] async_add_entities(KNXFan(knx_module, entity_config) for entity_config in config) @@ -43,7 +43,6 @@ class KNXFan(KnxYamlEntity, FanEntity): """Representation of a KNX fan.""" _device: XknxFan - _enable_turn_on_off_backwards_compatibility = False def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX fan.""" diff --git a/homeassistant/components/knx/icons.json b/homeassistant/components/knx/icons.json index 2aee34219f6..756b6ab9f9e 100644 --- a/homeassistant/components/knx/icons.json +++ b/homeassistant/components/knx/icons.json @@ -36,10 +36,20 @@ } }, "services": { - "send": "mdi:email-arrow-right", - "read": "mdi:email-search", - "event_register": "mdi:home-import-outline", - "exposure_register": "mdi:home-export-outline", - "reload": "mdi:reload" + "send": { + "service": "mdi:email-arrow-right" + }, + "read": { + "service": "mdi:email-search" + }, + "event_register": { + "service": "mdi:home-import-outline" + }, + "exposure_register": { + "service": "mdi:home-export-outline" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/knx/light.py b/homeassistant/components/knx/light.py index 0caa3f0a799..8e64b46c890 100644 --- a/homeassistant/components/knx/light.py +++ b/homeassistant/components/knx/light.py @@ -4,6 +4,7 @@ from __future__ import annotations from typing import Any, cast +from propcache import cached_property from xknx import XKNX from xknx.devices.light import ColorTemperatureType, Light as XknxLight, XYYColor @@ -20,7 +21,6 @@ from homeassistant.components.light import ( ) from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, async_get_current_platform, @@ -29,13 +29,12 @@ from homeassistant.helpers.typing import ConfigType import homeassistant.util.color as color_util from . 
import KNXModule -from .const import CONF_SYNC_STATE, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, ColorTempModes -from .knx_entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity +from .const import CONF_SYNC_STATE, DOMAIN, KNX_ADDRESS, KNX_MODULE_KEY, ColorTempModes +from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity from .schema import LightSchema from .storage.const import ( CONF_COLOR_TEMP_MAX, CONF_COLOR_TEMP_MIN, - CONF_DEVICE_INFO, CONF_DPT, CONF_ENTITY, CONF_GA_BLUE_BRIGHTNESS, @@ -65,7 +64,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up light(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] platform = async_get_current_platform() knx_module.config_store.add_platform( platform=Platform.LIGHT, @@ -77,7 +76,7 @@ async def async_setup_entry( ) entities: list[KnxYamlEntity | KnxUiEntity] = [] - if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.LIGHT): + if yaml_platform_config := knx_module.config_yaml.get(Platform.LIGHT): entities.extend( KnxYamlLight(knx_module, entity_config) for entity_config in yaml_platform_config @@ -391,39 +390,47 @@ class _KnxLight(LightEntity): ) return None - @property - def color_mode(self) -> ColorMode: - """Return the color mode of the light.""" - if self._device.supports_xyy_color: - return ColorMode.XY - if self._device.supports_hs_color: - return ColorMode.HS - if self._device.supports_rgbw: - return ColorMode.RGBW - if self._device.supports_color: - return ColorMode.RGB + @cached_property + def supported_color_modes(self) -> set[ColorMode]: + """Get supported color modes.""" + color_mode = set() if ( self._device.supports_color_temperature or self._device.supports_tunable_white ): - return ColorMode.COLOR_TEMP - if self._device.supports_brightness: - return ColorMode.BRIGHTNESS - return ColorMode.ONOFF - - @property - def supported_color_modes(self) -> set[ColorMode]: - """Flag supported color modes.""" - return {self.color_mode} + color_mode.add(ColorMode.COLOR_TEMP) + if self._device.supports_xyy_color: + color_mode.add(ColorMode.XY) + if self._device.supports_rgbw: + color_mode.add(ColorMode.RGBW) + elif self._device.supports_color: + # one of RGB or RGBW so individual color configurations work properly + color_mode.add(ColorMode.RGB) + if self._device.supports_hs_color: + color_mode.add(ColorMode.HS) + if not color_mode: + # brightness or on/off must be the only supported mode + if self._device.supports_brightness: + color_mode.add(ColorMode.BRIGHTNESS) + else: + color_mode.add(ColorMode.ONOFF) + return color_mode async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) - color_temp = kwargs.get(ATTR_COLOR_TEMP_KELVIN) - rgb = kwargs.get(ATTR_RGB_COLOR) - rgbw = kwargs.get(ATTR_RGBW_COLOR) - hs_color = kwargs.get(ATTR_HS_COLOR) - xy_color = kwargs.get(ATTR_XY_COLOR) + # LightEntity color translation will ensure that only attributes of supported + # color modes are passed to this method - so we can't set unsupported mode here + if color_temp := kwargs.get(ATTR_COLOR_TEMP_KELVIN): + self._attr_color_mode = ColorMode.COLOR_TEMP + if rgb := kwargs.get(ATTR_RGB_COLOR): + self._attr_color_mode = ColorMode.RGB + if rgbw := kwargs.get(ATTR_RGBW_COLOR): + self._attr_color_mode = ColorMode.RGBW + if hs_color := kwargs.get(ATTR_HS_COLOR): + self._attr_color_mode = ColorMode.HS + if xy_color := kwargs.get(ATTR_XY_COLOR): + 
self._attr_color_mode = ColorMode.XY if ( not self.is_on @@ -502,17 +509,17 @@ class _KnxLight(LightEntity): await self._device.set_brightness(brightness) return # brightness without color in kwargs; set via color - if self.color_mode == ColorMode.XY: + if self._attr_color_mode == ColorMode.XY: await self._device.set_xyy_color(XYYColor(brightness=brightness)) return # default to white if color not known for RGB(W) - if self.color_mode == ColorMode.RGBW: + if self._attr_color_mode == ColorMode.RGBW: _rgbw = self.rgbw_color if not _rgbw or not any(_rgbw): _rgbw = (0, 0, 0, 255) await set_color(_rgbw[:3], _rgbw[3], brightness) return - if self.color_mode == ColorMode.RGB: + if self._attr_color_mode == ColorMode.RGB: _rgb = self.rgb_color if not _rgb or not any(_rgb): _rgb = (255, 255, 255) @@ -535,6 +542,7 @@ class KnxYamlLight(_KnxLight, KnxYamlEntity): knx_module=knx_module, device=_create_yaml_light(knx_module.xknx, config), ) + self._attr_color_mode = next(iter(self.supported_color_modes)) self._attr_max_color_temp_kelvin: int = config[LightSchema.CONF_MAX_KELVIN] self._attr_min_color_temp_kelvin: int = config[LightSchema.CONF_MIN_KELVIN] self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) @@ -554,21 +562,20 @@ class KnxYamlLight(_KnxLight, KnxYamlEntity): class KnxUiLight(_KnxLight, KnxUiEntity): """Representation of a KNX light.""" - _attr_has_entity_name = True _device: XknxLight def __init__( self, knx_module: KNXModule, unique_id: str, config: ConfigType ) -> None: """Initialize of KNX light.""" - self._knx_module = knx_module + super().__init__( + knx_module=knx_module, + unique_id=unique_id, + entity_config=config[CONF_ENTITY], + ) self._device = _create_ui_light( knx_module.xknx, config[DOMAIN], config[CONF_ENTITY][CONF_NAME] ) + self._attr_color_mode = next(iter(self.supported_color_modes)) self._attr_max_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MAX] self._attr_min_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MIN] - - self._attr_entity_category = config[CONF_ENTITY][CONF_ENTITY_CATEGORY] - self._attr_unique_id = unique_id - if device_info := config[CONF_ENTITY].get(CONF_DEVICE_INFO): - self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_info)}) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index b7efd14fa2a..55c19443aa0 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -9,11 +9,10 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["xknx", "xknxproject"], - "quality_scale": "platinum", "requirements": [ - "xknx==3.1.1", - "xknxproject==3.7.1", - "knx-frontend==2024.8.9.225351" + "xknx==3.4.0", + "xknxproject==3.8.1", + "knx-frontend==2024.11.16.205004" ], "single_config_entry": true } diff --git a/homeassistant/components/knx/notify.py b/homeassistant/components/knx/notify.py index 173ab3119a0..245de2e937e 100644 --- a/homeassistant/components/knx/notify.py +++ b/homeassistant/components/knx/notify.py @@ -2,83 +2,19 @@ from __future__ import annotations -from typing import Any - from xknx import XKNX from xknx.devices import Notification as XknxNotification from homeassistant import config_entries -from homeassistant.components.notify import ( - BaseNotificationService, - NotifyEntity, - migrate_notify_issue, -) +from homeassistant.components.notify import NotifyEntity from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, CONF_TYPE, Platform from homeassistant.core import HomeAssistant from 
homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxYamlEntity - - -async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> KNXNotificationService | None: - """Get the KNX notification service.""" - if discovery_info is None: - return None - - if platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.NOTIFY): - xknx: XKNX = hass.data[DOMAIN].xknx - - notification_devices = [ - _create_notification_instance(xknx, device_config) - for device_config in platform_config - ] - return KNXNotificationService(notification_devices) - - return None - - -class KNXNotificationService(BaseNotificationService): - """Implement notification service.""" - - def __init__(self, devices: list[XknxNotification]) -> None: - """Initialize the service.""" - self.devices = devices - - @property - def targets(self) -> dict[str, str]: - """Return a dictionary of registered targets.""" - ret = {} - for device in self.devices: - ret[device.name] = device.name - return ret - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a notification to knx bus.""" - migrate_notify_issue( - self.hass, DOMAIN, "KNX", "2024.11.0", service_name=self._service_name - ) - if "target" in kwargs: - await self._async_send_to_device(message, kwargs["target"]) - else: - await self._async_send_to_all_devices(message) - - async def _async_send_to_all_devices(self, message: str) -> None: - """Send a notification to knx bus to all connected devices.""" - for device in self.devices: - await device.set(message) - - async def _async_send_to_device(self, message: str, names: str) -> None: - """Send a notification to knx bus to device with given names.""" - for device in self.devices: - if device.name in names: - await device.set(message) +from .const import KNX_ADDRESS, KNX_MODULE_KEY +from .entity import KnxYamlEntity async def async_setup_entry( @@ -87,8 +23,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up notify(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NOTIFY] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.NOTIFY] async_add_entities(KNXNotify(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/number.py b/homeassistant/components/knx/number.py index cbbe91aba54..27e4ff743ab 100644 --- a/homeassistant/components/knx/number.py +++ b/homeassistant/components/knx/number.py @@ -23,14 +23,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import ( - CONF_RESPOND_TO_READ, - CONF_STATE_ADDRESS, - DATA_KNX_CONFIG, - DOMAIN, - KNX_ADDRESS, -) -from .knx_entity import KnxYamlEntity +from .const import CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, KNX_ADDRESS, KNX_MODULE_KEY +from .entity import KnxYamlEntity from .schema import NumberSchema @@ -40,8 +34,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up number(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NUMBER] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.NUMBER] async_add_entities(KNXNumber(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/project.py b/homeassistant/components/knx/project.py index b5bafe00724..04cac68aab0 100644 --- a/homeassistant/components/knx/project.py +++ b/homeassistant/components/knx/project.py @@ -8,12 +8,13 @@ from typing import Final from xknx import XKNX from xknx.dpt import DPTBase -from xknx.telegram.address import DeviceAddressableType +from xknx.telegram.address import DeviceAddressableType, GroupAddress, GroupAddressType from xknxproject import XKNXProj from xknxproject.models import ( Device, DPTType, GroupAddress as GroupAddressModel, + GroupAddressStyle as XknxProjectGroupAddressStyle, KNXProject as KNXProjectModel, ProjectInfo, ) @@ -90,6 +91,7 @@ class KNXProject: if project := data or await self._store.async_load(): self.devices = project["devices"] self.info = project["info"] + GroupAddress.address_format = self.get_address_format() xknx.group_address_dpt.clear() xknx_ga_dict: dict[DeviceAddressableType, DPTType] = {} @@ -133,3 +135,13 @@ class KNXProject: async def get_knxproject(self) -> KNXProjectModel | None: """Load the project file from local storage.""" return await self._store.async_load() + + def get_address_format(self) -> GroupAddressType: + """Return the address format for group addresses used in the project.""" + if self.info: + match self.info["group_address_style"]: + case XknxProjectGroupAddressStyle.TWOLEVEL.value: + return GroupAddressType.SHORT + case XknxProjectGroupAddressStyle.FREE.value: + return GroupAddressType.FREE + return GroupAddressType.LONG diff --git a/homeassistant/components/knx/scene.py b/homeassistant/components/knx/scene.py index 2de832ae54a..dfd226d72b1 100644 --- a/homeassistant/components/knx/scene.py +++ b/homeassistant/components/knx/scene.py @@ -14,8 +14,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxYamlEntity +from .const import KNX_ADDRESS, KNX_MODULE_KEY +from .entity import KnxYamlEntity from .schema import SceneSchema @@ -25,8 +25,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up scene(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.SCENE] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.SCENE] async_add_entities(KNXScene(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/schema.py b/homeassistant/components/knx/schema.py index c31b3d30ad0..9311046e410 100644 --- a/homeassistant/components/knx/schema.py +++ b/homeassistant/components/knx/schema.py @@ -7,7 +7,7 @@ from collections import OrderedDict from typing import ClassVar, Final import voluptuous as vol -from xknx.devices.climate import SetpointShiftMode +from xknx.devices.climate import FanSpeedMode, SetpointShiftMode from xknx.dpt import DPTBase, DPTNumeric from xknx.dpt.dpt_20 import HVACControllerMode, HVACOperationMode from xknx.exceptions import ConversionError, CouldNotParseTelegram @@ -15,7 +15,7 @@ from xknx.exceptions import ConversionError, CouldNotParseTelegram from homeassistant.components.binary_sensor import ( DEVICE_CLASSES_SCHEMA as BINARY_SENSOR_DEVICE_CLASSES_SCHEMA, ) -from homeassistant.components.climate import HVACMode +from homeassistant.components.climate import FAN_OFF, HVACMode from homeassistant.components.cover import ( DEVICE_CLASSES_SCHEMA as COVER_DEVICE_CLASSES_SCHEMA, ) @@ -54,6 +54,7 @@ from .const import ( CONF_SYNC_STATE, KNX_ADDRESS, ColorTempModes, + FanZeroMode, ) from .validation import ( backwards_compatible_xknx_climate_enum_member, @@ -221,9 +222,6 @@ class BinarySensorSchema(KNXPlatformSchema): DEFAULT_NAME = "KNX Binary Sensor" ENTITY_SCHEMA = vol.All( - # deprecated since September 2020 - cv.deprecated("significant_bit"), - cv.deprecated("automation"), vol.Schema( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, @@ -341,6 +339,12 @@ class ClimateSchema(KNXPlatformSchema): CONF_ON_OFF_INVERT = "on_off_invert" CONF_MIN_TEMP = "min_temp" CONF_MAX_TEMP = "max_temp" + CONF_FAN_SPEED_ADDRESS = "fan_speed_address" + CONF_FAN_SPEED_STATE_ADDRESS = "fan_speed_state_address" + CONF_FAN_MAX_STEP = "fan_max_step" + CONF_FAN_SPEED_MODE = "fan_speed_mode" + CONF_FAN_ZERO_MODE = "fan_zero_mode" + CONF_HUMIDITY_STATE_ADDRESS = "humidity_state_address" DEFAULT_NAME = "KNX Climate" DEFAULT_SETPOINT_SHIFT_MODE = "DPT6010" @@ -348,12 +352,9 @@ class ClimateSchema(KNXPlatformSchema): DEFAULT_SETPOINT_SHIFT_MIN = -6 DEFAULT_TEMPERATURE_STEP = 0.1 DEFAULT_ON_OFF_INVERT = False + DEFAULT_FAN_SPEED_MODE = "percent" ENTITY_SCHEMA = vol.All( - # deprecated since September 2020 - cv.deprecated("setpoint_shift_step", replacement_key=CONF_TEMPERATURE_STEP), - # deprecated since 2021.6 - cv.deprecated("create_temperature_sensors"), vol.Schema( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, @@ -423,6 +424,16 @@ class ClimateSchema(KNXPlatformSchema): vol.Optional(CONF_MIN_TEMP): vol.Coerce(float), vol.Optional(CONF_MAX_TEMP): vol.Coerce(float), vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA, + vol.Optional(CONF_FAN_SPEED_ADDRESS): ga_list_validator, + vol.Optional(CONF_FAN_SPEED_STATE_ADDRESS): ga_list_validator, + vol.Optional(CONF_FAN_MAX_STEP, 
default=3): cv.byte, + vol.Optional( + CONF_FAN_SPEED_MODE, default=DEFAULT_FAN_SPEED_MODE + ): vol.All(vol.Upper, cv.enum(FanSpeedMode)), + vol.Optional(CONF_FAN_ZERO_MODE, default=FAN_OFF): vol.Coerce( + FanZeroMode + ), + vol.Optional(CONF_HUMIDITY_STATE_ADDRESS): ga_list_validator, } ), ) @@ -951,8 +962,6 @@ class WeatherSchema(KNXPlatformSchema): DEFAULT_NAME = "KNX Weather Station" ENTITY_SCHEMA = vol.All( - # deprecated since 2021.6 - cv.deprecated("create_sensors"), vol.Schema( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, diff --git a/homeassistant/components/knx/select.py b/homeassistant/components/knx/select.py index 6c73bf8d573..b499e3c601d 100644 --- a/homeassistant/components/knx/select.py +++ b/homeassistant/components/knx/select.py @@ -26,11 +26,10 @@ from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, CONF_SYNC_STATE, - DATA_KNX_CONFIG, - DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) -from .knx_entity import KnxYamlEntity +from .entity import KnxYamlEntity from .schema import SelectSchema @@ -40,8 +39,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up select(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.SELECT] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.SELECT] async_add_entities(KNXSelect(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/sensor.py b/homeassistant/components/knx/sensor.py index a28c1a339e6..ed265db4ac7 100644 --- a/homeassistant/components/knx/sensor.py +++ b/homeassistant/components/knx/sensor.py @@ -34,8 +34,8 @@ from homeassistant.helpers.typing import ConfigType, StateType from homeassistant.util.enum import try_parse_enum from . 
import KNXModule -from .const import ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxYamlEntity +from .const import ATTR_SOURCE, KNX_MODULE_KEY +from .entity import KnxYamlEntity from .schema import SensorSchema SCAN_INTERVAL = timedelta(seconds=10) @@ -115,13 +115,13 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensor(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] entities: list[SensorEntity] = [] entities.extend( KNXSystemSensor(knx_module, description) for description in SYSTEM_ENTITY_DESCRIPTIONS ) - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG].get(Platform.SENSOR) + config: list[ConfigType] | None = knx_module.config_yaml.get(Platform.SENSOR) if config: entities.extend( KNXSensor(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/services.py b/homeassistant/components/knx/services.py index 8b82671deaa..6c392902737 100644 --- a/homeassistant/components/knx/services.py +++ b/homeassistant/components/knx/services.py @@ -2,7 +2,6 @@ from __future__ import annotations -from functools import partial import logging from typing import TYPE_CHECKING @@ -22,6 +21,7 @@ from homeassistant.helpers.service import async_register_admin_service from .const import ( DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, SERVICE_KNX_ATTR_PAYLOAD, SERVICE_KNX_ATTR_REMOVE, SERVICE_KNX_ATTR_RESPONSE, @@ -46,14 +46,14 @@ def register_knx_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, SERVICE_KNX_SEND, - partial(service_send_to_knx_bus, hass), + service_send_to_knx_bus, schema=SERVICE_KNX_SEND_SCHEMA, ) hass.services.async_register( DOMAIN, SERVICE_KNX_READ, - partial(service_read_to_knx_bus, hass), + service_read_to_knx_bus, schema=SERVICE_KNX_READ_SCHEMA, ) @@ -61,7 +61,7 @@ def register_knx_services(hass: HomeAssistant) -> None: hass, DOMAIN, SERVICE_KNX_EVENT_REGISTER, - partial(service_event_register_modify, hass), + service_event_register_modify, schema=SERVICE_KNX_EVENT_REGISTER_SCHEMA, ) @@ -69,7 +69,7 @@ def register_knx_services(hass: HomeAssistant) -> None: hass, DOMAIN, SERVICE_KNX_EXPOSURE_REGISTER, - partial(service_exposure_register_modify, hass), + service_exposure_register_modify, schema=SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA, ) @@ -77,7 +77,7 @@ def register_knx_services(hass: HomeAssistant) -> None: hass, DOMAIN, SERVICE_RELOAD, - partial(service_reload_integration, hass), + service_reload_integration, ) @@ -85,7 +85,7 @@ def register_knx_services(hass: HomeAssistant) -> None: def get_knx_module(hass: HomeAssistant) -> KNXModule: """Return KNXModule instance.""" try: - return hass.data[DOMAIN] # type: ignore[no-any-return] + return hass.data[KNX_MODULE_KEY] except KeyError as err: raise HomeAssistantError("KNX entry not loaded") from err @@ -102,9 +102,9 @@ SERVICE_KNX_EVENT_REGISTER_SCHEMA = vol.Schema( ) -async def service_event_register_modify(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_event_register_modify(call: ServiceCall) -> None: """Service for adding or removing a GroupAddress to the knx_event filter.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) attr_address = call.data[KNX_ADDRESS] group_addresses = list(map(parse_device_group_address, attr_address)) @@ -155,11 +155,9 @@ SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA = vol.Any( ) -async def service_exposure_register_modify( - hass: HomeAssistant, call: ServiceCall -) -> None: +async def 
service_exposure_register_modify(call: ServiceCall) -> None: """Service for adding or removing an exposure to KNX bus.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) group_address = call.data[KNX_ADDRESS] @@ -222,9 +220,9 @@ SERVICE_KNX_SEND_SCHEMA = vol.Any( ) -async def service_send_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_send_to_knx_bus(call: ServiceCall) -> None: """Service for sending an arbitrary KNX message to the KNX bus.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) attr_address = call.data[KNX_ADDRESS] attr_payload = call.data[SERVICE_KNX_ATTR_PAYLOAD] @@ -270,9 +268,9 @@ SERVICE_KNX_READ_SCHEMA = vol.Schema( ) -async def service_read_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_read_to_knx_bus(call: ServiceCall) -> None: """Service for sending a GroupValueRead telegram to the KNX bus.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) for address in call.data[KNX_ADDRESS]: telegram = Telegram( @@ -283,8 +281,8 @@ async def service_read_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> Non await knx_module.xknx.telegrams.put(telegram) -async def service_reload_integration(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_reload_integration(call: ServiceCall) -> None: """Reload the integration.""" - knx_module = get_knx_module(hass) - await hass.config_entries.async_reload(knx_module.entry.entry_id) - hass.bus.async_fire(f"event_{DOMAIN}_reloaded", context=call.context) + knx_module = get_knx_module(call.hass) + await call.hass.config_entries.async_reload(knx_module.entry.entry_id) + call.hass.bus.async_fire(f"event_{DOMAIN}_reloaded", context=call.context) diff --git a/homeassistant/components/knx/storage/config_store.py b/homeassistant/components/knx/storage/config_store.py index ce7a705e629..2899448a128 100644 --- a/homeassistant/components/knx/storage/config_store.py +++ b/homeassistant/components/knx/storage/config_store.py @@ -19,8 +19,8 @@ _LOGGER = logging.getLogger(__name__) STORAGE_VERSION: Final = 1 STORAGE_KEY: Final = f"{DOMAIN}/config_store.json" -KNXPlatformStoreModel = dict[str, dict[str, Any]] # unique_id: configuration -KNXEntityStoreModel = dict[ +type KNXPlatformStoreModel = dict[str, dict[str, Any]] # unique_id: configuration +type KNXEntityStoreModel = dict[ str, KNXPlatformStoreModel ] # platform: KNXPlatformStoreModel diff --git a/homeassistant/components/knx/storage/entity_store_validation.py b/homeassistant/components/knx/storage/entity_store_validation.py index e9997bd9f1a..9bad5297853 100644 --- a/homeassistant/components/knx/storage/entity_store_validation.py +++ b/homeassistant/components/knx/storage/entity_store_validation.py @@ -38,7 +38,10 @@ def parse_invalid(exc: vol.Invalid) -> _ErrorDescription: def validate_entity_data(entity_data: dict) -> dict: - """Validate entity data. Return validated data or raise EntityStoreValidationException.""" + """Validate entity data. + + Return validated data or raise EntityStoreValidationException. 
+ """ try: # return so defaults are applied return ENTITY_STORE_DATA_SCHEMA(entity_data) # type: ignore[no-any-return] diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index 8d8692f6b7a..08b921f316b 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -294,19 +294,24 @@ "name": "Connection type" }, "telegrams_incoming": { - "name": "Incoming telegrams" + "name": "Incoming telegrams", + "unit_of_measurement": "[%key:component::knx::entity::sensor::telegram_count::unit_of_measurement%]" }, "telegrams_incoming_error": { - "name": "Incoming telegram errors" + "name": "Incoming telegram errors", + "unit_of_measurement": "errors" }, "telegrams_outgoing": { - "name": "Outgoing telegrams" + "name": "Outgoing telegrams", + "unit_of_measurement": "[%key:component::knx::entity::sensor::telegram_count::unit_of_measurement%]" }, "telegrams_outgoing_error": { - "name": "Outgoing telegram errors" + "name": "Outgoing telegram errors", + "unit_of_measurement": "[%key:component::knx::entity::sensor::telegrams_incoming_error::unit_of_measurement%]" }, "telegram_count": { - "name": "Telegrams" + "name": "Telegrams", + "unit_of_measurement": "telegrams" } } }, diff --git a/homeassistant/components/knx/switch.py b/homeassistant/components/knx/switch.py index ebe930957d6..725468cd6a9 100644 --- a/homeassistant/components/knx/switch.py +++ b/homeassistant/components/knx/switch.py @@ -18,7 +18,6 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, async_get_current_platform, @@ -31,14 +30,13 @@ from .const import ( CONF_INVERT, CONF_RESPOND_TO_READ, CONF_SYNC_STATE, - DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) -from .knx_entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity +from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity from .schema import SwitchSchema from .storage.const import ( - CONF_DEVICE_INFO, CONF_ENTITY, CONF_GA_PASSIVE, CONF_GA_STATE, @@ -53,7 +51,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch(es) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] platform = async_get_current_platform() knx_module.config_store.add_platform( platform=Platform.SWITCH, @@ -65,7 +63,7 @@ async def async_setup_entry( ) entities: list[KnxYamlEntity | KnxUiEntity] = [] - if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.SWITCH): + if yaml_platform_config := knx_module.config_yaml.get(Platform.SWITCH): entities.extend( KnxYamlSwitch(knx_module, entity_config) for entity_config in yaml_platform_config @@ -133,14 +131,17 @@ class KnxYamlSwitch(_KnxSwitch, KnxYamlEntity): class KnxUiSwitch(_KnxSwitch, KnxUiEntity): """Representation of a KNX switch configured from UI.""" - _attr_has_entity_name = True _device: XknxSwitch def __init__( self, knx_module: KNXModule, unique_id: str, config: dict[str, Any] ) -> None: """Initialize KNX switch.""" - self._knx_module = knx_module + super().__init__( + knx_module=knx_module, + unique_id=unique_id, + entity_config=config[CONF_ENTITY], + ) self._device = XknxSwitch( knx_module.xknx, name=config[CONF_ENTITY][CONF_NAME], @@ -153,7 +154,3 @@ class KnxUiSwitch(_KnxSwitch, KnxUiEntity): sync_state=config[DOMAIN][CONF_SYNC_STATE], 
invert=config[DOMAIN][CONF_INVERT], ) - self._attr_entity_category = config[CONF_ENTITY][CONF_ENTITY_CATEGORY] - self._attr_unique_id = unique_id - if device_info := config[CONF_ENTITY].get(CONF_DEVICE_INFO): - self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_info)}) diff --git a/homeassistant/components/knx/telegrams.py b/homeassistant/components/knx/telegrams.py index a96d841a07d..dcd5f477679 100644 --- a/homeassistant/components/knx/telegrams.py +++ b/homeassistant/components/knx/telegrams.py @@ -9,7 +9,7 @@ from xknx import XKNX from xknx.dpt import DPTArray, DPTBase, DPTBinary from xknx.dpt.dpt import DPTComplexData, DPTEnumData from xknx.exceptions import XKNXException -from xknx.telegram import Telegram +from xknx.telegram import Telegram, TelegramDirection from xknx.telegram.apci import GroupValueResponse, GroupValueWrite from homeassistant.core import HomeAssistant @@ -75,6 +75,7 @@ class Telegrams: ) ) self.recent_telegrams: deque[TelegramDict] = deque(maxlen=log_size) + self.last_ga_telegrams: dict[str, TelegramDict] = {} async def load_history(self) -> None: """Load history from store.""" @@ -88,6 +89,9 @@ class Telegrams: if isinstance(telegram["payload"], list): telegram["payload"] = tuple(telegram["payload"]) # type: ignore[unreachable] self.recent_telegrams.extend(telegrams) + self.last_ga_telegrams = { + t["destination"]: t for t in telegrams if t["payload"] is not None + } async def save_history(self) -> None: """Save history to store.""" @@ -98,6 +102,9 @@ class Telegrams: """Handle incoming and outgoing telegrams from xknx.""" telegram_dict = self.telegram_to_dict(telegram) self.recent_telegrams.append(telegram_dict) + if telegram_dict["payload"] is not None: + # exclude GroupValueRead telegrams + self.last_ga_telegrams[telegram_dict["destination"]] = telegram_dict async_dispatcher_send(self.hass, SIGNAL_KNX_TELEGRAM, telegram, telegram_dict) def telegram_to_dict(self, telegram: Telegram) -> TelegramDict: @@ -119,6 +126,8 @@ class Telegrams: device := self.project.devices.get(f"{telegram.source_address}") ) is not None: src_name = f"{device['manufacturer_name']} {device['name']}" + elif telegram.direction is TelegramDirection.OUTGOING: + src_name = "Home Assistant" if isinstance(telegram.payload, (GroupValueWrite, GroupValueResponse)): payload_data = telegram.payload.value.value diff --git a/homeassistant/components/knx/text.py b/homeassistant/components/knx/text.py index 381cb95ad32..2256afadbd9 100644 --- a/homeassistant/components/knx/text.py +++ b/homeassistant/components/knx/text.py @@ -23,14 +23,8 @@ from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import ( - CONF_RESPOND_TO_READ, - CONF_STATE_ADDRESS, - DATA_KNX_CONFIG, - DOMAIN, - KNX_ADDRESS, -) -from .knx_entity import KnxYamlEntity +from .const import CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, KNX_ADDRESS, KNX_MODULE_KEY +from .entity import KnxYamlEntity async def async_setup_entry( @@ -39,8 +33,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensor(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.TEXT] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.TEXT] async_add_entities(KNXText(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/time.py b/homeassistant/components/knx/time.py index b4e562a8869..1e82c324502 100644 --- a/homeassistant/components/knx/time.py +++ b/homeassistant/components/knx/time.py @@ -27,11 +27,10 @@ from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, CONF_SYNC_STATE, - DATA_KNX_CONFIG, - DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) -from .knx_entity import KnxYamlEntity +from .entity import KnxYamlEntity async def async_setup_entry( @@ -40,8 +39,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.TIME] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.TIME] async_add_entities( KNXTimeEntity(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/weather.py b/homeassistant/components/knx/weather.py index 99f4be962fe..a1e5c0efe48 100644 --- a/homeassistant/components/knx/weather.py +++ b/homeassistant/components/knx/weather.py @@ -20,8 +20,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxYamlEntity +from .const import KNX_MODULE_KEY +from .entity import KnxYamlEntity from .schema import WeatherSchema @@ -31,8 +31,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch(es) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.WEATHER] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.WEATHER] async_add_entities( KNXWeather(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/websocket.py b/homeassistant/components/knx/websocket.py index 4af3012741a..9ba3e0ccff6 100644 --- a/homeassistant/components/knx/websocket.py +++ b/homeassistant/components/knx/websocket.py @@ -2,7 +2,10 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Final +import asyncio +from collections.abc import Awaitable, Callable +from functools import wraps +from typing import TYPE_CHECKING, Any, Final, overload import knx_frontend as knx_panel import voluptuous as vol @@ -18,7 +21,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.typing import UNDEFINED from homeassistant.util.ulid import ulid_now -from .const import DOMAIN +from .const import DOMAIN, KNX_MODULE_KEY from .storage.config_store import ConfigStoreException from .storage.const import CONF_DATA from .storage.entity_store_schema import ( @@ -35,7 +38,6 @@ from .telegrams import SIGNAL_KNX_TELEGRAM, TelegramDict if TYPE_CHECKING: from . import KNXModule - URL_BASE: Final = "/knx_static" @@ -45,6 +47,7 @@ async def register_panel(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, ws_project_file_process) websocket_api.async_register_command(hass, ws_project_file_remove) websocket_api.async_register_command(hass, ws_group_monitor_info) + websocket_api.async_register_command(hass, ws_group_telegrams) websocket_api.async_register_command(hass, ws_subscribe_telegram) websocket_api.async_register_command(hass, ws_get_knx_project) websocket_api.async_register_command(hass, ws_validate_entity) @@ -77,21 +80,92 @@ async def register_panel(hass: HomeAssistant) -> None: ) +type KnxWebSocketCommandHandler = Callable[ + [HomeAssistant, KNXModule, websocket_api.ActiveConnection, dict[str, Any]], None +] +type KnxAsyncWebSocketCommandHandler = Callable[ + [HomeAssistant, KNXModule, websocket_api.ActiveConnection, dict[str, Any]], + Awaitable[None], +] + + +@overload +def provide_knx( + func: KnxAsyncWebSocketCommandHandler, +) -> websocket_api.const.AsyncWebSocketCommandHandler: ... +@overload +def provide_knx( + func: KnxWebSocketCommandHandler, +) -> websocket_api.const.WebSocketCommandHandler: ... 
+ + +def provide_knx( + func: KnxAsyncWebSocketCommandHandler | KnxWebSocketCommandHandler, +) -> ( + websocket_api.const.AsyncWebSocketCommandHandler + | websocket_api.const.WebSocketCommandHandler +): + """Websocket decorator to provide a KNXModule instance.""" + + def _send_not_loaded_error( + connection: websocket_api.ActiveConnection, msg_id: int + ) -> None: + connection.send_error( + msg_id, + websocket_api.const.ERR_HOME_ASSISTANT_ERROR, + "KNX integration not loaded.", + ) + + if asyncio.iscoroutinefunction(func): + + @wraps(func) + async def with_knx( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: + """Add KNX Module to call function.""" + try: + knx = hass.data[KNX_MODULE_KEY] + except KeyError: + _send_not_loaded_error(connection, msg["id"]) + return + await func(hass, knx, connection, msg) + + else: + + @wraps(func) + def with_knx( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: + """Add KNX Module to call function.""" + try: + knx = hass.data[KNX_MODULE_KEY] + except KeyError: + _send_not_loaded_error(connection, msg["id"]) + return + func(hass, knx, connection, msg) + + return with_knx + + @websocket_api.require_admin @websocket_api.websocket_command( { vol.Required("type"): "knx/info", } ) +@provide_knx @callback def ws_info( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command.""" - knx: KNXModule = hass.data[DOMAIN] - _project_info = None if project_info := knx.project.info: _project_info = { @@ -119,13 +193,14 @@ def ws_info( } ) @websocket_api.async_response +@provide_knx async def ws_get_knx_project( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get KNX project.""" - knx: KNXModule = hass.data[DOMAIN] knxproject = await knx.project.get_knxproject() connection.send_result( msg["id"], @@ -145,13 +220,14 @@ async def ws_get_knx_project( } ) @websocket_api.async_response +@provide_knx async def ws_project_file_process( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command.""" - knx: KNXModule = hass.data[DOMAIN] try: await knx.project.process_project_file( xknx=knx.xknx, @@ -175,13 +251,14 @@ async def ws_project_file_process( } ) @websocket_api.async_response +@provide_knx async def ws_project_file_remove( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command.""" - knx: KNXModule = hass.data[DOMAIN] await knx.project.remove_project_file() connection.send_result(msg["id"]) @@ -192,14 +269,15 @@ async def ws_project_file_remove( vol.Required("type"): "knx/group_monitor_info", } ) +@provide_knx @callback def ws_group_monitor_info( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command of group monitor.""" - knx: KNXModule = hass.data[DOMAIN] recent_telegrams = [*knx.telegrams.recent_telegrams] connection.send_result( msg["id"], @@ -210,6 +288,27 @@ def ws_group_monitor_info( ) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "knx/group_telegrams", + } +) +@provide_knx +@callback +def ws_group_telegrams( + hass: HomeAssistant, + knx: KNXModule, + connection: websocket_api.ActiveConnection, + msg: dict, +) -> None: + """Handle get group telegrams 
command.""" + connection.send_result( + msg["id"], + knx.telegrams.last_ga_telegrams, + ) + + @websocket_api.require_admin @websocket_api.websocket_command( { @@ -272,8 +371,10 @@ def ws_validate_entity( } ) @websocket_api.async_response +@provide_knx async def ws_create_entity( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: @@ -283,7 +384,6 @@ async def ws_create_entity( except EntityStoreValidationException as exc: connection.send_result(msg["id"], exc.validation_error) return - knx: KNXModule = hass.data[DOMAIN] try: entity_id = await knx.config_store.create_entity( # use validation result so defaults are applied @@ -308,8 +408,10 @@ async def ws_create_entity( } ) @websocket_api.async_response +@provide_knx async def ws_update_entity( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: @@ -319,7 +421,6 @@ async def ws_update_entity( except EntityStoreValidationException as exc: connection.send_result(msg["id"], exc.validation_error) return - knx: KNXModule = hass.data[DOMAIN] try: await knx.config_store.update_entity( validated_data[CONF_PLATFORM], @@ -344,13 +445,14 @@ async def ws_update_entity( } ) @websocket_api.async_response +@provide_knx async def ws_delete_entity( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Delete entity from entity store and remove it.""" - knx: KNXModule = hass.data[DOMAIN] try: await knx.config_store.delete_entity(msg[CONF_ENTITY_ID]) except ConfigStoreException as err: @@ -367,14 +469,15 @@ async def ws_delete_entity( vol.Required("type"): "knx/get_entity_entries", } ) +@provide_knx @callback def ws_get_entity_entries( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Get entities configured from entity store.""" - knx: KNXModule = hass.data[DOMAIN] entity_entries = [ entry.extended_dict for entry in knx.config_store.get_entity_entries() ] @@ -388,14 +491,15 @@ def ws_get_entity_entries( vol.Required(CONF_ENTITY_ID): str, } ) +@provide_knx @callback def ws_get_entity_config( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Get entity configuration from entity store.""" - knx: KNXModule = hass.data[DOMAIN] try: config_info = knx.config_store.get_entity_config(msg[CONF_ENTITY_ID]) except ConfigStoreException as err: @@ -414,14 +518,15 @@ def ws_get_entity_config( vol.Optional("area_id"): str, } ) +@provide_knx @callback def ws_create_device( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Create a new KNX device.""" - knx: KNXModule = hass.data[DOMAIN] identifier = f"knx_vdev_{ulid_now()}" device_registry = dr.async_get(hass) _device = device_registry.async_get_or_create( diff --git a/homeassistant/components/kodi/config_flow.py b/homeassistant/components/kodi/config_flow.py index c740aeb6057..f87b94b23fd 100644 --- a/homeassistant/components/kodi/config_flow.py +++ b/homeassistant/components/kodi/config_flow.py @@ -140,9 +140,12 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_discovery_confirm() - async def async_step_discovery_confirm(self, user_input=None): + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user-confirmation of discovered node.""" if user_input is None: + assert self._name is not None 
return self.async_show_form( step_id="discovery_confirm", description_placeholders={"name": self._name}, @@ -178,7 +181,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_user_form(errors) - async def async_step_credentials(self, user_input=None): + async def async_step_credentials( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle username and password input.""" errors = {} @@ -203,7 +208,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_credentials_form(errors) - async def async_step_ws_port(self, user_input=None): + async def async_step_ws_port( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle websocket port of discovered node.""" errors = {} @@ -226,12 +233,12 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_ws_port_form(errors) - async def async_step_import(self, data): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from YAML.""" reason = None try: - await validate_http(self.hass, data) - await validate_ws(self.hass, data) + await validate_http(self.hass, import_data) + await validate_ws(self.hass, import_data) except InvalidAuth: _LOGGER.exception("Invalid Kodi credentials") reason = "invalid_auth" @@ -242,12 +249,16 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") reason = "unknown" else: - return self.async_create_entry(title=data[CONF_NAME], data=data) + return self.async_create_entry( + title=import_data[CONF_NAME], data=import_data + ) return self.async_abort(reason=reason) @callback - def _show_credentials_form(self, errors=None): + def _show_credentials_form( + self, errors: dict[str, str] | None = None + ) -> ConfigFlowResult: schema = vol.Schema( { vol.Optional( @@ -260,7 +271,7 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="credentials", data_schema=schema, errors=errors or {} + step_id="credentials", data_schema=schema, errors=errors ) @callback @@ -302,7 +313,7 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): ) @callback - def _get_data(self): + def _get_data(self) -> dict[str, Any]: return { CONF_NAME: self._name, CONF_HOST: self._host, diff --git a/homeassistant/components/kodi/icons.json b/homeassistant/components/kodi/icons.json index 07bd246e92d..d9c32630961 100644 --- a/homeassistant/components/kodi/icons.json +++ b/homeassistant/components/kodi/icons.json @@ -1,6 +1,10 @@ { "services": { - "add_to_playlist": "mdi:playlist-plus", - "call_method": "mdi:console" + "add_to_playlist": { + "service": "mdi:playlist-plus" + }, + "call_method": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/konnected/config_flow.py b/homeassistant/components/konnected/config_flow.py index 6c9a542c53b..65dd7cf39b3 100644 --- a/homeassistant/components/konnected/config_flow.py +++ b/homeassistant/components/konnected/config_flow.py @@ -177,7 +177,9 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 # class variable to store/share discovered host information - discovered_hosts: dict[str, dict[str, Any]] = {} + DISCOVERED_HOSTS: dict[str, dict[str, Any]] = {} + + unique_id: str def __init__(self) -> None: """Initialize the Konnected flow.""" @@ -202,24 +204,24 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): random.choices(f"{string.ascii_uppercase}{string.digits}", k=20) ) - async def async_step_import(self, device_config): + async def async_step_import(self, 
import_data: dict[str, Any]) -> ConfigFlowResult: """Import a configuration.yaml config. This flow is triggered by `async_setup` for configured panels. """ - _LOGGER.debug(device_config) + _LOGGER.debug(import_data) # save the data and confirm connection via user step - await self.async_set_unique_id(device_config["id"]) - self.options = device_config[CONF_DEFAULT_OPTIONS] + await self.async_set_unique_id(import_data["id"]) + self.options = import_data[CONF_DEFAULT_OPTIONS] # config schema ensures we have port if we have host - if device_config.get(CONF_HOST): + if import_data.get(CONF_HOST): # automatically connect if we have host info return await self.async_step_user( user_input={ - CONF_HOST: device_config[CONF_HOST], - CONF_PORT: device_config[CONF_PORT], + CONF_HOST: import_data[CONF_HOST], + CONF_PORT: import_data[CONF_PORT], } ) @@ -227,7 +229,9 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() return await self.async_step_import_confirm() - async def async_step_import_confirm(self, user_input=None): + async def async_step_import_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm the user wants to import the config entry.""" if user_input is None: return self.async_show_form( @@ -236,13 +240,13 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): ) # if we have ssdp discovered applicable host info use it - if KonnectedFlowHandler.discovered_hosts.get(self.unique_id): + if KonnectedFlowHandler.DISCOVERED_HOSTS.get(self.unique_id): return await self.async_step_user( user_input={ - CONF_HOST: KonnectedFlowHandler.discovered_hosts[self.unique_id][ + CONF_HOST: KonnectedFlowHandler.DISCOVERED_HOSTS[self.unique_id][ CONF_HOST ], - CONF_PORT: KonnectedFlowHandler.discovered_hosts[self.unique_id][ + CONF_PORT: KonnectedFlowHandler.DISCOVERED_HOSTS[self.unique_id][ CONF_PORT ], } @@ -295,7 +299,7 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): self.data[CONF_ID] = status.get("chipId", status["mac"].replace(":", "")) self.data[CONF_MODEL] = status.get("model", KONN_MODEL) - KonnectedFlowHandler.discovered_hosts[self.data[CONF_ID]] = { + KonnectedFlowHandler.DISCOVERED_HOSTS[self.data[CONF_ID]] = { CONF_HOST: self.data[CONF_HOST], CONF_PORT: self.data[CONF_PORT], } @@ -328,7 +332,7 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): self.data[CONF_MODEL] = status.get("model", KONN_MODEL) # save off our discovered host info - KonnectedFlowHandler.discovered_hosts[self.data[CONF_ID]] = { + KonnectedFlowHandler.DISCOVERED_HOSTS[self.data[CONF_ID]] = { CONF_HOST: self.data[CONF_HOST], CONF_PORT: self.data[CONF_PORT], } @@ -349,7 +353,9 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Attempt to link with the Konnected panel. 
Given a configured host, will ask the user to confirm and finalize @@ -396,13 +402,14 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.entry = config_entry - self.model = self.entry.data[CONF_MODEL] - self.current_opt = self.entry.options or self.entry.data[CONF_DEFAULT_OPTIONS] + self.model = config_entry.data[CONF_MODEL] + self.current_opt = ( + config_entry.options or config_entry.data[CONF_DEFAULT_OPTIONS] + ) # as config proceeds we'll build up new options and then replace what's in the config entry - self.new_opt: dict[str, dict[str, Any]] = {CONF_IO: {}} - self.active_cfg = None + self.new_opt: dict[str, Any] = {CONF_IO: {}} + self.active_cfg: str | None = None self.io_cfg: dict[str, Any] = {} self.current_states: list[dict[str, Any]] = [] self.current_state = 1 @@ -419,13 +426,17 @@ class OptionsFlowHandler(OptionsFlow): {}, ) - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" return await self.async_step_options_io() - async def async_step_options_io(self, user_input=None): + async def async_step_options_io( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Configure legacy panel IO or first half of pro IO.""" - errors = {} + errors: dict[str, str] = {} current_io = self.current_opt.get(CONF_IO, {}) if user_input is not None: @@ -465,7 +476,7 @@ class OptionsFlowHandler(OptionsFlow): ), description_placeholders={ "model": KONN_PANEL_MODEL_NAMES[self.model], - "host": self.entry.data[CONF_HOST], + "host": self.config_entry.data[CONF_HOST], }, errors=errors, ) @@ -501,16 +512,18 @@ class OptionsFlowHandler(OptionsFlow): ), description_placeholders={ "model": KONN_PANEL_MODEL_NAMES[self.model], - "host": self.entry.data[CONF_HOST], + "host": self.config_entry.data[CONF_HOST], }, errors=errors, ) return self.async_abort(reason="not_konn_panel") - async def async_step_options_io_ext(self, user_input=None): + async def async_step_options_io_ext( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the extended IO for pro.""" - errors = {} + errors: dict[str, str] = {} current_io = self.current_opt.get(CONF_IO, {}) if user_input is not None: @@ -559,17 +572,19 @@ class OptionsFlowHandler(OptionsFlow): ), description_placeholders={ "model": KONN_PANEL_MODEL_NAMES[self.model], - "host": self.entry.data[CONF_HOST], + "host": self.config_entry.data[CONF_HOST], }, errors=errors, ) return self.async_abort(reason="not_konn_panel") - async def async_step_options_binary(self, user_input=None): + async def async_step_options_binary( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the IO options for binary sensors.""" - errors = {} - if user_input is not None: + errors: dict[str, str] = {} + if user_input is not None and self.active_cfg is not None: zone = {"zone": self.active_cfg} zone.update(user_input) self.new_opt[CONF_BINARY_SENSORS] = [ @@ -602,7 +617,7 @@ class OptionsFlowHandler(OptionsFlow): description_placeholders={ "zone": f"Zone {self.active_cfg}" if len(self.active_cfg) < 3 - else self.active_cfg.upper + else self.active_cfg.upper() }, errors=errors, ) @@ -635,17 +650,19 @@ class OptionsFlowHandler(OptionsFlow): description_placeholders={ "zone": f"Zone {self.active_cfg}" if len(self.active_cfg) < 3 - else self.active_cfg.upper + else 
self.active_cfg.upper() }, errors=errors, ) return await self.async_step_options_digital() - async def async_step_options_digital(self, user_input=None): + async def async_step_options_digital( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the IO options for digital sensors.""" - errors = {} - if user_input is not None: + errors: dict[str, str] = {} + if user_input is not None and self.active_cfg is not None: zone = {"zone": self.active_cfg} zone.update(user_input) self.new_opt[CONF_SENSORS] = [*self.new_opt.get(CONF_SENSORS, []), zone] @@ -710,10 +727,12 @@ class OptionsFlowHandler(OptionsFlow): return await self.async_step_options_switch() - async def async_step_options_switch(self, user_input=None): + async def async_step_options_switch( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the IO options for switches.""" - errors = {} - if user_input is not None: + errors: dict[str, str] = {} + if user_input is not None and self.active_cfg is not None: zone = {"zone": self.active_cfg} zone.update(user_input) del zone[CONF_MORE_STATES] @@ -825,7 +844,9 @@ class OptionsFlowHandler(OptionsFlow): return await self.async_step_options_misc() - async def async_step_options_misc(self, user_input=None): + async def async_step_options_misc( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the LED behavior.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/konnected/panel.py b/homeassistant/components/konnected/panel.py index 605b27f7547..e2dfc6be06a 100644 --- a/homeassistant/components/konnected/panel.py +++ b/homeassistant/components/konnected/panel.py @@ -123,7 +123,7 @@ class AlarmPanel: self.api_version = KONN_API_VERSIONS.get( self.status.get("model", KONN_MODEL), KONN_API_VERSIONS[KONN_MODEL] ) - _LOGGER.info( + _LOGGER.debug( "Connected to new %s device", self.status.get("model", "Konnected") ) _LOGGER.debug(self.status) @@ -145,7 +145,7 @@ class AlarmPanel: self.connect_attempts = 0 self.connected = True - _LOGGER.info( + _LOGGER.debug( ( "Set up Konnected device %s. 
Open http://%s:%s in a " "web browser to view device status" @@ -380,7 +380,7 @@ class AlarmPanel: self.async_desired_settings_payload() != self.async_current_settings_payload() ): - _LOGGER.info("Pushing settings to device %s", self.device_id) + _LOGGER.debug("Pushing settings to device %s", self.device_id) await self.client.put_settings(**self.async_desired_settings_payload()) diff --git a/homeassistant/components/kostal_plenticore/manifest.json b/homeassistant/components/kostal_plenticore/manifest.json index d65368e7ee4..09352fa7a80 100644 --- a/homeassistant/components/kostal_plenticore/manifest.json +++ b/homeassistant/components/kostal_plenticore/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/kostal_plenticore", "iot_class": "local_polling", "loggers": ["kostal"], - "requirements": ["pykoplenti==1.2.2"] + "requirements": ["pykoplenti==1.3.0"] } diff --git a/homeassistant/components/kostal_plenticore/sensor.py b/homeassistant/components/kostal_plenticore/sensor.py index fbbfb03fb3e..67de34f2fce 100644 --- a/homeassistant/components/kostal_plenticore/sensor.py +++ b/homeassistant/components/kostal_plenticore/sensor.py @@ -17,6 +17,7 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, + EntityCategory, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, @@ -747,6 +748,15 @@ SENSOR_PROCESS_DATA = [ state_class=SensorStateClass.TOTAL_INCREASING, formatter="format_energy", ), + PlenticoreSensorEntityDescription( + module_id="scb:event", + key="Event:ActiveErrorCnt", + name="Active Alarms", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + icon="mdi:alert", + formatter="format_round", + ), PlenticoreSensorEntityDescription( module_id="_virt_", key="pv_P", diff --git a/homeassistant/components/kraken/__init__.py b/homeassistant/components/kraken/__init__.py index 692f602460b..9a90e77f2b6 100644 --- a/homeassistant/components/kraken/__init__.py +++ b/homeassistant/components/kraken/__init__.py @@ -77,7 +77,7 @@ class KrakenData: return await self._hass.async_add_executor_job(self._get_kraken_data) except pykrakenapi.pykrakenapi.KrakenAPIError as error: if "Unknown asset pair" in str(error): - _LOGGER.info( + _LOGGER.warning( "Kraken.com reported an unknown asset pair. 
Refreshing list of" " tradable asset pairs" ) diff --git a/homeassistant/components/kraken/config_flow.py b/homeassistant/components/kraken/config_flow.py index 67778515273..54a817f0a50 100644 --- a/homeassistant/components/kraken/config_flow.py +++ b/homeassistant/components/kraken/config_flow.py @@ -33,7 +33,7 @@ class KrakenConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> KrakenOptionsFlowHandler: """Get the options flow for this handler.""" - return KrakenOptionsFlowHandler(config_entry) + return KrakenOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -53,10 +53,6 @@ class KrakenConfigFlow(ConfigFlow, domain=DOMAIN): class KrakenOptionsFlowHandler(OptionsFlow): """Handle Kraken client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Kraken options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/kraken/manifest.json b/homeassistant/components/kraken/manifest.json index 98347f7681b..fed16a673b5 100644 --- a/homeassistant/components/kraken/manifest.json +++ b/homeassistant/components/kraken/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/kraken", "iot_class": "cloud_polling", "loggers": ["krakenex", "pykrakenapi"], - "requirements": ["krakenex==2.1.0", "pykrakenapi==0.1.8"] + "requirements": ["krakenex==2.2.2", "pykrakenapi==0.1.8"] } diff --git a/homeassistant/components/kulersky/light.py b/homeassistant/components/kulersky/light.py index cb98e52250f..552507ef50b 100644 --- a/homeassistant/components/kulersky/light.py +++ b/homeassistant/components/kulersky/light.py @@ -137,7 +137,7 @@ class KulerskyLight(LightEntity): self._attr_available = False return if self._attr_available is False: - _LOGGER.info("Reconnected to %s", self._light.address) + _LOGGER.warning("Reconnected to %s", self._light.address) self._attr_available = True brightness = max(rgbw) diff --git a/homeassistant/components/kwb/manifest.json b/homeassistant/components/kwb/manifest.json index 36d3a0af2d7..6a11e08555f 100644 --- a/homeassistant/components/kwb/manifest.json +++ b/homeassistant/components/kwb/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kwb", "iot_class": "local_polling", "loggers": ["pykwb"], + "quality_scale": "legacy", "requirements": ["pykwb==0.0.8"] } diff --git a/homeassistant/components/lacrosse/manifest.json b/homeassistant/components/lacrosse/manifest.json index 0c7cf8b6dc6..b4023b533ca 100644 --- a/homeassistant/components/lacrosse/manifest.json +++ b/homeassistant/components/lacrosse/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/lacrosse", "iot_class": "local_polling", "loggers": ["pylacrosse"], + "quality_scale": "legacy", "requirements": ["pylacrosse==0.4"] } diff --git a/homeassistant/components/lacrosse_view/config_flow.py b/homeassistant/components/lacrosse_view/config_flow.py index 5a3fe4a03ca..ecf30f9a197 100644 --- a/homeassistant/components/lacrosse_view/config_flow.py +++ b/homeassistant/components/lacrosse_view/config_flow.py @@ -9,7 +9,7 @@ from typing import Any from lacrosse_view import LaCrosse, Location, LoginError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from 
homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -54,7 +54,6 @@ class LaCrosseViewConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self.data: dict[str, str] = {} self.locations: list[Location] = [] - self._reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -83,12 +82,10 @@ class LaCrosseViewConfigFlow(ConfigFlow, domain=DOMAIN): self.locations = info # Check if we are reauthenticating - if self._reauth_entry is not None: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=self._reauth_entry.data | self.data + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=self.data ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") _LOGGER.debug("Moving on to location step") return await self.async_step_location() @@ -139,9 +136,6 @@ class LaCrosseViewConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Reauth in case of a password change or other error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/lacrosse_view/manifest.json b/homeassistant/components/lacrosse_view/manifest.json index 1cf8794237d..453a0855229 100644 --- a/homeassistant/components/lacrosse_view/manifest.json +++ b/homeassistant/components/lacrosse_view/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/lacrosse_view", "iot_class": "cloud_polling", "loggers": ["lacrosse_view"], - "requirements": ["lacrosse-view==1.0.2"] + "requirements": ["lacrosse-view==1.0.3"] } diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index dfcaa54047d..d20616e1940 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -2,15 +2,15 @@ import logging -from lmcloud.client_bluetooth import LaMarzoccoBluetoothClient -from lmcloud.client_cloud import LaMarzoccoCloudClient -from lmcloud.client_local import LaMarzoccoLocalClient -from lmcloud.const import BT_MODEL_PREFIXES, FirmwareType -from lmcloud.exceptions import AuthFail, RequestNotSuccessful from packaging import version +from pylamarzocco.clients.bluetooth import LaMarzoccoBluetoothClient +from pylamarzocco.clients.cloud import LaMarzoccoCloudClient +from pylamarzocco.clients.local import LaMarzoccoLocalClient +from pylamarzocco.const import BT_MODEL_PREFIXES, FirmwareType +from pylamarzocco.devices.machine import LaMarzoccoMachine +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.components.bluetooth import async_discovered_service_info -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -23,10 +23,16 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import CONF_USE_BLUETOOTH, DOMAIN -from .coordinator import LaMarzoccoUpdateCoordinator +from .coordinator import ( 
+ LaMarzoccoConfigEntry, + LaMarzoccoConfigUpdateCoordinator, + LaMarzoccoFirmwareUpdateCoordinator, + LaMarzoccoRuntimeData, + LaMarzoccoStatisticsUpdateCoordinator, +) PLATFORMS = [ Platform.BINARY_SENSOR, @@ -41,8 +47,6 @@ PLATFORMS = [ _LOGGER = logging.getLogger(__name__) -type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoUpdateCoordinator] - async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -> bool: """Set up La Marzocco as config entry.""" @@ -50,9 +54,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - assert entry.unique_id serial = entry.unique_id + client = async_create_clientsession(hass) cloud_client = LaMarzoccoCloudClient( username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], + client=client, ) # initialize local API @@ -62,7 +68,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - local_client = LaMarzoccoLocalClient( host=host, local_bearer=entry.data[CONF_TOKEN], - client=get_async_client(hass), + client=client, ) # initialize Bluetooth @@ -100,18 +106,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - address_or_ble_device=entry.data[CONF_MAC], ) - coordinator = LaMarzoccoUpdateCoordinator( - hass=hass, - local_client=local_client, + device = LaMarzoccoMachine( + model=entry.data[CONF_MODEL], + serial_number=entry.unique_id, + name=entry.data[CONF_NAME], cloud_client=cloud_client, + local_client=local_client, bluetooth_client=bluetooth_client, ) - await coordinator.async_setup() - await coordinator.async_config_entry_first_refresh() - entry.runtime_data = coordinator + coordinators = LaMarzoccoRuntimeData( + LaMarzoccoConfigUpdateCoordinator(hass, entry, device, local_client), + LaMarzoccoFirmwareUpdateCoordinator(hass, entry, device), + LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device), + ) - gateway_version = coordinator.device.firmware[FirmwareType.GATEWAY].current_version + # API does not like concurrent requests, so no asyncio.gather here + await coordinators.config_coordinator.async_config_entry_first_refresh() + await coordinators.firmware_coordinator.async_config_entry_first_refresh() + await coordinators.statistics_coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinators + + gateway_version = device.firmware[FirmwareType.GATEWAY].current_version if version.parse(gateway_version) < version.parse("v3.4-rc5"): # incompatible gateway firmware, create an issue ir.async_create_issue( @@ -126,7 +143,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + async def update_listener( + hass: HomeAssistant, entry: LaMarzoccoConfigEntry + ) -> None: await hass.config_entries.async_reload(entry.entry_id) entry.async_on_unload(entry.add_update_listener(update_listener)) @@ -134,12 +153,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, entry: LaMarzoccoConfigEntry 
+) -> bool: """Migrate config entry.""" if entry.version > 2: # guard against downgrade from a future version diff --git a/homeassistant/components/lamarzocco/binary_sensor.py b/homeassistant/components/lamarzocco/binary_sensor.py index 81ac3672a0f..3d11992e7c1 100644 --- a/homeassistant/components/lamarzocco/binary_sensor.py +++ b/homeassistant/components/lamarzocco/binary_sensor.py @@ -3,7 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, @@ -14,9 +14,12 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry +from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoBinarySensorEntityDescription( @@ -61,7 +64,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up binary sensor entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoBinarySensorEntity(coordinator, description) diff --git a/homeassistant/components/lamarzocco/button.py b/homeassistant/components/lamarzocco/button.py index 7b38c9fbf72..22e92f656ff 100644 --- a/homeassistant/components/lamarzocco/button.py +++ b/homeassistant/components/lamarzocco/button.py @@ -1,18 +1,24 @@ """Button platform for La Marzocco espresso machines.""" +import asyncio from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.exceptions import RequestNotSuccessful from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry +from .const import DOMAIN +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 +BACKFLUSH_ENABLED_DURATION = 15 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoButtonEntityDescription( @@ -21,14 +27,25 @@ class LaMarzoccoButtonEntityDescription( ): """Description of a La Marzocco button.""" - press_fn: Callable[[LaMarzoccoMachine], Coroutine[Any, Any, None]] + press_fn: Callable[[LaMarzoccoUpdateCoordinator], Coroutine[Any, Any, None]] + + +async def async_backflush_and_update(coordinator: LaMarzoccoUpdateCoordinator) -> None: + """Press backflush button.""" + await coordinator.device.start_backflush() + # lib will set state optimistically + coordinator.async_set_updated_data(None) + # backflush is enabled for 15 seconds + # then turns off automatically + await asyncio.sleep(BACKFLUSH_ENABLED_DURATION + 1) + await coordinator.async_request_refresh() ENTITIES: tuple[LaMarzoccoButtonEntityDescription, ...] 
= ( LaMarzoccoButtonEntityDescription( key="start_backflush", translation_key="start_backflush", - press_fn=lambda machine: machine.start_backflush(), + press_fn=async_backflush_and_update, ), ) @@ -40,7 +57,7 @@ async def async_setup_entry( ) -> None: """Set up button entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoButtonEntity(coordinator, description) for description in ENTITIES @@ -55,5 +72,13 @@ class LaMarzoccoButtonEntity(LaMarzoccoEntity, ButtonEntity): async def async_press(self) -> None: """Press button.""" - await self.entity_description.press_fn(self.coordinator.device) - await self.coordinator.async_request_refresh() + try: + await self.entity_description.press_fn(self.coordinator) + except RequestNotSuccessful as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="button_error", + translation_placeholders={ + "key": self.entity_description.key, + }, + ) from exc diff --git a/homeassistant/components/lamarzocco/calendar.py b/homeassistant/components/lamarzocco/calendar.py index 8b3240ff7a1..1dcc7c324ac 100644 --- a/homeassistant/components/lamarzocco/calendar.py +++ b/homeassistant/components/lamarzocco/calendar.py @@ -3,17 +3,19 @@ from collections.abc import Iterator from datetime import datetime, timedelta -from lmcloud.models import LaMarzoccoWakeUpSleepEntry +from pylamarzocco.models import LaMarzoccoWakeUpSleepEntry from homeassistant.components.calendar import CalendarEntity, CalendarEvent from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . import LaMarzoccoConfigEntry -from .coordinator import LaMarzoccoUpdateCoordinator +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoBaseEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + CALENDAR_KEY = "auto_on_off_schedule" DAY_OF_WEEK = [ @@ -34,7 +36,7 @@ async def async_setup_entry( ) -> None: """Set up switch entities and services.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoCalendarEntity(coordinator, CALENDAR_KEY, wake_up_sleep_entry) for wake_up_sleep_entry in coordinator.device.config.wake_up_sleep_entries.values() diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index b4fed615733..5d927c6cc79 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -1,24 +1,32 @@ """Config flow for La Marzocco integration.""" +from __future__ import annotations + from collections.abc import Mapping import logging from typing import Any -from lmcloud.client_cloud import LaMarzoccoCloudClient -from lmcloud.client_local import LaMarzoccoLocalClient -from lmcloud.exceptions import AuthFail, RequestNotSuccessful -from lmcloud.models import LaMarzoccoDeviceInfo +from aiohttp import ClientSession +from pylamarzocco.clients.cloud import LaMarzoccoCloudClient +from pylamarzocco.clients.local import LaMarzoccoLocalClient +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoDeviceInfo import voluptuous as vol -from homeassistant.components.bluetooth import BluetoothServiceInfo +from homeassistant.components.bluetooth import ( + BluetoothServiceInfo, + 
async_discovered_service_info, +) +from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.config_entries import ( - ConfigEntry, + SOURCE_REAUTH, + SOURCE_RECONFIGURE, ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( + CONF_ADDRESS, CONF_HOST, CONF_MAC, CONF_MODEL, @@ -29,7 +37,7 @@ from homeassistant.const import ( ) from homeassistant.core import callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.aiohttp_client import async_create_clientsession from homeassistant.helpers.selector import ( SelectOptionDict, SelectSelector, @@ -38,6 +46,7 @@ from homeassistant.helpers.selector import ( ) from .const import CONF_USE_BLUETOOTH, DOMAIN +from .coordinator import LaMarzoccoConfigEntry CONF_MACHINE = "machine" @@ -49,10 +58,10 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 2 + _client: ClientSession + def __init__(self) -> None: """Initialize the config flow.""" - - self.reauth_entry: ConfigEntry | None = None self._config: dict[str, Any] = {} self._fleet: dict[str, LaMarzoccoDeviceInfo] = {} self._discovered: dict[str, str] = {} @@ -66,17 +75,19 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): if user_input: data: dict[str, Any] = {} - if self.reauth_entry: - data = dict(self.reauth_entry.data) + if self.source == SOURCE_REAUTH: + data = dict(self._get_reauth_entry().data) data = { **data, **user_input, **self._discovered, } + self._client = async_create_clientsession(self.hass) cloud_client = LaMarzoccoCloudClient( username=data[CONF_USERNAME], password=data[CONF_PASSWORD], + client=self._client, ) try: self._fleet = await cloud_client.get_customer_fleet() @@ -91,19 +102,24 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "no_machines" if not errors: - if self.reauth_entry: - self.hass.config_entries.async_update_entry( - self.reauth_entry, data=data + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data ) - await self.hass.config_entries.async_reload( - self.reauth_entry.entry_id - ) - return self.async_abort(reason="reauth_successful") if self._discovered: if self._discovered[CONF_MACHINE] not in self._fleet: errors["base"] = "machine_not_found" else: self._config = data + # if DHCP discovery was used, auto fill machine selection + if CONF_HOST in self._discovered: + return await self.async_step_machine_selection( + user_input={ + CONF_HOST: self._discovered[CONF_HOST], + CONF_MACHINE: self._discovered[CONF_MACHINE], + } + ) + # if Bluetooth discovery was used, only select host return self.async_show_form( step_id="machine_selection", data_schema=vol.Schema( @@ -115,6 +131,12 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): self._config = data return await self.async_step_machine_selection() + placeholders: dict[str, str] | None = None + if self._discovered: + self.context["title_placeholders"] = placeholders = { + CONF_NAME: self._discovered[CONF_MACHINE] + } + return self.async_show_form( step_id="user", data_schema=vol.Schema( @@ -124,6 +146,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): } ), errors=errors, + description_placeholders=placeholders, ) async def async_step_machine_selection( @@ -134,8 +157,9 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): if user_input: if not self._discovered: serial_number = user_input[CONF_MACHINE] - await self.async_set_unique_id(serial_number) - 
self._abort_if_unique_id_configured() + if self.source != SOURCE_RECONFIGURE: + await self.async_set_unique_id(serial_number) + self._abort_if_unique_id_configured() else: serial_number = self._discovered[CONF_MACHINE] @@ -144,7 +168,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): # validate local connection if host is provided if user_input.get(CONF_HOST): if not await LaMarzoccoLocalClient.validate_connection( - client=get_async_client(self.hass), + client=self._client, host=user_input[CONF_HOST], token=selected_device.communication_key, ): @@ -153,6 +177,13 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): self._config[CONF_HOST] = user_input[CONF_HOST] if not errors: + if self.source == SOURCE_RECONFIGURE: + for service_info in async_discovered_service_info(self.hass): + self._discovered[service_info.name] = service_info.address + + if self._discovered: + return await self.async_step_bluetooth_selection() + return self.async_create_entry( title=selected_device.name, data={ @@ -191,6 +222,42 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_bluetooth_selection( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle Bluetooth device selection.""" + + if user_input is not None: + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data={ + **self._config, + CONF_MAC: user_input[CONF_MAC], + }, + ) + + bt_options = [ + SelectOptionDict( + value=device_mac, + label=f"{device_name} ({device_mac})", + ) + for device_name, device_mac in self._discovered.items() + ] + + return self.async_show_form( + step_id="bluetooth_selection", + data_schema=vol.Schema( + { + vol.Required(CONF_MAC): SelectSelector( + SelectSelectorConfig( + options=bt_options, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + }, + ), + ) + async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfo ) -> ConfigFlowResult: @@ -215,13 +282,38 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user() + async def async_step_dhcp( + self, discovery_info: DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle discovery via dhcp.""" + + serial = discovery_info.hostname.upper() + + await self.async_set_unique_id(serial) + self._abort_if_unique_id_configured( + updates={ + CONF_HOST: discovery_info.ip, + CONF_ADDRESS: discovery_info.macaddress, + } + ) + self._async_abort_entries_match({CONF_ADDRESS: discovery_info.macaddress}) + + _LOGGER.debug( + "Discovered La Marzocco machine %s through DHCP at address %s", + discovery_info.hostname, + discovery_info.ip, + ) + + self._discovered[CONF_MACHINE] = serial + self._discovered[CONF_HOST] = discovery_info.ip + self._discovered[CONF_ADDRESS] = discovery_info.macaddress + + return await self.async_step_user() + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -240,16 +332,40 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user(user_input) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Perform reconfiguration of the config entry.""" + if not user_input: + reconfigure_entry = self._get_reconfigure_entry() + return self.async_show_form( + step_id="reconfigure", + data_schema=vol.Schema( + { 
+ vol.Required( + CONF_USERNAME, + default=reconfigure_entry.data[CONF_USERNAME], + ): str, + vol.Required( + CONF_PASSWORD, + default=reconfigure_entry.data[CONF_PASSWORD], + ): str, + } + ), + ) + + return await self.async_step_user(user_input) + @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, - ) -> OptionsFlow: + config_entry: LaMarzoccoConfigEntry, + ) -> LmOptionsFlowHandler: """Create the options flow.""" - return LmOptionsFlowHandler(config_entry) + return LmOptionsFlowHandler() -class LmOptionsFlowHandler(OptionsFlowWithConfigEntry): +class LmOptionsFlowHandler(OptionsFlow): """Handles options flow for the component.""" async def async_step_init( @@ -263,7 +379,7 @@ class LmOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONF_USE_BLUETOOTH, - default=self.options.get(CONF_USE_BLUETOOTH, True), + default=self.config_entry.options.get(CONF_USE_BLUETOOTH, True), ): cv.boolean, } ) diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index 2c78a925ca4..aca84fc4660 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -1,19 +1,19 @@ """Coordinator for La Marzocco API.""" -from collections.abc import Callable, Coroutine +from __future__ import annotations + +from abc import abstractmethod +from dataclasses import dataclass from datetime import timedelta import logging -from time import time from typing import Any -from lmcloud.client_bluetooth import LaMarzoccoBluetoothClient -from lmcloud.client_cloud import LaMarzoccoCloudClient -from lmcloud.client_local import LaMarzoccoLocalClient -from lmcloud.exceptions import AuthFail, RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.clients.local import LaMarzoccoLocalClient +from pylamarzocco.devices.machine import LaMarzoccoMachine +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MODEL, CONF_NAME, EVENT_HOMEASSISTANT_STOP +from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -21,43 +21,72 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN SCAN_INTERVAL = timedelta(seconds=30) -FIRMWARE_UPDATE_INTERVAL = 3600 -STATISTICS_UPDATE_INTERVAL = 300 - +FIRMWARE_UPDATE_INTERVAL = timedelta(hours=1) +STATISTICS_UPDATE_INTERVAL = timedelta(minutes=5) _LOGGER = logging.getLogger(__name__) -class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): - """Class to handle fetching data from the La Marzocco API centrally.""" +@dataclass +class LaMarzoccoRuntimeData: + """Runtime data for La Marzocco.""" - config_entry: ConfigEntry + config_coordinator: LaMarzoccoConfigUpdateCoordinator + firmware_coordinator: LaMarzoccoFirmwareUpdateCoordinator + statistics_coordinator: LaMarzoccoStatisticsUpdateCoordinator + + +type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoRuntimeData] + + +class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): + """Base class for La Marzocco coordinators.""" + + _default_update_interval = SCAN_INTERVAL + config_entry: LaMarzoccoConfigEntry def __init__( self, hass: HomeAssistant, - cloud_client: LaMarzoccoCloudClient, - local_client: 
LaMarzoccoLocalClient | None, - bluetooth_client: LaMarzoccoBluetoothClient | None, + entry: LaMarzoccoConfigEntry, + device: LaMarzoccoMachine, + local_client: LaMarzoccoLocalClient | None = None, ) -> None: """Initialize coordinator.""" - super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) - self.local_connection_configured = local_client is not None - - assert self.config_entry.unique_id - self.device = LaMarzoccoMachine( - model=self.config_entry.data[CONF_MODEL], - serial_number=self.config_entry.unique_id, - name=self.config_entry.data[CONF_NAME], - cloud_client=cloud_client, - local_client=local_client, - bluetooth_client=bluetooth_client, + super().__init__( + hass, + _LOGGER, + config_entry=entry, + name=DOMAIN, + update_interval=self._default_update_interval, ) - - self._last_firmware_data_update: float | None = None - self._last_statistics_data_update: float | None = None + self.device = device + self.local_connection_configured = local_client is not None self._local_client = local_client - async def async_setup(self) -> None: + async def _async_update_data(self) -> None: + """Do the data update.""" + try: + await self._internal_async_update_data() + except AuthFail as ex: + _LOGGER.debug("Authentication failed", exc_info=True) + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, translation_key="authentication_failed" + ) from ex + except RequestNotSuccessful as ex: + _LOGGER.debug(ex, exc_info=True) + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="api_error" + ) from ex + + @abstractmethod + async def _internal_async_update_data(self) -> None: + """Actual data update logic.""" + + +class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): + """Class to handle fetching data from the La Marzocco API centrally.""" + + async def _async_setup(self) -> None: """Set up the coordinator.""" if self._local_client is not None: _LOGGER.debug("Init WebSocket in background task") @@ -74,9 +103,8 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): if ( self._local_client is not None and self._local_client.websocket is not None - and self._local_client.websocket.open + and not self._local_client.websocket.closed ): - self._local_client.terminating = True await self._local_client.websocket.close() self.config_entry.async_on_unload( @@ -86,38 +114,29 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): ) self.config_entry.async_on_unload(websocket_close) - async def _async_update_data(self) -> None: + async def _internal_async_update_data(self) -> None: """Fetch data from API endpoint.""" - await self._async_handle_request(self.device.get_config) - - if ( - self._last_firmware_data_update is None - or (self._last_firmware_data_update + FIRMWARE_UPDATE_INTERVAL) < time() - ): - await self._async_handle_request(self.device.get_firmware) - self._last_firmware_data_update = time() - - if ( - self._last_statistics_data_update is None - or (self._last_statistics_data_update + STATISTICS_UPDATE_INTERVAL) < time() - ): - await self._async_handle_request(self.device.get_statistics) - self._last_statistics_data_update = time() - + await self.device.get_config() _LOGGER.debug("Current status: %s", str(self.device.config)) - async def _async_handle_request[**_P]( - self, - func: Callable[_P, Coroutine[None, None, None]], - *args: _P.args, - **kwargs: _P.kwargs, - ) -> None: - try: - await func() - except AuthFail as ex: - msg = "Authentication failed." 
- _LOGGER.debug(msg, exc_info=True) - raise ConfigEntryAuthFailed(msg) from ex - except RequestNotSuccessful as ex: - _LOGGER.debug(ex, exc_info=True) - raise UpdateFailed(f"Querying API failed. Error: {ex}") from ex + +class LaMarzoccoFirmwareUpdateCoordinator(LaMarzoccoUpdateCoordinator): + """Coordinator for La Marzocco firmware.""" + + _default_update_interval = FIRMWARE_UPDATE_INTERVAL + + async def _internal_async_update_data(self) -> None: + """Fetch data from API endpoint.""" + await self.device.get_firmware() + _LOGGER.debug("Current firmware: %s", str(self.device.firmware)) + + +class LaMarzoccoStatisticsUpdateCoordinator(LaMarzoccoUpdateCoordinator): + """Coordinator for La Marzocco statistics.""" + + _default_update_interval = STATISTICS_UPDATE_INTERVAL + + async def _internal_async_update_data(self) -> None: + """Fetch data from API endpoint.""" + await self.device.get_statistics() + _LOGGER.debug("Current statistics: %s", str(self.device.statistics)) diff --git a/homeassistant/components/lamarzocco/diagnostics.py b/homeassistant/components/lamarzocco/diagnostics.py index 4293fdca615..204a8b7142a 100644 --- a/homeassistant/components/lamarzocco/diagnostics.py +++ b/homeassistant/components/lamarzocco/diagnostics.py @@ -5,12 +5,12 @@ from __future__ import annotations from dataclasses import asdict from typing import Any, TypedDict -from lmcloud.const import FirmwareType +from pylamarzocco.const import FirmwareType from homeassistant.components.diagnostics import async_redact_data from homeassistant.core import HomeAssistant -from . import LaMarzoccoConfigEntry +from .coordinator import LaMarzoccoConfigEntry TO_REDACT = { "serial_number", @@ -31,7 +31,7 @@ async def async_get_config_entry_diagnostics( entry: LaMarzoccoConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator device = coordinator.device # collect all data sources diagnostics_data = DiagnosticsData( diff --git a/homeassistant/components/lamarzocco/entity.py b/homeassistant/components/lamarzocco/entity.py index 9cc2ce8ef6b..c3385eebd52 100644 --- a/homeassistant/components/lamarzocco/entity.py +++ b/homeassistant/components/lamarzocco/entity.py @@ -3,10 +3,15 @@ from collections.abc import Callable from dataclasses import dataclass -from lmcloud.const import FirmwareType -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.const import FirmwareType +from pylamarzocco.devices.machine import LaMarzoccoMachine -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.const import CONF_ADDRESS, CONF_MAC +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + CONNECTION_NETWORK_MAC, + DeviceInfo, +) from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -43,9 +48,21 @@ class LaMarzoccoBaseEntity( name=device.name, manufacturer="La Marzocco", model=device.full_model_name, + model_id=device.model, serial_number=device.serial_number, sw_version=device.firmware[FirmwareType.MACHINE].current_version, ) + connections: set[tuple[str, str]] = set() + if coordinator.config_entry.data.get(CONF_ADDRESS): + connections.add( + (CONNECTION_NETWORK_MAC, coordinator.config_entry.data[CONF_ADDRESS]) + ) + if coordinator.config_entry.data.get(CONF_MAC): + connections.add( + (CONNECTION_BLUETOOTH, coordinator.config_entry.data[CONF_MAC]) + ) + if connections: + 
self._attr_device_info.update(DeviceInfo(connections=connections)) class LaMarzoccoEntity(LaMarzoccoBaseEntity): diff --git a/homeassistant/components/lamarzocco/icons.json b/homeassistant/components/lamarzocco/icons.json index bc7d621d91d..860da12ddd9 100644 --- a/homeassistant/components/lamarzocco/icons.json +++ b/homeassistant/components/lamarzocco/icons.json @@ -43,6 +43,9 @@ "preinfusion_off": { "default": "mdi:water" }, + "smart_standby_time": { + "default": "mdi:timer" + }, "steam_temp": { "default": "mdi:thermometer-water" }, @@ -51,6 +54,13 @@ } }, "select": { + "smart_standby_mode": { + "default": "mdi:power", + "state": { + "poweron": "mdi:power", + "lastbrewing": "mdi:coffee" + } + }, "steam_temp_select": { "default": "mdi:thermometer", "state": { @@ -100,6 +110,12 @@ "off": "mdi:alarm-off" } }, + "smart_standby_enabled": { + "state": { + "on": "mdi:sleep", + "off": "mdi:sleep-off" + } + }, "steam_boiler": { "default": "mdi:water-boiler", "state": { diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 73d14250525..7505843850c 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -18,9 +18,24 @@ "codeowners": ["@zweckj"], "config_flow": true, "dependencies": ["bluetooth_adapters"], + "dhcp": [ + { + "registered_devices": true + }, + { + "hostname": "gs[0-9][0-9][0-9][0-9][0-9][0-9]" + }, + { + "hostname": "lm[0-9][0-9][0-9][0-9][0-9][0-9]" + }, + { + "hostname": "mr[0-9][0-9][0-9][0-9][0-9][0-9]" + } + ], "documentation": "https://www.home-assistant.io/integrations/lamarzocco", "integration_type": "device", "iot_class": "cloud_polling", - "loggers": ["lmcloud"], - "requirements": ["lmcloud==1.1.13"] + "loggers": ["pylamarzocco"], + "quality_scale": "platinum", + "requirements": ["pylamarzocco==1.4.0"] } diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index 69e5b42c116..a1389769194 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -4,15 +4,16 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from lmcloud.const import ( +from pylamarzocco.const import ( KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey, PrebrewMode, ) -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.devices.machine import LaMarzoccoMachine +from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.number import ( NumberDeviceClass, @@ -27,12 +28,15 @@ from homeassistant.const import ( UnitOfTime, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry -from .coordinator import LaMarzoccoUpdateCoordinator +from .const import DOMAIN +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoNumberEntityDescription( @@ -106,6 +110,22 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] 
= ( MachineModel.GS3_MP, ), ), + LaMarzoccoNumberEntityDescription( + key="smart_standby_time", + translation_key="smart_standby_time", + device_class=NumberDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.MINUTES, + native_step=10, + native_min_value=10, + native_max_value=240, + entity_category=EntityCategory.CONFIG, + set_value_fn=lambda machine, value: machine.set_smart_standby( + enabled=machine.config.smart_standby.enabled, + mode=machine.config.smart_standby.mode, + minutes=int(value), + ), + native_value_fn=lambda config: config.smart_standby.minutes, + ), ) @@ -190,7 +210,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up number entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator entities: list[NumberEntity] = [ LaMarzoccoNumberEntity(coordinator, description) for description in ENTITIES @@ -220,7 +240,19 @@ class LaMarzoccoNumberEntity(LaMarzoccoEntity, NumberEntity): async def async_set_native_value(self, value: float) -> None: """Set the value.""" if value != self.native_value: - await self.entity_description.set_value_fn(self.coordinator.device, value) + try: + await self.entity_description.set_value_fn( + self.coordinator.device, value + ) + except RequestNotSuccessful as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="number_exception", + translation_placeholders={ + "key": self.entity_description.key, + "value": str(value), + }, + ) from exc self.async_write_ha_state() @@ -258,7 +290,18 @@ class LaMarzoccoKeyNumberEntity(LaMarzoccoEntity, NumberEntity): async def async_set_native_value(self, value: float) -> None: """Set the value.""" if value != self.native_value: - await self.entity_description.set_value_fn( - self.coordinator.device, value, PhysicalKey(self.pyhsical_key) - ) + try: + await self.entity_description.set_value_fn( + self.coordinator.device, value, PhysicalKey(self.pyhsical_key) + ) + except RequestNotSuccessful as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="number_exception_key", + translation_placeholders={ + "key": self.entity_description.key, + "value": str(value), + "physical_key": str(self.pyhsical_key), + }, + ) from exc self.async_write_ha_state() diff --git a/homeassistant/components/lamarzocco/quality_scale.yaml b/homeassistant/components/lamarzocco/quality_scale.yaml new file mode 100644 index 00000000000..3677bd8d6b8 --- /dev/null +++ b/homeassistant/components/lamarzocco/quality_scale.yaml @@ -0,0 +1,87 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions are defined. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions are defined. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + No explicit event subscriptions. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions are defined. 
+ config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: done + comment: | + Handled by coordinator. + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: + status: done + comment: | + DHCP & Bluetooth discovery. + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + Device type integration. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: done + stale-devices: + status: exempt + comment: | + Device type integration. + + # Platinum + async-dependency: done + inject-websession: + status: done + comment: | + Uses `httpx` session. + strict-typing: done diff --git a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index 5bff815fb95..595c157b823 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -4,29 +4,30 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from lmcloud.const import MachineModel, PrebrewMode, SteamLevel -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from pylamarzocco.devices.machine import LaMarzoccoMachine +from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry +from .const import DOMAIN +from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 + STEAM_LEVEL_HA_TO_LM = { "1": SteamLevel.LEVEL_1, "2": SteamLevel.LEVEL_2, "3": SteamLevel.LEVEL_3, } -STEAM_LEVEL_LM_TO_HA = { - SteamLevel.LEVEL_1: "1", - SteamLevel.LEVEL_2: "2", - SteamLevel.LEVEL_3: "3", -} +STEAM_LEVEL_LM_TO_HA = {value: key for key, value in STEAM_LEVEL_HA_TO_LM.items()} PREBREW_MODE_HA_TO_LM = { "disabled": PrebrewMode.DISABLED, @@ -34,12 +35,15 @@ PREBREW_MODE_HA_TO_LM = { "preinfusion": PrebrewMode.PREINFUSION, } -PREBREW_MODE_LM_TO_HA = { - PrebrewMode.DISABLED: "disabled", - PrebrewMode.PREBREW: "prebrew", - PrebrewMode.PREINFUSION: "preinfusion", +PREBREW_MODE_LM_TO_HA = {value: key for key, value in PREBREW_MODE_HA_TO_LM.items()} + +STANDBY_MODE_HA_TO_LM = { + "power_on": SmartStandbyMode.POWER_ON, + "last_brewing": SmartStandbyMode.LAST_BREWING, } +STANDBY_MODE_LM_TO_HA = {value: key for key, value in STANDBY_MODE_HA_TO_LM.items()} + @dataclass(frozen=True, kw_only=True) class LaMarzoccoSelectEntityDescription( @@ -80,6 +84,20 @@ ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] 
= ( MachineModel.LINEA_MINI, ), ), + LaMarzoccoSelectEntityDescription( + key="smart_standby_mode", + translation_key="smart_standby_mode", + entity_category=EntityCategory.CONFIG, + options=["power_on", "last_brewing"], + select_option_fn=lambda machine, option: machine.set_smart_standby( + enabled=machine.config.smart_standby.enabled, + mode=STANDBY_MODE_HA_TO_LM[option], + minutes=machine.config.smart_standby.minutes, + ), + current_option_fn=lambda config: STANDBY_MODE_LM_TO_HA[ + config.smart_standby.mode + ], + ), ) @@ -89,7 +107,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up select entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoSelectEntity(coordinator, description) @@ -113,7 +131,17 @@ class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity): async def async_select_option(self, option: str) -> None: """Change the selected option.""" if option != self.current_option: - await self.entity_description.select_option_fn( - self.coordinator.device, option - ) + try: + await self.entity_description.select_option_fn( + self.coordinator.device, option + ) + except RequestNotSuccessful as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="select_option_error", + translation_placeholders={ + "key": self.entity_description.key, + "option": option, + }, + ) from exc self.async_write_ha_state() diff --git a/homeassistant/components/lamarzocco/sensor.py b/homeassistant/components/lamarzocco/sensor.py index 225f0a43c5c..8d57c1b8403 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from lmcloud.const import BoilerType, MachineModel, PhysicalKey -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.const import BoilerType, MachineModel, PhysicalKey +from pylamarzocco.devices.machine import LaMarzoccoMachine from homeassistant.components.sensor import ( SensorDeviceClass, @@ -16,9 +16,12 @@ from homeassistant.const import EntityCategory, UnitOfTemperature, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry +from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoSensorEntityDescription( @@ -30,24 +33,6 @@ class LaMarzoccoSensorEntityDescription( ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] 
= ( - LaMarzoccoSensorEntityDescription( - key="drink_stats_coffee", - translation_key="drink_stats_coffee", - native_unit_of_measurement="drinks", - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.statistics.drink_stats.get(PhysicalKey.A, 0), - available_fn=lambda device: len(device.statistics.drink_stats) > 0, - entity_category=EntityCategory.DIAGNOSTIC, - ), - LaMarzoccoSensorEntityDescription( - key="drink_stats_flushing", - translation_key="drink_stats_flushing", - native_unit_of_measurement="drinks", - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.statistics.total_flushes, - available_fn=lambda device: len(device.statistics.drink_stats) > 0, - entity_category=EntityCategory.DIAGNOSTIC, - ), LaMarzoccoSensorEntityDescription( key="shot_timer", translation_key="shot_timer", @@ -85,6 +70,27 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = ( ), ) +STATISTIC_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = ( + LaMarzoccoSensorEntityDescription( + key="drink_stats_coffee", + translation_key="drink_stats_coffee", + native_unit_of_measurement="drinks", + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda device: device.statistics.drink_stats.get(PhysicalKey.A, 0), + available_fn=lambda device: len(device.statistics.drink_stats) > 0, + entity_category=EntityCategory.DIAGNOSTIC, + ), + LaMarzoccoSensorEntityDescription( + key="drink_stats_flushing", + translation_key="drink_stats_flushing", + native_unit_of_measurement="drinks", + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda device: device.statistics.total_flushes, + available_fn=lambda device: len(device.statistics.drink_stats) > 0, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -92,14 +98,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensor entities.""" - coordinator = entry.runtime_data + config_coordinator = entry.runtime_data.config_coordinator - async_add_entities( - LaMarzoccoSensorEntity(coordinator, description) + entities = [ + LaMarzoccoSensorEntity(config_coordinator, description) for description in ENTITIES - if description.supported_fn(coordinator) + if description.supported_fn(config_coordinator) + ] + + statistics_coordinator = entry.runtime_data.statistics_coordinator + entities.extend( + LaMarzoccoSensorEntity(statistics_coordinator, description) + for description in STATISTIC_ENTITIES + if description.supported_fn(statistics_coordinator) ) + async_add_entities(entities) + class LaMarzoccoSensorEntity(LaMarzoccoEntity, SensorEntity): """Sensor representing espresso machine temperature data.""" diff --git a/homeassistant/components/lamarzocco/strings.json b/homeassistant/components/lamarzocco/strings.json index 08e3e764379..666eb7f4a84 100644 --- a/homeassistant/components/lamarzocco/strings.json +++ b/homeassistant/components/lamarzocco/strings.json @@ -1,12 +1,13 @@ { "config": { - "flow_title": "La Marzocco Espresso {host}", "abort": { "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "machine_not_found": "Discovered machine not found in given account", "no_machines": "No machines found in account", 
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" }, @@ -21,6 +22,15 @@ "password": "Your password from the La Marzocco app" } }, + "bluetooth_selection": { + "description": "Select your device from available Bluetooth devices.", + "data": { + "mac": "[%key:common::config_flow::data::device%]" + }, + "data_description": { + "mac": "Select the Bluetooth device that is your machine" + } + }, "machine_selection": { "description": "Select the machine you want to integrate. Set the \"IP\" to get access to shot time related sensors.", "data": { @@ -28,7 +38,8 @@ "machine": "Machine" }, "data_description": { - "host": "Local IP address of the machine" + "host": "Local IP address of the machine", + "machine": "Select the machine you want to integrate" } }, "reauth_confirm": { @@ -39,6 +50,16 @@ "data_description": { "password": "[%key:component::lamarzocco::config::step::user::data_description::password%]" } + }, + "reconfigure": { + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::lamarzocco::config::step::user::data_description::username%]", + "password": "[%key:component::lamarzocco::config::step::user::data_description::password%]" + } } } }, @@ -46,8 +67,10 @@ "step": { "init": { "data": { - "title": "Update Configuration", "use_bluetooth": "Use Bluetooth" + }, + "data_description": { + "use_bluetooth": "Should the integration try to use Bluetooth to control the machine?" } } } @@ -99,6 +122,9 @@ "preinfusion_off_key": { "name": "Preinfusion time Key {key}" }, + "smart_standby_time": { + "name": "Smart standby time" + }, "steam_temp": { "name": "Steam target temperature" }, @@ -115,6 +141,13 @@ "preinfusion": "Preinfusion" } }, + "smart_standby_mode": { + "name": "Smart standby mode", + "state": { + "last_brewing": "Last brewing", + "power_on": "Power on" + } + }, "steam_temp_select": { "name": "Steam level", "state": { @@ -145,6 +178,9 @@ "auto_on_off": { "name": "Auto on/off ({id})" }, + "smart_standby_enabled": { + "name": "Smart standby enabled" + }, "steam_boiler": { "name": "Steam boiler" } @@ -163,5 +199,37 @@ "title": "Unsupported gateway firmware", "description": "Gateway firmware {gateway_version} is no longer supported by this integration, please update." 
} + }, + "exceptions": { + "api_error": { + "message": "Error while communicating with the API" + }, + "authentication_failed": { + "message": "Authentication failed" + }, + "auto_on_off_error": { + "message": "Error while setting auto on/off to {state} for {id}" + }, + "button_error": { + "message": "Error while executing button {key}" + }, + "number_exception": { + "message": "Error while setting value {value} for number {key}" + }, + "number_exception_key": { + "message": "Error while setting value {value} for number {key}, key {physical_key}" + }, + "select_option_error": { + "message": "Error while setting select option {option} for {key}" + }, + "switch_on_error": { + "message": "Error while turning on switch {key}" + }, + "switch_off_error": { + "message": "Error while turning off switch {key}" + }, + "update_failed": { + "message": "Error while updating {key}" + } } } diff --git a/homeassistant/components/lamarzocco/switch.py b/homeassistant/components/lamarzocco/switch.py index c57e0662ab2..54bd1ac2aed 100644 --- a/homeassistant/components/lamarzocco/switch.py +++ b/homeassistant/components/lamarzocco/switch.py @@ -4,19 +4,23 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from lmcloud.const import BoilerType -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.const import BoilerType +from pylamarzocco.devices.machine import LaMarzoccoMachine +from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry -from .coordinator import LaMarzoccoUpdateCoordinator +from .const import DOMAIN +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoBaseEntity, LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoSwitchEntityDescription( @@ -43,6 +47,17 @@ ENTITIES: tuple[LaMarzoccoSwitchEntityDescription, ...] 
= ( control_fn=lambda machine, state: machine.set_steam(state), is_on_fn=lambda config: config.boilers[BoilerType.STEAM].enabled, ), + LaMarzoccoSwitchEntityDescription( + key="smart_standby_enabled", + translation_key="smart_standby_enabled", + entity_category=EntityCategory.CONFIG, + control_fn=lambda machine, state: machine.set_smart_standby( + enabled=state, + mode=machine.config.smart_standby.mode, + minutes=machine.config.smart_standby.minutes, + ), + is_on_fn=lambda config: config.smart_standby.enabled, + ), ) @@ -53,7 +68,7 @@ async def async_setup_entry( ) -> None: """Set up switch entities and services.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator entities: list[SwitchEntity] = [] entities.extend( @@ -77,12 +92,26 @@ class LaMarzoccoSwitchEntity(LaMarzoccoEntity, SwitchEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn device on.""" - await self.entity_description.control_fn(self.coordinator.device, True) + try: + await self.entity_description.control_fn(self.coordinator.device, True) + except RequestNotSuccessful as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="switch_on_error", + translation_placeholders={"key": self.entity_description.key}, + ) from exc self.async_write_ha_state() async def async_turn_off(self, **kwargs: Any) -> None: """Turn device off.""" - await self.entity_description.control_fn(self.coordinator.device, False) + try: + await self.entity_description.control_fn(self.coordinator.device, False) + except RequestNotSuccessful as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="switch_off_error", + translation_placeholders={"key": self.entity_description.key}, + ) from exc self.async_write_ha_state() @property @@ -114,7 +143,14 @@ class LaMarzoccoAutoOnOffSwitchEntity(LaMarzoccoBaseEntity, SwitchEntity): self._identifier ] wake_up_sleep_entry.enabled = state - await self.coordinator.device.set_wake_up_sleep(wake_up_sleep_entry) + try: + await self.coordinator.device.set_wake_up_sleep(wake_up_sleep_entry) + except RequestNotSuccessful as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="auto_on_off_error", + translation_placeholders={"id": self._identifier, "state": str(state)}, + ) from exc self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/lamarzocco/update.py b/homeassistant/components/lamarzocco/update.py index 2769016e43b..0833ee6e249 100644 --- a/homeassistant/components/lamarzocco/update.py +++ b/homeassistant/components/lamarzocco/update.py @@ -3,7 +3,8 @@ from dataclasses import dataclass from typing import Any -from lmcloud.const import FirmwareType +from pylamarzocco.const import FirmwareType +from pylamarzocco.exceptions import RequestNotSuccessful from homeassistant.components.update import ( UpdateDeviceClass, @@ -16,9 +17,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import LaMarzoccoConfigEntry +from .const import DOMAIN +from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoUpdateEntityDescription( @@ -55,7 +59,7 @@ async def async_setup_entry( ) -> None: """Create update entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.firmware_coordinator async_add_entities( LaMarzoccoUpdateEntity(coordinator, description) for description in ENTITIES @@ -94,10 +98,25 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity): """Install an update.""" self._attr_in_progress = True self.async_write_ha_state() - success = await self.coordinator.device.update_firmware( - self.entity_description.component - ) + try: + success = await self.coordinator.device.update_firmware( + self.entity_description.component + ) + except RequestNotSuccessful as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={ + "key": self.entity_description.key, + }, + ) from exc if not success: - raise HomeAssistantError("Update failed") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={ + "key": self.entity_description.key, + }, + ) self._attr_in_progress = False await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/lametric/config_flow.py b/homeassistant/components/lametric/config_flow.py index 8dbd5279bc6..05c5dea77d1 100644 --- a/homeassistant/components/lametric/config_flow.py +++ b/homeassistant/components/lametric/config_flow.py @@ -29,7 +29,7 @@ from homeassistant.components.ssdp import ( ATTR_UPNP_SERIAL, SsdpServiceInfo, ) -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_DEVICE, CONF_HOST, CONF_MAC from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -59,7 +59,6 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): discovered_host: str discovered_serial: str discovered: bool = False - reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -113,9 +112,6 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with LaMetric.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_choice_enter_manual_or_fetch_cloud() async def async_step_choice_enter_manual_or_fetch_cloud( @@ -138,8 +134,8 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): if user_input is not None: if self.discovered: host = self.discovered_host - elif self.reauth_entry: - host = self.reauth_entry.data[CONF_HOST] + elif self.source == SOURCE_REAUTH: + host = self._get_reauth_entry().data[CONF_HOST] else: host = user_input[CONF_HOST] @@ -162,7 +158,7 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): TextSelectorConfig(type=TextSelectorType.PASSWORD) ) } - if not self.discovered and not self.reauth_entry: + if not self.discovered and self.source != SOURCE_REAUTH: schema = {vol.Required(CONF_HOST): TextSelector()} | schema return self.async_show_form( @@ -195,10 +191,11 @@ class 
LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): """Handle device selection from devices offered by the cloud.""" if self.discovered: user_input = {CONF_DEVICE: self.discovered_serial} - elif self.reauth_entry: - if self.reauth_entry.unique_id not in self.devices: + elif self.source == SOURCE_REAUTH: + reauth_unique_id = self._get_reauth_entry().unique_id + if reauth_unique_id not in self.devices: return self.async_abort(reason="reauth_device_not_found") - user_input = {CONF_DEVICE: self.reauth_entry.unique_id} + user_input = {CONF_DEVICE: reauth_unique_id} elif len(self.devices) == 1: user_input = {CONF_DEVICE: list(self.devices.values())[0].serial_number} @@ -251,8 +248,11 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): device = await lametric.device() - if not self.reauth_entry: - await self.async_set_unique_id(device.serial_number) + if self.source != SOURCE_REAUTH: + await self.async_set_unique_id( + device.serial_number, + raise_on_progress=False, + ) self._abort_if_unique_id_configured( updates={CONF_HOST: lametric.host, CONF_API_KEY: lametric.api_key} ) @@ -273,19 +273,14 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): ) ) - if self.reauth_entry: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data={ - **self.reauth_entry.data, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={ CONF_HOST: lametric.host, CONF_API_KEY: lametric.api_key, }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=device.name, diff --git a/homeassistant/components/lametric/diagnostics.py b/homeassistant/components/lametric/diagnostics.py index 69c681e911a..c14ed998ace 100644 --- a/homeassistant/components/lametric/diagnostics.py +++ b/homeassistant/components/lametric/diagnostics.py @@ -26,5 +26,5 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" coordinator: LaMetricDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] # Round-trip via JSON to trigger serialization - data = json.loads(coordinator.data.json()) + data = json.loads(coordinator.data.to_json()) return async_redact_data(data, TO_REDACT) diff --git a/homeassistant/components/lametric/icons.json b/homeassistant/components/lametric/icons.json index 7e1841272cf..229770c96dc 100644 --- a/homeassistant/components/lametric/icons.json +++ b/homeassistant/components/lametric/icons.json @@ -39,7 +39,11 @@ } }, "services": { - "chart": "mdi:chart-areaspline-variant", - "message": "mdi:message" + "chart": { + "service": "mdi:chart-areaspline-variant" + }, + "message": { + "service": "mdi:message" + } } } diff --git a/homeassistant/components/lametric/manifest.json b/homeassistant/components/lametric/manifest.json index 92ccd29c916..5a066d015f2 100644 --- a/homeassistant/components/lametric/manifest.json +++ b/homeassistant/components/lametric/manifest.json @@ -13,8 +13,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["demetriek"], - "quality_scale": "platinum", - "requirements": ["demetriek==0.4.0"], + "requirements": ["demetriek==1.1.0"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:LaMetric:1" diff --git a/homeassistant/components/lametric/notify.py b/homeassistant/components/lametric/notify.py index 7362f0ca402..195924e2da5 100644 --- 
a/homeassistant/components/lametric/notify.py +++ b/homeassistant/components/lametric/notify.py @@ -5,12 +5,14 @@ from __future__ import annotations from typing import Any from demetriek import ( + AlarmSound, LaMetricDevice, LaMetricError, Model, Notification, NotificationIconType, NotificationPriority, + NotificationSound, Simple, Sound, ) @@ -18,8 +20,9 @@ from demetriek import ( from homeassistant.components.notify import ATTR_DATA, BaseNotificationService from homeassistant.const import CONF_ICON from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util.enum import try_parse_enum from .const import CONF_CYCLES, CONF_ICON_TYPE, CONF_PRIORITY, CONF_SOUND, DOMAIN from .coordinator import LaMetricDataUpdateCoordinator @@ -53,7 +56,12 @@ class LaMetricNotificationService(BaseNotificationService): sound = None if CONF_SOUND in data: - sound = Sound(sound=data[CONF_SOUND], category=None) + snd: AlarmSound | NotificationSound | None + if (snd := try_parse_enum(AlarmSound, data[CONF_SOUND])) is None and ( + snd := try_parse_enum(NotificationSound, data[CONF_SOUND]) + ) is None: + raise ServiceValidationError("Unknown sound provided") + sound = Sound(sound=snd, category=None) notification = Notification( icon_type=NotificationIconType(data.get(CONF_ICON_TYPE, "none")), diff --git a/homeassistant/components/lametric/number.py b/homeassistant/components/lametric/number.py index cea9debb04b..1025e04a4a8 100644 --- a/homeassistant/components/lametric/number.py +++ b/homeassistant/components/lametric/number.py @@ -25,6 +25,7 @@ class LaMetricNumberEntityDescription(NumberEntityDescription): """Class describing LaMetric number entities.""" value_fn: Callable[[Device], int | None] + has_fn: Callable[[Device], bool] = lambda device: True set_value_fn: Callable[[LaMetricDevice, float], Awaitable[Any]] @@ -49,7 +50,8 @@ NUMBERS = [ native_step=1, native_min_value=0, native_max_value=100, - value_fn=lambda device: device.audio.volume, + has_fn=lambda device: bool(device.audio), + value_fn=lambda device: device.audio.volume if device.audio else 0, set_value_fn=lambda api, volume: api.audio(volume=int(volume)), ), ] diff --git a/homeassistant/components/lametric/quality_scale.yaml b/homeassistant/components/lametric/quality_scale.yaml new file mode 100644 index 00000000000..a8982bb938b --- /dev/null +++ b/homeassistant/components/lametric/quality_scale.yaml @@ -0,0 +1,75 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: todo + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: done + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: + status: todo + comment: | + Devices are documented, but some are missing. For example, their pro + strip is supported as well. + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: todo + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/lametric/services.py b/homeassistant/components/lametric/services.py index d5191e0a434..2d9cd8f222d 100644 --- a/homeassistant/components/lametric/services.py +++ b/homeassistant/components/lametric/services.py @@ -19,8 +19,9 @@ import voluptuous as vol from homeassistant.const import CONF_DEVICE_ID, CONF_ICON from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv +from homeassistant.util.enum import try_parse_enum from .const import ( CONF_CYCLES, @@ -118,7 +119,12 @@ async def async_send_notification( """Send a notification to an LaMetric device.""" sound = None if CONF_SOUND in call.data: - sound = Sound(sound=call.data[CONF_SOUND], category=None) + snd: AlarmSound | NotificationSound | None + if (snd := try_parse_enum(AlarmSound, call.data[CONF_SOUND])) is None and ( + snd := try_parse_enum(NotificationSound, call.data[CONF_SOUND]) + ) is None: + raise ServiceValidationError("Unknown sound provided") + sound = Sound(sound=snd, category=None) notification = Notification( icon_type=NotificationIconType(call.data[CONF_ICON_TYPE]), diff --git a/homeassistant/components/lametric/strings.json b/homeassistant/components/lametric/strings.json index 87bda01e305..0fd6f5a12dc 100644 --- a/homeassistant/components/lametric/strings.json +++ b/homeassistant/components/lametric/strings.json @@ -21,8 +21,11 @@ "api_key": "You can find this API key in [devices page in your LaMetric developer account](https://developer.lametric.com/user/devices)."
} }, - "user_cloud_select_device": { + "cloud_select_device": { "data": { + "device": "Device" + }, + "data_description": { "device": "Select the LaMetric device to add" } } diff --git a/homeassistant/components/lametric/switch.py b/homeassistant/components/lametric/switch.py index 9689bb7b802..3aabfaf17e1 100644 --- a/homeassistant/components/lametric/switch.py +++ b/homeassistant/components/lametric/switch.py @@ -25,6 +25,7 @@ class LaMetricSwitchEntityDescription(SwitchEntityDescription): """Class describing LaMetric switch entities.""" available_fn: Callable[[Device], bool] = lambda device: True + has_fn: Callable[[Device], bool] = lambda device: True is_on_fn: Callable[[Device], bool] set_fn: Callable[[LaMetricDevice, bool], Awaitable[Any]] @@ -34,8 +35,11 @@ SWITCHES = [ key="bluetooth", translation_key="bluetooth", entity_category=EntityCategory.CONFIG, - available_fn=lambda device: device.bluetooth.available, - is_on_fn=lambda device: device.bluetooth.active, + available_fn=lambda device: bool( + device.bluetooth and device.bluetooth.available + ), + has_fn=lambda device: bool(device.bluetooth), + is_on_fn=lambda device: bool(device.bluetooth and device.bluetooth.active), set_fn=lambda api, active: api.bluetooth(active=active), ), ] @@ -54,6 +58,7 @@ async def async_setup_entry( description=description, ) for description in SWITCHES + if description.has_fn(coordinator.data) ) diff --git a/homeassistant/components/landisgyr_heat_meter/__init__.py b/homeassistant/components/landisgyr_heat_meter/__init__.py index a2fc1320c2b..5cbdc593100 100644 --- a/homeassistant/components/landisgyr_heat_meter/__init__.py +++ b/homeassistant/components/landisgyr_heat_meter/__init__.py @@ -73,6 +73,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> hass, config_entry.entry_id, update_entity_unique_id ) - _LOGGER.info("Migration to version %s successful", config_entry.version) + _LOGGER.debug("Migration to version %s successful", config_entry.version) return True diff --git a/homeassistant/components/landisgyr_heat_meter/strings.json b/homeassistant/components/landisgyr_heat_meter/strings.json index 4bae2490006..31f08ded79f 100644 --- a/homeassistant/components/landisgyr_heat_meter/strings.json +++ b/homeassistant/components/landisgyr_heat_meter/strings.json @@ -12,6 +12,9 @@ } } }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } diff --git a/homeassistant/components/lannouncer/manifest.json b/homeassistant/components/lannouncer/manifest.json index c04d9e87655..9d0942bd14f 100644 --- a/homeassistant/components/lannouncer/manifest.json +++ b/homeassistant/components/lannouncer/manifest.json @@ -3,5 +3,6 @@ "name": "LANnouncer", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/lannouncer", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/lastfm/config_flow.py b/homeassistant/components/lastfm/config_flow.py index c6ea120242d..0e1f680dd63 100644 --- a/homeassistant/components/lastfm/config_flow.py +++ b/homeassistant/components/lastfm/config_flow.py @@ -11,7 +11,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_KEY from homeassistant.core import callback @@ -80,7 +80,7 @@ class 
LastFmConfigFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> LastFmOptionsFlowHandler: """Get the options flow for this handler.""" - return LastFmOptionsFlowHandler(config_entry) + return LastFmOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -155,7 +155,7 @@ class LastFmConfigFlowHandler(ConfigFlow, domain=DOMAIN): ) -class LastFmOptionsFlowHandler(OptionsFlowWithConfigEntry): +class LastFmOptionsFlowHandler(OptionsFlow): """LastFm Options flow handler.""" async def async_step_init( @@ -163,24 +163,25 @@ class LastFmOptionsFlowHandler(OptionsFlowWithConfigEntry): ) -> ConfigFlowResult: """Initialize form.""" errors: dict[str, str] = {} + options = self.config_entry.options if user_input is not None: users, errors = validate_lastfm_users( - self.options[CONF_API_KEY], user_input[CONF_USERS] + options[CONF_API_KEY], user_input[CONF_USERS] ) user_input[CONF_USERS] = users if not errors: return self.async_create_entry( title="LastFM", data={ - **self.options, + **options, CONF_USERS: user_input[CONF_USERS], }, ) - if self.options[CONF_MAIN_USER]: + if options[CONF_MAIN_USER]: try: main_user, _ = get_lastfm_user( - self.options[CONF_API_KEY], - self.options[CONF_MAIN_USER], + options[CONF_API_KEY], + options[CONF_MAIN_USER], ) friends_response = await self.hass.async_add_executor_job( main_user.get_friends @@ -206,6 +207,6 @@ class LastFmOptionsFlowHandler(OptionsFlowWithConfigEntry): ), } ), - user_input or self.options, + user_input or options, ), ) diff --git a/homeassistant/components/launch_library/__init__.py b/homeassistant/components/launch_library/__init__.py index 66e7eb832fe..6bfd3bc9adf 100644 --- a/homeassistant/components/launch_library/__init__.py +++ b/homeassistant/components/launch_library/__init__.py @@ -51,6 +51,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update, update_interval=timedelta(hours=1), diff --git a/homeassistant/components/launch_library/config_flow.py b/homeassistant/components/launch_library/config_flow.py index 3cdff3650b3..37b80fbff8a 100644 --- a/homeassistant/components/launch_library/config_flow.py +++ b/homeassistant/components/launch_library/config_flow.py @@ -18,10 +18,6 @@ class LaunchLibraryFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - # Check if already configured - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is not None: return self.async_create_entry(title="Launch Library", data=user_input) diff --git a/homeassistant/components/launch_library/manifest.json b/homeassistant/components/launch_library/manifest.json index 00f11f95a44..3258a9a34fb 100644 --- a/homeassistant/components/launch_library/manifest.json +++ b/homeassistant/components/launch_library/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/launch_library", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["pylaunches==2.0.0"] + "requirements": ["pylaunches==2.0.0"], + "single_config_entry": true } diff --git a/homeassistant/components/launch_library/strings.json b/homeassistant/components/launch_library/strings.json index f3cca9fc581..a587544f836 100644 --- a/homeassistant/components/launch_library/strings.json +++ 
b/homeassistant/components/launch_library/strings.json @@ -4,9 +4,6 @@ "user": { "description": "Do you want to configure the Launch Library?" } - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, "entity": { diff --git a/homeassistant/components/laundrify/__init__.py b/homeassistant/components/laundrify/__init__.py index 9eb15625319..b08624b6d23 100644 --- a/homeassistant/components/laundrify/__init__.py +++ b/homeassistant/components/laundrify/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import logging + from laundrify_aio import LaundrifyAPI from laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException @@ -14,7 +16,9 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DEFAULT_POLL_INTERVAL, DOMAIN from .coordinator import LaundrifyUpdateCoordinator -PLATFORMS = [Platform.BINARY_SENSOR] +_LOGGER = logging.getLogger(__name__) + +PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -51,3 +55,21 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok + + +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate entry.""" + + _LOGGER.debug("Migrating from version %s", entry.version) + + if entry.version == 1: + # 1 -> 2: Unique ID from integer to string + if entry.minor_version == 1: + minor_version = 2 + hass.config_entries.async_update_entry( + entry, unique_id=str(entry.unique_id), minor_version=minor_version + ) + + _LOGGER.debug("Migration successful") + + return True diff --git a/homeassistant/components/laundrify/binary_sensor.py b/homeassistant/components/laundrify/binary_sensor.py index c94c943a17d..cee6aa6c754 100644 --- a/homeassistant/components/laundrify/binary_sensor.py +++ b/homeassistant/components/laundrify/binary_sensor.py @@ -44,7 +44,6 @@ class LaundrifyPowerPlug( _attr_device_class = BinarySensorDeviceClass.RUNNING _attr_unique_id: str _attr_has_entity_name = True - _attr_name = None _attr_translation_key = "wash_cycle" def __init__( diff --git a/homeassistant/components/laundrify/config_flow.py b/homeassistant/components/laundrify/config_flow.py index 5a608954321..22988af3241 100644 --- a/homeassistant/components/laundrify/config_flow.py +++ b/homeassistant/components/laundrify/config_flow.py @@ -29,6 +29,7 @@ class LaundrifyConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for laundrify.""" VERSION = 1 + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -64,7 +65,7 @@ class LaundrifyConfigFlow(ConfigFlow, domain=DOMAIN): else: entry_data = {CONF_ACCESS_TOKEN: access_token} - await self.async_set_unique_id(account_id) + await self.async_set_unique_id(str(account_id)) self._abort_if_unique_id_configured() # Create a new entry if it doesn't exist diff --git a/homeassistant/components/laundrify/sensor.py b/homeassistant/components/laundrify/sensor.py new file mode 100644 index 00000000000..98169f95fce --- /dev/null +++ b/homeassistant/components/laundrify/sensor.py @@ -0,0 +1,99 @@ +"""Platform for sensor integration.""" + +import logging + +from laundrify_aio import LaundrifyDevice +from laundrify_aio.exceptions import LaundrifyDeviceException + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorStateClass, +) +from 
homeassistant.config_entries import ConfigEntry +from homeassistant.const import UnitOfEnergy, UnitOfPower +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import LaundrifyUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Add power sensor for passed config_entry in HA.""" + + coordinator: LaundrifyUpdateCoordinator = hass.data[DOMAIN][config.entry_id][ + "coordinator" + ] + + sensor_entities: list[LaundrifyPowerSensor | LaundrifyEnergySensor] = [] + for device in coordinator.data.values(): + sensor_entities.append(LaundrifyPowerSensor(device)) + sensor_entities.append(LaundrifyEnergySensor(coordinator, device)) + + async_add_entities(sensor_entities) + + +class LaundrifyBaseSensor(SensorEntity): + """Base class for Laundrify sensors.""" + + _attr_has_entity_name = True + + def __init__(self, device: LaundrifyDevice) -> None: + """Initialize the sensor.""" + self._device = device + self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device.id)}) + self._attr_unique_id = f"{device.id}_{self._attr_device_class}" + + +class LaundrifyPowerSensor(LaundrifyBaseSensor): + """Representation of a Power sensor.""" + + _attr_device_class = SensorDeviceClass.POWER + _attr_native_unit_of_measurement = UnitOfPower.WATT + _attr_state_class = SensorStateClass.MEASUREMENT + _attr_suggested_display_precision = 0 + + async def async_update(self) -> None: + """Fetch latest power measurement from the device.""" + try: + power = await self._device.get_power() + except LaundrifyDeviceException as err: + _LOGGER.debug("Couldn't load power for %s: %s", self._attr_unique_id, err) + self._attr_available = False + else: + _LOGGER.debug("Retrieved power for %s: %s", self._attr_unique_id, power) + if power is not None: + self._attr_available = True + self._attr_native_value = power + + +class LaundrifyEnergySensor( + CoordinatorEntity[LaundrifyUpdateCoordinator], LaundrifyBaseSensor +): + """Representation of an Energy sensor.""" + + _attr_device_class = SensorDeviceClass.ENERGY + _attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR + _attr_state_class = SensorStateClass.TOTAL + _attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR + _attr_suggested_display_precision = 2 + + def __init__( + self, coordinator: LaundrifyUpdateCoordinator, device: LaundrifyDevice + ) -> None: + """Initialize the sensor.""" + CoordinatorEntity.__init__(self, coordinator) + LaundrifyBaseSensor.__init__(self, device) + + @property + def native_value(self) -> float: + """Return the total energy of the device.""" + device = self.coordinator.data[self._device.id] + return float(device.totalEnergy) diff --git a/homeassistant/components/lawn_mower/__init__.py b/homeassistant/components/lawn_mower/__init__.py index 9eef6ad8343..a8c52b72a81 100644 --- a/homeassistant/components/lawn_mower/__init__.py +++ b/homeassistant/components/lawn_mower/__init__.py @@ -3,16 +3,18 @@ from __future__ import annotations from datetime import timedelta -from functools import cached_property import logging from typing import final +from propcache import cached_property + from homeassistant.config_entries import ConfigEntry from homeassistant.core import 
HomeAssistant from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import ( DOMAIN, @@ -25,6 +27,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[LawnMowerEntity]] = HassKey(DOMAIN) PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE SCAN_INTERVAL = timedelta(seconds=60) @@ -32,7 +35,7 @@ SCAN_INTERVAL = timedelta(seconds=60) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the lawn_mower component.""" - component = hass.data[DOMAIN] = EntityComponent[LawnMowerEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[LawnMowerEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -55,14 +58,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up lawn mower devices.""" - component: EntityComponent[LawnMowerEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[LawnMowerEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class LawnMowerEntityEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -86,9 +87,7 @@ class LawnMowerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): @property def state(self) -> str | None: """Return the current state.""" - if (activity := self.activity) is None: - return None - return str(activity) + return self.activity @cached_property def activity(self) -> LawnMowerActivity | None: diff --git a/homeassistant/components/lawn_mower/icons.json b/homeassistant/components/lawn_mower/icons.json index b25bf927fcd..2fa1f79efa1 100644 --- a/homeassistant/components/lawn_mower/icons.json +++ b/homeassistant/components/lawn_mower/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "dock": "mdi:home-import-outline", - "pause": "mdi:pause", - "start_mowing": "mdi:play" + "dock": { + "service": "mdi:home-import-outline" + }, + "pause": { + "service": "mdi:pause" + }, + "start_mowing": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index 75f417cb3a5..eb26ef48e4e 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -2,92 +2,77 @@ from __future__ import annotations -from collections.abc import Callable from functools import partial import logging import pypck +from pypck.connection import PchkConnectionManager -from homeassistant import config_entries +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - CONF_ADDRESS, CONF_DEVICE_ID, CONF_DOMAIN, + CONF_ENTITIES, CONF_IP_ADDRESS, - CONF_NAME, CONF_PASSWORD, CONF_PORT, - CONF_RESOURCE, CONF_USERNAME, + Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import 
Entity +from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.typing import ConfigType from .const import ( ADD_ENTITIES_CALLBACKS, + CONF_ACKNOWLEDGE, CONF_DIM_MODE, CONF_DOMAIN_DATA, CONF_SK_NUM_TRIES, + CONF_TRANSITION, CONNECTION, DOMAIN, PLATFORMS, ) from .helpers import ( AddressType, - DeviceConnectionType, InputType, async_update_config_entry, generate_unique_id, - get_device_model, - import_lcn_config, register_lcn_address_devices, register_lcn_host_device, ) -from .schemas import CONFIG_SCHEMA # noqa: F401 -from .services import SERVICES +from .services import register_services from .websocket import register_panel_and_ws_api _LOGGER = logging.getLogger(__name__) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the LCN component.""" - if DOMAIN not in config: - return True + hass.data.setdefault(DOMAIN, {}) - # initialize a config_flow for all LCN configurations read from - # configuration.yaml - config_entries_data = import_lcn_config(config[DOMAIN]) + await register_services(hass) + await register_panel_and_ws_api(hass) - for config_entry_data in config_entries_data: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=config_entry_data, - ) - ) return True -async def async_setup_entry( - hass: HomeAssistant, config_entry: config_entries.ConfigEntry -) -> bool: +async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up a connection to PCHK host from a config entry.""" - hass.data.setdefault(DOMAIN, {}) if config_entry.entry_id in hass.data[DOMAIN]: return False settings = { "SK_NUM_TRIES": config_entry.data[CONF_SK_NUM_TRIES], "DIM_MODE": pypck.lcn_defs.OutputPortDimMode[config_entry.data[CONF_DIM_MODE]], + "ACKNOWLEDGE": config_entry.data[CONF_ACKNOWLEDGE], } # connect to PCHK - lcn_connection = pypck.connection.PchkConnectionManager( + lcn_connection = PchkConnectionManager( config_entry.data[CONF_IP_ADDRESS], config_entry.data[CONF_PORT], config_entry.data[CONF_USERNAME], @@ -136,21 +121,46 @@ async def async_setup_entry( ) lcn_connection.register_for_inputs(input_received) - # register service calls - for service_name, service in SERVICES: - if not hass.services.has_service(DOMAIN, service_name): - hass.services.async_register( - DOMAIN, service_name, service(hass).async_call_service, service.schema - ) - - await register_panel_and_ws_api(hass) - return True -async def async_unload_entry( - hass: HomeAssistant, config_entry: config_entries.ConfigEntry -) -> bool: +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + new_data = {**config_entry.data} + + if config_entry.version == 1: + # update to 1.2 (add acknowledge flag) + if config_entry.minor_version < 2: + new_data[CONF_ACKNOWLEDGE] = False + + # update to 2.1 (fix transitions for lights and switches) + new_entities_data = [*new_data[CONF_ENTITIES]] + for entity in new_entities_data: + if entity[CONF_DOMAIN] in [Platform.LIGHT, Platform.SCENE]: + if entity[CONF_DOMAIN_DATA][CONF_TRANSITION] is None: + entity[CONF_DOMAIN_DATA][CONF_TRANSITION] = 0 + entity[CONF_DOMAIN_DATA][CONF_TRANSITION] /= 1000.0 + new_data[CONF_ENTITIES] = new_entities_data + + 
hass.config_entries.async_update_entry( + config_entry, data=new_data, minor_version=1, version=2 + ) + + _LOGGER.debug( + "Migration to configuration version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + return True + + +async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Close connection to PCHK host represented by config_entry.""" # forward unloading to platforms unload_ok = await hass.config_entries.async_unload_platforms( @@ -161,17 +171,12 @@ async def async_unload_entry( host = hass.data[DOMAIN].pop(config_entry.entry_id) await host[CONNECTION].async_close() - # unregister service calls - if unload_ok and not hass.data[DOMAIN]: # check if this is the last entry to unload - for service_name, _ in SERVICES: - hass.services.async_remove(DOMAIN, service_name) - return unload_ok def async_host_input_received( hass: HomeAssistant, - config_entry: config_entries.ConfigEntry, + config_entry: ConfigEntry, device_registry: dr.DeviceRegistry, inp: pypck.inputs.Input, ) -> None: @@ -241,75 +246,3 @@ def _async_fire_send_keys_event( event_data.update({CONF_DEVICE_ID: device.id}) hass.bus.async_fire("lcn_send_keys", event_data) - - -class LcnEntity(Entity): - """Parent class for all entities associated with the LCN component.""" - - _attr_should_poll = False - - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: - """Initialize the LCN device.""" - self.config = config - self.entry_id = entry_id - self.device_connection = device_connection - self._unregister_for_inputs: Callable | None = None - self._name: str = config[CONF_NAME] - - @property - def address(self) -> AddressType: - """Return LCN address.""" - return ( - self.device_connection.seg_id, - self.device_connection.addr_id, - self.device_connection.is_group, - ) - - @property - def unique_id(self) -> str: - """Return a unique ID.""" - return generate_unique_id( - self.entry_id, self.address, self.config[CONF_RESOURCE] - ) - - @property - def device_info(self) -> DeviceInfo | None: - """Return device specific attributes.""" - address = f"{'g' if self.address[2] else 'm'}{self.address[0]:03d}{self.address[1]:03d}" - model = ( - "LCN resource" - f" ({get_device_model(self.config[CONF_DOMAIN], self.config[CONF_DOMAIN_DATA])})" - ) - - return DeviceInfo( - identifiers={(DOMAIN, self.unique_id)}, - name=f"{address}.{self.config[CONF_RESOURCE]}", - model=model, - manufacturer="Issendorff", - via_device=( - DOMAIN, - generate_unique_id(self.entry_id, self.config[CONF_ADDRESS]), - ), - ) - - async def async_added_to_hass(self) -> None: - """Run when entity about to be added to hass.""" - if not self.device_connection.is_group: - self._unregister_for_inputs = self.device_connection.register_for_inputs( - self.input_received - ) - - async def async_will_remove_from_hass(self) -> None: - """Run when entity will be removed from hass.""" - if self._unregister_for_inputs is not None: - self._unregister_for_inputs() - - @property - def name(self) -> str: - """Return the name of the device.""" - return self._name - - def input_received(self, input_obj: InputType) -> None: - """Set state/value when LCN input object (command) is received.""" diff --git a/homeassistant/components/lcn/binary_sensor.py b/homeassistant/components/lcn/binary_sensor.py index a0f8e1cf360..d0ce4815f19 100644 --- a/homeassistant/components/lcn/binary_sensor.py +++ b/homeassistant/components/lcn/binary_sensor.py @@ -5,17 +5,23 @@ from functools import 
partial import pypck +from homeassistant.components.automation import automations_with_entity from homeassistant.components.binary_sensor import ( DOMAIN as DOMAIN_BINARY_SENSOR, BinarySensorEntity, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ADDRESS, CONF_DOMAIN, CONF_ENTITIES, CONF_SOURCE +from homeassistant.const import CONF_DOMAIN, CONF_ENTITIES, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from homeassistant.helpers.typing import ConfigType -from . import LcnEntity from .const import ( ADD_ENTITIES_CALLBACKS, BINSENSOR_PORTS, @@ -23,11 +29,11 @@ from .const import ( DOMAIN, SETPOINTS, ) -from .helpers import DeviceConnectionType, InputType, get_device_connection +from .entity import LcnEntity +from .helpers import InputType def add_lcn_entities( - hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, entity_configs: Iterable[ConfigType], @@ -35,26 +41,12 @@ def add_lcn_entities( """Add entities for this domain.""" entities: list[LcnRegulatorLockSensor | LcnBinarySensor | LcnLockKeysSensor] = [] for entity_config in entity_configs: - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in SETPOINTS: - entities.append( - LcnRegulatorLockSensor( - entity_config, config_entry.entry_id, device_connection - ) - ) + entities.append(LcnRegulatorLockSensor(entity_config, config_entry)) elif entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in BINSENSOR_PORTS: - entities.append( - LcnBinarySensor(entity_config, config_entry.entry_id, device_connection) - ) + entities.append(LcnBinarySensor(entity_config, config_entry)) else: # in KEY - entities.append( - LcnLockKeysSensor( - entity_config, config_entry.entry_id, device_connection - ) - ) + entities.append(LcnLockKeysSensor(entity_config, config_entry)) async_add_entities(entities) @@ -67,7 +59,6 @@ async def async_setup_entry( """Set up LCN switch entities from a config entry.""" add_entities = partial( add_lcn_entities, - hass, config_entry, async_add_entities, ) @@ -88,11 +79,9 @@ async def async_setup_entry( class LcnRegulatorLockSensor(LcnEntity, BinarySensorEntity): """Representation of a LCN binary sensor for regulator locks.""" - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN binary sensor.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.setpoint_variable = pypck.lcn_defs.Var[ config[CONF_DOMAIN_DATA][CONF_SOURCE] @@ -101,11 +90,28 @@ class LcnRegulatorLockSensor(LcnEntity, BinarySensorEntity): async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" await super().async_added_to_hass() + if not self.device_connection.is_group: await self.device_connection.activate_status_request_handler( self.setpoint_variable ) + entity_automations = automations_with_entity(self.hass, self.entity_id) + entity_scripts = scripts_with_entity(self.hass, self.entity_id) + if entity_automations + entity_scripts: + async_create_issue( + self.hass, + DOMAIN, + 
f"deprecated_binary_sensor_{self.entity_id}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_regulatorlock_sensor", + translation_placeholders={ + "entity": f"{DOMAIN_BINARY_SENSOR}.{self.name.lower().replace(' ', '_')}", + }, + ) + async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() @@ -113,6 +119,9 @@ class LcnRegulatorLockSensor(LcnEntity, BinarySensorEntity): await self.device_connection.cancel_status_request_handler( self.setpoint_variable ) + async_delete_issue( + self.hass, DOMAIN, f"deprecated_binary_sensor_{self.entity_id}" + ) def input_received(self, input_obj: InputType) -> None: """Set sensor value when LCN input object (command) is received.""" @@ -129,11 +138,9 @@ class LcnRegulatorLockSensor(LcnEntity, BinarySensorEntity): class LcnBinarySensor(LcnEntity, BinarySensorEntity): """Representation of a LCN binary sensor for binary sensor ports.""" - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN binary sensor.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.bin_sensor_port = pypck.lcn_defs.BinSensorPort[ config[CONF_DOMAIN_DATA][CONF_SOURCE] @@ -167,25 +174,43 @@ class LcnBinarySensor(LcnEntity, BinarySensorEntity): class LcnLockKeysSensor(LcnEntity, BinarySensorEntity): """Representation of a LCN sensor for key locks.""" - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN sensor.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.source = pypck.lcn_defs.Key[config[CONF_DOMAIN_DATA][CONF_SOURCE]] async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" await super().async_added_to_hass() + if not self.device_connection.is_group: await self.device_connection.activate_status_request_handler(self.source) + entity_automations = automations_with_entity(self.hass, self.entity_id) + entity_scripts = scripts_with_entity(self.hass, self.entity_id) + if entity_automations + entity_scripts: + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_binary_sensor_{self.entity_id}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_keylock_sensor", + translation_placeholders={ + "entity": f"{DOMAIN_BINARY_SENSOR}.{self.name.lower().replace(' ', '_')}", + }, + ) + async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() if not self.device_connection.is_group: await self.device_connection.cancel_status_request_handler(self.source) + async_delete_issue( + self.hass, DOMAIN, f"deprecated_binary_sensor_{self.entity_id}" + ) def input_received(self, input_obj: InputType) -> None: """Set sensor value when LCN input object (command) is received.""" diff --git a/homeassistant/components/lcn/climate.py b/homeassistant/components/lcn/climate.py index 0142894a16b..360b732c02e 100644 --- a/homeassistant/components/lcn/climate.py +++ b/homeassistant/components/lcn/climate.py @@ -15,7 +15,6 @@ from 
homeassistant.components.climate import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_TEMPERATURE, - CONF_ADDRESS, CONF_DOMAIN, CONF_ENTITIES, CONF_SOURCE, @@ -26,7 +25,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType -from . import LcnEntity from .const import ( ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, @@ -36,27 +34,21 @@ from .const import ( CONF_SETPOINT, DOMAIN, ) -from .helpers import DeviceConnectionType, InputType, get_device_connection +from .entity import LcnEntity +from .helpers import InputType PARALLEL_UPDATES = 0 def add_lcn_entities( - hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, entity_configs: Iterable[ConfigType], ) -> None: """Add entities for this domain.""" - entities: list[LcnClimate] = [] - for entity_config in entity_configs: - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - - entities.append( - LcnClimate(entity_config, config_entry.entry_id, device_connection) - ) + entities = [ + LcnClimate(entity_config, config_entry) for entity_config in entity_configs + ] async_add_entities(entities) @@ -69,7 +61,6 @@ async def async_setup_entry( """Set up LCN switch entities from a config entry.""" add_entities = partial( add_lcn_entities, - hass, config_entry, async_add_entities, ) @@ -90,13 +81,9 @@ async def async_setup_entry( class LcnClimate(LcnEntity, ClimateEntity): """Representation of a LCN climate device.""" - _enable_turn_on_off_backwards_compatibility = False - - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize of a LCN climate device.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.variable = pypck.lcn_defs.Var[config[CONF_DOMAIN_DATA][CONF_SOURCE]] self.setpoint = pypck.lcn_defs.Var[config[CONF_DOMAIN_DATA][CONF_SETPOINT]] diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index 664f32e5585..008265e62ae 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -19,13 +19,12 @@ from homeassistant.const import ( CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType -from .const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DIM_MODES, DOMAIN -from .helpers import purge_device_registry, purge_entity_registry +from . 
import PchkConnectionManager +from .const import CONF_ACKNOWLEDGE, CONF_DIM_MODE, CONF_SK_NUM_TRIES, DIM_MODES, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -36,6 +35,7 @@ CONFIG_DATA = { vol.Required(CONF_PASSWORD, default=""): str, vol.Required(CONF_SK_NUM_TRIES, default=0): cv.positive_int, vol.Required(CONF_DIM_MODE, default="STEPS200"): vol.In(DIM_MODES), + vol.Required(CONF_ACKNOWLEDGE, default=False): cv.boolean, } USER_DATA = {vol.Required(CONF_HOST, default="pchk"): str, **CONFIG_DATA} @@ -69,15 +69,17 @@ async def validate_connection(data: ConfigType) -> str | None: password = data[CONF_PASSWORD] sk_num_tries = data[CONF_SK_NUM_TRIES] dim_mode = data[CONF_DIM_MODE] + acknowledge = data[CONF_ACKNOWLEDGE] settings = { "SK_NUM_TRIES": sk_num_tries, "DIM_MODE": pypck.lcn_defs.OutputPortDimMode[dim_mode], + "ACKNOWLEDGE": acknowledge, } _LOGGER.debug("Validating connection parameters to PCHK host '%s'", host_name) - connection = pypck.connection.PchkConnectionManager( + connection = PchkConnectionManager( host, port, username, password, settings=settings ) @@ -105,56 +107,8 @@ async def validate_connection(data: ConfigType) -> str | None: class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a LCN config flow.""" - VERSION = 1 - - async def async_step_import( - self, data: ConfigType - ) -> config_entries.ConfigFlowResult: - """Import existing configuration from LCN.""" - # validate the imported connection parameters - if error := await validate_connection(data): - async_create_issue( - self.hass, - DOMAIN, - error, - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.ERROR, - translation_key=error, - translation_placeholders={ - "url": "/config/integrations/dashboard/add?domain=lcn" - }, - ) - return self.async_abort(reason=error) - - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "LCN", - }, - ) - - # check if we already have a host with the same address configured - if entry := get_config_entry(self.hass, data): - entry.source = config_entries.SOURCE_IMPORT - # Cleanup entity and device registry, if we imported from configuration.yaml to - # remove orphans when entities were removed from configuration - purge_entity_registry(self.hass, entry.entry_id, data) - purge_device_registry(self.hass, entry.entry_id, data) - - self.hass.config_entries.async_update_entry(entry, data=data) - return self.async_abort(reason="existing_configuration_updated") - - return self.async_create_entry(title=f"{data[CONF_HOST]}", data=data) + VERSION = 2 + MINOR_VERSION = 1 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -190,28 +144,26 @@ class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> config_entries.ConfigFlowResult: """Reconfigure LCN configuration.""" + reconfigure_entry = self._get_reconfigure_entry() errors = None - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - if user_input is not None: - user_input[CONF_HOST] = entry.data[CONF_HOST] + user_input[CONF_HOST] = reconfigure_entry.data[CONF_HOST] - await self.hass.config_entries.async_unload(entry.entry_id) + await self.hass.config_entries.async_unload(reconfigure_entry.entry_id) if (error := 
await validate_connection(user_input)) is not None: errors = {CONF_BASE: error} if errors is None: - data = entry.data.copy() - data.update(user_input) - self.hass.config_entries.async_update_entry(entry, data=data) - await self.hass.config_entries.async_setup(entry.entry_id) - return self.async_abort(reason="reconfigure_successful") + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates=user_input + ) - await self.hass.config_entries.async_setup(entry.entry_id) + await self.hass.config_entries.async_setup(reconfigure_entry.entry_id) return self.async_show_form( step_id="reconfigure", - data_schema=self.add_suggested_values_to_schema(CONFIG_SCHEMA, entry.data), - errors=errors or {}, + data_schema=self.add_suggested_values_to_schema( + CONFIG_SCHEMA, reconfigure_entry.data + ), + errors=errors, ) diff --git a/homeassistant/components/lcn/const.py b/homeassistant/components/lcn/const.py index 24d2e68495c..97aeeecd8b5 100644 --- a/homeassistant/components/lcn/const.py +++ b/homeassistant/components/lcn/const.py @@ -25,6 +25,7 @@ CONF_SOFTWARE_SERIAL = "software_serial" CONF_HARDWARE_TYPE = "hardware_type" CONF_DOMAIN_DATA = "domain_data" +CONF_ACKNOWLEDGE = "acknowledge" CONF_CONNECTIONS = "connections" CONF_SK_NUM_TRIES = "sk_num_tries" CONF_OUTPUT = "output" @@ -41,6 +42,7 @@ CONF_LED = "led" CONF_KEYS = "keys" CONF_TIME = "time" CONF_TIME_UNIT = "time_unit" +CONF_LOCK_TIME = "lock_time" CONF_TABLE = "table" CONF_ROW = "row" CONF_TEXT = "text" diff --git a/homeassistant/components/lcn/cover.py b/homeassistant/components/lcn/cover.py index 1e428a350d6..042461b6af2 100644 --- a/homeassistant/components/lcn/cover.py +++ b/homeassistant/components/lcn/cover.py @@ -8,12 +8,11 @@ import pypck from homeassistant.components.cover import DOMAIN as DOMAIN_COVER, CoverEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ADDRESS, CONF_DOMAIN, CONF_ENTITIES +from homeassistant.const import CONF_DOMAIN, CONF_ENTITIES from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType -from . 
import LcnEntity from .const import ( ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, @@ -21,13 +20,13 @@ from .const import ( CONF_REVERSE_TIME, DOMAIN, ) -from .helpers import DeviceConnectionType, InputType, get_device_connection +from .entity import LcnEntity +from .helpers import InputType PARALLEL_UPDATES = 0 def add_lcn_entities( - hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, entity_configs: Iterable[ConfigType], @@ -35,18 +34,10 @@ def add_lcn_entities( """Add entities for this domain.""" entities: list[LcnOutputsCover | LcnRelayCover] = [] for entity_config in entity_configs: - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - if entity_config[CONF_DOMAIN_DATA][CONF_MOTOR] in "OUTPUTS": - entities.append( - LcnOutputsCover(entity_config, config_entry.entry_id, device_connection) - ) + entities.append(LcnOutputsCover(entity_config, config_entry)) else: # in RELAYS - entities.append( - LcnRelayCover(entity_config, config_entry.entry_id, device_connection) - ) + entities.append(LcnRelayCover(entity_config, config_entry)) async_add_entities(entities) @@ -59,7 +50,6 @@ async def async_setup_entry( """Set up LCN cover entities from a config entry.""" add_entities = partial( add_lcn_entities, - hass, config_entry, async_add_entities, ) @@ -85,11 +75,9 @@ class LcnOutputsCover(LcnEntity, CoverEntity): _attr_is_opening = False _attr_assumed_state = True - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN cover.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.output_ids = [ pypck.lcn_defs.OutputPort["OUTPUTUP"].value, @@ -189,11 +177,9 @@ class LcnRelayCover(LcnEntity, CoverEntity): _attr_is_opening = False _attr_assumed_state = True - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN cover.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.motor = pypck.lcn_defs.MotorPort[config[CONF_DOMAIN_DATA][CONF_MOTOR]] self.motor_port_onoff = self.motor.value * 2 diff --git a/homeassistant/components/lcn/entity.py b/homeassistant/components/lcn/entity.py new file mode 100644 index 00000000000..12d8f966801 --- /dev/null +++ b/homeassistant/components/lcn/entity.py @@ -0,0 +1,90 @@ +"""LCN parent entity class.""" + +from collections.abc import Callable + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ADDRESS, CONF_DOMAIN, CONF_NAME, CONF_RESOURCE +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.typing import ConfigType + +from .const import CONF_DOMAIN_DATA, DOMAIN +from .helpers import ( + AddressType, + DeviceConnectionType, + InputType, + generate_unique_id, + get_device_connection, + get_device_model, +) + + +class LcnEntity(Entity): + """Parent class for all entities associated with the LCN component.""" + + _attr_should_poll = False + device_connection: DeviceConnectionType + + def __init__( + self, + config: ConfigType, + config_entry: ConfigEntry, + ) -> None: + """Initialize the LCN device.""" + self.config = config + self.config_entry = config_entry + 
self.address: AddressType = config[CONF_ADDRESS] + self._unregister_for_inputs: Callable | None = None + self._name: str = config[CONF_NAME] + + @property + def unique_id(self) -> str: + """Return a unique ID.""" + return generate_unique_id( + self.config_entry.entry_id, self.address, self.config[CONF_RESOURCE] + ) + + @property + def device_info(self) -> DeviceInfo | None: + """Return device specific attributes.""" + address = f"{'g' if self.address[2] else 'm'}{self.address[0]:03d}{self.address[1]:03d}" + model = ( + "LCN resource" + f" ({get_device_model(self.config[CONF_DOMAIN], self.config[CONF_DOMAIN_DATA])})" + ) + + return DeviceInfo( + identifiers={(DOMAIN, self.unique_id)}, + name=f"{address}.{self.config[CONF_RESOURCE]}", + model=model, + manufacturer="Issendorff", + via_device=( + DOMAIN, + generate_unique_id( + self.config_entry.entry_id, self.config[CONF_ADDRESS] + ), + ), + ) + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + self.device_connection = get_device_connection( + self.hass, self.config[CONF_ADDRESS], self.config_entry + ) + if not self.device_connection.is_group: + self._unregister_for_inputs = self.device_connection.register_for_inputs( + self.input_received + ) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + if self._unregister_for_inputs is not None: + self._unregister_for_inputs() + + @property + def name(self) -> str: + """Return the name of the device.""" + return self._name + + def input_received(self, input_obj: InputType) -> None: + """Set state/value when LCN input object (command) is received.""" diff --git a/homeassistant/components/lcn/helpers.py b/homeassistant/components/lcn/helpers.py index fd8c59ad46f..6a9c63ea212 100644 --- a/homeassistant/components/lcn/helpers.py +++ b/homeassistant/components/lcn/helpers.py @@ -9,7 +9,6 @@ import re from typing import cast import pypck -import voluptuous as vol from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -19,17 +18,12 @@ from homeassistant.const import ( CONF_DEVICES, CONF_DOMAIN, CONF_ENTITIES, - CONF_HOST, - CONF_IP_ADDRESS, CONF_LIGHTS, CONF_NAME, - CONF_PASSWORD, - CONF_PORT, CONF_RESOURCE, CONF_SENSORS, CONF_SOURCE, CONF_SWITCHES, - CONF_USERNAME, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -38,17 +32,12 @@ from homeassistant.helpers.typing import ConfigType from .const import ( BINSENSOR_PORTS, CONF_CLIMATES, - CONF_CONNECTIONS, - CONF_DIM_MODE, - CONF_DOMAIN_DATA, CONF_HARDWARE_SERIAL, CONF_HARDWARE_TYPE, CONF_OUTPUT, CONF_SCENES, - CONF_SK_NUM_TRIES, CONF_SOFTWARE_SERIAL, CONNECTION, - DEFAULT_NAME, DOMAIN, LED_PORTS, LOGICOP_PORTS, @@ -84,7 +73,7 @@ DOMAIN_LOOKUP = { def get_device_connection( hass: HomeAssistant, address: AddressType, config_entry: ConfigEntry -) -> DeviceConnectionType | None: +) -> DeviceConnectionType: """Return a lcn device_connection.""" host_connection = hass.data[DOMAIN][config_entry.entry_id][CONNECTION] addr = pypck.lcn_addr.LcnAddr(*address) @@ -145,108 +134,6 @@ def generate_unique_id( return unique_id -def import_lcn_config(lcn_config: ConfigType) -> list[ConfigType]: - """Convert lcn settings from configuration.yaml to config_entries data. 
- - Create a list of config_entry data structures like: - - "data": { - "host": "pchk", - "ip_address": "192.168.2.41", - "port": 4114, - "username": "lcn", - "password": "lcn, - "sk_num_tries: 0, - "dim_mode: "STEPS200", - "devices": [ - { - "address": (0, 7, False) - "name": "", - "hardware_serial": -1, - "software_serial": -1, - "hardware_type": -1 - }, ... - ], - "entities": [ - { - "address": (0, 7, False) - "name": "Light_Output1", - "resource": "output1", - "domain": "light", - "domain_data": { - "output": "OUTPUT1", - "dimmable": True, - "transition": 5000.0 - } - }, ... - ] - } - """ - data = {} - for connection in lcn_config[CONF_CONNECTIONS]: - host = { - CONF_HOST: connection[CONF_NAME], - CONF_IP_ADDRESS: connection[CONF_HOST], - CONF_PORT: connection[CONF_PORT], - CONF_USERNAME: connection[CONF_USERNAME], - CONF_PASSWORD: connection[CONF_PASSWORD], - CONF_SK_NUM_TRIES: connection[CONF_SK_NUM_TRIES], - CONF_DIM_MODE: connection[CONF_DIM_MODE], - CONF_DEVICES: [], - CONF_ENTITIES: [], - } - data[connection[CONF_NAME]] = host - - for confkey, domain_config in lcn_config.items(): - if confkey == CONF_CONNECTIONS: - continue - domain = DOMAIN_LOOKUP[confkey] - # loop over entities in configuration.yaml - for domain_data in domain_config: - # remove name and address from domain_data - entity_name = domain_data.pop(CONF_NAME) - address, host_name = domain_data.pop(CONF_ADDRESS) - - if host_name is None: - host_name = DEFAULT_NAME - - # check if we have a new device config - for device_config in data[host_name][CONF_DEVICES]: - if address == device_config[CONF_ADDRESS]: - break - else: # create new device_config - device_config = { - CONF_ADDRESS: address, - CONF_NAME: "", - CONF_HARDWARE_SERIAL: -1, - CONF_SOFTWARE_SERIAL: -1, - CONF_HARDWARE_TYPE: -1, - } - - data[host_name][CONF_DEVICES].append(device_config) - - # insert entity config - resource = get_resource(domain, domain_data).lower() - for entity_config in data[host_name][CONF_ENTITIES]: - if ( - address == entity_config[CONF_ADDRESS] - and resource == entity_config[CONF_RESOURCE] - and domain == entity_config[CONF_DOMAIN] - ): - break - else: # create new entity_config - entity_config = { - CONF_ADDRESS: address, - CONF_NAME: entity_name, - CONF_RESOURCE: resource, - CONF_DOMAIN: domain, - CONF_DOMAIN_DATA: domain_data.copy(), - } - data[host_name][CONF_ENTITIES].append(entity_config) - - return list(data.values()) - - def purge_entity_registry( hass: HomeAssistant, entry_id: str, imported_entry_data: ConfigType ) -> None: @@ -433,26 +320,6 @@ def get_device_config( return None -def has_unique_host_names(hosts: list[ConfigType]) -> list[ConfigType]: - """Validate that all connection names are unique. - - Use 'pchk' as default connection_name (or add a numeric suffix if - pchk' is already in use. - """ - suffix = 0 - for host in hosts: - if host.get(CONF_NAME) is None: - if suffix == 0: - host[CONF_NAME] = DEFAULT_NAME - else: - host[CONF_NAME] = f"{DEFAULT_NAME}{suffix:d}" - suffix += 1 - - schema = vol.Schema(vol.Unique()) - schema([host.get(CONF_NAME) for host in hosts]) - return hosts - - def is_address(value: str) -> tuple[AddressType, str]: """Validate the given address string. 
diff --git a/homeassistant/components/lcn/icons.json b/homeassistant/components/lcn/icons.json index c8b451a79ea..944c3938a92 100644 --- a/homeassistant/components/lcn/icons.json +++ b/homeassistant/components/lcn/icons.json @@ -1,17 +1,43 @@ { "services": { - "output_abs": "mdi:brightness-auto", - "output_rel": "mdi:brightness-7", - "output_toggle": "mdi:toggle-switch", - "relays": "mdi:light-switch-off", - "led": "mdi:led-on", - "var_abs": "mdi:wrench", - "var_reset": "mdi:reload", - "var_rel": "mdi:wrench", - "lock_regulator": "mdi:lock", - "send_keys": "mdi:alarm-panel", - "lock_keys": "mdi:lock", - "dyn_text": "mdi:form-textbox", - "pck": "mdi:package-variant-closed" + "output_abs": { + "service": "mdi:brightness-auto" + }, + "output_rel": { + "service": "mdi:brightness-7" + }, + "output_toggle": { + "service": "mdi:toggle-switch" + }, + "relays": { + "service": "mdi:light-switch-off" + }, + "led": { + "service": "mdi:led-on" + }, + "var_abs": { + "service": "mdi:wrench" + }, + "var_reset": { + "service": "mdi:reload" + }, + "var_rel": { + "service": "mdi:wrench" + }, + "lock_regulator": { + "service": "mdi:lock" + }, + "send_keys": { + "service": "mdi:alarm-panel" + }, + "lock_keys": { + "service": "mdi:lock" + }, + "dyn_text": { + "service": "mdi:form-textbox" + }, + "pck": { + "service": "mdi:package-variant-closed" + } } } diff --git a/homeassistant/components/lcn/light.py b/homeassistant/components/lcn/light.py index 799ed0036d8..9ec660325c8 100644 --- a/homeassistant/components/lcn/light.py +++ b/homeassistant/components/lcn/light.py @@ -15,12 +15,11 @@ from homeassistant.components.light import ( LightEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ADDRESS, CONF_DOMAIN, CONF_ENTITIES +from homeassistant.const import CONF_DOMAIN, CONF_ENTITIES from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType -from . 
import LcnEntity from .const import ( ADD_ENTITIES_CALLBACKS, CONF_DIMMABLE, @@ -30,13 +29,13 @@ from .const import ( DOMAIN, OUTPUT_PORTS, ) -from .helpers import DeviceConnectionType, InputType, get_device_connection +from .entity import LcnEntity +from .helpers import InputType PARALLEL_UPDATES = 0 def add_lcn_entities( - hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, entity_configs: Iterable[ConfigType], @@ -44,18 +43,10 @@ def add_lcn_entities( """Add entities for this domain.""" entities: list[LcnOutputLight | LcnRelayLight] = [] for entity_config in entity_configs: - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: - entities.append( - LcnOutputLight(entity_config, config_entry.entry_id, device_connection) - ) + entities.append(LcnOutputLight(entity_config, config_entry)) else: # in RELAY_PORTS - entities.append( - LcnRelayLight(entity_config, config_entry.entry_id, device_connection) - ) + entities.append(LcnRelayLight(entity_config, config_entry)) async_add_entities(entities) @@ -68,7 +59,6 @@ async def async_setup_entry( """Set up LCN light entities from a config entry.""" add_entities = partial( add_lcn_entities, - hass, config_entry, async_add_entities, ) @@ -93,16 +83,14 @@ class LcnOutputLight(LcnEntity, LightEntity): _attr_is_on = False _attr_brightness = 255 - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN light.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.output = pypck.lcn_defs.OutputPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]] self._transition = pypck.lcn_defs.time_to_ramp_value( - config[CONF_DOMAIN_DATA][CONF_TRANSITION] + config[CONF_DOMAIN_DATA][CONF_TRANSITION] * 1000.0 ) self.dimmable = config[CONF_DOMAIN_DATA][CONF_DIMMABLE] @@ -187,11 +175,9 @@ class LcnRelayLight(LcnEntity, LightEntity): _attr_supported_color_modes = {ColorMode.ONOFF} _attr_is_on = False - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN light.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.output = pypck.lcn_defs.RelayPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]] diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index f8b7d02b103..695a35df871 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.21", "lcn-frontend==0.1.6"] + "requirements": ["pypck==0.7.24", "lcn-frontend==0.2.2"] } diff --git a/homeassistant/components/lcn/scene.py b/homeassistant/components/lcn/scene.py index 52ec0262b55..0f40926cf17 100644 --- a/homeassistant/components/lcn/scene.py +++ b/homeassistant/components/lcn/scene.py @@ -8,12 +8,11 @@ import pypck from homeassistant.components.scene import DOMAIN as DOMAIN_SCENE, Scene from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ADDRESS, CONF_DOMAIN, CONF_ENTITIES, CONF_SCENE +from 
homeassistant.const import CONF_DOMAIN, CONF_ENTITIES, CONF_SCENE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType -from . import LcnEntity from .const import ( ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, @@ -23,27 +22,20 @@ from .const import ( DOMAIN, OUTPUT_PORTS, ) -from .helpers import DeviceConnectionType, get_device_connection +from .entity import LcnEntity PARALLEL_UPDATES = 0 def add_lcn_entities( - hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, entity_configs: Iterable[ConfigType], ) -> None: """Add entities for this domain.""" - entities: list[LcnScene] = [] - for entity_config in entity_configs: - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - - entities.append( - LcnScene(entity_config, config_entry.entry_id, device_connection) - ) + entities = [ + LcnScene(entity_config, config_entry) for entity_config in entity_configs + ] async_add_entities(entities) @@ -56,7 +48,6 @@ async def async_setup_entry( """Set up LCN switch entities from a config entry.""" add_entities = partial( add_lcn_entities, - hass, config_entry, async_add_entities, ) @@ -77,11 +68,9 @@ async def async_setup_entry( class LcnScene(LcnEntity, Scene): """Representation of a LCN scene.""" - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN scene.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.register_id = config[CONF_DOMAIN_DATA][CONF_REGISTER] self.scene_id = config[CONF_DOMAIN_DATA][CONF_SCENE] @@ -98,7 +87,7 @@ class LcnScene(LcnEntity, Scene): self.transition = None else: self.transition = pypck.lcn_defs.time_to_ramp_value( - config[CONF_DOMAIN_DATA][CONF_TRANSITION] + config[CONF_DOMAIN_DATA][CONF_TRANSITION] * 1000.0 ) async def async_activate(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/lcn/schemas.py b/homeassistant/components/lcn/schemas.py index 0539e83dea8..c9c91b9843d 100644 --- a/homeassistant/components/lcn/schemas.py +++ b/homeassistant/components/lcn/schemas.py @@ -4,20 +4,9 @@ import voluptuous as vol from homeassistant.components.climate import DEFAULT_MAX_TEMP, DEFAULT_MIN_TEMP from homeassistant.const import ( - CONF_ADDRESS, - CONF_BINARY_SENSORS, - CONF_COVERS, - CONF_HOST, - CONF_LIGHTS, - CONF_NAME, - CONF_PASSWORD, - CONF_PORT, CONF_SCENE, - CONF_SENSORS, CONF_SOURCE, - CONF_SWITCHES, CONF_UNIT_OF_MEASUREMENT, - CONF_USERNAME, UnitOfTemperature, ) import homeassistant.helpers.config_validation as cv @@ -25,9 +14,6 @@ from homeassistant.helpers.typing import VolDictType from .const import ( BINSENSOR_PORTS, - CONF_CLIMATES, - CONF_CONNECTIONS, - CONF_DIM_MODE, CONF_DIMMABLE, CONF_LOCKABLE, CONF_MAX_TEMP, @@ -37,12 +23,8 @@ from .const import ( CONF_OUTPUTS, CONF_REGISTER, CONF_REVERSE_TIME, - CONF_SCENES, CONF_SETPOINT, - CONF_SK_NUM_TRIES, CONF_TRANSITION, - DIM_MODES, - DOMAIN, KEYS, LED_PORTS, LOGICOP_PORTS, @@ -56,7 +38,6 @@ from .const import ( VAR_UNITS, VARIABLES, ) -from .helpers import has_unique_host_names, is_address ADDRESS_SCHEMA = vol.Coerce(tuple) @@ -95,7 +76,7 @@ DOMAIN_DATA_LIGHT: VolDictType = { vol.Required(CONF_OUTPUT): vol.All(vol.Upper, vol.In(OUTPUT_PORTS + RELAY_PORTS)), vol.Optional(CONF_DIMMABLE, default=False): 
vol.Coerce(bool), vol.Optional(CONF_TRANSITION, default=0): vol.All( - vol.Coerce(float), vol.Range(min=0.0, max=486.0), lambda value: value * 1000 + vol.Coerce(float), vol.Range(min=0.0, max=486.0) ), } @@ -106,13 +87,8 @@ DOMAIN_DATA_SCENE: VolDictType = { vol.Optional(CONF_OUTPUTS, default=[]): vol.All( cv.ensure_list, [vol.All(vol.Upper, vol.In(OUTPUT_PORTS + RELAY_PORTS))] ), - vol.Optional(CONF_TRANSITION, default=None): vol.Any( - vol.All( - vol.Coerce(int), - vol.Range(min=0.0, max=486.0), - lambda value: value * 1000, - ), - None, + vol.Optional(CONF_TRANSITION, default=0): vol.Any( + vol.All(vol.Coerce(int), vol.Range(min=0.0, max=486.0)) ), } @@ -130,73 +106,8 @@ DOMAIN_DATA_SENSOR: VolDictType = { DOMAIN_DATA_SWITCH: VolDictType = { - vol.Required(CONF_OUTPUT): vol.All(vol.Upper, vol.In(OUTPUT_PORTS + RELAY_PORTS)), -} - -# -# Configuration -# - -DOMAIN_DATA_BASE: VolDictType = { - vol.Required(CONF_NAME): cv.string, - vol.Required(CONF_ADDRESS): is_address, -} - -BINARY_SENSORS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_BINARY_SENSOR}) - -CLIMATES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_CLIMATE}) - -COVERS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_COVER}) - -LIGHTS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_LIGHT}) - -SCENES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SCENE}) - -SENSORS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SENSOR}) - -SWITCHES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SWITCH}) - -CONNECTION_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_PORT): cv.port, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional(CONF_SK_NUM_TRIES, default=0): cv.positive_int, - vol.Optional(CONF_DIM_MODE, default="steps50"): vol.All( - vol.Upper, vol.In(DIM_MODES) - ), - vol.Optional(CONF_NAME): cv.string, - } -) - -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_CONNECTIONS): vol.All( - cv.ensure_list, has_unique_host_names, [CONNECTION_SCHEMA] - ), - vol.Optional(CONF_BINARY_SENSORS): vol.All( - cv.ensure_list, [BINARY_SENSORS_SCHEMA] - ), - vol.Optional(CONF_CLIMATES): vol.All( - cv.ensure_list, [CLIMATES_SCHEMA] - ), - vol.Optional(CONF_COVERS): vol.All(cv.ensure_list, [COVERS_SCHEMA]), - vol.Optional(CONF_LIGHTS): vol.All(cv.ensure_list, [LIGHTS_SCHEMA]), - vol.Optional(CONF_SCENES): vol.All(cv.ensure_list, [SCENES_SCHEMA]), - vol.Optional(CONF_SENSORS): vol.All( - cv.ensure_list, [SENSORS_SCHEMA] - ), - vol.Optional(CONF_SWITCHES): vol.All( - cv.ensure_list, [SWITCHES_SCHEMA] - ), - }, - ) - }, + vol.Required(CONF_OUTPUT): vol.All( + vol.Upper, + vol.In(OUTPUT_PORTS + RELAY_PORTS + SETPOINTS + KEYS), ), - extra=vol.ALLOW_EXTRA, -) +} diff --git a/homeassistant/components/lcn/sensor.py b/homeassistant/components/lcn/sensor.py index 7e8941a0bf9..ada0857742c 100644 --- a/homeassistant/components/lcn/sensor.py +++ b/homeassistant/components/lcn/sensor.py @@ -7,10 +7,13 @@ from typing import cast import pypck -from homeassistant.components.sensor import DOMAIN as DOMAIN_SENSOR, SensorEntity +from homeassistant.components.sensor import ( + DOMAIN as DOMAIN_SENSOR, + SensorDeviceClass, + SensorEntity, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - CONF_ADDRESS, CONF_DOMAIN, CONF_ENTITIES, CONF_SOURCE, @@ -20,7 +23,6 @@ from homeassistant.core import HomeAssistant from 
homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType -from . import LcnEntity from .const import ( ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, @@ -31,11 +33,22 @@ from .const import ( THRESHOLDS, VARIABLES, ) -from .helpers import DeviceConnectionType, InputType, get_device_connection +from .entity import LcnEntity +from .helpers import InputType + +DEVICE_CLASS_MAPPING = { + pypck.lcn_defs.VarUnit.CELSIUS: SensorDeviceClass.TEMPERATURE, + pypck.lcn_defs.VarUnit.KELVIN: SensorDeviceClass.TEMPERATURE, + pypck.lcn_defs.VarUnit.FAHRENHEIT: SensorDeviceClass.TEMPERATURE, + pypck.lcn_defs.VarUnit.LUX_T: SensorDeviceClass.ILLUMINANCE, + pypck.lcn_defs.VarUnit.LUX_I: SensorDeviceClass.ILLUMINANCE, + pypck.lcn_defs.VarUnit.METERPERSECOND: SensorDeviceClass.SPEED, + pypck.lcn_defs.VarUnit.VOLT: SensorDeviceClass.VOLTAGE, + pypck.lcn_defs.VarUnit.AMPERE: SensorDeviceClass.CURRENT, +} def add_lcn_entities( - hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, entity_configs: Iterable[ConfigType], @@ -43,24 +56,12 @@ def add_lcn_entities( """Add entities for this domain.""" entities: list[LcnVariableSensor | LcnLedLogicSensor] = [] for entity_config in entity_configs: - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in chain( VARIABLES, SETPOINTS, THRESHOLDS, S0_INPUTS ): - entities.append( - LcnVariableSensor( - entity_config, config_entry.entry_id, device_connection - ) - ) + entities.append(LcnVariableSensor(entity_config, config_entry)) else: # in LED_PORTS + LOGICOP_PORTS - entities.append( - LcnLedLogicSensor( - entity_config, config_entry.entry_id, device_connection - ) - ) + entities.append(LcnLedLogicSensor(entity_config, config_entry)) async_add_entities(entities) @@ -73,7 +74,6 @@ async def async_setup_entry( """Set up LCN switch entities from a config entry.""" add_entities = partial( add_lcn_entities, - hass, config_entry, async_add_entities, ) @@ -94,17 +94,17 @@ async def async_setup_entry( class LcnVariableSensor(LcnEntity, SensorEntity): """Representation of a LCN sensor for variables.""" - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN sensor.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.variable = pypck.lcn_defs.Var[config[CONF_DOMAIN_DATA][CONF_SOURCE]] self.unit = pypck.lcn_defs.VarUnit.parse( config[CONF_DOMAIN_DATA][CONF_UNIT_OF_MEASUREMENT] ) + self._attr_native_unit_of_measurement = cast(str, self.unit.value) + self._attr_device_class = DEVICE_CLASS_MAPPING.get(self.unit, None) async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" @@ -126,18 +126,20 @@ class LcnVariableSensor(LcnEntity, SensorEntity): ): return - self._attr_native_value = input_obj.get_value().to_var_unit(self.unit) + is_regulator = self.variable.name in SETPOINTS + self._attr_native_value = input_obj.get_value().to_var_unit( + self.unit, is_regulator + ) + self.async_write_ha_state() class LcnLedLogicSensor(LcnEntity, SensorEntity): """Representation of a LCN sensor for leds and logicops.""" - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: 
ConfigEntry) -> None: """Initialize the LCN sensor.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) if config[CONF_DOMAIN_DATA][CONF_SOURCE] in LED_PORTS: self.source = pypck.lcn_defs.LedPort[config[CONF_DOMAIN_DATA][CONF_SOURCE]] diff --git a/homeassistant/components/lcn/services.py b/homeassistant/components/lcn/services.py index 49b54fc0c8d..92f5863c47e 100644 --- a/homeassistant/components/lcn/services.py +++ b/homeassistant/components/lcn/services.py @@ -1,5 +1,7 @@ """Service calls related dependencies for LCN component.""" +from enum import StrEnum, auto + import pypck import voluptuous as vol @@ -394,18 +396,44 @@ class Pck(LcnServiceCall): await device_connection.pck(pck) +class LcnService(StrEnum): + """LCN service names.""" + + OUTPUT_ABS = auto() + OUTPUT_REL = auto() + OUTPUT_TOGGLE = auto() + RELAYS = auto() + VAR_ABS = auto() + VAR_RESET = auto() + VAR_REL = auto() + LOCK_REGULATOR = auto() + LED = auto() + SEND_KEYS = auto() + LOCK_KEYS = auto() + DYN_TEXT = auto() + PCK = auto() + + SERVICES = ( - ("output_abs", OutputAbs), - ("output_rel", OutputRel), - ("output_toggle", OutputToggle), - ("relays", Relays), - ("var_abs", VarAbs), - ("var_reset", VarReset), - ("var_rel", VarRel), - ("lock_regulator", LockRegulator), - ("led", Led), - ("send_keys", SendKeys), - ("lock_keys", LockKeys), - ("dyn_text", DynText), - ("pck", Pck), + (LcnService.OUTPUT_ABS, OutputAbs), + (LcnService.OUTPUT_REL, OutputRel), + (LcnService.OUTPUT_TOGGLE, OutputToggle), + (LcnService.RELAYS, Relays), + (LcnService.VAR_ABS, VarAbs), + (LcnService.VAR_RESET, VarReset), + (LcnService.VAR_REL, VarRel), + (LcnService.LOCK_REGULATOR, LockRegulator), + (LcnService.LED, Led), + (LcnService.SEND_KEYS, SendKeys), + (LcnService.LOCK_KEYS, LockKeys), + (LcnService.DYN_TEXT, DynText), + (LcnService.PCK, Pck), ) + + +async def register_services(hass: HomeAssistant) -> None: + """Register services for LCN.""" + for service_name, service in SERVICES: + hass.services.async_register( + DOMAIN, service_name, service(hass).async_call_service, service.schema + ) diff --git a/homeassistant/components/lcn/strings.json b/homeassistant/components/lcn/strings.json index a5f303c6392..088a3654500 100644 --- a/homeassistant/components/lcn/strings.json +++ b/homeassistant/components/lcn/strings.json @@ -26,7 +26,12 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", "sk_num_tries": "Segment coupler scan attempts", - "dim_mode": "Dimming mode" + "dim_mode": "Dimming mode", + "acknowledge": "Request acknowledgement from modules" + }, + "data_description": { + "dim_mode": "The number of steps used for dimming outputs.", + "acknowledge": "Retry sending commands if no response is received (increases bus traffic)." 
} }, "reconfigure": { @@ -37,8 +42,13 @@ "port": "[%key:common::config_flow::data::port%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "sk_num_tries": "Segment coupler scan attempts", - "dim_mode": "Dimming mode" + "sk_num_tries": "[%key:component::lcn::config::step::user::data::sk_num_tries%]", + "dim_mode": "[%key:component::lcn::config::step::user::data::dim_mode%]", + "acknowledge": "[%key:component::lcn::config::step::user::data::acknowledge%]" + }, + "data_description": { + "dim_mode": "[%key:component::lcn::config::step::user::data_description::dim_mode%]", + "acknowledge": "[%key:component::lcn::config::step::user::data_description::acknowledge%]" } } }, @@ -53,17 +63,13 @@ } }, "issues": { - "authentication_error": { - "title": "Authentication failed.", - "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure username and password are correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + "deprecated_regulatorlock_sensor": { + "title": "Deprecated LCN regulator lock binary sensor", + "description": "Your LCN regulator lock binary sensor entity `{entity}` is beeing used in automations or scripts. A regulator lock switch entity is available and should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue." }, - "license_error": { - "title": "Maximum number of connections was reached.", - "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure sufficient PCHK licenses are registered and restart Home Assistant.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "connection_refused": { - "title": "Unable to connect to PCHK.", - "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure the connection (IP and port) to the LCN bus coupler is correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + "deprecated_keylock_sensor": { + "title": "Deprecated LCN key lock binary sensor", + "description": "Your LCN key lock binary sensor entity `{entity}` is beeing used in automations or scripts. A key lock switch entity is available and should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue." } }, "services": { diff --git a/homeassistant/components/lcn/switch.py b/homeassistant/components/lcn/switch.py index 4c316cef547..dd940bd38b3 100644 --- a/homeassistant/components/lcn/switch.py +++ b/homeassistant/components/lcn/switch.py @@ -8,45 +8,44 @@ import pypck from homeassistant.components.switch import DOMAIN as DOMAIN_SWITCH, SwitchEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ADDRESS, CONF_DOMAIN, CONF_ENTITIES +from homeassistant.const import CONF_DOMAIN, CONF_ENTITIES from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType -from . 
import LcnEntity from .const import ( ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, CONF_OUTPUT, DOMAIN, OUTPUT_PORTS, + RELAY_PORTS, + SETPOINTS, ) -from .helpers import DeviceConnectionType, InputType, get_device_connection +from .entity import LcnEntity +from .helpers import InputType PARALLEL_UPDATES = 0 def add_lcn_switch_entities( - hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, entity_configs: Iterable[ConfigType], ) -> None: """Add entities for this domain.""" - entities: list[LcnOutputSwitch | LcnRelaySwitch] = [] + entities: list[ + LcnOutputSwitch | LcnRelaySwitch | LcnRegulatorLockSwitch | LcnKeyLockSwitch + ] = [] for entity_config in entity_configs: - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: - entities.append( - LcnOutputSwitch(entity_config, config_entry.entry_id, device_connection) - ) - else: # in RELAY_PORTS - entities.append( - LcnRelaySwitch(entity_config, config_entry.entry_id, device_connection) - ) + entities.append(LcnOutputSwitch(entity_config, config_entry)) + elif entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in RELAY_PORTS: + entities.append(LcnRelaySwitch(entity_config, config_entry)) + elif entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in SETPOINTS: + entities.append(LcnRegulatorLockSwitch(entity_config, config_entry)) + else: # in KEYS + entities.append(LcnKeyLockSwitch(entity_config, config_entry)) async_add_entities(entities) @@ -59,7 +58,6 @@ async def async_setup_entry( """Set up LCN switch entities from a config entry.""" add_entities = partial( add_lcn_switch_entities, - hass, config_entry, async_add_entities, ) @@ -82,11 +80,9 @@ class LcnOutputSwitch(LcnEntity, SwitchEntity): _attr_is_on = False - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN switch.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.output = pypck.lcn_defs.OutputPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]] @@ -133,11 +129,9 @@ class LcnRelaySwitch(LcnEntity, SwitchEntity): _attr_is_on = False - def __init__( - self, config: ConfigType, entry_id: str, device_connection: DeviceConnectionType - ) -> None: + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: """Initialize the LCN switch.""" - super().__init__(config, entry_id, device_connection) + super().__init__(config, config_entry) self.output = pypck.lcn_defs.RelayPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]] @@ -178,3 +172,118 @@ class LcnRelaySwitch(LcnEntity, SwitchEntity): self._attr_is_on = input_obj.get_state(self.output.value) self.async_write_ha_state() + + +class LcnRegulatorLockSwitch(LcnEntity, SwitchEntity): + """Representation of a LCN switch for regulator locks.""" + + _attr_is_on = False + + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: + """Initialize the LCN switch.""" + super().__init__(config, config_entry) + + self.setpoint_variable = pypck.lcn_defs.Var[ + config[CONF_DOMAIN_DATA][CONF_OUTPUT] + ] + self.reg_id = pypck.lcn_defs.Var.to_set_point_id(self.setpoint_variable) + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + if not self.device_connection.is_group: + await 
self.device_connection.activate_status_request_handler( + self.setpoint_variable + ) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + if not self.device_connection.is_group: + await self.device_connection.cancel_status_request_handler( + self.setpoint_variable + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + if not await self.device_connection.lock_regulator(self.reg_id, True): + return + self._attr_is_on = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + if not await self.device_connection.lock_regulator(self.reg_id, False): + return + self._attr_is_on = False + self.async_write_ha_state() + + def input_received(self, input_obj: InputType) -> None: + """Set switch state when LCN input object (command) is received.""" + if ( + not isinstance(input_obj, pypck.inputs.ModStatusVar) + or input_obj.get_var() != self.setpoint_variable + ): + return + + self._attr_is_on = input_obj.get_value().is_locked_regulator() + self.async_write_ha_state() + + +class LcnKeyLockSwitch(LcnEntity, SwitchEntity): + """Representation of a LCN switch for key locks.""" + + _attr_is_on = False + + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: + """Initialize the LCN switch.""" + super().__init__(config, config_entry) + + self.key = pypck.lcn_defs.Key[config[CONF_DOMAIN_DATA][CONF_OUTPUT]] + self.table_id = ord(self.key.name[0]) - 65 + self.key_id = int(self.key.name[1]) - 1 + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + if not self.device_connection.is_group: + await self.device_connection.activate_status_request_handler(self.key) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + if not self.device_connection.is_group: + await self.device_connection.cancel_status_request_handler(self.key) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + states = [pypck.lcn_defs.KeyLockStateModifier.NOCHANGE] * 8 + states[self.key_id] = pypck.lcn_defs.KeyLockStateModifier.ON + + if not await self.device_connection.lock_keys(self.table_id, states): + return + + self._attr_is_on = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + states = [pypck.lcn_defs.KeyLockStateModifier.NOCHANGE] * 8 + states[self.key_id] = pypck.lcn_defs.KeyLockStateModifier.OFF + + if not await self.device_connection.lock_keys(self.table_id, states): + return + + self._attr_is_on = False + self.async_write_ha_state() + + def input_received(self, input_obj: InputType) -> None: + """Set switch state when LCN input object (command) is received.""" + if ( + not isinstance(input_obj, pypck.inputs.ModStatusKeyLocks) + or self.key not in pypck.lcn_defs.Key + ): + return + + self._attr_is_on = input_obj.get_state(self.table_id, self.key_id) + self.async_write_ha_state() diff --git a/homeassistant/components/lcn/websocket.py b/homeassistant/components/lcn/websocket.py index b418e362b27..d3268dfbf91 100644 --- a/homeassistant/components/lcn/websocket.py +++ b/homeassistant/components/lcn/websocket.py @@ -21,7 +21,7 @@ from homeassistant.const import ( CONF_NAME, CONF_RESOURCE, ) -from homeassistant.core import 
HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er import homeassistant.helpers.config_validation as cv @@ -102,7 +102,6 @@ def get_config_entry( ) -> AsyncWebSocketCommandHandler: """Websocket decorator to ensure the config_entry exists and return it.""" - @callback @wraps(func) async def get_entry( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict @@ -158,7 +157,13 @@ async def websocket_get_entity_configs( else: entity_configs = config_entry.data[CONF_ENTITIES] - connection.send_result(msg["id"], entity_configs) + result_entity_configs = [ + {**entity_config, CONF_NAME: entity.name or entity.original_name} + for entity_config in entity_configs[:] + if (entity := get_entity_entry(hass, entity_config, config_entry)) is not None + ] + + connection.send_result(msg["id"], result_entity_configs) @websocket_api.require_admin @@ -438,3 +443,23 @@ async def async_create_or_update_device_in_config_entry( await async_update_device_config(device_connection, device_config) hass.config_entries.async_update_entry(config_entry, data=data) + + +def get_entity_entry( + hass: HomeAssistant, entity_config: dict, config_entry: ConfigEntry +) -> er.RegistryEntry | None: + """Get entity RegistryEntry from entity_config.""" + entity_registry = er.async_get(hass) + domain_name = entity_config[CONF_DOMAIN] + domain_data = entity_config[CONF_DOMAIN_DATA] + resource = get_resource(domain_name, domain_data).lower() + unique_id = generate_unique_id( + config_entry.entry_id, + entity_config[CONF_ADDRESS], + resource, + ) + if ( + entity_id := entity_registry.async_get_entity_id(domain_name, DOMAIN, unique_id) + ) is None: + return None + return entity_registry.async_get(entity_id) diff --git a/homeassistant/components/led_ble/__init__.py b/homeassistant/components/led_ble/__init__.py index d09f88b145a..84d7369d706 100644 --- a/homeassistant/components/led_ble/__init__.py +++ b/homeassistant/components/led_ble/__init__.py @@ -66,6 +66,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=led_ble.name, update_method=_async_update, update_interval=timedelta(seconds=UPDATE_SECONDS), diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index 1d12e355a0d..4aaaebc0006 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -35,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.0.2"] + "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.1.1"] } diff --git a/homeassistant/components/lektrico/__init__.py b/homeassistant/components/lektrico/__init__.py new file mode 100644 index 00000000000..475b6132541 --- /dev/null +++ b/homeassistant/components/lektrico/__init__.py @@ -0,0 +1,61 @@ +"""The Lektrico Charging Station integration.""" + +from __future__ import annotations + +from lektricowifi import Device + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, Platform +from homeassistant.core import HomeAssistant + +from .coordinator import LektricoDeviceDataUpdateCoordinator + +# List the platforms that charger supports. 
+CHARGERS_PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.NUMBER, + Platform.SENSOR, + Platform.SWITCH, +] + +# List the platforms that load balancer device supports. +LB_DEVICES_PLATFORMS: list[Platform] = [ + Platform.BUTTON, + Platform.SELECT, + Platform.SENSOR, +] + +type LektricoConfigEntry = ConfigEntry[LektricoDeviceDataUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: LektricoConfigEntry) -> bool: + """Set up Lektrico Charging Station from a config entry.""" + coordinator = LektricoDeviceDataUpdateCoordinator( + hass, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, _get_platforms(entry)) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + + return await hass.config_entries.async_unload_platforms( + entry, _get_platforms(entry) + ) + + +def _get_platforms(entry: ConfigEntry) -> list[Platform]: + """Return the platforms for this type of device.""" + _device_type: str = entry.data[CONF_TYPE] + if _device_type in (Device.TYPE_1P7K, Device.TYPE_3P22K): + return CHARGERS_PLATFORMS + return LB_DEVICES_PLATFORMS diff --git a/homeassistant/components/lektrico/binary_sensor.py b/homeassistant/components/lektrico/binary_sensor.py new file mode 100644 index 00000000000..d0a3e39690c --- /dev/null +++ b/homeassistant/components/lektrico/binary_sensor.py @@ -0,0 +1,139 @@ +"""Support for Lektrico binary sensors entities.""" + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Lektrico binary sensor entity.""" + + value_fn: Callable[[dict[str, Any]], bool] + + +BINARY_SENSORS: tuple[LektricoBinarySensorEntityDescription, ...] 
= ( + LektricoBinarySensorEntityDescription( + key="state_e_activated", + translation_key="state_e_activated", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["state_e_activated"]), + ), + LektricoBinarySensorEntityDescription( + key="overtemp", + translation_key="overtemp", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["overtemp"]), + ), + LektricoBinarySensorEntityDescription( + key="critical_temp", + translation_key="critical_temp", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["critical_temp"]), + ), + LektricoBinarySensorEntityDescription( + key="overcurrent", + translation_key="overcurrent", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["overcurrent"]), + ), + LektricoBinarySensorEntityDescription( + key="meter_fault", + translation_key="meter_fault", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["meter_fault"]), + ), + LektricoBinarySensorEntityDescription( + key="undervoltage", + translation_key="undervoltage", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["undervoltage_error"]), + ), + LektricoBinarySensorEntityDescription( + key="overvoltage", + translation_key="overvoltage", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["overvoltage_error"]), + ), + LektricoBinarySensorEntityDescription( + key="rcd_error", + translation_key="rcd_error", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["rcd_error"]), + ), + LektricoBinarySensorEntityDescription( + key="cp_diode_failure", + translation_key="cp_diode_failure", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["cp_diode_failure"]), + ), + LektricoBinarySensorEntityDescription( + key="contactor_failure", + translation_key="contactor_failure", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["contactor_failure"]), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico binary sensor entities based on a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + LektricoBinarySensor( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in BINARY_SENSORS + ) + + +class LektricoBinarySensor(LektricoEntity, BinarySensorEntity): + """Defines a Lektrico binary sensor entity.""" + + entity_description: LektricoBinarySensorEntityDescription + + def __init__( + self, + description: LektricoBinarySensorEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico binary sensor.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._coordinator = coordinator + self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + + @property + def is_on(self) -> 
bool: + """Return the state of the binary sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/lektrico/button.py b/homeassistant/components/lektrico/button.py new file mode 100644 index 00000000000..62aef12ff53 --- /dev/null +++ b/homeassistant/components/lektrico/button.py @@ -0,0 +1,102 @@ +"""Support for Lektrico buttons.""" + +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +from typing import Any + +from lektricowifi import Device + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoButtonEntityDescription(ButtonEntityDescription): + """Describes Lektrico button entity.""" + + press_fn: Callable[[Device], Coroutine[Any, Any, dict[Any, Any]]] + + +BUTTONS_FOR_CHARGERS: tuple[LektricoButtonEntityDescription, ...] = ( + LektricoButtonEntityDescription( + key="charge_start", + translation_key="charge_start", + entity_category=EntityCategory.CONFIG, + press_fn=lambda device: device.send_charge_start(), + ), + LektricoButtonEntityDescription( + key="charge_stop", + translation_key="charge_stop", + entity_category=EntityCategory.CONFIG, + press_fn=lambda device: device.send_charge_stop(), + ), + LektricoButtonEntityDescription( + key="reboot", + device_class=ButtonDeviceClass.RESTART, + entity_category=EntityCategory.CONFIG, + press_fn=lambda device: device.send_reset(), + ), +) + +BUTTONS_FOR_LB_DEVICES: tuple[LektricoButtonEntityDescription, ...] = ( + LektricoButtonEntityDescription( + key="reboot", + device_class=ButtonDeviceClass.RESTART, + entity_category=EntityCategory.CONFIG, + press_fn=lambda device: device.send_reset(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico charger based on a config entry.""" + coordinator = entry.runtime_data + + buttons_to_be_used: tuple[LektricoButtonEntityDescription, ...] 
+ if coordinator.device_type in (Device.TYPE_1P7K, Device.TYPE_3P22K): + buttons_to_be_used = BUTTONS_FOR_CHARGERS + else: + buttons_to_be_used = BUTTONS_FOR_LB_DEVICES + + async_add_entities( + LektricoButton( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in buttons_to_be_used + ) + + +class LektricoButton(LektricoEntity, ButtonEntity): + """Defines an Lektrico button.""" + + entity_description: LektricoButtonEntityDescription + + def __init__( + self, + description: LektricoButtonEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico button.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._attr_unique_id = f"{coordinator.serial_number}-{description.key}" + + async def async_press(self) -> None: + """Press the button.""" + await self.entity_description.press_fn(self.coordinator.device) diff --git a/homeassistant/components/lektrico/config_flow.py b/homeassistant/components/lektrico/config_flow.py new file mode 100644 index 00000000000..7091856f4fd --- /dev/null +++ b/homeassistant/components/lektrico/config_flow.py @@ -0,0 +1,138 @@ +"""Config flow for Lektrico Charging Station.""" + +from __future__ import annotations + +from typing import Any + +from lektricowifi import Device, DeviceConnectionError +import voluptuous as vol + +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + ATTR_HW_VERSION, + ATTR_SERIAL_NUMBER, + CONF_HOST, + CONF_TYPE, +) +from homeassistant.core import callback +from homeassistant.helpers.httpx_client import get_async_client + +from .const import DOMAIN + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + } +) + + +class LektricoFlowHandler(ConfigFlow, domain=DOMAIN): + """Handle a Lektrico config flow.""" + + VERSION = 1 + + _host: str + _name: str + _serial_number: str + _board_revision: str + _device_type: str + + async def async_step_user( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors = None + + if user_input is not None: + self._host = user_input[CONF_HOST] + + # obtain serial number + try: + await self._get_lektrico_device_settings_and_treat_unique_id() + return self._async_create_entry() + except DeviceConnectionError: + errors = {CONF_HOST: "cannot_connect"} + + return self._async_show_setup_form(user_input=user_input, errors=errors) + + @callback + def _async_show_setup_form( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: + """Show the setup form to the user.""" + if user_input is None: + user_input = {} + + schema = self.add_suggested_values_to_schema(STEP_USER_DATA_SCHEMA, user_input) + + return self.async_show_form( + step_id="user", + data_schema=schema, + errors=errors or {}, + ) + + @callback + def _async_create_entry(self) -> ConfigFlowResult: + return self.async_create_entry( + title=self._name, + data={ + CONF_HOST: self._host, + ATTR_SERIAL_NUMBER: self._serial_number, + CONF_TYPE: self._device_type, + ATTR_HW_VERSION: self._board_revision, + }, + ) + + async def async_step_zeroconf( + self, discovery_info: ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery.""" + self._host = discovery_info.host # 192.168.100.11 + + # read settings from the 
device + try: + await self._get_lektrico_device_settings_and_treat_unique_id() + except DeviceConnectionError: + return self.async_abort(reason="cannot_connect") + + self.context["title_placeholders"] = { + "serial_number": self._serial_number, + "name": self._name, + } + + return await self.async_step_confirm() + + async def _get_lektrico_device_settings_and_treat_unique_id(self) -> None: + """Get device's serial number from a Lektrico device.""" + device = Device( + _host=self._host, + asyncClient=get_async_client(self.hass), + ) + + settings = await device.device_config() + self._serial_number = str(settings["serial_number"]) + self._device_type = settings["type"] + self._board_revision = settings["board_revision"] + self._name = f"{settings["type"]}_{self._serial_number}" + + # Check if already configured + # Set unique id + await self.async_set_unique_id(self._serial_number, raise_on_progress=True) + # Abort if already configured, but update the last-known host + self._abort_if_unique_id_configured( + updates={CONF_HOST: self._host}, reload_on_update=True + ) + + async def async_step_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Allow the user to confirm adding the device.""" + + if user_input is not None: + return self._async_create_entry() + + self._set_confirm_only() + return self.async_show_form(step_id="confirm") diff --git a/homeassistant/components/lektrico/const.py b/homeassistant/components/lektrico/const.py new file mode 100644 index 00000000000..d3fc52f61be --- /dev/null +++ b/homeassistant/components/lektrico/const.py @@ -0,0 +1,9 @@ +"""Constants for the Lektrico Charging Station integration.""" + +from logging import Logger, getLogger + +# Integration domain +DOMAIN = "lektrico" + +# Logger +LOGGER: Logger = getLogger(__package__) diff --git a/homeassistant/components/lektrico/coordinator.py b/homeassistant/components/lektrico/coordinator.py new file mode 100644 index 00000000000..7c72a00e2d3 --- /dev/null +++ b/homeassistant/components/lektrico/coordinator.py @@ -0,0 +1,52 @@ +"""Coordinator for the Lektrico Charging Station integration.""" + +from __future__ import annotations + +from datetime import timedelta +from typing import Any + +from lektricowifi import Device, DeviceConnectionError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + ATTR_HW_VERSION, + ATTR_SERIAL_NUMBER, + CONF_HOST, + CONF_TYPE, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import LOGGER + +SCAN_INTERVAL = timedelta(seconds=10) + + +class LektricoDeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Data update coordinator for Lektrico device.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, device_name: str) -> None: + """Initialize a Lektrico Device.""" + super().__init__( + hass, + LOGGER, + name=device_name, + update_interval=SCAN_INTERVAL, + ) + self.device = Device( + self.config_entry.data[CONF_HOST], + asyncClient=get_async_client(hass), + ) + self.serial_number: str = self.config_entry.data[ATTR_SERIAL_NUMBER] + self.board_revision: str = self.config_entry.data[ATTR_HW_VERSION] + self.device_type: str = self.config_entry.data[CONF_TYPE] + + async def _async_update_data(self) -> dict[str, Any]: + """Async Update device state.""" + try: + return await 
self.device.device_info(self.device_type) + except DeviceConnectionError as lek_ex: + raise UpdateFailed(lek_ex) from lek_ex diff --git a/homeassistant/components/lektrico/entity.py b/homeassistant/components/lektrico/entity.py new file mode 100644 index 00000000000..1a5e08febe3 --- /dev/null +++ b/homeassistant/components/lektrico/entity.py @@ -0,0 +1,33 @@ +"""Entity classes for the Lektrico integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import LektricoDeviceDataUpdateCoordinator +from .const import DOMAIN + + +class LektricoEntity(CoordinatorEntity[LektricoDeviceDataUpdateCoordinator]): + """Define an Lektrico entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize.""" + super().__init__(coordinator) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.serial_number)}, + model=coordinator.device_type.upper(), + name=device_name, + manufacturer="Lektrico", + sw_version=coordinator.data["fw_version"], + hw_version=coordinator.board_revision, + serial_number=coordinator.serial_number, + ) diff --git a/homeassistant/components/lektrico/manifest.json b/homeassistant/components/lektrico/manifest.json new file mode 100644 index 00000000000..d34915d66ba --- /dev/null +++ b/homeassistant/components/lektrico/manifest.json @@ -0,0 +1,16 @@ +{ + "domain": "lektrico", + "name": "Lektrico Charging Station", + "codeowners": ["@lektrico"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/lektrico", + "integration_type": "device", + "iot_class": "local_polling", + "requirements": ["lektricowifi==0.0.43"], + "zeroconf": [ + { + "type": "_http._tcp.local.", + "name": "lektrico*" + } + ] +} diff --git a/homeassistant/components/lektrico/number.py b/homeassistant/components/lektrico/number.py new file mode 100644 index 00000000000..8054ba8afe5 --- /dev/null +++ b/homeassistant/components/lektrico/number.py @@ -0,0 +1,100 @@ +"""Support for Lektrico number entities.""" + +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +from typing import Any + +from lektricowifi import Device + +from homeassistant.components.number import NumberEntity, NumberEntityDescription +from homeassistant.const import ( + ATTR_SERIAL_NUMBER, + CONF_TYPE, + PERCENTAGE, + EntityCategory, + UnitOfElectricCurrent, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoNumberEntityDescription(NumberEntityDescription): + """Describes Lektrico number entity.""" + + value_fn: Callable[[dict[str, Any]], int] + set_value_fn: Callable[[Device, int], Coroutine[Any, Any, dict[Any, Any]]] + + +NUMBERS: tuple[LektricoNumberEntityDescription, ...] 
= ( + LektricoNumberEntityDescription( + key="led_max_brightness", + translation_key="led_max_brightness", + entity_category=EntityCategory.CONFIG, + native_min_value=0, + native_max_value=100, + native_step=5, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: int(data["led_max_brightness"]), + set_value_fn=lambda data, value: data.set_led_max_brightness(value), + ), + LektricoNumberEntityDescription( + key="dynamic_limit", + translation_key="dynamic_limit", + entity_category=EntityCategory.CONFIG, + native_min_value=0, + native_max_value=32, + native_step=1, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: int(data["dynamic_current"]), + set_value_fn=lambda data, value: data.set_dynamic_current(value), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico number entities based on a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + LektricoNumber( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in NUMBERS + ) + + +class LektricoNumber(LektricoEntity, NumberEntity): + """Defines a Lektrico number entity.""" + + entity_description: LektricoNumberEntityDescription + + def __init__( + self, + description: LektricoNumberEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico number.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + + @property + def native_value(self) -> int | None: + """Return the state of the number.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_set_native_value(self, value: float) -> None: + """Set the selected value.""" + await self.entity_description.set_value_fn(self.coordinator.device, int(value)) + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/lektrico/select.py b/homeassistant/components/lektrico/select.py new file mode 100644 index 00000000000..ef45d97d697 --- /dev/null +++ b/homeassistant/components/lektrico/select.py @@ -0,0 +1,91 @@ +"""Support for Lektrico select entities.""" + +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +from typing import Any + +from lektricowifi import Device + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoSelectEntityDescription(SelectEntityDescription): + """Describes Lektrico select entity.""" + + value_fn: Callable[[dict[str, Any]], str] + set_value_fn: Callable[[Device, int], Coroutine[Any, Any, dict[Any, Any]]] + + +LB_MODE_OPTIONS = [ + "disabled", + "power", + "hybrid", + "green", +] + + +SELECTS: tuple[LektricoSelectEntityDescription, ...] 
= ( + LektricoSelectEntityDescription( + key="load_balancing_mode", + translation_key="load_balancing_mode", + options=LB_MODE_OPTIONS, + entity_category=EntityCategory.CONFIG, + value_fn=lambda data: LB_MODE_OPTIONS[data["lb_mode"]], + set_value_fn=lambda device, value: device.set_load_balancing_mode(value), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico select entities based on a config entry.""" + + coordinator = entry.runtime_data + + async_add_entities( + LektricoSelect( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in SELECTS + ) + + +class LektricoSelect(LektricoEntity, SelectEntity): + """Defines a Lektrico select entity.""" + + entity_description: LektricoSelectEntityDescription + + def __init__( + self, + description: LektricoSelectEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico select.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + + @property + def current_option(self) -> str | None: + """Return the state of the select.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.entity_description.set_value_fn( + self.coordinator.device, LB_MODE_OPTIONS.index(option) + ) + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/lektrico/sensor.py b/homeassistant/components/lektrico/sensor.py new file mode 100644 index 00000000000..d55d91c4cd4 --- /dev/null +++ b/homeassistant/components/lektrico/sensor.py @@ -0,0 +1,336 @@ +"""Support for Lektrico charging station sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from lektricowifi import Device + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + ATTR_SERIAL_NUMBER, + CONF_TYPE, + PERCENTAGE, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfPower, + UnitOfTemperature, + UnitOfTime, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import IntegrationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoSensorEntityDescription(SensorEntityDescription): + """A class that describes the Lektrico sensor entities.""" + + value_fn: Callable[[dict[str, Any]], StateType] + + +LIMIT_REASON_OPTIONS = [ + "no_limit", + "installation_current", + "user_limit", + "dynamic_limit", + "schedule", + "em_offline", + "em", + "ocpp", + "overtemperature", + "switching_phases", + "1p_charging_disabled", +] + + +SENSORS_FOR_CHARGERS: tuple[LektricoSensorEntityDescription, ...] 
= ( + LektricoSensorEntityDescription( + key="state", + device_class=SensorDeviceClass.ENUM, + options=[ + "available", + "charging", + "connected", + "error", + "locked", + "need_auth", + "paused", + "paused_by_scheduler", + "updating_firmware", + ], + translation_key="state", + value_fn=lambda data: str(data["charger_state"]), + ), + LektricoSensorEntityDescription( + key="charging_time", + translation_key="charging_time", + device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.SECONDS, + value_fn=lambda data: int(data["charging_time"]), + ), + LektricoSensorEntityDescription( + key="power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["instant_power"]), + ), + LektricoSensorEntityDescription( + key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda data: float(data["session_energy"]) / 1000, + ), + LektricoSensorEntityDescription( + key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda data: float(data["temperature"]), + ), + LektricoSensorEntityDescription( + key="lifetime_energy", + translation_key="lifetime_energy", + state_class=SensorStateClass.TOTAL_INCREASING, + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda data: int(data["total_charged_energy"]), + ), + LektricoSensorEntityDescription( + key="installation_current", + translation_key="installation_current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: int(data["install_current"]), + ), + LektricoSensorEntityDescription( + key="limit_reason", + translation_key="limit_reason", + device_class=SensorDeviceClass.ENUM, + options=LIMIT_REASON_OPTIONS, + value_fn=lambda data: ( + str(data["current_limit_reason"]) + if str(data["current_limit_reason"]) in LIMIT_REASON_OPTIONS + else None + ), + ), +) + +SENSORS_FOR_LB_DEVICES: tuple[LektricoSensorEntityDescription, ...] = ( + LektricoSensorEntityDescription( + key="breaker_current", + translation_key="breaker_current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: int(data["breaker_curent"]), + ), +) + +SENSORS_FOR_1_PHASE: tuple[LektricoSensorEntityDescription, ...] = ( + LektricoSensorEntityDescription( + key="voltage", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_fn=lambda data: float(data["voltage_l1"]), + ), + LektricoSensorEntityDescription( + key="current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: float(data["current_l1"]), + ), +) + +SENSORS_FOR_3_PHASE: tuple[LektricoSensorEntityDescription, ...] 
= ( + LektricoSensorEntityDescription( + key="voltage_l1", + translation_key="voltage_l1", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_fn=lambda data: float(data["voltage_l1"]), + ), + LektricoSensorEntityDescription( + key="voltage_l2", + translation_key="voltage_l2", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_fn=lambda data: float(data["voltage_l2"]), + ), + LektricoSensorEntityDescription( + key="voltage_l3", + translation_key="voltage_l3", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_fn=lambda data: float(data["voltage_l3"]), + ), + LektricoSensorEntityDescription( + key="current_l1", + translation_key="current_l1", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: float(data["current_l1"]), + ), + LektricoSensorEntityDescription( + key="current_l2", + translation_key="current_l2", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: float(data["current_l2"]), + ), + LektricoSensorEntityDescription( + key="current_l3", + translation_key="current_l3", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: float(data["current_l3"]), + ), +) + + +SENSORS_FOR_LB_1_PHASE: tuple[LektricoSensorEntityDescription, ...] = ( + LektricoSensorEntityDescription( + key="power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["power_l1"]), + ), + LektricoSensorEntityDescription( + key="pf", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: float(data["power_factor_l1"]) * 100, + ), +) + + +SENSORS_FOR_LB_3_PHASE: tuple[LektricoSensorEntityDescription, ...] 
= ( + LektricoSensorEntityDescription( + key="power_l1", + translation_key="power_l1", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["power_l1"]), + ), + LektricoSensorEntityDescription( + key="power_l2", + translation_key="power_l2", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["power_l2"]), + ), + LektricoSensorEntityDescription( + key="power_l3", + translation_key="power_l3", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["power_l3"]), + ), + LektricoSensorEntityDescription( + key="pf_l1", + translation_key="pf_l1", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: float(data["power_factor_l1"]) * 100, + ), + LektricoSensorEntityDescription( + key="pf_l2", + translation_key="pf_l2", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: float(data["power_factor_l2"]) * 100, + ), + LektricoSensorEntityDescription( + key="pf_l3", + translation_key="pf_l3", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: float(data["power_factor_l3"]) * 100, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico charger based on a config entry.""" + coordinator = entry.runtime_data + + sensors_to_be_used: tuple[LektricoSensorEntityDescription, ...] 
+ if coordinator.device_type == Device.TYPE_1P7K: + sensors_to_be_used = SENSORS_FOR_CHARGERS + SENSORS_FOR_1_PHASE + elif coordinator.device_type == Device.TYPE_3P22K: + sensors_to_be_used = SENSORS_FOR_CHARGERS + SENSORS_FOR_3_PHASE + elif coordinator.device_type == Device.TYPE_EM: + sensors_to_be_used = ( + SENSORS_FOR_LB_DEVICES + SENSORS_FOR_1_PHASE + SENSORS_FOR_LB_1_PHASE + ) + elif coordinator.device_type == Device.TYPE_3EM: + sensors_to_be_used = ( + SENSORS_FOR_LB_DEVICES + SENSORS_FOR_3_PHASE + SENSORS_FOR_LB_3_PHASE + ) + else: + raise IntegrationError + + async_add_entities( + LektricoSensor( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in sensors_to_be_used + ) + + +class LektricoSensor(LektricoEntity, SensorEntity): + """The entity class for Lektrico charging stations sensors.""" + + entity_description: LektricoSensorEntityDescription + + def __init__( + self, + description: LektricoSensorEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico charger.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/lektrico/strings.json b/homeassistant/components/lektrico/strings.json new file mode 100644 index 00000000000..e24700c9b09 --- /dev/null +++ b/homeassistant/components/lektrico/strings.json @@ -0,0 +1,176 @@ +{ + "config": { + "step": { + "user": { + "description": "Set required parameters to connect to your device", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "device_name": "[%key:common::config_flow::data::name%]" + } + }, + "zeroconf_confirm": { + "description": "Do you want to add the Lektrico Charger with serial number `{serial_number}` to Home Assistant?", + "title": "Discovered Lektrico Charger device" + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + } + }, + "entity": { + "binary_sensor": { + "state_e_activated": { + "name": "Ev error" + }, + "overtemp": { + "name": "Thermal throttling" + }, + "critical_temp": { + "name": "Overheating" + }, + "overcurrent": { + "name": "Overcurrent" + }, + "meter_fault": { + "name": "Metering error" + }, + "undervoltage": { + "name": "Undervoltage" + }, + "overvoltage": { + "name": "Overvoltage" + }, + "rcd_error": { + "name": "Rcd error" + }, + "cp_diode_failure": { + "name": "Ev diode short" + }, + "contactor_failure": { + "name": "Relay contacts welded" + } + }, + "button": { + "charge_start": { + "name": "Charge start" + }, + "charge_stop": { + "name": "Charge stop" + } + }, + "number": { + "led_max_brightness": { + "name": "Led brightness" + }, + "dynamic_limit": { + "name": "Dynamic limit" + } + }, + "select": { + "load_balancing_mode": { + "name": "Load balancing mode", + "state": { + "disabled": "[%key:common::state::disabled%]", + "power": "Power", + "hybrid": "Hybrid", + "green": "Green" + } + } + }, + "sensor": { + "state": { + "name": "State", + "state": { + "available": "Available", + "charging": "Charging", + "connected": "Connected", 
+ "error": "Error", + "locked": "Locked", + "need_auth": "Waiting for authentication", + "paused": "Paused", + "paused_by_scheduler": "Paused by scheduler", + "updating_firmware": "Updating firmware" + } + }, + "charging_time": { + "name": "Charging time" + }, + "lifetime_energy": { + "name": "Lifetime energy" + }, + "installation_current": { + "name": "Installation current" + }, + "limit_reason": { + "name": "Limit reason", + "state": { + "no_limit": "No limit", + "installation_current": "Installation current", + "user_limit": "User limit", + "dynamic_limit": "Dynamic limit", + "schedule": "Schedule", + "em_offline": "EM offline", + "em": "EM", + "ocpp": "OCPP", + "overtemperature": "Overtemperature", + "switching_phases": "Switching phases", + "1p_charging_disabled": "1p charging disabled" + } + }, + "breaker_current": { + "name": "Breaker current" + }, + "voltage_l1": { + "name": "Voltage L1" + }, + "voltage_l2": { + "name": "Voltage L2" + }, + "voltage_l3": { + "name": "Voltage L3" + }, + "current_l1": { + "name": "Current L1" + }, + "current_l2": { + "name": "Current L2" + }, + "current_l3": { + "name": "Current L3" + }, + "power_l1": { + "name": "Power L1" + }, + "power_l2": { + "name": "Power L2" + }, + "power_l3": { + "name": "Power L3" + }, + "pf_l1": { + "name": "Power factor L1" + }, + "pf_l2": { + "name": "Power factor L2" + }, + "pf_l3": { + "name": "Power factor L3" + } + }, + "switch": { + "authentication": { + "name": "Authentication" + }, + "force_single_phase": { + "name": "Force single phase" + }, + "lock": { + "name": "Lock" + } + } + } +} diff --git a/homeassistant/components/lektrico/switch.py b/homeassistant/components/lektrico/switch.py new file mode 100644 index 00000000000..0fdfbd2ad41 --- /dev/null +++ b/homeassistant/components/lektrico/switch.py @@ -0,0 +1,116 @@ +"""Support for Lektrico switch entities.""" + +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +from typing import Any + +from lektricowifi import Device + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoSwitchEntityDescription(SwitchEntityDescription): + """Describes Lektrico switch entity.""" + + value_fn: Callable[[dict[str, Any]], bool] + set_value_fn: Callable[[Device, dict[Any, Any], bool], Coroutine[Any, Any, Any]] + + +SWITCHS_FOR_ALL_CHARGERS: tuple[LektricoSwitchEntityDescription, ...] = ( + LektricoSwitchEntityDescription( + key="authentication", + translation_key="authentication", + entity_category=EntityCategory.CONFIG, + value_fn=lambda data: bool(data["require_auth"]), + set_value_fn=lambda device, data, value: device.set_auth(not value), + ), + LektricoSwitchEntityDescription( + key="lock", + translation_key="lock", + entity_category=EntityCategory.CONFIG, + value_fn=lambda data: str(data["charger_state"]) == "locked", + set_value_fn=lambda device, data, value: device.set_charger_locked(value), + ), +) + + +SWITCHS_FOR_3_PHASE_CHARGERS: tuple[LektricoSwitchEntityDescription, ...] 
= ( + LektricoSwitchEntityDescription( + key="force_single_phase", + translation_key="force_single_phase", + entity_category=EntityCategory.CONFIG, + value_fn=lambda data: data["relay_mode"] == 1, + set_value_fn=lambda device, data, value: ( + device.set_relay_mode(data["dynamic_current"], 1) + if value + else device.set_relay_mode(data["dynamic_current"], 3) + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico switch entities based on a config entry.""" + coordinator = entry.runtime_data + + switchs_to_be_used: tuple[LektricoSwitchEntityDescription, ...] + if coordinator.device_type == Device.TYPE_3P22K: + switchs_to_be_used = SWITCHS_FOR_ALL_CHARGERS + SWITCHS_FOR_3_PHASE_CHARGERS + else: + switchs_to_be_used = SWITCHS_FOR_ALL_CHARGERS + + async_add_entities( + LektricoSwitch( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in switchs_to_be_used + ) + + +class LektricoSwitch(LektricoEntity, SwitchEntity): + """Defines a Lektrico switch entity.""" + + entity_description: LektricoSwitchEntityDescription + + def __init__( + self, + description: LektricoSwitchEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico switch.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.entity_description.set_value_fn( + self.coordinator.device, self.coordinator.data, True + ) + await self.coordinator.async_request_refresh() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.entity_description.set_value_fn( + self.coordinator.device, self.coordinator.data, False + ) + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/lg_netcast/config_flow.py b/homeassistant/components/lg_netcast/config_flow.py index c4e6c75edea..d5e28f3c057 100644 --- a/homeassistant/components/lg_netcast/config_flow.py +++ b/homeassistant/components/lg_netcast/config_flow.py @@ -18,10 +18,9 @@ from homeassistant.const import ( CONF_MODEL, CONF_NAME, ) -from homeassistant.core import CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN, callback +from homeassistant.core import CALLBACK_TYPE, callback from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers.event import async_track_time_interval -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.util.network import is_host_valid from .const import DEFAULT_NAME, DOMAIN @@ -68,56 +67,6 @@ class LGNetCast(config_entries.ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: - """Import configuration from yaml.""" - self.device_config = { - CONF_HOST: config[CONF_HOST], - CONF_NAME: config[CONF_NAME], - } - - def _create_issue(): - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - 
translation_placeholders={ - "domain": DOMAIN, - "integration_title": "LG Netcast", - }, - ) - - try: - result: ConfigFlowResult = await self.async_step_authorize(config) - except AbortFlow as err: - if err.reason != "already_configured": - async_create_issue( - self.hass, - DOMAIN, - "deprecated_yaml_import_issue_{err.reason}", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{err.reason}", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "LG Netcast", - "error_type": err.reason, - }, - ) - else: - _create_issue() - raise - - _create_issue() - - return result - async def async_discover_client(self): """Handle Discovery step.""" self.create_client() diff --git a/homeassistant/components/lg_netcast/device_trigger.py b/homeassistant/components/lg_netcast/device_trigger.py index 51c5ec53004..d1808b3e536 100644 --- a/homeassistant/components/lg_netcast/device_trigger.py +++ b/homeassistant/components/lg_netcast/device_trigger.py @@ -6,8 +6,8 @@ from typing import Any import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.const import CONF_DEVICE_ID, CONF_PLATFORM, CONF_TYPE diff --git a/homeassistant/components/lg_netcast/media_player.py b/homeassistant/components/lg_netcast/media_player.py index 4dc694cd085..b3f8f8e0437 100644 --- a/homeassistant/components/lg_netcast/media_player.py +++ b/homeassistant/components/lg_netcast/media_player.py @@ -7,26 +7,20 @@ from typing import TYPE_CHECKING, Any from pylgnetcast import LG_COMMAND, LgNetCastClient, LgNetCastError from requests import RequestException -import voluptuous as vol from homeassistant.components.media_player import ( - PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerDeviceClass, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, MediaType, ) -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.exceptions import PlatformNotReady -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.trigger import PluggableAction -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import ATTR_MANUFACTURER, DOMAIN from .triggers.turn_on import async_get_turn_on_trigger @@ -49,15 +43,6 @@ SUPPORT_LGTV = ( | MediaPlayerEntityFeature.STOP ) -PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA, - vol.Required(CONF_HOST): cv.string, - vol.Optional(CONF_ACCESS_TOKEN): vol.All(cv.string, vol.Length(max=6)), - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - } -) - async def async_setup_entry( hass: HomeAssistant, @@ -79,27 +64,6 @@ async def async_setup_entry( async_add_entities([LgTVDevice(client, name, model, unique_id=unique_id)]) -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - 
async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the LG TV platform.""" - - host = config.get(CONF_HOST) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config - ) - - if ( - result.get("type") == FlowResultType.ABORT - and result.get("reason") == "cannot_connect" - ): - raise PlatformNotReady(f"Connection error while connecting to {host}") - - class LgTVDevice(MediaPlayerEntity): """Representation of a LG TV.""" diff --git a/homeassistant/components/lg_netcast/strings.json b/homeassistant/components/lg_netcast/strings.json index 77003f60f43..0377d4bf318 100644 --- a/homeassistant/components/lg_netcast/strings.json +++ b/homeassistant/components/lg_netcast/strings.json @@ -25,17 +25,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" - } - }, - "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The {integration_title} is not online for YAML migration to complete", - "description": "Migrating {integration_title} from YAML cannot complete until the TV is online.\n\nPlease turn on your TV for migration to complete." - }, - "deprecated_yaml_import_issue_invalid_host": { - "title": "The {integration_title} YAML configuration has an invalid host.", - "description": "Configuring {integration_title} using YAML is being removed but the device returned an invalid response.\n\nPlease check or manually remove the YAML configuration." + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_host": "[%key:common::config_flow::error::invalid_host%]" } }, "device_automation": { diff --git a/homeassistant/components/lg_soundbar/media_player.py b/homeassistant/components/lg_soundbar/media_player.py index 61baed1198b..cebe1d33728 100644 --- a/homeassistant/components/lg_soundbar/media_player.py +++ b/homeassistant/components/lg_soundbar/media_player.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import Any + import temescal from homeassistant.components.media_player import ( @@ -43,6 +45,8 @@ class LGDevice(MediaPlayerEntity): _attr_supported_features = ( MediaPlayerEntityFeature.VOLUME_SET | MediaPlayerEntityFeature.VOLUME_MUTE + | MediaPlayerEntityFeature.TURN_ON + | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.SELECT_SOURCE | MediaPlayerEntityFeature.SELECT_SOUND_MODE ) @@ -93,14 +97,7 @@ class LGDevice(MediaPlayerEntity): """Handle responses from the speakers.""" data = response.get("data") or {} if response["msg"] == "EQ_VIEW_INFO": - if "i_bass" in data: - self._bass = data["i_bass"] - if "i_treble" in data: - self._treble = data["i_treble"] - if "ai_eq_list" in data: - self._equalisers = data["ai_eq_list"] - if "i_curr_eq" in data: - self._equaliser = data["i_curr_eq"] + self._update_equalisers(data) elif response["msg"] == "SPK_LIST_VIEW_INFO": if "i_vol" in data: self._volume = data["i_vol"] @@ -112,6 +109,11 @@ class LGDevice(MediaPlayerEntity): self._mute = data["b_mute"] if "i_curr_func" in data: self._function = data["i_curr_func"] + if "b_powerstatus" in data: + if data["b_powerstatus"]: + self._attr_state = MediaPlayerState.ON + else: + self._attr_state = MediaPlayerState.OFF elif response["msg"] == "FUNC_VIEW_INFO": if "i_curr_func" in data: self._function = data["i_curr_func"] @@ -137,6 +139,17 @@ class LGDevice(MediaPlayerEntity): 
self.schedule_update_ha_state() + + def _update_equalisers(self, data: dict[str, Any]) -> None: + """Update the equalisers.""" + if "i_bass" in data: + self._bass = data["i_bass"] + if "i_treble" in data: + self._treble = data["i_treble"] + if "ai_eq_list" in data: + self._equalisers = data["ai_eq_list"] + if "i_curr_eq" in data: + self._equaliser = data["i_curr_eq"] + def update(self) -> None: """Trigger updates from the device.""" self._device.get_eq() @@ -204,3 +217,17 @@ def select_sound_mode(self, sound_mode: str) -> None: """Set Sound Mode for Receiver..""" self._device.set_eq(temescal.equalisers.index(sound_mode)) + + def turn_on(self) -> None: + """Turn the media player on.""" + self._set_power(True) + + def turn_off(self) -> None: + """Turn the media player off.""" + self._set_power(False) + + def _set_power(self, status: bool) -> None: + """Set the media player state.""" + self._device.send_packet( + {"cmd": "set", "data": {"b_powerkey": status}, "msg": "SPK_LIST_VIEW_INFO"} + ) diff --git a/homeassistant/components/lg_thinq/__init__.py b/homeassistant/components/lg_thinq/__init__.py new file mode 100644 index 00000000000..657524f0ef5 --- /dev/null +++ b/homeassistant/components/lg_thinq/__init__.py @@ -0,0 +1,167 @@ +"""Support for LG ThinQ Connect device.""" + +from __future__ import annotations + +import asyncio +from dataclasses import dataclass, field +import logging + +from thinqconnect import ThinQApi, ThinQAPIException +from thinqconnect.integration import async_get_ha_bridge_list + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_ACCESS_TOKEN, + CONF_COUNTRY, + EVENT_HOMEASSISTANT_STOP, + Platform, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.event import async_track_time_interval + +from .const import CONF_CONNECT_CLIENT_ID, MQTT_SUBSCRIPTION_INTERVAL +from .coordinator import DeviceDataUpdateCoordinator, async_setup_device_coordinator +from .mqtt import ThinQMQTT + + +@dataclass(kw_only=True) +class ThinqData: + """A class that holds runtime data.""" + + coordinators: dict[str, DeviceDataUpdateCoordinator] = field(default_factory=dict) + mqtt_client: ThinQMQTT | None = None + + +type ThinqConfigEntry = ConfigEntry[ThinqData] + +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.CLIMATE, + Platform.EVENT, + Platform.FAN, + Platform.NUMBER, + Platform.SELECT, + Platform.SENSOR, + Platform.SWITCH, + Platform.VACUUM, +] + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry(hass: HomeAssistant, entry: ThinqConfigEntry) -> bool: + """Set up an entry.""" + entry.runtime_data = ThinqData() + + access_token = entry.data[CONF_ACCESS_TOKEN] + client_id = entry.data[CONF_CONNECT_CLIENT_ID] + country_code = entry.data[CONF_COUNTRY] + + thinq_api = ThinQApi( + session=async_get_clientsession(hass), + access_token=access_token, + country_code=country_code, + client_id=client_id, + ) + + # Set up coordinators and register devices. + await async_setup_coordinators(hass, entry, thinq_api) + + # Set up all platforms for this device/entry. + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + # Set up MQTT connection. + await async_setup_mqtt(hass, entry, thinq_api, client_id) + + # Clean up devices that are no longer in use. 
+ async_cleanup_device_registry(hass, entry) + + return True + + +async def async_setup_coordinators( + hass: HomeAssistant, + entry: ThinqConfigEntry, + thinq_api: ThinQApi, +) -> None: + """Set up coordinators and register devices.""" + # Get a list of HA bridges. + try: + bridge_list = await async_get_ha_bridge_list(thinq_api) + except ThinQAPIException as exc: + raise ConfigEntryNotReady(exc.message) from exc + + if not bridge_list: + _LOGGER.warning("No devices registered with the correct profile") + return + + # Set up a coordinator per device. + task_list = [ + hass.async_create_task(async_setup_device_coordinator(hass, bridge)) + for bridge in bridge_list + ] + task_result = await asyncio.gather(*task_list) + for coordinator in task_result: + entry.runtime_data.coordinators[coordinator.unique_id] = coordinator + + +@callback +def async_cleanup_device_registry(hass: HomeAssistant, entry: ThinqConfigEntry) -> None: + """Clean up device registry.""" + new_device_unique_ids = [ + coordinator.unique_id + for coordinator in entry.runtime_data.coordinators.values() + ] + device_registry = dr.async_get(hass) + existing_entries = dr.async_entries_for_config_entry( + device_registry, entry.entry_id + ) + + # Remove devices that no longer exist. + for old_entry in existing_entries: + old_unique_id = next(iter(old_entry.identifiers))[1] + if old_unique_id not in new_device_unique_ids: + device_registry.async_remove_device(old_entry.id) + _LOGGER.debug("Remove device_registry: device_id=%s", old_entry.id) + + +async def async_setup_mqtt( + hass: HomeAssistant, entry: ThinqConfigEntry, thinq_api: ThinQApi, client_id: str +) -> None: + """Set up MQTT connection.""" + mqtt_client = ThinQMQTT(hass, thinq_api, client_id, entry.runtime_data.coordinators) + entry.runtime_data.mqtt_client = mqtt_client + + # Try to connect. + result = await mqtt_client.async_connect() + if not result: + _LOGGER.error("Failed to set up mqtt connection") + return + + # Ready to subscribe. + await mqtt_client.async_start_subscribes() + + entry.async_on_unload( + async_track_time_interval( + hass, + mqtt_client.async_refresh_subscribe, + MQTT_SUBSCRIPTION_INTERVAL, + cancel_on_shutdown=True, + ) + ) + entry.async_on_unload( + hass.bus.async_listen_once( + EVENT_HOMEASSISTANT_STOP, mqtt_client.async_disconnect + ) + ) + + +async def async_unload_entry(hass: HomeAssistant, entry: ThinqConfigEntry) -> bool: + """Unload the entry.""" + if entry.runtime_data.mqtt_client: + await entry.runtime_data.mqtt_client.async_disconnect() + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/lg_thinq/binary_sensor.py b/homeassistant/components/lg_thinq/binary_sensor.py new file mode 100644 index 00000000000..845bf8c3079 --- /dev/null +++ b/homeassistant/components/lg_thinq/binary_sensor.py @@ -0,0 +1,181 @@ +"""Support for binary sensor entities.""" + +from __future__ import annotations + +from dataclasses import dataclass +import logging + +from thinqconnect import DeviceType +from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration import ActiveMode + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import ThinqConfigEntry +from .entity import ThinQEntity + + +@dataclass(frozen=True, kw_only=True) +class ThinQBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes ThinQ sensor entity.""" + + on_key: str | None = None + + +BINARY_SENSOR_DESC: dict[ThinQProperty, ThinQBinarySensorEntityDescription] = { + ThinQProperty.RINSE_REFILL: ThinQBinarySensorEntityDescription( + key=ThinQProperty.RINSE_REFILL, + translation_key=ThinQProperty.RINSE_REFILL, + ), + ThinQProperty.ECO_FRIENDLY_MODE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.ECO_FRIENDLY_MODE, + translation_key=ThinQProperty.ECO_FRIENDLY_MODE, + ), + ThinQProperty.POWER_SAVE_ENABLED: ThinQBinarySensorEntityDescription( + key=ThinQProperty.POWER_SAVE_ENABLED, + translation_key=ThinQProperty.POWER_SAVE_ENABLED, + ), + ThinQProperty.REMOTE_CONTROL_ENABLED: ThinQBinarySensorEntityDescription( + key=ThinQProperty.REMOTE_CONTROL_ENABLED, + translation_key=ThinQProperty.REMOTE_CONTROL_ENABLED, + ), + ThinQProperty.SABBATH_MODE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.SABBATH_MODE, + translation_key=ThinQProperty.SABBATH_MODE, + ), + ThinQProperty.DOOR_STATE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.DOOR_STATE, + device_class=BinarySensorDeviceClass.DOOR, + on_key="open", + ), + ThinQProperty.MACHINE_CLEAN_REMINDER: ThinQBinarySensorEntityDescription( + key=ThinQProperty.MACHINE_CLEAN_REMINDER, + translation_key=ThinQProperty.MACHINE_CLEAN_REMINDER, + on_key="mcreminder_on", + ), + ThinQProperty.SIGNAL_LEVEL: ThinQBinarySensorEntityDescription( + key=ThinQProperty.SIGNAL_LEVEL, + translation_key=ThinQProperty.SIGNAL_LEVEL, + on_key="signallevel_on", + ), + ThinQProperty.CLEAN_LIGHT_REMINDER: ThinQBinarySensorEntityDescription( + key=ThinQProperty.CLEAN_LIGHT_REMINDER, + translation_key=ThinQProperty.CLEAN_LIGHT_REMINDER, + on_key="cleanlreminder_on", + ), + ThinQProperty.HOOD_OPERATION_MODE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.HOOD_OPERATION_MODE, + translation_key="operation_mode", + on_key="power_on", + ), + ThinQProperty.WATER_HEATER_OPERATION_MODE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.WATER_HEATER_OPERATION_MODE, + translation_key="operation_mode", + on_key="power_on", + ), + ThinQProperty.ONE_TOUCH_FILTER: ThinQBinarySensorEntityDescription( + key=ThinQProperty.ONE_TOUCH_FILTER, + translation_key=ThinQProperty.ONE_TOUCH_FILTER, + on_key="on", + ), +} + +DEVICE_TYPE_BINARY_SENSOR_MAP: dict[ + DeviceType, tuple[ThinQBinarySensorEntityDescription, ...] 
+] = { + DeviceType.COOKTOP: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.DISH_WASHER: ( + BINARY_SENSOR_DESC[ThinQProperty.DOOR_STATE], + BINARY_SENSOR_DESC[ThinQProperty.RINSE_REFILL], + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + BINARY_SENSOR_DESC[ThinQProperty.MACHINE_CLEAN_REMINDER], + BINARY_SENSOR_DESC[ThinQProperty.SIGNAL_LEVEL], + BINARY_SENSOR_DESC[ThinQProperty.CLEAN_LIGHT_REMINDER], + ), + DeviceType.DRYER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.HOOD: (BINARY_SENSOR_DESC[ThinQProperty.HOOD_OPERATION_MODE],), + DeviceType.OVEN: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.REFRIGERATOR: ( + BINARY_SENSOR_DESC[ThinQProperty.DOOR_STATE], + BINARY_SENSOR_DESC[ThinQProperty.ECO_FRIENDLY_MODE], + BINARY_SENSOR_DESC[ThinQProperty.POWER_SAVE_ENABLED], + BINARY_SENSOR_DESC[ThinQProperty.SABBATH_MODE], + ), + DeviceType.KIMCHI_REFRIGERATOR: ( + BINARY_SENSOR_DESC[ThinQProperty.ONE_TOUCH_FILTER], + ), + DeviceType.STYLER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.WASHCOMBO_MAIN: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WASHCOMBO_MINI: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WASHER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.WASHTOWER_DRYER: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WASHTOWER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.WASHTOWER_WASHER: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WATER_HEATER: ( + BINARY_SENSOR_DESC[ThinQProperty.WATER_HEATER_OPERATION_MODE], + ), + DeviceType.WINE_CELLAR: (BINARY_SENSOR_DESC[ThinQProperty.SABBATH_MODE],), +} +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for binary sensor platform.""" + entities: list[ThinQBinarySensorEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_BINARY_SENSOR_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQBinarySensorEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, ActiveMode.READ_ONLY + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQBinarySensorEntity(ThinQEntity, BinarySensorEntity): + """Represent a thinq binary sensor platform.""" + + entity_description: ThinQBinarySensorEntityDescription + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + if (key := self.entity_description.on_key) is not None: + self._attr_is_on = self.data.value == key + else: + self._attr_is_on = self.data.is_on + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s", + self.coordinator.device_name, + self.property_id, + self.data.value, + self.is_on, + ) diff --git a/homeassistant/components/lg_thinq/climate.py b/homeassistant/components/lg_thinq/climate.py new file mode 100644 index 00000000000..5cf9ccbd442 --- /dev/null +++ b/homeassistant/components/lg_thinq/climate.py @@ -0,0 +1,322 @@ +"""Support for climate entities.""" + +from __future__ import annotations + +from dataclasses import dataclass +import logging +from typing import Any + +from thinqconnect import 
DeviceType +from thinqconnect.integration import ExtendedProperty + +from homeassistant.components.climate import ( + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, + ClimateEntity, + ClimateEntityDescription, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.temperature import display_temp + +from . import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + + +@dataclass(frozen=True, kw_only=True) +class ThinQClimateEntityDescription(ClimateEntityDescription): + """Describes ThinQ climate entity.""" + + min_temp: float | None = None + max_temp: float | None = None + step: float | None = None + + +DEVICE_TYPE_CLIMATE_MAP: dict[DeviceType, tuple[ThinQClimateEntityDescription, ...]] = { + DeviceType.AIR_CONDITIONER: ( + ThinQClimateEntityDescription( + key=ExtendedProperty.CLIMATE_AIR_CONDITIONER, + name=None, + translation_key=ExtendedProperty.CLIMATE_AIR_CONDITIONER, + ), + ), + DeviceType.SYSTEM_BOILER: ( + ThinQClimateEntityDescription( + key=ExtendedProperty.CLIMATE_SYSTEM_BOILER, + name=None, + min_temp=16, + max_temp=30, + step=1, + ), + ), +} + +STR_TO_HVAC: dict[str, HVACMode] = { + "air_dry": HVACMode.DRY, + "auto": HVACMode.AUTO, + "cool": HVACMode.COOL, + "fan": HVACMode.FAN_ONLY, + "heat": HVACMode.HEAT, +} + +HVAC_TO_STR: dict[HVACMode, str] = { + HVACMode.AUTO: "auto", + HVACMode.COOL: "cool", + HVACMode.DRY: "air_dry", + HVACMode.FAN_ONLY: "fan", + HVACMode.HEAT: "heat", +} + +THINQ_PRESET_MODE: list[str] = ["air_clean", "aroma", "energy_saving"] + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for climate platform.""" + entities: list[ThinQClimateEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_CLIMATE_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQClimateEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx(description.key) + ) + + if entities: + async_add_entities(entities) + + +class ThinQClimateEntity(ThinQEntity, ClimateEntity): + """Represent a thinq climate platform.""" + + entity_description: ThinQClimateEntityDescription + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: ThinQClimateEntityDescription, + property_id: str, + ) -> None: + """Initialize a climate entity.""" + super().__init__(coordinator, entity_description, property_id) + + self._attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + ) + self._attr_hvac_modes = [HVACMode.OFF] + self._attr_hvac_mode = HVACMode.OFF + self._attr_preset_modes = [] + self._attr_temperature_unit = UnitOfTemperature.CELSIUS + self._requested_hvac_mode: str | None = None + + # Set up HVAC modes. + for mode in self.data.hvac_modes: + if mode in STR_TO_HVAC: + self._attr_hvac_modes.append(STR_TO_HVAC[mode]) + elif mode in THINQ_PRESET_MODE: + self._attr_preset_modes.append(mode) + self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE + + # Set up fan modes. 
+ self._attr_fan_modes = self.data.fan_modes + if self.fan_modes: + self._attr_supported_features |= ClimateEntityFeature.FAN_MODE + + # Supports target temperature range. + if self.data.support_temperature_range: + self._attr_supported_features |= ( + ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + ) + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + # Update fan, hvac and preset mode. + if self.supported_features & ClimateEntityFeature.FAN_MODE: + self._attr_fan_mode = self.data.fan_mode + if self.data.is_on: + hvac_mode = self._requested_hvac_mode or self.data.hvac_mode + if hvac_mode in STR_TO_HVAC: + self._attr_hvac_mode = STR_TO_HVAC.get(hvac_mode) + self._attr_preset_mode = None + elif hvac_mode in THINQ_PRESET_MODE: + self._attr_preset_mode = hvac_mode + else: + self._attr_hvac_mode = HVACMode.OFF + self._attr_preset_mode = None + + self.reset_requested_hvac_mode() + self._attr_current_humidity = self.data.humidity + self._attr_current_temperature = self.data.current_temp + + # Update min, max and step. + if (max_temp := self.entity_description.max_temp) is not None or ( + max_temp := self.data.max + ) is not None: + self._attr_max_temp = max_temp + if (min_temp := self.entity_description.min_temp) is not None or ( + min_temp := self.data.min + ) is not None: + self._attr_min_temp = min_temp + if (step := self.entity_description.step) is not None or ( + step := self.data.step + ) is not None: + self._attr_target_temperature_step = step + + # Update target temperatures. + self._attr_target_temperature = self.data.target_temp + self._attr_target_temperature_high = self.data.target_temp_high + self._attr_target_temperature_low = self.data.target_temp_low + + _LOGGER.debug( + "[%s:%s] update status: c:%s, t:%s, l:%s, h:%s, hvac:%s, unit:%s, step:%s", + self.coordinator.device_name, + self.property_id, + self.current_temperature, + self.target_temperature, + self.target_temperature_low, + self.target_temperature_high, + self.hvac_mode, + self.temperature_unit, + self.target_temperature_step, + ) + + def reset_requested_hvac_mode(self) -> None: + """Cancel request to set hvac mode.""" + self._requested_hvac_mode = None + + async def async_turn_on(self) -> None: + """Turn the entity on.""" + _LOGGER.debug( + "[%s:%s] async_turn_on", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_on(self.property_id)) + + async def async_turn_off(self) -> None: + """Turn the entity off.""" + _LOGGER.debug( + "[%s:%s] async_turn_off", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_off(self.property_id)) + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + if hvac_mode == HVACMode.OFF: + await self.async_turn_off() + return + + # If device is off, turn on first. + if not self.data.is_on: + await self.async_turn_on() + + # When we request hvac mode while turning on the device, the previously set + # hvac mode is displayed first and then switches to the requested hvac mode. + # To prevent this, set the requested hvac mode here so that it will be set + # immediately on the next update. 
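+        # The stored value is consumed by _update_status() and cleared again via
+        # reset_requested_hvac_mode(), which is also passed as the on-fail callback
+        # of async_call_api() below.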
+ self._requested_hvac_mode = HVAC_TO_STR.get(hvac_mode) + + _LOGGER.debug( + "[%s:%s] async_set_hvac_mode: %s", + self.coordinator.device_name, + self.property_id, + hvac_mode, + ) + await self.async_call_api( + self.coordinator.api.async_set_hvac_mode( + self.property_id, self._requested_hvac_mode + ), + self.reset_requested_hvac_mode, + ) + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + _LOGGER.debug( + "[%s:%s] async_set_preset_mode: %s", + self.coordinator.device_name, + self.property_id, + preset_mode, + ) + await self.async_call_api( + self.coordinator.api.async_set_hvac_mode(self.property_id, preset_mode) + ) + + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set new target fan mode.""" + _LOGGER.debug( + "[%s:%s] async_set_fan_mode: %s", + self.coordinator.device_name, + self.property_id, + fan_mode, + ) + await self.async_call_api( + self.coordinator.api.async_set_fan_mode(self.property_id, fan_mode) + ) + + def _round_by_step(self, temperature: float) -> float: + """Round the value by step.""" + if ( + target_temp := display_temp( + self.coordinator.hass, + temperature, + self.coordinator.hass.config.units.temperature_unit, + self.target_temperature_step or 1, + ) + ) is not None: + return target_temp + + return temperature + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + _LOGGER.debug( + "[%s:%s] async_set_temperature: %s", + self.coordinator.device_name, + self.property_id, + kwargs, + ) + + if (temperature := kwargs.get(ATTR_TEMPERATURE)) is not None: + if ( + target_temp := self._round_by_step(temperature) + ) != self.target_temperature: + await self.async_call_api( + self.coordinator.api.async_set_target_temperature( + self.property_id, target_temp + ) + ) + + if (temperature_low := kwargs.get(ATTR_TARGET_TEMP_LOW)) is not None: + if ( + target_temp_low := self._round_by_step(temperature_low) + ) != self.target_temperature_low: + await self.async_call_api( + self.coordinator.api.async_set_target_temperature_low( + self.property_id, target_temp_low + ) + ) + + if (temperature_high := kwargs.get(ATTR_TARGET_TEMP_HIGH)) is not None: + if ( + target_temp_high := self._round_by_step(temperature_high) + ) != self.target_temperature_high: + await self.async_call_api( + self.coordinator.api.async_set_target_temperature_high( + self.property_id, target_temp_high + ) + ) diff --git a/homeassistant/components/lg_thinq/config_flow.py b/homeassistant/components/lg_thinq/config_flow.py new file mode 100644 index 00000000000..3bbcf3cd226 --- /dev/null +++ b/homeassistant/components/lg_thinq/config_flow.py @@ -0,0 +1,111 @@ +"""Config flow for LG ThinQ.""" + +from __future__ import annotations + +import logging +from typing import Any +import uuid + +from thinqconnect import ThinQApi, ThinQAPIErrorCodes, ThinQAPIException +from thinqconnect.country import Country +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import CountrySelector, CountrySelectorConfig + +from .const import ( + CLIENT_PREFIX, + CONF_CONNECT_CLIENT_ID, + DEFAULT_COUNTRY, + DOMAIN, + THINQ_DEFAULT_NAME, + THINQ_PAT_URL, +) + +SUPPORTED_COUNTRIES = [country.value for country in Country] +THINQ_ERRORS = { + 
ThinQAPIErrorCodes.INVALID_TOKEN: "invalid_token", + ThinQAPIErrorCodes.NOT_ACCEPTABLE_TERMS: "not_acceptable_terms", + ThinQAPIErrorCodes.NOT_ALLOWED_API_AGAIN: "not_allowed_api_again", + ThinQAPIErrorCodes.NOT_SUPPORTED_COUNTRY: "not_supported_country", + ThinQAPIErrorCodes.EXCEEDED_API_CALLS: "exceeded_api_calls", +} + +_LOGGER = logging.getLogger(__name__) + + +class ThinQFlowHandler(ConfigFlow, domain=DOMAIN): + """Handle a config flow.""" + + VERSION = 1 + + def _get_default_country_code(self) -> str: + """Get the default country code based on config.""" + country = self.hass.config.country + if country is not None and country in SUPPORTED_COUNTRIES: + return country + + return DEFAULT_COUNTRY + + async def _validate_and_create_entry( + self, access_token: str, country_code: str + ) -> ConfigFlowResult: + """Create an entry for the flow.""" + connect_client_id = f"{CLIENT_PREFIX}-{uuid.uuid4()!s}" + + # To verify PAT, create an api to retrieve the device list. + await ThinQApi( + session=async_get_clientsession(self.hass), + access_token=access_token, + country_code=country_code, + client_id=connect_client_id, + ).async_get_device_list() + + # If verification is success, create entry. + return self.async_create_entry( + title=THINQ_DEFAULT_NAME, + data={ + CONF_ACCESS_TOKEN: access_token, + CONF_CONNECT_CLIENT_ID: connect_client_id, + CONF_COUNTRY: country_code, + }, + ) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors: dict[str, str] = {} + + if user_input is not None: + access_token = user_input[CONF_ACCESS_TOKEN] + country_code = user_input[CONF_COUNTRY] + + # Check if PAT is already configured. + await self.async_set_unique_id(access_token) + self._abort_if_unique_id_configured() + + try: + return await self._validate_and_create_entry(access_token, country_code) + except ThinQAPIException as exc: + errors["base"] = THINQ_ERRORS.get(exc.code, "token_unauthorized") + _LOGGER.error("Failed to validate access_token %s", exc) + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required(CONF_ACCESS_TOKEN): cv.string, + vol.Required( + CONF_COUNTRY, default=self._get_default_country_code() + ): CountrySelector( + CountrySelectorConfig(countries=SUPPORTED_COUNTRIES) + ), + } + ), + description_placeholders={"pat_url": THINQ_PAT_URL}, + errors=errors, + ) diff --git a/homeassistant/components/lg_thinq/const.py b/homeassistant/components/lg_thinq/const.py new file mode 100644 index 00000000000..a65dee715db --- /dev/null +++ b/homeassistant/components/lg_thinq/const.py @@ -0,0 +1,20 @@ +"""Constants for LG ThinQ.""" + +from datetime import timedelta +from typing import Final + +# Config flow +DOMAIN = "lg_thinq" +COMPANY = "LGE" +DEFAULT_COUNTRY: Final = "US" +THINQ_DEFAULT_NAME: Final = "LG ThinQ" +THINQ_PAT_URL: Final = "https://connect-pat.lgthinq.com" +CLIENT_PREFIX: Final = "home-assistant" +CONF_CONNECT_CLIENT_ID: Final = "connect_client_id" + +# MQTT +MQTT_SUBSCRIPTION_INTERVAL: Final = timedelta(days=1) + +# MQTT: Message types +DEVICE_PUSH_MESSAGE: Final = "DEVICE_PUSH" +DEVICE_STATUS_MESSAGE: Final = "DEVICE_STATUS" diff --git a/homeassistant/components/lg_thinq/coordinator.py b/homeassistant/components/lg_thinq/coordinator.py new file mode 100644 index 00000000000..9f317dc21d9 --- /dev/null +++ b/homeassistant/components/lg_thinq/coordinator.py @@ -0,0 +1,85 @@ +"""DataUpdateCoordinator for the LG ThinQ device.""" + +from __future__ 
import annotations + +import logging +from typing import Any + +from thinqconnect import ThinQAPIException +from thinqconnect.integration import HABridge + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """LG Device's Data Update Coordinator.""" + + def __init__(self, hass: HomeAssistant, ha_bridge: HABridge) -> None: + """Initialize data coordinator.""" + super().__init__( + hass, + _LOGGER, + name=f"{DOMAIN}_{ha_bridge.device.device_id}", + ) + + self.data = {} + self.api = ha_bridge + self.device_id = ha_bridge.device.device_id + self.sub_id = ha_bridge.sub_id + + alias = ha_bridge.device.alias + + # The device name is usually set to 'alias'. + # But, if the sub_id exists, it will be set to 'alias {sub_id}'. + # e.g. alias='MyWashTower', sub_id='dryer' then 'MyWashTower dryer'. + self.device_name = f"{alias} {self.sub_id}" if self.sub_id else alias + + # The unique id is usually set to 'device_id'. + # But, if the sub_id exists, it will be set to 'device_id_{sub_id}'. + # e.g. device_id='TQSXXXX', sub_id='dryer' then 'TQSXXXX_dryer'. + self.unique_id = ( + f"{self.device_id}_{self.sub_id}" if self.sub_id else self.device_id + ) + + async def _async_update_data(self) -> dict[str, Any]: + """Request to the server to update the status from full response data.""" + try: + return await self.api.fetch_data() + except ThinQAPIException as e: + raise UpdateFailed(e) from e + + def refresh_status(self) -> None: + """Refresh current status.""" + self.async_set_updated_data(self.data) + + def handle_update_status(self, status: dict[str, Any]) -> None: + """Handle the status received from the mqtt connection.""" + data = self.api.update_status(status) + if data is not None: + self.async_set_updated_data(data) + + def handle_notification_message(self, message: str | None) -> None: + """Handle the status received from the mqtt connection.""" + data = self.api.update_notification(message) + if data is not None: + self.async_set_updated_data(data) + + +async def async_setup_device_coordinator( + hass: HomeAssistant, ha_bridge: HABridge +) -> DeviceDataUpdateCoordinator: + """Create DeviceDataUpdateCoordinator and device_api per device.""" + coordinator = DeviceDataUpdateCoordinator(hass, ha_bridge) + await coordinator.async_refresh() + + _LOGGER.debug( + "Setup device's coordinator: %s, model:%s", + coordinator.device_name, + coordinator.api.device.model_name, + ) + return coordinator diff --git a/homeassistant/components/lg_thinq/entity.py b/homeassistant/components/lg_thinq/entity.py new file mode 100644 index 00000000000..7856506559b --- /dev/null +++ b/homeassistant/components/lg_thinq/entity.py @@ -0,0 +1,114 @@ +"""Base class for ThinQ entities.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +import logging +from typing import Any + +from thinqconnect import ThinQAPIException +from thinqconnect.devices.const import Location +from thinqconnect.integration import PropertyState + +from homeassistant.const import UnitOfTemperature +from homeassistant.core import callback +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + 
+from .const import COMPANY, DOMAIN +from .coordinator import DeviceDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + +EMPTY_STATE = PropertyState() + +UNIT_CONVERSION_MAP: dict[str, str] = { + "F": UnitOfTemperature.FAHRENHEIT, + "C": UnitOfTemperature.CELSIUS, +} + + +class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]): + """The base implementation of all lg thinq entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: EntityDescription, + property_id: str, + ) -> None: + """Initialize an entity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + self.property_id = property_id + self.location = self.coordinator.api.get_location_for_idx(self.property_id) + + self._attr_device_info = dr.DeviceInfo( + identifiers={(DOMAIN, coordinator.unique_id)}, + manufacturer=COMPANY, + model=f"{coordinator.api.device.model_name} ({self.coordinator.api.device.device_type})", + name=coordinator.device_name, + ) + self._attr_unique_id = f"{coordinator.unique_id}_{self.property_id}" + if self.location is not None and self.location not in ( + Location.MAIN, + Location.OVEN, + coordinator.sub_id, + ): + self._attr_translation_placeholders = {"location": self.location} + self._attr_translation_key = ( + f"{entity_description.translation_key}_for_location" + ) + + @property + def data(self) -> PropertyState: + """Return the state data of entity.""" + return self.coordinator.data.get(self.property_id, EMPTY_STATE) + + def _get_unit_of_measurement(self, unit: str | None) -> str | None: + """Convert thinq unit string to HA unit string.""" + if unit is None: + return None + + return UNIT_CONVERSION_MAP.get(unit) + + def _update_status(self) -> None: + """Update status itself. + + All inherited classes can update their own status in here. + """ + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_status() + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + self._handle_coordinator_update() + + async def async_call_api( + self, + target: Coroutine[Any, Any, Any], + on_fail_method: Callable[[], None] | None = None, + ) -> None: + """Call the given api and handle exception.""" + try: + await target + except ThinQAPIException as exc: + if on_fail_method: + on_fail_method() + raise ServiceValidationError( + exc.message, translation_domain=DOMAIN, translation_key=exc.code + ) from exc + except ValueError as exc: + if on_fail_method: + on_fail_method() + raise ServiceValidationError(exc) from exc diff --git a/homeassistant/components/lg_thinq/event.py b/homeassistant/components/lg_thinq/event.py new file mode 100644 index 00000000000..b963cba37cc --- /dev/null +++ b/homeassistant/components/lg_thinq/event.py @@ -0,0 +1,115 @@ +"""Support for event entity.""" + +from __future__ import annotations + +import logging + +from thinqconnect import DeviceType +from thinqconnect.integration import ActiveMode, ThinQPropertyEx + +from homeassistant.components.event import EventEntity, EventEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + +NOTIFICATION_EVENT_DESC = EventEntityDescription( + key=ThinQPropertyEx.NOTIFICATION, + translation_key=ThinQPropertyEx.NOTIFICATION, +) +ERROR_EVENT_DESC = EventEntityDescription( + key=ThinQPropertyEx.ERROR, + translation_key=ThinQPropertyEx.ERROR, +) +ALL_EVENTS: tuple[EventEntityDescription, ...] = ( + ERROR_EVENT_DESC, + NOTIFICATION_EVENT_DESC, +) +DEVICE_TYPE_EVENT_MAP: dict[DeviceType, tuple[EventEntityDescription, ...]] = { + DeviceType.AIR_CONDITIONER: (NOTIFICATION_EVENT_DESC,), + DeviceType.AIR_PURIFIER_FAN: (NOTIFICATION_EVENT_DESC,), + DeviceType.AIR_PURIFIER: (NOTIFICATION_EVENT_DESC,), + DeviceType.DEHUMIDIFIER: (NOTIFICATION_EVENT_DESC,), + DeviceType.DISH_WASHER: ALL_EVENTS, + DeviceType.DRYER: ALL_EVENTS, + DeviceType.HUMIDIFIER: (NOTIFICATION_EVENT_DESC,), + DeviceType.KIMCHI_REFRIGERATOR: (NOTIFICATION_EVENT_DESC,), + DeviceType.MICROWAVE_OVEN: (NOTIFICATION_EVENT_DESC,), + DeviceType.OVEN: (NOTIFICATION_EVENT_DESC,), + DeviceType.REFRIGERATOR: (NOTIFICATION_EVENT_DESC,), + DeviceType.ROBOT_CLEANER: ALL_EVENTS, + DeviceType.STICK_CLEANER: (NOTIFICATION_EVENT_DESC,), + DeviceType.STYLER: ALL_EVENTS, + DeviceType.WASHCOMBO_MAIN: ALL_EVENTS, + DeviceType.WASHCOMBO_MINI: ALL_EVENTS, + DeviceType.WASHER: ALL_EVENTS, + DeviceType.WASHTOWER_DRYER: ALL_EVENTS, + DeviceType.WASHTOWER: ALL_EVENTS, + DeviceType.WASHTOWER_WASHER: ALL_EVENTS, + DeviceType.WINE_CELLAR: (NOTIFICATION_EVENT_DESC,), +} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for event platform.""" + entities: list[ThinQEventEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_EVENT_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQEventEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, ActiveMode.READ_ONLY + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQEventEntity(ThinQEntity, EventEntity): + """Represent an thinq event platform.""" + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: EventEntityDescription, + property_id: str, + ) -> None: + """Initialize an event platform.""" + super().__init__(coordinator, entity_description, property_id) + + # For event types. + self._attr_event_types = self.data.options + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + _LOGGER.debug( + "[%s:%s] update status: %s, event_types=%s", + self.coordinator.device_name, + self.property_id, + self.data.value, + self.event_types, + ) + # Handle an event. 
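+        # Only a value that is listed in event_types (the option list reported by
+        # the device) is forwarded to _trigger_event().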
+ if (value := self.data.value) is not None and value in self.event_types: + self._async_handle_update(value) + + def _async_handle_update(self, value: str) -> None: + """Handle the event.""" + self._trigger_event(value) + self.async_write_ha_state() diff --git a/homeassistant/components/lg_thinq/fan.py b/homeassistant/components/lg_thinq/fan.py new file mode 100644 index 00000000000..edcadf2598a --- /dev/null +++ b/homeassistant/components/lg_thinq/fan.py @@ -0,0 +1,153 @@ +"""Support for fan entities.""" + +from __future__ import annotations + +import logging +from typing import Any + +from thinqconnect import DeviceType +from thinqconnect.integration import ExtendedProperty + +from homeassistant.components.fan import ( + FanEntity, + FanEntityDescription, + FanEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.percentage import ( + ordered_list_item_to_percentage, + percentage_to_ordered_list_item, +) + +from . import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + +DEVICE_TYPE_FAN_MAP: dict[DeviceType, tuple[FanEntityDescription, ...]] = { + DeviceType.CEILING_FAN: ( + FanEntityDescription( + key=ExtendedProperty.FAN, + name=None, + ), + ), +} + +FOUR_STEP_SPEEDS = ["low", "mid", "high", "turbo"] + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for fan platform.""" + entities: list[ThinQFanEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_FAN_MAP.get(coordinator.api.device.device_type) + ) is not None: + for description in descriptions: + entities.extend( + ThinQFanEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx(description.key) + ) + + if entities: + async_add_entities(entities) + + +class ThinQFanEntity(ThinQEntity, FanEntity): + """Represent a thinq fan platform.""" + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: FanEntityDescription, + property_id: str, + ) -> None: + """Initialize fan platform.""" + super().__init__(coordinator, entity_description, property_id) + + self._ordered_named_fan_speeds = [] + self._attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + ) + if (fan_modes := self.data.fan_modes) is not None: + self._attr_speed_count = len(fan_modes) + if self.speed_count == 4: + self._ordered_named_fan_speeds = FOUR_STEP_SPEEDS + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + # Update power on state. + self._attr_is_on = self.data.is_on + + # Update fan speed. 
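+        # Translate the named speed into a percentage; 0 is reported while the fan
+        # is off or the current mode is not one of the named four-step speeds.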
+ if ( + self.data.is_on + and (mode := self.data.fan_mode) in self._ordered_named_fan_speeds + ): + self._attr_percentage = ordered_list_item_to_percentage( + self._ordered_named_fan_speeds, mode + ) + else: + self._attr_percentage = 0 + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s (percentage=%s)", + self.coordinator.device_name, + self.property_id, + self.data.is_on, + self.is_on, + self.percentage, + ) + + async def async_set_percentage(self, percentage: int) -> None: + """Set the speed percentage of the fan.""" + if percentage == 0: + await self.async_turn_off() + return + try: + value = percentage_to_ordered_list_item( + self._ordered_named_fan_speeds, percentage + ) + except ValueError: + _LOGGER.exception("Failed to async_set_percentage") + return + + _LOGGER.debug( + "[%s:%s] async_set_percentage. percentage=%s, value=%s", + self.coordinator.device_name, + self.property_id, + percentage, + value, + ) + await self.async_call_api( + self.coordinator.api.async_set_fan_mode(self.property_id, value) + ) + + async def async_turn_on( + self, + percentage: int | None = None, + preset_mode: str | None = None, + **kwargs: Any, + ) -> None: + """Turn on the fan.""" + _LOGGER.debug( + "[%s:%s] async_turn_on", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_on(self.property_id)) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the fan off.""" + _LOGGER.debug( + "[%s:%s] async_turn_off", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_off(self.property_id)) diff --git a/homeassistant/components/lg_thinq/icons.json b/homeassistant/components/lg_thinq/icons.json new file mode 100644 index 00000000000..87cf04e0c1a --- /dev/null +++ b/homeassistant/components/lg_thinq/icons.json @@ -0,0 +1,407 @@ +{ + "entity": { + "switch": { + "auto_mode": { + "default": "mdi:cogs" + }, + "express_mode": { + "default": "mdi:snowflake-variant" + }, + "hot_water_mode": { + "default": "mdi:list-status" + }, + "humidity_warm_mode": { + "default": "mdi:heat-wave" + }, + "hygiene_dry_mode": { + "default": "mdi:format-list-bulleted" + }, + "mood_lamp_state": { + "default": "mdi:lamp" + }, + "operation_power": { + "default": "mdi:power" + }, + "optimal_humidity": { + "default": "mdi:water-percent" + }, + "power_save_enabled": { + "default": "mdi:hydro-power" + }, + "rapid_freeze": { + "default": "mdi:snowflake" + }, + "sleep_mode": { + "default": "mdi:format-list-bulleted" + }, + "uv_nano": { + "default": "mdi:air-filter" + }, + "warm_mode": { + "default": "mdi:heat-wave" + } + }, + "binary_sensor": { + "eco_friendly_mode": { + "default": "mdi:sprout" + }, + "power_save_enabled": { + "default": "mdi:meter-electric" + }, + "remote_control_enabled": { + "default": "mdi:remote" + }, + "remote_control_enabled_for_location": { + "default": "mdi:remote" + }, + "rinse_refill": { + "default": "mdi:tune-vertical-variant" + }, + "sabbath_mode": { + "default": "mdi:food-off-outline" + }, + "machine_clean_reminder": { + "default": "mdi:tune-vertical-variant" + }, + "signal_level": { + "default": "mdi:tune-vertical-variant" + }, + "clean_light_reminder": { + "default": "mdi:tune-vertical-variant" + }, + "operation_mode": { + "default": "mdi:power" + }, + "one_touch_filter": { + "default": "mdi:air-filter" + } + }, + "climate": { + "climate_air_conditioner": { + "state_attributes": { + "fan_mode": { + "state": { + "slow": "mdi:fan-chevron-down", + "low": "mdi:fan-speed-1", + "mid": 
"mdi:fan-speed-2", + "high": "mdi:fan-speed-3", + "power": "mdi:fan-chevron-up", + "auto": "mdi:fan-auto" + } + } + } + } + }, + "event": { + "error": { + "default": "mdi:alert-circle-outline" + }, + "notification": { + "default": "mdi:message-badge-outline" + } + }, + "number": { + "target_temperature": { + "default": "mdi:thermometer" + }, + "target_temperature_for_location": { + "default": "mdi:thermometer" + }, + "light_status": { + "default": "mdi:television-ambient-light" + }, + "fan_speed": { + "default": "mdi:wind-power-outline" + }, + "lamp_brightness": { + "default": "mdi:alarm-light-outline" + }, + "wind_temperature": { + "default": "mdi:thermometer" + }, + "relative_hour_to_start": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_start_for_location": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_start_wm": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_start_wm_for_location": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_stop": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_stop_for_location": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_stop_wm": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_stop_wm_for_location": { + "default": "mdi:timer-edit-outline" + }, + "sleep_timer_relative_hour_to_stop": { + "default": "mdi:bed-clock" + }, + "sleep_timer_relative_hour_to_stop_for_location": { + "default": "mdi:bed-clock" + } + }, + "select": { + "wind_strength": { + "default": "mdi:wind-power-outline" + }, + "monitoring_enabled": { + "default": "mdi:monitor-eye" + }, + "current_job_mode": { + "default": "mdi:format-list-bulleted" + }, + "operation_mode": { + "default": "mdi:gesture-tap-button" + }, + "operation_mode_for_location": { + "default": "mdi:gesture-tap-button" + }, + "air_clean_operation_mode": { + "default": "mdi:air-filter" + }, + "cook_mode": { + "default": "mdi:chef-hat" + }, + "cook_mode_for_location": { + "default": "mdi:chef-hat" + }, + "light_brightness": { + "default": "mdi:list-status" + }, + "wind_angle": { + "default": "mdi:rotate-360" + }, + "display_light": { + "default": "mdi:brightness-6" + }, + "fresh_air_filter": { + "default": "mdi:air-filter" + }, + "hygiene_dry_mode": { + "default": "mdi:format-list-bulleted" + } + }, + "sensor": { + "odor_level": { + "default": "mdi:scent" + }, + "current_temperature": { + "default": "mdi:thermometer" + }, + "temperature": { + "default": "mdi:thermometer" + }, + "total_pollution_level": { + "default": "mdi:air-filter" + }, + "monitoring_enabled": { + "default": "mdi:monitor-eye" + }, + "growth_mode": { + "default": "mdi:sprout-outline" + }, + "growth_mode_for_location": { + "default": "mdi:sprout-outline" + }, + "wind_volume": { + "default": "mdi:wind-power-outline" + }, + "wind_volume_for_location": { + "default": "mdi:wind-power-outline" + }, + "brightness": { + "default": "mdi:tune-vertical-variant" + }, + "brightness_for_location": { + "default": "mdi:tune-vertical-variant" + }, + "duration": { + "default": "mdi:tune-vertical-variant" + }, + "duration_for_location": { + "default": "mdi:tune-vertical-variant" + }, + "day_target_temperature": { + "default": "mdi:thermometer" + }, + "day_target_temperature_for_location": { + "default": "mdi:thermometer" + }, + "night_target_temperature": { + "default": "mdi:thermometer" + }, + "night_target_temperature_for_location": { + "default": "mdi:thermometer" + }, + "temperature_state": { + "default": "mdi:thermometer" + }, + 
"temperature_state_for_location": { + "default": "mdi:thermometer" + }, + "current_state": { + "default": "mdi:list-status" + }, + "current_state_for_location": { + "default": "mdi:list-status" + }, + "fresh_air_filter": { + "default": "mdi:air-filter" + }, + "filter_lifetime": { + "default": "mdi:air-filter" + }, + "used_time": { + "default": "mdi:air-filter" + }, + "current_job_mode": { + "default": "mdi:dots-circle" + }, + "current_job_mode_stick_cleaner": { + "default": "mdi:dots-circle" + }, + "personalization_mode": { + "default": "mdi:dots-circle" + }, + "current_dish_washing_course": { + "default": "mdi:format-list-checks" + }, + "rinse_level": { + "default": "mdi:tune-vertical-variant" + }, + "softening_level": { + "default": "mdi:tune-vertical-variant" + }, + "cock_state": { + "default": "mdi:air-filter" + }, + "sterilizing_state": { + "default": "mdi:water-alert-outline" + }, + "water_type": { + "default": "mdi:water" + }, + "target_temperature": { + "default": "mdi:thermometer" + }, + "target_temperature_for_location": { + "default": "mdi:thermometer" + }, + "elapsed_day_state": { + "default": "mdi:calendar-range-outline" + }, + "elapsed_day_total": { + "default": "mdi:calendar-range-outline" + }, + "recipe_name": { + "default": "mdi:information-box-outline" + }, + "wort_info": { + "default": "mdi:information-box-outline" + }, + "yeast_info": { + "default": "mdi:information-box-outline" + }, + "hop_oil_info": { + "default": "mdi:information-box-outline" + }, + "flavor_info": { + "default": "mdi:information-box-outline" + }, + "beer_remain": { + "default": "mdi:glass-mug-variant" + }, + "battery_level": { + "default": "mdi:battery-medium" + }, + "relative_to_start": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_start_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_start_wm": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_start_wm_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_stop": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_stop_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_stop_wm": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_stop_wm_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "sleep_timer_relative_to_stop": { + "default": "mdi:bed-clock" + }, + "sleep_timer_relative_to_stop_for_location": { + "default": "mdi:bed-clock" + }, + "absolute_to_start": { + "default": "mdi:clock-time-three-outline" + }, + "absolute_to_start_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "absolute_to_stop": { + "default": "mdi:clock-time-three-outline" + }, + "absolute_to_stop_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "remain": { + "default": "mdi:timer-sand" + }, + "remain_for_location": { + "default": "mdi:timer-sand" + }, + "running": { + "default": "mdi:timer-play-outline" + }, + "running_for_location": { + "default": "mdi:timer-play-outline" + }, + "total": { + "default": "mdi:timer-play-outline" + }, + "total_for_location": { + "default": "mdi:timer-play-outline" + }, + "target": { + "default": "mdi:clock-time-three-outline" + }, + "target_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "light_start": { + "default": "mdi:clock-time-three-outline" + }, + "light_start_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "power_level": { + "default": "mdi:radiator" + }, + "power_level_for_location": { + 
"default": "mdi:radiator" + } + } + } +} diff --git a/homeassistant/components/lg_thinq/manifest.json b/homeassistant/components/lg_thinq/manifest.json new file mode 100644 index 00000000000..6dd60909c66 --- /dev/null +++ b/homeassistant/components/lg_thinq/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "lg_thinq", + "name": "LG ThinQ", + "codeowners": ["@LG-ThinQ-Integration"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/lg_thinq", + "iot_class": "cloud_push", + "loggers": ["thinqconnect"], + "requirements": ["thinqconnect==1.0.2"] +} diff --git a/homeassistant/components/lg_thinq/mqtt.py b/homeassistant/components/lg_thinq/mqtt.py new file mode 100644 index 00000000000..8759869aad3 --- /dev/null +++ b/homeassistant/components/lg_thinq/mqtt.py @@ -0,0 +1,191 @@ +"""Support for LG ThinQ Connect API.""" + +from __future__ import annotations + +import asyncio +from datetime import datetime +import json +import logging +from typing import Any + +from thinqconnect import ( + DeviceType, + ThinQApi, + ThinQAPIErrorCodes, + ThinQAPIException, + ThinQMQTTClient, +) + +from homeassistant.core import Event, HomeAssistant + +from .const import DEVICE_PUSH_MESSAGE, DEVICE_STATUS_MESSAGE +from .coordinator import DeviceDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +class ThinQMQTT: + """A class that implements MQTT connection.""" + + def __init__( + self, + hass: HomeAssistant, + thinq_api: ThinQApi, + client_id: str, + coordinators: dict[str, DeviceDataUpdateCoordinator], + ) -> None: + """Initialize a mqtt.""" + self.hass = hass + self.thinq_api = thinq_api + self.client_id = client_id + self.coordinators = coordinators + self.client: ThinQMQTTClient | None = None + + async def async_connect(self) -> bool: + """Create a mqtt client and then try to connect.""" + try: + self.client = await ThinQMQTTClient( + self.thinq_api, self.client_id, self.on_message_received + ) + if self.client is None: + return False + + # Connect to server and create certificate. + return await self.client.async_prepare_mqtt() + except (ThinQAPIException, TypeError, ValueError): + _LOGGER.exception("Failed to connect") + return False + + async def async_disconnect(self, event: Event | None = None) -> None: + """Unregister client and disconnects handlers.""" + await self.async_end_subscribes() + + if self.client is not None: + try: + await self.client.async_disconnect() + except (ThinQAPIException, TypeError, ValueError): + _LOGGER.exception("Failed to disconnect") + + def _get_failed_device_count( + self, results: list[dict | BaseException | None] + ) -> int: + """Check if there exists errors while performing tasks and then return count.""" + # Note that result code '1207' means 'Already subscribed push' + # and is not actually fail. 
+ return sum( + isinstance(result, (TypeError, ValueError)) + or ( + isinstance(result, ThinQAPIException) + and result.code != ThinQAPIErrorCodes.ALREADY_SUBSCRIBED_PUSH + ) + for result in results + ) + + async def async_refresh_subscribe(self, now: datetime | None = None) -> None: + """Update event subscribes.""" + _LOGGER.debug("async_refresh_subscribe: now=%s", now) + + tasks = [ + self.hass.async_create_task( + self.thinq_api.async_post_event_subscribe(coordinator.device_id) + ) + for coordinator in self.coordinators.values() + ] + if tasks: + results = await asyncio.gather(*tasks, return_exceptions=True) + if (count := self._get_failed_device_count(results)) > 0: + _LOGGER.error("Failed to refresh subscription on %s devices", count) + + async def async_start_subscribes(self) -> None: + """Start push/event subscribes.""" + _LOGGER.debug("async_start_subscribes") + + if self.client is None: + _LOGGER.error("Failed to start subscription: No client") + return + + tasks = [ + self.hass.async_create_task( + self.thinq_api.async_post_push_subscribe(coordinator.device_id) + ) + for coordinator in self.coordinators.values() + ] + tasks.extend( + self.hass.async_create_task( + self.thinq_api.async_post_event_subscribe(coordinator.device_id) + ) + for coordinator in self.coordinators.values() + ) + if tasks: + results = await asyncio.gather(*tasks, return_exceptions=True) + if (count := self._get_failed_device_count(results)) > 0: + _LOGGER.error("Failed to start subscription on %s devices", count) + + await self.client.async_connect_mqtt() + + async def async_end_subscribes(self) -> None: + """Start push/event unsubscribes.""" + _LOGGER.debug("async_end_subscribes") + + tasks = [ + self.hass.async_create_task( + self.thinq_api.async_delete_push_subscribe(coordinator.device_id) + ) + for coordinator in self.coordinators.values() + ] + tasks.extend( + self.hass.async_create_task( + self.thinq_api.async_delete_event_subscribe(coordinator.device_id) + ) + for coordinator in self.coordinators.values() + ) + if tasks: + results = await asyncio.gather(*tasks, return_exceptions=True) + if (count := self._get_failed_device_count(results)) > 0: + _LOGGER.error("Failed to end subscription on %s devices", count) + + def on_message_received( + self, + topic: str, + payload: bytes, + dup: bool, + qos: Any, + retain: bool, + **kwargs: dict, + ) -> None: + """Handle the received message that matching the topic.""" + decoded = payload.decode() + try: + message = json.loads(decoded) + except ValueError: + _LOGGER.error("Failed to parse message: payload=%s", decoded) + return + + asyncio.run_coroutine_threadsafe( + self.async_handle_device_event(message), self.hass.loop + ).result() + + async def async_handle_device_event(self, message: dict) -> None: + """Handle received mqtt message.""" + unique_id = ( + f"{message["deviceId"]}_{list(message["report"].keys())[0]}" + if message["deviceType"] == DeviceType.WASHTOWER + else message["deviceId"] + ) + coordinator = self.coordinators.get(unique_id) + if coordinator is None: + _LOGGER.error("Failed to handle device event: No device") + return + + _LOGGER.debug( + "async_handle_device_event: %s, model:%s, message=%s", + coordinator.device_name, + coordinator.api.device.model_name, + message, + ) + push_type = message.get("pushType") + + if push_type == DEVICE_STATUS_MESSAGE: + coordinator.handle_update_status(message.get("report", {})) + elif push_type == DEVICE_PUSH_MESSAGE: + coordinator.handle_notification_message(message.get("pushCode")) diff --git 
a/homeassistant/components/lg_thinq/number.py b/homeassistant/components/lg_thinq/number.py new file mode 100644 index 00000000000..634c1a8fe84 --- /dev/null +++ b/homeassistant/components/lg_thinq/number.py @@ -0,0 +1,224 @@ +"""Support for number entities.""" + +from __future__ import annotations + +import logging + +from thinqconnect import DeviceType +from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration import ActiveMode, TimerProperty + +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, + NumberMode, +) +from homeassistant.const import PERCENTAGE, UnitOfTemperature, UnitOfTime +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ThinqConfigEntry +from .entity import ThinQEntity + +NUMBER_DESC: dict[ThinQProperty, NumberEntityDescription] = { + ThinQProperty.FAN_SPEED: NumberEntityDescription( + key=ThinQProperty.FAN_SPEED, + translation_key=ThinQProperty.FAN_SPEED, + ), + ThinQProperty.LAMP_BRIGHTNESS: NumberEntityDescription( + key=ThinQProperty.LAMP_BRIGHTNESS, + translation_key=ThinQProperty.LAMP_BRIGHTNESS, + ), + ThinQProperty.LIGHT_STATUS: NumberEntityDescription( + key=ThinQProperty.LIGHT_STATUS, + native_unit_of_measurement=PERCENTAGE, + translation_key=ThinQProperty.LIGHT_STATUS, + ), + ThinQProperty.TARGET_HUMIDITY: NumberEntityDescription( + key=ThinQProperty.TARGET_HUMIDITY, + device_class=NumberDeviceClass.HUMIDITY, + native_unit_of_measurement=PERCENTAGE, + translation_key=ThinQProperty.TARGET_HUMIDITY, + ), + ThinQProperty.TARGET_TEMPERATURE: NumberEntityDescription( + key=ThinQProperty.TARGET_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + translation_key=ThinQProperty.TARGET_TEMPERATURE, + ), + ThinQProperty.WIND_TEMPERATURE: NumberEntityDescription( + key=ThinQProperty.WIND_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + translation_key=ThinQProperty.WIND_TEMPERATURE, + ), +} +TIMER_NUMBER_DESC: dict[ThinQProperty, NumberEntityDescription] = { + ThinQProperty.RELATIVE_HOUR_TO_START: NumberEntityDescription( + key=ThinQProperty.RELATIVE_HOUR_TO_START, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.RELATIVE_HOUR_TO_START, + ), + TimerProperty.RELATIVE_HOUR_TO_START_WM: NumberEntityDescription( + key=ThinQProperty.RELATIVE_HOUR_TO_START, + native_min_value=0, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=TimerProperty.RELATIVE_HOUR_TO_START_WM, + ), + ThinQProperty.RELATIVE_HOUR_TO_STOP: NumberEntityDescription( + key=ThinQProperty.RELATIVE_HOUR_TO_STOP, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.RELATIVE_HOUR_TO_STOP, + ), + TimerProperty.RELATIVE_HOUR_TO_STOP_WM: NumberEntityDescription( + key=ThinQProperty.RELATIVE_HOUR_TO_STOP, + native_min_value=0, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=TimerProperty.RELATIVE_HOUR_TO_STOP_WM, + ), + ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP: NumberEntityDescription( + key=ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP, + ), +} +WASHER_NUMBERS: tuple[NumberEntityDescription, ...] 
= ( + TIMER_NUMBER_DESC[TimerProperty.RELATIVE_HOUR_TO_START_WM], + TIMER_NUMBER_DESC[TimerProperty.RELATIVE_HOUR_TO_STOP_WM], +) + +DEVICE_TYPE_NUMBER_MAP: dict[DeviceType, tuple[NumberEntityDescription, ...]] = { + DeviceType.AIR_CONDITIONER: ( + TIMER_NUMBER_DESC[ThinQProperty.RELATIVE_HOUR_TO_START], + TIMER_NUMBER_DESC[ThinQProperty.RELATIVE_HOUR_TO_STOP], + TIMER_NUMBER_DESC[ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP], + ), + DeviceType.AIR_PURIFIER_FAN: ( + NUMBER_DESC[ThinQProperty.WIND_TEMPERATURE], + TIMER_NUMBER_DESC[ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP], + ), + DeviceType.DRYER: WASHER_NUMBERS, + DeviceType.HOOD: ( + NUMBER_DESC[ThinQProperty.LAMP_BRIGHTNESS], + NUMBER_DESC[ThinQProperty.FAN_SPEED], + ), + DeviceType.HUMIDIFIER: ( + NUMBER_DESC[ThinQProperty.TARGET_HUMIDITY], + TIMER_NUMBER_DESC[ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP], + ), + DeviceType.MICROWAVE_OVEN: ( + NUMBER_DESC[ThinQProperty.LAMP_BRIGHTNESS], + NUMBER_DESC[ThinQProperty.FAN_SPEED], + ), + DeviceType.OVEN: (NUMBER_DESC[ThinQProperty.TARGET_TEMPERATURE],), + DeviceType.REFRIGERATOR: (NUMBER_DESC[ThinQProperty.TARGET_TEMPERATURE],), + DeviceType.STYLER: (TIMER_NUMBER_DESC[TimerProperty.RELATIVE_HOUR_TO_STOP_WM],), + DeviceType.WASHCOMBO_MAIN: WASHER_NUMBERS, + DeviceType.WASHCOMBO_MINI: WASHER_NUMBERS, + DeviceType.WASHER: WASHER_NUMBERS, + DeviceType.WASHTOWER_DRYER: WASHER_NUMBERS, + DeviceType.WASHTOWER: WASHER_NUMBERS, + DeviceType.WASHTOWER_WASHER: WASHER_NUMBERS, + DeviceType.WATER_HEATER: ( + NumberEntityDescription( + key=ThinQProperty.TARGET_TEMPERATURE, + native_max_value=60, + native_min_value=35, + native_step=1, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + translation_key=ThinQProperty.TARGET_TEMPERATURE, + ), + ), + DeviceType.WINE_CELLAR: ( + NUMBER_DESC[ThinQProperty.LIGHT_STATUS], + NUMBER_DESC[ThinQProperty.TARGET_TEMPERATURE], + ), +} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for number platform.""" + entities: list[ThinQNumberEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_NUMBER_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQNumberEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, ActiveMode.READ_WRITE + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQNumberEntity(ThinQEntity, NumberEntity): + """Represent a thinq number platform.""" + + _attr_mode = NumberMode.BOX + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + self._attr_native_value = self.data.value + + # Update unit. + if ( + unit_of_measurement := self._get_unit_of_measurement(self.data.unit) + ) is not None: + self._attr_native_unit_of_measurement = unit_of_measurement + + # Undate range. 
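+        # The native min/max/step are only taken from the device data when the
+        # entity description does not already pin a fixed value.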
+ if ( + self.entity_description.native_min_value is None + and (min_value := self.data.min) is not None + ): + self._attr_native_min_value = min_value + + if ( + self.entity_description.native_max_value is None + and (max_value := self.data.max) is not None + ): + self._attr_native_max_value = max_value + + if ( + self.entity_description.native_step is None + and (step := self.data.step) is not None + ): + self._attr_native_step = step + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s, unit:%s, min:%s, max:%s, step:%s", + self.coordinator.device_name, + self.property_id, + self.data.value, + self.native_value, + self.native_unit_of_measurement, + self.native_min_value, + self.native_max_value, + self.native_step, + ) + + async def async_set_native_value(self, value: float) -> None: + """Change to new number value.""" + if self.step.is_integer(): + value = int(value) + _LOGGER.debug( + "[%s:%s] async_set_native_value: %s", + self.coordinator.device_name, + self.property_id, + value, + ) + + await self.async_call_api(self.coordinator.api.post(self.property_id, value)) diff --git a/homeassistant/components/lg_thinq/select.py b/homeassistant/components/lg_thinq/select.py new file mode 100644 index 00000000000..e555d616ca3 --- /dev/null +++ b/homeassistant/components/lg_thinq/select.py @@ -0,0 +1,207 @@ +"""Support for select entities.""" + +from __future__ import annotations + +import logging + +from thinqconnect import DeviceType +from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration import ActiveMode + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + +SELECT_DESC: dict[ThinQProperty, SelectEntityDescription] = { + ThinQProperty.MONITORING_ENABLED: SelectEntityDescription( + key=ThinQProperty.MONITORING_ENABLED, + translation_key=ThinQProperty.MONITORING_ENABLED, + ), + ThinQProperty.COOK_MODE: SelectEntityDescription( + key=ThinQProperty.COOK_MODE, + translation_key=ThinQProperty.COOK_MODE, + ), + ThinQProperty.DISPLAY_LIGHT: SelectEntityDescription( + key=ThinQProperty.DISPLAY_LIGHT, + translation_key=ThinQProperty.DISPLAY_LIGHT, + ), + ThinQProperty.CURRENT_JOB_MODE: SelectEntityDescription( + key=ThinQProperty.CURRENT_JOB_MODE, + translation_key=ThinQProperty.CURRENT_JOB_MODE, + ), + ThinQProperty.FRESH_AIR_FILTER: SelectEntityDescription( + key=ThinQProperty.FRESH_AIR_FILTER, + translation_key=ThinQProperty.FRESH_AIR_FILTER, + ), +} +AIR_FLOW_SELECT_DESC: dict[ThinQProperty, SelectEntityDescription] = { + ThinQProperty.WIND_STRENGTH: SelectEntityDescription( + key=ThinQProperty.WIND_STRENGTH, + translation_key=ThinQProperty.WIND_STRENGTH, + ), + ThinQProperty.WIND_ANGLE: SelectEntityDescription( + key=ThinQProperty.WIND_ANGLE, + translation_key=ThinQProperty.WIND_ANGLE, + ), +} +OPERATION_SELECT_DESC: dict[ThinQProperty, SelectEntityDescription] = { + ThinQProperty.AIR_CLEAN_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.AIR_CLEAN_OPERATION_MODE, + translation_key="air_clean_operation_mode", + ), + ThinQProperty.DISH_WASHER_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.DISH_WASHER_OPERATION_MODE, + translation_key="operation_mode", + ), + ThinQProperty.DRYER_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.DRYER_OPERATION_MODE, + translation_key="operation_mode", + ), + ThinQProperty.HYGIENE_DRY_MODE: SelectEntityDescription( + key=ThinQProperty.HYGIENE_DRY_MODE, + translation_key=ThinQProperty.HYGIENE_DRY_MODE, + ), + ThinQProperty.LIGHT_BRIGHTNESS: SelectEntityDescription( + key=ThinQProperty.LIGHT_BRIGHTNESS, + translation_key=ThinQProperty.LIGHT_BRIGHTNESS, + ), + ThinQProperty.OVEN_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.OVEN_OPERATION_MODE, + translation_key="operation_mode", + ), + ThinQProperty.STYLER_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.STYLER_OPERATION_MODE, + translation_key="operation_mode", + ), + ThinQProperty.WASHER_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.WASHER_OPERATION_MODE, + translation_key="operation_mode", + ), +} + +DEVICE_TYPE_SELECT_MAP: dict[DeviceType, tuple[SelectEntityDescription, ...]] = { + DeviceType.AIR_CONDITIONER: ( + SELECT_DESC[ThinQProperty.MONITORING_ENABLED], + OPERATION_SELECT_DESC[ThinQProperty.AIR_CLEAN_OPERATION_MODE], + ), + DeviceType.AIR_PURIFIER_FAN: ( + AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_STRENGTH], + AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_ANGLE], + SELECT_DESC[ThinQProperty.DISPLAY_LIGHT], + SELECT_DESC[ThinQProperty.CURRENT_JOB_MODE], + ), + DeviceType.AIR_PURIFIER: ( + AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_STRENGTH], + SELECT_DESC[ThinQProperty.CURRENT_JOB_MODE], + ), + DeviceType.DEHUMIDIFIER: (AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_STRENGTH],), + DeviceType.DISH_WASHER: ( + OPERATION_SELECT_DESC[ThinQProperty.DISH_WASHER_OPERATION_MODE], + ), + DeviceType.DRYER: (OPERATION_SELECT_DESC[ThinQProperty.DRYER_OPERATION_MODE],), + DeviceType.HUMIDIFIER: ( + AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_STRENGTH], + SELECT_DESC[ThinQProperty.DISPLAY_LIGHT], + 
SELECT_DESC[ThinQProperty.CURRENT_JOB_MODE], + OPERATION_SELECT_DESC[ThinQProperty.HYGIENE_DRY_MODE], + ), + DeviceType.OVEN: ( + SELECT_DESC[ThinQProperty.COOK_MODE], + OPERATION_SELECT_DESC[ThinQProperty.OVEN_OPERATION_MODE], + ), + DeviceType.REFRIGERATOR: (SELECT_DESC[ThinQProperty.FRESH_AIR_FILTER],), + DeviceType.STYLER: (OPERATION_SELECT_DESC[ThinQProperty.STYLER_OPERATION_MODE],), + DeviceType.WASHCOMBO_MAIN: ( + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WASHCOMBO_MINI: ( + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WASHER: (OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE],), + DeviceType.WASHTOWER_DRYER: ( + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WASHTOWER: ( + OPERATION_SELECT_DESC[ThinQProperty.DRYER_OPERATION_MODE], + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WASHTOWER_WASHER: ( + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WATER_HEATER: (SELECT_DESC[ThinQProperty.CURRENT_JOB_MODE],), + DeviceType.WINE_CELLAR: (OPERATION_SELECT_DESC[ThinQProperty.LIGHT_BRIGHTNESS],), +} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for select platform.""" + entities: list[ThinQSelectEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_SELECT_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQSelectEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, ActiveMode.WRITABLE + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQSelectEntity(ThinQEntity, SelectEntity): + """Represent a thinq select platform.""" + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: SelectEntityDescription, + property_id: str, + ) -> None: + """Initialize a select entity.""" + super().__init__(coordinator, entity_description, property_id) + + self._attr_options = self.data.options if self.data.options is not None else [] + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + if self.data.value: + self._attr_current_option = str(self.data.value) + else: + self._attr_current_option = None + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s, options:%s", + self.coordinator.device_name, + self.property_id, + self.data.value, + self.current_option, + self.options, + ) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + _LOGGER.debug( + "[%s:%s] async_select_option: %s", + self.coordinator.device_name, + self.property_id, + option, + ) + await self.async_call_api(self.coordinator.api.post(self.property_id, option)) diff --git a/homeassistant/components/lg_thinq/sensor.py b/homeassistant/components/lg_thinq/sensor.py new file mode 100644 index 00000000000..99b4df8176e --- /dev/null +++ b/homeassistant/components/lg_thinq/sensor.py @@ -0,0 +1,447 @@ +"""Support for sensor entities.""" + +from __future__ import annotations + +import logging + +from thinqconnect import DeviceType +from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration import ActiveMode, ThinQPropertyEx, TimerProperty + +from 
homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + PERCENTAGE, + UnitOfTemperature, + UnitOfTime, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + +AIR_QUALITY_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.PM1: SensorEntityDescription( + key=ThinQProperty.PM1, + device_class=SensorDeviceClass.PM1, + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + state_class=SensorStateClass.MEASUREMENT, + ), + ThinQProperty.PM2: SensorEntityDescription( + key=ThinQProperty.PM2, + device_class=SensorDeviceClass.PM25, + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + state_class=SensorStateClass.MEASUREMENT, + ), + ThinQProperty.PM10: SensorEntityDescription( + key=ThinQProperty.PM10, + device_class=SensorDeviceClass.PM10, + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + state_class=SensorStateClass.MEASUREMENT, + ), + ThinQProperty.HUMIDITY: SensorEntityDescription( + key=ThinQProperty.HUMIDITY, + device_class=SensorDeviceClass.HUMIDITY, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + ThinQProperty.MONITORING_ENABLED: SensorEntityDescription( + key=ThinQProperty.MONITORING_ENABLED, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.MONITORING_ENABLED, + ), + ThinQProperty.TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=ThinQProperty.TEMPERATURE, + ), + ThinQProperty.ODOR_LEVEL: SensorEntityDescription( + key=ThinQProperty.ODOR_LEVEL, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.ODOR_LEVEL, + ), + ThinQProperty.TOTAL_POLLUTION_LEVEL: SensorEntityDescription( + key=ThinQProperty.TOTAL_POLLUTION_LEVEL, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.TOTAL_POLLUTION_LEVEL, + ), +} +BATTERY_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.BATTERY_PERCENT: SensorEntityDescription( + key=ThinQProperty.BATTERY_PERCENT, + translation_key=ThinQProperty.BATTERY_LEVEL, + ), +} +DISH_WASHING_COURSE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.CURRENT_DISH_WASHING_COURSE: SensorEntityDescription( + key=ThinQProperty.CURRENT_DISH_WASHING_COURSE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.CURRENT_DISH_WASHING_COURSE, + ) +} +FILTER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.FILTER_LIFETIME: SensorEntityDescription( + key=ThinQProperty.FILTER_LIFETIME, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.FILTER_LIFETIME, + ), +} +HUMIDITY_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.CURRENT_HUMIDITY: SensorEntityDescription( + key=ThinQProperty.CURRENT_HUMIDITY, + device_class=SensorDeviceClass.HUMIDITY, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ) +} +JOB_MODE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.CURRENT_JOB_MODE: SensorEntityDescription( + 
key=ThinQProperty.CURRENT_JOB_MODE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.CURRENT_JOB_MODE, + ), + ThinQPropertyEx.CURRENT_JOB_MODE_STICK_CLEANER: SensorEntityDescription( + key=ThinQProperty.CURRENT_JOB_MODE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQPropertyEx.CURRENT_JOB_MODE_STICK_CLEANER, + ), + ThinQProperty.PERSONALIZATION_MODE: SensorEntityDescription( + key=ThinQProperty.PERSONALIZATION_MODE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.PERSONALIZATION_MODE, + ), +} +LIGHT_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.BRIGHTNESS: SensorEntityDescription( + key=ThinQProperty.BRIGHTNESS, + translation_key=ThinQProperty.BRIGHTNESS, + ), + ThinQProperty.DURATION: SensorEntityDescription( + key=ThinQProperty.DURATION, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.DURATION, + ), +} +POWER_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.POWER_LEVEL: SensorEntityDescription( + key=ThinQProperty.POWER_LEVEL, + translation_key=ThinQProperty.POWER_LEVEL, + ) +} +PREFERENCE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.RINSE_LEVEL: SensorEntityDescription( + key=ThinQProperty.RINSE_LEVEL, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.RINSE_LEVEL, + ), + ThinQProperty.SOFTENING_LEVEL: SensorEntityDescription( + key=ThinQProperty.SOFTENING_LEVEL, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.SOFTENING_LEVEL, + ), +} +RECIPE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.RECIPE_NAME: SensorEntityDescription( + key=ThinQProperty.RECIPE_NAME, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.RECIPE_NAME, + ), + ThinQProperty.WORT_INFO: SensorEntityDescription( + key=ThinQProperty.WORT_INFO, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.WORT_INFO, + ), + ThinQProperty.YEAST_INFO: SensorEntityDescription( + key=ThinQProperty.YEAST_INFO, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.YEAST_INFO, + ), + ThinQProperty.HOP_OIL_INFO: SensorEntityDescription( + key=ThinQProperty.HOP_OIL_INFO, + translation_key=ThinQProperty.HOP_OIL_INFO, + ), + ThinQProperty.FLAVOR_INFO: SensorEntityDescription( + key=ThinQProperty.FLAVOR_INFO, + translation_key=ThinQProperty.FLAVOR_INFO, + ), + ThinQProperty.BEER_REMAIN: SensorEntityDescription( + key=ThinQProperty.BEER_REMAIN, + native_unit_of_measurement=PERCENTAGE, + translation_key=ThinQProperty.BEER_REMAIN, + ), +} +REFRIGERATION_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.FRESH_AIR_FILTER: SensorEntityDescription( + key=ThinQProperty.FRESH_AIR_FILTER, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.FRESH_AIR_FILTER, + ), +} +RUN_STATE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.CURRENT_STATE: SensorEntityDescription( + key=ThinQProperty.CURRENT_STATE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.CURRENT_STATE, + ), + ThinQProperty.COCK_STATE: SensorEntityDescription( + key=ThinQProperty.COCK_STATE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.COCK_STATE, + ), + ThinQProperty.STERILIZING_STATE: SensorEntityDescription( + key=ThinQProperty.STERILIZING_STATE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.STERILIZING_STATE, + ), + 
ThinQProperty.GROWTH_MODE: SensorEntityDescription( + key=ThinQProperty.GROWTH_MODE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.GROWTH_MODE, + ), + ThinQProperty.WIND_VOLUME: SensorEntityDescription( + key=ThinQProperty.WIND_VOLUME, + device_class=SensorDeviceClass.WIND_SPEED, + translation_key=ThinQProperty.WIND_VOLUME, + ), +} +TEMPERATURE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.TARGET_TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.TARGET_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + translation_key=ThinQProperty.TARGET_TEMPERATURE, + ), + ThinQProperty.DAY_TARGET_TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.DAY_TARGET_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=ThinQProperty.DAY_TARGET_TEMPERATURE, + ), + ThinQProperty.NIGHT_TARGET_TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.NIGHT_TARGET_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=ThinQProperty.NIGHT_TARGET_TEMPERATURE, + ), + ThinQProperty.TEMPERATURE_STATE: SensorEntityDescription( + key=ThinQProperty.TEMPERATURE_STATE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.TEMPERATURE_STATE, + ), + ThinQProperty.CURRENT_TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.CURRENT_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=ThinQProperty.CURRENT_TEMPERATURE, + ), +} +WATER_FILTER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.USED_TIME: SensorEntityDescription( + key=ThinQProperty.USED_TIME, + native_unit_of_measurement=UnitOfTime.MONTHS, + translation_key=ThinQProperty.USED_TIME, + ), +} +WATER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.WATER_TYPE: SensorEntityDescription( + key=ThinQProperty.WATER_TYPE, + translation_key=ThinQProperty.WATER_TYPE, + ), +} + +WASHER_SENSORS: tuple[SensorEntityDescription, ...] 
= ( + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], +) +DEVICE_TYPE_SENSOR_MAP: dict[DeviceType, tuple[SensorEntityDescription, ...]] = { + DeviceType.AIR_CONDITIONER: ( + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM1], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM2], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM10], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.HUMIDITY], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.ODOR_LEVEL], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TOTAL_POLLUTION_LEVEL], + FILTER_INFO_SENSOR_DESC[ThinQProperty.FILTER_LIFETIME], + ), + DeviceType.AIR_PURIFIER_FAN: ( + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM1], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM2], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM10], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.HUMIDITY], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TEMPERATURE], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.MONITORING_ENABLED], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.ODOR_LEVEL], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TOTAL_POLLUTION_LEVEL], + ), + DeviceType.AIR_PURIFIER: ( + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM1], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM2], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM10], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.HUMIDITY], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.MONITORING_ENABLED], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.ODOR_LEVEL], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TOTAL_POLLUTION_LEVEL], + JOB_MODE_SENSOR_DESC[ThinQProperty.CURRENT_JOB_MODE], + JOB_MODE_SENSOR_DESC[ThinQProperty.PERSONALIZATION_MODE], + ), + DeviceType.COOKTOP: ( + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + POWER_SENSOR_DESC[ThinQProperty.POWER_LEVEL], + ), + DeviceType.DEHUMIDIFIER: ( + JOB_MODE_SENSOR_DESC[ThinQProperty.CURRENT_JOB_MODE], + HUMIDITY_SENSOR_DESC[ThinQProperty.CURRENT_HUMIDITY], + ), + DeviceType.DISH_WASHER: ( + DISH_WASHING_COURSE_SENSOR_DESC[ThinQProperty.CURRENT_DISH_WASHING_COURSE], + PREFERENCE_SENSOR_DESC[ThinQProperty.RINSE_LEVEL], + PREFERENCE_SENSOR_DESC[ThinQProperty.SOFTENING_LEVEL], + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + ), + DeviceType.DRYER: WASHER_SENSORS, + DeviceType.HOME_BREW: ( + RECIPE_SENSOR_DESC[ThinQProperty.RECIPE_NAME], + RECIPE_SENSOR_DESC[ThinQProperty.WORT_INFO], + RECIPE_SENSOR_DESC[ThinQProperty.YEAST_INFO], + RECIPE_SENSOR_DESC[ThinQProperty.HOP_OIL_INFO], + RECIPE_SENSOR_DESC[ThinQProperty.FLAVOR_INFO], + RECIPE_SENSOR_DESC[ThinQProperty.BEER_REMAIN], + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + ), + DeviceType.HUMIDIFIER: ( + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM1], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM2], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM10], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.HUMIDITY], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TEMPERATURE], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.MONITORING_ENABLED], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TOTAL_POLLUTION_LEVEL], + ), + DeviceType.KIMCHI_REFRIGERATOR: ( + REFRIGERATION_SENSOR_DESC[ThinQProperty.FRESH_AIR_FILTER], + SensorEntityDescription( + key=ThinQProperty.TARGET_TEMPERATURE, + translation_key=ThinQProperty.TARGET_TEMPERATURE, + ), + ), + DeviceType.MICROWAVE_OVEN: (RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE],), + DeviceType.OVEN: ( + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + TEMPERATURE_SENSOR_DESC[ThinQProperty.TARGET_TEMPERATURE], + ), + DeviceType.PLANT_CULTIVATOR: ( + LIGHT_SENSOR_DESC[ThinQProperty.BRIGHTNESS], + LIGHT_SENSOR_DESC[ThinQProperty.DURATION], + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + 
RUN_STATE_SENSOR_DESC[ThinQProperty.GROWTH_MODE], + RUN_STATE_SENSOR_DESC[ThinQProperty.WIND_VOLUME], + TEMPERATURE_SENSOR_DESC[ThinQProperty.DAY_TARGET_TEMPERATURE], + TEMPERATURE_SENSOR_DESC[ThinQProperty.NIGHT_TARGET_TEMPERATURE], + TEMPERATURE_SENSOR_DESC[ThinQProperty.TEMPERATURE_STATE], + ), + DeviceType.REFRIGERATOR: ( + REFRIGERATION_SENSOR_DESC[ThinQProperty.FRESH_AIR_FILTER], + WATER_FILTER_INFO_SENSOR_DESC[ThinQProperty.USED_TIME], + ), + DeviceType.ROBOT_CLEANER: ( + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + JOB_MODE_SENSOR_DESC[ThinQProperty.CURRENT_JOB_MODE], + ), + DeviceType.STICK_CLEANER: ( + BATTERY_SENSOR_DESC[ThinQProperty.BATTERY_PERCENT], + JOB_MODE_SENSOR_DESC[ThinQPropertyEx.CURRENT_JOB_MODE_STICK_CLEANER], + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + ), + DeviceType.STYLER: WASHER_SENSORS, + DeviceType.WASHCOMBO_MAIN: WASHER_SENSORS, + DeviceType.WASHCOMBO_MINI: WASHER_SENSORS, + DeviceType.WASHER: WASHER_SENSORS, + DeviceType.WASHTOWER_DRYER: WASHER_SENSORS, + DeviceType.WASHTOWER: WASHER_SENSORS, + DeviceType.WASHTOWER_WASHER: WASHER_SENSORS, + DeviceType.WATER_HEATER: ( + TEMPERATURE_SENSOR_DESC[ThinQProperty.CURRENT_TEMPERATURE], + ), + DeviceType.WATER_PURIFIER: ( + RUN_STATE_SENSOR_DESC[ThinQProperty.COCK_STATE], + RUN_STATE_SENSOR_DESC[ThinQProperty.STERILIZING_STATE], + WATER_INFO_SENSOR_DESC[ThinQProperty.WATER_TYPE], + ), +} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for sensor platform.""" + entities: list[ThinQSensorEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_SENSOR_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQSensorEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, + ( + ActiveMode.READABLE + if ( + coordinator.api.device.device_type == DeviceType.COOKTOP + or isinstance(description.key, TimerProperty) + ) + else ActiveMode.READ_ONLY + ), + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQSensorEntity(ThinQEntity, SensorEntity): + """Represent a thinq sensor platform.""" + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: SensorEntityDescription, + property_id: str, + ) -> None: + """Initialize a sensor entity.""" + super().__init__(coordinator, entity_description, property_id) + + if entity_description.device_class == SensorDeviceClass.ENUM: + self._attr_options = self.data.options + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + self._attr_native_value = self.data.value + + if (data_unit := self._get_unit_of_measurement(self.data.unit)) is not None: + # For different from description's unit + self._attr_native_unit_of_measurement = data_unit + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s, options:%s, unit:%s", + self.coordinator.device_name, + self.property_id, + self.data.value, + self.native_value, + self.options, + self.native_unit_of_measurement, + ) diff --git a/homeassistant/components/lg_thinq/strings.json b/homeassistant/components/lg_thinq/strings.json new file mode 100644 index 00000000000..a776dde2054 --- /dev/null +++ b/homeassistant/components/lg_thinq/strings.json @@ -0,0 +1,998 @@ +{ + "config": { + "abort": { + 
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" + }, + "error": { + "invalid_token": "The token is not valid.", + "not_acceptable_terms": "The service terms are not accepted.", + "not_allowed_api_again": "The user does NOT have permission on the API call.", + "not_supported_country": "The country is not supported.", + "exceeded_api_calls": "The number of API calls has been exceeded.", + "exceeded_user_api_calls": "The number of User API calls has been exceeded.", + "token_unauthorized": "The token is invalid or unauthorized." + }, + "step": { + "user": { + "title": "Connect to ThinQ", + "description": "Please enter a ThinQ [PAT(Personal Access Token)]({pat_url}) created with your LG ThinQ account.", + "data": { + "access_token": "Personal Access Token", + "country": "Country" + } + } + } + }, + "entity": { + "switch": { + "auto_mode": { + "name": "Auto mode" + }, + "express_mode": { + "name": "Ice plus" + }, + "hot_water_mode": { + "name": "Hot water" + }, + "humidity_warm_mode": { + "name": "Warm mist" + }, + "hygiene_dry_mode": { + "name": "Drying mode" + }, + "mood_lamp_state": { + "name": "Mood light" + }, + "operation_power": { + "name": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]" + }, + "optimal_humidity": { + "name": "Ventilation" + }, + "power_save_enabled": { + "name": "Energy saving" + }, + "rapid_freeze": { + "name": "Quick freeze" + }, + "sleep_mode": { + "name": "Sleep mode" + }, + "uv_nano": { + "name": "UVnano" + }, + "warm_mode": { + "name": "Heating" + } + }, + "binary_sensor": { + "eco_friendly_mode": { + "name": "Eco friendly" + }, + "power_save_enabled": { + "name": "Power saving mode" + }, + "remote_control_enabled": { + "name": "Remote start" + }, + "remote_control_enabled_for_location": { + "name": "{location} remote start" + }, + "rinse_refill": { + "name": "Rinse refill needed" + }, + "sabbath_mode": { + "name": "Sabbath" + }, + "machine_clean_reminder": { + "name": "Machine clean reminder" + }, + "signal_level": { + "name": "Chime sound" + }, + "clean_light_reminder": { + "name": "Clean indicator light" + }, + "operation_mode": { + "name": "[%key:component::binary_sensor::entity_component::power::name%]" + }, + "one_touch_filter": { + "name": "Fresh air filter" + } + }, + "climate": { + "climate_air_conditioner": { + "state_attributes": { + "fan_mode": { + "state": { + "slow": "Slow", + "low": "Low", + "mid": "Medium", + "high": "High", + "power": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]" + } + }, + "preset_mode": { + "state": { + "air_clean": "Air purify", + "aroma": "Aroma", + "energy_saving": "Energy saving" + } + } + } + } + }, + "event": { + "error": { + "name": "Error", + "state_attributes": { + "event_type": { + "state": { + "block_error": "Cleaning has stopped. 
Check for obstacles", + "brush_error": "Moving brush has a problem", + "bubble_error": "Bubble error", + "child_lock_active_error": "Child lock", + "cliff_error": "Fall prevention sensor has an error", + "clutch_error": "Clutch error", + "compressor_error": "Compressor error", + "dispensing_error": "Dispensor error", + "door_close_error": "Door closed error", + "door_lock_error": "Door lock error", + "door_open_error": "Door open", + "door_sensor_error": "Door sensor error", + "drainmotor_error": "Drain error", + "dust_full_error": "Dust bin is full and needs to be emptied", + "empty_water_alert_error": "Empty water", + "fan_motor_error": "Fan lock error", + "filter_clogging_error": "Filter error", + "frozen_error": "Freezing detection error", + "heater_circuit_error": "Heater circuit failure", + "high_power_supply_error": "Power supply error", + "high_temperature_detection_error": "High-temperature error", + "inner_lid_open_error": "Lid open error", + "ir_sensor_error": "IR sensor error", + "le_error": "LE error", + "le2_error": "LE2 error", + "left_wheel_error": "Left wheel has a problem", + "locked_motor_error": "Driver motor error", + "mop_error": "Cannot operate properly without the mop attached", + "motor_error": "Motor trouble", + "motor_lock_error": "Motor lock error", + "move_error": "The wheels are not touching the floor", + "need_water_drain": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::empty_water_alert_error%]", + "need_water_replenishment": "Fill water", + "no_battery_error": "Robot cleaner's battery is low", + "no_dust_bin_error": "Dust bin is not installed", + "no_filter_error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::filter_clogging_error%]", + "out_of_balance_error": "Out of balance load", + "overfill_error": "Overfill error", + "part_malfunction_error": "AIE error", + "power_code_connection_error": "Power cord connection error", + "power_fail_error": "Power failure", + "right_wheel_error": "Right wheel has a problem", + "stack_error": "Stacking error", + "steam_heat_error": "Steam heater error", + "suction_blocked_error": "Suction motor is clogged", + "temperature_sensor_error": "Thermistor error", + "time_to_run_the_tub_clean_cycle_error": "Tub clean recommendation", + "timeout_error": "Timeout error", + "turbidity_sensor_error": "Turbidity sensor error", + "unable_to_lock_error": "Door lock error", + "unbalanced_load_error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::out_of_balance_error%]", + "unknown_error": "Product requires attention", + "vibration_sensor_error": "Vibration sensor error", + "water_drain_error": "Water drain error", + "water_leakage_error": "Water leakage problem", + "water_leaks_error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::water_leakage_error%]", + "water_level_sensor_error": "Water sensor error", + "water_supply_error": "Water supply error" + } + } + } + }, + "notification": { + "name": "Notification", + "state_attributes": { + "event_type": { + "state": { + "charging_is_complete": "Charging is completed", + "cleaning_is_complete": "Cycle is finished", + "cleaning_is_completed": "Cleaning is completed", + "cleaning_is_failed": "Cleaning has failed", + "cooking_is_complete": "Turned off", + "door_is_open": "The door is open", + "drying_failed": "An error has occurred in the dryer", + "drying_is_complete": "Drying is completed", + "error_during_cleaning": "Cleaning 
stopped due to an error", + "error_during_washing": "An error has occurred in the washing machine", + "error_has_occurred": "An error has occurred", + "frozen_is_complete": "Ice plus is done", + "homeguard_is_stopped": "Home guard has stopped", + "lack_of_water": "There is no water in the water tank", + "motion_is_detected": "Photograph is sent as movement is detected during home guard", + "need_to_check_location": "Location check is required", + "pollution_is_high": "Air status is rapidly becoming bad", + "preheating_is_complete": "Preheating is done", + "rinse_is_not_enough": "Add rinse aid for better drying performance", + "salt_refill_is_needed": "Add salt for better softening performance", + "scheduled_cleaning_starts": "Scheduled cleaning starts", + "styling_is_complete": "Styling is completed", + "time_to_change_filter": "It is time to replace the filter", + "time_to_change_water_filter": "You need to replace water filter", + "time_to_clean": "Need to selfcleaning", + "time_to_clean_filter": "It is time to clean the filter", + "timer_is_complete": "Timer has been completed", + "washing_is_complete": "Washing is completed", + "water_is_full": "Water is full", + "water_leak_has_occurred": "The dishwasher has detected a water leak" + } + } + } + } + }, + "number": { + "target_temperature": { + "name": "[%key:component::sensor::entity_component::temperature::name%]" + }, + "target_temperature_for_location": { + "name": "{location} temperature" + }, + "light_status": { + "name": "Light" + }, + "fan_speed": { + "name": "Fan" + }, + "lamp_brightness": { + "name": "[%key:component::lg_thinq::entity::number::light_status::name%]" + }, + "wind_temperature": { + "name": "Wind temperature" + }, + "relative_hour_to_start": { + "name": "Schedule turn-on" + }, + "relative_hour_to_start_for_location": { + "name": "{location} schedule turn-on" + }, + "relative_hour_to_start_wm": { + "name": "Delay starts in" + }, + "relative_hour_to_start_wm_for_location": { + "name": "{location} delay starts in" + }, + "relative_hour_to_stop": { + "name": "Schedule turn-off" + }, + "relative_hour_to_stop_for_location": { + "name": "{location} schedule turn-off" + }, + "relative_hour_to_stop_wm": { + "name": "Delay ends in" + }, + "relative_hour_to_stop_wm_for_location": { + "name": "{location} delay ends in" + }, + "sleep_timer_relative_hour_to_stop": { + "name": "Sleep timer" + }, + "sleep_timer_relative_hour_to_stop_for_location": { + "name": "{location} sleep timer" + }, + "target_humidity": { + "name": "Target humidity" + } + }, + "sensor": { + "odor_level": { + "name": "Odor", + "state": { + "invalid": "Invalid", + "weak": "Weak", + "normal": "Normal", + "strong": "Strong", + "very_strong": "Very strong" + } + }, + "current_temperature": { + "name": "Current temperature" + }, + "temperature": { + "name": "Temperature" + }, + "total_pollution_level": { + "name": "Overall air quality", + "state": { + "invalid": "Invalid", + "good": "Good", + "normal": "Moderate", + "bad": "Unhealthy", + "very_bad": "Poor" + } + }, + "monitoring_enabled": { + "name": "Air quality sensor", + "state": { + "on_working": "Turns on with product", + "always": "Always on" + } + }, + "growth_mode": { + "name": "Mode", + "state": { + "standard": "Auto", + "ext_leaf": "Vegetables", + "ext_herb": "Herbs", + "ext_flower": "Flowers", + "ext_expert": "Custom growing mode" + } + }, + "growth_mode_for_location": { + "name": "{location} mode", + "state": { + "standard": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", 
+ "ext_leaf": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::ext_leaf%]", + "ext_herb": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::ext_herb%]", + "ext_flower": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::ext_flower%]", + "ext_expert": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::ext_expert%]" + } + }, + "wind_volume_for_location": { + "name": "{location} wind speed" + }, + "brightness": { + "name": "Lighting intensity" + }, + "brightness_for_location": { + "name": "{location} lighting intensity" + }, + "duration": { + "name": "Lighting duration" + }, + "duration_for_location": { + "name": "{location} lighting duration" + }, + "day_target_temperature": { + "name": "Day growth temperature" + }, + "day_target_temperature_for_location": { + "name": "{location} day growth temperature" + }, + "night_target_temperature": { + "name": "Night growth temperature" + }, + "night_target_temperature_for_location": { + "name": "{location} night growth temperature" + }, + "temperature_state": { + "name": "[%key:component::sensor::entity_component::temperature::name%]", + "state": { + "high": "High", + "normal": "Good", + "low": "Low" + } + }, + "temperature_state_for_location": { + "name": "[%key:component::lg_thinq::entity::number::target_temperature_for_location::name%]", + "state": { + "high": "[%key:component::lg_thinq::entity::sensor::temperature_state::state::high%]", + "normal": "[%key:component::lg_thinq::entity::sensor::temperature_state::state::normal%]", + "low": "[%key:component::lg_thinq::entity::sensor::temperature_state::state::low%]" + } + }, + "current_state": { + "name": "Current status", + "state": { + "add_drain": "Filling", + "as_pop_up": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::unknown_error%]", + "cancel": "Cancel", + "carbonation": "Carbonation", + "change_condition": "Settings Change", + "charging": "Charging", + "charging_complete": "Charging completed", + "checking_turbidity": "Detecting soil level", + "cleaning": "Cleaning", + "cleaning_is_done": "Cleaning is done", + "complete": "Done", + "cook": "Cooking", + "cook_complete": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "cooking_in_progress": "[%key:component::lg_thinq::entity::sensor::current_state::state::cook%]", + "cool_down": "Cool down", + "cooling": "Cooling", + "detecting": "Detecting", + "detergent_amount": "Providing the info about the amount of detergent", + "diagnosis": "Smart diagnosis is in progress", + "dispensing": "Auto dispensing", + "display_loadsize": "Load size", + "done": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "drying": "Drying", + "during_aging": "Aging", + "during_fermentation": "Fermentation", + "end": "Finished", + "end_cooling": "[%key:component::lg_thinq::entity::sensor::current_state::state::drying%]", + "error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::unknown_error%]", + "extracting_capsule": "Capsule brewing", + "extraction_mode": "Storing", + "firmware": "Updating firmware", + "fota": "Updating", + "frozen_prevent_initial": "Freeze protection standby", + "frozen_prevent_running": "Freeze protection in progress", + "frozen_prevent_pause": "Freeze protection paused", + "homing": "Moving", + "initial": "[%key:common::state::standby%]", + "initializing": "[%key:common::state::standby%]", + "lock": "Control lock", + "macrosector": "Remote is in use", + 
"melting": "Wort dissolving", + "monitoring_detecting": "HomeGuard is active", + "monitoring_moving": "Going to the starting point", + "monitoring_positioning": "Setting homeguard start point", + "night_dry": "Night dry", + "oven_setting": "Cooktop connected", + "pause": "[%key:common::state::paused%]", + "paused": "[%key:common::state::paused%]", + "power_fail": "Power fail", + "power_on": "[%key:common::state::on%]", + "power_off": "[%key:common::state::off%]", + "preference": "Setting", + "preheat": "Preheating", + "preheat_complete": "[%key:component::lg_thinq::entity::event::notification::state_attributes::event_type::state::preheating_is_complete%]", + "preheating": "[%key:component::lg_thinq::entity::sensor::current_state::state::preheat%]", + "preheating_is_done": "[%key:component::lg_thinq::entity::event::notification::state_attributes::event_type::state::preheating_is_complete%]", + "prepareing_fermentation": "Preparing now", + "presteam": "Ready to steam", + "prewash": "Prewashing", + "proofing": "Proofing", + "refreshing": "Refreshing", + "reservation": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "reserved": "Delay set", + "rinse_hold": "Waiting to rinse", + "rinsing": "Rinsing", + "running": "Running", + "running_end": "Complete", + "setdate": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "shoes_module": "Drying shoes", + "sleep": "In sleep mode", + "smart_grid_run": "Running smart grid", + "soaking": "Soak", + "softening": "Softener", + "spinning": "Spinning", + "stay": "Refresh", + "standby": "[%key:common::state::standby%]", + "steam": "Refresh", + "steam_softening": "Steam softening", + "sterilize": "Sterilize", + "temperature_stabilization": "Temperature adjusting", + "working": "[%key:component::lg_thinq::entity::sensor::current_state::state::cleaning%]", + "wrinkle_care": "Wrinkle care" + } + }, + "current_state_for_location": { + "name": "{location} current status", + "state": { + "add_drain": "[%key:component::lg_thinq::entity::sensor::current_state::state::add_drain%]", + "as_pop_up": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::unknown_error%]", + "cancel": "[%key:component::lg_thinq::entity::sensor::current_state::state::cancel%]", + "carbonation": "[%key:component::lg_thinq::entity::sensor::current_state::state::carbonation%]", + "change_condition": "[%key:component::lg_thinq::entity::sensor::current_state::state::change_condition%]", + "charging": "[%key:component::lg_thinq::entity::sensor::current_state::state::charging%]", + "charging_complete": "[%key:component::lg_thinq::entity::sensor::current_state::state::charging_complete%]", + "checking_turbidity": "[%key:component::lg_thinq::entity::sensor::current_state::state::checking_turbidity%]", + "cleaning": "[%key:component::lg_thinq::entity::sensor::current_state::state::cleaning%]", + "cleaning_is_done": "[%key:component::lg_thinq::entity::sensor::current_state::state::cleaning_is_done%]", + "complete": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "cook": "[%key:component::lg_thinq::entity::sensor::current_state::state::cook%]", + "cook_complete": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "cooking_in_progress": "[%key:component::lg_thinq::entity::sensor::current_state::state::cook%]", + "cool_down": "[%key:component::lg_thinq::entity::sensor::current_state::state::cool_down%]", + "cooling": 
"[%key:component::lg_thinq::entity::sensor::current_state::state::cooling%]", + "detecting": "[%key:component::lg_thinq::entity::sensor::current_state::state::detecting%]", + "detergent_amount": "[%key:component::lg_thinq::entity::sensor::current_state::state::detergent_amount%]", + "diagnosis": "[%key:component::lg_thinq::entity::sensor::current_state::state::diagnosis%]", + "dispensing": "[%key:component::lg_thinq::entity::sensor::current_state::state::dispensing%]", + "display_loadsize": "[%key:component::lg_thinq::entity::sensor::current_state::state::display_loadsize%]", + "done": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "drying": "[%key:component::lg_thinq::entity::sensor::current_state::state::drying%]", + "during_aging": "[%key:component::lg_thinq::entity::sensor::current_state::state::during_aging%]", + "during_fermentation": "[%key:component::lg_thinq::entity::sensor::current_state::state::during_fermentation%]", + "end": "[%key:component::lg_thinq::entity::sensor::current_state::state::end%]", + "end_cooling": "[%key:component::lg_thinq::entity::sensor::current_state::state::drying%]", + "error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::unknown_error%]", + "extracting_capsule": "[%key:component::lg_thinq::entity::sensor::current_state::state::extracting_capsule%]", + "extraction_mode": "[%key:component::lg_thinq::entity::sensor::current_state::state::extraction_mode%]", + "firmware": "[%key:component::lg_thinq::entity::sensor::current_state::state::firmware%]", + "fota": "[%key:component::lg_thinq::entity::sensor::current_state::state::fota%]", + "frozen_prevent_initial": "[%key:component::lg_thinq::entity::sensor::current_state::state::frozen_prevent_initial%]", + "frozen_prevent_running": "[%key:component::lg_thinq::entity::sensor::current_state::state::frozen_prevent_running%]", + "frozen_prevent_pause": "[%key:component::lg_thinq::entity::sensor::current_state::state::frozen_prevent_pause%]", + "homing": "[%key:component::lg_thinq::entity::sensor::current_state::state::homing%]", + "initial": "[%key:common::state::standby%]", + "initializing": "[%key:common::state::standby%]", + "lock": "[%key:component::lg_thinq::entity::sensor::current_state::state::lock%]", + "macrosector": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "melting": "[%key:component::lg_thinq::entity::sensor::current_state::state::melting%]", + "monitoring_detecting": "[%key:component::lg_thinq::entity::sensor::current_state::state::monitoring_detecting%]", + "monitoring_moving": "[%key:component::lg_thinq::entity::sensor::current_state::state::monitoring_moving%]", + "monitoring_positioning": "[%key:component::lg_thinq::entity::sensor::current_state::state::monitoring_positioning%]", + "night_dry": "[%key:component::lg_thinq::entity::sensor::current_state::state::night_dry%]", + "oven_setting": "[%key:component::lg_thinq::entity::sensor::current_state::state::oven_setting%]", + "pause": "[%key:common::state::paused%]", + "paused": "[%key:common::state::paused%]", + "power_fail": "[%key:component::lg_thinq::entity::sensor::current_state::state::power_fail%]", + "power_on": "[%key:common::state::on%]", + "power_off": "[%key:common::state::off%]", + "preference": "[%key:component::lg_thinq::entity::sensor::current_state::state::preference%]", + "preheat": "[%key:component::lg_thinq::entity::sensor::current_state::state::preheat%]", + "preheat_complete": 
"[%key:component::lg_thinq::entity::event::notification::state_attributes::event_type::state::preheating_is_complete%]", + "preheating": "[%key:component::lg_thinq::entity::sensor::current_state::state::preheat%]", + "preheating_is_done": "[%key:component::lg_thinq::entity::event::notification::state_attributes::event_type::state::preheating_is_complete%]", + "prepareing_fermentation": "[%key:component::lg_thinq::entity::sensor::current_state::state::prepareing_fermentation%]", + "presteam": "[%key:component::lg_thinq::entity::sensor::current_state::state::presteam%]", + "prewash": "[%key:component::lg_thinq::entity::sensor::current_state::state::prewash%]", + "proofing": "[%key:component::lg_thinq::entity::sensor::current_state::state::proofing%]", + "refreshing": "[%key:component::lg_thinq::entity::sensor::current_state::state::refreshing%]", + "reservation": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "reserved": "[%key:component::lg_thinq::entity::sensor::current_state::state::reserved%]", + "rinse_hold": "[%key:component::lg_thinq::entity::sensor::current_state::state::rinse_hold%]", + "rinsing": "[%key:component::lg_thinq::entity::sensor::current_state::state::rinsing%]", + "running": "[%key:component::lg_thinq::entity::sensor::current_state::state::running%]", + "running_end": "[%key:component::lg_thinq::entity::sensor::current_state::state::running_end%]", + "setdate": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "shoes_module": "[%key:component::lg_thinq::entity::sensor::current_state::state::shoes_module%]", + "sleep": "[%key:component::lg_thinq::entity::sensor::current_state::state::sleep%]", + "smart_grid_run": "[%key:component::lg_thinq::entity::sensor::current_state::state::smart_grid_run%]", + "soaking": "[%key:component::lg_thinq::entity::sensor::current_state::state::soaking%]", + "softening": "[%key:component::lg_thinq::entity::sensor::current_state::state::softening%]", + "spinning": "[%key:component::lg_thinq::entity::sensor::current_state::state::spinning%]", + "stay": "[%key:component::lg_thinq::entity::sensor::current_state::state::stay%]", + "standby": "[%key:common::state::standby%]", + "steam": "[%key:component::lg_thinq::entity::sensor::current_state::state::steam%]", + "steam_softening": "[%key:component::lg_thinq::entity::sensor::current_state::state::steam_softening%]", + "sterilize": "[%key:component::lg_thinq::entity::sensor::current_state::state::sterilize%]", + "temperature_stabilization": "[%key:component::lg_thinq::entity::sensor::current_state::state::temperature_stabilization%]", + "working": "[%key:component::lg_thinq::entity::sensor::current_state::state::cleaning%]", + "wrinkle_care": "[%key:component::lg_thinq::entity::sensor::current_state::state::wrinkle_care%]" + } + }, + "fresh_air_filter": { + "name": "[%key:component::lg_thinq::entity::binary_sensor::one_touch_filter::name%]", + "state": { + "off": "[%key:common::state::off%]", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "power": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]", + "replace": "Replace filter", + "smart_power": "Smart safe storage", + "smart_off": "[%key:common::state::off%]", + "smart_on": "[%key:component::lg_thinq::entity::sensor::fresh_air_filter::state::smart_power%]" + } + }, + "filter_lifetime": { + "name": "Filter remaining" + }, + "used_time": { + "name": "Water filter used" + }, + "current_job_mode": { + "name": 
"Operating mode", + "state": { + "air_clean": "Purify", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "clothes_dry": "Laundry", + "edge": "Edge cleaning", + "heat_pump": "Heat pump", + "high": "Power", + "intensive_dry": "Spot", + "macro": "Custom mode", + "mop": "Mop", + "normal": "Normal", + "off": "[%key:common::state::off%]", + "quiet_humidity": "Silent", + "rapid_humidity": "Jet", + "sector_base": "Cell by cell", + "select": "My space", + "smart_humidity": "Smart", + "spot": "Spiral spot mode", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "vacation": "Vacation", + "zigzag": "Zigzag" + } + }, + "current_job_mode_stick_cleaner": { + "name": "Operating mode", + "state": { + "auto": "Low power", + "high": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]", + "mop": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::mop%]", + "normal": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::normal%]", + "off": "[%key:common::state::off%]", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]" + } + }, + "personalization_mode": { + "name": "Personal mode", + "state": { + "auto_inside": "[%key:component::lg_thinq::entity::switch::auto_mode::name%]", + "sleep": "Sleep mode", + "baby": "Baby care mode", + "sick_house": "New Home mode", + "auto_outside": "Interlocking mode", + "pet": "Pet mode", + "cooking": "Cooking mode", + "smoke": "Smoke mode", + "exercise": "Exercise mode", + "others": "Others" + } + }, + "current_dish_washing_course": { + "name": "Current cycle", + "state": { + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "heavy": "Intensive", + "delicate": "Delicate", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "normal": "Normal", + "rinse": "Rinse", + "refresh": "Refresh", + "express": "Express", + "machine_clean": "Machine clean", + "short_mode": "Short mode", + "download_cycle": "Download cycle", + "quick": "Quick", + "steam": "Steam care", + "spray": "Spray", + "eco": "Eco" + } + }, + "rinse_level": { + "name": "Rinse aid dispenser level", + "state": { + "rinselevel_0": "0", + "rinselevel_1": "1", + "rinselevel_2": "2", + "rinselevel_3": "3", + "rinselevel_4": "4" + } + }, + "softening_level": { + "name": "Softening level", + "state": { + "softeninglevel_0": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_0%]", + "softeninglevel_1": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_1%]", + "softeninglevel_2": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_2%]", + "softeninglevel_3": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_3%]", + "softeninglevel_4": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_4%]" + } + }, + "cock_state": { + "name": "[%key:component::lg_thinq::entity::switch::uv_nano::name%]", + "state": { + "cleaning": "In progress", + "normal": "[%key:common::state::standby%]" + } + }, + "sterilizing_state": { + "name": "High-temp sterilization", + "state": { + "off": "[%key:common::state::off%]", + "on": "Sterilizing", + "cancel": "[%key:component::lg_thinq::entity::sensor::current_state::state::cancel%]" + } + }, + "water_type": { + "name": "Type" + }, + "target_temperature": { + "name": "[%key:component::sensor::entity_component::temperature::name%]", + "state": { + "kimchi": 
"Kimchi", + "off": "[%key:common::state::off%]", + "freezer": "Freezer", + "fridge": "Fridge", + "storage": "Storage", + "meat_fish": "Meat/Fish", + "rice_grain": "Rice/Grain", + "vegetable_fruit": "Vege/Fruit", + "temperature_number": "Number" + } + }, + "target_temperature_for_location": { + "name": "[%key:component::lg_thinq::entity::number::target_temperature_for_location::name%]", + "state": { + "kimchi": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::kimchi%]", + "off": "[%key:common::state::off%]", + "freezer": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::freezer%]", + "fridge": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::fridge%]", + "storage": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::storage%]", + "meat_fish": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::meat_fish%]", + "rice_grain": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::rice_grain%]", + "vegetable_fruit": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::vegetable_fruit%]", + "temperature_number": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::temperature_number%]" + } + }, + "elapsed_day_state": { + "name": "Brewing period" + }, + "elapsed_day_total": { + "name": "Brewing duration" + }, + "recipe_name": { + "name": "Homebrew recipe", + "state": { + "ipa": "IPA", + "pale_ale": "Pale ale", + "stout": "Stout", + "wheat": "Wheat", + "pilsner": "Pilsner", + "red_ale": "Red ale", + "my_recipe": "My recipe" + } + }, + "wort_info": { + "name": "Wort", + "state": { + "hoppy": "Hoppy", + "deep_gold": "DeepGold", + "wheat": "Wheat", + "dark": "Dark" + } + }, + "yeast_info": { + "name": "Yeast", + "state": { + "american_ale": "American ale", + "english_ale": "English ale", + "lager": "Lager", + "weizen": "Weizen" + } + }, + "hop_oil_info": { + "name": "Hops" + }, + "flavor_info": { + "name": "Flavor" + }, + "beer_remain": { + "name": "Recipe progress" + }, + "battery_level": { + "name": "Battery", + "state": { + "high": "Full", + "mid": "Medium", + "low": "Low", + "warning": "Empty" + } + }, + "relative_to_start": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start::name%]" + }, + "relative_to_start_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start_for_location::name%]" + }, + "relative_to_start_wm": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start_wm::name%]" + }, + "relative_to_start_wm_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start_wm_for_location::name%]" + }, + "relative_to_stop": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop::name%]" + }, + "relative_to_stop_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop_for_location::name%]" + }, + "relative_to_stop_wm": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop_wm::name%]" + }, + "relative_to_stop_wm_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop_wm_for_location::name%]" + }, + "sleep_timer_relative_to_stop": { + "name": "[%key:component::lg_thinq::entity::number::sleep_timer_relative_hour_to_stop::name%]" + }, + "sleep_timer_relative_to_stop_for_location": { + "name": "[%key:component::lg_thinq::entity::number::sleep_timer_relative_hour_to_stop_for_location::name%]" + }, + 
"absolute_to_start": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start::name%]" + }, + "absolute_to_start_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start_for_location::name%]" + }, + "absolute_to_stop": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop::name%]" + }, + "absolute_to_stop_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop_for_location::name%]" + }, + "remain": { + "name": "Remaining time" + }, + "remain_for_location": { + "name": "{location} remaining time" + }, + "running": { + "name": "Running time" + }, + "running_for_location": { + "name": "{location} running time" + }, + "total": { + "name": "Total time" + }, + "total_for_location": { + "name": "{location} total time" + }, + "target": { + "name": "Cook time" + }, + "target_for_location": { + "name": "{location} cook time" + }, + "light_start": { + "name": "Lights on time" + }, + "light_start_for_location": { + "name": "{location} lights on time" + }, + "power_level": { + "name": "Power level" + }, + "power_level_for_location": { + "name": "{location} power level" + } + }, + "select": { + "wind_strength": { + "name": "Speed", + "state": { + "slow": "[%key:component::lg_thinq::entity::climate::climate_air_conditioner::state_attributes::fan_mode::state::slow%]", + "low": "Low", + "mid": "Medium", + "high": "High", + "power": "Turbo", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "wind_1": "Step 1", + "wind_2": "Step 2", + "wind_3": "Step 3", + "wind_4": "Step 4", + "wind_5": "Step 5", + "wind_6": "Step 6", + "wind_7": "Step 7", + "wind_8": "Step 8", + "wind_9": "Step 9", + "wind_10": "Step 10" + } + }, + "monitoring_enabled": { + "name": "[%key:component::lg_thinq::entity::sensor::monitoring_enabled::name%]", + "state": { + "on_working": "[%key:component::lg_thinq::entity::sensor::monitoring_enabled::state::on_working%]", + "always": "[%key:component::lg_thinq::entity::sensor::monitoring_enabled::state::always%]" + } + }, + "current_job_mode": { + "name": "Operating mode", + "state": { + "air_clean": "Purifying", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "baby_care": "[%key:component::lg_thinq::entity::sensor::personalization_mode::state::baby%]", + "circulator": "Booster", + "clean": "Single", + "direct_clean": "Direct mode", + "dual_clean": "Dual", + "fast": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "heat_pump": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::heat_pump%]", + "humidify": "Mist", + "humidify_and_air_clean": "Mist & purifying", + "humidity": "Humid", + "nature_clean": "Natural mode", + "pet_clean": "[%key:component::lg_thinq::entity::sensor::personalization_mode::state::pet%]", + "silent": "Silent", + "sleep": "Sleep", + "smart": "Smart mode", + "space_clean": "Diffusion mode", + "spot_clean": "Wide mode", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "up_feature": "Additional mode", + "vacation": "Vacation" + } + }, + "operation_mode": { + "name": "Operation", + "state": { + "cancel": "[%key:component::lg_thinq::entity::sensor::current_state::state::cancel%]", + "power_off": "Power off", + "preheating": "Preheating", + "start": "[%key:common::action::start%]", + "stop": 
"[%key:common::action::stop%]", + "wake_up": "Sleep mode off" + } + }, + "operation_mode_for_location": { + "name": "{location} operation", + "state": { + "cancel": "[%key:component::lg_thinq::entity::sensor::current_state::state::cancel%]", + "power_off": "[%key:component::lg_thinq::entity::select::operation_mode::state::power_off%]", + "preheating": "[%key:component::lg_thinq::entity::select::operation_mode::state::preheating%]", + "start": "[%key:common::action::start%]", + "stop": "[%key:common::action::stop%]", + "wake_up": "[%key:component::lg_thinq::entity::select::operation_mode::state::wake_up%]" + } + }, + "air_clean_operation_mode": { + "name": "[%key:component::lg_thinq::entity::climate::climate_air_conditioner::state_attributes::preset_mode::state::air_clean%]", + "state": { + "start": "[%key:common::action::start%]", + "stop": "[%key:common::action::stop%]" + } + }, + "cook_mode": { + "name": "Cook mode", + "state": { + "bake": "Bake", + "convection_bake": "Convection bake", + "convection_roast": "Convection roast", + "roast": "Roast", + "crisp_convection": "Crisp convection" + } + }, + "cook_mode_for_location": { + "name": "{location} cook mode", + "state": { + "bake": "[%key:component::lg_thinq::entity::select::cook_mode::state::bake%]", + "convection_bake": "[%key:component::lg_thinq::entity::select::cook_mode::state::convection_bake%]", + "convection_roast": "[%key:component::lg_thinq::entity::select::cook_mode::state::convection_roast%]", + "roast": "[%key:component::lg_thinq::entity::select::cook_mode::state::roast%]", + "crisp_convection": "[%key:component::lg_thinq::entity::select::cook_mode::state::crisp_convection%]" + } + }, + "light_brightness": { + "name": "Light" + }, + "wind_angle": { + "name": "Rotation", + "state": { + "off": "[%key:common::state::off%]", + "angle_45": "45°", + "angle_60": "60°", + "angle_90": "90°", + "angle_140": "140°" + } + }, + "display_light": { + "name": "Display brightness", + "state": { + "off": "[%key:common::state::off%]", + "level_1": "Brightness 1", + "level_2": "Brightness 2", + "level_3": "Brightness 3" + } + }, + "fresh_air_filter": { + "name": "[%key:component::lg_thinq::entity::binary_sensor::one_touch_filter::name%]", + "state": { + "off": "[%key:common::state::off%]", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "power": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]", + "replace": "[%key:component::lg_thinq::entity::sensor::fresh_air_filter::state::replace%]", + "smart_power": "[%key:component::lg_thinq::entity::sensor::fresh_air_filter::state::smart_power%]", + "smart_off": "[%key:common::state::off%]", + "smart_on": "[%key:component::lg_thinq::entity::sensor::fresh_air_filter::state::smart_power%]" + } + }, + "hygiene_dry_mode": { + "name": "[%key:component::lg_thinq::entity::switch::hygiene_dry_mode::name%]", + "state": { + "off": "[%key:common::state::off%]", + "fast": "Fast", + "silent": "Silent", + "normal": "[%key:component::lg_thinq::entity::sensor::current_dish_washing_course::state::delicate%]" + } + } + } + } +} diff --git a/homeassistant/components/lg_thinq/switch.py b/homeassistant/components/lg_thinq/switch.py new file mode 100644 index 00000000000..25fd7eb8b64 --- /dev/null +++ b/homeassistant/components/lg_thinq/switch.py @@ -0,0 +1,228 @@ +"""Support for switch entities.""" + +from __future__ import annotations + +from dataclasses import dataclass +import logging +from typing import Any + +from thinqconnect import DeviceType +from 
thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration import ActiveMode + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ThinqConfigEntry +from .entity import ThinQEntity + + +@dataclass(frozen=True, kw_only=True) +class ThinQSwitchEntityDescription(SwitchEntityDescription): + """Describes ThinQ switch entity.""" + + on_key: str | None = None + off_key: str | None = None + + +DEVICE_TYPE_SWITCH_MAP: dict[DeviceType, tuple[ThinQSwitchEntityDescription, ...]] = { + DeviceType.AIR_CONDITIONER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.POWER_SAVE_ENABLED, + translation_key=ThinQProperty.POWER_SAVE_ENABLED, + on_key="true", + off_key="false", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.AIR_PURIFIER_FAN: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.AIR_FAN_OPERATION_MODE, translation_key="operation_power" + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.UV_NANO, + translation_key=ThinQProperty.UV_NANO, + on_key="on", + off_key="off", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.WARM_MODE, + translation_key=ThinQProperty.WARM_MODE, + on_key="warm_on", + off_key="warm_off", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.AIR_PURIFIER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.AIR_PURIFIER_OPERATION_MODE, + translation_key="operation_power", + ), + ), + DeviceType.DEHUMIDIFIER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.DEHUMIDIFIER_OPERATION_MODE, + translation_key="operation_power", + ), + ), + DeviceType.HUMIDIFIER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.HUMIDIFIER_OPERATION_MODE, + translation_key="operation_power", + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.WARM_MODE, + translation_key="humidity_warm_mode", + on_key="warm_on", + off_key="warm_off", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.MOOD_LAMP_STATE, + translation_key=ThinQProperty.MOOD_LAMP_STATE, + on_key="on", + off_key="off", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.AUTO_MODE, + translation_key=ThinQProperty.AUTO_MODE, + on_key="auto_on", + off_key="auto_off", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.SLEEP_MODE, + translation_key=ThinQProperty.SLEEP_MODE, + on_key="sleep_on", + off_key="sleep_off", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.REFRIGERATOR: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.EXPRESS_MODE, + translation_key=ThinQProperty.EXPRESS_MODE, + on_key="true", + off_key="false", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.RAPID_FREEZE, + translation_key=ThinQProperty.RAPID_FREEZE, + on_key="true", + off_key="false", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.SYSTEM_BOILER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.HOT_WATER_MODE, + translation_key=ThinQProperty.HOT_WATER_MODE, + on_key="on", + off_key="off", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.WINE_CELLAR: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.OPTIMAL_HUMIDITY, + 
translation_key=ThinQProperty.OPTIMAL_HUMIDITY, + on_key="on", + off_key="off", + entity_category=EntityCategory.CONFIG, + ), + ), +} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for switch platform.""" + entities: list[ThinQSwitchEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_SWITCH_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQSwitchEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, ActiveMode.READ_WRITE + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQSwitchEntity(ThinQEntity, SwitchEntity): + """Represent a thinq switch platform.""" + + entity_description: ThinQSwitchEntityDescription + _attr_device_class = SwitchDeviceClass.SWITCH + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + if (key := self.entity_description.on_key) is not None: + self._attr_is_on = self.data.value == key + else: + self._attr_is_on = self.data.is_on + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s", + self.coordinator.device_name, + self.property_id, + self.data.is_on, + self.is_on, + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the switch.""" + _LOGGER.debug( + "[%s:%s] async_turn_on id: %s", + self.coordinator.device_name, + self.name, + self.property_id, + ) + if (on_command := self.entity_description.on_key) is not None: + await self.async_call_api( + self.coordinator.api.post(self.property_id, on_command) + ) + else: + await self.async_call_api( + self.coordinator.api.async_turn_on(self.property_id) + ) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the switch.""" + _LOGGER.debug( + "[%s:%s] async_turn_off id: %s", + self.coordinator.device_name, + self.name, + self.property_id, + ) + if (off_command := self.entity_description.off_key) is not None: + await self.async_call_api( + self.coordinator.api.post(self.property_id, off_command) + ) + else: + await self.async_call_api( + self.coordinator.api.async_turn_off(self.property_id) + ) diff --git a/homeassistant/components/lg_thinq/vacuum.py b/homeassistant/components/lg_thinq/vacuum.py new file mode 100644 index 00000000000..6cbb731869c --- /dev/null +++ b/homeassistant/components/lg_thinq/vacuum.py @@ -0,0 +1,168 @@ +"""Support for vacuum entities.""" + +from __future__ import annotations + +from enum import StrEnum +import logging + +from thinqconnect import DeviceType +from thinqconnect.integration import ExtendedProperty + +from homeassistant.components.vacuum import ( + StateVacuumEntity, + StateVacuumEntityDescription, + VacuumActivity, + VacuumEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import ThinqConfigEntry +from .entity import ThinQEntity + +DEVICE_TYPE_VACUUM_MAP: dict[DeviceType, tuple[StateVacuumEntityDescription, ...]] = { + DeviceType.ROBOT_CLEANER: ( + StateVacuumEntityDescription( + key=ExtendedProperty.VACUUM, + name=None, + ), + ), +} + + +class State(StrEnum): + """State of device.""" + + HOMING = "homing" + PAUSE = "pause" + RESUME = "resume" + SLEEP = "sleep" + START = "start" + WAKE_UP = "wake_up" + + +ROBOT_STATUS_TO_HA = { + "charging": VacuumActivity.DOCKED, + "diagnosis": VacuumActivity.IDLE, + "homing": VacuumActivity.RETURNING, + "initializing": VacuumActivity.IDLE, + "macrosector": VacuumActivity.IDLE, + "monitoring_detecting": VacuumActivity.IDLE, + "monitoring_moving": VacuumActivity.IDLE, + "monitoring_positioning": VacuumActivity.IDLE, + "pause": VacuumActivity.PAUSED, + "reservation": VacuumActivity.IDLE, + "setdate": VacuumActivity.IDLE, + "sleep": VacuumActivity.IDLE, + "standby": VacuumActivity.IDLE, + "working": VacuumActivity.CLEANING, + "error": VacuumActivity.ERROR, +} +ROBOT_BATT_TO_HA = { + "moveless": 5, + "dock_level": 5, + "low": 30, + "mid": 50, + "high": 90, + "full": 100, + "over_charge": 100, +} +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for vacuum platform.""" + entities: list[ThinQStateVacuumEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_VACUUM_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQStateVacuumEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx(description.key) + ) + + if entities: + async_add_entities(entities) + + +class ThinQStateVacuumEntity(ThinQEntity, StateVacuumEntity): + """Represent a thinq vacuum platform.""" + + _attr_supported_features = ( + VacuumEntityFeature.SEND_COMMAND + | VacuumEntityFeature.STATE + | VacuumEntityFeature.BATTERY + | VacuumEntityFeature.START + | VacuumEntityFeature.PAUSE + | VacuumEntityFeature.RETURN_HOME + ) + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + # Update state. + self._attr_activity = ROBOT_STATUS_TO_HA[self.data.current_state] + + # Update battery. 
+ if (level := self.data.battery) is not None: + self._attr_battery_level = ( + level if isinstance(level, int) else ROBOT_BATT_TO_HA.get(level, 0) + ) + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s (battery_level=%s)", + self.coordinator.device_name, + self.property_id, + self.data.current_state, + self.state, + self.battery_level, + ) + + async def async_start(self, **kwargs) -> None: + """Start the device.""" + if self.data.current_state == State.SLEEP: + value = State.WAKE_UP + elif self._attr_activity == VacuumActivity.PAUSED: + value = State.RESUME + else: + value = State.START + + _LOGGER.debug( + "[%s:%s] async_start", self.coordinator.device_name, self.property_id + ) + await self.async_call_api( + self.coordinator.api.async_set_clean_operation_mode(self.property_id, value) + ) + + async def async_pause(self, **kwargs) -> None: + """Pause the device.""" + _LOGGER.debug( + "[%s:%s] async_pause", self.coordinator.device_name, self.property_id + ) + await self.async_call_api( + self.coordinator.api.async_set_clean_operation_mode( + self.property_id, State.PAUSE + ) + ) + + async def async_return_to_base(self, **kwargs) -> None: + """Return device to dock.""" + _LOGGER.debug( + "[%s:%s] async_return_to_base", + self.coordinator.device_name, + self.property_id, + ) + await self.async_call_api( + self.coordinator.api.async_set_clean_operation_mode( + self.property_id, State.HOMING + ) + ) diff --git a/homeassistant/components/lidarr/__init__.py b/homeassistant/components/lidarr/__init__.py index e7935501650..a421a881b69 100644 --- a/homeassistant/components/lidarr/__init__.py +++ b/homeassistant/components/lidarr/__init__.py @@ -12,17 +12,14 @@ from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platfor from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.helpers.device_registry import DeviceEntryType from .const import DEFAULT_NAME, DOMAIN from .coordinator import ( + AlbumsDataUpdateCoordinator, DiskSpaceDataUpdateCoordinator, - LidarrDataUpdateCoordinator, QueueDataUpdateCoordinator, StatusDataUpdateCoordinator, - T, WantedDataUpdateCoordinator, ) @@ -39,6 +36,7 @@ class LidarrData: queue: QueueDataUpdateCoordinator status: StatusDataUpdateCoordinator wanted: WantedDataUpdateCoordinator + albums: AlbumsDataUpdateCoordinator async def async_setup_entry(hass: HomeAssistant, entry: LidarrConfigEntry) -> bool: @@ -58,6 +56,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LidarrConfigEntry) -> bo queue=QueueDataUpdateCoordinator(hass, host_configuration, lidarr), status=StatusDataUpdateCoordinator(hass, host_configuration, lidarr), wanted=WantedDataUpdateCoordinator(hass, host_configuration, lidarr), + albums=AlbumsDataUpdateCoordinator(hass, host_configuration, lidarr), ) for field in fields(data): coordinator = getattr(data, field.name) @@ -80,22 +79,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: LidarrConfigEntry) -> bo async def async_unload_entry(hass: HomeAssistant, entry: LidarrConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -class LidarrEntity(CoordinatorEntity[LidarrDataUpdateCoordinator[T]]): - 
"""Defines a base Lidarr entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: LidarrDataUpdateCoordinator[T], - description: EntityDescription, - ) -> None: - """Initialize the Lidarr entity.""" - super().__init__(coordinator) - self.entity_description = description - self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.config_entry.entry_id)} - ) diff --git a/homeassistant/components/lidarr/config_flow.py b/homeassistant/components/lidarr/config_flow.py index 05d6900bb41..dfbfff2cdfd 100644 --- a/homeassistant/components/lidarr/config_flow.py +++ b/homeassistant/components/lidarr/config_flow.py @@ -10,12 +10,11 @@ from aiopyarr import exceptions from aiopyarr.lidarr_client import LidarrClient import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . import LidarrConfigEntry from .const import DEFAULT_NAME, DOMAIN @@ -24,16 +23,10 @@ class LidarrConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the flow.""" - self.entry: LidarrConfigEntry | None = None - async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -52,10 +45,7 @@ class LidarrConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initiated by the user.""" errors = {} - if user_input is None: - user_input = dict(self.entry.data) if self.entry else None - - else: + if user_input is not None: try: if result := await validate_input(self.hass, user_input): user_input[CONF_API_KEY] = result[1] @@ -70,17 +60,18 @@ class LidarrConfigFlow(ConfigFlow, domain=DOMAIN): except exceptions.ArrException: errors = {"base": "unknown"} if not errors: - if self.entry: - self.hass.config_entries.async_update_entry( - self.entry, data=user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - - return self.async_abort(reason="reauth_successful") return self.async_create_entry(title=DEFAULT_NAME, data=user_input) - user_input = user_input or {} + if user_input is None: + user_input = {} + if self.source == SOURCE_REAUTH: + user_input = dict(self._get_reauth_entry().data) + return self.async_show_form( step_id="user", data_schema=vol.Schema( diff --git a/homeassistant/components/lidarr/coordinator.py b/homeassistant/components/lidarr/coordinator.py index 2f18e4f0ebb..1010f708748 100644 --- a/homeassistant/components/lidarr/coordinator.py +++ b/homeassistant/components/lidarr/coordinator.py @@ -17,7 +17,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DEFAULT_MAX_RECORDS, DOMAIN, LOGGER -T = TypeVar("T", bound=list[LidarrRootFolder] | LidarrQueue | str | LidarrAlbum) +T = TypeVar("T", bound=list[LidarrRootFolder] | LidarrQueue | str | LidarrAlbum | int) class 
LidarrDataUpdateCoordinator(DataUpdateCoordinator[T], Generic[T], ABC): @@ -96,3 +96,11 @@ class WantedDataUpdateCoordinator(LidarrDataUpdateCoordinator[LidarrAlbum]): LidarrAlbum, await self.api_client.async_get_wanted(page_size=DEFAULT_MAX_RECORDS), ) + + +class AlbumsDataUpdateCoordinator(LidarrDataUpdateCoordinator[int]): + """Albums update coordinator.""" + + async def _fetch_data(self) -> int: + """Fetch the album data.""" + return len(cast(list[LidarrAlbum], await self.api_client.async_get_albums())) diff --git a/homeassistant/components/lidarr/entity.py b/homeassistant/components/lidarr/entity.py new file mode 100644 index 00000000000..a707f7850fb --- /dev/null +++ b/homeassistant/components/lidarr/entity.py @@ -0,0 +1,29 @@ +"""The Lidarr component.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import LidarrDataUpdateCoordinator, T + + +class LidarrEntity(CoordinatorEntity[LidarrDataUpdateCoordinator[T]]): + """Defines a base Lidarr entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: LidarrDataUpdateCoordinator[T], + description: EntityDescription, + ) -> None: + """Initialize the Lidarr entity.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.config_entry.entry_id)} + ) diff --git a/homeassistant/components/lidarr/sensor.py b/homeassistant/components/lidarr/sensor.py index b50a826a1c7..b02361e65ca 100644 --- a/homeassistant/components/lidarr/sensor.py +++ b/homeassistant/components/lidarr/sensor.py @@ -18,9 +18,10 @@ from homeassistant.const import UnitOfInformation from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LidarrConfigEntry, LidarrEntity +from . 
import LidarrConfigEntry from .const import BYTE_SIZES from .coordinator import LidarrDataUpdateCoordinator, T +from .entity import LidarrEntity def get_space(data: list[LidarrRootFolder], name: str) -> str: @@ -84,7 +85,7 @@ SENSOR_TYPES: dict[str, LidarrSensorEntityDescription[Any]] = { "queue": LidarrSensorEntityDescription[LidarrQueue]( key="queue", translation_key="queue", - native_unit_of_measurement="Albums", + native_unit_of_measurement="albums", value_fn=lambda data, _: data.totalRecords, state_class=SensorStateClass.TOTAL, attributes_fn=lambda data: {i.title: queue_str(i) for i in data.records}, @@ -92,7 +93,7 @@ SENSOR_TYPES: dict[str, LidarrSensorEntityDescription[Any]] = { "wanted": LidarrSensorEntityDescription[LidarrQueue]( key="wanted", translation_key="wanted", - native_unit_of_measurement="Albums", + native_unit_of_measurement="albums", value_fn=lambda data, _: data.totalRecords, state_class=SensorStateClass.TOTAL, entity_registry_enabled_default=False, @@ -100,6 +101,14 @@ SENSOR_TYPES: dict[str, LidarrSensorEntityDescription[Any]] = { album.title: album.artist.artistName for album in data.records }, ), + "albums": LidarrSensorEntityDescription[int]( + key="albums", + translation_key="albums", + native_unit_of_measurement="albums", + value_fn=lambda data, _: data, + state_class=SensorStateClass.TOTAL, + entity_registry_enabled_default=False, + ), } diff --git a/homeassistant/components/lidarr/strings.json b/homeassistant/components/lidarr/strings.json index bbe4b19db25..68e9c395319 100644 --- a/homeassistant/components/lidarr/strings.json +++ b/homeassistant/components/lidarr/strings.json @@ -39,6 +39,9 @@ }, "wanted": { "name": "Wanted" + }, + "albums": { + "name": "Albums" } } } diff --git a/homeassistant/components/lifx/__init__.py b/homeassistant/components/lifx/__init__.py index 47f00959bcd..974292c6e80 100644 --- a/homeassistant/components/lifx/__init__.py +++ b/homeassistant/components/lifx/__init__.py @@ -88,7 +88,7 @@ async def async_legacy_migration( hass, hosts_by_serial, existing_serials, legacy_entry ) if missing_discovery_count: - _LOGGER.info( + _LOGGER.debug( "Migration in progress, waiting to discover %s device(s)", missing_discovery_count, ) diff --git a/homeassistant/components/lifx/config_flow.py b/homeassistant/components/lifx/config_flow.py index e4db80bec73..053bb72c4fd 100644 --- a/homeassistant/components/lifx/config_flow.py +++ b/homeassistant/components/lifx/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import socket -from typing import Any +from typing import Any, Self from aiolifx.aiolifx import Light from aiolifx.connection import LIFXConnection @@ -41,6 +41,8 @@ class LifXConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + host: str | None = None + def __init__(self) -> None: """Initialize the config flow.""" self._discovered_devices: dict[str, Light] = {} @@ -90,11 +92,8 @@ class LifXConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle any discovery.""" self._async_abort_entries_match({CONF_HOST: host}) - self.context[CONF_HOST] = host - if any( - progress.get("context", {}).get(CONF_HOST) == host - for progress in self._async_in_progress() - ): + self.host = host + if self.hass.config_entries.flow.async_has_matching_flow(self): return self.async_abort(reason="already_in_progress") if not ( device := await self._async_try_connect( @@ -105,6 +104,10 @@ class LifXConfigFlow(ConfigFlow, domain=DOMAIN): self._discovered_device = device return await self.async_step_discovery_confirm() + def 
is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow.host == self.host + @callback def _async_discovered_pending_migration(self) -> bool: """Check if a discovered device is pending migration.""" diff --git a/homeassistant/components/lifx/const.py b/homeassistant/components/lifx/const.py index 9b213cc9f6d..667afe1125d 100644 --- a/homeassistant/components/lifx/const.py +++ b/homeassistant/components/lifx/const.py @@ -64,3 +64,6 @@ DATA_LIFX_MANAGER = "lifx_manager" LIFX_CEILING_PRODUCT_IDS = {176, 177} _LOGGER = logging.getLogger(__package__) + +# _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1 +_ATTR_COLOR_TEMP = "color_temp" diff --git a/homeassistant/components/lifx/coordinator.py b/homeassistant/components/lifx/coordinator.py index 9d5532aeeb2..41fa04057f7 100644 --- a/homeassistant/components/lifx/coordinator.py +++ b/homeassistant/components/lifx/coordinator.py @@ -6,7 +6,7 @@ import asyncio from collections.abc import Callable from datetime import timedelta from enum import IntEnum -from functools import cached_property, partial +from functools import partial from math import floor, log10 from typing import Any, cast @@ -21,6 +21,7 @@ from aiolifx.aiolifx import ( from aiolifx.connection import LIFXConnection from aiolifx_themes.themes import ThemeLibrary, ThemePainter from awesomeversion import AwesomeVersion +from propcache import cached_property from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS, diff --git a/homeassistant/components/lifx/icons.json b/homeassistant/components/lifx/icons.json index e32fdb5e06b..58a7c89e266 100644 --- a/homeassistant/components/lifx/icons.json +++ b/homeassistant/components/lifx/icons.json @@ -1,13 +1,31 @@ { "services": { - "set_hev_cycle_state": "mdi:led-on", - "set_state": "mdi:led-on", - "effect_pulse": "mdi:pulse", - "effect_colorloop": "mdi:looks", - "effect_move": "mdi:cube-send", - "effect_flame": "mdi:fire", - "effect_morph": "mdi:shape-outline", - "effect_sky": "mdi:clouds", - "effect_stop": "mdi:stop" + "set_hev_cycle_state": { + "service": "mdi:led-on" + }, + "set_state": { + "service": "mdi:led-on" + }, + "effect_pulse": { + "service": "mdi:pulse" + }, + "effect_colorloop": { + "service": "mdi:looks" + }, + "effect_move": { + "service": "mdi:cube-send" + }, + "effect_flame": { + "service": "mdi:fire" + }, + "effect_morph": { + "service": "mdi:shape-outline" + }, + "effect_sky": { + "service": "mdi:clouds" + }, + "effect_stop": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/lifx/manager.py b/homeassistant/components/lifx/manager.py index c23837c5fcc..27e62717e96 100644 --- a/homeassistant/components/lifx/manager.py +++ b/homeassistant/components/lifx/manager.py @@ -15,7 +15,6 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -30,7 +29,7 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.service import async_extract_referenced_entity_ids -from .const import ATTR_THEME, DATA_LIFX_MANAGER, DOMAIN +from .const import _ATTR_COLOR_TEMP, ATTR_THEME, DATA_LIFX_MANAGER, DOMAIN from .coordinator import LIFXUpdateCoordinator, Light from .util import convert_8_to_16, find_hsbk @@ -126,7 +125,8 @@ LIFX_EFFECT_PULSE_SCHEMA = cv.make_entity_service_schema( vol.Exclusive(ATTR_COLOR_TEMP_KELVIN, COLOR_GROUP): vol.All( 
vol.Coerce(int), vol.Range(min=1500, max=9000) ), - vol.Exclusive(ATTR_COLOR_TEMP, COLOR_GROUP): cv.positive_int, + # _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1 + vol.Exclusive(_ATTR_COLOR_TEMP, COLOR_GROUP): cv.positive_int, ATTR_PERIOD: vol.All(vol.Coerce(float), vol.Range(min=0.05)), ATTR_CYCLES: vol.All(vol.Coerce(float), vol.Range(min=1)), ATTR_MODE: vol.In(PULSE_MODES), @@ -332,7 +332,7 @@ class LIFXManager: elif service == SERVICE_EFFECT_MORPH: theme_name = kwargs.get(ATTR_THEME, "exciting") - palette = kwargs.get(ATTR_PALETTE, None) + palette = kwargs.get(ATTR_PALETTE) if palette is not None: theme = Theme() @@ -362,7 +362,7 @@ class LIFXManager: direction=kwargs.get( ATTR_DIRECTION, EFFECT_MOVE_DEFAULT_DIRECTION ), - theme_name=kwargs.get(ATTR_THEME, None), + theme_name=kwargs.get(ATTR_THEME), power_on=kwargs.get(ATTR_POWER_ON, False), ) for coordinator in coordinators @@ -410,7 +410,7 @@ class LIFXManager: await self.effects_conductor.start(effect, bulbs) elif service == SERVICE_EFFECT_SKY: - palette = kwargs.get(ATTR_PALETTE, None) + palette = kwargs.get(ATTR_PALETTE) if palette is not None: theme = Theme() for hsbk in palette: diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 08540702736..2e16eb2082b 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -17,13 +17,13 @@ "models": [ "LIFX A19", "LIFX A21", - "LIFX B10", "LIFX Beam", "LIFX BR30", "LIFX Candle", "LIFX Ceiling", "LIFX Clean", "LIFX Color", + "LIFX Colour", "LIFX DLCOL", "LIFX Dlight", "LIFX DLWW", @@ -36,13 +36,14 @@ "LIFX Neon", "LIFX Nightvision", "LIFX PAR38", + "LIFX Permanent Outdoor", "LIFX Pls", "LIFX Plus", "LIFX Round", "LIFX Square", "LIFX String", - "LIFX T10", "LIFX Tile", + "LIFX Tube", "LIFX White", "LIFX Z" ] @@ -50,8 +51,8 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.0.8", + "aiolifx==1.1.2", "aiolifx-effects==0.3.2", - "aiolifx-themes==0.5.0" + "aiolifx-themes==0.5.5" ] } diff --git a/homeassistant/components/lifx/sensor.py b/homeassistant/components/lifx/sensor.py index 2f54317f9bd..68f354024e4 100644 --- a/homeassistant/components/lifx/sensor.py +++ b/homeassistant/components/lifx/sensor.py @@ -65,7 +65,6 @@ class LIFXRssiSensor(LIFXEntity, SensorEntity): """Handle coordinator updates.""" self._attr_native_value = self.coordinator.rssi - @callback async def async_added_to_hass(self) -> None: """Enable RSSI updates.""" self.async_on_remove(self.coordinator.async_enable_rssi_updates()) diff --git a/homeassistant/components/lifx/strings.json b/homeassistant/components/lifx/strings.json index 68f9e31aabd..19d86e57f09 100644 --- a/homeassistant/components/lifx/strings.json +++ b/homeassistant/components/lifx/strings.json @@ -26,7 +26,8 @@ "abort": { "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "entity": { diff --git a/homeassistant/components/lifx/util.py b/homeassistant/components/lifx/util.py index 9782fe4adba..ffffe7a4856 100644 --- a/homeassistant/components/lifx/util.py +++ b/homeassistant/components/lifx/util.py @@ -16,10 +16,8 @@ from 
homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_RGB_COLOR, ATTR_XY_COLOR, ) @@ -29,6 +27,7 @@ from homeassistant.helpers import device_registry as dr import homeassistant.util.color as color_util from .const import ( + _ATTR_COLOR_TEMP, _LOGGER, DEFAULT_ATTEMPTS, DOMAIN, @@ -114,17 +113,14 @@ def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] | saturation = int(saturation / 100 * 65535) kelvin = 3500 - if ATTR_KELVIN in kwargs: + if _ATTR_COLOR_TEMP in kwargs: + # added in 2025.1, can be removed in 2026.1 _LOGGER.warning( - "The 'kelvin' parameter is deprecated. Please use 'color_temp_kelvin' for" + "The 'color_temp' parameter is deprecated. Please use 'color_temp_kelvin' for" " all service calls" ) - kelvin = kwargs.pop(ATTR_KELVIN) - saturation = 0 - - if ATTR_COLOR_TEMP in kwargs: kelvin = color_util.color_temperature_mired_to_kelvin( - kwargs.pop(ATTR_COLOR_TEMP) + kwargs.pop(_ATTR_COLOR_TEMP) ) saturation = 0 diff --git a/homeassistant/components/lifx_cloud/manifest.json b/homeassistant/components/lifx_cloud/manifest.json index 7799de85b8d..61e5d66c821 100644 --- a/homeassistant/components/lifx_cloud/manifest.json +++ b/homeassistant/components/lifx_cloud/manifest.json @@ -3,5 +3,6 @@ "name": "LIFX Cloud", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/lifx_cloud", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 445096ae643..33bd259469b 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -5,13 +5,12 @@ from __future__ import annotations from collections.abc import Iterable import csv import dataclasses -from datetime import timedelta -from enum import IntFlag, StrEnum -from functools import cached_property +from functools import partial import logging import os -from typing import Any, Self, cast, final +from typing import Any, Final, Self, cast, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -24,95 +23,84 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers.deprecation import ( + DeprecatedConstant, + DeprecatedConstantEnum, + all_with_deprecated_constants, + check_if_deprecated_constant, + dir_with_deprecated_constants, +) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.loader import bind_hass import homeassistant.util.color as color_util -DOMAIN = "light" +from .const import ( # noqa: F401 + COLOR_MODES_BRIGHTNESS, + COLOR_MODES_COLOR, + DATA_COMPONENT, + DATA_PROFILES, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, + DOMAIN, + SCAN_INTERVAL, + VALID_COLOR_MODES, + ColorMode, + LightEntityFeature, +) + ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE -SCAN_INTERVAL = timedelta(seconds=30) - 
-DATA_PROFILES = "light_profiles" - - -class LightEntityFeature(IntFlag): - """Supported features of the light entity.""" - - EFFECT = 4 - FLASH = 8 - TRANSITION = 32 # These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. # Please use the LightEntityFeature enum instead. -SUPPORT_BRIGHTNESS = 1 # Deprecated, replaced by color modes -SUPPORT_COLOR_TEMP = 2 # Deprecated, replaced by color modes -SUPPORT_EFFECT = 4 -SUPPORT_FLASH = 8 -SUPPORT_COLOR = 16 # Deprecated, replaced by color modes -SUPPORT_TRANSITION = 32 +_DEPRECATED_SUPPORT_BRIGHTNESS: Final = DeprecatedConstant( + 1, "supported_color_modes", "2026.1" +) # Deprecated, replaced by color modes +_DEPRECATED_SUPPORT_COLOR_TEMP: Final = DeprecatedConstant( + 2, "supported_color_modes", "2026.1" +) # Deprecated, replaced by color modes +_DEPRECATED_SUPPORT_EFFECT: Final = DeprecatedConstantEnum( + LightEntityFeature.EFFECT, "2026.1" +) +_DEPRECATED_SUPPORT_FLASH: Final = DeprecatedConstantEnum( + LightEntityFeature.FLASH, "2026.1" +) +_DEPRECATED_SUPPORT_COLOR: Final = DeprecatedConstant( + 16, "supported_color_modes", "2026.1" +) # Deprecated, replaced by color modes +_DEPRECATED_SUPPORT_TRANSITION: Final = DeprecatedConstantEnum( + LightEntityFeature.TRANSITION, "2026.1" +) # Color mode of the light ATTR_COLOR_MODE = "color_mode" # List of color modes supported by the light ATTR_SUPPORTED_COLOR_MODES = "supported_color_modes" - -class ColorMode(StrEnum): - """Possible light color modes.""" - - UNKNOWN = "unknown" - """Ambiguous color mode""" - ONOFF = "onoff" - """Must be the only supported mode""" - BRIGHTNESS = "brightness" - """Must be the only supported mode""" - COLOR_TEMP = "color_temp" - HS = "hs" - XY = "xy" - RGB = "rgb" - RGBW = "rgbw" - RGBWW = "rgbww" - WHITE = "white" - """Must *NOT* be the only supported mode""" - - # These COLOR_MODE_* constants are deprecated as of Home Assistant 2022.5. # Please use the LightEntityFeature enum instead. 
-COLOR_MODE_UNKNOWN = "unknown" -COLOR_MODE_ONOFF = "onoff" -COLOR_MODE_BRIGHTNESS = "brightness" -COLOR_MODE_COLOR_TEMP = "color_temp" -COLOR_MODE_HS = "hs" -COLOR_MODE_XY = "xy" -COLOR_MODE_RGB = "rgb" -COLOR_MODE_RGBW = "rgbw" -COLOR_MODE_RGBWW = "rgbww" -COLOR_MODE_WHITE = "white" +_DEPRECATED_COLOR_MODE_UNKNOWN: Final = DeprecatedConstantEnum( + ColorMode.UNKNOWN, "2026.1" +) +_DEPRECATED_COLOR_MODE_ONOFF: Final = DeprecatedConstantEnum(ColorMode.ONOFF, "2026.1") +_DEPRECATED_COLOR_MODE_BRIGHTNESS: Final = DeprecatedConstantEnum( + ColorMode.BRIGHTNESS, "2026.1" +) +_DEPRECATED_COLOR_MODE_COLOR_TEMP: Final = DeprecatedConstantEnum( + ColorMode.COLOR_TEMP, "2026.1" +) +_DEPRECATED_COLOR_MODE_HS: Final = DeprecatedConstantEnum(ColorMode.HS, "2026.1") +_DEPRECATED_COLOR_MODE_XY: Final = DeprecatedConstantEnum(ColorMode.XY, "2026.1") +_DEPRECATED_COLOR_MODE_RGB: Final = DeprecatedConstantEnum(ColorMode.RGB, "2026.1") +_DEPRECATED_COLOR_MODE_RGBW: Final = DeprecatedConstantEnum(ColorMode.RGBW, "2026.1") +_DEPRECATED_COLOR_MODE_RGBWW: Final = DeprecatedConstantEnum(ColorMode.RGBWW, "2026.1") +_DEPRECATED_COLOR_MODE_WHITE: Final = DeprecatedConstantEnum(ColorMode.WHITE, "2026.1") -VALID_COLOR_MODES = { - ColorMode.ONOFF, - ColorMode.BRIGHTNESS, - ColorMode.COLOR_TEMP, - ColorMode.HS, - ColorMode.XY, - ColorMode.RGB, - ColorMode.RGBW, - ColorMode.RGBWW, - ColorMode.WHITE, -} -COLOR_MODES_BRIGHTNESS = VALID_COLOR_MODES - {ColorMode.ONOFF} -COLOR_MODES_COLOR = { - ColorMode.HS, - ColorMode.RGB, - ColorMode.RGBW, - ColorMode.RGBWW, - ColorMode.XY, -} # mypy: disallow-any-generics @@ -198,16 +186,26 @@ ATTR_RGBW_COLOR = "rgbw_color" ATTR_RGBWW_COLOR = "rgbww_color" ATTR_XY_COLOR = "xy_color" ATTR_HS_COLOR = "hs_color" -ATTR_COLOR_TEMP = "color_temp" # Deprecated in HA Core 2022.11 -ATTR_KELVIN = "kelvin" # Deprecated in HA Core 2022.11 -ATTR_MIN_MIREDS = "min_mireds" # Deprecated in HA Core 2022.11 -ATTR_MAX_MIREDS = "max_mireds" # Deprecated in HA Core 2022.11 ATTR_COLOR_TEMP_KELVIN = "color_temp_kelvin" ATTR_MIN_COLOR_TEMP_KELVIN = "min_color_temp_kelvin" ATTR_MAX_COLOR_TEMP_KELVIN = "max_color_temp_kelvin" ATTR_COLOR_NAME = "color_name" ATTR_WHITE = "white" +# Deprecated in HA Core 2022.11 +_DEPRECATED_ATTR_COLOR_TEMP: Final = DeprecatedConstant( + "color_temp", "kelvin equivalent (ATTR_COLOR_TEMP_KELVIN)", "2026.1" +) +_DEPRECATED_ATTR_KELVIN: Final = DeprecatedConstant( + "kelvin", "ATTR_COLOR_TEMP_KELVIN", "2026.1" +) +_DEPRECATED_ATTR_MIN_MIREDS: Final = DeprecatedConstant( + "min_mireds", "kelvin equivalent (ATTR_MAX_COLOR_TEMP_KELVIN)", "2026.1" +) +_DEPRECATED_ATTR_MAX_MIREDS: Final = DeprecatedConstant( + "max_mireds", "kelvin equivalent (ATTR_MIN_COLOR_TEMP_KELVIN)", "2026.1" +) + # Brightness of the light, 0..255 or percentage ATTR_BRIGHTNESS = "brightness" ATTR_BRIGHTNESS_PCT = "brightness_pct" @@ -252,11 +250,11 @@ LIGHT_TURN_ON_SCHEMA: VolDictType = { vol.Exclusive(ATTR_BRIGHTNESS_STEP, ATTR_BRIGHTNESS): VALID_BRIGHTNESS_STEP, vol.Exclusive(ATTR_BRIGHTNESS_STEP_PCT, ATTR_BRIGHTNESS): VALID_BRIGHTNESS_STEP_PCT, vol.Exclusive(ATTR_COLOR_NAME, COLOR_GROUP): cv.string, - vol.Exclusive(ATTR_COLOR_TEMP, COLOR_GROUP): vol.All( + vol.Exclusive(_DEPRECATED_ATTR_COLOR_TEMP.value, COLOR_GROUP): vol.All( vol.Coerce(int), vol.Range(min=1) ), vol.Exclusive(ATTR_COLOR_TEMP_KELVIN, COLOR_GROUP): cv.positive_int, - vol.Exclusive(ATTR_KELVIN, COLOR_GROUP): cv.positive_int, + vol.Exclusive(_DEPRECATED_ATTR_KELVIN.value, COLOR_GROUP): cv.positive_int, vol.Exclusive(ATTR_HS_COLOR, COLOR_GROUP): 
vol.All( vol.Coerce(tuple), vol.ExactSequence( @@ -299,7 +297,7 @@ def is_on(hass: HomeAssistant, entity_id: str) -> bool: def preprocess_turn_on_alternatives( - hass: HomeAssistant, params: dict[str, Any] | VolDictType + hass: HomeAssistant, params: dict[str, Any] ) -> None: """Process extra data for turn light on request. @@ -319,19 +317,29 @@ def preprocess_turn_on_alternatives( _LOGGER.warning("Got unknown color %s, falling back to white", color_name) params[ATTR_RGB_COLOR] = (255, 255, 255) - if (mired := params.pop(ATTR_COLOR_TEMP, None)) is not None: + if (mired := params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value, None)) is not None: + _LOGGER.warning( + "Got `color_temp` argument in `turn_on` service, which is deprecated " + "and will break in Home Assistant 2026.1, please use " + "`color_temp_kelvin` argument" + ) kelvin = color_util.color_temperature_mired_to_kelvin(mired) - params[ATTR_COLOR_TEMP] = int(mired) + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = int(mired) params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin) - if (kelvin := params.pop(ATTR_KELVIN, None)) is not None: + if (kelvin := params.pop(_DEPRECATED_ATTR_KELVIN.value, None)) is not None: + _LOGGER.warning( + "Got `kelvin` argument in `turn_on` service, which is deprecated " + "and will break in Home Assistant 2026.1, please use " + "`color_temp_kelvin` argument" + ) mired = color_util.color_temperature_kelvin_to_mired(kelvin) - params[ATTR_COLOR_TEMP] = int(mired) + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = int(mired) params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin) if (kelvin := params.pop(ATTR_COLOR_TEMP_KELVIN, None)) is not None: mired = color_util.color_temperature_kelvin_to_mired(kelvin) - params[ATTR_COLOR_TEMP] = int(mired) + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = int(mired) params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin) brightness_pct = params.pop(ATTR_BRIGHTNESS_PCT, None) @@ -346,7 +354,7 @@ def filter_turn_off_params( if not params: return params - supported_features = light.supported_features_compat + supported_features = light.supported_features if LightEntityFeature.FLASH not in supported_features: params.pop(ATTR_FLASH, None) @@ -358,7 +366,7 @@ def filter_turn_off_params( def filter_turn_on_params(light: LightEntity, params: dict[str, Any]) -> dict[str, Any]: """Filter out params not supported by the light.""" - supported_features = light.supported_features_compat + supported_features = light.supported_features if LightEntityFeature.EFFECT not in supported_features: params.pop(ATTR_EFFECT, None) @@ -373,7 +381,7 @@ def filter_turn_on_params(light: LightEntity, params: dict[str, Any]) -> dict[st if not brightness_supported(supported_color_modes): params.pop(ATTR_BRIGHTNESS, None) if ColorMode.COLOR_TEMP not in supported_color_modes: - params.pop(ATTR_COLOR_TEMP, None) + params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value, None) params.pop(ATTR_COLOR_TEMP_KELVIN, None) if ColorMode.HS not in supported_color_modes: params.pop(ATTR_HS_COLOR, None) @@ -393,7 +401,7 @@ def filter_turn_on_params(light: LightEntity, params: dict[str, Any]) -> dict[st async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901 """Expose light control via state machine and services.""" - component = hass.data[DOMAIN] = EntityComponent[LightEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[LightEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -403,10 +411,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: # of the light base 
platform. hass.async_create_task(profiles.async_initialize(), eager_start=True) - def preprocess_data(data: VolDictType) -> VolDictType: + def preprocess_data(data: dict[str, Any]) -> VolDictType: """Preprocess the service data.""" base: VolDictType = { - entity_field: data.pop(entity_field) + entity_field: data.pop(entity_field) # type: ignore[arg-type] for entity_field in cv.ENTITY_SERVICE_FIELDS if entity_field in data } @@ -455,7 +463,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: and ColorMode.COLOR_TEMP not in supported_color_modes and ColorMode.RGBWW in supported_color_modes ): - params.pop(ATTR_COLOR_TEMP) + params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value) color_temp = params.pop(ATTR_COLOR_TEMP_KELVIN) brightness = params.get(ATTR_BRIGHTNESS, light.brightness) params[ATTR_RGBWW_COLOR] = color_util.color_temperature_to_rgbww( @@ -465,7 +473,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: light.max_color_temp_kelvin, ) elif ColorMode.COLOR_TEMP not in legacy_supported_color_modes: - params.pop(ATTR_COLOR_TEMP) + params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value) color_temp = params.pop(ATTR_COLOR_TEMP_KELVIN) if color_supported(legacy_supported_color_modes): params[ATTR_HS_COLOR] = color_util.color_temperature_to_hs( @@ -512,8 +520,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ATTR_RGB_COLOR in params and ColorMode.RGB not in supported_color_modes: rgb_color = params.pop(ATTR_RGB_COLOR) @@ -535,8 +545,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ATTR_XY_COLOR in params and ColorMode.XY not in supported_color_modes: xy_color = params.pop(ATTR_XY_COLOR) @@ -556,8 +568,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ATTR_RGBW_COLOR in params and ColorMode.RGBW not in supported_color_modes: rgbw_color = params.pop(ATTR_RGBW_COLOR) @@ -577,8 +591,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ( ATTR_RGBWW_COLOR in params and ColorMode.RGBWW not in supported_color_modes @@ -601,8 +617,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: 
params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) # If white is set to True, set it to the light's brightness @@ -670,14 +688,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[LightEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[LightEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) def _coerce_none(value: str) -> None: @@ -812,7 +828,7 @@ class Profiles: color_attributes = ( ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_HS_COLOR, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -860,13 +876,13 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): { ATTR_SUPPORTED_COLOR_MODES, ATTR_EFFECT_LIST, - ATTR_MIN_MIREDS, - ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS.value, + _DEPRECATED_ATTR_MAX_MIREDS.value, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, @@ -880,17 +896,15 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): entity_description: LightEntityDescription _attr_brightness: int | None = None _attr_color_mode: ColorMode | str | None = None - _attr_color_temp: int | None = None _attr_color_temp_kelvin: int | None = None _attr_effect_list: list[str] | None = None _attr_effect: str | None = None _attr_hs_color: tuple[float, float] | None = None - # Default to the Philips Hue value that HA has always assumed - # https://developers.meethue.com/documentation/core-concepts + # We cannot set defaults without causing breaking changes until mireds + # are fully removed. 
Until then, developers can explicitly + # use DEFAULT_MIN_KELVIN and DEFAULT_MAX_KELVIN _attr_max_color_temp_kelvin: int | None = None _attr_min_color_temp_kelvin: int | None = None - _attr_max_mireds: int = 500 # 2000 K - _attr_min_mireds: int = 153 # 6500 K _attr_rgb_color: tuple[int, int, int] | None = None _attr_rgbw_color: tuple[int, int, int, int] | None = None _attr_rgbww_color: tuple[int, int, int, int, int] | None = None @@ -898,6 +912,11 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): _attr_supported_features: LightEntityFeature = LightEntityFeature(0) _attr_xy_color: tuple[float, float] | None = None + # Deprecated, see https://github.com/home-assistant/core/pull/79591 + _attr_color_temp: Final[int | None] = None + _attr_max_mireds: Final[int] = 500 # = 2000 K + _attr_min_mireds: Final[int] = 153 # = 6535.94 K (~ 6500 K) + __color_mode_reported = False @cached_property @@ -973,32 +992,70 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Return the rgbww color value [int, int, int, int, int].""" return self._attr_rgbww_color + @final @cached_property def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" + """Return the CT color value in mireds. + + Deprecated, see https://github.com/home-assistant/core/pull/79591 + """ return self._attr_color_temp @property def color_temp_kelvin(self) -> int | None: """Return the CT color value in Kelvin.""" if self._attr_color_temp_kelvin is None and (color_temp := self.color_temp): + report_usage( + "is using mireds for current light color temperature, when " + "it should be adjusted to use the kelvin attribute " + "`_attr_color_temp_kelvin` or override the kelvin property " + "`color_temp_kelvin` (see " + "https://github.com/home-assistant/core/pull/79591)", + breaks_in_ha_version="2026.1", + core_behavior=ReportBehavior.LOG, + integration_domain=self.platform.platform_name + if self.platform + else None, + exclude_integrations={DOMAIN}, + ) return color_util.color_temperature_mired_to_kelvin(color_temp) return self._attr_color_temp_kelvin + @final @cached_property def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" + """Return the coldest color_temp that this light supports. + + Deprecated, see https://github.com/home-assistant/core/pull/79591 + """ return self._attr_min_mireds + @final @cached_property def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" + """Return the warmest color_temp that this light supports. 
+ + Deprecated, see https://github.com/home-assistant/core/pull/79591 + """ return self._attr_max_mireds @property def min_color_temp_kelvin(self) -> int: """Return the warmest color_temp_kelvin that this light supports.""" if self._attr_min_color_temp_kelvin is None: + report_usage( + "is using mireds for warmest light color temperature, when " + "it should be adjusted to use the kelvin attribute " + "`_attr_min_color_temp_kelvin` or override the kelvin property " + "`min_color_temp_kelvin`, possibly with default DEFAULT_MIN_KELVIN " + "(see https://github.com/home-assistant/core/pull/79591)", + breaks_in_ha_version="2026.1", + core_behavior=ReportBehavior.LOG, + integration_domain=self.platform.platform_name + if self.platform + else None, + exclude_integrations={DOMAIN}, + ) return color_util.color_temperature_mired_to_kelvin(self.max_mireds) return self._attr_min_color_temp_kelvin @@ -1006,6 +1063,19 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def max_color_temp_kelvin(self) -> int: """Return the coldest color_temp_kelvin that this light supports.""" if self._attr_max_color_temp_kelvin is None: + report_usage( + "is using mireds for coldest light color temperature, when " + "it should be adjusted to use the kelvin attribute " + "`_attr_max_color_temp_kelvin` or override the kelvin property " + "`max_color_temp_kelvin`, possibly with default DEFAULT_MAX_KELVIN " + "(see https://github.com/home-assistant/core/pull/79591)", + breaks_in_ha_version="2026.1", + core_behavior=ReportBehavior.LOG, + integration_domain=self.platform.platform_name + if self.platform + else None, + exclude_integrations={DOMAIN}, + ) return color_util.color_temperature_mired_to_kelvin(self.min_mireds) return self._attr_max_color_temp_kelvin @@ -1023,7 +1093,7 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def capability_attributes(self) -> dict[str, Any]: """Return capability attributes.""" data: dict[str, Any] = {} - supported_features = self.supported_features_compat + supported_features = self.supported_features supported_color_modes = self._light_internal_supported_color_modes if ColorMode.COLOR_TEMP in supported_color_modes: @@ -1032,16 +1102,16 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): data[ATTR_MIN_COLOR_TEMP_KELVIN] = min_color_temp_kelvin data[ATTR_MAX_COLOR_TEMP_KELVIN] = max_color_temp_kelvin if not max_color_temp_kelvin: - data[ATTR_MIN_MIREDS] = None + data[_DEPRECATED_ATTR_MIN_MIREDS.value] = None else: - data[ATTR_MIN_MIREDS] = color_util.color_temperature_kelvin_to_mired( - max_color_temp_kelvin + data[_DEPRECATED_ATTR_MIN_MIREDS.value] = ( + color_util.color_temperature_kelvin_to_mired(max_color_temp_kelvin) ) if not min_color_temp_kelvin: - data[ATTR_MAX_MIREDS] = None + data[_DEPRECATED_ATTR_MAX_MIREDS.value] = None else: - data[ATTR_MAX_MIREDS] = color_util.color_temperature_kelvin_to_mired( - min_color_temp_kelvin + data[_DEPRECATED_ATTR_MAX_MIREDS.value] = ( + color_util.color_temperature_kelvin_to_mired(min_color_temp_kelvin) ) if LightEntityFeature.EFFECT in supported_features: data[ATTR_EFFECT_LIST] = self.effect_list @@ -1185,12 +1255,11 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def state_attributes(self) -> dict[str, Any] | None: """Return state attributes.""" data: dict[str, Any] = {} - supported_features = self.supported_features_compat + supported_features = self.supported_features supported_color_modes = 
self.supported_color_modes legacy_supported_color_modes = ( supported_color_modes or self._light_internal_supported_color_modes ) - supported_features_value = supported_features.value _is_on = self.is_on color_mode = self._light_internal_color_mode if _is_on else None @@ -1209,42 +1278,20 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): data[ATTR_BRIGHTNESS] = self.brightness else: data[ATTR_BRIGHTNESS] = None - elif supported_features_value & SUPPORT_BRIGHTNESS: - # Backwards compatibility for ambiguous / incomplete states - # Warning is printed by supported_features_compat, remove in 2025.1 - if _is_on: - data[ATTR_BRIGHTNESS] = self.brightness - else: - data[ATTR_BRIGHTNESS] = None if color_temp_supported(supported_color_modes): if color_mode == ColorMode.COLOR_TEMP: color_temp_kelvin = self.color_temp_kelvin data[ATTR_COLOR_TEMP_KELVIN] = color_temp_kelvin if color_temp_kelvin: - data[ATTR_COLOR_TEMP] = ( + data[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( color_util.color_temperature_kelvin_to_mired(color_temp_kelvin) ) else: - data[ATTR_COLOR_TEMP] = None + data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None else: data[ATTR_COLOR_TEMP_KELVIN] = None - data[ATTR_COLOR_TEMP] = None - elif supported_features_value & SUPPORT_COLOR_TEMP: - # Backwards compatibility - # Warning is printed by supported_features_compat, remove in 2025.1 - if _is_on: - color_temp_kelvin = self.color_temp_kelvin - data[ATTR_COLOR_TEMP_KELVIN] = color_temp_kelvin - if color_temp_kelvin: - data[ATTR_COLOR_TEMP] = ( - color_util.color_temperature_kelvin_to_mired(color_temp_kelvin) - ) - else: - data[ATTR_COLOR_TEMP] = None - else: - data[ATTR_COLOR_TEMP_KELVIN] = None - data[ATTR_COLOR_TEMP] = None + data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None if color_supported(legacy_supported_color_modes) or color_temp_supported( legacy_supported_color_modes @@ -1282,21 +1329,7 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): type(self), report_issue, ) - supported_features = self.supported_features_compat - supported_features_value = supported_features.value - supported_color_modes: set[ColorMode] = set() - - if supported_features_value & SUPPORT_COLOR_TEMP: - supported_color_modes.add(ColorMode.COLOR_TEMP) - if supported_features_value & SUPPORT_COLOR: - supported_color_modes.add(ColorMode.HS) - if not supported_color_modes and supported_features_value & SUPPORT_BRIGHTNESS: - supported_color_modes = {ColorMode.BRIGHTNESS} - - if not supported_color_modes: - supported_color_modes = {ColorMode.ONOFF} - - return supported_color_modes + return {ColorMode.ONOFF} @cached_property def supported_color_modes(self) -> set[ColorMode] | set[str] | None: @@ -1308,40 +1341,17 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Flag supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> LightEntityFeature: - """Return the supported features as LightEntityFeature. - - Remove this compatibility shim in 2025.1 or later. 
- """ - features = self.supported_features - if type(features) is not int: # noqa: E721 - return features - new_features = LightEntityFeature(features) - if self._deprecated_supported_features_reported is True: - return new_features - self._deprecated_supported_features_reported = True - report_issue = self._suggest_report_issue() - report_issue += ( - " and reference " - "https://developers.home-assistant.io/blog/2023/12/28/support-feature-magic-numbers-deprecation" - ) - _LOGGER.warning( - ( - "Entity %s (%s) is using deprecated supported features" - " values which will be removed in HA Core 2025.1. Instead it should use" - " %s and color modes, please %s" - ), - self.entity_id, - type(self), - repr(new_features), - report_issue, - ) - return new_features - def __should_report_light_issue(self) -> bool: """Return if light color mode issues should be reported.""" if not self.platform: return True # philips_js has known issues, we don't need users to open issues return self.platform.platform_name not in {"philips_js"} + + +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/light/const.py b/homeassistant/components/light/const.py new file mode 100644 index 00000000000..d27750a950d --- /dev/null +++ b/homeassistant/components/light/const.py @@ -0,0 +1,73 @@ +"""Provides constants for lights.""" + +from __future__ import annotations + +from datetime import timedelta +from enum import IntFlag, StrEnum +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from . import LightEntity, Profiles + +DOMAIN = "light" +DATA_COMPONENT: HassKey[EntityComponent[LightEntity]] = HassKey(DOMAIN) +SCAN_INTERVAL = timedelta(seconds=30) + +DATA_PROFILES: HassKey[Profiles] = HassKey(f"{DOMAIN}_profiles") + + +class LightEntityFeature(IntFlag): + """Supported features of the light entity.""" + + EFFECT = 4 + FLASH = 8 + TRANSITION = 32 + + +class ColorMode(StrEnum): + """Possible light color modes.""" + + UNKNOWN = "unknown" + """Ambiguous color mode""" + ONOFF = "onoff" + """Must be the only supported mode""" + BRIGHTNESS = "brightness" + """Must be the only supported mode""" + COLOR_TEMP = "color_temp" + HS = "hs" + XY = "xy" + RGB = "rgb" + RGBW = "rgbw" + RGBWW = "rgbww" + WHITE = "white" + """Must *NOT* be the only supported mode""" + + +VALID_COLOR_MODES = { + ColorMode.ONOFF, + ColorMode.BRIGHTNESS, + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.XY, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, + ColorMode.WHITE, +} +COLOR_MODES_BRIGHTNESS = VALID_COLOR_MODES - {ColorMode.ONOFF} +COLOR_MODES_COLOR = { + ColorMode.HS, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, + ColorMode.XY, +} + +# Default to the Philips Hue value that HA has always assumed +# https://developers.meethue.com/documentation/core-concepts +DEFAULT_MIN_KELVIN = 2000 # 500 mireds +DEFAULT_MAX_KELVIN = 6535 # 153 mireds diff --git a/homeassistant/components/light/device_action.py b/homeassistant/components/light/device_action.py index 45e9731c5b8..56bf7485e68 100644 --- a/homeassistant/components/light/device_action.py +++ b/homeassistant/components/light/device_action.py @@ -27,14 +27,13 @@ from . 
import ( ATTR_BRIGHTNESS_PCT, ATTR_BRIGHTNESS_STEP_PCT, ATTR_FLASH, - DOMAIN, FLASH_SHORT, VALID_BRIGHTNESS_PCT, VALID_FLASH, - LightEntityFeature, brightness_supported, get_supported_color_modes, ) +from .const import DOMAIN, LightEntityFeature # mypy: disallow-any-generics diff --git a/homeassistant/components/light/device_condition.py b/homeassistant/components/light/device_condition.py index f9bb7c30bd7..6dc702f8551 100644 --- a/homeassistant/components/light/device_condition.py +++ b/homeassistant/components/light/device_condition.py @@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.condition import ConditionCheckerType from homeassistant.helpers.typing import ConfigType -from . import DOMAIN +from .const import DOMAIN # mypy: disallow-any-generics diff --git a/homeassistant/components/light/device_trigger.py b/homeassistant/components/light/device_trigger.py index 033ea75357e..1f6bfdbe6e9 100644 --- a/homeassistant/components/light/device_trigger.py +++ b/homeassistant/components/light/device_trigger.py @@ -10,7 +10,7 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN +from .const import DOMAIN TRIGGER_SCHEMA = vol.All( toggle_entity.TRIGGER_SCHEMA, diff --git a/homeassistant/components/light/icons.json b/homeassistant/components/light/icons.json index 5113834e575..df98def090e 100644 --- a/homeassistant/components/light/icons.json +++ b/homeassistant/components/light/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "toggle": "mdi:lightbulb", - "turn_off": "mdi:lightbulb-off", - "turn_on": "mdi:lightbulb-on" + "toggle": { + "service": "mdi:lightbulb" + }, + "turn_off": { + "service": "mdi:lightbulb-off" + }, + "turn_on": { + "service": "mdi:lightbulb-on" + } } } diff --git a/homeassistant/components/light/intent.py b/homeassistant/components/light/intent.py index 458dbbde770..e496255029a 100644 --- a/homeassistant/components/light/intent.py +++ b/homeassistant/components/light/intent.py @@ -11,7 +11,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, intent import homeassistant.util.color as color_util -from . import ATTR_BRIGHTNESS_PCT, ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, DOMAIN +from . import ATTR_BRIGHTNESS_PCT, ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/light/reproduce_state.py b/homeassistant/components/light/reproduce_state.py index 4024f2f84ba..4e994ab791d 100644 --- a/homeassistant/components/light/reproduce_state.py +++ b/homeassistant/components/light/reproduce_state.py @@ -15,11 +15,13 @@ from homeassistant.const import ( STATE_ON, ) from homeassistant.core import Context, HomeAssistant, State +from homeassistant.util import color as color_util from . import ( + _DEPRECATED_ATTR_COLOR_TEMP, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -28,9 +30,8 @@ from . 
import ( ATTR_TRANSITION, ATTR_WHITE, ATTR_XY_COLOR, - DOMAIN, - ColorMode, ) +from .const import DOMAIN, ColorMode _LOGGER = logging.getLogger(__name__) @@ -40,7 +41,8 @@ ATTR_GROUP = [ATTR_BRIGHTNESS, ATTR_EFFECT] COLOR_GROUP = [ ATTR_HS_COLOR, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -56,7 +58,7 @@ class ColorModeAttr(NamedTuple): COLOR_MODE_TO_ATTRIBUTE = { - ColorMode.COLOR_TEMP: ColorModeAttr(ATTR_COLOR_TEMP, ATTR_COLOR_TEMP), + ColorMode.COLOR_TEMP: ColorModeAttr(ATTR_COLOR_TEMP_KELVIN, ATTR_COLOR_TEMP_KELVIN), ColorMode.HS: ColorModeAttr(ATTR_HS_COLOR, ATTR_HS_COLOR), ColorMode.RGB: ColorModeAttr(ATTR_RGB_COLOR, ATTR_RGB_COLOR), ColorMode.RGBW: ColorModeAttr(ATTR_RGBW_COLOR, ATTR_RGBW_COLOR), @@ -125,13 +127,30 @@ async def _async_reproduce_state( color_mode = state.attributes[ATTR_COLOR_MODE] if cm_attr := COLOR_MODE_TO_ATTRIBUTE.get(color_mode): if (cm_attr_state := state.attributes.get(cm_attr.state_attr)) is None: + if ( + color_mode != ColorMode.COLOR_TEMP + or ( + mireds := state.attributes.get( + _DEPRECATED_ATTR_COLOR_TEMP.value + ) + ) + is None + ): + _LOGGER.warning( + "Color mode %s specified but attribute %s missing for: %s", + color_mode, + cm_attr.state_attr, + state.entity_id, + ) + return _LOGGER.warning( - "Color mode %s specified but attribute %s missing for: %s", + "Color mode %s specified but attribute %s missing for: %s, " + "using color_temp (mireds) as fallback", color_mode, cm_attr.state_attr, state.entity_id, ) - return + cm_attr_state = color_util.color_temperature_mired_to_kelvin(mireds) service_data[cm_attr.parameter] = cm_attr_state else: # Fall back to Choosing the first color that is specified diff --git a/homeassistant/components/light/significant_change.py b/homeassistant/components/light/significant_change.py index 1877c925622..773b7a6b898 100644 --- a/homeassistant/components/light/significant_change.py +++ b/homeassistant/components/light/significant_change.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.significant_change import check_absolute_change -from . import ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_HS_COLOR +from . 
import ATTR_BRIGHTNESS, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR @callback @@ -44,10 +44,10 @@ def async_check_significant_change( return True if check_absolute_change( - # Default range 153..500 - old_attrs.get(ATTR_COLOR_TEMP), - new_attrs.get(ATTR_COLOR_TEMP), - 5, + # Default range 2000..6500 + old_attrs.get(ATTR_COLOR_TEMP_KELVIN), + new_attrs.get(ATTR_COLOR_TEMP_KELVIN), + 50, ): return True diff --git a/homeassistant/components/lightwave/climate.py b/homeassistant/components/lightwave/climate.py index 1016e8ce80d..942fb4a1fbc 100644 --- a/homeassistant/components/lightwave/climate.py +++ b/homeassistant/components/lightwave/climate.py @@ -55,7 +55,6 @@ class LightwaveTrv(ClimateEntity): ) _attr_target_temperature_step = 0.5 _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, name, device_id, lwlink, serial): """Initialize LightwaveTrv entity.""" diff --git a/homeassistant/components/lightwave/manifest.json b/homeassistant/components/lightwave/manifest.json index d242195a71c..75b39b18c26 100644 --- a/homeassistant/components/lightwave/manifest.json +++ b/homeassistant/components/lightwave/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/lightwave", "iot_class": "assumed_state", "loggers": ["lightwave"], + "quality_scale": "legacy", "requirements": ["lightwave==0.24"] } diff --git a/homeassistant/components/limitlessled/light.py b/homeassistant/components/limitlessled/light.py index 4456d112d0f..4b2b75be9d7 100644 --- a/homeassistant/components/limitlessled/light.py +++ b/homeassistant/components/limitlessled/light.py @@ -19,7 +19,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -38,7 +38,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.color import color_hs_to_RGB, color_temperature_mired_to_kelvin +from homeassistant.util.color import color_hs_to_RGB _LOGGER = logging.getLogger(__name__) @@ -119,13 +119,13 @@ def rewrite_legacy(config: ConfigType) -> ConfigType: else: _LOGGER.warning("Legacy configuration format detected") for i in range(1, 5): - name_key = "group_%d_name" % i + name_key = f"group_{i}_name" if name_key in bridge_conf: groups.append( { "number": i, "type": bridge_conf.get( - "group_%d_type" % i, DEFAULT_LED_TYPE + f"group_{i}_type", DEFAULT_LED_TYPE ), "name": bridge_conf.get(name_key), } @@ -217,8 +217,8 @@ class LimitlessLEDGroup(LightEntity, RestoreEntity): """Representation of a LimitessLED group.""" _attr_assumed_state = True - _attr_max_mireds = 370 - _attr_min_mireds = 154 + _attr_min_color_temp_kelvin = 2700 # 370 Mireds + _attr_max_color_temp_kelvin = 6500 # 154 Mireds _attr_should_poll = False def __init__(self, group: Group, config: dict[str, Any]) -> None: @@ -261,7 +261,9 @@ class LimitlessLEDGroup(LightEntity, RestoreEntity): if last_state := await self.async_get_last_state(): self._attr_is_on = last_state.state == STATE_ON self._attr_brightness = last_state.attributes.get("brightness") - self._attr_color_temp = last_state.attributes.get("color_temp") + self._attr_color_temp_kelvin = last_state.attributes.get( + "color_temp_kelvin" + ) self._attr_hs_color = 
last_state.attributes.get("hs_color") @property @@ -325,12 +327,12 @@ class LimitlessLEDGroup(LightEntity, RestoreEntity): else: args["color"] = self.limitlessled_color() - if ATTR_COLOR_TEMP in kwargs: + if ATTR_COLOR_TEMP_KELVIN in kwargs: assert self.supported_color_modes if ColorMode.HS in self.supported_color_modes: pipeline.white() self._attr_hs_color = WHITE - self._attr_color_temp = kwargs[ATTR_COLOR_TEMP] + self._attr_color_temp_kelvin = kwargs[ATTR_COLOR_TEMP_KELVIN] args["temperature"] = self.limitlessled_temperature() if args: @@ -354,12 +356,9 @@ class LimitlessLEDGroup(LightEntity, RestoreEntity): def limitlessled_temperature(self) -> float: """Convert Home Assistant color temperature units to percentage.""" - max_kelvin = color_temperature_mired_to_kelvin(self.min_mireds) - min_kelvin = color_temperature_mired_to_kelvin(self.max_mireds) - width = max_kelvin - min_kelvin - assert self.color_temp is not None - kelvin = color_temperature_mired_to_kelvin(self.color_temp) - temperature = (kelvin - min_kelvin) / width + width = self.max_color_temp_kelvin - self.min_color_temp_kelvin + assert self.color_temp_kelvin is not None + temperature = (self.color_temp_kelvin - self.min_color_temp_kelvin) / width return max(0, min(1, temperature)) def limitlessled_brightness(self) -> float: diff --git a/homeassistant/components/limitlessled/manifest.json b/homeassistant/components/limitlessled/manifest.json index 3495ac2c981..c2a921c6e24 100644 --- a/homeassistant/components/limitlessled/manifest.json +++ b/homeassistant/components/limitlessled/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/limitlessled", "iot_class": "assumed_state", "loggers": ["limitlessled"], + "quality_scale": "legacy", "requirements": ["limitlessled==1.1.3"] } diff --git a/homeassistant/components/linear_garage_door/config_flow.py b/homeassistant/components/linear_garage_door/config_flow.py index d1dda97c513..2cfd0af6a8f 100644 --- a/homeassistant/components/linear_garage_door/config_flow.py +++ b/homeassistant/components/linear_garage_door/config_flow.py @@ -11,7 +11,7 @@ from linear_garage_door import Linear from linear_garage_door.errors import InvalidLoginError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -69,7 +69,6 @@ class LinearGarageDoorConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the config flow.""" self.data: dict[str, Sequence[Collection[str]]] = {} - self._reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -93,14 +92,14 @@ class LinearGarageDoorConfigFlow(ConfigFlow, domain=DOMAIN): self.data = info # Check if we are reauthenticating - if self._reauth_entry is not None: - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data=self._reauth_entry.data - | {"email": self.data["email"], "password": self.data["password"]}, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={ + CONF_EMAIL: self.data["email"], + CONF_PASSWORD: self.data["password"], + }, ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return 
self.async_abort(reason="reauth_successful") return await self.async_step_site() @@ -150,9 +149,6 @@ class LinearGarageDoorConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Reauth in case of a password change or other error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/linkplay/__init__.py b/homeassistant/components/linkplay/__init__.py index c0fe711a61b..918e52a755d 100644 --- a/homeassistant/components/linkplay/__init__.py +++ b/homeassistant/components/linkplay/__init__.py @@ -1,17 +1,23 @@ """Support for LinkPlay devices.""" +from dataclasses import dataclass + +from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge -from linkplay.discovery import linkplay_factory_bridge +from linkplay.controller import LinkPlayController +from linkplay.discovery import linkplay_factory_httpapi_bridge +from linkplay.exceptions import LinkPlayRequestException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import PLATFORMS +from .const import CONTROLLER, CONTROLLER_KEY, DOMAIN, PLATFORMS +from .utils import async_get_client_session +@dataclass class LinkPlayData: """Data for LinkPlay.""" @@ -24,16 +30,31 @@ type LinkPlayConfigEntry = ConfigEntry[LinkPlayData] async def async_setup_entry(hass: HomeAssistant, entry: LinkPlayConfigEntry) -> bool: """Async setup hass config entry. Called when an entry has been setup.""" - session = async_get_clientsession(hass) - if ( - bridge := await linkplay_factory_bridge(entry.data[CONF_HOST], session) - ) is None: + session: ClientSession = await async_get_client_session(hass) + bridge: LinkPlayBridge | None = None + + # try create a bridge + try: + bridge = await linkplay_factory_httpapi_bridge(entry.data[CONF_HOST], session) + except LinkPlayRequestException as exception: raise ConfigEntryNotReady( f"Failed to connect to LinkPlay device at {entry.data[CONF_HOST]}" - ) + ) from exception - entry.runtime_data = LinkPlayData() - entry.runtime_data.bridge = bridge + # setup the controller and discover multirooms + controller: LinkPlayController | None = None + hass.data.setdefault(DOMAIN, {}) + if CONTROLLER not in hass.data[DOMAIN]: + controller = LinkPlayController(session) + hass.data[DOMAIN][CONTROLLER_KEY] = controller + else: + controller = hass.data[DOMAIN][CONTROLLER_KEY] + + await controller.add_bridge(bridge) + await controller.discover_multirooms() + + # forward to platforms + entry.runtime_data = LinkPlayData(bridge=bridge) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/linkplay/button.py b/homeassistant/components/linkplay/button.py new file mode 100644 index 00000000000..1c93ebcdc3e --- /dev/null +++ b/homeassistant/components/linkplay/button.py @@ -0,0 +1,82 @@ +"""Support for LinkPlay buttons.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +import logging +from typing import Any + +from linkplay.bridge import LinkPlayBridge + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.const import 
EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import LinkPlayConfigEntry +from .entity import LinkPlayBaseEntity, exception_wrap + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class LinkPlayButtonEntityDescription(ButtonEntityDescription): + """Class describing LinkPlay button entities.""" + + remote_function: Callable[[LinkPlayBridge], Coroutine[Any, Any, None]] + + +BUTTON_TYPES: tuple[LinkPlayButtonEntityDescription, ...] = ( + LinkPlayButtonEntityDescription( + key="timesync", + translation_key="timesync", + remote_function=lambda linkplay_bridge: linkplay_bridge.device.timesync(), + entity_category=EntityCategory.CONFIG, + ), + LinkPlayButtonEntityDescription( + key="restart", + device_class=ButtonDeviceClass.RESTART, + remote_function=lambda linkplay_bridge: linkplay_bridge.device.reboot(), + entity_category=EntityCategory.CONFIG, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: LinkPlayConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the LinkPlay buttons from config entry.""" + + # add entities + async_add_entities( + LinkPlayButton(config_entry.runtime_data.bridge, description) + for description in BUTTON_TYPES + ) + + +class LinkPlayButton(LinkPlayBaseEntity, ButtonEntity): + """Representation of LinkPlay button.""" + + entity_description: LinkPlayButtonEntityDescription + + def __init__( + self, + bridge: LinkPlayBridge, + description: LinkPlayButtonEntityDescription, + ) -> None: + """Initialize LinkPlay button.""" + super().__init__(bridge) + self.entity_description = description + self._attr_unique_id = f"{bridge.device.uuid}-{description.key}" + + @exception_wrap + async def async_press(self) -> None: + """Press the button.""" + await self.entity_description.remote_function(self._bridge) diff --git a/homeassistant/components/linkplay/config_flow.py b/homeassistant/components/linkplay/config_flow.py index 0f9c40d0fd4..7dfdce238ff 100644 --- a/homeassistant/components/linkplay/config_flow.py +++ b/homeassistant/components/linkplay/config_flow.py @@ -1,16 +1,22 @@ """Config flow to configure LinkPlay component.""" +import logging from typing import Any -from linkplay.discovery import linkplay_factory_bridge +from aiohttp import ClientSession +from linkplay.bridge import LinkPlayBridge +from linkplay.discovery import linkplay_factory_httpapi_bridge +from linkplay.exceptions import LinkPlayRequestException import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_MODEL -from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +from .utils import async_get_client_session + +_LOGGER = logging.getLogger(__name__) class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): @@ -25,10 +31,15 @@ class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle Zeroconf discovery.""" - session = async_get_clientsession(self.hass) - bridge = await linkplay_factory_bridge(discovery_info.host, session) + session: ClientSession = await async_get_client_session(self.hass) + bridge: LinkPlayBridge | None = None - if bridge is None: + try: + bridge = await linkplay_factory_httpapi_bridge(discovery_info.host, session) + except LinkPlayRequestException: + _LOGGER.exception( + "Failed to connect to LinkPlay device at %s", 
discovery_info.host + ) return self.async_abort(reason="cannot_connect") self.data[CONF_HOST] = discovery_info.host @@ -66,14 +77,26 @@ class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors: dict[str, str] = {} if user_input: - session = async_get_clientsession(self.hass) - bridge = await linkplay_factory_bridge(user_input[CONF_HOST], session) + session: ClientSession = await async_get_client_session(self.hass) + bridge: LinkPlayBridge | None = None + + try: + bridge = await linkplay_factory_httpapi_bridge( + user_input[CONF_HOST], session + ) + except LinkPlayRequestException: + _LOGGER.exception( + "Failed to connect to LinkPlay device at %s", user_input[CONF_HOST] + ) + errors["base"] = "cannot_connect" if bridge is not None: self.data[CONF_HOST] = user_input[CONF_HOST] self.data[CONF_MODEL] = bridge.device.name - await self.async_set_unique_id(bridge.device.uuid) + await self.async_set_unique_id( + bridge.device.uuid, raise_on_progress=False + ) self._abort_if_unique_id_configured( updates={CONF_HOST: self.data[CONF_HOST]} ) @@ -83,7 +106,6 @@ class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): data={CONF_HOST: self.data[CONF_HOST]}, ) - errors["base"] = "cannot_connect" return self.async_show_form( step_id="user", data_schema=vol.Schema({vol.Required(CONF_HOST): str}), diff --git a/homeassistant/components/linkplay/const.py b/homeassistant/components/linkplay/const.py index 48ae225dd98..e10450cf255 100644 --- a/homeassistant/components/linkplay/const.py +++ b/homeassistant/components/linkplay/const.py @@ -1,6 +1,12 @@ """LinkPlay constants.""" +from linkplay.controller import LinkPlayController + from homeassistant.const import Platform +from homeassistant.util.hass_dict import HassKey DOMAIN = "linkplay" -PLATFORMS = [Platform.MEDIA_PLAYER] +CONTROLLER = "controller" +CONTROLLER_KEY: HassKey[LinkPlayController] = HassKey(CONTROLLER) +PLATFORMS = [Platform.BUTTON, Platform.MEDIA_PLAYER] +DATA_SESSION = "session" diff --git a/homeassistant/components/linkplay/diagnostics.py b/homeassistant/components/linkplay/diagnostics.py new file mode 100644 index 00000000000..cfc1346aff4 --- /dev/null +++ b/homeassistant/components/linkplay/diagnostics.py @@ -0,0 +1,17 @@ +"""Diagnostics support for Linkplay.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import LinkPlayConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: LinkPlayConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = entry.runtime_data + return {"device_info": data.bridge.to_dict()} diff --git a/homeassistant/components/linkplay/entity.py b/homeassistant/components/linkplay/entity.py new file mode 100644 index 00000000000..00e2f39b233 --- /dev/null +++ b/homeassistant/components/linkplay/entity.py @@ -0,0 +1,57 @@ +"""BaseEntity to support multiple LinkPlay platforms.""" + +from collections.abc import Callable, Coroutine +from typing import Any, Concatenate + +from linkplay.bridge import LinkPlayBridge + +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity import Entity + +from . 
import DOMAIN, LinkPlayRequestException +from .utils import MANUFACTURER_GENERIC, get_info_from_project + + +def exception_wrap[_LinkPlayEntityT: LinkPlayBaseEntity, **_P, _R]( + func: Callable[Concatenate[_LinkPlayEntityT, _P], Coroutine[Any, Any, _R]], +) -> Callable[Concatenate[_LinkPlayEntityT, _P], Coroutine[Any, Any, _R]]: + """Define a wrapper to catch exceptions and raise HomeAssistant errors.""" + + async def _wrap(self: _LinkPlayEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R: + try: + return await func(self, *args, **kwargs) + except LinkPlayRequestException as err: + raise HomeAssistantError( + f"Exception occurred when communicating with API {func}: {err}" + ) from err + + return _wrap + + +class LinkPlayBaseEntity(Entity): + """Representation of a LinkPlay base entity.""" + + _attr_has_entity_name = True + + def __init__(self, bridge: LinkPlayBridge) -> None: + """Initialize the LinkPlay media player.""" + + self._bridge = bridge + + manufacturer, model = get_info_from_project(bridge.device.properties["project"]) + model_id = None + if model != MANUFACTURER_GENERIC: + model_id = bridge.device.properties["project"] + + self._attr_device_info = dr.DeviceInfo( + configuration_url=bridge.endpoint, + connections={(dr.CONNECTION_NETWORK_MAC, bridge.device.properties["MAC"])}, + hw_version=bridge.device.properties["hardware"], + identifiers={(DOMAIN, bridge.device.uuid)}, + manufacturer=manufacturer, + model=model, + model_id=model_id, + name=bridge.device.name, + sw_version=bridge.device.properties["firmware"], + ) diff --git a/homeassistant/components/linkplay/icons.json b/homeassistant/components/linkplay/icons.json new file mode 100644 index 00000000000..c0fe86d9ac7 --- /dev/null +++ b/homeassistant/components/linkplay/icons.json @@ -0,0 +1,14 @@ +{ + "entity": { + "button": { + "timesync": { + "default": "mdi:clock" + } + } + }, + "services": { + "play_preset": { + "service": "mdi:play-box-outline" + } + } +} diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index 5212f3f99b8..cc124ceb611 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -6,6 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/linkplay", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["python-linkplay==0.0.8"], + "loggers": ["linkplay"], + "requirements": ["python-linkplay==0.1.1"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 398add235bd..456fbf23289 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -2,13 +2,14 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine import logging -from typing import Any, Concatenate +from typing import Any from linkplay.bridge import LinkPlayBridge from linkplay.consts import EqualizerMode, LoopMode, PlayingMode, PlayingStatus -from linkplay.exceptions import LinkPlayException, LinkPlayRequestException +from linkplay.controller import LinkPlayController, LinkPlayMultiroom +from linkplay.exceptions import LinkPlayRequestException +import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( @@ -19,16 +20,22 @@ from homeassistant.components.media_player import ( MediaPlayerState, MediaType, RepeatMode, + 
async_process_play_media_url, ) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import ( + config_validation as cv, + entity_platform, + entity_registry as er, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import utcnow -from . import LinkPlayConfigEntry -from .const import DOMAIN -from .utils import get_info_from_project +from . import LinkPlayConfigEntry, LinkPlayData +from .const import CONTROLLER_KEY, DOMAIN +from .entity import LinkPlayBaseEntity, exception_wrap _LOGGER = logging.getLogger(__name__) STATE_MAP: dict[PlayingStatus, MediaPlayerState] = { @@ -39,6 +46,7 @@ STATE_MAP: dict[PlayingStatus, MediaPlayerState] = { } SOURCE_MAP: dict[PlayingMode, str] = { + PlayingMode.NETWORK: "Wifi", PlayingMode.LINE_IN: "Line In", PlayingMode.BLUETOOTH: "Bluetooth", PlayingMode.OPTICAL: "Optical", @@ -48,6 +56,20 @@ SOURCE_MAP: dict[PlayingMode, str] = { PlayingMode.XLR: "XLR", PlayingMode.HDMI: "HDMI", PlayingMode.OPTICAL_2: "Optical 2", + PlayingMode.EXTERN_BLUETOOTH: "External Bluetooth", + PlayingMode.PHONO: "Phono", + PlayingMode.ARC: "ARC", + PlayingMode.COAXIAL_2: "Coaxial 2", + PlayingMode.TF_CARD_1: "SD Card 1", + PlayingMode.TF_CARD_2: "SD Card 2", + PlayingMode.CD: "CD", + PlayingMode.DAB: "DAB Radio", + PlayingMode.FM: "FM Radio", + PlayingMode.RCA: "RCA", + PlayingMode.UDISK: "USB", + PlayingMode.SPOTIFY: "Spotify", + PlayingMode.TIDAL: "Tidal", + PlayingMode.FOLLOWER: "Follower", } SOURCE_MAP_INV: dict[str, PlayingMode] = {v: k for k, v in SOURCE_MAP.items()} @@ -94,6 +116,15 @@ SEEKABLE_FEATURES: MediaPlayerEntityFeature = ( | MediaPlayerEntityFeature.SEEK ) +SERVICE_PLAY_PRESET = "play_preset" +ATTR_PRESET_NUMBER = "preset_number" + +SERVICE_PLAY_PRESET_SCHEMA = cv.make_entity_service_schema( + { + vol.Required(ATTR_PRESET_NUMBER): cv.positive_int, + } +) + async def async_setup_entry( hass: HomeAssistant, @@ -102,65 +133,42 @@ async def async_setup_entry( ) -> None: """Set up a media player from a config entry.""" + # register services + platform = entity_platform.async_get_current_platform() + platform.async_register_entity_service( + SERVICE_PLAY_PRESET, SERVICE_PLAY_PRESET_SCHEMA, "async_play_preset" + ) + + # add entities async_add_entities([LinkPlayMediaPlayerEntity(entry.runtime_data.bridge)]) -def exception_wrap[_LinkPlayEntityT: LinkPlayMediaPlayerEntity, **_P, _R]( - func: Callable[Concatenate[_LinkPlayEntityT, _P], Coroutine[Any, Any, _R]], -) -> Callable[Concatenate[_LinkPlayEntityT, _P], Coroutine[Any, Any, _R]]: - """Define a wrapper to catch exceptions and raise HomeAssistant errors.""" - - async def _wrap(self: _LinkPlayEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R: - try: - return await func(self, *args, **kwargs) - except LinkPlayRequestException as err: - raise HomeAssistantError( - f"Exception occurred when communicating with API {func}: {err}" - ) from err - - return _wrap - - -class LinkPlayMediaPlayerEntity(MediaPlayerEntity): +class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity): """Representation of a LinkPlay media player.""" _attr_sound_mode_list = list(EQUALIZER_MAP.values()) _attr_device_class = MediaPlayerDeviceClass.RECEIVER _attr_media_content_type = MediaType.MUSIC - _attr_has_entity_name = True _attr_name = 
None def __init__(self, bridge: LinkPlayBridge) -> None: """Initialize the LinkPlay media player.""" - self._bridge = bridge + super().__init__(bridge) self._attr_unique_id = bridge.device.uuid self._attr_source_list = [ SOURCE_MAP[playing_mode] for playing_mode in bridge.device.playmode_support ] - manufacturer, model = get_info_from_project(bridge.device.properties["project"]) - self._attr_device_info = dr.DeviceInfo( - configuration_url=bridge.endpoint, - connections={(dr.CONNECTION_NETWORK_MAC, bridge.device.properties["MAC"])}, - hw_version=bridge.device.properties["hardware"], - identifiers={(DOMAIN, bridge.device.uuid)}, - manufacturer=manufacturer, - model=model, - name=bridge.device.name, - sw_version=bridge.device.properties["firmware"], - ) - @exception_wrap async def async_update(self) -> None: """Update the state of the media player.""" try: await self._bridge.player.update_status() self._update_properties() - except LinkPlayException: + except LinkPlayRequestException: self._attr_available = False - raise @exception_wrap async def async_select_source(self, source: str) -> None: @@ -195,6 +203,21 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): """Send play command.""" await self._bridge.player.resume() + @exception_wrap + async def async_media_stop(self) -> None: + """Send stop command.""" + await self._bridge.player.stop() + + @exception_wrap + async def async_media_next_track(self) -> None: + """Send next command.""" + await self._bridge.player.next() + + @exception_wrap + async def async_media_previous_track(self) -> None: + """Send previous command.""" + await self._bridge.player.previous() + @exception_wrap async def async_set_repeat(self, repeat: RepeatMode) -> None: """Set repeat mode.""" @@ -222,10 +245,94 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): self, media_type: MediaType | str, media_id: str, **kwargs: Any ) -> None: """Play a piece of media.""" - media = await media_source.async_resolve_media( - self.hass, media_id, self.entity_id + if media_source.is_media_source_id(media_id): + play_item = await media_source.async_resolve_media( + self.hass, media_id, self.entity_id + ) + media_id = play_item.url + + url = async_process_play_media_url(self.hass, media_id) + await self._bridge.player.play(url) + + @exception_wrap + async def async_play_preset(self, preset_number: int) -> None: + """Play preset number.""" + try: + await self._bridge.player.play_preset(preset_number) + except ValueError as err: + raise HomeAssistantError(err) from err + + @exception_wrap + async def async_media_seek(self, position: float) -> None: + """Seek to a position.""" + await self._bridge.player.seek(round(position)) + + @exception_wrap + async def async_join_players(self, group_members: list[str]) -> None: + """Join `group_members` as a player group with the current player.""" + + controller: LinkPlayController = self.hass.data[DOMAIN][CONTROLLER_KEY] + multiroom = self._bridge.multiroom + if multiroom is None: + multiroom = LinkPlayMultiroom(self._bridge) + + for group_member in group_members: + bridge = self._get_linkplay_bridge(group_member) + if bridge: + await multiroom.add_follower(bridge) + + await controller.discover_multirooms() + + def _get_linkplay_bridge(self, entity_id: str) -> LinkPlayBridge: + """Get linkplay bridge from entity_id.""" + + entity_registry = er.async_get(self.hass) + + # Check for valid linkplay media_player entity + entity_entry = entity_registry.async_get(entity_id) + + if ( + entity_entry is None + or entity_entry.domain != 
Platform.MEDIA_PLAYER + or entity_entry.platform != DOMAIN + or entity_entry.config_entry_id is None + ): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_grouping_entity", + translation_placeholders={"entity_id": entity_id}, + ) + + config_entry = self.hass.config_entries.async_get_entry( + entity_entry.config_entry_id ) - await self._bridge.player.play(media.url) + assert config_entry + + # Return bridge + data: LinkPlayData = config_entry.runtime_data + return data.bridge + + @property + def group_members(self) -> list[str]: + """List of players which are grouped together.""" + multiroom = self._bridge.multiroom + if multiroom is not None: + return [multiroom.leader.device.uuid] + [ + follower.device.uuid for follower in multiroom.followers + ] + + return [] + + @exception_wrap + async def async_unjoin_player(self) -> None: + """Remove this player from any group.""" + controller: LinkPlayController = self.hass.data[DOMAIN][CONTROLLER_KEY] + + multiroom = self._bridge.multiroom + if multiroom is not None: + await multiroom.remove_follower(self._bridge) + + await controller.discover_multirooms() def _update_properties(self) -> None: """Update the properties of the media player.""" @@ -245,9 +352,9 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): ) self._attr_source = SOURCE_MAP.get(self._bridge.player.play_mode, "other") - self._attr_media_position = self._bridge.player.current_position / 1000 + self._attr_media_position = self._bridge.player.current_position_in_seconds self._attr_media_position_updated_at = utcnow() - self._attr_media_duration = self._bridge.player.total_length / 1000 + self._attr_media_duration = self._bridge.player.total_length_in_seconds self._attr_media_artist = self._bridge.player.artist self._attr_media_title = self._bridge.player.title self._attr_media_album_name = self._bridge.player.album diff --git a/homeassistant/components/linkplay/services.yaml b/homeassistant/components/linkplay/services.yaml new file mode 100644 index 00000000000..0d7335a28c8 --- /dev/null +++ b/homeassistant/components/linkplay/services.yaml @@ -0,0 +1,14 @@ +play_preset: + target: + entity: + integration: linkplay + domain: media_player + fields: + preset_number: + example: 1 + required: true + default: 1 + selector: + number: + min: 1 + mode: box diff --git a/homeassistant/components/linkplay/strings.json b/homeassistant/components/linkplay/strings.json index 46f5b29059f..31b4649e131 100644 --- a/homeassistant/components/linkplay/strings.json +++ b/homeassistant/components/linkplay/strings.json @@ -22,5 +22,29 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "unknown": "[%key:common::config_flow::error::unknown%]" } + }, + "services": { + "play_preset": { + "name": "Play preset", + "description": "Play the preset number on the device.", + "fields": { + "preset_number": { + "name": "Preset number", + "description": "The preset number on the device to play." + } + } + } + }, + "entity": { + "button": { + "timesync": { + "name": "Sync time" + } + } + }, + "exceptions": { + "invalid_grouping_entity": { + "message": "Entity with id {entity_id} can't be added to the LinkPlay multiroom. Is the entity a LinkPlay mediaplayer?" 
+ } } } diff --git a/homeassistant/components/linkplay/utils.py b/homeassistant/components/linkplay/utils.py index 7532c9b354a..00bb691362b 100644 --- a/homeassistant/components/linkplay/utils.py +++ b/homeassistant/components/linkplay/utils.py @@ -2,45 +2,95 @@ from typing import Final +from aiohttp import ClientSession +from linkplay.utils import async_create_unverified_client_session + +from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE +from homeassistant.core import Event, HomeAssistant, callback + +from .const import DATA_SESSION, DOMAIN + MANUFACTURER_ARTSOUND: Final[str] = "ArtSound" MANUFACTURER_ARYLIC: Final[str] = "Arylic" MANUFACTURER_IEAST: Final[str] = "iEAST" +MANUFACTURER_WIIM: Final[str] = "WiiM" +MANUFACTURER_GGMM: Final[str] = "GGMM" +MANUFACTURER_MEDION: Final[str] = "Medion" MANUFACTURER_GENERIC: Final[str] = "Generic" MODELS_ARTSOUND_SMART_ZONE4: Final[str] = "Smart Zone 4 AMP" MODELS_ARTSOUND_SMART_HYDE: Final[str] = "Smart Hyde" MODELS_ARYLIC_S50: Final[str] = "S50+" MODELS_ARYLIC_S50_PRO: Final[str] = "S50 Pro" MODELS_ARYLIC_A30: Final[str] = "A30" +MODELS_ARYLIC_A50: Final[str] = "A50" MODELS_ARYLIC_A50S: Final[str] = "A50+" +MODELS_ARYLIC_UP2STREAM_AMP: Final[str] = "Up2Stream Amp 2.0" MODELS_ARYLIC_UP2STREAM_AMP_V3: Final[str] = "Up2Stream Amp v3" MODELS_ARYLIC_UP2STREAM_AMP_V4: Final[str] = "Up2Stream Amp v4" +MODELS_ARYLIC_UP2STREAM_PRO: Final[str] = "Up2Stream Pro v1" MODELS_ARYLIC_UP2STREAM_PRO_V3: Final[str] = "Up2Stream Pro v3" +MODELS_ARYLIC_UP2STREAM_PLATE_AMP: Final[str] = "Up2Stream Plate Amp" MODELS_IEAST_AUDIOCAST_M5: Final[str] = "AudioCast M5" +MODELS_WIIM_AMP: Final[str] = "WiiM Amp" +MODELS_WIIM_MINI: Final[str] = "WiiM Mini" +MODELS_GGMM_GGMM_E2: Final[str] = "GGMM E2" +MODELS_MEDION_MD_43970: Final[str] = "Life P66970 (MD 43970)" MODELS_GENERIC: Final[str] = "Generic" +PROJECTID_LOOKUP: Final[dict[str, tuple[str, str]]] = { + "SMART_ZONE4_AMP": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_ZONE4), + "SMART_HYDE": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_HYDE), + "ARYLIC_S50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50), + "RP0016_S50PRO_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50_PRO), + "RP0011_WB60_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A30), + "X-50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50), + "ARYLIC_A50S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50S), + "RP0011_WB60": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP), + "UP2STREAM_AMP_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V3), + "UP2STREAM_AMP_V4": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V4), + "UP2STREAM_PRO_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PRO_V3), + "ARYLIC_V20": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PLATE_AMP), + "UP2STREAM_MINI_V3": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "UP2STREAM_AMP_2P1": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0014_A50C_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_A30": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_SUBWOOFER": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_S50A": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0010_D5_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0001": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0013_WA31S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0010_D5": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0013_WA31S_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0014_A50D_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_A50TE": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_A50N": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "iEAST-02": 
(MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5), + "WiiM_Amp_4layer": (MANUFACTURER_WIIM, MODELS_WIIM_AMP), + "Muzo_Mini": (MANUFACTURER_WIIM, MODELS_WIIM_MINI), + "GGMM_E2A": (MANUFACTURER_GGMM, MODELS_GGMM_GGMM_E2), + "A16": (MANUFACTURER_MEDION, MODELS_MEDION_MD_43970), +} + def get_info_from_project(project: str) -> tuple[str, str]: """Get manufacturer and model info based on given project.""" - match project: - case "SMART_ZONE4_AMP": - return MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_ZONE4 - case "SMART_HYDE": - return MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_HYDE - case "ARYLIC_S50": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50 - case "RP0016_S50PRO_S": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50_PRO - case "RP0011_WB60_S": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_A30 - case "ARYLIC_A50S": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50S - case "UP2STREAM_AMP_V3": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V3 - case "UP2STREAM_AMP_V4": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V4 - case "UP2STREAM_PRO_V3": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PRO_V3 - case "iEAST-02": - return MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5 - case _: - return MANUFACTURER_GENERIC, MODELS_GENERIC + return PROJECTID_LOOKUP.get(project, (MANUFACTURER_GENERIC, MODELS_GENERIC)) + + +async def async_get_client_session(hass: HomeAssistant) -> ClientSession: + """Get a ClientSession that can be used with LinkPlay devices.""" + hass.data.setdefault(DOMAIN, {}) + if DATA_SESSION not in hass.data[DOMAIN]: + clientsession: ClientSession = await async_create_unverified_client_session() + + @callback + def _async_close_websession(event: Event) -> None: + """Close websession.""" + clientsession.detach() + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_websession) + hass.data[DOMAIN][DATA_SESSION] = clientsession + return clientsession + + session: ClientSession = hass.data[DOMAIN][DATA_SESSION] + return session diff --git a/homeassistant/components/linksys_smart/device_tracker.py b/homeassistant/components/linksys_smart/device_tracker.py index 45ae1d328dd..596b7012140 100644 --- a/homeassistant/components/linksys_smart/device_tracker.py +++ b/homeassistant/components/linksys_smart/device_tracker.py @@ -9,7 +9,7 @@ import requests import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -32,7 +32,7 @@ def get_scanner( ) -> LinksysSmartWifiDeviceScanner | None: """Validate the configuration and return a Linksys AP scanner.""" try: - return LinksysSmartWifiDeviceScanner(config[DOMAIN]) + return LinksysSmartWifiDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) except ConnectionError: return None @@ -62,7 +62,7 @@ class LinksysSmartWifiDeviceScanner(DeviceScanner): def _update_info(self): """Check for connected devices.""" - _LOGGER.info("Checking Linksys Smart Wifi") + _LOGGER.debug("Checking Linksys Smart Wifi") self.last_results = {} response = self._make_request() diff --git a/homeassistant/components/linksys_smart/manifest.json b/homeassistant/components/linksys_smart/manifest.json index 6200da5866d..4f099f81277 100644 --- a/homeassistant/components/linksys_smart/manifest.json +++ b/homeassistant/components/linksys_smart/manifest.json @@ -3,5 +3,6 @@ "name": "Linksys Smart Wi-Fi", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/linksys_smart", - "iot_class": 
"local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/linode/__init__.py b/homeassistant/components/linode/__init__.py index 2ed3cf244d0..80c082344e7 100644 --- a/homeassistant/components/linode/__init__.py +++ b/homeassistant/components/linode/__init__.py @@ -45,7 +45,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: _linode = Linode(access_token) try: - _LOGGER.info("Linode Profile %s", _linode.manager.get_profile().username) + _LOGGER.debug("Linode Profile %s", _linode.manager.get_profile().username) except linode.errors.ApiError as _ex: _LOGGER.error(_ex) return False diff --git a/homeassistant/components/linode/manifest.json b/homeassistant/components/linode/manifest.json index bedd6c2d172..975747de86d 100644 --- a/homeassistant/components/linode/manifest.json +++ b/homeassistant/components/linode/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/linode", "iot_class": "cloud_polling", "loggers": ["linode"], + "quality_scale": "legacy", "requirements": ["linode-api==4.1.9b1"] } diff --git a/homeassistant/components/linux_battery/manifest.json b/homeassistant/components/linux_battery/manifest.json index 12b49c18aee..39bd331e3a4 100644 --- a/homeassistant/components/linux_battery/manifest.json +++ b/homeassistant/components/linux_battery/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/linux_battery", "iot_class": "local_polling", "loggers": ["batinfo"], + "quality_scale": "legacy", "requirements": ["batinfo==0.4.2"] } diff --git a/homeassistant/components/lirc/__init__.py b/homeassistant/components/lirc/__init__.py index b847a160f51..f5b26743a03 100644 --- a/homeassistant/components/lirc/__init__.py +++ b/homeassistant/components/lirc/__init__.py @@ -71,7 +71,7 @@ class LircInterface(threading.Thread): # interpret result from python-lirc if code: code = code[0] - _LOGGER.info("Got new LIRC code %s", code) + _LOGGER.debug("Got new LIRC code %s", code) self.hass.bus.fire(EVENT_IR_COMMAND_RECEIVED, {BUTTON_NAME: code}) else: time.sleep(0.2) diff --git a/homeassistant/components/lirc/manifest.json b/homeassistant/components/lirc/manifest.json index 3cc5d453721..64dbee06390 100644 --- a/homeassistant/components/lirc/manifest.json +++ b/homeassistant/components/lirc/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/lirc", "iot_class": "local_push", "loggers": ["lirc"], + "quality_scale": "legacy", "requirements": ["python-lirc==1.2.3"] } diff --git a/homeassistant/components/litejet/__init__.py b/homeassistant/components/litejet/__init__.py index e9d1cca74cb..84667d6c94d 100644 --- a/homeassistant/components/litejet/__init__.py +++ b/homeassistant/components/litejet/__init__.py @@ -25,7 +25,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: def handle_connected_changed(connected: bool, reason: str) -> None: if connected: - _LOGGER.info("Connected") + _LOGGER.debug("Connected") else: _LOGGER.warning("Disconnected %s", reason) diff --git a/homeassistant/components/litejet/config_flow.py b/homeassistant/components/litejet/config_flow.py index 19ddf0122c4..9aa0b19c506 100644 --- a/homeassistant/components/litejet/config_flow.py +++ b/homeassistant/components/litejet/config_flow.py @@ -24,10 +24,6 @@ from .const import CONF_DEFAULT_TRANSITION, DOMAIN class LiteJetOptionsFlow(OptionsFlow): """Handle LiteJet options.""" - def __init__(self, config_entry: ConfigEntry) -> 
None: - """Initialize LiteJet options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -57,9 +53,6 @@ class LiteJetConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Create a LiteJet config entry based upon user input.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: port = user_input[CONF_PORT] @@ -87,4 +80,4 @@ class LiteJetConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> LiteJetOptionsFlow: """Get the options flow for this handler.""" - return LiteJetOptionsFlow(config_entry) + return LiteJetOptionsFlow() diff --git a/homeassistant/components/litejet/manifest.json b/homeassistant/components/litejet/manifest.json index 65dde31436d..cd2e5fda11a 100644 --- a/homeassistant/components/litejet/manifest.json +++ b/homeassistant/components/litejet/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pylitejet"], - "quality_scale": "platinum", - "requirements": ["pylitejet==0.6.2"] + "requirements": ["pylitejet==0.6.3"], + "single_config_entry": true } diff --git a/homeassistant/components/litejet/strings.json b/homeassistant/components/litejet/strings.json index 398f1a1e5aa..c55df54c931 100644 --- a/homeassistant/components/litejet/strings.json +++ b/homeassistant/components/litejet/strings.json @@ -9,9 +9,6 @@ } } }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" - }, "error": { "open_failed": "Cannot open the specified serial port." } diff --git a/homeassistant/components/litterrobot/config_flow.py b/homeassistant/components/litterrobot/config_flow.py index 633c6a5a5a2..90f1fcba56d 100644 --- a/homeassistant/components/litterrobot/config_flow.py +++ b/homeassistant/components/litterrobot/config_flow.py @@ -43,16 +43,11 @@ class LitterRobotConfigFlow(ConfigFlow, domain=DOMAIN): """Handle user's reauth credentials.""" errors = {} if user_input: - entry_id = self.context["entry_id"] - if entry := self.hass.config_entries.async_get_entry(entry_id): - user_input = user_input | {CONF_USERNAME: self.username} - if not (error := await self._async_validate_input(user_input)): - self.hass.config_entries.async_update_entry( - entry, - data=entry.data | user_input, - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + user_input = user_input | {CONF_USERNAME: self.username} + if not (error := await self._async_validate_input(user_input)): + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) errors["base"] = error return self.async_show_form( diff --git a/homeassistant/components/litterrobot/icons.json b/homeassistant/components/litterrobot/icons.json index 333f309e9e8..482031f8424 100644 --- a/homeassistant/components/litterrobot/icons.json +++ b/homeassistant/components/litterrobot/icons.json @@ -40,6 +40,8 @@ } }, "services": { - "set_sleep_mode": "mdi:sleep" + "set_sleep_mode": { + "service": "mdi:sleep" + } } } diff --git a/homeassistant/components/litterrobot/vacuum.py b/homeassistant/components/litterrobot/vacuum.py index a1ed2ea600d..bd00c328233 100644 --- a/homeassistant/components/litterrobot/vacuum.py +++ b/homeassistant/components/litterrobot/vacuum.py @@ -10,15 +10,11 @@ from pylitterbot.enums import 
LitterBoxStatus import voluptuous as vol from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_PAUSED, StateVacuumEntity, StateVacuumEntityDescription, + VacuumActivity, VacuumEntityFeature, ) -from homeassistant.const import STATE_OFF from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -30,16 +26,16 @@ from .entity import LitterRobotEntity SERVICE_SET_SLEEP_MODE = "set_sleep_mode" LITTER_BOX_STATUS_STATE_MAP = { - LitterBoxStatus.CLEAN_CYCLE: STATE_CLEANING, - LitterBoxStatus.EMPTY_CYCLE: STATE_CLEANING, - LitterBoxStatus.CLEAN_CYCLE_COMPLETE: STATE_DOCKED, - LitterBoxStatus.CAT_DETECTED: STATE_DOCKED, - LitterBoxStatus.CAT_SENSOR_TIMING: STATE_DOCKED, - LitterBoxStatus.DRAWER_FULL_1: STATE_DOCKED, - LitterBoxStatus.DRAWER_FULL_2: STATE_DOCKED, - LitterBoxStatus.READY: STATE_DOCKED, - LitterBoxStatus.CAT_SENSOR_INTERRUPTED: STATE_PAUSED, - LitterBoxStatus.OFF: STATE_OFF, + LitterBoxStatus.CLEAN_CYCLE: VacuumActivity.CLEANING, + LitterBoxStatus.EMPTY_CYCLE: VacuumActivity.CLEANING, + LitterBoxStatus.CLEAN_CYCLE_COMPLETE: VacuumActivity.DOCKED, + LitterBoxStatus.CAT_DETECTED: VacuumActivity.DOCKED, + LitterBoxStatus.CAT_SENSOR_TIMING: VacuumActivity.DOCKED, + LitterBoxStatus.DRAWER_FULL_1: VacuumActivity.DOCKED, + LitterBoxStatus.DRAWER_FULL_2: VacuumActivity.DOCKED, + LitterBoxStatus.READY: VacuumActivity.DOCKED, + LitterBoxStatus.CAT_SENSOR_INTERRUPTED: VacuumActivity.PAUSED, + LitterBoxStatus.OFF: VacuumActivity.DOCKED, } LITTER_BOX_ENTITY = StateVacuumEntityDescription( @@ -79,9 +75,9 @@ class LitterRobotCleaner(LitterRobotEntity[LitterRobot], StateVacuumEntity): ) @property - def state(self) -> str: + def activity(self) -> VacuumActivity: """Return the state of the cleaner.""" - return LITTER_BOX_STATUS_STATE_MAP.get(self.robot.status, STATE_ERROR) + return LITTER_BOX_STATUS_STATE_MAP.get(self.robot.status, VacuumActivity.ERROR) @property def status(self) -> str: diff --git a/homeassistant/components/livisi/__init__.py b/homeassistant/components/livisi/__init__.py index 26e36e68efa..fc9e381a1c3 100644 --- a/homeassistant/components/livisi/__init__.py +++ b/homeassistant/components/livisi/__init__.py @@ -5,7 +5,7 @@ from __future__ import annotations from typing import Final from aiohttp import ClientConnectorError -from aiolivisi import AioLivisi +from livisi.aiolivisi import AioLivisi from homeassistant import core from homeassistant.config_entries import ConfigEntry diff --git a/homeassistant/components/livisi/climate.py b/homeassistant/components/livisi/climate.py index 56fe63d351f..3ecdcb486c0 100644 --- a/homeassistant/components/livisi/climate.py +++ b/homeassistant/components/livisi/climate.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from aiolivisi.const import CAPABILITY_CONFIG +from livisi.const import CAPABILITY_CONFIG from homeassistant.components.climate import ( ClimateEntity, @@ -68,7 +68,6 @@ class LivisiClimate(LivisiEntity, ClimateEntity): _attr_hvac_mode = HVACMode.HEAT _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/livisi/config_flow.py b/homeassistant/components/livisi/config_flow.py index 7317aec0abc..ce14c0e44e9 100644 --- 
a/homeassistant/components/livisi/config_flow.py +++ b/homeassistant/components/livisi/config_flow.py @@ -6,7 +6,8 @@ from contextlib import suppress from typing import Any from aiohttp import ClientConnectorError -from aiolivisi import AioLivisi, errors as livisi_errors +from livisi import errors as livisi_errors +from livisi.aiolivisi import AioLivisi import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult diff --git a/homeassistant/components/livisi/coordinator.py b/homeassistant/components/livisi/coordinator.py index 7cb5757310f..b8b282c2829 100644 --- a/homeassistant/components/livisi/coordinator.py +++ b/homeassistant/components/livisi/coordinator.py @@ -6,8 +6,9 @@ from datetime import timedelta from typing import Any from aiohttp import ClientConnectorError -from aiolivisi import AioLivisi, LivisiEvent, Websocket -from aiolivisi.errors import TokenExpiredException +from livisi import LivisiEvent, Websocket +from livisi.aiolivisi import AioLivisi +from livisi.errors import TokenExpiredException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD diff --git a/homeassistant/components/livisi/entity.py b/homeassistant/components/livisi/entity.py index 3160b8f288a..af588b0e360 100644 --- a/homeassistant/components/livisi/entity.py +++ b/homeassistant/components/livisi/entity.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Mapping from typing import Any -from aiolivisi.const import CAPABILITY_MAP +from livisi.const import CAPABILITY_MAP from homeassistant.config_entries import ConfigEntry from homeassistant.core import callback diff --git a/homeassistant/components/livisi/manifest.json b/homeassistant/components/livisi/manifest.json index e6f46324ed8..1077cacf2c4 100644 --- a/homeassistant/components/livisi/manifest.json +++ b/homeassistant/components/livisi/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/livisi", "iot_class": "local_polling", - "requirements": ["aiolivisi==0.0.19"] + "requirements": ["livisi==0.0.24"] } diff --git a/homeassistant/components/llamalab_automate/manifest.json b/homeassistant/components/llamalab_automate/manifest.json index 861b919f24b..4343d617e93 100644 --- a/homeassistant/components/llamalab_automate/manifest.json +++ b/homeassistant/components/llamalab_automate/manifest.json @@ -3,5 +3,6 @@ "name": "LlamaLab Automate", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/llamalab_automate", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/local_calendar/__init__.py b/homeassistant/components/local_calendar/__init__.py index 2be5133a21c..baebeba4f26 100644 --- a/homeassistant/components/local_calendar/__init__.py +++ b/homeassistant/components/local_calendar/__init__.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.util import slugify -from .const import CONF_CALENDAR_NAME, CONF_STORAGE_KEY, DOMAIN +from .const import CONF_CALENDAR_NAME, CONF_STORAGE_KEY, DOMAIN, STORAGE_PATH from .store import LocalCalendarStore _LOGGER = logging.getLogger(__name__) @@ -19,8 +19,6 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS: list[Platform] = [Platform.CALENDAR] -STORAGE_PATH = ".storage/local_calendar.{key}.ics" - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) 
-> bool: """Set up Local Calendar from a config entry.""" diff --git a/homeassistant/components/local_calendar/calendar.py b/homeassistant/components/local_calendar/calendar.py index 66b3f80c19c..eb7b0c20d91 100644 --- a/homeassistant/components/local_calendar/calendar.py +++ b/homeassistant/components/local_calendar/calendar.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from datetime import date, datetime, timedelta import logging from typing import Any @@ -74,6 +75,7 @@ class LocalCalendarEntity(CalendarEntity): """Initialize LocalCalendarEntity.""" self._store = store self._calendar = calendar + self._calendar_lock = asyncio.Lock() self._event: CalendarEvent | None = None self._attr_name = name self._attr_unique_id = unique_id @@ -110,8 +112,10 @@ class LocalCalendarEntity(CalendarEntity): async def async_create_event(self, **kwargs: Any) -> None: """Add a new event to calendar.""" event = _parse_event(kwargs) - EventStore(self._calendar).add(event) - await self._async_store() + async with self._calendar_lock: + event_store = EventStore(self._calendar) + await self.hass.async_add_executor_job(event_store.add, event) + await self._async_store() await self.async_update_ha_state(force_refresh=True) async def async_delete_event( @@ -124,15 +128,16 @@ class LocalCalendarEntity(CalendarEntity): range_value: Range = Range.NONE if recurrence_range == Range.THIS_AND_FUTURE: range_value = Range.THIS_AND_FUTURE - try: - EventStore(self._calendar).delete( - uid, - recurrence_id=recurrence_id, - recurrence_range=range_value, - ) - except EventStoreError as err: - raise HomeAssistantError(f"Error while deleting event: {err}") from err - await self._async_store() + async with self._calendar_lock: + try: + EventStore(self._calendar).delete( + uid, + recurrence_id=recurrence_id, + recurrence_range=range_value, + ) + except EventStoreError as err: + raise HomeAssistantError(f"Error while deleting event: {err}") from err + await self._async_store() await self.async_update_ha_state(force_refresh=True) async def async_update_event( @@ -147,16 +152,23 @@ class LocalCalendarEntity(CalendarEntity): range_value: Range = Range.NONE if recurrence_range == Range.THIS_AND_FUTURE: range_value = Range.THIS_AND_FUTURE - try: - EventStore(self._calendar).edit( - uid, - new_event, - recurrence_id=recurrence_id, - recurrence_range=range_value, - ) - except EventStoreError as err: - raise HomeAssistantError(f"Error while updating event: {err}") from err - await self._async_store() + + async with self._calendar_lock: + event_store = EventStore(self._calendar) + + def apply_edit() -> None: + event_store.edit( + uid, + new_event, + recurrence_id=recurrence_id, + recurrence_range=range_value, + ) + + try: + await self.hass.async_add_executor_job(apply_edit) + except EventStoreError as err: + raise HomeAssistantError(f"Error while updating event: {err}") from err + await self._async_store() await self.async_update_ha_state(force_refresh=True) diff --git a/homeassistant/components/local_calendar/config_flow.py b/homeassistant/components/local_calendar/config_flow.py index 8caa3a5d528..fef45f786f9 100644 --- a/homeassistant/components/local_calendar/config_flow.py +++ b/homeassistant/components/local_calendar/config_flow.py @@ -2,18 +2,55 @@ from __future__ import annotations +import logging +from pathlib import Path +import shutil from typing import Any +from ical.calendar_stream import CalendarStream +from ical.exceptions import CalendarParseError import voluptuous as vol +from 
homeassistant.components.file_upload import process_uploaded_file from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import selector from homeassistant.util import slugify -from .const import CONF_CALENDAR_NAME, CONF_STORAGE_KEY, DOMAIN +from .const import ( + ATTR_CREATE_EMPTY, + ATTR_IMPORT_ICS_FILE, + CONF_CALENDAR_NAME, + CONF_ICS_FILE, + CONF_IMPORT, + CONF_STORAGE_KEY, + DOMAIN, + STORAGE_PATH, +) + +_LOGGER = logging.getLogger(__name__) STEP_USER_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_CALENDAR_NAME): str, + vol.Optional(CONF_IMPORT, default=ATTR_CREATE_EMPTY): selector.SelectSelector( + selector.SelectSelectorConfig( + options=[ + ATTR_CREATE_EMPTY, + ATTR_IMPORT_ICS_FILE, + ], + translation_key=CONF_IMPORT, + ) + ), + } +) + +STEP_IMPORT_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_ICS_FILE): selector.FileSelector( + config=selector.FileSelectorConfig(accept=".ics") + ), } ) @@ -23,6 +60,10 @@ class LocalCalendarConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + def __init__(self) -> None: + """Initialize the config flow.""" + self.data: dict[str, Any] = {} + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -35,6 +76,52 @@ class LocalCalendarConfigFlow(ConfigFlow, domain=DOMAIN): key = slugify(user_input[CONF_CALENDAR_NAME]) self._async_abort_entries_match({CONF_STORAGE_KEY: key}) user_input[CONF_STORAGE_KEY] = key + if user_input.get(CONF_IMPORT) == ATTR_IMPORT_ICS_FILE: + self.data = user_input + return await self.async_step_import_ics_file() return self.async_create_entry( - title=user_input[CONF_CALENDAR_NAME], data=user_input + title=user_input[CONF_CALENDAR_NAME], + data=user_input, ) + + async def async_step_import_ics_file( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle optional iCal (.ics) import.""" + errors = {} + if user_input is not None: + try: + await self.hass.async_add_executor_job( + save_uploaded_ics_file, + self.hass, + user_input[CONF_ICS_FILE], + self.data[CONF_STORAGE_KEY], + ) + except HomeAssistantError as err: + _LOGGER.debug("Error saving uploaded file: %s", err) + errors[CONF_ICS_FILE] = "invalid_ics_file" + else: + return self.async_create_entry( + title=self.data[CONF_CALENDAR_NAME], data=self.data + ) + + return self.async_show_form( + step_id="import_ics_file", + data_schema=STEP_IMPORT_DATA_SCHEMA, + errors=errors, + ) + + +def save_uploaded_ics_file( + hass: HomeAssistant, uploaded_file_id: str, storage_key: str +): + """Validate the uploaded file and move it to the storage directory.""" + + with process_uploaded_file(hass, uploaded_file_id) as file: + ics = file.read_text(encoding="utf8") + try: + CalendarStream.from_ics(ics) + except CalendarParseError as err: + raise HomeAssistantError("Failed to upload file: Invalid ICS file") from err + dest_path = Path(hass.config.path(STORAGE_PATH.format(key=storage_key))) + shutil.move(file, dest_path) diff --git a/homeassistant/components/local_calendar/const.py b/homeassistant/components/local_calendar/const.py index 1cfa774ab0a..cbbd6c9308f 100644 --- a/homeassistant/components/local_calendar/const.py +++ b/homeassistant/components/local_calendar/const.py @@ -3,4 +3,11 @@ DOMAIN = "local_calendar" CONF_CALENDAR_NAME = "calendar_name" +CONF_ICS_FILE = "ics_file" +CONF_IMPORT = "import" CONF_STORAGE_KEY = "storage_key" + +ATTR_CREATE_EMPTY = "create_empty" 
+ATTR_IMPORT_ICS_FILE = "import_ics_file" + +STORAGE_PATH = ".storage/local_calendar.{key}.ics" diff --git a/homeassistant/components/local_calendar/manifest.json b/homeassistant/components/local_calendar/manifest.json index 95c65089c79..27798d0456c 100644 --- a/homeassistant/components/local_calendar/manifest.json +++ b/homeassistant/components/local_calendar/manifest.json @@ -3,8 +3,9 @@ "name": "Local Calendar", "codeowners": ["@allenporter"], "config_flow": true, + "dependencies": ["file_upload"], "documentation": "https://www.home-assistant.io/integrations/local_calendar", "iot_class": "local_polling", "loggers": ["ical"], - "requirements": ["ical==8.1.1"] + "requirements": ["ical==8.2.0"] } diff --git a/homeassistant/components/local_calendar/strings.json b/homeassistant/components/local_calendar/strings.json index c6eb36ee88f..2b61fc9ab3e 100644 --- a/homeassistant/components/local_calendar/strings.json +++ b/homeassistant/components/local_calendar/strings.json @@ -5,8 +5,26 @@ "user": { "description": "Please choose a name for your new calendar", "data": { - "calendar_name": "Calendar Name" + "calendar_name": "Calendar Name", + "import": "Starting Data" } + }, + "import": { + "description": "You can import events in iCal format (.ics file)." + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "error": { + "invalid_ics_file": "Invalid .ics file" + } + }, + "selector": { + "import": { + "options": { + "create_empty": "Create an empty calendar", + "import_ics_file": "Upload an iCalendar file (.ics)" } } } diff --git a/homeassistant/components/local_file/__init__.py b/homeassistant/components/local_file/__init__.py index 4ad752bbc54..70144cd0704 100644 --- a/homeassistant/components/local_file/__init__.py +++ b/homeassistant/components/local_file/__init__.py @@ -1 +1,37 @@ """The local_file component.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_FILE_PATH, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError + +from .const import DOMAIN +from .util import check_file_path_access + +PLATFORMS = [Platform.CAMERA] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Local file from a config entry.""" + file_path: str = entry.options[CONF_FILE_PATH] + if not await hass.async_add_executor_job(check_file_path_access, file_path): + raise ConfigEntryError( + translation_domain=DOMAIN, + translation_key="not_readable_path", + translation_placeholders={"file_path": file_path}, + ) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload Local file config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/local_file/camera.py b/homeassistant/components/local_file/camera.py index 1306751f1a9..db421bbce1d 100644 --- a/homeassistant/components/local_file/camera.py +++ b/homeassistant/components/local_file/camera.py @@ -4,7 +4,6 @@ from __future__ import annotations import logging import mimetypes -import os import voluptuous as vol @@ -12,13 +11,21 @@ from 
homeassistant.components.camera import ( PLATFORM_SCHEMA as CAMERA_PLATFORM_SCHEMA, Camera, ) -from homeassistant.const import ATTR_ENTITY_ID, CONF_FILE_PATH, CONF_NAME -from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.const import CONF_FILE_PATH, CONF_NAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import ( + config_validation as cv, + entity_platform, + issue_registry as ir, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util import slugify -from .const import DATA_LOCAL_FILE, DEFAULT_NAME, DOMAIN, SERVICE_UPDATE_FILE_PATH +from .const import DEFAULT_NAME, DOMAIN, SERVICE_UPDATE_FILE_PATH +from .util import check_file_path_access _LOGGER = logging.getLogger(__name__) @@ -29,57 +36,93 @@ PLATFORM_SCHEMA = CAMERA_PLATFORM_SCHEMA.extend( } ) -CAMERA_SERVICE_UPDATE_FILE_PATH = vol.Schema( - { - vol.Required(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(CONF_FILE_PATH): cv.string, - } -) + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Camera for local file from a config entry.""" + + platform = entity_platform.async_get_current_platform() + platform.async_register_entity_service( + SERVICE_UPDATE_FILE_PATH, + { + vol.Required(CONF_FILE_PATH): cv.string, + }, + "update_file_path", + ) + + async_add_entities( + [ + LocalFile( + entry.options[CONF_NAME], + entry.options[CONF_FILE_PATH], + entry.entry_id, + ) + ] + ) -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, - add_entities: AddEntitiesCallback, + async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Camera that works with local files.""" - if DATA_LOCAL_FILE not in hass.data: - hass.data[DATA_LOCAL_FILE] = [] + file_path: str = config[CONF_FILE_PATH] + file_path_slug = slugify(file_path) - file_path = config[CONF_FILE_PATH] - camera = LocalFile(config[CONF_NAME], file_path) - hass.data[DATA_LOCAL_FILE].append(camera) + if not await hass.async_add_executor_job(check_file_path_access, file_path): + ir.async_create_issue( + hass, + DOMAIN, + f"no_access_path_{file_path_slug}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + learn_more_url="https://www.home-assistant.io/integrations/local_file/", + severity=ir.IssueSeverity.WARNING, + translation_key="no_access_path", + translation_placeholders={ + "file_path": file_path_slug, + }, + ) + return - def update_file_path_service(call: ServiceCall) -> None: - """Update the file path.""" - file_path = call.data[CONF_FILE_PATH] - entity_ids = call.data[ATTR_ENTITY_ID] - cameras = hass.data[DATA_LOCAL_FILE] - - for camera in cameras: - if camera.entity_id in entity_ids: - camera.update_file_path(file_path) - - hass.services.register( - DOMAIN, - SERVICE_UPDATE_FILE_PATH, - update_file_path_service, - schema=CAMERA_SERVICE_UPDATE_FILE_PATH, + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + learn_more_url="https://www.home-assistant.io/integrations/local_file/", + 
severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Local file", + }, ) - add_entities([camera]) + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config, + ) + ) class LocalFile(Camera): """Representation of a local file camera.""" - def __init__(self, name, file_path): + def __init__(self, name: str, file_path: str, unique_id: str) -> None: """Initialize Local File Camera component.""" super().__init__() - - self._name = name - self.check_file_path_access(file_path) + self._attr_name = name + self._attr_unique_id = unique_id self._file_path = file_path # Set content type of local file content, _ = mimetypes.guess_type(file_path) @@ -96,30 +139,21 @@ class LocalFile(Camera): except FileNotFoundError: _LOGGER.warning( "Could not read camera %s image from file: %s", - self._name, + self.name, self._file_path, ) return None - def check_file_path_access(self, file_path): - """Check that filepath given is readable.""" - if not os.access(file_path, os.R_OK): - _LOGGER.warning( - "Could not read camera %s image from file: %s", self._name, file_path - ) - - def update_file_path(self, file_path): + async def update_file_path(self, file_path: str) -> None: """Update the file_path.""" - self.check_file_path_access(file_path) + if not await self.hass.async_add_executor_job( + check_file_path_access, file_path + ): + raise ServiceValidationError(f"Path {file_path} is not accessible") self._file_path = file_path self.schedule_update_ha_state() @property - def name(self): - """Return the name of this camera.""" - return self._name - - @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, str]: """Return the camera state attributes.""" return {"file_path": self._file_path} diff --git a/homeassistant/components/local_file/config_flow.py b/homeassistant/components/local_file/config_flow.py new file mode 100644 index 00000000000..36a41c03543 --- /dev/null +++ b/homeassistant/components/local_file/config_flow.py @@ -0,0 +1,77 @@ +"""Config flow for Local file.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, cast + +import voluptuous as vol + +from homeassistant.const import CONF_FILE_PATH, CONF_NAME +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowError, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import TextSelector + +from .const import DEFAULT_NAME, DOMAIN +from .util import check_file_path_access + + +async def validate_options( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate options selected.""" + file_path: str = user_input[CONF_FILE_PATH] + if not await handler.parent_handler.hass.async_add_executor_job( + check_file_path_access, file_path + ): + raise SchemaFlowError("not_readable_path") + + handler.parent_handler._async_abort_entries_match( # noqa: SLF001 + {CONF_FILE_PATH: user_input[CONF_FILE_PATH]} + ) + + return user_input + + +DATA_SCHEMA_OPTIONS = vol.Schema( + { + vol.Required(CONF_FILE_PATH): TextSelector(), + } +) +DATA_SCHEMA_SETUP = vol.Schema( + { + vol.Optional(CONF_NAME, default=DEFAULT_NAME): TextSelector(), + } +).extend(DATA_SCHEMA_OPTIONS.schema) + +CONFIG_FLOW = { + "user": SchemaFlowFormStep( + schema=DATA_SCHEMA_SETUP, + validate_user_input=validate_options, + ), + "import": 
SchemaFlowFormStep( + schema=DATA_SCHEMA_SETUP, + validate_user_input=validate_options, + ), +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep( + DATA_SCHEMA_OPTIONS, + validate_user_input=validate_options, + ) +} + + +class LocalFileConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Local file.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + return cast(str, options[CONF_NAME]) diff --git a/homeassistant/components/local_file/icons.json b/homeassistant/components/local_file/icons.json index c9c92fa86c8..7b0067c6a44 100644 --- a/homeassistant/components/local_file/icons.json +++ b/homeassistant/components/local_file/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_file_path": "mdi:cog" + "update_file_path": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/local_file/manifest.json b/homeassistant/components/local_file/manifest.json index 46268ff2a77..0e6e64d17e5 100644 --- a/homeassistant/components/local_file/manifest.json +++ b/homeassistant/components/local_file/manifest.json @@ -2,6 +2,7 @@ "domain": "local_file", "name": "Local File", "codeowners": [], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/local_file", "iot_class": "local_polling" } diff --git a/homeassistant/components/local_file/services.yaml b/homeassistant/components/local_file/services.yaml index 5fc0b11f4c2..1b3000e663e 100644 --- a/homeassistant/components/local_file/services.yaml +++ b/homeassistant/components/local_file/services.yaml @@ -1,10 +1,9 @@ update_file_path: + target: + entity: + integration: local_file + domain: camera fields: - entity_id: - required: true - selector: - entity: - domain: camera file_path: required: true example: "/config/www/images/image.jpg" diff --git a/homeassistant/components/local_file/strings.json b/homeassistant/components/local_file/strings.json index 0db5d709c69..abf31a6f94e 100644 --- a/homeassistant/components/local_file/strings.json +++ b/homeassistant/components/local_file/strings.json @@ -1,18 +1,63 @@ { + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "error": { + "not_readable_path": "The provided path to the file can not be read" + }, + "step": { + "user": { + "data": { + "name": "[%key:common::config_flow::data::name%]", + "file_path": "File path" + }, + "data_description": { + "name": "Name for the created entity.", + "file_path": "The full path to the image file to be displayed. Be sure the path of the file is in the allowed paths, you can read more about this in the documentation." + } + } + } + }, + "options": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "error": { + "not_readable_path": "[%key:component::local_file::config::error::not_readable_path%]" + }, + "step": { + "init": { + "data": { + "file_path": "[%key:component::local_file::config::step::user::data::file_path%]" + }, + "data_description": { + "file_path": "[%key:component::local_file::config::step::user::data_description::file_path%]" + } + } + } + }, "services": { "update_file_path": { "name": "Updates file path", "description": "Use this action to change the file displayed by the camera.", "fields": { - "entity_id": { - "name": "Entity", - "description": "Name of the entity_id of the camera to update." 
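With `update_file_path` registered as an entity service and `services.yaml` switched to a `target` selector, callers address the camera entity directly instead of passing `entity_id` as a service field. A minimal sketch of the call from Python, assuming a running `hass` instance and a hypothetical entity ID:

```python
from homeassistant.core import HomeAssistant


async def async_point_camera_at(hass: HomeAssistant, entity_id: str, path: str) -> None:
    """Call the migrated entity service, targeting the camera entity."""
    await hass.services.async_call(
        "local_file",
        "update_file_path",
        {"file_path": path},  # remaining service field from services.yaml
        target={"entity_id": entity_id},  # e.g. "camera.my_local_file" (hypothetical)
        blocking=True,
    )
```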
- }, "file_path": { "name": "File path", - "description": "The full path to the new image file to be displayed." + "description": "[%key:component::local_file::config::step::user::data_description::file_path%]" } } } + }, + "exceptions": { + "file_path_not_accessible": { + "message": "Path {file_path} is not accessible" + } + }, + "issues": { + "no_access_path": { + "title": "Incorrect file path", + "description": "While trying to import your configuration the provided file path {file_path} could not be read.\nPlease update your configuration to a correct file path and restart to fix this issue." + } } } diff --git a/homeassistant/components/local_file/util.py b/homeassistant/components/local_file/util.py new file mode 100644 index 00000000000..9e25bb88678 --- /dev/null +++ b/homeassistant/components/local_file/util.py @@ -0,0 +1,10 @@ +"""Utils for local file.""" + +import os + + +def check_file_path_access(file_path: str) -> bool: + """Check that filepath given is readable.""" + if not os.access(file_path, os.R_OK): + return False + return True diff --git a/homeassistant/components/local_ip/config_flow.py b/homeassistant/components/local_ip/config_flow.py index 3a4612d84aa..6bf9f865489 100644 --- a/homeassistant/components/local_ip/config_flow.py +++ b/homeassistant/components/local_ip/config_flow.py @@ -16,9 +16,6 @@ class SimpleConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is None: return self.async_show_form(step_id="user") diff --git a/homeassistant/components/local_ip/manifest.json b/homeassistant/components/local_ip/manifest.json index 11d86ea0230..6a68ed59628 100644 --- a/homeassistant/components/local_ip/manifest.json +++ b/homeassistant/components/local_ip/manifest.json @@ -5,5 +5,6 @@ "config_flow": true, "dependencies": ["network"], "documentation": "https://www.home-assistant.io/integrations/local_ip", - "iot_class": "local_polling" + "iot_class": "local_polling", + "single_config_entry": true } diff --git a/homeassistant/components/local_ip/strings.json b/homeassistant/components/local_ip/strings.json index a4d9138d88e..7f7508aa9b3 100644 --- a/homeassistant/components/local_ip/strings.json +++ b/homeassistant/components/local_ip/strings.json @@ -6,9 +6,6 @@ "title": "[%key:component::local_ip::title%]", "description": "[%key:common::config_flow::description::confirm_setup%]" } - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } } } diff --git a/homeassistant/components/local_todo/manifest.json b/homeassistant/components/local_todo/manifest.json index 313315a34f6..c126799c39d 100644 --- a/homeassistant/components/local_todo/manifest.json +++ b/homeassistant/components/local_todo/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/local_todo", "iot_class": "local_polling", - "requirements": ["ical==8.1.1"] + "requirements": ["ical==8.2.0"] } diff --git a/homeassistant/components/local_todo/todo.py b/homeassistant/components/local_todo/todo.py index a5f40c26738..c496fd6b6ba 100644 --- a/homeassistant/components/local_todo/todo.py +++ b/homeassistant/components/local_todo/todo.py @@ -1,5 +1,6 @@ """A Local To-do todo platform.""" +import asyncio import datetime import logging @@ -130,6 +131,7 @@ class LocalTodoListEntity(TodoListEntity): """Initialize 
LocalTodoListEntity.""" self._store = store self._calendar = calendar + self._calendar_lock = asyncio.Lock() self._attr_name = name.capitalize() self._attr_unique_id = unique_id @@ -159,23 +161,28 @@ class LocalTodoListEntity(TodoListEntity): async def async_create_todo_item(self, item: TodoItem) -> None: """Add an item to the To-do list.""" todo = _convert_item(item) - self._new_todo_store().add(todo) - await self.async_save() + async with self._calendar_lock: + todo_store = self._new_todo_store() + await self.hass.async_add_executor_job(todo_store.add, todo) + await self.async_save() await self.async_update_ha_state(force_refresh=True) async def async_update_todo_item(self, item: TodoItem) -> None: """Update an item to the To-do list.""" todo = _convert_item(item) - self._new_todo_store().edit(todo.uid, todo) - await self.async_save() + async with self._calendar_lock: + todo_store = self._new_todo_store() + await self.hass.async_add_executor_job(todo_store.edit, todo.uid, todo) + await self.async_save() await self.async_update_ha_state(force_refresh=True) async def async_delete_todo_items(self, uids: list[str]) -> None: """Delete an item from the To-do list.""" store = self._new_todo_store() - for uid in uids: - store.delete(uid) - await self.async_save() + async with self._calendar_lock: + for uid in uids: + store.delete(uid) + await self.async_save() await self.async_update_ha_state(force_refresh=True) async def async_move_todo_item( @@ -184,23 +191,24 @@ class LocalTodoListEntity(TodoListEntity): """Re-order an item to the To-do list.""" if uid == previous_uid: return - todos = self._calendar.todos - item_idx: dict[str, int] = {itm.uid: idx for idx, itm in enumerate(todos)} - if uid not in item_idx: - raise HomeAssistantError( - "Item '{uid}' not found in todo list {self.entity_id}" - ) - if previous_uid and previous_uid not in item_idx: - raise HomeAssistantError( - "Item '{previous_uid}' not found in todo list {self.entity_id}" - ) - dst_idx = item_idx[previous_uid] + 1 if previous_uid else 0 - src_idx = item_idx[uid] - src_item = todos.pop(src_idx) - if dst_idx > src_idx: - dst_idx -= 1 - todos.insert(dst_idx, src_item) - await self.async_save() + async with self._calendar_lock: + todos = self._calendar.todos + item_idx: dict[str, int] = {itm.uid: idx for idx, itm in enumerate(todos)} + if uid not in item_idx: + raise HomeAssistantError( + "Item '{uid}' not found in todo list {self.entity_id}" + ) + if previous_uid and previous_uid not in item_idx: + raise HomeAssistantError( + "Item '{previous_uid}' not found in todo list {self.entity_id}" + ) + dst_idx = item_idx[previous_uid] + 1 if previous_uid else 0 + src_idx = item_idx[uid] + src_item = todos.pop(src_idx) + if dst_idx > src_idx: + dst_idx -= 1 + todos.insert(dst_idx, src_item) + await self.async_save() await self.async_update_ha_state(force_refresh=True) async def async_save(self) -> None: diff --git a/homeassistant/components/locative/device_tracker.py b/homeassistant/components/locative/device_tracker.py index 0b5cb32c22b..47a498331eb 100644 --- a/homeassistant/components/locative/device_tracker.py +++ b/homeassistant/components/locative/device_tracker.py @@ -1,6 +1,6 @@ """Support for the Locative platform.""" -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import 
async_dispatcher_connect @@ -35,35 +35,16 @@ class LocativeEntity(TrackerEntity): def __init__(self, device, location, location_name): """Set up Locative entity.""" self._name = device - self._location = location - self._location_name = location_name + self._attr_latitude = location[0] + self._attr_longitude = location[1] + self._attr_location_name = location_name self._unsub_dispatcher = None - @property - def latitude(self): - """Return latitude value of the device.""" - return self._location[0] - - @property - def longitude(self): - """Return longitude value of the device.""" - return self._location[1] - - @property - def location_name(self): - """Return a location name for the current location of the device.""" - return self._location_name - @property def name(self): """Return the name of the device.""" return self._name - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS - async def async_added_to_hass(self) -> None: """Register state update callback.""" self._unsub_dispatcher = async_dispatcher_connect( @@ -79,6 +60,7 @@ class LocativeEntity(TrackerEntity): """Update device data.""" if device != self._name: return - self._location_name = location_name - self._location = location + self._attr_location_name = location_name + self._attr_latitude = location[0] + self._attr_longitude = location[1] self.async_write_ha_state() diff --git a/homeassistant/components/lock/__init__.py b/homeassistant/components/lock/__init__.py index fd3f60d3502..39d5d3c350d 100644 --- a/homeassistant/components/lock/__init__.py +++ b/homeassistant/components/lock/__init__.py @@ -5,33 +5,32 @@ from __future__ import annotations from datetime import timedelta from enum import IntFlag import functools as ft -from functools import cached_property import logging import re from typing import TYPE_CHECKING, Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( +from homeassistant.const import ( # noqa: F401 + _DEPRECATED_STATE_JAMMED, + _DEPRECATED_STATE_LOCKED, + _DEPRECATED_STATE_LOCKING, + _DEPRECATED_STATE_UNLOCKED, + _DEPRECATED_STATE_UNLOCKING, ATTR_CODE, ATTR_CODE_FORMAT, SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, STATE_OPEN, STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, all_with_deprecated_constants, check_if_deprecated_constant, dir_with_deprecated_constants, @@ -39,11 +38,13 @@ from homeassistant.helpers.deprecation import ( from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType, StateType +from homeassistant.util.hass_dict import HassKey -from .const import DOMAIN +from .const import DOMAIN, LockState _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[LockEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -65,10 +66,6 @@ class LockEntityFeature(IntFlag): OPEN = 1 -# The SUPPORT_OPEN constant is deprecated as of Home Assistant 2022.5. 
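The `STATE_*` constants dropped from these imports are superseded by the `LockState` enum added to `lock/const.py` further down. Because `StrEnum` members are plain strings, recorded states and string comparisons keep working; a small self-contained sketch with a trimmed stand-in enum (not the full definition):

```python
from enum import StrEnum  # Python 3.11+


class ExampleLockState(StrEnum):
    """Trimmed stand-in for lock.const.LockState."""

    LOCKED = "locked"
    UNLOCKED = "unlocked"


# Members compare equal to their raw string values, so code that still sees
# plain state strings (e.g. from the state machine) behaves unchanged.
assert ExampleLockState.LOCKED == "locked"
assert "unlocked" in {ExampleLockState.LOCKED, ExampleLockState.UNLOCKED}
assert str(ExampleLockState.LOCKED) == "locked"
```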
-# Please use the LockEntityFeature enum instead. -_DEPRECATED_SUPPORT_OPEN = DeprecatedConstantEnum(LockEntityFeature.OPEN, "2025.1") - PROP_TO_ATTR = {"changed_by": ATTR_CHANGED_BY, "code_format": ATTR_CODE_FORMAT} # mypy: disallow-any-generics @@ -76,7 +73,7 @@ PROP_TO_ATTR = {"changed_by": ATTR_CHANGED_BY, "code_format": ATTR_CODE_FORMAT} async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for locks.""" - component = hass.data[DOMAIN] = EntityComponent[LockEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[LockEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -100,14 +97,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[LockEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[LockEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class LockEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -274,28 +269,23 @@ class LockEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def state(self) -> str | None: """Return the state.""" if self.is_jammed: - return STATE_JAMMED + return LockState.JAMMED if self.is_opening: - return STATE_OPENING + return LockState.OPENING if self.is_locking: - return STATE_LOCKING + return LockState.LOCKING if self.is_open: - return STATE_OPEN + return LockState.OPEN if self.is_unlocking: - return STATE_UNLOCKING + return LockState.UNLOCKING if (locked := self.is_locked) is None: return None - return STATE_LOCKED if locked else STATE_UNLOCKED + return LockState.LOCKED if locked else LockState.UNLOCKED @cached_property def supported_features(self) -> LockEntityFeature: """Return the list of supported features.""" - features = self._attr_supported_features - if type(features) is int: # noqa: E721 - new_features = LockEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features + return self._attr_supported_features async def async_internal_added_to_hass(self) -> None: """Call when the sensor entity is added to hass.""" diff --git a/homeassistant/components/lock/const.py b/homeassistant/components/lock/const.py index 1370a26ab36..7a06bc12b05 100644 --- a/homeassistant/components/lock/const.py +++ b/homeassistant/components/lock/const.py @@ -1,3 +1,17 @@ """Constants for the lock entity platform.""" +from enum import StrEnum + DOMAIN = "lock" + + +class LockState(StrEnum): + """State of lock entities.""" + + JAMMED = "jammed" + OPENING = "opening" + LOCKING = "locking" + OPEN = "open" + UNLOCKING = "unlocking" + LOCKED = "locked" + UNLOCKED = "unlocked" diff --git a/homeassistant/components/lock/device_condition.py b/homeassistant/components/lock/device_condition.py index ec6373c889f..c104abd82a4 100644 --- a/homeassistant/components/lock/device_condition.py +++ b/homeassistant/components/lock/device_condition.py @@ -11,13 +11,6 @@ from homeassistant.const import ( CONF_DOMAIN, CONF_ENTITY_ID, CONF_TYPE, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, ) from 
homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -28,7 +21,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from . import DOMAIN +from . import DOMAIN, LockState # mypy: disallow-any-generics @@ -81,19 +74,19 @@ def async_condition_from_config( ) -> condition.ConditionCheckerType: """Create a function to test a device condition.""" if config[CONF_TYPE] == "is_jammed": - state = STATE_JAMMED + state = LockState.JAMMED elif config[CONF_TYPE] == "is_opening": - state = STATE_OPENING + state = LockState.OPENING elif config[CONF_TYPE] == "is_locking": - state = STATE_LOCKING + state = LockState.LOCKING elif config[CONF_TYPE] == "is_open": - state = STATE_OPEN + state = LockState.OPEN elif config[CONF_TYPE] == "is_unlocking": - state = STATE_UNLOCKING + state = LockState.UNLOCKING elif config[CONF_TYPE] == "is_locked": - state = STATE_LOCKED + state = LockState.LOCKED else: - state = STATE_UNLOCKED + state = LockState.UNLOCKED registry = er.async_get(hass) entity_id = er.async_resolve_entity_id(registry, config[ATTR_ENTITY_ID]) diff --git a/homeassistant/components/lock/device_trigger.py b/homeassistant/components/lock/device_trigger.py index 336fe127ca6..06e4e5b6431 100644 --- a/homeassistant/components/lock/device_trigger.py +++ b/homeassistant/components/lock/device_trigger.py @@ -13,20 +13,13 @@ from homeassistant.const import ( CONF_FOR, CONF_PLATFORM, CONF_TYPE, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, ) from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN +from . 
import DOMAIN, LockState TRIGGER_TYPES = { "jammed", @@ -93,19 +86,19 @@ async def async_attach_trigger( ) -> CALLBACK_TYPE: """Attach a trigger.""" if config[CONF_TYPE] == "jammed": - to_state = STATE_JAMMED + to_state = LockState.JAMMED elif config[CONF_TYPE] == "opening": - to_state = STATE_OPENING + to_state = LockState.OPENING elif config[CONF_TYPE] == "locking": - to_state = STATE_LOCKING + to_state = LockState.LOCKING elif config[CONF_TYPE] == "open": - to_state = STATE_OPEN + to_state = LockState.OPEN elif config[CONF_TYPE] == "unlocking": - to_state = STATE_UNLOCKING + to_state = LockState.UNLOCKING elif config[CONF_TYPE] == "locked": - to_state = STATE_LOCKED + to_state = LockState.LOCKED else: - to_state = STATE_UNLOCKED + to_state = LockState.UNLOCKED state_config = { CONF_PLATFORM: "state", diff --git a/homeassistant/components/lock/icons.json b/homeassistant/components/lock/icons.json index 009bd84a372..0b1befde9ff 100644 --- a/homeassistant/components/lock/icons.json +++ b/homeassistant/components/lock/icons.json @@ -13,8 +13,14 @@ } }, "services": { - "lock": "mdi:lock", - "open": "mdi:door-open", - "unlock": "mdi:lock-open-variant" + "lock": { + "service": "mdi:lock" + }, + "open": { + "service": "mdi:door-open" + }, + "unlock": { + "service": "mdi:lock-open-variant" + } } } diff --git a/homeassistant/components/lock/reproduce_state.py b/homeassistant/components/lock/reproduce_state.py index 5fc3345c1f6..252528c9985 100644 --- a/homeassistant/components/lock/reproduce_state.py +++ b/homeassistant/components/lock/reproduce_state.py @@ -12,26 +12,20 @@ from homeassistant.const import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, ) from homeassistant.core import Context, HomeAssistant, State -from . import DOMAIN +from . 
import DOMAIN, LockState _LOGGER = logging.getLogger(__name__) VALID_STATES = { - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, + LockState.LOCKED, + LockState.LOCKING, + LockState.OPEN, + LockState.OPENING, + LockState.UNLOCKED, + LockState.UNLOCKING, } @@ -59,11 +53,11 @@ async def _async_reproduce_state( service_data = {ATTR_ENTITY_ID: state.entity_id} - if state.state in {STATE_LOCKED, STATE_LOCKING}: + if state.state in {LockState.LOCKED, LockState.LOCKING}: service = SERVICE_LOCK - elif state.state in {STATE_UNLOCKED, STATE_UNLOCKING}: + elif state.state in {LockState.UNLOCKED, LockState.UNLOCKING}: service = SERVICE_UNLOCK - elif state.state in {STATE_OPEN, STATE_OPENING}: + elif state.state in {LockState.OPEN, LockState.OPENING}: service = SERVICE_OPEN await hass.services.async_call( diff --git a/homeassistant/components/logbook/__init__.py b/homeassistant/components/logbook/__init__.py index 239a52ff7a1..2e2ffddac88 100644 --- a/homeassistant/components/logbook/__init__.py +++ b/homeassistant/components/logbook/__init__.py @@ -55,7 +55,7 @@ CONFIG_SCHEMA = vol.Schema( LOG_MESSAGE_SCHEMA = vol.Schema( { vol.Required(ATTR_NAME): cv.string, - vol.Required(ATTR_MESSAGE): cv.template, + vol.Required(ATTR_MESSAGE): cv.string, vol.Optional(ATTR_DOMAIN): cv.slug, vol.Optional(ATTR_ENTITY_ID): cv.entity_id, } @@ -112,7 +112,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # away so we use the "logbook" domain domain = DOMAIN - message = message.async_render(parse_result=False) async_log_entry(hass, name, message, domain, entity_id, service.context) frontend.async_register_built_in_panel( diff --git a/homeassistant/components/logbook/icons.json b/homeassistant/components/logbook/icons.json index cd2cde8600c..a8af6427b8c 100644 --- a/homeassistant/components/logbook/icons.json +++ b/homeassistant/components/logbook/icons.json @@ -1,5 +1,7 @@ { "services": { - "log": "mdi:file-document" + "log": { + "service": "mdi:file-document" + } } } diff --git a/homeassistant/components/logbook/models.py b/homeassistant/components/logbook/models.py index 8fd850b26fb..c33325d7dcb 100644 --- a/homeassistant/components/logbook/models.py +++ b/homeassistant/components/logbook/models.py @@ -4,9 +4,9 @@ from __future__ import annotations from collections.abc import Callable, Mapping from dataclasses import dataclass -from functools import cached_property from typing import TYPE_CHECKING, Any, Final, NamedTuple, cast +from propcache import cached_property from sqlalchemy.engine.row import Row from homeassistant.components.recorder.filters import Filters diff --git a/homeassistant/components/logbook/websocket_api.py b/homeassistant/components/logbook/websocket_api.py index b776ad6303d..b295b845532 100644 --- a/homeassistant/components/logbook/websocket_api.py +++ b/homeassistant/components/logbook/websocket_api.py @@ -13,8 +13,7 @@ import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.components.recorder import get_instance -from homeassistant.components.websocket_api import messages -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection, messages from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.helpers.json import json_bytes diff --git 
a/homeassistant/components/logentries/manifest.json b/homeassistant/components/logentries/manifest.json index ecf2d8a227c..e63e83aff00 100644 --- a/homeassistant/components/logentries/manifest.json +++ b/homeassistant/components/logentries/manifest.json @@ -3,5 +3,6 @@ "name": "Logentries", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/logentries", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/logger/icons.json b/homeassistant/components/logger/icons.json index 305dd3ece91..1542e1e5ad3 100644 --- a/homeassistant/components/logger/icons.json +++ b/homeassistant/components/logger/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_default_level": "mdi:cog-outline", - "set_level": "mdi:cog-outline" + "set_default_level": { + "service": "mdi:cog-outline" + }, + "set_level": { + "service": "mdi:cog-outline" + } } } diff --git a/homeassistant/components/logger/websocket_api.py b/homeassistant/components/logger/websocket_api.py index 6d34b10bd34..2430f187a6f 100644 --- a/homeassistant/components/logger/websocket_api.py +++ b/homeassistant/components/logger/websocket_api.py @@ -5,7 +5,7 @@ from typing import Any import voluptuous as vol from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.loader import IntegrationNotFound, async_get_integration from homeassistant.setup import async_get_loaded_integrations diff --git a/homeassistant/components/london_air/manifest.json b/homeassistant/components/london_air/manifest.json index 60eed8d83bd..653a951ae56 100644 --- a/homeassistant/components/london_air/manifest.json +++ b/homeassistant/components/london_air/manifest.json @@ -3,5 +3,6 @@ "name": "London Air", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/london_air", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/london_underground/const.py b/homeassistant/components/london_underground/const.py index 532f4333ba9..447ed4461f3 100644 --- a/homeassistant/components/london_underground/const.py +++ b/homeassistant/components/london_underground/const.py @@ -24,4 +24,10 @@ TUBE_LINES = [ "Piccadilly", "Victoria", "Waterloo & City", + "Liberty", + "Lioness", + "Mildmay", + "Suffragette", + "Weaver", + "Windrush", ] diff --git a/homeassistant/components/london_underground/coordinator.py b/homeassistant/components/london_underground/coordinator.py index cf14ad14b43..29d1e8e2f54 100644 --- a/homeassistant/components/london_underground/coordinator.py +++ b/homeassistant/components/london_underground/coordinator.py @@ -24,6 +24,7 @@ class LondonTubeCoordinator(DataUpdateCoordinator[dict[str, dict[str, str]]]): super().__init__( hass, _LOGGER, + config_entry=None, name=DOMAIN, update_interval=SCAN_INTERVAL, ) diff --git a/homeassistant/components/london_underground/manifest.json b/homeassistant/components/london_underground/manifest.json index eafc63c6ae7..94b993097c0 100644 --- a/homeassistant/components/london_underground/manifest.json +++ b/homeassistant/components/london_underground/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/london_underground", "iot_class": "cloud_polling", "loggers": ["london_tube_status"], + 
"quality_scale": "legacy", "requirements": ["london-tube-status==0.5"] } diff --git a/homeassistant/components/lookin/climate.py b/homeassistant/components/lookin/climate.py index fadeb6d16fa..051a18c9a32 100644 --- a/homeassistant/components/lookin/climate.py +++ b/homeassistant/components/lookin/climate.py @@ -107,7 +107,6 @@ class ConditionerEntity(LookinCoordinatorEntity, ClimateEntity): _attr_min_temp = MIN_TEMP _attr_max_temp = MAX_TEMP _attr_target_temperature_step = PRECISION_WHOLE - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/lookin/config_flow.py b/homeassistant/components/lookin/config_flow.py index ce798b8f24b..aaf98a06fa8 100644 --- a/homeassistant/components/lookin/config_flow.py +++ b/homeassistant/components/lookin/config_flow.py @@ -47,7 +47,10 @@ class LookinFlowHandler(ConfigFlow, domain=DOMAIN): self._name = device.name self._host = host self._set_confirm_only() - self.context["title_placeholders"] = {"name": self._name, "host": host} + self.context["title_placeholders"] = { + "name": self._name or "LOOKin", + "host": host, + } return await self.async_step_discovery_confirm() async def async_step_user( @@ -92,13 +95,12 @@ class LookinFlowHandler(ConfigFlow, domain=DOMAIN): """Confirm the discover flow.""" assert self._host is not None if user_input is None: - self.context["title_placeholders"] = { - "name": self._name, - "host": self._host, - } return self.async_show_form( step_id="discovery_confirm", - description_placeholders={"name": self._name, "host": self._host}, + description_placeholders={ + "name": self._name or "LOOKin", + "host": self._host, + }, ) return self.async_create_entry( diff --git a/homeassistant/components/lovelace/icons.json b/homeassistant/components/lovelace/icons.json index fe0a0e114ae..8261dc2d0c9 100644 --- a/homeassistant/components/lovelace/icons.json +++ b/homeassistant/components/lovelace/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload_resources": "mdi:reload" + "reload_resources": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/luci/device_tracker.py b/homeassistant/components/luci/device_tracker.py index 59d4d12ddf6..cf04cdb292a 100644 --- a/homeassistant/components/luci/device_tracker.py +++ b/homeassistant/components/luci/device_tracker.py @@ -8,7 +8,7 @@ from openwrt_luci_rpc import OpenWrtRpc import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -41,7 +41,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> LuciDeviceScanner | None: """Validate the configuration and return a Luci scanner.""" - scanner = LuciDeviceScanner(config[DOMAIN]) + scanner = LuciDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None diff --git a/homeassistant/components/luci/manifest.json b/homeassistant/components/luci/manifest.json index 597aad30648..a8df2c63df4 100644 --- a/homeassistant/components/luci/manifest.json +++ b/homeassistant/components/luci/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/luci", "iot_class": "local_polling", "loggers": ["openwrt_luci_rpc"], + "quality_scale": "legacy", "requirements": ["openwrt-luci-rpc==1.1.17"] } diff --git a/homeassistant/components/luftdaten/__init__.py b/homeassistant/components/luftdaten/__init__.py index 
9079b056731..37f0f27d2d8 100644 --- a/homeassistant/components/luftdaten/__init__.py +++ b/homeassistant/components/luftdaten/__init__.py @@ -52,6 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator: DataUpdateCoordinator[dict[str, Any]] = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{DOMAIN}_{sensor_community.sensor_id}", update_interval=DEFAULT_SCAN_INTERVAL, update_method=async_update, diff --git a/homeassistant/components/luftdaten/manifest.json b/homeassistant/components/luftdaten/manifest.json index 96927bdd4a8..bafffe4d6ae 100644 --- a/homeassistant/components/luftdaten/manifest.json +++ b/homeassistant/components/luftdaten/manifest.json @@ -7,6 +7,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["luftdaten"], - "quality_scale": "gold", "requirements": ["luftdaten==0.7.4"] } diff --git a/homeassistant/components/luftdaten/strings.json b/homeassistant/components/luftdaten/strings.json index b7d0a90b511..ea842f18ebd 100644 --- a/homeassistant/components/luftdaten/strings.json +++ b/homeassistant/components/luftdaten/strings.json @@ -8,6 +8,9 @@ } } }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "invalid_sensor": "Sensor not available or invalid", diff --git a/homeassistant/components/lupusec/alarm_control_panel.py b/homeassistant/components/lupusec/alarm_control_panel.py index 73aba775a2a..4b3d12ad743 100644 --- a/homeassistant/components/lupusec/alarm_control_panel.py +++ b/homeassistant/components/lupusec/alarm_control_panel.py @@ -9,14 +9,9 @@ import lupupy from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -64,16 +59,16 @@ class LupusecAlarm(LupusecDevice, AlarmControlPanelEntity): ) @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" if self._device.is_standby: - state = STATE_ALARM_DISARMED + state = AlarmControlPanelState.DISARMED elif self._device.is_away: - state = STATE_ALARM_ARMED_AWAY + state = AlarmControlPanelState.ARMED_AWAY elif self._device.is_home: - state = STATE_ALARM_ARMED_HOME + state = AlarmControlPanelState.ARMED_HOME elif self._device.is_alarm_triggered: - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED else: state = None return state diff --git a/homeassistant/components/lutron/__init__.py b/homeassistant/components/lutron/__init__.py index 45a51eb6df8..a494a37cb52 100644 --- a/homeassistant/components/lutron/__init__.py +++ b/homeassistant/components/lutron/__init__.py @@ -54,7 +54,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b lutron_client = Lutron(host, uid, pwd) await hass.async_add_executor_job(lutron_client.load_xml_db) lutron_client.connect() - _LOGGER.info("Connected to main repeater at %s", host) + _LOGGER.debug("Connected to main repeater at %s", host) entity_registry = er.async_get(hass) 
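The coordinator constructions touched in this diff (london_underground passes `config_entry=None`, luftdaten passes `config_entry=entry`) now hand the config entry to `DataUpdateCoordinator` explicitly. A minimal sketch of that pattern with illustrative names; the placeholder update method stands in for a real poll:

```python
from datetime import timedelta
import logging
from typing import Any

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


def build_example_coordinator(
    hass: HomeAssistant, entry: ConfigEntry | None
) -> DataUpdateCoordinator[dict[str, Any]]:
    """Build a coordinator, passing the config entry (or None for YAML-only setups)."""

    async def _async_update() -> dict[str, Any]:
        # Placeholder; a real coordinator would poll its device or API here.
        return {}

    return DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,
        name="example_coordinator",
        update_interval=timedelta(minutes=5),
        update_method=_async_update,
    )
```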
device_registry = dr.async_get(hass) diff --git a/homeassistant/components/lutron/config_flow.py b/homeassistant/components/lutron/config_flow.py index e14d56fde57..6a48e0d4b67 100644 --- a/homeassistant/components/lutron/config_flow.py +++ b/homeassistant/components/lutron/config_flow.py @@ -26,11 +26,6 @@ class LutronConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """First step in the config flow.""" - - # Check if a configuration entry already exists - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: diff --git a/homeassistant/components/lutron/fan.py b/homeassistant/components/lutron/fan.py index dc881b393de..7db8b12c8d0 100644 --- a/homeassistant/components/lutron/fan.py +++ b/homeassistant/components/lutron/fan.py @@ -51,7 +51,6 @@ class LutronFan(LutronDevice, FanEntity): ) _lutron_device: Output _prev_percentage: int | None = None - _enable_turn_on_off_backwards_compatibility = False def set_percentage(self, percentage: int) -> None: """Set the speed of the fan, as a percentage.""" diff --git a/homeassistant/components/lutron/manifest.json b/homeassistant/components/lutron/manifest.json index d9432f77bba..82bdfad4774 100644 --- a/homeassistant/components/lutron/manifest.json +++ b/homeassistant/components/lutron/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/lutron", "iot_class": "local_polling", "loggers": ["pylutron"], - "requirements": ["pylutron==0.2.15"] + "requirements": ["pylutron==0.2.16"], + "single_config_entry": true } diff --git a/homeassistant/components/lutron/scene.py b/homeassistant/components/lutron/scene.py index b66ca08a587..9e8070713a9 100644 --- a/homeassistant/components/lutron/scene.py +++ b/homeassistant/components/lutron/scene.py @@ -51,4 +51,4 @@ class LutronScene(LutronKeypad, Scene): def activate(self, **kwargs: Any) -> None: """Activate the scene.""" - self._lutron_device.press() + self._lutron_device.tap() diff --git a/homeassistant/components/lutron/strings.json b/homeassistant/components/lutron/strings.json index 770a453eb9e..b73e0bd15ed 100644 --- a/homeassistant/components/lutron/strings.json +++ b/homeassistant/components/lutron/strings.json @@ -17,9 +17,6 @@ "description": "Please enter the main repeater login information", "title": "Main repeater setup" } - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, "entity": { diff --git a/homeassistant/components/lutron_caseta/__init__.py b/homeassistant/components/lutron_caseta/__init__.py index 178acea83f0..26fc5ba153e 100644 --- a/homeassistant/components/lutron_caseta/__init__.py +++ b/homeassistant/components/lutron_caseta/__init__.py @@ -14,13 +14,12 @@ from pylutron_caseta.smartbridge import Smartbridge import voluptuous as vol from homeassistant import config_entries -from homeassistant.const import ATTR_DEVICE_ID, ATTR_SUGGESTED_AREA, CONF_HOST, Platform +from homeassistant.const import ATTR_DEVICE_ID, CONF_HOST, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType from .const import ( @@ 
-40,7 +39,6 @@ from .const import ( CONF_CERTFILE, CONF_KEYFILE, CONF_SUBTYPE, - CONFIG_URL, DOMAIN, LUTRON_CASETA_BUTTON_EVENT, MANUFACTURER, @@ -68,7 +66,7 @@ from .models import ( LutronKeypad, LutronKeypadData, ) -from .util import serial_to_unique_id +from .util import area_name_from_id, serial_to_unique_id _LOGGER = logging.getLogger(__name__) @@ -224,7 +222,7 @@ def _async_register_bridge_device( configuration_url="https://device-login.lutron.com", ) - area = _area_name_from_id(bridge.areas, bridge_device["area"]) + area = area_name_from_id(bridge.areas, bridge_device["area"]) if area != UNASSIGNED_AREA: device_args["suggested_area"] = area @@ -342,7 +340,7 @@ def _async_build_lutron_keypad( keypad_device_id: int, ) -> LutronKeypad: # First time seeing this keypad, build keypad data and store in keypads - area_name = _area_name_from_id(bridge.areas, bridge_keypad["area"]) + area_name = area_name_from_id(bridge.areas, bridge_keypad["area"]) keypad_name = bridge_keypad["name"].split("_")[-1] keypad_serial = _handle_none_keypad_serial(bridge_keypad, bridge_device["serial"]) device_info = DeviceInfo( @@ -404,27 +402,6 @@ def _handle_none_keypad_serial(keypad_device: dict, bridge_serial: int) -> str: return keypad_device["serial"] or f"{bridge_serial}_{keypad_device['device_id']}" -def _area_name_from_id(areas: dict[str, dict], area_id: str | None) -> str: - """Return the full area name including parent(s).""" - if area_id is None: - return UNASSIGNED_AREA - return _construct_area_name_from_id(areas, area_id, []) - - -def _construct_area_name_from_id( - areas: dict[str, dict], area_id: str, labels: list[str] -) -> str: - """Recursively construct the full area name including parent(s).""" - area = areas[area_id] - parent_area_id = area["parent_id"] - if parent_area_id is None: - # This is the root area, return last area - return " ".join(labels) - - labels.insert(0, area["name"]) - return _construct_area_name_from_id(areas, parent_area_id, labels) - - @callback def async_get_lip_button(device_type: str, leap_button: int) -> int | None: """Get the LIP button for a given LEAP button.""" @@ -500,98 +477,6 @@ async def async_unload_entry( return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -class LutronCasetaDevice(Entity): - """Common base class for all Lutron Caseta devices.""" - - _attr_should_poll = False - - def __init__(self, device: dict[str, Any], data: LutronCasetaData) -> None: - """Set up the base class. - - [:param]device the device metadata - [:param]bridge the smartbridge object - [:param]bridge_device a dict with the details of the bridge - """ - self._device = device - self._smartbridge = data.bridge - self._bridge_device = data.bridge_device - self._bridge_unique_id = serial_to_unique_id(data.bridge_device["serial"]) - if "serial" not in self._device: - return - - if "parent_device" in device: - # This is a child entity, handle the naming in button.py and switch.py - return - area = _area_name_from_id(self._smartbridge.areas, device["area"]) - name = device["name"].split("_")[-1] - self._attr_name = full_name = f"{area} {name}" - info = DeviceInfo( - # Historically we used the device serial number for the identifier - # but the serial is usually an integer and a string is expected - # here. Since it would be a breaking change to change the identifier - # we are ignoring the type error here until it can be migrated to - # a string in a future release. 
- identifiers={ - ( - DOMAIN, - self._handle_none_serial(self.serial), # type: ignore[arg-type] - ) - }, - manufacturer=MANUFACTURER, - model=f"{device['model']} ({device['type']})", - name=full_name, - via_device=(DOMAIN, self._bridge_device["serial"]), - configuration_url=CONFIG_URL, - ) - if area != UNASSIGNED_AREA: - info[ATTR_SUGGESTED_AREA] = area - self._attr_device_info = info - - async def async_added_to_hass(self): - """Register callbacks.""" - self._smartbridge.add_subscriber(self.device_id, self.async_write_ha_state) - - def _handle_none_serial(self, serial: str | int | None) -> str | int: - """Handle None serial returned by RA3 and QSX processors.""" - if serial is None: - return f"{self._bridge_unique_id}_{self.device_id}" - return serial - - @property - def device_id(self): - """Return the device ID used for calling pylutron_caseta.""" - return self._device["device_id"] - - @property - def serial(self) -> int | None: - """Return the serial number of the device.""" - return self._device["serial"] - - @property - def unique_id(self) -> str: - """Return the unique ID of the device (serial).""" - return str(self._handle_none_serial(self.serial)) - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - attributes = { - "device_id": self.device_id, - } - if zone := self._device.get("zone"): - attributes["zone_id"] = zone - return attributes - - -class LutronCasetaDeviceUpdatableEntity(LutronCasetaDevice): - """A lutron_caseta entity that can update by syncing data from the bridge.""" - - async def async_update(self) -> None: - """Update when forcing a refresh of the device.""" - self._device = self._smartbridge.get_device_by_id(self.device_id) - _LOGGER.debug(self._device) - - def _id_to_identifier(lutron_id: str) -> tuple[str, str]: """Convert a lutron caseta identifier to a device identifier.""" return (DOMAIN, lutron_id) diff --git a/homeassistant/components/lutron_caseta/binary_sensor.py b/homeassistant/components/lutron_caseta/binary_sensor.py index bfed8c785ae..b51756692c1 100644 --- a/homeassistant/components/lutron_caseta/binary_sensor.py +++ b/homeassistant/components/lutron_caseta/binary_sensor.py @@ -11,9 +11,11 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice, _area_name_from_id +from . 
import DOMAIN as CASETA_DOMAIN from .const import CONFIG_URL, MANUFACTURER, UNASSIGNED_AREA +from .entity import LutronCasetaEntity from .models import LutronCasetaConfigEntry +from .util import area_name_from_id async def async_setup_entry( @@ -35,7 +37,7 @@ async def async_setup_entry( ) -class LutronOccupancySensor(LutronCasetaDevice, BinarySensorEntity): +class LutronOccupancySensor(LutronCasetaEntity, BinarySensorEntity): """Representation of a Lutron occupancy group.""" _attr_device_class = BinarySensorDeviceClass.OCCUPANCY @@ -43,7 +45,7 @@ class LutronOccupancySensor(LutronCasetaDevice, BinarySensorEntity): def __init__(self, device, data): """Init an occupancy sensor.""" super().__init__(device, data) - area = _area_name_from_id(self._smartbridge.areas, device["area"]) + area = area_name_from_id(self._smartbridge.areas, device["area"]) name = f"{area} {device['device_name']}" self._attr_name = name self._attr_device_info = DeviceInfo( diff --git a/homeassistant/components/lutron_caseta/button.py b/homeassistant/components/lutron_caseta/button.py index d2651673c4c..a74de46346b 100644 --- a/homeassistant/components/lutron_caseta/button.py +++ b/homeassistant/components/lutron_caseta/button.py @@ -9,8 +9,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LutronCasetaDevice from .device_trigger import LEAP_TO_DEVICE_TYPE_SUBTYPE_MAP +from .entity import LutronCasetaEntity from .models import LutronCasetaConfigEntry, LutronCasetaData @@ -65,7 +65,7 @@ async def async_setup_entry( async_add_entities(entities) -class LutronCasetaButton(LutronCasetaDevice, ButtonEntity): +class LutronCasetaButton(LutronCasetaEntity, ButtonEntity): """Representation of a Lutron pico and keypad button.""" def __init__( diff --git a/homeassistant/components/lutron_caseta/config_flow.py b/homeassistant/components/lutron_caseta/config_flow.py index 0458b8ee185..cd566b767fb 100644 --- a/homeassistant/components/lutron_caseta/config_flow.py +++ b/homeassistant/components/lutron_caseta/config_flow.py @@ -95,7 +95,9 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by homekit discovery.""" return await self.async_step_zeroconf(discovery_info) - async def async_step_link(self, user_input=None): + async def async_step_link( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle pairing with the hub.""" errors = {} # Abort if existing entry with matching host exists. @@ -166,21 +168,21 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): for asset_key, conf_key in FILE_MAPPING.items(): self.data[conf_key] = TLS_ASSET_TEMPLATE.format(self.bridge_id, asset_key) - async def async_step_import(self, import_info): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a new Caseta bridge as a config entry. This flow is triggered by `async_setup`. """ - host = import_info[CONF_HOST] + host = import_data[CONF_HOST] # Store the imported config for other steps in this flow to access. self.data[CONF_HOST] = host # Abort if existing entry with matching host exists. 
self._async_abort_entries_match({CONF_HOST: self.data[CONF_HOST]}) - self.data[CONF_KEYFILE] = import_info[CONF_KEYFILE] - self.data[CONF_CERTFILE] = import_info[CONF_CERTFILE] - self.data[CONF_CA_CERTS] = import_info[CONF_CA_CERTS] + self.data[CONF_KEYFILE] = import_data[CONF_KEYFILE] + self.data[CONF_CERTFILE] = import_data[CONF_CERTFILE] + self.data[CONF_CA_CERTS] = import_data[CONF_CA_CERTS] if not (lutron_id := await self.async_get_lutron_id()): # Ultimately we won't have a dedicated step for import failure, but @@ -198,7 +200,9 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() return self.async_create_entry(title=ENTRY_DEFAULT_TITLE, data=self.data) - async def async_step_import_failed(self, user_input=None): + async def async_step_import_failed( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Make failed import surfaced to user.""" self.context["title_placeholders"] = {CONF_NAME: self.data[CONF_HOST]} diff --git a/homeassistant/components/lutron_caseta/cover.py b/homeassistant/components/lutron_caseta/cover.py index 3edb62c0d98..11da2220be9 100644 --- a/homeassistant/components/lutron_caseta/cover.py +++ b/homeassistant/components/lutron_caseta/cover.py @@ -5,7 +5,7 @@ from typing import Any from homeassistant.components.cover import ( ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, CoverDeviceClass, CoverEntity, CoverEntityFeature, @@ -13,11 +13,11 @@ from homeassistant.components.cover import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LutronCasetaDeviceUpdatableEntity +from .entity import LutronCasetaUpdatableEntity from .models import LutronCasetaConfigEntry -class LutronCasetaShade(LutronCasetaDeviceUpdatableEntity, CoverEntity): +class LutronCasetaShade(LutronCasetaUpdatableEntity, CoverEntity): """Representation of a Lutron shade with open/close functionality.""" _attr_supported_features = ( @@ -59,7 +59,7 @@ class LutronCasetaShade(LutronCasetaDeviceUpdatableEntity, CoverEntity): await self._smartbridge.set_value(self.device_id, kwargs[ATTR_POSITION]) -class LutronCasetaTiltOnlyBlind(LutronCasetaDeviceUpdatableEntity, CoverEntity): +class LutronCasetaTiltOnlyBlind(LutronCasetaUpdatableEntity, CoverEntity): """Representation of a Lutron tilt only blind.""" _attr_supported_features = ( @@ -122,7 +122,7 @@ async def async_setup_entry( """ data = config_entry.runtime_data bridge = data.bridge - cover_devices = bridge.get_devices_by_domain(DOMAIN) + cover_devices = bridge.get_devices_by_domain(COVER_DOMAIN) async_add_entities( # default to standard LutronCasetaCover type if the pylutron type is not yet mapped PYLUTRON_TYPE_TO_CLASSES.get(cover_device["type"], LutronCasetaShade)( diff --git a/homeassistant/components/lutron_caseta/entity.py b/homeassistant/components/lutron_caseta/entity.py new file mode 100644 index 00000000000..f954be74f1d --- /dev/null +++ b/homeassistant/components/lutron_caseta/entity.py @@ -0,0 +1,108 @@ +"""Component for interacting with a Lutron Caseta system.""" + +from __future__ import annotations + +import logging +from typing import Any + +from homeassistant.const import ATTR_SUGGESTED_AREA +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import CONFIG_URL, DOMAIN, MANUFACTURER, UNASSIGNED_AREA +from .models import LutronCasetaData +from .util import area_name_from_id, serial_to_unique_id + 
+_LOGGER = logging.getLogger(__name__) + + +class LutronCasetaEntity(Entity): + """Common base class for all Lutron Caseta devices.""" + + _attr_should_poll = False + + def __init__(self, device: dict[str, Any], data: LutronCasetaData) -> None: + """Set up the base class. + + [:param]device the device metadata + [:param]bridge the smartbridge object + [:param]bridge_device a dict with the details of the bridge + """ + self._device = device + self._smartbridge = data.bridge + self._bridge_device = data.bridge_device + self._bridge_unique_id = serial_to_unique_id(data.bridge_device["serial"]) + if "serial" not in self._device: + return + + if "parent_device" in device: + # This is a child entity, handle the naming in button.py and switch.py + return + area = area_name_from_id(self._smartbridge.areas, device["area"]) + name = device["name"].split("_")[-1] + self._attr_name = full_name = f"{area} {name}" + info = DeviceInfo( + # Historically we used the device serial number for the identifier + # but the serial is usually an integer and a string is expected + # here. Since it would be a breaking change to change the identifier + # we are ignoring the type error here until it can be migrated to + # a string in a future release. + identifiers={ + ( + DOMAIN, + self._handle_none_serial(self.serial), # type: ignore[arg-type] + ) + }, + manufacturer=MANUFACTURER, + model=f"{device['model']} ({device['type']})", + name=full_name, + via_device=(DOMAIN, self._bridge_device["serial"]), + configuration_url=CONFIG_URL, + ) + if area != UNASSIGNED_AREA: + info[ATTR_SUGGESTED_AREA] = area + self._attr_device_info = info + + async def async_added_to_hass(self): + """Register callbacks.""" + self._smartbridge.add_subscriber(self.device_id, self.async_write_ha_state) + + def _handle_none_serial(self, serial: str | int | None) -> str | int: + """Handle None serial returned by RA3 and QSX processors.""" + if serial is None: + return f"{self._bridge_unique_id}_{self.device_id}" + return serial + + @property + def device_id(self): + """Return the device ID used for calling pylutron_caseta.""" + return self._device["device_id"] + + @property + def serial(self) -> int | None: + """Return the serial number of the device.""" + return self._device["serial"] + + @property + def unique_id(self) -> str: + """Return the unique ID of the device (serial).""" + return str(self._handle_none_serial(self.serial)) + + @property + def extra_state_attributes(self): + """Return the state attributes.""" + attributes = { + "device_id": self.device_id, + } + if zone := self._device.get("zone"): + attributes["zone_id"] = zone + return attributes + + +class LutronCasetaUpdatableEntity(LutronCasetaEntity): + """A lutron_caseta entity that can update by syncing data from the bridge.""" + + async def async_update(self) -> None: + """Update when forcing a refresh of the device.""" + self._device = self._smartbridge.get_device_by_id(self.device_id) + _LOGGER.debug(self._device) diff --git a/homeassistant/components/lutron_caseta/fan.py b/homeassistant/components/lutron_caseta/fan.py index 1e7c0b2265c..69167929e14 100644 --- a/homeassistant/components/lutron_caseta/fan.py +++ b/homeassistant/components/lutron_caseta/fan.py @@ -6,7 +6,11 @@ from typing import Any from pylutron_caseta import FAN_HIGH, FAN_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_OFF -from homeassistant.components.fan import DOMAIN, FanEntity, FanEntityFeature +from homeassistant.components.fan import ( + DOMAIN as FAN_DOMAIN, + FanEntity, + FanEntityFeature, +) from 
homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.percentage import ( @@ -14,7 +18,7 @@ from homeassistant.util.percentage import ( percentage_to_ordered_list_item, ) -from . import LutronCasetaDeviceUpdatableEntity +from .entity import LutronCasetaUpdatableEntity from .models import LutronCasetaConfigEntry DEFAULT_ON_PERCENTAGE = 50 @@ -33,11 +37,11 @@ async def async_setup_entry( """ data = config_entry.runtime_data bridge = data.bridge - fan_devices = bridge.get_devices_by_domain(DOMAIN) + fan_devices = bridge.get_devices_by_domain(FAN_DOMAIN) async_add_entities(LutronCasetaFan(fan_device, data) for fan_device in fan_devices) -class LutronCasetaFan(LutronCasetaDeviceUpdatableEntity, FanEntity): +class LutronCasetaFan(LutronCasetaUpdatableEntity, FanEntity): """Representation of a Lutron Caseta fan. Including Fan Speed.""" _attr_supported_features = ( @@ -46,7 +50,6 @@ class LutronCasetaFan(LutronCasetaDeviceUpdatableEntity, FanEntity): | FanEntityFeature.TURN_ON ) _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) - _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int | None: diff --git a/homeassistant/components/lutron_caseta/light.py b/homeassistant/components/lutron_caseta/light.py index c0cf9449f87..146ed826c14 100644 --- a/homeassistant/components/lutron_caseta/light.py +++ b/homeassistant/components/lutron_caseta/light.py @@ -15,7 +15,7 @@ from homeassistant.components.light import ( ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_WHITE, - DOMAIN, + DOMAIN as LIGHT_DOMAIN, ColorMode, LightEntity, LightEntityFeature, @@ -24,8 +24,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import LutronCasetaDeviceUpdatableEntity from .const import DEVICE_TYPE_SPECTRUM_TUNE, DEVICE_TYPE_WHITE_TUNE +from .entity import LutronCasetaUpdatableEntity from .models import LutronCasetaData SUPPORTED_COLOR_MODE_DICT = { @@ -62,13 +62,13 @@ async def async_setup_entry( """ data = config_entry.runtime_data bridge = data.bridge - light_devices = bridge.get_devices_by_domain(DOMAIN) + light_devices = bridge.get_devices_by_domain(LIGHT_DOMAIN) async_add_entities( LutronCasetaLight(light_device, data) for light_device in light_devices ) -class LutronCasetaLight(LutronCasetaDeviceUpdatableEntity, LightEntity): +class LutronCasetaLight(LutronCasetaUpdatableEntity, LightEntity): """Representation of a Lutron Light, including dimmable, white tune, and spectrum tune.""" _attr_supported_features = LightEntityFeature.TRANSITION diff --git a/homeassistant/components/lutron_caseta/manifest.json b/homeassistant/components/lutron_caseta/manifest.json index 3c6348ed4da..ec278615743 100644 --- a/homeassistant/components/lutron_caseta/manifest.json +++ b/homeassistant/components/lutron_caseta/manifest.json @@ -1,7 +1,7 @@ { "domain": "lutron_caseta", "name": "Lutron Cas\u00e9ta", - "codeowners": ["@swails", "@bdraco", "@danaues", "@eclair4151"], + "codeowners": ["@swails", "@danaues", "@eclair4151"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/lutron_caseta", "homekit": { @@ -9,8 +9,14 @@ }, "iot_class": "local_push", "loggers": ["pylutron_caseta"], - "requirements": ["pylutron-caseta==0.21.1"], + "requirements": ["pylutron-caseta==0.22.0"], "zeroconf": [ + { + "type": "_lutron._tcp.local.", + "properties": { + "SYSTYPE": "hwqs*" + } + }, { "type": "_lutron._tcp.local.", "properties": { diff --git a/homeassistant/components/lutron_caseta/switch.py b/homeassistant/components/lutron_caseta/switch.py index b7ec5b58b04..5037d077a02 100644 --- a/homeassistant/components/lutron_caseta/switch.py +++ b/homeassistant/components/lutron_caseta/switch.py @@ -2,12 +2,12 @@ from typing import Any -from homeassistant.components.switch import DOMAIN, SwitchEntity +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import LutronCasetaDeviceUpdatableEntity +from .entity import LutronCasetaUpdatableEntity async def async_setup_entry( @@ -22,13 +22,13 @@ async def async_setup_entry( """ data = config_entry.runtime_data bridge = data.bridge - switch_devices = bridge.get_devices_by_domain(DOMAIN) + switch_devices = bridge.get_devices_by_domain(SWITCH_DOMAIN) async_add_entities( LutronCasetaLight(switch_device, data) for switch_device in switch_devices ) -class LutronCasetaLight(LutronCasetaDeviceUpdatableEntity, SwitchEntity): +class LutronCasetaLight(LutronCasetaUpdatableEntity, SwitchEntity): """Representation of a Lutron Caseta switch.""" def __init__(self, device, data): diff --git a/homeassistant/components/lutron_caseta/util.py b/homeassistant/components/lutron_caseta/util.py index 07b5b502fd0..d4f0a9083fe 100644 --- a/homeassistant/components/lutron_caseta/util.py +++ b/homeassistant/components/lutron_caseta/util.py @@ -2,7 +2,30 @@ from __future__ import annotations +from .const import UNASSIGNED_AREA + def serial_to_unique_id(serial: int) -> str: """Convert a lutron serial number to a unique id.""" return hex(serial)[2:].zfill(8) + + +def area_name_from_id(areas: dict[str, dict], area_id: str | None) -> str: + """Return the full area name including parent(s).""" + if area_id is None: + return UNASSIGNED_AREA + return _construct_area_name_from_id(areas, area_id, []) + + +def _construct_area_name_from_id( + areas: dict[str, dict], area_id: str, labels: list[str] +) -> str: + """Recursively construct the full area name including parent(s).""" + area = areas[area_id] + parent_area_id = area["parent_id"] + if parent_area_id is None: + # This is the root area, return last area + return " ".join(labels) + + labels.insert(0, area["name"]) + return _construct_area_name_from_id(areas, parent_area_id, labels) diff --git a/homeassistant/components/lw12wifi/manifest.json b/homeassistant/components/lw12wifi/manifest.json index d8b2290b234..683498f2056 100644 --- a/homeassistant/components/lw12wifi/manifest.json +++ b/homeassistant/components/lw12wifi/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/lw12wifi", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["lw12==0.9.2"] } diff --git a/homeassistant/components/lyric/__init__.py b/homeassistant/components/lyric/__init__.py index 6c35e084424..f99adf26999 100644 --- a/homeassistant/components/lyric/__init__.py +++ b/homeassistant/components/lyric/__init__.py @@ -10,9 +10,6 @@ import logging from aiohttp.client_exceptions import ClientResponseError from aiolyric import Lyric from aiolyric.exceptions import LyricAuthenticationException, LyricException -from aiolyric.objects.device import LyricDevice -from aiolyric.objects.location import LyricLocation -from aiolyric.objects.priority import LyricAccessory, LyricRoom from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform @@ -22,14 +19,8 @@ from homeassistant.helpers import ( aiohttp_client, config_entry_oauth2_flow, config_validation as cv, - device_registry as dr, -) -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, ) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .api import ( ConfigEntryLyricClient, @@ -104,6 +95,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = 
DataUpdateCoordinator[Lyric]( hass, _LOGGER, + config_entry=entry, # Name of the data. For logging purposes. name="lyric_coordinator", update_method=async_update_data, @@ -127,102 +119,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class LyricEntity(CoordinatorEntity[DataUpdateCoordinator[Lyric]]): - """Defines a base Honeywell Lyric entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: DataUpdateCoordinator[Lyric], - location: LyricLocation, - device: LyricDevice, - key: str, - ) -> None: - """Initialize the Honeywell Lyric entity.""" - super().__init__(coordinator) - self._key = key - self._location = location - self._mac_id = device.mac_id - self._update_thermostat = coordinator.data.update_thermostat - self._update_fan = coordinator.data.update_fan - - @property - def unique_id(self) -> str: - """Return the unique ID for this entity.""" - return self._key - - @property - def location(self) -> LyricLocation: - """Get the Lyric Location.""" - return self.coordinator.data.locations_dict[self._location.location_id] - - @property - def device(self) -> LyricDevice: - """Get the Lyric Device.""" - return self.location.devices_dict[self._mac_id] - - -class LyricDeviceEntity(LyricEntity): - """Defines a Honeywell Lyric device entity.""" - - @property - def device_info(self) -> DeviceInfo: - """Return device information about this Honeywell Lyric instance.""" - return DeviceInfo( - identifiers={(dr.CONNECTION_NETWORK_MAC, self._mac_id)}, - connections={(dr.CONNECTION_NETWORK_MAC, self._mac_id)}, - manufacturer="Honeywell", - model=self.device.device_model, - name=f"{self.device.name} Thermostat", - ) - - -class LyricAccessoryEntity(LyricDeviceEntity): - """Defines a Honeywell Lyric accessory entity, a sub-device of a thermostat.""" - - def __init__( - self, - coordinator: DataUpdateCoordinator[Lyric], - location: LyricLocation, - device: LyricDevice, - room: LyricRoom, - accessory: LyricAccessory, - key: str, - ) -> None: - """Initialize the Honeywell Lyric accessory entity.""" - super().__init__(coordinator, location, device, key) - self._room_id = room.id - self._accessory_id = accessory.id - - @property - def device_info(self) -> DeviceInfo: - """Return device information about this Honeywell Lyric instance.""" - return DeviceInfo( - identifiers={ - ( - f"{dr.CONNECTION_NETWORK_MAC}_room_accessory", - f"{self._mac_id}_room{self._room_id}_accessory{self._accessory_id}", - ) - }, - manufacturer="Honeywell", - model="RCHTSENSOR", - name=f"{self.room.room_name} Sensor", - via_device=(dr.CONNECTION_NETWORK_MAC, self._mac_id), - ) - - @property - def room(self) -> LyricRoom: - """Get the Lyric Device.""" - return self.coordinator.data.rooms_dict[self._mac_id][self._room_id] - - @property - def accessory(self) -> LyricAccessory: - """Get the Lyric Device.""" - return next( - accessory - for accessory in self.room.accessories - if accessory.id == self._accessory_id - ) diff --git a/homeassistant/components/lyric/api.py b/homeassistant/components/lyric/api.py index c9a424bf8ab..7399e013b96 100644 --- a/homeassistant/components/lyric/api.py +++ b/homeassistant/components/lyric/api.py @@ -36,8 +36,7 @@ class ConfigEntryLyricClient(LyricClient): async def async_get_access_token(self): """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return 
self._oauth_session.token["access_token"] diff --git a/homeassistant/components/lyric/climate.py b/homeassistant/components/lyric/climate.py index 1c459c2c66a..87b5d566bb8 100644 --- a/homeassistant/components/lyric/climate.py +++ b/homeassistant/components/lyric/climate.py @@ -40,7 +40,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . import LyricDeviceEntity from .const import ( DOMAIN, LYRIC_EXCEPTIONS, @@ -50,6 +49,7 @@ from .const import ( PRESET_TEMPORARY_HOLD, PRESET_VACATION_HOLD, ) +from .entity import LyricDeviceEntity _LOGGER = logging.getLogger(__name__) @@ -174,7 +174,6 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): PRESET_TEMPORARY_HOLD, PRESET_VACATION_HOLD, ] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -208,8 +207,11 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): if LYRIC_HVAC_MODE_COOL in device.allowed_modes: self._attr_hvac_modes.append(HVACMode.COOL) - if ( - LYRIC_HVAC_MODE_HEAT in device.allowed_modes + # TCC devices like the Lyric round do not have the Auto + # option in allowed_modes, but still support Auto mode + if LYRIC_HVAC_MODE_HEAT_COOL in device.allowed_modes or ( + self._attr_thermostat_type is LyricThermostatType.TCC + and LYRIC_HVAC_MODE_HEAT in device.allowed_modes and LYRIC_HVAC_MODE_COOL in device.allowed_modes ): self._attr_hvac_modes.append(HVACMode.HEAT_COOL) @@ -358,8 +360,8 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): await self._update_thermostat( self.location, device, - coolSetpoint=target_temp_high, - heatSetpoint=target_temp_low, + cool_setpoint=target_temp_high, + heat_setpoint=target_temp_low, mode=mode, ) except LYRIC_EXCEPTIONS as exception: @@ -371,11 +373,11 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): try: if self.hvac_mode == HVACMode.COOL: await self._update_thermostat( - self.location, device, coolSetpoint=temp + self.location, device, cool_setpoint=temp ) else: await self._update_thermostat( - self.location, device, heatSetpoint=temp + self.location, device, heat_setpoint=temp ) except LYRIC_EXCEPTIONS as exception: _LOGGER.error(exception) @@ -410,7 +412,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self.location, self.device, mode=HVAC_MODES[LYRIC_HVAC_MODE_HEAT], - autoChangeoverActive=False, + auto_changeover_active=False, ) # Sleep 3 seconds before proceeding await asyncio.sleep(3) @@ -422,7 +424,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self.location, self.device, mode=HVAC_MODES[LYRIC_HVAC_MODE_HEAT], - autoChangeoverActive=True, + auto_changeover_active=True, ) else: _LOGGER.debug( @@ -430,7 +432,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): HVAC_MODES[self.device.changeable_values.mode], ) await self._update_thermostat( - self.location, self.device, autoChangeoverActive=True + self.location, self.device, auto_changeover_active=True ) else: _LOGGER.debug("HVAC mode passed to lyric: %s", LYRIC_HVAC_MODES[hvac_mode]) @@ -438,13 +440,13 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self.location, self.device, mode=LYRIC_HVAC_MODES[hvac_mode], - autoChangeoverActive=False, + auto_changeover_active=False, ) async def _async_set_hvac_mode_lcc(self, hvac_mode: HVACMode) -> None: """Set hvac mode for LCC devices (e.g., T5,6).""" _LOGGER.debug("HVAC mode passed to lyric: %s", LYRIC_HVAC_MODES[hvac_mode]) - # Set autoChangeoverActive to True if the 
mode being passed is Auto + # Set auto_changeover_active to True if the mode being passed is Auto # otherwise leave unchanged. if ( LYRIC_HVAC_MODES[hvac_mode] == LYRIC_HVAC_MODE_HEAT_COOL @@ -458,7 +460,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self.location, self.device, mode=LYRIC_HVAC_MODES[hvac_mode], - autoChangeoverActive=auto_changeover, + auto_changeover_active=auto_changeover, ) async def async_set_preset_mode(self, preset_mode: str) -> None: @@ -466,7 +468,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): _LOGGER.debug("Set preset mode: %s", preset_mode) try: await self._update_thermostat( - self.location, self.device, thermostatSetpointStatus=preset_mode + self.location, self.device, thermostat_setpoint_status=preset_mode ) except LYRIC_EXCEPTIONS as exception: _LOGGER.error(exception) @@ -479,8 +481,8 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): await self._update_thermostat( self.location, self.device, - thermostatSetpointStatus=PRESET_HOLD_UNTIL, - nextPeriodTime=time_period, + thermostat_setpoint_status=PRESET_HOLD_UNTIL, + next_period_time=time_period, ) except LYRIC_EXCEPTIONS as exception: _LOGGER.error(exception) diff --git a/homeassistant/components/lyric/entity.py b/homeassistant/components/lyric/entity.py new file mode 100644 index 00000000000..5a5a76f1442 --- /dev/null +++ b/homeassistant/components/lyric/entity.py @@ -0,0 +1,114 @@ +"""The Honeywell Lyric integration.""" + +from __future__ import annotations + +from aiolyric import Lyric +from aiolyric.objects.device import LyricDevice +from aiolyric.objects.location import LyricLocation +from aiolyric.objects.priority import LyricAccessory, LyricRoom + +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + + +class LyricEntity(CoordinatorEntity[DataUpdateCoordinator[Lyric]]): + """Defines a base Honeywell Lyric entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: DataUpdateCoordinator[Lyric], + location: LyricLocation, + device: LyricDevice, + key: str, + ) -> None: + """Initialize the Honeywell Lyric entity.""" + super().__init__(coordinator) + self._key = key + self._location = location + self._mac_id = device.mac_id + self._update_thermostat = coordinator.data.update_thermostat + self._update_fan = coordinator.data.update_fan + + @property + def unique_id(self) -> str: + """Return the unique ID for this entity.""" + return self._key + + @property + def location(self) -> LyricLocation: + """Get the Lyric Location.""" + return self.coordinator.data.locations_dict[self._location.location_id] + + @property + def device(self) -> LyricDevice: + """Get the Lyric Device.""" + return self.location.devices_dict[self._mac_id] + + +class LyricDeviceEntity(LyricEntity): + """Defines a Honeywell Lyric device entity.""" + + @property + def device_info(self) -> DeviceInfo: + """Return device information about this Honeywell Lyric instance.""" + return DeviceInfo( + identifiers={(dr.CONNECTION_NETWORK_MAC, self._mac_id)}, + connections={(dr.CONNECTION_NETWORK_MAC, self._mac_id)}, + manufacturer="Honeywell", + model=self.device.device_model, + name=f"{self.device.name} Thermostat", + ) + + +class LyricAccessoryEntity(LyricDeviceEntity): + """Defines a Honeywell Lyric accessory entity, a sub-device of a thermostat.""" + + def __init__( + self, + coordinator: DataUpdateCoordinator[Lyric], + 
location: LyricLocation, + device: LyricDevice, + room: LyricRoom, + accessory: LyricAccessory, + key: str, + ) -> None: + """Initialize the Honeywell Lyric accessory entity.""" + super().__init__(coordinator, location, device, key) + self._room_id = room.id + self._accessory_id = accessory.id + + @property + def device_info(self) -> DeviceInfo: + """Return device information about this Honeywell Lyric instance.""" + return DeviceInfo( + identifiers={ + ( + f"{dr.CONNECTION_NETWORK_MAC}_room_accessory", + f"{self._mac_id}_room{self._room_id}_accessory{self._accessory_id}", + ) + }, + manufacturer="Honeywell", + model="RCHTSENSOR", + name=f"{self.room.room_name} Sensor", + via_device=(dr.CONNECTION_NETWORK_MAC, self._mac_id), + ) + + @property + def room(self) -> LyricRoom: + """Get the Lyric Device.""" + return self.coordinator.data.rooms_dict[self._mac_id][self._room_id] + + @property + def accessory(self) -> LyricAccessory: + """Get the Lyric Device.""" + return next( + accessory + for accessory in self.room.accessories + if accessory.id == self._accessory_id + ) diff --git a/homeassistant/components/lyric/icons.json b/homeassistant/components/lyric/icons.json index 555215f8685..edb61c3f8e2 100644 --- a/homeassistant/components/lyric/icons.json +++ b/homeassistant/components/lyric/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "set_hold_time": "mdi:timer-pause" + "set_hold_time": { + "service": "mdi:timer-pause" + } } } diff --git a/homeassistant/components/lyric/manifest.json b/homeassistant/components/lyric/manifest.json index 8bed909ace2..cca69969f70 100644 --- a/homeassistant/components/lyric/manifest.json +++ b/homeassistant/components/lyric/manifest.json @@ -21,6 +21,5 @@ "documentation": "https://www.home-assistant.io/integrations/lyric", "iot_class": "cloud_polling", "loggers": ["aiolyric"], - "quality_scale": "silver", "requirements": ["aiolyric==2.0.1"] } diff --git a/homeassistant/components/lyric/sensor.py b/homeassistant/components/lyric/sensor.py index 7e006bc7bfe..38cb895a110 100644 --- a/homeassistant/components/lyric/sensor.py +++ b/homeassistant/components/lyric/sensor.py @@ -25,7 +25,6 @@ from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.util import dt as dt_util -from . 
import LyricAccessoryEntity, LyricDeviceEntity from .const import ( DOMAIN, PRESET_HOLD_UNTIL, @@ -34,6 +33,7 @@ from .const import ( PRESET_TEMPORARY_HOLD, PRESET_VACATION_HOLD, ) +from .entity import LyricAccessoryEntity, LyricDeviceEntity LYRIC_SETPOINT_STATUS_NAMES = { PRESET_NO_HOLD: "Following Schedule", diff --git a/homeassistant/components/lyric/strings.json b/homeassistant/components/lyric/strings.json index 739ad7fad68..83c65359643 100644 --- a/homeassistant/components/lyric/strings.json +++ b/homeassistant/components/lyric/strings.json @@ -16,7 +16,8 @@ "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" diff --git a/homeassistant/components/madvr/__init__.py b/homeassistant/components/madvr/__init__.py index a6ad3b2d1fd..bb42adb21fc 100644 --- a/homeassistant/components/madvr/__init__.py +++ b/homeassistant/components/madvr/__init__.py @@ -8,7 +8,7 @@ from madvr.madvr import Madvr from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP, Platform -from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant from .coordinator import MadVRCoordinator @@ -47,7 +47,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: MadVRConfigEntry) -> boo await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - @callback async def handle_unload(event: Event) -> None: """Handle unload.""" await async_handle_unload(coordinator=coordinator) diff --git a/homeassistant/components/madvr/config_flow.py b/homeassistant/components/madvr/config_flow.py index 1ca1dd296d8..60f7b8fc481 100644 --- a/homeassistant/components/madvr/config_flow.py +++ b/homeassistant/components/madvr/config_flow.py @@ -8,7 +8,11 @@ import aiohttp from madvr.madvr import HeartBeatError, Madvr import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant @@ -32,8 +36,6 @@ class MadVRConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -42,13 +44,6 @@ class MadVRConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reconfiguration of the device.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reconfigure_confirm(user_input) - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" return await self._handle_config_step(user_input, step_id="reconfigure") @@ -75,23 +70,16 @@ class MadVRConfigFlow(ConfigFlow, domain=DOMAIN): else: 
_LOGGER.debug("MAC address found: %s", mac) # abort if the detected mac differs from the one in the entry - if self.entry: - existing_mac = self.entry.unique_id - if existing_mac != mac: - _LOGGER.debug( - "MAC address changed from %s to %s", existing_mac, mac - ) - # abort - return self.async_abort(reason="set_up_new_device") + await self.async_set_unique_id(mac) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="set_up_new_device") _LOGGER.debug("Reconfiguration done") return self.async_update_reload_and_abort( - entry=self.entry, + entry=self._get_reconfigure_entry(), data={**user_input, CONF_HOST: host, CONF_PORT: port}, - reason="reconfigure_successful", ) # abort if already configured with same mac - await self.async_set_unique_id(mac) self._abort_if_unique_id_configured(updates={CONF_HOST: host}) _LOGGER.debug("Configuration successful") diff --git a/homeassistant/components/madvr/diagnostics.py b/homeassistant/components/madvr/diagnostics.py new file mode 100644 index 00000000000..f6261d27305 --- /dev/null +++ b/homeassistant/components/madvr/diagnostics.py @@ -0,0 +1,25 @@ +"""Provides diagnostics for madVR.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from . import MadVRConfigEntry + +TO_REDACT = [CONF_HOST] + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: MadVRConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = config_entry.runtime_data.data + + return { + "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), + "madvr_data": data, + } diff --git a/homeassistant/components/madvr/manifest.json b/homeassistant/components/madvr/manifest.json index ce6336acabc..0ac906fdbef 100644 --- a/homeassistant/components/madvr/manifest.json +++ b/homeassistant/components/madvr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/madvr", "integration_type": "device", "iot_class": "local_push", - "requirements": ["py-madvr2==1.6.29"] + "requirements": ["py-madvr2==1.6.32"] } diff --git a/homeassistant/components/madvr/strings.json b/homeassistant/components/madvr/strings.json index b8d30be23aa..1a4f0f79aae 100644 --- a/homeassistant/components/madvr/strings.json +++ b/homeassistant/components/madvr/strings.json @@ -3,7 +3,7 @@ "step": { "user": { "title": "Setup madVR Envy", - "description": "Your device needs to be on in order to add the integation. ", + "description": "Your device needs to be on in order to add the integation.", "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" @@ -15,7 +15,7 @@ }, "reconfigure": { "title": "Reconfigure madVR Envy", - "description": "Your device needs to be on in order to reconfigure the integation. ", + "description": "Your device needs to be on in order to reconfigure the integation.", "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" @@ -28,12 +28,12 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "set_up_new_device": "A new device was detected. 
Please set it up as a new entity instead of reconfiguring." }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "no_mac": "A MAC address was not found. It required to identify the device. Please ensure your device is connectable.", - "set_up_new_device": "A new device was detected. Please set it up as a new entity instead of reconfiguring." + "no_mac": "A MAC address was not found. It required to identify the device. Please ensure your device is connectable." } }, "entity": { diff --git a/homeassistant/components/mailbox/__init__.py b/homeassistant/components/mailbox/__init__.py deleted file mode 100644 index e0438342a54..00000000000 --- a/homeassistant/components/mailbox/__init__.py +++ /dev/null @@ -1,291 +0,0 @@ -"""Support for Voice mailboxes.""" - -from __future__ import annotations - -import asyncio -from contextlib import suppress -from datetime import timedelta -from http import HTTPStatus -import logging -from typing import Any, Final - -from aiohttp import web -from aiohttp.web_exceptions import HTTPNotFound - -from homeassistant.components import frontend -from homeassistant.components.http import HomeAssistantView -from homeassistant.config import config_per_platform -from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import config_validation as cv, discovery -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.entity_component import EntityComponent -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.setup import async_prepare_setup_platform - -_LOGGER = logging.getLogger(__name__) - -DOMAIN: Final = "mailbox" - -EVENT: Final = "mailbox_updated" -CONTENT_TYPE_MPEG: Final = "audio/mpeg" -CONTENT_TYPE_NONE: Final = "none" - -SCAN_INTERVAL = timedelta(seconds=30) - -CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Track states and offer events for mailboxes.""" - mailboxes: list[Mailbox] = [] - frontend.async_register_built_in_panel(hass, "mailbox", "mailbox", "mdi:mailbox") - hass.http.register_view(MailboxPlatformsView(mailboxes)) - hass.http.register_view(MailboxMessageView(mailboxes)) - hass.http.register_view(MailboxMediaView(mailboxes)) - hass.http.register_view(MailboxDeleteView(mailboxes)) - - async def async_setup_platform( - p_type: str, - p_config: ConfigType | None = None, - discovery_info: DiscoveryInfoType | None = None, - ) -> None: - """Set up a mailbox platform.""" - if p_config is None: - p_config = {} - if discovery_info is None: - discovery_info = {} - - platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type) - - if platform is None: - _LOGGER.error("Unknown mailbox platform specified") - return - - if p_type not in ["asterisk_cdr", "asterisk_mbox", "demo"]: - # Asterisk integration will raise a repair issue themselves - # For demo we don't create one - async_create_issue( - hass, - DOMAIN, - f"deprecated_mailbox_{p_type}", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_mailbox_integration", - translation_placeholders={ - "integration_domain": p_type, - }, - ) - - _LOGGER.info("Setting up %s.%s", DOMAIN, p_type) - mailbox = None - try: - if hasattr(platform, "async_get_handler"): - mailbox = 
await platform.async_get_handler( - hass, p_config, discovery_info - ) - elif hasattr(platform, "get_handler"): - mailbox = await hass.async_add_executor_job( - platform.get_handler, hass, p_config, discovery_info - ) - else: - raise HomeAssistantError("Invalid mailbox platform.") # noqa: TRY301 - - if mailbox is None: - _LOGGER.error("Failed to initialize mailbox platform %s", p_type) - return - - except Exception: - _LOGGER.exception("Error setting up platform %s", p_type) - return - - mailboxes.append(mailbox) - mailbox_entity = MailboxEntity(mailbox) - component = EntityComponent[MailboxEntity]( - logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL - ) - component.register_shutdown() - await component.async_add_entities([mailbox_entity]) - - for p_type, p_config in config_per_platform(config, DOMAIN): - if p_type is not None: - hass.async_create_task( - async_setup_platform(p_type, p_config), eager_start=True - ) - - async def async_platform_discovered( - platform: str, info: DiscoveryInfoType | None - ) -> None: - """Handle for discovered platform.""" - await async_setup_platform(platform, discovery_info=info) - - discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered) - - return True - - -class MailboxEntity(Entity): - """Entity for each mailbox platform to provide a badge display.""" - - def __init__(self, mailbox: Mailbox) -> None: - """Initialize mailbox entity.""" - self.mailbox = mailbox - self.message_count = 0 - - async def async_added_to_hass(self) -> None: - """Complete entity initialization.""" - - @callback - def _mailbox_updated(event: Event) -> None: - self.async_schedule_update_ha_state(True) - - self.hass.bus.async_listen(EVENT, _mailbox_updated) - self.async_schedule_update_ha_state(True) - - @property - def state(self) -> str: - """Return the state of the binary sensor.""" - return str(self.message_count) - - @property - def name(self) -> str: - """Return the name of the entity.""" - return self.mailbox.name - - async def async_update(self) -> None: - """Retrieve messages from platform.""" - messages = await self.mailbox.async_get_messages() - self.message_count = len(messages) - - -class Mailbox: - """Represent a mailbox device.""" - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize mailbox object.""" - self.hass = hass - self.name = name - - @callback - def async_update(self) -> None: - """Send event notification of updated mailbox.""" - self.hass.bus.async_fire(EVENT) - - @property - def media_type(self) -> str: - """Return the supported media type.""" - raise NotImplementedError - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return False - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return False - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - raise NotImplementedError - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - raise NotImplementedError - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - raise NotImplementedError - - -class StreamError(Exception): - """Media streaming exception.""" - - -class MailboxView(HomeAssistantView): - """Base mailbox view.""" - - def __init__(self, mailboxes: list[Mailbox]) -> None: - """Initialize a basic mailbox view.""" - self.mailboxes = mailboxes - - def get_mailbox(self, platform: str) -> Mailbox: - """Retrieve the 
specified mailbox.""" - for mailbox in self.mailboxes: - if mailbox.name == platform: - return mailbox - raise HTTPNotFound - - -class MailboxPlatformsView(MailboxView): - """View to return the list of mailbox platforms.""" - - url = "/api/mailbox/platforms" - name = "api:mailbox:platforms" - - async def get(self, request: web.Request) -> web.Response: - """Retrieve list of platforms.""" - return self.json( - [ - { - "name": mailbox.name, - "has_media": mailbox.has_media, - "can_delete": mailbox.can_delete, - } - for mailbox in self.mailboxes - ] - ) - - -class MailboxMessageView(MailboxView): - """View to return the list of messages.""" - - url = "/api/mailbox/messages/{platform}" - name = "api:mailbox:messages" - - async def get(self, request: web.Request, platform: str) -> web.Response: - """Retrieve messages.""" - mailbox = self.get_mailbox(platform) - messages = await mailbox.async_get_messages() - return self.json(messages) - - -class MailboxDeleteView(MailboxView): - """View to delete selected messages.""" - - url = "/api/mailbox/delete/{platform}/{msgid}" - name = "api:mailbox:delete" - - async def delete(self, request: web.Request, platform: str, msgid: str) -> None: - """Delete items.""" - mailbox = self.get_mailbox(platform) - await mailbox.async_delete(msgid) - - -class MailboxMediaView(MailboxView): - """View to return a media file.""" - - url = r"/api/mailbox/media/{platform}/{msgid}" - name = "api:asteriskmbox:media" - - async def get( - self, request: web.Request, platform: str, msgid: str - ) -> web.Response: - """Retrieve media.""" - mailbox = self.get_mailbox(platform) - - with suppress(asyncio.CancelledError, TimeoutError): - async with asyncio.timeout(10): - try: - stream = await mailbox.async_get_media(msgid) - except StreamError as err: - _LOGGER.error("Error getting media: %s", err) - return web.Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) - if stream: - return web.Response(body=stream, content_type=mailbox.media_type) - - return web.Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) diff --git a/homeassistant/components/mailbox/manifest.json b/homeassistant/components/mailbox/manifest.json deleted file mode 100644 index 43dd133654c..00000000000 --- a/homeassistant/components/mailbox/manifest.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "domain": "mailbox", - "name": "Mailbox", - "codeowners": [], - "dependencies": ["http"], - "documentation": "https://www.home-assistant.io/integrations/mailbox", - "integration_type": "entity", - "quality_scale": "internal" -} diff --git a/homeassistant/components/mailbox/strings.json b/homeassistant/components/mailbox/strings.json deleted file mode 100644 index 01746e3e98d..00000000000 --- a/homeassistant/components/mailbox/strings.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "title": "Mailbox", - "issues": { - "deprecated_mailbox": { - "title": "The mailbox platform is being removed", - "description": "The mailbox platform is being removed. Please report it to the author of the \"{integration_domain}\" custom integration." 
- } - } -} diff --git a/homeassistant/components/manual/alarm_control_panel.py b/homeassistant/components/manual/alarm_control_panel.py index c1910d0dfa1..244f38e0902 100644 --- a/homeassistant/components/manual/alarm_control_panel.py +++ b/homeassistant/components/manual/alarm_control_panel.py @@ -11,6 +11,7 @@ from homeassistant.components.alarm_control_panel import ( PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.const import ( @@ -21,15 +22,6 @@ from homeassistant.const import ( CONF_NAME, CONF_TRIGGER_TIME, CONF_UNIQUE_ID, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError @@ -47,6 +39,16 @@ CONF_ARMING_STATES = "arming_states" CONF_CODE_TEMPLATE = "code_template" CONF_CODE_ARM_REQUIRED = "code_arm_required" +CONF_ALARM_ARMED_AWAY = "armed_away" +CONF_ALARM_ARMED_CUSTOM_BYPASS = "armed_custom_bypass" +CONF_ALARM_ARMED_HOME = "armed_home" +CONF_ALARM_ARMED_NIGHT = "armed_night" +CONF_ALARM_ARMED_VACATION = "armed_vacation" +CONF_ALARM_ARMING = "arming" +CONF_ALARM_DISARMED = "disarmed" +CONF_ALARM_PENDING = "pending" +CONF_ALARM_TRIGGERED = "triggered" + DEFAULT_ALARM_NAME = "HA Alarm" DEFAULT_DELAY_TIME = datetime.timedelta(seconds=60) DEFAULT_ARMING_TIME = datetime.timedelta(seconds=60) @@ -54,39 +56,46 @@ DEFAULT_TRIGGER_TIME = datetime.timedelta(seconds=120) DEFAULT_DISARM_AFTER_TRIGGER = False SUPPORTED_STATES = [ - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.TRIGGERED, ] SUPPORTED_PRETRIGGER_STATES = [ - state for state in SUPPORTED_STATES if state != STATE_ALARM_TRIGGERED + state for state in SUPPORTED_STATES if state != AlarmControlPanelState.TRIGGERED ] SUPPORTED_ARMING_STATES = [ state for state in SUPPORTED_STATES - if state not in (STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED) + if state + not in ( + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.TRIGGERED, + ) ] SUPPORTED_ARMING_STATE_TO_FEATURE = { - STATE_ALARM_ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, - STATE_ALARM_ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, - STATE_ALARM_ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, - STATE_ALARM_ARMED_VACATION: AlarmControlPanelEntityFeature.ARM_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, + AlarmControlPanelState.ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, + AlarmControlPanelState.ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, + AlarmControlPanelState.ARMED_VACATION: AlarmControlPanelEntityFeature.ARM_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, } ATTR_PREVIOUS_STATE = "previous_state" ATTR_NEXT_STATE = "next_state" -def 
_state_validator(config: dict[str, Any]) -> dict[str, Any]: +def _state_validator( + config: dict[AlarmControlPanelState | str, Any], +) -> dict[str, Any]: """Validate the state.""" + state: AlarmControlPanelState for state in SUPPORTED_PRETRIGGER_STATES: if CONF_DELAY_TIME not in config[state]: config[state] = config[state] | {CONF_DELAY_TIME: config[CONF_DELAY_TIME]} @@ -142,26 +151,26 @@ PLATFORM_SCHEMA = vol.Schema( vol.Optional( CONF_ARMING_STATES, default=SUPPORTED_ARMING_STATES ): vol.All(cv.ensure_list, [vol.In(SUPPORTED_ARMING_STATES)]), - vol.Optional(STATE_ALARM_ARMED_AWAY, default={}): _state_schema( - STATE_ALARM_ARMED_AWAY + vol.Optional(CONF_ALARM_ARMED_AWAY, default={}): _state_schema( + AlarmControlPanelState.ARMED_AWAY ), - vol.Optional(STATE_ALARM_ARMED_HOME, default={}): _state_schema( - STATE_ALARM_ARMED_HOME + vol.Optional(CONF_ALARM_ARMED_HOME, default={}): _state_schema( + AlarmControlPanelState.ARMED_HOME ), - vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}): _state_schema( - STATE_ALARM_ARMED_NIGHT + vol.Optional(CONF_ALARM_ARMED_NIGHT, default={}): _state_schema( + AlarmControlPanelState.ARMED_NIGHT ), - vol.Optional(STATE_ALARM_ARMED_VACATION, default={}): _state_schema( - STATE_ALARM_ARMED_VACATION + vol.Optional(CONF_ALARM_ARMED_VACATION, default={}): _state_schema( + AlarmControlPanelState.ARMED_VACATION ), - vol.Optional( - STATE_ALARM_ARMED_CUSTOM_BYPASS, default={} - ): _state_schema(STATE_ALARM_ARMED_CUSTOM_BYPASS), - vol.Optional(STATE_ALARM_DISARMED, default={}): _state_schema( - STATE_ALARM_DISARMED + vol.Optional(CONF_ALARM_ARMED_CUSTOM_BYPASS, default={}): _state_schema( + AlarmControlPanelState.ARMED_CUSTOM_BYPASS ), - vol.Optional(STATE_ALARM_TRIGGERED, default={}): _state_schema( - STATE_ALARM_TRIGGERED + vol.Optional(CONF_ALARM_DISARMED, default={}): _state_schema( + AlarmControlPanelState.DISARMED + ), + vol.Optional(CONF_ALARM_TRIGGERED, default={}): _state_schema( + AlarmControlPanelState.TRIGGERED ), }, ), @@ -217,25 +226,25 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): config: dict[str, Any], ) -> None: """Init the manual alarm panel.""" - self._state = STATE_ALARM_DISARMED + self._state: AlarmControlPanelState = AlarmControlPanelState.DISARMED self._hass = hass self._attr_name = name self._attr_unique_id = unique_id self._code = code_template or code or None self._attr_code_arm_required = code_arm_required self._disarm_after_trigger = disarm_after_trigger - self._previous_state = self._state + self._previous_state: AlarmControlPanelState = self._state self._state_ts: datetime.datetime = dt_util.utcnow() - self._delay_time_by_state = { + self._delay_time_by_state: dict[AlarmControlPanelState, Any] = { state: config[state][CONF_DELAY_TIME] for state in SUPPORTED_PRETRIGGER_STATES } - self._trigger_time_by_state = { + self._trigger_time_by_state: dict[AlarmControlPanelState, Any] = { state: config[state][CONF_TRIGGER_TIME] for state in SUPPORTED_PRETRIGGER_STATES } - self._arming_time_by_state = { + self._arming_time_by_state: dict[AlarmControlPanelState, Any] = { state: config[state][CONF_ARMING_TIME] for state in SUPPORTED_ARMING_STATES } @@ -246,11 +255,11 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): ] @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the device.""" - if self._state == STATE_ALARM_TRIGGERED: + if self._state == AlarmControlPanelState.TRIGGERED: if self._within_pending_time(self._state): - return STATE_ALARM_PENDING + return 
AlarmControlPanelState.PENDING trigger_time: datetime.timedelta = self._trigger_time_by_state[ self._previous_state ] @@ -258,39 +267,42 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): self._state_ts + self._pending_time(self._state) + trigger_time ) < dt_util.utcnow(): if self._disarm_after_trigger: - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED self._state = self._previous_state return self._state if self._state in SUPPORTED_ARMING_STATES and self._within_arming_time( self._state ): - return STATE_ALARM_ARMING + return AlarmControlPanelState.ARMING return self._state @property - def _active_state(self) -> str: + def _active_state(self) -> AlarmControlPanelState: """Get the current state.""" - if self.state in (STATE_ALARM_PENDING, STATE_ALARM_ARMING): + if self.state in ( + AlarmControlPanelState.PENDING, + AlarmControlPanelState.ARMING, + ): return self._previous_state return self._state - def _arming_time(self, state: str) -> datetime.timedelta: + def _arming_time(self, state: AlarmControlPanelState) -> datetime.timedelta: """Get the arming time.""" arming_time: datetime.timedelta = self._arming_time_by_state[state] return arming_time - def _pending_time(self, state: str) -> datetime.timedelta: + def _pending_time(self, state: AlarmControlPanelState) -> datetime.timedelta: """Get the pending time.""" delay_time: datetime.timedelta = self._delay_time_by_state[self._previous_state] return delay_time - def _within_arming_time(self, state: str) -> bool: + def _within_arming_time(self, state: AlarmControlPanelState) -> bool: """Get if the action is in the arming time window.""" return self._state_ts + self._arming_time(state) > dt_util.utcnow() - def _within_pending_time(self, state: str) -> bool: + def _within_pending_time(self, state: AlarmControlPanelState) -> bool: """Get if the action is in the pending time window.""" return self._state_ts + self._pending_time(state) > dt_util.utcnow() @@ -305,35 +317,35 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - self._async_validate_code(code, STATE_ALARM_DISARMED) - self._state = STATE_ALARM_DISARMED + self._async_validate_code(code, AlarmControlPanelState.DISARMED) + self._state = AlarmControlPanelState.DISARMED self._state_ts = dt_util.utcnow() self.async_write_ha_state() async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_HOME) - self._async_update_state(STATE_ALARM_ARMED_HOME) + self._async_validate_code(code, AlarmControlPanelState.ARMED_HOME) + self._async_update_state(AlarmControlPanelState.ARMED_HOME) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_AWAY) - self._async_update_state(STATE_ALARM_ARMED_AWAY) + self._async_validate_code(code, AlarmControlPanelState.ARMED_AWAY) + self._async_update_state(AlarmControlPanelState.ARMED_AWAY) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_NIGHT) - self._async_update_state(STATE_ALARM_ARMED_NIGHT) + self._async_validate_code(code, AlarmControlPanelState.ARMED_NIGHT) + self._async_update_state(AlarmControlPanelState.ARMED_NIGHT) async def async_alarm_arm_vacation(self, code: str | None = None) -> None: """Send arm vacation command.""" - 
self._async_validate_code(code, STATE_ALARM_ARMED_VACATION) - self._async_update_state(STATE_ALARM_ARMED_VACATION) + self._async_validate_code(code, AlarmControlPanelState.ARMED_VACATION) + self._async_update_state(AlarmControlPanelState.ARMED_VACATION) async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None: """Send arm custom bypass command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_CUSTOM_BYPASS) - self._async_update_state(STATE_ALARM_ARMED_CUSTOM_BYPASS) + self._async_validate_code(code, AlarmControlPanelState.ARMED_CUSTOM_BYPASS) + self._async_update_state(AlarmControlPanelState.ARMED_CUSTOM_BYPASS) async def async_alarm_trigger(self, code: str | None = None) -> None: """Send alarm trigger command. @@ -343,9 +355,9 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): """ if not self._trigger_time_by_state[self._active_state]: return - self._async_update_state(STATE_ALARM_TRIGGERED) + self._async_update_state(AlarmControlPanelState.TRIGGERED) - def _async_update_state(self, state: str) -> None: + def _async_update_state(self, state: AlarmControlPanelState) -> None: """Update the state.""" if self._state == state: return @@ -358,7 +370,7 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): def _async_set_state_update_events(self) -> None: state = self._state - if state == STATE_ALARM_TRIGGERED: + if state == AlarmControlPanelState.TRIGGERED: pending_time = self._pending_time(state) async_track_point_in_time( self._hass, self.async_scheduled_update, self._state_ts + pending_time @@ -382,7 +394,7 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): def _async_validate_code(self, code: str | None, state: str) -> None: """Validate given code.""" if ( - state != STATE_ALARM_DISARMED and not self.code_arm_required + state != AlarmControlPanelState.DISARMED and not self.code_arm_required ) or self._code is None: return @@ -405,10 +417,13 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - if self.state in (STATE_ALARM_PENDING, STATE_ALARM_ARMING): + if self.state in ( + AlarmControlPanelState.PENDING, + AlarmControlPanelState.ARMING, + ): prev_state: str | None = self._previous_state state: str | None = self._state - elif self.state == STATE_ALARM_TRIGGERED: + elif self.state == AlarmControlPanelState.TRIGGERED: prev_state = self._previous_state state = None else: @@ -429,9 +444,9 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): if next_state := state.attributes.get(ATTR_NEXT_STATE): # If in arming or pending state we record the transition, # not the current state - self._state = next_state + self._state = AlarmControlPanelState(next_state) else: - self._state = state.state + self._state = AlarmControlPanelState(state.state) if prev_state := state.attributes.get(ATTR_PREVIOUS_STATE): self._previous_state = prev_state diff --git a/homeassistant/components/manual_mqtt/alarm_control_panel.py b/homeassistant/components/manual_mqtt/alarm_control_panel.py index 8d447bbc8ac..768690e8ec5 100644 --- a/homeassistant/components/manual_mqtt/alarm_control_panel.py +++ b/homeassistant/components/manual_mqtt/alarm_control_panel.py @@ -12,6 +12,7 @@ from homeassistant.components import mqtt from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.const import ( @@ -22,14 +23,6 @@ from homeassistant.const import 
( CONF_PENDING_TIME, CONF_PLATFORM, CONF_TRIGGER_TIME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -54,6 +47,15 @@ CONF_PAYLOAD_ARM_NIGHT = "payload_arm_night" CONF_PAYLOAD_ARM_VACATION = "payload_arm_vacation" CONF_PAYLOAD_ARM_CUSTOM_BYPASS = "payload_arm_custom_bypass" +CONF_ALARM_ARMED_AWAY = "armed_away" +CONF_ALARM_ARMED_CUSTOM_BYPASS = "armed_custom_bypass" +CONF_ALARM_ARMED_HOME = "armed_home" +CONF_ALARM_ARMED_NIGHT = "armed_night" +CONF_ALARM_ARMED_VACATION = "armed_vacation" +CONF_ALARM_DISARMED = "disarmed" +CONF_ALARM_PENDING = "pending" +CONF_ALARM_TRIGGERED = "triggered" + DEFAULT_ALARM_NAME = "HA Alarm" DEFAULT_DELAY_TIME = datetime.timedelta(seconds=0) DEFAULT_PENDING_TIME = datetime.timedelta(seconds=60) @@ -67,21 +69,21 @@ DEFAULT_ARM_CUSTOM_BYPASS = "ARM_CUSTOM_BYPASS" DEFAULT_DISARM = "DISARM" SUPPORTED_STATES = [ - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.TRIGGERED, ] SUPPORTED_PRETRIGGER_STATES = [ - state for state in SUPPORTED_STATES if state != STATE_ALARM_TRIGGERED + state for state in SUPPORTED_STATES if state != AlarmControlPanelState.TRIGGERED ] SUPPORTED_PENDING_STATES = [ - state for state in SUPPORTED_STATES if state != STATE_ALARM_DISARMED + state for state in SUPPORTED_STATES if state != AlarmControlPanelState.DISARMED ] ATTR_PRE_PENDING_STATE = "pre_pending_state" @@ -143,26 +145,26 @@ PLATFORM_SCHEMA = vol.Schema( vol.Optional( CONF_DISARM_AFTER_TRIGGER, default=DEFAULT_DISARM_AFTER_TRIGGER ): cv.boolean, - vol.Optional(STATE_ALARM_ARMED_AWAY, default={}): _state_schema( - STATE_ALARM_ARMED_AWAY + vol.Optional(CONF_ALARM_ARMED_AWAY, default={}): _state_schema( + AlarmControlPanelState.ARMED_AWAY ), - vol.Optional(STATE_ALARM_ARMED_HOME, default={}): _state_schema( - STATE_ALARM_ARMED_HOME + vol.Optional(CONF_ALARM_ARMED_HOME, default={}): _state_schema( + AlarmControlPanelState.ARMED_HOME ), - vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}): _state_schema( - STATE_ALARM_ARMED_NIGHT + vol.Optional(CONF_ALARM_ARMED_NIGHT, default={}): _state_schema( + AlarmControlPanelState.ARMED_NIGHT ), - vol.Optional(STATE_ALARM_ARMED_VACATION, default={}): _state_schema( - STATE_ALARM_ARMED_VACATION + vol.Optional(CONF_ALARM_ARMED_VACATION, default={}): _state_schema( + AlarmControlPanelState.ARMED_VACATION ), - vol.Optional( - STATE_ALARM_ARMED_CUSTOM_BYPASS, default={} - ): _state_schema(STATE_ALARM_ARMED_CUSTOM_BYPASS), - vol.Optional(STATE_ALARM_DISARMED, default={}): _state_schema( - STATE_ALARM_DISARMED + vol.Optional(CONF_ALARM_ARMED_CUSTOM_BYPASS, default={}): _state_schema( + AlarmControlPanelState.ARMED_CUSTOM_BYPASS ), - vol.Optional(STATE_ALARM_TRIGGERED, default={}): _state_schema( - STATE_ALARM_TRIGGERED + vol.Optional(CONF_ALARM_DISARMED, default={}): _state_schema( + AlarmControlPanelState.DISARMED + ), + 
vol.Optional(CONF_ALARM_TRIGGERED, default={}): _state_schema( + AlarmControlPanelState.TRIGGERED ), vol.Required(mqtt.CONF_COMMAND_TOPIC): mqtt.valid_publish_topic, vol.Required(mqtt.CONF_STATE_TOPIC): mqtt.valid_subscribe_topic, @@ -268,7 +270,7 @@ class ManualMQTTAlarm(AlarmControlPanelEntity): config, ): """Init the manual MQTT alarm panel.""" - self._state = STATE_ALARM_DISARMED + self._state = AlarmControlPanelState.DISARMED self._hass = hass self._attr_name = name if code_template: @@ -304,38 +306,38 @@ class ManualMQTTAlarm(AlarmControlPanelEntity): self._payload_arm_custom_bypass = payload_arm_custom_bypass @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the device.""" - if self._state == STATE_ALARM_TRIGGERED: + if self._state == AlarmControlPanelState.TRIGGERED: if self._within_pending_time(self._state): - return STATE_ALARM_PENDING + return AlarmControlPanelState.PENDING trigger_time = self._trigger_time_by_state[self._previous_state] if ( self._state_ts + self._pending_time(self._state) + trigger_time ) < dt_util.utcnow(): if self._disarm_after_trigger: - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED self._state = self._previous_state return self._state if self._state in SUPPORTED_PENDING_STATES and self._within_pending_time( self._state ): - return STATE_ALARM_PENDING + return AlarmControlPanelState.PENDING return self._state @property def _active_state(self): """Get the current state.""" - if self.state == STATE_ALARM_PENDING: + if self.state == AlarmControlPanelState.PENDING: return self._previous_state return self._state def _pending_time(self, state): """Get the pending time.""" pending_time = self._pending_time_by_state[state] - if state == STATE_ALARM_TRIGGERED: + if state == AlarmControlPanelState.TRIGGERED: pending_time += self._delay_time_by_state[self._previous_state] return pending_time @@ -354,35 +356,35 @@ class ManualMQTTAlarm(AlarmControlPanelEntity): async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - self._async_validate_code(code, STATE_ALARM_DISARMED) - self._state = STATE_ALARM_DISARMED + self._async_validate_code(code, AlarmControlPanelState.DISARMED) + self._state = AlarmControlPanelState.DISARMED self._state_ts = dt_util.utcnow() self.async_write_ha_state() async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_HOME) - self._async_update_state(STATE_ALARM_ARMED_HOME) + self._async_validate_code(code, AlarmControlPanelState.ARMED_HOME) + self._async_update_state(AlarmControlPanelState.ARMED_HOME) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_AWAY) - self._async_update_state(STATE_ALARM_ARMED_AWAY) + self._async_validate_code(code, AlarmControlPanelState.ARMED_AWAY) + self._async_update_state(AlarmControlPanelState.ARMED_AWAY) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_NIGHT) - self._async_update_state(STATE_ALARM_ARMED_NIGHT) + self._async_validate_code(code, AlarmControlPanelState.ARMED_NIGHT) + self._async_update_state(AlarmControlPanelState.ARMED_NIGHT) async def async_alarm_arm_vacation(self, code: str | None = None) -> None: """Send arm vacation command.""" - self._async_validate_code(code, 
STATE_ALARM_ARMED_VACATION) - self._async_update_state(STATE_ALARM_ARMED_VACATION) + self._async_validate_code(code, AlarmControlPanelState.ARMED_VACATION) + self._async_update_state(AlarmControlPanelState.ARMED_VACATION) async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None: """Send arm custom bypass command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_CUSTOM_BYPASS) - self._async_update_state(STATE_ALARM_ARMED_CUSTOM_BYPASS) + self._async_validate_code(code, AlarmControlPanelState.ARMED_CUSTOM_BYPASS) + self._async_update_state(AlarmControlPanelState.ARMED_CUSTOM_BYPASS) async def async_alarm_trigger(self, code: str | None = None) -> None: """Send alarm trigger command. @@ -392,7 +394,7 @@ class ManualMQTTAlarm(AlarmControlPanelEntity): """ if not self._trigger_time_by_state[self._active_state]: return - self._async_update_state(STATE_ALARM_TRIGGERED) + self._async_update_state(AlarmControlPanelState.TRIGGERED) def _async_update_state(self, state: str) -> None: """Update the state.""" @@ -405,7 +407,7 @@ class ManualMQTTAlarm(AlarmControlPanelEntity): self.async_write_ha_state() pending_time = self._pending_time(state) - if state == STATE_ALARM_TRIGGERED: + if state == AlarmControlPanelState.TRIGGERED: async_track_point_in_time( self._hass, self.async_scheduled_update, self._state_ts + pending_time ) @@ -424,7 +426,7 @@ class ManualMQTTAlarm(AlarmControlPanelEntity): def _async_validate_code(self, code, state): """Validate given code.""" if ( - state != STATE_ALARM_DISARMED and not self.code_arm_required + state != AlarmControlPanelState.DISARMED and not self.code_arm_required ) or self._code is None: return @@ -443,7 +445,7 @@ class ManualMQTTAlarm(AlarmControlPanelEntity): @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - if self.state != STATE_ALARM_PENDING: + if self.state != AlarmControlPanelState.PENDING: return {} return { ATTR_PRE_PENDING_STATE: self._previous_state, diff --git a/homeassistant/components/manual_mqtt/manifest.json b/homeassistant/components/manual_mqtt/manifest.json index d4adcaf3bc9..bf2fccb62ae 100644 --- a/homeassistant/components/manual_mqtt/manifest.json +++ b/homeassistant/components/manual_mqtt/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/manual_mqtt", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/map/__init__.py b/homeassistant/components/map/__init__.py deleted file mode 100644 index 25095e92b93..00000000000 --- a/homeassistant/components/map/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Support for showing device locations.""" - -from homeassistant.components import onboarding -from homeassistant.components.lovelace import _create_map_dashboard -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.storage import Store -from homeassistant.helpers.typing import ConfigType - -DOMAIN = "map" - -CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) - -STORAGE_KEY = DOMAIN -STORAGE_VERSION_MAJOR = 1 - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Create a map panel.""" - - if DOMAIN in config: - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - 
breaks_in_ha_version="2024.10.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "map", - }, - ) - - store: Store[dict[str, bool]] = Store( - hass, - STORAGE_VERSION_MAJOR, - STORAGE_KEY, - ) - data = await store.async_load() - if data: - return True - - if onboarding.async_is_onboarded(hass): - await _create_map_dashboard(hass) - - await store.async_save({"migrated": True}) - - return True diff --git a/homeassistant/components/map/manifest.json b/homeassistant/components/map/manifest.json deleted file mode 100644 index 6a0333c862a..00000000000 --- a/homeassistant/components/map/manifest.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "domain": "map", - "name": "Map", - "codeowners": [], - "dependencies": ["frontend", "lovelace"], - "documentation": "https://www.home-assistant.io/integrations/map", - "integration_type": "system", - "quality_scale": "internal" -} diff --git a/homeassistant/components/marytts/manifest.json b/homeassistant/components/marytts/manifest.json index bbf23327547..814d3c64925 100644 --- a/homeassistant/components/marytts/manifest.json +++ b/homeassistant/components/marytts/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/marytts", "iot_class": "local_push", "loggers": ["speak2mary"], + "quality_scale": "legacy", "requirements": ["speak2mary==1.4.0"] } diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py index 0d680170f3d..f7f974ffbb0 100644 --- a/homeassistant/components/mastodon/__init__.py +++ b/homeassistant/components/mastodon/__init__.py @@ -81,7 +81,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -> ) -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -> bool: """Migrate old config.""" if entry.version == 1 and entry.minor_version == 1: @@ -97,10 +97,9 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: LOGGER.error("Migration failed with error %s", ex) return False - entry.minor_version = 2 - hass.config_entries.async_update_entry( entry, + minor_version=2, unique_id=slugify(construct_mastodon_username(instance, account)), ) @@ -114,7 +113,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -def setup_mastodon(entry: ConfigEntry) -> tuple[Mastodon, dict, dict]: +def setup_mastodon(entry: MastodonConfigEntry) -> tuple[Mastodon, dict, dict]: """Get mastodon details.""" client = create_mastodon_client( entry.data[CONF_BASE_URL], diff --git a/homeassistant/components/mastodon/config_flow.py b/homeassistant/components/mastodon/config_flow.py index 4e856275736..a36ba2e917f 100644 --- a/homeassistant/components/mastodon/config_flow.py +++ b/homeassistant/components/mastodon/config_flow.py @@ -6,8 +6,9 @@ from typing import Any from mastodon.Mastodon import MastodonNetworkError, MastodonUnauthorizedError import voluptuous as vol +from yarl import URL -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_ACCESS_TOKEN, CONF_CLIENT_ID, @@ -19,7 +20,6 @@ from homeassistant.helpers.selector import ( TextSelectorConfig, TextSelectorType, ) -from homeassistant.helpers.typing 
import ConfigType from homeassistant.util import slugify from .const import CONF_BASE_URL, DEFAULT_URL, DOMAIN, LOGGER @@ -29,7 +29,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema( { vol.Required( CONF_BASE_URL, - default=DEFAULT_URL, ): TextSelector(TextSelectorConfig(type=TextSelectorType.URL)), vol.Required( CONF_CLIENT_ID, @@ -44,12 +43,16 @@ STEP_USER_DATA_SCHEMA = vol.Schema( ) +def base_url_from_url(url: str) -> str: + """Return the base url from a url.""" + return str(URL(url).origin()) + + class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 MINOR_VERSION = 2 - config_entry: ConfigEntry def check_connection( self, @@ -107,6 +110,8 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors: dict[str, str] | None = None if user_input: + user_input[CONF_BASE_URL] = base_url_from_url(user_input[CONF_BASE_URL]) + instance, account, errors = await self.hass.async_add_executor_job( self.check_connection, user_input[CONF_BASE_URL], @@ -126,17 +131,17 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): return self.show_user_form(user_input, errors) - async def async_step_import(self, import_config: ConfigType) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" errors: dict[str, str] | None = None LOGGER.debug("Importing Mastodon from configuration.yaml") - base_url = str(import_config.get(CONF_BASE_URL, DEFAULT_URL)) - client_id = str(import_config.get(CONF_CLIENT_ID)) - client_secret = str(import_config.get(CONF_CLIENT_SECRET)) - access_token = str(import_config.get(CONF_ACCESS_TOKEN)) - name = import_config.get(CONF_NAME, None) + base_url = base_url_from_url(str(import_data.get(CONF_BASE_URL, DEFAULT_URL))) + client_id = str(import_data.get(CONF_CLIENT_ID)) + client_secret = str(import_data.get(CONF_CLIENT_SECRET)) + access_token = str(import_data.get(CONF_ACCESS_TOKEN)) + name = import_data.get(CONF_NAME) instance, account, errors = await self.hass.async_add_executor_job( self.check_connection, diff --git a/homeassistant/components/mastodon/manifest.json b/homeassistant/components/mastodon/manifest.json index 40fd9d2f7b3..20c506e7766 100644 --- a/homeassistant/components/mastodon/manifest.json +++ b/homeassistant/components/mastodon/manifest.json @@ -5,7 +5,7 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/mastodon", "integration_type": "service", - "iot_class": "cloud_push", + "iot_class": "cloud_polling", "loggers": ["mastodon"], "requirements": ["Mastodon.py==1.8.1"] } diff --git a/homeassistant/components/mastodon/quality_scale.yaml b/homeassistant/components/mastodon/quality_scale.yaml new file mode 100644 index 00000000000..86702095e95 --- /dev/null +++ b/homeassistant/components/mastodon/quality_scale.yaml @@ -0,0 +1,99 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: + status: todo + comment: | + Mastodon.py does not have CI build/publish. + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: todo + comment: | + Legacy Notify needs rewriting once Notify architecture stabilizes. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + There are no configuration options. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: todo + comment: | + Does not set parallel-updates on notify platform. + reauthentication-flow: + status: todo + comment: | + Waiting to move to OAuth. + test-coverage: + status: todo + comment: | + Legacy Notify needs rewriting once Notify architecture stabilizes. + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + Web service does not support discovery. + discovery: + status: exempt + comment: | + Web service does not support discovery. + docs-data-update: done + docs-examples: done + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single web service. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: + status: todo + comment: | + Waiting to move to OAuth. + repair-issues: done + stale-devices: + status: exempt + comment: | + Web service does not go stale. + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: + status: todo + comment: | + Requirement 'Mastodon.py==1.8.1' appears untyped diff --git a/homeassistant/components/mastodon/sensor.py b/homeassistant/components/mastodon/sensor.py index 12acfc04743..1bb59ad7c05 100644 --- a/homeassistant/components/mastodon/sensor.py +++ b/homeassistant/components/mastodon/sensor.py @@ -23,6 +23,9 @@ from .const import ( ) from .entity import MastodonEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class MastodonSensorEntityDescription(SensorEntityDescription): @@ -35,21 +38,18 @@ ENTITY_DESCRIPTIONS = ( MastodonSensorEntityDescription( key="followers", translation_key="followers", - native_unit_of_measurement="accounts", state_class=SensorStateClass.TOTAL, value_fn=lambda data: data.get(ACCOUNT_FOLLOWERS_COUNT), ), MastodonSensorEntityDescription( key="following", translation_key="following", - native_unit_of_measurement="accounts", state_class=SensorStateClass.TOTAL, value_fn=lambda data: data.get(ACCOUNT_FOLLOWING_COUNT), ), MastodonSensorEntityDescription( key="posts", translation_key="posts", - native_unit_of_measurement="posts", state_class=SensorStateClass.TOTAL, value_fn=lambda data: data.get(ACCOUNT_STATUSES_COUNT), ), diff --git a/homeassistant/components/mastodon/strings.json b/homeassistant/components/mastodon/strings.json index 906b67dd481..c6aefefca06 100644 --- a/homeassistant/components/mastodon/strings.json +++ b/homeassistant/components/mastodon/strings.json @@ -9,7 +9,10 @@ "access_token": "[%key:common::config_flow::data::access_token%]" }, "data_description": { - "base_url": "The URL of your Mastodon instance." + "base_url": "The URL of your Mastodon instance, e.g.
https://mastodon.social.", + "client_id": "The client key for the application created within your Mastodon account.", + "client_secret": "The client secret for the application created within your Mastodon account.", + "access_token": "The access token for the application created within your Mastodon account." } } }, @@ -39,13 +42,16 @@ "entity": { "sensor": { "followers": { - "name": "Followers" + "name": "Followers", + "unit_of_measurement": "accounts" }, "following": { - "name": "Following" + "name": "Following", + "unit_of_measurement": "[%key:component::mastodon::entity::sensor::followers::unit_of_measurement%]" }, "posts": { - "name": "Posts" + "name": "Posts", + "unit_of_measurement": "posts" } } } diff --git a/homeassistant/components/matrix/icons.json b/homeassistant/components/matrix/icons.json index 4fc56ebe0ff..a8b83e67303 100644 --- a/homeassistant/components/matrix/icons.json +++ b/homeassistant/components/matrix/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_message": "mdi:matrix" + "send_message": { + "service": "mdi:matrix" + } } } diff --git a/homeassistant/components/matrix/manifest.json b/homeassistant/components/matrix/manifest.json index 3c465c44f24..e06eed1176f 100644 --- a/homeassistant/components/matrix/manifest.json +++ b/homeassistant/components/matrix/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/matrix", "iot_class": "cloud_push", "loggers": ["matrix_client"], - "requirements": ["matrix-nio==0.25.0", "Pillow==10.4.0"] + "quality_scale": "legacy", + "requirements": ["matrix-nio==0.25.2", "Pillow==11.0.0"] } diff --git a/homeassistant/components/matter/__init__.py b/homeassistant/components/matter/__init__.py index ddd6db3e50e..e751387d7e8 100644 --- a/homeassistant/components/matter/__init__.py +++ b/homeassistant/components/matter/__init__.py @@ -9,6 +9,7 @@ from matter_server.client import MatterClient from matter_server.client.exceptions import ( CannotConnect, InvalidServerVersion, + NotConnected, ServerVersionTooNew, ServerVersionTooOld, ) @@ -132,6 +133,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: listen_task.cancel() raise ConfigEntryNotReady("Matter client not ready") from err + # Set default fabric + try: + await matter_client.set_default_fabric_label( + hass.config.location_name or "Home" + ) + except (NotConnected, MatterError) as err: + listen_task.cancel() + raise ConfigEntryNotReady("Failed to set default fabric label") from err + if DOMAIN not in hass.data: hass.data[DOMAIN] = {} diff --git a/homeassistant/components/matter/adapter.py b/homeassistant/components/matter/adapter.py index a3536435ded..0ccd3e065ff 100644 --- a/homeassistant/components/matter/adapter.py +++ b/homeassistant/components/matter/adapter.py @@ -4,6 +4,7 @@ from __future__ import annotations from typing import TYPE_CHECKING, cast +from chip.clusters import Objects as clusters from matter_server.client.models.device_types import BridgedDevice from matter_server.common.models import EventType, ServerInfoMessage @@ -44,6 +45,7 @@ class MatterAdapter: self.hass = hass self.config_entry = config_entry self.platform_handlers: dict[Platform, AddEntitiesCallback] = {} + self.discovered_entities: set[str] = set() def register_platform_handler( self, platform: Platform, add_entities: AddEntitiesCallback @@ -53,27 +55,19 @@ class MatterAdapter: async def setup_nodes(self) -> None: """Set up all existing nodes and subscribe to new nodes.""" - initialized_nodes: set[int] = set() for node in 
self.matter_client.get_nodes(): - if not node.available: - # ignore un-initialized nodes at startup - # catch them later when they become available. - continue - initialized_nodes.add(node.node_id) self._setup_node(node) def node_added_callback(event: EventType, node: MatterNode) -> None: """Handle node added event.""" - initialized_nodes.add(node.node_id) self._setup_node(node) def node_updated_callback(event: EventType, node: MatterNode) -> None: """Handle node updated event.""" - if node.node_id in initialized_nodes: - return if not node.available: return - initialized_nodes.add(node.node_id) + # We always run the discovery logic again, + # because the firmware version could have been changed or features added. self._setup_node(node) def endpoint_added_callback(event: EventType, data: dict[str, int]) -> None: @@ -142,10 +136,18 @@ class MatterAdapter: def _setup_node(self, node: MatterNode) -> None: """Set up a node.""" LOGGER.debug("Setting up entities for node %s", node.node_id) - - for endpoint in node.endpoints.values(): - # Node endpoints are translated into HA devices - self._setup_endpoint(endpoint) + try: + for endpoint in node.endpoints.values(): + # Node endpoints are translated into HA devices + self._setup_endpoint(endpoint) + except Exception as err: # noqa: BLE001 + # We don't want to crash the whole setup when a single node fails to set up + # for whatever reason, so we catch all exceptions here. + LOGGER.exception( + "Error setting up node %s: %s", + node.node_id, + err, + ) def _create_device_registry( self, @@ -194,11 +196,25 @@ class MatterAdapter: identifiers.add((DOMAIN, f"{ID_TYPE_SERIAL}_{basic_info_serial_number}")) serial_number = basic_info_serial_number - model = ( - get_clean_name(basic_info.productName) or device_type.__name__ + # Model name is the human-readable name of the model/product + model_name = ( + # productLabel is optional but preferred (e.g. Hue Bloom) + get_clean_name(basic_info.productLabel) + # alternative is the productName (e.g.
LCT001) + or get_clean_name(basic_info.productName) + # if no product name, use the device type name + or device_type.__name__ if device_type else None ) + # Model ID is the non-human readable product ID + # we prefer the matter product ID so we can look it up in Matter DCL + if isinstance(basic_info, clusters.BridgedDeviceBasicInformation): + # On bridged devices, the productID is not available + model_id = None + else: + model_id = str(product_id) if (product_id := basic_info.productID) else None + dr.async_get(self.hass).async_get_or_create( name=name, config_entry_id=self.config_entry.entry_id, @@ -206,7 +222,8 @@ class MatterAdapter: hw_version=basic_info.hardwareVersionString, sw_version=basic_info.softwareVersionString, manufacturer=basic_info.vendorName or endpoint.node.device_info.vendorName, - model=model, + model=model_name, + model_id=model_id, serial_number=serial_number, via_device=(DOMAIN, bridge_device_id) if bridge_device_id else None, ) @@ -217,11 +234,20 @@ class MatterAdapter: self._create_device_registry(endpoint) # run platform discovery from device type instances for entity_info in async_discover_entities(endpoint): + discovery_key = ( + f"{entity_info.platform}_{endpoint.node.node_id}_{endpoint.endpoint_id}_" + f"{entity_info.primary_attribute.cluster_id}_" + f"{entity_info.primary_attribute.attribute_id}_" + f"{entity_info.entity_description.key}" + ) + if discovery_key in self.discovered_entities: + continue LOGGER.debug( "Creating %s entity for %s", entity_info.platform, entity_info.primary_attribute, ) + self.discovered_entities.add(discovery_key) new_entity = entity_info.entity_class( self.matter_client, endpoint, entity_info ) diff --git a/homeassistant/components/matter/binary_sensor.py b/homeassistant/components/matter/binary_sensor.py index a6d68682e9d..6882078a712 100644 --- a/homeassistant/components/matter/binary_sensor.py +++ b/homeassistant/components/matter/binary_sensor.py @@ -150,15 +150,116 @@ DISCOVERY_SCHEMAS = [ entity_description=MatterBinarySensorEntityDescription( key="LockDoorStateSensor", device_class=BinarySensorDeviceClass.DOOR, - # pylint: disable=unnecessary-lambda - measurement_to_ha=lambda x: { + measurement_to_ha={ clusters.DoorLock.Enums.DoorStateEnum.kDoorOpen: True, clusters.DoorLock.Enums.DoorStateEnum.kDoorJammed: True, clusters.DoorLock.Enums.DoorStateEnum.kDoorForcedOpen: True, clusters.DoorLock.Enums.DoorStateEnum.kDoorClosed: False, - }.get(x), + }.get, ), entity_class=MatterBinarySensor, required_attributes=(clusters.DoorLock.Attributes.DoorState,), + featuremap_contains=clusters.DoorLock.Bitmaps.Feature.kDoorPositionSensor, + ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="SmokeCoAlarmDeviceMutedSensor", + measurement_to_ha=lambda x: ( + x == clusters.SmokeCoAlarm.Enums.MuteStateEnum.kMuted + ), + translation_key="muted", + entity_category=EntityCategory.DIAGNOSTIC, + ), + entity_class=MatterBinarySensor, + required_attributes=(clusters.SmokeCoAlarm.Attributes.DeviceMuted,), + ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="SmokeCoAlarmEndfOfServiceSensor", + measurement_to_ha=lambda x: ( + x == clusters.SmokeCoAlarm.Enums.EndOfServiceEnum.kExpired + ), + translation_key="end_of_service", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + entity_class=MatterBinarySensor, + 
required_attributes=(clusters.SmokeCoAlarm.Attributes.EndOfServiceAlert,), + ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="SmokeCoAlarmBatteryAlertSensor", + measurement_to_ha=lambda x: ( + x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal + ), + translation_key="battery_alert", + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + entity_class=MatterBinarySensor, + required_attributes=(clusters.SmokeCoAlarm.Attributes.BatteryAlert,), + ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="SmokeCoAlarmTestInProgressSensor", + translation_key="test_in_progress", + device_class=BinarySensorDeviceClass.RUNNING, + entity_category=EntityCategory.DIAGNOSTIC, + ), + entity_class=MatterBinarySensor, + required_attributes=(clusters.SmokeCoAlarm.Attributes.TestInProgress,), + ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="SmokeCoAlarmHardwareFaultAlertSensor", + translation_key="hardware_fault", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + entity_class=MatterBinarySensor, + required_attributes=(clusters.SmokeCoAlarm.Attributes.HardwareFaultAlert,), + ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="SmokeCoAlarmSmokeStateSensor", + device_class=BinarySensorDeviceClass.SMOKE, + measurement_to_ha=lambda x: ( + x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal + ), + ), + entity_class=MatterBinarySensor, + required_attributes=(clusters.SmokeCoAlarm.Attributes.SmokeState,), + ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="SmokeCoAlarmInterconnectSmokeAlarmSensor", + device_class=BinarySensorDeviceClass.SMOKE, + measurement_to_ha=lambda x: ( + x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal + ), + translation_key="interconnected_smoke_alarm", + ), + entity_class=MatterBinarySensor, + required_attributes=(clusters.SmokeCoAlarm.Attributes.InterconnectSmokeAlarm,), + ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="SmokeCoAlarmInterconnectCOAlarmSensor", + device_class=BinarySensorDeviceClass.CO, + measurement_to_ha=lambda x: ( + x != clusters.SmokeCoAlarm.Enums.AlarmStateEnum.kNormal + ), + translation_key="interconnected_co_alarm", + ), + entity_class=MatterBinarySensor, + required_attributes=(clusters.SmokeCoAlarm.Attributes.InterconnectCOAlarm,), ), ] diff --git a/homeassistant/components/matter/button.py b/homeassistant/components/matter/button.py new file mode 100644 index 00000000000..153124a4f7e --- /dev/null +++ b/homeassistant/components/matter/button.py @@ -0,0 +1,150 @@ +"""Matter Button platform.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any + +from chip.clusters import Objects as clusters + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform 
import AddEntitiesCallback + +from .entity import MatterEntity, MatterEntityDescription +from .helpers import get_matter +from .models import MatterDiscoverySchema + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Matter Button platform.""" + matter = get_matter(hass) + matter.register_platform_handler(Platform.BUTTON, async_add_entities) + + +@dataclass(frozen=True) +class MatterButtonEntityDescription(ButtonEntityDescription, MatterEntityDescription): + """Describe Matter Button entities.""" + + command: Callable[[], Any] | None = None + + +class MatterCommandButton(MatterEntity, ButtonEntity): + """Representation of a Matter Button entity.""" + + entity_description: MatterButtonEntityDescription + + async def async_press(self) -> None: + """Handle the button press leveraging a Matter command.""" + if TYPE_CHECKING: + assert self.entity_description.command is not None + await self.matter_client.send_device_command( + node_id=self._endpoint.node.node_id, + endpoint_id=self._endpoint.endpoint_id, + command=self.entity_description.command(), + ) + + +# Discovery schema(s) to map Matter Attributes to HA entities +DISCOVERY_SCHEMAS = [ + MatterDiscoverySchema( + platform=Platform.BUTTON, + entity_description=MatterButtonEntityDescription( + key="IdentifyButton", + entity_category=EntityCategory.CONFIG, + device_class=ButtonDeviceClass.IDENTIFY, + command=lambda: clusters.Identify.Commands.Identify(identifyTime=15), + ), + entity_class=MatterCommandButton, + required_attributes=(clusters.Identify.Attributes.AcceptedCommandList,), + value_contains=clusters.Identify.Commands.Identify.command_id, + allow_multi=True, + ), + MatterDiscoverySchema( + platform=Platform.BUTTON, + entity_description=MatterButtonEntityDescription( + key="OperationalStatePauseButton", + translation_key="pause", + command=clusters.OperationalState.Commands.Pause, + ), + entity_class=MatterCommandButton, + required_attributes=(clusters.OperationalState.Attributes.AcceptedCommandList,), + value_contains=clusters.OperationalState.Commands.Pause.command_id, + allow_multi=True, + ), + MatterDiscoverySchema( + platform=Platform.BUTTON, + entity_description=MatterButtonEntityDescription( + key="OperationalStateResumeButton", + translation_key="resume", + command=clusters.OperationalState.Commands.Resume, + ), + entity_class=MatterCommandButton, + required_attributes=(clusters.OperationalState.Attributes.AcceptedCommandList,), + value_contains=clusters.OperationalState.Commands.Resume.command_id, + allow_multi=True, + ), + MatterDiscoverySchema( + platform=Platform.BUTTON, + entity_description=MatterButtonEntityDescription( + key="OperationalStateStartButton", + translation_key="start", + command=clusters.OperationalState.Commands.Start, + ), + entity_class=MatterCommandButton, + required_attributes=(clusters.OperationalState.Attributes.AcceptedCommandList,), + value_contains=clusters.OperationalState.Commands.Start.command_id, + allow_multi=True, + ), + MatterDiscoverySchema( + platform=Platform.BUTTON, + entity_description=MatterButtonEntityDescription( + key="OperationalStateStopButton", + translation_key="stop", + command=clusters.OperationalState.Commands.Stop, + ), + entity_class=MatterCommandButton, + required_attributes=(clusters.OperationalState.Attributes.AcceptedCommandList,), + value_contains=clusters.OperationalState.Commands.Stop.command_id, + allow_multi=True, + ), + MatterDiscoverySchema( + 
platform=Platform.BUTTON, + entity_description=MatterButtonEntityDescription( + key="HepaFilterMonitoringResetButton", + translation_key="reset_filter_condition", + command=clusters.HepaFilterMonitoring.Commands.ResetCondition, + ), + entity_class=MatterCommandButton, + required_attributes=( + clusters.HepaFilterMonitoring.Attributes.AcceptedCommandList, + ), + value_contains=clusters.HepaFilterMonitoring.Commands.ResetCondition.command_id, + allow_multi=True, + ), + MatterDiscoverySchema( + platform=Platform.BUTTON, + entity_description=MatterButtonEntityDescription( + key="ActivatedCarbonFilterMonitoringResetButton", + translation_key="reset_filter_condition", + command=clusters.ActivatedCarbonFilterMonitoring.Commands.ResetCondition, + ), + entity_class=MatterCommandButton, + required_attributes=( + clusters.ActivatedCarbonFilterMonitoring.Attributes.AcceptedCommandList, + ), + value_contains=clusters.ActivatedCarbonFilterMonitoring.Commands.ResetCondition.command_id, + allow_multi=True, + ), +] diff --git a/homeassistant/components/matter/climate.py b/homeassistant/components/matter/climate.py index ff00e4ee495..0378d0ea226 100644 --- a/homeassistant/components/matter/climate.py +++ b/homeassistant/components/matter/climate.py @@ -46,7 +46,36 @@ SINGLE_SETPOINT_DEVICES: set[tuple[int, int]] = { # We were told this is just some legacy inheritance from zigbee specs. # In the list below specify tuples of (vendorid, productid) of devices for # which we just need a single setpoint to control both heating and cooling. + (0x1209, 0x8000), + (0x1209, 0x8001), + (0x1209, 0x8002), + (0x1209, 0x8003), + (0x1209, 0x8004), + (0x1209, 0x8005), + (0x1209, 0x8006), (0x1209, 0x8007), + (0x1209, 0x8008), + (0x1209, 0x8009), + (0x1209, 0x800A), + (0x1209, 0x800B), + (0x1209, 0x800C), + (0x1209, 0x800D), + (0x1209, 0x800E), + (0x1209, 0x8010), + (0x1209, 0x8011), + (0x1209, 0x8012), + (0x1209, 0x8013), + (0x1209, 0x8014), + (0x1209, 0x8020), + (0x1209, 0x8021), + (0x1209, 0x8022), + (0x1209, 0x8023), + (0x1209, 0x8024), + (0x1209, 0x8025), + (0x1209, 0x8026), + (0x1209, 0x8027), + (0x1209, 0x8028), + (0x1209, 0x8029), } SUPPORT_DRY_MODE_DEVICES: set[tuple[int, int]] = { @@ -55,7 +84,36 @@ SUPPORT_DRY_MODE_DEVICES: set[tuple[int, int]] = { # support dry mode. (0x0001, 0x0108), (0x0001, 0x010A), + (0x1209, 0x8000), + (0x1209, 0x8001), + (0x1209, 0x8002), + (0x1209, 0x8003), + (0x1209, 0x8004), + (0x1209, 0x8005), + (0x1209, 0x8006), (0x1209, 0x8007), + (0x1209, 0x8008), + (0x1209, 0x8009), + (0x1209, 0x800A), + (0x1209, 0x800B), + (0x1209, 0x800C), + (0x1209, 0x800D), + (0x1209, 0x800E), + (0x1209, 0x8010), + (0x1209, 0x8011), + (0x1209, 0x8012), + (0x1209, 0x8013), + (0x1209, 0x8014), + (0x1209, 0x8020), + (0x1209, 0x8021), + (0x1209, 0x8022), + (0x1209, 0x8023), + (0x1209, 0x8024), + (0x1209, 0x8025), + (0x1209, 0x8026), + (0x1209, 0x8027), + (0x1209, 0x8028), + (0x1209, 0x8029), } SUPPORT_FAN_MODE_DEVICES: set[tuple[int, int]] = { @@ -64,7 +122,36 @@ SUPPORT_FAN_MODE_DEVICES: set[tuple[int, int]] = { # support fan-only mode. 
(0x0001, 0x0108), (0x0001, 0x010A), + (0x1209, 0x8000), + (0x1209, 0x8001), + (0x1209, 0x8002), + (0x1209, 0x8003), + (0x1209, 0x8004), + (0x1209, 0x8005), + (0x1209, 0x8006), (0x1209, 0x8007), + (0x1209, 0x8008), + (0x1209, 0x8009), + (0x1209, 0x800A), + (0x1209, 0x800B), + (0x1209, 0x800C), + (0x1209, 0x800D), + (0x1209, 0x800E), + (0x1209, 0x8010), + (0x1209, 0x8011), + (0x1209, 0x8012), + (0x1209, 0x8013), + (0x1209, 0x8014), + (0x1209, 0x8020), + (0x1209, 0x8021), + (0x1209, 0x8022), + (0x1209, 0x8023), + (0x1209, 0x8024), + (0x1209, 0x8025), + (0x1209, 0x8026), + (0x1209, 0x8027), + (0x1209, 0x8028), + (0x1209, 0x8029), } SystemModeEnum = clusters.Thermostat.Enums.SystemModeEnum @@ -100,7 +187,8 @@ class MatterClimate(MatterEntity, ClimateEntity): _attr_temperature_unit: str = UnitOfTemperature.CELSIUS _attr_hvac_mode: HVACMode = HVACMode.OFF _feature_map: int | None = None - _enable_turn_on_off_backwards_compatibility = False + + _platform_translation_key = "thermostat" async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" @@ -190,48 +278,56 @@ class MatterClimate(MatterEntity, ClimateEntity): # if the mains power is off - treat it as if the HVAC mode is off self._attr_hvac_mode = HVACMode.OFF self._attr_hvac_action = None - return - - # update hvac_mode from SystemMode - system_mode_value = int( - self.get_matter_attribute_value(clusters.Thermostat.Attributes.SystemMode) - ) - match system_mode_value: - case SystemModeEnum.kAuto: - self._attr_hvac_mode = HVACMode.HEAT_COOL - case SystemModeEnum.kDry: - self._attr_hvac_mode = HVACMode.DRY - case SystemModeEnum.kFanOnly: - self._attr_hvac_mode = HVACMode.FAN_ONLY - case SystemModeEnum.kCool | SystemModeEnum.kPrecooling: - self._attr_hvac_mode = HVACMode.COOL - case SystemModeEnum.kHeat | SystemModeEnum.kEmergencyHeat: - self._attr_hvac_mode = HVACMode.HEAT - case SystemModeEnum.kFanOnly: - self._attr_hvac_mode = HVACMode.FAN_ONLY - case SystemModeEnum.kDry: - self._attr_hvac_mode = HVACMode.DRY - case _: - self._attr_hvac_mode = HVACMode.OFF - # running state is an optional attribute - # which we map to hvac_action if it exists (its value is not None) - self._attr_hvac_action = None - if running_state_value := self.get_matter_attribute_value( - clusters.Thermostat.Attributes.ThermostatRunningState - ): - match running_state_value: - case ThermostatRunningState.Heat | ThermostatRunningState.HeatStage2: - self._attr_hvac_action = HVACAction.HEATING - case ThermostatRunningState.Cool | ThermostatRunningState.CoolStage2: - self._attr_hvac_action = HVACAction.COOLING - case ( - ThermostatRunningState.Fan - | ThermostatRunningState.FanStage2 - | ThermostatRunningState.FanStage3 - ): - self._attr_hvac_action = HVACAction.FAN + else: + # update hvac_mode from SystemMode + system_mode_value = int( + self.get_matter_attribute_value( + clusters.Thermostat.Attributes.SystemMode + ) + ) + match system_mode_value: + case SystemModeEnum.kAuto: + self._attr_hvac_mode = HVACMode.HEAT_COOL + case SystemModeEnum.kDry: + self._attr_hvac_mode = HVACMode.DRY + case SystemModeEnum.kFanOnly: + self._attr_hvac_mode = HVACMode.FAN_ONLY + case SystemModeEnum.kCool | SystemModeEnum.kPrecooling: + self._attr_hvac_mode = HVACMode.COOL + case SystemModeEnum.kHeat | SystemModeEnum.kEmergencyHeat: + self._attr_hvac_mode = HVACMode.HEAT + case SystemModeEnum.kFanOnly: + self._attr_hvac_mode = HVACMode.FAN_ONLY + case SystemModeEnum.kDry: + self._attr_hvac_mode = HVACMode.DRY case _: - self._attr_hvac_action = HVACAction.OFF + 
self._attr_hvac_mode = HVACMode.OFF + # running state is an optional attribute + # which we map to hvac_action if it exists (its value is not None) + self._attr_hvac_action = None + if running_state_value := self.get_matter_attribute_value( + clusters.Thermostat.Attributes.ThermostatRunningState + ): + match running_state_value: + case ( + ThermostatRunningState.Heat + | ThermostatRunningState.HeatStage2 + ): + self._attr_hvac_action = HVACAction.HEATING + case ( + ThermostatRunningState.Cool + | ThermostatRunningState.CoolStage2 + ): + self._attr_hvac_action = HVACAction.COOLING + case ( + ThermostatRunningState.Fan + | ThermostatRunningState.FanStage2 + | ThermostatRunningState.FanStage3 + ): + self._attr_hvac_action = HVACAction.FAN + case _: + self._attr_hvac_action = HVACAction.OFF + # update target temperature high/low supports_range = ( self._attr_supported_features @@ -332,7 +428,7 @@ DISCOVERY_SCHEMAS = [ platform=Platform.CLIMATE, entity_description=ClimateEntityDescription( key="MatterThermostat", - translation_key="thermostat", + name=None, ), entity_class=MatterClimate, required_attributes=(clusters.Thermostat.Attributes.LocalTemperature,), diff --git a/homeassistant/components/matter/config_flow.py b/homeassistant/components/matter/config_flow.py index ae71b7a1711..6f7505eb61f 100644 --- a/homeassistant/components/matter/config_flow.py +++ b/homeassistant/components/matter/config_flow.py @@ -14,8 +14,6 @@ from homeassistant.components.hassio import ( AddonInfo, AddonManager, AddonState, - HassioServiceInfo, - is_hassio, ) from homeassistant.components.onboarding import async_is_onboarded from homeassistant.components.zeroconf import ZeroconfServiceInfo @@ -25,6 +23,8 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import AbortFlow from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.hassio import is_hassio +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .addon import get_addon_manager from .const import ( diff --git a/homeassistant/components/matter/const.py b/homeassistant/components/matter/const.py index a0e160a6c01..8018d5e09ed 100644 --- a/homeassistant/components/matter/const.py +++ b/homeassistant/components/matter/const.py @@ -13,3 +13,5 @@ LOGGER = logging.getLogger(__package__) # prefixes to identify device identifier id types ID_TYPE_DEVICE_ID = "deviceid" ID_TYPE_SERIAL = "serial" + +FEATUREMAP_ATTRIBUTE_ID = 65532 diff --git a/homeassistant/components/matter/cover.py b/homeassistant/components/matter/cover.py index c32b7bc9e1a..ba9c3afbdee 100644 --- a/homeassistant/components/matter/cover.py +++ b/homeassistant/components/matter/cover.py @@ -201,7 +201,8 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.COVER, entity_description=CoverEntityDescription( - key="MatterCover", translation_key="cover" + key="MatterCover", + name=None, ), entity_class=MatterCover, required_attributes=( @@ -216,7 +217,7 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.COVER, entity_description=CoverEntityDescription( - key="MatterCoverPositionAwareLift", translation_key="cover" + key="MatterCoverPositionAwareLift", name=None ), entity_class=MatterCover, required_attributes=( @@ -231,7 +232,7 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.COVER, entity_description=CoverEntityDescription( - key="MatterCoverPositionAwareTilt", translation_key="cover" + key="MatterCoverPositionAwareTilt", 
name=None ), entity_class=MatterCover, required_attributes=( @@ -246,7 +247,7 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.COVER, entity_description=CoverEntityDescription( - key="MatterCoverPositionAwareLiftAndTilt", translation_key="cover" + key="MatterCoverPositionAwareLiftAndTilt", name=None ), entity_class=MatterCover, required_attributes=( diff --git a/homeassistant/components/matter/discovery.py b/homeassistant/components/matter/discovery.py index 33c8bb47e6a..3b9fb0b8a94 100644 --- a/homeassistant/components/matter/discovery.py +++ b/homeassistant/components/matter/discovery.py @@ -11,7 +11,9 @@ from homeassistant.const import Platform from homeassistant.core import callback from .binary_sensor import DISCOVERY_SCHEMAS as BINARY_SENSOR_SCHEMAS +from .button import DISCOVERY_SCHEMAS as BUTTON_SCHEMAS from .climate import DISCOVERY_SCHEMAS as CLIMATE_SENSOR_SCHEMAS +from .const import FEATUREMAP_ATTRIBUTE_ID from .cover import DISCOVERY_SCHEMAS as COVER_SCHEMAS from .event import DISCOVERY_SCHEMAS as EVENT_SCHEMAS from .fan import DISCOVERY_SCHEMAS as FAN_SCHEMAS @@ -23,9 +25,12 @@ from .select import DISCOVERY_SCHEMAS as SELECT_SCHEMAS from .sensor import DISCOVERY_SCHEMAS as SENSOR_SCHEMAS from .switch import DISCOVERY_SCHEMAS as SWITCH_SCHEMAS from .update import DISCOVERY_SCHEMAS as UPDATE_SCHEMAS +from .vacuum import DISCOVERY_SCHEMAS as VACUUM_SCHEMAS +from .valve import DISCOVERY_SCHEMAS as VALVE_SCHEMAS DISCOVERY_SCHEMAS: dict[Platform, list[MatterDiscoverySchema]] = { Platform.BINARY_SENSOR: BINARY_SENSOR_SCHEMAS, + Platform.BUTTON: BUTTON_SCHEMAS, Platform.CLIMATE: CLIMATE_SENSOR_SCHEMAS, Platform.COVER: COVER_SCHEMAS, Platform.EVENT: EVENT_SCHEMAS, @@ -37,6 +42,8 @@ DISCOVERY_SCHEMAS: dict[Platform, list[MatterDiscoverySchema]] = { Platform.SENSOR: SENSOR_SCHEMAS, Platform.SWITCH: SWITCH_SCHEMAS, Platform.UPDATE: UPDATE_SCHEMAS, + Platform.VACUUM: VACUUM_SCHEMAS, + Platform.VALVE: VALVE_SCHEMAS, } SUPPORTED_PLATFORMS = tuple(DISCOVERY_SCHEMAS) @@ -100,13 +107,42 @@ def async_discover_entities( ): continue - # check for values that may not be present + # check for endpoint-attributes that may not be present if schema.absent_attributes is not None and any( endpoint.has_attribute(None, val_schema) for val_schema in schema.absent_attributes ): continue + # check for clusters that may not be present + if schema.absent_clusters is not None and any( + endpoint.node.has_cluster(val_schema) + for val_schema in schema.absent_clusters + ): + continue + + # check for required value in (primary) attribute + primary_attribute = schema.required_attributes[0] + primary_value = endpoint.get_attribute_value(None, primary_attribute) + if schema.value_contains is not None and ( + isinstance(primary_value, list) + and schema.value_contains not in primary_value + ): + continue + + # check for required value in cluster featuremap + if schema.featuremap_contains is not None and ( + not bool( + int( + endpoint.get_attribute_value( + primary_attribute.cluster_id, FEATUREMAP_ATTRIBUTE_ID + ) + ) + & schema.featuremap_contains + ) + ): + continue + # all checks passed, this value belongs to an entity attributes_to_watch = list(schema.required_attributes) @@ -124,6 +160,7 @@ def async_discover_entities( attributes_to_watch=attributes_to_watch, entity_description=schema.entity_description, entity_class=schema.entity_class, + discovery_schema=schema, ) # prevent re-discovery of the primary attribute if not allowed diff --git a/homeassistant/components/matter/entity.py 
b/homeassistant/components/matter/entity.py index 61e29477585..50a0f2b1fee 100644 --- a/homeassistant/components/matter/entity.py +++ b/homeassistant/components/matter/entity.py @@ -2,10 +2,8 @@ from __future__ import annotations -from abc import abstractmethod from collections.abc import Callable from dataclasses import dataclass -from functools import cached_property import logging from typing import TYPE_CHECKING, Any, cast @@ -13,13 +11,15 @@ from chip.clusters import Objects as clusters from chip.clusters.Objects import ClusterAttributeDescriptor, NullValue from matter_server.common.helpers.util import create_attribute_path from matter_server.common.models import EventType, ServerInfoMessage +from propcache import cached_property from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity, EntityDescription +import homeassistant.helpers.entity_registry as er from homeassistant.helpers.typing import UndefinedType -from .const import DOMAIN, ID_TYPE_DEVICE_ID +from .const import DOMAIN, FEATUREMAP_ATTRIBUTE_ID, ID_TYPE_DEVICE_ID from .helpers import get_device_id if TYPE_CHECKING: @@ -46,6 +46,7 @@ class MatterEntity(Entity): _attr_has_entity_name = True _attr_should_poll = False _name_postfix: str | None = None + _platform_translation_key: str | None = None def __init__( self, @@ -84,6 +85,8 @@ class MatterEntity(Entity): and ep.has_attribute(None, entity_info.primary_attribute) ): self._name_postfix = str(self._endpoint.endpoint_id) + if self._platform_translation_key and not self.translation_key: + self._attr_translation_key = self._platform_translation_key # prefer the label attribute for the entity name # Matter has a way for users and/or vendors to specify a name for an endpoint @@ -138,6 +141,19 @@ class MatterEntity(Entity): node_filter=self._endpoint.node.node_id, ) ) + # subscribe to FeatureMap attribute (as that can dynamically change) + self._unsubscribes.append( + self.matter_client.subscribe_events( + callback=self._on_featuremap_update, + event_filter=EventType.ATTRIBUTE_UPDATED, + node_filter=self._endpoint.node.node_id, + attr_path_filter=create_attribute_path( + endpoint=self._endpoint.endpoint_id, + cluster_id=self._entity_info.primary_attribute.cluster_id, + attribute_id=FEATUREMAP_ATTRIBUTE_ID, + ), + ) + ) @cached_property def name(self) -> str | UndefinedType | None: @@ -158,7 +174,29 @@ class MatterEntity(Entity): self.async_write_ha_state() @callback - @abstractmethod + def _on_featuremap_update( + self, event: EventType, data: tuple[int, str, int] | None + ) -> None: + """Handle FeatureMap attribute updates.""" + if data is None: + return + new_value = data[2] + # handle edge case where a Feature is removed from a cluster + if ( + self._entity_info.discovery_schema.featuremap_contains is not None + and not bool( + new_value & self._entity_info.discovery_schema.featuremap_contains + ) + ): + # this entity is no longer supported by the device + ent_reg = er.async_get(self.hass) + ent_reg.async_remove(self.entity_id) + + return + # all other cases, just update the entity + self._on_matter_event(event, data) + + @callback def _update_from_device(self) -> None: """Update data from Matter device.""" diff --git a/homeassistant/components/matter/fan.py b/homeassistant/components/matter/fan.py index 458a57538eb..593693dbbf9 100644 --- a/homeassistant/components/matter/fan.py +++ b/homeassistant/components/matter/fan.py @@ -58,8 +58,9 @@ class MatterFan(MatterEntity, FanEntity): 
_last_known_preset_mode: str | None = None _last_known_percentage: int = 0 - _enable_turn_on_off_backwards_compatibility = False + _feature_map: int | None = None + _platform_translation_key = "fan" async def async_turn_on( self, @@ -329,7 +330,8 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.FAN, entity_description=FanEntityDescription( - key="MatterFan", name=None, translation_key="fan" + key="MatterFan", + name=None, ), entity_class=MatterFan, # FanEntityFeature diff --git a/homeassistant/components/matter/icons.json b/homeassistant/components/matter/icons.json index 94da41931de..adcdcd05137 100644 --- a/homeassistant/components/matter/icons.json +++ b/homeassistant/components/matter/icons.json @@ -1,5 +1,24 @@ { "entity": { + "binary_sensor": { + "muted": { + "default": "mdi:bell-off" + } + }, + "button": { + "pause": { + "default": "mdi:pause" + }, + "resume": { + "default": "mdi:play-pause" + }, + "start": { + "default": "mdi:play" + }, + "stop": { + "default": "mdi:stop" + } + }, "fan": { "fan": { "state_attributes": { @@ -16,6 +35,29 @@ } } } + }, + "sensor": { + "contamination_state": { + "default": "mdi:air-filter" + }, + "air_quality": { + "default": "mdi:air-filter" + }, + "bat_replacement_description": { + "default": "mdi:battery-sync" + }, + "hepa_filter_condition": { + "default": "mdi:filter-check" + }, + "activated_carbon_filter_condition": { + "default": "mdi:filter-check" + }, + "operational_state": { + "default": "mdi:play-pause" + }, + "valve_position": { + "default": "mdi:valve" + } } } } diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 58ef8081fa9..c9d5c688f69 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -9,10 +9,12 @@ from matter_server.client.models import device_types from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, LightEntityDescription, @@ -23,6 +25,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from .const import LOGGER from .entity import MatterEntity @@ -41,7 +44,6 @@ COLOR_MODE_MAP = { clusters.ColorControl.Enums.ColorMode.kCurrentXAndCurrentY: ColorMode.XY, clusters.ColorControl.Enums.ColorMode.kColorTemperature: ColorMode.COLOR_TEMP, } -DEFAULT_TRANSITION = 0.2 # there's a bug in (at least) Espressif's implementation of light transitions # on devices based on Matter 1.0. Mark potential devices with this issue. 
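# Minimal illustrative sketch (not part of the patch; example numbers are assumed):
# the light.py hunks above and below migrate the platform from mireds (ATTR_COLOR_TEMP)
# to kelvin (ATTR_COLOR_TEMP_KELVIN), while Matter's ColorControl cluster still reports
# ColorTemperatureMireds, so values are converted with homeassistant.util.color, as the
# patched _set_color_temp and _update_from_device do.
from homeassistant.util import color as color_util

mireds = 250  # example value, as a Matter light might report it
kelvin = color_util.color_temperature_mired_to_kelvin(mireds)  # 1_000_000 / 250 -> 4000 K
mireds_again = color_util.color_temperature_kelvin_to_mired(kelvin)  # back to 250 mireds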
@@ -66,6 +68,9 @@ TRANSITION_BLOCKLIST = ( (4999, 25057, "1.0", "27.0"), (5009, 514, "1.0", "1.0.0"), (5010, 769, "3.0", "1.0.0"), + (5130, 544, "v0.4", "6.7.196e9d4e08-14"), + (5127, 4232, "ver_0.1", "v1.00.51"), + (5245, 1412, "1.0", "1.0.21"), ) @@ -87,6 +92,9 @@ class MatterLight(MatterEntity, LightEntity): _supports_color = False _supports_color_temperature = False _transitions_disabled = False + _platform_translation_key = "light" + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN async def _set_xy_color( self, xy_color: tuple[float, float], transition: float = 0.0 @@ -128,12 +136,16 @@ class MatterLight(MatterEntity, LightEntity): ) ) - async def _set_color_temp(self, color_temp: int, transition: float = 0.0) -> None: + async def _set_color_temp( + self, color_temp_kelvin: int, transition: float = 0.0 + ) -> None: """Set color temperature.""" - + color_temp_mired = color_util.color_temperature_kelvin_to_mired( + color_temp_kelvin + ) await self.send_device_command( clusters.ColorControl.Commands.MoveToColorTemperature( - colorTemperatureMireds=color_temp, + colorTemperatureMireds=color_temp_mired, # transition in matter is measured in tenths of a second transitionTime=int(transition * 10), # allow setting the color while the light is off, @@ -283,9 +295,9 @@ class MatterLight(MatterEntity, LightEntity): hs_color = kwargs.get(ATTR_HS_COLOR) xy_color = kwargs.get(ATTR_XY_COLOR) - color_temp = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) brightness = kwargs.get(ATTR_BRIGHTNESS) - transition = kwargs.get(ATTR_TRANSITION, DEFAULT_TRANSITION) + transition = kwargs.get(ATTR_TRANSITION, 0) if self._transitions_disabled: transition = 0 @@ -295,10 +307,10 @@ class MatterLight(MatterEntity, LightEntity): elif xy_color is not None and ColorMode.XY in self.supported_color_modes: await self._set_xy_color(xy_color, transition) elif ( - color_temp is not None + color_temp_kelvin is not None and ColorMode.COLOR_TEMP in self.supported_color_modes ): - await self._set_color_temp(color_temp, transition) + await self._set_color_temp(color_temp_kelvin, transition) if brightness is not None and self._supports_brightness: await self._set_brightness(brightness, transition) @@ -365,12 +377,16 @@ class MatterLight(MatterEntity, LightEntity): clusters.ColorControl.Attributes.ColorTempPhysicalMinMireds ) if min_mireds > 0: - self._attr_min_mireds = min_mireds + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + ) max_mireds = self.get_matter_attribute_value( clusters.ColorControl.Attributes.ColorTempPhysicalMaxMireds ) - if min_mireds > 0: - self._attr_max_mireds = max_mireds + if max_mireds > 0: + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + ) supported_color_modes = filter_supported_color_modes(supported_color_modes) self._attr_supported_color_modes = supported_color_modes @@ -396,8 +412,13 @@ class MatterLight(MatterEntity, LightEntity): if self._supports_brightness: self._attr_brightness = self._get_brightness() - if self._supports_color_temperature: - self._attr_color_temp = self._get_color_temperature() + if ( + self._supports_color_temperature + and (color_temperature := self._get_color_temperature()) > 0 + ): + self._attr_color_temp_kelvin = color_util.color_temperature_mired_to_kelvin( + color_temperature + ) if self._supports_color: self._attr_color_mode = color_mode = self._get_color_mode() @@ -411,7 +432,7 @@ 
class MatterLight(MatterEntity, LightEntity): and color_mode == ColorMode.XY ): self._attr_xy_color = self._get_xy_color() - elif self._attr_color_temp is not None: + elif self._attr_color_temp_kelvin is not None: self._attr_color_mode = ColorMode.COLOR_TEMP elif self._attr_brightness is not None: self._attr_color_mode = ColorMode.BRIGHTNESS @@ -441,7 +462,8 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.LIGHT, entity_description=LightEntityDescription( - key="MatterLight", translation_key="light" + key="MatterLight", + name=None, ), entity_class=MatterLight, required_attributes=(clusters.OnOff.Attributes.OnOff,), @@ -468,7 +490,8 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.LIGHT, entity_description=LightEntityDescription( - key="MatterHSColorLightFallback", translation_key="light" + key="MatterHSColorLightFallback", + name=None, ), entity_class=MatterLight, required_attributes=( @@ -488,7 +511,8 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.LIGHT, entity_description=LightEntityDescription( - key="MatterXYColorLightFallback", translation_key="light" + key="MatterXYColorLightFallback", + name=None, ), entity_class=MatterLight, required_attributes=( @@ -508,7 +532,8 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.LIGHT, entity_description=LightEntityDescription( - key="MatterColorTemperatureLightFallback", translation_key="light" + key="MatterColorTemperatureLightFallback", + name=None, ), entity_class=MatterLight, required_attributes=( diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index 8adaecd67ad..d69d0fd3dab 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -40,6 +40,7 @@ class MatterLock(MatterEntity, LockEntity): _feature_map: int | None = None _optimistic_timer: asyncio.TimerHandle | None = None + _platform_translation_key = "lock" @property def code_format(self) -> str | None: @@ -200,10 +201,10 @@ DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.LOCK, entity_description=LockEntityDescription( - key="MatterLock", translation_key="lock" + key="MatterLock", + name=None, ), entity_class=MatterLock, required_attributes=(clusters.DoorLock.Attributes.LockState,), - optional_attributes=(clusters.DoorLock.Attributes.DoorState,), ), ] diff --git a/homeassistant/components/matter/manifest.json b/homeassistant/components/matter/manifest.json index 5488df01e4e..4573fe17401 100644 --- a/homeassistant/components/matter/manifest.json +++ b/homeassistant/components/matter/manifest.json @@ -1,11 +1,12 @@ { "domain": "matter", "name": "Matter (BETA)", + "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/matter"], "config_flow": true, "dependencies": ["websocket_api"], "documentation": "https://www.home-assistant.io/integrations/matter", "iot_class": "local_push", - "requirements": ["python-matter-server==6.3.0"], + "requirements": ["python-matter-server==6.6.0"], "zeroconf": ["_matter._tcp.local.", "_matterc._udp.local."] } diff --git a/homeassistant/components/matter/models.py b/homeassistant/components/matter/models.py index bb79d3571cf..a00963c825a 100644 --- a/homeassistant/components/matter/models.py +++ b/homeassistant/components/matter/models.py @@ -3,10 +3,10 @@ from __future__ import annotations from dataclasses import dataclass -from typing import TypedDict +from typing import Any, TypedDict from chip.clusters import Objects as clusters -from chip.clusters.Objects import 
ClusterAttributeDescriptor +from chip.clusters.Objects import Cluster, ClusterAttributeDescriptor from matter_server.client.models.device_types import DeviceType from matter_server.client.models.node import MatterEndpoint @@ -51,6 +51,9 @@ class MatterEntityInfo: # entity class to use to instantiate the entity entity_class: type + # the original discovery schema used to create this entity + discovery_schema: MatterDiscoverySchema + @property def primary_attribute(self) -> type[ClusterAttributeDescriptor]: """Return Primary Attribute belonging to the entity.""" @@ -95,15 +98,28 @@ class MatterDiscoverySchema: # [optional] the attribute's endpoint_id must match ANY of these values endpoint_id: tuple[int, ...] | None = None - # [optional] additional attributes that MAY NOT be present - # on the node for this scheme to pass + # [optional] attributes that MAY NOT be present + # (on the same endpoint) for this scheme to pass absent_attributes: tuple[type[ClusterAttributeDescriptor], ...] | None = None - # [optional] additional attributes that may be present + # [optional] cluster(s) that MAY NOT be present + # (on ANY endpoint) for this scheme to pass + absent_clusters: tuple[type[Cluster], ...] | None = None + + # [optional] additional attributes that may be present (on the same endpoint) # these attributes are copied over to attributes_to_watch and # are not discovered by other entities optional_attributes: tuple[type[ClusterAttributeDescriptor], ...] | None = None + # [optional] the primary attribute value must contain this value + # for example for the AcceptedCommandList + # NOTE: only works for list values + value_contains: Any | None = None + + # [optional] the primary attribute's cluster featuremap must contain this value + # for example for the DoorSensor on a DoorLock Cluster + featuremap_contains: int | None = None + # [optional] bool to specify if this primary value may be discovered # by multiple platforms allow_multi: bool = False diff --git a/homeassistant/components/matter/number.py b/homeassistant/components/matter/number.py index c9b40ef71a0..cc312cdc66a 100644 --- a/homeassistant/components/matter/number.py +++ b/homeassistant/components/matter/number.py @@ -5,15 +5,17 @@ from __future__ import annotations from dataclasses import dataclass from chip.clusters import Objects as clusters +from matter_server.common import custom_clusters from matter_server.common.helpers.util import create_attribute_path_from_attribute from homeassistant.components.number import ( + NumberDeviceClass, NumberEntity, NumberEntityDescription, NumberMode, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory, Platform, UnitOfTime +from homeassistant.const import EntityCategory, Platform, UnitOfLength, UnitOfTime from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -137,4 +139,20 @@ DISCOVERY_SCHEMAS = [ entity_class=MatterNumber, required_attributes=(clusters.LevelControl.Attributes.OnOffTransitionTime,), ), + MatterDiscoverySchema( + platform=Platform.NUMBER, + entity_description=MatterNumberEntityDescription( + key="EveWeatherAltitude", + device_class=NumberDeviceClass.DISTANCE, + entity_category=EntityCategory.CONFIG, + translation_key="altitude", + native_max_value=9000, + native_min_value=0, + native_unit_of_measurement=UnitOfLength.METERS, + native_step=1, + mode=NumberMode.BOX, + ), + entity_class=MatterNumber, + 
required_attributes=(custom_clusters.EveCluster.Attributes.Altitude,), + ), ] diff --git a/homeassistant/components/matter/select.py b/homeassistant/components/matter/select.py index b46cad53123..1a2fc36c014 100644 --- a/homeassistant/components/matter/select.py +++ b/homeassistant/components/matter/select.py @@ -105,7 +105,7 @@ class MatterModeSelectEntity(MatterSelectEntity): ) modes = {mode.mode: mode.label for mode in cluster.supportedModes} self._attr_options = list(modes.values()) - self._attr_current_option = modes[cluster.currentMode] + self._attr_current_option = modes.get(cluster.currentMode) # handle optional Description attribute as descriptive name for the mode if desc := getattr(cluster, "description", None): self._attr_name = desc @@ -162,23 +162,11 @@ DISCOVERY_SCHEMAS = [ clusters.RefrigeratorAndTemperatureControlledCabinetMode.Attributes.SupportedModes, ), ), - MatterDiscoverySchema( - platform=Platform.SELECT, - entity_description=MatterSelectEntityDescription( - key="MatterRvcRunMode", - translation_key="mode", - ), - entity_class=MatterModeSelectEntity, - required_attributes=( - clusters.RvcRunMode.Attributes.CurrentMode, - clusters.RvcRunMode.Attributes.SupportedModes, - ), - ), MatterDiscoverySchema( platform=Platform.SELECT, entity_description=MatterSelectEntityDescription( key="MatterRvcCleanMode", - translation_key="mode", + translation_key="clean_mode", ), entity_class=MatterModeSelectEntity, required_attributes=( @@ -228,21 +216,42 @@ DISCOVERY_SCHEMAS = [ key="MatterStartUpOnOff", entity_category=EntityCategory.CONFIG, translation_key="startup_on_off", - options=["On", "Off", "Toggle", "Previous"], - measurement_to_ha=lambda x: { # pylint: disable=unnecessary-lambda - 0: "Off", - 1: "On", - 2: "Toggle", - None: "Previous", - }.get(x), - ha_to_native_value=lambda x: { - "Off": 0, - "On": 1, - "Toggle": 2, - "Previous": None, - }[x], + options=["on", "off", "toggle", "previous"], + measurement_to_ha={ + 0: "off", + 1: "on", + 2: "toggle", + None: "previous", + }.get, + ha_to_native_value={ + "off": 0, + "on": 1, + "toggle": 2, + "previous": None, + }.get, ), entity_class=MatterSelectEntity, required_attributes=(clusters.OnOff.Attributes.StartUpOnOff,), ), + MatterDiscoverySchema( + platform=Platform.SELECT, + entity_description=MatterSelectEntityDescription( + key="SmokeCOSmokeSensitivityLevel", + entity_category=EntityCategory.CONFIG, + translation_key="sensitivity_level", + options=["high", "standard", "low"], + measurement_to_ha={ + 0: "high", + 1: "standard", + 2: "low", + }.get, + ha_to_native_value={ + "high": 0, + "standard": 1, + "low": 2, + }.get, + ), + entity_class=MatterSelectEntity, + required_attributes=(clusters.SmokeCoAlarm.Attributes.SmokeSensitivityLevel,), + ), ] diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index c3ab18072f0..d71cd52a0c6 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -3,6 +3,8 @@ from __future__ import annotations from dataclasses import dataclass +from datetime import datetime +from typing import TYPE_CHECKING, cast from chip.clusters import Objects as clusters from chip.clusters.Types import Nullable, NullValue @@ -36,6 +38,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import slugify from .entity import MatterEntity, MatterEntityDescription from .helpers import get_matter 
@@ -48,8 +51,24 @@ AIR_QUALITY_MAP = { clusters.AirQuality.Enums.AirQualityEnum.kFair: "fair", clusters.AirQuality.Enums.AirQualityEnum.kGood: "good", clusters.AirQuality.Enums.AirQualityEnum.kModerate: "moderate", - clusters.AirQuality.Enums.AirQualityEnum.kUnknown: "unknown", - clusters.AirQuality.Enums.AirQualityEnum.kUnknownEnumValue: "unknown", + clusters.AirQuality.Enums.AirQualityEnum.kUnknown: None, + clusters.AirQuality.Enums.AirQualityEnum.kUnknownEnumValue: None, +} + +CONTAMINATION_STATE_MAP = { + clusters.SmokeCoAlarm.Enums.ContaminationStateEnum.kNormal: "normal", + clusters.SmokeCoAlarm.Enums.ContaminationStateEnum.kLow: "low", + clusters.SmokeCoAlarm.Enums.ContaminationStateEnum.kWarning: "warning", + clusters.SmokeCoAlarm.Enums.ContaminationStateEnum.kCritical: "critical", +} + + +OPERATIONAL_STATE_MAP = { + # enum with known Operation state values which we can translate + clusters.OperationalState.Enums.OperationalStateEnum.kStopped: "stopped", + clusters.OperationalState.Enums.OperationalStateEnum.kRunning: "running", + clusters.OperationalState.Enums.OperationalStateEnum.kPaused: "paused", + clusters.OperationalState.Enums.OperationalStateEnum.kError: "error", } @@ -85,6 +104,42 @@ class MatterSensor(MatterEntity, SensorEntity): self._attr_native_value = value +class MatterOperationalStateSensor(MatterSensor): + """Representation of a sensor for Matter Operational State.""" + + states_map: dict[int, str] + + @callback + def _update_from_device(self) -> None: + """Update from device.""" + # the operational state list is a list of the possible operational states + # this is a dynamic list and is condition, device and manufacturer specific + # therefore it is not possible to provide a fixed list of options + # or to provide a mapping to a translateable string for all options + operational_state_list = self.get_matter_attribute_value( + clusters.OperationalState.Attributes.OperationalStateList + ) + if TYPE_CHECKING: + operational_state_list = cast( + list[clusters.OperationalState.Structs.OperationalStateStruct], + operational_state_list, + ) + states_map: dict[int, str] = {} + for state in operational_state_list: + # prefer translateable (known) state from mapping, + # fallback to the raw state label as given by the device/manufacturer + states_map[state.operationalStateID] = OPERATIONAL_STATE_MAP.get( + state.operationalStateID, slugify(state.operationalStateLabel) + ) + self.states_map = states_map + self._attr_options = list(states_map.values()) + self._attr_native_value = states_map.get( + self.get_matter_attribute_value( + clusters.OperationalState.Attributes.OperationalState + ) + ) + + # Discovery schema(s) to map Matter Attributes to HA entities DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( @@ -163,6 +218,33 @@ DISCOVERY_SCHEMAS = [ entity_class=MatterSensor, required_attributes=(clusters.PowerSource.Attributes.BatPercentRemaining,), ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="PowerSourceBatVoltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + measurement_to_ha=lambda x: x / 1000, + state_class=SensorStateClass.MEASUREMENT, + ), + entity_class=MatterSensor, + required_attributes=(clusters.PowerSource.Attributes.BatVoltage,), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="PowerSourceBatReplacementDescription", + 
translation_key="battery_replacement_description", + native_unit_of_measurement=None, + device_class=None, + entity_category=EntityCategory.DIAGNOSTIC, + ), + entity_class=MatterSensor, + required_attributes=( + clusters.PowerSource.Attributes.BatReplacementDescription, + ), + ), MatterDiscoverySchema( platform=Platform.SENSOR, entity_description=MatterSensorEntityDescription( @@ -175,6 +257,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(EveCluster.Attributes.Watt,), + absent_clusters=(clusters.ElectricalPowerMeasurement,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -188,6 +271,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(EveCluster.Attributes.Voltage,), + absent_clusters=(clusters.ElectricalPowerMeasurement,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -201,6 +285,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(EveCluster.Attributes.WattAccumulated,), + absent_clusters=(clusters.ElectricalEnergyMeasurement,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -214,6 +299,29 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(EveCluster.Attributes.Current,), + absent_clusters=(clusters.ElectricalPowerMeasurement,), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="EveThermoValvePosition", + translation_key="valve_position", + native_unit_of_measurement=PERCENTAGE, + ), + entity_class=MatterSensor, + required_attributes=(EveCluster.Attributes.ValvePosition,), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="EveWeatherPressure", + device_class=SensorDeviceClass.PRESSURE, + native_unit_of_measurement=UnitOfPressure.HPA, + suggested_display_precision=1, + state_class=SensorStateClass.MEASUREMENT, + ), + entity_class=MatterSensor, + required_attributes=(EveCluster.Attributes.Pressure,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -288,9 +396,8 @@ DISCOVERY_SCHEMAS = [ device_class=SensorDeviceClass.ENUM, state_class=None, # convert to set first to remove the duplicate unknown value - options=list(set(AIR_QUALITY_MAP.values())), + options=[x for x in AIR_QUALITY_MAP.values() if x is not None], measurement_to_ha=lambda x: AIR_QUALITY_MAP[x], - icon="mdi:air-filter", ), entity_class=MatterSensor, required_attributes=(clusters.AirQuality.Attributes.AirQuality,), @@ -342,7 +449,6 @@ DISCOVERY_SCHEMAS = [ device_class=None, state_class=SensorStateClass.MEASUREMENT, translation_key="hepa_filter_condition", - icon="mdi:filter-check", ), entity_class=MatterSensor, required_attributes=(clusters.HepaFilterMonitoring.Attributes.Condition,), @@ -355,7 +461,6 @@ DISCOVERY_SCHEMAS = [ device_class=None, state_class=SensorStateClass.MEASUREMENT, translation_key="activated_carbon_filter_condition", - icon="mdi:filter-check", ), entity_class=MatterSensor, required_attributes=( @@ -377,6 +482,7 @@ DISCOVERY_SCHEMAS = [ required_attributes=( ThirdRealityMeteringCluster.Attributes.InstantaneousDemand, ), + absent_clusters=(clusters.ElectricalPowerMeasurement,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -384,7 +490,7 @@ DISCOVERY_SCHEMAS = [ key="ThirdRealityEnergySensorWattAccumulated", device_class=SensorDeviceClass.ENERGY, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_display_precision=3, 
state_class=SensorStateClass.TOTAL_INCREASING, measurement_to_ha=lambda x: x / 1000, @@ -393,6 +499,7 @@ DISCOVERY_SCHEMAS = [ required_attributes=( ThirdRealityMeteringCluster.Attributes.CurrentSummationDelivered, ), + absent_clusters=(clusters.ElectricalEnergyMeasurement,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -407,6 +514,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(NeoCluster.Attributes.Watt,), + absent_clusters=(clusters.ElectricalPowerMeasurement,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -420,6 +528,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(NeoCluster.Attributes.WattAccumulated,), + absent_clusters=(clusters.ElectricalEnergyMeasurement,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -434,6 +543,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(NeoCluster.Attributes.Voltage,), + absent_clusters=(clusters.ElectricalPowerMeasurement,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -447,6 +557,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(NeoCluster.Attributes.Current,), + absent_clusters=(clusters.ElectricalPowerMeasurement,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -457,9 +568,111 @@ DISCOVERY_SCHEMAS = [ state_class=SensorStateClass.MEASUREMENT, translation_key="switch_current_position", entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, ), entity_class=MatterSensor, required_attributes=(clusters.Switch.Attributes.CurrentPosition,), allow_multi=True, # also used for event entity ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="ElectricalPowerMeasurementWatt", + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_display_precision=2, + state_class=SensorStateClass.MEASUREMENT, + measurement_to_ha=lambda x: x / 1000, + ), + entity_class=MatterSensor, + required_attributes=( + clusters.ElectricalPowerMeasurement.Attributes.ActivePower, + ), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="ElectricalPowerMeasurementVoltage", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + suggested_display_precision=0, + state_class=SensorStateClass.MEASUREMENT, + measurement_to_ha=lambda x: x / 1000, + ), + entity_class=MatterSensor, + required_attributes=(clusters.ElectricalPowerMeasurement.Attributes.Voltage,), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="ElectricalPowerMeasurementActiveCurrent", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + suggested_display_precision=2, + state_class=SensorStateClass.MEASUREMENT, + measurement_to_ha=lambda x: x / 1000, + ), + entity_class=MatterSensor, + required_attributes=( + clusters.ElectricalPowerMeasurement.Attributes.ActiveCurrent, + ), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="ElectricalEnergyMeasurementCumulativeEnergyImported", + device_class=SensorDeviceClass.ENERGY, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfEnergy.MILLIWATT_HOUR, + 
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + suggested_display_precision=3, + state_class=SensorStateClass.TOTAL_INCREASING, + # id 0 of the EnergyMeasurementStruct is the cumulative energy (in mWh) + measurement_to_ha=lambda x: x.energy, + ), + entity_class=MatterSensor, + required_attributes=( + clusters.ElectricalEnergyMeasurement.Attributes.CumulativeEnergyImported, + ), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="SmokeCOAlarmContaminationState", + translation_key="contamination_state", + device_class=SensorDeviceClass.ENUM, + options=list(CONTAMINATION_STATE_MAP.values()), + measurement_to_ha=CONTAMINATION_STATE_MAP.get, + ), + entity_class=MatterSensor, + required_attributes=(clusters.SmokeCoAlarm.Attributes.ContaminationState,), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="SmokeCOAlarmExpiryDate", + translation_key="expiry_date", + device_class=SensorDeviceClass.TIMESTAMP, + # raw value is epoch seconds + measurement_to_ha=datetime.fromtimestamp, + ), + entity_class=MatterSensor, + required_attributes=(clusters.SmokeCoAlarm.Attributes.ExpiryDate,), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="OperationalState", + device_class=SensorDeviceClass.ENUM, + translation_key="operational_state", + ), + entity_class=MatterOperationalStateSensor, + required_attributes=( + clusters.OperationalState.Attributes.OperationalState, + clusters.OperationalState.Attributes.OperationalStateList, + ), + ), ] diff --git a/homeassistant/components/matter/strings.json b/homeassistant/components/matter/strings.json index e69c7ae3090..ca15538997e 100644 --- a/homeassistant/components/matter/strings.json +++ b/homeassistant/components/matter/strings.json @@ -36,6 +36,7 @@ "addon_start_failed": "Failed to start the Matter Server add-on.", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "not_matter_addon": "Discovered add-on is not the official Matter Server add-on.", "reconfiguration_successful": "Successfully reconfigured the Matter integration." 
}, @@ -46,6 +47,24 @@ }, "entity": { "binary_sensor": { + "battery_alert": { + "name": "Battery alert" + }, + "end_of_service": { + "name": "End of service" + }, + "hardware_fault": { + "name": "Hardware fault" + }, + "interconnected_smoke_alarm": { + "name": "Interconnected smoke alarm" + }, + "interconnected_co_alarm": { + "name": "Interconnected CO alarm" + }, + "test_in_progress": { + "name": "Test in progress" + }, "water_leak": { "name": "Water leak" }, @@ -54,6 +73,26 @@ }, "rain": { "name": "Rain" + }, + "muted": { + "name": "Muted" + } + }, + "button": { + "pause": { + "name": "[%key:common::action::pause%]" + }, + "resume": { + "name": "Resume" + }, + "start": { + "name": "[%key:common::action::start%]" + }, + "stop": { + "name": "[%key:common::action::stop%]" + }, + "reset_filter_condition": { + "name": "Reset filter condition" } }, "climate": { @@ -119,6 +158,9 @@ }, "on_off_transition_time": { "name": "On/Off transition time" + }, + "altitude": { + "name": "Altitude above Sea Level" } }, "light": { @@ -132,17 +174,43 @@ } }, "select": { + "clean_mode": { + "name": "Clean mode" + }, "mode": { "name": "Mode" }, + "sensitivity_level": { + "name": "Sensitivity", + "state": { + "low": "[%key:component::matter::entity::fan::fan::state_attributes::preset_mode::state::low%]", + "standard": "Standard", + "high": "[%key:component::matter::entity::fan::fan::state_attributes::preset_mode::state::high%]" + } + }, "startup_on_off": { - "name": "Power-on behavior on Startup" + "name": "Power-on behavior on startup", + "state": { + "on": "[%key:common::state::on%]", + "off": "[%key:common::state::off%]", + "toggle": "[%key:common::action::toggle%]", + "previous": "Previous" + } } }, "sensor": { "activated_carbon_filter_condition": { "name": "Activated carbon filter condition" }, + "contamination_state": { + "name": "Contamination state", + "state": { + "normal": "Normal", + "low": "[%key:component::matter::entity::fan::fan::state_attributes::preset_mode::state::low%]", + "warning": "Warning", + "critical": "Critical" + } + }, "air_quality": { "name": "Air quality", "state": { @@ -151,18 +219,35 @@ "poor": "Poor", "fair": "Fair", "good": "Good", - "moderate": "Moderate", - "unknown": "Unknown" + "moderate": "Moderate" } }, + "expiry_date": { + "name": "Expiry date" + }, "flow": { "name": "Flow" }, "hepa_filter_condition": { "name": "Hepa filter condition" }, + "operational_state": { + "name": "Operational state", + "state": { + "stopped": "Stopped", + "running": "Running", + "paused": "[%key:common::state::paused%]", + "error": "Error" + } + }, "switch_current_position": { "name": "Current switch position" + }, + "valve_position": { + "name": "Valve position" + }, + "battery_replacement_description": { + "name": "Battery type" } }, "switch": { @@ -172,6 +257,16 @@ "power": { "name": "Power" } + }, + "vacuum": { + "vacuum": { + "name": "[%key:component::vacuum::title%]" + } + }, + "valve": { + "valve": { + "name": "[%key:component::valve::title%]" + } } }, "issues": { diff --git a/homeassistant/components/matter/switch.py b/homeassistant/components/matter/switch.py index 953897fdaa6..75269de953c 100644 --- a/homeassistant/components/matter/switch.py +++ b/homeassistant/components/matter/switch.py @@ -35,6 +35,8 @@ async def async_setup_entry( class MatterSwitch(MatterEntity, SwitchEntity): """Representation of a Matter switch.""" + _platform_translation_key = "switch" + async def async_turn_on(self, **kwargs: Any) -> None: """Turn switch on.""" await self.matter_client.send_device_command( @@ 
-66,7 +68,7 @@ DISCOVERY_SCHEMAS = [ entity_description=SwitchEntityDescription( key="MatterPlug", device_class=SwitchDeviceClass.OUTLET, - translation_key="switch", + name=None, ), entity_class=MatterSwitch, required_attributes=(clusters.OnOff.Attributes.OnOff,), @@ -106,7 +108,7 @@ DISCOVERY_SCHEMAS = [ entity_description=SwitchEntityDescription( key="MatterSwitch", device_class=SwitchDeviceClass.OUTLET, - translation_key="switch", + name=None, ), entity_class=MatterSwitch, required_attributes=(clusters.OnOff.Attributes.OnOff,), diff --git a/homeassistant/components/matter/update.py b/homeassistant/components/matter/update.py index 736664e0101..f31dd7b3aa3 100644 --- a/homeassistant/components/matter/update.py +++ b/homeassistant/components/matter/update.py @@ -100,21 +100,23 @@ class MatterUpdate(MatterEntity, UpdateEntity): == clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kIdle ): self._attr_in_progress = False + self._attr_update_percentage = None return update_progress: int = self.get_matter_attribute_value( clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateStateProgress ) + self._attr_in_progress = True if ( update_state == clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kDownloading and update_progress is not None and update_progress > 0 ): - self._attr_in_progress = update_progress + self._attr_update_percentage = update_progress else: - self._attr_in_progress = True + self._attr_update_percentage = None async def async_update(self) -> None: """Call when the entity needs to be updated.""" diff --git a/homeassistant/components/matter/vacuum.py b/homeassistant/components/matter/vacuum.py new file mode 100644 index 00000000000..e98e1ad0bbd --- /dev/null +++ b/homeassistant/components/matter/vacuum.py @@ -0,0 +1,223 @@ +"""Matter vacuum platform.""" + +from __future__ import annotations + +from enum import IntEnum +from typing import TYPE_CHECKING, Any + +from chip.clusters import Objects as clusters +from matter_server.client.models import device_types + +from homeassistant.components.vacuum import ( + StateVacuumEntity, + StateVacuumEntityDescription, + VacuumActivity, + VacuumEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .entity import MatterEntity +from .helpers import get_matter +from .models import MatterDiscoverySchema + + +class OperationalState(IntEnum): + """Operational State of the vacuum cleaner. + + Combination of generic OperationalState and RvcOperationalState. 
+ """ + + NO_ERROR = 0x00 + UNABLE_TO_START_OR_RESUME = 0x01 + UNABLE_TO_COMPLETE_OPERATION = 0x02 + COMMAND_INVALID_IN_STATE = 0x03 + SEEKING_CHARGER = 0x40 + CHARGING = 0x41 + DOCKED = 0x42 + + +class ModeTag(IntEnum): + """Enum with available ModeTag values.""" + + IDLE = 0x4000 # 16384 decimal + CLEANING = 0x4001 # 16385 decimal + MAPPING = 0x4002 # 16386 decimal + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Matter vacuum platform from Config Entry.""" + matter = get_matter(hass) + matter.register_platform_handler(Platform.VACUUM, async_add_entities) + + +class MatterVacuum(MatterEntity, StateVacuumEntity): + """Representation of a Matter Vacuum cleaner entity.""" + + _last_accepted_commands: list[int] | None = None + _supported_run_modes: ( + dict[int, clusters.RvcCleanMode.Structs.ModeOptionStruct] | None + ) = None + entity_description: StateVacuumEntityDescription + _platform_translation_key = "vacuum" + + async def async_stop(self, **kwargs: Any) -> None: + """Stop the vacuum cleaner.""" + await self._send_device_command(clusters.OperationalState.Commands.Stop()) + + async def async_return_to_base(self, **kwargs: Any) -> None: + """Set the vacuum cleaner to return to the dock.""" + await self._send_device_command(clusters.RvcOperationalState.Commands.GoHome()) + + async def async_locate(self, **kwargs: Any) -> None: + """Locate the vacuum cleaner.""" + await self._send_device_command(clusters.Identify.Commands.Identify()) + + async def async_start(self) -> None: + """Start or resume the cleaning task.""" + if TYPE_CHECKING: + assert self._last_accepted_commands is not None + if ( + clusters.RvcOperationalState.Commands.Resume.command_id + in self._last_accepted_commands + ): + await self._send_device_command( + clusters.RvcOperationalState.Commands.Resume() + ) + else: + await self._send_device_command(clusters.OperationalState.Commands.Start()) + + async def async_pause(self) -> None: + """Pause the cleaning task.""" + await self._send_device_command(clusters.OperationalState.Commands.Pause()) + + async def _send_device_command( + self, + command: clusters.ClusterCommand, + ) -> None: + """Send a command to the device.""" + await self.matter_client.send_device_command( + node_id=self._endpoint.node.node_id, + endpoint_id=self._endpoint.endpoint_id, + command=command, + ) + + @callback + def _update_from_device(self) -> None: + """Update from device.""" + self._calculate_features() + # optional battery level + if VacuumEntityFeature.BATTERY & self._attr_supported_features: + self._attr_battery_level = self.get_matter_attribute_value( + clusters.PowerSource.Attributes.BatPercentRemaining + ) + # derive state from the run mode + operational state + run_mode_raw: int = self.get_matter_attribute_value( + clusters.RvcRunMode.Attributes.CurrentMode + ) + operational_state: int = self.get_matter_attribute_value( + clusters.RvcOperationalState.Attributes.OperationalState + ) + state: VacuumActivity | None = None + if TYPE_CHECKING: + assert self._supported_run_modes is not None + if operational_state in (OperationalState.CHARGING, OperationalState.DOCKED): + state = VacuumActivity.DOCKED + elif operational_state == OperationalState.SEEKING_CHARGER: + state = VacuumActivity.RETURNING + elif operational_state in ( + OperationalState.UNABLE_TO_COMPLETE_OPERATION, + OperationalState.UNABLE_TO_START_OR_RESUME, + ): + state = VacuumActivity.ERROR + elif (run_mode := 
self._supported_run_modes.get(run_mode_raw)) is not None: + tags = {x.value for x in run_mode.modeTags} + if ModeTag.CLEANING in tags: + state = VacuumActivity.CLEANING + elif ModeTag.IDLE in tags: + state = VacuumActivity.IDLE + self._attr_activity = state + + @callback + def _calculate_features(self) -> None: + """Calculate features for HA Vacuum platform.""" + accepted_operational_commands: list[int] = self.get_matter_attribute_value( + clusters.RvcOperationalState.Attributes.AcceptedCommandList + ) + # in principle the feature set should not change, except for the accepted commands + if self._last_accepted_commands == accepted_operational_commands: + return + self._last_accepted_commands = accepted_operational_commands + supported_features: VacuumEntityFeature = VacuumEntityFeature(0) + supported_features |= VacuumEntityFeature.STATE + # optional battery attribute = battery feature + if self.get_matter_attribute_value( + clusters.PowerSource.Attributes.BatPercentRemaining + ): + supported_features |= VacuumEntityFeature.BATTERY + # optional identify cluster = locate feature (value must be not None or 0) + if self.get_matter_attribute_value(clusters.Identify.Attributes.IdentifyType): + supported_features |= VacuumEntityFeature.LOCATE + # create a map of supported run modes + run_modes: list[clusters.RvcCleanMode.Structs.ModeOptionStruct] = ( + self.get_matter_attribute_value( + clusters.RvcRunMode.Attributes.SupportedModes + ) + ) + self._supported_run_modes = {mode.mode: mode for mode in run_modes} + # map operational state commands to vacuum features + if ( + clusters.RvcOperationalState.Commands.Pause.command_id + in accepted_operational_commands + ): + supported_features |= VacuumEntityFeature.PAUSE + if ( + clusters.OperationalState.Commands.Stop.command_id + in accepted_operational_commands + ): + supported_features |= VacuumEntityFeature.STOP + if ( + clusters.OperationalState.Commands.Start.command_id + in accepted_operational_commands + ): + # note that start has been replaced by resume in rev2 of the spec + supported_features |= VacuumEntityFeature.START + if ( + clusters.RvcOperationalState.Commands.Resume.command_id + in accepted_operational_commands + ): + supported_features |= VacuumEntityFeature.START + if ( + clusters.RvcOperationalState.Commands.GoHome.command_id + in accepted_operational_commands + ): + supported_features |= VacuumEntityFeature.RETURN_HOME + + self._attr_supported_features = supported_features + + +# Discovery schema(s) to map Matter Attributes to HA entities +DISCOVERY_SCHEMAS = [ + MatterDiscoverySchema( + platform=Platform.VACUUM, + entity_description=StateVacuumEntityDescription( + key="MatterVacuumCleaner", name=None + ), + entity_class=MatterVacuum, + required_attributes=( + clusters.RvcRunMode.Attributes.CurrentMode, + clusters.RvcOperationalState.Attributes.CurrentPhase, + ), + optional_attributes=( + clusters.RvcCleanMode.Attributes.CurrentMode, + clusters.PowerSource.Attributes.BatPercentRemaining, + ), + device_type=(device_types.RoboticVacuumCleaner,), + ), +] diff --git a/homeassistant/components/matter/valve.py b/homeassistant/components/matter/valve.py new file mode 100644 index 00000000000..ccb4e89da17 --- /dev/null +++ b/homeassistant/components/matter/valve.py @@ -0,0 +1,153 @@ +"""Matter valve platform.""" + +from __future__ import annotations + +from chip.clusters import Objects as clusters +from matter_server.client.models import device_types + +from homeassistant.components.valve import ( + ValveDeviceClass, + ValveEntity, + 
ValveEntityDescription, + ValveEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .entity import MatterEntity +from .helpers import get_matter +from .models import MatterDiscoverySchema + +ValveConfigurationAndControl = clusters.ValveConfigurationAndControl + +ValveStateEnum = ValveConfigurationAndControl.Enums.ValveStateEnum + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Matter valve platform from Config Entry.""" + matter = get_matter(hass) + matter.register_platform_handler(Platform.VALVE, async_add_entities) + + +class MatterValve(MatterEntity, ValveEntity): + """Representation of a Matter Valve.""" + + _feature_map: int | None = None + entity_description: ValveEntityDescription + _platform_translation_key = "valve" + + async def send_device_command( + self, + command: clusters.ClusterCommand, + ) -> None: + """Send a command to the device.""" + await self.matter_client.send_device_command( + node_id=self._endpoint.node.node_id, + endpoint_id=self._endpoint.endpoint_id, + command=command, + ) + + async def async_open_valve(self) -> None: + """Open the valve.""" + await self.send_device_command(ValveConfigurationAndControl.Commands.Open()) + + async def async_close_valve(self) -> None: + """Close the valve.""" + await self.send_device_command(ValveConfigurationAndControl.Commands.Close()) + + async def async_set_valve_position(self, position: int) -> None: + """Move the valve to a specific position.""" + await self.send_device_command( + ValveConfigurationAndControl.Commands.Open(targetLevel=position) + ) + + @callback + def _update_from_device(self) -> None: + """Update from device.""" + self._calculate_features() + current_state: int + current_state = self.get_matter_attribute_value( + ValveConfigurationAndControl.Attributes.CurrentState + ) + target_state: int + target_state = self.get_matter_attribute_value( + ValveConfigurationAndControl.Attributes.TargetState + ) + if ( + current_state == ValveStateEnum.kTransitioning + and target_state == ValveStateEnum.kOpen + ): + self._attr_is_opening = True + self._attr_is_closing = False + elif ( + current_state == ValveStateEnum.kTransitioning + and target_state == ValveStateEnum.kClosed + ): + self._attr_is_opening = False + self._attr_is_closing = True + elif current_state == ValveStateEnum.kClosed: + self._attr_is_opening = False + self._attr_is_closing = False + self._attr_is_closed = True + else: + self._attr_is_opening = False + self._attr_is_closing = False + self._attr_is_closed = False + # handle optional position + if self.supported_features & ValveEntityFeature.SET_POSITION: + self._attr_current_valve_position = self.get_matter_attribute_value( + ValveConfigurationAndControl.Attributes.CurrentLevel + ) + + @callback + def _calculate_features( + self, + ) -> None: + """Calculate features for HA Valve platform from Matter FeatureMap.""" + feature_map = int( + self.get_matter_attribute_value( + ValveConfigurationAndControl.Attributes.FeatureMap + ) + ) + # NOTE: the featuremap can dynamically change, so we need to update the + # supported features if the featuremap changes. 
+ # work out supported features and presets from matter featuremap + if self._feature_map == feature_map: + return + self._feature_map = feature_map + self._attr_supported_features = ValveEntityFeature(0) + if feature_map & ValveConfigurationAndControl.Bitmaps.Feature.kLevel: + self._attr_supported_features |= ValveEntityFeature.SET_POSITION + self._attr_reports_position = True + else: + self._attr_reports_position = False + + self._attr_supported_features |= ( + ValveEntityFeature.CLOSE | ValveEntityFeature.OPEN + ) + + +# Discovery schema(s) to map Matter Attributes to HA entities +DISCOVERY_SCHEMAS = [ + MatterDiscoverySchema( + platform=Platform.VALVE, + entity_description=ValveEntityDescription( + key="MatterValve", + device_class=ValveDeviceClass.WATER, + name=None, + ), + entity_class=MatterValve, + required_attributes=( + ValveConfigurationAndControl.Attributes.CurrentState, + ValveConfigurationAndControl.Attributes.TargetState, + ), + optional_attributes=(ValveConfigurationAndControl.Attributes.CurrentLevel,), + device_type=(device_types.WaterValve,), + ), +] diff --git a/homeassistant/components/maxcube/climate.py b/homeassistant/components/maxcube/climate.py index b14efbbe073..da5a9f34dda 100644 --- a/homeassistant/components/maxcube/climate.py +++ b/homeassistant/components/maxcube/climate.py @@ -73,7 +73,6 @@ class MaxCubeClimate(ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, handler, device): """Initialize MAX! Cube ClimateEntity.""" diff --git a/homeassistant/components/maxcube/manifest.json b/homeassistant/components/maxcube/manifest.json index 6421686d2cf..d57ccacc5b1 100644 --- a/homeassistant/components/maxcube/manifest.json +++ b/homeassistant/components/maxcube/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/maxcube", "iot_class": "local_polling", "loggers": ["maxcube"], + "quality_scale": "legacy", "requirements": ["maxcube-api==0.4.3"] } diff --git a/homeassistant/components/mazda/manifest.json b/homeassistant/components/mazda/manifest.json index 75a83a9f468..fcd39e11a10 100644 --- a/homeassistant/components/mazda/manifest.json +++ b/homeassistant/components/mazda/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mazda", "integration_type": "system", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": [] } diff --git a/homeassistant/components/mealie/__init__.py b/homeassistant/components/mealie/__init__.py index 5c9c91729c0..443c8fdd991 100644 --- a/homeassistant/components/mealie/__init__.py +++ b/homeassistant/components/mealie/__init__.py @@ -2,7 +2,7 @@ from __future__ import annotations -from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionError +from aiomealie import MealieAuthenticationError, MealieClient, MealieError from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL, Platform from homeassistant.core import HomeAssistant @@ -48,11 +48,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: MealieConfigEntry) -> bo ), ) try: + await client.define_household_support() about = await client.get_about() version = create_version(about.version) except MealieAuthenticationError as error: raise ConfigEntryAuthFailed from error - except MealieConnectionError as error: + except MealieError as error: raise ConfigEntryNotReady(error) from error if not version.valid: diff --git 
a/homeassistant/components/mealie/config_flow.py b/homeassistant/components/mealie/config_flow.py index ccbedff04fc..2addd23284e 100644 --- a/homeassistant/components/mealie/config_flow.py +++ b/homeassistant/components/mealie/config_flow.py @@ -6,7 +6,7 @@ from typing import Any from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -32,13 +32,16 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN): host: str | None = None verify_ssl: bool = True - entry: ConfigEntry | None = None async def check_connection( self, api_token: str ) -> tuple[dict[str, str], str | None]: """Check connection to the Mealie API.""" assert self.host is not None + + if "/hassio/ingress/" in self.host: + return {"base": "ingress_url"}, None + client = MealieClient( self.host, token=api_token, @@ -89,7 +92,6 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN): """Perform reauth upon an API authentication error.""" self.host = entry_data[CONF_HOST] self.verify_ssl = entry_data.get(CONF_VERIFY_SSL, True) - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -102,16 +104,12 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_API_TOKEN], ) if not errors: - assert self.entry - if self.entry.unique_id == user_id: - return self.async_update_reload_and_abort( - self.entry, - data={ - **self.entry.data, - CONF_API_TOKEN: user_input[CONF_API_TOKEN], - }, - ) - return self.async_abort(reason="wrong_account") + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_API_TOKEN: user_input[CONF_API_TOKEN]}, + ) return self.async_show_form( step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, @@ -122,13 +120,6 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration of the integration.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reconfiguration confirmation.""" errors: dict[str, str] = {} if user_input: self.host = user_input[CONF_HOST] @@ -137,21 +128,18 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_API_TOKEN], ) if not errors: - assert self.entry - if self.entry.unique_id == user_id: - return self.async_update_reload_and_abort( - self.entry, - data={ - **self.entry.data, - CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL], - CONF_HOST: user_input[CONF_HOST], - CONF_API_TOKEN: user_input[CONF_API_TOKEN], - }, - reason="reconfigure_successful", - ) - return self.async_abort(reason="wrong_account") + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates={ + CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL], + CONF_HOST: user_input[CONF_HOST], + 
CONF_API_TOKEN: user_input[CONF_API_TOKEN], + }, + ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=USER_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/mealie/icons.json b/homeassistant/components/mealie/icons.json index 16176391701..d7e29cc8bbe 100644 --- a/homeassistant/components/mealie/icons.json +++ b/homeassistant/components/mealie/icons.json @@ -24,10 +24,20 @@ } }, "services": { - "get_mealplan": "mdi:food", - "get_recipe": "mdi:map", - "import_recipe": "mdi:map-search", - "set_random_mealplan": "mdi:dice-multiple", - "set_mealplan": "mdi:food" + "get_mealplan": { + "service": "mdi:food" + }, + "get_recipe": { + "service": "mdi:map" + }, + "import_recipe": { + "service": "mdi:map-search" + }, + "set_random_mealplan": { + "service": "mdi:dice-multiple" + }, + "set_mealplan": { + "service": "mdi:food" + } } } diff --git a/homeassistant/components/mealie/manifest.json b/homeassistant/components/mealie/manifest.json index 75093577b0f..c555fcbc3d6 100644 --- a/homeassistant/components/mealie/manifest.json +++ b/homeassistant/components/mealie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mealie", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["aiomealie==0.8.1"] + "requirements": ["aiomealie==0.9.4"] } diff --git a/homeassistant/components/mealie/sensor.py b/homeassistant/components/mealie/sensor.py index b4baac34ebe..141a28ecdab 100644 --- a/homeassistant/components/mealie/sensor.py +++ b/homeassistant/components/mealie/sensor.py @@ -28,31 +28,26 @@ class MealieStatisticsSensorEntityDescription(SensorEntityDescription): SENSOR_TYPES: tuple[MealieStatisticsSensorEntityDescription, ...] = ( MealieStatisticsSensorEntityDescription( key="recipes", - native_unit_of_measurement="recipes", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_recipes, ), MealieStatisticsSensorEntityDescription( key="users", - native_unit_of_measurement="users", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_users, ), MealieStatisticsSensorEntityDescription( key="categories", - native_unit_of_measurement="categories", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_categories, ), MealieStatisticsSensorEntityDescription( key="tags", - native_unit_of_measurement="tags", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_tags, ), MealieStatisticsSensorEntityDescription( key="tools", - native_unit_of_measurement="tools", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_tools, ), diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index 785dd98fea6..830d43d8f93 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -8,7 +8,7 @@ "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { - "host": "The URL of your Mealie instance." 
+ "host": "The URL of your Mealie instance, for example, http://192.168.1.123:1234" } }, "reauth_confirm": { @@ -17,7 +17,7 @@ "api_token": "[%key:common::config_flow::data::api_token%]" } }, - "reconfigure_confirm": { + "reconfigure": { "description": "Please reconfigure with Mealie.", "data": { "host": "[%key:common::config_flow::data::url%]", @@ -29,6 +29,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "ingress_url": "Ingress URLs are only used for accessing the Mealie UI. Use your Home Assistant IP address and the network port within the configuration tab of the Mealie add-on.", "unknown": "[%key:common::config_flow::error::unknown%]", "mealie_version": "Minimum required version is v1.0.0. Please upgrade Mealie and then retry." }, @@ -56,19 +57,24 @@ }, "sensor": { "recipes": { - "name": "Recipes" + "name": "Recipes", + "unit_of_measurement": "recipes" }, "users": { - "name": "Users" + "name": "Users", + "unit_of_measurement": "users" }, "categories": { - "name": "Categories" + "name": "Categories", + "unit_of_measurement": "categories" }, "tags": { - "name": "Tags" + "name": "Tags", + "unit_of_measurement": "tags" }, "tools": { - "name": "Tools" + "name": "Tools", + "unit_of_measurement": "tools" } } }, @@ -110,7 +116,7 @@ "services": { "get_mealplan": { "name": "Get mealplan", - "description": "Get meaplan from Mealie", + "description": "Get mealplan from Mealie", "fields": { "config_entry_id": { "name": "Mealie instance", diff --git a/homeassistant/components/meater/__init__.py b/homeassistant/components/meater/__init__.py index 08ca32029cb..50eff40c0e8 100644 --- a/homeassistant/components/meater/__init__.py +++ b/homeassistant/components/meater/__init__.py @@ -64,6 +64,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, # Name of the data. For logging purposes. 
name="meater_api", update_method=async_update_data, diff --git a/homeassistant/components/meater/strings.json b/homeassistant/components/meater/strings.json index 279841bb147..20dd2919026 100644 --- a/homeassistant/components/meater/strings.json +++ b/homeassistant/components/meater/strings.json @@ -19,7 +19,8 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", diff --git a/homeassistant/components/medcom_ble/__init__.py b/homeassistant/components/medcom_ble/__init__.py index 36357746b95..8603e1b9ce5 100644 --- a/homeassistant/components/medcom_ble/__init__.py +++ b/homeassistant/components/medcom_ble/__init__.py @@ -53,6 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=_async_update_method, update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), diff --git a/homeassistant/components/media_extractor/__init__.py b/homeassistant/components/media_extractor/__init__.py index b8bb5f98cd0..79fa9d6fb9a 100644 --- a/homeassistant/components/media_extractor/__init__.py +++ b/homeassistant/components/media_extractor/__init__.py @@ -16,10 +16,9 @@ from homeassistant.components.media_player import ( MEDIA_PLAYER_PLAY_MEDIA_SCHEMA, SERVICE_PLAY_MEDIA, ) -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, ServiceCall, ServiceResponse, @@ -27,7 +26,6 @@ from homeassistant.core import ( ) from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType from .const import ( @@ -43,19 +41,7 @@ _LOGGER = logging.getLogger(__name__) CONF_CUSTOMIZE_ENTITIES = "customize" CONF_DEFAULT_STREAM_QUERY = "default_query" -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Optional(CONF_DEFAULT_STREAM_QUERY): cv.string, - vol.Optional(CONF_CUSTOMIZE_ENTITIES): vol.Schema( - {cv.entity_id: vol.Schema({cv.string: cv.string})} - ), - } - ) - }, - extra=vol.ALLOW_EXTRA, -) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -67,29 +53,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the media extractor service.""" - if DOMAIN in config: - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Media extractor", - }, - ) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - ) - ) - async def extract_media_url(call: ServiceCall) -> ServiceResponse: """Extract media url.""" diff --git 
a/homeassistant/components/media_extractor/config_flow.py b/homeassistant/components/media_extractor/config_flow.py index 4343d0551e0..cb2166c35f1 100644 --- a/homeassistant/components/media_extractor/config_flow.py +++ b/homeassistant/components/media_extractor/config_flow.py @@ -24,9 +24,3 @@ class MediaExtractorConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title="Media extractor", data={}) return self.async_show_form(step_id="user", data_schema=vol.Schema({})) - - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: - """Handle import.""" - return self.async_create_entry(title="Media extractor", data={}) diff --git a/homeassistant/components/media_extractor/icons.json b/homeassistant/components/media_extractor/icons.json index 7abc4410b19..611db7c944c 100644 --- a/homeassistant/components/media_extractor/icons.json +++ b/homeassistant/components/media_extractor/icons.json @@ -1,6 +1,10 @@ { "services": { - "play_media": "mdi:play", - "extract_media_url": "mdi:link" + "play_media": { + "service": "mdi:play" + }, + "extract_media_url": { + "service": "mdi:link" + } } } diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 2285d7bce7d..21c07607573 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp==2024.08.06"], + "requirements": ["yt-dlp[default]==2024.12.13"], "single_config_entry": true } diff --git a/homeassistant/components/media_player/__init__.py b/homeassistant/components/media_player/__init__.py index beb672a1e58..e7bbe1d19bd 100644 --- a/homeassistant/components/media_player/__init__.py +++ b/homeassistant/components/media_player/__init__.py @@ -9,7 +9,7 @@ from contextlib import suppress import datetime as dt from enum import StrEnum import functools as ft -from functools import cached_property, lru_cache +from functools import lru_cache import hashlib from http import HTTPStatus import logging @@ -21,6 +21,7 @@ import aiohttp from aiohttp import web from aiohttp.hdrs import CACHE_CONTROL, CONTENT_TYPE from aiohttp.typedefs import LooseHeaders +from propcache import cached_property import voluptuous as vol from yarl import URL @@ -54,14 +55,41 @@ from homeassistant.const import ( # noqa: F401 from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.deprecation import ( + DeprecatedConstantEnum, + all_with_deprecated_constants, + check_if_deprecated_constant, + dir_with_deprecated_constants, +) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.network import get_url from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass +from homeassistant.util.hass_dict import HassKey from .browse_media import BrowseMedia, async_process_play_media_url # noqa: F401 from .const import ( # noqa: F401 + _DEPRECATED_MEDIA_CLASS_DIRECTORY, + _DEPRECATED_SUPPORT_BROWSE_MEDIA, + _DEPRECATED_SUPPORT_CLEAR_PLAYLIST, + _DEPRECATED_SUPPORT_GROUPING, + _DEPRECATED_SUPPORT_NEXT_TRACK, + _DEPRECATED_SUPPORT_PAUSE, + _DEPRECATED_SUPPORT_PLAY, + _DEPRECATED_SUPPORT_PLAY_MEDIA, + 
_DEPRECATED_SUPPORT_PREVIOUS_TRACK, + _DEPRECATED_SUPPORT_REPEAT_SET, + _DEPRECATED_SUPPORT_SEEK, + _DEPRECATED_SUPPORT_SELECT_SOUND_MODE, + _DEPRECATED_SUPPORT_SELECT_SOURCE, + _DEPRECATED_SUPPORT_SHUFFLE_SET, + _DEPRECATED_SUPPORT_STOP, + _DEPRECATED_SUPPORT_TURN_OFF, + _DEPRECATED_SUPPORT_TURN_ON, + _DEPRECATED_SUPPORT_VOLUME_MUTE, + _DEPRECATED_SUPPORT_VOLUME_SET, + _DEPRECATED_SUPPORT_VOLUME_STEP, ATTR_APP_ID, ATTR_APP_NAME, ATTR_ENTITY_PICTURE_LOCAL, @@ -95,7 +123,6 @@ from .const import ( # noqa: F401 ATTR_SOUND_MODE_LIST, CONTENT_AUTH_EXPIRY_TIME, DOMAIN, - MEDIA_CLASS_DIRECTORY, REPEAT_MODES, SERVICE_CLEAR_PLAYLIST, SERVICE_JOIN, @@ -103,25 +130,6 @@ from .const import ( # noqa: F401 SERVICE_SELECT_SOUND_MODE, SERVICE_SELECT_SOURCE, SERVICE_UNJOIN, - SUPPORT_BROWSE_MEDIA, - SUPPORT_CLEAR_PLAYLIST, - SUPPORT_GROUPING, - SUPPORT_NEXT_TRACK, - SUPPORT_PAUSE, - SUPPORT_PLAY, - SUPPORT_PLAY_MEDIA, - SUPPORT_PREVIOUS_TRACK, - SUPPORT_REPEAT_SET, - SUPPORT_SEEK, - SUPPORT_SELECT_SOUND_MODE, - SUPPORT_SELECT_SOURCE, - SUPPORT_SHUFFLE_SET, - SUPPORT_STOP, - SUPPORT_TURN_OFF, - SUPPORT_TURN_ON, - SUPPORT_VOLUME_MUTE, - SUPPORT_VOLUME_SET, - SUPPORT_VOLUME_STEP, MediaClass, MediaPlayerEntityFeature, MediaPlayerState, @@ -132,6 +140,7 @@ from .errors import BrowseError _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[MediaPlayerEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -170,10 +179,16 @@ DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.Coerce(MediaPlayerDeviceClass)) # DEVICE_CLASS* below are deprecated as of 2021.12 # use the MediaPlayerDeviceClass enum instead. +_DEPRECATED_DEVICE_CLASS_TV = DeprecatedConstantEnum( + MediaPlayerDeviceClass.TV, "2025.10" +) +_DEPRECATED_DEVICE_CLASS_SPEAKER = DeprecatedConstantEnum( + MediaPlayerDeviceClass.SPEAKER, "2025.10" +) +_DEPRECATED_DEVICE_CLASS_RECEIVER = DeprecatedConstantEnum( + MediaPlayerDeviceClass.RECEIVER, "2025.10" +) DEVICE_CLASSES = [cls.value for cls in MediaPlayerDeviceClass] -DEVICE_CLASS_TV = MediaPlayerDeviceClass.TV.value -DEVICE_CLASS_SPEAKER = MediaPlayerDeviceClass.SPEAKER.value -DEVICE_CLASS_RECEIVER = MediaPlayerDeviceClass.RECEIVER.value MEDIA_PLAYER_PLAY_MEDIA_SCHEMA = { @@ -264,7 +279,7 @@ def _rename_keys(**keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for media_players.""" - component = hass.data[DOMAIN] = EntityComponent[MediaPlayerEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[MediaPlayerEntity]( logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL ) @@ -438,14 +453,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[MediaPlayerEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[MediaPlayerEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class MediaPlayerEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -760,19 +773,6 @@ class MediaPlayerEntity(Entity, 
cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Flag media player features that are supported.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> MediaPlayerEntityFeature: - """Return the supported features as MediaPlayerEntityFeature. - - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = MediaPlayerEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - def turn_on(self) -> None: """Turn the media player on.""" raise NotImplementedError @@ -912,87 +912,85 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): @property def support_play(self) -> bool: """Boolean if play is supported.""" - return MediaPlayerEntityFeature.PLAY in self.supported_features_compat + return MediaPlayerEntityFeature.PLAY in self.supported_features @final @property def support_pause(self) -> bool: """Boolean if pause is supported.""" - return MediaPlayerEntityFeature.PAUSE in self.supported_features_compat + return MediaPlayerEntityFeature.PAUSE in self.supported_features @final @property def support_stop(self) -> bool: """Boolean if stop is supported.""" - return MediaPlayerEntityFeature.STOP in self.supported_features_compat + return MediaPlayerEntityFeature.STOP in self.supported_features @final @property def support_seek(self) -> bool: """Boolean if seek is supported.""" - return MediaPlayerEntityFeature.SEEK in self.supported_features_compat + return MediaPlayerEntityFeature.SEEK in self.supported_features @final @property def support_volume_set(self) -> bool: """Boolean if setting volume is supported.""" - return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat + return MediaPlayerEntityFeature.VOLUME_SET in self.supported_features @final @property def support_volume_mute(self) -> bool: """Boolean if muting volume is supported.""" - return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features_compat + return MediaPlayerEntityFeature.VOLUME_MUTE in self.supported_features @final @property def support_previous_track(self) -> bool: """Boolean if previous track command supported.""" - return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features_compat + return MediaPlayerEntityFeature.PREVIOUS_TRACK in self.supported_features @final @property def support_next_track(self) -> bool: """Boolean if next track command supported.""" - return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features_compat + return MediaPlayerEntityFeature.NEXT_TRACK in self.supported_features @final @property def support_play_media(self) -> bool: """Boolean if play media command supported.""" - return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features_compat + return MediaPlayerEntityFeature.PLAY_MEDIA in self.supported_features @final @property def support_select_source(self) -> bool: """Boolean if select source command supported.""" - return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features_compat + return MediaPlayerEntityFeature.SELECT_SOURCE in self.supported_features @final @property def support_select_sound_mode(self) -> bool: """Boolean if select sound mode command supported.""" - return ( - MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features_compat - ) + return MediaPlayerEntityFeature.SELECT_SOUND_MODE in self.supported_features @final @property def support_clear_playlist(self) -> 
bool: """Boolean if clear playlist command supported.""" - return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features_compat + return MediaPlayerEntityFeature.CLEAR_PLAYLIST in self.supported_features @final @property def support_shuffle_set(self) -> bool: """Boolean if shuffle is supported.""" - return MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features_compat + return MediaPlayerEntityFeature.SHUFFLE_SET in self.supported_features @final @property def support_grouping(self) -> bool: """Boolean if player grouping is supported.""" - return MediaPlayerEntityFeature.GROUPING in self.supported_features_compat + return MediaPlayerEntityFeature.GROUPING in self.supported_features async def async_toggle(self) -> None: """Toggle the power on the media player.""" @@ -1021,7 +1019,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if ( self.volume_level is not None and self.volume_level < 1 - and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat + and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features ): await self.async_set_volume_level( min(1, self.volume_level + self.volume_step) @@ -1039,7 +1037,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if ( self.volume_level is not None and self.volume_level > 0 - and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features_compat + and MediaPlayerEntityFeature.VOLUME_SET in self.supported_features ): await self.async_set_volume_level( max(0, self.volume_level - self.volume_step) @@ -1082,7 +1080,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def capability_attributes(self) -> dict[str, Any]: """Return capability attributes.""" data: dict[str, Any] = {} - supported_features = self.supported_features_compat + supported_features = self.supported_features if ( source_list := self.source_list @@ -1282,14 +1280,13 @@ async def websocket_browse_media( To use, media_player integrations can implement MediaPlayerEntity.async_browse_media() """ - component: EntityComponent[MediaPlayerEntity] = hass.data[DOMAIN] - player = component.get_entity(msg["entity_id"]) + player = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"]) if player is None: connection.send_error(msg["id"], "entity_not_found", "Entity not found") return - if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features_compat: + if MediaPlayerEntityFeature.BROWSE_MEDIA not in player.supported_features: connection.send_message( websocket_api.error_message( msg["id"], ERR_NOT_SUPPORTED, "Player does not support browsing media" @@ -1359,3 +1356,13 @@ async def async_fetch_image( logger.warning("Error retrieving proxied image from %s", url) return content, content_type + + +# As we import deprecated constants from the const module, we need to add these two functions +# otherwise this module will be logged for using deprecated constants and not the custom component +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = ft.partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/media_player/browse_media.py b/homeassistant/components/media_player/browse_media.py index 351d4e9140f..c917164a2ee 100644 --- a/homeassistant/components/media_player/browse_media.py +++ 
b/homeassistant/components/media_player/browse_media.py @@ -23,7 +23,7 @@ from homeassistant.helpers.network import ( from .const import CONTENT_AUTH_EXPIRY_TIME, MediaClass, MediaType # Paths that we don't need to sign -PATHS_WITHOUT_AUTH = ("/api/tts_proxy/",) +PATHS_WITHOUT_AUTH = ("/api/tts_proxy/", "/api/esphome/ffmpeg_proxy/") @callback @@ -46,6 +46,8 @@ def async_process_play_media_url( elif media_content_id[0] != "/": return media_content_id + # https://github.com/pylint-dev/pylint/issues/3484 + # pylint: disable-next=using-constant-test if parsed.query: logging.getLogger(__name__).debug( "Not signing path for content with query param" diff --git a/homeassistant/components/media_player/const.py b/homeassistant/components/media_player/const.py index 9b69ee62846..ca2f3307846 100644 --- a/homeassistant/components/media_player/const.py +++ b/homeassistant/components/media_player/const.py @@ -1,6 +1,14 @@ """Provides the constants needed for component.""" from enum import IntFlag, StrEnum +from functools import partial + +from homeassistant.helpers.deprecation import ( + DeprecatedConstantEnum, + all_with_deprecated_constants, + check_if_deprecated_constant, + dir_with_deprecated_constants, +) # How long our auth signature on the content should be valid for CONTENT_AUTH_EXPIRY_TIME = 3600 * 24 @@ -79,26 +87,34 @@ class MediaClass(StrEnum): # These MEDIA_CLASS_* constants are deprecated as of Home Assistant 2022.10. # Please use the MediaClass enum instead. -MEDIA_CLASS_ALBUM = "album" -MEDIA_CLASS_APP = "app" -MEDIA_CLASS_ARTIST = "artist" -MEDIA_CLASS_CHANNEL = "channel" -MEDIA_CLASS_COMPOSER = "composer" -MEDIA_CLASS_CONTRIBUTING_ARTIST = "contributing_artist" -MEDIA_CLASS_DIRECTORY = "directory" -MEDIA_CLASS_EPISODE = "episode" -MEDIA_CLASS_GAME = "game" -MEDIA_CLASS_GENRE = "genre" -MEDIA_CLASS_IMAGE = "image" -MEDIA_CLASS_MOVIE = "movie" -MEDIA_CLASS_MUSIC = "music" -MEDIA_CLASS_PLAYLIST = "playlist" -MEDIA_CLASS_PODCAST = "podcast" -MEDIA_CLASS_SEASON = "season" -MEDIA_CLASS_TRACK = "track" -MEDIA_CLASS_TV_SHOW = "tv_show" -MEDIA_CLASS_URL = "url" -MEDIA_CLASS_VIDEO = "video" +_DEPRECATED_MEDIA_CLASS_ALBUM = DeprecatedConstantEnum(MediaClass.ALBUM, "2025.10") +_DEPRECATED_MEDIA_CLASS_APP = DeprecatedConstantEnum(MediaClass.APP, "2025.10") +_DEPRECATED_MEDIA_CLASS_ARTIST = DeprecatedConstantEnum(MediaClass.ARTIST, "2025.10") +_DEPRECATED_MEDIA_CLASS_CHANNEL = DeprecatedConstantEnum(MediaClass.CHANNEL, "2025.10") +_DEPRECATED_MEDIA_CLASS_COMPOSER = DeprecatedConstantEnum( + MediaClass.COMPOSER, "2025.10" +) +_DEPRECATED_MEDIA_CLASS_CONTRIBUTING_ARTIST = DeprecatedConstantEnum( + MediaClass.CONTRIBUTING_ARTIST, "2025.10" +) +_DEPRECATED_MEDIA_CLASS_DIRECTORY = DeprecatedConstantEnum( + MediaClass.DIRECTORY, "2025.10" +) +_DEPRECATED_MEDIA_CLASS_EPISODE = DeprecatedConstantEnum(MediaClass.EPISODE, "2025.10") +_DEPRECATED_MEDIA_CLASS_GAME = DeprecatedConstantEnum(MediaClass.GAME, "2025.10") +_DEPRECATED_MEDIA_CLASS_GENRE = DeprecatedConstantEnum(MediaClass.GENRE, "2025.10") +_DEPRECATED_MEDIA_CLASS_IMAGE = DeprecatedConstantEnum(MediaClass.IMAGE, "2025.10") +_DEPRECATED_MEDIA_CLASS_MOVIE = DeprecatedConstantEnum(MediaClass.MOVIE, "2025.10") +_DEPRECATED_MEDIA_CLASS_MUSIC = DeprecatedConstantEnum(MediaClass.MUSIC, "2025.10") +_DEPRECATED_MEDIA_CLASS_PLAYLIST = DeprecatedConstantEnum( + MediaClass.PLAYLIST, "2025.10" +) +_DEPRECATED_MEDIA_CLASS_PODCAST = DeprecatedConstantEnum(MediaClass.PODCAST, "2025.10") +_DEPRECATED_MEDIA_CLASS_SEASON = DeprecatedConstantEnum(MediaClass.SEASON, 
"2025.10") +_DEPRECATED_MEDIA_CLASS_TRACK = DeprecatedConstantEnum(MediaClass.TRACK, "2025.10") +_DEPRECATED_MEDIA_CLASS_TV_SHOW = DeprecatedConstantEnum(MediaClass.TV_SHOW, "2025.10") +_DEPRECATED_MEDIA_CLASS_URL = DeprecatedConstantEnum(MediaClass.URL, "2025.10") +_DEPRECATED_MEDIA_CLASS_VIDEO = DeprecatedConstantEnum(MediaClass.VIDEO, "2025.10") class MediaType(StrEnum): @@ -129,27 +145,30 @@ class MediaType(StrEnum): # These MEDIA_TYPE_* constants are deprecated as of Home Assistant 2022.10. # Please use the MediaType enum instead. -MEDIA_TYPE_ALBUM = "album" -MEDIA_TYPE_APP = "app" -MEDIA_TYPE_APPS = "apps" -MEDIA_TYPE_ARTIST = "artist" -MEDIA_TYPE_CHANNEL = "channel" -MEDIA_TYPE_CHANNELS = "channels" -MEDIA_TYPE_COMPOSER = "composer" -MEDIA_TYPE_CONTRIBUTING_ARTIST = "contributing_artist" -MEDIA_TYPE_EPISODE = "episode" -MEDIA_TYPE_GAME = "game" -MEDIA_TYPE_GENRE = "genre" -MEDIA_TYPE_IMAGE = "image" -MEDIA_TYPE_MOVIE = "movie" -MEDIA_TYPE_MUSIC = "music" -MEDIA_TYPE_PLAYLIST = "playlist" -MEDIA_TYPE_PODCAST = "podcast" -MEDIA_TYPE_SEASON = "season" -MEDIA_TYPE_TRACK = "track" -MEDIA_TYPE_TVSHOW = "tvshow" -MEDIA_TYPE_URL = "url" -MEDIA_TYPE_VIDEO = "video" +_DEPRECATED_MEDIA_TYPE_ALBUM = DeprecatedConstantEnum(MediaType.ALBUM, "2025.10") +_DEPRECATED_MEDIA_TYPE_APP = DeprecatedConstantEnum(MediaType.APP, "2025.10") +_DEPRECATED_MEDIA_TYPE_APPS = DeprecatedConstantEnum(MediaType.APPS, "2025.10") +_DEPRECATED_MEDIA_TYPE_ARTIST = DeprecatedConstantEnum(MediaType.ARTIST, "2025.10") +_DEPRECATED_MEDIA_TYPE_CHANNEL = DeprecatedConstantEnum(MediaType.CHANNEL, "2025.10") +_DEPRECATED_MEDIA_TYPE_CHANNELS = DeprecatedConstantEnum(MediaType.CHANNELS, "2025.10") +_DEPRECATED_MEDIA_TYPE_COMPOSER = DeprecatedConstantEnum(MediaType.COMPOSER, "2025.10") +_DEPRECATED_MEDIA_TYPE_CONTRIBUTING_ARTIST = DeprecatedConstantEnum( + MediaType.CONTRIBUTING_ARTIST, "2025.10" +) +_DEPRECATED_MEDIA_TYPE_EPISODE = DeprecatedConstantEnum(MediaType.EPISODE, "2025.10") +_DEPRECATED_MEDIA_TYPE_GAME = DeprecatedConstantEnum(MediaType.GAME, "2025.10") +_DEPRECATED_MEDIA_TYPE_GENRE = DeprecatedConstantEnum(MediaType.GENRE, "2025.10") +_DEPRECATED_MEDIA_TYPE_IMAGE = DeprecatedConstantEnum(MediaType.IMAGE, "2025.10") +_DEPRECATED_MEDIA_TYPE_MOVIE = DeprecatedConstantEnum(MediaType.MOVIE, "2025.10") +_DEPRECATED_MEDIA_TYPE_MUSIC = DeprecatedConstantEnum(MediaType.MUSIC, "2025.10") +_DEPRECATED_MEDIA_TYPE_PLAYLIST = DeprecatedConstantEnum(MediaType.PLAYLIST, "2025.10") +_DEPRECATED_MEDIA_TYPE_PODCAST = DeprecatedConstantEnum(MediaType.PODCAST, "2025.10") +_DEPRECATED_MEDIA_TYPE_SEASON = DeprecatedConstantEnum(MediaType.SEASON, "2025.10") +_DEPRECATED_MEDIA_TYPE_TRACK = DeprecatedConstantEnum(MediaType.TRACK, "2025.10") +_DEPRECATED_MEDIA_TYPE_TVSHOW = DeprecatedConstantEnum(MediaType.TVSHOW, "2025.10") +_DEPRECATED_MEDIA_TYPE_URL = DeprecatedConstantEnum(MediaType.URL, "2025.10") +_DEPRECATED_MEDIA_TYPE_VIDEO = DeprecatedConstantEnum(MediaType.VIDEO, "2025.10") + SERVICE_CLEAR_PLAYLIST = "clear_playlist" SERVICE_JOIN = "join" @@ -169,10 +188,10 @@ class RepeatMode(StrEnum): # These REPEAT_MODE_* constants are deprecated as of Home Assistant 2022.10. # Please use the RepeatMode enum instead. 
-REPEAT_MODE_ALL = "all" -REPEAT_MODE_OFF = "off" -REPEAT_MODE_ONE = "one" -REPEAT_MODES = [REPEAT_MODE_OFF, REPEAT_MODE_ALL, REPEAT_MODE_ONE] +_DEPRECATED_REPEAT_MODE_ALL = DeprecatedConstantEnum(RepeatMode.ALL, "2025.10") +_DEPRECATED_REPEAT_MODE_OFF = DeprecatedConstantEnum(RepeatMode.OFF, "2025.10") +_DEPRECATED_REPEAT_MODE_ONE = DeprecatedConstantEnum(RepeatMode.ONE, "2025.10") +REPEAT_MODES = [cls.value for cls in RepeatMode] class MediaPlayerEntityFeature(IntFlag): @@ -204,23 +223,67 @@ class MediaPlayerEntityFeature(IntFlag): # These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. # Please use the MediaPlayerEntityFeature enum instead. -SUPPORT_PAUSE = 1 -SUPPORT_SEEK = 2 -SUPPORT_VOLUME_SET = 4 -SUPPORT_VOLUME_MUTE = 8 -SUPPORT_PREVIOUS_TRACK = 16 -SUPPORT_NEXT_TRACK = 32 +_DEPRECATED_SUPPORT_PAUSE = DeprecatedConstantEnum( + MediaPlayerEntityFeature.PAUSE, "2025.10" +) +_DEPRECATED_SUPPORT_SEEK = DeprecatedConstantEnum( + MediaPlayerEntityFeature.SEEK, "2025.10" +) +_DEPRECATED_SUPPORT_VOLUME_SET = DeprecatedConstantEnum( + MediaPlayerEntityFeature.VOLUME_SET, "2025.10" +) +_DEPRECATED_SUPPORT_VOLUME_MUTE = DeprecatedConstantEnum( + MediaPlayerEntityFeature.VOLUME_MUTE, "2025.10" +) +_DEPRECATED_SUPPORT_PREVIOUS_TRACK = DeprecatedConstantEnum( + MediaPlayerEntityFeature.PREVIOUS_TRACK, "2025.10" +) +_DEPRECATED_SUPPORT_NEXT_TRACK = DeprecatedConstantEnum( + MediaPlayerEntityFeature.NEXT_TRACK, "2025.10" +) +_DEPRECATED_SUPPORT_TURN_ON = DeprecatedConstantEnum( + MediaPlayerEntityFeature.TURN_ON, "2025.10" +) +_DEPRECATED_SUPPORT_TURN_OFF = DeprecatedConstantEnum( + MediaPlayerEntityFeature.TURN_OFF, "2025.10" +) +_DEPRECATED_SUPPORT_PLAY_MEDIA = DeprecatedConstantEnum( + MediaPlayerEntityFeature.PLAY_MEDIA, "2025.10" +) +_DEPRECATED_SUPPORT_VOLUME_STEP = DeprecatedConstantEnum( + MediaPlayerEntityFeature.VOLUME_STEP, "2025.10" +) +_DEPRECATED_SUPPORT_SELECT_SOURCE = DeprecatedConstantEnum( + MediaPlayerEntityFeature.SELECT_SOURCE, "2025.10" +) +_DEPRECATED_SUPPORT_STOP = DeprecatedConstantEnum( + MediaPlayerEntityFeature.STOP, "2025.10" +) +_DEPRECATED_SUPPORT_CLEAR_PLAYLIST = DeprecatedConstantEnum( + MediaPlayerEntityFeature.CLEAR_PLAYLIST, "2025.10" +) +_DEPRECATED_SUPPORT_PLAY = DeprecatedConstantEnum( + MediaPlayerEntityFeature.PLAY, "2025.10" +) +_DEPRECATED_SUPPORT_SHUFFLE_SET = DeprecatedConstantEnum( + MediaPlayerEntityFeature.SHUFFLE_SET, "2025.10" +) +_DEPRECATED_SUPPORT_SELECT_SOUND_MODE = DeprecatedConstantEnum( + MediaPlayerEntityFeature.SELECT_SOUND_MODE, "2025.10" +) +_DEPRECATED_SUPPORT_BROWSE_MEDIA = DeprecatedConstantEnum( + MediaPlayerEntityFeature.BROWSE_MEDIA, "2025.10" +) +_DEPRECATED_SUPPORT_REPEAT_SET = DeprecatedConstantEnum( + MediaPlayerEntityFeature.REPEAT_SET, "2025.10" +) +_DEPRECATED_SUPPORT_GROUPING = DeprecatedConstantEnum( + MediaPlayerEntityFeature.GROUPING, "2025.10" +) -SUPPORT_TURN_ON = 128 -SUPPORT_TURN_OFF = 256 -SUPPORT_PLAY_MEDIA = 512 -SUPPORT_VOLUME_STEP = 1024 -SUPPORT_SELECT_SOURCE = 2048 -SUPPORT_STOP = 4096 -SUPPORT_CLEAR_PLAYLIST = 8192 -SUPPORT_PLAY = 16384 -SUPPORT_SHUFFLE_SET = 32768 -SUPPORT_SELECT_SOUND_MODE = 65536 -SUPPORT_BROWSE_MEDIA = 131072 -SUPPORT_REPEAT_SET = 262144 -SUPPORT_GROUPING = 524288 +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) diff --git 
a/homeassistant/components/media_player/icons.json b/homeassistant/components/media_player/icons.json index 847ce5989d6..c11211c38ec 100644 --- a/homeassistant/components/media_player/icons.json +++ b/homeassistant/components/media_player/icons.json @@ -32,27 +32,71 @@ } }, "services": { - "clear_playlist": "mdi:playlist-remove", - "join": "mdi:group", - "media_next_track": "mdi:skip-next", - "media_pause": "mdi:pause", - "media_play": "mdi:play", - "media_play_pause": "mdi:play-pause", - "media_previous_track": "mdi:skip-previous", - "media_seek": "mdi:fast-forward", - "media_stop": "mdi:stop", - "play_media": "mdi:play", - "repeat_set": "mdi:repeat", - "select_sound_mode": "mdi:surround-sound", - "select_source": "mdi:import", - "shuffle_set": "mdi:shuffle", - "toggle": "mdi:play-pause", - "turn_off": "mdi:power", - "turn_on": "mdi:power", - "unjoin": "mdi:ungroup", - "volume_down": "mdi:volume-minus", - "volume_mute": "mdi:volume-mute", - "volume_set": "mdi:volume-medium", - "volume_up": "mdi:volume-plus" + "clear_playlist": { + "service": "mdi:playlist-remove" + }, + "join": { + "service": "mdi:group" + }, + "media_next_track": { + "service": "mdi:skip-next" + }, + "media_pause": { + "service": "mdi:pause" + }, + "media_play": { + "service": "mdi:play" + }, + "media_play_pause": { + "service": "mdi:play-pause" + }, + "media_previous_track": { + "service": "mdi:skip-previous" + }, + "media_seek": { + "service": "mdi:fast-forward" + }, + "media_stop": { + "service": "mdi:stop" + }, + "play_media": { + "service": "mdi:play" + }, + "repeat_set": { + "service": "mdi:repeat" + }, + "select_sound_mode": { + "service": "mdi:surround-sound" + }, + "select_source": { + "service": "mdi:import" + }, + "shuffle_set": { + "service": "mdi:shuffle" + }, + "toggle": { + "service": "mdi:play-pause" + }, + "turn_off": { + "service": "mdi:power" + }, + "turn_on": { + "service": "mdi:power" + }, + "unjoin": { + "service": "mdi:ungroup" + }, + "volume_down": { + "service": "mdi:volume-minus" + }, + "volume_mute": { + "service": "mdi:volume-mute" + }, + "volume_set": { + "service": "mdi:volume-medium" + }, + "volume_up": { + "service": "mdi:volume-plus" + } } } diff --git a/homeassistant/components/media_player/strings.json b/homeassistant/components/media_player/strings.json index ff246e420ce..1c9ba929b38 100644 --- a/homeassistant/components/media_player/strings.json +++ b/homeassistant/components/media_player/strings.json @@ -282,7 +282,7 @@ }, "clear_playlist": { "name": "Clear playlist", - "description": "Clears the playlist." + "description": "Removes all items from the playlist." 
}, "shuffle_set": { "name": "Shuffle", diff --git a/homeassistant/components/media_source/__init__.py b/homeassistant/components/media_source/__init__.py index 928e46ab528..3ea8f581245 100644 --- a/homeassistant/components/media_source/__init__.py +++ b/homeassistant/components/media_source/__init__.py @@ -13,14 +13,12 @@ from homeassistant.components.media_player import ( CONTENT_AUTH_EXPIRY_TIME, BrowseError, BrowseMedia, -) -from homeassistant.components.media_player.browse_media import ( async_process_play_media_url, ) from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.frame import report +from homeassistant.helpers.frame import report_usage from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) @@ -158,9 +156,9 @@ async def async_resolve_media( raise Unresolvable("Media Source not loaded") if target_media_player is UNDEFINED: - report( + report_usage( "calls media_source.async_resolve_media without passing an entity_id", - {DOMAIN}, + exclude_integrations={DOMAIN}, ) target_media_player = None diff --git a/homeassistant/components/media_source/local_source.py b/homeassistant/components/media_source/local_source.py index dff851896dd..7916f72c6b9 100644 --- a/homeassistant/components/media_source/local_source.py +++ b/homeassistant/components/media_source/local_source.py @@ -225,7 +225,7 @@ class LocalMediaView(http.HomeAssistantView): media_path = self.source.async_full_path(source_dir_id, location) # Check that the file exists - if not media_path.is_file(): + if not self.hass.async_add_executor_job(media_path.is_file): raise web.HTTPNotFound # Check that it's a media file diff --git a/homeassistant/components/mediaroom/manifest.json b/homeassistant/components/mediaroom/manifest.json index 4cd7b11c22f..060a40b036a 100644 --- a/homeassistant/components/mediaroom/manifest.json +++ b/homeassistant/components/mediaroom/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mediaroom", "iot_class": "local_polling", "loggers": ["pymediaroom"], + "quality_scale": "legacy", "requirements": ["pymediaroom==0.6.5.4"] } diff --git a/homeassistant/components/mediaroom/media_player.py b/homeassistant/components/mediaroom/media_player.py index 8e60609fbac..97b61da437a 100644 --- a/homeassistant/components/mediaroom/media_player.py +++ b/homeassistant/components/mediaroom/media_player.py @@ -149,7 +149,7 @@ class MediaroomDevice(MediaPlayerEntity): self.host = host self.stb = Remote(host) - _LOGGER.info( + _LOGGER.debug( "Found STB at %s%s", host, " - I'm optimistic" if optimistic else "" ) self._channel = None diff --git a/homeassistant/components/melcloud/climate.py b/homeassistant/components/melcloud/climate.py index 08b3658c270..4defd47bc39 100644 --- a/homeassistant/components/melcloud/climate.py +++ b/homeassistant/components/melcloud/climate.py @@ -115,7 +115,6 @@ class MelCloudClimate(ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: MelCloudDevice) -> None: """Initialize the climate.""" diff --git a/homeassistant/components/melcloud/config_flow.py b/homeassistant/components/melcloud/config_flow.py index c4392535364..b604ee5016e 100644 --- a/homeassistant/components/melcloud/config_flow.py +++ 
b/homeassistant/components/melcloud/config_flow.py @@ -12,7 +12,7 @@ from aiohttp import ClientError, ClientResponseError import pymelcloud import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -25,7 +25,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 - entry: ConfigEntry | None = None async def _create_entry(self, username: str, token: str) -> ConfigFlowResult: """Register new entry.""" @@ -82,7 +81,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with MELCloud.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -91,19 +89,13 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Handle re-authentication with MELCloud.""" errors: dict[str, str] = {} - if user_input is not None and self.entry: + if user_input is not None: aquired_token, errors = await self.async_reauthenticate_client(user_input) if not errors: - self.hass.config_entries.async_update_entry( - self.entry, - data={CONF_TOKEN: aquired_token}, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data={CONF_TOKEN: aquired_token} ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") - return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema( @@ -150,21 +142,14 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors: dict[str, str] = {} acquired_token = None - assert self.entry + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: - user_input[CONF_USERNAME] = self.entry.data[CONF_USERNAME] + user_input[CONF_USERNAME] = reconfigure_entry.data[CONF_USERNAME] try: async with asyncio.timeout(10): acquired_token = await pymelcloud.login( @@ -195,18 +180,18 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): if not errors: user_input[CONF_TOKEN] = acquired_token return self.async_update_reload_and_abort( - self.entry, - data={**self.entry.data, **user_input}, - reason="reconfigure_successful", + reconfigure_entry, data_updates=user_input ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_PASSWORD): str, } ), errors=errors, - description_placeholders={CONF_USERNAME: self.entry.data[CONF_USERNAME]}, + description_placeholders={ + CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME] + }, ) diff --git a/homeassistant/components/melcloud/diagnostics.py b/homeassistant/components/melcloud/diagnostics.py index 8c2ad0818ff..31e52bf2bde 100644 --- a/homeassistant/components/melcloud/diagnostics.py +++ 
b/homeassistant/components/melcloud/diagnostics.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/melcloud/icons.json b/homeassistant/components/melcloud/icons.json index de3eb3c0ba2..b91696b5b35 100644 --- a/homeassistant/components/melcloud/icons.json +++ b/homeassistant/components/melcloud/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "set_vane_horizontal": "mdi:arrow-left-right", - "set_vane_vertical": "mdi:arrow-up-down" + "set_vane_horizontal": { + "service": "mdi:arrow-left-right" + }, + "set_vane_vertical": { + "service": "mdi:arrow-up-down" + } } } diff --git a/homeassistant/components/melcloud/strings.json b/homeassistant/components/melcloud/strings.json index 968f9cf4e50..19ef0b76aad 100644 --- a/homeassistant/components/melcloud/strings.json +++ b/homeassistant/components/melcloud/strings.json @@ -17,7 +17,7 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Reconfigure your MelCloud", "description": "Reconfigure the entry to obtain a new token, for your account: `{username}`.", "data": { @@ -36,7 +36,9 @@ "abort": { "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "already_configured": "MELCloud integration already configured for this email. Access token has been refreshed.", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" } }, "services": { diff --git a/homeassistant/components/melissa/climate.py b/homeassistant/components/melissa/climate.py index 0ad663faa2a..ff68820d70f 100644 --- a/homeassistant/components/melissa/climate.py +++ b/homeassistant/components/melissa/climate.py @@ -65,7 +65,6 @@ class MelissaClimate(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, api, serial_number, init_data): """Initialize the climate device.""" diff --git a/homeassistant/components/melissa/manifest.json b/homeassistant/components/melissa/manifest.json index 60d1d7f145f..a583c3b88fa 100644 --- a/homeassistant/components/melissa/manifest.json +++ b/homeassistant/components/melissa/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/melissa", "iot_class": "cloud_polling", "loggers": ["melissa"], + "quality_scale": "legacy", "requirements": ["py-melissa-climate==2.1.4"] } diff --git a/homeassistant/components/melnor/models.py b/homeassistant/components/melnor/entity.py similarity index 100% rename from homeassistant/components/melnor/models.py rename to homeassistant/components/melnor/entity.py diff --git a/homeassistant/components/melnor/number.py b/homeassistant/components/melnor/number.py index beaa0fd913b..15c47008346 100644 --- a/homeassistant/components/melnor/number.py +++ b/homeassistant/components/melnor/number.py @@ -20,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback 
from .const import DOMAIN from .coordinator import MelnorDataUpdateCoordinator -from .models import MelnorZoneEntity, get_entities_for_valves +from .entity import MelnorZoneEntity, get_entities_for_valves @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/melnor/sensor.py b/homeassistant/components/melnor/sensor.py index 233dada8ab2..bbb3416dcc9 100644 --- a/homeassistant/components/melnor/sensor.py +++ b/homeassistant/components/melnor/sensor.py @@ -28,7 +28,7 @@ from homeassistant.util import dt as dt_util from .const import DOMAIN from .coordinator import MelnorDataUpdateCoordinator -from .models import MelnorBluetoothEntity, MelnorZoneEntity, get_entities_for_valves +from .entity import MelnorBluetoothEntity, MelnorZoneEntity, get_entities_for_valves def watering_seconds_left(valve: Valve) -> datetime | None: diff --git a/homeassistant/components/melnor/switch.py b/homeassistant/components/melnor/switch.py index efa779f04b0..d7fb96739b3 100644 --- a/homeassistant/components/melnor/switch.py +++ b/homeassistant/components/melnor/switch.py @@ -19,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .coordinator import MelnorDataUpdateCoordinator -from .models import MelnorZoneEntity, get_entities_for_valves +from .entity import MelnorZoneEntity, get_entities_for_valves @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/melnor/time.py b/homeassistant/components/melnor/time.py index 373a22c8ff4..08de7e054de 100644 --- a/homeassistant/components/melnor/time.py +++ b/homeassistant/components/melnor/time.py @@ -17,7 +17,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .coordinator import MelnorDataUpdateCoordinator -from .models import MelnorZoneEntity, get_entities_for_valves +from .entity import MelnorZoneEntity, get_entities_for_valves @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/meraki/manifest.json b/homeassistant/components/meraki/manifest.json index 4fb7d27d4bb..5b8690ae52d 100644 --- a/homeassistant/components/meraki/manifest.json +++ b/homeassistant/components/meraki/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/meraki", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/message_bird/manifest.json b/homeassistant/components/message_bird/manifest.json index d5118dc3486..3b3c56029c5 100644 --- a/homeassistant/components/message_bird/manifest.json +++ b/homeassistant/components/message_bird/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/message_bird", "iot_class": "cloud_push", "loggers": ["messagebird"], + "quality_scale": "legacy", "requirements": ["messagebird==1.2.0"] } diff --git a/homeassistant/components/met/config_flow.py b/homeassistant/components/met/config_flow.py index 84a44682413..62964d22bb1 100644 --- a/homeassistant/components/met/config_flow.py +++ b/homeassistant/components/met/config_flow.py @@ -11,7 +11,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_ELEVATION, @@ -143,12 +142,12 @@ class MetConfigFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> MetOptionsFlowHandler: 
"""Get the options flow for Met.""" - return MetOptionsFlowHandler(config_entry) + return MetOptionsFlowHandler() -class MetOptionsFlowHandler(OptionsFlowWithConfigEntry): +class MetOptionsFlowHandler(OptionsFlow): """Options flow for Met component.""" async def async_step_init( @@ -159,13 +158,13 @@ class MetOptionsFlowHandler(OptionsFlowWithConfigEntry): if user_input is not None: # Update config entry with data from user input self.hass.config_entries.async_update_entry( - self._config_entry, data=user_input + self.config_entry, data=user_input ) return self.async_create_entry( - title=self._config_entry.title, data=user_input + title=self.config_entry.title, data=user_input ) return self.async_show_form( step_id="init", - data_schema=_get_data_schema(self.hass, config_entry=self._config_entry), + data_schema=_get_data_schema(self.hass, config_entry=self.config_entry), ) diff --git a/homeassistant/components/met/const.py b/homeassistant/components/met/const.py index c513e98504e..ccc0662b3c3 100644 --- a/homeassistant/components/met/const.py +++ b/homeassistant/components/met/const.py @@ -21,12 +21,14 @@ from homeassistant.components.weather import ( ATTR_FORECAST_NATIVE_WIND_SPEED, ATTR_FORECAST_PRECIPITATION_PROBABILITY, ATTR_FORECAST_TIME, + ATTR_FORECAST_UV_INDEX, ATTR_FORECAST_WIND_BEARING, ATTR_WEATHER_CLOUD_COVERAGE, ATTR_WEATHER_DEW_POINT, ATTR_WEATHER_HUMIDITY, ATTR_WEATHER_PRESSURE, ATTR_WEATHER_TEMPERATURE, + ATTR_WEATHER_UV_INDEX, ATTR_WEATHER_VISIBILITY, ATTR_WEATHER_WIND_BEARING, ATTR_WEATHER_WIND_GUST_SPEED, @@ -190,6 +192,7 @@ FORECAST_MAP = { ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: "wind_gust", ATTR_FORECAST_CLOUD_COVERAGE: "cloudiness", ATTR_FORECAST_HUMIDITY: "humidity", + ATTR_FORECAST_UV_INDEX: "uv_index", } ATTR_MAP = { @@ -202,4 +205,5 @@ ATTR_MAP = { ATTR_WEATHER_WIND_GUST_SPEED: "wind_gust", ATTR_WEATHER_CLOUD_COVERAGE: "cloudiness", ATTR_WEATHER_DEW_POINT: "dew_point", + ATTR_WEATHER_UV_INDEX: "uv_index", } diff --git a/homeassistant/components/met/manifest.json b/homeassistant/components/met/manifest.json index e900c5a012a..1a145589a68 100644 --- a/homeassistant/components/met/manifest.json +++ b/homeassistant/components/met/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/met", "iot_class": "cloud_polling", "loggers": ["metno"], - "requirements": ["PyMetno==0.12.0"] + "requirements": ["PyMetno==0.13.0"] } diff --git a/homeassistant/components/met/weather.py b/homeassistant/components/met/weather.py index 809bb792b2c..7b95567366b 100644 --- a/homeassistant/components/met/weather.py +++ b/homeassistant/components/met/weather.py @@ -13,6 +13,7 @@ from homeassistant.components.weather import ( ATTR_WEATHER_HUMIDITY, ATTR_WEATHER_PRESSURE, ATTR_WEATHER_TEMPERATURE, + ATTR_WEATHER_UV_INDEX, ATTR_WEATHER_WIND_BEARING, ATTR_WEATHER_WIND_GUST_SPEED, ATTR_WEATHER_WIND_SPEED, @@ -208,6 +209,13 @@ class MetWeather(SingleCoordinatorWeatherEntity[MetDataUpdateCoordinator]): ATTR_MAP[ATTR_WEATHER_DEW_POINT] ) + @property + def uv_index(self) -> float | None: + """Return the uv index.""" + return self.coordinator.data.current_weather_data.get( + ATTR_MAP[ATTR_WEATHER_UV_INDEX] + ) + def _forecast(self, hourly: bool) -> list[Forecast] | None: """Return the forecast array.""" if hourly: diff --git a/homeassistant/components/met_eireann/__init__.py b/homeassistant/components/met_eireann/__init__.py index 7d0e6401bd6..ab2695cbd11 100644 --- a/homeassistant/components/met_eireann/__init__.py +++ 
b/homeassistant/components/met_eireann/__init__.py @@ -46,6 +46,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=config_entry, name=DOMAIN, update_method=_async_update_data, update_interval=UPDATE_INTERVAL, diff --git a/homeassistant/components/met_eireann/manifest.json b/homeassistant/components/met_eireann/manifest.json index 72afc6977dd..7b913df4d3c 100644 --- a/homeassistant/components/met_eireann/manifest.json +++ b/homeassistant/components/met_eireann/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/met_eireann", "iot_class": "cloud_polling", "loggers": ["meteireann"], - "requirements": ["PyMetEireann==2021.8.0"] + "requirements": ["PyMetEireann==2024.11.0"] } diff --git a/homeassistant/components/met_eireann/strings.json b/homeassistant/components/met_eireann/strings.json index 984f46d71d6..d8c2918e6d3 100644 --- a/homeassistant/components/met_eireann/strings.json +++ b/homeassistant/components/met_eireann/strings.json @@ -12,6 +12,9 @@ } } }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" } diff --git a/homeassistant/components/meteo_france/__init__.py b/homeassistant/components/meteo_france/__init__.py index ddba982934c..1d4f8293c5e 100644 --- a/homeassistant/components/meteo_france/__init__.py +++ b/homeassistant/components/meteo_france/__init__.py @@ -75,24 +75,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if not coordinator_forecast.last_update_success: raise ConfigEntryNotReady - # Check if rain forecast is available. - if coordinator_forecast.data.position.get("rain_product_available") == 1: - coordinator_rain = DataUpdateCoordinator( - hass, - _LOGGER, - name=f"Météo-France rain for city {entry.title}", - update_method=_async_update_data_rain, - update_interval=SCAN_INTERVAL_RAIN, - ) - await coordinator_rain.async_refresh() - - if not coordinator_rain.last_update_success: - raise ConfigEntryNotReady - else: - _LOGGER.warning( - "1 hour rain forecast not available. %s is not in covered zone", - entry.title, - ) + # Check rain forecast. 
+ coordinator_rain = DataUpdateCoordinator( + hass, + _LOGGER, + name=f"Météo-France rain for city {entry.title}", + update_method=_async_update_data_rain, + update_interval=SCAN_INTERVAL_RAIN, + ) + await coordinator_rain.async_config_entry_first_refresh() department = coordinator_forecast.data.position.get("dept") _LOGGER.debug( diff --git a/homeassistant/components/meteoalarm/manifest.json b/homeassistant/components/meteoalarm/manifest.json index 4de91f6a431..58b6a63ed1d 100644 --- a/homeassistant/components/meteoalarm/manifest.json +++ b/homeassistant/components/meteoalarm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/meteoalarm", "iot_class": "cloud_polling", "loggers": ["meteoalertapi"], + "quality_scale": "legacy", "requirements": ["meteoalertapi==0.3.1"] } diff --git a/homeassistant/components/meteoclimatic/__init__.py b/homeassistant/components/meteoclimatic/__init__.py index f81d60c3d00..8c2fb41c634 100644 --- a/homeassistant/components/meteoclimatic/__init__.py +++ b/homeassistant/components/meteoclimatic/__init__.py @@ -32,6 +32,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"Meteoclimatic weather for {entry.title} ({station_code})", update_method=async_update_data, update_interval=SCAN_INTERVAL, diff --git a/homeassistant/components/metoffice/__init__.py b/homeassistant/components/metoffice/__init__.py index 18fc121d5d3..1d516bbc4f5 100644 --- a/homeassistant/components/metoffice/__init__.py +++ b/homeassistant/components/metoffice/__init__.py @@ -109,6 +109,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: metoffice_hourly_coordinator = TimestampDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"MetOffice Hourly Coordinator for {site_name}", update_method=async_update_3hourly, update_interval=DEFAULT_SCAN_INTERVAL, @@ -117,6 +118,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: metoffice_daily_coordinator = TimestampDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"MetOffice Daily Coordinator for {site_name}", update_method=async_update_daily, update_interval=DEFAULT_SCAN_INTERVAL, diff --git a/homeassistant/components/mfi/manifest.json b/homeassistant/components/mfi/manifest.json index b569009d400..3024fe145c5 100644 --- a/homeassistant/components/mfi/manifest.json +++ b/homeassistant/components/mfi/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mfi", "iot_class": "local_polling", "loggers": ["mficlient"], + "quality_scale": "legacy", "requirements": ["mficlient==0.5.0"] } diff --git a/homeassistant/components/microbees/config_flow.py b/homeassistant/components/microbees/config_flow.py index 4d0f5b4474b..92fa40b24f0 100644 --- a/homeassistant/components/microbees/config_flow.py +++ b/homeassistant/components/microbees/config_flow.py @@ -6,8 +6,7 @@ from typing import Any from microBeesPy import MicroBees, MicroBeesException -from homeassistant import config_entries -from homeassistant.config_entries import ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow @@ -20,7 +19,6 @@ class OAuth2FlowHandler( """Handle a config flow for microBees.""" DOMAIN = DOMAIN - reauth_entry: config_entries.ConfigEntry | 
None = None @property def logger(self) -> logging.Logger: @@ -49,26 +47,21 @@ class OAuth2FlowHandler( self.logger.exception("Unexpected error") return self.async_abort(reason="unknown") - if not self.reauth_entry: - await self.async_set_unique_id(current_user.id) + await self.async_set_unique_id(current_user.id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( title=current_user.username, data=data, ) - if self.reauth_entry.unique_id == current_user.id: - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - return self.async_abort(reason="wrong_account") + + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort(self._get_reauth_entry(), data=data) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/microbees/manifest.json b/homeassistant/components/microbees/manifest.json index 91b7d66d80f..be28bf881d2 100644 --- a/homeassistant/components/microbees/manifest.json +++ b/homeassistant/components/microbees/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/microbees", "iot_class": "cloud_polling", - "requirements": ["microBeesPy==0.3.2"] + "requirements": ["microBeesPy==0.3.5"] } diff --git a/homeassistant/components/microbees/strings.json b/homeassistant/components/microbees/strings.json index 49d42af83d3..8635753a564 100644 --- a/homeassistant/components/microbees/strings.json +++ b/homeassistant/components/microbees/strings.json @@ -21,6 +21,7 @@ "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "unknown": "[%key:common::config_flow::error::unknown%]", "wrong_account": "You can only reauthenticate this entry with the same microBees account." 
}, diff --git a/homeassistant/components/microsoft/manifest.json b/homeassistant/components/microsoft/manifest.json index dba2f58ba98..3d8f0629cec 100644 --- a/homeassistant/components/microsoft/manifest.json +++ b/homeassistant/components/microsoft/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/microsoft", "iot_class": "cloud_push", "loggers": ["pycsspeechtts"], + "quality_scale": "legacy", "requirements": ["pycsspeechtts==1.0.8"] } diff --git a/homeassistant/components/microsoft_face/icons.json b/homeassistant/components/microsoft_face/icons.json index 826e390197a..6e61676224d 100644 --- a/homeassistant/components/microsoft_face/icons.json +++ b/homeassistant/components/microsoft_face/icons.json @@ -1,10 +1,22 @@ { "services": { - "create_group": "mdi:account-multiple-plus", - "create_person": "mdi:account-plus", - "delete_group": "mdi:account-multiple-remove", - "delete_person": "mdi:account-remove", - "face_person": "mdi:face-man", - "train_group": "mdi:account-multiple-check" + "create_group": { + "service": "mdi:account-multiple-plus" + }, + "create_person": { + "service": "mdi:account-plus" + }, + "delete_group": { + "service": "mdi:account-multiple-remove" + }, + "delete_person": { + "service": "mdi:account-remove" + }, + "face_person": { + "service": "mdi:face-man" + }, + "train_group": { + "service": "mdi:account-multiple-check" + } } } diff --git a/homeassistant/components/microsoft_face/manifest.json b/homeassistant/components/microsoft_face/manifest.json index 0ef18a12271..e13d1c76ccb 100644 --- a/homeassistant/components/microsoft_face/manifest.json +++ b/homeassistant/components/microsoft_face/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["camera"], "documentation": "https://www.home-assistant.io/integrations/microsoft_face", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/microsoft_face_detect/manifest.json b/homeassistant/components/microsoft_face_detect/manifest.json index 1b72ce92c95..f3f9f0fa095 100644 --- a/homeassistant/components/microsoft_face_detect/manifest.json +++ b/homeassistant/components/microsoft_face_detect/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["microsoft_face"], "documentation": "https://www.home-assistant.io/integrations/microsoft_face_detect", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/microsoft_face_identify/manifest.json b/homeassistant/components/microsoft_face_identify/manifest.json index 63418ac2a0b..b3964ee1254 100644 --- a/homeassistant/components/microsoft_face_identify/manifest.json +++ b/homeassistant/components/microsoft_face_identify/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["microsoft_face"], "documentation": "https://www.home-assistant.io/integrations/microsoft_face_identify", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/mikrotik/config_flow.py b/homeassistant/components/mikrotik/config_flow.py index fe0d020d373..bca394f0d38 100644 --- a/homeassistant/components/mikrotik/config_flow.py +++ b/homeassistant/components/mikrotik/config_flow.py @@ -39,7 +39,6 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a Mikrotik config flow.""" VERSION = 1 - _reauth_entry: ConfigEntry | None @staticmethod @callback @@ -47,7 +46,7 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): 
config_entry: ConfigEntry, ) -> MikrotikOptionsFlowHandler: """Get the options flow for this handler.""" - return MikrotikOptionsFlowHandler(config_entry) + return MikrotikOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -83,11 +82,10 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -95,9 +93,10 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - assert self._reauth_entry + + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input = {**self._reauth_entry.data, **user_input} + user_input = {**reauth_entry.data, **user_input} try: await self.hass.async_add_executor_job(get_api, user_input) except CannotConnect: @@ -106,17 +105,10 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): errors[CONF_PASSWORD] = "invalid_auth" if not errors: - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data=user_input, - ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=user_input) return self.async_show_form( - description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] - }, + description_placeholders={CONF_USERNAME: reauth_entry.data[CONF_USERNAME]}, step_id="reauth_confirm", data_schema=vol.Schema( { @@ -130,10 +122,6 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): class MikrotikOptionsFlowHandler(OptionsFlow): """Handle Mikrotik options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Mikrotik options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/mikrotik/device_tracker.py b/homeassistant/components/mikrotik/device_tracker.py index aa19da01369..c2d9e0d2f33 100644 --- a/homeassistant/components/mikrotik/device_tracker.py +++ b/homeassistant/components/mikrotik/device_tracker.py @@ -7,7 +7,6 @@ from typing import Any from homeassistant.components.device_tracker import ( DOMAIN as DEVICE_TRACKER, ScannerEntity, - SourceType, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er @@ -94,11 +93,6 @@ class MikrotikDataUpdateCoordinatorTracker( return True return False - @property - def source_type(self) -> SourceType: - """Return the source type of the client.""" - return SourceType.ROUTER - @property def hostname(self) -> str: """Return the hostname of the client.""" diff --git a/homeassistant/components/mill/climate.py b/homeassistant/components/mill/climate.py index 5c5c7882634..4f700d24e1b 100644 --- a/homeassistant/components/mill/climate.py +++ b/homeassistant/components/mill/climate.py @@ -100,7 +100,6 @@ class MillHeater(CoordinatorEntity[MillDataUpdateCoordinator], ClimateEntity): ) _attr_target_temperature_step = PRECISION_TENTHS _attr_temperature_unit = UnitOfTemperature.CELSIUS - 
_enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: MillDataUpdateCoordinator, heater: mill.Heater @@ -194,7 +193,6 @@ class LocalMillHeater(CoordinatorEntity[MillDataUpdateCoordinator], ClimateEntit ) _attr_target_temperature_step = PRECISION_TENTHS _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: MillDataUpdateCoordinator) -> None: """Initialize the thermostat.""" diff --git a/homeassistant/components/mill/config_flow.py b/homeassistant/components/mill/config_flow.py index db1b2711575..7b2e5c3c4d5 100644 --- a/homeassistant/components/mill/config_flow.py +++ b/homeassistant/components/mill/config_flow.py @@ -43,7 +43,9 @@ class MillConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_local() return await self.async_step_cloud() - async def async_step_local(self, user_input=None): + async def async_step_local( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle the local step.""" data_schema = vol.Schema({vol.Required(CONF_IP_ADDRESS): str}) if user_input is None: @@ -75,7 +77,9 @@ class MillConfigFlow(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_cloud(self, user_input=None): + async def async_step_cloud( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle the cloud step.""" data_schema = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} diff --git a/homeassistant/components/mill/icons.json b/homeassistant/components/mill/icons.json index 13d6bb650c1..f2595f28057 100644 --- a/homeassistant/components/mill/icons.json +++ b/homeassistant/components/mill/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_room_temperature": "mdi:thermometer" + "set_room_temperature": { + "service": "mdi:thermometer" + } } } diff --git a/homeassistant/components/mill/manifest.json b/homeassistant/components/mill/manifest.json index 16e7bf552ba..6316eb72096 100644 --- a/homeassistant/components/mill/manifest.json +++ b/homeassistant/components/mill/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mill", "iot_class": "local_polling", "loggers": ["mill", "mill_local"], - "requirements": ["millheater==0.11.8", "mill-local==0.3.0"] + "requirements": ["millheater==0.12.2", "mill-local==0.3.0"] } diff --git a/homeassistant/components/mill/sensor.py b/homeassistant/components/mill/sensor.py index 64b9008a82b..c4b975ab039 100644 --- a/homeassistant/components/mill/sensor.py +++ b/homeassistant/components/mill/sensor.py @@ -57,6 +57,19 @@ HEATER_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, state_class=SensorStateClass.TOTAL_INCREASING, ), + SensorEntityDescription( + key="current_power", + translation_key="current_power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + ), + SensorEntityDescription( + key="control_signal", + translation_key="control_signal", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), ) SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( @@ -118,6 +131,16 @@ LOCAL_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( ), ) +SOCKET_SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( + SensorEntityDescription( + key=HUMIDITY, + device_class=SensorDeviceClass.HUMIDITY, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + *HEATER_SENSOR_TYPES, +) + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback @@ -145,7 +168,9 @@ async def async_setup_entry( ) for mill_device in mill_data_coordinator.data.values() for entity_description in ( - HEATER_SENSOR_TYPES + SOCKET_SENSOR_TYPES + if isinstance(mill_device, mill.Socket) + else HEATER_SENSOR_TYPES if isinstance(mill_device, mill.Heater) else SENSOR_TYPES ) diff --git a/homeassistant/components/min_max/icons.json b/homeassistant/components/min_max/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/min_max/icons.json +++ b/homeassistant/components/min_max/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/minecraft_server/__init__.py b/homeassistant/components/minecraft_server/__init__.py index 0a9eee6a0d5..8f016e2de00 100644 --- a/homeassistant/components/minecraft_server/__init__.py +++ b/homeassistant/components/minecraft_server/__init__.py @@ -5,6 +5,10 @@ from __future__ import annotations import logging from typing import Any +import dns.rdata +import dns.rdataclass +import dns.rdatatype + from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_ADDRESS, @@ -28,9 +32,19 @@ PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] _LOGGER = logging.getLogger(__name__) +def load_dnspython_rdata_classes() -> None: + """Load dnspython rdata classes used by mcstatus.""" + for rdtype in dns.rdatatype.RdataType: + if not dns.rdatatype.is_metatype(rdtype) or rdtype == dns.rdatatype.OPT: + dns.rdata.get_rdata_class(dns.rdataclass.IN, rdtype) # type: ignore[no-untyped-call] + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Minecraft Server from a config entry.""" + # Workaround to avoid blocking imports from dnspython (https://github.com/rthalley/dnspython/issues/1083) + await hass.async_add_executor_job(load_dnspython_rdata_classes) + # Create API instance.
api = MinecraftServer( hass, diff --git a/homeassistant/components/minecraft_server/diagnostics.py b/homeassistant/components/minecraft_server/diagnostics.py index 1cae535dc43..0bcffe1434a 100644 --- a/homeassistant/components/minecraft_server/diagnostics.py +++ b/homeassistant/components/minecraft_server/diagnostics.py @@ -4,7 +4,7 @@ from collections.abc import Iterable from dataclasses import asdict from typing import Any -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ADDRESS, CONF_NAME from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/minecraft_server/manifest.json b/homeassistant/components/minecraft_server/manifest.json index 8e098f98a15..d6ade4853c9 100644 --- a/homeassistant/components/minecraft_server/manifest.json +++ b/homeassistant/components/minecraft_server/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/minecraft_server", "iot_class": "local_polling", "loggers": ["dnspython", "mcstatus"], - "quality_scale": "platinum", "requirements": ["mcstatus==11.1.1"] } diff --git a/homeassistant/components/minio/__init__.py b/homeassistant/components/minio/__init__.py index e5470cc3313..57a9632a6ff 100644 --- a/homeassistant/components/minio/__init__.py +++ b/homeassistant/components/minio/__init__.py @@ -73,11 +73,11 @@ CONFIG_SCHEMA = vol.Schema( ) BUCKET_KEY_SCHEMA = vol.Schema( - {vol.Required(ATTR_BUCKET): cv.template, vol.Required(ATTR_KEY): cv.template} + {vol.Required(ATTR_BUCKET): cv.string, vol.Required(ATTR_KEY): cv.string} ) BUCKET_KEY_FILE_SCHEMA = BUCKET_KEY_SCHEMA.extend( - {vol.Required(ATTR_FILE_PATH): cv.template} + {vol.Required(ATTR_FILE_PATH): cv.string} ) @@ -125,15 +125,11 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: get_minio_endpoint(host, port), access_key, secret_key, secure ) - def _render_service_value(service, key): - value = service.data[key] - return value.async_render(parse_result=False) - def put_file(service: ServiceCall) -> None: """Upload file service.""" - bucket = _render_service_value(service, ATTR_BUCKET) - key = _render_service_value(service, ATTR_KEY) - file_path = _render_service_value(service, ATTR_FILE_PATH) + bucket = service.data[ATTR_BUCKET] + key = service.data[ATTR_KEY] + file_path = service.data[ATTR_FILE_PATH] if not hass.config.is_allowed_path(file_path): raise ValueError(f"Invalid file_path {file_path}") @@ -142,9 +138,9 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: def get_file(service: ServiceCall) -> None: """Download file service.""" - bucket = _render_service_value(service, ATTR_BUCKET) - key = _render_service_value(service, ATTR_KEY) - file_path = _render_service_value(service, ATTR_FILE_PATH) + bucket = service.data[ATTR_BUCKET] + key = service.data[ATTR_KEY] + file_path = service.data[ATTR_FILE_PATH] if not hass.config.is_allowed_path(file_path): raise ValueError(f"Invalid file_path {file_path}") @@ -153,8 +149,8 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: def remove_file(service: ServiceCall) -> None: """Delete file service.""" - bucket = _render_service_value(service, ATTR_BUCKET) - key = _render_service_value(service, ATTR_KEY) + bucket = service.data[ATTR_BUCKET] + key = service.data[ATTR_KEY] minio_client.remove_object(bucket, key) @@ -181,7 +177,7 @@ class QueueListener(threading.Thread): def run(self): 
"""Listen to queue events, and forward them to Home Assistant event bus.""" - _LOGGER.info("Running QueueListener") + _LOGGER.debug("Running QueueListener") while True: if (event := self._queue.get()) is None: break @@ -203,10 +199,10 @@ class QueueListener(threading.Thread): def stop(self): """Stop run by putting None into queue and join the thread.""" - _LOGGER.info("Stopping QueueListener") + _LOGGER.debug("Stopping QueueListener") self._queue.put(None) self.join() - _LOGGER.info("Stopped QueueListener") + _LOGGER.debug("Stopped QueueListener") def start_handler(self, _): """Start handler helper method.""" diff --git a/homeassistant/components/minio/icons.json b/homeassistant/components/minio/icons.json index 16deb1a168d..dce148a23de 100644 --- a/homeassistant/components/minio/icons.json +++ b/homeassistant/components/minio/icons.json @@ -1,7 +1,13 @@ { "services": { - "get": "mdi:cloud-download", - "put": "mdi:cloud-upload", - "remove": "mdi:delete" + "get": { + "service": "mdi:cloud-download" + }, + "put": { + "service": "mdi:cloud-upload" + }, + "remove": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/minio/manifest.json b/homeassistant/components/minio/manifest.json index 5fee7893841..3ab6b82bb86 100644 --- a/homeassistant/components/minio/manifest.json +++ b/homeassistant/components/minio/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/minio", "iot_class": "cloud_push", "loggers": ["minio"], + "quality_scale": "legacy", "requirements": ["minio==7.1.12"] } diff --git a/homeassistant/components/minio/minio_helper.py b/homeassistant/components/minio/minio_helper.py index bd814bdf349..6b0021406f7 100644 --- a/homeassistant/components/minio/minio_helper.py +++ b/homeassistant/components/minio/minio_helper.py @@ -116,7 +116,7 @@ class MinioEventThread(threading.Thread): def run(self): """Create MinioClient and run the loop.""" - _LOGGER.info("Running MinioEventThread") + _LOGGER.debug("Running MinioEventThread") self._should_stop = False @@ -125,7 +125,7 @@ class MinioEventThread(threading.Thread): ) while not self._should_stop: - _LOGGER.info("Connecting to minio event stream") + _LOGGER.debug("Connecting to minio event stream") response = None try: response = get_minio_notification_response( diff --git a/homeassistant/components/mjpeg/config_flow.py b/homeassistant/components/mjpeg/config_flow.py index 84267936788..e0150f8c461 100644 --- a/homeassistant/components/mjpeg/config_flow.py +++ b/homeassistant/components/mjpeg/config_flow.py @@ -141,7 +141,7 @@ class MJPEGFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> MJPEGOptionsFlowHandler: """Get the options flow for this handler.""" - return MJPEGOptionsFlowHandler(config_entry) + return MJPEGOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -183,10 +183,6 @@ class MJPEGFlowHandler(ConfigFlow, domain=DOMAIN): class MJPEGOptionsFlowHandler(OptionsFlow): """Handle MJPEG IP Camera options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize MJPEG IP Camera options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/mobile_app/__init__.py b/homeassistant/components/mobile_app/__init__.py index 80893e0cbfa..9fadca31b50 100644 --- a/homeassistant/components/mobile_app/__init__.py +++ b/homeassistant/components/mobile_app/__init__.py @@ -4,6 +4,7 @@ from 
contextlib import suppress from functools import partial from typing import Any +from homeassistant.auth import EVENT_USER_REMOVED from homeassistant.components import cloud, intent, notify as hass_notify from homeassistant.components.webhook import ( async_register as webhook_register, @@ -11,7 +12,7 @@ from homeassistant.components.webhook import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_DEVICE_ID, CONF_WEBHOOK_ID, Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from homeassistant.helpers import ( config_validation as cv, device_registry as dr, @@ -36,6 +37,7 @@ from .const import ( ATTR_MODEL, ATTR_OS_VERSION, CONF_CLOUDHOOK_URL, + CONF_USER_ID, DATA_CONFIG_ENTRIES, DATA_DELETED_IDS, DATA_DEVICES, @@ -90,6 +92,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: websocket_api.async_setup_commands(hass) + async def _handle_user_removed(event: Event) -> None: + """Remove an entry when the user is removed.""" + user_id = event.data["user_id"] + for entry in hass.config_entries.async_entries(DOMAIN): + if entry.data[CONF_USER_ID] == user_id: + await hass.config_entries.async_remove(entry.entry_id) + + hass.bus.async_listen(EVENT_USER_REMOVED, _handle_user_removed) + return True diff --git a/homeassistant/components/mobile_app/binary_sensor.py b/homeassistant/components/mobile_app/binary_sensor.py index 58683ef378c..e19e00b1277 100644 --- a/homeassistant/components/mobile_app/binary_sensor.py +++ b/homeassistant/components/mobile_app/binary_sensor.py @@ -69,7 +69,7 @@ async def async_setup_entry( class MobileAppBinarySensor(MobileAppEntity, BinarySensorEntity): - """Representation of an mobile app binary sensor.""" + """Representation of a mobile app binary sensor.""" async def async_restore_last_state(self, last_state: State) -> None: """Restore previous state.""" diff --git a/homeassistant/components/mobile_app/config_flow.py b/homeassistant/components/mobile_app/config_flow.py index bd72b2d7f42..33c0442b529 100644 --- a/homeassistant/components/mobile_app/config_flow.py +++ b/homeassistant/components/mobile_app/config_flow.py @@ -28,7 +28,9 @@ class MobileAppFlowHandler(ConfigFlow, domain=DOMAIN): reason="install_app", description_placeholders=placeholders ) - async def async_step_registration(self, user_input=None): + async def async_step_registration( + self, user_input: dict[str, Any] + ) -> ConfigFlowResult: """Handle a flow initialized during registration.""" if ATTR_DEVICE_ID in user_input: # Unique ID is combi of app + device ID. 
diff --git a/homeassistant/components/mobile_app/device_tracker.py b/homeassistant/components/mobile_app/device_tracker.py index 2c7a4147811..7e84930e2e9 100644 --- a/homeassistant/components/mobile_app/device_tracker.py +++ b/homeassistant/components/mobile_app/device_tracker.py @@ -5,7 +5,6 @@ from homeassistant.components.device_tracker import ( ATTR_GPS, ATTR_GPS_ACCURACY, ATTR_LOCATION_NAME, - SourceType, TrackerEntity, ) from homeassistant.config_entries import ConfigEntry @@ -103,11 +102,6 @@ class MobileAppEntity(TrackerEntity, RestoreEntity): """Return the name of the device.""" return self._entry.data[ATTR_DEVICE_NAME] - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS - @property def device_info(self): """Return the device info.""" diff --git a/homeassistant/components/mobile_app/entity.py b/homeassistant/components/mobile_app/entity.py index f1f7b592621..a0ad4c45963 100644 --- a/homeassistant/components/mobile_app/entity.py +++ b/homeassistant/components/mobile_app/entity.py @@ -1,4 +1,4 @@ -"""A entity class for mobile_app.""" +"""An entity class for mobile_app.""" from __future__ import annotations @@ -24,7 +24,7 @@ from .helpers import device_info class MobileAppEntity(RestoreEntity): - """Representation of an mobile app entity.""" + """Representation of a mobile app entity.""" _attr_should_poll = False diff --git a/homeassistant/components/mobile_app/sensor.py b/homeassistant/components/mobile_app/sensor.py index dd70cf1e22e..06ab924aba2 100644 --- a/homeassistant/components/mobile_app/sensor.py +++ b/homeassistant/components/mobile_app/sensor.py @@ -59,6 +59,8 @@ async def async_setup_entry( ATTR_SENSOR_UOM: entry.unit_of_measurement, ATTR_SENSOR_ENTITY_CATEGORY: entry.entity_category, } + if capabilities := entry.capabilities: + config[ATTR_SENSOR_STATE_CLASS] = capabilities.get(ATTR_SENSOR_STATE_CLASS) entities.append(MobileAppSensor(config, config_entry)) async_add_entities(entities) @@ -78,7 +80,7 @@ async def async_setup_entry( class MobileAppSensor(MobileAppEntity, RestoreSensor): - """Representation of an mobile app sensor.""" + """Representation of a mobile app sensor.""" async def async_restore_last_state(self, last_state: State) -> None: """Restore previous state.""" diff --git a/homeassistant/components/mobile_app/timers.py b/homeassistant/components/mobile_app/timers.py index e092298c5d7..e9e44210534 100644 --- a/homeassistant/components/mobile_app/timers.py +++ b/homeassistant/components/mobile_app/timers.py @@ -3,7 +3,7 @@ from datetime import timedelta from homeassistant.components import notify -from homeassistant.components.intent.timers import TimerEventType, TimerInfo +from homeassistant.components.intent import TimerEventType, TimerInfo from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE_ID from homeassistant.core import HomeAssistant, callback diff --git a/homeassistant/components/mochad/manifest.json b/homeassistant/components/mochad/manifest.json index e4680cc6ff5..96795789c8c 100644 --- a/homeassistant/components/mochad/manifest.json +++ b/homeassistant/components/mochad/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mochad", "iot_class": "local_polling", "loggers": ["pbr", "pymochad"], + "quality_scale": "legacy", "requirements": ["pymochad==0.2.0"] } diff --git a/homeassistant/components/modbus/__init__.py b/homeassistant/components/modbus/__init__.py index 
f5efe03dad4..48f8c726836 100644 --- a/homeassistant/components/modbus/__init__.py +++ b/homeassistant/components/modbus/__init__.py @@ -87,7 +87,6 @@ from .const import ( CONF_HVAC_MODE_VALUES, CONF_HVAC_ONOFF_REGISTER, CONF_INPUT_TYPE, - CONF_LAZY_ERROR, CONF_MAX_TEMP, CONF_MAX_VALUE, CONF_MIN_TEMP, @@ -96,7 +95,6 @@ from .const import ( CONF_NAN_VALUE, CONF_PARITY, CONF_PRECISION, - CONF_RETRIES, CONF_SCALE, CONF_SLAVE_COUNT, CONF_STATE_CLOSED, @@ -162,7 +160,6 @@ BASE_COMPONENT_SCHEMA = vol.Schema( vol.Optional( CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL ): cv.positive_int, - vol.Optional(CONF_LAZY_ERROR): cv.positive_int, vol.Optional(CONF_UNIQUE_ID): cv.string, } ) @@ -234,8 +231,10 @@ BASE_SWITCH_SCHEMA = BASE_COMPONENT_SCHEMA.extend( CALL_TYPE_X_REGISTER_HOLDINGS, ] ), - vol.Optional(CONF_STATE_OFF): cv.positive_int, - vol.Optional(CONF_STATE_ON): cv.positive_int, + vol.Optional(CONF_STATE_OFF): vol.All( + cv.ensure_list, [cv.positive_int] + ), + vol.Optional(CONF_STATE_ON): vol.All(cv.ensure_list, [cv.positive_int]), vol.Optional(CONF_DELAY, default=0): cv.positive_int, } ), @@ -393,7 +392,6 @@ MODBUS_SCHEMA = vol.Schema( vol.Optional(CONF_NAME, default=DEFAULT_HUB): cv.string, vol.Optional(CONF_TIMEOUT, default=3): cv.socket_timeout, vol.Optional(CONF_DELAY, default=0): cv.positive_int, - vol.Optional(CONF_RETRIES): cv.positive_int, vol.Optional(CONF_MSG_WAIT): cv.positive_int, vol.Optional(CONF_BINARY_SENSORS): vol.All( cv.ensure_list, [BINARY_SENSOR_SCHEMA] @@ -464,7 +462,7 @@ async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> No if DOMAIN not in hass.data: _LOGGER.error("Modbus cannot reload, because it was never loaded") return - _LOGGER.info("Modbus reloading") + _LOGGER.debug("Modbus reloading") hubs = hass.data[DOMAIN] for name in hubs: await hubs[name].async_close() diff --git a/homeassistant/components/modbus/binary_sensor.py b/homeassistant/components/modbus/binary_sensor.py index 314877b7927..97ade53762b 100644 --- a/homeassistant/components/modbus/binary_sensor.py +++ b/homeassistant/components/modbus/binary_sensor.py @@ -24,13 +24,13 @@ from homeassistant.helpers.update_coordinator import ( ) from . import get_hub -from .base_platform import BasePlatform from .const import ( CALL_TYPE_COIL, CALL_TYPE_DISCRETE, CONF_SLAVE_COUNT, CONF_VIRTUAL_COUNT, ) +from .entity import BasePlatform from .modbus import ModbusHub _LOGGER = logging.getLogger(__name__) @@ -90,6 +90,7 @@ class ModbusBinarySensor(BasePlatform, RestoreEntity, BinarySensorEntity): self._coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name=name, ) @@ -120,7 +121,7 @@ class ModbusBinarySensor(BasePlatform, RestoreEntity, BinarySensorEntity): else: self._attr_available = True if self._input_type in (CALL_TYPE_COIL, CALL_TYPE_DISCRETE): - self._result = result.bits + self._result = [int(bit) for bit in result.bits] else: self._result = result.registers self._attr_is_on = bool(self._result[0] & 1) diff --git a/homeassistant/components/modbus/climate.py b/homeassistant/components/modbus/climate.py index 0a4eae341b4..111c0458ef4 100644 --- a/homeassistant/components/modbus/climate.py +++ b/homeassistant/components/modbus/climate.py @@ -43,7 +43,6 @@ from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import get_hub -from .base_platform import BaseStructPlatform from .const import ( CALL_TYPE_REGISTER_HOLDING, CALL_TYPE_WRITE_REGISTER, @@ -86,6 +85,7 @@ from .const import ( CONF_WRITE_REGISTERS, DataType, ) +from .entity import BaseStructPlatform from .modbus import ModbusHub _LOGGER = logging.getLogger(__name__) @@ -130,7 +130,6 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/modbus/const.py b/homeassistant/components/modbus/const.py index 02f5d99c72c..7a1a4121a93 100644 --- a/homeassistant/components/modbus/const.py +++ b/homeassistant/components/modbus/const.py @@ -20,7 +20,6 @@ CONF_DATA_TYPE = "data_type" CONF_DEVICE_ADDRESS = "device_address" CONF_FANS = "fans" CONF_INPUT_TYPE = "input_type" -CONF_LAZY_ERROR = "lazy_error_count" CONF_MAX_TEMP = "max_temp" CONF_MAX_VALUE = "max_value" CONF_MIN_TEMP = "min_temp" @@ -28,7 +27,6 @@ CONF_MIN_VALUE = "min_value" CONF_MSG_WAIT = "message_wait_milliseconds" CONF_NAN_VALUE = "nan_value" CONF_PARITY = "parity" -CONF_RETRIES = "retries" CONF_PRECISION = "precision" CONF_SCALE = "scale" CONF_SLAVE_COUNT = "slave_count" diff --git a/homeassistant/components/modbus/cover.py b/homeassistant/components/modbus/cover.py index 1221a05a5ac..eb9dac58900 100644 --- a/homeassistant/components/modbus/cover.py +++ b/homeassistant/components/modbus/cover.py @@ -5,24 +5,14 @@ from __future__ import annotations from datetime import datetime from typing import Any -from homeassistant.components.cover import CoverEntity, CoverEntityFeature -from homeassistant.const import ( - CONF_COVERS, - CONF_NAME, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - STATE_UNAVAILABLE, - STATE_UNKNOWN, -) +from homeassistant.components.cover import CoverEntity, CoverEntityFeature, CoverState +from homeassistant.const import CONF_COVERS, CONF_NAME, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import get_hub -from .base_platform import BasePlatform from .const import ( CALL_TYPE_COIL, CALL_TYPE_WRITE_COIL, @@ -34,6 +24,7 @@ from .const import ( CONF_STATUS_REGISTER, CONF_STATUS_REGISTER_TYPE, ) +from .entity import BasePlatform from .modbus import ModbusHub PARALLEL_UPDATES = 1 @@ -105,10 +96,10 @@ class ModbusCover(BasePlatform, CoverEntity, RestoreEntity): await self.async_base_added_to_hass() if state := await self.async_get_last_state(): convert = { - STATE_CLOSED: self._state_closed, - STATE_CLOSING: self._state_closing, - STATE_OPENING: self._state_opening, - STATE_OPEN: self._state_open, + CoverState.CLOSED: self._state_closed, + CoverState.CLOSING: self._state_closing, + CoverState.OPENING: self._state_opening, + CoverState.OPEN: self._state_open, STATE_UNAVAILABLE: None, STATE_UNKNOWN: None, } diff --git a/homeassistant/components/modbus/base_platform.py b/homeassistant/components/modbus/entity.py similarity index 98% rename from homeassistant/components/modbus/base_platform.py rename to homeassistant/components/modbus/entity.py index 9f0e862f283..90833516e59 100644 --- a/homeassistant/components/modbus/base_platform.py +++ b/homeassistant/components/modbus/entity.py @@ -297,8 +297,10 @@ class BaseSwitch(BasePlatform, ToggleEntity, RestoreEntity): self._verify_type = convert[ config[CONF_VERIFY].get(CONF_INPUT_TYPE, config[CONF_WRITE_TYPE]) ][0] - self._state_on = config[CONF_VERIFY].get(CONF_STATE_ON, self.command_on) - self._state_off = config[CONF_VERIFY].get(CONF_STATE_OFF, self._command_off) + self._state_on = config[CONF_VERIFY].get(CONF_STATE_ON, [self.command_on]) + self._state_off = config[CONF_VERIFY].get( + CONF_STATE_OFF, [self._command_off] + ) else: self._verify_active = False @@ -363,9 +365,9 @@ class BaseSwitch(BasePlatform, ToggleEntity, RestoreEntity): self._attr_is_on = bool(result.bits[0] & 1) else: value = int(result.registers[0]) - if value == self._state_on: + if value in self._state_on: self._attr_is_on = True - elif value == self._state_off: + elif value in self._state_off: self._attr_is_on = False elif value is not None: _LOGGER.error( diff --git a/homeassistant/components/modbus/fan.py b/homeassistant/components/modbus/fan.py index e8b9d3bdaa7..bed8ff102bb 100644 --- a/homeassistant/components/modbus/fan.py +++ b/homeassistant/components/modbus/fan.py @@ -11,8 +11,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import get_hub -from .base_platform import BaseSwitch from .const import CONF_FANS +from .entity import BaseSwitch from .modbus import ModbusHub PARALLEL_UPDATES = 1 @@ -38,8 +38,6 @@ async def async_setup_platform( class ModbusFan(BaseSwitch, FanEntity): """Class representing a Modbus fan.""" - _enable_turn_on_off_backwards_compatibility = False - def __init__( self, hass: HomeAssistant, hub: ModbusHub, config: dict[str, Any] ) -> None: diff --git a/homeassistant/components/modbus/icons.json b/homeassistant/components/modbus/icons.json index eeaeff6403b..05ee76fd44e 100644 --- a/homeassistant/components/modbus/icons.json +++ b/homeassistant/components/modbus/icons.json @@ -1,9 +1,19 @@ { "services": { - "reload": "mdi:reload", - "write_coil": "mdi:pencil", - "write_register": "mdi:database-edit", - "stop": "mdi:stop", - "restart": "mdi:restart" + "reload": { + "service": "mdi:reload" + }, + "write_coil": { + "service": "mdi:pencil" + }, + "write_register": { + "service": "mdi:database-edit" + }, + "stop": { + "service": "mdi:stop" + }, + "restart": { + "service": "mdi:restart" + } } } diff --git a/homeassistant/components/modbus/light.py b/homeassistant/components/modbus/light.py index 16714219bc2..42745c2bb78 100644 --- a/homeassistant/components/modbus/light.py +++ b/homeassistant/components/modbus/light.py @@ -11,7 +11,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import get_hub -from .base_platform import BaseSwitch +from .entity import BaseSwitch from .modbus import ModbusHub PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/modbus/manifest.json b/homeassistant/components/modbus/manifest.json index 4482801482f..fc25a329c11 100644 --- a/homeassistant/components/modbus/manifest.json +++ b/homeassistant/components/modbus/manifest.json @@ -5,6 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/modbus", "iot_class": "local_polling", "loggers": ["pymodbus"], - "quality_scale": "silver", - "requirements": ["pymodbus==3.6.9"] + "requirements": ["pymodbus==3.7.4"] } diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index e70b9de50f0..efce44d7979 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -14,8 +14,8 @@ from pymodbus.client import ( AsyncModbusUdpClient, ) from pymodbus.exceptions import ModbusException -from pymodbus.pdu import ModbusResponse -from pymodbus.transaction import ModbusAsciiFramer, ModbusRtuFramer, ModbusSocketFramer +from pymodbus.framer import FramerType +from pymodbus.pdu import ModbusPDU import voluptuous as vol from homeassistant.const import ( @@ -34,7 +34,6 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_call_later -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.typing import ConfigType @@ -62,11 +61,9 @@ from .const import ( PLATFORMS, RTUOVERTCP, SERIAL, - SERVICE_RESTART, SERVICE_STOP, SERVICE_WRITE_COIL, SERVICE_WRITE_REGISTER, - SIGNAL_START_ENTITY, SIGNAL_STOP_ENTITY, TCP, UDP, @@ -161,8 +158,6 @@ async def async_modbus_setup( async def async_stop_modbus(event: Event) -> None: """Stop Modbus service.""" - - 
async_dispatcher_send(hass, SIGNAL_STOP_ENTITY) for client in hub_collect.values(): await client.async_close() @@ -233,34 +228,12 @@ async def async_modbus_setup( hub = hub_collect[service.data[ATTR_HUB]] await hub.async_close() - async def async_restart_hub(service: ServiceCall) -> None: - """Restart Modbus hub.""" - async_create_issue( - hass, - DOMAIN, - "deprecated_restart", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="deprecated_restart", - ) - _LOGGER.warning( - "`modbus.restart` is deprecated and will be removed in version 2024.11" - ) - async_dispatcher_send(hass, SIGNAL_START_ENTITY) - hub = hub_collect[service.data[ATTR_HUB]] - await hub.async_restart() - - for x_service in ( - (SERVICE_STOP, async_stop_hub), - (SERVICE_RESTART, async_restart_hub), - ): - hass.services.async_register( - DOMAIN, - x_service[0], - x_service[1], - schema=vol.Schema({vol.Required(ATTR_HUB): cv.string}), - ) + hass.services.async_register( + DOMAIN, + SERVICE_STOP, + async_stop_hub, + schema=vol.Schema({vol.Required(ATTR_HUB): cv.string}), + ) return True @@ -292,14 +265,13 @@ class ModbusHub: "port": client_config[CONF_PORT], "timeout": client_config[CONF_TIMEOUT], "retries": 3, - "retry_on_empty": True, } if self._config_type == SERIAL: # serial configuration if client_config[CONF_METHOD] == "ascii": - self._pb_params["framer"] = ModbusAsciiFramer + self._pb_params["framer"] = FramerType.ASCII else: - self._pb_params["framer"] = ModbusRtuFramer + self._pb_params["framer"] = FramerType.RTU self._pb_params.update( { "baudrate": client_config[CONF_BAUDRATE], @@ -312,9 +284,9 @@ class ModbusHub: # network configuration self._pb_params["host"] = client_config[CONF_HOST] if self._config_type == RTUOVERTCP: - self._pb_params["framer"] = ModbusRtuFramer + self._pb_params["framer"] = FramerType.RTU else: - self._pb_params["framer"] = ModbusSocketFramer + self._pb_params["framer"] = FramerType.SOCKET if CONF_MSG_WAIT in client_config: self._msg_wait = client_config[CONF_MSG_WAIT] / 1000 @@ -393,16 +365,16 @@ class ModbusHub: del self._client self._client = None message = f"modbus {self.name} communication closed" - _LOGGER.warning(message) + _LOGGER.info(message) async def low_level_pb_call( self, slave: int | None, address: int, value: int | list[int], use_call: str - ) -> ModbusResponse | None: + ) -> ModbusPDU | None: """Call sync. pymodbus.""" kwargs = {"slave": slave} if slave else {} entry = self._pb_request[use_call] try: - result: ModbusResponse = await entry.func(address, value, **kwargs) + result: ModbusPDU = await entry.func(address, value, **kwargs) except ModbusException as exception_error: error = f"Error: device: {slave} address: {address} -> {exception_error!s}" self._log_error(error) @@ -430,7 +402,7 @@ class ModbusHub: address: int, value: int | list[int], use_call: str, - ) -> ModbusResponse | None: + ) -> ModbusPDU | None: """Convert async to sync pymodbus call.""" if self._config_delay: return None diff --git a/homeassistant/components/modbus/sensor.py b/homeassistant/components/modbus/sensor.py index dbc464e98a9..d5a16c95cc4 100644 --- a/homeassistant/components/modbus/sensor.py +++ b/homeassistant/components/modbus/sensor.py @@ -27,8 +27,8 @@ from homeassistant.helpers.update_coordinator import ( ) from . 
import get_hub -from .base_platform import BaseStructPlatform from .const import CONF_SLAVE_COUNT, CONF_VIRTUAL_COUNT +from .entity import BaseStructPlatform from .modbus import ModbusHub _LOGGER = logging.getLogger(__name__) @@ -91,6 +91,7 @@ class ModbusRegisterSensor(BaseStructPlatform, RestoreSensor, SensorEntity): self._coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name=name, ) diff --git a/homeassistant/components/modbus/strings.json b/homeassistant/components/modbus/strings.json index 8e746ca1299..7b55022645e 100644 --- a/homeassistant/components/modbus/strings.json +++ b/homeassistant/components/modbus/strings.json @@ -71,15 +71,15 @@ }, "issues": { "removed_lazy_error_count": { - "title": "`{config_key}` configuration key is being removed", + "title": "{config_key} configuration key is being removed", "description": "Please remove the `{config_key}` key from the {integration} entry in your configuration.yaml file and restart Home Assistant to fix this issue. All errors will be reported, as lazy_error_count is accepted but ignored" }, "deprecated_retries": { - "title": "`{config_key}` configuration key is being removed", + "title": "{config_key} configuration key is being removed", "description": "Please remove the `{config_key}` key from the {integration} entry in your configuration.yaml file and restart Home Assistant to fix this issue.\n\nThe maximum number of retries is now fixed to 3." }, "missing_modbus_name": { - "title": "Modbus entry with host `{sub_2}` missing name", + "title": "Modbus entry with host {sub_2} missing name", "description": "Please add `{sub_1}` key to the {integration} entry with host `{sub_2}` in your configuration.yaml file and restart Home Assistant to fix this issue\n\n. `{sub_1}: {sub_3}` have been added." }, "duplicate_modbus_entry": { @@ -97,10 +97,6 @@ "no_entities": { "title": "Modbus {sub_1} contain no entities, entry not loaded.", "description": "Please add at least one entity to Modbus {sub_1} in your configuration.yaml file and restart Home Assistant to fix this issue." - }, - "deprecated_restart": { - "title": "`modbus.restart` is being removed", - "description": "Please use reload yaml via the developer tools in the UI instead of via the `modbus.restart` action." } } } diff --git a/homeassistant/components/modbus/switch.py b/homeassistant/components/modbus/switch.py index ff02e4a7a7e..71413391a5f 100644 --- a/homeassistant/components/modbus/switch.py +++ b/homeassistant/components/modbus/switch.py @@ -11,7 +11,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import get_hub -from .base_platform import BaseSwitch +from .entity import BaseSwitch from .modbus import ModbusHub PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/modbus/validators.py b/homeassistant/components/modbus/validators.py index e1120094d01..f8f1a7450eb 100644 --- a/homeassistant/components/modbus/validators.py +++ b/homeassistant/components/modbus/validators.py @@ -27,8 +27,6 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_iss from .const import ( CONF_DATA_TYPE, CONF_FAN_MODE_VALUES, - CONF_LAZY_ERROR, - CONF_RETRIES, CONF_SLAVE_COUNT, CONF_SWAP, CONF_SWAP_BYTE, @@ -284,27 +282,6 @@ def validate_modbus( hub_name_inx: int, ) -> bool: """Validate modbus entries.""" - if CONF_RETRIES in hub: - async_create_issue( - hass, - DOMAIN, - "deprecated_retries", - breaks_in_ha_version="2024.7.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="deprecated_retries", - translation_placeholders={ - "config_key": "retries", - "integration": DOMAIN, - "url": "https://www.home-assistant.io/integrations/modbus", - }, - ) - _LOGGER.warning( - "`retries`: is deprecated and will be removed in version 2024.7" - ) - else: - hub[CONF_RETRIES] = 3 - host: str = ( hub[CONF_PORT] if hub[CONF_TYPE] == SERIAL @@ -353,24 +330,6 @@ def validate_entity( ent_addr: set[str], ) -> bool: """Validate entity.""" - if CONF_LAZY_ERROR in entity: - async_create_issue( - hass, - DOMAIN, - "removed_lazy_error_count", - breaks_in_ha_version="2024.7.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="removed_lazy_error_count", - translation_placeholders={ - "config_key": "lazy_error_count", - "integration": DOMAIN, - "url": "https://www.home-assistant.io/integrations/modbus", - }, - ) - _LOGGER.warning( - "`lazy_error_count`: is deprecated and will be removed in version 2024.7" - ) name = f"{component}.{entity[CONF_NAME]}" scan_interval = entity.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) if 0 < scan_interval < 5: diff --git a/homeassistant/components/modern_forms/__init__.py b/homeassistant/components/modern_forms/__init__.py index dea7d4fadea..ef2bbad70ce 100644 --- a/homeassistant/components/modern_forms/__init__.py +++ b/homeassistant/components/modern_forms/__init__.py @@ -11,11 +11,10 @@ from aiomodernforms import ModernFormsConnectionError, ModernFormsError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import ModernFormsDataUpdateCoordinator +from .entity import ModernFormsDeviceEntity PLATFORMS = [ Platform.BINARY_SENSOR, @@ -84,35 +83,3 @@ def modernforms_exception_handler[ _LOGGER.error("Invalid response from API: %s", error) return handler - - -class ModernFormsDeviceEntity(CoordinatorEntity[ModernFormsDataUpdateCoordinator]): - """Defines a Modern Forms device entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - *, - entry_id: str, - coordinator: ModernFormsDataUpdateCoordinator, - enabled_default: bool = True, - ) -> None: - """Initialize the Modern Forms entity.""" - super().__init__(coordinator) - self._attr_enabled_default = enabled_default - self._entry_id = entry_id - - @property - def device_info(self) -> DeviceInfo: - """Return device information about this Modern Forms device.""" - return DeviceInfo( - 
identifiers={(DOMAIN, self.coordinator.data.info.mac_address)}, - name=self.coordinator.data.info.device_name, - manufacturer="Modern Forms", - model=self.coordinator.data.info.fan_type, - sw_version=( - f"{self.coordinator.data.info.firmware_version} /" - f" {self.coordinator.data.info.main_mcu_firmware_version}" - ), - ) diff --git a/homeassistant/components/modern_forms/binary_sensor.py b/homeassistant/components/modern_forms/binary_sensor.py index 5fb0096b477..ea903c580a4 100644 --- a/homeassistant/components/modern_forms/binary_sensor.py +++ b/homeassistant/components/modern_forms/binary_sensor.py @@ -8,9 +8,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . import ModernFormsDeviceEntity from .const import CLEAR_TIMER, DOMAIN from .coordinator import ModernFormsDataUpdateCoordinator +from .entity import ModernFormsDeviceEntity async def async_setup_entry( diff --git a/homeassistant/components/modern_forms/config_flow.py b/homeassistant/components/modern_forms/config_flow.py index c2b88d65a1b..6799dbf97d3 100644 --- a/homeassistant/components/modern_forms/config_flow.py +++ b/homeassistant/components/modern_forms/config_flow.py @@ -9,17 +9,23 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME +from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +USER_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) + class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a ModernForms config flow.""" VERSION = 1 + host: str | None = None + mac: str | None = None + name: str + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -33,14 +39,10 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): host = discovery_info.hostname.rstrip(".") name, _ = host.rsplit(".") - self.context.update( - { - CONF_HOST: discovery_info.host, - CONF_NAME: name, - CONF_MAC: discovery_info.properties.get(CONF_MAC), - "title_placeholders": {"name": name}, - } - ) + self.context["title_placeholders"] = {"name": name} + self.host = discovery_info.host + self.mac = discovery_info.properties.get(CONF_MAC) + self.name = name # Prepare configuration flow return await self._handle_config_flow({}, True) @@ -55,19 +57,23 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None, prepare: bool = False ) -> ConfigFlowResult: """Config flow handler for ModernForms.""" - source = self.context.get("source") - # Request user input, unless we are preparing discovery flow if user_input is None: user_input = {} if not prepare: - if source == SOURCE_ZEROCONF: - return self._show_confirm_dialog() - return self._show_setup_form() + if self.source == SOURCE_ZEROCONF: + return self.async_show_form( + step_id="zeroconf_confirm", + description_placeholders={"name": self.name}, + ) + return self.async_show_form( + step_id="user", + data_schema=USER_SCHEMA, + ) - if source == SOURCE_ZEROCONF: - user_input[CONF_HOST] = self.context.get(CONF_HOST) - user_input[CONF_MAC] = self.context.get(CONF_MAC) + if self.source == SOURCE_ZEROCONF: + user_input[CONF_HOST] = self.host + user_input[CONF_MAC] = self.mac if user_input.get(CONF_MAC) is None or not 
prepare: session = async_get_clientsession(self.hass) @@ -75,19 +81,22 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): try: device = await device.update() except ModernFormsConnectionError: - if source == SOURCE_ZEROCONF: + if self.source == SOURCE_ZEROCONF: return self.async_abort(reason="cannot_connect") - return self._show_setup_form({"base": "cannot_connect"}) + return self.async_show_form( + step_id="user", + data_schema=USER_SCHEMA, + errors={"base": "cannot_connect"}, + ) user_input[CONF_MAC] = device.info.mac_address - user_input[CONF_NAME] = device.info.device_name # Check if already configured await self.async_set_unique_id(user_input[CONF_MAC]) self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]}) title = device.info.device_name - if source == SOURCE_ZEROCONF: - title = self.context.get(CONF_NAME) + if self.source == SOURCE_ZEROCONF: + title = self.name if prepare: return await self.async_step_zeroconf_confirm() @@ -96,20 +105,3 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): title=title, data={CONF_HOST: user_input[CONF_HOST], CONF_MAC: user_input[CONF_MAC]}, ) - - def _show_setup_form(self, errors: dict | None = None) -> ConfigFlowResult: - """Show the setup form to the user.""" - return self.async_show_form( - step_id="user", - data_schema=vol.Schema({vol.Required(CONF_HOST): str}), - errors=errors or {}, - ) - - def _show_confirm_dialog(self, errors: dict | None = None) -> ConfigFlowResult: - """Show the confirm dialog to the user.""" - name = self.context.get(CONF_NAME) - return self.async_show_form( - step_id="zeroconf_confirm", - description_placeholders={"name": name}, - errors=errors or {}, - ) diff --git a/homeassistant/components/modern_forms/diagnostics.py b/homeassistant/components/modern_forms/diagnostics.py new file mode 100644 index 00000000000..0011a7c3bab --- /dev/null +++ b/homeassistant/components/modern_forms/diagnostics.py @@ -0,0 +1,36 @@ +"""Diagnostics support for Modern Forms.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import TYPE_CHECKING, Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_MAC +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import ModernFormsDataUpdateCoordinator + +REDACT_CONFIG = {CONF_MAC} +REDACT_DEVICE_INFO = {"mac_address", "owner"} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator: ModernFormsDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + if TYPE_CHECKING: + assert coordinator is not None + + return { + "config_entry": async_redact_data(entry.as_dict(), REDACT_CONFIG), + "device": { + "info": async_redact_data( + asdict(coordinator.modern_forms.info), REDACT_DEVICE_INFO + ), + "status": asdict(coordinator.modern_forms.status), + }, + } diff --git a/homeassistant/components/modern_forms/entity.py b/homeassistant/components/modern_forms/entity.py new file mode 100644 index 00000000000..c8419295c1f --- /dev/null +++ b/homeassistant/components/modern_forms/entity.py @@ -0,0 +1,41 @@ +"""The Modern Forms integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import 
ModernFormsDataUpdateCoordinator + + +class ModernFormsDeviceEntity(CoordinatorEntity[ModernFormsDataUpdateCoordinator]): + """Defines a Modern Forms device entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + *, + entry_id: str, + coordinator: ModernFormsDataUpdateCoordinator, + enabled_default: bool = True, + ) -> None: + """Initialize the Modern Forms entity.""" + super().__init__(coordinator) + self._attr_enabled_default = enabled_default + self._entry_id = entry_id + + @property + def device_info(self) -> DeviceInfo: + """Return device information about this Modern Forms device.""" + return DeviceInfo( + identifiers={(DOMAIN, self.coordinator.data.info.mac_address)}, + name=self.coordinator.data.info.device_name, + manufacturer="Modern Forms", + model=self.coordinator.data.info.fan_type, + sw_version=( + f"{self.coordinator.data.info.firmware_version} /" + f" {self.coordinator.data.info.main_mcu_firmware_version}" + ), + ) diff --git a/homeassistant/components/modern_forms/fan.py b/homeassistant/components/modern_forms/fan.py index e34038c7be7..988edcb60e5 100644 --- a/homeassistant/components/modern_forms/fan.py +++ b/homeassistant/components/modern_forms/fan.py @@ -18,7 +18,7 @@ from homeassistant.util.percentage import ( ) from homeassistant.util.scaling import int_states_in_range -from . import ModernFormsDeviceEntity, modernforms_exception_handler +from . import modernforms_exception_handler from .const import ( ATTR_SLEEP_TIME, CLEAR_TIMER, @@ -29,6 +29,7 @@ from .const import ( SERVICE_SET_FAN_SLEEP_TIMER, ) from .coordinator import ModernFormsDataUpdateCoordinator +from .entity import ModernFormsDeviceEntity async def async_setup_entry( @@ -77,7 +78,6 @@ class ModernFormsFanEntity(FanEntity, ModernFormsDeviceEntity): | FanEntityFeature.TURN_ON ) _attr_translation_key = "fan" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, entry_id: str, coordinator: ModernFormsDataUpdateCoordinator diff --git a/homeassistant/components/modern_forms/icons.json b/homeassistant/components/modern_forms/icons.json index e5df55dc15e..544e48e17f1 100644 --- a/homeassistant/components/modern_forms/icons.json +++ b/homeassistant/components/modern_forms/icons.json @@ -26,9 +26,17 @@ } }, "services": { - "set_light_sleep_timer": "mdi:timer", - "clear_light_sleep_timer": "mdi:timer-cancel", - "set_fan_sleep_timer": "mdi:timer", - "clear_fan_sleep_timer": "mdi:timer-cancel" + "set_light_sleep_timer": { + "service": "mdi:timer" + }, + "clear_light_sleep_timer": { + "service": "mdi:timer-cancel" + }, + "set_fan_sleep_timer": { + "service": "mdi:timer" + }, + "clear_fan_sleep_timer": { + "service": "mdi:timer-cancel" + } } } diff --git a/homeassistant/components/modern_forms/light.py b/homeassistant/components/modern_forms/light.py index 4c210038694..2b53a414cea 100644 --- a/homeassistant/components/modern_forms/light.py +++ b/homeassistant/components/modern_forms/light.py @@ -17,7 +17,7 @@ from homeassistant.util.percentage import ( ranged_value_to_percentage, ) -from . import ModernFormsDeviceEntity, modernforms_exception_handler +from . 
import modernforms_exception_handler from .const import ( ATTR_SLEEP_TIME, CLEAR_TIMER, @@ -28,6 +28,7 @@ from .const import ( SERVICE_SET_LIGHT_SLEEP_TIMER, ) from .coordinator import ModernFormsDataUpdateCoordinator +from .entity import ModernFormsDeviceEntity BRIGHTNESS_RANGE = (1, 255) diff --git a/homeassistant/components/modern_forms/sensor.py b/homeassistant/components/modern_forms/sensor.py index 851e3092ce5..0f1e90cbe52 100644 --- a/homeassistant/components/modern_forms/sensor.py +++ b/homeassistant/components/modern_forms/sensor.py @@ -11,9 +11,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.util import dt as dt_util -from . import ModernFormsDeviceEntity from .const import CLEAR_TIMER, DOMAIN from .coordinator import ModernFormsDataUpdateCoordinator +from .entity import ModernFormsDeviceEntity async def async_setup_entry( diff --git a/homeassistant/components/modern_forms/switch.py b/homeassistant/components/modern_forms/switch.py index a80115c0f93..f2e8b1b705c 100644 --- a/homeassistant/components/modern_forms/switch.py +++ b/homeassistant/components/modern_forms/switch.py @@ -9,9 +9,10 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ModernFormsDeviceEntity, modernforms_exception_handler +from . import modernforms_exception_handler from .const import DOMAIN from .coordinator import ModernFormsDataUpdateCoordinator +from .entity import ModernFormsDeviceEntity async def async_setup_entry( diff --git a/homeassistant/components/moehlenhoff_alpha2/climate.py b/homeassistant/components/moehlenhoff_alpha2/climate.py index 33f17271800..7c24dad4469 100644 --- a/homeassistant/components/moehlenhoff_alpha2/climate.py +++ b/homeassistant/components/moehlenhoff_alpha2/climate.py @@ -47,7 +47,6 @@ class Alpha2Climate(CoordinatorEntity[Alpha2BaseCoordinator], ClimateEntity): _attr_hvac_modes = [HVACMode.HEAT, HVACMode.COOL] _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_preset_modes = [PRESET_AUTO, PRESET_DAY, PRESET_NIGHT] - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: Alpha2BaseCoordinator, heat_area_id: str) -> None: """Initialize Alpha2 ClimateEntity.""" diff --git a/homeassistant/components/mold_indicator/__init__.py b/homeassistant/components/mold_indicator/__init__.py index adadf41b2b0..c426b942af5 100644 --- a/homeassistant/components/mold_indicator/__init__.py +++ b/homeassistant/components/mold_indicator/__init__.py @@ -1 +1,26 @@ """Calculates mold growth indication from temperature and humidity.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +PLATFORMS = [Platform.SENSOR] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Mold indicator from a config entry.""" + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload Mold indicator config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await 
hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/mold_indicator/config_flow.py b/homeassistant/components/mold_indicator/config_flow.py new file mode 100644 index 00000000000..5e5512a60bf --- /dev/null +++ b/homeassistant/components/mold_indicator/config_flow.py @@ -0,0 +1,175 @@ +"""Config flow for Mold indicator.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, cast + +import voluptuous as vol + +from homeassistant.components import websocket_api +from homeassistant.components.sensor import SensorDeviceClass +from homeassistant.const import CONF_NAME, Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowError, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import ( + EntitySelector, + EntitySelectorConfig, + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + TextSelector, +) +from homeassistant.util.unit_system import METRIC_SYSTEM + +from .const import ( + CONF_CALIBRATION_FACTOR, + CONF_INDOOR_HUMIDITY, + CONF_INDOOR_TEMP, + CONF_OUTDOOR_TEMP, + DEFAULT_NAME, + DOMAIN, +) +from .sensor import MoldIndicator + + +async def validate_input( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate already existing entry.""" + handler.parent_handler._async_abort_entries_match({**handler.options, **user_input}) # noqa: SLF001 + if user_input[CONF_CALIBRATION_FACTOR] == 0.0: + raise SchemaFlowError("calibration_is_zero") + return user_input + + +DATA_SCHEMA_OPTIONS = vol.Schema( + { + vol.Required(CONF_INDOOR_TEMP): EntitySelector( + EntitySelectorConfig( + domain=Platform.SENSOR, device_class=SensorDeviceClass.TEMPERATURE + ) + ), + vol.Required(CONF_INDOOR_HUMIDITY): EntitySelector( + EntitySelectorConfig( + domain=Platform.SENSOR, device_class=SensorDeviceClass.HUMIDITY + ) + ), + vol.Required(CONF_OUTDOOR_TEMP): EntitySelector( + EntitySelectorConfig( + domain=Platform.SENSOR, device_class=SensorDeviceClass.TEMPERATURE + ) + ), + vol.Required(CONF_CALIBRATION_FACTOR): NumberSelector( + NumberSelectorConfig(step=0.1, mode=NumberSelectorMode.BOX) + ), + } +) + +DATA_SCHEMA_CONFIG = vol.Schema( + { + vol.Required(CONF_NAME, default=DEFAULT_NAME): TextSelector(), + } +).extend(DATA_SCHEMA_OPTIONS.schema) + + +CONFIG_FLOW = { + "user": SchemaFlowFormStep( + schema=DATA_SCHEMA_CONFIG, + validate_user_input=validate_input, + preview="mold_indicator", + ), +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep( + DATA_SCHEMA_OPTIONS, + validate_user_input=validate_input, + preview="mold_indicator", + ) +} + + +class MoldIndicatorConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Mold indicator.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + return cast(str, options[CONF_NAME]) + + @staticmethod + async def async_setup_preview(hass: HomeAssistant) -> None: + """Set up preview WS API.""" + websocket_api.async_register_command(hass, ws_start_preview) + + +@websocket_api.websocket_command( + { + vol.Required("type"): "mold_indicator/start_preview", + vol.Required("flow_id"): str, + vol.Required("flow_type"): vol.Any("config_flow", "options_flow"), + vol.Required("user_input"): dict, + } +) 
+@callback +def ws_start_preview( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Generate a preview.""" + + if msg["flow_type"] == "config_flow": + flow_status = hass.config_entries.flow.async_get(msg["flow_id"]) + flow_sets = hass.config_entries.flow._handler_progress_index.get( # noqa: SLF001 + flow_status["handler"] + ) + assert flow_sets + config_entry = hass.config_entries.async_get_entry(flow_status["handler"]) + indoor_temp = msg["user_input"].get(CONF_INDOOR_TEMP) + outdoor_temp = msg["user_input"].get(CONF_OUTDOOR_TEMP) + indoor_hum = msg["user_input"].get(CONF_INDOOR_HUMIDITY) + name = msg["user_input"].get(CONF_NAME) + else: + flow_status = hass.config_entries.options.async_get(msg["flow_id"]) + config_entry = hass.config_entries.async_get_entry(flow_status["handler"]) + if not config_entry: + raise HomeAssistantError("Config entry not found") + indoor_temp = config_entry.options[CONF_INDOOR_TEMP] + outdoor_temp = config_entry.options[CONF_OUTDOOR_TEMP] + indoor_hum = config_entry.options[CONF_INDOOR_HUMIDITY] + name = config_entry.options[CONF_NAME] + + @callback + def async_preview_updated(state: str, attributes: Mapping[str, Any]) -> None: + """Forward config entry state events to websocket.""" + connection.send_message( + websocket_api.event_message( + msg["id"], {"attributes": attributes, "state": state} + ) + ) + + preview_entity = MoldIndicator( + hass, + name, + hass.config.units is METRIC_SYSTEM, + indoor_temp, + outdoor_temp, + indoor_hum, + msg["user_input"].get(CONF_CALIBRATION_FACTOR), + None, + ) + preview_entity.hass = hass + + connection.send_result(msg["id"]) + connection.subscriptions[msg["id"]] = preview_entity.async_start_preview( + async_preview_updated + ) diff --git a/homeassistant/components/mold_indicator/const.py b/homeassistant/components/mold_indicator/const.py new file mode 100644 index 00000000000..15fdf51bce3 --- /dev/null +++ b/homeassistant/components/mold_indicator/const.py @@ -0,0 +1,12 @@ +"""Constants for Mold indicator component.""" + +from __future__ import annotations + +DOMAIN = "mold_indicator" + +CONF_CALIBRATION_FACTOR = "calibration_factor" +CONF_INDOOR_HUMIDITY = "indoor_humidity_sensor" +CONF_INDOOR_TEMP = "indoor_temp_sensor" +CONF_OUTDOOR_TEMP = "outdoor_temp_sensor" + +DEFAULT_NAME = "Mold Indicator" diff --git a/homeassistant/components/mold_indicator/manifest.json b/homeassistant/components/mold_indicator/manifest.json index 5ebccb5f92d..b57f1c471ef 100644 --- a/homeassistant/components/mold_indicator/manifest.json +++ b/homeassistant/components/mold_indicator/manifest.json @@ -2,7 +2,9 @@ "domain": "mold_indicator", "name": "Mold Indicator", "codeowners": [], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/mold_indicator", - "iot_class": "local_polling", + "integration_type": "helper", + "iot_class": "calculated", "quality_scale": "internal" } diff --git a/homeassistant/components/mold_indicator/sensor.py b/homeassistant/components/mold_indicator/sensor.py index 9064e0387e5..262d13ad3af 100644 --- a/homeassistant/components/mold_indicator/sensor.py +++ b/homeassistant/components/mold_indicator/sensor.py @@ -2,25 +2,32 @@ from __future__ import annotations +from collections.abc import Callable, Mapping import logging import math +from typing import TYPE_CHECKING, Any import voluptuous as vol from homeassistant import util from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorDeviceClass, 
SensorEntity, + SensorStateClass, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, CONF_NAME, - EVENT_HOMEASSISTANT_START, + CONF_UNIQUE_ID, PERCENTAGE, + STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfTemperature, ) from homeassistant.core import ( + CALLBACK_TYPE, Event, EventStateChangedData, HomeAssistant, @@ -28,23 +35,26 @@ from homeassistant.core import ( callback, ) import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.device import async_device_info_to_link_from_entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_state_change_event from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util.unit_conversion import TemperatureConverter from homeassistant.util.unit_system import METRIC_SYSTEM +from .const import ( + CONF_CALIBRATION_FACTOR, + CONF_INDOOR_HUMIDITY, + CONF_INDOOR_TEMP, + CONF_OUTDOOR_TEMP, + DEFAULT_NAME, +) + _LOGGER = logging.getLogger(__name__) ATTR_CRITICAL_TEMP = "estimated_critical_temp" ATTR_DEWPOINT = "dewpoint" -CONF_CALIBRATION_FACTOR = "calibration_factor" -CONF_INDOOR_HUMIDITY = "indoor_humidity_sensor" -CONF_INDOOR_TEMP = "indoor_temp_sensor" -CONF_OUTDOOR_TEMP = "outdoor_temp_sensor" - -DEFAULT_NAME = "Mold Indicator" MAGNUS_K2 = 17.62 MAGNUS_K3 = 243.12 @@ -56,6 +66,7 @@ PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( vol.Required(CONF_INDOOR_HUMIDITY): cv.entity_id, vol.Optional(CONF_CALIBRATION_FACTOR): vol.Coerce(float), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_UNIQUE_ID): cv.string, } ) @@ -67,21 +78,53 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up MoldIndicator sensor.""" - name = config.get(CONF_NAME, DEFAULT_NAME) - indoor_temp_sensor = config.get(CONF_INDOOR_TEMP) - outdoor_temp_sensor = config.get(CONF_OUTDOOR_TEMP) - indoor_humidity_sensor = config.get(CONF_INDOOR_HUMIDITY) - calib_factor = config.get(CONF_CALIBRATION_FACTOR) + name: str = config.get(CONF_NAME, DEFAULT_NAME) + indoor_temp_sensor: str = config[CONF_INDOOR_TEMP] + outdoor_temp_sensor: str = config[CONF_OUTDOOR_TEMP] + indoor_humidity_sensor: str = config[CONF_INDOOR_HUMIDITY] + calib_factor: float = config[CONF_CALIBRATION_FACTOR] + unique_id: str | None = config.get(CONF_UNIQUE_ID) async_add_entities( [ MoldIndicator( + hass, name, hass.config.units is METRIC_SYSTEM, indoor_temp_sensor, outdoor_temp_sensor, indoor_humidity_sensor, calib_factor, + unique_id, + ) + ], + False, + ) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Mold indicator sensor entry.""" + name: str = entry.options[CONF_NAME] + indoor_temp_sensor: str = entry.options[CONF_INDOOR_TEMP] + outdoor_temp_sensor: str = entry.options[CONF_OUTDOOR_TEMP] + indoor_humidity_sensor: str = entry.options[CONF_INDOOR_HUMIDITY] + calib_factor: float = entry.options[CONF_CALIBRATION_FACTOR] + + async_add_entities( + [ + MoldIndicator( + hass, + name, + hass.config.units is METRIC_SYSTEM, + indoor_temp_sensor, + outdoor_temp_sensor, + indoor_humidity_sensor, + calib_factor, + entry.entry_id, ) ], False, @@ -92,39 +135,77 @@ class MoldIndicator(SensorEntity): """Represents a MoldIndication sensor.""" _attr_should_poll = False + _attr_native_unit_of_measurement = PERCENTAGE + _attr_device_class = SensorDeviceClass.HUMIDITY + 
_attr_state_class = SensorStateClass.MEASUREMENT def __init__( self, - name, - is_metric, - indoor_temp_sensor, - outdoor_temp_sensor, - indoor_humidity_sensor, - calib_factor, - ): + hass: HomeAssistant, + name: str, + is_metric: bool, + indoor_temp_sensor: str, + outdoor_temp_sensor: str, + indoor_humidity_sensor: str, + calib_factor: float, + unique_id: str | None, + ) -> None: """Initialize the sensor.""" - self._state = None - self._name = name + self._attr_name = name + self._attr_unique_id = unique_id self._indoor_temp_sensor = indoor_temp_sensor self._indoor_humidity_sensor = indoor_humidity_sensor self._outdoor_temp_sensor = outdoor_temp_sensor self._calib_factor = calib_factor self._is_metric = is_metric - self._available = False + self._attr_available = False self._entities = { - self._indoor_temp_sensor, - self._indoor_humidity_sensor, - self._outdoor_temp_sensor, + indoor_temp_sensor, + indoor_humidity_sensor, + outdoor_temp_sensor, } + self._dewpoint: float | None = None + self._indoor_temp: float | None = None + self._outdoor_temp: float | None = None + self._indoor_hum: float | None = None + self._crit_temp: float | None = None + if indoor_humidity_sensor: + self._attr_device_info = async_device_info_to_link_from_entity( + hass, + indoor_humidity_sensor, + ) + self._preview_callback: Callable[[str, Mapping[str, Any]], None] | None = None - self._dewpoint = None - self._indoor_temp = None - self._outdoor_temp = None - self._indoor_hum = None - self._crit_temp = None + @callback + def async_start_preview( + self, + preview_callback: Callable[[str, Mapping[str, Any]], None], + ) -> CALLBACK_TYPE: + """Render a preview.""" + # Abort early if any source entity ID or the calibration factor is missing + if ( + not self._outdoor_temp_sensor + or not self._indoor_temp_sensor + or not self._indoor_humidity_sensor + or not self._calib_factor + ): + self._attr_available = False + calculated_state = self._async_calculate_state() + preview_callback(calculated_state.state, calculated_state.attributes) + return self._call_on_remove_callbacks + + self._preview_callback = preview_callback + + self._async_setup_sensor() + return self._call_on_remove_callbacks async def async_added_to_hass(self) -> None: - """Register callbacks.""" + """Run when entity about to be added to hass.""" + self._async_setup_sensor() + + @callback + def _async_setup_sensor(self) -> None: + """Set up the sensor and start tracking state changes.""" @callback def mold_indicator_sensors_state_listener( @@ -142,10 +223,17 @@ class MoldIndicator(SensorEntity): ) if self._update_sensor(entity, old_state, new_state): - self.async_schedule_update_ha_state(True) + if self._preview_callback: + calculated_state = self._async_calculate_state() + self._preview_callback( + calculated_state.state, calculated_state.attributes + ) + # only write state to the state machine if we are not in preview mode + else: + self.async_schedule_update_ha_state(True) @callback - def mold_indicator_startup(event): + def mold_indicator_startup() -> None: """Add listeners and get 1st state.""" _LOGGER.debug("Startup for %s", self.entity_id) @@ -178,12 +266,22 @@ class MoldIndicator(SensorEntity): else schedule_update ) - if schedule_update: + if schedule_update and not self._preview_callback: self.async_schedule_update_ha_state(True) + if self._preview_callback: + # re-calculate dewpoint and mold indicator + self._calc_dewpoint() + self._calc_moldindicator() + if self._attr_native_value is None: + self._attr_available = False + else: + self._attr_available
= True + calculated_state = self._async_calculate_state() + self._preview_callback( + calculated_state.state, calculated_state.attributes + ) - self.hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_START, mold_indicator_startup - ) + mold_indicator_startup() def _update_sensor( self, entity: str, old_state: State | None, new_state: State | None @@ -199,11 +297,11 @@ class MoldIndicator(SensorEntity): return False if entity == self._indoor_temp_sensor: - self._indoor_temp = MoldIndicator._update_temp_sensor(new_state) + self._indoor_temp = self._update_temp_sensor(new_state) elif entity == self._outdoor_temp_sensor: - self._outdoor_temp = MoldIndicator._update_temp_sensor(new_state) + self._outdoor_temp = self._update_temp_sensor(new_state) elif entity == self._indoor_humidity_sensor: - self._indoor_hum = MoldIndicator._update_hum_sensor(new_state) + self._indoor_hum = self._update_hum_sensor(new_state) return True @@ -213,7 +311,7 @@ class MoldIndicator(SensorEntity): _LOGGER.debug("Updating temp sensor with value %s", state.state) # Return an error if the sensor change its state to Unknown. - if state.state == STATE_UNKNOWN: + if state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE): _LOGGER.error( "Unable to parse temperature sensor %s with state: %s", state.entity_id, @@ -221,8 +319,6 @@ class MoldIndicator(SensorEntity): ) return None - unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - if (temp := util.convert(state.state, float)) is None: _LOGGER.error( "Unable to parse temperature sensor %s with state: %s", @@ -232,12 +328,10 @@ class MoldIndicator(SensorEntity): return None # convert to celsius if necessary - if unit == UnitOfTemperature.FAHRENHEIT: - return TemperatureConverter.convert( - temp, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS - ) - if unit == UnitOfTemperature.CELSIUS: - return temp + if ( + unit := state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + ) in UnitOfTemperature: + return TemperatureConverter.convert(temp, unit, UnitOfTemperature.CELSIUS) _LOGGER.error( "Temp sensor %s has unsupported unit: %s (allowed: %s, %s)", state.entity_id, @@ -254,7 +348,7 @@ class MoldIndicator(SensorEntity): _LOGGER.debug("Updating humidity sensor with value %s", state.state) # Return an error if the sensor change its state to Unknown. 
- if state.state == STATE_UNKNOWN: + if state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE): _LOGGER.error( "Unable to parse humidity sensor %s, state: %s", state.entity_id, @@ -272,19 +366,18 @@ class MoldIndicator(SensorEntity): if (unit := state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)) != PERCENTAGE: _LOGGER.error( - "Humidity sensor %s has unsupported unit: %s %s", + "Humidity sensor %s has unsupported unit: %s (allowed: %s)", state.entity_id, unit, - " (allowed: %)", + PERCENTAGE, ) return None if hum > 100 or hum < 0: _LOGGER.error( - "Humidity sensor %s is out of range: %s %s", + "Humidity sensor %s is out of range: %s (allowed: 0-100)", state.entity_id, hum, - "(allowed: 0-100%)", ) return None @@ -295,7 +388,7 @@ class MoldIndicator(SensorEntity): _LOGGER.debug("Update state for %s", self.entity_id) # check all sensors if None in (self._indoor_temp, self._indoor_hum, self._outdoor_temp): - self._available = False + self._attr_available = False self._dewpoint = None self._crit_temp = None return @@ -303,16 +396,18 @@ class MoldIndicator(SensorEntity): # re-calculate dewpoint and mold indicator self._calc_dewpoint() self._calc_moldindicator() - if self._state is None: - self._available = False + if self._attr_native_value is None: + self._attr_available = False self._dewpoint = None self._crit_temp = None else: - self._available = True + self._attr_available = True - def _calc_dewpoint(self): + def _calc_dewpoint(self) -> None: """Calculate the dewpoint for the indoor air.""" # Use magnus approximation to calculate the dew point + if TYPE_CHECKING: + assert self._indoor_temp and self._indoor_hum alpha = MAGNUS_K2 * self._indoor_temp / (MAGNUS_K3 + self._indoor_temp) beta = MAGNUS_K2 * MAGNUS_K3 / (MAGNUS_K3 + self._indoor_temp) @@ -326,16 +421,19 @@ class MoldIndicator(SensorEntity): ) _LOGGER.debug("Dewpoint: %f %s", self._dewpoint, UnitOfTemperature.CELSIUS) - def _calc_moldindicator(self): + def _calc_moldindicator(self) -> None: """Calculate the humidity at the (cold) calibration point.""" + if TYPE_CHECKING: + assert self._outdoor_temp and self._indoor_temp and self._dewpoint + if None in (self._dewpoint, self._calib_factor) or self._calib_factor == 0: _LOGGER.debug( "Invalid inputs - dewpoint: %s, calibration-factor: %s", self._dewpoint, self._calib_factor, ) - self._state = None - self._available = False + self._attr_native_value = None + self._attr_available = False self._crit_temp = None return @@ -365,46 +463,25 @@ class MoldIndicator(SensorEntity): # check bounds and format if crit_humidity > 100: - self._state = "100" + self._attr_native_value = "100" elif crit_humidity < 0: - self._state = "0" + self._attr_native_value = "0" else: - self._state = f"{int(crit_humidity):d}" + self._attr_native_value = f"{int(crit_humidity):d}" - _LOGGER.debug("Mold indicator humidity: %s", self._state) + _LOGGER.debug("Mold indicator humidity: %s", self.native_value) @property - def name(self): - """Return the name.""" - return self._name - - @property - def native_unit_of_measurement(self): - """Return the unit of measurement.""" - return PERCENTAGE - - @property - def native_value(self): - """Return the state of the entity.""" - return self._state - - @property - def available(self): - """Return the availability of this sensor.""" - return self._available - - @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" if self._is_metric: - return { - ATTR_DEWPOINT: round(self._dewpoint, 2), - ATTR_CRITICAL_TEMP: 
round(self._crit_temp, 2), - } + convert_to = UnitOfTemperature.CELSIUS + else: + convert_to = UnitOfTemperature.FAHRENHEIT dewpoint = ( TemperatureConverter.convert( - self._dewpoint, UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT + self._dewpoint, UnitOfTemperature.CELSIUS, convert_to ) if self._dewpoint is not None else None @@ -412,13 +489,13 @@ class MoldIndicator(SensorEntity): crit_temp = ( TemperatureConverter.convert( - self._crit_temp, UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT + self._crit_temp, UnitOfTemperature.CELSIUS, convert_to ) if self._crit_temp is not None else None ) return { - ATTR_DEWPOINT: round(dewpoint, 2), - ATTR_CRITICAL_TEMP: round(crit_temp, 2), + ATTR_DEWPOINT: round(dewpoint, 2) if dewpoint is not None else None, + ATTR_CRITICAL_TEMP: round(crit_temp, 2) if crit_temp is not None else None, } diff --git a/homeassistant/components/mold_indicator/strings.json b/homeassistant/components/mold_indicator/strings.json new file mode 100644 index 00000000000..74614bba139 --- /dev/null +++ b/homeassistant/components/mold_indicator/strings.json @@ -0,0 +1,49 @@ +{ + "title": "Mold Indicator", + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + }, + "error": { + "calibration_is_zero": "Calibration factor can't be zero." + }, + "step": { + "user": { + "description": "Create Mold indicator helper", + "data": { + "name": "[%key:common::config_flow::data::name%]", + "indoor_humidity_sensor": "Indoor humidity sensor", + "indoor_temp_sensor": "Indoor temperature sensor", + "outdoor_temp_sensor": "Outdoor temperature sensor", + "calibration_factor": "Calibration factor" + }, + "data_description": { + "name": "Name for the created entity.", + "indoor_humidity_sensor": "The entity ID of the indoor humidity sensor.", + "indoor_temp_sensor": "The entity ID of the indoor temperature sensor.", + "outdoor_temp_sensor": "The entity ID of the outdoor temperature sensor.", + "calibration_factor": "Needs to be calibrated to the critical point in the room." + } + } + } + }, + "options": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + }, + "error": { + "calibration_is_zero": "Calibration factor can't be zero."
+ }, + "step": { + "init": { + "description": "Adjust the calibration factor as required", + "data": { + "calibration_factor": "[%key:component::mold_indicator::config::step::user::data::calibration_factor%]" + }, + "data_description": { + "calibration_factor": "[%key:component::mold_indicator::config::step::user::data_description::calibration_factor%]" + } + } + } + } +} diff --git a/homeassistant/components/monarch_money/__init__.py b/homeassistant/components/monarch_money/__init__.py new file mode 100644 index 00000000000..5f9aba7dd07 --- /dev/null +++ b/homeassistant/components/monarch_money/__init__.py @@ -0,0 +1,35 @@ +"""The Monarch Money integration.""" + +from __future__ import annotations + +from typedmonarchmoney import TypedMonarchMoney + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_TOKEN, Platform +from homeassistant.core import HomeAssistant + +from .coordinator import MonarchMoneyDataUpdateCoordinator + +type MonarchMoneyConfigEntry = ConfigEntry[MonarchMoneyDataUpdateCoordinator] + +PLATFORMS: list[Platform] = [Platform.SENSOR] + + +async def async_setup_entry( + hass: HomeAssistant, entry: MonarchMoneyConfigEntry +) -> bool: + """Set up Monarch Money from a config entry.""" + monarch_client = TypedMonarchMoney(token=entry.data.get(CONF_TOKEN)) + + mm_coordinator = MonarchMoneyDataUpdateCoordinator(hass, monarch_client) + await mm_coordinator.async_config_entry_first_refresh() + entry.runtime_data = mm_coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: MonarchMoneyConfigEntry +) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/monarch_money/config_flow.py b/homeassistant/components/monarch_money/config_flow.py new file mode 100644 index 00000000000..5bfdc02c61e --- /dev/null +++ b/homeassistant/components/monarch_money/config_flow.py @@ -0,0 +1,157 @@ +"""Config flow for Monarch Money integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from monarchmoney import LoginFailedException, RequireMFAException +from monarchmoney.monarchmoney import SESSION_FILE +from typedmonarchmoney import TypedMonarchMoney +from typedmonarchmoney.models import MonarchSubscription +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_ID, CONF_PASSWORD, CONF_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import CONF_MFA_CODE, DOMAIN, LOGGER + +_LOGGER = logging.getLogger(__name__) + + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig( + type=TextSelectorType.EMAIL, + ), + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + ), + ), + } +) + +STEP_MFA_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_MFA_CODE): str, + } +) + + +async def validate_login( + hass: HomeAssistant, + data: dict[str, Any], + email: str | None = None, + password: str | None = None, +) -> dict[str, Any]: + """Validate the user input allows us to connect. + + Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. 
Upon success a session will be saved + """ + + if not email: + email = data[CONF_EMAIL] + if not password: + password = data[CONF_PASSWORD] + monarch_client = TypedMonarchMoney() + if CONF_MFA_CODE in data: + mfa_code = data[CONF_MFA_CODE] + LOGGER.debug("Attempting to authenticate with MFA code") + try: + await monarch_client.multi_factor_authenticate(email, password, mfa_code) + except KeyError as err: + # A bug in the backing lib that I don't control throws a KeyError if the MFA code is wrong + LOGGER.debug("Bad MFA Code") + raise BadMFA from err + else: + LOGGER.debug("Attempting to authenticate") + try: + await monarch_client.login( + email=email, + password=password, + save_session=False, + use_saved_session=False, + ) + except RequireMFAException: + raise + except LoginFailedException as err: + raise InvalidAuth from err + + LOGGER.debug(f"Connection successful - saving session to file {SESSION_FILE}") + LOGGER.debug("Obtaining subscription id") + subs: MonarchSubscription = await monarch_client.get_subscription_details() + assert subs is not None + subscription_id = subs.id + return { + CONF_TOKEN: monarch_client.token, + CONF_ID: subscription_id, + } + + +class MonarchMoneyConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Monarch Money.""" + + VERSION = 1 + + def __init__(self) -> None: + """Initialize config flow.""" + self.email: str | None = None + self.password: str | None = None + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + info = await validate_login( + self.hass, user_input, email=self.email, password=self.password + ) + except RequireMFAException: + self.email = user_input[CONF_EMAIL] + self.password = user_input[CONF_PASSWORD] + + return self.async_show_form( + step_id="user", + data_schema=STEP_MFA_DATA_SCHEMA, + errors={"base": "mfa_required"}, + ) + except BadMFA: + return self.async_show_form( + step_id="user", + data_schema=STEP_MFA_DATA_SCHEMA, + errors={"base": "bad_mfa"}, + ) + except InvalidAuth: + errors["base"] = "invalid_auth" + else: + await self.async_set_unique_id(info[CONF_ID]) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title="Monarch Money", + data={CONF_TOKEN: info[CONF_TOKEN]}, + ) + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) + + +class InvalidAuth(HomeAssistantError): + """Error to indicate there is invalid auth.""" + + +class BadMFA(HomeAssistantError): + """Error to indicate the MFA code was bad.""" diff --git a/homeassistant/components/monarch_money/const.py b/homeassistant/components/monarch_money/const.py new file mode 100644 index 00000000000..f450f123179 --- /dev/null +++ b/homeassistant/components/monarch_money/const.py @@ -0,0 +1,10 @@ +"""Constants for the Monarch Money integration.""" + +import logging + +DOMAIN = "monarch_money" + +LOGGER = logging.getLogger(__package__) + +CONF_MFA_SECRET = "mfa_secret" +CONF_MFA_CODE = "mfa_code" diff --git a/homeassistant/components/monarch_money/coordinator.py b/homeassistant/components/monarch_money/coordinator.py new file mode 100644 index 00000000000..3e689c48e91 --- /dev/null +++ b/homeassistant/components/monarch_money/coordinator.py @@ -0,0 +1,95 @@ +"""Data coordinator for monarch money.""" + +import asyncio +from dataclasses import dataclass +from datetime import datetime, timedelta + +from aiohttp import ClientResponseError 
+from gql.transport.exceptions import TransportServerError +from monarchmoney import LoginFailedException +from typedmonarchmoney import TypedMonarchMoney +from typedmonarchmoney.models import ( + MonarchAccount, + MonarchCashflowSummary, + MonarchSubscription, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import LOGGER + + +@dataclass +class MonarchData: + """Data class to hold monarch data.""" + + account_data: dict[str, MonarchAccount] + cashflow_summary: MonarchCashflowSummary + + +class MonarchMoneyDataUpdateCoordinator(DataUpdateCoordinator[MonarchData]): + """Data update coordinator for Monarch Money.""" + + config_entry: ConfigEntry + subscription_id: str + + def __init__( + self, + hass: HomeAssistant, + client: TypedMonarchMoney, + ) -> None: + """Initialize the coordinator.""" + super().__init__( + hass=hass, + logger=LOGGER, + name="monarchmoney", + update_interval=timedelta(hours=4), + ) + self.client = client + + async def _async_setup(self) -> None: + """Obtain subscription ID in setup phase.""" + try: + sub_details: MonarchSubscription = ( + await self.client.get_subscription_details() + ) + except (TransportServerError, LoginFailedException, ClientResponseError) as err: + raise ConfigEntryError("Authentication failed") from err + self.subscription_id = sub_details.id + + async def _async_update_data(self) -> MonarchData: + """Fetch data for all accounts.""" + + now = datetime.now() + + account_data, cashflow_summary = await asyncio.gather( + self.client.get_accounts_as_dict_with_id_key(), + self.client.get_cashflow_summary( + start_date=f"{now.year}-01-01", end_date=f"{now.year}-12-31" + ), + ) + + return MonarchData(account_data=account_data, cashflow_summary=cashflow_summary) + + @property + def cashflow_summary(self) -> MonarchCashflowSummary: + """Return cashflow summary.""" + return self.data.cashflow_summary + + @property + def accounts(self) -> list[MonarchAccount]: + """Return accounts.""" + return list(self.data.account_data.values()) + + @property + def value_accounts(self) -> list[MonarchAccount]: + """Return value accounts.""" + return [x for x in self.accounts if x.is_value_account] + + @property + def balance_accounts(self) -> list[MonarchAccount]: + """Return accounts that aren't assets.""" + return [x for x in self.accounts if x.is_balance_account] diff --git a/homeassistant/components/monarch_money/entity.py b/homeassistant/components/monarch_money/entity.py new file mode 100644 index 00000000000..49a24385782 --- /dev/null +++ b/homeassistant/components/monarch_money/entity.py @@ -0,0 +1,83 @@ +"""Monarch money entity definition.""" + +from typedmonarchmoney.models import MonarchAccount, MonarchCashflowSummary + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import MonarchMoneyDataUpdateCoordinator + + +class MonarchMoneyEntityBase(CoordinatorEntity[MonarchMoneyDataUpdateCoordinator]): + """Base entity for Monarch Money with entity name attribute.""" + + _attr_has_entity_name = True + + +class MonarchMoneyCashFlowEntity(MonarchMoneyEntityBase): + """Entity for Cashflow sensors.""" + + def __init__( + self, + coordinator: 
MonarchMoneyDataUpdateCoordinator, + description: EntityDescription, + ) -> None: + """Initialize the Monarch Money Entity.""" + super().__init__(coordinator) + self._attr_unique_id = ( + f"{coordinator.subscription_id}_cashflow_{description.key}" + ) + self.entity_description = description + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, str(coordinator.subscription_id))}, + name="Cashflow", + ) + + @property + def summary_data(self) -> MonarchCashflowSummary: + """Return cashflow summary data.""" + return self.coordinator.cashflow_summary + + +class MonarchMoneyAccountEntity(MonarchMoneyEntityBase): + """Entity for Account Sensors.""" + + def __init__( + self, + coordinator: MonarchMoneyDataUpdateCoordinator, + description: EntityDescription, + account: MonarchAccount, + ) -> None: + """Initialize the Monarch Money Entity.""" + super().__init__(coordinator) + + self.entity_description = description + self._account_id = account.id + self._attr_attribution = ( + f"Data provided by Monarch Money API via {account.data_provider}" + ) + self._attr_unique_id = ( + f"{coordinator.subscription_id}_{account.id}_{description.translation_key}" + ) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, str(account.id))}, + name=f"{account.institution_name} {account.name}", + entry_type=DeviceEntryType.SERVICE, + manufacturer=account.data_provider, + model=f"{account.institution_name} - {account.type_name} - {account.subtype_name}", + configuration_url=account.institution_url, + ) + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and ( + self._account_id in self.coordinator.data.account_data + ) + + @property + def account_data(self) -> MonarchAccount: + """Return the account data.""" + return self.coordinator.data.account_data[self._account_id] diff --git a/homeassistant/components/monarch_money/icons.json b/homeassistant/components/monarch_money/icons.json new file mode 100644 index 00000000000..95c5eb3cca4 --- /dev/null +++ b/homeassistant/components/monarch_money/icons.json @@ -0,0 +1,10 @@ +{ + "entity": { + "sensor": { + "sum_income": { "default": "mdi:cash-plus" }, + "sum_expense": { "default": "mdi:cash-minus" }, + "savings": { "default": "mdi:piggy-bank-outline" }, + "savings_rate": { "default": "mdi:cash-sync" } + } + } +} diff --git a/homeassistant/components/monarch_money/manifest.json b/homeassistant/components/monarch_money/manifest.json new file mode 100644 index 00000000000..ed28f825bcf --- /dev/null +++ b/homeassistant/components/monarch_money/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "monarch_money", + "name": "Monarch Money", + "codeowners": ["@jeeftor"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/monarchmoney", + "iot_class": "cloud_polling", + "requirements": ["typedmonarchmoney==0.3.1"] +} diff --git a/homeassistant/components/monarch_money/sensor.py b/homeassistant/components/monarch_money/sensor.py new file mode 100644 index 00000000000..fe7c728cf41 --- /dev/null +++ b/homeassistant/components/monarch_money/sensor.py @@ -0,0 +1,182 @@ +"""Sensor config - monarch money.""" + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime + +from typedmonarchmoney.models import MonarchAccount, MonarchCashflowSummary + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import CURRENCY_DOLLAR, PERCENTAGE, 
EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import MonarchMoneyConfigEntry +from .entity import MonarchMoneyAccountEntity, MonarchMoneyCashFlowEntity + + +@dataclass(frozen=True, kw_only=True) +class MonarchMoneyAccountSensorEntityDescription(SensorEntityDescription): + """Describe an account sensor entity.""" + + value_fn: Callable[[MonarchAccount], StateType | datetime] + picture_fn: Callable[[MonarchAccount], str | None] | None = None + + +@dataclass(frozen=True, kw_only=True) +class MonarchMoneyCashflowSensorEntityDescription(SensorEntityDescription): + """Describe a cashflow sensor entity.""" + + summary_fn: Callable[[MonarchCashflowSummary], StateType] + + +# These sensors include assets like a boat that might have value +MONARCH_MONEY_VALUE_SENSORS: tuple[MonarchMoneyAccountSensorEntityDescription, ...] = ( + MonarchMoneyAccountSensorEntityDescription( + key="value", + translation_key="value", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.MONETARY, + value_fn=lambda account: account.balance, + picture_fn=lambda account: account.logo_url, + native_unit_of_measurement=CURRENCY_DOLLAR, + ), +) + +# Most accounts are balance sensors +MONARCH_MONEY_SENSORS: tuple[MonarchMoneyAccountSensorEntityDescription, ...] = ( + MonarchMoneyAccountSensorEntityDescription( + key="currentBalance", + translation_key="balance", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.MONETARY, + value_fn=lambda account: account.balance, + picture_fn=lambda account: account.logo_url, + native_unit_of_measurement=CURRENCY_DOLLAR, + ), +) + +MONARCH_MONEY_AGE_SENSORS: tuple[MonarchMoneyAccountSensorEntityDescription, ...] = ( + MonarchMoneyAccountSensorEntityDescription( + key="age", + translation_key="age", + device_class=SensorDeviceClass.TIMESTAMP, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda account: account.last_update, + ), +) + +MONARCH_CASHFLOW_SENSORS: tuple[MonarchMoneyCashflowSensorEntityDescription, ...] 
= ( + MonarchMoneyCashflowSensorEntityDescription( + key="sum_income", + translation_key="sum_income", + summary_fn=lambda summary: summary.income, + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.MONETARY, + native_unit_of_measurement=CURRENCY_DOLLAR, + ), + MonarchMoneyCashflowSensorEntityDescription( + key="sum_expense", + translation_key="sum_expense", + summary_fn=lambda summary: summary.expenses, + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.MONETARY, + native_unit_of_measurement=CURRENCY_DOLLAR, + ), + MonarchMoneyCashflowSensorEntityDescription( + key="savings", + translation_key="savings", + summary_fn=lambda summary: summary.savings, + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.MONETARY, + native_unit_of_measurement=CURRENCY_DOLLAR, + ), + MonarchMoneyCashflowSensorEntityDescription( + key="savings_rate", + translation_key="savings_rate", + summary_fn=lambda summary: summary.savings_rate * 100, + suggested_display_precision=1, + native_unit_of_measurement=PERCENTAGE, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: MonarchMoneyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Monarch Money sensors for config entries.""" + mm_coordinator = config_entry.runtime_data + + entity_list: list[MonarchMoneySensor | MonarchMoneyCashFlowSensor] = [ + MonarchMoneyCashFlowSensor( + mm_coordinator, + sensor_description, + ) + for sensor_description in MONARCH_CASHFLOW_SENSORS + ] + entity_list.extend( + MonarchMoneySensor( + mm_coordinator, + sensor_description, + account, + ) + for account in mm_coordinator.balance_accounts + for sensor_description in MONARCH_MONEY_SENSORS + ) + entity_list.extend( + MonarchMoneySensor( + mm_coordinator, + sensor_description, + account, + ) + for account in mm_coordinator.accounts + for sensor_description in MONARCH_MONEY_AGE_SENSORS + ) + entity_list.extend( + MonarchMoneySensor( + mm_coordinator, + sensor_description, + account, + ) + for account in mm_coordinator.value_accounts + for sensor_description in MONARCH_MONEY_VALUE_SENSORS + ) + + async_add_entities(entity_list) + + +class MonarchMoneyCashFlowSensor(MonarchMoneyCashFlowEntity, SensorEntity): + """Cashflow summary sensor.""" + + entity_description: MonarchMoneyCashflowSensorEntityDescription + + @property + def native_value(self) -> StateType: + """Return the state.""" + return self.entity_description.summary_fn(self.summary_data) + + +class MonarchMoneySensor(MonarchMoneyAccountEntity, SensorEntity): + """Define a monarch money sensor.""" + + entity_description: MonarchMoneyAccountSensorEntityDescription + + @property + def native_value(self) -> StateType | datetime: + """Return the state.""" + return self.entity_description.value_fn(self.account_data) + + @property + def entity_picture(self) -> str | None: + """Return the picture of the account as provided by monarch money if it exists.""" + if self.entity_description.picture_fn is not None: + return self.entity_description.picture_fn(self.account_data) + return None diff --git a/homeassistant/components/monarch_money/strings.json b/homeassistant/components/monarch_money/strings.json new file mode 100644 index 00000000000..d7a28940d7a --- /dev/null +++ b/homeassistant/components/monarch_money/strings.json @@ -0,0 +1,46 @@ +{ + "config": { + "step": { + "user": { + "description": "Enter your Monarch Money email and password, if required you will also be prompted for your MFA code.", + "data": { + "mfa_secret": 
"Add your MFA Secret. See docs for help.", + "mfa_code": "Enter your MFA code", + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "mfa_required": "Multi-factor authentication required.", + "bad_mfa": "Your code was invalid, please try again or use a recovery token." + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "sensor": { + "balance": { "name": "Balance" }, + "value": { "name": "Value" }, + + "age": { + "name": "Data age" + }, + + "sum_income": { + "name": "Income year to date" + }, + "sum_expense": { + "name": "Expense year to date" + }, + "savings": { + "name": "Savings year to date" + }, + "savings_rate": { + "name": "Savings rate" + } + } + } +} diff --git a/homeassistant/components/monoprice/config_flow.py b/homeassistant/components/monoprice/config_flow.py index 5f0b1bf27b5..b2619623a07 100644 --- a/homeassistant/components/monoprice/config_flow.py +++ b/homeassistant/components/monoprice/config_flow.py @@ -108,7 +108,7 @@ class MonoPriceConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> MonopriceOptionsFlowHandler: """Define the config flow to handle options.""" - return MonopriceOptionsFlowHandler(config_entry) + return MonopriceOptionsFlowHandler() @callback @@ -126,10 +126,6 @@ def _key_for_source(index, source, previous_sources): class MonopriceOptionsFlowHandler(OptionsFlow): """Handle a Monoprice options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - @callback def _previous_sources(self): if CONF_SOURCES in self.config_entry.options: @@ -139,7 +135,9 @@ class MonopriceOptionsFlowHandler(OptionsFlow): return previous - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry( diff --git a/homeassistant/components/monoprice/icons.json b/homeassistant/components/monoprice/icons.json index 22610cc2a47..d560c7bcfa8 100644 --- a/homeassistant/components/monoprice/icons.json +++ b/homeassistant/components/monoprice/icons.json @@ -1,6 +1,10 @@ { "services": { - "snapshot": "mdi:content-copy", - "restore": "mdi:content-paste" + "snapshot": { + "service": "mdi:content-copy" + }, + "restore": { + "service": "mdi:content-paste" + } } } diff --git a/homeassistant/components/monoprice/media_player.py b/homeassistant/components/monoprice/media_player.py index daf13b4d7b8..2dde0832440 100644 --- a/homeassistant/components/monoprice/media_player.py +++ b/homeassistant/components/monoprice/media_player.py @@ -71,7 +71,7 @@ async def async_setup_entry( for i in range(1, 4): for j in range(1, 7): zone_id = (i * 10) + j - _LOGGER.info("Adding zone %d for port %s", zone_id, port) + _LOGGER.debug("Adding zone %d for port %s", zone_id, port) entities.append( MonopriceZone(monoprice, sources, config_entry.entry_id, zone_id) ) diff --git a/homeassistant/components/monzo/api.py b/homeassistant/components/monzo/api.py index 6862564d343..5216232199c 100644 --- a/homeassistant/components/monzo/api.py +++ b/homeassistant/components/monzo/api.py @@ -20,7 +20,6 @@ class AuthenticatedMonzoAPI(AbstractMonzoApi): async def async_get_access_token(self) -> str: """Return a valid access token.""" - 
if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return str(self._oauth_session.token["access_token"]) diff --git a/homeassistant/components/monzo/config_flow.py b/homeassistant/components/monzo/config_flow.py index 2eb51b4d305..9f005c6aaa4 100644 --- a/homeassistant/components/monzo/config_flow.py +++ b/homeassistant/components/monzo/config_flow.py @@ -8,7 +8,7 @@ from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow @@ -23,7 +23,6 @@ class MonzoFlowHandler( DOMAIN = DOMAIN oauth_data: dict[str, Any] - reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -35,10 +34,11 @@ class MonzoFlowHandler( ) -> ConfigFlowResult: """Wait for the user to confirm in-app approval.""" if user_input is not None: - if not self.reauth_entry: + if self.source != SOURCE_REAUTH: return self.async_create_entry(title=DOMAIN, data=self.oauth_data) return self.async_update_reload_and_abort( - self.reauth_entry, data={**self.reauth_entry.data, **self.oauth_data} + self._get_reauth_entry(), + data_updates=self.oauth_data, ) data_schema = vol.Schema({vol.Required("confirm"): bool}) @@ -51,11 +51,11 @@ class MonzoFlowHandler( """Create an entry for the flow.""" self.oauth_data = data user_id = data[CONF_TOKEN]["user_id"] - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() - elif self.reauth_entry.unique_id != user_id: - return self.async_abort(reason="wrong_account") + else: + self._abort_if_unique_id_mismatch(reason="wrong_account") return await self.async_step_await_approval_confirmation() @@ -63,9 +63,6 @@ class MonzoFlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/monzo/coordinator.py b/homeassistant/components/monzo/coordinator.py index 223d7b05ffe..caac551f986 100644 --- a/homeassistant/components/monzo/coordinator.py +++ b/homeassistant/components/monzo/coordinator.py @@ -3,13 +3,14 @@ from dataclasses import dataclass from datetime import timedelta import logging +from pprint import pformat from typing import Any -from monzopy import AuthorisationExpiredError +from monzopy import AuthorisationExpiredError, InvalidMonzoAPIResponseError from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .api import AuthenticatedMonzoAPI from .const import DOMAIN @@ -45,5 +46,16 @@ class MonzoCoordinator(DataUpdateCoordinator[MonzoData]): pots = await self.api.user_account.pots() except AuthorisationExpiredError as err: raise ConfigEntryAuthFailed from err + except InvalidMonzoAPIResponseError as err: + message = "Invalid Monzo API response." 
+ if err.missing_key: + _LOGGER.debug( + "%s\nMissing key: %s\nResponse:\n%s", + message, + err.missing_key, + pformat(err.response), + ) + message += " Enabling debug logging for details." + raise UpdateFailed(message) from err return MonzoData(accounts, pots) diff --git a/homeassistant/components/monzo/manifest.json b/homeassistant/components/monzo/manifest.json index d9d17eb8abc..7038cecd7ea 100644 --- a/homeassistant/components/monzo/manifest.json +++ b/homeassistant/components/monzo/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/monzo", "iot_class": "cloud_polling", - "requirements": ["monzopy==1.3.2"] + "requirements": ["monzopy==1.4.2"] } diff --git a/homeassistant/components/mopeka/__init__.py b/homeassistant/components/mopeka/__init__.py index 17a87efd6e6..d73ece581d7 100644 --- a/homeassistant/components/mopeka/__init__.py +++ b/homeassistant/components/mopeka/__init__.py @@ -14,7 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import CONF_MEDIUM_TYPE +from .const import CONF_MEDIUM_TYPE, DEFAULT_MEDIUM_TYPE PLATFORMS: list[Platform] = [Platform.SENSOR] @@ -29,8 +29,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: MopekaConfigEntry) -> bo address = entry.unique_id assert address is not None - # Default sensors configured prior to the intorudction of MediumType - medium_type_str = entry.data.get(CONF_MEDIUM_TYPE, MediumType.PROPANE.value) + # Default sensors configured prior to the introduction of MediumType + medium_type_str = entry.data.get(CONF_MEDIUM_TYPE, DEFAULT_MEDIUM_TYPE) data = MopekaIOTBluetoothDeviceData(MediumType(medium_type_str)) coordinator = entry.runtime_data = PassiveBluetoothProcessorCoordinator( hass, diff --git a/homeassistant/components/mopeka/config_flow.py b/homeassistant/components/mopeka/config_flow.py index 72e9386a47f..2e35ff4283f 100644 --- a/homeassistant/components/mopeka/config_flow.py +++ b/homeassistant/components/mopeka/config_flow.py @@ -58,7 +58,7 @@ class MopekaConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: config_entries.ConfigEntry, ) -> MopekaOptionsFlow: """Return the options flow for this handler.""" - return MopekaOptionsFlow(config_entry) + return MopekaOptionsFlow() async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfoBleak @@ -139,10 +139,6 @@ class MopekaConfigFlow(ConfigFlow, domain=DOMAIN): class MopekaOptionsFlow(config_entries.OptionsFlow): """Handle options for the Mopeka component.""" - def __init__(self, config_entry: config_entries.ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/motion_blinds/button.py b/homeassistant/components/motion_blinds/button.py index 30f1cd53e6f..89841bf8fd4 100644 --- a/homeassistant/components/motion_blinds/button.py +++ b/homeassistant/components/motion_blinds/button.py @@ -26,7 +26,13 @@ async def async_setup_entry( coordinator = hass.data[DOMAIN][config_entry.entry_id][KEY_COORDINATOR] for blind in motion_gateway.device_list.values(): - if blind.limit_status == LimitStatus.Limit3Detected.name: + if blind.limit_status in ( + LimitStatus.Limit3Detected.name, + { + "T": LimitStatus.Limit3Detected.name, + "B": LimitStatus.Limit3Detected.name, + }, + ): 
entities.append(MotionGoFavoriteButton(coordinator, blind)) entities.append(MotionSetFavoriteButton(coordinator, blind)) diff --git a/homeassistant/components/motion_blinds/config_flow.py b/homeassistant/components/motion_blinds/config_flow.py index 131299314a2..e961880375c 100644 --- a/homeassistant/components/motion_blinds/config_flow.py +++ b/homeassistant/components/motion_blinds/config_flow.py @@ -38,10 +38,6 @@ CONFIG_SCHEMA = vol.Schema( class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -83,7 +79,7 @@ class MotionBlindsFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo diff --git a/homeassistant/components/motion_blinds/cover.py b/homeassistant/components/motion_blinds/cover.py index 72b78915bad..1ea3a6ed9d6 100644 --- a/homeassistant/components/motion_blinds/cover.py +++ b/homeassistant/components/motion_blinds/cover.py @@ -330,23 +330,63 @@ class MotionTiltOnlyDevice(MotionTiltDevice): """Return current position of cover.""" return None + @property + def current_cover_tilt_position(self) -> int | None: + """Return current angle of cover. + + None is unknown, 0 is closed/minimum tilt, 100 is fully open/maximum tilt. + """ + if self._blind.position is None: + if self._blind.angle is None: + return None + return self._blind.angle * 100 / 180 + + return self._blind.position + @property def is_closed(self) -> bool | None: """Return if the cover is closed or not.""" - if self._blind.angle is None: - return None - return self._blind.angle == 0 + if self._blind.position is None: + if self._blind.angle is None: + return None + return self._blind.angle == 0 + + return self._blind.position == 0 + + async def async_open_cover_tilt(self, **kwargs: Any) -> None: + """Open the cover tilt.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Open) + + async def async_close_cover_tilt(self, **kwargs: Any) -> None: + """Close the cover tilt.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Close) + + async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: + """Move the cover tilt to a specific position.""" + angle = kwargs[ATTR_TILT_POSITION] + if self._blind.position is None: + angle = angle * 180 / 100 + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_angle, angle) + else: + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_position, angle) async def async_set_absolute_position(self, **kwargs): """Move the cover to a specific absolute position (see TDBU).""" angle = kwargs.get(ATTR_TILT_POSITION) - if angle is not None: + if angle is None: + return + + if self._blind.position is None: angle = angle * 180 / 100 async with self._api_lock: - await self.hass.async_add_executor_job( - self._blind.Set_angle, - angle, - ) + await self.hass.async_add_executor_job(self._blind.Set_angle, angle) + else: + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_position, angle) class MotionTDBUDevice(MotionBaseDevice): @@ -421,7 +461,7 @@ class MotionTDBUDevice(MotionBaseDevice): async def 
async_set_absolute_position(self, **kwargs): """Move the cover to a specific absolute position.""" position = kwargs[ATTR_ABSOLUTE_POSITION] - target_width = kwargs.get(ATTR_WIDTH, None) + target_width = kwargs.get(ATTR_WIDTH) async with self._api_lock: await self.hass.async_add_executor_job( diff --git a/homeassistant/components/motion_blinds/icons.json b/homeassistant/components/motion_blinds/icons.json index 9e1cd613e5b..e50e50130f7 100644 --- a/homeassistant/components/motion_blinds/icons.json +++ b/homeassistant/components/motion_blinds/icons.json @@ -10,6 +10,8 @@ } }, "services": { - "set_absolute_position": "mdi:set-square" + "set_absolute_position": { + "service": "mdi:set-square" + } } } diff --git a/homeassistant/components/motion_blinds/manifest.json b/homeassistant/components/motion_blinds/manifest.json index e1e12cf6729..b327c146300 100644 --- a/homeassistant/components/motion_blinds/manifest.json +++ b/homeassistant/components/motion_blinds/manifest.json @@ -21,5 +21,5 @@ "documentation": "https://www.home-assistant.io/integrations/motion_blinds", "iot_class": "local_push", "loggers": ["motionblinds"], - "requirements": ["motionblinds==0.6.24"] + "requirements": ["motionblinds==0.6.25"] } diff --git a/homeassistant/components/motionblinds_ble/config_flow.py b/homeassistant/components/motionblinds_ble/config_flow.py index b8e03386844..30417c62c65 100644 --- a/homeassistant/components/motionblinds_ble/config_flow.py +++ b/homeassistant/components/motionblinds_ble/config_flow.py @@ -48,11 +48,12 @@ CONFIG_SCHEMA = vol.Schema({vol.Required(CONF_MAC_CODE): str}) class FlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Motionblinds Bluetooth.""" + _display_name: str + def __init__(self) -> None: """Initialize a ConfigFlow.""" self._discovery_info: BluetoothServiceInfoBleak | BLEDevice | None = None self._mac_code: str | None = None - self._display_name: str | None = None self._blind_type: MotionBlindType | None = None async def async_step_bluetooth( @@ -68,7 +69,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self._discovery_info = discovery_info self._mac_code = get_mac_from_local_name(discovery_info.name) self._display_name = DISPLAY_NAME.format(mac_code=self._mac_code) - self.context["local_name"] = discovery_info.name self.context["title_placeholders"] = {"name": self._display_name} return await self.async_step_confirm() @@ -114,7 +114,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): assert self._discovery_info is not None return self.async_create_entry( - title=str(self._display_name), + title=self._display_name, data={ CONF_ADDRESS: self._discovery_info.address, CONF_LOCAL_NAME: self._discovery_info.name, @@ -188,16 +188,12 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle an options flow for Motionblinds BLE.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/motionblinds_ble/diagnostics.py b/homeassistant/components/motionblinds_ble/diagnostics.py new file mode 100644 index 00000000000..c76bef7c2f8 --- /dev/null +++ b/homeassistant/components/motionblinds_ble/diagnostics.py @@ -0,0 +1,53 @@ +"""Diagnostics support for Motionblinds Bluetooth.""" + 
+from __future__ import annotations + +from collections.abc import Iterable +from typing import Any + +from motionblindsble.device import MotionDevice + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_UNIQUE_ID +from homeassistant.core import HomeAssistant + +from .const import DOMAIN + +CONF_TITLE = "title" + +TO_REDACT: Iterable[Any] = { + # Config entry title and unique ID may contain sensitive data: + CONF_TITLE, + CONF_UNIQUE_ID, +} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + device: MotionDevice = hass.data[DOMAIN][entry.entry_id] + + return async_redact_data( + { + "entry": entry.as_dict(), + "device": { + "blind_type": device.blind_type.value, + "timezone": device.timezone, + "position": device._position, # noqa: SLF001 + "tilt": device._tilt, # noqa: SLF001 + "calibration_type": device._calibration_type.value # noqa: SLF001 + if device._calibration_type # noqa: SLF001 + else None, + "connection_type": device._connection_type.value, # noqa: SLF001 + "end_position_info": None + if not device._end_position_info # noqa: SLF001 + else { + "end_positions": device._end_position_info.end_positions.value, # noqa: SLF001 + "favorite": device._end_position_info.favorite_position, # noqa: SLF001 + }, + }, + }, + TO_REDACT, + ) diff --git a/homeassistant/components/motionblinds_ble/manifest.json b/homeassistant/components/motionblinds_ble/manifest.json index d9968cfde4c..70cddce30a1 100644 --- a/homeassistant/components/motionblinds_ble/manifest.json +++ b/homeassistant/components/motionblinds_ble/manifest.json @@ -14,5 +14,5 @@ "integration_type": "device", "iot_class": "assumed_state", "loggers": ["motionblindsble"], - "requirements": ["motionblindsble==0.1.1"] + "requirements": ["motionblindsble==0.1.3"] } diff --git a/homeassistant/components/motionblinds_ble/sensor.py b/homeassistant/components/motionblinds_ble/sensor.py index aa0f5ef7c90..740a0509a9e 100644 --- a/homeassistant/components/motionblinds_ble/sensor.py +++ b/homeassistant/components/motionblinds_ble/sensor.py @@ -6,7 +6,6 @@ from collections.abc import Callable from dataclasses import dataclass import logging from math import ceil -from typing import Generic, TypeVar from motionblindsble.const import ( MotionBlindType, @@ -45,11 +44,9 @@ _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 -_T = TypeVar("_T") - @dataclass(frozen=True, kw_only=True) -class MotionblindsBLESensorEntityDescription(SensorEntityDescription, Generic[_T]): +class MotionblindsBLESensorEntityDescription[_T](SensorEntityDescription): """Entity description of a sensor entity with initial_value attribute.""" initial_value: str | None = None @@ -110,7 +107,7 @@ async def async_setup_entry( async_add_entities(entities) -class MotionblindsBLESensorEntity(MotionblindsBLEEntity, SensorEntity, Generic[_T]): +class MotionblindsBLESensorEntity[_T](MotionblindsBLEEntity, SensorEntity): """Representation of a sensor entity.""" entity_description: MotionblindsBLESensorEntityDescription[_T] diff --git a/homeassistant/components/motioneye/__init__.py b/homeassistant/components/motioneye/__init__.py index 6ec3092ab35..3e4ad53d200 100644 --- a/homeassistant/components/motioneye/__init__.py +++ b/homeassistant/components/motioneye/__init__.py @@ -8,7 +8,6 @@ from http import HTTPStatus import json import logging import os -from types 
import MappingProxyType from typing import Any from urllib.parse import urlencode, urljoin @@ -52,18 +51,12 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) -from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.network import NoURLAvailableError, get_url -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( ATTR_EVENT_TYPE, @@ -125,13 +118,6 @@ def split_motioneye_device_identifier( return (DOMAIN, config_id, camera_id) -def get_motioneye_entity_unique_id( - config_entry_id: str, camera_id: int, entity_type: str -) -> str: - """Get the unique_id for a motionEye entity.""" - return f"{config_entry_id}_{camera_id}_{entity_type}" - - def get_camera_from_cameras( camera_id: int, data: dict[str, Any] | None ) -> dict[str, Any] | None: @@ -336,6 +322,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update_data, update_interval=DEFAULT_SCAN_INTERVAL, @@ -530,51 +517,3 @@ def get_media_url( return client.get_image_url(camera_id, path) return client.get_movie_url(camera_id, path) return None - - -class MotionEyeEntity(CoordinatorEntity): - """Base class for motionEye entities.""" - - _attr_has_entity_name = True - - def __init__( - self, - config_entry_id: str, - type_name: str, - camera: dict[str, Any], - client: MotionEyeClient, - coordinator: DataUpdateCoordinator, - options: MappingProxyType[str, Any], - entity_description: EntityDescription | None = None, - ) -> None: - """Initialize a motionEye entity.""" - self._camera_id = camera[KEY_ID] - self._device_identifier = get_motioneye_device_identifier( - config_entry_id, self._camera_id - ) - self._unique_id = get_motioneye_entity_unique_id( - config_entry_id, - self._camera_id, - type_name, - ) - self._client = client - self._camera: dict[str, Any] | None = camera - self._options = options - if entity_description is not None: - self.entity_description = entity_description - super().__init__(coordinator) - - @property - def unique_id(self) -> str: - """Return a unique id for this instance.""" - return self._unique_id - - @property - def device_info(self) -> DeviceInfo: - """Return the device information.""" - return DeviceInfo(identifiers={self._device_identifier}) - - @property - def available(self) -> bool: - """Return if entity is available.""" - return self._camera is not None and super().available diff --git a/homeassistant/components/motioneye/camera.py b/homeassistant/components/motioneye/camera.py index d84f7b43c04..df4c321037e 100644 --- a/homeassistant/components/motioneye/camera.py +++ b/homeassistant/components/motioneye/camera.py @@ -45,12 +45,7 @@ from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . 
import ( - MotionEyeEntity, - get_camera_from_cameras, - is_acceptable_camera, - listen_for_new_cameras, -) +from . import get_camera_from_cameras, is_acceptable_camera, listen_for_new_cameras from .const import ( CONF_ACTION, CONF_CLIENT, @@ -65,6 +60,7 @@ from .const import ( SERVICE_SNAPSHOT, TYPE_MOTIONEYE_MJPEG_CAMERA, ) +from .entity import MotionEyeEntity PLATFORMS = [Platform.CAMERA] diff --git a/homeassistant/components/motioneye/config_flow.py b/homeassistant/components/motioneye/config_flow.py index 8107ca760cb..80a6449a22d 100644 --- a/homeassistant/components/motioneye/config_flow.py +++ b/homeassistant/components/motioneye/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, cast +from typing import Any from motioneye_client.client import ( MotionEyeClientConnectionError, @@ -12,7 +12,6 @@ from motioneye_client.client import ( ) import voluptuous as vol -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ( SOURCE_REAUTH, ConfigEntry, @@ -20,10 +19,11 @@ from homeassistant.config_entries import ( ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_SOURCE, CONF_URL, CONF_WEBHOOK_ID +from homeassistant.const import CONF_URL, CONF_WEBHOOK_ID from homeassistant.core import callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.typing import VolDictType from . import create_motioneye_client @@ -53,7 +53,7 @@ class MotionEyeConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" def _get_form( - user_input: dict[str, Any], errors: dict[str, str] | None = None + user_input: Mapping[str, Any], errors: dict[str, str] | None = None ) -> ConfigFlowResult: """Show the form to the user.""" url_schema: VolDictType = {} @@ -89,16 +89,10 @@ class MotionEyeConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - reauth_entry = None - if self.context.get("entry_id"): - reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - if user_input is None: - return _get_form( - cast(dict[str, Any], reauth_entry.data) if reauth_entry else {} - ) + if self.source == SOURCE_REAUTH: + return _get_form(self._get_reauth_entry().data) + return _get_form({}) if self._hassio_discovery: # In case of Supervisor discovery, use pushed URL @@ -135,16 +129,13 @@ class MotionEyeConfigFlow(ConfigFlow, domain=DOMAIN): if errors: return _get_form(user_input, errors) - if self.context.get(CONF_SOURCE) == SOURCE_REAUTH and reauth_entry is not None: + if self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() # Persist the same webhook id across reauths. if CONF_WEBHOOK_ID in reauth_entry.data: user_input[CONF_WEBHOOK_ID] = reauth_entry.data[CONF_WEBHOOK_ID] - self.hass.config_entries.async_update_entry(reauth_entry, data=user_input) - # Need to manually reload, as the listener won't have been - # installed because the initial load did not succeed (the reauth - # flow will not be initiated if the load succeeds). - await self.hass.config_entries.async_reload(reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + + return self.async_update_reload_and_abort(reauth_entry, data=user_input) # Search for duplicates: there isn't a useful unique_id, but # at least prevent entries with the same motionEye URL. 
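For reference, the reauth handling in the hunk above relies on the ConfigFlow helpers `self.source == SOURCE_REAUTH`, `_get_reauth_entry()` and `async_update_reload_and_abort()` rather than manual `self.context["entry_id"]` lookups and explicit reloads. A minimal sketch of that pattern for a hypothetical integration follows; the "example" domain, `ExampleConfigFlow`, and the `CONF_API_KEY` schema are assumptions for illustration only and are not part of this change.

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Hypothetical flow showing the reauth helpers used in this patch."""

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauthentication with new credentials."""
        if user_input is not None:
            # _get_reauth_entry() returns the config entry being
            # reauthenticated, replacing manual lookups via
            # self.context["entry_id"].
            reauth_entry = self._get_reauth_entry()
            # async_update_reload_and_abort() persists the merged data,
            # reloads the entry and aborts with "reauth_successful",
            # so no explicit async_reload()/async_abort() is needed.
            return self.async_update_reload_and_abort(
                reauth_entry, data={**reauth_entry.data, **user_input}
            )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
        )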
@@ -188,18 +179,16 @@ class MotionEyeConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> MotionEyeOptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> MotionEyeOptionsFlow: """Get the Hyperion Options flow.""" - return MotionEyeOptionsFlow(config_entry) + return MotionEyeOptionsFlow() class MotionEyeOptionsFlow(OptionsFlow): """motionEye options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize a motionEye options flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -210,14 +199,14 @@ class MotionEyeOptionsFlow(OptionsFlow): schema: dict[vol.Marker, type] = { vol.Required( CONF_WEBHOOK_SET, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_WEBHOOK_SET, DEFAULT_WEBHOOK_SET, ), ): bool, vol.Required( CONF_WEBHOOK_SET_OVERWRITE, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_WEBHOOK_SET_OVERWRITE, DEFAULT_WEBHOOK_SET_OVERWRITE, ), @@ -228,9 +217,9 @@ class MotionEyeOptionsFlow(OptionsFlow): # The input URL is not validated as being a URL, to allow for the possibility # the template input won't be a valid URL until after it's rendered description: dict[str, str] | None = None - if CONF_STREAM_URL_TEMPLATE in self._config_entry.options: + if CONF_STREAM_URL_TEMPLATE in self.config_entry.options: description = { - "suggested_value": self._config_entry.options[ + "suggested_value": self.config_entry.options[ CONF_STREAM_URL_TEMPLATE ] } diff --git a/homeassistant/components/motioneye/entity.py b/homeassistant/components/motioneye/entity.py new file mode 100644 index 00000000000..49739f2fca3 --- /dev/null +++ b/homeassistant/components/motioneye/entity.py @@ -0,0 +1,73 @@ +"""The motionEye integration.""" + +from __future__ import annotations + +from types import MappingProxyType +from typing import Any + +from motioneye_client.client import MotionEyeClient +from motioneye_client.const import KEY_ID + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from . 
import get_motioneye_device_identifier + + +def get_motioneye_entity_unique_id( + config_entry_id: str, camera_id: int, entity_type: str +) -> str: + """Get the unique_id for a motionEye entity.""" + return f"{config_entry_id}_{camera_id}_{entity_type}" + + +class MotionEyeEntity(CoordinatorEntity): + """Base class for motionEye entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + config_entry_id: str, + type_name: str, + camera: dict[str, Any], + client: MotionEyeClient, + coordinator: DataUpdateCoordinator, + options: MappingProxyType[str, Any], + entity_description: EntityDescription | None = None, + ) -> None: + """Initialize a motionEye entity.""" + self._camera_id = camera[KEY_ID] + self._device_identifier = get_motioneye_device_identifier( + config_entry_id, self._camera_id + ) + self._unique_id = get_motioneye_entity_unique_id( + config_entry_id, + self._camera_id, + type_name, + ) + self._client = client + self._camera: dict[str, Any] | None = camera + self._options = options + if entity_description is not None: + self.entity_description = entity_description + super().__init__(coordinator) + + @property + def unique_id(self) -> str: + """Return a unique id for this instance.""" + return self._unique_id + + @property + def device_info(self) -> DeviceInfo: + """Return the device information.""" + return DeviceInfo(identifiers={self._device_identifier}) + + @property + def available(self) -> bool: + """Return if entity is available.""" + return self._camera is not None and super().available diff --git a/homeassistant/components/motioneye/icons.json b/homeassistant/components/motioneye/icons.json index b0a4ea8dcb1..7cc93d528e8 100644 --- a/homeassistant/components/motioneye/icons.json +++ b/homeassistant/components/motioneye/icons.json @@ -1,7 +1,13 @@ { "services": { - "set_text_overlay": "mdi:text-box-outline", - "action": "mdi:gesture-tap-button", - "snapshot": "mdi:camera" + "set_text_overlay": { + "service": "mdi:text-box-outline" + }, + "action": { + "service": "mdi:gesture-tap-button" + }, + "snapshot": { + "service": "mdi:camera" + } } } diff --git a/homeassistant/components/motioneye/media_source.py b/homeassistant/components/motioneye/media_source.py index 7c12b84f255..7a5ed6646d5 100644 --- a/homeassistant/components/motioneye/media_source.py +++ b/homeassistant/components/motioneye/media_source.py @@ -9,12 +9,13 @@ from typing import cast from motioneye_client.const import KEY_MEDIA_LIST, KEY_MIME_TYPE, KEY_PATH from homeassistant.components.media_player import MediaClass, MediaType -from homeassistant.components.media_source.error import MediaSourceError, Unresolvable -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, + MediaSourceError, MediaSourceItem, PlayMedia, + Unresolvable, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback diff --git a/homeassistant/components/motioneye/sensor.py b/homeassistant/components/motioneye/sensor.py index dac4d77cdb4..e0113544848 100644 --- a/homeassistant/components/motioneye/sensor.py +++ b/homeassistant/components/motioneye/sensor.py @@ -16,8 +16,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . import MotionEyeEntity, get_camera_from_cameras, listen_for_new_cameras +from . 
import get_camera_from_cameras, listen_for_new_cameras from .const import CONF_CLIENT, CONF_COORDINATOR, DOMAIN, TYPE_MOTIONEYE_ACTION_SENSOR +from .entity import MotionEyeEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/motioneye/switch.py b/homeassistant/components/motioneye/switch.py index 81a01587aa0..9d704f17740 100644 --- a/homeassistant/components/motioneye/switch.py +++ b/homeassistant/components/motioneye/switch.py @@ -22,8 +22,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . import MotionEyeEntity, get_camera_from_cameras, listen_for_new_cameras +from . import get_camera_from_cameras, listen_for_new_cameras from .const import CONF_CLIENT, CONF_COORDINATOR, DOMAIN, TYPE_MOTIONEYE_SWITCH_BASE +from .entity import MotionEyeEntity MOTIONEYE_SWITCHES = [ SwitchEntityDescription( diff --git a/homeassistant/components/motionmount/entity.py b/homeassistant/components/motionmount/entity.py index 8403af05491..ba81c9d10bd 100644 --- a/homeassistant/components/motionmount/entity.py +++ b/homeassistant/components/motionmount/entity.py @@ -1,5 +1,7 @@ """Support for MotionMount sensors.""" +import logging +import socket from typing import TYPE_CHECKING import motionmount @@ -12,6 +14,8 @@ from homeassistant.helpers.entity import Entity from .const import DOMAIN, EMPTY_MAC +_LOGGER = logging.getLogger(__name__) + class MotionMountEntity(Entity): """Representation of a MotionMount entity.""" @@ -34,7 +38,8 @@ class MotionMountEntity(Entity): self._attr_device_info = DeviceInfo( name=mm.name, manufacturer="Vogel's", - model="TVM 7675", + model="MotionMount SIGNATURE Pro", + model_id="TVM 7675 Pro", ) if mac == EMPTY_MAC: @@ -69,3 +74,23 @@ class MotionMountEntity(Entity): self.mm.remove_listener(self.async_write_ha_state) self.mm.remove_listener(self.update_name) await super().async_will_remove_from_hass() + + async def _ensure_connected(self) -> bool: + """Make sure there is a connection with the MotionMount. + + Returns false if the connection failed to be ensured. + """ + + if self.mm.is_connected: + return True + try: + await self.mm.connect() + except (ConnectionError, TimeoutError, socket.gaierror): + # We're not interested in exceptions here. In case of a failed connection + # the try/except from the caller will report it. 
+ # The purpose of `_ensure_connected()` is only to make sure we try to + # reconnect, where failures should not be logged each time + return False + else: + _LOGGER.warning("Successfully reconnected to MotionMount") + return True diff --git a/homeassistant/components/motionmount/manifest.json b/homeassistant/components/motionmount/manifest.json index b7ce3ad1fd9..1fa3d31cfab 100644 --- a/homeassistant/components/motionmount/manifest.json +++ b/homeassistant/components/motionmount/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/motionmount", "integration_type": "device", "iot_class": "local_push", - "requirements": ["python-MotionMount==2.0.0"], + "requirements": ["python-MotionMount==2.2.0"], "zeroconf": ["_tvm._tcp.local."] } diff --git a/homeassistant/components/motionmount/number.py b/homeassistant/components/motionmount/number.py index 3217a4558e1..b42c04a6588 100644 --- a/homeassistant/components/motionmount/number.py +++ b/homeassistant/components/motionmount/number.py @@ -1,11 +1,14 @@ """Support for MotionMount numeric control.""" +import socket + import motionmount from homeassistant.components.number import NumberEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -46,7 +49,13 @@ class MotionMountExtension(MotionMountEntity, NumberEntity): async def async_set_native_value(self, value: float) -> None: """Set the new value for extension.""" - await self.mm.set_extension(int(value)) + try: + await self.mm.set_extension(int(value)) + except (TimeoutError, socket.gaierror) as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="failed_communication", + ) from ex class MotionMountTurn(MotionMountEntity, NumberEntity): @@ -69,4 +78,10 @@ class MotionMountTurn(MotionMountEntity, NumberEntity): async def async_set_native_value(self, value: float) -> None: """Set the new value for turn.""" - await self.mm.set_turn(int(value * -1)) + try: + await self.mm.set_turn(int(value * -1)) + except (TimeoutError, socket.gaierror) as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="failed_communication", + ) from ex diff --git a/homeassistant/components/motionmount/select.py b/homeassistant/components/motionmount/select.py index d15bbb7326b..9b43d901a21 100644 --- a/homeassistant/components/motionmount/select.py +++ b/homeassistant/components/motionmount/select.py @@ -1,15 +1,23 @@ """Support for MotionMount numeric control.""" +from datetime import timedelta +import logging +import socket + import motionmount from homeassistant.components.select import SelectEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, WALL_PRESET_NAME from .entity import MotionMountEntity +_LOGGER = logging.getLogger(__name__) +SCAN_INTERVAL = timedelta(seconds=60) + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback @@ -23,6 +31,7 @@ async def async_setup_entry( class MotionMountPresets(MotionMountEntity, SelectEntity): """The presets of a MotionMount.""" + _attr_should_poll = True _attr_translation_key = 
"motionmount_preset" def __init__( @@ -44,8 +53,15 @@ class MotionMountPresets(MotionMountEntity, SelectEntity): async def async_update(self) -> None: """Get latest state from MotionMount.""" - self._presets = await self.mm.get_presets() - self._update_options(self._presets) + if not await self._ensure_connected(): + return + + try: + self._presets = await self.mm.get_presets() + except (TimeoutError, socket.gaierror) as ex: + _LOGGER.warning("Failed to communicate with MotionMount: %s", ex) + else: + self._update_options(self._presets) @property def current_option(self) -> str | None: @@ -72,8 +88,12 @@ class MotionMountPresets(MotionMountEntity, SelectEntity): async def async_select_option(self, option: str) -> None: """Set the new option.""" index = int(option[:1]) - await self.mm.go_to_preset(index) - self._attr_current_option = option - - # Perform an update so we detect changes to the presets (changes are not pushed) - self.async_schedule_update_ha_state(True) + try: + await self.mm.go_to_preset(index) + except (TimeoutError, socket.gaierror) as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="failed_communication", + ) from ex + else: + self._attr_current_option = option diff --git a/homeassistant/components/motionmount/strings.json b/homeassistant/components/motionmount/strings.json index 39f7c53db35..bd28156607c 100644 --- a/homeassistant/components/motionmount/strings.json +++ b/homeassistant/components/motionmount/strings.json @@ -56,5 +56,10 @@ } } } + }, + "exceptions": { + "failed_communication": { + "message": "Failed to communicate with MotionMount" + } } } diff --git a/homeassistant/components/mpd/config_flow.py b/homeassistant/components/mpd/config_flow.py index f37ebe5e5e8..36777a205f9 100644 --- a/homeassistant/components/mpd/config_flow.py +++ b/homeassistant/components/mpd/config_flow.py @@ -67,19 +67,17 @@ class MPDConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Attempt to import the existing configuration.""" - self._async_abort_entries_match({CONF_HOST: import_config[CONF_HOST]}) + self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) client = MPDClient() client.timeout = 30 client.idletimeout = 10 try: async with timeout(35): - await client.connect(import_config[CONF_HOST], import_config[CONF_PORT]) - if CONF_PASSWORD in import_config: - await client.password(import_config[CONF_PASSWORD]) + await client.connect(import_data[CONF_HOST], import_data[CONF_PORT]) + if CONF_PASSWORD in import_data: + await client.password(import_data[CONF_PASSWORD]) with suppress(mpd.ConnectionError): client.disconnect() except ( @@ -94,10 +92,10 @@ class MPDConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="unknown") return self.async_create_entry( - title=import_config.get(CONF_NAME, "Music Player Daemon"), + title=import_data.get(CONF_NAME, "Music Player Daemon"), data={ - CONF_HOST: import_config[CONF_HOST], - CONF_PORT: import_config[CONF_PORT], - CONF_PASSWORD: import_config.get(CONF_PASSWORD), + CONF_HOST: import_data[CONF_HOST], + CONF_PORT: import_data[CONF_PORT], + CONF_PASSWORD: import_data.get(CONF_PASSWORD), }, ) diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 86eeca2017c..bcad8747c39 100644 --- a/homeassistant/components/mqtt/__init__.py +++ 
b/homeassistant/components/mqtt/__init__.py @@ -76,8 +76,8 @@ from .const import ( # noqa: F401 DEFAULT_QOS, DEFAULT_RETAIN, DOMAIN, + ENTITY_PLATFORMS, MQTT_CONNECTION_STATE, - RELOADABLE_PLATFORMS, TEMPLATE_ERRORS, ) from .models import ( # noqa: F401 @@ -225,77 +225,27 @@ async def async_check_config_schema( ) from exc -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Load a config entry.""" - conf: dict[str, Any] - mqtt_data: MqttData +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the actions and websocket API for the MQTT component.""" - async def _setup_client( - client_available: asyncio.Future[bool], - ) -> tuple[MqttData, dict[str, Any]]: - """Set up the MQTT client.""" - # Fetch configuration - conf = dict(entry.data) - hass_config = await conf_util.async_hass_config_yaml(hass) - mqtt_yaml = CONFIG_SCHEMA(hass_config).get(DOMAIN, []) - await async_create_certificate_temp_files(hass, conf) - client = MQTT(hass, entry, conf) - if DOMAIN in hass.data: - mqtt_data = hass.data[DATA_MQTT] - mqtt_data.config = mqtt_yaml - mqtt_data.client = client - else: - # Initial setup - websocket_api.async_register_command(hass, websocket_subscribe) - websocket_api.async_register_command(hass, websocket_mqtt_info) - hass.data[DATA_MQTT] = mqtt_data = MqttData(config=mqtt_yaml, client=client) - await client.async_start(mqtt_data) - - # Restore saved subscriptions - if mqtt_data.subscriptions_to_restore: - mqtt_data.client.async_restore_tracked_subscriptions( - mqtt_data.subscriptions_to_restore - ) - mqtt_data.subscriptions_to_restore = set() - mqtt_data.reload_dispatchers.append( - entry.add_update_listener(_async_config_entry_updated) - ) - - return (mqtt_data, conf) - - client_available: asyncio.Future[bool] - if DATA_MQTT_AVAILABLE not in hass.data: - client_available = hass.data[DATA_MQTT_AVAILABLE] = hass.loop.create_future() - else: - client_available = hass.data[DATA_MQTT_AVAILABLE] - - mqtt_data, conf = await _setup_client(client_available) - platforms_used = platforms_from_config(mqtt_data.config) - platforms_used.update( - entry.domain - for entry in er.async_entries_for_config_entry( - er.async_get(hass), entry.entry_id - ) - ) - integration = async_get_loaded_integration(hass, DOMAIN) - # Preload platforms we know we are going to use so - # discovery can setup each platform synchronously - # and avoid creating a flood of tasks at startup - # while waiting for the the imports to complete - if not integration.platforms_are_loaded(platforms_used): - with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PLATFORMS): - await integration.async_get_platforms(platforms_used) - - # Wait to connect until the platforms are loaded so - # we can be sure discovery does not have to wait for - # each platform to load when we get the flood of retained - # messages on connect - await mqtt_data.client.async_connect(client_available) + websocket_api.async_register_command(hass, websocket_subscribe) + websocket_api.async_register_command(hass, websocket_mqtt_info) async def async_publish_service(call: ServiceCall) -> None: """Handle MQTT publish service calls.""" msg_topic: str | None = call.data.get(ATTR_TOPIC) msg_topic_template: str | None = call.data.get(ATTR_TOPIC_TEMPLATE) + + if not mqtt_config_entry_enabled(hass): + raise ServiceValidationError( + translation_key="mqtt_not_setup_cannot_publish", + translation_domain=DOMAIN, + translation_placeholders={ + "topic": str(msg_topic or msg_topic_template) + }, + ) + + mqtt_data = 
hass.data[DATA_MQTT] payload: PublishPayloadType = call.data.get(ATTR_PAYLOAD) evaluate_payload: bool = call.data.get(ATTR_EVALUATE_PAYLOAD, False) payload_template: str | None = call.data.get(ATTR_PAYLOAD_TEMPLATE) @@ -402,6 +352,71 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: } ), ) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Load a config entry.""" + conf: dict[str, Any] + mqtt_data: MqttData + + async def _setup_client() -> tuple[MqttData, dict[str, Any]]: + """Set up the MQTT client.""" + # Fetch configuration + conf = dict(entry.data) + hass_config = await conf_util.async_hass_config_yaml(hass) + mqtt_yaml = CONFIG_SCHEMA(hass_config).get(DOMAIN, []) + await async_create_certificate_temp_files(hass, conf) + client = MQTT(hass, entry, conf) + if DOMAIN in hass.data: + mqtt_data = hass.data[DATA_MQTT] + mqtt_data.config = mqtt_yaml + mqtt_data.client = client + else: + # Initial setup + hass.data[DATA_MQTT] = mqtt_data = MqttData(config=mqtt_yaml, client=client) + await client.async_start(mqtt_data) + + # Restore saved subscriptions + if mqtt_data.subscriptions_to_restore: + mqtt_data.client.async_restore_tracked_subscriptions( + mqtt_data.subscriptions_to_restore + ) + mqtt_data.subscriptions_to_restore = set() + mqtt_data.reload_dispatchers.append( + entry.add_update_listener(_async_config_entry_updated) + ) + + return (mqtt_data, conf) + + client_available: asyncio.Future[bool] + if DATA_MQTT_AVAILABLE not in hass.data: + client_available = hass.data[DATA_MQTT_AVAILABLE] = hass.loop.create_future() + else: + client_available = hass.data[DATA_MQTT_AVAILABLE] + + mqtt_data, conf = await _setup_client() + platforms_used = platforms_from_config(mqtt_data.config) + platforms_used.update( + entry.domain + for entry in er.async_entries_for_config_entry( + er.async_get(hass), entry.entry_id + ) + ) + integration = async_get_loaded_integration(hass, DOMAIN) + # Preload platforms we know we are going to use so + # discovery can setup each platform synchronously + # and avoid creating a flood of tasks at startup + # while waiting for the the imports to complete + if not integration.platforms_are_loaded(platforms_used): + with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PLATFORMS): + await integration.async_get_platforms(platforms_used) + + # Wait to connect until the platforms are loaded so + # we can be sure discovery does not have to wait for + # each platform to load when we get the flood of retained + # messages on connect + await mqtt_data.client.async_connect(client_available) # setup platforms and discovery async def _reload_config(call: ServiceCall) -> None: @@ -438,7 +453,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: for entity in list(mqtt_platform.entities.values()) if getattr(entity, "_discovery_data", None) is None and mqtt_platform.config_entry - and mqtt_platform.domain in RELOADABLE_PLATFORMS + and mqtt_platform.domain in ENTITY_PLATFORMS ] await asyncio.gather(*tasks) @@ -557,10 +572,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: mqtt_data = hass.data[DATA_MQTT] mqtt_client = mqtt_data.client - # Unload publish and dump services. 
- hass.services.async_remove(DOMAIN, SERVICE_PUBLISH) - hass.services.async_remove(DOMAIN, SERVICE_DUMP) - # Stop the discovery await discovery.async_stop(hass) # Unload the platforms diff --git a/homeassistant/components/mqtt/abbreviations.py b/homeassistant/components/mqtt/abbreviations.py index f4a32bbdf9d..65e24d5d780 100644 --- a/homeassistant/components/mqtt/abbreviations.py +++ b/homeassistant/components/mqtt/abbreviations.py @@ -6,9 +6,6 @@ ABBREVIATIONS = { "act_stat_t": "activity_state_topic", "act_val_tpl": "activity_value_template", "atype": "automation_type", - "aux_cmd_t": "aux_command_topic", - "aux_stat_tpl": "aux_state_template", - "aux_stat_t": "aux_state_topic", "av_tones": "available_tones", "avty": "availability", "avty_mode": "availability_mode", @@ -33,6 +30,7 @@ ABBREVIATIONS = { "cmd_on_tpl": "command_on_template", "cmd_t": "command_topic", "cmd_tpl": "command_template", + "cmps": "components", "cod_arm_req": "code_arm_required", "cod_dis_req": "code_disarm_required", "cod_form": "code_format", @@ -48,6 +46,7 @@ ABBREVIATIONS = { "dir_cmd_tpl": "direction_command_template", "dir_stat_t": "direction_state_topic", "dir_val_tpl": "direction_value_template", + "dsp_prc": "display_precision", "dock_cmd_t": "dock_command_topic", "dock_cmd_tpl": "dock_command_template", "e": "encoding", @@ -95,6 +94,7 @@ ABBREVIATIONS = { "min_mirs": "min_mireds", "max_temp": "max_temp", "min_temp": "min_temp", + "migr_discvry": "migrate_discovery", "mode": "mode", "mode_cmd_tpl": "mode_command_template", "mode_cmd_t": "mode_command_topic", @@ -112,6 +112,7 @@ ABBREVIATIONS = { "osc_cmd_tpl": "oscillation_command_template", "osc_stat_t": "oscillation_state_topic", "osc_val_tpl": "oscillation_value_template", + "p": "platform", "pause_cmd_t": "pause_command_topic", "pause_mw_cmd_tpl": "pause_command_template", "pct_cmd_t": "percentage_command_topic", @@ -157,8 +158,6 @@ ABBREVIATIONS = { "pos_open": "position_open", "pow_cmd_t": "power_command_topic", "pow_cmd_tpl": "power_command_template", - "pow_stat_t": "power_state_topic", - "pow_stat_tpl": "power_state_template", "pr_mode_cmd_t": "preset_mode_command_topic", "pr_mode_cmd_tpl": "preset_mode_command_template", "pr_mode_stat_t": "preset_mode_state_topic", diff --git a/homeassistant/components/mqtt/alarm_control_panel.py b/homeassistant/components/mqtt/alarm_control_panel.py index 3cdb3efea7f..613f665c302 100644 --- a/homeassistant/components/mqtt/alarm_control_panel.py +++ b/homeassistant/components/mqtt/alarm_control_panel.py @@ -7,23 +7,12 @@ import logging import voluptuous as vol import homeassistant.components.alarm_control_panel as alarm -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_CODE, - CONF_NAME, - CONF_VALUE_TEMPLATE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_CODE, CONF_NAME, CONF_VALUE_TEMPLATE from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import 
AddEntitiesCallback @@ -39,13 +28,15 @@ from .const import ( CONF_SUPPORTED_FEATURES, PAYLOAD_NONE, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import MqttCommandTemplate, MqttValueTemplate, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + _SUPPORTED_FEATURES = { "arm_home": AlarmControlPanelEntityFeature.ARM_HOME, "arm_away": AlarmControlPanelEntityFeature.ARM_AWAY, @@ -182,29 +173,30 @@ class MqttAlarm(MqttEntity, alarm.AlarmControlPanelEntity): ) return if payload == PAYLOAD_NONE: - self._attr_state = None + self._attr_alarm_state = None return if payload not in ( - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_PENDING, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.PENDING, + AlarmControlPanelState.ARMING, + AlarmControlPanelState.DISARMING, + AlarmControlPanelState.TRIGGERED, ): _LOGGER.warning("Received unexpected payload: %s", msg.payload) return - self._attr_state = str(payload) + assert isinstance(payload, str) + self._attr_alarm_state = AlarmControlPanelState(payload) @callback def _prepare_subscribe_topics(self) -> None: """(Re)Subscribe to topics.""" self.add_subscription( - CONF_STATE_TOPIC, self._state_message_received, {"_attr_state"} + CONF_STATE_TOPIC, self._state_message_received, {"_attr_alarm_state"} ) async def _subscribe_topics(self) -> None: diff --git a/homeassistant/components/mqtt/binary_sensor.py b/homeassistant/components/mqtt/binary_sensor.py index 293b6e5f1f4..b49dc7aa24c 100644 --- a/homeassistant/components/mqtt/binary_sensor.py +++ b/homeassistant/components/mqtt/binary_sensor.py @@ -37,12 +37,14 @@ from homeassistant.util import dt as dt_util from . 
import subscription from .config import MQTT_RO_SCHEMA from .const import CONF_STATE_TOPIC, PAYLOAD_NONE -from .mixins import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper +from .entity import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper from .models import MqttValueTemplate, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Binary sensor" CONF_OFF_DELAY = "off_delay" DEFAULT_PAYLOAD_OFF = "OFF" diff --git a/homeassistant/components/mqtt/button.py b/homeassistant/components/mqtt/button.py index 6ad11859f44..8e5446b532e 100644 --- a/homeassistant/components/mqtt/button.py +++ b/homeassistant/components/mqtt/button.py @@ -15,11 +15,13 @@ from homeassistant.helpers.typing import ConfigType from .config import DEFAULT_RETAIN, MQTT_BASE_SCHEMA from .const import CONF_COMMAND_TEMPLATE, CONF_COMMAND_TOPIC, CONF_RETAIN -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import MqttCommandTemplate from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic +PARALLEL_UPDATES = 0 + CONF_PAYLOAD_PRESS = "payload_press" DEFAULT_NAME = "MQTT Button" DEFAULT_PAYLOAD_PRESS = "PRESS" diff --git a/homeassistant/components/mqtt/camera.py b/homeassistant/components/mqtt/camera.py index fa550b9fd0c..88fabad0446 100644 --- a/homeassistant/components/mqtt/camera.py +++ b/homeassistant/components/mqtt/camera.py @@ -20,13 +20,15 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import subscription from .config import MQTT_BASE_SCHEMA from .const import CONF_TOPIC -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_IMAGE_ENCODING = "image_encoding" DEFAULT_NAME = "MQTT Camera" diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 4fa8b7db02a..0091d2370a4 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -119,7 +119,7 @@ MAX_PACKETS_TO_READ = 500 type SocketType = socket.socket | ssl.SSLSocket | mqtt.WebsocketWrapper | Any -type SubscribePayloadType = str | bytes # Only bytes if encoding is None +type SubscribePayloadType = str | bytes | bytearray # Only bytes if encoding is None def publish( @@ -227,7 +227,7 @@ def async_subscribe_internal( translation_placeholders={"topic": topic}, ) from exc client = mqtt_data.client - if not client.connected and not mqtt_config_entry_enabled(hass): + if not mqtt_config_entry_enabled(hass): raise HomeAssistantError( f"Cannot subscribe to topic '{topic}', MQTT is not enabled", translation_key="mqtt_not_setup_cannot_subscribe", @@ -376,7 +376,9 @@ class MQTT: self._simple_subscriptions: defaultdict[str, set[Subscription]] = defaultdict( set ) - self._wildcard_subscriptions: set[Subscription] = set() + # To ensure the wildcard subscriptions order is preserved, we use a dict + # with `None` values instead of a set. + self._wildcard_subscriptions: dict[Subscription, None] = {} # _retained_topics prevents a Subscription from receiving a # retained message more than once per topic. 
This prevents flooding # already active subscribers when new subscribers subscribe to a topic @@ -754,7 +756,7 @@ class MQTT: if subscription.is_simple_match: self._simple_subscriptions[subscription.topic].add(subscription) else: - self._wildcard_subscriptions.add(subscription) + self._wildcard_subscriptions[subscription] = None @callback def _async_untrack_subscription(self, subscription: Subscription) -> None: @@ -772,9 +774,13 @@ class MQTT: if not simple_subscriptions[topic]: del simple_subscriptions[topic] else: - self._wildcard_subscriptions.remove(subscription) + del self._wildcard_subscriptions[subscription] except (KeyError, ValueError) as exc: - raise HomeAssistantError("Can't remove subscription twice") from exc + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="mqtt_not_setup_cannot_unsubscribe_twice", + translation_placeholders={"topic": topic}, + ) from exc @callback def _async_queue_subscriptions( @@ -820,7 +826,11 @@ class MQTT: ) -> Callable[[], None]: """Set up a subscription to a topic with the provided qos.""" if not isinstance(topic, str): - raise HomeAssistantError("Topic needs to be a string!") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="mqtt_topic_not_a_string", + translation_placeholders={"topic": topic}, + ) if job_type is None: job_type = get_hassjob_callable_job_type(msg_callback) @@ -1211,7 +1221,11 @@ class MQTT: import paho.mqtt.client as mqtt raise HomeAssistantError( - f"Error talking to MQTT: {mqtt.error_string(result_code)}" + translation_domain=DOMAIN, + translation_key="mqtt_broker_error", + translation_placeholders={ + "error_message": mqtt.error_string(result_code) + }, ) # Create the mid event if not created, either _mqtt_handle_mid or diff --git a/homeassistant/components/mqtt/climate.py b/homeassistant/components/mqtt/climate.py index 426bac8e9ca..e62303472ed 100644 --- a/homeassistant/components/mqtt/climate.py +++ b/homeassistant/components/mqtt/climate.py @@ -79,7 +79,7 @@ from .const import ( DEFAULT_OPTIMISTIC, PAYLOAD_NONE, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -91,14 +91,9 @@ from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) -DEFAULT_NAME = "MQTT HVAC" +PARALLEL_UPDATES = 0 -# Options CONF_AUX_COMMAND_TOPIC, CONF_AUX_STATE_TOPIC -# and CONF_AUX_STATE_TEMPLATE were deprecated in HA Core 2023.9 -# Support was removed in HA Core 2024.3 -CONF_AUX_COMMAND_TOPIC = "aux_command_topic" -CONF_AUX_STATE_TEMPLATE = "aux_state_template" -CONF_AUX_STATE_TOPIC = "aux_state_topic" +DEFAULT_NAME = "MQTT HVAC" CONF_FAN_MODE_COMMAND_TEMPLATE = "fan_mode_command_template" CONF_FAN_MODE_COMMAND_TOPIC = "fan_mode_command_topic" @@ -113,10 +108,6 @@ CONF_HUMIDITY_STATE_TOPIC = "target_humidity_state_topic" CONF_HUMIDITY_MAX = "max_humidity" CONF_HUMIDITY_MIN = "min_humidity" -# Support for CONF_POWER_STATE_TOPIC and CONF_POWER_STATE_TEMPLATE -# was removed in HA Core 2023.8 -CONF_POWER_STATE_TEMPLATE = "power_state_template" -CONF_POWER_STATE_TOPIC = "power_state_topic" CONF_PRESET_MODE_STATE_TOPIC = "preset_mode_state_topic" CONF_PRESET_MODE_COMMAND_TOPIC = "preset_mode_command_topic" CONF_PRESET_MODE_VALUE_TEMPLATE = "preset_mode_value_template" @@ -201,7 +192,6 @@ TOPIC_KEYS = ( CONF_MODE_COMMAND_TOPIC, CONF_MODE_STATE_TOPIC, CONF_POWER_COMMAND_TOPIC, - CONF_POWER_STATE_TOPIC, 
CONF_PRESET_MODE_COMMAND_TOPIC, CONF_PRESET_MODE_STATE_TOPIC, CONF_SWING_MODE_COMMAND_TOPIC, @@ -295,8 +285,6 @@ _PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend( vol.Optional(CONF_PAYLOAD_OFF, default="OFF"): cv.string, vol.Optional(CONF_POWER_COMMAND_TOPIC): valid_publish_topic, vol.Optional(CONF_POWER_COMMAND_TEMPLATE): cv.template, - vol.Optional(CONF_POWER_STATE_TEMPLATE): cv.template, - vol.Optional(CONF_POWER_STATE_TOPIC): valid_subscribe_topic, vol.Optional(CONF_PRECISION): vol.In( [PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE] ), @@ -343,16 +331,6 @@ _PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend( ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) PLATFORM_SCHEMA_MODERN = vol.All( - # Support for CONF_POWER_STATE_TOPIC and CONF_POWER_STATE_TEMPLATE - # was removed in HA Core 2023.8 - cv.removed(CONF_POWER_STATE_TEMPLATE), - cv.removed(CONF_POWER_STATE_TOPIC), - # Options CONF_AUX_COMMAND_TOPIC, CONF_AUX_STATE_TOPIC - # and CONF_AUX_STATE_TEMPLATE were deprecated in HA Core 2023.9 - # Support was removed in HA Core 2024.3 - cv.removed(CONF_AUX_COMMAND_TOPIC), - cv.removed(CONF_AUX_STATE_TEMPLATE), - cv.removed(CONF_AUX_STATE_TOPIC), _PLATFORM_SCHEMA_BASE, valid_preset_mode_configuration, valid_humidity_range_configuration, @@ -363,10 +341,6 @@ _DISCOVERY_SCHEMA_BASE = _PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA DISCOVERY_SCHEMA = vol.All( _DISCOVERY_SCHEMA_BASE, - # Support for CONF_POWER_STATE_TOPIC and CONF_POWER_STATE_TEMPLATE - # was removed in HA Core 2023.8 - cv.removed(CONF_POWER_STATE_TEMPLATE), - cv.removed(CONF_POWER_STATE_TOPIC), valid_preset_mode_configuration, valid_humidity_range_configuration, valid_humidity_state_configuration, @@ -547,7 +521,6 @@ class MqttClimate(MqttTemperatureControlEntity, ClimateEntity): _attributes_extra_blocked = MQTT_CLIMATE_ATTRIBUTES_BLOCKED _attr_target_temperature_low: float | None = None _attr_target_temperature_high: float | None = None - _enable_turn_on_off_backwards_compatibility = False @staticmethod def config_schema() -> VolSchemaType: diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index ca799ff3653..0081246c705 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -16,13 +16,9 @@ from cryptography.x509 import load_pem_x509_certificate import voluptuous as vol from homeassistant.components.file_upload import process_uploaded_file -from homeassistant.components.hassio import HassioServiceInfo, is_hassio -from homeassistant.components.hassio.addon_manager import ( - AddonError, - AddonManager, - AddonState, -) +from homeassistant.components.hassio import AddonError, AddonManager, AddonState from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -38,9 +34,10 @@ from homeassistant.const import ( CONF_PROTOCOL, CONF_USERNAME, ) -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.json import json_dumps from homeassistant.helpers.selector import ( BooleanSelector, @@ -57,6 +54,7 @@ from homeassistant.helpers.selector import ( TextSelectorConfig, TextSelectorType, ) +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads from .addon import 
get_addon_manager @@ -209,7 +207,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry | None _hassio_discovery: dict[str, Any] | None = None _addon_manager: AddonManager @@ -224,7 +221,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> MQTTOptionsFlowHandler: """Get the options flow for this handler.""" - return MQTTOptionsFlowHandler(config_entry) + return MQTTOptionsFlowHandler() async def _async_install_addon(self) -> None: """Install the Mosquitto Mqtt broker add-on.""" @@ -335,16 +332,15 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): break else: raise AddonError( - f"Failed to correctly start {addon_manager.addon_name} add-on" + translation_domain=DOMAIN, + translation_key="addon_start_failed", + translation_placeholders={"addon": addon_manager.addon_name}, ) async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if is_hassio(self.hass): # Offer to set up broker add-on if supervisor is available self._addon_manager = get_addon_manager(self.hass) @@ -400,7 +396,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with MQTT broker.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) if is_hassio(self.hass): # Check if entry setup matches the add-on discovery config addon_manager = get_addon_manager(self.hass) @@ -439,18 +434,18 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Confirm re-authentication with MQTT broker.""" errors: dict[str, str] = {} - assert self.entry is not None + reauth_entry = self._get_reauth_entry() if user_input: substituted_used_data = update_password_from_user_input( - self.entry.data.get(CONF_PASSWORD), user_input + reauth_entry.data.get(CONF_PASSWORD), user_input ) - new_entry_data = {**self.entry.data, **substituted_used_data} + new_entry_data = {**reauth_entry.data, **substituted_used_data} if await self.hass.async_add_executor_job( try_connection, new_entry_data, ): return self.async_update_reload_and_abort( - self.entry, data=new_entry_data + reauth_entry, data=new_entry_data ) errors["base"] = "invalid_auth" @@ -458,7 +453,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): schema = self.add_suggested_values_to_schema( REAUTH_SCHEMA, { - CONF_USERNAME: self.entry.data.get(CONF_USERNAME), + CONF_USERNAME: reauth_entry.data.get(CONF_USERNAME), CONF_PASSWORD: PWD_NOT_CHANGED, }, ) @@ -475,20 +470,32 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} fields: OrderedDict[Any, Any] = OrderedDict() validated_user_input: dict[str, Any] = {} + if is_reconfigure := (self.source == SOURCE_RECONFIGURE): + reconfigure_entry = self._get_reconfigure_entry() if await async_get_broker_settings( self, fields, - None, + reconfigure_entry.data if is_reconfigure else None, user_input, validated_user_input, errors, ): + if is_reconfigure: + update_password_from_user_input( + reconfigure_entry.data.get(CONF_PASSWORD), validated_user_input + ) + can_connect = await self.hass.async_add_executor_job( try_connection, validated_user_input, ) if can_connect: + if is_reconfigure: + return self.async_update_reload_and_abort( + reconfigure_entry, + data=validated_user_input, + ) validated_user_input[CONF_DISCOVERY] = DEFAULT_DISCOVERY return self.async_create_entry( 
title=validated_user_input[CONF_BROKER], @@ -501,6 +508,12 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): step_id="broker", data_schema=vol.Schema(fields), errors=errors ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + return await self.async_step_broker() + async def async_step_hassio( self, discovery_info: HassioServiceInfo ) -> ConfigFlowResult: @@ -551,11 +564,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): class MQTTOptionsFlowHandler(OptionsFlow): """Handle MQTT options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize MQTT options flow.""" - self.config_entry = config_entry - self.broker_config: dict[str, str | int] = {} - self.options = config_entry.options + self.broker_config: dict[str, Any] = {} async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the MQTT options.""" @@ -745,6 +756,16 @@ class MQTTOptionsFlowHandler(OptionsFlow): ) +async def _get_uploaded_file(hass: HomeAssistant, id: str) -> str: + """Get file content from uploaded file.""" + + def _proces_uploaded_file() -> str: + with process_uploaded_file(hass, id) as file_path: + return file_path.read_text(encoding=DEFAULT_ENCODING) + + return await hass.async_add_executor_job(_proces_uploaded_file) + + async def async_get_broker_settings( flow: ConfigFlow | OptionsFlow, fields: OrderedDict[Any, Any], @@ -803,8 +824,7 @@ async def async_get_broker_settings( return False certificate_id: str | None = user_input.get(CONF_CERTIFICATE) if certificate_id: - with process_uploaded_file(hass, certificate_id) as certificate_file: - certificate = certificate_file.read_text(encoding=DEFAULT_ENCODING) + certificate = await _get_uploaded_file(hass, certificate_id) # Return to form for file upload CA cert or client cert and key if ( @@ -820,15 +840,9 @@ async def async_get_broker_settings( return False if client_certificate_id: - with process_uploaded_file( - hass, client_certificate_id - ) as client_certificate_file: - client_certificate = client_certificate_file.read_text( - encoding=DEFAULT_ENCODING - ) + client_certificate = await _get_uploaded_file(hass, client_certificate_id) if client_key_id: - with process_uploaded_file(hass, client_key_id) as key_file: - client_key = key_file.read_text(encoding=DEFAULT_ENCODING) + client_key = await _get_uploaded_file(hass, client_key_id) certificate_data: dict[str, Any] = {} if certificate: diff --git a/homeassistant/components/mqtt/const.py b/homeassistant/components/mqtt/const.py index 1e1011cc381..9f1c55a54e0 100644 --- a/homeassistant/components/mqtt/const.py +++ b/homeassistant/components/mqtt/const.py @@ -61,6 +61,7 @@ CONF_CURRENT_HUMIDITY_TOPIC = "current_humidity_topic" CONF_CURRENT_TEMP_TEMPLATE = "current_temperature_template" CONF_CURRENT_TEMP_TOPIC = "current_temperature_topic" CONF_ENABLED_BY_DEFAULT = "enabled_by_default" +CONF_ENTITY_PICTURE = "entity_picture" CONF_MODE_COMMAND_TEMPLATE = "mode_command_template" CONF_MODE_COMMAND_TOPIC = "mode_command_topic" CONF_MODE_LIST = "modes" @@ -89,6 +90,7 @@ CONF_TEMP_MIN = "min_temp" CONF_CERTIFICATE = "certificate" CONF_CLIENT_KEY = "client_key" CONF_CLIENT_CERT = "client_cert" +CONF_COMPONENTS = "components" CONF_TLS_INSECURE = "tls_insecure" # Device and integration info options @@ -158,7 +160,7 @@ MQTT_CONNECTION_STATE = "mqtt_connection_state" PAYLOAD_EMPTY_JSON = "{}" PAYLOAD_NONE = "None" 
-RELOADABLE_PLATFORMS = [ +ENTITY_PLATFORMS = [ Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.BUTTON, @@ -189,7 +191,7 @@ RELOADABLE_PLATFORMS = [ TEMPLATE_ERRORS = (jinja2.TemplateError, TemplateError, TypeError, ValueError) -SUPPORTED_COMPONENTS = { +SUPPORTED_COMPONENTS = ( "alarm_control_panel", "binary_sensor", "button", @@ -218,4 +220,4 @@ SUPPORTED_COMPONENTS = { "vacuum", "valve", "water_heater", -} +) diff --git a/homeassistant/components/mqtt/cover.py b/homeassistant/components/mqtt/cover.py index 2d1b64d002a..c7d041848f0 100644 --- a/homeassistant/components/mqtt/cover.py +++ b/homeassistant/components/mqtt/cover.py @@ -15,6 +15,7 @@ from homeassistant.components.cover import ( DEVICE_CLASSES_SCHEMA, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -61,13 +62,15 @@ from .const import ( DEFAULT_RETAIN, PAYLOAD_NONE, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import MqttCommandTemplate, MqttValueTemplate, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_GET_POSITION_TOPIC = "position_topic" CONF_GET_POSITION_TEMPLATE = "position_template" CONF_SET_POSITION_TOPIC = "set_position_topic" @@ -354,9 +357,9 @@ class MqttCover(MqttEntity, CoverEntity): # Reset the state to `unknown` self._attr_is_closed = None else: - self._attr_is_closed = state == STATE_CLOSED - self._attr_is_opening = state == STATE_OPENING - self._attr_is_closing = state == STATE_CLOSING + self._attr_is_closed = state == CoverState.CLOSED + self._attr_is_opening = state == CoverState.OPENING + self._attr_is_closing = state == CoverState.CLOSING @callback def _tilt_message_received(self, msg: ReceiveMessage) -> None: @@ -382,24 +385,24 @@ class MqttCover(MqttEntity, CoverEntity): if payload == self._config[CONF_STATE_STOPPED]: if self._config.get(CONF_GET_POSITION_TOPIC) is not None: state = ( - STATE_CLOSED + CoverState.CLOSED if self._attr_current_cover_position == DEFAULT_POSITION_CLOSED - else STATE_OPEN + else CoverState.OPEN ) else: state = ( - STATE_CLOSED - if self.state in [STATE_CLOSED, STATE_CLOSING] - else STATE_OPEN + CoverState.CLOSED + if self.state in [CoverState.CLOSED, CoverState.CLOSING] + else CoverState.OPEN ) elif payload == self._config[CONF_STATE_OPENING]: - state = STATE_OPENING + state = CoverState.OPENING elif payload == self._config[CONF_STATE_CLOSING]: - state = STATE_CLOSING + state = CoverState.CLOSING elif payload == self._config[CONF_STATE_OPEN]: - state = STATE_OPEN + state = CoverState.OPEN elif payload == self._config[CONF_STATE_CLOSED]: - state = STATE_CLOSED + state = CoverState.CLOSED elif payload == PAYLOAD_NONE: state = None else: @@ -451,7 +454,9 @@ class MqttCover(MqttEntity, CoverEntity): self._attr_current_cover_position = min(100, max(0, percentage_payload)) if self._config.get(CONF_STATE_TOPIC) is None: self._update_state( - STATE_CLOSED if self.current_cover_position == 0 else STATE_OPEN + CoverState.CLOSED + if self.current_cover_position == 0 + else CoverState.OPEN ) @callback @@ -493,7 +498,7 @@ class MqttCover(MqttEntity, CoverEntity): ) if self._optimistic: # Optimistically assume that cover has changed state. 
- self._update_state(STATE_OPEN) + self._update_state(CoverState.OPEN) if self._config.get(CONF_GET_POSITION_TOPIC): self._attr_current_cover_position = 100 self.async_write_ha_state() @@ -508,7 +513,7 @@ class MqttCover(MqttEntity, CoverEntity): ) if self._optimistic: # Optimistically assume that cover has changed state. - self._update_state(STATE_CLOSED) + self._update_state(CoverState.CLOSED) if self._config.get(CONF_GET_POSITION_TOPIC): self._attr_current_cover_position = 0 self.async_write_ha_state() @@ -609,9 +614,9 @@ class MqttCover(MqttEntity, CoverEntity): ) if self._optimistic: self._update_state( - STATE_CLOSED + CoverState.CLOSED if position_percentage <= self._config[CONF_POSITION_CLOSED] - else STATE_OPEN + else CoverState.OPEN ) self._attr_current_cover_position = position_percentage self.async_write_ha_state() diff --git a/homeassistant/components/mqtt/device_automation.py b/homeassistant/components/mqtt/device_automation.py index 8d23d32326b..366f2f13ad4 100644 --- a/homeassistant/components/mqtt/device_automation.py +++ b/homeassistant/components/mqtt/device_automation.py @@ -12,7 +12,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import device_trigger from .config import MQTT_BASE_SCHEMA -from .mixins import async_setup_non_entity_entry_helper +from .entity import async_setup_non_entity_entry_helper AUTOMATION_TYPE_TRIGGER = "trigger" AUTOMATION_TYPES = [AUTOMATION_TYPE_TRIGGER] diff --git a/homeassistant/components/mqtt/device_tracker.py b/homeassistant/components/mqtt/device_tracker.py index 57614106d4e..bdf543e046a 100644 --- a/homeassistant/components/mqtt/device_tracker.py +++ b/homeassistant/components/mqtt/device_tracker.py @@ -9,11 +9,7 @@ from typing import TYPE_CHECKING import voluptuous as vol from homeassistant.components import device_tracker -from homeassistant.components.device_tracker import ( - SOURCE_TYPES, - SourceType, - TrackerEntity, -) +from homeassistant.components.device_tracker import SourceType, TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_GPS_ACCURACY, @@ -33,13 +29,15 @@ from homeassistant.helpers.typing import ConfigType, VolSchemaType from . 
import subscription from .config import MQTT_BASE_SCHEMA from .const import CONF_PAYLOAD_RESET, CONF_STATE_TOPIC -from .mixins import CONF_JSON_ATTRS_TOPIC, MqttEntity, async_setup_entity_entry_helper +from .entity import CONF_JSON_ATTRS_TOPIC, MqttEntity, async_setup_entity_entry_helper from .models import MqttValueTemplate, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_PAYLOAD_HOME = "payload_home" CONF_PAYLOAD_NOT_HOME = "payload_not_home" CONF_SOURCE_TYPE = "source_type" @@ -65,8 +63,8 @@ PLATFORM_SCHEMA_MODERN_BASE = MQTT_BASE_SCHEMA.extend( vol.Optional(CONF_PAYLOAD_HOME, default=STATE_HOME): cv.string, vol.Optional(CONF_PAYLOAD_NOT_HOME, default=STATE_NOT_HOME): cv.string, vol.Optional(CONF_PAYLOAD_RESET, default=DEFAULT_PAYLOAD_RESET): cv.string, - vol.Optional(CONF_SOURCE_TYPE, default=DEFAULT_SOURCE_TYPE): vol.In( - SOURCE_TYPES + vol.Optional(CONF_SOURCE_TYPE, default=DEFAULT_SOURCE_TYPE): vol.Coerce( + SourceType ), }, ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) @@ -191,7 +189,7 @@ class MqttDeviceTracker(MqttEntity, TrackerEntity): return self._location_name @property - def source_type(self) -> SourceType | str: + def source_type(self) -> SourceType: """Return the source type, eg gps or router, of the device.""" - source_type: SourceType | str = self._config[CONF_SOURCE_TYPE] + source_type: SourceType = self._config[CONF_SOURCE_TYPE] return source_type diff --git a/homeassistant/components/mqtt/device_trigger.py b/homeassistant/components/mqtt/device_trigger.py index 911dce163f9..8665ac26961 100644 --- a/homeassistant/components/mqtt/device_trigger.py +++ b/homeassistant/components/mqtt/device_trigger.py @@ -36,7 +36,7 @@ from .const import ( DOMAIN, ) from .discovery import MQTTDiscoveryPayload, clear_discovery_hash -from .mixins import MqttDiscoveryDeviceUpdateMixin, send_discovery_done, update_device +from .entity import MqttDiscoveryDeviceUpdateMixin, send_discovery_done, update_device from .models import DATA_MQTT from .schemas import MQTT_ENTITY_DEVICE_INFO_SCHEMA @@ -148,7 +148,10 @@ class Trigger: def async_remove() -> None: """Remove trigger.""" if instance not in self.trigger_instances: - raise HomeAssistantError("Can't remove trigger twice") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="mqtt_trigger_cannot_remove_twice", + ) if instance.remove: instance.remove() diff --git a/homeassistant/components/mqtt/discovery.py b/homeassistant/components/mqtt/discovery.py index 8e379633674..a5ddb3ef4e6 100644 --- a/homeassistant/components/mqtt/discovery.py +++ b/homeassistant/components/mqtt/discovery.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections import deque +from dataclasses import dataclass import functools from itertools import chain import logging @@ -11,35 +12,42 @@ import re import time from typing import TYPE_CHECKING, Any -from homeassistant.config_entries import ConfigEntry +import voluptuous as vol + +from homeassistant.config_entries import ( + SOURCE_MQTT, + ConfigEntry, + signal_discovered_config_entry_removed, +) from homeassistant.const import CONF_DEVICE, CONF_PLATFORM from homeassistant.core import HassJobType, HomeAssistant, callback -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import discovery_flow import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, 
async_dispatcher_send, ) -from homeassistant.helpers.service_info.mqtt import MqttServiceInfo +from homeassistant.helpers.service_info.mqtt import MqttServiceInfo, ReceivePayloadType from homeassistant.helpers.typing import DiscoveryInfoType from homeassistant.loader import async_get_mqtt from homeassistant.util.json import json_loads_object from homeassistant.util.signal_type import SignalTypeFormat -from .. import mqtt from .abbreviations import ABBREVIATIONS, DEVICE_ABBREVIATIONS, ORIGIN_ABBREVIATIONS +from .client import async_subscribe_internal from .const import ( ATTR_DISCOVERY_HASH, ATTR_DISCOVERY_PAYLOAD, ATTR_DISCOVERY_TOPIC, CONF_AVAILABILITY, + CONF_COMPONENTS, CONF_ORIGIN, CONF_TOPIC, DOMAIN, SUPPORTED_COMPONENTS, ) -from .models import DATA_MQTT, MqttOriginInfo, ReceiveMessage -from .schemas import MQTT_ORIGIN_INFO_SCHEMA +from .models import DATA_MQTT, MqttComponentConfig, MqttOriginInfo, ReceiveMessage +from .schemas import DEVICE_DISCOVERY_SCHEMA, MQTT_ORIGIN_INFO_SCHEMA, SHARED_OPTIONS from .util import async_forward_entry_setup_and_setup_discovery ABBREVIATIONS_SET = set(ABBREVIATIONS) @@ -65,13 +73,47 @@ MQTT_DISCOVERY_DONE: SignalTypeFormat[Any] = SignalTypeFormat( TOPIC_BASE = "~" +CONF_MIGRATE_DISCOVERY = "migrate_discovery" + +MIGRATE_DISCOVERY_SCHEMA = vol.Schema( + {vol.Optional(CONF_MIGRATE_DISCOVERY): True}, +) + class MQTTDiscoveryPayload(dict[str, Any]): """Class to hold and MQTT discovery payload and discovery data.""" + device_discovery: bool = False + migrate_discovery: bool = False discovery_data: DiscoveryInfoType +@dataclass(frozen=True) +class MQTTIntegrationDiscoveryConfig: + """Class to hold an integration discovery playload.""" + + integration: str + msg: ReceiveMessage + + +@callback +def _async_process_discovery_migration(payload: MQTTDiscoveryPayload) -> bool: + """Process a discovery migration request in the discovery payload.""" + # Allow abbreviation + if migr_discvry := (payload.pop("migr_discvry", None)): + payload[CONF_MIGRATE_DISCOVERY] = migr_discvry + if CONF_MIGRATE_DISCOVERY in payload: + try: + MIGRATE_DISCOVERY_SCHEMA(payload) + except vol.Invalid as exc: + _LOGGER.warning(exc) + return False + payload.migrate_discovery = True + payload.clear() + return True + return False + + def clear_discovery_hash(hass: HomeAssistant, discovery_hash: tuple[str, str]) -> None: """Clear entry from already discovered list.""" hass.data[DATA_MQTT].discovery_already_discovered.discard(discovery_hash) @@ -83,36 +125,51 @@ def set_discovery_hash(hass: HomeAssistant, discovery_hash: tuple[str, str]) -> @callback -def async_log_discovery_origin_info( - message: str, discovery_payload: MQTTDiscoveryPayload, level: int = logging.INFO -) -> None: - """Log information about the discovery and origin.""" - if not _LOGGER.isEnabledFor(level): - # bail early if logging is disabled - return +def get_origin_log_string( + discovery_payload: MQTTDiscoveryPayload, *, include_url: bool +) -> str: + """Get the origin information from a discovery payload for logging.""" if CONF_ORIGIN not in discovery_payload: - _LOGGER.log(level, message) - return + return "" origin_info: MqttOriginInfo = discovery_payload[CONF_ORIGIN] sw_version_log = "" if sw_version := origin_info.get("sw_version"): sw_version_log = f", version: {sw_version}" support_url_log = "" - if support_url := origin_info.get("support_url"): + if include_url and (support_url := get_origin_support_url(discovery_payload)): support_url_log = f", support URL: {support_url}" + return f" from external application 
{origin_info["name"]}{sw_version_log}{support_url_log}" + + +@callback +def get_origin_support_url(discovery_payload: MQTTDiscoveryPayload) -> str | None: + """Get the origin information support URL from a discovery payload.""" + if CONF_ORIGIN not in discovery_payload: + return "" + origin_info: MqttOriginInfo = discovery_payload[CONF_ORIGIN] + return origin_info.get("support_url") + + +@callback +def async_log_discovery_origin_info( + message: str, discovery_payload: MQTTDiscoveryPayload, level: int = logging.INFO +) -> None: + """Log information about the discovery and origin.""" + # We only log origin info once per device discovery + if not _LOGGER.isEnabledFor(level): + # bail out early if logging is disabled + return _LOGGER.log( level, - "%s from external application %s%s%s", + "%s%s", message, - origin_info["name"], - sw_version_log, - support_url_log, + get_origin_log_string(discovery_payload, include_url=True), ) @callback def _replace_abbreviations( - payload: Any | dict[str, Any], + payload: dict[str, Any] | str, abbreviations: dict[str, str], abbreviations_set: set[str], ) -> None: @@ -124,11 +181,20 @@ def _replace_abbreviations( @callback -def _replace_all_abbreviations(discovery_payload: Any | dict[str, Any]) -> None: +def _replace_all_abbreviations( + discovery_payload: dict[str, Any], component_only: bool = False +) -> None: """Replace all abbreviations in an MQTT discovery payload.""" _replace_abbreviations(discovery_payload, ABBREVIATIONS, ABBREVIATIONS_SET) + if CONF_AVAILABILITY in discovery_payload: + for availability_conf in cv.ensure_list(discovery_payload[CONF_AVAILABILITY]): + _replace_abbreviations(availability_conf, ABBREVIATIONS, ABBREVIATIONS_SET) + + if component_only: + return + if CONF_ORIGIN in discovery_payload: _replace_abbreviations( discovery_payload[CONF_ORIGIN], @@ -143,13 +209,15 @@ def _replace_all_abbreviations(discovery_payload: Any | dict[str, Any]) -> None: DEVICE_ABBREVIATIONS_SET, ) - if CONF_AVAILABILITY in discovery_payload: - for availability_conf in cv.ensure_list(discovery_payload[CONF_AVAILABILITY]): - _replace_abbreviations(availability_conf, ABBREVIATIONS, ABBREVIATIONS_SET) + if CONF_COMPONENTS in discovery_payload: + if not isinstance(discovery_payload[CONF_COMPONENTS], dict): + return + for comp_conf in discovery_payload[CONF_COMPONENTS].values(): + _replace_all_abbreviations(comp_conf, component_only=True) @callback -def _replace_topic_base(discovery_payload: dict[str, Any]) -> None: +def _replace_topic_base(discovery_payload: MQTTDiscoveryPayload) -> None: """Replace topic base in MQTT discovery data.""" base = discovery_payload.pop(TOPIC_BASE) for key, value in discovery_payload.items(): @@ -169,6 +237,79 @@ def _replace_topic_base(discovery_payload: dict[str, Any]) -> None: availability_conf[CONF_TOPIC] = f"{topic[:-1]}{base}" +@callback +def _generate_device_config( + hass: HomeAssistant, + object_id: str, + node_id: str | None, + migrate_discovery: bool = False, +) -> MQTTDiscoveryPayload: + """Generate a cleanup or discovery migration message on device cleanup. + + If an empty payload, or a migrate discovery request is received for a device, + we forward an empty payload for all previously discovered components. 
+ """ + mqtt_data = hass.data[DATA_MQTT] + device_node_id: str = f"{node_id} {object_id}" if node_id else object_id + config = MQTTDiscoveryPayload({CONF_DEVICE: {}, CONF_COMPONENTS: {}}) + config.migrate_discovery = migrate_discovery + comp_config = config[CONF_COMPONENTS] + for platform, discover_id in mqtt_data.discovery_already_discovered: + ids = discover_id.split(" ") + component_node_id = ids.pop(0) + component_object_id = " ".join(ids) + if not ids: + continue + if device_node_id == component_node_id: + comp_config[component_object_id] = {CONF_PLATFORM: platform} + + return config if comp_config else MQTTDiscoveryPayload({}) + + +@callback +def _parse_device_payload( + hass: HomeAssistant, + payload: ReceivePayloadType, + object_id: str, + node_id: str | None, +) -> MQTTDiscoveryPayload: + """Parse a device discovery payload. + + The device discovery payload is translated info the config payloads for every single + component inside the device based configuration. + An empty payload is translated in a cleanup, which forwards an empty payload to all + removed components. + """ + device_payload = MQTTDiscoveryPayload() + if payload == "": + if not (device_payload := _generate_device_config(hass, object_id, node_id)): + _LOGGER.warning( + "No device components to cleanup for %s, node_id '%s'", + object_id, + node_id, + ) + return device_payload + try: + device_payload = MQTTDiscoveryPayload(json_loads_object(payload)) + except ValueError: + _LOGGER.warning("Unable to parse JSON %s: '%s'", object_id, payload) + return device_payload + if _async_process_discovery_migration(device_payload): + return _generate_device_config(hass, object_id, node_id, migrate_discovery=True) + _replace_all_abbreviations(device_payload) + try: + DEVICE_DISCOVERY_SCHEMA(device_payload) + except vol.Invalid as exc: + _LOGGER.warning( + "Invalid MQTT device discovery payload for %s, %s: '%s'", + object_id, + exc, + payload, + ) + return MQTTDiscoveryPayload({}) + return device_payload + + @callback def _valid_origin_info(discovery_payload: MQTTDiscoveryPayload) -> bool: """Parse and validate origin info from a single component discovery payload.""" @@ -186,12 +327,37 @@ def _valid_origin_info(discovery_payload: MQTTDiscoveryPayload) -> bool: return True +@callback +def _merge_common_device_options( + component_config: MQTTDiscoveryPayload, device_config: dict[str, Any] +) -> None: + """Merge common device options with the component config options. + + Common options are: + CONF_AVAILABILITY, + CONF_AVAILABILITY_MODE, + CONF_AVAILABILITY_TEMPLATE, + CONF_AVAILABILITY_TOPIC, + CONF_COMMAND_TOPIC, + CONF_PAYLOAD_AVAILABLE, + CONF_PAYLOAD_NOT_AVAILABLE, + CONF_STATE_TOPIC, + Common options in the body of the device based config are inherited into + the component. Unless the option is explicitly specified at component level, + in that case the option at component level will override the common option. 
+ """ + for option in SHARED_OPTIONS: + if option in device_config and option not in component_config: + component_config[option] = device_config.get(option) + + async def async_start( # noqa: C901 hass: HomeAssistant, discovery_topic: str, config_entry: ConfigEntry ) -> None: """Start MQTT Discovery.""" mqtt_data = hass.data[DATA_MQTT] platform_setup_lock: dict[str, asyncio.Lock] = {} + integration_discovery_messages: dict[str, MQTTIntegrationDiscoveryConfig] = {} @callback def _async_add_component(discovery_payload: MQTTDiscoveryPayload) -> None: @@ -229,8 +395,7 @@ async def async_start( # noqa: C901 _LOGGER.warning( ( "Received message on illegal discovery topic '%s'. The topic" - " contains " - "not allowed characters. For more information see " + " contains non allowed characters. For more information see " "https://www.home-assistant.io/integrations/mqtt/#discovery-topic" ), topic, @@ -239,51 +404,118 @@ async def async_start( # noqa: C901 component, node_id, object_id = match.groups() - if payload: + discovered_components: list[MqttComponentConfig] = [] + if component == CONF_DEVICE: + # Process device based discovery message and regenerate + # cleanup config for the all the components that are being removed. + # This is done when a component in the device config is omitted and detected + # as being removed, or when the device config update payload is empty. + # In that case this will regenerate a cleanup message for all every already + # discovered components that were linked to the initial device discovery. + device_discovery_payload = _parse_device_payload( + hass, payload, object_id, node_id + ) + if not device_discovery_payload: + return + device_config: dict[str, Any] + origin_config: dict[str, Any] | None + component_configs: dict[str, dict[str, Any]] + device_config = device_discovery_payload[CONF_DEVICE] + origin_config = device_discovery_payload.get(CONF_ORIGIN) + component_configs = device_discovery_payload[CONF_COMPONENTS] + for component_id, config in component_configs.items(): + component = config.pop(CONF_PLATFORM) + # The object_id in the device discovery topic is the unique identifier. + # It is used as node_id for the components it contains. + component_node_id = object_id + # The component_id in the discovery playload is used as object_id + # If we have an additional node_id in the discovery topic, + # we extend the component_id with it. + component_object_id = ( + f"{node_id} {component_id}" if node_id else component_id + ) + # We add wrapper to the discovery payload with the discovery data. + # If the dict is empty after removing the platform, the payload is + # assumed to remove the existing config and we do not want to add + # device or orig or shared availability attributes. 
+ if discovery_payload := MQTTDiscoveryPayload(config): + discovery_payload[CONF_DEVICE] = device_config + discovery_payload[CONF_ORIGIN] = origin_config + # Only assign shared config options + # when they are not set at entity level + _merge_common_device_options( + discovery_payload, device_discovery_payload + ) + discovery_payload.device_discovery = True + discovery_payload.migrate_discovery = ( + device_discovery_payload.migrate_discovery + ) + discovered_components.append( + MqttComponentConfig( + component, + component_object_id, + component_node_id, + discovery_payload, + ) + ) + _LOGGER.debug( + "Process device discovery payload %s", device_discovery_payload + ) + device_discovery_id = f"{node_id} {object_id}" if node_id else object_id + message = f"Processing device discovery for '{device_discovery_id}'" + async_log_discovery_origin_info( + message, MQTTDiscoveryPayload(device_discovery_payload) + ) + + else: + # Process component based discovery message try: - discovery_payload = MQTTDiscoveryPayload(json_loads_object(payload)) + discovery_payload = MQTTDiscoveryPayload( + json_loads_object(payload) if payload else {} + ) except ValueError: _LOGGER.warning("Unable to parse JSON %s: '%s'", object_id, payload) return - _replace_all_abbreviations(discovery_payload) - if not _valid_origin_info(discovery_payload): - return + if not _async_process_discovery_migration(discovery_payload): + _replace_all_abbreviations(discovery_payload) + if not _valid_origin_info(discovery_payload): + return + discovered_components.append( + MqttComponentConfig(component, object_id, node_id, discovery_payload) + ) + + discovery_pending_discovered = mqtt_data.discovery_pending_discovered + for component_config in discovered_components: + component = component_config.component + node_id = component_config.node_id + object_id = component_config.object_id + discovery_payload = component_config.discovery_payload + if TOPIC_BASE in discovery_payload: _replace_topic_base(discovery_payload) - else: - discovery_payload = MQTTDiscoveryPayload({}) - # If present, the node_id will be included in the discovered object id - discovery_id = f"{node_id} {object_id}" if node_id else object_id - discovery_hash = (component, discovery_id) + # If present, the node_id will be included in the discovery_id. 
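Editor's illustration (hypothetical, continuing the example above): the removal path referenced in the comments. When an empty retained payload is later published to the same device discovery topic, _generate_device_config rebuilds a minimal config for every component that was previously discovered for the device, and the loop above then processes each one as an empty payload.

    # Roughly the cleanup config regenerated for the example device:
    cleanup_payload = {
        "device": {},
        "components": {
            "temperature": {"platform": "sensor"},
            "motion": {"platform": "binary_sensor"},
        },
    }
    # After "platform" is popped, each component config is empty, which is treated as a
    # removal of the previously discovered config; a payload of {"migrate_discovery": true}
    # triggers the same regeneration, but with the migrate_discovery flag set.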
+ discovery_id = f"{node_id} {object_id}" if node_id else object_id + discovery_hash = (component, discovery_id) - if discovery_payload: # Attach MQTT topic to the payload, used for debug prints - setattr( - discovery_payload, - "__configuration_source__", - f"MQTT (topic: '{topic}')", - ) - discovery_data = { + discovery_payload.discovery_data = { ATTR_DISCOVERY_HASH: discovery_hash, ATTR_DISCOVERY_PAYLOAD: discovery_payload, ATTR_DISCOVERY_TOPIC: topic, } - setattr(discovery_payload, "discovery_data", discovery_data) - discovery_payload[CONF_PLATFORM] = "mqtt" + if discovery_hash in discovery_pending_discovered: + pending = discovery_pending_discovered[discovery_hash]["pending"] + pending.appendleft(discovery_payload) + _LOGGER.debug( + "Component has already been discovered: %s %s, queuing update", + component, + discovery_id, + ) + return - if discovery_hash in mqtt_data.discovery_pending_discovered: - pending = mqtt_data.discovery_pending_discovered[discovery_hash]["pending"] - pending.appendleft(discovery_payload) - _LOGGER.debug( - "Component has already been discovered: %s %s, queuing update", - component, - discovery_id, - ) - return - - async_process_discovery_payload(component, discovery_id, discovery_payload) + async_process_discovery_payload(component, discovery_id, discovery_payload) @callback def async_process_discovery_payload( @@ -291,7 +523,7 @@ async def async_start( # noqa: C901 ) -> None: """Process the payload of a new discovery.""" - _LOGGER.debug("Process discovery payload %s", payload) + _LOGGER.debug("Process component discovery payload %s", payload) discovery_hash = (component, discovery_id) already_discovered = discovery_hash in mqtt_data.discovery_already_discovered @@ -341,13 +573,15 @@ async def async_start( # noqa: C901 ) mqtt_data.discovery_unsubscribe = [ - mqtt.async_subscribe_internal( + async_subscribe_internal( hass, topic, async_discovery_message_received, 0, job_type=HassJobType.Callback, ) + # Subscribe first for platform discovery wildcard topics first, + # and then subscribe device discovery wildcard topics. 
for topic in chain( ( f"{discovery_topic}/{component}/+/config" @@ -357,6 +591,10 @@ async def async_start( # noqa: C901 f"{discovery_topic}/{component}/+/+/config" for component in SUPPORTED_COMPONENTS ), + ( + f"{discovery_topic}/device/+/config", + f"{discovery_topic}/device/+/+/config", + ), ) ] @@ -364,21 +602,53 @@ async def async_start( # noqa: C901 mqtt_integrations = await async_get_mqtt(hass) integration_unsubscribe = mqtt_data.integration_unsubscribe + async def _async_handle_config_entry_removed(entry: ConfigEntry) -> None: + """Handle integration config entry changes.""" + for discovery_key in entry.discovery_keys[DOMAIN]: + if ( + discovery_key.version != 1 + or not isinstance(discovery_key.key, str) + or discovery_key.key not in integration_discovery_messages + ): + continue + topic = discovery_key.key + discovery_message = integration_discovery_messages[topic] + del integration_discovery_messages[topic] + _LOGGER.debug("Rediscover service on topic %s", topic) + # Initiate re-discovery + await async_integration_message_received( + discovery_message.integration, discovery_message.msg + ) + + mqtt_data.discovery_unsubscribe.append( + async_dispatcher_connect( + hass, + signal_discovered_config_entry_removed(DOMAIN), + _async_handle_config_entry_removed, + ) + ) + async def async_integration_message_received( integration: str, msg: ReceiveMessage ) -> None: """Process the received message.""" + if ( + msg.topic in integration_discovery_messages + and integration_discovery_messages[msg.topic].msg.payload == msg.payload + ): + _LOGGER.debug( + "Ignoring already processed discovery message for '%s' on topic %s: %s", + integration, + msg.topic, + msg.payload, + ) + return if TYPE_CHECKING: assert mqtt_data.data_config_flow_lock - key = f"{integration}_{msg.subscribed_topic}" # Lock to prevent initiating many parallel config flows. 
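Editor's note on the wildcard subscriptions added above (hypothetical topics, assuming the default discovery prefix homeassistant):

    # Platform (single component) discovery, without and with a node_id:
    #   homeassistant/sensor/bedroom_temperature/config
    #   homeassistant/sensor/node1/bedroom_temperature/config
    # Device based discovery, without and with a node_id:
    #   homeassistant/device/bedroom-multisensor/config
    #   homeassistant/device/node1/bedroom-multisensor/config
    # For the second platform topic the regex groups map to component "sensor",
    # node_id "node1" and object_id "bedroom_temperature", giving
    # discovery_id "node1 bedroom_temperature" and
    # discovery_hash ("sensor", "node1 bedroom_temperature").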
# Note: The lock is not intended to prevent a race, only for performance async with mqtt_data.data_config_flow_lock: - # Already unsubscribed - if key not in integration_unsubscribe: - return - data = MqttServiceInfo( topic=msg.topic, payload=msg.payload, @@ -387,20 +657,28 @@ async def async_start( # noqa: C901 subscribed_topic=msg.subscribed_topic, timestamp=msg.timestamp, ) - result = await hass.config_entries.flow.async_init( - integration, context={"source": DOMAIN}, data=data + discovery_key = discovery_flow.DiscoveryKey( + domain=DOMAIN, key=msg.topic, version=1 ) - if ( - result - and result["type"] == FlowResultType.ABORT - and result["reason"] - in ("already_configured", "single_instance_allowed") - ): - integration_unsubscribe.pop(key)() + discovery_flow.async_create_flow( + hass, + integration, + {"source": SOURCE_MQTT}, + data, + discovery_key=discovery_key, + ) + if msg.payload: + # Update the last discovered config message + integration_discovery_messages[msg.topic] = ( + MQTTIntegrationDiscoveryConfig(integration=integration, msg=msg) + ) + elif msg.topic in integration_discovery_messages: + # Cleanup cache if discovery payload is empty + del integration_discovery_messages[msg.topic] integration_unsubscribe.update( { - f"{integration}_{topic}": mqtt.async_subscribe_internal( + f"{integration}_{topic}": async_subscribe_internal( hass, topic, functools.partial(async_integration_message_received, integration), diff --git a/homeassistant/components/mqtt/mixins.py b/homeassistant/components/mqtt/entity.py similarity index 83% rename from homeassistant/components/mqtt/mixins.py rename to homeassistant/components/mqtt/entity.py index ce811e13a24..fb047cc8d5e 100644 --- a/homeassistant/components/mqtt/mixins.py +++ b/homeassistant/components/mqtt/entity.py @@ -1,4 +1,4 @@ -"""MQTT component mixins and helpers.""" +"""MQTT (entity) component mixins and helpers.""" from __future__ import annotations @@ -76,6 +76,7 @@ from .const import ( CONF_CONNECTIONS, CONF_ENABLED_BY_DEFAULT, CONF_ENCODING, + CONF_ENTITY_PICTURE, CONF_HW_VERSION, CONF_IDENTIFIERS, CONF_JSON_ATTRS_TEMPLATE, @@ -103,6 +104,8 @@ from .discovery import ( MQTT_DISCOVERY_UPDATED, MQTTDiscoveryPayload, clear_discovery_hash, + get_origin_log_string, + get_origin_support_url, set_discovery_hash, ) from .models import ( @@ -134,7 +137,7 @@ MQTT_ATTRIBUTES_BLOCKED = { "extra_state_attributes", "force_update", "icon", - "name", + "friendly_name", "should_poll", "state", "supported_features", @@ -143,20 +146,6 @@ MQTT_ATTRIBUTES_BLOCKED = { } -class SetupEntity(Protocol): - """Protocol type for async_setup_entities.""" - - async def __call__( - self, - hass: HomeAssistant, - async_add_entities: AddEntitiesCallback, - config: ConfigType, - config_entry: ConfigEntry, - discovery_data: DiscoveryInfoType | None = None, - ) -> None: - """Define setup_entities type.""" - - @callback def async_handle_schema_error( discovery_payload: MQTTDiscoveryPayload, err: vol.Invalid @@ -604,6 +593,7 @@ async def cleanup_device_registry( entity_registry = er.async_get(hass) if ( device_id + and device_id not in device_registry.deleted_devices and config_entry_id and not er.async_entries_for_device( entity_registry, device_id, include_disabled_entities=False @@ -685,6 +675,7 @@ class MqttDiscoveryDeviceUpdateMixin(ABC): self._config_entry = config_entry self._config_entry_id = config_entry.entry_id self._skip_device_removal: bool = False + self._migrate_discovery: str | None = None discovery_hash = get_discovery_hash(discovery_data) 
self._remove_discovery_updated = async_dispatcher_connect( @@ -717,12 +708,95 @@ class MqttDiscoveryDeviceUpdateMixin(ABC): ) -> None: """Handle discovery update.""" discovery_hash = get_discovery_hash(self._discovery_data) + # Start discovery migration or rollback if migrate_discovery flag is set + # and the discovery topic is valid and not yet migrating + if ( + discovery_payload.migrate_discovery + and self._migrate_discovery is None + and self._discovery_data[ATTR_DISCOVERY_TOPIC] + == discovery_payload.discovery_data[ATTR_DISCOVERY_TOPIC] + ): + self._migrate_discovery = self._discovery_data[ATTR_DISCOVERY_TOPIC] + discovery_hash = self._discovery_data[ATTR_DISCOVERY_HASH] + origin_info = get_origin_log_string( + self._discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + action = "Rollback" if discovery_payload.device_discovery else "Migration" + schema_type = "platform" if discovery_payload.device_discovery else "device" + _LOGGER.info( + "%s to MQTT %s discovery schema started for %s '%s'" + "%s on topic %s. To complete %s, publish a %s discovery " + "message with %s '%s'. After completed %s, " + "publish an empty (retained) payload to %s", + action, + schema_type, + discovery_hash[0], + discovery_hash[1], + origin_info, + self._migrate_discovery, + action.lower(), + schema_type, + discovery_hash[0], + discovery_hash[1], + action.lower(), + self._migrate_discovery, + ) + + # Cleanup platform resources + await self.async_tear_down() + # Unregister and clean discovery + stop_discovery_updates( + self.hass, self._discovery_data, self._remove_discovery_updated + ) + send_discovery_done(self.hass, self._discovery_data) + return + _LOGGER.debug( "Got update for %s with hash: %s '%s'", self.log_name, discovery_hash, discovery_payload, ) + new_discovery_topic = discovery_payload.discovery_data[ATTR_DISCOVERY_TOPIC] + + # Abort early if an update is not received via the registered discovery topic. + # This can happen if a device and single component discovery payload + # share the same discovery ID. 
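Editor's illustration of the conflict the comment above describes (hypothetical topics): a device based and a single component discovery payload can resolve to the same discovery hash, and only the topic that originally discovered the item may update it.

    # Platform schema topic:
    #   homeassistant/sensor/bedroom-multisensor/temperature/config
    # Device schema topic, with a "temperature" entry under "components":
    #   homeassistant/device/bedroom-multisensor/config
    # Both resolve to discovery_hash ("sensor", "bedroom-multisensor temperature"), so an
    # update arriving from the other topic is logged as a conflict and ignored, unless a
    # migration was first requested with a payload like {"migrate_discovery": true} on the
    # currently registered topic.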
+ if self._discovery_data[ATTR_DISCOVERY_TOPIC] != new_discovery_topic: + # Prevent illegal updates + old_origin_info = get_origin_log_string( + self._discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + new_origin_info = get_origin_log_string( + discovery_payload.discovery_data[ATTR_DISCOVERY_PAYLOAD], + include_url=False, + ) + new_origin_support_url = get_origin_support_url( + discovery_payload.discovery_data[ATTR_DISCOVERY_PAYLOAD] + ) + if new_origin_support_url: + get_support = f"for support visit {new_origin_support_url}" + else: + get_support = ( + "for documentation on migration to device schema or rollback to " + "discovery schema, visit https://www.home-assistant.io/integrations/" + "mqtt/#migration-from-single-component-to-device-based-discovery" + ) + _LOGGER.warning( + "Received a conflicting MQTT discovery message for %s '%s' which was " + "previously discovered on topic %s%s; the conflicting discovery " + "message was received on topic %s%s; %s", + discovery_hash[0], + discovery_hash[1], + self._discovery_data[ATTR_DISCOVERY_TOPIC], + old_origin_info, + new_discovery_topic, + new_origin_info, + get_support, + ) + send_discovery_done(self.hass, self._discovery_data) + return + if ( discovery_payload and discovery_payload != self._discovery_data[ATTR_DISCOVERY_PAYLOAD] @@ -819,6 +893,7 @@ class MqttDiscoveryUpdateMixin(Entity): mqtt_data = hass.data[DATA_MQTT] self._registry_hooks = mqtt_data.discovery_registry_hooks discovery_hash: tuple[str, str] = discovery_data[ATTR_DISCOVERY_HASH] + self._migrate_discovery: str | None = None if discovery_hash in self._registry_hooks: self._registry_hooks.pop(discovery_hash)() @@ -876,7 +951,12 @@ class MqttDiscoveryUpdateMixin(Entity): if TYPE_CHECKING: assert self._discovery_data self._cleanup_discovery_on_remove() - await self._async_remove_state_and_registry_entry() + if self._migrate_discovery is None: + # Unload and cleanup registry + await self._async_remove_state_and_registry_entry() + else: + # Only unload the entity + await self.async_remove(force_remove=True) send_discovery_done(self.hass, self._discovery_data) @callback @@ -891,18 +971,102 @@ class MqttDiscoveryUpdateMixin(Entity): """ if TYPE_CHECKING: assert self._discovery_data - discovery_hash: tuple[str, str] = self._discovery_data[ATTR_DISCOVERY_HASH] + discovery_hash = get_discovery_hash(self._discovery_data) + # Start discovery migration or rollback if migrate_discovery flag is set + # and the discovery topic is valid and not yet migrating + if ( + payload.migrate_discovery + and self._migrate_discovery is None + and self._discovery_data[ATTR_DISCOVERY_TOPIC] + == payload.discovery_data[ATTR_DISCOVERY_TOPIC] + ): + if self.unique_id is None or self.device_info is None: + _LOGGER.error( + "Discovery migration is not possible for " + "for entity %s on topic %s. 
A unique_id " + "and device context is required, got unique_id: %s, device: %s", + self.entity_id, + self._discovery_data[ATTR_DISCOVERY_TOPIC], + self.unique_id, + self.device_info, + ) + send_discovery_done(self.hass, self._discovery_data) + return + + self._migrate_discovery = self._discovery_data[ATTR_DISCOVERY_TOPIC] + discovery_hash = self._discovery_data[ATTR_DISCOVERY_HASH] + origin_info = get_origin_log_string( + self._discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + action = "Rollback" if payload.device_discovery else "Migration" + schema_type = "platform" if payload.device_discovery else "device" + _LOGGER.info( + "%s to MQTT %s discovery schema started for entity %s" + "%s on topic %s. To complete %s, publish a %s discovery " + "message with %s entity '%s'. After completed %s, " + "publish an empty (retained) payload to %s", + action, + schema_type, + self.entity_id, + origin_info, + self._migrate_discovery, + action.lower(), + schema_type, + discovery_hash[0], + discovery_hash[1], + action.lower(), + self._migrate_discovery, + ) + old_payload = self._discovery_data[ATTR_DISCOVERY_PAYLOAD] _LOGGER.debug( "Got update for entity with hash: %s '%s'", discovery_hash, payload, ) - old_payload: DiscoveryInfoType - old_payload = self._discovery_data[ATTR_DISCOVERY_PAYLOAD] + new_discovery_topic = payload.discovery_data[ATTR_DISCOVERY_TOPIC] + # Abort early if an update is not received via the registered discovery topic. + # This can happen if a device and single component discovery payload + # share the same discovery ID. + if self._discovery_data[ATTR_DISCOVERY_TOPIC] != new_discovery_topic: + # Prevent illegal updates + old_origin_info = get_origin_log_string( + self._discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + new_origin_info = get_origin_log_string( + payload.discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + new_origin_support_url = get_origin_support_url( + payload.discovery_data[ATTR_DISCOVERY_PAYLOAD] + ) + if new_origin_support_url: + get_support = f"for support visit {new_origin_support_url}" + else: + get_support = ( + "for documentation on migration to device schema or rollback to " + "discovery schema, visit https://www.home-assistant.io/integrations/" + "mqtt/#migration-from-single-component-to-device-based-discovery" + ) + _LOGGER.warning( + "Received a conflicting MQTT discovery message for entity %s; the " + "entity was previously discovered on topic %s%s; the conflicting " + "discovery message was received on topic %s%s; %s", + self.entity_id, + self._discovery_data[ATTR_DISCOVERY_TOPIC], + old_origin_info, + new_discovery_topic, + new_origin_info, + get_support, + ) + send_discovery_done(self.hass, self._discovery_data) + return + debug_info.update_entity_discovery_data(self.hass, payload, self.entity_id) if not payload: # Empty payload: Remove component - _LOGGER.info("Removing component: %s", self.entity_id) + if self._migrate_discovery is None: + _LOGGER.info("Removing component: %s", self.entity_id) + else: + _LOGGER.info("Unloading component: %s", self.entity_id) self.hass.async_create_task( self._async_process_discovery_update_and_remove() ) @@ -1021,6 +1185,33 @@ def device_info_from_specifications( return info +@callback +def ensure_via_device_exists( + hass: HomeAssistant, device_info: DeviceInfo | None, config_entry: ConfigEntry +) -> None: + """Ensure the via device is in the device registry.""" + if ( + device_info is None + or CONF_VIA_DEVICE not in device_info + or (device_registry := 
dr.async_get(hass)).async_get_device( + identifiers={device_info["via_device"]} + ) + ): + return + + # Ensure the via device exists in the device registry + _LOGGER.debug( + "Device identifier %s via_device reference from device_info %s " + "not found in the Device Registry, creating new entry", + device_info["via_device"], + device_info, + ) + device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={device_info["via_device"]}, + ) + + class MqttEntityDeviceInfo(Entity): """Mixin used for mqtt platforms that support the device registry.""" @@ -1039,6 +1230,7 @@ class MqttEntityDeviceInfo(Entity): device_info = self.device_info if device_info is not None: + ensure_via_device_exists(self.hass, device_info, self._config_entry) device_registry.async_get_or_create( config_entry_id=config_entry_id, **device_info ) @@ -1092,6 +1284,7 @@ class MqttEntity( self, hass, discovery_data, self.discovery_update ) MqttEntityDeviceInfo.__init__(self, config.get(CONF_DEVICE), config_entry) + ensure_via_device_exists(self.hass, self.device_info, self._config_entry) def _init_entity_id(self) -> None: """Set entity_id from object_id if defined in config.""" @@ -1225,6 +1418,7 @@ class MqttEntity( config.get(CONF_ENABLED_BY_DEFAULT) ) self._attr_icon = config.get(CONF_ICON) + self._attr_entity_picture = config.get(CONF_ENTITY_PICTURE) # Set the entity name if needed self._set_entity_name(config) @@ -1325,6 +1519,8 @@ def update_device( config_entry_id = config_entry.entry_id device_info = device_info_from_specifications(config[CONF_DEVICE]) + ensure_via_device_exists(hass, device_info, config_entry) + if config_entry_id is not None and device_info is not None: update_device_info = cast(dict[str, Any], device_info) update_device_info["config_entry_id"] = config_entry_id diff --git a/homeassistant/components/mqtt/event.py b/homeassistant/components/mqtt/event.py index 0dc267f80f9..d9812aaaf48 100644 --- a/homeassistant/components/mqtt/event.py +++ b/homeassistant/components/mqtt/event.py @@ -26,7 +26,7 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads_object from . 
import subscription from .config import MQTT_RO_SCHEMA from .const import CONF_STATE_TOPIC, PAYLOAD_EMPTY_JSON, PAYLOAD_NONE -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( DATA_MQTT, MqttValueTemplate, @@ -38,6 +38,8 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_EVENT_TYPES = "event_types" MQTT_EVENT_ATTRIBUTES_BLOCKED = frozenset( diff --git a/homeassistant/components/mqtt/fan.py b/homeassistant/components/mqtt/fan.py index a22dba4ae93..4d2e764a0d5 100644 --- a/homeassistant/components/mqtt/fan.py +++ b/homeassistant/components/mqtt/fan.py @@ -47,7 +47,7 @@ from .const import ( CONF_STATE_VALUE_TEMPLATE, PAYLOAD_NONE, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -57,6 +57,8 @@ from .models import ( from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic +PARALLEL_UPDATES = 0 + CONF_DIRECTION_STATE_TOPIC = "direction_state_topic" CONF_DIRECTION_COMMAND_TOPIC = "direction_command_topic" CONF_DIRECTION_VALUE_TEMPLATE = "direction_value_template" @@ -224,7 +226,6 @@ class MqttFan(MqttEntity, FanEntity): _optimistic_preset_mode: bool _payload: dict[str, Any] _speed_range: tuple[int, int] - _enable_turn_on_off_backwards_compatibility = False @staticmethod def config_schema() -> VolSchemaType: diff --git a/homeassistant/components/mqtt/humidifier.py b/homeassistant/components/mqtt/humidifier.py index d55c1d3cebf..5d1af03ad24 100644 --- a/homeassistant/components/mqtt/humidifier.py +++ b/homeassistant/components/mqtt/humidifier.py @@ -49,7 +49,7 @@ from .const import ( CONF_STATE_VALUE_TEMPLATE, PAYLOAD_NONE, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -59,6 +59,8 @@ from .models import ( from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic +PARALLEL_UPDATES = 0 + CONF_AVAILABLE_MODES_LIST = "modes" CONF_DEVICE_CLASS = "device_class" CONF_MODE_COMMAND_TEMPLATE = "mode_command_template" diff --git a/homeassistant/components/mqtt/icons.json b/homeassistant/components/mqtt/icons.json index 1979359c5a1..73cbf22b629 100644 --- a/homeassistant/components/mqtt/icons.json +++ b/homeassistant/components/mqtt/icons.json @@ -1,7 +1,13 @@ { "services": { - "publish": "mdi:publish", - "dump": "mdi:database-export", - "reload": "mdi:reload" + "publish": { + "service": "mdi:publish" + }, + "dump": { + "service": "mdi:database-export" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/mqtt/image.py b/homeassistant/components/mqtt/image.py index 30fd102764d..4b7b2d783d2 100644 --- a/homeassistant/components/mqtt/image.py +++ b/homeassistant/components/mqtt/image.py @@ -25,7 +25,7 @@ from homeassistant.util import dt as dt_util from . 
import subscription from .config import MQTT_BASE_SCHEMA -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( DATA_MQTT, MqttValueTemplate, @@ -37,6 +37,8 @@ from .util import valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_CONTENT_TYPE = "content_type" CONF_IMAGE_ENCODING = "image_encoding" CONF_IMAGE_TOPIC = "image_topic" diff --git a/homeassistant/components/mqtt/lawn_mower.py b/homeassistant/components/mqtt/lawn_mower.py index f4aa248929e..87577c4b4d9 100644 --- a/homeassistant/components/mqtt/lawn_mower.py +++ b/homeassistant/components/mqtt/lawn_mower.py @@ -26,7 +26,7 @@ from homeassistant.helpers.typing import ConfigType, VolSchemaType from . import subscription from .config import MQTT_BASE_SCHEMA from .const import CONF_RETAIN, DEFAULT_OPTIMISTIC, DEFAULT_RETAIN -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -38,6 +38,8 @@ from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_ACTIVITY_STATE_TOPIC = "activity_state_topic" CONF_ACTIVITY_VALUE_TEMPLATE = "activity_value_template" CONF_DOCK_COMMAND_TOPIC = "dock_command_topic" diff --git a/homeassistant/components/mqtt/light/__init__.py b/homeassistant/components/mqtt/light/__init__.py index 04619b08e11..328f80cb5ea 100644 --- a/homeassistant/components/mqtt/light/__init__.py +++ b/homeassistant/components/mqtt/light/__init__.py @@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, VolSchemaType -from ..mixins import async_setup_entity_entry_helper +from ..entity import async_setup_entity_entry_helper from .schema import CONF_SCHEMA, MQTT_LIGHT_SCHEMA_SCHEMA from .schema_basic import ( DISCOVERY_SCHEMA_BASIC, @@ -30,6 +30,8 @@ from .schema_template import ( MqttLightTemplate, ) +PARALLEL_UPDATES = 0 + def validate_mqtt_light_discovery(config_value: dict[str, Any]) -> ConfigType: """Validate MQTT light schema for discovery.""" diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index 1a64b1eecb4..159a23d14d9 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -9,20 +9,25 @@ from typing import Any, cast import voluptuous as vol from homeassistant.components.light import ( + _DEPRECATED_ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_WHITE, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ENTITY_ID_FORMAT, ColorMode, LightEntity, @@ -51,7 +56,7 @@ from ..const import ( CONF_STATE_VALUE_TEMPLATE, PAYLOAD_NONE, ) -from ..mixins import MqttEntity +from ..entity import MqttEntity from ..models import ( MqttCommandTemplate, MqttValueTemplate, @@ -112,12 +117,15 @@ MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( { ATTR_COLOR_MODE, ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + 
_DEPRECATED_ATTR_COLOR_TEMP.value, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + _DEPRECATED_ATTR_MAX_MIREDS.value, + ATTR_MIN_COLOR_TEMP_KELVIN, + _DEPRECATED_ATTR_MIN_MIREDS.value, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -240,7 +248,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): _optimistic: bool _optimistic_brightness: bool _optimistic_color_mode: bool - _optimistic_color_temp: bool + _optimistic_color_temp_kelvin: bool _optimistic_effect: bool _optimistic_hs_color: bool _optimistic_rgb_color: bool @@ -255,8 +263,16 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" - self._attr_min_mireds = config.get(CONF_MIN_MIREDS, super().min_mireds) - self._attr_max_mireds = config.get(CONF_MAX_MIREDS, super().max_mireds) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + if (max_mireds := config.get(CONF_MAX_MIREDS)) + else DEFAULT_MIN_KELVIN + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + if (min_mireds := config.get(CONF_MIN_MIREDS)) + else DEFAULT_MAX_KELVIN + ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) topic: dict[str, str | None] = { @@ -321,7 +337,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): and topic[CONF_RGB_STATE_TOPIC] is None ) ) - self._optimistic_color_temp = ( + self._optimistic_color_temp_kelvin = ( optimistic or topic[CONF_COLOR_TEMP_STATE_TOPIC] is None ) self._optimistic_effect = optimistic or topic[CONF_EFFECT_STATE_TOPIC] is None @@ -472,10 +488,8 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): def _converter( r: int, g: int, b: int, cw: int, ww: int ) -> tuple[int, int, int]: - min_kelvin = color_util.color_temperature_mired_to_kelvin(self.max_mireds) - max_kelvin = color_util.color_temperature_mired_to_kelvin(self.min_mireds) return color_util.color_rgbww_to_rgb( - r, g, b, cw, ww, min_kelvin, max_kelvin + r, g, b, cw, ww, self.min_color_temp_kelvin, self.max_color_temp_kelvin ) rgbww = self._rgbx_received( @@ -512,7 +526,9 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): if self._optimistic_color_mode: self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = int(payload) + self._attr_color_temp_kelvin = color_util.color_temperature_mired_to_kelvin( + int(payload) + ) @callback def _effect_received(self, msg: ReceiveMessage) -> None: @@ -586,7 +602,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): self.add_subscription( CONF_COLOR_TEMP_STATE_TOPIC, self._color_temp_received, - {"_attr_color_mode", "_attr_color_temp"}, + {"_attr_color_mode", "_attr_color_temp_kelvin"}, ) self.add_subscription( CONF_EFFECT_STATE_TOPIC, self._effect_received, {"_attr_effect"} @@ -625,7 +641,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): restore_state(ATTR_RGBW_COLOR) restore_state(ATTR_RGBWW_COLOR) restore_state(ATTR_COLOR_MODE) - restore_state(ATTR_COLOR_TEMP) + restore_state(ATTR_COLOR_TEMP_KELVIN) restore_state(ATTR_EFFECT) restore_state(ATTR_HS_COLOR) restore_state(ATTR_XY_COLOR) @@ -797,14 +813,21 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): await publish(CONF_RGBWW_COMMAND_TOPIC, rgbww_s) should_update |= set_optimistic(ATTR_BRIGHTNESS, kwargs[ATTR_BRIGHTNESS]) if ( - ATTR_COLOR_TEMP in kwargs + ATTR_COLOR_TEMP_KELVIN in kwargs and 
self._topic[CONF_COLOR_TEMP_COMMAND_TOPIC] is not None ): ct_command_tpl = self._command_templates[CONF_COLOR_TEMP_COMMAND_TEMPLATE] - color_temp = ct_command_tpl(int(kwargs[ATTR_COLOR_TEMP]), None) + color_temp = ct_command_tpl( + color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ), + None, + ) await publish(CONF_COLOR_TEMP_COMMAND_TOPIC, color_temp) should_update |= set_optimistic( - ATTR_COLOR_TEMP, kwargs[ATTR_COLOR_TEMP], ColorMode.COLOR_TEMP + ATTR_COLOR_TEMP_KELVIN, + kwargs[ATTR_COLOR_TEMP_KELVIN], + ColorMode.COLOR_TEMP, ) if ( diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index 58fde4a3800..f6efdd3281d 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -12,7 +12,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -22,6 +22,8 @@ from homeassistant.components.light import ( ATTR_TRANSITION, ATTR_WHITE, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN as LIGHT_DOMAIN, ENTITY_ID_FORMAT, FLASH_LONG, @@ -65,7 +67,7 @@ from ..const import ( CONF_STATE_TOPIC, DOMAIN as MQTT_DOMAIN, ) -from ..mixins import MqttEntity +from ..entity import MqttEntity from ..models import ReceiveMessage from ..schemas import MQTT_ENTITY_COMMON_SCHEMA from ..util import valid_subscribe_topic @@ -273,8 +275,16 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" - self._attr_max_mireds = config.get(CONF_MAX_MIREDS, super().max_mireds) - self._attr_min_mireds = config.get(CONF_MIN_MIREDS, super().min_mireds) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + if (max_mireds := config.get(CONF_MAX_MIREDS)) + else DEFAULT_MIN_KELVIN + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + if (min_mireds := config.get(CONF_MIN_MIREDS)) + else DEFAULT_MAX_KELVIN + ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) self._topic = { @@ -370,7 +380,11 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): return try: if color_mode == ColorMode.COLOR_TEMP: - self._attr_color_temp = int(values["color_temp"]) + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + values["color_temp"] + ) + ) self._attr_color_mode = ColorMode.COLOR_TEMP elif color_mode == ColorMode.HS: hue = float(values["color"]["h"]) @@ -469,12 +483,16 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): # Deprecated color handling try: if values["color_temp"] is None: - self._attr_color_temp = None + self._attr_color_temp_kelvin = None else: - self._attr_color_temp = int(values["color_temp"]) # type: ignore[arg-type] + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + values["color_temp"] # type: ignore[arg-type] + ) + ) except KeyError: pass - except ValueError: + except (TypeError, ValueError): _LOGGER.warning( "Invalid color temp value '%s' received for entity %s", values["color_temp"], @@ -496,7 +514,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._state_received, { "_attr_brightness", - "_attr_color_temp", + "_attr_color_temp_kelvin", "_attr_effect", "_attr_hs_color", "_attr_is_on", @@ -522,8 +540,8 @@ class 
MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._attr_color_mode = last_attributes.get( ATTR_COLOR_MODE, self.color_mode ) - self._attr_color_temp = last_attributes.get( - ATTR_COLOR_TEMP, self.color_temp + self._attr_color_temp_kelvin = last_attributes.get( + ATTR_COLOR_TEMP_KELVIN, self.color_temp_kelvin ) self._attr_effect = last_attributes.get(ATTR_EFFECT, self.effect) self._attr_hs_color = last_attributes.get(ATTR_HS_COLOR, self.hs_color) @@ -623,7 +641,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): message["color"]["s"] = hs_color[1] if self._optimistic: - self._attr_color_temp = None + self._attr_color_temp_kelvin = None self._attr_hs_color = kwargs[ATTR_HS_COLOR] should_update = True @@ -690,12 +708,14 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._attr_brightness = kwargs[ATTR_BRIGHTNESS] should_update = True - if ATTR_COLOR_TEMP in kwargs: - message["color_temp"] = int(kwargs[ATTR_COLOR_TEMP]) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + message["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if self._optimistic: self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = kwargs[ATTR_COLOR_TEMP] + self._attr_color_temp_kelvin = kwargs[ATTR_COLOR_TEMP_KELVIN] self._attr_hs_color = None should_update = True diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index a1f4ea2e81a..722bd864366 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -10,11 +10,13 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ENTITY_ID_FORMAT, ColorMode, LightEntity, @@ -38,7 +40,7 @@ import homeassistant.util.color as color_util from .. 
import subscription from ..config import MQTT_RW_SCHEMA from ..const import CONF_COMMAND_TOPIC, CONF_STATE_TOPIC, PAYLOAD_NONE -from ..mixins import MqttEntity +from ..entity import MqttEntity from ..models import ( MqttCommandTemplate, MqttValueTemplate, @@ -126,8 +128,16 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" - self._attr_max_mireds = config.get(CONF_MAX_MIREDS, super().max_mireds) - self._attr_min_mireds = config.get(CONF_MIN_MIREDS, super().min_mireds) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + if (max_mireds := config.get(CONF_MAX_MIREDS)) + else DEFAULT_MIN_KELVIN + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + if (min_mireds := config.get(CONF_MIN_MIREDS)) + else DEFAULT_MAX_KELVIN + ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) self._topics = { @@ -213,8 +223,10 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): color_temp = self._value_templates[CONF_COLOR_TEMP_TEMPLATE]( msg.payload ) - self._attr_color_temp = ( - int(color_temp) if color_temp != "None" else None + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(int(color_temp)) + if color_temp != "None" + else None ) except ValueError: _LOGGER.warning("Invalid color temperature value received") @@ -256,7 +268,7 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): { "_attr_brightness", "_attr_color_mode", - "_attr_color_temp", + "_attr_color_temp_kelvin", "_attr_effect", "_attr_hs_color", "_attr_is_on", @@ -275,8 +287,10 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): if last_state.attributes.get(ATTR_HS_COLOR): self._attr_hs_color = last_state.attributes.get(ATTR_HS_COLOR) self._update_color_mode() - if last_state.attributes.get(ATTR_COLOR_TEMP): - self._attr_color_temp = last_state.attributes.get(ATTR_COLOR_TEMP) + if last_state.attributes.get(ATTR_COLOR_TEMP_KELVIN): + self._attr_color_temp_kelvin = last_state.attributes.get( + ATTR_COLOR_TEMP_KELVIN + ) if last_state.attributes.get(ATTR_EFFECT): self._attr_effect = last_state.attributes.get(ATTR_EFFECT) @@ -295,11 +309,13 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): if self._optimistic: self._attr_brightness = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs: - values["color_temp"] = int(kwargs[ATTR_COLOR_TEMP]) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + values["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if self._optimistic: - self._attr_color_temp = kwargs[ATTR_COLOR_TEMP] + self._attr_color_temp_kelvin = kwargs[ATTR_COLOR_TEMP_KELVIN] self._attr_hs_color = None self._update_color_mode() @@ -325,7 +341,7 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): values["sat"] = hs_color[1] if self._optimistic: - self._attr_color_temp = None + self._attr_color_temp_kelvin = None self._attr_hs_color = kwargs[ATTR_HS_COLOR] self._update_color_mode() diff --git a/homeassistant/components/mqtt/lock.py b/homeassistant/components/mqtt/lock.py index c72dcd8dc21..2113dbbd5ba 100644 --- a/homeassistant/components/mqtt/lock.py +++ b/homeassistant/components/mqtt/lock.py @@ -34,7 +34,7 @@ from .const import ( CONF_STATE_OPENING, CONF_STATE_TOPIC, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from 
.models import ( MqttCommandTemplate, MqttValueTemplate, @@ -45,6 +45,8 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_CODE_FORMAT = "code_format" CONF_PAYLOAD_LOCK = "payload_lock" diff --git a/homeassistant/components/mqtt/manifest.json b/homeassistant/components/mqtt/manifest.json index 34370c82507..081449b142a 100644 --- a/homeassistant/components/mqtt/manifest.json +++ b/homeassistant/components/mqtt/manifest.json @@ -1,11 +1,12 @@ { "domain": "mqtt", "name": "MQTT", + "after_dependencies": ["hassio"], "codeowners": ["@emontnemery", "@jbouwh", "@bdraco"], "config_flow": true, "dependencies": ["file_upload", "http"], "documentation": "https://www.home-assistant.io/integrations/mqtt", "iot_class": "local_push", - "quality_scale": "platinum", - "requirements": ["paho-mqtt==1.6.1"] + "requirements": ["paho-mqtt==1.6.1"], + "single_config_entry": true } diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index f7abbc29464..34c1f304944 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -410,5 +410,15 @@ class MqttData: tags: dict[str, dict[str, MQTTTagScanner]] = field(default_factory=dict) +@dataclass(slots=True) +class MqttComponentConfig: + """(component, object_id, node_id, discovery_payload).""" + + component: str + object_id: str + node_id: str | None + discovery_payload: MQTTDiscoveryPayload + + DATA_MQTT: HassKey[MqttData] = HassKey("mqtt") DATA_MQTT_AVAILABLE: HassKey[asyncio.Future[bool]] = HassKey("mqtt_client_available") diff --git a/homeassistant/components/mqtt/notify.py b/homeassistant/components/mqtt/notify.py index 581660b6ecf..84442e75e73 100644 --- a/homeassistant/components/mqtt/notify.py +++ b/homeassistant/components/mqtt/notify.py @@ -15,11 +15,13 @@ from homeassistant.helpers.typing import ConfigType from .config import DEFAULT_RETAIN, MQTT_BASE_SCHEMA from .const import CONF_COMMAND_TEMPLATE, CONF_COMMAND_TOPIC, CONF_RETAIN -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import MqttCommandTemplate from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT notify" PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend( diff --git a/homeassistant/components/mqtt/number.py b/homeassistant/components/mqtt/number.py index ce441a2de6e..a9bf1829b63 100644 --- a/homeassistant/components/mqtt/number.py +++ b/homeassistant/components/mqtt/number.py @@ -39,7 +39,7 @@ from .const import ( CONF_PAYLOAD_RESET, CONF_STATE_TOPIC, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -50,6 +50,8 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_MIN = "min" CONF_MAX = "max" CONF_STEP = "step" diff --git a/homeassistant/components/mqtt/quality_scale.yaml b/homeassistant/components/mqtt/quality_scale.yaml new file mode 100644 index 00000000000..26ce8cb08dd --- /dev/null +++ b/homeassistant/components/mqtt/quality_scale.yaml @@ -0,0 +1,131 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + 
docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: done + comment: > + Entities are updated through dispatchers, and these are + cleaned up when the integration unloads. + entity-unique-id: + status: exempt + comment: > + This is user configurable, but not required. + It is required, though, when a user wants to use device-based discovery. + has-entity-name: done + runtime-data: + status: exempt + comment: > + Runtime data is not used, as the mqtt entry data is only used to set up the + MQTT broker; this happens during integration setup, + and only one config entry is allowed. + test-before-configure: done + test-before-setup: + status: exempt + comment: > + We choose to exit the entry setup early, as it can take some time for the client + to connect. Waiting for the client would increase the overall setup time. + unique-config-entry: done + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: + status: done + comment: | + Only supported for entities to which the user has assigned a unique_id. + action-exceptions: done + reauthentication-flow: done + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: done + + # Gold + entity-translations: + status: exempt + comment: > + This is not possible because the integration generates entities + based on a user-supplied config or discovery. + entity-device-class: + status: done + comment: An entity device class can be configured by the user for each entity. + devices: + status: done + comment: > + A device context can be configured by the user for each entity. + It is not required, though, except when using device-based discovery. + entity-category: + status: done + comment: An entity category can be configured by the user for each entity. + entity-disabled-by-default: + status: done + comment: > + The user can configure this through YAML or discover + entities that are disabled by default. + discovery: + status: done + comment: > + When the Mosquitto MQTT broker add-on is installed, + an MQTT config flow allows an automatic setup from its discovered settings. + stale-devices: + status: exempt + comment: > + This is only supported for entities that are configured through MQTT discovery. + Users must manually clean up stale entities that were set up through YAML. + diagnostics: done + exception-translations: done + icon-translations: + status: exempt + comment: > + This is not possible because the integration generates entities + based on a user-supplied config or discovery. + reconfiguration-flow: + status: done + comment: > + This integration can also be reconfigured via the options flow. + dynamic-devices: + status: done + comment: | + MQTT allows devices to be dynamically created and removed through MQTT discovery. + discovery-update-info: + status: done + comment: > + If the Mosquitto broker add-on is used to set up MQTT from discovery, + and the broker add-on is re-installed, + MQTT will automatically update from the new broker's credentials. + repair-issues: + status: done + comment: > + This integration uses repair-issues when entities are set up through YAML. + To avoid user panic, discovery deprecation issues are logged only. + It is the responsibility of the maintainer of the service or device to + correct the discovery messages. Extra options are allowed + in MQTT messages to avoid breaking issues. 
+ docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration does not use web sessions. + strict-typing: + status: todo + comment: | + Requirement 'paho-mqtt==1.6.1' appears untyped diff --git a/homeassistant/components/mqtt/scene.py b/homeassistant/components/mqtt/scene.py index 994a77d3abb..314bd716ee0 100644 --- a/homeassistant/components/mqtt/scene.py +++ b/homeassistant/components/mqtt/scene.py @@ -17,10 +17,12 @@ from homeassistant.helpers.typing import ConfigType from .config import MQTT_BASE_SCHEMA from .const import CONF_COMMAND_TOPIC, CONF_RETAIN -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Scene" DEFAULT_RETAIN = False diff --git a/homeassistant/components/mqtt/schemas.py b/homeassistant/components/mqtt/schemas.py index 67c6b447709..5e942c24738 100644 --- a/homeassistant/components/mqtt/schemas.py +++ b/homeassistant/components/mqtt/schemas.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import Any + import voluptuous as vol from homeassistant.const import ( @@ -11,6 +13,7 @@ from homeassistant.const import ( CONF_MODEL, CONF_MODEL_ID, CONF_NAME, + CONF_PLATFORM, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, ) @@ -25,10 +28,14 @@ from .const import ( CONF_AVAILABILITY_MODE, CONF_AVAILABILITY_TEMPLATE, CONF_AVAILABILITY_TOPIC, + CONF_COMMAND_TOPIC, + CONF_COMPONENTS, CONF_CONFIGURATION_URL, CONF_CONNECTIONS, CONF_DEPRECATED_VIA_HUB, CONF_ENABLED_BY_DEFAULT, + CONF_ENCODING, + CONF_ENTITY_PICTURE, CONF_HW_VERSION, CONF_IDENTIFIERS, CONF_JSON_ATTRS_TEMPLATE, @@ -38,7 +45,9 @@ from .const import ( CONF_ORIGIN, CONF_PAYLOAD_AVAILABLE, CONF_PAYLOAD_NOT_AVAILABLE, + CONF_QOS, CONF_SERIAL_NUMBER, + CONF_STATE_TOPIC, CONF_SUGGESTED_AREA, CONF_SUPPORT_URL, CONF_SW_VERSION, @@ -46,10 +55,34 @@ from .const import ( CONF_VIA_DEVICE, DEFAULT_PAYLOAD_AVAILABLE, DEFAULT_PAYLOAD_NOT_AVAILABLE, + ENTITY_PLATFORMS, + SUPPORTED_COMPONENTS, ) -from .util import valid_subscribe_topic +from .util import valid_publish_topic, valid_qos_schema, valid_subscribe_topic -MQTT_AVAILABILITY_SINGLE_SCHEMA = vol.Schema( +# Device discovery options that are also available at entity component level +SHARED_OPTIONS = [ + CONF_AVAILABILITY, + CONF_AVAILABILITY_MODE, + CONF_AVAILABILITY_TEMPLATE, + CONF_AVAILABILITY_TOPIC, + CONF_COMMAND_TOPIC, + CONF_PAYLOAD_AVAILABLE, + CONF_PAYLOAD_NOT_AVAILABLE, + CONF_STATE_TOPIC, +] + +MQTT_ORIGIN_INFO_SCHEMA = vol.All( + vol.Schema( + { + vol.Required(CONF_NAME): cv.string, + vol.Optional(CONF_SW_VERSION): cv.string, + vol.Optional(CONF_SUPPORT_URL): cv.configuration_url, + } + ), +) + +_MQTT_AVAILABILITY_SINGLE_SCHEMA = vol.Schema( { vol.Exclusive(CONF_AVAILABILITY_TOPIC, "availability"): valid_subscribe_topic, vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template, @@ -62,7 +95,7 @@ MQTT_AVAILABILITY_SINGLE_SCHEMA = vol.Schema( } ) -MQTT_AVAILABILITY_LIST_SCHEMA = vol.Schema( +_MQTT_AVAILABILITY_LIST_SCHEMA = vol.Schema( { vol.Optional(CONF_AVAILABILITY_MODE, default=AVAILABILITY_LATEST): vol.All( cv.string, vol.In(AVAILABILITY_MODES) @@ -86,8 +119,8 @@ MQTT_AVAILABILITY_LIST_SCHEMA = vol.Schema( } ) 
-MQTT_AVAILABILITY_SCHEMA = MQTT_AVAILABILITY_SINGLE_SCHEMA.extend( - MQTT_AVAILABILITY_LIST_SCHEMA.schema +_MQTT_AVAILABILITY_SCHEMA = _MQTT_AVAILABILITY_SINGLE_SCHEMA.extend( + _MQTT_AVAILABILITY_LIST_SCHEMA.schema ) @@ -137,9 +170,10 @@ MQTT_ORIGIN_INFO_SCHEMA = vol.All( ), ) -MQTT_ENTITY_COMMON_SCHEMA = MQTT_AVAILABILITY_SCHEMA.extend( +MQTT_ENTITY_COMMON_SCHEMA = _MQTT_AVAILABILITY_SCHEMA.extend( { vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA, + vol.Optional(CONF_ENTITY_PICTURE): cv.url, vol.Optional(CONF_ORIGIN): MQTT_ORIGIN_INFO_SCHEMA, vol.Optional(CONF_ENABLED_BY_DEFAULT, default=True): cv.boolean, vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA, @@ -150,3 +184,35 @@ MQTT_ENTITY_COMMON_SCHEMA = MQTT_AVAILABILITY_SCHEMA.extend( vol.Optional(CONF_UNIQUE_ID): cv.string, } ) + +_UNIQUE_ID_SCHEMA = vol.Schema( + {vol.Required(CONF_UNIQUE_ID): cv.string}, +).extend({}, extra=True) + + +def check_unique_id(config: dict[str, Any]) -> dict[str, Any]: + """Check if a unique ID is set in case an entity platform is configured.""" + platform = config[CONF_PLATFORM] + if platform in ENTITY_PLATFORMS and len(config.keys()) > 1: + _UNIQUE_ID_SCHEMA(config) + return config + + +_COMPONENT_CONFIG_SCHEMA = vol.All( + vol.Schema( + {vol.Required(CONF_PLATFORM): vol.In(SUPPORTED_COMPONENTS)}, + ).extend({}, extra=True), + check_unique_id, +) + +DEVICE_DISCOVERY_SCHEMA = _MQTT_AVAILABILITY_SCHEMA.extend( + { + vol.Required(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA, + vol.Required(CONF_COMPONENTS): vol.Schema({str: _COMPONENT_CONFIG_SCHEMA}), + vol.Required(CONF_ORIGIN): MQTT_ORIGIN_INFO_SCHEMA, + vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic, + vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic, + vol.Optional(CONF_QOS): valid_qos_schema, + vol.Optional(CONF_ENCODING): cv.string, + } +) diff --git a/homeassistant/components/mqtt/select.py b/homeassistant/components/mqtt/select.py index 5f9c4a11c23..55d56ecd774 100644 --- a/homeassistant/components/mqtt/select.py +++ b/homeassistant/components/mqtt/select.py @@ -26,7 +26,7 @@ from .const import ( CONF_OPTIONS, CONF_STATE_TOPIC, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -37,6 +37,8 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Select" MQTT_SELECT_ATTRIBUTES_BLOCKED = frozenset( diff --git a/homeassistant/components/mqtt/sensor.py b/homeassistant/components/mqtt/sensor.py index fc95807b8a5..bacbf4d323e 100644 --- a/homeassistant/components/mqtt/sensor.py +++ b/homeassistant/components/mqtt/sensor.py @@ -40,13 +40,15 @@ from homeassistant.util import dt as dt_util from . 
import subscription from .config import MQTT_RO_SCHEMA from .const import CONF_OPTIONS, CONF_STATE_TOPIC, PAYLOAD_NONE -from .mixins import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper +from .entity import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper from .models import MqttValueTemplate, PayloadSentinel, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import check_state_too_long _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_EXPIRE_AFTER = "expire_after" CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template" CONF_SUGGESTED_DISPLAY_PRECISION = "suggested_display_precision" @@ -100,7 +102,7 @@ def validate_sensor_state_and_device_class_config(config: ConfigType) -> ConfigT if (device_class := config.get(CONF_DEVICE_CLASS)) != SensorDeviceClass.ENUM: raise vol.Invalid( - f"The option `{CONF_OPTIONS}` can only be used " + f"The option `{CONF_OPTIONS}` must be used " f"together with device class `{SensorDeviceClass.ENUM}`, " f"got `{CONF_DEVICE_CLASS}` '{device_class}'" ) @@ -260,14 +262,18 @@ class MqttSensor(MqttEntity, RestoreSensor): msg.topic, ) return + + if payload == PAYLOAD_NONE: + self._attr_native_value = None + return + if self._numeric_state_expected: if payload == "": _LOGGER.debug("Ignore empty state from '%s'", msg.topic) - elif payload == PAYLOAD_NONE: - self._attr_native_value = None else: self._attr_native_value = payload return + if self.options and payload not in self.options: _LOGGER.warning( "Ignoring invalid option received on topic '%s', got '%s', allowed: %s", diff --git a/homeassistant/components/mqtt/siren.py b/homeassistant/components/mqtt/siren.py index e7cf9e270bd..22f64053d23 100644 --- a/homeassistant/components/mqtt/siren.py +++ b/homeassistant/components/mqtt/siren.py @@ -46,7 +46,7 @@ from .const import ( PAYLOAD_EMPTY_JSON, PAYLOAD_NONE, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -55,6 +55,8 @@ from .models import ( ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Siren" DEFAULT_PAYLOAD_ON = "ON" DEFAULT_PAYLOAD_OFF = "OFF" diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 75855f6d9f3..3b337c05d2a 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -56,15 +56,16 @@ "port": "The port your MQTT broker listens to. For example 1883.", "username": "The username to login to your MQTT broker.", "password": "The password to login to your MQTT broker.", - "advanced_options": "Enable and click `next` to set advanced options.", + "advanced_options": "Enable and select **Next** to set advanced options.", "certificate": "The custom CA certificate file to validate your MQTT brokers certificate.", "client_id": "The unique ID to identify the Home Assistant MQTT API as MQTT client. It is recommended to leave this option blank.", "client_cert": "The client certificate to authenticate against your MQTT broker.", "client_key": "The private key file that belongs to your client certificate.", + "keepalive": "A value less than 90 seconds is advised.", "tls_insecure": "Option to ignore validation of your MQTT broker's certificate.", "protocol": "The MQTT protocol your broker operates at. 
For example 3.1.1.", - "set_ca_cert": "Select `Auto` for automatic CA validation, or `Custom` and click `next` to set a custom CA certificate, to allow validating your MQTT brokers certificate.", - "set_client_cert": "Enable and click `next` to set a client certifificate and private key to authenticate against your MQTT broker.", + "set_ca_cert": "Select **Auto** for automatic CA validation, or **Custom** and select **Next** to set a custom CA certificate, to allow validating your MQTT broker's certificate.", + "set_client_cert": "Enable and select **Next** to set a client certificate and private key to authenticate against your MQTT broker.", "transport": "The transport to be used for the connection to your MQTT broker.", "ws_headers": "The WebSocket headers to pass through the WebSocket based connection to your MQTT broker.", "ws_path": "The WebSocket path to be used for the connection to your MQTT broker." @@ -100,6 +101,7 @@ "addon_connection_failed": "Failed to connect to the {addon} add-on. Check the add-on status and try again later.", "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" }, "error": { @@ -113,7 +115,7 @@ "bad_ws_headers": "Supply valid HTTP headers as a JSON object", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "invalid_inclusion": "The client certificate and private key must be configurered together" + "invalid_inclusion": "The client certificate and private key must be configured together" } }, "device_automation": { @@ -172,6 +174,7 @@ "client_id": "[%key:component::mqtt::config::step::broker::data_description::client_id%]", "client_cert": "[%key:component::mqtt::config::step::broker::data_description::client_cert%]", "client_key": "[%key:component::mqtt::config::step::broker::data_description::client_key%]", + "keepalive": "[%key:component::mqtt::config::step::broker::data_description::keepalive%]", "tls_insecure": "[%key:component::mqtt::config::step::broker::data_description::tls_insecure%]", "protocol": "[%key:component::mqtt::config::step::broker::data_description::protocol%]", "set_ca_cert": "[%key:component::mqtt::config::step::broker::data_description::set_ca_cert%]", @@ -287,6 +290,9 @@ } }, "exceptions": { + "addon_start_failed": { + "message": "Failed to correctly start {addon} add-on." + }, "command_template_error": { "message": "Parsing template `{command_template}` for entity `{entity_id}` failed with error: {error}." }, @@ -296,11 +302,23 @@ "invalid_publish_topic": { "message": "Unable to publish: topic template `{topic_template}` produced an invalid topic `{topic}` after rendering ({error})" }, + "mqtt_broker_error": { + "message": "Error talking to MQTT: {error_message}." + }, "mqtt_not_setup_cannot_subscribe": { "message": "Cannot subscribe to topic \"{topic}\", make sure MQTT is set up correctly." }, "mqtt_not_setup_cannot_publish": { "message": "Cannot publish to topic \"{topic}\", make sure MQTT is set up correctly." + }, + "mqtt_not_setup_cannot_unsubscribe_twice": { + "message": "Cannot unsubscribe topic \"{topic}\" twice." + }, + "mqtt_topic_not_a_string": { + "message": "Topic needs to be a string! Got: {topic}." 
+ }, + "mqtt_trigger_cannot_remove_twice": { + "message": "Can't remove trigger twice." } } } diff --git a/homeassistant/components/mqtt/subscription.py b/homeassistant/components/mqtt/subscription.py index 3f3f67970f3..08d501ede12 100644 --- a/homeassistant/components/mqtt/subscription.py +++ b/homeassistant/components/mqtt/subscription.py @@ -86,7 +86,7 @@ class EntitySubscription: @callback def async_prepare_subscribe_topics( hass: HomeAssistant, - new_state: dict[str, EntitySubscription] | None, + sub_state: dict[str, EntitySubscription] | None, topics: dict[str, dict[str, Any]], ) -> dict[str, EntitySubscription]: """Prepare (re)subscribe to a set of MQTT topics. @@ -101,8 +101,9 @@ def async_prepare_subscribe_topics( sets of topics. Every call to async_subscribe_topics must always contain _all_ the topics the subscription state should manage. """ - current_subscriptions = new_state if new_state is not None else {} - new_state = {} + current_subscriptions: dict[str, EntitySubscription] + current_subscriptions = sub_state if sub_state is not None else {} + sub_state = {} for key, value in topics.items(): # Extract the new requested subscription requested = EntitySubscription( @@ -119,7 +120,7 @@ def async_prepare_subscribe_topics( # Get the current subscription state current = current_subscriptions.pop(key, None) requested.resubscribe_if_necessary(hass, current) - new_state[key] = requested + sub_state[key] = requested # Go through all remaining subscriptions and unsubscribe them for remaining in current_subscriptions.values(): @@ -132,7 +133,7 @@ def async_prepare_subscribe_topics( remaining.entity_id, ) - return new_state + return sub_state async def async_subscribe_topics( diff --git a/homeassistant/components/mqtt/switch.py b/homeassistant/components/mqtt/switch.py index 510de7b40dc..0a54bcdb378 100644 --- a/homeassistant/components/mqtt/switch.py +++ b/homeassistant/components/mqtt/switch.py @@ -34,7 +34,7 @@ from .const import ( CONF_STATE_TOPIC, PAYLOAD_NONE, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -43,6 +43,8 @@ from .models import ( ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Switch" DEFAULT_PAYLOAD_ON = "ON" DEFAULT_PAYLOAD_OFF = "OFF" @@ -89,7 +91,7 @@ class MqttSwitch(MqttEntity, SwitchEntity, RestoreEntity): _entity_id_format = switch.ENTITY_ID_FORMAT _optimistic: bool - _is_on_map: dict[str | bytes, bool | None] + _is_on_map: dict[str | bytes | bytearray, bool | None] _command_template: Callable[[PublishPayloadType], PublishPayloadType] _value_template: Callable[[ReceivePayloadType], ReceivePayloadType] diff --git a/homeassistant/components/mqtt/tag.py b/homeassistant/components/mqtt/tag.py index 031c620af4a..680f252fb20 100644 --- a/homeassistant/components/mqtt/tag.py +++ b/homeassistant/components/mqtt/tag.py @@ -20,7 +20,7 @@ from . 
import subscription from .config import MQTT_BASE_SCHEMA from .const import ATTR_DISCOVERY_HASH, CONF_QOS, CONF_TOPIC from .discovery import MQTTDiscoveryPayload -from .mixins import ( +from .entity import ( MqttDiscoveryDeviceUpdateMixin, async_handle_schema_error, async_setup_non_entity_entry_helper, diff --git a/homeassistant/components/mqtt/text.py b/homeassistant/components/mqtt/text.py index 0db711cc456..b4ed33a7730 100644 --- a/homeassistant/components/mqtt/text.py +++ b/homeassistant/components/mqtt/text.py @@ -28,7 +28,7 @@ from homeassistant.helpers.typing import ConfigType, VolSchemaType from . import subscription from .config import MQTT_RW_SCHEMA from .const import CONF_COMMAND_TEMPLATE, CONF_COMMAND_TOPIC, CONF_STATE_TOPIC -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -40,6 +40,8 @@ from .util import check_state_too_long _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_MAX = "max" CONF_MIN = "min" CONF_PATTERN = "pattern" diff --git a/homeassistant/components/mqtt/trigger.py b/homeassistant/components/mqtt/trigger.py index b901176cf88..da26f7f6839 100644 --- a/homeassistant/components/mqtt/trigger.py +++ b/homeassistant/components/mqtt/trigger.py @@ -24,8 +24,15 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerData, Trigge from homeassistant.helpers.typing import ConfigType, TemplateVarsType from homeassistant.util.json import json_loads -from .. import mqtt -from .const import CONF_ENCODING, CONF_QOS, CONF_TOPIC, DEFAULT_ENCODING, DEFAULT_QOS +from .client import async_subscribe_internal +from .const import ( + CONF_ENCODING, + CONF_QOS, + CONF_TOPIC, + DEFAULT_ENCODING, + DEFAULT_QOS, + DOMAIN, +) from .models import ( MqttCommandTemplate, MqttValueTemplate, @@ -33,11 +40,12 @@ from .models import ( PublishPayloadType, ReceiveMessage, ) +from .util import valid_subscribe_topic, valid_subscribe_topic_template TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend( { - vol.Required(CONF_PLATFORM): mqtt.DOMAIN, - vol.Required(CONF_TOPIC): mqtt.util.valid_subscribe_topic_template, + vol.Required(CONF_PLATFORM): DOMAIN, + vol.Required(CONF_TOPIC): valid_subscribe_topic_template, vol.Optional(CONF_PAYLOAD): cv.template, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string, @@ -76,7 +84,7 @@ async def async_attach_trigger( topic_template: Template = config[CONF_TOPIC] topic = topic_template.async_render(variables, limited=True, parse_result=False) - mqtt.util.valid_subscribe_topic(topic) + valid_subscribe_topic(topic) @callback def mqtt_automation_listener(mqttmsg: ReceiveMessage) -> None: @@ -104,7 +112,7 @@ async def async_attach_trigger( "Attaching MQTT trigger for topic: '%s', payload: '%s'", topic, wanted_payload ) - return mqtt.async_subscribe_internal( + return async_subscribe_internal( hass, topic, mqtt_automation_listener, diff --git a/homeassistant/components/mqtt/update.py b/homeassistant/components/mqtt/update.py index 4b87e0ef7da..99b4e5cb821 100644 --- a/homeassistant/components/mqtt/update.py +++ b/homeassistant/components/mqtt/update.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any, TypedDict, cast +from typing import Any import voluptuous as vol @@ -25,16 +25,18 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads from . 
import subscription from .config import DEFAULT_RETAIN, MQTT_RO_SCHEMA from .const import CONF_COMMAND_TOPIC, CONF_RETAIN, CONF_STATE_TOPIC, PAYLOAD_EMPTY_JSON -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import MqttValueTemplate, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Update" -CONF_ENTITY_PICTURE = "entity_picture" +CONF_DISPLAY_PRECISION = "display_precision" CONF_LATEST_VERSION_TEMPLATE = "latest_version_template" CONF_LATEST_VERSION_TOPIC = "latest_version_topic" CONF_PAYLOAD_INSTALL = "payload_install" @@ -47,7 +49,7 @@ PLATFORM_SCHEMA_MODERN = MQTT_RO_SCHEMA.extend( { vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic, vol.Optional(CONF_DEVICE_CLASS): vol.Any(DEVICE_CLASSES_SCHEMA, None), - vol.Optional(CONF_ENTITY_PICTURE): cv.string, + vol.Optional(CONF_DISPLAY_PRECISION, default=0): cv.positive_int, vol.Optional(CONF_LATEST_VERSION_TEMPLATE): cv.template, vol.Optional(CONF_LATEST_VERSION_TOPIC): valid_subscribe_topic, vol.Optional(CONF_NAME): vol.Any(cv.string, None), @@ -63,15 +65,18 @@ PLATFORM_SCHEMA_MODERN = MQTT_RO_SCHEMA.extend( DISCOVERY_SCHEMA = vol.All(PLATFORM_SCHEMA_MODERN.extend({}, extra=vol.REMOVE_EXTRA)) -class _MqttUpdatePayloadType(TypedDict, total=False): - """Presentation of supported JSON payload to process state updates.""" - - installed_version: str - latest_version: str - title: str - release_summary: str - release_url: str - entity_picture: str +MQTT_JSON_UPDATE_SCHEMA = vol.Schema( + { + vol.Optional("installed_version"): cv.string, + vol.Optional("latest_version"): cv.string, + vol.Optional("title"): cv.string, + vol.Optional("release_summary"): cv.string, + vol.Optional("release_url"): cv.url, + vol.Optional("entity_picture"): cv.url, + vol.Optional("in_progress"): cv.boolean, + vol.Optional("update_percentage"): vol.Any(vol.Range(min=0, max=100), None), + } +) async def async_setup_entry( @@ -96,13 +101,12 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity): _default_name = DEFAULT_NAME _entity_id_format = update.ENTITY_ID_FORMAT - _entity_picture: str | None @property def entity_picture(self) -> str | None: """Return the entity picture to use in the frontend.""" - if self._entity_picture is not None: - return self._entity_picture + if self._attr_entity_picture is not None: + return self._attr_entity_picture return super().entity_picture @@ -114,10 +118,10 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" self._attr_device_class = self._config.get(CONF_DEVICE_CLASS) + self._attr_display_precision = self._config[CONF_DISPLAY_PRECISION] self._attr_release_summary = self._config.get(CONF_RELEASE_SUMMARY) self._attr_release_url = self._config.get(CONF_RELEASE_URL) self._attr_title = self._config.get(CONF_TITLE) - self._entity_picture: str | None = self._config.get(CONF_ENTITY_PICTURE) self._templates = { CONF_VALUE_TEMPLATE: MqttValueTemplate( config.get(CONF_VALUE_TEMPLATE), @@ -142,7 +146,7 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity): ) return - json_payload: _MqttUpdatePayloadType = {} + json_payload: dict[str, Any] = {} try: rendered_json_payload = json_loads(payload) if isinstance(rendered_json_payload, dict): @@ -154,7 +158,7 @@ class MqttUpdate(MqttEntity, 
UpdateEntity, RestoreEntity): rendered_json_payload, msg.topic, ) - json_payload = cast(_MqttUpdatePayloadType, rendered_json_payload) + json_payload = MQTT_JSON_UPDATE_SCHEMA(rendered_json_payload) else: _LOGGER.debug( ( @@ -165,14 +169,27 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity): msg.topic, ) json_payload = {"installed_version": str(payload)} + except vol.MultipleInvalid as exc: + _LOGGER.warning( + ( + "Schema violation after processing payload '%s'" + " on topic '%s' for entity '%s': %s" + ), + payload, + msg.topic, + self.entity_id, + exc, + ) + return except JSON_DECODE_EXCEPTIONS: _LOGGER.debug( ( "No valid (JSON) payload detected after processing payload '%s'" - " on topic %s" + " on topic '%s' for entity '%s'" ), payload, msg.topic, + self.entity_id, ) json_payload["installed_version"] = str(payload) @@ -192,7 +209,14 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity): self._attr_release_url = json_payload["release_url"] if "entity_picture" in json_payload: - self._entity_picture = json_payload["entity_picture"] + self._attr_entity_picture = json_payload["entity_picture"] + + if "update_percentage" in json_payload: + self._attr_update_percentage = json_payload["update_percentage"] + self._attr_in_progress = self._attr_update_percentage is not None + + if "in_progress" in json_payload: + self._attr_in_progress = json_payload["in_progress"] @callback def _handle_latest_version_received(self, msg: ReceiveMessage) -> None: @@ -209,12 +233,14 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity): CONF_STATE_TOPIC, self._handle_state_message_received, { + "_attr_entity_picture", + "_attr_in_progress", "_attr_installed_version", "_attr_latest_version", "_attr_title", "_attr_release_summary", "_attr_release_url", - "_entity_picture", + "_attr_update_percentage", }, ) self.add_subscription( @@ -237,7 +263,7 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity): @property def supported_features(self) -> UpdateEntityFeature: """Return the list of supported features.""" - support = UpdateEntityFeature(0) + support = UpdateEntityFeature(UpdateEntityFeature.PROGRESS) if self._config.get(CONF_COMMAND_TOPIC) is not None: support |= UpdateEntityFeature.INSTALL diff --git a/homeassistant/components/mqtt/vacuum.py b/homeassistant/components/mqtt/vacuum.py index c9898465184..743bfb363f3 100644 --- a/homeassistant/components/mqtt/vacuum.py +++ b/homeassistant/components/mqtt/vacuum.py @@ -1,10 +1,5 @@ """Support for MQTT vacuums.""" -# The legacy schema for MQTT vacuum was deprecated with HA Core 2023.8.0 -# and was removed with HA Core 2024.2.0 -# The use of the schema attribute with MQTT vacuum was deprecated with HA Core 2024.2 -# the attribute will be remove with HA Core 2024.8 - from __future__ import annotations import logging @@ -15,20 +10,12 @@ import voluptuous as vol from homeassistant.components import vacuum from homeassistant.components.vacuum import ( ENTITY_ID_FORMAT, - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_SUPPORTED_FEATURES, - CONF_NAME, - STATE_IDLE, - STATE_PAUSED, -) +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_NAME from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -38,26 +25,32 @@ from homeassistant.util.json 
import json_loads_object from . import subscription from .config import MQTT_BASE_SCHEMA -from .const import CONF_COMMAND_TOPIC, CONF_RETAIN, CONF_SCHEMA, CONF_STATE_TOPIC -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .const import CONF_COMMAND_TOPIC, CONF_RETAIN, CONF_STATE_TOPIC +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic -LEGACY = "legacy" -STATE = "state" +PARALLEL_UPDATES = 0 BATTERY = "battery_level" FAN_SPEED = "fan_speed" STATE = "state" -POSSIBLE_STATES: dict[str, str] = { - STATE_IDLE: STATE_IDLE, - STATE_DOCKED: STATE_DOCKED, - STATE_ERROR: STATE_ERROR, - STATE_PAUSED: STATE_PAUSED, - STATE_RETURNING: STATE_RETURNING, - STATE_CLEANING: STATE_CLEANING, +STATE_IDLE = "idle" +STATE_DOCKED = "docked" +STATE_ERROR = "error" +STATE_PAUSED = "paused" +STATE_RETURNING = "returning" +STATE_CLEANING = "cleaning" + +POSSIBLE_STATES: dict[str, VacuumActivity] = { + STATE_IDLE: VacuumActivity.IDLE, + STATE_DOCKED: VacuumActivity.DOCKED, + STATE_ERROR: VacuumActivity.ERROR, + STATE_PAUSED: VacuumActivity.PAUSED, + STATE_RETURNING: VacuumActivity.RETURNING, + STATE_CLEANING: VacuumActivity.CLEANING, } CONF_SUPPORTED_FEATURES = ATTR_SUPPORTED_FEATURES @@ -149,7 +142,7 @@ MQTT_VACUUM_ATTRIBUTES_BLOCKED = frozenset( MQTT_VACUUM_DOCS_URL = "https://www.home-assistant.io/integrations/vacuum.mqtt/" -VACUUM_BASE_SCHEMA = MQTT_BASE_SCHEMA.extend( +PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend( { vol.Optional(CONF_FAN_SPEED_LIST, default=[]): vol.All( cv.ensure_list, [cv.string] @@ -173,26 +166,10 @@ VACUUM_BASE_SCHEMA = MQTT_BASE_SCHEMA.extend( ), vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic, vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean, - vol.Optional(CONF_SCHEMA): vol.All(vol.Lower, vol.Any(LEGACY, STATE)), } ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) -DISCOVERY_SCHEMA = vol.All( - VACUUM_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), - # Do not fail a config is the schema option is still present, - # De option was deprecated with HA Core 2024.2 and removed with HA Core 2024.8. - # As we allow extra options, and we will remove this check silently - # with HA Core 2025.8.0, we will only warn, - # if a adiscovery config still uses this option. - cv.removed(CONF_SCHEMA, raise_if_present=False), -) - -PLATFORM_SCHEMA_MODERN = vol.All( - VACUUM_BASE_SCHEMA, - # The schema options was removed with HA Core 2024.8, - # the cleanup is planned for HA Core 2025.8. 
- cv.removed(CONF_SCHEMA, raise_if_present=True), -) +DISCOVERY_SCHEMA = PLATFORM_SCHEMA_MODERN.extend({}, extra=vol.ALLOW_EXTRA) async def async_setup_entry( @@ -287,7 +264,7 @@ class MqttStateVacuum(MqttEntity, StateVacuumEntity): if STATE in payload and ( (state := payload[STATE]) in POSSIBLE_STATES or state is None ): - self._attr_state = ( + self._attr_activity = ( POSSIBLE_STATES[cast(str, state)] if payload[STATE] else None ) del payload[STATE] @@ -299,7 +276,7 @@ class MqttStateVacuum(MqttEntity, StateVacuumEntity): self.add_subscription( CONF_STATE_TOPIC, self._state_message_received, - {"_attr_battery_level", "_attr_fan_speed", "_attr_state"}, + {"_attr_battery_level", "_attr_fan_speed", "_attr_activity"}, ) async def _subscribe_topics(self) -> None: diff --git a/homeassistant/components/mqtt/valve.py b/homeassistant/components/mqtt/valve.py index 02127dfc19c..50c5960f801 100644 --- a/homeassistant/components/mqtt/valve.py +++ b/homeassistant/components/mqtt/valve.py @@ -13,6 +13,7 @@ from homeassistant.components.valve import ( DEVICE_CLASSES_SCHEMA, ValveEntity, ValveEntityFeature, + ValveState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -20,10 +21,6 @@ from homeassistant.const import ( CONF_NAME, CONF_OPTIMISTIC, CONF_VALUE_TEMPLATE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv @@ -59,13 +56,15 @@ from .const import ( DEFAULT_RETAIN, PAYLOAD_NONE, ) -from .mixins import MqttEntity, async_setup_entity_entry_helper +from .entity import MqttEntity, async_setup_entity_entry_helper from .models import MqttCommandTemplate, MqttValueTemplate, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_REPORTS_POSITION = "reports_position" DEFAULT_NAME = "MQTT Valve" @@ -86,8 +85,8 @@ NO_POSITION_KEYS = ( DEFAULTS = { CONF_PAYLOAD_CLOSE: DEFAULT_PAYLOAD_CLOSE, CONF_PAYLOAD_OPEN: DEFAULT_PAYLOAD_OPEN, - CONF_STATE_OPEN: STATE_OPEN, - CONF_STATE_CLOSED: STATE_CLOSED, + CONF_STATE_OPEN: ValveState.OPEN, + CONF_STATE_CLOSED: ValveState.CLOSED, } RESET_CLOSING_OPENING = "reset_opening_closing" @@ -118,9 +117,9 @@ _PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend( vol.Optional(CONF_REPORTS_POSITION, default=False): cv.boolean, vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean, vol.Optional(CONF_STATE_CLOSED): cv.string, - vol.Optional(CONF_STATE_CLOSING, default=STATE_CLOSING): cv.string, + vol.Optional(CONF_STATE_CLOSING, default=ValveState.CLOSING): cv.string, vol.Optional(CONF_STATE_OPEN): cv.string, - vol.Optional(CONF_STATE_OPENING, default=STATE_OPENING): cv.string, + vol.Optional(CONF_STATE_OPENING, default=ValveState.OPENING): cv.string, vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, } @@ -216,14 +215,14 @@ class MqttValve(MqttEntity, ValveEntity): @callback def _update_state(self, state: str | None) -> None: """Update the valve state properties.""" - self._attr_is_opening = state == STATE_OPENING - self._attr_is_closing = state == STATE_CLOSING + self._attr_is_opening = state == ValveState.OPENING + self._attr_is_closing = state == ValveState.CLOSING if self.reports_position: return if state is None: self._attr_is_closed = None else: - self._attr_is_closed = state == STATE_CLOSED + self._attr_is_closed = state == 
ValveState.CLOSED @callback def _process_binary_valve_update( @@ -232,13 +231,13 @@ class MqttValve(MqttEntity, ValveEntity): """Process an update for a valve that does not report the position.""" state: str | None = None if state_payload == self._config[CONF_STATE_OPENING]: - state = STATE_OPENING + state = ValveState.OPENING elif state_payload == self._config[CONF_STATE_CLOSING]: - state = STATE_CLOSING + state = ValveState.CLOSING elif state_payload == self._config[CONF_STATE_OPEN]: - state = STATE_OPEN + state = ValveState.OPEN elif state_payload == self._config[CONF_STATE_CLOSED]: - state = STATE_CLOSED + state = ValveState.CLOSED elif state_payload == PAYLOAD_NONE: state = None else: @@ -259,9 +258,9 @@ class MqttValve(MqttEntity, ValveEntity): state: str | None = None position_set: bool = False if state_payload == self._config[CONF_STATE_OPENING]: - state = STATE_OPENING + state = ValveState.OPENING elif state_payload == self._config[CONF_STATE_CLOSING]: - state = STATE_CLOSING + state = ValveState.CLOSING elif state_payload == PAYLOAD_NONE: self._attr_current_valve_position = None return @@ -363,7 +362,7 @@ class MqttValve(MqttEntity, ValveEntity): await self.async_publish_with_config(self._config[CONF_COMMAND_TOPIC], payload) if self._optimistic: # Optimistically assume that valve has changed state. - self._update_state(STATE_OPEN) + self._update_state(ValveState.OPEN) self.async_write_ha_state() async def async_close_valve(self) -> None: @@ -377,7 +376,7 @@ class MqttValve(MqttEntity, ValveEntity): await self.async_publish_with_config(self._config[CONF_COMMAND_TOPIC], payload) if self._optimistic: # Optimistically assume that valve has changed state. - self._update_state(STATE_CLOSED) + self._update_state(ValveState.CLOSED) self.async_write_ha_state() async def async_stop_valve(self) -> None: @@ -405,9 +404,9 @@ class MqttValve(MqttEntity, ValveEntity): ) if self._optimistic: self._update_state( - STATE_CLOSED + ValveState.CLOSED if percentage_position == self._config[CONF_POSITION_CLOSED] - else STATE_OPEN + else ValveState.OPEN ) self._attr_current_valve_position = percentage_position self.async_write_ha_state() diff --git a/homeassistant/components/mqtt/water_heater.py b/homeassistant/components/mqtt/water_heater.py index 13b0478210f..4c1d3fa8a53 100644 --- a/homeassistant/components/mqtt/water_heater.py +++ b/homeassistant/components/mqtt/water_heater.py @@ -65,13 +65,15 @@ from .const import ( DEFAULT_OPTIMISTIC, PAYLOAD_NONE, ) -from .mixins import async_setup_entity_entry_helper +from .entity import async_setup_entity_entry_helper from .models import MqttCommandTemplate, MqttValueTemplate, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Water Heater" MQTT_WATER_HEATER_ATTRIBUTES_BLOCKED = frozenset( diff --git a/homeassistant/components/mqtt_eventstream/manifest.json b/homeassistant/components/mqtt_eventstream/manifest.json index 978b11de994..95e97ebb5fa 100644 --- a/homeassistant/components/mqtt_eventstream/manifest.json +++ b/homeassistant/components/mqtt_eventstream/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/mqtt_eventstream", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/mqtt_json/manifest.json 
b/homeassistant/components/mqtt_json/manifest.json index 24ed99979cc..ccaa4996fea 100644 --- a/homeassistant/components/mqtt_json/manifest.json +++ b/homeassistant/components/mqtt_json/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/mqtt_json", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/mqtt_room/manifest.json b/homeassistant/components/mqtt_room/manifest.json index efc5e375cfd..858a1cbb98c 100644 --- a/homeassistant/components/mqtt_room/manifest.json +++ b/homeassistant/components/mqtt_room/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/mqtt_room", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/mqtt_statestream/manifest.json b/homeassistant/components/mqtt_statestream/manifest.json index 134cd80d383..c3c278a08bb 100644 --- a/homeassistant/components/mqtt_statestream/manifest.json +++ b/homeassistant/components/mqtt_statestream/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/mqtt_statestream", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/msteams/manifest.json b/homeassistant/components/msteams/manifest.json index e4b40140441..3ded77c2176 100644 --- a/homeassistant/components/msteams/manifest.json +++ b/homeassistant/components/msteams/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/msteams", "iot_class": "cloud_push", "loggers": ["pymsteams"], + "quality_scale": "legacy", "requirements": ["pymsteams==0.1.12"] } diff --git a/homeassistant/components/mullvad/__init__.py b/homeassistant/components/mullvad/__init__.py index b79b9b4aa6a..f2f6f39c96f 100644 --- a/homeassistant/components/mullvad/__init__.py +++ b/homeassistant/components/mullvad/__init__.py @@ -27,6 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, logging.getLogger(__name__), + config_entry=entry, name=DOMAIN, update_method=async_get_mullvad_api_data, update_interval=timedelta(minutes=1), diff --git a/homeassistant/components/music_assistant/__init__.py b/homeassistant/components/music_assistant/__init__.py new file mode 100644 index 00000000000..22de510ebe3 --- /dev/null +++ b/homeassistant/components/music_assistant/__init__.py @@ -0,0 +1,165 @@ +"""Music Assistant (music-assistant.io) integration.""" + +from __future__ import annotations + +import asyncio +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from music_assistant_client import MusicAssistantClient +from music_assistant_client.exceptions import CannotConnect, InvalidServerVersion +from music_assistant_models.enums import EventType +from music_assistant_models.errors import MusicAssistantError + +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP, Platform +from homeassistant.core import Event, HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.issue_registry import ( 
+ IssueSeverity, + async_create_issue, + async_delete_issue, +) + +from .const import DOMAIN, LOGGER + +if TYPE_CHECKING: + from music_assistant_models.event import MassEvent + +PLATFORMS = [Platform.MEDIA_PLAYER] + +CONNECT_TIMEOUT = 10 +LISTEN_READY_TIMEOUT = 30 + +type MusicAssistantConfigEntry = ConfigEntry[MusicAssistantEntryData] + + +@dataclass +class MusicAssistantEntryData: + """Hold Mass data for the config entry.""" + + mass: MusicAssistantClient + listen_task: asyncio.Task + + +async def async_setup_entry( + hass: HomeAssistant, entry: MusicAssistantConfigEntry +) -> bool: + """Set up Music Assistant from a config entry.""" + http_session = async_get_clientsession(hass, verify_ssl=False) + mass_url = entry.data[CONF_URL] + mass = MusicAssistantClient(mass_url, http_session) + + try: + async with asyncio.timeout(CONNECT_TIMEOUT): + await mass.connect() + except (TimeoutError, CannotConnect) as err: + raise ConfigEntryNotReady( + f"Failed to connect to music assistant server {mass_url}" + ) from err + except InvalidServerVersion as err: + async_create_issue( + hass, + DOMAIN, + "invalid_server_version", + is_fixable=False, + severity=IssueSeverity.ERROR, + translation_key="invalid_server_version", + ) + raise ConfigEntryNotReady(f"Invalid server version: {err}") from err + except MusicAssistantError as err: + LOGGER.exception("Failed to connect to music assistant server", exc_info=err) + raise ConfigEntryNotReady( + f"Unknown error connecting to the Music Assistant server {mass_url}" + ) from err + + async_delete_issue(hass, DOMAIN, "invalid_server_version") + + async def on_hass_stop(event: Event) -> None: + """Handle incoming stop event from Home Assistant.""" + await mass.disconnect() + + entry.async_on_unload( + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) + ) + + # launch the music assistant client listen task in the background + # use the init_ready event to wait until initialization is done + init_ready = asyncio.Event() + listen_task = asyncio.create_task(_client_listen(hass, entry, mass, init_ready)) + + try: + async with asyncio.timeout(LISTEN_READY_TIMEOUT): + await init_ready.wait() + except TimeoutError as err: + listen_task.cancel() + raise ConfigEntryNotReady("Music Assistant client not ready") from err + + # store the listen task and mass client in the entry data + entry.runtime_data = MusicAssistantEntryData(mass, listen_task) + + # If the listen task is already failed, we need to raise ConfigEntryNotReady + if listen_task.done() and (listen_error := listen_task.exception()) is not None: + await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + try: + await mass.disconnect() + finally: + raise ConfigEntryNotReady(listen_error) from listen_error + + # initialize platforms + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + # register listener for removed players + async def handle_player_removed(event: MassEvent) -> None: + """Handle Mass Player Removed event.""" + if event.object_id is None: + return + dev_reg = dr.async_get(hass) + if hass_device := dev_reg.async_get_device({(DOMAIN, event.object_id)}): + dev_reg.async_update_device( + hass_device.id, remove_config_entry_id=entry.entry_id + ) + + entry.async_on_unload( + mass.subscribe(handle_player_removed, EventType.PLAYER_REMOVED) + ) + + return True + + +async def _client_listen( + hass: HomeAssistant, + entry: ConfigEntry, + mass: MusicAssistantClient, + init_ready: asyncio.Event, +) -> None: + """Listen with the client.""" + try: + await 
mass.start_listening(init_ready) + except MusicAssistantError as err: + if entry.state != ConfigEntryState.LOADED: + raise + LOGGER.error("Failed to listen: %s", err) + except Exception as err: # pylint: disable=broad-except + # We need to guard against unknown exceptions to not crash this task. + if entry.state != ConfigEntryState.LOADED: + raise + LOGGER.exception("Unexpected exception: %s", err) + + if not hass.is_stopping: + LOGGER.debug("Disconnected from server. Reloading integration") + hass.async_create_task(hass.config_entries.async_reload(entry.entry_id)) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + if unload_ok: + mass_entry_data: MusicAssistantEntryData = entry.runtime_data + mass_entry_data.listen_task.cancel() + await mass_entry_data.mass.disconnect() + + return unload_ok diff --git a/homeassistant/components/music_assistant/config_flow.py b/homeassistant/components/music_assistant/config_flow.py new file mode 100644 index 00000000000..fc50a2d654b --- /dev/null +++ b/homeassistant/components/music_assistant/config_flow.py @@ -0,0 +1,137 @@ +"""Config flow for MusicAssistant integration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from music_assistant_client import MusicAssistantClient +from music_assistant_client.exceptions import ( + CannotConnect, + InvalidServerVersion, + MusicAssistantClientException, +) +from music_assistant_models.api import ServerInfoMessage +import voluptuous as vol + +from homeassistant.components import zeroconf +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers import aiohttp_client + +from .const import DOMAIN, LOGGER + +DEFAULT_URL = "http://mass.local:8095" +DEFAULT_TITLE = "Music Assistant" + + +def get_manual_schema(user_input: dict[str, Any]) -> vol.Schema: + """Return a schema for the manual step.""" + default_url = user_input.get(CONF_URL, DEFAULT_URL) + return vol.Schema( + { + vol.Required(CONF_URL, default=default_url): str, + } + ) + + +async def get_server_info(hass: HomeAssistant, url: str) -> ServerInfoMessage: + """Validate the user input allows us to connect.""" + async with MusicAssistantClient( + url, aiohttp_client.async_get_clientsession(hass) + ) as client: + if TYPE_CHECKING: + assert client.server_info is not None + return client.server_info + + +class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for MusicAssistant.""" + + VERSION = 1 + + def __init__(self) -> None: + """Set up flow instance.""" + self.server_info: ServerInfoMessage | None = None + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a manual configuration.""" + errors: dict[str, str] = {} + if user_input is not None: + try: + self.server_info = await get_server_info( + self.hass, user_input[CONF_URL] + ) + await self.async_set_unique_id( + self.server_info.server_id, raise_on_progress=False + ) + self._abort_if_unique_id_configured( + updates={CONF_URL: self.server_info.base_url}, + reload_on_update=True, + ) + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidServerVersion: + errors["base"] = "invalid_server_version" + except MusicAssistantClientException: + LOGGER.exception("Unexpected exception") + 
errors["base"] = "unknown" + else: + return self.async_create_entry( + title=DEFAULT_TITLE, + data={ + CONF_URL: self.server_info.base_url, + }, + ) + + return self.async_show_form( + step_id="user", data_schema=get_manual_schema(user_input), errors=errors + ) + + return self.async_show_form(step_id="user", data_schema=get_manual_schema({})) + + async def async_step_zeroconf( + self, discovery_info: zeroconf.ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle a discovered Mass server. + + This flow is triggered by the Zeroconf component. It will check if the + host is already configured and delegate to the import step if not. + """ + # abort if discovery info is not what we expect + if "server_id" not in discovery_info.properties: + return self.async_abort(reason="missing_server_id") + # abort if we already have exactly this server_id + # reload the integration if the host got updated + self.server_info = ServerInfoMessage.from_dict(discovery_info.properties) + await self.async_set_unique_id(self.server_info.server_id) + self._abort_if_unique_id_configured( + updates={CONF_URL: self.server_info.base_url}, + reload_on_update=True, + ) + try: + await get_server_info(self.hass, self.server_info.base_url) + except CannotConnect: + return self.async_abort(reason="cannot_connect") + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle user-confirmation of discovered server.""" + if TYPE_CHECKING: + assert self.server_info is not None + if user_input is not None: + return self.async_create_entry( + title=DEFAULT_TITLE, + data={ + CONF_URL: self.server_info.base_url, + }, + ) + self._set_confirm_only() + return self.async_show_form( + step_id="discovery_confirm", + description_placeholders={"url": self.server_info.base_url}, + ) diff --git a/homeassistant/components/music_assistant/const.py b/homeassistant/components/music_assistant/const.py new file mode 100644 index 00000000000..6512f58b96c --- /dev/null +++ b/homeassistant/components/music_assistant/const.py @@ -0,0 +1,18 @@ +"""Constants for Music Assistant Component.""" + +import logging + +DOMAIN = "music_assistant" +DOMAIN_EVENT = f"{DOMAIN}_event" + +DEFAULT_NAME = "Music Assistant" + +ATTR_IS_GROUP = "is_group" +ATTR_GROUP_MEMBERS = "group_members" +ATTR_GROUP_PARENTS = "group_parents" + +ATTR_MASS_PLAYER_TYPE = "mass_player_type" +ATTR_ACTIVE_QUEUE = "active_queue" +ATTR_STREAM_TITLE = "stream_title" + +LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/music_assistant/entity.py b/homeassistant/components/music_assistant/entity.py new file mode 100644 index 00000000000..f5b6d92b0cf --- /dev/null +++ b/homeassistant/components/music_assistant/entity.py @@ -0,0 +1,86 @@ +"""Base entity model.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from music_assistant_models.enums import EventType +from music_assistant_models.event import MassEvent +from music_assistant_models.player import Player + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + + +class MusicAssistantEntity(Entity): + """Base Entity from Music Assistant Player.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__(self, mass: MusicAssistantClient, player_id: str) -> None: + """Initialize 
MediaPlayer entity.""" + self.mass = mass + self.player_id = player_id + provider = self.mass.get_provider(self.player.provider) + if TYPE_CHECKING: + assert provider is not None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, player_id)}, + manufacturer=self.player.device_info.manufacturer or provider.name, + model=self.player.device_info.model or self.player.name, + name=self.player.display_name, + configuration_url=f"{mass.server_url}/#/settings/editplayer/{player_id}", + ) + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await self.async_on_update() + self.async_on_remove( + self.mass.subscribe( + self.__on_mass_update, EventType.PLAYER_UPDATED, self.player_id + ) + ) + self.async_on_remove( + self.mass.subscribe( + self.__on_mass_update, + EventType.QUEUE_UPDATED, + ) + ) + + @property + def player(self) -> Player: + """Return the Mass Player attached to this HA entity.""" + return self.mass.players[self.player_id] + + @property + def unique_id(self) -> str | None: + """Return unique id for entity.""" + _base = self.player_id + if hasattr(self, "entity_description"): + return f"{_base}_{self.entity_description.key}" + return _base + + @property + def available(self) -> bool: + """Return availability of entity.""" + return self.player.available and bool(self.mass.connection.connected) + + async def __on_mass_update(self, event: MassEvent) -> None: + """Call when we receive an event from MusicAssistant.""" + if event.event == EventType.QUEUE_UPDATED and event.object_id not in ( + self.player.active_source, + self.player.active_group, + self.player.player_id, + ): + return + await self.async_on_update() + self.async_write_ha_state() + + async def async_on_update(self) -> None: + """Handle player updates.""" diff --git a/homeassistant/components/music_assistant/icons.json b/homeassistant/components/music_assistant/icons.json new file mode 100644 index 00000000000..7533dbb6dad --- /dev/null +++ b/homeassistant/components/music_assistant/icons.json @@ -0,0 +1,7 @@ +{ + "services": { + "play_media": { "service": "mdi:play" }, + "play_announcement": { "service": "mdi:bullhorn" }, + "transfer_queue": { "service": "mdi:transfer" } + } +} diff --git a/homeassistant/components/music_assistant/manifest.json b/homeassistant/components/music_assistant/manifest.json new file mode 100644 index 00000000000..f5cdcf50673 --- /dev/null +++ b/homeassistant/components/music_assistant/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "music_assistant", + "name": "Music Assistant", + "after_dependencies": ["media_source", "media_player"], + "codeowners": ["@music-assistant"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/music_assistant", + "iot_class": "local_push", + "loggers": ["music_assistant"], + "requirements": ["music-assistant-client==1.0.8"], + "zeroconf": ["_mass._tcp.local."] +} diff --git a/homeassistant/components/music_assistant/media_browser.py b/homeassistant/components/music_assistant/media_browser.py new file mode 100644 index 00000000000..e65d6d4a975 --- /dev/null +++ b/homeassistant/components/music_assistant/media_browser.py @@ -0,0 +1,351 @@ +"""Media Source Implementation.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from music_assistant_models.media_items import MediaItemType + +from homeassistant.components import media_source +from homeassistant.components.media_player import ( + BrowseError, + BrowseMedia, + MediaClass, + MediaType, +) +from homeassistant.core 
import HomeAssistant + +from .const import DEFAULT_NAME, DOMAIN + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + +MEDIA_TYPE_RADIO = "radio" + +PLAYABLE_MEDIA_TYPES = [ + MediaType.PLAYLIST, + MediaType.ALBUM, + MediaType.ARTIST, + MEDIA_TYPE_RADIO, + MediaType.TRACK, +] + +LIBRARY_ARTISTS = "artists" +LIBRARY_ALBUMS = "albums" +LIBRARY_TRACKS = "tracks" +LIBRARY_PLAYLISTS = "playlists" +LIBRARY_RADIO = "radio" + + +LIBRARY_TITLE_MAP = { + LIBRARY_ARTISTS: "Artists", + LIBRARY_ALBUMS: "Albums", + LIBRARY_TRACKS: "Tracks", + LIBRARY_PLAYLISTS: "Playlists", + LIBRARY_RADIO: "Radio stations", +} + +LIBRARY_MEDIA_CLASS_MAP = { + LIBRARY_ARTISTS: MediaClass.ARTIST, + LIBRARY_ALBUMS: MediaClass.ALBUM, + LIBRARY_TRACKS: MediaClass.TRACK, + LIBRARY_PLAYLISTS: MediaClass.PLAYLIST, + LIBRARY_RADIO: MediaClass.MUSIC, # radio is not accepted by HA +} + +MEDIA_CONTENT_TYPE_FLAC = "audio/flac" +THUMB_SIZE = 200 + + +def media_source_filter(item: BrowseMedia) -> bool: + """Filter media sources.""" + return item.media_content_type.startswith("audio/") + + +async def async_browse_media( + hass: HomeAssistant, + mass: MusicAssistantClient, + media_content_id: str | None, + media_content_type: str | None, +) -> BrowseMedia: + """Browse media.""" + if media_content_id is None: + return await build_main_listing(hass) + + assert media_content_type is not None + + if media_source.is_media_source_id(media_content_id): + return await media_source.async_browse_media( + hass, media_content_id, content_filter=media_source_filter + ) + + if media_content_id == LIBRARY_ARTISTS: + return await build_artists_listing(mass) + if media_content_id == LIBRARY_ALBUMS: + return await build_albums_listing(mass) + if media_content_id == LIBRARY_TRACKS: + return await build_tracks_listing(mass) + if media_content_id == LIBRARY_PLAYLISTS: + return await build_playlists_listing(mass) + if media_content_id == LIBRARY_RADIO: + return await build_radio_listing(mass) + if "artist" in media_content_id: + return await build_artist_items_listing(mass, media_content_id) + if "album" in media_content_id: + return await build_album_items_listing(mass, media_content_id) + if "playlist" in media_content_id: + return await build_playlist_items_listing(mass, media_content_id) + + raise BrowseError(f"Media not found: {media_content_type} / {media_content_id}") + + +async def build_main_listing(hass: HomeAssistant) -> BrowseMedia: + """Build main browse listing.""" + children: list[BrowseMedia] = [] + for library, media_class in LIBRARY_MEDIA_CLASS_MAP.items(): + child_source = BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=library, + media_content_type=DOMAIN, + title=LIBRARY_TITLE_MAP[library], + children_media_class=media_class, + can_play=False, + can_expand=True, + ) + children.append(child_source) + + try: + item = await media_source.async_browse_media( + hass, None, content_filter=media_source_filter + ) + # If domain is None, it's overview of available sources + if item.domain is None and item.children is not None: + children.extend(item.children) + else: + children.append(item) + except media_source.BrowseError: + pass + + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id="", + media_content_type=DOMAIN, + title=DEFAULT_NAME, + can_play=False, + can_expand=True, + children=children, + ) + + +async def build_playlists_listing(mass: MusicAssistantClient) -> BrowseMedia: + """Build Playlists browse listing.""" + media_class = 
LIBRARY_MEDIA_CLASS_MAP[LIBRARY_PLAYLISTS] + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_PLAYLISTS, + media_content_type=MediaType.PLAYLIST, + title=LIBRARY_TITLE_MAP[LIBRARY_PLAYLISTS], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=sorted( + [ + build_item(mass, item, can_expand=True) + # we only grab the first page here because the + # HA media browser does not support paging + for item in await mass.music.get_library_playlists(limit=500) + if item.available + ], + key=lambda x: x.title, + ), + ) + + +async def build_playlist_items_listing( + mass: MusicAssistantClient, identifier: str +) -> BrowseMedia: + """Build Playlist items browse listing.""" + playlist = await mass.music.get_item_by_uri(identifier) + + return BrowseMedia( + media_class=MediaClass.PLAYLIST, + media_content_id=playlist.uri, + media_content_type=MediaType.PLAYLIST, + title=playlist.name, + can_play=True, + can_expand=True, + children_media_class=MediaClass.TRACK, + children=[ + build_item(mass, item, can_expand=False) + # we only grab the first page here because the + # HA media browser does not support paging + for item in await mass.music.get_playlist_tracks( + playlist.item_id, playlist.provider + ) + if item.available + ], + ) + + +async def build_artists_listing(mass: MusicAssistantClient) -> BrowseMedia: + """Build Albums browse listing.""" + media_class = LIBRARY_MEDIA_CLASS_MAP[LIBRARY_ARTISTS] + + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_ARTISTS, + media_content_type=MediaType.ARTIST, + title=LIBRARY_TITLE_MAP[LIBRARY_ARTISTS], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=sorted( + [ + build_item(mass, artist, can_expand=True) + # we only grab the first page here because the + # HA media browser does not support paging + for artist in await mass.music.get_library_artists(limit=500) + if artist.available + ], + key=lambda x: x.title, + ), + ) + + +async def build_artist_items_listing( + mass: MusicAssistantClient, identifier: str +) -> BrowseMedia: + """Build Artist items browse listing.""" + artist = await mass.music.get_item_by_uri(identifier) + albums = await mass.music.get_artist_albums(artist.item_id, artist.provider) + + return BrowseMedia( + media_class=MediaType.ARTIST, + media_content_id=artist.uri, + media_content_type=MediaType.ARTIST, + title=artist.name, + can_play=True, + can_expand=True, + children_media_class=MediaClass.ALBUM, + children=[ + build_item(mass, album, can_expand=True) + for album in albums + if album.available + ], + ) + + +async def build_albums_listing(mass: MusicAssistantClient) -> BrowseMedia: + """Build Albums browse listing.""" + media_class = LIBRARY_MEDIA_CLASS_MAP[LIBRARY_ALBUMS] + + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_ALBUMS, + media_content_type=MediaType.ALBUM, + title=LIBRARY_TITLE_MAP[LIBRARY_ALBUMS], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=sorted( + [ + build_item(mass, album, can_expand=True) + # we only grab the first page here because the + # HA media browser does not support paging + for album in await mass.music.get_library_albums(limit=500) + if album.available + ], + key=lambda x: x.title, + ), + ) + + +async def build_album_items_listing( + mass: MusicAssistantClient, identifier: str +) -> BrowseMedia: + """Build Album items browse listing.""" + album = await mass.music.get_item_by_uri(identifier) + tracks = 
await mass.music.get_album_tracks(album.item_id, album.provider) + + return BrowseMedia( + media_class=MediaType.ALBUM, + media_content_id=album.uri, + media_content_type=MediaType.ALBUM, + title=album.name, + can_play=True, + can_expand=True, + children_media_class=MediaClass.TRACK, + children=[ + build_item(mass, track, False) for track in tracks if track.available + ], + ) + + +async def build_tracks_listing(mass: MusicAssistantClient) -> BrowseMedia: + """Build Tracks browse listing.""" + media_class = LIBRARY_MEDIA_CLASS_MAP[LIBRARY_TRACKS] + + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_TRACKS, + media_content_type=MediaType.TRACK, + title=LIBRARY_TITLE_MAP[LIBRARY_TRACKS], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=sorted( + [ + build_item(mass, track, can_expand=False) + # we only grab the first page here because the + # HA media browser does not support paging + for track in await mass.music.get_library_tracks(limit=500) + if track.available + ], + key=lambda x: x.title, + ), + ) + + +async def build_radio_listing(mass: MusicAssistantClient) -> BrowseMedia: + """Build Radio browse listing.""" + media_class = LIBRARY_MEDIA_CLASS_MAP[LIBRARY_RADIO] + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_RADIO, + media_content_type=DOMAIN, + title=LIBRARY_TITLE_MAP[LIBRARY_RADIO], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=[ + build_item(mass, track, can_expand=False, media_class=media_class) + # we only grab the first page here because the + # HA media browser does not support paging + for track in await mass.music.get_library_radios(limit=500) + if track.available + ], + ) + + +def build_item( + mass: MusicAssistantClient, + item: MediaItemType, + can_expand: bool = True, + media_class: Any = None, +) -> BrowseMedia: + """Return BrowseMedia for MediaItem.""" + if artists := getattr(item, "artists", None): + title = f"{artists[0].name} - {item.name}" + else: + title = item.name + img_url = mass.get_media_item_image_url(item) + + return BrowseMedia( + media_class=media_class or item.media_type.value, + media_content_id=item.uri, + media_content_type=MediaType.MUSIC, + title=title, + can_play=True, + can_expand=can_expand, + thumbnail=img_url, + ) diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py new file mode 100644 index 00000000000..7d09bd5b888 --- /dev/null +++ b/homeassistant/components/music_assistant/media_player.py @@ -0,0 +1,640 @@ +"""MediaPlayer platform for Music Assistant integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable, Coroutine, Mapping +from contextlib import suppress +import functools +import os +from typing import TYPE_CHECKING, Any, Concatenate + +from music_assistant_models.enums import ( + EventType, + MediaType, + PlayerFeature, + PlayerState as MassPlayerState, + QueueOption, + RepeatMode as MassRepeatMode, +) +from music_assistant_models.errors import MediaNotFoundError, MusicAssistantError +from music_assistant_models.event import MassEvent +from music_assistant_models.media_items import ItemMapping, MediaItemType, Track +import voluptuous as vol + +from homeassistant.components import media_source +from homeassistant.components.media_player import ( + ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_EXTRA, + BrowseMedia, + MediaPlayerDeviceClass, + MediaPlayerEnqueue, + MediaPlayerEntity, 
+ MediaPlayerEntityFeature, + MediaPlayerState, + MediaType as HAMediaType, + RepeatMode, + async_process_play_media_url, +) +from homeassistant.const import STATE_OFF +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity_platform import ( + AddEntitiesCallback, + async_get_current_platform, +) +from homeassistant.util.dt import utc_from_timestamp + +from . import MusicAssistantConfigEntry +from .const import ATTR_ACTIVE_QUEUE, ATTR_MASS_PLAYER_TYPE, DOMAIN +from .entity import MusicAssistantEntity +from .media_browser import async_browse_media + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + from music_assistant_models.player import Player + from music_assistant_models.player_queue import PlayerQueue + +SUPPORTED_FEATURES = ( + MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.STOP + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.SHUFFLE_SET + | MediaPlayerEntityFeature.REPEAT_SET + | MediaPlayerEntityFeature.TURN_ON + | MediaPlayerEntityFeature.TURN_OFF + | MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.PLAY_MEDIA + | MediaPlayerEntityFeature.VOLUME_STEP + | MediaPlayerEntityFeature.CLEAR_PLAYLIST + | MediaPlayerEntityFeature.BROWSE_MEDIA + | MediaPlayerEntityFeature.MEDIA_ENQUEUE + | MediaPlayerEntityFeature.MEDIA_ANNOUNCE + | MediaPlayerEntityFeature.SEEK +) + +QUEUE_OPTION_MAP = { + # map from HA enqueue options to MA enqueue options + # which are the same but just in case + MediaPlayerEnqueue.ADD: QueueOption.ADD, + MediaPlayerEnqueue.NEXT: QueueOption.NEXT, + MediaPlayerEnqueue.PLAY: QueueOption.PLAY, + MediaPlayerEnqueue.REPLACE: QueueOption.REPLACE, +} + +SERVICE_PLAY_MEDIA_ADVANCED = "play_media" +SERVICE_PLAY_ANNOUNCEMENT = "play_announcement" +SERVICE_TRANSFER_QUEUE = "transfer_queue" +ATTR_RADIO_MODE = "radio_mode" +ATTR_MEDIA_ID = "media_id" +ATTR_MEDIA_TYPE = "media_type" +ATTR_ARTIST = "artist" +ATTR_ALBUM = "album" +ATTR_URL = "url" +ATTR_USE_PRE_ANNOUNCE = "use_pre_announce" +ATTR_ANNOUNCE_VOLUME = "announce_volume" +ATTR_SOURCE_PLAYER = "source_player" +ATTR_AUTO_PLAY = "auto_play" + + +def catch_musicassistant_error[_R, **P]( + func: Callable[Concatenate[MusicAssistantPlayer, P], Coroutine[Any, Any, _R]], +) -> Callable[Concatenate[MusicAssistantPlayer, P], Coroutine[Any, Any, _R]]: + """Check and log commands to players.""" + + @functools.wraps(func) + async def wrapper( + self: MusicAssistantPlayer, *args: P.args, **kwargs: P.kwargs + ) -> _R: + """Catch Music Assistant errors and convert to Home Assistant error.""" + try: + return await func(self, *args, **kwargs) + except MusicAssistantError as err: + error_msg = str(err) or err.__class__.__name__ + raise HomeAssistantError(error_msg) from err + + return wrapper + + +async def async_setup_entry( + hass: HomeAssistant, + entry: MusicAssistantConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Music Assistant MediaPlayer(s) from Config Entry.""" + mass = entry.runtime_data.mass + added_ids = set() + + async def handle_player_added(event: MassEvent) -> None: + """Handle Mass Player Added event.""" + if TYPE_CHECKING: + assert event.object_id is not None + if event.object_id in added_ids: + return + added_ids.add(event.object_id) + 
async_add_entities([MusicAssistantPlayer(mass, event.object_id)]) + + # register listener for new players + entry.async_on_unload(mass.subscribe(handle_player_added, EventType.PLAYER_ADDED)) + mass_players = [] + # add all current players + for player in mass.players: + added_ids.add(player.player_id) + mass_players.append(MusicAssistantPlayer(mass, player.player_id)) + + async_add_entities(mass_players) + + # add platform service for play_media with advanced options + platform = async_get_current_platform() + platform.async_register_entity_service( + SERVICE_PLAY_MEDIA_ADVANCED, + { + vol.Required(ATTR_MEDIA_ID): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(ATTR_MEDIA_TYPE): vol.Coerce(MediaType), + vol.Optional(ATTR_MEDIA_ENQUEUE): vol.Coerce(QueueOption), + vol.Optional(ATTR_ARTIST): cv.string, + vol.Optional(ATTR_ALBUM): cv.string, + vol.Optional(ATTR_RADIO_MODE): vol.Coerce(bool), + }, + "_async_handle_play_media", + ) + platform.async_register_entity_service( + SERVICE_PLAY_ANNOUNCEMENT, + { + vol.Required(ATTR_URL): cv.string, + vol.Optional(ATTR_USE_PRE_ANNOUNCE): vol.Coerce(bool), + vol.Optional(ATTR_ANNOUNCE_VOLUME): vol.Coerce(int), + }, + "_async_handle_play_announcement", + ) + platform.async_register_entity_service( + SERVICE_TRANSFER_QUEUE, + { + vol.Optional(ATTR_SOURCE_PLAYER): cv.entity_id, + vol.Optional(ATTR_AUTO_PLAY): vol.Coerce(bool), + }, + "_async_handle_transfer_queue", + ) + + +class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): + """Representation of MediaPlayerEntity from Music Assistant Player.""" + + _attr_name = None + _attr_media_image_remotely_accessible = True + _attr_media_content_type = HAMediaType.MUSIC + + def __init__(self, mass: MusicAssistantClient, player_id: str) -> None: + """Initialize MediaPlayer entity.""" + super().__init__(mass, player_id) + self._attr_icon = self.player.icon.replace("mdi-", "mdi:") + self._attr_supported_features = SUPPORTED_FEATURES + if PlayerFeature.SET_MEMBERS in self.player.supported_features: + self._attr_supported_features |= MediaPlayerEntityFeature.GROUPING + if PlayerFeature.VOLUME_MUTE in self.player.supported_features: + self._attr_supported_features |= MediaPlayerEntityFeature.VOLUME_MUTE + self._attr_device_class = MediaPlayerDeviceClass.SPEAKER + self._prev_time: float = 0 + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await super().async_added_to_hass() + + # we subscribe to player queue time update but we only + # accept a state change on big time jumps (e.g. 
seeking) + async def queue_time_updated(event: MassEvent) -> None: + if event.object_id != self.player.active_source: + return + if abs((self._prev_time or 0) - event.data) > 5: + await self.async_on_update() + self.async_write_ha_state() + self._prev_time = event.data + + self.async_on_remove( + self.mass.subscribe( + queue_time_updated, + EventType.QUEUE_TIME_UPDATED, + ) + ) + + @property + def active_queue(self) -> PlayerQueue | None: + """Return the active queue for this player (if any).""" + if not self.player.active_source: + return None + return self.mass.player_queues.get(self.player.active_source) + + @property + def extra_state_attributes(self) -> Mapping[str, Any]: + """Return additional state attributes.""" + return { + ATTR_MASS_PLAYER_TYPE: self.player.type.value, + ATTR_ACTIVE_QUEUE: ( + self.active_queue.queue_id if self.active_queue else None + ), + } + + async def async_on_update(self) -> None: + """Handle player updates.""" + if not self.available: + return + player = self.player + active_queue = self.active_queue + # update generic attributes + if player.powered and active_queue is not None: + self._attr_state = MediaPlayerState(active_queue.state.value) + if player.powered and player.state is not None: + self._attr_state = MediaPlayerState(player.state.value) + else: + self._attr_state = MediaPlayerState(STATE_OFF) + group_members_entity_ids: list[str] = [] + if player.group_childs: + # translate MA group_childs to HA group_members as entity id's + entity_registry = er.async_get(self.hass) + group_members_entity_ids = [ + entity_id + for child_id in player.group_childs + if ( + entity_id := entity_registry.async_get_entity_id( + self.platform.domain, DOMAIN, child_id + ) + ) + ] + # NOTE: we sort the group_members for now, + # until the MA API returns them sorted (group_childs is now a set) + self._attr_group_members = sorted(group_members_entity_ids) + self._attr_volume_level = ( + player.volume_level / 100 if player.volume_level is not None else None + ) + self._attr_is_volume_muted = player.volume_muted + self._update_media_attributes(player, active_queue) + self._update_media_image_url(player, active_queue) + + @catch_musicassistant_error + async def async_media_play(self) -> None: + """Send play command to device.""" + await self.mass.players.player_command_play(self.player_id) + + @catch_musicassistant_error + async def async_media_pause(self) -> None: + """Send pause command to device.""" + await self.mass.players.player_command_pause(self.player_id) + + @catch_musicassistant_error + async def async_media_stop(self) -> None: + """Send stop command to device.""" + await self.mass.players.player_command_stop(self.player_id) + + @catch_musicassistant_error + async def async_media_next_track(self) -> None: + """Send next track command to device.""" + await self.mass.players.player_command_next_track(self.player_id) + + @catch_musicassistant_error + async def async_media_previous_track(self) -> None: + """Send previous track command to device.""" + await self.mass.players.player_command_previous_track(self.player_id) + + @catch_musicassistant_error + async def async_media_seek(self, position: float) -> None: + """Send seek command.""" + position = int(position) + await self.mass.players.player_command_seek(self.player_id, position) + + @catch_musicassistant_error + async def async_mute_volume(self, mute: bool) -> None: + """Mute the volume.""" + await self.mass.players.player_command_volume_mute(self.player_id, mute) + + @catch_musicassistant_error + async def 
async_set_volume_level(self, volume: float) -> None: + """Send new volume_level to device.""" + volume = int(volume * 100) + await self.mass.players.player_command_volume_set(self.player_id, volume) + + @catch_musicassistant_error + async def async_volume_up(self) -> None: + """Send new volume_level to device.""" + await self.mass.players.player_command_volume_up(self.player_id) + + @catch_musicassistant_error + async def async_volume_down(self) -> None: + """Send new volume_level to device.""" + await self.mass.players.player_command_volume_down(self.player_id) + + @catch_musicassistant_error + async def async_turn_on(self) -> None: + """Turn on device.""" + await self.mass.players.player_command_power(self.player_id, True) + + @catch_musicassistant_error + async def async_turn_off(self) -> None: + """Turn off device.""" + await self.mass.players.player_command_power(self.player_id, False) + + @catch_musicassistant_error + async def async_set_shuffle(self, shuffle: bool) -> None: + """Set shuffle state.""" + if not self.active_queue: + return + await self.mass.player_queues.queue_command_shuffle( + self.active_queue.queue_id, shuffle + ) + + @catch_musicassistant_error + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set repeat state.""" + if not self.active_queue: + return + await self.mass.player_queues.queue_command_repeat( + self.active_queue.queue_id, MassRepeatMode(repeat) + ) + + @catch_musicassistant_error + async def async_clear_playlist(self) -> None: + """Clear players playlist.""" + if TYPE_CHECKING: + assert self.player.active_source is not None + if queue := self.mass.player_queues.get(self.player.active_source): + await self.mass.player_queues.queue_command_clear(queue.queue_id) + + @catch_musicassistant_error + async def async_play_media( + self, + media_type: MediaType | str, + media_id: str, + enqueue: MediaPlayerEnqueue | None = None, + announce: bool | None = None, + **kwargs: Any, + ) -> None: + """Send the play_media command to the media player.""" + if media_source.is_media_source_id(media_id): + # Handle media_source + sourced_media = await media_source.async_resolve_media( + self.hass, media_id, self.entity_id + ) + media_id = sourced_media.url + media_id = async_process_play_media_url(self.hass, media_id) + + if announce: + await self._async_handle_play_announcement( + media_id, + use_pre_announce=kwargs[ATTR_MEDIA_EXTRA].get("use_pre_announce"), + announce_volume=kwargs[ATTR_MEDIA_EXTRA].get("announce_volume"), + ) + return + + # forward to our advanced play_media handler + await self._async_handle_play_media( + media_id=[media_id], + enqueue=enqueue, + media_type=media_type, + radio_mode=kwargs[ATTR_MEDIA_EXTRA].get(ATTR_RADIO_MODE), + ) + + @catch_musicassistant_error + async def async_join_players(self, group_members: list[str]) -> None: + """Join `group_members` as a player group with the current player.""" + player_ids: list[str] = [] + entity_registry = er.async_get(self.hass) + for child_entity_id in group_members: + # resolve HA entity_id to MA player_id + if not (entity_reg_entry := entity_registry.async_get(child_entity_id)): + raise HomeAssistantError(f"Entity {child_entity_id} not found") + # unique id is the MA player_id + player_ids.append(entity_reg_entry.unique_id) + await self.mass.players.player_command_group_many(self.player_id, player_ids) + + @catch_musicassistant_error + async def async_unjoin_player(self) -> None: + """Remove this player from any group.""" + await self.mass.players.player_command_ungroup(self.player_id) 
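
The two grouping methods above resolve Home Assistant entity IDs to Music Assistant player IDs through the entity registry (each entity's unique_id is the MA player_id) before issuing the group/ungroup commands. As a minimal sketch only, this is how an automation or custom script could exercise that path through the standard media_player.join and media_player.unjoin services; the entity IDs used here are assumptions for illustration, not part of the patch:

from homeassistant.core import HomeAssistant


async def group_example(hass: HomeAssistant) -> None:
    """Sketch: group and ungroup Music Assistant players via standard services."""
    # media_player.join is routed to MusicAssistantPlayer.async_join_players,
    # which maps each entity_id in group_members to its MA player_id and then
    # calls player_command_group_many on the Music Assistant server.
    await hass.services.async_call(
        "media_player",
        "join",
        {
            "entity_id": "media_player.kitchen",  # assumed group leader entity
            "group_members": ["media_player.living_room"],  # assumed member entity
        },
        blocking=True,
    )
    # media_player.unjoin is routed to async_unjoin_player, which simply
    # ungroups the targeted player on the Music Assistant server.
    await hass.services.async_call(
        "media_player",
        "unjoin",
        {"entity_id": "media_player.living_room"},
        blocking=True,
    )
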
+ + @catch_musicassistant_error + async def _async_handle_play_media( + self, + media_id: list[str], + artist: str | None = None, + album: str | None = None, + enqueue: MediaPlayerEnqueue | QueueOption | None = None, + radio_mode: bool | None = None, + media_type: str | None = None, + ) -> None: + """Send the play_media command to the media player.""" + media_uris: list[str] = [] + item: MediaItemType | ItemMapping | None = None + # work out (all) uri(s) to play + for media_id_str in media_id: + # URL or URI string + if "://" in media_id_str: + media_uris.append(media_id_str) + continue + # try content id as library id + if media_type and media_id_str.isnumeric(): + with suppress(MediaNotFoundError): + item = await self.mass.music.get_item( + MediaType(media_type), media_id_str, "library" + ) + if isinstance(item, MediaItemType | ItemMapping) and item.uri: + media_uris.append(item.uri) + continue + # try local accessible filename + elif await asyncio.to_thread(os.path.isfile, media_id_str): + media_uris.append(media_id_str) + continue + # last resort: search for media item by name/search + if item := await self.mass.music.get_item_by_name( + name=media_id_str, + artist=artist, + album=album, + media_type=MediaType(media_type) if media_type else None, + ): + media_uris.append(item.uri) + + if not media_uris: + raise HomeAssistantError( + f"Could not resolve {media_id} to playable media item" + ) + + # determine active queue to send the play request to + if TYPE_CHECKING: + assert self.player.active_source is not None + if queue := self.mass.player_queues.get(self.player.active_source): + queue_id = queue.queue_id + else: + queue_id = self.player_id + + await self.mass.player_queues.play_media( + queue_id, + media=media_uris, + option=self._convert_queueoption_to_media_player_enqueue(enqueue), + radio_mode=radio_mode if radio_mode else False, + ) + + @catch_musicassistant_error + async def _async_handle_play_announcement( + self, + url: str, + use_pre_announce: bool | None = None, + announce_volume: int | None = None, + ) -> None: + """Send the play_announcement command to the media player.""" + await self.mass.players.play_announcement( + self.player_id, url, use_pre_announce, announce_volume + ) + + @catch_musicassistant_error + async def _async_handle_transfer_queue( + self, source_player: str | None = None, auto_play: bool | None = None + ) -> None: + """Transfer the current queue to another player.""" + if not source_player: + # no source player given; try to find a playing player(queue) + for queue in self.mass.player_queues: + if queue.state == MassPlayerState.PLAYING: + source_queue_id = queue.queue_id + break + else: + raise HomeAssistantError( + "Source player not specified and no playing player found." 
+ ) + else: + # resolve HA entity_id to MA player_id + entity_registry = er.async_get(self.hass) + if (entity := entity_registry.async_get(source_player)) is None: + raise HomeAssistantError("Source player not available.") + source_queue_id = entity.unique_id # unique_id is the MA player_id + target_queue_id = self.player_id + await self.mass.player_queues.transfer_queue( + source_queue_id, target_queue_id, auto_play + ) + + async def async_browse_media( + self, + media_content_type: MediaType | str | None = None, + media_content_id: str | None = None, + ) -> BrowseMedia: + """Implement the websocket media browsing helper.""" + return await async_browse_media( + self.hass, + self.mass, + media_content_id, + media_content_type, + ) + + def _update_media_image_url( + self, player: Player, queue: PlayerQueue | None + ) -> None: + """Update image URL for the active queue item.""" + if queue is None or queue.current_item is None: + self._attr_media_image_url = None + return + if image_url := self.mass.get_media_item_image_url(queue.current_item): + self._attr_media_image_remotely_accessible = ( + self.mass.server_url not in image_url + ) + self._attr_media_image_url = image_url + return + self._attr_media_image_url = None + + def _update_media_attributes( + self, player: Player, queue: PlayerQueue | None + ) -> None: + """Update media attributes for the active queue item.""" + self._attr_media_artist = None + self._attr_media_album_artist = None + self._attr_media_album_name = None + self._attr_media_title = None + self._attr_media_content_id = None + self._attr_media_duration = None + self._attr_media_position = None + self._attr_media_position_updated_at = None + + if queue is None and player.current_media: + # player has some external source active + self._attr_media_content_id = player.current_media.uri + self._attr_app_id = player.active_source + self._attr_media_title = player.current_media.title + self._attr_media_artist = player.current_media.artist + self._attr_media_album_name = player.current_media.album + self._attr_media_duration = player.current_media.duration + # shuffle and repeat are not (yet) supported for external sources + self._attr_shuffle = None + self._attr_repeat = None + if TYPE_CHECKING: + assert player.elapsed_time is not None + self._attr_media_position = int(player.elapsed_time) + self._attr_media_position_updated_at = ( + utc_from_timestamp(player.elapsed_time_last_updated) + if player.elapsed_time_last_updated + else None + ) + if TYPE_CHECKING: + assert player.elapsed_time is not None + self._prev_time = player.elapsed_time + return + + if queue is None: + # player has no MA queue active + self._attr_source = player.active_source + self._attr_app_id = player.active_source + return + + # player has an MA queue active (either its own queue or some group queue) + self._attr_app_id = DOMAIN + self._attr_shuffle = queue.shuffle_enabled + self._attr_repeat = queue.repeat_mode.value + if not (cur_item := queue.current_item): + # queue is empty + return + + self._attr_media_content_id = queue.current_item.uri + self._attr_media_duration = queue.current_item.duration + self._attr_media_position = int(queue.elapsed_time) + self._attr_media_position_updated_at = utc_from_timestamp( + queue.elapsed_time_last_updated + ) + self._prev_time = queue.elapsed_time + + # handle stream title (radio station icy metadata) + if (stream_details := cur_item.streamdetails) and stream_details.stream_title: + self._attr_media_album_name = cur_item.name + if " - " in 
stream_details.stream_title: + stream_title_parts = stream_details.stream_title.split(" - ", 1) + self._attr_media_title = stream_title_parts[1] + self._attr_media_artist = stream_title_parts[0] + else: + self._attr_media_title = stream_details.stream_title + return + + if not (media_item := cur_item.media_item): + # queue is not playing a regular media item (edge case?!) + self._attr_media_title = cur_item.name + return + + # queue is playing regular media item + self._attr_media_title = media_item.name + # for tracks we can extract more info + if media_item.media_type == MediaType.TRACK: + if TYPE_CHECKING: + assert isinstance(media_item, Track) + self._attr_media_artist = media_item.artist_str + if media_item.version: + self._attr_media_title += f" ({media_item.version})" + if media_item.album: + self._attr_media_album_name = media_item.album.name + self._attr_media_album_artist = getattr( + media_item.album, "artist_str", None + ) + + def _convert_queueoption_to_media_player_enqueue( + self, queue_option: MediaPlayerEnqueue | QueueOption | None + ) -> QueueOption | None: + """Convert a QueueOption to a MediaPlayerEnqueue.""" + if isinstance(queue_option, MediaPlayerEnqueue): + queue_option = QUEUE_OPTION_MAP.get(queue_option) + return queue_option diff --git a/homeassistant/components/music_assistant/services.yaml b/homeassistant/components/music_assistant/services.yaml new file mode 100644 index 00000000000..00f895c4ef6 --- /dev/null +++ b/homeassistant/components/music_assistant/services.yaml @@ -0,0 +1,90 @@ +# Descriptions for Music Assistant custom services + +play_media: + target: + entity: + domain: media_player + integration: music_assistant + supported_features: + - media_player.MediaPlayerEntityFeature.PLAY_MEDIA + fields: + media_id: + required: true + example: "spotify://playlist/aabbccddeeff" + selector: + object: + media_type: + example: "playlist" + selector: + select: + translation_key: media_type + options: + - artist + - album + - playlist + - track + - radio + artist: + example: "Queen" + selector: + text: + album: + example: "News of the world" + selector: + text: + enqueue: + selector: + select: + options: + - "play" + - "replace" + - "next" + - "replace_next" + - "add" + translation_key: enqueue + radio_mode: + advanced: true + selector: + boolean: + +play_announcement: + target: + entity: + domain: media_player + integration: music_assistant + supported_features: + - media_player.MediaPlayerEntityFeature.PLAY_MEDIA + - media_player.MediaPlayerEntityFeature.MEDIA_ANNOUNCE + fields: + url: + required: true + example: "http://someremotesite.com/doorbell.mp3" + selector: + text: + use_pre_announce: + example: "true" + selector: + boolean: + announce_volume: + example: 75 + selector: + number: + min: 1 + max: 100 + step: 1 + +transfer_queue: + target: + entity: + domain: media_player + integration: music_assistant + fields: + source_player: + selector: + entity: + domain: media_player + integration: music_assistant + auto_play: + example: "true" + selector: + boolean: diff --git a/homeassistant/components/music_assistant/strings.json b/homeassistant/components/music_assistant/strings.json new file mode 100644 index 00000000000..cce7f9607c2 --- /dev/null +++ b/homeassistant/components/music_assistant/strings.json @@ -0,0 +1,124 @@ +{ + "config": { + "step": { + "init": { + "data": { + "url": "URL of the Music Assistant server" + } + }, + "manual": { + "title": "Manually add Music Assistant Server", + "description": "Enter the URL to your already running Music Assistant 
Server. If you do not have the Music Assistant Server running, you should install it first.", + "data": { + "url": "URL of the Music Assistant server" + } + }, + "discovery_confirm": { + "description": "Do you want to add the Music Assistant Server `{url}` to Home Assistant?", + "title": "Discovered Music Assistant Server" + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_server_version": "The Music Assistant server is not the correct version", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "Configuration flow is already in progress", + "reconfiguration_successful": "Successfully reconfigured the Music Assistant integration.", + "cannot_connect": "Failed to connect", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + } + }, + "issues": { + "invalid_server_version": { + "title": "The Music Assistant server is not the correct version", + "description": "Check if there are updates available for the Music Assistant Server and/or integration." + } + }, + "services": { + "play_media": { + "name": "Play media", + "description": "Play media on a Music Assistant player with more fine-grained control options.", + "fields": { + "media_id": { + "name": "Media ID(s)", + "description": "URI or name of the item you want to play. Specify a list if you want to play/enqueue multiple items." + }, + "media_type": { + "name": "Media type", + "description": "The type of the content to play. Such as artist, album, track or playlist. Will be auto-determined if omitted." + }, + "enqueue": { + "name": "Enqueue", + "description": "If the content should be played now or added to the queue." + }, + "artist": { + "name": "Artist name", + "description": "When specifying a track or album by name in the Media ID field, you can optionally restrict results by this artist name." + }, + "album": { + "name": "Album name", + "description": "When specifying a track by name in the Media ID field, you can optionally restrict results by this album name." + }, + "radio_mode": { + "name": "Enable radio mode", + "description": "Enable radio mode to auto-generate a playlist based on the selection." + } + } + }, + "play_announcement": { + "name": "Play announcement", + "description": "Play announcement on a Music Assistant player with more fine-grained control options.", + "fields": { + "url": { + "name": "URL", + "description": "URL to the notification sound." + }, + "use_pre_announce": { + "name": "Use pre-announce", + "description": "Use pre-announcement sound for the announcement. Omit to use the player default." + }, + "announce_volume": { + "name": "Announce volume", + "description": "Use a forced volume level for the announcement. Omit to use player default." + } + } + }, + "transfer_queue": { + "name": "Transfer queue", + "description": "Transfer the player's queue to another player.", + "fields": { + "source_player": { + "name": "Source media player", + "description": "The source media player which has the queue you want to transfer. When omitted, the first playing player will be used." + }, + "auto_play": { + "name": "Auto play", + "description": "Start playing the queue on the target player. Omit to use the default behavior." 
+ } + } + } + }, + "selector": { + "enqueue": { + "options": { + "play": "Play", + "next": "Play next", + "add": "Add to queue", + "replace": "Play now and clear queue", + "replace_next": "Play next and clear queue" + } + }, + "media_type": { + "options": { + "artist": "Artist", + "album": "Album", + "track": "Track", + "playlist": "Playlist", + "radio": "Radio" + } + } + } +} diff --git a/homeassistant/components/mutesync/__init__.py b/homeassistant/components/mutesync/__init__.py index 75eefaf6784..d5d2e3414d5 100644 --- a/homeassistant/components/mutesync/__init__.py +++ b/homeassistant/components/mutesync/__init__.py @@ -45,6 +45,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: update_coordinator.DataUpdateCoordinator( hass, logging.getLogger(__name__), + config_entry=entry, name=DOMAIN, update_interval=UPDATE_INTERVAL_NOT_IN_MEETING, update_method=update_data, diff --git a/homeassistant/components/mvglive/manifest.json b/homeassistant/components/mvglive/manifest.json index f73d4612c2e..2c4e6a7e735 100644 --- a/homeassistant/components/mvglive/manifest.json +++ b/homeassistant/components/mvglive/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/mvglive", "iot_class": "cloud_polling", "loggers": ["MVGLive"], + "quality_scale": "legacy", "requirements": ["PyMVGLive==1.1.4"] } diff --git a/homeassistant/components/mycroft/manifest.json b/homeassistant/components/mycroft/manifest.json index 9b8731f0701..568bb8b1784 100644 --- a/homeassistant/components/mycroft/manifest.json +++ b/homeassistant/components/mycroft/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/mycroft", "iot_class": "local_push", "loggers": ["mycroftapi"], + "quality_scale": "legacy", "requirements": ["mycroftapi==2.0"] } diff --git a/homeassistant/components/mysensors/__init__.py b/homeassistant/components/mysensors/__init__.py index 8ebcbe0e2fe..19dcce78446 100644 --- a/homeassistant/components/mysensors/__init__.py +++ b/homeassistant/components/mysensors/__init__.py @@ -23,7 +23,7 @@ from .const import ( DiscoveryInfo, SensorType, ) -from .device import MySensorsChildEntity, get_mysensors_devices +from .entity import MySensorsChildEntity, get_mysensors_devices from .gateway import finish_setup, gw_stop, setup_gateway _LOGGER = logging.getLogger(__name__) @@ -148,7 +148,7 @@ def setup_mysensors_platform( devices[dev_id] = device_class_copy(*args_copy) new_devices.append(devices[dev_id]) if new_devices: - _LOGGER.info("Adding new devices: %s", new_devices) + _LOGGER.debug("Adding new devices: %s", new_devices) if async_add_entities is not None: async_add_entities(new_devices) return new_devices diff --git a/homeassistant/components/mysensors/binary_sensor.py b/homeassistant/components/mysensors/binary_sensor.py index b8a3769308a..54f7036b79c 100644 --- a/homeassistant/components/mysensors/binary_sensor.py +++ b/homeassistant/components/mysensors/binary_sensor.py @@ -17,8 +17,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import mysensors +from . 
import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo +from .entity import MySensorsChildEntity from .helpers import on_unload @@ -77,7 +78,7 @@ async def async_setup_entry( @callback def async_discover(discovery_info: DiscoveryInfo) -> None: """Discover and add a MySensors binary_sensor.""" - mysensors.setup_mysensors_platform( + setup_mysensors_platform( hass, Platform.BINARY_SENSOR, discovery_info, @@ -96,7 +97,7 @@ async def async_setup_entry( ) -class MySensorsBinarySensor(mysensors.device.MySensorsChildEntity, BinarySensorEntity): +class MySensorsBinarySensor(MySensorsChildEntity, BinarySensorEntity): """Representation of a MySensors binary sensor child node.""" entity_description: MySensorsBinarySensorDescription diff --git a/homeassistant/components/mysensors/climate.py b/homeassistant/components/mysensors/climate.py index 0008297f299..23b7c47ebf3 100644 --- a/homeassistant/components/mysensors/climate.py +++ b/homeassistant/components/mysensors/climate.py @@ -18,8 +18,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.unit_system import METRIC_SYSTEM -from .. import mysensors +from . import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo +from .entity import MySensorsChildEntity from .helpers import on_unload DICT_HA_TO_MYS = { @@ -48,7 +49,7 @@ async def async_setup_entry( async def async_discover(discovery_info: DiscoveryInfo) -> None: """Discover and add a MySensors climate.""" - mysensors.setup_mysensors_platform( + setup_mysensors_platform( hass, Platform.CLIMATE, discovery_info, @@ -67,11 +68,10 @@ async def async_setup_entry( ) -class MySensorsHVAC(mysensors.device.MySensorsChildEntity, ClimateEntity): +class MySensorsHVAC(MySensorsChildEntity, ClimateEntity): """Representation of a MySensors HVAC.""" _attr_hvac_modes = OPERATION_LIST - _enable_turn_on_off_backwards_compatibility = False @property def supported_features(self) -> ClimateEntityFeature: diff --git a/homeassistant/components/mysensors/cover.py b/homeassistant/components/mysensors/cover.py index acd5643965f..808589b9022 100644 --- a/homeassistant/components/mysensors/cover.py +++ b/homeassistant/components/mysensors/cover.py @@ -12,8 +12,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import mysensors +from . 
import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo +from .entity import MySensorsChildEntity from .helpers import on_unload @@ -36,7 +37,7 @@ async def async_setup_entry( async def async_discover(discovery_info: DiscoveryInfo) -> None: """Discover and add a MySensors cover.""" - mysensors.setup_mysensors_platform( + setup_mysensors_platform( hass, Platform.COVER, discovery_info, @@ -55,7 +56,7 @@ async def async_setup_entry( ) -class MySensorsCover(mysensors.device.MySensorsChildEntity, CoverEntity): +class MySensorsCover(MySensorsChildEntity, CoverEntity): """Representation of the value of a MySensors Cover child node.""" def get_cover_state(self) -> CoverState: diff --git a/homeassistant/components/mysensors/device_tracker.py b/homeassistant/components/mysensors/device_tracker.py index 968ee94b60e..5abe6a64e2d 100644 --- a/homeassistant/components/mysensors/device_tracker.py +++ b/homeassistant/components/mysensors/device_tracker.py @@ -2,7 +2,7 @@ from __future__ import annotations -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback @@ -11,7 +11,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo -from .device import MySensorsChildEntity +from .entity import MySensorsChildEntity from .helpers import on_unload @@ -47,24 +47,6 @@ async def async_setup_entry( class MySensorsDeviceTracker(MySensorsChildEntity, TrackerEntity): """Represent a MySensors device tracker.""" - _latitude: float | None = None - _longitude: float | None = None - - @property - def latitude(self) -> float | None: - """Return latitude value of the device.""" - return self._latitude - - @property - def longitude(self) -> float | None: - """Return longitude value of the device.""" - return self._longitude - - @property - def source_type(self) -> SourceType: - """Return the source type of the device.""" - return SourceType.GPS - @callback def _async_update(self) -> None: """Update the controller with the latest value from a device.""" @@ -73,5 +55,5 @@ class MySensorsDeviceTracker(MySensorsChildEntity, TrackerEntity): child = node.children[self.child_id] position: str = child.values[self.value_type] latitude, longitude, _ = position.split(",") - self._latitude = float(latitude) - self._longitude = float(longitude) + self._attr_latitude = float(latitude) + self._attr_longitude = float(longitude) diff --git a/homeassistant/components/mysensors/device.py b/homeassistant/components/mysensors/entity.py similarity index 100% rename from homeassistant/components/mysensors/device.py rename to homeassistant/components/mysensors/entity.py diff --git a/homeassistant/components/mysensors/gateway.py b/homeassistant/components/mysensors/gateway.py index 00c8d5eecfb..fa3464c0088 100644 --- a/homeassistant/components/mysensors/gateway.py +++ b/homeassistant/components/mysensors/gateway.py @@ -114,14 +114,14 @@ async def try_connect( await gateway_ready.wait() return True except TimeoutError: - _LOGGER.info("Try gateway connect failed with timeout") + _LOGGER.warning("Try gateway connect failed with timeout") return False finally: if connect_task is not None and not connect_task.done(): connect_task.cancel() await gateway.stop() except OSError as err: 
- _LOGGER.info("Try gateway connect failed with exception", exc_info=err) + _LOGGER.warning("Try gateway connect failed with exception", exc_info=err) return False diff --git a/homeassistant/components/mysensors/handler.py b/homeassistant/components/mysensors/handler.py index 20e0ddd0e5a..96ea5347102 100644 --- a/homeassistant/components/mysensors/handler.py +++ b/homeassistant/components/mysensors/handler.py @@ -13,7 +13,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.util import decorator from .const import CHILD_CALLBACK, NODE_CALLBACK, DevId, GatewayId -from .device import get_mysensors_devices +from .entity import get_mysensors_devices from .helpers import ( discover_mysensors_node, discover_mysensors_platform, diff --git a/homeassistant/components/mysensors/light.py b/homeassistant/components/mysensors/light.py index c3691a40140..87f60174cab 100644 --- a/homeassistant/components/mysensors/light.py +++ b/homeassistant/components/mysensors/light.py @@ -18,9 +18,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.color import rgb_hex_to_rgb_list -from .. import mysensors +from . import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo, SensorType -from .device import MySensorsChildEntity +from .entity import MySensorsChildEntity from .helpers import on_unload @@ -38,7 +38,7 @@ async def async_setup_entry( async def async_discover(discovery_info: DiscoveryInfo) -> None: """Discover and add a MySensors light.""" - mysensors.setup_mysensors_platform( + setup_mysensors_platform( hass, Platform.LIGHT, discovery_info, @@ -57,7 +57,7 @@ async def async_setup_entry( ) -class MySensorsLight(mysensors.device.MySensorsChildEntity, LightEntity): +class MySensorsLight(MySensorsChildEntity, LightEntity): """Representation of a MySensors Light child node.""" def __init__(self, *args: Any) -> None: @@ -173,7 +173,8 @@ class MySensorsLightRGB(MySensorsLight): new_rgb: tuple[int, int, int] | None = kwargs.get(ATTR_RGB_COLOR) if new_rgb is None: return - hex_color = "{:02x}{:02x}{:02x}".format(*new_rgb) + red, green, blue = new_rgb + hex_color = f"{red:02x}{green:02x}{blue:02x}" self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, hex_color, ack=1 ) @@ -220,7 +221,8 @@ class MySensorsLightRGBW(MySensorsLightRGB): new_rgbw: tuple[int, int, int, int] | None = kwargs.get(ATTR_RGBW_COLOR) if new_rgbw is None: return - hex_color = "{:02x}{:02x}{:02x}{:02x}".format(*new_rgbw) + red, green, blue, white = new_rgbw + hex_color = f"{red:02x}{green:02x}{blue:02x}{white:02x}" self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, hex_color, ack=1 ) diff --git a/homeassistant/components/mysensors/remote.py b/homeassistant/components/mysensors/remote.py index e9404bb3197..1a4f6fdaa90 100644 --- a/homeassistant/components/mysensors/remote.py +++ b/homeassistant/components/mysensors/remote.py @@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo -from .device import MySensorsChildEntity +from .entity import MySensorsChildEntity from .helpers import on_unload diff --git a/homeassistant/components/mysensors/sensor.py b/homeassistant/components/mysensors/sensor.py index 82e6833f664..eec3c6bcd79 100644 --- a/homeassistant/components/mysensors/sensor.py +++ b/homeassistant/components/mysensors/sensor.py @@ -38,7 +38,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.unit_system import METRIC_SYSTEM -from .. import mysensors +from . import setup_mysensors_platform from .const import ( ATTR_GATEWAY_ID, ATTR_NODE_ID, @@ -49,6 +49,7 @@ from .const import ( DiscoveryInfo, NodeDiscoveryInfo, ) +from .entity import MySensorNodeEntity, MySensorsChildEntity from .helpers import on_unload SENSORS: dict[str, SensorEntityDescription] = { @@ -192,7 +193,7 @@ SENSORS: dict[str, SensorEntityDescription] = { ), "V_EC": SensorEntityDescription( key="V_EC", - native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS, + native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM, ), "V_VAR": SensorEntityDescription( key="V_VAR", @@ -215,7 +216,7 @@ async def async_setup_entry( async def async_discover(discovery_info: DiscoveryInfo) -> None: """Discover and add a MySensors sensor.""" - mysensors.setup_mysensors_platform( + setup_mysensors_platform( hass, Platform.SENSOR, discovery_info, @@ -252,7 +253,7 @@ async def async_setup_entry( ) -class MyBatterySensor(mysensors.device.MySensorNodeEntity, SensorEntity): +class MyBatterySensor(MySensorNodeEntity, SensorEntity): """Battery sensor of MySensors node.""" _attr_device_class = SensorDeviceClass.BATTERY @@ -277,7 +278,7 @@ class MyBatterySensor(mysensors.device.MySensorNodeEntity, SensorEntity): self.async_write_ha_state() -class MySensorsSensor(mysensors.device.MySensorsChildEntity, SensorEntity): +class MySensorsSensor(MySensorsChildEntity, SensorEntity): """Representation of a MySensors Sensor child node.""" _attr_force_update = True diff --git a/homeassistant/components/mysensors/switch.py b/homeassistant/components/mysensors/switch.py index 400ef2c5896..4eabf6374f1 100644 --- a/homeassistant/components/mysensors/switch.py +++ b/homeassistant/components/mysensors/switch.py @@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo, SensorType -from .device import MySensorsChildEntity +from .entity import MySensorsChildEntity from .helpers import on_unload diff --git a/homeassistant/components/mysensors/text.py b/homeassistant/components/mysensors/text.py index 021324d7a67..4edb5ccdbd8 100644 --- a/homeassistant/components/mysensors/text.py +++ b/homeassistant/components/mysensors/text.py @@ -9,9 +9,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import mysensors +from . 
import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo -from .device import MySensorsChildEntity +from .entity import MySensorsChildEntity from .helpers import on_unload @@ -25,7 +25,7 @@ async def async_setup_entry( @callback def async_discover(discovery_info: DiscoveryInfo) -> None: """Discover and add a MySensors text entity.""" - mysensors.setup_mysensors_platform( + setup_mysensors_platform( hass, Platform.TEXT, discovery_info, diff --git a/homeassistant/components/mystrom/binary_sensor.py b/homeassistant/components/mystrom/binary_sensor.py index 17a1da75a96..16772fc7073 100644 --- a/homeassistant/components/mystrom/binary_sensor.py +++ b/homeassistant/components/mystrom/binary_sensor.py @@ -5,7 +5,10 @@ from __future__ import annotations from http import HTTPStatus import logging -from homeassistant.components.binary_sensor import DOMAIN, BinarySensorEntity +from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, + BinarySensorEntity, +) from homeassistant.components.http import KEY_HASS, HomeAssistantView from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -55,9 +58,9 @@ class MyStromView(HomeAssistantView): ) button_id = data[button_action] - entity_id = f"{DOMAIN}.{button_id}_{button_action}" + entity_id = f"{BINARY_SENSOR_DOMAIN}.{button_id}_{button_action}" if entity_id not in self.buttons: - _LOGGER.info( + _LOGGER.debug( "New myStrom button/action detected: %s/%s", button_id, button_action ) self.buttons[entity_id] = MyStromBinarySensor( diff --git a/homeassistant/components/mythicbeastsdns/manifest.json b/homeassistant/components/mythicbeastsdns/manifest.json index ed0b96575c9..a4381c312bc 100644 --- a/homeassistant/components/mythicbeastsdns/manifest.json +++ b/homeassistant/components/mythicbeastsdns/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mythicbeastsdns", "iot_class": "cloud_push", "loggers": ["mbddns"], + "quality_scale": "legacy", "requirements": ["mbddns==0.1.2"] } diff --git a/homeassistant/components/myuplink/__init__.py b/homeassistant/components/myuplink/__init__.py index d801f27817d..5ad114e973e 100644 --- a/homeassistant/components/myuplink/__init__.py +++ b/homeassistant/components/myuplink/__init__.py @@ -3,8 +3,10 @@ from __future__ import annotations from http import HTTPStatus +import logging from aiohttp import ClientError, ClientResponseError +import jwt from myuplink import MyUplinkAPI, get_manufacturer, get_model, get_system_name from homeassistant.config_entries import ConfigEntry @@ -22,6 +24,8 @@ from .api import AsyncConfigEntryAuth from .const import DOMAIN, OAUTH2_SCOPES from .coordinator import MyUplinkDataCoordinator +_LOGGER = logging.getLogger(__name__) + PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.NUMBER, @@ -51,13 +55,25 @@ async def async_setup_entry( await auth.async_get_access_token() except ClientResponseError as err: if err.status in {HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN}: - raise ConfigEntryAuthFailed from err - raise ConfigEntryNotReady from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="config_entry_auth_failed", + ) from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from err except ClientError as err: - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + 
translation_key="config_entry_not_ready", + ) from err if set(config_entry.data["token"]["scope"].split(" ")) != set(OAUTH2_SCOPES): - raise ConfigEntryAuthFailed("Incorrect OAuth2 scope") + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="incorrect_oauth2_scope", + ) # Setup MyUplinkAPI and coordinator for data fetch api = MyUplinkAPI(auth) @@ -73,14 +89,16 @@ async def async_setup_entry( return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: MyUplinkConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @callback def create_devices( - hass: HomeAssistant, config_entry: ConfigEntry, coordinator: MyUplinkDataCoordinator + hass: HomeAssistant, + config_entry: MyUplinkConfigEntry, + coordinator: MyUplinkDataCoordinator, ) -> None: """Update all devices.""" device_registry = dr.async_get(hass) @@ -109,3 +127,27 @@ async def async_remove_config_entry_device( return not device_entry.identifiers.intersection( (DOMAIN, device_id) for device_id in myuplink_data.data.devices ) + + +async def async_migrate_entry( + hass: HomeAssistant, config_entry: MyUplinkConfigEntry +) -> bool: + """Migrate old entry.""" + + # Use sub(ject) from access_token as unique_id + if config_entry.version == 1 and config_entry.minor_version == 1: + token = jwt.decode( + config_entry.data["token"]["access_token"], + options={"verify_signature": False}, + ) + uid = token["sub"] + hass.config_entries.async_update_entry( + config_entry, unique_id=uid, minor_version=2 + ) + _LOGGER.info( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + + return True diff --git a/homeassistant/components/myuplink/api.py b/homeassistant/components/myuplink/api.py index 89a5d0c19b0..32e0ea70193 100644 --- a/homeassistant/components/myuplink/api.py +++ b/homeassistant/components/myuplink/api.py @@ -26,7 +26,6 @@ class AsyncConfigEntryAuth(AbstractAuth): async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) diff --git a/homeassistant/components/myuplink/binary_sensor.py b/homeassistant/components/myuplink/binary_sensor.py index 1478ed9c8b0..d903c7cbfae 100644 --- a/homeassistant/components/myuplink/binary_sensor.py +++ b/homeassistant/components/myuplink/binary_sensor.py @@ -12,10 +12,17 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import F_SERIES from .entity import MyUplinkEntity, MyUplinkSystemEntity -from .helpers import find_matching_platform +from .helpers import find_matching_platform, transform_model_series CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, BinarySensorEntityDescription]] = { + F_SERIES: { + "43161": BinarySensorEntityDescription( + key="elect_add", + translation_key="elect_add", + ), + }, "NIBEF": { "43161": BinarySensorEntityDescription( key="elect_add", @@ -44,6 +51,7 @@ def get_description(device_point: DevicePoint) -> BinarySensorEntityDescription 2. 
Default to None """ prefix, _, _ = device_point.category.partition(" ") + prefix = transform_model_series(prefix) return CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(device_point.parameter_id) @@ -147,7 +155,7 @@ class MyUplinkDeviceBinarySensor(MyUplinkEntity, BinarySensorEntity): self, coordinator: MyUplinkDataCoordinator, device_id: str, - entity_description: BinarySensorEntityDescription | None, + entity_description: BinarySensorEntityDescription, unique_id_suffix: str, ) -> None: """Initialize the binary_sensor.""" @@ -157,8 +165,7 @@ class MyUplinkDeviceBinarySensor(MyUplinkEntity, BinarySensorEntity): unique_id_suffix=unique_id_suffix, ) - if entity_description is not None: - self.entity_description = entity_description + self.entity_description = entity_description @property def is_on(self) -> bool: @@ -177,7 +184,7 @@ class MyUplinkSystemBinarySensor(MyUplinkSystemEntity, BinarySensorEntity): coordinator: MyUplinkDataCoordinator, system_id: str, device_id: str, - entity_description: BinarySensorEntityDescription | None, + entity_description: BinarySensorEntityDescription, unique_id_suffix: str, ) -> None: """Initialize the binary_sensor.""" @@ -188,8 +195,7 @@ class MyUplinkSystemBinarySensor(MyUplinkSystemEntity, BinarySensorEntity): unique_id_suffix=unique_id_suffix, ) - if entity_description is not None: - self.entity_description = entity_description + self.entity_description = entity_description @property def is_on(self) -> bool | None: diff --git a/homeassistant/components/myuplink/config_flow.py b/homeassistant/components/myuplink/config_flow.py index fe31dcc6183..cf0428f59ce 100644 --- a/homeassistant/components/myuplink/config_flow.py +++ b/homeassistant/components/myuplink/config_flow.py @@ -4,7 +4,13 @@ from collections.abc import Mapping import logging from typing import Any -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +import jwt + +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + ConfigFlowResult, +) from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN, OAUTH2_SCOPES @@ -15,10 +21,10 @@ class OAuth2FlowHandler( ): """Config flow to handle myUplink OAuth2 authentication.""" + VERSION = 1 + MINOR_VERSION = 2 DOMAIN = DOMAIN - config_entry_reauth: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -33,9 +39,6 @@ class OAuth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.config_entry_reauth = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -49,11 +52,30 @@ class OAuth2FlowHandler( return await self.async_step_user() + async def async_step_reconfigure( + self, user_input: Mapping[str, Any] | None = None + ) -> ConfigFlowResult: + """User initiated reconfiguration.""" + return await self.async_step_user() + async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: """Create or update the config entry.""" - if self.config_entry_reauth: + + token = jwt.decode( + data["token"]["access_token"], options={"verify_signature": False} + ) + uid = token["sub"] + await self.async_set_unique_id(uid) + + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="account_mismatch") return self.async_update_reload_and_abort( - self.config_entry_reauth, - data=data, + self._get_reauth_entry(), data=data ) + if 
self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="account_mismatch") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), data=data + ) + self._abort_if_unique_id_configured() return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/myuplink/const.py b/homeassistant/components/myuplink/const.py index 3541a8078c3..6fd354a21ec 100644 --- a/homeassistant/components/myuplink/const.py +++ b/homeassistant/components/myuplink/const.py @@ -6,3 +6,5 @@ API_ENDPOINT = "https://api.myuplink.com" OAUTH2_AUTHORIZE = "https://api.myuplink.com/oauth/authorize" OAUTH2_TOKEN = "https://api.myuplink.com/oauth/token" OAUTH2_SCOPES = ["WRITESYSTEM", "READSYSTEM", "offline_access"] + +F_SERIES = "f-series" diff --git a/homeassistant/components/myuplink/helpers.py b/homeassistant/components/myuplink/helpers.py index ac3d2a2d7fa..bd875d8a872 100644 --- a/homeassistant/components/myuplink/helpers.py +++ b/homeassistant/components/myuplink/helpers.py @@ -6,6 +6,8 @@ from homeassistant.components.number import NumberEntityDescription from homeassistant.components.sensor import SensorEntityDescription from homeassistant.const import Platform +from .const import F_SERIES + def find_matching_platform( device_point: DevicePoint, @@ -36,17 +38,99 @@ def find_matching_platform( return Platform.SENSOR +WEEKDAYS = ( + "monday", + "tuesday", + "wednesday", + "thursday", + "friday", + "saturday", + "sunday", +) + +PARAMETER_ID_TO_EXCLUDE_F730 = ( + "40940", + "47007", + "47015", + "47020", + "47021", + "47022", + "47023", + "47024", + "47025", + "47026", + "47027", + "47028", + "47032", + "47050", + "47051", + "47206", + "47209", + "47271", + "47272", + "47273", + "47274", + "47375", + "47376", + "47538", + "47539", + "47635", + "47669", + "47703", + "47737", + "47771", + "47772", + "47805", + "47806", + "47839", + "47840", + "47907", + "47941", + "47975", + "48009", + "48072", + "48442", + "49909", + "50113", +) + +PARAMETER_ID_TO_INCLUDE_SMO20 = ( + "40013", + "40033", + "40940", + "44069", + "44071", + "44073", + "47011", + "47015", + "47028", + "47032", + "47398", + "50004", +) + + def skip_entity(model: str, device_point: DevicePoint) -> bool: """Check if entity should be skipped for this device model.""" if model == "SMO 20": - if len(device_point.smart_home_categories) > 0 or device_point.parameter_id in ( - "40940", - "47011", - "47015", - "47028", - "47032", - "50004", + if ( + len(device_point.smart_home_categories) > 0 + or device_point.parameter_id in PARAMETER_ID_TO_INCLUDE_SMO20 ): return False return True + if model.lower().startswith("f"): + # Entity names containing weekdays are used for advanced scheduling in the + # heat pump and should not be exposed in the integration + if any(d in device_point.parameter_name.lower() for d in WEEKDAYS): + return True + if device_point.parameter_id in PARAMETER_ID_TO_EXCLUDE_F730: + return True return False + + +def transform_model_series(prefix: str) -> str: + """Remap all F-series models.""" + if prefix.lower().startswith("f"): + return F_SERIES + return prefix diff --git a/homeassistant/components/myuplink/manifest.json b/homeassistant/components/myuplink/manifest.json index 0e638a72715..8438d24194c 100644 --- a/homeassistant/components/myuplink/manifest.json +++ b/homeassistant/components/myuplink/manifest.json @@ -6,5 +6,6 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/myuplink", "iot_class": "cloud_polling", + 
"quality_scale": "silver", "requirements": ["myuplink==0.6.0"] } diff --git a/homeassistant/components/myuplink/number.py b/homeassistant/components/myuplink/number.py index 7c63a8ec8a2..e1cbd393947 100644 --- a/homeassistant/components/myuplink/number.py +++ b/homeassistant/components/myuplink/number.py @@ -10,8 +10,9 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import DOMAIN, F_SERIES from .entity import MyUplinkEntity -from .helpers import find_matching_platform, skip_entity +from .helpers import find_matching_platform, skip_entity, transform_model_series DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, NumberEntityDescription] = { "DM": NumberEntityDescription( @@ -22,6 +23,13 @@ DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, NumberEntityDescription] = { } CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, NumberEntityDescription]] = { + F_SERIES: { + "40940": NumberEntityDescription( + key="degree_minutes", + translation_key="degree_minutes", + native_unit_of_measurement="DM", + ), + }, "NIBEF": { "40940": NumberEntityDescription( key="degree_minutes", @@ -41,6 +49,7 @@ def get_description(device_point: DevicePoint) -> NumberEntityDescription | None 3. Default to None """ prefix, _, _ = device_point.category.partition(" ") + prefix = transform_model_series(prefix) description = CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get( device_point.parameter_id ) @@ -101,13 +110,16 @@ class MyUplinkNumber(MyUplinkEntity, NumberEntity): # Internal properties self.point_id = device_point.parameter_id self._attr_name = device_point.parameter_name + _scale = float(device_point.scale_value if device_point.scale_value else 1.0) self._attr_native_min_value = ( - device_point.raw["minValue"] if device_point.raw["minValue"] else -30000 - ) * float(device_point.raw.get("scaleValue", 1)) + device_point.min_value if device_point.min_value else -30000 + ) * _scale self._attr_native_max_value = ( - device_point.raw["maxValue"] if device_point.raw["maxValue"] else 30000 - ) * float(device_point.raw.get("scaleValue", 1)) - self._attr_step_value = device_point.raw.get("stepValue", 20) + device_point.max_value if device_point.max_value else 30000 + ) * _scale + self._attr_native_step = ( + device_point.step_value if device_point.step_value else 1.0 + ) * _scale if entity_description is not None: self.entity_description = entity_description @@ -125,7 +137,13 @@ class MyUplinkNumber(MyUplinkEntity, NumberEntity): ) except ClientError as err: raise HomeAssistantError( - f"Failed to set new value {value} for {self.point_id}/{self.entity_id}" + translation_domain=DOMAIN, + translation_key="set_number_error", + translation_placeholders={ + "entity": self.entity_id, + "point": self.point_id, + "value": str(value), + }, ) from err await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml new file mode 100644 index 00000000000..be0780a206c --- /dev/null +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -0,0 +1,96 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions are defined. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions are defined. 
+ docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + No explicit event subscriptions. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions are defined. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: No configuration parameters + docs-installation-parameters: + status: done + comment: Described in installation instructions + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: exempt + comment: Handled by coordinator + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + Not possible to discover these devices. + discovery: + status: exempt + comment: | + Not possible to discover these devices. + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: + status: done + comment: | + Datapoint names are read from the API metadata and used as entity names in HA. + It is not feasible to use the API names as translation keys as they can change between + firmware and API upgrades and the number of appliance models and firmware releases are huge. + Entity names translations are therefore not implemented for the time being. + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + No repair-issues are raised. + stale-devices: + status: done + comment: | + There is no way for the integration to know if a device is gone temporarily or permanently. User is allowed to delete a stale device from GUI. + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/myuplink/select.py b/homeassistant/components/myuplink/select.py index c0fb66602de..0074d1c75ff 100644 --- a/homeassistant/components/myuplink/select.py +++ b/homeassistant/components/myuplink/select.py @@ -5,13 +5,14 @@ from typing import cast from aiohttp import ClientError from myuplink import DevicePoint -from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.components.select import SelectEntity from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import DOMAIN from .entity import MyUplinkEntity from .helpers import find_matching_platform, skip_entity @@ -30,14 +31,12 @@ async def async_setup_entry( for point_id, device_point in point_data.items(): if skip_entity(device_point.category, device_point): continue - description = None - if find_matching_platform(device_point, description) == Platform.SELECT: + if find_matching_platform(device_point, None) == Platform.SELECT: entities.append( MyUplinkSelect( coordinator=coordinator, device_id=device_id, device_point=device_point, - entity_description=description, unique_id_suffix=point_id, ) ) @@ -53,7 +52,6 @@ class MyUplinkSelect(MyUplinkEntity, SelectEntity): coordinator: MyUplinkDataCoordinator, device_id: str, device_point: DevicePoint, - entity_description: SelectEntityDescription | None, unique_id_suffix: str, ) -> None: """Initialize the select.""" @@ -89,7 +87,13 @@ class MyUplinkSelect(MyUplinkEntity, SelectEntity): ) except ClientError as err: raise HomeAssistantError( - f"Failed to set new option {self.options_rev[option]} for {self.point_id}/{self.entity_id}" + translation_domain=DOMAIN, + translation_key="set_select_error", + translation_placeholders={ + "entity": self.entity_id, + "option": self.options_rev[option], + "point": self.point_id, + }, ) from err await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/myuplink/sensor.py b/homeassistant/components/myuplink/sensor.py index e7c8054e304..ef827fc1fb1 100644 --- a/homeassistant/components/myuplink/sensor.py +++ b/homeassistant/components/myuplink/sensor.py @@ -25,8 +25,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from . 
import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import F_SERIES from .entity import MyUplinkEntity -from .helpers import find_matching_platform, skip_entity +from .helpers import find_matching_platform, skip_entity, transform_model_series DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, SensorEntityDescription] = { "°C": SensorEntityDescription( @@ -139,6 +140,32 @@ DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, SensorEntityDescription] = { MARKER_FOR_UNKNOWN_VALUE = -32768 CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, SensorEntityDescription]] = { + F_SERIES: { + "43108": SensorEntityDescription( + key="fan_mode", + translation_key="fan_mode", + ), + "43427": SensorEntityDescription( + key="status_compressor", + translation_key="status_compressor", + device_class=SensorDeviceClass.ENUM, + ), + "49993": SensorEntityDescription( + key="elect_add", + translation_key="elect_add", + device_class=SensorDeviceClass.ENUM, + ), + "49994": SensorEntityDescription( + key="priority", + translation_key="priority", + device_class=SensorDeviceClass.ENUM, + ), + "50095": SensorEntityDescription( + key="status", + translation_key="status", + device_class=SensorDeviceClass.ENUM, + ), + }, "NIBEF": { "43108": SensorEntityDescription( key="fan_mode", @@ -174,6 +201,7 @@ def get_description(device_point: DevicePoint) -> SensorEntityDescription | None """ description = None prefix, _, _ = device_point.category.partition(" ") + prefix = transform_model_series(prefix) description = CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get( device_point.parameter_id ) diff --git a/homeassistant/components/myuplink/strings.json b/homeassistant/components/myuplink/strings.json index 30cfefe5e18..939aa2f17c8 100644 --- a/homeassistant/components/myuplink/strings.json +++ b/homeassistant/components/myuplink/strings.json @@ -1,6 +1,6 @@ { "application_credentials": { - "description": "Follow the [instructions]({more_info_url}) to give Home Assistant access to your myUplink account. You also need to create application credentials linked to your account:\n1. Go to [Applications at myUplink developer site]({create_creds_url}) and get credentials from an existing application or click **Create New Application**.\n1. Set appropriate Application name and Description\n2. Enter `{callback_url}` as Callback Url\n\n" + "description": "Follow the [instructions]({more_info_url}) to give Home Assistant access to your myUplink account. You also need to create application credentials linked to your account:\n1. Go to [Applications at myUplink developer site]({create_creds_url}) and get credentials from an existing application or select **Create New Application**.\n1. Set appropriate Application name and Description\n1. Enter `{callback_url}` as Callback URL" }, "config": { "step": { @@ -23,6 +23,8 @@ "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "account_mismatch": "The used account does not match the original account", "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]" }, "create_entry": { @@ -34,6 +36,31 @@ "alarm": { "name": "Alarm" } + }, + "sensor": { + "status": { + "name": "Status" + } + } + }, + "exceptions": { + "config_entry_auth_failed": { + "message": "Error while logging in to the API. 
Please check your credentials." + }, + "config_entry_not_ready": { + "message": "Error while loading the integration." + }, + "incorrect_oauth2_scope": { + "message": "Stored permissions are invalid. Please login again to update permissions." + }, + "set_number_error": { + "message": "Failed to set new value {value} for {point}/{entity}." + }, + "set_select_error": { + "message": "Failed to set new option {option} for {point}/{entity}." + }, + "set_switch_error": { + "message": "Failed to set state for {entity}." } } } diff --git a/homeassistant/components/myuplink/switch.py b/homeassistant/components/myuplink/switch.py index 1589701fcbc..3addc7ce6a9 100644 --- a/homeassistant/components/myuplink/switch.py +++ b/homeassistant/components/myuplink/switch.py @@ -12,10 +12,21 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import DOMAIN, F_SERIES from .entity import MyUplinkEntity -from .helpers import find_matching_platform, skip_entity +from .helpers import find_matching_platform, skip_entity, transform_model_series CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, SwitchEntityDescription]] = { + F_SERIES: { + "50004": SwitchEntityDescription( + key="temporary_lux", + translation_key="temporary_lux", + ), + "50005": SwitchEntityDescription( + key="boost_ventilation", + translation_key="boost_ventilation", + ), + }, "NIBEF": { "50004": SwitchEntityDescription( key="temporary_lux", @@ -37,6 +48,7 @@ def get_description(device_point: DevicePoint) -> SwitchEntityDescription | None 2. Default to None """ prefix, _, _ = device_point.category.partition(" ") + prefix = transform_model_series(prefix) return CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(device_point.parameter_id) @@ -117,7 +129,11 @@ class MyUplinkDevicePointSwitch(MyUplinkEntity, SwitchEntity): ) except aiohttp.ClientError as err: raise HomeAssistantError( - f"Failed to set state for {self.entity_id}" + translation_domain=DOMAIN, + translation_key="set_switch_error", + translation_placeholders={ + "entity": self.entity_id, + }, ) from err await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/nad/manifest.json b/homeassistant/components/nad/manifest.json index 2e2d44341af..64c7855af2d 100644 --- a/homeassistant/components/nad/manifest.json +++ b/homeassistant/components/nad/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nad", "iot_class": "local_polling", "loggers": ["nad_receiver"], + "quality_scale": "legacy", "requirements": ["nad-receiver==0.3.0"] } diff --git a/homeassistant/components/nam/config_flow.py b/homeassistant/components/nam/config_flow.py index d3fec1ddbc2..494ce9fdac0 100644 --- a/homeassistant/components/nam/config_flow.py +++ b/homeassistant/components/nam/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Mapping from dataclasses import dataclass import logging -from typing import TYPE_CHECKING, Any +from typing import Any from aiohttp.client_exceptions import ClientConnectorError from nettigo_air_monitor import ( @@ -18,7 +18,7 @@ from nettigo_air_monitor import ( import voluptuous as vol from homeassistant.components import zeroconf -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, 
CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -72,11 +72,8 @@ class NAMFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize flow.""" - self.host: str - self.entry: ConfigEntry - self._config: NamConfig + _config: NamConfig + host: str async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -189,8 +186,6 @@ class NAMFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - if entry := self.hass.config_entries.async_get_entry(self.context["entry_id"]): - self.entry = entry self.host = entry_data[CONF_HOST] self.context["title_placeholders"] = {"host": self.host} return await self.async_step_reauth_confirm() @@ -212,11 +207,9 @@ class NAMFlowHandler(ConfigFlow, domain=DOMAIN): ): return self.async_abort(reason="reauth_unsuccessful") - self.hass.config_entries.async_update_entry( - self.entry, data={**user_input, CONF_HOST: self.host} + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data={**user_input, CONF_HOST: self.host} ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", @@ -226,24 +219,12 @@ class NAMFlowHandler(ConfigFlow, domain=DOMAIN): ) async def async_step_reconfigure( - self, _: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - - if TYPE_CHECKING: - assert entry is not None - - self.host = entry.data[CONF_HOST] - self.entry = entry - - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors = {} + reconfigure_entry = self._get_reconfigure_entry() + self.host = reconfigure_entry.data[CONF_HOST] if user_input is not None: try: @@ -251,21 +232,20 @@ class NAMFlowHandler(ConfigFlow, domain=DOMAIN): except (ApiError, ClientConnectorError, TimeoutError): errors["base"] = "cannot_connect" else: - if format_mac(config.mac_address) != self.entry.unique_id: - return self.async_abort(reason="another_device") + await self.async_set_unique_id(format_mac(config.mac_address)) + self._abort_if_unique_id_mismatch(reason="another_device") - data = {**self.entry.data, CONF_HOST: user_input[CONF_HOST]} - self.hass.config_entries.async_update_entry(self.entry, data=data) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reconfigure_successful") + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates={CONF_HOST: user_input[CONF_HOST]} + ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_HOST, default=self.host): str, } ), - description_placeholders={"device_name": self.entry.title}, + description_placeholders={"device_name": reconfigure_entry.title}, errors=errors, ) diff --git a/homeassistant/components/nam/manifest.json b/homeassistant/components/nam/manifest.json index 7b37d1f7ede..c3a559de50b 100644 --- a/homeassistant/components/nam/manifest.json +++ b/homeassistant/components/nam/manifest.json @@ -7,8 +7,7 @@ "integration_type": "device", 
"iot_class": "local_polling", "loggers": ["nettigo_air_monitor"], - "quality_scale": "platinum", - "requirements": ["nettigo-air-monitor==3.3.0"], + "requirements": ["nettigo-air-monitor==4.0.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/nam/strings.json b/homeassistant/components/nam/strings.json index c4921ec52f9..2caa4d8bd97 100644 --- a/homeassistant/components/nam/strings.json +++ b/homeassistant/components/nam/strings.json @@ -28,7 +28,7 @@ "confirm_discovery": { "description": "Do you want to set up Nettigo Air Monitor at {host}?" }, - "reconfigure_confirm": { + "reconfigure": { "description": "Update configuration for {device_name}.", "data": { "host": "[%key:common::config_flow::data::host%]" diff --git a/homeassistant/components/namecheapdns/manifest.json b/homeassistant/components/namecheapdns/manifest.json index fc9aa3cc033..f97f6568192 100644 --- a/homeassistant/components/namecheapdns/manifest.json +++ b/homeassistant/components/namecheapdns/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/namecheapdns", "iot_class": "cloud_push", + "quality_scale": "legacy", "requirements": ["defusedxml==0.7.1"] } diff --git a/homeassistant/components/nanoleaf/config_flow.py b/homeassistant/components/nanoleaf/config_flow.py index 080b8131b1d..27ef9a887fe 100644 --- a/homeassistant/components/nanoleaf/config_flow.py +++ b/homeassistant/components/nanoleaf/config_flow.py @@ -11,7 +11,7 @@ from aionanoleaf import InvalidToken, Nanoleaf, Unauthorized, Unavailable import voluptuous as vol from homeassistant.components import ssdp, zeroconf -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_TOKEN from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.json import save_json @@ -34,8 +34,6 @@ USER_SCHEMA: Final = vol.Schema( class NanoleafConfigFlow(ConfigFlow, domain=DOMAIN): """Nanoleaf config flow.""" - reauth_entry: ConfigEntry | None = None - nanoleaf: Nanoleaf # For discovery integration import @@ -81,14 +79,10 @@ class NanoleafConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle Nanoleaf reauth flow if token is invalid.""" - self.reauth_entry = cast( - ConfigEntry, - self.hass.config_entries.async_get_entry(self.context["entry_id"]), - ) self.nanoleaf = Nanoleaf( async_get_clientsession(self.hass), entry_data[CONF_HOST] ) - self.context["title_placeholders"] = {"name": self.reauth_entry.title} + self.context["title_placeholders"] = {"name": self._get_reauth_entry().title} return await self.async_step_link() async def async_step_zeroconf( @@ -177,16 +171,11 @@ class NanoleafConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unknown error authorizing Nanoleaf") return self.async_show_form(step_id="link", errors={"base": "unknown"}) - if self.reauth_entry is not None: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data={ - **self.reauth_entry.data, - CONF_TOKEN: self.nanoleaf.auth_token, - }, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_TOKEN: self.nanoleaf.auth_token}, ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return await 
self.async_setup_finish() @@ -215,7 +204,7 @@ class NanoleafConfigFlow(ConfigFlow, domain=DOMAIN): self.discovery_conf.pop(self.nanoleaf.host) if self.device_id in self.discovery_conf: self.discovery_conf.pop(self.device_id) - _LOGGER.info( + _LOGGER.debug( "Successfully imported Nanoleaf %s from the discovery integration", name, ) diff --git a/homeassistant/components/nanoleaf/device_trigger.py b/homeassistant/components/nanoleaf/device_trigger.py index b4049f2199d..28b39e03db7 100644 --- a/homeassistant/components/nanoleaf/device_trigger.py +++ b/homeassistant/components/nanoleaf/device_trigger.py @@ -4,8 +4,10 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import DeviceNotFound +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, + DeviceNotFound, +) from homeassistant.components.homeassistant.triggers import event as event_trigger from homeassistant.const import ( CONF_DEVICE_ID, diff --git a/homeassistant/components/nanoleaf/light.py b/homeassistant/components/nanoleaf/light.py index 19d817b9999..681053fa573 100644 --- a/homeassistant/components/nanoleaf/light.py +++ b/homeassistant/components/nanoleaf/light.py @@ -2,12 +2,11 @@ from __future__ import annotations -import math from typing import Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, @@ -17,10 +16,6 @@ from homeassistant.components.light import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired as kelvin_to_mired, - color_temperature_mired_to_kelvin as mired_to_kelvin, -) from . 
import NanoleafConfigEntry from .coordinator import NanoleafCoordinator @@ -51,10 +46,8 @@ class NanoleafLight(NanoleafEntity, LightEntity): """Initialize the Nanoleaf light.""" super().__init__(coordinator) self._attr_unique_id = self._nanoleaf.serial_no - self._attr_min_mireds = math.ceil( - 1000000 / self._nanoleaf.color_temperature_max - ) - self._attr_max_mireds = kelvin_to_mired(self._nanoleaf.color_temperature_min) + self._attr_max_color_temp_kelvin = self._nanoleaf.color_temperature_max + self._attr_min_color_temp_kelvin = self._nanoleaf.color_temperature_min @property def brightness(self) -> int: @@ -62,9 +55,9 @@ class NanoleafLight(NanoleafEntity, LightEntity): return int(self._nanoleaf.brightness * 2.55) @property - def color_temp(self) -> int: - """Return the current color temperature.""" - return kelvin_to_mired(self._nanoleaf.color_temperature) + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return self._nanoleaf.color_temperature @property def effect(self) -> str | None: @@ -106,7 +99,7 @@ class NanoleafLight(NanoleafEntity, LightEntity): """Instruct the light to turn on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) hs_color = kwargs.get(ATTR_HS_COLOR) - color_temp_mired = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) effect = kwargs.get(ATTR_EFFECT) transition = kwargs.get(ATTR_TRANSITION) @@ -120,10 +113,8 @@ class NanoleafLight(NanoleafEntity, LightEntity): hue, saturation = hs_color await self._nanoleaf.set_hue(int(hue)) await self._nanoleaf.set_saturation(int(saturation)) - elif color_temp_mired: - await self._nanoleaf.set_color_temperature( - mired_to_kelvin(color_temp_mired) - ) + elif color_temp_kelvin: + await self._nanoleaf.set_color_temperature(color_temp_kelvin) if transition: if brightness: # tune to the required brightness in n seconds await self._nanoleaf.set_brightness( diff --git a/homeassistant/components/nanoleaf/strings.json b/homeassistant/components/nanoleaf/strings.json index ef7df8c0ab5..ecc511d658f 100644 --- a/homeassistant/components/nanoleaf/strings.json +++ b/homeassistant/components/nanoleaf/strings.json @@ -12,7 +12,7 @@ }, "link": { "title": "Link Nanoleaf", - "description": "Press and hold the power button on your Nanoleaf for 5 seconds until the button LEDs start flashing, then click **SUBMIT** within 30 seconds." + "description": "Press and hold the power button on your Nanoleaf for 5 seconds until the button LEDs start flashing, then select **Submit** within 30 seconds." 
} }, "error": { diff --git a/homeassistant/components/nasweb/__init__.py b/homeassistant/components/nasweb/__init__.py new file mode 100644 index 00000000000..1992cc41c75 --- /dev/null +++ b/homeassistant/components/nasweb/__init__.py @@ -0,0 +1,125 @@ +"""The NASweb integration.""" + +from __future__ import annotations + +import logging + +from webio_api import WebioAPI +from webio_api.api_client import AuthError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.network import NoURLAvailableError +from homeassistant.util.hass_dict import HassKey + +from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL +from .coordinator import NASwebCoordinator +from .nasweb_data import NASwebData + +PLATFORMS: list[Platform] = [Platform.SWITCH] + +NASWEB_CONFIG_URL = "https://{host}/page" + +_LOGGER = logging.getLogger(__name__) +type NASwebConfigEntry = ConfigEntry[NASwebCoordinator] +DATA_NASWEB: HassKey[NASwebData] = HassKey(DOMAIN) + + +async def async_setup_entry(hass: HomeAssistant, entry: NASwebConfigEntry) -> bool: + """Set up NASweb from a config entry.""" + + if DATA_NASWEB not in hass.data: + data = NASwebData() + data.initialize(hass) + hass.data[DATA_NASWEB] = data + nasweb_data = hass.data[DATA_NASWEB] + + webio_api = WebioAPI( + entry.data[CONF_HOST], entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD] + ) + try: + if not await webio_api.check_connection(): + raise ConfigEntryNotReady( + f"[{entry.data[CONF_HOST]}] Check connection failed" + ) + if not await webio_api.refresh_device_info(): + _LOGGER.error("[%s] Refresh device info failed", entry.data[CONF_HOST]) + raise ConfigEntryError( + translation_key="config_entry_error_internal_error", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + webio_serial = webio_api.get_serial_number() + if webio_serial is None: + _LOGGER.error("[%s] Serial number not available", entry.data[CONF_HOST]) + raise ConfigEntryError( + translation_key="config_entry_error_internal_error", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + if entry.unique_id != webio_serial: + _LOGGER.error( + "[%s] Serial number doesn't match config entry", entry.data[CONF_HOST] + ) + raise ConfigEntryError(translation_key="config_entry_error_serial_mismatch") + + coordinator = NASwebCoordinator( + hass, webio_api, name=f"NASweb[{webio_api.get_name()}]" + ) + entry.runtime_data = coordinator + nasweb_data.notify_coordinator.add_coordinator(webio_serial, entry.runtime_data) + + webhook_url = nasweb_data.get_webhook_url(hass) + if not await webio_api.status_subscription(webhook_url, True): + _LOGGER.error("Failed to subscribe for status updates from webio") + raise ConfigEntryError( + translation_key="config_entry_error_internal_error", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + if not await nasweb_data.notify_coordinator.check_connection(webio_serial): + _LOGGER.error("Did not receive status from device") + raise ConfigEntryError( + translation_key="config_entry_error_no_status_update", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + except TimeoutError as error: + raise ConfigEntryNotReady( + f"[{entry.data[CONF_HOST]}] Check connection reached timeout" + ) from error + except AuthError as error: + raise 
ConfigEntryError( + translation_key="config_entry_error_invalid_authentication" + ) from error + except NoURLAvailableError as error: + raise ConfigEntryError( + translation_key="config_entry_error_missing_internal_url" + ) from error + + device_registry = dr.async_get(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, webio_serial)}, + manufacturer=MANUFACTURER, + name=webio_api.get_name(), + configuration_url=NASWEB_CONFIG_URL.format(host=entry.data[CONF_HOST]), + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: NASwebConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + nasweb_data = hass.data[DATA_NASWEB] + coordinator = entry.runtime_data + serial = entry.unique_id + if serial is not None: + nasweb_data.notify_coordinator.remove_coordinator(serial) + if nasweb_data.can_be_deinitialized(): + nasweb_data.deinitialize(hass) + hass.data.pop(DATA_NASWEB) + webhook_url = nasweb_data.get_webhook_url(hass) + await coordinator.webio_api.status_subscription(webhook_url, False) + + return unload_ok diff --git a/homeassistant/components/nasweb/config_flow.py b/homeassistant/components/nasweb/config_flow.py new file mode 100644 index 00000000000..3a9ad3f7d49 --- /dev/null +++ b/homeassistant/components/nasweb/config_flow.py @@ -0,0 +1,137 @@ +"""Config flow for NASweb integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +import voluptuous as vol +from webio_api import WebioAPI +from webio_api.api_client import AuthError + +from homeassistant import config_entries +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_UNIQUE_ID, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import AbortFlow +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.network import NoURLAvailableError + +from .const import DOMAIN +from .coordinator import NASwebCoordinator +from .nasweb_data import NASwebData + +NASWEB_SCHEMA_IMG_URL = ( + "https://home-assistant.io/images/integrations/nasweb/nasweb_scheme.png" +) + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: + """Validate user-provided data.""" + webio_api = WebioAPI(data[CONF_HOST], data[CONF_USERNAME], data[CONF_PASSWORD]) + if not await webio_api.check_connection(): + raise CannotConnect + try: + await webio_api.refresh_device_info() + except AuthError as e: + raise InvalidAuth from e + + nasweb_data = NASwebData() + nasweb_data.initialize(hass) + try: + webio_serial = webio_api.get_serial_number() + if webio_serial is None: + raise MissingNASwebData("Device serial number is not available") + + coordinator = NASwebCoordinator(hass, webio_api) + webhook_url = nasweb_data.get_webhook_url(hass) + nasweb_data.notify_coordinator.add_coordinator(webio_serial, coordinator) + subscription = await webio_api.status_subscription(webhook_url, True) + if not subscription: + nasweb_data.notify_coordinator.remove_coordinator(webio_serial) + raise MissingNASwebData( + "Failed to subscribe for status updates from device" 
+ ) + + result = await nasweb_data.notify_coordinator.check_connection(webio_serial) + nasweb_data.notify_coordinator.remove_coordinator(webio_serial) + if not result: + if subscription: + await webio_api.status_subscription(webhook_url, False) + raise MissingNASwebStatus("Did not receive status from device") + + name = webio_api.get_name() + finally: + nasweb_data.deinitialize(hass) + return {"title": name, CONF_UNIQUE_ID: webio_serial} + + +class NASwebConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for NASweb.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + try: + info = await validate_input(self.hass, user_input) + await self.async_set_unique_id(info[CONF_UNIQUE_ID]) + self._abort_if_unique_id_configured() + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidAuth: + errors["base"] = "invalid_auth" + except NoURLAvailableError: + errors["base"] = "missing_internal_url" + except MissingNASwebData: + errors["base"] = "missing_nasweb_data" + except MissingNASwebStatus: + errors["base"] = "missing_status" + except AbortFlow: + raise + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry(title=info["title"], data=user_input) + + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, + description_placeholders={ + "nasweb_schema_img": '
', + }, + ) + + +class CannotConnect(HomeAssistantError): + """Error to indicate we cannot connect.""" + + +class InvalidAuth(HomeAssistantError): + """Error to indicate there is invalid auth.""" + + +class MissingNASwebData(HomeAssistantError): + """Error to indicate missing information from NASweb.""" + + +class MissingNASwebStatus(HomeAssistantError): + """Error to indicate there was no status received from NASweb.""" diff --git a/homeassistant/components/nasweb/const.py b/homeassistant/components/nasweb/const.py new file mode 100644 index 00000000000..ec750c90c8c --- /dev/null +++ b/homeassistant/components/nasweb/const.py @@ -0,0 +1,7 @@ +"""Constants for the NASweb integration.""" + +DOMAIN = "nasweb" +MANUFACTURER = "chomtech.pl" +STATUS_UPDATE_MAX_TIME_INTERVAL = 60 +SUPPORT_EMAIL = "support@chomtech.eu" +WEBHOOK_URL = "{internal_url}/api/webhook/{webhook_id}" diff --git a/homeassistant/components/nasweb/coordinator.py b/homeassistant/components/nasweb/coordinator.py new file mode 100644 index 00000000000..90dca0f3022 --- /dev/null +++ b/homeassistant/components/nasweb/coordinator.py @@ -0,0 +1,191 @@ +"""Message routing coordinators for handling NASweb push notifications.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +from datetime import datetime, timedelta +import logging +import time +from typing import Any + +from aiohttp.web import Request, Response +from webio_api import WebioAPI +from webio_api.const import KEY_DEVICE_SERIAL, KEY_OUTPUTS, KEY_TYPE, TYPE_STATUS_UPDATE + +from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback +from homeassistant.helpers import event +from homeassistant.helpers.update_coordinator import BaseDataUpdateCoordinatorProtocol + +from .const import STATUS_UPDATE_MAX_TIME_INTERVAL + +_LOGGER = logging.getLogger(__name__) + + +class NotificationCoordinator: + """Coordinator redirecting push notifications for this integration to appropriate NASwebCoordinator.""" + + def __init__(self) -> None: + """Initialize coordinator.""" + self._coordinators: dict[str, NASwebCoordinator] = {} + + def add_coordinator(self, serial: str, coordinator: NASwebCoordinator) -> None: + """Add NASwebCoordinator to possible notification targets.""" + self._coordinators[serial] = coordinator + _LOGGER.debug("Added NASwebCoordinator for NASweb[%s]", serial) + + def remove_coordinator(self, serial: str) -> None: + """Remove NASwebCoordinator from possible notification targets.""" + self._coordinators.pop(serial) + _LOGGER.debug("Removed NASwebCoordinator for NASweb[%s]", serial) + + def has_coordinators(self) -> bool: + """Check if there is any registered coordinator for push notifications.""" + return len(self._coordinators) > 0 + + async def check_connection(self, serial: str) -> bool: + """Wait for first status update to confirm connection with NASweb.""" + nasweb_coordinator = self._coordinators.get(serial) + if nasweb_coordinator is None: + _LOGGER.error("Cannot check connection. 
No device matches the serial number")
+            return False
+        for counter in range(10):
+            _LOGGER.debug("Checking connection with: %s (%s)", serial, counter)
+            if nasweb_coordinator.is_connection_confirmed():
+                return True
+            await asyncio.sleep(1)
+        return False
+
+    async def handle_webhook_request(
+        self, hass: HomeAssistant, webhook_id: str, request: Request
+    ) -> Response | None:
+        """Handle webhook request from Push API."""
+        if not self.has_coordinators():
+            return None
+        notification = await request.json()
+        serial = notification.get(KEY_DEVICE_SERIAL, None)
+        _LOGGER.debug("Received push: %s", notification)
+        if serial is None:
+            _LOGGER.warning("Received notification without nasweb identifier")
+            return None
+        nasweb_coordinator = self._coordinators.get(serial)
+        if nasweb_coordinator is None:
+            _LOGGER.warning("Received notification for not registered nasweb")
+            return None
+        await nasweb_coordinator.handle_push_notification(notification)
+        return Response(body='{"response": "ok"}', content_type="application/json")
+
+
+class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
+    """Coordinator managing the status of a single NASweb device.
+
+    Since status updates are managed through push notifications, this class schedules
+    periodic checks to ensure that devices are marked unavailable if updates
+    haven't been received for a prolonged period.
+    """
+
+    def __init__(
+        self, hass: HomeAssistant, webio_api: WebioAPI, name: str = "NASweb[default]"
+    ) -> None:
+        """Initialize NASweb coordinator."""
+        self._hass = hass
+        self.name = name
+        self.webio_api = webio_api
+        self._last_update: float | None = None
+        job_name = f"NASwebCoordinator[{name}]"
+        self._job = HassJob(self._handle_max_update_interval, job_name)
+        self._unsub_last_update_check: CALLBACK_TYPE | None = None
+        self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}
+        data: dict[str, Any] = {}
+        data[KEY_OUTPUTS] = self.webio_api.outputs
+        self.async_set_updated_data(data)
+
+    def is_connection_confirmed(self) -> bool:
+        """Check whether the coordinator has received a status update from NASweb."""
+        return self._last_update is not None
+
+    @callback
+    def async_add_listener(
+        self, update_callback: CALLBACK_TYPE, context: Any = None
+    ) -> Callable[[], None]:
+        """Listen for data updates."""
+        schedule_update_check = not self._listeners
+
+        @callback
+        def remove_listener() -> None:
+            """Remove update listener."""
+            self._listeners.pop(remove_listener)
+            if not self._listeners:
+                self._async_unsub_last_update_check()
+
+        self._listeners[remove_listener] = (update_callback, context)
+        # This is the first listener, set up interval.
+        if schedule_update_check:
+            self._schedule_last_update_check()
+        return remove_listener
+
+    @callback
+    def async_set_updated_data(self, data: dict[str, Any]) -> None:
+        """Update data and notify listeners."""
+        self.data = data
+        self.last_update = self._hass.loop.time()
+        _LOGGER.debug("Updated %s data", self.name)
+        if self._listeners:
+            self._schedule_last_update_check()
+        self.async_update_listeners()
+
+    @callback
+    def async_update_listeners(self) -> None:
+        """Update all registered listeners."""
+        for update_callback, _ in list(self._listeners.values()):
+            update_callback()
+
+    async def _handle_max_update_interval(self, now: datetime) -> None:
+        """Handle max update interval occurrence.
+
+        This method is called when `STATUS_UPDATE_MAX_TIME_INTERVAL` has passed without
+        receiving a status update. 
It only needs to trigger state update of entities + which then change their state accordingly. + """ + self._unsub_last_update_check = None + if self._listeners: + self.async_update_listeners() + + def _schedule_last_update_check(self) -> None: + """Schedule a task to trigger entities state update after `STATUS_UPDATE_MAX_TIME_INTERVAL`. + + This method schedules a task (`_handle_max_update_interval`) to be executed after + `STATUS_UPDATE_MAX_TIME_INTERVAL` seconds without status update, which enables entities + to change their state to unavailable. After each status update this task is rescheduled. + """ + self._async_unsub_last_update_check() + now = self._hass.loop.time() + next_check = ( + now + timedelta(seconds=STATUS_UPDATE_MAX_TIME_INTERVAL).total_seconds() + ) + self._unsub_last_update_check = event.async_call_at( + self._hass, + self._job, + next_check, + ) + + def _async_unsub_last_update_check(self) -> None: + """Cancel any scheduled update check call.""" + if self._unsub_last_update_check: + self._unsub_last_update_check() + self._unsub_last_update_check = None + + async def handle_push_notification(self, notification: dict) -> None: + """Handle incoming push notification from NASweb.""" + msg_type = notification.get(KEY_TYPE) + _LOGGER.debug("Received push notification: %s", msg_type) + + if msg_type == TYPE_STATUS_UPDATE: + await self.process_status_update(notification) + self._last_update = time.time() + + async def process_status_update(self, new_status: dict) -> None: + """Process status update from NASweb.""" + self.webio_api.update_device_status(new_status) + new_data = {KEY_OUTPUTS: self.webio_api.outputs} + self.async_set_updated_data(new_data) diff --git a/homeassistant/components/nasweb/manifest.json b/homeassistant/components/nasweb/manifest.json new file mode 100644 index 00000000000..8a4ecdbee84 --- /dev/null +++ b/homeassistant/components/nasweb/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "nasweb", + "name": "NASweb", + "codeowners": ["@nasWebio"], + "config_flow": true, + "dependencies": ["webhook"], + "documentation": "https://www.home-assistant.io/integrations/nasweb", + "integration_type": "hub", + "iot_class": "local_push", + "requirements": ["webio-api==0.1.11"] +} diff --git a/homeassistant/components/nasweb/nasweb_data.py b/homeassistant/components/nasweb/nasweb_data.py new file mode 100644 index 00000000000..4f6a37e6cc7 --- /dev/null +++ b/homeassistant/components/nasweb/nasweb_data.py @@ -0,0 +1,64 @@ +"""Dataclass storing integration data in hass.data[DOMAIN].""" + +from dataclasses import dataclass, field +import logging + +from aiohttp.hdrs import METH_POST + +from homeassistant.components.webhook import ( + async_generate_id, + async_register as webhook_register, + async_unregister as webhook_unregister, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.network import get_url + +from .const import DOMAIN, WEBHOOK_URL +from .coordinator import NotificationCoordinator + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class NASwebData: + """Class storing integration data.""" + + notify_coordinator: NotificationCoordinator = field( + default_factory=NotificationCoordinator + ) + webhook_id = "" + + def is_initialized(self) -> bool: + """Return True if instance was initialized and is ready for use.""" + return bool(self.webhook_id) + + def can_be_deinitialized(self) -> bool: + """Return whether this instance can be deinitialized.""" + return not self.notify_coordinator.has_coordinators() + + def initialize(self, 
hass: HomeAssistant) -> None:
+        """Initialize NASwebData instance."""
+        if self.is_initialized():
+            return
+        new_webhook_id = async_generate_id()
+        webhook_register(
+            hass,
+            DOMAIN,
+            "NASweb",
+            new_webhook_id,
+            self.notify_coordinator.handle_webhook_request,
+            allowed_methods=[METH_POST],
+        )
+        self.webhook_id = new_webhook_id
+        _LOGGER.debug("Registered webhook: %s", self.webhook_id)
+
+    def deinitialize(self, hass: HomeAssistant) -> None:
+        """Deinitialize NASwebData instance."""
+        if not self.is_initialized():
+            return
+        webhook_unregister(hass, self.webhook_id)
+
+    def get_webhook_url(self, hass: HomeAssistant) -> str:
+        """Return webhook URL for Push API."""
+        hass_url = get_url(hass, allow_external=False)
+        return WEBHOOK_URL.format(internal_url=hass_url, webhook_id=self.webhook_id)
diff --git a/homeassistant/components/nasweb/strings.json b/homeassistant/components/nasweb/strings.json
new file mode 100644
index 00000000000..b8af8cd54db
--- /dev/null
+++ b/homeassistant/components/nasweb/strings.json
@@ -0,0 +1,50 @@
+{
+  "config": {
+    "step": {
+      "user": {
+        "title": "Add NASweb device",
+        "description": "{nasweb_schema_img}NASweb combines the functions of a control panel and the ability to manage building automation. The device monitors the flow of information from sensors and programmable switches and stores settings, definitions and configured actions.",
+        "data": {
+          "host": "[%key:common::config_flow::data::host%]",
+          "username": "[%key:common::config_flow::data::username%]",
+          "password": "[%key:common::config_flow::data::password%]"
+        }
+      }
+    },
+    "error": {
+      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
+      "missing_internal_url": "Make sure Home Assistant has a valid internal URL",
+      "missing_nasweb_data": "Something isn't right with the device's internal configuration. Try restarting the device and Home Assistant.",
+      "missing_status": "Did not receive any status updates within the expected time window. Make sure the Home Assistant Internal URL is reachable from the NASweb device.",
+      "unknown": "[%key:common::config_flow::error::unknown%]"
+    },
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+    }
+  },
+  "exceptions": {
+    "config_entry_error_invalid_authentication": {
+      "message": "Invalid username/password. Most likely the user changed the password or was removed. Delete this entry and create a new one with the correct username/password."
+    },
+    "config_entry_error_internal_error": {
+      "message": "Something isn't right with the device's internal configuration. Try restarting the device and Home Assistant. If the issue persists, contact support at {support_email}"
+    },
+    "config_entry_error_no_status_update": {
+      "message": "Did not receive any status updates within the expected time window. Make sure the Home Assistant Internal URL is reachable from the NASweb device. If the issue persists, contact support at {support_email}"
+    },
+    "config_entry_error_missing_internal_url": {
+      "message": "[%key:component::nasweb::config::error::missing_internal_url%]"
+    },
+    "serial_mismatch": {
+      "message": "Connected to a different NASweb device (serial number mismatch)."
+ } + }, + "entity": { + "switch": { + "switch_output": { + "name": "Relay Switch {index}" + } + } + } +} diff --git a/homeassistant/components/nasweb/switch.py b/homeassistant/components/nasweb/switch.py new file mode 100644 index 00000000000..00e5a21da18 --- /dev/null +++ b/homeassistant/components/nasweb/switch.py @@ -0,0 +1,133 @@ +"""Platform for NASweb output.""" + +from __future__ import annotations + +import logging +import time +from typing import Any + +from webio_api import Output as NASwebOutput + +from homeassistant.components.switch import DOMAIN as DOMAIN_SWITCH, SwitchEntity +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.helpers.entity_registry as er +from homeassistant.helpers.typing import DiscoveryInfoType +from homeassistant.helpers.update_coordinator import ( + BaseCoordinatorEntity, + BaseDataUpdateCoordinatorProtocol, +) + +from . import NASwebConfigEntry +from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL +from .coordinator import NASwebCoordinator + +OUTPUT_TRANSLATION_KEY = "switch_output" + +_LOGGER = logging.getLogger(__name__) + + +def _get_output(coordinator: NASwebCoordinator, index: int) -> NASwebOutput | None: + for out in coordinator.webio_api.outputs: + if out.index == index: + return out + return None + + +async def async_setup_entry( + hass: HomeAssistant, + config: NASwebConfigEntry, + async_add_entities: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +) -> None: + """Set up switch platform.""" + coordinator = config.runtime_data + current_outputs: set[int] = set() + + @callback + def _check_entities() -> None: + received_outputs = {out.index for out in coordinator.webio_api.outputs} + added = {i for i in received_outputs if i not in current_outputs} + removed = {i for i in current_outputs if i not in received_outputs} + entities_to_add: list[RelaySwitch] = [] + for index in added: + webio_output = _get_output(coordinator, index) + if not isinstance(webio_output, NASwebOutput): + _LOGGER.error("Cannot create RelaySwitch entity without NASwebOutput") + continue + new_output = RelaySwitch(coordinator, webio_output) + entities_to_add.append(new_output) + current_outputs.add(index) + async_add_entities(entities_to_add) + entity_registry = er.async_get(hass) + for index in removed: + unique_id = f"{DOMAIN}.{config.unique_id}.relay_switch.{index}" + if entity_id := entity_registry.async_get_entity_id( + DOMAIN_SWITCH, DOMAIN, unique_id + ): + entity_registry.async_remove(entity_id) + current_outputs.remove(index) + else: + _LOGGER.warning("Failed to remove old output: no entity_id") + + coordinator.async_add_listener(_check_entities) + _check_entities() + + +class RelaySwitch(SwitchEntity, BaseCoordinatorEntity): + """Entity representing NASweb Output.""" + + def __init__( + self, + coordinator: BaseDataUpdateCoordinatorProtocol, + nasweb_output: NASwebOutput, + ) -> None: + """Initialize RelaySwitch.""" + super().__init__(coordinator) + self._output = nasweb_output + self._attr_icon = "mdi:export" + self._attr_has_entity_name = True + self._attr_translation_key = OUTPUT_TRANSLATION_KEY + self._attr_translation_placeholders = {"index": f"{nasweb_output.index:2d}"} + self._attr_unique_id = ( + f"{DOMAIN}.{self._output.webio_serial}.relay_switch.{self._output.index}" + ) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._output.webio_serial)}, + ) + 
+ async def async_added_to_hass(self) -> None: + """When entity is added to hass.""" + await super().async_added_to_hass() + self._handle_coordinator_update() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._attr_is_on = self._output.state + if ( + self.coordinator.last_update is None + or time.time() - self._output.last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL + ): + self._attr_available = False + else: + self._attr_available = ( + self._output.available if self._output.available is not None else False + ) + self.async_write_ha_state() + + async def async_update(self) -> None: + """Update the entity. + + Only used by the generic entity update service. + Scheduling updates is not necessary, the coordinator takes care of updates via push notifications. + """ + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn On RelaySwitch.""" + await self._output.turn_on() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn Off RelaySwitch.""" + await self._output.turn_off() diff --git a/homeassistant/components/neato/icons.json b/homeassistant/components/neato/icons.json index ca50d5a9bc7..eb18a7e3196 100644 --- a/homeassistant/components/neato/icons.json +++ b/homeassistant/components/neato/icons.json @@ -1,5 +1,7 @@ { "services": { - "custom_cleaning": "mdi:broom" + "custom_cleaning": { + "service": "mdi:broom" + } } } diff --git a/homeassistant/components/neato/manifest.json b/homeassistant/components/neato/manifest.json index d6eff486b05..e4b471cb5ac 100644 --- a/homeassistant/components/neato/manifest.json +++ b/homeassistant/components/neato/manifest.json @@ -1,7 +1,7 @@ { "domain": "neato", "name": "Neato Botvac", - "codeowners": ["@Santobert"], + "codeowners": [], "config_flow": true, "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/neato", diff --git a/homeassistant/components/neato/vacuum.py b/homeassistant/components/neato/vacuum.py index b750b121f58..1a9285964a2 100644 --- a/homeassistant/components/neato/vacuum.py +++ b/homeassistant/components/neato/vacuum.py @@ -12,15 +12,12 @@ import voluptuous as vol from homeassistant.components.vacuum import ( ATTR_STATUS, - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_MODE, STATE_IDLE, STATE_PAUSED +from homeassistant.const import ATTR_MODE from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.device_registry import DeviceInfo @@ -169,23 +166,23 @@ class NeatoConnectedVacuum(NeatoEntity, StateVacuumEntity): robot_alert = None if self._state["state"] == 1: if self._state["details"]["isCharging"]: - self._attr_state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self._status_state = "Charging" elif ( self._state["details"]["isDocked"] and not self._state["details"]["isCharging"] ): - self._attr_state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self._status_state = "Docked" else: - self._attr_state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE self._status_state = "Stopped" if robot_alert is not None: self._status_state = robot_alert elif self._state["state"] == 2: if robot_alert is None: - self._attr_state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING self._status_state = ( 
f"{MODE.get(self._state['cleaning']['mode'])} " f"{ACTION.get(self._state['action'])}" @@ -200,10 +197,10 @@ class NeatoConnectedVacuum(NeatoEntity, StateVacuumEntity): else: self._status_state = robot_alert elif self._state["state"] == 3: - self._attr_state = STATE_PAUSED + self._attr_activity = VacuumActivity.PAUSED self._status_state = "Paused" elif self._state["state"] == 4: - self._attr_state = STATE_ERROR + self._attr_activity = VacuumActivity.ERROR self._status_state = ERRORS.get(self._state["error"]) self._attr_battery_level = self._state["details"]["charge"] @@ -326,9 +323,9 @@ class NeatoConnectedVacuum(NeatoEntity, StateVacuumEntity): def return_to_base(self, **kwargs: Any) -> None: """Set the vacuum cleaner to return to the dock.""" try: - if self._attr_state == STATE_CLEANING: + if self._attr_activity == VacuumActivity.CLEANING: self.robot.pause_cleaning() - self._attr_state = STATE_RETURNING + self._attr_activity = VacuumActivity.RETURNING self.robot.send_to_base() except NeatoRobotException as ex: _LOGGER.error( @@ -376,9 +373,11 @@ class NeatoConnectedVacuum(NeatoEntity, StateVacuumEntity): "Zone '%s' was not found for the robot '%s'", zone, self.entity_id ) return - _LOGGER.info("Start cleaning zone '%s' with robot %s", zone, self.entity_id) + _LOGGER.debug( + "Start cleaning zone '%s' with robot %s", zone, self.entity_id + ) - self._attr_state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING try: self.robot.start_cleaning(mode, navigation, category, boundary_id) except NeatoRobotException as ex: diff --git a/homeassistant/components/nederlandse_spoorwegen/manifest.json b/homeassistant/components/nederlandse_spoorwegen/manifest.json index aa8d0f4adf4..0ef9d8d86f3 100644 --- a/homeassistant/components/nederlandse_spoorwegen/manifest.json +++ b/homeassistant/components/nederlandse_spoorwegen/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@YarmoM"], "documentation": "https://www.home-assistant.io/integrations/nederlandse_spoorwegen", "iot_class": "cloud_polling", - "requirements": ["nsapi==3.0.5"] + "quality_scale": "legacy", + "requirements": ["nsapi==3.1.2"] } diff --git a/homeassistant/components/ness_alarm/alarm_control_panel.py b/homeassistant/components/ness_alarm/alarm_control_panel.py index e44c06ecc85..64b764c6872 100644 --- a/homeassistant/components/ness_alarm/alarm_control_panel.py +++ b/homeassistant/components/ness_alarm/alarm_control_panel.py @@ -9,18 +9,9 @@ from nessclient import ArmingMode, ArmingState, Client from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -31,12 +22,12 @@ from . 
import DATA_NESS, SIGNAL_ARMING_STATE_CHANGED _LOGGER = logging.getLogger(__name__) ARMING_MODE_TO_STATE = { - ArmingMode.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - ArmingMode.ARMED_HOME: STATE_ALARM_ARMED_HOME, - ArmingMode.ARMED_DAY: STATE_ALARM_ARMED_AWAY, # no applicable state, fallback to away - ArmingMode.ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT, - ArmingMode.ARMED_VACATION: STATE_ALARM_ARMED_VACATION, - ArmingMode.ARMED_HIGHEST: STATE_ALARM_ARMED_AWAY, # no applicable state, fallback to away + ArmingMode.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + ArmingMode.ARMED_HOME: AlarmControlPanelState.ARMED_HOME, + ArmingMode.ARMED_DAY: AlarmControlPanelState.ARMED_AWAY, # no applicable state, fallback to away + ArmingMode.ARMED_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + ArmingMode.ARMED_VACATION: AlarmControlPanelState.ARMED_VACATION, + ArmingMode.ARMED_HIGHEST: AlarmControlPanelState.ARMED_AWAY, # no applicable state, fallback to away } @@ -101,19 +92,19 @@ class NessAlarmPanel(AlarmControlPanelEntity): """Handle arming state update.""" if arming_state == ArmingState.UNKNOWN: - self._attr_state = None + self._attr_alarm_state = None elif arming_state == ArmingState.DISARMED: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED elif arming_state in (ArmingState.ARMING, ArmingState.EXIT_DELAY): - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING elif arming_state == ArmingState.ARMED: - self._attr_state = ARMING_MODE_TO_STATE.get( - arming_mode, STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = ARMING_MODE_TO_STATE.get( + arming_mode, AlarmControlPanelState.ARMED_AWAY ) elif arming_state == ArmingState.ENTRY_DELAY: - self._attr_state = STATE_ALARM_PENDING + self._attr_alarm_state = AlarmControlPanelState.PENDING elif arming_state == ArmingState.TRIGGERED: - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED else: _LOGGER.warning("Unhandled arming state: %s", arming_state) diff --git a/homeassistant/components/ness_alarm/icons.json b/homeassistant/components/ness_alarm/icons.json index ea17fd2b299..29d8ae1c8f5 100644 --- a/homeassistant/components/ness_alarm/icons.json +++ b/homeassistant/components/ness_alarm/icons.json @@ -1,6 +1,10 @@ { "services": { - "aux": "mdi:audio-input-stereo-minijack", - "panic": "mdi:fire" + "aux": { + "service": "mdi:audio-input-stereo-minijack" + }, + "panic": { + "service": "mdi:fire" + } } } diff --git a/homeassistant/components/ness_alarm/manifest.json b/homeassistant/components/ness_alarm/manifest.json index e4c5b5fb344..3d97e3290e0 100644 --- a/homeassistant/components/ness_alarm/manifest.json +++ b/homeassistant/components/ness_alarm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ness_alarm", "iot_class": "local_push", "loggers": ["nessclient"], - "requirements": ["nessclient==1.0.0"] + "quality_scale": "legacy", + "requirements": ["nessclient==1.1.2"] } diff --git a/homeassistant/components/nest/__init__.py b/homeassistant/components/nest/__init__.py index da72fdfd53b..0bd2891914f 100644 --- a/homeassistant/components/nest/__init__.py +++ b/homeassistant/components/nest/__init__.py @@ -49,7 +49,6 @@ from homeassistant.helpers import ( config_validation as cv, device_registry as dr, entity_registry as er, - issue_registry as ir, ) from homeassistant.helpers.entity_registry import async_entries_for_device from homeassistant.helpers.typing import ConfigType @@ -59,9 +58,8 @@ 
from .const import ( CONF_PROJECT_ID, CONF_SUBSCRIBER_ID, CONF_SUBSCRIBER_ID_IMPORTED, - DATA_DEVICE_MANAGER, + CONF_SUBSCRIPTION_NAME, DATA_SDM, - DATA_SUBSCRIBER, DOMAIN, ) from .events import EVENT_NAME_MAP, NEST_EVENT @@ -72,6 +70,7 @@ from .media_source import ( async_get_media_source_devices, async_get_transcoder, ) +from .types import NestConfigEntry, NestData _LOGGER = logging.getLogger(__name__) @@ -103,35 +102,18 @@ CONFIG_SCHEMA = vol.Schema( PLATFORMS = [Platform.CAMERA, Platform.CLIMATE, Platform.EVENT, Platform.SENSOR] # Fetch media events with a disk backed cache, with a limit for each camera -# device. The largest media items are mp4 clips at ~120kb each, and we target +# device. The largest media items are mp4 clips at ~450kb each, and we target # ~125MB of storage per camera to try to balance a reasonable user experience # for event history not not filling the disk. -EVENT_MEDIA_CACHE_SIZE = 1024 # number of events +EVENT_MEDIA_CACHE_SIZE = 256 # number of events THUMBNAIL_SIZE_PX = 175 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Nest components with dispatch between old/new flows.""" - hass.data[DOMAIN] = {} - hass.http.register_view(NestEventMediaView(hass)) hass.http.register_view(NestEventMediaThumbnailView(hass)) - - if DOMAIN in config and CONF_PROJECT_ID not in config[DOMAIN]: - ir.async_create_issue( - hass, - DOMAIN, - "legacy_nest_deprecated", - breaks_in_ha_version="2023.8.0", - is_fixable=False, - severity=ir.IssueSeverity.WARNING, - translation_key="legacy_nest_removed", - translation_placeholders={ - "documentation_url": "https://www.home-assistant.io/integrations/nest/", - }, - ) - return False return True @@ -142,12 +124,12 @@ class SignalUpdateCallback: self, hass: HomeAssistant, config_reload_cb: Callable[[], Awaitable[None]], - config_entry_id: str, + config_entry: NestConfigEntry, ) -> None: """Initialize EventCallback.""" self._hass = hass self._config_reload_cb = config_reload_cb - self._config_entry_id = config_entry_id + self._config_entry = config_entry async def async_handle_event(self, event_message: EventMessage) -> None: """Process an incoming EventMessage.""" @@ -166,41 +148,46 @@ class SignalUpdateCallback: ) if not device_entry: return + supported_traits = self._supported_traits(device_id) for api_event_type, image_event in events.items(): if not (event_type := EVENT_NAME_MAP.get(api_event_type)): continue nest_event_id = image_event.event_token - attachment = { - "image": EVENT_THUMBNAIL_URL_FORMAT.format( - device_id=device_entry.id, event_token=image_event.event_token - ), - } - if self._supports_clip(device_id): - attachment["video"] = EVENT_MEDIA_API_URL_FORMAT.format( - device_id=device_entry.id, event_token=image_event.event_token - ) message = { "device_id": device_entry.id, "type": event_type, "timestamp": event_message.timestamp, "nest_event_id": nest_event_id, - "attachment": attachment, } + if ( + TraitType.CAMERA_EVENT_IMAGE in supported_traits + or TraitType.CAMERA_CLIP_PREVIEW in supported_traits + ): + attachment = { + "image": EVENT_THUMBNAIL_URL_FORMAT.format( + device_id=device_entry.id, event_token=image_event.event_token + ) + } + if TraitType.CAMERA_CLIP_PREVIEW in supported_traits: + attachment["video"] = EVENT_MEDIA_API_URL_FORMAT.format( + device_id=device_entry.id, event_token=image_event.event_token + ) + message["attachment"] = attachment if image_event.zones: message["zones"] = image_event.zones self._hass.bus.async_fire(NEST_EVENT, message) - def _supports_clip(self, 
device_id: str) -> bool: - if not ( - device_manager := self._hass.data[DOMAIN] - .get(self._config_entry_id, {}) - .get(DATA_DEVICE_MANAGER) - ) or not (device := device_manager.devices.get(device_id)): - return False - return TraitType.CAMERA_CLIP_PREVIEW in device.traits + def _supported_traits(self, device_id: str) -> list[str]: + if ( + not self._config_entry.runtime_data + or not (device_manager := self._config_entry.runtime_data.device_manager) + or not (device := device_manager.devices.get(device_id)) + ): + return [] + return list(device.traits) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool: """Set up Nest from a config entry with dispatch between old/new flows.""" if DATA_SDM not in entry.data: hass.async_create_task(hass.config_entries.async_remove(entry.entry_id)) @@ -224,7 +211,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_config_reload() -> None: await hass.config_entries.async_reload(entry.entry_id) - update_callback = SignalUpdateCallback(hass, async_config_reload, entry.entry_id) + update_callback = SignalUpdateCallback(hass, async_config_reload, entry) subscriber.set_update_callback(update_callback.async_handle_event) try: await subscriber.start_async() @@ -254,11 +241,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) ) - - hass.data[DOMAIN][entry.entry_id] = { - DATA_SUBSCRIBER: subscriber, - DATA_DEVICE_MANAGER: device_manager, - } + entry.runtime_data = NestData( + subscriber=subscriber, + device_manager=device_manager, + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -271,20 +257,18 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Legacy API return True _LOGGER.debug("Stopping nest subscriber") - subscriber = hass.data[DOMAIN][entry.entry_id][DATA_SUBSCRIBER] + subscriber = entry.runtime_data.subscriber subscriber.stop_async() - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle removal of pubsub subscriptions created during config flow.""" if ( DATA_SDM not in entry.data - or CONF_SUBSCRIBER_ID not in entry.data + or not ( + CONF_SUBSCRIPTION_NAME in entry.data or CONF_SUBSCRIBER_ID in entry.data + ) or CONF_SUBSCRIBER_ID_IMPORTED in entry.data ): return diff --git a/homeassistant/components/nest/api.py b/homeassistant/components/nest/api.py index 3ef26747115..5c65a70c75d 100644 --- a/homeassistant/components/nest/api.py +++ b/homeassistant/components/nest/api.py @@ -8,6 +8,7 @@ from typing import cast from aiohttp import ClientSession from google.oauth2.credentials import Credentials +from google_nest_sdm.admin_client import PUBSUB_API_HOST, AdminClient from google_nest_sdm.auth import AbstractAuth from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber @@ -19,6 +20,7 @@ from .const import ( API_URL, CONF_PROJECT_ID, CONF_SUBSCRIBER_ID, + CONF_SUBSCRIPTION_NAME, OAUTH2_TOKEN, SDM_SCOPES, ) @@ -44,8 +46,7 @@ class AsyncConfigEntryAuth(AbstractAuth): async def async_get_access_token(self) -> str: """Return a valid access token for SDM 
API.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) async def async_get_creds(self) -> Credentials: @@ -81,9 +82,10 @@ class AccessTokenAuthImpl(AbstractAuth): self, websession: ClientSession, access_token: str, + host: str, ) -> None: """Init the Nest client library auth implementation.""" - super().__init__(websession, API_URL) + super().__init__(websession, host) self._access_token = access_token async def async_get_access_token(self) -> str: @@ -112,29 +114,46 @@ async def new_subscriber( implementation, config_entry_oauth2_flow.LocalOAuth2Implementation ): raise TypeError(f"Unexpected auth implementation {implementation}") - if not (subscriber_id := entry.data.get(CONF_SUBSCRIBER_ID)): - raise ValueError("Configuration option 'subscriber_id' missing") + if (subscription_name := entry.data.get(CONF_SUBSCRIPTION_NAME)) is None: + subscription_name = entry.data[CONF_SUBSCRIBER_ID] auth = AsyncConfigEntryAuth( aiohttp_client.async_get_clientsession(hass), config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation), implementation.client_id, implementation.client_secret, ) - return GoogleNestSubscriber(auth, entry.data[CONF_PROJECT_ID], subscriber_id) + return GoogleNestSubscriber(auth, entry.data[CONF_PROJECT_ID], subscription_name) def new_subscriber_with_token( hass: HomeAssistant, access_token: str, project_id: str, - subscriber_id: str, + subscription_name: str, ) -> GoogleNestSubscriber: """Create a GoogleNestSubscriber with an access token.""" return GoogleNestSubscriber( AccessTokenAuthImpl( aiohttp_client.async_get_clientsession(hass), access_token, + API_URL, ), project_id, - subscriber_id, + subscription_name, + ) + + +def new_pubsub_admin_client( + hass: HomeAssistant, + access_token: str, + cloud_project_id: str, +) -> AdminClient: + """Create a Nest AdminClient with an access token.""" + return AdminClient( + auth=AccessTokenAuthImpl( + aiohttp_client.async_get_clientsession(hass), + access_token, + PUBSUB_API_HOST, + ), + cloud_project_id=cloud_project_id, ) diff --git a/homeassistant/components/nest/camera.py b/homeassistant/components/nest/camera.py index e87c9ccbbe7..df02f17444f 100644 --- a/homeassistant/components/nest/camera.py +++ b/homeassistant/components/nest/camera.py @@ -2,35 +2,40 @@ from __future__ import annotations +from abc import ABC import asyncio -from collections.abc import Callable +from collections.abc import Awaitable, Callable import datetime import functools import logging from pathlib import Path -from typing import cast from google_nest_sdm.camera_traits import ( - CameraImageTrait, CameraLiveStreamTrait, RtspStream, StreamingProtocol, + WebRtcStream, ) from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.exceptions import ApiException +from webrtc_models import RTCIceCandidateInit -from homeassistant.components.camera import Camera, CameraEntityFeature, StreamType +from homeassistant.components.camera import ( + Camera, + CameraEntityFeature, + WebRTCAnswer, + WebRTCClientConfiguration, + WebRTCSendMessage, +) from homeassistant.components.stream import CONF_EXTRA_PART_WAIT_TIME -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from 
homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util.dt import utcnow -from .const import DATA_DEVICE_MANAGER, DOMAIN from .device_info import NestDeviceInfo +from .types import NestConfigEntry _LOGGER = logging.getLogger(__name__) @@ -39,28 +44,98 @@ PLACEHOLDER = Path(__file__).parent / "placeholder.png" # Used to schedule an alarm to refresh the stream before expiration STREAM_EXPIRATION_BUFFER = datetime.timedelta(seconds=30) +# Refresh streams with a bounded interval and backoff on failure +MIN_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=1) +MAX_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=10) +BACKOFF_MULTIPLIER = 1.5 + async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: NestConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the cameras.""" - device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ - DATA_DEVICE_MANAGER - ] - async_add_entities( - NestCamera(device) - for device in device_manager.devices.values() - if CameraImageTrait.NAME in device.traits - or CameraLiveStreamTrait.NAME in device.traits - ) + entities: list[NestCameraBaseEntity] = [] + for device in entry.runtime_data.device_manager.devices.values(): + if (live_stream := device.traits.get(CameraLiveStreamTrait.NAME)) is None: + continue + if StreamingProtocol.WEB_RTC in live_stream.supported_protocols: + entities.append(NestWebRTCEntity(device)) + elif StreamingProtocol.RTSP in live_stream.supported_protocols: + entities.append(NestRTSPEntity(device)) + + async_add_entities(entities) -class NestCamera(Camera): +class StreamRefresh: + """Class that will refresh an expiring stream. + + This class will schedule an alarm for the next expiration time of a stream. + When the alarm fires, it runs the provided `refresh_cb` to extend the + lifetime of the stream and return a new expiration time. + + A simple backoff will be applied when the refresh callback fails. 
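To make the backoff concrete: assuming the constants defined above (1 minute minimum, 1.5 multiplier, 10 minute maximum), a standalone sketch of the retry schedule implied when the refresh callback keeps failing:

```python
from datetime import timedelta

# Mirrors the min(interval * multiplier, max) update applied on each failure.
MIN_BACKOFF = timedelta(minutes=1)
MAX_BACKOFF = timedelta(minutes=10)
MULTIPLIER = 1.5

interval = MIN_BACKOFF
for failure in range(1, 8):
    interval = min(interval * MULTIPLIER, MAX_BACKOFF)
    print(failure, interval)
# Retries after roughly 1m30s, 2m15s, 3m23s, 5m04s, 7m36s, then every 10 minutes
# (capped) from the sixth consecutive failure; a successful refresh resets the
# interval back to the 1 minute minimum.
```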
+ """ + + def __init__( + self, + hass: HomeAssistant, + expires_at: datetime.datetime, + refresh_cb: Callable[[], Awaitable[datetime.datetime | None]], + ) -> None: + """Initialize StreamRefresh.""" + self._hass = hass + self._unsub: Callable[[], None] | None = None + self._min_refresh_interval = MIN_REFRESH_BACKOFF_INTERVAL + self._refresh_cb = refresh_cb + self._schedule_stream_refresh(expires_at - STREAM_EXPIRATION_BUFFER) + + def unsub(self) -> None: + """Invalidates the stream.""" + if self._unsub: + self._unsub() + + async def _handle_refresh(self, _: datetime.datetime) -> None: + """Alarm that fires to check if the stream should be refreshed.""" + self._unsub = None + try: + expires_at = await self._refresh_cb() + except ApiException as err: + _LOGGER.debug("Failed to refresh stream: %s", err) + # Increase backoff until the max backoff interval is reached + self._min_refresh_interval = min( + self._min_refresh_interval * BACKOFF_MULTIPLIER, + MAX_REFRESH_BACKOFF_INTERVAL, + ) + refresh_time = utcnow() + self._min_refresh_interval + else: + if expires_at is None: + return + self._min_refresh_interval = MIN_REFRESH_BACKOFF_INTERVAL # Reset backoff + # Defend against invalid stream expiration time in the past + refresh_time = max( + expires_at - STREAM_EXPIRATION_BUFFER, + utcnow() + self._min_refresh_interval, + ) + self._schedule_stream_refresh(refresh_time) + + def _schedule_stream_refresh(self, refresh_time: datetime.datetime) -> None: + """Schedules an alarm to refresh any streams before expiration.""" + _LOGGER.debug("Scheduling stream refresh for %s", refresh_time) + self._unsub = async_track_point_in_utc_time( + self._hass, + self._handle_refresh, + refresh_time, + ) + + +class NestCameraBaseEntity(Camera, ABC): """Devices that support cameras.""" _attr_has_entity_name = True _attr_name = None + _attr_is_streaming = True + _attr_supported_features = CameraEntityFeature.STREAM def __init__(self, device: Device) -> None: """Initialize the camera.""" @@ -70,38 +145,34 @@ class NestCamera(Camera): self._attr_device_info = nest_device_info.device_info self._attr_brand = nest_device_info.device_brand self._attr_model = nest_device_info.device_model - self._stream: RtspStream | None = None - self._create_stream_url_lock = asyncio.Lock() - self._stream_refresh_unsub: Callable[[], None] | None = None - self._attr_is_streaming = False - self._attr_supported_features = CameraEntityFeature(0) - self._rtsp_live_stream_trait: CameraLiveStreamTrait | None = None - if CameraLiveStreamTrait.NAME in self._device.traits: - self._attr_is_streaming = True - self._attr_supported_features |= CameraEntityFeature.STREAM - trait = cast( - CameraLiveStreamTrait, self._device.traits[CameraLiveStreamTrait.NAME] - ) - if StreamingProtocol.RTSP in trait.supported_protocols: - self._rtsp_live_stream_trait = trait self.stream_options[CONF_EXTRA_PART_WAIT_TIME] = 3 # The API "name" field is a unique device identifier. 
self._attr_unique_id = f"{self._device.name}-camera" - @property - def use_stream_for_stills(self) -> bool: - """Whether or not to use stream to generate stills.""" - return self._rtsp_live_stream_trait is not None + async def async_added_to_hass(self) -> None: + """Run when entity is added to register update signal handler.""" + self.async_on_remove( + self._device.add_update_listener(self.async_write_ha_state) + ) + + +class NestRTSPEntity(NestCameraBaseEntity): + """Nest cameras that use RTSP.""" + + _rtsp_stream: RtspStream | None = None + _rtsp_live_stream_trait: CameraLiveStreamTrait + + def __init__(self, device: Device) -> None: + """Initialize the camera.""" + super().__init__(device) + self._create_stream_url_lock = asyncio.Lock() + self._rtsp_live_stream_trait = device.traits[CameraLiveStreamTrait.NAME] + self._refresh_unsub: Callable[[], None] | None = None @property - def frontend_stream_type(self) -> StreamType | None: - """Return the type of stream supported by this camera.""" - if CameraLiveStreamTrait.NAME not in self._device.traits: - return None - trait = self._device.traits[CameraLiveStreamTrait.NAME] - if StreamingProtocol.WEB_RTC in trait.supported_protocols: - return StreamType.WEB_RTC - return super().frontend_stream_type + def use_stream_for_stills(self) -> bool: + """Always use the RTSP stream to generate snapshots.""" + return True @property def available(self) -> bool: @@ -115,83 +186,83 @@ class NestCamera(Camera): async def stream_source(self) -> str | None: """Return the source of the stream.""" - if not self._rtsp_live_stream_trait: - return None async with self._create_stream_url_lock: - if not self._stream: + if not self._rtsp_stream: _LOGGER.debug("Fetching stream url") try: - self._stream = ( + self._rtsp_stream = ( await self._rtsp_live_stream_trait.generate_rtsp_stream() ) except ApiException as err: raise HomeAssistantError(f"Nest API error: {err}") from err - self._schedule_stream_refresh() - assert self._stream - if self._stream.expires_at < utcnow(): + refresh = StreamRefresh( + self.hass, + self._rtsp_stream.expires_at, + self._async_refresh_stream, + ) + self._refresh_unsub = refresh.unsub + assert self._rtsp_stream + if self._rtsp_stream.expires_at < utcnow(): _LOGGER.warning("Stream already expired") - return self._stream.rtsp_stream_url + return self._rtsp_stream.rtsp_stream_url - def _schedule_stream_refresh(self) -> None: - """Schedules an alarm to refresh the stream url before expiration.""" - assert self._stream - _LOGGER.debug("New stream url expires at %s", self._stream.expires_at) - refresh_time = self._stream.expires_at - STREAM_EXPIRATION_BUFFER - # Schedule an alarm to extend the stream - if self._stream_refresh_unsub is not None: - self._stream_refresh_unsub() - - self._stream_refresh_unsub = async_track_point_in_utc_time( - self.hass, - self._handle_stream_refresh, - refresh_time, - ) - - async def _handle_stream_refresh(self, now: datetime.datetime) -> None: - """Alarm that fires to check if the stream should be refreshed.""" - if not self._stream: - return - _LOGGER.debug("Extending stream url") + async def _async_refresh_stream(self) -> datetime.datetime | None: + """Refresh stream to extend expiration time.""" + if not self._rtsp_stream: + return None + _LOGGER.debug("Extending RTSP stream") try: - self._stream = await self._stream.extend_rtsp_stream() + self._rtsp_stream = await self._rtsp_stream.extend_rtsp_stream() except ApiException as err: _LOGGER.debug("Failed to extend stream: %s", err) # Next attempt to catch a url 
will get a new one - self._stream = None + self._rtsp_stream = None if self.stream: await self.stream.stop() self.stream = None - return + return None # Update the stream worker with the latest valid url if self.stream: - self.stream.update_source(self._stream.rtsp_stream_url) - self._schedule_stream_refresh() + self.stream.update_source(self._rtsp_stream.rtsp_stream_url) + return self._rtsp_stream.expires_at async def async_will_remove_from_hass(self) -> None: """Invalidates the RTSP token when unloaded.""" - if self._stream: - _LOGGER.debug("Invalidating stream") + await super().async_will_remove_from_hass() + if self._refresh_unsub is not None: + self._refresh_unsub() + if self._rtsp_stream: try: - await self._stream.stop_rtsp_stream() + await self._rtsp_stream.stop_stream() except ApiException as err: - _LOGGER.debug( - "Failed to revoke stream token, will rely on ttl: %s", err - ) - if self._stream_refresh_unsub: - self._stream_refresh_unsub() + _LOGGER.debug("Error stopping stream: %s", err) + self._rtsp_stream = None - async def async_added_to_hass(self) -> None: - """Run when entity is added to register update signal handler.""" - self.async_on_remove( - self._device.add_update_listener(self.async_write_ha_state) - ) + +class NestWebRTCEntity(NestCameraBaseEntity): + """Nest cameras that use WebRTC.""" + + def __init__(self, device: Device) -> None: + """Initialize the camera.""" + super().__init__(device) + self._webrtc_sessions: dict[str, WebRtcStream] = {} + self._refresh_unsub: dict[str, Callable[[], None]] = {} + + async def _async_refresh_stream(self, session_id: str) -> datetime.datetime | None: + """Refresh stream to extend expiration time.""" + if not (webrtc_stream := self._webrtc_sessions.get(session_id)): + return None + _LOGGER.debug("Extending WebRTC stream %s", webrtc_stream.media_session_id) + webrtc_stream = await webrtc_stream.extend_stream() + if session_id in self._webrtc_sessions: + self._webrtc_sessions[session_id] = webrtc_stream + return webrtc_stream.expires_at + return None async def async_camera_image( self, width: int | None = None, height: int | None = None ) -> bytes | None: - """Return bytes of camera image.""" - # Use the thumbnail from RTSP stream, or a placeholder if stream is - # not supported (e.g. 
WebRTC) as a fallback when 'use_stream_for_stills' if False + """Return a placeholder image for WebRTC cameras that don't support snapshots.""" return await self.hass.async_add_executor_job(self.placeholder_image) @classmethod @@ -200,13 +271,59 @@ class NestCamera(Camera): """Return placeholder image to use when no stream is available.""" return PLACEHOLDER.read_bytes() - async def async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None: + async def async_handle_async_webrtc_offer( + self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: """Return the source of the stream.""" trait: CameraLiveStreamTrait = self._device.traits[CameraLiveStreamTrait.NAME] - if StreamingProtocol.WEB_RTC not in trait.supported_protocols: - return await super().async_handle_web_rtc_offer(offer_sdp) try: stream = await trait.generate_web_rtc_stream(offer_sdp) except ApiException as err: raise HomeAssistantError(f"Nest API error: {err}") from err - return stream.answer_sdp + _LOGGER.debug( + "Started WebRTC session %s, %s", session_id, stream.media_session_id + ) + self._webrtc_sessions[session_id] = stream + send_message(WebRTCAnswer(stream.answer_sdp)) + refresh = StreamRefresh( + self.hass, + stream.expires_at, + functools.partial(self._async_refresh_stream, session_id), + ) + self._refresh_unsub[session_id] = refresh.unsub + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Ignore WebRTC candidates for Nest cloud based cameras.""" + return + + @callback + def close_webrtc_session(self, session_id: str) -> None: + """Close a WebRTC session.""" + if (stream := self._webrtc_sessions.pop(session_id, None)) is not None: + _LOGGER.debug( + "Closing WebRTC session %s, %s", session_id, stream.media_session_id + ) + unsub = self._refresh_unsub.pop(session_id) + unsub() + + async def stop_stream() -> None: + try: + await stream.stop_stream() + except ApiException as err: + _LOGGER.debug("Error stopping stream: %s", err) + + self.hass.async_create_task(stop_stream()) + super().close_webrtc_session(session_id) + + @callback + def _async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration: + """Return the WebRTC client configuration adjustable per integration.""" + return WebRTCClientConfiguration(data_channel="dataSendChannel") + + async def async_will_remove_from_hass(self) -> None: + """Invalidates the RTSP token when unloaded.""" + await super().async_will_remove_from_hass() + for session_id in list(self._webrtc_sessions.keys()): + self.close_webrtc_session(session_id) diff --git a/homeassistant/components/nest/climate.py b/homeassistant/components/nest/climate.py index 03fb641d0e5..d5ad28c2dfd 100644 --- a/homeassistant/components/nest/climate.py +++ b/homeassistant/components/nest/climate.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any, cast from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.device_traits import FanTrait, TemperatureTrait from google_nest_sdm.exceptions import ApiException from google_nest_sdm.thermostat_traits import ( @@ -28,14 +27,13 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import 
AddEntitiesCallback -from .const import DATA_DEVICE_MANAGER, DOMAIN from .device_info import NestDeviceInfo +from .types import NestConfigEntry # Mapping for sdm.devices.traits.ThermostatMode mode field THERMOSTAT_MODE_MAP: dict[str, HVACMode] = { @@ -78,17 +76,13 @@ MIN_TEMP_RANGE = 1.66667 async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: NestConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the client entities.""" - device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ - DATA_DEVICE_MANAGER - ] - async_add_entities( ThermostatEntity(device) - for device in device_manager.devices.values() + for device in entry.runtime_data.device_manager.devices.values() if ThermostatHvacTrait.NAME in device.traits ) @@ -101,7 +95,6 @@ class ThermostatEntity(ClimateEntity): _attr_has_entity_name = True _attr_should_poll = False _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device: Device) -> None: """Initialize ThermostatEntity.""" diff --git a/homeassistant/components/nest/config_flow.py b/homeassistant/components/nest/config_flow.py index 29ae9f6a08e..274e4c288b4 100644 --- a/homeassistant/components/nest/config_flow.py +++ b/homeassistant/components/nest/config_flow.py @@ -12,18 +12,18 @@ from __future__ import annotations from collections.abc import Iterable, Mapping import logging -from typing import Any +from typing import TYPE_CHECKING, Any -from google_nest_sdm.exceptions import ( - ApiException, - AuthException, - ConfigurationException, - SubscriberException, +from google_nest_sdm.admin_client import ( + AdminClient, + EligibleSubscriptions, + EligibleTopics, ) +from google_nest_sdm.exceptions import ApiException from google_nest_sdm.structure import Structure import voluptuous as vol -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.util import get_random_string @@ -31,8 +31,9 @@ from . 
import api from .const import ( CONF_CLOUD_PROJECT_ID, CONF_PROJECT_ID, - CONF_SUBSCRIBER_ID, - DATA_NEST_CONFIG, + CONF_SUBSCRIBER_ID_IMPORTED, + CONF_SUBSCRIPTION_NAME, + CONF_TOPIC_NAME, DATA_SDM, DOMAIN, OAUTH2_AUTHORIZE, @@ -58,7 +59,7 @@ DEVICE_ACCESS_CONSOLE_URL = "https://console.nest.google.com/device-access/" DEVICE_ACCESS_CONSOLE_EDIT_URL = ( "https://console.nest.google.com/device-access/project/{project_id}/information" ) - +CREATE_NEW_SUBSCRIPTION_KEY = "create_new_subscription" _LOGGER = logging.getLogger(__name__) @@ -95,21 +96,9 @@ class NestFlowHandler( self._data: dict[str, Any] = {DATA_SDM: {}} # Possible name to use for config entry based on the Google Home name self._structure_config_title: str | None = None - - def _async_reauth_entry(self) -> ConfigEntry | None: - """Return existing entry for reauth.""" - if self.source != SOURCE_REAUTH or not ( - entry_id := self.context.get("entry_id") - ): - return None - return next( - ( - entry - for entry in self._async_current_entries() - if entry.entry_id == entry_id - ), - None, - ) + self._admin_client: AdminClient | None = None + self._eligible_topics: EligibleTopics | None = None + self._eligible_subscriptions: EligibleSubscriptions | None = None @property def logger(self) -> logging.Logger: @@ -128,8 +117,7 @@ class NestFlowHandler( async def async_generate_authorize_url(self) -> str: """Generate a url for the user to authorize based on user input.""" - config = self.hass.data.get(DOMAIN, {}).get(DATA_NEST_CONFIG, {}) - project_id = self._data.get(CONF_PROJECT_ID, config.get(CONF_PROJECT_ID, "")) + project_id = self._data.get(CONF_PROJECT_ID) query = await super().async_generate_authorize_url() authorize_url = OAUTH2_AUTHORIZE.format(project_id=project_id) return f"{authorize_url}{query}" @@ -138,15 +126,17 @@ class NestFlowHandler( """Complete OAuth setup and finish pubsub or finish.""" _LOGGER.debug("Finishing post-oauth configuration") self._data.update(data) + _LOGGER.debug("self.source=%s", self.source) if self.source == SOURCE_REAUTH: _LOGGER.debug("Skipping Pub/Sub configuration") - return await self.async_step_finish() + return await self._async_finish() return await self.async_step_pubsub() async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" + _LOGGER.debug("async_step_reauth %s", self.source) self._data.update(entry_data) return await self.async_step_reauth_confirm() @@ -253,40 +243,114 @@ class NestFlowHandler( async def async_step_pubsub( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Configure and create Pub/Sub subscriber.""" + """Configure and the pre-requisites to configure Pub/Sub topics and subscriptions.""" data = { **self._data, **(user_input if user_input is not None else {}), } cloud_project_id = data.get(CONF_CLOUD_PROJECT_ID, "").strip() - config = self.hass.data.get(DOMAIN, {}).get(DATA_NEST_CONFIG, {}) - project_id = data.get(CONF_PROJECT_ID, config.get(CONF_PROJECT_ID)) + device_access_project_id = data[CONF_PROJECT_ID] errors: dict[str, str] = {} if cloud_project_id: - # Create the subscriber id and/or verify it already exists. 
Note that - # the existing id is used, and create call below is idempotent - if not (subscriber_id := data.get(CONF_SUBSCRIBER_ID, "")): - subscriber_id = _generate_subscription_id(cloud_project_id) - _LOGGER.debug("Creating subscriber id '%s'", subscriber_id) - subscriber = api.new_subscriber_with_token( - self.hass, - self._data["token"]["access_token"], - project_id, - subscriber_id, + access_token = self._data["token"]["access_token"] + self._admin_client = api.new_pubsub_admin_client( + self.hass, access_token=access_token, cloud_project_id=cloud_project_id ) try: - await subscriber.create_subscription() - except AuthException as err: - _LOGGER.error("Subscriber authentication error: %s", err) - return self.async_abort(reason="invalid_access_token") - except ConfigurationException as err: - _LOGGER.error("Configuration error creating subscription: %s", err) - errors[CONF_CLOUD_PROJECT_ID] = "bad_project_id" - except SubscriberException as err: - _LOGGER.error("Error creating subscription: %s", err) - errors[CONF_CLOUD_PROJECT_ID] = "subscriber_error" + eligible_topics = await self._admin_client.list_eligible_topics( + device_access_project_id=device_access_project_id + ) + except ApiException as err: + _LOGGER.error("Error listing eligible Pub/Sub topics: %s", err) + errors["base"] = "pubsub_api_error" + else: + if not eligible_topics.topic_names: + errors["base"] = "no_pubsub_topics" if not errors: + self._data[CONF_CLOUD_PROJECT_ID] = cloud_project_id + self._eligible_topics = eligible_topics + return await self.async_step_pubsub_topic() + + return self.async_show_form( + step_id="pubsub", + data_schema=vol.Schema( + { + vol.Required(CONF_CLOUD_PROJECT_ID, default=cloud_project_id): str, + } + ), + description_placeholders={ + "url": CLOUD_CONSOLE_URL, + "device_access_console_url": DEVICE_ACCESS_CONSOLE_URL, + "more_info_url": MORE_INFO_URL, + }, + errors=errors, + ) + + async def async_step_pubsub_topic( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Configure and create Pub/Sub topic.""" + if TYPE_CHECKING: + assert self._eligible_topics + if user_input is not None: + self._data.update(user_input) + return await self.async_step_pubsub_subscription() + topics = list(self._eligible_topics.topic_names) + return self.async_show_form( + step_id="pubsub_topic", + data_schema=vol.Schema( + { + vol.Optional(CONF_TOPIC_NAME, default=topics[0]): vol.In(topics), + } + ), + description_placeholders={ + "device_access_console_url": DEVICE_ACCESS_CONSOLE_URL, + "more_info_url": MORE_INFO_URL, + }, + ) + + async def async_step_pubsub_subscription( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Configure and create Pub/Sub subscription.""" + if TYPE_CHECKING: + assert self._admin_client + errors = {} + if user_input is not None: + subscription_name = user_input[CONF_SUBSCRIPTION_NAME] + if subscription_name == CREATE_NEW_SUBSCRIPTION_KEY: + topic_name = self._data[CONF_TOPIC_NAME] + subscription_name = _generate_subscription_id( + self._data[CONF_CLOUD_PROJECT_ID] + ) + _LOGGER.debug( + "Creating subscription %s on topic %s", + subscription_name, + topic_name, + ) + try: + await self._admin_client.create_subscription( + topic_name, + subscription_name, + ) + except ApiException as err: + _LOGGER.error("Error creatingPub/Sub subscription: %s", err) + errors["base"] = "pubsub_api_error" + else: + user_input[CONF_SUBSCRIPTION_NAME] = subscription_name + else: + # The user created this subscription themselves so do not delete when 
removing the integration. + user_input[CONF_SUBSCRIBER_ID_IMPORTED] = True + + if not errors: + self._data.update(user_input) + subscriber = api.new_subscriber_with_token( + self.hass, + self._data["token"]["access_token"], + self._data[CONF_PROJECT_ID], + subscription_name, + ) try: device_manager = await subscriber.async_get_device_manager() except ApiException as err: @@ -296,39 +360,51 @@ class NestFlowHandler( self._structure_config_title = generate_config_title( device_manager.structures.values() ) + return await self._async_finish() - self._data.update( - { - CONF_SUBSCRIBER_ID: subscriber_id, - CONF_CLOUD_PROJECT_ID: cloud_project_id, - } + subscriptions = {} + try: + eligible_subscriptions = ( + await self._admin_client.list_eligible_subscriptions( + expected_topic_name=self._data[CONF_TOPIC_NAME], ) - return await self.async_step_finish() - + ) + except ApiException as err: + _LOGGER.error( + "Error talking to API to list eligible Pub/Sub subscriptions: %s", err + ) + errors["base"] = "pubsub_api_error" + else: + subscriptions.update( + {name: name for name in eligible_subscriptions.subscription_names} + ) + subscriptions[CREATE_NEW_SUBSCRIPTION_KEY] = "Create New" return self.async_show_form( - step_id="pubsub", + step_id="pubsub_subscription", data_schema=vol.Schema( { - vol.Required(CONF_CLOUD_PROJECT_ID, default=cloud_project_id): str, + vol.Optional( + CONF_SUBSCRIPTION_NAME, + default=next(iter(subscriptions)), + ): vol.In(subscriptions), } ), - description_placeholders={"url": CLOUD_CONSOLE_URL}, + description_placeholders={ + "topic": self._data[CONF_TOPIC_NAME], + "more_info_url": MORE_INFO_URL, + }, errors=errors, ) - async def async_step_finish( - self, data: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def _async_finish(self) -> ConfigFlowResult: """Create an entry for the SDM flow.""" _LOGGER.debug("Creating/updating configuration entry") # Update existing config entry when in the reauth flow. 
- if entry := self._async_reauth_entry(): - self.hass.config_entries.async_update_entry( - entry, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self._data, ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") title = self.flow_impl.name if self._structure_config_title: title = self._structure_config_title diff --git a/homeassistant/components/nest/const.py b/homeassistant/components/nest/const.py index 853e778977d..9950d1d5c2a 100644 --- a/homeassistant/components/nest/const.py +++ b/homeassistant/components/nest/const.py @@ -2,15 +2,14 @@ DOMAIN = "nest" DATA_SDM = "sdm" -DATA_SUBSCRIBER = "subscriber" -DATA_DEVICE_MANAGER = "device_manager" -DATA_NEST_CONFIG = "nest_config" WEB_AUTH_DOMAIN = DOMAIN INSTALLED_AUTH_DOMAIN = f"{DOMAIN}.installed" CONF_PROJECT_ID = "project_id" -CONF_SUBSCRIBER_ID = "subscriber_id" +CONF_TOPIC_NAME = "topic_name" +CONF_SUBSCRIPTION_NAME = "subscription_name" +CONF_SUBSCRIBER_ID = "subscriber_id" # Old format CONF_SUBSCRIBER_ID_IMPORTED = "subscriber_id_imported" CONF_CLOUD_PROJECT_ID = "cloud_project_id" diff --git a/homeassistant/components/nest/device_info.py b/homeassistant/components/nest/device_info.py index 33793fe836b..facd429b139 100644 --- a/homeassistant/components/nest/device_info.py +++ b/homeassistant/components/nest/device_info.py @@ -7,11 +7,12 @@ from collections.abc import Mapping from google_nest_sdm.device import Device from google_nest_sdm.device_traits import ConnectivityTrait, InfoTrait +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo -from .const import CONNECTIVITY_TRAIT_OFFLINE, DATA_DEVICE_MANAGER, DOMAIN +from .const import CONNECTIVITY_TRAIT_OFFLINE, DOMAIN DEVICE_TYPE_MAP: dict[str, str] = { "sdm.devices.types.CAMERA": "Camera", @@ -81,14 +82,12 @@ class NestDeviceInfo: @callback def async_nest_devices(hass: HomeAssistant) -> Mapping[str, Device]: """Return a mapping of all nest devices for all config entries.""" - devices = {} - for entry_id in hass.data[DOMAIN]: - if not (device_manager := hass.data[DOMAIN][entry_id].get(DATA_DEVICE_MANAGER)): - continue - devices.update( - {device.name: device for device in device_manager.devices.values()} - ) - return devices + return { + device.name: device + for config_entry in hass.config_entries.async_entries(DOMAIN) + if config_entry.state == ConfigEntryState.LOADED + for device in config_entry.runtime_data.device_manager.devices.values() + } @callback diff --git a/homeassistant/components/nest/device_trigger.py b/homeassistant/components/nest/device_trigger.py index 52c756d6a18..d2d36b6e529 100644 --- a/homeassistant/components/nest/device_trigger.py +++ b/homeassistant/components/nest/device_trigger.py @@ -4,8 +4,8 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event as event_trigger diff --git a/homeassistant/components/nest/diagnostics.py b/homeassistant/components/nest/diagnostics.py index 57ce4291cc6..345e15b0593 100644 --- 
a/homeassistant/components/nest/diagnostics.py +++ b/homeassistant/components/nest/diagnostics.py @@ -5,46 +5,26 @@ from __future__ import annotations from typing import Any from google_nest_sdm import diagnostics -from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.device_traits import InfoTrait from homeassistant.components.camera import diagnostics as camera_diagnostics -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry -from .const import DATA_DEVICE_MANAGER, DATA_SDM, DOMAIN +from .types import NestConfigEntry REDACT_DEVICE_TRAITS = {InfoTrait.NAME} -@callback -def _async_get_nest_devices( - hass: HomeAssistant, config_entry: ConfigEntry -) -> dict[str, Device]: - """Return dict of available devices.""" - if DATA_SDM not in config_entry.data: - return {} - - if ( - config_entry.entry_id not in hass.data[DOMAIN] - or DATA_DEVICE_MANAGER not in hass.data[DOMAIN][config_entry.entry_id] - ): - return {} - - device_manager: DeviceManager = hass.data[DOMAIN][config_entry.entry_id][ - DATA_DEVICE_MANAGER - ] - return device_manager.devices - - async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: NestConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - nest_devices = _async_get_nest_devices(hass, config_entry) - if not nest_devices: + if ( + not hasattr(config_entry, "runtime_data") + or not config_entry.runtime_data + or not (nest_devices := config_entry.runtime_data.device_manager.devices) + ): return {} data: dict[str, Any] = { **diagnostics.get_diagnostics(), @@ -62,11 +42,11 @@ async def async_get_config_entry_diagnostics( async def async_get_device_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: NestConfigEntry, device: DeviceEntry, ) -> dict[str, Any]: """Return diagnostics for a device.""" - nest_devices = _async_get_nest_devices(hass, config_entry) + nest_devices = config_entry.runtime_data.device_manager.devices nest_device_id = next(iter(device.identifiers))[1] nest_device = nest_devices.get(nest_device_id) return nest_device.get_diagnostics() if nest_device else {} diff --git a/homeassistant/components/nest/event.py b/homeassistant/components/nest/event.py index a6d70fe86d5..1a2c0317496 100644 --- a/homeassistant/components/nest/event.py +++ b/homeassistant/components/nest/event.py @@ -4,7 +4,6 @@ from dataclasses import dataclass import logging from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.event import EventMessage, EventType from google_nest_sdm.traits import TraitType @@ -13,11 +12,9 @@ from homeassistant.components.event import ( EventEntity, EventEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_DEVICE_MANAGER, DOMAIN from .device_info import NestDeviceInfo from .events import ( EVENT_CAMERA_MOTION, @@ -26,6 +23,7 @@ from .events import ( EVENT_DOORBELL_CHIME, EVENT_NAME_MAP, ) +from .types import NestConfigEntry _LOGGER = logging.getLogger(__name__) @@ -68,16 +66,12 @@ ENTITY_DESCRIPTIONS = [ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, 
async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: NestConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the sensors.""" - - device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ - DATA_DEVICE_MANAGER - ] async_add_entities( NestTraitEventEntity(desc, device) - for device in device_manager.devices.values() + for device in entry.runtime_data.device_manager.devices.values() for desc in ENTITY_DESCRIPTIONS if any(trait in device.traits for trait in desc.trait_types) ) diff --git a/homeassistant/components/nest/manifest.json b/homeassistant/components/nest/manifest.json index 1b0697f7602..07c34c51568 100644 --- a/homeassistant/components/nest/manifest.json +++ b/homeassistant/components/nest/manifest.json @@ -19,6 +19,5 @@ "documentation": "https://www.home-assistant.io/integrations/nest", "iot_class": "cloud_push", "loggers": ["google_nest_sdm"], - "quality_scale": "platinum", - "requirements": ["google-nest-sdm==5.0.0"] + "requirements": ["google-nest-sdm==6.1.5"] } diff --git a/homeassistant/components/nest/media_source.py b/homeassistant/components/nest/media_source.py index 71501e72552..146b6f2479e 100644 --- a/homeassistant/components/nest/media_source.py +++ b/homeassistant/components/nest/media_source.py @@ -37,12 +37,12 @@ from google_nest_sdm.transcoder import Transcoder from homeassistant.components.ffmpeg import get_ffmpeg_manager from homeassistant.components.media_player import BrowseError, MediaClass, MediaType -from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia, + Unresolvable, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr diff --git a/homeassistant/components/nest/quality_scale.yaml b/homeassistant/components/nest/quality_scale.yaml new file mode 100644 index 00000000000..969ee66059d --- /dev/null +++ b/homeassistant/components/nest/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + config-flow: + status: todo + comment: Some fields are missing a data_description + brands: done + dependency-transparency: done + common-modules: + status: exempt + comment: The integration does not have a base entity or coordinator. + has-entity-name: done + action-setup: + status: exempt + comment: The integration does not register actions. + appropriate-polling: + status: exempt + comment: The integration does not poll. + test-before-configure: + status: todo + comment: | + The integration does a connection test in the configuration flow, however + it does not fail if the user has ipv6 misconfigured. + entity-event-setup: done + unique-config-entry: done + entity-unique-id: done + docs-installation-instructions: done + docs-removal-instructions: todo + test-before-setup: + status: todo + comment: | + The integration does tests on setup, however the most common issues + observed are related to ipv6 misconfigurations and the error messages + are not self explanatory and can be improved. + docs-high-level-description: done + config-flow-test-coverage: + status: todo + comment: | + The integration has full test coverage however it does not yet assert the specific contents of the + unique id of the created entry. Additional tests coverage for combinations of features like + `test_dhcp_discovery_with_creds` would also be useful. 
+ Tests can be improved so that all end in either CREATE_ENTRY or ABORT. + docs-actions: done + runtime-data: done + + # Silver + log-when-unavailable: todo + config-entry-unloading: todo + reauthentication-flow: + status: todo + comment: | + Supports reauthentication, however can be improved to ensure the user does not change accounts + action-exceptions: todo + docs-installation-parameters: todo + integration-owner: todo + parallel-updates: todo + test-coverage: todo + docs-configuration-parameters: todo + entity-unavailable: todo + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/homeassistant/components/nest/sensor.py b/homeassistant/components/nest/sensor.py index edd359619fd..02a0e305813 100644 --- a/homeassistant/components/nest/sensor.py +++ b/homeassistant/components/nest/sensor.py @@ -5,7 +5,6 @@ from __future__ import annotations import logging from google_nest_sdm.device import Device -from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.device_traits import HumidityTrait, TemperatureTrait from homeassistant.components.sensor import ( @@ -13,13 +12,12 @@ from homeassistant.components.sensor import ( SensorEntity, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_DEVICE_MANAGER, DOMAIN from .device_info import NestDeviceInfo +from .types import NestConfigEntry _LOGGER = logging.getLogger(__name__) @@ -33,15 +31,12 @@ DEVICE_TYPE_MAP = { async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: NestConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the sensors.""" - device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ - DATA_DEVICE_MANAGER - ] entities: list[SensorEntity] = [] - for device in device_manager.devices.values(): + for device in entry.runtime_data.device_manager.devices.values(): if TemperatureTrait.NAME in device.traits: entities.append(TemperatureSensor(device)) if HumidityTrait.NAME in device.traits: diff --git a/homeassistant/components/nest/strings.json b/homeassistant/components/nest/strings.json index cd915acfbe5..a31a2856544 100644 --- a/homeassistant/components/nest/strings.json +++ b/homeassistant/components/nest/strings.json @@ -1,12 +1,12 @@ { "application_credentials": { - "description": "Follow the [instructions]({more_info_url}) to configure the Cloud Console:\n\n1. Go to the [OAuth consent screen]({oauth_consent_url}) and configure\n1. Go to [Credentials]({oauth_creds_url}) and click **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web Application** for the Application Type.\n1. Add `{redirect_url}` under *Authorized redirect URI*." 
+ "description": "Follow the [instructions]({more_info_url}) to configure the Cloud Console:\n\n1. Go to the [OAuth consent screen]({oauth_consent_url}) and configure\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web Application** for the Application Type.\n1. Add `{redirect_url}` under *Authorized redirect URI*." }, "config": { "step": { "create_cloud_project": { "title": "Nest: Create and configure Cloud Project", - "description": "The Nest integration allows you to integrate your Nest Thermostats, Cameras, and Doorbells using the Smart Device Management API. The SDM API **requires a US $5** one time setup fee. See documentation for [more info]({more_info_url}).\n\n1. Go to the [Google Cloud Console]({cloud_console_url}).\n1. If this is your first project, click **Create Project** then **New Project**.\n1. Give your Cloud Project a Name and then click **Create**.\n1. Save the Cloud Project ID e.g. *example-project-12345* as you will need it later\n1. Go to API Library for [Smart Device Management API]({sdm_api_url}) and click **Enable**.\n1. Go to API Library for [Cloud Pub/Sub API]({pubsub_api_url}) and click **Enable**.\n\nProceed when your cloud project is set up." + "description": "The Nest integration allows you to integrate your Nest Thermostats, Cameras, and Doorbells using the Smart Device Management API. The SDM API **requires a US $5** one time setup fee. See documentation for [more info]({more_info_url}).\n\n1. Go to the [Google Cloud Console]({cloud_console_url}).\n1. If this is your first project, select **Create Project** then **New Project**.\n1. Give your Cloud Project a Name and then select **Create**.\n1. Save the Cloud Project ID e.g. *example-project-12345* as you will need it later\n1. Go to API Library for [Smart Device Management API]({sdm_api_url}) and select **Enable**.\n1. Go to API Library for [Cloud Pub/Sub API]({pubsub_api_url}) and select **Enable**.\n\nProceed when your cloud project is set up." }, "cloud_project": { "title": "Nest: Enter Cloud Project ID", @@ -17,7 +17,7 @@ }, "device_project": { "title": "Nest: Create a Device Access Project", - "description": "Create a Nest Device Access project which **requires paying Google a US $5 fee** to set up.\n1. Go to the [Device Access Console]({device_access_console_url}), and through the payment flow.\n1. Click on **Create project**\n1. Give your Device Access project a name and click **Next**.\n1. Enter your OAuth Client ID\n1. Enable events by clicking **Enable** and **Create project**.\n\nEnter your Device Access Project ID below ([more info]({more_info_url})).\n", + "description": "Create a Nest Device Access project which **requires paying Google a US $5 fee** to set up.\n1. Go to the [Device Access Console]({device_access_console_url}), and through the payment flow.\n1. Select on **Create project**\n1. Give your Device Access project a name and select **Next**.\n1. Enter your OAuth Client ID\n1. 
Enable events by selecting **Enable** and **Create project**.\n\nEnter your Device Access Project ID below ([more info]({more_info_url})).", "data": { "project_id": "Device Access Project ID" } @@ -26,12 +26,26 @@ "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" }, "pubsub": { - "title": "Configure Google Cloud", - "description": "Visit the [Cloud Console]({url}) to find your Google Cloud Project ID.", + "title": "Configure Google Cloud Pub/Sub", + "description": "Home Assistant uses Cloud Pub/Sub to receive realtime Nest device updates. Nest servers publish updates to a Pub/Sub topic and Home Assistant receives the updates through a Pub/Sub subscription.\n\n1. Visit the [Device Access Console]({device_access_console_url}) and ensure a Pub/Sub topic is configured.\n2. Visit the [Cloud Console]({url}) to find your Google Cloud Project ID and confirm it is correct below.\n3. The next step will attempt to auto-discover Pub/Sub topics and subscriptions.\n\nSee the integration documentation for [more info]({more_info_url}).", "data": { "cloud_project_id": "[%key:component::nest::config::step::cloud_project::data::cloud_project_id%]" } }, + "pubsub_topic": { + "title": "Configure Cloud Pub/Sub topic", + "description": "Nest devices publish updates on a Cloud Pub/Sub topic. Select the Pub/Sub topic below that matches the one configured in the [Device Access Console]({device_access_console_url}). See the integration documentation for [more info]({more_info_url}).", + "data": { + "topic_name": "Pub/Sub topic name" + } + }, + "pubsub_subscription": { + "title": "Configure Cloud Pub/Sub subscription", + "description": "Home Assistant receives realtime Nest device updates with a Cloud Pub/Sub subscription for topic `{topic}`.\n\nSelect an existing subscription below if one already exists, or the next step will create a new one for you. 
See the integration documentation for [more info]({more_info_url}).", + "data": { + "subscription_name": "Pub/Sub subscription name" + } + }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", "description": "The Nest integration needs to re-authenticate your account" @@ -40,11 +54,14 @@ "error": { "bad_project_id": "Please enter a valid Cloud Project ID (check Cloud Console)", "wrong_project_id": "Please enter a valid Cloud Project ID (was same as Device Access Project ID)", - "subscriber_error": "Unknown subscriber error, see logs" + "subscriber_error": "Unknown subscriber error, see logs", + "no_pubsub_topics": "No eligible Pub/Sub topics found, please ensure the Device Access Console has a Pub/Sub topic.", + "pubsub_api_error": "Unknown error talking to Cloud Pub/Sub, see logs" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", "unknown_authorize_url_generation": "[%key:common::config_flow::abort::unknown_authorize_url_generation%]", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", @@ -67,12 +84,6 @@ "doorbell_chime": "Doorbell pressed" } }, - "issues": { - "legacy_nest_removed": { - "title": "Legacy Works With Nest has been removed", - "description": "Legacy Works With Nest has been removed from Home Assistant, and the API shuts down as of September 2023.\n\nYou must take action to use the SDM API. Remove all `nest` configuration from `configuration.yaml` and restart Home Assistant, then see the Nest [integration instructions]({documentation_url}) for set up instructions and supported devices." 
- } - }, "entity": { "event": { "chime": { diff --git a/homeassistant/components/nest/types.py b/homeassistant/components/nest/types.py new file mode 100644 index 00000000000..bd6cd5cd887 --- /dev/null +++ b/homeassistant/components/nest/types.py @@ -0,0 +1,19 @@ +"""Type definitions for Nest.""" + +from dataclasses import dataclass + +from google_nest_sdm.device_manager import DeviceManager +from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber + +from homeassistant.config_entries import ConfigEntry + + +@dataclass +class NestData: + """Data for the Nest integration.""" + + subscriber: GoogleNestSubscriber + device_manager: DeviceManager + + +type NestConfigEntry = ConfigEntry[NestData] diff --git a/homeassistant/components/netatmo/__init__.py b/homeassistant/components/netatmo/__init__.py index f402009e13b..6f14c9c76bb 100644 --- a/homeassistant/components/netatmo/__init__.py +++ b/homeassistant/components/netatmo/__init__.py @@ -164,7 +164,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: await hass.data[DOMAIN][entry.entry_id][AUTH].async_addwebhook(webhook_url) - _LOGGER.info("Register Netatmo webhook: %s", webhook_url) + _LOGGER.debug("Register Netatmo webhook: %s", webhook_url) except pyatmo.ApiError as err: _LOGGER.error("Error during webhook registration - %s", err) else: @@ -224,7 +224,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await data[entry.entry_id][AUTH].async_dropwebhook() except pyatmo.ApiError: _LOGGER.debug("No webhook to be dropped") - _LOGGER.info("Unregister Netatmo webhook") + _LOGGER.debug("Unregister Netatmo webhook") unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/netatmo/api.py b/homeassistant/components/netatmo/api.py index f5fe591bfbf..f01436a45d5 100644 --- a/homeassistant/components/netatmo/api.py +++ b/homeassistant/components/netatmo/api.py @@ -40,6 +40,5 @@ class AsyncConfigEntryNetatmoAuth(pyatmo.AbstractAsyncAuth): async def async_get_access_token(self) -> str: """Return a valid access token for Netatmo API.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) diff --git a/homeassistant/components/netatmo/climate.py b/homeassistant/components/netatmo/climate.py index c2953b9d49d..02c955beac3 100644 --- a/homeassistant/components/netatmo/climate.py +++ b/homeassistant/components/netatmo/climate.py @@ -58,9 +58,9 @@ from .entity import NetatmoRoomEntity _LOGGER = logging.getLogger(__name__) -PRESET_FROST_GUARD = "Frost Guard" -PRESET_SCHEDULE = "Schedule" -PRESET_MANUAL = "Manual" +PRESET_FROST_GUARD = "frost_guard" +PRESET_SCHEDULE = "schedule" +PRESET_MANUAL = "manual" SUPPORT_FLAGS = ( ClimateEntityFeature.TARGET_TEMPERATURE @@ -188,10 +188,10 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity): _attr_supported_features = SUPPORT_FLAGS _attr_target_temperature_step = PRECISION_HALVES _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = "thermostat" _attr_name = None _away: bool | None = None _connected: bool | None = None - _enable_turn_on_off_backwards_compatibility = False _away_temperature: float | None = None _hg_temperature: float | None = None diff --git a/homeassistant/components/netatmo/config_flow.py b/homeassistant/components/netatmo/config_flow.py index 0da4d6f16b7..d853694ffea 100644 
--- a/homeassistant/components/netatmo/config_flow.py +++ b/homeassistant/components/netatmo/config_flow.py @@ -101,7 +101,6 @@ class NetatmoOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Netatmo options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) self.options.setdefault(CONF_WEATHER_AREAS, {}) diff --git a/homeassistant/components/netatmo/data_handler.py b/homeassistant/components/netatmo/data_handler.py index a4c4dbfa21d..3a28c3b8336 100644 --- a/homeassistant/components/netatmo/data_handler.py +++ b/homeassistant/components/netatmo/data_handler.py @@ -215,11 +215,11 @@ class NetatmoDataHandler: async def handle_event(self, event: dict) -> None: """Handle webhook events.""" if event["data"][WEBHOOK_PUSH_TYPE] == WEBHOOK_ACTIVATION: - _LOGGER.info("%s webhook successfully registered", MANUFACTURER) + _LOGGER.debug("%s webhook successfully registered", MANUFACTURER) self._webhook = True elif event["data"][WEBHOOK_PUSH_TYPE] == WEBHOOK_DEACTIVATION: - _LOGGER.info("%s webhook unregistered", MANUFACTURER) + _LOGGER.debug("%s webhook unregistered", MANUFACTURER) self._webhook = False elif event["data"][WEBHOOK_PUSH_TYPE] == WEBHOOK_NACAMERA_CONNECTION: diff --git a/homeassistant/components/netatmo/device_trigger.py b/homeassistant/components/netatmo/device_trigger.py index 686df2ef2cb..2673ebf8e05 100644 --- a/homeassistant/components/netatmo/device_trigger.py +++ b/homeassistant/components/netatmo/device_trigger.py @@ -4,8 +4,8 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event as event_trigger diff --git a/homeassistant/components/netatmo/fan.py b/homeassistant/components/netatmo/fan.py index 8610882a453..71a8c548622 100644 --- a/homeassistant/components/netatmo/fan.py +++ b/homeassistant/components/netatmo/fan.py @@ -51,7 +51,6 @@ class NetatmoFan(NetatmoModuleEntity, FanEntity): _attr_configuration_url = CONF_URL_CONTROL _attr_name = None device: NaModules.Fan - _enable_turn_on_off_backwards_compatibility = False def __init__(self, netatmo_device: NetatmoDevice) -> None: """Initialize of Netatmo fan.""" diff --git a/homeassistant/components/netatmo/icons.json b/homeassistant/components/netatmo/icons.json index 31b1740ab21..9f712e08f33 100644 --- a/homeassistant/components/netatmo/icons.json +++ b/homeassistant/components/netatmo/icons.json @@ -1,5 +1,18 @@ { "entity": { + "climate": { + "thermostat": { + "state_attributes": { + "preset_mode": { + "state": { + "frost_guard": "mdi:snowflake-thermometer", + "schedule": "mdi:clock-outline", + "manual": "mdi:gesture-tap" + } + } + } + } + }, "sensor": { "temp_trend": { "default": "mdi:trending-up" @@ -34,15 +47,35 @@ } }, "services": { - "set_camera_light": "mdi:led-on", - "set_schedule": "mdi:calendar-clock", - "set_preset_mode_with_end_datetime": "mdi:calendar-clock", - "set_temperature_with_end_datetime": "mdi:thermometer", - "set_temperature_with_time_period": "mdi:thermometer", - "clear_temperature_setting": "mdi:thermometer", - "set_persons_home": "mdi:home", - "set_person_away": "mdi:walk", - "register_webhook": "mdi:link-variant", - "unregister_webhook": "mdi:link-variant-off" + 
"set_camera_light": { + "service": "mdi:led-on" + }, + "set_schedule": { + "service": "mdi:calendar-clock" + }, + "set_preset_mode_with_end_datetime": { + "service": "mdi:calendar-clock" + }, + "set_temperature_with_end_datetime": { + "service": "mdi:thermometer" + }, + "set_temperature_with_time_period": { + "service": "mdi:thermometer" + }, + "clear_temperature_setting": { + "service": "mdi:thermometer" + }, + "set_persons_home": { + "service": "mdi:home" + }, + "set_person_away": { + "service": "mdi:walk" + }, + "register_webhook": { + "service": "mdi:link-variant" + }, + "unregister_webhook": { + "service": "mdi:link-variant-off" + } } } diff --git a/homeassistant/components/netatmo/light.py b/homeassistant/components/netatmo/light.py index b1871e9dabb..fe30dc0eaa4 100644 --- a/homeassistant/components/netatmo/light.py +++ b/homeassistant/components/netatmo/light.py @@ -173,7 +173,9 @@ class NetatmoLight(NetatmoModuleEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn light on.""" if ATTR_BRIGHTNESS in kwargs: - await self.device.async_set_brightness(kwargs[ATTR_BRIGHTNESS]) + await self.device.async_set_brightness( + round(kwargs[ATTR_BRIGHTNESS] / 2.55) + ) else: await self.device.async_on() @@ -194,6 +196,6 @@ class NetatmoLight(NetatmoModuleEntity, LightEntity): if (brightness := self.device.brightness) is not None: # Netatmo uses a range of [0, 100] to control brightness - self._attr_brightness = round((brightness / 100) * 255) + self._attr_brightness = round(brightness * 2.55) else: self._attr_brightness = None diff --git a/homeassistant/components/netatmo/manifest.json b/homeassistant/components/netatmo/manifest.json index 98734bcb742..0a32777b527 100644 --- a/homeassistant/components/netatmo/manifest.json +++ b/homeassistant/components/netatmo/manifest.json @@ -12,5 +12,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["pyatmo"], - "requirements": ["pyatmo==8.0.3"] + "requirements": ["pyatmo==8.1.0"] } diff --git a/homeassistant/components/netatmo/media_source.py b/homeassistant/components/netatmo/media_source.py index 7ad4acf5316..f92214c90f5 100644 --- a/homeassistant/components/netatmo/media_source.py +++ b/homeassistant/components/netatmo/media_source.py @@ -7,12 +7,13 @@ import logging import re from homeassistant.components.media_player import BrowseError, MediaClass, MediaType -from homeassistant.components.media_source.error import MediaSourceError, Unresolvable -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, + MediaSourceError, MediaSourceItem, PlayMedia, + Unresolvable, ) from homeassistant.core import HomeAssistant, callback diff --git a/homeassistant/components/netatmo/select.py b/homeassistant/components/netatmo/select.py index 3fe098a75a9..92568b73e80 100644 --- a/homeassistant/components/netatmo/select.py +++ b/homeassistant/components/netatmo/select.py @@ -72,7 +72,7 @@ class NetatmoScheduleSelect(NetatmoBaseEntity, SelectEntity): self._attr_current_option = getattr(self.home.get_selected_schedule(), "name") self._attr_options = [ - schedule.name for schedule in self.home.schedules.values() + schedule.name for schedule in self.home.schedules.values() if schedule.name ] async def async_added_to_hass(self) -> None: @@ -128,5 +128,5 @@ class NetatmoScheduleSelect(NetatmoBaseEntity, SelectEntity): self.home.schedules ) self._attr_options = [ - schedule.name for schedule in self.home.schedules.values() + 
schedule.name for schedule in self.home.schedules.values() if schedule.name ] diff --git a/homeassistant/components/netatmo/strings.json b/homeassistant/components/netatmo/strings.json index 3c360634147..6b91aa204b2 100644 --- a/homeassistant/components/netatmo/strings.json +++ b/homeassistant/components/netatmo/strings.json @@ -168,6 +168,19 @@ } }, "entity": { + "climate": { + "thermostat": { + "state_attributes": { + "preset_mode": { + "state": { + "frost_guard": "Frost guard", + "schedule": "Schedule", + "manual": "Manual" + } + } + } + } + }, "sensor": { "temp_trend": { "name": "Temperature trend" diff --git a/homeassistant/components/netdata/manifest.json b/homeassistant/components/netdata/manifest.json index 99410ce033d..8901a271de2 100644 --- a/homeassistant/components/netdata/manifest.json +++ b/homeassistant/components/netdata/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/netdata", "iot_class": "local_polling", "loggers": ["netdata"], - "requirements": ["netdata==1.1.0"] + "quality_scale": "legacy", + "requirements": ["netdata==1.3.0"] } diff --git a/homeassistant/components/netdata/sensor.py b/homeassistant/components/netdata/sensor.py index b77a4392ef4..f33349c56ce 100644 --- a/homeassistant/components/netdata/sensor.py +++ b/homeassistant/components/netdata/sensor.py @@ -24,6 +24,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType _LOGGER = logging.getLogger(__name__) @@ -70,7 +71,9 @@ async def async_setup_platform( port = config[CONF_PORT] resources = config[CONF_RESOURCES] - netdata = NetdataData(Netdata(host, port=port, timeout=20.0)) + netdata = NetdataData( + Netdata(host, port=port, timeout=20.0, httpx_client=get_async_client(hass)) + ) await netdata.async_update() if netdata.api.metrics is None: diff --git a/homeassistant/components/netgear/__init__.py b/homeassistant/components/netgear/__init__.py index 445453ad2aa..fa18c3510ba 100644 --- a/homeassistant/components/netgear/__init__.py +++ b/homeassistant/components/netgear/__init__.py @@ -48,7 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if port != router.port or ssl != router.ssl: data = {**entry.data, CONF_PORT: router.port, CONF_SSL: router.ssl} hass.config_entries.async_update_entry(entry, data=data) - _LOGGER.info( + _LOGGER.warning( ( "Netgear port-SSL combination updated from (%i, %r) to (%i, %r), " "this should only occur after a firmware update" @@ -93,6 +93,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Devices", update_method=async_update_devices, update_interval=SCAN_INTERVAL, @@ -100,6 +101,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator_traffic_meter = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Traffic meter", update_method=async_update_traffic_meter, update_interval=SCAN_INTERVAL, @@ -107,6 +109,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator_speed_test = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} 
Speed test", update_method=async_update_speed_test, update_interval=SPEED_TEST_INTERVAL, @@ -114,6 +117,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator_firmware = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Firmware", update_method=async_check_firmware, update_interval=SCAN_INTERVAL_FIRMWARE, @@ -121,6 +125,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator_utilization = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Utilization", update_method=async_update_utilization, update_interval=SCAN_INTERVAL, @@ -128,6 +133,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator_link = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Ethernet Link Status", update_method=async_check_link_status, update_interval=SCAN_INTERVAL, diff --git a/homeassistant/components/netgear/config_flow.py b/homeassistant/components/netgear/config_flow.py index 55112c6662c..965e3618645 100644 --- a/homeassistant/components/netgear/config_flow.py +++ b/homeassistant/components/netgear/config_flow.py @@ -63,11 +63,9 @@ def _ordered_shared_schema(schema_input): class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, int] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) @@ -107,9 +105,13 @@ class NetgearFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() - async def _show_setup_form(self, user_input=None, errors=None): + async def _show_setup_form( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Show the setup form to the user.""" if not user_input: user_input = {} diff --git a/homeassistant/components/netgear/device_tracker.py b/homeassistant/components/netgear/device_tracker.py index ee3d010e443..b17430d2abb 100644 --- a/homeassistant/components/netgear/device_tracker.py +++ b/homeassistant/components/netgear/device_tracker.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging -from homeassistant.components.device_tracker import ScannerEntity, SourceType +from homeassistant.components.device_tracker import ScannerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -81,11 +81,6 @@ class NetgearScannerEntity(NetgearDeviceEntity, ScannerEntity): """Return true if the device is connected to the router.""" return self._active - @property - def source_type(self) -> SourceType: - """Return the source type.""" - return SourceType.ROUTER - @property def ip_address(self) -> str: """Return the IP address.""" diff --git a/homeassistant/components/netgear_lte/icons.json b/homeassistant/components/netgear_lte/icons.json index 543d9bf4690..703d330512b 100644 --- a/homeassistant/components/netgear_lte/icons.json +++ b/homeassistant/components/netgear_lte/icons.json @@ -31,9 +31,17 @@ } }, 
"services": { - "delete_sms": "mdi:delete", - "set_option": "mdi:cog", - "connect_lte": "mdi:wifi", - "disconnect_lte": "mdi:wifi-off" + "delete_sms": { + "service": "mdi:delete" + }, + "set_option": { + "service": "mdi:cog" + }, + "connect_lte": { + "service": "mdi:wifi" + }, + "disconnect_lte": { + "service": "mdi:wifi-off" + } } } diff --git a/homeassistant/components/netio/manifest.json b/homeassistant/components/netio/manifest.json index 683df22e1ff..f2914b17dec 100644 --- a/homeassistant/components/netio/manifest.json +++ b/homeassistant/components/netio/manifest.json @@ -5,5 +5,6 @@ "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/netio", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pynetio==0.1.9.1"] } diff --git a/homeassistant/components/netio/switch.py b/homeassistant/components/netio/switch.py index 54bfef5e1da..5c2b93bcae7 100644 --- a/homeassistant/components/netio/switch.py +++ b/homeassistant/components/netio/switch.py @@ -109,7 +109,7 @@ class NetioApiView(HomeAssistantView): states, consumptions, cumulated_consumptions, start_dates = [], [], [], [] for i in range(1, 5): - out = "output%d" % i + out = f"output{i}" states.append(data.get(f"{out}_state") == STATE_ON) consumptions.append(float(data.get(f"{out}_consumption", 0))) cumulated_consumptions.append( @@ -168,7 +168,8 @@ class NetioSwitch(SwitchEntity): def _set(self, value): val = list("uuuu") val[int(self.outlet) - 1] = "1" if value else "0" - self.netio.get("port list {}".format("".join(val))) + val = "".join(val) + self.netio.get(f"port list {val}") self.netio.states[int(self.outlet) - 1] = value self.schedule_update_ha_state() diff --git a/homeassistant/components/network/websocket.py b/homeassistant/components/network/websocket.py index 78626b893e4..22f7dc23f1e 100644 --- a/homeassistant/components/network/websocket.py +++ b/homeassistant/components/network/websocket.py @@ -2,13 +2,15 @@ from __future__ import annotations +from contextlib import suppress from typing import Any import voluptuous as vol from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.network import NoURLAvailableError, get_url from .const import ATTR_ADAPTERS, ATTR_CONFIGURED_ADAPTERS, NETWORK_CONFIG_SCHEMA from .network import async_get_network @@ -19,6 +21,7 @@ def async_register_websocket_commands(hass: HomeAssistant) -> None: """Register network websocket commands.""" websocket_api.async_register_command(hass, websocket_network_adapters) websocket_api.async_register_command(hass, websocket_network_adapters_configure) + websocket_api.async_register_command(hass, websocket_network_url) @websocket_api.require_admin @@ -62,3 +65,40 @@ async def websocket_network_adapters_configure( msg["id"], {ATTR_CONFIGURED_ADAPTERS: network.configured_adapters}, ) + + +@callback +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "network/url", + } +) +def websocket_network_url( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get the internal, external, and cloud URLs.""" + internal_url = None + external_url = None + cloud_url = None + with suppress(NoURLAvailableError): + internal_url = get_url( + hass, allow_internal=True, allow_external=False, allow_cloud=False + ) + with 
suppress(NoURLAvailableError): + external_url = get_url( + hass, allow_internal=False, allow_external=True, prefer_external=True + ) + with suppress(NoURLAvailableError): + cloud_url = get_url(hass, allow_internal=False, require_cloud=True) + + connection.send_result( + msg["id"], + { + "internal": internal_url, + "external": external_url, + "cloud": cloud_url, + }, + ) diff --git a/homeassistant/components/neurio_energy/manifest.json b/homeassistant/components/neurio_energy/manifest.json index 467825da012..3a524ac4b5f 100644 --- a/homeassistant/components/neurio_energy/manifest.json +++ b/homeassistant/components/neurio_energy/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/neurio_energy", "iot_class": "cloud_polling", "loggers": ["neurio"], + "quality_scale": "legacy", "requirements": ["neurio==0.3.1"] } diff --git a/homeassistant/components/nexia/__init__.py b/homeassistant/components/nexia/__init__.py index 9bc76fdcfdc..66a8ec5bdb8 100644 --- a/homeassistant/components/nexia/__init__.py +++ b/homeassistant/components/nexia/__init__.py @@ -86,3 +86,21 @@ async def async_remove_config_entry_device( if zone_id in dev_ids: return False return True + + +async def async_migrate_entry(hass: HomeAssistant, entry: NexiaConfigEntry) -> bool: + """Migrate entry.""" + + _LOGGER.debug("Migrating from version %s", entry.version) + + if entry.version == 1: + # 1 -> 2: Unique ID from integer to string + if entry.minor_version == 1: + minor_version = 2 + hass.config_entries.async_update_entry( + entry, unique_id=str(entry.unique_id), minor_version=minor_version + ) + + _LOGGER.debug("Migration successful") + + return True diff --git a/homeassistant/components/nexia/climate.py b/homeassistant/components/nexia/climate.py index a4bcc03c210..becd664756b 100644 --- a/homeassistant/components/nexia/climate.py +++ b/homeassistant/components/nexia/climate.py @@ -35,6 +35,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import VolDictType from .const import ( @@ -42,6 +43,7 @@ from .const import ( ATTR_DEHUMIDIFY_SETPOINT, ATTR_HUMIDIFY_SETPOINT, ATTR_RUN_MODE, + DOMAIN, ) from .coordinator import NexiaDataUpdateCoordinator from .entity import NexiaThermostatZoneEntity @@ -153,7 +155,6 @@ class NexiaZone(NexiaThermostatZoneEntity, ClimateEntity): """Provides Nexia Climate support.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: NexiaDataUpdateCoordinator, zone: NexiaThermostatZone @@ -378,11 +379,31 @@ class NexiaZone(NexiaThermostatZoneEntity, ClimateEntity): async def async_turn_aux_heat_off(self) -> None: """Turn Aux Heat off.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) await self._thermostat.set_emergency_heat(False) self._signal_thermostat_update() async def async_turn_aux_heat_on(self) -> None: """Turn Aux Heat on.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) await 
self._thermostat.set_emergency_heat(True) self._signal_thermostat_update() diff --git a/homeassistant/components/nexia/config_flow.py b/homeassistant/components/nexia/config_flow.py index 592ebde61c3..85d8db03d7c 100644 --- a/homeassistant/components/nexia/config_flow.py +++ b/homeassistant/components/nexia/config_flow.py @@ -81,6 +81,7 @@ class NexiaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Nexia.""" VERSION = 1 + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -99,7 +100,7 @@ class NexiaConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" if "base" not in errors: - await self.async_set_unique_id(info["house_id"]) + await self.async_set_unique_id(str(info["house_id"])) self._abort_if_unique_id_configured() return self.async_create_entry(title=info["title"], data=user_input) diff --git a/homeassistant/components/nexia/icons.json b/homeassistant/components/nexia/icons.json index 620d1a42c03..a2157f5c035 100644 --- a/homeassistant/components/nexia/icons.json +++ b/homeassistant/components/nexia/icons.json @@ -20,8 +20,14 @@ } }, "services": { - "set_aircleaner_mode": "mdi:air-filter", - "set_humidify_setpoint": "mdi:water-percent", - "set_hvac_run_mode": "mdi:hvac" + "set_aircleaner_mode": { + "service": "mdi:air-filter" + }, + "set_humidify_setpoint": { + "service": "mdi:water-percent" + }, + "set_hvac_run_mode": { + "service": "mdi:hvac" + } } } diff --git a/homeassistant/components/nexia/strings.json b/homeassistant/components/nexia/strings.json index 9e49f4bb793..d88ce0b898d 100644 --- a/homeassistant/components/nexia/strings.json +++ b/homeassistant/components/nexia/strings.json @@ -64,7 +64,7 @@ "services": { "set_aircleaner_mode": { "name": "Set air cleaner mode", - "description": "The air cleaner mode.", + "description": "Sets the air cleaner mode.", "fields": { "aircleaner_mode": { "name": "Air cleaner mode", @@ -74,17 +74,17 @@ }, "set_humidify_setpoint": { "name": "Set humidify set point", - "description": "The humidification set point.", + "description": "Sets the target humidity.", "fields": { "humidity": { - "name": "Humidify", + "name": "Humidity", "description": "The humidification setpoint." } } }, "set_hvac_run_mode": { "name": "Set hvac run mode", - "description": "The HVAC run mode.", + "description": "Sets the HVAC operation mode.", "fields": { "run_mode": { "name": "Run mode", @@ -96,5 +96,18 @@ } } } + }, + "issues": { + "migrate_aux_heat": { + "title": "Migration of Nexia set_aux_heat action", + "fix_flow": { + "step": { + "confirm": { + "description": "The Nexia `set_aux_heat` action has been migrated. A new `aux_heat_only` switch entity is available for each thermostat.\n\nUpdate any automations to use the new Emergency heat switch entity. 
When this is done, select **Submit** to fix this issue.", + "title": "[%key:component::nexia::issues::migrate_aux_heat::title%]" + } + } + } + } } } diff --git a/homeassistant/components/nexia/switch.py b/homeassistant/components/nexia/switch.py index 0a874ba1817..9505538e86a 100644 --- a/homeassistant/components/nexia/switch.py +++ b/homeassistant/components/nexia/switch.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import NexiaDataUpdateCoordinator -from .entity import NexiaThermostatZoneEntity +from .entity import NexiaThermostatEntity, NexiaThermostatZoneEntity from .types import NexiaConfigEntry @@ -25,9 +25,11 @@ async def async_setup_entry( """Set up switches for a Nexia device.""" coordinator = config_entry.runtime_data nexia_home = coordinator.nexia_home - entities: list[NexiaHoldSwitch] = [] + entities: list[NexiaHoldSwitch | NexiaEmergencyHeatSwitch] = [] for thermostat_id in nexia_home.get_thermostat_ids(): thermostat: NexiaThermostat = nexia_home.get_thermostat_by_id(thermostat_id) + if thermostat.has_emergency_heat(): + entities.append(NexiaEmergencyHeatSwitch(coordinator, thermostat)) for zone_id in thermostat.get_zone_ids(): zone: NexiaThermostatZone = thermostat.get_zone_by_id(zone_id) entities.append(NexiaHoldSwitch(coordinator, zone)) @@ -64,3 +66,34 @@ class NexiaHoldSwitch(NexiaThermostatZoneEntity, SwitchEntity): """Disable permanent hold.""" await self._zone.call_return_to_schedule() self._signal_zone_update() + + +class NexiaEmergencyHeatSwitch(NexiaThermostatEntity, SwitchEntity): + """Provides Nexia emergency heat switch support.""" + + _attr_translation_key = "emergency_heat" + + def __init__( + self, coordinator: NexiaDataUpdateCoordinator, thermostat: NexiaThermostat + ) -> None: + """Initialize the emergency heat mode switch.""" + super().__init__( + coordinator, + thermostat, + unique_id=f"{thermostat.thermostat_id}_emergency_heat", + ) + + @property + def is_on(self) -> bool: + """Return if emergency heat is active.""" + return self._thermostat.is_emergency_heat_active() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Enable emergency heat.""" + await self._thermostat.set_emergency_heat(True) + self._signal_thermostat_update() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Disable emergency heat.""" + await self._thermostat.set_emergency_heat(False) + self._signal_thermostat_update() diff --git a/homeassistant/components/nextbus/__init__.py b/homeassistant/components/nextbus/__init__.py index e8c0bc224fe..168488e1940 100644 --- a/homeassistant/components/nextbus/__init__.py +++ b/homeassistant/components/nextbus/__init__.py @@ -3,6 +3,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_STOP, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from .const import CONF_AGENCY, CONF_ROUTE, DOMAIN from .coordinator import NextBusDataUpdateCoordinator @@ -13,17 +14,23 @@ PLATFORMS = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up platforms for NextBus.""" entry_agency = entry.data[CONF_AGENCY] + entry_stop = entry.data[CONF_STOP] + coordinator_key = f"{entry_agency}-{entry_stop}" - coordinator: NextBusDataUpdateCoordinator = hass.data.setdefault(DOMAIN, {}).get( - entry_agency + coordinator: NextBusDataUpdateCoordinator | None = hass.data.setdefault( + DOMAIN, {} 
).get( + coordinator_key, ) if coordinator is None: coordinator = NextBusDataUpdateCoordinator(hass, entry_agency) - hass.data[DOMAIN][entry_agency] = coordinator + hass.data[DOMAIN][coordinator_key] = coordinator - coordinator.add_stop_route(entry.data[CONF_STOP], entry.data[CONF_ROUTE]) + coordinator.add_stop_route(entry_stop, entry.data[CONF_ROUTE]) - await coordinator.async_config_entry_first_refresh() + await coordinator.async_refresh() + if not coordinator.last_update_success: + raise ConfigEntryNotReady from coordinator.last_exception await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -33,11 +40,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - entry_agency = entry.data.get(CONF_AGENCY) - coordinator: NextBusDataUpdateCoordinator = hass.data[DOMAIN][entry_agency] - coordinator.remove_stop_route(entry.data[CONF_STOP], entry.data[CONF_ROUTE]) + entry_agency = entry.data[CONF_AGENCY] + entry_stop = entry.data[CONF_STOP] + coordinator_key = f"{entry_agency}-{entry_stop}" + + coordinator: NextBusDataUpdateCoordinator = hass.data[DOMAIN][coordinator_key] + coordinator.remove_stop_route(entry_stop, entry.data[CONF_ROUTE]) + if not coordinator.has_routes(): - hass.data[DOMAIN].pop(entry_agency) + await coordinator.async_shutdown() + hass.data[DOMAIN].pop(coordinator_key) return True diff --git a/homeassistant/components/nextbus/coordinator.py b/homeassistant/components/nextbus/coordinator.py index 781742e4c08..617669adf2f 100644 --- a/homeassistant/components/nextbus/coordinator.py +++ b/homeassistant/components/nextbus/coordinator.py @@ -24,6 +24,7 @@ class NextBusDataUpdateCoordinator(DataUpdateCoordinator): super().__init__( hass, _LOGGER, + config_entry=None, # It is shared between multiple entries name=DOMAIN, update_interval=timedelta(seconds=30), ) @@ -51,24 +52,53 @@ class NextBusDataUpdateCoordinator(DataUpdateCoordinator): async def _async_update_data(self) -> dict[str, Any]: """Fetch data from NextBus.""" - _route_stops = set(self._route_stops) - self.logger.debug("Updating data from API. Routes: %s", str(_route_stops)) + _stops_to_route_stops: dict[str, set[RouteStop]] = {} + for route_stop in self._route_stops: + _stops_to_route_stops.setdefault(route_stop.stop_id, set()).add(route_stop) + + self.logger.debug( + "Updating data from API. 
Routes: %s", str(_stops_to_route_stops) + ) def _update_data() -> dict: """Fetch data from NextBus.""" self.logger.debug("Updating data from API (executor)") predictions: dict[RouteStop, dict[str, Any]] = {} - for route_stop in _route_stops: - prediction_results: list[dict[str, Any]] = [] + + for stop_id, route_stops in _stops_to_route_stops.items(): + self.logger.debug("Updating data from API (executor) %s", stop_id) try: - prediction_results = self.client.predictions_for_stop( - route_stop.stop_id, route_stop.route_id + prediction_results = self.client.predictions_for_stop(stop_id) + except NextBusHTTPError as ex: + self.logger.error( + "Error updating %s (executor): %s %s", + str(stop_id), + ex, + getattr(ex, "response", None), ) - except (NextBusHTTPError, NextBusFormatError) as ex: + raise UpdateFailed("Failed updating nextbus data", ex) from ex + except NextBusFormatError as ex: raise UpdateFailed("Failed updating nextbus data", ex) from ex - if prediction_results: - predictions[route_stop] = prediction_results[0] + self.logger.debug( + "Prediction results for %s (executor): %s", + str(stop_id), + str(prediction_results), + ) + + for route_stop in route_stops: + for prediction_result in prediction_results: + if ( + prediction_result["stop"]["id"] == route_stop.stop_id + and prediction_result["route"]["id"] == route_stop.route_id + ): + predictions[route_stop] = prediction_result + break + else: + self.logger.warning( + "Prediction not found for %s (executor)", str(route_stop) + ) + self._predictions = predictions return predictions diff --git a/homeassistant/components/nextbus/manifest.json b/homeassistant/components/nextbus/manifest.json index d22ba66d860..6300dc1cdc9 100644 --- a/homeassistant/components/nextbus/manifest.json +++ b/homeassistant/components/nextbus/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nextbus", "iot_class": "cloud_polling", "loggers": ["py_nextbus"], - "requirements": ["py-nextbusnext==2.0.4"] + "requirements": ["py-nextbusnext==2.0.5"] } diff --git a/homeassistant/components/nextbus/sensor.py b/homeassistant/components/nextbus/sensor.py index 8ef5323858f..554814fe2db 100644 --- a/homeassistant/components/nextbus/sensor.py +++ b/homeassistant/components/nextbus/sensor.py @@ -28,8 +28,10 @@ async def async_setup_entry( """Load values from configuration and initialize the platform.""" _LOGGER.debug(config.data) entry_agency = config.data[CONF_AGENCY] + entry_stop = config.data[CONF_STOP] + coordinator_key = f"{entry_agency}-{entry_stop}" - coordinator: NextBusDataUpdateCoordinator = hass.data[DOMAIN].get(entry_agency) + coordinator: NextBusDataUpdateCoordinator = hass.data[DOMAIN].get(coordinator_key) async_add_entities( ( diff --git a/homeassistant/components/nextcloud/config_flow.py b/homeassistant/components/nextcloud/config_flow.py index c469936ac48..6c59dd271d5 100644 --- a/homeassistant/components/nextcloud/config_flow.py +++ b/homeassistant/components/nextcloud/config_flow.py @@ -13,7 +13,7 @@ from nextcloudmonitor import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL from .const import DEFAULT_VERIFY_SSL, DOMAIN @@ -39,8 +39,6 @@ class NextcloudConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _entry: ConfigEntry | None = None - def _try_connect_nc(self, user_input: dict) -> 
NextcloudMonitor: """Try to connect to nextcloud server.""" return NextcloudMonitor( @@ -79,7 +77,6 @@ class NextcloudConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle flow upon an API authentication error.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -87,32 +84,29 @@ class NextcloudConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle reauthorization flow.""" errors = {} - assert self._entry is not None + reauth_entry = self._get_reauth_entry() if user_input is not None: try: await self.hass.async_add_executor_job( - self._try_connect_nc, {**self._entry.data, **user_input} + self._try_connect_nc, {**reauth_entry.data, **user_input} ) except NextcloudMonitorAuthorizationError: errors["base"] = "invalid_auth" except (NextcloudMonitorConnectionError, NextcloudMonitorRequestError): errors["base"] = "connection_error" else: - self.hass.config_entries.async_update_entry( - self._entry, - data={**self._entry.data, **user_input}, + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input ) - await self.hass.config_entries.async_reload(self._entry.entry_id) - return self.async_abort(reason="reauth_successful") data_schema = self.add_suggested_values_to_schema( DATA_SCHEMA_REAUTH, - {CONF_USERNAME: self._entry.data[CONF_USERNAME], **(user_input or {})}, + {CONF_USERNAME: reauth_entry.data[CONF_USERNAME], **(user_input or {})}, ) return self.async_show_form( step_id="reauth_confirm", data_schema=data_schema, - description_placeholders={"url": self._entry.data[CONF_URL]}, + description_placeholders={"url": reauth_entry.data[CONF_URL]}, errors=errors, ) diff --git a/homeassistant/components/nextcloud/update.py b/homeassistant/components/nextcloud/update.py index 8c292e1bba2..5b9de52ad1d 100644 --- a/homeassistant/components/nextcloud/update.py +++ b/homeassistant/components/nextcloud/update.py @@ -32,12 +32,12 @@ class NextcloudUpdateSensor(NextcloudEntity, UpdateEntity): """Represents a Nextcloud update entity.""" @property - def installed_version(self) -> str | None: + def installed_version(self) -> str: """Version installed and in use.""" - return self.coordinator.data.get("system_version") + return self.coordinator.data["system_version"] @property - def latest_version(self) -> str | None: + def latest_version(self) -> str: """Latest version available for install.""" return self.coordinator.data.get( "update_available_version", self.installed_version @@ -46,7 +46,5 @@ class NextcloudUpdateSensor(NextcloudEntity, UpdateEntity): @property def release_url(self) -> str | None: """URL to the full release notes of the latest version available.""" - if self.latest_version: - ver = "-".join(self.latest_version.split(".")[:3]) - return f"https://nextcloud.com/changelog/#{ver}" - return None + ver = "-".join(self.latest_version.split(".")[:3]) + return f"https://nextcloud.com/changelog/#{ver}" diff --git a/homeassistant/components/nextdns/__init__.py b/homeassistant/components/nextdns/__init__.py index 4256126b3c7..7f0729bca1e 100644 --- a/homeassistant/components/nextdns/__init__.py +++ b/homeassistant/components/nextdns/__init__.py @@ -15,6 +15,7 @@ from nextdns import ( AnalyticsStatus, ApiError, ConnectionStatus, + InvalidApiKeyError, NextDns, Settings, ) @@ -23,7 +24,7 @@ from tenacity import RetryError from homeassistant.config_entries import ConfigEntry from 
homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import ( @@ -88,6 +89,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: NextDnsConfigEntry) -> b nextdns = await NextDns.create(websession, api_key) except (ApiError, ClientConnectorError, RetryError, TimeoutError) as err: raise ConfigEntryNotReady from err + except InvalidApiKeyError as err: + raise ConfigEntryAuthFailed from err tasks = [] coordinators = {} diff --git a/homeassistant/components/nextdns/config_flow.py b/homeassistant/components/nextdns/config_flow.py index bd79112b1f9..d3327c4c08b 100644 --- a/homeassistant/components/nextdns/config_flow.py +++ b/homeassistant/components/nextdns/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any from aiohttp.client_exceptions import ClientConnectorError @@ -11,10 +12,20 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_PROFILE_NAME +from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_PROFILE_ID, DOMAIN +AUTH_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) + + +async def async_init_nextdns(hass: HomeAssistant, api_key: str) -> NextDns: + """Check if credentials are valid.""" + websession = async_get_clientsession(hass) + + return await NextDns.create(websession, api_key) + class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for NextDNS.""" @@ -23,8 +34,8 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the config flow.""" - self.nextdns: NextDns | None = None - self.api_key: str | None = None + self.nextdns: NextDns + self.api_key: str async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -32,14 +43,10 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors: dict[str, str] = {} - websession = async_get_clientsession(self.hass) - if user_input is not None: self.api_key = user_input[CONF_API_KEY] try: - self.nextdns = await NextDns.create( - websession, user_input[CONF_API_KEY] - ) + self.nextdns = await async_init_nextdns(self.hass, self.api_key) except InvalidApiKeyError: errors["base"] = "invalid_api_key" except (ApiError, ClientConnectorError, RetryError, TimeoutError): @@ -51,7 +58,7 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", - data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), + data_schema=AUTH_SCHEMA, errors=errors, ) @@ -61,8 +68,6 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): """Handle the profiles step.""" errors: dict[str, str] = {} - assert self.nextdns is not None - if user_input is not None: profile_name = user_input[CONF_PROFILE_NAME] profile_id = self.nextdns.get_profile_id(profile_name) @@ -86,3 +91,35 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): ), errors=errors, ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: 
dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + await async_init_nextdns(self.hass, user_input[CONF_API_KEY]) + except InvalidApiKeyError: + errors["base"] = "invalid_api_key" + except (ApiError, ClientConnectorError, RetryError, TimeoutError): + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + errors["base"] = "unknown" + else: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=AUTH_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/nextdns/coordinator.py b/homeassistant/components/nextdns/coordinator.py index 5210807bd3c..6b35e35a027 100644 --- a/homeassistant/components/nextdns/coordinator.py +++ b/homeassistant/components/nextdns/coordinator.py @@ -21,6 +21,7 @@ from nextdns.model import NextDnsData from tenacity import RetryError from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -62,10 +63,11 @@ class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]): except ( ApiError, ClientConnectorError, - InvalidApiKeyError, RetryError, ) as err: raise UpdateFailed(err) from err + except InvalidApiKeyError as err: + raise ConfigEntryAuthFailed from err async def _async_update_data_internal(self) -> CoordinatorDataT: """Update data via library.""" diff --git a/homeassistant/components/nextdns/manifest.json b/homeassistant/components/nextdns/manifest.json index be9eee5049c..d10a1728a94 100644 --- a/homeassistant/components/nextdns/manifest.json +++ b/homeassistant/components/nextdns/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["nextdns"], - "quality_scale": "platinum", - "requirements": ["nextdns==3.2.0"] + "requirements": ["nextdns==4.0.0"] } diff --git a/homeassistant/components/nextdns/sensor.py b/homeassistant/components/nextdns/sensor.py index b390ac93e06..ef2b5140fa1 100644 --- a/homeassistant/components/nextdns/sensor.py +++ b/homeassistant/components/nextdns/sensor.py @@ -54,7 +54,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( coordinator_type=ATTR_STATUS, entity_category=EntityCategory.DIAGNOSTIC, translation_key="all_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.all_queries, ), @@ -63,7 +62,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( coordinator_type=ATTR_STATUS, entity_category=EntityCategory.DIAGNOSTIC, translation_key="blocked_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.blocked_queries, ), @@ -72,7 +70,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( coordinator_type=ATTR_STATUS, entity_category=EntityCategory.DIAGNOSTIC, translation_key="relayed_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.relayed_queries, ), @@ -91,7 +88,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] 
= ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="doh_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.doh_queries, ), @@ -101,7 +97,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="doh3_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.doh3_queries, ), @@ -111,7 +106,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="dot_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.dot_queries, ), @@ -121,7 +115,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="doq_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.doq_queries, ), @@ -131,7 +124,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="tcp_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.tcp_queries, ), @@ -141,7 +133,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="udp_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.udp_queries, ), @@ -211,7 +202,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="encrypted_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.encrypted_queries, ), @@ -221,7 +211,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="unencrypted_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.unencrypted_queries, ), @@ -241,7 +230,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="ipv4_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.ipv4_queries, ), @@ -251,7 +239,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="ipv6_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.ipv6_queries, ), @@ -271,7 +258,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="validated_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.validated_queries, ), @@ -281,7 +267,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] 
= ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="not_validated_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.not_validated_queries, ), diff --git a/homeassistant/components/nextdns/strings.json b/homeassistant/components/nextdns/strings.json index e0a37aad03b..f2a5fa2816d 100644 --- a/homeassistant/components/nextdns/strings.json +++ b/homeassistant/components/nextdns/strings.json @@ -10,6 +10,11 @@ "data": { "profile": "Profile" } + }, + "reauth_confirm": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + } } }, "error": { @@ -18,7 +23,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "This NextDNS profile is already configured." + "already_configured": "This NextDNS profile is already configured.", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "system_health": { @@ -42,76 +48,91 @@ }, "sensor": { "all_queries": { - "name": "DNS queries" + "name": "DNS queries", + "unit_of_measurement": "queries" }, "blocked_queries": { - "name": "DNS queries blocked" + "name": "DNS queries blocked", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "blocked_queries_ratio": { "name": "DNS queries blocked ratio" }, "doh3_queries": { - "name": "DNS-over-HTTP/3 queries" + "name": "DNS-over-HTTP/3 queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "doh3_queries_ratio": { "name": "DNS-over-HTTP/3 queries ratio" }, "doh_queries": { - "name": "DNS-over-HTTPS queries" + "name": "DNS-over-HTTPS queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "doh_queries_ratio": { "name": "DNS-over-HTTPS queries ratio" }, "doq_queries": { - "name": "DNS-over-QUIC queries" + "name": "DNS-over-QUIC queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "doq_queries_ratio": { "name": "DNS-over-QUIC queries ratio" }, "dot_queries": { - "name": "DNS-over-TLS queries" + "name": "DNS-over-TLS queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "dot_queries_ratio": { "name": "DNS-over-TLS queries ratio" }, "encrypted_queries": { - "name": "Encrypted queries" + "name": "Encrypted queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "encrypted_queries_ratio": { "name": "Encrypted queries ratio" }, "ipv4_queries": { - "name": "IPv4 queries" + "name": "IPv4 queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "ipv6_queries": { - "name": "IPv6 queries" + "name": "IPv6 queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "ipv6_queries_ratio": { "name": "IPv6 queries ratio" }, "not_validated_queries": { - "name": "DNSSEC not validated queries" + "name": "DNSSEC not validated queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "relayed_queries": { - "name": "DNS queries relayed" + "name": "DNS queries relayed", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "tcp_queries": { - "name": "TCP queries" 
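The sensor.py hunks above stop hard-coding native_unit_of_measurement="queries"; the unit now comes from the unit_of_measurement entries being added to strings.json around this point, keyed by each entity's translation_key, so it can be translated alongside the name. A rough sketch of what one entity description looks like after the change (the description subclass and lambda mirror the diff but are illustrative):

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

from homeassistant.components.sensor import SensorEntityDescription, SensorStateClass
from homeassistant.const import EntityCategory


@dataclass(frozen=True, kw_only=True)
class ExampleSensorEntityDescription(SensorEntityDescription):
    """Sensor description with a value extractor, as in the NextDNS sensors."""

    value: Callable[[Any], int]


ALL_QUERIES = ExampleSensorEntityDescription(
    key="all_queries",
    entity_category=EntityCategory.DIAGNOSTIC,
    translation_key="all_queries",  # strings.json supplies the name and the unit
    state_class=SensorStateClass.TOTAL,
    value=lambda data: data.all_queries,
    # no native_unit_of_measurement here any more
)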
+ "name": "TCP queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "tcp_queries_ratio": { "name": "TCP queries ratio" }, "udp_queries": { - "name": "UDP queries" + "name": "UDP queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "udp_queries_ratio": { "name": "UDP queries ratio" }, "unencrypted_queries": { - "name": "Unencrypted queries" + "name": "Unencrypted queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "validated_queries": { - "name": "DNSSEC validated queries" + "name": "DNSSEC validated queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "validated_queries_ratio": { "name": "DNSSEC validated queries ratio" diff --git a/homeassistant/components/nibe_heatpump/__init__.py b/homeassistant/components/nibe_heatpump/__init__.py index fbb49351e0e..b3ceb00a834 100644 --- a/homeassistant/components/nibe_heatpump/__init__.py +++ b/homeassistant/components/nibe_heatpump/__init__.py @@ -30,7 +30,7 @@ from .const import ( CONF_WORD_SWAP, DOMAIN, ) -from .coordinator import Coordinator +from .coordinator import CoilCoordinator PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, @@ -81,7 +81,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_stop) ) - coordinator = Coordinator(hass, heatpump, connection) + coordinator = CoilCoordinator(hass, heatpump, connection) data = hass.data.setdefault(DOMAIN, {}) data[entry.entry_id] = coordinator diff --git a/homeassistant/components/nibe_heatpump/binary_sensor.py b/homeassistant/components/nibe_heatpump/binary_sensor.py index 035a4a23a08..0cb16bf4485 100644 --- a/homeassistant/components/nibe_heatpump/binary_sensor.py +++ b/homeassistant/components/nibe_heatpump/binary_sensor.py @@ -11,7 +11,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN -from .coordinator import CoilEntity, Coordinator +from .coordinator import CoilCoordinator +from .entity import CoilEntity async def async_setup_entry( @@ -21,7 +22,7 @@ async def async_setup_entry( ) -> None: """Set up platform.""" - coordinator: Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator: CoilCoordinator = hass.data[DOMAIN][config_entry.entry_id] async_add_entities( BinarySensor(coordinator, coil) @@ -35,7 +36,7 @@ class BinarySensor(CoilEntity, BinarySensorEntity): _attr_entity_category = EntityCategory.DIAGNOSTIC - def __init__(self, coordinator: Coordinator, coil: Coil) -> None: + def __init__(self, coordinator: CoilCoordinator, coil: Coil) -> None: """Initialize entity.""" super().__init__(coordinator, coil, ENTITY_ID_FORMAT) diff --git a/homeassistant/components/nibe_heatpump/button.py b/homeassistant/components/nibe_heatpump/button.py index 0c3122805e1..df8ceef6479 100644 --- a/homeassistant/components/nibe_heatpump/button.py +++ b/homeassistant/components/nibe_heatpump/button.py @@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, LOGGER -from .coordinator import Coordinator +from .coordinator import CoilCoordinator async def async_setup_entry( @@ -23,7 +23,7 @@ async def async_setup_entry( ) -> None: """Set 
up platform.""" - coordinator: Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator: CoilCoordinator = hass.data[DOMAIN][config_entry.entry_id] def reset_buttons(): if unit := UNIT_COILGROUPS.get(coordinator.series, {}).get("main"): @@ -35,13 +35,13 @@ async def async_setup_entry( async_add_entities(reset_buttons()) -class NibeAlarmResetButton(CoordinatorEntity[Coordinator], ButtonEntity): +class NibeAlarmResetButton(CoordinatorEntity[CoilCoordinator], ButtonEntity): """Sensor entity.""" _attr_has_entity_name = True _attr_entity_category = EntityCategory.DIAGNOSTIC - def __init__(self, coordinator: Coordinator, unit: UnitCoilGroup) -> None: + def __init__(self, coordinator: CoilCoordinator, unit: UnitCoilGroup) -> None: """Initialize entity.""" self._reset_coil = coordinator.heatpump.get_coil_by_address(unit.alarm_reset) self._alarm_coil = coordinator.heatpump.get_coil_by_address(unit.alarm) diff --git a/homeassistant/components/nibe_heatpump/climate.py b/homeassistant/components/nibe_heatpump/climate.py index d933d5a5ab0..94db90e7f58 100644 --- a/homeassistant/components/nibe_heatpump/climate.py +++ b/homeassistant/components/nibe_heatpump/climate.py @@ -38,7 +38,7 @@ from .const import ( VALUES_PRIORITY_COOLING, VALUES_PRIORITY_HEATING, ) -from .coordinator import Coordinator +from .coordinator import CoilCoordinator async def async_setup_entry( @@ -48,7 +48,7 @@ async def async_setup_entry( ) -> None: """Set up platform.""" - coordinator: Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator: CoilCoordinator = hass.data[DOMAIN][config_entry.entry_id] main_unit = UNIT_COILGROUPS[coordinator.series]["main"] @@ -62,7 +62,7 @@ async def async_setup_entry( async_add_entities(climate_systems()) -class NibeClimateEntity(CoordinatorEntity[Coordinator], ClimateEntity): +class NibeClimateEntity(CoordinatorEntity[CoilCoordinator], ClimateEntity): """Climate entity.""" _attr_entity_category = None @@ -74,11 +74,10 @@ class NibeClimateEntity(CoordinatorEntity[Coordinator], ClimateEntity): _attr_target_temperature_step = 0.5 _attr_max_temp = 35.0 _attr_min_temp = 5.0 - _enable_turn_on_off_backwards_compatibility = False def __init__( self, - coordinator: Coordinator, + coordinator: CoilCoordinator, key: str, unit: UnitCoilGroup, climate: ClimateCoilGroup, diff --git a/homeassistant/components/nibe_heatpump/coordinator.py b/homeassistant/components/nibe_heatpump/coordinator.py index 0f1fabe4249..ed6d18f7888 100644 --- a/homeassistant/components/nibe_heatpump/coordinator.py +++ b/homeassistant/components/nibe_heatpump/coordinator.py @@ -6,23 +6,18 @@ import asyncio from collections import defaultdict from collections.abc import Callable, Iterable from datetime import date, timedelta -from functools import cached_property from typing import Any from nibe.coil import Coil, CoilData from nibe.connection import Connection from nibe.exceptions import CoilNotFoundException, ReadException from nibe.heatpump import HeatPump, Series +from propcache import cached_property from homeassistant.config_entries import ConfigEntry from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import async_generate_entity_id -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, LOGGER @@ -68,7 +63,7 @@ class 
ContextCoordinator[_DataTypeT, _ContextTypeT](DataUpdateCoordinator[_DataT return release_update -class Coordinator(ContextCoordinator[dict[int, CoilData], int]): +class CoilCoordinator(ContextCoordinator[dict[int, CoilData], int]): """Update coordinator for nibe heat pumps.""" config_entry: ConfigEntry @@ -188,43 +183,3 @@ class Coordinator(ContextCoordinator[dict[int, CoilData], int]): self.task.cancel() await asyncio.wait((self.task,)) await self.connection.stop() - - -class CoilEntity(CoordinatorEntity[Coordinator]): - """Base for coil based entities.""" - - _attr_has_entity_name = True - _attr_entity_registry_enabled_default = False - - def __init__( - self, coordinator: Coordinator, coil: Coil, entity_format: str - ) -> None: - """Initialize base entity.""" - super().__init__(coordinator, {coil.address}) - self.entity_id = async_generate_entity_id( - entity_format, coil.name, hass=coordinator.hass - ) - self._attr_name = coil.title - self._attr_unique_id = f"{coordinator.unique_id}-{coil.address}" - self._attr_device_info = coordinator.device_info - self._coil = coil - - @property - def available(self) -> bool: - """Return if entity is available.""" - return self.coordinator.last_update_success and self._coil.address in ( - self.coordinator.data or {} - ) - - def _async_read_coil(self, data: CoilData): - """Update state of entity based on coil data.""" - - async def _async_write_coil(self, value: float | str): - """Write coil and update state.""" - await self.coordinator.async_write_coil(self._coil, value) - - def _handle_coordinator_update(self) -> None: - data = self.coordinator.data.get(self._coil.address) - if data is not None: - self._async_read_coil(data) - self.async_write_ha_state() diff --git a/homeassistant/components/nibe_heatpump/entity.py b/homeassistant/components/nibe_heatpump/entity.py new file mode 100644 index 00000000000..3cbc8af32a3 --- /dev/null +++ b/homeassistant/components/nibe_heatpump/entity.py @@ -0,0 +1,50 @@ +"""The Nibe Heat Pump coordinator.""" + +from __future__ import annotations + +from nibe.coil import Coil, CoilData + +from homeassistant.helpers.entity import async_generate_entity_id +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import CoilCoordinator + + +class CoilEntity(CoordinatorEntity[CoilCoordinator]): + """Base for coil based entities.""" + + _attr_has_entity_name = True + _attr_entity_registry_enabled_default = False + + def __init__( + self, coordinator: CoilCoordinator, coil: Coil, entity_format: str + ) -> None: + """Initialize base entity.""" + super().__init__(coordinator, {coil.address}) + self.entity_id = async_generate_entity_id( + entity_format, coil.name, hass=coordinator.hass + ) + self._attr_name = coil.title + self._attr_unique_id = f"{coordinator.unique_id}-{coil.address}" + self._attr_device_info = coordinator.device_info + self._coil = coil + + @property + def available(self) -> bool: + """Return if entity is available.""" + return self.coordinator.last_update_success and self._coil.address in ( + self.coordinator.data or {} + ) + + def _async_read_coil(self, data: CoilData): + """Update state of entity based on coil data.""" + + async def _async_write_coil(self, value: float | str): + """Write coil and update state.""" + await self.coordinator.async_write_coil(self._coil, value) + + def _handle_coordinator_update(self) -> None: + data = self.coordinator.data.get(self._coil.address) + if data is not None: + self._async_read_coil(data) + self.async_write_ha_state() diff --git 
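With CoilEntity moved into entity.py above, every platform follows the same recipe: subclass CoilEntity, map CoilData to entity state in _async_read_coil, and push changes through _async_write_coil. An illustrative sketch of such a platform entity, assuming a coil whose mapped values are "ON" and "OFF" (this is not the exact upstream switch implementation):

from __future__ import annotations

from typing import Any

from nibe.coil import Coil, CoilData

from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity

from .coordinator import CoilCoordinator
from .entity import CoilEntity


class ExampleCoilSwitch(CoilEntity, SwitchEntity):
    """Switch backed by a single heat pump coil."""

    def __init__(self, coordinator: CoilCoordinator, coil: Coil) -> None:
        """Initialize entity."""
        super().__init__(coordinator, coil, ENTITY_ID_FORMAT)

    def _async_read_coil(self, data: CoilData) -> None:
        """Map the coil's mapped value onto the switch state."""
        self._attr_is_on = data.value == "ON"

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Write the coil and let the coordinator refresh state."""
        await self._async_write_coil("ON")

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Write the coil and let the coordinator refresh state."""
        await self._async_write_coil("OFF")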
a/homeassistant/components/nibe_heatpump/manifest.json b/homeassistant/components/nibe_heatpump/manifest.json index b3e5597da73..049ba905f04 100644 --- a/homeassistant/components/nibe_heatpump/manifest.json +++ b/homeassistant/components/nibe_heatpump/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump", "iot_class": "local_polling", - "requirements": ["nibe==2.11.0"] + "requirements": ["nibe==2.14.0"] } diff --git a/homeassistant/components/nibe_heatpump/number.py b/homeassistant/components/nibe_heatpump/number.py index 509f3364fee..cb379139eed 100644 --- a/homeassistant/components/nibe_heatpump/number.py +++ b/homeassistant/components/nibe_heatpump/number.py @@ -11,7 +11,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN -from .coordinator import CoilEntity, Coordinator +from .coordinator import CoilCoordinator +from .entity import CoilEntity async def async_setup_entry( @@ -21,7 +22,7 @@ async def async_setup_entry( ) -> None: """Set up platform.""" - coordinator: Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator: CoilCoordinator = hass.data[DOMAIN][config_entry.entry_id] async_add_entities( Number(coordinator, coil) @@ -44,7 +45,7 @@ class Number(CoilEntity, NumberEntity): _attr_entity_category = EntityCategory.CONFIG - def __init__(self, coordinator: Coordinator, coil: Coil) -> None: + def __init__(self, coordinator: CoilCoordinator, coil: Coil) -> None: """Initialize entity.""" super().__init__(coordinator, coil, ENTITY_ID_FORMAT) if coil.min is None or coil.max is None: diff --git a/homeassistant/components/nibe_heatpump/select.py b/homeassistant/components/nibe_heatpump/select.py index 07c958885b8..3aecff94649 100644 --- a/homeassistant/components/nibe_heatpump/select.py +++ b/homeassistant/components/nibe_heatpump/select.py @@ -11,7 +11,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN -from .coordinator import CoilEntity, Coordinator +from .coordinator import CoilCoordinator +from .entity import CoilEntity async def async_setup_entry( @@ -21,7 +22,7 @@ async def async_setup_entry( ) -> None: """Set up platform.""" - coordinator: Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator: CoilCoordinator = hass.data[DOMAIN][config_entry.entry_id] async_add_entities( Select(coordinator, coil) @@ -35,7 +36,7 @@ class Select(CoilEntity, SelectEntity): _attr_entity_category = EntityCategory.CONFIG - def __init__(self, coordinator: Coordinator, coil: Coil) -> None: + def __init__(self, coordinator: CoilCoordinator, coil: Coil) -> None: """Initialize entity.""" assert coil.mappings super().__init__(coordinator, coil, ENTITY_ID_FORMAT) diff --git a/homeassistant/components/nibe_heatpump/sensor.py b/homeassistant/components/nibe_heatpump/sensor.py index c6bac0323b9..d34fed50977 100644 --- a/homeassistant/components/nibe_heatpump/sensor.py +++ b/homeassistant/components/nibe_heatpump/sensor.py @@ -26,7 +26,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN -from .coordinator import CoilEntity, Coordinator +from .coordinator import CoilCoordinator +from .entity import CoilEntity UNIT_DESCRIPTIONS = { "°C": SensorEntityDescription( @@ -130,7 +131,7 @@ async def async_setup_entry( ) -> None: """Set up 
platform.""" - coordinator: Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator: CoilCoordinator = hass.data[DOMAIN][config_entry.entry_id] async_add_entities( Sensor(coordinator, coil, UNIT_DESCRIPTIONS.get(coil.unit)) @@ -144,7 +145,7 @@ class Sensor(CoilEntity, SensorEntity): def __init__( self, - coordinator: Coordinator, + coordinator: CoilCoordinator, coil: Coil, entity_description: SensorEntityDescription | None, ) -> None: diff --git a/homeassistant/components/nibe_heatpump/switch.py b/homeassistant/components/nibe_heatpump/switch.py index 594a8078b76..72b7c20c7b3 100644 --- a/homeassistant/components/nibe_heatpump/switch.py +++ b/homeassistant/components/nibe_heatpump/switch.py @@ -13,7 +13,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN -from .coordinator import CoilEntity, Coordinator +from .coordinator import CoilCoordinator +from .entity import CoilEntity async def async_setup_entry( @@ -23,7 +24,7 @@ async def async_setup_entry( ) -> None: """Set up platform.""" - coordinator: Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator: CoilCoordinator = hass.data[DOMAIN][config_entry.entry_id] async_add_entities( Switch(coordinator, coil) @@ -37,7 +38,7 @@ class Switch(CoilEntity, SwitchEntity): _attr_entity_category = EntityCategory.CONFIG - def __init__(self, coordinator: Coordinator, coil: Coil) -> None: + def __init__(self, coordinator: CoilCoordinator, coil: Coil) -> None: """Initialize entity.""" super().__init__(coordinator, coil, ENTITY_ID_FORMAT) diff --git a/homeassistant/components/nibe_heatpump/water_heater.py b/homeassistant/components/nibe_heatpump/water_heater.py index c60f5b6e3b2..f53df596d27 100644 --- a/homeassistant/components/nibe_heatpump/water_heater.py +++ b/homeassistant/components/nibe_heatpump/water_heater.py @@ -26,7 +26,7 @@ from .const import ( VALUES_TEMPORARY_LUX_INACTIVE, VALUES_TEMPORARY_LUX_ONE_TIME_INCREASE, ) -from .coordinator import Coordinator +from .coordinator import CoilCoordinator async def async_setup_entry( @@ -36,7 +36,7 @@ async def async_setup_entry( ) -> None: """Set up platform.""" - coordinator: Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator: CoilCoordinator = hass.data[DOMAIN][config_entry.entry_id] def water_heaters(): for key, group in WATER_HEATER_COILGROUPS.get(coordinator.series, ()).items(): @@ -48,7 +48,7 @@ async def async_setup_entry( async_add_entities(water_heaters()) -class WaterHeater(CoordinatorEntity[Coordinator], WaterHeaterEntity): +class WaterHeater(CoordinatorEntity[CoilCoordinator], WaterHeaterEntity): """Sensor entity.""" _attr_entity_category = None @@ -59,7 +59,7 @@ class WaterHeater(CoordinatorEntity[Coordinator], WaterHeaterEntity): def __init__( self, - coordinator: Coordinator, + coordinator: CoilCoordinator, key: str, desc: WaterHeaterCoilGroup, ) -> None: diff --git a/homeassistant/components/nice_go/__init__.py b/homeassistant/components/nice_go/__init__.py index ab3dc06e3c1..b217112c192 100644 --- a/homeassistant/components/nice_go/__init__.py +++ b/homeassistant/components/nice_go/__init__.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant from .coordinator import NiceGOUpdateCoordinator @@ -25,8 +25,12 @@ async def 
async_setup_entry(hass: HomeAssistant, entry: NiceGOConfigEntry) -> bo """Set up Nice G.O. from a config entry.""" coordinator = NiceGOUpdateCoordinator(hass) + entry.async_on_unload( + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, coordinator.async_ha_stop) + ) await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator entry.async_create_background_task( @@ -35,6 +39,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: NiceGOConfigEntry) -> bo "nice_go_websocket_task", ) + entry.async_on_unload(coordinator.unsubscribe) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/nice_go/config_flow.py b/homeassistant/components/nice_go/config_flow.py index 9d2c1c05518..da3940117e9 100644 --- a/homeassistant/components/nice_go/config_flow.py +++ b/homeassistant/components/nice_go/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from datetime import datetime import logging from typing import Any @@ -10,7 +11,7 @@ from nice_go import AuthFailedError, NiceGOApi import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.const import CONF_EMAIL, CONF_NAME, CONF_PASSWORD from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, DOMAIN @@ -66,3 +67,51 @@ class NiceGOConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm re-authentication.""" + errors = {} + + reauth_entry = self._get_reauth_entry() + if user_input is not None: + hub = NiceGOApi() + + try: + refresh_token = await hub.authenticate( + user_input[CONF_EMAIL], + user_input[CONF_PASSWORD], + async_get_clientsession(self.hass), + ) + except AuthFailedError: + errors["base"] = "invalid_auth" + except Exception: # noqa: BLE001 + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_update_reload_and_abort( + reauth_entry, + data={ + **user_input, + CONF_REFRESH_TOKEN: refresh_token, + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), + }, + unique_id=user_input[CONF_EMAIL], + ) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, + user_input or {CONF_EMAIL: reauth_entry.data[CONF_EMAIL]}, + ), + description_placeholders={CONF_NAME: reauth_entry.title}, + errors=errors, + ) diff --git a/homeassistant/components/nice_go/const.py b/homeassistant/components/nice_go/const.py index c3caa92c8be..a6635368f7b 100644 --- a/homeassistant/components/nice_go/const.py +++ b/homeassistant/components/nice_go/const.py @@ -2,6 +2,8 @@ from datetime import timedelta +from homeassistant.const import Platform + DOMAIN = "nice_go" # Configuration @@ -11,3 +13,22 @@ CONF_REFRESH_TOKEN = "refresh_token" CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time" REFRESH_TOKEN_EXPIRY_TIME = timedelta(days=30) + +SUPPORTED_DEVICE_TYPES = { + Platform.LIGHT: ["WallStation"], + 
Platform.SWITCH: ["WallStation"], +} +KNOWN_UNSUPPORTED_DEVICE_TYPES = { + Platform.LIGHT: ["Mms100"], + Platform.SWITCH: ["Mms100"], +} + +UNSUPPORTED_DEVICE_WARNING = ( + "Device '%s' has unknown device type '%s', " + "which is not supported by this integration. " + "We try to support it with a cover and event entity, but nothing else. " + "Please create an issue with your device model in additional info" + " at https://github.com/home-assistant/core/issues/new" + "?assignees=&labels=&projects=&template=bug_report.yml" + "&title=New%%20Nice%%20G.O.%%20device%%20type%%20'%s'%%20found" +) diff --git a/homeassistant/components/nice_go/coordinator.py b/homeassistant/components/nice_go/coordinator.py index 323e0a08fe8..07b20bbbf10 100644 --- a/homeassistant/components/nice_go/coordinator.py +++ b/homeassistant/components/nice_go/coordinator.py @@ -3,11 +3,12 @@ from __future__ import annotations import asyncio +from collections.abc import Callable from dataclasses import dataclass from datetime import datetime import json import logging -from typing import Any +from typing import TYPE_CHECKING, Any from nice_go import ( BARRIER_STATUS, @@ -20,7 +21,7 @@ from nice_go import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -35,18 +36,22 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +RECONNECT_ATTEMPTS = 3 +RECONNECT_DELAY = 5 + @dataclass class NiceGODevice: """Nice G.O. device dataclass.""" + type: str id: str name: str barrier_status: str - light_status: bool + light_status: bool | None fw_version: str connected: bool - vacation_mode: bool + vacation_mode: bool | None class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): @@ -70,9 +75,20 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): self.email = self.config_entry.data[CONF_EMAIL] self.password = self.config_entry.data[CONF_PASSWORD] self.api = NiceGOApi() - self.ws_connected = False + self._unsub_connected: Callable[[], None] | None = None + self._unsub_data: Callable[[], None] | None = None + self._unsub_connection_lost: Callable[[], None] | None = None + self.connected = False + self._hass_stopping: bool = hass.is_stopping - async def _parse_barrier(self, barrier_state: BarrierState) -> NiceGODevice | None: + @callback + def async_ha_stop(self, event: Event) -> None: + """Stop reconnecting if hass is stopping.""" + self._hass_stopping = True + + async def _parse_barrier( + self, device_type: str, barrier_state: BarrierState + ) -> NiceGODevice | None: """Parse barrier data.""" device_id = barrier_state.deviceId @@ -100,15 +116,23 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): else: barrier_status = BARRIER_STATUS[int(barrier_status_raw[2])].lower() - light_status = barrier_state.reported["lightStatus"].split(",")[0] == "1" + light_status = ( + barrier_state.reported["lightStatus"].split(",")[0] == "1" + if barrier_state.reported.get("lightStatus") + else None + ) fw_version = barrier_state.reported["deviceFwVersion"] if barrier_state.connectionState: connected = barrier_state.connectionState.connected + elif device_type == "Mms100": + connected = 
barrier_state.reported.get("radioConnected", 0) == 1 else: - connected = False - vacation_mode = barrier_state.reported["vcnMode"] + # Assume connected + connected = True + vacation_mode = barrier_state.reported.get("vcnMode", None) return NiceGODevice( + type=device_type, id=device_id, name=name, barrier_status=barrier_status, @@ -139,7 +163,8 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): barriers = await self.api.get_all_barriers() parsed_barriers = [ - await self._parse_barrier(barrier.state) for barrier in barriers + await self._parse_barrier(barrier.type, barrier.state) + for barrier in barriers ] # Parse the barriers and save them in a dictionary @@ -178,16 +203,30 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): async def client_listen(self) -> None: """Listen to the websocket for updates.""" - self.api.event(self.on_connected) - self.api.event(self.on_data) - try: - await self.api.connect(reconnect=True) - except ApiError: - _LOGGER.exception("API error") + self._unsub_connected = self.api.listen("on_connected", self.on_connected) + self._unsub_data = self.api.listen("on_data", self.on_data) + self._unsub_connection_lost = self.api.listen( + "on_connection_lost", self.on_connection_lost + ) - if not self.hass.is_stopping: - await asyncio.sleep(5) - await self.client_listen() + for _ in range(RECONNECT_ATTEMPTS): + if self._hass_stopping: + return + + try: + await self.api.connect(reconnect=True) + except ApiError: + _LOGGER.exception("API error") + else: + return + + await asyncio.sleep(RECONNECT_DELAY) + + self.async_set_update_error( + TimeoutError( + "Failed to connect to the websocket, reconnect attempts exhausted" + ) + ) async def on_data(self, data: dict[str, Any]) -> None: """Handle incoming data from the websocket.""" @@ -195,9 +234,11 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): _LOGGER.debug(data) raw_data = data["data"]["devicesStatesUpdateFeed"]["item"] parsed_data = await self._parse_barrier( + self.data[ + raw_data["deviceId"] + ].type, # Device type is not sent in device state update, and it can't change, so we just reuse the existing one BarrierState( deviceId=raw_data["deviceId"], - desired=json.loads(raw_data["desired"]), reported=json.loads(raw_data["reported"]), connectionState=ConnectionState( connected=raw_data["connectionState"]["connected"], @@ -207,7 +248,7 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): else None, version=raw_data["version"], timestamp=raw_data["timestamp"], - ) + ), ) if parsed_data is None: return @@ -220,4 +261,38 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): async def on_connected(self) -> None: """Handle the websocket connection.""" _LOGGER.debug("Connected to the websocket") + self.connected = True + await self.api.subscribe(self.organization_id) + + if not self.last_update_success: + self.async_set_updated_data(self.data) + + async def on_connection_lost(self, data: dict[str, Exception]) -> None: + """Handle the websocket connection loss. 
Don't need to do much since the library will automatically reconnect.""" + _LOGGER.debug("Connection lost to the websocket") + self.connected = False + + # Give some time for reconnection + await asyncio.sleep(RECONNECT_DELAY) + if self.connected: + _LOGGER.debug("Reconnected, not setting error") + return + + # There's likely a problem with the connection, and not the server being flaky + self.async_set_update_error(data["exception"]) + + def unsubscribe(self) -> None: + """Unsubscribe from the websocket.""" + if TYPE_CHECKING: + assert self._unsub_connected is not None + assert self._unsub_data is not None + assert self._unsub_connection_lost is not None + + self._unsub_connection_lost() + self._unsub_connected() + self._unsub_data() + self._unsub_connected = None + self._unsub_data = None + self._unsub_connection_lost = None + _LOGGER.debug("Unsubscribed from the websocket") diff --git a/homeassistant/components/nice_go/cover.py b/homeassistant/components/nice_go/cover.py index 4098d9ef426..a823e931804 100644 --- a/homeassistant/components/nice_go/cover.py +++ b/homeassistant/components/nice_go/cover.py @@ -2,17 +2,26 @@ from typing import Any +from aiohttp import ClientError +from nice_go import ApiError + from homeassistant.components.cover import ( CoverDeviceClass, CoverEntity, CoverEntityFeature, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import NiceGOConfigEntry +from .const import DOMAIN from .entity import NiceGOEntity +DEVICE_CLASSES = { + "WallStation": CoverDeviceClass.GARAGE, + "Mms100": CoverDeviceClass.GATE, +} PARALLEL_UPDATES = 1 @@ -35,7 +44,11 @@ class NiceGOCoverEntity(NiceGOEntity, CoverEntity): _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE _attr_name = None - _attr_device_class = CoverDeviceClass.GARAGE + + @property + def device_class(self) -> CoverDeviceClass: + """Return the class of this device, from component DEVICE_CLASSES.""" + return DEVICE_CLASSES.get(self.data.type, CoverDeviceClass.GARAGE) @property def is_closed(self) -> bool: @@ -62,11 +75,25 @@ class NiceGOCoverEntity(NiceGOEntity, CoverEntity): if self.is_closed: return - await self.coordinator.api.close_barrier(self._device_id) + try: + await self.coordinator.api.close_barrier(self._device_id) + except (ApiError, ClientError) as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="close_cover_error", + translation_placeholders={"exception": str(err)}, + ) from err async def async_open_cover(self, **kwargs: Any) -> None: """Open the garage door.""" if self.is_opened: return - await self.coordinator.api.open_barrier(self._device_id) + try: + await self.coordinator.api.open_barrier(self._device_id) + except (ApiError, ClientError) as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="open_cover_error", + translation_placeholders={"exception": str(err)}, + ) from err diff --git a/homeassistant/components/nice_go/event.py b/homeassistant/components/nice_go/event.py index a19511b0b11..cd9198bcd26 100644 --- a/homeassistant/components/nice_go/event.py +++ b/homeassistant/components/nice_go/event.py @@ -40,7 +40,11 @@ class NiceGOEventEntity(NiceGOEntity, EventEntity): async def async_added_to_hass(self) -> None: """Listen for events.""" await super().async_added_to_hass() - self.coordinator.api.event(self.on_barrier_obstructed) + self.async_on_remove( + self.coordinator.api.listen( + 
"on_barrier_obstructed", self.on_barrier_obstructed + ) + ) async def on_barrier_obstructed(self, data: dict[str, Any]) -> None: """Handle barrier obstructed event.""" diff --git a/homeassistant/components/nice_go/light.py b/homeassistant/components/nice_go/light.py index 4a08364688e..abb192adde1 100644 --- a/homeassistant/components/nice_go/light.py +++ b/homeassistant/components/nice_go/light.py @@ -1,14 +1,28 @@ """Nice G.O. light.""" -from typing import Any +import logging +from typing import TYPE_CHECKING, Any + +from aiohttp import ClientError +from nice_go import ApiError from homeassistant.components.light import ColorMode, LightEntity +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import NiceGOConfigEntry +from .const import ( + DOMAIN, + KNOWN_UNSUPPORTED_DEVICE_TYPES, + SUPPORTED_DEVICE_TYPES, + UNSUPPORTED_DEVICE_WARNING, +) from .entity import NiceGOEntity +_LOGGER = logging.getLogger(__name__) + async def async_setup_entry( hass: HomeAssistant, @@ -19,10 +33,20 @@ async def async_setup_entry( coordinator = config_entry.runtime_data - async_add_entities( - NiceGOLightEntity(coordinator, device_id, device_data.name) - for device_id, device_data in coordinator.data.items() - ) + entities = [] + + for device_id, device_data in coordinator.data.items(): + if device_data.type in SUPPORTED_DEVICE_TYPES[Platform.LIGHT]: + entities.append(NiceGOLightEntity(coordinator, device_id, device_data.name)) + elif device_data.type not in KNOWN_UNSUPPORTED_DEVICE_TYPES[Platform.LIGHT]: + _LOGGER.warning( + UNSUPPORTED_DEVICE_WARNING, + device_data.name, + device_data.type, + device_data.type, + ) + + async_add_entities(entities) class NiceGOLightEntity(NiceGOEntity, LightEntity): @@ -35,14 +59,30 @@ class NiceGOLightEntity(NiceGOEntity, LightEntity): @property def is_on(self) -> bool: """Return if the light is on or not.""" + if TYPE_CHECKING: + assert self.data.light_status is not None return self.data.light_status async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" - await self.coordinator.api.light_on(self._device_id) + try: + await self.coordinator.api.light_on(self._device_id) + except (ApiError, ClientError) as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="light_on_error", + translation_placeholders={"exception": str(error)}, + ) from error async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the light.""" - await self.coordinator.api.light_off(self._device_id) + try: + await self.coordinator.api.light_off(self._device_id) + except (ApiError, ClientError) as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="light_off_error", + translation_placeholders={"exception": str(error)}, + ) from error diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index c2ff8370e2a..1af23ec4d9b 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -4,7 +4,8 @@ "codeowners": ["@IceBotYT"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nice_go", + "integration_type": "hub", "iot_class": "cloud_push", - "loggers": ["nice-go"], - "requirements": ["nice-go==0.3.0"] + "loggers": ["nice_go"], + "requirements": ["nice-go==1.0.0"] } diff --git 
a/homeassistant/components/nice_go/strings.json b/homeassistant/components/nice_go/strings.json index 30a2bbf58b6..224996e6408 100644 --- a/homeassistant/components/nice_go/strings.json +++ b/homeassistant/components/nice_go/strings.json @@ -1,10 +1,25 @@ { "config": { "step": { + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::nice_go::config::step::user::data_description::email%]", + "password": "[%key:component::nice_go::config::step::user::data_description::password%]" + } + }, "user": { "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "The email address used to log in to the Nice G.O. app", + "password": "The password used to log in to the Nice G.O. app" } } }, @@ -46,5 +61,25 @@ "title": "Firmware update required", "description": "Your device ({device_name}) requires a firmware update on the Nice G.O. app in order to work with this integration. Please update the firmware on the Nice G.O. app and reconfigure this integration." } + }, + "exceptions": { + "close_cover_error": { + "message": "Error closing the barrier: {exception}" + }, + "open_cover_error": { + "message": "Error opening the barrier: {exception}" + }, + "light_on_error": { + "message": "Error while turning on the light: {exception}" + }, + "light_off_error": { + "message": "Error while turning off the light: {exception}" + }, + "switch_on_error": { + "message": "Error while turning on the switch: {exception}" + }, + "switch_off_error": { + "message": "Error while turning off the switch: {exception}" + } } } diff --git a/homeassistant/components/nice_go/switch.py b/homeassistant/components/nice_go/switch.py index 26d42dab124..e3b85528f3b 100644 --- a/homeassistant/components/nice_go/switch.py +++ b/homeassistant/components/nice_go/switch.py @@ -3,13 +3,24 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any + +from aiohttp import ClientError +from nice_go import ApiError from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import NiceGOConfigEntry +from .const import ( + DOMAIN, + KNOWN_UNSUPPORTED_DEVICE_TYPES, + SUPPORTED_DEVICE_TYPES, + UNSUPPORTED_DEVICE_WARNING, +) from .entity import NiceGOEntity _LOGGER = logging.getLogger(__name__) @@ -23,10 +34,22 @@ async def async_setup_entry( """Set up Nice G.O. 
switch.""" coordinator = config_entry.runtime_data - async_add_entities( - NiceGOSwitchEntity(coordinator, device_id, device_data.name) - for device_id, device_data in coordinator.data.items() - ) + entities = [] + + for device_id, device_data in coordinator.data.items(): + if device_data.type in SUPPORTED_DEVICE_TYPES[Platform.SWITCH]: + entities.append( + NiceGOSwitchEntity(coordinator, device_id, device_data.name) + ) + elif device_data.type not in KNOWN_UNSUPPORTED_DEVICE_TYPES[Platform.SWITCH]: + _LOGGER.warning( + UNSUPPORTED_DEVICE_WARNING, + device_data.name, + device_data.type, + device_data.type, + ) + + async_add_entities(entities) class NiceGOSwitchEntity(NiceGOEntity, SwitchEntity): @@ -38,12 +61,30 @@ class NiceGOSwitchEntity(NiceGOEntity, SwitchEntity): @property def is_on(self) -> bool: """Return if switch is on.""" + if TYPE_CHECKING: + assert self.data.vacation_mode is not None return self.data.vacation_mode async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" - await self.coordinator.api.vacation_mode_on(self.data.id) + + try: + await self.coordinator.api.vacation_mode_on(self.data.id) + except (ApiError, ClientError) as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="switch_on_error", + translation_placeholders={"exception": str(error)}, + ) from error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" - await self.coordinator.api.vacation_mode_off(self.data.id) + + try: + await self.coordinator.api.vacation_mode_off(self.data.id) + except (ApiError, ClientError) as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="switch_off_error", + translation_placeholders={"exception": str(error)}, + ) from error diff --git a/homeassistant/components/nightscout/manifest.json b/homeassistant/components/nightscout/manifest.json index 3551b29ee0b..9b075a6df87 100644 --- a/homeassistant/components/nightscout/manifest.json +++ b/homeassistant/components/nightscout/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nightscout", "iot_class": "cloud_polling", "loggers": ["py_nightscout"], - "quality_scale": "platinum", "requirements": ["py-nightscout==1.2.2"] } diff --git a/homeassistant/components/nightscout/sensor.py b/homeassistant/components/nightscout/sensor.py index 92291bdc4f9..620349ec3c3 100644 --- a/homeassistant/components/nightscout/sensor.py +++ b/homeassistant/components/nightscout/sensor.py @@ -9,9 +9,9 @@ from typing import Any from aiohttp import ClientError from py_nightscout import Api as NightscoutAPI -from homeassistant.components.sensor import SensorEntity +from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_DATE +from homeassistant.const import ATTR_DATE, UnitOfBloodGlucoseConcentration from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -37,7 +37,10 @@ async def async_setup_entry( class NightscoutSensor(SensorEntity): """Implementation of a Nightscout sensor.""" - _attr_native_unit_of_measurement = "mg/dL" + _attr_device_class = SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION + _attr_native_unit_of_measurement = ( + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER + ) _attr_icon = "mdi:cloud-question" def __init__(self, api: NightscoutAPI, name: str, unique_id: str | None) -> None: diff --git 
a/homeassistant/components/niko_home_control/__init__.py b/homeassistant/components/niko_home_control/__init__.py index 2cb5c70d1dd..bdbb8d6b85f 100644 --- a/homeassistant/components/niko_home_control/__init__.py +++ b/homeassistant/components/niko_home_control/__init__.py @@ -1 +1,83 @@ -"""The niko_home_control component.""" +"""The Niko home control integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from nclib.errors import NetcatError +from nikohomecontrol import NikoHomeControl + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.util import Throttle + +PLATFORMS: list[Platform] = [Platform.LIGHT] + +type NikoHomeControlConfigEntry = ConfigEntry[NikoHomeControlData] + + +_LOGGER = logging.getLogger(__name__) +MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) + + +async def async_setup_entry( + hass: HomeAssistant, entry: NikoHomeControlConfigEntry +) -> bool: + """Set Niko Home Control from a config entry.""" + try: + controller = NikoHomeControl({"ip": entry.data[CONF_HOST], "port": 8000}) + niko_data = NikoHomeControlData(hass, controller) + await niko_data.async_update() + except NetcatError as err: + raise ConfigEntryNotReady("cannot connect to controller.") from err + except OSError as err: + raise ConfigEntryNotReady( + "unknown error while connecting to controller." + ) from err + + entry.runtime_data = niko_data + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: NikoHomeControlConfigEntry +) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +class NikoHomeControlData: + """The class for handling data retrieval.""" + + def __init__(self, hass, nhc): + """Set up Niko Home Control Data object.""" + self.nhc = nhc + self.hass = hass + self.available = True + self.data = {} + self._system_info = None + + @Throttle(MIN_TIME_BETWEEN_UPDATES) + async def async_update(self): + """Get the latest data from the NikoHomeControl API.""" + _LOGGER.debug("Fetching async state in bulk") + try: + self.data = await self.hass.async_add_executor_job( + self.nhc.list_actions_raw + ) + self.available = True + except OSError as ex: + _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) + self.available = False + + def get_state(self, aid): + """Find and filter state based on action id.""" + for state in self.data: + if state["id"] == aid: + return state["value1"] + _LOGGER.error("Failed to retrieve state off unknown light") + return None diff --git a/homeassistant/components/niko_home_control/config_flow.py b/homeassistant/components/niko_home_control/config_flow.py new file mode 100644 index 00000000000..9174a932534 --- /dev/null +++ b/homeassistant/components/niko_home_control/config_flow.py @@ -0,0 +1,66 @@ +"""Config flow for the Niko home control integration.""" + +from __future__ import annotations + +from typing import Any + +from nikohomecontrol import NikoHomeControlConnection +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST + +from .const import DOMAIN + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + } +) + + +def test_connection(host: str) -> str | None: + """Test if we 
can connect to the Niko Home Control controller.""" + try: + NikoHomeControlConnection(host, 8000) + except Exception: # noqa: BLE001 + return "cannot_connect" + return None + + +class NikoHomeControlConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Niko Home Control.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors = {} + + if user_input is not None: + self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) + error = test_connection(user_input[CONF_HOST]) + if not error: + return self.async_create_entry( + title="Niko Home Control", + data=user_input, + ) + errors["base"] = error + + return self.async_show_form( + step_id="user", data_schema=DATA_SCHEMA, errors=errors + ) + + async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + """Import a config entry.""" + self._async_abort_entries_match({CONF_HOST: import_info[CONF_HOST]}) + error = test_connection(import_info[CONF_HOST]) + + if not error: + return self.async_create_entry( + title="Niko Home Control", + data={CONF_HOST: import_info[CONF_HOST]}, + ) + return self.async_abort(reason=error) diff --git a/homeassistant/components/niko_home_control/const.py b/homeassistant/components/niko_home_control/const.py new file mode 100644 index 00000000000..202b031b9a2 --- /dev/null +++ b/homeassistant/components/niko_home_control/const.py @@ -0,0 +1,3 @@ +"""Constants for niko_home_control integration.""" + +DOMAIN = "niko_home_control" diff --git a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py index b2d41f3a41e..f2bf302eab7 100644 --- a/homeassistant/components/niko_home_control/light.py +++ b/homeassistant/components/niko_home_control/light.py @@ -1,4 +1,4 @@ -"""Support for Niko Home Control.""" +"""Light platform Niko Home Control.""" from __future__ import annotations @@ -6,7 +6,6 @@ from datetime import timedelta import logging from typing import Any -import nikohomecontrol import voluptuous as vol from homeassistant.components.light import ( @@ -16,18 +15,22 @@ from homeassistant.components.light import ( LightEntity, brightness_supported, ) +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import PlatformNotReady +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import issue_registry as ir import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import Throttle + +from . 
import NikoHomeControlConfigEntry +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) SCAN_INTERVAL = timedelta(seconds=30) +# delete after 2025.7.0 PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) @@ -38,20 +41,56 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Niko Home Control light platform.""" - host = config[CONF_HOST] - - try: - nhc = nikohomecontrol.NikoHomeControl( - {"ip": host, "port": 8000, "timeout": 20000} + # Start import flow + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + if ( + result.get("type") == FlowResultType.ABORT + and result.get("reason") != "already_configured" + ): + ir.async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_import_issue_{result['reason']}", + breaks_in_ha_version="2025.7.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Niko Home Control", + }, ) - niko_data = NikoHomeControlData(hass, nhc) - await niko_data.async_update() - except OSError as err: - _LOGGER.error("Unable to access %s (%s)", host, err) - raise PlatformNotReady from err + return + + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.7.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Niko Home Control", + }, + ) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: NikoHomeControlConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Niko Home Control light entry.""" + niko_data = entry.runtime_data async_add_entities( - [NikoHomeControlLight(light, niko_data) for light in nhc.list_actions()], True + NikoHomeControlLight(light, niko_data) for light in niko_data.nhc.list_actions() ) @@ -88,36 +127,3 @@ class NikoHomeControlLight(LightEntity): self._attr_is_on = state != 0 if brightness_supported(self.supported_color_modes): self._attr_brightness = state * 2.55 - - -class NikoHomeControlData: - """The class for handling data retrieval.""" - - def __init__(self, hass, nhc): - """Set up Niko Home Control Data object.""" - self._nhc = nhc - self.hass = hass - self.available = True - self.data = {} - self._system_info = None - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - async def async_update(self): - """Get the latest data from the NikoHomeControl API.""" - _LOGGER.debug("Fetching async state in bulk") - try: - self.data = await self.hass.async_add_executor_job( - self._nhc.list_actions_raw - ) - self.available = True - except OSError as ex: - _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) - self.available = False - - def get_state(self, aid): - """Find and filter state based on action id.""" - for state in self.data: - if state["id"] == aid: - return state["value1"] - _LOGGER.error("Failed to retrieve state off unknown light") - return None diff --git a/homeassistant/components/niko_home_control/manifest.json b/homeassistant/components/niko_home_control/manifest.json index 72f9dd2f6b3..194596d534f 100644 --- a/homeassistant/components/niko_home_control/manifest.json +++ b/homeassistant/components/niko_home_control/manifest.json 
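The rewritten niko_home_control setup above types the config entry with its runtime data (type NikoHomeControlConfigEntry = ConfigEntry[NikoHomeControlData]) and stores the data object on entry.runtime_data, so platforms no longer go through hass.data. A generic sketch of that pattern with placeholder names:

from __future__ import annotations

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant


class ExampleData:
    """Whatever the integration needs at runtime (client, cached state, ...)."""


type ExampleConfigEntry = ConfigEntry[ExampleData]

PLATFORMS: list[Platform] = [Platform.LIGHT]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Store runtime data on the entry and forward to platforms."""
    entry.runtime_data = ExampleData()
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Unload platforms; runtime_data is discarded with the entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)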
@@ -1,7 +1,8 @@ { "domain": "niko_home_control", "name": "Niko Home Control", - "codeowners": [], + "codeowners": ["@VandeurenGlenn"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/niko_home_control", "iot_class": "local_polling", "loggers": ["nikohomecontrol"], diff --git a/homeassistant/components/niko_home_control/strings.json b/homeassistant/components/niko_home_control/strings.json new file mode 100644 index 00000000000..495dca94c0c --- /dev/null +++ b/homeassistant/components/niko_home_control/strings.json @@ -0,0 +1,27 @@ +{ + "config": { + "step": { + "user": { + "description": "Set up your Niko Home Control instance.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the Niko Home Control controller." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "issues": { + "deprecated_yaml_import_issue_cannot_connect": { + "title": "YAML import failed due to a connection error", + "description": "Configuring {integration_title} using YAML is being removed but there was a connect error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually." + } + } +} diff --git a/homeassistant/components/nilu/manifest.json b/homeassistant/components/nilu/manifest.json index 1eabf9e726e..d99a918ef4f 100644 --- a/homeassistant/components/nilu/manifest.json +++ b/homeassistant/components/nilu/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nilu", "iot_class": "cloud_polling", "loggers": ["niluclient"], + "quality_scale": "legacy", "requirements": ["niluclient==0.1.2"] } diff --git a/homeassistant/components/nina/binary_sensor.py b/homeassistant/components/nina/binary_sensor.py index 397ced0f5d3..10d3008fd82 100644 --- a/homeassistant/components/nina/binary_sensor.py +++ b/homeassistant/components/nina/binary_sensor.py @@ -25,6 +25,7 @@ from .const import ( ATTR_SENT, ATTR_SEVERITY, ATTR_START, + ATTR_WEB, CONF_MESSAGE_SLOTS, CONF_REGIONS, DOMAIN, @@ -103,6 +104,7 @@ class NINAMessage(CoordinatorEntity[NINADataUpdateCoordinator], BinarySensorEnti ATTR_SEVERITY: data.severity, ATTR_RECOMMENDED_ACTIONS: data.recommended_actions, ATTR_AFFECTED_AREAS: data.affected_areas, + ATTR_WEB: data.web, ATTR_ID: data.id, ATTR_SENT: data.sent, ATTR_START: data.start, diff --git a/homeassistant/components/nina/config_flow.py b/homeassistant/components/nina/config_flow.py index 1fee6430ffc..a1ba9ae0c61 100644 --- a/homeassistant/components/nina/config_flow.py +++ b/homeassistant/components/nina/config_flow.py @@ -116,7 +116,7 @@ class NinaConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except Exception as err: # noqa: BLE001 _LOGGER.exception("Unexpected exception: %s", err) - return self.async_abort(reason="unknown") + errors["base"] = "unknown" self.regions = split_regions(self._all_region_codes_sorted, self.regions) @@ -171,8 +171,7 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - 
self.config_entry = config_entry - self.data = dict(self.config_entry.data) + self.data = dict(config_entry.data) self._all_region_codes_sorted: dict[str, str] = {} self.regions: dict[str, dict[str, Any]] = {} @@ -182,9 +181,11 @@ class OptionsFlowHandler(OptionsFlow): if name not in self.data: self.data[name] = [] - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" - errors: dict[str, Any] = {} + errors: dict[str, str] = {} if not self._all_region_codes_sorted: nina: Nina = Nina(async_get_clientsession(self.hass)) @@ -197,7 +198,7 @@ class OptionsFlowHandler(OptionsFlow): errors["base"] = "cannot_connect" except Exception as err: # noqa: BLE001 _LOGGER.exception("Unexpected exception: %s", err) - return self.async_abort(reason="unknown") + errors["base"] = "unknown" self.regions = split_regions(self._all_region_codes_sorted, self.regions) @@ -244,33 +245,33 @@ class OptionsFlowHandler(OptionsFlow): self.config_entry, data=user_input ) - return self.async_create_entry(title="", data=None) + return self.async_create_entry(title="", data={}) errors["base"] = "no_selection" + schema: VolDictType = { + **{ + vol.Optional(region, default=self.data[region]): cv.multi_select( + self.regions[region] + ) + for region in CONST_REGIONS + }, + vol.Required( + CONF_MESSAGE_SLOTS, + default=self.data[CONF_MESSAGE_SLOTS], + ): vol.All(int, vol.Range(min=1, max=20)), + vol.Optional( + CONF_HEADLINE_FILTER, + default=self.data[CONF_HEADLINE_FILTER], + ): cv.string, + vol.Optional( + CONF_AREA_FILTER, + default=self.data[CONF_AREA_FILTER], + ): cv.string, + } + return self.async_show_form( step_id="init", - data_schema=vol.Schema( - { - **{ - vol.Optional( - region, default=self.data[region] - ): cv.multi_select(self.regions[region]) - for region in CONST_REGIONS - }, - vol.Required( - CONF_MESSAGE_SLOTS, - default=self.data[CONF_MESSAGE_SLOTS], - ): vol.All(int, vol.Range(min=1, max=20)), - vol.Optional( - CONF_HEADLINE_FILTER, - default=self.data[CONF_HEADLINE_FILTER], - ): cv.string, - vol.Optional( - CONF_AREA_FILTER, - default=self.data[CONF_AREA_FILTER], - ): cv.string, - } - ), + data_schema=vol.Schema(schema), errors=errors, ) diff --git a/homeassistant/components/nina/const.py b/homeassistant/components/nina/const.py index 1e755056079..47194c4c2de 100644 --- a/homeassistant/components/nina/const.py +++ b/homeassistant/components/nina/const.py @@ -27,6 +27,7 @@ ATTR_SENDER: str = "sender" ATTR_SEVERITY: str = "severity" ATTR_RECOMMENDED_ACTIONS: str = "recommended_actions" ATTR_AFFECTED_AREAS: str = "affected_areas" +ATTR_WEB: str = "web" ATTR_ID: str = "id" ATTR_SENT: str = "sent" ATTR_START: str = "start" diff --git a/homeassistant/components/nina/coordinator.py b/homeassistant/components/nina/coordinator.py index c731c7a62d7..2d9548f3d12 100644 --- a/homeassistant/components/nina/coordinator.py +++ b/homeassistant/components/nina/coordinator.py @@ -27,6 +27,7 @@ class NinaWarningData: severity: str recommended_actions: str affected_areas: str + web: str sent: str start: str expires: str @@ -127,6 +128,7 @@ class NINADataUpdateCoordinator( raw_warn.severity, " ".join([str(action) for action in raw_warn.recommended_actions]), affected_areas_string, + raw_warn.web or "", raw_warn.sent or "", raw_warn.start or "", raw_warn.expires or "", diff --git a/homeassistant/components/nina/manifest.json b/homeassistant/components/nina/manifest.json index 53a54f26dcf..45212c0220b 
100644 --- a/homeassistant/components/nina/manifest.json +++ b/homeassistant/components/nina/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/nina", "iot_class": "cloud_polling", "loggers": ["pynina"], - "requirements": ["PyNINA==0.3.3"], + "requirements": ["PyNINA==0.3.4"], "single_config_entry": true } diff --git a/homeassistant/components/nissan_leaf/__init__.py b/homeassistant/components/nissan_leaf/__init__.py index 2cbec236261..865ae33b38c 100644 --- a/homeassistant/components/nissan_leaf/__init__.py +++ b/homeassistant/components/nissan_leaf/__init__.py @@ -17,14 +17,10 @@ from pycarwings2.responses import ( import voluptuous as vol from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME, Platform -from homeassistant.core import CALLBACK_TYPE, HomeAssistant, ServiceCall, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import load_platform -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.helpers.typing import ConfigType from homeassistant.util.dt import utcnow @@ -52,6 +48,7 @@ from .const import ( PYCARWINGS2_SLEEP, RESTRICTED_BATTERY, RESTRICTED_INTERVAL, + SIGNAL_UPDATE_LEAF, ) _LOGGER = logging.getLogger(__name__) @@ -90,7 +87,6 @@ CONFIG_SCHEMA = vol.Schema( PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR, Platform.SWITCH] -SIGNAL_UPDATE_LEAF = "nissan_leaf_update" SERVICE_UPDATE_LEAF = "update" SERVICE_START_CHARGE_LEAF = "start_charge" @@ -496,44 +492,3 @@ class LeafDataStore: self._remove_listener = async_track_point_in_utc_time( self.hass, self.async_update_data, update_at ) - - -class LeafEntity(Entity): - """Base class for Nissan Leaf entity.""" - - def __init__(self, car: LeafDataStore) -> None: - """Store LeafDataStore upon init.""" - self.car = car - - def log_registration(self) -> None: - """Log registration.""" - _LOGGER.debug( - "Registered %s integration for VIN %s", - self.__class__.__name__, - self.car.leaf.vin, - ) - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return default attributes for Nissan leaf entities.""" - return { - "next_update": self.car.next_update, - "last_attempt": self.car.last_check, - "updated_on": self.car.last_battery_response, - "update_in_progress": self.car.request_in_progress, - "vin": self.car.leaf.vin, - } - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self.log_registration() - self.async_on_remove( - async_dispatcher_connect( - self.car.hass, SIGNAL_UPDATE_LEAF, self._update_callback - ) - ) - - @callback - def _update_callback(self) -> None: - """Update the state.""" - self.async_schedule_update_ha_state(True) diff --git a/homeassistant/components/nissan_leaf/binary_sensor.py b/homeassistant/components/nissan_leaf/binary_sensor.py index 3b15fabe382..7938b314deb 100644 --- a/homeassistant/components/nissan_leaf/binary_sensor.py +++ b/homeassistant/components/nissan_leaf/binary_sensor.py @@ -12,8 +12,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import LeafDataStore, LeafEntity +from . import LeafDataStore from .const import DATA_CHARGING, DATA_LEAF, DATA_PLUGGED_IN +from .entity import LeafEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/nissan_leaf/button.py b/homeassistant/components/nissan_leaf/button.py index aa2bbbbca9b..6a5d051751b 100644 --- a/homeassistant/components/nissan_leaf/button.py +++ b/homeassistant/components/nissan_leaf/button.py @@ -9,7 +9,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DATA_CHARGING, DATA_LEAF, LeafEntity +from . import DATA_CHARGING, DATA_LEAF +from .entity import LeafEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/nissan_leaf/const.py b/homeassistant/components/nissan_leaf/const.py index 299576b86a7..22842fbbc72 100644 --- a/homeassistant/components/nissan_leaf/const.py +++ b/homeassistant/components/nissan_leaf/const.py @@ -34,3 +34,5 @@ RESTRICTED_BATTERY: Final = 2 MAX_RESPONSE_ATTEMPTS: Final = 3 PYCARWINGS2_SLEEP: Final = 40 + +SIGNAL_UPDATE_LEAF = "nissan_leaf_update" diff --git a/homeassistant/components/nissan_leaf/entity.py b/homeassistant/components/nissan_leaf/entity.py new file mode 100644 index 00000000000..73813c8931e --- /dev/null +++ b/homeassistant/components/nissan_leaf/entity.py @@ -0,0 +1,56 @@ +"""Support for the Nissan Leaf Carwings/Nissan Connect API.""" + +from __future__ import annotations + +import logging +from typing import Any + +from homeassistant.core import callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from . import LeafDataStore +from .const import SIGNAL_UPDATE_LEAF + +_LOGGER = logging.getLogger(__name__) + + +class LeafEntity(Entity): + """Base class for Nissan Leaf entity.""" + + def __init__(self, car: LeafDataStore) -> None: + """Store LeafDataStore upon init.""" + self.car = car + + def log_registration(self) -> None: + """Log registration.""" + _LOGGER.debug( + "Registered %s integration for VIN %s", + self.__class__.__name__, + self.car.leaf.vin, + ) + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return default attributes for Nissan leaf entities.""" + return { + "next_update": self.car.next_update, + "last_attempt": self.car.last_check, + "updated_on": self.car.last_battery_response, + "update_in_progress": self.car.request_in_progress, + "vin": self.car.leaf.vin, + } + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + self.log_registration() + self.async_on_remove( + async_dispatcher_connect( + self.car.hass, SIGNAL_UPDATE_LEAF, self._update_callback + ) + ) + + @callback + def _update_callback(self) -> None: + """Update the state.""" + self.async_schedule_update_ha_state(True) diff --git a/homeassistant/components/nissan_leaf/icons.json b/homeassistant/components/nissan_leaf/icons.json index 5da03ed5f1a..832fce90c08 100644 --- a/homeassistant/components/nissan_leaf/icons.json +++ b/homeassistant/components/nissan_leaf/icons.json @@ -1,6 +1,10 @@ { "services": { - "start_charge": "mdi:flash", - "update": "mdi:update" + "start_charge": { + "service": "mdi:flash" + }, + "update": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/nissan_leaf/manifest.json b/homeassistant/components/nissan_leaf/manifest.json index 9c3df39c69f..9ad8773ee44 100644 --- 
a/homeassistant/components/nissan_leaf/manifest.json +++ b/homeassistant/components/nissan_leaf/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nissan_leaf", "iot_class": "cloud_polling", "loggers": ["pycarwings2"], + "quality_scale": "legacy", "requirements": ["pycarwings2==2.14"] } diff --git a/homeassistant/components/nissan_leaf/sensor.py b/homeassistant/components/nissan_leaf/sensor.py index bde1719e9b1..71dda39db1a 100644 --- a/homeassistant/components/nissan_leaf/sensor.py +++ b/homeassistant/components/nissan_leaf/sensor.py @@ -13,7 +13,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateTyp from homeassistant.util.unit_conversion import DistanceConverter from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM -from . import LeafDataStore, LeafEntity +from . import LeafDataStore from .const import ( DATA_BATTERY, DATA_CHARGING, @@ -21,6 +21,7 @@ from .const import ( DATA_RANGE_AC, DATA_RANGE_AC_OFF, ) +from .entity import LeafEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/nissan_leaf/switch.py b/homeassistant/components/nissan_leaf/switch.py index 39f875ff95f..82a84567fec 100644 --- a/homeassistant/components/nissan_leaf/switch.py +++ b/homeassistant/components/nissan_leaf/switch.py @@ -10,8 +10,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import LeafDataStore, LeafEntity +from . import LeafDataStore from .const import DATA_CLIMATE, DATA_LEAF +from .entity import LeafEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/nmap_tracker/__init__.py b/homeassistant/components/nmap_tracker/__init__.py index ffc4b975308..dcb4e1361fd 100644 --- a/homeassistant/components/nmap_tracker/__init__.py +++ b/homeassistant/components/nmap_tracker/__init__.py @@ -380,7 +380,7 @@ class NmapDeviceScanner: ) if mac is None: self._async_device_offline(ipv4, "No MAC address found", now) - _LOGGER.info("No MAC address found for %s", ipv4) + _LOGGER.warning("No MAC address found for %s", ipv4) continue formatted_mac = format_mac(mac) diff --git a/homeassistant/components/nmap_tracker/config_flow.py b/homeassistant/components/nmap_tracker/config_flow.py index b724dca1a81..e05150995aa 100644 --- a/homeassistant/components/nmap_tracker/config_flow.py +++ b/homeassistant/components/nmap_tracker/config_flow.py @@ -213,6 +213,6 @@ class NmapTrackerConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) diff --git a/homeassistant/components/nmap_tracker/device_tracker.py b/homeassistant/components/nmap_tracker/device_tracker.py index 3f07926eaef..c8e7e7c25ea 100644 --- a/homeassistant/components/nmap_tracker/device_tracker.py +++ b/homeassistant/components/nmap_tracker/device_tracker.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from typing import Any -from homeassistant.components.device_tracker import ScannerEntity, SourceType +from homeassistant.components.device_tracker import ScannerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import 
async_dispatcher_connect @@ -95,11 +95,6 @@ class NmapTrackerEntity(ScannerEntity): return None return short_hostname(self._device.hostname) - @property - def source_type(self) -> SourceType: - """Return tracker source type.""" - return SourceType.ROUTER - @callback def async_process_update(self, online: bool) -> None: """Update device.""" diff --git a/homeassistant/components/nmap_tracker/manifest.json b/homeassistant/components/nmap_tracker/manifest.json index 08d9b94cf2d..5b2dab50812 100644 --- a/homeassistant/components/nmap_tracker/manifest.json +++ b/homeassistant/components/nmap_tracker/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/nmap_tracker", "iot_class": "local_polling", "loggers": ["nmap"], - "requirements": ["netmap==0.7.0.2", "getmac==0.9.4", "aiooui==0.1.6"] + "requirements": ["netmap==0.7.0.2", "getmac==0.9.4", "aiooui==0.1.7"] } diff --git a/homeassistant/components/nmbs/manifest.json b/homeassistant/components/nmbs/manifest.json index 24aadb6b4f0..e17d1227bed 100644 --- a/homeassistant/components/nmbs/manifest.json +++ b/homeassistant/components/nmbs/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nmbs", "iot_class": "cloud_polling", "loggers": ["pyrail"], + "quality_scale": "legacy", "requirements": ["pyrail==0.0.3"] } diff --git a/homeassistant/components/no_ip/manifest.json b/homeassistant/components/no_ip/manifest.json index cf995e34b47..8e1e247143e 100644 --- a/homeassistant/components/no_ip/manifest.json +++ b/homeassistant/components/no_ip/manifest.json @@ -3,5 +3,6 @@ "name": "No-IP.com", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/no_ip", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/noaa_tides/manifest.json b/homeassistant/components/noaa_tides/manifest.json index 85c6fbcb788..8cc81857770 100644 --- a/homeassistant/components/noaa_tides/manifest.json +++ b/homeassistant/components/noaa_tides/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/noaa_tides", "iot_class": "cloud_polling", "loggers": ["noaa_coops"], + "quality_scale": "legacy", "requirements": ["noaa-coops==0.1.9"] } diff --git a/homeassistant/components/nobo_hub/climate.py b/homeassistant/components/nobo_hub/climate.py index f1e2f4a78f0..a089209cde5 100644 --- a/homeassistant/components/nobo_hub/climate.py +++ b/homeassistant/components/nobo_hub/climate.py @@ -82,7 +82,6 @@ class NoboZone(ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_target_temperature_step = 1 # Need to poll to get preset change when in HVACMode.AUTO, so can't set _attr_should_poll = False - _enable_turn_on_off_backwards_compatibility = False def __init__(self, zone_id, hub: nobo, override_type) -> None: """Initialize the climate device.""" diff --git a/homeassistant/components/nobo_hub/config_flow.py b/homeassistant/components/nobo_hub/config_flow.py index 8aed520f21e..7e1ae4c1d9b 100644 --- a/homeassistant/components/nobo_hub/config_flow.py +++ b/homeassistant/components/nobo_hub/config_flow.py @@ -175,7 +175,7 @@ class NoboHubConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class NoboHubConnectError(HomeAssistantError): @@ -190,10 +190,6 @@ class NoboHubConnectError(HomeAssistantError): class 
OptionsFlowHandler(OptionsFlow): """Handles options flow for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" diff --git a/homeassistant/components/nordpool/__init__.py b/homeassistant/components/nordpool/__init__.py new file mode 100644 index 00000000000..82db98e2148 --- /dev/null +++ b/homeassistant/components/nordpool/__init__.py @@ -0,0 +1,36 @@ +"""The Nord Pool component.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.util import dt as dt_util + +from .const import DOMAIN, PLATFORMS +from .coordinator import NordPoolDataUpdateCoordinator + +type NordPoolConfigEntry = ConfigEntry[NordPoolDataUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: + """Set up Nord Pool from a config entry.""" + + coordinator = NordPoolDataUpdateCoordinator(hass, entry) + await coordinator.fetch_data(dt_util.utcnow()) + if not coordinator.last_update_success: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="initial_update_failed", + translation_placeholders={"error": str(coordinator.last_exception)}, + ) + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: + """Unload Nord Pool config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/nordpool/config_flow.py b/homeassistant/components/nordpool/config_flow.py new file mode 100644 index 00000000000..1d75d825e47 --- /dev/null +++ b/homeassistant/components/nordpool/config_flow.py @@ -0,0 +1,115 @@ +"""Adds config flow for Nord Pool integration.""" + +from __future__ import annotations + +from typing import Any + +from pynordpool import ( + Currency, + NordPoolClient, + NordPoolEmptyResponseError, + NordPoolError, +) +from pynordpool.const import AREAS +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_CURRENCY +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) +from homeassistant.util import dt as dt_util + +from .const import CONF_AREAS, DEFAULT_NAME, DOMAIN + +SELECT_AREAS = [ + SelectOptionDict(value=area, label=name) for area, name in AREAS.items() +] +SELECT_CURRENCY = [currency.value for currency in Currency] + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_AREAS, default=[]): SelectSelector( + SelectSelectorConfig( + options=SELECT_AREAS, + multiple=True, + mode=SelectSelectorMode.DROPDOWN, + sort=True, + ) + ), + vol.Required(CONF_CURRENCY, default="SEK"): SelectSelector( + SelectSelectorConfig( + options=SELECT_CURRENCY, + multiple=False, + mode=SelectSelectorMode.DROPDOWN, + sort=True, + ) + ), + } +) + + +async def test_api(hass: HomeAssistant, user_input: dict[str, Any]) -> dict[str, str]: + """Test fetch data from Nord Pool.""" + client = 
NordPoolClient(async_get_clientsession(hass)) + try: + await client.async_get_delivery_period( + dt_util.now(), + Currency(user_input[CONF_CURRENCY]), + user_input[CONF_AREAS], + ) + except NordPoolEmptyResponseError: + return {"base": "no_data"} + except NordPoolError: + return {"base": "cannot_connect"} + + return {} + + +class NordpoolConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Nord Pool integration.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input: + errors = await test_api(self.hass, user_input) + if not errors: + return self.async_create_entry( + title=DEFAULT_NAME, + data=user_input, + ) + + return self.async_show_form( + step_id="user", + data_schema=DATA_SCHEMA, + errors=errors, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the reconfiguration step.""" + errors: dict[str, str] = {} + if user_input: + errors = await test_api(self.hass, user_input) + reconfigure_entry = self._get_reconfigure_entry() + if not errors: + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates=user_input + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/nordpool/const.py b/homeassistant/components/nordpool/const.py new file mode 100644 index 00000000000..19a978d946c --- /dev/null +++ b/homeassistant/components/nordpool/const.py @@ -0,0 +1,14 @@ +"""Constants for Nord Pool.""" + +import logging + +from homeassistant.const import Platform + +LOGGER = logging.getLogger(__package__) + +DEFAULT_SCAN_INTERVAL = 60 +DOMAIN = "nordpool" +PLATFORMS = [Platform.SENSOR] +DEFAULT_NAME = "Nord Pool" + +CONF_AREAS = "areas" diff --git a/homeassistant/components/nordpool/coordinator.py b/homeassistant/components/nordpool/coordinator.py new file mode 100644 index 00000000000..e6b36f7deee --- /dev/null +++ b/homeassistant/components/nordpool/coordinator.py @@ -0,0 +1,99 @@ +"""DataUpdateCoordinator for the Nord Pool integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +from datetime import datetime, timedelta +from typing import TYPE_CHECKING + +from pynordpool import ( + Currency, + DeliveryPeriodData, + NordPoolClient, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) + +from homeassistant.const import CONF_CURRENCY +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.event import async_track_point_in_utc_time +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.util import dt as dt_util + +from .const import CONF_AREAS, DOMAIN, LOGGER + +if TYPE_CHECKING: + from . 
import NordPoolConfigEntry + + +class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): + """A Nord Pool Data Update Coordinator.""" + + config_entry: NordPoolConfigEntry + + def __init__(self, hass: HomeAssistant, config_entry: NordPoolConfigEntry) -> None: + """Initialize the Nord Pool coordinator.""" + super().__init__( + hass, + LOGGER, + config_entry=config_entry, + name=DOMAIN, + ) + self.client = NordPoolClient(session=async_get_clientsession(hass)) + self.unsub: Callable[[], None] | None = None + + def get_next_interval(self, now: datetime) -> datetime: + """Compute next time an update should occur.""" + next_hour = dt_util.utcnow() + timedelta(hours=1) + next_run = datetime( + next_hour.year, + next_hour.month, + next_hour.day, + next_hour.hour, + tzinfo=dt_util.UTC, + ) + LOGGER.debug("Next update at %s", next_run) + return next_run + + async def async_shutdown(self) -> None: + """Cancel any scheduled call, and ignore new runs.""" + await super().async_shutdown() + if self.unsub: + self.unsub() + self.unsub = None + + async def fetch_data(self, now: datetime) -> None: + """Fetch data from Nord Pool.""" + self.unsub = async_track_point_in_utc_time( + self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow()) + ) + data = await self.api_call() + if data: + self.async_set_updated_data(data) + + async def api_call(self, retry: int = 3) -> DeliveryPeriodData | None: + """Make api call to retrieve data with retry if failure.""" + data = None + try: + data = await self.client.async_get_delivery_period( + dt_util.now(), + Currency(self.config_entry.data[CONF_CURRENCY]), + self.config_entry.data[CONF_AREAS], + ) + except ( + NordPoolEmptyResponseError, + NordPoolResponseError, + NordPoolError, + ) as error: + LOGGER.debug("Connection error: %s", error) + if retry > 0: + next_run = (4 - retry) * 15 + LOGGER.debug("Wait %d seconds for next try", next_run) + await asyncio.sleep(next_run) + return await self.api_call(retry - 1) + self.async_set_update_error(error) + + return data diff --git a/homeassistant/components/nordpool/diagnostics.py b/homeassistant/components/nordpool/diagnostics.py new file mode 100644 index 00000000000..3160c2bfa6d --- /dev/null +++ b/homeassistant/components/nordpool/diagnostics.py @@ -0,0 +1,16 @@ +"""Diagnostics support for Nord Pool.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
import NordPoolConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: NordPoolConfigEntry +) -> dict[str, Any]: + """Return diagnostics for Nord Pool config entry.""" + return {"raw": entry.runtime_data.data.raw} diff --git a/homeassistant/components/nordpool/entity.py b/homeassistant/components/nordpool/entity.py new file mode 100644 index 00000000000..ec3264cd2e3 --- /dev/null +++ b/homeassistant/components/nordpool/entity.py @@ -0,0 +1,33 @@ +"""Base entity for Nord Pool.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import NordPoolDataUpdateCoordinator + + +class NordpoolBaseEntity(CoordinatorEntity[NordPoolDataUpdateCoordinator]): + """Representation of a Nord Pool base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: EntityDescription, + area: str, + ) -> None: + """Initiate Nord Pool base entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = f"{area}-{entity_description.key}" + self.area = area + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, area)}, + name=f"Nord Pool {area}", + entry_type=DeviceEntryType.SERVICE, + ) diff --git a/homeassistant/components/nordpool/icons.json b/homeassistant/components/nordpool/icons.json new file mode 100644 index 00000000000..85434a2d09b --- /dev/null +++ b/homeassistant/components/nordpool/icons.json @@ -0,0 +1,42 @@ +{ + "entity": { + "sensor": { + "updated_at": { + "default": "mdi:clock-outline" + }, + "currency": { + "default": "mdi:currency-usd" + }, + "exchange_rate": { + "default": "mdi:currency-usd" + }, + "current_price": { + "default": "mdi:cash" + }, + "last_price": { + "default": "mdi:cash" + }, + "next_price": { + "default": "mdi:cash" + }, + "block_average": { + "default": "mdi:cash-multiple" + }, + "block_min": { + "default": "mdi:cash-multiple" + }, + "block_max": { + "default": "mdi:cash-multiple" + }, + "block_start_time": { + "default": "mdi:clock-time-twelve-outline" + }, + "block_end_time": { + "default": "mdi:clock-time-two-outline" + }, + "daily_average": { + "default": "mdi:cash-multiple" + } + } + } +} diff --git a/homeassistant/components/nordpool/manifest.json b/homeassistant/components/nordpool/manifest.json new file mode 100644 index 00000000000..215494e10a0 --- /dev/null +++ b/homeassistant/components/nordpool/manifest.json @@ -0,0 +1,13 @@ +{ + "domain": "nordpool", + "name": "Nord Pool", + "codeowners": ["@gjohansson-ST"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/nordpool", + "integration_type": "hub", + "iot_class": "cloud_polling", + "loggers": ["pynordpool"], + "quality_scale": "platinum", + "requirements": ["pynordpool==0.2.3"], + "single_config_entry": true +} diff --git a/homeassistant/components/nordpool/quality_scale.yaml b/homeassistant/components/nordpool/quality_scale.yaml new file mode 100644 index 00000000000..dada1115715 --- /dev/null +++ b/homeassistant/components/nordpool/quality_scale.yaml @@ -0,0 +1,95 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: 
done
+  entity-unique-id: done
+  has-entity-name: done
+  entity-event-setup:
+    status: exempt
+    comment: |
+      Entities don't subscribe to events.
+  dependency-transparency: done
+  action-setup:
+    status: exempt
+    comment: |
+      This integration does not provide additional actions.
+  common-modules: done
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: done
+  docs-actions:
+    status: exempt
+    comment: |
+      This integration does not provide additional actions.
+  brands: done
+  # Silver
+  config-entry-unloading: done
+  log-when-unavailable: done
+  entity-unavailable: done
+  action-exceptions:
+    status: exempt
+    comment: |
+      No actions.
+  reauthentication-flow:
+    status: exempt
+    comment: |
+      This integration does not require authentication.
+  parallel-updates: done
+  test-coverage: done
+  integration-owner: done
+  docs-installation-parameters: done
+  docs-configuration-parameters:
+    status: exempt
+    comment: |
+      This integration has no options flow.
+
+  # Gold
+  entity-translations: done
+  entity-device-class: done
+  devices: done
+  entity-category: done
+  entity-disabled-by-default: done
+  discovery:
+    status: exempt
+    comment: |
+      No discovery, cloud service
+  stale-devices:
+    status: exempt
+    comment: |
+      This integration's devices (services) will be removed with the config entry if needed.
+  diagnostics: done
+  exception-translations: done
+  icon-translations: done
+  reconfiguration-flow: done
+  dynamic-devices:
+    status: exempt
+    comment: |
+      This integration has fixed devices.
+  discovery-update-info:
+    status: exempt
+    comment: |
+      No discovery
+  repair-issues:
+    status: exempt
+    comment: |
+      This integration doesn't have any cases where raising an issue is needed.
+  docs-use-cases: done
+  docs-supported-devices:
+    status: exempt
+    comment: |
+      Only service, no device
+  docs-supported-functions: done
+  docs-data-update: done
+  docs-known-limitations: done
+  docs-troubleshooting: done
+  docs-examples: done
+
+  # Platinum
+  async-dependency: done
+  inject-websession: done
+  strict-typing: done
diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py
new file mode 100644
index 00000000000..47617cc8e42
--- /dev/null
+++ b/homeassistant/components/nordpool/sensor.py
@@ -0,0 +1,341 @@
+"""Sensor platform for Nord Pool integration."""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+from dataclasses import dataclass
+from datetime import datetime, timedelta
+
+from pynordpool import DeliveryPeriodData
+
+from homeassistant.components.sensor import (
+    EntityCategory,
+    SensorDeviceClass,
+    SensorEntity,
+    SensorEntityDescription,
+    SensorStateClass,
+)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.util import dt as dt_util, slugify
+
+from . import NordPoolConfigEntry
+from .const import LOGGER
+from .coordinator import NordPoolDataUpdateCoordinator
+from .entity import NordpoolBaseEntity
+
+PARALLEL_UPDATES = 0
+
+
+def get_prices(
+    data: DeliveryPeriodData,
+) -> dict[str, tuple[float | None, float, float | None]]:
+    """Return previous, current and next prices.
+ + Output: {"SE3": (10.0, 10.5, 12.1)} + """ + last_price_entries: dict[str, float] = {} + current_price_entries: dict[str, float] = {} + next_price_entries: dict[str, float] = {} + current_time = dt_util.utcnow() + previous_time = current_time - timedelta(hours=1) + next_time = current_time + timedelta(hours=1) + price_data = data.entries + LOGGER.debug("Price data: %s", price_data) + for entry in price_data: + if entry.start <= current_time <= entry.end: + current_price_entries = entry.entry + if entry.start <= previous_time <= entry.end: + last_price_entries = entry.entry + if entry.start <= next_time <= entry.end: + next_price_entries = entry.entry + LOGGER.debug( + "Last price %s, current price %s, next price %s", + last_price_entries, + current_price_entries, + next_price_entries, + ) + + result = {} + for area, price in current_price_entries.items(): + result[area] = ( + last_price_entries.get(area), + price, + next_price_entries.get(area), + ) + LOGGER.debug("Prices: %s", result) + return result + + +def get_blockprices( + data: DeliveryPeriodData, +) -> dict[str, dict[str, tuple[datetime, datetime, float, float, float]]]: + """Return average, min and max for block prices. + + Output: {"SE3": {"Off-peak 1": (_datetime_, _datetime_, 9.3, 10.5, 12.1)}} + """ + result: dict[str, dict[str, tuple[datetime, datetime, float, float, float]]] = {} + block_prices = data.block_prices + for entry in block_prices: + for _area in entry.average: + if _area not in result: + result[_area] = {} + result[_area][entry.name] = ( + entry.start, + entry.end, + entry.average[_area]["average"], + entry.average[_area]["min"], + entry.average[_area]["max"], + ) + + LOGGER.debug("Block prices: %s", result) + return result + + +@dataclass(frozen=True, kw_only=True) +class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): + """Describes Nord Pool default sensor entity.""" + + value_fn: Callable[[DeliveryPeriodData], str | float | datetime | None] + + +@dataclass(frozen=True, kw_only=True) +class NordpoolPricesSensorEntityDescription(SensorEntityDescription): + """Describes Nord Pool prices sensor entity.""" + + value_fn: Callable[[tuple[float | None, float, float | None]], float | None] + + +@dataclass(frozen=True, kw_only=True) +class NordpoolBlockPricesSensorEntityDescription(SensorEntityDescription): + """Describes Nord Pool block prices sensor entity.""" + + value_fn: Callable[ + [tuple[datetime, datetime, float, float, float]], float | datetime | None + ] + + +DEFAULT_SENSOR_TYPES: tuple[NordpoolDefaultSensorEntityDescription, ...] = ( + NordpoolDefaultSensorEntityDescription( + key="updated_at", + translation_key="updated_at", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.updated_at, + entity_category=EntityCategory.DIAGNOSTIC, + ), + NordpoolDefaultSensorEntityDescription( + key="currency", + translation_key="currency", + value_fn=lambda data: data.currency, + entity_category=EntityCategory.DIAGNOSTIC, + ), + NordpoolDefaultSensorEntityDescription( + key="exchange_rate", + translation_key="exchange_rate", + value_fn=lambda data: data.exchange_rate, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) +PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] 
= ( + NordpoolPricesSensorEntityDescription( + key="current_price", + translation_key="current_price", + value_fn=lambda data: data[1] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="last_price", + translation_key="last_price", + value_fn=lambda data: data[0] / 1000 if data[0] else None, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="next_price", + translation_key="next_price", + value_fn=lambda data: data[2] / 1000 if data[2] else None, + suggested_display_precision=2, + ), +) +BLOCK_PRICES_SENSOR_TYPES: tuple[NordpoolBlockPricesSensorEntityDescription, ...] = ( + NordpoolBlockPricesSensorEntityDescription( + key="block_average", + translation_key="block_average", + value_fn=lambda data: data[2] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_min", + translation_key="block_min", + value_fn=lambda data: data[3] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_max", + translation_key="block_max", + value_fn=lambda data: data[4] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_start_time", + translation_key="block_start_time", + value_fn=lambda data: data[0], + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_end_time", + translation_key="block_end_time", + value_fn=lambda data: data[1], + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + ), +) +DAILY_AVERAGE_PRICES_SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( + SensorEntityDescription( + key="daily_average", + translation_key="daily_average", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: NordPoolConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Nord Pool sensor platform.""" + + coordinator = entry.runtime_data + + entities: list[NordpoolBaseEntity] = [] + currency = entry.runtime_data.data.currency + + for area in get_prices(entry.runtime_data.data): + LOGGER.debug("Setting up base sensors for area %s", area) + entities.extend( + NordpoolSensor(coordinator, description, area) + for description in DEFAULT_SENSOR_TYPES + ) + LOGGER.debug( + "Setting up price sensors for area %s with currency %s", area, currency + ) + entities.extend( + NordpoolPriceSensor(coordinator, description, area, currency) + for description in PRICES_SENSOR_TYPES + ) + entities.extend( + NordpoolDailyAveragePriceSensor(coordinator, description, area, currency) + for description in DAILY_AVERAGE_PRICES_SENSOR_TYPES + ) + for block_name in get_blockprices(coordinator.data)[area]: + LOGGER.debug( + "Setting up block price sensors for area %s with currency %s in block %s", + area, + currency, + block_name, + ) + entities.extend( + NordpoolBlockPriceSensor( + coordinator, description, area, currency, block_name + ) + for description in BLOCK_PRICES_SENSOR_TYPES + ) + async_add_entities(entities) + + +class NordpoolSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool sensor.""" + + entity_description: NordpoolDefaultSensorEntityDescription + + @property + def native_value(self) -> str | float | datetime | None: + """Return value of sensor.""" + return self.entity_description.value_fn(self.coordinator.data) + + +class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool price sensor.""" + + entity_description: NordpoolPricesSensorEntityDescription + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: NordpoolPricesSensorEntityDescription, + area: str, + currency: str, + ) -> None: + """Initiate Nord Pool sensor.""" + super().__init__(coordinator, entity_description, area) + self._attr_native_unit_of_measurement = f"{currency}/kWh" + + @property + def native_value(self) -> float | None: + """Return value of sensor.""" + return self.entity_description.value_fn( + get_prices(self.coordinator.data)[self.area] + ) + + +class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool block price sensor.""" + + entity_description: NordpoolBlockPricesSensorEntityDescription + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: NordpoolBlockPricesSensorEntityDescription, + area: str, + currency: str, + block_name: str, + ) -> None: + """Initiate Nord Pool sensor.""" + super().__init__(coordinator, entity_description, area) + if entity_description.device_class is not SensorDeviceClass.TIMESTAMP: + self._attr_native_unit_of_measurement = f"{currency}/kWh" + self._attr_unique_id = f"{slugify(block_name)}-{area}-{entity_description.key}" + self.block_name = block_name + self._attr_translation_placeholders = {"block": block_name} + + @property + def native_value(self) -> float | datetime | None: + """Return value of sensor.""" + return self.entity_description.value_fn( + get_blockprices(self.coordinator.data)[self.area][self.block_name] + ) + 
+
+class NordpoolDailyAveragePriceSensor(NordpoolBaseEntity, SensorEntity):
+    """Representation of a Nord Pool daily average price sensor."""
+
+    entity_description: SensorEntityDescription
+
+    def __init__(
+        self,
+        coordinator: NordPoolDataUpdateCoordinator,
+        entity_description: SensorEntityDescription,
+        area: str,
+        currency: str,
+    ) -> None:
+        """Initiate Nord Pool sensor."""
+        super().__init__(coordinator, entity_description, area)
+        self._attr_native_unit_of_measurement = f"{currency}/kWh"
+
+    @property
+    def native_value(self) -> float | None:
+        """Return value of sensor."""
+        return self.coordinator.data.area_average[self.area] / 1000
diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json
new file mode 100644
index 00000000000..96c22633c9e
--- /dev/null
+++ b/homeassistant/components/nordpool/strings.json
@@ -0,0 +1,78 @@
+{
+  "config": {
+    "abort": {
+      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
+    },
+    "error": {
+      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+      "no_data": "API connected but the response was empty"
+    },
+    "step": {
+      "user": {
+        "data": {
+          "currency": "Currency",
+          "areas": "Areas"
+        },
+        "data_description": {
+          "currency": "Select the currency to display prices in; EUR is the base currency.",
+          "areas": "Areas to display prices for, according to the Nord Pool market areas."
+        }
+      },
+      "reconfigure": {
+        "data": {
+          "currency": "[%key:component::nordpool::config::step::user::data::currency%]",
+          "areas": "[%key:component::nordpool::config::step::user::data::areas%]"
+        },
+        "data_description": {
+          "currency": "[%key:component::nordpool::config::step::user::data_description::currency%]",
+          "areas": "[%key:component::nordpool::config::step::user::data_description::areas%]"
+        }
+      }
+    }
+  },
+  "entity": {
+    "sensor": {
+      "updated_at": {
+        "name": "Last updated"
+      },
+      "currency": {
+        "name": "Currency"
+      },
+      "exchange_rate": {
+        "name": "Exchange rate"
+      },
+      "current_price": {
+        "name": "Current price"
+      },
+      "last_price": {
+        "name": "Previous price"
+      },
+      "next_price": {
+        "name": "Next price"
+      },
+      "block_average": {
+        "name": "{block} average"
+      },
+      "block_min": {
+        "name": "{block} lowest price"
+      },
+      "block_max": {
+        "name": "{block} highest price"
+      },
+      "block_start_time": {
+        "name": "{block} time from"
+      },
+      "block_end_time": {
+        "name": "{block} time until"
+      },
+      "daily_average": {
+        "name": "Daily average"
+      }
+    }
+  },
+  "exceptions": {
+    "initial_update_failed": {
+      "message": "Initial update failed on startup with error {error}"
+    }
+  }
+}
diff --git a/homeassistant/components/norway_air/manifest.json b/homeassistant/components/norway_air/manifest.json
index f787f647db8..5ce6efd944c 100644
--- a/homeassistant/components/norway_air/manifest.json
+++ b/homeassistant/components/norway_air/manifest.json
@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/norway_air",
   "iot_class": "cloud_polling",
   "loggers": ["metno"],
-  "requirements": ["PyMetno==0.12.0"]
+  "quality_scale": "legacy",
+  "requirements": ["PyMetno==0.13.0"]
 }
diff --git a/homeassistant/components/notify/__init__.py b/homeassistant/components/notify/__init__.py
index 31c7b8e4d70..0b7a25ced3e 100644
--- a/homeassistant/components/notify/__init__.py
+++ b/homeassistant/components/notify/__init__.py
@@ -4,10 +4,11 @@ from __future__ import annotations

 from datetime import timedelta
 from enum import IntFlag
-from functools import cached_property,
partial +from functools import partial import logging from typing import Any, final, override +from propcache import cached_property import voluptuous as vol import homeassistant.components.persistent_notification as pn @@ -18,9 +19,9 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import RestoreEntity -from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util +from homeassistant.util.hass_dict import HassKey from .const import ( # noqa: F401 ATTR_DATA, @@ -39,7 +40,6 @@ from .legacy import ( # noqa: F401 async_reload, async_reset_platform, async_setup_legacy, - check_templates_warn, ) from .repairs import migrate_notify_issue # noqa: F401 @@ -48,6 +48,7 @@ from .repairs import migrate_notify_issue # noqa: F401 # Platform specific data ATTR_TITLE_DEFAULT = "Home Assistant" +DATA_COMPONENT: HassKey[EntityComponent[NotifyEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10) @@ -78,7 +79,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # legacy platforms to finish setting up. hass.async_create_task(setup, eager_start=True) - component = hass.data[DOMAIN] = EntityComponent[NotifyEntity](_LOGGER, DOMAIN, hass) + component = hass.data[DATA_COMPONENT] = EntityComponent[NotifyEntity]( + _LOGGER, DOMAIN, hass + ) component.async_register_entity_service( SERVICE_SEND_MESSAGE, { @@ -90,22 +93,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def persistent_notification(service: ServiceCall) -> None: """Send notification via the built-in persistent_notify integration.""" - message: Template = service.data[ATTR_MESSAGE] - check_templates_warn(hass, message) - - title = None - title_tpl: Template | None - if title_tpl := service.data.get(ATTR_TITLE): - check_templates_warn(hass, title_tpl) - title = title_tpl.async_render(parse_result=False) + message: str = service.data[ATTR_MESSAGE] + title: str | None = service.data.get(ATTR_TITLE) notification_id = None if data := service.data.get(ATTR_DATA): notification_id = data.get(pn.ATTR_NOTIFICATION_ID) - pn.async_create( - hass, message.async_render(parse_result=False), title, notification_id - ) + pn.async_create(hass, message, title, notification_id) hass.services.async_register( DOMAIN, @@ -123,14 +118,12 @@ class NotifyEntityDescription(EntityDescription, frozen_or_thawed=True): async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[NotifyEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[NotifyEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class NotifyEntity(RestoreEntity): diff --git a/homeassistant/components/notify/const.py b/homeassistant/components/notify/const.py index 6cd957e3afe..29064f24a66 100644 --- a/homeassistant/components/notify/const.py +++ b/homeassistant/components/notify/const.py @@ -30,8 +30,8 @@ SERVICE_PERSISTENT_NOTIFICATION = "persistent_notification" 
NOTIFY_SERVICE_SCHEMA = vol.Schema( { - vol.Required(ATTR_MESSAGE): cv.template, - vol.Optional(ATTR_TITLE): cv.template, + vol.Required(ATTR_MESSAGE): cv.string, + vol.Optional(ATTR_TITLE): cv.string, vol.Optional(ATTR_TARGET): vol.All(cv.ensure_list, [cv.string]), vol.Optional(ATTR_DATA): dict, } diff --git a/homeassistant/components/notify/icons.json b/homeassistant/components/notify/icons.json index ace8ee0c96b..e5ab34031f7 100644 --- a/homeassistant/components/notify/icons.json +++ b/homeassistant/components/notify/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "notify": "mdi:bell-ring", - "persistent_notification": "mdi:bell-badge", - "send_message": "mdi:message-arrow-right" + "notify": { + "service": "mdi:bell-ring" + }, + "persistent_notification": { + "service": "mdi:bell-badge" + }, + "send_message": { + "service": "mdi:message-arrow-right" + } } } diff --git a/homeassistant/components/notify/legacy.py b/homeassistant/components/notify/legacy.py index dcb148a99f5..46538aad921 100644 --- a/homeassistant/components/notify/legacy.py +++ b/homeassistant/components/notify/legacy.py @@ -3,17 +3,16 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Coroutine, Mapping +from collections.abc import Coroutine, Mapping from functools import partial from typing import Any, Protocol, cast from homeassistant.config import config_per_platform from homeassistant.const import CONF_DESCRIPTION, CONF_NAME -from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import discovery from homeassistant.helpers.service import async_set_service_schema -from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.loader import async_get_integration, bind_hass from homeassistant.setup import ( @@ -22,6 +21,7 @@ from homeassistant.setup import ( async_start_setup, ) from homeassistant.util import slugify +from homeassistant.util.hass_dict import HassKey from homeassistant.util.yaml import load_yaml_dict from .const import ( @@ -36,8 +36,12 @@ from .const import ( ) CONF_FIELDS = "fields" -NOTIFY_SERVICES = "notify_services" -NOTIFY_DISCOVERY_DISPATCHER = "notify_discovery_dispatcher" +NOTIFY_SERVICES: HassKey[dict[str, list[BaseNotificationService]]] = HassKey( + f"{DOMAIN}_services" +) +NOTIFY_DISCOVERY_DISPATCHER: HassKey[CALLBACK_TYPE | None] = HassKey( + f"{DOMAIN}_discovery_dispatcher" +) class LegacyNotifyPlatform(Protocol): @@ -155,30 +159,15 @@ def async_setup_legacy( ] -@callback -def check_templates_warn(hass: HomeAssistant, tpl: Template) -> None: - """Warn user that passing templates to notify service is deprecated.""" - if tpl.is_static or hass.data.get("notify_template_warned"): - return - - hass.data["notify_template_warned"] = True - LOGGER.warning( - "Passing templates to notify service is deprecated and will be removed in" - " 2021.12. 
Automations and scripts handle templates automatically" - ) - - @bind_hass async def async_reload(hass: HomeAssistant, integration_name: str) -> None: """Register notify services for an integration.""" if not _async_integration_has_notify_services(hass, integration_name): return - notify_services: list[BaseNotificationService] = hass.data[NOTIFY_SERVICES][ - integration_name - ] tasks = [ - notify_service.async_register_services() for notify_service in notify_services + notify_service.async_register_services() + for notify_service in hass.data[NOTIFY_SERVICES][integration_name] ] await asyncio.gather(*tasks) @@ -187,20 +176,16 @@ async def async_reload(hass: HomeAssistant, integration_name: str) -> None: @bind_hass async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> None: """Unregister notify services for an integration.""" - notify_discovery_dispatcher: Callable[[], None] | None = hass.data.get( - NOTIFY_DISCOVERY_DISPATCHER - ) + notify_discovery_dispatcher = hass.data.get(NOTIFY_DISCOVERY_DISPATCHER) if notify_discovery_dispatcher: notify_discovery_dispatcher() hass.data[NOTIFY_DISCOVERY_DISPATCHER] = None if not _async_integration_has_notify_services(hass, integration_name): return - notify_services: list[BaseNotificationService] = hass.data[NOTIFY_SERVICES][ - integration_name - ] tasks = [ - notify_service.async_unregister_services() for notify_service in notify_services + notify_service.async_unregister_services() + for notify_service in hass.data[NOTIFY_SERVICES][integration_name] ] await asyncio.gather(*tasks) @@ -255,19 +240,17 @@ class BaseNotificationService: async def _async_notify_message_service(self, service: ServiceCall) -> None: """Handle sending notification message service calls.""" kwargs = {} - message: Template = service.data[ATTR_MESSAGE] - title: Template | None + message: str = service.data[ATTR_MESSAGE] + title: str | None if title := service.data.get(ATTR_TITLE): - check_templates_warn(self.hass, title) - kwargs[ATTR_TITLE] = title.async_render(parse_result=False) + kwargs[ATTR_TITLE] = title if self.registered_targets.get(service.service) is not None: kwargs[ATTR_TARGET] = [self.registered_targets[service.service]] elif service.data.get(ATTR_TARGET) is not None: kwargs[ATTR_TARGET] = service.data.get(ATTR_TARGET) - check_templates_warn(self.hass, message) - kwargs[ATTR_MESSAGE] = message.async_render(parse_result=False) + kwargs[ATTR_MESSAGE] = message kwargs[ATTR_DATA] = service.data.get(ATTR_DATA) await self.async_send_message(**kwargs) diff --git a/homeassistant/components/notify/repairs.py b/homeassistant/components/notify/repairs.py index d188f07c2ed..8969652d98e 100644 --- a/homeassistant/components/notify/repairs.py +++ b/homeassistant/components/notify/repairs.py @@ -2,8 +2,7 @@ from __future__ import annotations -from homeassistant.components.repairs import RepairsFlow -from homeassistant.components.repairs.issue_handler import ConfirmRepairFlow +from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import issue_registry as ir diff --git a/homeassistant/components/notify/strings.json b/homeassistant/components/notify/strings.json index 3fba5e43fc7..e832bfc248a 100644 --- a/homeassistant/components/notify/strings.json +++ b/homeassistant/components/notify/strings.json @@ -67,14 +67,14 @@ "fix_flow": { "step": { "confirm": { - "description": "The {integration_title} `notify` actions(s) are migrated. 
A new `notify` entity is available now to replace each legacy `notify` action.\n\nUpdate any automations to use the new `notify.send_message` action exposed with this new entity. When this is done, fix this issue and restart Home Assistant.", + "description": "The {integration_title} `notify` action(s) are migrated. A new `notify` entity is available now to replace each legacy `notify` action.\n\nUpdate any automations to use the new `notify.send_message` action exposed with this new entity. When this is done, fix this issue and restart Home Assistant.", "title": "Migrate legacy {integration_title} notify action for domain `{domain}`" } } } }, "migrate_notify_service": { - "title": "Legacy action `notify.{service_name}` stll being used", + "title": "Legacy action notify.{service_name} still being used", "fix_flow": { "step": { "confirm": { diff --git a/homeassistant/components/notify_events/manifest.json b/homeassistant/components/notify_events/manifest.json index a2c01e1d718..e154ab85cae 100644 --- a/homeassistant/components/notify_events/manifest.json +++ b/homeassistant/components/notify_events/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/notify_events", "iot_class": "cloud_push", "loggers": ["notify_events"], + "quality_scale": "legacy", "requirements": ["notify-events==1.0.4"] } diff --git a/homeassistant/components/notion/__init__.py b/homeassistant/components/notion/__init__.py index 00bded5c3a0..79f5d951e7e 100644 --- a/homeassistant/components/notion/__init__.py +++ b/homeassistant/components/notion/__init__.py @@ -6,18 +6,14 @@ from datetime import timedelta from typing import Any from uuid import UUID -from aionotion.bridge.models import Bridge from aionotion.errors import InvalidCredentialsError, NotionError -from aionotion.listener.models import Listener, ListenerKind +from aionotion.listener.models import ListenerKind from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.helpers import entity_registry as er from .const import ( CONF_REFRESH_TOKEN, @@ -168,102 +164,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class NotionEntity(CoordinatorEntity[NotionDataUpdateCoordinator]): - """Define a base Notion entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: NotionDataUpdateCoordinator, - listener_id: str, - sensor_id: str, - bridge_id: int, - description: EntityDescription, - ) -> None: - """Initialize the entity.""" - super().__init__(coordinator) - - sensor = self.coordinator.data.sensors[sensor_id] - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, sensor.hardware_id)}, - manufacturer="Silicon Labs", - model=str(sensor.hardware_revision), - name=str(sensor.name).capitalize(), - sw_version=sensor.firmware_version, - ) - - if bridge := self._async_get_bridge(bridge_id): - self._attr_device_info["via_device"] = (DOMAIN, bridge.hardware_id) - - self._attr_extra_state_attributes = {} - self._attr_unique_id 
= listener_id - self._bridge_id = bridge_id - self._listener_id = listener_id - self._sensor_id = sensor_id - self.entity_description = description - - @property - def available(self) -> bool: - """Return True if entity is available.""" - return ( - self.coordinator.last_update_success - and self._listener_id in self.coordinator.data.listeners - ) - - @property - def listener(self) -> Listener: - """Return the listener related to this entity.""" - return self.coordinator.data.listeners[self._listener_id] - - @callback - def _async_get_bridge(self, bridge_id: int) -> Bridge | None: - """Get a bridge by ID (if it exists).""" - if (bridge := self.coordinator.data.bridges.get(bridge_id)) is None: - LOGGER.debug("Entity references a non-existent bridge ID: %s", bridge_id) - return None - return bridge - - @callback - def _async_update_bridge_id(self) -> None: - """Update the entity's bridge ID if it has changed. - - Sensors can move to other bridges based on signal strength, etc. - """ - sensor = self.coordinator.data.sensors[self._sensor_id] - - # If the bridge ID hasn't changed, return: - if self._bridge_id == sensor.bridge.id: - return - - # If the bridge doesn't exist, return: - if (bridge := self._async_get_bridge(sensor.bridge.id)) is None: - return - - self._bridge_id = sensor.bridge.id - - device_registry = dr.async_get(self.hass) - this_device = device_registry.async_get_device( - identifiers={(DOMAIN, sensor.hardware_id)} - ) - bridge = self.coordinator.data.bridges[self._bridge_id] - bridge_device = device_registry.async_get_device( - identifiers={(DOMAIN, bridge.hardware_id)} - ) - - if not bridge_device or not this_device: - return - - device_registry.async_update_device( - this_device.id, via_device_id=bridge_device.id - ) - - @callback - def _handle_coordinator_update(self) -> None: - """Respond to a DataUpdateCoordinator update.""" - if self._listener_id in self.coordinator.data.listeners: - self._async_update_bridge_id() - super()._handle_coordinator_update() diff --git a/homeassistant/components/notion/binary_sensor.py b/homeassistant/components/notion/binary_sensor.py index da50a809689..8c57310752a 100644 --- a/homeassistant/components/notion/binary_sensor.py +++ b/homeassistant/components/notion/binary_sensor.py @@ -17,7 +17,6 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import NotionEntity from .const import ( DOMAIN, LOGGER, @@ -32,7 +31,7 @@ from .const import ( SENSOR_WINDOW_HINGED, ) from .coordinator import NotionDataUpdateCoordinator -from .model import NotionEntityDescription +from .entity import NotionEntity, NotionEntityDescription @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/notion/config_flow.py b/homeassistant/components/notion/config_flow.py index c803992c2e2..f7347a8f595 100644 --- a/homeassistant/components/notion/config_flow.py +++ b/homeassistant/components/notion/config_flow.py @@ -9,7 +9,7 @@ from typing import Any from aionotion.errors import InvalidCredentialsError, NotionError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -68,36 +68,29 @@ class NotionFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self._reauth_entry: ConfigEntry | None = None - async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle re-auth completion.""" - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() if not user_input: return self.async_show_form( step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME] }, ) credentials_validation_result = await async_validate_credentials( - self.hass, self._reauth_entry.data[CONF_USERNAME], user_input[CONF_PASSWORD] + self.hass, reauth_entry.data[CONF_USERNAME], user_input[CONF_PASSWORD] ) if credentials_validation_result.errors: @@ -106,19 +99,16 @@ class NotionFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=REAUTH_SCHEMA, errors=credentials_validation_result.errors, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME] }, ) - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data=self._reauth_entry.data - | {CONF_REFRESH_TOKEN: credentials_validation_result.refresh_token}, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={ + CONF_REFRESH_TOKEN: credentials_validation_result.refresh_token + }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") async def async_step_user( self, user_input: dict[str, str] | None = None diff --git a/homeassistant/components/notion/entity.py b/homeassistant/components/notion/entity.py new file mode 100644 index 00000000000..11e470f1d26 --- /dev/null +++ b/homeassistant/components/notion/entity.py @@ -0,0 +1,123 @@ +"""Support for Notion.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from aionotion.bridge.models import Bridge +from aionotion.listener.models import Listener, ListenerKind + +from homeassistant.core import callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import 
DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, LOGGER +from .coordinator import NotionDataUpdateCoordinator + + +@dataclass(frozen=True, kw_only=True) +class NotionEntityDescription: + """Define a description for Notion entities.""" + + listener_kind: ListenerKind + + +class NotionEntity(CoordinatorEntity[NotionDataUpdateCoordinator]): + """Define a base Notion entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: NotionDataUpdateCoordinator, + listener_id: str, + sensor_id: str, + bridge_id: int, + description: EntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + sensor = self.coordinator.data.sensors[sensor_id] + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, sensor.hardware_id)}, + manufacturer="Silicon Labs", + model=str(sensor.hardware_revision), + name=str(sensor.name).capitalize(), + sw_version=sensor.firmware_version, + ) + + if bridge := self._async_get_bridge(bridge_id): + self._attr_device_info["via_device"] = (DOMAIN, bridge.hardware_id) + + self._attr_extra_state_attributes = {} + self._attr_unique_id = listener_id + self._bridge_id = bridge_id + self._listener_id = listener_id + self._sensor_id = sensor_id + self.entity_description = description + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return ( + self.coordinator.last_update_success + and self._listener_id in self.coordinator.data.listeners + ) + + @property + def listener(self) -> Listener: + """Return the listener related to this entity.""" + return self.coordinator.data.listeners[self._listener_id] + + @callback + def _async_get_bridge(self, bridge_id: int) -> Bridge | None: + """Get a bridge by ID (if it exists).""" + if (bridge := self.coordinator.data.bridges.get(bridge_id)) is None: + LOGGER.debug("Entity references a non-existent bridge ID: %s", bridge_id) + return None + return bridge + + @callback + def _async_update_bridge_id(self) -> None: + """Update the entity's bridge ID if it has changed. + + Sensors can move to other bridges based on signal strength, etc. 
+ """ + sensor = self.coordinator.data.sensors[self._sensor_id] + + # If the bridge ID hasn't changed, return: + if self._bridge_id == sensor.bridge.id: + return + + # If the bridge doesn't exist, return: + if (bridge := self._async_get_bridge(sensor.bridge.id)) is None: + return + + self._bridge_id = sensor.bridge.id + + device_registry = dr.async_get(self.hass) + this_device = device_registry.async_get_device( + identifiers={(DOMAIN, sensor.hardware_id)} + ) + bridge = self.coordinator.data.bridges[self._bridge_id] + bridge_device = device_registry.async_get_device( + identifiers={(DOMAIN, bridge.hardware_id)} + ) + + if not bridge_device or not this_device: + return + + device_registry.async_update_device( + this_device.id, via_device_id=bridge_device.id + ) + + @callback + def _handle_coordinator_update(self) -> None: + """Respond to a DataUpdateCoordinator update.""" + if self._listener_id in self.coordinator.data.listeners: + self._async_update_bridge_id() + super()._handle_coordinator_update() diff --git a/homeassistant/components/notion/model.py b/homeassistant/components/notion/model.py deleted file mode 100644 index 541ca245329..00000000000 --- a/homeassistant/components/notion/model.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Define Notion model mixins.""" - -from dataclasses import dataclass - -from aionotion.listener.models import ListenerKind - - -@dataclass(frozen=True, kw_only=True) -class NotionEntityDescription: - """Define an description for Notion entities.""" - - listener_kind: ListenerKind diff --git a/homeassistant/components/notion/sensor.py b/homeassistant/components/notion/sensor.py index d12dabbbc33..fb853e65d7d 100644 --- a/homeassistant/components/notion/sensor.py +++ b/homeassistant/components/notion/sensor.py @@ -15,10 +15,9 @@ from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import NotionEntity from .const import DOMAIN, SENSOR_MOLD, SENSOR_TEMPERATURE from .coordinator import NotionDataUpdateCoordinator -from .model import NotionEntityDescription +from .entity import NotionEntity, NotionEntityDescription @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/nsw_fuel_station/__init__.py b/homeassistant/components/nsw_fuel_station/__init__.py index 76dc9d4c6ff..85e204b6f51 100644 --- a/homeassistant/components/nsw_fuel_station/__init__.py +++ b/homeassistant/components/nsw_fuel_station/__init__.py @@ -33,6 +33,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name="sensor", update_interval=SCAN_INTERVAL, update_method=async_update_data, diff --git a/homeassistant/components/nsw_fuel_station/manifest.json b/homeassistant/components/nsw_fuel_station/manifest.json index 5c105fd0281..3fccab39189 100644 --- a/homeassistant/components/nsw_fuel_station/manifest.json +++ b/homeassistant/components/nsw_fuel_station/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nsw_fuel_station", "iot_class": "cloud_polling", "loggers": ["nsw_fuel"], + "quality_scale": "legacy", "requirements": ["nsw-fuel-api-client==1.1.0"] } diff --git a/homeassistant/components/nsw_rural_fire_service_feed/manifest.json b/homeassistant/components/nsw_rural_fire_service_feed/manifest.json index 9d1f60e33d1..802f4c89b72 100644 --- a/homeassistant/components/nsw_rural_fire_service_feed/manifest.json +++ b/homeassistant/components/nsw_rural_fire_service_feed/manifest.json @@ -6,5 +6,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_geojson_nsw_rfs_incidents"], + "quality_scale": "legacy", "requirements": ["aio-geojson-nsw-rfs-incidents==0.7"] } diff --git a/homeassistant/components/nuheat/__init__.py b/homeassistant/components/nuheat/__init__.py index fdb49688eba..fb17e6b45bf 100644 --- a/homeassistant/components/nuheat/__init__.py +++ b/homeassistant/components/nuheat/__init__.py @@ -60,6 +60,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"nuheat {serial_number}", update_method=_async_update_data, update_interval=timedelta(minutes=5), diff --git a/homeassistant/components/nuheat/climate.py b/homeassistant/components/nuheat/climate.py index db85827fc9b..8248c1b9b82 100644 --- a/homeassistant/components/nuheat/climate.py +++ b/homeassistant/components/nuheat/climate.py @@ -79,7 +79,6 @@ class NuHeatThermostat(CoordinatorEntity, ClimateEntity): _attr_has_entity_name = True _attr_name = None _attr_preset_modes = PRESET_MODES - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, thermostat, temperature_unit): """Initialize the thermostat.""" diff --git a/homeassistant/components/nuki/__init__.py b/homeassistant/components/nuki/__init__.py index 2b9035e730f..4f3f56f7f03 100644 --- a/homeassistant/components/nuki/__init__.py +++ b/homeassistant/components/nuki/__init__.py @@ -10,7 +10,6 @@ import logging from aiohttp import web from pynuki import NukiBridge, NukiLock, NukiOpener from pynuki.bridge import InvalidCredentialsException -from pynuki.device import NukiDevice from requests.exceptions import RequestException from homeassistant import exceptions @@ -25,9 +24,8 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant from 
homeassistant.helpers import device_registry as dr, issue_registry as ir -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.network import NoURLAvailableError, get_url -from homeassistant.helpers.update_coordinator import CoordinatorEntity, UpdateFailed +from homeassistant.helpers.update_coordinator import UpdateFailed from .const import CONF_ENCRYPT_TOKEN, DEFAULT_TIMEOUT, DOMAIN from .coordinator import NukiCoordinator @@ -266,33 +264,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class NukiEntity[_NukiDeviceT: NukiDevice](CoordinatorEntity[NukiCoordinator]): - """An entity using CoordinatorEntity. - - The CoordinatorEntity class provides: - should_poll - async_update - async_added_to_hass - available - - """ - - def __init__(self, coordinator: NukiCoordinator, nuki_device: _NukiDeviceT) -> None: - """Pass coordinator to CoordinatorEntity.""" - super().__init__(coordinator) - self._nuki_device = nuki_device - - @property - def device_info(self) -> DeviceInfo: - """Device info for Nuki entities.""" - return DeviceInfo( - identifiers={(DOMAIN, parse_id(self._nuki_device.nuki_id))}, - name=self._nuki_device.name, - manufacturer="Nuki Home Solutions GmbH", - model=self._nuki_device.device_model_str.capitalize(), - sw_version=self._nuki_device.firmware_version, - via_device=(DOMAIN, self.coordinator.bridge_id), - serial_number=parse_id(self._nuki_device.nuki_id), - ) diff --git a/homeassistant/components/nuki/binary_sensor.py b/homeassistant/components/nuki/binary_sensor.py index 9b4772ee108..8269c43813e 100644 --- a/homeassistant/components/nuki/binary_sensor.py +++ b/homeassistant/components/nuki/binary_sensor.py @@ -14,8 +14,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import NukiEntity, NukiEntryData -from .const import ATTR_NUKI_ID, DOMAIN as NUKI_DOMAIN +from . 
import NukiEntryData +from .const import DOMAIN as NUKI_DOMAIN +from .entity import NukiEntity async def async_setup_entry( @@ -51,14 +52,6 @@ class NukiDoorsensorEntity(NukiEntity[NukiDevice], BinarySensorEntity): """Return a unique ID.""" return f"{self._nuki_device.nuki_id}_doorsensor" - # Deprecated, can be removed in 2024.10 - @property - def extra_state_attributes(self): - """Return the device specific state attributes.""" - return { - ATTR_NUKI_ID: self._nuki_device.nuki_id, - } - @property def available(self) -> bool: """Return true if door sensor is present and activated.""" @@ -91,14 +84,6 @@ class NukiRingactionEntity(NukiEntity[NukiDevice], BinarySensorEntity): """Return a unique ID.""" return f"{self._nuki_device.nuki_id}_ringaction" - # Deprecated, can be removed in 2024.10 - @property - def extra_state_attributes(self): - """Return the device specific state attributes.""" - return { - ATTR_NUKI_ID: self._nuki_device.nuki_id, - } - @property def is_on(self) -> bool: """Return the value of the ring action state.""" diff --git a/homeassistant/components/nuki/config_flow.py b/homeassistant/components/nuki/config_flow.py index 3b8015827f1..4a9789c7e51 100644 --- a/homeassistant/components/nuki/config_flow.py +++ b/homeassistant/components/nuki/config_flow.py @@ -12,6 +12,7 @@ import voluptuous as vol from homeassistant.components import dhcp from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN +from homeassistant.core import HomeAssistant from .const import CONF_ENCRYPT_TOKEN, DEFAULT_PORT, DEFAULT_TIMEOUT, DOMAIN from .helpers import CannotConnect, InvalidAuth, parse_id @@ -34,7 +35,7 @@ REAUTH_SCHEMA = vol.Schema( ) -async def validate_input(hass, data): +async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: """Validate the user input allows us to connect. Data has the keys from USER_SCHEMA with values provided by the user. @@ -99,7 +100,9 @@ class NukiConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Dialog that inform the user that reauth is required.""" errors = {} if user_input is None: @@ -140,7 +143,9 @@ class NukiConfigFlow(ConfigFlow, domain=DOMAIN): step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, errors=errors ) - async def async_step_validate(self, user_input=None): + async def async_step_validate( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle init step of a flow.""" data_schema = self.discovery_schema or USER_SCHEMA diff --git a/homeassistant/components/nuki/entity.py b/homeassistant/components/nuki/entity.py new file mode 100644 index 00000000000..2de1827c416 --- /dev/null +++ b/homeassistant/components/nuki/entity.py @@ -0,0 +1,42 @@ +"""The nuki component.""" + +from __future__ import annotations + +from pynuki.device import NukiDevice + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import NukiCoordinator +from .helpers import parse_id + + +class NukiEntity[_NukiDeviceT: NukiDevice](CoordinatorEntity[NukiCoordinator]): + """An entity using CoordinatorEntity. 
+ + The CoordinatorEntity class provides: + should_poll + async_update + async_added_to_hass + available + + """ + + def __init__(self, coordinator: NukiCoordinator, nuki_device: _NukiDeviceT) -> None: + """Pass coordinator to CoordinatorEntity.""" + super().__init__(coordinator) + self._nuki_device = nuki_device + + @property + def device_info(self) -> DeviceInfo: + """Device info for Nuki entities.""" + return DeviceInfo( + identifiers={(DOMAIN, parse_id(self._nuki_device.nuki_id))}, + name=self._nuki_device.name, + manufacturer="Nuki Home Solutions GmbH", + model=self._nuki_device.device_model_str.capitalize(), + sw_version=self._nuki_device.firmware_version, + via_device=(DOMAIN, self.coordinator.bridge_id), + serial_number=parse_id(self._nuki_device.nuki_id), + ) diff --git a/homeassistant/components/nuki/icons.json b/homeassistant/components/nuki/icons.json index f74603cb9dc..ea1ff9c4fed 100644 --- a/homeassistant/components/nuki/icons.json +++ b/homeassistant/components/nuki/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "lock_n_go": "mdi:lock-clock", - "set_continuous_mode": "mdi:bell-cog" + "lock_n_go": { + "service": "mdi:lock-clock" + }, + "set_continuous_mode": { + "service": "mdi:bell-cog" + } } } diff --git a/homeassistant/components/nuki/lock.py b/homeassistant/components/nuki/lock.py index 5a8734d5df7..a2bf7559fc4 100644 --- a/homeassistant/components/nuki/lock.py +++ b/homeassistant/components/nuki/lock.py @@ -17,15 +17,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import NukiEntity, NukiEntryData -from .const import ( - ATTR_BATTERY_CRITICAL, - ATTR_ENABLE, - ATTR_NUKI_ID, - ATTR_UNLATCH, - DOMAIN as NUKI_DOMAIN, - ERROR_STATES, -) +from . import NukiEntryData +from .const import ATTR_ENABLE, ATTR_UNLATCH, DOMAIN as NUKI_DOMAIN, ERROR_STATES +from .entity import NukiEntity from .helpers import CannotConnect @@ -75,15 +69,6 @@ class NukiDeviceEntity[_NukiDeviceT: NukiDevice](NukiEntity[_NukiDeviceT], LockE """Return a unique ID.""" return self._nuki_device.nuki_id - # Deprecated, can be removed in 2024.10 - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return the device specific state attributes.""" - return { - ATTR_BATTERY_CRITICAL: self._nuki_device.battery_critical, - ATTR_NUKI_ID: self._nuki_device.nuki_id, - } - @property def available(self) -> bool: """Return True if entity is available.""" diff --git a/homeassistant/components/nuki/sensor.py b/homeassistant/components/nuki/sensor.py index 6647eff5c83..d89202ac7d7 100644 --- a/homeassistant/components/nuki/sensor.py +++ b/homeassistant/components/nuki/sensor.py @@ -10,8 +10,9 @@ from homeassistant.const import PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import NukiEntity, NukiEntryData -from .const import ATTR_NUKI_ID, DOMAIN as NUKI_DOMAIN +from . 
import NukiEntryData +from .const import DOMAIN as NUKI_DOMAIN +from .entity import NukiEntity async def async_setup_entry( @@ -38,12 +39,6 @@ class NukiBatterySensor(NukiEntity[NukiDevice], SensorEntity): """Return a unique ID.""" return f"{self._nuki_device.nuki_id}_battery_level" - # Deprecated, can be removed in 2024.10 - @property - def extra_state_attributes(self): - """Return the device specific state attributes.""" - return {ATTR_NUKI_ID: self._nuki_device.nuki_id} - @property def native_value(self) -> float: """Return the state of the sensor.""" diff --git a/homeassistant/components/numato/__init__.py b/homeassistant/components/numato/__init__.py index 978264d867e..00122132d44 100644 --- a/homeassistant/components/numato/__init__.py +++ b/homeassistant/components/numato/__init__.py @@ -1,5 +1,6 @@ """Support for controlling GPIO pins of a Numato Labs USB GPIO expander.""" +from collections.abc import Callable import logging import numato_gpio as gpio @@ -16,7 +17,7 @@ from homeassistant.const import ( PERCENTAGE, Platform, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import load_platform from homeassistant.helpers.typing import ConfigType @@ -138,25 +139,25 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: try: gpio.discover(config[DOMAIN][CONF_DISCOVER]) except gpio.NumatoGpioError as err: - _LOGGER.info("Error discovering Numato devices: %s", err) + _LOGGER.error("Error discovering Numato devices: %s", err) gpio.cleanup() return False - _LOGGER.info( + _LOGGER.debug( "Initializing Numato 32 port USB GPIO expanders with IDs: %s", ", ".join(str(d) for d in gpio.devices), ) hass.data[DOMAIN][DATA_API] = NumatoAPI() - def cleanup_gpio(event): + def cleanup_gpio(event: Event) -> None: """Stuff to do before stopping.""" _LOGGER.debug("Clean up Numato GPIO") gpio.cleanup() if DATA_API in hass.data[DOMAIN]: hass.data[DOMAIN][DATA_API].ports_registered.clear() - def prepare_gpio(event): + def prepare_gpio(event: Event) -> None: """Stuff to do when home assistant starts.""" _LOGGER.debug("Setup cleanup at stop for Numato GPIO") hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup_gpio) @@ -172,11 +173,11 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: class NumatoAPI: """Home-Assistant specific API for numato device access.""" - def __init__(self): + def __init__(self) -> None: """Initialize API state.""" - self.ports_registered = {} + self.ports_registered: dict[tuple[int, int], int] = {} - def check_port_free(self, device_id, port, direction): + def check_port_free(self, device_id: int, port: int, direction: int) -> None: """Check whether a port is still free set up. Fail with exception if it has already been registered. @@ -184,17 +185,16 @@ class NumatoAPI: if (device_id, port) not in self.ports_registered: self.ports_registered[(device_id, port)] = direction else: + io = ( + "input" + if self.ports_registered[(device_id, port)] == gpio.IN + else "output" + ) raise gpio.NumatoGpioError( - "Device {} port {} already in use as {}.".format( - device_id, - port, - "input" - if self.ports_registered[(device_id, port)] == gpio.IN - else "output", - ) + f"Device {device_id} port {port} already in use as {io}." ) - def check_device_id(self, device_id): + def check_device_id(self, device_id: int) -> None: """Check whether a device has been discovered. Fail with exception. 
@@ -202,7 +202,7 @@ class NumatoAPI: if device_id not in gpio.devices: raise gpio.NumatoGpioError(f"Device {device_id} not available.") - def check_port(self, device_id, port, direction): + def check_port(self, device_id: int, port: int, direction: int) -> None: """Raise an error if the port setup doesn't match the direction.""" self.check_device_id(device_id) if (device_id, port) not in self.ports_registered: @@ -220,35 +220,37 @@ class NumatoAPI: if self.ports_registered[(device_id, port)] != direction: raise gpio.NumatoGpioError(msg[direction]) - def setup_output(self, device_id, port): + def setup_output(self, device_id: int, port: int) -> None: """Set up a GPIO as output.""" self.check_device_id(device_id) self.check_port_free(device_id, port, gpio.OUT) gpio.devices[device_id].setup(port, gpio.OUT) - def setup_input(self, device_id, port): + def setup_input(self, device_id: int, port: int) -> None: """Set up a GPIO as input.""" self.check_device_id(device_id) gpio.devices[device_id].setup(port, gpio.IN) self.check_port_free(device_id, port, gpio.IN) - def write_output(self, device_id, port, value): + def write_output(self, device_id: int, port: int, value: int) -> None: """Write a value to a GPIO.""" self.check_port(device_id, port, gpio.OUT) gpio.devices[device_id].write(port, value) - def read_input(self, device_id, port): + def read_input(self, device_id: int, port: int) -> int: """Read a value from a GPIO.""" self.check_port(device_id, port, gpio.IN) return gpio.devices[device_id].read(port) - def read_adc_input(self, device_id, port): + def read_adc_input(self, device_id: int, port: int) -> int: """Read an ADC value from a GPIO ADC port.""" self.check_port(device_id, port, gpio.IN) self.check_device_id(device_id) return gpio.devices[device_id].adc_read(port) - def edge_detect(self, device_id, port, event_callback): + def edge_detect( + self, device_id: int, port: int, event_callback: Callable[[int, bool], None] + ) -> None: """Add detection for RISING and FALLING events.""" self.check_port(device_id, port, gpio.IN) gpio.devices[device_id].add_event_detect(port, event_callback, gpio.BOTH) diff --git a/homeassistant/components/numato/binary_sensor.py b/homeassistant/components/numato/binary_sensor.py index 1f664a372ba..0f4ea23e722 100644 --- a/homeassistant/components/numato/binary_sensor.py +++ b/homeassistant/components/numato/binary_sensor.py @@ -39,7 +39,7 @@ def setup_platform( if discovery_info is None: return - def read_gpio(device_id, port, level): + def read_gpio(device_id: int, port: int, level: bool) -> None: """Send signal to entity to have it update state.""" dispatcher_send(hass, NUMATO_SIGNAL.format(device_id, port), level) @@ -71,7 +71,7 @@ def setup_platform( api.edge_detect(device_id, port, partial(read_gpio, device_id)) except NumatoGpioError as err: - _LOGGER.info( + _LOGGER.error( "Notification setup failed on device %s, " "updates on binary sensor %s only in polling mode: %s", device_id, @@ -97,7 +97,7 @@ class NumatoGpioBinarySensor(BinarySensorEntity): def __init__(self, name, device_id, port, invert_logic, api): """Initialize the Numato GPIO based binary sensor object.""" - self._name = name or DEVICE_DEFAULT_NAME + self._attr_name = name or DEVICE_DEFAULT_NAME self._device_id = device_id self._port = port self._invert_logic = invert_logic @@ -120,11 +120,6 @@ class NumatoGpioBinarySensor(BinarySensorEntity): self._state = level self.async_write_ha_state() - @property - def name(self): - """Return the name of the sensor.""" - return self._name - @property 
def is_on(self): """Return the state of the entity.""" diff --git a/homeassistant/components/numato/manifest.json b/homeassistant/components/numato/manifest.json index f7bcf0527c2..81f3793fa6c 100644 --- a/homeassistant/components/numato/manifest.json +++ b/homeassistant/components/numato/manifest.json @@ -6,5 +6,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["numato_gpio"], + "quality_scale": "legacy", "requirements": ["numato-gpio==0.13.0"] } diff --git a/homeassistant/components/numato/sensor.py b/homeassistant/components/numato/sensor.py index ef71e00bc73..99ef69baa7b 100644 --- a/homeassistant/components/numato/sensor.py +++ b/homeassistant/components/numato/sensor.py @@ -74,38 +74,22 @@ class NumatoGpioAdc(SensorEntity): def __init__(self, name, device_id, port, src_range, dst_range, dst_unit, api): """Initialize the sensor.""" - self._name = name + self._attr_name = name self._device_id = device_id self._port = port self._src_range = src_range self._dst_range = dst_range - self._state = None - self._unit_of_measurement = dst_unit + self._attr_native_unit_of_measurement = dst_unit self._api = api - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def native_value(self): - """Return the state of the sensor.""" - return self._state - - @property - def native_unit_of_measurement(self): - """Return the unit the value is expressed in.""" - return self._unit_of_measurement - def update(self) -> None: """Get the latest data and updates the state.""" try: adc_val = self._api.read_adc_input(self._device_id, self._port) adc_val = self._clamp_to_source_range(adc_val) - self._state = self._linear_scale_to_dest_range(adc_val) + self._attr_native_value = self._linear_scale_to_dest_range(adc_val) except NumatoGpioError as err: - self._state = None + self._attr_native_value = None _LOGGER.error( "Failed to update Numato device %s ADC-port %s: %s", self._device_id, diff --git a/homeassistant/components/numato/switch.py b/homeassistant/components/numato/switch.py index 37d1229e0b2..0a7522c8b11 100644 --- a/homeassistant/components/numato/switch.py +++ b/homeassistant/components/numato/switch.py @@ -73,30 +73,20 @@ class NumatoGpioSwitch(SwitchEntity): def __init__(self, name, device_id, port, invert_logic, api): """Initialize the port.""" - self._name = name or DEVICE_DEFAULT_NAME + self._attr_name = name or DEVICE_DEFAULT_NAME self._device_id = device_id self._port = port self._invert_logic = invert_logic - self._state = False + self._attr_is_on = False self._api = api - @property - def name(self): - """Return the name of the switch.""" - return self._name - - @property - def is_on(self): - """Return true if port is turned on.""" - return self._state - def turn_on(self, **kwargs: Any) -> None: """Turn the port on.""" try: self._api.write_output( self._device_id, self._port, 0 if self._invert_logic else 1 ) - self._state = True + self._attr_is_on = True self.schedule_update_ha_state() except NumatoGpioError as err: _LOGGER.error( @@ -112,7 +102,7 @@ class NumatoGpioSwitch(SwitchEntity): self._api.write_output( self._device_id, self._port, 1 if self._invert_logic else 0 ) - self._state = False + self._attr_is_on = False self.schedule_update_ha_state() except NumatoGpioError as err: _LOGGER.error( diff --git a/homeassistant/components/number/__init__.py b/homeassistant/components/number/__init__.py index 2c750bd834e..9f4aef08aa9 100644 --- a/homeassistant/components/number/__init__.py +++ 
b/homeassistant/components/number/__init__.py @@ -6,11 +6,11 @@ from collections.abc import Callable from contextlib import suppress import dataclasses from datetime import timedelta -from functools import cached_property import logging from math import ceil, floor from typing import TYPE_CHECKING, Any, Self, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -28,6 +28,7 @@ from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import ExtraStoredData, RestoreEntity from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_suggest_report_issue +from homeassistant.util.hass_dict import HassKey from .const import ( # noqa: F401 ATTR_MAX, @@ -49,6 +50,7 @@ from .websocket_api import async_setup as async_setup_ws_api _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[NumberEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -81,7 +83,7 @@ __all__ = [ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Number entities.""" - component = hass.data[DOMAIN] = EntityComponent[NumberEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[NumberEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) async_setup_ws_api(hass) @@ -124,14 +126,12 @@ async def async_set_value(entity: NumberEntity, service_call: ServiceCall) -> No async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[NumberEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[NumberEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class NumberEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -384,6 +384,18 @@ class NumberEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ): return self.hass.config.units.temperature_unit + if (translation_key := self._unit_of_measurement_translation_key) and ( + unit_of_measurement + := self.platform.default_language_platform_translations.get(translation_key) + ): + if native_unit_of_measurement is not None: + raise ValueError( + f"Number entity {type(self)} from integration '{self.platform.platform_name}' " + f"has a translation key for unit_of_measurement '{unit_of_measurement}', " + f"but also has a native_unit_of_measurement '{native_unit_of_measurement}'" + ) + return unit_of_measurement + return native_unit_of_measurement @cached_property diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index ad95c9b5358..56466934e5f 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -3,7 +3,6 @@ from __future__ import annotations from enum import StrEnum -from functools import partial from typing import Final import voluptuous as vol @@ -17,6 +16,8 @@ from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, + UnitOfArea, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -39,12 
+40,6 @@ from homeassistant.const import ( UnitOfVolumeFlowRate, UnitOfVolumetricFlux, ) -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.util.unit_conversion import ( BaseUnitConverter, TemperatureConverter, @@ -74,12 +69,6 @@ class NumberMode(StrEnum): SLIDER = "slider" -# MODE_* are deprecated as of 2021.12, use the NumberMode enum instead. -_DEPRECATED_MODE_AUTO: Final = DeprecatedConstantEnum(NumberMode.AUTO, "2025.1") -_DEPRECATED_MODE_BOX: Final = DeprecatedConstantEnum(NumberMode.BOX, "2025.1") -_DEPRECATED_MODE_SLIDER: Final = DeprecatedConstantEnum(NumberMode.SLIDER, "2025.1") - - class NumberDeviceClass(StrEnum): """Device class for numbers.""" @@ -97,6 +86,12 @@ class NumberDeviceClass(StrEnum): Unit of measurement: `None` """ + AREA = "area" + """Area + + Unit of measurement: `UnitOfArea` units + """ + ATMOSPHERIC_PRESSURE = "atmospheric_pressure" """Atmospheric pressure. @@ -109,6 +104,12 @@ class NumberDeviceClass(StrEnum): Unit of measurement: `%` """ + BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" + """Blood glucose concentration. + + Unit of measurement: `mg/dL`, `mmol/L` + """ + CO = "carbon_monoxide" """Carbon Monoxide gas concentration. @@ -162,7 +163,7 @@ class NumberDeviceClass(StrEnum): ENERGY = "energy" """Energy. - Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ ENERGY_STORAGE = "energy_storage" @@ -171,7 +172,7 @@ class NumberDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ FREQUENCY = "frequency" @@ -279,7 +280,7 @@ class NumberDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW` + Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` """ PRECIPITATION = "precipitation" @@ -362,7 +363,7 @@ class NumberDeviceClass(StrEnum): VOLTAGE = "voltage" """Voltage. 
- Unit of measurement: `V`, `mV` + Unit of measurement: `V`, `mV`, `µV` """ VOLUME = "volume" @@ -390,7 +391,7 @@ class NumberDeviceClass(StrEnum): """Generic flow rate Unit of measurement: UnitOfVolumeFlowRate - - SI / metric: `m³/h`, `L/min` + - SI / metric: `m³/h`, `L/min`, `mL/s` - USCS / imperial: `ft³/min`, `gal/min` """ @@ -427,8 +428,10 @@ DEVICE_CLASSES_SCHEMA: Final = vol.All(vol.Lower, vol.Coerce(NumberDeviceClass)) DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { NumberDeviceClass.APPARENT_POWER: set(UnitOfApparentPower), NumberDeviceClass.AQI: {None}, + NumberDeviceClass.AREA: set(UnitOfArea), NumberDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure), NumberDeviceClass.BATTERY: {PERCENTAGE}, + NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration), NumberDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION}, NumberDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION}, NumberDeviceClass.CONDUCTIVITY: set(UnitOfConductivity), @@ -464,7 +467,13 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { NumberDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, NumberDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, NumberDeviceClass.POWER_FACTOR: {PERCENTAGE, None}, - NumberDeviceClass.POWER: {UnitOfPower.WATT, UnitOfPower.KILO_WATT}, + NumberDeviceClass.POWER: { + UnitOfPower.WATT, + UnitOfPower.KILO_WATT, + UnitOfPower.MEGA_WATT, + UnitOfPower.GIGA_WATT, + UnitOfPower.TERA_WATT, + }, NumberDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), NumberDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), NumberDeviceClass.PRESSURE: set(UnitOfPressure), @@ -503,10 +512,3 @@ UNIT_CONVERTERS: dict[NumberDeviceClass, type[BaseUnitConverter]] = { NumberDeviceClass.TEMPERATURE: TemperatureConverter, NumberDeviceClass.VOLUME_FLOW_RATE: VolumeFlowRateConverter, } - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/number/icons.json b/homeassistant/components/number/icons.json index d74aa1bf408..636fa0a7751 100644 --- a/homeassistant/components/number/icons.json +++ b/homeassistant/components/number/icons.json @@ -9,12 +9,18 @@ "aqi": { "default": "mdi:air-filter" }, + "area": { + "default": "mdi:texture-box" + }, "atmospheric_pressure": { "default": "mdi:thermometer-lines" }, "battery": { "default": "mdi:battery" }, + "blood_glucose_concentration": { + "default": "mdi:spoon-sugar" + }, "carbon_dioxide": { "default": "mdi:molecule-co2" }, @@ -149,6 +155,8 @@ } }, "services": { - "set_value": "mdi:numeric" + "set_value": { + "service": "mdi:numeric" + } } } diff --git a/homeassistant/components/number/strings.json b/homeassistant/components/number/strings.json index 580385172e3..cc77d224d72 100644 --- a/homeassistant/components/number/strings.json +++ b/homeassistant/components/number/strings.json @@ -37,12 +37,18 @@ "aqi": { "name": "[%key:component::sensor::entity_component::aqi::name%]" }, + "area": { + "name": "[%key:component::sensor::entity_component::area::name%]" + }, "atmospheric_pressure": { "name": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]" }, "battery": { "name": "[%key:component::sensor::entity_component::battery::name%]" }, + 
"blood_glucose_concentration": { + "name": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]" + }, "carbon_dioxide": { "name": "[%key:component::sensor::entity_component::carbon_dioxide::name%]" }, diff --git a/homeassistant/components/nut/__init__.py b/homeassistant/components/nut/__init__.py index 2ce67c76649..169dbbbff5d 100644 --- a/homeassistant/components/nut/__init__.py +++ b/homeassistant/components/nut/__init__.py @@ -86,6 +86,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="NUT resource status", update_method=async_update_data, update_interval=timedelta(seconds=scan_interval), @@ -129,7 +130,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool: name=data.name.title(), manufacturer=data.device_info.manufacturer, model=data.device_info.model, + model_id=data.device_info.model_id, sw_version=data.device_info.firmware, + serial_number=data.device_info.serial, + suggested_area=data.device_info.device_location, ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -207,7 +211,10 @@ class NUTDeviceInfo: manufacturer: str | None = None model: str | None = None + model_id: str | None = None firmware: str | None = None + serial: str | None = None + device_location: str | None = None class PyNUTData: @@ -266,8 +273,13 @@ class PyNUTData: manufacturer = _manufacturer_from_status(self._status) model = _model_from_status(self._status) + model_id: str | None = self._status.get("device.part") firmware = _firmware_from_status(self._status) - return NUTDeviceInfo(manufacturer, model, firmware) + serial = _serial_from_status(self._status) + device_location: str | None = self._status.get("device.location") + return NUTDeviceInfo( + manufacturer, model, model_id, firmware, serial, device_location + ) async def _async_get_status(self) -> dict[str, str]: """Get the ups status from NUT.""" diff --git a/homeassistant/components/nut/config_flow.py b/homeassistant/components/nut/config_flow.py index d0a2da124a6..966c51e98e9 100644 --- a/homeassistant/components/nut/config_flow.py +++ b/homeassistant/components/nut/config_flow.py @@ -235,16 +235,12 @@ class NutConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for nut.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/nut/icons.json b/homeassistant/components/nut/icons.json index a4125d8633f..e0f78d6400b 100644 --- a/homeassistant/components/nut/icons.json +++ b/homeassistant/components/nut/icons.json @@ -1,59 +1,11 @@ { "entity": { "sensor": { - "ups_status_display": { + "battery_alarm_threshold": { "default": "mdi:information-outline" }, - "ups_status": { - "default": "mdi:information-outline" - }, - "ups_alarm": { - "default": "mdi:alarm" - }, - "ups_load": { - "default": "mdi:gauge" - }, - "ups_load_high": { - "default": "mdi:gauge" - }, - "ups_id": { - "default": "mdi:information-outline" - }, - "ups_test_result": { - "default": "mdi:information-outline" - }, - "ups_test_date": { - "default": 
"mdi:calendar" - }, - "ups_display_language": { - "default": "mdi:information-outline" - }, - "ups_contacts": { - "default": "mdi:information-outline" - }, - "ups_efficiency": { - "default": "mdi:gauge" - }, - "ups_beeper_status": { - "default": "mdi:information-outline" - }, - "ups_type": { - "default": "mdi:information-outline" - }, - "ups_watchdog_status": { - "default": "mdi:information-outline" - }, - "ups_start_auto": { - "default": "mdi:information-outline" - }, - "ups_start_battery": { - "default": "mdi:information-outline" - }, - "ups_start_reboot": { - "default": "mdi:information-outline" - }, - "ups_shutdown": { - "default": "mdi:information-outline" + "battery_capacity": { + "default": "mdi:flash" }, "battery_charge_low": { "default": "mdi:gauge" @@ -67,12 +19,6 @@ "battery_charger_status": { "default": "mdi:information-outline" }, - "battery_capacity": { - "default": "mdi:flash" - }, - "battery_alarm_threshold": { - "default": "mdi:information-outline" - }, "battery_date": { "default": "mdi:calendar" }, @@ -88,19 +34,19 @@ "battery_type": { "default": "mdi:information-outline" }, - "input_sensitivity": { - "default": "mdi:information-outline" - }, - "input_transfer_reason": { + "input_bypass_phases": { "default": "mdi:information-outline" }, "input_frequency_status": { "default": "mdi:information-outline" }, - "input_bypass_phases": { + "input_phases": { "default": "mdi:information-outline" }, - "input_phases": { + "input_sensitivity": { + "default": "mdi:information-outline" + }, + "input_transfer_reason": { "default": "mdi:information-outline" }, "output_l1_power_percent": { @@ -114,6 +60,60 @@ }, "output_phases": { "default": "mdi:information-outline" + }, + "ups_alarm": { + "default": "mdi:alarm" + }, + "ups_beeper_status": { + "default": "mdi:information-outline" + }, + "ups_contacts": { + "default": "mdi:information-outline" + }, + "ups_display_language": { + "default": "mdi:information-outline" + }, + "ups_efficiency": { + "default": "mdi:gauge" + }, + "ups_id": { + "default": "mdi:information-outline" + }, + "ups_load": { + "default": "mdi:gauge" + }, + "ups_load_high": { + "default": "mdi:gauge" + }, + "ups_shutdown": { + "default": "mdi:information-outline" + }, + "ups_start_auto": { + "default": "mdi:information-outline" + }, + "ups_start_battery": { + "default": "mdi:information-outline" + }, + "ups_start_reboot": { + "default": "mdi:information-outline" + }, + "ups_status": { + "default": "mdi:information-outline" + }, + "ups_status_display": { + "default": "mdi:information-outline" + }, + "ups_test_date": { + "default": "mdi:calendar" + }, + "ups_test_result": { + "default": "mdi:information-outline" + }, + "ups_type": { + "default": "mdi:information-outline" + }, + "ups_watchdog_status": { + "default": "mdi:information-outline" } } } diff --git a/homeassistant/components/nut/sensor.py b/homeassistant/components/nut/sensor.py index 7b61342866b..bb702873052 100644 --- a/homeassistant/components/nut/sensor.py +++ b/homeassistant/components/nut/sensor.py @@ -15,6 +15,7 @@ from homeassistant.components.sensor import ( from homeassistant.const import ( ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_SERIAL_NUMBER, ATTR_SW_VERSION, PERCENTAGE, STATE_UNKNOWN, @@ -42,6 +43,7 @@ NUT_DEV_INFO_TO_DEV_INFO: dict[str, str] = { "manufacturer": ATTR_MANUFACTURER, "model": ATTR_MODEL, "firmware": ATTR_SW_VERSION, + "serial": ATTR_SERIAL_NUMBER, } _LOGGER = logging.getLogger(__name__) @@ -658,7 +660,6 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { 
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, device_class=SensorDeviceClass.CURRENT, state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), "input.L1.current": SensorEntityDescription( @@ -927,6 +928,7 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, ), "ambient.temperature": SensorEntityDescription( key="ambient.temperature", @@ -934,6 +936,7 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, ), "watts": SensorEntityDescription( key="watts", diff --git a/homeassistant/components/nut/strings.json b/homeassistant/components/nut/strings.json index d5b9acbdaad..ec5905fc16c 100644 --- a/homeassistant/components/nut/strings.json +++ b/homeassistant/components/nut/strings.json @@ -127,8 +127,8 @@ "input_l1_current": { "name": "Input L1 current" }, "input_l2_current": { "name": "Input L2 current" }, "input_l3_current": { "name": "Input L3 current" }, - "input_frequency": { "name": "Input line frequency" }, - "input_frequency_nominal": { "name": "Nominal input line frequency" }, + "input_frequency": { "name": "Input frequency" }, + "input_frequency_nominal": { "name": "Input nominal frequency" }, "input_frequency_status": { "name": "Input frequency status" }, "input_l1_frequency": { "name": "Input L1 line frequency" }, "input_l2_frequency": { "name": "Input L2 line frequency" }, diff --git a/homeassistant/components/nws/__init__.py b/homeassistant/components/nws/__init__.py index 2e643d7dbc6..c700476ed3d 100644 --- a/homeassistant/components/nws/__init__.py +++ b/homeassistant/components/nws/__init__.py @@ -110,6 +110,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: NWSConfigEntry) -> bool: coordinator_forecast = TimestampDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"NWS forecast station {station}", update_method=async_setup_update_forecast(0, 0), update_interval=DEFAULT_SCAN_INTERVAL, @@ -121,6 +122,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: NWSConfigEntry) -> bool: coordinator_forecast_hourly = TimestampDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"NWS forecast hourly station {station}", update_method=async_setup_update_forecast_hourly(0, 0), update_interval=DEFAULT_SCAN_INTERVAL, diff --git a/homeassistant/components/nws/icons.json b/homeassistant/components/nws/icons.json index 8f91388a3ef..2aef3a2e614 100644 --- a/homeassistant/components/nws/icons.json +++ b/homeassistant/components/nws/icons.json @@ -1,5 +1,7 @@ { "services": { - "get_forecasts_extra": "mdi:weather-cloudy-clock" + "get_forecasts_extra": { + "service": "mdi:weather-cloudy-clock" + } } } diff --git a/homeassistant/components/nws/manifest.json b/homeassistant/components/nws/manifest.json index d11a0e62bcf..0e02e652b49 100644 --- a/homeassistant/components/nws/manifest.json +++ b/homeassistant/components/nws/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nws", "iot_class": "cloud_polling", "loggers": ["metar", "pynws"], - "quality_scale": "platinum", "requirements": ["pynws[retry]==1.8.2"] } diff --git a/homeassistant/components/nx584/alarm_control_panel.py 
b/homeassistant/components/nx584/alarm_control_panel.py index 61de4f611b8..6622eec530f 100644 --- a/homeassistant/components/nx584/alarm_control_panel.py +++ b/homeassistant/components/nx584/alarm_control_panel.py @@ -13,17 +13,10 @@ from homeassistant.components.alarm_control_panel import ( PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PORT, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers import config_validation as cv, entity_platform @@ -95,7 +88,6 @@ class NX584Alarm(AlarmControlPanelEntity): """Representation of a NX584-based alarm panel.""" _attr_code_format = CodeFormat.NUMBER - _attr_state: str | None _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME | AlarmControlPanelEntityFeature.ARM_AWAY @@ -118,11 +110,11 @@ class NX584Alarm(AlarmControlPanelEntity): "Unable to connect to %(host)s: %(reason)s", {"host": self._url, "reason": ex}, ) - self._attr_state = None + self._attr_alarm_state = None zones = [] except IndexError: _LOGGER.error("NX584 reports no partitions") - self._attr_state = None + self._attr_alarm_state = None zones = [] bypassed = False @@ -136,15 +128,15 @@ class NX584Alarm(AlarmControlPanelEntity): break if not part["armed"]: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED elif bypassed: - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME else: - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY for flag in part["condition_flags"]: if flag == "Siren on": - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED def alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" diff --git a/homeassistant/components/nx584/icons.json b/homeassistant/components/nx584/icons.json index 76e5ae82e09..3bd8e485bfd 100644 --- a/homeassistant/components/nx584/icons.json +++ b/homeassistant/components/nx584/icons.json @@ -1,6 +1,10 @@ { "services": { - "bypass_zone": "mdi:wrench", - "unbypass_zone": "mdi:wrench" + "bypass_zone": { + "service": "mdi:wrench" + }, + "unbypass_zone": { + "service": "mdi:wrench" + } } } diff --git a/homeassistant/components/nx584/manifest.json b/homeassistant/components/nx584/manifest.json index 84ead05d083..9ac469224d0 100644 --- a/homeassistant/components/nx584/manifest.json +++ b/homeassistant/components/nx584/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nx584", "iot_class": "local_push", "loggers": ["nx584"], + "quality_scale": "legacy", "requirements": ["pynx584==0.8.2"] } diff --git a/homeassistant/components/nyt_games/__init__.py b/homeassistant/components/nyt_games/__init__.py new file mode 100644 index 00000000000..94dc22fe89e --- /dev/null +++ b/homeassistant/components/nyt_games/__init__.py @@ -0,0 +1,42 @@ +"""The NYT Games integration.""" + +from __future__ import annotations + +from nyt_games import NYTGamesClient + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_TOKEN, Platform +from 
homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_create_clientsession + +from .coordinator import NYTGamesCoordinator + +PLATFORMS: list[Platform] = [ + Platform.SENSOR, +] + + +type NYTGamesConfigEntry = ConfigEntry[NYTGamesCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: NYTGamesConfigEntry) -> bool: + """Set up NYTGames from a config entry.""" + + client = NYTGamesClient( + entry.data[CONF_TOKEN], session=async_create_clientsession(hass) + ) + + coordinator = NYTGamesCoordinator(hass, client) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: NYTGamesConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/nyt_games/config_flow.py b/homeassistant/components/nyt_games/config_flow.py new file mode 100644 index 00000000000..bfed1f47c41 --- /dev/null +++ b/homeassistant/components/nyt_games/config_flow.py @@ -0,0 +1,46 @@ +"""Config flow for NYT Games.""" + +from typing import Any + +from nyt_games import NYTGamesAuthenticationError, NYTGamesClient, NYTGamesError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_TOKEN +from homeassistant.helpers.aiohttp_client import async_create_clientsession + +from .const import DOMAIN, LOGGER + + +class NYTGamesConfigFlow(ConfigFlow, domain=DOMAIN): + """NYT Games config flow.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + errors: dict[str, str] = {} + if user_input: + session = async_create_clientsession(self.hass) + token = user_input[CONF_TOKEN].strip() + client = NYTGamesClient(token, session=session) + try: + user_id = await client.get_user_id() + except NYTGamesAuthenticationError: + errors["base"] = "invalid_auth" + except NYTGamesError: + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected error") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(str(user_id)) + self._abort_if_unique_id_configured() + return self.async_create_entry( + title="NYT Games", data={CONF_TOKEN: token} + ) + return self.async_show_form( + step_id="user", + data_schema=vol.Schema({vol.Required(CONF_TOKEN): str}), + errors=errors, + ) diff --git a/homeassistant/components/nyt_games/const.py b/homeassistant/components/nyt_games/const.py new file mode 100644 index 00000000000..c290e70b283 --- /dev/null +++ b/homeassistant/components/nyt_games/const.py @@ -0,0 +1,7 @@ +"""Constants for the NYT Games integration.""" + +import logging + +DOMAIN = "nyt_games" + +LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/nyt_games/coordinator.py b/homeassistant/components/nyt_games/coordinator.py new file mode 100644 index 00000000000..5e88a5dd92a --- /dev/null +++ b/homeassistant/components/nyt_games/coordinator.py @@ -0,0 +1,54 @@ +"""Define an object to manage fetching NYT Games data.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import timedelta +from typing import TYPE_CHECKING + +from nyt_games import Connections, NYTGamesClient, NYTGamesError, SpellingBee, Wordle + +from 
homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import LOGGER + +if TYPE_CHECKING: + from . import NYTGamesConfigEntry + + +@dataclass +class NYTGamesData: + """Class for NYT Games data.""" + + wordle: Wordle + spelling_bee: SpellingBee | None + connections: Connections | None + + +class NYTGamesCoordinator(DataUpdateCoordinator[NYTGamesData]): + """Class to manage fetching NYT Games data.""" + + config_entry: NYTGamesConfigEntry + + def __init__(self, hass: HomeAssistant, client: NYTGamesClient) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + logger=LOGGER, + name="NYT Games", + update_interval=timedelta(minutes=15), + ) + self.client = client + + async def _async_update_data(self) -> NYTGamesData: + try: + stats_data = await self.client.get_latest_stats() + connections_data = await self.client.get_connections() + except NYTGamesError as error: + raise UpdateFailed(error) from error + return NYTGamesData( + wordle=stats_data.wordle, + spelling_bee=stats_data.spelling_bee, + connections=connections_data, + ) diff --git a/homeassistant/components/nyt_games/entity.py b/homeassistant/components/nyt_games/entity.py new file mode 100644 index 00000000000..40ca6ca973f --- /dev/null +++ b/homeassistant/components/nyt_games/entity.py @@ -0,0 +1,61 @@ +"""Base class for NYT Games entities.""" + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import NYTGamesCoordinator + + +class NYTGamesEntity(CoordinatorEntity[NYTGamesCoordinator]): + """Defines a base NYT Games entity.""" + + _attr_has_entity_name = True + + +class WordleEntity(NYTGamesEntity): + """Defines a NYT Games entity.""" + + def __init__(self, coordinator: NYTGamesCoordinator) -> None: + """Initialize a NYT Games entity.""" + super().__init__(coordinator) + unique_id = coordinator.config_entry.unique_id + assert unique_id is not None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{unique_id}_wordle")}, + entry_type=DeviceEntryType.SERVICE, + manufacturer="New York Times", + name="Wordle", + ) + + +class SpellingBeeEntity(NYTGamesEntity): + """Defines a NYT Games entity.""" + + def __init__(self, coordinator: NYTGamesCoordinator) -> None: + """Initialize a NYT Games entity.""" + super().__init__(coordinator) + unique_id = coordinator.config_entry.unique_id + assert unique_id is not None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{unique_id}_spelling_bee")}, + entry_type=DeviceEntryType.SERVICE, + manufacturer="New York Times", + name="Spelling Bee", + ) + + +class ConnectionsEntity(NYTGamesEntity): + """Defines a NYT Games entity.""" + + def __init__(self, coordinator: NYTGamesCoordinator) -> None: + """Initialize a NYT Games entity.""" + super().__init__(coordinator) + unique_id = coordinator.config_entry.unique_id + assert unique_id is not None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{unique_id}_connections")}, + entry_type=DeviceEntryType.SERVICE, + manufacturer="New York Times", + name="Connections", + ) diff --git a/homeassistant/components/nyt_games/icons.json b/homeassistant/components/nyt_games/icons.json new file mode 100644 index 00000000000..2b839c1d218 --- /dev/null +++ b/homeassistant/components/nyt_games/icons.json @@ -0,0 +1,33 @@ +{ + "entity": { + "sensor": { + "wordles_played": { + 
"default": "mdi:text-long" + }, + "won": { + "default": "mdi:trophy-award" + }, + "streak": { + "default": "mdi:calendar-range" + }, + "max_streak": { + "default": "mdi:calendar-month" + }, + "spelling_bees_played": { + "default": "mdi:beehive-outline" + }, + "total_words": { + "default": "mdi:beehive-outline" + }, + "total_pangrams": { + "default": "mdi:beehive-outline" + }, + "connections_played": { + "default": "mdi:table-large" + }, + "last_played": { + "default": "mdi:calendar" + } + } + } +} diff --git a/homeassistant/components/nyt_games/manifest.json b/homeassistant/components/nyt_games/manifest.json new file mode 100644 index 00000000000..c32de754782 --- /dev/null +++ b/homeassistant/components/nyt_games/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "nyt_games", + "name": "NYT Games", + "codeowners": ["@joostlek"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/nyt_games", + "integration_type": "service", + "iot_class": "cloud_polling", + "requirements": ["nyt_games==0.4.4"] +} diff --git a/homeassistant/components/nyt_games/sensor.py b/homeassistant/components/nyt_games/sensor.py new file mode 100644 index 00000000000..01b2db4620b --- /dev/null +++ b/homeassistant/components/nyt_games/sensor.py @@ -0,0 +1,241 @@ +"""Support for NYT Games sensors.""" + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import date + +from nyt_games import Connections, SpellingBee, Wordle + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfTime +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import NYTGamesConfigEntry +from .coordinator import NYTGamesCoordinator +from .entity import ConnectionsEntity, SpellingBeeEntity, WordleEntity + + +@dataclass(frozen=True, kw_only=True) +class NYTGamesWordleSensorEntityDescription(SensorEntityDescription): + """Describes a NYT Games Wordle sensor entity.""" + + value_fn: Callable[[Wordle], StateType] + + +WORDLE_SENSORS: tuple[NYTGamesWordleSensorEntityDescription, ...] 
= ( + NYTGamesWordleSensorEntityDescription( + key="wordles_played", + translation_key="wordles_played", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement="games", + value_fn=lambda wordle: wordle.games_played, + ), + NYTGamesWordleSensorEntityDescription( + key="wordles_won", + translation_key="won", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement="games", + value_fn=lambda wordle: wordle.games_won, + ), + NYTGamesWordleSensorEntityDescription( + key="wordles_streak", + translation_key="streak", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.DAYS, + device_class=SensorDeviceClass.DURATION, + value_fn=lambda wordle: wordle.current_streak, + ), + NYTGamesWordleSensorEntityDescription( + key="wordles_max_streak", + translation_key="max_streak", + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfTime.DAYS, + device_class=SensorDeviceClass.DURATION, + value_fn=lambda wordle: wordle.max_streak, + ), +) + + +@dataclass(frozen=True, kw_only=True) +class NYTGamesSpellingBeeSensorEntityDescription(SensorEntityDescription): + """Describes a NYT Games Spelling Bee sensor entity.""" + + value_fn: Callable[[SpellingBee], StateType] + + +SPELLING_BEE_SENSORS: tuple[NYTGamesSpellingBeeSensorEntityDescription, ...] = ( + NYTGamesSpellingBeeSensorEntityDescription( + key="spelling_bees_played", + translation_key="spelling_bees_played", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="games", + value_fn=lambda spelling_bee: spelling_bee.puzzles_started, + ), + NYTGamesSpellingBeeSensorEntityDescription( + key="spelling_bees_total_words", + translation_key="total_words", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="words", + entity_registry_enabled_default=False, + value_fn=lambda spelling_bee: spelling_bee.total_words, + ), + NYTGamesSpellingBeeSensorEntityDescription( + key="spelling_bees_total_pangrams", + translation_key="total_pangrams", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="pangrams", + entity_registry_enabled_default=False, + value_fn=lambda spelling_bee: spelling_bee.total_pangrams, + ), +) + + +@dataclass(frozen=True, kw_only=True) +class NYTGamesConnectionsSensorEntityDescription(SensorEntityDescription): + """Describes a NYT Games Connections sensor entity.""" + + value_fn: Callable[[Connections], StateType | date] + + +CONNECTIONS_SENSORS: tuple[NYTGamesConnectionsSensorEntityDescription, ...] 
= ( + NYTGamesConnectionsSensorEntityDescription( + key="connections_played", + translation_key="connections_played", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="games", + value_fn=lambda connections: connections.puzzles_completed, + ), + NYTGamesConnectionsSensorEntityDescription( + key="connections_won", + translation_key="won", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="games", + value_fn=lambda connections: connections.puzzles_won, + ), + NYTGamesConnectionsSensorEntityDescription( + key="connections_last_played", + translation_key="last_played", + device_class=SensorDeviceClass.DATE, + value_fn=lambda connections: connections.last_completed, + ), + NYTGamesConnectionsSensorEntityDescription( + key="connections_streak", + translation_key="streak", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.DAYS, + device_class=SensorDeviceClass.DURATION, + value_fn=lambda connections: connections.current_streak, + ), + NYTGamesConnectionsSensorEntityDescription( + key="connections_max_streak", + translation_key="max_streak", + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfTime.DAYS, + device_class=SensorDeviceClass.DURATION, + value_fn=lambda connections: connections.max_streak, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: NYTGamesConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up NYT Games sensor entities based on a config entry.""" + + coordinator = entry.runtime_data + + entities: list[SensorEntity] = [ + NYTGamesWordleSensor(coordinator, description) for description in WORDLE_SENSORS + ] + if coordinator.data.spelling_bee is not None: + entities.extend( + NYTGamesSpellingBeeSensor(coordinator, description) + for description in SPELLING_BEE_SENSORS + ) + if coordinator.data.connections is not None: + entities.extend( + NYTGamesConnectionsSensor(coordinator, description) + for description in CONNECTIONS_SENSORS + ) + + async_add_entities(entities) + + +class NYTGamesWordleSensor(WordleEntity, SensorEntity): + """Defines a NYT Games sensor.""" + + entity_description: NYTGamesWordleSensorEntityDescription + + def __init__( + self, + coordinator: NYTGamesCoordinator, + description: NYTGamesWordleSensorEntityDescription, + ) -> None: + """Initialize NYT Games sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = ( + f"{coordinator.config_entry.unique_id}-wordle-{description.key}" + ) + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data.wordle) + + +class NYTGamesSpellingBeeSensor(SpellingBeeEntity, SensorEntity): + """Defines a NYT Games sensor.""" + + entity_description: NYTGamesSpellingBeeSensorEntityDescription + + def __init__( + self, + coordinator: NYTGamesCoordinator, + description: NYTGamesSpellingBeeSensorEntityDescription, + ) -> None: + """Initialize NYT Games sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = ( + f"{coordinator.config_entry.unique_id}-spelling_bee-{description.key}" + ) + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + assert self.coordinator.data.spelling_bee is not None + return self.entity_description.value_fn(self.coordinator.data.spelling_bee) + + +class NYTGamesConnectionsSensor(ConnectionsEntity, SensorEntity): + """Defines a NYT Games 
sensor.""" + + entity_description: NYTGamesConnectionsSensorEntityDescription + + def __init__( + self, + coordinator: NYTGamesCoordinator, + description: NYTGamesConnectionsSensorEntityDescription, + ) -> None: + """Initialize NYT Games sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = ( + f"{coordinator.config_entry.unique_id}-connections-{description.key}" + ) + + @property + def native_value(self) -> StateType | date: + """Return the state of the sensor.""" + assert self.coordinator.data.connections is not None + return self.entity_description.value_fn(self.coordinator.data.connections) diff --git a/homeassistant/components/nyt_games/strings.json b/homeassistant/components/nyt_games/strings.json new file mode 100644 index 00000000000..9a3771aebd9 --- /dev/null +++ b/homeassistant/components/nyt_games/strings.json @@ -0,0 +1,53 @@ +{ + "config": { + "step": { + "user": { + "data": { + "token": "Token" + }, + "data_description": { + "token": "The NYT Games NYT-S cookie value." + } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "entity": { + "sensor": { + "wordles_played": { + "name": "Played" + }, + "won": { + "name": "Won" + }, + "streak": { + "name": "Current streak" + }, + "max_streak": { + "name": "Highest streak" + }, + "spelling_bees_played": { + "name": "[%key:component::nyt_games::entity::sensor::wordles_played::name%]" + }, + "total_words": { + "name": "Total words found" + }, + "total_pangrams": { + "name": "Total pangrams found" + }, + "connections_played": { + "name": "[%key:component::nyt_games::entity::sensor::wordles_played::name%]" + }, + "last_played": { + "name": "Last played" + } + } + } +} diff --git a/homeassistant/components/nzbget/__init__.py b/homeassistant/components/nzbget/__init__.py index d47ac78c9d0..84456c4c006 100644 --- a/homeassistant/components/nzbget/__init__.py +++ b/homeassistant/components/nzbget/__init__.py @@ -6,8 +6,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( ATTR_SPEED, @@ -93,25 +91,3 @@ def _async_register_services( async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) - - -class NZBGetEntity(CoordinatorEntity[NZBGetDataUpdateCoordinator]): - """Defines a base NZBGet entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - *, - entry_id: str, - entry_name: str, - coordinator: NZBGetDataUpdateCoordinator, - ) -> None: - """Initialize the NZBGet entity.""" - super().__init__(coordinator) - self._entry_id = entry_id - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, entry_id)}, - name=entry_name, - entry_type=DeviceEntryType.SERVICE, - ) diff --git a/homeassistant/components/nzbget/config_flow.py b/homeassistant/components/nzbget/config_flow.py index 47d35f32f9f..a99d3d3f328 100644 --- 
a/homeassistant/components/nzbget/config_flow.py +++ b/homeassistant/components/nzbget/config_flow.py @@ -50,9 +50,6 @@ class NZBGetConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: diff --git a/homeassistant/components/nzbget/entity.py b/homeassistant/components/nzbget/entity.py new file mode 100644 index 00000000000..7644cb28232 --- /dev/null +++ b/homeassistant/components/nzbget/entity.py @@ -0,0 +1,29 @@ +"""The NZBGet integration.""" + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import NZBGetDataUpdateCoordinator + + +class NZBGetEntity(CoordinatorEntity[NZBGetDataUpdateCoordinator]): + """Defines a base NZBGet entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + *, + entry_id: str, + entry_name: str, + coordinator: NZBGetDataUpdateCoordinator, + ) -> None: + """Initialize the NZBGet entity.""" + super().__init__(coordinator) + self._entry_id = entry_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, entry_id)}, + name=entry_name, + entry_type=DeviceEntryType.SERVICE, + ) diff --git a/homeassistant/components/nzbget/icons.json b/homeassistant/components/nzbget/icons.json index a693e9fec86..ca4f4d584ae 100644 --- a/homeassistant/components/nzbget/icons.json +++ b/homeassistant/components/nzbget/icons.json @@ -1,7 +1,13 @@ { "services": { - "pause": "mdi:pause", - "resume": "mdi:play", - "set_speed": "mdi:speedometer" + "pause": { + "service": "mdi:pause" + }, + "resume": { + "service": "mdi:play" + }, + "set_speed": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/nzbget/manifest.json b/homeassistant/components/nzbget/manifest.json index 34f6f37873b..60e90e372ff 100644 --- a/homeassistant/components/nzbget/manifest.json +++ b/homeassistant/components/nzbget/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/nzbget", "iot_class": "local_polling", "loggers": ["pynzbgetapi"], - "requirements": ["pynzbgetapi==0.2.0"] + "requirements": ["pynzbgetapi==0.2.0"], + "single_config_entry": true } diff --git a/homeassistant/components/nzbget/sensor.py b/homeassistant/components/nzbget/sensor.py index 394e1175c2f..f6a4e4cc973 100644 --- a/homeassistant/components/nzbget/sensor.py +++ b/homeassistant/components/nzbget/sensor.py @@ -17,9 +17,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.util.dt import utcnow -from . 
import NZBGetEntity from .const import DATA_COORDINATOR, DOMAIN from .coordinator import NZBGetDataUpdateCoordinator +from .entity import NZBGetEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/nzbget/strings.json b/homeassistant/components/nzbget/strings.json index 4da9a0b505e..84a2ed0b821 100644 --- a/homeassistant/components/nzbget/strings.json +++ b/homeassistant/components/nzbget/strings.json @@ -19,7 +19,6 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "unknown": "[%key:common::config_flow::error::unknown%]" } }, diff --git a/homeassistant/components/nzbget/switch.py b/homeassistant/components/nzbget/switch.py index c6505fd522d..552a1854902 100644 --- a/homeassistant/components/nzbget/switch.py +++ b/homeassistant/components/nzbget/switch.py @@ -10,9 +10,9 @@ from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import NZBGetEntity from .const import DATA_COORDINATOR, DOMAIN from .coordinator import NZBGetDataUpdateCoordinator +from .entity import NZBGetEntity async def async_setup_entry( diff --git a/homeassistant/components/oasa_telematics/manifest.json b/homeassistant/components/oasa_telematics/manifest.json index d3dbaad98e3..7365081a959 100644 --- a/homeassistant/components/oasa_telematics/manifest.json +++ b/homeassistant/components/oasa_telematics/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/oasa_telematics", "iot_class": "cloud_polling", "loggers": ["oasatelematics"], + "quality_scale": "legacy", "requirements": ["oasatelematics==0.3"] } diff --git a/homeassistant/components/obihai/__init__.py b/homeassistant/components/obihai/__init__.py index 0ba0b3dfc5e..43fd3e3426b 100644 --- a/homeassistant/components/obihai/__init__.py +++ b/homeassistant/components/obihai/__init__.py @@ -40,7 +40,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, unique_id=format_mac(device_mac), version=2 ) - LOGGER.info("Migration to version %s successful", entry.version) + LOGGER.debug("Migration to version %s successful", entry.version) return True diff --git a/homeassistant/components/obihai/sensor.py b/homeassistant/components/obihai/sensor.py index 344767c8cd1..c162bd6c559 100644 --- a/homeassistant/components/obihai/sensor.py +++ b/homeassistant/components/obihai/sensor.py @@ -106,7 +106,7 @@ class ObihaiServiceSensors(SensorEntity): if not self.requester.available: self.requester.available = True - LOGGER.info("Connection restored") + LOGGER.warning("Connection restored") self._attr_available = True except RequestException as exc: diff --git a/homeassistant/components/octoprint/camera.py b/homeassistant/components/octoprint/camera.py index c5d6f9a62e1..e6430c55fa2 100644 --- a/homeassistant/components/octoprint/camera.py +++ b/homeassistant/components/octoprint/camera.py @@ -4,7 +4,7 @@ from __future__ import annotations from pyoctoprintapi import OctoprintClient, WebcamSettings -from homeassistant.components.mjpeg.camera import MjpegCamera +from homeassistant.components.mjpeg import MjpegCamera from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_VERIFY_SSL from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/octoprint/config_flow.py 
b/homeassistant/components/octoprint/config_flow.py index 22943b85f4e..9bbf21d71fa 100644 --- a/homeassistant/components/octoprint/config_flow.py +++ b/homeassistant/components/octoprint/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Mapping import logging -from typing import Any +from typing import TYPE_CHECKING, Any import aiohttp from pyoctoprintapi import ApiError, OctoprintClient, OctoprintException @@ -104,7 +104,9 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): self._user_input = user_input return await self.async_step_get_api_key() - async def async_step_get_api_key(self, user_input=None): + async def async_step_get_api_key( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Get an Application Api Key.""" if not self.api_key_task: self.api_key_task = self.hass.async_create_task( @@ -130,7 +132,7 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_progress_done(next_step_id="user") - async def _finish_config(self, user_input: dict): + async def _finish_config(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Finish the configuration setup.""" existing_entry = await self.async_set_unique_id(self.unique_id) if existing_entry is not None: @@ -156,13 +158,13 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=user_input[CONF_HOST], data=user_input) - async def async_step_auth_failed(self, user_input): + async def async_step_auth_failed(self, user_input: None) -> ConfigFlowResult: """Handle api fetch failure.""" return self.async_abort(reason="auth_failed") - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -201,7 +203,7 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): url = URL(discovery_info.upnp["presentationURL"]) self.context.update( { - "title_placeholders": {CONF_HOST: url.host}, + "title_placeholders": {CONF_HOST: url.host or "-"}, "configuration_url": discovery_info.upnp["presentationURL"], } ) @@ -215,13 +217,15 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user() - async def async_step_reauth(self, config: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle reauthorization request from Octoprint.""" - self._reauth_data = dict(config) + self._reauth_data = dict(entry_data) self.context.update( { - "title_placeholders": {CONF_HOST: config[CONF_HOST]}, + "title_placeholders": {CONF_HOST: entry_data[CONF_HOST]}, } ) @@ -250,15 +254,17 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): self._user_input = self._reauth_data return await self.async_step_get_api_key() - async def _async_get_auth_key(self): + async def _async_get_auth_key(self) -> None: """Get application api key.""" + if TYPE_CHECKING: + assert self._user_input is not None octoprint = self._get_octoprint_client(self._user_input) self._user_input[CONF_API_KEY] = await octoprint.request_app_key( "Home Assistant", self._user_input[CONF_USERNAME], 300 ) - def _get_octoprint_client(self, user_input: dict) -> OctoprintClient: + def _get_octoprint_client(self, user_input: dict[str, Any]) -> OctoprintClient: """Build an octoprint 
client from the user_input.""" verify_ssl = user_input.get(CONF_VERIFY_SSL, True) @@ -279,7 +285,7 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): path=user_input[CONF_PATH], ) - def async_remove(self): + def async_remove(self) -> None: """Detach the session.""" for session in self._sessions: session.detach() diff --git a/homeassistant/components/octoprint/icons.json b/homeassistant/components/octoprint/icons.json index 972ecabb765..720718fcede 100644 --- a/homeassistant/components/octoprint/icons.json +++ b/homeassistant/components/octoprint/icons.json @@ -1,5 +1,7 @@ { "services": { - "printer_connect": "mdi:lan-connect" + "printer_connect": { + "service": "mdi:lan-connect" + } } } diff --git a/homeassistant/components/octoprint/strings.json b/homeassistant/components/octoprint/strings.json index e9df0ed755c..5687ab36033 100644 --- a/homeassistant/components/octoprint/strings.json +++ b/homeassistant/components/octoprint/strings.json @@ -33,7 +33,7 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "progress": { - "get_api_key": "Open the OctoPrint UI and click 'Allow' on the Access Request for 'Home Assistant'." + "get_api_key": "Open the OctoPrint UI and select **Allow** on the Access Request for **Home Assistant**." } }, "exceptions": { diff --git a/homeassistant/components/oem/climate.py b/homeassistant/components/oem/climate.py index cf16f1ba87e..4cecb9ff195 100644 --- a/homeassistant/components/oem/climate.py +++ b/homeassistant/components/oem/climate.py @@ -73,7 +73,6 @@ class ThermostatDevice(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, thermostat, name): """Initialize the device.""" diff --git a/homeassistant/components/oem/manifest.json b/homeassistant/components/oem/manifest.json index a8ce99b9372..f7ab34adbd9 100644 --- a/homeassistant/components/oem/manifest.json +++ b/homeassistant/components/oem/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/oem", "iot_class": "local_polling", "loggers": ["oemthermostat"], + "quality_scale": "legacy", "requirements": ["oemthermostat==1.1.1"] } diff --git a/homeassistant/components/ohmconnect/manifest.json b/homeassistant/components/ohmconnect/manifest.json index 74754485ea0..e2f02add22d 100644 --- a/homeassistant/components/ohmconnect/manifest.json +++ b/homeassistant/components/ohmconnect/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@robbiet480"], "documentation": "https://www.home-assistant.io/integrations/ohmconnect", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["defusedxml==0.7.1"] } diff --git a/homeassistant/components/ohme/__init__.py b/homeassistant/components/ohme/__init__.py new file mode 100644 index 00000000000..4dc75cb574c --- /dev/null +++ b/homeassistant/components/ohme/__init__.py @@ -0,0 +1,65 @@ +"""Set up ohme integration.""" + +from dataclasses import dataclass + +from ohme import ApiException, AuthException, OhmeApiClient + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady + +from .const import DOMAIN, PLATFORMS +from .coordinator import OhmeAdvancedSettingsCoordinator, OhmeChargeSessionCoordinator + +type OhmeConfigEntry = ConfigEntry[OhmeRuntimeData] + + +@dataclass() +class 
OhmeRuntimeData: + """Dataclass to hold ohme coordinators.""" + + charge_session_coordinator: OhmeChargeSessionCoordinator + advanced_settings_coordinator: OhmeAdvancedSettingsCoordinator + + +async def async_setup_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool: + """Set up Ohme from a config entry.""" + + client = OhmeApiClient(entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD]) + + try: + await client.async_login() + + if not await client.async_update_device_info(): + raise ConfigEntryNotReady( + translation_key="device_info_failed", translation_domain=DOMAIN + ) + except AuthException as e: + raise ConfigEntryAuthFailed( + translation_key="auth_failed", translation_domain=DOMAIN + ) from e + except ApiException as e: + raise ConfigEntryNotReady( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + + coordinators = ( + OhmeChargeSessionCoordinator(hass, client), + OhmeAdvancedSettingsCoordinator(hass, client), + ) + + for coordinator in coordinators: + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = OhmeRuntimeData(*coordinators) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool: + """Unload a config entry.""" + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/ohme/button.py b/homeassistant/components/ohme/button.py new file mode 100644 index 00000000000..21792770bb4 --- /dev/null +++ b/homeassistant/components/ohme/button.py @@ -0,0 +1,77 @@ +"""Platform for button.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from ohme import ApiException, ChargerStatus, OhmeApiClient + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import OhmeConfigEntry +from .const import DOMAIN +from .entity import OhmeEntity, OhmeEntityDescription + +PARALLEL_UPDATES = 1 + + +@dataclass(frozen=True, kw_only=True) +class OhmeButtonDescription(OhmeEntityDescription, ButtonEntityDescription): + """Class describing Ohme button entities.""" + + press_fn: Callable[[OhmeApiClient], Awaitable[None]] + available_fn: Callable[[OhmeApiClient], bool] + + +BUTTON_DESCRIPTIONS = [ + OhmeButtonDescription( + key="approve", + translation_key="approve", + press_fn=lambda client: client.async_approve_charge(), + is_supported_fn=lambda client: client.is_capable("pluginsRequireApprovalMode"), + available_fn=lambda client: client.status is ChargerStatus.PENDING_APPROVAL, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OhmeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up buttons.""" + coordinator = config_entry.runtime_data.charge_session_coordinator + + async_add_entities( + OhmeButton(coordinator, description) + for description in BUTTON_DESCRIPTIONS + if description.is_supported_fn(coordinator.client) + ) + + +class OhmeButton(OhmeEntity, ButtonEntity): + """Generic button for Ohme.""" + + entity_description: OhmeButtonDescription + + async def async_press(self) -> None: + """Handle the button press.""" + try: + await self.entity_description.press_fn(self.coordinator.client) + except ApiException as e: + raise HomeAssistantError( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + await self.coordinator.async_request_refresh() + + @property + def available(self) -> bool: + """Is entity available.""" + + return super().available and self.entity_description.available_fn( + self.coordinator.client + ) diff --git a/homeassistant/components/ohme/config_flow.py b/homeassistant/components/ohme/config_flow.py new file mode 100644 index 00000000000..748ea558983 --- /dev/null +++ b/homeassistant/components/ohme/config_flow.py @@ -0,0 +1,116 @@ +"""Config flow for ohme integration.""" + +from collections.abc import Mapping +from typing import Any + +from ohme import ApiException, AuthException, OhmeApiClient +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN + +USER_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig( + type=TextSelectorType.EMAIL, + autocomplete="email", + ), + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ), + ), + } +) + +REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ), + ), + } +) + + +class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): + """Config flow.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """First config step.""" + + errors: dict[str, str] = {} + + if user_input is not None: + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + + errors = await self._validate_account( + user_input[CONF_EMAIL], user_input[CONF_PASSWORD] + ) + if not errors: + return self.async_create_entry( + title=user_input[CONF_EMAIL], data=user_input + ) + + return self.async_show_form( + step_id="user", 
data_schema=USER_SCHEMA, errors=errors + ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication confirmation.""" + errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() + if user_input is not None: + errors = await self._validate_account( + reauth_entry.data[CONF_EMAIL], + user_input[CONF_PASSWORD], + ) + if not errors: + return self.async_update_reload_and_abort( + reauth_entry, + data_updates=user_input, + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=REAUTH_SCHEMA, + description_placeholders={"email": reauth_entry.data[CONF_EMAIL]}, + errors=errors, + ) + + async def _validate_account(self, email: str, password: str) -> dict[str, str]: + """Validate Ohme account and return dict of errors.""" + errors: dict[str, str] = {} + client = OhmeApiClient( + email, + password, + ) + try: + await client.async_login() + except AuthException: + errors["base"] = "invalid_auth" + except ApiException: + errors["base"] = "unknown" + + return errors diff --git a/homeassistant/components/ohme/const.py b/homeassistant/components/ohme/const.py new file mode 100644 index 00000000000..b44262ad509 --- /dev/null +++ b/homeassistant/components/ohme/const.py @@ -0,0 +1,6 @@ +"""Component constants.""" + +from homeassistant.const import Platform + +DOMAIN = "ohme" +PLATFORMS = [Platform.BUTTON, Platform.SENSOR] diff --git a/homeassistant/components/ohme/coordinator.py b/homeassistant/components/ohme/coordinator.py new file mode 100644 index 00000000000..5de59b3d4b2 --- /dev/null +++ b/homeassistant/components/ohme/coordinator.py @@ -0,0 +1,68 @@ +"""Ohme coordinators.""" + +from abc import abstractmethod +from datetime import timedelta +import logging + +from ohme import ApiException, OhmeApiClient + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class OhmeBaseCoordinator(DataUpdateCoordinator[None]): + """Base for all Ohme coordinators.""" + + client: OhmeApiClient + _default_update_interval: timedelta | None = timedelta(minutes=1) + coordinator_name: str = "" + + def __init__(self, hass: HomeAssistant, client: OhmeApiClient) -> None: + """Initialise coordinator.""" + super().__init__( + hass, + _LOGGER, + name="", + update_interval=self._default_update_interval, + ) + + self.name = f"Ohme {self.coordinator_name}" + self.client = client + + async def _async_update_data(self) -> None: + """Fetch data from API endpoint.""" + try: + await self._internal_update_data() + except ApiException as e: + raise UpdateFailed( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + + @abstractmethod + async def _internal_update_data(self) -> None: + """Update coordinator data.""" + + +class OhmeChargeSessionCoordinator(OhmeBaseCoordinator): + """Coordinator to pull all updates from the API.""" + + coordinator_name = "Charge Sessions" + _default_update_interval = timedelta(seconds=30) + + async def _internal_update_data(self): + """Fetch data from API endpoint.""" + await self.client.async_get_charge_session() + + +class OhmeAdvancedSettingsCoordinator(OhmeBaseCoordinator): + """Coordinator to pull settings and charger state from the API.""" + 
+ coordinator_name = "Advanced Settings" + + async def _internal_update_data(self): + """Fetch data from API endpoint.""" + await self.client.async_get_advanced_settings() diff --git a/homeassistant/components/ohme/entity.py b/homeassistant/components/ohme/entity.py new file mode 100644 index 00000000000..6a7d0ea16e4 --- /dev/null +++ b/homeassistant/components/ohme/entity.py @@ -0,0 +1,54 @@ +"""Base class for entities.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from ohme import OhmeApiClient + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import OhmeBaseCoordinator + + +@dataclass(frozen=True) +class OhmeEntityDescription(EntityDescription): + """Class describing Ohme entities.""" + + is_supported_fn: Callable[[OhmeApiClient], bool] = lambda _: True + + +class OhmeEntity(CoordinatorEntity[OhmeBaseCoordinator]): + """Base class for all Ohme entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: OhmeBaseCoordinator, + entity_description: EntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + + client = coordinator.client + self._attr_unique_id = f"{client.serial}_{entity_description.key}" + + device_info = client.device_info + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, client.serial)}, + name=device_info["name"], + manufacturer="Ohme", + model=device_info["model"], + sw_version=device_info["sw_version"], + serial_number=client.serial, + ) + + @property + def available(self) -> bool: + """Return if charger reporting as online.""" + return super().available and self.coordinator.client.available diff --git a/homeassistant/components/ohme/icons.json b/homeassistant/components/ohme/icons.json new file mode 100644 index 00000000000..d5bf3fa1187 --- /dev/null +++ b/homeassistant/components/ohme/icons.json @@ -0,0 +1,23 @@ +{ + "entity": { + "button": { + "approve": { + "default": "mdi:check-decagram" + } + }, + "sensor": { + "status": { + "default": "mdi:car", + "state": { + "unplugged": "mdi:power-plug-off", + "plugged_in": "mdi:power-plug", + "charging": "mdi:battery-charging-100", + "pending_approval": "mdi:alert-decagram" + } + }, + "ct_current": { + "default": "mdi:gauge" + } + } + } +} diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json new file mode 100644 index 00000000000..c9e1ccf9ac2 --- /dev/null +++ b/homeassistant/components/ohme/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "ohme", + "name": "Ohme", + "codeowners": ["@dan-r"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/ohme/", + "integration_type": "device", + "iot_class": "cloud_polling", + "quality_scale": "silver", + "requirements": ["ohme==1.1.1"] +} diff --git a/homeassistant/components/ohme/quality_scale.yaml b/homeassistant/components/ohme/quality_scale.yaml new file mode 100644 index 00000000000..7fc2f55e2f9 --- /dev/null +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration has no custom actions. 
+ appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration has no custom actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + This integration has no explicit subscriptions to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration has no options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery: + status: exempt + comment: | + All supported devices are cloud connected over mobile data. Discovery is not possible. + discovery-update-info: + status: exempt + comment: | + All supported devices are cloud connected over mobile data. Discovery is not possible. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: done + entity-disabled-by-default: todo + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration currently has no repairs. + stale-devices: todo + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/ohme/sensor.py b/homeassistant/components/ohme/sensor.py new file mode 100644 index 00000000000..6d111cf7af6 --- /dev/null +++ b/homeassistant/components/ohme/sensor.py @@ -0,0 +1,106 @@ +"""Platform for sensor.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from ohme import ChargerStatus, OhmeApiClient + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfElectricCurrent, UnitOfEnergy, UnitOfPower +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import OhmeConfigEntry +from .entity import OhmeEntity, OhmeEntityDescription + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class OhmeSensorDescription(OhmeEntityDescription, SensorEntityDescription): + """Class describing Ohme sensor entities.""" + + value_fn: Callable[[OhmeApiClient], str | int | float] + + +SENSOR_CHARGE_SESSION = [ + OhmeSensorDescription( + key="status", + translation_key="status", + device_class=SensorDeviceClass.ENUM, + options=[e.value for e in ChargerStatus], + value_fn=lambda client: client.status.value, + ), + OhmeSensorDescription( + key="current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda client: client.power.amps, + ), + OhmeSensorDescription( + key="power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + suggested_display_precision=1, + value_fn=lambda client: client.power.watts, + ), + OhmeSensorDescription( + key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + suggested_display_precision=1, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda client: client.energy, + ), +] + +SENSOR_ADVANCED_SETTINGS = [ + OhmeSensorDescription( + key="ct_current", + translation_key="ct_current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda client: client.power.ct_amps, + is_supported_fn=lambda client: client.ct_connected, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OhmeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up sensors.""" + coordinators = config_entry.runtime_data + coordinator_map = [ + (SENSOR_CHARGE_SESSION, coordinators.charge_session_coordinator), + (SENSOR_ADVANCED_SETTINGS, coordinators.advanced_settings_coordinator), + ] + + async_add_entities( + OhmeSensor(coordinator, description) + for entities, coordinator in coordinator_map + for description in entities + if description.is_supported_fn(coordinator.client) + ) + + +class OhmeSensor(OhmeEntity, SensorEntity): + """Generic sensor for Ohme.""" + + entity_description: OhmeSensorDescription + + @property + def native_value(self) -> str | int | float: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.client) diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json new file mode 100644 index 00000000000..125babc1901 --- /dev/null +++ b/homeassistant/components/ohme/strings.json @@ -0,0 +1,67 @@ +{ + "config": { + "step": { + "user": { + "description": "Configure your Ohme account. 
If you signed up to Ohme with a third party account like Google, please reset your password via Ohme before configuring this integration.", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "Enter the email address associated with your Ohme account.", + "password": "Enter the password for your Ohme account" + } + }, + "reauth_confirm": { + "description": "Please update your password for {email}", + "title": "[%key:common::config_flow::title::reauth%]", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "Enter the password for your Ohme account" + } + } + }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + } + }, + "entity": { + "button": { + "approve": { + "name": "Approve charge" + } + }, + "sensor": { + "status": { + "name": "Status", + "state": { + "unplugged": "Unplugged", + "plugged_in": "Plugged in", + "charging": "Charging", + "pending_approval": "Pending approval" + } + }, + "ct_current": { + "name": "CT current" + } + } + }, + "exceptions": { + "auth_failed": { + "message": "Unable to login to Ohme" + }, + "device_info_failed": { + "message": "Unable to get Ohme device information" + }, + "api_failed": { + "message": "Error communicating with Ohme API" + } + } +} diff --git a/homeassistant/components/ollama/__init__.py b/homeassistant/components/ollama/__init__.py index 2ad389c55c3..3bcba567803 100644 --- a/homeassistant/components/ollama/__init__.py +++ b/homeassistant/components/ollama/__init__.py @@ -13,6 +13,7 @@ from homeassistant.const import CONF_URL, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv +from homeassistant.util.ssl import get_default_context from .const import ( CONF_KEEP_ALIVE, @@ -43,7 +44,7 @@ PLATFORMS = (Platform.CONVERSATION,) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Ollama from a config entry.""" settings = {**entry.data, **entry.options} - client = ollama.AsyncClient(host=settings[CONF_URL]) + client = ollama.AsyncClient(host=settings[CONF_URL], verify=get_default_context()) try: async with asyncio.timeout(DEFAULT_TIMEOUT): await client.list() diff --git a/homeassistant/components/ollama/config_flow.py b/homeassistant/components/ollama/config_flow.py index 6b516d67138..1024a824c25 100644 --- a/homeassistant/components/ollama/config_flow.py +++ b/homeassistant/components/ollama/config_flow.py @@ -33,6 +33,7 @@ from homeassistant.helpers.selector import ( TextSelectorConfig, TextSelectorType, ) +from homeassistant.util.ssl import get_default_context from .const import ( CONF_KEEP_ALIVE, @@ -91,7 +92,9 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN): errors = {} try: - self.client = ollama.AsyncClient(host=self.url) + self.client = ollama.AsyncClient( + host=self.url, verify=get_default_context() + ) async with asyncio.timeout(DEFAULT_TIMEOUT): response = await self.client.list() @@ -204,9 +207,8 @@ class OllamaOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - 
self.config_entry = config_entry - self.url: str = self.config_entry.data[CONF_URL] - self.model: str = self.config_entry.data[CONF_MODEL] + self.url: str = config_entry.data[CONF_URL] + self.model: str = config_entry.data[CONF_MODEL] async def async_step_init( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/ollama/const.py b/homeassistant/components/ollama/const.py index 6152b223d6d..69c0a3d6296 100644 --- a/homeassistant/components/ollama/const.py +++ b/homeassistant/components/ollama/const.py @@ -24,8 +24,12 @@ MAX_HISTORY_SECONDS = 60 * 60 # 1 hour MODEL_NAMES = [ # https://ollama.com/library "alfred", "all-minilm", + "aya-expanse", "aya", "bakllava", + "bespoke-minicheck", + "bge-large", + "bge-m3", "codebooga", "codegeex4", "codegemma", @@ -33,18 +37,19 @@ MODEL_NAMES = [ # https://ollama.com/library "codeqwen", "codestral", "codeup", - "command-r", "command-r-plus", + "command-r", "dbrx", - "deepseek-coder", "deepseek-coder-v2", + "deepseek-coder", "deepseek-llm", + "deepseek-v2.5", "deepseek-v2", - "dolphincoder", "dolphin-llama3", "dolphin-mistral", "dolphin-mixtral", "dolphin-phi", + "dolphincoder", "duckdb-nsql", "everythinglm", "falcon", @@ -55,74 +60,97 @@ MODEL_NAMES = [ # https://ollama.com/library "glm4", "goliath", "granite-code", + "granite3-dense", + "granite3-guardian" "granite3-moe", + "hermes3", "internlm2", - "llama2", + "llama-guard3", + "llama-pro", "llama2-chinese", "llama2-uncensored", - "llama3", + "llama2", "llama3-chatqa", "llama3-gradient", "llama3-groq-tool-use", - "llama-pro", - "llava", + "llama3.1", + "llama3.2", + "llama3", "llava-llama3", "llava-phi3", + "llava", "magicoder", "mathstral", "meditron", "medllama2", "megadolphin", - "mistral", - "mistrallite", + "minicpm-v", + "mistral-large", "mistral-nemo", "mistral-openorca", + "mistral-small", + "mistral", + "mistrallite", "mixtral", "moondream", "mxbai-embed-large", + "nemotron-mini", + "nemotron", "neural-chat", "nexusraven", "nomic-embed-text", "notus", "notux", "nous-hermes", - "nous-hermes2", "nous-hermes2-mixtral", + "nous-hermes2", "nuextract", + "open-orca-platypus2", "openchat", "openhermes", - "open-orca-platypus2", - "orca2", "orca-mini", + "orca2", + "paraphrase-multilingual", "phi", + "phi3.5", "phi3", "phind-codellama", "qwen", + "qwen2-math", + "qwen2.5-coder", + "qwen2.5", "qwen2", + "reader-lm", + "reflection", "samantha-mistral", + "shieldgemma", + "smollm", + "smollm2", "snowflake-arctic-embed", + "solar-pro", "solar", "sqlcoder", "stable-beluga", "stable-code", - "stablelm2", "stablelm-zephyr", + "stablelm2", "starcoder", "starcoder2", "starling-lm", "tinydolphin", "tinyllama", "vicuna", + "wizard-math", + "wizard-vicuna-uncensored", + "wizard-vicuna", "wizardcoder", + "wizardlm-uncensored", "wizardlm", "wizardlm2", - "wizardlm-uncensored", - "wizard-math", - "wizard-vicuna", - "wizard-vicuna-uncensored", "xwinlm", "yarn-llama2", "yarn-mistral", + "yi-coder", "yi", "zephyr", ] -DEFAULT_MODEL = "llama3.1:latest" +DEFAULT_MODEL = "llama3.2:latest" diff --git a/homeassistant/components/ollama/manifest.json b/homeassistant/components/ollama/manifest.json index 64224eb06fb..dca4c2dd6be 100644 --- a/homeassistant/components/ollama/manifest.json +++ b/homeassistant/components/ollama/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/ollama", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["ollama==0.3.1"] + "requirements": ["ollama==0.3.3"] } diff --git 
a/homeassistant/components/ollama/strings.json b/homeassistant/components/ollama/strings.json index c307f160228..248cac34f11 100644 --- a/homeassistant/components/ollama/strings.json +++ b/homeassistant/components/ollama/strings.json @@ -11,9 +11,11 @@ "title": "Downloading model" } }, + "abort": { + "download_failed": "Model downloading failed" + }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "download_failed": "Model downloading failed", "unknown": "[%key:common::config_flow::error::unknown%]" }, "progress": { diff --git a/homeassistant/components/ombi/icons.json b/homeassistant/components/ombi/icons.json index 4b3e32a1e13..15b8af56188 100644 --- a/homeassistant/components/ombi/icons.json +++ b/homeassistant/components/ombi/icons.json @@ -1,7 +1,13 @@ { "services": { - "submit_movie_request": "mdi:movie-roll", - "submit_tv_request": "mdi:television-classic", - "submit_music_request": "mdi:music" + "submit_movie_request": { + "service": "mdi:movie-roll" + }, + "submit_tv_request": { + "service": "mdi:television-classic" + }, + "submit_music_request": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/ombi/manifest.json b/homeassistant/components/ombi/manifest.json index d9da13d2381..1afc385a5a7 100644 --- a/homeassistant/components/ombi/manifest.json +++ b/homeassistant/components/ombi/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@larssont"], "documentation": "https://www.home-assistant.io/integrations/ombi", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pyombi==0.1.10"] } diff --git a/homeassistant/components/omnilogic/common.py b/homeassistant/components/omnilogic/common.py index 13b9803409c..4e3e2962d03 100644 --- a/homeassistant/components/omnilogic/common.py +++ b/homeassistant/components/omnilogic/common.py @@ -1,97 +1,5 @@ """Common classes and elements for Omnilogic Integration.""" -from typing import Any - -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import DOMAIN -from .coordinator import OmniLogicUpdateCoordinator - - -class OmniLogicEntity(CoordinatorEntity[OmniLogicUpdateCoordinator]): - """Defines the base OmniLogic entity.""" - - def __init__( - self, - coordinator: OmniLogicUpdateCoordinator, - kind: str, - name: str, - item_id: tuple, - icon: str, - ) -> None: - """Initialize the OmniLogic Entity.""" - super().__init__(coordinator) - - bow_id = None - entity_data = coordinator.data[item_id] - - backyard_id = item_id[:2] - if len(item_id) == 6: - bow_id = item_id[:4] - - msp_system_id = coordinator.data[backyard_id]["systemId"] - entity_friendly_name = f"{coordinator.data[backyard_id]['BackyardName']} " - unique_id = f"{msp_system_id}" - - if bow_id is not None: - unique_id = f"{unique_id}_{coordinator.data[bow_id]['systemId']}" - - if kind != "Heaters": - entity_friendly_name = ( - f"{entity_friendly_name}{coordinator.data[bow_id]['Name']} " - ) - else: - entity_friendly_name = f"{entity_friendly_name}{coordinator.data[bow_id]['Operation']['VirtualHeater']['Name']} " - - unique_id = f"{unique_id}_{coordinator.data[item_id]['systemId']}_{kind}" - - if entity_data.get("Name") is not None: - entity_friendly_name = f"{entity_friendly_name} {entity_data['Name']}" - - entity_friendly_name = f"{entity_friendly_name} {name}" - - unique_id = unique_id.replace(" ", "_") - - self._kind = kind - self._name = entity_friendly_name - self._unique_id = unique_id - self._item_id = item_id - 
self._icon = icon - self._attrs: dict[str, Any] = {} - self._msp_system_id = msp_system_id - self._backyard_name = coordinator.data[backyard_id]["BackyardName"] - - @property - def unique_id(self) -> str: - """Return a unique, Home Assistant friendly identifier for this entity.""" - return self._unique_id - - @property - def name(self) -> str: - """Return the name of the entity.""" - return self._name - - @property - def icon(self): - """Return the icon for the entity.""" - return self._icon - - @property - def extra_state_attributes(self): - """Return the attributes.""" - return self._attrs - - @property - def device_info(self) -> DeviceInfo: - """Define the device as back yard/MSP System.""" - return DeviceInfo( - identifiers={(DOMAIN, self._msp_system_id)}, - manufacturer="Hayward", - model="OmniLogic", - name=self._backyard_name, - ) - def check_guard(state_key, item, entity_setting): """Validate that this entity passes the defined guard conditions defined at setup.""" diff --git a/homeassistant/components/omnilogic/config_flow.py b/homeassistant/components/omnilogic/config_flow.py index 166e4414767..dfbd010ea98 100644 --- a/homeassistant/components/omnilogic/config_flow.py +++ b/homeassistant/components/omnilogic/config_flow.py @@ -34,7 +34,7 @@ class OmniLogicConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -42,12 +42,6 @@ class OmniLogicConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors: dict[str, str] = {} - config_entry = self._async_current_entries() - if config_entry: - return self.async_abort(reason="single_instance_allowed") - - errors = {} - if user_input is not None: username = user_input[CONF_USERNAME] password = user_input[CONF_PASSWORD] @@ -84,11 +78,9 @@ class OmniLogicConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle Omnilogic client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage options.""" if user_input is not None: diff --git a/homeassistant/components/omnilogic/entity.py b/homeassistant/components/omnilogic/entity.py new file mode 100644 index 00000000000..6f7b769fc8f --- /dev/null +++ b/homeassistant/components/omnilogic/entity.py @@ -0,0 +1,93 @@ +"""Common classes and elements for Omnilogic Integration.""" + +from typing import Any + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import OmniLogicUpdateCoordinator + + +class OmniLogicEntity(CoordinatorEntity[OmniLogicUpdateCoordinator]): + """Defines the base OmniLogic entity.""" + + def __init__( + self, + coordinator: OmniLogicUpdateCoordinator, + kind: str, + name: str, + item_id: tuple, + icon: str, + ) -> None: + """Initialize the OmniLogic Entity.""" + super().__init__(coordinator) + + bow_id = None + entity_data = coordinator.data[item_id] + + backyard_id = item_id[:2] + if len(item_id) == 6: + bow_id = item_id[:4] + + msp_system_id = coordinator.data[backyard_id]["systemId"] + entity_friendly_name = 
f"{coordinator.data[backyard_id]['BackyardName']} " + unique_id = f"{msp_system_id}" + + if bow_id is not None: + unique_id = f"{unique_id}_{coordinator.data[bow_id]['systemId']}" + + if kind != "Heaters": + entity_friendly_name = ( + f"{entity_friendly_name}{coordinator.data[bow_id]['Name']} " + ) + else: + entity_friendly_name = f"{entity_friendly_name}{coordinator.data[bow_id]['Operation']['VirtualHeater']['Name']} " + + unique_id = f"{unique_id}_{coordinator.data[item_id]['systemId']}_{kind}" + + if entity_data.get("Name") is not None: + entity_friendly_name = f"{entity_friendly_name} {entity_data['Name']}" + + entity_friendly_name = f"{entity_friendly_name} {name}" + + unique_id = unique_id.replace(" ", "_") + + self._kind = kind + self._name = entity_friendly_name + self._unique_id = unique_id + self._item_id = item_id + self._icon = icon + self._attrs: dict[str, Any] = {} + self._msp_system_id = msp_system_id + self._backyard_name = coordinator.data[backyard_id]["BackyardName"] + + @property + def unique_id(self) -> str: + """Return a unique, Home Assistant friendly identifier for this entity.""" + return self._unique_id + + @property + def name(self) -> str: + """Return the name of the entity.""" + return self._name + + @property + def icon(self): + """Return the icon for the entity.""" + return self._icon + + @property + def extra_state_attributes(self): + """Return the attributes.""" + return self._attrs + + @property + def device_info(self) -> DeviceInfo: + """Define the device as back yard/MSP System.""" + return DeviceInfo( + identifiers={(DOMAIN, self._msp_system_id)}, + manufacturer="Hayward", + model="OmniLogic", + name=self._backyard_name, + ) diff --git a/homeassistant/components/omnilogic/icons.json b/homeassistant/components/omnilogic/icons.json index ee5b5102177..8f0f13fe652 100644 --- a/homeassistant/components/omnilogic/icons.json +++ b/homeassistant/components/omnilogic/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_pump_speed": "mdi:water-pump" + "set_pump_speed": { + "service": "mdi:water-pump" + } } } diff --git a/homeassistant/components/omnilogic/manifest.json b/homeassistant/components/omnilogic/manifest.json index 252718d2c21..361a15e2d9c 100644 --- a/homeassistant/components/omnilogic/manifest.json +++ b/homeassistant/components/omnilogic/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/omnilogic", "iot_class": "cloud_polling", "loggers": ["config", "omnilogic"], - "requirements": ["omnilogic==0.4.5"] + "requirements": ["omnilogic==0.4.5"], + "single_config_entry": true } diff --git a/homeassistant/components/omnilogic/sensor.py b/homeassistant/components/omnilogic/sensor.py index 9def0d9825e..c87b589e1f6 100644 --- a/homeassistant/components/omnilogic/sensor.py +++ b/homeassistant/components/omnilogic/sensor.py @@ -15,9 +15,10 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .common import OmniLogicEntity, check_guard +from .common import check_guard from .const import COORDINATOR, DEFAULT_PH_OFFSET, DOMAIN, PUMP_TYPES from .coordinator import OmniLogicUpdateCoordinator +from .entity import OmniLogicEntity async def async_setup_entry( diff --git a/homeassistant/components/omnilogic/strings.json b/homeassistant/components/omnilogic/strings.json index 454644be244..5b193b7f5ba 100644 --- a/homeassistant/components/omnilogic/strings.json +++ b/homeassistant/components/omnilogic/strings.json @@ -14,8 +14,7 @@ 
"unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" } }, "options": { diff --git a/homeassistant/components/omnilogic/switch.py b/homeassistant/components/omnilogic/switch.py index 388099f92e9..eb57d03bc34 100644 --- a/homeassistant/components/omnilogic/switch.py +++ b/homeassistant/components/omnilogic/switch.py @@ -12,9 +12,10 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .common import OmniLogicEntity, check_guard +from .common import check_guard from .const import COORDINATOR, DOMAIN, PUMP_TYPES from .coordinator import OmniLogicUpdateCoordinator +from .entity import OmniLogicEntity SERVICE_SET_SPEED = "set_pump_speed" OMNILOGIC_SWITCH_OFF = 7 diff --git a/homeassistant/components/onboarding/views.py b/homeassistant/components/onboarding/views.py index 1ecfc10d974..b33440a9eb7 100644 --- a/homeassistant/components/onboarding/views.py +++ b/homeassistant/components/onboarding/views.py @@ -20,6 +20,7 @@ from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.components.http.view import HomeAssistantView from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import area_registry as ar +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.system_info import async_get_system_info from homeassistant.helpers.translation import async_get_translations from homeassistant.setup import async_setup_component @@ -216,7 +217,7 @@ class CoreConfigOnboardingView(_BaseOnboardingView): from homeassistant.components import hassio if ( - hassio.is_hassio(hass) + is_hassio(hass) and (core_info := hassio.get_core_info(hass)) and "raspberrypi" in core_info["machine"] ): diff --git a/homeassistant/components/oncue/__init__.py b/homeassistant/components/oncue/__init__.py index 53443b9ed81..19d134a398f 100644 --- a/homeassistant/components/oncue/__init__.py +++ b/homeassistant/components/oncue/__init__.py @@ -43,6 +43,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: OncueConfigEntry) -> boo coordinator = DataUpdateCoordinator[dict[str, OncueDevice]]( hass, _LOGGER, + config_entry=entry, name=f"Oncue {entry.data[CONF_USERNAME]}", update_interval=timedelta(minutes=10), update_method=_async_update, diff --git a/homeassistant/components/oncue/config_flow.py b/homeassistant/components/oncue/config_flow.py index 92cd037734e..872fe84350b 100644 --- a/homeassistant/components/oncue/config_flow.py +++ b/homeassistant/components/oncue/config_flow.py @@ -9,7 +9,7 @@ from typing import Any from aiooncue import LoginFailedException, Oncue import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -23,10 +23,6 @@ class OncueConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the oncue config flow.""" - self.reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: 
dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -80,8 +76,6 @@ class OncueConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth.""" - entry_id = self.context["entry_id"] - self.reauth_entry = self.hass.config_entries.async_get_entry(entry_id) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -89,18 +83,15 @@ class OncueConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle reauth input.""" errors: dict[str, str] = {} - existing_entry = self.reauth_entry - assert existing_entry - existing_data = existing_entry.data + reauth_entry = self._get_reauth_entry() + existing_data = reauth_entry.data description_placeholders: dict[str, str] = { CONF_USERNAME: existing_data[CONF_USERNAME] } if user_input is not None: new_config = {**existing_data, CONF_PASSWORD: user_input[CONF_PASSWORD]} if not (errors := await self._async_validate_or_error(new_config)): - return self.async_update_reload_and_abort( - existing_entry, data=new_config - ) + return self.async_update_reload_and_abort(reauth_entry, data=new_config) return self.async_show_form( description_placeholders=description_placeholders, diff --git a/homeassistant/components/ondilo_ico/config_flow.py b/homeassistant/components/ondilo_ico/config_flow.py index d65c1b15e2a..fe0b89e7258 100644 --- a/homeassistant/components/ondilo_ico/config_flow.py +++ b/homeassistant/components/ondilo_ico/config_flow.py @@ -21,9 +21,6 @@ class OndiloIcoOAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): """Handle a flow initialized by the user.""" await self.async_set_unique_id(DOMAIN) - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - self.async_register_implementation( self.hass, OndiloOauth2Implementation(self.hass), diff --git a/homeassistant/components/ondilo_ico/coordinator.py b/homeassistant/components/ondilo_ico/coordinator.py index 9a98ce0037e..bc092ad0b9a 100644 --- a/homeassistant/components/ondilo_ico/coordinator.py +++ b/homeassistant/components/ondilo_ico/coordinator.py @@ -42,9 +42,7 @@ class OndiloIcoCoordinator(DataUpdateCoordinator[dict[str, OndiloIcoData]]): """Fetch data from API endpoint.""" try: return await self.hass.async_add_executor_job(self._update_data) - except OndiloError as err: - _LOGGER.exception("Error getting pools") raise UpdateFailed(f"Error communicating with API: {err}") from err def _update_data(self) -> dict[str, OndiloIcoData]: @@ -52,23 +50,28 @@ class OndiloIcoCoordinator(DataUpdateCoordinator[dict[str, OndiloIcoData]]): res = {} pools = self.api.get_pools() _LOGGER.debug("Pools: %s", pools) + error: OndiloError | None = None for pool in pools: + pool_id = pool["id"] try: - ico = self.api.get_ICO_details(pool["id"]) + ico = self.api.get_ICO_details(pool_id) if not ico: _LOGGER.debug( - "The pool id %s does not have any ICO attached", pool["id"] + "The pool id %s does not have any ICO attached", pool_id ) continue - sensors = self.api.get_last_pool_measures(pool["id"]) - except OndiloError: - _LOGGER.exception("Error communicating with API for %s", pool["id"]) + sensors = self.api.get_last_pool_measures(pool_id) + except OndiloError as err: + error = err + _LOGGER.debug("Error communicating with API for %s: %s", pool_id, err) continue - res[pool["id"]] = OndiloIcoData( + res[pool_id] = OndiloIcoData( ico=ico, pool=pool, sensors={sensor["data_type"]: sensor["value"] for sensor in sensors}, ) if not res: + if error: + raise UpdateFailed(f"Error 
communicating with API: {error}") from error raise UpdateFailed("No data available") return res diff --git a/homeassistant/components/ondilo_ico/manifest.json b/homeassistant/components/ondilo_ico/manifest.json index 2f522f1b77c..84862a89fbb 100644 --- a/homeassistant/components/ondilo_ico/manifest.json +++ b/homeassistant/components/ondilo_ico/manifest.json @@ -8,5 +8,6 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["ondilo"], - "requirements": ["ondilo==0.5.0"] + "requirements": ["ondilo==0.5.0"], + "single_config_entry": true } diff --git a/homeassistant/components/onewire/binary_sensor.py b/homeassistant/components/onewire/binary_sensor.py index 82cdb1936f7..5607fd7ed1d 100644 --- a/homeassistant/components/onewire/binary_sensor.py +++ b/homeassistant/components/onewire/binary_sensor.py @@ -16,7 +16,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import OneWireConfigEntry from .const import DEVICE_KEYS_0_3, DEVICE_KEYS_0_7, DEVICE_KEYS_A_B, READ_MODE_BOOL -from .onewire_entities import OneWireEntity, OneWireEntityDescription +from .entity import OneWireEntity, OneWireEntityDescription from .onewirehub import OneWireHub diff --git a/homeassistant/components/onewire/config_flow.py b/homeassistant/components/onewire/config_flow.py index a217674e3b4..3889db2a069 100644 --- a/homeassistant/components/onewire/config_flow.py +++ b/homeassistant/components/onewire/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from copy import deepcopy from typing import Any import voluptuous as vol @@ -10,7 +11,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant, callback @@ -100,12 +101,14 @@ class OneWireFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OnewireOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OnewireOptionsFlowHandler: """Get the options flow for this handler.""" return OnewireOptionsFlowHandler(config_entry) -class OnewireOptionsFlowHandler(OptionsFlowWithConfigEntry): +class OnewireOptionsFlowHandler(OptionsFlow): """Handle OneWire Config options.""" configurable_devices: dict[str, str] @@ -123,6 +126,10 @@ class OnewireOptionsFlowHandler(OptionsFlowWithConfigEntry): current_device: str """Friendly name of the currently selected device.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.options = deepcopy(dict(config_entry.options)) + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -137,7 +144,7 @@ class OnewireOptionsFlowHandler(OptionsFlowWithConfigEntry): } if not self.configurable_devices: - return self.async_abort(reason="No configurable devices found.") + return self.async_abort(reason="no_configurable_devices") return await self.async_step_device_selection(user_input=None) diff --git a/homeassistant/components/onewire/onewire_entities.py b/homeassistant/components/onewire/entity.py similarity index 97% rename from homeassistant/components/onewire/onewire_entities.py rename to homeassistant/components/onewire/entity.py index 03ed2dd679a..bbf36deaaa0 100644 --- a/homeassistant/components/onewire/onewire_entities.py +++ b/homeassistant/components/onewire/entity.py @@ -78,7 +78,7 @@ class OneWireEntity(Entity): else: if 
not self._last_update_success: self._last_update_success = True - _LOGGER.info("Fetching %s data recovered", self.name) + _LOGGER.debug("Fetching %s data recovered", self.name) if self.entity_description.read_mode == READ_MODE_INT: self._state = int(self._value_raw) elif self.entity_description.read_mode == READ_MODE_BOOL: diff --git a/homeassistant/components/onewire/manifest.json b/homeassistant/components/onewire/manifest.json index 32a08223075..4f3cb5d04ab 100644 --- a/homeassistant/components/onewire/manifest.json +++ b/homeassistant/components/onewire/manifest.json @@ -7,6 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["pyownet"], - "quality_scale": "gold", "requirements": ["pyownet==0.10.0.post1"] } diff --git a/homeassistant/components/onewire/sensor.py b/homeassistant/components/onewire/sensor.py index b7d7e3ddbe9..2dca53af1cf 100644 --- a/homeassistant/components/onewire/sensor.py +++ b/homeassistant/components/onewire/sensor.py @@ -38,7 +38,7 @@ from .const import ( READ_MODE_FLOAT, READ_MODE_INT, ) -from .onewire_entities import OneWireEntity, OneWireEntityDescription +from .entity import OneWireEntity, OneWireEntityDescription from .onewirehub import OneWireHub @@ -233,7 +233,6 @@ DEVICE_SENSORS: dict[str, tuple[OneWireSensorEntityDescription, ...]] = { "1D": tuple( OneWireSensorEntityDescription( key=f"counter.{device_key}", - native_unit_of_measurement="count", read_mode=READ_MODE_INT, state_class=SensorStateClass.TOTAL_INCREASING, translation_key="counter_id", diff --git a/homeassistant/components/onewire/strings.json b/homeassistant/components/onewire/strings.json index 8dbcbdf8978..68585c3203f 100644 --- a/homeassistant/components/onewire/strings.json +++ b/homeassistant/components/onewire/strings.json @@ -94,6 +94,9 @@ } }, "options": { + "abort": { + "no_configurable_devices": "No configurable devices found" + }, "error": { "device_not_selected": "Select devices to configure" }, diff --git a/homeassistant/components/onewire/switch.py b/homeassistant/components/onewire/switch.py index 11bcbff5970..ec0bc44e03f 100644 --- a/homeassistant/components/onewire/switch.py +++ b/homeassistant/components/onewire/switch.py @@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import OneWireConfigEntry from .const import DEVICE_KEYS_0_3, DEVICE_KEYS_0_7, DEVICE_KEYS_A_B, READ_MODE_BOOL -from .onewire_entities import OneWireEntity, OneWireEntityDescription +from .entity import OneWireEntity, OneWireEntityDescription from .onewirehub import OneWireHub diff --git a/homeassistant/components/onkyo/__init__.py b/homeassistant/components/onkyo/__init__.py index 02c026d1973..fd5c0ba634a 100644 --- a/homeassistant/components/onkyo/__init__.py +++ b/homeassistant/components/onkyo/__init__.py @@ -1 +1,76 @@ """The onkyo component.""" + +from dataclasses import dataclass + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.typing import ConfigType + +from .const import DOMAIN, OPTION_INPUT_SOURCES, InputSource +from .receiver import Receiver, async_interview +from .services import DATA_MP_ENTITIES, async_register_services + +PLATFORMS = [Platform.MEDIA_PLAYER] + +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +@dataclass +class OnkyoData: + """Config Entry data.""" + + receiver: Receiver + sources: dict[InputSource, str] + + +type OnkyoConfigEntry = ConfigEntry[OnkyoData] + + +async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool: + """Set up Onkyo component.""" + await async_register_services(hass) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: OnkyoConfigEntry) -> bool: + """Set up the Onkyo config entry.""" + entry.async_on_unload(entry.add_update_listener(update_listener)) + + host = entry.data[CONF_HOST] + + info = await async_interview(host) + if info is None: + raise ConfigEntryNotReady(f"Unable to connect to: {host}") + + receiver = await Receiver.async_create(info) + + sources_store: dict[str, str] = entry.options[OPTION_INPUT_SOURCES] + sources = {InputSource(k): v for k, v in sources_store.items()} + + entry.runtime_data = OnkyoData(receiver, sources) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + await receiver.conn.connect() + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: OnkyoConfigEntry) -> bool: + """Unload Onkyo config entry.""" + del hass.data[DATA_MP_ENTITIES][entry.entry_id] + + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + receiver = entry.runtime_data.receiver + receiver.conn.close() + + return unload_ok + + +async def update_listener(hass: HomeAssistant, entry: OnkyoConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/onkyo/config_flow.py b/homeassistant/components/onkyo/config_flow.py new file mode 100644 index 00000000000..a8ced6fae64 --- /dev/null +++ b/homeassistant/components/onkyo/config_flow.py @@ -0,0 +1,371 @@ +"""Config flow for Onkyo.""" + +import logging +from typing import Any + +import voluptuous as vol + +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import callback +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + Selector, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, + TextSelector, +) + +from .const 
import ( + CONF_RECEIVER_MAX_VOLUME, + CONF_SOURCES, + DOMAIN, + OPTION_INPUT_SOURCES, + OPTION_MAX_VOLUME, + OPTION_MAX_VOLUME_DEFAULT, + OPTION_VOLUME_RESOLUTION, + OPTION_VOLUME_RESOLUTION_DEFAULT, + VOLUME_RESOLUTION_ALLOWED, + InputSource, +) +from .receiver import ReceiverInfo, async_discover, async_interview + +_LOGGER = logging.getLogger(__name__) + +CONF_DEVICE = "device" + +INPUT_SOURCES_ALL_MEANINGS = [ + input_source.value_meaning for input_source in InputSource +] +STEP_MANUAL_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) +STEP_CONFIGURE_SCHEMA = vol.Schema( + { + vol.Required(OPTION_VOLUME_RESOLUTION): vol.In(VOLUME_RESOLUTION_ALLOWED), + vol.Required(OPTION_INPUT_SOURCES): SelectSelector( + SelectSelectorConfig( + options=INPUT_SOURCES_ALL_MEANINGS, + multiple=True, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + } +) + + +class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): + """Onkyo config flow.""" + + _receiver_info: ReceiverInfo + _discovered_infos: dict[str, ReceiverInfo] + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + return self.async_show_menu( + step_id="user", menu_options=["manual", "eiscp_discovery"] + ) + + async def async_step_manual( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle manual device entry.""" + errors = {} + + if user_input is not None: + host = user_input[CONF_HOST] + _LOGGER.debug("Config flow start manual: %s", host) + try: + info = await async_interview(host) + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + if info is None: + errors["base"] = "cannot_connect" + else: + self._receiver_info = info + + await self.async_set_unique_id( + info.identifier, raise_on_progress=False + ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch() + else: + self._abort_if_unique_id_configured() + + return await self.async_step_configure_receiver() + + suggested_values = user_input + if suggested_values is None and self.source == SOURCE_RECONFIGURE: + suggested_values = { + CONF_HOST: self._get_reconfigure_entry().data[CONF_HOST] + } + + return self.async_show_form( + step_id="manual", + data_schema=self.add_suggested_values_to_schema( + STEP_MANUAL_SCHEMA, suggested_values + ), + errors=errors, + ) + + async def async_step_eiscp_discovery( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Start eiscp discovery and handle user device selection.""" + if user_input is not None: + self._receiver_info = self._discovered_infos[user_input[CONF_DEVICE]] + await self.async_set_unique_id( + self._receiver_info.identifier, raise_on_progress=False + ) + self._abort_if_unique_id_configured( + updates={CONF_HOST: self._receiver_info.host} + ) + return await self.async_step_configure_receiver() + + _LOGGER.debug("Config flow start eiscp discovery") + + try: + infos = await async_discover() + except Exception: + _LOGGER.exception("Unexpected exception") + return self.async_abort(reason="unknown") + + _LOGGER.debug("Discovered devices: %s", infos) + + self._discovered_infos = {} + discovered_names = {} + current_unique_ids = self._async_current_ids() + for info in infos: + if info.identifier in current_unique_ids: + continue + self._discovered_infos[info.identifier] = info + device_name = f"{info.model_name} ({info.host})" + discovered_names[info.identifier] = device_name + + _LOGGER.debug("Discovered new devices: %s", 
self._discovered_infos) + + if not discovered_names: + return self.async_abort(reason="no_devices_found") + + return self.async_show_form( + step_id="eiscp_discovery", + data_schema=vol.Schema( + {vol.Required(CONF_DEVICE): vol.In(discovered_names)} + ), + ) + + async def async_step_configure_receiver( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the configuration of a single receiver.""" + errors = {} + + entry = None + entry_options = None + if self.source == SOURCE_RECONFIGURE: + entry = self._get_reconfigure_entry() + entry_options = entry.options + + if user_input is not None: + source_meanings: list[str] = user_input[OPTION_INPUT_SOURCES] + if not source_meanings: + errors[OPTION_INPUT_SOURCES] = "empty_input_source_list" + else: + sources_store: dict[str, str] = {} + for source_meaning in source_meanings: + source = InputSource.from_meaning(source_meaning) + + source_name = source_meaning + if entry_options is not None: + source_name = entry_options[OPTION_INPUT_SOURCES].get( + source.value, source_name + ) + sources_store[source.value] = source_name + + volume_resolution = user_input[OPTION_VOLUME_RESOLUTION] + + if entry_options is None: + result = self.async_create_entry( + title=self._receiver_info.model_name, + data={ + CONF_HOST: self._receiver_info.host, + }, + options={ + OPTION_VOLUME_RESOLUTION: volume_resolution, + OPTION_MAX_VOLUME: OPTION_MAX_VOLUME_DEFAULT, + OPTION_INPUT_SOURCES: sources_store, + }, + ) + else: + assert entry is not None + result = self.async_update_reload_and_abort( + entry, + data={ + CONF_HOST: self._receiver_info.host, + }, + options={ + OPTION_VOLUME_RESOLUTION: volume_resolution, + OPTION_MAX_VOLUME: entry_options[OPTION_MAX_VOLUME], + OPTION_INPUT_SOURCES: sources_store, + }, + ) + + _LOGGER.debug("Configured receiver, result: %s", result) + return result + + _LOGGER.debug("Configuring receiver, info: %s", self._receiver_info) + + suggested_values = user_input + if suggested_values is None: + if entry_options is None: + suggested_values = { + OPTION_VOLUME_RESOLUTION: OPTION_VOLUME_RESOLUTION_DEFAULT, + OPTION_INPUT_SOURCES: [], + } + else: + suggested_values = { + OPTION_VOLUME_RESOLUTION: entry_options[OPTION_VOLUME_RESOLUTION], + OPTION_INPUT_SOURCES: [ + InputSource(input_source).value_meaning + for input_source in entry_options[OPTION_INPUT_SOURCES] + ], + } + + return self.async_show_form( + step_id="configure_receiver", + data_schema=self.add_suggested_values_to_schema( + STEP_CONFIGURE_SCHEMA, suggested_values + ), + errors=errors, + description_placeholders={ + "name": f"{self._receiver_info.model_name} ({self._receiver_info.host})" + }, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the receiver.""" + return await self.async_step_manual() + + async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + """Import the yaml config.""" + _LOGGER.debug("Import flow user input: %s", user_input) + + host: str = user_input[CONF_HOST] + name: str | None = user_input.get(CONF_NAME) + user_max_volume: int = user_input[OPTION_MAX_VOLUME] + user_volume_resolution: int = user_input[CONF_RECEIVER_MAX_VOLUME] + user_sources: dict[InputSource, str] = user_input[CONF_SOURCES] + + info: ReceiverInfo | None = user_input.get("info") + if info is None: + try: + info = await async_interview(host) + except Exception: + _LOGGER.exception("Import flow interview error for host %s", host) + return 
self.async_abort(reason="cannot_connect") + + if info is None: + _LOGGER.error("Import flow interview error for host %s", host) + return self.async_abort(reason="cannot_connect") + + unique_id = info.identifier + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + name = name or info.model_name + + volume_resolution = VOLUME_RESOLUTION_ALLOWED[-1] + for volume_resolution_allowed in VOLUME_RESOLUTION_ALLOWED: + if user_volume_resolution <= volume_resolution_allowed: + volume_resolution = volume_resolution_allowed + break + + max_volume = min( + 100, user_max_volume * user_volume_resolution / volume_resolution + ) + + sources_store: dict[str, str] = {} + for source, source_name in user_sources.items(): + sources_store[source.value] = source_name + + return self.async_create_entry( + title=name, + data={ + CONF_HOST: host, + }, + options={ + OPTION_VOLUME_RESOLUTION: volume_resolution, + OPTION_MAX_VOLUME: max_volume, + OPTION_INPUT_SOURCES: sources_store, + }, + ) + + @staticmethod + @callback + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: + """Return the options flow.""" + return OnkyoOptionsFlowHandler(config_entry) + + +class OnkyoOptionsFlowHandler(OptionsFlow): + """Handle an options flow for Onkyo.""" + + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + sources_store: dict[str, str] = config_entry.options[OPTION_INPUT_SOURCES] + self._input_sources = {InputSource(k): v for k, v in sources_store.items()} + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + if user_input is not None: + sources_store: dict[str, str] = {} + for source_meaning, source_name in user_input.items(): + if source_meaning in INPUT_SOURCES_ALL_MEANINGS: + source = InputSource.from_meaning(source_meaning) + sources_store[source.value] = source_name + + return self.async_create_entry( + data={ + OPTION_VOLUME_RESOLUTION: self.config_entry.options[ + OPTION_VOLUME_RESOLUTION + ], + OPTION_MAX_VOLUME: user_input[OPTION_MAX_VOLUME], + OPTION_INPUT_SOURCES: sources_store, + } + ) + + schema_dict: dict[Any, Selector] = {} + + max_volume: float = self.config_entry.options[OPTION_MAX_VOLUME] + schema_dict[vol.Required(OPTION_MAX_VOLUME, default=max_volume)] = ( + NumberSelector( + NumberSelectorConfig(min=1, max=100, mode=NumberSelectorMode.BOX) + ) + ) + + for source, source_name in self._input_sources.items(): + schema_dict[vol.Required(source.value_meaning, default=source_name)] = ( + TextSelector() + ) + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema(schema_dict), + ) diff --git a/homeassistant/components/onkyo/const.py b/homeassistant/components/onkyo/const.py new file mode 100644 index 00000000000..bd4fe98ae7d --- /dev/null +++ b/homeassistant/components/onkyo/const.py @@ -0,0 +1,141 @@ +"""Constants for the Onkyo integration.""" + +from enum import Enum +import typing +from typing import ClassVar, Literal, Self + +import pyeiscp + +DOMAIN = "onkyo" + +DEVICE_INTERVIEW_TIMEOUT = 5 +DEVICE_DISCOVERY_TIMEOUT = 5 + +CONF_SOURCES = "sources" +CONF_RECEIVER_MAX_VOLUME = "receiver_max_volume" + +type VolumeResolution = Literal[50, 80, 100, 200] +OPTION_VOLUME_RESOLUTION = "volume_resolution" +OPTION_VOLUME_RESOLUTION_DEFAULT: VolumeResolution = 50 +VOLUME_RESOLUTION_ALLOWED: tuple[VolumeResolution, ...] 
= typing.get_args( + VolumeResolution.__value__ +) + +OPTION_MAX_VOLUME = "max_volume" +OPTION_MAX_VOLUME_DEFAULT = 100.0 + +OPTION_INPUT_SOURCES = "input_sources" + +_INPUT_SOURCE_MEANINGS = { + "00": "VIDEO1 ··· VCR/DVR ··· STB/DVR", + "01": "VIDEO2 ··· CBL/SAT", + "02": "VIDEO3 ··· GAME/TV ··· GAME", + "03": "VIDEO4 ··· AUX", + "04": "VIDEO5 ··· AUX2 ··· GAME2", + "05": "VIDEO6 ··· PC", + "06": "VIDEO7", + "07": "HIDDEN1 ··· EXTRA1", + "08": "HIDDEN2 ··· EXTRA2", + "09": "HIDDEN3 ··· EXTRA3", + "10": "DVD ··· BD/DVD", + "11": "STRM BOX", + "12": "TV", + "20": "TAPE ··· TV/TAPE", + "21": "TAPE2", + "22": "PHONO", + "23": "CD ··· TV/CD", + "24": "FM", + "25": "AM", + "26": "TUNER", + "27": "MUSIC SERVER ··· P4S ··· DLNA", + "28": "INTERNET RADIO ··· IRADIO FAVORITE", + "29": "USB ··· USB(FRONT)", + "2A": "USB(REAR)", + "2B": "NETWORK ··· NET", + "2D": "AIRPLAY", + "2E": "BLUETOOTH", + "2F": "USB DAC IN", + "30": "MULTI CH", + "31": "XM", + "32": "SIRIUS", + "33": "DAB", + "40": "UNIVERSAL PORT", + "41": "LINE", + "42": "LINE2", + "44": "OPTICAL", + "45": "COAXIAL", + "55": "HDMI 5", + "56": "HDMI 6", + "57": "HDMI 7", + "80": "MAIN SOURCE", +} + + +class InputSource(Enum): + """Receiver input source.""" + + DVR = "00" + CBL = "01" + GAME = "02" + AUX = "03" + GAME2 = "04" + PC = "05" + VIDEO7 = "06" + EXTRA1 = "07" + EXTRA2 = "08" + EXTRA3 = "09" + DVD = "10" + STRM_BOX = "11" + TV = "12" + TAPE = "20" + TAPE2 = "21" + PHONO = "22" + CD = "23" + FM = "24" + AM = "25" + TUNER = "26" + MUSIC_SERVER = "27" + INTERNET_RADIO = "28" + USB = "29" + USB_REAR = "2A" + NETWORK = "2B" + AIRPLAY = "2D" + BLUETOOTH = "2E" + USB_DAC_IN = "2F" + MULTI_CH = "30" + XM = "31" + SIRIUS = "32" + DAB = "33" + UNIVERSAL_PORT = "40" + LINE = "41" + LINE2 = "42" + OPTICAL = "44" + COAXIAL = "45" + HDMI_5 = "55" + HDMI_6 = "56" + HDMI_7 = "57" + MAIN_SOURCE = "80" + + __meaning_mapping: ClassVar[dict[str, Self]] = {} # type: ignore[misc] + + value_meaning: str + + def __new__(cls, value: str) -> Self: + """Create InputSource enum.""" + obj = object.__new__(cls) + obj._value_ = value + obj.value_meaning = _INPUT_SOURCE_MEANINGS[value] + + cls.__meaning_mapping[obj.value_meaning] = obj + + return obj + + @classmethod + def from_meaning(cls, meaning: str) -> Self: + """Get InputSource enum from its meaning.""" + return cls.__meaning_mapping[meaning] + + +ZONES = {"main": "Main", "zone2": "Zone 2", "zone3": "Zone 3", "zone4": "Zone 4"} + +PYEISCP_COMMANDS = pyeiscp.commands.COMMANDS diff --git a/homeassistant/components/onkyo/manifest.json b/homeassistant/components/onkyo/manifest.json index 072dc9f9e3b..0e75404b3eb 100644 --- a/homeassistant/components/onkyo/manifest.json +++ b/homeassistant/components/onkyo/manifest.json @@ -2,7 +2,9 @@ "domain": "onkyo", "name": "Onkyo", "codeowners": ["@arturpragacz"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/onkyo", + "integration_type": "device", "iot_class": "local_push", "loggers": ["pyeiscp"], "requirements": ["pyeiscp==0.0.7"] diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index acc0459e258..24d63c0d9e4 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -3,42 +3,76 @@ from __future__ import annotations import asyncio -from dataclasses import dataclass import logging -from typing import Any +from typing import Any, Literal -import pyeiscp import voluptuous as vol from homeassistant.components.media_player 
import ( - DOMAIN, PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, MediaType, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_HOST, - CONF_NAME, - EVENT_HOMEASSISTANT_STOP, -) -from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.helpers import config_validation as cv +from homeassistant.config_entries import SOURCE_IMPORT +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from . import OnkyoConfigEntry +from .const import ( + CONF_RECEIVER_MAX_VOLUME, + CONF_SOURCES, + DOMAIN, + OPTION_MAX_VOLUME, + OPTION_VOLUME_RESOLUTION, + PYEISCP_COMMANDS, + ZONES, + InputSource, + VolumeResolution, +) +from .receiver import Receiver, async_discover +from .services import DATA_MP_ENTITIES + _LOGGER = logging.getLogger(__name__) -CONF_SOURCES = "sources" -CONF_MAX_VOLUME = "max_volume" -CONF_RECEIVER_MAX_VOLUME = "receiver_max_volume" +CONF_MAX_VOLUME_DEFAULT = 100 +CONF_RECEIVER_MAX_VOLUME_DEFAULT = 80 +CONF_SOURCES_DEFAULT = { + "tv": "TV", + "bd": "Bluray", + "game": "Game", + "aux1": "Aux1", + "video1": "Video 1", + "video2": "Video 2", + "video3": "Video 3", + "video4": "Video 4", + "video5": "Video 5", + "video6": "Video 6", + "video7": "Video 7", + "fm": "Radio", +} -DEFAULT_NAME = "Onkyo Receiver" -SUPPORTED_MAX_VOLUME = 100 -DEFAULT_RECEIVER_MAX_VOLUME = 80 -ZONES = {"zone2": "Zone 2", "zone3": "Zone 3", "zone4": "Zone 4"} +PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( + { + vol.Optional(CONF_HOST): cv.string, + vol.Optional(CONF_NAME): cv.string, + vol.Optional(OPTION_MAX_VOLUME, default=CONF_MAX_VOLUME_DEFAULT): vol.All( + vol.Coerce(int), vol.Range(min=1, max=100) + ), + vol.Optional( + CONF_RECEIVER_MAX_VOLUME, default=CONF_RECEIVER_MAX_VOLUME_DEFAULT + ): cv.positive_int, + vol.Optional(CONF_SOURCES, default=CONF_SOURCES_DEFAULT): { + cv.string: cv.string + }, + } +) SUPPORT_ONKYO_WO_VOLUME = ( MediaPlayerEntityFeature.TURN_ON @@ -53,39 +87,12 @@ SUPPORT_ONKYO = ( | MediaPlayerEntityFeature.VOLUME_STEP ) -KNOWN_HOSTS: list[str] = [] - -DEFAULT_SOURCES = { - "tv": "TV", - "bd": "Bluray", - "game": "Game", - "aux1": "Aux1", - "video1": "Video 1", - "video2": "Video 2", - "video3": "Video 3", - "video4": "Video 4", - "video5": "Video 5", - "video6": "Video 6", - "video7": "Video 7", - "fm": "Radio", -} -DEFAULT_PLAYABLE_SOURCES = ("fm", "am", "tuner") - -PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_HOST): cv.string, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_MAX_VOLUME, default=SUPPORTED_MAX_VOLUME): vol.All( - vol.Coerce(int), vol.Range(min=1, max=100) - ), - vol.Optional( - CONF_RECEIVER_MAX_VOLUME, default=DEFAULT_RECEIVER_MAX_VOLUME - ): cv.positive_int, - vol.Optional(CONF_SOURCES, default=DEFAULT_SOURCES): {cv.string: cv.string}, - } +DEFAULT_PLAYABLE_SOURCES = ( + InputSource.from_meaning("FM"), + InputSource.from_meaning("AM"), + InputSource.from_meaning("TUNER"), ) -ATTR_HDMI_OUTPUT = "hdmi_output" ATTR_PRESET = "preset" ATTR_AUDIO_INFORMATION 
= "audio_information" ATTR_VIDEO_INFORMATION = "video_information" @@ -104,6 +111,7 @@ AUDIO_INFORMATION_MAPPING = [ "precision_quartz_lock_system", "auto_phase_control_delay", "auto_phase_control_phase", + "upmix_mode", ] VIDEO_INFORMATION_MAPPING = [ @@ -116,36 +124,33 @@ VIDEO_INFORMATION_MAPPING = [ "output_color_schema", "output_color_depth", "picture_mode", + "input_hdr", ] +ISSUE_URL_PLACEHOLDER = "/config/integrations/dashboard/add?domain=onkyo" -ACCEPTED_VALUES = [ - "no", - "analog", - "yes", - "out", - "out-sub", - "sub", - "hdbaset", - "both", - "up", -] -ONKYO_SELECT_OUTPUT_SCHEMA = vol.Schema( - { - vol.Required(ATTR_ENTITY_ID): cv.entity_ids, - vol.Required(ATTR_HDMI_OUTPUT): vol.In(ACCEPTED_VALUES), - } -) -SERVICE_SELECT_HDMI_OUTPUT = "onkyo_select_hdmi_output" +type InputLibValue = str | tuple[str, ...] -@dataclass -class ReceiverInfo: - """Onkyo Receiver information.""" +def _input_lib_cmds(zone: str) -> dict[InputSource, InputLibValue]: + match zone: + case "main": + cmds = PYEISCP_COMMANDS["main"]["SLI"] + case "zone2": + cmds = PYEISCP_COMMANDS["zone2"]["SLZ"] + case "zone3": + cmds = PYEISCP_COMMANDS["zone3"]["SL3"] + case "zone4": + cmds = PYEISCP_COMMANDS["zone4"]["SL4"] - host: str - port: int - model_name: str - identifier: str + result: dict[InputSource, InputLibValue] = {} + for k, v in cmds["values"].items(): + try: + source = InputSource(k) + except ValueError: + continue + result[source] = v["name"] + + return result async def async_setup_platform( @@ -154,147 +159,167 @@ async def async_setup_platform( async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: - """Set up the Onkyo platform.""" - receivers: dict[str, pyeiscp.Connection] = {} # indexed by host - entities: dict[str, dict[str, OnkyoMediaPlayer]] = {} # indexed by host and zone - - async def async_service_handle(service: ServiceCall) -> None: - """Handle for services.""" - entity_ids = service.data[ATTR_ENTITY_ID] - targets = [ - entity - for h in entities.values() - for entity in h.values() - if entity.entity_id in entity_ids - ] - - for target in targets: - if service.service == SERVICE_SELECT_HDMI_OUTPUT: - await target.async_select_output(service.data[ATTR_HDMI_OUTPUT]) - - hass.services.async_register( - DOMAIN, - SERVICE_SELECT_HDMI_OUTPUT, - async_service_handle, - schema=ONKYO_SELECT_OUTPUT_SCHEMA, - ) - + """Import config from yaml.""" host = config.get(CONF_HOST) - name = config.get(CONF_NAME) - max_volume = config[CONF_MAX_VOLUME] - receiver_max_volume = config[CONF_RECEIVER_MAX_VOLUME] - sources = config[CONF_SOURCES] - async def async_setup_receiver( - info: ReceiverInfo, discovered: bool, name: str | None - ) -> None: - @callback - def async_onkyo_update_callback( - message: tuple[str, str, Any], origin: str - ) -> None: - """Process new message from receiver.""" - receiver = receivers[origin] - _LOGGER.debug( - "Received update callback from %s: %s", receiver.name, message - ) + source_mapping: dict[str, InputSource] = {} + for zone in ZONES: + for source, source_lib in _input_lib_cmds(zone).items(): + if isinstance(source_lib, str): + source_mapping.setdefault(source_lib, source) + else: + for source_lib_single in source_lib: + source_mapping.setdefault(source_lib_single, source) - zone, _, value = message - entity = entities[origin].get(zone) - if entity is not None: - if entity.enabled: - entity.process_update(message) - elif zone in ZONES and value != "N/A": - # When we receive the status for a zone, and the value is not "N/A", - # 
then zone is available on the receiver, so we create the entity for it. - _LOGGER.debug("Discovered %s on %s", ZONES[zone], receiver.name) - zone_entity = OnkyoMediaPlayer( - receiver, sources, zone, max_volume, receiver_max_volume - ) - entities[origin][zone] = zone_entity - async_add_entities([zone_entity]) + sources: dict[InputSource, str] = {} + for source_lib_single, source_name in config[CONF_SOURCES].items(): + user_source = source_mapping.get(source_lib_single.lower()) + if user_source is not None: + sources[user_source] = source_name - @callback - def async_onkyo_connect_callback(origin: str) -> None: - """Receiver (re)connected.""" - receiver = receivers[origin] - _LOGGER.debug( - "Receiver (re)connected: %s (%s)", receiver.name, receiver.host - ) - - for entity in entities[origin].values(): - entity.backfill_state() - - _LOGGER.debug("Creating receiver: %s (%s)", info.model_name, info.host) - receiver = await pyeiscp.Connection.create( - host=info.host, - port=info.port, - update_callback=async_onkyo_update_callback, - connect_callback=async_onkyo_connect_callback, - ) - - receiver.model_name = info.model_name - receiver.identifier = info.identifier - receiver.name = name or info.model_name - receiver.discovered = discovered - - # Store the receiver object and create a dictionary to store its entities. - receivers[receiver.host] = receiver - entities[receiver.host] = {} - - # Discover what zones are available for the receiver by querying the power. - # If we get a response for the specific zone, it means it is available. - for zone in ZONES: - receiver.query_property(zone, "power") - - # Add the main zone to entities, since it is always active. - _LOGGER.debug("Adding Main Zone on %s", receiver.name) - main_entity = OnkyoMediaPlayer( - receiver, sources, "main", max_volume, receiver_max_volume - ) - entities[receiver.host]["main"] = main_entity - async_add_entities([main_entity]) + config[CONF_SOURCES] = sources + results = [] if host is not None: - if host in KNOWN_HOSTS: - return - - _LOGGER.debug("Manually creating receiver: %s (%s)", name, host) - - @callback - async def async_onkyo_interview_callback(conn: pyeiscp.Connection): - """Receiver interviewed, connection not yet active.""" - info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) - _LOGGER.debug("Receiver interviewed: %s (%s)", info.model_name, info.host) - if info.host not in KNOWN_HOSTS: - KNOWN_HOSTS.append(info.host) - await async_setup_receiver(info, False, name) - - await pyeiscp.Connection.discover( - host=host, - discovery_callback=async_onkyo_interview_callback, + _LOGGER.debug("Importing yaml single: %s", host) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config ) + results.append((host, result)) else: - _LOGGER.debug("Discovering receivers") + for info in await async_discover(): + host = info.host - @callback - async def async_onkyo_discovery_callback(conn: pyeiscp.Connection): - """Receiver discovered, connection not yet active.""" - info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) - _LOGGER.debug("Receiver discovered: %s (%s)", info.model_name, info.host) - if info.host not in KNOWN_HOSTS: - KNOWN_HOSTS.append(info.host) - await async_setup_receiver(info, True, None) + # Migrate legacy entities. 
+ registry = er.async_get(hass) + old_unique_id = f"{info.model_name}_{info.identifier}" + new_unique_id = f"{info.identifier}_main" + entity_id = registry.async_get_entity_id( + "media_player", DOMAIN, old_unique_id + ) + if entity_id is not None: + _LOGGER.debug( + "Migrating unique_id from [%s] to [%s] for entity %s", + old_unique_id, + new_unique_id, + entity_id, + ) + registry.async_update_entity(entity_id, new_unique_id=new_unique_id) - await pyeiscp.Connection.discover( - discovery_callback=async_onkyo_discovery_callback, + _LOGGER.debug("Importing yaml discover: %s", info.host) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config | {CONF_HOST: info.host} | {"info": info}, + ) + results.append((host, result)) + + _LOGGER.debug("Importing yaml results: %s", results) + if not results: + async_create_issue( + hass, + DOMAIN, + "deprecated_yaml_import_issue_no_discover", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml_import_issue_no_discover", + translation_placeholders={"url": ISSUE_URL_PLACEHOLDER}, ) - @callback - def close_receiver(_event): - for receiver in receivers.values(): - receiver.close() + all_successful = True + for host, result in results: + if ( + result.get("type") == FlowResultType.CREATE_ENTRY + or result.get("reason") == "already_configured" + ): + continue + if error := result.get("reason"): + all_successful = False + async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_import_issue_{host}_{error}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_yaml_import_issue_{error}", + translation_placeholders={ + "host": host, + "url": ISSUE_URL_PLACEHOLDER, + }, + ) - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_receiver) + if all_successful: + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + is_fixable=False, + issue_domain=DOMAIN, + breaks_in_ha_version="2025.5.0", + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "onkyo", + }, + ) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: OnkyoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up MediaPlayer for config entry.""" + data = entry.runtime_data + + receiver = data.receiver + all_entities = hass.data[DATA_MP_ENTITIES] + + entities: dict[str, OnkyoMediaPlayer] = {} + all_entities[entry.entry_id] = entities + + volume_resolution: VolumeResolution = entry.options[OPTION_VOLUME_RESOLUTION] + max_volume: float = entry.options[OPTION_MAX_VOLUME] + sources = data.sources + + def connect_callback(receiver: Receiver) -> None: + if not receiver.first_connect: + for entity in entities.values(): + if entity.enabled: + entity.backfill_state() + + def update_callback(receiver: Receiver, message: tuple[str, str, Any]) -> None: + zone, _, value = message + entity = entities.get(zone) + if entity is not None: + if entity.enabled: + entity.process_update(message) + elif zone in ZONES and value != "N/A": + # When we receive the status for a zone, and the value is not "N/A", + # then zone is available on the receiver, so we create the entity for it. 
+ _LOGGER.debug( + "Discovered %s on %s (%s)", + ZONES[zone], + receiver.model_name, + receiver.host, + ) + zone_entity = OnkyoMediaPlayer( + receiver, + zone, + volume_resolution=volume_resolution, + max_volume=max_volume, + sources=sources, + ) + entities[zone] = zone_entity + async_add_entities([zone_entity]) + + receiver.callbacks.connect.append(connect_callback) + receiver.callbacks.update.append(update_callback) class OnkyoMediaPlayer(MediaPlayerEntity): @@ -309,28 +334,31 @@ class OnkyoMediaPlayer(MediaPlayerEntity): def __init__( self, - receiver: pyeiscp.Connection, - sources: dict[str, str], + receiver: Receiver, zone: str, - max_volume: int, - volume_resolution: int, + *, + volume_resolution: VolumeResolution, + max_volume: float, + sources: dict[InputSource, str], ) -> None: """Initialize the Onkyo Receiver.""" self._receiver = receiver - name = receiver.name + name = receiver.model_name identifier = receiver.identifier self._attr_name = f"{name}{' ' + ZONES[zone] if zone != 'main' else ''}" - if receiver.discovered and zone == "main": - # keep legacy unique_id - self._attr_unique_id = f"{name}_{identifier}" - else: - self._attr_unique_id = f"{identifier}_{zone}" + self._attr_unique_id = f"{identifier}_{zone}" self._zone = zone - self._source_mapping = sources - self._reverse_mapping = {value: key for key, value in sources.items()} - self._max_volume = max_volume + self._volume_resolution = volume_resolution + self._max_volume = max_volume + + self._name_mapping = sources + self._reverse_name_mapping = {value: key for key, value in sources.items()} + self._lib_mapping = _input_lib_cmds(zone) + self._reverse_lib_mapping = { + value: key for key, value in self._lib_mapping.items() + } self._attr_source_list = list(sources.values()) self._attr_extra_state_attributes = {} @@ -355,12 +383,12 @@ class OnkyoMediaPlayer(MediaPlayerEntity): @callback def _update_receiver(self, propname: str, value: Any) -> None: """Update a property in the receiver.""" - self._receiver.update_property(self._zone, propname, value) + self._receiver.conn.update_property(self._zone, propname, value) @callback def _query_receiver(self, propname: str) -> None: """Cause the receiver to send an update about a property.""" - self._receiver.query_property(self._zone, propname) + self._receiver.conn.query_property(self._zone, propname) async def async_turn_on(self) -> None: """Turn the media player on.""" @@ -402,9 +430,13 @@ class OnkyoMediaPlayer(MediaPlayerEntity): async def async_select_source(self, source: str) -> None: """Select input source.""" if self.source_list and source in self.source_list: - source = self._reverse_mapping[source] + source_lib = self._lib_mapping[self._reverse_name_mapping[source]] + if isinstance(source_lib, str): + source_lib_single = source_lib + else: + source_lib_single = source_lib[0] self._update_receiver( - "input-selector" if self._zone == "main" else "selector", source + "input-selector" if self._zone == "main" else "selector", source_lib_single ) async def async_select_output(self, hdmi_output: str) -> None: @@ -416,7 +448,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity): ) -> None: """Play radio station by preset number.""" if self.source is not None: - source = self._reverse_mapping[self.source] + source = self._reverse_name_mapping[self.source] if media_type.lower() == "radio" and source in DEFAULT_PLAYABLE_SOURCES: self._update_receiver("preset", media_id) @@ -460,9 +492,10 @@ class OnkyoMediaPlayer(MediaPlayerEntity): elif command in ["volume", "master-volume"] and value 
!= "N/A": self._supports_volume = True # AMP_VOL / (VOL_RESOLUTION * (MAX_VOL / 100)) - self._attr_volume_level = value / ( + volume_level: float = value / ( self._volume_resolution * self._max_volume / 100 ) + self._attr_volume_level = min(1, volume_level) elif command in ["muting", "audio-muting"]: self._attr_is_volume_muted = bool(value == "on") elif command in ["selector", "input-selector"]: @@ -487,19 +520,22 @@ class OnkyoMediaPlayer(MediaPlayerEntity): self.async_write_ha_state() @callback - def _parse_source(self, source): - # source is either a tuple of values or a single value, - # so we convert to a tuple, when it is a single value. - if not isinstance(source, tuple): - source = (source,) - for value in source: - if value in self._source_mapping: - self._attr_source = self._source_mapping[value] - break - self._attr_source = "_".join(source) + def _parse_source(self, source_lib: InputLibValue) -> None: + source = self._reverse_lib_mapping[source_lib] + if source in self._name_mapping: + self._attr_source = self._name_mapping[source] + return + + source_meaning = source.value_meaning + _LOGGER.error( + 'Input source "%s" not in source list: %s', source_meaning, self.entity_id + ) + self._attr_source = source_meaning @callback - def _parse_audio_information(self, audio_information): + def _parse_audio_information( + self, audio_information: tuple[str] | Literal["N/A"] + ) -> None: # If audio information is not available, N/A is returned, # so only update the audio information, when it is not N/A. if audio_information == "N/A": @@ -515,7 +551,9 @@ class OnkyoMediaPlayer(MediaPlayerEntity): } @callback - def _parse_video_information(self, video_information): + def _parse_video_information( + self, video_information: tuple[str] | Literal["N/A"] + ) -> None: # If video information is not available, N/A is returned, # so only update the video information, when it is not N/A. if video_information == "N/A": @@ -530,11 +568,11 @@ class OnkyoMediaPlayer(MediaPlayerEntity): if len(value) > 0 } - def _query_av_info_delayed(self): + def _query_av_info_delayed(self) -> None: if self._zone == "main" and not self._query_timer: @callback - def _query_av_info(): + def _query_av_info() -> None: if self._supports_audio_info: self._query_receiver("audio-information") if self._supports_video_info: diff --git a/homeassistant/components/onkyo/quality_scale.yaml b/homeassistant/components/onkyo/quality_scale.yaml new file mode 100644 index 00000000000..cdcf88e72d7 --- /dev/null +++ b/homeassistant/components/onkyo/quality_scale.yaml @@ -0,0 +1,83 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: + status: exempt + comment: | + This integration uses a push API. No polling required. + brands: done + common-modules: done + config-flow: done + config-flow-test-coverage: + status: todo + comment: | + Coverage is 100%, but the tests need to be improved. + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: done + comment: | + Currently we store created entities in hass.data. That should be removed in the future. 
+ entity-unique-id: done + has-entity-name: todo + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: todo + # Gold + devices: todo + diagnostics: todo + discovery: todo + discovery-update-info: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration has a fixed single device. + entity-category: done + entity-device-class: todo + entity-disabled-by-default: done + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: done + repair-issues: done + stale-devices: + status: exempt + comment: | + This integration has a fixed single device. + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration is not making any HTTP requests. + strict-typing: + status: todo + comment: | + The library is not fully typed yet. diff --git a/homeassistant/components/onkyo/receiver.py b/homeassistant/components/onkyo/receiver.py new file mode 100644 index 00000000000..cc6cbbc95fb --- /dev/null +++ b/homeassistant/components/onkyo/receiver.py @@ -0,0 +1,151 @@ +"""Onkyo receiver.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable, Iterable +import contextlib +from dataclasses import dataclass, field +import logging +from typing import Any + +import pyeiscp + +from .const import DEVICE_DISCOVERY_TIMEOUT, DEVICE_INTERVIEW_TIMEOUT, ZONES + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class Callbacks: + """Onkyo Receiver Callbacks.""" + + connect: list[Callable[[Receiver], None]] = field(default_factory=list) + update: list[Callable[[Receiver, tuple[str, str, Any]], None]] = field( + default_factory=list + ) + + +@dataclass +class Receiver: + """Onkyo receiver.""" + + conn: pyeiscp.Connection + model_name: str + identifier: str + host: str + first_connect: bool = True + callbacks: Callbacks = field(default_factory=Callbacks) + + @classmethod + async def async_create(cls, info: ReceiverInfo) -> Receiver: + """Set up Onkyo Receiver.""" + + receiver: Receiver | None = None + + def on_connect(_origin: str) -> None: + assert receiver is not None + receiver.on_connect() + + def on_update(message: tuple[str, str, Any], _origin: str) -> None: + assert receiver is not None + receiver.on_update(message) + + _LOGGER.debug("Creating receiver: %s (%s)", info.model_name, info.host) + + connection = await pyeiscp.Connection.create( + host=info.host, + port=info.port, + connect_callback=on_connect, + update_callback=on_update, + auto_connect=False, + ) + + return ( + receiver := cls( + conn=connection, + model_name=info.model_name, + identifier=info.identifier, + host=info.host, + ) + ) + + def on_connect(self) -> None: + """Receiver (re)connected.""" + _LOGGER.debug("Receiver (re)connected: %s (%s)", self.model_name, self.host) + + # Discover what zones are available for the receiver by querying the power. 
+ # If we get a response for the specific zone, it means it is available. + for zone in ZONES: + self.conn.query_property(zone, "power") + + for callback in self.callbacks.connect: + callback(self) + + self.first_connect = False + + def on_update(self, message: tuple[str, str, Any]) -> None: + """Process new message from the receiver.""" + _LOGGER.debug("Received update callback from %s: %s", self.model_name, message) + for callback in self.callbacks.update: + callback(self, message) + + +@dataclass +class ReceiverInfo: + """Onkyo receiver information.""" + + host: str + port: int + model_name: str + identifier: str + + +async def async_interview(host: str) -> ReceiverInfo | None: + """Interview Onkyo Receiver.""" + _LOGGER.debug("Interviewing receiver: %s", host) + + receiver_info: ReceiverInfo | None = None + + event = asyncio.Event() + + async def _callback(conn: pyeiscp.Connection) -> None: + """Receiver interviewed, connection not yet active.""" + nonlocal receiver_info + if receiver_info is None: + info = ReceiverInfo(host, conn.port, conn.name, conn.identifier) + _LOGGER.debug("Receiver interviewed: %s (%s)", info.model_name, info.host) + receiver_info = info + event.set() + + timeout = DEVICE_INTERVIEW_TIMEOUT + + await pyeiscp.Connection.discover( + host=host, discovery_callback=_callback, timeout=timeout + ) + + with contextlib.suppress(asyncio.TimeoutError): + await asyncio.wait_for(event.wait(), timeout) + + return receiver_info + + +async def async_discover() -> Iterable[ReceiverInfo]: + """Discover Onkyo Receivers.""" + _LOGGER.debug("Discovering receivers") + + receiver_infos: list[ReceiverInfo] = [] + + async def _callback(conn: pyeiscp.Connection) -> None: + """Receiver discovered, connection not yet active.""" + info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) + _LOGGER.debug("Receiver discovered: %s (%s)", info.model_name, info.host) + receiver_infos.append(info) + + timeout = DEVICE_DISCOVERY_TIMEOUT + + await pyeiscp.Connection.discover(discovery_callback=_callback, timeout=timeout) + + await asyncio.sleep(timeout) + + return receiver_infos diff --git a/homeassistant/components/onkyo/services.py b/homeassistant/components/onkyo/services.py new file mode 100644 index 00000000000..d875d8287fe --- /dev/null +++ b/homeassistant/components/onkyo/services.py @@ -0,0 +1,69 @@ +"""Onkyo services.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import voluptuous as vol + +from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import config_validation as cv +from homeassistant.util.hass_dict import HassKey + +from .const import DOMAIN + +if TYPE_CHECKING: + from .media_player import OnkyoMediaPlayer + +DATA_MP_ENTITIES: HassKey[dict[str, dict[str, OnkyoMediaPlayer]]] = HassKey(DOMAIN) + +ATTR_HDMI_OUTPUT = "hdmi_output" +ACCEPTED_VALUES = [ + "no", + "analog", + "yes", + "out", + "out-sub", + "sub", + "hdbaset", + "both", + "up", +] +ONKYO_SELECT_OUTPUT_SCHEMA = vol.Schema( + { + vol.Required(ATTR_ENTITY_ID): cv.entity_ids, + vol.Required(ATTR_HDMI_OUTPUT): vol.In(ACCEPTED_VALUES), + } +) +SERVICE_SELECT_HDMI_OUTPUT = "onkyo_select_hdmi_output" + + +async def async_register_services(hass: HomeAssistant) -> None: + """Register Onkyo services.""" + + hass.data.setdefault(DATA_MP_ENTITIES, {}) + + async def async_service_handle(service: ServiceCall) -> None: + """Handle 
for services.""" + entity_ids = service.data[ATTR_ENTITY_ID] + + targets: list[OnkyoMediaPlayer] = [] + for receiver_entities in hass.data[DATA_MP_ENTITIES].values(): + targets.extend( + entity + for entity in receiver_entities.values() + if entity.entity_id in entity_ids + ) + + for target in targets: + if service.service == SERVICE_SELECT_HDMI_OUTPUT: + await target.async_select_output(service.data[ATTR_HDMI_OUTPUT]) + + hass.services.async_register( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_HDMI_OUTPUT, + async_service_handle, + schema=ONKYO_SELECT_OUTPUT_SCHEMA, + ) diff --git a/homeassistant/components/onkyo/strings.json b/homeassistant/components/onkyo/strings.json new file mode 100644 index 00000000000..95ca1199a36 --- /dev/null +++ b/homeassistant/components/onkyo/strings.json @@ -0,0 +1,73 @@ +{ + "config": { + "step": { + "user": { + "menu_options": { + "manual": "Manual entry", + "eiscp_discovery": "Onkyo discovery" + } + }, + "manual": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "Hostname or IP address of the receiver." + } + }, + "eiscp_discovery": { + "data": { + "device": "[%key:common::config_flow::data::device%]" + }, + "data_description": { + "device": "Select the receiver to configure." + } + }, + "configure_receiver": { + "description": "Configure {name}", + "data": { + "volume_resolution": "Volume resolution", + "input_sources": "Input sources" + }, + "data_description": { + "volume_resolution": "Number of steps it takes for the receiver to go from the lowest to the highest possible volume.", + "input_sources": "List of input sources supported by the receiver." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "empty_input_source_list": "Input source list cannot be empty", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The serial number of the device does not match the previous serial number", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "options": { + "step": { + "init": { + "data": { + "max_volume": "Maximum volume limit (%)" + }, + "data_description": { + "max_volume": "Maximum volume limit as a percentage. This will associate Home Assistant's maximum volume to this value on the receiver, i.e., if you set this to 50%, then setting the volume to 100% in Home Assistant will cause the volume on the receiver to be set to 50% of its maximum value." + } + } + } + }, + "issues": { + "deprecated_yaml_import_issue_no_discover": { + "title": "The Onkyo YAML configuration import failed", + "description": "Configuring Onkyo using YAML is being removed but no receivers were discovered when importing your YAML configuration.\n\nEnsure the connection to the receiver works and restart Home Assistant to try again or remove the Onkyo YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
+ }, + "deprecated_yaml_import_issue_cannot_connect": { + "title": "The Onkyo YAML configuration import failed", + "description": "Configuring Onkyo using YAML is being removed but there was a connection error when importing your YAML configuration for host {host}.\n\nEnsure the connection to the receiver works and restart Home Assistant to try again or remove the Onkyo YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + } + } +} diff --git a/homeassistant/components/onvif/binary_sensor.py b/homeassistant/components/onvif/binary_sensor.py index 4aa4d81e055..92c5ab45129 100644 --- a/homeassistant/components/onvif/binary_sensor.py +++ b/homeassistant/components/onvif/binary_sensor.py @@ -14,9 +14,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.util.enum import try_parse_enum -from .base import ONVIFBaseEntity from .const import DOMAIN from .device import ONVIFDevice +from .entity import ONVIFBaseEntity async def async_setup_entry( diff --git a/homeassistant/components/onvif/button.py b/homeassistant/components/onvif/button.py index 1e86b73fc66..644a7c942f7 100644 --- a/homeassistant/components/onvif/button.py +++ b/homeassistant/components/onvif/button.py @@ -6,9 +6,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .base import ONVIFBaseEntity from .const import DOMAIN from .device import ONVIFDevice +from .entity import ONVIFBaseEntity async def async_setup_entry( diff --git a/homeassistant/components/onvif/camera.py b/homeassistant/components/onvif/camera.py index 4b6dfa1a625..8c0fd027b95 100644 --- a/homeassistant/components/onvif/camera.py +++ b/homeassistant/components/onvif/camera.py @@ -24,7 +24,6 @@ from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .base import ONVIFBaseEntity from .const import ( ABSOLUTE_MOVE, ATTR_CONTINUOUS_DURATION, @@ -51,6 +50,7 @@ from .const import ( ZOOM_OUT, ) from .device import ONVIFDevice +from .entity import ONVIFBaseEntity from .models import Profile diff --git a/homeassistant/components/onvif/config_flow.py b/homeassistant/components/onvif/config_flow.py index 30184d1abc3..66e566af0bf 100644 --- a/homeassistant/components/onvif/config_flow.py +++ b/homeassistant/components/onvif/config_flow.py @@ -102,7 +102,6 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a ONVIF config flow.""" VERSION = 1 - _reauth_entry: ConfigEntry @staticmethod @callback @@ -136,30 +135,28 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication of an existing config entry.""" - reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert reauth_entry is not None - self._reauth_entry = reauth_entry return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm reauth.""" - entry = self._reauth_entry errors: dict[str, str] | None = {} + reauth_entry = self._get_reauth_entry() description_placeholders: dict[str, str] | None = None if user_input is not None: - entry_data = entry.data - 
self.onvif_config = entry_data | user_input + self.onvif_config = reauth_entry.data | user_input errors, description_placeholders = await self.async_setup_profiles( configure_unique_id=False ) if not errors: - return self.async_update_reload_and_abort(entry, data=self.onvif_config) + return self.async_update_reload_and_abort( + reauth_entry, data=self.onvif_config + ) - username = (user_input or {}).get(CONF_USERNAME) or entry.data[CONF_USERNAME] + username = (user_input or {}).get(CONF_USERNAME) or reauth_entry.data[ + CONF_USERNAME + ] return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema( @@ -198,7 +195,9 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN): hass.async_create_task(self.hass.config_entries.async_reload(entry_id)) return self.async_abort(reason="already_configured") - async def async_step_device(self, user_input=None): + async def async_step_device( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle WS-Discovery. Let user choose between discovered devices and manual configuration. @@ -392,14 +391,15 @@ class OnvifOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize ONVIF options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) - async def async_step_init(self, user_input=None): + async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the ONVIF options.""" return await self.async_step_onvif_devices() - async def async_step_onvif_devices(self, user_input=None): + async def async_step_onvif_devices( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the ONVIF devices options.""" if user_input is not None: self.options[CONF_EXTRA_ARGUMENTS] = user_input[CONF_EXTRA_ARGUMENTS] diff --git a/homeassistant/components/onvif/base.py b/homeassistant/components/onvif/entity.py similarity index 100% rename from homeassistant/components/onvif/base.py rename to homeassistant/components/onvif/entity.py diff --git a/homeassistant/components/onvif/event.py b/homeassistant/components/onvif/event.py index a8f1b7f702d..4b5335f1eb6 100644 --- a/homeassistant/components/onvif/event.py +++ b/homeassistant/components/onvif/event.py @@ -157,6 +157,7 @@ class EventManager: # tns1:RuleEngine/CellMotionDetector/Motion//. 
# tns1:RuleEngine/CellMotionDetector/Motion # tns1:RuleEngine/CellMotionDetector/Motion/ + # tns1:UserAlarm/IVA/HumanShapeDetect # # Our parser expects the topic to be # tns1:RuleEngine/CellMotionDetector/Motion @@ -164,7 +165,7 @@ class EventManager: if not (parser := PARSERS.get(topic)): if topic not in UNHANDLED_TOPICS: - LOGGER.info( + LOGGER.warning( "%s: No registered handler for event from %s: %s", self.name, unique_id, @@ -176,7 +177,7 @@ class EventManager: event = await parser(unique_id, msg) if not event: - LOGGER.info( + LOGGER.warning( "%s: Unable to parse event from %s: %s", self.name, unique_id, msg ) return diff --git a/homeassistant/components/onvif/icons.json b/homeassistant/components/onvif/icons.json index 4db9a9f9e49..d42985d34e8 100644 --- a/homeassistant/components/onvif/icons.json +++ b/homeassistant/components/onvif/icons.json @@ -13,6 +13,8 @@ } }, "services": { - "ptz": "mdi:pan" + "ptz": { + "service": "mdi:pan" + } } } diff --git a/homeassistant/components/onvif/manifest.json b/homeassistant/components/onvif/manifest.json index d03073dcfd3..02ef16b6787 100644 --- a/homeassistant/components/onvif/manifest.json +++ b/homeassistant/components/onvif/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/onvif", "iot_class": "local_push", "loggers": ["onvif", "wsdiscovery", "zeep"], - "requirements": ["onvif-zeep-async==3.1.12", "WSDiscovery==2.0.0"] + "requirements": ["onvif-zeep-async==3.1.13", "WSDiscovery==2.0.0"] } diff --git a/homeassistant/components/onvif/parsers.py b/homeassistant/components/onvif/parsers.py index c67cdceed54..d7bbaa4fb3f 100644 --- a/homeassistant/components/onvif/parsers.py +++ b/homeassistant/components/onvif/parsers.py @@ -370,6 +370,63 @@ async def async_parse_vehicle_detector(uid: str, msg) -> Event | None: return None +@PARSERS.register("tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent") +@PARSERS.register("tns1:RuleEngine/PeopleDetector/People") +async def async_parse_tplink_detector(uid: str, msg) -> Event | None: + """Handle parsing tplink smart event messages. + + Topic: tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent + Topic: tns1:RuleEngine/PeopleDetector/People + """ + video_source = "" + video_analytics = "" + rule = "" + topic = "" + vehicle = False + person = False + enabled = False + try: + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value + + for item in payload.Data.SimpleItem: + if item.Name == "IsVehicle": + vehicle = True + enabled = item.Value == "true" + if item.Name == "IsPeople": + person = True + enabled = item.Value == "true" + except (AttributeError, KeyError): + return None + + if vehicle: + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Vehicle Detection", + "binary_sensor", + "motion", + None, + enabled, + ) + if person: + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Person Detection", + "binary_sensor", + "motion", + None, + enabled, + ) + + return None + + @PARSERS.register("tns1:RuleEngine/MyRuleDetector/PeopleDetect") async def async_parse_person_detector(uid: str, msg) -> Event | None: """Handle parsing event message. 
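Note on the two parsers added above: they follow the registry pattern used throughout parsers.py — a coroutine is registered for a topic with @PARSERS.register, pulls the topic and payload apart via extract_message, and returns an Event whose positional arguments (mirroring the calls above) are uid, name, platform, device class, unit, and value. A minimal sketch of another handler written against that same pattern is shown below; the topic string, function name, and entity name are hypothetical, and the sketch assumes it sits alongside the existing handlers in homeassistant/components/onvif/parsers.py so that PARSERS, Event, extract_message, and _normalize_video_source are already in scope.

# Hypothetical handler, for illustration only — not part of this change.
@PARSERS.register("tns1:UserAlarm/IVA/AnimalShapeDetect")  # hypothetical topic
async def async_parse_animal_shape_detect(uid: str, msg) -> Event | None:
    """Handle parsing event message.

    Topic: tns1:UserAlarm/IVA/AnimalShapeDetect (illustration only)
    """
    try:
        # Split the notification into its normalized topic and payload.
        topic, payload = extract_message(msg)
        video_source = ""
        for source in payload.Source.SimpleItem:
            if source.Name == "VideoSourceConfigurationToken":
                video_source = _normalize_video_source(source.Value)
                break

        # Positional arguments mirror the existing parsers:
        # uid, name, platform, device class, unit, value.
        return Event(
            f"{uid}_{topic}_{video_source}",
            "Animal Shape Detect",  # hypothetical entity name
            "binary_sensor",
            "motion",
            None,
            payload.Data.SimpleItem[0].Value == "true",
        )
    except (AttributeError, KeyError):
        # Malformed or unexpected payloads are ignored, matching the handlers above.
        return None

The event manager dispatches on the parsed topic, and with the logging change earlier in this diff an unregistered topic now surfaces as a warning rather than an info message, which makes gaps in this registry easier to spot.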
@@ -711,3 +768,29 @@ async def async_parse_count_aggregation_counter(uid: str, msg) -> Event | None: ) except (AttributeError, KeyError): return None + + +@PARSERS.register("tns1:UserAlarm/IVA/HumanShapeDetect") +async def async_parse_human_shape_detect(uid: str, msg) -> Event | None: + """Handle parsing event message. + + Topic: tns1:UserAlarm/IVA/HumanShapeDetect + """ + try: + topic, payload = extract_message(msg) + video_source = "" + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + break + + return Event( + f"{uid}_{topic}_{video_source}", + "Human Shape Detect", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) + except (AttributeError, KeyError): + return None diff --git a/homeassistant/components/onvif/sensor.py b/homeassistant/components/onvif/sensor.py index 5b0c72e88dd..46db26361bc 100644 --- a/homeassistant/components/onvif/sensor.py +++ b/homeassistant/components/onvif/sensor.py @@ -13,9 +13,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.util.enum import try_parse_enum -from .base import ONVIFBaseEntity from .const import DOMAIN from .device import ONVIFDevice +from .entity import ONVIFBaseEntity async def async_setup_entry( diff --git a/homeassistant/components/onvif/strings.json b/homeassistant/components/onvif/strings.json index c3f0b89df3b..0afb5e59e8e 100644 --- a/homeassistant/components/onvif/strings.json +++ b/homeassistant/components/onvif/strings.json @@ -20,7 +20,7 @@ "auto": "Search automatically" }, "title": "ONVIF device setup", - "description": "By clicking submit, we will search your network for ONVIF devices that support Profile S.\n\nSome manufacturers have started to disable ONVIF by default. Please ensure ONVIF is enabled in your camera's configuration." + "description": "By selecting **Submit**, we will search your network for ONVIF devices that support Profile S.\n\nSome manufacturers have started to disable ONVIF by default. Please ensure ONVIF is enabled in your camera's configuration." 
}, "device": { "data": { diff --git a/homeassistant/components/onvif/switch.py b/homeassistant/components/onvif/switch.py index 02b48d20bef..ff62e469af0 100644 --- a/homeassistant/components/onvif/switch.py +++ b/homeassistant/components/onvif/switch.py @@ -11,9 +11,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .base import ONVIFBaseEntity from .const import DOMAIN from .device import ONVIFDevice +from .entity import ONVIFBaseEntity from .models import Profile diff --git a/homeassistant/components/open_meteo/__init__.py b/homeassistant/components/open_meteo/__init__.py index e3bf763f429..6deb63904ff 100644 --- a/homeassistant/components/open_meteo/__init__.py +++ b/homeassistant/components/open_meteo/__init__.py @@ -62,6 +62,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator: DataUpdateCoordinator[Forecast] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_{entry.data[CONF_ZONE]}", update_interval=SCAN_INTERVAL, update_method=async_update_forecast, diff --git a/homeassistant/components/open_meteo/manifest.json b/homeassistant/components/open_meteo/manifest.json index abdb59a48d0..a2f2a724ad5 100644 --- a/homeassistant/components/open_meteo/manifest.json +++ b/homeassistant/components/open_meteo/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/open_meteo", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["open-meteo==0.3.1"] + "requirements": ["open-meteo==0.3.2"] } diff --git a/homeassistant/components/openai_conversation/__init__.py b/homeassistant/components/openai_conversation/__init__.py index 75b5db23094..0fbda9b7f4a 100644 --- a/homeassistant/components/openai_conversation/__init__.py +++ b/homeassistant/components/openai_conversation/__init__.py @@ -19,6 +19,7 @@ from homeassistant.exceptions import ( ServiceValidationError, ) from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, LOGGER @@ -88,7 +89,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: OpenAIConfigEntry) -> bool: """Set up OpenAI Conversation from a config entry.""" - client = openai.AsyncOpenAI(api_key=entry.data[CONF_API_KEY]) + client = openai.AsyncOpenAI( + api_key=entry.data[CONF_API_KEY], + http_client=get_async_client(hass), + ) + + # Cache current platform data which gets added to each request (caching done by library) + _ = await hass.async_add_executor_job(client.platform_headers) + try: await hass.async_add_executor_job(client.with_options(timeout=10.0).models.list) except openai.AuthenticationError as err: diff --git a/homeassistant/components/openai_conversation/config_flow.py b/homeassistant/components/openai_conversation/config_flow.py index 9a2b1b6fa79..2a1764e6b5e 100644 --- a/homeassistant/components/openai_conversation/config_flow.py +++ b/homeassistant/components/openai_conversation/config_flow.py @@ -26,6 +26,7 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, TemplateSelector, ) +from homeassistant.helpers.typing import VolDictType from .const import ( CONF_CHAT_MODEL, @@ -79,7 +80,7 @@ class OpenAIConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", 
data_schema=STEP_USER_DATA_SCHEMA ) - errors = {} + errors: dict[str, str] = {} try: await validate_input(self.hass, user_input) @@ -114,7 +115,6 @@ class OpenAIOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.last_rendered_recommended = config_entry.options.get( CONF_RECOMMENDED, False ) @@ -150,7 +150,7 @@ class OpenAIOptionsFlow(OptionsFlow): def openai_config_option_schema( hass: HomeAssistant, options: dict[str, Any] | MappingProxyType[str, Any], -) -> dict: +) -> VolDictType: """Return a schema for OpenAI completion options.""" hass_apis: list[SelectOptionDict] = [ SelectOptionDict( @@ -166,7 +166,7 @@ def openai_config_option_schema( for api in llm.async_get_apis(hass) ) - schema = { + schema: VolDictType = { vol.Optional( CONF_PROMPT, description={ diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py index a7109a6d6ec..9c73766c8d4 100644 --- a/homeassistant/components/openai_conversation/conversation.py +++ b/homeassistant/components/openai_conversation/conversation.py @@ -148,7 +148,7 @@ class OpenAIConversationEntity( LOGGER.error("Error getting LLM API: %s", err) intent_response.async_set_error( intent.IntentResponseErrorCode.UNKNOWN, - f"Error preparing LLM API: {err}", + "Error preparing LLM API", ) return conversation.ConversationResult( response=intent_response, conversation_id=user_input.conversation_id @@ -208,7 +208,7 @@ class OpenAIConversationEntity( intent_response = intent.IntentResponse(language=user_input.language) intent_response.async_set_error( intent.IntentResponseErrorCode.UNKNOWN, - f"Sorry, I had a problem with my template: {err}", + "Sorry, I had a problem with my template", ) return conversation.ConversationResult( response=intent_response, conversation_id=conversation_id @@ -248,10 +248,11 @@ class OpenAIConversationEntity( user=conversation_id, ) except openai.OpenAIError as err: + LOGGER.error("Error talking to OpenAI: %s", err) intent_response = intent.IntentResponse(language=user_input.language) intent_response.async_set_error( intent.IntentResponseErrorCode.UNKNOWN, - f"Sorry, I had a problem talking to OpenAI: {err}", + "Sorry, I had a problem talking to OpenAI", ) return conversation.ConversationResult( response=intent_response, conversation_id=conversation_id diff --git a/homeassistant/components/openai_conversation/icons.json b/homeassistant/components/openai_conversation/icons.json index 7f736a5ff3b..3abecd640d1 100644 --- a/homeassistant/components/openai_conversation/icons.json +++ b/homeassistant/components/openai_conversation/icons.json @@ -1,5 +1,7 @@ { "services": { - "generate_image": "mdi:image-sync" + "generate_image": { + "service": "mdi:image-sync" + } } } diff --git a/homeassistant/components/openalpr_cloud/manifest.json b/homeassistant/components/openalpr_cloud/manifest.json index 45bce5c7345..5148cb396b6 100644 --- a/homeassistant/components/openalpr_cloud/manifest.json +++ b/homeassistant/components/openalpr_cloud/manifest.json @@ -3,5 +3,6 @@ "name": "OpenALPR Cloud", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/openalpr_cloud", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/openerz/manifest.json b/homeassistant/components/openerz/manifest.json index c7a5a202568..f75e3e492a8 100644 --- a/homeassistant/components/openerz/manifest.json +++ 
b/homeassistant/components/openerz/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/openerz", "iot_class": "cloud_polling", "loggers": ["openerz_api"], + "quality_scale": "legacy", "requirements": ["openerz-api==0.3.0"] } diff --git a/homeassistant/components/openevse/manifest.json b/homeassistant/components/openevse/manifest.json index 066eb5ee384..45452fe325b 100644 --- a/homeassistant/components/openevse/manifest.json +++ b/homeassistant/components/openevse/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/openevse", "iot_class": "local_polling", "loggers": ["openevsewifi"], + "quality_scale": "legacy", "requirements": ["openevsewifi==1.1.2"] } diff --git a/homeassistant/components/openexchangerates/config_flow.py b/homeassistant/components/openexchangerates/config_flow.py index df83690d2e3..ffcc60bfa26 100644 --- a/homeassistant/components/openexchangerates/config_flow.py +++ b/homeassistant/components/openexchangerates/config_flow.py @@ -13,7 +13,7 @@ from aioopenexchangerates import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_BASE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import AbortFlow @@ -54,7 +54,6 @@ class OpenExchangeRatesConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the config flow.""" self.currencies: dict[str, str] = {} - self._reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -63,9 +62,9 @@ class OpenExchangeRatesConfigFlow(ConfigFlow, domain=DOMAIN): currencies = await self.async_get_currencies() if user_input is None: - existing_data: Mapping[str, str] | dict[str, str] = ( - self._reauth_entry.data if self._reauth_entry else {} - ) + existing_data: Mapping[str, Any] = {} + if self.source == SOURCE_REAUTH: + existing_data = self._get_reauth_entry().data return self.async_show_form( step_id="user", data_schema=get_data_schema(currencies, existing_data), @@ -95,12 +94,10 @@ class OpenExchangeRatesConfigFlow(ConfigFlow, domain=DOMAIN): } ) - if self._reauth_entry is not None: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=self._reauth_entry.data | user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_create_entry(title=info["title"], data=user_input) @@ -115,9 +112,6 @@ class OpenExchangeRatesConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() async def async_get_currencies(self) -> dict[str, str]: diff --git a/homeassistant/components/openexchangerates/manifest.json b/homeassistant/components/openexchangerates/manifest.json index a93a87a0785..9e5cd95a93d 100644 --- a/homeassistant/components/openexchangerates/manifest.json +++ b/homeassistant/components/openexchangerates/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": 
"https://www.home-assistant.io/integrations/openexchangerates", "iot_class": "cloud_polling", - "requirements": ["aioopenexchangerates==0.4.0"] + "requirements": ["aioopenexchangerates==0.6.8"] } diff --git a/homeassistant/components/opengarage/cover.py b/homeassistant/components/opengarage/cover.py index a165fcc4785..9623050c090 100644 --- a/homeassistant/components/opengarage/cover.py +++ b/homeassistant/components/opengarage/cover.py @@ -9,9 +9,9 @@ from homeassistant.components.cover import ( CoverDeviceClass, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -21,7 +21,7 @@ from .entity import OpenGarageEntity _LOGGER = logging.getLogger(__name__) -STATES_MAP = {0: STATE_CLOSED, 1: STATE_OPEN} +STATES_MAP = {0: CoverState.CLOSED, 1: CoverState.OPEN} async def async_setup_entry( @@ -54,36 +54,36 @@ class OpenGarageCover(OpenGarageEntity, CoverEntity): """Return if the cover is closed.""" if self._state is None: return None - return self._state == STATE_CLOSED + return self._state == CoverState.CLOSED @property def is_closing(self) -> bool | None: """Return if the cover is closing.""" if self._state is None: return None - return self._state == STATE_CLOSING + return self._state == CoverState.CLOSING @property def is_opening(self) -> bool | None: """Return if the cover is opening.""" if self._state is None: return None - return self._state == STATE_OPENING + return self._state == CoverState.OPENING async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" - if self._state in [STATE_CLOSED, STATE_CLOSING]: + if self._state in [CoverState.CLOSED, CoverState.CLOSING]: return self._state_before_move = self._state - self._state = STATE_CLOSING + self._state = CoverState.CLOSING await self._push_button() async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" - if self._state in [STATE_OPEN, STATE_OPENING]: + if self._state in [CoverState.OPEN, CoverState.OPENING]: return self._state_before_move = self._state - self._state = STATE_OPENING + self._state = CoverState.OPENING await self._push_button() @callback diff --git a/homeassistant/components/openhardwaremonitor/manifest.json b/homeassistant/components/openhardwaremonitor/manifest.json index 562a2433eab..901424eebc1 100644 --- a/homeassistant/components/openhardwaremonitor/manifest.json +++ b/homeassistant/components/openhardwaremonitor/manifest.json @@ -3,5 +3,6 @@ "name": "Open Hardware Monitor", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/openhardwaremonitor", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/openhardwaremonitor/sensor.py b/homeassistant/components/openhardwaremonitor/sensor.py index 4ef71a6c75f..30801a59436 100644 --- a/homeassistant/components/openhardwaremonitor/sensor.py +++ b/homeassistant/components/openhardwaremonitor/sensor.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorEntity, + SensorStateClass, ) from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant @@ -60,6 +61,8 @@ def setup_platform( class OpenHardwareMonitorDevice(SensorEntity): """Device used to 
display information from OpenHardwareMonitor.""" + _attr_state_class = SensorStateClass.MEASUREMENT + def __init__(self, data, name, path, unit_of_measurement): """Initialize an OpenHardwareMonitor sensor.""" self._name = name diff --git a/homeassistant/components/openhome/config_flow.py b/homeassistant/components/openhome/config_flow.py index 5b26b63922b..b495819211b 100644 --- a/homeassistant/components/openhome/config_flow.py +++ b/homeassistant/components/openhome/config_flow.py @@ -24,6 +24,9 @@ def _is_complete_discovery(discovery_info: SsdpServiceInfo) -> bool: class OpenhomeConfigFlow(ConfigFlow, domain=DOMAIN): """Handle an Openhome config flow.""" + _host: str | None + _name: str + async def async_step_ssdp( self, discovery_info: SsdpServiceInfo ) -> ConfigFlowResult: @@ -45,8 +48,8 @@ class OpenhomeConfigFlow(ConfigFlow, domain=DOMAIN): "async_step_ssdp: create entry %s", discovery_info.upnp[ATTR_UPNP_UDN] ) - self.context[CONF_NAME] = discovery_info.upnp[ATTR_UPNP_FRIENDLY_NAME] - self.context[CONF_HOST] = discovery_info.ssdp_location + self._name = discovery_info.upnp[ATTR_UPNP_FRIENDLY_NAME] + self._host = discovery_info.ssdp_location return await self.async_step_confirm() @@ -57,11 +60,11 @@ class OpenhomeConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: return self.async_create_entry( - title=self.context[CONF_NAME], - data={CONF_HOST: self.context[CONF_HOST]}, + title=self._name, + data={CONF_HOST: self._host}, ) return self.async_show_form( step_id="confirm", - description_placeholders={CONF_NAME: self.context[CONF_NAME]}, + description_placeholders={CONF_NAME: self._name}, ) diff --git a/homeassistant/components/openhome/icons.json b/homeassistant/components/openhome/icons.json index 081e97c3489..d75659f17da 100644 --- a/homeassistant/components/openhome/icons.json +++ b/homeassistant/components/openhome/icons.json @@ -1,5 +1,7 @@ { "services": { - "invoke_pin": "mdi:alarm-panel" + "invoke_pin": { + "service": "mdi:alarm-panel" + } } } diff --git a/homeassistant/components/opensensemap/manifest.json b/homeassistant/components/opensensemap/manifest.json index 8fed7ec906e..0256ae42a3a 100644 --- a/homeassistant/components/opensensemap/manifest.json +++ b/homeassistant/components/opensensemap/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/opensensemap", "iot_class": "cloud_polling", "loggers": ["opensensemap_api"], + "quality_scale": "legacy", "requirements": ["opensensemap-api==0.2.0"] } diff --git a/homeassistant/components/opensky/config_flow.py b/homeassistant/components/opensky/config_flow.py index 3cfd1ad30a0..867a4781265 100644 --- a/homeassistant/components/opensky/config_flow.py +++ b/homeassistant/components/opensky/config_flow.py @@ -13,12 +13,11 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, - CONF_NAME, CONF_PASSWORD, CONF_RADIUS, CONF_USERNAME, @@ -45,7 +44,7 @@ class OpenSkyConfigFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OpenSkyOptionsFlowHandler: """Get the options flow for this handler.""" - return OpenSkyOptionsFlowHandler(config_entry) + return OpenSkyOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -83,7 +82,7 @@ class OpenSkyConfigFlowHandler(ConfigFlow, domain=DOMAIN): ) -class OpenSkyOptionsFlowHandler(OptionsFlowWithConfigEntry): +class 
OpenSkyOptionsFlowHandler(OptionsFlow): """OpenSky Options flow handler.""" async def async_step_init( @@ -112,10 +111,7 @@ class OpenSkyOptionsFlowHandler(OptionsFlowWithConfigEntry): except OpenSkyUnauthenticatedError: errors["base"] = "invalid_auth" if not errors: - return self.async_create_entry( - title=self.options.get(CONF_NAME, "OpenSky"), - data=user_input, - ) + return self.async_create_entry(data=user_input) return self.async_show_form( step_id="init", @@ -130,6 +126,6 @@ class OpenSkyOptionsFlowHandler(OptionsFlowWithConfigEntry): vol.Optional(CONF_CONTRIBUTING_USER, default=False): bool, } ), - user_input or self.options, + user_input or self.config_entry.options, ), ) diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index 30410f73c2d..8c92c70ab49 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -4,11 +4,12 @@ import asyncio from datetime import date, datetime import logging -import pyotgw +from pyotgw import OpenThermGateway import pyotgw.vars as gw_vars from serial import SerialException import voluptuous as vol +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( ATTR_DATE, @@ -27,7 +28,12 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType @@ -41,8 +47,7 @@ from .const import ( CONF_CLIMATE, CONF_FLOOR_TEMP, CONF_PRECISION, - CONF_READ_PRECISION, - CONF_SET_PRECISION, + CONF_TEMPORARY_OVRD_MODE, CONNECTION_TIMEOUT, DATA_GATEWAYS, DATA_OPENTHERM_GW, @@ -59,10 +64,13 @@ from .const import ( SERVICE_SET_MAX_MOD, SERVICE_SET_OAT, SERVICE_SET_SB_TEMP, + OpenThermDataSource, + OpenThermDeviceIdentifier, ) _LOGGER = logging.getLogger(__name__) +# *_SCHEMA required for deprecated import from configuration.yaml, can be removed in 2025.4.0 CLIMATE_SCHEMA = vol.Schema( { vol.Optional(CONF_PRECISION): vol.In( @@ -85,12 +93,20 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) -PLATFORMS = [Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.SENSOR] +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CLIMATE, + Platform.SELECT, + Platform.SENSOR, + Platform.SWITCH, +] async def options_updated(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]] + gateway.options = entry.options async_dispatcher_send(hass, gateway.options_update_signal, entry) @@ -102,16 +118,34 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b gateway = OpenThermGatewayHub(hass, config_entry) hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] = gateway - if config_entry.options.get(CONF_PRECISION): - migrate_options = dict(config_entry.options) - migrate_options.update( - { - CONF_READ_PRECISION: config_entry.options[CONF_PRECISION], - CONF_SET_PRECISION: config_entry.options[CONF_PRECISION], - } + # Migration can be removed in 2025.4.0 + dev_reg = 
dr.async_get(hass) + if ( + migrate_device := dev_reg.async_get_device( + {(DOMAIN, config_entry.data[CONF_ID])} + ) + ) is not None: + dev_reg.async_update_device( + migrate_device.id, + new_identifiers={ + ( + DOMAIN, + f"{config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}", + ) + }, + ) + + # Migration can be removed in 2025.4.0 + ent_reg = er.async_get(hass) + if ( + entity_id := ent_reg.async_get_entity_id( + CLIMATE_DOMAIN, DOMAIN, config_entry.data[CONF_ID] + ) + ) is not None: + ent_reg.async_update_entity( + entity_id, + new_unique_id=f"{config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity", ) - del migrate_options[CONF_PRECISION] - hass.config_entries.async_update_entry(config_entry, options=migrate_options) config_entry.add_update_listener(options_updated) @@ -130,8 +164,20 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b return True +# Deprecated import from configuration.yaml, can be removed in 2025.4.0 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the OpenTherm Gateway component.""" + if DOMAIN in config: + ir.async_create_issue( + hass, + DOMAIN, + "deprecated_import_from_configuration_yaml", + breaks_in_ha_version="2025.4.0", + is_fixable=False, + is_persistent=False, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_import_from_configuration_yaml", + ) if not hass.config_entries.async_entries(DOMAIN) and DOMAIN in config: conf = config[DOMAIN] for device_id, device_config in conf.items(): @@ -425,12 +471,11 @@ class OpenThermGatewayHub: self.device_path = config_entry.data[CONF_DEVICE] self.hub_id = config_entry.data[CONF_ID] self.name = config_entry.data[CONF_NAME] - self.climate_config = config_entry.options + self.options = config_entry.options self.config_entry_id = config_entry.entry_id - self.status = gw_vars.DEFAULT_STATUS self.update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_update" self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_options_update" - self.gateway = pyotgw.OpenThermGateway() + self.gateway = OpenThermGateway() self.gw_version = None async def cleanup(self, event=None) -> None: @@ -441,11 +486,11 @@ class OpenThermGatewayHub: async def connect_and_subscribe(self) -> None: """Connect to serial device and subscribe report handler.""" - self.status = await self.gateway.connect(self.device_path) - if not self.status: + status = await self.gateway.connect(self.device_path) + if not status: await self.cleanup() raise ConnectionError - version_string = self.status[gw_vars.OTGW].get(gw_vars.OTGW_ABOUT) + version_string = status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_ABOUT) self.gw_version = version_string[18:] if version_string else None _LOGGER.debug( "Connected to OpenTherm Gateway %s at %s", self.gw_version, self.device_path @@ -453,25 +498,78 @@ class OpenThermGatewayHub: dev_reg = dr.async_get(self.hass) gw_dev = dev_reg.async_get_or_create( config_entry_id=self.config_entry_id, - identifiers={(DOMAIN, self.hub_id)}, - name=self.name, + identifiers={ + (DOMAIN, f"{self.hub_id}-{OpenThermDeviceIdentifier.GATEWAY}") + }, manufacturer="Schelte Bron", model="OpenTherm Gateway", + translation_key="gateway_device", sw_version=self.gw_version, ) if gw_dev.sw_version != self.gw_version: dev_reg.async_update_device(gw_dev.id, sw_version=self.gw_version) + + boiler_device = dev_reg.async_get_or_create( + config_entry_id=self.config_entry_id, + identifiers={(DOMAIN, 
f"{self.hub_id}-{OpenThermDeviceIdentifier.BOILER}")}, + translation_key="boiler_device", + ) + thermostat_device = dev_reg.async_get_or_create( + config_entry_id=self.config_entry_id, + identifiers={ + (DOMAIN, f"{self.hub_id}-{OpenThermDeviceIdentifier.THERMOSTAT}") + }, + translation_key="thermostat_device", + ) + self.hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, self.cleanup) async def handle_report(status): """Handle reports from the OpenTherm Gateway.""" _LOGGER.debug("Received report: %s", status) - self.status = status async_dispatcher_send(self.hass, self.update_signal, status) + dev_reg.async_update_device( + boiler_device.id, + manufacturer=status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_MEMBERID + ), + model_id=status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_PRODUCT_TYPE + ), + hw_version=status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_PRODUCT_VERSION + ), + sw_version=status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_OT_VERSION + ), + ) + + dev_reg.async_update_device( + thermostat_device.id, + manufacturer=status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_MASTER_MEMBERID + ), + model_id=status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_MASTER_PRODUCT_TYPE + ), + hw_version=status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_MASTER_PRODUCT_VERSION + ), + sw_version=status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_MASTER_OT_VERSION + ), + ) + self.gateway.subscribe(handle_report) @property def connected(self): """Report whether or not we are connected to the gateway.""" return self.gateway.connection.connected + + async def set_room_setpoint(self, temp) -> float: + """Set the room temperature setpoint on the gateway. Return the new temperature.""" + return await self.gateway.set_target_temp( + temp, self.options.get(CONF_TEMPORARY_OVRD_MODE, True) + ) diff --git a/homeassistant/components/opentherm_gw/binary_sensor.py b/homeassistant/components/opentherm_gw/binary_sensor.py index f978a2695d7..5d542bedc07 100644 --- a/homeassistant/components/opentherm_gw/binary_sensor.py +++ b/homeassistant/components/opentherm_gw/binary_sensor.py @@ -5,281 +5,387 @@ from dataclasses import dataclass from pyotgw import vars as gw_vars from homeassistant.components.binary_sensor import ( - ENTITY_ID_FORMAT, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ID +from homeassistant.const import CONF_ID, EntityCategory from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import OpenThermGatewayHub -from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW -from .entity import OpenThermEntity, OpenThermEntityDescription +from .const import ( + BOILER_DEVICE_DESCRIPTION, + DATA_GATEWAYS, + DATA_OPENTHERM_GW, + GATEWAY_DEVICE_DESCRIPTION, + THERMOSTAT_DEVICE_DESCRIPTION, + OpenThermDataSource, +) +from .entity import OpenThermEntityDescription, OpenThermStatusEntity @dataclass(frozen=True, kw_only=True) class OpenThermBinarySensorEntityDescription( - BinarySensorEntityDescription, OpenThermEntityDescription + OpenThermEntityDescription, BinarySensorEntityDescription ): """Describes opentherm_gw binary sensor entity.""" -BINARY_SENSOR_INFO: tuple[ - tuple[list[str], OpenThermBinarySensorEntityDescription], ... 
-] = ( - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_CH_ENABLED, - friendly_name_format="Thermostat Central Heating {}", - ), +BINARY_SENSOR_DESCRIPTIONS: tuple[OpenThermBinarySensorEntityDescription, ...] = ( + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FAULT_IND, + translation_key="fault_indication", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_DHW_ENABLED, - friendly_name_format="Thermostat Hot Water {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_ACTIVE, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "1"}, + device_class=BinarySensorDeviceClass.RUNNING, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_COOLING_ENABLED, - friendly_name_format="Thermostat Cooling {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_ACTIVE, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "2"}, + device_class=BinarySensorDeviceClass.RUNNING, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_OTC_ENABLED, - friendly_name_format="Thermostat Outside Temperature Correction {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_ACTIVE, + translation_key="hot_water", + device_class=BinarySensorDeviceClass.RUNNING, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_CH2_ENABLED, - friendly_name_format="Thermostat Central Heating 2 {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FLAME_ON, + translation_key="flame", + device_class=BinarySensorDeviceClass.HEAT, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_FAULT_IND, - friendly_name_format="Boiler Fault {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_ACTIVE, + translation_key="cooling", + device_class=BinarySensorDeviceClass.RUNNING, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH_ACTIVE, - friendly_name_format="Boiler Central Heating {}", - device_class=BinarySensorDeviceClass.HEAT, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DIAG_IND, + translation_key="diagnostic_indication", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_ACTIVE, - friendly_name_format="Boiler Hot Water {}", - device_class=BinarySensorDeviceClass.HEAT, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_PRESENT, + translation_key="supports_hot_water", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - 
key=gw_vars.DATA_SLAVE_FLAME_ON, - friendly_name_format="Boiler Flame {}", - device_class=BinarySensorDeviceClass.HEAT, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CONTROL_TYPE, + translation_key="control_type", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_COOLING_ACTIVE, - friendly_name_format="Boiler Cooling {}", - device_class=BinarySensorDeviceClass.COLD, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_SUPPORTED, + translation_key="supports_cooling", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH2_ACTIVE, - friendly_name_format="Boiler Central Heating 2 {}", - device_class=BinarySensorDeviceClass.HEAT, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_CONFIG, + translation_key="hot_water_config", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_DIAG_IND, - friendly_name_format="Boiler Diagnostics {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP, + translation_key="supports_pump_control", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_PRESENT, - friendly_name_format="Boiler Hot Water Present {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_PRESENT, + translation_key="supports_ch_2", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_CONTROL_TYPE, - friendly_name_format="Boiler Control Type {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_SERVICE_REQ, + translation_key="service_required", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_COOLING_SUPPORTED, - friendly_name_format="Boiler Cooling Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_REMOTE_RESET, + translation_key="supports_remote_reset", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_CONFIG, - friendly_name_format="Boiler Hot Water Configuration {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_LOW_WATER_PRESS, + translation_key="low_water_pressure", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP, - friendly_name_format="Boiler Pump Commands Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_GAS_FAULT, + translation_key="gas_fault", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH2_PRESENT, - 
friendly_name_format="Boiler Central Heating 2 Present {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_AIR_PRESS_FAULT, + translation_key="air_pressure_fault", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_SERVICE_REQ, - friendly_name_format="Boiler Service Required {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_WATER_OVERTEMP, + translation_key="water_overtemperature", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_REMOTE_RESET, - friendly_name_format="Boiler Remote Reset Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_MAX_CH, + translation_key="supports_central_heating_setpoint_transfer", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_LOW_WATER_PRESS, - friendly_name_format="Boiler Low Water Pressure {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_MAX_CH, + translation_key="supports_central_heating_setpoint_writing", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_GAS_FAULT, - friendly_name_format="Boiler Gas Fault {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_DHW, + translation_key="supports_hot_water_setpoint_transfer", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_AIR_PRESS_FAULT, - friendly_name_format="Boiler Air Pressure Fault {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_DHW, + translation_key="supports_hot_water_setpoint_writing", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_WATER_OVERTEMP, - friendly_name_format="Boiler Water Overtemperature {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_GPIO_A_STATE, + translation_key="gpio_state_n", + translation_placeholders={"gpio_id": "A"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_REMOTE_TRANSFER_DHW, - friendly_name_format="Remote Hot Water Setpoint Transfer Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_GPIO_B_STATE, + translation_key="gpio_state_n", + translation_placeholders={"gpio_id": "B"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_REMOTE_TRANSFER_MAX_CH, - friendly_name_format="Remote Maximum Central Heating Setpoint Write Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_IGNORE_TRANSITIONS, + 
translation_key="ignore_transitions", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_REMOTE_RW_DHW, - friendly_name_format="Remote Hot Water Setpoint Write Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_OVRD_HB, + translation_key="override_high_byte", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_REMOTE_RW_MAX_CH, - friendly_name_format="Remote Central Heating Setpoint Write Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH_ENABLED, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "1"}, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_ROVRD_MAN_PRIO, - friendly_name_format="Remote Override Manual Change Priority {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH2_ENABLED, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "2"}, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_ROVRD_AUTO_PRIO, - friendly_name_format="Remote Override Program Change Priority {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_DHW_ENABLED, + translation_key="hot_water", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermBinarySensorEntityDescription( - key=gw_vars.OTGW_GPIO_A_STATE, - friendly_name_format="Gateway GPIO A {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_COOLING_ENABLED, + translation_key="cooling", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermBinarySensorEntityDescription( - key=gw_vars.OTGW_GPIO_B_STATE, - friendly_name_format="Gateway GPIO B {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_OTC_ENABLED, + translation_key="outside_temp_correction", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermBinarySensorEntityDescription( - key=gw_vars.OTGW_IGNORE_TRANSITIONS, - friendly_name_format="Gateway Ignore Transitions {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_MAN_PRIO, + translation_key="override_manual_change_prio", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermBinarySensorEntityDescription( - key=gw_vars.OTGW_OVRD_HB, - friendly_name_format="Gateway Override High Byte {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_AUTO_PRIO, + translation_key="override_program_change_prio", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FAULT_IND, + translation_key="fault_indication", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_ACTIVE, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "1"}, + device_class=BinarySensorDeviceClass.RUNNING, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + 
key=gw_vars.DATA_SLAVE_CH2_ACTIVE, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "2"}, + device_class=BinarySensorDeviceClass.RUNNING, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_ACTIVE, + translation_key="hot_water", + device_class=BinarySensorDeviceClass.RUNNING, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FLAME_ON, + translation_key="flame", + device_class=BinarySensorDeviceClass.HEAT, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_ACTIVE, + translation_key="cooling", + device_class=BinarySensorDeviceClass.RUNNING, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DIAG_IND, + translation_key="diagnostic_indication", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_PRESENT, + translation_key="supports_hot_water", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CONTROL_TYPE, + translation_key="control_type", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_SUPPORTED, + translation_key="supports_cooling", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_CONFIG, + translation_key="hot_water_config", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP, + translation_key="supports_pump_control", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_PRESENT, + translation_key="supports_ch_2", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_SERVICE_REQ, + translation_key="service_required", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_REMOTE_RESET, + translation_key="supports_remote_reset", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_LOW_WATER_PRESS, + translation_key="low_water_pressure", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_GAS_FAULT, + translation_key="gas_fault", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_AIR_PRESS_FAULT, + translation_key="air_pressure_fault", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_WATER_OVERTEMP, + translation_key="water_overtemperature", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_MAX_CH, + 
translation_key="supports_central_heating_setpoint_transfer", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_MAX_CH, + translation_key="supports_central_heating_setpoint_writing", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_DHW, + translation_key="supports_hot_water_setpoint_transfer", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_DHW, + translation_key="supports_hot_water_setpoint_writing", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH_ENABLED, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "1"}, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH2_ENABLED, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "2"}, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_DHW_ENABLED, + translation_key="hot_water", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_COOLING_ENABLED, + translation_key="cooling", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_OTC_ENABLED, + translation_key="outside_temp_correction", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_MAN_PRIO, + translation_key="override_manual_change_prio", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_AUTO_PRIO, + translation_key="override_program_change_prio", + device_description=BOILER_DEVICE_DESCRIPTION, ), ) @@ -293,35 +399,22 @@ async def async_setup_entry( gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] async_add_entities( - OpenThermBinarySensor(gw_hub, source, description) - for sources, description in BINARY_SENSOR_INFO - for source in sources + OpenThermBinarySensor(gw_hub, description) + for description in BINARY_SENSOR_DESCRIPTIONS ) -class OpenThermBinarySensor(OpenThermEntity, BinarySensorEntity): +class OpenThermBinarySensor(OpenThermStatusEntity, BinarySensorEntity): """Represent an OpenTherm Gateway binary sensor.""" + _attr_entity_category = EntityCategory.DIAGNOSTIC entity_description: OpenThermBinarySensorEntityDescription - def __init__( - self, - gw_hub: OpenThermGatewayHub, - source: str, - description: OpenThermBinarySensorEntityDescription, - ) -> None: - """Initialize the binary sensor.""" - self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, - f"{description.key}_{source}_{gw_hub.hub_id}", - hass=gw_hub.hass, - ) - super().__init__(gw_hub, source, description) - @callback - def receive_report(self, status: dict[str, dict]) -> None: + def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: """Handle status updates from the component.""" - self._attr_available = self._gateway.connected - state = status[self._source].get(self.entity_description.key) + state = status[self.entity_description.device_description.data_source].get( + self.entity_description.key + ) self._attr_is_on = None if state is None else bool(state) 
self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/button.py b/homeassistant/components/opentherm_gw/button.py new file mode 100644 index 00000000000..bac50295199 --- /dev/null +++ b/homeassistant/components/opentherm_gw/button.py @@ -0,0 +1,63 @@ +"""Support for OpenTherm Gateway buttons.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +import pyotgw.vars as gw_vars + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ID, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import OpenThermGatewayHub +from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, GATEWAY_DEVICE_DESCRIPTION +from .entity import OpenThermEntity, OpenThermEntityDescription + + +@dataclass(frozen=True, kw_only=True) +class OpenThermButtonEntityDescription( + ButtonEntityDescription, OpenThermEntityDescription +): + """Describes an opentherm_gw button entity.""" + + action: Callable[[OpenThermGatewayHub], Awaitable] + + +BUTTON_DESCRIPTIONS: tuple[OpenThermButtonEntityDescription, ...] = ( + OpenThermButtonEntityDescription( + key="restart_button", + device_class=ButtonDeviceClass.RESTART, + device_description=GATEWAY_DEVICE_DESCRIPTION, + action=lambda hub: hub.gateway.set_mode(gw_vars.OTGW_MODE_RESET), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the OpenTherm Gateway buttons.""" + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] + + async_add_entities( + OpenThermButton(gw_hub, description) for description in BUTTON_DESCRIPTIONS + ) + + +class OpenThermButton(OpenThermEntity, ButtonEntity): + """Representation of an OpenTherm button.""" + + _attr_entity_category = EntityCategory.CONFIG + entity_description: OpenThermButtonEntityDescription + + async def async_press(self) -> None: + """Perform button action.""" + await self.entity_description.action(self._gateway) diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index bf295fb1fb7..e8aa99f7325 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -2,50 +2,51 @@ from __future__ import annotations +from dataclasses import dataclass import logging +from types import MappingProxyType from typing import Any from pyotgw import vars as gw_vars from homeassistant.components.climate import ( - ENTITY_ID_FORMAT, PRESET_AWAY, PRESET_NONE, ClimateEntity, + ClimateEntityDescription, ClimateEntityFeature, HVACAction, HVACMode, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_TEMPERATURE, - CONF_ID, - PRECISION_HALVES, - PRECISION_TENTHS, - PRECISION_WHOLE, - UnitOfTemperature, -) +from homeassistant.const import ATTR_TEMPERATURE, CONF_ID, UnitOfTemperature from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN +from . 
import OpenThermGatewayHub from .const import ( - CONF_FLOOR_TEMP, CONF_READ_PRECISION, CONF_SET_PRECISION, - CONF_TEMPORARY_OVRD_MODE, DATA_GATEWAYS, DATA_OPENTHERM_GW, + THERMOSTAT_DEVICE_DESCRIPTION, + OpenThermDataSource, ) +from .entity import OpenThermEntityDescription, OpenThermStatusEntity _LOGGER = logging.getLogger(__name__) DEFAULT_FLOOR_TEMP = False +@dataclass(frozen=True, kw_only=True) +class OpenThermClimateEntityDescription( + ClimateEntityDescription, OpenThermEntityDescription +): + """Describes an opentherm_gw climate entity.""" + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -56,6 +57,10 @@ async def async_setup_entry( ents.append( OpenThermClimate( hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]], + OpenThermClimateEntityDescription( + key="thermostat_entity", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), config_entry.options, ) ) @@ -63,98 +68,79 @@ async def async_setup_entry( async_add_entities(ents) -class OpenThermClimate(ClimateEntity): +class OpenThermClimate(OpenThermStatusEntity, ClimateEntity): """Representation of a climate device.""" - _attr_should_poll = False _attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _attr_available = False _attr_hvac_modes = [] + _attr_name = None _attr_preset_modes = [] _attr_min_temp = 1 _attr_max_temp = 30 - _hvac_mode = HVACMode.HEAT - _current_temperature: float | None = None - _new_target_temperature: float | None = None - _target_temperature: float | None = None + _attr_hvac_mode = HVACMode.HEAT _away_mode_a: int | None = None _away_mode_b: int | None = None _away_state_a = False _away_state_b = False - _current_operation: HVACAction | None = None - _enable_turn_on_off_backwards_compatibility = False - def __init__(self, gw_hub, options): - """Initialize the device.""" - self._gateway = gw_hub - self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, gw_hub.hub_id, hass=gw_hub.hass - ) - self.friendly_name = gw_hub.name - self._attr_name = self.friendly_name - self.floor_temp = options.get(CONF_FLOOR_TEMP, DEFAULT_FLOOR_TEMP) - self.temp_read_precision = options.get(CONF_READ_PRECISION) - self.temp_set_precision = options.get(CONF_SET_PRECISION) - self.temporary_ovrd_mode = options.get(CONF_TEMPORARY_OVRD_MODE, True) - self._unsub_options = None - self._unsub_updates = None - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_hub.hub_id)}, - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - name=gw_hub.name, - sw_version=gw_hub.gw_version, - ) - self._attr_unique_id = gw_hub.hub_id + _target_temperature: float | None = None + _new_target_temperature: float | None = None + entity_description: OpenThermClimateEntityDescription + + def __init__( + self, + gw_hub: OpenThermGatewayHub, + description: OpenThermClimateEntityDescription, + options: MappingProxyType[str, Any], + ) -> None: + """Initialize the entity.""" + super().__init__(gw_hub, description) + if CONF_READ_PRECISION in options: + self._attr_precision = options[CONF_READ_PRECISION] + self._attr_target_temperature_step = options.get(CONF_SET_PRECISION) @callback def update_options(self, entry): """Update climate entity options.""" - self.floor_temp = entry.options[CONF_FLOOR_TEMP] - self.temp_read_precision = entry.options[CONF_READ_PRECISION] - self.temp_set_precision = entry.options[CONF_SET_PRECISION] - self.temporary_ovrd_mode = 
entry.options[CONF_TEMPORARY_OVRD_MODE] + self._attr_precision = entry.options[CONF_READ_PRECISION] + self._attr_target_temperature_step = entry.options[CONF_SET_PRECISION] self.async_write_ha_state() async def async_added_to_hass(self) -> None: """Connect to the OpenTherm Gateway device.""" - _LOGGER.debug("Added OpenTherm Gateway climate device %s", self.friendly_name) - self._unsub_updates = async_dispatcher_connect( - self.hass, self._gateway.update_signal, self.receive_report + await super().async_added_to_hass() + self.async_on_remove( + async_dispatcher_connect( + self.hass, self._gateway.options_update_signal, self.update_options + ) ) - self._unsub_options = async_dispatcher_connect( - self.hass, self._gateway.options_update_signal, self.update_options - ) - - async def async_will_remove_from_hass(self) -> None: - """Unsubscribe from updates from the component.""" - _LOGGER.debug("Removing OpenTherm Gateway climate %s", self.friendly_name) - self._unsub_options() - self._unsub_updates() @callback - def receive_report(self, status): + def receive_report(self, status: dict[OpenThermDataSource, dict]): """Receive and handle a new report from the Gateway.""" - self._attr_available = self._gateway.connected - ch_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_CH_ACTIVE) - flame_on = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_FLAME_ON) - cooling_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_COOLING_ACTIVE) + ch_active = status[OpenThermDataSource.BOILER].get(gw_vars.DATA_SLAVE_CH_ACTIVE) + flame_on = status[OpenThermDataSource.BOILER].get(gw_vars.DATA_SLAVE_FLAME_ON) + cooling_active = status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_COOLING_ACTIVE + ) if ch_active and flame_on: - self._current_operation = HVACAction.HEATING - self._hvac_mode = HVACMode.HEAT + self._attr_hvac_action = HVACAction.HEATING + self._attr_hvac_mode = HVACMode.HEAT elif cooling_active: - self._current_operation = HVACAction.COOLING - self._hvac_mode = HVACMode.COOL + self._attr_hvac_action = HVACAction.COOLING + self._attr_hvac_mode = HVACMode.COOL else: - self._current_operation = HVACAction.IDLE + self._attr_hvac_action = HVACAction.IDLE - self._current_temperature = status[gw_vars.THERMOSTAT].get( + self._attr_current_temperature = status[OpenThermDataSource.THERMOSTAT].get( gw_vars.DATA_ROOM_TEMP ) - temp_upd = status[gw_vars.THERMOSTAT].get(gw_vars.DATA_ROOM_SETPOINT) + temp_upd = status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_ROOM_SETPOINT + ) if self._target_temperature != temp_upd: self._new_target_temperature = None @@ -162,82 +148,35 @@ class OpenThermClimate(ClimateEntity): # GPIO mode 5: 0 == Away # GPIO mode 6: 1 == Away - gpio_a_state = status[gw_vars.OTGW].get(gw_vars.OTGW_GPIO_A) - if gpio_a_state == 5: - self._away_mode_a = 0 - elif gpio_a_state == 6: - self._away_mode_a = 1 - else: - self._away_mode_a = None - gpio_b_state = status[gw_vars.OTGW].get(gw_vars.OTGW_GPIO_B) - if gpio_b_state == 5: - self._away_mode_b = 0 - elif gpio_b_state == 6: - self._away_mode_b = 1 - else: - self._away_mode_b = None - if self._away_mode_a is not None: - self._away_state_a = ( - status[gw_vars.OTGW].get(gw_vars.OTGW_GPIO_A_STATE) == self._away_mode_a + gpio_a_state = status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_GPIO_A) + gpio_b_state = status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_GPIO_B) + self._away_mode_a = gpio_a_state - 5 if gpio_a_state in (5, 6) else None + self._away_mode_b = gpio_b_state - 5 if gpio_b_state in (5, 6) else None + 
self._away_state_a = ( + ( + status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_GPIO_A_STATE) + == self._away_mode_a ) - if self._away_mode_b is not None: - self._away_state_b = ( - status[gw_vars.OTGW].get(gw_vars.OTGW_GPIO_B_STATE) == self._away_mode_b + if self._away_mode_a is not None + else False + ) + self._away_state_b = ( + ( + status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_GPIO_B_STATE) + == self._away_mode_b ) + if self._away_mode_b is not None + else False + ) self.async_write_ha_state() @property - def precision(self): - """Return the precision of the system.""" - if self.temp_read_precision: - return self.temp_read_precision - if self.hass.config.units.temperature_unit == UnitOfTemperature.CELSIUS: - return PRECISION_HALVES - return PRECISION_WHOLE - - @property - def hvac_action(self) -> HVACAction | None: - """Return current HVAC operation.""" - return self._current_operation - - @property - def hvac_mode(self) -> HVACMode: - """Return current HVAC mode.""" - return self._hvac_mode - - def set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set the HVAC mode.""" - _LOGGER.warning("Changing HVAC mode is not supported") - - @property - def current_temperature(self): - """Return the current temperature.""" - if self._current_temperature is None: - return None - if self.floor_temp is True: - if self.precision == PRECISION_HALVES: - return int(2 * self._current_temperature) / 2 - if self.precision == PRECISION_TENTHS: - return int(10 * self._current_temperature) / 10 - return int(self._current_temperature) - return self._current_temperature - - @property - def target_temperature(self): + def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" return self._new_target_temperature or self._target_temperature @property - def target_temperature_step(self): - """Return the supported step of target temperature.""" - if self.temp_set_precision: - return self.temp_set_precision - if self.hass.config.units.temperature_unit == UnitOfTemperature.CELSIUS: - return PRECISION_HALVES - return PRECISION_WHOLE - - @property - def preset_mode(self): + def preset_mode(self) -> str: """Return current preset mode.""" if self._away_state_a or self._away_state_b: return PRESET_AWAY @@ -253,7 +192,5 @@ class OpenThermClimate(ClimateEntity): temp = float(kwargs[ATTR_TEMPERATURE]) if temp == self.target_temperature: return - self._new_target_temperature = await self._gateway.gateway.set_target_temp( - temp, self.temporary_ovrd_mode - ) + self._new_target_temperature = await self._gateway.set_room_setpoint(temp) self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index 1ebf462a5c7..80c16ee88e1 100644 --- a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -34,6 +34,7 @@ from .const import ( CONF_SET_PRECISION, CONF_TEMPORARY_OVRD_MODE, CONNECTION_TIMEOUT, + OpenThermDataSource, ) @@ -48,9 +49,11 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OpenThermGwOptionsFlow: """Get the options flow for this handler.""" - return OpenThermGwOptionsFlow(config_entry) + return OpenThermGwOptionsFlow() - async def async_step_init(self, info=None): + async def async_step_init( + self, info: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle config flow initiation.""" if info: name = info[CONF_NAME] @@ -72,7 +75,7 @@ class OpenThermGwConfigFlow(ConfigFlow, 
domain=DOMAIN): await otgw.disconnect() if not status: raise ConnectionError - return status[gw_vars.OTGW].get(gw_vars.OTGW_ABOUT) + return status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_ABOUT) try: async with asyncio.timeout(CONNECTION_TIMEOUT): @@ -92,19 +95,20 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): """Handle manual initiation of the config flow.""" return await self.async_step_init(user_input) - async def async_step_import(self, import_config): + # Deprecated import from configuration.yaml, can be removed in 2025.4.0 + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import an OpenTherm Gateway device as a config entry. This flow is triggered by `async_setup` for configured devices. """ formatted_config = { - CONF_NAME: import_config.get(CONF_NAME, import_config[CONF_ID]), - CONF_DEVICE: import_config[CONF_DEVICE], - CONF_ID: import_config[CONF_ID], + CONF_NAME: import_data.get(CONF_NAME, import_data[CONF_ID]), + CONF_DEVICE: import_data[CONF_DEVICE], + CONF_ID: import_data[CONF_ID], } return await self.async_step_init(info=formatted_config) - def _show_form(self, errors=None): + def _show_form(self, errors: dict[str, str] | None = None) -> ConfigFlowResult: """Show the config flow form with possible errors.""" return self.async_show_form( step_id="init", @@ -128,11 +132,9 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): class OpenThermGwOptionsFlow(OptionsFlow): """Handle opentherm_gw options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the options flow.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the opentherm_gw options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/opentherm_gw/const.py b/homeassistant/components/opentherm_gw/const.py index c1932c7b2bd..c842ff568ae 100644 --- a/homeassistant/components/opentherm_gw/const.py +++ b/homeassistant/components/opentherm_gw/const.py @@ -1,5 +1,10 @@ """Constants for the opentherm_gw integration.""" +from dataclasses import dataclass +from enum import StrEnum + +from pyotgw import vars as gw_vars + ATTR_GW_ID = "gateway_id" ATTR_LEVEL = "level" ATTR_DHW_OVRD = "dhw_override" @@ -33,3 +38,41 @@ SERVICE_SET_MAX_MOD = "set_max_modulation" SERVICE_SET_OAT = "set_outside_temperature" SERVICE_SET_SB_TEMP = "set_setback_temperature" SERVICE_SEND_TRANSP_CMD = "send_transparent_command" + + +class OpenThermDataSource(StrEnum): + """List valid OpenTherm data sources.""" + + BOILER = gw_vars.BOILER + GATEWAY = gw_vars.OTGW + THERMOSTAT = gw_vars.THERMOSTAT + + +class OpenThermDeviceIdentifier(StrEnum): + """List valid OpenTherm device identifiers.""" + + BOILER = "boiler" + GATEWAY = "gateway" + THERMOSTAT = "thermostat" + + +@dataclass(frozen=True, kw_only=True) +class OpenThermDeviceDescription: + """Describe OpenTherm device properties.""" + + data_source: OpenThermDataSource + device_identifier: OpenThermDeviceIdentifier + + +BOILER_DEVICE_DESCRIPTION = OpenThermDeviceDescription( + data_source=OpenThermDataSource.BOILER, + device_identifier=OpenThermDeviceIdentifier.BOILER, +) +GATEWAY_DEVICE_DESCRIPTION = OpenThermDeviceDescription( + data_source=OpenThermDataSource.GATEWAY, + device_identifier=OpenThermDeviceIdentifier.GATEWAY, +) +THERMOSTAT_DEVICE_DESCRIPTION = OpenThermDeviceDescription( + 
data_source=OpenThermDataSource.THERMOSTAT, + device_identifier=OpenThermDeviceIdentifier.THERMOSTAT, +) diff --git a/homeassistant/components/opentherm_gw/entity.py b/homeassistant/components/opentherm_gw/entity.py index a1035b946c2..e87a6c182aa 100644 --- a/homeassistant/components/opentherm_gw/entity.py +++ b/homeassistant/components/opentherm_gw/entity.py @@ -10,7 +10,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity, EntityDescription from . import OpenThermGatewayHub -from .const import DOMAIN +from .const import DOMAIN, OpenThermDataSource, OpenThermDeviceDescription _LOGGER = logging.getLogger(__name__) @@ -24,45 +24,45 @@ TRANSLATE_SOURCE = { class OpenThermEntityDescription(EntityDescription): """Describe common opentherm_gw entity properties.""" - friendly_name_format: str + device_description: OpenThermDeviceDescription class OpenThermEntity(Entity): - """Represent an OpenTherm Gateway entity.""" + """Represent an OpenTherm entity.""" + _attr_has_entity_name = True _attr_should_poll = False - _attr_entity_registry_enabled_default = False - _attr_available = False entity_description: OpenThermEntityDescription def __init__( self, gw_hub: OpenThermGatewayHub, - source: str, description: OpenThermEntityDescription, ) -> None: """Initialize the entity.""" self.entity_description = description self._gateway = gw_hub - self._source = source - friendly_name_format = ( - f"{description.friendly_name_format} ({TRANSLATE_SOURCE[source]})" - if TRANSLATE_SOURCE[source] is not None - else description.friendly_name_format - ) - self._attr_name = friendly_name_format.format(gw_hub.name) - self._attr_unique_id = f"{gw_hub.hub_id}-{source}-{description.key}" + self._attr_unique_id = f"{gw_hub.hub_id}-{description.device_description.device_identifier}-{description.key}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_hub.hub_id)}, - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - name=gw_hub.name, - sw_version=gw_hub.gw_version, + identifiers={ + ( + DOMAIN, + f"{gw_hub.hub_id}-{description.device_description.device_identifier}", + ) + }, ) + @property + def available(self) -> bool: + """Return connection status of the hub to indicate availability.""" + return self._gateway.connected + + +class OpenThermStatusEntity(OpenThermEntity): + """Represent an OpenTherm entity that receives status updates.""" + async def async_added_to_hass(self) -> None: """Subscribe to updates from the component.""" - _LOGGER.debug("Added OpenTherm Gateway entity %s", self._attr_name) self.async_on_remove( async_dispatcher_connect( self.hass, self._gateway.update_signal, self.receive_report @@ -70,7 +70,7 @@ class OpenThermEntity(Entity): ) @callback - def receive_report(self, status: dict[str, dict]) -> None: + def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: """Handle status updates from the component.""" # Must be implemented at the platform level. 
raise NotImplementedError diff --git a/homeassistant/components/opentherm_gw/icons.json b/homeassistant/components/opentherm_gw/icons.json index 13dbe0a70a1..37942aa0e63 100644 --- a/homeassistant/components/opentherm_gw/icons.json +++ b/homeassistant/components/opentherm_gw/icons.json @@ -1,16 +1,40 @@ { "services": { - "reset_gateway": "mdi:reload", - "set_central_heating_ovrd": "mdi:heat-wave", - "set_clock": "mdi:clock", - "set_control_setpoint": "mdi:thermometer-lines", - "set_hot_water_ovrd": "mdi:thermometer-lines", - "set_hot_water_setpoint": "mdi:thermometer-lines", - "set_gpio_mode": "mdi:cable-data", - "set_led_mode": "mdi:led-on", - "set_max_modulation": "mdi:thermometer-lines", - "set_outside_temperature": "mdi:thermometer-lines", - "set_setback_temperature": "mdi:thermometer-lines", - "send_transparent_command": "mdi:console" + "reset_gateway": { + "service": "mdi:reload" + }, + "set_central_heating_ovrd": { + "service": "mdi:heat-wave" + }, + "set_clock": { + "service": "mdi:clock" + }, + "set_control_setpoint": { + "service": "mdi:thermometer-lines" + }, + "set_hot_water_ovrd": { + "service": "mdi:thermometer-lines" + }, + "set_hot_water_setpoint": { + "service": "mdi:thermometer-lines" + }, + "set_gpio_mode": { + "service": "mdi:cable-data" + }, + "set_led_mode": { + "service": "mdi:led-on" + }, + "set_max_modulation": { + "service": "mdi:thermometer-lines" + }, + "set_outside_temperature": { + "service": "mdi:thermometer-lines" + }, + "set_setback_temperature": { + "service": "mdi:thermometer-lines" + }, + "send_transparent_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/opentherm_gw/manifest.json b/homeassistant/components/opentherm_gw/manifest.json index b6ebef6e83c..ecd0a6b99d5 100644 --- a/homeassistant/components/opentherm_gw/manifest.json +++ b/homeassistant/components/opentherm_gw/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/opentherm_gw", "iot_class": "local_push", "loggers": ["pyotgw"], - "requirements": ["pyotgw==2.2.0"] + "requirements": ["pyotgw==2.2.2"] } diff --git a/homeassistant/components/opentherm_gw/select.py b/homeassistant/components/opentherm_gw/select.py new file mode 100644 index 00000000000..cee1632dc48 --- /dev/null +++ b/homeassistant/components/opentherm_gw/select.py @@ -0,0 +1,270 @@ +"""Support for OpenTherm Gateway select entities.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from enum import IntEnum, StrEnum +from functools import partial + +from pyotgw.vars import ( + OTGW_GPIO_A, + OTGW_GPIO_B, + OTGW_LED_A, + OTGW_LED_B, + OTGW_LED_C, + OTGW_LED_D, + OTGW_LED_E, + OTGW_LED_F, +) + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ID, EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import OpenThermGatewayHub +from .const import ( + DATA_GATEWAYS, + DATA_OPENTHERM_GW, + GATEWAY_DEVICE_DESCRIPTION, + OpenThermDataSource, +) +from .entity import OpenThermEntityDescription, OpenThermStatusEntity + + +class OpenThermSelectGPIOMode(StrEnum): + """OpenTherm Gateway GPIO modes.""" + + INPUT = "input" + GROUND = "ground" + VCC = "vcc" + LED_E = "led_e" + LED_F = "led_f" + HOME = "home" + AWAY = "away" + DS1820 = "ds1820" + DHW_BLOCK = "dhw_block" + + +class OpenThermSelectLEDMode(StrEnum): + """OpenTherm Gateway LED modes.""" + + RX_ANY = "receive_any" + TX_ANY = "transmit_any" + THERMOSTAT_TRAFFIC = "thermostat_traffic" + BOILER_TRAFFIC = "boiler_traffic" + SETPOINT_OVERRIDE_ACTIVE = "setpoint_override_active" + FLAME_ON = "flame_on" + CENTRAL_HEATING_ON = "central_heating_on" + HOT_WATER_ON = "hot_water_on" + COMFORT_MODE_ON = "comfort_mode_on" + TX_ERROR_DETECTED = "transmit_error_detected" + BOILER_MAINTENANCE_REQUIRED = "boiler_maintenance_required" + RAISED_POWER_MODE_ACTIVE = "raised_power_mode_active" + + +class PyotgwGPIOMode(IntEnum): + """pyotgw GPIO modes.""" + + INPUT = 0 + GROUND = 1 + VCC = 2 + LED_E = 3 + LED_F = 4 + HOME = 5 + AWAY = 6 + DS1820 = 7 + DHW_BLOCK = 8 + + +class PyotgwLEDMode(StrEnum): + """pyotgw LED modes.""" + + RX_ANY = "R" + TX_ANY = "X" + THERMOSTAT_TRAFFIC = "T" + BOILER_TRAFFIC = "B" + SETPOINT_OVERRIDE_ACTIVE = "O" + FLAME_ON = "F" + CENTRAL_HEATING_ON = "H" + HOT_WATER_ON = "W" + COMFORT_MODE_ON = "C" + TX_ERROR_DETECTED = "E" + BOILER_MAINTENANCE_REQUIRED = "M" + RAISED_POWER_MODE_ACTIVE = "P" + + +def pyotgw_led_mode_to_ha_led_mode( + pyotgw_led_mode: PyotgwLEDMode, +) -> OpenThermSelectLEDMode | None: + """Convert pyotgw LED mode to Home Assistant LED mode.""" + return ( + OpenThermSelectLEDMode[PyotgwLEDMode(pyotgw_led_mode).name] + if pyotgw_led_mode in PyotgwLEDMode + else None + ) + + +async def set_gpio_mode( + gpio_id: str, gw_hub: OpenThermGatewayHub, mode: str +) -> OpenThermSelectGPIOMode | None: + """Set gpio mode, return selected option or None.""" + value = await gw_hub.gateway.set_gpio_mode( + gpio_id, PyotgwGPIOMode[OpenThermSelectGPIOMode(mode).name] + ) + return ( + OpenThermSelectGPIOMode[PyotgwGPIOMode(value).name] + if value in PyotgwGPIOMode + else None + ) + + +async def set_led_mode( + led_id: str, gw_hub: OpenThermGatewayHub, mode: str +) -> OpenThermSelectLEDMode | None: + """Set LED mode, return selected option or None.""" + value = await gw_hub.gateway.set_led_mode( + led_id, PyotgwLEDMode[OpenThermSelectLEDMode(mode).name] + ) + return ( + OpenThermSelectLEDMode[PyotgwLEDMode(value).name] + if value in PyotgwLEDMode + else None + ) + + +@dataclass(frozen=True, kw_only=True) +class OpenThermSelectEntityDescription( + OpenThermEntityDescription, SelectEntityDescription +): + """Describes an opentherm_gw select entity.""" + + select_action: Callable[[OpenThermGatewayHub, str], Awaitable] + convert_pyotgw_state_to_ha_state: Callable + + +SELECT_DESCRIPTIONS: tuple[OpenThermSelectEntityDescription, ...] 
= ( + OpenThermSelectEntityDescription( + key=OTGW_GPIO_A, + translation_key="gpio_mode_n", + translation_placeholders={"gpio_id": "A"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + options=[ + mode + for mode in OpenThermSelectGPIOMode + if mode != OpenThermSelectGPIOMode.DS1820 + ], + select_action=partial(set_gpio_mode, "A"), + convert_pyotgw_state_to_ha_state=( + lambda state: OpenThermSelectGPIOMode[PyotgwGPIOMode(state).name] + if state in PyotgwGPIOMode + else None + ), + ), + OpenThermSelectEntityDescription( + key=OTGW_GPIO_B, + translation_key="gpio_mode_n", + translation_placeholders={"gpio_id": "B"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + options=list(OpenThermSelectGPIOMode), + select_action=partial(set_gpio_mode, "B"), + convert_pyotgw_state_to_ha_state=( + lambda state: OpenThermSelectGPIOMode[PyotgwGPIOMode(state).name] + if state in PyotgwGPIOMode + else None + ), + ), + OpenThermSelectEntityDescription( + key=OTGW_LED_A, + translation_key="led_mode_n", + translation_placeholders={"led_id": "A"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + options=list(OpenThermSelectLEDMode), + select_action=partial(set_led_mode, "A"), + convert_pyotgw_state_to_ha_state=pyotgw_led_mode_to_ha_led_mode, + ), + OpenThermSelectEntityDescription( + key=OTGW_LED_B, + translation_key="led_mode_n", + translation_placeholders={"led_id": "B"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + options=list(OpenThermSelectLEDMode), + select_action=partial(set_led_mode, "B"), + convert_pyotgw_state_to_ha_state=pyotgw_led_mode_to_ha_led_mode, + ), + OpenThermSelectEntityDescription( + key=OTGW_LED_C, + translation_key="led_mode_n", + translation_placeholders={"led_id": "C"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + options=list(OpenThermSelectLEDMode), + select_action=partial(set_led_mode, "C"), + convert_pyotgw_state_to_ha_state=pyotgw_led_mode_to_ha_led_mode, + ), + OpenThermSelectEntityDescription( + key=OTGW_LED_D, + translation_key="led_mode_n", + translation_placeholders={"led_id": "D"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + options=list(OpenThermSelectLEDMode), + select_action=partial(set_led_mode, "D"), + convert_pyotgw_state_to_ha_state=pyotgw_led_mode_to_ha_led_mode, + ), + OpenThermSelectEntityDescription( + key=OTGW_LED_E, + translation_key="led_mode_n", + translation_placeholders={"led_id": "E"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + options=list(OpenThermSelectLEDMode), + select_action=partial(set_led_mode, "E"), + convert_pyotgw_state_to_ha_state=pyotgw_led_mode_to_ha_led_mode, + ), + OpenThermSelectEntityDescription( + key=OTGW_LED_F, + translation_key="led_mode_n", + translation_placeholders={"led_id": "F"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + options=list(OpenThermSelectLEDMode), + select_action=partial(set_led_mode, "F"), + convert_pyotgw_state_to_ha_state=pyotgw_led_mode_to_ha_led_mode, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the OpenTherm Gateway select entities.""" + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] + + async_add_entities( + OpenThermSelect(gw_hub, description) for description in SELECT_DESCRIPTIONS + ) + + +class OpenThermSelect(OpenThermStatusEntity, SelectEntity): + """Represent an OpenTherm Gateway select.""" + + _attr_current_option = None + _attr_entity_category = EntityCategory.CONFIG + entity_description: 
OpenThermSelectEntityDescription + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + new_option = await self.entity_description.select_action(self._gateway, option) + if new_option is not None: + self._attr_current_option = new_option + self.async_write_ha_state() + + @callback + def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: + """Handle status updates from the component.""" + state = status[self.entity_description.device_description.data_source].get( + self.entity_description.key + ) + self._attr_current_option = ( + self.entity_description.convert_pyotgw_state_to_ha_state(state) + ) + self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/sensor.py b/homeassistant/components/opentherm_gw/sensor.py index fb30b2ce35c..5ccb4166665 100644 --- a/homeassistant/components/opentherm_gw/sensor.py +++ b/homeassistant/components/opentherm_gw/sensor.py @@ -5,7 +5,6 @@ from dataclasses import dataclass import pyotgw.vars as gw_vars from homeassistant.components.sensor import ( - ENTITY_ID_FORMAT, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -15,6 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_ID, PERCENTAGE, + EntityCategory, UnitOfPower, UnitOfPressure, UnitOfTemperature, @@ -22,12 +22,17 @@ from homeassistant.const import ( UnitOfVolumeFlowRate, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import OpenThermGatewayHub -from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW -from .entity import OpenThermEntity, OpenThermEntityDescription +from .const import ( + BOILER_DEVICE_DESCRIPTION, + DATA_GATEWAYS, + DATA_OPENTHERM_GW, + GATEWAY_DEVICE_DESCRIPTION, + THERMOSTAT_DEVICE_DESCRIPTION, + OpenThermDataSource, +) +from .entity import OpenThermEntityDescription, OpenThermStatusEntity SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1 @@ -36,584 +41,833 @@ SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1 class OpenThermSensorEntityDescription( SensorEntityDescription, OpenThermEntityDescription ): - """Describes opentherm_gw sensor entity.""" + """Describes an opentherm_gw sensor entity.""" -SENSOR_INFO: tuple[tuple[list[str], OpenThermSensorEntityDescription], ...] = ( - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CONTROL_SETPOINT, - friendly_name_format="Control Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), +SENSOR_DESCRIPTIONS: tuple[OpenThermSensorEntityDescription, ...] 
= ( + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT, + translation_key="control_setpoint_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MASTER_MEMBERID, - friendly_name_format="Thermostat Member ID {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT_2, + translation_key="control_setpoint_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_MEMBERID, - friendly_name_format="Boiler Member ID {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MEMBERID, + translation_key="manufacturer_id", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_OEM_FAULT, - friendly_name_format="Boiler OEM Fault Code {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OEM_FAULT, + translation_key="oem_fault_code", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_COOLING_CONTROL, - friendly_name_format="Cooling Control Signal {}", - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_COOLING_CONTROL, + translation_key="cooling_control", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CONTROL_SETPOINT_2, - friendly_name_format="Control Setpoint 2 {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD, + translation_key="max_relative_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_ROOM_SETPOINT_OVRD, - friendly_name_format="Room Setpoint Override {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_CAPACITY, + 
translation_key="max_capacity", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD, - friendly_name_format="Boiler Maximum Relative Modulation {}", - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MIN_MOD_LEVEL, + translation_key="min_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_MAX_CAPACITY, - friendly_name_format="Boiler Maximum Capacity {}", - state_class=SensorStateClass.MEASUREMENT, - device_class=SensorDeviceClass.POWER, - native_unit_of_measurement=UnitOfPower.KILO_WATT, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_REL_MOD_LEVEL, + translation_key="relative_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_MIN_MOD_LEVEL, - friendly_name_format="Boiler Minimum Modulation Level {}", - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_PRESS, + translation_key="central_heating_pressure", + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.BAR, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_ROOM_SETPOINT, - friendly_name_format="Room Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_FLOW_RATE, + translation_key="hot_water_flow_rate", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_REL_MOD_LEVEL, - friendly_name_format="Relative Modulation Level {}", - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP, + translation_key="central_heating_temperature_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + 
suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_WATER_PRESS, - friendly_name_format="Central Heating Water Pressure {}", - device_class=SensorDeviceClass.PRESSURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfPressure.BAR, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP_2, + translation_key="central_heating_temperature_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_FLOW_RATE, - friendly_name_format="Hot Water Flow Rate {}", - device_class=SensorDeviceClass.VOLUME_FLOW_RATE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP, + translation_key="hot_water_temperature_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_ROOM_SETPOINT_2, - friendly_name_format="Room Setpoint 2 {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP_2, + translation_key="hot_water_temperature_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_ROOM_TEMP, - friendly_name_format="Room Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_RETURN_WATER_TEMP, + translation_key="return_water_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_WATER_TEMP, - friendly_name_format="Central Heating Water Temperature {}", - 
device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_STORAGE_TEMP, + translation_key="solar_storage_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_TEMP, - friendly_name_format="Hot Water Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_COLL_TEMP, + translation_key="solar_collector_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_OUTSIDE_TEMP, - friendly_name_format="Outside Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_EXHAUST_TEMP, + translation_key="exhaust_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_RETURN_WATER_TEMP, - friendly_name_format="Return Water Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MAX_SETP, + translation_key="max_hot_water_setpoint_upper", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SOLAR_STORAGE_TEMP, - friendly_name_format="Solar Storage Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MIN_SETP, + translation_key="max_hot_water_setpoint_lower", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, 
gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SOLAR_COLL_TEMP, - friendly_name_format="Solar Collector Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MAX_SETP, + translation_key="max_central_heating_setpoint_upper", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_WATER_TEMP_2, - friendly_name_format="Central Heating 2 Water Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MIN_SETP, + translation_key="max_central_heating_setpoint_lower", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_TEMP_2, - friendly_name_format="Hot Water 2 Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_SETPOINT, + translation_key="hot_water_setpoint", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_EXHAUST_TEMP, - friendly_name_format="Exhaust Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MAX_CH_SETPOINT, + translation_key="max_central_heating_setpoint", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_MAX_SETP, - friendly_name_format="Hot Water Maximum Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OEM_DIAG, + translation_key="oem_diagnostic_code", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_MIN_SETP, - friendly_name_format="Hot Water Minimum Setpoint 
{}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_STARTS, + translation_key="total_burner_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH_MAX_SETP, - friendly_name_format="Boiler Maximum Central Heating Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_STARTS, + translation_key="central_heating_pump_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH_MIN_SETP, - friendly_name_format="Boiler Minimum Central Heating Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_STARTS, + translation_key="hot_water_pump_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_SETPOINT, - friendly_name_format="Hot Water Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_STARTS, + translation_key="hot_water_burner_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MAX_CH_SETPOINT, - friendly_name_format="Maximum Central Heating Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_HOURS, + translation_key="total_burner_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_OEM_DIAG, - friendly_name_format="OEM Diagnostic Code {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_HOURS, + translation_key="central_heating_pump_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_TOTAL_BURNER_STARTS, - friendly_name_format="Total Burner Starts {}", - 
state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="starts", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_HOURS, + translation_key="hot_water_pump_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_PUMP_STARTS, - friendly_name_format="Central Heating Pump Starts {}", - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="starts", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_HOURS, + translation_key="hot_water_burner_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_PUMP_STARTS, - friendly_name_format="Hot Water Pump Starts {}", - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="starts", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OT_VERSION, + translation_key="opentherm_version", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_BURNER_STARTS, - friendly_name_format="Hot Water Burner Starts {}", - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="starts", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_TYPE, + translation_key="product_type", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_TOTAL_BURNER_HOURS, - friendly_name_format="Total Burner Hours {}", - device_class=SensorDeviceClass.DURATION, - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=UnitOfTime.HOURS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_VERSION, + translation_key="product_version", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_PUMP_HOURS, - friendly_name_format="Central Heating Pump Hours {}", - device_class=SensorDeviceClass.DURATION, - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=UnitOfTime.HOURS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_MODE, + translation_key="operating_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_PUMP_HOURS, - friendly_name_format="Hot Water Pump Hours {}", - device_class=SensorDeviceClass.DURATION, - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=UnitOfTime.HOURS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_DHW_OVRD, + translation_key="hot_water_override_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_BURNER_HOURS, - friendly_name_format="Hot Water Burner Hours {}", - device_class=SensorDeviceClass.DURATION, - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=UnitOfTime.HOURS, - ), + OpenThermSensorEntityDescription( + 
key=gw_vars.OTGW_ABOUT, + translation_key="firmware_version", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MASTER_OT_VERSION, - friendly_name_format="Thermostat OpenTherm Version {}", - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_BUILD, + translation_key="firmware_build", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_OT_VERSION, - friendly_name_format="Boiler OpenTherm Version {}", - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_CLOCKMHZ, + translation_key="clock_speed", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MASTER_PRODUCT_TYPE, - friendly_name_format="Thermostat Product Type {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_A, + translation_key="led_mode_n", + translation_placeholders={"led_id": "A"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MASTER_PRODUCT_VERSION, - friendly_name_format="Thermostat Product Version {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_B, + translation_key="led_mode_n", + translation_placeholders={"led_id": "B"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_PRODUCT_TYPE, - friendly_name_format="Boiler Product Type {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_C, + translation_key="led_mode_n", + translation_placeholders={"led_id": "C"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_PRODUCT_VERSION, - friendly_name_format="Boiler Product Version {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_D, + translation_key="led_mode_n", + translation_placeholders={"led_id": "D"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_MODE, - friendly_name_format="Gateway/Monitor Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_E, + translation_key="led_mode_n", + translation_placeholders={"led_id": "E"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_DHW_OVRD, - friendly_name_format="Gateway Hot Water Override Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_F, + translation_key="led_mode_n", + translation_placeholders={"led_id": "F"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_ABOUT, - friendly_name_format="Gateway Firmware Version {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_GPIO_A, + translation_key="gpio_mode_n", + translation_placeholders={"gpio_id": "A"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_BUILD, - friendly_name_format="Gateway Firmware Build {}", - ), + 
OpenThermSensorEntityDescription( + key=gw_vars.OTGW_GPIO_B, + translation_key="gpio_mode_n", + translation_placeholders={"gpio_id": "B"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_CLOCKMHZ, - friendly_name_format="Gateway Clock Speed {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SB_TEMP, + translation_key="setback_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_A, - friendly_name_format="Gateway LED A Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SETP_OVRD_MODE, + translation_key="room_setpoint_override_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_B, - friendly_name_format="Gateway LED B Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SMART_PWR, + translation_key="smart_power_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_C, - friendly_name_format="Gateway LED C Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_THRM_DETECT, + translation_key="thermostat_detection_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_D, - friendly_name_format="Gateway LED D Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_VREF, + translation_key="reference_voltage", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_E, - friendly_name_format="Gateway LED E Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_MEMBERID, + translation_key="manufacturer_id", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_F, - friendly_name_format="Gateway LED F Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_OVRD, + translation_key="room_setpoint_override", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_GPIO_A, - friendly_name_format="Gateway GPIO A Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT, + translation_key="room_setpoint_n", + translation_placeholders={"setpoint_id": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_GPIO_B, - friendly_name_format="Gateway GPIO B Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_2, + translation_key="room_setpoint_n", + 
translation_placeholders={"setpoint_id": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_SB_TEMP, - friendly_name_format="Gateway Setback Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_TEMP, + translation_key="room_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_SETP_OVRD_MODE, - friendly_name_format="Gateway Room Setpoint Override Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OUTSIDE_TEMP, + translation_key="outside_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_SMART_PWR, - friendly_name_format="Gateway Smart Power Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_OT_VERSION, + translation_key="opentherm_version", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_THRM_DETECT, - friendly_name_format="Gateway Thermostat Detection {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_TYPE, + translation_key="product_type", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_VREF, - friendly_name_format="Gateway Reference Voltage Setting {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_VERSION, + translation_key="product_version", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT, + translation_key="control_setpoint_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT_2, + translation_key="control_setpoint_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + 
OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MEMBERID, + translation_key="manufacturer_id", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OEM_FAULT, + translation_key="oem_fault_code", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_COOLING_CONTROL, + translation_key="cooling_control", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD, + translation_key="max_relative_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_CAPACITY, + translation_key="max_capacity", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MIN_MOD_LEVEL, + translation_key="min_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_REL_MOD_LEVEL, + translation_key="relative_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_PRESS, + translation_key="central_heating_pressure", + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.BAR, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_FLOW_RATE, + translation_key="hot_water_flow_rate", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP, + translation_key="central_heating_temperature_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP_2, + translation_key="central_heating_temperature_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + 
OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP, + translation_key="hot_water_temperature_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP_2, + translation_key="hot_water_temperature_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_RETURN_WATER_TEMP, + translation_key="return_water_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_STORAGE_TEMP, + translation_key="solar_storage_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_COLL_TEMP, + translation_key="solar_collector_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_EXHAUST_TEMP, + translation_key="exhaust_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MAX_SETP, + translation_key="max_hot_water_setpoint_upper", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MIN_SETP, + translation_key="max_hot_water_setpoint_lower", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MAX_SETP, + translation_key="max_central_heating_setpoint_upper", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MIN_SETP, + translation_key="max_central_heating_setpoint_lower", + device_class=SensorDeviceClass.TEMPERATURE, + 
state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_SETPOINT, + translation_key="hot_water_setpoint", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MAX_CH_SETPOINT, + translation_key="max_central_heating_setpoint", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OEM_DIAG, + translation_key="oem_diagnostic_code", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_STARTS, + translation_key="total_burner_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_STARTS, + translation_key="central_heating_pump_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_STARTS, + translation_key="hot_water_pump_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_STARTS, + translation_key="hot_water_burner_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_HOURS, + translation_key="total_burner_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_HOURS, + translation_key="central_heating_pump_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_HOURS, + translation_key="hot_water_pump_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_HOURS, + translation_key="hot_water_burner_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OT_VERSION, + translation_key="opentherm_version", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + 
OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_TYPE, + translation_key="product_type", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_VERSION, + translation_key="product_version", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_MEMBERID, + translation_key="manufacturer_id", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_OVRD, + translation_key="room_setpoint_override", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT, + translation_key="room_setpoint_n", + translation_placeholders={"setpoint_id": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_2, + translation_key="room_setpoint_n", + translation_placeholders={"setpoint_id": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_TEMP, + translation_key="room_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OUTSIDE_TEMP, + translation_key="outside_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_OT_VERSION, + translation_key="opentherm_version", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_TYPE, + translation_key="product_type", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_VERSION, + translation_key="product_version", + device_description=BOILER_DEVICE_DESCRIPTION, ), ) @@ -629,37 +883,22 @@ async def async_setup_entry( async_add_entities( OpenThermSensor( gw_hub, - source, description, ) - for sources, description in SENSOR_INFO - for source in sources + for description in SENSOR_DESCRIPTIONS ) -class OpenThermSensor(OpenThermEntity, SensorEntity): - """Representation of an OpenTherm Gateway sensor.""" +class OpenThermSensor(OpenThermStatusEntity, SensorEntity): + """Representation of an OpenTherm sensor.""" + _attr_entity_category = 
EntityCategory.DIAGNOSTIC entity_description: OpenThermSensorEntityDescription - def __init__( - self, - gw_hub: OpenThermGatewayHub, - source: str, - description: OpenThermSensorEntityDescription, - ) -> None: - """Initialize the OpenTherm Gateway sensor.""" - self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, - f"{description.key}_{source}_{gw_hub.hub_id}", - hass=gw_hub.hass, - ) - super().__init__(gw_hub, source, description) - @callback - def receive_report(self, status: dict[str, dict]) -> None: + def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: """Handle status updates from the component.""" - self._attr_available = self._gateway.connected - value = status[self._source].get(self.entity_description.key) - self._attr_native_value = value + self._attr_native_value = status[ + self.entity_description.device_description.data_source + ].get(self.entity_description.key) self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/strings.json b/homeassistant/components/opentherm_gw/strings.json index 9eb97539df9..834168eb113 100644 --- a/homeassistant/components/opentherm_gw/strings.json +++ b/homeassistant/components/opentherm_gw/strings.json @@ -1,4 +1,8 @@ { + "common": { + "state_not_supported": "Not supported", + "state_supported": "Supported" + }, "config": { "step": { "init": { @@ -16,6 +20,341 @@ "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]" } }, + "device": { + "boiler_device": { + "name": "OpenTherm Boiler" + }, + "gateway_device": { + "name": "OpenTherm Gateway" + }, + "thermostat_device": { + "name": "OpenTherm Thermostat" + } + }, + "entity": { + "binary_sensor": { + "fault_indication": { + "name": "Fault indication" + }, + "central_heating_n": { + "name": "Central heating {circuit_number}" + }, + "cooling": { + "name": "Cooling" + }, + "flame": { + "name": "Flame" + }, + "hot_water": { + "name": "Hot water" + }, + "diagnostic_indication": { + "name": "Diagnostic indication" + }, + "supports_hot_water": { + "name": "Hot water support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "control_type": { + "name": "Control type" + }, + "supports_cooling": { + "name": "Cooling support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "hot_water_config": { + "name": "Hot water system type", + "state": { + "off": "Instantaneous or unspecified", + "on": "Storage tank" + } + }, + "supports_pump_control": { + "name": "Pump control support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "supports_ch_2": { + "name": "Central heating 2 support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "service_required": { + "name": "Service required" + }, + "supports_remote_reset": { + "name": "Remote reset support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "low_water_pressure": { + "name": "Low water pressure" + }, + "gas_fault": { + "name": "Gas fault" + }, + "air_pressure_fault": { + "name": "Air pressure fault" + }, + 
"water_overtemperature": { + "name": "Water overtemperature" + }, + "supports_central_heating_setpoint_transfer": { + "name": "Central heating setpoint transfer support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "supports_central_heating_setpoint_writing": { + "name": "Central heating setpoint write support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "supports_hot_water_setpoint_transfer": { + "name": "Hot water setpoint transfer support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "supports_hot_water_setpoint_writing": { + "name": "Hot water setpoint write support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "gpio_state_n": { + "name": "GPIO {gpio_id} state" + }, + "ignore_transitions": { + "name": "Ignore transitions" + }, + "override_high_byte": { + "name": "Override high byte" + }, + "outside_temp_correction": { + "name": "Outside temperature correction" + }, + "override_manual_change_prio": { + "name": "Manual change has priority over override" + }, + "override_program_change_prio": { + "name": "Programmed change has priority over override" + } + }, + "select": { + "gpio_mode_n": { + "name": "GPIO {gpio_id} mode", + "state": { + "input": "Input", + "ground": "Ground", + "vcc": "Vcc (5V)", + "led_e": "LED E", + "led_f": "LED F", + "home": "Home", + "away": "Away", + "ds1820": "DS1820", + "dhw_block": "Block hot water" + } + }, + "led_mode_n": { + "name": "LED {led_id} mode", + "state": { + "receive_any": "Receiving on any interface", + "transmit_any": "Transmitting on any interface", + "thermostat_traffic": "Traffic on the thermostat interface", + "boiler_traffic": "Traffic on the boiler interface", + "setpoint_override_active": "Setpoint override is active", + "flame_on": "Boiler flame is on", + "central_heating_on": "Central heating is on", + "hot_water_on": "Hot water is on", + "comfort_mode_on": "Comfort mode is on", + "transmit_error_detected": "Transmit error detected", + "boiler_maintenance_required": "Boiler maintenance required", + "raised_power_mode_active": "Raised power mode active" + } + } + }, + "sensor": { + "control_setpoint_n": { + "name": "Control setpoint {circuit_number}" + }, + "manufacturer_id": { + "name": "Manufacturer ID" + }, + "oem_fault_code": { + "name": "Manufacturer-specific fault code" + }, + "cooling_control": { + "name": "Cooling control signal" + }, + "max_relative_mod_level": { + "name": "Maximum relative modulation level" + }, + "max_capacity": { + "name": "Maximum capacity" + }, + "min_mod_level": { + "name": "Minimum modulation level" + }, + "relative_mod_level": { + "name": "Relative modulation level" + }, + "central_heating_pressure": { + "name": "Central heating water pressure" + }, + "hot_water_flow_rate": { + "name": "Hot water flow rate" + }, + "central_heating_temperature_n": { + "name": "Central heating {circuit_number} water temperature" + }, + "hot_water_temperature_n": { + "name": "Hot water {circuit_number} temperature" + }, + "return_water_temperature": { + "name": "Return water temperature" + }, + "solar_storage_temperature": { + "name": "Solar storage 
temperature" + }, + "solar_collector_temperature": { + "name": "Solar collector temperature" + }, + "exhaust_temperature": { + "name": "Exhaust temperature" + }, + "max_hot_water_setpoint_upper": { + "name": "Maximum hot water setpoint upper bound" + }, + "max_hot_water_setpoint_lower": { + "name": "Maximum hot water setpoint lower bound" + }, + "max_central_heating_setpoint_upper": { + "name": "Maximum central heating setpoint upper bound" + }, + "max_central_heating_setpoint_lower": { + "name": "Maximum central heating setpoint lower bound" + }, + "hot_water_setpoint": { + "name": "Hot water setpoint" + }, + "max_central_heating_setpoint": { + "name": "Maximum central heating setpoint" + }, + "oem_diagnostic_code": { + "name": "Manufacturer-specific diagnostic code" + }, + "total_burner_starts": { + "name": "Burner start count" + }, + "central_heating_pump_starts": { + "name": "Central heating pump start count" + }, + "hot_water_pump_starts": { + "name": "Hot water pump start count" + }, + "hot_water_burner_starts": { + "name": "Hot water burner start count" + }, + "total_burner_hours": { + "name": "Burner running time" + }, + "central_heating_pump_hours": { + "name": "Central heating pump running time" + }, + "hot_water_pump_hours": { + "name": "Hot water pump running time" + }, + "hot_water_burner_hours": { + "name": "Hot water burner running time" + }, + "opentherm_version": { + "name": "OpenTherm protocol version" + }, + "product_type": { + "name": "Product type" + }, + "product_version": { + "name": "Product version" + }, + "operating_mode": { + "name": "Operating mode" + }, + "hot_water_override_mode": { + "name": "Hot water override mode" + }, + "firmware_version": { + "name": "Firmware version" + }, + "firmware_build": { + "name": "Firmware build" + }, + "clock_speed": { + "name": "Clock speed" + }, + "led_mode_n": { + "name": "LED {led_id} mode" + }, + "gpio_mode_n": { + "name": "GPIO {gpio_id} mode" + }, + "setback_temperature": { + "name": "Setback temperature" + }, + "room_setpoint_override_mode": { + "name": "Room setpoint override mode" + }, + "smart_power_mode": { + "name": "Smart power mode" + }, + "thermostat_detection_mode": { + "name": "Thermostat detection mode" + }, + "reference_voltage": { + "name": "Reference voltage setting" + }, + "room_setpoint_override": { + "name": "Room setpoint override" + }, + "room_setpoint_n": { + "name": "Room setpoint {setpoint_id}" + }, + "room_temperature": { + "name": "Room temperature" + }, + "outside_temperature": { + "name": "Outside temperature" + } + }, + "switch": { + "central_heating_override_n": { + "name": "Force central heating {circuit_number} on" + } + } + }, + "issues": { + "deprecated_import_from_configuration_yaml": { + "title": "Deprecated configuration", + "description": "Configuration of the OpenTherm Gateway integration through configuration.yaml is deprecated. Your configuration has been migrated to config entries. Please remove any OpenTherm Gateway configuration from your configuration.yaml." 
+ } + }, "options": { "step": { "init": { diff --git a/homeassistant/components/opentherm_gw/switch.py b/homeassistant/components/opentherm_gw/switch.py new file mode 100644 index 00000000000..41ffa03a932 --- /dev/null +++ b/homeassistant/components/opentherm_gw/switch.py @@ -0,0 +1,79 @@ +"""Support for OpenTherm Gateway switches.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ID, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import OpenThermGatewayHub +from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, GATEWAY_DEVICE_DESCRIPTION +from .entity import OpenThermEntity, OpenThermEntityDescription + + +@dataclass(frozen=True, kw_only=True) +class OpenThermSwitchEntityDescription( + OpenThermEntityDescription, SwitchEntityDescription +): + """Describes an opentherm_gw switch entity.""" + + turn_off_action: Callable[[OpenThermGatewayHub], Awaitable[int | None]] + turn_on_action: Callable[[OpenThermGatewayHub], Awaitable[int | None]] + + +SWITCH_DESCRIPTIONS: tuple[OpenThermSwitchEntityDescription, ...] = ( + OpenThermSwitchEntityDescription( + key="central_heating_1_override", + translation_key="central_heating_override_n", + translation_placeholders={"circuit_number": "1"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + turn_off_action=lambda hub: hub.gateway.set_ch_enable_bit(0), + turn_on_action=lambda hub: hub.gateway.set_ch_enable_bit(1), + ), + OpenThermSwitchEntityDescription( + key="central_heating_2_override", + translation_key="central_heating_override_n", + translation_placeholders={"circuit_number": "2"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + turn_off_action=lambda hub: hub.gateway.set_ch2_enable_bit(0), + turn_on_action=lambda hub: hub.gateway.set_ch2_enable_bit(1), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the OpenTherm Gateway switches.""" + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] + + async_add_entities( + OpenThermSwitch(gw_hub, description) for description in SWITCH_DESCRIPTIONS + ) + + +class OpenThermSwitch(OpenThermEntity, SwitchEntity): + """Represent an OpenTherm Gateway switch.""" + + _attr_assumed_state = True + _attr_entity_category = EntityCategory.CONFIG + _attr_entity_registry_enabled_default = False + entity_description: OpenThermSwitchEntityDescription + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + value = await self.entity_description.turn_off_action(self._gateway) + self._attr_is_on = bool(value) if value is not None else None + self.async_write_ha_state() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + value = await self.entity_description.turn_on_action(self._gateway) + self._attr_is_on = bool(value) if value is not None else None + self.async_write_ha_state() diff --git a/homeassistant/components/openuv/__init__.py b/homeassistant/components/openuv/__init__.py index b7c13ad49f1..19e63747e4b 100644 --- a/homeassistant/components/openuv/__init__.py +++ b/homeassistant/components/openuv/__init__.py @@ -19,9 +19,6 @@ from homeassistant.const import ( ) from 
homeassistant.core import HomeAssistant from homeassistant.helpers import aiohttp_client -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( CONF_FROM_WINDOW, @@ -110,26 +107,3 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: LOGGER.debug("Migration to version %s successful", version) return True - - -class OpenUvEntity(CoordinatorEntity): - """Define a generic OpenUV entity.""" - - _attr_has_entity_name = True - - def __init__( - self, coordinator: OpenUvCoordinator, description: EntityDescription - ) -> None: - """Initialize.""" - super().__init__(coordinator) - - self._attr_extra_state_attributes = {} - self._attr_unique_id = ( - f"{coordinator.latitude}_{coordinator.longitude}_{description.key}" - ) - self.entity_description = description - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, f"{coordinator.latitude}_{coordinator.longitude}")}, - name="OpenUV", - entry_type=DeviceEntryType.SERVICE, - ) diff --git a/homeassistant/components/openuv/binary_sensor.py b/homeassistant/components/openuv/binary_sensor.py index da4dfc3f742..018d91710df 100644 --- a/homeassistant/components/openuv/binary_sensor.py +++ b/homeassistant/components/openuv/binary_sensor.py @@ -9,9 +9,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import as_local, parse_datetime, utcnow -from . import OpenUvEntity from .const import DATA_PROTECTION_WINDOW, DOMAIN, LOGGER, TYPE_PROTECTION_WINDOW from .coordinator import OpenUvCoordinator +from .entity import OpenUvEntity ATTR_PROTECTION_WINDOW_ENDING_TIME = "end_time" ATTR_PROTECTION_WINDOW_ENDING_UV = "end_uv" @@ -51,7 +51,7 @@ class OpenUvBinarySensor(OpenUvEntity, BinarySensorEntity): for key in ("from_time", "to_time", "from_uv", "to_uv"): if not data.get(key): - LOGGER.info("Skipping update due to missing data: %s", key) + LOGGER.warning("Skipping update due to missing data: %s", key) return if self.entity_description.key == TYPE_PROTECTION_WINDOW: diff --git a/homeassistant/components/openuv/entity.py b/homeassistant/components/openuv/entity.py new file mode 100644 index 00000000000..f3015815bf1 --- /dev/null +++ b/homeassistant/components/openuv/entity.py @@ -0,0 +1,33 @@ +"""Support for UV data from openuv.io.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import OpenUvCoordinator + + +class OpenUvEntity(CoordinatorEntity): + """Define a generic OpenUV entity.""" + + _attr_has_entity_name = True + + def __init__( + self, coordinator: OpenUvCoordinator, description: EntityDescription + ) -> None: + """Initialize.""" + super().__init__(coordinator) + + self._attr_extra_state_attributes = {} + self._attr_unique_id = ( + f"{coordinator.latitude}_{coordinator.longitude}_{description.key}" + ) + self.entity_description = description + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{coordinator.latitude}_{coordinator.longitude}")}, + name="OpenUV", + entry_type=DeviceEntryType.SERVICE, + ) diff --git a/homeassistant/components/openuv/sensor.py 
b/homeassistant/components/openuv/sensor.py index a79bc410715..742017be639 100644 --- a/homeassistant/components/openuv/sensor.py +++ b/homeassistant/components/openuv/sensor.py @@ -18,7 +18,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import as_local, parse_datetime -from . import OpenUvEntity from .const import ( DATA_UV, DOMAIN, @@ -34,6 +33,7 @@ from .const import ( TYPE_SAFE_EXPOSURE_TIME_6, ) from .coordinator import OpenUvCoordinator +from .entity import OpenUvEntity ATTR_MAX_UV_TIME = "time" diff --git a/homeassistant/components/openweathermap/__init__.py b/homeassistant/components/openweathermap/__init__.py index 747b93179bc..33cd23c4f6c 100644 --- a/homeassistant/components/openweathermap/__init__.py +++ b/homeassistant/components/openweathermap/__init__.py @@ -88,7 +88,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: version=CONFIG_FLOW_VERSION, ) - _LOGGER.info("Migration to version %s successful", CONFIG_FLOW_VERSION) + _LOGGER.debug("Migration to version %s successful", CONFIG_FLOW_VERSION) return True diff --git a/homeassistant/components/openweathermap/config_flow.py b/homeassistant/components/openweathermap/config_flow.py index 5fe06ea2dcd..8d33e117287 100644 --- a/homeassistant/components/openweathermap/config_flow.py +++ b/homeassistant/components/openweathermap/config_flow.py @@ -44,7 +44,7 @@ class OpenWeatherMapConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OpenWeatherMapOptionsFlow: """Get the options flow for this handler.""" - return OpenWeatherMapOptionsFlow(config_entry) + return OpenWeatherMapOptionsFlow() async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle a flow initialized by the user.""" @@ -97,10 +97,6 @@ class OpenWeatherMapConfigFlow(ConfigFlow, domain=DOMAIN): class OpenWeatherMapOptionsFlow(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: diff --git a/homeassistant/components/openweathermap/const.py b/homeassistant/components/openweathermap/const.py index d34125a2405..81a6544c7ce 100644 --- a/homeassistant/components/openweathermap/const.py +++ b/homeassistant/components/openweathermap/const.py @@ -63,12 +63,12 @@ OWM_MODE_FREE_FORECAST = "forecast" OWM_MODE_V30 = "v3.0" OWM_MODE_V25 = "v2.5" OWM_MODES = [ - OWM_MODE_FREE_CURRENT, - OWM_MODE_FREE_FORECAST, OWM_MODE_V30, OWM_MODE_V25, + OWM_MODE_FREE_CURRENT, + OWM_MODE_FREE_FORECAST, ] -DEFAULT_OWM_MODE = OWM_MODE_FREE_CURRENT +DEFAULT_OWM_MODE = OWM_MODE_V30 LANGUAGES = [ "af", diff --git a/homeassistant/components/openweathermap/coordinator.py b/homeassistant/components/openweathermap/coordinator.py index f7672a1290b..3ef0eda0c8f 100644 --- a/homeassistant/components/openweathermap/coordinator.py +++ b/homeassistant/components/openweathermap/coordinator.py @@ -192,12 +192,13 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): @staticmethod def _get_precipitation_value(precipitation): """Get precipitation value from weather data.""" - if "all" in precipitation: - return round(precipitation["all"], 2) - if "3h" in precipitation: - return round(precipitation["3h"], 2) - if "1h" in precipitation: - return round(precipitation["1h"], 2) + if precipitation is not None: 
+ if "all" in precipitation: + return round(precipitation["all"], 2) + if "3h" in precipitation: + return round(precipitation["3h"], 2) + if "1h" in precipitation: + return round(precipitation["1h"], 2) return 0 def _get_condition(self, weather_code, timestamp=None): diff --git a/homeassistant/components/openweathermap/manifest.json b/homeassistant/components/openweathermap/manifest.json index 199e750ad4f..14313a5a77e 100644 --- a/homeassistant/components/openweathermap/manifest.json +++ b/homeassistant/components/openweathermap/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/openweathermap", "iot_class": "cloud_polling", "loggers": ["pyopenweathermap"], - "requirements": ["pyopenweathermap==0.1.1"] + "requirements": ["pyopenweathermap==0.2.1"] } diff --git a/homeassistant/components/opnsense/manifest.json b/homeassistant/components/opnsense/manifest.json index bf8a41d1785..4dd82216f1a 100644 --- a/homeassistant/components/opnsense/manifest.json +++ b/homeassistant/components/opnsense/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/opnsense", "iot_class": "local_polling", "loggers": ["pbr", "pyopnsense"], + "quality_scale": "legacy", "requirements": ["pyopnsense==0.4.0"] } diff --git a/homeassistant/components/opower/config_flow.py b/homeassistant/components/opower/config_flow.py index 574062aca52..6396ba24a15 100644 --- a/homeassistant/components/opower/config_flow.py +++ b/homeassistant/components/opower/config_flow.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Mapping import logging -import socket from typing import Any from opower import ( @@ -16,7 +15,7 @@ from opower import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_create_clientsession @@ -40,7 +39,7 @@ async def _validate_login( ) -> dict[str, str]: """Validate login data and return any errors.""" api = Opower( - async_create_clientsession(hass, family=socket.AF_INET), + async_create_clientsession(hass), login_data[CONF_UTILITY], login_data[CONF_USERNAME], login_data[CONF_PASSWORD], @@ -50,8 +49,12 @@ async def _validate_login( try: await api.async_login() except InvalidAuth: + _LOGGER.exception( + "Invalid auth when connecting to %s", login_data[CONF_UTILITY] + ) errors["base"] = "invalid_auth" except CannotConnect: + _LOGGER.exception("Could not connect to %s", login_data[CONF_UTILITY]) errors["base"] = "cannot_connect" return errors @@ -63,7 +66,6 @@ class OpowerConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize a new OpowerConfigFlow.""" - self.reauth_entry: ConfigEntry | None = None self.utility_info: dict[str, Any] | None = None async def async_step_user( @@ -132,35 +134,29 @@ class OpowerConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self.reauth_entry errors: 
dict[str, str] = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: - data = {**self.reauth_entry.data, **user_input} + data = {**reauth_entry.data, **user_input} errors = await _validate_login(self.hass, data) if not errors: - self.hass.config_entries.async_update_entry( - self.reauth_entry, data=data - ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) + schema: VolDictType = { - vol.Required(CONF_USERNAME): self.reauth_entry.data[CONF_USERNAME], + vol.Required(CONF_USERNAME): reauth_entry.data[CONF_USERNAME], vol.Required(CONF_PASSWORD): str, } - if select_utility(self.reauth_entry.data[CONF_UTILITY]).accepts_mfa(): + if select_utility(reauth_entry.data[CONF_UTILITY]).accepts_mfa(): schema[vol.Optional(CONF_TOTP_SECRET)] = str return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema(schema), errors=errors, - description_placeholders={CONF_NAME: self.reauth_entry.title}, + description_placeholders={CONF_NAME: reauth_entry.title}, ) diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index d0795ae4e15..629dce0823c 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta import logging -import socket from types import MappingProxyType from typing import Any, cast @@ -54,7 +53,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): update_interval=timedelta(hours=12), ) self.api = Opower( - aiohttp_client.async_get_clientsession(hass, family=socket.AF_INET), + aiohttp_client.async_get_clientsession(hass), entry_data[CONF_UTILITY], entry_data[CONF_USERNAME], entry_data[CONF_PASSWORD], @@ -98,7 +97,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): account.meter_type.name.lower(), # Some utilities like AEP have "-" in their account id. # Replace it with "_" to avoid "Invalid statistic_id" - account.utility_account_id.replace("-", "_"), + account.utility_account_id.replace("-", "_").lower(), ) ) cost_statistic_id = f"{DOMAIN}:{id_prefix}_energy_cost" @@ -110,7 +109,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): ) last_stat = await get_instance(self.hass).async_add_executor_job( - get_last_statistics, self.hass, 1, cost_statistic_id, True, set() + get_last_statistics, self.hass, 1, consumption_statistic_id, True, set() ) if not last_stat: _LOGGER.debug("Updating statistic for the first time") @@ -124,24 +123,39 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): cost_reads = await self._async_get_cost_reads( account, self.api.utility.timezone(), - last_stat[cost_statistic_id][0]["start"], + last_stat[consumption_statistic_id][0]["start"], ) if not cost_reads: _LOGGER.debug("No recent usage/cost data. Skipping update") continue - stats = await get_instance(self.hass).async_add_executor_job( - statistics_during_period, - self.hass, - cost_reads[0].start_time, - None, - {cost_statistic_id, consumption_statistic_id}, - "hour" if account.meter_type == MeterType.ELEC else "day", - None, - {"sum"}, - ) + start = cost_reads[0].start_time + _LOGGER.debug("Getting statistics at: %s", start) + # In the common case there should be a previous statistic at start time + # so we only need to fetch one statistic. If there isn't any, fetch all. 
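+        # The first pass queries only a one-second window at `start`; if it comes back
+        # empty, the second pass (end=None) falls back to fetching every statistic from `start` onward.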
+ for end in (start + timedelta(seconds=1), None): + stats = await get_instance(self.hass).async_add_executor_job( + statistics_during_period, + self.hass, + start, + end, + {cost_statistic_id, consumption_statistic_id}, + "hour", + None, + {"sum"}, + ) + if stats: + break + if end: + _LOGGER.debug( + "Not found. Trying to find the oldest statistic after %s", + start, + ) + # We are in this code path only if get_last_statistics found a stat + # so statistics_during_period should also have found at least one. + assert stats cost_sum = cast(float, stats[cost_statistic_id][0]["sum"]) consumption_sum = cast(float, stats[consumption_statistic_id][0]["sum"]) - last_stats_time = stats[cost_statistic_id][0]["start"] + last_stats_time = stats[consumption_statistic_id][0]["start"] cost_statistics = [] consumption_statistics = [] @@ -187,7 +201,17 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): else UnitOfVolume.CENTUM_CUBIC_FEET, ) + _LOGGER.debug( + "Adding %s statistics for %s", + len(cost_statistics), + cost_statistic_id, + ) async_add_external_statistics(self.hass, cost_metadata, cost_statistics) + _LOGGER.debug( + "Adding %s statistics for %s", + len(consumption_statistics), + consumption_statistic_id, + ) async_add_external_statistics( self.hass, consumption_metadata, consumption_statistics ) @@ -227,9 +251,11 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): else: start = datetime.fromtimestamp(start_time, tz=tz) - timedelta(days=30) end = dt_util.now(tz) + _LOGGER.debug("Getting monthly cost reads: %s - %s", start, end) cost_reads = await self.api.async_get_cost_reads( account, AggregateType.BILL, start, end ) + _LOGGER.debug("Got %s monthly cost reads", len(cost_reads)) if account.read_resolution == ReadResolution.BILLING: return cost_reads @@ -240,9 +266,11 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): start = cost_reads[0].start_time assert start start = max(start, end - timedelta(days=3 * 365)) + _LOGGER.debug("Getting daily cost reads: %s - %s", start, end) daily_cost_reads = await self.api.async_get_cost_reads( account, AggregateType.DAY, start, end ) + _LOGGER.debug("Got %s daily cost reads", len(daily_cost_reads)) _update_with_finer_cost_reads(cost_reads, daily_cost_reads) if account.read_resolution == ReadResolution.DAY: return cost_reads @@ -252,8 +280,11 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): else: assert start start = max(start, end - timedelta(days=2 * 30)) + _LOGGER.debug("Getting hourly cost reads: %s - %s", start, end) hourly_cost_reads = await self.api.async_get_cost_reads( account, AggregateType.HOUR, start, end ) + _LOGGER.debug("Got %s hourly cost reads", len(hourly_cost_reads)) _update_with_finer_cost_reads(cost_reads, hourly_cost_reads) + _LOGGER.debug("Got %s cost reads", len(cost_reads)) return cost_reads diff --git a/homeassistant/components/opower/manifest.json b/homeassistant/components/opower/manifest.json index 02b98cfaf00..593e4cf34b8 100644 --- a/homeassistant/components/opower/manifest.json +++ b/homeassistant/components/opower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/opower", "iot_class": "cloud_polling", "loggers": ["opower"], - "requirements": ["opower==0.7.0"] + "requirements": ["opower==0.8.6"] } diff --git a/homeassistant/components/opple/light.py b/homeassistant/components/opple/light.py index a4aa98bbf69..da2993d1996 100644 --- a/homeassistant/components/opple/light.py +++ 
b/homeassistant/components/opple/light.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, ColorMode, LightEntity, @@ -20,10 +20,6 @@ from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired as kelvin_to_mired, - color_temperature_mired_to_kelvin as mired_to_kelvin, -) _LOGGER = logging.getLogger(__name__) @@ -58,6 +54,8 @@ class OppleLight(LightEntity): _attr_color_mode = ColorMode.COLOR_TEMP _attr_supported_color_modes = {ColorMode.COLOR_TEMP} + _attr_min_color_temp_kelvin = 3000 # 333 Mireds + _attr_max_color_temp_kelvin = 5700 # 175 Mireds def __init__(self, name, host): """Initialize an Opple light.""" @@ -67,7 +65,6 @@ class OppleLight(LightEntity): self._name = name self._is_on = None self._brightness = None - self._color_temp = None @property def available(self) -> bool: @@ -94,21 +91,6 @@ class OppleLight(LightEntity): """Return the brightness of the light.""" return self._brightness - @property - def color_temp(self): - """Return the color temperature of this light.""" - return kelvin_to_mired(self._color_temp) - - @property - def min_mireds(self): - """Return minimum supported color temperature.""" - return 175 - - @property - def max_mireds(self): - """Return maximum supported color temperature.""" - return 333 - def turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" _LOGGER.debug("Turn on light %s %s", self._device.ip, kwargs) @@ -118,9 +100,11 @@ class OppleLight(LightEntity): if ATTR_BRIGHTNESS in kwargs and self.brightness != kwargs[ATTR_BRIGHTNESS]: self._device.brightness = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs and self.color_temp != kwargs[ATTR_COLOR_TEMP]: - color_temp = mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]) - self._device.color_temperature = color_temp + if ( + ATTR_COLOR_TEMP_KELVIN in kwargs + and self.color_temp_kelvin != kwargs[ATTR_COLOR_TEMP_KELVIN] + ): + self._device.color_temperature = kwargs[ATTR_COLOR_TEMP_KELVIN] def turn_off(self, **kwargs: Any) -> None: """Instruct the light to turn off.""" @@ -136,7 +120,7 @@ class OppleLight(LightEntity): prev_available == self.available and self._is_on == self._device.power_on and self._brightness == self._device.brightness - and self._color_temp == self._device.color_temperature + and self._attr_color_temp_kelvin == self._device.color_temperature ): return @@ -146,7 +130,7 @@ class OppleLight(LightEntity): self._is_on = self._device.power_on self._brightness = self._device.brightness - self._color_temp = self._device.color_temperature + self._attr_color_temp_kelvin = self._device.color_temperature if not self.is_on: _LOGGER.debug("Update light %s success: power off", self._device.ip) @@ -155,5 +139,5 @@ class OppleLight(LightEntity): "Update light %s success: power on brightness %s color temperature %s", self._device.ip, self._brightness, - self._color_temp, + self._attr_color_temp_kelvin, ) diff --git a/homeassistant/components/opple/manifest.json b/homeassistant/components/opple/manifest.json index 174907dfd0f..dc28d1f0f33 100644 --- a/homeassistant/components/opple/manifest.json +++ b/homeassistant/components/opple/manifest.json @@ -5,5 +5,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/opple", "iot_class": "local_polling", "loggers": ["pyoppleio"], + "quality_scale": "legacy", "requirements": ["pyoppleio-legacy==1.0.8"] } diff --git a/homeassistant/components/oru/manifest.json b/homeassistant/components/oru/manifest.json index 23c43e32306..347388b6f15 100644 --- a/homeassistant/components/oru/manifest.json +++ b/homeassistant/components/oru/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/oru", "iot_class": "cloud_polling", "loggers": ["oru"], + "quality_scale": "legacy", "requirements": ["oru==0.1.11"] } diff --git a/homeassistant/components/orvibo/manifest.json b/homeassistant/components/orvibo/manifest.json index 05ce5edd8bd..e3a6676b2f2 100644 --- a/homeassistant/components/orvibo/manifest.json +++ b/homeassistant/components/orvibo/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/orvibo", "iot_class": "local_push", "loggers": ["orvibo"], + "quality_scale": "legacy", "requirements": ["orvibo==1.1.2"] } diff --git a/homeassistant/components/orvibo/switch.py b/homeassistant/components/orvibo/switch.py index 34bf63aaaab..2f990333cf6 100644 --- a/homeassistant/components/orvibo/switch.py +++ b/homeassistant/components/orvibo/switch.py @@ -59,7 +59,7 @@ def setup_platform( switch_conf = config.get(CONF_SWITCHES, [config]) if config.get(CONF_DISCOVERY): - _LOGGER.info("Discovering S20 switches") + _LOGGER.debug("Discovering S20 switches") switch_data.update(discover()) for switch in switch_conf: @@ -70,7 +70,7 @@ def setup_platform( switches.append( S20Switch(data.get(CONF_NAME), S20(host, mac=data.get(CONF_MAC))) ) - _LOGGER.info("Initialized S20 at %s", host) + _LOGGER.debug("Initialized S20 at %s", host) except S20Exception: _LOGGER.error("S20 at %s couldn't be initialized", host) diff --git a/homeassistant/components/osoenergy/config_flow.py b/homeassistant/components/osoenergy/config_flow.py index e0afc5292ae..a47f90e3c04 100644 --- a/homeassistant/components/osoenergy/config_flow.py +++ b/homeassistant/components/osoenergy/config_flow.py @@ -7,12 +7,7 @@ from typing import Any from apyosoenergyapi import OSOEnergy import voluptuous as vol -from homeassistant.config_entries import ( - SOURCE_REAUTH, - ConfigEntry, - ConfigFlow, - ConfigFlowResult, -) +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.helpers import aiohttp_client @@ -27,10 +22,6 @@ class OSOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self.entry: ConfigEntry | None = None - async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle a flow initialized by the user.""" errors = {} @@ -40,12 +31,10 @@ class OSOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): if user_email := await self.get_user_email(user_input[CONF_API_KEY]): await self.async_set_unique_id(user_email) - if self.context["source"] == SOURCE_REAUTH and self.entry: - self.hass.config_entries.async_update_entry( - self.entry, title=user_email, data=user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), title=user_email, data=user_input ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") self._abort_if_unique_id_configured() return self.async_create_entry(title=user_email, data=user_input) @@ -69,9 +58,12 @@ 
class OSOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): return None async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Re Authenticate a user.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - data = {CONF_API_KEY: user_input[CONF_API_KEY]} - return await self.async_step_user(data) + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + _SCHEMA_STEP_USER, self._get_reauth_entry().data + ), + ) diff --git a/homeassistant/components/osoenergy/icons.json b/homeassistant/components/osoenergy/icons.json index 60b2d257b8a..42d1f2cc480 100644 --- a/homeassistant/components/osoenergy/icons.json +++ b/homeassistant/components/osoenergy/icons.json @@ -11,5 +11,22 @@ "default": "mdi:water-boiler" } } + }, + "services": { + "get_profile": { + "service": "mdi:thermometer-lines" + }, + "set_profile": { + "service": "mdi:thermometer-lines" + }, + "set_v40_min": { + "service": "mdi:car-coolant-level" + }, + "turn_off": { + "service": "mdi:water-boiler-off" + }, + "turn_on": { + "service": "mdi:water-boiler" + } } } diff --git a/homeassistant/components/osoenergy/services.yaml b/homeassistant/components/osoenergy/services.yaml new file mode 100644 index 00000000000..6c8f5512215 --- /dev/null +++ b/homeassistant/components/osoenergy/services.yaml @@ -0,0 +1,261 @@ +get_profile: + target: + entity: + domain: water_heater +set_profile: + target: + entity: + domain: water_heater + fields: + hour_00: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_01: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_02: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_03: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_04: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_05: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_06: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_07: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_08: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_09: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_10: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_11: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_12: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_13: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_14: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_15: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_16: + required: false + example: 75 + selector: + number: 
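+          # Every hour_NN field shares this 10-75 °C range, mirroring the set_profile
+          # entity service schema registered in water_heater.py.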
+ min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_17: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_18: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_19: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_20: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_21: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_22: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_23: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C +set_v40_min: + target: + entity: + domain: water_heater + fields: + v40_min: + required: true + example: 240 + selector: + number: + min: 200 + max: 550 + step: 1 + unit_of_measurement: L +turn_off: + target: + entity: + domain: water_heater + fields: + until_temp_limit: + required: true + default: false + example: false + selector: + boolean: +turn_on: + target: + entity: + domain: water_heater + fields: + until_temp_limit: + required: true + default: false + example: false + selector: + boolean: diff --git a/homeassistant/components/osoenergy/strings.json b/homeassistant/components/osoenergy/strings.json index a7963bfa436..b8f95c021fa 100644 --- a/homeassistant/components/osoenergy/strings.json +++ b/homeassistant/components/osoenergy/strings.json @@ -91,5 +91,143 @@ "name": "Temperature one" } } + }, + "services": { + "get_profile": { + "name": "Get heater profile", + "description": "Get the temperature profile of water heater" + }, + "set_profile": { + "name": "Set heater profile", + "description": "Set the temperature profile of water heater", + "fields": { + "hour_00": { + "name": "00:00", + "description": "00:00 hour" + }, + "hour_01": { + "name": "01:00", + "description": "01:00 hour" + }, + "hour_02": { + "name": "02:00", + "description": "02:00 hour" + }, + "hour_03": { + "name": "03:00", + "description": "03:00 hour" + }, + "hour_04": { + "name": "04:00", + "description": "04:00 hour" + }, + "hour_05": { + "name": "05:00", + "description": "05:00 hour" + }, + "hour_06": { + "name": "06:00", + "description": "06:00 hour" + }, + "hour_07": { + "name": "07:00", + "description": "07:00 hour" + }, + "hour_08": { + "name": "08:00", + "description": "08:00 hour" + }, + "hour_09": { + "name": "09:00", + "description": "09:00 hour" + }, + "hour_10": { + "name": "10:00", + "description": "10:00 hour" + }, + "hour_11": { + "name": "11:00", + "description": "11:00 hour" + }, + "hour_12": { + "name": "12:00", + "description": "12:00 hour" + }, + "hour_13": { + "name": "13:00", + "description": "13:00 hour" + }, + "hour_14": { + "name": "14:00", + "description": "14:00 hour" + }, + "hour_15": { + "name": "15:00", + "description": "15:00 hour" + }, + "hour_16": { + "name": "16:00", + "description": "16:00 hour" + }, + "hour_17": { + "name": "17:00", + "description": "17:00 hour" + }, + "hour_18": { + "name": "18:00", + "description": "18:00 hour" + }, + "hour_19": { + "name": "19:00", + "description": "19:00 hour" + }, + "hour_20": { + "name": "20:00", + "description": "20:00 hour" + }, + "hour_21": { + "name": "21:00", + "description": "21:00 hour" + }, + "hour_22": { + "name": "22:00", + "description": 
"22:00 hour" + }, + "hour_23": { + "name": "23:00", + "description": "23:00 hour" + } + } + }, + "set_v40_min": { + "name": "Set v40 min", + "description": "Set the minimum quantity of water at 40°C for a heater", + "fields": { + "v40_min": { + "name": "V40 Min", + "description": "Minimum quantity of water at 40°C (200-350 for SAGA S200, 300-550 for SAGA S300)" + } + } + }, + "turn_off": { + "name": "Turn off heating", + "description": "Turn off heating for one hour or until min temperature is reached", + "fields": { + "until_temp_limit": { + "name": "Until temperature limit", + "description": "Choose if heating should be off until min temperature (True) is reached or for one hour (False)" + } + } + }, + "turn_on": { + "name": "Turn on heating", + "description": "Turn on heating for one hour or until max temperature is reached", + "fields": { + "until_temp_limit": { + "name": "Until temperature limit", + "description": "Choose if heating should be on until max temperature (True) is reached or for one hour (False)" + } + } + } } } diff --git a/homeassistant/components/osoenergy/water_heater.py b/homeassistant/components/osoenergy/water_heater.py index 55229e42c2f..ff117d6577d 100644 --- a/homeassistant/components/osoenergy/water_heater.py +++ b/homeassistant/components/osoenergy/water_heater.py @@ -1,9 +1,11 @@ """Support for OSO Energy water heaters.""" +import datetime as dt from typing import Any from apyosoenergyapi import OSOEnergy from apyosoenergyapi.helper.const import OSOEnergyWaterHeaterData +import voluptuous as vol from homeassistant.components.water_heater import ( STATE_ECO, @@ -15,12 +17,17 @@ from homeassistant.components.water_heater import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse, SupportsResponse +from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.util.dt as dt_util +from homeassistant.util.json import JsonValueType from .const import DOMAIN from .entity import OSOEnergyEntity +ATTR_UNTIL_TEMP_LIMIT = "until_temp_limit" +ATTR_V40MIN = "v40_min" CURRENT_OPERATION_MAP: dict[str, Any] = { "default": { "off": STATE_OFF, @@ -34,6 +41,11 @@ CURRENT_OPERATION_MAP: dict[str, Any] = { "extraenergy": STATE_HIGH_DEMAND, }, } +SERVICE_GET_PROFILE = "get_profile" +SERVICE_SET_PROFILE = "set_profile" +SERVICE_SET_V40MIN = "set_v40_min" +SERVICE_TURN_OFF = "turn_off" +SERVICE_TURN_ON = "turn_on" async def async_setup_entry( @@ -46,6 +58,102 @@ async def async_setup_entry( return async_add_entities((OSOEnergyWaterHeater(osoenergy, dev) for dev in devices), True) + platform = entity_platform.async_get_current_platform() + + platform.async_register_entity_service( + SERVICE_GET_PROFILE, + {}, + OSOEnergyWaterHeater.async_get_profile.__name__, + supports_response=SupportsResponse.ONLY, + ) + + service_set_profile_schema = cv.make_entity_service_schema( + { + vol.Optional(f"hour_{hour:02d}"): vol.All( + vol.Coerce(int), vol.Range(min=10, max=75) + ) + for hour in range(24) + } + ) + + platform.async_register_entity_service( + SERVICE_SET_PROFILE, + service_set_profile_schema, + OSOEnergyWaterHeater.async_set_profile.__name__, + ) + + platform.async_register_entity_service( + SERVICE_SET_V40MIN, + { + vol.Required(ATTR_V40MIN): vol.All( + vol.Coerce(float), vol.Range(min=200, max=550) + ), + }, + 
OSOEnergyWaterHeater.async_set_v40_min.__name__, + ) + + platform.async_register_entity_service( + SERVICE_TURN_OFF, + {vol.Required(ATTR_UNTIL_TEMP_LIMIT): vol.All(cv.boolean)}, + OSOEnergyWaterHeater.async_oso_turn_off.__name__, + ) + + platform.async_register_entity_service( + SERVICE_TURN_ON, + {vol.Required(ATTR_UNTIL_TEMP_LIMIT): vol.All(cv.boolean)}, + OSOEnergyWaterHeater.async_oso_turn_on.__name__, + ) + + +def _get_utc_hour(local_hour: int) -> dt.datetime: + """Convert the requested local hour to a utc hour for the day. + + Args: + local_hour: the local hour (0-23) for the current day to be converted. + + Returns: + Datetime representation for the requested hour in utc time for the day. + + """ + now = dt_util.now() + local_time = now.replace(hour=local_hour, minute=0, second=0, microsecond=0) + return dt_util.as_utc(local_time) + + +def _get_local_hour(utc_hour: int) -> dt.datetime: + """Convert the requested utc hour to a local hour for the day. + + Args: + utc_hour: the utc hour (0-23) for the current day to be converted. + + Returns: + Datetime representation for the requested hour in local time for the day. + + """ + utc_now = dt_util.utcnow() + utc_time = utc_now.replace(hour=utc_hour, minute=0, second=0, microsecond=0) + return dt_util.as_local(utc_time) + + +def _convert_profile_to_local(values: list[float]) -> list[JsonValueType]: + """Convert UTC profile to local. + + Receives a device temperature schedule - 24 values for the day where the index represents the hour of the day in UTC. + Converts the schedule to local time. + + Args: + values: list of floats representing the 24 hour temperature schedule for the device + Returns: + The device temperature schedule in local time. + + """ + profile: list[JsonValueType] = [0.0] * 24 + for hour in range(24): + local_hour = _get_local_hour(hour) + profile[local_hour.hour] = float(values[hour]) + + return profile + class OSOEnergyWaterHeater( OSOEnergyEntity[OSOEnergyWaterHeaterData], WaterHeaterEntity @@ -53,7 +161,9 @@ class OSOEnergyWaterHeater( """OSO Energy Water Heater Device.""" _attr_name = None - _attr_supported_features = WaterHeaterEntityFeature.TARGET_TEMPERATURE + _attr_supported_features = ( + WaterHeaterEntityFeature.TARGET_TEMPERATURE | WaterHeaterEntityFeature.ON_OFF + ) _attr_temperature_unit = UnitOfTemperature.CELSIUS def __init__( @@ -131,6 +241,36 @@ class OSOEnergyWaterHeater( await self.osoenergy.hotwater.set_profile(self.entity_data, profile) + async def async_get_profile(self) -> ServiceResponse: + """Return the current temperature profile of the device.""" + + profile = self.entity_data.profile + return {"profile": _convert_profile_to_local(profile)} + + async def async_set_profile(self, **kwargs: Any) -> None: + """Handle the service call.""" + profile = self.entity_data.profile + + for hour in range(24): + hour_key = f"hour_{hour:02d}" + + if hour_key in kwargs: + profile[_get_utc_hour(hour).hour] = kwargs[hour_key] + + await self.osoenergy.hotwater.set_profile(self.entity_data, profile) + + async def async_set_v40_min(self, v40_min) -> None: + """Handle the service call.""" + await self.osoenergy.hotwater.set_v40_min(self.entity_data, v40_min) + + async def async_oso_turn_off(self, until_temp_limit) -> None: + """Handle the service call.""" + await self.osoenergy.hotwater.turn_off(self.entity_data, until_temp_limit) + + async def async_oso_turn_on(self, until_temp_limit) -> None: + """Handle the service call.""" + await self.osoenergy.hotwater.turn_on(self.entity_data, until_temp_limit) + async def 
async_update(self) -> None: """Update all Node data from Hive.""" await self.osoenergy.session.update_data() diff --git a/homeassistant/components/osramlightify/light.py b/homeassistant/components/osramlightify/light.py index 0254c478b42..6ddd392af7b 100644 --- a/homeassistant/components/osramlightify/light.py +++ b/homeassistant/components/osramlightify/light.py @@ -11,7 +11,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, @@ -191,10 +191,7 @@ class Luminary(LightEntity): self._effect_list = [] self._is_on = False self._available = True - self._min_mireds = None - self._max_mireds = None self._brightness = None - self._color_temp = None self._rgb_color = None self._device_attributes = None @@ -256,11 +253,6 @@ class Luminary(LightEntity): """Return last hs color value set.""" return color_util.color_RGB_to_hs(*self._rgb_color) - @property - def color_temp(self): - """Return the color temperature.""" - return self._color_temp - @property def brightness(self): """Return brightness of the luminary (0..255).""" @@ -276,16 +268,6 @@ class Luminary(LightEntity): """List of supported effects.""" return self._effect_list - @property - def min_mireds(self): - """Return the coldest color_temp that this light supports.""" - return self._min_mireds - - @property - def max_mireds(self): - """Return the warmest color_temp that this light supports.""" - return self._max_mireds - @property def unique_id(self): """Return a unique ID.""" @@ -326,12 +308,10 @@ class Luminary(LightEntity): self._rgb_color = color_util.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR]) self._luminary.set_rgb(*self._rgb_color, transition) - if ATTR_COLOR_TEMP in kwargs: - self._color_temp = kwargs[ATTR_COLOR_TEMP] - self._luminary.set_temperature( - int(color_util.color_temperature_mired_to_kelvin(self._color_temp)), - transition, - ) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp_kelvin = kwargs[ATTR_COLOR_TEMP_KELVIN] + self._attr_color_temp_kelvin = color_temp_kelvin + self._luminary.set_temperature(color_temp_kelvin, transition) self._is_on = True if ATTR_BRIGHTNESS in kwargs: @@ -362,10 +342,10 @@ class Luminary(LightEntity): self._attr_supported_features = self._get_supported_features() self._effect_list = self._get_effect_list() if ColorMode.COLOR_TEMP in self._attr_supported_color_modes: - self._min_mireds = color_util.color_temperature_kelvin_to_mired( + self._attr_max_color_temp_kelvin = ( self._luminary.max_temp() or DEFAULT_KELVIN ) - self._max_mireds = color_util.color_temperature_kelvin_to_mired( + self._attr_min_color_temp_kelvin = ( self._luminary.min_temp() or DEFAULT_KELVIN ) if len(self._attr_supported_color_modes) == 1: @@ -380,9 +360,7 @@ class Luminary(LightEntity): self._brightness = int(self._luminary.lum() * 2.55) if ColorMode.COLOR_TEMP in self._attr_supported_color_modes: - self._color_temp = color_util.color_temperature_kelvin_to_mired( - self._luminary.temp() or DEFAULT_KELVIN - ) + self._attr_color_temp_kelvin = self._luminary.temp() or DEFAULT_KELVIN if ColorMode.HS in self._attr_supported_color_modes: self._rgb_color = self._luminary.rgb() diff --git a/homeassistant/components/osramlightify/manifest.json b/homeassistant/components/osramlightify/manifest.json index f6a922a09ec..3b11200f1e5 100644 --- a/homeassistant/components/osramlightify/manifest.json +++ b/homeassistant/components/osramlightify/manifest.json @@ -5,5 +5,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/osramlightify", "iot_class": "local_polling", "loggers": ["lightify"], + "quality_scale": "legacy", "requirements": ["lightify==1.0.7.3"] } diff --git a/homeassistant/components/otbr/__init__.py b/homeassistant/components/otbr/__init__.py index 3e53358a162..4b95be1d40d 100644 --- a/homeassistant/components/otbr/__init__.py +++ b/homeassistant/components/otbr/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import logging + import aiohttp import python_otbr_api @@ -14,22 +16,28 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from . import websocket_api -from .const import DATA_OTBR, DOMAIN -from .util import OTBRData, update_issues +from .const import DOMAIN +from .util import ( + GetBorderAgentIdNotSupported, + OTBRData, + update_issues, + update_unique_id, +) + +_LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +type OTBRConfigEntry = ConfigEntry[OTBRData] + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Open Thread Border Router component.""" websocket_api.async_setup(hass) - if len(config_entries := hass.config_entries.async_entries(DOMAIN)): - for config_entry in config_entries[1:]: - await hass.config_entries.async_remove(config_entry.entry_id) return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: OTBRConfigEntry) -> bool: """Set up an Open Thread Border Router config entry.""" api = python_otbr_api.OTBR(entry.data["url"], async_get_clientsession(hass), 10) @@ -38,13 +46,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: border_agent_id = await otbrdata.get_border_agent_id() dataset_tlvs = await otbrdata.get_active_dataset_tlvs() extended_address = await otbrdata.get_extended_address() - except ( - HomeAssistantError, - aiohttp.ClientError, - TimeoutError, - ) as err: - raise ConfigEntryNotReady("Unable to connect") from err - if border_agent_id is None: + except GetBorderAgentIdNotSupported: ir.async_create_issue( hass, DOMAIN, @@ -55,6 +57,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: translation_key="get_get_border_agent_id_unsupported", ) return False + except ( + HomeAssistantError, + aiohttp.ClientError, + TimeoutError, + ) as err: + raise ConfigEntryNotReady("Unable to connect") from err + await update_unique_id(hass, entry, border_agent_id) if dataset_tlvs: await update_issues(hass, otbrdata, dataset_tlvs) await async_add_dataset( @@ -66,18 +75,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) - - hass.data[DATA_OTBR] = otbrdata + entry.runtime_data = otbrdata return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: OTBRConfigEntry) -> bool: """Unload a config entry.""" - hass.data.pop(DATA_OTBR) return True -async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_reload_entry(hass: HomeAssistant, entry: OTBRConfigEntry) -> None: """Handle an options update.""" await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/otbr/config_flow.py b/homeassistant/components/otbr/config_flow.py index 8cffc0a99e6..aff79ca4651 100644 --- 
a/homeassistant/components/otbr/config_flow.py +++ b/homeassistant/components/otbr/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from contextlib import suppress import logging -from typing import cast +from typing import TYPE_CHECKING, cast import aiohttp import python_otbr_api @@ -13,18 +13,15 @@ from python_otbr_api.tlv_parser import MeshcopTLVType import voluptuous as vol import yarl -from homeassistant.components.hassio import ( - HassioAPIError, - HassioServiceInfo, - async_get_addon_info, -) +from homeassistant.components.hassio import AddonError, AddonManager from homeassistant.components.homeassistant_yellow import hardware as yellow_hardware from homeassistant.components.thread import async_get_preferred_dataset from homeassistant.config_entries import SOURCE_HASSIO, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_URL -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import DEFAULT_CHANNEL, DOMAIN from .util import ( @@ -33,9 +30,22 @@ from .util import ( get_allowed_channel, ) +if TYPE_CHECKING: + from . import OTBRConfigEntry + _LOGGER = logging.getLogger(__name__) +class AlreadyConfigured(HomeAssistantError): + """Raised when the router is already configured.""" + + +@callback +def get_addon_manager(hass: HomeAssistant, slug: str) -> AddonManager: + """Get the add-on manager.""" + return AddonManager(hass, _LOGGER, "OpenThread Border Router", slug) + + def _is_yellow(hass: HomeAssistant) -> bool: """Return True if Home Assistant is running on a Home Assistant Yellow.""" try: @@ -48,10 +58,11 @@ def _is_yellow(hass: HomeAssistant) -> bool: async def _title(hass: HomeAssistant, discovery_info: HassioServiceInfo) -> str: """Return config entry title.""" device: str | None = None + addon_manager = get_addon_manager(hass, discovery_info.slug) - with suppress(HassioAPIError): - addon_info = await async_get_addon_info(hass, discovery_info.slug) - device = addon_info.get("options", {}).get("device") + with suppress(AddonError): + addon_info = await addon_manager.async_get_addon_info() + device = addon_info.options.get("device") if _is_yellow(hass) and device == "/dev/ttyAMA1": return f"Home Assistant Yellow ({discovery_info.name})" @@ -70,9 +81,8 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def _connect_and_set_dataset(self, otbr_url: str) -> None: + async def _set_dataset(self, api: python_otbr_api.OTBR, otbr_url: str) -> None: """Connect to the OTBR and create or apply a dataset if it doesn't have one.""" - api = python_otbr_api.OTBR(otbr_url, async_get_clientsession(self.hass), 10) if await api.get_active_dataset_tlvs() is None: allowed_channel = await get_allowed_channel(self.hass, otbr_url) @@ -89,7 +99,9 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): await api.set_active_dataset_tlvs(bytes.fromhex(thread_dataset_tlv)) else: _LOGGER.debug( - "not importing TLV with channel %s", thread_dataset_channel + "not importing TLV with channel %s for %s", + thread_dataset_channel, + otbr_url, ) pan_id = generate_random_pan_id() await api.create_active_dataset( @@ -101,27 +113,65 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): ) await api.set_enabled(True) + async def _is_border_agent_id_configured(self, border_agent_id: bytes) -> bool: + """Return True if 
another config entry's OTBR has the same border agent id.""" + config_entry: OTBRConfigEntry + for config_entry in self.hass.config_entries.async_loaded_entries(DOMAIN): + data = config_entry.runtime_data + try: + other_border_agent_id = await data.get_border_agent_id() + except HomeAssistantError: + _LOGGER.debug( + "Could not read border agent id from %s", data.url, exc_info=True + ) + continue + _LOGGER.debug( + "border agent id for existing url %s: %s", + data.url, + other_border_agent_id.hex(), + ) + if border_agent_id == other_border_agent_id: + return True + return False + + async def _connect_and_configure_router(self, otbr_url: str) -> bytes: + """Connect to the router and configure it if needed. + + Will raise if the router's border agent id is in use by another config entry. + Returns the router's border agent id. + """ + api = python_otbr_api.OTBR(otbr_url, async_get_clientsession(self.hass), 10) + border_agent_id = await api.get_border_agent_id() + _LOGGER.debug("border agent id for url %s: %s", otbr_url, border_agent_id.hex()) + + if await self._is_border_agent_id_configured(border_agent_id): + raise AlreadyConfigured + + await self._set_dataset(api, otbr_url) + + return border_agent_id + async def async_step_user( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Set up by user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: url = user_input[CONF_URL].rstrip("/") try: - await self._connect_and_set_dataset(url) + border_agent_id = await self._connect_and_configure_router(url) + except AlreadyConfigured: + errors["base"] = "already_configured" except ( python_otbr_api.OTBRError, aiohttp.ClientError, TimeoutError, - ): + ) as exc: + _LOGGER.debug("Failed to communicate with OTBR@%s: %s", url, exc) errors["base"] = "cannot_connect" else: - await self.async_set_unique_id(DOMAIN) + await self.async_set_unique_id(border_agent_id.hex()) return self.async_create_entry( title="Open Thread Border Router", data={CONF_URL: url}, @@ -140,34 +190,35 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): url = f"http://{config['host']}:{config['port']}" config_entry_data = {"url": url} - if self._async_in_progress(include_uninitialized=True): - # We currently don't handle multiple config entries, abort if hassio - # discovers multiple addons with otbr support - return self.async_abort(reason="single_instance_allowed") - if current_entries := self._async_current_entries(): for current_entry in current_entries: if current_entry.source != SOURCE_HASSIO: continue current_url = yarl.URL(current_entry.data["url"]) - if ( + if not (unique_id := current_entry.unique_id): # The first version did not set a unique_id # so if the entry does not have a unique_id # we have to assume it's the first version - current_entry.unique_id - and (current_entry.unique_id != discovery_info.uuid) + # This check can be removed in HA Core 2025.9 + unique_id = discovery_info.uuid + if ( + unique_id != discovery_info.uuid or current_url.host != config["host"] or current_url.port == config["port"] ): continue # Update URL with the new port self.hass.config_entries.async_update_entry( - current_entry, data=config_entry_data + current_entry, + data=config_entry_data, + unique_id=unique_id, # Remove in HA Core 2025.9 ) - return self.async_abort(reason="single_instance_allowed") + return self.async_abort(reason="already_configured") try: - await self._connect_and_set_dataset(url) + await 
self._connect_and_configure_router(url) + except AlreadyConfigured: + return self.async_abort(reason="already_configured") except ( python_otbr_api.OTBRError, aiohttp.ClientError, diff --git a/homeassistant/components/otbr/const.py b/homeassistant/components/otbr/const.py index cf1678466a4..c38b3cc1250 100644 --- a/homeassistant/components/otbr/const.py +++ b/homeassistant/components/otbr/const.py @@ -2,14 +2,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING - -from homeassistant.util.hass_dict import HassKey - -if TYPE_CHECKING: - from .util import OTBRData - DOMAIN = "otbr" -DATA_OTBR: HassKey[OTBRData] = HassKey(DOMAIN) DEFAULT_CHANNEL = 15 diff --git a/homeassistant/components/otbr/silabs_multiprotocol.py b/homeassistant/components/otbr/silabs_multiprotocol.py index b3a711968fd..d97e6811e6d 100644 --- a/homeassistant/components/otbr/silabs_multiprotocol.py +++ b/homeassistant/components/otbr/silabs_multiprotocol.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable, Coroutine from functools import wraps import logging -from typing import Any, Concatenate +from typing import TYPE_CHECKING, Any, Concatenate import aiohttp from python_otbr_api import tlv_parser @@ -18,9 +18,12 @@ from homeassistant.components.thread import async_add_dataset from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .const import DATA_OTBR, DOMAIN +from .const import DOMAIN from .util import OTBRData +if TYPE_CHECKING: + from . import OTBRConfigEntry + _LOGGER = logging.getLogger(__name__) @@ -45,15 +48,13 @@ def async_get_otbr_data[**_P, _R, _R_Def]( hass: HomeAssistant, *args: _P.args, **kwargs: _P.kwargs ) -> _R | _R_Def: """Fetch OTBR data and pass to orig_func.""" - if DATA_OTBR not in hass.data: - return retval + config_entry: OTBRConfigEntry + for config_entry in hass.config_entries.async_loaded_entries(DOMAIN): + data = config_entry.runtime_data + if is_multiprotocol_url(data.url): + return await orig_func(hass, data, *args, **kwargs) - data = hass.data[DATA_OTBR] - - if not is_multiprotocol_url(data.url): - return retval - - return await orig_func(hass, data, *args, **kwargs) + return retval return async_get_otbr_data_wrapper diff --git a/homeassistant/components/otbr/strings.json b/homeassistant/components/otbr/strings.json index 838ebeb5b8c..e1afa5b8909 100644 --- a/homeassistant/components/otbr/strings.json +++ b/homeassistant/components/otbr/strings.json @@ -9,10 +9,13 @@ } }, "error": { + "already_configured": "The Thread border router is already configured", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "already_configured": "The Thread border router is already configured", + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "issues": { diff --git a/homeassistant/components/otbr/util.py b/homeassistant/components/otbr/util.py index d426ca9ba17..351e23c7736 100644 --- a/homeassistant/components/otbr/util.py +++ b/homeassistant/components/otbr/util.py @@ -7,7 +7,7 @@ import dataclasses from functools import wraps import logging import random -from typing import Any, Concatenate, cast +from typing import TYPE_CHECKING, Any, Concatenate, cast import aiohttp import python_otbr_api @@ -22,12 +22,16 @@ from 
homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon multi_pan_addon_using_device, ) from homeassistant.components.homeassistant_yellow import RADIO_DEVICE as YELLOW_RADIO +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir from .const import DOMAIN +if TYPE_CHECKING: + from . import OTBRConfigEntry + _LOGGER = logging.getLogger(__name__) INFO_URL_SKY_CONNECT = ( @@ -48,6 +52,10 @@ INSECURE_PASSPHRASES = ( ) +class GetBorderAgentIdNotSupported(HomeAssistantError): + """Raised from python_otbr_api.GetBorderAgentIdNotSupportedError.""" + + def compose_default_network_name(pan_id: int) -> str: """Generate a default network name.""" return f"ha-thread-{pan_id:04x}" @@ -83,7 +91,7 @@ class OTBRData: entry_id: str @_handle_otbr_error - async def factory_reset(self) -> None: + async def factory_reset(self, hass: HomeAssistant) -> None: """Reset the router.""" try: await self.api.factory_reset() @@ -92,14 +100,19 @@ class OTBRData: "OTBR does not support factory reset, attempting to delete dataset" ) await self.delete_active_dataset() + await update_unique_id( + hass, + hass.config_entries.async_get_entry(self.entry_id), + await self.get_border_agent_id(), + ) @_handle_otbr_error - async def get_border_agent_id(self) -> bytes | None: + async def get_border_agent_id(self) -> bytes: """Get the border agent ID or None if not supported by the router.""" try: return await self.api.get_border_agent_id() - except python_otbr_api.GetBorderAgentIdNotSupportedError: - return None + except python_otbr_api.GetBorderAgentIdNotSupportedError as exc: + raise GetBorderAgentIdNotSupported from exc @_handle_otbr_error async def set_enabled(self, enabled: bool) -> None: @@ -258,3 +271,18 @@ async def update_issues( """Raise or clear repair issues related to network settings.""" await _warn_on_channel_collision(hass, otbrdata, dataset_tlvs) _warn_on_default_network_settings(hass, otbrdata, dataset_tlvs) + + +async def update_unique_id( + hass: HomeAssistant, entry: OTBRConfigEntry | None, border_agent_id: bytes +) -> None: + """Update the config entry's unique_id if not matching.""" + border_agent_id_hex = border_agent_id.hex() + if entry and entry.source == SOURCE_USER and entry.unique_id != border_agent_id_hex: + _LOGGER.debug( + "Updating unique_id of entry %s from %s to %s", + entry.entry_id, + entry.unique_id, + border_agent_id_hex, + ) + hass.config_entries.async_update_entry(entry, unique_id=border_agent_id_hex) diff --git a/homeassistant/components/otbr/websocket_api.py b/homeassistant/components/otbr/websocket_api.py index 577f9cc381d..2bcd0da8f16 100644 --- a/homeassistant/components/otbr/websocket_api.py +++ b/homeassistant/components/otbr/websocket_api.py @@ -2,7 +2,7 @@ from collections.abc import Callable, Coroutine from functools import wraps -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast import python_otbr_api from python_otbr_api import PENDING_DATASET_DELAY_TIMER, tlv_parser @@ -17,7 +17,7 @@ from homeassistant.components.thread import async_add_dataset, async_get_dataset from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from .const import DATA_OTBR, DEFAULT_CHANNEL, DOMAIN +from .const import DEFAULT_CHANNEL, DOMAIN from .util import ( OTBRData, compose_default_network_name, @@ -26,6 +26,9 @@ from .util import ( update_issues, ) +if 
TYPE_CHECKING: + from . import OTBRConfigEntry + @callback def async_setup(hass: HomeAssistant) -> None: @@ -47,41 +50,45 @@ async def websocket_info( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Get OTBR info.""" - if DATA_OTBR not in hass.data: + config_entries: list[OTBRConfigEntry] + config_entries = hass.config_entries.async_loaded_entries(DOMAIN) + + if not config_entries: connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded") return - data = hass.data[DATA_OTBR] + response: dict[str, dict[str, Any]] = {} - try: - border_agent_id = await data.get_border_agent_id() - dataset = await data.get_active_dataset() - dataset_tlvs = await data.get_active_dataset_tlvs() - extended_address = (await data.get_extended_address()).hex() - except HomeAssistantError as exc: - connection.send_error(msg["id"], "otbr_info_failed", str(exc)) - return + for config_entry in config_entries: + data = config_entry.runtime_data + try: + border_agent_id = await data.get_border_agent_id() + dataset = await data.get_active_dataset() + dataset_tlvs = await data.get_active_dataset_tlvs() + extended_address = (await data.get_extended_address()).hex() + except HomeAssistantError as exc: + connection.send_error(msg["id"], "otbr_info_failed", str(exc)) + return - # The border agent ID is checked when the OTBR config entry is setup, - # we can assert it's not None - assert border_agent_id is not None + # The border agent ID is checked when the OTBR config entry is setup, + # we can assert it's not None + assert border_agent_id is not None - extended_pan_id = ( - dataset.extended_pan_id.lower() if dataset and dataset.extended_pan_id else None - ) - connection.send_result( - msg["id"], - { - extended_address: { - "active_dataset_tlvs": dataset_tlvs.hex() if dataset_tlvs else None, - "border_agent_id": border_agent_id.hex(), - "channel": dataset.channel if dataset else None, - "extended_address": extended_address, - "extended_pan_id": extended_pan_id, - "url": data.url, - } - }, - ) + extended_pan_id = ( + dataset.extended_pan_id.lower() + if dataset and dataset.extended_pan_id + else None + ) + response[extended_address] = { + "active_dataset_tlvs": dataset_tlvs.hex() if dataset_tlvs else None, + "border_agent_id": border_agent_id.hex(), + "channel": dataset.channel if dataset else None, + "extended_address": extended_address, + "extended_pan_id": extended_pan_id, + "url": data.url, + } + + connection.send_result(msg["id"], response) def async_get_otbr_data( @@ -99,22 +106,29 @@ def async_get_otbr_data( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Fetch OTBR data and pass to orig_func.""" - if DATA_OTBR not in hass.data: + config_entries: list[OTBRConfigEntry] + config_entries = hass.config_entries.async_loaded_entries(DOMAIN) + + if not config_entries: connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded") return - data = hass.data[DATA_OTBR] + for config_entry in config_entries: + data = config_entry.runtime_data + try: + extended_address = await data.get_extended_address() + except HomeAssistantError as exc: + connection.send_error( + msg["id"], "get_extended_address_failed", str(exc) + ) + return + if extended_address.hex() != msg["extended_address"]: + continue - try: - extended_address = await data.get_extended_address() - except HomeAssistantError as exc: - connection.send_error(msg["id"], "get_extended_address_failed", str(exc)) - return - if extended_address.hex() != msg["extended_address"]: - 
connection.send_error(msg["id"], "unknown_router", "") + await orig_func(hass, connection, msg, data) return - await orig_func(hass, connection, msg, data) + connection.send_error(msg["id"], "unknown_router", "") return async_check_extended_address_func @@ -144,7 +158,7 @@ async def websocket_create_network( return try: - await data.factory_reset() + await data.factory_reset(hass) except HomeAssistantError as exc: connection.send_error(msg["id"], "factory_reset_failed", str(exc)) return diff --git a/homeassistant/components/otp/config_flow.py b/homeassistant/components/otp/config_flow.py index 6aa4532683a..33f63a04d68 100644 --- a/homeassistant/components/otp/config_flow.py +++ b/homeassistant/components/otp/config_flow.py @@ -82,15 +82,15 @@ class TOTPConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from yaml.""" - await self.async_set_unique_id(import_info[CONF_TOKEN]) + await self.async_set_unique_id(import_data[CONF_TOKEN]) self._abort_if_unique_id_configured() return self.async_create_entry( - title=import_info.get(CONF_NAME, DEFAULT_NAME), - data=import_info, + title=import_data.get(CONF_NAME, DEFAULT_NAME), + data=import_data, ) async def async_step_confirm( diff --git a/homeassistant/components/overkiz/alarm_control_panel.py b/homeassistant/components/overkiz/alarm_control_panel.py index 151f91790cf..bdbf4d0cc8d 100644 --- a/homeassistant/components/overkiz/alarm_control_panel.py +++ b/homeassistant/components/overkiz/alarm_control_panel.py @@ -14,18 +14,10 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityDescription, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, - Platform, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -41,7 +33,7 @@ class OverkizAlarmDescription(AlarmControlPanelEntityDescription): """Class to describe an Overkiz alarm control panel.""" supported_features: AlarmControlPanelEntityFeature - fn_state: Callable[[Callable[[str], OverkizStateType]], str] + fn_state: Callable[[Callable[[str], OverkizStateType]], AlarmControlPanelState] alarm_disarm: str | None = None alarm_disarm_args: OverkizStateType | list[OverkizStateType] = None @@ -55,42 +47,44 @@ class OverkizAlarmDescription(AlarmControlPanelEntityDescription): alarm_trigger_args: OverkizStateType | list[OverkizStateType] = None -MAP_INTERNAL_STATUS_STATE: dict[str, str] = { - OverkizCommandParam.OFF: STATE_ALARM_DISARMED, - OverkizCommandParam.ZONE_1: STATE_ALARM_ARMED_HOME, - OverkizCommandParam.ZONE_2: STATE_ALARM_ARMED_NIGHT, - OverkizCommandParam.TOTAL: STATE_ALARM_ARMED_AWAY, +MAP_INTERNAL_STATUS_STATE: dict[str, AlarmControlPanelState] = { + OverkizCommandParam.OFF: AlarmControlPanelState.DISARMED, + OverkizCommandParam.ZONE_1: AlarmControlPanelState.ARMED_HOME, + OverkizCommandParam.ZONE_2: AlarmControlPanelState.ARMED_NIGHT, + OverkizCommandParam.TOTAL: AlarmControlPanelState.ARMED_AWAY, } -def 
_state_tsk_alarm_controller(select_state: Callable[[str], OverkizStateType]) -> str: +def _state_tsk_alarm_controller( + select_state: Callable[[str], OverkizStateType], +) -> AlarmControlPanelState: """Return the state of the device.""" if ( cast(str, select_state(OverkizState.INTERNAL_INTRUSION_DETECTED)) == OverkizCommandParam.DETECTED ): - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED if cast(str, select_state(OverkizState.INTERNAL_CURRENT_ALARM_MODE)) != cast( str, select_state(OverkizState.INTERNAL_TARGET_ALARM_MODE) ): - return STATE_ALARM_PENDING + return AlarmControlPanelState.PENDING return MAP_INTERNAL_STATUS_STATE[ cast(str, select_state(OverkizState.INTERNAL_TARGET_ALARM_MODE)) ] -MAP_CORE_ACTIVE_ZONES: dict[str, str] = { - OverkizCommandParam.A: STATE_ALARM_ARMED_HOME, - f"{OverkizCommandParam.A},{OverkizCommandParam.B}": STATE_ALARM_ARMED_NIGHT, - f"{OverkizCommandParam.A},{OverkizCommandParam.B},{OverkizCommandParam.C}": STATE_ALARM_ARMED_AWAY, +MAP_CORE_ACTIVE_ZONES: dict[str, AlarmControlPanelState] = { + OverkizCommandParam.A: AlarmControlPanelState.ARMED_HOME, + f"{OverkizCommandParam.A},{OverkizCommandParam.B}": AlarmControlPanelState.ARMED_NIGHT, + f"{OverkizCommandParam.A},{OverkizCommandParam.B},{OverkizCommandParam.C}": AlarmControlPanelState.ARMED_AWAY, } def _state_stateful_alarm_controller( select_state: Callable[[str], OverkizStateType], -) -> str: +) -> AlarmControlPanelState: """Return the state of the device.""" if state := cast(str, select_state(OverkizState.CORE_ACTIVE_ZONES)): # The Stateful Alarm Controller has 3 zones with the following options: @@ -99,44 +93,44 @@ def _state_stateful_alarm_controller( if state in MAP_CORE_ACTIVE_ZONES: return MAP_CORE_ACTIVE_ZONES[state] - return STATE_ALARM_ARMED_CUSTOM_BYPASS + return AlarmControlPanelState.ARMED_CUSTOM_BYPASS - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED -MAP_MYFOX_STATUS_STATE: dict[str, str] = { - OverkizCommandParam.ARMED: STATE_ALARM_ARMED_AWAY, - OverkizCommandParam.DISARMED: STATE_ALARM_DISARMED, - OverkizCommandParam.PARTIAL: STATE_ALARM_ARMED_NIGHT, +MAP_MYFOX_STATUS_STATE: dict[str, AlarmControlPanelState] = { + OverkizCommandParam.ARMED: AlarmControlPanelState.ARMED_AWAY, + OverkizCommandParam.DISARMED: AlarmControlPanelState.DISARMED, + OverkizCommandParam.PARTIAL: AlarmControlPanelState.ARMED_NIGHT, } def _state_myfox_alarm_controller( select_state: Callable[[str], OverkizStateType], -) -> str: +) -> AlarmControlPanelState: """Return the state of the device.""" if ( cast(str, select_state(OverkizState.CORE_INTRUSION)) == OverkizCommandParam.DETECTED ): - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED return MAP_MYFOX_STATUS_STATE[ cast(str, select_state(OverkizState.MYFOX_ALARM_STATUS)) ] -MAP_ARM_TYPE: dict[str, str] = { - OverkizCommandParam.DISARMED: STATE_ALARM_DISARMED, - OverkizCommandParam.ARMED_DAY: STATE_ALARM_ARMED_HOME, - OverkizCommandParam.ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT, - OverkizCommandParam.ARMED: STATE_ALARM_ARMED_AWAY, +MAP_ARM_TYPE: dict[str, AlarmControlPanelState] = { + OverkizCommandParam.DISARMED: AlarmControlPanelState.DISARMED, + OverkizCommandParam.ARMED_DAY: AlarmControlPanelState.ARMED_HOME, + OverkizCommandParam.ARMED_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + OverkizCommandParam.ARMED: AlarmControlPanelState.ARMED_AWAY, } def _state_alarm_panel_controller( select_state: Callable[[str], OverkizStateType], -) -> str: +) -> AlarmControlPanelState: """Return the state of the 
device.""" return MAP_ARM_TYPE[ cast(str, select_state(OverkizState.VERISURE_ALARM_PANEL_MAIN_ARM_TYPE)) @@ -254,7 +248,7 @@ class OverkizAlarmControlPanel(OverkizDescriptiveEntity, AlarmControlPanelEntity self._attr_supported_features = self.entity_description.supported_features @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the device.""" return self.entity_description.fn_state(self.executor.select_state) diff --git a/homeassistant/components/overkiz/climate.py b/homeassistant/components/overkiz/climate.py deleted file mode 100644 index 1663834abee..00000000000 --- a/homeassistant/components/overkiz/climate.py +++ /dev/null @@ -1,62 +0,0 @@ -"""Support for Overkiz climate devices.""" - -from __future__ import annotations - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . import HomeAssistantOverkizData -from .climate_entities import ( - WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY, - WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY, - WIDGET_TO_CLIMATE_ENTITY, -) -from .const import DOMAIN - - -async def async_setup_entry( - hass: HomeAssistant, - entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the Overkiz climate from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] - - # Match devices based on the widget. - entities_based_on_widget: list[Entity] = [ - WIDGET_TO_CLIMATE_ENTITY[device.widget](device.device_url, data.coordinator) - for device in data.platforms[Platform.CLIMATE] - if device.widget in WIDGET_TO_CLIMATE_ENTITY - ] - - # Match devices based on the widget and controllableName. - # ie Atlantic APC - entities_based_on_widget_and_controllable: list[Entity] = [ - WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY[device.widget][ - device.controllable_name - ](device.device_url, data.coordinator) - for device in data.platforms[Platform.CLIMATE] - if device.widget in WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY - and device.controllable_name - in WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY[device.widget] - ] - - # Match devices based on the widget and protocol. 
- # #ie Hitachi Air To Air Heat Pumps - entities_based_on_widget_and_protocol: list[Entity] = [ - WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY[device.widget][device.protocol]( - device.device_url, data.coordinator - ) - for device in data.platforms[Platform.CLIMATE] - if device.widget in WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY - and device.protocol in WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY[device.widget] - ] - - async_add_entities( - entities_based_on_widget - + entities_based_on_widget_and_controllable - + entities_based_on_widget_and_protocol - ) diff --git a/homeassistant/components/overkiz/climate_entities/__init__.py b/homeassistant/components/overkiz/climate/__init__.py similarity index 59% rename from homeassistant/components/overkiz/climate_entities/__init__.py rename to homeassistant/components/overkiz/climate/__init__.py index ac864686432..97840df7a41 100644 --- a/homeassistant/components/overkiz/climate_entities/__init__.py +++ b/homeassistant/components/overkiz/climate/__init__.py @@ -1,10 +1,20 @@ """Climate entities for the Overkiz (by Somfy) integration.""" +from __future__ import annotations + from enum import StrEnum, unique from pyoverkiz.enums import Protocol from pyoverkiz.enums.ui import UIWidget +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .. import HomeAssistantOverkizData +from ..const import DOMAIN from .atlantic_electrical_heater import AtlanticElectricalHeater from .atlantic_electrical_heater_with_adjustable_temperature_setpoint import ( AtlanticElectricalHeaterWithAdjustableTemperatureSetpoint, @@ -65,3 +75,48 @@ WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY = { Protocol.OVP: HitachiAirToAirHeatPumpOVP, }, } + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Overkiz climate from a config entry.""" + data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + + # Match devices based on the widget. + entities_based_on_widget: list[Entity] = [ + WIDGET_TO_CLIMATE_ENTITY[device.widget](device.device_url, data.coordinator) + for device in data.platforms[Platform.CLIMATE] + if device.widget in WIDGET_TO_CLIMATE_ENTITY + ] + + # Match devices based on the widget and controllableName. + # ie Atlantic APC + entities_based_on_widget_and_controllable: list[Entity] = [ + WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY[device.widget][ + device.controllable_name # type: ignore[index] + ](device.device_url, data.coordinator) + for device in data.platforms[Platform.CLIMATE] + if device.widget in WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY + and device.controllable_name + in WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY[device.widget] + ] + + # Match devices based on the widget and protocol. 
+ # #ie Hitachi Air To Air Heat Pumps + entities_based_on_widget_and_protocol: list[Entity] = [ + WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY[device.widget][device.protocol]( + device.device_url, data.coordinator + ) + for device in data.platforms[Platform.CLIMATE] + if device.widget in WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY + and device.protocol in WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY[device.widget] + ] + + async_add_entities( + entities_based_on_widget + + entities_based_on_widget_and_controllable + + entities_based_on_widget_and_protocol + ) diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_electrical_heater.py b/homeassistant/components/overkiz/climate/atlantic_electrical_heater.py similarity index 97% rename from homeassistant/components/overkiz/climate_entities/atlantic_electrical_heater.py rename to homeassistant/components/overkiz/climate/atlantic_electrical_heater.py index ce9857f9d8c..059e64ef55d 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_electrical_heater.py +++ b/homeassistant/components/overkiz/climate/atlantic_electrical_heater.py @@ -54,7 +54,6 @@ class AtlanticElectricalHeater(OverkizEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py b/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py similarity index 98% rename from homeassistant/components/overkiz/climate_entities/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py rename to homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py index 64a7dc1e645..93c7d03293b 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py +++ b/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py @@ -76,7 +76,6 @@ class AtlanticElectricalHeaterWithAdjustableTemperatureSetpoint( | ClimateEntityFeature.TURN_ON ) _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_electrical_towel_dryer.py b/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py similarity index 98% rename from homeassistant/components/overkiz/climate_entities/atlantic_electrical_towel_dryer.py rename to homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py index e49fc4358e9..92bd6ceae82 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_electrical_towel_dryer.py +++ b/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py @@ -46,7 +46,6 @@ class AtlanticElectricalTowelDryer(OverkizEntity, ClimateEntity): _attr_preset_modes = [*PRESET_MODE_TO_OVERKIZ] _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_heat_recovery_ventilation.py b/homeassistant/components/overkiz/climate/atlantic_heat_recovery_ventilation.py 
similarity index 99% rename from homeassistant/components/overkiz/climate_entities/atlantic_heat_recovery_ventilation.py rename to homeassistant/components/overkiz/climate/atlantic_heat_recovery_ventilation.py index f1d96b5687b..bb84fa76f22 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_heat_recovery_ventilation.py +++ b/homeassistant/components/overkiz/climate/atlantic_heat_recovery_ventilation.py @@ -55,7 +55,6 @@ class AtlanticHeatRecoveryVentilation(OverkizEntity, ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heat_pump_main_component.py b/homeassistant/components/overkiz/climate/atlantic_pass_apc_heat_pump_main_component.py similarity index 97% rename from homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heat_pump_main_component.py rename to homeassistant/components/overkiz/climate/atlantic_pass_apc_heat_pump_main_component.py index 1cd13205b13..800516e4bda 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heat_pump_main_component.py +++ b/homeassistant/components/overkiz/climate/atlantic_pass_apc_heat_pump_main_component.py @@ -41,7 +41,6 @@ class AtlanticPassAPCHeatPumpMainComponent(OverkizEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heating_zone.py b/homeassistant/components/overkiz/climate/atlantic_pass_apc_heating_zone.py similarity index 99% rename from homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heating_zone.py rename to homeassistant/components/overkiz/climate/atlantic_pass_apc_heating_zone.py index 3da2ccc922b..3df31fb44fc 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heating_zone.py +++ b/homeassistant/components/overkiz/climate/atlantic_pass_apc_heating_zone.py @@ -92,7 +92,6 @@ class AtlanticPassAPCHeatingZone(OverkizEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control.py b/homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control.py similarity index 98% rename from homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control.py rename to homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control.py index 7fbab821b8d..7846b058619 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control.py +++ b/homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control.py @@ -31,7 +31,6 @@ class AtlanticPassAPCZoneControl(OverkizEntity, ClimateEntity): _attr_supported_features = ( ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py 
b/homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control_zone.py similarity index 99% rename from homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py rename to homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control_zone.py index 9027dcf8d03..5ba9dabe038 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py +++ b/homeassistant/components/overkiz/climate/atlantic_pass_apc_zone_control_zone.py @@ -3,9 +3,9 @@ from __future__ import annotations from asyncio import sleep -from functools import cached_property from typing import Any, cast +from propcache import cached_property from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState from homeassistant.components.climate import ( diff --git a/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_hlrrwifi.py b/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_hlrrwifi.py similarity index 99% rename from homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_hlrrwifi.py rename to homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_hlrrwifi.py index efdae2165a9..41da90f1ce8 100644 --- a/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_hlrrwifi.py +++ b/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_hlrrwifi.py @@ -91,7 +91,6 @@ class HitachiAirToAirHeatPumpHLRRWIFI(OverkizEntity, ClimateEntity): _attr_target_temperature_step = 1.0 _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py b/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_ovp.py similarity index 99% rename from homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py rename to homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_ovp.py index b31ecf91ec0..f60cbbeca2b 100644 --- a/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_ovp.py +++ b/homeassistant/components/overkiz/climate/hitachi_air_to_air_heat_pump_ovp.py @@ -95,7 +95,6 @@ class HitachiAirToAirHeatPumpOVP(OverkizEntity, ClimateEntity): _attr_target_temperature_step = 1.0 _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py b/homeassistant/components/overkiz/climate/somfy_heating_temperature_interface.py similarity index 99% rename from homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py rename to homeassistant/components/overkiz/climate/somfy_heating_temperature_interface.py index acc761664ec..5ca17f9b6b1 100644 --- a/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py +++ b/homeassistant/components/overkiz/climate/somfy_heating_temperature_interface.py @@ -82,7 +82,6 @@ class SomfyHeatingTemperatureInterface(OverkizEntity, ClimateEntity): # Both min and max temp values have been retrieved from the Somfy Application. 
_attr_min_temp = 15.0 _attr_max_temp = 26.0 - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/climate_entities/somfy_thermostat.py b/homeassistant/components/overkiz/climate/somfy_thermostat.py similarity index 99% rename from homeassistant/components/overkiz/climate_entities/somfy_thermostat.py rename to homeassistant/components/overkiz/climate/somfy_thermostat.py index 829a3bad03b..66a04af4e7a 100644 --- a/homeassistant/components/overkiz/climate_entities/somfy_thermostat.py +++ b/homeassistant/components/overkiz/climate/somfy_thermostat.py @@ -65,7 +65,6 @@ class SomfyThermostat(OverkizEntity, ClimateEntity): _attr_hvac_modes = [*HVAC_MODES_TO_OVERKIZ] _attr_preset_modes = [*PRESET_MODES_TO_OVERKIZ] _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False # Both min and max temp values have been retrieved from the Somfy Application. _attr_min_temp = 15.0 diff --git a/homeassistant/components/overkiz/climate_entities/valve_heating_temperature_interface.py b/homeassistant/components/overkiz/climate/valve_heating_temperature_interface.py similarity index 98% rename from homeassistant/components/overkiz/climate_entities/valve_heating_temperature_interface.py rename to homeassistant/components/overkiz/climate/valve_heating_temperature_interface.py index e2165e8b6c6..54c00b33167 100644 --- a/homeassistant/components/overkiz/climate_entities/valve_heating_temperature_interface.py +++ b/homeassistant/components/overkiz/climate/valve_heating_temperature_interface.py @@ -56,7 +56,6 @@ class ValveHeatingTemperatureInterface(OverkizEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator diff --git a/homeassistant/components/overkiz/config_flow.py b/homeassistant/components/overkiz/config_flow.py index 79a8328f874..471a13d0de2 100644 --- a/homeassistant/components/overkiz/config_flow.py +++ b/homeassistant/components/overkiz/config_flow.py @@ -24,7 +24,7 @@ from pyoverkiz.utils import generate_local_server, is_overkiz_gateway import voluptuous as vol from homeassistant.components import dhcp, zeroconf -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -47,7 +47,6 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None _api_type: APIType = APIType.CLOUD _user: str | None = None _server: str = DEFAULT_SERVER @@ -174,27 +173,13 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" LOGGER.exception("Unknown error") else: - if self._reauth_entry: - if self._reauth_entry.unique_id != self.unique_id: - return self.async_abort(reason="reauth_wrong_account") + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="reauth_wrong_account") - # Update existing entry during reauth - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data={ - **self._reauth_entry.data, - **user_input, - }, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - self.hass.async_create_task( - self.hass.config_entries.async_reload( - 
self._reauth_entry.entry_id - ) - ) - - return self.async_abort(reason="reauth_successful") - # Create new entry self._abort_if_unique_id_configured() @@ -257,27 +242,13 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" LOGGER.exception("Unknown error") else: - if self._reauth_entry: - if self._reauth_entry.unique_id != self.unique_id: - return self.async_abort(reason="reauth_wrong_account") + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="reauth_wrong_account") - # Update existing entry during reauth - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data={ - **self._reauth_entry.data, - **user_input, - }, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - self.hass.async_create_task( - self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) - ) - - return self.async_abort(reason="reauth_successful") - # Create new entry self._abort_if_unique_id_configured() @@ -346,21 +317,15 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth.""" - self._reauth_entry = cast( - ConfigEntry, - self.hass.config_entries.async_get_entry(self.context["entry_id"]), - ) + # overkiz entries always have unique IDs + self.context["title_placeholders"] = {"gateway_id": cast(str, self.unique_id)} - self.context["title_placeholders"] = { - "gateway_id": self._reauth_entry.unique_id - } - - self._user = self._reauth_entry.data[CONF_USERNAME] - self._server = self._reauth_entry.data[CONF_HUB] - self._api_type = self._reauth_entry.data.get(CONF_API_TYPE, APIType.CLOUD) + self._user = entry_data[CONF_USERNAME] + self._server = entry_data[CONF_HUB] + self._api_type = entry_data.get(CONF_API_TYPE, APIType.CLOUD) if self._api_type == APIType.LOCAL: - self._host = self._reauth_entry.data[CONF_HOST] + self._host = entry_data[CONF_HOST] return await self.async_step_user(dict(entry_data)) diff --git a/homeassistant/components/overkiz/cover.py b/homeassistant/components/overkiz/cover/__init__.py similarity index 83% rename from homeassistant/components/overkiz/cover.py rename to homeassistant/components/overkiz/cover/__init__.py index 51d2c9f2334..f9df3256253 100644 --- a/homeassistant/components/overkiz/cover.py +++ b/homeassistant/components/overkiz/cover/__init__.py @@ -7,11 +7,11 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN -from .cover_entities.awning import Awning -from .cover_entities.generic_cover import OverkizGenericCover -from .cover_entities.vertical_cover import LowSpeedCover, VerticalCover +from .. 
import HomeAssistantOverkizData +from ..const import DOMAIN +from .awning import Awning +from .generic_cover import OverkizGenericCover +from .vertical_cover import LowSpeedCover, VerticalCover async def async_setup_entry( diff --git a/homeassistant/components/overkiz/cover_entities/awning.py b/homeassistant/components/overkiz/cover/awning.py similarity index 100% rename from homeassistant/components/overkiz/cover_entities/awning.py rename to homeassistant/components/overkiz/cover/awning.py diff --git a/homeassistant/components/overkiz/cover_entities/generic_cover.py b/homeassistant/components/overkiz/cover/generic_cover.py similarity index 100% rename from homeassistant/components/overkiz/cover_entities/generic_cover.py rename to homeassistant/components/overkiz/cover/generic_cover.py diff --git a/homeassistant/components/overkiz/cover_entities/vertical_cover.py b/homeassistant/components/overkiz/cover/vertical_cover.py similarity index 100% rename from homeassistant/components/overkiz/cover_entities/vertical_cover.py rename to homeassistant/components/overkiz/cover/vertical_cover.py diff --git a/homeassistant/components/overkiz/cover_entities/__init__.py b/homeassistant/components/overkiz/cover_entities/__init__.py deleted file mode 100644 index 930202450d4..00000000000 --- a/homeassistant/components/overkiz/cover_entities/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Cover entities for the Overkiz (by Somfy) integration.""" diff --git a/homeassistant/components/overkiz/executor.py b/homeassistant/components/overkiz/executor.py index 94b2c1b25fa..02829eaf1a3 100644 --- a/homeassistant/components/overkiz/executor.py +++ b/homeassistant/components/overkiz/executor.py @@ -81,8 +81,14 @@ class OverkizExecutor: return None - async def async_execute_command(self, command_name: str, *args: Any) -> None: - """Execute device command in async context.""" + async def async_execute_command( + self, command_name: str, *args: Any, refresh_afterwards: bool = True + ) -> None: + """Execute device command in async context. + + :param refresh_afterwards: Whether to refresh the device state after the command is executed. + If several commands are executed, it will be refreshed only once. 
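+ + Illustrative usage (editor's sketch, not part of the original change): a caller batching + several commands passes refresh_afterwards=False to every call except the last one, + so the device state is refreshed only once, for example:: + + await executor.async_execute_command( + OverkizCommand.SET_ABSENCE_START_DATE, start_date, refresh_afterwards=False + ) + await executor.async_execute_command( + OverkizCommand.SET_ABSENCE_MODE, OverkizCommandParam.PROG + ) + + Here executor is an OverkizExecutor and start_date a date dict; both names are placeholders for this sketch.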
+ """ parameters = [arg for arg in args if arg is not None] # Set the execution duration to 0 seconds for RTS devices on supported commands # Default execution duration is 30 seconds and will block consecutive commands @@ -107,8 +113,8 @@ class OverkizExecutor: "device_url": self.device.device_url, "command_name": command_name, } - - await self.coordinator.async_refresh() + if refresh_afterwards: + await self.coordinator.async_refresh() async def async_cancel_command( self, commands_to_cancel: list[OverkizCommand] diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 19850f0b57e..8c750aec6bd 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -20,7 +20,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.13.14"], + "requirements": ["pyoverkiz==1.15.0"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/homeassistant/components/overkiz/water_heater/__init__.py b/homeassistant/components/overkiz/water_heater/__init__.py new file mode 100644 index 00000000000..1fb5e5696bd --- /dev/null +++ b/homeassistant/components/overkiz/water_heater/__init__.py @@ -0,0 +1,57 @@ +"""Support for Overkiz water heater devices.""" + +from __future__ import annotations + +from pyoverkiz.enums.ui import UIWidget + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .. import HomeAssistantOverkizData +from ..const import DOMAIN +from ..entity import OverkizEntity +from .atlantic_domestic_hot_water_production_mlb_component import ( + AtlanticDomesticHotWaterProductionMBLComponent, +) +from .atlantic_pass_apc_dhw import AtlanticPassAPCDHW +from .domestic_hot_water_production import DomesticHotWaterProduction +from .hitachi_dhw import HitachiDHW + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Overkiz DHW from a config entry.""" + data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + entities: list[OverkizEntity] = [] + + for device in data.platforms[Platform.WATER_HEATER]: + if device.controllable_name in CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY: + entities.append( + CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY[device.controllable_name]( + device.device_url, data.coordinator + ) + ) + elif device.widget in WIDGET_TO_WATER_HEATER_ENTITY: + entities.append( + WIDGET_TO_WATER_HEATER_ENTITY[device.widget]( + device.device_url, data.coordinator + ) + ) + + async_add_entities(entities) + + +WIDGET_TO_WATER_HEATER_ENTITY = { + UIWidget.ATLANTIC_PASS_APC_DHW: AtlanticPassAPCDHW, + UIWidget.DOMESTIC_HOT_WATER_PRODUCTION: DomesticHotWaterProduction, + UIWidget.HITACHI_DHW: HitachiDHW, +} + +CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY = { + "modbuslink:AtlanticDomesticHotWaterProductionMBLComponent": AtlanticDomesticHotWaterProductionMBLComponent, +} diff --git a/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_mlb_component.py similarity index 69% rename from 
homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py rename to homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_mlb_component.py index 0f57d13433b..8ba2c1678c2 100644 --- a/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py +++ b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_mlb_component.py @@ -13,6 +13,7 @@ from homeassistant.components.water_heater import ( WaterHeaterEntityFeature, ) from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.util import dt as dt_util from .. import OverkizDataUpdateCoordinator from ..entity import OverkizEntity @@ -97,9 +98,9 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE @property def is_away_mode_on(self) -> bool: """Return true if away mode is on.""" - return ( - self.executor.select_state(OverkizState.MODBUSLINK_DHW_ABSENCE_MODE) - == OverkizCommandParam.ON + return self.executor.select_state(OverkizState.MODBUSLINK_DHW_ABSENCE_MODE) in ( + OverkizCommandParam.ON, + OverkizCommandParam.PROG, ) @property @@ -151,10 +152,50 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE await self.async_turn_away_mode_on() async def async_turn_away_mode_on(self) -> None: - """Turn away mode on.""" + """Turn away mode on. + + Turning away mode on also requires setting the away mode start and end dates, and those dates have to match the device datetime. + The API accepts dates in the format of the DHW's core:DateTimeState state, for example + {'day': 11, 'hour': 21, 'minute': 12, 'month': 7, 'second': 53, 'weekday': 3, 'year': 2024} + The same dict is passed as the current device date and as the away mode start date, + and then again as the end date with the year incremented by 1, so away mode is enabled for the next year. + The weekday number seems to have no effect, so recalculating it for the future date is redundant; + if needed, both the start and end date dicts could be built from scratch with Home Assistant's dt_util, + based on datetime.now() and a datetime.timedelta into the future. + Executing `setAbsenceStartDate`, `setAbsenceEndDate` and `setAbsenceMode` each with their own refresh + makes the API answer with "too many requests", since a polling update follows every command execution, + and the device becomes unavailable until the API recovers. + With `refresh_afterwards=False` on the first commands and a single refresh after the last one, + the API is not overloaded and the transition is smooth, without the device becoming unavailable. 
+ """ + now = dt_util.now() + now_date = { + "month": now.month, + "hour": now.hour, + "year": now.year, + "weekday": now.weekday(), + "day": now.day, + "minute": now.minute, + "second": now.second, + } await self.executor.async_execute_command( - OverkizCommand.SET_ABSENCE_MODE, OverkizCommandParam.ON + OverkizCommand.SET_DATE_TIME, + now_date, + refresh_afterwards=False, ) + await self.executor.async_execute_command( + OverkizCommand.SET_ABSENCE_START_DATE, now_date, refresh_afterwards=False + ) + now_date["year"] = now_date["year"] + 1 + await self.executor.async_execute_command( + OverkizCommand.SET_ABSENCE_END_DATE, now_date, refresh_afterwards=False + ) + await self.executor.async_execute_command( + OverkizCommand.SET_ABSENCE_MODE, + OverkizCommandParam.PROG, + refresh_afterwards=False, + ) + await self.coordinator.async_refresh() async def async_turn_away_mode_off(self) -> None: """Turn away mode off.""" diff --git a/homeassistant/components/overkiz/water_heater_entities/atlantic_pass_apc_dhw.py b/homeassistant/components/overkiz/water_heater/atlantic_pass_apc_dhw.py similarity index 100% rename from homeassistant/components/overkiz/water_heater_entities/atlantic_pass_apc_dhw.py rename to homeassistant/components/overkiz/water_heater/atlantic_pass_apc_dhw.py diff --git a/homeassistant/components/overkiz/water_heater_entities/domestic_hot_water_production.py b/homeassistant/components/overkiz/water_heater/domestic_hot_water_production.py similarity index 100% rename from homeassistant/components/overkiz/water_heater_entities/domestic_hot_water_production.py rename to homeassistant/components/overkiz/water_heater/domestic_hot_water_production.py diff --git a/homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py b/homeassistant/components/overkiz/water_heater/hitachi_dhw.py similarity index 100% rename from homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py rename to homeassistant/components/overkiz/water_heater/hitachi_dhw.py diff --git a/homeassistant/components/overkiz/water_heater_entities/__init__.py b/homeassistant/components/overkiz/water_heater_entities/__init__.py deleted file mode 100644 index fdc41f213c6..00000000000 --- a/homeassistant/components/overkiz/water_heater_entities/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -"""Water heater entities for the Overkiz (by Somfy) integration.""" - -from pyoverkiz.enums.ui import UIWidget - -from .atlantic_domestic_hot_water_production_mlb_component import ( - AtlanticDomesticHotWaterProductionMBLComponent, -) -from .atlantic_pass_apc_dhw import AtlanticPassAPCDHW -from .domestic_hot_water_production import DomesticHotWaterProduction -from .hitachi_dhw import HitachiDHW - -WIDGET_TO_WATER_HEATER_ENTITY = { - UIWidget.ATLANTIC_PASS_APC_DHW: AtlanticPassAPCDHW, - UIWidget.DOMESTIC_HOT_WATER_PRODUCTION: DomesticHotWaterProduction, - UIWidget.HITACHI_DHW: HitachiDHW, -} - -CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY = { - "modbuslink:AtlanticDomesticHotWaterProductionMBLComponent": AtlanticDomesticHotWaterProductionMBLComponent, -} diff --git a/homeassistant/components/ovo_energy/__init__.py b/homeassistant/components/ovo_energy/__init__.py index d207f3161f4..436180407f4 100644 --- a/homeassistant/components/ovo_energy/__init__.py +++ b/homeassistant/components/ovo_energy/__init__.py @@ -15,12 +15,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from 
homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util import dt as dt_util from .const import CONF_ACCOUNT, DATA_CLIENT, DATA_COORDINATOR, DOMAIN @@ -37,7 +32,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: client_session=async_get_clientsession(hass), ) - if custom_account := entry.data.get(CONF_ACCOUNT) is not None: + if (custom_account := entry.data.get(CONF_ACCOUNT)) is not None: client.custom_account_id = custom_account try: @@ -54,7 +49,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_update_data() -> OVODailyUsage: """Fetch data from OVO Energy.""" - if custom_account := entry.data.get(CONF_ACCOUNT) is not None: + if (custom_account := entry.data.get(CONF_ACCOUNT)) is not None: client.custom_account_id = custom_account async with asyncio.timeout(10): @@ -72,6 +67,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator[OVODailyUsage]( hass, _LOGGER, + config_entry=entry, # Name of the data. For logging purposes. name="sensor", update_method=async_update_data, @@ -102,32 +98,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: del hass.data[DOMAIN][entry.entry_id] return unload_ok - - -class OVOEnergyEntity(CoordinatorEntity[DataUpdateCoordinator[OVODailyUsage]]): - """Defines a base OVO Energy entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: DataUpdateCoordinator[OVODailyUsage], - client: OVOEnergy, - ) -> None: - """Initialize the OVO Energy entity.""" - super().__init__(coordinator) - self._client = client - - -class OVOEnergyDeviceEntity(OVOEnergyEntity): - """Defines a OVO Energy device entity.""" - - @property - def device_info(self) -> DeviceInfo: - """Return device information about this OVO Energy instance.""" - return DeviceInfo( - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, self._client.account_id)}, - manufacturer="OVO Energy", - name=self._client.username, - ) diff --git a/homeassistant/components/ovo_energy/config_flow.py b/homeassistant/components/ovo_energy/config_flow.py index 87d53e5fbf9..53fc4f8eff6 100644 --- a/homeassistant/components/ovo_energy/config_flow.py +++ b/homeassistant/components/ovo_energy/config_flow.py @@ -46,7 +46,7 @@ class OVOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): client_session=async_get_clientsession(self.hass), ) - if custom_account := user_input.get(CONF_ACCOUNT) is not None: + if (custom_account := user_input.get(CONF_ACCOUNT)) is not None: client.custom_account_id = custom_account try: @@ -79,20 +79,26 @@ class OVOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_reauth( self, - user_input: Mapping[str, Any], + entry_data: Mapping[str, Any], + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + self.username = entry_data.get(CONF_USERNAME) + self.account = entry_data.get(CONF_ACCOUNT) + + if self.username: + # If we have a username, use it as flow title + self.context["title_placeholders"] = {CONF_USERNAME: self.username} + + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, + user_input: Mapping[str, Any] | 
None = None, ) -> ConfigFlowResult: """Handle configuration by re-auth.""" errors = {} - if user_input and user_input.get(CONF_USERNAME): - self.username = user_input[CONF_USERNAME] - - if user_input and user_input.get(CONF_ACCOUNT): - self.account = user_input[CONF_ACCOUNT] - - self.context["title_placeholders"] = {CONF_USERNAME: self.username} - - if user_input is not None and user_input.get(CONF_PASSWORD) is not None: + if user_input is not None: client = OVOEnergy( client_session=async_get_clientsession(self.hass), ) @@ -109,19 +115,13 @@ class OVOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "connection_error" else: if authenticated: - entry = await self.async_set_unique_id(self.username) - if entry: - self.hass.config_entries.async_update_entry( - entry, - data={ - CONF_USERNAME: self.username, - CONF_PASSWORD: user_input[CONF_PASSWORD], - }, - ) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]}, + ) errors["base"] = "authorization_error" return self.async_show_form( - step_id="reauth", data_schema=REAUTH_SCHEMA, errors=errors + step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, errors=errors ) diff --git a/homeassistant/components/ovo_energy/entity.py b/homeassistant/components/ovo_energy/entity.py new file mode 100644 index 00000000000..ed8a24b0542 --- /dev/null +++ b/homeassistant/components/ovo_energy/entity.py @@ -0,0 +1,43 @@ +"""Support for OVO Energy.""" + +from __future__ import annotations + +from ovoenergy import OVOEnergy +from ovoenergy.models import OVODailyUsage + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import DOMAIN + + +class OVOEnergyEntity(CoordinatorEntity[DataUpdateCoordinator[OVODailyUsage]]): + """Defines a base OVO Energy entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: DataUpdateCoordinator[OVODailyUsage], + client: OVOEnergy, + ) -> None: + """Initialize the OVO Energy entity.""" + super().__init__(coordinator) + self._client = client + + +class OVOEnergyDeviceEntity(OVOEnergyEntity): + """Defines a OVO Energy device entity.""" + + @property + def device_info(self) -> DeviceInfo: + """Return device information about this OVO Energy instance.""" + return DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, self._client.account_id)}, + manufacturer="OVO Energy", + name=self._client.username, + ) diff --git a/homeassistant/components/ovo_energy/sensor.py b/homeassistant/components/ovo_energy/sensor.py index 3012a130a1a..8cada86da34 100644 --- a/homeassistant/components/ovo_energy/sensor.py +++ b/homeassistant/components/ovo_energy/sensor.py @@ -24,8 +24,8 @@ from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.util import dt as dt_util -from . 
import OVOEnergyDeviceEntity from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN +from .entity import OVOEnergyDeviceEntity SCAN_INTERVAL = timedelta(seconds=300) PARALLEL_UPDATES = 4 diff --git a/homeassistant/components/ovo_energy/strings.json b/homeassistant/components/ovo_energy/strings.json index fda0c2996dc..3dc11e3a601 100644 --- a/homeassistant/components/ovo_energy/strings.json +++ b/homeassistant/components/ovo_energy/strings.json @@ -1,10 +1,15 @@ { "config": { "flow_title": "{username}", + "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "connection_error": "[%key:common::config_flow::error::cannot_connect%]", + "authorization_error": "[%key:common::config_flow::error::invalid_auth%]" }, "step": { "user": { @@ -16,7 +21,7 @@ "description": "Set up an OVO Energy instance to access your energy usage.", "title": "Add OVO Energy Account" }, - "reauth": { + "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" }, diff --git a/homeassistant/components/owntracks/__init__.py b/homeassistant/components/owntracks/__init__.py index f57d305d355..720c3718a4f 100644 --- a/homeassistant/components/owntracks/__init__.py +++ b/homeassistant/components/owntracks/__init__.py @@ -261,7 +261,7 @@ class OwnTracksContext: return False if self.max_gps_accuracy is not None and acc > self.max_gps_accuracy: - _LOGGER.info( + _LOGGER.warning( "Ignoring %s update because expected GPS accuracy %s is not met: %s", message["_type"], self.max_gps_accuracy, diff --git a/homeassistant/components/owntracks/config_flow.py b/homeassistant/components/owntracks/config_flow.py index 390cc880c1e..b92f5d7ce06 100644 --- a/homeassistant/components/owntracks/config_flow.py +++ b/homeassistant/components/owntracks/config_flow.py @@ -23,9 +23,6 @@ class OwnTracksFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a user initiated set up flow to create OwnTracks webhook.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is None: return self.async_show_form(step_id="user") diff --git a/homeassistant/components/owntracks/device_tracker.py b/homeassistant/components/owntracks/device_tracker.py index 31af3d845ae..6a6f0f078b1 100644 --- a/homeassistant/components/owntracks/device_tracker.py +++ b/homeassistant/components/owntracks/device_tracker.py @@ -2,7 +2,7 @@ from homeassistant.components.device_tracker import ( ATTR_SOURCE_TYPE, - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, SourceType, TrackerEntity, ) @@ -66,7 +66,7 @@ class OwnTracksEntity(TrackerEntity, RestoreEntity): """Set up OwnTracks entity.""" self._dev_id = dev_id self._data = data or {} - self.entity_id = f"{DOMAIN}.{dev_id}" + self.entity_id = f"{DEVICE_TRACKER_DOMAIN}.{dev_id}" @property def unique_id(self): diff --git a/homeassistant/components/owntracks/manifest.json b/homeassistant/components/owntracks/manifest.json index 79af00627a4..7ff5a143451 100644 --- a/homeassistant/components/owntracks/manifest.json +++ b/homeassistant/components/owntracks/manifest.json @@ -8,5 +8,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/owntracks", "iot_class": "local_push", "loggers": ["nacl"], - "requirements": ["PyNaCl==1.5.0"] + "requirements": ["PyNaCl==1.5.0"], + "single_config_entry": true } diff --git a/homeassistant/components/owntracks/messages.py b/homeassistant/components/owntracks/messages.py index 011b4f75489..93d079b783d 100644 --- a/homeassistant/components/owntracks/messages.py +++ b/homeassistant/components/owntracks/messages.py @@ -214,14 +214,14 @@ async def _async_transition_message_enter(hass, context, message, location): beacons = context.mobile_beacons_active[dev_id] if location not in beacons: beacons.add(location) - _LOGGER.info("Added beacon %s", location) + _LOGGER.debug("Added beacon %s", location) context.async_see_beacons(hass, dev_id, kwargs) else: # Normal region regions = context.regions_entered[dev_id] if location not in regions: regions.append(location) - _LOGGER.info("Enter region %s", location) + _LOGGER.debug("Enter region %s", location) _set_gps_from_zone(kwargs, location, zone) context.async_see(**kwargs) context.async_see_beacons(hass, dev_id, kwargs) @@ -238,7 +238,7 @@ async def _async_transition_message_leave(hass, context, message, location): beacons = context.mobile_beacons_active[dev_id] if location in beacons: beacons.remove(location) - _LOGGER.info("Remove beacon %s", location) + _LOGGER.debug("Remove beacon %s", location) context.async_see_beacons(hass, dev_id, kwargs) else: new_region = regions[-1] if regions else None @@ -246,12 +246,12 @@ async def _async_transition_message_leave(hass, context, message, location): # Exit to previous region zone = hass.states.get(f"zone.{slugify(new_region)}") _set_gps_from_zone(kwargs, new_region, zone) - _LOGGER.info("Exit to %s", new_region) + _LOGGER.debug("Exit to %s", new_region) context.async_see(**kwargs) context.async_see_beacons(hass, dev_id, kwargs) return - _LOGGER.info("Exit to GPS") + _LOGGER.debug("Exit to GPS") # Check for GPS accuracy if context.async_valid_accuracy(message): @@ -335,7 +335,7 @@ async def async_handle_waypoints_message(hass, context, message): wayps = message.get("waypoints", [message]) - _LOGGER.info("Got %d waypoints from %s", len(wayps), message["topic"]) + _LOGGER.debug("Got %d waypoints from %s", len(wayps), message["topic"]) name_base = " ".join(_parse_topic(message["topic"], context.mqtt_topic)) diff --git a/homeassistant/components/owntracks/strings.json b/homeassistant/components/owntracks/strings.json index 499b598d7ae..3c08550dab7 100644 --- a/homeassistant/components/owntracks/strings.json +++ b/homeassistant/components/owntracks/strings.json @@ -7,11 +7,10 @@ } }, "abort": { - "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]" }, "create_entry": { - "default": "\n\nOn Android, open [the OwnTracks app]({android_url}), go to Preferences > Connection. Change the following settings:\n - Mode: HTTP\n - Host: {webhook_url}\n - Identification:\n - Username: `'(Your name)'`\n - Device ID: `'(Your device name)'`\n\nOn iOS, open [the OwnTracks app]({ios_url}), tap (i) icon in top left > Settings. Change the following settings:\n - Mode: HTTP\n - URL: {webhook_url}\n - Turn on authentication\n - UserID: `'(Your name)'`\n\n{secret}\n\nSee [the documentation]({docs_url}) for more information." 
+ "default": "On Android, open [the OwnTracks app]({android_url}), go to Preferences > Connection. Change the following settings:\n - Mode: HTTP\n - Host: {webhook_url}\n - Identification:\n - Username: `'(Your name)'`\n - Device ID: `'(Your device name)'`\n\nOn iOS, open [the OwnTracks app]({ios_url}), tap (i) icon in top left > Settings. Change the following settings:\n - Mode: HTTP\n - URL: {webhook_url}\n - Turn on authentication\n - UserID: `'(Your name)'`\n\n{secret}\n\nSee [the documentation]({docs_url}) for more information." } } } diff --git a/homeassistant/components/p1_monitor/__init__.py b/homeassistant/components/p1_monitor/__init__.py index 8125e9f7a55..d2ccc83972a 100644 --- a/homeassistant/components/p1_monitor/__init__.py +++ b/homeassistant/components/p1_monitor/__init__.py @@ -3,14 +3,16 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DOMAIN +from .const import LOGGER from .coordinator import P1MonitorDataUpdateCoordinator -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type P1MonitorConfigEntry = ConfigEntry[P1MonitorDataUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -23,16 +25,35 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await coordinator.p1monitor.close() raise - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + LOGGER.debug("Migrating from version %s", config_entry.version) + + if config_entry.version == 1: + # Migrate to split host and port + host = config_entry.data[CONF_HOST] + if ":" in host: + host, port = host.split(":") + else: + port = 80 + + new_data = { + **config_entry.data, + CONF_HOST: host, + CONF_PORT: int(port), + } + + hass.config_entries.async_update_entry(config_entry, data=new_data, version=2) + LOGGER.debug("Migration to version %s successful", config_entry.version) + return True + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload P1 Monitor config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - del hass.data[DOMAIN][entry.entry_id] - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/p1_monitor/config_flow.py b/homeassistant/components/p1_monitor/config_flow.py index 9c039d06b94..a7ede186d72 100644 --- a/homeassistant/components/p1_monitor/config_flow.py +++ b/homeassistant/components/p1_monitor/config_flow.py @@ -8,9 +8,14 @@ from p1monitor import P1Monitor, P1MonitorError import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.selector import TextSelector +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + TextSelector, +) 
from .const import DOMAIN @@ -18,7 +23,7 @@ from .const import DOMAIN class P1MonitorFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for P1 Monitor.""" - VERSION = 1 + VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -31,7 +36,9 @@ class P1MonitorFlowHandler(ConfigFlow, domain=DOMAIN): session = async_get_clientsession(self.hass) try: async with P1Monitor( - host=user_input[CONF_HOST], session=session + host=user_input[CONF_HOST], + port=user_input[CONF_PORT], + session=session, ) as client: await client.smartmeter() except P1MonitorError: @@ -41,6 +48,7 @@ class P1MonitorFlowHandler(ConfigFlow, domain=DOMAIN): title="P1 Monitor", data={ CONF_HOST: user_input[CONF_HOST], + CONF_PORT: user_input[CONF_PORT], }, ) @@ -49,6 +57,14 @@ class P1MonitorFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema( { vol.Required(CONF_HOST): TextSelector(), + vol.Required(CONF_PORT, default=80): vol.All( + NumberSelector( + NumberSelectorConfig( + min=1, max=65535, mode=NumberSelectorMode.BOX + ), + ), + vol.Coerce(int), + ), } ), errors=errors, diff --git a/homeassistant/components/p1_monitor/coordinator.py b/homeassistant/components/p1_monitor/coordinator.py index 49844adf39b..5459f88c388 100644 --- a/homeassistant/components/p1_monitor/coordinator.py +++ b/homeassistant/components/p1_monitor/coordinator.py @@ -15,7 +15,7 @@ from p1monitor import ( ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -59,7 +59,9 @@ class P1MonitorDataUpdateCoordinator(DataUpdateCoordinator[P1MonitorData]): ) self.p1monitor = P1Monitor( - self.config_entry.data[CONF_HOST], session=async_get_clientsession(hass) + host=self.config_entry.data[CONF_HOST], + port=self.config_entry.data[CONF_PORT], + session=async_get_clientsession(hass), ) async def _async_update_data(self) -> P1MonitorData: diff --git a/homeassistant/components/p1_monitor/diagnostics.py b/homeassistant/components/p1_monitor/diagnostics.py index 5fb8cb472e8..d2e2ec5c24e 100644 --- a/homeassistant/components/p1_monitor/diagnostics.py +++ b/homeassistant/components/p1_monitor/diagnostics.py @@ -7,47 +7,41 @@ from typing import TYPE_CHECKING, Any, cast from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from .const import ( - DOMAIN, SERVICE_PHASES, SERVICE_SETTINGS, SERVICE_SMARTMETER, SERVICE_WATERMETER, ) -from .coordinator import P1MonitorDataUpdateCoordinator if TYPE_CHECKING: from _typeshed import DataclassInstance -TO_REDACT = { - CONF_HOST, -} +TO_REDACT = {CONF_HOST, CONF_PORT} async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: P1MonitorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - data = { "entry": { "title": entry.title, "data": async_redact_data(entry.data, TO_REDACT), }, "data": { - "smartmeter": asdict(coordinator.data[SERVICE_SMARTMETER]), - "phases": asdict(coordinator.data[SERVICE_PHASES]), - "settings": asdict(coordinator.data[SERVICE_SETTINGS]), + "smartmeter": 
asdict(entry.runtime_data.data[SERVICE_SMARTMETER]), + "phases": asdict(entry.runtime_data.data[SERVICE_PHASES]), + "settings": asdict(entry.runtime_data.data[SERVICE_SETTINGS]), }, } - if coordinator.has_water_meter: + if entry.runtime_data.has_water_meter: data["data"]["watermeter"] = asdict( - cast("DataclassInstance", coordinator.data[SERVICE_WATERMETER]) + cast("DataclassInstance", entry.runtime_data.data[SERVICE_WATERMETER]) ) return data diff --git a/homeassistant/components/p1_monitor/manifest.json b/homeassistant/components/p1_monitor/manifest.json index 4702de3546d..28016242a6a 100644 --- a/homeassistant/components/p1_monitor/manifest.json +++ b/homeassistant/components/p1_monitor/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/p1_monitor", "iot_class": "local_polling", "loggers": ["p1monitor"], - "quality_scale": "platinum", - "requirements": ["p1monitor==3.0.1"] + "requirements": ["p1monitor==3.1.0"] } diff --git a/homeassistant/components/p1_monitor/sensor.py b/homeassistant/components/p1_monitor/sensor.py index 88f6d165f14..771ef0e19af 100644 --- a/homeassistant/components/p1_monitor/sensor.py +++ b/homeassistant/components/p1_monitor/sensor.py @@ -239,11 +239,10 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up P1 Monitor Sensors based on a config entry.""" - coordinator: P1MonitorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] entities: list[P1MonitorSensorEntity] = [] entities.extend( P1MonitorSensorEntity( - coordinator=coordinator, + entry=entry, description=description, name="SmartMeter", service=SERVICE_SMARTMETER, @@ -252,7 +251,7 @@ async def async_setup_entry( ) entities.extend( P1MonitorSensorEntity( - coordinator=coordinator, + entry=entry, description=description, name="Phases", service=SERVICE_PHASES, @@ -261,17 +260,17 @@ async def async_setup_entry( ) entities.extend( P1MonitorSensorEntity( - coordinator=coordinator, + entry=entry, description=description, name="Settings", service=SERVICE_SETTINGS, ) for description in SENSORS_SETTINGS ) - if coordinator.has_water_meter: + if entry.runtime_data.has_water_meter: entities.extend( P1MonitorSensorEntity( - coordinator=coordinator, + entry=entry, description=description, name="WaterMeter", service=SERVICE_WATERMETER, @@ -291,24 +290,26 @@ class P1MonitorSensorEntity( def __init__( self, *, - coordinator: P1MonitorDataUpdateCoordinator, + entry: ConfigEntry, description: SensorEntityDescription, name: str, service: Literal["smartmeter", "watermeter", "phases", "settings"], ) -> None: """Initialize P1 Monitor sensor.""" - super().__init__(coordinator=coordinator) + super().__init__(coordinator=entry.runtime_data) self._service_key = service self.entity_description = description self._attr_unique_id = ( - f"{coordinator.config_entry.entry_id}_{service}_{description.key}" + f"{entry.runtime_data.config_entry.entry_id}_{service}_{description.key}" ) self._attr_device_info = DeviceInfo( entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, f"{coordinator.config_entry.entry_id}_{service}")}, - configuration_url=f"http://{coordinator.config_entry.data[CONF_HOST]}", + identifiers={ + (DOMAIN, f"{entry.runtime_data.config_entry.entry_id}_{service}") + }, + configuration_url=f"http://{entry.runtime_data.config_entry.data[CONF_HOST]}", manufacturer="P1 Monitor", name=name, ) diff --git a/homeassistant/components/p1_monitor/strings.json b/homeassistant/components/p1_monitor/strings.json 
index 781ca109235..b64f1dcc291 100644 --- a/homeassistant/components/p1_monitor/strings.json +++ b/homeassistant/components/p1_monitor/strings.json @@ -4,10 +4,12 @@ "user": { "description": "Set up P1 Monitor to integrate with Home Assistant.", "data": { - "host": "[%key:common::config_flow::data::host%]" + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]" }, "data_description": { - "host": "The IP address or hostname of your P1 Monitor installation." + "host": "The IP address or hostname of your P1 Monitor installation.", + "port": "The port of your P1 Monitor installation." } } }, diff --git a/homeassistant/components/palazzetti/__init__.py b/homeassistant/components/palazzetti/__init__.py new file mode 100644 index 00000000000..f20b3d11261 --- /dev/null +++ b/homeassistant/components/palazzetti/__init__.py @@ -0,0 +1,27 @@ +"""The Palazzetti integration.""" + +from __future__ import annotations + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import PalazzettiConfigEntry, PalazzettiDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR] + + +async def async_setup_entry(hass: HomeAssistant, entry: PalazzettiConfigEntry) -> bool: + """Set up Palazzetti from a config entry.""" + + coordinator = PalazzettiDataUpdateCoordinator(hass) + + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: PalazzettiConfigEntry) -> bool: + """Unload a config entry.""" + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/palazzetti/climate.py b/homeassistant/components/palazzetti/climate.py new file mode 100644 index 00000000000..356f3a7306f --- /dev/null +++ b/homeassistant/components/palazzetti/climate.py @@ -0,0 +1,153 @@ +"""Support for Palazzetti climates.""" + +from typing import Any + +from pypalazzetti.exceptions import CommunicationError, ValidationError + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACAction, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import PalazzettiConfigEntry +from .const import DOMAIN, FAN_AUTO, FAN_HIGH, FAN_MODES, FAN_SILENT +from .coordinator import PalazzettiDataUpdateCoordinator +from .entity import PalazzettiEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PalazzettiConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Palazzetti climates based on a config entry.""" + async_add_entities([PalazzettiClimateEntity(entry.runtime_data)]) + + +class PalazzettiClimateEntity(PalazzettiEntity, ClimateEntity): + """Defines a Palazzetti climate.""" + + _attr_has_entity_name = True + _attr_name = None + _attr_translation_key = DOMAIN + _attr_target_temperature_step = 1.0 + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + ) + + def __init__(self, coordinator: PalazzettiDataUpdateCoordinator) -> None: + """Initialize Palazzetti climate.""" + super().__init__(coordinator) + client = coordinator.client + mac = coordinator.config_entry.unique_id + self._attr_unique_id = mac + self._attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF] + self._attr_min_temp = client.target_temperature_min + self._attr_max_temp = client.target_temperature_max + self._attr_fan_modes = list( + map(str, range(client.fan_speed_min, client.fan_speed_max + 1)) + ) + if client.has_fan_silent: + self._attr_fan_modes.insert(0, FAN_SILENT) + if client.has_fan_high: + self._attr_fan_modes.append(FAN_HIGH) + if client.has_fan_auto: + self._attr_fan_modes.append(FAN_AUTO) + + @property + def hvac_mode(self) -> HVACMode: + """Return hvac operation ie. heat or off mode.""" + return HVACMode.HEAT if self.coordinator.client.is_on else HVACMode.OFF + + @property + def hvac_action(self) -> HVACAction: + """Return hvac action ie. 
heating or idle.""" + return ( + HVACAction.HEATING + if self.coordinator.client.is_heating + else HVACAction.IDLE + ) + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + try: + await self.coordinator.client.set_on(hvac_mode != HVACMode.OFF) + except CommunicationError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="cannot_connect" + ) from err + except ValidationError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="on_off_not_available" + ) from err + await self.coordinator.async_refresh() + + @property + def current_temperature(self) -> float | None: + """Return current temperature.""" + return self.coordinator.client.room_temperature + + @property + def target_temperature(self) -> int | None: + """Return the temperature.""" + return self.coordinator.client.target_temperature + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new temperature.""" + temperature = int(kwargs[ATTR_TEMPERATURE]) + try: + await self.coordinator.client.set_target_temperature(temperature) + except CommunicationError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="cannot_connect" + ) from err + except ValidationError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_target_temperature", + translation_placeholders={ + "value": str(temperature), + }, + ) from err + await self.coordinator.async_refresh() + + @property + def fan_mode(self) -> str | None: + """Return the fan mode.""" + api_state = self.coordinator.client.fan_speed + return FAN_MODES[api_state] + + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set new fan mode.""" + try: + if fan_mode == FAN_SILENT: + await self.coordinator.client.set_fan_silent() + elif fan_mode == FAN_HIGH: + await self.coordinator.client.set_fan_high() + elif fan_mode == FAN_AUTO: + await self.coordinator.client.set_fan_auto() + else: + await self.coordinator.client.set_fan_speed(FAN_MODES.index(fan_mode)) + except CommunicationError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="cannot_connect" + ) from err + except ValidationError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_fan_mode", + translation_placeholders={ + "value": fan_mode, + }, + ) from err + await self.coordinator.async_refresh() diff --git a/homeassistant/components/palazzetti/config_flow.py b/homeassistant/components/palazzetti/config_flow.py new file mode 100644 index 00000000000..fe892b6624d --- /dev/null +++ b/homeassistant/components/palazzetti/config_flow.py @@ -0,0 +1,91 @@ +"""Config flow for Palazzetti.""" + +from typing import Any + +from pypalazzetti.client import PalazzettiClient +from pypalazzetti.exceptions import CommunicationError +import voluptuous as vol + +from homeassistant.components import dhcp +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST +from homeassistant.helpers import device_registry as dr + +from .const import DOMAIN, LOGGER + + +class PalazzettiConfigFlow(ConfigFlow, domain=DOMAIN): + """Palazzetti config flow.""" + + _discovered_device: PalazzettiClient + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """User configuration step.""" + errors: dict[str, str] = {} + if user_input is not None: + host = user_input[CONF_HOST] + client = 
PalazzettiClient(hostname=host) + try: + await client.connect() + except CommunicationError: + LOGGER.exception("Communication error") + errors["base"] = "cannot_connect" + else: + formatted_mac = dr.format_mac(client.mac) + + # Assign a unique ID to the flow + await self.async_set_unique_id(formatted_mac) + + # Abort the flow if a config entry with the same unique ID exists + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=client.name, + data=user_input, + ) + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema({vol.Required(CONF_HOST): str}), + errors=errors, + ) + + async def async_step_dhcp( + self, discovery_info: dhcp.DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle DHCP discovery.""" + + LOGGER.debug( + "DHCP discovery detected Palazzetti: %s", discovery_info.macaddress + ) + + await self.async_set_unique_id(dr.format_mac(discovery_info.macaddress)) + self._abort_if_unique_id_configured() + self._discovered_device = PalazzettiClient(hostname=discovery_info.ip) + try: + await self._discovered_device.connect() + except CommunicationError: + return self.async_abort(reason="cannot_connect") + + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + if user_input is not None: + return self.async_create_entry( + title=self._discovered_device.name, + data={CONF_HOST: self._discovered_device.host}, + ) + + self._set_confirm_only() + return self.async_show_form( + step_id="discovery_confirm", + description_placeholders={ + "name": self._discovered_device.name, + "host": self._discovered_device.host, + }, + ) diff --git a/homeassistant/components/palazzetti/const.py b/homeassistant/components/palazzetti/const.py new file mode 100644 index 00000000000..b2e27b2a6fd --- /dev/null +++ b/homeassistant/components/palazzetti/const.py @@ -0,0 +1,71 @@ +"""Constants for the Palazzetti integration.""" + +from datetime import timedelta +import logging +from typing import Final + +from homeassistant.helpers.typing import StateType + +DOMAIN: Final = "palazzetti" +PALAZZETTI: Final = "Palazzetti" +LOGGER = logging.getLogger(__package__) +SCAN_INTERVAL = timedelta(seconds=30) +ON_OFF_NOT_AVAILABLE = "on_off_not_available" +ERROR_INVALID_FAN_MODE = "invalid_fan_mode" +ERROR_INVALID_TARGET_TEMPERATURE = "invalid_target_temperature" +ERROR_CANNOT_CONNECT = "cannot_connect" + +FAN_SILENT: Final = "silent" +FAN_HIGH: Final = "high" +FAN_AUTO: Final = "auto" +FAN_MODES: Final = [FAN_SILENT, "1", "2", "3", "4", "5", FAN_HIGH, FAN_AUTO] + +STATUS_TO_HA: Final[dict[StateType, str]] = { + 0: "off", + 1: "off_timer", + 2: "test_fire", + 3: "heatup", + 4: "fueling", + 5: "ign_test", + 6: "burning", + 7: "burning_mod", + 8: "unknown", + 9: "cool_fluid", + 10: "fire_stop", + 11: "clean_fire", + 12: "cooling", + 50: "cleanup", + 51: "ecomode", + 241: "chimney_alarm", + 243: "grate_error", + 244: "pellet_water_error", + 245: "t05_error", + 247: "hatch_door_open", + 248: "pressure_error", + 249: "main_probe_failure", + 250: "flue_probe_failure", + 252: "exhaust_temp_high", + 253: "pellet_finished", + 501: "off", + 502: "fueling", + 503: "ign_test", + 504: "burning", + 505: "firewood_finished", + 506: "cooling", + 507: "clean_fire", + 1000: "general_error", + 1001: "general_error", + 1239: "door_open", + 1240: "temp_too_high", + 1241: "cleaning_warning", + 1243: "fuel_error", + 1244: "pellet_water_error", + 1245: "t05_error", 
+ 1247: "hatch_door_open", + 1248: "pressure_error", + 1249: "main_probe_failure", + 1250: "flue_probe_failure", + 1252: "exhaust_temp_high", + 1253: "pellet_finished", + 1508: "general_error", +} diff --git a/homeassistant/components/palazzetti/coordinator.py b/homeassistant/components/palazzetti/coordinator.py new file mode 100644 index 00000000000..d992bd3fb62 --- /dev/null +++ b/homeassistant/components/palazzetti/coordinator.py @@ -0,0 +1,47 @@ +"""Helpers to help coordinate updates.""" + +from pypalazzetti.client import PalazzettiClient +from pypalazzetti.exceptions import CommunicationError, ValidationError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER, SCAN_INTERVAL + +type PalazzettiConfigEntry = ConfigEntry[PalazzettiDataUpdateCoordinator] + + +class PalazzettiDataUpdateCoordinator(DataUpdateCoordinator[None]): + """Class to manage fetching Palazzetti data from a Palazzetti hub.""" + + config_entry: PalazzettiConfigEntry + client: PalazzettiClient + + def __init__( + self, + hass: HomeAssistant, + ) -> None: + """Initialize global Palazzetti data updater.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = PalazzettiClient(self.config_entry.data[CONF_HOST]) + + async def _async_setup(self) -> None: + try: + await self.client.connect() + await self.client.update_state() + except (CommunicationError, ValidationError) as err: + raise UpdateFailed(f"Error communicating with the API: {err}") from err + + async def _async_update_data(self) -> None: + """Fetch data from Palazzetti.""" + try: + await self.client.update_state() + except (CommunicationError, ValidationError) as err: + raise UpdateFailed(f"Error communicating with the API: {err}") from err diff --git a/homeassistant/components/palazzetti/diagnostics.py b/homeassistant/components/palazzetti/diagnostics.py new file mode 100644 index 00000000000..3843f0ec111 --- /dev/null +++ b/homeassistant/components/palazzetti/diagnostics.py @@ -0,0 +1,20 @@ +"""Provides diagnostics for Palazzetti.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
import PalazzettiConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: PalazzettiConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + client = entry.runtime_data.client + + return { + "api_data": client.to_dict(redact=True), + } diff --git a/homeassistant/components/palazzetti/entity.py b/homeassistant/components/palazzetti/entity.py new file mode 100644 index 00000000000..677c6ccbdc4 --- /dev/null +++ b/homeassistant/components/palazzetti/entity.py @@ -0,0 +1,32 @@ +"""Base class for Palazzetti entities.""" + +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import PALAZZETTI +from .coordinator import PalazzettiDataUpdateCoordinator + + +class PalazzettiEntity(CoordinatorEntity[PalazzettiDataUpdateCoordinator]): + """Defines a base Palazzetti entity.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: PalazzettiDataUpdateCoordinator) -> None: + """Initialize Palazzetti entity.""" + super().__init__(coordinator) + client = coordinator.client + mac = coordinator.config_entry.unique_id + assert mac is not None + self._attr_device_info = dr.DeviceInfo( + connections={(dr.CONNECTION_NETWORK_MAC, mac)}, + name=client.name, + manufacturer=PALAZZETTI, + sw_version=client.sw_version, + hw_version=client.hw_version, + ) + + @property + def available(self) -> bool: + """Is the entity available.""" + return super().available and self.coordinator.client.connected diff --git a/homeassistant/components/palazzetti/manifest.json b/homeassistant/components/palazzetti/manifest.json new file mode 100644 index 00000000000..05a5d260b50 --- /dev/null +++ b/homeassistant/components/palazzetti/manifest.json @@ -0,0 +1,19 @@ +{ + "domain": "palazzetti", + "name": "Palazzetti", + "codeowners": ["@dotvav"], + "config_flow": true, + "dhcp": [ + { + "hostname": "connbox*", + "macaddress": "40F3857*" + }, + { + "registered_devices": true + } + ], + "documentation": "https://www.home-assistant.io/integrations/palazzetti", + "integration_type": "device", + "iot_class": "local_polling", + "requirements": ["pypalazzetti==0.1.14"] +} diff --git a/homeassistant/components/palazzetti/number.py b/homeassistant/components/palazzetti/number.py new file mode 100644 index 00000000000..06114bfef54 --- /dev/null +++ b/homeassistant/components/palazzetti/number.py @@ -0,0 +1,66 @@ +"""Number platform for Palazzetti settings.""" + +from __future__ import annotations + +from pypalazzetti.exceptions import CommunicationError, ValidationError + +from homeassistant.components.number import NumberDeviceClass, NumberEntity +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import PalazzettiConfigEntry +from .const import DOMAIN +from .coordinator import PalazzettiDataUpdateCoordinator +from .entity import PalazzettiEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: PalazzettiConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Palazzetti number platform.""" + async_add_entities([PalazzettiCombustionPowerEntity(config_entry.runtime_data)]) + + +class PalazzettiCombustionPowerEntity(PalazzettiEntity, NumberEntity): + """Representation of Palazzetti number entity for Combustion power.""" + + _attr_translation_key = "combustion_power" + _attr_device_class = NumberDeviceClass.POWER_FACTOR + _attr_native_min_value = 1 + _attr_native_max_value = 5 + _attr_native_step = 1 + + def __init__( + self, + coordinator: PalazzettiDataUpdateCoordinator, + ) -> None: + """Initialize the Palazzetti number entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.config_entry.unique_id}-combustion_power" + + @property + def native_value(self) -> float: + """Return the state of the setting entity.""" + return self.coordinator.client.power_mode + + async def async_set_native_value(self, value: float) -> None: + """Update the setting.""" + try: + await self.coordinator.client.set_power_mode(int(value)) + except CommunicationError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="cannot_connect" + ) from err + except ValidationError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_combustion_power", + translation_placeholders={ + "value": str(value), + }, + ) from err + + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/palazzetti/quality_scale.yaml b/homeassistant/components/palazzetti/quality_scale.yaml new file mode 100644 index 00000000000..493b2595117 --- /dev/null +++ b/homeassistant/components/palazzetti/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not register actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not register actions. + docs-high-level-description: done + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + This integration does not subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have configuration. + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: todo + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. 
+ entity-category: todo + entity-device-class: done + entity-disabled-by-default: todo + entity-translations: done + exception-translations: done + icon-translations: + status: exempt + comment: | + This integration does not have custom icons. + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. + + # Platinum + async-dependency: done + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/palazzetti/sensor.py b/homeassistant/components/palazzetti/sensor.py new file mode 100644 index 00000000000..11462201f4e --- /dev/null +++ b/homeassistant/components/palazzetti/sensor.py @@ -0,0 +1,123 @@ +"""Support for Palazzetti sensors.""" + +from dataclasses import dataclass + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfLength, UnitOfMass, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import PalazzettiConfigEntry +from .const import STATUS_TO_HA +from .coordinator import PalazzettiDataUpdateCoordinator +from .entity import PalazzettiEntity + + +@dataclass(frozen=True, kw_only=True) +class PropertySensorEntityDescription(SensorEntityDescription): + """Describes a Palazzetti sensor entity that is read from a `PalazzettiClient` property.""" + + client_property: str + property_map: dict[StateType, str] | None = None + presence_flag: None | str = None + + +PROPERTY_SENSOR_DESCRIPTIONS: list[PropertySensorEntityDescription] = [ + PropertySensorEntityDescription( + key="status", + device_class=SensorDeviceClass.ENUM, + translation_key="status", + client_property="status", + property_map=STATUS_TO_HA, + options=list(STATUS_TO_HA.values()), + ), + PropertySensorEntityDescription( + key="pellet_quantity", + device_class=SensorDeviceClass.WEIGHT, + native_unit_of_measurement=UnitOfMass.KILOGRAMS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="pellet_quantity", + client_property="pellet_quantity", + ), + PropertySensorEntityDescription( + key="pellet_level", + device_class=SensorDeviceClass.DISTANCE, + native_unit_of_measurement=UnitOfLength.CENTIMETERS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="pellet_level", + presence_flag="has_pellet_level", + client_property="pellet_level", + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PalazzettiConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Palazzetti sensor entities based on a config entry.""" + + coordinator = entry.runtime_data + + sensors = [ + PalazzettiSensor( + coordinator, + PropertySensorEntityDescription( + key=sensor.description_key.value, + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=sensor.description_key.value, + client_property=sensor.state_property, + ), + ) + for sensor in coordinator.client.list_temperatures() + ] + + sensors.extend( + [ + PalazzettiSensor(coordinator, description) + for description in PROPERTY_SENSOR_DESCRIPTIONS + if not description.presence_flag + or getattr(coordinator.client, description.presence_flag) + ] + ) + + if sensors: + 
async_add_entities(sensors) + + +class PalazzettiSensor(PalazzettiEntity, SensorEntity): + """Define a Palazzetti sensor.""" + + entity_description: PropertySensorEntityDescription + + def __init__( + self, + coordinator: PalazzettiDataUpdateCoordinator, + description: PropertySensorEntityDescription, + ) -> None: + """Initialize Palazzetti sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.config_entry.unique_id}-{description.key}" + + @property + def native_value(self) -> StateType: + """Return the state value of the sensor.""" + + raw_value = getattr( + self.coordinator.client, self.entity_description.client_property + ) + + if self.entity_description.property_map: + return self.entity_description.property_map[raw_value] + + return raw_value diff --git a/homeassistant/components/palazzetti/strings.json b/homeassistant/components/palazzetti/strings.json new file mode 100644 index 00000000000..ad7bc498bd1 --- /dev/null +++ b/homeassistant/components/palazzetti/strings.json @@ -0,0 +1,125 @@ +{ + "config": { + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The host name or the IP address of the Palazzetti CBox" + } + }, + "discovery_confirm": { + "description": "Do you want to add {name} ({host}) to Home Assistant?" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + } + }, + "exceptions": { + "on_off_not_available": { + "message": "The appliance cannot be turned on or off." + }, + "invalid_fan_mode": { + "message": "Fan mode {value} is invalid." + }, + "invalid_target_temperature": { + "message": "Target temperature {value} is invalid." + }, + "invalid_combustion_power": { + "message": "Combustion power {value} is invalid." + }, + "cannot_connect": { + "message": "Could not connect to the device." 
+ } + }, + "entity": { + "climate": { + "palazzetti": { + "state_attributes": { + "fan_mode": { + "state": { + "silent": "Silent", + "auto": "Auto", + "high": "High" + } + } + } + } + }, + "number": { + "combustion_power": { + "name": "Combustion power" + } + }, + "sensor": { + "status": { + "name": "Status", + "state": { + "off": "Off", + "off_timer": "Timer-regulated switch off", + "test_fire": "Ignition test", + "heatup": "Pellet feed", + "fueling": "Ignition", + "ign_test": "Fuel check", + "burning": "Operating", + "burning_mod": "Operating - Modulating", + "unknown": "Unknown", + "cool_fluid": "Stand-by", + "fire_stop": "Switch off", + "clean_fire": "Burn pot cleaning", + "cooling": "Cooling in progress", + "cleanup": "Final cleaning", + "ecomode": "Ecomode", + "chimney_alarm": "Chimney alarm", + "grate_error": "Grate error", + "pellet_water_error": "Pellet probe or return water error", + "t05_error": "T05 error disconnected or faulty probe", + "hatch_door_open": "Feed hatch or door open", + "pressure_error": "Safety pressure switch error", + "main_probe_failure": "Main probe failure", + "flue_probe_failure": "Flue gas probe failure", + "exhaust_temp_high": "Too high exhaust gas temperature", + "pellet_finished": "Pellets finished or ignition failed", + "firewood_finished": "Firewood finished", + "general_error": "General error", + "door_open": "Door open", + "temp_too_high": "Temperature too high", + "cleaning_warning": "Cleaning warning", + "fuel_error": "Fuel error" + } + }, + "pellet_quantity": { + "name": "Pellet quantity" + }, + "pellet_level": { + "name": "Pellet level" + }, + "air_outlet_temperature": { + "name": "Air outlet temperature" + }, + "wood_combustion_temperature": { + "name": "Wood combustion temperature" + }, + "room_temperature": { + "name": "Room temperature" + }, + "return_water_temperature": { + "name": "Return water temperature" + }, + "tank_water_temperature": { + "name": "Tank water temperature" + }, + "t1_hydro": { + "name": "Hydro temperature 1" + }, + "t2_hydro": { + "name": "Hydro temperature 2" + } + } + } +} diff --git a/homeassistant/components/panasonic_bluray/manifest.json b/homeassistant/components/panasonic_bluray/manifest.json index fa0202c0871..3de12b051e5 100644 --- a/homeassistant/components/panasonic_bluray/manifest.json +++ b/homeassistant/components/panasonic_bluray/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/panasonic_bluray", "iot_class": "local_polling", "loggers": ["panacotta"], + "quality_scale": "legacy", "requirements": ["panacotta==0.2"] } diff --git a/homeassistant/components/panasonic_viera/config_flow.py b/homeassistant/components/panasonic_viera/config_flow.py index 0226fb33c9e..b00fee513a6 100644 --- a/homeassistant/components/panasonic_viera/config_flow.py +++ b/homeassistant/components/panasonic_viera/config_flow.py @@ -157,11 +157,9 @@ class PanasonicVieraConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(user_input=import_config) + return await self.async_step_user(user_input=import_data) async def async_load_data(self, config: dict[str, Any]) -> None: """Load the data.""" diff --git a/homeassistant/components/pandora/manifest.json b/homeassistant/components/pandora/manifest.json index 
b86f0754af3..e7d8946fb38 100644 --- a/homeassistant/components/pandora/manifest.json +++ b/homeassistant/components/pandora/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/pandora", "iot_class": "local_polling", "loggers": ["pexpect", "ptyprocess"], + "quality_scale": "legacy", "requirements": ["pexpect==4.6.0"] } diff --git a/homeassistant/components/pandora/media_player.py b/homeassistant/components/pandora/media_player.py index eb6815959c2..f781f366173 100644 --- a/homeassistant/components/pandora/media_player.py +++ b/homeassistant/components/pandora/media_player.py @@ -98,7 +98,7 @@ class PandoraMediaPlayer(MediaPlayerEntity): if self.state != MediaPlayerState.OFF: return self._pianobar = pexpect.spawn("pianobar") - _LOGGER.info("Started pianobar subprocess") + _LOGGER.debug("Started pianobar subprocess") mode = self._pianobar.expect( ["Receiving new playlist", "Select station:", "Email:"] ) @@ -126,7 +126,7 @@ class PandoraMediaPlayer(MediaPlayerEntity): def turn_off(self) -> None: """Turn the media player off.""" if self._pianobar is None: - _LOGGER.info("Pianobar subprocess already stopped") + _LOGGER.warning("Pianobar subprocess already stopped") return self._pianobar.send("q") try: @@ -212,7 +212,7 @@ class PandoraMediaPlayer(MediaPlayerEntity): ] ) except pexpect.exceptions.EOF: - _LOGGER.info("Pianobar process already exited") + _LOGGER.warning("Pianobar process already exited") return None self._log_match() @@ -289,7 +289,7 @@ class PandoraMediaPlayer(MediaPlayerEntity): command = CMD_MAP.get(service_cmd) _LOGGER.debug("Sending pinaobar command %s for %s", command, service_cmd) if command is None: - _LOGGER.info("Command %s not supported yet", service_cmd) + _LOGGER.warning("Command %s not supported yet", service_cmd) self._clear_buffer() self._pianobar.sendline(command) diff --git a/homeassistant/components/panel_custom/manifest.json b/homeassistant/components/panel_custom/manifest.json index ab5c4931b57..1b4bef6bc99 100644 --- a/homeassistant/components/panel_custom/manifest.json +++ b/homeassistant/components/panel_custom/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@home-assistant/frontend"], "dependencies": ["frontend"], "documentation": "https://www.home-assistant.io/integrations/panel_custom", + "integration_type": "system", "quality_scale": "internal" } diff --git a/homeassistant/components/panel_iframe/__init__.py b/homeassistant/components/panel_iframe/__init__.py deleted file mode 100644 index 1b6dfebd6b0..00000000000 --- a/homeassistant/components/panel_iframe/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Register an iFrame front end panel.""" - -import voluptuous as vol - -from homeassistant.components import lovelace -from homeassistant.components.lovelace import dashboard -from homeassistant.const import CONF_ICON, CONF_URL -from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.storage import Store -from homeassistant.helpers.typing import ConfigType - -DOMAIN = "panel_iframe" - -CONF_TITLE = "title" - -CONF_RELATIVE_URL_ERROR_MSG = "Invalid relative URL. Absolute path required." 
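Editor's note: the panel_iframe module being deleted here validated CONF_URL as either a site-relative path (matching the regex on the next line) or a full URL. A minimal sketch of that validator pair in isolation, assuming only the voluptuous package; url_validator is an illustrative name, not an identifier from the integration.

import voluptuous as vol

url_validator = vol.Any(
    vol.Match(r"\A/", msg="Invalid relative URL. Absolute path required."),
    vol.Url(),
)

print(url_validator("/local/dashboard.html"))     # accepted: absolute path
print(url_validator("https://example.com/page"))  # accepted: full URL
# url_validator("local/dashboard.html")           # would raise an Invalid error

The same two-branch validation is what the YAML import below relies on when it converts each configured panel into an iframe dashboard.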
-CONF_RELATIVE_URL_REGEX = r"\A/" -CONF_REQUIRE_ADMIN = "require_admin" - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: cv.schema_with_slug_keys( - vol.Schema( - { - vol.Optional(CONF_TITLE): cv.string, - vol.Optional(CONF_ICON): cv.icon, - vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean, - vol.Required(CONF_URL): vol.Any( - vol.Match( - CONF_RELATIVE_URL_REGEX, msg=CONF_RELATIVE_URL_ERROR_MSG - ), - vol.Url(), - ), - } - ) - ) - }, - extra=vol.ALLOW_EXTRA, -) - -STORAGE_KEY = DOMAIN -STORAGE_VERSION_MAJOR = 1 - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the iFrame frontend panels.""" - async_create_issue( - hass, - DOMAIN, - "deprecated_yaml", - breaks_in_ha_version="2024.10.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "iframe Panel", - }, - ) - - store: Store[dict[str, bool]] = Store( - hass, - STORAGE_VERSION_MAJOR, - STORAGE_KEY, - ) - data = await store.async_load() - if data: - return True - - dashboards_collection: dashboard.DashboardsCollection = hass.data[lovelace.DOMAIN][ - "dashboards_collection" - ] - - for url_path, info in config[DOMAIN].items(): - dashboard_create_data = { - lovelace.CONF_ALLOW_SINGLE_WORD: True, - lovelace.CONF_URL_PATH: url_path, - } - for key in (CONF_ICON, CONF_REQUIRE_ADMIN, CONF_TITLE): - if key in info: - dashboard_create_data[key] = info[key] - - await dashboards_collection.async_create_item(dashboard_create_data) - - dashboard_store: dashboard.LovelaceStorage = hass.data[lovelace.DOMAIN][ - "dashboards" - ][url_path] - await dashboard_store.async_save( - {"strategy": {"type": "iframe", "url": info[CONF_URL]}} - ) - - await store.async_save({"migrated": True}) - - return True diff --git a/homeassistant/components/panel_iframe/manifest.json b/homeassistant/components/panel_iframe/manifest.json deleted file mode 100644 index 7a39e0ba17d..00000000000 --- a/homeassistant/components/panel_iframe/manifest.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "domain": "panel_iframe", - "name": "iframe Panel", - "codeowners": ["@home-assistant/frontend"], - "dependencies": ["frontend", "lovelace"], - "documentation": "https://www.home-assistant.io/integrations/panel_iframe", - "quality_scale": "internal" -} diff --git a/homeassistant/components/panel_iframe/strings.json b/homeassistant/components/panel_iframe/strings.json deleted file mode 100644 index 595b1f04818..00000000000 --- a/homeassistant/components/panel_iframe/strings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "issues": { - "deprecated_yaml": { - "title": "The {integration_title} YAML configuration is being removed", - "description": "Configuring {integration_title} using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically as a regular dashboard.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." 
- } - } -} diff --git a/homeassistant/components/peco/__init__.py b/homeassistant/components/peco/__init__.py index 12979f27793..1de5d4bb6a2 100644 --- a/homeassistant/components/peco/__init__.py +++ b/homeassistant/components/peco/__init__.py @@ -68,6 +68,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: outage_coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name="PECO Outage Count", update_method=async_update_outage_data, update_interval=timedelta(minutes=OUTAGE_SCAN_INTERVAL), @@ -97,6 +98,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: meter_coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name="PECO Smart Meter", update_method=async_update_meter_data, update_interval=timedelta(minutes=SMART_METER_SCAN_INTERVAL), diff --git a/homeassistant/components/pegel_online/coordinator.py b/homeassistant/components/pegel_online/coordinator.py index 1802af8e05c..c8233673fde 100644 --- a/homeassistant/components/pegel_online/coordinator.py +++ b/homeassistant/components/pegel_online/coordinator.py @@ -7,7 +7,7 @@ from aiopegelonline import CONNECT_ERRORS, PegelOnline, Station, StationMeasurem from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import MIN_TIME_BETWEEN_UPDATES +from .const import DOMAIN, MIN_TIME_BETWEEN_UPDATES _LOGGER = logging.getLogger(__name__) @@ -33,4 +33,8 @@ class PegelOnlineDataUpdateCoordinator(DataUpdateCoordinator[StationMeasurements try: return await self.api.async_get_station_measurements(self.station.uuid) except CONNECT_ERRORS as err: - raise UpdateFailed(f"Failed to communicate with API: {err}") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="communication_error", + translation_placeholders={"error": str(err)}, + ) from err diff --git a/homeassistant/components/pegel_online/diagnostics.py b/homeassistant/components/pegel_online/diagnostics.py new file mode 100644 index 00000000000..b68437c5ee7 --- /dev/null +++ b/homeassistant/components/pegel_online/diagnostics.py @@ -0,0 +1,21 @@ +"""Diagnostics support for pegel_online.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
import PegelOnlineConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: PegelOnlineConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + + return { + "entry": entry.as_dict(), + "data": coordinator.data, + } diff --git a/homeassistant/components/pegel_online/entity.py b/homeassistant/components/pegel_online/entity.py index 4ad12f12913..4e157a5f63b 100644 --- a/homeassistant/components/pegel_online/entity.py +++ b/homeassistant/components/pegel_online/entity.py @@ -2,7 +2,7 @@ from __future__ import annotations -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN @@ -29,4 +29,5 @@ class PegelOnlineEntity(CoordinatorEntity[PegelOnlineDataUpdateCoordinator]): name=f"{self.station.name} {self.station.water_name}", manufacturer=self.station.agency, configuration_url=self.station.base_data_url, + entry_type=DeviceEntryType.SERVICE, ) diff --git a/homeassistant/components/pegel_online/manifest.json b/homeassistant/components/pegel_online/manifest.json index d51278d0c1b..443e8c58467 100644 --- a/homeassistant/components/pegel_online/manifest.json +++ b/homeassistant/components/pegel_online/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aiopegelonline"], - "requirements": ["aiopegelonline==0.0.10"] + "requirements": ["aiopegelonline==0.1.0"] } diff --git a/homeassistant/components/pegel_online/strings.json b/homeassistant/components/pegel_online/strings.json index e777f6169ba..b8d18e63a4f 100644 --- a/homeassistant/components/pegel_online/strings.json +++ b/homeassistant/components/pegel_online/strings.json @@ -48,5 +48,10 @@ "name": "Water temperature" } } + }, + "exceptions": { + "communication_error": { + "message": "Failed to communicate with API: {error}" + } } } diff --git a/homeassistant/components/pencom/manifest.json b/homeassistant/components/pencom/manifest.json index 34ebe315972..306b2e7be49 100644 --- a/homeassistant/components/pencom/manifest.json +++ b/homeassistant/components/pencom/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/pencom", "iot_class": "local_polling", "loggers": ["pencompy"], + "quality_scale": "legacy", "requirements": ["pencompy==0.0.3"] } diff --git a/homeassistant/components/permobil/config_flow.py b/homeassistant/components/permobil/config_flow.py index cb47640e55f..07ddefa9dce 100644 --- a/homeassistant/components/permobil/config_flow.py +++ b/homeassistant/components/permobil/config_flow.py @@ -14,7 +14,7 @@ from mypermobil import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_CODE, CONF_EMAIL, CONF_REGION, CONF_TOKEN, CONF_TTL from homeassistant.core import HomeAssistant, async_get_hass from homeassistant.helpers import selector @@ -158,20 +158,20 @@ class PermobilConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={"app_name": "MyPermobil"}, ) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), title=self.data[CONF_EMAIL], data=self.data + ) + return self.async_create_entry(title=self.data[CONF_EMAIL], data=self.data) async def 
async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert reauth_entry - try: - email: str = reauth_entry.data[CONF_EMAIL] - region: str = reauth_entry.data[CONF_REGION] + email: str = entry_data[CONF_EMAIL] + region: str = entry_data[CONF_REGION] self.p_api.set_email(email) self.p_api.set_region(region) self.data = { diff --git a/homeassistant/components/permobil/strings.json b/homeassistant/components/permobil/strings.json index d3a9290854e..0b55162b53e 100644 --- a/homeassistant/components/permobil/strings.json +++ b/homeassistant/components/permobil/strings.json @@ -15,12 +15,14 @@ "region": { "description": "Select the region of your account.", "data": { - "code": "Region" + "region": "Region" } } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "error": { "unknown": "Unexpected error, more information in the logs", diff --git a/homeassistant/components/persistent_notification/__init__.py b/homeassistant/components/persistent_notification/__init__.py index a785d015ffb..a5eb8bb4f4d 100644 --- a/homeassistant/components/persistent_notification/__init__.py +++ b/homeassistant/components/persistent_notification/__init__.py @@ -184,8 +184,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: create_service, vol.Schema( { - vol.Required(ATTR_MESSAGE): vol.Any(cv.dynamic_template, cv.string), - vol.Optional(ATTR_TITLE): vol.Any(cv.dynamic_template, cv.string), + vol.Required(ATTR_MESSAGE): cv.string, + vol.Optional(ATTR_TITLE): cv.string, vol.Optional(ATTR_NOTIFICATION_ID): cv.string, } ), diff --git a/homeassistant/components/persistent_notification/icons.json b/homeassistant/components/persistent_notification/icons.json index 9c782bd7b21..30847357a47 100644 --- a/homeassistant/components/persistent_notification/icons.json +++ b/homeassistant/components/persistent_notification/icons.json @@ -1,7 +1,13 @@ { "services": { - "create": "mdi:message-badge", - "dismiss": "mdi:bell-off", - "dismiss_all": "mdi:notification-clear-all" + "create": { + "service": "mdi:message-badge" + }, + "dismiss": { + "service": "mdi:bell-off" + }, + "dismiss_all": { + "service": "mdi:notification-clear-all" + } } } diff --git a/homeassistant/components/persistent_notification/strings.json b/homeassistant/components/persistent_notification/strings.json index b9a4ae4f10f..e6c3d3b7775 100644 --- a/homeassistant/components/persistent_notification/strings.json +++ b/homeassistant/components/persistent_notification/strings.json @@ -21,17 +21,17 @@ }, "dismiss": { "name": "Dismiss", - "description": "Removes a notification from the notifications panel.", + "description": "Deletes a notification from the notifications panel.", "fields": { "notification_id": { "name": "[%key:component::persistent_notification::services::create::fields::notification_id::name%]", - "description": "ID of the notification to be removed." + "description": "ID of the notification to be deleted." } } }, "dismiss_all": { "name": "Dismiss all", - "description": "Removes all notifications from the notifications panel." 
+ "description": "Deletes all notifications from the notifications panel." } } } diff --git a/homeassistant/components/person/icons.json b/homeassistant/components/person/icons.json index fbfd5be75d2..f645d9c2090 100644 --- a/homeassistant/components/person/icons.json +++ b/homeassistant/components/person/icons.json @@ -8,6 +8,8 @@ } }, "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/philips_js/config_flow.py b/homeassistant/components/philips_js/config_flow.py index a73145f7c1c..66b4439acd8 100644 --- a/homeassistant/components/philips_js/config_flow.py +++ b/homeassistant/components/philips_js/config_flow.py @@ -9,7 +9,12 @@ from typing import Any from haphilipsjs import ConnectionFailure, PairingFailure, PhilipsTV import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_REAUTH, + ConfigEntry, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import ( CONF_API_VERSION, CONF_HOST, @@ -75,18 +80,13 @@ class PhilipsJSConfigFlow(ConfigFlow, domain=DOMAIN): self._current: dict[str, Any] = {} self._hub: PhilipsTV | None = None self._pair_state: Any = None - self._entry: ConfigEntry | None = None async def _async_create_current(self) -> ConfigFlowResult: system = self._current[CONF_SYSTEM] - if self._entry: - self.hass.config_entries.async_update_entry( - self._entry, data=self._entry.data | self._current + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=self._current ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=f"{system['name']} ({system['serialnumber']})", @@ -150,7 +150,6 @@ class PhilipsJSConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) self._current[CONF_HOST] = entry_data[CONF_HOST] self._current[CONF_API_VERSION] = entry_data[CONF_API_VERSION] return await self.async_step_user() @@ -175,7 +174,7 @@ class PhilipsJSConfigFlow(ConfigFlow, domain=DOMAIN): else: if serialnumber := hub.system.get("serialnumber"): await self.async_set_unique_id(serialnumber) - if self._entry is None: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() self._current[CONF_SYSTEM] = hub.system diff --git a/homeassistant/components/philips_js/strings.json b/homeassistant/components/philips_js/strings.json index 3ea632ce436..1f187d89dda 100644 --- a/homeassistant/components/philips_js/strings.json +++ b/homeassistant/components/philips_js/strings.json @@ -18,11 +18,11 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "unknown": "[%key:common::config_flow::error::unknown%]", - "pairing_failure": "Unable to pair: {error_id}", "invalid_pin": "Invalid PIN" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "pairing_failure": "Unable to pair: {error_id}", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, diff --git a/homeassistant/components/pi_hole/__init__.py b/homeassistant/components/pi_hole/__init__.py index bf314e96dec..5cc21cef3a9 100644 --- a/homeassistant/components/pi_hole/__init__.py +++ 
b/homeassistant/components/pi_hole/__init__.py
@@ -22,12 +22,7 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.helpers.device_registry import DeviceInfo
-from homeassistant.helpers.update_coordinator import (
-    CoordinatorEntity,
-    DataUpdateCoordinator,
-    UpdateFailed,
-)
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import CONF_STATISTICS_ONLY, DOMAIN, MIN_TIME_BETWEEN_UPDATES

@@ -123,6 +118,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PiHoleConfigEntry) -> bo
     coordinator = DataUpdateCoordinator(
         hass,
         _LOGGER,
+        config_entry=entry,
         name=name,
         update_method=async_update_data,
         update_interval=MIN_TIME_BETWEEN_UPDATES,
@@ -140,35 +136,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: PiHoleConfigEntry) -> bo
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload Pi-hole entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-
-
-class PiHoleEntity(CoordinatorEntity[DataUpdateCoordinator[None]]):
-    """Representation of a Pi-hole entity."""
-
-    def __init__(
-        self,
-        api: Hole,
-        coordinator: DataUpdateCoordinator[None],
-        name: str,
-        server_unique_id: str,
-    ) -> None:
-        """Initialize a Pi-hole entity."""
-        super().__init__(coordinator)
-        self.api = api
-        self._name = name
-        self._server_unique_id = server_unique_id
-
-    @property
-    def device_info(self) -> DeviceInfo:
-        """Return the device information of the entity."""
-        if self.api.tls:
-            config_url = f"https://{self.api.host}/{self.api.location}"
-        else:
-            config_url = f"http://{self.api.host}/{self.api.location}"
-
-        return DeviceInfo(
-            identifiers={(DOMAIN, self._server_unique_id)},
-            name=self._name,
-            manufacturer="Pi-hole",
-            configuration_url=config_url,
-        )
diff --git a/homeassistant/components/pi_hole/binary_sensor.py b/homeassistant/components/pi_hole/binary_sensor.py
index 001a2ebcee8..5e3ce560ab4 100644
--- a/homeassistant/components/pi_hole/binary_sensor.py
+++ b/homeassistant/components/pi_hole/binary_sensor.py
@@ -17,7 +17,8 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

-from . import PiHoleConfigEntry, PiHoleEntity
+from . import PiHoleConfigEntry
+from .entity import PiHoleEntity


 @dataclass(frozen=True, kw_only=True)
diff --git a/homeassistant/components/pi_hole/config_flow.py b/homeassistant/components/pi_hole/config_flow.py
index d6f42d57deb..e50b018caa4 100644
--- a/homeassistant/components/pi_hole/config_flow.py
+++ b/homeassistant/components/pi_hole/config_flow.py
@@ -136,15 +136,9 @@ class PiHoleFlowHandler(ConfigFlow, domain=DOMAIN):
         if user_input is not None:
             self._config = {**self._config, CONF_API_KEY: user_input[CONF_API_KEY]}
             if not (errors := await self._async_try_connect()):
-                entry = self.hass.config_entries.async_get_entry(
-                    self.context["entry_id"]
+                return self.async_update_reload_and_abort(
+                    self._get_reauth_entry(), data=self._config
                 )
-                assert entry
-                self.hass.config_entries.async_update_entry(entry, data=self._config)
-                self.hass.async_create_task(
-                    self.hass.config_entries.async_reload(self.context["entry_id"])
-                )
-                return self.async_abort(reason="reauth_successful")

         return self.async_show_form(
             step_id="reauth_confirm",
diff --git a/homeassistant/components/pi_hole/entity.py b/homeassistant/components/pi_hole/entity.py
new file mode 100644
index 00000000000..0f5c6039232
--- /dev/null
+++ b/homeassistant/components/pi_hole/entity.py
@@ -0,0 +1,45 @@
+"""The pi_hole component."""
+
+from __future__ import annotations
+
+from hole import Hole
+
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.update_coordinator import (
+    CoordinatorEntity,
+    DataUpdateCoordinator,
+)
+
+from .const import DOMAIN
+
+
+class PiHoleEntity(CoordinatorEntity[DataUpdateCoordinator[None]]):
+    """Representation of a Pi-hole entity."""
+
+    def __init__(
+        self,
+        api: Hole,
+        coordinator: DataUpdateCoordinator[None],
+        name: str,
+        server_unique_id: str,
+    ) -> None:
+        """Initialize a Pi-hole entity."""
+        super().__init__(coordinator)
+        self.api = api
+        self._name = name
+        self._server_unique_id = server_unique_id
+
+    @property
+    def device_info(self) -> DeviceInfo:
+        """Return the device information of the entity."""
+        if self.api.tls:
+            config_url = f"https://{self.api.host}/{self.api.location}"
+        else:
+            config_url = f"http://{self.api.host}/{self.api.location}"
+
+        return DeviceInfo(
+            identifiers={(DOMAIN, self._server_unique_id)},
+            name=self._name,
+            manufacturer="Pi-hole",
+            configuration_url=config_url,
+        )
diff --git a/homeassistant/components/pi_hole/icons.json b/homeassistant/components/pi_hole/icons.json
index 58f20da5a2d..3a45f8ab454 100644
--- a/homeassistant/components/pi_hole/icons.json
+++ b/homeassistant/components/pi_hole/icons.json
@@ -36,6 +36,8 @@
     }
   },
   "services": {
-    "disable": "mdi:server-off"
+    "disable": {
+      "service": "mdi:server-off"
+    }
   }
 }
diff --git a/homeassistant/components/pi_hole/sensor.py b/homeassistant/components/pi_hole/sensor.py
index 14ad3ac82dd..4cf5133e700 100644
--- a/homeassistant/components/pi_hole/sensor.py
+++ b/homeassistant/components/pi_hole/sensor.py
@@ -11,13 +11,13 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import StateType
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

-from . import PiHoleConfigEntry, PiHoleEntity
+from . import PiHoleConfigEntry
+from .entity import PiHoleEntity

 SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
     SensorEntityDescription(
         key="ads_blocked_today",
         translation_key="ads_blocked_today",
-        native_unit_of_measurement="ads",
     ),
     SensorEntityDescription(
         key="ads_percentage_today",
@@ -27,38 +27,20 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
     SensorEntityDescription(
         key="clients_ever_seen",
         translation_key="clients_ever_seen",
-        native_unit_of_measurement="clients",
     ),
     SensorEntityDescription(
-        key="dns_queries_today",
-        translation_key="dns_queries_today",
-        native_unit_of_measurement="queries",
+        key="dns_queries_today", translation_key="dns_queries_today"
     ),
     SensorEntityDescription(
         key="domains_being_blocked",
         translation_key="domains_being_blocked",
-        native_unit_of_measurement="domains",
     ),
+    SensorEntityDescription(key="queries_cached", translation_key="queries_cached"),
     SensorEntityDescription(
-        key="queries_cached",
-        translation_key="queries_cached",
-        native_unit_of_measurement="queries",
-    ),
-    SensorEntityDescription(
-        key="queries_forwarded",
-        translation_key="queries_forwarded",
-        native_unit_of_measurement="queries",
-    ),
-    SensorEntityDescription(
-        key="unique_clients",
-        translation_key="unique_clients",
-        native_unit_of_measurement="clients",
-    ),
-    SensorEntityDescription(
-        key="unique_domains",
-        translation_key="unique_domains",
-        native_unit_of_measurement="domains",
+        key="queries_forwarded", translation_key="queries_forwarded"
     ),
+    SensorEntityDescription(key="unique_clients", translation_key="unique_clients"),
+    SensorEntityDescription(key="unique_domains", translation_key="unique_domains"),
 )


diff --git a/homeassistant/components/pi_hole/strings.json b/homeassistant/components/pi_hole/strings.json
index b76b61f1903..9e1d5948a09 100644
--- a/homeassistant/components/pi_hole/strings.json
+++ b/homeassistant/components/pi_hole/strings.json
@@ -41,31 +41,39 @@
     },
     "sensor": {
       "ads_blocked_today": {
-        "name": "Ads blocked today"
+        "name": "Ads blocked today",
+        "unit_of_measurement": "ads"
       },
       "ads_percentage_today": {
         "name": "Ads percentage blocked today"
       },
       "clients_ever_seen": {
-        "name": "Seen clients"
+        "name": "Seen clients",
+        "unit_of_measurement": "clients"
       },
       "dns_queries_today": {
-        "name": "DNS queries today"
+        "name": "DNS queries today",
+        "unit_of_measurement": "queries"
       },
       "domains_being_blocked": {
-        "name": "Domains blocked"
+        "name": "Domains blocked",
+        "unit_of_measurement": "domains"
       },
       "queries_cached": {
-        "name": "DNS queries cached"
+        "name": "DNS queries cached",
+        "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::dns_queries_today::unit_of_measurement%]"
       },
       "queries_forwarded": {
-        "name": "DNS queries forwarded"
+        "name": "DNS queries forwarded",
+        "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::dns_queries_today::unit_of_measurement%]"
       },
       "unique_clients": {
-        "name": "DNS unique clients"
+        "name": "DNS unique clients",
+        "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::clients_ever_seen::unit_of_measurement%]"
       },
       "unique_domains": {
-        "name": "DNS unique domains"
+        "name": "DNS unique domains",
+        "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::domains_being_blocked::unit_of_measurement%]"
       }
     },
     "update": {
diff --git a/homeassistant/components/pi_hole/switch.py b/homeassistant/components/pi_hole/switch.py
index 83ed3e6d787..805ba479a9e 100644
--- a/homeassistant/components/pi_hole/switch.py
+++ b/homeassistant/components/pi_hole/switch.py
@@ -14,8 +14,9 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation
as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import PiHoleConfigEntry, PiHoleEntity +from . import PiHoleConfigEntry from .const import SERVICE_DISABLE, SERVICE_DISABLE_ATTR_DURATION +from .entity import PiHoleEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/pi_hole/update.py b/homeassistant/components/pi_hole/update.py index c1a435f628c..510f5d1dc19 100644 --- a/homeassistant/components/pi_hole/update.py +++ b/homeassistant/components/pi_hole/update.py @@ -13,7 +13,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . import PiHoleConfigEntry, PiHoleEntity +from . import PiHoleConfigEntry +from .entity import PiHoleEntity @dataclass(frozen=True) diff --git a/homeassistant/components/picnic/icons.json b/homeassistant/components/picnic/icons.json index d8f99153f33..78803b6d263 100644 --- a/homeassistant/components/picnic/icons.json +++ b/homeassistant/components/picnic/icons.json @@ -57,6 +57,8 @@ } }, "services": { - "add_product": "mdi:cart-plus" + "add_product": { + "service": "mdi:cart-plus" + } } } diff --git a/homeassistant/components/picotts/manifest.json b/homeassistant/components/picotts/manifest.json index 74b91e187ba..6e8c346a3c9 100644 --- a/homeassistant/components/picotts/manifest.json +++ b/homeassistant/components/picotts/manifest.json @@ -3,5 +3,6 @@ "name": "Pico TTS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/picotts", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/pilight/binary_sensor.py b/homeassistant/components/pilight/binary_sensor.py index 4d68748e0f7..0a94147af70 100644 --- a/homeassistant/components/pilight/binary_sensor.py +++ b/homeassistant/components/pilight/binary_sensor.py @@ -24,7 +24,7 @@ from homeassistant.helpers.event import track_point_in_time from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import dt as dt_util -from .. import pilight +from . 
import EVENT CONF_VARIABLE = "variable" CONF_RESET_DELAY_SEC = "reset_delay_sec" @@ -96,7 +96,7 @@ class PilightBinarySensor(BinarySensorEntity): self._on_value = on_value self._off_value = off_value - hass.bus.listen(pilight.EVENT, self._handle_code) + hass.bus.listen(EVENT, self._handle_code) @property def name(self): @@ -150,7 +150,7 @@ class PilightTriggerSensor(BinarySensorEntity): self._delay_after = None self._hass = hass - hass.bus.listen(pilight.EVENT, self._handle_code) + hass.bus.listen(EVENT, self._handle_code) @property def name(self): diff --git a/homeassistant/components/pilight/base_class.py b/homeassistant/components/pilight/entity.py similarity index 100% rename from homeassistant/components/pilight/base_class.py rename to homeassistant/components/pilight/entity.py diff --git a/homeassistant/components/pilight/icons.json b/homeassistant/components/pilight/icons.json index c1b8e741e45..cbc48cf2105 100644 --- a/homeassistant/components/pilight/icons.json +++ b/homeassistant/components/pilight/icons.json @@ -1,5 +1,7 @@ { "services": { - "send": "mdi:send" + "send": { + "service": "mdi:send" + } } } diff --git a/homeassistant/components/pilight/light.py b/homeassistant/components/pilight/light.py index 5665e96b9c9..c3d1a3c234c 100644 --- a/homeassistant/components/pilight/light.py +++ b/homeassistant/components/pilight/light.py @@ -18,8 +18,8 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .base_class import SWITCHES_SCHEMA, PilightBaseDevice from .const import CONF_DIMLEVEL_MAX, CONF_DIMLEVEL_MIN +from .entity import SWITCHES_SCHEMA, PilightBaseDevice LIGHTS_SCHEMA = SWITCHES_SCHEMA.extend( { diff --git a/homeassistant/components/pilight/manifest.json b/homeassistant/components/pilight/manifest.json index cd542f11a0c..da07c4ee645 100644 --- a/homeassistant/components/pilight/manifest.json +++ b/homeassistant/components/pilight/manifest.json @@ -1,9 +1,10 @@ { "domain": "pilight", "name": "Pilight", - "codeowners": ["@trekky12"], + "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/pilight", "iot_class": "local_push", "loggers": ["pilight"], + "quality_scale": "legacy", "requirements": ["pilight==0.1.1"] } diff --git a/homeassistant/components/pilight/sensor.py b/homeassistant/components/pilight/sensor.py index 8e5f3b7d78a..5ab80f57dc6 100644 --- a/homeassistant/components/pilight/sensor.py +++ b/homeassistant/components/pilight/sensor.py @@ -16,7 +16,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .. import pilight +from . 
import EVENT _LOGGER = logging.getLogger(__name__) @@ -67,7 +67,7 @@ class PilightSensor(SensorEntity): self._payload = payload self._unit_of_measurement = unit_of_measurement - hass.bus.listen(pilight.EVENT, self._handle_code) + hass.bus.listen(EVENT, self._handle_code) @property def name(self): diff --git a/homeassistant/components/pilight/switch.py b/homeassistant/components/pilight/switch.py index 5be63064b4a..a1976921269 100644 --- a/homeassistant/components/pilight/switch.py +++ b/homeassistant/components/pilight/switch.py @@ -14,7 +14,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .base_class import SWITCHES_SCHEMA, PilightBaseDevice +from .entity import SWITCHES_SCHEMA, PilightBaseDevice PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( {vol.Required(CONF_SWITCHES): vol.Schema({cv.string: SWITCHES_SCHEMA})} diff --git a/homeassistant/components/ping/__init__.py b/homeassistant/components/ping/__init__.py index f4a04caae5b..4b03e5e4407 100644 --- a/homeassistant/components/ping/__init__.py +++ b/homeassistant/components/ping/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from dataclasses import dataclass import logging from icmplib import SocketPermissionError, async_ping @@ -12,6 +11,7 @@ from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import CONF_PING_COUNT, DOMAIN from .coordinator import PingUpdateCoordinator @@ -21,13 +21,7 @@ _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, Platform.SENSOR] - - -@dataclass(slots=True) -class PingDomainData: - """Dataclass to store privileged status.""" - - privileged: bool | None +DATA_PRIVILEGED_KEY: HassKey[bool | None] = HassKey(DOMAIN) type PingConfigEntry = ConfigEntry[PingUpdateCoordinator] @@ -35,29 +29,25 @@ type PingConfigEntry = ConfigEntry[PingUpdateCoordinator] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the ping integration.""" - - hass.data[DOMAIN] = PingDomainData( - privileged=await _can_use_icmp_lib_with_privilege(), - ) + hass.data[DATA_PRIVILEGED_KEY] = await _can_use_icmp_lib_with_privilege() return True async def async_setup_entry(hass: HomeAssistant, entry: PingConfigEntry) -> bool: """Set up Ping (ICMP) from a config entry.""" - - data: PingDomainData = hass.data[DOMAIN] + privileged = hass.data[DATA_PRIVILEGED_KEY] host: str = entry.options[CONF_HOST] count: int = int(entry.options[CONF_PING_COUNT]) ping_cls: type[PingDataICMPLib | PingDataSubProcess] - if data.privileged is None: + if privileged is None: ping_cls = PingDataSubProcess else: ping_cls = PingDataICMPLib coordinator = PingUpdateCoordinator( - hass=hass, ping=ping_cls(hass, host, count, data.privileged) + hass=hass, ping=ping_cls(hass, host, count, privileged) ) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/ping/binary_sensor.py b/homeassistant/components/ping/binary_sensor.py index 93f4e0f3896..5c50e4335f9 100644 --- a/homeassistant/components/ping/binary_sensor.py +++ b/homeassistant/components/ping/binary_sensor.py @@ -2,8 +2,6 @@ from __future__ import annotations -from typing 
import Any - from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, @@ -17,11 +15,6 @@ from .const import CONF_IMPORTED_BY from .coordinator import PingUpdateCoordinator from .entity import PingEntity -ATTR_ROUND_TRIP_TIME_AVG = "round_trip_time_avg" -ATTR_ROUND_TRIP_TIME_MAX = "round_trip_time_max" -ATTR_ROUND_TRIP_TIME_MDEV = "round_trip_time_mdev" -ATTR_ROUND_TRIP_TIME_MIN = "round_trip_time_min" - async def async_setup_entry( hass: HomeAssistant, entry: PingConfigEntry, async_add_entities: AddEntitiesCallback @@ -53,13 +46,3 @@ class PingBinarySensor(PingEntity, BinarySensorEntity): def is_on(self) -> bool: """Return true if the binary sensor is on.""" return self.coordinator.data.is_alive - - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes of the ICMP checo request.""" - return { - ATTR_ROUND_TRIP_TIME_AVG: self.coordinator.data.data.get("avg"), - ATTR_ROUND_TRIP_TIME_MAX: self.coordinator.data.data.get("max"), - ATTR_ROUND_TRIP_TIME_MDEV: self.coordinator.data.data.get("mdev"), - ATTR_ROUND_TRIP_TIME_MIN: self.coordinator.data.data.get("min"), - } diff --git a/homeassistant/components/ping/config_flow.py b/homeassistant/components/ping/config_flow.py index 9470b2134d4..27cb3f62bcd 100644 --- a/homeassistant/components/ping/config_flow.py +++ b/homeassistant/components/ping/config_flow.py @@ -27,6 +27,12 @@ from .const import CONF_PING_COUNT, DEFAULT_PING_COUNT, DOMAIN _LOGGER = logging.getLogger(__name__) +def _clean_user_input(user_input: dict[str, Any]) -> dict[str, Any]: + """Clean up the user input.""" + user_input[CONF_HOST] = user_input[CONF_HOST].strip() + return user_input + + class PingConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Ping.""" @@ -46,6 +52,7 @@ class PingConfigFlow(ConfigFlow, domain=DOMAIN): ), ) + user_input = _clean_user_input(user_input) if not is_ip_address(user_input[CONF_HOST]): self.async_abort(reason="invalid_ip_address") @@ -66,22 +73,18 @@ class PingConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle an options flow for Ping.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: - return self.async_create_entry(title="", data=user_input) + return self.async_create_entry(title="", data=_clean_user_input(user_input)) return self.async_show_form( step_id="init", diff --git a/homeassistant/components/ping/device_tracker.py b/homeassistant/components/ping/device_tracker.py index ce7cc4522a0..29a4e922234 100644 --- a/homeassistant/components/ping/device_tracker.py +++ b/homeassistant/components/ping/device_tracker.py @@ -8,7 +8,6 @@ from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME, ScannerEntity, - SourceType, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -57,11 +56,6 @@ class PingDeviceTracker(CoordinatorEntity[PingUpdateCoordinator], ScannerEntity) """Return a unique ID.""" return self.config_entry.entry_id - @property - def source_type(self) -> SourceType: - """Return the source type which is router.""" - return SourceType.ROUTER - 
@property def is_connected(self) -> bool: """Return true if ping returns is_alive or considered home.""" diff --git a/homeassistant/components/pioneer/manifest.json b/homeassistant/components/pioneer/manifest.json index c8aa3a79789..019b7680e09 100644 --- a/homeassistant/components/pioneer/manifest.json +++ b/homeassistant/components/pioneer/manifest.json @@ -3,5 +3,6 @@ "name": "Pioneer", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/pioneer", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/pjlink/manifest.json b/homeassistant/components/pjlink/manifest.json index 553ed185241..787311b250a 100644 --- a/homeassistant/components/pjlink/manifest.json +++ b/homeassistant/components/pjlink/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/pjlink", "iot_class": "local_polling", "loggers": ["pypjlink"], + "quality_scale": "legacy", "requirements": ["pypjlink2==1.2.1"] } diff --git a/homeassistant/components/plaato/__init__.py b/homeassistant/components/plaato/__init__.py index 59441f25025..585b6ecfd82 100644 --- a/homeassistant/components/plaato/__init__.py +++ b/homeassistant/components/plaato/__init__.py @@ -64,10 +64,10 @@ WEBHOOK_SCHEMA = vol.Schema( vol.Required(ATTR_DEVICE_NAME): cv.string, vol.Required(ATTR_DEVICE_ID): cv.positive_int, vol.Required(ATTR_TEMP_UNIT): vol.In( - UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT + [UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT] ), vol.Required(ATTR_VOLUME_UNIT): vol.In( - UnitOfVolume.LITERS, UnitOfVolume.GALLONS + [UnitOfVolume.LITERS, UnitOfVolume.GALLONS] ), vol.Required(ATTR_BPM): cv.positive_int, vol.Required(ATTR_TEMP): vol.Coerce(float), diff --git a/homeassistant/components/plaato/config_flow.py b/homeassistant/components/plaato/config_flow.py index 3ada4fdc312..f398a733cd6 100644 --- a/homeassistant/components/plaato/config_flow.py +++ b/homeassistant/components/plaato/config_flow.py @@ -71,7 +71,9 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_api_method(self, user_input=None): + async def async_step_api_method( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle device type step.""" device_type = self._init_info[CONF_DEVICE_TYPE] @@ -90,7 +92,9 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): return await self._show_api_method_form(device_type) - async def async_step_webhook(self, user_input=None): + async def async_step_webhook( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Validate config step.""" use_webhook = self._init_info[CONF_USE_WEBHOOK] @@ -136,8 +140,8 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): ) async def _show_api_method_form( - self, device_type: PlaatoDeviceType, errors: dict | None = None - ): + self, device_type: PlaatoDeviceType, errors: dict[str, str] | None = None + ) -> ConfigFlowResult: data_schema = vol.Schema({vol.Optional(CONF_TOKEN, default=""): str}) if device_type == PlaatoDeviceType.Airlock: @@ -172,23 +176,19 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> PlaatoOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> PlaatoOptionsFlowHandler: """Get the options flow for this handler.""" - return PlaatoOptionsFlowHandler(config_entry) + return PlaatoOptionsFlowHandler() class 
PlaatoOptionsFlowHandler(OptionsFlow): """Handle Plaato options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize domain options flow.""" - super().__init__() - - self._config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the options.""" - use_webhook = self._config_entry.data.get(CONF_USE_WEBHOOK, False) + use_webhook = self.config_entry.data.get(CONF_USE_WEBHOOK, False) if use_webhook: return await self.async_step_webhook() @@ -207,7 +207,7 @@ class PlaatoOptionsFlowHandler(OptionsFlow): { vol.Optional( CONF_SCAN_INTERVAL, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ), ): cv.positive_int @@ -215,12 +215,14 @@ class PlaatoOptionsFlowHandler(OptionsFlow): ), ) - async def async_step_webhook(self, user_input=None): + async def async_step_webhook( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options for webhook device.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) - webhook_id = self._config_entry.data.get(CONF_WEBHOOK_ID, None) + webhook_id = self.config_entry.data.get(CONF_WEBHOOK_ID, None) webhook_url = ( "" if webhook_id is None diff --git a/homeassistant/components/plaato/manifest.json b/homeassistant/components/plaato/manifest.json index aac7ec2d06f..1547501ac50 100644 --- a/homeassistant/components/plaato/manifest.json +++ b/homeassistant/components/plaato/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/plaato", "iot_class": "cloud_push", "loggers": ["pyplaato"], - "requirements": ["pyplaato==0.0.18"] + "requirements": ["pyplaato==0.0.19"] } diff --git a/homeassistant/components/plaato/sensor.py b/homeassistant/components/plaato/sensor.py index 7aa30dd2fe0..b11bac40144 100644 --- a/homeassistant/components/plaato/sensor.py +++ b/homeassistant/components/plaato/sensor.py @@ -44,7 +44,7 @@ async def async_setup_entry( entry_data = hass.data[DOMAIN][entry.entry_id] @callback - async def _async_update_from_webhook(device_id, sensor_data: PlaatoDevice): + def _async_update_from_webhook(device_id, sensor_data: PlaatoDevice): """Update/Create the sensors.""" entry_data[SENSOR_DATA] = sensor_data diff --git a/homeassistant/components/plaato/strings.json b/homeassistant/components/plaato/strings.json index 934628e82c2..23568258118 100644 --- a/homeassistant/components/plaato/strings.json +++ b/homeassistant/components/plaato/strings.json @@ -41,7 +41,7 @@ "step": { "webhook": { "title": "Options for Plaato Airlock", - "description": "Webhook info:\n\n- URL: `{webhook_url}`\n- Method: POST\n\n" + "description": "Webhook info:\n\n- URL: `{webhook_url}`\n- Method: POST" }, "user": { "title": "Options for Plaato", diff --git a/homeassistant/components/plant/__init__.py b/homeassistant/components/plant/__init__.py index 2a5253d3faa..48c606865df 100644 --- a/homeassistant/components/plant/__init__.py +++ b/homeassistant/components/plant/__init__.py @@ -1,4 +1,8 @@ -"""Support for monitoring plants.""" +"""Support for monitoring plants. + +DEVELOPMENT OF THE PLANT INTEGRATION IS FROZEN +PENDING A DESIGN EVALUATION. +""" from collections import deque from contextlib import suppress @@ -128,6 +132,9 @@ class Plant(Entity): It also checks the measurements against configurable min and max values. 
+ + DEVELOPMENT OF THE PLANT INTEGRATION IS FROZEN + PENDING A DESIGN EVALUATION. """ _attr_should_poll = False @@ -148,7 +155,7 @@ class Plant(Entity): "max": CONF_MAX_MOISTURE, }, READING_CONDUCTIVITY: { - ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS, + ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM, "min": CONF_MIN_CONDUCTIVITY, "max": CONF_MAX_CONDUCTIVITY, }, @@ -363,6 +370,9 @@ class DailyHistory: """Stores one measurement per day for a maximum number of days. At the moment only the maximum value per day is kept. + + DEVELOPMENT OF THE PLANT INTEGRATION IS FROZEN + PENDING A DESIGN EVALUATION. """ def __init__(self, max_length): diff --git a/homeassistant/components/plex/config_flow.py b/homeassistant/components/plex/config_flow.py index 7162e517e23..ae7cbb12574 100644 --- a/homeassistant/components/plex/config_flow.py +++ b/homeassistant/components/plex/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -import copy +from copy import deepcopy import logging from typing import TYPE_CHECKING, Any @@ -35,7 +35,7 @@ from homeassistant.const import ( CONF_URL, CONF_VERIFY_SSL, ) -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import discovery_flow from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv @@ -71,7 +71,7 @@ _LOGGER = logging.getLogger(__package__) @callback -def configured_servers(hass): +def configured_servers(hass: HomeAssistant) -> set[str]: """Return a set of the configured Plex servers.""" return { entry.data[CONF_SERVER_IDENTIFIER] @@ -79,7 +79,7 @@ def configured_servers(hass): } -async def async_discover(hass): +async def async_discover(hass: HomeAssistant) -> None: """Scan for available Plex servers.""" gdm = GDM() await hass.async_add_executor_job(gdm.scan) @@ -97,6 +97,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + available_servers: list[tuple[str, str, str]] + plexauth: PlexAuth + @staticmethod @callback def async_get_options_flow( @@ -108,28 +111,34 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the Plex flow.""" self.current_login: dict[str, Any] = {} - self.available_servers = None - self.plexauth = None self.token = None self.client_id = None self._manual = False self._reauth_config: dict[str, Any] | None = None - async def async_step_user(self, user_input=None, errors=None): + async def async_step_user( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if user_input is not None: - return await self.async_step_plex_website_auth() + return await self._async_step_plex_website_auth() if self.show_advanced_options: return await self.async_step_user_advanced(errors=errors) return self.async_show_form(step_id="user", errors=errors) - async def async_step_user_advanced(self, user_input=None, errors=None): + async def async_step_user_advanced( + self, + user_input: dict[str, str] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Handle an advanced mode flow initialized by the user.""" if user_input is not None: if user_input.get("setup_method") == MANUAL_SETUP_STRING: self._manual = True return await self.async_step_manual_setup() - return await self.async_step_plex_website_auth() + return await self._async_step_plex_website_auth() 
data_schema = vol.Schema( { @@ -142,7 +151,11 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user_advanced", data_schema=data_schema, errors=errors ) - async def async_step_manual_setup(self, user_input=None, errors=None): + async def async_step_manual_setup( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Begin manual configuration.""" if user_input is not None and errors is None: user_input.pop(CONF_URL, None) @@ -264,7 +277,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=url, data=data) - async def async_step_select_server(self, user_input=None): + async def async_step_select_server( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Use selected Plex server.""" config = dict(self.current_login) if user_input is not None: @@ -292,7 +307,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): errors={}, ) - async def async_step_integration_discovery(self, discovery_info): + async def async_step_integration_discovery( + self, discovery_info: dict[str, Any] + ) -> ConfigFlowResult: """Handle GDM discovery.""" machine_identifier = discovery_info["data"]["Resource-Identifier"] await self.async_set_unique_id(machine_identifier) @@ -305,7 +322,7 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): } return await self.async_step_user() - async def async_step_plex_website_auth(self): + async def _async_step_plex_website_auth(self) -> ConfigFlowResult: """Begin external auth flow on Plex website.""" self.hass.http.register_view(PlexAuthorizationCallbackView) if (req := http.current_request.get()) is None: @@ -329,7 +346,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): auth_url = self.plexauth.auth_url(forward_url) return self.async_external_step(step_id="obtain_token", url=auth_url) - async def async_step_obtain_token(self, user_input=None): + async def async_step_obtain_token( + self, user_input: None = None + ) -> ConfigFlowResult: """Obtain token after external auth completed.""" token = await self.plexauth.token(10) @@ -340,11 +359,13 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): self.client_id = self.plexauth.client_identifier return self.async_external_step_done(next_step_id="use_external_token") - async def async_step_timed_out(self, user_input=None): + async def async_step_timed_out(self, user_input: None = None) -> ConfigFlowResult: """Abort flow when time expires.""" return self.async_abort(reason="token_request_timeout") - async def async_step_use_external_token(self, user_input=None): + async def async_step_use_external_token( + self, user_input: None = None + ) -> ConfigFlowResult: """Continue server validation with external token.""" server_config = {CONF_TOKEN: self.token} return await self.async_step_server_validate(server_config) @@ -364,14 +385,16 @@ class PlexOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Plex options flow.""" - self.options = copy.deepcopy(dict(config_entry.options)) + self.options = deepcopy(dict(config_entry.options)) self.server_id = config_entry.data[CONF_SERVER_IDENTIFIER] - async def async_step_init(self, user_input=None): + async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the Plex options.""" return await self.async_step_plex_mp_settings() - async def async_step_plex_mp_settings(self, user_input=None): + async def async_step_plex_mp_settings( + self, user_input: dict[str, Any] | None = None + ) 
-> ConfigFlowResult: """Manage the Plex media_player options.""" plex_server = get_plex_server(self.hass, self.server_id) diff --git a/homeassistant/components/plex/icons.json b/homeassistant/components/plex/icons.json index 03bc835d2f6..2d3a7342ad2 100644 --- a/homeassistant/components/plex/icons.json +++ b/homeassistant/components/plex/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "refresh_library": "mdi:refresh", - "scan_for_clients": "mdi:database-refresh" + "refresh_library": { + "service": "mdi:refresh" + }, + "scan_for_clients": { + "service": "mdi:database-refresh" + } } } diff --git a/homeassistant/components/plex/server.py b/homeassistant/components/plex/server.py index fbb98e8e19f..0716b3606af 100644 --- a/homeassistant/components/plex/server.py +++ b/homeassistant/components/plex/server.py @@ -2,6 +2,7 @@ from __future__ import annotations +from copy import copy import logging import ssl import time @@ -664,3 +665,14 @@ class PlexServer: def sensor_attributes(self): """Return active session information for use in activity sensor.""" return {x.sensor_user: x.sensor_title for x in self.active_sessions.values()} + + def set_plex_server(self, plex_server: PlexServer) -> None: + """Set the PlexServer instance.""" + self._plex_server = plex_server + + def switch_user(self, username: str) -> PlexServer: + """Return a shallow copy of a PlexServer as the provided user.""" + new_server = copy(self) + new_server.set_plex_server(self.plex_server.switchUser(username)) + + return new_server diff --git a/homeassistant/components/plex/services.py b/homeassistant/components/plex/services.py index e0fe79be182..c70ddb6ed53 100644 --- a/homeassistant/components/plex/services.py +++ b/homeassistant/components/plex/services.py @@ -133,6 +133,8 @@ def process_plex_payload( elif content_id.startswith(PLEX_URI_SCHEME): # Handle standard media_browser payloads plex_url = URL(content_id) + # https://github.com/pylint-dev/pylint/issues/3484 + # pylint: disable-next=using-constant-test if plex_url.name: if len(plex_url.parts) == 2: if plex_url.name == "search": @@ -161,6 +163,11 @@ def process_plex_payload( if not plex_server: plex_server = get_plex_server(hass) + if isinstance(content, dict): + if plex_user := content.pop("username", None): + _LOGGER.debug("Switching to Plex user: %s", plex_user) + plex_server = plex_server.switch_user(plex_user) + if content_type == "station": if not supports_playqueues: raise HomeAssistantError("Plex stations are not supported on this device") diff --git a/homeassistant/components/plugwise/__init__.py b/homeassistant/components/plugwise/__init__.py index de2250ac72e..a100103b029 100644 --- a/homeassistant/components/plugwise/__init__.py +++ b/homeassistant/components/plugwise/__init__.py @@ -31,9 +31,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: PlugwiseConfigEntry) -> identifiers={(DOMAIN, str(coordinator.api.gateway_id))}, manufacturer="Plugwise", model=coordinator.api.smile_model, + model_id=coordinator.api.smile_model_id, name=coordinator.api.smile_name, - sw_version=coordinator.api.smile_version[0], - ) + sw_version=str(coordinator.api.smile_version), + ) # required for adding the entity-less P1 Gateway await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -82,7 +83,7 @@ def migrate_sensor_entities( # Migrating opentherm_outdoor_temperature # to opentherm_outdoor_air_temperature sensor for device_id, device in coordinator.data.devices.items(): - if device.get("dev_class") != "heater_central": + if device["dev_class"] != 
"heater_central": continue old_unique_id = f"{device_id}-outdoor_temperature" diff --git a/homeassistant/components/plugwise/binary_sensor.py b/homeassistant/components/plugwise/binary_sensor.py index 4b251d20a02..539fa243d6c 100644 --- a/homeassistant/components/plugwise/binary_sensor.py +++ b/homeassistant/components/plugwise/binary_sensor.py @@ -9,6 +9,7 @@ from typing import Any from plugwise.constants import BinarySensorType from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) @@ -22,6 +23,9 @@ from .entity import PlugwiseEntity SEVERITIES = ["other", "info", "warning", "error"] +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class PlugwiseBinarySensorEntityDescription(BinarySensorEntityDescription): @@ -31,6 +35,11 @@ class PlugwiseBinarySensorEntityDescription(BinarySensorEntityDescription): BINARY_SENSORS: tuple[PlugwiseBinarySensorEntityDescription, ...] = ( + PlugwiseBinarySensorEntityDescription( + key="low_battery", + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + ), PlugwiseBinarySensorEntityDescription( key="compressor_state", translation_key="compressor_state", @@ -49,7 +58,6 @@ BINARY_SENSORS: tuple[PlugwiseBinarySensorEntityDescription, ...] = ( PlugwiseBinarySensorEntityDescription( key="flame_state", translation_key="flame_state", - name="Flame state", entity_category=EntityCategory.DIAGNOSTIC, ), PlugwiseBinarySensorEntityDescription( diff --git a/homeassistant/components/plugwise/button.py b/homeassistant/components/plugwise/button.py index 078d31bea12..8a05ede3496 100644 --- a/homeassistant/components/plugwise/button.py +++ b/homeassistant/components/plugwise/button.py @@ -13,6 +13,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index 7b0fe35835d..3caed1e7bc2 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -15,7 +15,7 @@ from homeassistant.components.climate import ( ) from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import PlugwiseConfigEntry @@ -24,6 +24,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -39,11 +41,19 @@ async def async_setup_entry( if not coordinator.new_devices: return - async_add_entities( - PlugwiseClimateEntity(coordinator, device_id) - for device_id in coordinator.new_devices - if coordinator.data.devices[device_id]["dev_class"] in MASTER_THERMOSTATS - ) + if coordinator.data.gateway["smile_name"] == "Adam": + async_add_entities( + PlugwiseClimateEntity(coordinator, device_id) + for device_id in coordinator.new_devices + if coordinator.data.devices[device_id]["dev_class"] == "climate" + ) + else: + async_add_entities( + PlugwiseClimateEntity(coordinator, device_id) + for device_id in coordinator.new_devices + if coordinator.data.devices[device_id]["dev_class"] + in MASTER_THERMOSTATS + ) _add_entities() entry.async_on_unload(coordinator.async_add_listener(_add_entities)) @@ -52,11 +62,9 @@ async def async_setup_entry( class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): """Representation of a Plugwise thermostat.""" - _attr_has_entity_name = True _attr_name = None _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN - _enable_turn_on_off_backwards_compatibility = False _previous_mode: str = "heating" @@ -67,17 +75,20 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): ) -> None: """Set up the Plugwise API.""" super().__init__(coordinator, device_id) - self._attr_extra_state_attributes = {} self._attr_unique_id = f"{device_id}-climate" - self.cdr_gateway = coordinator.data.gateway - gateway_id: str = coordinator.data.gateway["gateway_id"] - self.gateway_data = coordinator.data.devices[gateway_id] + + self._devices = coordinator.data.devices + self._gateway = coordinator.data.gateway + gateway_id: str = self._gateway["gateway_id"] + self._gateway_data = self._devices[gateway_id] + + self._location = device_id + if (location := self.device.get("location")) is not None: + self._location = location + # Determine supported features self._attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - if ( - self.cdr_gateway["cooling_present"] - and self.cdr_gateway["smile_name"] != "Adam" - ): + if self._gateway["cooling_present"] and self._gateway["smile_name"] != "Adam": self._attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE_RANGE ) @@ -103,10 +114,10 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): """ # When no cooling available, _previous_mode is always heating if ( - "regulation_modes" in self.gateway_data - and "cooling" in self.gateway_data["regulation_modes"] + "regulation_modes" in self._gateway_data + and "cooling" in self._gateway_data["regulation_modes"] ): - mode = self.gateway_data["select_regulation_mode"] + mode = self._gateway_data["select_regulation_mode"] if mode in ("cooling", "heating"): self._previous_mode = mode @@ -143,7 +154,9 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): @property def hvac_mode(self) -> HVACMode: """Return HVAC operation ie. 
auto, cool, heat, heat_cool, or off mode.""" - if (mode := self.device.get("mode")) is None or mode not in self.hvac_modes: + if ( + mode := self.device.get("climate_mode") + ) is None or mode not in self.hvac_modes: return HVACMode.HEAT return HVACMode(mode) @@ -151,17 +164,17 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): def hvac_modes(self) -> list[HVACMode]: """Return a list of available HVACModes.""" hvac_modes: list[HVACMode] = [] - if "regulation_modes" in self.gateway_data: + if "regulation_modes" in self._gateway_data: hvac_modes.append(HVACMode.OFF) if "available_schedules" in self.device: hvac_modes.append(HVACMode.AUTO) - if self.cdr_gateway["cooling_present"]: - if "regulation_modes" in self.gateway_data: - if self.gateway_data["select_regulation_mode"] == "cooling": + if self._gateway["cooling_present"]: + if "regulation_modes" in self._gateway_data: + if self._gateway_data["select_regulation_mode"] == "cooling": hvac_modes.append(HVACMode.COOL) - if self.gateway_data["select_regulation_mode"] == "heating": + if self._gateway_data["select_regulation_mode"] == "heating": hvac_modes.append(HVACMode.HEAT) else: hvac_modes.append(HVACMode.HEAT_COOL) @@ -175,23 +188,8 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): """Return the current running hvac operation if supported.""" # Keep track of the previous action-mode self._previous_action_mode(self.coordinator) - - # Adam provides the hvac_action for each thermostat - if (control_state := self.device.get("control_state")) == "cooling": - return HVACAction.COOLING - if control_state == "heating": - return HVACAction.HEATING - if control_state == "preheating": - return HVACAction.PREHEATING - if control_state == "off": - return HVACAction.IDLE - - heater: str = self.coordinator.data.gateway["heater_id"] - heater_data = self.coordinator.data.devices[heater] - if heater_data["binary_sensors"]["heating_state"]: - return HVACAction.HEATING - if heater_data["binary_sensors"].get("cooling_state", False): - return HVACAction.COOLING + if (action := self.device.get("control_state")) is not None: + return HVACAction(action) return HVACAction.IDLE @@ -211,22 +209,24 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): if ATTR_TARGET_TEMP_LOW in kwargs: data["setpoint_low"] = kwargs.get(ATTR_TARGET_TEMP_LOW) - for temperature in data.values(): - if temperature is None or not ( - self._attr_min_temp <= temperature <= self._attr_max_temp - ): - raise ValueError("Invalid temperature change requested") - if mode := kwargs.get(ATTR_HVAC_MODE): await self.async_set_hvac_mode(mode) - await self.coordinator.api.set_temperature(self.device["location"], data) + await self.coordinator.api.set_temperature(self._location, data) @plugwise_command async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set the hvac mode.""" if hvac_mode not in self.hvac_modes: - raise HomeAssistantError("Unsupported hvac_mode") + hvac_modes = ", ".join(self.hvac_modes) + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="unsupported_hvac_mode_requested", + translation_placeholders={ + "hvac_mode": hvac_mode, + "hvac_modes": hvac_modes, + }, + ) if hvac_mode == self.hvac_mode: return @@ -235,7 +235,7 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): await self.coordinator.api.set_regulation_mode(hvac_mode) else: await self.coordinator.api.set_schedule_state( - self.device["location"], + self._location, "on" if hvac_mode == HVACMode.AUTO else "off", ) if self.hvac_mode == 
HVACMode.OFF: @@ -244,4 +244,4 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): @plugwise_command async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode.""" - await self.coordinator.api.set_preset(self.device["location"], preset_mode) + await self.coordinator.api.set_preset(self._location, preset_mode) diff --git a/homeassistant/components/plugwise/config_flow.py b/homeassistant/components/plugwise/config_flow.py index 1e0f34007c9..57abb1ccb86 100644 --- a/homeassistant/components/plugwise/config_flow.py +++ b/homeassistant/components/plugwise/config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, Self from plugwise import Smile from plugwise.exceptions import ( @@ -16,8 +16,9 @@ from plugwise.exceptions import ( import voluptuous as vol from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult from homeassistant.const import ( + ATTR_CONFIGURATION_URL, CONF_BASE, CONF_HOST, CONF_NAME, @@ -29,13 +30,11 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import ( - API, DEFAULT_PORT, DEFAULT_USERNAME, DOMAIN, FLOW_SMILE, FLOW_STRETCH, - PW_TYPE, SMILE, STRETCH, STRETCH_USERNAME, @@ -43,12 +42,12 @@ from .const import ( ) -def _base_gw_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: +def base_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: """Generate base schema for gateways.""" - base_gw_schema = vol.Schema({vol.Required(CONF_PASSWORD): str}) + schema = vol.Schema({vol.Required(CONF_PASSWORD): str}) if not discovery_info: - base_gw_schema = base_gw_schema.extend( + schema = schema.extend( { vol.Required(CONF_HOST): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, @@ -58,13 +57,13 @@ def _base_gw_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: } ) - return base_gw_schema + return schema -async def validate_gw_input(hass: HomeAssistant, data: dict[str, Any]) -> Smile: +async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> Smile: """Validate whether the user input allows us to connect to the gateway. - Data has the keys from _base_gw_schema() with values provided by the user. + Data has the keys from base_schema() with values provided by the user. 
""" websession = async_get_clientsession(hass, verify_ssl=False) api = Smile( @@ -72,7 +71,6 @@ async def validate_gw_input(hass: HomeAssistant, data: dict[str, Any]) -> Smile: password=data[CONF_PASSWORD], port=data[CONF_PORT], username=data[CONF_USERNAME], - timeout=30, websession=websession, ) await api.connect() @@ -85,6 +83,7 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 discovery_info: ZeroconfServiceInfo | None = None + product: str = "Unknown Smile" _username: str = DEFAULT_USERNAME async def async_step_zeroconf( @@ -97,7 +96,7 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): unique_id = discovery_info.hostname.split(".")[0].split("-")[0] if config_entry := await self.async_set_unique_id(unique_id): try: - await validate_gw_input( + await validate_input( self.hass, { CONF_HOST: discovery_info.host, @@ -118,7 +117,7 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): if DEFAULT_USERNAME not in unique_id: self._username = STRETCH_USERNAME - _product = _properties.get("product", None) + self.product = _product = _properties.get("product", "Unknown Smile") _version = _properties.get("version", "n/a") _name = f"{ZEROCONF_MAP.get(_product, _product)} v{_version}" @@ -130,45 +129,36 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): # If we have discovered an Adam or Anna, both might be on the network. # In that case, we need to cancel the Anna flow, as the Adam should # be added. - for flow in self._async_in_progress(): - # This is an Anna, and there is already an Adam flow in progress - if ( - _product == "smile_thermo" - and "context" in flow - and flow["context"].get("product") == "smile_open_therm" - ): - return self.async_abort(reason="anna_with_adam") - - # This is an Adam, and there is already an Anna flow in progress - if ( - _product == "smile_open_therm" - and "context" in flow - and flow["context"].get("product") == "smile_thermo" - and "flow_id" in flow - ): - self.hass.config_entries.flow.async_abort(flow["flow_id"]) + if self.hass.config_entries.flow.async_has_matching_flow(self): + return self.async_abort(reason="anna_with_adam") self.context.update( { - "title_placeholders": { - CONF_HOST: discovery_info.host, - CONF_NAME: _name, - CONF_PORT: discovery_info.port, - CONF_USERNAME: self._username, - }, - "configuration_url": ( + "title_placeholders": {CONF_NAME: _name}, + ATTR_CONFIGURATION_URL: ( f"http://{discovery_info.host}:{discovery_info.port}" ), - "product": _product, } ) return await self.async_step_user() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + # This is an Anna, and there is already an Adam flow in progress + if self.product == "smile_thermo" and other_flow.product == "smile_open_therm": + return True + + # This is an Adam, and there is already an Anna flow in progress + if self.product == "smile_open_therm" and other_flow.product == "smile_thermo": + self.hass.config_entries.flow.async_abort(other_flow.flow_id) + + return False + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step when using network/gateway setups.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: if self.discovery_info: @@ -177,7 +167,7 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_USERNAME] = self._username try: - api = await validate_gw_input(self.hass, user_input) + api = await validate_input(self.hass, user_input) except ConnectionFailedError: errors[CONF_BASE] = 
"cannot_connect" except InvalidAuthentication: @@ -196,11 +186,10 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): ) self._abort_if_unique_id_configured() - user_input[PW_TYPE] = API return self.async_create_entry(title=api.smile_name, data=user_input) return self.async_show_form( - step_id="user", - data_schema=_base_gw_schema(self.discovery_info), + step_id=SOURCE_USER, + data_schema=base_schema(self.discovery_info), errors=errors, ) diff --git a/homeassistant/components/plugwise/coordinator.py b/homeassistant/components/plugwise/coordinator.py index 8958ecae930..7ac0cc21c51 100644 --- a/homeassistant/components/plugwise/coordinator.py +++ b/homeassistant/components/plugwise/coordinator.py @@ -2,6 +2,7 @@ from datetime import timedelta +from packaging.version import Version from plugwise import PlugwiseData, Smile from plugwise.exceptions import ( ConnectionFailedError, @@ -53,7 +54,6 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]): username=self.config_entry.data.get(CONF_USERNAME, DEFAULT_USERNAME), password=self.config_entry.data[CONF_PASSWORD], port=self.config_entry.data.get(CONF_PORT, DEFAULT_PORT), - timeout=30, websession=async_get_clientsession(hass, verify_ssl=False), ) self._current_devices: set[str] = set() @@ -61,31 +61,44 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]): async def _connect(self) -> None: """Connect to the Plugwise Smile.""" - self._connected = await self.api.connect() - self.api.get_all_devices() + version = await self.api.connect() + self._connected = isinstance(version, Version) + if self._connected: + self.api.get_all_gateway_entities() async def _async_update_data(self) -> PlugwiseData: """Fetch data from Plugwise.""" - data = PlugwiseData({}, {}) try: if not self._connected: await self._connect() data = await self.api.async_update() except ConnectionFailedError as err: - raise UpdateFailed("Failed to connect") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="failed_to_connect", + ) from err except InvalidAuthentication as err: - raise ConfigEntryError("Authentication failed") from err + raise ConfigEntryError( + translation_domain=DOMAIN, + translation_key="authentication_failed", + ) from err except (InvalidXMLError, ResponseError) as err: raise UpdateFailed( - "Invalid XML data, or error indication received from the Plugwise Adam/Smile/Stretch" + translation_domain=DOMAIN, + translation_key="invalid_xml_data", ) from err except PlugwiseError as err: - raise UpdateFailed("Data incomplete or missing") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="data_incomplete_or_missing", + ) from err except UnsupportedDeviceError as err: - raise ConfigEntryError("Device with unsupported firmware") from err - else: - self._async_add_remove_devices(data, self.config_entry) + raise ConfigEntryError( + translation_domain=DOMAIN, + translation_key="unsupported_firmware", + ) from err + self._async_add_remove_devices(data, self.config_entry) return data def _async_add_remove_devices(self, data: PlugwiseData, entry: ConfigEntry) -> None: @@ -104,19 +117,20 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]): device_list = dr.async_entries_for_config_entry( device_reg, self.config_entry.entry_id ) - # via_device cannot be None, this will result in the deletion - # of other Plugwise Gateways when present! 
- via_device: str = "" + # First find the Plugwise via_device + gateway_device = device_reg.async_get_device( + {(DOMAIN, data.gateway[GATEWAY_ID])} + ) + assert gateway_device is not None + via_device_id = gateway_device.id + + # Then remove the connected orphaned device(s) for device_entry in device_list: - if device_entry.identifiers: - item = list(list(device_entry.identifiers)[0]) - if item[0] == DOMAIN: - # First find the Plugwise via_device, this is always the first device - if item[1] == data.gateway[GATEWAY_ID]: - via_device = device_entry.id - elif ( # then remove the connected orphaned device(s) - device_entry.via_device_id == via_device - and item[1] not in data.devices + for identifier in device_entry.identifiers: + if identifier[0] == DOMAIN: + if ( + device_entry.via_device_id == via_device_id + and identifier[1] not in data.devices ): device_reg.async_update_device( device_entry.id, remove_config_entry_id=entry.entry_id @@ -125,5 +139,5 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]): "Removed %s device %s %s from device_registry", DOMAIN, device_entry.model, - item[1], + identifier[1], ) diff --git a/homeassistant/components/plugwise/diagnostics.py b/homeassistant/components/plugwise/diagnostics.py index 9d15ea4fe28..47ff7d1a9fb 100644 --- a/homeassistant/components/plugwise/diagnostics.py +++ b/homeassistant/components/plugwise/diagnostics.py @@ -15,6 +15,6 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" coordinator = entry.runtime_data return { - "gateway": coordinator.data.gateway, "devices": coordinator.data.devices, + "gateway": coordinator.data.gateway, } diff --git a/homeassistant/components/plugwise/entity.py b/homeassistant/components/plugwise/entity.py index b2562ef8f39..3f63abaff43 100644 --- a/homeassistant/components/plugwise/entity.py +++ b/homeassistant/components/plugwise/entity.py @@ -2,7 +2,7 @@ from __future__ import annotations -from plugwise.constants import DeviceData +from plugwise.constants import GwEntityData from homeassistant.const import ATTR_NAME, ATTR_VIA_DEVICE, CONF_HOST from homeassistant.helpers.device_registry import ( @@ -47,6 +47,7 @@ class PlugwiseEntity(CoordinatorEntity[PlugwiseDataUpdateCoordinator]): connections=connections, manufacturer=data.get("vendor"), model=data.get("model"), + model_id=data.get("model_id"), name=coordinator.data.gateway["smile_name"], sw_version=data.get("firmware"), hw_version=data.get("hardware"), @@ -73,11 +74,6 @@ class PlugwiseEntity(CoordinatorEntity[PlugwiseDataUpdateCoordinator]): ) @property - def device(self) -> DeviceData: + def device(self) -> GwEntityData: """Return data for this device.""" return self.coordinator.data.devices[self._dev_id] - - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - self._handle_coordinator_update() - await super().async_added_to_hass() diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index 10faf75d0f1..80f5be974e1 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==0.38.3"], + "requirements": ["plugwise==1.6.4"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/homeassistant/components/plugwise/number.py b/homeassistant/components/plugwise/number.py index 06db5faa55b..1d0b1382c24 100644 --- 
a/homeassistant/components/plugwise/number.py +++ b/homeassistant/components/plugwise/number.py @@ -20,6 +20,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class PlugwiseNumberEntityDescription(NumberEntityDescription): @@ -91,12 +93,12 @@ class PlugwiseNumberEntity(PlugwiseEntity, NumberEntity): ) -> None: """Initiate Plugwise Number.""" super().__init__(coordinator, device_id) - self.device_id = device_id - self.entity_description = description - self._attr_unique_id = f"{device_id}-{description.key}" self._attr_mode = NumberMode.BOX self._attr_native_max_value = self.device[description.key]["upper_bound"] self._attr_native_min_value = self.device[description.key]["lower_bound"] + self._attr_unique_id = f"{device_id}-{description.key}" + self.device_id = device_id + self.entity_description = description native_step = self.device[description.key]["resolution"] if description.key != "temperature_offset": diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml new file mode 100644 index 00000000000..ce0788c44f7 --- /dev/null +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -0,0 +1,85 @@ +rules: + ## Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: done + dependency-transparency: done + action-setup: + status: exempt + comment: Plugwise integration has no custom actions + common-modules: done + docs-high-level-description: + status: todo + comment: Rewrite top section, docs PR prepared waiting for 36087 merge + docs-installation-instructions: + status: todo + comment: Docs PR 36087 + docs-removal-instructions: done + docs-actions: done + brands: done + ## Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: done + reauthentication-flow: + status: exempt + comment: The hubs have a hardcoded `Smile ID` printed on the sticker used as password, it can not be changed + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: + status: todo + comment: Docs PR 36087 (partial) + todo rewrite generically (PR prepared) + docs-configuration-parameters: + status: exempt + comment: Plugwise has no options flow + ## Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: done + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: + status: todo + comment: This integration does not have any reconfiguration steps (yet) investigate how/why + dynamic-devices: done + discovery-update-info: done + repair-issues: + status: exempt + comment: This integration does not have repairs + docs-use-cases: + status: todo + comment: Check for completeness, PR prepared waiting for 36087 merge + docs-supported-devices: + status: todo + comment: The list is there but could be improved for readability, PR prepared waiting for 36087 merge + docs-supported-functions: + status: todo + comment: Check for completeness, PR prepared waiting for 36087 merge + docs-data-update: done + docs-known-limitations: + status: todo + comment: Partial 
in 36087 but could be more elaborate + docs-troubleshooting: + status: todo + comment: Check for completeness, PR prepared waiting for 36087 merge + docs-examples: + status: todo + comment: Check for completeness, PR prepared waiting for 36087 merge + ## Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/plugwise/select.py b/homeassistant/components/plugwise/select.py index b7d4a0a1ded..ff268d8eded 100644 --- a/homeassistant/components/plugwise/select.py +++ b/homeassistant/components/plugwise/select.py @@ -10,11 +10,13 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import PlugwiseConfigEntry -from .const import LOCATION, SelectOptionsType, SelectType +from .const import SelectOptionsType, SelectType from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class PlugwiseSelectEntityDescription(SelectEntityDescription): @@ -89,8 +91,12 @@ class PlugwiseSelectEntity(PlugwiseEntity, SelectEntity): ) -> None: """Initialise the selector.""" super().__init__(coordinator, device_id) - self.entity_description = entity_description self._attr_unique_id = f"{device_id}-{entity_description.key}" + self.entity_description = entity_description + + self._location = device_id + if (location := self.device.get("location")) is not None: + self._location = location @property def current_option(self) -> str: @@ -106,8 +112,8 @@ class PlugwiseSelectEntity(PlugwiseEntity, SelectEntity): async def async_select_option(self, option: str) -> None: """Change to the selected entity option. - self.device[LOCATION] and STATE_ON are required for the thermostat-schedule select. + self._location and STATE_ON are required for the thermostat-schedule select. """ await self.coordinator.api.set_select( - self.entity_description.key, self.device[LOCATION], option, STATE_ON + self.entity_description.key, self._location, option, STATE_ON ) diff --git a/homeassistant/components/plugwise/sensor.py b/homeassistant/components/plugwise/sensor.py index ae5b4e6ed91..14b42682376 100644 --- a/homeassistant/components/plugwise/sensor.py +++ b/homeassistant/components/plugwise/sensor.py @@ -31,6 +31,9 @@ from . import PlugwiseConfigEntry from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class PlugwiseSensorEntityDescription(SensorEntityDescription): @@ -439,8 +442,8 @@ class PlugwiseSensorEntity(PlugwiseEntity, SensorEntity): ) -> None: """Initialise the sensor.""" super().__init__(coordinator, device_id) - self.entity_description = description self._attr_unique_id = f"{device_id}-{description.key}" + self.entity_description = description @property def native_value(self) -> int | float: diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index f74fc036e2a..87a8e120591 100644 --- a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -11,7 +11,10 @@ "username": "Smile Username" }, "data_description": { - "host": "Leave empty if using Auto Discovery" + "password": "The Smile ID printed on the label on the back of your Adam, Smile-T, or P1.", + "host": "The hostname or IP-address of your Smile. 
You can find it in your router or the Plugwise App.", + "port": "By default your Smile uses port 80, normally you should not have to change this.", + "username": "Default is `smile`, or `stretch` for the legacy Stretch." } } }, @@ -281,5 +284,28 @@ "name": "Relay" } } + }, + "exceptions": { + "authentication_failed": { + "message": "[%key:common::config_flow::error::invalid_auth%]" + }, + "data_incomplete_or_missing": { + "message": "Data incomplete or missing." + }, + "error_communicating_with_api": { + "message": "Error communicating with API: {error}." + }, + "failed_to_connect": { + "message": "[%key:common::config_flow::error::cannot_connect%]" + }, + "invalid_xml_data": { + "message": "[%key:component::plugwise::config::error::response_error%]" + }, + "unsupported_firmware": { + "message": "[%key:component::plugwise::config::error::unsupported%]" + }, + "unsupported_hvac_mode_requested": { + "message": "Unsupported mode {hvac_mode} requested, valid modes are: {hvac_modes}." + } } } diff --git a/homeassistant/components/plugwise/switch.py b/homeassistant/components/plugwise/switch.py index a134ab5b044..ea6d6f18b7f 100644 --- a/homeassistant/components/plugwise/switch.py +++ b/homeassistant/components/plugwise/switch.py @@ -21,6 +21,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class PlugwiseSwitchEntityDescription(SwitchEntityDescription): @@ -48,7 +50,6 @@ SWITCHES: tuple[PlugwiseSwitchEntityDescription, ...] = ( PlugwiseSwitchEntityDescription( key="cooling_ena_switch", translation_key="cooling_ena_switch", - name="Cooling", entity_category=EntityCategory.CONFIG, ), ) @@ -93,8 +94,8 @@ class PlugwiseSwitchEntity(PlugwiseEntity, SwitchEntity): ) -> None: """Set up the Plugwise API.""" super().__init__(coordinator, device_id) - self.entity_description = description self._attr_unique_id = f"{device_id}-{description.key}" + self.entity_description = description @property def is_on(self) -> bool: diff --git a/homeassistant/components/plugwise/util.py b/homeassistant/components/plugwise/util.py index d998711f2b9..c830e5f69f3 100644 --- a/homeassistant/components/plugwise/util.py +++ b/homeassistant/components/plugwise/util.py @@ -7,6 +7,7 @@ from plugwise.exceptions import PlugwiseException from homeassistant.exceptions import HomeAssistantError +from .const import DOMAIN from .entity import PlugwiseEntity @@ -24,10 +25,14 @@ def plugwise_command[_PlugwiseEntityT: PlugwiseEntity, **_P, _R]( ) -> _R: try: return await func(self, *args, **kwargs) - except PlugwiseException as error: + except PlugwiseException as err: raise HomeAssistantError( - f"Error communicating with API: {error}" - ) from error + translation_domain=DOMAIN, + translation_key="error_communicating_with_api", + translation_placeholders={ + "error": str(err), + }, + ) from err finally: await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/pocketcasts/manifest.json b/homeassistant/components/pocketcasts/manifest.json index 3cb6f52995e..f2a85ecac0d 100644 --- a/homeassistant/components/pocketcasts/manifest.json +++ b/homeassistant/components/pocketcasts/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/pocketcasts", "iot_class": "cloud_polling", "loggers": ["pycketcasts"], + "quality_scale": "legacy", "requirements": ["pycketcasts==1.0.1"] } diff --git a/homeassistant/components/point/__init__.py 
b/homeassistant/components/point/__init__.py index d5babef5b2a..e446606f191 100644 --- a/homeassistant/components/point/__init__.py +++ b/homeassistant/components/point/__init__.py @@ -1,38 +1,39 @@ """Support for Minut Point.""" import asyncio +from dataclasses import dataclass +from http import HTTPStatus import logging -from aiohttp import web -from httpx import ConnectTimeout +from aiohttp import ClientError, ClientResponseError, web from pypoint import PointSession import voluptuous as vol -from homeassistant import config_entries from homeassistant.components import webhook -from homeassistant.config_entries import ConfigEntry +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( CONF_CLIENT_ID, CONF_CLIENT_SECRET, - CONF_TOKEN, CONF_WEBHOOK_ID, Platform, ) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import ( + aiohttp_client, + config_entry_oauth2_flow, + config_validation as cv, ) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType -from homeassistant.util.dt import as_local, parse_datetime, utc_from_timestamp -from . import config_flow +from . 
import api from .const import ( CONF_WEBHOOK_URL, DOMAIN, @@ -45,11 +46,10 @@ from .const import ( _LOGGER = logging.getLogger(__name__) -DATA_CONFIG_ENTRY_LOCK = "point_config_entry_lock" -CONFIG_ENTRY_IS_SETUP = "point_config_entry_is_setup" - PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] +type PointConfigEntry = ConfigEntry[PointData] + CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( @@ -70,62 +70,87 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: conf = config[DOMAIN] - config_flow.register_flow_implementation( - hass, DOMAIN, conf[CONF_CLIENT_ID], conf[CONF_CLIENT_SECRET] + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.4.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Point", + }, ) - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT} + if not hass.config_entries.async_entries(DOMAIN): + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential( + conf[CONF_CLIENT_ID], + conf[CONF_CLIENT_SECRET], + ), + ) + + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=conf + ) ) - ) return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Point from a config entry.""" +async def async_setup_entry(hass: HomeAssistant, entry: PointConfigEntry) -> bool: + """Set up Minut Point from a config entry.""" - async def token_saver(token, **kwargs): - _LOGGER.debug("Saving updated token %s", token) - hass.config_entries.async_update_entry( - entry, data={**entry.data, CONF_TOKEN: token} + if "auth_implementation" not in entry.data: + raise ConfigEntryAuthFailed("Authentication failed. 
Please re-authenticate.") + + implementation = ( + await config_entry_oauth2_flow.async_get_config_entry_implementation( + hass, entry ) - - session = PointSession( - async_get_clientsession(hass), - entry.data["refresh_args"][CONF_CLIENT_ID], - entry.data["refresh_args"][CONF_CLIENT_SECRET], - token=entry.data[CONF_TOKEN], - token_saver=token_saver, ) + session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) + auth = api.AsyncConfigEntryAuth( + aiohttp_client.async_get_clientsession(hass), session + ) + try: - # the call to user() implicitly calls ensure_active_token() in authlib - await session.user() - except ConnectTimeout as err: - _LOGGER.debug("Connection Timeout") + await auth.async_get_access_token() + except ClientResponseError as err: + if err.status in {HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN}: + raise ConfigEntryAuthFailed from err + raise ConfigEntryNotReady from err + except ClientError as err: raise ConfigEntryNotReady from err - except Exception: # noqa: BLE001 - _LOGGER.error("Authentication Error") - return False - hass.data[DATA_CONFIG_ENTRY_LOCK] = asyncio.Lock() - hass.data[CONFIG_ENTRY_IS_SETUP] = set() + point_session = PointSession(auth) - await async_setup_webhook(hass, entry, session) - client = MinutPointClient(hass, entry, session) - hass.data.setdefault(DOMAIN, {}).update({entry.entry_id: client}) + client = MinutPointClient(hass, entry, point_session) hass.async_create_task(client.update()) + entry.runtime_data = PointData(client) + + await async_setup_webhook(hass, entry, point_session) + await hass.config_entries.async_forward_entry_setups( + entry, [*PLATFORMS, Platform.ALARM_CONTROL_PANEL] + ) return True -async def async_setup_webhook(hass: HomeAssistant, entry: ConfigEntry, session): +async def async_setup_webhook( + hass: HomeAssistant, entry: PointConfigEntry, session: PointSession +) -> None: """Set up a webhook to handle binary sensor events.""" if CONF_WEBHOOK_ID not in entry.data: webhook_id = webhook.async_generate_id() webhook_url = webhook.async_generate_url(hass, webhook_id) - _LOGGER.info("Registering new webhook at: %s", webhook_url) + _LOGGER.debug("Registering new webhook at: %s", webhook_url) hass.config_entries.async_update_entry( entry, @@ -135,27 +160,26 @@ async def async_setup_webhook(hass: HomeAssistant, entry: ConfigEntry, session): CONF_WEBHOOK_URL: webhook_url, }, ) + await session.update_webhook( - entry.data[CONF_WEBHOOK_URL], + webhook.async_generate_url(hass, entry.data[CONF_WEBHOOK_ID]), entry.data[CONF_WEBHOOK_ID], ["*"], ) - webhook.async_register( hass, DOMAIN, "Point", entry.data[CONF_WEBHOOK_ID], handle_webhook ) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: PointConfigEntry) -> bool: """Unload a config entry.""" - webhook.async_unregister(hass, entry.data[CONF_WEBHOOK_ID]) - session = hass.data[DOMAIN].pop(entry.entry_id) - await session.remove_webhook() - - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if not hass.data[DOMAIN]: - hass.data.pop(DOMAIN) - + if unload_ok := await hass.config_entries.async_unload_platforms( + entry, [*PLATFORMS, Platform.ALARM_CONTROL_PANEL] + ): + session: PointSession = entry.runtime_data.client + if CONF_WEBHOOK_ID in entry.data: + webhook.async_unregister(hass, entry.data[CONF_WEBHOOK_ID]) + await session.remove_webhook() return unload_ok @@ -203,29 +227,23 @@ class MinutPointClient: async_dispatcher_send(self._hass, 
SIGNAL_UPDATE_ENTITY) return - async def new_device(device_id, platform): - """Load new device.""" - config_entries_key = f"{platform}.{DOMAIN}" - async with self._hass.data[DATA_CONFIG_ENTRY_LOCK]: - if config_entries_key not in self._hass.data[CONFIG_ENTRY_IS_SETUP]: - await self._hass.config_entries.async_forward_entry_setups( - self._config_entry, [platform] - ) - self._hass.data[CONFIG_ENTRY_IS_SETUP].add(config_entries_key) - - async_dispatcher_send( - self._hass, POINT_DISCOVERY_NEW.format(platform, DOMAIN), device_id - ) - self._is_available = True for home_id in self._client.homes: if home_id not in self._known_homes: - await new_device(home_id, "alarm_control_panel") + async_dispatcher_send( + self._hass, + POINT_DISCOVERY_NEW.format(Platform.ALARM_CONTROL_PANEL), + home_id, + ) self._known_homes.add(home_id) for device in self._client.devices: if device.device_id not in self._known_devices: for platform in PLATFORMS: - await new_device(device.device_id, platform) + async_dispatcher_send( + self._hass, + POINT_DISCOVERY_NEW.format(platform), + device.device_id, + ) self._known_devices.add(device.device_id) async_dispatcher_send(self._hass, SIGNAL_UPDATE_ENTITY) @@ -257,83 +275,9 @@ class MinutPointClient: return await self._client.alarm_arm(home_id) -class MinutPointEntity(Entity): - """Base Entity used by the sensors.""" +@dataclass +class PointData: + """Point Data.""" - _attr_should_poll = False - - def __init__(self, point_client, device_id, device_class): - """Initialize the entity.""" - self._async_unsub_dispatcher_connect = None - self._client = point_client - self._id = device_id - self._name = self.device.name - self._attr_device_class = device_class - self._updated = utc_from_timestamp(0) - self._attr_unique_id = f"point.{device_id}-{device_class}" - device = self.device.device - self._attr_device_info = DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, device["device_mac"])}, - identifiers={(DOMAIN, device["device_id"])}, - manufacturer="Minut", - model=f"Point v{device['hardware_version']}", - name=device["description"], - sw_version=device["firmware"]["installed"], - via_device=(DOMAIN, device["home"]), - ) - if device_class: - self._attr_name = f"{self._name} {device_class.capitalize()}" - - def __str__(self): - """Return string representation of device.""" - return f"MinutPoint {self.name}" - - async def async_added_to_hass(self): - """Call when entity is added to hass.""" - _LOGGER.debug("Created device %s", self) - self._async_unsub_dispatcher_connect = async_dispatcher_connect( - self.hass, SIGNAL_UPDATE_ENTITY, self._update_callback - ) - await self._update_callback() - - async def async_will_remove_from_hass(self): - """Disconnect dispatcher listener when removed.""" - if self._async_unsub_dispatcher_connect: - self._async_unsub_dispatcher_connect() - - async def _update_callback(self): - """Update the value of the sensor.""" - - @property - def available(self): - """Return true if device is not offline.""" - return self._client.is_available(self.device_id) - - @property - def device(self): - """Return the representation of the device.""" - return self._client.device(self.device_id) - - @property - def device_id(self): - """Return the id of the device.""" - return self._id - - @property - def extra_state_attributes(self): - """Return status of device.""" - attrs = self.device.device_status - attrs["last_heard_from"] = as_local(self.last_update).strftime( - "%Y-%m-%d %H:%M:%S" - ) - return attrs - - @property - def is_updated(self): - """Return true if 
sensor have been updated.""" - return self.last_update > self._updated - - @property - def last_update(self): - """Return the last_update time for the device.""" - return parse_datetime(self.device.last_update) + client: MinutPointClient + entry_lock: asyncio.Lock = asyncio.Lock() diff --git a/homeassistant/components/point/alarm_control_panel.py b/homeassistant/components/point/alarm_control_panel.py index 844d1eba553..4e4e4238176 100644 --- a/homeassistant/components/point/alarm_control_panel.py +++ b/homeassistant/components/point/alarm_control_panel.py @@ -6,16 +6,12 @@ from collections.abc import Callable import logging from homeassistant.components.alarm_control_panel import ( - DOMAIN, + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -28,9 +24,9 @@ _LOGGER = logging.getLogger(__name__) EVENT_MAP = { - "off": STATE_ALARM_DISARMED, - "alarm_silenced": STATE_ALARM_DISARMED, - "alarm_grace_period_expired": STATE_ALARM_TRIGGERED, + "off": AlarmControlPanelState.DISARMED, + "alarm_silenced": AlarmControlPanelState.DISARMED, + "alarm_grace_period_expired": AlarmControlPanelState.TRIGGERED, } @@ -43,11 +39,13 @@ async def async_setup_entry( async def async_discover_home(home_id): """Discover and add a discovered home.""" - client = hass.data[POINT_DOMAIN][config_entry.entry_id] + client = config_entry.runtime_data.client async_add_entities([MinutPointAlarmControl(client, home_id)], True) async_dispatcher_connect( - hass, POINT_DISCOVERY_NEW.format(DOMAIN, POINT_DOMAIN), async_discover_home + hass, + POINT_DISCOVERY_NEW.format(ALARM_CONTROL_PANEL_DOMAIN, POINT_DOMAIN), + async_discover_home, ) @@ -101,9 +99,11 @@ class MinutPointAlarmControl(AlarmControlPanelEntity): self.async_write_ha_state() @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return state of the device.""" - return EVENT_MAP.get(self._home["alarm_status"], STATE_ALARM_ARMED_AWAY) + return EVENT_MAP.get( + self._home["alarm_status"], AlarmControlPanelState.ARMED_AWAY + ) async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" diff --git a/homeassistant/components/point/api.py b/homeassistant/components/point/api.py new file mode 100644 index 00000000000..cd854c2b7ec --- /dev/null +++ b/homeassistant/components/point/api.py @@ -0,0 +1,25 @@ +"""API for Minut Point bound to Home Assistant OAuth.""" + +from aiohttp import ClientSession +import pypoint + +from homeassistant.helpers import config_entry_oauth2_flow + + +class AsyncConfigEntryAuth(pypoint.AbstractAuth): + """Provide Minut Point authentication tied to an OAuth2 based config entry.""" + + def __init__( + self, + websession: ClientSession, + oauth_session: config_entry_oauth2_flow.OAuth2Session, + ) -> None: + """Initialize Minut Point auth.""" + super().__init__(websession) + self._oauth_session = oauth_session + + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + await self._oauth_session.async_ensure_token_valid() + + return self._oauth_session.token["access_token"] diff --git 
a/homeassistant/components/point/application_credentials.py b/homeassistant/components/point/application_credentials.py new file mode 100644 index 00000000000..03cd02761f9 --- /dev/null +++ b/homeassistant/components/point/application_credentials.py @@ -0,0 +1,14 @@ +"""application_credentials platform the Minut Point integration.""" + +from homeassistant.components.application_credentials import AuthorizationServer +from homeassistant.core import HomeAssistant + +from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN + + +async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: + """Return authorization server.""" + return AuthorizationServer( + authorize_url=OAUTH2_AUTHORIZE, + token_url=OAUTH2_TOKEN, + ) diff --git a/homeassistant/components/point/binary_sensor.py b/homeassistant/components/point/binary_sensor.py index 7a698925db6..546c7d9cb0f 100644 --- a/homeassistant/components/point/binary_sensor.py +++ b/homeassistant/components/point/binary_sensor.py @@ -7,7 +7,7 @@ import logging from pypoint import EVENTS from homeassistant.components.binary_sensor import ( - DOMAIN, + DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, ) @@ -16,8 +16,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import MinutPointEntity from .const import DOMAIN as POINT_DOMAIN, POINT_DISCOVERY_NEW, SIGNAL_WEBHOOK +from .entity import MinutPointEntity _LOGGER = logging.getLogger(__name__) @@ -49,7 +49,7 @@ async def async_setup_entry( async def async_discover_sensor(device_id): """Discover and add a discovered sensor.""" - client = hass.data[POINT_DOMAIN][config_entry.entry_id] + client = config_entry.runtime_data.client async_add_entities( ( MinutPointBinarySensor(client, device_id, device_name) @@ -60,7 +60,9 @@ async def async_setup_entry( ) async_dispatcher_connect( - hass, POINT_DISCOVERY_NEW.format(DOMAIN, POINT_DOMAIN), async_discover_sensor + hass, + POINT_DISCOVERY_NEW.format(BINARY_SENSOR_DOMAIN, POINT_DOMAIN), + async_discover_sensor, ) diff --git a/homeassistant/components/point/config_flow.py b/homeassistant/components/point/config_flow.py index b2455438208..a0a51c7b9e6 100644 --- a/homeassistant/components/point/config_flow.py +++ b/homeassistant/components/point/config_flow.py @@ -1,193 +1,62 @@ """Config flow for Minut Point.""" -import asyncio -from collections import OrderedDict +from collections.abc import Mapping import logging from typing import Any -from pypoint import PointSession -import voluptuous as vol - -from homeassistant.components.http import KEY_HASS, HomeAssistantView -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET -from homeassistant.core import callback -from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.components.webhook import async_generate_id +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.const import CONF_TOKEN, CONF_WEBHOOK_ID +from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler from .const import DOMAIN -AUTH_CALLBACK_PATH = "/api/minut" -AUTH_CALLBACK_NAME = "api:minut" -DATA_FLOW_IMPL = "point_flow_implementation" +class OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): + """Config flow to handle Minut Point OAuth2 authentication.""" 
-_LOGGER = logging.getLogger(__name__) + DOMAIN = DOMAIN + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return logging.getLogger(__name__) -@callback -def register_flow_implementation(hass, domain, client_id, client_secret): - """Register a flow implementation. + async def async_step_import(self, data: dict[str, Any]) -> ConfigFlowResult: + """Handle import from YAML.""" + return await self.async_step_user() - domain: Domain of the component responsible for the implementation. - name: Name of the component. - client_id: Client id. - client_secret: Client secret. - """ - if DATA_FLOW_IMPL not in hass.data: - hass.data[DATA_FLOW_IMPL] = OrderedDict() + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + return await self.async_step_reauth_confirm() - hass.data[DATA_FLOW_IMPL][domain] = { - CONF_CLIENT_ID: client_id, - CONF_CLIENT_SECRET: client_secret, - } - - -class PointFlowHandler(ConfigFlow, domain=DOMAIN): - """Handle a config flow.""" - - VERSION = 1 - - def __init__(self) -> None: - """Initialize flow.""" - self.flow_impl = None - - async def async_step_import(self, user_input=None): - """Handle external yaml configuration.""" - if self._async_current_entries(): - return self.async_abort(reason="already_setup") - - self.flow_impl = DOMAIN - - return await self.async_step_auth() - - async def async_step_user( + async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle a flow start.""" - flows = self.hass.data.get(DATA_FLOW_IMPL, {}) + """Dialog that informs the user that reauth is required.""" + if user_input is None: + return self.async_show_form(step_id="reauth_confirm") + return await self.async_step_user() - if self._async_current_entries(): - return self.async_abort(reason="already_setup") + async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: + """Create an oauth config entry or update existing entry for reauth.""" + user_id = str(data[CONF_TOKEN]["user_id"]) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: + self._abort_if_unique_id_configured() - if not flows: - _LOGGER.debug("no flows") - return self.async_abort(reason="no_flows") - - if len(flows) == 1: - self.flow_impl = list(flows)[0] - return await self.async_step_auth() - - if user_input is not None: - self.flow_impl = user_input["flow_impl"] - return await self.async_step_auth() - - return self.async_show_form( - step_id="user", - data_schema=vol.Schema({vol.Required("flow_impl"): vol.In(list(flows))}), - ) - - async def async_step_auth(self, user_input=None): - """Create an entry for auth.""" - if self._async_current_entries(): - return self.async_abort(reason="external_setup") - - errors = {} - - if user_input is not None: - errors["base"] = "follow_link" - - try: - async with asyncio.timeout(10): - url = await self._get_authorization_url() - except TimeoutError: - return self.async_abort(reason="authorize_url_timeout") - except Exception: - _LOGGER.exception("Unexpected error generating auth url") - return self.async_abort(reason="unknown_authorize_url_generation") - return self.async_show_form( - step_id="auth", - description_placeholders={"authorization_url": url}, - errors=errors, - ) - - async def _get_authorization_url(self): - """Create Minut Point session and get authorization url.""" - flow = self.hass.data[DATA_FLOW_IMPL][self.flow_impl] - client_id = 
flow[CONF_CLIENT_ID] - client_secret = flow[CONF_CLIENT_SECRET] - point_session = PointSession( - async_get_clientsession(self.hass), - client_id, - client_secret, - ) - - self.hass.http.register_view(MinutAuthCallbackView()) - - return point_session.get_authorization_url - - async def async_step_code(self, code=None): - """Received code for authentication.""" - if self._async_current_entries(): - return self.async_abort(reason="already_setup") - - if code is None: - return self.async_abort(reason="no_code") - - _LOGGER.debug( - "Should close all flows below %s", - self._async_in_progress(), - ) - # Remove notification if no other discovery config entries in progress - - return await self._async_create_session(code) - - async def _async_create_session(self, code): - """Create point session and entries.""" - - flow = self.hass.data[DATA_FLOW_IMPL][DOMAIN] - client_id = flow[CONF_CLIENT_ID] - client_secret = flow[CONF_CLIENT_SECRET] - point_session = PointSession( - async_get_clientsession(self.hass), - client_id, - client_secret, - ) - token = await point_session.get_access_token(code) - _LOGGER.debug("Got new token") - if not point_session.is_authorized: - _LOGGER.error("Authentication Error") - return self.async_abort(reason="auth_error") - - _LOGGER.info("Successfully authenticated Point") - user_email = (await point_session.user()).get("email") or "" - - return self.async_create_entry( - title=user_email, - data={ - "token": token, - "refresh_args": { - CONF_CLIENT_ID: client_id, - CONF_CLIENT_SECRET: client_secret, - }, - }, - ) - - -class MinutAuthCallbackView(HomeAssistantView): - """Minut Authorization Callback View.""" - - requires_auth = False - url = AUTH_CALLBACK_PATH - name = AUTH_CALLBACK_NAME - - @staticmethod - async def get(request): - """Receive authorization code.""" - hass = request.app[KEY_HASS] - if "code" in request.query: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": "code"}, data=request.query["code"] - ) + return self.async_create_entry( + title="Minut Point", + data={**data, CONF_WEBHOOK_ID: async_generate_id()}, ) - return "OK!" 
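# --- Editor's note (annotation, not part of the patch) -----------------------------
# The manual authorize-URL/callback handling removed above is delegated to Home
# Assistant's shared config_entry_oauth2_flow helpers; the new handler only
# post-processes the `data` dict those helpers pass to async_oauth_create_entry.
# A minimal sketch of that dict's shape, assuming the Minut token payload exposes
# "user_id" as the code above expects (other token fields omitted, values illustrative):
oauth_data_example = {
    "auth_implementation": "point",  # credential set selected during the flow
    "token": {
        "access_token": "eyJ...",    # consumed by api.AsyncConfigEntryAuth
        "refresh_token": "abc...",   # kept valid via OAuth2Session.async_ensure_token_valid
        "user_id": "12345",          # becomes the config entry unique_id above
    },
}
# The flow then stores this dict, plus a freshly generated CONF_WEBHOOK_ID, as the
# config entry data (see async_create_entry above).
# ------------------------------------------------------------------------------------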
+ + reauth_entry = self._get_reauth_entry() + if reauth_entry.unique_id is not None: + self._abort_if_unique_id_mismatch(reason="wrong_account") + + logging.debug("user_id: %s", user_id) + return self.async_update_reload_and_abort( + reauth_entry, data_updates=data, unique_id=user_id + ) diff --git a/homeassistant/components/point/const.py b/homeassistant/components/point/const.py index c8c8f14d019..1122cf69c0a 100644 --- a/homeassistant/components/point/const.py +++ b/homeassistant/components/point/const.py @@ -7,8 +7,12 @@ DOMAIN = "point" SCAN_INTERVAL = timedelta(minutes=1) CONF_WEBHOOK_URL = "webhook_url" +CONF_REFRESH_TOKEN = "refresh_token" EVENT_RECEIVED = "point_webhook_received" SIGNAL_UPDATE_ENTITY = "point_update" SIGNAL_WEBHOOK = "point_webhook" -POINT_DISCOVERY_NEW = "point_new_{}_{}" +POINT_DISCOVERY_NEW = "point_new_{}" + +OAUTH2_AUTHORIZE = "https://api.minut.com/v8/oauth/authorize" +OAUTH2_TOKEN = "https://api.minut.com/v8/oauth/token" diff --git a/homeassistant/components/point/entity.py b/homeassistant/components/point/entity.py new file mode 100644 index 00000000000..4784dd43180 --- /dev/null +++ b/homeassistant/components/point/entity.py @@ -0,0 +1,95 @@ +"""Support for Minut Point.""" + +import logging + +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity +from homeassistant.util.dt import as_local, parse_datetime, utc_from_timestamp + +from .const import DOMAIN, SIGNAL_UPDATE_ENTITY + +_LOGGER = logging.getLogger(__name__) + + +class MinutPointEntity(Entity): + """Base Entity used by the sensors.""" + + _attr_should_poll = False + + def __init__(self, point_client, device_id, device_class) -> None: + """Initialize the entity.""" + self._async_unsub_dispatcher_connect = None + self._client = point_client + self._id = device_id + self._name = self.device.name + self._attr_device_class = device_class + self._updated = utc_from_timestamp(0) + self._attr_unique_id = f"point.{device_id}-{device_class}" + device = self.device.device + self._attr_device_info = DeviceInfo( + connections={(dr.CONNECTION_NETWORK_MAC, device["device_mac"])}, + identifiers={(DOMAIN, device["device_id"])}, + manufacturer="Minut", + model=f"Point v{device['hardware_version']}", + name=device["description"], + sw_version=device["firmware"]["installed"], + via_device=(DOMAIN, device["home"]), + ) + if device_class: + self._attr_name = f"{self._name} {device_class.capitalize()}" + + def __str__(self) -> str: + """Return string representation of device.""" + return f"MinutPoint {self.name}" + + async def async_added_to_hass(self): + """Call when entity is added to hass.""" + _LOGGER.debug("Created device %s", self) + self._async_unsub_dispatcher_connect = async_dispatcher_connect( + self.hass, SIGNAL_UPDATE_ENTITY, self._update_callback + ) + await self._update_callback() + + async def async_will_remove_from_hass(self): + """Disconnect dispatcher listener when removed.""" + if self._async_unsub_dispatcher_connect: + self._async_unsub_dispatcher_connect() + + async def _update_callback(self): + """Update the value of the sensor.""" + + @property + def available(self): + """Return true if device is not offline.""" + return self._client.is_available(self.device_id) + + @property + def device(self): + """Return the representation of the device.""" + return self._client.device(self.device_id) + + @property + def 
device_id(self): + """Return the id of the device.""" + return self._id + + @property + def extra_state_attributes(self): + """Return status of device.""" + attrs = self.device.device_status + attrs["last_heard_from"] = as_local(self.last_update).strftime( + "%Y-%m-%d %H:%M:%S" + ) + return attrs + + @property + def is_updated(self): + """Return true if sensor have been updated.""" + return self.last_update > self._updated + + @property + def last_update(self): + """Return the last_update time for the device.""" + return parse_datetime(self.device.last_update) diff --git a/homeassistant/components/point/manifest.json b/homeassistant/components/point/manifest.json index 0e8d7068a4f..5aa733b510f 100644 --- a/homeassistant/components/point/manifest.json +++ b/homeassistant/components/point/manifest.json @@ -3,10 +3,9 @@ "name": "Minut Point", "codeowners": ["@fredrike"], "config_flow": true, - "dependencies": ["webhook", "http"], + "dependencies": ["application_credentials", "http", "webhook"], "documentation": "https://www.home-assistant.io/integrations/point", "iot_class": "cloud_polling", "loggers": ["pypoint"], - "quality_scale": "silver", - "requirements": ["pypoint==2.3.2"] + "requirements": ["pypoint==3.0.0"] } diff --git a/homeassistant/components/point/sensor.py b/homeassistant/components/point/sensor.py index f648bb4daf9..d864c8bb18c 100644 --- a/homeassistant/components/point/sensor.py +++ b/homeassistant/components/point/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from homeassistant.components.sensor import ( - DOMAIN, + DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -17,8 +17,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import parse_datetime -from . 
import MinutPointEntity from .const import DOMAIN as POINT_DOMAIN, POINT_DISCOVERY_NEW +from .entity import MinutPointEntity _LOGGER = logging.getLogger(__name__) @@ -54,7 +54,7 @@ async def async_setup_entry( async def async_discover_sensor(device_id): """Discover and add a discovered sensor.""" - client = hass.data[POINT_DOMAIN][config_entry.entry_id] + client = config_entry.runtime_data.client async_add_entities( [ MinutPointSensor(client, device_id, description) @@ -64,7 +64,9 @@ async def async_setup_entry( ) async_dispatcher_connect( - hass, POINT_DISCOVERY_NEW.format(DOMAIN, POINT_DOMAIN), async_discover_sensor + hass, + POINT_DISCOVERY_NEW.format(SENSOR_DOMAIN, POINT_DOMAIN), + async_discover_sensor, ) diff --git a/homeassistant/components/point/strings.json b/homeassistant/components/point/strings.json index 8a28e314b69..b2e8d9309d9 100644 --- a/homeassistant/components/point/strings.json +++ b/homeassistant/components/point/strings.json @@ -1,29 +1,31 @@ { "config": { - "step": { - "user": { - "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]", - "description": "[%key:common::config_flow::description::confirm_setup%]", - "data": { "flow_impl": "Provider" } - }, - "auth": { - "title": "Authenticate Point", - "description": "Please follow the link below and **Accept** access to your Minut account, then come back and press **Submit** below.\n\n[Link]({authorization_url})" - } + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", + "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", + "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unknown": "[%key:common::config_flow::error::unknown%]", + "wrong_account": "You can only reauthenticate this account with the same user." 
}, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" }, - "error": { - "no_token": "[%key:common::config_flow::error::invalid_access_token%]", - "follow_link": "Please follow the link and authenticate before pressing Submit" - }, - "abort": { - "already_setup": "[%key:common::config_flow::abort::single_instance_allowed%]", - "external_setup": "Point successfully configured from another flow.", - "no_flows": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", - "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", - "unknown_authorize_url_generation": "[%key:common::config_flow::abort::unknown_authorize_url_generation%]" + "step": { + "pick_implementation": { + "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The Point integration needs to re-authenticate your account" + } } } } diff --git a/homeassistant/components/powerfox/__init__.py b/homeassistant/components/powerfox/__init__.py new file mode 100644 index 00000000000..243f3aacc4f --- /dev/null +++ b/homeassistant/components/powerfox/__init__.py @@ -0,0 +1,55 @@ +"""The Powerfox integration.""" + +from __future__ import annotations + +import asyncio + +from powerfox import Powerfox, PowerfoxConnectionError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .coordinator import PowerfoxDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type PowerfoxConfigEntry = ConfigEntry[list[PowerfoxDataUpdateCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: PowerfoxConfigEntry) -> bool: + """Set up Powerfox from a config entry.""" + client = Powerfox( + username=entry.data[CONF_EMAIL], + password=entry.data[CONF_PASSWORD], + session=async_get_clientsession(hass), + ) + + try: + devices = await client.all_devices() + except PowerfoxConnectionError as err: + await client.close() + raise ConfigEntryNotReady from err + + coordinators: list[PowerfoxDataUpdateCoordinator] = [ + PowerfoxDataUpdateCoordinator(hass, client, device) for device in devices + ] + + await asyncio.gather( + *[ + coordinator.async_config_entry_first_refresh() + for coordinator in coordinators + ] + ) + + entry.runtime_data = coordinators + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: PowerfoxConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/powerfox/config_flow.py b/homeassistant/components/powerfox/config_flow.py new file mode 100644 index 00000000000..dd17badf881 --- /dev/null +++ b/homeassistant/components/powerfox/config_flow.py @@ -0,0 +1,135 @@ +"""Config flow for Powerfox integration.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any + +from powerfox import Powerfox, PowerfoxAuthenticationError, PowerfoxConnectionError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from 
homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): str, + vol.Required(CONF_PASSWORD): str, + } +) + +STEP_REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } +) + + +class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Powerfox.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors = {} + + if user_input is not None: + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + client = Powerfox( + username=user_input[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.all_devices() + except PowerfoxAuthenticationError: + errors["base"] = "invalid_auth" + except PowerfoxConnectionError: + errors["base"] = "cannot_connect" + else: + return self.async_create_entry( + title=user_input[CONF_EMAIL], + data={ + CONF_EMAIL: user_input[CONF_EMAIL], + CONF_PASSWORD: user_input[CONF_PASSWORD], + }, + ) + return self.async_show_form( + step_id="user", + errors=errors, + data_schema=STEP_USER_DATA_SCHEMA, + ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication flow for Powerfox.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication flow for Powerfox.""" + errors = {} + + reauth_entry = self._get_reauth_entry() + if user_input is not None: + client = Powerfox( + username=reauth_entry.data[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.all_devices() + except PowerfoxAuthenticationError: + errors["base"] = "invalid_auth" + except PowerfoxConnectionError: + errors["base"] = "cannot_connect" + else: + return self.async_update_reload_and_abort( + reauth_entry, + data_updates=user_input, + ) + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={"email": reauth_entry.data[CONF_EMAIL]}, + data_schema=STEP_REAUTH_SCHEMA, + errors=errors, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Reconfigure Powerfox configuration.""" + errors = {} + + reconfigure_entry = self._get_reconfigure_entry() + if user_input is not None: + client = Powerfox( + username=user_input[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.all_devices() + except PowerfoxAuthenticationError: + errors["base"] = "invalid_auth" + except PowerfoxConnectionError: + errors["base"] = "cannot_connect" + else: + if reconfigure_entry.data[CONF_EMAIL] != user_input[CONF_EMAIL]: + self._async_abort_entries_match( + {CONF_EMAIL: user_input[CONF_EMAIL]} + ) + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates=user_input + ) + return self.async_show_form( + step_id="reconfigure", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/powerfox/const.py b/homeassistant/components/powerfox/const.py new file mode 100644 index 00000000000..0970e8a1b66 --- /dev/null +++ b/homeassistant/components/powerfox/const.py @@ -0,0 +1,11 @@ +"""Constants for the Powerfox integration.""" + +from __future__ 
import annotations + +from datetime import timedelta +import logging +from typing import Final + +DOMAIN: Final = "powerfox" +LOGGER = logging.getLogger(__package__) +SCAN_INTERVAL = timedelta(minutes=1) diff --git a/homeassistant/components/powerfox/coordinator.py b/homeassistant/components/powerfox/coordinator.py new file mode 100644 index 00000000000..f7ec5ab6716 --- /dev/null +++ b/homeassistant/components/powerfox/coordinator.py @@ -0,0 +1,49 @@ +"""Coordinator for Powerfox integration.""" + +from __future__ import annotations + +from powerfox import ( + Device, + Powerfox, + PowerfoxAuthenticationError, + PowerfoxConnectionError, + Poweropti, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER, SCAN_INTERVAL + + +class PowerfoxDataUpdateCoordinator(DataUpdateCoordinator[Poweropti]): + """Class to manage fetching Powerfox data from the API.""" + + config_entry: ConfigEntry + + def __init__( + self, + hass: HomeAssistant, + client: Powerfox, + device: Device, + ) -> None: + """Initialize global Powerfox data updater.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = client + self.device = device + + async def _async_update_data(self) -> Poweropti: + """Fetch data from Powerfox API.""" + try: + return await self.client.device(device_id=self.device.id) + except PowerfoxAuthenticationError as err: + raise ConfigEntryAuthFailed(err) from err + except PowerfoxConnectionError as err: + raise UpdateFailed(err) from err diff --git a/homeassistant/components/powerfox/diagnostics.py b/homeassistant/components/powerfox/diagnostics.py new file mode 100644 index 00000000000..8f6b847fca0 --- /dev/null +++ b/homeassistant/components/powerfox/diagnostics.py @@ -0,0 +1,58 @@ +"""Support for Powerfox diagnostics.""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any + +from powerfox import PowerMeter, WaterMeter + +from homeassistant.core import HomeAssistant + +from . 
import PowerfoxConfigEntry, PowerfoxDataUpdateCoordinator + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: PowerfoxConfigEntry +) -> dict[str, Any]: + """Return diagnostics for Powerfox config entry.""" + powerfox_data: list[PowerfoxDataUpdateCoordinator] = entry.runtime_data + + return { + "devices": [ + { + **( + { + "power_meter": { + "outdated": coordinator.data.outdated, + "timestamp": datetime.strftime( + coordinator.data.timestamp, "%Y-%m-%d %H:%M:%S" + ), + "power": coordinator.data.power, + "energy_usage": coordinator.data.energy_usage, + "energy_return": coordinator.data.energy_return, + "energy_usage_high_tariff": coordinator.data.energy_usage_high_tariff, + "energy_usage_low_tariff": coordinator.data.energy_usage_low_tariff, + } + } + if isinstance(coordinator.data, PowerMeter) + else {} + ), + **( + { + "water_meter": { + "outdated": coordinator.data.outdated, + "timestamp": datetime.strftime( + coordinator.data.timestamp, "%Y-%m-%d %H:%M:%S" + ), + "cold_water": coordinator.data.cold_water, + "warm_water": coordinator.data.warm_water, + } + } + if isinstance(coordinator.data, WaterMeter) + else {} + ), + } + for coordinator in powerfox_data + ], + } diff --git a/homeassistant/components/powerfox/entity.py b/homeassistant/components/powerfox/entity.py new file mode 100644 index 00000000000..0ab7200ffe8 --- /dev/null +++ b/homeassistant/components/powerfox/entity.py @@ -0,0 +1,32 @@ +"""Generic entity for Powerfox.""" + +from __future__ import annotations + +from powerfox import Device + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PowerfoxDataUpdateCoordinator + + +class PowerfoxEntity(CoordinatorEntity[PowerfoxDataUpdateCoordinator]): + """Base entity for Powerfox.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: PowerfoxDataUpdateCoordinator, + device: Device, + ) -> None: + """Initialize Powerfox entity.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device.id)}, + manufacturer="Powerfox", + model=device.type.human_readable, + name=device.name, + serial_number=device.id, + ) diff --git a/homeassistant/components/powerfox/manifest.json b/homeassistant/components/powerfox/manifest.json new file mode 100644 index 00000000000..7083ffe8de7 --- /dev/null +++ b/homeassistant/components/powerfox/manifest.json @@ -0,0 +1,16 @@ +{ + "domain": "powerfox", + "name": "Powerfox", + "codeowners": ["@klaasnicolaas"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/powerfox", + "iot_class": "cloud_polling", + "quality_scale": "silver", + "requirements": ["powerfox==1.0.0"], + "zeroconf": [ + { + "type": "_http._tcp.local.", + "name": "powerfox*" + } + ] +} diff --git a/homeassistant/components/powerfox/quality_scale.yaml b/homeassistant/components/powerfox/quality_scale.yaml new file mode 100644 index 00000000000..f72d25c3684 --- /dev/null +++ b/homeassistant/components/powerfox/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. 
+ docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: exempt + comment: | + This integration uses a coordinator to handle updates. + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + This integration is connecting to a cloud service. + discovery: + status: done + comment: | + It can find poweropti devices via zeroconf, and will start a normal user flow. + docs-data-update: done + docs-examples: todo + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: done + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: | + This integration does not have any entities that should disabled by default. + entity-translations: done + exception-translations: done + icon-translations: + status: exempt + comment: | + There is no need for icon translations. + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: todo + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/powerfox/sensor.py b/homeassistant/components/powerfox/sensor.py new file mode 100644 index 00000000000..7771f96dd81 --- /dev/null +++ b/homeassistant/components/powerfox/sensor.py @@ -0,0 +1,146 @@ +"""Sensors for Powerfox integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from powerfox import Device, PowerMeter, WaterMeter + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfVolume +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import PowerfoxConfigEntry +from .coordinator import PowerfoxDataUpdateCoordinator +from .entity import PowerfoxEntity + + +@dataclass(frozen=True, kw_only=True) +class PowerfoxSensorEntityDescription[T: (PowerMeter, WaterMeter)]( + SensorEntityDescription +): + """Describes Poweropti sensor entity.""" + + value_fn: Callable[[T], float | int | None] + + +SENSORS_POWER: tuple[PowerfoxSensorEntityDescription[PowerMeter], ...] 
= ( + PowerfoxSensorEntityDescription[PowerMeter]( + key="power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda meter: meter.power, + ), + PowerfoxSensorEntityDescription[PowerMeter]( + key="energy_usage", + translation_key="energy_usage", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.energy_usage, + ), + PowerfoxSensorEntityDescription[PowerMeter]( + key="energy_usage_low_tariff", + translation_key="energy_usage_low_tariff", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.energy_usage_low_tariff, + ), + PowerfoxSensorEntityDescription[PowerMeter]( + key="energy_usage_high_tariff", + translation_key="energy_usage_high_tariff", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.energy_usage_high_tariff, + ), + PowerfoxSensorEntityDescription[PowerMeter]( + key="energy_return", + translation_key="energy_return", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.energy_return, + ), +) + + +SENSORS_WATER: tuple[PowerfoxSensorEntityDescription[WaterMeter], ...] = ( + PowerfoxSensorEntityDescription[WaterMeter]( + key="cold_water", + translation_key="cold_water", + native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, + device_class=SensorDeviceClass.WATER, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.cold_water, + ), + PowerfoxSensorEntityDescription[WaterMeter]( + key="warm_water", + translation_key="warm_water", + native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, + device_class=SensorDeviceClass.WATER, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda meter: meter.warm_water, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PowerfoxConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Powerfox sensors based on a config entry.""" + entities: list[SensorEntity] = [] + for coordinator in entry.runtime_data: + if isinstance(coordinator.data, PowerMeter): + entities.extend( + PowerfoxSensorEntity( + coordinator=coordinator, + description=description, + device=coordinator.device, + ) + for description in SENSORS_POWER + if description.value_fn(coordinator.data) is not None + ) + if isinstance(coordinator.data, WaterMeter): + entities.extend( + PowerfoxSensorEntity( + coordinator=coordinator, + description=description, + device=coordinator.device, + ) + for description in SENSORS_WATER + ) + async_add_entities(entities) + + +class PowerfoxSensorEntity(PowerfoxEntity, SensorEntity): + """Defines a powerfox power meter sensor.""" + + entity_description: PowerfoxSensorEntityDescription + + def __init__( + self, + coordinator: PowerfoxDataUpdateCoordinator, + device: Device, + description: PowerfoxSensorEntityDescription, + ) -> None: + """Initialize Powerfox power meter sensor.""" + super().__init__(coordinator, device) + self.entity_description = description + self._attr_unique_id = f"{device.id}_{description.key}" + + @property + def native_value(self) -> float | int | None: + 
"""Return the state of the entity.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/powerfox/strings.json b/homeassistant/components/powerfox/strings.json new file mode 100644 index 00000000000..4a7c8e8fa4d --- /dev/null +++ b/homeassistant/components/powerfox/strings.json @@ -0,0 +1,70 @@ +{ + "config": { + "step": { + "user": { + "description": "Connect to your Powerfox account to get information about your energy, heat or water consumption.", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "The email address of your Powerfox account.", + "password": "The password of your Powerfox account." + } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The password for {email} is no longer valid.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::powerfox::config::step::user::data_description::password%]" + } + }, + "reconfigure": { + "title": "Reconfigure your Powerfox account", + "description": "Powerfox is already configured. Would you like to reconfigure it?", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::powerfox::config::step::user::data_description::email%]", + "password": "[%key:component::powerfox::config::step::user::data_description::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + } + }, + "entity": { + "sensor": { + "energy_usage": { + "name": "Energy usage" + }, + "energy_usage_low_tariff": { + "name": "Energy usage low tariff" + }, + "energy_usage_high_tariff": { + "name": "Energy usage high tariff" + }, + "energy_return": { + "name": "Energy return" + }, + "cold_water": { + "name": "Cold water" + }, + "warm_water": { + "name": "Warm water" + } + } + } +} diff --git a/homeassistant/components/powerwall/__init__.py b/homeassistant/components/powerwall/__init__.py index 0b6f889b90a..6a2522ac43b 100644 --- a/homeassistant/components/powerwall/__init__.py +++ b/homeassistant/components/powerwall/__init__.py @@ -168,6 +168,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerwallConfigEntry) -> coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="Powerwall site", update_method=manager.async_update_data, update_interval=timedelta(seconds=UPDATE_INTERVAL), diff --git a/homeassistant/components/powerwall/config_flow.py b/homeassistant/components/powerwall/config_flow.py index 3e2a5fdfd2d..0c39392ca19 100644 --- a/homeassistant/components/powerwall/config_flow.py +++ b/homeassistant/components/powerwall/config_flow.py @@ -99,7 +99,6 @@ class PowerwallConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the powerwall flow.""" self.ip_address: str | None = None self.title: str | None = None - self.reauth_entry: ConfigEntry | None = None async def _async_powerwall_is_offline(self, entry: ConfigEntry) -> bool: 
"""Check if the power wall is offline. @@ -188,9 +187,9 @@ class PowerwallConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm a discovered powerwall.""" assert self.ip_address is not None + assert self.title is not None assert self.unique_id is not None if user_input is not None: - assert self.title is not None return self.async_create_entry( title=self.title, data={ @@ -250,19 +249,22 @@ class PowerwallConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauth confirmation.""" - assert self.reauth_entry is not None errors: dict[str, str] | None = {} description_placeholders: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: - entry_data = self.reauth_entry.data errors, _, description_placeholders = await self._async_try_connect( - {CONF_IP_ADDRESS: entry_data[CONF_IP_ADDRESS], **user_input} + {CONF_IP_ADDRESS: reauth_entry.data[CONF_IP_ADDRESS], **user_input} ) if not errors: return self.async_update_reload_and_abort( - self.reauth_entry, data={**entry_data, **user_input} + reauth_entry, data_updates=user_input ) + self.context["title_placeholders"] = { + "name": reauth_entry.title, + "ip_address": reauth_entry.data[CONF_IP_ADDRESS], + } return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema({vol.Optional(CONF_PASSWORD): str}), @@ -274,9 +276,6 @@ class PowerwallConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() diff --git a/homeassistant/components/powerwall/sensor.py b/homeassistant/components/powerwall/sensor.py index 9423d65b0fc..28506e2a60c 100644 --- a/homeassistant/components/powerwall/sensor.py +++ b/homeassistant/components/powerwall/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass from operator import attrgetter, methodcaller -from typing import TYPE_CHECKING, Generic, TypeVar +from typing import TYPE_CHECKING from tesla_powerwall import GridState, MeterResponse, MeterType @@ -35,14 +35,12 @@ from .models import BatteryResponse, PowerwallConfigEntry, PowerwallRuntimeData _METER_DIRECTION_EXPORT = "export" _METER_DIRECTION_IMPORT = "import" -_ValueParamT = TypeVar("_ValueParamT") -_ValueT = TypeVar("_ValueT", bound=float | int | str | None) +type _ValueType = float | int | str | None @dataclass(frozen=True, kw_only=True) -class PowerwallSensorEntityDescription( - SensorEntityDescription, - Generic[_ValueParamT, _ValueT], +class PowerwallSensorEntityDescription[_ValueParamT, _ValueT: _ValueType]( + SensorEntityDescription ): """Describes Powerwall entity.""" @@ -389,7 +387,7 @@ class PowerWallImportSensor(PowerWallEnergyDirectionSensor): return meter.get_energy_imported() -class PowerWallBatterySensor(BatteryEntity, SensorEntity, Generic[_ValueT]): +class PowerWallBatterySensor[_ValueT: _ValueType](BatteryEntity, SensorEntity): """Representation of an Powerwall Battery sensor.""" entity_description: PowerwallSensorEntityDescription[BatteryResponse, _ValueT] diff --git a/homeassistant/components/profiler/__init__.py b/homeassistant/components/profiler/__init__.py index 9b2b9736574..389e3384ad9 100644 --- a/homeassistant/components/profiler/__init__.py +++ b/homeassistant/components/profiler/__init__.py @@ -436,6 +436,10 @@ async 
def _async_generate_memory_profile(hass: HomeAssistant, call: ServiceCall) # Imports deferred to avoid loading modules # in memory since usually only one part of this # integration is used at a time + if sys.version_info >= (3, 13): + raise HomeAssistantError( + "Memory profiling is not supported on Python 3.13. Please use Python 3.12." + ) from guppy import hpy # pylint: disable=import-outside-toplevel start_time = int(time.time() * 1000000) diff --git a/homeassistant/components/profiler/config_flow.py b/homeassistant/components/profiler/config_flow.py index 19995cf79aa..766d847e4a4 100644 --- a/homeassistant/components/profiler/config_flow.py +++ b/homeassistant/components/profiler/config_flow.py @@ -16,9 +16,6 @@ class ProfilerConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is not None: return self.async_create_entry(title=DEFAULT_NAME, data={}) diff --git a/homeassistant/components/profiler/icons.json b/homeassistant/components/profiler/icons.json index 4dda003c186..c1f996b6eb1 100644 --- a/homeassistant/components/profiler/icons.json +++ b/homeassistant/components/profiler/icons.json @@ -1,16 +1,40 @@ { "services": { - "start": "mdi:play", - "memory": "mdi:memory", - "start_log_objects": "mdi:invoice-text-plus", - "stop_log_objects": "mdi:invoice-text-remove", - "dump_log_objects": "mdi:invoice-export-outline", - "start_log_object_sources": "mdi:play", - "stop_log_object_sources": "mdi:stop", - "lru_stats": "mdi:chart-areaspline", - "log_current_tasks": "mdi:format-list-bulleted", - "log_thread_frames": "mdi:format-list-bulleted", - "log_event_loop_scheduled": "mdi:calendar-clock", - "set_asyncio_debug": "mdi:bug-check" + "start": { + "service": "mdi:play" + }, + "memory": { + "service": "mdi:memory" + }, + "start_log_objects": { + "service": "mdi:invoice-text-plus" + }, + "stop_log_objects": { + "service": "mdi:invoice-text-remove" + }, + "dump_log_objects": { + "service": "mdi:invoice-export-outline" + }, + "start_log_object_sources": { + "service": "mdi:play" + }, + "stop_log_object_sources": { + "service": "mdi:stop" + }, + "lru_stats": { + "service": "mdi:chart-areaspline" + }, + "log_current_tasks": { + "service": "mdi:format-list-bulleted" + }, + "log_thread_frames": { + "service": "mdi:format-list-bulleted" + }, + "log_event_loop_scheduled": { + "service": "mdi:calendar-clock" + }, + "set_asyncio_debug": { + "service": "mdi:bug-check" + } } } diff --git a/homeassistant/components/profiler/manifest.json b/homeassistant/components/profiler/manifest.json index ceaab458e69..8d2814c8c7f 100644 --- a/homeassistant/components/profiler/manifest.json +++ b/homeassistant/components/profiler/manifest.json @@ -7,7 +7,8 @@ "quality_scale": "internal", "requirements": [ "pyprof2calltree==1.4.5", - "guppy3==3.1.4.post1", + "guppy3==3.1.4.post1;python_version<'3.13'", "objgraph==3.5.0" - ] + ], + "single_config_entry": true } diff --git a/homeassistant/components/profiler/strings.json b/homeassistant/components/profiler/strings.json index 7a31c567040..f363b5a22cb 100644 --- a/homeassistant/components/profiler/strings.json +++ b/homeassistant/components/profiler/strings.json @@ -4,9 +4,6 @@ "user": { "description": "[%key:common::config_flow::description::confirm_setup%]" } - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, 
"services": { diff --git a/homeassistant/components/progettihwsw/config_flow.py b/homeassistant/components/progettihwsw/config_flow.py index 95596b940a4..2202678da9b 100644 --- a/homeassistant/components/progettihwsw/config_flow.py +++ b/homeassistant/components/progettihwsw/config_flow.py @@ -1,6 +1,6 @@ """Config flow for ProgettiHWSW Automation integration.""" -from typing import Any +from typing import TYPE_CHECKING, Any from ProgettiHWSW.ProgettiHWSWAPI import ProgettiHWSWAPI import voluptuous as vol @@ -42,9 +42,13 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize class variables.""" self.s1_in: dict[str, Any] | None = None - async def async_step_relay_modes(self, user_input=None): + async def async_step_relay_modes( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Manage relay modes step.""" - errors = {} + errors: dict[str, str] = {} + if TYPE_CHECKING: + assert self.s1_in is not None if user_input is not None: whole_data = user_input whole_data.update(self.s1_in) diff --git a/homeassistant/components/proliphix/climate.py b/homeassistant/components/proliphix/climate.py index 18b974800a3..be7d394993a 100644 --- a/homeassistant/components/proliphix/climate.py +++ b/homeassistant/components/proliphix/climate.py @@ -61,7 +61,6 @@ class ProliphixThermostat(ClimateEntity): _attr_precision = PRECISION_TENTHS _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE _attr_temperature_unit = UnitOfTemperature.FAHRENHEIT - _enable_turn_on_off_backwards_compatibility = False def __init__(self, pdp): """Initialize the thermostat.""" diff --git a/homeassistant/components/proliphix/manifest.json b/homeassistant/components/proliphix/manifest.json index 2b01d5deb46..9cf0b9b0950 100644 --- a/homeassistant/components/proliphix/manifest.json +++ b/homeassistant/components/proliphix/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/proliphix", "iot_class": "local_polling", "loggers": ["proliphix"], + "quality_scale": "legacy", "requirements": ["proliphix==0.4.1"] } diff --git a/homeassistant/components/prometheus/__init__.py b/homeassistant/components/prometheus/__init__.py index 8cc0a8f4b6a..c243bf90dc0 100644 --- a/homeassistant/components/prometheus/__init__.py +++ b/homeassistant/components/prometheus/__init__.py @@ -14,6 +14,7 @@ from prometheus_client.metrics import MetricWrapperBase import voluptuous as vol from homeassistant import core as hacore +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, @@ -51,16 +52,6 @@ from homeassistant.const import ( CONTENT_TYPE_TEXT_PLAIN, EVENT_STATE_CHANGED, PERCENTAGE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_CLOSED, STATE_CLOSING, STATE_ON, @@ -85,6 +76,8 @@ from homeassistant.util.unit_conversion import TemperatureConverter _LOGGER = logging.getLogger(__name__) API_ENDPOINT = "/api/prometheus" +IGNORED_STATES = frozenset({STATE_UNAVAILABLE, STATE_UNKNOWN}) + DOMAIN = "prometheus" CONF_FILTER = "filter" @@ -98,6 +91,7 @@ CONF_OVERRIDE_METRIC = "override_metric" COMPONENT_CONFIG_SCHEMA_ENTRY = vol.Schema( {vol.Optional(CONF_OVERRIDE_METRIC): cv.string} ) +ALLOWED_METRIC_CHARS = set(string.ascii_letters + string.digits 
+ "_:") DEFAULT_NAMESPACE = "homeassistant" @@ -219,14 +213,6 @@ class PrometheusMetrics: """Add/update a state in Prometheus.""" entity_id = state.entity_id _LOGGER.debug("Handling state update for %s", entity_id) - domain, _ = hacore.split_entity_id(entity_id) - - ignored_states = (STATE_UNAVAILABLE, STATE_UNKNOWN) - - handler = f"_handle_{domain}" - - if hasattr(self, handler) and state.state not in ignored_states: - getattr(self, handler)(state) labels = self._labels(state) state_change = self._metric( @@ -239,7 +225,7 @@ class PrometheusMetrics: prometheus_client.Gauge, "Entity is available (not in the unavailable or unknown state)", ) - entity_available.labels(**labels).set(float(state.state not in ignored_states)) + entity_available.labels(**labels).set(float(state.state not in IGNORED_STATES)) last_updated_time_seconds = self._metric( "last_updated_time_seconds", @@ -248,6 +234,18 @@ class PrometheusMetrics: ) last_updated_time_seconds.labels(**labels).set(state.last_updated.timestamp()) + if state.state in IGNORED_STATES: + self._remove_labelsets( + entity_id, + None, + {state_change, entity_available, last_updated_time_seconds}, + ) + else: + domain, _ = hacore.split_entity_id(entity_id) + handler = f"_handle_{domain}" + if hasattr(self, handler) and state.state: + getattr(self, handler)(state) + def handle_entity_registry_updated( self, event: Event[EventEntityRegistryUpdatedData] ) -> None: @@ -274,10 +272,17 @@ class PrometheusMetrics: self._remove_labelsets(metrics_entity_id) def _remove_labelsets( - self, entity_id: str, friendly_name: str | None = None + self, + entity_id: str, + friendly_name: str | None = None, + ignored_metrics: set[MetricWrapperBase] | None = None, ) -> None: - """Remove labelsets matching the given entity id from all metrics.""" + """Remove labelsets matching the given entity id from all non-ignored metrics.""" + if ignored_metrics is None: + ignored_metrics = set() for metric in list(self._metrics.values()): + if metric in ignored_metrics: + continue for sample in cast(list[prometheus_client.Metric], metric.collect())[ 0 ].samples: @@ -334,17 +339,12 @@ class PrometheusMetrics: @staticmethod def _sanitize_metric_name(metric: str) -> str: return "".join( - [ - c - if c in string.ascii_letters + string.digits + "_:" - else f"u{hex(ord(c))}" - for c in metric - ] + [c if c in ALLOWED_METRIC_CHARS else f"u{hex(ord(c))}" for c in metric] ) @staticmethod - def state_as_number(state: State) -> float: - """Return a state casted to a float.""" + def state_as_number(state: State) -> float | None: + """Return state as a float, or None if state cannot be converted.""" try: if state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP: value = as_timestamp(state.state) @@ -352,7 +352,7 @@ class PrometheusMetrics: value = state_helper.state_as_number(state) except ValueError: _LOGGER.debug("Could not convert %s to float", state) - value = 0 + value = None return value @staticmethod @@ -382,8 +382,8 @@ class PrometheusMetrics: prometheus_client.Gauge, "State of the binary sensor (0/1)", ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_input_boolean(self, state: State) -> None: metric = self._metric( @@ -391,8 +391,8 @@ class PrometheusMetrics: prometheus_client.Gauge, "State of the input boolean (0/1)", ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if 
(value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _numeric_handler(self, state: State, domain: str, title: str) -> None: if unit := self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)): @@ -408,8 +408,7 @@ class PrometheusMetrics: f"State of the {title}", ) - with suppress(ValueError): - value = self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: if ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.FAHRENHEIT @@ -431,15 +430,15 @@ class PrometheusMetrics: prometheus_client.Gauge, "State of the device tracker (0/1)", ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_person(self, state: State) -> None: metric = self._metric( "person_state", prometheus_client.Gauge, "State of the person (0/1)" ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_cover(self, state: State) -> None: metric = self._metric( @@ -480,23 +479,19 @@ class PrometheusMetrics: "Light brightness percentage (0..100)", ) - try: + if (value := self.state_as_number(state)) is not None: brightness = state.attributes.get(ATTR_BRIGHTNESS) if state.state == STATE_ON and brightness is not None: - value = brightness / 255.0 - else: - value = self.state_as_number(state) + value = float(brightness) / 255.0 value = value * 100 metric.labels(**self._labels(state)).set(value) - except ValueError: - pass def _handle_lock(self, state: State) -> None: metric = self._metric( "lock_state", prometheus_client.Gauge, "State of the lock (0/1)" ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_climate_temp( self, state: State, attr: str, metric_name: str, metric_description: str @@ -608,11 +603,8 @@ class PrometheusMetrics: prometheus_client.Gauge, "State of the humidifier (0/1)", ) - try: - value = self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: metric.labels(**self._labels(state)).set(value) - except ValueError: - pass current_mode = state.attributes.get(ATTR_MODE) available_modes = state.attributes.get(ATTR_AVAILABLE_MODES) @@ -643,8 +635,7 @@ class PrometheusMetrics: _metric = self._metric(metric, prometheus_client.Gauge, documentation) - try: - value = self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: if ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.FAHRENHEIT @@ -653,8 +644,6 @@ class PrometheusMetrics: value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS ) _metric.labels(**self._labels(state)).set(value) - except ValueError: - pass self._battery(state) @@ -687,20 +676,15 @@ class PrometheusMetrics: def _sensor_override_component_metric( self, state: State, unit: str | None ) -> str | None: - """Get metric from override in component confioguration.""" + """Get metric from override in component configuration.""" return self._component_config.get(state.entity_id).get(CONF_OVERRIDE_METRIC) @staticmethod def _sensor_fallback_metric(state: State, unit: str | None) -> str | None: """Get metric from fallback logic for compatibility.""" - if unit in (None, 
""): - try: - state_helper.state_as_number(state) - except ValueError: - _LOGGER.debug("Unsupported sensor: %s", state.entity_id) - return None - return "sensor_state" - return f"sensor_unit_{unit}" + if unit not in (None, ""): + return f"sensor_unit_{unit}" + return "sensor_state" @staticmethod def _unit_string(unit: str | None) -> str | None: @@ -722,11 +706,8 @@ class PrometheusMetrics: "switch_state", prometheus_client.Gauge, "State of the switch (0/1)" ) - try: - value = self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: metric.labels(**self._labels(state)).set(value) - except ValueError: - pass self._handle_attributes(state) @@ -735,11 +716,8 @@ class PrometheusMetrics: "fan_state", prometheus_client.Gauge, "State of the fan (0/1)" ) - try: - value = self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: metric.labels(**self._labels(state)).set(value) - except ValueError: - pass fan_speed_percent = state.attributes.get(ATTR_PERCENTAGE) if fan_speed_percent is not None: @@ -805,8 +783,8 @@ class PrometheusMetrics: prometheus_client.Gauge, "Value of counter entities", ) - - metric.labels(**self._labels(state)).set(self.state_as_number(state)) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_update(self, state: State) -> None: metric = self._metric( @@ -814,8 +792,8 @@ class PrometheusMetrics: prometheus_client.Gauge, "Update state, indicating if an update is available (0/1)", ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_alarm_control_panel(self, state: State) -> None: current_state = state.state @@ -828,22 +806,9 @@ class PrometheusMetrics: ["state"], ) - alarm_states = [ - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - STATE_ALARM_PENDING, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMING, - ] - - for alarm_state in alarm_states: - metric.labels(**dict(self._labels(state), state=alarm_state)).set( - float(alarm_state == current_state) + for alarm_state in AlarmControlPanelState: + metric.labels(**dict(self._labels(state), state=alarm_state.value)).set( + float(alarm_state.value == current_state) ) diff --git a/homeassistant/components/prometheus/manifest.json b/homeassistant/components/prometheus/manifest.json index cb8defb2ed5..e747226074c 100644 --- a/homeassistant/components/prometheus/manifest.json +++ b/homeassistant/components/prometheus/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/prometheus", "iot_class": "assumed_state", "loggers": ["prometheus_client"], - "requirements": ["prometheus-client==0.17.1"] + "quality_scale": "legacy", + "requirements": ["prometheus-client==0.21.0"] } diff --git a/homeassistant/components/prosegur/alarm_control_panel.py b/homeassistant/components/prosegur/alarm_control_panel.py index ffedcf30770..1c58b64cf55 100644 --- a/homeassistant/components/prosegur/alarm_control_panel.py +++ b/homeassistant/components/prosegur/alarm_control_panel.py @@ -10,13 +10,9 @@ from pyprosegur.installation import Installation, Status from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from 
homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -26,10 +22,10 @@ from . import DOMAIN _LOGGER = logging.getLogger(__name__) STATE_MAPPING = { - Status.DISARMED: STATE_ALARM_DISARMED, - Status.ARMED: STATE_ALARM_ARMED_AWAY, - Status.PARTIALLY: STATE_ALARM_ARMED_HOME, - Status.ERROR_PARTIALLY: STATE_ALARM_ARMED_HOME, + Status.DISARMED: AlarmControlPanelState.DISARMED, + Status.ARMED: AlarmControlPanelState.ARMED_AWAY, + Status.PARTIALLY: AlarmControlPanelState.ARMED_HOME, + Status.ERROR_PARTIALLY: AlarmControlPanelState.ARMED_HOME, } @@ -82,7 +78,7 @@ class ProsegurAlarm(AlarmControlPanelEntity): self._attr_available = False return - self._attr_state = STATE_MAPPING.get(self._installation.status) + self._attr_alarm_state = STATE_MAPPING.get(self._installation.status) self._attr_available = True async def async_alarm_disarm(self, code: str | None = None) -> None: diff --git a/homeassistant/components/prosegur/config_flow.py b/homeassistant/components/prosegur/config_flow.py index 7a8f67cef7d..74e4d268144 100644 --- a/homeassistant/components/prosegur/config_flow.py +++ b/homeassistant/components/prosegur/config_flow.py @@ -2,13 +2,13 @@ from collections.abc import Mapping import logging -from typing import Any, cast +from typing import Any from pyprosegur.auth import COUNTRY, Auth from pyprosegur.installation import Installation import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -46,7 +46,6 @@ class ProsegurConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Prosegur Alarm.""" VERSION = 1 - entry: ConfigEntry auth: Auth user_input: dict contracts: list[dict[str, str]] @@ -110,19 +109,18 @@ class ProsegurConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Prosegur.""" - self.entry = cast( - ConfigEntry, - self.hass.config_entries.async_get_entry(self.context["entry_id"]), - ) return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle re-authentication with Prosegur.""" - errors = {} + errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() if user_input: try: - user_input[CONF_COUNTRY] = self.entry.data[CONF_COUNTRY] + user_input[CONF_COUNTRY] = reauth_entry.data[CONF_COUNTRY] self.auth, self.contracts = await validate_input(self.hass, user_input) except CannotConnect: @@ -133,25 +131,20 @@ class ProsegurConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={ CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) - self.hass.async_create_task( - 
self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema( { vol.Required( - CONF_USERNAME, default=self.entry.data[CONF_USERNAME] + CONF_USERNAME, default=reauth_entry.data[CONF_USERNAME] ): str, vol.Required(CONF_PASSWORD): str, } diff --git a/homeassistant/components/prosegur/icons.json b/homeassistant/components/prosegur/icons.json index 33cddefdaea..8f175ab9056 100644 --- a/homeassistant/components/prosegur/icons.json +++ b/homeassistant/components/prosegur/icons.json @@ -1,5 +1,7 @@ { "services": { - "request_image": "mdi:image-sync" + "request_image": { + "service": "mdi:image-sync" + } } } diff --git a/homeassistant/components/prowl/manifest.json b/homeassistant/components/prowl/manifest.json index 50decb3f046..049d95fb94c 100644 --- a/homeassistant/components/prowl/manifest.json +++ b/homeassistant/components/prowl/manifest.json @@ -3,5 +3,6 @@ "name": "Prowl", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/prowl", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/proximity/config_flow.py b/homeassistant/components/proximity/config_flow.py index d133b14cb6a..5818ec2979b 100644 --- a/homeassistant/components/proximity/config_flow.py +++ b/homeassistant/components/proximity/config_flow.py @@ -89,7 +89,7 @@ class ProximityConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return ProximityOptionsFlow(config_entry) + return ProximityOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -117,20 +117,10 @@ class ProximityConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=self._user_form_schema(user_input), ) - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Import a yaml config entry.""" - return await self.async_step_user(user_input) - class ProximityOptionsFlow(OptionsFlow): """Handle a option flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - def _user_form_schema(self, user_input: dict[str, Any]) -> vol.Schema: return vol.Schema(_base_schema(user_input)) diff --git a/homeassistant/components/proxmoxve/manifest.json b/homeassistant/components/proxmoxve/manifest.json index 8cf3bc7932d..45ead1330e2 100644 --- a/homeassistant/components/proxmoxve/manifest.json +++ b/homeassistant/components/proxmoxve/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/proxmoxve", "iot_class": "local_polling", "loggers": ["proxmoxer"], + "quality_scale": "legacy", "requirements": ["proxmoxer==2.0.1"] } diff --git a/homeassistant/components/proxy/manifest.json b/homeassistant/components/proxy/manifest.json index 1e70c4d3e10..e73eddf3cdd 100644 --- a/homeassistant/components/proxy/manifest.json +++ b/homeassistant/components/proxy/manifest.json @@ -3,5 +3,6 @@ "name": "Camera Proxy", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/proxy", - "requirements": ["Pillow==10.4.0"] + "quality_scale": "legacy", + "requirements": ["Pillow==11.0.0"] } diff --git a/homeassistant/components/prusalink/__init__.py b/homeassistant/components/prusalink/__init__.py index 62eeb91d3e1..1415e3dd0a6 100644 --- 
a/homeassistant/components/prusalink/__init__.py +++ b/homeassistant/components/prusalink/__init__.py @@ -16,9 +16,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryError from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.httpx_client import get_async_client -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .config_flow import ConfigFlow from .const import DOMAIN @@ -26,7 +24,6 @@ from .coordinator import ( InfoUpdateCoordinator, JobUpdateCoordinator, LegacyStatusCoordinator, - PrusaLinkUpdateCoordinator, StatusCoordinator, ) @@ -128,19 +125,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class PrusaLinkEntity(CoordinatorEntity[PrusaLinkUpdateCoordinator]): - """Defines a base PrusaLink entity.""" - - _attr_has_entity_name = True - - @property - def device_info(self) -> DeviceInfo: - """Return device information about this PrusaLink device.""" - return DeviceInfo( - identifiers={(DOMAIN, self.coordinator.config_entry.entry_id)}, - name=self.coordinator.config_entry.title, - manufacturer="Prusa", - configuration_url=self.coordinator.api.client.host, - ) diff --git a/homeassistant/components/prusalink/binary_sensor.py b/homeassistant/components/prusalink/binary_sensor.py index abeb79c2876..d40ac8a4cfa 100644 --- a/homeassistant/components/prusalink/binary_sensor.py +++ b/homeassistant/components/prusalink/binary_sensor.py @@ -17,9 +17,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import PrusaLinkEntity from .const import DOMAIN from .coordinator import PrusaLinkUpdateCoordinator +from .entity import PrusaLinkEntity T = TypeVar("T", PrinterStatus, LegacyPrinterStatus, JobInfo, PrinterInfo) diff --git a/homeassistant/components/prusalink/button.py b/homeassistant/components/prusalink/button.py index 0ad7e531d46..06d356b2ca6 100644 --- a/homeassistant/components/prusalink/button.py +++ b/homeassistant/components/prusalink/button.py @@ -15,9 +15,9 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import PrusaLinkEntity from .const import DOMAIN from .coordinator import PrusaLinkUpdateCoordinator +from .entity import PrusaLinkEntity T = TypeVar("T", PrinterStatus, LegacyPrinterStatus, JobInfo) diff --git a/homeassistant/components/prusalink/camera.py b/homeassistant/components/prusalink/camera.py index 2185c5f3cf6..eee655447cc 100644 --- a/homeassistant/components/prusalink/camera.py +++ b/homeassistant/components/prusalink/camera.py @@ -9,9 +9,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import PrusaLinkEntity from .const import DOMAIN from .coordinator import JobUpdateCoordinator +from .entity import PrusaLinkEntity async def async_setup_entry( diff --git a/homeassistant/components/prusalink/entity.py b/homeassistant/components/prusalink/entity.py new file mode 100644 index 00000000000..e0bc62ba3c0 --- /dev/null +++ b/homeassistant/components/prusalink/entity.py @@ -0,0 +1,25 @@ +"""The PrusaLink integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PrusaLinkUpdateCoordinator + + +class PrusaLinkEntity(CoordinatorEntity[PrusaLinkUpdateCoordinator]): + """Defines a base PrusaLink entity.""" + + _attr_has_entity_name = True + + @property + def device_info(self) -> DeviceInfo: + """Return device information about this PrusaLink device.""" + return DeviceInfo( + identifiers={(DOMAIN, self.coordinator.config_entry.entry_id)}, + name=self.coordinator.config_entry.title, + manufacturer="Prusa", + configuration_url=self.coordinator.api.client.host, + ) diff --git a/homeassistant/components/prusalink/manifest.json b/homeassistant/components/prusalink/manifest.json index 6c64419debb..c41b55bd5ab 100644 --- a/homeassistant/components/prusalink/manifest.json +++ b/homeassistant/components/prusalink/manifest.json @@ -1,7 +1,7 @@ { "domain": "prusalink", "name": "PrusaLink", - "codeowners": ["@balloob", "@Skaronator"], + "codeowners": ["@balloob"], "config_flow": true, "dhcp": [ { diff --git a/homeassistant/components/prusalink/sensor.py b/homeassistant/components/prusalink/sensor.py index 96cd4979b11..0c746adbe2e 100644 --- a/homeassistant/components/prusalink/sensor.py +++ b/homeassistant/components/prusalink/sensor.py @@ -29,9 +29,9 @@ from homeassistant.helpers.typing import StateType from homeassistant.util.dt import utcnow from homeassistant.util.variance import ignore_variance -from . 
import PrusaLinkEntity from .const import DOMAIN from .coordinator import PrusaLinkUpdateCoordinator +from .entity import PrusaLinkEntity T = TypeVar("T", PrinterStatus, LegacyPrinterStatus, JobInfo, PrinterInfo) diff --git a/homeassistant/components/ps4/__init__.py b/homeassistant/components/ps4/__init__.py index 3e92861b963..0ada2885fa7 100644 --- a/homeassistant/components/ps4/__init__.py +++ b/homeassistant/components/ps4/__init__.py @@ -111,7 +111,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: device[CONF_REGION] = country version = 2 config_entries.async_update_entry(entry, data=data, version=2) - _LOGGER.info( + _LOGGER.debug( "PlayStation 4 Config Updated: Region changed to: %s", country, ) @@ -143,7 +143,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: config_entry=entry, device_id=e_entry.device_id, ) - _LOGGER.info( + _LOGGER.debug( "PlayStation 4 identifier for entity: %s has changed", entity_id, ) diff --git a/homeassistant/components/ps4/config_flow.py b/homeassistant/components/ps4/config_flow.py index cdbf02dcc90..877fb595fc0 100644 --- a/homeassistant/components/ps4/config_flow.py +++ b/homeassistant/components/ps4/config_flow.py @@ -48,13 +48,13 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the config flow.""" self.helper = Helper() - self.creds = None + self.creds: str | None = None self.name = None self.host = None self.region = None - self.pin = None + self.pin: str | None = None self.m_device = None - self.location = None + self.location: location.LocationInfo | None = None self.device_list: list[str] = [] async def async_step_user( @@ -69,7 +69,9 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason=reason) return await self.async_step_creds() - async def async_step_creds(self, user_input=None): + async def async_step_creds( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Return PS4 credentials from 2nd Screen App.""" errors = {} if user_input is not None: @@ -85,7 +87,9 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="creds", errors=errors) - async def async_step_mode(self, user_input=None): + async def async_step_mode( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Prompt for mode.""" errors = {} mode = [CONF_AUTO, CONF_MANUAL] @@ -100,7 +104,7 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): if not errors: return await self.async_step_link() - mode_schema = OrderedDict() + mode_schema = OrderedDict[vol.Marker, Any]() mode_schema[vol.Required(CONF_MODE, default=CONF_AUTO)] = vol.In(list(mode)) mode_schema[vol.Optional(CONF_IP_ADDRESS)] = str @@ -108,7 +112,9 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): step_id="mode", data_schema=vol.Schema(mode_schema), errors=errors ) - async def async_step_link(self, user_input=None): + async def async_step_link( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Prompt user input. Create or edit entry.""" regions = sorted(COUNTRIES.keys()) default_region = None @@ -193,7 +199,7 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): default_region = country # Show User Input form. 
- link_schema = OrderedDict() + link_schema = OrderedDict[vol.Marker, Any]() link_schema[vol.Required(CONF_IP_ADDRESS)] = vol.In(list(self.device_list)) link_schema[vol.Required(CONF_REGION, default=default_region)] = vol.In( list(regions) diff --git a/homeassistant/components/ps4/icons.json b/homeassistant/components/ps4/icons.json index 8da5909213b..21f8405f816 100644 --- a/homeassistant/components/ps4/icons.json +++ b/homeassistant/components/ps4/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "send_command": "mdi:console" + "send_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/ps4/media_player.py b/homeassistant/components/ps4/media_player.py index 77477ba7901..8db24beae20 100644 --- a/homeassistant/components/ps4/media_player.py +++ b/homeassistant/components/ps4/media_player.py @@ -96,11 +96,10 @@ class PS4Device(MediaPlayerEntity): self._retry = 0 self._disconnected = False - @callback def status_callback(self) -> None: """Handle status callback. Parse status.""" self._parse_status() - self.async_write_ha_state() + self.schedule_update_ha_state() @callback def subscribe_to_protocol(self) -> None: @@ -118,7 +117,7 @@ class PS4Device(MediaPlayerEntity): """Display logger msg if region is deprecated.""" # Non-Breaking although data returned may be inaccurate. if self._region in deprecated_regions: - _LOGGER.info( + _LOGGER.warning( """Region: %s has been deprecated. Please remove PS4 integration and Re-configure again to utilize @@ -157,7 +156,7 @@ class PS4Device(MediaPlayerEntity): self._ps4.ddp_protocol = self.hass.data[PS4_DATA].protocol self.subscribe_to_protocol() - self._parse_status() + await self.hass.async_add_executor_job(self._parse_status) def _parse_status(self) -> None: """Parse status.""" @@ -340,7 +339,7 @@ class PS4Device(MediaPlayerEntity): """Set device info for registry.""" # If cannot get status on startup, assume info from registry. if status is None: - _LOGGER.info("Assuming status from registry") + _LOGGER.debug("Assuming status from registry") e_registry = er.async_get(self.hass) d_registry = dr.async_get(self.hass) diff --git a/homeassistant/components/ps4/strings.json b/homeassistant/components/ps4/strings.json index 163f2cc9b94..6b1d4cd690b 100644 --- a/homeassistant/components/ps4/strings.json +++ b/homeassistant/components/ps4/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "creds": { - "description": "Credentials needed. Press 'Submit' and then in the PS4 2nd Screen App, refresh devices and select the 'Home-Assistant' device to continue." + "description": "Credentials needed. Select **Submit** and then in the PS4 2nd Screen App, refresh devices and select the **Home-Assistant** device to continue." }, "mode": { "data": { @@ -21,12 +21,12 @@ "ip_address": "[%key:common::config_flow::data::ip%]" }, "data_description": { - "code": "Navigate to 'Settings' on your PlayStation 4 console. Then navigate to 'Mobile App Connection Settings' and select 'Add Device' to get the pin." + "code": "On your PlayStation 4 console, go to **Settings**. Then, go to **Mobile App Connection Settings** and select **Add Device** to get the pin." } } }, "error": { - "credential_timeout": "Credential service timed out. Press submit to restart.", + "credential_timeout": "Credential service timed out. Select **Submit** to restart.", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "login_failed": "Failed to pair to PlayStation 4. 
Verify PIN is correct.", "no_ipaddress": "Enter the IP address of the PlayStation 4 you would like to configure." diff --git a/homeassistant/components/pulseaudio_loopback/manifest.json b/homeassistant/components/pulseaudio_loopback/manifest.json index a67dc614c50..90666d18997 100644 --- a/homeassistant/components/pulseaudio_loopback/manifest.json +++ b/homeassistant/components/pulseaudio_loopback/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/pulseaudio_loopback", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pulsectl==23.5.2"] } diff --git a/homeassistant/components/pure_energie/__init__.py b/homeassistant/components/pure_energie/__init__.py index 459dc5c055c..4de1ce02810 100644 --- a/homeassistant/components/pure_energie/__init__.py +++ b/homeassistant/components/pure_energie/__init__.py @@ -7,13 +7,14 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DOMAIN from .coordinator import PureEnergieDataUpdateCoordinator -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type PureEnergieConfigEntry = ConfigEntry[PureEnergieDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: PureEnergieConfigEntry) -> bool: """Set up Pure Energie from a config entry.""" coordinator = PureEnergieDataUpdateCoordinator(hass) @@ -23,14 +24,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await coordinator.gridnet.close() raise - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: PureEnergieConfigEntry +) -> bool: """Unload Pure Energie config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - del hass.data[DOMAIN][entry.entry_id] - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/pure_energie/diagnostics.py b/homeassistant/components/pure_energie/diagnostics.py index 6e2b8ee7a35..de9134129ed 100644 --- a/homeassistant/components/pure_energie/diagnostics.py +++ b/homeassistant/components/pure_energie/diagnostics.py @@ -6,12 +6,10 @@ from dataclasses import asdict from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import PureEnergieDataUpdateCoordinator +from . 
import PureEnergieConfigEntry TO_REDACT = { CONF_HOST, @@ -20,18 +18,18 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: PureEnergieConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: PureEnergieDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - return { "entry": { "title": entry.title, "data": async_redact_data(entry.data, TO_REDACT), }, "data": { - "device": async_redact_data(asdict(coordinator.data.device), TO_REDACT), - "smartbridge": asdict(coordinator.data.smartbridge), + "device": async_redact_data( + asdict(entry.runtime_data.data.device), TO_REDACT + ), + "smartbridge": asdict(entry.runtime_data.data.smartbridge), }, } diff --git a/homeassistant/components/pure_energie/manifest.json b/homeassistant/components/pure_energie/manifest.json index ff52ec0ecf9..9efb1734f84 100644 --- a/homeassistant/components/pure_energie/manifest.json +++ b/homeassistant/components/pure_energie/manifest.json @@ -5,7 +5,6 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/pure_energie", "iot_class": "local_polling", - "quality_scale": "platinum", "requirements": ["gridnet==5.0.1"], "zeroconf": [ { diff --git a/homeassistant/components/pure_energie/sensor.py b/homeassistant/components/pure_energie/sensor.py index 85f4672a618..468858f117f 100644 --- a/homeassistant/components/pure_energie/sensor.py +++ b/homeassistant/components/pure_energie/sensor.py @@ -12,13 +12,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, UnitOfEnergy, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . import PureEnergieConfigEntry from .const import DOMAIN from .coordinator import PureEnergieData, PureEnergieDataUpdateCoordinator @@ -59,12 +59,13 @@ SENSORS: tuple[PureEnergieSensorEntityDescription, ...] 
= ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: PureEnergieConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up Pure Energie Sensors based on a config entry.""" async_add_entities( PureEnergieSensorEntity( - coordinator=hass.data[DOMAIN][entry.entry_id], description=description, entry=entry, ) @@ -83,21 +84,22 @@ class PureEnergieSensorEntity( def __init__( self, *, - coordinator: PureEnergieDataUpdateCoordinator, description: PureEnergieSensorEntityDescription, - entry: ConfigEntry, + entry: PureEnergieConfigEntry, ) -> None: """Initialize Pure Energie sensor.""" - super().__init__(coordinator=coordinator) + super().__init__(coordinator=entry.runtime_data) self.entity_id = f"{SENSOR_DOMAIN}.pem_{description.key}" self.entity_description = description - self._attr_unique_id = f"{coordinator.data.device.n2g_id}_{description.key}" + self._attr_unique_id = ( + f"{entry.runtime_data.data.device.n2g_id}_{description.key}" + ) self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.data.device.n2g_id)}, - configuration_url=f"http://{coordinator.config_entry.data[CONF_HOST]}", - sw_version=coordinator.data.device.firmware, - manufacturer=coordinator.data.device.manufacturer, - model=coordinator.data.device.model, + identifiers={(DOMAIN, entry.runtime_data.data.device.n2g_id)}, + configuration_url=f"http://{entry.runtime_data.config_entry.data[CONF_HOST]}", + sw_version=entry.runtime_data.data.device.firmware, + manufacturer=entry.runtime_data.data.device.manufacturer, + model=entry.runtime_data.data.device.model, name=entry.title, ) diff --git a/homeassistant/components/purpleair/__init__.py b/homeassistant/components/purpleair/__init__.py index fb86612597a..2d4022946b2 100644 --- a/homeassistant/components/purpleair/__init__.py +++ b/homeassistant/components/purpleair/__init__.py @@ -2,21 +2,9 @@ from __future__ import annotations -from collections.abc import Mapping -from typing import Any - -from aiopurpleair.models.sensors import SensorModel - from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_LATITUDE, - ATTR_LONGITUDE, - CONF_SHOW_ON_MAP, - Platform, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import PurpleAirDataUpdateCoordinator @@ -48,53 +36,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class PurpleAirEntity(CoordinatorEntity[PurpleAirDataUpdateCoordinator]): - """Define a base PurpleAir entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: PurpleAirDataUpdateCoordinator, - entry: ConfigEntry, - sensor_index: int, - ) -> None: - """Initialize.""" - super().__init__(coordinator) - - self._sensor_index = sensor_index - - self._attr_device_info = DeviceInfo( - configuration_url=self.coordinator.async_get_map_url(sensor_index), - hw_version=self.sensor_data.hardware, - identifiers={(DOMAIN, str(sensor_index))}, - manufacturer="PurpleAir, Inc.", - model=self.sensor_data.model, - name=self.sensor_data.name, - sw_version=self.sensor_data.firmware_version, - ) - self._entry = entry - - @property - def extra_state_attributes(self) -> Mapping[str, Any]: - """Return 
entity specific state attributes.""" - attrs = {} - - # Displaying the geography on the map relies upon putting the latitude/longitude - # in the entity attributes with "latitude" and "longitude" as the keys. - # Conversely, we can hide the location on the map by using other keys, like - # "lati" and "long": - if self._entry.options.get(CONF_SHOW_ON_MAP): - attrs[ATTR_LATITUDE] = self.sensor_data.latitude - attrs[ATTR_LONGITUDE] = self.sensor_data.longitude - else: - attrs["lati"] = self.sensor_data.latitude - attrs["long"] = self.sensor_data.longitude - return attrs - - @property - def sensor_data(self) -> SensorModel: - """Define a property to get this entity's SensorModel object.""" - return self.coordinator.data.data[self._sensor_index] diff --git a/homeassistant/components/purpleair/config_flow.py b/homeassistant/components/purpleair/config_flow.py index 050200f50d4..3ca7870b3cb 100644 --- a/homeassistant/components/purpleair/config_flow.py +++ b/homeassistant/components/purpleair/config_flow.py @@ -202,7 +202,6 @@ class PurpleAirConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize.""" self._flow_data: dict[str, Any] = {} - self._reauth_entry: ConfigEntry | None = None @staticmethod @callback @@ -210,7 +209,7 @@ class PurpleAirConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> PurpleAirOptionsFlowHandler: """Define the config flow to handle options.""" - return PurpleAirOptionsFlowHandler(config_entry) + return PurpleAirOptionsFlowHandler() async def async_step_by_coordinates( self, user_input: dict[str, Any] | None = None @@ -265,9 +264,6 @@ class PurpleAirConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -289,15 +285,9 @@ class PurpleAirConfigFlow(ConfigFlow, domain=DOMAIN): errors=validation.errors, ) - assert self._reauth_entry - - self.hass.config_entries.async_update_entry( - self._reauth_entry, data={CONF_API_KEY: api_key} + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data={CONF_API_KEY: api_key} ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -325,10 +315,9 @@ class PurpleAirConfigFlow(ConfigFlow, domain=DOMAIN): class PurpleAirOptionsFlowHandler(OptionsFlow): """Handle a PurpleAir options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize.""" self._flow_data: dict[str, Any] = {} - self.config_entry = config_entry @property def settings_schema(self) -> vol.Schema: diff --git a/homeassistant/components/purpleair/diagnostics.py b/homeassistant/components/purpleair/diagnostics.py index a3b3af857fb..f7c44b7e9b2 100644 --- a/homeassistant/components/purpleair/diagnostics.py +++ b/homeassistant/components/purpleair/diagnostics.py @@ -37,7 +37,7 @@ async def async_get_config_entry_diagnostics( return async_redact_data( { "entry": entry.as_dict(), - "data": coordinator.data.dict(), + "data": coordinator.data.model_dump(), }, TO_REDACT, ) diff --git a/homeassistant/components/purpleair/entity.py b/homeassistant/components/purpleair/entity.py new file mode 100644 index 00000000000..4f7be1874ed --- 
/dev/null +++ b/homeassistant/components/purpleair/entity.py @@ -0,0 +1,66 @@ +"""The PurpleAir integration.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any + +from aiopurpleair.models.sensors import SensorModel + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, CONF_SHOW_ON_MAP +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PurpleAirDataUpdateCoordinator + + +class PurpleAirEntity(CoordinatorEntity[PurpleAirDataUpdateCoordinator]): + """Define a base PurpleAir entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: PurpleAirDataUpdateCoordinator, + entry: ConfigEntry, + sensor_index: int, + ) -> None: + """Initialize.""" + super().__init__(coordinator) + + self._sensor_index = sensor_index + + self._attr_device_info = DeviceInfo( + configuration_url=self.coordinator.async_get_map_url(sensor_index), + hw_version=self.sensor_data.hardware, + identifiers={(DOMAIN, str(sensor_index))}, + manufacturer="PurpleAir, Inc.", + model=self.sensor_data.model, + name=self.sensor_data.name, + sw_version=self.sensor_data.firmware_version, + ) + self._entry = entry + + @property + def extra_state_attributes(self) -> Mapping[str, Any]: + """Return entity specific state attributes.""" + attrs = {} + + # Displaying the geography on the map relies upon putting the latitude/longitude + # in the entity attributes with "latitude" and "longitude" as the keys. + # Conversely, we can hide the location on the map by using other keys, like + # "lati" and "long": + if self._entry.options.get(CONF_SHOW_ON_MAP): + attrs[ATTR_LATITUDE] = self.sensor_data.latitude + attrs[ATTR_LONGITUDE] = self.sensor_data.longitude + else: + attrs["lati"] = self.sensor_data.latitude + attrs["long"] = self.sensor_data.longitude + return attrs + + @property + def sensor_data(self) -> SensorModel: + """Define a property to get this entity's SensorModel object.""" + return self.coordinator.data.data[self._sensor_index] diff --git a/homeassistant/components/purpleair/manifest.json b/homeassistant/components/purpleair/manifest.json index cf74365d6d8..87cb375c347 100644 --- a/homeassistant/components/purpleair/manifest.json +++ b/homeassistant/components/purpleair/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/purpleair", "iot_class": "cloud_polling", - "requirements": ["aiopurpleair==2022.12.1"] + "requirements": ["aiopurpleair==2023.12.0"] } diff --git a/homeassistant/components/purpleair/sensor.py b/homeassistant/components/purpleair/sensor.py index d1db77c2c31..9fb0249a360 100644 --- a/homeassistant/components/purpleair/sensor.py +++ b/homeassistant/components/purpleair/sensor.py @@ -27,9 +27,9 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import PurpleAirEntity from .const import CONF_SENSOR_INDICES, DOMAIN from .coordinator import PurpleAirDataUpdateCoordinator +from .entity import PurpleAirEntity CONCENTRATION_PARTICLES_PER_100_MILLILITERS = f"particles/100{UnitOfVolume.MILLILITERS}" diff --git a/homeassistant/components/push/camera.py b/homeassistant/components/push/camera.py index eb51ba49aa2..37ac6144d0d 100644 --- a/homeassistant/components/push/camera.py +++ b/homeassistant/components/push/camera.py @@ -13,10 +13,10 @@ import voluptuous as vol from homeassistant.components import webhook from homeassistant.components.camera import ( - DOMAIN, + DOMAIN as CAMERA_DOMAIN, PLATFORM_SCHEMA as CAMERA_PLATFORM_SCHEMA, - STATE_IDLE, Camera, + CameraState, ) from homeassistant.const import CONF_NAME, CONF_TIMEOUT, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant, callback @@ -121,7 +121,7 @@ class PushCamera(Camera): try: webhook.async_register( - self.hass, DOMAIN, self.name, self.webhook_id, handle_webhook + self.hass, CAMERA_DOMAIN, self.name, self.webhook_id, handle_webhook ) except ValueError: _LOGGER.error( @@ -135,7 +135,7 @@ class PushCamera(Camera): async def update_image(self, image, filename): """Update the camera image.""" - if self.state == STATE_IDLE: + if self.state == CameraState.IDLE: self._attr_is_recording = True self._last_trip = dt_util.utcnow() self.queue.clear() @@ -165,7 +165,7 @@ class PushCamera(Camera): ) -> bytes | None: """Return a still image response.""" if self.queue: - if self.state == STATE_IDLE: + if self.state == CameraState.IDLE: self.queue.rotate(1) self._current_image = self.queue[0] diff --git a/homeassistant/components/push/manifest.json b/homeassistant/components/push/manifest.json index 900ac25edbf..81cb2dce00c 100644 --- a/homeassistant/components/push/manifest.json +++ b/homeassistant/components/push/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@dgomes"], "dependencies": ["webhook"], "documentation": "https://www.home-assistant.io/integrations/push", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/pushbullet/notify.py b/homeassistant/components/pushbullet/notify.py index 96f78c4a35d..f2e70695b27 100644 --- a/homeassistant/components/pushbullet/notify.py +++ b/homeassistant/components/pushbullet/notify.py @@ -92,7 +92,7 @@ class PushBulletNotificationService(BaseNotificationService): # This also seems to work to send to all devices in own account. if ttype == "email": self._push_data(message, title, data, self.pushbullet, email=tname) - _LOGGER.info("Sent notification to email %s", tname) + _LOGGER.debug("Sent notification to email %s", tname) continue # Target is sms, send directly, don't use a target object. 
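# --- Editor's aside (illustration only, not part of the patch) ----------------
# The purpleair hunk above and the pvoutput, pvpc_hourly_pricing, pyload and
# radarr hunks below all migrate re-authentication handling from a hand-rolled
# "reauth entry" attribute to the ConfigFlow helpers _get_reauth_entry() and
# async_update_reload_and_abort(); options flow handlers likewise stop taking
# the config entry in __init__ because the base OptionsFlow exposes
# self.config_entry. A minimal sketch of the resulting pattern, assuming a
# hypothetical "example" integration with a made-up validate_auth() helper,
# schema and option key (none of these names come from the patch):

from collections.abc import Mapping
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant

STEP_REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str})


async def validate_auth(hass: HomeAssistant, api_key: str) -> None:
    """Hypothetical credential check; raise on invalid credentials."""


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Sketch of the reauth pattern this change set converges on."""

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Start reauth; the entry is fetched later via _get_reauth_entry()."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Collect new credentials, then update, reload and abort in one call."""
        errors: dict[str, str] = {}
        if user_input is not None:
            try:
                await validate_auth(self.hass, user_input[CONF_API_KEY])
            except Exception:  # real flows catch their integration-specific errors
                errors["base"] = "invalid_auth"
            else:
                # Updates entry.data, reloads the entry and aborts with
                # "reauth_successful" in a single helper call.
                return self.async_update_reload_and_abort(
                    self._get_reauth_entry(), data_updates=user_input
                )
        return self.async_show_form(
            step_id="reauth_confirm", data_schema=STEP_REAUTH_SCHEMA, errors=errors
        )


class ExampleOptionsFlowHandler(OptionsFlow):
    """Options flow without a custom __init__; self.config_entry is provided."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show the current option value and store the user's changes."""
        if user_input is not None:
            return self.async_create_entry(title="", data=user_input)
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        "example_option",
                        default=self.config_entry.options.get("example_option", False),
                    ): bool
                }
            ),
        )
# ------------------------------------------------------------------------------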
@@ -100,7 +100,7 @@ class PushBulletNotificationService(BaseNotificationService): self._push_data( message, title, data, self.pushbullet, phonenumber=tname ) - _LOGGER.info("Sent sms notification to %s", tname) + _LOGGER.debug("Sent sms notification to %s", tname) continue if ttype not in self.pbtargets: diff --git a/homeassistant/components/pushsafer/manifest.json b/homeassistant/components/pushsafer/manifest.json index e9018e2a2ba..8b4ec94b9a5 100644 --- a/homeassistant/components/pushsafer/manifest.json +++ b/homeassistant/components/pushsafer/manifest.json @@ -3,5 +3,6 @@ "name": "Pushsafer", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/pushsafer", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/pvoutput/config_flow.py b/homeassistant/components/pvoutput/config_flow.py index 9d18952e7b4..ad2d759056f 100644 --- a/homeassistant/components/pvoutput/config_flow.py +++ b/homeassistant/components/pvoutput/config_flow.py @@ -8,7 +8,7 @@ from typing import Any from pvo import PVOutput, PVOutputAuthenticationError, PVOutputError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -33,7 +33,6 @@ class PVOutputFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 imported_name: str | None = None - reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -88,9 +87,6 @@ class PVOutputFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with PVOutput.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -99,29 +95,22 @@ class PVOutputFlowHandler(ConfigFlow, domain=DOMAIN): """Handle re-authentication with PVOutput.""" errors = {} - if user_input is not None and self.reauth_entry: + if user_input is not None: + reauth_entry = self._get_reauth_entry() try: await validate_input( self.hass, api_key=user_input[CONF_API_KEY], - system_id=self.reauth_entry.data[CONF_SYSTEM_ID], + system_id=reauth_entry.data[CONF_SYSTEM_ID], ) except PVOutputAuthenticationError: errors["base"] = "invalid_auth" except PVOutputError: errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data={ - **self.reauth_entry.data, - CONF_API_KEY: user_input[CONF_API_KEY], - }, + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/pvoutput/manifest.json b/homeassistant/components/pvoutput/manifest.json index 61bd6fd6164..9dbdad53bcb 100644 --- a/homeassistant/components/pvoutput/manifest.json +++ b/homeassistant/components/pvoutput/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/pvoutput", "integration_type": "device", "iot_class": "cloud_polling", - "quality_scale": "platinum", - 
"requirements": ["pvo==2.1.1"] + "requirements": ["pvo==2.2.0"] } diff --git a/homeassistant/components/pvpc_hourly_pricing/config_flow.py b/homeassistant/components/pvpc_hourly_pricing/config_flow.py index 239e1bcb0e9..3c6b510004a 100644 --- a/homeassistant/components/pvpc_hourly_pricing/config_flow.py +++ b/homeassistant/components/pvpc_hourly_pricing/config_flow.py @@ -9,10 +9,11 @@ from aiopvpc import DEFAULT_POWER_KW, PVPCData import voluptuous as vol from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_TOKEN, CONF_NAME from homeassistant.core import callback @@ -48,7 +49,6 @@ class TariffSelectorConfigFlow(ConfigFlow, domain=DOMAIN): _use_api_token: bool = False _api_token: str | None = None _api: PVPCData | None = None - _reauth_entry: ConfigEntry | None = None @staticmethod @callback @@ -56,7 +56,7 @@ class TariffSelectorConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> PVPCOptionsFlowHandler: """Get the options flow for this handler.""" - return PVPCOptionsFlowHandler(config_entry) + return PVPCOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -141,12 +141,10 @@ class TariffSelectorConfigFlow(ConfigFlow, domain=DOMAIN): ATTR_POWER_P3: self._power_p3, CONF_API_TOKEN: self._api_token if self._use_api_token else None, } - if self._reauth_entry: - self.hass.config_entries.async_update_entry(self._reauth_entry, data=data) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data ) - return self.async_abort(reason="reauth_successful") assert self._name is not None return self.async_create_entry(title=self._name, data=data) @@ -155,9 +153,6 @@ class TariffSelectorConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with ESIOS Token.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) self._api_token = entry_data.get(CONF_API_TOKEN) self._use_api_token = self._api_token is not None self._name = entry_data[CONF_NAME] @@ -183,7 +178,7 @@ class TariffSelectorConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="reauth_confirm", data_schema=data_schema) -class PVPCOptionsFlowHandler(OptionsFlowWithConfigEntry): +class PVPCOptionsFlowHandler(OptionsFlow): """Handle PVPC options.""" _power: float | None = None @@ -204,7 +199,7 @@ class PVPCOptionsFlowHandler(OptionsFlowWithConfigEntry): ) # Fill options with entry data - api_token = self.options.get( + api_token = self.config_entry.options.get( CONF_API_TOKEN, self.config_entry.data.get(CONF_API_TOKEN) ) return self.async_show_form( @@ -234,13 +229,11 @@ class PVPCOptionsFlowHandler(OptionsFlowWithConfigEntry): ) # Fill options with entry data - power = self.options.get(ATTR_POWER, self.config_entry.data[ATTR_POWER]) - power_valley = self.options.get( - ATTR_POWER_P3, self.config_entry.data[ATTR_POWER_P3] - ) - api_token = self.options.get( - CONF_API_TOKEN, self.config_entry.data.get(CONF_API_TOKEN) - ) + options = self.config_entry.options + data = self.config_entry.data + power = options.get(ATTR_POWER, data[ATTR_POWER]) + power_valley = options.get(ATTR_POWER_P3, data[ATTR_POWER_P3]) + api_token = options.get(CONF_API_TOKEN, 
data.get(CONF_API_TOKEN)) use_api_token = api_token is not None schema = vol.Schema( { diff --git a/homeassistant/components/pvpc_hourly_pricing/manifest.json b/homeassistant/components/pvpc_hourly_pricing/manifest.json index 8db978135f6..ccddbece7e4 100644 --- a/homeassistant/components/pvpc_hourly_pricing/manifest.json +++ b/homeassistant/components/pvpc_hourly_pricing/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/pvpc_hourly_pricing", "iot_class": "cloud_polling", "loggers": ["aiopvpc"], - "quality_scale": "platinum", "requirements": ["aiopvpc==4.2.2"] } diff --git a/homeassistant/components/pyload/config_flow.py b/homeassistant/components/pyload/config_flow.py index 2f4f9519d30..3e6cbd33bb3 100644 --- a/homeassistant/components/pyload/config_flow.py +++ b/homeassistant/components/pyload/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from typing import TYPE_CHECKING, Any +from typing import Any from aiohttp import CookieJar from pyloadapi.api import PyLoadAPI @@ -30,7 +30,6 @@ from homeassistant.helpers.selector import ( TextSelectorType, ) -from . import PyLoadConfigEntry from .const import DEFAULT_HOST, DEFAULT_NAME, DEFAULT_PORT, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -101,7 +100,6 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for pyLoad.""" VERSION = 1 - config_entry: PyLoadConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -133,16 +131,16 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from yaml.""" config = { - CONF_NAME: import_info.get(CONF_NAME), - CONF_HOST: import_info.get(CONF_HOST, DEFAULT_HOST), - CONF_PASSWORD: import_info.get(CONF_PASSWORD, ""), - CONF_PORT: import_info.get(CONF_PORT, DEFAULT_PORT), - CONF_SSL: import_info.get(CONF_SSL, False), - CONF_USERNAME: import_info.get(CONF_USERNAME, ""), + CONF_NAME: import_data.get(CONF_NAME), + CONF_HOST: import_data.get(CONF_HOST, DEFAULT_HOST), + CONF_PASSWORD: import_data.get(CONF_PASSWORD, ""), + CONF_PORT: import_data.get(CONF_PORT, DEFAULT_PORT), + CONF_SSL: import_data.get(CONF_SSL, False), + CONF_USERNAME: import_data.get(CONF_USERNAME, ""), CONF_VERIFY_SSL: False, } @@ -156,9 +154,6 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -166,12 +161,10 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors = {} - - if TYPE_CHECKING: - assert self.config_entry + reauth_entry = self._get_reauth_entry() if user_input is not None: - new_input = self.config_entry.data | user_input + new_input = reauth_entry.data | user_input try: await validate_input(self.hass, new_input) except (CannotConnect, ParserError): @@ -182,9 +175,7 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - return self.async_update_reload_and_abort( - self.config_entry, data=new_input - ) + 
return self.async_update_reload_and_abort(reauth_entry, data=new_input) return self.async_show_form( step_id="reauth_confirm", @@ -193,30 +184,19 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): { CONF_USERNAME: user_input[CONF_USERNAME] if user_input is not None - else self.config_entry.data[CONF_USERNAME] + else reauth_entry.data[CONF_USERNAME] }, ), - description_placeholders={CONF_NAME: self.config_entry.data[CONF_USERNAME]}, + description_placeholders={CONF_NAME: reauth_entry.data[CONF_USERNAME]}, errors=errors, ) async def async_step_reconfigure( - self, entry_data: Mapping[str, Any] - ) -> ConfigFlowResult: - """Perform a reconfiguration.""" - self.config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the reconfiguration flow.""" errors = {} - - if TYPE_CHECKING: - assert self.config_entry + reconfig_entry = self._get_reconfigure_entry() if user_input is not None: try: @@ -230,18 +210,17 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" else: return self.async_update_reload_and_abort( - self.config_entry, + reconfig_entry, data=user_input, reload_even_if_entry_is_unchanged=False, - reason="reconfigure_successful", ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( STEP_USER_DATA_SCHEMA, - user_input or self.config_entry.data, + user_input or reconfig_entry.data, ), - description_placeholders={CONF_NAME: self.config_entry.data[CONF_USERNAME]}, + description_placeholders={CONF_NAME: reconfig_entry.data[CONF_USERNAME]}, errors=errors, ) diff --git a/homeassistant/components/pyload/manifest.json b/homeassistant/components/pyload/manifest.json index 788cdd1eb05..e21167cf10b 100644 --- a/homeassistant/components/pyload/manifest.json +++ b/homeassistant/components/pyload/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "local_polling", "loggers": ["pyloadapi"], - "quality_scale": "platinum", "requirements": ["PyLoadAPI==1.3.2"] } diff --git a/homeassistant/components/pyload/strings.json b/homeassistant/components/pyload/strings.json index bbe6989f5e7..4ae4c4fee67 100644 --- a/homeassistant/components/pyload/strings.json +++ b/homeassistant/components/pyload/strings.json @@ -15,7 +15,7 @@ "port": "pyLoad uses port 8000 by default." 
} }, - "reconfigure_confirm": { + "reconfigure": { "data": { "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", diff --git a/homeassistant/components/python_script/__init__.py b/homeassistant/components/python_script/__init__.py index 70e9c5b0d29..af773278029 100644 --- a/homeassistant/components/python_script/__init__.py +++ b/homeassistant/components/python_script/__init__.py @@ -1,5 +1,6 @@ """Component to allow running Python scripts.""" +from collections.abc import Mapping, Sequence import datetime import glob import logging @@ -7,6 +8,7 @@ from numbers import Number import operator import os import time +import types from typing import Any from RestrictedPython import ( @@ -167,6 +169,20 @@ IOPERATOR_TO_OPERATOR = { } +def guarded_import( + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] = (), + level: int = 0, +) -> types.ModuleType: + """Guard imports.""" + # Allow import of _strptime needed by datetime.datetime.strptime + if name == "_strptime": + return __import__(name, globals, locals, fromlist, level) + raise ScriptError(f"Not allowed to import {name}") + + def guarded_inplacevar(op: str, target: Any, operand: Any) -> Any: """Implement augmented-assign (+=, -=, etc.) operators for restricted code. @@ -232,6 +248,7 @@ def execute(hass, filename, source, data=None, return_response=False): return getattr(obj, name, default) extra_builtins = { + "__import__": guarded_import, "datetime": datetime, "sorted": sorted, "time": TimeWrapper(), diff --git a/homeassistant/components/python_script/icons.json b/homeassistant/components/python_script/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/python_script/icons.json +++ b/homeassistant/components/python_script/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/python_script/manifest.json b/homeassistant/components/python_script/manifest.json index dcc0e38c737..4348fdd9911 100644 --- a/homeassistant/components/python_script/manifest.json +++ b/homeassistant/components/python_script/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/python_script", "loggers": ["RestrictedPython"], "quality_scale": "internal", - "requirements": ["RestrictedPython==7.0"] + "requirements": ["RestrictedPython==7.4"] } diff --git a/homeassistant/components/qbittorrent/icons.json b/homeassistant/components/qbittorrent/icons.json index 68fc1020dae..cede127ebe8 100644 --- a/homeassistant/components/qbittorrent/icons.json +++ b/homeassistant/components/qbittorrent/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "get_torrents": "mdi:file-arrow-up-down-outline", - "get_all_torrents": "mdi:file-arrow-up-down-outline" + "get_torrents": { + "service": "mdi:file-arrow-up-down-outline" + }, + "get_all_torrents": { + "service": "mdi:file-arrow-up-down-outline" + } } } diff --git a/homeassistant/components/qbittorrent/sensor.py b/homeassistant/components/qbittorrent/sensor.py index cd65fb766e4..67eb856bb83 100644 --- a/homeassistant/components/qbittorrent/sensor.py +++ b/homeassistant/components/qbittorrent/sensor.py @@ -11,6 +11,7 @@ from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, SensorEntityDescription, + SensorStateClass, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_IDLE, 
UnitOfDataRate @@ -79,6 +80,7 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_DOWNLOAD_SPEED, translation_key="download_speed", + state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.DATA_RATE, native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, suggested_display_precision=2, @@ -88,6 +90,7 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_UPLOAD_SPEED, translation_key="upload_speed", + state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.DATA_RATE, native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, suggested_display_precision=2, @@ -97,13 +100,11 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_ALL_TORRENTS, translation_key="all_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states(coordinator, []), ), QBittorrentSensorEntityDescription( key=SENSOR_TYPE_ACTIVE_TORRENTS, translation_key="active_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states( coordinator, ["downloading", "uploading"] ), @@ -111,7 +112,6 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_INACTIVE_TORRENTS, translation_key="inactive_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states( coordinator, ["stalledDL", "stalledUP"] ), @@ -119,7 +119,6 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_PAUSED_TORRENTS, translation_key="paused_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states( coordinator, ["pausedDL", "pausedUP"] ), @@ -177,8 +176,12 @@ def count_torrents_in_states( # When torrents are not in the returned data, there are none, return 0. 
try: torrents = cast(Mapping[str, Mapping], coordinator.data.get("torrents")) + if torrents is None: + return 0 + if not states: return len(torrents) + return len( [torrent for torrent in torrents.values() if torrent.get("state") in states] ) diff --git a/homeassistant/components/qbittorrent/strings.json b/homeassistant/components/qbittorrent/strings.json index 88015dad5c3..9c9ee371737 100644 --- a/homeassistant/components/qbittorrent/strings.json +++ b/homeassistant/components/qbittorrent/strings.json @@ -36,16 +36,20 @@ } }, "active_torrents": { - "name": "Active torrents" + "name": "Active torrents", + "unit_of_measurement": "torrents" }, "inactive_torrents": { - "name": "Inactive torrents" + "name": "Inactive torrents", + "unit_of_measurement": "[%key:component::qbittorrent::entity::sensor::active_torrents::unit_of_measurement%]" }, "paused_torrents": { - "name": "Paused torrents" + "name": "Paused torrents", + "unit_of_measurement": "[%key:component::qbittorrent::entity::sensor::active_torrents::unit_of_measurement%]" }, "all_torrents": { - "name": "All torrents" + "name": "All torrents", + "unit_of_measurement": "[%key:component::qbittorrent::entity::sensor::active_torrents::unit_of_measurement%]" } }, "switch": { diff --git a/homeassistant/components/qld_bushfire/manifest.json b/homeassistant/components/qld_bushfire/manifest.json index 282a931bf05..79a29e6fddb 100644 --- a/homeassistant/components/qld_bushfire/manifest.json +++ b/homeassistant/components/qld_bushfire/manifest.json @@ -6,5 +6,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["georss_qld_bushfire_alert_client"], + "quality_scale": "legacy", "requirements": ["georss-qld-bushfire-alert-client==0.8"] } diff --git a/homeassistant/components/qnap/sensor.py b/homeassistant/components/qnap/sensor.py index 526516bfcdd..383a4e5f572 100644 --- a/homeassistant/components/qnap/sensor.py +++ b/homeassistant/components/qnap/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( - ATTR_NAME, PERCENTAGE, EntityCategory, UnitOfDataRate, @@ -375,17 +374,6 @@ class QNAPMemorySensor(QNAPSensor): return None - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["system_stats"]["memory"] - size = round(float(data["total"]) / 1024, 2) - return {ATTR_MEMORY_SIZE: f"{size} {UnitOfInformation.GIBIBYTES}"} - return None - class QNAPNetworkSensor(QNAPSensor): """A QNAP sensor that monitors network stats.""" @@ -414,22 +402,6 @@ class QNAPNetworkSensor(QNAPSensor): return None - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["system_stats"]["nics"][self.monitor_device] - return { - ATTR_IP: data["ip"], - ATTR_MASK: data["mask"], - ATTR_MAC: data["mac"], - ATTR_MAX_SPEED: data["max_speed"], - ATTR_PACKETS_ERR: data["err_packets"], - } - return None - class QNAPSystemSensor(QNAPSensor): """A QNAP sensor that monitors overall system health.""" @@ -455,25 +427,6 @@ class QNAPSystemSensor(QNAPSensor): return None - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> 
dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["system_stats"] - days = int(data["uptime"]["days"]) - hours = int(data["uptime"]["hours"]) - minutes = int(data["uptime"]["minutes"]) - - return { - ATTR_NAME: data["system"]["name"], - ATTR_MODEL: data["system"]["model"], - ATTR_SERIAL: data["system"]["serial_number"], - ATTR_UPTIME: f"{days:0>2d}d {hours:0>2d}h {minutes:0>2d}m", - } - return None - class QNAPDriveSensor(QNAPSensor): """A QNAP sensor that monitors HDD/SSD drive stats.""" @@ -533,17 +486,3 @@ class QNAPVolumeSensor(QNAPSensor): return used_gb / total_gb * 100 return None - - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["volumes"][self.monitor_device] - total_gb = int(data["total_size"]) / 1024 / 1024 / 1024 - - return { - ATTR_VOLUME_SIZE: f"{round(total_gb, 1)} {UnitOfInformation.GIBIBYTES}" - } - return None diff --git a/homeassistant/components/qnap_qsw/diagnostics.py b/homeassistant/components/qnap_qsw/diagnostics.py index e732c551a40..6f42fb82cb7 100644 --- a/homeassistant/components/qnap_qsw/diagnostics.py +++ b/homeassistant/components/qnap_qsw/diagnostics.py @@ -6,7 +6,7 @@ from typing import Any from aioqsw.const import QSD_MAC, QSD_SERIAL -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_UNIQUE_ID, CONF_USERNAME from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/qnap_qsw/sensor.py b/homeassistant/components/qnap_qsw/sensor.py index 009bc63b2c6..45ec1828b9d 100644 --- a/homeassistant/components/qnap_qsw/sensor.py +++ b/homeassistant/components/qnap_qsw/sensor.py @@ -2,7 +2,9 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass, replace +from datetime import datetime from typing import Final from aioqsw.const import ( @@ -26,8 +28,11 @@ from aioqsw.const import ( QSD_TX_OCTETS, QSD_TX_SPEED, QSD_UPTIME_SECONDS, + QSD_UPTIME_TIMESTAMP, ) +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -43,8 +48,10 @@ from homeassistant.const import ( UnitOfTime, ) from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import UNDEFINED +from homeassistant.helpers.typing import UNDEFINED, StateType +from homeassistant.util import dt as dt_util from .const import ATTR_MAX, DOMAIN, QSW_COORD_DATA, RPM from .coordinator import QswDataCoordinator @@ -58,6 +65,17 @@ class QswSensorEntityDescription(SensorEntityDescription, QswEntityDescription): attributes: dict[str, list[str]] | None = None qsw_type: QswEntityType | None = None sep_key: str = "_" + value_fn: Callable[[str], datetime | StateType] = lambda value: value + + +DEPRECATED_UPTIME_SECONDS = QswSensorEntityDescription( + translation_key="uptime", + key=QSD_SYSTEM_TIME, + entity_category=EntityCategory.DIAGNOSTIC, + 
native_unit_of_measurement=UnitOfTime.SECONDS, + state_class=SensorStateClass.TOTAL_INCREASING, + subkey=QSD_UPTIME_SECONDS, +) SENSOR_TYPES: Final[tuple[QswSensorEntityDescription, ...]] = ( @@ -140,12 +158,12 @@ SENSOR_TYPES: Final[tuple[QswSensorEntityDescription, ...]] = ( subkey=QSD_TX_SPEED, ), QswSensorEntityDescription( - translation_key="uptime", + translation_key="uptime_timestamp", key=QSD_SYSTEM_TIME, + device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfTime.SECONDS, - state_class=SensorStateClass.TOTAL_INCREASING, - subkey=QSD_UPTIME_SECONDS, + subkey=QSD_UPTIME_TIMESTAMP, + value_fn=dt_util.parse_datetime, ), ) @@ -337,6 +355,46 @@ async def async_setup_entry( ) entities.append(QswSensor(coordinator, _desc, entry, port_id)) + # Can be removed in HA 2025.5.0 + entity_reg = er.async_get(hass) + reg_entities = er.async_entries_for_config_entry(entity_reg, entry.entry_id) + for entity in reg_entities: + if entity.domain == "sensor" and entity.unique_id.endswith( + ("_uptime", "_uptime_seconds") + ): + entity_id = entity.entity_id + + if entity.disabled: + entity_reg.async_remove(entity_id) + continue + + if ( + DEPRECATED_UPTIME_SECONDS.key in coordinator.data + and DEPRECATED_UPTIME_SECONDS.subkey + in coordinator.data[DEPRECATED_UPTIME_SECONDS.key] + ): + entities.append( + QswSensor(coordinator, DEPRECATED_UPTIME_SECONDS, entry) + ) + + entity_automations = automations_with_entity(hass, entity_id) + entity_scripts = scripts_with_entity(hass, entity_id) + + for item in entity_automations + entity_scripts: + ir.async_create_issue( + hass, + DOMAIN, + f"uptime_seconds_deprecated_{entity_id}_{item}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key="uptime_seconds_deprecated", + translation_placeholders={ + "entity": entity_id, + "info": item, + }, + ) + async_add_entities(entities) @@ -374,5 +432,5 @@ class QswSensor(QswSensorEntity, SensorEntity): self.entity_description.subkey, self.entity_description.qsw_type, ) - self._attr_native_value = value + self._attr_native_value = self.entity_description.value_fn(value) super()._async_update_attrs() diff --git a/homeassistant/components/qnap_qsw/strings.json b/homeassistant/components/qnap_qsw/strings.json index c8cd5ffb861..462e66a25c3 100644 --- a/homeassistant/components/qnap_qsw/strings.json +++ b/homeassistant/components/qnap_qsw/strings.json @@ -52,7 +52,16 @@ }, "uptime": { "name": "Uptime" + }, + "uptime_timestamp": { + "name": "Uptime timestamp" } } + }, + "issues": { + "uptime_seconds_deprecated": { + "title": "QNAP QSW uptime seconds sensor deprecated", + "description": "The QNAP QSW uptime seconds sensor entity is deprecated and will be removed in HA 2025.2.0.\nHome Assistant detected that entity `{entity}` is being used in `{info}`\n\nYou should remove the uptime seconds entity from `{info}` then click submit to fix this issue." 
+ } } } diff --git a/homeassistant/components/qrcode/manifest.json b/homeassistant/components/qrcode/manifest.json index 14f2d093f37..9634d45b069 100644 --- a/homeassistant/components/qrcode/manifest.json +++ b/homeassistant/components/qrcode/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/qrcode", "iot_class": "calculated", "loggers": ["pyzbar"], - "requirements": ["Pillow==10.4.0", "pyzbar==0.1.7"] + "quality_scale": "legacy", + "requirements": ["Pillow==11.0.0", "pyzbar==0.1.7"] } diff --git a/homeassistant/components/quantum_gateway/device_tracker.py b/homeassistant/components/quantum_gateway/device_tracker.py index 88cb5d60028..dc68472d94e 100644 --- a/homeassistant/components/quantum_gateway/device_tracker.py +++ b/homeassistant/components/quantum_gateway/device_tracker.py @@ -9,7 +9,7 @@ from requests.exceptions import RequestException import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -35,7 +35,7 @@ def get_scanner( hass: HomeAssistant, config: ConfigType ) -> QuantumGatewayDeviceScanner | None: """Validate the configuration and return a Quantum Gateway scanner.""" - scanner = QuantumGatewayDeviceScanner(config[DOMAIN]) + scanner = QuantumGatewayDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None diff --git a/homeassistant/components/quantum_gateway/manifest.json b/homeassistant/components/quantum_gateway/manifest.json index 4494e5a2576..98c6c715417 100644 --- a/homeassistant/components/quantum_gateway/manifest.json +++ b/homeassistant/components/quantum_gateway/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@cisasteelersfan"], "documentation": "https://www.home-assistant.io/integrations/quantum_gateway", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["quantum-gateway==0.0.8"] } diff --git a/homeassistant/components/qvr_pro/icons.json b/homeassistant/components/qvr_pro/icons.json index 556a8d40752..3b57387d251 100644 --- a/homeassistant/components/qvr_pro/icons.json +++ b/homeassistant/components/qvr_pro/icons.json @@ -1,6 +1,10 @@ { "services": { - "start_record": "mdi:record-rec", - "stop_record": "mdi:stop" + "start_record": { + "service": "mdi:record-rec" + }, + "stop_record": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/qvr_pro/manifest.json b/homeassistant/components/qvr_pro/manifest.json index 9c0e92698df..2553e1d27c4 100644 --- a/homeassistant/components/qvr_pro/manifest.json +++ b/homeassistant/components/qvr_pro/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/qvr_pro", "iot_class": "local_polling", "loggers": ["pyqvrpro"], + "quality_scale": "legacy", "requirements": ["pyqvrpro==0.52"] } diff --git a/homeassistant/components/qwikswitch/__init__.py b/homeassistant/components/qwikswitch/__init__.py index eea110a02d7..776e32dded1 100644 --- a/homeassistant/components/qwikswitch/__init__.py +++ b/homeassistant/components/qwikswitch/__init__.py @@ -9,7 +9,6 @@ from pyqwikswitch.qwikswitch import CMD_BUTTONS, QS_CMD, QS_ID, SENSORS, QSType import voluptuous as vol from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA -from homeassistant.components.light import ATTR_BRIGHTNESS from homeassistant.const import ( CONF_SENSORS, CONF_SWITCHES, @@ -22,11 +21,7 @@ from homeassistant.core import HomeAssistant, callback from 
homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import load_platform -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType _LOGGER = logging.getLogger(__name__) @@ -70,70 +65,6 @@ CONFIG_SCHEMA = vol.Schema( ) -class QSEntity(Entity): - """Qwikswitch Entity base.""" - - _attr_should_poll = False - - def __init__(self, qsid, name): - """Initialize the QSEntity.""" - self._name = name - self.qsid = qsid - - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def unique_id(self): - """Return a unique identifier for this sensor.""" - return f"qs{self.qsid}" - - @callback - def update_packet(self, packet): - """Receive update packet from QSUSB. Match dispather_send signature.""" - self.async_write_ha_state() - - async def async_added_to_hass(self): - """Listen for updates from QSUSb via dispatcher.""" - self.async_on_remove( - async_dispatcher_connect(self.hass, self.qsid, self.update_packet) - ) - - -class QSToggleEntity(QSEntity): - """Representation of a Qwikswitch Toggle Entity. - - Implemented: - - QSLight extends QSToggleEntity and Light[2] (ToggleEntity[1]) - - QSSwitch extends QSToggleEntity and SwitchEntity[3] (ToggleEntity[1]) - - [1] /helpers/entity.py - [2] /components/light/__init__.py - [3] /components/switch/__init__.py - """ - - def __init__(self, qsid, qsusb): - """Initialize the ToggleEntity.""" - self.device = qsusb.devices[qsid] - super().__init__(qsid, self.device.name) - - @property - def is_on(self): - """Check if device is on (non-zero).""" - return self.device.value > 0 - - async def async_turn_on(self, **kwargs): - """Turn the device on.""" - new = kwargs.get(ATTR_BRIGHTNESS, 255) - self.hass.data[DOMAIN].devices.set_value(self.qsid, new) - - async def async_turn_off(self, **_): - """Turn the device off.""" - self.hass.data[DOMAIN].devices.set_value(self.qsid, 0) - - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Qwiskswitch component setup.""" diff --git a/homeassistant/components/qwikswitch/binary_sensor.py b/homeassistant/components/qwikswitch/binary_sensor.py index b35908da12c..195433ebc17 100644 --- a/homeassistant/components/qwikswitch/binary_sensor.py +++ b/homeassistant/components/qwikswitch/binary_sensor.py @@ -11,7 +11,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN as QWIKSWITCH, QSEntity +from . import DOMAIN as QWIKSWITCH +from .entity import QSEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/qwikswitch/entity.py b/homeassistant/components/qwikswitch/entity.py new file mode 100644 index 00000000000..3a2ec5a9206 --- /dev/null +++ b/homeassistant/components/qwikswitch/entity.py @@ -0,0 +1,74 @@ +"""Support for Qwikswitch devices.""" + +from __future__ import annotations + +from homeassistant.components.light import ATTR_BRIGHTNESS +from homeassistant.core import callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from . 
import DOMAIN + + +class QSEntity(Entity): + """Qwikswitch Entity base.""" + + _attr_should_poll = False + + def __init__(self, qsid, name): + """Initialize the QSEntity.""" + self._name = name + self.qsid = qsid + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def unique_id(self): + """Return a unique identifier for this sensor.""" + return f"qs{self.qsid}" + + @callback + def update_packet(self, packet): + """Receive update packet from QSUSB. Match dispather_send signature.""" + self.async_write_ha_state() + + async def async_added_to_hass(self): + """Listen for updates from QSUSb via dispatcher.""" + self.async_on_remove( + async_dispatcher_connect(self.hass, self.qsid, self.update_packet) + ) + + +class QSToggleEntity(QSEntity): + """Representation of a Qwikswitch Toggle Entity. + + Implemented: + - QSLight extends QSToggleEntity and Light[2] (ToggleEntity[1]) + - QSSwitch extends QSToggleEntity and SwitchEntity[3] (ToggleEntity[1]) + + [1] /helpers/entity.py + [2] /components/light/__init__.py + [3] /components/switch/__init__.py + """ + + def __init__(self, qsid, qsusb): + """Initialize the ToggleEntity.""" + self.device = qsusb.devices[qsid] + super().__init__(qsid, self.device.name) + + @property + def is_on(self): + """Check if device is on (non-zero).""" + return self.device.value > 0 + + async def async_turn_on(self, **kwargs): + """Turn the device on.""" + new = kwargs.get(ATTR_BRIGHTNESS, 255) + self.hass.data[DOMAIN].devices.set_value(self.qsid, new) + + async def async_turn_off(self, **_): + """Turn the device off.""" + self.hass.data[DOMAIN].devices.set_value(self.qsid, 0) diff --git a/homeassistant/components/qwikswitch/light.py b/homeassistant/components/qwikswitch/light.py index 12c2763d3a4..073f7bb873a 100644 --- a/homeassistant/components/qwikswitch/light.py +++ b/homeassistant/components/qwikswitch/light.py @@ -7,7 +7,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN as QWIKSWITCH, QSToggleEntity +from . import DOMAIN as QWIKSWITCH +from .entity import QSToggleEntity async def async_setup_platform( diff --git a/homeassistant/components/qwikswitch/manifest.json b/homeassistant/components/qwikswitch/manifest.json index e30ebffbf2f..750e104d1a3 100644 --- a/homeassistant/components/qwikswitch/manifest.json +++ b/homeassistant/components/qwikswitch/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/qwikswitch", "iot_class": "local_push", "loggers": ["pyqwikswitch"], + "quality_scale": "legacy", "requirements": ["pyqwikswitch==0.93"] } diff --git a/homeassistant/components/qwikswitch/sensor.py b/homeassistant/components/qwikswitch/sensor.py index 856949d8926..64e560b4f08 100644 --- a/homeassistant/components/qwikswitch/sensor.py +++ b/homeassistant/components/qwikswitch/sensor.py @@ -12,7 +12,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN as QWIKSWITCH, QSEntity +from . 
import DOMAIN as QWIKSWITCH +from .entity import QSEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/qwikswitch/switch.py b/homeassistant/components/qwikswitch/switch.py index 1623bfb3361..ec47b4d99f2 100644 --- a/homeassistant/components/qwikswitch/switch.py +++ b/homeassistant/components/qwikswitch/switch.py @@ -7,7 +7,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN as QWIKSWITCH, QSToggleEntity +from . import DOMAIN as QWIKSWITCH +from .entity import QSToggleEntity async def async_setup_platform( diff --git a/homeassistant/components/rabbitair/fan.py b/homeassistant/components/rabbitair/fan.py index ba1896cba2f..cfbee0be67c 100644 --- a/homeassistant/components/rabbitair/fan.py +++ b/homeassistant/components/rabbitair/fan.py @@ -55,7 +55,6 @@ class RabbitAirFanEntity(RabbitAirBaseEntity, FanEntity): | FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/rachio/__init__.py b/homeassistant/components/rachio/__init__.py index 6976d3f5ba6..d6cdd2701b6 100644 --- a/homeassistant/components/rachio/__init__.py +++ b/homeassistant/components/rachio/__init__.py @@ -83,7 +83,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if not person.controllers and not person.base_stations: _LOGGER.error("No Rachio devices found in account %s", person.username) return False - _LOGGER.info( + _LOGGER.debug( ( "%d Rachio device(s) found; The url %s must be accessible from the internet" " in order to receive updates" diff --git a/homeassistant/components/rachio/config_flow.py b/homeassistant/components/rachio/config_flow.py index bdd2f81536d..fac93952b35 100644 --- a/homeassistant/components/rachio/config_flow.py +++ b/homeassistant/components/rachio/config_flow.py @@ -108,17 +108,15 @@ class RachioConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Rachio.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, int] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/rachio/coordinator.py b/homeassistant/components/rachio/coordinator.py index 25c40bd6656..62d42f2afda 100644 --- a/homeassistant/components/rachio/coordinator.py +++ b/homeassistant/components/rachio/coordinator.py @@ -8,6 +8,7 @@ from typing import Any from rachiopy import Rachio from requests.exceptions import Timeout +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -38,6 +39,7 @@ class RachioUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, rachio: Rachio, + config_entry: ConfigEntry, base_station, base_count: int, ) -> None: @@ -48,6 +50,7 @@ 
class RachioUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): super().__init__( hass, _LOGGER, + config_entry=config_entry, name=f"{DOMAIN} update coordinator", # To avoid exceeding the rate limit, increase polling interval for # each additional base station on the account @@ -76,6 +79,7 @@ class RachioScheduleUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]] self, hass: HomeAssistant, rachio: Rachio, + config_entry: ConfigEntry, base_station, ) -> None: """Initialize a Rachio schedule coordinator.""" @@ -85,6 +89,7 @@ class RachioScheduleUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]] super().__init__( hass, _LOGGER, + config_entry=config_entry, name=f"{DOMAIN} schedule update coordinator", update_interval=timedelta(minutes=30), ) diff --git a/homeassistant/components/rachio/device.py b/homeassistant/components/rachio/device.py index 0bbb862753e..179e5f5ec0d 100644 --- a/homeassistant/components/rachio/device.py +++ b/homeassistant/components/rachio/device.py @@ -164,7 +164,7 @@ class RachioPerson: # rachio hands us back a dict if isinstance(webhooks, dict): if webhooks.get("code") == PERMISSION_ERROR: - _LOGGER.info( + _LOGGER.warning( ( "Not adding controller '%s', only controllers owned by '%s'" " may be added" @@ -189,13 +189,15 @@ class RachioPerson: RachioBaseStation( rachio, base, - RachioUpdateCoordinator(hass, rachio, base, base_count), - RachioScheduleUpdateCoordinator(hass, rachio, base), + RachioUpdateCoordinator( + hass, rachio, self.config_entry, base, base_count + ), + RachioScheduleUpdateCoordinator(hass, rachio, self.config_entry, base), ) for base in base_stations ) - _LOGGER.info('Using Rachio API as user "%s"', self.username) + _LOGGER.debug('Using Rachio API as user "%s"', self.username) @property def user_id(self) -> str | None: @@ -334,7 +336,7 @@ class RachioIro: def stop_watering(self) -> None: """Stop watering all zones connected to this controller.""" self.rachio.device.stop_water(self.controller_id) - _LOGGER.info("Stopped watering of all zones on %s", self) + _LOGGER.debug("Stopped watering of all zones on %s", self) def pause_watering(self, duration) -> None: """Pause watering on this controller.""" diff --git a/homeassistant/components/rachio/icons.json b/homeassistant/components/rachio/icons.json index dfab8788fc8..df30929ab4c 100644 --- a/homeassistant/components/rachio/icons.json +++ b/homeassistant/components/rachio/icons.json @@ -10,11 +10,23 @@ } }, "services": { - "set_zone_moisture_percent": "mdi:water-percent", - "start_multiple_zone_schedule": "mdi:play", - "pause_watering": "mdi:pause", - "resume_watering": "mdi:play", - "stop_watering": "mdi:stop", - "start_watering": "mdi:water" + "set_zone_moisture_percent": { + "service": "mdi:water-percent" + }, + "start_multiple_zone_schedule": { + "service": "mdi:play" + }, + "pause_watering": { + "service": "mdi:pause" + }, + "resume_watering": { + "service": "mdi:play" + }, + "stop_watering": { + "service": "mdi:stop" + }, + "start_watering": { + "service": "mdi:water" + } } } diff --git a/homeassistant/components/rachio/strings.json b/homeassistant/components/rachio/strings.json index ad7a277d23a..308403d805d 100644 --- a/homeassistant/components/rachio/strings.json +++ b/homeassistant/components/rachio/strings.json @@ -3,7 +3,7 @@ "step": { "user": { "title": "Connect to your Rachio device", - "description": "You will need the API Key from https://app.rach.io/. 
Go to Settings, then click 'GET API KEY'.", + "description": "You will need the API Key from https://app.rach.io/. Go to Settings, then select 'GET API KEY'.", "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } diff --git a/homeassistant/components/radarr/__init__.py b/homeassistant/components/radarr/__init__.py index 1023bf10659..5c225697f98 100644 --- a/homeassistant/components/radarr/__init__.py +++ b/homeassistant/components/radarr/__init__.py @@ -3,26 +3,15 @@ from __future__ import annotations from dataclasses import dataclass, fields -from typing import cast from aiopyarr.models.host_configuration import PyArrHostConfiguration from aiopyarr.radarr_client import RadarrClient from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_SW_VERSION, - CONF_API_KEY, - CONF_URL, - CONF_VERIFY_SSL, - Platform, -) +from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DEFAULT_NAME, DOMAIN from .coordinator import ( CalendarUpdateCoordinator, DiskSpaceDataUpdateCoordinator, @@ -31,7 +20,6 @@ from .coordinator import ( QueueDataUpdateCoordinator, RadarrDataUpdateCoordinator, StatusDataUpdateCoordinator, - T, ) PLATFORMS = [Platform.BINARY_SENSOR, Platform.CALENDAR, Platform.SENSOR] @@ -89,36 +77,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: RadarrConfigEntry) -> bo async def async_unload_entry(hass: HomeAssistant, entry: RadarrConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -class RadarrEntity(CoordinatorEntity[RadarrDataUpdateCoordinator[T]]): - """Defines a base Radarr entity.""" - - _attr_has_entity_name = True - coordinator: RadarrDataUpdateCoordinator[T] - - def __init__( - self, - coordinator: RadarrDataUpdateCoordinator[T], - description: EntityDescription, - ) -> None: - """Create Radarr entity.""" - super().__init__(coordinator) - self.entity_description = description - self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{description.key}" - - @property - def device_info(self) -> DeviceInfo: - """Return device information about the Radarr instance.""" - device_info = DeviceInfo( - configuration_url=self.coordinator.host_configuration.url, - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, self.coordinator.config_entry.entry_id)}, - manufacturer=DEFAULT_NAME, - name=self.coordinator.config_entry.title, - ) - if isinstance(self.coordinator, StatusDataUpdateCoordinator): - device_info[ATTR_SW_VERSION] = cast( - StatusDataUpdateCoordinator, self.coordinator - ).data.version - return device_info diff --git a/homeassistant/components/radarr/binary_sensor.py b/homeassistant/components/radarr/binary_sensor.py index 6c0468cff58..953c7dead18 100644 --- a/homeassistant/components/radarr/binary_sensor.py +++ b/homeassistant/components/radarr/binary_sensor.py @@ -13,8 +13,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RadarrConfigEntry, RadarrEntity +from . 
import RadarrConfigEntry from .const import HEALTH_ISSUES +from .entity import RadarrEntity BINARY_SENSOR_TYPE = BinarySensorEntityDescription( key="health", diff --git a/homeassistant/components/radarr/calendar.py b/homeassistant/components/radarr/calendar.py index 4f866123a1a..c741c178862 100644 --- a/homeassistant/components/radarr/calendar.py +++ b/homeassistant/components/radarr/calendar.py @@ -9,8 +9,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RadarrConfigEntry, RadarrEntity +from . import RadarrConfigEntry from .coordinator import CalendarUpdateCoordinator, RadarrEvent +from .entity import RadarrEntity CALENDAR_TYPE = EntityDescription( key="calendar", diff --git a/homeassistant/components/radarr/config_flow.py b/homeassistant/components/radarr/config_flow.py index 3bf0796a9a8..d02038d7131 100644 --- a/homeassistant/components/radarr/config_flow.py +++ b/homeassistant/components/radarr/config_flow.py @@ -10,13 +10,13 @@ from aiopyarr import exceptions from aiopyarr.models.host_configuration import PyArrHostConfiguration from aiopyarr.radarr_client import RadarrClient import voluptuous as vol +from yarl import URL -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . import RadarrConfigEntry from .const import DEFAULT_NAME, DEFAULT_URL, DOMAIN @@ -24,12 +24,11 @@ class RadarrConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Radarr.""" VERSION = 1 - entry: RadarrConfigEntry | None = None - async def async_step_reauth(self, _: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -48,10 +47,13 @@ class RadarrConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initiated by the user.""" errors = {} - if user_input is None: - user_input = dict(self.entry.data) if self.entry else None + if user_input is not None: + # aiopyarr defaults to the service port if one isn't given + # this is counter to standard practice where http = 80 + # and https = 443. 
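# --- Editor's aside (illustration only, not part of the patch) ----------------
# The added lines below rebuild the URL with an explicit port because yarl
# falls back to the scheme's default port when none was given. A quick,
# self-contained check of that behaviour (the host names are made up):
from yarl import URL

assert URL("https://radarr.local/api").port == 443  # scheme default
assert URL("http://radarr.local/api").port == 80  # scheme default
assert URL("http://radarr.local:7878/api").port == 7878  # explicit port wins
# So the rebuilt value always spells out the port aiopyarr should use.
# ------------------------------------------------------------------------------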
+ url = URL(user_input[CONF_URL]) + user_input[CONF_URL] = f"{url.scheme}://{url.host}:{url.port}{url.path}" - else: try: if result := await validate_input(self.hass, user_input): user_input[CONF_API_KEY] = result[1] @@ -66,20 +68,21 @@ class RadarrConfigFlow(ConfigFlow, domain=DOMAIN): except exceptions.ArrException: errors = {"base": "unknown"} if not errors: - if self.entry: - self.hass.config_entries.async_update_entry( - self.entry, data=user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=DEFAULT_NAME, data=user_input, ) - user_input = user_input or {} + if user_input is None: + user_input = {} + if self.source == SOURCE_REAUTH: + user_input = dict(self._get_reauth_entry().data) + return self.async_show_form( step_id="user", data_schema=vol.Schema( diff --git a/homeassistant/components/radarr/entity.py b/homeassistant/components/radarr/entity.py new file mode 100644 index 00000000000..bc2c17821cc --- /dev/null +++ b/homeassistant/components/radarr/entity.py @@ -0,0 +1,46 @@ +"""The Radarr component.""" + +from __future__ import annotations + +from typing import cast + +from homeassistant.const import ATTR_SW_VERSION +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DEFAULT_NAME, DOMAIN +from .coordinator import RadarrDataUpdateCoordinator, StatusDataUpdateCoordinator, T + + +class RadarrEntity(CoordinatorEntity[RadarrDataUpdateCoordinator[T]]): + """Defines a base Radarr entity.""" + + _attr_has_entity_name = True + coordinator: RadarrDataUpdateCoordinator[T] + + def __init__( + self, + coordinator: RadarrDataUpdateCoordinator[T], + description: EntityDescription, + ) -> None: + """Create Radarr entity.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{description.key}" + + @property + def device_info(self) -> DeviceInfo: + """Return device information about the Radarr instance.""" + device_info = DeviceInfo( + configuration_url=self.coordinator.host_configuration.url, + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, self.coordinator.config_entry.entry_id)}, + manufacturer=DEFAULT_NAME, + name=self.coordinator.config_entry.title, + ) + if isinstance(self.coordinator, StatusDataUpdateCoordinator): + device_info[ATTR_SW_VERSION] = cast( + StatusDataUpdateCoordinator, self.coordinator + ).data.version + return device_info diff --git a/homeassistant/components/radarr/sensor.py b/homeassistant/components/radarr/sensor.py index 441c44de781..df1a0686e00 100644 --- a/homeassistant/components/radarr/sensor.py +++ b/homeassistant/components/radarr/sensor.py @@ -19,8 +19,9 @@ from homeassistant.const import EntityCategory, UnitOfInformation from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RadarrConfigEntry, RadarrEntity +from . 
import RadarrConfigEntry from .coordinator import RadarrDataUpdateCoordinator, T +from .entity import RadarrEntity def get_space(data: list[Diskspace], name: str) -> str: diff --git a/homeassistant/components/radio_browser/config_flow.py b/homeassistant/components/radio_browser/config_flow.py index 137ee7c8e87..411259f31d3 100644 --- a/homeassistant/components/radio_browser/config_flow.py +++ b/homeassistant/components/radio_browser/config_flow.py @@ -18,9 +18,6 @@ class RadioBrowserConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is not None: return self.async_create_entry(title="Radio Browser", data={}) diff --git a/homeassistant/components/radio_browser/manifest.json b/homeassistant/components/radio_browser/manifest.json index 4192805ec62..943187596d7 100644 --- a/homeassistant/components/radio_browser/manifest.json +++ b/homeassistant/components/radio_browser/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/radio_browser", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["radios==0.3.1"] + "requirements": ["radios==0.3.2", "pycountry==24.6.1"], + "single_config_entry": true } diff --git a/homeassistant/components/radio_browser/media_source.py b/homeassistant/components/radio_browser/media_source.py index 2f95acf407d..dc91525677b 100644 --- a/homeassistant/components/radio_browser/media_source.py +++ b/homeassistant/components/radio_browser/media_source.py @@ -4,16 +4,16 @@ from __future__ import annotations import mimetypes +import pycountry from radios import FilterBy, Order, RadioBrowser, Station -from radios.radio_browser import pycountry from homeassistant.components.media_player import MediaClass, MediaType -from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia, + Unresolvable, ) from homeassistant.core import HomeAssistant, callback diff --git a/homeassistant/components/radio_browser/strings.json b/homeassistant/components/radio_browser/strings.json index fd0470d26dc..5dd0ad3dcf7 100644 --- a/homeassistant/components/radio_browser/strings.json +++ b/homeassistant/components/radio_browser/strings.json @@ -4,9 +4,6 @@ "user": { "description": "Do you want to add Radio Browser to Home Assistant?" 
} - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } } } diff --git a/homeassistant/components/radiotherm/climate.py b/homeassistant/components/radiotherm/climate.py index 73ab3644a0b..af52c5fcea3 100644 --- a/homeassistant/components/radiotherm/climate.py +++ b/homeassistant/components/radiotherm/climate.py @@ -107,7 +107,6 @@ class RadioThermostat(RadioThermostatEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.FAHRENHEIT _attr_precision = PRECISION_HALVES _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: RadioThermUpdateCoordinator) -> None: """Initialize the thermostat.""" diff --git a/homeassistant/components/radiotherm/config_flow.py b/homeassistant/components/radiotherm/config_flow.py index e9904318ae9..e29c4703e08 100644 --- a/homeassistant/components/radiotherm/config_flow.py +++ b/homeassistant/components/radiotherm/config_flow.py @@ -60,7 +60,9 @@ class RadioThermConfigFlow(ConfigFlow, domain=DOMAIN): self.discovered_ip = discovery_info.ip return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Attempt to confirm.""" ip_address = self.discovered_ip init_data = self.discovered_init_data @@ -75,7 +77,7 @@ class RadioThermConfigFlow(ConfigFlow, domain=DOMAIN): self._set_confirm_only() placeholders = { "name": init_data.name, - "host": self.discovered_ip, + "host": ip_address, "model": init_data.model or "Unknown", } self.context["title_placeholders"] = placeholders diff --git a/homeassistant/components/rainbird/__init__.py b/homeassistant/components/rainbird/__init__.py index da2a0e4b475..4827ac3e67c 100644 --- a/homeassistant/components/rainbird/__init__.py +++ b/homeassistant/components/rainbird/__init__.py @@ -7,9 +7,8 @@ from typing import Any import aiohttp from pyrainbird.async_client import AsyncRainbirdClient, AsyncRainbirdController -from pyrainbird.exceptions import RainbirdApiException +from pyrainbird.exceptions import RainbirdApiException, RainbirdAuthException -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -18,12 +17,17 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import format_mac from .const import CONF_SERIAL_NUMBER -from .coordinator import RainbirdData, async_create_clientsession +from .coordinator import ( + RainbirdScheduleUpdateCoordinator, + RainbirdUpdateCoordinator, + async_create_clientsession, +) +from .types import RainbirdConfigEntry, RainbirdData _LOGGER = logging.getLogger(__name__) @@ -40,7 +44,9 @@ DOMAIN = "rainbird" def _async_register_clientsession_shutdown( - hass: HomeAssistant, entry: ConfigEntry, clientsession: aiohttp.ClientSession + hass: HomeAssistant, + entry: RainbirdConfigEntry, + clientsession: aiohttp.ClientSession, ) -> None: """Register cleanup hooks for the clientsession.""" @@ -55,11 +61,9 @@ def _async_register_clientsession_shutdown( entry.async_on_unload(_async_close_websession) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def 
async_setup_entry(hass: HomeAssistant, entry: RainbirdConfigEntry) -> bool: """Set up the config entry for Rain Bird.""" - hass.data.setdefault(DOMAIN, {}) - clientsession = async_create_clientsession() _async_register_clientsession_shutdown(hass, entry, clientsession) @@ -91,21 +95,37 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: model_info = await controller.get_model_and_version() + except RainbirdAuthException as err: + raise ConfigEntryAuthFailed from err except RainbirdApiException as err: raise ConfigEntryNotReady from err - data = RainbirdData(hass, entry, controller, model_info) + data = RainbirdData( + controller, + model_info, + coordinator=RainbirdUpdateCoordinator( + hass, + name=entry.title, + controller=controller, + unique_id=entry.unique_id, + model_info=model_info, + ), + schedule_coordinator=RainbirdScheduleUpdateCoordinator( + hass, + name=f"{entry.title} Schedule", + controller=controller, + ), + ) await data.coordinator.async_config_entry_first_refresh() - hass.data[DOMAIN][entry.entry_id] = data - + entry.runtime_data = data await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True async def _async_fix_unique_id( - hass: HomeAssistant, controller: AsyncRainbirdController, entry: ConfigEntry + hass: HomeAssistant, controller: AsyncRainbirdController, entry: RainbirdConfigEntry ) -> bool: """Update the config entry with a unique id based on the mac address.""" _LOGGER.debug("Checking for migration of config entry (%s)", entry.unique_id) @@ -234,10 +254,6 @@ def _async_fix_device_id( ) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RainbirdConfigEntry) -> bool: """Unload a config entry.""" - - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/rainbird/binary_sensor.py b/homeassistant/components/rainbird/binary_sensor.py index d44022b0a2d..5722b8852dd 100644 --- a/homeassistant/components/rainbird/binary_sensor.py +++ b/homeassistant/components/rainbird/binary_sensor.py @@ -8,13 +8,12 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import RainbirdUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) @@ -27,11 +26,11 @@ RAIN_SENSOR_ENTITY_DESCRIPTION = BinarySensorEntityDescription( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird binary_sensor.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id].coordinator + coordinator = config_entry.runtime_data.coordinator async_add_entities([RainBirdSensor(coordinator, RAIN_SENSOR_ENTITY_DESCRIPTION)]) diff --git a/homeassistant/components/rainbird/calendar.py b/homeassistant/components/rainbird/calendar.py index 42c1cce69d3..160fe70c61e 100644 --- a/homeassistant/components/rainbird/calendar.py +++ 
b/homeassistant/components/rainbird/calendar.py @@ -6,7 +6,6 @@ from datetime import datetime import logging from homeassistant.components.calendar import CalendarEntity, CalendarEvent -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo @@ -14,19 +13,19 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util -from .const import DOMAIN from .coordinator import RainbirdScheduleUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird irrigation calendar.""" - data = hass.data[DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data if not data.model_info.model_info.max_programs: return diff --git a/homeassistant/components/rainbird/config_flow.py b/homeassistant/components/rainbird/config_flow.py index c1c814b05c4..1390650ea02 100644 --- a/homeassistant/components/rainbird/config_flow.py +++ b/homeassistant/components/rainbird/config_flow.py @@ -3,28 +3,22 @@ from __future__ import annotations import asyncio +from collections.abc import Mapping import logging from typing import Any -from pyrainbird.async_client import ( - AsyncRainbirdClient, - AsyncRainbirdController, - RainbirdApiException, -) +from pyrainbird.async_client import AsyncRainbirdClient, AsyncRainbirdController from pyrainbird.data import WifiParams +from pyrainbird.exceptions import RainbirdApiException, RainbirdAuthException import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, selector from homeassistant.helpers.device_registry import format_mac +from . 
import RainbirdConfigEntry from .const import ( ATTR_DURATION, CONF_SERIAL_NUMBER, @@ -45,6 +39,13 @@ DATA_SCHEMA = vol.Schema( ), } ) +REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): selector.TextSelector( + selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD) + ), + } +) class ConfigFlowError(Exception): @@ -59,13 +60,44 @@ class ConfigFlowError(Exception): class RainbirdConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Rain Bird.""" + host: str + @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, ) -> RainBirdOptionsFlowHandler: """Define the config flow to handle options.""" - return RainBirdOptionsFlowHandler(config_entry) + return RainBirdOptionsFlowHandler() + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauthentication upon an API authentication error.""" + self.host = entry_data[CONF_HOST] + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reauthentication dialog.""" + errors: dict[str, str] = {} + if user_input: + try: + await self._test_connection(self.host, user_input[CONF_PASSWORD]) + except ConfigFlowError as err: + _LOGGER.error("Error during config flow: %s", err) + errors["base"] = err.error_code + else: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]}, + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=REAUTH_SCHEMA, + errors=errors, + ) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -123,6 +155,11 @@ class RainbirdConfigFlowHandler(ConfigFlow, domain=DOMAIN): f"Timeout connecting to Rain Bird controller: {err!s}", "timeout_connect", ) from err + except RainbirdAuthException as err: + raise ConfigFlowError( + f"Authentication error connecting from Rain Bird controller: {err!s}", + "invalid_auth", + ) from err except RainbirdApiException as err: raise ConfigFlowError( f"Error connecting to Rain Bird controller: {err!s}", @@ -165,10 +202,6 @@ class RainbirdConfigFlowHandler(ConfigFlow, domain=DOMAIN): class RainBirdOptionsFlowHandler(OptionsFlow): """Handle a RainBird options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize RainBirdOptionsFlowHandler.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/rainbird/coordinator.py b/homeassistant/components/rainbird/coordinator.py index 83db2d584d2..2ccfa0af62a 100644 --- a/homeassistant/components/rainbird/coordinator.py +++ b/homeassistant/components/rainbird/coordinator.py @@ -5,7 +5,6 @@ from __future__ import annotations import asyncio from dataclasses import dataclass import datetime -from functools import cached_property import logging import aiohttp @@ -16,13 +15,13 @@ from pyrainbird.async_client import ( ) from pyrainbird.data import ModelAndVersion, Schedule -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, MANUFACTURER, TIMEOUT_SECONDS +from .types 
import RainbirdConfigEntry UPDATE_INTERVAL = datetime.timedelta(minutes=1) # The calendar data requires RPCs for each program/zone, and the data rarely @@ -141,7 +140,7 @@ class RainbirdUpdateCoordinator(DataUpdateCoordinator[RainbirdDeviceState]): class RainbirdScheduleUpdateCoordinator(DataUpdateCoordinator[Schedule]): """Coordinator for rainbird irrigation schedule calls.""" - config_entry: ConfigEntry + config_entry: RainbirdConfigEntry def __init__( self, @@ -166,36 +165,3 @@ class RainbirdScheduleUpdateCoordinator(DataUpdateCoordinator[Schedule]): return await self._controller.get_schedule() except RainbirdApiException as err: raise UpdateFailed(f"Error communicating with Device: {err}") from err - - -@dataclass -class RainbirdData: - """Holder for shared integration data. - - The coordinators are lazy since they may only be used by some platforms when needed. - """ - - hass: HomeAssistant - entry: ConfigEntry - controller: AsyncRainbirdController - model_info: ModelAndVersion - - @cached_property - def coordinator(self) -> RainbirdUpdateCoordinator: - """Return RainbirdUpdateCoordinator.""" - return RainbirdUpdateCoordinator( - self.hass, - name=self.entry.title, - controller=self.controller, - unique_id=self.entry.unique_id, - model_info=self.model_info, - ) - - @cached_property - def schedule_coordinator(self) -> RainbirdScheduleUpdateCoordinator: - """Return RainbirdScheduleUpdateCoordinator.""" - return RainbirdScheduleUpdateCoordinator( - self.hass, - name=f"{self.entry.title} Schedule", - controller=self.controller, - ) diff --git a/homeassistant/components/rainbird/icons.json b/homeassistant/components/rainbird/icons.json index 79d2256f184..61c09f74e88 100644 --- a/homeassistant/components/rainbird/icons.json +++ b/homeassistant/components/rainbird/icons.json @@ -22,7 +22,11 @@ } }, "services": { - "start_irrigation": "mdi:water", - "set_rain_delay": "mdi:water-sync" + "start_irrigation": { + "service": "mdi:water" + }, + "set_rain_delay": { + "service": "mdi:water-sync" + } } } diff --git a/homeassistant/components/rainbird/number.py b/homeassistant/components/rainbird/number.py index 507a31e59a4..d8081a796b9 100644 --- a/homeassistant/components/rainbird/number.py +++ b/homeassistant/components/rainbird/number.py @@ -7,29 +7,28 @@ import logging from pyrainbird.exceptions import RainbirdApiException, RainbirdDeviceBusyException from homeassistant.components.number import NumberEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import RainbirdUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird number platform.""" async_add_entities( [ RainDelayNumber( - hass.data[DOMAIN][config_entry.entry_id].coordinator, + config_entry.runtime_data.coordinator, ) ] ) diff --git a/homeassistant/components/rainbird/quality_scale.yaml b/homeassistant/components/rainbird/quality_scale.yaml new file mode 100644 index 00000000000..8b4805a9b0e --- /dev/null +++ b/homeassistant/components/rainbird/quality_scale.yaml @@ -0,0 
+1,89 @@ +rules: +  # Bronze +  config-flow: done +  brands: done +  dependency-transparency: done +  common-modules: done +  has-entity-name: done +  action-setup: +    status: done +    comment: | +      The integration only has an entity service, registered in the platform. +  appropriate-polling: +    status: done +    comment: | +      Rainbird devices are local. Irrigation valve/controller status is polled +      once per minute to get fast updates when turning on/off the valves. +      The irrigation schedule uses a 15 minute poll interval since it rarely +      changes. + +      Rainbird devices can only accept a single http connection, so this uses +      an aiohttp.ClientSession with a connection limit, and also uses a request +      debouncer. +  test-before-configure: done +  entity-event-setup: +    status: exempt +    comment: Integration is polling and does not subscribe to events. +  unique-config-entry: done +  entity-unique-id: done +  docs-installation-instructions: +    status: todo +    comment: | +      The introduction can be improved and is missing pre-requisites such as +      installing the app. +  docs-removal-instructions: todo +  test-before-setup: done +  docs-high-level-description: done +  config-flow-test-coverage: +    status: todo +    comment: | +      All config flow tests should finish with CREATE_ENTRY and ABORT to +      test they are able to recover from errors +  docs-actions: done +  runtime-data: done + +  # Silver +  log-when-unavailable: done +  config-entry-unloading: done +  reauthentication-flow: done +  action-exceptions: done +  docs-installation-parameters: +    status: todo +    comment: The documentation does not mention installation parameters +  integration-owner: done +  parallel-updates: +    status: todo +    comment: The integration does not explicitly set a number of parallel updates. +  test-coverage: done +  docs-configuration-parameters: +    status: todo +    comment: The documentation for configuration parameters could be improved.
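Aside (illustrative, not part of the diff): the appropriate-polling note above describes pairing a single-connection aiohttp session with a request debouncer. A minimal sketch of that combination using only public aiohttp and Home Assistant APIs follows; the function name and the cooldown value are made up, and the integration's real helpers may differ in detail.

import logging

import aiohttp

from homeassistant.core import HomeAssistant
from homeassistant.helpers.debounce import Debouncer

_LOGGER = logging.getLogger(__name__)


async def example_refresh(hass: HomeAssistant) -> None:
    """Sketch: limit the device to one HTTP connection and debounce refreshes."""
    # At most one simultaneous connection to the device.
    session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(limit=1))

    async def _refresh() -> None:
        _LOGGER.debug("Refreshing device state")

    # Bursts of refresh requests within the cooldown collapse into a single call.
    debouncer = Debouncer(
        hass, _LOGGER, cooldown=1.0, immediate=True, function=_refresh
    )
    await debouncer.async_call()
    await session.close()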
+ entity-unavailable: done + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/homeassistant/components/rainbird/sensor.py b/homeassistant/components/rainbird/sensor.py index 649d643a20c..4725a33bc9a 100644 --- a/homeassistant/components/rainbird/sensor.py +++ b/homeassistant/components/rainbird/sensor.py @@ -5,14 +5,13 @@ from __future__ import annotations import logging from homeassistant.components.sensor import SensorEntity, SensorEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import RainbirdUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) @@ -25,14 +24,14 @@ RAIN_DELAY_ENTITY_DESCRIPTION = SensorEntityDescription( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird sensor.""" async_add_entities( [ RainBirdSensor( - hass.data[DOMAIN][config_entry.entry_id].coordinator, + config_entry.runtime_data.coordinator, RAIN_DELAY_ENTITY_DESCRIPTION, ) ] diff --git a/homeassistant/components/rainbird/strings.json b/homeassistant/components/rainbird/strings.json index ea0d64f6208..6f92b1bdb97 100644 --- a/homeassistant/components/rainbird/strings.json +++ b/homeassistant/components/rainbird/strings.json @@ -9,16 +9,29 @@ "password": "[%key:common::config_flow::data::password%]" }, "data_description": { - "host": "The hostname or IP address of your Rain Bird device." + "host": "The hostname or IP address of your Rain Bird device.", + "password": "The password used to authenticate with the Rain Bird device." + } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The Rain Bird integration needs to re-authenticate with the device.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "The password to authenticate with your Rain Bird device." 
} } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]" + "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" } }, "options": { @@ -27,6 +40,9 @@ "title": "[%key:component::rainbird::config::step::user::title%]", "data": { "duration": "Default irrigation time in minutes" + }, + "data_description": { + "duration": "The default duration the sprinkler will run when turned on." } } } diff --git a/homeassistant/components/rainbird/switch.py b/homeassistant/components/rainbird/switch.py index 62a2a7c4a32..f622a1b9b2c 100644 --- a/homeassistant/components/rainbird/switch.py +++ b/homeassistant/components/rainbird/switch.py @@ -8,7 +8,6 @@ from pyrainbird.exceptions import RainbirdApiException, RainbirdDeviceBusyExcept import voluptuous as vol from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_platform @@ -19,6 +18,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ATTR_DURATION, CONF_IMPORTED_NAMES, DOMAIN, MANUFACTURER from .coordinator import RainbirdUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) @@ -31,11 +31,11 @@ SERVICE_SCHEMA_IRRIGATION: VolDictType = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird irrigation switches.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id].coordinator + coordinator = config_entry.runtime_data.coordinator async_add_entities( RainBirdSwitch( coordinator, diff --git a/homeassistant/components/rainbird/types.py b/homeassistant/components/rainbird/types.py new file mode 100644 index 00000000000..cc43353ac17 --- /dev/null +++ b/homeassistant/components/rainbird/types.py @@ -0,0 +1,33 @@ +"""Types for Rain Bird integration.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from pyrainbird.async_client import AsyncRainbirdController +from pyrainbird.data import ModelAndVersion + +from homeassistant.config_entries import ConfigEntry + +if TYPE_CHECKING: + from .coordinator import ( + RainbirdScheduleUpdateCoordinator, + RainbirdUpdateCoordinator, + ) + + +@dataclass +class RainbirdData: + """Holder for shared integration data. + + The coordinators are lazy since they may only be used by some platforms when needed. 
+ """ + + controller: AsyncRainbirdController + model_info: ModelAndVersion + coordinator: RainbirdUpdateCoordinator + schedule_coordinator: RainbirdScheduleUpdateCoordinator + + +type RainbirdConfigEntry = ConfigEntry[RainbirdData] diff --git a/homeassistant/components/raincloud/__init__.py b/homeassistant/components/raincloud/__init__.py index a805024357c..f1eef40f307 100644 --- a/homeassistant/components/raincloud/__init__.py +++ b/homeassistant/components/raincloud/__init__.py @@ -8,76 +8,24 @@ from requests.exceptions import ConnectTimeout, HTTPError import voluptuous as vol from homeassistant.components import persistent_notification -from homeassistant.const import ( - CONF_PASSWORD, - CONF_SCAN_INTERVAL, - CONF_USERNAME, - PERCENTAGE, - UnitOfTime, -) +from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.event import track_time_interval from homeassistant.helpers.typing import ConfigType +from .const import DATA_RAINCLOUD, SIGNAL_UPDATE_RAINCLOUD + _LOGGER = logging.getLogger(__name__) -ALLOWED_WATERING_TIME = [5, 10, 15, 30, 45, 60] - -CONF_WATERING_TIME = "watering_minutes" - NOTIFICATION_ID = "raincloud_notification" NOTIFICATION_TITLE = "Rain Cloud Setup" -DATA_RAINCLOUD = "raincloud" DOMAIN = "raincloud" -DEFAULT_WATERING_TIME = 15 - -KEY_MAP = { - "auto_watering": "Automatic Watering", - "battery": "Battery", - "is_watering": "Watering", - "manual_watering": "Manual Watering", - "next_cycle": "Next Cycle", - "rain_delay": "Rain Delay", - "status": "Status", - "watering_time": "Remaining Watering Time", -} - -ICON_MAP = { - "auto_watering": "mdi:autorenew", - "battery": "", - "is_watering": "", - "manual_watering": "mdi:water-pump", - "next_cycle": "mdi:calendar-clock", - "rain_delay": "mdi:weather-rainy", - "status": "", - "watering_time": "mdi:water-pump", -} - -UNIT_OF_MEASUREMENT_MAP = { - "auto_watering": "", - "battery": PERCENTAGE, - "is_watering": "", - "manual_watering": "", - "next_cycle": "", - "rain_delay": UnitOfTime.DAYS, - "status": "", - "watering_time": UnitOfTime.MINUTES, -} - -BINARY_SENSORS = ["is_watering", "status"] - -SENSORS = ["battery", "next_cycle", "rain_delay", "watering_time"] - -SWITCHES = ["auto_watering", "manual_watering"] SCAN_INTERVAL = timedelta(seconds=20) -SIGNAL_UPDATE_RAINCLOUD = "raincloud_update" - CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( @@ -132,43 +80,3 @@ class RainCloudHub: def __init__(self, data): """Initialize the entity.""" self.data = data - - -class RainCloudEntity(Entity): - """Entity class for RainCloud devices.""" - - _attr_attribution = "Data provided by Melnor Aquatimer.com" - - def __init__(self, data, sensor_type): - """Initialize the RainCloud entity.""" - self.data = data - self._sensor_type = sensor_type - self._name = f"{self.data.name} {KEY_MAP.get(self._sensor_type)}" - self._state = None - - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - async def async_added_to_hass(self): - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, SIGNAL_UPDATE_RAINCLOUD, self._update_callback - ) - ) - - def _update_callback(self): - """Call update method.""" - 
self.schedule_update_ha_state(True) - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - return {"identifier": self.data.serial} - - @property - def icon(self): - """Return the icon to use in the frontend, if any.""" - return ICON_MAP.get(self._sensor_type) diff --git a/homeassistant/components/raincloud/binary_sensor.py b/homeassistant/components/raincloud/binary_sensor.py index 90ad36985ef..2696c192ed6 100644 --- a/homeassistant/components/raincloud/binary_sensor.py +++ b/homeassistant/components/raincloud/binary_sensor.py @@ -16,10 +16,13 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import BINARY_SENSORS, DATA_RAINCLOUD, ICON_MAP, RainCloudEntity +from .const import DATA_RAINCLOUD, ICON_MAP +from .entity import RainCloudEntity _LOGGER = logging.getLogger(__name__) +BINARY_SENSORS = ["is_watering", "status"] + PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( { vol.Optional(CONF_MONITORED_CONDITIONS, default=list(BINARY_SENSORS)): vol.All( diff --git a/homeassistant/components/raincloud/const.py b/homeassistant/components/raincloud/const.py new file mode 100644 index 00000000000..957830ffcc5 --- /dev/null +++ b/homeassistant/components/raincloud/const.py @@ -0,0 +1,17 @@ +"""Support for Melnor RainCloud sprinkler water timer.""" + +DATA_RAINCLOUD = "raincloud" + +ICON_MAP = { + "auto_watering": "mdi:autorenew", + "battery": "", + "is_watering": "", + "manual_watering": "mdi:water-pump", + "next_cycle": "mdi:calendar-clock", + "rain_delay": "mdi:weather-rainy", + "status": "", + "watering_time": "mdi:water-pump", +} + + +SIGNAL_UPDATE_RAINCLOUD = "raincloud_update" diff --git a/homeassistant/components/raincloud/entity.py b/homeassistant/components/raincloud/entity.py new file mode 100644 index 00000000000..337324d96eb --- /dev/null +++ b/homeassistant/components/raincloud/entity.py @@ -0,0 +1,68 @@ +"""Support for Melnor RainCloud sprinkler water timer.""" + +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from .const import SIGNAL_UPDATE_RAINCLOUD + +KEY_MAP = { + "auto_watering": "Automatic Watering", + "battery": "Battery", + "is_watering": "Watering", + "manual_watering": "Manual Watering", + "next_cycle": "Next Cycle", + "rain_delay": "Rain Delay", + "status": "Status", + "watering_time": "Remaining Watering Time", +} + +ICON_MAP = { + "auto_watering": "mdi:autorenew", + "battery": "", + "is_watering": "", + "manual_watering": "mdi:water-pump", + "next_cycle": "mdi:calendar-clock", + "rain_delay": "mdi:weather-rainy", + "status": "", + "watering_time": "mdi:water-pump", +} + + +class RainCloudEntity(Entity): + """Entity class for RainCloud devices.""" + + _attr_attribution = "Data provided by Melnor Aquatimer.com" + + def __init__(self, data, sensor_type): + """Initialize the RainCloud entity.""" + self.data = data + self._sensor_type = sensor_type + self._name = f"{self.data.name} {KEY_MAP.get(self._sensor_type)}" + self._state = None + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + async def async_added_to_hass(self): + """Register callbacks.""" + self.async_on_remove( + async_dispatcher_connect( + self.hass, SIGNAL_UPDATE_RAINCLOUD, self._update_callback + ) + ) + + def _update_callback(self): + """Call update method.""" + self.schedule_update_ha_state(True) + + 
@property + def extra_state_attributes(self): + """Return the state attributes.""" + return {"identifier": self.data.serial} + + @property + def icon(self): + """Return the icon to use in the frontend, if any.""" + return ICON_MAP.get(self._sensor_type) diff --git a/homeassistant/components/raincloud/manifest.json b/homeassistant/components/raincloud/manifest.json index 70f62d2beee..b5179622441 100644 --- a/homeassistant/components/raincloud/manifest.json +++ b/homeassistant/components/raincloud/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/raincloud", "iot_class": "cloud_polling", "loggers": ["raincloudy"], + "quality_scale": "legacy", "requirements": ["raincloudy==0.0.7"] } diff --git a/homeassistant/components/raincloud/sensor.py b/homeassistant/components/raincloud/sensor.py index 34a7cf73490..1f9d8d7b2c5 100644 --- a/homeassistant/components/raincloud/sensor.py +++ b/homeassistant/components/raincloud/sensor.py @@ -10,23 +10,20 @@ from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorEntity, ) -from homeassistant.const import CONF_MONITORED_CONDITIONS +from homeassistant.const import CONF_MONITORED_CONDITIONS, PERCENTAGE, UnitOfTime from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.icon import icon_for_battery_level from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ( - DATA_RAINCLOUD, - ICON_MAP, - SENSORS, - UNIT_OF_MEASUREMENT_MAP, - RainCloudEntity, -) +from .const import DATA_RAINCLOUD, ICON_MAP +from .entity import RainCloudEntity _LOGGER = logging.getLogger(__name__) +SENSORS = ["battery", "next_cycle", "rain_delay", "watering_time"] + PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSORS)): vol.All( @@ -35,6 +32,17 @@ PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( } ) +UNIT_OF_MEASUREMENT_MAP = { + "auto_watering": "", + "battery": PERCENTAGE, + "is_watering": "", + "manual_watering": "", + "next_cycle": "", + "rain_delay": UnitOfTime.DAYS, + "status": "", + "watering_time": UnitOfTime.MINUTES, +} + def setup_platform( hass: HomeAssistant, diff --git a/homeassistant/components/raincloud/switch.py b/homeassistant/components/raincloud/switch.py index 45d0b4f0fc5..59a11a6b167 100644 --- a/homeassistant/components/raincloud/switch.py +++ b/homeassistant/components/raincloud/switch.py @@ -17,17 +17,17 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import ( - ALLOWED_WATERING_TIME, - CONF_WATERING_TIME, - DATA_RAINCLOUD, - DEFAULT_WATERING_TIME, - SWITCHES, - RainCloudEntity, -) +from .const import DATA_RAINCLOUD +from .entity import RainCloudEntity _LOGGER = logging.getLogger(__name__) +ALLOWED_WATERING_TIME = [5, 10, 15, 30, 45, 60] +CONF_WATERING_TIME = "watering_minutes" +DEFAULT_WATERING_TIME = 15 + +SWITCHES = ["auto_watering", "manual_watering"] + PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( { vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SWITCHES)): vol.All( diff --git a/homeassistant/components/rainforest_raven/__init__.py b/homeassistant/components/rainforest_raven/__init__.py index 76f82624160..b68d995262a 100644 --- a/homeassistant/components/rainforest_raven/__init__.py +++ b/homeassistant/components/rainforest_raven/__init__.py @@ -2,29 +2,23 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import RAVEnDataCoordinator +from .coordinator import RAVEnConfigEntry, RAVEnDataCoordinator PLATFORMS = (Platform.SENSOR,) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: RAVEnConfigEntry) -> bool: """Set up Rainforest RAVEn device from a config entry.""" coordinator = RAVEnDataCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RAVEnConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/rainforest_raven/coordinator.py b/homeassistant/components/rainforest_raven/coordinator.py index d08a10c2670..31df922a168 100644 --- a/homeassistant/components/rainforest_raven/coordinator.py +++ b/homeassistant/components/rainforest_raven/coordinator.py @@ -20,6 +20,8 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN +type RAVEnConfigEntry = ConfigEntry[RAVEnDataCoordinator] + _LOGGER = logging.getLogger(__name__) @@ -67,32 +69,18 @@ class RAVEnDataCoordinator(DataUpdateCoordinator): _raven_device: RAVEnSerialDevice | None = None _device_info: RAVEnDeviceInfo | None = None + config_entry: RAVEnConfigEntry - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: RAVEnConfigEntry) -> None: """Initialize the data object.""" - self.entry = entry - super().__init__( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_interval=timedelta(seconds=30), ) - @property - def device_fw_version(self) -> str | None: - """Return the firmware version of the device.""" - if self._device_info: - return self._device_info.fw_version - return None - - @property - def device_hw_version(self) -> str | None: - """Return the hardware version of the device.""" - if self._device_info: - return self._device_info.hw_version - return None - @property def device_mac_address(self) -> 
str | None: """Return the MAC address of the device.""" @@ -100,36 +88,20 @@ class RAVEnDataCoordinator(DataUpdateCoordinator): return self._device_info.device_mac_id.hex() return None - @property - def device_manufacturer(self) -> str | None: - """Return the manufacturer of the device.""" - if self._device_info: - return self._device_info.manufacturer - return None - - @property - def device_model(self) -> str | None: - """Return the model of the device.""" - if self._device_info: - return self._device_info.model_id - return None - - @property - def device_name(self) -> str: - """Return the product name of the device.""" - return "RAVEn Device" - @property def device_info(self) -> DeviceInfo | None: """Return device info.""" - if self._device_info and self.device_mac_address: + if (device_info := self._device_info) and ( + mac_address := self.device_mac_address + ): return DeviceInfo( - identifiers={(DOMAIN, self.device_mac_address)}, - manufacturer=self.device_manufacturer, - model=self.device_model, - name=self.device_name, - sw_version=self.device_fw_version, - hw_version=self.device_hw_version, + identifiers={(DOMAIN, mac_address)}, + manufacturer=device_info.manufacturer, + model=device_info.model_id, + model_id=device_info.model_id, + name="RAVEn Device", + sw_version=device_info.fw_version, + hw_version=device_info.hw_version, ) return None @@ -142,7 +114,7 @@ class RAVEnDataCoordinator(DataUpdateCoordinator): try: device = await self._get_device() async with asyncio.timeout(5): - return await _get_all_data(device, self.entry.data[CONF_MAC]) + return await _get_all_data(device, self.config_entry.data[CONF_MAC]) except RAVEnConnectionError as err: await self._cleanup_device() raise UpdateFailed(f"RAVEnConnectionError: {err}") from err @@ -159,7 +131,7 @@ class RAVEnDataCoordinator(DataUpdateCoordinator): if self._raven_device is not None: return self._raven_device - device = RAVEnSerialDevice(self.entry.data[CONF_DEVICE]) + device = RAVEnSerialDevice(self.config_entry.data[CONF_DEVICE]) try: async with asyncio.timeout(5): diff --git a/homeassistant/components/rainforest_raven/diagnostics.py b/homeassistant/components/rainforest_raven/diagnostics.py index 820c4826f00..6c06b0d65cc 100644 --- a/homeassistant/components/rainforest_raven/diagnostics.py +++ b/homeassistant/components/rainforest_raven/diagnostics.py @@ -6,12 +6,10 @@ from collections.abc import Mapping from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MAC from homeassistant.core import HomeAssistant, callback -from .const import DOMAIN -from .coordinator import RAVEnDataCoordinator +from .coordinator import RAVEnConfigEntry TO_REDACT_CONFIG = {CONF_MAC} TO_REDACT_DATA = {"device_mac_id", "meter_mac_id"} @@ -31,14 +29,13 @@ def async_redact_meter_macs(data: dict) -> dict: async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: RAVEnConfigEntry ) -> Mapping[str, Any]: """Return diagnostics for a config entry.""" - coordinator: RAVEnDataCoordinator = hass.data[DOMAIN][config_entry.entry_id] return { "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT_CONFIG), "data": async_redact_meter_macs( - async_redact_data(coordinator.data, TO_REDACT_DATA) + async_redact_data(config_entry.runtime_data.data, TO_REDACT_DATA) ), } diff --git a/homeassistant/components/rainforest_raven/sensor.py 
b/homeassistant/components/rainforest_raven/sensor.py index 23ca3220694..1025e92ef86 100644 --- a/homeassistant/components/rainforest_raven/sensor.py +++ b/homeassistant/components/rainforest_raven/sensor.py @@ -10,9 +10,7 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, SensorStateClass, - StateType, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_MAC, PERCENTAGE, @@ -22,10 +20,10 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN -from .coordinator import RAVEnDataCoordinator +from .coordinator import RAVEnConfigEntry, RAVEnDataCoordinator @dataclass(frozen=True, kw_only=True) @@ -80,10 +78,12 @@ DIAGNOSTICS = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: RAVEnConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities: list[RAVEnSensor] = [ RAVEnSensor(coordinator, description) for description in DIAGNOSTICS ] diff --git a/homeassistant/components/rainmachine/__init__.py b/homeassistant/components/rainmachine/__init__.py index b10d562ac67..4d486c9c6aa 100644 --- a/homeassistant/components/rainmachine/__init__.py +++ b/homeassistant/components/rainmachine/__init__.py @@ -31,8 +31,7 @@ from homeassistant.helpers import ( device_registry as dr, entity_registry as er, ) -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity, UpdateFailed +from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.util.dt import as_timestamp, utcnow from homeassistant.util.network import is_ip_address @@ -54,7 +53,6 @@ from .const import ( LOGGER, ) from .coordinator import RainMachineDataUpdateCoordinator -from .model import RainMachineEntityDescription DEFAULT_SSL = True @@ -291,7 +289,7 @@ async def async_setup_entry( # noqa: C901 else: data = await controller.zones.all(details=True, include_inactive=True) except UnknownAPICallError: - LOGGER.info( + LOGGER.warning( "Skipping unsupported API call for controller %s: %s", controller.name, api_category, @@ -518,7 +516,7 @@ async def async_migrate_entry( await er.async_migrate_entries(hass, entry.entry_id, migrate_unique_id) - LOGGER.info("Migration to version %s successful", version) + LOGGER.debug("Migration to version %s successful", version) return True @@ -528,64 +526,3 @@ async def async_reload_entry( ) -> None: """Handle an options update.""" await hass.config_entries.async_reload(entry.entry_id) - - -class RainMachineEntity(CoordinatorEntity[RainMachineDataUpdateCoordinator]): - """Define a generic RainMachine entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - entry: RainMachineConfigEntry, - data: RainMachineData, - description: RainMachineEntityDescription, - ) -> None: - """Initialize.""" - super().__init__(data.coordinators[description.api_category]) - - self._attr_extra_state_attributes = {} - self._attr_unique_id = f"{data.controller.mac}_{description.key}" - self._entry = entry - self._data = data - self._version_coordinator = data.coordinators[DATA_API_VERSIONS] 
- self.entity_description = description - - @property - def device_info(self) -> DeviceInfo: - """Return device information about this controller.""" - return DeviceInfo( - identifiers={(DOMAIN, self._data.controller.mac)}, - configuration_url=( - f"https://{self._entry.data[CONF_IP_ADDRESS]}:" - f"{self._entry.data[CONF_PORT]}" - ), - connections={(dr.CONNECTION_NETWORK_MAC, self._data.controller.mac)}, - name=self._data.controller.name.capitalize(), - manufacturer="RainMachine", - model=( - f"Version {self._version_coordinator.data['hwVer']} " - f"(API: {self._version_coordinator.data['apiVer']})" - ), - sw_version=self._version_coordinator.data["swVer"], - ) - - @callback - def _handle_coordinator_update(self) -> None: - """Respond to a DataUpdateCoordinator update.""" - self.update_from_latest_data() - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """When entity is added to hass.""" - await super().async_added_to_hass() - self.async_on_remove( - self._version_coordinator.async_add_listener( - self._handle_coordinator_update, self.coordinator_context - ) - ) - self.update_from_latest_data() - - @callback - def update_from_latest_data(self) -> None: - """Update the state.""" diff --git a/homeassistant/components/rainmachine/binary_sensor.py b/homeassistant/components/rainmachine/binary_sensor.py index 574f458ec47..4ba9b58d596 100644 --- a/homeassistant/components/rainmachine/binary_sensor.py +++ b/homeassistant/components/rainmachine/binary_sensor.py @@ -11,9 +11,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RainMachineConfigEntry, RainMachineEntity +from . import RainMachineConfigEntry from .const import DATA_PROVISION_SETTINGS, DATA_RESTRICTIONS_CURRENT -from .model import RainMachineEntityDescription +from .entity import RainMachineEntity, RainMachineEntityDescription from .util import ( EntityDomainReplacementStrategy, async_finish_entity_domain_replacements, diff --git a/homeassistant/components/rainmachine/button.py b/homeassistant/components/rainmachine/button.py index 7087e5e5b8e..2f68c6a8a9c 100644 --- a/homeassistant/components/rainmachine/button.py +++ b/homeassistant/components/rainmachine/button.py @@ -19,9 +19,9 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RainMachineConfigEntry, RainMachineEntity +from . 
import RainMachineConfigEntry from .const import DATA_PROVISION_SETTINGS -from .model import RainMachineEntityDescription +from .entity import RainMachineEntity, RainMachineEntityDescription @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/rainmachine/config_flow.py b/homeassistant/components/rainmachine/config_flow.py index 5c07f04c163..0b40d506566 100644 --- a/homeassistant/components/rainmachine/config_flow.py +++ b/homeassistant/components/rainmachine/config_flow.py @@ -63,7 +63,7 @@ class RainMachineFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> RainMachineOptionsFlowHandler: """Define the config flow to handle options.""" - return RainMachineOptionsFlowHandler(config_entry) + return RainMachineOptionsFlowHandler() async def async_step_homekit( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -168,10 +168,6 @@ class RainMachineFlowHandler(ConfigFlow, domain=DOMAIN): class RainMachineOptionsFlowHandler(OptionsFlow): """Handle a RainMachine options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/rainmachine/entity.py b/homeassistant/components/rainmachine/entity.py new file mode 100644 index 00000000000..1289d3e808e --- /dev/null +++ b/homeassistant/components/rainmachine/entity.py @@ -0,0 +1,84 @@ +"""Support for RainMachine devices.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT +from homeassistant.core import callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import RainMachineConfigEntry, RainMachineData +from .const import DATA_API_VERSIONS, DOMAIN +from .coordinator import RainMachineDataUpdateCoordinator + + +@dataclass(frozen=True, kw_only=True) +class RainMachineEntityDescription(EntityDescription): + """Describe a RainMachine entity.""" + + api_category: str + + +class RainMachineEntity(CoordinatorEntity[RainMachineDataUpdateCoordinator]): + """Define a generic RainMachine entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + entry: RainMachineConfigEntry, + data: RainMachineData, + description: RainMachineEntityDescription, + ) -> None: + """Initialize.""" + super().__init__(data.coordinators[description.api_category]) + + self._attr_extra_state_attributes = {} + self._attr_unique_id = f"{data.controller.mac}_{description.key}" + self._entry = entry + self._data = data + self._version_coordinator = data.coordinators[DATA_API_VERSIONS] + self.entity_description = description + + @property + def device_info(self) -> DeviceInfo: + """Return device information about this controller.""" + return DeviceInfo( + identifiers={(DOMAIN, self._data.controller.mac)}, + configuration_url=( + f"https://{self._entry.data[CONF_IP_ADDRESS]}:" + f"{self._entry.data[CONF_PORT]}" + ), + connections={(dr.CONNECTION_NETWORK_MAC, self._data.controller.mac)}, + name=self._data.controller.name.capitalize(), + manufacturer="RainMachine", + model=( + f"Version {self._version_coordinator.data['hwVer']} " + f"(API: {self._version_coordinator.data['apiVer']})" + ), + sw_version=self._version_coordinator.data["swVer"], + ) + + @callback + def _handle_coordinator_update(self) -> None: + """Respond to a DataUpdateCoordinator update.""" + self.update_from_latest_data() + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """When entity is added to hass.""" + await super().async_added_to_hass() + self.async_on_remove( + self._version_coordinator.async_add_listener( + self._handle_coordinator_update, self.coordinator_context + ) + ) + self.update_from_latest_data() + + @callback + def update_from_latest_data(self) -> None: + """Update the state.""" diff --git a/homeassistant/components/rainmachine/icons.json b/homeassistant/components/rainmachine/icons.json index 32988081a18..ca85d81346e 100644 --- a/homeassistant/components/rainmachine/icons.json +++ b/homeassistant/components/rainmachine/icons.json @@ -70,16 +70,38 @@ } }, "services": { - "pause_watering": "mdi:pause", - "restrict_watering": "mdi:cancel", - "start_program": "mdi:play", - "start_zone": "mdi:play", - "stop_all": "mdi:stop", - "stop_program": "mdi:stop", - "stop_zone": "mdi:stop", - "unpause_watering": "mdi:play-pause", - "push_flow_meter_data": "mdi:database-arrow-up", - "push_weather_data": "mdi:database-arrow-up", - "unrestrict_watering": "mdi:check" + "pause_watering": { + "service": "mdi:pause" + }, + "restrict_watering": { + "service": "mdi:cancel" + }, + "start_program": { + "service": "mdi:play" + }, + "start_zone": { + "service": "mdi:play" + }, + "stop_all": { + "service": "mdi:stop" + }, + "stop_program": { + "service": "mdi:stop" + }, + "stop_zone": { + "service": "mdi:stop" + }, + "unpause_watering": { + "service": "mdi:play-pause" + }, + "push_flow_meter_data": { + "service": "mdi:database-arrow-up" + }, + "push_weather_data": { + "service": "mdi:database-arrow-up" + }, + "unrestrict_watering": { + "service": "mdi:check" + } } } diff --git a/homeassistant/components/rainmachine/model.py b/homeassistant/components/rainmachine/model.py deleted file 
mode 100644 index ee5567112cf..00000000000 --- a/homeassistant/components/rainmachine/model.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Define RainMachine data models.""" - -from dataclasses import dataclass - -from homeassistant.helpers.entity import EntityDescription - - -@dataclass(frozen=True, kw_only=True) -class RainMachineEntityDescription(EntityDescription): - """Describe a RainMachine entity.""" - - api_category: str diff --git a/homeassistant/components/rainmachine/select.py b/homeassistant/components/rainmachine/select.py index 73de33cc8ed..1d9225a5bb2 100644 --- a/homeassistant/components/rainmachine/select.py +++ b/homeassistant/components/rainmachine/select.py @@ -14,9 +14,9 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM, UnitSystem -from . import RainMachineConfigEntry, RainMachineData, RainMachineEntity +from . import RainMachineConfigEntry, RainMachineData from .const import DATA_RESTRICTIONS_UNIVERSAL -from .model import RainMachineEntityDescription +from .entity import RainMachineEntity, RainMachineEntityDescription from .util import key_exists diff --git a/homeassistant/components/rainmachine/sensor.py b/homeassistant/components/rainmachine/sensor.py index 5363000a8ac..64f9ecf3990 100644 --- a/homeassistant/components/rainmachine/sensor.py +++ b/homeassistant/components/rainmachine/sensor.py @@ -20,9 +20,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import utc_from_timestamp, utcnow -from . import RainMachineConfigEntry, RainMachineData, RainMachineEntity +from . import RainMachineConfigEntry, RainMachineData from .const import DATA_PROGRAMS, DATA_PROVISION_SETTINGS, DATA_ZONES -from .model import RainMachineEntityDescription +from .entity import RainMachineEntity, RainMachineEntityDescription from .util import ( RUN_STATE_MAP, EntityDomainReplacementStrategy, diff --git a/homeassistant/components/rainmachine/switch.py b/homeassistant/components/rainmachine/switch.py index 8368db47d61..2a065f18976 100644 --- a/homeassistant/components/rainmachine/switch.py +++ b/homeassistant/components/rainmachine/switch.py @@ -20,12 +20,7 @@ from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType -from . import ( - RainMachineConfigEntry, - RainMachineData, - RainMachineEntity, - async_update_programs_and_zones, -) +from . 
import RainMachineConfigEntry, RainMachineData, async_update_programs_and_zones from .const import ( CONF_ALLOW_INACTIVE_ZONES_TO_RUN, CONF_DEFAULT_ZONE_RUN_TIME, @@ -37,7 +32,7 @@ from .const import ( DATA_ZONES, DEFAULT_ZONE_RUN, ) -from .model import RainMachineEntityDescription +from .entity import RainMachineEntity, RainMachineEntityDescription from .util import RUN_STATE_MAP, key_exists ATTR_ACTIVITY_TYPE = "activity_type" diff --git a/homeassistant/components/rainmachine/update.py b/homeassistant/components/rainmachine/update.py index a7c11061718..39156b05cd4 100644 --- a/homeassistant/components/rainmachine/update.py +++ b/homeassistant/components/rainmachine/update.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import dataclass from enum import Enum from typing import Any @@ -10,15 +11,16 @@ from regenmaschine.errors import RequestError from homeassistant.components.update import ( UpdateDeviceClass, UpdateEntity, + UpdateEntityDescription, UpdateEntityFeature, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RainMachineConfigEntry, RainMachineEntity +from . import RainMachineConfigEntry from .const import DATA_MACHINE_FIRMWARE_UPDATE_STATUS -from .model import RainMachineEntityDescription +from .entity import RainMachineEntity, RainMachineEntityDescription class UpdateStates(Enum): @@ -42,7 +44,14 @@ UPDATE_STATE_MAP = { } -UPDATE_DESCRIPTION = RainMachineEntityDescription( +@dataclass(frozen=True, kw_only=True) +class RainMachineUpdateEntityDescription( + UpdateEntityDescription, RainMachineEntityDescription +): + """Describe a RainMachine update.""" + + +UPDATE_DESCRIPTION = RainMachineUpdateEntityDescription( key="update", api_category=DATA_MACHINE_FIRMWARE_UPDATE_STATUS, ) diff --git a/homeassistant/components/rainmachine/util.py b/homeassistant/components/rainmachine/util.py index f3823d21164..c784c3c471f 100644 --- a/homeassistant/components/rainmachine/util.py +++ b/homeassistant/components/rainmachine/util.py @@ -63,7 +63,7 @@ def async_finish_entity_domain_replacements( old_entity_id = registry_entry.entity_id if strategy.remove_old_entity: - LOGGER.info('Removing old entity: "%s"', old_entity_id) + LOGGER.debug('Removing old entity: "%s"', old_entity_id) ent_reg.async_remove(old_entity_id) diff --git a/homeassistant/components/random/binary_sensor.py b/homeassistant/components/random/binary_sensor.py index 9d33ad52692..ae9a5886d59 100644 --- a/homeassistant/components/random/binary_sensor.py +++ b/homeassistant/components/random/binary_sensor.py @@ -59,10 +59,9 @@ class RandomBinarySensor(BinarySensorEntity): def __init__(self, config: Mapping[str, Any], entry_id: str | None = None) -> None: """Initialize the Random binary sensor.""" - self._attr_name = config.get(CONF_NAME) + self._attr_name = config[CONF_NAME] self._attr_device_class = config.get(CONF_DEVICE_CLASS) - if entry_id: - self._attr_unique_id = entry_id + self._attr_unique_id = entry_id async def async_update(self) -> None: """Get new state and update the sensor's state.""" diff --git a/homeassistant/components/random/config_flow.py b/homeassistant/components/random/config_flow.py index fcbd77916a9..35b7757580e 100644 --- a/homeassistant/components/random/config_flow.py +++ b/homeassistant/components/random/config_flow.py @@ -95,7 +95,7 @@ def _generate_schema(domain: str, flow_type: _FlowType) -> vol.Schema: async def 
choose_options_step(options: dict[str, Any]) -> str: - """Return next step_id for options flow according to template_type.""" + """Return next step_id for options flow according to entity_type.""" return cast(str, options["entity_type"]) @@ -106,8 +106,12 @@ def _validate_unit(options: dict[str, Any]) -> None: and (units := DEVICE_CLASS_UNITS.get(device_class)) and (unit := options.get(CONF_UNIT_OF_MEASUREMENT)) not in units ): + # Sort twice to make sure strings with same case-insensitive order of + # letters are sorted consistently still (sorted() is guaranteed stable). sorted_units = sorted( - [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + sorted( + [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + ), key=str.casefold, ) if len(sorted_units) == 1: @@ -122,7 +126,7 @@ def _validate_unit(options: dict[str, Any]) -> None: def validate_user_input( - template_type: str, + entity_type: str, ) -> Callable[ [SchemaCommonFlowHandler, dict[str, Any]], Coroutine[Any, Any, dict[str, Any]], @@ -136,10 +140,10 @@ def validate_user_input( _: SchemaCommonFlowHandler, user_input: dict[str, Any], ) -> dict[str, Any]: - """Add template type to user input.""" - if template_type == Platform.SENSOR: + """Add entity type to user input.""" + if entity_type == Platform.SENSOR: _validate_unit(user_input) - return {"entity_type": template_type} | user_input + return {"entity_type": entity_type} | user_input return _validate_user_input diff --git a/homeassistant/components/random/sensor.py b/homeassistant/components/random/sensor.py index 3c6e67c9918..aad4fcb851c 100644 --- a/homeassistant/components/random/sensor.py +++ b/homeassistant/components/random/sensor.py @@ -70,22 +70,22 @@ class RandomSensor(SensorEntity): """Representation of a Random number sensor.""" _attr_translation_key = "random" + _unrecorded_attributes = frozenset({ATTR_MAXIMUM, ATTR_MINIMUM}) def __init__(self, config: Mapping[str, Any], entry_id: str | None = None) -> None: """Initialize the Random sensor.""" - self._attr_name = config.get(CONF_NAME) - self._minimum = config.get(CONF_MINIMUM, DEFAULT_MIN) - self._maximum = config.get(CONF_MAXIMUM, DEFAULT_MAX) + self._attr_name = config[CONF_NAME] + self._minimum = config[CONF_MINIMUM] + self._maximum = config[CONF_MAXIMUM] self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT) self._attr_device_class = config.get(CONF_DEVICE_CLASS) self._attr_extra_state_attributes = { ATTR_MAXIMUM: self._maximum, ATTR_MINIMUM: self._minimum, } - if entry_id: - self._attr_unique_id = entry_id + self._attr_unique_id = entry_id async def async_update(self) -> None: - """Get a new number and updates the states.""" + """Get a new number and update the state.""" self._attr_native_value = randrange(self._minimum, self._maximum + 1) diff --git a/homeassistant/components/random/strings.json b/homeassistant/components/random/strings.json index 98072a21fe1..e5c5543e39f 100644 --- a/homeassistant/components/random/strings.json +++ b/homeassistant/components/random/strings.json @@ -1,4 +1,5 @@ { + "title": "Random", "config": { "step": { "binary_sensor": { @@ -19,12 +20,12 @@ "title": "Random sensor" }, "user": { - "description": "This helper allows you to create a helper that emits a random value.", + "description": "This helper allows you to create an entity that emits a random value.", "menu_options": { "binary_sensor": "Random binary sensor", "sensor": "Random sensor" }, - "title": "Random helper" + "title": "Create Random helper" } } }, diff 
--git a/homeassistant/components/raspberry_pi/__init__.py b/homeassistant/components/raspberry_pi/__init__.py index d1dcd04922f..8095eb9dfe0 100644 --- a/homeassistant/components/raspberry_pi/__init__.py +++ b/homeassistant/components/raspberry_pi/__init__.py @@ -2,10 +2,11 @@ from __future__ import annotations -from homeassistant.components.hassio import get_os_info, is_hassio +from homeassistant.components.hassio import get_os_info from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.hassio import is_hassio async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/raspberry_pi/manifest.json b/homeassistant/components/raspberry_pi/manifest.json index 5ed68154ce1..c8317f7ef1e 100644 --- a/homeassistant/components/raspberry_pi/manifest.json +++ b/homeassistant/components/raspberry_pi/manifest.json @@ -6,5 +6,6 @@ "config_flow": false, "dependencies": ["hardware"], "documentation": "https://www.home-assistant.io/integrations/raspberry_pi", - "integration_type": "hardware" + "integration_type": "hardware", + "quality_scale": "legacy" } diff --git a/homeassistant/components/raspyrfm/manifest.json b/homeassistant/components/raspyrfm/manifest.json index 0fa4ce77200..d001e2b1118 100644 --- a/homeassistant/components/raspyrfm/manifest.json +++ b/homeassistant/components/raspyrfm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/raspyrfm", "iot_class": "assumed_state", "loggers": ["raspyrfm_client"], + "quality_scale": "legacy", "requirements": ["raspyrfm-client==1.2.8"] } diff --git a/homeassistant/components/rdw/__init__.py b/homeassistant/components/rdw/__init__.py index f123db7c697..6051576026b 100644 --- a/homeassistant/components/rdw/__init__.py +++ b/homeassistant/components/rdw/__init__.py @@ -23,6 +23,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator: DataUpdateCoordinator[Vehicle] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_APK", update_interval=SCAN_INTERVAL, update_method=rdw.vehicle, diff --git a/homeassistant/components/rdw/manifest.json b/homeassistant/components/rdw/manifest.json index 7af3e861347..2ab90e55ef0 100644 --- a/homeassistant/components/rdw/manifest.json +++ b/homeassistant/components/rdw/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/rdw", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["vehicle==2.2.2"] } diff --git a/homeassistant/components/recollect_waste/__init__.py b/homeassistant/components/recollect_waste/__init__.py index bd01aed5473..1710fb8c816 100644 --- a/homeassistant/components/recollect_waste/__init__.py +++ b/homeassistant/components/recollect_waste/__init__.py @@ -52,6 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=( f"Place {entry.data[CONF_PLACE_ID]}, Service {entry.data[CONF_SERVICE_ID]}" ), @@ -109,6 +110,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await er.async_migrate_entries(hass, entry.entry_id, migrate_unique_id) - LOGGER.info("Migration to version %s successful", version) + LOGGER.debug("Migration to version %s successful", version) return True diff --git 
a/homeassistant/components/recollect_waste/config_flow.py b/homeassistant/components/recollect_waste/config_flow.py index 882eb6a00d2..299af2609e3 100644 --- a/homeassistant/components/recollect_waste/config_flow.py +++ b/homeassistant/components/recollect_waste/config_flow.py @@ -34,9 +34,9 @@ class RecollectWasteConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> RecollectWasteOptionsFlowHandler: """Define the config flow to handle options.""" - return RecollectWasteOptionsFlowHandler(config_entry) + return RecollectWasteOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -79,10 +79,6 @@ class RecollectWasteConfigFlow(ConfigFlow, domain=DOMAIN): class RecollectWasteOptionsFlowHandler(OptionsFlow): """Handle a Recollect Waste options flow.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize.""" - self._entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -96,7 +92,7 @@ class RecollectWasteOptionsFlowHandler(OptionsFlow): { vol.Optional( CONF_FRIENDLY_NAME, - default=self._entry.options.get(CONF_FRIENDLY_NAME), + default=self.config_entry.options.get(CONF_FRIENDLY_NAME), ): bool } ), diff --git a/homeassistant/components/recorder/auto_repairs/statistics/duplicates.py b/homeassistant/components/recorder/auto_repairs/statistics/duplicates.py index 06a5c5258f1..b73744ef0d1 100644 --- a/homeassistant/components/recorder/auto_repairs/statistics/duplicates.py +++ b/homeassistant/components/recorder/auto_repairs/statistics/duplicates.py @@ -247,12 +247,11 @@ def delete_statistics_meta_duplicates(instance: Recorder, session: Session) -> N """Identify and delete duplicated statistics_meta. This is used when migrating from schema version 28 to schema version 29. + Note: If this needs to be called during live schema migration it needs to + be modified to reload the statistics_meta_manager. 
""" deleted_statistics_rows = _delete_statistics_meta_duplicates(session) if deleted_statistics_rows: - statistics_meta_manager = instance.statistics_meta_manager - statistics_meta_manager.reset() - statistics_meta_manager.load(session) _LOGGER.info( "Deleted %s duplicated statistics_meta rows", deleted_statistics_rows ) diff --git a/homeassistant/components/recorder/const.py b/homeassistant/components/recorder/const.py index 066ae938971..409641e54c9 100644 --- a/homeassistant/components/recorder/const.py +++ b/homeassistant/components/recorder/const.py @@ -54,7 +54,6 @@ ATTR_APPLY_FILTER = "apply_filter" KEEPALIVE_TIME = 30 -STATISTICS_ROWS_SCHEMA_VERSION = 23 CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36 EVENT_TYPE_IDS_SCHEMA_VERSION = 37 STATES_META_SCHEMA_VERSION = 38 @@ -63,13 +62,15 @@ LAST_REPORTED_SCHEMA_VERSION = 43 LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28 INTEGRATION_PLATFORM_COMPILE_STATISTICS = "compile_statistics" -INTEGRATION_PLATFORM_VALIDATE_STATISTICS = "validate_statistics" INTEGRATION_PLATFORM_LIST_STATISTIC_IDS = "list_statistic_ids" +INTEGRATION_PLATFORM_UPDATE_STATISTICS_ISSUES = "update_statistics_issues" +INTEGRATION_PLATFORM_VALIDATE_STATISTICS = "validate_statistics" INTEGRATION_PLATFORM_METHODS = { INTEGRATION_PLATFORM_COMPILE_STATISTICS, - INTEGRATION_PLATFORM_VALIDATE_STATISTICS, INTEGRATION_PLATFORM_LIST_STATISTIC_IDS, + INTEGRATION_PLATFORM_UPDATE_STATISTICS_ISSUES, + INTEGRATION_PLATFORM_VALIDATE_STATISTICS, } diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index c57274317e3..76cf0a7c05e 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -7,7 +7,6 @@ from collections.abc import Callable, Iterable from concurrent.futures import CancelledError import contextlib from datetime import datetime, timedelta -from functools import cached_property import logging import queue import sqlite3 @@ -15,6 +14,7 @@ import threading import time from typing import TYPE_CHECKING, Any, cast +from propcache import cached_property import psutil_home_assistant as ha_psutil from sqlalchemy import create_engine, event as sqlalchemy_event, exc, select, update from sqlalchemy.engine import Engine @@ -63,7 +63,6 @@ from .const import ( MYSQLDB_URL_PREFIX, SQLITE_MAX_BIND_VARS, SQLITE_URL_PREFIX, - STATISTICS_ROWS_SCHEMA_VERSION, SupportedDialect, ) from .db_schema import ( @@ -79,16 +78,8 @@ from .db_schema import ( StatisticsShortTerm, ) from .executor import DBInterruptibleThreadPoolExecutor -from .migration import ( - EntityIDMigration, - EventIDPostMigration, - EventsContextIDMigration, - EventTypeIDMigration, - StatesContextIDMigration, -) from .models import DatabaseEngine, StatisticData, StatisticMetaData, UnsupportedDialect from .pool import POOL_SIZE, MutexPool, RecorderPool -from .queries import get_migration_changes from .table_managers.event_data import EventDataManager from .table_managers.event_types import EventTypeManager from .table_managers.recorder_runs import RecorderRunsManager @@ -121,7 +112,6 @@ from .util import ( build_mysqldb_conv, dburl_to_path, end_incomplete_runs, - execute_stmt_lambda_element, is_second_sunday, move_away_broken_database, session_scope, @@ -225,7 +215,6 @@ class Recorder(threading.Thread): self.event_session: Session | None = None self._get_session: Callable[[], Session] | None = None self._completed_first_database_setup: bool | None = None - self.async_migration_event = asyncio.Event() self.migration_in_progress = False 
self.migration_is_live = False self.use_legacy_events_index = False @@ -572,9 +561,11 @@ class Recorder(threading.Thread): ) @callback - def async_clear_statistics(self, statistic_ids: list[str]) -> None: + def async_clear_statistics( + self, statistic_ids: list[str], *, on_done: Callable[[], None] | None = None + ) -> None: """Clear statistics for a list of statistic_ids.""" - self.queue_task(ClearStatisticsTask(statistic_ids)) + self.queue_task(ClearStatisticsTask(on_done, statistic_ids)) @callback def async_update_statistics_metadata( @@ -583,11 +574,12 @@ class Recorder(threading.Thread): *, new_statistic_id: str | UndefinedType = UNDEFINED, new_unit_of_measurement: str | None | UndefinedType = UNDEFINED, + on_done: Callable[[], None] | None = None, ) -> None: """Update statistics metadata for a statistic_id.""" self.queue_task( UpdateStatisticsMetadataTask( - statistic_id, new_statistic_id, new_unit_of_measurement + on_done, statistic_id, new_statistic_id, new_unit_of_measurement ) ) @@ -739,12 +731,17 @@ class Recorder(threading.Thread): # First do non-live migration steps, if needed if schema_status.migration_needed: + # Do non-live schema migration result, schema_status = self._migrate_schema_offline(schema_status) if not result: self._notify_migration_failed() self.migration_in_progress = False return self.schema_version = schema_status.current_version + + # Do non-live data migration + self._migrate_data_offline(schema_status) + # Non-live migration is now completed, remaining steps are live self.migration_is_live = True @@ -798,24 +795,9 @@ class Recorder(threading.Thread): # since we want the frontend queries to avoid a thundering # herd of queries to find the statistics meta data if # there are a lot of statistics graphs on the frontend. - schema_version = self.schema_version - if schema_version >= STATISTICS_ROWS_SCHEMA_VERSION: - self.statistics_meta_manager.load(session) + self.statistics_meta_manager.load(session) - migration_changes: dict[str, int] = { - row[0]: row[1] - for row in execute_stmt_lambda_element(session, get_migration_changes()) - } - - for migrator_cls in ( - StatesContextIDMigration, - EventsContextIDMigration, - EventTypeIDMigration, - EntityIDMigration, - EventIDPostMigration, - ): - migrator = migrator_cls(schema_status.start_version, migration_changes) - migrator.do_migrate(self, session) + migration.migrate_data_live(self, self.get_session, schema_status) # We must only set the db ready after we have set the table managers # to active if there is no data to migrate. 
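The recorder hunks above thread an optional `on_done` callback through `async_clear_statistics` and `async_update_statistics_metadata`. Below is a minimal usage sketch of that callback, not part of the patch: it assumes a running `hass` with the recorder loaded, a hypothetical `sensor.example_energy` statistic id, and that `on_done` fires from the recorder worker once the queued task has been processed.

```python
from homeassistant.components.recorder import get_instance
from homeassistant.core import HomeAssistant, callback


@callback
def async_clear_example_statistics(hass: HomeAssistant) -> None:
    """Queue a ClearStatisticsTask and react once the recorder has processed it."""

    def _done() -> None:
        # Assumed to be invoked from the recorder worker thread, so hop back
        # onto the event loop before touching hass.
        hass.loop.call_soon_threadsafe(
            hass.bus.async_fire, "example_statistics_cleared"
        )

    get_instance(hass).async_clear_statistics(
        ["sensor.example_energy"], on_done=_done
    )
```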
@@ -934,10 +916,12 @@ class Recorder(threading.Thread): return False - @callback - def _async_migration_started(self) -> None: - """Set the migration started event.""" - self.async_migration_event.set() + def _migrate_data_offline( + self, schema_status: migration.SchemaValidationStatus + ) -> None: + """Migrate data.""" + with self.hass.timeout.freeze(DOMAIN): + migration.migrate_data_non_live(self, self.get_session, schema_status) def _migrate_schema_offline( self, schema_status: migration.SchemaValidationStatus @@ -963,7 +947,6 @@ class Recorder(threading.Thread): "Database upgrade in progress", "recorder_database_migration", ) - self.hass.add_job(self._async_migration_started) return self._migrate_schema(schema_status, True) def _migrate_schema( @@ -988,6 +971,7 @@ class Recorder(threading.Thread): new_schema_status = migration.SchemaValidationStatus( current_version=SCHEMA_VERSION, migration_needed=False, + non_live_data_migration_needed=False, schema_errors=set(), start_version=SCHEMA_VERSION, ) @@ -1144,7 +1128,6 @@ class Recorder(threading.Thread): # Map the event data to the StateAttributes table shared_attrs = shared_attrs_bytes.decode("utf-8") - dbstate.attributes = None # Matching attributes found in the pending commit if pending_event_data := state_attributes_manager.get_pending(shared_attrs): dbstate.state_attributes = pending_event_data @@ -1290,14 +1273,6 @@ class Recorder(threading.Thread): self.event_session = self.get_session() self.event_session.expire_on_commit = False - def _post_schema_migration(self, old_version: int, new_version: int) -> None: - """Run post schema migration tasks.""" - migration.post_schema_migration(self, old_version, new_version) - - def _post_migrate_entity_ids(self) -> bool: - """Post migrate entity_ids if needed.""" - return migration.post_migrate_entity_ids(self) - def _send_keep_alive(self) -> None: """Send a keep alive to keep the db connection open.""" assert self.event_session is not None diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index f84459675ae..fb57a1c73e2 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -162,14 +162,14 @@ class Unused(CHAR): """An unused column type that behaves like a string.""" -@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] -@compiles(Unused, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] +@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") +@compiles(Unused, "mysql", "mariadb", "sqlite") def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite.""" return "CHAR(0)" # Uses 1 byte on MySQL (no change on sqlite) -@compiles(Unused, "postgresql") # type: ignore[misc,no-untyped-call] +@compiles(Unused, "postgresql") def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: """Compile Unused as CHAR(1) on postgresql.""" return "CHAR(1)" # Uses 1 byte @@ -375,9 +375,8 @@ class EventData(Base): event: Event, dialect: SupportedDialect | None ) -> bytes: """Create shared_data from an event.""" - if dialect == SupportedDialect.POSTGRESQL: - bytes_result = json_bytes_strip_null(event.data) - bytes_result = json_bytes(event.data) + encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes + bytes_result = encoder(event.data) if len(bytes_result) > MAX_EVENT_DATA_BYTES: _LOGGER.warning( "Event 
data for %s exceed maximum size of %s bytes. " @@ -692,14 +691,16 @@ class StatisticsBase: duration: timedelta @classmethod - def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: - """Create object from a statistics with datatime objects.""" + def from_stats( + cls, metadata_id: int, stats: StatisticData, now_timestamp: float | None = None + ) -> Self: + """Create object from a statistics with datetime objects.""" return cls( # type: ignore[call-arg] metadata_id=metadata_id, created=None, - created_ts=time.time(), + created_ts=now_timestamp or time.time(), start=None, - start_ts=dt_util.utc_to_timestamp(stats["start"]), + start_ts=stats["start"].timestamp(), mean=stats.get("mean"), min=stats.get("min"), max=stats.get("max"), @@ -710,12 +711,17 @@ class StatisticsBase: ) @classmethod - def from_stats_ts(cls, metadata_id: int, stats: StatisticDataTimestamp) -> Self: + def from_stats_ts( + cls, + metadata_id: int, + stats: StatisticDataTimestamp, + now_timestamp: float | None = None, + ) -> Self: """Create object from a statistics with timestamps.""" return cls( # type: ignore[call-arg] metadata_id=metadata_id, created=None, - created_ts=time.time(), + created_ts=now_timestamp or time.time(), start=None, start_ts=stats["start_ts"], mean=stats.get("mean"), diff --git a/homeassistant/components/recorder/executor.py b/homeassistant/components/recorder/executor.py index 8102c769ac1..6b8192d1e14 100644 --- a/homeassistant/components/recorder/executor.py +++ b/homeassistant/components/recorder/executor.py @@ -55,7 +55,7 @@ class DBInterruptibleThreadPoolExecutor(InterruptibleThreadPoolExecutor): num_threads = len(self._threads) if num_threads < self._max_workers: - thread_name = "%s_%d" % (self._thread_name_prefix or self, num_threads) + thread_name = f"{self._thread_name_prefix or self}_{num_threads}" executor_thread = threading.Thread( name=thread_name, target=_worker_with_shutdown_hook, diff --git a/homeassistant/components/recorder/history/__init__.py b/homeassistant/components/recorder/history/__init__.py index de7002eb6a4..a28027adb1a 100644 --- a/homeassistant/components/recorder/history/__init__.py +++ b/homeassistant/components/recorder/history/__init__.py @@ -8,8 +8,8 @@ from typing import Any from sqlalchemy.orm.session import Session from homeassistant.core import HomeAssistant, State +from homeassistant.helpers.recorder import get_instance -from ... 
import recorder from ..filters import Filters from .const import NEED_ATTRIBUTE_DOMAINS, SIGNIFICANT_DOMAINS from .modern import ( @@ -44,7 +44,7 @@ def get_full_significant_states_with_session( no_attributes: bool = False, ) -> dict[str, list[State]]: """Return a dict of significant states during a time period.""" - if not recorder.get_instance(hass).states_meta_manager.active: + if not get_instance(hass).states_meta_manager.active: from .legacy import ( # pylint: disable=import-outside-toplevel get_full_significant_states_with_session as _legacy_get_full_significant_states_with_session, ) @@ -69,7 +69,7 @@ def get_last_state_changes( hass: HomeAssistant, number_of_states: int, entity_id: str ) -> dict[str, list[State]]: """Return the last number_of_states.""" - if not recorder.get_instance(hass).states_meta_manager.active: + if not get_instance(hass).states_meta_manager.active: from .legacy import ( # pylint: disable=import-outside-toplevel get_last_state_changes as _legacy_get_last_state_changes, ) @@ -93,7 +93,7 @@ def get_significant_states( compressed_state_format: bool = False, ) -> dict[str, list[State | dict[str, Any]]]: """Return a dict of significant states during a time period.""" - if not recorder.get_instance(hass).states_meta_manager.active: + if not get_instance(hass).states_meta_manager.active: from .legacy import ( # pylint: disable=import-outside-toplevel get_significant_states as _legacy_get_significant_states, ) @@ -129,7 +129,7 @@ def get_significant_states_with_session( compressed_state_format: bool = False, ) -> dict[str, list[State | dict[str, Any]]]: """Return a dict of significant states during a time period.""" - if not recorder.get_instance(hass).states_meta_manager.active: + if not get_instance(hass).states_meta_manager.active: from .legacy import ( # pylint: disable=import-outside-toplevel get_significant_states_with_session as _legacy_get_significant_states_with_session, ) @@ -163,7 +163,7 @@ def state_changes_during_period( include_start_time_state: bool = True, ) -> dict[str, list[State]]: """Return a list of states that changed during a time period.""" - if not recorder.get_instance(hass).states_meta_manager.active: + if not get_instance(hass).states_meta_manager.active: from .legacy import ( # pylint: disable=import-outside-toplevel state_changes_during_period as _legacy_state_changes_during_period, ) diff --git a/homeassistant/components/recorder/history/common.py b/homeassistant/components/recorder/history/common.py deleted file mode 100644 index 3427ee9d7ee..00000000000 --- a/homeassistant/components/recorder/history/common.py +++ /dev/null @@ -1,11 +0,0 @@ -"""Common functions for history.""" - -from __future__ import annotations - -from homeassistant.core import HomeAssistant - -from ... import recorder - - -def _schema_version(hass: HomeAssistant) -> int: - return recorder.get_instance(hass).schema_version diff --git a/homeassistant/components/recorder/history/legacy.py b/homeassistant/components/recorder/history/legacy.py index 8ee3cd30316..da90b296fe3 100644 --- a/homeassistant/components/recorder/history/legacy.py +++ b/homeassistant/components/recorder/history/legacy.py @@ -19,24 +19,14 @@ from sqlalchemy.sql.lambdas import StatementLambdaElement from homeassistant.const import COMPRESSED_STATE_LAST_UPDATED, COMPRESSED_STATE_STATE from homeassistant.core import HomeAssistant, State, split_entity_id +from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ... 
import recorder from ..db_schema import RecorderRuns, StateAttributes, States from ..filters import Filters -from ..models import ( - process_datetime_to_timestamp, - process_timestamp, - process_timestamp_to_utc_isoformat, -) -from ..models.legacy import ( - LegacyLazyState, - LegacyLazyStatePreSchema31, - legacy_row_to_compressed_state, - legacy_row_to_compressed_state_pre_schema_31, -) +from ..models import process_timestamp, process_timestamp_to_utc_isoformat +from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state from ..util import execute_stmt_lambda_element, session_scope -from .common import _schema_version from .const import ( LAST_CHANGED_KEY, NEED_ATTRIBUTE_DOMAINS, @@ -137,7 +127,7 @@ _FIELD_MAP_PRE_SCHEMA_31 = { def _lambda_stmt_and_join_attributes( - schema_version: int, no_attributes: bool, include_last_changed: bool = True + no_attributes: bool, include_last_changed: bool = True ) -> tuple[StatementLambdaElement, bool]: """Return the lambda_stmt and if StateAttributes should be joined. @@ -148,54 +138,19 @@ def _lambda_stmt_and_join_attributes( # without the attributes fields and do not join the # state_attributes table if no_attributes: - if schema_version >= 31: - if include_last_changed: - return ( - lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR)), - False, - ) - return ( - lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR_NO_LAST_CHANGED)), - False, - ) if include_last_changed: return ( - lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR_PRE_SCHEMA_31)), + lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR)), False, ) return ( - lambda_stmt( - lambda: select(*_QUERY_STATE_NO_ATTR_NO_LAST_CHANGED_PRE_SCHEMA_31) - ), - False, - ) - # If we in the process of migrating schema we do - # not want to join the state_attributes table as we - # do not know if it will be there yet - if schema_version < 25: - if include_last_changed: - return ( - lambda_stmt(lambda: select(*_QUERY_STATES_PRE_SCHEMA_25)), - False, - ) - return ( - lambda_stmt(lambda: select(*_QUERY_STATES_PRE_SCHEMA_25_NO_LAST_CHANGED)), + lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR_NO_LAST_CHANGED)), False, ) - if schema_version >= 31: - if include_last_changed: - return lambda_stmt(lambda: select(*_QUERY_STATES)), True - return lambda_stmt(lambda: select(*_QUERY_STATES_NO_LAST_CHANGED)), True - # Finally if no migration is in progress and no_attributes - # was not requested, we query both attributes columns and - # join state_attributes if include_last_changed: - return lambda_stmt(lambda: select(*_QUERY_STATES_PRE_SCHEMA_31)), True - return ( - lambda_stmt(lambda: select(*_QUERY_STATES_NO_LAST_CHANGED_PRE_SCHEMA_31)), - True, - ) + return lambda_stmt(lambda: select(*_QUERY_STATES)), True + return lambda_stmt(lambda: select(*_QUERY_STATES_NO_LAST_CHANGED)), True def get_significant_states( @@ -228,7 +183,6 @@ def get_significant_states( def _significant_states_stmt( - schema_version: int, start_time: datetime, end_time: datetime | None, entity_ids: list[str], @@ -237,71 +191,43 @@ def _significant_states_stmt( ) -> StatementLambdaElement: """Query the database for significant state changes.""" stmt, join_attributes = _lambda_stmt_and_join_attributes( - schema_version, no_attributes, include_last_changed=not significant_changes_only + no_attributes, include_last_changed=not significant_changes_only ) if ( len(entity_ids) == 1 and significant_changes_only and split_entity_id(entity_ids[0])[0] not in SIGNIFICANT_DOMAINS ): - if schema_version >= 31: - stmt += lambda q: q.filter( - 
(States.last_changed_ts == States.last_updated_ts) - | States.last_changed_ts.is_(None) - ) - else: - stmt += lambda q: q.filter( - (States.last_changed == States.last_updated) - | States.last_changed.is_(None) - ) + stmt += lambda q: q.filter( + (States.last_changed_ts == States.last_updated_ts) + | States.last_changed_ts.is_(None) + ) elif significant_changes_only: - if schema_version >= 31: - stmt += lambda q: q.filter( - or_( - *[ - States.entity_id.like(entity_domain) - for entity_domain in SIGNIFICANT_DOMAINS_ENTITY_ID_LIKE - ], - ( - (States.last_changed_ts == States.last_updated_ts) - | States.last_changed_ts.is_(None) - ), - ) - ) - else: - stmt += lambda q: q.filter( - or_( - *[ - States.entity_id.like(entity_domain) - for entity_domain in SIGNIFICANT_DOMAINS_ENTITY_ID_LIKE - ], - ( - (States.last_changed == States.last_updated) - | States.last_changed.is_(None) - ), - ) + stmt += lambda q: q.filter( + or_( + *[ + States.entity_id.like(entity_domain) + for entity_domain in SIGNIFICANT_DOMAINS_ENTITY_ID_LIKE + ], + ( + (States.last_changed_ts == States.last_updated_ts) + | States.last_changed_ts.is_(None) + ), ) + ) stmt += lambda q: q.filter(States.entity_id.in_(entity_ids)) - if schema_version >= 31: - start_time_ts = start_time.timestamp() - stmt += lambda q: q.filter(States.last_updated_ts > start_time_ts) - if end_time: - end_time_ts = end_time.timestamp() - stmt += lambda q: q.filter(States.last_updated_ts < end_time_ts) - else: - stmt += lambda q: q.filter(States.last_updated > start_time) - if end_time: - stmt += lambda q: q.filter(States.last_updated < end_time) + start_time_ts = start_time.timestamp() + stmt += lambda q: q.filter(States.last_updated_ts > start_time_ts) + if end_time: + end_time_ts = end_time.timestamp() + stmt += lambda q: q.filter(States.last_updated_ts < end_time_ts) if join_attributes: stmt += lambda q: q.outerjoin( StateAttributes, States.attributes_id == StateAttributes.attributes_id ) - if schema_version >= 31: - stmt += lambda q: q.order_by(States.entity_id, States.last_updated_ts) - else: - stmt += lambda q: q.order_by(States.entity_id, States.last_updated) + stmt += lambda q: q.order_by(States.entity_id, States.last_updated_ts) return stmt @@ -334,7 +260,6 @@ def get_significant_states_with_session( if not entity_ids: raise ValueError("entity_ids must be provided") stmt = _significant_states_stmt( - _schema_version(hass), start_time, end_time, entity_ids, @@ -389,7 +314,6 @@ def get_full_significant_states_with_session( def _state_changed_during_period_stmt( - schema_version: int, start_time: datetime, end_time: datetime | None, entity_id: str, @@ -398,47 +322,28 @@ def _state_changed_during_period_stmt( limit: int | None, ) -> StatementLambdaElement: stmt, join_attributes = _lambda_stmt_and_join_attributes( - schema_version, no_attributes, include_last_changed=False + no_attributes, include_last_changed=False ) - if schema_version >= 31: - start_time_ts = start_time.timestamp() - stmt += lambda q: q.filter( - ( - (States.last_changed_ts == States.last_updated_ts) - | States.last_changed_ts.is_(None) - ) - & (States.last_updated_ts > start_time_ts) - ) - else: - stmt += lambda q: q.filter( - ( - (States.last_changed == States.last_updated) - | States.last_changed.is_(None) - ) - & (States.last_updated > start_time) + start_time_ts = start_time.timestamp() + stmt += lambda q: q.filter( + ( + (States.last_changed_ts == States.last_updated_ts) + | States.last_changed_ts.is_(None) ) + & (States.last_updated_ts > start_time_ts) + ) if end_time: - if 
schema_version >= 31: - end_time_ts = end_time.timestamp() - stmt += lambda q: q.filter(States.last_updated_ts < end_time_ts) - else: - stmt += lambda q: q.filter(States.last_updated < end_time) + end_time_ts = end_time.timestamp() + stmt += lambda q: q.filter(States.last_updated_ts < end_time_ts) stmt += lambda q: q.filter(States.entity_id == entity_id) if join_attributes: stmt += lambda q: q.outerjoin( StateAttributes, States.attributes_id == StateAttributes.attributes_id ) if descending: - if schema_version >= 31: - stmt += lambda q: q.order_by( - States.entity_id, States.last_updated_ts.desc() - ) - else: - stmt += lambda q: q.order_by(States.entity_id, States.last_updated.desc()) - elif schema_version >= 31: - stmt += lambda q: q.order_by(States.entity_id, States.last_updated_ts) + stmt += lambda q: q.order_by(States.entity_id, States.last_updated_ts.desc()) else: - stmt += lambda q: q.order_by(States.entity_id, States.last_updated) + stmt += lambda q: q.order_by(States.entity_id, States.last_updated_ts) if limit: stmt += lambda q: q.limit(limit) @@ -461,7 +366,6 @@ def state_changes_during_period( entity_ids = [entity_id.lower()] with session_scope(hass=hass, read_only=True) as session: stmt = _state_changed_during_period_stmt( - _schema_version(hass), start_time, end_time, entity_id, @@ -484,33 +388,21 @@ def state_changes_during_period( def _get_last_state_changes_stmt( - schema_version: int, number_of_states: int, entity_id: str + number_of_states: int, entity_id: str ) -> StatementLambdaElement: stmt, join_attributes = _lambda_stmt_and_join_attributes( - schema_version, False, include_last_changed=False + False, include_last_changed=False + ) + stmt += lambda q: q.where( + States.state_id + == ( + select(States.state_id) + .filter(States.entity_id == entity_id) + .order_by(States.last_updated_ts.desc()) + .limit(number_of_states) + .subquery() + ).c.state_id ) - if schema_version >= 31: - stmt += lambda q: q.where( - States.state_id - == ( - select(States.state_id) - .filter(States.entity_id == entity_id) - .order_by(States.last_updated_ts.desc()) - .limit(number_of_states) - .subquery() - ).c.state_id - ) - else: - stmt += lambda q: q.where( - States.state_id - == ( - select(States.state_id) - .filter(States.entity_id == entity_id) - .order_by(States.last_updated.desc()) - .limit(number_of_states) - .subquery() - ).c.state_id - ) if join_attributes: stmt += lambda q: q.outerjoin( StateAttributes, States.attributes_id == StateAttributes.attributes_id @@ -528,9 +420,7 @@ def get_last_state_changes( entity_ids = [entity_id_lower] with session_scope(hass=hass, read_only=True) as session: - stmt = _get_last_state_changes_stmt( - _schema_version(hass), number_of_states, entity_id_lower - ) + stmt = _get_last_state_changes_stmt(number_of_states, entity_id_lower) states = list(execute_stmt_lambda_element(session, stmt)) return cast( dict[str, list[State]], @@ -546,7 +436,6 @@ def get_last_state_changes( def _get_states_for_entities_stmt( - schema_version: int, run_start: datetime, utc_point_in_time: datetime, entity_ids: list[str], @@ -554,58 +443,34 @@ def _get_states_for_entities_stmt( ) -> StatementLambdaElement: """Baked query to get states for specific entities.""" stmt, join_attributes = _lambda_stmt_and_join_attributes( - schema_version, no_attributes, include_last_changed=True + no_attributes, include_last_changed=True ) # We got an include-list of entities, accelerate the query by filtering already # in the inner query. 
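The legacy-history simplification above drops every `schema_version` branch and always composes the query against the `*_ts` timestamp columns. As a reference for that pattern, here is a self-contained sketch of the incremental `lambda_stmt` composition these hunks rely on; the function name, the selected columns, and the `sensor.kitchen` entity id are illustrative, and the `States` model is assumed to come from the recorder's `db_schema`.

```python
from datetime import datetime, timezone

from sqlalchemy import lambda_stmt, select
from sqlalchemy.sql.lambdas import StatementLambdaElement

from homeassistant.components.recorder.db_schema import States


def states_after_stmt(start_time: datetime, entity_id: str) -> StatementLambdaElement:
    """Build a cached lambda statement filtered on the timestamp column."""
    # Convert the datetime once, outside the lambdas, just as the patched
    # helpers do with start_time.timestamp().
    start_time_ts = start_time.timestamp()
    stmt = lambda_stmt(lambda: select(States.state, States.last_updated_ts))
    stmt += lambda q: q.filter(States.entity_id == entity_id)
    stmt += lambda q: q.filter(States.last_updated_ts > start_time_ts)
    stmt += lambda q: q.order_by(States.last_updated_ts)
    return stmt


# Example: states_after_stmt(datetime.now(timezone.utc), "sensor.kitchen") could
# then be run with session.execute() or the recorder's lambda-element helper.
```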
- if schema_version >= 31: - run_start_ts = process_timestamp(run_start).timestamp() - utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time) - stmt += lambda q: q.join( - ( - most_recent_states_for_entities_by_date := ( - select( - States.entity_id.label("max_entity_id"), - func.max(States.last_updated_ts).label("max_last_updated"), - ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < utc_point_in_time_ts) - ) - .filter(States.entity_id.in_(entity_ids)) - .group_by(States.entity_id) - .subquery() - ) - ), - and_( - States.entity_id - == most_recent_states_for_entities_by_date.c.max_entity_id, - States.last_updated_ts - == most_recent_states_for_entities_by_date.c.max_last_updated, - ), - ) - else: - stmt += lambda q: q.join( - ( - most_recent_states_for_entities_by_date := select( + run_start_ts = process_timestamp(run_start).timestamp() + utc_point_in_time_ts = utc_point_in_time.timestamp() + stmt += lambda q: q.join( + ( + most_recent_states_for_entities_by_date := ( + select( States.entity_id.label("max_entity_id"), - func.max(States.last_updated).label("max_last_updated"), + func.max(States.last_updated_ts).label("max_last_updated"), ) .filter( - (States.last_updated >= run_start) - & (States.last_updated < utc_point_in_time) + (States.last_updated_ts >= run_start_ts) + & (States.last_updated_ts < utc_point_in_time_ts) ) .filter(States.entity_id.in_(entity_ids)) .group_by(States.entity_id) .subquery() - ), - and_( - States.entity_id - == most_recent_states_for_entities_by_date.c.max_entity_id, - States.last_updated - == most_recent_states_for_entities_by_date.c.max_last_updated, - ), - ) + ) + ), + and_( + States.entity_id == most_recent_states_for_entities_by_date.c.max_entity_id, + States.last_updated_ts + == most_recent_states_for_entities_by_date.c.max_last_updated, + ), + ) if join_attributes: stmt += lambda q: q.outerjoin( StateAttributes, (States.attributes_id == StateAttributes.attributes_id) @@ -622,17 +487,16 @@ def _get_rows_with_session( no_attributes: bool = False, ) -> Iterable[Row]: """Return the states at a specific point in time.""" - schema_version = _schema_version(hass) if len(entity_ids) == 1: return execute_stmt_lambda_element( session, _get_single_entity_states_stmt( - schema_version, utc_point_in_time, entity_ids[0], no_attributes + utc_point_in_time, entity_ids[0], no_attributes ), ) if run is None: - run = recorder.get_instance(hass).recorder_runs_manager.get(utc_point_in_time) + run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) if run is None or process_timestamp(run.start) > utc_point_in_time: # History did not run before utc_point_in_time @@ -641,13 +505,12 @@ def _get_rows_with_session( # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. 
stmt = _get_states_for_entities_stmt( - schema_version, run.start, utc_point_in_time, entity_ids, no_attributes + run.start, utc_point_in_time, entity_ids, no_attributes ) return execute_stmt_lambda_element(session, stmt) def _get_single_entity_states_stmt( - schema_version: int, utc_point_in_time: datetime, entity_id: str, no_attributes: bool = False, @@ -655,27 +518,17 @@ def _get_single_entity_states_stmt( # Use an entirely different (and extremely fast) query if we only # have a single entity id stmt, join_attributes = _lambda_stmt_and_join_attributes( - schema_version, no_attributes, include_last_changed=True + no_attributes, include_last_changed=True ) - if schema_version >= 31: - utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time) - stmt += ( - lambda q: q.filter( - States.last_updated_ts < utc_point_in_time_ts, - States.entity_id == entity_id, - ) - .order_by(States.last_updated_ts.desc()) - .limit(1) - ) - else: - stmt += ( - lambda q: q.filter( - States.last_updated < utc_point_in_time, - States.entity_id == entity_id, - ) - .order_by(States.last_updated.desc()) - .limit(1) + utc_point_in_time_ts = utc_point_in_time.timestamp() + stmt += ( + lambda q: q.filter( + States.last_updated_ts < utc_point_in_time_ts, + States.entity_id == entity_id, ) + .order_by(States.last_updated_ts.desc()) + .limit(1) + ) if join_attributes: stmt += lambda q: q.outerjoin( StateAttributes, States.attributes_id == StateAttributes.attributes_id @@ -705,26 +558,15 @@ def _sorted_states_to_dict( each list of states, otherwise our graphs won't start on the Y axis correctly. """ - schema_version = _schema_version(hass) - _process_timestamp: Callable[[datetime], float | str] - field_map = _FIELD_MAP if schema_version >= 31 else _FIELD_MAP_PRE_SCHEMA_31 state_class: Callable[ [Row, dict[str, dict[str, Any]], datetime | None], State | dict[str, Any] ] if compressed_state_format: - if schema_version >= 31: - state_class = legacy_row_to_compressed_state - else: - state_class = legacy_row_to_compressed_state_pre_schema_31 - _process_timestamp = process_datetime_to_timestamp + state_class = legacy_row_to_compressed_state attr_time = COMPRESSED_STATE_LAST_UPDATED attr_state = COMPRESSED_STATE_STATE else: - if schema_version >= 31: - state_class = LegacyLazyState - else: - state_class = LegacyLazyStatePreSchema31 - _process_timestamp = process_timestamp_to_utc_isoformat + state_class = LegacyLazyState attr_time = LAST_CHANGED_KEY attr_state = STATE_KEY @@ -781,7 +623,7 @@ def _sorted_states_to_dict( prev_state = first_state.state ent_results.append(state_class(first_state, attr_cache, None)) - state_idx = field_map["state"] + state_idx = _FIELD_MAP["state"] # # minimal_response only makes sense with last_updated == last_updated @@ -790,20 +632,7 @@ def _sorted_states_to_dict( # # With minimal response we do not care about attribute # changes so we can filter out duplicate states - if schema_version < 31: - last_updated_idx = field_map["last_updated"] - for row in group: - if (state := row[state_idx]) != prev_state: - ent_results.append( - { - attr_state: state, - attr_time: _process_timestamp(row[last_updated_idx]), - } - ) - prev_state = state - continue - - last_updated_ts_idx = field_map["last_updated_ts"] + last_updated_ts_idx = _FIELD_MAP["last_updated_ts"] if compressed_state_format: for row in group: if (state := row[state_idx]) != prev_state: diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 3cbec60e83f..9159bbc6181 
100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -24,9 +24,9 @@ from sqlalchemy.orm.session import Session from homeassistant.const import COMPRESSED_STATE_LAST_UPDATED, COMPRESSED_STATE_STATE from homeassistant.core import HomeAssistant, State, split_entity_id +from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ... import recorder from ..const import LAST_REPORTED_SCHEMA_VERSION from ..db_schema import SHARED_ATTR_OR_LEGACY_ATTRIBUTES, StateAttributes, States from ..filters import Filters @@ -231,7 +231,7 @@ def get_significant_states_with_session( raise ValueError("entity_ids must be provided") entity_id_to_metadata_id: dict[str, int | None] | None = None metadata_ids_in_significant_domains: list[int] = [] - instance = recorder.get_instance(hass) + instance = get_instance(hass) if not ( entity_id_to_metadata_id := instance.states_meta_manager.get_many( entity_ids, session, False @@ -251,7 +251,7 @@ def get_significant_states_with_session( run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) ): include_start_time_state = False - start_time_ts = dt_util.utc_to_timestamp(start_time) + start_time_ts = start_time.timestamp() end_time_ts = datetime_to_timestamp_or_none(end_time) single_metadata_id = metadata_ids[0] if len(metadata_ids) == 1 else None stmt = lambda_stmt( @@ -393,14 +393,14 @@ def state_changes_during_period( ) -> dict[str, list[State]]: """Return states changes during UTC period start_time - end_time.""" has_last_reported = ( - recorder.get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION + get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION ) if not entity_id: raise ValueError("entity_id must be provided") entity_ids = [entity_id.lower()] with session_scope(hass=hass, read_only=True) as session: - instance = recorder.get_instance(hass) + instance = get_instance(hass) if not ( possible_metadata_id := instance.states_meta_manager.get( entity_id, session, False @@ -416,7 +416,7 @@ def state_changes_during_period( run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) ): include_start_time_state = False - start_time_ts = dt_util.utc_to_timestamp(start_time) + start_time_ts = start_time.timestamp() end_time_ts = datetime_to_timestamp_or_none(end_time) stmt = lambda_stmt( lambda: _state_changed_during_period_stmt( @@ -507,7 +507,7 @@ def get_last_state_changes( ) -> dict[str, list[State]]: """Return the last number_of_states.""" has_last_reported = ( - recorder.get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION + get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION ) entity_id_lower = entity_id.lower() entity_ids = [entity_id_lower] @@ -517,7 +517,7 @@ def get_last_state_changes( # because the metadata_id_last_updated_ts index is in ascending order. 
with session_scope(hass=hass, read_only=True) as session: - instance = recorder.get_instance(hass) + instance = get_instance(hass) if not ( possible_metadata_id := instance.states_meta_manager.get( entity_id, session, False @@ -604,7 +604,7 @@ def _get_run_start_ts_for_utc_point_in_time( hass: HomeAssistant, utc_point_in_time: datetime ) -> float | None: """Return the start time of a run.""" - run = recorder.get_instance(hass).recorder_runs_manager.get(utc_point_in_time) + run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) if ( run is not None and (run_start := process_timestamp(run.start)) < utc_point_in_time diff --git a/homeassistant/components/recorder/icons.json b/homeassistant/components/recorder/icons.json index 1090401abd5..9e41637184a 100644 --- a/homeassistant/components/recorder/icons.json +++ b/homeassistant/components/recorder/icons.json @@ -1,8 +1,16 @@ { "services": { - "purge": "mdi:database-sync", - "purge_entities": "mdi:database-sync", - "disable": "mdi:database-off", - "enable": "mdi:database" + "purge": { + "service": "mdi:database-sync" + }, + "purge_entities": { + "service": "mdi:database-sync" + }, + "disable": { + "service": "mdi:database-off" + }, + "enable": { + "service": "mdi:database" + } } } diff --git a/homeassistant/components/recorder/manifest.json b/homeassistant/components/recorder/manifest.json index 2be4b6862ba..93ffb12d18c 100644 --- a/homeassistant/components/recorder/manifest.json +++ b/homeassistant/components/recorder/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_push", "quality_scale": "internal", "requirements": [ - "SQLAlchemy==2.0.31", + "SQLAlchemy==2.0.36", "fnv-hash-fast==1.0.2", "psutil-home-assistant==0.0.1" ] diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 7127a576580..b28ca4399c8 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -91,27 +91,24 @@ from .queries import ( find_states_context_ids_to_migrate, find_unmigrated_short_term_statistics_rows, find_unmigrated_statistics_rows, + get_migration_changes, has_entity_ids_to_migrate, has_event_type_to_migrate, has_events_context_ids_to_migrate, has_states_context_ids_to_migrate, + has_used_states_entity_ids, has_used_states_event_ids, migrate_single_short_term_statistics_row_to_timestamp, migrate_single_statistics_row_to_timestamp, ) -from .statistics import get_start_time -from .tasks import ( - CommitTask, - EntityIDPostMigrationTask, - PostSchemaMigrationTask, - RecorderTask, - StatisticsTimestampMigrationCleanupTask, -) +from .statistics import cleanup_statistics_timestamp_migration, get_start_time +from .tasks import RecorderTask from .util import ( database_job_retry_wrapper, + database_job_retry_wrapper_method, execute_stmt_lambda_element, get_index_by_name, - retryable_database_job, + retryable_database_job_method, session_scope, ) @@ -128,6 +125,11 @@ MIGRATION_NOTE_OFFLINE = ( "Home Assistant will not start until the upgrade is completed. Please be patient " "and do not turn off or restart Home Assistant while the upgrade is in progress!" ) +MIGRATION_NOTE_MINUTES = ( + "Note: this may take several minutes on large databases and slow machines. " + "Please be patient!" +) +MIGRATION_NOTE_WHILE = "This will take a while; please be patient!" 
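A recurring pattern in this diff is the frozen, keyword-only dataclass: `RainMachineEntityDescription` and `RainMachineUpdateEntityDescription` earlier, and `SchemaValidationStatus` in the migration hunk just below. The following small sketch shows why `kw_only=True` matters when a subclass adds a required field to a base that already declares defaults; the class and field names here are invented for illustration only.

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass(frozen=True)
class Base:
    key: str
    icon: str | None = None  # the base already has a field with a default


# Without kw_only=True, adding the required field below would raise
# "TypeError: non-default argument 'api_category' follows default argument"
# at class-definition time, because it would be ordered after 'icon'.
@dataclass(frozen=True, kw_only=True)
class Description(Base):
    api_category: str


desc = Description(key="update", api_category="firmware")
assert desc.api_category == "firmware"
```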
_EMPTY_ENTITY_ID = "missing.entity_id" _EMPTY_EVENT_TYPE = "missing_event_type" @@ -198,12 +200,13 @@ def get_schema_version(session_maker: Callable[[], Session]) -> int | None: return None -@dataclass(frozen=True) +@dataclass(frozen=True, kw_only=True) class SchemaValidationStatus: """Store schema validation status.""" current_version: int migration_needed: bool + non_live_data_migration_needed: bool schema_errors: set[str] start_version: int @@ -233,8 +236,17 @@ def validate_db_schema( # columns may otherwise not exist etc. schema_errors = _find_schema_errors(hass, instance, session_maker) + schema_migration_needed = not is_current + _non_live_data_migration_needed = non_live_data_migration_needed( + instance, session_maker, current_version + ) + return SchemaValidationStatus( - current_version, not is_current, schema_errors, current_version + current_version=current_version, + non_live_data_migration_needed=_non_live_data_migration_needed, + migration_needed=schema_migration_needed or _non_live_data_migration_needed, + schema_errors=schema_errors, + start_version=current_version, ) @@ -251,7 +263,10 @@ def _find_schema_errors( def live_migration(schema_status: SchemaValidationStatus) -> bool: """Check if live migration is possible.""" - return schema_status.current_version >= LIVE_MIGRATION_MIN_SCHEMA_VERSION + return ( + schema_status.current_version >= LIVE_MIGRATION_MIN_SCHEMA_VERSION + and not schema_status.non_live_data_migration_needed + ) def pre_migrate_schema(engine: Engine) -> None: @@ -288,15 +303,17 @@ def _migrate_schema( "The database is about to upgrade from schema version %s to %s%s", current_version, end_version, - f". {MIGRATION_NOTE_OFFLINE}" - if current_version < LIVE_MIGRATION_MIN_SCHEMA_VERSION - else "", + ( + f". {MIGRATION_NOTE_OFFLINE}" + if current_version < LIVE_MIGRATION_MIN_SCHEMA_VERSION + else "" + ), ) schema_status = dataclass_replace(schema_status, current_version=end_version) for version in range(current_version, end_version): new_version = version + 1 - _LOGGER.info("Upgrading recorder db schema to version %s", new_version) + _LOGGER.warning("Upgrading recorder db schema to version %s", new_version) _apply_update(instance, hass, engine, session_maker, new_version, start_version) with session_scope(session=session_maker()) as session: session.add(SchemaChanges(schema_version=new_version)) @@ -345,16 +362,71 @@ def migrate_schema_live( states_correct_db_schema(instance, schema_errors) events_correct_db_schema(instance, schema_errors) - start_version = schema_status.start_version - if start_version != SCHEMA_VERSION: - instance.queue_task(PostSchemaMigrationTask(start_version, SCHEMA_VERSION)) - # Make sure the post schema migration task is committed in case - # the next task does not have commit_before = True - instance.queue_task(CommitTask()) - return schema_status +def _get_migration_changes(session: Session) -> dict[str, int]: + """Return migration changes as a dict.""" + migration_changes: dict[str, int] = { + row[0]: row[1] + for row in execute_stmt_lambda_element(session, get_migration_changes()) + } + return migration_changes + + +def non_live_data_migration_needed( + instance: Recorder, + session_maker: Callable[[], Session], + schema_version: int, +) -> bool: + """Return True if non-live data migration is needed. + + This must only be called if database schema is current. 
+ """ + migration_needed = False + with session_scope(session=session_maker()) as session: + migration_changes = _get_migration_changes(session) + for migrator_cls in NON_LIVE_DATA_MIGRATORS: + migrator = migrator_cls(schema_version, migration_changes) + migration_needed |= migrator.needs_migrate(instance, session) + + return migration_needed + + +def migrate_data_non_live( + instance: Recorder, + session_maker: Callable[[], Session], + schema_status: SchemaValidationStatus, +) -> None: + """Do non-live data migration. + + This must be called after non-live schema migration is completed. + """ + with session_scope(session=session_maker()) as session: + migration_changes = _get_migration_changes(session) + + for migrator_cls in NON_LIVE_DATA_MIGRATORS: + migrator = migrator_cls(schema_status.start_version, migration_changes) + migrator.migrate_all(instance, session_maker) + + +def migrate_data_live( + instance: Recorder, + session_maker: Callable[[], Session], + schema_status: SchemaValidationStatus, +) -> None: + """Queue live schema migration tasks. + + This must be called after live schema migration is completed. + """ + with session_scope(session=session_maker()) as session: + migration_changes = _get_migration_changes(session) + + for migrator_cls in LIVE_DATA_MIGRATORS: + migrator = migrator_cls(schema_status.start_version, migration_changes) + migrator.queue_migration(instance, session) + + def _create_index( session_maker: Callable[[], Session], table_name: str, index_name: str ) -> None: @@ -373,11 +445,10 @@ def _create_index( index = index_list[0] _LOGGER.debug("Creating %s index", index_name) _LOGGER.warning( - "Adding index `%s` to table `%s`. Note: this can take several " - "minutes on large databases and slow machines. Please " - "be patient!", + "Adding index `%s` to table `%s`. %s", index_name, table_name, + MIGRATION_NOTE_MINUTES, ) with session_scope(session=session_maker()) as session: try: @@ -422,11 +493,10 @@ def _drop_index( DO NOT USE THIS FUNCTION IN ANY OPERATION THAT TAKES USER INPUT. """ _LOGGER.warning( - "Dropping index `%s` from table `%s`. Note: this can take several " - "minutes on large databases and slow machines. Please " - "be patient!", + "Dropping index `%s` from table `%s`. %s", index_name, table_name, + MIGRATION_NOTE_MINUTES, ) index_to_drop: str | None = None with session_scope(session=session_maker()) as session: @@ -472,13 +542,10 @@ def _add_columns( ) -> None: """Add columns to a table.""" _LOGGER.warning( - ( - "Adding columns %s to table %s. Note: this can take several " - "minutes on large databases and slow machines. Please " - "be patient!" - ), + "Adding columns %s to table %s. %s", ", ".join(column.split(" ")[0] for column in columns_def), table_name, + MIGRATION_NOTE_MINUTES, ) columns_def = [f"ADD {col_def}" for col_def in columns_def] @@ -487,11 +554,7 @@ def _add_columns( try: connection = session.connection() connection.execute( - text( - "ALTER TABLE {table} {columns_def}".format( - table=table_name, columns_def=", ".join(columns_def) - ) - ) + text(f"ALTER TABLE {table_name} {', '.join(columns_def)}") ) except (InternalError, OperationalError, ProgrammingError): # Some engines support adding all columns at once, @@ -534,21 +597,16 @@ def _modify_columns( return _LOGGER.warning( - ( - "Modifying columns %s in table %s. Note: this can take several " - "minutes on large databases and slow machines. Please " - "be patient!" - ), + "Modifying columns %s in table %s. 
%s", ", ".join(column.split(" ")[0] for column in columns_def), table_name, + MIGRATION_NOTE_MINUTES, ) if engine.dialect.name == SupportedDialect.POSTGRESQL: columns_def = [ - "ALTER {column} TYPE {type}".format( - **dict(zip(["column", "type"], col_def.split(" ", 1), strict=False)) - ) - for col_def in columns_def + f"ALTER {column} TYPE {type_}" + for column, type_ in (col_def.split(" ", 1) for col_def in columns_def) ] elif engine.dialect.name == "mssql": columns_def = [f"ALTER COLUMN {col_def}" for col_def in columns_def] @@ -559,11 +617,7 @@ def _modify_columns( try: connection = session.connection() connection.execute( - text( - "ALTER TABLE {table} {columns_def}".format( - table=table_name, columns_def=", ".join(columns_def) - ) - ) + text(f"ALTER TABLE {table_name} {', '.join(columns_def)}") ) except (InternalError, OperationalError): _LOGGER.info("Unable to use quick column modify. Modifying 1 by 1") @@ -1417,6 +1471,12 @@ class _SchemaVersion32Migrator(_SchemaVersionMigrator, target_version=32): _drop_index(self.session_maker, "events", "ix_events_event_type_time_fired") _drop_index(self.session_maker, "states", "ix_states_last_updated") _drop_index(self.session_maker, "events", "ix_events_time_fired") + with session_scope(session=self.session_maker()) as session: + # In version 31 we migrated all the time_fired, last_updated, and last_changed + # columns to be timestamps. In version 32 we need to wipe the old columns + # since they are no longer used and take up a significant amount of space. + assert self.instance.engine is not None, "engine should never be None" + _wipe_old_string_time_columns(self.instance, self.instance.engine, session) class _SchemaVersion33Migrator(_SchemaVersionMigrator, target_version=33): @@ -1495,6 +1555,12 @@ class _SchemaVersion35Migrator(_SchemaVersionMigrator, target_version=35): # ix_statistics_start and ix_statistics_statistic_id_start are still used # for the post migration cleanup and can be removed in a future version. + # In version 34 we migrated all the created, start, and last_reset + # columns to be timestamps. In version 35 we need to wipe the old columns + # since they are no longer used and take up a significant amount of space. + while not cleanup_statistics_timestamp_migration(self.instance): + pass + class _SchemaVersion36Migrator(_SchemaVersionMigrator, target_version=36): def _apply_update(self) -> None: @@ -1781,10 +1847,9 @@ def _migrate_statistics_columns_to_timestamp_removing_duplicates( except IntegrityError as ex: _LOGGER.error( "Statistics table contains duplicate entries: %s; " - "Cleaning up duplicates and trying again; " - "This will take a while; " - "Please be patient!", + "Cleaning up duplicates and trying again; %s", ex, + MIGRATION_NOTE_WHILE, ) # There may be duplicated statistics entries, delete duplicates # and try again @@ -1812,10 +1877,9 @@ def _correct_table_character_set_and_collation( """Correct issues detected by validate_db_schema.""" # Attempt to convert the table to utf8mb4 _LOGGER.warning( - "Updating character set and collation of table %s to utf8mb4. " - "Note: this can take several minutes on large databases and slow " - "machines. Please be patient!", + "Updating character set and collation of table %s to utf8mb4. %s", table, + MIGRATION_NOTE_MINUTES, ) with ( contextlib.suppress(SQLAlchemyError), @@ -1833,40 +1897,6 @@ def _correct_table_character_set_and_collation( ) -def post_schema_migration( - instance: Recorder, - old_version: int, - new_version: int, -) -> None: - """Post schema migration. 
- - Run any housekeeping tasks after the schema migration has completed. - - Post schema migration is run after the schema migration has completed - and the queue has been processed to ensure that we reduce the memory - pressure since events are held in memory until the queue is processed - which is blocked from being processed until the schema migration is - complete. - """ - if old_version < 32 <= new_version: - # In version 31 we migrated all the time_fired, last_updated, and last_changed - # columns to be timestamps. In version 32 we need to wipe the old columns - # since they are no longer used and take up a significant amount of space. - assert instance.event_session is not None - assert instance.engine is not None - _wipe_old_string_time_columns(instance, instance.engine, instance.event_session) - if old_version < 35 <= new_version: - # In version 34 we migrated all the created, start, and last_reset - # columns to be timestamps. In version 35 we need to wipe the old columns - # since they are no longer used and take up a significant amount of space. - _wipe_old_string_statistics_columns(instance) - - -def _wipe_old_string_statistics_columns(instance: Recorder) -> None: - """Wipe old string statistics columns to save space.""" - instance.queue_task(StatisticsTimestampMigrationCleanupTask()) - - @database_job_retry_wrapper("Wipe old string time columns", 3) def _wipe_old_string_time_columns( instance: Recorder, engine: Engine, session: Session @@ -2154,7 +2184,6 @@ def _generate_ulid_bytes_at_time(timestamp: float | None) -> bytes: return ulid_to_bytes(ulid_at_time(timestamp or time())) -@retryable_database_job("post migrate states entity_ids to states_meta") def post_migrate_entity_ids(instance: Recorder) -> bool: """Remove old entity_id strings from states. @@ -2169,58 +2198,10 @@ def post_migrate_entity_ids(instance: Recorder) -> bool: # If there is more work to do return False # so that we can be called again - if is_done: - # Drop the old indexes since they are no longer needed - _drop_index(session_maker, "states", LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX) - _LOGGER.debug("Cleanup legacy entity_ids done=%s", is_done) return is_done -@retryable_database_job("cleanup_legacy_event_ids") -def cleanup_legacy_states_event_ids(instance: Recorder) -> bool: - """Remove old event_id index from states. - - We used to link states to events using the event_id column but we no - longer store state changed events in the events table. - - If all old states have been purged and existing states are in the new - format we can drop the index since it can take up ~10MB per 1M rows. - """ - session_maker = instance.get_session - _LOGGER.debug("Cleanup legacy entity_ids") - with session_scope(session=session_maker()) as session: - result = session.execute(has_used_states_event_ids()).scalar() - # In the future we may migrate existing states to the new format - # but in practice very few of these still exist in production and - # removing the index is the likely all that needs to happen. 
- all_gone = not result - - if all_gone: - # Only drop the index if there are no more event_ids in the states table - # ex all NULL - assert instance.engine is not None, "engine should never be None" - if instance.dialect_name == SupportedDialect.SQLITE: - # SQLite does not support dropping foreign key constraints - # so we have to rebuild the table - fk_remove_ok = rebuild_sqlite_table(session_maker, instance.engine, States) - else: - try: - _drop_foreign_key_constraints( - session_maker, instance.engine, TABLE_STATES, "event_id" - ) - except (InternalError, OperationalError): - fk_remove_ok = False - else: - fk_remove_ok = True - if fk_remove_ok: - _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) - instance.use_legacy_events_index = False - _mark_migration_done(session, EventIDPostMigration) - - return True - - def _initialize_database(session: Session) -> bool: """Initialize a new database. @@ -2275,8 +2256,6 @@ class MigrationTask(RecorderTask): if not self.migrator.migrate_data(instance): # Schedule a new migration task if this one didn't finish instance.queue_task(MigrationTask(self.migrator)) - else: - self.migrator.migration_done(instance, None) @dataclass(slots=True) @@ -2287,45 +2266,53 @@ class CommitBeforeMigrationTask(MigrationTask): @dataclass(frozen=True, kw_only=True) -class NeedsMigrateResult: - """Container for the return value of BaseRunTimeMigration.needs_migrate_impl.""" +class DataMigrationStatus: + """Container for data migrator status.""" needs_migrate: bool migration_done: bool -class BaseRunTimeMigration(ABC): - """Base class for run time migrations.""" +class BaseMigration(ABC): + """Base class for migrations.""" + index_to_drop: tuple[str, str] | None = None required_schema_version = 0 migration_version = 1 migration_id: str - task = MigrationTask def __init__(self, schema_version: int, migration_changes: dict[str, int]) -> None: """Initialize a new BaseRunTimeMigration.""" self.schema_version = schema_version self.migration_changes = migration_changes - def do_migrate(self, instance: Recorder, session: Session) -> None: - """Start migration if needed.""" - if self.needs_migrate(instance, session): - instance.queue_task(self.task(self)) - else: - self.migration_done(instance, session) - - @staticmethod @abstractmethod - def migrate_data(instance: Recorder) -> bool: - """Migrate some data, returns True if migration is completed.""" + def migrate_data(self, instance: Recorder) -> bool: + """Migrate some data, return True if migration is completed.""" - def migration_done(self, instance: Recorder, session: Session | None) -> None: + def _migrate_data(self, instance: Recorder) -> bool: + """Migrate some data, returns True if migration is completed.""" + status = self.migrate_data_impl(instance) + if status.migration_done: + if self.index_to_drop is not None: + table, index = self.index_to_drop + _drop_index(instance.get_session, table, index) + with session_scope(session=instance.get_session()) as session: + self.migration_done(instance, session) + _mark_migration_done(session, self.__class__) + return not status.needs_migrate + + @abstractmethod + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: + """Migrate some data, return if the migration needs to run and if it is done.""" + + def migration_done(self, instance: Recorder, session: Session) -> None: """Will be called after migrate returns True or if migration is not needed.""" @abstractmethod def needs_migrate_impl( self, instance: Recorder, session: Session - ) -> 
NeedsMigrateResult: + ) -> DataMigrationStatus: """Return if the migration needs to run and if it is done.""" def needs_migrate(self, instance: Recorder, session: Session) -> bool: @@ -2339,20 +2326,86 @@ class BaseRunTimeMigration(ABC): """ if self.schema_version < self.required_schema_version: # Schema is too old, we must have to migrate + _LOGGER.info( + "Data migration '%s' needed, schema too old", self.migration_id + ) return True if self.migration_changes.get(self.migration_id, -1) >= self.migration_version: # The migration changes table indicates that the migration has been done + _LOGGER.debug( + "Data migration '%s' not needed, already completed", self.migration_id + ) return False # We do not know if the migration is done from the - # migration changes table so we must check the data + # migration changes table so we must check the index and data # This is the slow path + if ( + self.index_to_drop is not None + and get_index_by_name(session, self.index_to_drop[0], self.index_to_drop[1]) + is not None + ): + _LOGGER.info( + "Data migration '%s' needed, index to drop still exists", + self.migration_id, + ) + return True needs_migrate = self.needs_migrate_impl(instance, session) if needs_migrate.migration_done: _mark_migration_done(session, self.__class__) + _LOGGER.info( + "Data migration '%s' needed: %s", + self.migration_id, + needs_migrate.needs_migrate, + ) return needs_migrate.needs_migrate -class BaseRunTimeMigrationWithQuery(BaseRunTimeMigration): +class BaseOffLineMigration(BaseMigration): + """Base class for off line migrations.""" + + def migrate_all( + self, instance: Recorder, session_maker: Callable[[], Session] + ) -> None: + """Migrate all data.""" + with session_scope(session=session_maker()) as session: + if not self.needs_migrate(instance, session): + _LOGGER.debug("Migration not needed for '%s'", self.migration_id) + self.migration_done(instance, session) + return + _LOGGER.warning( + "The database is about to do data migration step '%s', %s", + self.migration_id, + MIGRATION_NOTE_OFFLINE, + ) + while not self.migrate_data(instance): + pass + _LOGGER.warning("Data migration step '%s' completed", self.migration_id) + + @database_job_retry_wrapper_method("migrate data", 10) + def migrate_data(self, instance: Recorder) -> bool: + """Migrate some data, returns True if migration is completed.""" + return self._migrate_data(instance) + + +class BaseRunTimeMigration(BaseMigration): + """Base class for run time migrations.""" + + task = MigrationTask + + def queue_migration(self, instance: Recorder, session: Session) -> None: + """Start migration if needed.""" + if self.needs_migrate(instance, session): + instance.queue_task(self.task(self)) + else: + self.migration_done(instance, session) + + @retryable_database_job_method("migrate data") + def migrate_data(self, instance: Recorder) -> bool: + """Migrate some data, returns True if migration is completed.""" + return self._migrate_data(instance) + + +class BaseMigrationWithQuery(BaseMigration): """Base class for run time migrations.""" @abstractmethod @@ -2361,23 +2414,23 @@ class BaseRunTimeMigrationWithQuery(BaseRunTimeMigration): def needs_migrate_impl( self, instance: Recorder, session: Session - ) -> NeedsMigrateResult: + ) -> DataMigrationStatus: """Return if the migration needs to run.""" needs_migrate = execute_stmt_lambda_element(session, self.needs_migrate_query()) - return NeedsMigrateResult( + return DataMigrationStatus( needs_migrate=bool(needs_migrate), migration_done=not needs_migrate ) -class 
StatesContextIDMigration(BaseRunTimeMigrationWithQuery): +class StatesContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate states context_ids to binary format.""" required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION migration_id = "state_context_id_as_binary" + migration_version = 2 + index_to_drop = ("states", "ix_states_context_id") - @staticmethod - @retryable_database_job("migrate states context_ids to binary format") - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate states context_ids to use binary format, return True if completed.""" _to_bytes = _context_id_to_bytes session_maker = instance.get_session @@ -2402,31 +2455,25 @@ class StatesContextIDMigration(BaseRunTimeMigrationWithQuery): for state_id, last_updated_ts, context_id, context_user_id, context_parent_id in states ], ) - # If there is more work to do return False - # so that we can be called again - if is_done := not states: - _mark_migration_done(session, StatesContextIDMigration) - - if is_done: - _drop_index(session_maker, "states", "ix_states_context_id") + is_done = not states _LOGGER.debug("Migrating states context_ids to binary format: done=%s", is_done) - return is_done + return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) def needs_migrate_query(self) -> StatementLambdaElement: """Return the query to check if the migration needs to run.""" return has_states_context_ids_to_migrate() -class EventsContextIDMigration(BaseRunTimeMigrationWithQuery): +class EventsContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate events context_ids to binary format.""" required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION migration_id = "event_context_id_as_binary" + migration_version = 2 + index_to_drop = ("events", "ix_events_context_id") - @staticmethod - @retryable_database_job("migrate events context_ids to binary format") - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate events context_ids to use binary format, return True if completed.""" _to_bytes = _context_id_to_bytes session_maker = instance.get_session @@ -2451,35 +2498,23 @@ class EventsContextIDMigration(BaseRunTimeMigrationWithQuery): for event_id, time_fired_ts, context_id, context_user_id, context_parent_id in events ], ) - # If there is more work to do return False - # so that we can be called again - if is_done := not events: - _mark_migration_done(session, EventsContextIDMigration) - - if is_done: - _drop_index(session_maker, "events", "ix_events_context_id") + is_done = not events _LOGGER.debug("Migrating events context_ids to binary format: done=%s", is_done) - return is_done + return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) def needs_migrate_query(self) -> StatementLambdaElement: """Return the query to check if the migration needs to run.""" return has_events_context_ids_to_migrate() -class EventTypeIDMigration(BaseRunTimeMigrationWithQuery): +class EventTypeIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate event_type to event_type_ids.""" required_schema_version = EVENT_TYPE_IDS_SCHEMA_VERSION migration_id = "event_type_id_migration" - task = CommitBeforeMigrationTask - # We have to commit before to make sure there are - # no new pending event_types about to be added to - # the db since this happens live - @staticmethod - 
@retryable_database_job("migrate events event_types to event_type_ids") - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate event_type to event_type_ids, return True if completed.""" session_maker = instance.get_session _LOGGER.debug("Migrating event_types") @@ -2532,37 +2567,23 @@ class EventTypeIDMigration(BaseRunTimeMigrationWithQuery): ], ) - # If there is more work to do return False - # so that we can be called again - if is_done := not events: - _mark_migration_done(session, EventTypeIDMigration) + is_done = not events _LOGGER.debug("Migrating event_types done=%s", is_done) - return is_done - - def migration_done(self, instance: Recorder, session: Session | None) -> None: - """Will be called after migrate returns True.""" - _LOGGER.debug("Activating event_types manager as all data is migrated") - instance.event_type_manager.active = True + return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) def needs_migrate_query(self) -> StatementLambdaElement: """Check if the data is migrated.""" return has_event_type_to_migrate() -class EntityIDMigration(BaseRunTimeMigrationWithQuery): +class EntityIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate entity_ids to states_meta.""" required_schema_version = STATES_META_SCHEMA_VERSION migration_id = "entity_id_migration" - task = CommitBeforeMigrationTask - # We have to commit before to make sure there are - # no new pending states_meta about to be added to - # the db since this happens live - @staticmethod - @retryable_database_job("migrate states entity_ids to states_meta") - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate entity_ids to states_meta, return True if completed. We do this in two steps because we need the history queries to work @@ -2625,42 +2646,10 @@ class EntityIDMigration(BaseRunTimeMigrationWithQuery): ], ) - # If there is more work to do return False - # so that we can be called again - if is_done := not states: - _mark_migration_done(session, EntityIDMigration) + is_done = not states _LOGGER.debug("Migrating entity_ids done=%s", is_done) - return is_done - - def migration_done(self, instance: Recorder, _session: Session | None) -> None: - """Will be called after migrate returns True.""" - # The migration has finished, now we start the post migration - # to remove the old entity_id data from the states table - # at this point we can also start using the StatesMeta table - # so we set active to True - _LOGGER.debug("Activating states_meta manager as all data is migrated") - instance.states_meta_manager.active = True - session_generator = ( - contextlib.nullcontext(_session) - if _session - else session_scope(session=instance.get_session()) - ) - with ( - contextlib.suppress(SQLAlchemyError), - session_generator as session, - ): - # If ix_states_entity_id_last_updated_ts still exists - # on the states table it means the entity id migration - # finished by the EntityIDPostMigrationTask did not - # complete because they restarted in the middle of it. We need - # to pick back up where we left off. 
- if get_index_by_name( - session, - TABLE_STATES, - LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX, - ): - instance.queue_task(EntityIDPostMigrationTask()) + return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) def needs_migrate_query(self) -> StatementLambdaElement: """Check if the data is migrated.""" @@ -2674,10 +2663,49 @@ class EventIDPostMigration(BaseRunTimeMigration): task = MigrationTask migration_version = 2 - @staticmethod - def migrate_data(instance: Recorder) -> bool: - """Migrate some data, returns True if migration is completed.""" - return cleanup_legacy_states_event_ids(instance) + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: + """Remove old event_id index from states, returns True if completed. + + We used to link states to events using the event_id column but we no + longer store state changed events in the events table. + + If all old states have been purged and existing states are in the new + format we can drop the index since it can take up ~10MB per 1M rows. + """ + session_maker = instance.get_session + _LOGGER.debug("Cleanup legacy entity_ids") + with session_scope(session=session_maker()) as session: + result = session.execute(has_used_states_event_ids()).scalar() + # In the future we may migrate existing states to the new format + # but in practice very few of these still exist in production and + # removing the index is the likely all that needs to happen. + all_gone = not result + + fk_remove_ok = False + if all_gone: + # Only drop the index if there are no more event_ids in the states table + # ex all NULL + assert instance.engine is not None, "engine should never be None" + if instance.dialect_name == SupportedDialect.SQLITE: + # SQLite does not support dropping foreign key constraints + # so we have to rebuild the table + fk_remove_ok = rebuild_sqlite_table( + session_maker, instance.engine, States + ) + else: + try: + _drop_foreign_key_constraints( + session_maker, instance.engine, TABLE_STATES, "event_id" + ) + except (InternalError, OperationalError): + fk_remove_ok = False + else: + fk_remove_ok = True + if fk_remove_ok: + _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) + instance.use_legacy_events_index = False + + return DataMigrationStatus(needs_migrate=False, migration_done=fk_remove_ok) @staticmethod def _legacy_event_id_foreign_key_exists(instance: Recorder) -> bool: @@ -2698,21 +2726,51 @@ class EventIDPostMigration(BaseRunTimeMigration): def needs_migrate_impl( self, instance: Recorder, session: Session - ) -> NeedsMigrateResult: + ) -> DataMigrationStatus: """Return if the migration needs to run.""" if self.schema_version <= LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION: - return NeedsMigrateResult(needs_migrate=False, migration_done=False) + return DataMigrationStatus(needs_migrate=False, migration_done=False) if get_index_by_name( session, TABLE_STATES, LEGACY_STATES_EVENT_ID_INDEX ) is not None or self._legacy_event_id_foreign_key_exists(instance): instance.use_legacy_events_index = True - return NeedsMigrateResult(needs_migrate=True, migration_done=False) - return NeedsMigrateResult(needs_migrate=False, migration_done=True) + return DataMigrationStatus(needs_migrate=True, migration_done=False) + return DataMigrationStatus(needs_migrate=False, migration_done=True) -def _mark_migration_done( - session: Session, migration: type[BaseRunTimeMigration] -) -> None: +class EntityIDPostMigration(BaseMigrationWithQuery, BaseOffLineMigration): + """Migration to remove old entity_id 
strings from states. + + Introduced in HA Core 2023.4 by PR #89557. + """ + + migration_id = "entity_id_post_migration" + index_to_drop = (TABLE_STATES, LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX) + + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: + """Migrate some data, returns True if migration is completed.""" + is_done = post_migrate_entity_ids(instance) + return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) + + def needs_migrate_query(self) -> StatementLambdaElement: + """Check if the data is migrated.""" + return has_used_states_entity_ids() + + +NON_LIVE_DATA_MIGRATORS: tuple[type[BaseOffLineMigration], ...] = ( + StatesContextIDMigration, # Introduced in HA Core 2023.4 + EventsContextIDMigration, # Introduced in HA Core 2023.4 + EventTypeIDMigration, # Introduced in HA Core 2023.4 by PR #89465 + EntityIDMigration, # Introduced in HA Core 2023.4 by PR #89557 + EntityIDPostMigration, # Introduced in HA Core 2023.4 by PR #89557 +) + +LIVE_DATA_MIGRATORS: tuple[type[BaseRunTimeMigration], ...] = ( + EventIDPostMigration, # Introduced in HA Core 2023.4 by PR #89901 +) + + +def _mark_migration_done(session: Session, migration: type[BaseMigration]) -> None: """Mark a migration as done in the database.""" session.merge( MigrationChanges( @@ -2736,10 +2794,7 @@ def rebuild_sqlite_table( orig_name = table_table.name temp_name = f"{table_table.name}_temp_{int(time())}" - _LOGGER.warning( - "Rebuilding SQLite table %s; This will take a while; Please be patient!", - orig_name, - ) + _LOGGER.warning("Rebuilding SQLite table %s; %s", orig_name, MIGRATION_NOTE_WHILE) try: # 12 step SQLite table rebuild diff --git a/homeassistant/components/recorder/models/__init__.py b/homeassistant/components/recorder/models/__init__.py index d43a1da161e..ea7a6c86854 100644 --- a/homeassistant/components/recorder/models/__init__.py +++ b/homeassistant/components/recorder/models/__init__.py @@ -23,7 +23,6 @@ from .statistics import ( ) from .time import ( datetime_to_timestamp_or_none, - process_datetime_to_timestamp, process_timestamp, process_timestamp_to_utc_isoformat, timestamp_to_datetime_or_none, @@ -47,7 +46,6 @@ __all__ = [ "datetime_to_timestamp_or_none", "extract_event_type_ids", "extract_metadata_ids", - "process_datetime_to_timestamp", "process_timestamp", "process_timestamp_to_utc_isoformat", "row_to_compressed_state", diff --git a/homeassistant/components/recorder/models/legacy.py b/homeassistant/components/recorder/models/legacy.py index 4b32ae65748..a469aa49ab2 100644 --- a/homeassistant/components/recorder/models/legacy.py +++ b/homeassistant/components/recorder/models/legacy.py @@ -17,166 +17,7 @@ from homeassistant.core import Context, State import homeassistant.util.dt as dt_util from .state_attributes import decode_attributes_from_source -from .time import ( - process_datetime_to_timestamp, - process_timestamp, - process_timestamp_to_utc_isoformat, -) - - -class LegacyLazyStatePreSchema31(State): - """A lazy version of core State before schema 31.""" - - __slots__ = [ - "_row", - "_attributes", - "_last_changed", - "_last_updated", - "_context", - "attr_cache", - ] - - def __init__( # pylint: disable=super-init-not-called - self, - row: Row, - attr_cache: dict[str, dict[str, Any]], - start_time: datetime | None, - ) -> None: - """Init the lazy state.""" - self._row = row - self.entity_id: str = self._row.entity_id - self.state = self._row.state or "" - self._attributes: dict[str, Any] | None = None - self._last_changed: datetime | None = 
start_time - self._last_reported: datetime | None = start_time - self._last_updated: datetime | None = start_time - self._context: Context | None = None - self.attr_cache = attr_cache - - @property # type: ignore[override] - def attributes(self) -> dict[str, Any]: - """State attributes.""" - if self._attributes is None: - self._attributes = decode_attributes_from_row_legacy( - self._row, self.attr_cache - ) - return self._attributes - - @attributes.setter - def attributes(self, value: dict[str, Any]) -> None: - """Set attributes.""" - self._attributes = value - - @property - def context(self) -> Context: - """State context.""" - if self._context is None: - self._context = Context(id=None) - return self._context - - @context.setter - def context(self, value: Context) -> None: - """Set context.""" - self._context = value - - @property - def last_changed(self) -> datetime: - """Last changed datetime.""" - if self._last_changed is None: - if (last_changed := self._row.last_changed) is not None: - self._last_changed = process_timestamp(last_changed) - else: - self._last_changed = self.last_updated - return self._last_changed - - @last_changed.setter - def last_changed(self, value: datetime) -> None: - """Set last changed datetime.""" - self._last_changed = value - - @property - def last_reported(self) -> datetime: - """Last reported datetime.""" - if self._last_reported is None: - self._last_reported = self.last_updated - return self._last_reported - - @last_reported.setter - def last_reported(self, value: datetime) -> None: - """Set last reported datetime.""" - self._last_reported = value - - @property - def last_updated(self) -> datetime: - """Last updated datetime.""" - if self._last_updated is None: - self._last_updated = process_timestamp(self._row.last_updated) - return self._last_updated - - @last_updated.setter - def last_updated(self, value: datetime) -> None: - """Set last updated datetime.""" - self._last_updated = value - - def as_dict(self) -> dict[str, Any]: # type: ignore[override] - """Return a dict representation of the LazyState. - - Async friendly. - - To be used for JSON serialization. 
- """ - if self._last_changed is None and self._last_updated is None: - last_updated_isoformat = process_timestamp_to_utc_isoformat( - self._row.last_updated - ) - if ( - self._row.last_changed is None - or self._row.last_changed == self._row.last_updated - ): - last_changed_isoformat = last_updated_isoformat - else: - last_changed_isoformat = process_timestamp_to_utc_isoformat( - self._row.last_changed - ) - else: - last_updated_isoformat = self.last_updated.isoformat() - if self.last_changed == self.last_updated: - last_changed_isoformat = last_updated_isoformat - else: - last_changed_isoformat = self.last_changed.isoformat() - return { - "entity_id": self.entity_id, - "state": self.state, - "attributes": self._attributes or self.attributes, - "last_changed": last_changed_isoformat, - "last_updated": last_updated_isoformat, - } - - -def legacy_row_to_compressed_state_pre_schema_31( - row: Row, - attr_cache: dict[str, dict[str, Any]], - start_time: datetime | None, -) -> dict[str, Any]: - """Convert a database row to a compressed state before schema 31.""" - comp_state = { - COMPRESSED_STATE_STATE: row.state, - COMPRESSED_STATE_ATTRIBUTES: decode_attributes_from_row_legacy(row, attr_cache), - } - if start_time: - comp_state[COMPRESSED_STATE_LAST_UPDATED] = start_time.timestamp() - else: - row_last_updated: datetime = row.last_updated - comp_state[COMPRESSED_STATE_LAST_UPDATED] = process_datetime_to_timestamp( - row_last_updated - ) - if ( - row_changed_changed := row.last_changed - ) and row_last_updated != row_changed_changed: - comp_state[COMPRESSED_STATE_LAST_CHANGED] = process_datetime_to_timestamp( - row_changed_changed - ) - return comp_state +from .time import process_timestamp class LegacyLazyState(State): @@ -187,6 +28,7 @@ class LegacyLazyState(State): "_attributes", "_last_changed_ts", "_last_updated_ts", + "_last_reported_ts", "_context", "attr_cache", ] @@ -204,7 +46,7 @@ class LegacyLazyState(State): self.state = self._row.state or "" self._attributes: dict[str, Any] | None = None self._last_updated_ts: float | None = self._row.last_updated_ts or ( - dt_util.utc_to_timestamp(start_time) if start_time else None + start_time.timestamp() if start_time else None ) self._last_changed_ts: float | None = ( self._row.last_changed_ts or self._last_updated_ts @@ -304,7 +146,7 @@ def legacy_row_to_compressed_state( COMPRESSED_STATE_ATTRIBUTES: decode_attributes_from_row_legacy(row, attr_cache), } if start_time: - comp_state[COMPRESSED_STATE_LAST_UPDATED] = dt_util.utc_to_timestamp(start_time) + comp_state[COMPRESSED_STATE_LAST_UPDATED] = start_time.timestamp() else: row_last_updated_ts: float = row.last_updated_ts comp_state[COMPRESSED_STATE_LAST_UPDATED] = row_last_updated_ts diff --git a/homeassistant/components/recorder/models/state.py b/homeassistant/components/recorder/models/state.py index 139522a3d20..fbf73e75025 100644 --- a/homeassistant/components/recorder/models/state.py +++ b/homeassistant/components/recorder/models/state.py @@ -3,10 +3,10 @@ from __future__ import annotations from datetime import datetime -from functools import cached_property import logging from typing import TYPE_CHECKING, Any +from propcache import cached_property from sqlalchemy.engine.row import Row from homeassistant.const import ( @@ -96,6 +96,29 @@ class LazyState(State): assert self._last_updated_ts is not None return dt_util.utc_from_timestamp(self._last_updated_ts) + @cached_property + def last_updated_timestamp(self) -> float: # type: ignore[override] + """Last updated timestamp.""" + if 
TYPE_CHECKING: + assert self._last_updated_ts is not None + return self._last_updated_ts + + @cached_property + def last_changed_timestamp(self) -> float: # type: ignore[override] + """Last changed timestamp.""" + ts = self._last_changed_ts or self._last_updated_ts + if TYPE_CHECKING: + assert ts is not None + return ts + + @cached_property + def last_reported_timestamp(self) -> float: # type: ignore[override] + """Last reported timestamp.""" + ts = self._last_reported_ts or self._last_updated_ts + if TYPE_CHECKING: + assert ts is not None + return ts + def as_dict(self) -> dict[str, Any]: # type: ignore[override] """Return a dict representation of the LazyState. diff --git a/homeassistant/components/recorder/models/time.py b/homeassistant/components/recorder/models/time.py index 6295060c8d3..33218000faa 100644 --- a/homeassistant/components/recorder/models/time.py +++ b/homeassistant/components/recorder/models/time.py @@ -52,22 +52,9 @@ def process_timestamp_to_utc_isoformat(ts: datetime | None) -> str | None: return ts.astimezone(dt_util.UTC).isoformat() -def process_datetime_to_timestamp(ts: datetime) -> float: - """Process a datebase datetime to epoch. - - Mirrors the behavior of process_timestamp_to_utc_isoformat - except it returns the epoch time. - """ - if ts.tzinfo is None or ts.tzinfo == dt_util.UTC: - return dt_util.utc_to_timestamp(ts) - return ts.timestamp() - - def datetime_to_timestamp_or_none(dt: datetime | None) -> float | None: """Convert a datetime to a timestamp.""" - if dt is None: - return None - return dt_util.utc_to_timestamp(dt) + return None if dt is None else dt.timestamp() def timestamp_to_datetime_or_none(ts: float | None) -> datetime | None: diff --git a/homeassistant/components/recorder/pool.py b/homeassistant/components/recorder/pool.py index 30f8fa8d07a..fc2a8ccb1cc 100644 --- a/homeassistant/components/recorder/pool.py +++ b/homeassistant/components/recorder/pool.py @@ -16,7 +16,7 @@ from sqlalchemy.pool import ( StaticPool, ) -from homeassistant.helpers.frame import report +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.util.loop import raise_for_blocking_call _LOGGER = logging.getLogger(__name__) @@ -108,14 +108,14 @@ class RecorderPool(SingletonThreadPool, NullPool): # raise_for_blocking_call will raise an exception def _do_get_db_connection_protected(self) -> ConnectionPoolEntry: - report( + report_usage( ( "accesses the database without the database executor; " f"{ADVISE_MSG} " "for faster database operations" ), exclude_integrations={"recorder"}, - error_if_core=False, + core_behavior=ReportBehavior.LOG, ) return NullPool._create_connection(self) # noqa: SLF001 diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index d28e7e2a547..eb67300e8d4 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -110,14 +110,13 @@ def purge_old_data( _LOGGER.debug("Purging hasn't fully completed yet") return False - if apply_filter and _purge_filtered_data(instance, session) is False: + if apply_filter and not _purge_filtered_data(instance, session): _LOGGER.debug("Cleanup filtered data hasn't fully completed yet") return False # This purge cycle is finished, clean up old event types and # recorder runs - if instance.event_type_manager.active: - _purge_old_event_types(instance, session) + _purge_old_event_types(instance, session) if instance.states_meta_manager.active: _purge_old_entity_ids(instance, session) @@ -631,7 
+630,10 @@ def _purge_old_entity_ids(instance: Recorder, session: Session) -> None: def _purge_filtered_data(instance: Recorder, session: Session) -> bool: - """Remove filtered states and events that shouldn't be in the database.""" + """Remove filtered states and events that shouldn't be in the database. + + Returns true if all states and events are purged. + """ _LOGGER.debug("Cleanup filtered data") database_engine = instance.database_engine assert database_engine is not None @@ -639,7 +641,7 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool: # Check if excluded entity_ids are in database entity_filter = instance.entity_filter - has_more_states_to_purge = False + has_more_to_purge = False excluded_metadata_ids: list[str] = [ metadata_id for (metadata_id, entity_id) in session.query( @@ -648,12 +650,11 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool: if entity_filter and not entity_filter(entity_id) ] if excluded_metadata_ids: - has_more_states_to_purge = _purge_filtered_states( + has_more_to_purge |= not _purge_filtered_states( instance, session, excluded_metadata_ids, database_engine, now_timestamp ) # Check if excluded event_types are in database - has_more_events_to_purge = False if ( event_type_to_event_type_ids := instance.event_type_manager.get_many( instance.exclude_event_types, session @@ -665,12 +666,12 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool: if event_type_id is not None ] ): - has_more_events_to_purge = _purge_filtered_events( + has_more_to_purge |= not _purge_filtered_events( instance, session, excluded_event_type_ids, now_timestamp ) # Purge has completed if there are not more state or events to purge - return not (has_more_states_to_purge or has_more_events_to_purge) + return not has_more_to_purge def _purge_filtered_states( diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index a5be5dffe10..2e4b588a0b0 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -608,7 +608,8 @@ def delete_recorder_runs_rows( """Delete recorder_runs rows.""" return lambda_stmt( lambda: delete(RecorderRuns) - .filter(RecorderRuns.start < purge_before) + .filter(RecorderRuns.end.is_not(None)) + .filter(RecorderRuns.end < purge_before) .filter(RecorderRuns.run_id != current_run_id) .execution_options(synchronize_session=False) ) @@ -763,6 +764,13 @@ def batch_cleanup_entity_ids() -> StatementLambdaElement: ) +def has_used_states_entity_ids() -> StatementLambdaElement: + """Check if there are used entity_ids in the states table.""" + return lambda_stmt( + lambda: select(States.state_id).filter(States.entity_id.isnot(None)).limit(1) + ) + + def has_used_states_event_ids() -> StatementLambdaElement: """Check if there are used event_ids in the states table.""" return lambda_stmt( diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 6532935ae0e..3f1d5b981e3 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -11,6 +11,7 @@ from itertools import chain, groupby import logging from operator import itemgetter import re +from time import time as time_time from typing import TYPE_CHECKING, Any, Literal, TypedDict, cast from sqlalchemy import Select, and_, bindparam, func, lambda_stmt, select, text @@ -27,7 +28,9 @@ from homeassistant.helpers.singleton import singleton from 
homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( + AreaConverter, BaseUnitConverter, + BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -52,6 +55,7 @@ from .const import ( EVENT_RECORDER_HOURLY_STATISTICS_GENERATED, INTEGRATION_PLATFORM_COMPILE_STATISTICS, INTEGRATION_PLATFORM_LIST_STATISTIC_IDS, + INTEGRATION_PLATFORM_UPDATE_STATISTICS_ISSUES, INTEGRATION_PLATFORM_VALIDATE_STATISTICS, SupportedDialect, ) @@ -127,6 +131,11 @@ QUERY_STATISTICS_SUMMARY_SUM = ( STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = { + **{unit: AreaConverter for unit in AreaConverter.VALID_UNITS}, + **{ + unit: BloodGlucoseConcentrationConverter + for unit in BloodGlucoseConcentrationConverter.VALID_UNITS + }, **{unit: ConductivityConverter for unit in ConductivityConverter.VALID_UNITS}, **{unit: DataRateConverter for unit in DataRateConverter.VALID_UNITS}, **{unit: DistanceConverter for unit in DistanceConverter.VALID_UNITS}, @@ -438,8 +447,9 @@ def _compile_hourly_statistics(session: Session, start: datetime) -> None: } # Insert compiled hourly statistics in the database + now_timestamp = time_time() session.add_all( - Statistics.from_stats_ts(metadata_id, summary_item) + Statistics.from_stats_ts(metadata_id, summary_item, now_timestamp) for metadata_id, summary_item in summary.items() ) @@ -570,6 +580,7 @@ def _compile_statistics( new_short_term_stats: list[StatisticsBase] = [] updated_metadata_ids: set[int] = set() + now_timestamp = time_time() # Insert collected statistics in the database for stats in platform_stats: modified_statistic_id, metadata_id = statistics_meta_manager.update_or_add( @@ -579,13 +590,21 @@ def _compile_statistics( modified_statistic_ids.add(modified_statistic_id) updated_metadata_ids.add(metadata_id) if new_stat := _insert_statistics( - session, - StatisticsShortTerm, - metadata_id, - stats["stat"], + session, StatisticsShortTerm, metadata_id, stats["stat"], now_timestamp ): new_short_term_stats.append(new_stat) + if start.minute == 50: + # Once every hour, update issues + for platform in instance.hass.data[DOMAIN].recorder_platforms.values(): + if not ( + platform_update_issues := getattr( + platform, INTEGRATION_PLATFORM_UPDATE_STATISTICS_ISSUES, None + ) + ): + continue + platform_update_issues(instance.hass, session) + if start.minute == 55: # A full hour is ready, summarize it _compile_hourly_statistics(session, start) @@ -647,10 +666,11 @@ def _insert_statistics( table: type[StatisticsBase], metadata_id: int, statistic: StatisticData, + now_timestamp: float, ) -> StatisticsBase | None: """Insert statistics in the database.""" try: - stat = table.from_stats(metadata_id, statistic) + stat = table.from_stats(metadata_id, statistic, now_timestamp) session.add(stat) except SQLAlchemyError: _LOGGER.exception( @@ -2088,71 +2108,38 @@ def _build_stats( db_rows: list[Row], table_duration_seconds: float, start_ts_idx: int, - mean_idx: int | None, - min_idx: int | None, - max_idx: int | None, - last_reset_ts_idx: int | None, - state_idx: int | None, - sum_idx: int | None, + row_mapping: tuple[tuple[str, int], ...], ) -> list[StatisticsRow]: """Build a list of statistics without unit conversion.""" - result: list[StatisticsRow] = [] - ent_results_append = result.append - for db_row in db_rows: - row: StatisticsRow = { + return [ + { "start": (start_ts := db_row[start_ts_idx]), "end": start_ts + 
table_duration_seconds, + **{key: db_row[idx] for key, idx in row_mapping}, # type: ignore[typeddict-item] } - if last_reset_ts_idx is not None: - row["last_reset"] = db_row[last_reset_ts_idx] - if mean_idx is not None: - row["mean"] = db_row[mean_idx] - if min_idx is not None: - row["min"] = db_row[min_idx] - if max_idx is not None: - row["max"] = db_row[max_idx] - if state_idx is not None: - row["state"] = db_row[state_idx] - if sum_idx is not None: - row["sum"] = db_row[sum_idx] - ent_results_append(row) - return result + for db_row in db_rows + ] def _build_converted_stats( db_rows: list[Row], table_duration_seconds: float, start_ts_idx: int, - mean_idx: int | None, - min_idx: int | None, - max_idx: int | None, - last_reset_ts_idx: int | None, - state_idx: int | None, - sum_idx: int | None, + row_mapping: tuple[tuple[str, int], ...], convert: Callable[[float | None], float | None] | Callable[[float], float], ) -> list[StatisticsRow]: """Build a list of statistics with unit conversion.""" - result: list[StatisticsRow] = [] - ent_results_append = result.append - for db_row in db_rows: - row: StatisticsRow = { + return [ + { "start": (start_ts := db_row[start_ts_idx]), "end": start_ts + table_duration_seconds, + **{ + key: None if (v := db_row[idx]) is None else convert(v) # type: ignore[typeddict-item] + for key, idx in row_mapping + }, } - if last_reset_ts_idx is not None: - row["last_reset"] = db_row[last_reset_ts_idx] - if mean_idx is not None: - row["mean"] = None if (v := db_row[mean_idx]) is None else convert(v) - if min_idx is not None: - row["min"] = None if (v := db_row[min_idx]) is None else convert(v) - if max_idx is not None: - row["max"] = None if (v := db_row[max_idx]) is None else convert(v) - if state_idx is not None: - row["state"] = None if (v := db_row[state_idx]) is None else convert(v) - if sum_idx is not None: - row["sum"] = None if (v := db_row[sum_idx]) is None else convert(v) - ent_results_append(row) - return result + for db_row in db_rows + ] def _sorted_statistics_to_dict( @@ -2192,14 +2179,11 @@ def _sorted_statistics_to_dict( # Figure out which fields we need to extract from the SQL result # and which indices they have in the result so we can avoid the overhead # of doing a dict lookup for each row - mean_idx = field_map["mean"] if "mean" in types else None - min_idx = field_map["min"] if "min" in types else None - max_idx = field_map["max"] if "max" in types else None - last_reset_ts_idx = field_map["last_reset_ts"] if "last_reset" in types else None - state_idx = field_map["state"] if "state" in types else None + if "last_reset_ts" in field_map: + field_map["last_reset"] = field_map.pop("last_reset_ts") sum_idx = field_map["sum"] if "sum" in types else None sum_only = len(types) == 1 and sum_idx is not None - row_idxes = (mean_idx, min_idx, max_idx, last_reset_ts_idx, state_idx, sum_idx) + row_mapping = tuple((key, field_map[key]) for key in types if key in field_map) # Append all statistic entries, and optionally do unit conversion table_duration_seconds = table.duration.total_seconds() for meta_id, db_rows in stats_by_meta_id.items(): @@ -2228,9 +2212,9 @@ def _sorted_statistics_to_dict( else: _stats = _build_sum_stats(*build_args, sum_idx) elif convert: - _stats = _build_converted_stats(*build_args, *row_idxes, convert) + _stats = _build_converted_stats(*build_args, row_mapping, convert) else: - _stats = _build_stats(*build_args, *row_idxes) + _stats = _build_stats(*build_args, row_mapping) result[statistic_id] = _stats @@ -2248,6 +2232,16 @@ def 
validate_statistics(hass: HomeAssistant) -> dict[str, list[ValidationIssue]] return platform_validation +def update_statistics_issues(hass: HomeAssistant) -> None: + """Update statistics issues.""" + with session_scope(hass=hass, read_only=True) as session: + for platform in hass.data[DOMAIN].recorder_platforms.values(): + if platform_update_statistics_issues := getattr( + platform, INTEGRATION_PLATFORM_UPDATE_STATISTICS_ISSUES, None + ): + platform_update_statistics_issues(hass, session) + + def _statistics_exists( session: Session, table: type[StatisticsBase], @@ -2354,11 +2348,12 @@ def _import_statistics_with_session( _, metadata_id = statistics_meta_manager.update_or_add( session, metadata, old_metadata_dict ) + now_timestamp = time_time() for stat in statistics: if stat_id := _statistics_exists(session, table, metadata_id, stat["start"]): _update_statistics(session, table, stat_id, stat) else: - _insert_statistics(session, table, metadata_id, stat) + _insert_statistics(session, table, metadata_id, stat, now_timestamp) if table != StatisticsShortTerm: return True diff --git a/homeassistant/components/recorder/table_managers/event_types.py b/homeassistant/components/recorder/table_managers/event_types.py index 81bddce948d..266c970fe1f 100644 --- a/homeassistant/components/recorder/table_managers/event_types.py +++ b/homeassistant/components/recorder/table_managers/event_types.py @@ -28,8 +28,6 @@ CACHE_SIZE = 2048 class EventTypeManager(BaseLRUTableManager[EventTypes]): """Manage the EventTypes table.""" - active = False - def __init__(self, recorder: Recorder) -> None: """Initialize the event type manager.""" super().__init__(recorder, CACHE_SIZE) diff --git a/homeassistant/components/recorder/table_managers/states_meta.py b/homeassistant/components/recorder/table_managers/states_meta.py index 80d20dbec94..75afb6589a1 100644 --- a/homeassistant/components/recorder/table_managers/states_meta.py +++ b/homeassistant/components/recorder/table_managers/states_meta.py @@ -24,7 +24,7 @@ CACHE_SIZE = 8192 class StatesMetaManager(BaseLRUTableManager[StatesMeta]): """Manage the StatesMeta table.""" - active = False + active = True def __init__(self, recorder: Recorder) -> None: """Initialize the states meta manager.""" diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index 46e529d4909..783f0a80b8e 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -60,17 +60,21 @@ class ChangeStatisticsUnitTask(RecorderTask): class ClearStatisticsTask(RecorderTask): """Object to store statistics_ids which for which to remove statistics.""" + on_done: Callable[[], None] | None statistic_ids: list[str] def run(self, instance: Recorder) -> None: """Handle the task.""" statistics.clear_statistics(instance, self.statistic_ids) + if self.on_done: + self.on_done() @dataclass(slots=True) class UpdateStatisticsMetadataTask(RecorderTask): """Object to store statistics_id and unit for update of statistics metadata.""" + on_done: Callable[[], None] | None statistic_id: str new_statistic_id: str | None | UndefinedType new_unit_of_measurement: str | None | UndefinedType @@ -83,6 +87,8 @@ class UpdateStatisticsMetadataTask(RecorderTask): self.new_statistic_id, self.new_unit_of_measurement, ) + if self.on_done: + self.on_done() @dataclass(slots=True) @@ -322,31 +328,6 @@ class SynchronizeTask(RecorderTask): instance.hass.loop.call_soon_threadsafe(self.event.set) -@dataclass(slots=True) -class 
PostSchemaMigrationTask(RecorderTask): - """Post migration task to update schema.""" - - old_version: int - new_version: int - - def run(self, instance: Recorder) -> None: - """Handle the task.""" - instance._post_schema_migration( # noqa: SLF001 - self.old_version, self.new_version - ) - - -@dataclass(slots=True) -class StatisticsTimestampMigrationCleanupTask(RecorderTask): - """An object to insert into the recorder queue to run a statistics migration cleanup task.""" - - def run(self, instance: Recorder) -> None: - """Run statistics timestamp cleanup task.""" - if not statistics.cleanup_statistics_timestamp_migration(instance): - # Schedule a new statistics migration task if this one didn't finish - instance.queue_task(StatisticsTimestampMigrationCleanupTask()) - - @dataclass(slots=True) class AdjustLRUSizeTask(RecorderTask): """An object to insert into the recorder queue to adjust the LRU size.""" @@ -358,19 +339,6 @@ class AdjustLRUSizeTask(RecorderTask): instance._adjust_lru_size() # noqa: SLF001 -@dataclass(slots=True) -class EntityIDPostMigrationTask(RecorderTask): - """An object to insert into the recorder queue to cleanup after entity_ids migration.""" - - def run(self, instance: Recorder) -> None: - """Run entity_id post migration task.""" - if ( - not instance._post_migrate_entity_ids() # noqa: SLF001 - ): - # Schedule a new migration task if this one didn't finish - instance.queue_task(EntityIDPostMigrationTask()) - - @dataclass(slots=True) class RefreshEventTypesTask(RecorderTask): """An object to insert into the recorder queue to refresh event types.""" diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index 4d494aed7d5..2e7ac0c092d 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -644,48 +644,71 @@ def _is_retryable_error(instance: Recorder, err: OperationalError) -> bool: ) -type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], _R] +type _FuncType[**P, R] = Callable[Concatenate[Recorder, P], R] +type _MethType[Self, **P, R] = Callable[Concatenate[Self, Recorder, P], R] +type _FuncOrMethType[**_P, _R] = Callable[_P, _R] -def retryable_database_job[_RecorderT: Recorder, **_P]( +def retryable_database_job[**_P]( description: str, -) -> Callable[[_FuncType[_RecorderT, _P, bool]], _FuncType[_RecorderT, _P, bool]]: - """Try to execute a database job. +) -> Callable[[_FuncType[_P, bool]], _FuncType[_P, bool]]: + """Execute a database job repeatedly until it succeeds. The job should return True if it finished, and False if it needs to be rescheduled. 
""" - def decorator( - job: _FuncType[_RecorderT, _P, bool], - ) -> _FuncType[_RecorderT, _P, bool]: - @functools.wraps(job) - def wrapper(instance: _RecorderT, *args: _P.args, **kwargs: _P.kwargs) -> bool: - try: - return job(instance, *args, **kwargs) - except OperationalError as err: - if _is_retryable_error(instance, err): - assert isinstance(err.orig, BaseException) # noqa: PT017 - _LOGGER.info( - "%s; %s not completed, retrying", err.orig.args[1], description - ) - time.sleep(instance.db_retry_wait) - # Failed with retryable error - return False - - _LOGGER.warning("Error executing %s: %s", description, err) - - # Failed with permanent error - return True - - return wrapper + def decorator(job: _FuncType[_P, bool]) -> _FuncType[_P, bool]: + return _wrap_retryable_database_job_func_or_meth(job, description, False) return decorator -def database_job_retry_wrapper[_RecorderT: Recorder, **_P]( - description: str, attempts: int = 5 -) -> Callable[[_FuncType[_RecorderT, _P, None]], _FuncType[_RecorderT, _P, None]]: - """Try to execute a database job multiple times. +def retryable_database_job_method[_Self, **_P]( + description: str, +) -> Callable[[_MethType[_Self, _P, bool]], _MethType[_Self, _P, bool]]: + """Execute a database job repeatedly until it succeeds. + + The job should return True if it finished, and False if it needs to be rescheduled. + """ + + def decorator(job: _MethType[_Self, _P, bool]) -> _MethType[_Self, _P, bool]: + return _wrap_retryable_database_job_func_or_meth(job, description, True) + + return decorator + + +def _wrap_retryable_database_job_func_or_meth[**_P]( + job: _FuncOrMethType[_P, bool], description: str, method: bool +) -> _FuncOrMethType[_P, bool]: + recorder_pos = 1 if method else 0 + + @functools.wraps(job) + def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> bool: + instance: Recorder = args[recorder_pos] # type: ignore[assignment] + try: + return job(*args, **kwargs) + except OperationalError as err: + if _is_retryable_error(instance, err): + assert isinstance(err.orig, BaseException) # noqa: PT017 + _LOGGER.info( + "%s; %s not completed, retrying", err.orig.args[1], description + ) + time.sleep(instance.db_retry_wait) + # Failed with retryable error + return False + + _LOGGER.warning("Error executing %s: %s", description, err) + + # Failed with permanent error + return True + + return wrapper + + +def database_job_retry_wrapper[**_P, _R]( + description: str, attempts: int +) -> Callable[[_FuncType[_P, _R]], _FuncType[_P, _R]]: + """Execute a database job repeatedly until it succeeds, at most attempts times. This wrapper handles InnoDB deadlocks and lock timeouts. 
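A minimal, self-contained sketch of the shared retry-wrapper pattern the two decorator factories above build on, added here for illustration only; it is not part of the patch. The Recorder argument sits at args[0] for plain functions and args[1] for methods (after self), which is what recorder_pos captures in the real helper. DummyRecorder, DummyError and is_retryable are made-up stand-ins for Recorder, OperationalError and _is_retryable_error.

import functools
import time
from collections.abc import Callable


class DummyError(Exception):
    """Stand-in for sqlalchemy.exc.OperationalError."""


def is_retryable(err: DummyError) -> bool:
    """Stand-in for _is_retryable_error; treat everything as retryable here."""
    return True


def retry_job(description: str, method: bool) -> Callable:
    """Decorator factory: retry a job that returns True when it has finished."""
    # description is only used for logging in the real helper.
    recorder_pos = 1 if method else 0

    def decorator(job: Callable[..., bool]) -> Callable[..., bool]:
        @functools.wraps(job)
        def wrapper(*args, **kwargs) -> bool:
            instance = args[recorder_pos]  # the Recorder-like object
            try:
                return job(*args, **kwargs)
            except DummyError as err:
                if is_retryable(err):
                    time.sleep(instance.db_retry_wait)
                    return False  # not done yet, caller should reschedule
                return True  # permanent failure, do not reschedule

        return wrapper

    return decorator


class DummyRecorder:
    db_retry_wait = 0.1


@retry_job("demo job", method=False)
def do_work(instance: DummyRecorder) -> bool:
    return True  # pretend the batch finished


assert do_work(DummyRecorder())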
@@ -694,32 +717,63 @@ def database_job_retry_wrapper[_RecorderT: Recorder, **_P]( """ def decorator( - job: _FuncType[_RecorderT, _P, None], - ) -> _FuncType[_RecorderT, _P, None]: - @functools.wraps(job) - def wrapper(instance: _RecorderT, *args: _P.args, **kwargs: _P.kwargs) -> None: - for attempt in range(attempts): - try: - job(instance, *args, **kwargs) - except OperationalError as err: - if attempt == attempts - 1 or not _is_retryable_error( - instance, err - ): - raise - assert isinstance(err.orig, BaseException) # noqa: PT017 - _LOGGER.info( - "%s; %s failed, retrying", err.orig.args[1], description - ) - time.sleep(instance.db_retry_wait) - # Failed with retryable error - else: - return - - return wrapper + job: _FuncType[_P, _R], + ) -> _FuncType[_P, _R]: + return _database_job_retry_wrapper_func_or_meth( + job, description, attempts, False + ) return decorator +def database_job_retry_wrapper_method[_Self, **_P, _R]( + description: str, attempts: int +) -> Callable[[_MethType[_Self, _P, _R]], _MethType[_Self, _P, _R]]: + """Execute a database job repeatedly until it succeeds, at most attempts times. + + This wrapper handles InnoDB deadlocks and lock timeouts. + + This is different from retryable_database_job in that it will retry the job + attempts number of times instead of returning False if the job fails. + """ + + def decorator( + job: _MethType[_Self, _P, _R], + ) -> _MethType[_Self, _P, _R]: + return _database_job_retry_wrapper_func_or_meth( + job, description, attempts, True + ) + + return decorator + + +def _database_job_retry_wrapper_func_or_meth[**_P, _R]( + job: _FuncOrMethType[_P, _R], + description: str, + attempts: int, + method: bool, +) -> _FuncOrMethType[_P, _R]: + recorder_pos = 1 if method else 0 + + @functools.wraps(job) + def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R: + instance: Recorder = args[recorder_pos] # type: ignore[assignment] + for attempt in range(attempts): + try: + return job(*args, **kwargs) + except OperationalError as err: + # Failed with retryable error + if attempt == attempts - 1 or not _is_retryable_error(instance, err): + raise + assert isinstance(err.orig, BaseException) # noqa: PT017 + _LOGGER.info("%s; %s failed, retrying", err.orig.args[1], description) + time.sleep(instance.db_retry_wait) + + raise ValueError("attempts must be a positive integer") + + return wrapper + + def periodic_db_cleanups(instance: Recorder) -> None: """Run any database cleanups that need to happen periodically. 
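For illustration only (not part of the patch): the resolve_period hunk that follows replaces the old day-28/31-day-timedelta trick with plain modular arithmetic on the month index. A small self-check of that arithmetic, reusing the same variable names as the hunk, is sketched below.

from datetime import datetime


def month_start(start_of_day: datetime, cal_offset: int) -> datetime:
    """Mirror the month-offset math from resolve_period for a quick sanity check."""
    month_now = start_of_day.month
    new_month = (month_now - 1 + cal_offset) % 12 + 1
    new_year = start_of_day.year + (month_now - 1 + cal_offset) // 12
    return start_of_day.replace(year=new_year, month=new_month, day=1)


# Python's floor division and modulo make negative offsets wrap across year boundaries:
assert month_start(datetime(2024, 1, 15), -1) == datetime(2023, 12, 1)
assert month_start(datetime(2024, 11, 3), 2) == datetime(2025, 1, 1)
assert month_start(datetime(2024, 6, 10), 0) == datetime(2024, 6, 1)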
@@ -838,17 +892,16 @@ def resolve_period( start_time += timedelta(days=cal_offset * 7) end_time = start_time + timedelta(weeks=1) elif calendar_period == "month": - start_time = start_of_day.replace(day=28) - # This works for up to 48 months of offset - start_time = (start_time + timedelta(days=cal_offset * 31)).replace(day=1) + month_now = start_of_day.month + new_month = (month_now - 1 + cal_offset) % 12 + 1 + new_year = start_of_day.year + (month_now - 1 + cal_offset) // 12 + start_time = start_of_day.replace(year=new_year, month=new_month, day=1) end_time = (start_time + timedelta(days=31)).replace(day=1) else: # calendar_period = "year" - start_time = start_of_day.replace(month=12, day=31) - # This works for 100+ years of offset - start_time = (start_time + timedelta(days=cal_offset * 366)).replace( - month=1, day=1 + start_time = start_of_day.replace( + year=start_of_day.year + cal_offset, month=1, day=1 ) - end_time = (start_time + timedelta(days=365)).replace(day=1) + end_time = (start_time + timedelta(days=366)).replace(day=1) start_time = dt_util.as_utc(start_time) end_time = dt_util.as_utc(end_time) diff --git a/homeassistant/components/recorder/websocket_api.py b/homeassistant/components/recorder/websocket_api.py index 5e0eef37721..ee5c5dd6d75 100644 --- a/homeassistant/components/recorder/websocket_api.py +++ b/homeassistant/components/recorder/websocket_api.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from datetime import datetime as dt from typing import Any, Literal, cast @@ -15,6 +16,9 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( + AreaConverter, + BloodGlucoseConcentrationConverter, + ConductivityConverter, DataRateConverter, DistanceConverter, DurationConverter, @@ -42,13 +46,21 @@ from .statistics import ( list_statistic_ids, statistic_during_period, statistics_during_period, + update_statistics_issues, validate_statistics, ) from .util import PERIOD_SCHEMA, get_instance, resolve_period +CLEAR_STATISTICS_TIME_OUT = 10 +UPDATE_STATISTICS_METADATA_TIME_OUT = 10 + UNIT_SCHEMA = vol.Schema( { - vol.Optional("conductivity"): vol.In(DataRateConverter.VALID_UNITS), + vol.Optional("area"): vol.In(AreaConverter.VALID_UNITS), + vol.Optional("blood_glucose_concentration"): vol.In( + BloodGlucoseConcentrationConverter.VALID_UNITS + ), + vol.Optional("conductivity"): vol.In(ConductivityConverter.VALID_UNITS), vol.Optional("data_rate"): vol.In(DataRateConverter.VALID_UNITS), vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS), vol.Optional("duration"): vol.In(DurationConverter.VALID_UNITS), @@ -79,6 +91,7 @@ def async_setup(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, ws_get_statistics_metadata) websocket_api.async_register_command(hass, ws_list_statistic_ids) websocket_api.async_register_command(hass, ws_import_statistics) + websocket_api.async_register_command(hass, ws_update_statistics_issues) websocket_api.async_register_command(hass, ws_update_statistics_metadata) websocket_api.async_register_command(hass, ws_validate_statistics) @@ -291,6 +304,24 @@ async def ws_validate_statistics( connection.send_result(msg["id"], statistic_ids) +@websocket_api.websocket_command( + { + vol.Required("type"): "recorder/update_statistics_issues", + } +) +@websocket_api.async_response +async def ws_update_statistics_issues( + hass: HomeAssistant, connection: 
websocket_api.ActiveConnection, msg: dict[str, Any] +) -> None: + """Update statistics issues.""" + instance = get_instance(hass) + await instance.async_add_executor_job( + update_statistics_issues, + hass, + ) + connection.send_result(msg["id"]) + + @websocket_api.require_admin @websocket_api.websocket_command( { @@ -298,8 +329,8 @@ async def ws_validate_statistics( vol.Required("statistic_ids"): [str], } ) -@callback -def ws_clear_statistics( +@websocket_api.async_response +async def ws_clear_statistics( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Clear statistics for a list of statistic_ids. @@ -307,7 +338,23 @@ def ws_clear_statistics( Note: The WS call posts a job to the recorder's queue and then returns, it doesn't wait until the job is completed. """ - get_instance(hass).async_clear_statistics(msg["statistic_ids"]) + done_event = asyncio.Event() + + def clear_statistics_done() -> None: + hass.loop.call_soon_threadsafe(done_event.set) + + get_instance(hass).async_clear_statistics( + msg["statistic_ids"], on_done=clear_statistics_done + ) + try: + async with asyncio.timeout(CLEAR_STATISTICS_TIME_OUT): + await done_event.wait() + except TimeoutError: + connection.send_error( + msg["id"], websocket_api.ERR_TIMEOUT, "clear_statistics timed out" + ) + return + connection.send_result(msg["id"]) @@ -336,17 +383,33 @@ async def ws_get_statistics_metadata( vol.Required("unit_of_measurement"): vol.Any(str, None), } ) -@callback -def ws_update_statistics_metadata( +@websocket_api.async_response +async def ws_update_statistics_metadata( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Update statistics metadata for a statistic_id. Only the normalized unit of measurement can be updated. 
""" + done_event = asyncio.Event() + + def update_statistics_metadata_done() -> None: + hass.loop.call_soon_threadsafe(done_event.set) + get_instance(hass).async_update_statistics_metadata( - msg["statistic_id"], new_unit_of_measurement=msg["unit_of_measurement"] + msg["statistic_id"], + new_unit_of_measurement=msg["unit_of_measurement"], + on_done=update_statistics_metadata_done, ) + try: + async with asyncio.timeout(UPDATE_STATISTICS_METADATA_TIME_OUT): + await done_event.wait() + except TimeoutError: + connection.send_error( + msg["id"], websocket_api.ERR_TIMEOUT, "update_statistics_metadata timed out" + ) + return + connection.send_result(msg["id"]) diff --git a/homeassistant/components/recswitch/manifest.json b/homeassistant/components/recswitch/manifest.json index 3e243d8f0d2..1273d498efd 100644 --- a/homeassistant/components/recswitch/manifest.json +++ b/homeassistant/components/recswitch/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/recswitch", "iot_class": "local_polling", "loggers": ["pyrecswitch"], + "quality_scale": "legacy", "requirements": ["pyrecswitch==1.0.2"] } diff --git a/homeassistant/components/reddit/manifest.json b/homeassistant/components/reddit/manifest.json index beb2b168e88..a2e20329be0 100644 --- a/homeassistant/components/reddit/manifest.json +++ b/homeassistant/components/reddit/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/reddit", "iot_class": "cloud_polling", "loggers": ["praw", "prawcore"], + "quality_scale": "legacy", "requirements": ["praw==7.5.0"] } diff --git a/homeassistant/components/refoss/const.py b/homeassistant/components/refoss/const.py index 0542afe8afb..851f8ba8f77 100644 --- a/homeassistant/components/refoss/const.py +++ b/homeassistant/components/refoss/const.py @@ -20,6 +20,9 @@ COORDINATOR = "coordinator" MAX_ERRORS = 2 +# Energy monitoring +SENSOR_EM = "em" + CHANNEL_DISPLAY_NAME: dict[str, dict[int, str]] = { "em06": { 1: "A1", @@ -28,5 +31,25 @@ CHANNEL_DISPLAY_NAME: dict[str, dict[int, str]] = { 4: "A2", 5: "B2", 6: "C2", - } + }, + "em16": { + 1: "A1", + 2: "A2", + 3: "A3", + 4: "A4", + 5: "A5", + 6: "A6", + 7: "B1", + 8: "B2", + 9: "B3", + 10: "B4", + 11: "B5", + 12: "B6", + 13: "C1", + 14: "C2", + 15: "C3", + 16: "C4", + 17: "C5", + 18: "C6", + }, } diff --git a/homeassistant/components/refoss/manifest.json b/homeassistant/components/refoss/manifest.json index bf046e954d1..da7050433f3 100644 --- a/homeassistant/components/refoss/manifest.json +++ b/homeassistant/components/refoss/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/refoss", "iot_class": "local_polling", - "requirements": ["refoss-ha==1.2.4"] + "requirements": ["refoss-ha==1.2.5"] } diff --git a/homeassistant/components/refoss/sensor.py b/homeassistant/components/refoss/sensor.py index f65724ddd77..26454cae48d 100644 --- a/homeassistant/components/refoss/sensor.py +++ b/homeassistant/components/refoss/sensor.py @@ -31,6 +31,7 @@ from .const import ( COORDINATORS, DISPATCH_DEVICE_DISCOVERED, DOMAIN, + SENSOR_EM, ) from .entity import RefossEntity @@ -43,8 +44,13 @@ class RefossSensorEntityDescription(SensorEntityDescription): fn: Callable[[float], float] = lambda x: x +DEVICETYPE_SENSOR: dict[str, str] = { + "em06": SENSOR_EM, + "em16": SENSOR_EM, +} + SENSORS: dict[str, tuple[RefossSensorEntityDescription, ...]] = { - "em06": ( + SENSOR_EM: ( RefossSensorEntityDescription( key="power", translation_key="power", @@ -121,8 
+127,11 @@ async def async_setup_entry( if not isinstance(device, ElectricityXMix): return + + sensor_type = DEVICETYPE_SENSOR.get(device.device_type, "") + descriptions: tuple[RefossSensorEntityDescription, ...] = SENSORS.get( - device.device_type, () + sensor_type, () ) async_add_entities( diff --git a/homeassistant/components/rejseplanen/manifest.json b/homeassistant/components/rejseplanen/manifest.json index 72da7a65f45..6d0642cc996 100644 --- a/homeassistant/components/rejseplanen/manifest.json +++ b/homeassistant/components/rejseplanen/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/rejseplanen", "iot_class": "cloud_polling", "loggers": ["rjpl"], + "quality_scale": "legacy", "requirements": ["rjpl==0.3.6"] } diff --git a/homeassistant/components/remember_the_milk/__init__.py b/homeassistant/components/remember_the_milk/__init__.py index 425a12d5c4d..d544c42efe1 100644 --- a/homeassistant/components/remember_the_milk/__init__.py +++ b/homeassistant/components/remember_the_milk/__init__.py @@ -4,17 +4,18 @@ import json import logging import os -from rtmapi import Rtm, RtmRequestFailedException +from rtmapi import Rtm import voluptuous as vol from homeassistant.components import configurator -from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_NAME, CONF_TOKEN, STATE_OK -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_NAME, CONF_TOKEN +from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType +from .entity import RememberTheMilkEntity + # httplib2 is a transitive dependency from RtmAPI. If this dependency is not # set explicitly, the library does not work. 
_LOGGER = logging.getLogger(__name__) @@ -53,12 +54,12 @@ SERVICE_SCHEMA_COMPLETE_TASK = vol.Schema({vol.Required(CONF_ID): cv.string}) def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Remember the milk component.""" - component = EntityComponent[RememberTheMilk](_LOGGER, DOMAIN, hass) + component = EntityComponent[RememberTheMilkEntity](_LOGGER, DOMAIN, hass) stored_rtm_config = RememberTheMilkConfiguration(hass) for rtm_config in config[DOMAIN]: account_name = rtm_config[CONF_NAME] - _LOGGER.info("Adding Remember the milk account %s", account_name) + _LOGGER.debug("Adding Remember the milk account %s", account_name) api_key = rtm_config[CONF_API_KEY] shared_secret = rtm_config[CONF_SHARED_SECRET] token = stored_rtm_config.get_token(account_name) @@ -85,7 +86,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: def _create_instance( hass, account_name, api_key, shared_secret, token, stored_rtm_config, component ): - entity = RememberTheMilk( + entity = RememberTheMilkEntity( account_name, api_key, shared_secret, token, stored_rtm_config ) component.add_entities([entity]) @@ -237,134 +238,3 @@ class RememberTheMilkConfiguration: if hass_id in self._config[profile_name][CONF_ID_MAP]: del self._config[profile_name][CONF_ID_MAP][hass_id] self.save_config() - - -class RememberTheMilk(Entity): - """Representation of an interface to Remember The Milk.""" - - def __init__(self, name, api_key, shared_secret, token, rtm_config): - """Create new instance of Remember The Milk component.""" - self._name = name - self._api_key = api_key - self._shared_secret = shared_secret - self._token = token - self._rtm_config = rtm_config - self._rtm_api = Rtm(api_key, shared_secret, "delete", token) - self._token_valid = None - self._check_token() - _LOGGER.debug("Instance created for account %s", self._name) - - def _check_token(self): - """Check if the API token is still valid. - - If it is not valid any more, delete it from the configuration. This - will trigger a new authentication process. - """ - valid = self._rtm_api.token_valid() - if not valid: - _LOGGER.error( - "Token for account %s is invalid. You need to register again!", - self.name, - ) - self._rtm_config.delete_token(self._name) - self._token_valid = False - else: - self._token_valid = True - return self._token_valid - - def create_task(self, call: ServiceCall) -> None: - """Create a new task on Remember The Milk. - - You can use the smart syntax to define the attributes of a new task, - e.g. "my task #some_tag ^today" will add tag "some_tag" and set the - due date to today. 
- """ - try: - task_name = call.data[CONF_NAME] - hass_id = call.data.get(CONF_ID) - rtm_id = None - if hass_id is not None: - rtm_id = self._rtm_config.get_rtm_id(self._name, hass_id) - result = self._rtm_api.rtm.timelines.create() - timeline = result.timeline.value - - if hass_id is None or rtm_id is None: - result = self._rtm_api.rtm.tasks.add( - timeline=timeline, name=task_name, parse="1" - ) - _LOGGER.debug( - "Created new task '%s' in account %s", task_name, self.name - ) - self._rtm_config.set_rtm_id( - self._name, - hass_id, - result.list.id, - result.list.taskseries.id, - result.list.taskseries.task.id, - ) - else: - self._rtm_api.rtm.tasks.setName( - name=task_name, - list_id=rtm_id[0], - taskseries_id=rtm_id[1], - task_id=rtm_id[2], - timeline=timeline, - ) - _LOGGER.debug( - "Updated task with id '%s' in account %s to name %s", - hass_id, - self.name, - task_name, - ) - except RtmRequestFailedException as rtm_exception: - _LOGGER.error( - "Error creating new Remember The Milk task for account %s: %s", - self._name, - rtm_exception, - ) - - def complete_task(self, call: ServiceCall) -> None: - """Complete a task that was previously created by this component.""" - hass_id = call.data[CONF_ID] - rtm_id = self._rtm_config.get_rtm_id(self._name, hass_id) - if rtm_id is None: - _LOGGER.error( - ( - "Could not find task with ID %s in account %s. " - "So task could not be closed" - ), - hass_id, - self._name, - ) - return - try: - result = self._rtm_api.rtm.timelines.create() - timeline = result.timeline.value - self._rtm_api.rtm.tasks.complete( - list_id=rtm_id[0], - taskseries_id=rtm_id[1], - task_id=rtm_id[2], - timeline=timeline, - ) - self._rtm_config.delete_rtm_id(self._name, hass_id) - _LOGGER.debug( - "Completed task with id %s in account %s", hass_id, self._name - ) - except RtmRequestFailedException as rtm_exception: - _LOGGER.error( - "Error creating new Remember The Milk task for account %s: %s", - self._name, - rtm_exception, - ) - - @property - def name(self): - """Return the name of the device.""" - return self._name - - @property - def state(self): - """Return the state of the device.""" - if not self._token_valid: - return "API token invalid" - return STATE_OK diff --git a/homeassistant/components/remember_the_milk/entity.py b/homeassistant/components/remember_the_milk/entity.py new file mode 100644 index 00000000000..8fa52b6c06c --- /dev/null +++ b/homeassistant/components/remember_the_milk/entity.py @@ -0,0 +1,142 @@ +"""Support to interact with Remember The Milk.""" + +import logging + +from rtmapi import Rtm, RtmRequestFailedException + +from homeassistant.const import CONF_ID, CONF_NAME, STATE_OK +from homeassistant.core import ServiceCall +from homeassistant.helpers.entity import Entity + +_LOGGER = logging.getLogger(__name__) + + +class RememberTheMilkEntity(Entity): + """Representation of an interface to Remember The Milk.""" + + def __init__(self, name, api_key, shared_secret, token, rtm_config): + """Create new instance of Remember The Milk component.""" + self._name = name + self._api_key = api_key + self._shared_secret = shared_secret + self._token = token + self._rtm_config = rtm_config + self._rtm_api = Rtm(api_key, shared_secret, "delete", token) + self._token_valid = None + self._check_token() + _LOGGER.debug("Instance created for account %s", self._name) + + def _check_token(self): + """Check if the API token is still valid. + + If it is not valid any more, delete it from the configuration. This + will trigger a new authentication process. 
+ """ + valid = self._rtm_api.token_valid() + if not valid: + _LOGGER.error( + "Token for account %s is invalid. You need to register again!", + self.name, + ) + self._rtm_config.delete_token(self._name) + self._token_valid = False + else: + self._token_valid = True + return self._token_valid + + def create_task(self, call: ServiceCall) -> None: + """Create a new task on Remember The Milk. + + You can use the smart syntax to define the attributes of a new task, + e.g. "my task #some_tag ^today" will add tag "some_tag" and set the + due date to today. + """ + try: + task_name = call.data[CONF_NAME] + hass_id = call.data.get(CONF_ID) + rtm_id = None + if hass_id is not None: + rtm_id = self._rtm_config.get_rtm_id(self._name, hass_id) + result = self._rtm_api.rtm.timelines.create() + timeline = result.timeline.value + + if hass_id is None or rtm_id is None: + result = self._rtm_api.rtm.tasks.add( + timeline=timeline, name=task_name, parse="1" + ) + _LOGGER.debug( + "Created new task '%s' in account %s", task_name, self.name + ) + self._rtm_config.set_rtm_id( + self._name, + hass_id, + result.list.id, + result.list.taskseries.id, + result.list.taskseries.task.id, + ) + else: + self._rtm_api.rtm.tasks.setName( + name=task_name, + list_id=rtm_id[0], + taskseries_id=rtm_id[1], + task_id=rtm_id[2], + timeline=timeline, + ) + _LOGGER.debug( + "Updated task with id '%s' in account %s to name %s", + hass_id, + self.name, + task_name, + ) + except RtmRequestFailedException as rtm_exception: + _LOGGER.error( + "Error creating new Remember The Milk task for account %s: %s", + self._name, + rtm_exception, + ) + + def complete_task(self, call: ServiceCall) -> None: + """Complete a task that was previously created by this component.""" + hass_id = call.data[CONF_ID] + rtm_id = self._rtm_config.get_rtm_id(self._name, hass_id) + if rtm_id is None: + _LOGGER.error( + ( + "Could not find task with ID %s in account %s. 
" + "So task could not be closed" + ), + hass_id, + self._name, + ) + return + try: + result = self._rtm_api.rtm.timelines.create() + timeline = result.timeline.value + self._rtm_api.rtm.tasks.complete( + list_id=rtm_id[0], + taskseries_id=rtm_id[1], + task_id=rtm_id[2], + timeline=timeline, + ) + self._rtm_config.delete_rtm_id(self._name, hass_id) + _LOGGER.debug( + "Completed task with id %s in account %s", hass_id, self._name + ) + except RtmRequestFailedException as rtm_exception: + _LOGGER.error( + "Error creating new Remember The Milk task for account %s: %s", + self._name, + rtm_exception, + ) + + @property + def name(self): + """Return the name of the device.""" + return self._name + + @property + def state(self): + """Return the state of the device.""" + if not self._token_valid: + return "API token invalid" + return STATE_OK diff --git a/homeassistant/components/remember_the_milk/icons.json b/homeassistant/components/remember_the_milk/icons.json index 3ca17113fb8..04502aea5ef 100644 --- a/homeassistant/components/remember_the_milk/icons.json +++ b/homeassistant/components/remember_the_milk/icons.json @@ -1,6 +1,10 @@ { "services": { - "create_task": "mdi:check", - "complete_task": "mdi:check-all" + "create_task": { + "service": "mdi:check" + }, + "complete_task": { + "service": "mdi:check-all" + } } } diff --git a/homeassistant/components/remember_the_milk/manifest.json b/homeassistant/components/remember_the_milk/manifest.json index ab309c765fc..13c37d56dba 100644 --- a/homeassistant/components/remember_the_milk/manifest.json +++ b/homeassistant/components/remember_the_milk/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/remember_the_milk", "iot_class": "cloud_push", "loggers": ["rtmapi"], + "quality_scale": "legacy", "requirements": ["RtmAPI==0.7.2", "httplib2==0.20.4"] } diff --git a/homeassistant/components/remote/__init__.py b/homeassistant/components/remote/__init__.py index cb67a7568e2..36e482f0a29 100644 --- a/homeassistant/components/remote/__init__.py +++ b/homeassistant/components/remote/__init__.py @@ -6,10 +6,10 @@ from collections.abc import Iterable from datetime import timedelta from enum import IntFlag import functools as ft -from functools import cached_property import logging from typing import Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -22,20 +22,16 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass +from homeassistant.util.hass_dict import HassKey _LOGGER = logging.getLogger(__name__) DOMAIN = "remote" +DATA_COMPONENT: HassKey[EntityComponent[RemoteEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -72,19 +68,6 @@ class RemoteEntityFeature(IntFlag): ACTIVITY = 4 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the RemoteEntityFeature enum instead. 
-_DEPRECATED_SUPPORT_LEARN_COMMAND = DeprecatedConstantEnum( - RemoteEntityFeature.LEARN_COMMAND, "2025.1" -) -_DEPRECATED_SUPPORT_DELETE_COMMAND = DeprecatedConstantEnum( - RemoteEntityFeature.DELETE_COMMAND, "2025.1" -) -_DEPRECATED_SUPPORT_ACTIVITY = DeprecatedConstantEnum( - RemoteEntityFeature.ACTIVITY, "2025.1" -) - - REMOTE_SERVICE_ACTIVITY_SCHEMA = cv.make_entity_service_schema( {vol.Optional(ATTR_ACTIVITY): cv.string} ) @@ -98,7 +81,7 @@ def is_on(hass: HomeAssistant, entity_id: str) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for remotes.""" - component = hass.data[DOMAIN] = EntityComponent[RemoteEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[RemoteEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -155,14 +138,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[RemoteEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[RemoteEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class RemoteEntityDescription(ToggleEntityDescription, frozen_or_thawed=True): @@ -189,19 +170,6 @@ class RemoteEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) """Flag supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> RemoteEntityFeature: - """Return the supported features as RemoteEntityFeature. - - Remove this compatibility shim in 2025.1 or later. 
- """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = RemoteEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - @cached_property def current_activity(self) -> str | None: """Active activity.""" @@ -216,7 +184,7 @@ class RemoteEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) @property def state_attributes(self) -> dict[str, Any] | None: """Return optional state attributes.""" - if RemoteEntityFeature.ACTIVITY not in self.supported_features_compat: + if RemoteEntityFeature.ACTIVITY not in self.supported_features: return None return { @@ -251,11 +219,3 @@ class RemoteEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) await self.hass.async_add_executor_job( ft.partial(self.delete_command, **kwargs) ) - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = ft.partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/remote/icons.json b/homeassistant/components/remote/icons.json index 07526a4bc79..43a7f6ee7b6 100644 --- a/homeassistant/components/remote/icons.json +++ b/homeassistant/components/remote/icons.json @@ -8,11 +8,23 @@ } }, "services": { - "delete_command": "mdi:delete", - "learn_command": "mdi:school", - "send_command": "mdi:remote", - "toggle": "mdi:remote", - "turn_off": "mdi:remote-off", - "turn_on": "mdi:remote" + "delete_command": { + "service": "mdi:delete" + }, + "learn_command": { + "service": "mdi:school" + }, + "send_command": { + "service": "mdi:remote" + }, + "toggle": { + "service": "mdi:remote" + }, + "turn_off": { + "service": "mdi:remote-off" + }, + "turn_on": { + "service": "mdi:remote" + } } } diff --git a/homeassistant/components/remote/strings.json b/homeassistant/components/remote/strings.json index e3df487a57b..09b270b9687 100644 --- a/homeassistant/components/remote/strings.json +++ b/homeassistant/components/remote/strings.json @@ -28,7 +28,7 @@ "services": { "turn_on": { "name": "[%key:common::action::turn_on%]", - "description": "Sends the power on command.", + "description": "Sends the turn on command.", "fields": { "activity": { "name": "Activity", @@ -38,11 +38,11 @@ }, "toggle": { "name": "[%key:common::action::toggle%]", - "description": "Toggles a device on/off." + "description": "Sends the toggle command." }, "turn_off": { "name": "[%key:common::action::turn_off%]", - "description": "Turns the device off." + "description": "Sends the turn off command." }, "send_command": { "name": "Send command", diff --git a/homeassistant/components/remote_rpi_gpio/binary_sensor.py b/homeassistant/components/remote_rpi_gpio/binary_sensor.py index 98ae7328bc5..b3a8075c6ba 100644 --- a/homeassistant/components/remote_rpi_gpio/binary_sensor.py +++ b/homeassistant/components/remote_rpi_gpio/binary_sensor.py @@ -15,7 +15,6 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .. import remote_rpi_gpio from . import ( CONF_BOUNCETIME, CONF_INVERT_LOGIC, @@ -23,6 +22,8 @@ from . 
import ( DEFAULT_BOUNCETIME, DEFAULT_INVERT_LOGIC, DEFAULT_PULL_MODE, + read_input, + setup_input, ) CONF_PORTS = "ports" @@ -56,9 +57,7 @@ def setup_platform( devices = [] for port_num, port_name in ports.items(): try: - remote_sensor = remote_rpi_gpio.setup_input( - address, port_num, pull_mode, bouncetime - ) + remote_sensor = setup_input(address, port_num, pull_mode, bouncetime) except (ValueError, IndexError, KeyError, OSError): return new_sensor = RemoteRPiGPIOBinarySensor(port_name, remote_sensor, invert_logic) @@ -84,7 +83,7 @@ class RemoteRPiGPIOBinarySensor(BinarySensorEntity): def read_gpio(): """Read state from GPIO.""" - self._state = remote_rpi_gpio.read_input(self._sensor) + self._state = read_input(self._sensor) self.schedule_update_ha_state() self._sensor.when_deactivated = read_gpio @@ -108,6 +107,6 @@ class RemoteRPiGPIOBinarySensor(BinarySensorEntity): def update(self) -> None: """Update the GPIO state.""" try: - self._state = remote_rpi_gpio.read_input(self._sensor) + self._state = read_input(self._sensor) except requests.exceptions.ConnectionError: return diff --git a/homeassistant/components/remote_rpi_gpio/manifest.json b/homeassistant/components/remote_rpi_gpio/manifest.json index 3a369d859f8..b7e3b55d564 100644 --- a/homeassistant/components/remote_rpi_gpio/manifest.json +++ b/homeassistant/components/remote_rpi_gpio/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/remote_rpi_gpio", "iot_class": "local_push", "loggers": ["gpiozero", "pigpio"], + "quality_scale": "legacy", "requirements": ["gpiozero==1.6.2", "pigpio==1.78"] } diff --git a/homeassistant/components/remote_rpi_gpio/switch.py b/homeassistant/components/remote_rpi_gpio/switch.py index ff9ecbcd97b..bf31e4bb55a 100644 --- a/homeassistant/components/remote_rpi_gpio/switch.py +++ b/homeassistant/components/remote_rpi_gpio/switch.py @@ -16,8 +16,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .. import remote_rpi_gpio -from . import CONF_INVERT_LOGIC, DEFAULT_INVERT_LOGIC +from . import CONF_INVERT_LOGIC, DEFAULT_INVERT_LOGIC, setup_output, write_output CONF_PORTS = "ports" @@ -46,7 +45,7 @@ def setup_platform( devices = [] for port, name in ports.items(): try: - led = remote_rpi_gpio.setup_output(address, port, invert_logic) + led = setup_output(address, port, invert_logic) except (ValueError, IndexError, KeyError, OSError): return new_switch = RemoteRPiGPIOSwitch(name, led) @@ -83,12 +82,12 @@ class RemoteRPiGPIOSwitch(SwitchEntity): def turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" - remote_rpi_gpio.write_output(self._switch, 1) + write_output(self._switch, 1) self._state = True self.schedule_update_ha_state() def turn_off(self, **kwargs: Any) -> None: """Turn the device off.""" - remote_rpi_gpio.write_output(self._switch, 0) + write_output(self._switch, 0) self._state = False self.schedule_update_ha_state() diff --git a/homeassistant/components/renault/binary_sensor.py b/homeassistant/components/renault/binary_sensor.py index 2041499b711..a8fdf324f1c 100644 --- a/homeassistant/components/renault/binary_sensor.py +++ b/homeassistant/components/renault/binary_sensor.py @@ -19,6 +19,9 @@ from homeassistant.helpers.typing import StateType from . 
import RenaultConfigEntry from .entity import RenaultDataEntity, RenaultDataEntityDescription +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RenaultBinarySensorEntityDescription( @@ -28,7 +31,7 @@ class RenaultBinarySensorEntityDescription( """Class describing Renault binary sensor entities.""" on_key: str - on_value: StateType + on_value: StateType | list[StateType] async def async_setup_entry( @@ -58,6 +61,9 @@ class RenaultBinarySensor( """Return true if the binary sensor is on.""" if (data := self._get_data_attr(self.entity_description.on_key)) is None: return None + + if isinstance(self.entity_description.on_value, list): + return data in self.entity_description.on_value return data == self.entity_description.on_value @@ -68,7 +74,10 @@ BINARY_SENSOR_TYPES: tuple[RenaultBinarySensorEntityDescription, ...] = tuple( coordinator="battery", device_class=BinarySensorDeviceClass.PLUG, on_key="plugStatus", - on_value=PlugState.PLUGGED.value, + on_value=[ + PlugState.PLUGGED.value, + PlugState.PLUGGED_WAITING_FOR_CHARGE.value, + ], ), RenaultBinarySensorEntityDescription( key="charging", @@ -104,13 +113,13 @@ BINARY_SENSOR_TYPES: tuple[RenaultBinarySensorEntityDescription, ...] = tuple( ] + [ RenaultBinarySensorEntityDescription( - key=f"{door.replace(' ','_').lower()}_door_status", + key=f"{door.replace(' ', '_').lower()}_door_status", coordinator="lock_status", # On means open, Off means closed device_class=BinarySensorDeviceClass.DOOR, - on_key=f"doorStatus{door.replace(' ','')}", + on_key=f"doorStatus{door.replace(' ', '')}", on_value="open", - translation_key=f"{door.lower().replace(' ','_')}_door_status", + translation_key=f"{door.lower().replace(' ', '_')}_door_status", ) for door in ("Rear Left", "Rear Right", "Driver", "Passenger") ], diff --git a/homeassistant/components/renault/button.py b/homeassistant/components/renault/button.py index d3666388fbb..6a9f5e05a38 100644 --- a/homeassistant/components/renault/button.py +++ b/homeassistant/components/renault/button.py @@ -13,6 +13,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import RenaultConfigEntry from .entity import RenaultEntity +# Coordinator is used to centralize the data updates +# but renault servers are unreliable and it's safer to queue action calls +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class RenaultButtonEntityDescription(ButtonEntityDescription): diff --git a/homeassistant/components/renault/config_flow.py b/homeassistant/components/renault/config_flow.py index 82429dd146c..70544a5637f 100644 --- a/homeassistant/components/renault/config_flow.py +++ b/homeassistant/components/renault/config_flow.py @@ -3,9 +3,11 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Any +from typing import Any +import aiohttp from renault_api.const import AVAILABLE_LOCALES +from renault_api.gigya.exceptions import GigyaException import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -14,17 +16,24 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import CONF_KAMEREON_ACCOUNT_ID, CONF_LOCALE, DOMAIN from .renault_hub import RenaultHub +USER_SCHEMA = vol.Schema( + { + vol.Required(CONF_LOCALE): vol.In(AVAILABLE_LOCALES.keys()), + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) +REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) + class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a Renault config flow.""" - VERSION = 1 + renault_hub: RenaultHub def __init__(self) -> None: """Initialize the Renault config flow.""" - self._original_data: Mapping[str, Any] | None = None self.renault_config: dict[str, Any] = {} - self.renault_hub: RenaultHub | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -33,30 +42,28 @@ class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): Ask the user for API keys. 
""" + errors: dict[str, str] = {} if user_input: locale = user_input[CONF_LOCALE] self.renault_config.update(user_input) self.renault_config.update(AVAILABLE_LOCALES[locale]) self.renault_hub = RenaultHub(self.hass, locale) - if not await self.renault_hub.attempt_login( - user_input[CONF_USERNAME], user_input[CONF_PASSWORD] - ): - return self._show_user_form({"base": "invalid_credentials"}) - return await self.async_step_kamereon() - return self._show_user_form() - - def _show_user_form(self, errors: dict[str, Any] | None = None) -> ConfigFlowResult: - """Show the API keys form.""" + try: + login_success = await self.renault_hub.attempt_login( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD] + ) + except (aiohttp.ClientConnectionError, GigyaException): + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + errors["base"] = "unknown" + else: + if login_success: + return await self.async_step_kamereon() + errors["base"] = "invalid_credentials" return self.async_show_form( step_id="user", - data_schema=vol.Schema( - { - vol.Required(CONF_LOCALE): vol.In(AVAILABLE_LOCALES.keys()), - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, - } - ), - errors=errors or {}, + data_schema=USER_SCHEMA, + errors=errors, ) async def async_step_kamereon( @@ -72,18 +79,12 @@ class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): title=user_input[CONF_KAMEREON_ACCOUNT_ID], data=self.renault_config ) - assert self.renault_hub accounts = await self.renault_hub.get_account_ids() if len(accounts) == 0: return self.async_abort(reason="kamereon_no_account") if len(accounts) == 1: - await self.async_set_unique_id(accounts[0]) - self._abort_if_unique_id_configured() - - self.renault_config[CONF_KAMEREON_ACCOUNT_ID] = accounts[0] - return self.async_create_entry( - title=self.renault_config[CONF_KAMEREON_ACCOUNT_ID], - data=self.renault_config, + return await self.async_step_kamereon( + user_input={CONF_KAMEREON_ACCOUNT_ID: accounts[0]} ) return self.async_show_form( @@ -97,48 +98,29 @@ class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._original_data = entry_data return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - if not user_input: - return self._show_reauth_confirm_form() + errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() + if user_input: + # Check credentials + self.renault_hub = RenaultHub(self.hass, reauth_entry.data[CONF_LOCALE]) + if await self.renault_hub.attempt_login( + reauth_entry.data[CONF_USERNAME], user_input[CONF_PASSWORD] + ): + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]}, + ) + errors = {"base": "invalid_credentials"} - if TYPE_CHECKING: - assert self._original_data - - # Check credentials - self.renault_hub = RenaultHub(self.hass, self._original_data[CONF_LOCALE]) - if not await self.renault_hub.attempt_login( - self._original_data[CONF_USERNAME], user_input[CONF_PASSWORD] - ): - return self._show_reauth_confirm_form({"base": "invalid_credentials"}) - - # Update existing entry - data = {**self._original_data, CONF_PASSWORD: user_input[CONF_PASSWORD]} - existing_entry = await self.async_set_unique_id( - self._original_data[CONF_KAMEREON_ACCOUNT_ID] - ) - if TYPE_CHECKING: - assert 
existing_entry - self.hass.config_entries.async_update_entry(existing_entry, data=data) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - def _show_reauth_confirm_form( - self, errors: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Show the API keys form.""" - if TYPE_CHECKING: - assert self._original_data return self.async_show_form( step_id="reauth_confirm", - data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}), - errors=errors or {}, - description_placeholders={ - CONF_USERNAME: self._original_data[CONF_USERNAME] - }, + data_schema=REAUTH_SCHEMA, + errors=errors, + description_placeholders={CONF_USERNAME: reauth_entry.data[CONF_USERNAME]}, ) diff --git a/homeassistant/components/renault/coordinator.py b/homeassistant/components/renault/coordinator.py index d7aed6e3560..89e62867130 100644 --- a/homeassistant/components/renault/coordinator.py +++ b/homeassistant/components/renault/coordinator.py @@ -18,7 +18,7 @@ from renault_api.kamereon.models import KamereonVehicleDataAttributes from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -T = TypeVar("T", bound=KamereonVehicleDataAttributes | None) +T = TypeVar("T", bound=KamereonVehicleDataAttributes) # We have potentially 7 coordinators per vehicle _PARALLEL_SEMAPHORE = asyncio.Semaphore(1) @@ -27,6 +27,8 @@ _PARALLEL_SEMAPHORE = asyncio.Semaphore(1) class RenaultDataUpdateCoordinator(DataUpdateCoordinator[T]): """Handle vehicle communication with Renault servers.""" + update_method: Callable[[], Awaitable[T]] + def __init__( self, hass: HomeAssistant, @@ -50,8 +52,6 @@ class RenaultDataUpdateCoordinator(DataUpdateCoordinator[T]): async def _async_update_data(self) -> T: """Fetch the latest data from the source.""" - if self.update_method is None: - raise NotImplementedError("Update method not implemented") try: async with _PARALLEL_SEMAPHORE: data = await self.update_method() diff --git a/homeassistant/components/renault/device_tracker.py b/homeassistant/components/renault/device_tracker.py index db889868cae..08a2a698802 100644 --- a/homeassistant/components/renault/device_tracker.py +++ b/homeassistant/components/renault/device_tracker.py @@ -2,15 +2,30 @@ from __future__ import annotations +from dataclasses import dataclass + from renault_api.kamereon.models import KamereonVehicleLocationData -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import ( + TrackerEntity, + TrackerEntityDescription, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import RenaultConfigEntry from .entity import RenaultDataEntity, RenaultDataEntityDescription +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class RenaultTrackerEntityDescription( + TrackerEntityDescription, RenaultDataEntityDescription +): + """Class describing Renault tracker entities.""" + async def async_setup_entry( hass: HomeAssistant, @@ -32,6 +47,8 @@ class RenaultDeviceTracker( ): """Mixin for device tracker specific attributes.""" + entity_description: RenaultTrackerEntityDescription + @property def latitude(self) -> float | None: """Return latitude value of the device.""" @@ -42,14 +59,9 @@ class RenaultDeviceTracker( """Return longitude value of the device.""" return self.coordinator.data.gpsLongitude if self.coordinator.data else None - @property - def source_type(self) -> SourceType: - """Return the source type of the device.""" - return SourceType.GPS - -DEVICE_TRACKER_TYPES: tuple[RenaultDataEntityDescription, ...] = ( - RenaultDataEntityDescription( +DEVICE_TRACKER_TYPES: tuple[RenaultTrackerEntityDescription, ...] = ( + RenaultTrackerEntityDescription( key="location", coordinator="location", translation_key="location", diff --git a/homeassistant/components/renault/entity.py b/homeassistant/components/renault/entity.py index 10de028b2d0..7beb91e9603 100644 --- a/homeassistant/components/renault/entity.py +++ b/homeassistant/components/renault/entity.py @@ -59,6 +59,4 @@ class RenaultDataEntity( def _get_data_attr(self, key: str) -> StateType: """Return the attribute value from the coordinator data.""" - if self.coordinator.data is None: - return None # type: ignore[unreachable] return cast(StateType, getattr(self.coordinator.data, key)) diff --git a/homeassistant/components/renault/icons.json b/homeassistant/components/renault/icons.json index 75356fda411..8b9c4885eaa 100644 --- a/homeassistant/components/renault/icons.json +++ b/homeassistant/components/renault/icons.json @@ -64,8 +64,17 @@ } }, "services": { - "ac_start": "mdi:hvac", - "ac_cancel": "mdi:hvac-off", - "charge_set_schedules": "mdi:calendar-clock" + "ac_start": { + "service": "mdi:hvac" + }, + "ac_cancel": { + "service": "mdi:hvac-off" + }, + "charge_set_schedules": { + "service": "mdi:calendar-clock" + }, + "ac_set_schedules": { + "service": "mdi:calendar-clock" + } } } diff --git a/homeassistant/components/renault/manifest.json b/homeassistant/components/renault/manifest.json index 6691921e850..a4817fc84e6 100644 --- a/homeassistant/components/renault/manifest.json +++ b/homeassistant/components/renault/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["renault_api"], - "quality_scale": "platinum", - "requirements": ["renault-api==0.2.5"] + "quality_scale": "silver", + "requirements": ["renault-api==0.2.8"] } diff --git a/homeassistant/components/renault/quality_scale.yaml b/homeassistant/components/renault/quality_scale.yaml new file mode 100644 index 00000000000..f2d70622192 --- /dev/null +++ b/homeassistant/components/renault/quality_scale.yaml @@ -0,0 +1,64 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + 
test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: No options flow + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: Discovery not possible + discovery: + status: exempt + comment: Discovery not possible + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: done + stale-devices: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/renault/renault_hub.py b/homeassistant/components/renault/renault_hub.py index 97a9d080b86..76b197b2aaf 100644 --- a/homeassistant/components/renault/renault_hub.py +++ b/homeassistant/components/renault/renault_hub.py @@ -16,8 +16,8 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, - ATTR_SW_VERSION, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -106,7 +106,7 @@ class RenaultHub: manufacturer=vehicle.device_info[ATTR_MANUFACTURER], name=vehicle.device_info[ATTR_NAME], model=vehicle.device_info[ATTR_MODEL], - sw_version=vehicle.device_info[ATTR_SW_VERSION], + model_id=vehicle.device_info[ATTR_MODEL_ID], ) self._vehicles[vehicle_link.vin] = vehicle diff --git a/homeassistant/components/renault/renault_vehicle.py b/homeassistant/components/renault/renault_vehicle.py index d5c4f78126c..d8266d75319 100644 --- a/homeassistant/components/renault/renault_vehicle.py +++ b/homeassistant/components/renault/renault_vehicle.py @@ -76,8 +76,8 @@ class RenaultVehicleProxy: identifiers={(DOMAIN, cast(str, details.vin))}, manufacturer=(details.get_brand_label() or "").capitalize(), model=(details.get_model_label() or "").capitalize(), + model_id=(details.get_model_code() or ""), name=details.registrationNumber or "", - sw_version=details.get_model_code() or "", ) self.coordinators: dict[str, RenaultDataUpdateCoordinator] = {} self.hvac_target_temperature = 21 @@ -167,6 +167,18 @@ class RenaultVehicleProxy: """Start vehicle ac.""" return await self._vehicle.set_ac_start(temperature, when) + @with_error_wrapping + async def get_hvac_settings(self) -> models.KamereonVehicleHvacSettingsData: + """Get vehicle hvac settings.""" + return await self._vehicle.get_hvac_settings() + + @with_error_wrapping + async def set_hvac_schedules( + self, schedules: list[models.HvacSchedule] + ) -> models.KamereonVehicleHvacScheduleActionData: + """Set vehicle hvac schedules.""" + return await self._vehicle.set_hvac_schedules(schedules) + @with_error_wrapping async def get_charging_settings(self) -> models.KamereonVehicleChargingSettingsData: """Get vehicle charging settings.""" diff --git a/homeassistant/components/renault/select.py b/homeassistant/components/renault/select.py index b430da9396e..cab1d1f4d8a 100644 --- 
a/homeassistant/components/renault/select.py +++ b/homeassistant/components/renault/select.py @@ -15,6 +15,10 @@ from homeassistant.helpers.typing import StateType from . import RenaultConfigEntry from .entity import RenaultDataEntity, RenaultDataEntityDescription +# Coordinator is used to centralize the data updates +# but renault servers are unreliable and it's safer to queue action calls +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class RenaultSelectEntityDescription( diff --git a/homeassistant/components/renault/sensor.py b/homeassistant/components/renault/sensor.py index 5cb4ee333cc..7854d70b1c4 100644 --- a/homeassistant/components/renault/sensor.py +++ b/homeassistant/components/renault/sensor.py @@ -40,6 +40,9 @@ from .coordinator import T from .entity import RenaultDataEntity, RenaultDataEntityDescription from .renault_vehicle import RenaultVehicleProxy +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RenaultSensorEntityDescription( @@ -197,7 +200,13 @@ SENSOR_TYPES: tuple[RenaultSensorEntityDescription[Any], ...] = ( translation_key="plug_state", device_class=SensorDeviceClass.ENUM, entity_class=RenaultSensor[KamereonVehicleBatteryStatusData], - options=["unplugged", "plugged", "plug_error", "plug_unknown"], + options=[ + "unplugged", + "plugged", + "plugged_waiting_for_charge", + "plug_error", + "plug_unknown", + ], value_lambda=_get_plug_state_formatted, ), RenaultSensorEntityDescription( diff --git a/homeassistant/components/renault/services.py b/homeassistant/components/renault/services.py index e02a0febdf2..80fb2363b1e 100644 --- a/homeassistant/components/renault/services.py +++ b/homeassistant/components/renault/services.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, device_registry as dr from .const import DOMAIN @@ -66,10 +67,43 @@ SERVICE_CHARGE_SET_SCHEDULES_SCHEMA = SERVICE_VEHICLE_SCHEMA.extend( } ) +SERVICE_AC_SET_SCHEDULE_DAY_SCHEMA = vol.Schema( + { + vol.Required("readyAtTime"): cv.string, + } +) + +SERVICE_AC_SET_SCHEDULE_SCHEMA = vol.Schema( + { + vol.Required("id"): cv.positive_int, + vol.Optional("activated"): cv.boolean, + vol.Optional("monday"): vol.Any(None, SERVICE_AC_SET_SCHEDULE_DAY_SCHEMA), + vol.Optional("tuesday"): vol.Any(None, SERVICE_AC_SET_SCHEDULE_DAY_SCHEMA), + vol.Optional("wednesday"): vol.Any(None, SERVICE_AC_SET_SCHEDULE_DAY_SCHEMA), + vol.Optional("thursday"): vol.Any(None, SERVICE_AC_SET_SCHEDULE_DAY_SCHEMA), + vol.Optional("friday"): vol.Any(None, SERVICE_AC_SET_SCHEDULE_DAY_SCHEMA), + vol.Optional("saturday"): vol.Any(None, SERVICE_AC_SET_SCHEDULE_DAY_SCHEMA), + vol.Optional("sunday"): vol.Any(None, SERVICE_AC_SET_SCHEDULE_DAY_SCHEMA), + } +) +SERVICE_AC_SET_SCHEDULES_SCHEMA = SERVICE_VEHICLE_SCHEMA.extend( + { + vol.Required(ATTR_SCHEDULES): vol.All( + cv.ensure_list, [SERVICE_AC_SET_SCHEDULE_SCHEMA] + ), + } +) + SERVICE_AC_CANCEL = "ac_cancel" SERVICE_AC_START = "ac_start" SERVICE_CHARGE_SET_SCHEDULES = "charge_set_schedules" -SERVICES = [SERVICE_AC_CANCEL, SERVICE_AC_START, SERVICE_CHARGE_SET_SCHEDULES] +SERVICE_AC_SET_SCHEDULES = "ac_set_schedules" +SERVICES = [ + SERVICE_AC_CANCEL, + SERVICE_AC_START, + SERVICE_CHARGE_SET_SCHEDULES, + SERVICE_AC_SET_SCHEDULES, +] def setup_services(hass: HomeAssistant) -> None: 
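The `ac_set_schedules` schema above mirrors the existing charge schedule service: a list of entries keyed by `id`, each with an optional `activated` flag and optional per-day `readyAtTime` blocks. A hedged sketch of how the resulting `renault.ac_set_schedules` service could be called from custom async code; the device id is made up and `hass` is assumed to be a running `HomeAssistant` instance:

```python
from homeassistant.core import HomeAssistant


async def set_example_ac_schedules(hass: HomeAssistant) -> None:
    """Send a payload shaped like SERVICE_AC_SET_SCHEDULES_SCHEMA expects."""
    await hass.services.async_call(
        "renault",
        "ac_set_schedules",
        {
            # Hypothetical device id of the vehicle's device registry entry.
            "vehicle": "1234567890abcdef1234567890abcdef",
            "schedules": [
                {"id": 1, "activated": True, "monday": {"readyAtTime": "T20:45Z"}},
                {"id": 2, "activated": False},
            ],
        },
        blocking=True,
    )
```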
@@ -111,24 +145,52 @@ def setup_services(hass: HomeAssistant) -> None: "It may take some time before these changes are reflected in your vehicle" ) + async def ac_set_schedules(service_call: ServiceCall) -> None: + """Set A/C schedules.""" + schedules: list[dict[str, Any]] = service_call.data[ATTR_SCHEDULES] + proxy = get_vehicle_proxy(service_call.data) + hvac_schedules = await proxy.get_hvac_settings() + + for schedule in schedules: + hvac_schedules.update(schedule) + + if TYPE_CHECKING: + assert hvac_schedules.schedules is not None + LOGGER.debug("HVAC set schedules attempt: %s", schedules) + result = await proxy.set_hvac_schedules(hvac_schedules.schedules) + + LOGGER.debug("HVAC set schedules result: %s", result) + LOGGER.debug( + "It may take some time before these changes are reflected in your vehicle" + ) + def get_vehicle_proxy(service_call_data: Mapping) -> RenaultVehicleProxy: """Get vehicle from service_call data.""" device_registry = dr.async_get(hass) device_id = service_call_data[ATTR_VEHICLE] device_entry = device_registry.async_get(device_id) if device_entry is None: - raise ValueError(f"Unable to find device with id: {device_id}") + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_device_id", + translation_placeholders={"device_id": device_id}, + ) loaded_entries: list[RenaultConfigEntry] = [ entry for entry in hass.config_entries.async_entries(DOMAIN) if entry.state == ConfigEntryState.LOADED + and entry.entry_id in device_entry.config_entries ] for entry in loaded_entries: for vin, vehicle in entry.runtime_data.vehicles.items(): if (DOMAIN, vin) in device_entry.identifiers: return vehicle - raise ValueError(f"Unable to find vehicle with VIN: {device_entry.identifiers}") + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_config_entry_for_device", + translation_placeholders={"device_id": device_entry.name or device_id}, + ) hass.services.async_register( DOMAIN, @@ -148,3 +210,9 @@ def setup_services(hass: HomeAssistant) -> None: charge_set_schedules, schema=SERVICE_CHARGE_SET_SCHEDULES_SCHEMA, ) + hass.services.async_register( + DOMAIN, + SERVICE_AC_SET_SCHEDULES, + ac_set_schedules, + schema=SERVICE_AC_SET_SCHEDULES_SCHEMA, + ) diff --git a/homeassistant/components/renault/services.yaml b/homeassistant/components/renault/services.yaml index 2dc99833d5f..835a57bd9c1 100644 --- a/homeassistant/components/renault/services.yaml +++ b/homeassistant/components/renault/services.yaml @@ -27,6 +27,33 @@ ac_cancel: device: integration: renault +ac_set_schedules: + fields: + vehicle: + required: true + selector: + device: + integration: renault + schedules: + example: + - id: 1 + activated: false + - id: 2 + activated: true + monday: + readyAtTime: "T20:45Z" + sunday: + readyAtTime: "T20:45Z" + - id: 3 + activated: false + - id: 4 + activated: false + - id: 5 + activated: false + required: true + selector: + object: + charge_set_schedules: fields: vehicle: @@ -35,31 +62,53 @@ charge_set_schedules: device: integration: renault schedules: - example: >- - [ - { - 'id':1, - 'activated':true, - 'monday':{'startTime':'T12:00Z','duration':15}, - 'tuesday':{'startTime':'T12:00Z','duration':15}, - 'wednesday':{'startTime':'T12:00Z','duration':15}, - 'thursday':{'startTime':'T12:00Z','duration':15}, - 'friday':{'startTime':'T12:00Z','duration':15}, - 'saturday':{'startTime':'T12:00Z','duration':15}, - 'sunday':{'startTime':'T12:00Z','duration':15} - }, - { - 'id':2, - 'activated':false, - 
'monday':{'startTime':'T12:00Z','duration':240}, - 'tuesday':{'startTime':'T12:00Z','duration':240}, - 'wednesday':{'startTime':'T12:00Z','duration':240}, - 'thursday':{'startTime':'T12:00Z','duration':240}, - 'friday':{'startTime':'T12:00Z','duration':240}, - 'saturday':{'startTime':'T12:00Z','duration':240}, - 'sunday':{'startTime':'T12:00Z','duration':240} - }, - ] + example: + - id: 1 + activated: true + monday: + startTime: "T12:00Z" + duration: 15 + tuesday: + startTime: "T12:00Z" + duration: 15 + wednesday: + startTime: "T12:00Z" + duration: 15 + thursday: + startTime: "T12:00Z" + duration: 15 + friday: + startTime: "T12:00Z" + duration: 15 + saturday: + startTime: "T12:00Z" + duration: 15 + sunday: + startTime: "T12:00Z" + duration: 15 + - id: 2 + activated: true + monday: + startTime: "T12:00Z" + duration: 240 + tuesday: + startTime: "T12:00Z" + duration: 240 + wednesday: + startTime: "T12:00Z" + duration: 240 + thursday: + startTime: "T12:00Z" + duration: 240 + friday: + startTime: "T12:00Z" + duration: 240 + saturday: + startTime: "T12:00Z" + duration: 240 + sunday: + startTime: "T12:00Z" + duration: 240 required: true selector: object: diff --git a/homeassistant/components/renault/strings.json b/homeassistant/components/renault/strings.json index 5217b4ff65a..7d9cae1bcf1 100644 --- a/homeassistant/components/renault/strings.json +++ b/homeassistant/components/renault/strings.json @@ -6,19 +6,28 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { - "invalid_credentials": "[%key:common::config_flow::error::invalid_auth%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_credentials": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "step": { "kamereon": { "data": { - "kamereon_account_id": "Kamereon account id" + "kamereon_account_id": "Account ID" }, - "title": "Select Kamereon account id" + "data_description": { + "kamereon_account_id": "The Kamereon account ID associated with your vehicle" + }, + "title": "Kamereon Account ID", + "description": "You have multiple Kamereon accounts associated to this email, please select one" }, "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" }, + "data_description": { + "password": "Your MyRenault phone application password" + }, "description": "Please update your password for {username}", "title": "[%key:common::config_flow::title::reauth%]" }, @@ -28,6 +37,11 @@ "username": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" }, + "data_description": { + "locale": "Your country code", + "username": "Your MyRenault phone application email address", + "password": "Your MyRenault phone application password" + }, "title": "Set Renault credentials" } } @@ -141,6 +155,7 @@ "state": { "unplugged": "Unplugged", "plugged": "Plugged in", + "plugged_waiting_for_charge": "Plugged in, waiting for charge", "plug_error": "Plug error", "plug_unknown": "Plug unknown" } @@ -174,7 +189,7 @@ }, "ac_cancel": { "name": "Cancel A/C", - "description": "Canceles A/C on vehicle.", + "description": "Cancels A/C on vehicle.", "fields": { "vehicle": { "name": "Vehicle", @@ -195,6 +210,28 @@ "description": "Schedule details." 
} } + }, + "ac_set_schedules": { + "name": "Update A/C schedule", + "description": "Updates A/C schedule on vehicle.", + "fields": { + "vehicle": { + "name": "Vehicle", + "description": "[%key:component::renault::services::ac_start::fields::vehicle::description%]" + }, + "schedules": { + "name": "Schedules", + "description": "[%key:component::renault::services::charge_set_schedules::fields::schedules::description%]" + } + } + } + }, + "exceptions": { + "invalid_device_id": { + "message": "No device with id {device_id} was found" + }, + "no_config_entry_for_device": { + "message": "No loaded config entry was found for device with id {device_id}" } } } diff --git a/homeassistant/components/renson/fan.py b/homeassistant/components/renson/fan.py index 44bea28ce3c..56b3655ef94 100644 --- a/homeassistant/components/renson/fan.py +++ b/homeassistant/components/renson/fan.py @@ -127,7 +127,6 @@ class RensonFan(RensonEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, api: RensonVentilation, coordinator: RensonCoordinator) -> None: """Initialize the Renson fan.""" diff --git a/homeassistant/components/renson/icons.json b/homeassistant/components/renson/icons.json index b7b1fdfdd8c..b558759a0dd 100644 --- a/homeassistant/components/renson/icons.json +++ b/homeassistant/components/renson/icons.json @@ -17,8 +17,14 @@ } }, "services": { - "set_timer_level": "mdi:timer", - "set_breeze": "mdi:weather-windy", - "set_pollution_settings": "mdi:air-filter" + "set_timer_level": { + "service": "mdi:timer" + }, + "set_breeze": { + "service": "mdi:weather-windy" + }, + "set_pollution_settings": { + "service": "mdi:air-filter" + } } } diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index f64c6bd9cf3..ae0badb3d84 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -9,8 +9,8 @@ import logging from reolink_aio.api import RETRY_ATTEMPTS from reolink_aio.exceptions import CredentialsInvalidError, ReolinkError -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_PORT, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import ( @@ -22,11 +22,11 @@ from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DOMAIN +from .const import CONF_USE_HTTPS, DOMAIN from .exceptions import PasswordIncompatible, ReolinkException, UserNotAdmin from .host import ReolinkHost from .services import async_setup_services -from .util import ReolinkData, get_device_uid_and_ch +from .util import ReolinkConfigEntry, ReolinkData, get_device_uid_and_ch _LOGGER = logging.getLogger(__name__) @@ -56,7 +56,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: ReolinkConfigEntry +) -> bool: """Set up Reolink from a config entry.""" host = ReolinkHost(hass, config_entry.data, config_entry.options) 
@@ -81,6 +83,24 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
         hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, host.stop)
     )

+    # update the port info if needed for the next time
+    if (
+        host.api.port != config_entry.data[CONF_PORT]
+        or host.api.use_https != config_entry.data[CONF_USE_HTTPS]
+    ):
+        _LOGGER.warning(
+            "HTTP(s) port of Reolink %s, changed from %s to %s",
+            host.api.nvr_name,
+            config_entry.data[CONF_PORT],
+            host.api.port,
+        )
+        data = {
+            **config_entry.data,
+            CONF_PORT: host.api.port,
+            CONF_USE_HTTPS: host.api.use_https,
+        }
+        hass.config_entries.async_update_entry(config_entry, data=data)
+
     async def async_device_config_update() -> None:
         """Update the host state cache and renew the ONVIF-subscription."""
         async with asyncio.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)):
@@ -101,6 +121,12 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
         async with asyncio.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)):
             await host.renew()

+        if host.api.new_devices and config_entry.state == ConfigEntryState.LOADED:
+            # There are new cameras/chimes connected, reload to add them.
+            hass.async_create_task(
+                hass.config_entries.async_reload(config_entry.entry_id)
+            )
+
     async def async_check_firmware_update() -> None:
         """Check for firmware updates."""
         async with asyncio.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)):
@@ -126,6 +152,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
     device_coordinator = DataUpdateCoordinator(
         hass,
         _LOGGER,
+        config_entry=config_entry,
         name=f"reolink.{host.api.nvr_name}",
         update_method=async_device_config_update,
         update_interval=DEVICE_UPDATE_INTERVAL,
@@ -133,6 +160,7 @@
     firmware_coordinator = DataUpdateCoordinator(
         hass,
         _LOGGER,
+        config_entry=config_entry,
         name=f"reolink.{host.api.nvr_name}.firmware",
         update_method=async_check_firmware_update,
         update_interval=FIRMWARE_UPDATE_INTERVAL,
@@ -151,7 +179,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
         await host.stop()
         raise

-    hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = ReolinkData(
+    config_entry.runtime_data = ReolinkData(
         host=host,
         device_coordinator=device_coordinator,
         firmware_coordinator=firmware_coordinator,
@@ -168,30 +196,29 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
     return True


-async def entry_update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
+async def entry_update_listener(
+    hass: HomeAssistant, config_entry: ReolinkConfigEntry
+) -> None:
     """Update the configuration of the host entity."""
     await hass.config_entries.async_reload(config_entry.entry_id)


-async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
+async def async_unload_entry(
+    hass: HomeAssistant, config_entry: ReolinkConfigEntry
+) -> bool:
     """Unload a config entry."""
-    host: ReolinkHost = hass.data[DOMAIN][config_entry.entry_id].host
+    host: ReolinkHost = config_entry.runtime_data.host

     await host.stop()

-    if unload_ok := await hass.config_entries.async_unload_platforms(
-        config_entry, PLATFORMS
-    ):
-        hass.data[DOMAIN].pop(config_entry.entry_id)
-
-    return unload_ok
+    return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)


 async def async_remove_config_entry_device(
-    hass: HomeAssistant, config_entry: ConfigEntry, device: dr.DeviceEntry
+    hass: HomeAssistant,
config_entry: ReolinkConfigEntry, device: dr.DeviceEntry ) -> bool: """Remove a device from a config entry.""" - host: ReolinkHost = hass.data[DOMAIN][config_entry.entry_id].host + host: ReolinkHost = config_entry.runtime_data.host (device_uid, ch, is_chime) = get_device_uid_and_ch(device, host) if is_chime: @@ -299,7 +326,19 @@ def migrate_entity_ids( else: new_device_id = f"{device_uid[0]}_{host.api.camera_uid(ch)}" new_identifiers = {(DOMAIN, new_device_id)} - device_reg.async_update_device(device.id, new_identifiers=new_identifiers) + existing_device = device_reg.async_get_device(identifiers=new_identifiers) + if existing_device is None: + device_reg.async_update_device( + device.id, new_identifiers=new_identifiers + ) + else: + _LOGGER.warning( + "Reolink device with uid %s already exists, " + "removing device with uid %s", + new_device_id, + device_uid, + ) + device_reg.async_remove_device(device.id) entity_reg = er.async_get(hass) entities = er.async_entries_for_config_entry(entity_reg, config_entry_id) @@ -325,4 +364,18 @@ def migrate_entity_ids( id_parts = entity.unique_id.split("_", 2) if host.api.supported(ch, "UID") and id_parts[1] != host.api.camera_uid(ch): new_id = f"{host.unique_id}_{host.api.camera_uid(ch)}_{id_parts[2]}" - entity_reg.async_update_entity(entity.entity_id, new_unique_id=new_id) + existing_entity = entity_reg.async_get_entity_id( + entity.domain, entity.platform, new_id + ) + if existing_entity is None: + entity_reg.async_update_entity( + entity.entity_id, new_unique_id=new_id + ) + else: + _LOGGER.warning( + "Reolink entity with unique_id %s already exists, " + "removing device with unique_id %s", + new_id, + entity.unique_id, + ) + entity_reg.async_remove(entity.entity_id) diff --git a/homeassistant/components/reolink/binary_sensor.py b/homeassistant/components/reolink/binary_sensor.py index 70c21849bc2..c168c97e809 100644 --- a/homeassistant/components/reolink/binary_sensor.py +++ b/homeassistant/components/reolink/binary_sensor.py @@ -20,15 +20,15 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ReolinkData -from .const import DOMAIN from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription +from .util import ReolinkConfigEntry, ReolinkData + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -44,29 +44,34 @@ class ReolinkBinarySensorEntityDescription( BINARY_PUSH_SENSORS = ( ReolinkBinarySensorEntityDescription( key="motion", + cmd_id=33, device_class=BinarySensorDeviceClass.MOTION, value=lambda api, ch: api.motion_detected(ch), ), ReolinkBinarySensorEntityDescription( key=FACE_DETECTION_TYPE, + cmd_id=33, translation_key="face", value=lambda api, ch: api.ai_detected(ch, FACE_DETECTION_TYPE), supported=lambda api, ch: api.ai_supported(ch, FACE_DETECTION_TYPE), ), ReolinkBinarySensorEntityDescription( key=PERSON_DETECTION_TYPE, + cmd_id=33, translation_key="person", value=lambda api, ch: api.ai_detected(ch, PERSON_DETECTION_TYPE), supported=lambda api, ch: api.ai_supported(ch, PERSON_DETECTION_TYPE), ), ReolinkBinarySensorEntityDescription( key=VEHICLE_DETECTION_TYPE, + cmd_id=33, translation_key="vehicle", value=lambda api, ch: api.ai_detected(ch, VEHICLE_DETECTION_TYPE), supported=lambda api, ch: api.ai_supported(ch, VEHICLE_DETECTION_TYPE), ), ReolinkBinarySensorEntityDescription( key=PET_DETECTION_TYPE, + cmd_id=33, translation_key="pet", value=lambda api, ch: api.ai_detected(ch, PET_DETECTION_TYPE), supported=lambda api, ch: ( @@ -76,18 +81,21 @@ BINARY_PUSH_SENSORS = ( ), ReolinkBinarySensorEntityDescription( key=PET_DETECTION_TYPE, + cmd_id=33, translation_key="animal", value=lambda api, ch: api.ai_detected(ch, PET_DETECTION_TYPE), supported=lambda api, ch: api.supported(ch, "ai_animal"), ), ReolinkBinarySensorEntityDescription( key=PACKAGE_DETECTION_TYPE, + cmd_id=33, translation_key="package", value=lambda api, ch: api.ai_detected(ch, PACKAGE_DETECTION_TYPE), supported=lambda api, ch: api.ai_supported(ch, PACKAGE_DETECTION_TYPE), ), ReolinkBinarySensorEntityDescription( key="visitor", + cmd_id=33, translation_key="visitor", value=lambda api, ch: api.visitor_detected(ch), supported=lambda api, ch: api.is_doorbell(ch), @@ -97,6 +105,7 @@ BINARY_PUSH_SENSORS = ( BINARY_SENSORS = ( ReolinkBinarySensorEntityDescription( key="sleep", + cmd_id=145, cmd_key="GetChannelstatus", translation_key="sleep", entity_category=EntityCategory.DIAGNOSTIC, @@ -108,11 +117,11 @@ BINARY_SENSORS = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Reolink IP Camera.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data entities: list[ReolinkBinarySensorEntity] = [] for channel in reolink_data.host.api.channels: @@ -167,14 +176,14 @@ class ReolinkPushBinarySensorEntity(ReolinkBinarySensorEntity): self.async_on_remove( async_dispatcher_connect( self.hass, - f"{self._host.webhook_id}_{self._channel}", + f"{self._host.unique_id}_{self._channel}", self._async_handle_event, ) ) self.async_on_remove( async_dispatcher_connect( self.hass, - f"{self._host.webhook_id}_all", + f"{self._host.unique_id}_all", self._async_handle_event, ) ) diff --git a/homeassistant/components/reolink/button.py b/homeassistant/components/reolink/button.py index eba0570a3fb..cd1e1b05fae 100644 --- a/homeassistant/components/reolink/button.py +++ b/homeassistant/components/reolink/button.py @@ -16,7 +16,6 @@ from homeassistant.components.button import ( ButtonEntityDescription, ) 
from homeassistant.components.camera import CameraEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -26,17 +25,18 @@ from homeassistant.helpers.entity_platform import ( async_get_current_platform, ) -from . import ReolinkData -from .const import DOMAIN from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) +from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 ATTR_SPEED = "speed" SUPPORT_PTZ_SPEED = CameraEntityFeature.STREAM +SERVICE_PTZ_MOVE = "ptz_move" @dataclass(frozen=True, kw_only=True) @@ -151,11 +151,11 @@ HOST_BUTTON_ENTITIES = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Reolink button entities.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data entities: list[ReolinkButtonEntity | ReolinkHostButtonEntity] = [ ReolinkButtonEntity(reolink_data, channel, entity_description) @@ -172,7 +172,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( - "ptz_move", + SERVICE_PTZ_MOVE, {vol.Required(ATTR_SPEED): cv.positive_int}, "async_ptz_move", [SUPPORT_PTZ_SPEED], @@ -212,7 +212,7 @@ class ReolinkButtonEntity(ReolinkChannelCoordinatorEntity, ButtonEntity): except ReolinkError as err: raise HomeAssistantError(err) from err - async def async_ptz_move(self, **kwargs) -> None: + async def async_ptz_move(self, **kwargs: Any) -> None: """PTZ move with speed.""" speed = kwargs[ATTR_SPEED] try: diff --git a/homeassistant/components/reolink/camera.py b/homeassistant/components/reolink/camera.py index 4adac1a96d8..26ef0b0f4fc 100644 --- a/homeassistant/components/reolink/camera.py +++ b/homeassistant/components/reolink/camera.py @@ -13,16 +13,15 @@ from homeassistant.components.camera import ( CameraEntityDescription, CameraEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ReolinkData -from .const import DOMAIN from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription +from .util import ReolinkConfigEntry, ReolinkData _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -91,11 +90,11 @@ CAMERA_ENTITIES = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Reolink IP Camera.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data entities: list[ReolinkCamera] = [] for entity_description in CAMERA_ENTITIES: diff --git a/homeassistant/components/reolink/config_flow.py b/homeassistant/components/reolink/config_flow.py index 6d0381b025f..c28e076aab4 100644 --- a/homeassistant/components/reolink/config_flow.py +++ b/homeassistant/components/reolink/config_flow.py @@ -7,12 +7,18 @@ import logging from typing import Any from reolink_aio.api import ALLOWED_SPECIAL_CHARS -from reolink_aio.exceptions import ApiError, CredentialsInvalidError, ReolinkError +from reolink_aio.exceptions import ( + ApiError, + CredentialsInvalidError, + LoginFirmwareError, + ReolinkError, +) import voluptuous as vol from homeassistant.components import dhcp from homeassistant.config_entries import ( - ConfigEntry, + SOURCE_REAUTH, + SOURCE_RECONFIGURE, ConfigFlow, ConfigFlowResult, OptionsFlow, @@ -37,7 +43,7 @@ from .exceptions import ( UserNotAdmin, ) from .host import ReolinkHost -from .util import is_connected +from .util import ReolinkConfigEntry, is_connected _LOGGER = logging.getLogger(__name__) @@ -48,10 +54,6 @@ DEFAULT_OPTIONS = {CONF_PROTOCOL: DEFAULT_PROTOCOL} class ReolinkOptionsFlowHandler(OptionsFlow): """Handle Reolink options.""" - def __init__(self, config_entry): - """Initialize ReolinkOptionsFlowHandler.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -99,15 +101,14 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): self._host: str | None = None self._username: str = "admin" self._password: str | None = None - self._reauth: bool = False @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, ) -> ReolinkOptionsFlowHandler: """Options callback for Reolink.""" - return ReolinkOptionsFlowHandler(config_entry) + return ReolinkOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -116,23 +117,29 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): self._host = entry_data[CONF_HOST] self._username = entry_data[CONF_USERNAME] self._password = entry_data[CONF_PASSWORD] - self._reauth = True - self.context["title_placeholders"]["ip_address"] = entry_data[CONF_HOST] - self.context["title_placeholders"]["hostname"] = self.context[ - "title_placeholders" - ]["name"] + placeholders = { + **self.context["title_placeholders"], + "ip_address": entry_data[CONF_HOST], + "hostname": self.context["title_placeholders"]["name"], + } + self.context["title_placeholders"] = placeholders return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Dialog that informs the user that reauth is required.""" - if user_input is not None: - return await self.async_step_user() - placeholders = {"name": 
self.context["title_placeholders"]["name"]} - return self.async_show_form( - step_id="reauth_confirm", description_placeholders=placeholders - ) + """Perform a reauthentication.""" + return await self.async_step_user() + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Perform a reconfiguration.""" + entry_data = self._get_reconfigure_entry().data + self._host = entry_data[CONF_HOST] + self._username = entry_data[CONF_USERNAME] + self._password = entry_data[CONF_PASSWORD] + return await self.async_step_user() async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo @@ -205,6 +212,11 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): if CONF_HOST not in user_input: user_input[CONF_HOST] = self._host + # remember input in case of a error + self._username = user_input[CONF_USERNAME] + self._password = user_input[CONF_PASSWORD] + self._host = user_input[CONF_HOST] + host = ReolinkHost(self.hass, user_input, DEFAULT_OPTIONS) try: await host.async_init() @@ -217,6 +229,15 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): placeholders["special_chars"] = ALLOWED_SPECIAL_CHARS except CredentialsInvalidError: errors[CONF_PASSWORD] = "invalid_auth" + except LoginFirmwareError: + errors["base"] = "update_needed" + placeholders["current_firmware"] = host.api.sw_version + placeholders["needed_firmware"] = ( + host.api.sw_version_required.version_string + ) + placeholders["download_center_url"] = ( + "https://reolink.com/download-center" + ) except ApiError as err: placeholders["error"] = str(err) errors[CONF_HOST] = "api_error" @@ -241,18 +262,18 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): user_input[CONF_USE_HTTPS] = host.api.use_https mac_address = format_mac(host.api.mac_address) - existing_entry = await self.async_set_unique_id( - mac_address, raise_on_progress=False - ) - if existing_entry and self._reauth: - if self.hass.config_entries.async_update_entry( - existing_entry, data=user_input - ): - await self.hass.config_entries.async_reload( - existing_entry.entry_id - ) - return self.async_abort(reason="reauth_successful") - self._abort_if_unique_id_configured(updates=user_input) + await self.async_set_unique_id(mac_address, raise_on_progress=False) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + entry=self._get_reauth_entry(), data=user_input + ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + entry=self._get_reconfigure_entry(), data=user_input + ) + self._abort_if_unique_id_configured() return self.async_create_entry( title=str(host.api.nvr_name), @@ -266,7 +287,7 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): vol.Required(CONF_PASSWORD, default=self._password): str, } ) - if self._host is None or errors: + if self._host is None or self.source == SOURCE_RECONFIGURE or errors: data_schema = data_schema.extend( { vol.Required(CONF_HOST, default=self._host): str, diff --git a/homeassistant/components/reolink/diagnostics.py b/homeassistant/components/reolink/diagnostics.py index b06ddcd458f..693f2ba59a4 100644 --- a/homeassistant/components/reolink/diagnostics.py +++ b/homeassistant/components/reolink/diagnostics.py @@ -4,18 +4,16 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . 
import ReolinkData -from .const import DOMAIN +from .util import ReolinkConfigEntry, ReolinkData async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: ReolinkConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data host = reolink_data.host api = host.api diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index c47822e125c..dc2366e8f56 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -7,6 +7,7 @@ from dataclasses import dataclass from reolink_aio.api import DUAL_LENS_MODELS, Chime, Host +from homeassistant.core import callback from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import ( @@ -19,21 +20,34 @@ from .const import DOMAIN @dataclass(frozen=True, kw_only=True) -class ReolinkChannelEntityDescription(EntityDescription): - """A class that describes entities for a camera channel.""" +class ReolinkEntityDescription(EntityDescription): + """A class that describes entities for Reolink.""" cmd_key: str | None = None + cmd_id: int | None = None + + +@dataclass(frozen=True, kw_only=True) +class ReolinkChannelEntityDescription(ReolinkEntityDescription): + """A class that describes entities for a camera channel.""" + supported: Callable[[Host, int], bool] = lambda api, ch: True @dataclass(frozen=True, kw_only=True) -class ReolinkHostEntityDescription(EntityDescription): +class ReolinkHostEntityDescription(ReolinkEntityDescription): """A class that describes host entities.""" - cmd_key: str | None = None supported: Callable[[Host], bool] = lambda api: True +@dataclass(frozen=True, kw_only=True) +class ReolinkChimeEntityDescription(ReolinkEntityDescription): + """A class that describes entities for a chime.""" + + supported: Callable[[Chime], bool] = lambda chime: True + + class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None]]): """Parent class for entities that control the Reolink NVR itself, without a channel. 
@@ -42,7 +56,7 @@ class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None] """ _attr_has_entity_name = True - entity_description: ReolinkHostEntityDescription | ReolinkChannelEntityDescription + entity_description: ReolinkEntityDescription def __init__( self, @@ -78,18 +92,35 @@ class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None] """Return True if entity is available.""" return self._host.api.session_active and super().available + @callback + def _push_callback(self) -> None: + """Handle incoming TCP push event.""" + self.async_write_ha_state() + + def register_callback(self, unique_id: str, cmd_id: int) -> None: + """Register callback for TCP push events.""" + self._host.api.baichuan.register_callback( # pragma: no cover + unique_id, self._push_callback, cmd_id + ) + async def async_added_to_hass(self) -> None: """Entity created.""" await super().async_added_to_hass() cmd_key = self.entity_description.cmd_key + cmd_id = self.entity_description.cmd_id if cmd_key is not None: self._host.async_register_update_cmd(cmd_key) + if cmd_id is not None and self._attr_unique_id is not None: + self.register_callback(self._attr_unique_id, cmd_id) async def async_will_remove_from_hass(self) -> None: """Entity removed.""" cmd_key = self.entity_description.cmd_key + cmd_id = self.entity_description.cmd_id if cmd_key is not None: self._host.async_unregister_update_cmd(cmd_key) + if cmd_id is not None and self._attr_unique_id is not None: + self._host.api.baichuan.unregister_callback(self._attr_unique_id) await super().async_will_remove_from_hass() @@ -102,8 +133,6 @@ class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None] class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): """Parent class for Reolink hardware camera entities connected to a channel of the NVR.""" - entity_description: ReolinkChannelEntityDescription - def __init__( self, reolink_data: ReolinkData, @@ -145,6 +174,17 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): configuration_url=self._conf_url, ) + @property + def available(self) -> bool: + """Return True if entity is available.""" + return super().available and self._host.api.camera_online(self._channel) + + def register_callback(self, unique_id: str, cmd_id: int) -> None: + """Register callback for TCP push events.""" + self._host.api.baichuan.register_callback( + unique_id, self._push_callback, cmd_id, self._channel + ) + async def async_added_to_hass(self) -> None: """Entity created.""" await super().async_added_to_hass() diff --git a/homeassistant/components/reolink/host.py b/homeassistant/components/reolink/host.py index 310188b720e..97d888c0323 100644 --- a/homeassistant/components/reolink/host.py +++ b/homeassistant/components/reolink/host.py @@ -25,10 +25,12 @@ from homeassistant.const import ( ) from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_call_later from homeassistant.helpers.network import NoURLAvailableError, get_url +from homeassistant.util.ssl import SSLCipherList from .const import CONF_USE_HTTPS, DOMAIN from .exceptions import ( @@ -39,6 +41,7 @@ from .exceptions import ( ) DEFAULT_TIMEOUT = 30 +FIRST_TCP_PUSH_TIMEOUT = 10 
FIRST_ONVIF_TIMEOUT = 10 FIRST_ONVIF_LONG_POLL_TIMEOUT = 90 SUBSCRIPTION_RENEW_THRESHOLD = 300 @@ -64,10 +67,16 @@ class ReolinkHost: ) -> None: """Initialize Reolink Host. Could be either NVR, or Camera.""" self._hass: HomeAssistant = hass - - self._clientsession: aiohttp.ClientSession | None = None self._unique_id: str = "" + def get_aiohttp_session() -> aiohttp.ClientSession: + """Return the HA aiohttp session.""" + return async_get_clientsession( + hass, + verify_ssl=False, + ssl_cipher=SSLCipherList.INSECURE, + ) + self._api = Host( config[CONF_HOST], config[CONF_USERNAME], @@ -76,6 +85,7 @@ class ReolinkHost: use_https=config.get(CONF_USE_HTTPS), protocol=options[CONF_PROTOCOL], timeout=DEFAULT_TIMEOUT, + aiohttp_get_session_callback=get_aiohttp_session, ) self.last_wake: float = 0 @@ -96,9 +106,11 @@ class ReolinkHost: self._long_poll_received: bool = False self._long_poll_error: bool = False self._cancel_poll: CALLBACK_TYPE | None = None + self._cancel_tcp_push_check: CALLBACK_TYPE | None = None self._cancel_onvif_check: CALLBACK_TYPE | None = None self._cancel_long_poll_check: CALLBACK_TYPE | None = None self._poll_job = HassJob(self._async_poll_all_motion, cancel_on_shutdown=True) + self._fast_poll_error: bool = False self._long_poll_task: asyncio.Task | None = None self._lost_subscription: bool = False @@ -211,49 +223,14 @@ class ReolinkHost: else: self._unique_id = format_mac(self._api.mac_address) - if self._onvif_push_supported: - try: - await self.subscribe() - except ReolinkError: - self._onvif_push_supported = False - self.unregister_webhook() - await self._api.unsubscribe() - else: - if self._api.supported(None, "initial_ONVIF_state"): - _LOGGER.debug( - "Waiting for initial ONVIF state on webhook '%s'", - self._webhook_url, - ) - else: - _LOGGER.debug( - "Camera model %s most likely does not push its initial state" - " upon ONVIF subscription, do not check", - self._api.model, - ) - self._cancel_onvif_check = async_call_later( - self._hass, FIRST_ONVIF_TIMEOUT, self._async_check_onvif - ) - if not self._onvif_push_supported: - _LOGGER.debug( - "Camera model %s does not support ONVIF push, using ONVIF long polling instead", - self._api.model, + try: + await self._api.baichuan.subscribe_events() + except ReolinkError: + await self._async_check_tcp_push() + else: + self._cancel_tcp_push_check = async_call_later( + self._hass, FIRST_TCP_PUSH_TIMEOUT, self._async_check_tcp_push ) - try: - await self._async_start_long_polling(initial=True) - except NotSupportedError: - _LOGGER.debug( - "Camera model %s does not support ONVIF long polling, using fast polling instead", - self._api.model, - ) - self._onvif_long_poll_supported = False - await self._api.unsubscribe() - await self._async_poll_all_motion() - else: - self._cancel_long_poll_check = async_call_later( - self._hass, - FIRST_ONVIF_LONG_POLL_TIMEOUT, - self._async_check_onvif_long_poll, - ) ch_list: list[int | None] = [None] if self._api.is_nvr: @@ -285,7 +262,68 @@ class ReolinkHost: else: ir.async_delete_issue(self._hass, DOMAIN, f"firmware_update_{key}") - async def _async_check_onvif(self, *_) -> None: + async def _async_check_tcp_push(self, *_: Any) -> None: + """Check the TCP push subscription.""" + if self._api.baichuan.events_active: + ir.async_delete_issue(self._hass, DOMAIN, "webhook_url") + self._cancel_tcp_push_check = None + return + + _LOGGER.debug( + "Reolink %s, did not receive initial TCP push event after %i seconds", + self._api.nvr_name, + FIRST_TCP_PUSH_TIMEOUT, + ) + + if self._onvif_push_supported: + 
try: + await self.subscribe() + except ReolinkError: + self._onvif_push_supported = False + self.unregister_webhook() + await self._api.unsubscribe() + else: + if self._api.supported(None, "initial_ONVIF_state"): + _LOGGER.debug( + "Waiting for initial ONVIF state on webhook '%s'", + self._webhook_url, + ) + else: + _LOGGER.debug( + "Camera model %s most likely does not push its initial state" + " upon ONVIF subscription, do not check", + self._api.model, + ) + self._cancel_onvif_check = async_call_later( + self._hass, FIRST_ONVIF_TIMEOUT, self._async_check_onvif + ) + + # start long polling if ONVIF push failed immediately + if not self._onvif_push_supported: + _LOGGER.debug( + "Camera model %s does not support ONVIF push, using ONVIF long polling instead", + self._api.model, + ) + try: + await self._async_start_long_polling(initial=True) + except NotSupportedError: + _LOGGER.debug( + "Camera model %s does not support ONVIF long polling, using fast polling instead", + self._api.model, + ) + self._onvif_long_poll_supported = False + await self._api.unsubscribe() + await self._async_poll_all_motion() + else: + self._cancel_long_poll_check = async_call_later( + self._hass, + FIRST_ONVIF_LONG_POLL_TIMEOUT, + self._async_check_onvif_long_poll, + ) + + self._cancel_tcp_push_check = None + + async def _async_check_onvif(self, *_: Any) -> None: """Check the ONVIF subscription.""" if self._webhook_reachable: ir.async_delete_issue(self._hass, DOMAIN, "webhook_url") @@ -306,7 +344,7 @@ class ReolinkHost: self._cancel_onvif_check = None - async def _async_check_onvif_long_poll(self, *_) -> None: + async def _async_check_onvif_long_poll(self, *_: Any) -> None: """Check if ONVIF long polling is working.""" if not self._long_poll_received: _LOGGER.debug( @@ -382,6 +420,16 @@ class ReolinkHost: async def disconnect(self) -> None: """Disconnect from the API, so the connection will be released.""" + try: + await self._api.baichuan.unsubscribe_events() + except ReolinkError as err: + _LOGGER.error( + "Reolink error while unsubscribing Baichuan from host %s:%s: %s", + self._api.host, + self._api.port, + err, + ) + try: await self._api.unsubscribe() except ReolinkError as err: @@ -402,7 +450,7 @@ class ReolinkHost: err, ) - async def _async_start_long_polling(self, initial=False) -> None: + async def _async_start_long_polling(self, initial: bool = False) -> None: """Start ONVIF long polling task.""" if self._long_poll_task is None: try: @@ -437,13 +485,24 @@ class ReolinkHost: self._long_poll_task.cancel() self._long_poll_task = None - await self._api.unsubscribe(sub_type=SubType.long_poll) + try: + await self._api.unsubscribe(sub_type=SubType.long_poll) + except ReolinkError as err: + _LOGGER.error( + "Reolink error while unsubscribing from host %s:%s: %s", + self._api.host, + self._api.port, + err, + ) - async def stop(self, event=None) -> None: + async def stop(self, *_: Any) -> None: """Disconnect the API.""" if self._cancel_poll is not None: self._cancel_poll() self._cancel_poll = None + if self._cancel_tcp_push_check is not None: + self._cancel_tcp_push_check() + self._cancel_tcp_push_check = None if self._cancel_onvif_check is not None: self._cancel_onvif_check() self._cancel_onvif_check = None @@ -477,8 +536,15 @@ class ReolinkHost: async def renew(self) -> None: """Renew the subscription of motion events (lease time is 15 minutes).""" + await self._api.baichuan.check_subscribe_events() + + if self._api.baichuan.events_active and self._api.subscribed(SubType.push): + # TCP push active, unsubscribe 
from ONVIF push because not needed + self.unregister_webhook() + await self._api.unsubscribe() + try: - if self._onvif_push_supported: + if self._onvif_push_supported and not self._api.baichuan.events_active: await self._renew(SubType.push) if self._onvif_long_poll_supported and self._long_poll_task is not None: @@ -511,9 +577,7 @@ class ReolinkHost: ) if sub_type == SubType.push: await self.subscribe() - else: - await self._api.subscribe(self._webhook_url, sub_type) - return + return timer = self._api.renewtimer(sub_type) _LOGGER.debug( @@ -555,7 +619,9 @@ class ReolinkHost: def register_webhook(self) -> None: """Register the webhook for motion events.""" - self.webhook_id = f"{DOMAIN}_{self.unique_id.replace(':', '')}_ONVIF" + self.webhook_id = ( + f"{DOMAIN}_{self.unique_id.replace(':', '')}_{webhook.async_generate_id()}" + ) event_id = self.webhook_id webhook.async_register( @@ -587,11 +653,12 @@ class ReolinkHost: webhook.async_unregister(self._hass, self.webhook_id) self.webhook_id = None - async def _async_long_polling(self, *_) -> None: + async def _async_long_polling(self, *_: Any) -> None: """Use ONVIF long polling to immediately receive events.""" # This task will be cancelled once _async_stop_long_polling is called while True: - if self._webhook_reachable: + if self._api.baichuan.events_active or self._webhook_reachable: + # TCP push or ONVIF push working, stop long polling self._long_poll_task = None await self._async_stop_long_polling() return @@ -623,22 +690,32 @@ class ReolinkHost: # Cooldown to prevent CPU over usage on camera freezes await asyncio.sleep(LONG_POLL_COOLDOWN) - async def _async_poll_all_motion(self, *_) -> None: + async def _async_poll_all_motion(self, *_: Any) -> None: """Poll motion and AI states until the first ONVIF push is received.""" - if self._webhook_reachable or self._long_poll_received: - # ONVIF push or long polling is working, stop fast polling + if ( + self._api.baichuan.events_active + or self._webhook_reachable + or self._long_poll_received + ): + # TCP push, ONVIF push or long polling is working, stop fast polling self._cancel_poll = None return try: - await self._api.get_motion_state_all_ch() + if self._api.session_active: + await self._api.get_motion_state_all_ch() except ReolinkError as err: - _LOGGER.error( - "Reolink error while polling motion state for host %s:%s: %s", - self._api.host, - self._api.port, - err, - ) + if not self._fast_poll_error: + _LOGGER.error( + "Reolink error while polling motion state for host %s:%s: %s", + self._api.host, + self._api.port, + err, + ) + self._fast_poll_error = True + else: + if self._api.session_active: + self._fast_poll_error = False finally: # schedule next poll if not self._hass.is_stopping: @@ -646,7 +723,7 @@ class ReolinkHost: self._hass, POLL_INTERVAL_NO_PUSH, self._poll_job ) - self._signal_write_ha_state(None) + self._signal_write_ha_state() async def handle_webhook( self, hass: HomeAssistant, webhook_id: str, request: Request @@ -705,7 +782,7 @@ class ReolinkHost: "Could not poll motion state after losing connection during receiving ONVIF event" ) return - async_dispatcher_send(hass, f"{webhook_id}_all", {}) + self._signal_write_ha_state() return message = data.decode("utf-8") @@ -718,18 +795,20 @@ class ReolinkHost: self._signal_write_ha_state(channels) - def _signal_write_ha_state(self, channels: list[int] | None) -> None: + def _signal_write_ha_state(self, channels: list[int] | None = None) -> None: """Update the binary sensors with async_write_ha_state.""" if channels is None: - 
async_dispatcher_send(self._hass, f"{self.webhook_id}_all", {}) + async_dispatcher_send(self._hass, f"{self.unique_id}_all", {}) return for channel in channels: - async_dispatcher_send(self._hass, f"{self.webhook_id}_{channel}", {}) + async_dispatcher_send(self._hass, f"{self.unique_id}_{channel}", {}) @property def event_connection(self) -> str: """Type of connection to receive events.""" + if self._api.baichuan.events_active: + return "TCP push" if self._webhook_reachable: return "ONVIF push" if self._long_poll_received: diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index f7729789c4e..cee044189ea 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -101,7 +101,22 @@ "default": "mdi:spotlight-beam" }, "volume": { - "default": "mdi:volume-high" + "default": "mdi:volume-high", + "state": { + "0": "mdi:volume-off" + } + }, + "alarm_volume": { + "default": "mdi:volume-high", + "state": { + "0": "mdi:volume-off" + } + }, + "message_volume": { + "default": "mdi:volume-high", + "state": { + "0": "mdi:volume-off" + } }, "guard_return_time": { "default": "mdi:crosshairs-gps" @@ -207,19 +222,70 @@ "hdr": { "default": "mdi:hdr" }, + "binning_mode": { + "default": "mdi:code-block-brackets" + }, + "hub_alarm_ringtone": { + "default": "mdi:music-note", + "state": { + "alarm": "mdi:bullhorn" + } + }, + "hub_visitor_ringtone": { + "default": "mdi:music-note", + "state": { + "alarm": "mdi:bullhorn" + } + }, "motion_tone": { - "default": "mdi:music-note" + "default": "mdi:music-note", + "state": { + "off": "mdi:music-note-off" + } }, "people_tone": { - "default": "mdi:music-note" + "default": "mdi:music-note", + "state": { + "off": "mdi:music-note-off" + } + }, + "vehicle_tone": { + "default": "mdi:music-note", + "state": { + "off": "mdi:music-note-off" + } }, "visitor_tone": { - "default": "mdi:music-note" + "default": "mdi:music-note", + "state": { + "off": "mdi:music-note-off" + } + }, + "package_tone": { + "default": "mdi:music-note", + "state": { + "off": "mdi:music-note-off" + } + }, + "main_frame_rate": { + "default": "mdi:play-speed" + }, + "sub_frame_rate": { + "default": "mdi:play-speed" + }, + "main_bit_rate": { + "default": "mdi:play-speed" + }, + "sub_bit_rate": { + "default": "mdi:play-speed" } }, "sensor": { "ptz_pan_position": { - "default": "mdi:pan" + "default": "mdi:pan-horizontal" + }, + "ptz_tilt_position": { + "default": "mdi:pan-vertical" }, "battery_temperature": { "default": "mdi:thermometer" @@ -230,6 +296,9 @@ "wifi_signal": { "default": "mdi:wifi" }, + "cpu_usage": { + "default": "mdi:cpu-64-bit" + }, "hdd_storage": { "default": "mdi:harddisk" }, @@ -279,8 +348,8 @@ "manual_record": { "default": "mdi:record-rec" }, - "buzzer": { - "default": "mdi:room-service" + "hub_ringtone_on_event": { + "default": "mdi:music-note" }, "doorbell_button_sound": { "default": "mdi:volume-high" @@ -300,7 +369,11 @@ } }, "services": { - "ptz_move": "mdi:pan", - "play_chime": "mdi:music" + "ptz_move": { + "service": "mdi:pan" + }, + "play_chime": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/reolink/light.py b/homeassistant/components/reolink/light.py index 877bf80080b..3bd9a120798 100644 --- a/homeassistant/components/reolink/light.py +++ b/homeassistant/components/reolink/light.py @@ -15,15 +15,20 @@ from homeassistant.components.light import ( LightEntity, LightEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from 
homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ReolinkData -from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription +from .entity import ( + ReolinkChannelCoordinatorEntity, + ReolinkChannelEntityDescription, + ReolinkHostCoordinatorEntity, + ReolinkHostEntityDescription, +) +from .util import ReolinkConfigEntry, ReolinkData + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -39,10 +44,22 @@ class ReolinkLightEntityDescription( turn_on_off_fn: Callable[[Host, int, bool], Any] +@dataclass(frozen=True, kw_only=True) +class ReolinkHostLightEntityDescription( + LightEntityDescription, + ReolinkHostEntityDescription, +): + """A class that describes host light entities.""" + + is_on_fn: Callable[[Host], bool] + turn_on_off_fn: Callable[[Host, bool], Any] + + LIGHT_ENTITIES = ( ReolinkLightEntityDescription( key="floodlight", cmd_key="GetWhiteLed", + cmd_id=291, translation_key="floodlight", supported=lambda api, ch: api.supported(ch, "floodLight"), is_on_fn=lambda api, ch: api.whiteled_state(ch), @@ -61,22 +78,41 @@ LIGHT_ENTITIES = ( ), ) +HOST_LIGHT_ENTITIES = ( + ReolinkHostLightEntityDescription( + key="hub_status_led", + cmd_key="GetStateLight", + translation_key="status_led", + entity_category=EntityCategory.CONFIG, + supported=lambda api: api.supported(None, "state_light"), + is_on_fn=lambda api: api.state_light, + turn_on_off_fn=lambda api, value: api.set_state_light(value), + ), +) + async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Reolink light entities.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data - async_add_entities( + entities: list[ReolinkLightEntity | ReolinkHostLightEntity] = [ ReolinkLightEntity(reolink_data, channel, entity_description) for entity_description in LIGHT_ENTITIES for channel in reolink_data.host.api.channels if entity_description.supported(reolink_data.host.api, channel) + ] + entities.extend( + ReolinkHostLightEntity(reolink_data, entity_description) + for entity_description in HOST_LIGHT_ENTITIES + if entity_description.supported(reolink_data.host.api) ) + async_add_entities(entities) + class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity): """Base light entity class for Reolink IP cameras.""" @@ -108,8 +144,7 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity): @property def brightness(self) -> int | None: """Return the brightness of this light between 0.255.""" - if self.entity_description.get_brightness_fn is None: - return None + assert self.entity_description.get_brightness_fn is not None bright_pct = self.entity_description.get_brightness_fn( self._host.api, self._channel @@ -151,3 +186,41 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity): except ReolinkError as err: raise HomeAssistantError(err) from err self.async_write_ha_state() + + +class ReolinkHostLightEntity(ReolinkHostCoordinatorEntity, LightEntity): + """Base host light entity class for Reolink IP cameras.""" + + entity_description: ReolinkHostLightEntityDescription + _attr_supported_color_modes = {ColorMode.ONOFF} + _attr_color_mode = ColorMode.ONOFF + 
+ def __init__( + self, + reolink_data: ReolinkData, + entity_description: ReolinkHostLightEntityDescription, + ) -> None: + """Initialize Reolink host light entity.""" + self.entity_description = entity_description + super().__init__(reolink_data) + + @property + def is_on(self) -> bool: + """Return true if light is on.""" + return self.entity_description.is_on_fn(self._host.api) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn light off.""" + try: + await self.entity_description.turn_on_off_fn(self._host.api, False) + except ReolinkError as err: + raise HomeAssistantError(err) from err + self.async_write_ha_state() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn light on.""" + try: + await self.entity_description.turn_on_off_fn(self._host.api, True) + except ReolinkError as err: + raise HomeAssistantError(err) from err + self.async_write_ha_state() diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 9671a4b4fc1..7aced174e30 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -1,6 +1,6 @@ { "domain": "reolink", - "name": "Reolink IP NVR/camera", + "name": "Reolink", "codeowners": ["@starkillerOG"], "config_flow": true, "dependencies": ["webhook"], @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.9.7"] + "requirements": ["reolink-aio==0.11.5"] } diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py index ae865b77913..0c23bed7e2f 100644 --- a/homeassistant/components/reolink/media_source.py +++ b/homeassistant/components/reolink/media_source.py @@ -10,20 +10,21 @@ from reolink_aio.enums import VodRequestType from homeassistant.components.camera import DOMAIN as CAM_DOMAIN, DynamicStreamSettings from homeassistant.components.media_player import MediaClass, MediaType -from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia, + Unresolvable, ) from homeassistant.components.stream import create_stream from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from . import ReolinkData from .const import DOMAIN +from .host import ReolinkHost +from .util import ReolinkConfigEntry _LOGGER = logging.getLogger(__name__) @@ -46,6 +47,15 @@ def res_name(stream: str) -> str: return "Low res." 
+def get_host(hass: HomeAssistant, config_entry_id: str) -> ReolinkHost: + """Return the Reolink host from the config entry id.""" + config_entry: ReolinkConfigEntry | None = hass.config_entries.async_get_entry( + config_entry_id + ) + assert config_entry is not None + return config_entry.runtime_data.host + + class ReolinkVODMediaSource(MediaSource): """Provide Reolink camera VODs as media sources.""" @@ -58,15 +68,16 @@ class ReolinkVODMediaSource(MediaSource): async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: """Resolve media to a url.""" - identifier = item.identifier.split("|", 5) + identifier = ["UNKNOWN"] + if item.identifier is not None: + identifier = item.identifier.split("|", 5) if identifier[0] != "FILE": raise Unresolvable(f"Unknown media item '{item.identifier}'.") _, config_entry_id, channel_str, stream_res, filename = identifier channel = int(channel_str) - data: dict[str, ReolinkData] = self.hass.data[DOMAIN] - host = data[config_entry_id].host + host = get_host(self.hass, config_entry_id) def get_vod_type() -> VodRequestType: if filename.endswith(".mp4"): @@ -104,7 +115,7 @@ class ReolinkVODMediaSource(MediaSource): item: MediaSourceItem, ) -> BrowseMediaSource: """Return media.""" - if item.identifier is None: + if not item.identifier: return await self._async_generate_root() identifier = item.identifier.split("|", 7) @@ -151,8 +162,7 @@ class ReolinkVODMediaSource(MediaSource): if config_entry.state != ConfigEntryState.LOADED: continue channels: list[str] = [] - data: dict[str, ReolinkData] = self.hass.data[DOMAIN] - host = data[config_entry.entry_id].host + host = config_entry.runtime_data.host entities = er.async_entries_for_config_entry( entity_reg, config_entry.entry_id ) @@ -174,10 +184,7 @@ class ReolinkVODMediaSource(MediaSource): if len(ch_id) > 3: ch = host.api.channel_for_uid(ch_id) - if ( - host.api.api_version("recReplay", int(ch)) < 1 - or not host.api.hdd_info - ): + if not host.api.supported(int(ch), "replay") or not host.api.hdd_info: # playback stream not supported by this camera or no storage installed continue @@ -216,8 +223,7 @@ class ReolinkVODMediaSource(MediaSource): self, config_entry_id: str, channel: int ) -> BrowseMediaSource: """Allow the user to select the high or low playback resolution, (low loads faster).""" - data: dict[str, ReolinkData] = self.hass.data[DOMAIN] - host = data[config_entry_id].host + host = get_host(self.hass, config_entry_id) main_enc = await host.api.get_encoding(channel, "main") if main_enc == "h265": @@ -281,12 +287,16 @@ class ReolinkVODMediaSource(MediaSource): config_entry_id, channel, "sub" ) + title = host.api.camera_name(channel) + if host.api.model in DUAL_LENS_MODELS: + title = f"{host.api.camera_name(channel)} lens {channel}" + return BrowseMediaSource( domain=DOMAIN, identifier=f"RESs|{config_entry_id}|{channel}", media_class=MediaClass.CHANNEL, media_content_type=MediaType.PLAYLIST, - title=host.api.camera_name(channel), + title=title, can_play=False, can_expand=True, children=children, @@ -296,8 +306,7 @@ class ReolinkVODMediaSource(MediaSource): self, config_entry_id: str, channel: int, stream: str ) -> BrowseMediaSource: """Return all days on which recordings are available for a reolink camera.""" - data: dict[str, ReolinkData] = self.hass.data[DOMAIN] - host = data[config_entry_id].host + host = get_host(self.hass, config_entry_id) # We want today of the camera, not necessarily today of the server now = host.api.time() or await host.api.async_get_time() @@ -328,12 +337,16 @@ class 
ReolinkVODMediaSource(MediaSource): for day in status.days ] + title = f"{host.api.camera_name(channel)} {res_name(stream)}" + if host.api.model in DUAL_LENS_MODELS: + title = f"{host.api.camera_name(channel)} lens {channel} {res_name(stream)}" + return BrowseMediaSource( domain=DOMAIN, identifier=f"DAYS|{config_entry_id}|{channel}|{stream}", media_class=MediaClass.CHANNEL, media_content_type=MediaType.PLAYLIST, - title=f"{host.api.camera_name(channel)} {res_name(stream)}", + title=title, can_play=False, can_expand=True, children=children, @@ -349,8 +362,7 @@ class ReolinkVODMediaSource(MediaSource): day: int, ) -> BrowseMediaSource: """Return all recording files on a specific day of a Reolink camera.""" - data: dict[str, ReolinkData] = self.hass.data[DOMAIN] - host = data[config_entry_id].host + host = get_host(self.hass, config_entry_id) start = dt.datetime(year, month, day, hour=0, minute=0, second=0) end = dt.datetime(year, month, day, hour=23, minute=59, second=59) @@ -388,12 +400,18 @@ class ReolinkVODMediaSource(MediaSource): ) ) + title = ( + f"{host.api.camera_name(channel)} {res_name(stream)} {year}/{month}/{day}" + ) + if host.api.model in DUAL_LENS_MODELS: + title = f"{host.api.camera_name(channel)} lens {channel} {res_name(stream)} {year}/{month}/{day}" + return BrowseMediaSource( domain=DOMAIN, identifier=f"FILES|{config_entry_id}|{channel}|{stream}", media_class=MediaClass.CHANNEL, media_content_type=MediaType.PLAYLIST, - title=f"{host.api.camera_name(channel)} {res_name(stream)} {year}/{month}/{day}", + title=title, can_play=False, can_expand=True, children=children, diff --git a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py index 1dc99c886e1..692b43bca9e 100644 --- a/homeassistant/components/reolink/number.py +++ b/homeassistant/components/reolink/number.py @@ -14,19 +14,22 @@ from homeassistant.components.number import ( NumberEntityDescription, NumberMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ReolinkData -from .const import DOMAIN from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, ReolinkChimeCoordinatorEntity, + ReolinkChimeEntityDescription, + ReolinkHostCoordinatorEntity, + ReolinkHostEntityDescription, ) +from .util import ReolinkConfigEntry, ReolinkData + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -43,10 +46,22 @@ class ReolinkNumberEntityDescription( value: Callable[[Host, int], float | None] +@dataclass(frozen=True, kw_only=True) +class ReolinkHostNumberEntityDescription( + NumberEntityDescription, + ReolinkHostEntityDescription, +): + """A class that describes number entities for the host.""" + + method: Callable[[Host, float], Any] + mode: NumberMode = NumberMode.AUTO + value: Callable[[Host], float | None] + + @dataclass(frozen=True, kw_only=True) class ReolinkChimeNumberEntityDescription( NumberEntityDescription, - ReolinkChannelEntityDescription, + ReolinkChimeEntityDescription, ): """A class that describes number entities for a chime.""" @@ -475,6 +490,33 @@ NUMBER_ENTITIES = ( ), ) +HOST_NUMBER_ENTITIES = ( + ReolinkHostNumberEntityDescription( + key="alarm_volume", + cmd_key="GetDeviceAudioCfg", + translation_key="alarm_volume", + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api: api.supported(None, "hub_audio"), + value=lambda api: api.alarm_volume, + method=lambda api, value: api.set_hub_audio(alarm_volume=int(value)), + ), + ReolinkHostNumberEntityDescription( + key="message_volume", + cmd_key="GetDeviceAudioCfg", + translation_key="message_volume", + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api: api.supported(None, "hub_audio"), + value=lambda api: api.message_volume, + method=lambda api, value: api.set_hub_audio(message_volume=int(value)), + ), +) + CHIME_NUMBER_ENTITIES = ( ReolinkChimeNumberEntityDescription( key="volume", @@ -492,18 +534,23 @@ CHIME_NUMBER_ENTITIES = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Reolink number entities.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data - entities: list[ReolinkNumberEntity | ReolinkChimeNumberEntity] = [ + entities: list[NumberEntity] = [ ReolinkNumberEntity(reolink_data, channel, entity_description) for entity_description in NUMBER_ENTITIES for channel in reolink_data.host.api.channels if entity_description.supported(reolink_data.host.api, channel) ] + entities.extend( + ReolinkHostNumberEntity(reolink_data, entity_description) + for entity_description in HOST_NUMBER_ENTITIES + if entity_description.supported(reolink_data.host.api) + ) entities.extend( ReolinkChimeNumberEntity(reolink_data, chime, entity_description) for entity_description in CHIME_NUMBER_ENTITIES @@ -553,6 +600,38 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): self.async_write_ha_state() +class ReolinkHostNumberEntity(ReolinkHostCoordinatorEntity, NumberEntity): + """Base number entity class for Reolink Host.""" + + entity_description: ReolinkHostNumberEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + entity_description: ReolinkHostNumberEntityDescription, + ) -> None: + """Initialize Reolink number entity.""" + self.entity_description = entity_description 
+        super().__init__(reolink_data)
+
+        self._attr_mode = entity_description.mode
+
+    @property
+    def native_value(self) -> float | None:
+        """State of the number entity."""
+        return self.entity_description.value(self._host.api)
+
+    async def async_set_native_value(self, value: float) -> None:
+        """Update the current value."""
+        try:
+            await self.entity_description.method(self._host.api, value)
+        except InvalidParameterError as err:
+            raise ServiceValidationError(err) from err
+        except ReolinkError as err:
+            raise HomeAssistantError(err) from err
+        self.async_write_ha_state()
+
+
 class ReolinkChimeNumberEntity(ReolinkChimeCoordinatorEntity, NumberEntity):
     """Base number entity class for Reolink IP cameras."""
diff --git a/homeassistant/components/reolink/quality_scale.yaml b/homeassistant/components/reolink/quality_scale.yaml
new file mode 100644
index 00000000000..540cf19e22a
--- /dev/null
+++ b/homeassistant/components/reolink/quality_scale.yaml
@@ -0,0 +1,71 @@
+rules:
+  # Bronze
+  action-setup:
+    status: done
+    comment: |
+      The play_chime service is set up in async_setup.
+      The ptz_move service is set up in async_setup_entry since it is an entity service.
+  appropriate-polling: done
+  brands: done
+  common-modules: done
+  config-flow-test-coverage: done
+  config-flow: done
+  dependency-transparency: done
+  docs-actions: done
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: done
+  entity-event-setup: done
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+  # Silver
+  action-exceptions: done
+  config-entry-unloading: done
+  docs-configuration-parameters: done
+  docs-installation-parameters: done
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: done
+  parallel-updates:
+    status: done
+    comment: |
+      Coordinators are used, and asyncio mutex locks in the upstream library ensure safe operation.
+      PARALLEL_UPDATES = 0 is set on all platforms.
+  reauthentication-flow: done
+  test-coverage: done
+  # Gold
+  devices: done
+  diagnostics: done
+  discovery-update-info: done
+  discovery: done
+  docs-data-update: done
+  docs-examples: done
+  docs-known-limitations: done
+  docs-supported-devices: done
+  docs-supported-functions: done
+  docs-troubleshooting: done
+  docs-use-cases: done
+  dynamic-devices: done
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default: done
+  entity-translations: done
+  exception-translations: todo
+  icon-translations: done
+  reconfiguration-flow: done
+  repair-issues: done
+  stale-devices:
+    status: done
+    comment: |
+      For standalone cameras this does not apply: the integration should be removed.
+      For cameras connected to an NVR/Hub, the entities of a device are marked unavailable when power is unplugged; they can be removed using async_remove_config_entry_device.
+      Chimes can be uncoupled from the doorbell and removed from HA using async_remove_config_entry_device.
+      Automatic removal led to many user issues when a device was temporarily out of Wi-Fi range or disconnected from power, so it is no longer implemented.
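The number and select platforms in this diff all lean on the same entity-description pattern: a frozen dataclass carries supported, value, and method callables, and the setup code creates one entity per description that passes its supported check. The following self-contained sketch illustrates just that pattern; FakeHost, HostNumberDescription, and HostNumber are hypothetical stand-ins for illustration, not the integration's real classes.

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass


class FakeHost:
    """Hypothetical stand-in for the Reolink host API object."""

    def __init__(self) -> None:
        self.alarm_volume = 80

    def supported(self, capability: str) -> bool:
        # Pretend this device only advertises the "hub_audio" capability.
        return capability == "hub_audio"

    def set_hub_audio(self, alarm_volume: int) -> None:
        self.alarm_volume = alarm_volume


@dataclass(frozen=True, kw_only=True)
class HostNumberDescription:
    """Describes one number entity purely with data and callables."""

    key: str
    supported: Callable[[FakeHost], bool]
    value: Callable[[FakeHost], float | None]
    method: Callable[[FakeHost, float], None]


DESCRIPTIONS = (
    HostNumberDescription(
        key="alarm_volume",
        supported=lambda api: api.supported("hub_audio"),
        value=lambda api: api.alarm_volume,
        method=lambda api, value: api.set_hub_audio(alarm_volume=int(value)),
    ),
)


class HostNumber:
    """Tiny entity-like wrapper that delegates everything to its description."""

    def __init__(self, api: FakeHost, description: HostNumberDescription) -> None:
        self._api = api
        self.description = description

    @property
    def native_value(self) -> float | None:
        return self.description.value(self._api)

    def set_native_value(self, value: float) -> None:
        self.description.method(self._api, value)


if __name__ == "__main__":
    host = FakeHost()
    entities = [
        HostNumber(host, desc) for desc in DESCRIPTIONS if desc.supported(host)
    ]
    entities[0].set_native_value(55)
    print(entities[0].native_value)  # 55

The real platforms add coordinator wiring, translation keys, and error mapping on top, but the description-plus-lambdas core has this same shape.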
+ # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index 94cfdf6751b..8625f7fb600 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -8,11 +8,13 @@ import logging from typing import Any from reolink_aio.api import ( + BinningModeEnum, Chime, ChimeToneEnum, DayNightEnum, HDREnum, Host, + HubToneEnum, SpotlightModeEnum, StatusLedEnum, TrackMethodEnum, @@ -20,21 +22,21 @@ from reolink_aio.api import ( from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory +from homeassistant.const import EntityCategory, UnitOfDataRate, UnitOfFrequency from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ReolinkData -from .const import DOMAIN from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, ReolinkChimeCoordinatorEntity, + ReolinkChimeEntityDescription, ) +from .util import ReolinkConfigEntry, ReolinkData _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -52,7 +54,7 @@ class ReolinkSelectEntityDescription( @dataclass(frozen=True, kw_only=True) class ReolinkChimeSelectEntityDescription( SelectEntityDescription, - ReolinkChannelEntityDescription, + ReolinkChimeEntityDescription, ): """A class that describes select entities for a chime.""" @@ -115,6 +117,32 @@ SELECT_ENTITIES = ( api.set_quick_reply(ch, file_id=_get_quick_reply_id(api, ch, mess)) ), ), + ReolinkSelectEntityDescription( + key="hub_alarm_ringtone", + cmd_key="GetDeviceAudioCfg", + translation_key="hub_alarm_ringtone", + entity_category=EntityCategory.CONFIG, + get_options=[mode.name for mode in HubToneEnum], + supported=lambda api, ch: api.supported(ch, "hub_audio"), + value=lambda api, ch: HubToneEnum(api.hub_alarm_tone_id(ch)).name, + method=lambda api, ch, name: ( + api.set_hub_audio(ch, alarm_tone_id=HubToneEnum[name].value) + ), + ), + ReolinkSelectEntityDescription( + key="hub_visitor_ringtone", + cmd_key="GetDeviceAudioCfg", + translation_key="hub_visitor_ringtone", + entity_category=EntityCategory.CONFIG, + get_options=[mode.name for mode in HubToneEnum], + supported=lambda api, ch: ( + api.supported(ch, "hub_audio") and api.is_doorbell(ch) + ), + value=lambda api, ch: HubToneEnum(api.hub_visitor_tone_id(ch)).name, + method=lambda api, ch, name: ( + api.set_hub_audio(ch, visitor_tone_id=HubToneEnum[name].value) + ), + ), ReolinkSelectEntityDescription( key="auto_track_method", cmd_key="GetAiCfg", @@ -148,6 +176,67 @@ SELECT_ENTITIES = ( value=lambda api, ch: HDREnum(api.HDR_state(ch)).name, method=lambda api, ch, name: api.set_HDR(ch, HDREnum[name].value), ), + ReolinkSelectEntityDescription( + key="binning_mode", + cmd_key="GetIsp", + translation_key="binning_mode", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + get_options=[method.name for method in BinningModeEnum], + supported=lambda api, ch: api.supported(ch, "binning_mode"), + value=lambda api, ch: BinningModeEnum(api.binning_mode(ch)).name, + method=lambda api, ch, name: api.set_binning_mode( + ch, 
BinningModeEnum[name].value + ), + ), + ReolinkSelectEntityDescription( + key="main_frame_rate", + cmd_key="GetEnc", + translation_key="main_frame_rate", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + unit_of_measurement=UnitOfFrequency.HERTZ, + get_options=lambda api, ch: [str(v) for v in api.frame_rate_list(ch, "main")], + supported=lambda api, ch: api.supported(ch, "frame_rate"), + value=lambda api, ch: str(api.frame_rate(ch, "main")), + method=lambda api, ch, value: api.set_frame_rate(ch, int(value), "main"), + ), + ReolinkSelectEntityDescription( + key="sub_frame_rate", + cmd_key="GetEnc", + translation_key="sub_frame_rate", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + unit_of_measurement=UnitOfFrequency.HERTZ, + get_options=lambda api, ch: [str(v) for v in api.frame_rate_list(ch, "sub")], + supported=lambda api, ch: api.supported(ch, "frame_rate"), + value=lambda api, ch: str(api.frame_rate(ch, "sub")), + method=lambda api, ch, value: api.set_frame_rate(ch, int(value), "sub"), + ), + ReolinkSelectEntityDescription( + key="main_bit_rate", + cmd_key="GetEnc", + translation_key="main_bit_rate", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, + get_options=lambda api, ch: [str(v) for v in api.bit_rate_list(ch, "main")], + supported=lambda api, ch: api.supported(ch, "bit_rate"), + value=lambda api, ch: str(api.bit_rate(ch, "main")), + method=lambda api, ch, value: api.set_bit_rate(ch, int(value), "main"), + ), + ReolinkSelectEntityDescription( + key="sub_bit_rate", + cmd_key="GetEnc", + translation_key="sub_bit_rate", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, + get_options=lambda api, ch: [str(v) for v in api.bit_rate_list(ch, "sub")], + supported=lambda api, ch: api.supported(ch, "bit_rate"), + value=lambda api, ch: str(api.bit_rate(ch, "sub")), + method=lambda api, ch, value: api.set_bit_rate(ch, int(value), "sub"), + ), ) CHIME_SELECT_ENTITIES = ( @@ -156,6 +245,7 @@ CHIME_SELECT_ENTITIES = ( cmd_key="GetDingDongCfg", translation_key="motion_tone", entity_category=EntityCategory.CONFIG, + supported=lambda chime: "md" in chime.chime_event_types, get_options=[method.name for method in ChimeToneEnum], value=lambda chime: ChimeToneEnum(chime.tone("md")).name, method=lambda chime, name: chime.set_tone("md", ChimeToneEnum[name].value), @@ -166,28 +256,50 @@ CHIME_SELECT_ENTITIES = ( translation_key="people_tone", entity_category=EntityCategory.CONFIG, get_options=[method.name for method in ChimeToneEnum], + supported=lambda chime: "people" in chime.chime_event_types, value=lambda chime: ChimeToneEnum(chime.tone("people")).name, method=lambda chime, name: chime.set_tone("people", ChimeToneEnum[name].value), ), + ReolinkChimeSelectEntityDescription( + key="vehicle_tone", + cmd_key="GetDingDongCfg", + translation_key="vehicle_tone", + entity_category=EntityCategory.CONFIG, + get_options=[method.name for method in ChimeToneEnum], + supported=lambda chime: "vehicle" in chime.chime_event_types, + value=lambda chime: ChimeToneEnum(chime.tone("vehicle")).name, + method=lambda chime, name: chime.set_tone("vehicle", ChimeToneEnum[name].value), + ), ReolinkChimeSelectEntityDescription( key="visitor_tone", cmd_key="GetDingDongCfg", translation_key="visitor_tone", entity_category=EntityCategory.CONFIG, get_options=[method.name for method in 
ChimeToneEnum], + supported=lambda chime: "visitor" in chime.chime_event_types, value=lambda chime: ChimeToneEnum(chime.tone("visitor")).name, method=lambda chime, name: chime.set_tone("visitor", ChimeToneEnum[name].value), ), + ReolinkChimeSelectEntityDescription( + key="package_tone", + cmd_key="GetDingDongCfg", + translation_key="package_tone", + entity_category=EntityCategory.CONFIG, + get_options=[method.name for method in ChimeToneEnum], + supported=lambda chime: "package" in chime.chime_event_types, + value=lambda chime: ChimeToneEnum(chime.tone("package")).name, + method=lambda chime, name: chime.set_tone("package", ChimeToneEnum[name].value), + ), ) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Reolink select entities.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data entities: list[ReolinkSelectEntity | ReolinkChimeSelectEntity] = [ ReolinkSelectEntity(reolink_data, channel, entity_description) @@ -199,6 +311,7 @@ async def async_setup_entry( ReolinkChimeSelectEntity(reolink_data, chime, entity_description) for entity_description in CHIME_SELECT_ENTITIES for chime in reolink_data.host.api.chime_list + if entity_description.supported(chime) ) async_add_entities(entities) @@ -232,7 +345,7 @@ class ReolinkSelectEntity(ReolinkChannelCoordinatorEntity, SelectEntity): try: option = self.entity_description.value(self._host.api, self._channel) - except ValueError: + except (ValueError, KeyError): if self._log_error: _LOGGER.exception("Reolink '%s' has an unknown value", self.name) self._log_error = False @@ -274,7 +387,7 @@ class ReolinkChimeSelectEntity(ReolinkChimeCoordinatorEntity, SelectEntity): """Return the current option.""" try: option = self.entity_description.value(self._chime) - except ValueError: + except (ValueError, KeyError): if self._log_error: _LOGGER.exception("Reolink '%s' has an unknown value", self.name) self._log_error = False diff --git a/homeassistant/components/reolink/sensor.py b/homeassistant/components/reolink/sensor.py index 988b091735e..36900da99ca 100644 --- a/homeassistant/components/reolink/sensor.py +++ b/homeassistant/components/reolink/sensor.py @@ -16,20 +16,20 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . 
import ReolinkData -from .const import DOMAIN from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) +from .util import ReolinkConfigEntry, ReolinkData + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -60,10 +60,20 @@ SENSORS = ( state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda api, ch: api.ptz_pan_position(ch), - supported=lambda api, ch: api.supported(ch, "ptz_position"), + supported=lambda api, ch: api.supported(ch, "ptz_pan_position"), + ), + ReolinkSensorEntityDescription( + key="ptz_tilt_position", + cmd_key="GetPtzCurPos", + translation_key="ptz_tilt_position", + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + value=lambda api, ch: api.ptz_tilt_position(ch), + supported=lambda api, ch: api.supported(ch, "ptz_tilt_position"), ), ReolinkSensorEntityDescription( key="battery_percent", + cmd_id=252, cmd_key="GetBatteryInfo", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.BATTERY, @@ -74,6 +84,7 @@ SENSORS = ( ), ReolinkSensorEntityDescription( key="battery_temperature", + cmd_id=252, cmd_key="GetBatteryInfo", translation_key="battery_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, @@ -86,6 +97,7 @@ SENSORS = ( ), ReolinkSensorEntityDescription( key="battery_state", + cmd_id=252, cmd_key="GetBatteryInfo", translation_key="battery_state", device_class=SensorDeviceClass.ENUM, @@ -108,6 +120,17 @@ HOST_SENSORS = ( value=lambda api: api.wifi_signal, supported=lambda api: api.supported(None, "wifi") and api.wifi_connection, ), + ReolinkHostSensorEntityDescription( + key="cpu_usage", + cmd_key="GetPerformance", + translation_key="cpu_usage", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + value=lambda api: api.cpu_usage, + supported=lambda api: api.supported(None, "performance"), + ), ) HDD_SENSORS = ( @@ -126,11 +149,11 @@ HDD_SENSORS = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Reolink IP Camera.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data entities: list[ ReolinkSensorEntity | ReolinkHostSensorEntity | ReolinkHddSensorEntity diff --git a/homeassistant/components/reolink/services.py b/homeassistant/components/reolink/services.py index d5cb402c74b..326093e7a93 100644 --- a/homeassistant/components/reolink/services.py +++ b/homeassistant/components/reolink/services.py @@ -47,7 +47,7 @@ def async_setup_services(hass: HomeAssistant) -> None: translation_key="service_entry_ex", translation_placeholders={"service_name": "play_chime"}, ) - host: ReolinkHost = hass.data[DOMAIN][config_entry.entry_id].host + host: ReolinkHost = config_entry.runtime_data.host (device_uid, chime_id, is_chime) = get_device_uid_and_ch(device, host) chime: Chime | None = host.api.chime(chime_id) if not is_chime or chime is None: diff --git a/homeassistant/components/reolink/siren.py b/homeassistant/components/reolink/siren.py index 269c0690105..cb12eb5d38c 100644 --- a/homeassistant/components/reolink/siren.py +++ b/homeassistant/components/reolink/siren.py @@ -14,14 +14,14 @@ from homeassistant.components.siren import ( 
     SirenEntityDescription,
     SirenEntityFeature,
 )
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
-from . import ReolinkData
-from .const import DOMAIN
 from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription
+from .util import ReolinkConfigEntry, ReolinkData
+
+PARALLEL_UPDATES = 0
 
 
 @dataclass(frozen=True)
@@ -42,11 +42,11 @@ SIREN_ENTITIES = (
 
 async def async_setup_entry(
     hass: HomeAssistant,
-    config_entry: ConfigEntry,
+    config_entry: ReolinkConfigEntry,
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Set up a Reolink siren entities."""
-    reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id]
+    reolink_data: ReolinkData = config_entry.runtime_data
 
     async_add_entities(
         ReolinkSirenEntity(reolink_data, channel, entity_description)
diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json
index 3710c3743fa..ac73581ce22 100644
--- a/homeassistant/components/reolink/strings.json
+++ b/homeassistant/components/reolink/strings.json
@@ -18,10 +18,6 @@
           "username": "Username to login to the Reolink device itself. Not the Reolink cloud account.",
           "password": "Password to login to the Reolink device itself. Not the Reolink cloud account."
         }
-      },
-      "reauth_confirm": {
-        "title": "[%key:common::config_flow::title::reauth%]",
-        "description": "The Reolink integration needs to re-authenticate your connection details"
       }
     },
     "error": {
@@ -31,11 +27,14 @@
       "not_admin": "User needs to be admin, user \"{username}\" has authorisation level \"{userlevel}\"",
       "password_incompatible": "Password contains incompatible special character, only these characters are allowed: a-z, A-Z, 0-9 or {special_chars}",
       "unknown": "[%key:common::config_flow::error::unknown%]",
+      "update_needed": "Failed to log in because of outdated firmware. Please update the firmware to version {needed_firmware} using the Reolink Download Center: {download_center_url}; currently version {current_firmware} is installed.",
       "webhook_exception": "Home Assistant URL is not available, go to Settings > System > Network > Home Assistant URL and correct the URLs, see {more_info}"
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
+      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
+      "unique_id_mismatch": "The MAC address of the device does not match the previous MAC address"
     }
   },
   "options": {
@@ -82,6 +81,10 @@
     "hdr_switch_deprecated": {
       "title": "Reolink HDR switch deprecated",
       "description": "The Reolink HDR switch entity is deprecated and will be removed in HA 2025.2.0. It has been replaced by a HDR select entity offering options `on`, `off` and `auto`. To remove this issue, please adjust automations accordingly and disable the HDR switch entity."
+    },
+    "hub_switch_deprecated": {
+      "title": "Reolink Home Hub switches deprecated",
+      "description": "The redundant 'Record', 'Email on event', 'FTP upload', 'Push notifications', and 'Buzzer on event' switches on the Reolink Home Hub are deprecated since the new firmware no longer supports these.
Please use the equally named switches under each of the camera devices connected to the Home Hub instead. To remove this issue, please adjust automations accordingly and disable the switch entities mentioned." } }, "services": { @@ -394,6 +397,12 @@ "volume": { "name": "Volume" }, + "alarm_volume": { + "name": "Alarm volume" + }, + "message_volume": { + "name": "Message volume" + }, "guard_return_time": { "name": "Guard return time" }, @@ -478,7 +487,7 @@ "name": "Floodlight mode", "state": { "off": "[%key:common::state::off%]", - "auto": "Auto", + "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]", "onatnight": "On at night", "schedule": "Schedule", "adaptive": "Adaptive", @@ -517,7 +526,7 @@ "name": "Doorbell LED", "state": { "stayoff": "Stay off", - "auto": "Auto", + "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]", "alwaysonatnight": "Auto & always on at night", "alwayson": "Always on" } @@ -527,7 +536,47 @@ "state": { "off": "[%key:common::state::off%]", "on": "[%key:common::state::on%]", - "auto": "Auto" + "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]" + } + }, + "binning_mode": { + "name": "Binning mode", + "state": { + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]", + "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]" + } + }, + "hub_alarm_ringtone": { + "name": "Hub alarm ringtone", + "state": { + "alarm": "Alarm", + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" + } + }, + "hub_visitor_ringtone": { + "name": "Hub visitor ringtone", + "state": { + "alarm": "[%key:component::reolink::entity::select::hub_alarm_ringtone::state::alarm%]", + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" } }, "motion_tone": { @@ -562,6 +611,22 @@ "waybackhome": 
"[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" } }, + "vehicle_tone": { + "name": "Vehicle ringtone", + "state": { + "off": "[%key:common::state::off%]", + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" + } + }, "visitor_tone": { "name": "Visitor ringtone", "state": { @@ -577,15 +642,49 @@ "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" } + }, + "package_tone": { + "name": "Package ringtone", + "state": { + "off": "[%key:common::state::off%]", + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" + } + }, + "main_frame_rate": { + "name": "Clear frame rate" + }, + "sub_frame_rate": { + "name": "Fluent frame rate" + }, + "main_bit_rate": { + "name": "Clear bit rate" + }, + "sub_bit_rate": { + "name": "Fluent bit rate" } }, "sensor": { "wifi_signal": { "name": "Wi-Fi signal" }, + "cpu_usage": { + "name": "CPU usage" + }, "ptz_pan_position": { "name": "PTZ pan position" }, + "ptz_tilt_position": { + "name": "PTZ tilt position" + }, "battery_temperature": { "name": "Battery temperature" }, @@ -646,8 +745,8 @@ "manual_record": { "name": "Manual record" }, - "buzzer": { - "name": "Buzzer on event" + "hub_ringtone_on_event": { + "name": "Hub ringtone on event" }, "doorbell_button_sound": { "name": "Doorbell button sound" diff --git a/homeassistant/components/reolink/switch.py b/homeassistant/components/reolink/switch.py index 2bf7689b32f..c274609599d 100644 --- a/homeassistant/components/reolink/switch.py +++ b/homeassistant/components/reolink/switch.py @@ -10,22 +10,24 @@ from reolink_aio.api import Chime, Host from reolink_aio.exceptions import ReolinkError from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from 
homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ReolinkData from .const import DOMAIN from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, ReolinkChimeCoordinatorEntity, + ReolinkChimeEntityDescription, ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) +from .util import ReolinkConfigEntry, ReolinkData + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -53,7 +55,7 @@ class ReolinkNVRSwitchEntityDescription( @dataclass(frozen=True, kw_only=True) class ReolinkChimeSwitchEntityDescription( SwitchEntityDescription, - ReolinkChannelEntityDescription, + ReolinkChimeEntityDescription, ): """A class that describes switch entities for a chime.""" @@ -171,7 +173,7 @@ SWITCH_ENTITIES = ( ReolinkSwitchEntityDescription( key="buzzer", cmd_key="GetBuzzerAlarmV20", - translation_key="buzzer", + translation_key="hub_ringtone_on_event", entity_category=EntityCategory.CONFIG, supported=lambda api, ch: api.supported(ch, "buzzer") and api.is_nvr, value=lambda api, ch: api.buzzer_enabled(ch), @@ -214,7 +216,7 @@ NVR_SWITCH_ENTITIES = ( cmd_key="GetEmail", translation_key="email", entity_category=EntityCategory.CONFIG, - supported=lambda api: api.supported(None, "email"), + supported=lambda api: api.supported(None, "email") and not api.is_hub, value=lambda api: api.email_enabled(), method=lambda api, value: api.set_email(None, value), ), @@ -223,7 +225,7 @@ NVR_SWITCH_ENTITIES = ( cmd_key="GetFtp", translation_key="ftp_upload", entity_category=EntityCategory.CONFIG, - supported=lambda api: api.supported(None, "ftp"), + supported=lambda api: api.supported(None, "ftp") and not api.is_hub, value=lambda api: api.ftp_enabled(), method=lambda api, value: api.set_ftp(None, value), ), @@ -232,7 +234,7 @@ NVR_SWITCH_ENTITIES = ( cmd_key="GetPush", translation_key="push_notifications", entity_category=EntityCategory.CONFIG, - supported=lambda api: api.supported(None, "push"), + supported=lambda api: api.supported(None, "push") and not api.is_hub, value=lambda api: api.push_enabled(), method=lambda api, value: api.set_push(None, value), ), @@ -241,17 +243,16 @@ NVR_SWITCH_ENTITIES = ( cmd_key="GetRec", translation_key="record", entity_category=EntityCategory.CONFIG, - supported=lambda api: api.supported(None, "recording"), + supported=lambda api: api.supported(None, "recording") and not api.is_hub, value=lambda api: api.recording_enabled(), method=lambda api, value: api.set_recording(None, value), ), ReolinkNVRSwitchEntityDescription( key="buzzer", cmd_key="GetBuzzerAlarmV20", - translation_key="buzzer", - icon="mdi:room-service", + translation_key="hub_ringtone_on_event", entity_category=EntityCategory.CONFIG, - supported=lambda api: api.supported(None, "buzzer"), + supported=lambda api: api.supported(None, "buzzer") and not api.is_hub, value=lambda api: api.buzzer_enabled(), method=lambda api, value: api.set_buzzer(None, value), ), @@ -280,14 +281,64 @@ DEPRECATED_HDR = ReolinkSwitchEntityDescription( method=lambda api, ch, value: api.set_HDR(ch, value), ) +# Can be removed in HA 2025.4.0 +DEPRECATED_NVR_SWITCHES = [ + ReolinkNVRSwitchEntityDescription( + key="email", + cmd_key="GetEmail", + translation_key="email", + 
entity_category=EntityCategory.CONFIG, + supported=lambda api: api.is_hub, + value=lambda api: api.email_enabled(), + method=lambda api, value: api.set_email(None, value), + ), + ReolinkNVRSwitchEntityDescription( + key="ftp_upload", + cmd_key="GetFtp", + translation_key="ftp_upload", + entity_category=EntityCategory.CONFIG, + supported=lambda api: api.is_hub, + value=lambda api: api.ftp_enabled(), + method=lambda api, value: api.set_ftp(None, value), + ), + ReolinkNVRSwitchEntityDescription( + key="push_notifications", + cmd_key="GetPush", + translation_key="push_notifications", + entity_category=EntityCategory.CONFIG, + supported=lambda api: api.is_hub, + value=lambda api: api.push_enabled(), + method=lambda api, value: api.set_push(None, value), + ), + ReolinkNVRSwitchEntityDescription( + key="record", + cmd_key="GetRec", + translation_key="record", + entity_category=EntityCategory.CONFIG, + supported=lambda api: api.is_hub, + value=lambda api: api.recording_enabled(), + method=lambda api, value: api.set_recording(None, value), + ), + ReolinkNVRSwitchEntityDescription( + key="buzzer", + cmd_key="GetBuzzerAlarmV20", + translation_key="hub_ringtone_on_event", + icon="mdi:room-service", + entity_category=EntityCategory.CONFIG, + supported=lambda api: api.is_hub, + value=lambda api: api.buzzer_enabled(), + method=lambda api, value: api.set_buzzer(None, value), + ), +] + async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Reolink switch entities.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data entities: list[ ReolinkSwitchEntity | ReolinkNVRSwitchEntity | ReolinkChimeSwitchEntity @@ -308,10 +359,17 @@ async def async_setup_entry( for chime in reolink_data.host.api.chime_list ) - # Can be removed in HA 2025.2.0 + # Can be removed in HA 2025.4.0 + depricated_dict = {} + for desc in DEPRECATED_NVR_SWITCHES: + if not desc.supported(reolink_data.host.api): + continue + depricated_dict[f"{reolink_data.host.unique_id}_{desc.key}"] = desc + entity_reg = er.async_get(hass) reg_entities = er.async_entries_for_config_entry(entity_reg, config_entry.entry_id) for entity in reg_entities: + # Can be removed in HA 2025.2.0 if entity.domain == "switch" and entity.unique_id.endswith("_hdr"): if entity.disabled: entity_reg.async_remove(entity.entity_id) @@ -330,7 +388,24 @@ async def async_setup_entry( for channel in reolink_data.host.api.channels if DEPRECATED_HDR.supported(reolink_data.host.api, channel) ) - break + + # Can be removed in HA 2025.4.0 + if entity.domain == "switch" and entity.unique_id in depricated_dict: + if entity.disabled: + entity_reg.async_remove(entity.entity_id) + continue + + ir.async_create_issue( + hass, + DOMAIN, + "hub_switch_deprecated", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key="hub_switch_deprecated", + ) + entities.append( + ReolinkNVRSwitchEntity(reolink_data, depricated_dict[entity.unique_id]) + ) async_add_entities(entities) diff --git a/homeassistant/components/reolink/update.py b/homeassistant/components/reolink/update.py index 9b710c6576d..aa607e2b29e 100644 --- a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -3,11 +3,10 @@ from __future__ import annotations from dataclasses import dataclass -from datetime import datetime from typing import Any from reolink_aio.exceptions import 
ReolinkError -from reolink_aio.software_version import NewSoftwareVersion +from reolink_aio.software_version import NewSoftwareVersion, SoftwareVersion from homeassistant.components.update import ( UpdateDeviceClass, @@ -15,22 +14,28 @@ from homeassistant.components.update import ( UpdateEntityDescription, UpdateEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) -from . import ReolinkData -from .const import DOMAIN +from . import DEVICE_UPDATE_INTERVAL from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) +from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 +RESUME_AFTER_INSTALL = 15 POLL_AFTER_INSTALL = 120 +POLL_PROGRESS = 2 @dataclass(frozen=True, kw_only=True) @@ -68,11 +73,11 @@ HOST_UPDATE_ENTITIES = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ReolinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up update entities for Reolink component.""" - reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + reolink_data: ReolinkData = config_entry.runtime_data entities: list[ReolinkUpdateEntity | ReolinkHostUpdateEntity] = [ ReolinkUpdateEntity(reolink_data, channel, entity_description) @@ -88,25 +93,28 @@ async def async_setup_entry( async_add_entities(entities) -class ReolinkUpdateEntity( - ReolinkChannelCoordinatorEntity, - UpdateEntity, +class ReolinkUpdateBaseEntity( + CoordinatorEntity[DataUpdateCoordinator[None]], UpdateEntity ): - """Base update entity class for Reolink IP cameras.""" + """Base update entity class for Reolink.""" - entity_description: ReolinkUpdateEntityDescription _attr_release_url = "https://reolink.com/download-center/" def __init__( self, reolink_data: ReolinkData, - channel: int, - entity_description: ReolinkUpdateEntityDescription, + channel: int | None, + coordinator: DataUpdateCoordinator[None], ) -> None: """Initialize Reolink update entity.""" - self.entity_description = entity_description - super().__init__(reolink_data, channel, reolink_data.firmware_coordinator) + CoordinatorEntity.__init__(self, coordinator) + self._channel = channel + self._host = reolink_data.host self._cancel_update: CALLBACK_TYPE | None = None + self._cancel_resume: CALLBACK_TYPE | None = None + self._cancel_progress: CALLBACK_TYPE | None = None + self._installing: bool = False + self._reolink_data = reolink_data @property def installed_version(self) -> str | None: @@ -125,6 +133,16 @@ class ReolinkUpdateEntity( return new_firmware.version_string + @property + def in_progress(self) -> bool: + """Update installation progress.""" + return self._host.api.sw_upload_progress(self._channel) < 100 + + @property + def update_percentage(self) -> int: + """Update installation progress.""" + return self._host.api.sw_upload_progress(self._channel) + @property def supported_features(self) -> UpdateEntityFeature: """Flag supported features.""" @@ -132,13 +150,31 @@ class ReolinkUpdateEntity( new_firmware = self._host.api.firmware_update_available(self._channel) if isinstance(new_firmware, NewSoftwareVersion): supported_features |= 
UpdateEntityFeature.RELEASE_NOTES + supported_features |= UpdateEntityFeature.PROGRESS return supported_features + @property + def available(self) -> bool: + """Return True if entity is available.""" + if self._installing or self._cancel_update is not None: + return True + return super().available + + def version_is_newer(self, latest_version: str, installed_version: str) -> bool: + """Return True if latest_version is newer than installed_version.""" + try: + installed = SoftwareVersion(installed_version) + latest = SoftwareVersion(latest_version) + except ReolinkError: + # when the online update API returns a unexpected string + return True + + return latest > installed + async def async_release_notes(self) -> str | None: """Return the release notes.""" new_firmware = self._host.api.firmware_update_available(self._channel) - if not isinstance(new_firmware, NewSoftwareVersion): - return None + assert isinstance(new_firmware, NewSoftwareVersion) return ( "If the install button fails, download this" @@ -151,6 +187,11 @@ class ReolinkUpdateEntity( self, version: str | None, backup: bool, **kwargs: Any ) -> None: """Install the latest firmware version.""" + self._installing = True + await self._pause_update_coordinator() + self._cancel_progress = async_call_later( + self.hass, POLL_PROGRESS, self._async_update_progress + ) try: await self._host.api.update_firmware(self._channel) except ReolinkError as err: @@ -162,10 +203,38 @@ class ReolinkUpdateEntity( self._cancel_update = async_call_later( self.hass, POLL_AFTER_INSTALL, self._async_update_future ) + self._cancel_resume = async_call_later( + self.hass, RESUME_AFTER_INSTALL, self._resume_update_coordinator + ) + self._installing = False - async def _async_update_future(self, now: datetime | None = None) -> None: + async def _pause_update_coordinator(self) -> None: + """Pause updating the states using the data update coordinator (during reboots).""" + self._reolink_data.device_coordinator.update_interval = None + self._reolink_data.device_coordinator.async_set_updated_data(None) + + async def _resume_update_coordinator(self, *args: Any) -> None: + """Resume updating the states using the data update coordinator (after reboots).""" + self._reolink_data.device_coordinator.update_interval = DEVICE_UPDATE_INTERVAL + try: + await self._reolink_data.device_coordinator.async_refresh() + finally: + self._cancel_resume = None + + async def _async_update_progress(self, *args: Any) -> None: """Request update.""" - await self.async_update() + self.async_write_ha_state() + if self._installing: + self._cancel_progress = async_call_later( + self.hass, POLL_PROGRESS, self._async_update_progress + ) + + async def _async_update_future(self, *args: Any) -> None: + """Request update.""" + try: + await self.async_update() + finally: + self._cancel_update = None async def async_added_to_hass(self) -> None: """Entity created.""" @@ -179,16 +248,44 @@ class ReolinkUpdateEntity( self._host.firmware_ch_list.remove(self._channel) if self._cancel_update is not None: self._cancel_update() + if self._cancel_progress is not None: + self._cancel_progress() + if self._cancel_resume is not None: + self._cancel_resume() + + +class ReolinkUpdateEntity( + ReolinkUpdateBaseEntity, + ReolinkChannelCoordinatorEntity, +): + """Base update entity class for Reolink IP cameras.""" + + entity_description: ReolinkUpdateEntityDescription + _channel: int + + def __init__( + self, + reolink_data: ReolinkData, + channel: int, + entity_description: ReolinkUpdateEntityDescription, + ) -> None: 
+ """Initialize Reolink update entity.""" + self.entity_description = entity_description + ReolinkUpdateBaseEntity.__init__( + self, reolink_data, channel, reolink_data.firmware_coordinator + ) + ReolinkChannelCoordinatorEntity.__init__( + self, reolink_data, channel, reolink_data.firmware_coordinator + ) class ReolinkHostUpdateEntity( + ReolinkUpdateBaseEntity, ReolinkHostCoordinatorEntity, - UpdateEntity, ): """Update entity class for Reolink Host.""" entity_description: ReolinkHostUpdateEntityDescription - _attr_release_url = "https://reolink.com/download-center/" def __init__( self, @@ -197,77 +294,9 @@ class ReolinkHostUpdateEntity( ) -> None: """Initialize Reolink update entity.""" self.entity_description = entity_description - super().__init__(reolink_data, reolink_data.firmware_coordinator) - self._cancel_update: CALLBACK_TYPE | None = None - - @property - def installed_version(self) -> str | None: - """Version currently in use.""" - return self._host.api.sw_version - - @property - def latest_version(self) -> str | None: - """Latest version available for install.""" - new_firmware = self._host.api.firmware_update_available() - if not new_firmware: - return self.installed_version - - if isinstance(new_firmware, str): - return new_firmware - - return new_firmware.version_string - - @property - def supported_features(self) -> UpdateEntityFeature: - """Flag supported features.""" - supported_features = UpdateEntityFeature.INSTALL - new_firmware = self._host.api.firmware_update_available() - if isinstance(new_firmware, NewSoftwareVersion): - supported_features |= UpdateEntityFeature.RELEASE_NOTES - return supported_features - - async def async_release_notes(self) -> str | None: - """Return the release notes.""" - new_firmware = self._host.api.firmware_update_available() - if not isinstance(new_firmware, NewSoftwareVersion): - return None - - return ( - "If the install button fails, download this" - f" [firmware zip file]({new_firmware.download_url})." 
- " Then, follow the installation guide (PDF in the zip file).\n\n" - f"## Release notes\n\n{new_firmware.release_notes}" + ReolinkUpdateBaseEntity.__init__( + self, reolink_data, None, reolink_data.firmware_coordinator + ) + ReolinkHostCoordinatorEntity.__init__( + self, reolink_data, reolink_data.firmware_coordinator ) - - async def async_install( - self, version: str | None, backup: bool, **kwargs: Any - ) -> None: - """Install the latest firmware version.""" - try: - await self._host.api.update_firmware() - except ReolinkError as err: - raise HomeAssistantError( - f"Error trying to update Reolink firmware: {err}" - ) from err - finally: - self.async_write_ha_state() - self._cancel_update = async_call_later( - self.hass, POLL_AFTER_INSTALL, self._async_update_future - ) - - async def _async_update_future(self, now: datetime | None = None) -> None: - """Request update.""" - await self.async_update() - - async def async_added_to_hass(self) -> None: - """Entity created.""" - await super().async_added_to_hass() - self._host.firmware_ch_list.append(None) - - async def async_will_remove_from_hass(self) -> None: - """Entity removed.""" - await super().async_will_remove_from_hass() - if None in self._host.firmware_ch_list: - self._host.firmware_ch_list.remove(None) - if self._cancel_update is not None: - self._cancel_update() diff --git a/homeassistant/components/reolink/util.py b/homeassistant/components/reolink/util.py index 305579e35cb..98c0e7b925b 100644 --- a/homeassistant/components/reolink/util.py +++ b/homeassistant/components/reolink/util.py @@ -12,6 +12,8 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN from .host import ReolinkHost +type ReolinkConfigEntry = config_entries.ConfigEntry[ReolinkData] + @dataclass class ReolinkData: @@ -24,13 +26,10 @@ class ReolinkData: def is_connected(hass: HomeAssistant, config_entry: config_entries.ConfigEntry) -> bool: """Check if an existing entry has a proper connection.""" - reolink_data: ReolinkData | None = hass.data.get(DOMAIN, {}).get( - config_entry.entry_id - ) return ( - reolink_data is not None + hasattr(config_entry, "runtime_data") and config_entry.state == config_entries.ConfigEntryState.LOADED - and reolink_data.device_coordinator.last_update_success + and config_entry.runtime_data.device_coordinator.last_update_success ) diff --git a/homeassistant/components/repairs/issue_handler.py b/homeassistant/components/repairs/issue_handler.py index 38dcea1668d..cc7e017699d 100644 --- a/homeassistant/components/repairs/issue_handler.py +++ b/homeassistant/components/repairs/issue_handler.py @@ -53,7 +53,7 @@ class RepairsFlowManager(data_entry_flow.FlowManager): self, handler_key: str, *, - context: dict[str, Any] | None = None, + context: data_entry_flow.FlowContext | None = None, data: dict[str, Any] | None = None, ) -> RepairsFlow: """Create a flow. platform is a repairs module.""" @@ -82,7 +82,11 @@ class RepairsFlowManager(data_entry_flow.FlowManager): async def async_finish_flow( self, flow: data_entry_flow.FlowHandler, result: data_entry_flow.FlowResult ) -> data_entry_flow.FlowResult: - """Complete a fix flow.""" + """Complete a fix flow. + + This method is called when a flow step returns FlowResultType.ABORT or + FlowResultType.CREATE_ENTRY. 
+ """ if result.get("type") != data_entry_flow.FlowResultType.ABORT: ir.async_delete_issue(self.hass, flow.handler, flow.init_data["issue_id"]) if "result" not in result: diff --git a/homeassistant/components/repetier/__init__.py b/homeassistant/components/repetier/__init__.py index 2642e78e7ec..27ddc62a847 100644 --- a/homeassistant/components/repetier/__init__.py +++ b/homeassistant/components/repetier/__init__.py @@ -133,6 +133,7 @@ class RepetierRequiredKeysMixin: @dataclass(frozen=True) +# pylint: disable-next=hass-enforce-class-module class RepetierSensorEntityDescription( SensorEntityDescription, RepetierRequiredKeysMixin ): diff --git a/homeassistant/components/repetier/manifest.json b/homeassistant/components/repetier/manifest.json index dfddb298284..7392ae0b23e 100644 --- a/homeassistant/components/repetier/manifest.json +++ b/homeassistant/components/repetier/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/repetier", "iot_class": "local_polling", "loggers": ["pyrepetierng"], + "quality_scale": "legacy", "requirements": ["pyrepetierng==0.1.0"] } diff --git a/homeassistant/components/rest/__init__.py b/homeassistant/components/rest/__init__.py index 59239ad6744..5695e51933e 100644 --- a/homeassistant/components/rest/__init__.py +++ b/homeassistant/components/rest/__init__.py @@ -180,6 +180,7 @@ def _rest_coordinator( return DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name="rest data", update_method=update_method, update_interval=update_interval, diff --git a/homeassistant/components/rest/icons.json b/homeassistant/components/rest/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/rest/icons.json +++ b/homeassistant/components/rest/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/rest_command/icons.json b/homeassistant/components/rest_command/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/rest_command/icons.json +++ b/homeassistant/components/rest_command/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/rflink/__init__.py b/homeassistant/components/rflink/__init__.py index e5d5e97fa84..7e86854dbce 100644 --- a/homeassistant/components/rflink/__init__.py +++ b/homeassistant/components/rflink/__init__.py @@ -6,85 +6,59 @@ import asyncio from collections import defaultdict import logging -from rflink.protocol import ProtocolBase, create_rflink_connection +from rflink.protocol import create_rflink_connection from serial import SerialException import voluptuous as vol from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_STATE, CONF_COMMAND, CONF_DEVICE_ID, CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP, - STATE_ON, ) from homeassistant.core import CoreState, HassJob, HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) -from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_call_later -from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType -from .utils import brightness_to_rflink +from .const import ( + DATA_DEVICE_REGISTER, + DATA_ENTITY_GROUP_LOOKUP, + 
DATA_ENTITY_LOOKUP, + EVENT_KEY_COMMAND, + EVENT_KEY_ID, + EVENT_KEY_SENSOR, + SIGNAL_AVAILABILITY, + SIGNAL_HANDLE_EVENT, + TMP_ENTITY, +) +from .entity import RflinkCommand +from .utils import identify_event_type _LOGGER = logging.getLogger(__name__) -ATTR_EVENT = "event" - -CONF_ALIASES = "aliases" -CONF_GROUP_ALIASES = "group_aliases" -CONF_GROUP = "group" -CONF_NOGROUP_ALIASES = "nogroup_aliases" -CONF_DEVICE_DEFAULTS = "device_defaults" -CONF_AUTOMATIC_ADD = "automatic_add" -CONF_FIRE_EVENT = "fire_event" CONF_IGNORE_DEVICES = "ignore_devices" CONF_RECONNECT_INTERVAL = "reconnect_interval" -CONF_SIGNAL_REPETITIONS = "signal_repetitions" CONF_WAIT_FOR_ACK = "wait_for_ack" CONF_KEEPALIVE_IDLE = "tcp_keepalive_idle_timer" -DATA_DEVICE_REGISTER = "rflink_device_register" -DATA_ENTITY_LOOKUP = "rflink_entity_lookup" -DATA_ENTITY_GROUP_LOOKUP = "rflink_entity_group_only_lookup" DEFAULT_RECONNECT_INTERVAL = 10 -DEFAULT_SIGNAL_REPETITIONS = 1 DEFAULT_TCP_KEEPALIVE_IDLE_TIMER = 3600 CONNECTION_TIMEOUT = 10 -EVENT_BUTTON_PRESSED = "button_pressed" -EVENT_KEY_COMMAND = "command" -EVENT_KEY_ID = "id" -EVENT_KEY_SENSOR = "sensor" -EVENT_KEY_UNIT = "unit" - RFLINK_GROUP_COMMANDS = ["allon", "alloff"] DOMAIN = "rflink" SERVICE_SEND_COMMAND = "send_command" -SIGNAL_AVAILABILITY = "rflink_device_available" -SIGNAL_HANDLE_EVENT = "rflink_handle_event_{}" SIGNAL_EVENT = "rflink_event" -TMP_ENTITY = "tmp.{}" - -DEVICE_DEFAULTS_SCHEMA = vol.Schema( - { - vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean, - vol.Optional( - CONF_SIGNAL_REPETITIONS, default=DEFAULT_SIGNAL_REPETITIONS - ): vol.Coerce(int), - } -) CONFIG_SCHEMA = vol.Schema( { @@ -113,18 +87,6 @@ SEND_COMMAND_SCHEMA = vol.Schema( ) -def identify_event_type(event): - """Look at event to determine type of device. - - Async friendly. - """ - if EVENT_KEY_COMMAND in event: - return EVENT_KEY_COMMAND - if EVENT_KEY_SENSOR in event: - return EVENT_KEY_SENSOR - return "unknown" - - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Rflink component.""" # Allow entities to register themselves by device_id to be looked up when @@ -264,7 +226,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def connect(): """Set up connection and hook it into HA for reconnect/shutdown.""" - _LOGGER.info("Initiating Rflink connection") + _LOGGER.debug("Initiating Rflink connection") # Rflink create_rflink_connection decides based on the value of host # (string or None) if serial or tcp mode should be used @@ -311,303 +273,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: EVENT_HOMEASSISTANT_STOP, lambda x: transport.close() ) - _LOGGER.info("Connected to Rflink") + _LOGGER.debug("Connected to Rflink") hass.async_create_task(connect(), eager_start=False) async_dispatcher_connect(hass, SIGNAL_EVENT, event_callback) return True - - -class RflinkDevice(Entity): - """Representation of a Rflink device. - - Contains the common logic for Rflink entities. 
- """ - - _state: bool | None = None - _available = True - _attr_should_poll = False - - def __init__( - self, - device_id, - initial_event=None, - name=None, - aliases=None, - group=True, - group_aliases=None, - nogroup_aliases=None, - fire_event=False, - signal_repetitions=DEFAULT_SIGNAL_REPETITIONS, - ): - """Initialize the device.""" - # Rflink specific attributes for every component type - self._initial_event = initial_event - self._device_id = device_id - self._attr_unique_id = device_id - if name: - self._name = name - else: - self._name = device_id - - self._aliases = aliases - self._group = group - self._group_aliases = group_aliases - self._nogroup_aliases = nogroup_aliases - self._should_fire_event = fire_event - self._signal_repetitions = signal_repetitions - - @callback - def handle_event_callback(self, event): - """Handle incoming event for device type.""" - # Call platform specific event handler - self._handle_event(event) - - # Propagate changes through ha - self.async_write_ha_state() - - # Put command onto bus for user to subscribe to - if self._should_fire_event and identify_event_type(event) == EVENT_KEY_COMMAND: - self.hass.bus.async_fire( - EVENT_BUTTON_PRESSED, - {ATTR_ENTITY_ID: self.entity_id, ATTR_STATE: event[EVENT_KEY_COMMAND]}, - ) - _LOGGER.debug( - "Fired bus event for %s: %s", self.entity_id, event[EVENT_KEY_COMMAND] - ) - - def _handle_event(self, event): - """Platform specific event handler.""" - raise NotImplementedError - - @property - def name(self): - """Return a name for the device.""" - return self._name - - @property - def is_on(self): - """Return true if device is on.""" - if self.assumed_state: - return False - return self._state - - @property - def assumed_state(self): - """Assume device state until first device event sets state.""" - return self._state is None - - @property - def available(self): - """Return True if entity is available.""" - return self._available - - @callback - def _availability_callback(self, availability): - """Update availability state.""" - self._available = availability - self.async_write_ha_state() - - async def async_added_to_hass(self): - """Register update callback.""" - await super().async_added_to_hass() - # Remove temporary bogus entity_id if added - tmp_entity = TMP_ENTITY.format(self._device_id) - if ( - tmp_entity - in self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][self._device_id] - ): - self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][ - self._device_id - ].remove(tmp_entity) - - # Register id and aliases - self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][self._device_id].append( - self.entity_id - ) - if self._group: - self.hass.data[DATA_ENTITY_GROUP_LOOKUP][EVENT_KEY_COMMAND][ - self._device_id - ].append(self.entity_id) - # aliases respond to both normal and group commands (allon/alloff) - if self._aliases: - for _id in self._aliases: - self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][_id].append( - self.entity_id - ) - self.hass.data[DATA_ENTITY_GROUP_LOOKUP][EVENT_KEY_COMMAND][_id].append( - self.entity_id - ) - # group_aliases only respond to group commands (allon/alloff) - if self._group_aliases: - for _id in self._group_aliases: - self.hass.data[DATA_ENTITY_GROUP_LOOKUP][EVENT_KEY_COMMAND][_id].append( - self.entity_id - ) - # nogroup_aliases only respond to normal commands - if self._nogroup_aliases: - for _id in self._nogroup_aliases: - self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][_id].append( - self.entity_id - ) - self.async_on_remove( - async_dispatcher_connect( - 
self.hass, SIGNAL_AVAILABILITY, self._availability_callback - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_HANDLE_EVENT.format(self.entity_id), - self.handle_event_callback, - ) - ) - - # Process the initial event now that the entity is created - if self._initial_event: - self.handle_event_callback(self._initial_event) - - -class RflinkCommand(RflinkDevice): - """Singleton class to make Rflink command interface available to entities. - - This class is to be inherited by every Entity class that is actionable - (switches/lights). It exposes the Rflink command interface for these - entities. - - The Rflink interface is managed as a class level and set during setup (and - reset on reconnect). - """ - - # Keep repetition tasks to cancel if state is changed before repetitions - # are sent - _repetition_task: asyncio.Task[None] | None = None - - _protocol: ProtocolBase | None = None - - _wait_ack: bool | None = None - - @classmethod - def set_rflink_protocol( - cls, protocol: ProtocolBase | None, wait_ack: bool | None = None - ) -> None: - """Set the Rflink asyncio protocol as a class variable.""" - cls._protocol = protocol - if wait_ack is not None: - cls._wait_ack = wait_ack - - @classmethod - def is_connected(cls): - """Return connection status.""" - return bool(cls._protocol) - - @classmethod - async def send_command(cls, device_id, action): - """Send device command to Rflink and wait for acknowledgement.""" - return await cls._protocol.send_command_ack(device_id, action) - - async def _async_handle_command(self, command, *args): - """Do bookkeeping for command, send it to rflink and update state.""" - self.cancel_queued_send_commands() - - if command == "turn_on": - cmd = "on" - self._state = True - - elif command == "turn_off": - cmd = "off" - self._state = False - - elif command == "dim": - # convert brightness to rflink dim level - cmd = str(brightness_to_rflink(args[0])) - self._state = True - - elif command == "toggle": - cmd = "on" - # if the state is unknown or false, it gets set as true - # if the state is true, it gets set as false - self._state = self._state in [None, False] - - # Cover options for RFlink - elif command == "close_cover": - cmd = "DOWN" - self._state = False - - elif command == "open_cover": - cmd = "UP" - self._state = True - - elif command == "stop_cover": - cmd = "STOP" - self._state = True - - # Send initial command and queue repetitions. - # This allows the entity state to be updated quickly and not having to - # wait for all repetitions to be sent - await self._async_send_command(cmd, self._signal_repetitions) - - # Update state of entity - self.async_write_ha_state() - - def cancel_queued_send_commands(self): - """Cancel queued signal repetition commands. - - For example when user changed state while repetitions are still - queued for broadcast. Or when an incoming Rflink command (remote - switch) changes the state. - """ - # cancel any outstanding tasks from the previous state change - if self._repetition_task: - self._repetition_task.cancel() - - async def _async_send_command(self, cmd, repetitions): - """Send a command for device to Rflink gateway.""" - _LOGGER.debug("Sending command: %s to Rflink device: %s", cmd, self._device_id) - - if not self.is_connected(): - raise HomeAssistantError("Cannot send command, not connected!") - - if self._wait_ack: - # Puts command on outgoing buffer then waits for Rflink to confirm - # the command has been sent out. 
- await self._protocol.send_command_ack(self._device_id, cmd) - else: - # Puts command on outgoing buffer and returns straight away. - # Rflink protocol/transport handles asynchronous writing of buffer - # to serial/tcp device. Does not wait for command send - # confirmation. - self._protocol.send_command(self._device_id, cmd) - - if repetitions > 1: - self._repetition_task = self.hass.async_create_task( - self._async_send_command(cmd, repetitions - 1), eager_start=False - ) - - -class SwitchableRflinkDevice(RflinkCommand, RestoreEntity): - """Rflink entity which can switch on/off (eg: light, switch).""" - - async def async_added_to_hass(self): - """Restore RFLink device state (ON/OFF).""" - await super().async_added_to_hass() - if (old_state := await self.async_get_last_state()) is not None: - self._state = old_state.state == STATE_ON - - def _handle_event(self, event): - """Adjust state if Rflink picks up a remote command for this device.""" - self.cancel_queued_send_commands() - - command = event["command"] - if command in ["on", "allon"]: - self._state = True - elif command in ["off", "alloff"]: - self._state = False - - async def async_turn_on(self, **kwargs): - """Turn the device on.""" - await self._async_handle_command("turn_on") - - async def async_turn_off(self, **kwargs): - """Turn the device off.""" - await self._async_handle_command("turn_off") diff --git a/homeassistant/components/rflink/binary_sensor.py b/homeassistant/components/rflink/binary_sensor.py index b731037fbfc..29046ba7616 100644 --- a/homeassistant/components/rflink/binary_sensor.py +++ b/homeassistant/components/rflink/binary_sensor.py @@ -26,7 +26,8 @@ import homeassistant.helpers.event as evt from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import CONF_ALIASES, RflinkDevice +from .const import CONF_ALIASES +from .entity import RflinkDevice CONF_OFF_DELAY = "off_delay" DEFAULT_FORCE_UPDATE = False diff --git a/homeassistant/components/rflink/const.py b/homeassistant/components/rflink/const.py new file mode 100644 index 00000000000..cc52ea978bd --- /dev/null +++ b/homeassistant/components/rflink/const.py @@ -0,0 +1,40 @@ +"""Support for Rflink devices.""" + +from __future__ import annotations + +import voluptuous as vol + +import homeassistant.helpers.config_validation as cv + +CONF_ALIASES = "aliases" +CONF_GROUP_ALIASES = "group_aliases" +CONF_GROUP = "group" +CONF_NOGROUP_ALIASES = "nogroup_aliases" +CONF_DEVICE_DEFAULTS = "device_defaults" +CONF_AUTOMATIC_ADD = "automatic_add" +CONF_FIRE_EVENT = "fire_event" +CONF_SIGNAL_REPETITIONS = "signal_repetitions" + +DATA_DEVICE_REGISTER = "rflink_device_register" +DATA_ENTITY_GROUP_LOOKUP = "rflink_entity_group_only_lookup" +DATA_ENTITY_LOOKUP = "rflink_entity_lookup" +DEFAULT_SIGNAL_REPETITIONS = 1 + +EVENT_KEY_COMMAND = "command" +EVENT_KEY_ID = "id" +EVENT_KEY_SENSOR = "sensor" +EVENT_KEY_UNIT = "unit" + +SIGNAL_AVAILABILITY = "rflink_device_available" +SIGNAL_HANDLE_EVENT = "rflink_handle_event_{}" + +TMP_ENTITY = "tmp.{}" + +DEVICE_DEFAULTS_SCHEMA = vol.Schema( + { + vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean, + vol.Optional( + CONF_SIGNAL_REPETITIONS, default=DEFAULT_SIGNAL_REPETITIONS + ): vol.Coerce(int), + } +) diff --git a/homeassistant/components/rflink/cover.py b/homeassistant/components/rflink/cover.py index 54a84a68a2e..695825cf31b 100644 --- a/homeassistant/components/rflink/cover.py +++ b/homeassistant/components/rflink/cover.py @@ -10,15 +10,16 @@ import voluptuous as vol from homeassistant.components.cover import ( PLATFORM_SCHEMA as COVER_PLATFORM_SCHEMA, CoverEntity, + CoverState, ) -from homeassistant.const import CONF_DEVICES, CONF_NAME, CONF_TYPE, STATE_OPEN +from homeassistant.const import CONF_DEVICES, CONF_NAME, CONF_TYPE from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ( +from .const import ( CONF_ALIASES, CONF_DEVICE_DEFAULTS, CONF_FIRE_EVENT, @@ -27,8 +28,8 @@ from . 
import ( CONF_NOGROUP_ALIASES, CONF_SIGNAL_REPETITIONS, DEVICE_DEFAULTS_SCHEMA, - RflinkCommand, ) +from .entity import RflinkCommand _LOGGER = logging.getLogger(__name__) @@ -133,7 +134,7 @@ class RflinkCover(RflinkCommand, CoverEntity, RestoreEntity): """Restore RFLink cover state (OPEN/CLOSE).""" await super().async_added_to_hass() if (old_state := await self.async_get_last_state()) is not None: - self._state = old_state.state == STATE_OPEN + self._state = old_state.state == CoverState.OPEN def _handle_event(self, event): """Adjust state if Rflink picks up a remote command for this device.""" diff --git a/homeassistant/components/rflink/entity.py b/homeassistant/components/rflink/entity.py new file mode 100644 index 00000000000..26153acf7ba --- /dev/null +++ b/homeassistant/components/rflink/entity.py @@ -0,0 +1,325 @@ +"""Support for Rflink devices.""" + +from __future__ import annotations + +import asyncio +import logging + +from rflink.protocol import ProtocolBase + +from homeassistant.const import ATTR_ENTITY_ID, ATTR_STATE, STATE_ON +from homeassistant.core import callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.restore_state import RestoreEntity + +from .const import ( + DATA_ENTITY_GROUP_LOOKUP, + DATA_ENTITY_LOOKUP, + DEFAULT_SIGNAL_REPETITIONS, + EVENT_KEY_COMMAND, + SIGNAL_AVAILABILITY, + SIGNAL_HANDLE_EVENT, + TMP_ENTITY, +) +from .utils import brightness_to_rflink, identify_event_type + +_LOGGER = logging.getLogger(__name__) + +EVENT_BUTTON_PRESSED = "button_pressed" + + +class RflinkDevice(Entity): + """Representation of a Rflink device. + + Contains the common logic for Rflink entities. 
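
The DEVICE_DEFAULTS_SCHEMA introduced in the new rflink const.py above is a plain voluptuous schema and can be exercised on its own. A minimal sketch, assuming only that voluptuous is installed; a bare bool stands in for cv.boolean so the example has no Home Assistant dependency:

    import voluptuous as vol

    # Mirrors DEVICE_DEFAULTS_SCHEMA from const.py: fire_event defaults to False,
    # signal_repetitions defaults to 1 and is coerced to int.
    DEVICE_DEFAULTS = vol.Schema(
        {
            vol.Optional("fire_event", default=False): bool,
            vol.Optional("signal_repetitions", default=1): vol.Coerce(int),
        }
    )

    assert DEVICE_DEFAULTS({}) == {"fire_event": False, "signal_repetitions": 1}
    assert DEVICE_DEFAULTS({"signal_repetitions": "3"})["signal_repetitions"] == 3
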
+ """ + + _state: bool | None = None + _available = True + _attr_should_poll = False + + def __init__( + self, + device_id, + initial_event=None, + name=None, + aliases=None, + group=True, + group_aliases=None, + nogroup_aliases=None, + fire_event=False, + signal_repetitions=DEFAULT_SIGNAL_REPETITIONS, + ): + """Initialize the device.""" + # Rflink specific attributes for every component type + self._initial_event = initial_event + self._device_id = device_id + self._attr_unique_id = device_id + if name: + self._name = name + else: + self._name = device_id + + self._aliases = aliases + self._group = group + self._group_aliases = group_aliases + self._nogroup_aliases = nogroup_aliases + self._should_fire_event = fire_event + self._signal_repetitions = signal_repetitions + + @callback + def handle_event_callback(self, event): + """Handle incoming event for device type.""" + # Call platform specific event handler + self._handle_event(event) + + # Propagate changes through ha + self.async_write_ha_state() + + # Put command onto bus for user to subscribe to + if self._should_fire_event and identify_event_type(event) == EVENT_KEY_COMMAND: + self.hass.bus.async_fire( + EVENT_BUTTON_PRESSED, + {ATTR_ENTITY_ID: self.entity_id, ATTR_STATE: event[EVENT_KEY_COMMAND]}, + ) + _LOGGER.debug( + "Fired bus event for %s: %s", self.entity_id, event[EVENT_KEY_COMMAND] + ) + + def _handle_event(self, event): + """Platform specific event handler.""" + raise NotImplementedError + + @property + def name(self): + """Return a name for the device.""" + return self._name + + @property + def is_on(self): + """Return true if device is on.""" + if self.assumed_state: + return False + return self._state + + @property + def assumed_state(self): + """Assume device state until first device event sets state.""" + return self._state is None + + @property + def available(self): + """Return True if entity is available.""" + return self._available + + @callback + def _availability_callback(self, availability): + """Update availability state.""" + self._available = availability + self.async_write_ha_state() + + async def async_added_to_hass(self): + """Register update callback.""" + await super().async_added_to_hass() + # Remove temporary bogus entity_id if added + tmp_entity = TMP_ENTITY.format(self._device_id) + if ( + tmp_entity + in self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][self._device_id] + ): + self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][ + self._device_id + ].remove(tmp_entity) + + # Register id and aliases + self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][self._device_id].append( + self.entity_id + ) + if self._group: + self.hass.data[DATA_ENTITY_GROUP_LOOKUP][EVENT_KEY_COMMAND][ + self._device_id + ].append(self.entity_id) + # aliases respond to both normal and group commands (allon/alloff) + if self._aliases: + for _id in self._aliases: + self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][_id].append( + self.entity_id + ) + self.hass.data[DATA_ENTITY_GROUP_LOOKUP][EVENT_KEY_COMMAND][_id].append( + self.entity_id + ) + # group_aliases only respond to group commands (allon/alloff) + if self._group_aliases: + for _id in self._group_aliases: + self.hass.data[DATA_ENTITY_GROUP_LOOKUP][EVENT_KEY_COMMAND][_id].append( + self.entity_id + ) + # nogroup_aliases only respond to normal commands + if self._nogroup_aliases: + for _id in self._nogroup_aliases: + self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND][_id].append( + self.entity_id + ) + self.async_on_remove( + async_dispatcher_connect( + 
self.hass, SIGNAL_AVAILABILITY, self._availability_callback + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + SIGNAL_HANDLE_EVENT.format(self.entity_id), + self.handle_event_callback, + ) + ) + + # Process the initial event now that the entity is created + if self._initial_event: + self.handle_event_callback(self._initial_event) + + +class RflinkCommand(RflinkDevice): + """Singleton class to make Rflink command interface available to entities. + + This class is to be inherited by every Entity class that is actionable + (switches/lights). It exposes the Rflink command interface for these + entities. + + The Rflink interface is managed as a class level and set during setup (and + reset on reconnect). + """ + + # Keep repetition tasks to cancel if state is changed before repetitions + # are sent + _repetition_task: asyncio.Task[None] | None = None + + _protocol: ProtocolBase | None = None + + _wait_ack: bool | None = None + + @classmethod + def set_rflink_protocol( + cls, protocol: ProtocolBase | None, wait_ack: bool | None = None + ) -> None: + """Set the Rflink asyncio protocol as a class variable.""" + cls._protocol = protocol + if wait_ack is not None: + cls._wait_ack = wait_ack + + @classmethod + def is_connected(cls): + """Return connection status.""" + return bool(cls._protocol) + + @classmethod + async def send_command(cls, device_id, action): + """Send device command to Rflink and wait for acknowledgement.""" + return await cls._protocol.send_command_ack(device_id, action) + + async def _async_handle_command(self, command, *args): + """Do bookkeeping for command, send it to rflink and update state.""" + self.cancel_queued_send_commands() + + if command == "turn_on": + cmd = "on" + self._state = True + + elif command == "turn_off": + cmd = "off" + self._state = False + + elif command == "dim": + # convert brightness to rflink dim level + cmd = str(brightness_to_rflink(args[0])) + self._state = True + + elif command == "toggle": + cmd = "on" + # if the state is unknown or false, it gets set as true + # if the state is true, it gets set as false + self._state = self._state in [None, False] + + # Cover options for RFlink + elif command == "close_cover": + cmd = "DOWN" + self._state = False + + elif command == "open_cover": + cmd = "UP" + self._state = True + + elif command == "stop_cover": + cmd = "STOP" + self._state = True + + # Send initial command and queue repetitions. + # This allows the entity state to be updated quickly and not having to + # wait for all repetitions to be sent + await self._async_send_command(cmd, self._signal_repetitions) + + # Update state of entity + self.async_write_ha_state() + + def cancel_queued_send_commands(self): + """Cancel queued signal repetition commands. + + For example when user changed state while repetitions are still + queued for broadcast. Or when an incoming Rflink command (remote + switch) changes the state. + """ + # cancel any outstanding tasks from the previous state change + if self._repetition_task: + self._repetition_task.cancel() + + async def _async_send_command(self, cmd, repetitions): + """Send a command for device to Rflink gateway.""" + _LOGGER.debug("Sending command: %s to Rflink device: %s", cmd, self._device_id) + + if not self.is_connected(): + raise HomeAssistantError("Cannot send command, not connected!") + + if self._wait_ack: + # Puts command on outgoing buffer then waits for Rflink to confirm + # the command has been sent out. 
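
As the comments above note, _async_handle_command pushes the first command out immediately and only queues the remaining signal repetitions (the recursive scheduling follows just below), so the entity state updates without waiting for every repeat. A stripped-down sketch of that pattern using a stand-in send() coroutine and a made-up device id, not the real rflink API:

    from __future__ import annotations

    import asyncio


    async def send(device_id: str, cmd: str) -> None:
        """Stand-in for the rflink protocol write; just prints."""
        print(f"sending {cmd} to {device_id}")


    async def send_with_repetitions(
        device_id: str, cmd: str, repetitions: int
    ) -> asyncio.Task | None:
        """Send once now, then queue the remaining repeats as a background task."""
        await send(device_id, cmd)
        if repetitions > 1:
            # The entity keeps this task so it can be cancelled if the state
            # changes before all repeats go out (cancel_queued_send_commands).
            return asyncio.create_task(
                send_with_repetitions(device_id, cmd, repetitions - 1)
            )
        return None


    async def main() -> None:
        task = await send_with_repetitions("newkaku_00000001_1", "on", 3)
        # Home Assistant keeps the task for cancellation; the demo just drains it.
        while task is not None:
            task = await task


    asyncio.run(main())
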
+ await self._protocol.send_command_ack(self._device_id, cmd) + else: + # Puts command on outgoing buffer and returns straight away. + # Rflink protocol/transport handles asynchronous writing of buffer + # to serial/tcp device. Does not wait for command send + # confirmation. + self._protocol.send_command(self._device_id, cmd) + + if repetitions > 1: + self._repetition_task = self.hass.async_create_task( + self._async_send_command(cmd, repetitions - 1), eager_start=False + ) + + +class SwitchableRflinkDevice(RflinkCommand, RestoreEntity): + """Rflink entity which can switch on/off (eg: light, switch).""" + + async def async_added_to_hass(self): + """Restore RFLink device state (ON/OFF).""" + await super().async_added_to_hass() + if (old_state := await self.async_get_last_state()) is not None: + self._state = old_state.state == STATE_ON + + def _handle_event(self, event): + """Adjust state if Rflink picks up a remote command for this device.""" + self.cancel_queued_send_commands() + + command = event["command"] + if command in ["on", "allon"]: + self._state = True + elif command in ["off", "alloff"]: + self._state = False + + async def async_turn_on(self, **kwargs): + """Turn the device on.""" + await self._async_handle_command("turn_on") + + async def async_turn_off(self, **kwargs): + """Turn the device off.""" + await self._async_handle_command("turn_off") diff --git a/homeassistant/components/rflink/icons.json b/homeassistant/components/rflink/icons.json index 988b048eee7..de2942f44ac 100644 --- a/homeassistant/components/rflink/icons.json +++ b/homeassistant/components/rflink/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_command": "mdi:send" + "send_command": { + "service": "mdi:send" + } } } diff --git a/homeassistant/components/rflink/light.py b/homeassistant/components/rflink/light.py index b29bb4f1d48..00117140abb 100644 --- a/homeassistant/components/rflink/light.py +++ b/homeassistant/components/rflink/light.py @@ -20,7 +20,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ( +from .const import ( CONF_ALIASES, CONF_AUTOMATIC_ADD, CONF_DEVICE_DEFAULTS, @@ -33,8 +33,8 @@ from . import ( DEVICE_DEFAULTS_SCHEMA, EVENT_KEY_COMMAND, EVENT_KEY_ID, - SwitchableRflinkDevice, ) +from .entity import SwitchableRflinkDevice from .utils import brightness_to_rflink, rflink_to_brightness _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/rflink/manifest.json b/homeassistant/components/rflink/manifest.json index 7917fa0bded..f5f372d2d33 100644 --- a/homeassistant/components/rflink/manifest.json +++ b/homeassistant/components/rflink/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/rflink", "iot_class": "assumed_state", "loggers": ["rflink"], + "quality_scale": "legacy", "requirements": ["rflink==0.0.66"] } diff --git a/homeassistant/components/rflink/sensor.py b/homeassistant/components/rflink/sensor.py index f3c3df7f46b..89632ac50b3 100644 --- a/homeassistant/components/rflink/sensor.py +++ b/homeassistant/components/rflink/sensor.py @@ -40,7 +40,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import ( +from .const import ( CONF_ALIASES, CONF_AUTOMATIC_ADD, DATA_DEVICE_REGISTER, @@ -51,8 +51,8 @@ from . import ( SIGNAL_AVAILABILITY, SIGNAL_HANDLE_EVENT, TMP_ENTITY, - RflinkDevice, ) +from .entity import RflinkDevice SENSOR_TYPES = ( # check new descriptors against PACKET_FIELDS & UNITS from rflink.parser @@ -71,6 +71,8 @@ SENSOR_TYPES = ( native_unit_of_measurement=UnitOfPressure.HPA, ), SensorEntityDescription( + # Rflink devices reports ok/low so device class can’t be used + # It should be migrated to a binary sensor key="battery", name="Battery", icon="mdi:battery", diff --git a/homeassistant/components/rflink/switch.py b/homeassistant/components/rflink/switch.py index af4bbc43700..23b93896878 100644 --- a/homeassistant/components/rflink/switch.py +++ b/homeassistant/components/rflink/switch.py @@ -14,7 +14,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ( +from .const import ( CONF_ALIASES, CONF_DEVICE_DEFAULTS, CONF_FIRE_EVENT, @@ -23,8 +23,8 @@ from . import ( CONF_NOGROUP_ALIASES, CONF_SIGNAL_REPETITIONS, DEVICE_DEFAULTS_SCHEMA, - SwitchableRflinkDevice, ) +from .entity import SwitchableRflinkDevice PARALLEL_UPDATES = 0 diff --git a/homeassistant/components/rflink/utils.py b/homeassistant/components/rflink/utils.py index 9738d9f74fa..7a05c596773 100644 --- a/homeassistant/components/rflink/utils.py +++ b/homeassistant/components/rflink/utils.py @@ -1,5 +1,7 @@ """RFLink integration utils.""" +from .const import EVENT_KEY_COMMAND, EVENT_KEY_SENSOR + def brightness_to_rflink(brightness: int) -> int: """Convert 0-255 brightness to RFLink dim level (0-15).""" @@ -9,3 +11,15 @@ def brightness_to_rflink(brightness: int) -> int: def rflink_to_brightness(dim_level: int) -> int: """Convert RFLink dim level (0-15) to 0-255 brightness.""" return int(dim_level * 17) + + +def identify_event_type(event): + """Look at event to determine type of device. + + Async friendly. 
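
The helpers gathered in the new rflink utils.py are pure functions and easy to sanity-check: the dim-level conversion above and the event-type detection whose body follows just below. A small self-contained demo; the event dicts are made-up samples:

    def brightness_to_rflink(brightness: int) -> int:
        """Convert 0-255 brightness to RFLink dim level (0-15)."""
        return int(brightness / 17)


    def rflink_to_brightness(dim_level: int) -> int:
        """Convert RFLink dim level (0-15) to 0-255 brightness."""
        return int(dim_level * 17)


    def identify_event_type(event: dict) -> str:
        """Classify an event dict by the key it carries."""
        if "command" in event:
            return "command"
        if "sensor" in event:
            return "sensor"
        return "unknown"


    assert brightness_to_rflink(255) == 15
    assert rflink_to_brightness(brightness_to_rflink(128)) == 119  # lossy: 128 -> 7 -> 119
    assert identify_event_type({"id": "protocol_0_1", "command": "on"}) == "command"
    assert identify_event_type({"id": "protocol_0_1", "sensor": "temperature"}) == "sensor"
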
+ """ + if EVENT_KEY_COMMAND in event: + return EVENT_KEY_COMMAND + if EVENT_KEY_SENSOR in event: + return EVENT_KEY_SENSOR + return "unknown" diff --git a/homeassistant/components/rfxtrx/__init__.py b/homeassistant/components/rfxtrx/__init__.py index f3466aa704d..d100999527f 100644 --- a/homeassistant/components/rfxtrx/__init__.py +++ b/homeassistant/components/rfxtrx/__init__.py @@ -25,21 +25,16 @@ from homeassistant.const import ( from homeassistant.core import Event, HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.device_registry import ( - DeviceInfo, - EventDeviceRegistryUpdatedData, -) +from homeassistant.helpers.device_registry import EventDeviceRegistryUpdatedData from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.restore_state import RestoreEntity from .const import ( ATTR_EVENT, - COMMAND_GROUP_LIST, CONF_AUTOMATIC_ADD, CONF_DATA_BITS, CONF_PROTOCOLS, @@ -48,11 +43,11 @@ from .const import ( DOMAIN, EVENT_RFXTRX_EVENT, SERVICE_SEND, + SIGNAL_EVENT, ) DEFAULT_OFF_DELAY = 2.0 -SIGNAL_EVENT = f"{DOMAIN}_event" CONNECT_TIMEOUT = 30.0 _LOGGER = logging.getLogger(__name__) @@ -231,7 +226,7 @@ async def async_setup_internal(hass: HomeAssistant, entry: ConfigEntry) -> None: config = {} config[CONF_DEVICE_ID] = device_id - _LOGGER.info( + _LOGGER.debug( "Added device (Device ID: %s Class: %s Sub: %s, Event: %s)", event.device.id_string.lower(), event.device.__class__.__name__, @@ -416,7 +411,7 @@ def find_possible_pt2262_device(device_ids: set[str], device_id: str) -> str | N size = i if size is not None: size = len(dev_id) - size - 1 - _LOGGER.info( + _LOGGER.debug( ( "Found possible device %s for %s " "with the following configuration:\n" @@ -461,14 +456,6 @@ def get_device_tuple_from_identifiers( return DeviceTuple(identifier2[1], identifier2[2], identifier2[3]) -def get_identifiers_from_device_tuple( - device_tuple: DeviceTuple, -) -> set[tuple[str, str]]: - """Calculate the device identifier from a device tuple.""" - # work around legacy identifier, being a multi tuple value - return {(DOMAIN, *device_tuple)} # type: ignore[arg-type] - - async def async_remove_config_entry_device( hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry ) -> bool: @@ -477,102 +464,3 @@ async def async_remove_config_entry_device( The actual cleanup is done in the device registry event """ return True - - -class RfxtrxEntity(RestoreEntity): - """Represents a Rfxtrx device. - - Contains the common logic for Rfxtrx lights and switches. 
- """ - - _attr_assumed_state = True - _attr_has_entity_name = True - _attr_should_poll = False - _device: rfxtrxmod.RFXtrxDevice - _event: rfxtrxmod.RFXtrxEvent | None - - def __init__( - self, - device: rfxtrxmod.RFXtrxDevice, - device_id: DeviceTuple, - event: rfxtrxmod.RFXtrxEvent | None = None, - ) -> None: - """Initialize the device.""" - self._attr_device_info = DeviceInfo( - identifiers=get_identifiers_from_device_tuple(device_id), - model=device.type_string, - name=f"{device.type_string} {device.id_string}", - ) - self._attr_unique_id = "_".join(x for x in device_id) - self._device = device - self._event = event - self._device_id = device_id - # If id_string is 213c7f2:1, the group_id is 213c7f2, and the device will respond to - # group events regardless of their group indices. - (self._group_id, _, _) = cast(str, device.id_string).partition(":") - - async def async_added_to_hass(self) -> None: - """Restore RFXtrx device state (ON/OFF).""" - if self._event: - self._apply_event(self._event) - - self.async_on_remove( - async_dispatcher_connect(self.hass, SIGNAL_EVENT, self._handle_event) - ) - - @property - def extra_state_attributes(self) -> dict[str, str] | None: - """Return the device state attributes.""" - if not self._event: - return None - return {ATTR_EVENT: "".join(f"{x:02x}" for x in self._event.data)} - - def _event_applies( - self, event: rfxtrxmod.RFXtrxEvent, device_id: DeviceTuple - ) -> bool: - """Check if event applies to me.""" - if isinstance(event, rfxtrxmod.ControlEvent): - if ( - "Command" in event.values - and event.values["Command"] in COMMAND_GROUP_LIST - ): - device: rfxtrxmod.RFXtrxDevice = event.device - (group_id, _, _) = cast(str, device.id_string).partition(":") - return group_id == self._group_id - - # Otherwise, the event only applies to the matching device. - return device_id == self._device_id - - def _apply_event(self, event: rfxtrxmod.RFXtrxEvent) -> None: - """Apply a received event.""" - self._event = event - - @callback - def _handle_event( - self, event: rfxtrxmod.RFXtrxEvent, device_id: DeviceTuple - ) -> None: - """Handle a reception of data, overridden by other classes.""" - - -class RfxtrxCommandEntity(RfxtrxEntity): - """Represents a Rfxtrx device. - - Contains the common logic for Rfxtrx lights and switches. - """ - - _attr_name = None - - def __init__( - self, - device: rfxtrxmod.RFXtrxDevice, - device_id: DeviceTuple, - event: rfxtrxmod.RFXtrxEvent | None = None, - ) -> None: - """Initialzie a switch or light device.""" - super().__init__(device, device_id, event=event) - - async def _async_send[*_Ts]( - self, fun: Callable[[rfxtrxmod.PySerialTransport, *_Ts], None], *args: *_Ts - ) -> None: - rfx_object: rfxtrxmod.Connect = self.hass.data[DOMAIN][DATA_RFXOBJECT] - await self.hass.async_add_executor_job(fun, rfx_object.transport, *args) diff --git a/homeassistant/components/rfxtrx/binary_sensor.py b/homeassistant/components/rfxtrx/binary_sensor.py index 03c22167358..316cf44ef0d 100644 --- a/homeassistant/components/rfxtrx/binary_sensor.py +++ b/homeassistant/components/rfxtrx/binary_sensor.py @@ -19,7 +19,7 @@ from homeassistant.helpers import event as evt from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DeviceTuple, RfxtrxEntity, async_setup_platform_entry, get_pt2262_cmd +from . 
import DeviceTuple, async_setup_platform_entry, get_pt2262_cmd from .const import ( COMMAND_OFF_LIST, COMMAND_ON_LIST, @@ -27,6 +27,7 @@ from .const import ( CONF_OFF_DELAY, DEVICE_PACKET_TYPE_LIGHTING4, ) +from .entity import RfxtrxEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/rfxtrx/config_flow.py b/homeassistant/components/rfxtrx/config_flow.py index ceb9bea4661..866d9ecb1bb 100644 --- a/homeassistant/components/rfxtrx/config_flow.py +++ b/homeassistant/components/rfxtrx/config_flow.py @@ -87,9 +87,8 @@ class RfxtrxOptionsFlow(OptionsFlow): _device_registry: dr.DeviceRegistry _device_entries: list[dr.DeviceEntry] - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize rfxtrx options flow.""" - self._config_entry = config_entry self._global_options: dict[str, Any] = {} self._selected_device: dict[str, Any] = {} self._selected_device_entry_id: str | None = None @@ -120,9 +119,7 @@ class RfxtrxOptionsFlow(OptionsFlow): event_code = device_data["event_code"] assert event_code self._selected_device_event_code = event_code - self._selected_device = self._config_entry.data[CONF_DEVICES][ - event_code - ] + self._selected_device = self.config_entry.data[CONF_DEVICES][event_code] self._selected_device_object = get_rfx_object(event_code) return await self.async_step_set_device_options() if CONF_EVENT_CODE in user_input: @@ -148,7 +145,7 @@ class RfxtrxOptionsFlow(OptionsFlow): device_registry = dr.async_get(self.hass) device_entries = dr.async_entries_for_config_entry( - device_registry, self._config_entry.entry_id + device_registry, self.config_entry.entry_id ) self._device_registry = device_registry self._device_entries = device_entries @@ -162,11 +159,11 @@ class RfxtrxOptionsFlow(OptionsFlow): options = { vol.Optional( CONF_AUTOMATIC_ADD, - default=self._config_entry.data[CONF_AUTOMATIC_ADD], + default=self.config_entry.data[CONF_AUTOMATIC_ADD], ): bool, vol.Optional( CONF_PROTOCOLS, - default=self._config_entry.data.get(CONF_PROTOCOLS) or [], + default=self.config_entry.data.get(CONF_PROTOCOLS) or [], ): cv.multi_select(RECV_MODES), vol.Optional(CONF_EVENT_CODE): str, vol.Optional(CONF_DEVICE): vol.In(configure_devices), @@ -425,7 +422,7 @@ class RfxtrxOptionsFlow(OptionsFlow): def _can_add_device(self, new_rfx_obj: rfxtrxmod.RFXtrxEvent) -> bool: """Check if device does not already exist.""" new_device_id = get_device_id(new_rfx_obj.device) - for packet_id, entity_info in self._config_entry.data[CONF_DEVICES].items(): + for packet_id, entity_info in self.config_entry.data[CONF_DEVICES].items(): rfx_obj = get_rfx_object(packet_id) assert rfx_obj @@ -468,7 +465,7 @@ class RfxtrxOptionsFlow(OptionsFlow): assert entry device_id = get_device_tuple_from_identifiers(entry.identifiers) assert device_id - for packet_id, entity_info in self._config_entry.data[CONF_DEVICES].items(): + for packet_id, entity_info in self.config_entry.data[CONF_DEVICES].items(): if tuple(entity_info.get(CONF_DEVICE_ID)) == device_id: event_code = cast(str, packet_id) break @@ -481,8 +478,8 @@ class RfxtrxOptionsFlow(OptionsFlow): devices: dict[str, Any] | None = None, ) -> None: """Update data in ConfigEntry.""" - entry_data = self._config_entry.data.copy() - entry_data[CONF_DEVICES] = copy.deepcopy(self._config_entry.data[CONF_DEVICES]) + entry_data = self.config_entry.data.copy() + entry_data[CONF_DEVICES] = copy.deepcopy(self.config_entry.data[CONF_DEVICES]) if global_options: entry_data.update(global_options) if devices: @@ -494,9 
+491,9 @@ class RfxtrxOptionsFlow(OptionsFlow): entry_data[CONF_DEVICES].pop(event_code, None) else: entry_data[CONF_DEVICES][event_code] = options - self.hass.config_entries.async_update_entry(self._config_entry, data=entry_data) + self.hass.config_entries.async_update_entry(self.config_entry, data=entry_data) self.hass.async_create_task( - self.hass.config_entries.async_reload(self._config_entry.entry_id) + self.hass.config_entries.async_reload(self.config_entry.entry_id) ) @@ -637,9 +634,11 @@ class RfxtrxConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> RfxtrxOptionsFlow: """Get the options flow for this handler.""" - return RfxtrxOptionsFlow(config_entry) + return RfxtrxOptionsFlow() def _test_transport(host: str | None, port: int | None, device: str | None) -> bool: diff --git a/homeassistant/components/rfxtrx/const.py b/homeassistant/components/rfxtrx/const.py index 7a6e333d3db..f932c825f75 100644 --- a/homeassistant/components/rfxtrx/const.py +++ b/homeassistant/components/rfxtrx/const.py @@ -46,3 +46,5 @@ EVENT_RFXTRX_EVENT = "rfxtrx_event" DATA_RFXOBJECT = "rfxobject" DOMAIN = "rfxtrx" + +SIGNAL_EVENT = f"{DOMAIN}_event" diff --git a/homeassistant/components/rfxtrx/cover.py b/homeassistant/components/rfxtrx/cover.py index 9e9e5a090e4..473a0d94056 100644 --- a/homeassistant/components/rfxtrx/cover.py +++ b/homeassistant/components/rfxtrx/cover.py @@ -7,14 +7,13 @@ from typing import Any import RFXtrx as rfxtrxmod -from homeassistant.components.cover import CoverEntity, CoverEntityFeature +from homeassistant.components.cover import CoverEntity, CoverEntityFeature, CoverState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_OPEN from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DeviceTuple, RfxtrxCommandEntity, async_setup_platform_entry +from . 
import DeviceTuple, async_setup_platform_entry from .const import ( COMMAND_OFF_LIST, COMMAND_ON_LIST, @@ -22,6 +21,7 @@ from .const import ( CONST_VENETIAN_BLIND_MODE_EU, CONST_VENETIAN_BLIND_MODE_US, ) +from .entity import RfxtrxCommandEntity _LOGGER = logging.getLogger(__name__) @@ -96,7 +96,7 @@ class RfxtrxCover(RfxtrxCommandEntity, CoverEntity): if self._event is None: old_state = await self.async_get_last_state() if old_state is not None: - self._attr_is_closed = old_state.state != STATE_OPEN + self._attr_is_closed = old_state.state != CoverState.OPEN async def async_open_cover(self, **kwargs: Any) -> None: """Move the cover up.""" diff --git a/homeassistant/components/rfxtrx/device_action.py b/homeassistant/components/rfxtrx/device_action.py index 65cf1a11911..405daa37ec5 100644 --- a/homeassistant/components/rfxtrx/device_action.py +++ b/homeassistant/components/rfxtrx/device_action.py @@ -6,9 +6,7 @@ from collections.abc import Callable import voluptuous as vol -from homeassistant.components.device_automation.exceptions import ( - InvalidDeviceAutomationConfig, -) +from homeassistant.components.device_automation import InvalidDeviceAutomationConfig from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_TYPE from homeassistant.core import Context, HomeAssistant import homeassistant.helpers.config_validation as cv diff --git a/homeassistant/components/rfxtrx/device_trigger.py b/homeassistant/components/rfxtrx/device_trigger.py index 9e42cfa3919..35c1944948b 100644 --- a/homeassistant/components/rfxtrx/device_trigger.py +++ b/homeassistant/components/rfxtrx/device_trigger.py @@ -4,8 +4,8 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event as event_trigger diff --git a/homeassistant/components/rfxtrx/entity.py b/homeassistant/components/rfxtrx/entity.py new file mode 100644 index 00000000000..b5752e366bc --- /dev/null +++ b/homeassistant/components/rfxtrx/entity.py @@ -0,0 +1,123 @@ +"""Support for RFXtrx devices.""" + +from __future__ import annotations + +from collections.abc import Callable +from typing import cast + +import RFXtrx as rfxtrxmod + +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.restore_state import RestoreEntity + +from . import DeviceTuple +from .const import ATTR_EVENT, COMMAND_GROUP_LIST, DATA_RFXOBJECT, DOMAIN, SIGNAL_EVENT + + +def _get_identifiers_from_device_tuple( + device_tuple: DeviceTuple, +) -> set[tuple[str, str]]: + """Calculate the device identifier from a device tuple.""" + # work around legacy identifier, being a multi tuple value + return {(DOMAIN, *device_tuple)} # type: ignore[arg-type] + + +class RfxtrxEntity(RestoreEntity): + """Represents a Rfxtrx device. + + Contains the common logic for Rfxtrx lights and switches. 
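
RfxtrxEntity (moved into entity.py just below) derives a group id by splitting the device id_string on ":" and matches group commands on that prefix, while ordinary commands must match the exact device id. A simplified, self-contained sketch of that rule; the command names and ids are made-up stand-ins for COMMAND_GROUP_LIST and real RFXtrx ids:

    GROUP_COMMANDS = {"Group on", "Group off"}  # stand-in for COMMAND_GROUP_LIST


    def group_id(id_string: str) -> str:
        """'213c7f2:1' -> '213c7f2'; ids without ':' are returned unchanged."""
        prefix, _, _ = id_string.partition(":")
        return prefix


    def event_applies(event_command: str, event_id: str, my_id: str) -> bool:
        """Group commands match on the group prefix, others on the exact id."""
        if event_command in GROUP_COMMANDS:
            return group_id(event_id) == group_id(my_id)
        return event_id == my_id


    assert event_applies("Group on", "213c7f2:3", "213c7f2:1")  # same group
    assert not event_applies("On", "213c7f2:3", "213c7f2:1")  # different unit
    assert event_applies("On", "213c7f2:1", "213c7f2:1")  # exact match
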
+ """ + + _attr_assumed_state = True + _attr_has_entity_name = True + _attr_should_poll = False + _device: rfxtrxmod.RFXtrxDevice + _event: rfxtrxmod.RFXtrxEvent | None + + def __init__( + self, + device: rfxtrxmod.RFXtrxDevice, + device_id: DeviceTuple, + event: rfxtrxmod.RFXtrxEvent | None = None, + ) -> None: + """Initialize the device.""" + self._attr_device_info = DeviceInfo( + identifiers=_get_identifiers_from_device_tuple(device_id), + model=device.type_string, + name=f"{device.type_string} {device.id_string}", + ) + self._attr_unique_id = "_".join(x for x in device_id) + self._device = device + self._event = event + self._device_id = device_id + # If id_string is 213c7f2:1, the group_id is 213c7f2, and the device will respond to + # group events regardless of their group indices. + (self._group_id, _, _) = cast(str, device.id_string).partition(":") + + async def async_added_to_hass(self) -> None: + """Restore RFXtrx device state (ON/OFF).""" + if self._event: + self._apply_event(self._event) + + self.async_on_remove( + async_dispatcher_connect(self.hass, SIGNAL_EVENT, self._handle_event) + ) + + @property + def extra_state_attributes(self) -> dict[str, str] | None: + """Return the device state attributes.""" + if not self._event: + return None + return {ATTR_EVENT: "".join(f"{x:02x}" for x in self._event.data)} + + def _event_applies( + self, event: rfxtrxmod.RFXtrxEvent, device_id: DeviceTuple + ) -> bool: + """Check if event applies to me.""" + if isinstance(event, rfxtrxmod.ControlEvent): + if ( + "Command" in event.values + and event.values["Command"] in COMMAND_GROUP_LIST + ): + device: rfxtrxmod.RFXtrxDevice = event.device + (group_id, _, _) = cast(str, device.id_string).partition(":") + return group_id == self._group_id + + # Otherwise, the event only applies to the matching device. + return device_id == self._device_id + + def _apply_event(self, event: rfxtrxmod.RFXtrxEvent) -> None: + """Apply a received event.""" + self._event = event + + @callback + def _handle_event( + self, event: rfxtrxmod.RFXtrxEvent, device_id: DeviceTuple + ) -> None: + """Handle a reception of data, overridden by other classes.""" + + +class RfxtrxCommandEntity(RfxtrxEntity): + """Represents a Rfxtrx device. + + Contains the common logic for Rfxtrx lights and switches. + """ + + _attr_name = None + + def __init__( + self, + device: rfxtrxmod.RFXtrxDevice, + device_id: DeviceTuple, + event: rfxtrxmod.RFXtrxEvent | None = None, + ) -> None: + """Initialzie a switch or light device.""" + super().__init__(device, device_id, event=event) + + async def _async_send[*_Ts]( + self, fun: Callable[[rfxtrxmod.PySerialTransport, *_Ts], None], *args: *_Ts + ) -> None: + rfx_object: rfxtrxmod.Connect = self.hass.data[DOMAIN][DATA_RFXOBJECT] + await self.hass.async_add_executor_job(fun, rfx_object.transport, *args) diff --git a/homeassistant/components/rfxtrx/event.py b/homeassistant/components/rfxtrx/event.py index 5c3944dc74b..212d93b5019 100644 --- a/homeassistant/components/rfxtrx/event.py +++ b/homeassistant/components/rfxtrx/event.py @@ -14,8 +14,9 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import slugify -from . import DeviceTuple, RfxtrxEntity, async_setup_platform_entry +from . 
import DeviceTuple, async_setup_platform_entry from .const import DEVICE_PACKET_TYPE_LIGHTING4 +from .entity import RfxtrxEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/rfxtrx/icons.json b/homeassistant/components/rfxtrx/icons.json index c1b8e741e45..cbc48cf2105 100644 --- a/homeassistant/components/rfxtrx/icons.json +++ b/homeassistant/components/rfxtrx/icons.json @@ -1,5 +1,7 @@ { "services": { - "send": "mdi:send" + "send": { + "service": "mdi:send" + } } } diff --git a/homeassistant/components/rfxtrx/light.py b/homeassistant/components/rfxtrx/light.py index f9bbbc28a8d..0e2f7bef65a 100644 --- a/homeassistant/components/rfxtrx/light.py +++ b/homeassistant/components/rfxtrx/light.py @@ -14,8 +14,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DeviceTuple, RfxtrxCommandEntity, async_setup_platform_entry +from . import DeviceTuple, async_setup_platform_entry from .const import COMMAND_OFF_LIST, COMMAND_ON_LIST +from .entity import RfxtrxCommandEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/rfxtrx/sensor.py b/homeassistant/components/rfxtrx/sensor.py index 46a3f021122..4f8ae9767e2 100644 --- a/homeassistant/components/rfxtrx/sensor.py +++ b/homeassistant/components/rfxtrx/sensor.py @@ -39,8 +39,9 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import DeviceTuple, RfxtrxEntity, async_setup_platform_entry, get_rfx_object +from . import DeviceTuple, async_setup_platform_entry, get_rfx_object from .const import ATTR_EVENT +from .entity import RfxtrxEntity _LOGGER = logging.getLogger(__name__) @@ -181,13 +182,11 @@ SENSOR_TYPES = ( key="Count", translation_key="count", state_class=SensorStateClass.TOTAL_INCREASING, - native_unit_of_measurement="count", ), RfxtrxSensorEntityDescription( key="Counter value", translation_key="counter_value", state_class=SensorStateClass.TOTAL_INCREASING, - native_unit_of_measurement="count", ), RfxtrxSensorEntityDescription( key="Chill", diff --git a/homeassistant/components/rfxtrx/siren.py b/homeassistant/components/rfxtrx/siren.py index 67a0c6b7dce..1635f1f55a9 100644 --- a/homeassistant/components/rfxtrx/siren.py +++ b/homeassistant/components/rfxtrx/siren.py @@ -14,13 +14,9 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later -from . import ( - DEFAULT_OFF_DELAY, - DeviceTuple, - RfxtrxCommandEntity, - async_setup_platform_entry, -) +from . import DEFAULT_OFF_DELAY, DeviceTuple, async_setup_platform_entry from .const import CONF_OFF_DELAY +from .entity import RfxtrxCommandEntity SECURITY_PANIC_ON = "Panic" SECURITY_PANIC_OFF = "End Panic" @@ -93,7 +89,7 @@ async def async_setup_entry( ) -class RfxtrxOffDelayMixin(Entity): +class RfxtrxOffDelayMixin(Entity): # pylint: disable=hass-enforce-class-module """Mixin to support timeouts on data. Many 433 devices only send data when active. 
They will diff --git a/homeassistant/components/rfxtrx/switch.py b/homeassistant/components/rfxtrx/switch.py index fad395f41c2..1464cccb5c4 100644 --- a/homeassistant/components/rfxtrx/switch.py +++ b/homeassistant/components/rfxtrx/switch.py @@ -14,19 +14,15 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ( - DOMAIN, - DeviceTuple, - RfxtrxCommandEntity, - async_setup_platform_entry, - get_pt2262_cmd, -) +from . import DeviceTuple, async_setup_platform_entry, get_pt2262_cmd from .const import ( COMMAND_OFF_LIST, COMMAND_ON_LIST, CONF_DATA_BITS, DEVICE_PACKET_TYPE_LIGHTING4, + DOMAIN, ) +from .entity import RfxtrxCommandEntity DATA_SWITCH = f"{DOMAIN}_switch" diff --git a/homeassistant/components/rhasspy/config_flow.py b/homeassistant/components/rhasspy/config_flow.py index 114d74d4d05..ea79f6b8845 100644 --- a/homeassistant/components/rhasspy/config_flow.py +++ b/homeassistant/components/rhasspy/config_flow.py @@ -20,9 +20,6 @@ class RhasspyConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is None: return self.async_show_form(step_id="user", data_schema=vol.Schema({})) diff --git a/homeassistant/components/rhasspy/manifest.json b/homeassistant/components/rhasspy/manifest.json index 2675935618c..f3496f7eeab 100644 --- a/homeassistant/components/rhasspy/manifest.json +++ b/homeassistant/components/rhasspy/manifest.json @@ -5,5 +5,6 @@ "config_flow": true, "dependencies": ["intent"], "documentation": "https://www.home-assistant.io/integrations/rhasspy", - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true } diff --git a/homeassistant/components/rhasspy/strings.json b/homeassistant/components/rhasspy/strings.json index 4d2111ebd8a..3d574d30117 100644 --- a/homeassistant/components/rhasspy/strings.json +++ b/homeassistant/components/rhasspy/strings.json @@ -4,9 +4,6 @@ "user": { "description": "Do you want to enable Rhasspy support?" 
} - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } } } diff --git a/homeassistant/components/ridwell/__init__.py b/homeassistant/components/ridwell/__init__.py index cf584207091..71e80086833 100644 --- a/homeassistant/components/ridwell/__init__.py +++ b/homeassistant/components/ridwell/__init__.py @@ -55,6 +55,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await er.async_migrate_entries(hass, entry.entry_id, migrate_unique_id) - LOGGER.info("Migration to version %s successful", version) + LOGGER.debug("Migration to version %s successful", version) return True diff --git a/homeassistant/components/ridwell/config_flow.py b/homeassistant/components/ridwell/config_flow.py index a54d4debe75..f03679c8315 100644 --- a/homeassistant/components/ridwell/config_flow.py +++ b/homeassistant/components/ridwell/config_flow.py @@ -93,6 +93,9 @@ class RidwellConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle re-auth completion.""" if not user_input: + if TYPE_CHECKING: + assert self._username + return self.async_show_form( step_id="reauth_confirm", data_schema=STEP_REAUTH_CONFIRM_DATA_SCHEMA, diff --git a/homeassistant/components/ring/__init__.py b/homeassistant/components/ring/__init__.py index 14ab435fda6..edc084fb57b 100644 --- a/homeassistant/components/ring/__init__.py +++ b/homeassistant/components/ring/__init__.py @@ -5,18 +5,19 @@ from __future__ import annotations from dataclasses import dataclass import logging from typing import Any, cast +import uuid from ring_doorbell import Auth, Ring, RingDevices +from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN from homeassistant.config_entries import ConfigEntry -from homeassistant.const import APPLICATION_NAME, CONF_TOKEN, __version__ -from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.const import APPLICATION_NAME, CONF_DEVICE_ID, CONF_TOKEN +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from .const import DOMAIN, PLATFORMS -from .coordinator import RingDataCoordinator, RingNotificationsCoordinator +from .const import CONF_LISTEN_CREDENTIALS, DOMAIN, PLATFORMS +from .coordinator import RingDataCoordinator, RingListenCoordinator _LOGGER = logging.getLogger(__name__) @@ -28,10 +29,21 @@ class RingData: api: Ring devices: RingDevices devices_coordinator: RingDataCoordinator - notifications_coordinator: RingNotificationsCoordinator + listen_coordinator: RingListenCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type RingConfigEntry = ConfigEntry[RingData] + + +def get_auth_user_agent() -> str: + """Return user-agent for Auth instantiation. + + user_agent will be the display name in the ring.com authorised devices. 
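
The Ring integration now identifies itself with a fixed user agent (the display name shown under authorised devices on ring.com, per the docstring above) plus a per-config-entry hardware id that the migration further down generates once with uuid.uuid4() and persists. A rough sketch of that identity bookkeeping; the constant values and the "device_id"/"token" keys are assumptions standing in for the Home Assistant constants:

    import uuid

    APPLICATION_NAME = "HomeAssistant"  # assumed value of homeassistant.const.APPLICATION_NAME
    DOMAIN = "ring"


    def get_auth_user_agent() -> str:
        """User agent that shows up as the device name on ring.com."""
        return f"{APPLICATION_NAME}/{DOMAIN}-integration"


    def ensure_hardware_id(entry_data: dict) -> dict:
        """Create the hardware id once, then keep reusing the stored value."""
        if "device_id" not in entry_data:
            entry_data = {**entry_data, "device_id": str(uuid.uuid4())}
        return entry_data


    data = ensure_hardware_id({"token": {"access_token": "..."}})
    print(get_auth_user_agent(), data["device_id"])
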
+ """ + return f"{APPLICATION_NAME}/{DOMAIN}-integration" + + +async def async_setup_entry(hass: HomeAssistant, entry: RingConfigEntry) -> bool: """Set up a config entry.""" def token_updater(token: dict[str, Any]) -> None: @@ -41,77 +53,47 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: data={**entry.data, CONF_TOKEN: token}, ) + def listen_credentials_updater(token: dict[str, Any]) -> None: + """Handle from async context when token is updated.""" + hass.config_entries.async_update_entry( + entry, + data={**entry.data, CONF_LISTEN_CREDENTIALS: token}, + ) + + user_agent = get_auth_user_agent() + client_session = async_get_clientsession(hass) auth = Auth( - f"{APPLICATION_NAME}/{__version__}", + user_agent, entry.data[CONF_TOKEN], token_updater, - http_client_session=async_get_clientsession(hass), + hardware_id=entry.data[CONF_DEVICE_ID], + http_client_session=client_session, ) ring = Ring(auth) - await _migrate_old_unique_ids(hass, entry.entry_id) - devices_coordinator = RingDataCoordinator(hass, ring) - notifications_coordinator = RingNotificationsCoordinator(hass, ring) - await devices_coordinator.async_config_entry_first_refresh() - await notifications_coordinator.async_config_entry_first_refresh() + listen_credentials = entry.data.get(CONF_LISTEN_CREDENTIALS) + listen_coordinator = RingListenCoordinator( + hass, ring, listen_credentials, listen_credentials_updater + ) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = RingData( + await devices_coordinator.async_config_entry_first_refresh() + + entry.runtime_data = RingData( api=ring, devices=ring.devices(), devices_coordinator=devices_coordinator, - notifications_coordinator=notifications_coordinator, + listen_coordinator=listen_coordinator, ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - if hass.services.has_service(DOMAIN, "update"): - return True - - async def async_refresh_all(_: ServiceCall) -> None: - """Refresh all ring data.""" - _LOGGER.warning( - "Detected use of service 'ring.update'. " - "This is deprecated and will stop working in Home Assistant 2024.10. 
" - "Use 'homeassistant.update_entity' instead which updates all ring entities", - ) - async_create_issue( - hass, - DOMAIN, - "deprecated_service_ring_update", - breaks_in_ha_version="2024.10.0", - is_fixable=True, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_service_ring_update", - ) - - for info in hass.data[DOMAIN].values(): - ring_data = cast(RingData, info) - await ring_data.devices_coordinator.async_refresh() - await ring_data.notifications_coordinator.async_refresh() - - # register service - hass.services.async_register(DOMAIN, "update", async_refresh_all) - return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload Ring entry.""" - if not await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - return False - - hass.data[DOMAIN].pop(entry.entry_id) - - if len(hass.data[DOMAIN]) != 0: - return True - - # Last entry unloaded, clean up service - hass.services.async_remove(DOMAIN, "update") - - return True + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def async_remove_config_entry_device( @@ -121,27 +103,83 @@ async def async_remove_config_entry_device( return True -async def _migrate_old_unique_ids(hass: HomeAssistant, entry_id: str) -> None: - entity_registry = er.async_get(hass) +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate old config entry.""" + entry_version = entry.version + entry_minor_version = entry.minor_version + entry_id = entry.entry_id - @callback - def _async_migrator(entity_entry: er.RegistryEntry) -> dict[str, str] | None: - # Old format for camera and light was int - unique_id = cast(str | int, entity_entry.unique_id) - if isinstance(unique_id, int): - new_unique_id = str(unique_id) - if existing_entity_id := entity_registry.async_get_entity_id( - entity_entry.domain, entity_entry.platform, new_unique_id + new_minor_version = 2 + if entry_version == 1 and entry_minor_version == 1: + _LOGGER.debug( + "Migrating from version %s.%s", entry_version, entry_minor_version + ) + # Migrate non-str unique ids + # This step used to run unconditionally from async_setup_entry + entity_registry = er.async_get(hass) + + @callback + def _async_str_unique_id_migrator( + entity_entry: er.RegistryEntry, + ) -> dict[str, str] | None: + # Old format for camera and light was int + unique_id = cast(str | int, entity_entry.unique_id) + if isinstance(unique_id, int): + new_unique_id = str(unique_id) + if existing_entity_id := entity_registry.async_get_entity_id( + entity_entry.domain, entity_entry.platform, new_unique_id + ): + _LOGGER.error( + "Cannot migrate to unique_id '%s', already exists for '%s', " + "You may have to delete unavailable ring entities", + new_unique_id, + existing_entity_id, + ) + return None + _LOGGER.debug("Fixing non string unique id %s", entity_entry.unique_id) + return {"new_unique_id": new_unique_id} + return None + + await er.async_migrate_entries(hass, entry_id, _async_str_unique_id_migrator) + + # Migrate the hardware id + hardware_id = str(uuid.uuid4()) + hass.config_entries.async_update_entry( + entry, + data={**entry.data, CONF_DEVICE_ID: hardware_id}, + minor_version=new_minor_version, + ) + _LOGGER.debug( + "Migration to version %s.%s complete", entry_version, new_minor_version + ) + + entry_minor_version = entry.minor_version + new_minor_version = 3 + if entry_version == 1 and entry_minor_version == 2: + _LOGGER.debug( + "Migrating from version %s.%s", 
entry_version, entry_minor_version + ) + + @callback + def _async_camera_unique_id_migrator( + entity_entry: er.RegistryEntry, + ) -> dict[str, str] | None: + # Migrate camera unique ids to append -last + if entity_entry.domain == CAMERA_DOMAIN and not isinstance( + cast(str | int, entity_entry.unique_id), int ): - _LOGGER.error( - "Cannot migrate to unique_id '%s', already exists for '%s', " - "You may have to delete unavailable ring entities", - new_unique_id, - existing_entity_id, - ) - return None - _LOGGER.info("Fixing non string unique id %s", entity_entry.unique_id) - return {"new_unique_id": new_unique_id} - return None + new_unique_id = f"{entity_entry.unique_id}-last_recording" + return {"new_unique_id": new_unique_id} + return None - await er.async_migrate_entries(hass, entry_id, _async_migrator) + await er.async_migrate_entries(hass, entry_id, _async_camera_unique_id_migrator) + + hass.config_entries.async_update_entry( + entry, + minor_version=new_minor_version, + ) + _LOGGER.debug( + "Migration to version %s.%s complete", entry_version, new_minor_version + ) + + return True diff --git a/homeassistant/components/ring/binary_sensor.py b/homeassistant/components/ring/binary_sensor.py index 2db04cfd461..85a916e95cd 100644 --- a/homeassistant/components/ring/binary_sensor.py +++ b/homeassistant/components/ring/binary_sensor.py @@ -2,123 +2,151 @@ from __future__ import annotations -from collections.abc import Callable, Mapping +from collections.abc import Mapping from dataclasses import dataclass from datetime import datetime -from typing import Any +from typing import Any, Generic -from ring_doorbell import Ring, RingEvent, RingGeneric +from ring_doorbell import RingCapability, RingEvent +from ring_doorbell.const import KIND_DING, KIND_MOTION from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_call_at -from . import RingData -from .const import DOMAIN -from .coordinator import RingNotificationsCoordinator -from .entity import RingBaseEntity +from . import RingConfigEntry +from .coordinator import RingListenCoordinator +from .entity import ( + DeprecatedInfo, + RingBaseEntity, + RingDeviceT, + RingEntityDescription, + async_check_create_deprecated, +) @dataclass(frozen=True, kw_only=True) -class RingBinarySensorEntityDescription(BinarySensorEntityDescription): +class RingBinarySensorEntityDescription( + BinarySensorEntityDescription, RingEntityDescription, Generic[RingDeviceT] +): """Describes Ring binary sensor entity.""" - exists_fn: Callable[[RingGeneric], bool] + capability: RingCapability BINARY_SENSOR_TYPES: tuple[RingBinarySensorEntityDescription, ...] 
= ( RingBinarySensorEntityDescription( - key="ding", - translation_key="ding", + key=KIND_DING, + translation_key=KIND_DING, device_class=BinarySensorDeviceClass.OCCUPANCY, - exists_fn=lambda device: device.family - in {"doorbots", "authorized_doorbots", "other"}, + capability=RingCapability.DING, + deprecated_info=DeprecatedInfo( + new_platform=Platform.EVENT, breaks_in_ha_version="2025.4.0" + ), ), RingBinarySensorEntityDescription( - key="motion", + key=KIND_MOTION, + translation_key=KIND_MOTION, device_class=BinarySensorDeviceClass.MOTION, - exists_fn=lambda device: device.family - in {"doorbots", "authorized_doorbots", "stickup_cams"}, + capability=RingCapability.MOTION_DETECTION, + deprecated_info=DeprecatedInfo( + new_platform=Platform.EVENT, breaks_in_ha_version="2025.4.0" + ), ), ) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Ring binary sensors from a config entry.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data + listen_coordinator = ring_data.listen_coordinator - entities = [ - RingBinarySensor( - ring_data.api, - device, - ring_data.notifications_coordinator, - description, - ) + async_add_entities( + RingBinarySensor(device, listen_coordinator, description) for description in BINARY_SENSOR_TYPES for device in ring_data.devices.all_devices - if description.exists_fn(device) - ] - - async_add_entities(entities) + if device.has_capability(description.capability) + and async_check_create_deprecated( + hass, + Platform.BINARY_SENSOR, + f"{device.id}-{description.key}", + description, + ) + ) class RingBinarySensor( - RingBaseEntity[RingNotificationsCoordinator], BinarySensorEntity + RingBaseEntity[RingListenCoordinator, RingDeviceT], BinarySensorEntity ): """A binary sensor implementation for Ring device.""" _active_alert: RingEvent | None = None - entity_description: RingBinarySensorEntityDescription + RingBinarySensorEntityDescription[RingDeviceT] def __init__( self, - ring: Ring, - device: RingGeneric, - coordinator: RingNotificationsCoordinator, - description: RingBinarySensorEntityDescription, + device: RingDeviceT, + coordinator: RingListenCoordinator, + description: RingBinarySensorEntityDescription[RingDeviceT], ) -> None: - """Initialize a sensor for Ring device.""" + """Initialize a binary sensor for Ring device.""" super().__init__( device, coordinator, ) self.entity_description = description - self._ring = ring self._attr_unique_id = f"{device.id}-{description.key}" - self._update_alert() + self._attr_is_on = False + self._active_alert: RingEvent | None = None + self._cancel_callback: CALLBACK_TYPE | None = None @callback - def _handle_coordinator_update(self, _: Any = None) -> None: - """Call update method.""" - self._update_alert() - super()._handle_coordinator_update() + def _async_handle_event(self, alert: RingEvent) -> None: + """Handle the event.""" + self._attr_is_on = True + self._active_alert = alert + loop = self.hass.loop + when = loop.time() + alert.expires_in + if self._cancel_callback: + self._cancel_callback() + self._cancel_callback = async_call_at(self.hass, self._async_cancel_event, when) @callback - def _update_alert(self) -> None: - """Update active alert.""" - self._active_alert = next( - ( - alert - for alert in self._ring.active_alerts() - if alert["kind"] == self.entity_description.key - and alert["doorbot_id"] == self._device.id - ), - None, + def 
_async_cancel_event(self, _now: Any) -> None: + """Clear the event.""" + self._cancel_callback = None + self._attr_is_on = False + self._active_alert = None + self.async_write_ha_state() + + def _get_coordinator_alert(self) -> RingEvent | None: + return self.coordinator.alerts.get( + (self._device.device_api_id, self.entity_description.key) ) + @callback + def _handle_coordinator_update(self) -> None: + if alert := self._get_coordinator_alert(): + self._async_handle_event(alert) + super()._handle_coordinator_update() + @property - def is_on(self) -> bool: - """Return True if the binary sensor is on.""" - return self._active_alert is not None + def available(self) -> bool: + """Return if entity is available.""" + return self.coordinator.event_listener.started + + async def async_update(self) -> None: + """All updates are passive.""" @property def extra_state_attributes(self) -> Mapping[str, Any] | None: @@ -129,9 +157,9 @@ class RingBinarySensor( return attrs assert isinstance(attrs, dict) - attrs["state"] = self._active_alert["state"] - now = self._active_alert.get("now") - expires_in = self._active_alert.get("expires_in") + attrs["state"] = self._active_alert.state + now = self._active_alert.now + expires_in = self._active_alert.expires_in assert now and expires_in attrs["expires_at"] = datetime.fromtimestamp(now + expires_in).isoformat() diff --git a/homeassistant/components/ring/button.py b/homeassistant/components/ring/button.py index c8d7d902d18..b9d5cceb373 100644 --- a/homeassistant/components/ring/button.py +++ b/homeassistant/components/ring/button.py @@ -5,12 +5,10 @@ from __future__ import annotations from ring_doorbell import RingOther from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RingData -from .const import DOMAIN +from . 
import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingEntity, exception_wrap @@ -21,11 +19,11 @@ BUTTON_DESCRIPTION = ButtonEntityDescription( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the buttons for the Ring devices.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator async_add_entities( diff --git a/homeassistant/components/ring/camera.py b/homeassistant/components/ring/camera.py index b45803f3618..ccd91c163d6 100644 --- a/homeassistant/components/ring/camera.py +++ b/homeassistant/components/ring/camera.py @@ -2,26 +2,37 @@ from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass from datetime import timedelta import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Generic from aiohttp import web from haffmpeg.camera import CameraMjpeg from ring_doorbell import RingDoorBell +from ring_doorbell.webrtcstream import RingWebRtcMessage from homeassistant.components import ffmpeg -from homeassistant.components.camera import Camera -from homeassistant.config_entries import ConfigEntry +from homeassistant.components.camera import ( + Camera, + CameraEntityDescription, + CameraEntityFeature, + RTCIceCandidateInit, + WebRTCAnswer, + WebRTCCandidate, + WebRTCError, + WebRTCSendMessage, +) from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry from .coordinator import RingDataCoordinator -from .entity import RingEntity, exception_wrap +from .entity import RingDeviceT, RingEntity, exception_wrap FORCE_REFRESH_INTERVAL = timedelta(minutes=3) MOTION_DETECTION_CAPABILITY = "motion_detection" @@ -29,20 +40,49 @@ MOTION_DETECTION_CAPABILITY = "motion_detection" _LOGGER = logging.getLogger(__name__) +@dataclass(frozen=True, kw_only=True) +class RingCameraEntityDescription(CameraEntityDescription, Generic[RingDeviceT]): + """Base class for event entity description.""" + + exists_fn: Callable[[RingDoorBell], bool] + live_stream: bool + motion_detection: bool + + +CAMERA_DESCRIPTIONS: tuple[RingCameraEntityDescription, ...] 
= ( + RingCameraEntityDescription( + key="live_view", + translation_key="live_view", + exists_fn=lambda _: True, + live_stream=True, + motion_detection=False, + ), + RingCameraEntityDescription( + key="last_recording", + translation_key="last_recording", + entity_registry_enabled_default=False, + exists_fn=lambda camera: camera.has_subscription, + live_stream=False, + motion_detection=True, + ), +) + + async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Ring Door Bell and StickUp Camera.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) cams = [ - RingCam(camera, devices_coordinator, ffmpeg_manager) + RingCam(camera, devices_coordinator, description, ffmpeg_manager=ffmpeg_manager) + for description in CAMERA_DESCRIPTIONS for camera in ring_data.devices.video_devices - if camera.has_subscription + if description.exists_fn(camera) ] async_add_entities(cams) @@ -51,26 +91,31 @@ async def async_setup_entry( class RingCam(RingEntity[RingDoorBell], Camera): """An implementation of a Ring Door Bell camera.""" - _attr_name = None - def __init__( self, device: RingDoorBell, coordinator: RingDataCoordinator, + description: RingCameraEntityDescription, + *, ffmpeg_manager: ffmpeg.FFmpegManager, ) -> None: """Initialize a Ring Door Bell camera.""" super().__init__(device, coordinator) + self.entity_description = description Camera.__init__(self) self._ffmpeg_manager = ffmpeg_manager self._last_event: dict[str, Any] | None = None self._last_video_id: int | None = None self._video_url: str | None = None - self._image: bytes | None = None + self._images: dict[tuple[int | None, int | None], bytes] = {} self._expires_at = dt_util.utcnow() - FORCE_REFRESH_INTERVAL - self._attr_unique_id = str(device.id) - if device.has_capability(MOTION_DETECTION_CAPABILITY): + self._attr_unique_id = f"{device.id}-{description.key}" + if description.motion_detection and device.has_capability( + MOTION_DETECTION_CAPABILITY + ): self._attr_motion_detection_enabled = device.motion_detection + if description.live_stream: + self._attr_supported_features |= CameraEntityFeature.STREAM @callback def _handle_coordinator_update(self) -> None: @@ -81,12 +126,14 @@ class RingCam(RingEntity[RingDoorBell], Camera): history_data = self._device.last_history if history_data: self._last_event = history_data[0] + # will call async_update to update the attributes and get the + # video url from the api self.async_schedule_update_ha_state(True) else: self._last_event = None self._last_video_id = None self._video_url = None - self._image = None + self._images = {} self._expires_at = dt_util.utcnow() self.async_write_ha_state() @@ -102,7 +149,8 @@ class RingCam(RingEntity[RingDoorBell], Camera): self, width: int | None = None, height: int | None = None ) -> bytes | None: """Return a still image response from the camera.""" - if self._image is None and self._video_url is not None: + key = (width, height) + if not (image := self._images.get(key)) and self._video_url is not None: image = await ffmpeg.async_get_image( self.hass, self._video_url, @@ -111,9 +159,9 @@ class RingCam(RingEntity[RingDoorBell], Camera): ) if image: - self._image = image + self._images[key] = image - return self._image + return image async def handle_async_mjpeg_stream( self, request: web.Request @@ -136,6 
+184,47 @@ class RingCam(RingEntity[RingDoorBell], Camera): finally: await stream.close() + async def async_handle_async_webrtc_offer( + self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: + """Return the source of the stream.""" + + def message_wrapper(ring_message: RingWebRtcMessage) -> None: + if ring_message.error_code: + msg = ring_message.error_message or "" + send_message(WebRTCError(ring_message.error_code, msg)) + elif ring_message.answer: + send_message(WebRTCAnswer(ring_message.answer)) + elif ring_message.candidate: + send_message( + WebRTCCandidate( + RTCIceCandidateInit( + ring_message.candidate, + sdp_m_line_index=ring_message.sdp_m_line_index or 0, + ) + ) + ) + + return await self._device.generate_async_webrtc_stream( + offer_sdp, session_id, message_wrapper, keep_alive_timeout=None + ) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle a WebRTC candidate.""" + if candidate.sdp_m_line_index is None: + msg = "The sdp_m_line_index is required for ring webrtc streaming" + raise HomeAssistantError(msg) + await self._device.on_webrtc_candidate( + session_id, candidate.candidate, candidate.sdp_m_line_index + ) + + @callback + def close_webrtc_session(self, session_id: str) -> None: + """Close a WebRTC session.""" + self._device.sync_close_webrtc_stream(session_id) + async def async_update(self) -> None: """Update camera entity and refresh attributes.""" if ( @@ -157,7 +246,7 @@ class RingCam(RingEntity[RingDoorBell], Camera): return if self._last_video_id != self._last_event["id"]: - self._image = None + self._images = {} self._video_url = await self._async_get_video() @@ -183,7 +272,7 @@ class RingCam(RingEntity[RingDoorBell], Camera): await self._device.async_set_motion_detection(new_state) self._attr_motion_detection_enabled = new_state - self.async_schedule_update_ha_state(False) + self.async_write_ha_state() async def async_enable_motion_detection(self) -> None: """Enable motion detection in the camera.""" diff --git a/homeassistant/components/ring/config_flow.py b/homeassistant/components/ring/config_flow.py index ee78541dec7..a1024186349 100644 --- a/homeassistant/components/ring/config_flow.py +++ b/homeassistant/components/ring/config_flow.py @@ -3,22 +3,32 @@ from collections.abc import Mapping import logging from typing import Any +import uuid from ring_doorbell import Auth, AuthenticationError, Requires2FAError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.components import dhcp +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import ( - APPLICATION_NAME, + CONF_DEVICE_ID, + CONF_NAME, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME, - __version__ as ha_version, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.aiohttp_client import async_get_clientsession +import homeassistant.helpers.device_registry as dr -from .const import CONF_2FA, DOMAIN +from . 
import get_auth_user_agent +from .const import CONF_2FA, CONF_CONFIG_ENTRY_MINOR_VERSION, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -27,11 +37,22 @@ STEP_USER_DATA_SCHEMA = vol.Schema( ) STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) +STEP_RECONFIGURE_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) -async def validate_input(hass: HomeAssistant, data: dict[str, str]) -> dict[str, Any]: +UNKNOWN_RING_ACCOUNT = "unknown_ring_account" + + +async def validate_input( + hass: HomeAssistant, hardware_id: str, data: dict[str, str] +) -> dict[str, Any]: """Validate the user input allows us to connect.""" - auth = Auth(f"{APPLICATION_NAME}/{ha_version}") + user_agent = get_auth_user_agent() + auth = Auth( + user_agent, + http_client_session=async_get_clientsession(hass), + hardware_id=hardware_id, + ) try: token = await auth.async_fetch_token( @@ -51,9 +72,29 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Ring.""" VERSION = 1 + MINOR_VERSION = CONF_CONFIG_ENTRY_MINOR_VERSION user_pass: dict[str, Any] = {} - reauth_entry: ConfigEntry | None = None + hardware_id: str | None = None + + async def async_step_dhcp( + self, discovery_info: dhcp.DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle discovery via dhcp.""" + # Ring has a single config entry per cloud username rather than per device + # so we check whether that device is already configured. + # If the device is not configured there's either no ring config entry + # yet or the device is registered to a different account + await self.async_set_unique_id(UNKNOWN_RING_ACCOUNT) + self._abort_if_unique_id_configured() + if self.hass.config_entries.async_has_entries(DOMAIN): + device_registry = dr.async_get(self.hass) + if device_registry.async_get_device( + identifiers={(DOMAIN, discovery_info.macaddress)} + ): + return self.async_abort(reason="already_configured") + + return await self.async_step_user() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -61,8 +102,12 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors: dict[str, str] = {} if user_input is not None: + await self.async_set_unique_id(user_input[CONF_USERNAME]) + self._abort_if_unique_id_configured() + if not self.hardware_id: + self.hardware_id = str(uuid.uuid4()) try: - token = await validate_input(self.hass, user_input) + token = await validate_input(self.hass, self.hardware_id, user_input) except Require2FA: self.user_pass = user_input @@ -73,10 +118,13 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - await self.async_set_unique_id(user_input[CONF_USERNAME]) return self.async_create_entry( title=user_input[CONF_USERNAME], - data={CONF_USERNAME: user_input[CONF_USERNAME], CONF_TOKEN: token}, + data={ + CONF_DEVICE_ID: self.hardware_id, + CONF_USERNAME: user_input[CONF_USERNAME], + CONF_TOKEN: token, + }, ) return self.async_show_form( @@ -88,11 +136,16 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle 2fa step.""" if user_input: - if self.reauth_entry: + if self.source == SOURCE_REAUTH: return await self.async_step_reauth_confirm( {**self.user_pass, **user_input} ) + if self.source == SOURCE_RECONFIGURE: + return await self.async_step_reconfigure( + {**self.user_pass, **user_input} + ) + return await self.async_step_user({**self.user_pass, **user_input}) return self.async_show_form( @@ -104,9 +157,6 @@ class 
RingConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -114,12 +164,17 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - assert self.reauth_entry is not None + reauth_entry = self._get_reauth_entry() if user_input: - user_input[CONF_USERNAME] = self.reauth_entry.data[CONF_USERNAME] + user_input[CONF_USERNAME] = reauth_entry.data[CONF_USERNAME] + # Reauth will use the same hardware id and re-authorise an existing + # authorised device. + if not self.hardware_id: + self.hardware_id = reauth_entry.data[CONF_DEVICE_ID] + assert self.hardware_id try: - token = await validate_input(self.hass, user_input) + token = await validate_input(self.hass, self.hardware_id, user_input) except Require2FA: self.user_pass = user_input return await self.async_step_2fa() @@ -132,19 +187,59 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN): data = { CONF_USERNAME: user_input[CONF_USERNAME], CONF_TOKEN: token, + CONF_DEVICE_ID: self.hardware_id, } - self.hass.config_entries.async_update_entry( - self.reauth_entry, data=data - ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) return self.async_show_form( step_id="reauth_confirm", data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors, description_placeholders={ - CONF_USERNAME: self.reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], + CONF_NAME: reauth_entry.data[CONF_USERNAME], + }, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Trigger a reconfiguration flow.""" + errors: dict[str, str] = {} + reconfigure_entry = self._get_reconfigure_entry() + username = reconfigure_entry.data[CONF_USERNAME] + await self.async_set_unique_id(username) + if user_input: + user_input[CONF_USERNAME] = username + # Reconfigure will generate a new hardware id and create a new + # authorised device at ring.com. 
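# Illustrative sketch of the hardware-id policy the two flows above follow:
# reauth reuses the id stored under CONF_DEVICE_ID so the already-authorised
# device at ring.com keeps working, while reconfigure mints a fresh uuid4 and
# therefore registers a new authorised device. `pick_hardware_id` and the literal
# "device_id" key are assumed stand-ins for this sketch, not integration code.
import uuid


def pick_hardware_id(entry_data: dict[str, str], *, reuse_existing: bool) -> str:
    """Return the hardware id to authenticate with."""
    if reuse_existing and (existing := entry_data.get("device_id")):
        return existing  # reauth path: keep the existing authorised device
    return str(uuid.uuid4())  # reconfigure / first setup: new authorised device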
+ if not self.hardware_id: + self.hardware_id = str(uuid.uuid4()) + try: + assert self.hardware_id + token = await validate_input(self.hass, self.hardware_id, user_input) + except Require2FA: + self.user_pass = user_input + return await self.async_step_2fa() + except InvalidAuth: + errors["base"] = "invalid_auth" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + data = { + CONF_USERNAME: username, + CONF_TOKEN: token, + CONF_DEVICE_ID: self.hardware_id, + } + return self.async_update_reload_and_abort(reconfigure_entry, data=data) + + return self.async_show_form( + step_id="reconfigure", + data_schema=STEP_RECONFIGURE_DATA_SCHEMA, + errors=errors, + description_placeholders={ + CONF_USERNAME: username, }, ) diff --git a/homeassistant/components/ring/const.py b/homeassistant/components/ring/const.py index 70813a78c76..68ac00d69f6 100644 --- a/homeassistant/components/ring/const.py +++ b/homeassistant/components/ring/const.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import timedelta +from typing import Final from homeassistant.const import Platform @@ -18,7 +19,9 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CAMERA, + Platform.EVENT, Platform.LIGHT, + Platform.NUMBER, Platform.SENSOR, Platform.SIREN, Platform.SWITCH, @@ -26,6 +29,8 @@ PLATFORMS = [ SCAN_INTERVAL = timedelta(minutes=1) -NOTIFICATIONS_SCAN_INTERVAL = timedelta(seconds=5) CONF_2FA = "2fa" +CONF_LISTEN_CREDENTIALS = "listen_token" + +CONF_CONFIG_ENTRY_MINOR_VERSION: Final = 3 diff --git a/homeassistant/components/ring/coordinator.py b/homeassistant/components/ring/coordinator.py index 600743005eb..b143fd3dda0 100644 --- a/homeassistant/components/ring/coordinator.py +++ b/homeassistant/components/ring/coordinator.py @@ -3,15 +3,28 @@ from asyncio import TaskGroup from collections.abc import Callable, Coroutine import logging -from typing import Any +from typing import TYPE_CHECKING, Any -from ring_doorbell import AuthenticationError, Ring, RingDevices, RingError, RingTimeout +from ring_doorbell import ( + AuthenticationError, + Ring, + RingDevices, + RingError, + RingEvent, + RingTimeout, +) +from ring_doorbell.listen import RingEventListener -from homeassistant.core import HomeAssistant +from homeassistant import config_entries +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.helpers.update_coordinator import ( + BaseDataUpdateCoordinatorProtocol, + DataUpdateCoordinator, + UpdateFailed, +) -from .const import NOTIFICATIONS_SCAN_INTERVAL, SCAN_INTERVAL +from .const import SCAN_INTERVAL _LOGGER = logging.getLogger(__name__) @@ -91,19 +104,112 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]): return devices -class RingNotificationsCoordinator(DataUpdateCoordinator[None]): +class RingListenCoordinator(BaseDataUpdateCoordinatorProtocol): """Global notifications coordinator.""" - def __init__(self, hass: HomeAssistant, ring_api: Ring) -> None: - """Initialize my coordinator.""" - super().__init__( - hass, - logger=_LOGGER, - name="active dings", - update_interval=NOTIFICATIONS_SCAN_INTERVAL, - ) - self.ring_api: Ring = ring_api + config_entry: config_entries.ConfigEntry - async def _async_update_data(self) -> None: - """Fetch data from API endpoint.""" - await _call_api(self.hass, self.ring_api.async_update_dings) + def __init__( + 
self, + hass: HomeAssistant, + ring_api: Ring, + listen_credentials: dict[str, Any] | None, + listen_credentials_updater: Callable[[dict[str, Any]], None], + ) -> None: + """Initialize my coordinator.""" + self.hass = hass + self.logger = _LOGGER + self.ring_api: Ring = ring_api + self.event_listener = RingEventListener( + ring_api, listen_credentials, listen_credentials_updater + ) + self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {} + self._listen_callback_id: int | None = None + + config_entry = config_entries.current_entry.get() + if TYPE_CHECKING: + assert config_entry + self.config_entry = config_entry + self.start_timeout = 10 + self.config_entry.async_on_unload(self.async_shutdown) + self.index_alerts() + + def index_alerts(self) -> None: + "Index the active alerts." + self.alerts = { + (alert.doorbot_id, alert.kind): alert + for alert in self.ring_api.active_alerts() + } + + async def async_shutdown(self) -> None: + """Cancel any scheduled call, and ignore new runs.""" + if self.event_listener.started: + await self._async_stop_listen() + + async def _async_stop_listen(self) -> None: + self.logger.debug("Stopped ring listener") + await self.event_listener.stop() + self.logger.debug("Stopped ring listener") + + async def _async_start_listen(self) -> None: + """Start listening for realtime events.""" + self.logger.debug("Starting ring listener.") + await self.event_listener.start( + timeout=self.start_timeout, + ) + if self.event_listener.started is True: + self.logger.debug("Started ring listener") + else: + self.logger.warning( + "Ring event listener failed to start after %s seconds", + self.start_timeout, + ) + self._listen_callback_id = self.event_listener.add_notification_callback( + self._on_event + ) + self.index_alerts() + # Update the listeners so they switch from Unavailable to Unknown + self._async_update_listeners() + + def _on_event(self, event: RingEvent) -> None: + self.logger.debug("Ring event received: %s", event) + self.index_alerts() + self._async_update_listeners(event.doorbot_id) + + @callback + def _async_update_listeners(self, doorbot_id: int | None = None) -> None: + """Update all registered listeners.""" + for update_callback, device_api_id in list(self._listeners.values()): + if not doorbot_id or device_api_id == doorbot_id: + update_callback() + + @callback + def async_add_listener( + self, update_callback: CALLBACK_TYPE, context: Any = None + ) -> Callable[[], None]: + """Listen for data updates.""" + start_listen = not self._listeners + + @callback + def remove_listener() -> None: + """Remove update listener.""" + self._listeners.pop(remove_listener) + if not self._listeners: + self.config_entry.async_create_task( + self.hass, + self._async_stop_listen(), + "Ring event listener stop", + eager_start=True, + ) + + self._listeners[remove_listener] = (update_callback, context) + + # This is the first listener, start the event listener. 
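# Illustrative sketch of the bookkeeping async_add_listener above performs: the
# push event listener is started when the first entity subscribes and stopped
# when the last subscription is removed. `SimpleListenerManager` is an assumed,
# simplified synchronous analogue for this sketch, not a Home Assistant class.
from collections.abc import Callable


class SimpleListenerManager:
    """Reference-count callbacks around a start/stop push listener."""

    def __init__(self, start: Callable[[], None], stop: Callable[[], None]) -> None:
        self._start = start
        self._stop = stop
        self._callbacks: set[Callable[[], None]] = set()

    def add_listener(self, callback: Callable[[], None]) -> Callable[[], None]:
        if not self._callbacks:  # first subscriber -> start the listener
            self._start()
        self._callbacks.add(callback)

        def remove() -> None:
            self._callbacks.discard(callback)
            if not self._callbacks:  # last subscriber removed -> stop the listener
                self._stop()

        return remove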
+ if start_listen: + self.config_entry.async_create_task( + self.hass, + self._async_start_listen(), + "Ring event listener start", + eager_start=True, + ) + return remove_listener diff --git a/homeassistant/components/ring/diagnostics.py b/homeassistant/components/ring/diagnostics.py index 2e7604d9f50..cecf26a46a7 100644 --- a/homeassistant/components/ring/diagnostics.py +++ b/homeassistant/components/ring/diagnostics.py @@ -5,11 +5,9 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry TO_REDACT = { "id", @@ -29,10 +27,10 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: RingConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - ring_data: RingData = hass.data[DOMAIN][entry.entry_id] + ring_data = entry.runtime_data devices_data = ring_data.api.devices_data devices_raw = [ devices_data[device_type][device_id] diff --git a/homeassistant/components/ring/entity.py b/homeassistant/components/ring/entity.py index 72deb09b76f..b93a7f35322 100644 --- a/homeassistant/components/ring/entity.py +++ b/homeassistant/components/ring/entity.py @@ -1,6 +1,7 @@ """Base class for Ring entity.""" -from collections.abc import Callable, Coroutine +from collections.abc import Awaitable, Callable, Coroutine +from dataclasses import dataclass from typing import Any, Concatenate, Generic, cast from ring_doorbell import ( @@ -12,22 +13,46 @@ from ring_doorbell import ( ) from typing_extensions import TypeVar -from homeassistant.core import callback +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.update_coordinator import ( + BaseCoordinatorEntity, + CoordinatorEntity, +) from .const import ATTRIBUTION, DOMAIN -from .coordinator import RingDataCoordinator, RingNotificationsCoordinator +from .coordinator import RingDataCoordinator, RingListenCoordinator RingDeviceT = TypeVar("RingDeviceT", bound=RingGeneric, default=RingGeneric) _RingCoordinatorT = TypeVar( "_RingCoordinatorT", - bound=(RingDataCoordinator | RingNotificationsCoordinator), + bound=(RingDataCoordinator | RingListenCoordinator), ) +@dataclass(slots=True) +class DeprecatedInfo: + """Class to define deprecation info for deprecated entities.""" + + new_platform: Platform + breaks_in_ha_version: str + + +@dataclass(frozen=True, kw_only=True) +class RingEntityDescription(EntityDescription): + """Base class for a ring entity description.""" + + deprecated_info: DeprecatedInfo | None = None + + def exception_wrap[_RingBaseEntityT: RingBaseEntity[Any, Any], **_P, _R]( async_func: Callable[Concatenate[_RingBaseEntityT, _P], Coroutine[Any, Any, _R]], ) -> Callable[Concatenate[_RingBaseEntityT, _P], 
Coroutine[Any, Any, _R]]: @@ -51,8 +76,79 @@ def exception_wrap[_RingBaseEntityT: RingBaseEntity[Any, Any], **_P, _R]( return _wrap +def refresh_after[_RingEntityT: RingEntity[Any], **_P]( + func: Callable[Concatenate[_RingEntityT, _P], Awaitable[None]], +) -> Callable[Concatenate[_RingEntityT, _P], Coroutine[Any, Any, None]]: + """Define a wrapper to handle api call errors or refresh after success.""" + + @exception_wrap + async def _wrap(self: _RingEntityT, *args: _P.args, **kwargs: _P.kwargs) -> None: + await func(self, *args, **kwargs) + await self.coordinator.async_request_refresh() + + return _wrap + + +def async_check_create_deprecated( + hass: HomeAssistant, + platform: Platform, + unique_id: str, + entity_description: RingEntityDescription, +) -> bool: + """Return true if the entitty should be created based on the deprecated_info. + + If deprecated_info is not defined will return true. + If entity not yet created will return false. + If entity disabled will delete it and return false. + Otherwise will return true and create issues for scripts or automations. + """ + if not entity_description.deprecated_info: + return True + + ent_reg = er.async_get(hass) + entity_id = ent_reg.async_get_entity_id( + platform, + DOMAIN, + unique_id, + ) + if not entity_id: + return False + + entity_entry = ent_reg.async_get(entity_id) + assert entity_entry + if entity_entry.disabled: + # If the entity exists and is disabled then we want to remove + # the entity so that the user is just using the new entity. + ent_reg.async_remove(entity_id) + return False + + # Check for issues that need to be created + entity_automations = automations_with_entity(hass, entity_id) + entity_scripts = scripts_with_entity(hass, entity_id) + if entity_automations or entity_scripts: + deprecated_info = entity_description.deprecated_info + for item in entity_automations + entity_scripts: + async_create_issue( + hass, + DOMAIN, + f"deprecated_entity_{entity_id}_{item}", + breaks_in_ha_version=deprecated_info.breaks_in_ha_version, + is_fixable=False, + is_persistent=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_entity", + translation_placeholders={ + "entity": entity_id, + "info": item, + "platform": platform, + "new_platform": deprecated_info.new_platform, + }, + ) + return True + + class RingBaseEntity( - CoordinatorEntity[_RingCoordinatorT], Generic[_RingCoordinatorT, RingDeviceT] + BaseCoordinatorEntity[_RingCoordinatorT], Generic[_RingCoordinatorT, RingDeviceT] ): """Base implementation for Ring device.""" @@ -77,7 +173,7 @@ class RingBaseEntity( ) -class RingEntity(RingBaseEntity[RingDataCoordinator, RingDeviceT]): +class RingEntity(RingBaseEntity[RingDataCoordinator, RingDeviceT], CoordinatorEntity): """Implementation for Ring devices.""" def _get_coordinator_data(self) -> RingDevices: diff --git a/homeassistant/components/ring/event.py b/homeassistant/components/ring/event.py new file mode 100644 index 00000000000..71a4bc8aea5 --- /dev/null +++ b/homeassistant/components/ring/event.py @@ -0,0 +1,109 @@ +"""Component providing support for ring events.""" + +from dataclasses import dataclass +from typing import Generic + +from ring_doorbell import RingCapability, RingEvent as RingAlert +from ring_doorbell.const import KIND_DING, KIND_INTERCOM_UNLOCK, KIND_MOTION + +from homeassistant.components.event import ( + EventDeviceClass, + EventEntity, + EventEntityDescription, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import 
AddEntitiesCallback + +from . import RingConfigEntry +from .coordinator import RingListenCoordinator +from .entity import RingBaseEntity, RingDeviceT + + +@dataclass(frozen=True, kw_only=True) +class RingEventEntityDescription(EventEntityDescription, Generic[RingDeviceT]): + """Base class for event entity description.""" + + capability: RingCapability + + +EVENT_DESCRIPTIONS: tuple[RingEventEntityDescription, ...] = ( + RingEventEntityDescription( + key=KIND_DING, + translation_key=KIND_DING, + device_class=EventDeviceClass.DOORBELL, + event_types=[KIND_DING], + capability=RingCapability.DING, + ), + RingEventEntityDescription( + key=KIND_MOTION, + translation_key=KIND_MOTION, + device_class=EventDeviceClass.MOTION, + event_types=[KIND_MOTION], + capability=RingCapability.MOTION_DETECTION, + ), + RingEventEntityDescription( + key=KIND_INTERCOM_UNLOCK, + translation_key=KIND_INTERCOM_UNLOCK, + device_class=EventDeviceClass.BUTTON, + event_types=[KIND_INTERCOM_UNLOCK], + capability=RingCapability.OPEN, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: RingConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up events for a Ring device.""" + ring_data = entry.runtime_data + listen_coordinator = ring_data.listen_coordinator + + async_add_entities( + RingEvent(device, listen_coordinator, description) + for description in EVENT_DESCRIPTIONS + for device in ring_data.devices.all_devices + if device.has_capability(description.capability) + ) + + +class RingEvent(RingBaseEntity[RingListenCoordinator, RingDeviceT], EventEntity): + """An event implementation for Ring device.""" + + entity_description: RingEventEntityDescription[RingDeviceT] + + def __init__( + self, + device: RingDeviceT, + coordinator: RingListenCoordinator, + description: RingEventEntityDescription[RingDeviceT], + ) -> None: + """Initialize a event entity for Ring device.""" + super().__init__(device, coordinator) + self.entity_description = description + self._attr_unique_id = f"{device.id}-{description.key}" + + @callback + def _async_handle_event(self, event: str) -> None: + """Handle the event.""" + self._trigger_event(event) + + def _get_coordinator_alert(self) -> RingAlert | None: + return self.coordinator.alerts.get( + (self._device.device_api_id, self.entity_description.key) + ) + + @callback + def _handle_coordinator_update(self) -> None: + if (alert := self._get_coordinator_alert()) and not alert.is_update: + self._async_handle_event(alert.kind) + super()._handle_coordinator_update() + + @property + def available(self) -> bool: + """Return if entity is available.""" + return self.coordinator.event_listener.started + + async def async_update(self) -> None: + """All updates are passive.""" diff --git a/homeassistant/components/ring/icons.json b/homeassistant/components/ring/icons.json index 9dd31fd0fd1..de999a5ef37 100644 --- a/homeassistant/components/ring/icons.json +++ b/homeassistant/components/ring/icons.json @@ -1,5 +1,19 @@ { "entity": { + "number": { + "volume": { + "default": "mdi:bell-ring" + }, + "doorbell_volume": { + "default": "mdi:bell-ring" + }, + "mic_volume": { + "default": "mdi:microphone" + }, + "voice_volume": { + "default": "mdi:account-voice" + } + }, "sensor": { "last_activity": { "default": "mdi:history" @@ -35,10 +49,19 @@ "switch": { "siren": { "default": "mdi:alarm-bell" + }, + "in_home_chime": { + "default": "mdi:bell-ring-outline", + "state": { + "on": "mdi:bell-ring" + } + }, + "motion_detection": { + "default": "mdi:motion-sensor-off", + 
"state": { + "on": "mdi:motion-sensor" + } } } - }, - "services": { - "update": "mdi:refresh" } } diff --git a/homeassistant/components/ring/light.py b/homeassistant/components/ring/light.py index f7f7f9b44ae..9e29373a3aa 100644 --- a/homeassistant/components/ring/light.py +++ b/homeassistant/components/ring/light.py @@ -8,13 +8,11 @@ from typing import Any from ring_doorbell import RingStickUpCam from homeassistant.components.light import ColorMode, LightEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingEntity, exception_wrap @@ -38,11 +36,11 @@ class OnOffState(StrEnum): async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the lights for the Ring devices.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator async_add_entities( @@ -86,7 +84,7 @@ class RingLight(RingEntity[RingStickUpCam], LightEntity): self._attr_is_on = new_state == OnOffState.ON self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY - self.async_schedule_update_ha_state() + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on for 30 seconds.""" diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index 23e7b882efe..86758b26794 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -8,11 +8,26 @@ { "hostname": "ring*", "macaddress": "0CAE7D*" + }, + { + "hostname": "ring*", + "macaddress": "2CAB33*" + }, + { + "hostname": "ring*", + "macaddress": "94E36D*" + }, + { + "hostname": "ring*", + "macaddress": "9C7613*" + }, + { + "hostname": "ring*", + "macaddress": "341513*" } ], "documentation": "https://www.home-assistant.io/integrations/ring", "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], - "quality_scale": "silver", - "requirements": ["ring-doorbell[listen]==0.9.0"] + "requirements": ["ring-doorbell==0.9.13"] } diff --git a/homeassistant/components/ring/number.py b/homeassistant/components/ring/number.py new file mode 100644 index 00000000000..91aabb6c800 --- /dev/null +++ b/homeassistant/components/ring/number.py @@ -0,0 +1,150 @@ +"""Component providing HA number support for Ring Door Bell/Chimes.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any, Generic, cast + +from ring_doorbell import RingChime, RingDoorBell, RingGeneric, RingOther +import ring_doorbell.const + +from homeassistant.components.number import ( + NumberEntity, + NumberEntityDescription, + NumberMode, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . 
import RingConfigEntry +from .coordinator import RingDataCoordinator +from .entity import RingDeviceT, RingEntity, refresh_after + + +async def async_setup_entry( + hass: HomeAssistant, + entry: RingConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up a numbers for a Ring device.""" + ring_data = entry.runtime_data + devices_coordinator = ring_data.devices_coordinator + + async_add_entities( + RingNumber(device, devices_coordinator, description) + for description in NUMBER_TYPES + for device in ring_data.devices.all_devices + if description.exists_fn(device) + ) + + +@dataclass(frozen=True, kw_only=True) +class RingNumberEntityDescription(NumberEntityDescription, Generic[RingDeviceT]): + """Describes Ring number entity.""" + + value_fn: Callable[[RingDeviceT], StateType] + setter_fn: Callable[[RingDeviceT, float], Awaitable[None]] + exists_fn: Callable[[RingGeneric], bool] + + +NUMBER_TYPES: tuple[RingNumberEntityDescription[Any], ...] = ( + RingNumberEntityDescription[RingChime]( + key="volume", + translation_key="volume", + mode=NumberMode.SLIDER, + native_min_value=ring_doorbell.const.CHIME_VOL_MIN, + native_max_value=ring_doorbell.const.CHIME_VOL_MAX, + native_step=1, + value_fn=lambda device: device.volume, + setter_fn=lambda device, value: device.async_set_volume(int(value)), + exists_fn=lambda device: isinstance(device, RingChime), + ), + RingNumberEntityDescription[RingDoorBell]( + key="volume", + translation_key="volume", + mode=NumberMode.SLIDER, + native_min_value=ring_doorbell.const.DOORBELL_VOL_MIN, + native_max_value=ring_doorbell.const.DOORBELL_VOL_MAX, + native_step=1, + value_fn=lambda device: device.volume, + setter_fn=lambda device, value: device.async_set_volume(int(value)), + exists_fn=lambda device: isinstance(device, RingDoorBell), + ), + RingNumberEntityDescription[RingOther]( + key="doorbell_volume", + translation_key="doorbell_volume", + mode=NumberMode.SLIDER, + native_min_value=ring_doorbell.const.OTHER_DOORBELL_VOL_MIN, + native_max_value=ring_doorbell.const.OTHER_DOORBELL_VOL_MAX, + native_step=1, + value_fn=lambda device: device.doorbell_volume, + setter_fn=lambda device, value: device.async_set_doorbell_volume(int(value)), + exists_fn=lambda device: isinstance(device, RingOther), + ), + RingNumberEntityDescription[RingOther]( + key="mic_volume", + translation_key="mic_volume", + mode=NumberMode.SLIDER, + native_min_value=ring_doorbell.const.MIC_VOL_MIN, + native_max_value=ring_doorbell.const.MIC_VOL_MAX, + native_step=1, + value_fn=lambda device: device.mic_volume, + setter_fn=lambda device, value: device.async_set_mic_volume(int(value)), + exists_fn=lambda device: isinstance(device, RingOther), + ), + RingNumberEntityDescription[RingOther]( + key="voice_volume", + translation_key="voice_volume", + mode=NumberMode.SLIDER, + native_min_value=ring_doorbell.const.VOICE_VOL_MIN, + native_max_value=ring_doorbell.const.VOICE_VOL_MAX, + native_step=1, + value_fn=lambda device: device.voice_volume, + setter_fn=lambda device, value: device.async_set_voice_volume(int(value)), + exists_fn=lambda device: isinstance(device, RingOther), + ), +) + + +class RingNumber(RingEntity[RingDeviceT], NumberEntity): + """A number implementation for Ring device.""" + + entity_description: RingNumberEntityDescription[RingDeviceT] + + def __init__( + self, + device: RingDeviceT, + coordinator: RingDataCoordinator, + description: RingNumberEntityDescription[RingDeviceT], + ) -> None: + """Initialize a number for Ring device.""" + super().__init__(device, 
coordinator) + self.entity_description = description + self._attr_unique_id = f"{device.id}-{description.key}" + self._update_native_value() + + def _update_native_value(self) -> None: + native_value = self.entity_description.value_fn(self._device) + if native_value is not None: + self._attr_native_value = float(native_value) + + @callback + def _handle_coordinator_update(self) -> None: + """Call update method.""" + + self._device = cast( + RingDeviceT, + self._get_coordinator_data().get_device(self._device.device_api_id), + ) + + self._update_native_value() + + super()._handle_coordinator_update() + + @refresh_after + async def async_set_native_value(self, value: float) -> None: + """Call setter on Ring device.""" + await self.entity_description.setter_fn(self._device, value) + + self._attr_native_value = value + self.async_write_ha_state() diff --git a/homeassistant/components/ring/sensor.py b/homeassistant/components/ring/sensor.py index b6849e37d96..dee67882857 100644 --- a/homeassistant/components/ring/sensor.py +++ b/homeassistant/components/ring/sensor.py @@ -21,29 +21,34 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + Platform, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry from .coordinator import RingDataCoordinator -from .entity import RingDeviceT, RingEntity +from .entity import ( + DeprecatedInfo, + RingDeviceT, + RingEntity, + RingEntityDescription, + async_check_create_deprecated, +) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a sensor for a Ring device.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator entities = [ @@ -51,6 +56,12 @@ async def async_setup_entry( for description in SENSOR_TYPES for device in ring_data.devices.all_devices if description.exists_fn(device) + and async_check_create_deprecated( + hass, + Platform.SENSOR, + f"{device.id}-{description.key}", + description, + ) ] async_add_entities(entities) @@ -122,7 +133,9 @@ def _get_last_event_attrs( @dataclass(frozen=True, kw_only=True) -class RingSensorEntityDescription(SensorEntityDescription, Generic[RingDeviceT]): +class RingSensorEntityDescription( + SensorEntityDescription, RingEntityDescription, Generic[RingDeviceT] +): """Describes Ring sensor entity.""" value_fn: Callable[[RingDeviceT], StateType] = lambda _: True @@ -174,6 +187,9 @@ SENSOR_TYPES: tuple[RingSensorEntityDescription[Any], ...] = ( ) else None, exists_fn=lambda device: device.has_capability(RingCapability.HISTORY), + deprecated_info=DeprecatedInfo( + new_platform=Platform.EVENT, breaks_in_ha_version="2025.4.0" + ), ), RingSensorEntityDescription[RingGeneric]( key="last_motion", @@ -190,30 +206,45 @@ SENSOR_TYPES: tuple[RingSensorEntityDescription[Any], ...] 
= ( ) else None, exists_fn=lambda device: device.has_capability(RingCapability.HISTORY), + deprecated_info=DeprecatedInfo( + new_platform=Platform.EVENT, breaks_in_ha_version="2025.4.0" + ), ), RingSensorEntityDescription[RingDoorBell | RingChime]( key="volume", translation_key="volume", value_fn=lambda device: device.volume, exists_fn=lambda device: isinstance(device, (RingDoorBell, RingChime)), + deprecated_info=DeprecatedInfo( + new_platform=Platform.NUMBER, breaks_in_ha_version="2025.4.0" + ), ), RingSensorEntityDescription[RingOther]( key="doorbell_volume", translation_key="doorbell_volume", value_fn=lambda device: device.doorbell_volume, exists_fn=lambda device: isinstance(device, RingOther), + deprecated_info=DeprecatedInfo( + new_platform=Platform.NUMBER, breaks_in_ha_version="2025.4.0" + ), ), RingSensorEntityDescription[RingOther]( key="mic_volume", translation_key="mic_volume", value_fn=lambda device: device.mic_volume, exists_fn=lambda device: isinstance(device, RingOther), + deprecated_info=DeprecatedInfo( + new_platform=Platform.NUMBER, breaks_in_ha_version="2025.4.0" + ), ), RingSensorEntityDescription[RingOther]( key="voice_volume", translation_key="voice_volume", value_fn=lambda device: device.voice_volume, exists_fn=lambda device: isinstance(device, RingOther), + deprecated_info=DeprecatedInfo( + new_platform=Platform.NUMBER, breaks_in_ha_version="2025.4.0" + ), ), RingSensorEntityDescription[RingGeneric]( key="wifi_signal_category", diff --git a/homeassistant/components/ring/services.yaml b/homeassistant/components/ring/services.yaml deleted file mode 100644 index 91b8669505b..00000000000 --- a/homeassistant/components/ring/services.yaml +++ /dev/null @@ -1 +0,0 @@ -update: diff --git a/homeassistant/components/ring/siren.py b/homeassistant/components/ring/siren.py index 665de07a5bb..b1452f7aeb5 100644 --- a/homeassistant/components/ring/siren.py +++ b/homeassistant/components/ring/siren.py @@ -1,54 +1,161 @@ """Component providing HA Siren support for Ring Chimes.""" +from collections.abc import Callable, Coroutine +from dataclasses import dataclass import logging -from typing import Any +from typing import Any, Generic, cast -from ring_doorbell import RingChime, RingEventKind +from ring_doorbell import ( + RingCapability, + RingChime, + RingEventKind, + RingGeneric, + RingStickUpCam, +) -from homeassistant.components.siren import ATTR_TONE, SirenEntity, SirenEntityFeature -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.components.siren import ( + ATTR_TONE, + SirenEntity, + SirenEntityDescription, + SirenEntityFeature, + SirenTurnOnServiceParameters, +) +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RingData -from .const import DOMAIN +from . 
import RingConfigEntry from .coordinator import RingDataCoordinator -from .entity import RingEntity, exception_wrap +from .entity import ( + RingDeviceT, + RingEntity, + RingEntityDescription, + async_check_create_deprecated, + refresh_after, +) _LOGGER = logging.getLogger(__name__) +@dataclass(frozen=True, kw_only=True) +class RingSirenEntityDescription( + SirenEntityDescription, RingEntityDescription, Generic[RingDeviceT] +): + """Describes a Ring siren entity.""" + + exists_fn: Callable[[RingGeneric], bool] + unique_id_fn: Callable[[RingDeviceT], str] = lambda device: str( + device.device_api_id + ) + is_on_fn: Callable[[RingDeviceT], bool] | None = None + turn_on_fn: ( + Callable[[RingDeviceT, SirenTurnOnServiceParameters], Coroutine[Any, Any, Any]] + | None + ) = None + turn_off_fn: Callable[[RingDeviceT], Coroutine[Any, Any, None]] | None = None + + +SIRENS: tuple[RingSirenEntityDescription[Any], ...] = ( + RingSirenEntityDescription[RingChime]( + key="siren", + translation_key="siren", + available_tones=[RingEventKind.DING.value, RingEventKind.MOTION.value], + # Historically the chime siren entity has appended `siren` to the unique id + unique_id_fn=lambda device: f"{device.device_api_id}-siren", + exists_fn=lambda device: isinstance(device, RingChime), + turn_on_fn=lambda device, kwargs: device.async_test_sound( + kind=str(kwargs.get(ATTR_TONE) or "") or RingEventKind.DING.value + ), + ), + RingSirenEntityDescription[RingStickUpCam]( + key="siren", + translation_key="siren", + exists_fn=lambda device: device.has_capability(RingCapability.SIREN), + is_on_fn=lambda device: device.siren > 0, + turn_on_fn=lambda device, _: device.async_set_siren(1), + turn_off_fn=lambda device: device.async_set_siren(0), + ), +) + + async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the sirens for the Ring devices.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator async_add_entities( - RingChimeSiren(device, devices_coordinator) - for device in ring_data.devices.chimes + RingSiren(device, devices_coordinator, description) + for device in ring_data.devices.all_devices + for description in SIRENS + if description.exists_fn(device) + and async_check_create_deprecated( + hass, + Platform.SIREN, + description.unique_id_fn(device), + description, + ) ) -class RingChimeSiren(RingEntity[RingChime], SirenEntity): +class RingSiren(RingEntity[RingDeviceT], SirenEntity): """Creates a siren to play the test chimes of a Chime device.""" - _attr_available_tones = [RingEventKind.DING.value, RingEventKind.MOTION.value] - _attr_supported_features = SirenEntityFeature.TURN_ON | SirenEntityFeature.TONES - _attr_translation_key = "siren" + entity_description: RingSirenEntityDescription[RingDeviceT] - def __init__(self, device: RingChime, coordinator: RingDataCoordinator) -> None: + def __init__( + self, + device: RingDeviceT, + coordinator: RingDataCoordinator, + description: RingSirenEntityDescription[RingDeviceT], + ) -> None: """Initialize a Ring Chime siren.""" super().__init__(device, coordinator) - # Entity class attributes - self._attr_unique_id = f"{self._device.id}-siren" + self.entity_description = description + self._attr_unique_id = description.unique_id_fn(device) + if description.is_on_fn: + self._attr_is_on = description.is_on_fn(self._device) + features = SirenEntityFeature(0) + if 
description.turn_on_fn: + features = features | SirenEntityFeature.TURN_ON + if description.turn_off_fn: + features = features | SirenEntityFeature.TURN_OFF + if description.available_tones: + features = features | SirenEntityFeature.TONES + self._attr_supported_features = features - @exception_wrap + async def _async_set_siren(self, siren_on: bool, **kwargs: Any) -> None: + if siren_on and self.entity_description.turn_on_fn: + turn_on_params = cast(SirenTurnOnServiceParameters, kwargs) + await self.entity_description.turn_on_fn(self._device, turn_on_params) + elif not siren_on and self.entity_description.turn_off_fn: + await self.entity_description.turn_off_fn(self._device) + + if self.entity_description.is_on_fn: + self._attr_is_on = siren_on + self.async_write_ha_state() + + @refresh_after async def async_turn_on(self, **kwargs: Any) -> None: - """Play the test sound on a Ring Chime device.""" - tone = kwargs.get(ATTR_TONE) or RingEventKind.DING.value + """Turn on the siren.""" + await self._async_set_siren(True, **kwargs) - await self._device.async_test_sound(kind=tone) + @refresh_after + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the siren.""" + await self._async_set_siren(False) + + @callback + def _handle_coordinator_update(self) -> None: + """Call update method.""" + if not self.entity_description.is_on_fn: + return + self._device = cast( + RingDeviceT, + self._get_coordinator_data().get_device(self._device.device_api_id), + ) + self._attr_is_on = self.entity_description.is_on_fn(self._device) + super()._handle_coordinator_update() diff --git a/homeassistant/components/ring/strings.json b/homeassistant/components/ring/strings.json index ed0319b7a4b..8170ec8e161 100644 --- a/homeassistant/components/ring/strings.json +++ b/homeassistant/components/ring/strings.json @@ -20,6 +20,13 @@ "data": { "password": "[%key:common::config_flow::data::password%]" } + }, + "reconfigure": { + "title": "Reconfigure Ring Integration", + "description": "Will create a new Authorized Device for {username} at ring.com", + "data": { + "password": "[%key:common::config_flow::data::password%]" + } } }, "error": { @@ -27,14 +34,26 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "entity": { "binary_sensor": { "ding": { "name": "Ding" + }, + "motion": { + "name": "Motion" + } + }, + "event": { + "ding": { + "name": "Ding" + }, + "intercom_unlock": { + "name": "Intercom unlock" } }, "button": { @@ -47,6 +66,20 @@ "name": "[%key:component::light::title%]" } }, + "number": { + "volume": { + "name": "Volume" + }, + "doorbell_volume": { + "name": "Doorbell volume" + }, + "mic_volume": { + "name": "Mic volume" + }, + "voice_volume": { + "name": "Voice volume" + } + }, "siren": { "siren": { "name": "[%key:component::siren::title%]" @@ -84,26 +117,27 @@ "switch": { "siren": { "name": "[%key:component::siren::title%]" + }, + "in_home_chime": { + "name": "In-home chime" + }, + "motion_detection": { + "name": "Motion detection" + } + }, + "camera": { + "live_view": { + "name": "Live view" + }, + "last_recording": { + "name": "Last recording" } - 
} - }, - "services": { - "update": { - "name": "Update", - "description": "Updates the data we have for all your ring devices." } }, "issues": { - "deprecated_service_ring_update": { - "title": "Detected use of deprecated action `ring.update`", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::ring::issues::deprecated_service_ring_update::title%]", - "description": "Use `homeassistant.update_entity` instead which will update all ring entities.\n\nPlease replace uses of this action and adjust your automations and scripts and select **submit** to close this issue." - } - } - } + "deprecated_entity": { + "title": "Detected deprecated {platform} entity usage", + "description": "We detected that entity `{entity}` is being used in `{info}`\n\nWe have created a new `{new_platform}` entity and you should migrate `{info}` to use this new entity.\n\nWhen you are done migrating `{info}` and are ready to have the deprecated `{entity}` entity removed, disable the entity and restart Home Assistant." } } } diff --git a/homeassistant/components/ring/switch.py b/homeassistant/components/ring/switch.py index 810011d68c8..0ac31fec209 100644 --- a/homeassistant/components/ring/switch.py +++ b/homeassistant/components/ring/switch.py @@ -1,98 +1,150 @@ """Component providing HA switch support for Ring Door Bell/Chimes.""" -from datetime import timedelta +from collections.abc import Callable, Coroutine, Sequence +from dataclasses import dataclass import logging -from typing import Any +from typing import Any, Generic, Self, cast -from ring_doorbell import RingStickUpCam +from ring_doorbell import RingCapability, RingDoorBell, RingStickUpCam +from ring_doorbell.const import DOORBELL_EXISTING_TYPE -from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry from .coordinator import RingDataCoordinator -from .entity import RingEntity, exception_wrap +from .entity import ( + DeprecatedInfo, + RingDeviceT, + RingEntity, + RingEntityDescription, + async_check_create_deprecated, + refresh_after, +) _LOGGER = logging.getLogger(__name__) +IN_HOME_CHIME_IS_PRESENT = {v for k, v in DOORBELL_EXISTING_TYPE.items() if k != 2} -# It takes a few seconds for the API to correctly return an update indicating -# that the changes have been made. Once we request a change (i.e. a light -# being turned on) we simply wait for this time delta before we allow -# updates to take place. 
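# Illustrative sketch of the pattern replacing the removed SKIP_UPDATES_DELAY
# below: rather than suppressing coordinator updates for a fixed window after a
# write, the new entities decorate their setters with `refresh_after` (defined in
# entity.py in this diff), which awaits the API call and then requests a
# coordinator refresh. This simplified version omits the `exception_wrap` error
# handling of the real decorator; the name `refresh_after_sketch` is assumed.
import functools
from collections.abc import Awaitable, Callable
from typing import Any


def refresh_after_sketch(
    func: Callable[..., Awaitable[None]],
) -> Callable[..., Awaitable[None]]:
    """Run the wrapped setter, then ask the coordinator for fresh data."""

    @functools.wraps(func)
    async def wrapper(self: Any, *args: Any, **kwargs: Any) -> None:
        await func(self, *args, **kwargs)
        await self.coordinator.async_request_refresh()

    return wrapper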
-SKIP_UPDATES_DELAY = timedelta(seconds=5) +@dataclass(frozen=True, kw_only=True) +class RingSwitchEntityDescription( + SwitchEntityDescription, RingEntityDescription, Generic[RingDeviceT] +): + """Describes a Ring switch entity.""" + + exists_fn: Callable[[RingDeviceT], bool] + unique_id_fn: Callable[[Self, RingDeviceT], str] = ( + lambda self, device: f"{device.device_api_id}-{self.key}" + ) + is_on_fn: Callable[[RingDeviceT], bool] + turn_on_fn: Callable[[RingDeviceT], Coroutine[Any, Any, None]] + turn_off_fn: Callable[[RingDeviceT], Coroutine[Any, Any, None]] + + +SWITCHES: Sequence[RingSwitchEntityDescription[Any]] = ( + RingSwitchEntityDescription[RingStickUpCam]( + key="siren", + translation_key="siren", + exists_fn=lambda device: device.has_capability(RingCapability.SIREN), + is_on_fn=lambda device: device.siren > 0, + turn_on_fn=lambda device: device.async_set_siren(1), + turn_off_fn=lambda device: device.async_set_siren(0), + deprecated_info=DeprecatedInfo( + new_platform=Platform.SIREN, breaks_in_ha_version="2025.4.0" + ), + ), + RingSwitchEntityDescription[RingDoorBell]( + key="in_home_chime", + translation_key="in_home_chime", + exists_fn=lambda device: device.family == "doorbots" + and device.existing_doorbell_type in IN_HOME_CHIME_IS_PRESENT, + is_on_fn=lambda device: device.existing_doorbell_type_enabled or False, + turn_on_fn=lambda device: device.async_set_existing_doorbell_type_enabled(True), + turn_off_fn=lambda device: device.async_set_existing_doorbell_type_enabled( + False + ), + ), + RingSwitchEntityDescription[RingDoorBell]( + key="motion_detection", + translation_key="motion_detection", + exists_fn=lambda device: device.has_capability(RingCapability.MOTION_DETECTION), + is_on_fn=lambda device: device.motion_detection, + turn_on_fn=lambda device: device.async_set_motion_detection(True), + turn_off_fn=lambda device: device.async_set_motion_detection(False), + ), +) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the switches for the Ring devices.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator async_add_entities( - SirenSwitch(device, devices_coordinator) - for device in ring_data.devices.stickup_cams - if device.has_capability("siren") + RingSwitch(device, devices_coordinator, description) + for description in SWITCHES + for device in ring_data.devices.all_devices + if description.exists_fn(device) + and async_check_create_deprecated( + hass, + Platform.SWITCH, + description.unique_id_fn(description, device), + description, + ) ) -class BaseRingSwitch(RingEntity[RingStickUpCam], SwitchEntity): +class RingSwitch(RingEntity[RingDeviceT], SwitchEntity): """Represents a switch for controlling an aspect of a ring device.""" + entity_description: RingSwitchEntityDescription[RingDeviceT] + def __init__( - self, device: RingStickUpCam, coordinator: RingDataCoordinator, device_type: str + self, + device: RingDeviceT, + coordinator: RingDataCoordinator, + description: RingSwitchEntityDescription[RingDeviceT], ) -> None: """Initialize the switch.""" super().__init__(device, coordinator) - self._device_type = device_type - self._attr_unique_id = f"{self._device.id}-{self._device_type}" - - -class SirenSwitch(BaseRingSwitch): - """Creates a switch to turn the ring cameras siren on and off.""" - - _attr_translation_key = "siren" - - def __init__( - self, 
device: RingStickUpCam, coordinator: RingDataCoordinator - ) -> None: - """Initialize the switch for a device with a siren.""" - super().__init__(device, coordinator, "siren") + self.entity_description = description self._no_updates_until = dt_util.utcnow() - self._attr_is_on = device.siren > 0 + self._attr_unique_id = description.unique_id_fn(description, device) + self._attr_is_on = description.is_on_fn(device) @callback def _handle_coordinator_update(self) -> None: """Call update method.""" - if self._no_updates_until > dt_util.utcnow(): - return - device = self._get_coordinator_data().get_stickup_cam( - self._device.device_api_id + self._device = cast( + RingDeviceT, + self._get_coordinator_data().get_device(self._device.device_api_id), ) - self._attr_is_on = device.siren > 0 + self._attr_is_on = self.entity_description.is_on_fn(self._device) super()._handle_coordinator_update() - @exception_wrap - async def _async_set_switch(self, new_state: int) -> None: + @refresh_after + async def _async_set_switch(self, switch_on: bool) -> None: """Update switch state, and causes Home Assistant to correctly update.""" - await self._device.async_set_siren(new_state) + if switch_on: + await self.entity_description.turn_on_fn(self._device) + else: + await self.entity_description.turn_off_fn(self._device) - self._attr_is_on = new_state > 0 - self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY - self.async_schedule_update_ha_state() + self._attr_is_on = switch_on + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn the siren on for 30 seconds.""" - await self._async_set_switch(1) + await self._async_set_switch(True) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the siren off.""" - await self._async_set_switch(0) + await self._async_set_switch(False) diff --git a/homeassistant/components/ripple/manifest.json b/homeassistant/components/ripple/manifest.json index 72df64ac850..17ff6b34f38 100644 --- a/homeassistant/components/ripple/manifest.json +++ b/homeassistant/components/ripple/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ripple", "iot_class": "cloud_polling", "loggers": ["pyripple"], + "quality_scale": "legacy", "requirements": ["python-ripple-api==0.0.3"] } diff --git a/homeassistant/components/risco/alarm_control_panel.py b/homeassistant/components/risco/alarm_control_panel.py index 08dee936d37..b1eae8fd917 100644 --- a/homeassistant/components/risco/alarm_control_panel.py +++ b/homeassistant/components/risco/alarm_control_panel.py @@ -12,19 +12,11 @@ from pyrisco.local.partition import Partition as LocalPartition from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_PIN, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.const import CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -48,10 +40,10 @@ from .entity import RiscoCloudEntity _LOGGER = logging.getLogger(__name__) STATES_TO_SUPPORTED_FEATURES = { - STATE_ALARM_ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, - 
STATE_ALARM_ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, - STATE_ALARM_ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, + AlarmControlPanelState.ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, + AlarmControlPanelState.ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, } @@ -116,14 +108,14 @@ class RiscoAlarm(AlarmControlPanelEntity): self._attr_supported_features |= STATES_TO_SUPPORTED_FEATURES[state] @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" if self._partition.triggered: - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED if self._partition.arming: - return STATE_ALARM_ARMING + return AlarmControlPanelState.ARMING if self._partition.disarmed: - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED if self._partition.armed: return self._risco_to_ha[RISCO_ARM] if self._partition.partially_armed: @@ -148,21 +140,21 @@ class RiscoAlarm(AlarmControlPanelEntity): async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - await self._arm(STATE_ALARM_ARMED_HOME, code) + await self._arm(AlarmControlPanelState.ARMED_HOME, code) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - await self._arm(STATE_ALARM_ARMED_AWAY, code) + await self._arm(AlarmControlPanelState.ARMED_AWAY, code) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" - await self._arm(STATE_ALARM_ARMED_NIGHT, code) + await self._arm(AlarmControlPanelState.ARMED_NIGHT, code) async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None: """Send arm custom bypass command.""" - await self._arm(STATE_ALARM_ARMED_CUSTOM_BYPASS, code) + await self._arm(AlarmControlPanelState.ARMED_CUSTOM_BYPASS, code) - async def _arm(self, mode: str, code: str | None) -> None: + async def _arm(self, mode: AlarmControlPanelState, code: str | None) -> None: if self.code_arm_required and not self._validate_code(code): _LOGGER.warning("Wrong code entered for %s", mode) return diff --git a/homeassistant/components/risco/config_flow.py b/homeassistant/components/risco/config_flow.py index 735880df09b..f7365d35414 100644 --- a/homeassistant/components/risco/config_flow.py +++ b/homeassistant/components/risco/config_flow.py @@ -9,6 +9,7 @@ from typing import Any from pyrisco import CannotConnectError, RiscoCloud, RiscoLocal, UnauthorizedError import voluptuous as vol +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, @@ -23,10 +24,6 @@ from homeassistant.const import ( CONF_SCAN_INTERVAL, CONF_TYPE, CONF_USERNAME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -64,10 +61,10 @@ LOCAL_SCHEMA = vol.Schema( } ) HA_STATES = [ - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_AWAY.value, + 
AlarmControlPanelState.ARMED_HOME.value, + AlarmControlPanelState.ARMED_NIGHT.value, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS.value, ] @@ -223,7 +220,6 @@ class RiscoOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize.""" - self.config_entry = config_entry self._data = {**DEFAULT_OPTIONS, **config_entry.options} def _options_schema(self) -> vol.Schema: diff --git a/homeassistant/components/risco/const.py b/homeassistant/components/risco/const.py index f1240a704de..078e26c43b5 100644 --- a/homeassistant/components/risco/const.py +++ b/homeassistant/components/risco/const.py @@ -1,10 +1,7 @@ """Constants for the Risco integration.""" -from homeassistant.const import ( - CONF_SCAN_INTERVAL, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState +from homeassistant.const import CONF_SCAN_INTERVAL DOMAIN = "risco" @@ -33,16 +30,18 @@ RISCO_ARM = "arm" RISCO_PARTIAL_ARM = "partial_arm" RISCO_STATES = [RISCO_ARM, RISCO_PARTIAL_ARM, *RISCO_GROUPS] -DEFAULT_RISCO_GROUPS_TO_HA = {group: STATE_ALARM_ARMED_HOME for group in RISCO_GROUPS} +DEFAULT_RISCO_GROUPS_TO_HA = { + group: AlarmControlPanelState.ARMED_HOME for group in RISCO_GROUPS +} DEFAULT_RISCO_STATES_TO_HA = { - RISCO_ARM: STATE_ALARM_ARMED_AWAY, - RISCO_PARTIAL_ARM: STATE_ALARM_ARMED_HOME, + RISCO_ARM: AlarmControlPanelState.ARMED_AWAY, + RISCO_PARTIAL_ARM: AlarmControlPanelState.ARMED_HOME, **DEFAULT_RISCO_GROUPS_TO_HA, } DEFAULT_HA_STATES_TO_RISCO = { - STATE_ALARM_ARMED_AWAY: RISCO_ARM, - STATE_ALARM_ARMED_HOME: RISCO_PARTIAL_ARM, + AlarmControlPanelState.ARMED_AWAY: RISCO_ARM, + AlarmControlPanelState.ARMED_HOME: RISCO_PARTIAL_ARM, } DEFAULT_OPTIONS = { diff --git a/homeassistant/components/risco/manifest.json b/homeassistant/components/risco/manifest.json index 372d8e0c629..149b8761589 100644 --- a/homeassistant/components/risco/manifest.json +++ b/homeassistant/components/risco/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/risco", "iot_class": "local_push", "loggers": ["pyrisco"], - "quality_scale": "platinum", - "requirements": ["pyrisco==0.6.4"] + "requirements": ["pyrisco==0.6.5"] } diff --git a/homeassistant/components/risco/strings.json b/homeassistant/components/risco/strings.json index e35b13394cb..86d131b4f80 100644 --- a/homeassistant/components/risco/strings.json +++ b/homeassistant/components/risco/strings.json @@ -28,7 +28,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "options": { diff --git a/homeassistant/components/rituals_perfume_genie/__init__.py b/homeassistant/components/rituals_perfume_genie/__init__.py index 792a470ca3c..d0d16ba6324 100644 --- a/homeassistant/components/rituals_perfume_genie/__init__.py +++ b/homeassistant/components/rituals_perfume_genie/__init__.py @@ -12,7 +12,7 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import ACCOUNT_HASH, DOMAIN +from .const import ACCOUNT_HASH, DOMAIN, UPDATE_INTERVAL from .coordinator import RitualsDataUpdateCoordinator PLATFORMS = [ @@ -37,9 +37,14 @@ 
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Migrate old unique_ids to the new format async_migrate_entities_unique_ids(hass, entry, account_devices) + # The API provided by Rituals is currently rate limited to 30 requests + # per hour per IP address. To avoid hitting this limit, we will adjust + # the polling interval based on the number of diffusers one has. + update_interval = UPDATE_INTERVAL * len(account_devices) + # Create a coordinator for each diffuser coordinators = { - diffuser.hublot: RitualsDataUpdateCoordinator(hass, diffuser) + diffuser.hublot: RitualsDataUpdateCoordinator(hass, diffuser, update_interval) for diffuser in account_devices } diff --git a/homeassistant/components/rituals_perfume_genie/config_flow.py b/homeassistant/components/rituals_perfume_genie/config_flow.py index 4f108d9bc22..f6736ab78e4 100644 --- a/homeassistant/components/rituals_perfume_genie/config_flow.py +++ b/homeassistant/components/rituals_perfume_genie/config_flow.py @@ -45,6 +45,7 @@ class RitualsPerfumeGenieConfigFlow(ConfigFlow, domain=DOMAIN): try: await account.authenticate() except ClientResponseError: + _LOGGER.exception("Unexpected response") errors["base"] = "cannot_connect" except AuthenticationException: errors["base"] = "invalid_auth" diff --git a/homeassistant/components/rituals_perfume_genie/const.py b/homeassistant/components/rituals_perfume_genie/const.py index 35d1c32d306..45428ced9d2 100644 --- a/homeassistant/components/rituals_perfume_genie/const.py +++ b/homeassistant/components/rituals_perfume_genie/const.py @@ -6,4 +6,8 @@ DOMAIN = "rituals_perfume_genie" ACCOUNT_HASH = "account_hash" -UPDATE_INTERVAL = timedelta(minutes=2) +# The API provided by Rituals is currently rate limited to 30 requests +# per hour per IP address. To avoid hitting this limit, the polling +# interval is set to 3 minutes. This also gives a little room for +# Home Assistant restarts. 
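Taken together, the two Rituals changes above keep an account under the stated limit: each of the N per-diffuser coordinators polls every N × 3 minutes, so the account as a whole issues about 20 requests per hour no matter how many diffusers it has (assuming one API request per coordinator refresh). A small illustrative check of that arithmetic; the names below are local to the example, not part of the integration:

    from datetime import timedelta

    BASE_INTERVAL = timedelta(minutes=3)  # mirrors UPDATE_INTERVAL below


    def account_requests_per_hour(device_count: int) -> float:
        """Total polls per hour across one account's diffusers (illustrative)."""
        per_device_interval = BASE_INTERVAL * device_count  # what async_setup_entry passes in
        polls_per_device_per_hour = timedelta(hours=1) / per_device_interval
        return device_count * polls_per_device_per_hour


    # 1, 2, 5 or 10 diffusers -> always 20 requests/hour, below the 30/hour cap.
    assert all(account_requests_per_hour(n) == 20 for n in (1, 2, 5, 10))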
+UPDATE_INTERVAL = timedelta(minutes=3) diff --git a/homeassistant/components/rituals_perfume_genie/coordinator.py b/homeassistant/components/rituals_perfume_genie/coordinator.py index 4c86f110b17..a83e823bd4e 100644 --- a/homeassistant/components/rituals_perfume_genie/coordinator.py +++ b/homeassistant/components/rituals_perfume_genie/coordinator.py @@ -1,5 +1,6 @@ """The Rituals Perfume Genie data update coordinator.""" +from datetime import timedelta import logging from pyrituals import Diffuser @@ -7,7 +8,7 @@ from pyrituals import Diffuser from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DOMAIN, UPDATE_INTERVAL +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -15,14 +16,19 @@ _LOGGER = logging.getLogger(__name__) class RitualsDataUpdateCoordinator(DataUpdateCoordinator[None]): """Class to manage fetching Rituals Perfume Genie device data from single endpoint.""" - def __init__(self, hass: HomeAssistant, diffuser: Diffuser) -> None: + def __init__( + self, + hass: HomeAssistant, + diffuser: Diffuser, + update_interval: timedelta, + ) -> None: """Initialize global Rituals Perfume Genie data updater.""" self.diffuser = diffuser super().__init__( hass, _LOGGER, name=f"{DOMAIN}-{diffuser.hublot}", - update_interval=UPDATE_INTERVAL, + update_interval=update_interval, ) async def _async_update_data(self) -> None: diff --git a/homeassistant/components/rituals_perfume_genie/manifest.json b/homeassistant/components/rituals_perfume_genie/manifest.json index 996dd1faecf..114491d9122 100644 --- a/homeassistant/components/rituals_perfume_genie/manifest.json +++ b/homeassistant/components/rituals_perfume_genie/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/rituals_perfume_genie", "iot_class": "cloud_polling", "loggers": ["pyrituals"], - "quality_scale": "silver", "requirements": ["pyrituals==0.0.6"] } diff --git a/homeassistant/components/rituals_perfume_genie/select.py b/homeassistant/components/rituals_perfume_genie/select.py index e93d6ae03ef..27aff70649b 100644 --- a/homeassistant/components/rituals_perfume_genie/select.py +++ b/homeassistant/components/rituals_perfume_genie/select.py @@ -9,7 +9,7 @@ from pyrituals import Diffuser from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.config_entries import ConfigEntry -from homeassistant.const import AREA_SQUARE_METERS, EntityCategory +from homeassistant.const import EntityCategory, UnitOfArea from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -30,7 +30,7 @@ ENTITY_DESCRIPTIONS = ( RitualsSelectEntityDescription( key="room_size_square_meter", translation_key="room_size_square_meter", - unit_of_measurement=AREA_SQUARE_METERS, + unit_of_measurement=UnitOfArea.SQUARE_METERS, entity_category=EntityCategory.CONFIG, options=["15", "30", "60", "100"], current_fn=lambda diffuser: str(diffuser.room_size_square_meter), diff --git a/homeassistant/components/rmvtransport/manifest.json b/homeassistant/components/rmvtransport/manifest.json index 81b650bcdc0..30be5417ff6 100644 --- a/homeassistant/components/rmvtransport/manifest.json +++ b/homeassistant/components/rmvtransport/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/rmvtransport", "iot_class": "cloud_polling", "loggers": ["RMVtransport"], + "quality_scale": "legacy", "requirements": 
["PyRMVtransport==0.3.3"] } diff --git a/homeassistant/components/rmvtransport/sensor.py b/homeassistant/components/rmvtransport/sensor.py index e8b976129c5..8fd437e7e1d 100644 --- a/homeassistant/components/rmvtransport/sensor.py +++ b/homeassistant/components/rmvtransport/sensor.py @@ -271,7 +271,7 @@ class RMVDepartureData: if not dest_found: continue - elif ( + if ( self._lines and journey["number"] not in self._lines or journey["minutes"] < self._time_offset @@ -289,6 +289,6 @@ class RMVDepartureData: if not self._error_notification and _deps_not_found: self._error_notification = True - _LOGGER.info("Destination(s) %s not found", ", ".join(_deps_not_found)) + _LOGGER.warning("Destination(s) %s not found", ", ".join(_deps_not_found)) self.departures = _deps diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index 88a603eca2b..d02dddece42 100644 --- a/homeassistant/components/roborock/__init__.py +++ b/homeassistant/components/roborock/__init__.py @@ -47,7 +47,6 @@ class RoborockCoordinators: async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> bool: """Set up roborock from a config entry.""" - _LOGGER.debug("Integration async setup entry: %s", entry.as_dict()) entry.async_on_unload(entry.add_update_listener(update_listener)) user_data = UserData.from_dict(entry.data[CONF_USER_DATA]) @@ -151,7 +150,7 @@ async def setup_device( ) if device.pv == "A01": return await setup_device_a01(hass, user_data, device, product_info) - _LOGGER.info( + _LOGGER.warning( "Not adding device %s because its protocol version %s or category %s is not supported", device.duid, device.pv, @@ -169,7 +168,7 @@ async def setup_device_v1( ) -> RoborockDataUpdateCoordinator | None: """Set up a device Coordinator.""" mqtt_client = await hass.async_add_executor_job( - RoborockMqttClientV1, user_data, DeviceData(device, product_info.name) + RoborockMqttClientV1, user_data, DeviceData(device, product_info.model) ) try: networking = await mqtt_client.get_networking() diff --git a/homeassistant/components/roborock/binary_sensor.py b/homeassistant/components/roborock/binary_sensor.py index fb35a50c210..b88556ea857 100644 --- a/homeassistant/components/roborock/binary_sensor.py +++ b/homeassistant/components/roborock/binary_sensor.py @@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import RoborockConfigEntry from .coordinator import RoborockDataUpdateCoordinator -from .device import RoborockCoordinatedEntityV1 +from .entity import RoborockCoordinatedEntityV1 @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/roborock/button.py b/homeassistant/components/roborock/button.py index 31421320c41..2f214c7c51c 100644 --- a/homeassistant/components/roborock/button.py +++ b/homeassistant/components/roborock/button.py @@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import RoborockConfigEntry from .coordinator import RoborockDataUpdateCoordinator -from .device import RoborockEntityV1 +from .entity import RoborockEntityV1 @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index c6dee7ce4ed..200614b024e 100644 --- a/homeassistant/components/roborock/config_flow.py +++ b/homeassistant/components/roborock/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +from copy import deepcopy import logging from typing import Any @@ -19,11 +20,11 @@ from roborock.web_api import RoborockApiClient import voluptuous as vol from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_USERNAME from homeassistant.core import callback @@ -44,7 +45,6 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Roborock.""" VERSION = 1 - reauth_entry: ConfigEntry | None = None def __init__(self) -> None: """Initialize the config flow.""" @@ -116,11 +116,12 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self.reauth_entry is not None: + if self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() self.hass.config_entries.async_update_entry( - self.reauth_entry, + reauth_entry, data={ - **self.reauth_entry.data, + **reauth_entry.data, CONF_USER_DATA: login_data.as_dict(), }, ) @@ -140,9 +141,6 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): self._username = entry_data[CONF_USERNAME] assert self._username self._client = RoborockApiClient(self._username) - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -173,14 +171,18 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> RoborockOptionsFlowHandler: """Create the options flow.""" return RoborockOptionsFlowHandler(config_entry) -class RoborockOptionsFlowHandler(OptionsFlowWithConfigEntry): +class RoborockOptionsFlowHandler(OptionsFlow): """Handle an option flow for Roborock.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.options = deepcopy(dict(config_entry.options)) + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/roborock/coordinator.py b/homeassistant/components/roborock/coordinator.py index 615d18c3019..fe592074f71 100644 --- a/homeassistant/components/roborock/coordinator.py +++ b/homeassistant/components/roborock/coordinator.py @@ -2,11 +2,10 @@ from __future__ import annotations -import asyncio from datetime import timedelta -from functools import cached_property import logging +from propcache import cached_property from roborock import HomeDataRoom from roborock.code_mappings import RoborockCategory from roborock.containers import DeviceData, HomeDataDevice, HomeDataProduct, NetworkInfo @@ -63,6 +62,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): identifiers={(DOMAIN, self.roborock_device_info.device.duid)}, manufacturer="Roborock", model=self.roborock_device_info.product.model, + model_id=self.roborock_device_info.product.model, 
sw_version=self.roborock_device_info.device.fv, ) self.current_map: int | None = None @@ -106,8 +106,12 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): async def _async_update_data(self) -> DeviceProp: """Update data via library.""" try: - await asyncio.gather(*(self._update_device_prop(), self.get_rooms())) + # Update device props and standard api information + await self._update_device_prop() + # Set the new map id from the updated device props self._set_current_map() + # Get the rooms for that map id. + await self.get_rooms() except RoborockException as ex: raise UpdateFailed(ex) from ex return self.roborock_device_info.props diff --git a/homeassistant/components/roborock/diagnostics.py b/homeassistant/components/roborock/diagnostics.py index 63de0da6a7f..e784e4ce837 100644 --- a/homeassistant/components/roborock/diagnostics.py +++ b/homeassistant/components/roborock/diagnostics.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.const import CONF_UNIQUE_ID from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/roborock/device.py b/homeassistant/components/roborock/entity.py similarity index 100% rename from homeassistant/components/roborock/device.py rename to homeassistant/components/roborock/entity.py diff --git a/homeassistant/components/roborock/icons.json b/homeassistant/components/roborock/icons.json index 6a615ab82a1..c7df6d35460 100644 --- a/homeassistant/components/roborock/icons.json +++ b/homeassistant/components/roborock/icons.json @@ -119,6 +119,8 @@ } }, "services": { - "get_maps": "mdi:floor-plan" + "get_maps": { + "service": "mdi:floor-plan" + } } } diff --git a/homeassistant/components/roborock/image.py b/homeassistant/components/roborock/image.py index 4ead7e9635d..ee48656290f 100644 --- a/homeassistant/components/roborock/image.py +++ b/homeassistant/components/roborock/image.py @@ -23,7 +23,7 @@ import homeassistant.util.dt as dt_util from . import RoborockConfigEntry from .const import DEFAULT_DRAWABLES, DOMAIN, DRAWABLES, IMAGE_CACHE_INTERVAL, MAP_SLEEP from .coordinator import RoborockDataUpdateCoordinator -from .device import RoborockCoordinatedEntityV1 +from .entity import RoborockCoordinatedEntityV1 async def async_setup_entry( diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index 3bb3b9b2046..c305e4710fc 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["roborock"], "requirements": [ - "python-roborock==2.6.0", + "python-roborock==2.7.2", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/homeassistant/components/roborock/number.py b/homeassistant/components/roborock/number.py index 92552ca85d8..7f568ae824b 100644 --- a/homeassistant/components/roborock/number.py +++ b/homeassistant/components/roborock/number.py @@ -13,11 +13,12 @@ from roborock.version_1_apis.roborock_client_v1 import AttributeCache from homeassistant.components.number import NumberEntity, NumberEntityDescription from homeassistant.const import PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import RoborockConfigEntry +from . import DOMAIN, RoborockConfigEntry from .coordinator import RoborockDataUpdateCoordinator -from .device import RoborockEntityV1 +from .entity import RoborockEntityV1 _LOGGER = logging.getLogger(__name__) @@ -107,6 +108,12 @@ class RoborockNumberEntity(RoborockEntityV1, NumberEntity): async def async_set_native_value(self, value: float) -> None: """Set number value.""" - await self.entity_description.update_value( - self.get_cache(self.entity_description.cache_key), value - ) + try: + await self.entity_description.update_value( + self.get_cache(self.entity_description.cache_key), value + ) + except RoborockException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="update_options_failed", + ) from err diff --git a/homeassistant/components/roborock/select.py b/homeassistant/components/roborock/select.py index f047ec475c2..73cb95d2d7c 100644 --- a/homeassistant/components/roborock/select.py +++ b/homeassistant/components/roborock/select.py @@ -1,5 +1,6 @@ """Support for Roborock select.""" +import asyncio from collections.abc import Callable from dataclasses import dataclass @@ -13,8 +14,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import RoborockConfigEntry +from .const import MAP_SLEEP from .coordinator import RoborockDataUpdateCoordinator -from .device import RoborockCoordinatedEntityV1 +from .entity import RoborockCoordinatedEntityV1 @dataclass(frozen=True, kw_only=True) @@ -133,6 +135,12 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): RoborockCommand.LOAD_MULTI_MAP, [map_id], ) + # Update the current map id manually so that nothing gets broken + # if another service hits the api. + self.coordinator.current_map = map_id + # We need to wait after updating the map + # so that other commands will be executed correctly. + await asyncio.sleep(MAP_SLEEP) break @property @@ -143,6 +151,9 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): @property def current_option(self) -> str | None: """Get the current status of the select entity from device_status.""" - if current_map := self.coordinator.current_map: + if ( + (current_map := self.coordinator.current_map) is not None + and current_map in self.coordinator.maps + ): # 63 means it is searching for a map. return self.coordinator.maps[current_map].name return None diff --git a/homeassistant/components/roborock/sensor.py b/homeassistant/components/roborock/sensor.py index b247dc6936d..47849ed5cc5 100644 --- a/homeassistant/components/roborock/sensor.py +++ b/homeassistant/components/roborock/sensor.py @@ -25,19 +25,14 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.const import ( - AREA_SQUARE_METERS, - PERCENTAGE, - EntityCategory, - UnitOfTime, -) +from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfArea, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from . 
import RoborockConfigEntry from .coordinator import RoborockDataUpdateCoordinator, RoborockDataUpdateCoordinatorA01 -from .device import RoborockCoordinatedEntityA01, RoborockCoordinatedEntityV1 +from .entity import RoborockCoordinatedEntityA01, RoborockCoordinatedEntityV1 @dataclass(frozen=True, kw_only=True) @@ -131,14 +126,14 @@ SENSOR_DESCRIPTIONS = [ translation_key="cleaning_area", value_fn=lambda data: data.status.square_meter_clean_area, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, ), RoborockSensorDescription( key="total_cleaning_area", translation_key="total_cleaning_area", value_fn=lambda data: data.clean_summary.square_meter_clean_area, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, ), RoborockSensorDescription( key="vacuum_error", diff --git a/homeassistant/components/roborock/strings.json b/homeassistant/components/roborock/strings.json index d1fc50f27e8..8ff82cae393 100644 --- a/homeassistant/components/roborock/strings.json +++ b/homeassistant/components/roborock/strings.json @@ -419,6 +419,9 @@ }, "no_coordinators": { "message": "No devices were able to successfully setup" + }, + "update_options_failed": { + "message": "Failed to update Roborock options" } }, "services": { diff --git a/homeassistant/components/roborock/switch.py b/homeassistant/components/roborock/switch.py index ef46fe61415..b0c8c880188 100644 --- a/homeassistant/components/roborock/switch.py +++ b/homeassistant/components/roborock/switch.py @@ -9,16 +9,18 @@ import logging from typing import Any from roborock.command_cache import CacheableAttribute +from roborock.exceptions import RoborockException from roborock.version_1_apis.roborock_client_v1 import AttributeCache from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RoborockConfigEntry +from . 
import DOMAIN, RoborockConfigEntry from .coordinator import RoborockDataUpdateCoordinator -from .device import RoborockEntityV1 +from .entity import RoborockEntityV1 _LOGGER = logging.getLogger(__name__) @@ -149,15 +151,27 @@ class RoborockSwitch(RoborockEntityV1, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the switch.""" - await self.entity_description.update_value( - self.get_cache(self.entity_description.cache_key), False - ) + try: + await self.entity_description.update_value( + self.get_cache(self.entity_description.cache_key), False + ) + except RoborockException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="update_options_failed", + ) from err async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the switch.""" - await self.entity_description.update_value( - self.get_cache(self.entity_description.cache_key), True - ) + try: + await self.entity_description.update_value( + self.get_cache(self.entity_description.cache_key), True + ) + except RoborockException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="update_options_failed", + ) from err @property def is_on(self) -> bool | None: diff --git a/homeassistant/components/roborock/time.py b/homeassistant/components/roborock/time.py index 1136170192d..1dd681dff1f 100644 --- a/homeassistant/components/roborock/time.py +++ b/homeassistant/components/roborock/time.py @@ -15,11 +15,12 @@ from roborock.version_1_apis.roborock_client_v1 import AttributeCache from homeassistant.components.time import TimeEntity, TimeEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RoborockConfigEntry +from . import DOMAIN, RoborockConfigEntry from .coordinator import RoborockDataUpdateCoordinator -from .device import RoborockEntityV1 +from .entity import RoborockEntityV1 _LOGGER = logging.getLogger(__name__) @@ -172,6 +173,12 @@ class RoborockTimeEntity(RoborockEntityV1, TimeEntity): async def async_set_value(self, value: time) -> None: """Set the time.""" - await self.entity_description.update_value( - self.get_cache(self.entity_description.cache_key), value - ) + try: + await self.entity_description.update_value( + self.get_cache(self.entity_description.cache_key), value + ) + except RoborockException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="update_options_failed", + ) from err diff --git a/homeassistant/components/roborock/vacuum.py b/homeassistant/components/roborock/vacuum.py index 81a10e26415..d3413bd7cbd 100644 --- a/homeassistant/components/roborock/vacuum.py +++ b/homeassistant/components/roborock/vacuum.py @@ -8,13 +8,8 @@ from roborock.roborock_message import RoborockDataProtocol from roborock.roborock_typing import RoborockCommand from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.core import HomeAssistant, ServiceResponse, SupportsResponse @@ -24,32 +19,32 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import RoborockConfigEntry from .const import DOMAIN, GET_MAPS_SERVICE_NAME from .coordinator import RoborockDataUpdateCoordinator -from .device import RoborockCoordinatedEntityV1 +from .entity import RoborockCoordinatedEntityV1 STATE_CODE_TO_STATE = { - RoborockStateCode.starting: STATE_IDLE, # "Starting" - RoborockStateCode.charger_disconnected: STATE_IDLE, # "Charger disconnected" - RoborockStateCode.idle: STATE_IDLE, # "Idle" - RoborockStateCode.remote_control_active: STATE_CLEANING, # "Remote control active" - RoborockStateCode.cleaning: STATE_CLEANING, # "Cleaning" - RoborockStateCode.returning_home: STATE_RETURNING, # "Returning home" - RoborockStateCode.manual_mode: STATE_CLEANING, # "Manual mode" - RoborockStateCode.charging: STATE_DOCKED, # "Charging" - RoborockStateCode.charging_problem: STATE_ERROR, # "Charging problem" - RoborockStateCode.paused: STATE_PAUSED, # "Paused" - RoborockStateCode.spot_cleaning: STATE_CLEANING, # "Spot cleaning" - RoborockStateCode.error: STATE_ERROR, # "Error" - RoborockStateCode.shutting_down: STATE_IDLE, # "Shutting down" - RoborockStateCode.updating: STATE_DOCKED, # "Updating" - RoborockStateCode.docking: STATE_RETURNING, # "Docking" - RoborockStateCode.going_to_target: STATE_CLEANING, # "Going to target" - RoborockStateCode.zoned_cleaning: STATE_CLEANING, # "Zoned cleaning" - RoborockStateCode.segment_cleaning: STATE_CLEANING, # "Segment cleaning" - RoborockStateCode.emptying_the_bin: STATE_DOCKED, # "Emptying the bin" on s7+ - RoborockStateCode.washing_the_mop: STATE_DOCKED, # "Washing the mop" on s7maxV - RoborockStateCode.going_to_wash_the_mop: STATE_RETURNING, # "Going to wash the mop" on s7maxV - RoborockStateCode.charging_complete: STATE_DOCKED, # "Charging complete" - RoborockStateCode.device_offline: STATE_ERROR, # "Device offline" + RoborockStateCode.starting: VacuumActivity.IDLE, # "Starting" + RoborockStateCode.charger_disconnected: VacuumActivity.IDLE, # "Charger disconnected" + RoborockStateCode.idle: VacuumActivity.IDLE, # "Idle" + RoborockStateCode.remote_control_active: VacuumActivity.CLEANING, # "Remote control active" + RoborockStateCode.cleaning: VacuumActivity.CLEANING, # "Cleaning" + RoborockStateCode.returning_home: VacuumActivity.RETURNING, # "Returning home" + RoborockStateCode.manual_mode: VacuumActivity.CLEANING, # "Manual mode" + RoborockStateCode.charging: VacuumActivity.DOCKED, # "Charging" + RoborockStateCode.charging_problem: VacuumActivity.ERROR, # "Charging problem" + RoborockStateCode.paused: VacuumActivity.PAUSED, # "Paused" + RoborockStateCode.spot_cleaning: VacuumActivity.CLEANING, # "Spot cleaning" + RoborockStateCode.error: VacuumActivity.ERROR, # "Error" + RoborockStateCode.shutting_down: VacuumActivity.IDLE, # "Shutting down" + RoborockStateCode.updating: VacuumActivity.DOCKED, # "Updating" + RoborockStateCode.docking: VacuumActivity.RETURNING, # "Docking" + RoborockStateCode.going_to_target: VacuumActivity.CLEANING, # "Going to target" + RoborockStateCode.zoned_cleaning: VacuumActivity.CLEANING, # "Zoned cleaning" + RoborockStateCode.segment_cleaning: VacuumActivity.CLEANING, # "Segment cleaning" + RoborockStateCode.emptying_the_bin: VacuumActivity.DOCKED, # "Emptying the bin" on s7+ + RoborockStateCode.washing_the_mop: VacuumActivity.DOCKED, # "Washing the mop" on s7maxV + RoborockStateCode.going_to_wash_the_mop: VacuumActivity.RETURNING, # "Going to wash the mop" on s7maxV + RoborockStateCode.charging_complete: VacuumActivity.DOCKED, # "Charging complete" + RoborockStateCode.device_offline: 
VacuumActivity.ERROR, # "Device offline" } @@ -112,7 +107,7 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity): self._attr_fan_speed_list = self._device_status.fan_power_options @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return the status of the vacuum cleaner.""" assert self._device_status.state is not None return STATE_CODE_TO_STATE.get(self._device_status.state) diff --git a/homeassistant/components/rocketchat/manifest.json b/homeassistant/components/rocketchat/manifest.json index 50d7579df02..f4f72f02a10 100644 --- a/homeassistant/components/rocketchat/manifest.json +++ b/homeassistant/components/rocketchat/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/rocketchat", "iot_class": "cloud_push", "loggers": ["rocketchat_API"], + "quality_scale": "legacy", "requirements": ["rocketchat-API==0.6.1"] } diff --git a/homeassistant/components/rocketchat/notify.py b/homeassistant/components/rocketchat/notify.py index e39fb2dc0a1..a06226d22ee 100644 --- a/homeassistant/components/rocketchat/notify.py +++ b/homeassistant/components/rocketchat/notify.py @@ -52,8 +52,10 @@ def get_service( except RocketConnectionException: _LOGGER.warning("Unable to connect to Rocket.Chat server at %s", url) except RocketAuthenticationException: - _LOGGER.warning("Rocket.Chat authentication failed for user %s", username) - _LOGGER.info("Please check your username/password") + _LOGGER.warning( + "Rocket.Chat authentication failed for user %s. Please check your username/password", + username, + ) return None diff --git a/homeassistant/components/roku/__init__.py b/homeassistant/components/roku/__init__.py index 7515f375054..e6b92d91335 100644 --- a/homeassistant/components/roku/__init__.py +++ b/homeassistant/components/roku/__init__.py @@ -6,7 +6,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN +from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID from .coordinator import RokuDataUpdateCoordinator PLATFORMS = [ @@ -17,26 +17,38 @@ PLATFORMS = [ Platform.SENSOR, ] +type RokuConfigEntry = ConfigEntry[RokuDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: RokuConfigEntry) -> bool: """Set up Roku from a config entry.""" if (device_id := entry.unique_id) is None: device_id = entry.entry_id coordinator = RokuDataUpdateCoordinator( - hass, host=entry.data[CONF_HOST], device_id=device_id + hass, + host=entry.data[CONF_HOST], + device_id=device_id, + play_media_app_id=entry.options.get( + CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID + ), ) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(async_reload_entry)) + return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RokuConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + 
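The `type RokuConfigEntry = ConfigEntry[RokuDataUpdateCoordinator]` alias above is what lets the rest of the Roku changes drop the `hass.data[DOMAIN][entry.entry_id]` bookkeeping: setup stores the coordinator on `entry.runtime_data`, and every consumer reads it back fully typed. A condensed sketch of the consuming side, distilled from the diagnostics change that appears further down in this diff:

    from typing import Any

    from homeassistant.core import HomeAssistant

    from . import RokuConfigEntry  # the ConfigEntry[RokuDataUpdateCoordinator] alias above


    async def async_get_config_entry_diagnostics(
        hass: HomeAssistant, entry: RokuConfigEntry
    ) -> dict[str, Any]:
        """Read the coordinator straight off the entry; no hass.data lookup (sketch)."""
        coordinator = entry.runtime_data  # typed as RokuDataUpdateCoordinator
        return {"data": coordinator.data.as_dict()}

Because the coordinator lives on the entry, unloading no longer needs any manual cleanup beyond unloading the platforms, which is why `async_unload_entry` above collapses to a single return statement.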
+async def async_reload_entry(hass: HomeAssistant, entry: RokuConfigEntry) -> None: + """Reload the config entry when it changed.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/roku/binary_sensor.py b/homeassistant/components/roku/binary_sensor.py index 0f5f29f63f6..2e7fd12788c 100644 --- a/homeassistant/components/roku/binary_sensor.py +++ b/homeassistant/components/roku/binary_sensor.py @@ -11,14 +11,16 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import RokuConfigEntry from .entity import RokuEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RokuBinarySensorEntityDescription(BinarySensorEntityDescription): @@ -56,15 +58,13 @@ BINARY_SENSORS: tuple[RokuBinarySensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Roku binary sensors based on a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( RokuBinarySensorEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=description, ) for description in BINARY_SENSORS diff --git a/homeassistant/components/roku/config_flow.py b/homeassistant/components/roku/config_flow.py index 7757cc53e1c..bc0092d6953 100644 --- a/homeassistant/components/roku/config_flow.py +++ b/homeassistant/components/roku/config_flow.py @@ -10,12 +10,18 @@ from rokuecp import Roku, RokuError import voluptuous as vol from homeassistant.components import ssdp, zeroconf -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN +from . 
import RokuConfigEntry +from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) @@ -52,20 +58,38 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): self.discovery_info = {} @callback - def _show_form(self, errors: dict[str, Any] | None = None) -> ConfigFlowResult: + def _show_form( + self, + user_input: dict[str, Any] | None, + errors: dict[str, Any] | None = None, + ) -> ConfigFlowResult: """Show the form to the user.""" + suggested_values = user_input + if suggested_values is None and self.source == SOURCE_RECONFIGURE: + suggested_values = { + CONF_HOST: self._get_reconfigure_entry().data[CONF_HOST] + } + return self.async_show_form( step_id="user", - data_schema=DATA_SCHEMA, + data_schema=self.add_suggested_values_to_schema( + DATA_SCHEMA, suggested_values + ), errors=errors or {}, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + return await self.async_step_user(user_input) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if not user_input: - return self._show_form() + return self._show_form(user_input) errors = {} @@ -74,13 +98,21 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): except RokuError: _LOGGER.debug("Roku Error", exc_info=True) errors["base"] = ERROR_CANNOT_CONNECT - return self._show_form(errors) + return self._show_form(user_input, errors) except Exception: _LOGGER.exception("Unknown error trying to connect") return self.async_abort(reason=ERROR_UNKNOWN) await self.async_set_unique_id(info["serial_number"]) - self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]}) + + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="wrong_device") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates={CONF_HOST: user_input[CONF_HOST]}, + ) + + self._abort_if_unique_id_configured() return self.async_create_entry(title=info["title"], data=user_input) @@ -155,3 +187,36 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): title=self.discovery_info[CONF_NAME], data=self.discovery_info, ) + + @staticmethod + @callback + def async_get_options_flow( + config_entry: RokuConfigEntry, + ) -> RokuOptionsFlowHandler: + """Create the options flow.""" + return RokuOptionsFlowHandler() + + +class RokuOptionsFlowHandler(OptionsFlow): + """Handle Roku options.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage Roku options.""" + if user_input is not None: + return self.async_create_entry(title="", data=user_input) + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema( + { + vol.Optional( + CONF_PLAY_MEDIA_APP_ID, + default=self.config_entry.options.get( + CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID + ), + ): str, + } + ), + ) diff --git a/homeassistant/components/roku/const.py b/homeassistant/components/roku/const.py index ab633a4044c..f0c7d4e2537 100644 --- a/homeassistant/components/roku/const.py +++ b/homeassistant/components/roku/const.py @@ -15,3 +15,9 @@ DEFAULT_PORT = 8060 # Services SERVICE_SEARCH = "search" + +# Config +CONF_PLAY_MEDIA_APP_ID = "play_media_app_id" + +# Defaults +DEFAULT_PLAY_MEDIA_APP_ID = "15985" diff --git a/homeassistant/components/roku/coordinator.py b/homeassistant/components/roku/coordinator.py 
index 303d0e91a36..7900669d02f 100644 --- a/homeassistant/components/roku/coordinator.py +++ b/homeassistant/components/roku/coordinator.py @@ -29,15 +29,12 @@ class RokuDataUpdateCoordinator(DataUpdateCoordinator[Device]): roku: Roku def __init__( - self, - hass: HomeAssistant, - *, - host: str, - device_id: str, + self, hass: HomeAssistant, *, host: str, device_id: str, play_media_app_id: str ) -> None: """Initialize global Roku data updater.""" self.device_id = device_id self.roku = Roku(host=host, session=async_get_clientsession(hass)) + self.play_media_app_id = play_media_app_id self.full_update_interval = timedelta(minutes=15) self.last_full_update = None diff --git a/homeassistant/components/roku/diagnostics.py b/homeassistant/components/roku/diagnostics.py index 6c6809ee33a..e98837ca442 100644 --- a/homeassistant/components/roku/diagnostics.py +++ b/homeassistant/components/roku/diagnostics.py @@ -4,25 +4,21 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . import RokuConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, entry: RokuConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] - return { "entry": { "data": { - **config_entry.data, + **entry.data, }, - "unique_id": config_entry.unique_id, + "unique_id": entry.unique_id, }, - "data": coordinator.data.as_dict(), + "data": entry.runtime_data.data.as_dict(), } diff --git a/homeassistant/components/roku/icons.json b/homeassistant/components/roku/icons.json index 02e5d1e5698..355b5a715e5 100644 --- a/homeassistant/components/roku/icons.json +++ b/homeassistant/components/roku/icons.json @@ -32,6 +32,8 @@ } }, "services": { - "search": "mdi:magnify" + "search": { + "service": "mdi:magnify" + } } } diff --git a/homeassistant/components/roku/manifest.json b/homeassistant/components/roku/manifest.json index fa9823de172..7fe2fb3b686 100644 --- a/homeassistant/components/roku/manifest.json +++ b/homeassistant/components/roku/manifest.json @@ -10,7 +10,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["rokuecp"], - "quality_scale": "silver", "requirements": ["rokuecp==0.19.3"], "ssdp": [ { diff --git a/homeassistant/components/roku/media_player.py b/homeassistant/components/roku/media_player.py index 5b15253068e..0c1f92521af 100644 --- a/homeassistant/components/roku/media_player.py +++ b/homeassistant/components/roku/media_player.py @@ -23,13 +23,13 @@ from homeassistant.components.media_player import ( async_process_play_media_url, ) from homeassistant.components.stream import FORMAT_CONTENT_TYPE, HLS_PROVIDER -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType +from . 
import RokuConfigEntry from .browse_media import async_browse_media from .const import ( ATTR_ARTIST_NAME, @@ -38,7 +38,6 @@ from .const import ( ATTR_KEYWORD, ATTR_MEDIA_TYPE, ATTR_THUMBNAIL, - DOMAIN, SERVICE_SEARCH, ) from .coordinator import RokuDataUpdateCoordinator @@ -47,7 +46,6 @@ from .helpers import format_channel_name, roku_exception_handler _LOGGER = logging.getLogger(__name__) - STREAM_FORMAT_TO_MEDIA_TYPE = { "dash": MediaType.VIDEO, "hls": MediaType.VIDEO, @@ -81,17 +79,17 @@ ATTRS_TO_PLAY_ON_ROKU_AUDIO_PARAMS = { SEARCH_SCHEMA: VolDictType = {vol.Required(ATTR_KEYWORD): str} +PARALLEL_UPDATES = 1 + async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the Roku config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( [ RokuMediaPlayer( - coordinator=coordinator, + coordinator=entry.runtime_data, ) ], True, @@ -445,17 +443,25 @@ class RokuMediaPlayer(RokuEntity, MediaPlayerEntity): if attr in extra } - params = {"t": "a", **params} + params = {"u": media_id, "t": "a", **params} - await self.coordinator.roku.play_on_roku(media_id, params) + await self.coordinator.roku.launch( + self.coordinator.play_media_app_id, + params, + ) elif media_type in {MediaType.URL, MediaType.VIDEO}: params = { param: extra[attr] for (attr, param) in ATTRS_TO_PLAY_ON_ROKU_PARAMS.items() if attr in extra } + params["u"] = media_id + params["t"] = "v" - await self.coordinator.roku.play_on_roku(media_id, params) + await self.coordinator.roku.launch( + self.coordinator.play_media_app_id, + params, + ) else: _LOGGER.error("Media type %s is not supported", original_media_type) return diff --git a/homeassistant/components/roku/remote.py b/homeassistant/components/roku/remote.py index fa351e021e8..f7916fb23a2 100644 --- a/homeassistant/components/roku/remote.py +++ b/homeassistant/components/roku/remote.py @@ -6,28 +6,26 @@ from collections.abc import Iterable from typing import Any from homeassistant.components.remote import ATTR_NUM_REPEATS, RemoteEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . 
import RokuConfigEntry from .entity import RokuEntity from .helpers import roku_exception_handler +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Load Roku remote based on a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( [ RokuRemote( - coordinator=coordinator, + coordinator=entry.runtime_data, ) ], True, diff --git a/homeassistant/components/roku/select.py b/homeassistant/components/roku/select.py index 5f3b9d4049b..360d4e25415 100644 --- a/homeassistant/components/roku/select.py +++ b/homeassistant/components/roku/select.py @@ -9,15 +9,15 @@ from rokuecp import Roku from rokuecp.models import Device as RokuDevice from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . import RokuConfigEntry from .entity import RokuEntity from .helpers import format_channel_name, roku_exception_handler +PARALLEL_UPDATES = 1 + def _get_application_name(device: RokuDevice) -> str | None: if device.app is None or device.app.name is None: @@ -108,16 +108,15 @@ CHANNEL_ENTITY = RokuSelectEntityDescription( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Roku select based on a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - device: RokuDevice = coordinator.data + device: RokuDevice = entry.runtime_data.data entities: list[RokuSelectEntity] = [ RokuSelectEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=description, ) for description in ENTITIES @@ -126,7 +125,7 @@ async def async_setup_entry( if len(device.channels) > 0: entities.append( RokuSelectEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=CHANNEL_ENTITY, ) ) diff --git a/homeassistant/components/roku/sensor.py b/homeassistant/components/roku/sensor.py index ed134cc4c2a..870386945a6 100644 --- a/homeassistant/components/roku/sensor.py +++ b/homeassistant/components/roku/sensor.py @@ -8,15 +8,16 @@ from dataclasses import dataclass from rokuecp.models import Device as RokuDevice from homeassistant.components.sensor import SensorEntity, SensorEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . import RokuConfigEntry from .entity import RokuEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RokuSensorEntityDescription(SensorEntityDescription): @@ -43,15 +44,13 @@ SENSORS: tuple[RokuSensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Roku sensor based on a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( RokuSensorEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=description, ) for description in SENSORS diff --git a/homeassistant/components/roku/strings.json b/homeassistant/components/roku/strings.json index 9eef366163e..bd47585db1b 100644 --- a/homeassistant/components/roku/strings.json +++ b/homeassistant/components/roku/strings.json @@ -21,7 +21,21 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unknown": "[%key:common::config_flow::error::unknown%]", + "wrong_device": "This Roku device does not match the existing device id. Please make sure you entered the correct host information." + } + }, + "options": { + "step": { + "init": { + "data": { + "play_media_app_id": "Play Media Roku Application ID" + }, + "data_description": { + "play_media_app_id": "The application ID to use when launching media playback. Must support the PlayOnRoku API." + } + } } }, "entity": { diff --git a/homeassistant/components/romy/sensor.py b/homeassistant/components/romy/sensor.py index bdd486c4f8f..341125b86ba 100644 --- a/homeassistant/components/romy/sensor.py +++ b/homeassistant/components/romy/sensor.py @@ -8,10 +8,10 @@ from homeassistant.components.sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - AREA_SQUARE_METERS, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfArea, UnitOfLength, UnitOfTime, ) @@ -61,7 +61,7 @@ SENSORS: list[SensorEntityDescription] = [ key="total_area_cleaned", translation_key="total_area_cleaned", state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, entity_category=EntityCategory.DIAGNOSTIC, ), SensorEntityDescription( diff --git a/homeassistant/components/romy/vacuum.py b/homeassistant/components/romy/vacuum.py index de74d371f0e..49129daabbd 100644 --- a/homeassistant/components/romy/vacuum.py +++ b/homeassistant/components/romy/vacuum.py @@ -6,7 +6,11 @@ https://home-assistant.io/components/vacuum.romy/. 
from typing import Any -from homeassistant.components.vacuum import StateVacuumEntity, VacuumEntityFeature +from homeassistant.components.vacuum import ( + StateVacuumEntity, + VacuumActivity, + VacuumEntityFeature, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -75,7 +79,14 @@ class RomyVacuumEntity(RomyEntity, StateVacuumEntity): """Handle updated data from the coordinator.""" self._attr_fan_speed = FAN_SPEEDS[self.romy.fan_speed] self._attr_battery_level = self.romy.battery_level - self._attr_state = self.romy.status + if (status := self.romy.status) is None: + self._attr_activity = None + self.async_write_ha_state() + return + try: + self._attr_activity = VacuumActivity(status) + except ValueError: + self._attr_activity = None self.async_write_ha_state() diff --git a/homeassistant/components/roomba/binary_sensor.py b/homeassistant/components/roomba/binary_sensor.py index 40a5535d5af..baf66375036 100644 --- a/homeassistant/components/roomba/binary_sensor.py +++ b/homeassistant/components/roomba/binary_sensor.py @@ -7,7 +7,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import roomba_reported_state from .const import DOMAIN -from .irobot_base import IRobotEntity +from .entity import IRobotEntity from .models import RoombaData diff --git a/homeassistant/components/roomba/braava.py b/homeassistant/components/roomba/braava.py deleted file mode 100644 index 37411680d0b..00000000000 --- a/homeassistant/components/roomba/braava.py +++ /dev/null @@ -1,128 +0,0 @@ -"""Class for Braava devices.""" - -import logging - -from homeassistant.components.vacuum import VacuumEntityFeature - -from .irobot_base import SUPPORT_IROBOT, IRobotVacuum - -_LOGGER = logging.getLogger(__name__) - -ATTR_DETECTED_PAD = "detected_pad" -ATTR_LID_CLOSED = "lid_closed" -ATTR_TANK_PRESENT = "tank_present" -ATTR_TANK_LEVEL = "tank_level" -ATTR_PAD_WETNESS = "spray_amount" - -OVERLAP_STANDARD = 67 -OVERLAP_DEEP = 85 -OVERLAP_EXTENDED = 25 -MOP_STANDARD = "Standard" -MOP_DEEP = "Deep" -MOP_EXTENDED = "Extended" -BRAAVA_MOP_BEHAVIORS = [MOP_STANDARD, MOP_DEEP, MOP_EXTENDED] -BRAAVA_SPRAY_AMOUNT = [1, 2, 3] - -# Braava Jets can set mopping behavior through fanspeed -SUPPORT_BRAAVA = SUPPORT_IROBOT | VacuumEntityFeature.FAN_SPEED - - -class BraavaJet(IRobotVacuum): - """Braava Jet.""" - - _attr_supported_features = SUPPORT_BRAAVA - - def __init__(self, roomba, blid): - """Initialize the Roomba handler.""" - super().__init__(roomba, blid) - - # Initialize fan speed list - self._attr_fan_speed_list = [ - f"{behavior}-{spray}" - for behavior in BRAAVA_MOP_BEHAVIORS - for spray in BRAAVA_SPRAY_AMOUNT - ] - - @property - def fan_speed(self): - """Return the fan speed of the vacuum cleaner.""" - # Mopping behavior and spray amount as fan speed - rank_overlap = self.vacuum_state.get("rankOverlap", {}) - behavior = None - if rank_overlap == OVERLAP_STANDARD: - behavior = MOP_STANDARD - elif rank_overlap == OVERLAP_DEEP: - behavior = MOP_DEEP - elif rank_overlap == OVERLAP_EXTENDED: - behavior = MOP_EXTENDED - pad_wetness = self.vacuum_state.get("padWetness", {}) - # "disposable" and "reusable" values are always the same - pad_wetness_value = pad_wetness.get("disposable") - return f"{behavior}-{pad_wetness_value}" - - async def async_set_fan_speed(self, fan_speed, **kwargs): - """Set fan speed.""" - try: - split = fan_speed.split("-", 1) - behavior = split[0] - spray 
= int(split[1]) - if behavior.capitalize() in BRAAVA_MOP_BEHAVIORS: - behavior = behavior.capitalize() - except IndexError: - _LOGGER.error( - "Fan speed error: expected {behavior}-{spray_amount}, got '%s'", - fan_speed, - ) - return - except ValueError: - _LOGGER.error("Spray amount error: expected integer, got '%s'", split[1]) - return - if behavior not in BRAAVA_MOP_BEHAVIORS: - _LOGGER.error( - "Mop behavior error: expected one of %s, got '%s'", - str(BRAAVA_MOP_BEHAVIORS), - behavior, - ) - return - if spray not in BRAAVA_SPRAY_AMOUNT: - _LOGGER.error( - "Spray amount error: expected one of %s, got '%d'", - str(BRAAVA_SPRAY_AMOUNT), - spray, - ) - return - - overlap = 0 - if behavior == MOP_STANDARD: - overlap = OVERLAP_STANDARD - elif behavior == MOP_DEEP: - overlap = OVERLAP_DEEP - else: - overlap = OVERLAP_EXTENDED - await self.hass.async_add_executor_job( - self.vacuum.set_preference, "rankOverlap", overlap - ) - await self.hass.async_add_executor_job( - self.vacuum.set_preference, - "padWetness", - {"disposable": spray, "reusable": spray}, - ) - - @property - def extra_state_attributes(self): - """Return the state attributes of the device.""" - state_attrs = super().extra_state_attributes - - # Get Braava state - state = self.vacuum_state - detected_pad = state.get("detectedPad") - mop_ready = state.get("mopReady", {}) - lid_closed = mop_ready.get("lidClosed") - tank_present = mop_ready.get("tankPresent") - tank_level = state.get("tankLvl") - state_attrs[ATTR_DETECTED_PAD] = detected_pad - state_attrs[ATTR_LID_CLOSED] = lid_closed - state_attrs[ATTR_TANK_PRESENT] = tank_present - state_attrs[ATTR_TANK_LEVEL] = tank_level - - return state_attrs diff --git a/homeassistant/components/roomba/config_flow.py b/homeassistant/components/roomba/config_flow.py index 53ea9aa7c44..d040074246a 100644 --- a/homeassistant/components/roomba/config_flow.py +++ b/homeassistant/components/roomba/config_flow.py @@ -16,7 +16,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_DELAY, CONF_HOST, CONF_NAME, CONF_PASSWORD from homeassistant.core import HomeAssistant, callback @@ -41,7 +41,9 @@ DEFAULT_OPTIONS = {CONF_CONTINUOUS: DEFAULT_CONTINUOUS, CONF_DELAY: DEFAULT_DELA MAX_NUM_DEVICES_TO_DISCOVER = 25 AUTH_HELP_URL_KEY = "auth_help_url" -AUTH_HELP_URL_VALUE = "https://www.home-assistant.io/integrations/roomba/#manually-retrieving-your-credentials" +AUTH_HELP_URL_VALUE = ( + "https://www.home-assistant.io/integrations/roomba/#retrieving-your-credentials" +) async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: @@ -55,7 +57,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, address=data[CONF_HOST], blid=data[CONF_BLID], password=data[CONF_PASSWORD], - continuous=False, + continuous=True, delay=data[CONF_DELAY], ) ) @@ -77,7 +79,7 @@ class RoombaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 name: str | None = None - blid: str | None = None + blid: str host: str | None = None def __init__(self) -> None: @@ -90,7 +92,7 @@ class RoombaConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> RoombaOptionsFlowHandler: """Get the options flow for this handler.""" - return RoombaOptionsFlowHandler(config_entry) + return RoombaOptionsFlowHandler() async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -128,7 +130,9 @@ class RoombaConfigFlow(ConfigFlow, domain=DOMAIN): # going 
for a longer hostname we abort so the user # does not see two flows if discovery fails. for progress in self._async_in_progress(): - flow_unique_id: str = progress["context"]["unique_id"] + flow_unique_id = progress["context"].get("unique_id") + if not flow_unique_id: + continue if flow_unique_id.startswith(self.blid): return self.async_abort(reason="short_blid") if self.blid.startswith(flow_unique_id): @@ -296,7 +300,7 @@ class RoombaConfigFlow(ConfigFlow, domain=DOMAIN): ) -class RoombaOptionsFlowHandler(OptionsFlowWithConfigEntry): +class RoombaOptionsFlowHandler(OptionsFlow): """Handle options.""" async def async_step_init( @@ -306,17 +310,18 @@ class RoombaOptionsFlowHandler(OptionsFlowWithConfigEntry): if user_input is not None: return self.async_create_entry(title="", data=user_input) + options = self.config_entry.options return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Optional( CONF_CONTINUOUS, - default=self.options.get(CONF_CONTINUOUS, DEFAULT_CONTINUOUS), + default=options.get(CONF_CONTINUOUS, DEFAULT_CONTINUOUS), ): bool, vol.Optional( CONF_DELAY, - default=self.options.get(CONF_DELAY, DEFAULT_DELAY), + default=options.get(CONF_DELAY, DEFAULT_DELAY), ): int, } ), diff --git a/homeassistant/components/roomba/const.py b/homeassistant/components/roomba/const.py index 331c0900682..7f1e3b8e1ee 100644 --- a/homeassistant/components/roomba/const.py +++ b/homeassistant/components/roomba/const.py @@ -9,5 +9,5 @@ CONF_CONTINUOUS = "continuous" CONF_BLID = "blid" DEFAULT_CERT = "/etc/ssl/certs/ca-certificates.crt" DEFAULT_CONTINUOUS = True -DEFAULT_DELAY = 1 +DEFAULT_DELAY = 30 ROOMBA_SESSION = "roomba_session" diff --git a/homeassistant/components/roomba/entity.py b/homeassistant/components/roomba/entity.py new file mode 100644 index 00000000000..d55a260e53a --- /dev/null +++ b/homeassistant/components/roomba/entity.py @@ -0,0 +1,95 @@ +"""Base class for iRobot devices.""" + +from __future__ import annotations + +from homeassistant.const import ATTR_CONNECTIONS +import homeassistant.helpers.device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity +import homeassistant.util.dt as dt_util + +from . 
import roomba_reported_state +from .const import DOMAIN + + +class IRobotEntity(Entity): + """Base class for iRobot Entities.""" + + _attr_should_poll = False + _attr_has_entity_name = True + + def __init__(self, roomba, blid) -> None: + """Initialize the iRobot handler.""" + self.vacuum = roomba + self._blid = blid + self.vacuum_state = roomba_reported_state(roomba) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.robot_unique_id)}, + serial_number=self.vacuum_state.get("hwPartsRev", {}).get("navSerialNo"), + manufacturer="iRobot", + model=self.vacuum_state.get("sku"), + name=str(self.vacuum_state.get("name")), + sw_version=self.vacuum_state.get("softwareVer"), + hw_version=self.vacuum_state.get("hardwareRev"), + ) + + if mac_address := self.vacuum_state.get("hwPartsRev", {}).get( + "wlan0HwAddr", self.vacuum_state.get("mac") + ): + self._attr_device_info[ATTR_CONNECTIONS] = { + (dr.CONNECTION_NETWORK_MAC, mac_address) + } + + @property + def robot_unique_id(self): + """Return the uniqueid of the vacuum cleaner.""" + return f"roomba_{self._blid}" + + @property + def unique_id(self): + """Return the uniqueid of the vacuum cleaner.""" + return self.robot_unique_id + + @property + def battery_level(self): + """Return the battery level of the vacuum cleaner.""" + return self.vacuum_state.get("batPct") + + @property + def run_stats(self): + """Return the run stats.""" + return self.vacuum_state.get("bbrun", {}) + + @property + def mission_stats(self): + """Return the mission stats.""" + return self.vacuum_state.get("bbmssn", {}) + + @property + def battery_stats(self): + """Return the battery stats.""" + return self.vacuum_state.get("bbchg3", {}) + + @property + def last_mission(self): + """Return last mission start time.""" + if ( + ts := self.vacuum_state.get("cleanMissionStatus", {}).get("mssnStrtTm") + ) is None or ts == 0: + return None + return dt_util.utc_from_timestamp(ts) + + async def async_added_to_hass(self): + """Register callback function.""" + self.vacuum.register_on_message_callback(self.on_message) + + def new_state_filter(self, new_state): + """Filter out wifi state messages.""" + return len(new_state) > 1 or "signal" not in new_state + + def on_message(self, json_data): + """Update state on message change.""" + state = json_data.get("state", {}).get("reported", {}) + if self.new_state_filter(state): + self.schedule_update_ha_state() diff --git a/homeassistant/components/roomba/icons.json b/homeassistant/components/roomba/icons.json index cdb36ef97e5..8466ecb51e3 100644 --- a/homeassistant/components/roomba/icons.json +++ b/homeassistant/components/roomba/icons.json @@ -32,6 +32,9 @@ }, "total_cleaned_area": { "default": "mdi:texture-box" + }, + "last_mission": { + "default": "mdi:calendar-clock" } } } diff --git a/homeassistant/components/roomba/irobot_base.py b/homeassistant/components/roomba/irobot_base.py deleted file mode 100644 index 4850dc0b7e9..00000000000 --- a/homeassistant/components/roomba/irobot_base.py +++ /dev/null @@ -1,269 +0,0 @@ -"""Base class for iRobot devices.""" - -from __future__ import annotations - -import asyncio -import logging - -from homeassistant.components.vacuum import ( - ATTR_STATUS, - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_RETURNING, - StateVacuumEntity, - VacuumEntityFeature, -) -from homeassistant.const import ATTR_CONNECTIONS, STATE_IDLE, STATE_PAUSED -import homeassistant.helpers.device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity 
import Entity -import homeassistant.util.dt as dt_util -from homeassistant.util.unit_system import METRIC_SYSTEM - -from . import roomba_reported_state -from .const import DOMAIN - -_LOGGER = logging.getLogger(__name__) - -ATTR_CLEANING_TIME = "cleaning_time" -ATTR_CLEANED_AREA = "cleaned_area" -ATTR_ERROR = "error" -ATTR_ERROR_CODE = "error_code" -ATTR_POSITION = "position" -ATTR_SOFTWARE_VERSION = "software_version" - -# Commonly supported features -SUPPORT_IROBOT = ( - VacuumEntityFeature.BATTERY - | VacuumEntityFeature.PAUSE - | VacuumEntityFeature.RETURN_HOME - | VacuumEntityFeature.SEND_COMMAND - | VacuumEntityFeature.START - | VacuumEntityFeature.STATE - | VacuumEntityFeature.STOP - | VacuumEntityFeature.LOCATE -) - -STATE_MAP = { - "": STATE_IDLE, - "charge": STATE_DOCKED, - "evac": STATE_RETURNING, # Emptying at cleanbase - "hmMidMsn": STATE_CLEANING, # Recharging at the middle of a cycle - "hmPostMsn": STATE_RETURNING, # Cycle finished - "hmUsrDock": STATE_RETURNING, - "pause": STATE_PAUSED, - "run": STATE_CLEANING, - "stop": STATE_IDLE, - "stuck": STATE_ERROR, -} - - -class IRobotEntity(Entity): - """Base class for iRobot Entities.""" - - _attr_should_poll = False - _attr_has_entity_name = True - - def __init__(self, roomba, blid): - """Initialize the iRobot handler.""" - self.vacuum = roomba - self._blid = blid - self.vacuum_state = roomba_reported_state(roomba) - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self.robot_unique_id)}, - serial_number=self.vacuum_state.get("hwPartsRev", {}).get("navSerialNo"), - manufacturer="iRobot", - model=self.vacuum_state.get("sku"), - name=str(self.vacuum_state.get("name")), - sw_version=self.vacuum_state.get("softwareVer"), - hw_version=self.vacuum_state.get("hardwareRev"), - ) - - if mac_address := self.vacuum_state.get("hwPartsRev", {}).get( - "wlan0HwAddr", self.vacuum_state.get("mac") - ): - self._attr_device_info[ATTR_CONNECTIONS] = { - (dr.CONNECTION_NETWORK_MAC, mac_address) - } - - @property - def robot_unique_id(self): - """Return the uniqueid of the vacuum cleaner.""" - return f"roomba_{self._blid}" - - @property - def unique_id(self): - """Return the uniqueid of the vacuum cleaner.""" - return self.robot_unique_id - - @property - def battery_level(self): - """Return the battery level of the vacuum cleaner.""" - return self.vacuum_state.get("batPct") - - @property - def run_stats(self): - """Return the run stats.""" - return self.vacuum_state.get("bbrun", {}) - - @property - def mission_stats(self): - """Return the mission stats.""" - return self.vacuum_state.get("bbmssn", {}) - - @property - def battery_stats(self): - """Return the battery stats.""" - return self.vacuum_state.get("bbchg3", {}) - - @property - def _robot_state(self): - """Return the state of the vacuum cleaner.""" - clean_mission_status = self.vacuum_state.get("cleanMissionStatus", {}) - cycle = clean_mission_status.get("cycle") - phase = clean_mission_status.get("phase") - try: - state = STATE_MAP[phase] - except KeyError: - return STATE_ERROR - if cycle != "none" and state in (STATE_IDLE, STATE_DOCKED): - state = STATE_PAUSED - return state - - async def async_added_to_hass(self): - """Register callback function.""" - self.vacuum.register_on_message_callback(self.on_message) - - def new_state_filter(self, new_state): - """Filter out wifi state messages.""" - return len(new_state) > 1 or "signal" not in new_state - - def on_message(self, json_data): - """Update state on message change.""" - state = json_data.get("state", {}).get("reported", {}) - 
if self.new_state_filter(state): - self.schedule_update_ha_state() - - -class IRobotVacuum(IRobotEntity, StateVacuumEntity): - """Base class for iRobot robots.""" - - _attr_name = None - _attr_supported_features = SUPPORT_IROBOT - _attr_available = True # Always available, otherwise setup will fail - - def __init__(self, roomba, blid): - """Initialize the iRobot handler.""" - super().__init__(roomba, blid) - self._cap_position = self.vacuum_state.get("cap", {}).get("pose") == 1 - - @property - def state(self): - """Return the state of the vacuum cleaner.""" - return self._robot_state - - @property - def extra_state_attributes(self): - """Return the state attributes of the device.""" - state = self.vacuum_state - - # Roomba software version - software_version = state.get("softwareVer") - - # Set properties that are to appear in the GUI - state_attrs = {ATTR_SOFTWARE_VERSION: software_version} - - # Set legacy status to avoid break changes - state_attrs[ATTR_STATUS] = self.vacuum.current_state - - # Only add cleaning time and cleaned area attrs when the vacuum is - # currently on - if self.state == STATE_CLEANING: - # Get clean mission status - ( - state_attrs[ATTR_CLEANING_TIME], - state_attrs[ATTR_CLEANED_AREA], - ) = self.get_cleaning_status(state) - - # Error - if self.vacuum.error_code != 0: - state_attrs[ATTR_ERROR] = self.vacuum.error_message - state_attrs[ATTR_ERROR_CODE] = self.vacuum.error_code - - # Not all Roombas expose position data - # https://github.com/koalazak/dorita980/issues/48 - if self._cap_position: - pos_state = state.get("pose", {}) - position = None - pos_x = pos_state.get("point", {}).get("x") - pos_y = pos_state.get("point", {}).get("y") - theta = pos_state.get("theta") - if all(item is not None for item in (pos_x, pos_y, theta)): - position = f"({pos_x}, {pos_y}, {theta})" - state_attrs[ATTR_POSITION] = position - - return state_attrs - - def get_cleaning_status(self, state) -> tuple[int, int]: - """Return the cleaning time and cleaned area from the device.""" - if not (mission_state := state.get("cleanMissionStatus")): - return (0, 0) - - if cleaning_time := mission_state.get("mssnM", 0): - pass - elif start_time := mission_state.get("mssnStrtTm"): - now = dt_util.as_timestamp(dt_util.utcnow()) - if now > start_time: - cleaning_time = (now - start_time) // 60 - - if cleaned_area := mission_state.get("sqft", 0): # Imperial - # Convert to m2 if the unit_system is set to metric - if self.hass.config.units is METRIC_SYSTEM: - cleaned_area = round(cleaned_area * 0.0929) - - return (cleaning_time, cleaned_area) - - def on_message(self, json_data): - """Update state on message change.""" - state = json_data.get("state", {}).get("reported", {}) - if self.new_state_filter(state): - _LOGGER.debug("Got new state from the vacuum: %s", json_data) - self.schedule_update_ha_state() - - async def async_start(self): - """Start or resume the cleaning task.""" - if self.state == STATE_PAUSED: - await self.hass.async_add_executor_job(self.vacuum.send_command, "resume") - else: - await self.hass.async_add_executor_job(self.vacuum.send_command, "start") - - async def async_stop(self, **kwargs): - """Stop the vacuum cleaner.""" - await self.hass.async_add_executor_job(self.vacuum.send_command, "stop") - - async def async_pause(self): - """Pause the cleaning cycle.""" - await self.hass.async_add_executor_job(self.vacuum.send_command, "pause") - - async def async_return_to_base(self, **kwargs): - """Set the vacuum cleaner to return to the dock.""" - if self.state == STATE_CLEANING: - 
await self.async_pause() - for _ in range(10): - if self.state == STATE_PAUSED: - break - await asyncio.sleep(1) - await self.hass.async_add_executor_job(self.vacuum.send_command, "dock") - - async def async_locate(self, **kwargs): - """Located vacuum.""" - await self.hass.async_add_executor_job(self.vacuum.send_command, "find") - - async def async_send_command(self, command, params=None, **kwargs): - """Send raw command.""" - _LOGGER.debug("async_send_command %s (%s), %s", command, params, kwargs) - await self.hass.async_add_executor_job( - self.vacuum.send_command, command, params - ) diff --git a/homeassistant/components/roomba/manifest.json b/homeassistant/components/roomba/manifest.json index a697680b379..edb317f9752 100644 --- a/homeassistant/components/roomba/manifest.json +++ b/homeassistant/components/roomba/manifest.json @@ -1,7 +1,7 @@ { "domain": "roomba", "name": "iRobot Roomba and Braava", - "codeowners": ["@pschmitt", "@cyr-ius", "@shenxn", "@Xitee1", "@Orhideous"], + "codeowners": ["@pschmitt", "@cyr-ius", "@shenxn", "@Orhideous"], "config_flow": true, "dhcp": [ { diff --git a/homeassistant/components/roomba/roomba.py b/homeassistant/components/roomba/roomba.py deleted file mode 100644 index 5d774120634..00000000000 --- a/homeassistant/components/roomba/roomba.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Class for Roomba devices.""" - -import logging - -from homeassistant.components.vacuum import VacuumEntityFeature - -from .irobot_base import SUPPORT_IROBOT, IRobotVacuum - -_LOGGER = logging.getLogger(__name__) - -ATTR_BIN_FULL = "bin_full" -ATTR_BIN_PRESENT = "bin_present" - -FAN_SPEED_AUTOMATIC = "Automatic" -FAN_SPEED_ECO = "Eco" -FAN_SPEED_PERFORMANCE = "Performance" -FAN_SPEEDS = [FAN_SPEED_AUTOMATIC, FAN_SPEED_ECO, FAN_SPEED_PERFORMANCE] - -# Only Roombas with CarpetBost can set their fanspeed -SUPPORT_ROOMBA_CARPET_BOOST = SUPPORT_IROBOT | VacuumEntityFeature.FAN_SPEED - - -class RoombaVacuum(IRobotVacuum): - """Basic Roomba robot (without carpet boost).""" - - @property - def extra_state_attributes(self): - """Return the state attributes of the device.""" - state_attrs = super().extra_state_attributes - - # Get bin state - bin_raw_state = self.vacuum_state.get("bin", {}) - bin_state = {} - if bin_raw_state.get("present") is not None: - bin_state[ATTR_BIN_PRESENT] = bin_raw_state.get("present") - if bin_raw_state.get("full") is not None: - bin_state[ATTR_BIN_FULL] = bin_raw_state.get("full") - state_attrs.update(bin_state) - - return state_attrs - - -class RoombaVacuumCarpetBoost(RoombaVacuum): - """Roomba robot with carpet boost.""" - - _attr_fan_speed_list = FAN_SPEEDS - _attr_supported_features = SUPPORT_ROOMBA_CARPET_BOOST - - @property - def fan_speed(self): - """Return the fan speed of the vacuum cleaner.""" - fan_speed = None - carpet_boost = self.vacuum_state.get("carpetBoost") - high_perf = self.vacuum_state.get("vacHigh") - if carpet_boost is not None and high_perf is not None: - if carpet_boost: - fan_speed = FAN_SPEED_AUTOMATIC - elif high_perf: - fan_speed = FAN_SPEED_PERFORMANCE - else: # carpet_boost and high_perf are False - fan_speed = FAN_SPEED_ECO - return fan_speed - - async def async_set_fan_speed(self, fan_speed, **kwargs): - """Set fan speed.""" - if fan_speed.capitalize() in FAN_SPEEDS: - fan_speed = fan_speed.capitalize() - _LOGGER.debug("Set fan speed to: %s", fan_speed) - high_perf = None - carpet_boost = None - if fan_speed == FAN_SPEED_AUTOMATIC: - high_perf = False - carpet_boost = True - elif fan_speed == FAN_SPEED_ECO: - high_perf = False - 
carpet_boost = False - elif fan_speed == FAN_SPEED_PERFORMANCE: - high_perf = True - carpet_boost = False - else: - _LOGGER.error("No such fan speed available: %s", fan_speed) - return - # The set_preference method does only accept string values - await self.hass.async_add_executor_job( - self.vacuum.set_preference, "carpetBoost", str(carpet_boost) - ) - await self.hass.async_add_executor_job( - self.vacuum.set_preference, "vacHigh", str(high_perf) - ) diff --git a/homeassistant/components/roomba/sensor.py b/homeassistant/components/roomba/sensor.py index 6e043d237f3..d358dcb428c 100644 --- a/homeassistant/components/roomba/sensor.py +++ b/homeassistant/components/roomba/sensor.py @@ -12,18 +12,13 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - AREA_SQUARE_METERS, - PERCENTAGE, - EntityCategory, - UnitOfTime, -) +from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfArea, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from .const import DOMAIN -from .irobot_base import IRobotEntity +from .entity import IRobotEntity from .models import RoombaData @@ -108,7 +103,7 @@ SENSORS: list[RoombaSensorEntityDescription] = [ RoombaSensorEntityDescription( key="total_cleaned_area", translation_key="total_cleaned_area", - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, entity_category=EntityCategory.DIAGNOSTIC, value_fn=lambda self: ( None if (sqft := self.run_stats.get("sqft")) is None else sqft * 9.29 @@ -116,6 +111,14 @@ SENSORS: list[RoombaSensorEntityDescription] = [ suggested_display_precision=0, entity_registry_enabled_default=False, ), + RoombaSensorEntityDescription( + key="last_mission", + translation_key="last_mission", + device_class=SensorDeviceClass.TIMESTAMP, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda self: self.last_mission, + entity_registry_enabled_default=False, + ), ] diff --git a/homeassistant/components/roomba/strings.json b/homeassistant/components/roomba/strings.json index 088918824d2..0db70a6a141 100644 --- a/homeassistant/components/roomba/strings.json +++ b/homeassistant/components/roomba/strings.json @@ -87,6 +87,9 @@ }, "total_cleaned_area": { "name": "Total cleaned area" + }, + "last_mission": { + "name": "Last mission start time" } } } diff --git a/homeassistant/components/roomba/vacuum.py b/homeassistant/components/roomba/vacuum.py index e4a83375ccc..92063f74afa 100644 --- a/homeassistant/components/roomba/vacuum.py +++ b/homeassistant/components/roomba/vacuum.py @@ -2,16 +2,88 @@ from __future__ import annotations +import asyncio +import logging +from typing import Any + +from homeassistant.components.vacuum import ( + ATTR_STATUS, + StateVacuumEntity, + VacuumActivity, + VacuumEntityFeature, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import dt as dt_util +from homeassistant.util.unit_system import METRIC_SYSTEM from . 
import roomba_reported_state -from .braava import BraavaJet from .const import DOMAIN -from .irobot_base import IRobotVacuum +from .entity import IRobotEntity from .models import RoombaData -from .roomba import RoombaVacuum, RoombaVacuumCarpetBoost + +SUPPORT_IROBOT = ( + VacuumEntityFeature.BATTERY + | VacuumEntityFeature.PAUSE + | VacuumEntityFeature.RETURN_HOME + | VacuumEntityFeature.SEND_COMMAND + | VacuumEntityFeature.START + | VacuumEntityFeature.STATE + | VacuumEntityFeature.STOP + | VacuumEntityFeature.LOCATE +) + +STATE_MAP = { + "": VacuumActivity.IDLE, + "charge": VacuumActivity.DOCKED, + "evac": VacuumActivity.RETURNING, # Emptying at cleanbase + "hmMidMsn": VacuumActivity.CLEANING, # Recharging at the middle of a cycle + "hmPostMsn": VacuumActivity.RETURNING, # Cycle finished + "hmUsrDock": VacuumActivity.RETURNING, + "pause": VacuumActivity.PAUSED, + "run": VacuumActivity.CLEANING, + "stop": VacuumActivity.IDLE, + "stuck": VacuumActivity.ERROR, +} + +_LOGGER = logging.getLogger(__name__) +ATTR_SOFTWARE_VERSION = "software_version" +ATTR_CLEANING_TIME = "cleaning_time" +ATTR_CLEANED_AREA = "cleaned_area" +ATTR_ERROR = "error" +ATTR_ERROR_CODE = "error_code" +ATTR_POSITION = "position" +ATTR_SOFTWARE_VERSION = "software_version" + +ATTR_BIN_FULL = "bin_full" +ATTR_BIN_PRESENT = "bin_present" + +FAN_SPEED_AUTOMATIC = "Automatic" +FAN_SPEED_ECO = "Eco" +FAN_SPEED_PERFORMANCE = "Performance" +FAN_SPEEDS = [FAN_SPEED_AUTOMATIC, FAN_SPEED_ECO, FAN_SPEED_PERFORMANCE] + +# Only Roombas with CarpetBoost can set their fan speed +SUPPORT_ROOMBA_CARPET_BOOST = SUPPORT_IROBOT | VacuumEntityFeature.FAN_SPEED + +ATTR_DETECTED_PAD = "detected_pad" +ATTR_LID_CLOSED = "lid_closed" +ATTR_TANK_PRESENT = "tank_present" +ATTR_TANK_LEVEL = "tank_level" +ATTR_PAD_WETNESS = "spray_amount" + +OVERLAP_STANDARD = 67 +OVERLAP_DEEP = 85 +OVERLAP_EXTENDED = 25 +MOP_STANDARD = "Standard" +MOP_DEEP = "Deep" +MOP_EXTENDED = "Extended" +BRAAVA_MOP_BEHAVIORS = [MOP_STANDARD, MOP_DEEP, MOP_EXTENDED] +BRAAVA_SPRAY_AMOUNT = [1, 2, 3] + +# Braava Jets can set mopping behavior through fanspeed +SUPPORT_BRAAVA = SUPPORT_IROBOT | VacuumEntityFeature.FAN_SPEED async def async_setup_entry( @@ -39,3 +111,304 @@ roomba_vac = constructor(roomba, blid) async_add_entities([roomba_vac]) + + +class IRobotVacuum(IRobotEntity, StateVacuumEntity): + """Base class for iRobot robots.""" + + _attr_name = None + _attr_supported_features = SUPPORT_IROBOT + _attr_available = True # Always available, otherwise setup will fail + + def __init__(self, roomba, blid) -> None: + """Initialize the iRobot handler.""" + super().__init__(roomba, blid) + self._cap_position = self.vacuum_state.get("cap", {}).get("pose") == 1 + + @property + def activity(self): + """Return the state of the vacuum cleaner.""" + clean_mission_status = self.vacuum_state.get("cleanMissionStatus", {}) + cycle = clean_mission_status.get("cycle") + phase = clean_mission_status.get("phase") + try: + state = STATE_MAP[phase] + except KeyError: + return VacuumActivity.ERROR + if cycle != "none" and state in (VacuumActivity.IDLE, VacuumActivity.DOCKED): + state = VacuumActivity.PAUSED + return state + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the state attributes of the device.""" + state = self.vacuum_state + + # Roomba software version + software_version = state.get("softwareVer") + + # Set properties that are to appear in the GUI + state_attrs = {ATTR_SOFTWARE_VERSION: software_version} + + # Set legacy status 
to avoid breaking changes + state_attrs[ATTR_STATUS] = self.vacuum.current_state + + # Only add cleaning time and cleaned area attrs when the vacuum is + # currently on + if self.state == VacuumActivity.CLEANING: + # Get clean mission status + ( + state_attrs[ATTR_CLEANING_TIME], + state_attrs[ATTR_CLEANED_AREA], + ) = self.get_cleaning_status(state) + + # Error + if self.vacuum.error_code != 0: + state_attrs[ATTR_ERROR] = self.vacuum.error_message + state_attrs[ATTR_ERROR_CODE] = self.vacuum.error_code + + # Not all Roombas expose position data + # https://github.com/koalazak/dorita980/issues/48 + if self._cap_position: + pos_state = state.get("pose", {}) + position = None + pos_x = pos_state.get("point", {}).get("x") + pos_y = pos_state.get("point", {}).get("y") + theta = pos_state.get("theta") + if all(item is not None for item in (pos_x, pos_y, theta)): + position = f"({pos_x}, {pos_y}, {theta})" + state_attrs[ATTR_POSITION] = position + + return state_attrs + + def get_cleaning_status(self, state) -> tuple[int, int]: + """Return the cleaning time and cleaned area from the device.""" + if not (mission_state := state.get("cleanMissionStatus")): + return (0, 0) + + if cleaning_time := mission_state.get("mssnM", 0): + pass + elif start_time := mission_state.get("mssnStrtTm"): + now = dt_util.as_timestamp(dt_util.utcnow()) + if now > start_time: + cleaning_time = (now - start_time) // 60 + + if cleaned_area := mission_state.get("sqft", 0): # Imperial + # Convert to m2 if the unit_system is set to metric + if self.hass.config.units is METRIC_SYSTEM: + cleaned_area = round(cleaned_area * 0.0929) + + return (cleaning_time, cleaned_area) + + def on_message(self, json_data): + """Update state on message change.""" + state = json_data.get("state", {}).get("reported", {}) + if self.new_state_filter(state): + _LOGGER.debug("Got new state from the vacuum: %s", json_data) + self.schedule_update_ha_state() + + async def async_start(self) -> None: + """Start or resume the cleaning task.""" + if self.state == VacuumActivity.PAUSED: + await self.hass.async_add_executor_job(self.vacuum.send_command, "resume") + else: + await self.hass.async_add_executor_job(self.vacuum.send_command, "start") + + async def async_stop(self, **kwargs): + """Stop the vacuum cleaner.""" + await self.hass.async_add_executor_job(self.vacuum.send_command, "stop") + + async def async_pause(self) -> None: + """Pause the cleaning cycle.""" + await self.hass.async_add_executor_job(self.vacuum.send_command, "pause") + + async def async_return_to_base(self, **kwargs): + """Set the vacuum cleaner to return to the dock.""" + if self.state == VacuumActivity.CLEANING: + await self.async_pause() + for _ in range(10): + if self.state == VacuumActivity.PAUSED: + break + await asyncio.sleep(1) + await self.hass.async_add_executor_job(self.vacuum.send_command, "dock") + + async def async_locate(self, **kwargs): + """Locate the vacuum.""" + await self.hass.async_add_executor_job(self.vacuum.send_command, "find") + + async def async_send_command(self, command, params=None, **kwargs): + """Send raw command.""" + _LOGGER.debug("async_send_command %s (%s), %s", command, params, kwargs) + await self.hass.async_add_executor_job( + self.vacuum.send_command, command, params + ) + + +class RoombaVacuum(IRobotVacuum): + """Basic Roomba robot (without carpet boost).""" + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the state attributes of the device.""" + state_attrs = super().extra_state_attributes + + # Get bin state + 
bin_raw_state = self.vacuum_state.get("bin", {}) + bin_state = {} + if bin_raw_state.get("present") is not None: + bin_state[ATTR_BIN_PRESENT] = bin_raw_state.get("present") + if bin_raw_state.get("full") is not None: + bin_state[ATTR_BIN_FULL] = bin_raw_state.get("full") + state_attrs.update(bin_state) + + return state_attrs + + +class RoombaVacuumCarpetBoost(RoombaVacuum): + """Roomba robot with carpet boost.""" + + _attr_fan_speed_list = FAN_SPEEDS + _attr_supported_features = SUPPORT_ROOMBA_CARPET_BOOST + + @property + def fan_speed(self): + """Return the fan speed of the vacuum cleaner.""" + fan_speed = None + carpet_boost = self.vacuum_state.get("carpetBoost") + high_perf = self.vacuum_state.get("vacHigh") + if carpet_boost is not None and high_perf is not None: + if carpet_boost: + fan_speed = FAN_SPEED_AUTOMATIC + elif high_perf: + fan_speed = FAN_SPEED_PERFORMANCE + else: # carpet_boost and high_perf are False + fan_speed = FAN_SPEED_ECO + return fan_speed + + async def async_set_fan_speed(self, fan_speed, **kwargs): + """Set fan speed.""" + if fan_speed.capitalize() in FAN_SPEEDS: + fan_speed = fan_speed.capitalize() + _LOGGER.debug("Set fan speed to: %s", fan_speed) + high_perf = None + carpet_boost = None + if fan_speed == FAN_SPEED_AUTOMATIC: + high_perf = False + carpet_boost = True + elif fan_speed == FAN_SPEED_ECO: + high_perf = False + carpet_boost = False + elif fan_speed == FAN_SPEED_PERFORMANCE: + high_perf = True + carpet_boost = False + else: + _LOGGER.error("No such fan speed available: %s", fan_speed) + return + # The set_preference method does only accept string values + await self.hass.async_add_executor_job( + self.vacuum.set_preference, "carpetBoost", str(carpet_boost) + ) + await self.hass.async_add_executor_job( + self.vacuum.set_preference, "vacHigh", str(high_perf) + ) + + +class BraavaJet(IRobotVacuum): + """Braava Jet.""" + + _attr_supported_features = SUPPORT_BRAAVA + + def __init__(self, roomba, blid) -> None: + """Initialize the Roomba handler.""" + super().__init__(roomba, blid) + + # Initialize fan speed list + self._attr_fan_speed_list = [ + f"{behavior}-{spray}" + for behavior in BRAAVA_MOP_BEHAVIORS + for spray in BRAAVA_SPRAY_AMOUNT + ] + + @property + def fan_speed(self): + """Return the fan speed of the vacuum cleaner.""" + # Mopping behavior and spray amount as fan speed + rank_overlap = self.vacuum_state.get("rankOverlap", {}) + behavior = None + if rank_overlap == OVERLAP_STANDARD: + behavior = MOP_STANDARD + elif rank_overlap == OVERLAP_DEEP: + behavior = MOP_DEEP + elif rank_overlap == OVERLAP_EXTENDED: + behavior = MOP_EXTENDED + pad_wetness = self.vacuum_state.get("padWetness", {}) + # "disposable" and "reusable" values are always the same + pad_wetness_value = pad_wetness.get("disposable") + return f"{behavior}-{pad_wetness_value}" + + async def async_set_fan_speed(self, fan_speed, **kwargs): + """Set fan speed.""" + try: + split = fan_speed.split("-", 1) + behavior = split[0] + spray = int(split[1]) + if behavior.capitalize() in BRAAVA_MOP_BEHAVIORS: + behavior = behavior.capitalize() + except IndexError: + _LOGGER.error( + "Fan speed error: expected {behavior}-{spray_amount}, got '%s'", + fan_speed, + ) + return + except ValueError: + _LOGGER.error("Spray amount error: expected integer, got '%s'", split[1]) + return + if behavior not in BRAAVA_MOP_BEHAVIORS: + _LOGGER.error( + "Mop behavior error: expected one of %s, got '%s'", + str(BRAAVA_MOP_BEHAVIORS), + behavior, + ) + return + if spray not in BRAAVA_SPRAY_AMOUNT: + _LOGGER.error( 
+ "Spray amount error: expected one of %s, got '%d'", + str(BRAAVA_SPRAY_AMOUNT), + spray, + ) + return + + overlap = 0 + if behavior == MOP_STANDARD: + overlap = OVERLAP_STANDARD + elif behavior == MOP_DEEP: + overlap = OVERLAP_DEEP + else: + overlap = OVERLAP_EXTENDED + await self.hass.async_add_executor_job( + self.vacuum.set_preference, "rankOverlap", overlap + ) + await self.hass.async_add_executor_job( + self.vacuum.set_preference, + "padWetness", + {"disposable": spray, "reusable": spray}, + ) + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the state attributes of the device.""" + state_attrs = super().extra_state_attributes + + # Get Braava state + state = self.vacuum_state + detected_pad = state.get("detectedPad") + mop_ready = state.get("mopReady", {}) + lid_closed = mop_ready.get("lidClosed") + tank_present = mop_ready.get("tankPresent") + tank_level = state.get("tankLvl") + state_attrs[ATTR_DETECTED_PAD] = detected_pad + state_attrs[ATTR_LID_CLOSED] = lid_closed + state_attrs[ATTR_TANK_PRESENT] = tank_present + state_attrs[ATTR_TANK_LEVEL] = tank_level + + return state_attrs diff --git a/homeassistant/components/roon/config_flow.py b/homeassistant/components/roon/config_flow.py index de220454852..b896f6775ae 100644 --- a/homeassistant/components/roon/config_flow.py +++ b/homeassistant/components/roon/config_flow.py @@ -142,9 +142,11 @@ class RoonConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_fallback() - async def async_step_fallback(self, user_input=None): + async def async_step_fallback( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Get host and port details from the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: self._host = user_input["host"] @@ -155,7 +157,9 @@ class RoonConfigFlow(ConfigFlow, domain=DOMAIN): step_id="fallback", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_link(self, user_input=None): + async def async_step_link( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle linking and authenticating with the roon server.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/roon/icons.json b/homeassistant/components/roon/icons.json index 571ca3f45a2..1e1dd42b765 100644 --- a/homeassistant/components/roon/icons.json +++ b/homeassistant/components/roon/icons.json @@ -1,5 +1,7 @@ { "services": { - "transfer": "mdi:monitor-multiple" + "transfer": { + "service": "mdi:monitor-multiple" + } } } diff --git a/homeassistant/components/roon/media_browser.py b/homeassistant/components/roon/media_browser.py index 806375bc902..13b2d9594e8 100644 --- a/homeassistant/components/roon/media_browser.py +++ b/homeassistant/components/roon/media_browser.py @@ -2,8 +2,7 @@ import logging -from homeassistant.components.media_player import BrowseMedia, MediaClass -from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_player import BrowseError, BrowseMedia, MediaClass class UnknownMediaType(BrowseError): diff --git a/homeassistant/components/roon/strings.json b/homeassistant/components/roon/strings.json index 853bcc6c585..463f0431891 100644 --- a/homeassistant/components/roon/strings.json +++ b/homeassistant/components/roon/strings.json @@ -10,8 +10,8 @@ } }, "link": { - "title": "Authorize HomeAssistant in Roon", - "description": "You must authorize Home Assistant in Roon. 
After you click submit, go to the Roon Core application, open Settings and enable HomeAssistant on the Extensions tab." + "title": "Authorize Home Assistant in Roon", + "description": "You must authorize Home Assistant in Roon. After you select **Submit**, go to the Roon Core application, open **Settings** and enable Home Assistant on the **Extensions** tab." } }, "error": { diff --git a/homeassistant/components/route53/icons.json b/homeassistant/components/route53/icons.json index 30a854991f0..5afe13ce949 100644 --- a/homeassistant/components/route53/icons.json +++ b/homeassistant/components/route53/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_records": "mdi:database-refresh" + "update_records": { + "service": "mdi:database-refresh" + } } } diff --git a/homeassistant/components/route53/manifest.json b/homeassistant/components/route53/manifest.json index 6db240bdcab..978c916e3ee 100644 --- a/homeassistant/components/route53/manifest.json +++ b/homeassistant/components/route53/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/route53", "iot_class": "cloud_push", "loggers": ["boto3", "botocore", "s3transfer"], + "quality_scale": "legacy", "requirements": ["boto3==1.34.131"] } diff --git a/homeassistant/components/rova/config_flow.py b/homeassistant/components/rova/config_flow.py index e5e3a31b8af..c25737160f4 100644 --- a/homeassistant/components/rova/config_flow.py +++ b/homeassistant/components/rova/config_flow.py @@ -59,31 +59,3 @@ class RovaConfigFlow(ConfigFlow, domain=DOMAIN): ), errors=errors, ) - - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Import the yaml config.""" - zip_code = user_input[CONF_ZIP_CODE] - number = user_input[CONF_HOUSE_NUMBER] - suffix = user_input[CONF_HOUSE_NUMBER_SUFFIX] - - await self.async_set_unique_id(f"{zip_code}{number}{suffix}".strip()) - self._abort_if_unique_id_configured() - - api = Rova(zip_code, number, suffix) - - try: - result = await self.hass.async_add_executor_job(api.is_rova_area) - - if result: - return self.async_create_entry( - title=f"{zip_code} {number} {suffix}".strip(), - data={ - CONF_ZIP_CODE: zip_code, - CONF_HOUSE_NUMBER: number, - CONF_HOUSE_NUMBER_SUFFIX: suffix, - }, - ) - return self.async_abort(reason="invalid_rova_area") - - except (ConnectTimeout, HTTPError): - return self.async_abort(reason="cannot_connect") diff --git a/homeassistant/components/rova/sensor.py b/homeassistant/components/rova/sensor.py index e44e84f52fa..589183eb7a8 100644 --- a/homeassistant/components/rova/sensor.py +++ b/homeassistant/components/rova/sensor.py @@ -4,26 +4,18 @@ from __future__ import annotations from datetime import datetime -import voluptuous as vol - from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -import homeassistant.helpers.config_validation as cv +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue 
-from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CONF_HOUSE_NUMBER, CONF_HOUSE_NUMBER_SUFFIX, CONF_ZIP_CODE, DOMAIN +from .const import DOMAIN from .coordinator import RovaCoordinator ISSUE_PLACEHOLDER = {"url": "/config/integrations/dashboard/add?domain=rova"} @@ -47,62 +39,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( ), ) -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_ZIP_CODE): cv.string, - vol.Required(CONF_HOUSE_NUMBER): cv.string, - vol.Optional(CONF_HOUSE_NUMBER_SUFFIX, default=""): cv.string, - vol.Optional(CONF_NAME, default="Rova"): cv.string, - vol.Optional(CONF_MONITORED_CONDITIONS, default=["bio"]): vol.All( - cv.ensure_list, [vol.In(["bio", "paper", "plastic", "residual"])] - ), - } -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the rova sensor platform through yaml configuration.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config, - ) - if ( - result["type"] == FlowResultType.CREATE_ENTRY - or result["reason"] == "already_configured" - ): - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.10.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Rova", - }, - ) - else: - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2024.10.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders=ISSUE_PLACEHOLDER, - ) - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/rova/strings.json b/homeassistant/components/rova/strings.json index 709e5450411..3b89fc789ee 100644 --- a/homeassistant/components/rova/strings.json +++ b/homeassistant/components/rova/strings.json @@ -12,7 +12,8 @@ }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "invalid_rova_area": "Rova does not collect at this address" + "invalid_rova_area": "Rova does not collect at this address", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", @@ -21,14 +22,6 @@ } }, "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Rova YAML configuration import failed", - "description": "Configuring Rova using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to Rova works and restart Home Assistant to try again or remove the Rova YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
- }, - "deprecated_yaml_import_issue_invalid_rova_area": { - "title": "The Rova YAML configuration import failed", - "description": "There was an error when trying to import your Rova YAML configuration.\n\nRova does not collect at this address.\n\nEnsure the imported configuration is correct and remove the Rova YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, "no_rova_area": { "title": "Rova does not collect at this address anymore", "description": "Rova does not collect at {zip_code} anymore.\n\nPlease remove the integration." diff --git a/homeassistant/components/rpi_camera/manifest.json b/homeassistant/components/rpi_camera/manifest.json index 9f7346ea353..aab16b1c462 100644 --- a/homeassistant/components/rpi_camera/manifest.json +++ b/homeassistant/components/rpi_camera/manifest.json @@ -3,5 +3,6 @@ "name": "Raspberry Pi Camera", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/rpi_camera", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/rpi_power/binary_sensor.py b/homeassistant/components/rpi_power/binary_sensor.py index a7306899bde..00d7ec0e3f4 100644 --- a/homeassistant/components/rpi_power/binary_sensor.py +++ b/homeassistant/components/rpi_power/binary_sensor.py @@ -55,5 +55,5 @@ class RaspberryChargerBinarySensor(BinarySensorEntity): if value: _LOGGER.warning(DESCRIPTION_UNDER_VOLTAGE) else: - _LOGGER.info(DESCRIPTION_NORMALIZED) + _LOGGER.debug(DESCRIPTION_NORMALIZED) self._attr_is_on = value diff --git a/homeassistant/components/rpi_power/config_flow.py b/homeassistant/components/rpi_power/config_flow.py index c44bb65d79a..0151a92856d 100644 --- a/homeassistant/components/rpi_power/config_flow.py +++ b/homeassistant/components/rpi_power/config_flow.py @@ -37,8 +37,6 @@ class RPiPowerFlow(DiscoveryFlowHandler[Awaitable[bool]], domain=DOMAIN): self, data: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by onboarding.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") has_devices = await self._discovery_function(self.hass) if not has_devices: diff --git a/homeassistant/components/rpi_power/manifest.json b/homeassistant/components/rpi_power/manifest.json index 7da5897c00d..d5704f61564 100644 --- a/homeassistant/components/rpi_power/manifest.json +++ b/homeassistant/components/rpi_power/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/rpi_power", "iot_class": "local_polling", "loggers": ["rpi_bad_power"], - "requirements": ["rpi-bad-power==0.1.0"] + "requirements": ["rpi-bad-power==0.1.0"], + "single_config_entry": true } diff --git a/homeassistant/components/rpi_power/strings.json b/homeassistant/components/rpi_power/strings.json index 9a46ca1e10e..796a973335b 100644 --- a/homeassistant/components/rpi_power/strings.json +++ b/homeassistant/components/rpi_power/strings.json @@ -7,7 +7,6 @@ } }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "no_devices_found": "Can't find the system class needed for this component, make sure that your kernel is recent and the hardware is supported" } } diff --git a/homeassistant/components/rtorrent/manifest.json b/homeassistant/components/rtorrent/manifest.json index 96b079c4363..bcd39a03aa3 100644 --- a/homeassistant/components/rtorrent/manifest.json +++ 
b/homeassistant/components/rtorrent/manifest.json @@ -3,5 +3,6 @@ "name": "rTorrent", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/rtorrent", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/rtsp_to_webrtc/__init__.py b/homeassistant/components/rtsp_to_webrtc/__init__.py index 77bf7ffeb8f..0fc257c463f 100644 --- a/homeassistant/components/rtsp_to_webrtc/__init__.py +++ b/homeassistant/components/rtsp_to_webrtc/__init__.py @@ -12,7 +12,7 @@ the offer/answer SDP protocol, other than as a signal path pass through. Other integrations may use this integration with these steps: - Check if this integration is loaded -- Call is_suported_stream_source for compatibility +- Call is_supported_stream_source for compatibility - Call async_offer_for_stream_source to get back an answer for a client offer """ @@ -20,17 +20,17 @@ from __future__ import annotations import asyncio import logging -from typing import Any from rtsp_to_webrtc.client import get_adaptive_client from rtsp_to_webrtc.exceptions import ClientError, ResponseError from rtsp_to_webrtc.interface import WebRTCClientInterface -import voluptuous as vol +from webrtc_models import RTCIceServer -from homeassistant.components import camera, websocket_api +from homeassistant.components import camera from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.aiohttp_client import async_get_clientsession _LOGGER = logging.getLogger(__name__) @@ -41,10 +41,24 @@ DATA_UNSUB = "unsub" TIMEOUT = 10 CONF_STUN_SERVER = "stun_server" +_DEPRECATED = "deprecated" + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up RTSPtoWebRTC from a config entry.""" hass.data.setdefault(DOMAIN, {}) + ir.async_create_issue( + hass, + DOMAIN, + _DEPRECATED, + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key=_DEPRECATED, + translation_placeholders={ + "go2rtc": "[go2rtc](https://www.home-assistant.io/integrations/go2rtc/)", + }, + ) client: WebRTCClientInterface try: @@ -57,7 +71,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except (TimeoutError, ClientError) as err: raise ConfigEntryNotReady from err - hass.data[DOMAIN][CONF_STUN_SERVER] = entry.options.get(CONF_STUN_SERVER, "") + hass.data[DOMAIN][CONF_STUN_SERVER] = entry.options.get(CONF_STUN_SERVER) + if server := entry.options.get(CONF_STUN_SERVER): + + @callback + def get_servers() -> list[RTCIceServer]: + return [RTCIceServer(urls=[server])] + + entry.async_on_unload(camera.async_register_ice_servers(hass, get_servers)) async def async_offer_for_stream_source( stream_source: str, @@ -85,8 +106,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) - websocket_api.async_register_command(hass, ws_get_settings) - return True @@ -94,26 +113,11 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if DOMAIN in hass.data: del hass.data[DOMAIN] + ir.async_delete_issue(hass, DOMAIN, _DEPRECATED) return True async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Reload config entry when options 
change.""" - if hass.data[DOMAIN][CONF_STUN_SERVER] != entry.options.get(CONF_STUN_SERVER, ""): + if hass.data[DOMAIN][CONF_STUN_SERVER] != entry.options.get(CONF_STUN_SERVER): await hass.config_entries.async_reload(entry.entry_id) - - -@websocket_api.websocket_command( - { - vol.Required("type"): "rtsp_to_webrtc/get_settings", - } -) -@callback -def ws_get_settings( - hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] -) -> None: - """Handle the websocket command.""" - connection.send_result( - msg["id"], - {CONF_STUN_SERVER: hass.data.get(DOMAIN, {}).get(CONF_STUN_SERVER, "")}, - ) diff --git a/homeassistant/components/rtsp_to_webrtc/config_flow.py b/homeassistant/components/rtsp_to_webrtc/config_flow.py index adab1a456d0..22502659757 100644 --- a/homeassistant/components/rtsp_to_webrtc/config_flow.py +++ b/homeassistant/components/rtsp_to_webrtc/config_flow.py @@ -9,7 +9,6 @@ from urllib.parse import urlparse import rtsp_to_webrtc import voluptuous as vol -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, @@ -19,6 +18,7 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from . import CONF_STUN_SERVER, DATA_SERVER_URL, DOMAIN @@ -119,16 +119,12 @@ class RTSPToWebRTCConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Create an options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """RTSPtoWeb Options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/rtsp_to_webrtc/strings.json b/homeassistant/components/rtsp_to_webrtc/strings.json index e52ab554473..c8dcbb7f462 100644 --- a/homeassistant/components/rtsp_to_webrtc/strings.json +++ b/homeassistant/components/rtsp_to_webrtc/strings.json @@ -24,6 +24,12 @@ "server_unreachable": "[%key:component::rtsp_to_webrtc::config::error::server_unreachable%]" } }, + "issues": { + "deprecated": { + "title": "The RTSPtoWebRTC integration is deprecated", + "description": "The RTSPtoWebRTC integration is deprecated and will be removed. Please use the {go2rtc} integration instead, which is enabled by default and provides a better experience. You only need to remove the RTSPtoWebRTC config entry." 
+ } + }, "options": { "step": { "init": { diff --git a/homeassistant/components/ruckus_unleashed/__init__.py b/homeassistant/components/ruckus_unleashed/__init__.py index c2c46fcc125..4ee870e8322 100644 --- a/homeassistant/components/ruckus_unleashed/__init__.py +++ b/homeassistant/components/ruckus_unleashed/__init__.py @@ -1,4 +1,4 @@ -"""The Ruckus Unleashed integration.""" +"""The Ruckus integration.""" import logging @@ -24,13 +24,13 @@ from .const import ( PLATFORMS, UNDO_UPDATE_LISTENERS, ) -from .coordinator import RuckusUnleashedDataUpdateCoordinator +from .coordinator import RuckusDataUpdateCoordinator _LOGGER = logging.getLogger(__package__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Ruckus Unleashed from a config entry.""" + """Set up Ruckus from a config entry.""" ruckus = AjaxSession.async_create( entry.data[CONF_HOST], @@ -46,7 +46,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await ruckus.close() raise ConfigEntryAuthFailed from autherr - coordinator = RuckusUnleashedDataUpdateCoordinator(hass, ruckus=ruckus) + coordinator = RuckusDataUpdateCoordinator(hass, ruckus=ruckus) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/ruckus_unleashed/config_flow.py b/homeassistant/components/ruckus_unleashed/config_flow.py index d2f27e4ef05..0743b19bdaf 100644 --- a/homeassistant/components/ruckus_unleashed/config_flow.py +++ b/homeassistant/components/ruckus_unleashed/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for Ruckus Unleashed integration.""" +"""Config flow for Ruckus integration.""" from collections.abc import Mapping import logging @@ -8,7 +8,7 @@ from aioruckus import AjaxSession, SystemStat from aioruckus.exceptions import AuthenticationError, SchemaError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -59,13 +59,11 @@ async def validate_input(hass: HomeAssistant, data): } -class RuckusUnleashedConfigFlow(ConfigFlow, domain=DOMAIN): - """Handle a config flow for Ruckus Unleashed.""" +class RuckusConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Ruckus.""" VERSION = 1 - _reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -82,27 +80,24 @@ class RuckusUnleashedConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self._reauth_entry is None: - await self.async_set_unique_id(info[KEY_SYS_SERIAL]) + await self.async_set_unique_id(info[KEY_SYS_SERIAL]) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( title=info[KEY_SYS_TITLE], data=user_input ) - if info[KEY_SYS_SERIAL] == self._reauth_entry.unique_id: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input + reauth_entry = self._get_reauth_entry() + if info[KEY_SYS_SERIAL] == reauth_entry.unique_id: + return self.async_update_reload_and_abort( + reauth_entry, data=user_input ) - self.hass.async_create_task( - self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) - ) - return 
self.async_abort(reason="reauth_successful") errors["base"] = "invalid_host" - data_schema = self.add_suggested_values_to_schema( - DATA_SCHEMA, self._reauth_entry.data if self._reauth_entry else {} - ) + data_schema = DATA_SCHEMA + if self.source == SOURCE_REAUTH: + data_schema = self.add_suggested_values_to_schema( + data_schema, self._get_reauth_entry().data + ) return self.async_show_form( step_id="user", data_schema=data_schema, errors=errors ) @@ -111,9 +106,6 @@ class RuckusUnleashedConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/ruckus_unleashed/const.py b/homeassistant/components/ruckus_unleashed/const.py index 9076437b8c7..1aae3041e73 100644 --- a/homeassistant/components/ruckus_unleashed/const.py +++ b/homeassistant/components/ruckus_unleashed/const.py @@ -1,4 +1,4 @@ -"""Constants for the Ruckus Unleashed integration.""" +"""Constants for the Ruckus integration.""" from homeassistant.const import Platform diff --git a/homeassistant/components/ruckus_unleashed/coordinator.py b/homeassistant/components/ruckus_unleashed/coordinator.py index 989748af86e..d9f20883559 100644 --- a/homeassistant/components/ruckus_unleashed/coordinator.py +++ b/homeassistant/components/ruckus_unleashed/coordinator.py @@ -1,4 +1,4 @@ -"""Ruckus Unleashed DataUpdateCoordinator.""" +"""Ruckus DataUpdateCoordinator.""" from datetime import timedelta import logging @@ -15,11 +15,11 @@ from .const import API_CLIENT_MAC, DOMAIN, KEY_SYS_CLIENTS, SCAN_INTERVAL _LOGGER = logging.getLogger(__package__) -class RuckusUnleashedDataUpdateCoordinator(DataUpdateCoordinator): - """Coordinator to manage data from Ruckus Unleashed client.""" +class RuckusDataUpdateCoordinator(DataUpdateCoordinator): + """Coordinator to manage data from Ruckus client.""" def __init__(self, hass: HomeAssistant, *, ruckus: AjaxSession) -> None: - """Initialize global Ruckus Unleashed data updater.""" + """Initialize global Ruckus data updater.""" self.ruckus = ruckus update_interval = timedelta(seconds=SCAN_INTERVAL) @@ -38,7 +38,7 @@ class RuckusUnleashedDataUpdateCoordinator(DataUpdateCoordinator): return {client[API_CLIENT_MAC]: client for client in clients} async def _async_update_data(self) -> dict: - """Fetch Ruckus Unleashed data.""" + """Fetch Ruckus data.""" try: return {KEY_SYS_CLIENTS: await self._fetch_clients()} except AuthenticationError as autherror: diff --git a/homeassistant/components/ruckus_unleashed/device_tracker.py b/homeassistant/components/ruckus_unleashed/device_tracker.py index 233e5cd4945..8a5e8b79294 100644 --- a/homeassistant/components/ruckus_unleashed/device_tracker.py +++ b/homeassistant/components/ruckus_unleashed/device_tracker.py @@ -1,10 +1,10 @@ -"""Support for Ruckus Unleashed devices.""" +"""Support for Ruckus devices.""" from __future__ import annotations import logging -from homeassistant.components.device_tracker import ScannerEntity, SourceType +from homeassistant.components.device_tracker import ScannerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er @@ -19,7 +19,7 @@ from .const import ( KEY_SYS_CLIENTS, UNDO_UPDATE_LISTENERS, ) -from .coordinator import RuckusUnleashedDataUpdateCoordinator +from 
.coordinator import RuckusDataUpdateCoordinator _LOGGER = logging.getLogger(__package__) @@ -27,7 +27,7 @@ _LOGGER = logging.getLogger(__package__) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: - """Set up device tracker for Ruckus Unleashed component.""" + """Set up device tracker for Ruckus component.""" coordinator = hass.data[DOMAIN][entry.entry_id][COORDINATOR] tracked: set[str] = set() @@ -58,9 +58,7 @@ def add_new_entities(coordinator, async_add_entities, tracked): device = coordinator.data[KEY_SYS_CLIENTS][mac] _LOGGER.debug("adding new device: [%s] %s", mac, device[API_CLIENT_HOSTNAME]) - new_tracked.append( - RuckusUnleashedDevice(coordinator, mac, device[API_CLIENT_HOSTNAME]) - ) + new_tracked.append(RuckusDevice(coordinator, mac, device[API_CLIENT_HOSTNAME])) tracked.add(mac) async_add_entities(new_tracked) @@ -69,13 +67,13 @@ def add_new_entities(coordinator, async_add_entities, tracked): @callback def restore_entities( registry: er.EntityRegistry, - coordinator: RuckusUnleashedDataUpdateCoordinator, + coordinator: RuckusDataUpdateCoordinator, entry: ConfigEntry, async_add_entities: AddEntitiesCallback, tracked: set[str], ) -> None: """Restore clients that are not a part of active clients list.""" - missing: list[RuckusUnleashedDevice] = [] + missing: list[RuckusDevice] = [] for entity in registry.entities.get_entries_for_config_entry_id(entry.entry_id): if ( @@ -83,9 +81,7 @@ def restore_entities( and entity.unique_id not in coordinator.data[KEY_SYS_CLIENTS] ): missing.append( - RuckusUnleashedDevice( - coordinator, entity.unique_id, entity.original_name - ) + RuckusDevice(coordinator, entity.unique_id, entity.original_name) ) tracked.add(entity.unique_id) @@ -93,11 +89,11 @@ def restore_entities( async_add_entities(missing) -class RuckusUnleashedDevice(CoordinatorEntity, ScannerEntity): - """Representation of a Ruckus Unleashed client.""" +class RuckusDevice(CoordinatorEntity, ScannerEntity): + """Representation of a Ruckus client.""" def __init__(self, coordinator, mac, name) -> None: - """Initialize a Ruckus Unleashed client.""" + """Initialize a Ruckus client.""" super().__init__(coordinator) self._mac = mac self._name = name @@ -125,8 +121,3 @@ class RuckusUnleashedDevice(CoordinatorEntity, ScannerEntity): def is_connected(self) -> bool: """Return true if the device is connected to the network.""" return self._mac in self.coordinator.data[KEY_SYS_CLIENTS] - - @property - def source_type(self) -> SourceType: - """Return the source type.""" - return SourceType.ROUTER diff --git a/homeassistant/components/ruckus_unleashed/manifest.json b/homeassistant/components/ruckus_unleashed/manifest.json index edaf0aa95d2..8d56f3a5563 100644 --- a/homeassistant/components/ruckus_unleashed/manifest.json +++ b/homeassistant/components/ruckus_unleashed/manifest.json @@ -1,11 +1,11 @@ { "domain": "ruckus_unleashed", - "name": "Ruckus Unleashed", + "name": "Ruckus", "codeowners": ["@lanrat", "@ms264556", "@gabe565"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/ruckus_unleashed", "integration_type": "hub", "iot_class": "local_polling", - "loggers": ["aioruckus", "xmltodict"], - "requirements": ["aioruckus==0.34"] + "loggers": ["aioruckus"], + "requirements": ["aioruckus==0.42"] } diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index 8627c636ef2..b068fbd1892 100644 --- 
a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -3,20 +3,21 @@ import asyncio import logging -from aiorussound import Russound +from aiorussound import RussoundClient, RussoundTcpConnectionHandler +from aiorussound.models import CallbackType from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import CONNECT_TIMEOUT, RUSSOUND_RIO_EXCEPTIONS +from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS PLATFORMS = [Platform.MEDIA_PLAYER] _LOGGER = logging.getLogger(__name__) -type RussoundConfigEntry = ConfigEntry[Russound] +type RussoundConfigEntry = ConfigEntry[RussoundClient] async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> bool: @@ -24,37 +25,42 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] - russ = Russound(hass.loop, host, port) + client = RussoundClient(RussoundTcpConnectionHandler(host, port)) - @callback - def is_connected_updated(connected: bool) -> None: - if connected: - _LOGGER.warning("Reconnected to controller at %s:%s", host, port) - else: - _LOGGER.warning( - "Disconnected from controller at %s:%s", - host, - port, - ) + async def _connection_update_callback( + _client: RussoundClient, _callback_type: CallbackType + ) -> None: + """Call when the device is notified of changes.""" + if _callback_type == CallbackType.CONNECTION: + if _client.is_connected(): + _LOGGER.warning("Reconnected to device at %s", entry.data[CONF_HOST]) + else: + _LOGGER.warning("Disconnected from device at %s", entry.data[CONF_HOST]) - russ.add_connection_callback(is_connected_updated) + await client.register_state_update_callbacks(_connection_update_callback) try: async with asyncio.timeout(CONNECT_TIMEOUT): - await russ.connect() + await client.connect() except RUSSOUND_RIO_EXCEPTIONS as err: - raise ConfigEntryNotReady(f"Error while connecting to {host}:{port}") from err - - entry.runtime_data = russ + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="entry_cannot_connect", + translation_placeholders={ + "host": host, + "port": port, + }, + ) from err + entry.runtime_data = client await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - await entry.runtime_data.close() + await entry.runtime_data.disconnect() return unload_ok diff --git a/homeassistant/components/russound_rio/config_flow.py b/homeassistant/components/russound_rio/config_flow.py index e25ac7dde2e..15d002b3f49 100644 --- a/homeassistant/components/russound_rio/config_flow.py +++ b/homeassistant/components/russound_rio/config_flow.py @@ -6,19 +6,14 @@ import asyncio import logging from typing import Any -from aiorussound import Controller, Russound +from aiorussound import RussoundClient, RussoundTcpConnectionHandler import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT from 
homeassistant.helpers import config_validation as cv -from .const import ( - CONNECT_TIMEOUT, - DOMAIN, - RUSSOUND_RIO_EXCEPTIONS, - NoPrimaryControllerException, -) +from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS DATA_SCHEMA = vol.Schema( { @@ -30,16 +25,6 @@ DATA_SCHEMA = vol.Schema( _LOGGER = logging.getLogger(__name__) -def find_primary_controller_metadata( - controllers: dict[int, Controller], -) -> tuple[str, str]: - """Find the mac address of the primary Russound controller.""" - if 1 in controllers: - c = controllers[1] - return c.mac_address, c.controller_type - raise NoPrimaryControllerException - - class FlowHandler(ConfigFlow, domain=DOMAIN): """Russound RIO configuration flow.""" @@ -54,60 +39,47 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): host = user_input[CONF_HOST] port = user_input[CONF_PORT] - controllers = None - russ = Russound(self.hass.loop, host, port) + client = RussoundClient(RussoundTcpConnectionHandler(host, port)) try: async with asyncio.timeout(CONNECT_TIMEOUT): - await russ.connect() - controllers = await russ.enumerate_controllers() - metadata = find_primary_controller_metadata(controllers) - await russ.close() + await client.connect() + controller = client.controllers[1] + await client.disconnect() except RUSSOUND_RIO_EXCEPTIONS: _LOGGER.exception("Could not connect to Russound RIO") errors["base"] = "cannot_connect" - except NoPrimaryControllerException: - _LOGGER.exception( - "Russound RIO device doesn't have a primary controller", - ) - errors["base"] = "no_primary_controller" else: - await self.async_set_unique_id(metadata[0]) + await self.async_set_unique_id(controller.mac_address) self._abort_if_unique_id_configured() data = {CONF_HOST: host, CONF_PORT: port} - return self.async_create_entry(title=metadata[1], data=data) + return self.async_create_entry( + title=controller.controller_type, data=data + ) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Attempt to import the existing configuration.""" - self._async_abort_entries_match({CONF_HOST: import_config[CONF_HOST]}) - host = import_config[CONF_HOST] - port = import_config.get(CONF_PORT, 9621) + self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) + host = import_data[CONF_HOST] + port = import_data.get(CONF_PORT, 9621) # Connection logic is repeated here since this method will be removed in future releases - russ = Russound(self.hass.loop, host, port) + client = RussoundClient(RussoundTcpConnectionHandler(host, port)) try: async with asyncio.timeout(CONNECT_TIMEOUT): - await russ.connect() - controllers = await russ.enumerate_controllers() - metadata = find_primary_controller_metadata(controllers) - await russ.close() + await client.connect() + controller = client.controllers[1] + await client.disconnect() except RUSSOUND_RIO_EXCEPTIONS: _LOGGER.exception("Could not connect to Russound RIO") return self.async_abort( reason="cannot_connect", description_placeholders={} ) - except NoPrimaryControllerException: - _LOGGER.exception("Russound RIO device doesn't have a primary controller") - return self.async_abort( - reason="no_primary_controller", description_placeholders={} - ) else: - await self.async_set_unique_id(metadata[0]) + await self.async_set_unique_id(controller.mac_address) self._abort_if_unique_id_configured() data = {CONF_HOST: 
host, CONF_PORT: port} - return self.async_create_entry(title=metadata[1], data=data) + return self.async_create_entry(title=controller.controller_type, data=data) diff --git a/homeassistant/components/russound_rio/const.py b/homeassistant/components/russound_rio/const.py index d1f4e1c4c0e..af52e89d399 100644 --- a/homeassistant/components/russound_rio/const.py +++ b/homeassistant/components/russound_rio/const.py @@ -2,7 +2,7 @@ import asyncio -from aiorussound import CommandException +from aiorussound import CommandError from aiorussound.const import FeatureFlag from homeassistant.components.media_player import MediaPlayerEntityFeature @@ -10,18 +10,14 @@ from homeassistant.components.media_player import MediaPlayerEntityFeature DOMAIN = "russound_rio" RUSSOUND_RIO_EXCEPTIONS = ( - CommandException, + CommandError, ConnectionRefusedError, TimeoutError, asyncio.CancelledError, ) -class NoPrimaryControllerException(Exception): - """Thrown when the Russound device is not the primary unit in the RNET stack.""" - - -CONNECT_TIMEOUT = 5 +CONNECT_TIMEOUT = 15 MP_FEATURES_BY_FLAG = { FeatureFlag.COMMANDS_ZONE_MUTE_OFF_ON: MediaPlayerEntityFeature.VOLUME_MUTE diff --git a/homeassistant/components/russound_rio/diagnostics.py b/homeassistant/components/russound_rio/diagnostics.py new file mode 100644 index 00000000000..0e96413c41a --- /dev/null +++ b/homeassistant/components/russound_rio/diagnostics.py @@ -0,0 +1,14 @@ +"""Diagnostics platform for Russound RIO.""" + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import RussoundConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: RussoundConfigEntry +) -> dict[str, Any]: + """Return diagnostics for the provided config entry.""" + return entry.runtime_data.state diff --git a/homeassistant/components/russound_rio/entity.py b/homeassistant/components/russound_rio/entity.py index 0e4d5cf7dde..9790ff43e68 100644 --- a/homeassistant/components/russound_rio/entity.py +++ b/homeassistant/components/russound_rio/entity.py @@ -4,9 +4,9 @@ from collections.abc import Awaitable, Callable, Coroutine from functools import wraps from typing import Any, Concatenate -from aiorussound import Controller +from aiorussound import Controller, RussoundClient, RussoundTcpConnectionHandler +from aiorussound.models import CallbackType -from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import Entity @@ -26,7 +26,12 @@ def command[_EntityT: RussoundBaseEntity, **_P]( await func(self, *args, **kwargs) except RUSSOUND_RIO_EXCEPTIONS as exc: raise HomeAssistantError( - f"Error executing {func.__name__} on entity {self.entity_id}," + translation_domain=DOMAIN, + translation_key="command_error", + translation_placeholders={ + "function_name": func.__name__, + "entity_id": self.entity_id, + }, ) from exc return decorator @@ -43,17 +48,16 @@ class RussoundBaseEntity(Entity): controller: Controller, ) -> None: """Initialize the entity.""" - self._instance = controller.instance + self._client = controller.client self._controller = controller self._primary_mac_address = ( - controller.mac_address or controller.parent_controller.mac_address + controller.mac_address or self._client.controllers[1].mac_address ) self._device_identifier = ( self._controller.mac_address or f"{self._primary_mac_address}-{self._controller.controller_id}" ) 
self._attr_device_info = DeviceInfo( - configuration_url=f"http://{self._instance.host}", # Use MAC address of Russound device as identifier identifiers={(DOMAIN, self._device_identifier)}, manufacturer="Russound", @@ -61,26 +65,35 @@ class RussoundBaseEntity(Entity): model=controller.controller_type, sw_version=controller.firmware_version, ) - if controller.parent_controller: + if isinstance(self._client.connection_handler, RussoundTcpConnectionHandler): + self._attr_device_info["configuration_url"] = ( + f"http://{self._client.connection_handler.host}" + ) + if controller.controller_id != 1: + assert self._client.controllers[1].mac_address self._attr_device_info["via_device"] = ( DOMAIN, - controller.parent_controller.mac_address, + self._client.controllers[1].mac_address, ) else: + assert controller.mac_address self._attr_device_info["connections"] = { (CONNECTION_NETWORK_MAC, controller.mac_address) } - @callback - def _is_connected_updated(self, connected: bool) -> None: - """Update the state when the device is ready to receive commands or is unavailable.""" - self._attr_available = connected + async def _state_update_callback( + self, _client: RussoundClient, _callback_type: CallbackType + ) -> None: + """Call when the device is notified of changes.""" + if _callback_type == CallbackType.CONNECTION: + self._attr_available = _client.is_connected() + self._controller = _client.controllers[self._controller.controller_id] self.async_write_ha_state() async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self._instance.add_connection_callback(self._is_connected_updated) + """Register callback handlers.""" + await self._client.register_state_update_callbacks(self._state_update_callback) async def async_will_remove_from_hass(self) -> None: """Remove callbacks.""" - self._instance.remove_connection_callback(self._is_connected_updated) + self._client.unregister_state_update_callbacks(self._state_update_callback) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 6c473d94874..2cd153c232c 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], - "quality_scale": "silver", - "requirements": ["aiorussound==2.3.2"] + "requirements": ["aiorussound==4.1.0"] } diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 20aaf0f3c08..02467731ec3 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -3,8 +3,13 @@ from __future__ import annotations import logging +from typing import TYPE_CHECKING -from aiorussound import Source, Zone +from aiorussound import Controller +from aiorussound.const import FeatureFlag +from aiorussound.models import PlayStatus, Source +from aiorussound.rio import ZoneControlSurface +from aiorussound.util import is_feature_supported from homeassistant.components.media_player import ( MediaPlayerDeviceClass, @@ -14,8 +19,7 @@ from homeassistant.components.media_player import ( MediaType, ) from homeassistant.config_entries import SOURCE_IMPORT -from homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from homeassistant.core import DOMAIN as 
HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue @@ -27,6 +31,8 @@ from .entity import RussoundBaseEntity, command _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + async def async_setup_platform( hass: HomeAssistant, @@ -82,29 +88,14 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Russound RIO platform.""" - russ = entry.runtime_data + client = entry.runtime_data + sources = client.sources - # Discover controllers - controllers = await russ.enumerate_controllers() - - entities = [] - for controller in controllers.values(): - sources = controller.sources - for source in sources.values(): - await source.watch() - for zone in controller.zones.values(): - await zone.watch() - mp = RussoundZoneDevice(zone, sources) - entities.append(mp) - - @callback - def on_stop(event): - """Shutdown cleanly when hass stops.""" - hass.loop.create_task(russ.close()) - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_stop) - - async_add_entities(entities) + async_add_entities( + RussoundZoneDevice(controller, zone_id, sources) + for controller in client.controllers.values() + for zone_id in controller.zones + ) class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): @@ -120,85 +111,96 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): | MediaPlayerEntityFeature.SELECT_SOURCE ) - def __init__(self, zone: Zone, sources: dict[int, Source]) -> None: + def __init__( + self, controller: Controller, zone_id: int, sources: dict[int, Source] + ) -> None: """Initialize the zone device.""" - super().__init__(zone.controller) - self._zone = zone + super().__init__(controller) + self._zone_id = zone_id + _zone = self._zone self._sources = sources - self._attr_name = zone.name - self._attr_unique_id = f"{self._primary_mac_address}-{zone.device_str()}" + self._attr_name = _zone.name + self._attr_unique_id = f"{self._primary_mac_address}-{_zone.device_str}" for flag, feature in MP_FEATURES_BY_FLAG.items(): - if flag in zone.instance.supported_features: + if flag in self._client.supported_features: self._attr_supported_features |= feature - def _callback_handler(self, device_str, *args): - if ( - device_str == self._zone.device_str() - or device_str == self._current_source().device_str() - ): - self.schedule_update_ha_state() + @property + def _zone(self) -> ZoneControlSurface: + return self._controller.zones[self._zone_id] - async def async_added_to_hass(self) -> None: - """Register callback handlers.""" - await super().async_added_to_hass() - self._zone.add_callback(self._callback_handler) - - async def async_will_remove_from_hass(self) -> None: - """Remove callbacks.""" - await super().async_will_remove_from_hass() - self._zone.remove_callback(self._callback_handler) - - def _current_source(self) -> Source: + @property + def _source(self) -> Source: return self._zone.fetch_current_source() @property def state(self) -> MediaPlayerState | None: """Return the state of the device.""" status = self._zone.status - if status == "ON": - return MediaPlayerState.ON - if status == "OFF": + play_status = self._source.play_status + if not status: return MediaPlayerState.OFF - return None + if play_status == PlayStatus.PLAYING: + return MediaPlayerState.PLAYING + if play_status == PlayStatus.PAUSED: + return MediaPlayerState.PAUSED + if play_status 
== PlayStatus.TRANSITIONING: + return MediaPlayerState.BUFFERING + if play_status == PlayStatus.STOPPED: + return MediaPlayerState.IDLE + return MediaPlayerState.ON @property - def source(self): + def source(self) -> str: """Get the currently selected source.""" - return self._current_source().name + return self._source.name @property - def source_list(self): + def source_list(self) -> list[str]: """Return a list of available input sources.""" - return [x.name for x in self._sources.values()] + if TYPE_CHECKING: + assert self._client.rio_version + available_sources = ( + [ + source + for source_id, source in self._sources.items() + if source_id in self._zone.enabled_sources + ] + if is_feature_supported( + self._client.rio_version, FeatureFlag.SUPPORT_ZONE_SOURCE_EXCLUSION + ) + else self._sources.values() + ) + return [x.name for x in available_sources] @property - def media_title(self): + def media_title(self) -> str | None: """Title of current playing media.""" - return self._current_source().song_name + return self._source.song_name @property - def media_artist(self): + def media_artist(self) -> str | None: """Artist of current playing media, music track only.""" - return self._current_source().artist_name + return self._source.artist_name @property - def media_album_name(self): + def media_album_name(self) -> str | None: """Album name of current playing media, music track only.""" - return self._current_source().album_name + return self._source.album_name @property - def media_image_url(self): + def media_image_url(self) -> str | None: """Image url of current playing media.""" - return self._current_source().cover_art_url + return self._source.cover_art_url @property - def volume_level(self): + def volume_level(self) -> float: """Volume level of the media player (0..1). Value is returned based on a range (0..50). Therefore float divide by 50 to get to the required range. """ - return float(self._zone.volume or "0") / 50.0 + return self._zone.volume / 50.0 @command async def async_turn_off(self) -> None: @@ -214,7 +216,7 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): async def async_set_volume_level(self, volume: float) -> None: """Set the volume level.""" rvol = int(volume * 50.0) - await self._zone.set_volume(rvol) + await self._zone.set_volume(str(rvol)) @command async def async_select_source(self, source: str) -> None: diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml new file mode 100644 index 00000000000..3a5e8f9adb7 --- /dev/null +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: + status: exempt + comment: | + This integration uses a push API. No polling required. + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: + status: todo + comment: | + The data_description fields in translations are missing. + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. 
+  docs-high-level-description: done
+  docs-installation-instructions: todo
+  docs-removal-instructions: todo
+  entity-event-setup: done
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+
+  # Silver
+  config-entry-unloading: done
+  log-when-unavailable: done
+  entity-unavailable: done
+  action-exceptions: done
+  reauthentication-flow:
+    status: exempt
+    comment: |
+      This integration does not require authentication.
+  parallel-updates: done
+  test-coverage: todo
+  integration-owner: done
+  docs-installation-parameters: todo
+  docs-configuration-parameters:
+    status: exempt
+    comment: |
+      This integration does not have an options flow.
+  # Gold
+  entity-translations:
+    status: exempt
+    comment: |
+      There are no entities to translate.
+  entity-device-class: done
+  devices: done
+  entity-category: done
+  entity-disabled-by-default:
+    status: exempt
+    comment: |
+      This integration doesn't have enough / noisy entities that warrant being disabled by default.
+  discovery: todo
+  stale-devices: todo
+  diagnostics: done
+  exception-translations: done
+  icon-translations: todo
+  reconfiguration-flow: todo
+  dynamic-devices: todo
+  discovery-update-info: todo
+  repair-issues: done
+  docs-use-cases: todo
+  docs-supported-devices: done
+  docs-supported-functions: todo
+  docs-data-update: todo
+  docs-known-limitations: todo
+  docs-troubleshooting: todo
+  docs-examples: todo
+
+  # Platinum
+  async-dependency: done
+  inject-websession:
+    status: exempt
+    comment: |
+      This integration uses telnet exclusively and does not make http calls.
+  strict-typing: done
diff --git a/homeassistant/components/russound_rio/strings.json b/homeassistant/components/russound_rio/strings.json
index a8b89e3dae3..b8c29c08301 100644
--- a/homeassistant/components/russound_rio/strings.json
+++ b/homeassistant/components/russound_rio/strings.json
@@ -1,7 +1,6 @@
 {
   "common": {
-    "error_cannot_connect": "Failed to connect to Russound device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect.",
-    "error_no_primary_controller": "No primary controller was detected for the Russound device. Please make sure that the target Russound device has it's controller ID set to 1 (using the selector on the back of the unit)."
+    "error_cannot_connect": "Failed to connect to Russound device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect."
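The quality scale above marks exception-translations as done: the russound_rio `__init__.py` and `entity.py` hunks earlier raise `ConfigEntryNotReady` / `HomeAssistantError` with `translation_domain` and `translation_key`, and the matching message templates land in the "exceptions" section added at the end of this strings.json hunk. A minimal sketch of the raising side, with a hypothetical `my_integration` domain standing in for the real one:

from homeassistant.exceptions import ConfigEntryNotReady

DOMAIN = "my_integration"  # hypothetical placeholder


def raise_cannot_connect(host: str, port: int) -> None:
    """Raise a setup error whose message is resolved from strings.json ("exceptions" -> "entry_cannot_connect")."""
    raise ConfigEntryNotReady(
        translation_domain=DOMAIN,
        translation_key="entry_cannot_connect",
        translation_placeholders={"host": host, "port": str(port)},
    )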
}, "config": { "step": { @@ -14,12 +13,10 @@ } }, "error": { - "cannot_connect": "[%key:component::russound_rio::common::error_cannot_connect%]", - "no_primary_controller": "[%key:component::russound_rio::common::error_no_primary_controller%]" + "cannot_connect": "[%key:component::russound_rio::common::error_cannot_connect%]" }, "abort": { "cannot_connect": "[%key:component::russound_rio::common::error_cannot_connect%]", - "no_primary_controller": "[%key:component::russound_rio::common::error_no_primary_controller%]", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, @@ -36,5 +33,13 @@ "title": "[%key:component::russound_rio::issues::deprecated_yaml_import_issue_cannot_connect::title%]", "description": "[%key:component::russound_rio::issues::deprecated_yaml_import_issue_cannot_connect::description%]" } + }, + "exceptions": { + "entry_cannot_connect": { + "message": "Error while connecting to {host}:{port}" + }, + "command_error": { + "message": "Error executing {function_name} on entity {entity_id}" + } } } diff --git a/homeassistant/components/russound_rnet/manifest.json b/homeassistant/components/russound_rnet/manifest.json index a93e3fe5a87..27fbfbca57f 100644 --- a/homeassistant/components/russound_rnet/manifest.json +++ b/homeassistant/components/russound_rnet/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rnet", "iot_class": "local_polling", "loggers": ["russound"], - "requirements": ["russound==0.1.9"] + "quality_scale": "legacy", + "requirements": ["russound==0.2.0"] } diff --git a/homeassistant/components/russound_rnet/media_player.py b/homeassistant/components/russound_rnet/media_player.py index a08cfbe7747..f8369ed64ca 100644 --- a/homeassistant/components/russound_rnet/media_player.py +++ b/homeassistant/components/russound_rnet/media_player.py @@ -96,7 +96,13 @@ class RussoundRNETDevice(MediaPlayerEntity): # Updated this function to make a single call to get_zone_info, so that # with a single call we can get On/Off, Volume and Source, reducing the # amount of traffic and speeding up the update process. 
- ret = self._russ.get_zone_info(self._controller_id, self._zone_id, 4) + try: + ret = self._russ.get_zone_info(self._controller_id, self._zone_id, 4) + except BrokenPipeError: + _LOGGER.error("Broken Pipe Error, trying to reconnect to Russound RNET") + self._russ.connect() + ret = self._russ.get_zone_info(self._controller_id, self._zone_id, 4) + _LOGGER.debug("ret= %s", ret) if ret is not None: _LOGGER.debug( diff --git a/homeassistant/components/rympro/config_flow.py b/homeassistant/components/rympro/config_flow.py index be35c48ac5b..1d5d8a9e79d 100644 --- a/homeassistant/components/rympro/config_flow.py +++ b/homeassistant/components/rympro/config_flow.py @@ -9,7 +9,7 @@ from typing import Any from pyrympro import CannotConnectError, RymPro, UnauthorizedError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TOKEN, CONF_UNIQUE_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -46,10 +46,6 @@ class RymproConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Init the config flow.""" - self._reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -74,19 +70,17 @@ class RymproConfigFlow(ConfigFlow, domain=DOMAIN): title = user_input[CONF_EMAIL] data = {**user_input, **info} - if not self._reauth_entry: + if self.source != SOURCE_REAUTH: await self.async_set_unique_id(info[CONF_UNIQUE_ID]) self._abort_if_unique_id_configured() return self.async_create_entry(title=title, data=data) - self.hass.config_entries.async_update_entry( - self._reauth_entry, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), title=title, data=data, unique_id=info[CONF_UNIQUE_ID], ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors @@ -96,7 +90,4 @@ class RymproConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/rympro/strings.json b/homeassistant/components/rympro/strings.json index c58bf5b93ba..2c1e2ad93c9 100644 --- a/homeassistant/components/rympro/strings.json +++ b/homeassistant/components/rympro/strings.json @@ -14,7 +14,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/homeassistant/components/sabnzbd/__init__.py b/homeassistant/components/sabnzbd/__init__.py index a827e9a36a4..2e3d6dd613c 100644 --- a/homeassistant/components/sabnzbd/__init__.py +++ b/homeassistant/components/sabnzbd/__init__.py @@ -8,40 +8,27 @@ from typing import Any import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry, ConfigEntryState -from 
homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_NAME, - CONF_PORT, - CONF_SENSORS, - CONF_SSL, - Platform, -) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError -from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries +from homeassistant.helpers import config_validation as cv +import homeassistant.helpers.issue_registry as ir from homeassistant.helpers.typing import ConfigType from .const import ( ATTR_API_KEY, ATTR_SPEED, - DEFAULT_HOST, - DEFAULT_NAME, - DEFAULT_PORT, DEFAULT_SPEED_LIMIT, - DEFAULT_SSL, DOMAIN, SERVICE_PAUSE, SERVICE_RESUME, SERVICE_SET_SPEED, ) -from .coordinator import SabnzbdUpdateCoordinator -from .sab import get_client -from .sensor import OLD_SENSOR_KEYS +from .coordinator import SabnzbdConfigEntry, SabnzbdUpdateCoordinator +from .helpers import get_client -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.NUMBER, Platform.SENSOR] _LOGGER = logging.getLogger(__name__) SERVICES = ( @@ -62,122 +49,26 @@ SERVICE_SPEED_SCHEMA = SERVICE_BASE_SCHEMA.extend( } ) -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - vol.All( - cv.deprecated(CONF_HOST), - cv.deprecated(CONF_PORT), - cv.deprecated(CONF_SENSORS), - cv.deprecated(CONF_SSL), - { - vol.Required(CONF_API_KEY): str, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): str, - vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - vol.Optional(CONF_SENSORS): vol.All( - cv.ensure_list, [vol.In(OLD_SENSOR_KEYS)] - ), - vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, - }, - ) - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the SABnzbd component.""" - hass.data.setdefault(DOMAIN, {}) - - if hass.config_entries.async_entries(DOMAIN): - return True - - if DOMAIN in config: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config[DOMAIN], - ) - ) - - return True +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @callback -def async_get_entry_id_for_service_call(hass: HomeAssistant, call: ServiceCall) -> str: +def async_get_entry_for_service_call( + hass: HomeAssistant, call: ServiceCall +) -> SabnzbdConfigEntry: """Get the entry ID related to a service call (by device ID).""" call_data_api_key = call.data[ATTR_API_KEY] for entry in hass.config_entries.async_entries(DOMAIN): if entry.data[ATTR_API_KEY] == call_data_api_key: - return entry.entry_id + return entry raise ValueError(f"No api for API key: {call_data_api_key}") -def update_device_identifiers(hass: HomeAssistant, entry: ConfigEntry): - """Update device identifiers to new identifiers.""" - device_registry = dr.async_get(hass) - device_entry = device_registry.async_get_device(identifiers={(DOMAIN, DOMAIN)}) - if device_entry and entry.entry_id in device_entry.config_entries: - new_identifiers = {(DOMAIN, entry.entry_id)} - _LOGGER.debug( - "Updating device id <%s> with new identifiers <%s>", - device_entry.id, - new_identifiers, - ) - device_registry.async_update_device( - device_entry.id, new_identifiers=new_identifiers - ) - - -async def migrate_unique_id(hass: HomeAssistant, 
entry: ConfigEntry): - """Migrate entities to new unique ids (with entry_id).""" - - @callback - def async_migrate_callback(entity_entry: RegistryEntry) -> dict | None: - """Define a callback to migrate appropriate SabnzbdSensor entities to new unique IDs. - - Old: description.key - New: {entry_id}_description.key - """ - entry_id = entity_entry.config_entry_id - if entry_id is None: - return None - if entity_entry.unique_id.startswith(entry_id): - return None - - new_unique_id = f"{entry_id}_{entity_entry.unique_id}" - - _LOGGER.debug( - "Migrating entity %s from old unique ID '%s' to new unique ID '%s'", - entity_entry.entity_id, - entity_entry.unique_id, - new_unique_id, - ) - - return {"new_unique_id": new_unique_id} - - await async_migrate_entries(hass, entry.entry_id, async_migrate_callback) - - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the SabNzbd Component.""" - sab_api = await get_client(hass, entry.data) - if not sab_api: - raise ConfigEntryNotReady - - await migrate_unique_id(hass, entry) - update_device_identifiers(hass, entry) - - coordinator = SabnzbdUpdateCoordinator(hass, sab_api) - await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator - @callback def extract_api( func: Callable[ @@ -188,8 +79,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def wrapper(call: ServiceCall) -> None: """Wrap the service function.""" - entry_id = async_get_entry_id_for_service_call(hass, call) - coordinator: SabnzbdUpdateCoordinator = hass.data[DOMAIN][entry_id] + config_entry = async_get_entry_for_service_call(hass, call) + coordinator = config_entry.runtime_data try: await func(call, coordinator) @@ -204,18 +95,45 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_pause_queue( call: ServiceCall, coordinator: SabnzbdUpdateCoordinator ) -> None: + ir.async_create_issue( + hass, + DOMAIN, + "pause_action_deprecated", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + breaks_in_ha_version="2025.6", + translation_key="pause_action_deprecated", + ) await coordinator.sab_api.pause_queue() @extract_api async def async_resume_queue( call: ServiceCall, coordinator: SabnzbdUpdateCoordinator ) -> None: + ir.async_create_issue( + hass, + DOMAIN, + "resume_action_deprecated", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + breaks_in_ha_version="2025.6", + translation_key="resume_action_deprecated", + ) await coordinator.sab_api.resume_queue() @extract_api async def async_set_queue_speed( call: ServiceCall, coordinator: SabnzbdUpdateCoordinator ) -> None: + ir.async_create_issue( + hass, + DOMAIN, + "set_speed_action_deprecated", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + breaks_in_ha_version="2025.6", + translation_key="set_speed_action_deprecated", + ) speed = call.data.get(ATTR_SPEED) await coordinator.sab_api.set_speed_limit(speed) @@ -224,21 +142,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: (SERVICE_RESUME, async_resume_queue, SERVICE_BASE_SCHEMA), (SERVICE_SET_SPEED, async_set_queue_speed, SERVICE_SPEED_SCHEMA), ): - if hass.services.has_service(DOMAIN, service): - continue - hass.services.async_register(DOMAIN, service, method, schema=schema) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> bool: + """Set up the 
SabNzbd Component.""" + + sab_api = await get_client(hass, entry.data) + if not sab_api: + raise ConfigEntryNotReady + + coordinator = SabnzbdUpdateCoordinator(hass, entry, sab_api) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> bool: """Unload a Sabnzbd config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) loaded_entries = [ entry diff --git a/homeassistant/components/sabnzbd/binary_sensor.py b/homeassistant/components/sabnzbd/binary_sensor.py new file mode 100644 index 00000000000..1d65bf01211 --- /dev/null +++ b/homeassistant/components/sabnzbd/binary_sensor.py @@ -0,0 +1,61 @@ +"""Binary sensor platform for SABnzbd.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SabnzbdConfigEntry +from .entity import SabnzbdEntity + + +@dataclass(frozen=True, kw_only=True) +class SabnzbdBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Sabnzbd binary sensor entity.""" + + is_on_fn: Callable[[dict[str, Any]], bool] + + +BINARY_SENSORS: tuple[SabnzbdBinarySensorEntityDescription, ...] 
= ( + SabnzbdBinarySensorEntityDescription( + key="warnings", + translation_key="warnings", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on_fn=lambda data: data["have_warnings"] != "0", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: SabnzbdConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up a Sabnzbd sensor entry.""" + coordinator = config_entry.runtime_data + + async_add_entities( + [SabnzbdBinarySensor(coordinator, sensor) for sensor in BINARY_SENSORS] + ) + + +class SabnzbdBinarySensor(SabnzbdEntity, BinarySensorEntity): + """Representation of an SABnzbd binary sensor.""" + + entity_description: SabnzbdBinarySensorEntityDescription + + @property + def is_on(self) -> bool: + """Return latest sensor data.""" + return self.entity_description.is_on_fn(self.coordinator.data) diff --git a/homeassistant/components/sabnzbd/button.py b/homeassistant/components/sabnzbd/button.py new file mode 100644 index 00000000000..1ff26b41655 --- /dev/null +++ b/homeassistant/components/sabnzbd/button.py @@ -0,0 +1,68 @@ +"""Button platform for the SABnzbd component.""" + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from pysabnzbd import SabnzbdApiException + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import SabnzbdConfigEntry, SabnzbdUpdateCoordinator +from .entity import SabnzbdEntity + + +@dataclass(kw_only=True, frozen=True) +class SabnzbdButtonEntityDescription(ButtonEntityDescription): + """Describes SABnzbd button entity.""" + + press_fn: Callable[[SabnzbdUpdateCoordinator], Any] + + +BUTTON_DESCRIPTIONS: tuple[SabnzbdButtonEntityDescription, ...] 
= ( + SabnzbdButtonEntityDescription( + key="pause", + translation_key="pause", + press_fn=lambda coordinator: coordinator.sab_api.pause_queue(), + ), + SabnzbdButtonEntityDescription( + key="resume", + translation_key="resume", + press_fn=lambda coordinator: coordinator.sab_api.resume_queue(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SabnzbdConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up buttons from a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + SabnzbdButton(coordinator, description) for description in BUTTON_DESCRIPTIONS + ) + + +class SabnzbdButton(SabnzbdEntity, ButtonEntity): + """Representation of a SABnzbd button.""" + + entity_description: SabnzbdButtonEntityDescription + + async def async_press(self) -> None: + """Handle the button press.""" + try: + await self.entity_description.press_fn(self.coordinator) + except SabnzbdApiException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/sabnzbd/config_flow.py b/homeassistant/components/sabnzbd/config_flow.py index 944c3f2936c..ce9b0a13b18 100644 --- a/homeassistant/components/sabnzbd/config_flow.py +++ b/homeassistant/components/sabnzbd/config_flow.py @@ -6,27 +6,38 @@ import logging from typing import Any import voluptuous as vol +import yarl -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_NAME, - CONF_PORT, - CONF_SSL, - CONF_URL, +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, ) +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) +from homeassistant.util import slugify -from .const import DEFAULT_NAME, DOMAIN -from .sab import get_client +from .const import DOMAIN +from .helpers import get_client _LOGGER = logging.getLogger(__name__) USER_SCHEMA = vol.Schema( { - vol.Required(CONF_API_KEY): str, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): str, - vol.Required(CONF_URL): str, + vol.Required(CONF_URL): TextSelector( + TextSelectorConfig( + type=TextSelectorType.URL, + ) + ), + vol.Required(CONF_API_KEY): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + ) + ), } ) @@ -36,39 +47,47 @@ class SABnzbdConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def _async_validate_input(self, user_input): - """Validate the user input allows us to connect.""" - errors = {} - sab_api = await get_client(self.hass, user_input) - if not sab_api: - errors["base"] = "cannot_connect" - - return errors + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration flow.""" + return await self.async_step_user(user_input) async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - errors = {} - if user_input is not None: - errors = await self._async_validate_input(user_input) - if not errors: + if user_input is not None: + sab_api = await get_client(self.hass, user_input) + if not sab_api: + errors["base"] = "cannot_connect" + else: + self._async_abort_entries_match( + { + CONF_URL: user_input[CONF_URL], + CONF_API_KEY: user_input[CONF_API_KEY], + } + ) + + 
if self.source == SOURCE_RECONFIGURE: + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), data_updates=user_input + ) + + parsed_url = yarl.URL(user_input[CONF_URL]) return self.async_create_entry( - title=user_input[CONF_API_KEY][:12], data=user_input + title=slugify(parsed_url.host), data=user_input ) return self.async_show_form( step_id="user", - data_schema=USER_SCHEMA, + data_schema=self.add_suggested_values_to_schema( + USER_SCHEMA, + self._get_reconfigure_entry().data + if self.source == SOURCE_RECONFIGURE + else user_input, + ), errors=errors, ) - - async def async_step_import(self, import_data): - """Import sabnzbd config from configuration.yaml.""" - protocol = "https://" if import_data[CONF_SSL] else "http://" - import_data[CONF_URL] = ( - f"{protocol}{import_data[CONF_HOST]}:{import_data[CONF_PORT]}" - ) - return await self.async_step_user(import_data) diff --git a/homeassistant/components/sabnzbd/const.py b/homeassistant/components/sabnzbd/const.py index 55346509133..991490f5716 100644 --- a/homeassistant/components/sabnzbd/const.py +++ b/homeassistant/components/sabnzbd/const.py @@ -7,7 +7,6 @@ ATTR_SPEED = "speed" ATTR_API_KEY = "api_key" DEFAULT_HOST = "localhost" -DEFAULT_NAME = "SABnzbd" DEFAULT_PORT = 8080 DEFAULT_SPEED_LIMIT = "100" DEFAULT_SSL = False diff --git a/homeassistant/components/sabnzbd/coordinator.py b/homeassistant/components/sabnzbd/coordinator.py index 5db59bb584b..dac8d8a8e95 100644 --- a/homeassistant/components/sabnzbd/coordinator.py +++ b/homeassistant/components/sabnzbd/coordinator.py @@ -6,18 +6,24 @@ from typing import Any from pysabnzbd import SabnzbdApi, SabnzbdApiException +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed _LOGGER = logging.getLogger(__name__) +type SabnzbdConfigEntry = ConfigEntry[SabnzbdUpdateCoordinator] + class SabnzbdUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """The SABnzbd update coordinator.""" + config_entry: SabnzbdConfigEntry + def __init__( self, hass: HomeAssistant, + config_entry: SabnzbdConfigEntry, sab_api: SabnzbdApi, ) -> None: """Initialize the SABnzbd update coordinator.""" @@ -26,6 +32,7 @@ class SabnzbdUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): super().__init__( hass, _LOGGER, + config_entry=config_entry, name="SABnzbd", update_interval=timedelta(seconds=30), ) diff --git a/homeassistant/components/sabnzbd/entity.py b/homeassistant/components/sabnzbd/entity.py new file mode 100644 index 00000000000..60a2eb8d251 --- /dev/null +++ b/homeassistant/components/sabnzbd/entity.py @@ -0,0 +1,33 @@ +"""Base entity for Sabnzbd.""" + +from homeassistant.const import CONF_URL +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import SabnzbdUpdateCoordinator + + +class SabnzbdEntity(CoordinatorEntity[SabnzbdUpdateCoordinator]): + """Defines a base Sabnzbd entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: SabnzbdUpdateCoordinator, + description: EntityDescription, + ) -> None: + """Initialize the base entity.""" + super().__init__(coordinator) + + entry_id = coordinator.config_entry.entry_id + self._attr_unique_id = f"{entry_id}_{description.key}" + self.entity_description = description + 
self._attr_device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, entry_id)}, + sw_version=coordinator.data["version"], + configuration_url=coordinator.config_entry.data[CONF_URL], + ) diff --git a/homeassistant/components/sabnzbd/sab.py b/homeassistant/components/sabnzbd/helpers.py similarity index 100% rename from homeassistant/components/sabnzbd/sab.py rename to homeassistant/components/sabnzbd/helpers.py diff --git a/homeassistant/components/sabnzbd/icons.json b/homeassistant/components/sabnzbd/icons.json index a693e9fec86..b0a72040b4b 100644 --- a/homeassistant/components/sabnzbd/icons.json +++ b/homeassistant/components/sabnzbd/icons.json @@ -1,7 +1,28 @@ { + "entity": { + "button": { + "pause": { + "default": "mdi:pause" + }, + "resume": { + "default": "mdi:play" + } + }, + "number": { + "speedlimit": { + "default": "mdi:speedometer" + } + } + }, "services": { - "pause": "mdi:pause", - "resume": "mdi:play", - "set_speed": "mdi:speedometer" + "pause": { + "service": "mdi:pause" + }, + "resume": { + "service": "mdi:play" + }, + "set_speed": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/sabnzbd/number.py b/homeassistant/components/sabnzbd/number.py new file mode 100644 index 00000000000..53c8d462f11 --- /dev/null +++ b/homeassistant/components/sabnzbd/number.py @@ -0,0 +1,81 @@ +"""Number entities for the SABnzbd integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from pysabnzbd import SabnzbdApiException + +from homeassistant.components.number import ( + NumberEntity, + NumberEntityDescription, + NumberMode, +) +from homeassistant.const import PERCENTAGE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import SabnzbdConfigEntry, SabnzbdUpdateCoordinator +from .entity import SabnzbdEntity + + +@dataclass(frozen=True, kw_only=True) +class SabnzbdNumberEntityDescription(NumberEntityDescription): + """Class describing a SABnzbd number entity.""" + + set_fn: Callable[[SabnzbdUpdateCoordinator, float], Awaitable] + + +NUMBER_DESCRIPTIONS: tuple[SabnzbdNumberEntityDescription, ...] 
= ( + SabnzbdNumberEntityDescription( + key="speedlimit", + translation_key="speedlimit", + mode=NumberMode.BOX, + native_max_value=100, + native_min_value=0, + native_step=1, + native_unit_of_measurement=PERCENTAGE, + set_fn=lambda coordinator, speed: ( + coordinator.sab_api.set_speed_limit(int(speed)) + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: SabnzbdConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the SABnzbd number entity.""" + coordinator = config_entry.runtime_data + + async_add_entities( + SabnzbdNumber(coordinator, description) for description in NUMBER_DESCRIPTIONS + ) + + +class SabnzbdNumber(SabnzbdEntity, NumberEntity): + """Representation of a SABnzbd number.""" + + entity_description: SabnzbdNumberEntityDescription + + @property + def native_value(self) -> float: + """Return latest value for number.""" + return self.coordinator.data[self.entity_description.key] + + async def async_set_native_value(self, value: float) -> None: + """Set the new number value.""" + try: + await self.entity_description.set_fn(self.coordinator, value) + except SabnzbdApiException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml new file mode 100644 index 00000000000..c3fea2427ce --- /dev/null +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -0,0 +1,96 @@ +rules: + # Bronze + action-setup: + status: todo + comment: | + Do not remove services when all config entries are removed. + appropriate-polling: done + brands: done + common-modules: + status: todo + comment: | + const.py has unused variables. + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + The integration has deprecated the actions, thus the documentation has been removed. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: todo + comment: | + Raise ServiceValidationError in async_get_entry_for_service_call. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + The integration does not provide any additional options. + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: todo + test-coverage: + status: todo + comment: | + Coverage for loading and unloading config entries is missing. + + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration cannot be discovered. + discovery: + status: exempt + comment: | + This integration cannot be discovered. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: + status: todo + comment: | + Describe the state of the sensor and make it an enum sensor. 
+ docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + The integration connects to a single service per configuration entry. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: done + repair-issues: done + stale-devices: + status: exempt + comment: | + This integration connects to a single service per configuration entry. + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/homeassistant/components/sabnzbd/sensor.py b/homeassistant/components/sabnzbd/sensor.py index d956d06f1ac..662ae739d15 100644 --- a/homeassistant/components/sabnzbd/sensor.py +++ b/homeassistant/components/sabnzbd/sensor.py @@ -10,16 +10,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfDataRate, UnitOfInformation from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import DOMAIN, SabnzbdUpdateCoordinator -from .const import DEFAULT_NAME +from .coordinator import SabnzbdConfigEntry +from .entity import SabnzbdEntity @dataclass(frozen=True, kw_only=True) @@ -114,59 +111,22 @@ SENSOR_TYPES: tuple[SabnzbdSensorEntityDescription, ...] = ( ), ) -OLD_SENSOR_KEYS = [ - "current_status", - "speed", - "queue_size", - "queue_remaining", - "disk_size", - "disk_free", - "queue_count", - "day_size", - "week_size", - "month_size", - "total_size", -] async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SabnzbdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Sabnzbd sensor entry.""" + coordinator = config_entry.runtime_data - entry_id = config_entry.entry_id - coordinator: SabnzbdUpdateCoordinator = hass.data[DOMAIN][entry_id] - - async_add_entities( - [SabnzbdSensor(coordinator, sensor, entry_id) for sensor in SENSOR_TYPES] - ) + async_add_entities([SabnzbdSensor(coordinator, sensor) for sensor in SENSOR_TYPES]) -class SabnzbdSensor(CoordinatorEntity[SabnzbdUpdateCoordinator], SensorEntity): +class SabnzbdSensor(SabnzbdEntity, SensorEntity): """Representation of an SABnzbd sensor.""" entity_description: SabnzbdSensorEntityDescription - _attr_should_poll = False - _attr_has_entity_name = True - - def __init__( - self, - coordinator: SabnzbdUpdateCoordinator, - description: SabnzbdSensorEntityDescription, - entry_id, - ) -> None: - """Initialize the sensor.""" - super().__init__(coordinator) - - self._attr_unique_id = f"{entry_id}_{description.key}" - self.entity_description = description - self._attr_device_info = DeviceInfo( - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, entry_id)}, - name=DEFAULT_NAME, - ) @property def native_value(self) -> StateType: diff --git a/homeassistant/components/sabnzbd/strings.json b/homeassistant/components/sabnzbd/strings.json index f8c831cd95a..0ac8b93c57f 100644 --- a/homeassistant/components/sabnzbd/strings.json +++ b/homeassistant/components/sabnzbd/strings.json @@ -4,17 +4,42 @@ "user": { "data": { "api_key": "[%key:common::config_flow::data::api_key%]", - "name": 
"[%key:common::config_flow::data::name%]", "url": "[%key:common::config_flow::data::url%]" + }, + "data_description": { + "url": "The full URL, including port, of the SABnzbd server. Example: `http://localhost:8080` or `http://a02368d7-sabnzbd:8080`, if you are using the add-on.", + "api_key": "The API key of the SABnzbd server. This can be found in the SABnzbd web interface under Config cog (top right) > General > Security." } } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "entity": { + "binary_sensor": { + "warnings": { + "name": "Warnings" + } + }, + "button": { + "pause": { + "name": "[%key:common::action::pause%]" + }, + "resume": { + "name": "[%key:component::sabnzbd::services::resume::name%]" + } + }, + "number": { + "speedlimit": { + "name": "Speedlimit" + } + }, "sensor": { "status": { "name": "Status" @@ -86,5 +111,24 @@ } } } + }, + "issues": { + "pause_action_deprecated": { + "title": "SABnzbd pause action deprecated", + "description": "The 'Pause' action is deprecated and will be removed in a future version. Please use the 'Pause' button instead. To remove this issue, please adjust automations accordingly and restart Home Assistant." + }, + "resume_action_deprecated": { + "title": "SABnzbd resume action deprecated", + "description": "The 'Resume' action is deprecated and will be removed in a future version. Please use the 'Resume' button instead. To remove this issue, please adjust automations accordingly and restart Home Assistant." + }, + "set_speed_action_deprecated": { + "title": "SABnzbd set_speed action deprecated", + "description": "The 'Set speed' action is deprecated and will be removed in a future version. Please use the 'Speedlimit' number entity instead. To remove this issue, please adjust automations accordingly and restart Home Assistant." 
+ } + }, + "exceptions": { + "service_call_exception": { + "message": "Unable to send command to SABnzbd due to a connection error, try again later" + } } } diff --git a/homeassistant/components/saj/manifest.json b/homeassistant/components/saj/manifest.json index e882c9f0d02..2a4243f7489 100644 --- a/homeassistant/components/saj/manifest.json +++ b/homeassistant/components/saj/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/saj", "iot_class": "local_polling", "loggers": ["pysaj"], + "quality_scale": "legacy", "requirements": ["pysaj==0.0.16"] } diff --git a/homeassistant/components/samsungtv/__init__.py b/homeassistant/components/samsungtv/__init__.py index f3b967a485e..6d4e491b839 100644 --- a/homeassistant/components/samsungtv/__init__.py +++ b/homeassistant/components/samsungtv/__init__.py @@ -10,7 +10,7 @@ from urllib.parse import urlparse import getmac from homeassistant.components import ssdp -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -36,7 +36,6 @@ from .const import ( CONF_SESSION_ID, CONF_SSDP_MAIN_TV_AGENT_LOCATION, CONF_SSDP_RENDERING_CONTROL_LOCATION, - DOMAIN, ENTRY_RELOAD_COOLDOWN, LEGACY_PORT, LOGGER, @@ -135,16 +134,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SamsungTVConfigEntry) -> def _access_denied() -> None: """Access denied callback.""" LOGGER.debug("Access denied in getting remote object") - hass.create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) - ) + entry.async_start_reauth(hass) bridge.register_reauth_callback(_access_denied) @@ -208,7 +198,7 @@ async def _async_create_bridge_with_updated_data( "Failed to determine connection method, make sure the device is on." ) - LOGGER.info("Updated port to %s and method to %s for %s", port, method, host) + LOGGER.debug("Updated port to %s and method to %s for %s", port, method, host) updated_data[CONF_PORT] = port updated_data[CONF_METHOD] = method @@ -235,21 +225,21 @@ async def _async_create_bridge_with_updated_data( if mac and mac != "none": # Samsung sometimes returns a value of "none" for the mac address # this should be ignored - LOGGER.info("Updated mac to %s for %s", mac, host) + LOGGER.debug("Updated mac to %s for %s", mac, host) updated_data[CONF_MAC] = dr.format_mac(mac) else: - LOGGER.info("Failed to get mac for %s", host) + LOGGER.warning("Failed to get mac for %s", host) if not model: LOGGER.debug("Attempting to get model for %s", host) if info: model = info.get("device", {}).get("modelName") if model: - LOGGER.info("Updated model to %s for %s", model, host) + LOGGER.debug("Updated model to %s for %s", model, host) updated_data[CONF_MODEL] = model if model_requires_encryption(model) and method != METHOD_ENCRYPTED_WEBSOCKET: - LOGGER.info( + LOGGER.debug( ( "Detected model %s for %s. 
Some televisions from H and J series use " "an encrypted protocol but you are using %s which may not be supported" diff --git a/homeassistant/components/samsungtv/bridge.py b/homeassistant/components/samsungtv/bridge.py index f9f5b0d6e73..b4d060372e6 100644 --- a/homeassistant/components/samsungtv/bridge.py +++ b/homeassistant/components/samsungtv/bridge.py @@ -536,7 +536,7 @@ class SamsungTVWSBridge( LOGGER.debug("Working config: %s", config) return RESULT_SUCCESS except ConnectionClosedError as err: - LOGGER.info( + LOGGER.warning( ( "Working but unsupported config: %s, error: '%s'; this may be" " an indication that access to the TV has been denied. Please" @@ -609,7 +609,7 @@ class SamsungTVWSBridge( try: await self._remote.start_listening(self._remote_event) except UnauthorizedError as err: - LOGGER.info( + LOGGER.warning( "Failed to get remote for %s, re-authentication required: %s", self.host, repr(err), @@ -618,7 +618,7 @@ class SamsungTVWSBridge( self._notify_reauth_callback() self._remote = None except ConnectionClosedError as err: - LOGGER.info( + LOGGER.warning( "Failed to get remote for %s: %s", self.host, repr(err), @@ -643,7 +643,7 @@ class SamsungTVWSBridge( # Initialise device info on first connect await self.async_device_info() if self.token != self._remote.token: - LOGGER.info( + LOGGER.warning( "SamsungTVWSBridge has provided a new token %s", self._remote.token, ) diff --git a/homeassistant/components/samsungtv/config_flow.py b/homeassistant/components/samsungtv/config_flow.py index e89c5e59b0e..837651f9900 100644 --- a/homeassistant/components/samsungtv/config_flow.py +++ b/homeassistant/components/samsungtv/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Mapping from functools import partial import socket -from typing import Any +from typing import Any, Self from urllib.parse import urlparse import getmac @@ -105,7 +105,6 @@ class SamsungTVConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize flow.""" - self._reauth_entry: ConfigEntry | None = None self._host: str = "" self._mac: str | None = None self._udn: str | None = None @@ -425,10 +424,12 @@ class SamsungTVConfigFlow(ConfigFlow, domain=DOMAIN): @callback def _async_abort_if_host_already_in_progress(self) -> None: - self.context[CONF_HOST] = self._host - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == self._host: - raise AbortFlow("already_in_progress") + if self.hass.config_entries.flow.async_has_matching_flow(self): + raise AbortFlow("already_in_progress") + + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow._host == self._host # noqa: SLF001 @callback def _abort_if_manufacturer_is_not_samsung(self) -> None: @@ -527,9 +528,6 @@ class SamsungTVConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) if entry_data.get(CONF_MODEL) and entry_data.get(CONF_NAME): self._title = f"{entry_data[CONF_NAME]} ({entry_data[CONF_MODEL]})" else: @@ -541,22 +539,23 @@ class SamsungTVConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth.""" errors = {} - assert self._reauth_entry - method = self._reauth_entry.data[CONF_METHOD] + + reauth_entry = self._get_reauth_entry() + method = reauth_entry.data[CONF_METHOD] if user_input is not None: 
if method == METHOD_ENCRYPTED_WEBSOCKET: return await self.async_step_reauth_confirm_encrypted() bridge = SamsungTVBridge.get_bridge( self.hass, method, - self._reauth_entry.data[CONF_HOST], + reauth_entry.data[CONF_HOST], ) result = await bridge.async_try_connect() if result == RESULT_SUCCESS: - new_data = dict(self._reauth_entry.data) + new_data = dict(reauth_entry.data) new_data[CONF_TOKEN] = bridge.token return self.async_update_reload_and_abort( - self._reauth_entry, + reauth_entry, data=new_data, ) if result not in (RESULT_AUTH_MISSING, RESULT_CANNOT_CONNECT): @@ -585,8 +584,9 @@ class SamsungTVConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth (encrypted method).""" errors = {} - assert self._reauth_entry - await self._async_start_encrypted_pairing(self._reauth_entry.data[CONF_HOST]) + + reauth_entry = self._get_reauth_entry() + await self._async_start_encrypted_pairing(reauth_entry.data[CONF_HOST]) assert self._authenticator is not None if user_input is not None: @@ -596,9 +596,8 @@ class SamsungTVConfigFlow(ConfigFlow, domain=DOMAIN): and (session_id := await self._authenticator.get_session_id_and_close()) ): return self.async_update_reload_and_abort( - self._reauth_entry, - data={ - **self._reauth_entry.data, + reauth_entry, + data_updates={ CONF_TOKEN: token, CONF_SESSION_ID: session_id, }, diff --git a/homeassistant/components/samsungtv/device_trigger.py b/homeassistant/components/samsungtv/device_trigger.py index 0e5c6608a17..2b3d9dbe666 100644 --- a/homeassistant/components/samsungtv/device_trigger.py +++ b/homeassistant/components/samsungtv/device_trigger.py @@ -4,8 +4,8 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.const import CONF_DEVICE_ID, CONF_PLATFORM, CONF_TYPE diff --git a/homeassistant/components/samsungtv/entity.py b/homeassistant/components/samsungtv/entity.py index 030eaf98d9b..61aa8abce53 100644 --- a/homeassistant/components/samsungtv/entity.py +++ b/homeassistant/components/samsungtv/entity.py @@ -42,6 +42,7 @@ class SamsungTVEntity(CoordinatorEntity[SamsungTVDataUpdateCoordinator], Entity) name=config_entry.data.get(CONF_NAME), manufacturer=config_entry.data.get(CONF_MANUFACTURER), model=config_entry.data.get(CONF_MODEL), + model_id=config_entry.data.get(CONF_MODEL), ) if self.unique_id: self._attr_device_info[ATTR_IDENTIFIERS] = {(DOMAIN, self.unique_id)} @@ -92,7 +93,7 @@ class SamsungTVEntity(CoordinatorEntity[SamsungTVDataUpdateCoordinator], Entity) LOGGER.debug("Attempting to turn on %s via automation", self.entity_id) await self._turn_on_action.async_run(self.hass, self._context) elif self._mac: - LOGGER.info( + LOGGER.warning( "Attempting to turn on %s via Wake-On-Lan; if this does not work, " "please ensure that Wake-On-Lan is available for your device or use " "a turn_on automation", diff --git a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index aecde9e4c26..1a6b5ed5313 100644 --- a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -37,9 +37,9 @@ "requirements": [ "getmac==0.9.4", "samsungctl[websocket]==0.7.1", - "samsungtvws[async,encrypted]==2.6.0", + "samsungtvws[async,encrypted]==2.7.2", 
"wakeonlan==2.1.0", - "async-upnp-client==0.40.0" + "async-upnp-client==0.41.0" ], "ssdp": [ { diff --git a/homeassistant/components/samsungtv/media_player.py b/homeassistant/components/samsungtv/media_player.py index 960b69f71e3..7180e8a0c1a 100644 --- a/homeassistant/components/samsungtv/media_player.py +++ b/homeassistant/components/samsungtv/media_player.py @@ -284,7 +284,7 @@ class SamsungTVDevice(SamsungTVEntity, MediaPlayerEntity): async def _async_launch_app(self, app_id: str) -> None: """Send launch_app to the tv.""" if self._bridge.power_off_in_progress: - LOGGER.info("TV is powering off, not sending launch_app command") + LOGGER.debug("TV is powering off, not sending launch_app command") return assert isinstance(self._bridge, SamsungTVWSBridge) await self._bridge.async_launch_app(app_id) @@ -293,7 +293,7 @@ class SamsungTVDevice(SamsungTVEntity, MediaPlayerEntity): """Send a key to the tv and handles exceptions.""" assert keys if self._bridge.power_off_in_progress and keys[0] != "KEY_POWEROFF": - LOGGER.info("TV is powering off, not sending keys: %s", keys) + LOGGER.debug("TV is powering off, not sending keys: %s", keys) return await self._bridge.async_send_keys(keys) @@ -304,7 +304,7 @@ class SamsungTVDevice(SamsungTVEntity, MediaPlayerEntity): async def async_set_volume_level(self, volume: float) -> None: """Set volume level on the media player.""" if (dmr_device := self._dmr_device) is None: - LOGGER.info("Upnp services are not available on %s", self._host) + LOGGER.warning("Upnp services are not available on %s", self._host) return try: await dmr_device.async_set_volume_level(volume) diff --git a/homeassistant/components/samsungtv/remote.py b/homeassistant/components/samsungtv/remote.py index afbac341226..401a5d383f0 100644 --- a/homeassistant/components/samsungtv/remote.py +++ b/homeassistant/components/samsungtv/remote.py @@ -46,7 +46,7 @@ class SamsungTVRemote(SamsungTVEntity, RemoteEntity): See https://github.com/jaruba/ha-samsungtv-tizen/blob/master/Key_codes.md """ if self._bridge.power_off_in_progress: - LOGGER.info("TV is powering off, not sending keys: %s", command) + LOGGER.debug("TV is powering off, not sending keys: %s", command) return num_repeats = kwargs[ATTR_NUM_REPEATS] diff --git a/homeassistant/components/satel_integra/alarm_control_panel.py b/homeassistant/components/satel_integra/alarm_control_panel.py index f9e261b25b1..39c0d6b876d 100644 --- a/homeassistant/components/satel_integra/alarm_control_panel.py +++ b/homeassistant/components/satel_integra/alarm_control_panel.py @@ -11,15 +11,9 @@ from satel_integra.satel_integra import AlarmState from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -67,7 +61,6 @@ class SatelIntegraAlarmPanel(AlarmControlPanelEntity): _attr_code_format = CodeFormat.NUMBER _attr_should_poll = False - _attr_state: str | None _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME | AlarmControlPanelEntityFeature.ARM_AWAY @@ -95,8 +88,8 @@ class SatelIntegraAlarmPanel(AlarmControlPanelEntity): """Handle alarm status update.""" state = self._read_alarm_state() 
_LOGGER.debug("Got status update, current status: %s", state) - if state != self._attr_state: - self._attr_state = state + if state != self._attr_alarm_state: + self._attr_alarm_state = state self.async_write_ha_state() else: _LOGGER.debug("Ignoring alarm status message, same state") @@ -105,22 +98,28 @@ class SatelIntegraAlarmPanel(AlarmControlPanelEntity): """Read current status of the alarm and translate it into HA status.""" # Default - disarmed: - hass_alarm_status = STATE_ALARM_DISARMED + hass_alarm_status = AlarmControlPanelState.DISARMED if not self._satel.connected: return None state_map = OrderedDict( [ - (AlarmState.TRIGGERED, STATE_ALARM_TRIGGERED), - (AlarmState.TRIGGERED_FIRE, STATE_ALARM_TRIGGERED), - (AlarmState.ENTRY_TIME, STATE_ALARM_PENDING), - (AlarmState.ARMED_MODE3, STATE_ALARM_ARMED_HOME), - (AlarmState.ARMED_MODE2, STATE_ALARM_ARMED_HOME), - (AlarmState.ARMED_MODE1, STATE_ALARM_ARMED_HOME), - (AlarmState.ARMED_MODE0, STATE_ALARM_ARMED_AWAY), - (AlarmState.EXIT_COUNTDOWN_OVER_10, STATE_ALARM_PENDING), - (AlarmState.EXIT_COUNTDOWN_UNDER_10, STATE_ALARM_PENDING), + (AlarmState.TRIGGERED, AlarmControlPanelState.TRIGGERED), + (AlarmState.TRIGGERED_FIRE, AlarmControlPanelState.TRIGGERED), + (AlarmState.ENTRY_TIME, AlarmControlPanelState.PENDING), + (AlarmState.ARMED_MODE3, AlarmControlPanelState.ARMED_HOME), + (AlarmState.ARMED_MODE2, AlarmControlPanelState.ARMED_HOME), + (AlarmState.ARMED_MODE1, AlarmControlPanelState.ARMED_HOME), + (AlarmState.ARMED_MODE0, AlarmControlPanelState.ARMED_AWAY), + ( + AlarmState.EXIT_COUNTDOWN_OVER_10, + AlarmControlPanelState.PENDING, + ), + ( + AlarmState.EXIT_COUNTDOWN_UNDER_10, + AlarmControlPanelState.PENDING, + ), ] ) _LOGGER.debug("State map of Satel: %s", self._satel.partition_states) @@ -141,9 +140,11 @@ class SatelIntegraAlarmPanel(AlarmControlPanelEntity): _LOGGER.debug("Code was empty or None") return - clear_alarm_necessary = self._attr_state == STATE_ALARM_TRIGGERED + clear_alarm_necessary = ( + self._attr_alarm_state == AlarmControlPanelState.TRIGGERED + ) - _LOGGER.debug("Disarming, self._attr_state: %s", self._attr_state) + _LOGGER.debug("Disarming, self._attr_alarm_state: %s", self._attr_alarm_state) await self._satel.disarm(code, [self._partition_id]) diff --git a/homeassistant/components/satel_integra/manifest.json b/homeassistant/components/satel_integra/manifest.json index 828261aa466..a90ea1db5a5 100644 --- a/homeassistant/components/satel_integra/manifest.json +++ b/homeassistant/components/satel_integra/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/satel_integra", "iot_class": "local_push", "loggers": ["satel_integra"], + "quality_scale": "legacy", "requirements": ["satel-integra==0.3.7"] } diff --git a/homeassistant/components/scene/__init__.py b/homeassistant/components/scene/__init__.py index 596d256ffb7..d1b34b50770 100644 --- a/homeassistant/components/scene/__init__.py +++ b/homeassistant/components/scene/__init__.py @@ -17,8 +17,10 @@ from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util +from homeassistant.util.hass_dict import HassKey DOMAIN: Final = "scene" +DATA_COMPONENT: HassKey[EntityComponent[Scene]] = HassKey(DOMAIN) STATES: Final = "states" @@ -60,7 +62,7 @@ PLATFORM_SCHEMA = vol.Schema( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the scenes.""" - 
component = hass.data[DOMAIN] = EntityComponent[Scene]( + component = hass.data[DATA_COMPONENT] = EntityComponent[Scene]( logging.getLogger(__name__), DOMAIN, hass ) @@ -83,14 +85,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[Scene] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[Scene] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class Scene(RestoreEntity): diff --git a/homeassistant/components/scene/icons.json b/homeassistant/components/scene/icons.json index 563c0f31ddc..b08d06fb434 100644 --- a/homeassistant/components/scene/icons.json +++ b/homeassistant/components/scene/icons.json @@ -5,10 +5,20 @@ } }, "services": { - "turn_on": "mdi:power", - "reload": "mdi:reload", - "apply": "mdi:check", - "create": "mdi:plus", - "delete": "mdi:delete" + "turn_on": { + "service": "mdi:power" + }, + "reload": { + "service": "mdi:reload" + }, + "apply": { + "service": "mdi:check" + }, + "create": { + "service": "mdi:plus" + }, + "delete": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/schedule/__init__.py b/homeassistant/components/schedule/__init__.py index 08d0b083f7c..24ce4f3b3fa 100644 --- a/homeassistant/components/schedule/__init__.py +++ b/homeassistant/components/schedule/__init__.py @@ -39,6 +39,7 @@ from homeassistant.util import dt as dt_util from .const import ( ATTR_NEXT_EVENT, CONF_ALL_DAYS, + CONF_DATA, CONF_FROM, CONF_TO, DOMAIN, @@ -55,7 +56,7 @@ def valid_schedule(schedule: list[dict[str, str]]) -> list[dict[str, str]]: Ensure they have no overlap and the end time is greater than the start time. """ - # Emtpty schedule is valid + # Empty schedule is valid if not schedule: return schedule @@ -109,9 +110,13 @@ BASE_SCHEMA: VolDictType = { vol.Optional(CONF_ICON): cv.icon, } +# Extra data that the user can set on each time range +CUSTOM_DATA_SCHEMA = vol.Schema({str: vol.Any(bool, str, int, float)}) + TIME_RANGE_SCHEMA: VolDictType = { vol.Required(CONF_FROM): cv.time, vol.Required(CONF_TO): deserialize_to_time, + vol.Optional(CONF_DATA): CUSTOM_DATA_SCHEMA, } # Serialize time in validated config @@ -119,6 +124,7 @@ STORAGE_TIME_RANGE_SCHEMA = vol.Schema( { vol.Required(CONF_FROM): vol.Coerce(str), vol.Required(CONF_TO): serialize_to_time, + vol.Optional(CONF_DATA): CUSTOM_DATA_SCHEMA, } ) @@ -135,7 +141,6 @@ STORAGE_SCHEDULE_SCHEMA: VolDictType = { for day in CONF_ALL_DAYS } - # Validate YAML config CONFIG_SCHEMA = vol.Schema( {DOMAIN: cv.schema_with_slug_keys(vol.All(BASE_SCHEMA | SCHEDULE_SCHEMA))}, @@ -152,7 +157,7 @@ ENTITY_SCHEMA = vol.Schema( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up an input select.""" + """Set up a schedule.""" component = EntityComponent[Schedule](LOGGER, DOMAIN, hass) id_manager = IDManager() @@ -253,6 +258,12 @@ class Schedule(CollectionEntity): self._attr_name = self._config[CONF_NAME] self._attr_unique_id = self._config[CONF_ID] + # Exclude any custom attributes that may be present on time ranges from recording. 
+ self._unrecorded_attributes = self.all_custom_data_keys() + self._Entity__combined_unrecorded_attributes = ( + self._entity_component_unrecorded_attributes | self._unrecorded_attributes + ) + @classmethod def from_storage(cls, config: ConfigType) -> Schedule: """Return entity instance initialized from storage.""" @@ -300,9 +311,11 @@ class Schedule(CollectionEntity): # Note that any time in the day is treated as smaller than time.max. if now.time() < time_range[CONF_TO] or time_range[CONF_TO] == time.max: self._attr_state = STATE_ON + current_data = time_range.get(CONF_DATA) break else: self._attr_state = STATE_OFF + current_data = None # Find next event in the schedule, loop over each day (starting with # the current day) until the next event has been found. @@ -344,6 +357,11 @@ class Schedule(CollectionEntity): self._attr_extra_state_attributes = { ATTR_NEXT_EVENT: next_event, } + + if current_data: + # Add each key/value pair in the data to the entity's state attributes + self._attr_extra_state_attributes.update(current_data) + self.async_write_ha_state() if next_event: @@ -352,3 +370,23 @@ class Schedule(CollectionEntity): self._update, next_event, ) + + def all_custom_data_keys(self) -> frozenset[str]: + """Return the set of all currently used custom data attribute keys.""" + data_keys = set() + + for weekday in WEEKDAY_TO_CONF.values(): + if not (weekday_config := self._config.get(weekday)): + continue # this weekday is not configured + + for time_range in weekday_config: + time_range_custom_data = time_range.get(CONF_DATA) + + if not time_range_custom_data or not isinstance( + time_range_custom_data, dict + ): + continue # this time range has no custom data, or it is not a dict + + data_keys.update(time_range_custom_data.keys()) + + return frozenset(data_keys) diff --git a/homeassistant/components/schedule/const.py b/homeassistant/components/schedule/const.py index 5ec57aae78d..6687dafefdb 100644 --- a/homeassistant/components/schedule/const.py +++ b/homeassistant/components/schedule/const.py @@ -6,6 +6,7 @@ from typing import Final DOMAIN: Final = "schedule" LOGGER = logging.getLogger(__package__) +CONF_DATA: Final = "data" CONF_FRIDAY: Final = "friday" CONF_FROM: Final = "from" CONF_MONDAY: Final = "monday" diff --git a/homeassistant/components/schedule/icons.json b/homeassistant/components/schedule/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/schedule/icons.json +++ b/homeassistant/components/schedule/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/schlage/__init__.py b/homeassistant/components/schlage/__init__.py index 1c3ad547f3d..6eae69d9542 100644 --- a/homeassistant/components/schlage/__init__.py +++ b/homeassistant/components/schlage/__init__.py @@ -10,18 +10,20 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from .const import DOMAIN from .coordinator import SchlageDataUpdateCoordinator PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.LOCK, + Platform.SELECT, Platform.SENSOR, Platform.SWITCH, ] +type SchlageConfigEntry = ConfigEntry[SchlageDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: SchlageConfigEntry) -> bool: """Set up Schlage from a config entry.""" username = 
entry.data[CONF_USERNAME] password = entry.data[CONF_PASSWORD] @@ -31,15 +33,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: raise ConfigEntryAuthFailed from ex coordinator = SchlageDataUpdateCoordinator(hass, username, pyschlage.Schlage(auth)) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await coordinator.async_config_entry_first_refresh() await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SchlageConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/schlage/binary_sensor.py b/homeassistant/components/schlage/binary_sensor.py index a141403bdf4..f928d42b3ee 100644 --- a/homeassistant/components/schlage/binary_sensor.py +++ b/homeassistant/components/schlage/binary_sensor.py @@ -10,12 +10,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import SchlageConfigEntry from .coordinator import LockData, SchlageDataUpdateCoordinator from .entity import SchlageEntity @@ -40,20 +39,25 @@ _DESCRIPTIONS: tuple[SchlageBinarySensorEntityDescription] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SchlageConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up binary_sensors based on a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - SchlageBinarySensor( - coordinator=coordinator, - description=description, - device_id=device_id, + coordinator = config_entry.runtime_data + + def _add_new_locks(locks: dict[str, LockData]) -> None: + async_add_entities( + SchlageBinarySensor( + coordinator=coordinator, + description=description, + device_id=device_id, + ) + for device_id in locks + for description in _DESCRIPTIONS ) - for device_id in coordinator.data.locks - for description in _DESCRIPTIONS - ) + + _add_new_locks(coordinator.data.locks) + coordinator.new_locks_callbacks.append(_add_new_locks) class SchlageBinarySensor(SchlageEntity, BinarySensorEntity): diff --git a/homeassistant/components/schlage/config_flow.py b/homeassistant/components/schlage/config_flow.py index a6104702396..6e8f94473dd 100644 --- a/homeassistant/components/schlage/config_flow.py +++ b/homeassistant/components/schlage/config_flow.py @@ -9,7 +9,7 @@ import pyschlage from pyschlage.exceptions import NotAuthorizedError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DOMAIN, LOGGER @@ -25,15 +25,13 @@ class SchlageConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | 
None = None ) -> ConfigFlowResult: """Handle the initial step.""" if user_input is None: return self._show_user_form({}) - username = user_input[CONF_USERNAME] + username = user_input[CONF_USERNAME].lower() password = user_input[CONF_PASSWORD] user_id, errors = await self.hass.async_add_executor_job( _authenticate, username, password @@ -42,7 +40,14 @@ class SchlageConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_user_form(errors) await self.async_set_unique_id(user_id) - return self.async_create_entry(title=username, data=user_input) + self._abort_if_unique_id_configured() + return self.async_create_entry( + title=username, + data={ + CONF_USERNAME: username, + CONF_PASSWORD: password, + }, + ) def _show_user_form(self, errors: dict[str, str]) -> ConfigFlowResult: """Show the user form.""" @@ -54,20 +59,17 @@ class SchlageConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self.reauth_entry is not None if user_input is None: return self._show_reauth_form({}) - username = self.reauth_entry.data[CONF_USERNAME] + reauth_entry = self._get_reauth_entry() + username = reauth_entry.data[CONF_USERNAME] password = user_input[CONF_PASSWORD] user_id, errors = await self.hass.async_add_executor_job( _authenticate, username, password @@ -75,16 +77,14 @@ class SchlageConfigFlow(ConfigFlow, domain=DOMAIN): if user_id is None: return self._show_reauth_form(errors) - if self.reauth_entry.unique_id != user_id: - return self.async_abort(reason="wrong_account") + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_mismatch(reason="wrong_account") data = { CONF_USERNAME: username, CONF_PASSWORD: user_input[CONF_PASSWORD], } - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) def _show_reauth_form(self, errors: dict[str, str]) -> ConfigFlowResult: """Show the reauth form.""" diff --git a/homeassistant/components/schlage/coordinator.py b/homeassistant/components/schlage/coordinator.py index 959d1e215f8..b319b21be0c 100644 --- a/homeassistant/components/schlage/coordinator.py +++ b/homeassistant/components/schlage/coordinator.py @@ -3,14 +3,17 @@ from __future__ import annotations import asyncio +from collections.abc import Callable from dataclasses import dataclass from pyschlage import Lock, Schlage from pyschlage.exceptions import Error as SchlageError, NotAuthorizedError from pyschlage.log import LockLog -from homeassistant.core import HomeAssistant +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed +import homeassistant.helpers.device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, LOGGER, UPDATE_INTERVAL @@ -34,12 +37,17 @@ class SchlageData: class SchlageDataUpdateCoordinator(DataUpdateCoordinator[SchlageData]): """The Schlage data update coordinator.""" + 
config_entry: ConfigEntry + def __init__(self, hass: HomeAssistant, username: str, api: Schlage) -> None: """Initialize the class.""" super().__init__( hass, LOGGER, name=f"{DOMAIN} ({username})", update_interval=UPDATE_INTERVAL ) + self.data = SchlageData(locks={}) self.api = api + self.new_locks_callbacks: list[Callable[[dict[str, LockData]], None]] = [] + self.async_add_listener(self._add_remove_locks) async def _async_update_data(self) -> SchlageData: """Fetch the latest data from the Schlage API.""" @@ -48,16 +56,16 @@ class SchlageDataUpdateCoordinator(DataUpdateCoordinator[SchlageData]): except NotAuthorizedError as ex: raise ConfigEntryAuthFailed from ex except SchlageError as ex: - raise UpdateFailed("Failed to refresh Schlage data") from ex + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="schlage_refresh_failed" + ) from ex lock_data = await asyncio.gather( *( self.hass.async_add_executor_job(self._get_lock_data, lock) for lock in locks ) ) - return SchlageData( - locks={ld.lock.device_id: ld for ld in lock_data}, - ) + return SchlageData(locks={ld.lock.device_id: ld for ld in lock_data}) def _get_lock_data(self, lock: Lock) -> LockData: logs: list[LockLog] = [] @@ -74,3 +82,34 @@ class SchlageDataUpdateCoordinator(DataUpdateCoordinator[SchlageData]): LOGGER.debug('Failed to read logs for lock "%s": %s', lock.name, ex) return LockData(lock=lock, logs=logs) + + @callback + def _add_remove_locks(self) -> None: + """Add newly discovered locks and remove nonexistent locks.""" + device_registry = dr.async_get(self.hass) + devices = dr.async_entries_for_config_entry( + device_registry, self.config_entry.entry_id + ) + previous_locks = set() + previous_locks_by_lock_id = {} + for device in devices: + for domain, identifier in device.identifiers: + if domain == DOMAIN: + previous_locks.add(identifier) + previous_locks_by_lock_id[identifier] = device + continue + current_locks = set(self.data.locks.keys()) + + if removed_locks := previous_locks - current_locks: + LOGGER.debug("Removed locks: %s", ", ".join(removed_locks)) + for lock_id in removed_locks: + device_registry.async_update_device( + device_id=previous_locks_by_lock_id[lock_id].id, + remove_config_entry_id=self.config_entry.entry_id, + ) + + if new_lock_ids := current_locks - previous_locks: + LOGGER.debug("New locks found: %s", ", ".join(new_lock_ids)) + new_locks = {lock_id: self.data.locks[lock_id] for lock_id in new_lock_ids} + for new_lock_callback in self.new_locks_callbacks: + new_lock_callback(new_locks) diff --git a/homeassistant/components/schlage/diagnostics.py b/homeassistant/components/schlage/diagnostics.py index af1bf311676..ec4d9c489e3 100644 --- a/homeassistant/components/schlage/diagnostics.py +++ b/homeassistant/components/schlage/diagnostics.py @@ -4,19 +4,17 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import SchlageDataUpdateCoordinator +from . import SchlageConfigEntry async def async_get_config_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SchlageConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data # NOTE: Schlage diagnostics are already redacted. 
return { "locks": [ld.lock.get_diagnostics() for ld in coordinator.data.locks.values()] diff --git a/homeassistant/components/schlage/entity.py b/homeassistant/components/schlage/entity.py index 61bdbcb7730..cc4745e51cc 100644 --- a/homeassistant/components/schlage/entity.py +++ b/homeassistant/components/schlage/entity.py @@ -42,5 +42,4 @@ class SchlageEntity(CoordinatorEntity[SchlageDataUpdateCoordinator]): @property def available(self) -> bool: """Return if entity is available.""" - # When is_locked is None the lock is unavailable. - return super().available and self._lock.is_locked is not None + return super().available and self.device_id in self.coordinator.data.locks diff --git a/homeassistant/components/schlage/lock.py b/homeassistant/components/schlage/lock.py index 7e6f60211b0..d203913191d 100644 --- a/homeassistant/components/schlage/lock.py +++ b/homeassistant/components/schlage/lock.py @@ -5,26 +5,30 @@ from __future__ import annotations from typing import Any from homeassistant.components.lock import LockEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import SchlageDataUpdateCoordinator +from . import SchlageConfigEntry +from .coordinator import LockData, SchlageDataUpdateCoordinator from .entity import SchlageEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SchlageConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Schlage WiFi locks based on a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - SchlageLockEntity(coordinator=coordinator, device_id=device_id) - for device_id in coordinator.data.locks - ) + coordinator = config_entry.runtime_data + + def _add_new_locks(locks: dict[str, LockData]) -> None: + async_add_entities( + SchlageLockEntity(coordinator=coordinator, device_id=device_id) + for device_id in locks + ) + + _add_new_locks(coordinator.data.locks) + coordinator.new_locks_callbacks.append(_add_new_locks) class SchlageLockEntity(SchlageEntity, LockEntity): @@ -42,8 +46,9 @@ class SchlageLockEntity(SchlageEntity, LockEntity): @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - self._update_attrs() - return super()._handle_coordinator_update() + if self.device_id in self.coordinator.data.locks: + self._update_attrs() + super()._handle_coordinator_update() def _update_attrs(self) -> None: """Update our internal state attributes.""" diff --git a/homeassistant/components/schlage/manifest.json b/homeassistant/components/schlage/manifest.json index 5619cf7b312..61cc2a3c63d 100644 --- a/homeassistant/components/schlage/manifest.json +++ b/homeassistant/components/schlage/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/schlage", "iot_class": "cloud_polling", - "requirements": ["pyschlage==2024.8.0"] + "requirements": ["pyschlage==2024.11.0"] } diff --git a/homeassistant/components/schlage/select.py b/homeassistant/components/schlage/select.py new file mode 100644 index 00000000000..6cf0853835f --- /dev/null +++ b/homeassistant/components/schlage/select.py @@ -0,0 +1,77 @@ +"""Platform for Schlage select integration.""" + +from __future__ import annotations + +from homeassistant.components.select import SelectEntity, 
SelectEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SchlageConfigEntry +from .coordinator import LockData, SchlageDataUpdateCoordinator +from .entity import SchlageEntity + +_DESCRIPTIONS = ( + SelectEntityDescription( + key="auto_lock_time", + translation_key="auto_lock_time", + entity_category=EntityCategory.CONFIG, + # valid values are from Schlage UI and validated by pyschlage + options=[ + "0", + "15", + "30", + "60", + "120", + "240", + "300", + ], + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: SchlageConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up selects based on a config entry.""" + coordinator = config_entry.runtime_data + + def _add_new_locks(locks: dict[str, LockData]) -> None: + async_add_entities( + SchlageSelect( + coordinator=coordinator, + description=description, + device_id=device_id, + ) + for device_id in locks + for description in _DESCRIPTIONS + ) + + _add_new_locks(coordinator.data.locks) + coordinator.new_locks_callbacks.append(_add_new_locks) + + +class SchlageSelect(SchlageEntity, SelectEntity): + """Schlage select entity.""" + + def __init__( + self, + coordinator: SchlageDataUpdateCoordinator, + description: SelectEntityDescription, + device_id: str, + ) -> None: + """Initialize a SchlageSelect.""" + super().__init__(coordinator, device_id) + self.entity_description = description + self._attr_unique_id = f"{device_id}_{self.entity_description.key}" + + @property + def current_option(self) -> str: + """Return the current option.""" + return str(self._lock_data.lock.auto_lock_time) + + def select_option(self, option: str) -> None: + """Set the current option.""" + self._lock.set_auto_lock_time(int(option)) diff --git a/homeassistant/components/schlage/sensor.py b/homeassistant/components/schlage/sensor.py index 2cf1694e111..a15d1740b91 100644 --- a/homeassistant/components/schlage/sensor.py +++ b/homeassistant/components/schlage/sensor.py @@ -13,8 +13,7 @@ from homeassistant.const import PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import SchlageDataUpdateCoordinator +from .coordinator import LockData, SchlageDataUpdateCoordinator from .entity import SchlageEntity _SENSOR_DESCRIPTIONS: list[SensorEntityDescription] = [ @@ -34,16 +33,21 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensors based on a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - SchlageBatterySensor( - coordinator=coordinator, - description=description, - device_id=device_id, + coordinator = config_entry.runtime_data + + def _add_new_locks(locks: dict[str, LockData]) -> None: + async_add_entities( + SchlageBatterySensor( + coordinator=coordinator, + description=description, + device_id=device_id, + ) + for description in _SENSOR_DESCRIPTIONS + for device_id in locks ) - for description in _SENSOR_DESCRIPTIONS - for device_id in coordinator.data.locks - ) + + _add_new_locks(coordinator.data.locks) + coordinator.new_locks_callbacks.append(_add_new_locks) class SchlageBatterySensor(SchlageEntity, SensorEntity): @@ -64,5 +68,6 @@ class SchlageBatterySensor(SchlageEntity, SensorEntity): @callback def 
_handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - self._attr_native_value = getattr(self._lock, self.entity_description.key) - return super()._handle_coordinator_update() + if self.device_id in self.coordinator.data.locks: + self._attr_native_value = getattr(self._lock, self.entity_description.key) + super()._handle_coordinator_update() diff --git a/homeassistant/components/schlage/strings.json b/homeassistant/components/schlage/strings.json index 721d9e80286..56e72c2d2c0 100644 --- a/homeassistant/components/schlage/strings.json +++ b/homeassistant/components/schlage/strings.json @@ -31,6 +31,20 @@ "name": "Keypad disabled" } }, + "select": { + "auto_lock_time": { + "name": "Auto-Lock time", + "state": { + "0": "Disabled", + "15": "15 seconds", + "30": "30 seconds", + "60": "1 minute", + "120": "2 minutes", + "240": "4 minutes", + "300": "5 minutes" + } + } + }, "switch": { "beeper": { "name": "Keypress Beep" @@ -39,5 +53,10 @@ "name": "1-Touch Locking" } } + }, + "exceptions": { + "schlage_refresh_failed": { + "message": "Failed to refresh Schlage data" + } } } diff --git a/homeassistant/components/schlage/switch.py b/homeassistant/components/schlage/switch.py index 53771768ccd..39fe6dbbc99 100644 --- a/homeassistant/components/schlage/switch.py +++ b/homeassistant/components/schlage/switch.py @@ -19,8 +19,7 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import SchlageDataUpdateCoordinator +from .coordinator import LockData, SchlageDataUpdateCoordinator from .entity import SchlageEntity @@ -61,16 +60,21 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up switches based on a config entry.""" - coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - SchlageSwitch( - coordinator=coordinator, - description=description, - device_id=device_id, + coordinator = config_entry.runtime_data + + def _add_new_locks(locks: dict[str, LockData]) -> None: + async_add_entities( + SchlageSwitch( + coordinator=coordinator, + description=description, + device_id=device_id, + ) + for device_id in locks + for description in SWITCHES ) - for device_id in coordinator.data.locks - for description in SWITCHES - ) + + _add_new_locks(coordinator.data.locks) + coordinator.new_locks_callbacks.append(_add_new_locks) class SchlageSwitch(SchlageEntity, SwitchEntity): diff --git a/homeassistant/components/schluter/climate.py b/homeassistant/components/schluter/climate.py index 6f0a49e6eb9..7db15d3923c 100644 --- a/homeassistant/components/schluter/climate.py +++ b/homeassistant/components/schluter/climate.py @@ -82,7 +82,6 @@ class SchluterThermostat(CoordinatorEntity, ClimateEntity): _attr_hvac_modes = [HVACMode.HEAT] _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, serial_number, api, session_id): """Initialize the thermostat.""" diff --git a/homeassistant/components/schluter/manifest.json b/homeassistant/components/schluter/manifest.json index e96058cc146..0302ce09440 100644 --- a/homeassistant/components/schluter/manifest.json +++ b/homeassistant/components/schluter/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/schluter", 
"iot_class": "cloud_polling", "loggers": ["schluter"], + "quality_scale": "legacy", "requirements": ["py-schluter==0.1.7"] } diff --git a/homeassistant/components/scrape/__init__.py b/homeassistant/components/scrape/__init__.py index 16220d5c567..ff991c5f348 100644 --- a/homeassistant/components/scrape/__init__.py +++ b/homeassistant/components/scrape/__init__.py @@ -72,7 +72,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: scan_interval: timedelta = resource_config.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ) - coordinator = ScrapeCoordinator(hass, rest, scan_interval) + coordinator = ScrapeCoordinator(hass, None, rest, scan_interval) sensors: list[ConfigType] = resource_config.get(SENSOR_DOMAIN, []) if sensors: @@ -100,6 +100,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ScrapeConfigEntry) -> bo coordinator = ScrapeCoordinator( hass, + entry, rest, DEFAULT_SCAN_INTERVAL, ) diff --git a/homeassistant/components/scrape/coordinator.py b/homeassistant/components/scrape/coordinator.py index 74fd510ac94..b5cabc6b94e 100644 --- a/homeassistant/components/scrape/coordinator.py +++ b/homeassistant/components/scrape/coordinator.py @@ -8,6 +8,7 @@ import logging from bs4 import BeautifulSoup from homeassistant.components.rest import RestData +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -18,12 +19,17 @@ class ScrapeCoordinator(DataUpdateCoordinator[BeautifulSoup]): """Scrape Coordinator.""" def __init__( - self, hass: HomeAssistant, rest: RestData, update_interval: timedelta + self, + hass: HomeAssistant, + config_entry: ConfigEntry | None, + rest: RestData, + update_interval: timedelta, ) -> None: """Initialize Scrape coordinator.""" super().__init__( hass, _LOGGER, + config_entry=config_entry, name="Scrape Coordinator", update_interval=update_interval, ) diff --git a/homeassistant/components/scrape/manifest.json b/homeassistant/components/scrape/manifest.json index f39f662de3e..56b9470b4f7 100644 --- a/homeassistant/components/scrape/manifest.json +++ b/homeassistant/components/scrape/manifest.json @@ -6,5 +6,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/scrape", "iot_class": "cloud_polling", - "requirements": ["beautifulsoup4==4.12.3", "lxml==5.1.0"] + "requirements": ["beautifulsoup4==4.12.3", "lxml==5.3.0"] } diff --git a/homeassistant/components/screenlogic/binary_sensor.py b/homeassistant/components/screenlogic/binary_sensor.py index 13582b81196..fda1c348edf 100644 --- a/homeassistant/components/screenlogic/binary_sensor.py +++ b/homeassistant/components/screenlogic/binary_sensor.py @@ -9,7 +9,7 @@ from screenlogicpy.const.msg import CODE from screenlogicpy.device_const.system import EQUIPMENT_FLAG from homeassistant.components.binary_sensor import ( - DOMAIN, + DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, @@ -202,7 +202,9 @@ async def async_setup_entry( chem_sensor_description.key, ) if EQUIPMENT_FLAG.INTELLICHEM not in gateway.equipment_flags: - cleanup_excluded_entity(coordinator, DOMAIN, chem_sensor_data_path) + cleanup_excluded_entity( + coordinator, BINARY_SENSOR_DOMAIN, chem_sensor_data_path + ) continue if gateway.get_data(*chem_sensor_data_path): entities.append( @@ -216,7 +218,9 @@ async def async_setup_entry( scg_sensor_description.key, ) if EQUIPMENT_FLAG.CHLORINATOR not in 
gateway.equipment_flags: - cleanup_excluded_entity(coordinator, DOMAIN, scg_sensor_data_path) + cleanup_excluded_entity( + coordinator, BINARY_SENSOR_DOMAIN, scg_sensor_data_path + ) continue if gateway.get_data(*scg_sensor_data_path): entities.append( diff --git a/homeassistant/components/screenlogic/climate.py b/homeassistant/components/screenlogic/climate.py index 4d93dcf81d3..08300900f5d 100644 --- a/homeassistant/components/screenlogic/climate.py +++ b/homeassistant/components/screenlogic/climate.py @@ -80,7 +80,6 @@ class ScreenLogicClimate(ScreenLogicPushEntity, ClimateEntity, RestoreEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, entity_description) -> None: """Initialize a ScreenLogic climate entity.""" diff --git a/homeassistant/components/screenlogic/config_flow.py b/homeassistant/components/screenlogic/config_flow.py index 74a01fdeaa2..19db89dc03d 100644 --- a/homeassistant/components/screenlogic/config_flow.py +++ b/homeassistant/components/screenlogic/config_flow.py @@ -32,9 +32,9 @@ GATEWAY_MANUAL_ENTRY = "manual" PENTAIR_OUI = "00-C0-33" -async def async_discover_gateways_by_unique_id(hass): +async def async_discover_gateways_by_unique_id() -> dict[str, dict[str, Any]]: """Discover gateways and return a dict of them by unique id.""" - discovered_gateways = {} + discovered_gateways: dict[str, dict[str, Any]] = {} try: hosts = await discovery.async_discover() _LOGGER.debug("Discovered hosts: %s", hosts) @@ -51,16 +51,16 @@ async def async_discover_gateways_by_unique_id(hass): return discovered_gateways -def _extract_mac_from_name(name): +def _extract_mac_from_name(name: str) -> str: return format_mac(f"{PENTAIR_OUI}-{name.split(':')[1].strip()}") -def short_mac(mac): +def short_mac(mac: str) -> str: """Short version of the mac as seen in the app.""" return "-".join(mac.split(":")[3:]).upper() -def name_for_mac(mac): +def name_for_mac(mac: str) -> str: """Derive the gateway name from the mac.""" return f"Pentair: {short_mac(mac)}" @@ -81,11 +81,13 @@ class ScreenlogicConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> ScreenLogicOptionsFlowHandler: """Get the options flow for ScreenLogic.""" - return ScreenLogicOptionsFlowHandler(config_entry) + return ScreenLogicOptionsFlowHandler() - async def async_step_user(self, user_input=None) -> ConfigFlowResult: + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" - self.discovered_gateways = await async_discover_gateways_by_unique_id(self.hass) + self.discovered_gateways = await async_discover_gateways_by_unique_id() return await self.async_step_gateway_select() async def async_step_dhcp( @@ -190,10 +192,6 @@ class ScreenlogicConfigFlow(ConfigFlow, domain=DOMAIN): class ScreenLogicOptionsFlowHandler(OptionsFlow): """Handles the options for the ScreenLogic integration.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init the screen logic options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: diff --git a/homeassistant/components/screenlogic/coordinator.py b/homeassistant/components/screenlogic/coordinator.py index 281bac86e01..a90c9cb2cf4 100644 --- a/homeassistant/components/screenlogic/coordinator.py +++ b/homeassistant/components/screenlogic/coordinator.py @@ -2,6 +2,7 @@ 
from datetime import timedelta import logging +from typing import TYPE_CHECKING from screenlogicpy import ScreenLogicGateway from screenlogicpy.const.common import ( @@ -33,11 +34,13 @@ async def async_get_connect_info( """Construct connect_info from configuration entry and returns it to caller.""" mac = entry.unique_id # Attempt to rediscover gateway to follow IP changes - discovered_gateways = await async_discover_gateways_by_unique_id(hass) + discovered_gateways = await async_discover_gateways_by_unique_id() if mac in discovered_gateways: return discovered_gateways[mac] _LOGGER.debug("Gateway rediscovery failed for %s", entry.title) + if TYPE_CHECKING: + assert mac is not None # Static connection defined or fallback from discovery return { SL_GATEWAY_NAME: name_for_mac(mac), diff --git a/homeassistant/components/screenlogic/icons.json b/homeassistant/components/screenlogic/icons.json index d8d021c20e6..ef8dc46f61d 100644 --- a/homeassistant/components/screenlogic/icons.json +++ b/homeassistant/components/screenlogic/icons.json @@ -1,7 +1,13 @@ { "services": { - "set_color_mode": "mdi:palette", - "start_super_chlorination": "mdi:pool", - "stop_super_chlorination": "mdi:pool" + "set_color_mode": { + "service": "mdi:palette" + }, + "start_super_chlorination": { + "service": "mdi:pool" + }, + "stop_super_chlorination": { + "service": "mdi:pool" + } } } diff --git a/homeassistant/components/screenlogic/number.py b/homeassistant/components/screenlogic/number.py index c5d67b8f285..d0eb6a71ec8 100644 --- a/homeassistant/components/screenlogic/number.py +++ b/homeassistant/components/screenlogic/number.py @@ -9,7 +9,7 @@ from screenlogicpy.const.msg import CODE from screenlogicpy.device_const.system import EQUIPMENT_FLAG from homeassistant.components.number import ( - DOMAIN, + DOMAIN as NUMBER_DOMAIN, NumberEntity, NumberEntityDescription, NumberMode, @@ -111,7 +111,7 @@ async def async_setup_entry( chem_number_description.key, ) if EQUIPMENT_FLAG.INTELLICHEM not in gateway.equipment_flags: - cleanup_excluded_entity(coordinator, DOMAIN, chem_number_data_path) + cleanup_excluded_entity(coordinator, NUMBER_DOMAIN, chem_number_data_path) continue if gateway.get_data(*chem_number_data_path): entities.append( @@ -124,7 +124,7 @@ async def async_setup_entry( scg_number_description.key, ) if EQUIPMENT_FLAG.CHLORINATOR not in gateway.equipment_flags: - cleanup_excluded_entity(coordinator, DOMAIN, scg_number_data_path) + cleanup_excluded_entity(coordinator, NUMBER_DOMAIN, scg_number_data_path) continue if gateway.get_data(*scg_number_data_path): entities.append(ScreenLogicSCGNumber(coordinator, scg_number_description)) diff --git a/homeassistant/components/screenlogic/sensor.py b/homeassistant/components/screenlogic/sensor.py index 0b8e4147420..c580204221f 100644 --- a/homeassistant/components/screenlogic/sensor.py +++ b/homeassistant/components/screenlogic/sensor.py @@ -12,7 +12,7 @@ from screenlogicpy.device_const.pump import PUMP_TYPE from screenlogicpy.device_const.system import EQUIPMENT_FLAG from homeassistant.components.sensor import ( - DOMAIN, + DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -267,7 +267,7 @@ async def async_setup_entry( chem_sensor_description.key, ) if EQUIPMENT_FLAG.INTELLICHEM not in gateway.equipment_flags: - cleanup_excluded_entity(coordinator, DOMAIN, chem_sensor_data_path) + cleanup_excluded_entity(coordinator, SENSOR_DOMAIN, chem_sensor_data_path) continue if gateway.get_data(*chem_sensor_data_path): chem_sensor_description = 
dataclasses.replace( @@ -282,7 +282,7 @@ async def async_setup_entry( scg_sensor_description.key, ) if EQUIPMENT_FLAG.CHLORINATOR not in gateway.equipment_flags: - cleanup_excluded_entity(coordinator, DOMAIN, scg_sensor_data_path) + cleanup_excluded_entity(coordinator, SENSOR_DOMAIN, scg_sensor_data_path) continue if gateway.get_data(*scg_sensor_data_path): scg_sensor_description = dataclasses.replace( diff --git a/homeassistant/components/screenlogic/services.py b/homeassistant/components/screenlogic/services.py index 3177f27ab2a..44d8ad3ed81 100644 --- a/homeassistant/components/screenlogic/services.py +++ b/homeassistant/components/screenlogic/services.py @@ -10,12 +10,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import ( - config_validation as cv, - issue_registry as ir, - selector, -) -from homeassistant.helpers.service import async_extract_config_entry_ids +from homeassistant.helpers import selector from .const import ( ATTR_COLOR_MODE, @@ -44,19 +39,10 @@ BASE_SERVICE_SCHEMA = vol.Schema( } ) -SET_COLOR_MODE_SCHEMA = vol.All( - vol.Schema( - { - vol.Optional(ATTR_CONFIG_ENTRY): selector.ConfigEntrySelector( - { - "integration": DOMAIN, - } - ), - **cv.ENTITY_SERVICE_FIELDS, - vol.Required(ATTR_COLOR_MODE): vol.In(SUPPORTED_COLOR_MODES), - } - ), - cv.has_at_least_one_key(ATTR_CONFIG_ENTRY, *cv.ENTITY_SERVICE_FIELDS), +SET_COLOR_MODE_SCHEMA = BASE_SERVICE_SCHEMA.extend( + { + vol.Required(ATTR_COLOR_MODE): vol.In(SUPPORTED_COLOR_MODES), + } ) TURN_ON_SUPER_CHLOR_SCHEMA = BASE_SERVICE_SCHEMA.extend( @@ -72,37 +58,10 @@ TURN_ON_SUPER_CHLOR_SCHEMA = BASE_SERVICE_SCHEMA.extend( def async_load_screenlogic_services(hass: HomeAssistant): """Set up services for the ScreenLogic integration.""" - async def extract_screenlogic_config_entry_ids(service_call: ServiceCall): - if not ( - screenlogic_entry_ids := await async_extract_config_entry_ids( - hass, service_call - ) - ): - raise ServiceValidationError( - f"Failed to call service '{service_call.service}'. 
Config entry for " - "target not found" - ) - return screenlogic_entry_ids - async def get_coordinators( service_call: ServiceCall, ) -> list[ScreenlogicDataUpdateCoordinator]: - entry_ids: set[str] - if entry_id := service_call.data.get(ATTR_CONFIG_ENTRY): - entry_ids = {entry_id} - else: - ir.async_create_issue( - hass, - DOMAIN, - "service_target_deprecation", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=ir.IssueSeverity.WARNING, - translation_key="service_target_deprecation", - ) - entry_ids = await extract_screenlogic_config_entry_ids(service_call) - + entry_ids = {service_call.data[ATTR_CONFIG_ENTRY]} coordinators: list[ScreenlogicDataUpdateCoordinator] = [] for entry_id in entry_ids: config_entry = cast( diff --git a/homeassistant/components/screenlogic/services.yaml b/homeassistant/components/screenlogic/services.yaml index f05537640ca..1dc2e0339f2 100644 --- a/homeassistant/components/screenlogic/services.yaml +++ b/homeassistant/components/screenlogic/services.yaml @@ -2,7 +2,7 @@ set_color_mode: fields: config_entry: - required: false + required: true selector: config_entry: integration: screenlogic diff --git a/homeassistant/components/screenlogic/strings.json b/homeassistant/components/screenlogic/strings.json index 2370d78a6ce..91395a0e86d 100644 --- a/homeassistant/components/screenlogic/strings.json +++ b/homeassistant/components/screenlogic/strings.json @@ -75,18 +75,5 @@ } } } - }, - "issues": { - "service_target_deprecation": { - "title": "Deprecating use of target for ScreenLogic actions", - "fix_flow": { - "step": { - "confirm": { - "title": "Deprecating target for ScreenLogic actions", - "description": "Use of an Area, Device, or Entity as a target for ScreenLogic actions is being deprecated. Instead, use `config_entry` with the entry_id of the desired ScreenLogic integration.\n\nPlease update your automations and scripts and select **submit** to fix this issue." 
- } - } - } - } } } diff --git a/homeassistant/components/script/__init__.py b/homeassistant/components/script/__init__.py index 6fd26b2ea8d..c0d79c446bb 100644 --- a/homeassistant/components/script/__init__.py +++ b/homeassistant/components/script/__init__.py @@ -5,10 +5,10 @@ from __future__ import annotations from abc import ABC, abstractmethod import asyncio from dataclasses import dataclass -from functools import cached_property import logging from typing import TYPE_CHECKING, Any, cast +from propcache import cached_property import voluptuous as vol from homeassistant.components import websocket_api diff --git a/homeassistant/components/script/helpers.py b/homeassistant/components/script/helpers.py index b070a4d60ce..31aac506b35 100644 --- a/homeassistant/components/script/helpers.py +++ b/homeassistant/components/script/helpers.py @@ -1,6 +1,6 @@ """Helpers for automation integration.""" -from homeassistant.components.blueprint import DomainBlueprints +from homeassistant.components.blueprint import BLUEPRINT_SCHEMA, DomainBlueprints from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.singleton import singleton @@ -27,5 +27,10 @@ async def _reload_blueprint_scripts(hass: HomeAssistant, blueprint_path: str) -> def async_get_blueprints(hass: HomeAssistant) -> DomainBlueprints: """Get script blueprints.""" return DomainBlueprints( - hass, DOMAIN, LOGGER, _blueprint_in_use, _reload_blueprint_scripts + hass, + DOMAIN, + LOGGER, + _blueprint_in_use, + _reload_blueprint_scripts, + BLUEPRINT_SCHEMA, ) diff --git a/homeassistant/components/script/icons.json b/homeassistant/components/script/icons.json index d253d0fd829..7e160941c05 100644 --- a/homeassistant/components/script/icons.json +++ b/homeassistant/components/script/icons.json @@ -8,9 +8,17 @@ } }, "services": { - "reload": "mdi:reload", - "turn_on": "mdi:script-text-play", - "turn_off": "mdi:script-text", - "toggle": "mdi:script-text" + "reload": { + "service": "mdi:reload" + }, + "turn_on": { + "service": "mdi:script-text-play" + }, + "turn_off": { + "service": "mdi:script-text" + }, + "toggle": { + "service": "mdi:script-text" + } } } diff --git a/homeassistant/components/scsgate/__init__.py b/homeassistant/components/scsgate/__init__.py index db96ccb688a..9aabb315942 100644 --- a/homeassistant/components/scsgate/__init__.py +++ b/homeassistant/components/scsgate/__init__.py @@ -43,7 +43,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: def stop_monitor(event): """Stop the SCSGate.""" - _LOGGER.info("Stopping SCSGate monitor thread") + _LOGGER.debug("Stopping SCSGate monitor thread") scsgate.stop() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_monitor) diff --git a/homeassistant/components/scsgate/manifest.json b/homeassistant/components/scsgate/manifest.json index 3f20762cf73..a3b08f86719 100644 --- a/homeassistant/components/scsgate/manifest.json +++ b/homeassistant/components/scsgate/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/scsgate", "iot_class": "local_polling", "loggers": ["scsgate"], + "quality_scale": "legacy", "requirements": ["scsgate==0.1.0"] } diff --git a/homeassistant/components/season/manifest.json b/homeassistant/components/season/manifest.json index 0e758dc4296..b695fea85b5 100644 --- a/homeassistant/components/season/manifest.json +++ b/homeassistant/components/season/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_polling", "loggers": ["ephem"], "quality_scale": 
"internal", - "requirements": ["ephem==4.1.5"] + "requirements": ["ephem==4.1.6"] } diff --git a/homeassistant/components/select/__init__.py b/homeassistant/components/select/__init__.py index 24f7d8bffea..3834dc4a0c7 100644 --- a/homeassistant/components/select/__init__.py +++ b/homeassistant/components/select/__init__.py @@ -3,10 +3,10 @@ from __future__ import annotations from datetime import timedelta -from functools import cached_property import logging from typing import Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -16,6 +16,7 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import ( ATTR_CYCLE, @@ -31,6 +32,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[SelectEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -59,7 +61,7 @@ __all__ = [ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Select entities.""" - component = hass.data[DOMAIN] = EntityComponent[SelectEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[SelectEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -99,14 +101,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[SelectEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[SelectEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class SelectEntityDescription(EntityDescription, frozen_or_thawed=True): diff --git a/homeassistant/components/select/icons.json b/homeassistant/components/select/icons.json index 1b440d2a1de..fbd1d4568f1 100644 --- a/homeassistant/components/select/icons.json +++ b/homeassistant/components/select/icons.json @@ -5,10 +5,20 @@ } }, "services": { - "select_first": "mdi:format-list-bulleted", - "select_last": "mdi:format-list-bulleted", - "select_next": "mdi:format-list-bulleted", - "select_option": "mdi:format-list-bulleted", - "select_previous": "mdi:format-list-bulleted" + "select_first": { + "service": "mdi:format-list-bulleted" + }, + "select_last": { + "service": "mdi:format-list-bulleted" + }, + "select_next": { + "service": "mdi:format-list-bulleted" + }, + "select_option": { + "service": "mdi:format-list-bulleted" + }, + "select_previous": { + "service": "mdi:format-list-bulleted" + } } } diff --git a/homeassistant/components/sendgrid/manifest.json b/homeassistant/components/sendgrid/manifest.json index c38952e1a04..ec89ae0a363 100644 --- a/homeassistant/components/sendgrid/manifest.json +++ b/homeassistant/components/sendgrid/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sendgrid", "iot_class": "cloud_push", "loggers": ["sendgrid"], + 
"quality_scale": "legacy", "requirements": ["sendgrid==6.8.2"] } diff --git a/homeassistant/components/sense/__init__.py b/homeassistant/components/sense/__init__.py index 58e993ad6e0..e919d48e96d 100644 --- a/homeassistant/components/sense/__init__.py +++ b/homeassistant/components/sense/__init__.py @@ -1,10 +1,8 @@ """Support for monitoring a Sense energy sensor.""" from dataclasses import dataclass -from datetime import timedelta from functools import partial import logging -from typing import Any from sense_energy import ( ASyncSenseable, @@ -13,26 +11,18 @@ from sense_energy import ( ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_EMAIL, - CONF_TIMEOUT, - EVENT_HOMEASSISTANT_STOP, - Platform, -) -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import CONF_TIMEOUT, Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.event import async_track_time_interval -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( ACTIVE_UPDATE_RATE, SENSE_CONNECT_EXCEPTIONS, - SENSE_DEVICE_UPDATE, SENSE_TIMEOUT_EXCEPTIONS, SENSE_WEBSOCKET_EXCEPTIONS, ) +from .coordinator import SenseRealtimeCoordinator, SenseTrendCoordinator _LOGGER = logging.getLogger(__name__) @@ -40,37 +30,19 @@ PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] type SenseConfigEntry = ConfigEntry[SenseData] -class SenseDevicesData: - """Data for each sense device.""" - - def __init__(self): - """Create.""" - self._data_by_device = {} - - def set_devices_data(self, devices): - """Store a device update.""" - self._data_by_device = {device["id"]: device for device in devices} - - def get_device_by_id(self, sense_device_id): - """Get the latest device data.""" - return self._data_by_device.get(sense_device_id) - - @dataclass(kw_only=True, slots=True) class SenseData: """Sense data type.""" data: ASyncSenseable - device_data: SenseDevicesData - trends: DataUpdateCoordinator[None] - discovered: list[dict[str, Any]] + trends: SenseTrendCoordinator + rt: SenseRealtimeCoordinator async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> bool: """Set up Sense from a config entry.""" entry_data = entry.data - email = entry_data[CONF_EMAIL] timeout = entry_data[CONF_TIMEOUT] access_token = entry_data.get("access_token", "") @@ -108,7 +80,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> boo raise ConfigEntryNotReady(str(err)) from err try: - sense_discovered_devices = await gateway.get_discovered_device_data() + await gateway.fetch_devices() await gateway.update_realtime() except SENSE_TIMEOUT_EXCEPTIONS as err: raise ConfigEntryNotReady( @@ -117,26 +89,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> boo except SENSE_WEBSOCKET_EXCEPTIONS as err: raise ConfigEntryNotReady(str(err) or "Error during realtime update") from err - async def _async_update_trend(): - """Update the trend data.""" - try: - await gateway.update_trend_data() - except (SenseAuthenticationException, SenseMFARequiredException) as err: - _LOGGER.warning("Sense authentication expired") - raise ConfigEntryAuthFailed(err) from err - except SENSE_CONNECT_EXCEPTIONS as err: - raise UpdateFailed(err) from err - - 
trends_coordinator: DataUpdateCoordinator[None] = DataUpdateCoordinator( - hass, - _LOGGER, - name=f"Sense Trends {email}", - update_method=_async_update_trend, - update_interval=timedelta(seconds=300), - ) - # Start out as unavailable so we do not report 0 data - # until the update happens - trends_coordinator.last_update_success = False + trends_coordinator = SenseTrendCoordinator(hass, gateway) + realtime_coordinator = SenseRealtimeCoordinator(hass, gateway) # This can take longer than 60s and we already know # sense is online since get_discovered_device_data was @@ -146,45 +100,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> boo trends_coordinator.async_request_refresh(), "sense.trends-coordinator-refresh", ) + entry.async_create_background_task( + hass, + realtime_coordinator.async_request_refresh(), + "sense.realtime-coordinator-refresh", + ) entry.runtime_data = SenseData( data=gateway, - device_data=SenseDevicesData(), trends=trends_coordinator, - discovered=sense_discovered_devices, + rt=realtime_coordinator, ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - - async def async_sense_update(_): - """Retrieve latest state.""" - try: - await gateway.update_realtime() - except SENSE_TIMEOUT_EXCEPTIONS as ex: - _LOGGER.error("Timeout retrieving data: %s", ex) - except SENSE_WEBSOCKET_EXCEPTIONS as ex: - _LOGGER.error("Failed to update data: %s", ex) - - data = gateway.get_realtime() - if "devices" in data: - entry.runtime_data.device_data.set_devices_data(data["devices"]) - async_dispatcher_send(hass, f"{SENSE_DEVICE_UPDATE}-{gateway.sense_monitor_id}") - - remove_update_callback = async_track_time_interval( - hass, async_sense_update, timedelta(seconds=ACTIVE_UPDATE_RATE) - ) - - @callback - def _remove_update_callback_at_stop(event): - remove_update_callback() - - entry.async_on_unload(remove_update_callback) - entry.async_on_unload( - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_STOP, _remove_update_callback_at_stop - ) - ) - return True diff --git a/homeassistant/components/sense/binary_sensor.py b/homeassistant/components/sense/binary_sensor.py index 5640dd19961..d06b3a62937 100644 --- a/homeassistant/components/sense/binary_sensor.py +++ b/homeassistant/components/sense/binary_sensor.py @@ -2,17 +2,20 @@ import logging +from sense_energy.sense_api import SenseDevice + from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import SenseConfigEntry -from .const import ATTRIBUTION, DOMAIN, MDI_ICONS, SENSE_DEVICE_UPDATE +from .const import DOMAIN +from .coordinator import SenseRealtimeCoordinator +from .entity import SenseDeviceEntity _LOGGER = logging.getLogger(__name__) @@ -24,13 +27,11 @@ async def async_setup_entry( ) -> None: """Set up the Sense binary sensor.""" sense_monitor_id = config_entry.runtime_data.data.sense_monitor_id + realtime_coordinator = config_entry.runtime_data.rt - sense_devices = config_entry.runtime_data.discovered - device_data = config_entry.runtime_data.device_data devices = [ - SenseDevice(device_data, device, sense_monitor_id) - for device in sense_devices - if device["tags"]["DeviceListAllowed"] == "true" + SenseBinarySensor(device, realtime_coordinator, sense_monitor_id) + for device in config_entry.runtime_data.data.devices ] await _migrate_old_unique_ids(hass, devices) @@ -38,65 +39,46 @@ async def async_setup_entry( async_add_entities(devices) -async def _migrate_old_unique_ids(hass, devices): +class SenseBinarySensor(SenseDeviceEntity, BinarySensorEntity): + """Implementation of a Sense energy device binary sensor.""" + + _attr_device_class = BinarySensorDeviceClass.POWER + + def __init__( + self, + device: SenseDevice, + coordinator: SenseRealtimeCoordinator, + sense_monitor_id: str, + ) -> None: + """Initialize the Sense binary sensor.""" + super().__init__(device, coordinator, sense_monitor_id, device.id) + self._id = device.id + + @property + def old_unique_id(self) -> str: + """Return the old not so unique id of the binary sensor.""" + return self._id + + @property + def is_on(self) -> bool: + """Return the state of the sensor.""" + return self._device.is_on + + +async def _migrate_old_unique_ids( + hass: HomeAssistant, devices: list[SenseBinarySensor] +) -> None: registry = er.async_get(hass) for device in devices: # Migration of old not so unique ids old_entity_id = registry.async_get_entity_id( "binary_sensor", DOMAIN, device.old_unique_id ) - if old_entity_id is not None: + updated_id = device.unique_id + if old_entity_id is not None and updated_id is not None: _LOGGER.debug( "Migrating unique_id from [%s] to [%s]", device.old_unique_id, device.unique_id, ) - registry.async_update_entity(old_entity_id, new_unique_id=device.unique_id) - - -def sense_to_mdi(sense_icon): - """Convert sense icon to mdi icon.""" - return "mdi:{}".format(MDI_ICONS.get(sense_icon, "power-plug")) - - -class SenseDevice(BinarySensorEntity): - """Implementation of a Sense energy device binary sensor.""" - - _attr_attribution = ATTRIBUTION - _attr_should_poll = False - _attr_available = False - _attr_device_class = BinarySensorDeviceClass.POWER - - def __init__(self, sense_devices_data, device, sense_monitor_id): - """Initialize the Sense binary sensor.""" - self._attr_name = device["name"] - self._id = device["id"] - self._sense_monitor_id = sense_monitor_id - self._attr_unique_id = f"{sense_monitor_id}-{self._id}" - self._attr_icon = sense_to_mdi(device["icon"]) - self._sense_devices_data = sense_devices_data - - @property - def old_unique_id(self): - """Return the old not so unique id of the binary sensor.""" - return self._id - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}", - self._async_update_from_data, - ) - ) - - @callback - def _async_update_from_data(self): - """Get the latest data, update state. 
Must not do I/O.""" - new_state = bool(self._sense_devices_data.get_device_by_id(self._id)) - if self._attr_available and self._attr_is_on == new_state: - return - self._attr_available = True - self._attr_is_on = new_state - self.async_write_ha_state() + registry.async_update_entity(old_entity_id, new_unique_id=updated_id) diff --git a/homeassistant/components/sense/config_flow.py b/homeassistant/components/sense/config_flow.py index dab80b99e1a..c0df40aec9d 100644 --- a/homeassistant/components/sense/config_flow.py +++ b/homeassistant/components/sense/config_flow.py @@ -34,13 +34,13 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): - """Init Config .""" - self._gateway = None - self._auth_data = {} - super().__init__() + _gateway: ASyncSenseable - async def validate_input(self, data): + def __init__(self) -> None: + """Init Config .""" + self._auth_data: dict[str, Any] = {} + + async def validate_input(self, data: Mapping[str, Any]) -> None: """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. @@ -64,7 +64,7 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): self._auth_data[CONF_EMAIL], self._auth_data[CONF_PASSWORD] ) - async def create_entry_from_data(self): + async def create_entry_from_data(self) -> ConfigFlowResult: """Create the entry from the config data.""" self._auth_data["access_token"] = self._gateway.sense_access_token self._auth_data["user_id"] = self._gateway.sense_user_id @@ -79,7 +79,9 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_update_reload_and_abort(existing_entry, data=self._auth_data) - async def validate_input_and_create_entry(self, user_input, errors): + async def validate_input_and_create_entry( + self, user_input: Mapping[str, Any], errors: dict[str, str] + ) -> ConfigFlowResult | None: """Validate the input and create the entry from the data.""" try: await self.validate_input(user_input) @@ -96,7 +98,9 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): return await self.create_entry_from_data() return None - async def async_step_validation(self, user_input=None): + async def async_step_validation( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle validation (2fa) step.""" errors = {} if user_input: @@ -118,9 +122,11 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: if result := await self.validate_input_and_create_entry(user_input, errors): return result @@ -136,9 +142,11 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): self._auth_data = dict(entry_data) return await self.async_step_reauth_validate(entry_data) - async def async_step_reauth_validate(self, user_input=None): + async def async_step_reauth_validate( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle reauth and validation.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: if result := await self.validate_input_and_create_entry(user_input, errors): return result diff --git a/homeassistant/components/sense/const.py b/homeassistant/components/sense/const.py index 5e944c18d8d..b23117c977d 100644 --- a/homeassistant/components/sense/const.py +++ b/homeassistant/components/sense/const.py @@ -11,6 +11,7 @@ from sense_energy 
import ( DOMAIN = "sense" DEFAULT_TIMEOUT = 30 ACTIVE_UPDATE_RATE = 60 +TREND_UPDATE_RATE = 300 DEFAULT_NAME = "Sense" SENSE_DEVICE_UPDATE = "sense_devices_update" @@ -19,7 +20,7 @@ ACTIVE_TYPE = "active" ATTRIBUTION = "Data provided by Sense.com" -CONSUMPTION_NAME = "Usage" +CONSUMPTION_NAME = "Energy" CONSUMPTION_ID = "usage" PRODUCTION_NAME = "Production" PRODUCTION_ID = "production" diff --git a/homeassistant/components/sense/coordinator.py b/homeassistant/components/sense/coordinator.py new file mode 100644 index 00000000000..c0029cd79ea --- /dev/null +++ b/homeassistant/components/sense/coordinator.py @@ -0,0 +1,76 @@ +"""Sense Coordinators.""" + +from datetime import timedelta +import logging + +from sense_energy import ( + ASyncSenseable, + SenseAuthenticationException, + SenseMFARequiredException, +) + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import ( + ACTIVE_UPDATE_RATE, + SENSE_CONNECT_EXCEPTIONS, + SENSE_TIMEOUT_EXCEPTIONS, + SENSE_WEBSOCKET_EXCEPTIONS, + TREND_UPDATE_RATE, +) + +_LOGGER = logging.getLogger(__name__) + + +class SenseCoordinator(DataUpdateCoordinator[None]): + """Sense Trend Coordinator.""" + + def __init__( + self, hass: HomeAssistant, gateway: ASyncSenseable, name: str, update: int + ) -> None: + """Initialize.""" + super().__init__( + hass, + logger=_LOGGER, + name=f"Sense {name} {gateway.sense_monitor_id}", + update_interval=timedelta(seconds=update), + ) + self._gateway = gateway + self.last_update_success = False + + +class SenseTrendCoordinator(SenseCoordinator): + """Sense Trend Coordinator.""" + + def __init__(self, hass: HomeAssistant, gateway: ASyncSenseable) -> None: + """Initialize.""" + super().__init__(hass, gateway, "Trends", TREND_UPDATE_RATE) + + async def _async_update_data(self) -> None: + """Update the trend data.""" + try: + await self._gateway.update_trend_data() + except (SenseAuthenticationException, SenseMFARequiredException) as err: + _LOGGER.warning("Sense authentication expired") + raise ConfigEntryAuthFailed(err) from err + except SENSE_CONNECT_EXCEPTIONS as err: + raise UpdateFailed(err) from err + + +class SenseRealtimeCoordinator(SenseCoordinator): + """Sense Realtime Coordinator.""" + + def __init__(self, hass: HomeAssistant, gateway: ASyncSenseable) -> None: + """Initialize.""" + super().__init__(hass, gateway, "Realtime", ACTIVE_UPDATE_RATE) + + async def _async_update_data(self) -> None: + """Retrieve latest state.""" + try: + await self._gateway.update_realtime() + except SENSE_TIMEOUT_EXCEPTIONS as ex: + _LOGGER.error("Timeout retrieving data: %s", ex) + except SENSE_WEBSOCKET_EXCEPTIONS as ex: + _LOGGER.error("Failed to update data: %s", ex) diff --git a/homeassistant/components/sense/entity.py b/homeassistant/components/sense/entity.py new file mode 100644 index 00000000000..248be53ceb7 --- /dev/null +++ b/homeassistant/components/sense/entity.py @@ -0,0 +1,71 @@ +"""Base entities for Sense energy.""" + +from sense_energy import ASyncSenseable +from sense_energy.sense_api import SenseDevice + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import ATTRIBUTION, DOMAIN, MDI_ICONS +from .coordinator import SenseCoordinator + + +def sense_to_mdi(sense_icon: str) -> str: + """Convert sense icon to mdi icon.""" + return f"mdi:{MDI_ICONS.get(sense_icon, 
"power-plug")}" + + +class SenseEntity(CoordinatorEntity[SenseCoordinator]): + """Base implementation of a Sense sensor.""" + + _attr_attribution = ATTRIBUTION + _attr_should_poll = False + _attr_has_entity_name = True + + def __init__( + self, + gateway: ASyncSenseable, + coordinator: SenseCoordinator, + sense_monitor_id: str, + unique_id: str, + ) -> None: + """Initialize the Sense sensor.""" + super().__init__(coordinator) + self._attr_unique_id = f"{sense_monitor_id}-{unique_id}" + self._gateway = gateway + self._attr_device_info = DeviceInfo( + name=f"Sense {sense_monitor_id}", + identifiers={(DOMAIN, sense_monitor_id)}, + model="Sense", + manufacturer="Sense Labs, Inc.", + configuration_url="https://home.sense.com", + ) + + +class SenseDeviceEntity(CoordinatorEntity[SenseCoordinator]): + """Base implementation of a Sense sensor.""" + + _attr_attribution = ATTRIBUTION + _attr_should_poll = False + _attr_has_entity_name = True + + def __init__( + self, + device: SenseDevice, + coordinator: SenseCoordinator, + sense_monitor_id: str, + unique_id: str, + ) -> None: + """Initialize the Sense sensor.""" + super().__init__(coordinator) + self._attr_unique_id = f"{sense_monitor_id}-{unique_id}" + self._device = device + self._attr_icon = sense_to_mdi(device.icon) + self._attr_device_info = DeviceInfo( + name=device.name, + identifiers={(DOMAIN, f"{sense_monitor_id}:{device.id}")}, + model="Sense", + manufacturer="Sense Labs, Inc.", + configuration_url="https://home.sense.com", + via_device=(DOMAIN, sense_monitor_id), + ) diff --git a/homeassistant/components/sense/manifest.json b/homeassistant/components/sense/manifest.json index 116b714ba82..966488b6a48 100644 --- a/homeassistant/components/sense/manifest.json +++ b/homeassistant/components/sense/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/sense", "iot_class": "cloud_polling", "loggers": ["sense_energy"], - "requirements": ["sense-energy==0.12.4"] + "requirements": ["sense-energy==0.13.4"] } diff --git a/homeassistant/components/sense/sensor.py b/homeassistant/components/sense/sensor.py index 129b1262fd0..2f5c82675d5 100644 --- a/homeassistant/components/sense/sensor.py +++ b/homeassistant/components/sense/sensor.py @@ -1,5 +1,10 @@ """Support for monitoring a Sense energy sensor.""" +from datetime import datetime + +from sense_energy import ASyncSenseable, Scale +from sense_energy.sense_api import SenseDevice + from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -11,55 +16,37 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfPower, ) -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import SenseConfigEntry from .const import ( - ACTIVE_NAME, ACTIVE_TYPE, - ATTRIBUTION, CONSUMPTION_ID, CONSUMPTION_NAME, - DOMAIN, FROM_GRID_ID, FROM_GRID_NAME, - MDI_ICONS, NET_PRODUCTION_ID, NET_PRODUCTION_NAME, PRODUCTION_ID, PRODUCTION_NAME, PRODUCTION_PCT_ID, PRODUCTION_PCT_NAME, - SENSE_DEVICE_UPDATE, SOLAR_POWERED_ID, SOLAR_POWERED_NAME, TO_GRID_ID, TO_GRID_NAME, ) - - -class SensorConfig: - """Data structure holding sensor configuration.""" - - def __init__(self, name, sensor_type): - """Sensor name and type to pass to API.""" - self.name = name - self.sensor_type = sensor_type - - -# Sensor types/ranges -ACTIVE_SENSOR_TYPE = SensorConfig(ACTIVE_NAME, ACTIVE_TYPE) +from .coordinator import SenseRealtimeCoordinator, SenseTrendCoordinator +from .entity import SenseDeviceEntity, SenseEntity # Sensor types/ranges TRENDS_SENSOR_TYPES = { - "daily": SensorConfig("Daily", "DAY"), - "weekly": SensorConfig("Weekly", "WEEK"), - "monthly": SensorConfig("Monthly", "MONTH"), - "yearly": SensorConfig("Yearly", "YEAR"), + Scale.DAY: "Daily", + Scale.WEEK: "Weekly", + Scale.MONTH: "Monthly", + Scale.YEAR: "Yearly", + Scale.CYCLE: "Bill", } # Production/consumption variants @@ -76,11 +63,6 @@ TREND_SENSOR_VARIANTS = [ ] -def sense_to_mdi(sense_icon): - """Convert sense icon to mdi icon.""" - return "mdi:{}".format(MDI_ICONS.get(sense_icon, "power-plug")) - - async def async_setup_entry( hass: HomeAssistant, config_entry: SenseConfigEntry, @@ -89,58 +71,46 @@ async def async_setup_entry( """Set up the Sense sensor.""" data = config_entry.runtime_data.data trends_coordinator = config_entry.runtime_data.trends + realtime_coordinator = config_entry.runtime_data.rt # Request only in case it takes longer # than 60s await trends_coordinator.async_request_refresh() sense_monitor_id = data.sense_monitor_id - sense_devices = config_entry.runtime_data.discovered - device_data = config_entry.runtime_data.device_data - entities: list[SensorEntity] = [ - SenseEnergyDevice(device_data, device, sense_monitor_id) - for device in sense_devices - if device["tags"]["DeviceListAllowed"] == "true" - ] + entities: list[SensorEntity] = [] + + for device in config_entry.runtime_data.data.devices: + entities.append( + SenseDevicePowerSensor(device, sense_monitor_id, realtime_coordinator) + ) + entities.extend( + SenseDeviceEnergySensor(device, scale, trends_coordinator, sense_monitor_id) + for scale in Scale + ) for variant_id, variant_name in SENSOR_VARIANTS: - name = ACTIVE_SENSOR_TYPE.name - sensor_type = ACTIVE_SENSOR_TYPE.sensor_type - - unique_id = f"{sense_monitor_id}-active-{variant_id}" entities.append( - SenseActiveSensor( - data, - name, - sensor_type, - sense_monitor_id, - variant_id, - variant_name, - unique_id, + SensePowerSensor( + data, sense_monitor_id, variant_id, variant_name, realtime_coordinator ) ) entities.extend( - SenseVoltageSensor(data, i, sense_monitor_id) + SenseVoltageSensor(data, i, sense_monitor_id, realtime_coordinator) for i in range(len(data.active_voltage)) ) - for type_id, typ in TRENDS_SENSOR_TYPES.items(): + for scale in Scale: for variant_id, variant_name in TREND_SENSOR_VARIANTS: - name = typ.name - sensor_type = typ.sensor_type - - unique_id = f"{sense_monitor_id}-{type_id}-{variant_id}" entities.append( SenseTrendsSensor( data, - name, - sensor_type, + scale, variant_id, variant_name, trends_coordinator, - unique_id, sense_monitor_id, ) ) @@ -148,131 +118,89 @@ async def async_setup_entry( async_add_entities(entities) -class SenseActiveSensor(SensorEntity): +class 
SensePowerSensor(SenseEntity, SensorEntity): """Implementation of a Sense energy sensor.""" _attr_device_class = SensorDeviceClass.POWER _attr_native_unit_of_measurement = UnitOfPower.WATT - _attr_attribution = ATTRIBUTION - _attr_should_poll = False - _attr_available = False _attr_state_class = SensorStateClass.MEASUREMENT def __init__( self, - data, - name, - sensor_type, - sense_monitor_id, - variant_id, - variant_name, - unique_id, - ): + gateway: ASyncSenseable, + sense_monitor_id: str, + variant_id: str, + variant_name: str, + realtime_coordinator: SenseRealtimeCoordinator, + ) -> None: """Initialize the Sense sensor.""" - self._attr_name = f"{name} {variant_name}" - self._attr_unique_id = unique_id - self._data = data - self._sense_monitor_id = sense_monitor_id - self._sensor_type = sensor_type + super().__init__( + gateway, + realtime_coordinator, + sense_monitor_id, + f"{ACTIVE_TYPE}-{variant_id}", + ) + self._attr_name = variant_name self._variant_id = variant_id - self._variant_name = variant_name - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}", - self._async_update_from_data, - ) - ) - - @callback - def _async_update_from_data(self): - """Update the sensor from the data. Must not do I/O.""" - new_state = round( - self._data.active_solar_power + @property + def native_value(self) -> float: + """Return the state of the sensor.""" + return round( + self._gateway.active_solar_power if self._variant_id == PRODUCTION_ID - else self._data.active_power + else self._gateway.active_power ) - if self._attr_available and self._attr_native_value == new_state: - return - self._attr_native_value = new_state - self._attr_available = True - self.async_write_ha_state() -class SenseVoltageSensor(SensorEntity): +class SenseVoltageSensor(SenseEntity, SensorEntity): """Implementation of a Sense energy voltage sensor.""" _attr_device_class = SensorDeviceClass.VOLTAGE _attr_state_class = SensorStateClass.MEASUREMENT _attr_native_unit_of_measurement = UnitOfElectricPotential.VOLT - _attr_attribution = ATTRIBUTION - _attr_should_poll = False - _attr_available = False def __init__( self, - data, - index, - sense_monitor_id, - ): + gateway: ASyncSenseable, + index: int, + sense_monitor_id: str, + realtime_coordinator: SenseRealtimeCoordinator, + ) -> None: """Initialize the Sense sensor.""" - line_num = index + 1 - self._attr_name = f"L{line_num} Voltage" - self._attr_unique_id = f"{sense_monitor_id}-L{line_num}" - self._data = data - self._sense_monitor_id = sense_monitor_id + super().__init__( + gateway, realtime_coordinator, sense_monitor_id, f"L{index + 1}" + ) + self._attr_name = f"L{index + 1} Voltage" self._voltage_index = index - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}", - self._async_update_from_data, - ) - ) - - @callback - def _async_update_from_data(self): - """Update the sensor from the data. 
Must not do I/O.""" - new_state = round(self._data.active_voltage[self._voltage_index], 1) - if self._attr_available and self._attr_native_value == new_state: - return - self._attr_available = True - self._attr_native_value = new_state - self.async_write_ha_state() + @property + def native_value(self) -> float: + """Return the state of the sensor.""" + return round(self._gateway.active_voltage[self._voltage_index], 1) -class SenseTrendsSensor(CoordinatorEntity, SensorEntity): +class SenseTrendsSensor(SenseEntity, SensorEntity): """Implementation of a Sense energy sensor.""" - _attr_device_class = SensorDeviceClass.ENERGY - _attr_state_class = SensorStateClass.TOTAL - _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR - _attr_attribution = ATTRIBUTION - _attr_should_poll = False - def __init__( self, - data, - name, - sensor_type, - variant_id, - variant_name, - trends_coordinator, - unique_id, - sense_monitor_id, - ): + gateway: ASyncSenseable, + scale: Scale, + variant_id: str, + variant_name: str, + trends_coordinator: SenseTrendCoordinator, + sense_monitor_id: str, + ) -> None: """Initialize the Sense sensor.""" - super().__init__(trends_coordinator) - self._attr_name = f"{name} {variant_name}" - self._attr_unique_id = unique_id - self._data = data - self._sensor_type = sensor_type + super().__init__( + gateway, + trends_coordinator, + sense_monitor_id, + f"{TRENDS_SENSOR_TYPES[scale].lower()}-{variant_id}", + ) + self._attr_name = f"{TRENDS_SENSOR_TYPES[scale]} {variant_name}" + self._scale = scale self._variant_id = variant_id self._had_any_update = False if variant_id in [PRODUCTION_PCT_ID, SOLAR_POWERED_ID]: @@ -280,66 +208,75 @@ class SenseTrendsSensor(CoordinatorEntity, SensorEntity): self._attr_entity_registry_enabled_default = False self._attr_state_class = None self._attr_device_class = None - self._attr_device_info = DeviceInfo( - name=f"Sense {sense_monitor_id}", - identifiers={(DOMAIN, sense_monitor_id)}, - model="Sense", - manufacturer="Sense Labs, Inc.", - configuration_url="https://home.sense.com", - ) + else: + self._attr_device_class = SensorDeviceClass.ENERGY + self._attr_state_class = SensorStateClass.TOTAL + self._attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR @property - def native_value(self): + def native_value(self) -> float: """Return the state of the sensor.""" - return round(self._data.get_trend(self._sensor_type, self._variant_id), 1) + return round(self._gateway.get_stat(self._scale, self._variant_id), 1) @property - def last_reset(self): + def last_reset(self) -> datetime | None: """Return the time when the sensor was last reset, if any.""" if self._attr_state_class == SensorStateClass.TOTAL: - return self._data.trend_start(self._sensor_type) + return self._gateway.trend_start(self._scale) return None -class SenseEnergyDevice(SensorEntity): +class SenseDevicePowerSensor(SenseDeviceEntity, SensorEntity): """Implementation of a Sense energy device.""" - _attr_available = False _attr_state_class = SensorStateClass.MEASUREMENT _attr_native_unit_of_measurement = UnitOfPower.WATT - _attr_attribution = ATTRIBUTION _attr_device_class = SensorDeviceClass.POWER - _attr_should_poll = False - def __init__(self, sense_devices_data, device, sense_monitor_id): - """Initialize the Sense binary sensor.""" - self._attr_name = f"{device['name']} {CONSUMPTION_NAME}" - self._id = device["id"] - self._sense_monitor_id = sense_monitor_id - self._attr_unique_id = f"{sense_monitor_id}-{self._id}-{CONSUMPTION_ID}" - self._attr_icon = 
sense_to_mdi(device["icon"]) - self._sense_devices_data = sense_devices_data - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}", - self._async_update_from_data, - ) + def __init__( + self, + device: SenseDevice, + sense_monitor_id: str, + coordinator: SenseRealtimeCoordinator, + ) -> None: + """Initialize the Sense device sensor.""" + super().__init__( + device, coordinator, sense_monitor_id, f"{device.id}-{CONSUMPTION_ID}" ) - @callback - def _async_update_from_data(self): - """Get the latest data, update state. Must not do I/O.""" - device_data = self._sense_devices_data.get_device_by_id(self._id) - if not device_data or "w" not in device_data: - new_state = 0 - else: - new_state = int(device_data["w"]) - if self._attr_available and self._attr_native_value == new_state: - return - self._attr_native_value = new_state - self._attr_available = True - self.async_write_ha_state() + @property + def native_value(self) -> float: + """Return the state of the sensor.""" + return self._device.power_w + + +class SenseDeviceEnergySensor(SenseDeviceEntity, SensorEntity): + """Implementation of a Sense device energy sensor.""" + + _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR + _attr_state_class = SensorStateClass.TOTAL_INCREASING + _attr_device_class = SensorDeviceClass.ENERGY + + def __init__( + self, + device: SenseDevice, + scale: Scale, + coordinator: SenseTrendCoordinator, + sense_monitor_id: str, + ) -> None: + """Initialize the Sense device sensor.""" + super().__init__( + device, + coordinator, + sense_monitor_id, + f"{device.id}-{TRENDS_SENSOR_TYPES[scale].lower()}-energy", + ) + self._attr_translation_key = f"{TRENDS_SENSOR_TYPES[scale].lower()}_energy" + self._attr_suggested_display_precision = 2 + self._scale = scale + self._device = device + + @property + def native_value(self) -> float: + """Return the state of the sensor.""" + return self._device.energy_kwh[self._scale] diff --git a/homeassistant/components/sense/strings.json b/homeassistant/components/sense/strings.json index a519155bee1..4579c84f050 100644 --- a/homeassistant/components/sense/strings.json +++ b/homeassistant/components/sense/strings.json @@ -32,5 +32,24 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } + }, + "entity": { + "sensor": { + "daily_energy": { + "name": "Daily energy" + }, + "weekly_energy": { + "name": "Weekly energy" + }, + "monthly_energy": { + "name": "Monthly energy" + }, + "yearly_energy": { + "name": "Yearly energy" + }, + "bill_energy": { + "name": "Bill energy" + } + } } } diff --git a/homeassistant/components/sensibo/__init__.py b/homeassistant/components/sensibo/__init__.py index b2b6ac15958..15ef3def1f5 100644 --- a/homeassistant/components/sensibo/__init__.py +++ b/homeassistant/components/sensibo/__init__.py @@ -21,7 +21,7 @@ type SensiboConfigEntry = ConfigEntry[SensiboDataUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: SensiboConfigEntry) -> bool: """Set up Sensibo from a config entry.""" - coordinator = SensiboDataUpdateCoordinator(hass) + coordinator = SensiboDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/sensibo/climate.py b/homeassistant/components/sensibo/climate.py index 
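The Sense hunks above convert the power and voltage sensors from dispatcher-driven entities that cached their own state into coordinator-backed entities whose state is a computed native_value property. Below is a minimal, stand-alone sketch of that pattern, not part of the diff; Gateway and PowerSensor are simplified stand-ins for ASyncSenseable and the SenseEntity base class.

from dataclasses import dataclass


@dataclass
class Gateway:
    """Stand-in for the coordinator-refreshed gateway (e.g. ASyncSenseable)."""

    active_power: float = 0.0
    active_solar_power: float = 0.0


class PowerSensor:
    """Stand-in entity: state is derived on demand, not cached in a callback."""

    def __init__(self, gateway: Gateway, variant_id: str) -> None:
        self._gateway = gateway
        self._variant_id = variant_id

    @property
    def native_value(self) -> float:
        # Mirrors the diff: pick solar vs. grid power and round to whole watts.
        return round(
            self._gateway.active_solar_power
            if self._variant_id == "production"  # "production" stands in for PRODUCTION_ID
            else self._gateway.active_power
        )


gateway = Gateway(active_power=123.6)
sensor = PowerSensor(gateway, "usage")
assert sensor.native_value == 124  # refreshing the gateway refreshes the reported state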
390ebc080b8..5bf455c3631 100644 --- a/homeassistant/components/sensibo/climate.py +++ b/homeassistant/components/sensibo/climate.py @@ -3,7 +3,7 @@ from __future__ import annotations from bisect import bisect_left -from typing import TYPE_CHECKING, Any +from typing import Any import voluptuous as vol @@ -22,7 +22,7 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.unit_conversion import TemperatureConverter @@ -108,7 +108,7 @@ AC_STATE_TO_DATA = { } -def _find_valid_target_temp(target: int, valid_targets: list[int]) -> int: +def _find_valid_target_temp(target: float, valid_targets: list[int]) -> int: if target <= valid_targets[0]: return valid_targets[0] if target >= valid_targets[-1]: @@ -194,7 +194,6 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity): _attr_name = None _attr_precision = PRECISION_TENTHS _attr_translation_key = "climate_device" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: SensiboDataUpdateCoordinator, device_id: str @@ -232,10 +231,9 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity): @property def hvac_modes(self) -> list[HVACMode]: """Return the list of available hvac operation modes.""" - if TYPE_CHECKING: - assert self.device_data.hvac_modes - hvac_modes = [SENSIBO_TO_HA[mode] for mode in self.device_data.hvac_modes] - return hvac_modes if hvac_modes else [HVACMode.OFF] + if not self.device_data.hvac_modes: + return [HVACMode.OFF] + return [SENSIBO_TO_HA[mode] for mode in self.device_data.hvac_modes] @property def current_temperature(self) -> float | None: @@ -260,52 +258,42 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity): @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - target_temp: int | None = self.device_data.target_temp - return target_temp + return self.device_data.target_temp @property def target_temperature_step(self) -> float | None: """Return the supported step of target temperature.""" - target_temp_step: int = self.device_data.temp_step - return target_temp_step + return self.device_data.temp_step @property def fan_mode(self) -> str | None: """Return the fan setting.""" - fan_mode: str | None = self.device_data.fan_mode - return fan_mode + return self.device_data.fan_mode @property def fan_modes(self) -> list[str] | None: """Return the list of available fan modes.""" - if self.device_data.fan_modes: - return self.device_data.fan_modes - return None + return self.device_data.fan_modes @property def swing_mode(self) -> str | None: """Return the swing setting.""" - swing_mode: str | None = self.device_data.swing_mode - return swing_mode + return self.device_data.swing_mode @property def swing_modes(self) -> list[str] | None: """Return the list of available swing modes.""" - if self.device_data.swing_modes: - return self.device_data.swing_modes - return None + return self.device_data.swing_modes @property def min_temp(self) -> float: """Return the minimum temperature.""" - min_temp: int = self.device_data.temp_list[0] - return min_temp + return self.device_data.temp_list[0] @property def max_temp(self) -> float: """Return the maximum temperature.""" - max_temp: int = 
self.device_data.temp_list[-1] - return max_temp + return self.device_data.temp_list[-1] @property def available(self) -> bool: @@ -320,12 +308,7 @@ class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity): translation_key="no_target_temperature_in_features", ) - if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="no_target_temperature", - ) - + temperature: float = kwargs[ATTR_TEMPERATURE] if temperature == self.target_temperature: return diff --git a/homeassistant/components/sensibo/config_flow.py b/homeassistant/components/sensibo/config_flow.py index 667f96fe1c2..b8b1029f141 100644 --- a/homeassistant/components/sensibo/config_flow.py +++ b/homeassistant/components/sensibo/config_flow.py @@ -8,8 +8,9 @@ from typing import Any from pysensibo.exceptions import AuthenticationError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant from homeassistant.helpers.selector import TextSelector from .const import DEFAULT_NAME, DOMAIN @@ -22,19 +23,34 @@ DATA_SCHEMA = vol.Schema( ) +async def validate_api( + hass: HomeAssistant, api_key: str +) -> tuple[str | None, dict[str, str]]: + """Validate the API key.""" + errors: dict[str, str] = {} + username: str | None = None + try: + username = await async_validate_api(hass, api_key) + except AuthenticationError: + errors["base"] = "invalid_auth" + except ConnectionError: + errors["base"] = "cannot_connect" + except NoDevicesError: + errors["base"] = "no_devices" + except NoUsernameError: + errors["base"] = "no_username" + return (username, errors) + + class SensiboConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Sensibo integration.""" VERSION = 2 - entry: ConfigEntry | None - async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Sensibo.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -45,24 +61,13 @@ class SensiboConfigFlow(ConfigFlow, domain=DOMAIN): if user_input: api_key = user_input[CONF_API_KEY] - try: - username = await async_validate_api(self.hass, api_key) - except AuthenticationError: - errors["base"] = "invalid_auth" - except ConnectionError: - errors["base"] = "cannot_connect" - except NoDevicesError: - errors["base"] = "no_devices" - except NoUsernameError: - errors["base"] = "no_username" - else: - assert self.entry is not None - - if username == self.entry.unique_id: + username, errors = await validate_api(self.hass, api_key) + if username: + reauth_entry = self._get_reauth_entry() + if username == reauth_entry.unique_id: return self.async_update_reload_and_abort( - self.entry, - data={ - **self.entry.data, + reauth_entry, + data_updates={ CONF_API_KEY: api_key, }, ) @@ -74,6 +79,32 @@ class SensiboConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Reconfigure Sensibo.""" + errors: dict[str, str] = {} + + if user_input: + api_key = user_input[CONF_API_KEY] + username, errors = await validate_api(self.hass, api_key) + if username: + reconfigure_entry = self._get_reconfigure_entry() + if username == 
reconfigure_entry.unique_id: + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={ + CONF_API_KEY: api_key, + }, + ) + errors["base"] = "incorrect_api_key" + + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + errors=errors, + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -83,17 +114,8 @@ class SensiboConfigFlow(ConfigFlow, domain=DOMAIN): if user_input: api_key = user_input[CONF_API_KEY] - try: - username = await async_validate_api(self.hass, api_key) - except AuthenticationError: - errors["base"] = "invalid_auth" - except ConnectionError: - errors["base"] = "cannot_connect" - except NoDevicesError: - errors["base"] = "no_devices" - except NoUsernameError: - errors["base"] = "no_username" - else: + username, errors = await validate_api(self.hass, api_key) + if username: await self.async_set_unique_id(username) self._abort_if_unique_id_configured() diff --git a/homeassistant/components/sensibo/coordinator.py b/homeassistant/components/sensibo/coordinator.py index d654a7cb072..cfd40195de3 100644 --- a/homeassistant/components/sensibo/coordinator.py +++ b/homeassistant/components/sensibo/coordinator.py @@ -29,11 +29,12 @@ class SensiboDataUpdateCoordinator(DataUpdateCoordinator[SensiboData]): config_entry: SensiboConfigEntry - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, config_entry: SensiboConfigEntry) -> None: """Initialize the Sensibo coordinator.""" super().__init__( hass, LOGGER, + config_entry=config_entry, name=DOMAIN, update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), # We don't want an immediate refresh since the device diff --git a/homeassistant/components/sensibo/diagnostics.py b/homeassistant/components/sensibo/diagnostics.py index e08ad9f8b53..f781887ec0a 100644 --- a/homeassistant/components/sensibo/diagnostics.py +++ b/homeassistant/components/sensibo/diagnostics.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.core import HomeAssistant from . 
import SensiboConfigEntry diff --git a/homeassistant/components/sensibo/icons.json b/homeassistant/components/sensibo/icons.json index e26840e48eb..ccab3c198d2 100644 --- a/homeassistant/components/sensibo/icons.json +++ b/homeassistant/components/sensibo/icons.json @@ -45,10 +45,20 @@ } }, "services": { - "assume_state": "mdi:shape-outline", - "enable_timer": "mdi:timer-play", - "enable_pure_boost": "mdi:air-filter", - "full_state": "mdi:shape", - "enable_climate_react": "mdi:wizard-hat" + "assume_state": { + "service": "mdi:shape-outline" + }, + "enable_timer": { + "service": "mdi:timer-play" + }, + "enable_pure_boost": { + "service": "mdi:air-filter" + }, + "full_state": { + "service": "mdi:shape" + }, + "enable_climate_react": { + "service": "mdi:wizard-hat" + } } } diff --git a/homeassistant/components/sensibo/manifest.json b/homeassistant/components/sensibo/manifest.json index 610695aaf7b..e6398c5076e 100644 --- a/homeassistant/components/sensibo/manifest.json +++ b/homeassistant/components/sensibo/manifest.json @@ -14,6 +14,5 @@ }, "iot_class": "cloud_polling", "loggers": ["pysensibo"], - "quality_scale": "platinum", "requirements": ["pysensibo==1.1.0"] } diff --git a/homeassistant/components/sensibo/sensor.py b/homeassistant/components/sensibo/sensor.py index a6a70ea6c49..b395f8eb1ee 100644 --- a/homeassistant/components/sensibo/sensor.py +++ b/homeassistant/components/sensibo/sensor.py @@ -178,6 +178,7 @@ AIRQ_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = ( value_fn=lambda data: data.co2, extra_fn=None, ), + *DEVICE_SENSOR_TYPES, ) ELEMENT_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = ( diff --git a/homeassistant/components/sensibo/strings.json b/homeassistant/components/sensibo/strings.json index 60a32028017..302e34bb5aa 100644 --- a/homeassistant/components/sensibo/strings.json +++ b/homeassistant/components/sensibo/strings.json @@ -2,7 +2,8 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -27,6 +28,14 @@ "data_description": { "api_key": "[%key:component::sensibo::config::step::user::data_description::api_key%]" } + }, + "reconfigure": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "api_key": "[%key:component::sensibo::config::step::user::data_description::api_key%]" + } } } }, @@ -491,9 +500,6 @@ "no_target_temperature_in_features": { "message": "Current mode doesn't support setting target temperature" }, - "no_target_temperature": { - "message": "No target temperature provided" - }, "no_fan_level_in_features": { "message": "Current mode doesn't support setting fan level" }, diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index e7f4b00fd77..2933d779b4b 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -8,41 +8,14 @@ from contextlib import suppress from dataclasses import dataclass from datetime import UTC, date, datetime, timedelta from decimal import Decimal, InvalidOperation as DecimalInvalidOperation -from functools import cached_property, partial import logging from math 
import ceil, floor, isfinite, log10 from typing import Any, Final, Self, cast, final, override +from propcache import cached_property + from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( # noqa: F401 - _DEPRECATED_DEVICE_CLASS_AQI, - _DEPRECATED_DEVICE_CLASS_BATTERY, - _DEPRECATED_DEVICE_CLASS_CO, - _DEPRECATED_DEVICE_CLASS_CO2, - _DEPRECATED_DEVICE_CLASS_CURRENT, - _DEPRECATED_DEVICE_CLASS_DATE, - _DEPRECATED_DEVICE_CLASS_ENERGY, - _DEPRECATED_DEVICE_CLASS_FREQUENCY, - _DEPRECATED_DEVICE_CLASS_GAS, - _DEPRECATED_DEVICE_CLASS_HUMIDITY, - _DEPRECATED_DEVICE_CLASS_ILLUMINANCE, - _DEPRECATED_DEVICE_CLASS_MONETARY, - _DEPRECATED_DEVICE_CLASS_NITROGEN_DIOXIDE, - _DEPRECATED_DEVICE_CLASS_NITROGEN_MONOXIDE, - _DEPRECATED_DEVICE_CLASS_NITROUS_OXIDE, - _DEPRECATED_DEVICE_CLASS_OZONE, - _DEPRECATED_DEVICE_CLASS_PM1, - _DEPRECATED_DEVICE_CLASS_PM10, - _DEPRECATED_DEVICE_CLASS_PM25, - _DEPRECATED_DEVICE_CLASS_POWER, - _DEPRECATED_DEVICE_CLASS_POWER_FACTOR, - _DEPRECATED_DEVICE_CLASS_PRESSURE, - _DEPRECATED_DEVICE_CLASS_SIGNAL_STRENGTH, - _DEPRECATED_DEVICE_CLASS_SULPHUR_DIOXIDE, - _DEPRECATED_DEVICE_CLASS_TEMPERATURE, - _DEPRECATED_DEVICE_CLASS_TIMESTAMP, - _DEPRECATED_DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS, - _DEPRECATED_DEVICE_CLASS_VOLTAGE, ATTR_UNIT_OF_MEASUREMENT, CONF_UNIT_OF_MEASUREMENT, EntityCategory, @@ -51,11 +24,6 @@ from homeassistant.const import ( # noqa: F401 from homeassistant.core import HomeAssistant, State, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_registry as er -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_platform import EntityPlatform @@ -63,11 +31,9 @@ from homeassistant.helpers.restore_state import ExtraStoredData, RestoreEntity from homeassistant.helpers.typing import UNDEFINED, ConfigType, StateType, UndefinedType from homeassistant.util import dt as dt_util from homeassistant.util.enum import try_parse_enum +from homeassistant.util.hass_dict import HassKey from .const import ( # noqa: F401 - _DEPRECATED_STATE_CLASS_MEASUREMENT, - _DEPRECATED_STATE_CLASS_TOTAL, - _DEPRECATED_STATE_CLASS_TOTAL_INCREASING, ATTR_LAST_RESET, ATTR_OPTIONS, ATTR_STATE_CLASS, @@ -88,6 +54,7 @@ from .websocket_api import async_setup as async_setup_ws_api _LOGGER: Final = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[SensorEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT: Final = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -115,7 +82,7 @@ __all__ = [ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for sensors.""" - component = hass.data[DOMAIN] = EntityComponent[SensorEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[SensorEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -126,14 +93,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[SensorEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def 
async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[SensorEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class SensorEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -529,7 +494,20 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ): return self.hass.config.units.temperature_unit - # Fourth priority: Native unit + # Fourth priority: Unit translation + if (translation_key := self._unit_of_measurement_translation_key) and ( + unit_of_measurement + := self.platform.default_language_platform_translations.get(translation_key) + ): + if native_unit_of_measurement is not None: + raise ValueError( + f"Sensor {type(self)} from integration '{self.platform.platform_name}' " + f"has a translation key for unit_of_measurement '{unit_of_measurement}', " + f"but also has a native_unit_of_measurement '{native_unit_of_measurement}'" + ) + return unit_of_measurement + + # Lowest priority: Native unit return native_unit_of_measurement @final @@ -964,13 +942,3 @@ def async_rounded_state(hass: HomeAssistant, entity_id: str, state: State) -> st value = f"{numerical_value:z.{precision}f}" return value - - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 8f63e346caf..2fb563051a9 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -3,7 +3,6 @@ from __future__ import annotations from enum import StrEnum -from functools import partial from typing import Final import voluptuous as vol @@ -17,6 +16,8 @@ from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, + UnitOfArea, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -39,14 +40,10 @@ from homeassistant.const import ( UnitOfVolumeFlowRate, UnitOfVolumetricFlux, ) -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.util.unit_conversion import ( + AreaConverter, BaseUnitConverter, + BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -115,6 +112,12 @@ class SensorDeviceClass(StrEnum): Unit of measurement: `None` """ + AREA = "area" + """Area + + Unit of measurement: `UnitOfArea` units + """ + ATMOSPHERIC_PRESSURE = "atmospheric_pressure" """Atmospheric pressure. @@ -127,6 +130,12 @@ class SensorDeviceClass(StrEnum): Unit of measurement: `%` """ + BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" + """Blood glucose concentration. + + Unit of measurement: `mg/dL`, `mmol/L` + """ + CO = "carbon_monoxide" """Carbon Monoxide gas concentration. 
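The sensor/__init__.py hunk above inserts a unit-translation step into SensorEntity's unit resolution and raises if a translated unit collides with a native unit. The sketch below compresses only the steps visible in that hunk into one plain function; higher-priority sources that come earlier in the real chain are assumed to have been checked already, and the argument names are illustrative.

def resolve_unit(
    is_temperature: bool,
    system_temperature_unit: str,
    translated_unit: str | None,
    native_unit: str | None,
) -> str | None:
    """Sketch of the tail of the unit-resolution chain shown above."""
    # Temperature sensors follow the unit system configured in Home Assistant.
    if is_temperature:
        return system_temperature_unit
    # A unit coming from translations must not clash with a declared native unit.
    if translated_unit is not None:
        if native_unit is not None:
            raise ValueError(
                "translated unit_of_measurement conflicts with native_unit_of_measurement"
            )
        return translated_unit
    # Lowest priority: whatever the integration reports natively.
    return native_unit


assert resolve_unit(False, "°C", None, "W") == "W"
assert resolve_unit(False, "°C", "ppm", None) == "ppm"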
@@ -182,7 +191,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring energy consumption, for example electric energy consumption. - Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ ENERGY_STORAGE = "energy_storage" @@ -191,7 +200,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ FREQUENCY = "frequency" @@ -299,7 +308,7 @@ class SensorDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW` + Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` """ PRECIPITATION = "precipitation" @@ -350,8 +359,8 @@ class SensorDeviceClass(StrEnum): """Generic speed. Unit of measurement: `SPEED_*` units or `UnitOfVolumetricFlux` - - SI /metric: `mm/d`, `mm/h`, `m/s`, `km/h` - - USCS / imperial: `in/d`, `in/h`, `ft/s`, `mph` + - SI /metric: `mm/d`, `mm/h`, `m/s`, `km/h`, `mm/s` + - USCS / imperial: `in/d`, `in/h`, `in/s`, `ft/s`, `mph` - Nautical: `kn` - Beaufort: `Beaufort` """ @@ -383,7 +392,7 @@ class SensorDeviceClass(StrEnum): VOLTAGE = "voltage" """Voltage. - Unit of measurement: `V`, `mV` + Unit of measurement: `V`, `mV`, `µV` """ VOLUME = "volume" @@ -411,7 +420,7 @@ class SensorDeviceClass(StrEnum): """Generic flow rate Unit of measurement: UnitOfVolumeFlowRate - - SI / metric: `m³/h`, `L/min` + - SI / metric: `m³/h`, `L/min`, `mL/s` - USCS / imperial: `ft³/min`, `gal/min` """ @@ -478,21 +487,12 @@ class SensorStateClass(StrEnum): STATE_CLASSES_SCHEMA: Final = vol.All(vol.Lower, vol.Coerce(SensorStateClass)) -# STATE_CLASS* is deprecated as of 2021.12 -# use the SensorStateClass enum instead. 
-_DEPRECATED_STATE_CLASS_MEASUREMENT: Final = DeprecatedConstantEnum( - SensorStateClass.MEASUREMENT, "2025.1" -) -_DEPRECATED_STATE_CLASS_TOTAL: Final = DeprecatedConstantEnum( - SensorStateClass.TOTAL, "2025.1" -) -_DEPRECATED_STATE_CLASS_TOTAL_INCREASING: Final = DeprecatedConstantEnum( - SensorStateClass.TOTAL_INCREASING, "2025.1" -) STATE_CLASSES: Final[list[str]] = [cls.value for cls in SensorStateClass] UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] = { + SensorDeviceClass.AREA: AreaConverter, SensorDeviceClass.ATMOSPHERIC_PRESSURE: PressureConverter, + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: BloodGlucoseConcentrationConverter, SensorDeviceClass.CONDUCTIVITY: ConductivityConverter, SensorDeviceClass.CURRENT: ElectricCurrentConverter, SensorDeviceClass.DATA_RATE: DataRateConverter, @@ -522,8 +522,10 @@ UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] = DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { SensorDeviceClass.APPARENT_POWER: set(UnitOfApparentPower), SensorDeviceClass.AQI: {None}, + SensorDeviceClass.AREA: set(UnitOfArea), SensorDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure), SensorDeviceClass.BATTERY: {PERCENTAGE}, + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration), SensorDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION}, SensorDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION}, SensorDeviceClass.CONDUCTIVITY: set(UnitOfConductivity), @@ -559,7 +561,13 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { SensorDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, SensorDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, SensorDeviceClass.POWER_FACTOR: {PERCENTAGE, None}, - SensorDeviceClass.POWER: {UnitOfPower.WATT, UnitOfPower.KILO_WATT}, + SensorDeviceClass.POWER: { + UnitOfPower.WATT, + UnitOfPower.KILO_WATT, + UnitOfPower.MEGA_WATT, + UnitOfPower.GIGA_WATT, + UnitOfPower.TERA_WATT, + }, SensorDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), SensorDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), SensorDeviceClass.PRESSURE: set(UnitOfPressure), @@ -597,8 +605,10 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = { SensorDeviceClass.APPARENT_POWER: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.AQI: {SensorStateClass.MEASUREMENT}, + SensorDeviceClass.AREA: set(SensorStateClass), SensorDeviceClass.ATMOSPHERIC_PRESSURE: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.BATTERY: {SensorStateClass.MEASUREMENT}, + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.CO: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.CO2: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.CONDUCTIVITY: {SensorStateClass.MEASUREMENT}, @@ -661,10 +671,3 @@ DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = { }, SensorDeviceClass.WIND_SPEED: {SensorStateClass.MEASUREMENT}, } - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/sensor/device_condition.py b/homeassistant/components/sensor/device_condition.py index 
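The const.py changes above register the new AREA and BLOOD_GLUCOSE_CONCENTRATION device classes in UNIT_CONVERTERS and DEVICE_CLASS_UNITS and widen the allowed POWER units. The following rough sketch shows how such a table is typically consulted; the dictionary uses plain strings and an abbreviated unit list as stand-ins for the real enums.

DEVICE_CLASS_UNITS = {
    "power": {"W", "kW", "MW", "GW", "TW"},
    "blood_glucose_concentration": {"mg/dL", "mmol/L"},
    "area": {"m²", "km²", "ft²", "ac", "ha"},  # abbreviated stand-in list
}


def unit_is_valid(device_class: str, unit: str) -> bool:
    """Return True when the unit is allowed for the device class (unknown classes pass)."""
    allowed = DEVICE_CLASS_UNITS.get(device_class)
    return allowed is None or unit in allowed


assert unit_is_valid("power", "GW")
assert not unit_is_valid("blood_glucose_concentration", "W")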
21258db2ac5..fc25dce18fc 100644 --- a/homeassistant/components/sensor/device_condition.py +++ b/homeassistant/components/sensor/device_condition.py @@ -5,10 +5,8 @@ from __future__ import annotations import voluptuous as vol from homeassistant.components.device_automation import ( - async_get_entity_registry_entry_or_raise, -) -from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, + async_get_entity_registry_entry_or_raise, ) from homeassistant.const import ( CONF_ABOVE, @@ -37,8 +35,10 @@ DEVICE_CLASS_NONE = "none" CONF_IS_APPARENT_POWER = "is_apparent_power" CONF_IS_AQI = "is_aqi" +CONF_IS_AREA = "is_area" CONF_IS_ATMOSPHERIC_PRESSURE = "is_atmospheric_pressure" CONF_IS_BATTERY_LEVEL = "is_battery_level" +CONF_IS_BLOOD_GLUCOSE_CONCENTRATION = "is_blood_glucose_concentration" CONF_IS_CO = "is_carbon_monoxide" CONF_IS_CO2 = "is_carbon_dioxide" CONF_IS_CONDUCTIVITY = "is_conductivity" @@ -87,8 +87,12 @@ CONF_IS_WIND_SPEED = "is_wind_speed" ENTITY_CONDITIONS = { SensorDeviceClass.APPARENT_POWER: [{CONF_TYPE: CONF_IS_APPARENT_POWER}], SensorDeviceClass.AQI: [{CONF_TYPE: CONF_IS_AQI}], + SensorDeviceClass.AREA: [{CONF_TYPE: CONF_IS_AREA}], SensorDeviceClass.ATMOSPHERIC_PRESSURE: [{CONF_TYPE: CONF_IS_ATMOSPHERIC_PRESSURE}], SensorDeviceClass.BATTERY: [{CONF_TYPE: CONF_IS_BATTERY_LEVEL}], + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: [ + {CONF_TYPE: CONF_IS_BLOOD_GLUCOSE_CONCENTRATION} + ], SensorDeviceClass.CO: [{CONF_TYPE: CONF_IS_CO}], SensorDeviceClass.CO2: [{CONF_TYPE: CONF_IS_CO2}], SensorDeviceClass.CONDUCTIVITY: [{CONF_TYPE: CONF_IS_CONDUCTIVITY}], @@ -151,8 +155,10 @@ CONDITION_SCHEMA = vol.All( [ CONF_IS_APPARENT_POWER, CONF_IS_AQI, + CONF_IS_AREA, CONF_IS_ATMOSPHERIC_PRESSURE, CONF_IS_BATTERY_LEVEL, + CONF_IS_BLOOD_GLUCOSE_CONCENTRATION, CONF_IS_CO, CONF_IS_CO2, CONF_IS_CONDUCTIVITY, diff --git a/homeassistant/components/sensor/device_trigger.py b/homeassistant/components/sensor/device_trigger.py index 0ffc42127bc..d75b3aa6e41 100644 --- a/homeassistant/components/sensor/device_trigger.py +++ b/homeassistant/components/sensor/device_trigger.py @@ -4,10 +4,8 @@ import voluptuous as vol from homeassistant.components.device_automation import ( DEVICE_TRIGGER_BASE_SCHEMA, - async_get_entity_registry_entry_or_raise, -) -from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, + async_get_entity_registry_entry_or_raise, ) from homeassistant.components.homeassistant.triggers import ( numeric_state as numeric_state_trigger, @@ -36,8 +34,10 @@ DEVICE_CLASS_NONE = "none" CONF_APPARENT_POWER = "apparent_power" CONF_AQI = "aqi" +CONF_AREA = "area" CONF_ATMOSPHERIC_PRESSURE = "atmospheric_pressure" CONF_BATTERY_LEVEL = "battery_level" +CONF_BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" CONF_CO = "carbon_monoxide" CONF_CO2 = "carbon_dioxide" CONF_CONDUCTIVITY = "conductivity" @@ -86,8 +86,12 @@ CONF_WIND_SPEED = "wind_speed" ENTITY_TRIGGERS = { SensorDeviceClass.APPARENT_POWER: [{CONF_TYPE: CONF_APPARENT_POWER}], SensorDeviceClass.AQI: [{CONF_TYPE: CONF_AQI}], + SensorDeviceClass.AREA: [{CONF_TYPE: CONF_AREA}], SensorDeviceClass.ATMOSPHERIC_PRESSURE: [{CONF_TYPE: CONF_ATMOSPHERIC_PRESSURE}], SensorDeviceClass.BATTERY: [{CONF_TYPE: CONF_BATTERY_LEVEL}], + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: [ + {CONF_TYPE: CONF_BLOOD_GLUCOSE_CONCENTRATION} + ], SensorDeviceClass.CO: [{CONF_TYPE: CONF_CO}], SensorDeviceClass.CO2: [{CONF_TYPE: CONF_CO2}], SensorDeviceClass.CONDUCTIVITY: [{CONF_TYPE: 
CONF_CONDUCTIVITY}], @@ -151,8 +155,10 @@ TRIGGER_SCHEMA = vol.All( [ CONF_APPARENT_POWER, CONF_AQI, + CONF_AREA, CONF_ATMOSPHERIC_PRESSURE, CONF_BATTERY_LEVEL, + CONF_BLOOD_GLUCOSE_CONCENTRATION, CONF_CO, CONF_CO2, CONF_CONDUCTIVITY, diff --git a/homeassistant/components/sensor/icons.json b/homeassistant/components/sensor/icons.json index 6132fcbc1e9..5f770765ee3 100644 --- a/homeassistant/components/sensor/icons.json +++ b/homeassistant/components/sensor/icons.json @@ -9,9 +9,15 @@ "aqi": { "default": "mdi:air-filter" }, + "area": { + "default": "mdi:texture-box" + }, "atmospheric_pressure": { "default": "mdi:thermometer-lines" }, + "blood_glucose_concentration": { + "default": "mdi:spoon-sugar" + }, "carbon_dioxide": { "default": "mdi:molecule-co2" }, diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index be4dce28546..675d24b9240 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Callable, Iterable +from contextlib import suppress import datetime import itertools import logging @@ -30,12 +31,16 @@ from homeassistant.const import ( UnitOfSoundPressure, UnitOfVolume, ) -from homeassistant.core import HomeAssistant, State, split_entity_id +from homeassistant.core import HomeAssistant, State, callback, split_entity_id from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.entity import entity_sources +from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.loader import async_suggest_report_issue from homeassistant.util import dt as dt_util +from homeassistant.util.async_ import run_callback_threadsafe from homeassistant.util.enum import try_parse_enum +from homeassistant.util.hass_dict import HassKey from .const import ( ATTR_LAST_RESET, @@ -62,14 +67,15 @@ EQUIVALENT_UNITS = { "ft³/m": UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, } + # Keep track of entities for which a warning about decreasing value has been logged -SEEN_DIP = "sensor_seen_total_increasing_dip" -WARN_DIP = "sensor_warn_total_increasing_dip" +SEEN_DIP: HassKey[set[str]] = HassKey(f"{DOMAIN}_seen_total_increasing_dip") +WARN_DIP: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_total_increasing_dip") # Keep track of entities for which a warning about negative value has been logged -WARN_NEGATIVE = "sensor_warn_total_increasing_negative" +WARN_NEGATIVE: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_total_increasing_negative") # Keep track of entities for which a warning about unsupported unit has been logged -WARN_UNSUPPORTED_UNIT = "sensor_warn_unsupported_unit" -WARN_UNSTABLE_UNIT = "sensor_warn_unstable_unit" +WARN_UNSUPPORTED_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unsupported_unit") +WARN_UNSTABLE_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unstable_unit") # Link to dev statistics where issues around LTS can be fixed LINK_DEV_STATISTICS = "https://my.home-assistant.io/redirect/developer_statistics" @@ -174,6 +180,14 @@ def _entity_history_to_float_and_state( return float_states +def _is_numeric(state: State) -> bool: + """Return if the state is numeric.""" + with suppress(ValueError, TypeError): + if (num_state := float(state.state)) is not None and math.isfinite(num_state): + return True + return False + + def _normalize_states( hass: HomeAssistant, old_metadatas: dict[str, 
tuple[int, StatisticMetaData]], @@ -220,13 +234,13 @@ def _normalize_states( LINK_DEV_STATISTICS, ) return None, [] - state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) + return state_unit, fstates converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER[statistics_unit] valid_fstates: list[tuple[float, State]] = [] convert: Callable[[float], float] | None = None - last_unit: str | None | object = object() + last_unit: str | None | UndefinedType = UNDEFINED valid_units = converter.VALID_UNITS for fstate, state in fstates: @@ -669,6 +683,118 @@ def list_statistic_ids( return result +@callback +def _update_issues( + report_issue: Callable[[str, str, dict[str, Any]], None], + sensor_states: list[State], + metadatas: dict[str, tuple[int, StatisticMetaData]], +) -> None: + """Update repair issues.""" + for state in sensor_states: + entity_id = state.entity_id + numeric = _is_numeric(state) + state_class = try_parse_enum( + SensorStateClass, state.attributes.get(ATTR_STATE_CLASS) + ) + state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + + if metadata := metadatas.get(entity_id): + if numeric and state_class is None: + # Sensor no longer has a valid state class + report_issue( + "state_class_removed", + entity_id, + {"statistic_id": entity_id}, + ) + + metadata_unit = metadata[1]["unit_of_measurement"] + converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER.get(metadata_unit) + if not converter: + if numeric and not _equivalent_units({state_unit, metadata_unit}): + # The unit has changed, and it's not possible to convert + report_issue( + "units_changed", + entity_id, + { + "statistic_id": entity_id, + "state_unit": state_unit, + "metadata_unit": metadata_unit, + "supported_unit": metadata_unit, + }, + ) + elif numeric and state_unit not in converter.VALID_UNITS: + # The state unit can't be converted to the unit in metadata + valid_units = (unit or "" for unit in converter.VALID_UNITS) + valid_units_str = ", ".join(sorted(valid_units)) + report_issue( + "units_changed", + entity_id, + { + "statistic_id": entity_id, + "state_unit": state_unit, + "metadata_unit": metadata_unit, + "supported_unit": valid_units_str, + }, + ) + + +def update_statistics_issues( + hass: HomeAssistant, + session: Session, +) -> None: + """Validate statistics.""" + instance = get_instance(hass) + sensor_states = hass.states.all(DOMAIN) + metadatas = statistics.get_metadata_with_session( + instance, session, statistic_source=RECORDER_DOMAIN + ) + + @callback + def get_sensor_statistics_issues(hass: HomeAssistant) -> set[str]: + """Return a list of statistics issues.""" + issues = set() + issue_registry = ir.async_get(hass) + for issue in issue_registry.issues.values(): + if ( + issue.domain != DOMAIN + or not (issue_data := issue.data) + or issue_data.get("issue_type") + not in ("state_class_removed", "units_changed") + ): + continue + issues.add(issue.issue_id) + return issues + + issues = run_callback_threadsafe( + hass.loop, get_sensor_statistics_issues, hass + ).result() + + def create_issue_registry_issue( + issue_type: str, statistic_id: str, data: dict[str, Any] + ) -> None: + """Create an issue registry issue.""" + issue_id = f"{issue_type}_{statistic_id}" + issues.discard(issue_id) + ir.create_issue( + hass, + DOMAIN, + issue_id, + data=data | {"issue_type": issue_type}, + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key=issue_type, + translation_placeholders=data, + ) + + _update_issues( + create_issue_registry_issue, + sensor_states, + metadatas, + ) + for 
issue_id in issues: + hass.loop.call_soon_threadsafe(ir.async_delete_issue, hass, DOMAIN, issue_id) + + def validate_statistics( hass: HomeAssistant, ) -> dict[str, list[statistics.ValidationIssue]]: @@ -682,14 +808,27 @@ def validate_statistics( instance = get_instance(hass) entity_filter = instance.entity_filter + def create_statistic_validation_issue( + issue_type: str, statistic_id: str, data: dict[str, Any] + ) -> None: + """Create a statistic validation issue.""" + validation_result[statistic_id].append( + statistics.ValidationIssue(issue_type, data) + ) + + _update_issues( + create_statistic_validation_issue, + sensor_states, + metadatas, + ) + for state in sensor_states: entity_id = state.entity_id state_class = try_parse_enum( SensorStateClass, state.attributes.get(ATTR_STATE_CLASS) ) - state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - if metadata := metadatas.get(entity_id): + if entity_id in metadatas: if entity_filter and not entity_filter(state.entity_id): # Sensor was previously recorded, but no longer is validation_result[entity_id].append( @@ -698,47 +837,6 @@ def validate_statistics( {"statistic_id": entity_id}, ) ) - - if state_class is None: - # Sensor no longer has a valid state class - validation_result[entity_id].append( - statistics.ValidationIssue( - "unsupported_state_class", - {"statistic_id": entity_id, "state_class": state_class}, - ) - ) - - metadata_unit = metadata[1]["unit_of_measurement"] - converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER.get(metadata_unit) - if not converter: - if not _equivalent_units({state_unit, metadata_unit}): - # The unit has changed, and it's not possible to convert - validation_result[entity_id].append( - statistics.ValidationIssue( - "units_changed", - { - "statistic_id": entity_id, - "state_unit": state_unit, - "metadata_unit": metadata_unit, - "supported_unit": metadata_unit, - }, - ) - ) - elif state_unit not in converter.VALID_UNITS: - # The state unit can't be converted to the unit in metadata - valid_units = (unit or "" for unit in converter.VALID_UNITS) - valid_units_str = ", ".join(sorted(valid_units)) - validation_result[entity_id].append( - statistics.ValidationIssue( - "units_changed", - { - "statistic_id": entity_id, - "state_unit": state_unit, - "metadata_unit": metadata_unit, - "supported_unit": valid_units_str, - }, - ) - ) elif state_class is not None: if entity_filter and not entity_filter(state.entity_id): # Sensor is not recorded diff --git a/homeassistant/components/sensor/strings.json b/homeassistant/components/sensor/strings.json index fc85f4b05a9..0bc370398b5 100644 --- a/homeassistant/components/sensor/strings.json +++ b/homeassistant/components/sensor/strings.json @@ -4,8 +4,10 @@ "condition_type": { "is_apparent_power": "Current {entity_name} apparent power", "is_aqi": "Current {entity_name} air quality index", + "is_area": "Current {entity_name} area", "is_atmospheric_pressure": "Current {entity_name} atmospheric pressure", "is_battery_level": "Current {entity_name} battery level", + "is_blood_glucose_concentration": "Current {entity_name} blood glucose concentration", "is_carbon_monoxide": "Current {entity_name} carbon monoxide concentration level", "is_carbon_dioxide": "Current {entity_name} carbon dioxide concentration level", "is_conductivity": "Current {entity_name} conductivity", @@ -54,8 +56,10 @@ "trigger_type": { "apparent_power": "{entity_name} apparent power changes", "aqi": "{entity_name} air quality index changes", + "area": "{entity_name} area changes", 
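The recorder.py changes above factor the statistics checks into _update_issues, which receives a report_issue callback so the same detection code can feed both the repairs issue registry and the websocket validation result. Here is a small stand-alone illustration of that callback-injection shape; the entity ID, issue type, and data dict are made up for the example.

from collections import defaultdict
from collections.abc import Callable
from typing import Any


def update_issues(report_issue: Callable[[str, str, dict[str, Any]], None]) -> None:
    """Stand-in for _update_issues: detect problems, let the caller decide how to report."""
    # Pretend detection found a sensor that lost its state class.
    report_issue(
        "state_class_removed", "sensor.energy", {"statistic_id": "sensor.energy"}
    )


# Consumer 1: collect websocket-style validation results keyed by statistic id.
validation_result: dict[str, list[tuple[str, dict[str, Any]]]] = defaultdict(list)
update_issues(lambda kind, stat_id, data: validation_result[stat_id].append((kind, data)))

# Consumer 2: create repair issues instead (ir.create_issue in the real code).
update_issues(lambda kind, stat_id, data: print(f"repair issue {kind}_{stat_id}: {data}"))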
"atmospheric_pressure": "{entity_name} atmospheric pressure changes", "battery_level": "{entity_name} battery level changes", + "blood_glucose_concentration": "{entity_name} blood glucose concentration changes", "carbon_monoxide": "{entity_name} carbon monoxide concentration changes", "carbon_dioxide": "{entity_name} carbon dioxide concentration changes", "conductivity": "{entity_name} conductivity changes", @@ -143,12 +147,18 @@ "aqi": { "name": "Air quality index" }, + "area": { + "name": "Area" + }, "atmospheric_pressure": { "name": "Atmospheric pressure" }, "battery": { "name": "Battery" }, + "blood_glucose_concentration": { + "name": "Blood glucose concentration" + }, "carbon_monoxide": { "name": "Carbon monoxide" }, @@ -287,5 +297,15 @@ "wind_speed": { "name": "Wind speed" } + }, + "issues": { + "state_class_removed": { + "title": "{statistic_id} no longer has a state class", + "description": "" + }, + "units_changed": { + "title": "The unit of {statistic_id} has changed", + "description": "" + } } } diff --git a/homeassistant/components/sensor/websocket_api.py b/homeassistant/components/sensor/websocket_api.py index 2110ccc7253..92df6fa69e9 100644 --- a/homeassistant/components/sensor/websocket_api.py +++ b/homeassistant/components/sensor/websocket_api.py @@ -16,6 +16,8 @@ from .const import ( SensorDeviceClass, ) +_NUMERIC_DEVICE_CLASSES = list(set(SensorDeviceClass) - NON_NUMERIC_DEVICE_CLASSES) + @callback def async_setup(hass: HomeAssistant) -> None: @@ -55,7 +57,6 @@ def ws_numeric_device_classes( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Return numeric sensor device classes.""" - numeric_device_classes = set(SensorDeviceClass) - NON_NUMERIC_DEVICE_CLASSES connection.send_result( - msg["id"], {"numeric_device_classes": list(numeric_device_classes)} + msg["id"], {"numeric_device_classes": _NUMERIC_DEVICE_CLASSES} ) diff --git a/homeassistant/components/sensorpush/manifest.json b/homeassistant/components/sensorpush/manifest.json index 0222a1c2884..7729a67d7a1 100644 --- a/homeassistant/components/sensorpush/manifest.json +++ b/homeassistant/components/sensorpush/manifest.json @@ -17,5 +17,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/sensorpush", "iot_class": "local_push", - "requirements": ["sensorpush-ble==1.6.2"] + "requirements": ["sensorpush-ble==1.7.1"] } diff --git a/homeassistant/components/sensoterra/__init__.py b/homeassistant/components/sensoterra/__init__.py new file mode 100644 index 00000000000..b1428351f09 --- /dev/null +++ b/homeassistant/components/sensoterra/__init__.py @@ -0,0 +1,38 @@ +"""The Sensoterra integration.""" + +from __future__ import annotations + +from sensoterra.customerapi import CustomerApi + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_TOKEN, Platform +from homeassistant.core import HomeAssistant + +from .coordinator import SensoterraCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type SensoterraConfigEntry = ConfigEntry[SensoterraCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: SensoterraConfigEntry) -> bool: + """Set up Sensoterra platform based on a configuration entry.""" + + # Create a coordinator and add an API instance to it. Store the coordinator + # in the configuration entry. 
+ api = CustomerApi() + api.set_language(hass.config.language) + api.set_token(entry.data[CONF_TOKEN]) + + coordinator = SensoterraCoordinator(hass, api) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: SensoterraConfigEntry) -> bool: + """Unload the configuration entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/sensoterra/config_flow.py b/homeassistant/components/sensoterra/config_flow.py new file mode 100644 index 00000000000..c98710dfa7d --- /dev/null +++ b/homeassistant/components/sensoterra/config_flow.py @@ -0,0 +1,90 @@ +"""Config flow for Sensoterra integration.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Any + +from jwt import DecodeError, decode +from sensoterra.customerapi import ( + CustomerApi, + InvalidAuth as StInvalidAuth, + Timeout as StTimeout, +) +import voluptuous as vol + +from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TOKEN +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN, LOGGER, TOKEN_EXPIRATION_DAYS + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig(type=TextSelectorType.EMAIL, autocomplete="email") + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } +) + + +class SensoterraConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Sensoterra.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Create hub entry based on config flow.""" + errors: dict[str, str] = {} + + if user_input is not None: + api = CustomerApi(user_input[CONF_EMAIL], user_input[CONF_PASSWORD]) + # We need a unique tag per HA instance + uuid = self.hass.data["core.uuid"] + expiration = datetime.now() + timedelta(TOKEN_EXPIRATION_DAYS) + + try: + token: str = await api.get_token( + f"Home Assistant {uuid}", "READONLY", expiration + ) + decoded_token = decode( + token, algorithms=["HS256"], options={"verify_signature": False} + ) + + except StInvalidAuth as exp: + LOGGER.error( + "Login attempt with %s: %s", user_input[CONF_EMAIL], exp.message + ) + errors["base"] = "invalid_auth" + except StTimeout: + LOGGER.error("Login attempt with %s: time out", user_input[CONF_EMAIL]) + errors["base"] = "cannot_connect" + except DecodeError: + LOGGER.error("Login attempt with %s: bad token", user_input[CONF_EMAIL]) + errors["base"] = "invalid_access_token" + else: + device_unique_id = decoded_token["sub"] + await self.async_set_unique_id(device_unique_id) + self._abort_if_unique_id_configured() + return self.async_create_entry( + title=user_input[CONF_EMAIL], + data={ + CONF_TOKEN: token, + CONF_EMAIL: user_input[CONF_EMAIL], + }, + ) + + return self.async_show_form( + step_id=SOURCE_USER, + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, + ) diff --git a/homeassistant/components/sensoterra/const.py b/homeassistant/components/sensoterra/const.py new file mode 100644 index 00000000000..7c4ccf2944c --- /dev/null +++ b/homeassistant/components/sensoterra/const.py 
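The Sensoterra config flow above requests a long-lived API token and derives the config entry's unique ID from the token's sub claim, decoding it without verifying the signature. The snippet below reproduces just that decoding step with PyJWT; the token is fabricated locally for the example instead of coming from CustomerApi.get_token().

# Requires PyJWT (the `jwt` import used in the config flow above).
from datetime import datetime, timedelta, timezone

import jwt

# Fabricated token, only for demonstration; the real flow receives one from the API.
token = jwt.encode(
    {"sub": "account-1234", "exp": datetime.now(timezone.utc) + timedelta(days=3650)},
    key="server-side-secret",
    algorithm="HS256",
)

# Mirrors the flow: the signature is not verified locally, the claim set is only
# used to derive a stable unique ID for the config entry.
decoded = jwt.decode(token, algorithms=["HS256"], options={"verify_signature": False})
print(decoded["sub"])  # -> account-1234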
@@ -0,0 +1,10 @@ +"""Constants for the Sensoterra integration.""" + +import logging + +DOMAIN = "sensoterra" +SCAN_INTERVAL_MINUTES = 15 +SENSOR_EXPIRATION_DAYS = 2 +TOKEN_EXPIRATION_DAYS = 10 * 365 +CONFIGURATION_URL = "https://monitor.sensoterra.com" +LOGGER: logging.Logger = logging.getLogger(__package__) diff --git a/homeassistant/components/sensoterra/coordinator.py b/homeassistant/components/sensoterra/coordinator.py new file mode 100644 index 00000000000..2dffdceb443 --- /dev/null +++ b/homeassistant/components/sensoterra/coordinator.py @@ -0,0 +1,54 @@ +"""Polling coordinator for the Sensoterra integration.""" + +from collections.abc import Callable +from datetime import timedelta + +from sensoterra.customerapi import ( + CustomerApi, + InvalidAuth as ApiAuthError, + Timeout as ApiTimeout, +) +from sensoterra.probe import Probe, Sensor + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import LOGGER, SCAN_INTERVAL_MINUTES + + +class SensoterraCoordinator(DataUpdateCoordinator[list[Probe]]): + """Sensoterra coordinator.""" + + def __init__(self, hass: HomeAssistant, api: CustomerApi) -> None: + """Initialize Sensoterra coordinator.""" + super().__init__( + hass, + LOGGER, + name="Sensoterra probe", + update_interval=timedelta(minutes=SCAN_INTERVAL_MINUTES), + ) + self.api = api + self.add_devices_callback: Callable[[list[Probe]], None] | None = None + + async def _async_update_data(self) -> list[Probe]: + """Fetch data from Sensoterra Customer API endpoint.""" + try: + probes = await self.api.poll() + except ApiAuthError as err: + raise ConfigEntryError(err) from err + except ApiTimeout as err: + raise UpdateFailed("Timeout communicating with Sensotera API") from err + + if self.add_devices_callback is not None: + self.add_devices_callback(probes) + + return probes + + def get_sensor(self, id: str | None) -> Sensor | None: + """Try to find the sensor in the API result.""" + for probe in self.data: + for sensor in probe.sensors(): + if sensor.id == id: + return sensor + return None diff --git a/homeassistant/components/sensoterra/manifest.json b/homeassistant/components/sensoterra/manifest.json new file mode 100644 index 00000000000..942741fdb2f --- /dev/null +++ b/homeassistant/components/sensoterra/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "sensoterra", + "name": "Sensoterra", + "codeowners": ["@markruys"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/sensoterra", + "integration_type": "hub", + "iot_class": "cloud_polling", + "requirements": ["sensoterra==2.0.1"] +} diff --git a/homeassistant/components/sensoterra/sensor.py b/homeassistant/components/sensoterra/sensor.py new file mode 100644 index 00000000000..7e9f4d0840e --- /dev/null +++ b/homeassistant/components/sensoterra/sensor.py @@ -0,0 +1,172 @@ +"""Sensoterra devices.""" + +from __future__ import annotations + +from datetime import UTC, datetime, timedelta +from enum import StrEnum, auto + +from sensoterra.probe import Probe, Sensor + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + PERCENTAGE, + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + UnitOfTemperature, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from 
homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import SensoterraConfigEntry +from .const import CONFIGURATION_URL, DOMAIN, SENSOR_EXPIRATION_DAYS +from .coordinator import SensoterraCoordinator + + +class ProbeSensorType(StrEnum): + """Generic sensors within a Sensoterra probe.""" + + MOISTURE = auto() + SI = auto() + TEMPERATURE = auto() + BATTERY = auto() + RSSI = auto() + + +SENSORS: dict[ProbeSensorType, SensorEntityDescription] = { + ProbeSensorType.MOISTURE: SensorEntityDescription( + key=ProbeSensorType.MOISTURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + device_class=SensorDeviceClass.MOISTURE, + native_unit_of_measurement=PERCENTAGE, + translation_key="soil_moisture_at_cm", + ), + ProbeSensorType.SI: SensorEntityDescription( + key=ProbeSensorType.SI, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + translation_key="si_at_cm", + ), + ProbeSensorType.TEMPERATURE: SensorEntityDescription( + key=ProbeSensorType.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ProbeSensorType.BATTERY: SensorEntityDescription( + key=ProbeSensorType.BATTERY, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + device_class=SensorDeviceClass.BATTERY, + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + ), + ProbeSensorType.RSSI: SensorEntityDescription( + key=ProbeSensorType.RSSI, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SensoterraConfigEntry, + async_add_devices: AddEntitiesCallback, +) -> None: + """Set up Sensoterra sensor.""" + + coordinator = entry.runtime_data + + @callback + def _async_add_devices(probes: list[Probe]) -> None: + aha = coordinator.async_contexts() + current_sensors = set(aha) + async_add_devices( + SensoterraEntity( + coordinator, + probe, + sensor, + SENSORS[ProbeSensorType[sensor.type]], + ) + for probe in probes + for sensor in probe.sensors() + if sensor.type is not None + and sensor.type.lower() in SENSORS + and sensor.id not in current_sensors + ) + + coordinator.add_devices_callback = _async_add_devices + + _async_add_devices(coordinator.data) + + +class SensoterraEntity(CoordinatorEntity[SensoterraCoordinator], SensorEntity): + """Sensoterra sensor like a soil moisture or temperature sensor.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: SensoterraCoordinator, + probe: Probe, + sensor: Sensor, + entity_description: SensorEntityDescription, + ) -> None: + """Initialize entity.""" + super().__init__(coordinator, context=sensor.id) + + self._sensor_id = sensor.id + self._attr_unique_id = self._sensor_id + self._attr_translation_placeholders = { + "depth": "?" 
if sensor.depth is None else str(sensor.depth) + } + + self.entity_description = entity_description + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, probe.serial)}, + name=probe.name, + model=probe.sku, + manufacturer="Sensoterra", + serial_number=probe.serial, + suggested_area=probe.location, + configuration_url=CONFIGURATION_URL, + ) + + @property + def sensor(self) -> Sensor | None: + """Return the sensor, or None if it doesn't exist.""" + return self.coordinator.get_sensor(self._sensor_id) + + @property + def native_value(self) -> StateType: + """Return the value reported by the sensor.""" + assert self.sensor + return self.sensor.value + + @property + def available(self) -> bool: + """Return True if entity is available.""" + if not super().available or (sensor := self.sensor) is None: + return False + + if sensor.timestamp is None: + return False + + # Expire sensor if no update within the last few days. + expiration = datetime.now(UTC) - timedelta(days=SENSOR_EXPIRATION_DAYS) + return sensor.timestamp >= expiration diff --git a/homeassistant/components/sensoterra/strings.json b/homeassistant/components/sensoterra/strings.json new file mode 100644 index 00000000000..86c4f2c2912 --- /dev/null +++ b/homeassistant/components/sensoterra/strings.json @@ -0,0 +1,38 @@ +{ + "config": { + "step": { + "user": { + "description": "Enter credentials to obtain a token", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "reconfigure": { + "description": "[%key:component::sensoterra::config::step::user::description%]", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + } + }, + "entity": { + "sensor": { + "soil_moisture_at_cm": { + "name": "Soil moisture @ {depth} cm" + }, + "si_at_cm": { + "name": "SI @ {depth} cm" + } + } + } +} diff --git a/homeassistant/components/sentry/config_flow.py b/homeassistant/components/sentry/config_flow.py index 59cd1f3f0e9..2fead7c27cd 100644 --- a/homeassistant/components/sentry/config_flow.py +++ b/homeassistant/components/sentry/config_flow.py @@ -49,7 +49,7 @@ class SentryConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SentryOptionsFlow: """Get the options flow for this handler.""" - return SentryOptionsFlow(config_entry) + return SentryOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -78,10 +78,6 @@ class SentryConfigFlow(ConfigFlow, domain=DOMAIN): class SentryOptionsFlow(OptionsFlow): """Handle Sentry options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Sentry options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/senz/__init__.py b/homeassistant/components/senz/__init__.py index bd4dfae4571..c3238f7355f 100644 --- a/homeassistant/components/senz/__init__.py +++ b/homeassistant/components/senz/__init__.py @@ -60,6 +60,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: 
coordinator: SENZDataUpdateCoordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=account.username, update_interval=UPDATE_INTERVAL, update_method=update_thermostats, diff --git a/homeassistant/components/senz/climate.py b/homeassistant/components/senz/climate.py index 3b834654ca6..d5749a3f040 100644 --- a/homeassistant/components/senz/climate.py +++ b/homeassistant/components/senz/climate.py @@ -46,7 +46,6 @@ class SENZClimate(CoordinatorEntity, ClimateEntity): _attr_min_temp = 5 _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/serial/sensor.py b/homeassistant/components/serial/sensor.py index e7c39d97f6a..a09401473b2 100644 --- a/homeassistant/components/serial/sensor.py +++ b/homeassistant/components/serial/sensor.py @@ -196,7 +196,7 @@ class SerialSensor(SensorEntity): logged_error = True await self._handle_error() else: - _LOGGER.info("Serial device %s connected", device) + _LOGGER.debug("Serial device %s connected", device) while True: try: line = await reader.readline() diff --git a/homeassistant/components/serial_pm/manifest.json b/homeassistant/components/serial_pm/manifest.json index 9b61cb3d20b..25b3e61f93d 100644 --- a/homeassistant/components/serial_pm/manifest.json +++ b/homeassistant/components/serial_pm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/serial_pm", "iot_class": "local_polling", "loggers": ["pmsensor"], + "quality_scale": "legacy", "requirements": ["pmsensor==0.4"] } diff --git a/homeassistant/components/sesame/manifest.json b/homeassistant/components/sesame/manifest.json index d2204629cde..7ed370db082 100644 --- a/homeassistant/components/sesame/manifest.json +++ b/homeassistant/components/sesame/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sesame", "iot_class": "cloud_polling", "loggers": ["pysesame2"], + "quality_scale": "legacy", "requirements": ["pysesame2==1.0.1"] } diff --git a/homeassistant/components/seven_segments/image_processing.py b/homeassistant/components/seven_segments/image_processing.py index 7b41a1702c0..63fd27e0dd0 100644 --- a/homeassistant/components/seven_segments/image_processing.py +++ b/homeassistant/components/seven_segments/image_processing.py @@ -82,7 +82,7 @@ class ImageProcessingSsocr(ImageProcessingEntity): self.filepath = os.path.join( self.hass.config.config_dir, - "ssocr-{}.png".format(self._name.replace(" ", "_")), + f"ssocr-{self._name.replace(' ', '_')}.png", ) crop = [ "crop", diff --git a/homeassistant/components/seven_segments/manifest.json b/homeassistant/components/seven_segments/manifest.json index 2f39644d6d3..bf98140a4d6 100644 --- a/homeassistant/components/seven_segments/manifest.json +++ b/homeassistant/components/seven_segments/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@fabaff"], "documentation": "https://www.home-assistant.io/integrations/seven_segments", "iot_class": "local_polling", - "requirements": ["Pillow==10.4.0"] + "quality_scale": "legacy", + "requirements": ["Pillow==11.0.0"] } diff --git a/homeassistant/components/seventeentrack/__init__.py b/homeassistant/components/seventeentrack/__init__.py index 56d87b1935d..695ca179966 100644 --- a/homeassistant/components/seventeentrack/__init__.py +++ b/homeassistant/components/seventeentrack/__init__.py @@ -1,136 +1,30 @@ """The seventeentrack component.""" -from typing import Final - from pyseventeentrack import Client as 
SeventeenTrackClient from pyseventeentrack.errors import SeventeenTrackError -from pyseventeentrack.package import PACKAGE_STATUS_MAP -import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState -from homeassistant.const import ( - ATTR_FRIENDLY_NAME, - ATTR_LOCATION, - CONF_PASSWORD, - CONF_USERNAME, - Platform, -) -from homeassistant.core import ( - HomeAssistant, - ServiceCall, - ServiceResponse, - SupportsResponse, -) -from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError -from homeassistant.helpers import config_validation as cv, selector +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType -from homeassistant.util import slugify -from .const import ( - ATTR_CONFIG_ENTRY_ID, - ATTR_DESTINATION_COUNTRY, - ATTR_INFO_TEXT, - ATTR_ORIGIN_COUNTRY, - ATTR_PACKAGE_STATE, - ATTR_PACKAGE_TYPE, - ATTR_STATUS, - ATTR_TIMESTAMP, - ATTR_TRACKING_INFO_LANGUAGE, - ATTR_TRACKING_NUMBER, - DOMAIN, - SERVICE_GET_PACKAGES, -) +from .const import DOMAIN from .coordinator import SeventeenTrackCoordinator +from .services import setup_services PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -SERVICE_SCHEMA: Final = vol.Schema( - { - vol.Required(ATTR_CONFIG_ENTRY_ID): selector.ConfigEntrySelector( - { - "integration": DOMAIN, - } - ), - vol.Optional(ATTR_PACKAGE_STATE): selector.SelectSelector( - selector.SelectSelectorConfig( - multiple=True, - options=[ - value.lower().replace(" ", "_") - for value in PACKAGE_STATUS_MAP.values() - ], - mode=selector.SelectSelectorMode.DROPDOWN, - translation_key=ATTR_PACKAGE_STATE, - ) - ), - } -) - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the 17Track component.""" - async def get_packages(call: ServiceCall) -> ServiceResponse: - """Get packages from 17Track.""" - config_entry_id = call.data[ATTR_CONFIG_ENTRY_ID] - package_states = call.data.get(ATTR_PACKAGE_STATE, []) + setup_services(hass) - entry: ConfigEntry | None = hass.config_entries.async_get_entry(config_entry_id) - - if not entry: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="invalid_config_entry", - translation_placeholders={ - "config_entry_id": config_entry_id, - }, - ) - if entry.state != ConfigEntryState.LOADED: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="unloaded_config_entry", - translation_placeholders={ - "config_entry_id": entry.title, - }, - ) - - seventeen_coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][ - config_entry_id - ] - live_packages = sorted( - await seventeen_coordinator.client.profile.packages( - show_archived=seventeen_coordinator.show_archived - ) - ) - - return { - "packages": [ - { - ATTR_DESTINATION_COUNTRY: package.destination_country, - ATTR_ORIGIN_COUNTRY: package.origin_country, - ATTR_PACKAGE_TYPE: package.package_type, - ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, - ATTR_TRACKING_NUMBER: package.tracking_number, - ATTR_LOCATION: package.location, - ATTR_STATUS: package.status, - ATTR_TIMESTAMP: package.timestamp, - ATTR_INFO_TEXT: package.info_text, - ATTR_FRIENDLY_NAME: 
package.friendly_name, - } - for package in live_packages - if slugify(package.status) in package_states or package_states == [] - ] - } - - hass.services.async_register( - DOMAIN, - SERVICE_GET_PACKAGES, - get_packages, - schema=SERVICE_SCHEMA, - supports_response=SupportsResponse.ONLY, - ) return True diff --git a/homeassistant/components/seventeentrack/config_flow.py b/homeassistant/components/seventeentrack/config_flow.py index 4433a73cd51..f4f3b3e82ae 100644 --- a/homeassistant/components/seventeentrack/config_flow.py +++ b/homeassistant/components/seventeentrack/config_flow.py @@ -97,38 +97,6 @@ class SeventeenTrackConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import 17Track config from configuration.yaml.""" - - client = self._get_client() - - try: - login_result = await client.profile.login( - import_data[CONF_USERNAME], import_data[CONF_PASSWORD] - ) - except SeventeenTrackError: - return self.async_abort(reason="cannot_connect") - - if not login_result: - return self.async_abort(reason="invalid_auth") - - account_id = client.profile.account_id - - await self.async_set_unique_id(account_id) - self._abort_if_unique_id_configured() - return self.async_create_entry( - title=import_data[CONF_USERNAME], - data=import_data, - options={ - CONF_SHOW_ARCHIVED: import_data.get( - CONF_SHOW_ARCHIVED, DEFAULT_SHOW_ARCHIVED - ), - CONF_SHOW_DELIVERED: import_data.get( - CONF_SHOW_DELIVERED, DEFAULT_SHOW_DELIVERED - ), - }, - ) - @callback def _get_client(self): session = aiohttp_client.async_get_clientsession(self.hass) diff --git a/homeassistant/components/seventeentrack/const.py b/homeassistant/components/seventeentrack/const.py index 584eca507e9..6b888590600 100644 --- a/homeassistant/components/seventeentrack/const.py +++ b/homeassistant/components/seventeentrack/const.py @@ -42,8 +42,11 @@ NOTIFICATION_DELIVERED_MESSAGE = ( VALUE_DELIVERED = "Delivered" SERVICE_GET_PACKAGES = "get_packages" +SERVICE_ARCHIVE_PACKAGE = "archive_package" ATTR_PACKAGE_STATE = "package_state" +ATTR_PACKAGE_TRACKING_NUMBER = "package_tracking_number" ATTR_CONFIG_ENTRY_ID = "config_entry_id" + DEPRECATED_KEY = "deprecated" diff --git a/homeassistant/components/seventeentrack/icons.json b/homeassistant/components/seventeentrack/icons.json index 78ca65edc4d..a5cac0a9f84 100644 --- a/homeassistant/components/seventeentrack/icons.json +++ b/homeassistant/components/seventeentrack/icons.json @@ -28,6 +28,11 @@ } }, "services": { - "get_packages": "mdi:package" + "get_packages": { + "service": "mdi:package" + }, + "archive_package": { + "service": "mdi:archive" + } } } diff --git a/homeassistant/components/seventeentrack/repairs.py b/homeassistant/components/seventeentrack/repairs.py index 71616e98506..ce72960ea91 100644 --- a/homeassistant/components/seventeentrack/repairs.py +++ b/homeassistant/components/seventeentrack/repairs.py @@ -42,8 +42,8 @@ async def async_create_fix_flow( hass: HomeAssistant, issue_id: str, data: dict ) -> RepairsFlow: """Create flow.""" - if issue_id.startswith("deprecate_sensor_"): - entry = hass.config_entries.async_get_entry(data["entry_id"]) - assert entry + if issue_id.startswith("deprecate_sensor_") and ( + entry := hass.config_entries.async_get_entry(data["entry_id"]) + ): return SensorDeprecationRepairFlow(entry) return ConfirmRepairFlow() diff --git a/homeassistant/components/seventeentrack/sensor.py b/homeassistant/components/seventeentrack/sensor.py index 3122065adae..4e561a87961 
100644 --- a/homeassistant/components/seventeentrack/sensor.py +++ b/homeassistant/components/seventeentrack/sensor.py @@ -4,31 +4,15 @@ from __future__ import annotations from typing import Any -import voluptuous as vol - from homeassistant.components import persistent_notification -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorEntity, -) -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - ATTR_FRIENDLY_NAME, - ATTR_LOCATION, - CONF_PASSWORD, - CONF_USERNAME, -) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import ( - config_validation as cv, - entity_registry as er, - issue_registry as ir, -) +from homeassistant.components.sensor import SensorEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_LOCATION +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType +from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import SeventeenTrackCoordinator @@ -43,8 +27,6 @@ from .const import ( ATTR_TRACKING_INFO_LANGUAGE, ATTR_TRACKING_NUMBER, ATTRIBUTION, - CONF_SHOW_ARCHIVED, - CONF_SHOW_DELIVERED, DEPRECATED_KEY, DOMAIN, LOGGER, @@ -54,59 +36,6 @@ from .const import ( VALUE_DELIVERED, ) -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional(CONF_SHOW_ARCHIVED, default=False): cv.boolean, - vol.Optional(CONF_SHOW_DELIVERED, default=False): cv.boolean, - } -) - -ISSUE_PLACEHOLDER = {"url": "/config/integrations/dashboard/add?domain=seventeentrack"} - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Initialize 17Track import from config.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config - ) - if ( - result["type"] == FlowResultType.CREATE_ENTRY - or result["reason"] == "already_configured" - ): - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - is_fixable=False, - breaks_in_ha_version="2024.10.0", - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "17Track", - }, - ) - else: - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2024.10.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders=ISSUE_PLACEHOLDER, - ) - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/seventeentrack/services.py b/homeassistant/components/seventeentrack/services.py new file mode 100644 index 00000000000..54c23e6d619 --- 
/dev/null +++ b/homeassistant/components/seventeentrack/services.py @@ -0,0 +1,150 @@ +"""Services for the seventeentrack integration.""" + +from typing import Any, Final + +from pyseventeentrack.package import PACKAGE_STATUS_MAP, Package +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_LOCATION +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import config_validation as cv, selector +from homeassistant.util import slugify + +from . import SeventeenTrackCoordinator +from .const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_DESTINATION_COUNTRY, + ATTR_INFO_TEXT, + ATTR_ORIGIN_COUNTRY, + ATTR_PACKAGE_STATE, + ATTR_PACKAGE_TRACKING_NUMBER, + ATTR_PACKAGE_TYPE, + ATTR_STATUS, + ATTR_TIMESTAMP, + ATTR_TRACKING_INFO_LANGUAGE, + ATTR_TRACKING_NUMBER, + DOMAIN, + SERVICE_ARCHIVE_PACKAGE, + SERVICE_GET_PACKAGES, +) + +SERVICE_ADD_PACKAGES_SCHEMA: Final = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): cv.string, + vol.Optional(ATTR_PACKAGE_STATE): selector.SelectSelector( + selector.SelectSelectorConfig( + multiple=True, + options=[ + value.lower().replace(" ", "_") + for value in PACKAGE_STATUS_MAP.values() + ], + mode=selector.SelectSelectorMode.DROPDOWN, + translation_key=ATTR_PACKAGE_STATE, + ) + ), + } +) + +SERVICE_ARCHIVE_PACKAGE_SCHEMA: Final = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): cv.string, + vol.Required(ATTR_PACKAGE_TRACKING_NUMBER): cv.string, + } +) + + +def setup_services(hass: HomeAssistant) -> None: + """Set up the services for the seventeentrack integration.""" + + async def get_packages(call: ServiceCall) -> ServiceResponse: + """Get packages from 17Track.""" + config_entry_id = call.data[ATTR_CONFIG_ENTRY_ID] + package_states = call.data.get(ATTR_PACKAGE_STATE, []) + + await _validate_service(config_entry_id) + + seventeen_coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][ + config_entry_id + ] + live_packages = sorted( + await seventeen_coordinator.client.profile.packages( + show_archived=seventeen_coordinator.show_archived + ) + ) + + return { + "packages": [ + package_to_dict(package) + for package in live_packages + if slugify(package.status) in package_states or package_states == [] + ] + } + + async def archive_package(call: ServiceCall) -> None: + config_entry_id = call.data[ATTR_CONFIG_ENTRY_ID] + tracking_number = call.data[ATTR_PACKAGE_TRACKING_NUMBER] + + await _validate_service(config_entry_id) + + seventeen_coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][ + config_entry_id + ] + + await seventeen_coordinator.client.profile.archive_package(tracking_number) + + def package_to_dict(package: Package) -> dict[str, Any]: + result = { + ATTR_DESTINATION_COUNTRY: package.destination_country, + ATTR_ORIGIN_COUNTRY: package.origin_country, + ATTR_PACKAGE_TYPE: package.package_type, + ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, + ATTR_TRACKING_NUMBER: package.tracking_number, + ATTR_LOCATION: package.location, + ATTR_STATUS: package.status, + ATTR_INFO_TEXT: package.info_text, + ATTR_FRIENDLY_NAME: package.friendly_name, + } + if timestamp := package.timestamp: + result[ATTR_TIMESTAMP] = timestamp.isoformat() + return result + + async def _validate_service(config_entry_id): + entry: ConfigEntry | None = hass.config_entries.async_get_entry(config_entry_id) + if not entry: + 
raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_config_entry", + translation_placeholders={ + "config_entry_id": config_entry_id, + }, + ) + if entry.state != ConfigEntryState.LOADED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="unloaded_config_entry", + translation_placeholders={ + "config_entry_id": entry.title, + }, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_GET_PACKAGES, + get_packages, + schema=SERVICE_ADD_PACKAGES_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_ARCHIVE_PACKAGE, + archive_package, + schema=SERVICE_ARCHIVE_PACKAGE_SCHEMA, + ) diff --git a/homeassistant/components/seventeentrack/services.yaml b/homeassistant/components/seventeentrack/services.yaml index 41cb66ada5f..d4592dc8aab 100644 --- a/homeassistant/components/seventeentrack/services.yaml +++ b/homeassistant/components/seventeentrack/services.yaml @@ -18,3 +18,14 @@ get_packages: selector: config_entry: integration: seventeentrack +archive_package: + fields: + package_tracking_number: + required: true + selector: + text: + config_entry_id: + required: true + selector: + config_entry: + integration: seventeentrack diff --git a/homeassistant/components/seventeentrack/strings.json b/homeassistant/components/seventeentrack/strings.json index 0fbac13736e..bbd01ed3055 100644 --- a/homeassistant/components/seventeentrack/strings.json +++ b/homeassistant/components/seventeentrack/strings.json @@ -38,14 +38,6 @@ } }, "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The 17Track YAML configuration import cannot connect to server", - "description": "Configuring 17Track using YAML is being removed but there was a connection error importing your YAML configuration.\n\nThings you can try:\nMake sure your home assistant can reach the web.\n\nThen restart Home Assistant to try importing this integration again.\n\nAlternatively, you may remove the 17Track configuration from your YAML configuration entirely, restart Home Assistant, and add the 17Track integration manually." - }, - "deprecated_yaml_import_issue_invalid_auth": { - "title": "The 17Track YAML configuration import request failed due to invalid authentication", - "description": "Configuring 17Track using YAML is being removed but there were invalid credentials provided while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your 17Track credentials are correct and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the 17Track configuration from your YAML configuration entirely, restart Home Assistant, and add the 17Track integration manually." - }, "deprecate_sensor": { "title": "17Track package sensors are being deprecated", "fix_flow": { @@ -100,6 +92,20 @@ "description": "The packages will be retrieved for the selected service." } } + }, + "archive_package": { + "name": "Archive package", + "description": "Archive a package", + "fields": { + "package_tracking_number": { + "name": "Package tracking number", + "description": "The package will be archived for the specified tracking number." + }, + "config_entry_id": { + "name": "[%key:component::seventeentrack::services::get_packages::fields::config_entry_id::name%]", + "description": "The package will be archived for the selected service." 
+ } + } } }, "selector": { diff --git a/homeassistant/components/sfr_box/__init__.py b/homeassistant/components/sfr_box/__init__.py index dade1af0e52..927e3cb0ef2 100644 --- a/homeassistant/components/sfr_box/__init__.py +++ b/homeassistant/components/sfr_box/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from typing import TYPE_CHECKING from sfrbox_api.bridge import SFRBox from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError @@ -46,6 +47,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Preload system information await data.system.async_config_entry_first_refresh() system_info = data.system.data + if TYPE_CHECKING: + assert system_info is not None # Preload other coordinators (based on net infrastructure) tasks = [data.wan.async_config_entry_first_refresh()] @@ -63,6 +66,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: identifiers={(DOMAIN, system_info.mac_addr)}, name="SFR Box", model=system_info.product_id, + model_id=system_info.product_id, sw_version=system_info.version_mainfirmware, configuration_url=f"http://{entry.data[CONF_HOST]}", ) diff --git a/homeassistant/components/sfr_box/binary_sensor.py b/homeassistant/components/sfr_box/binary_sensor.py index b299af33513..4ef5e87761d 100644 --- a/homeassistant/components/sfr_box/binary_sensor.py +++ b/homeassistant/components/sfr_box/binary_sensor.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from typing import TYPE_CHECKING from sfrbox_api.models import DslInfo, FtthInfo, SystemInfo, WanInfo @@ -65,19 +66,22 @@ async def async_setup_entry( ) -> None: """Set up the sensors.""" data: DomainData = hass.data[DOMAIN][entry.entry_id] + system_info = data.system.data + if TYPE_CHECKING: + assert system_info is not None entities: list[SFRBoxBinarySensor] = [ - SFRBoxBinarySensor(data.wan, description, data.system.data) + SFRBoxBinarySensor(data.wan, description, system_info) for description in WAN_SENSOR_TYPES ] - if (net_infra := data.system.data.net_infra) == "adsl": + if (net_infra := system_info.net_infra) == "adsl": entities.extend( - SFRBoxBinarySensor(data.dsl, description, data.system.data) + SFRBoxBinarySensor(data.dsl, description, system_info) for description in DSL_SENSOR_TYPES ) elif net_infra == "ftth": entities.extend( - SFRBoxBinarySensor(data.ftth, description, data.system.data) + SFRBoxBinarySensor(data.ftth, description, system_info) for description in FTTH_SENSOR_TYPES ) @@ -111,4 +115,6 @@ class SFRBoxBinarySensor[_T]( @property def is_on(self) -> bool | None: """Return the native value of the device.""" + if self.coordinator.data is None: + return None return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/sfr_box/button.py b/homeassistant/components/sfr_box/button.py index f6d3100d692..bddb1e8f926 100644 --- a/homeassistant/components/sfr_box/button.py +++ b/homeassistant/components/sfr_box/button.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine from dataclasses import dataclass from functools import wraps -from typing import Any, Concatenate +from typing import TYPE_CHECKING, Any, Concatenate from sfrbox_api.bridge import SFRBox from sfrbox_api.exceptions import SFRBoxError @@ -69,10 +69,12 @@ async def async_setup_entry( ) -> None: """Set up the buttons.""" data: DomainData = hass.data[DOMAIN][entry.entry_id] + 
system_info = data.system.data + if TYPE_CHECKING: + assert system_info is not None entities = [ - SFRBoxButton(data.box, description, data.system.data) - for description in BUTTON_TYPES + SFRBoxButton(data.box, description, system_info) for description in BUTTON_TYPES ] async_add_entities(entities) diff --git a/homeassistant/components/sfr_box/config_flow.py b/homeassistant/components/sfr_box/config_flow.py index f7d72c01ccd..629f6ad291f 100644 --- a/homeassistant/components/sfr_box/config_flow.py +++ b/homeassistant/components/sfr_box/config_flow.py @@ -3,13 +3,13 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any +from typing import TYPE_CHECKING, Any from sfrbox_api.bridge import SFRBox from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import selector from homeassistant.helpers.httpx_client import get_async_client @@ -37,7 +37,6 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 _box: SFRBox _config: dict[str, Any] = {} - _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, str] | None = None @@ -51,6 +50,8 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN): except SFRBoxError: errors["base"] = "cannot_connect" else: + if TYPE_CHECKING: + assert system_info is not None await self.async_set_unique_id(system_info.mac_addr) self._abort_if_unique_id_configured() self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) @@ -86,19 +87,16 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN): except SFRBoxAuthenticationError: errors["base"] = "invalid_auth" else: - if reauth_entry := self._reauth_entry: - data = {**reauth_entry.data, **user_input} - self.hass.config_entries.async_update_entry(reauth_entry, data=data) - self.hass.async_create_task( - self.hass.config_entries.async_reload(reauth_entry.entry_id) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - return self.async_abort(reason="reauth_successful") self._config.update(user_input) return self.async_create_entry(title="SFR Box", data=self._config) suggested_values: Mapping[str, Any] | None = user_input - if self._reauth_entry and not suggested_values: - suggested_values = self._reauth_entry.data + if self.source == SOURCE_REAUTH and not suggested_values: + suggested_values = self._get_reauth_entry().data data_schema = self.add_suggested_values_to_schema(AUTH_SCHEMA, suggested_values) return self.async_show_form( @@ -115,8 +113,5 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle failed credentials.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) self._box = SFRBox(ip=entry_data[CONF_HOST], client=get_async_client(self.hass)) return await self.async_step_auth() diff --git a/homeassistant/components/sfr_box/coordinator.py b/homeassistant/components/sfr_box/coordinator.py index af3195723f4..5877d5a454a 100644 --- a/homeassistant/components/sfr_box/coordinator.py +++ b/homeassistant/components/sfr_box/coordinator.py @@ -15,7 +15,7 @@ _LOGGER = logging.getLogger(__name__) _SCAN_INTERVAL = 
timedelta(minutes=1) -class SFRDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): +class SFRDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT | None]): """Coordinator to manage data updates.""" def __init__( @@ -23,14 +23,14 @@ class SFRDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): hass: HomeAssistant, box: SFRBox, name: str, - method: Callable[[SFRBox], Coroutine[Any, Any, _DataT]], + method: Callable[[SFRBox], Coroutine[Any, Any, _DataT | None]], ) -> None: """Initialize coordinator.""" self.box = box self._method = method super().__init__(hass, _LOGGER, name=name, update_interval=_SCAN_INTERVAL) - async def _async_update_data(self) -> _DataT: + async def _async_update_data(self) -> _DataT | None: """Update data.""" try: return await self._method(self.box) diff --git a/homeassistant/components/sfr_box/diagnostics.py b/homeassistant/components/sfr_box/diagnostics.py index b5aca834af5..0553bfe4233 100644 --- a/homeassistant/components/sfr_box/diagnostics.py +++ b/homeassistant/components/sfr_box/diagnostics.py @@ -3,7 +3,7 @@ from __future__ import annotations import dataclasses -from typing import Any +from typing import TYPE_CHECKING, Any from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry @@ -12,9 +12,18 @@ from homeassistant.core import HomeAssistant from .const import DOMAIN from .models import DomainData +if TYPE_CHECKING: + from _typeshed import DataclassInstance + TO_REDACT = {"mac_addr", "serial_number", "ip_addr", "ipv6_addr"} +def _async_redact_data(obj: DataclassInstance | None) -> dict[str, Any] | None: + if obj is None: + return None + return async_redact_data(dataclasses.asdict(obj), TO_REDACT) + + async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: ConfigEntry ) -> dict[str, Any]: @@ -27,21 +36,9 @@ async def async_get_config_entry_diagnostics( "data": dict(entry.data), }, "data": { - "dsl": async_redact_data( - dataclasses.asdict(await data.system.box.dsl_get_info()), - TO_REDACT, - ), - "ftth": async_redact_data( - dataclasses.asdict(await data.system.box.ftth_get_info()), - TO_REDACT, - ), - "system": async_redact_data( - dataclasses.asdict(await data.system.box.system_get_info()), - TO_REDACT, - ), - "wan": async_redact_data( - dataclasses.asdict(await data.system.box.wan_get_info()), - TO_REDACT, - ), + "dsl": _async_redact_data(await data.system.box.dsl_get_info()), + "ftth": _async_redact_data(await data.system.box.ftth_get_info()), + "system": _async_redact_data(await data.system.box.system_get_info()), + "wan": _async_redact_data(await data.system.box.wan_get_info()), }, } diff --git a/homeassistant/components/sfr_box/manifest.json b/homeassistant/components/sfr_box/manifest.json index bf4d91a50f1..a2d65e9819d 100644 --- a/homeassistant/components/sfr_box/manifest.json +++ b/homeassistant/components/sfr_box/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/sfr_box", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["sfrbox-api==0.0.8"] + "requirements": ["sfrbox-api==0.0.11"] } diff --git a/homeassistant/components/sfr_box/sensor.py b/homeassistant/components/sfr_box/sensor.py index d19ff82b393..ee3285a8f38 100644 --- a/homeassistant/components/sfr_box/sensor.py +++ b/homeassistant/components/sfr_box/sensor.py @@ -2,6 +2,7 @@ from collections.abc import Callable from dataclasses import dataclass +from typing import TYPE_CHECKING from sfrbox_api.models import DslInfo, 
SystemInfo, WanInfo @@ -129,7 +130,7 @@ DSL_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[DslInfo], ...] = ( "unknown", ], translation_key="dsl_line_status", - value_fn=lambda x: x.line_status.lower().replace(" ", "_"), + value_fn=lambda x: _value_to_option(x.line_status), ), SFRBoxSensorEntityDescription[DslInfo]( key="training", @@ -149,7 +150,7 @@ DSL_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[DslInfo], ...] = ( "unknown", ], translation_key="dsl_training", - value_fn=lambda x: x.training.lower().replace(" ", "_").replace(".", "_"), + value_fn=lambda x: _value_to_option(x.training), ), ) SYSTEM_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[SystemInfo], ...] = ( @@ -181,7 +182,7 @@ SYSTEM_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[SystemInfo], ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_fn=lambda x: None if x.temperature is None else x.temperature / 1000, + value_fn=lambda x: _get_temperature(x.temperature), ), ) WAN_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[WanInfo], ...] = ( @@ -203,23 +204,38 @@ WAN_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[WanInfo], ...] = ( ) +def _value_to_option(value: str | None) -> str | None: + if value is None: + return value + return value.lower().replace(" ", "_").replace(".", "_") + + +def _get_temperature(value: float | None) -> float | None: + if value is None or value < 1000: + return value + return value / 1000 + + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the sensors.""" data: DomainData = hass.data[DOMAIN][entry.entry_id] + system_info = data.system.data + if TYPE_CHECKING: + assert system_info is not None entities: list[SFRBoxSensor] = [ - SFRBoxSensor(data.system, description, data.system.data) + SFRBoxSensor(data.system, description, system_info) for description in SYSTEM_SENSOR_TYPES ] entities.extend( - SFRBoxSensor(data.wan, description, data.system.data) + SFRBoxSensor(data.wan, description, system_info) for description in WAN_SENSOR_TYPES ) - if data.system.data.net_infra == "adsl": + if system_info.net_infra == "adsl": entities.extend( - SFRBoxSensor(data.dsl, description, data.system.data) + SFRBoxSensor(data.dsl, description, system_info) for description in DSL_SENSOR_TYPES ) @@ -251,4 +267,6 @@ class SFRBoxSensor[_T](CoordinatorEntity[SFRDataUpdateCoordinator[_T]], SensorEn @property def native_value(self) -> StateType: """Return the native value of the device.""" + if self.coordinator.data is None: + return None return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/sharkiq/config_flow.py b/homeassistant/components/sharkiq/config_flow.py index 492b8f2a365..87367fcf093 100644 --- a/homeassistant/components/sharkiq/config_flow.py +++ b/homeassistant/components/sharkiq/config_flow.py @@ -116,9 +116,15 @@ class SharkIqConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-auth if login is invalid.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by reauthentication.""" errors: dict[str, str] = {} if user_input is not None: @@ -134,7 +140,7 @@ class SharkIqConfigFlow(ConfigFlow, domain=DOMAIN): 
return self.async_abort(reason=errors["base"]) return self.async_show_form( - step_id="reauth", + step_id="reauth_confirm", data_schema=SHARKIQ_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/sharkiq/icons.json b/homeassistant/components/sharkiq/icons.json index 13fd58ce66d..e58a317f503 100644 --- a/homeassistant/components/sharkiq/icons.json +++ b/homeassistant/components/sharkiq/icons.json @@ -1,5 +1,7 @@ { "services": { - "clean_room": "mdi:robot-vacuum" + "clean_room": { + "service": "mdi:robot-vacuum" + } } } diff --git a/homeassistant/components/sharkiq/strings.json b/homeassistant/components/sharkiq/strings.json index 63d4f6af48b..40b569e13b7 100644 --- a/homeassistant/components/sharkiq/strings.json +++ b/homeassistant/components/sharkiq/strings.json @@ -13,7 +13,7 @@ "region": "Shark IQ uses different services in the EU. Select your region to connect to the correct service for your account." } }, - "reauth": { + "reauth_confirm": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", diff --git a/homeassistant/components/sharkiq/vacuum.py b/homeassistant/components/sharkiq/vacuum.py index 8f0547980c3..873d3fbd290 100644 --- a/homeassistant/components/sharkiq/vacuum.py +++ b/homeassistant/components/sharkiq/vacuum.py @@ -9,12 +9,8 @@ from sharkiq import OperatingModes, PowerModes, Properties, SharkIqVacuum import voluptuous as vol from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -30,10 +26,10 @@ from .const import DOMAIN, LOGGER, SERVICE_CLEAN_ROOM, SHARK from .coordinator import SharkIqUpdateCoordinator OPERATING_STATE_MAP = { - OperatingModes.PAUSE: STATE_PAUSED, - OperatingModes.START: STATE_CLEANING, - OperatingModes.STOP: STATE_IDLE, - OperatingModes.RETURN: STATE_RETURNING, + OperatingModes.PAUSE: VacuumActivity.PAUSED, + OperatingModes.START: VacuumActivity.CLEANING, + OperatingModes.STOP: VacuumActivity.IDLE, + OperatingModes.RETURN: VacuumActivity.RETURNING, } FAN_SPEEDS_MAP = { @@ -150,19 +146,13 @@ class SharkVacuumEntity(CoordinatorEntity[SharkIqUpdateCoordinator], StateVacuum return None return self.sharkiq.error_text - @property - def operating_mode(self) -> str | None: - """Operating mode.""" - op_mode = self.sharkiq.get_property_value(Properties.OPERATING_MODE) - return OPERATING_STATE_MAP.get(op_mode) - @property def recharging_to_resume(self) -> int | None: """Return True if vacuum set to recharge and resume cleaning.""" return self.sharkiq.get_property_value(Properties.RECHARGING_TO_RESUME) @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Get the current vacuum state. NB: Currently, we do not return an error state because they can be very, very stale. @@ -170,8 +160,9 @@ class SharkVacuumEntity(CoordinatorEntity[SharkIqUpdateCoordinator], StateVacuum user a notification. 
""" if self.sharkiq.get_property_value(Properties.CHARGING_STATUS): - return STATE_DOCKED - return self.operating_mode + return VacuumActivity.DOCKED + op_mode = self.sharkiq.get_property_value(Properties.OPERATING_MODE) + return OPERATING_STATE_MAP.get(op_mode) @property def available(self) -> bool: diff --git a/homeassistant/components/shelly/__init__.py b/homeassistant/components/shelly/__init__.py index 1d3f67220fa..e0d9d17d55d 100644 --- a/homeassistant/components/shelly/__init__.py +++ b/homeassistant/components/shelly/__init__.py @@ -290,6 +290,11 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) ) runtime_data.rpc = ShellyRpcCoordinator(hass, entry, device) runtime_data.rpc.async_setup(runtime_data.platforms) + # Try to connect to the device, if we reached here from config flow + # and user woke up the device when adding it, we can continue setup + # otherwise we will wait for the device to wake up + if sleep_period: + await runtime_data.rpc.async_device_online("setup") else: # Restore sensors for sleeping device LOGGER.debug("Setting up offline RPC device %s", entry.title) diff --git a/homeassistant/components/shelly/binary_sensor.py b/homeassistant/components/shelly/binary_sensor.py index c2127828b07..556274aa51a 100644 --- a/homeassistant/components/shelly/binary_sensor.py +++ b/homeassistant/components/shelly/binary_sensor.py @@ -34,7 +34,7 @@ from .entity import ( async_setup_entry_rpc, ) from .utils import ( - async_remove_orphaned_virtual_entities, + async_remove_orphaned_entities, get_device_entry_gen, get_virtual_component_ids, is_block_momentary_input, @@ -263,13 +263,13 @@ async def async_setup_entry( virtual_binary_sensor_ids = get_virtual_component_ids( coordinator.device.config, BINARY_SENSOR_PLATFORM ) - async_remove_orphaned_virtual_entities( + async_remove_orphaned_entities( hass, config_entry.entry_id, coordinator.mac, BINARY_SENSOR_PLATFORM, - "boolean", virtual_binary_sensor_ids, + "boolean", ) return diff --git a/homeassistant/components/shelly/bluetooth/__init__.py b/homeassistant/components/shelly/bluetooth/__init__.py index fad7ddf4424..f2b71d19d61 100644 --- a/homeassistant/components/shelly/bluetooth/__init__.py +++ b/homeassistant/components/shelly/bluetooth/__init__.py @@ -5,13 +5,7 @@ from __future__ import annotations from typing import TYPE_CHECKING from aioshelly.ble import async_start_scanner, create_scanner -from aioshelly.ble.const import ( - BLE_SCAN_RESULT_EVENT, - BLE_SCAN_RESULT_VERSION, - DEFAULT_DURATION_MS, - DEFAULT_INTERVAL_MS, - DEFAULT_WINDOW_MS, -) +from aioshelly.ble.const import BLE_SCAN_RESULT_EVENT, BLE_SCAN_RESULT_VERSION from homeassistant.components.bluetooth import async_register_scanner from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback @@ -43,9 +37,6 @@ async def async_connect_scanner( active=scanner_mode == BLEScannerMode.ACTIVE, event_type=BLE_SCAN_RESULT_EVENT, data_version=BLE_SCAN_RESULT_VERSION, - interval_ms=DEFAULT_INTERVAL_MS, - window_ms=DEFAULT_WINDOW_MS, - duration_ms=DEFAULT_DURATION_MS, ) @hass_callback diff --git a/homeassistant/components/shelly/climate.py b/homeassistant/components/shelly/climate.py index b77f45afb3f..842abc5ecc4 100644 --- a/homeassistant/components/shelly/climate.py +++ b/homeassistant/components/shelly/climate.py @@ -172,7 +172,6 @@ class BlockSleepingClimate( ) _attr_target_temperature_step = SHTRV_01_TEMPERATURE_SETTINGS["step"] _attr_temperature_unit = UnitOfTemperature.CELSIUS - 
_enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -456,7 +455,6 @@ class RpcClimate(ShellyRpcEntity, ClimateEntity): ) _attr_target_temperature_step = RPC_THERMOSTAT_SETTINGS["step"] _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: ShellyRpcCoordinator, id_: int) -> None: """Initialize.""" diff --git a/homeassistant/components/shelly/config_flow.py b/homeassistant/components/shelly/config_flow.py index c80d1e84d6f..55686464637 100644 --- a/homeassistant/components/shelly/config_flow.py +++ b/homeassistant/components/shelly/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, Final +from typing import Any, Final from aioshelly.block_device import BlockDevice from aioshelly.common import ConnectionOptions, get_info @@ -12,6 +12,7 @@ from aioshelly.exceptions import ( CustomPortNotSupported, DeviceConnectionError, InvalidAuthError, + MacAddressMismatchError, ) from aioshelly.rpc_device import RpcDevice import voluptuous as vol @@ -146,7 +147,6 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): port: int = DEFAULT_HTTP_PORT info: dict[str, Any] = {} device_info: dict[str, Any] = {} - entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -177,6 +177,8 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): ) except DeviceConnectionError: errors["base"] = "cannot_connect" + except MacAddressMismatchError: + errors["base"] = "mac_address_mismatch" except CustomPortNotSupported: errors["base"] = "custom_port_not_supported" except Exception: # noqa: BLE001 @@ -216,6 +218,8 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except DeviceConnectionError: errors["base"] = "cannot_connect" + except MacAddressMismatchError: + errors["base"] = "mac_address_mismatch" except Exception: # noqa: BLE001 LOGGER.exception("Unexpected exception") errors["base"] = "unknown" @@ -356,7 +360,6 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -364,9 +367,9 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - assert self.entry is not None - host = self.entry.data[CONF_HOST] - port = get_http_port(self.entry.data) + reauth_entry = self._get_reauth_entry() + host = reauth_entry.data[CONF_HOST] + port = get_http_port(reauth_entry.data) if user_input is not None: try: @@ -374,18 +377,20 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): except (DeviceConnectionError, InvalidAuthError): return self.async_abort(reason="reauth_unsuccessful") - if get_device_entry_gen(self.entry) != 1: + if get_device_entry_gen(reauth_entry) != 1: user_input[CONF_USERNAME] = "admin" try: await validate_input(self.hass, host, port, info, user_input) except (DeviceConnectionError, InvalidAuthError): return self.async_abort(reason="reauth_unsuccessful") + except MacAddressMismatchError: + return self.async_abort(reason="mac_address_mismatch") return self.async_update_reload_and_abort( - self.entry, data={**self.entry.data, **user_input} + reauth_entry, data_updates=user_input 
) - if get_device_entry_gen(self.entry) in BLOCK_GENERATIONS: + if get_device_entry_gen(reauth_entry) in BLOCK_GENERATIONS: schema = { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, @@ -400,28 +405,13 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reconfigure( - self, _: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - - if TYPE_CHECKING: - assert entry is not None - - self.host = entry.data[CONF_HOST] - self.port = entry.data.get(CONF_PORT, DEFAULT_HTTP_PORT) - self.entry = entry - - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors = {} - - if TYPE_CHECKING: - assert self.entry is not None + reconfigure_entry = self._get_reconfigure_entry() + self.host = reconfigure_entry.data[CONF_HOST] + self.port = reconfigure_entry.data.get(CONF_PORT, DEFAULT_HTTP_PORT) if user_input is not None: host = user_input[CONF_HOST] @@ -433,23 +423,23 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): except CustomPortNotSupported: errors["base"] = "custom_port_not_supported" else: - if info[CONF_MAC] != self.entry.unique_id: - return self.async_abort(reason="another_device") + await self.async_set_unique_id(info[CONF_MAC]) + self._abort_if_unique_id_mismatch(reason="another_device") - data = {**self.entry.data, CONF_HOST: host, CONF_PORT: port} - self.hass.config_entries.async_update_entry(self.entry, data=data) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reconfigure_successful") + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={CONF_HOST: host, CONF_PORT: port}, + ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_HOST, default=self.host): str, vol.Required(CONF_PORT, default=self.port): vol.Coerce(int), } ), - description_placeholders={"device_name": self.entry.title}, + description_placeholders={"device_name": reconfigure_entry.title}, errors=errors, ) @@ -461,7 +451,7 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() @classmethod @callback @@ -477,10 +467,6 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle the option flow for shelly.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index fe4108a1f52..88d8c1f5f17 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -239,8 +239,6 @@ DEVICES_WITHOUT_FIRMWARE_CHANGELOG = ( CONF_GEN = "gen" -SHELLY_PLUS_RGBW_CHANNELS = 4 - VIRTUAL_COMPONENTS_MAP = { "binary_sensor": {"types": ["boolean"], "modes": ["label"]}, "number": {"types": ["number"], "modes": ["field", "slider"]}, @@ -257,3 +255,5 @@ VIRTUAL_NUMBER_MODE_MAP = { API_WS_URL = 
"/api/shelly/ws" + +COMPONENT_ID_PATTERN = re.compile(r"[a-z\d]+:\d+") diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 03dcdedbb6f..f20b283cacf 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -11,8 +11,14 @@ from typing import Any, cast from aioshelly.ble import async_ensure_ble_enabled, async_stop_scanner from aioshelly.block_device import BlockDevice, BlockUpdateType from aioshelly.const import MODEL_NAMES, MODEL_VALVE -from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError +from aioshelly.exceptions import ( + DeviceConnectionError, + InvalidAuthError, + MacAddressMismatchError, + RpcCallError, +) from aioshelly.rpc_device import RpcDevice, RpcUpdateType +from propcache import cached_property from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import ( @@ -120,12 +126,12 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._handle_ha_stop) ) - @property + @cached_property def model(self) -> str: """Model of the device.""" return cast(str, self.entry.data["model"]) - @property + @cached_property def mac(self) -> str: """Mac address of the device.""" return cast(str, self.entry.unique_id) @@ -172,8 +178,8 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( try: await self.device.initialize() update_device_fw_info(self.hass, self.device, self.entry) - except DeviceConnectionError as err: - LOGGER.error( + except (DeviceConnectionError, MacAddressMismatchError) as err: + LOGGER.debug( "Error connecting to Shelly device %s, error: %r", self.name, err ) return False @@ -449,7 +455,7 @@ class ShellyRestCoordinator(ShellyCoordinatorBase[BlockDevice]): if self.device.status["uptime"] > 2 * REST_SENSORS_UPDATE_INTERVAL: return await self.device.update_shelly() - except DeviceConnectionError as err: + except (DeviceConnectionError, MacAddressMismatchError) as err: raise UpdateFailed(f"Error fetching data: {err!r}") from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() @@ -480,15 +486,17 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): self._connect_task: asyncio.Task | None = None entry.async_on_unload(entry.add_update_listener(self._async_update_listener)) - async def async_device_online(self) -> None: + async def async_device_online(self, source: str) -> None: """Handle device going online.""" if not self.sleep_period: await self.async_request_refresh() elif not self._came_online_once or not self.device.initialized: LOGGER.debug( - "Sleepy device %s is online, trying to poll and configure", self.name + "Sleepy device %s is online (source: %s), trying to poll and configure", + self.name, + source, ) - # Zeroconf told us the device is online, try to poll + # Source told us the device is online, try to poll # the device and if possible, set up the outbound # websocket so the device will send us updates # instead of relying on polling it fast enough before @@ -600,7 +608,7 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): async def _async_update_data(self) -> None: """Fetch data.""" - if self.update_sleep_period(): + if self.update_sleep_period() or self.hass.is_stopping: return if self.sleep_period: @@ -790,8 +798,7 @@ class ShellyRpcPollingCoordinator(ShellyCoordinatorBase[RpcDevice]): LOGGER.debug("Polling Shelly RPC Device - %s", self.name) 
try: - await self.device.update_status() - await self.device.get_dynamic_components() + await self.device.poll() except (DeviceConnectionError, RpcCallError) as err: raise UpdateFailed(f"Device disconnected: {err!r}") from err except InvalidAuthError: @@ -847,7 +854,7 @@ async def async_reconnect_soon(hass: HomeAssistant, entry: ShellyConfigEntry) -> ): entry.async_create_background_task( hass, - coordinator.async_device_online(), + coordinator.async_device_online("zeroconf"), "reconnect soon", eager_start=True, ) diff --git a/homeassistant/components/shelly/cover.py b/homeassistant/components/shelly/cover.py index 395df95735b..09e8279bf9b 100644 --- a/homeassistant/components/shelly/cover.py +++ b/homeassistant/components/shelly/cover.py @@ -9,6 +9,7 @@ from aioshelly.const import RPC_GENERATIONS from homeassistant.components.cover import ( ATTR_POSITION, + ATTR_TILT_POSITION, CoverDeviceClass, CoverEntity, CoverEntityFeature, @@ -157,6 +158,13 @@ class RpcShellyCover(ShellyRpcEntity, CoverEntity): self._id = id_ if self.status["pos_control"]: self._attr_supported_features |= CoverEntityFeature.SET_POSITION + if coordinator.device.config[f"cover:{id_}"].get("slat", {}).get("enable"): + self._attr_supported_features |= ( + CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.STOP_TILT + | CoverEntityFeature.SET_TILT_POSITION + ) @property def is_closed(self) -> bool | None: @@ -171,6 +179,14 @@ class RpcShellyCover(ShellyRpcEntity, CoverEntity): return cast(int, self.status["current_pos"]) + @property + def current_cover_tilt_position(self) -> int | None: + """Return current position of cover tilt.""" + if "slat_pos" not in self.status: + return None + + return cast(int, self.status["slat_pos"]) + @property def is_closing(self) -> bool: """Return if the cover is closing.""" @@ -198,3 +214,22 @@ class RpcShellyCover(ShellyRpcEntity, CoverEntity): async def async_stop_cover(self, **_kwargs: Any) -> None: """Stop the cover.""" await self.call_rpc("Cover.Stop", {"id": self._id}) + + async def async_open_cover_tilt(self, **kwargs: Any) -> None: + """Open the cover tilt.""" + await self.call_rpc("Cover.GoToPosition", {"id": self._id, "slat_pos": 100}) + + async def async_close_cover_tilt(self, **kwargs: Any) -> None: + """Close the cover tilt.""" + await self.call_rpc("Cover.GoToPosition", {"id": self._id, "slat_pos": 0}) + + async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: + """Move the cover tilt to a specific position.""" + await self.call_rpc( + "Cover.GoToPosition", + {"id": self._id, "slat_pos": kwargs[ATTR_TILT_POSITION]}, + ) + + async def async_stop_cover_tilt(self, **kwargs: Any) -> None: + """Stop the cover tilt.""" + await self.call_rpc("Cover.Stop", {"id": self._id}) diff --git a/homeassistant/components/shelly/device_trigger.py b/homeassistant/components/shelly/device_trigger.py index 9aa57fa1d15..6e96eb5ed21 100644 --- a/homeassistant/components/shelly/device_trigger.py +++ b/homeassistant/components/shelly/device_trigger.py @@ -6,8 +6,8 @@ from typing import Final import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event as event_trigger diff --git a/homeassistant/components/shelly/entity.py 
b/homeassistant/components/shelly/entity.py index 980a39feaba..aea060e09e2 100644 --- a/homeassistant/components/shelly/entity.py +++ b/homeassistant/components/shelly/entity.py @@ -488,7 +488,7 @@ class ShellyRestAttributeEntity(CoordinatorEntity[ShellyBlockCoordinator]): @property def attribute_value(self) -> StateType: """Value of sensor.""" - if callable(self.entity_description.value): + if self.entity_description.value is not None: self._last_value = self.entity_description.value( self.block_coordinator.device.status, self._last_value ) @@ -518,7 +518,7 @@ class ShellyRpcAttributeEntity(ShellyRpcEntity, Entity): id_key = key.split(":")[-1] self._id = int(id_key) if id_key.isnumeric() else None - if callable(description.unit): + if description.unit is not None: self._attr_native_unit_of_measurement = description.unit( coordinator.device.config[key] ) @@ -544,7 +544,7 @@ class ShellyRpcAttributeEntity(ShellyRpcEntity, Entity): @property def attribute_value(self) -> StateType: """Value of sensor.""" - if callable(self.entity_description.value): + if self.entity_description.value is not None: # using "get" here since subkey might not exist (e.g. "errors" sub_key) self._last_value = self.entity_description.value( self.status.get(self.entity_description.sub_key), self._last_value diff --git a/homeassistant/components/shelly/light.py b/homeassistant/components/shelly/light.py index 24231fbb33a..5d7bad810b4 100644 --- a/homeassistant/components/shelly/light.py +++ b/homeassistant/components/shelly/light.py @@ -34,14 +34,13 @@ from .const import ( RGBW_MODELS, RPC_MIN_TRANSITION_TIME_SEC, SHBLB_1_RGB_EFFECTS, - SHELLY_PLUS_RGBW_CHANNELS, STANDARD_RGB_EFFECTS, ) from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ShellyBlockEntity, ShellyRpcEntity from .utils import ( + async_remove_orphaned_entities, async_remove_shelly_entity, - async_remove_shelly_rpc_entities, brightness_to_percentage, get_device_entry_gen, get_rpc_key_ids, @@ -119,30 +118,25 @@ def async_setup_rpc_entry( ) return + entities: list[RpcShellyLightBase] = [] if light_key_ids := get_rpc_key_ids(coordinator.device.status, "light"): - # Light mode remove RGB & RGBW entities, add light entities - async_remove_shelly_rpc_entities( - hass, LIGHT_DOMAIN, coordinator.mac, ["rgb:0", "rgbw:0"] - ) - async_add_entities(RpcShellyLight(coordinator, id_) for id_ in light_key_ids) - return - - light_keys = [f"light:{i}" for i in range(SHELLY_PLUS_RGBW_CHANNELS)] - + entities.extend(RpcShellyLight(coordinator, id_) for id_ in light_key_ids) + if cct_key_ids := get_rpc_key_ids(coordinator.device.status, "cct"): + entities.extend(RpcShellyCctLight(coordinator, id_) for id_ in cct_key_ids) if rgb_key_ids := get_rpc_key_ids(coordinator.device.status, "rgb"): - # RGB mode remove light & RGBW entities, add RGB entity - async_remove_shelly_rpc_entities( - hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgbw:0"] - ) - async_add_entities(RpcShellyRgbLight(coordinator, id_) for id_ in rgb_key_ids) - return - + entities.extend(RpcShellyRgbLight(coordinator, id_) for id_ in rgb_key_ids) if rgbw_key_ids := get_rpc_key_ids(coordinator.device.status, "rgbw"): - # RGBW mode remove light & RGB entities, add RGBW entity - async_remove_shelly_rpc_entities( - hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgb:0"] - ) - async_add_entities(RpcShellyRgbwLight(coordinator, id_) for id_ in rgbw_key_ids) + entities.extend(RpcShellyRgbwLight(coordinator, id_) for id_ in rgbw_key_ids) + + 
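# --- Editor's note (not part of the patch): illustrative, hedged sketch only. ---
# The RpcShellyCover tilt methods added in the cover.py hunk above all funnel into
# the same RPC call, "Cover.GoToPosition", differing only in the "slat_pos" value
# (100 for open tilt, 0 for closed tilt, otherwise the requested position), with
# "Cover.Stop" for stop. A standalone sketch of that mapping (the helper name and
# its shape are hypothetical, not the integration's API):
from typing import Any


def build_tilt_rpc(
    cover_id: int, action: str, position: int | None = None
) -> tuple[str, dict[str, Any]]:
    """Return the RPC method and params a tilt command would translate to."""
    if action == "stop":
        return "Cover.Stop", {"id": cover_id}
    # "open" and "close" map to fixed slat positions; "set" passes the value through.
    slat_pos = {"open": 100, "close": 0}.get(action, position)
    return "Cover.GoToPosition", {"id": cover_id, "slat_pos": slat_pos}


assert build_tilt_rpc(0, "open") == ("Cover.GoToPosition", {"id": 0, "slat_pos": 100})
assert build_tilt_rpc(0, "set", 42) == ("Cover.GoToPosition", {"id": 0, "slat_pos": 42})
assert build_tilt_rpc(0, "stop") == ("Cover.Stop", {"id": 0})
# --- End editor's note. ---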
async_add_entities(entities) + + async_remove_orphaned_entities( + hass, + config_entry.entry_id, + coordinator.mac, + LIGHT_DOMAIN, + coordinator.device.status, + ) class BlockShellyLight(ShellyBlockEntity, LightEntity): @@ -427,6 +421,9 @@ class RpcShellyLightBase(ShellyRpcEntity, LightEntity): if ATTR_BRIGHTNESS in kwargs: params["brightness"] = brightness_to_percentage(kwargs[ATTR_BRIGHTNESS]) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + params["ct"] = kwargs[ATTR_COLOR_TEMP_KELVIN] + if ATTR_TRANSITION in kwargs: params["transition_duration"] = max( kwargs[ATTR_TRANSITION], RPC_MIN_TRANSITION_TIME_SEC @@ -472,6 +469,29 @@ class RpcShellyLight(RpcShellyLightBase): _attr_supported_features = LightEntityFeature.TRANSITION +class RpcShellyCctLight(RpcShellyLightBase): + """Entity that controls a CCT light on RPC based Shelly devices.""" + + _component = "CCT" + + _attr_color_mode = ColorMode.COLOR_TEMP + _attr_supported_color_modes = {ColorMode.COLOR_TEMP} + _attr_supported_features = LightEntityFeature.TRANSITION + + def __init__(self, coordinator: ShellyRpcCoordinator, id_: int) -> None: + """Initialize light.""" + color_temp_range = coordinator.device.config[f"cct:{id_}"]["ct_range"] + self._attr_min_color_temp_kelvin = color_temp_range[0] + self._attr_max_color_temp_kelvin = color_temp_range[1] + + super().__init__(coordinator, id_) + + @property + def color_temp_kelvin(self) -> int: + """Return the CT color value in Kelvin.""" + return cast(int, self.status["ct"]) + + class RpcShellyRgbLight(RpcShellyLightBase): """Entity that controls a RGB light on RPC based Shelly devices.""" diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index da3bbc4bb6e..3489a2d06d9 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -8,8 +8,7 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioshelly"], - "quality_scale": "platinum", - "requirements": ["aioshelly==11.2.4"], + "requirements": ["aioshelly==12.1.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/shelly/number.py b/homeassistant/components/shelly/number.py index 67c33faf150..2aed38fb723 100644 --- a/homeassistant/components/shelly/number.py +++ b/homeassistant/components/shelly/number.py @@ -35,7 +35,7 @@ from .entity import ( async_setup_entry_rpc, ) from .utils import ( - async_remove_orphaned_virtual_entities, + async_remove_orphaned_entities, get_device_entry_gen, get_virtual_component_ids, ) @@ -115,13 +115,13 @@ async def async_setup_entry( virtual_number_ids = get_virtual_component_ids( coordinator.device.config, NUMBER_PLATFORM ) - async_remove_orphaned_virtual_entities( + async_remove_orphaned_entities( hass, config_entry.entry_id, coordinator.mac, NUMBER_PLATFORM, - "number", virtual_number_ids, + "number", ) return @@ -207,17 +207,17 @@ class RpcNumber(ShellyRpcAttributeEntity, NumberEntity): """Initialize sensor.""" super().__init__(coordinator, key, attribute, description) - if callable(description.max_fn): + if description.max_fn is not None: self._attr_native_max_value = description.max_fn( coordinator.device.config[key] ) - if callable(description.min_fn): + if description.min_fn is not None: self._attr_native_min_value = description.min_fn( coordinator.device.config[key] ) - if callable(description.step_fn): + if description.step_fn is not None: self._attr_native_step = description.step_fn(coordinator.device.config[key]) - if 
callable(description.mode_fn): + if description.mode_fn is not None: self._attr_mode = description.mode_fn(coordinator.device.config[key]) @property diff --git a/homeassistant/components/shelly/select.py b/homeassistant/components/shelly/select.py index 588a49ac017..0caf4661240 100644 --- a/homeassistant/components/shelly/select.py +++ b/homeassistant/components/shelly/select.py @@ -22,7 +22,7 @@ from .entity import ( async_setup_entry_rpc, ) from .utils import ( - async_remove_orphaned_virtual_entities, + async_remove_orphaned_entities, get_device_entry_gen, get_virtual_component_ids, ) @@ -61,13 +61,13 @@ async def async_setup_entry( virtual_text_ids = get_virtual_component_ids( coordinator.device.config, SELECT_PLATFORM ) - async_remove_orphaned_virtual_entities( + async_remove_orphaned_entities( hass, config_entry.entry_id, coordinator.mac, SELECT_PLATFORM, - "enum", virtual_text_ids, + "enum", ) diff --git a/homeassistant/components/shelly/sensor.py b/homeassistant/components/shelly/sensor.py index 0d782f46c24..dd0ace9a6b9 100644 --- a/homeassistant/components/shelly/sensor.py +++ b/homeassistant/components/shelly/sensor.py @@ -53,7 +53,7 @@ from .entity import ( async_setup_entry_rpc, ) from .utils import ( - async_remove_orphaned_virtual_entities, + async_remove_orphaned_entities, get_device_entry_gen, get_device_uptime, get_virtual_component_ids, @@ -392,6 +392,14 @@ RPC_SENSORS: Final = { device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, ), + "power_cct": RpcSensorDescription( + key="cct", + sub_key="apower", + name="Power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), "power_rgb": RpcSensorDescription( key="rgb", sub_key="apower", @@ -552,6 +560,17 @@ RPC_SENSORS: Final = { state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), + "voltage_cct": RpcSensorDescription( + key="cct", + sub_key="voltage", + name="Voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value=lambda status, _: None if status is None else float(status), + suggested_display_precision=1, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), "voltage_rgb": RpcSensorDescription( key="rgb", sub_key="voltage", @@ -641,6 +660,16 @@ RPC_SENSORS: Final = { state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), + "current_cct": RpcSensorDescription( + key="cct", + sub_key="current", + name="Current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value=lambda status, _: None if status is None else float(status), + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), "current_rgb": RpcSensorDescription( key="rgb", sub_key="current", @@ -741,6 +770,17 @@ RPC_SENSORS: Final = { device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, ), + "energy_cct": RpcSensorDescription( + key="cct", + sub_key="aenergy", + name="Energy", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value=lambda status, _: status["total"], + suggested_display_precision=2, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), "energy_rgb": RpcSensorDescription( key="rgb", sub_key="aenergy", @@ -975,6 +1015,19 @@ RPC_SENSORS: Final = { 
entity_category=EntityCategory.DIAGNOSTIC, use_polling_coordinator=True, ), + "temperature_cct": RpcSensorDescription( + key="cct", + sub_key="temperature", + name="Device temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value=lambda status, _: status["tC"], + suggested_display_precision=1, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + entity_category=EntityCategory.DIAGNOSTIC, + use_polling_coordinator=True, + ), "temperature_rgb": RpcSensorDescription( key="rgb", sub_key="temperature", @@ -1066,7 +1119,7 @@ RPC_SENSORS: Final = { "analoginput": RpcSensorDescription( key="input", sub_key="percent", - name="Analog input", + name="analog", native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, removal_condition=lambda config, _, key: ( @@ -1076,7 +1129,7 @@ RPC_SENSORS: Final = { "analoginput_xpercent": RpcSensorDescription( key="input", sub_key="xpercent", - name="Analog value", + name="analog value", removal_condition=lambda config, status, key: ( config[key]["type"] != "analog" or config[key]["enable"] is False @@ -1087,7 +1140,7 @@ RPC_SENSORS: Final = { "pulse_counter": RpcSensorDescription( key="input", sub_key="counts", - name="Pulse counter", + name="pulse counter", native_unit_of_measurement="pulse", state_class=SensorStateClass.TOTAL, value=lambda status, _: status["total"], @@ -1098,7 +1151,7 @@ RPC_SENSORS: Final = { "counter_value": RpcSensorDescription( key="input", sub_key="counts", - name="Counter value", + name="counter value", value=lambda status, _: status["xtotal"], removal_condition=lambda config, status, key: ( config[key]["type"] != "count" @@ -1110,7 +1163,7 @@ RPC_SENSORS: Final = { "counter_frequency": RpcSensorDescription( key="input", sub_key="freq", - name="Pulse counter frequency", + name="pulse counter frequency", native_unit_of_measurement=UnitOfFrequency.HERTZ, state_class=SensorStateClass.MEASUREMENT, removal_condition=lambda config, _, key: ( @@ -1120,7 +1173,7 @@ RPC_SENSORS: Final = { "counter_frequency_value": RpcSensorDescription( key="input", sub_key="xfreq", - name="Pulse counter frequency value", + name="pulse counter frequency value", removal_condition=lambda config, status, key: ( config[key]["type"] != "count" or config[key]["enable"] is False @@ -1174,19 +1227,27 @@ async def async_setup_entry( hass, config_entry, async_add_entities, RPC_SENSORS, RpcSensor ) + async_remove_orphaned_entities( + hass, + config_entry.entry_id, + coordinator.mac, + SENSOR_PLATFORM, + coordinator.device.status, + ) + # the user can remove virtual components from the device configuration, so # we need to remove orphaned entities + virtual_component_ids = get_virtual_component_ids( + coordinator.device.config, SENSOR_PLATFORM + ) for component in ("enum", "number", "text"): - virtual_component_ids = get_virtual_component_ids( - coordinator.device.config, SENSOR_PLATFORM - ) - async_remove_orphaned_virtual_entities( + async_remove_orphaned_entities( hass, config_entry.entry_id, coordinator.mac, SENSOR_PLATFORM, - component, virtual_component_ids, + component, ) return @@ -1266,13 +1327,15 @@ class RpcSensor(ShellyRpcAttributeEntity, SensorEntity): @property def native_value(self) -> StateType: """Return value of sensor.""" - if not self.option_map: - return self.attribute_value + attribute_value = self.attribute_value - if not isinstance(self.attribute_value, str): + if not self.option_map: + return attribute_value + + if not 
isinstance(attribute_value, str): return None - return self.option_map[self.attribute_value] + return self.option_map[attribute_value] class BlockSleepingSensor(ShellySleepingBlockAttributeEntity, RestoreSensor): diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index f76319eb08c..eb869b54e4c 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -28,7 +28,7 @@ "confirm_discovery": { "description": "Do you want to set up the {model} at {host}?\n\nBattery-powered devices that are password protected must be woken up before continuing with setting up.\nBattery-powered devices that are not password protected will be added when the device wakes up, you can now manually wake the device up using a button on it or wait for the next data update from the device." }, - "reconfigure_confirm": { + "reconfigure": { "description": "Update configuration for {device_name}.\n\nBefore setup, battery-powered devices must be woken up, you can now wake the device up using a button on it.", "data": { "host": "[%key:common::config_flow::data::host%]", @@ -45,7 +45,8 @@ "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]", "firmware_not_fully_provisioned": "Device not fully provisioned. Please contact Shelly support", - "custom_port_not_supported": "Gen1 device does not support custom port." + "custom_port_not_supported": "Gen1 device does not support custom port.", + "mac_address_mismatch": "The MAC address of the device does not match the one in the configuration, please reboot the device and try again." }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", @@ -53,7 +54,8 @@ "reauth_unsuccessful": "Re-authentication was unsuccessful, please remove the integration and set it up again.", "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "another_device": "Re-configuration was unsuccessful, the IP address/hostname of another Shelly device was used.", - "ipv6_not_supported": "IPv6 is not supported." 
+ "ipv6_not_supported": "IPv6 is not supported.", + "mac_address_mismatch": "[%key:component::shelly::config::error::mac_address_mismatch%]" } }, "device_automation": { diff --git a/homeassistant/components/shelly/switch.py b/homeassistant/components/shelly/switch.py index 2b9b1cadc69..134704cb0ff 100644 --- a/homeassistant/components/shelly/switch.py +++ b/homeassistant/components/shelly/switch.py @@ -32,7 +32,7 @@ from .entity import ( async_setup_rpc_attribute_entities, ) from .utils import ( - async_remove_orphaned_virtual_entities, + async_remove_orphaned_entities, async_remove_shelly_entity, get_device_entry_gen, get_rpc_key_ids, @@ -66,6 +66,13 @@ RPC_VIRTUAL_SWITCH = RpcSwitchDescription( sub_key="value", ) +RPC_SCRIPT_SWITCH = RpcSwitchDescription( + key="script", + sub_key="running", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, +) + async def async_setup_entry( hass: HomeAssistant, @@ -176,18 +183,37 @@ def async_setup_rpc_entry( RpcVirtualSwitch, ) + async_setup_rpc_attribute_entities( + hass, + config_entry, + async_add_entities, + {"script": RPC_SCRIPT_SWITCH}, + RpcScriptSwitch, + ) + # the user can remove virtual components from the device configuration, so we need # to remove orphaned entities virtual_switch_ids = get_virtual_component_ids( coordinator.device.config, SWITCH_PLATFORM ) - async_remove_orphaned_virtual_entities( + async_remove_orphaned_entities( hass, config_entry.entry_id, coordinator.mac, SWITCH_PLATFORM, - "boolean", virtual_switch_ids, + "boolean", + ) + + # if the script is removed, from the device configuration, we need + # to remove orphaned entities + async_remove_orphaned_entities( + hass, + config_entry.entry_id, + coordinator.mac, + SWITCH_PLATFORM, + coordinator.device.status, + "script", ) if not switch_ids: @@ -317,3 +343,23 @@ class RpcVirtualSwitch(ShellyRpcAttributeEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn off relay.""" await self.call_rpc("Boolean.Set", {"id": self._id, "value": False}) + + +class RpcScriptSwitch(ShellyRpcAttributeEntity, SwitchEntity): + """Entity that controls a script component on RPC based Shelly devices.""" + + entity_description: RpcSwitchDescription + _attr_has_entity_name = True + + @property + def is_on(self) -> bool: + """If switch is on.""" + return bool(self.status["running"]) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on relay.""" + await self.call_rpc("Script.Start", {"id": self._id}) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off relay.""" + await self.call_rpc("Script.Stop", {"id": self._id}) diff --git a/homeassistant/components/shelly/text.py b/homeassistant/components/shelly/text.py index ec290def45d..66e2ee4c715 100644 --- a/homeassistant/components/shelly/text.py +++ b/homeassistant/components/shelly/text.py @@ -22,7 +22,7 @@ from .entity import ( async_setup_entry_rpc, ) from .utils import ( - async_remove_orphaned_virtual_entities, + async_remove_orphaned_entities, get_device_entry_gen, get_virtual_component_ids, ) @@ -61,13 +61,13 @@ async def async_setup_entry( virtual_text_ids = get_virtual_component_ids( coordinator.device.config, TEXT_PLATFORM ) - async_remove_orphaned_virtual_entities( + async_remove_orphaned_entities( hass, config_entry.entry_id, coordinator.mac, TEXT_PLATFORM, - "text", virtual_text_ids, + "text", ) diff --git a/homeassistant/components/shelly/update.py b/homeassistant/components/shelly/update.py index 0678da44472..f22547acf50 100644 --- 
a/homeassistant/components/shelly/update.py +++ b/homeassistant/components/shelly/update.py @@ -9,6 +9,7 @@ from typing import Any, Final, cast from aioshelly.const import RPC_GENERATIONS from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError +from awesomeversion import AwesomeVersion, AwesomeVersionStrategy from homeassistant.components.update import ( ATTR_INSTALLED_VERSION, @@ -58,7 +59,7 @@ class RestUpdateDescription(RestEntityDescription, UpdateEntityDescription): REST_UPDATES: Final = { "fwupdate": RestUpdateDescription( - name="Firmware update", + name="Firmware", key="fwupdate", latest_version=lambda status: status["update"]["new_version"], beta=False, @@ -67,7 +68,7 @@ REST_UPDATES: Final = { entity_registry_enabled_default=False, ), "fwupdate_beta": RestUpdateDescription( - name="Beta firmware update", + name="Beta firmware", key="fwupdate", latest_version=lambda status: status["update"].get("beta_version"), beta=True, @@ -79,7 +80,7 @@ REST_UPDATES: Final = { RPC_UPDATES: Final = { "fwupdate": RpcUpdateDescription( - name="Firmware update", + name="Firmware", key="sys", sub_key="available_updates", latest_version=lambda status: status.get("stable", {"version": ""})["version"], @@ -88,7 +89,7 @@ RPC_UPDATES: Final = { entity_category=EntityCategory.CONFIG, ), "fwupdate_beta": RpcUpdateDescription( - name="Beta firmware update", + name="Beta firmware", key="sys", sub_key="available_updates", latest_version=lambda status: status.get("beta", {"version": ""})["version"], @@ -203,6 +204,22 @@ class RestUpdateEntity(ShellyRestAttributeEntity, UpdateEntity): else: LOGGER.debug("Result of OTA update call: %s", result) + def version_is_newer(self, latest_version: str, installed_version: str) -> bool: + """Return True if available version is newer then installed version. + + Default strategy generate an exception with Shelly firmware format + thus making the entity state always true. 
+ """ + return AwesomeVersion( + latest_version, + find_first_match=True, + ensure_strategy=[AwesomeVersionStrategy.SEMVER], + ) > AwesomeVersion( + installed_version, + find_first_match=True, + ensure_strategy=[AwesomeVersionStrategy.SEMVER], + ) + class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): """Represent a RPC update entity.""" @@ -221,7 +238,8 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): ) -> None: """Initialize update entity.""" super().__init__(coordinator, key, attribute, description) - self._ota_in_progress: bool | int = False + self._ota_in_progress = False + self._ota_progress_percentage: int | None = None self._attr_release_url = get_release_url( coordinator.device.gen, coordinator.model, description.beta ) @@ -239,11 +257,12 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): if self.in_progress is not False: event_type = event["event"] if event_type == OTA_BEGIN: - self._ota_in_progress = 0 + self._ota_progress_percentage = 0 elif event_type == OTA_PROGRESS: - self._ota_in_progress = event["progress_percent"] + self._ota_progress_percentage = event["progress_percent"] elif event_type in (OTA_ERROR, OTA_SUCCESS): self._ota_in_progress = False + self._ota_progress_percentage = None self.async_write_ha_state() @property @@ -261,10 +280,15 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): return self.installed_version @property - def in_progress(self) -> bool | int: + def in_progress(self) -> bool: """Update installation in progress.""" return self._ota_in_progress + @property + def update_percentage(self) -> int | None: + """Update installation progress.""" + return self._ota_progress_percentage + async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: @@ -293,6 +317,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): await self.coordinator.async_shutdown_device_and_start_reauth() else: self._ota_in_progress = True + self._ota_progress_percentage = None LOGGER.debug("OTA update call for %s successful", self.coordinator.name) diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index d0a8a1230c5..df374624e3d 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -2,9 +2,9 @@ from __future__ import annotations +from collections.abc import Iterable from datetime import datetime, timedelta from ipaddress import IPv4Address, IPv6Address, ip_address -import re from types import MappingProxyType from typing import Any, cast @@ -43,6 +43,7 @@ from homeassistant.util.dt import utcnow from .const import ( API_WS_URL, BASIC_INPUTS_EVENTS_TYPES, + COMPONENT_ID_PATTERN, CONF_COAP_PORT, CONF_GEN, DEVICES_WITHOUT_FIRMWARE_CHANGELOG, @@ -319,15 +320,19 @@ def get_rpc_channel_name(device: RpcDevice, key: str) -> str: device_name = device.name entity_name: str | None = None if key in device.config: - entity_name = device.config[key].get("name", device_name) + entity_name = device.config[key].get("name") if entity_name is None: - if key.startswith(("input:", "light:", "switch:")): - return f"{device_name} {key.replace(':', '_')}" + channel = key.split(":")[0] + channel_id = key.split(":")[-1] + if key.startswith(("cover:", "input:", "light:", "switch:", "thermostat:")): + return f"{device_name} {channel.title()} {channel_id}" + if key.startswith(("cct", "rgb:", "rgbw:")): + return f"{device_name} {channel.upper()} light {channel_id}" if key.startswith("em1"): - return f"{device_name} 
EM{key.split(':')[-1]}" + return f"{device_name} EM{channel_id}" if key.startswith(("boolean:", "enum:", "number:", "text:")): - return key.replace(":", " ").title() + return f"{channel.title()} {channel_id}" return device_name return entity_name @@ -540,15 +545,15 @@ def get_virtual_component_ids(config: dict[str, Any], platform: str) -> list[str @callback -def async_remove_orphaned_virtual_entities( +def async_remove_orphaned_entities( hass: HomeAssistant, config_entry_id: str, mac: str, platform: str, - virt_comp_type: str, - virt_comp_ids: list[str], + keys: Iterable[str], + key_suffix: str | None = None, ) -> None: - """Remove orphaned virtual entities.""" + """Remove orphaned entities.""" orphaned_entities = [] entity_reg = er.async_get(hass) device_reg = dr.async_get(hass) @@ -563,14 +568,15 @@ def async_remove_orphaned_virtual_entities( for entity in entities: if not entity.entity_id.startswith(platform): continue - if virt_comp_type not in entity.unique_id: + if key_suffix is not None and key_suffix not in entity.unique_id: continue - # we are looking for the component ID, e.g. boolean:201 - if not (match := re.search(r"[a-z]+:\d+", entity.unique_id)): + # we are looking for the component ID, e.g. boolean:201, em1data:1 + if not (match := COMPONENT_ID_PATTERN.search(entity.unique_id)): continue - virt_comp_id = match.group() - if virt_comp_id not in virt_comp_ids: - orphaned_entities.append(f"{virt_comp_id}-{virt_comp_type}") + + key = match.group() + if key not in keys: + orphaned_entities.append(entity.unique_id.split("-", 1)[1]) if orphaned_entities: async_remove_shelly_rpc_entities(hass, platform, mac, orphaned_entities) diff --git a/homeassistant/components/shodan/manifest.json b/homeassistant/components/shodan/manifest.json index 9155311a2ad..afd75e3fed5 100644 --- a/homeassistant/components/shodan/manifest.json +++ b/homeassistant/components/shodan/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/shodan", "iot_class": "cloud_polling", "loggers": ["shodan"], + "quality_scale": "legacy", "requirements": ["shodan==1.28.0"] } diff --git a/homeassistant/components/shopping_list/__init__.py b/homeassistant/components/shopping_list/__init__.py index 20d3078228c..531bbf37980 100644 --- a/homeassistant/components/shopping_list/__init__.py +++ b/homeassistant/components/shopping_list/__init__.py @@ -320,15 +320,15 @@ class ShoppingData: # Remove the item from mapping after it's appended in the result array. del all_items_mapping[item_id] # Append the rest of the items - for key in all_items_mapping: + for value in all_items_mapping.values(): # All the unchecked items must be passed in the item_ids array, # so all items left in the mapping should be checked items. - if all_items_mapping[key]["complete"] is False: + if value["complete"] is False: raise vol.Invalid( "The item ids array doesn't contain all the unchecked shopping list" " items." 
) - new_items.append(all_items_mapping[key]) + new_items.append(value) self.items = new_items self.hass.async_add_executor_job(self.save) self._async_notify() diff --git a/homeassistant/components/shopping_list/icons.json b/homeassistant/components/shopping_list/icons.json index 7de3eb1b948..9b3d8a08a79 100644 --- a/homeassistant/components/shopping_list/icons.json +++ b/homeassistant/components/shopping_list/icons.json @@ -7,13 +7,29 @@ } }, "services": { - "add_item": "mdi:cart-plus", - "remove_item": "mdi:cart-remove", - "complete_item": "mdi:cart-check", - "incomplete_item": "mdi:cart-off", - "complete_all": "mdi:cart-check", - "incomplete_all": "mdi:cart-off", - "clear_completed_items": "mdi:cart-remove", - "sort": "mdi:sort" + "add_item": { + "service": "mdi:cart-plus" + }, + "remove_item": { + "service": "mdi:cart-remove" + }, + "complete_item": { + "service": "mdi:cart-check" + }, + "incomplete_item": { + "service": "mdi:cart-off" + }, + "complete_all": { + "service": "mdi:cart-check" + }, + "incomplete_all": { + "service": "mdi:cart-off" + }, + "clear_completed_items": { + "service": "mdi:cart-remove" + }, + "sort": { + "service": "mdi:sort" + } } } diff --git a/homeassistant/components/shopping_list/intent.py b/homeassistant/components/shopping_list/intent.py index d45085be5fa..1a6370f4168 100644 --- a/homeassistant/components/shopping_list/intent.py +++ b/homeassistant/components/shopping_list/intent.py @@ -29,7 +29,7 @@ class AddItemIntent(intent.IntentHandler): async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: """Handle the intent.""" slots = self.async_validate_slots(intent_obj.slots) - item = slots["item"]["value"] + item = slots["item"]["value"].strip() await intent_obj.hass.data[DOMAIN].async_add(item) response = intent_obj.create_response() @@ -53,10 +53,8 @@ class ListTopItemsIntent(intent.IntentHandler): if not items: response.async_set_speech("There are no items on your shopping list") else: + items_list = ", ".join(itm["name"] for itm in reversed(items)) response.async_set_speech( - "These are the top {} items on your shopping list: {}".format( - min(len(items), 5), - ", ".join(itm["name"] for itm in reversed(items)), - ) + f"These are the top {min(len(items), 5)} items on your shopping list: {items_list}" ) return response diff --git a/homeassistant/components/shopping_list/strings.json b/homeassistant/components/shopping_list/strings.json index c184a1d2227..8618d9241b4 100644 --- a/homeassistant/components/shopping_list/strings.json +++ b/homeassistant/components/shopping_list/strings.json @@ -62,7 +62,7 @@ }, "clear_completed_items": { "name": "Clear completed items", - "description": "Clears completed items from the shopping list." + "description": "Removes completed items from the shopping list." 
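# --- Editor's note (not part of the patch): illustrative, hedged sketch only. ---
# The version_is_newer override added to RestUpdateEntity in the update.py hunk
# above compares firmware versions with awesomeversion pinned to the SEMVER
# strategy plus find_first_match=True; per the docstring in that hunk, the default
# strategy can fail on the Shelly firmware format and leave the entity always
# reporting an update. A standalone sketch of the same comparison (the version
# strings below are made-up examples):
from awesomeversion import AwesomeVersion, AwesomeVersionStrategy


def semver_is_newer(latest: str, installed: str) -> bool:
    """Return True if latest is a newer semver than installed."""
    return AwesomeVersion(
        latest,
        ensure_strategy=[AwesomeVersionStrategy.SEMVER],
        find_first_match=True,
    ) > AwesomeVersion(
        installed,
        ensure_strategy=[AwesomeVersionStrategy.SEMVER],
        find_first_match=True,
    )


assert semver_is_newer("1.4.4", "1.2.0")
assert not semver_is_newer("1.2.0", "1.4.4")
# --- End editor's note. ---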
}, "sort": { "name": "Sort all items", diff --git a/homeassistant/components/sia/alarm_control_panel.py b/homeassistant/components/sia/alarm_control_panel.py index 42ce81cbfc1..7ea878f538d 100644 --- a/homeassistant/components/sia/alarm_control_panel.py +++ b/homeassistant/components/sia/alarm_control_panel.py @@ -4,28 +4,22 @@ from __future__ import annotations from dataclasses import dataclass import logging +from typing import TYPE_CHECKING from pysiaalarm import SIAEvent from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityDescription, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - STATE_UNAVAILABLE, -) +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType from .const import CONF_ACCOUNT, CONF_ACCOUNTS, CONF_ZONES, KEY_ALARM, PREVIOUS_STATE -from .sia_entity_base import SIABaseEntity, SIAEntityDescription +from .entity import SIABaseEntity, SIAEntityDescription _LOGGER = logging.getLogger(__name__) @@ -41,31 +35,32 @@ class SIAAlarmControlPanelEntityDescription( ENTITY_DESCRIPTION_ALARM = SIAAlarmControlPanelEntityDescription( key=KEY_ALARM, code_consequences={ - "PA": STATE_ALARM_TRIGGERED, - "JA": STATE_ALARM_TRIGGERED, - "TA": STATE_ALARM_TRIGGERED, - "BA": STATE_ALARM_TRIGGERED, - "CA": STATE_ALARM_ARMED_AWAY, - "CB": STATE_ALARM_ARMED_AWAY, - "CG": STATE_ALARM_ARMED_AWAY, - "CL": STATE_ALARM_ARMED_AWAY, - "CP": STATE_ALARM_ARMED_AWAY, - "CQ": STATE_ALARM_ARMED_AWAY, - "CS": STATE_ALARM_ARMED_AWAY, - "CF": STATE_ALARM_ARMED_CUSTOM_BYPASS, - "NP": STATE_ALARM_DISARMED, - "NO": STATE_ALARM_DISARMED, - "OA": STATE_ALARM_DISARMED, - "OB": STATE_ALARM_DISARMED, - "OG": STATE_ALARM_DISARMED, - "OP": STATE_ALARM_DISARMED, - "OQ": STATE_ALARM_DISARMED, - "OR": STATE_ALARM_DISARMED, - "OS": STATE_ALARM_DISARMED, - "NC": STATE_ALARM_ARMED_NIGHT, - "NL": STATE_ALARM_ARMED_NIGHT, - "NE": STATE_ALARM_ARMED_NIGHT, - "NF": STATE_ALARM_ARMED_NIGHT, + "PA": AlarmControlPanelState.TRIGGERED, + "JA": AlarmControlPanelState.TRIGGERED, + "TA": AlarmControlPanelState.TRIGGERED, + "BA": AlarmControlPanelState.TRIGGERED, + "HA": AlarmControlPanelState.TRIGGERED, + "CA": AlarmControlPanelState.ARMED_AWAY, + "CB": AlarmControlPanelState.ARMED_AWAY, + "CG": AlarmControlPanelState.ARMED_AWAY, + "CL": AlarmControlPanelState.ARMED_AWAY, + "CP": AlarmControlPanelState.ARMED_AWAY, + "CQ": AlarmControlPanelState.ARMED_AWAY, + "CS": AlarmControlPanelState.ARMED_AWAY, + "CF": AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + "NP": AlarmControlPanelState.DISARMED, + "NO": AlarmControlPanelState.DISARMED, + "OA": AlarmControlPanelState.DISARMED, + "OB": AlarmControlPanelState.DISARMED, + "OG": AlarmControlPanelState.DISARMED, + "OP": AlarmControlPanelState.DISARMED, + "OQ": AlarmControlPanelState.DISARMED, + "OR": AlarmControlPanelState.DISARMED, + "OS": AlarmControlPanelState.DISARMED, + "NC": AlarmControlPanelState.ARMED_NIGHT, + "NL": AlarmControlPanelState.ARMED_NIGHT, + "NE": AlarmControlPanelState.ARMED_NIGHT, + "NF": AlarmControlPanelState.ARMED_NIGHT, "BR": PREVIOUS_STATE, }, ) @@ -109,13 +104,17 @@ class SIAAlarmControlPanel(SIABaseEntity, AlarmControlPanelEntity): 
entity_description, ) - self._attr_state: StateType = None - self._old_state: StateType = None + self._attr_alarm_state: AlarmControlPanelState | None = None + self._old_state: AlarmControlPanelState | None = None def handle_last_state(self, last_state: State | None) -> None: """Handle the last state.""" - if last_state is not None: - self._attr_state = last_state.state + self._attr_alarm_state = None + if last_state is not None and last_state.state not in ( + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): + self._attr_alarm_state = AlarmControlPanelState(last_state.state) if self.state == STATE_UNAVAILABLE: self._attr_available = False @@ -132,5 +131,7 @@ class SIAAlarmControlPanel(SIABaseEntity, AlarmControlPanelEntity): _LOGGER.debug("New state will be %s", new_state) if new_state == PREVIOUS_STATE: new_state = self._old_state - self._attr_state, self._old_state = new_state, self._attr_state + if TYPE_CHECKING: + assert isinstance(new_state, AlarmControlPanelState) + self._attr_alarm_state, self._old_state = new_state, self._attr_alarm_state return True diff --git a/homeassistant/components/sia/binary_sensor.py b/homeassistant/components/sia/binary_sensor.py index 307b5073e90..4c8e4ca6130 100644 --- a/homeassistant/components/sia/binary_sensor.py +++ b/homeassistant/components/sia/binary_sensor.py @@ -28,7 +28,7 @@ from .const import ( KEY_SMOKE, SIA_HUB_ZONE, ) -from .sia_entity_base import SIABaseEntity, SIAEntityDescription +from .entity import SIABaseEntity, SIAEntityDescription _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/sia/config_flow.py b/homeassistant/components/sia/config_flow.py index cb451133d41..a23978145e7 100644 --- a/homeassistant/components/sia/config_flow.py +++ b/homeassistant/components/sia/config_flow.py @@ -181,7 +181,6 @@ class SIAOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize SIA options flow.""" - self.config_entry = config_entry self.options = deepcopy(dict(config_entry.options)) self.hub: SIAHub | None = None self.accounts_todo: list = [] diff --git a/homeassistant/components/sia/sia_entity_base.py b/homeassistant/components/sia/entity.py similarity index 97% rename from homeassistant/components/sia/sia_entity_base.py rename to homeassistant/components/sia/entity.py index aecac2b540b..48af8e0beb4 100644 --- a/homeassistant/components/sia/sia_entity_base.py +++ b/homeassistant/components/sia/entity.py @@ -8,6 +8,7 @@ import logging from pysiaalarm import SIAEvent +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PORT from homeassistant.core import CALLBACK_TYPE, State, callback @@ -40,7 +41,7 @@ _LOGGER = logging.getLogger(__name__) class SIARequiredKeysMixin: """Required keys for SIA entities.""" - code_consequences: dict[str, StateType | bool] + code_consequences: dict[str, StateType | bool | AlarmControlPanelState] @dataclass(frozen=True) diff --git a/homeassistant/components/sigfox/manifest.json b/homeassistant/components/sigfox/manifest.json index 3b581e4a081..f3f44bf8979 100644 --- a/homeassistant/components/sigfox/manifest.json +++ b/homeassistant/components/sigfox/manifest.json @@ -3,5 +3,6 @@ "name": "Sigfox", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/sigfox", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git 
a/homeassistant/components/sighthound/image_processing.py b/homeassistant/components/sighthound/image_processing.py index 706a8dd037a..acc8309af26 100644 --- a/homeassistant/components/sighthound/image_processing.py +++ b/homeassistant/components/sighthound/image_processing.py @@ -157,7 +157,7 @@ class SighthoundEntity(ImageProcessingEntity): if self._save_timestamped_file: timestamp_save_path = directory / f"{self._name}_{self._last_detection}.jpg" img.save(timestamp_save_path) - _LOGGER.info("Sighthound saved file %s", timestamp_save_path) + _LOGGER.debug("Sighthound saved file %s", timestamp_save_path) @property def camera_entity(self): diff --git a/homeassistant/components/sighthound/manifest.json b/homeassistant/components/sighthound/manifest.json index 875c98acb6d..1efd572425b 100644 --- a/homeassistant/components/sighthound/manifest.json +++ b/homeassistant/components/sighthound/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sighthound", "iot_class": "cloud_polling", "loggers": ["simplehound"], - "requirements": ["Pillow==10.4.0", "simplehound==0.3"] + "quality_scale": "legacy", + "requirements": ["Pillow==11.0.0", "simplehound==0.3"] } diff --git a/homeassistant/components/signal_messenger/manifest.json b/homeassistant/components/signal_messenger/manifest.json index 217109bfa2c..5ff63052691 100644 --- a/homeassistant/components/signal_messenger/manifest.json +++ b/homeassistant/components/signal_messenger/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/signal_messenger", "iot_class": "cloud_push", "loggers": ["pysignalclirestapi"], + "quality_scale": "legacy", "requirements": ["pysignalclirestapi==0.3.24"] } diff --git a/homeassistant/components/signal_messenger/notify.py b/homeassistant/components/signal_messenger/notify.py index 9321bc3232f..53a255da5ff 100644 --- a/homeassistant/components/signal_messenger/notify.py +++ b/homeassistant/components/signal_messenger/notify.py @@ -166,12 +166,11 @@ class SignalNotificationService(BaseNotificationService): and int(str(resp.headers.get("Content-Length"))) > attachment_size_limit ): + content_length = int(str(resp.headers.get("Content-Length"))) raise ValueError( # noqa: TRY301 - "Attachment too large (Content-Length reports {}). Max size: {}" - " bytes".format( - int(str(resp.headers.get("Content-Length"))), - CONF_MAX_ALLOWED_DOWNLOAD_SIZE_BYTES, - ) + "Attachment too large (Content-Length reports " + f"{content_length}). 
Max size: " + f"{CONF_MAX_ALLOWED_DOWNLOAD_SIZE_BYTES} bytes" ) size = 0 diff --git a/homeassistant/components/simplisafe/__init__.py b/homeassistant/components/simplisafe/__init__.py index b23358c985f..b72519f9734 100644 --- a/homeassistant/components/simplisafe/__init__.py +++ b/homeassistant/components/simplisafe/__init__.py @@ -3,12 +3,11 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Coroutine, Iterable +from collections.abc import Callable, Coroutine from datetime import timedelta from typing import Any, cast from simplipy import API -from simplipy.device import Device, DeviceTypes from simplipy.errors import ( EndpointUnavailableError, InvalidCredentialsError, @@ -31,14 +30,8 @@ from simplipy.system.v3 import ( from simplipy.websocket import ( EVENT_AUTOMATIC_TEST, EVENT_CAMERA_MOTION_DETECTED, - EVENT_CONNECTION_LOST, - EVENT_CONNECTION_RESTORED, EVENT_DEVICE_TEST, EVENT_DOORBELL_DETECTED, - EVENT_LOCK_LOCKED, - EVENT_LOCK_UNLOCKED, - EVENT_POWER_OUTAGE, - EVENT_POWER_RESTORED, EVENT_SECRET_ALERT_TRIGGERED, EVENT_SENSOR_PAIRED_AND_NAMED, EVENT_USER_INITIATED_TEST, @@ -67,20 +60,12 @@ from homeassistant.helpers import ( config_validation as cv, device_registry as dr, ) -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.service import ( async_register_admin_service, verify_domain_control, ) -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( ATTR_ALARM_DURATION, @@ -90,8 +75,14 @@ from .const import ( ATTR_ENTRY_DELAY_HOME, ATTR_EXIT_DELAY_AWAY, ATTR_EXIT_DELAY_HOME, + ATTR_LAST_EVENT_INFO, + ATTR_LAST_EVENT_SENSOR_NAME, + ATTR_LAST_EVENT_SENSOR_TYPE, + ATTR_LAST_EVENT_TIMESTAMP, ATTR_LIGHT, + ATTR_SYSTEM_ID, ATTR_VOICE_PROMPT_VOLUME, + DISPATCHER_TOPIC_WEBSOCKET_EVENT, DOMAIN, LOGGER, ) @@ -99,27 +90,18 @@ from .typing import SystemType ATTR_CATEGORY = "category" ATTR_LAST_EVENT_CHANGED_BY = "last_event_changed_by" -ATTR_LAST_EVENT_INFO = "last_event_info" -ATTR_LAST_EVENT_SENSOR_NAME = "last_event_sensor_name" ATTR_LAST_EVENT_SENSOR_SERIAL = "last_event_sensor_serial" -ATTR_LAST_EVENT_SENSOR_TYPE = "last_event_sensor_type" -ATTR_LAST_EVENT_TIMESTAMP = "last_event_timestamp" ATTR_LAST_EVENT_TYPE = "last_event_type" ATTR_LAST_EVENT_TYPE = "last_event_type" ATTR_MESSAGE = "message" ATTR_PIN_LABEL = "label" ATTR_PIN_LABEL_OR_VALUE = "label_or_pin" ATTR_PIN_VALUE = "pin" -ATTR_SYSTEM_ID = "system_id" ATTR_TIMESTAMP = "timestamp" -DEFAULT_CONFIG_URL = "https://webapp.simplisafe.com/new/#/dashboard" -DEFAULT_ENTITY_MODEL = "Alarm control panel" -DEFAULT_ERROR_THRESHOLD = 2 DEFAULT_SCAN_INTERVAL = timedelta(seconds=30) DEFAULT_SOCKET_MIN_RETRY = 15 -DISPATCHER_TOPIC_WEBSOCKET_EVENT = "simplisafe_websocket_event_{0}" EVENT_SIMPLISAFE_EVENT = "SIMPLISAFE_EVENT" EVENT_SIMPLISAFE_NOTIFICATION = "SIMPLISAFE_NOTIFICATION" @@ -201,7 +183,6 @@ SERVICE_SET_SYSTEM_PROPERTIES_SCHEMA = vol.Schema( } ) -WEBSOCKET_EVENTS_REQUIRING_SERIAL = [EVENT_LOCK_LOCKED, EVENT_LOCK_UNLOCKED] WEBSOCKET_EVENTS_TO_FIRE_HASS_EVENT = [ EVENT_AUTOMATIC_TEST, EVENT_CAMERA_MOTION_DETECTED, @@ -504,7 +485,7 @@ class SimpliSafe: except Exception as err: # noqa: BLE001 LOGGER.error("Unknown 
exception while connecting to websocket: %s", err) - LOGGER.info("Reconnecting to websocket") + LOGGER.warning("Reconnecting to websocket") await self._async_cancel_websocket_loop() self._websocket_reconnect_task = self._hass.async_create_task( self._async_start_websocket_loop() @@ -604,7 +585,7 @@ class SimpliSafe: @callback def async_save_refresh_token(token: str) -> None: """Save a refresh token to the config entry.""" - LOGGER.info("Saving new refresh token to HASS storage") + LOGGER.debug("Saving new refresh token to HASS storage") self._hass.config_entries.async_update_entry( self.entry, data={**self.entry.data, CONF_TOKEN: token}, @@ -647,198 +628,7 @@ class SimpliSafe: # In case the user attempts an action not allowed in their current plan, # we merely log that message at INFO level (so the user is aware, # but not spammed with ERROR messages that they cannot change): - LOGGER.info(result) + LOGGER.debug(result) if isinstance(result, SimplipyError): raise UpdateFailed(f"SimpliSafe error while updating: {result}") - - -class SimpliSafeEntity(CoordinatorEntity[DataUpdateCoordinator[None]]): - """Define a base SimpliSafe entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - simplisafe: SimpliSafe, - system: SystemType, - *, - device: Device | None = None, - additional_websocket_events: Iterable[str] | None = None, - ) -> None: - """Initialize.""" - assert simplisafe.coordinator - super().__init__(simplisafe.coordinator) - - # SimpliSafe can incorrectly return an error state when there isn't any - # error. This can lead to entities having an unknown state frequently. - # To protect against that, we measure an error count for each entity and only - # mark the state as unavailable if we detect a few in a row: - self._error_count = 0 - - if device: - model = device.type.name.capitalize().replace("_", " ") - device_name = f"{device.name.capitalize()} {model}" - serial = device.serial - else: - model = device_name = DEFAULT_ENTITY_MODEL - serial = system.serial - - event = simplisafe.initial_event_to_use[system.system_id] - - if raw_type := event.get("sensorType"): - try: - device_type = DeviceTypes(raw_type) - except ValueError: - device_type = DeviceTypes.UNKNOWN - else: - device_type = DeviceTypes.UNKNOWN - - self._attr_extra_state_attributes = { - ATTR_LAST_EVENT_INFO: event.get("info"), - ATTR_LAST_EVENT_SENSOR_NAME: event.get("sensorName"), - ATTR_LAST_EVENT_SENSOR_TYPE: device_type.name.lower(), - ATTR_LAST_EVENT_TIMESTAMP: event.get("eventTimestamp"), - ATTR_SYSTEM_ID: system.system_id, - } - - self._attr_device_info = DeviceInfo( - configuration_url=DEFAULT_CONFIG_URL, - identifiers={(DOMAIN, serial)}, - manufacturer="SimpliSafe", - model=model, - name=device_name, - via_device=(DOMAIN, str(system.system_id)), - ) - - self._attr_unique_id = serial - self._device = device - self._online = True - self._simplisafe = simplisafe - self._system = system - self._websocket_events_to_listen_for = [ - EVENT_CONNECTION_LOST, - EVENT_CONNECTION_RESTORED, - EVENT_POWER_OUTAGE, - EVENT_POWER_RESTORED, - ] - if additional_websocket_events: - self._websocket_events_to_listen_for += additional_websocket_events - - @property - def available(self) -> bool: - """Return whether the entity is available.""" - # We can easily detect if the V3 system is offline, but no simple check exists - # for the V2 system. Therefore, assuming the coordinator hasn't failed, we mark - # the entity as available if: - # 1. We can verify that the system is online (assuming True if we can't) - # 2. 
We can verify that the entity is online - if isinstance(self._system, SystemV3): - system_offline = self._system.offline - else: - system_offline = False - - return ( - self._error_count < DEFAULT_ERROR_THRESHOLD - and self._online - and not system_offline - ) - - @callback - def _handle_coordinator_update(self) -> None: - """Update the entity with new REST API data.""" - if self.coordinator.last_update_success: - self.async_reset_error_count() - else: - self.async_increment_error_count() - - self.async_update_from_rest_api() - self.async_write_ha_state() - - @callback - def _handle_websocket_update(self, event: WebsocketEvent) -> None: - """Update the entity with new websocket data.""" - # Ignore this event if it belongs to a system other than this one: - if event.system_id != self._system.system_id: - return - - # Ignore this event if this entity hasn't expressed interest in its type: - if event.event_type not in self._websocket_events_to_listen_for: - return - - # Ignore this event if it belongs to a entity with a different serial - # number from this one's: - if ( - self._device - and event.event_type in WEBSOCKET_EVENTS_REQUIRING_SERIAL - and event.sensor_serial != self._device.serial - ): - return - - sensor_type: str | None - if event.sensor_type: - sensor_type = event.sensor_type.name - else: - sensor_type = None - - self._attr_extra_state_attributes.update( - { - ATTR_LAST_EVENT_INFO: event.info, - ATTR_LAST_EVENT_SENSOR_NAME: event.sensor_name, - ATTR_LAST_EVENT_SENSOR_TYPE: sensor_type, - ATTR_LAST_EVENT_TIMESTAMP: event.timestamp, - } - ) - - # It's unknown whether these events reach the base station (since the connection - # is lost); we include this for completeness and coverage: - if event.event_type in (EVENT_CONNECTION_LOST, EVENT_POWER_OUTAGE): - self._online = False - return - - # If the base station comes back online, set entities to available, but don't - # instruct the entities to update their state (since there won't be anything new - # until the next websocket event or REST API update: - if event.event_type in (EVENT_CONNECTION_RESTORED, EVENT_POWER_RESTORED): - self._online = True - return - - self.async_update_from_websocket_event(event) - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - await super().async_added_to_hass() - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - DISPATCHER_TOPIC_WEBSOCKET_EVENT.format(self._system.system_id), - self._handle_websocket_update, - ) - ) - - self.async_update_from_rest_api() - - @callback - def async_increment_error_count(self) -> None: - """Increment this entity's error count.""" - LOGGER.debug('Error for entity "%s" (total: %s)', self.name, self._error_count) - self._error_count += 1 - - @callback - def async_reset_error_count(self) -> None: - """Reset this entity's error count.""" - if self._error_count == 0: - return - - LOGGER.debug('Resetting error count for "%s"', self.name) - self._error_count = 0 - - @callback - def async_update_from_rest_api(self) -> None: - """Update the entity when new data comes from the REST API.""" - - @callback - def async_update_from_websocket_event(self, event: WebsocketEvent) -> None: - """Update the entity when new data comes from the websocket.""" diff --git a/homeassistant/components/simplisafe/alarm_control_panel.py b/homeassistant/components/simplisafe/alarm_control_panel.py index 28ebd246623..18f2d8ddcd5 100644 --- a/homeassistant/components/simplisafe/alarm_control_panel.py +++ 
b/homeassistant/components/simplisafe/alarm_control_panel.py @@ -26,21 +26,14 @@ from simplipy.websocket import ( from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SimpliSafe, SimpliSafeEntity +from . import SimpliSafe from .const import ( ATTR_ALARM_DURATION, ATTR_ALARM_VOLUME, @@ -54,6 +47,7 @@ from .const import ( DOMAIN, LOGGER, ) +from .entity import SimpliSafeEntity from .typing import SystemType ATTR_BATTERY_BACKUP_POWER_LEVEL = "battery_backup_power_level" @@ -64,33 +58,33 @@ ATTR_WALL_POWER_LEVEL = "wall_power_level" ATTR_WIFI_STRENGTH = "wifi_strength" STATE_MAP_FROM_REST_API = { - SystemStates.ALARM: STATE_ALARM_TRIGGERED, - SystemStates.ALARM_COUNT: STATE_ALARM_PENDING, - SystemStates.AWAY: STATE_ALARM_ARMED_AWAY, - SystemStates.AWAY_COUNT: STATE_ALARM_ARMING, - SystemStates.ENTRY_DELAY: STATE_ALARM_PENDING, - SystemStates.EXIT_DELAY: STATE_ALARM_ARMING, - SystemStates.HOME: STATE_ALARM_ARMED_HOME, - SystemStates.HOME_COUNT: STATE_ALARM_ARMING, - SystemStates.OFF: STATE_ALARM_DISARMED, - SystemStates.TEST: STATE_ALARM_DISARMED, + SystemStates.ALARM: AlarmControlPanelState.TRIGGERED, + SystemStates.ALARM_COUNT: AlarmControlPanelState.PENDING, + SystemStates.AWAY: AlarmControlPanelState.ARMED_AWAY, + SystemStates.AWAY_COUNT: AlarmControlPanelState.ARMING, + SystemStates.ENTRY_DELAY: AlarmControlPanelState.PENDING, + SystemStates.EXIT_DELAY: AlarmControlPanelState.ARMING, + SystemStates.HOME: AlarmControlPanelState.ARMED_HOME, + SystemStates.HOME_COUNT: AlarmControlPanelState.ARMING, + SystemStates.OFF: AlarmControlPanelState.DISARMED, + SystemStates.TEST: AlarmControlPanelState.DISARMED, } STATE_MAP_FROM_WEBSOCKET_EVENT = { - EVENT_ALARM_CANCELED: STATE_ALARM_DISARMED, - EVENT_ALARM_TRIGGERED: STATE_ALARM_TRIGGERED, - EVENT_ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - EVENT_ARMED_AWAY_BY_KEYPAD: STATE_ALARM_ARMED_AWAY, - EVENT_ARMED_AWAY_BY_REMOTE: STATE_ALARM_ARMED_AWAY, - EVENT_ARMED_HOME: STATE_ALARM_ARMED_HOME, - EVENT_AWAY_EXIT_DELAY_BY_KEYPAD: STATE_ALARM_ARMING, - EVENT_AWAY_EXIT_DELAY_BY_REMOTE: STATE_ALARM_ARMING, - EVENT_DISARMED_BY_KEYPAD: STATE_ALARM_DISARMED, - EVENT_DISARMED_BY_REMOTE: STATE_ALARM_DISARMED, - EVENT_ENTRY_DELAY: STATE_ALARM_PENDING, - EVENT_HOME_EXIT_DELAY: STATE_ALARM_ARMING, - EVENT_SECRET_ALERT_TRIGGERED: STATE_ALARM_TRIGGERED, - EVENT_USER_INITIATED_TEST: STATE_ALARM_DISARMED, + EVENT_ALARM_CANCELED: AlarmControlPanelState.DISARMED, + EVENT_ALARM_TRIGGERED: AlarmControlPanelState.TRIGGERED, + EVENT_ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + EVENT_ARMED_AWAY_BY_KEYPAD: AlarmControlPanelState.ARMED_AWAY, + EVENT_ARMED_AWAY_BY_REMOTE: AlarmControlPanelState.ARMED_AWAY, + EVENT_ARMED_HOME: AlarmControlPanelState.ARMED_HOME, + EVENT_AWAY_EXIT_DELAY_BY_KEYPAD: AlarmControlPanelState.ARMING, + EVENT_AWAY_EXIT_DELAY_BY_REMOTE: AlarmControlPanelState.ARMING, + EVENT_DISARMED_BY_KEYPAD: AlarmControlPanelState.DISARMED, + EVENT_DISARMED_BY_REMOTE: AlarmControlPanelState.DISARMED, + EVENT_ENTRY_DELAY: AlarmControlPanelState.PENDING, + 
EVENT_HOME_EXIT_DELAY: AlarmControlPanelState.ARMING, + EVENT_SECRET_ALERT_TRIGGERED: AlarmControlPanelState.TRIGGERED, + EVENT_USER_INITIATED_TEST: AlarmControlPanelState.DISARMED, } WEBSOCKET_EVENTS_TO_LISTEN_FOR = ( @@ -144,9 +138,9 @@ class SimpliSafeAlarm(SimpliSafeEntity, AlarmControlPanelEntity): def _set_state_from_system_data(self) -> None: """Set the state based on the latest REST API data.""" if self._system.alarm_going_off: - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED elif state := STATE_MAP_FROM_REST_API.get(self._system.state): - self._attr_state = state + self._attr_alarm_state = state self.async_reset_error_count() else: LOGGER.warning("Unexpected system state (REST API): %s", self._system.state) @@ -161,7 +155,7 @@ class SimpliSafeAlarm(SimpliSafeEntity, AlarmControlPanelEntity): f'Error while disarming "{self._system.system_id}": {err}' ) from err - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED self.async_write_ha_state() async def async_alarm_arm_home(self, code: str | None = None) -> None: @@ -173,7 +167,7 @@ class SimpliSafeAlarm(SimpliSafeEntity, AlarmControlPanelEntity): f'Error while arming (home) "{self._system.system_id}": {err}' ) from err - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME self.async_write_ha_state() async def async_alarm_arm_away(self, code: str | None = None) -> None: @@ -185,7 +179,7 @@ class SimpliSafeAlarm(SimpliSafeEntity, AlarmControlPanelEntity): f'Error while arming (away) "{self._system.system_id}": {err}' ) from err - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING self.async_write_ha_state() @callback @@ -229,7 +223,7 @@ class SimpliSafeAlarm(SimpliSafeEntity, AlarmControlPanelEntity): assert event.event_type if state := STATE_MAP_FROM_WEBSOCKET_EVENT.get(event.event_type): - self._attr_state = state + self._attr_alarm_state = state self.async_reset_error_count() else: LOGGER.error("Unknown alarm websocket event: %s", event.event_type) diff --git a/homeassistant/components/simplisafe/binary_sensor.py b/homeassistant/components/simplisafe/binary_sensor.py index 3f56149a9f8..0310e958e6e 100644 --- a/homeassistant/components/simplisafe/binary_sensor.py +++ b/homeassistant/components/simplisafe/binary_sensor.py @@ -15,8 +15,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SimpliSafe, SimpliSafeEntity +from . 
import SimpliSafe from .const import DOMAIN, LOGGER +from .entity import SimpliSafeEntity SUPPORTED_BATTERY_SENSOR_TYPES = [ DeviceTypes.CARBON_MONOXIDE, @@ -63,7 +64,7 @@ async def async_setup_entry( for system in simplisafe.systems.values(): if system.version == 2: - LOGGER.info("Skipping sensor setup for V2 system: %s", system.system_id) + LOGGER.warning("Skipping sensor setup for V2 system: %s", system.system_id) continue for sensor in system.sensors.values(): diff --git a/homeassistant/components/simplisafe/button.py b/homeassistant/components/simplisafe/button.py index 40bf857da2a..f0272d09f61 100644 --- a/homeassistant/components/simplisafe/button.py +++ b/homeassistant/components/simplisafe/button.py @@ -15,8 +15,9 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SimpliSafe, SimpliSafeEntity +from . import SimpliSafe from .const import DOMAIN +from .entity import SimpliSafeEntity from .typing import SystemType diff --git a/homeassistant/components/simplisafe/config_flow.py b/homeassistant/components/simplisafe/config_flow.py index c0d98c5644f..68974fe118f 100644 --- a/homeassistant/components/simplisafe/config_flow.py +++ b/homeassistant/components/simplisafe/config_flow.py @@ -67,9 +67,11 @@ class SimpliSafeFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SimpliSafeOptionsFlowHandler: """Define the config flow to handle options.""" - return SimpliSafeOptionsFlowHandler(config_entry) + return SimpliSafeOptionsFlowHandler() - async def async_step_reauth(self, config: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self._reauth = True return await self.async_step_user() @@ -151,10 +153,6 @@ class SimpliSafeFlowHandler(ConfigFlow, domain=DOMAIN): class SimpliSafeOptionsFlowHandler(OptionsFlow): """Handle a SimpliSafe options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/simplisafe/const.py b/homeassistant/components/simplisafe/const.py index 1ed77bcd685..95bb72913d0 100644 --- a/homeassistant/components/simplisafe/const.py +++ b/homeassistant/components/simplisafe/const.py @@ -13,5 +13,12 @@ ATTR_ENTRY_DELAY_AWAY = "entry_delay_away" ATTR_ENTRY_DELAY_HOME = "entry_delay_home" ATTR_EXIT_DELAY_AWAY = "exit_delay_away" ATTR_EXIT_DELAY_HOME = "exit_delay_home" +ATTR_LAST_EVENT_INFO = "last_event_info" +ATTR_LAST_EVENT_SENSOR_NAME = "last_event_sensor_name" +ATTR_LAST_EVENT_SENSOR_TYPE = "last_event_sensor_type" +ATTR_LAST_EVENT_TIMESTAMP = "last_event_timestamp" ATTR_LIGHT = "light" +ATTR_SYSTEM_ID = "system_id" ATTR_VOICE_PROMPT_VOLUME = "voice_prompt_volume" + +DISPATCHER_TOPIC_WEBSOCKET_EVENT = "simplisafe_websocket_event_{0}" diff --git a/homeassistant/components/simplisafe/entity.py b/homeassistant/components/simplisafe/entity.py new file mode 100644 index 00000000000..ff1dd49e9fc --- /dev/null +++ b/homeassistant/components/simplisafe/entity.py @@ -0,0 +1,235 @@ +"""Support for SimpliSafe alarm systems.""" + +from __future__ import annotations + +from collections.abc import Iterable + +from simplipy.device import Device, DeviceTypes +from simplipy.system.v3 import SystemV3 +from 
simplipy.websocket import ( + EVENT_CONNECTION_LOST, + EVENT_CONNECTION_RESTORED, + EVENT_LOCK_LOCKED, + EVENT_LOCK_UNLOCKED, + EVENT_POWER_OUTAGE, + EVENT_POWER_RESTORED, + WebsocketEvent, +) + +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from . import SimpliSafe +from .const import ( + ATTR_LAST_EVENT_INFO, + ATTR_LAST_EVENT_SENSOR_NAME, + ATTR_LAST_EVENT_SENSOR_TYPE, + ATTR_LAST_EVENT_TIMESTAMP, + ATTR_SYSTEM_ID, + DISPATCHER_TOPIC_WEBSOCKET_EVENT, + DOMAIN, + LOGGER, +) +from .typing import SystemType + +DEFAULT_CONFIG_URL = "https://webapp.simplisafe.com/new/#/dashboard" +DEFAULT_ENTITY_MODEL = "Alarm control panel" +DEFAULT_ERROR_THRESHOLD = 2 + +WEBSOCKET_EVENTS_REQUIRING_SERIAL = [EVENT_LOCK_LOCKED, EVENT_LOCK_UNLOCKED] + + +class SimpliSafeEntity(CoordinatorEntity[DataUpdateCoordinator[None]]): + """Define a base SimpliSafe entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + simplisafe: SimpliSafe, + system: SystemType, + *, + device: Device | None = None, + additional_websocket_events: Iterable[str] | None = None, + ) -> None: + """Initialize.""" + assert simplisafe.coordinator + super().__init__(simplisafe.coordinator) + + # SimpliSafe can incorrectly return an error state when there isn't any + # error. This can lead to entities having an unknown state frequently. + # To protect against that, we measure an error count for each entity and only + # mark the state as unavailable if we detect a few in a row: + self._error_count = 0 + + if device: + model = device.type.name.capitalize().replace("_", " ") + device_name = f"{device.name.capitalize()} {model}" + serial = device.serial + else: + model = device_name = DEFAULT_ENTITY_MODEL + serial = system.serial + + event = simplisafe.initial_event_to_use[system.system_id] + + if raw_type := event.get("sensorType"): + try: + device_type = DeviceTypes(raw_type) + except ValueError: + device_type = DeviceTypes.UNKNOWN + else: + device_type = DeviceTypes.UNKNOWN + + self._attr_extra_state_attributes = { + ATTR_LAST_EVENT_INFO: event.get("info"), + ATTR_LAST_EVENT_SENSOR_NAME: event.get("sensorName"), + ATTR_LAST_EVENT_SENSOR_TYPE: device_type.name.lower(), + ATTR_LAST_EVENT_TIMESTAMP: event.get("eventTimestamp"), + ATTR_SYSTEM_ID: system.system_id, + } + + self._attr_device_info = DeviceInfo( + configuration_url=DEFAULT_CONFIG_URL, + identifiers={(DOMAIN, serial)}, + manufacturer="SimpliSafe", + model=model, + name=device_name, + via_device=(DOMAIN, str(system.system_id)), + ) + + self._attr_unique_id = serial + self._device = device + self._online = True + self._simplisafe = simplisafe + self._system = system + self._websocket_events_to_listen_for = [ + EVENT_CONNECTION_LOST, + EVENT_CONNECTION_RESTORED, + EVENT_POWER_OUTAGE, + EVENT_POWER_RESTORED, + ] + if additional_websocket_events: + self._websocket_events_to_listen_for += additional_websocket_events + + @property + def available(self) -> bool: + """Return whether the entity is available.""" + # We can easily detect if the V3 system is offline, but no simple check exists + # for the V2 system. Therefore, assuming the coordinator hasn't failed, we mark + # the entity as available if: + # 1. We can verify that the system is online (assuming True if we can't) + # 2. 
We can verify that the entity is online + if isinstance(self._system, SystemV3): + system_offline = self._system.offline + else: + system_offline = False + + return ( + self._error_count < DEFAULT_ERROR_THRESHOLD + and self._online + and not system_offline + ) + + @callback + def _handle_coordinator_update(self) -> None: + """Update the entity with new REST API data.""" + if self.coordinator.last_update_success: + self.async_reset_error_count() + else: + self.async_increment_error_count() + + self.async_update_from_rest_api() + self.async_write_ha_state() + + @callback + def _handle_websocket_update(self, event: WebsocketEvent) -> None: + """Update the entity with new websocket data.""" + # Ignore this event if it belongs to a system other than this one: + if event.system_id != self._system.system_id: + return + + # Ignore this event if this entity hasn't expressed interest in its type: + if event.event_type not in self._websocket_events_to_listen_for: + return + + # Ignore this event if it belongs to an entity with a different serial + # number from this one's: + if ( + self._device + and event.event_type in WEBSOCKET_EVENTS_REQUIRING_SERIAL + and event.sensor_serial != self._device.serial + ): + return + + sensor_type: str | None + if event.sensor_type: + sensor_type = event.sensor_type.name + else: + sensor_type = None + + self._attr_extra_state_attributes.update( + { + ATTR_LAST_EVENT_INFO: event.info, + ATTR_LAST_EVENT_SENSOR_NAME: event.sensor_name, + ATTR_LAST_EVENT_SENSOR_TYPE: sensor_type, + ATTR_LAST_EVENT_TIMESTAMP: event.timestamp, + } + ) + + # It's unknown whether these events reach the base station (since the connection + # is lost); we include this for completeness and coverage: + if event.event_type in (EVENT_CONNECTION_LOST, EVENT_POWER_OUTAGE): + self._online = False + return + + # If the base station comes back online, set entities to available, but don't + # instruct the entities to update their state (since there won't be anything new + # until the next websocket event or REST API update): + if event.event_type in (EVENT_CONNECTION_RESTORED, EVENT_POWER_RESTORED): + self._online = True + return + + self.async_update_from_websocket_event(event) + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await super().async_added_to_hass() + + self.async_on_remove( + async_dispatcher_connect( + self.hass, + DISPATCHER_TOPIC_WEBSOCKET_EVENT.format(self._system.system_id), + self._handle_websocket_update, + ) + ) + + self.async_update_from_rest_api() + + @callback + def async_increment_error_count(self) -> None: + """Increment this entity's error count.""" + LOGGER.debug('Error for entity "%s" (total: %s)', self.name, self._error_count) + self._error_count += 1 + + @callback + def async_reset_error_count(self) -> None: + """Reset this entity's error count.""" + if self._error_count == 0: + return + + LOGGER.debug('Resetting error count for "%s"', self.name) + self._error_count = 0 + + @callback + def async_update_from_rest_api(self) -> None: + """Update the entity when new data comes from the REST API.""" + + @callback + def async_update_from_websocket_event(self, event: WebsocketEvent) -> None: + """Update the entity when new data comes from the websocket.""" diff --git a/homeassistant/components/simplisafe/icons.json b/homeassistant/components/simplisafe/icons.json index 60ddb7f0982..8552993210f 100644 --- a/homeassistant/components/simplisafe/icons.json +++ b/homeassistant/components/simplisafe/icons.json @@ -1,7 +1,13 @@ 
{ "services": { - "remove_pin": "mdi:alarm-panel-outline", - "set_pin": "mdi:alarm-panel", - "set_system_properties": "mdi:cog" + "remove_pin": { + "service": "mdi:alarm-panel-outline" + }, + "set_pin": { + "service": "mdi:alarm-panel" + }, + "set_system_properties": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/simplisafe/lock.py b/homeassistant/components/simplisafe/lock.py index 680fc0f4c0f..c610223bff1 100644 --- a/homeassistant/components/simplisafe/lock.py +++ b/homeassistant/components/simplisafe/lock.py @@ -15,8 +15,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SimpliSafe, SimpliSafeEntity +from . import SimpliSafe from .const import DOMAIN, LOGGER +from .entity import SimpliSafeEntity ATTR_LOCK_LOW_BATTERY = "lock_low_battery" ATTR_PIN_PAD_LOW_BATTERY = "pin_pad_low_battery" @@ -38,7 +39,7 @@ async def async_setup_entry( for system in simplisafe.systems.values(): if system.version == 2: - LOGGER.info("Skipping lock setup for V2 system: %s", system.system_id) + LOGGER.warning("Skipping lock setup for V2 system: %s", system.system_id) continue locks.extend( diff --git a/homeassistant/components/simplisafe/sensor.py b/homeassistant/components/simplisafe/sensor.py index fbccfc4b2f9..a5f46e87a7c 100644 --- a/homeassistant/components/simplisafe/sensor.py +++ b/homeassistant/components/simplisafe/sensor.py @@ -16,8 +16,9 @@ from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SimpliSafe, SimpliSafeEntity +from . import SimpliSafe from .const import DOMAIN, LOGGER +from .entity import SimpliSafeEntity async def async_setup_entry( @@ -29,7 +30,7 @@ async def async_setup_entry( for system in simplisafe.systems.values(): if system.version == 2: - LOGGER.info("Skipping sensor setup for V2 system: %s", system.system_id) + LOGGER.warning("Skipping sensor setup for V2 system: %s", system.system_id) continue sensors.extend( diff --git a/homeassistant/components/simulated/__init__.py b/homeassistant/components/simulated/__init__.py deleted file mode 100644 index 35c6d106d03..00000000000 --- a/homeassistant/components/simulated/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The simulated component.""" diff --git a/homeassistant/components/simulated/manifest.json b/homeassistant/components/simulated/manifest.json deleted file mode 100644 index e76bf142086..00000000000 --- a/homeassistant/components/simulated/manifest.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "domain": "simulated", - "name": "Simulated", - "codeowners": [], - "documentation": "https://www.home-assistant.io/integrations/simulated", - "iot_class": "local_polling", - "quality_scale": "internal" -} diff --git a/homeassistant/components/simulated/sensor.py b/homeassistant/components/simulated/sensor.py deleted file mode 100644 index 22ce4bd7cea..00000000000 --- a/homeassistant/components/simulated/sensor.py +++ /dev/null @@ -1,175 +0,0 @@ -"""Adds a simulated sensor.""" - -from __future__ import annotations - -from datetime import datetime -import math -from random import Random - -import voluptuous as vol - -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorEntity, -) -from homeassistant.const import CONF_NAME -from homeassistant.core import HomeAssistant -from homeassistant.helpers 
import issue_registry as ir -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -import homeassistant.util.dt as dt_util - -CONF_AMP = "amplitude" -CONF_FWHM = "spread" -CONF_MEAN = "mean" -CONF_PERIOD = "period" -CONF_PHASE = "phase" -CONF_SEED = "seed" -CONF_UNIT = "unit" -CONF_RELATIVE_TO_EPOCH = "relative_to_epoch" - -DEFAULT_AMP = 1 -DEFAULT_FWHM = 0 -DEFAULT_MEAN = 0 -DEFAULT_NAME = "simulated" -DEFAULT_PERIOD = 60 -DEFAULT_PHASE = 0 -DEFAULT_SEED = 999 -DEFAULT_UNIT = "value" -DEFAULT_RELATIVE_TO_EPOCH = True - -DOMAIN = "simulated" - -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_AMP, default=DEFAULT_AMP): vol.Coerce(float), - vol.Optional(CONF_FWHM, default=DEFAULT_FWHM): vol.Coerce(float), - vol.Optional(CONF_MEAN, default=DEFAULT_MEAN): vol.Coerce(float), - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_PERIOD, default=DEFAULT_PERIOD): cv.positive_int, - vol.Optional(CONF_PHASE, default=DEFAULT_PHASE): vol.Coerce(float), - vol.Optional(CONF_SEED, default=DEFAULT_SEED): cv.positive_int, - vol.Optional(CONF_UNIT, default=DEFAULT_UNIT): cv.string, - vol.Optional( - CONF_RELATIVE_TO_EPOCH, default=DEFAULT_RELATIVE_TO_EPOCH - ): cv.boolean, - } -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the simulated sensor.""" - # Simulated has been deprecated and will be removed in 2025.1 - - ir.async_create_issue( - hass, - DOMAIN, - DOMAIN, - breaks_in_ha_version="2025.1.0", - is_fixable=False, - severity=ir.IssueSeverity.WARNING, - translation_key="simulated_deprecation", - translation_placeholders={"integration": DOMAIN}, - learn_more_url="https://www.home-assistant.io/integrations/simulated", - ) - - name = config.get(CONF_NAME) - unit = config.get(CONF_UNIT) - amp = config.get(CONF_AMP) - mean = config.get(CONF_MEAN) - period = config.get(CONF_PERIOD) - phase = config.get(CONF_PHASE) - fwhm = config.get(CONF_FWHM) - seed = config.get(CONF_SEED) - relative_to_epoch = config.get(CONF_RELATIVE_TO_EPOCH) - - sensor = SimulatedSensor( - name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch - ) - async_add_entities([sensor], True) - - -class SimulatedSensor(SensorEntity): - """Class for simulated sensor.""" - - _attr_icon = "mdi:chart-line" - - def __init__( - self, name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch - ): - """Init the class.""" - self._name = name - self._unit = unit - self._amp = amp - self._mean = mean - self._period = period - self._phase = phase # phase in degrees - self._fwhm = fwhm - self._seed = seed - self._random = Random(seed) # A local seeded Random - self._start_time = ( - datetime(1970, 1, 1, tzinfo=dt_util.UTC) - if relative_to_epoch - else dt_util.utcnow() - ) - self._relative_to_epoch = relative_to_epoch - self._state = None - - def time_delta(self): - """Return the time delta.""" - dt0 = self._start_time - dt1 = dt_util.utcnow() - return dt1 - dt0 - - def signal_calc(self): - """Calculate the signal.""" - mean = self._mean - amp = self._amp - time_delta = self.time_delta().total_seconds() * 1e6 # to milliseconds - period = self._period * 1e6 # to milliseconds - fwhm = self._fwhm / 2 - phase = math.radians(self._phase) - if period == 0: - periodic = 0 - else: - periodic = amp * 
(math.sin((2 * math.pi * time_delta / period) + phase)) - noise = self._random.gauss(mu=0, sigma=fwhm) - return round(mean + periodic + noise, 3) - - async def async_update(self) -> None: - """Update the sensor.""" - self._state = self.signal_calc() - - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def native_value(self): - """Return the state of the sensor.""" - return self._state - - @property - def native_unit_of_measurement(self): - """Return the unit this state is expressed in.""" - return self._unit - - @property - def extra_state_attributes(self): - """Return other details about the sensor state.""" - return { - "amplitude": self._amp, - "mean": self._mean, - "period": self._period, - "phase": self._phase, - "spread": self._fwhm, - "seed": self._seed, - "relative_to_epoch": self._relative_to_epoch, - } diff --git a/homeassistant/components/simulated/strings.json b/homeassistant/components/simulated/strings.json deleted file mode 100644 index d25a84f48a5..00000000000 --- a/homeassistant/components/simulated/strings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "issues": { - "simulated_deprecation": { - "description": "The {integration} integration is deprecated", - "title": "The {integration} integration has been deprecated and will be removed in 2025.1. Please remove the {integration} from your configuration.yaml settings and restart Home Assistant to fix this issue." - } - } -} diff --git a/homeassistant/components/sinch/manifest.json b/homeassistant/components/sinch/manifest.json index 21a80f63b1f..4af90b759ee 100644 --- a/homeassistant/components/sinch/manifest.json +++ b/homeassistant/components/sinch/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sinch", "iot_class": "cloud_push", "loggers": ["clx"], + "quality_scale": "legacy", "requirements": ["clx-sdk-xms==1.0.0"] } diff --git a/homeassistant/components/siren/__init__.py b/homeassistant/components/siren/__init__.py index 801ca4f2bee..9ce6898fd93 100644 --- a/homeassistant/components/siren/__init__.py +++ b/homeassistant/components/siren/__init__.py @@ -3,31 +3,22 @@ from __future__ import annotations from datetime import timedelta -from functools import cached_property, partial import logging from typing import Any, TypedDict, cast, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry from homeassistant.const import SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType, VolDictType +from homeassistant.util.hass_dict import HassKey -from .const import ( # noqa: F401 - _DEPRECATED_SUPPORT_DURATION, - _DEPRECATED_SUPPORT_TONES, - _DEPRECATED_SUPPORT_TURN_OFF, - _DEPRECATED_SUPPORT_TURN_ON, - _DEPRECATED_SUPPORT_VOLUME_SET, +from .const import ( ATTR_AVAILABLE_TONES, ATTR_DURATION, ATTR_TONE, @@ -38,6 +29,7 @@ from .const import ( # noqa: F401 _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[SirenEntity]] = HassKey(DOMAIN) PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = 
cv.PLATFORM_SCHEMA_BASE SCAN_INTERVAL = timedelta(seconds=60) @@ -104,7 +96,7 @@ def process_turn_on_params( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up siren devices.""" - component = hass.data[DOMAIN] = EntityComponent[SirenEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[SirenEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -143,14 +135,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[SirenEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[SirenEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class SirenEntityDescription(ToggleEntityDescription, frozen_or_thawed=True): @@ -201,19 +191,4 @@ class SirenEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): @cached_property def supported_features(self) -> SirenEntityFeature: """Return the list of supported features.""" - features = self._attr_supported_features - if type(features) is int: # noqa: E721 - new_features = SirenEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) + return self._attr_supported_features diff --git a/homeassistant/components/siren/const.py b/homeassistant/components/siren/const.py index 9e46d8dc997..26a158bd8ea 100644 --- a/homeassistant/components/siren/const.py +++ b/homeassistant/components/siren/const.py @@ -1,16 +1,8 @@ """Constants for the siren component.""" from enum import IntFlag -from functools import partial from typing import Final -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) - DOMAIN: Final = "siren" ATTR_TONE: Final = "tone" @@ -28,29 +20,3 @@ class SirenEntityFeature(IntFlag): TONES = 4 VOLUME_SET = 8 DURATION = 16 - - -# These constants are deprecated as of Home Assistant 2022.5 -# Please use the SirenEntityFeature enum instead. 
-_DEPRECATED_SUPPORT_TURN_ON: Final = DeprecatedConstantEnum( - SirenEntityFeature.TURN_ON, "2025.1" -) -_DEPRECATED_SUPPORT_TURN_OFF: Final = DeprecatedConstantEnum( - SirenEntityFeature.TURN_OFF, "2025.1" -) -_DEPRECATED_SUPPORT_TONES: Final = DeprecatedConstantEnum( - SirenEntityFeature.TONES, "2025.1" -) -_DEPRECATED_SUPPORT_VOLUME_SET: Final = DeprecatedConstantEnum( - SirenEntityFeature.VOLUME_SET, "2025.1" -) -_DEPRECATED_SUPPORT_DURATION: Final = DeprecatedConstantEnum( - SirenEntityFeature.DURATION, "2025.1" -) - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/siren/icons.json b/homeassistant/components/siren/icons.json index 0083a2540c7..75caf6417da 100644 --- a/homeassistant/components/siren/icons.json +++ b/homeassistant/components/siren/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "toggle": "mdi:bullhorn", - "turn_off": "mdi:bullhorn", - "turn_on": "mdi:bullhorn" + "toggle": { + "service": "mdi:bullhorn" + }, + "turn_off": { + "service": "mdi:bullhorn" + }, + "turn_on": { + "service": "mdi:bullhorn" + } } } diff --git a/homeassistant/components/sisyphus/manifest.json b/homeassistant/components/sisyphus/manifest.json index dbb40344d66..f62d19b77c1 100644 --- a/homeassistant/components/sisyphus/manifest.json +++ b/homeassistant/components/sisyphus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sisyphus", "iot_class": "local_push", "loggers": ["sisyphus_control"], - "requirements": ["sisyphus-control==3.1.3"] + "quality_scale": "legacy", + "requirements": ["sisyphus-control==3.1.4"] } diff --git a/homeassistant/components/sky_hub/device_tracker.py b/homeassistant/components/sky_hub/device_tracker.py index 140a174cc97..b0ad48ed985 100644 --- a/homeassistant/components/sky_hub/device_tracker.py +++ b/homeassistant/components/sky_hub/device_tracker.py @@ -8,7 +8,7 @@ from pyskyqhub.skyq_hub import SkyQHub import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -29,7 +29,7 @@ async def async_get_scanner( hass: HomeAssistant, config: ConfigType ) -> SkyHubDeviceScanner | None: """Return a Sky Hub scanner if successful.""" - host = config[DOMAIN].get(CONF_HOST, "192.168.1.254") + host = config[DEVICE_TRACKER_DOMAIN].get(CONF_HOST, "192.168.1.254") websession = async_get_clientsession(hass) hub = SkyQHub(websession, host) diff --git a/homeassistant/components/sky_hub/manifest.json b/homeassistant/components/sky_hub/manifest.json index 541cc6e0b03..1030da4d0ff 100644 --- a/homeassistant/components/sky_hub/manifest.json +++ b/homeassistant/components/sky_hub/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sky_hub", "iot_class": "local_polling", "loggers": ["pyskyqhub"], + "quality_scale": "legacy", "requirements": ["pyskyqhub==0.1.4"] } diff --git a/homeassistant/components/sky_remote/__init__.py b/homeassistant/components/sky_remote/__init__.py new file mode 100644 index 00000000000..4daad78c558 --- /dev/null +++ b/homeassistant/components/sky_remote/__init__.py @@ -0,0 +1,39 @@ +"""The Sky Remote Control integration.""" + +import logging + +from skyboxremote import 
RemoteControl, SkyBoxConnectionError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PORT, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +PLATFORMS = [Platform.REMOTE] + +_LOGGER = logging.getLogger(__name__) + + +type SkyRemoteConfigEntry = ConfigEntry[RemoteControl] + + +async def async_setup_entry(hass: HomeAssistant, entry: SkyRemoteConfigEntry) -> bool: + """Set up Sky remote.""" + host = entry.data[CONF_HOST] + port = entry.data[CONF_PORT] + + _LOGGER.debug("Setting up Host: %s, Port: %s", host, port) + remote = RemoteControl(host, port) + try: + await remote.check_connectable() + except SkyBoxConnectionError as e: + raise ConfigEntryNotReady from e + + entry.runtime_data = remote + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/sky_remote/config_flow.py b/homeassistant/components/sky_remote/config_flow.py new file mode 100644 index 00000000000..a55dfb2a52b --- /dev/null +++ b/homeassistant/components/sky_remote/config_flow.py @@ -0,0 +1,64 @@ +"""Config flow for sky_remote.""" + +import logging +from typing import Any + +from skyboxremote import RemoteControl, SkyBoxConnectionError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PORT +import homeassistant.helpers.config_validation as cv + +from .const import DEFAULT_PORT, DOMAIN, LEGACY_PORT + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): cv.string, + } +) + + +async def async_find_box_port(host: str) -> int: + """Find port box uses for communication.""" + logging.debug("Attempting to find port to connect to %s on", host) + remote = RemoteControl(host, DEFAULT_PORT) + try: + await remote.check_connectable() + except SkyBoxConnectionError: + # Try legacy port if the default one failed + remote = RemoteControl(host, LEGACY_PORT) + await remote.check_connectable() + return LEGACY_PORT + return DEFAULT_PORT + + +class SkyRemoteConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Sky Remote.""" + + VERSION = 1 + MINOR_VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + + errors: dict[str, str] = {} + if user_input is not None: + logging.debug("user_input: %s", user_input) + self._async_abort_entries_match(user_input) + try: + port = await async_find_box_port(user_input[CONF_HOST]) + except SkyBoxConnectionError: + logging.exception("while finding port of skybox") + errors["base"] = "cannot_connect" + else: + return self.async_create_entry( + title=user_input[CONF_HOST], + data={**user_input, CONF_PORT: port}, + ) + + return self.async_show_form( + step_id="user", data_schema=DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/sky_remote/const.py b/homeassistant/components/sky_remote/const.py new file mode 100644 index 00000000000..e67744a741b --- /dev/null +++ b/homeassistant/components/sky_remote/const.py @@ -0,0 +1,6 @@ +"""Constants.""" + +DOMAIN = "sky_remote" + +DEFAULT_PORT = 49160 +LEGACY_PORT = 5900 diff --git a/homeassistant/components/sky_remote/manifest.json 
b/homeassistant/components/sky_remote/manifest.json new file mode 100644 index 00000000000..b00ff309b10 --- /dev/null +++ b/homeassistant/components/sky_remote/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "sky_remote", + "name": "Sky Remote Control", + "codeowners": ["@dunnmj", "@saty9"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/sky_remote", + "integration_type": "device", + "iot_class": "assumed_state", + "requirements": ["skyboxremote==0.0.6"] +} diff --git a/homeassistant/components/sky_remote/remote.py b/homeassistant/components/sky_remote/remote.py new file mode 100644 index 00000000000..05a464f73a6 --- /dev/null +++ b/homeassistant/components/sky_remote/remote.py @@ -0,0 +1,70 @@ +"""Home Assistant integration to control a sky box using the remote platform.""" + +from collections.abc import Iterable +import logging +from typing import Any + +from skyboxremote import VALID_KEYS, RemoteControl + +from homeassistant.components.remote import RemoteEntity +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SkyRemoteConfigEntry +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config: SkyRemoteConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Sky remote platform.""" + async_add_entities( + [SkyRemote(config.runtime_data, config.entry_id)], + True, + ) + + +class SkyRemote(RemoteEntity): + """Representation of a Sky Remote.""" + + _attr_has_entity_name = True + _attr_name = None + + def __init__(self, remote: RemoteControl, unique_id: str) -> None: + """Initialize the Sky Remote.""" + self._remote = remote + self._attr_unique_id = unique_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + manufacturer="SKY", + model="Sky Box", + name=remote.host, + ) + + def turn_on(self, activity: str | None = None, **kwargs: Any) -> None: + """Send the power on command.""" + self.send_command(["sky"]) + + def turn_off(self, activity: str | None = None, **kwargs: Any) -> None: + """Send the power command.""" + self.send_command(["power"]) + + def send_command(self, command: Iterable[str], **kwargs: Any) -> None: + """Send a list of commands to the device.""" + for cmd in command: + if cmd not in VALID_KEYS: + raise ServiceValidationError( + f"{cmd} is not in Valid Keys: {VALID_KEYS}" + ) + try: + self._remote.send_keys(command) + except ValueError as err: + _LOGGER.error("Invalid command: %s. 
Error: %s", command, err) + return + _LOGGER.debug("Successfully sent command %s", command) diff --git a/homeassistant/components/sky_remote/strings.json b/homeassistant/components/sky_remote/strings.json new file mode 100644 index 00000000000..af794490c43 --- /dev/null +++ b/homeassistant/components/sky_remote/strings.json @@ -0,0 +1,21 @@ +{ + "config": { + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + }, + "step": { + "user": { + "title": "Add Sky Remote", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "Hostname or IP address of your Sky device" + } + } + } + } +} diff --git a/homeassistant/components/skybeacon/manifest.json b/homeassistant/components/skybeacon/manifest.json index deda02f64f7..379f10e8873 100644 --- a/homeassistant/components/skybeacon/manifest.json +++ b/homeassistant/components/skybeacon/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/skybeacon", "iot_class": "local_polling", "loggers": ["pygatt"], + "quality_scale": "legacy", "requirements": ["pygatt[GATTTOOL]==4.0.5"] } diff --git a/homeassistant/components/skybeacon/sensor.py b/homeassistant/components/skybeacon/sensor.py index a3a5eb48098..6cb5064b40e 100644 --- a/homeassistant/components/skybeacon/sensor.py +++ b/homeassistant/components/skybeacon/sensor.py @@ -69,7 +69,7 @@ def setup_platform( def monitor_stop(_service_or_event): """Stop the monitor thread.""" - _LOGGER.info("Stopping monitor for %s", name) + _LOGGER.debug("Stopping monitor for %s", name) mon.terminate() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop) @@ -163,7 +163,7 @@ class Monitor(threading.Thread, SensorEntity): # Magic: writing this makes device happy device.char_write_handle(0x1B, bytearray([255]), False) device.subscribe(BLE_TEMP_UUID, self._update) - _LOGGER.info("Subscribed to %s", self.name) + _LOGGER.debug("Subscribed to %s", self.name) while self.keep_going: # protect against stale connections, just read temperature device.char_read(BLE_TEMP_UUID, timeout=CONNECT_TIMEOUT) @@ -184,7 +184,7 @@ class Monitor(threading.Thread, SensorEntity): value[2], value[1], ) - self.data["temp"] = float("%d.%d" % (value[0], value[2])) + self.data["temp"] = float(f"{value[0]}.{value[2]}") self.data["humid"] = value[1] def terminate(self): diff --git a/homeassistant/components/skybell/config_flow.py b/homeassistant/components/skybell/config_flow.py index 385f3dc39d7..a32441f4cf8 100644 --- a/homeassistant/components/skybell/config_flow.py +++ b/homeassistant/components/skybell/config_flow.py @@ -34,16 +34,11 @@ class SkybellFlowHandler(ConfigFlow, domain=DOMAIN): errors = {} if user_input: password = user_input[CONF_PASSWORD] - entry_id = self.context["entry_id"] - if entry := self.hass.config_entries.async_get_entry(entry_id): - _, error = await self._async_validate_input(self.reauth_email, password) - if error is None: - self.hass.config_entries.async_update_entry( - entry, - data=entry.data | user_input, - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + _, error = await self._async_validate_input(self.reauth_email, password) + if error is None: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) errors["base"] = error return self.async_show_form( diff --git 
a/homeassistant/components/slack/__init__.py b/homeassistant/components/slack/__init__.py index e5f6a50122e..6fce38e4774 100644 --- a/homeassistant/components/slack/__init__.py +++ b/homeassistant/components/slack/__init__.py @@ -13,8 +13,6 @@ from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client, config_validation as cv, discovery -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.typing import ConfigType from .const import ( @@ -22,7 +20,6 @@ from .const import ( ATTR_USER_ID, DATA_CLIENT, DATA_HASS_CONFIG, - DEFAULT_NAME, DOMAIN, SLACK_DATA, ) @@ -74,28 +71,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) return True - - -class SlackEntity(Entity): - """Representation of a Slack entity.""" - - _attr_attribution = "Data provided by Slack" - _attr_has_entity_name = True - - def __init__( - self, - data: dict[str, str | WebClient], - description: EntityDescription, - entry: ConfigEntry, - ) -> None: - """Initialize a Slack entity.""" - self._client = data[DATA_CLIENT] - self.entity_description = description - self._attr_unique_id = f"{data[ATTR_USER_ID]}_{description.key}" - self._attr_device_info = DeviceInfo( - configuration_url=data[ATTR_URL], - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, entry.entry_id)}, - manufacturer=DEFAULT_NAME, - name=entry.title, - ) diff --git a/homeassistant/components/slack/entity.py b/homeassistant/components/slack/entity.py new file mode 100644 index 00000000000..7147186ee9b --- /dev/null +++ b/homeassistant/components/slack/entity.py @@ -0,0 +1,36 @@ +"""The slack integration.""" + +from __future__ import annotations + +from slack import WebClient + +from homeassistant.config_entries import ConfigEntry +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import Entity, EntityDescription + +from .const import ATTR_URL, ATTR_USER_ID, DATA_CLIENT, DEFAULT_NAME, DOMAIN + + +class SlackEntity(Entity): + """Representation of a Slack entity.""" + + _attr_attribution = "Data provided by Slack" + _attr_has_entity_name = True + + def __init__( + self, + data: dict[str, str | WebClient], + description: EntityDescription, + entry: ConfigEntry, + ) -> None: + """Initialize a Slack entity.""" + self._client = data[DATA_CLIENT] + self.entity_description = description + self._attr_unique_id = f"{data[ATTR_USER_ID]}_{description.key}" + self._attr_device_info = DeviceInfo( + configuration_url=data[ATTR_URL], + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer=DEFAULT_NAME, + name=entry.title, + ) diff --git a/homeassistant/components/slack/sensor.py b/homeassistant/components/slack/sensor.py index b4d7fd28bd7..9e3beaadd8b 100644 --- a/homeassistant/components/slack/sensor.py +++ b/homeassistant/components/slack/sensor.py @@ -14,8 +14,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . 
import SlackEntity from .const import ATTR_SNOOZE, DOMAIN, SLACK_DATA +from .entity import SlackEntity async def async_setup_entry( diff --git a/homeassistant/components/sleepiq/config_flow.py b/homeassistant/components/sleepiq/config_flow.py index 4a4813192c3..0a473404eb9 100644 --- a/homeassistant/components/sleepiq/config_flow.py +++ b/homeassistant/components/sleepiq/config_flow.py @@ -9,7 +9,7 @@ from typing import Any from asyncsleepiq import AsyncSleepIQ, SleepIQLoginException, SleepIQTimeoutException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -24,26 +24,20 @@ class SleepIQFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the config flow.""" - self._reauth_entry: ConfigEntry | None = None - - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a SleepIQ account as a config entry. This flow is triggered by 'async_setup' for configured accounts. """ - await self.async_set_unique_id(import_config[CONF_USERNAME].lower()) + await self.async_set_unique_id(import_data[CONF_USERNAME].lower()) self._abort_if_unique_id_configured() - if error := await try_connection(self.hass, import_config): + if error := await try_connection(self.hass, import_data): _LOGGER.error("Could not authenticate with SleepIQ server: %s", error) return self.async_abort(reason=error) return self.async_create_entry( - title=import_config[CONF_USERNAME], data=import_config + title=import_data[CONF_USERNAME], data=import_data ) async def async_step_user( @@ -86,9 +80,6 @@ class SleepIQFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -96,19 +87,16 @@ class SleepIQFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth.""" errors: dict[str, str] = {} - assert self._reauth_entry is not None + + reauth_entry = self._get_reauth_entry() if user_input is not None: data = { - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME], + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], } if not (error := await try_connection(self.hass, data)): - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=data - ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) errors["base"] = error return self.async_show_form( @@ -116,7 +104,7 @@ class SleepIQFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}), errors=errors, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME], + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], }, ) diff --git a/homeassistant/components/sleepiq/number.py b/homeassistant/components/sleepiq/number.py index 905ceab18bd..e4fa60a4a43 100644 --- 
a/homeassistant/components/sleepiq/number.py +++ b/homeassistant/components/sleepiq/number.py @@ -58,14 +58,14 @@ def _get_actuator_name(bed: SleepIQBed, actuator: SleepIQActuator) -> str: f" {bed.name} {actuator.side_full} {actuator.actuator_full} {ENTITY_TYPES[ACTUATOR]}" ) - return f"SleepNumber {bed.name} {actuator.actuator_full} {ENTITY_TYPES[ACTUATOR]}" + return f"SleepNumber {bed.name} {actuator.actuator_full} {ENTITY_TYPES[ACTUATOR]}" # type: ignore[unreachable] def _get_actuator_unique_id(bed: SleepIQBed, actuator: SleepIQActuator) -> str: if actuator.side: return f"{bed.id}_{actuator.side.value}_{actuator.actuator}" - return f"{bed.id}_{actuator.actuator}" + return f"{bed.id}_{actuator.actuator}" # type: ignore[unreachable] def _get_sleeper_name(bed: SleepIQBed, sleeper: SleepIQSleeper) -> str: diff --git a/homeassistant/components/slide/cover.py b/homeassistant/components/slide/cover.py index 5186b3d0fea..d4927775a97 100644 --- a/homeassistant/components/slide/cover.py +++ b/homeassistant/components/slide/cover.py @@ -6,7 +6,7 @@ import logging from typing import Any from homeassistant.components.cover import ATTR_POSITION, CoverDeviceClass, CoverEntity -from homeassistant.const import ATTR_ID, STATE_CLOSED, STATE_CLOSING, STATE_OPENING +from homeassistant.const import ATTR_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -15,6 +15,10 @@ from .const import API, DEFAULT_OFFSET, DOMAIN, SLIDES _LOGGER = logging.getLogger(__name__) +CLOSED = "closed" +CLOSING = "closing" +OPENING = "opening" + async def async_setup_platform( hass: HomeAssistant, @@ -55,19 +59,19 @@ class SlideCover(CoverEntity): @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._slide["state"] == STATE_OPENING + return self._slide["state"] == OPENING @property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._slide["state"] == STATE_CLOSING + return self._slide["state"] == CLOSING @property def is_closed(self) -> bool | None: """Return None if status is unknown, True if closed, else False.""" if self._slide["state"] is None: return None - return self._slide["state"] == STATE_CLOSED + return self._slide["state"] == CLOSED @property def available(self) -> bool: @@ -87,12 +91,12 @@ class SlideCover(CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" - self._slide["state"] = STATE_OPENING + self._slide["state"] = OPENING await self._api.slide_open(self._id) async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" - self._slide["state"] = STATE_CLOSING + self._slide["state"] = CLOSING await self._api.slide_close(self._id) async def async_stop_cover(self, **kwargs: Any) -> None: @@ -107,8 +111,8 @@ class SlideCover(CoverEntity): if self._slide["pos"] is not None: if position > self._slide["pos"]: - self._slide["state"] = STATE_CLOSING + self._slide["state"] = CLOSING else: - self._slide["state"] = STATE_OPENING + self._slide["state"] = OPENING await self._api.slide_set_position(self._id, position) diff --git a/homeassistant/components/slide/manifest.json b/homeassistant/components/slide/manifest.json index bb25e10658a..2b56185efa1 100644 --- a/homeassistant/components/slide/manifest.json +++ b/homeassistant/components/slide/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/slide", 
"iot_class": "cloud_polling", "loggers": ["goslideapi"], - "requirements": ["goslide-api==0.5.1"] + "quality_scale": "legacy", + "requirements": ["goslide-api==0.7.0"] } diff --git a/homeassistant/components/slide_local/__init__.py b/homeassistant/components/slide_local/__init__.py new file mode 100644 index 00000000000..5b4867bf337 --- /dev/null +++ b/homeassistant/components/slide_local/__init__.py @@ -0,0 +1,38 @@ +"""Component for the Slide local API.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import SlideCoordinator + +PLATFORMS = [Platform.BUTTON, Platform.COVER, Platform.SWITCH] +type SlideConfigEntry = ConfigEntry[SlideCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: + """Set up the slide_local integration.""" + + coordinator = SlideCoordinator(hass, entry) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + entry.async_on_unload(entry.add_update_listener(update_listener)) + + return True + + +async def update_listener(hass: HomeAssistant, entry: SlideConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + +async def async_unload_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/slide_local/button.py b/homeassistant/components/slide_local/button.py new file mode 100644 index 00000000000..9c285881116 --- /dev/null +++ b/homeassistant/components/slide_local/button.py @@ -0,0 +1,42 @@ +"""Support for Slide button.""" + +from __future__ import annotations + +from homeassistant.components.button import ButtonEntity +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import SlideConfigEntry +from .coordinator import SlideCoordinator +from .entity import SlideEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SlideConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up button for Slide platform.""" + + coordinator = entry.runtime_data + + async_add_entities([SlideButton(coordinator)]) + + +class SlideButton(SlideEntity, ButtonEntity): + """Defines a Slide button.""" + + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "calibrate" + + def __init__(self, coordinator: SlideCoordinator) -> None: + """Initialize the slide button.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.data["mac"]}-calibrate" + + async def async_press(self) -> None: + """Send out a calibrate command.""" + await self.coordinator.slide.slide_calibrate(self.coordinator.host) diff --git a/homeassistant/components/slide_local/config_flow.py b/homeassistant/components/slide_local/config_flow.py new file mode 100644 index 00000000000..3ccc89be375 --- /dev/null +++ b/homeassistant/components/slide_local/config_flow.py @@ -0,0 +1,216 @@ +"""Config flow for slide_local integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, + GoSlideLocal as SlideLocalApi, +) +import voluptuous as vol + +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_MAC, CONF_PASSWORD +from homeassistant.core import callback +from homeassistant.helpers.device_registry import format_mac + +from . 
import SlideConfigEntry +from .const import CONF_INVERT_POSITION, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class SlideConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for slide_local.""" + + _mac: str = "" + _host: str = "" + _api_version: int | None = None + + VERSION = 1 + MINOR_VERSION = 1 + + @staticmethod + @callback + def async_get_options_flow( + config_entry: SlideConfigEntry, + ) -> SlideOptionsFlowHandler: + """Get the options flow for this handler.""" + return SlideOptionsFlowHandler() + + async def async_test_connection( + self, user_input: dict[str, str | int] + ) -> dict[str, str]: + """Reusable Auth Helper.""" + slide = SlideLocalApi() + + # first test, if API version 2 is working + await slide.slide_add( + user_input[CONF_HOST], + user_input.get(CONF_PASSWORD, ""), + 2, + ) + + try: + result = await slide.slide_info(user_input[CONF_HOST]) + except (ClientConnectionError, ClientTimeoutError): + return {"base": "cannot_connect"} + except (AuthenticationFailed, DigestAuthCalcError): + return {"base": "invalid_auth"} + except Exception: # noqa: BLE001 + _LOGGER.exception("Exception occurred during connection test") + return {"base": "unknown"} + + if result is not None: + self._api_version = 2 + self._mac = format_mac(result["mac"]) + return {} + + # API version 2 is not working, try API version 1 instead + await slide.slide_del(user_input[CONF_HOST]) + await slide.slide_add( + user_input[CONF_HOST], + user_input.get(CONF_PASSWORD, ""), + 1, + ) + + try: + result = await slide.slide_info(user_input[CONF_HOST]) + except (ClientConnectionError, ClientTimeoutError): + return {"base": "cannot_connect"} + except (AuthenticationFailed, DigestAuthCalcError): + return {"base": "invalid_auth"} + except Exception: # noqa: BLE001 + _LOGGER.exception("Exception occurred during connection test") + return {"base": "unknown"} + + if result is None: + # API version 1 isn't working either + return {"base": "unknown"} + + self._api_version = 1 + self._mac = format_mac(result["mac"]) + + return {} + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + errors = {} + if user_input is not None: + if not (errors := await self.async_test_connection(user_input)): + await self.async_set_unique_id(self._mac) + self._abort_if_unique_id_configured() + user_input |= { + CONF_MAC: self._mac, + CONF_API_VERSION: self._api_version, + } + + return self.async_create_entry( + title=user_input[CONF_HOST], + data=user_input, + options={CONF_INVERT_POSITION: False}, + ) + + if user_input is not None and user_input.get(CONF_HOST) is not None: + self._host = user_input[CONF_HOST] + + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Optional(CONF_PASSWORD): str, + } + ), + {CONF_HOST: self._host}, + ), + errors=errors, + ) + + async def async_step_zeroconf( + self, discovery_info: ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery.""" + + # id is in the format 'slide_000000000000' + self._mac = format_mac(str(discovery_info.properties.get("id"))[6:]) + + await self.async_set_unique_id(self._mac) + + self._abort_if_unique_id_configured( + {CONF_HOST: discovery_info.host}, reload_on_update=True + ) + + errors = {} + if errors := await self.async_test_connection( + { + CONF_HOST: self._host, + } + ): + return self.async_abort( + reason="discovery_connection_failed", + 
description_placeholders={ + "error": errors["base"], + }, + ) + + self._host = discovery_info.host + + return await self.async_step_zeroconf_confirm() + + async def async_step_zeroconf_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + + if user_input is not None: + user_input |= { + CONF_HOST: self._host, + CONF_API_VERSION: 2, + CONF_MAC: format_mac(self._mac), + } + return self.async_create_entry( + title=user_input[CONF_HOST], + data=user_input, + options={CONF_INVERT_POSITION: False}, + ) + + self._set_confirm_only() + return self.async_show_form( + step_id="zeroconf_confirm", + description_placeholders={ + "host": self._host, + }, + ) + + +class SlideOptionsFlowHandler(OptionsFlow): + """Handle a options flow for slide_local.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + if user_input is not None: + return self.async_create_entry(data=user_input) + + return self.async_show_form( + step_id="init", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_INVERT_POSITION): bool, + } + ), + {CONF_INVERT_POSITION: self.config_entry.options[CONF_INVERT_POSITION]}, + ), + ) diff --git a/homeassistant/components/slide_local/const.py b/homeassistant/components/slide_local/const.py new file mode 100644 index 00000000000..9dc6d4ac925 --- /dev/null +++ b/homeassistant/components/slide_local/const.py @@ -0,0 +1,13 @@ +"""Define constants for the Slide component.""" + +API_LOCAL = "api_local" +ATTR_TOUCHGO = "touchgo" +CONF_INVERT_POSITION = "invert_position" +CONF_VERIFY_SSL = "verify_ssl" +DOMAIN = "slide_local" +SLIDES = "slides" +SLIDES_LOCAL = "slides_local" +DEFAULT_OFFSET = 0.15 +DEFAULT_RETRY = 120 +SERVICE_CALIBRATE = "calibrate" +SERVICE_TOUCHGO = "touchgo" diff --git a/homeassistant/components/slide_local/coordinator.py b/homeassistant/components/slide_local/coordinator.py new file mode 100644 index 00000000000..e5311967198 --- /dev/null +++ b/homeassistant/components/slide_local/coordinator.py @@ -0,0 +1,112 @@ +"""DataUpdateCoordinator for slide_local integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import TYPE_CHECKING, Any + +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, + GoSlideLocal as SlideLocalApi, +) + +from homeassistant.const import ( + CONF_API_VERSION, + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DEFAULT_OFFSET, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +if TYPE_CHECKING: + from . 
import SlideConfigEntry + + +class SlideCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Get and update the latest data.""" + + def __init__(self, hass: HomeAssistant, entry: SlideConfigEntry) -> None: + """Initialize the data object.""" + super().__init__( + hass, _LOGGER, name="Slide", update_interval=timedelta(seconds=15) + ) + self.slide = SlideLocalApi() + self.api_version = entry.data[CONF_API_VERSION] + self.mac = entry.data[CONF_MAC] + self.host = entry.data[CONF_HOST] + self.password = entry.data[CONF_PASSWORD] if self.api_version == 1 else "" + + async def _async_setup(self) -> None: + """Do initialization logic for Slide coordinator.""" + _LOGGER.debug("Initializing Slide coordinator") + + await self.slide.slide_add( + self.host, + self.password, + self.api_version, + ) + + _LOGGER.debug("Slide coordinator initialized") + + async def _async_update_data(self) -> dict[str, Any]: + """Update the data from the Slide device.""" + _LOGGER.debug("Start data update") + + try: + data = await self.slide.slide_info(self.host) + except ( + ClientConnectionError, + AuthenticationFailed, + ClientTimeoutError, + DigestAuthCalcError, + ) as ex: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + ) from ex + + if data is None: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + ) + + if "pos" in data: + if self.data is None: + oldpos = None + else: + oldpos = self.data.get("pos") + + data["pos"] = max(0, min(1, data["pos"])) + + if oldpos is None or oldpos == data["pos"]: + data["state"] = ( + STATE_CLOSED if data["pos"] > (1 - DEFAULT_OFFSET) else STATE_OPEN + ) + elif oldpos < data["pos"]: + data["state"] = ( + STATE_CLOSED + if data["pos"] >= (1 - DEFAULT_OFFSET) + else STATE_CLOSING + ) + else: + data["state"] = ( + STATE_OPEN if data["pos"] <= DEFAULT_OFFSET else STATE_OPENING + ) + + _LOGGER.debug("Data successfully updated: %s", data) + + return data diff --git a/homeassistant/components/slide_local/cover.py b/homeassistant/components/slide_local/cover.py new file mode 100644 index 00000000000..cf04f46d139 --- /dev/null +++ b/homeassistant/components/slide_local/cover.py @@ -0,0 +1,113 @@ +"""Support for Slide covers.""" + +from __future__ import annotations + +import logging +from typing import Any + +from homeassistant.components.cover import ATTR_POSITION, CoverDeviceClass, CoverEntity +from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPENING +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import SlideConfigEntry +from .const import CONF_INVERT_POSITION, DEFAULT_OFFSET +from .coordinator import SlideCoordinator +from .entity import SlideEntity + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SlideConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up cover(s) for Slide platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + [ + SlideCoverLocal( + coordinator, + entry, + ) + ] + ) + + +class SlideCoverLocal(SlideEntity, CoverEntity): + """Representation of a Slide Local API cover.""" + + _attr_assumed_state = True + _attr_device_class = CoverDeviceClass.CURTAIN + + def __init__( + self, + coordinator: SlideCoordinator, + entry: SlideConfigEntry, + ) -> None: + """Initialize the cover.""" + super().__init__(coordinator) + + self._attr_name = None + self.invert = entry.options[CONF_INVERT_POSITION] + self._attr_unique_id = coordinator.data["mac"] + + @property + def is_opening(self) -> bool: + """Return if the cover is opening or not.""" + return self.coordinator.data["state"] == STATE_OPENING + + @property + def is_closing(self) -> bool: + """Return if the cover is closing or not.""" + return self.coordinator.data["state"] == STATE_CLOSING + + @property + def is_closed(self) -> bool: + """Return None if status is unknown, True if closed, else False.""" + return self.coordinator.data["state"] == STATE_CLOSED + + @property + def current_cover_position(self) -> int | None: + """Return the current position of cover shutter.""" + pos = self.coordinator.data["pos"] + if pos is not None: + if (1 - pos) <= DEFAULT_OFFSET or pos <= DEFAULT_OFFSET: + pos = round(pos) + if not self.invert: + pos = 1 - pos + pos = int(pos * 100) + return pos + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + self.coordinator.data["state"] = STATE_OPENING + await self.coordinator.slide.slide_open(self.coordinator.host) + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the cover.""" + self.coordinator.data["state"] = STATE_CLOSING + await self.coordinator.slide.slide_close(self.coordinator.host) + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await self.coordinator.slide.slide_stop(self.coordinator.host) + + async def async_set_cover_position(self, **kwargs: Any) -> None: + """Move the cover to a specific position.""" + position = kwargs[ATTR_POSITION] / 100 + if not self.invert: + position = 1 - position + + if self.coordinator.data["pos"] is not None: + if position > self.coordinator.data["pos"]: + self.coordinator.data["state"] = STATE_CLOSING + else: + self.coordinator.data["state"] = STATE_OPENING + + await self.coordinator.slide.slide_set_position(self.coordinator.host, position) diff --git a/homeassistant/components/slide_local/entity.py b/homeassistant/components/slide_local/entity.py new file mode 100644 index 00000000000..51269649add --- /dev/null +++ b/homeassistant/components/slide_local/entity.py @@ -0,0 +1,27 @@ +"""Entities for slide_local integration.""" + +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import SlideCoordinator + + +class SlideEntity(CoordinatorEntity[SlideCoordinator]): + """Base class of a Slide local API cover.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: 
SlideCoordinator) -> None: + """Initialize the Slide device.""" + super().__init__(coordinator) + + self._attr_device_info = DeviceInfo( + manufacturer="Innovation in Motion", + connections={(dr.CONNECTION_NETWORK_MAC, coordinator.data["mac"])}, + name=coordinator.data["device_name"], + sw_version=coordinator.api_version, + hw_version=coordinator.data["board_rev"], + serial_number=coordinator.data["mac"], + configuration_url=f"http://{coordinator.host}", + ) diff --git a/homeassistant/components/slide_local/icons.json b/homeassistant/components/slide_local/icons.json new file mode 100644 index 00000000000..70d53e7f7a3 --- /dev/null +++ b/homeassistant/components/slide_local/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "button": { + "calibrate": { + "default": "mdi:tape-measure" + } + } + } +} diff --git a/homeassistant/components/slide_local/manifest.json b/homeassistant/components/slide_local/manifest.json new file mode 100644 index 00000000000..42c74b2c308 --- /dev/null +++ b/homeassistant/components/slide_local/manifest.json @@ -0,0 +1,17 @@ +{ + "domain": "slide_local", + "name": "Slide Local", + "codeowners": ["@dontinelli"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/slide_local", + "integration_type": "device", + "iot_class": "local_polling", + "quality_scale": "bronze", + "requirements": ["goslide-api==0.7.0"], + "zeroconf": [ + { + "type": "_http._tcp.local.", + "name": "slide*" + } + ] +} diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml new file mode 100644 index 00000000000..4eda62f6497 --- /dev/null +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -0,0 +1,66 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: No explicit event subscriptions. + dependency-transparency: done + action-setup: done + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: done + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: No custom action. + reauthentication-flow: todo + parallel-updates: done + test-coverage: todo + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: done + + # Gold + entity-translations: todo + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: todo + diagnostics: todo + exception-translations: done + icon-translations: todo + reconfiguration-flow: todo + dynamic-devices: todo + discovery-update-info: todo + repair-issues: todo + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: + status: exempt + comment: | + This integration doesn't have known issues that could be resolved by the user. 
+  docs-examples: done
+  # Platinum
+  async-dependency: done
+  inject-websession: todo
+  strict-typing: todo
diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json
new file mode 100644
index 00000000000..24c03d2ff96
--- /dev/null
+++ b/homeassistant/components/slide_local/strings.json
@@ -0,0 +1,61 @@
+{
+  "config": {
+    "step": {
+      "user": {
+        "description": "Provide information to connect to the Slide device",
+        "data": {
+          "host": "[%key:common::config_flow::data::host%]",
+          "password": "[%key:common::config_flow::data::password%]"
+        },
+        "data_description": {
+          "host": "The hostname or IP address of your local Slide",
+          "password": "The device code of your Slide (inside of the Slide or in the box, length is 8 characters). If your Slide runs firmware version 2 this is optional, as it is not used by the local API."
+        }
+      },
+      "zeroconf_confirm": {
+        "title": "Confirm setup for Slide",
+        "description": "Do you want to set up {host}?"
+      }
+    },
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "discovery_connection_failed": "The setup of the discovered device failed with the following error: {error}. Please try to set it up manually."
+    },
+    "error": {
+      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
+      "unknown": "[%key:common::config_flow::error::unknown%]"
+    }
+  },
+  "options": {
+    "step": {
+      "init": {
+        "title": "Configure Slide",
+        "description": "Reconfigure the Slide device",
+        "data": {
+          "invert_position": "Invert position"
+        },
+        "data_description": {
+          "invert_position": "Invert the position of your Slide cover."
+        }
+      }
+    }
+  },
+  "entity": {
+    "button": {
+      "calibrate": {
+        "name": "Calibrate"
+      }
+    },
+    "switch": {
+      "touchgo": {
+        "name": "TouchGo"
+      }
+    }
+  },
+  "exceptions": {
+    "update_error": {
+      "message": "Error while updating data from the API."
+    }
+  }
+}
diff --git a/homeassistant/components/slide_local/switch.py b/homeassistant/components/slide_local/switch.py
new file mode 100644
index 00000000000..6d357864c48
--- /dev/null
+++ b/homeassistant/components/slide_local/switch.py
@@ -0,0 +1,56 @@
+"""Support for Slide switch."""
+
+from __future__ import annotations
+
+from typing import Any
+
+from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
+from homeassistant.const import EntityCategory
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . 
import SlideConfigEntry +from .coordinator import SlideCoordinator +from .entity import SlideEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SlideConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up switch for Slide platform.""" + + coordinator = entry.runtime_data + + async_add_entities([SlideSwitch(coordinator)]) + + +class SlideSwitch(SlideEntity, SwitchEntity): + """Defines a Slide switch.""" + + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "touchgo" + _attr_device_class = SwitchDeviceClass.SWITCH + + def __init__(self, coordinator: SlideCoordinator) -> None: + """Initialize the slide switch.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.data["mac"]}-touchgo" + + @property + def is_on(self) -> bool: + """Return if switch is on.""" + return self.coordinator.data["touch_go"] + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off touchgo.""" + await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, False) + await self.coordinator.async_request_refresh() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on touchgo.""" + await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, True) + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/sma/__init__.py b/homeassistant/components/sma/__init__.py index febd4e34aaf..37fb4d72284 100644 --- a/homeassistant/components/sma/__init__.py +++ b/homeassistant/components/sma/__init__.py @@ -92,6 +92,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="sma", update_method=async_update_data, update_interval=interval, @@ -135,3 +136,21 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: data[PYSMA_REMOVE_LISTENER]() return unload_ok + + +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate entry.""" + + _LOGGER.debug("Migrating from version %s", entry.version) + + if entry.version == 1: + # 1 -> 2: Unique ID from integer to string + if entry.minor_version == 1: + minor_version = 2 + hass.config_entries.async_update_entry( + entry, unique_id=str(entry.unique_id), minor_version=minor_version + ) + + _LOGGER.debug("Migration successful") + + return True diff --git a/homeassistant/components/sma/config_flow.py b/homeassistant/components/sma/config_flow.py index fe26cbee2c8..4b3e01a79a8 100644 --- a/homeassistant/components/sma/config_flow.py +++ b/homeassistant/components/sma/config_flow.py @@ -40,6 +40,7 @@ class SmaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for SMA.""" VERSION = 1 + MINOR_VERSION = 2 def __init__(self) -> None: """Initialize.""" @@ -76,7 +77,7 @@ class SmaConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" if not errors: - await self.async_set_unique_id(device_info["serial"]) + await self.async_set_unique_id(str(device_info["serial"])) self._abort_if_unique_id_configured(updates=self._data) return self.async_create_entry( title=self._data[CONF_HOST], data=self._data diff --git a/homeassistant/components/smappee/__init__.py b/homeassistant/components/smappee/__init__.py index c7edd46c7e2..7fa30965aa8 100644 --- a/homeassistant/components/smappee/__init__.py +++ b/homeassistant/components/smappee/__init__.py @@ -25,6 +25,8 @@ from .const import ( TOKEN_URL, ) +type SmappeeConfigEntry = ConfigEntry[SmappeeBase] 
+ CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( @@ -72,7 +74,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: SmappeeConfigEntry) -> bool: """Set up Smappee from a zeroconf or config entry.""" if CONF_IP_ADDRESS in entry.data: if helper.is_smappee_genius(entry.data[CONF_SERIALNUMBER]): @@ -103,31 +105,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: smappee = Smappee(api=smappee_api) await hass.async_add_executor_job(smappee.load_service_locations) - hass.data[DOMAIN][entry.entry_id] = SmappeeBase(hass, smappee) + entry.runtime_data = SmappeeBase(hass, smappee) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SmappeeConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id, None) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) class SmappeeBase: """An object to hold the PySmappee instance.""" - def __init__(self, hass, smappee): + def __init__(self, hass: HomeAssistant, smappee: Smappee) -> None: """Initialize the Smappee API wrapper class.""" self.hass = hass self.smappee = smappee @Throttle(MIN_TIME_BETWEEN_UPDATES) - async def async_update(self): + async def async_update(self) -> None: """Update all Smappee trends and appliance states.""" await self.hass.async_add_executor_job( self.smappee.update_trends_and_appliance_states diff --git a/homeassistant/components/smappee/binary_sensor.py b/homeassistant/components/smappee/binary_sensor.py index a653896f1c2..86bc225dba1 100644 --- a/homeassistant/components/smappee/binary_sensor.py +++ b/homeassistant/components/smappee/binary_sensor.py @@ -6,11 +6,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import SmappeeConfigEntry from .const import DOMAIN BINARY_SENSOR_PREFIX = "Appliance" @@ -36,11 +36,11 @@ ICON_MAPPING = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SmappeeConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Smappee binary sensor.""" - smappee_base = hass.data[DOMAIN][config_entry.entry_id] + smappee_base = config_entry.runtime_data entities: list[BinarySensorEntity] = [] for service_location in smappee_base.smappee.service_locations.values(): diff --git a/homeassistant/components/smappee/config_flow.py b/homeassistant/components/smappee/config_flow.py index d5073bd9c34..4f7a71218ab 100644 --- a/homeassistant/components/smappee/config_flow.py +++ b/homeassistant/components/smappee/config_flow.py @@ -28,6 +28,9 @@ class SmappeeFlowHandler( DOMAIN = DOMAIN + ip_address: str # Set by zeroconf step, used by zeroconf_confirm step + serial_number: str # Set by zeroconf step, used by zeroconf_confirm step + async def async_oauth_create_entry(self, data): """Create an entry for the flow.""" @@ -59,52 +62,49 @@ class SmappeeFlowHandler( if self.is_cloud_device_already_added(): return self.async_abort(reason="already_configured_device") - self.context.update( - { - CONF_IP_ADDRESS: discovery_info.host, - CONF_SERIALNUMBER: serial_number, - "title_placeholders": {"name": serial_number}, - } - ) + self.context["title_placeholders"] = {"name": serial_number} + self.ip_address = discovery_info.host + self.serial_number = serial_number return await self.async_step_zeroconf_confirm() - async def async_step_zeroconf_confirm(self, user_input=None): + async def async_step_zeroconf_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm zeroconf flow.""" - errors = {} + errors: dict[str, str] = {} # Check if already configured (cloud) if self.is_cloud_device_already_added(): return self.async_abort(reason="already_configured_device") if user_input is None: - serialnumber = self.context.get(CONF_SERIALNUMBER) return self.async_show_form( step_id="zeroconf_confirm", - description_placeholders={"serialnumber": serialnumber}, + description_placeholders={"serialnumber": self.serial_number}, errors=errors, ) - ip_address = self.context.get(CONF_IP_ADDRESS) - serial_number = self.context.get(CONF_SERIALNUMBER) - # Attempt to make a connection to the local device - if helper.is_smappee_genius(serial_number): + if helper.is_smappee_genius(self.serial_number): # next generation device, attempt connect to the local mqtt broker - smappee_mqtt = mqtt.SmappeeLocalMqtt(serial_number=serial_number) + smappee_mqtt = mqtt.SmappeeLocalMqtt(serial_number=self.serial_number) connect = await self.hass.async_add_executor_job(smappee_mqtt.start_attempt) if not connect: return self.async_abort(reason="cannot_connect") else: # legacy devices, without local mqtt broker, try api access - smappee_api = api.api.SmappeeLocalApi(ip=ip_address) + smappee_api = api.api.SmappeeLocalApi(ip=self.ip_address) logon = await self.hass.async_add_executor_job(smappee_api.logon) if logon is None: return self.async_abort(reason="cannot_connect") return self.async_create_entry( - title=f"{DOMAIN}{serial_number}", - data={CONF_IP_ADDRESS: ip_address, CONF_SERIALNUMBER: serial_number}, + title=f"{DOMAIN}{self.serial_number}", + data={ + CONF_IP_ADDRESS: self.ip_address, + CONF_SERIALNUMBER: self.serial_number, + }, ) async def async_step_user( @@ -118,7 +118,9 @@ class SmappeeFlowHandler( return await 
self.async_step_environment() - async def async_step_environment(self, user_input=None): + async def async_step_environment( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Decide environment, cloud or local.""" if user_input is None: return self.async_show_form( @@ -144,7 +146,9 @@ class SmappeeFlowHandler( return await self.async_step_pick_implementation() - async def async_step_local(self, user_input=None): + async def async_step_local( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle local flow.""" if user_input is None: return self.async_show_form( diff --git a/homeassistant/components/smappee/sensor.py b/homeassistant/components/smappee/sensor.py index c984d936b06..2f9d6443568 100644 --- a/homeassistant/components/smappee/sensor.py +++ b/homeassistant/components/smappee/sensor.py @@ -10,12 +10,12 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfElectricPotential, UnitOfEnergy, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import SmappeeConfigEntry from .const import DOMAIN @@ -188,11 +188,11 @@ VOLTAGE_SENSORS: tuple[SmappeeVoltageSensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SmappeeConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Smappee sensor.""" - smappee_base = hass.data[DOMAIN][config_entry.entry_id] + smappee_base = config_entry.runtime_data entities = [] for service_location in smappee_base.smappee.service_locations.values(): diff --git a/homeassistant/components/smappee/strings.json b/homeassistant/components/smappee/strings.json index 2bdbf0dabe8..2966b5cd753 100644 --- a/homeassistant/components/smappee/strings.json +++ b/homeassistant/components/smappee/strings.json @@ -23,6 +23,7 @@ } }, "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_configured_device": "[%key:common::config_flow::abort::already_configured_device%]", "already_configured_local_device": "Local device(s) is already configured. Please remove those first before configuring a cloud device.", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", diff --git a/homeassistant/components/smappee/switch.py b/homeassistant/components/smappee/switch.py index 1bc5d159145..bccf816c823 100644 --- a/homeassistant/components/smappee/switch.py +++ b/homeassistant/components/smappee/switch.py @@ -3,11 +3,11 @@ from typing import Any from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import SmappeeConfigEntry from .const import DOMAIN SWITCH_PREFIX = "Switch" @@ -15,11 +15,11 @@ SWITCH_PREFIX = "Switch" async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SmappeeConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Smappee Comfort Plugs.""" - smappee_base = hass.data[DOMAIN][config_entry.entry_id] + smappee_base = config_entry.runtime_data entities = [] for service_location in smappee_base.smappee.service_locations.values(): diff --git a/homeassistant/components/smart_meter_texas/__init__.py b/homeassistant/components/smart_meter_texas/__init__.py index c6e466392f0..1cd7df68e91 100644 --- a/homeassistant/components/smart_meter_texas/__init__.py +++ b/homeassistant/components/smart_meter_texas/__init__.py @@ -64,6 +64,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="Smart Meter Texas", update_method=async_update_data, update_interval=SCAN_INTERVAL, diff --git a/homeassistant/components/smartthings/__init__.py b/homeassistant/components/smartthings/__init__.py index 9bfa11d3293..bcc752ff173 100644 --- a/homeassistant/components/smartthings/__init__.py +++ b/homeassistant/components/smartthings/__init__.py @@ -11,7 +11,6 @@ import logging from aiohttp.client_exceptions import ClientConnectionError, ClientResponseError from pysmartapp.event import EVENT_TYPE_DEVICE from pysmartthings import Attribute, Capability, SmartThings -from pysmartthings.device import DeviceEntity from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET @@ -19,12 +18,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_loaded_integration @@ -433,42 +427,3 @@ class DeviceBroker: updated_devices.add(device.device_id) async_dispatcher_send(self._hass, SIGNAL_SMARTTHINGS_UPDATE, updated_devices) - - -class SmartThingsEntity(Entity): - """Defines a SmartThings entity.""" - - _attr_should_poll = False - - def __init__(self, device: DeviceEntity) -> None: - """Initialize the instance.""" - self._device = device - self._dispatcher_remove = None - self._attr_name = device.label - self._attr_unique_id = device.device_id - self._attr_device_info = DeviceInfo( - configuration_url="https://account.smartthings.com", - identifiers={(DOMAIN, device.device_id)}, - manufacturer=device.status.ocf_manufacturer_name, - model=device.status.ocf_model_number, - name=device.label, - hw_version=device.status.ocf_hardware_version, - sw_version=device.status.ocf_firmware_version, - ) - - async def async_added_to_hass(self): - """Device added to hass.""" - - async def async_update_state(devices): - """Update device state.""" - if self._device.device_id in devices: - await self.async_update_ha_state(True) - - self._dispatcher_remove = 
async_dispatcher_connect( - self.hass, SIGNAL_SMARTTHINGS_UPDATE, async_update_state - ) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect the device when removed.""" - if self._dispatcher_remove: - self._dispatcher_remove() diff --git a/homeassistant/components/smartthings/binary_sensor.py b/homeassistant/components/smartthings/binary_sensor.py index 4bb60217eee..611473b011d 100644 --- a/homeassistant/components/smartthings/binary_sensor.py +++ b/homeassistant/components/smartthings/binary_sensor.py @@ -15,8 +15,8 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN +from .entity import SmartThingsEntity CAPABILITY_TO_ATTRIB = { Capability.acceleration_sensor: Attribute.acceleration, diff --git a/homeassistant/components/smartthings/climate.py b/homeassistant/components/smartthings/climate.py index c3929ababc1..d9535272295 100644 --- a/homeassistant/components/smartthings/climate.py +++ b/homeassistant/components/smartthings/climate.py @@ -28,8 +28,8 @@ from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN +from .entity import SmartThingsEntity ATTR_OPERATION_STATE = "operation_state" MODE_TO_STATE = { @@ -143,7 +143,6 @@ def get_capabilities(capabilities: Sequence[str]) -> Sequence[str] | None: # Or must have all of these thermostat capabilities thermostat_capabilities = [ Capability.temperature_measurement, - Capability.thermostat_cooling_setpoint, Capability.thermostat_heating_setpoint, Capability.thermostat_mode, ] @@ -165,8 +164,6 @@ def get_capabilities(capabilities: Sequence[str]) -> Sequence[str] | None: class SmartThingsThermostat(SmartThingsEntity, ClimateEntity): """Define a SmartThings climate entities.""" - _enable_turn_on_off_backwards_compatibility = False - def __init__(self, device): """Init the class.""" super().__init__(device) @@ -348,7 +345,6 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity): """Define a SmartThings Air Conditioner.""" _hvac_modes: list[HVACMode] - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device) -> None: """Init the class.""" diff --git a/homeassistant/components/smartthings/config_flow.py b/homeassistant/components/smartthings/config_flow.py index 9072683328d..081f833787e 100644 --- a/homeassistant/components/smartthings/config_flow.py +++ b/homeassistant/components/smartthings/config_flow.py @@ -42,21 +42,22 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 2 + api: SmartThings + app_id: str + location_id: str + def __init__(self) -> None: """Create a new instance of the flow handler.""" - self.access_token = None - self.app_id = None - self.api = None + self.access_token: str | None = None self.oauth_client_secret = None self.oauth_client_id = None self.installed_app_id = None self.refresh_token = None - self.location_id = None self.endpoints_initialized = False - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: None) -> ConfigFlowResult: """Occurs when a previously entry setup fails and is re-initiated.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def 
async_step_user( self, user_input: dict[str, Any] | None = None @@ -91,9 +92,11 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): # Show the next screen return await self.async_step_pat() - async def async_step_pat(self, user_input=None): + async def async_step_pat( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Get the Personal Access Token and validate it.""" - errors = {} + errors: dict[str, str] = {} if user_input is None or CONF_ACCESS_TOKEN not in user_input: return self._show_step_pat(errors) @@ -169,7 +172,9 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_select_location() - async def async_step_select_location(self, user_input=None): + async def async_step_select_location( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Ask user to select the location to setup.""" if user_input is None or CONF_LOCATION_ID not in user_input: # Get available locations @@ -196,7 +201,9 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(format_unique_id(self.app_id, self.location_id)) return await self.async_step_authorize() - async def async_step_authorize(self, user_input=None): + async def async_step_authorize( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Wait for the user to authorize the app installation.""" user_input = {} if user_input is None else user_input self.installed_app_id = user_input.get(CONF_INSTALLED_APP_ID) @@ -233,7 +240,9 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_install(self, data=None): + async def async_step_install( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Create a config entry at completion of a flow and authorization of the app.""" data = { CONF_ACCESS_TOKEN: self.access_token, diff --git a/homeassistant/components/smartthings/cover.py b/homeassistant/components/smartthings/cover.py index 276a68176b4..55e86bd582e 100644 --- a/homeassistant/components/smartthings/cover.py +++ b/homeassistant/components/smartthings/cover.py @@ -10,28 +10,25 @@ from pysmartthings import Attribute, Capability from homeassistant.components.cover import ( ATTR_POSITION, DOMAIN as COVER_DOMAIN, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, CoverDeviceClass, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_BATTERY_LEVEL from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN +from .entity import SmartThingsEntity VALUE_TO_STATE = { - "closed": STATE_CLOSED, - "closing": STATE_CLOSING, - "open": STATE_OPEN, - "opening": STATE_OPENING, - "partially open": STATE_OPEN, + "closed": CoverState.CLOSED, + "closing": CoverState.CLOSING, + "open": CoverState.OPEN, + "opening": CoverState.OPENING, + "partially open": CoverState.OPEN, "unknown": None, } @@ -147,16 +144,16 @@ class SmartThingsCover(SmartThingsEntity, CoverEntity): @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._state == STATE_OPENING + return self._state == CoverState.OPENING @property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._state == STATE_CLOSING + return self._state == CoverState.CLOSING @property def is_closed(self) -> bool | None: """Return if the cover is closed or not.""" - if self._state == STATE_CLOSED: + if self._state == CoverState.CLOSED: return True return None if self._state is None else False diff --git a/homeassistant/components/smartthings/entity.py b/homeassistant/components/smartthings/entity.py new file mode 100644 index 00000000000..cc63213d122 --- /dev/null +++ b/homeassistant/components/smartthings/entity.py @@ -0,0 +1,50 @@ +"""Support for SmartThings Cloud.""" + +from __future__ import annotations + +from pysmartthings.device import DeviceEntity + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE + + +class SmartThingsEntity(Entity): + """Defines a SmartThings entity.""" + + _attr_should_poll = False + + def __init__(self, device: DeviceEntity) -> None: + """Initialize the instance.""" + self._device = device + self._dispatcher_remove = None + self._attr_name = device.label + self._attr_unique_id = device.device_id + self._attr_device_info = DeviceInfo( + configuration_url="https://account.smartthings.com", + identifiers={(DOMAIN, device.device_id)}, + manufacturer=device.status.ocf_manufacturer_name, + model=device.status.ocf_model_number, + name=device.label, + hw_version=device.status.ocf_hardware_version, + sw_version=device.status.ocf_firmware_version, + ) + + async def async_added_to_hass(self): + """Device added to hass.""" + + async def async_update_state(devices): + """Update device state.""" + if self._device.device_id in devices: + await self.async_update_ha_state(True) + + self._dispatcher_remove = async_dispatcher_connect( + self.hass, SIGNAL_SMARTTHINGS_UPDATE, async_update_state + ) + + async def async_will_remove_from_hass(self) -> None: + """Disconnect the device when removed.""" + if self._dispatcher_remove: + self._dispatcher_remove() diff --git a/homeassistant/components/smartthings/fan.py b/homeassistant/components/smartthings/fan.py index 840c04c2a10..61e30589273 100644 --- a/homeassistant/components/smartthings/fan.py +++ b/homeassistant/components/smartthings/fan.py @@ -18,8 +18,8 @@ from homeassistant.util.percentage import ( ) from homeassistant.util.scaling import int_states_in_range -from . 
import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN +from .entity import SmartThingsEntity SPEED_RANGE = (1, 3) # off is not included @@ -70,7 +70,6 @@ class SmartThingsFan(SmartThingsEntity, FanEntity): """Define a SmartThings Fan.""" _attr_speed_count = int_states_in_range(SPEED_RANGE) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device): """Init the class.""" diff --git a/homeassistant/components/smartthings/light.py b/homeassistant/components/smartthings/light.py index 24a44a99d94..eb7c9af246b 100644 --- a/homeassistant/components/smartthings/light.py +++ b/homeassistant/components/smartthings/light.py @@ -10,7 +10,7 @@ from pysmartthings import Capability from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, ColorMode, @@ -21,10 +21,9 @@ from homeassistant.components.light import ( from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -import homeassistant.util.color as color_util -from . import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN +from .entity import SmartThingsEntity async def async_setup_entry( @@ -79,12 +78,12 @@ class SmartThingsLight(SmartThingsEntity, LightEntity): # SmartThings does not expose this attribute, instead it's # implemented within each device-type handler. This value is the # lowest kelvin found supported across 20+ handlers. - _attr_max_mireds = 500 # 2000K + _attr_min_color_temp_kelvin = 2000 # 500 mireds # SmartThings does not expose this attribute, instead it's # implemented within each device-type handler. This value is the # highest kelvin found supported across 20+ handlers. 
- _attr_min_mireds = 111 # 9000K + _attr_max_color_temp_kelvin = 9000 # 111 mireds def __init__(self, device): """Initialize a SmartThingsLight.""" @@ -122,8 +121,8 @@ class SmartThingsLight(SmartThingsEntity, LightEntity): """Turn the light on.""" tasks = [] # Color temperature - if ATTR_COLOR_TEMP in kwargs: - tasks.append(self.async_set_color_temp(kwargs[ATTR_COLOR_TEMP])) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + tasks.append(self.async_set_color_temp(kwargs[ATTR_COLOR_TEMP_KELVIN])) # Color if ATTR_HS_COLOR in kwargs: tasks.append(self.async_set_color(kwargs[ATTR_HS_COLOR])) @@ -164,9 +163,7 @@ class SmartThingsLight(SmartThingsEntity, LightEntity): ) # Color Temperature if ColorMode.COLOR_TEMP in self._attr_supported_color_modes: - self._attr_color_temp = color_util.color_temperature_kelvin_to_mired( - self._device.status.color_temperature - ) + self._attr_color_temp_kelvin = self._device.status.color_temperature # Color if ColorMode.HS in self._attr_supported_color_modes: self._attr_hs_color = ( @@ -181,10 +178,9 @@ class SmartThingsLight(SmartThingsEntity, LightEntity): saturation = max(min(float(hs_color[1]), 100.0), 0.0) await self._device.set_color(hue, saturation, set_status=True) - async def async_set_color_temp(self, value: float): + async def async_set_color_temp(self, value: int): """Set the color temperature of the device.""" - kelvin = color_util.color_temperature_mired_to_kelvin(value) - kelvin = max(min(kelvin, 30000), 1) + kelvin = max(min(value, 30000), 1) await self._device.set_color_temperature(kelvin, set_status=True) async def async_set_level(self, brightness: int, transition: int): diff --git a/homeassistant/components/smartthings/lock.py b/homeassistant/components/smartthings/lock.py index 0cd954e7542..a0ae9e50443 100644 --- a/homeassistant/components/smartthings/lock.py +++ b/homeassistant/components/smartthings/lock.py @@ -12,8 +12,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN +from .entity import SmartThingsEntity ST_STATE_LOCKED = "locked" ST_LOCK_ATTR_MAP = { diff --git a/homeassistant/components/smartthings/sensor.py b/homeassistant/components/smartthings/sensor.py index 2a61be3dc75..8bd0421d2bc 100644 --- a/homeassistant/components/smartthings/sensor.py +++ b/homeassistant/components/smartthings/sensor.py @@ -15,11 +15,11 @@ from homeassistant.components.sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - AREA_SQUARE_METERS, CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, EntityCategory, + UnitOfArea, UnitOfElectricPotential, UnitOfEnergy, UnitOfMass, @@ -31,8 +31,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . 
import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN +from .entity import SmartThingsEntity class Map(NamedTuple): @@ -95,7 +95,7 @@ CAPABILITY_TO_SENSORS: dict[str, list[Map]] = { Map( Attribute.bmi_measurement, "Body Mass Index", - f"{UnitOfMass.KILOGRAMS}/{AREA_SQUARE_METERS}", + f"{UnitOfMass.KILOGRAMS}/{UnitOfArea.SQUARE_METERS}", None, SensorStateClass.MEASUREMENT, None, diff --git a/homeassistant/components/smartthings/strings.json b/homeassistant/components/smartthings/strings.json index 7fbf966fa89..de94e5adfcd 100644 --- a/homeassistant/components/smartthings/strings.json +++ b/homeassistant/components/smartthings/strings.json @@ -7,14 +7,14 @@ }, "pat": { "title": "Enter Personal Access Token", - "description": "Please enter a SmartThings [Personal Access Token]({token_url}) that has been created per the [instructions]({component_url}). This will be used to create the Home Assistant integration within your SmartThings account.", + "description": "Please enter a SmartThings [Personal Access Token]({token_url}) that has been created per the [instructions]({component_url}). This will be used to create the Home Assistant integration within your SmartThings account.", "data": { "access_token": "[%key:common::config_flow::data::access_token%]" } }, "select_location": { "title": "Select Location", - "description": "Please select the SmartThings Location you wish to add to Home Assistant. We will then open a new window and ask you to login and authorize installation of the Home Assistant integration into the selected location.", + "description": "Please select the SmartThings Location you wish to add to Home Assistant. We will then open a new window and ask you to login and authorize installation of the Home Assistant integration into the selected location.", "data": { "location_id": "[%key:common::config_flow::data::location%]" } }, "authorize": { "title": "Authorize Home Assistant" } @@ -27,7 +27,7 @@ "token_invalid_format": "The token must be in the UID/GUID format", "token_unauthorized": "The token is invalid or no longer authorized.", "token_forbidden": "The token does not have the required OAuth scopes.", - "app_setup_error": "Unable to set up the SmartApp. Please try again.", + "app_setup_error": "Unable to set up the SmartApp. Please try again.", "webhook_error": "SmartThings could not validate the webhook URL. Please ensure the webhook URL is reachable from the internet and try again." } } diff --git a/homeassistant/components/smartthings/switch.py b/homeassistant/components/smartthings/switch.py index bd5f7bc0b68..5cfe4576d6a 100644 --- a/homeassistant/components/smartthings/switch.py +++ b/homeassistant/components/smartthings/switch.py @@ -12,8 +12,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN +from .entity import SmartThingsEntity async def async_setup_entry( diff --git a/homeassistant/components/smarttub/climate.py b/homeassistant/components/smarttub/climate.py index f0bb84b3390..7f3163834e0 100644 --- a/homeassistant/components/smarttub/climate.py +++ b/homeassistant/components/smarttub/climate.py @@ -68,7 +68,6 @@ class SmartTubThermostat(SmartTubEntity, ClimateEntity): ) _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_preset_modes = list(PRESET_MODES.values()) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator, spa): """Initialize the entity.""" diff --git a/homeassistant/components/smarttub/config_flow.py b/homeassistant/components/smarttub/config_flow.py index 827375c907c..cf96d7082a1 100644 --- a/homeassistant/components/smarttub/config_flow.py +++ b/homeassistant/components/smarttub/config_flow.py @@ -3,12 +3,12 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Any +from typing import Any from smarttub import LoginFailed import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from .const import DOMAIN @@ -24,12 +24,6 @@ class SmartTubConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Instantiate config flow.""" - super().__init__() - self._reauth_input: Mapping[str, Any] | None = None - self._reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -48,24 +42,17 @@ class SmartTubConfigFlow(ConfigFlow, domain=DOMAIN): else: await self.async_set_unique_id(account.id) - if self._reauth_input is None: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( title=user_input[CONF_EMAIL], data=user_input ) # this is a reauth attempt - if TYPE_CHECKING: - assert self._reauth_entry - if self._reauth_entry.unique_id != self.unique_id: - # there is a config entry matching this account, - # but it is not the one we were trying to reauth - return self.async_abort(reason="already_configured") - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input + self._abort_if_unique_id_mismatch(reason="already_configured") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors @@ -75,20 +62,19 @@ class SmartTubConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Get new credentials if the current ones don't work anymore.""" - self._reauth_input = entry_data - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" if user_input is None: # same as DATA_SCHEMA but with default email data_schema = vol.Schema( { vol.Required( - CONF_EMAIL, 
default=self._reauth_input.get(CONF_EMAIL) + CONF_EMAIL, + default=self._get_reauth_entry().data.get(CONF_EMAIL), ): str, vol.Required(CONF_PASSWORD): str, } diff --git a/homeassistant/components/smarttub/icons.json b/homeassistant/components/smarttub/icons.json index 7ae96d03383..2b89445754c 100644 --- a/homeassistant/components/smarttub/icons.json +++ b/homeassistant/components/smarttub/icons.json @@ -1,8 +1,16 @@ { "services": { - "set_primary_filtration": "mdi:filter", - "set_secondary_filtration": "mdi:filter-multiple", - "snooze_reminder": "mdi:timer-pause", - "reset_reminder": "mdi:timer-sync" + "set_primary_filtration": { + "service": "mdi:filter" + }, + "set_secondary_filtration": { + "service": "mdi:filter-multiple" + }, + "snooze_reminder": { + "service": "mdi:timer-pause" + }, + "reset_reminder": { + "service": "mdi:timer-sync" + } } } diff --git a/homeassistant/components/smarttub/manifest.json b/homeassistant/components/smarttub/manifest.json index f2514063a40..d5102f14437 100644 --- a/homeassistant/components/smarttub/manifest.json +++ b/homeassistant/components/smarttub/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/smarttub", "iot_class": "cloud_polling", "loggers": ["smarttub"], - "quality_scale": "platinum", - "requirements": ["python-smarttub==0.0.36"] + "requirements": ["python-smarttub==0.0.38"] } diff --git a/homeassistant/components/smarty/__init__.py b/homeassistant/components/smarty/__init__.py index cc2e3850ef9..0d043804c3d 100644 --- a/homeassistant/components/smarty/__init__.py +++ b/homeassistant/components/smarty/__init__.py @@ -1,23 +1,20 @@ """Support to control a Salda Smarty XP/XV ventilation unit.""" -from datetime import timedelta import ipaddress import logging -from pysmarty import Smarty import voluptuous as vol +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_HOST, CONF_NAME, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import discovery +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import issue_registry as ir import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import dispatcher_send -from homeassistant.helpers.event import track_time_interval from homeassistant.helpers.typing import ConfigType -DOMAIN = "smarty" -DATA_SMARTY = "smarty" -SMARTY_NAME = "Smarty" +from .const import DOMAIN +from .coordinator import SmartyConfigEntry, SmartyCoordinator _LOGGER = logging.getLogger(__name__) @@ -26,48 +23,84 @@ CONFIG_SCHEMA = vol.Schema( DOMAIN: vol.Schema( { vol.Required(CONF_HOST): vol.All(ipaddress.ip_address, cv.string), - vol.Optional(CONF_NAME, default=SMARTY_NAME): cv.string, + vol.Optional(CONF_NAME, default="Smarty"): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) -RPM = "rpm" -SIGNAL_UPDATE_SMARTY = "smarty_update" +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.FAN, + Platform.SENSOR, + Platform.SWITCH, +] -def setup(hass: HomeAssistant, config: ConfigType) -> bool: +async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool: + """Create a smarty system.""" + if config := hass_config.get(DOMAIN): + hass.async_create_task(_async_import(hass, config)) + return True + + +async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: """Set up the smarty environment.""" - conf = config[DOMAIN] + if not 
hass.config_entries.async_entries(DOMAIN): + # Start import flow + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + if result["type"] == FlowResultType.ABORT: + ir.async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_import_issue_{result['reason']}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Smarty", + }, + ) + return - host = conf[CONF_HOST] - name = conf[CONF_NAME] + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Smarty", + }, + ) - _LOGGER.debug("Name: %s, host: %s", name, host) - smarty = Smarty(host=host) +async def async_setup_entry(hass: HomeAssistant, entry: SmartyConfigEntry) -> bool: + """Set up the Smarty environment from a config entry.""" - hass.data[DOMAIN] = {"api": smarty, "name": name} + coordinator = SmartyCoordinator(hass) - # Initial update - smarty.update() + await coordinator.async_config_entry_first_refresh() - # Load platforms - discovery.load_platform(hass, Platform.FAN, DOMAIN, {}, config) - discovery.load_platform(hass, Platform.SENSOR, DOMAIN, {}, config) - discovery.load_platform(hass, Platform.BINARY_SENSOR, DOMAIN, {}, config) + entry.runtime_data = coordinator - def poll_device_update(event_time): - """Update Smarty device.""" - _LOGGER.debug("Updating Smarty device") - if smarty.update(): - _LOGGER.debug("Update success") - dispatcher_send(hass, SIGNAL_UPDATE_SMARTY) - else: - _LOGGER.debug("Update failed") - - track_time_interval(hass, poll_device_update, timedelta(seconds=30)) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True + + +async def async_unload_entry(hass: HomeAssistant, entry: SmartyConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/smarty/binary_sensor.py b/homeassistant/components/smarty/binary_sensor.py index cf40dc7b982..213cb00d47c 100644 --- a/homeassistant/components/smarty/binary_sensor.py +++ b/homeassistant/components/smarty/binary_sensor.py @@ -2,111 +2,86 @@ from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass import logging -from pysmarty import Smarty +from pysmarty2 import Smarty from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, + BinarySensorEntityDescription, ) -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import DOMAIN, SIGNAL_UPDATE_SMARTY +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity _LOGGER = logging.getLogger(__name__) -async def async_setup_platform( +@dataclass(frozen=True, kw_only=True) +class SmartyBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class describing Smarty binary sensor entities.""" + + value_fn: Callable[[Smarty], bool] + + +ENTITIES: tuple[SmartyBinarySensorEntityDescription, ...] = ( + SmartyBinarySensorEntityDescription( + key="alarm", + translation_key="alarm", + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda smarty: smarty.alarm, + ), + SmartyBinarySensorEntityDescription( + key="warning", + translation_key="warning", + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda smarty: smarty.warning, + ), + SmartyBinarySensorEntityDescription( + key="boost", + translation_key="boost_state", + value_fn=lambda smarty: smarty.boost, + ), +) + + +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, + entry: SmartyConfigEntry, async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Smarty Binary Sensor Platform.""" - smarty: Smarty = hass.data[DOMAIN]["api"] - name: str = hass.data[DOMAIN]["name"] - sensors = [ - AlarmSensor(name, smarty), - WarningSensor(name, smarty), - BoostSensor(name, smarty), - ] + coordinator = entry.runtime_data - async_add_entities(sensors, True) + async_add_entities( + SmartyBinarySensor(coordinator, description) for description in ENTITIES + ) -class SmartyBinarySensor(BinarySensorEntity): +class SmartyBinarySensor(SmartyEntity, BinarySensorEntity): """Representation of a Smarty Binary Sensor.""" - _attr_should_poll = False + entity_description: SmartyBinarySensorEntityDescription def __init__( self, - name: str, - device_class: BinarySensorDeviceClass | None, - smarty: Smarty, + coordinator: SmartyCoordinator, + entity_description: SmartyBinarySensorEntityDescription, ) -> None: """Initialize the entity.""" - self._attr_name = name - self._attr_device_class = device_class - self._smarty = smarty - - async def async_added_to_hass(self) -> None: - """Call to update.""" - async_dispatcher_connect(self.hass, SIGNAL_UPDATE_SMARTY, self._update_callback) - - @callback - def _update_callback(self) -> None: - """Call update method.""" - self.async_schedule_update_ha_state(True) - - -class BoostSensor(SmartyBinarySensor): - """Boost State Binary Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Alarm Sensor Init.""" - super().__init__(name=f"{name} Boost State", device_class=None, smarty=smarty) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_is_on = self._smarty.boost - - -class AlarmSensor(SmartyBinarySensor): - """Alarm Binary Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Alarm Sensor Init.""" - super().__init__( - name=f"{name} Alarm", - device_class=BinarySensorDeviceClass.PROBLEM, - smarty=smarty, + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}_{entity_description.key}" ) - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_is_on = self._smarty.alarm - - -class WarningSensor(SmartyBinarySensor): - """Warning Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Warning 
Sensor Init.""" - super().__init__( - name=f"{name} Warning", - device_class=BinarySensorDeviceClass.PROBLEM, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_is_on = self._smarty.warning + @property + def is_on(self) -> bool: + """Return the state of the binary sensor.""" + return self.entity_description.value_fn(self.coordinator.client) diff --git a/homeassistant/components/smarty/button.py b/homeassistant/components/smarty/button.py new file mode 100644 index 00000000000..b8e31cf6fc8 --- /dev/null +++ b/homeassistant/components/smarty/button.py @@ -0,0 +1,74 @@ +"""Platform to control a Salda Smarty XP/XV ventilation unit.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import logging +from typing import Any + +from pysmarty2 import Smarty + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmartyButtonDescription(ButtonEntityDescription): + """Class describing Smarty button.""" + + press_fn: Callable[[Smarty], bool | None] + + +ENTITIES: tuple[SmartyButtonDescription, ...] = ( + SmartyButtonDescription( + key="reset_filters_timer", + translation_key="reset_filters_timer", + press_fn=lambda smarty: smarty.reset_filters_timer(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Smarty Button Platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + SmartyButton(coordinator, description) for description in ENTITIES + ) + + +class SmartyButton(SmartyEntity, ButtonEntity): + """Representation of a Smarty Button.""" + + entity_description: SmartyButtonDescription + + def __init__( + self, + coordinator: SmartyCoordinator, + entity_description: SmartyButtonDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}_{entity_description.key}" + ) + + async def async_press(self, **kwargs: Any) -> None: + """Press the button.""" + await self.hass.async_add_executor_job( + self.entity_description.press_fn, self.coordinator.client + ) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/smarty/config_flow.py b/homeassistant/components/smarty/config_flow.py new file mode 100644 index 00000000000..9a55356a990 --- /dev/null +++ b/homeassistant/components/smarty/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Smarty integration.""" + +from typing import Any + +from pysmarty2 import Smarty +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_NAME + +from .const import DOMAIN + + +class SmartyConfigFlow(ConfigFlow, domain=DOMAIN): + """Smarty config flow.""" + + def _test_connection(self, host: str) -> str | None: + """Test the connection to the Smarty API.""" + smarty = Smarty(host=host) + try: + if smarty.update(): + return None + except Exception: # noqa: BLE001 + return "unknown" + else: + return "cannot_connect" + + 
async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + errors: dict[str, str] = {} + + if user_input is not None: + self._async_abort_entries_match(user_input) + error = await self.hass.async_add_executor_job( + self._test_connection, user_input[CONF_HOST] + ) + if not error: + return self.async_create_entry( + title=user_input[CONF_HOST], data=user_input + ) + errors["base"] = error + return self.async_show_form( + step_id="user", + data_schema=vol.Schema({vol.Required(CONF_HOST): str}), + errors=errors, + ) + + async def async_step_import( + self, import_config: dict[str, Any] + ) -> ConfigFlowResult: + """Handle a flow initialized by import.""" + error = await self.hass.async_add_executor_job( + self._test_connection, import_config[CONF_HOST] + ) + if not error: + return self.async_create_entry( + title=import_config[CONF_NAME], + data={CONF_HOST: import_config[CONF_HOST]}, + ) + return self.async_abort(reason=error) diff --git a/homeassistant/components/smarty/const.py b/homeassistant/components/smarty/const.py new file mode 100644 index 00000000000..926c4233750 --- /dev/null +++ b/homeassistant/components/smarty/const.py @@ -0,0 +1,3 @@ +"""Constants for the Smarty component.""" + +DOMAIN = "smarty" diff --git a/homeassistant/components/smarty/coordinator.py b/homeassistant/components/smarty/coordinator.py new file mode 100644 index 00000000000..d7f3e2452d1 --- /dev/null +++ b/homeassistant/components/smarty/coordinator.py @@ -0,0 +1,44 @@ +"""Smarty Coordinator.""" + +from datetime import timedelta +import logging + +from pysmarty2 import Smarty + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + +type SmartyConfigEntry = ConfigEntry[SmartyCoordinator] + + +class SmartyCoordinator(DataUpdateCoordinator[None]): + """Smarty Coordinator.""" + + config_entry: SmartyConfigEntry + software_version: str + configuration_version: str + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize.""" + super().__init__( + hass, + logger=_LOGGER, + name="Smarty", + update_interval=timedelta(seconds=30), + ) + self.client = Smarty(host=self.config_entry.data[CONF_HOST]) + + async def _async_setup(self) -> None: + if not await self.hass.async_add_executor_job(self.client.update): + raise UpdateFailed("Failed to update Smarty data") + self.software_version = self.client.get_software_version() + self.configuration_version = self.client.get_configuration_version() + + async def _async_update_data(self) -> None: + """Fetch data from Smarty.""" + if not await self.hass.async_add_executor_job(self.client.update): + raise UpdateFailed("Failed to update Smarty data") diff --git a/homeassistant/components/smarty/entity.py b/homeassistant/components/smarty/entity.py new file mode 100644 index 00000000000..d26b56d489f --- /dev/null +++ b/homeassistant/components/smarty/entity.py @@ -0,0 +1,23 @@ +"""Smarty Entity class.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import DOMAIN +from .coordinator import SmartyCoordinator + + +class SmartyEntity(CoordinatorEntity[SmartyCoordinator]): + """Representation of a Smarty Entity.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: SmartyCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.config_entry.entry_id)}, + manufacturer="Salda", + sw_version=self.coordinator.software_version, + hw_version=self.coordinator.configuration_version, + ) diff --git a/homeassistant/components/smarty/fan.py b/homeassistant/components/smarty/fan.py index 37f7c2e493f..2804f14ee15 100644 --- a/homeassistant/components/smarty/fan.py +++ b/homeassistant/components/smarty/fan.py @@ -6,21 +6,19 @@ import logging import math from typing import Any -from pysmarty import Smarty - from homeassistant.components.fan import FanEntity, FanEntityFeature from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util.percentage import ( percentage_to_ranged_value, ranged_value_to_percentage, ) from homeassistant.util.scaling import int_states_in_range -from . import DOMAIN, SIGNAL_UPDATE_SMARTY +from . import SmartyConfigEntry +from .coordinator import SmartyCoordinator +from .entity import SmartyEntity _LOGGER = logging.getLogger(__name__) @@ -28,36 +26,35 @@ DEFAULT_ON_PERCENTAGE = 66 SPEED_RANGE = (1, 3) # off is not included -async def async_setup_platform( +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, + entry: SmartyConfigEntry, async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Smarty Fan Platform.""" - smarty: Smarty = hass.data[DOMAIN]["api"] - name: str = hass.data[DOMAIN]["name"] - async_add_entities([SmartyFan(name, smarty)], True) + coordinator = entry.runtime_data + + async_add_entities([SmartyFan(coordinator)]) -class SmartyFan(FanEntity): +class SmartyFan(SmartyEntity, FanEntity): """Representation of a Smarty Fan.""" - _attr_icon = "mdi:air-conditioner" - _attr_should_poll = False + _attr_name = None + _attr_translation_key = "fan" _attr_supported_features = ( FanEntityFeature.SET_SPEED | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False - def __init__(self, name, smarty): + def __init__(self, coordinator: SmartyCoordinator) -> None: """Initialize the entity.""" - self._attr_name = name + super().__init__(coordinator) self._smarty_fan_speed = 0 - self._smarty = smarty + self._smarty = coordinator.client + self._attr_unique_id = coordinator.config_entry.entry_id @property def is_on(self) -> bool: @@ -111,17 +108,8 @@ class SmartyFan(FanEntity): self._smarty_fan_speed = 0 self.schedule_update_ha_state() - async def async_added_to_hass(self) -> None: - """Call to update fan.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, SIGNAL_UPDATE_SMARTY, self._update_callback - ) - ) - @callback - def _update_callback(self) -> None: + def _handle_coordinator_update(self) -> None: """Call update method.""" - _LOGGER.debug("Updating state") self._smarty_fan_speed = self._smarty.fan_speed - self.async_write_ha_state() + super()._handle_coordinator_update() diff --git 
a/homeassistant/components/smarty/icons.json b/homeassistant/components/smarty/icons.json new file mode 100644 index 00000000000..97e74199f0a --- /dev/null +++ b/homeassistant/components/smarty/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "fan": { + "fan": { + "default": "mdi:air-conditioner" + } + } + } +} diff --git a/homeassistant/components/smarty/manifest.json b/homeassistant/components/smarty/manifest.json index 8769aa666a7..ca3133d8add 100644 --- a/homeassistant/components/smarty/manifest.json +++ b/homeassistant/components/smarty/manifest.json @@ -2,9 +2,10 @@ "domain": "smarty", "name": "Salda Smarty", "codeowners": ["@z0mbieprocess"], - "disabled": "Dependencies not compatible with the new pip resolver", + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/smarty", + "integration_type": "hub", "iot_class": "local_polling", - "loggers": ["pymodbus", "pysmarty"], - "requirements": ["pysmarty==0.8"] + "loggers": ["pymodbus", "pysmarty2"], + "requirements": ["pysmarty2==0.10.1"] } diff --git a/homeassistant/components/smarty/sensor.py b/homeassistant/components/smarty/sensor.py index a0c15b3825f..9d847003a59 100644 --- a/homeassistant/components/smarty/sensor.py +++ b/homeassistant/components/smarty/sensor.py @@ -2,182 +2,118 @@ from __future__ import annotations -import datetime as dt +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime, timedelta import logging -from pysmarty import Smarty +from pysmarty2 import Smarty -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.const import UnitOfTemperature -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.const import REVOLUTIONS_PER_MINUTE, UnitOfTemperature +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util -from . import DOMAIN, SIGNAL_UPDATE_SMARTY +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity _LOGGER = logging.getLogger(__name__) -async def async_setup_platform( +def get_filter_days_left(smarty: Smarty) -> datetime | None: + """Return the date when the filter needs to be replaced.""" + if (days_left := smarty.filter_timer) is not None: + return dt_util.now() + timedelta(days=days_left) + return None + + +@dataclass(frozen=True, kw_only=True) +class SmartySensorDescription(SensorEntityDescription): + """Class describing Smarty sensor.""" + + value_fn: Callable[[Smarty], float | datetime | None] + + +ENTITIES: tuple[SmartySensorDescription, ...] 
= ( + SmartySensorDescription( + key="supply_air_temperature", + translation_key="supply_air_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda smarty: smarty.supply_air_temperature, + ), + SmartySensorDescription( + key="extract_air_temperature", + translation_key="extract_air_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda smarty: smarty.extract_air_temperature, + ), + SmartySensorDescription( + key="outdoor_air_temperature", + translation_key="outdoor_air_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda smarty: smarty.outdoor_air_temperature, + ), + SmartySensorDescription( + key="supply_fan_speed", + translation_key="supply_fan_speed", + native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, + value_fn=lambda smarty: smarty.supply_fan_speed, + ), + SmartySensorDescription( + key="extract_fan_speed", + translation_key="extract_fan_speed", + native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, + value_fn=lambda smarty: smarty.extract_fan_speed, + ), + SmartySensorDescription( + key="filter_days_left", + translation_key="filter_days_left", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=get_filter_days_left, + ), +) + + +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, + entry: SmartyConfigEntry, async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Smarty Sensor Platform.""" - smarty: Smarty = hass.data[DOMAIN]["api"] - name: str = hass.data[DOMAIN]["name"] - sensors = [ - SupplyAirTemperatureSensor(name, smarty), - ExtractAirTemperatureSensor(name, smarty), - OutdoorAirTemperatureSensor(name, smarty), - SupplyFanSpeedSensor(name, smarty), - ExtractFanSpeedSensor(name, smarty), - FilterDaysLeftSensor(name, smarty), - ] + coordinator = entry.runtime_data - async_add_entities(sensors, True) + async_add_entities( + SmartySensor(coordinator, description) for description in ENTITIES + ) -class SmartySensor(SensorEntity): +class SmartySensor(SmartyEntity, SensorEntity): """Representation of a Smarty Sensor.""" - _attr_should_poll = False + entity_description: SmartySensorDescription def __init__( self, - name: str, - device_class: SensorDeviceClass | None, - smarty: Smarty, - unit_of_measurement: str | None, + coordinator: SmartyCoordinator, + entity_description: SmartySensorDescription, ) -> None: """Initialize the entity.""" - self._attr_name = name - self._attr_native_value = None - self._attr_device_class = device_class - self._attr_native_unit_of_measurement = unit_of_measurement - self._smarty = smarty - - async def async_added_to_hass(self) -> None: - """Call to update.""" - async_dispatcher_connect(self.hass, SIGNAL_UPDATE_SMARTY, self._update_callback) - - @callback - def _update_callback(self) -> None: - """Call update method.""" - self.async_schedule_update_ha_state(True) - - -class SupplyAirTemperatureSensor(SmartySensor): - """Supply Air Temperature Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Supply Air Temperature Init.""" - super().__init__( - name=f"{name} Supply Air Temperature", - device_class=SensorDeviceClass.TEMPERATURE, - unit_of_measurement=UnitOfTemperature.CELSIUS, - smarty=smarty, + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + 
f"{coordinator.config_entry.entry_id}_{entity_description.key}" ) - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.supply_air_temperature - - -class ExtractAirTemperatureSensor(SmartySensor): - """Extract Air Temperature Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Supply Air Temperature Init.""" - super().__init__( - name=f"{name} Extract Air Temperature", - device_class=SensorDeviceClass.TEMPERATURE, - unit_of_measurement=UnitOfTemperature.CELSIUS, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.extract_air_temperature - - -class OutdoorAirTemperatureSensor(SmartySensor): - """Extract Air Temperature Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Outdoor Air Temperature Init.""" - super().__init__( - name=f"{name} Outdoor Air Temperature", - device_class=SensorDeviceClass.TEMPERATURE, - unit_of_measurement=UnitOfTemperature.CELSIUS, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.outdoor_air_temperature - - -class SupplyFanSpeedSensor(SmartySensor): - """Supply Fan Speed RPM.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Supply Fan Speed RPM Init.""" - super().__init__( - name=f"{name} Supply Fan Speed", - device_class=None, - unit_of_measurement=None, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.supply_fan_speed - - -class ExtractFanSpeedSensor(SmartySensor): - """Extract Fan Speed RPM.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Extract Fan Speed RPM Init.""" - super().__init__( - name=f"{name} Extract Fan Speed", - device_class=None, - unit_of_measurement=None, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.extract_fan_speed - - -class FilterDaysLeftSensor(SmartySensor): - """Filter Days Left.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Filter Days Left Init.""" - super().__init__( - name=f"{name} Filter Days Left", - device_class=SensorDeviceClass.TIMESTAMP, - unit_of_measurement=None, - smarty=smarty, - ) - self._days_left = 91 - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - days_left = self._smarty.filter_timer - if days_left is not None and days_left != self._days_left: - self._attr_native_value = dt_util.now() + dt.timedelta(days=days_left) - self._days_left = days_left + @property + def native_value(self) -> float | datetime | None: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.client) diff --git a/homeassistant/components/smarty/strings.json b/homeassistant/components/smarty/strings.json new file mode 100644 index 00000000000..341a300a26e --- /dev/null +++ b/homeassistant/components/smarty/strings.json @@ -0,0 +1,80 @@ +{ + "config": { + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the Smarty device" + } + } + }, + "error": { + "cannot_connect": 
"[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "issues": { + "deprecated_yaml_import_issue_unknown": { + "title": "YAML import failed with unknown error", + "description": "Configuring {integration_title} using YAML is being removed but there was an unknown error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually." + }, + "deprecated_yaml_import_issue_auth_error": { + "title": "YAML import failed due to an authentication error", + "description": "Configuring {integration_title} using YAML is being removed but there was an authentication error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually." + }, + "deprecated_yaml_import_issue_cannot_connect": { + "title": "YAML import failed due to a connection error", + "description": "Configuring {integration_title} using YAML is being removed but there was a connect error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually." 
+ } + }, + "entity": { + "binary_sensor": { + "alarm": { + "name": "Alarm" + }, + "warning": { + "name": "Warning" + }, + "boost_state": { + "name": "Boost state" + } + }, + "button": { + "reset_filters_timer": { + "name": "Reset filters timer" + } + }, + "sensor": { + "supply_air_temperature": { + "name": "Supply air temperature" + }, + "extract_air_temperature": { + "name": "Extract air temperature" + }, + "outdoor_air_temperature": { + "name": "Outdoor air temperature" + }, + "supply_fan_speed": { + "name": "Supply fan speed" + }, + "extract_fan_speed": { + "name": "Extract fan speed" + }, + "filter_days_left": { + "name": "Filter days left" + } + }, + "switch": { + "boost": { + "name": "Boost" + } + } + } +} diff --git a/homeassistant/components/smarty/switch.py b/homeassistant/components/smarty/switch.py new file mode 100644 index 00000000000..bf5fe80db44 --- /dev/null +++ b/homeassistant/components/smarty/switch.py @@ -0,0 +1,90 @@ +"""Platform to control a Salda Smarty XP/XV ventilation unit.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import logging +from typing import Any + +from pysmarty2 import Smarty + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmartySwitchDescription(SwitchEntityDescription): + """Class describing Smarty switch.""" + + is_on_fn: Callable[[Smarty], bool] + turn_on_fn: Callable[[Smarty], bool | None] + turn_off_fn: Callable[[Smarty], bool | None] + + +ENTITIES: tuple[SmartySwitchDescription, ...] 
= ( + SmartySwitchDescription( + key="boost", + translation_key="boost", + is_on_fn=lambda smarty: smarty.boost, + turn_on_fn=lambda smarty: smarty.enable_boost(), + turn_off_fn=lambda smarty: smarty.disable_boost(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Smarty Switch Platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + SmartySwitch(coordinator, description) for description in ENTITIES + ) + + +class SmartySwitch(SmartyEntity, SwitchEntity): + """Representation of a Smarty Switch.""" + + entity_description: SmartySwitchDescription + + def __init__( + self, + coordinator: SmartyCoordinator, + entity_description: SmartySwitchDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}_{entity_description.key}" + ) + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + return self.entity_description.is_on_fn(self.coordinator.client) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.hass.async_add_executor_job( + self.entity_description.turn_on_fn, self.coordinator.client + ) + await self.coordinator.async_refresh() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.hass.async_add_executor_job( + self.entity_description.turn_off_fn, self.coordinator.client + ) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/smhi/config_flow.py b/homeassistant/components/smhi/config_flow.py index b3350f6bb18..2992b176f24 100644 --- a/homeassistant/components/smhi/config_flow.py +++ b/homeassistant/components/smhi/config_flow.py @@ -8,7 +8,7 @@ from smhi.smhi_lib import Smhi, SmhiForecastException import voluptuous as vol from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import ( @@ -39,7 +39,6 @@ class SmhiFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for SMHI component.""" VERSION = 2 - config_entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -83,19 +82,10 @@ class SmhiFlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - self.config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors: dict[str, str] = {} - assert self.config_entry + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: lat: float = user_input[CONF_LOCATION][CONF_LATITUDE] @@ -105,8 +95,8 @@ class SmhiFlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() - old_lat = 
self.config_entry.data[CONF_LOCATION][CONF_LATITUDE] - old_lon = self.config_entry.data[CONF_LOCATION][CONF_LONGITUDE] + old_lat = reconfigure_entry.data[CONF_LOCATION][CONF_LATITUDE] + old_lon = reconfigure_entry.data[CONF_LOCATION][CONF_LONGITUDE] entity_reg = er.async_get(self.hass) if entity := entity_reg.async_get_entity_id( @@ -125,17 +115,16 @@ class SmhiFlowHandler(ConfigFlow, domain=DOMAIN): ) return self.async_update_reload_and_abort( - self.config_entry, + reconfigure_entry, unique_id=unique_id, - data={**self.config_entry.data, **user_input}, - reason="reconfigure_successful", + data_updates=user_input, ) errors["base"] = "wrong_location" schema = self.add_suggested_values_to_schema( vol.Schema({vol.Required(CONF_LOCATION): LocationSelector()}), - self.config_entry.data, + reconfigure_entry.data, ) return self.async_show_form( - step_id="reconfigure_confirm", data_schema=schema, errors=errors + step_id="reconfigure", data_schema=schema, errors=errors ) diff --git a/homeassistant/components/smhi/manifest.json b/homeassistant/components/smhi/manifest.json index 261e24d6f97..76f9812e815 100644 --- a/homeassistant/components/smhi/manifest.json +++ b/homeassistant/components/smhi/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/smhi", "iot_class": "cloud_polling", "loggers": ["smhi"], - "requirements": ["smhi-pkg==1.0.16"] + "requirements": ["smhi-pkg==1.0.18"] } diff --git a/homeassistant/components/smhi/strings.json b/homeassistant/components/smhi/strings.json index e78fee64a2b..3d2a790e6b6 100644 --- a/homeassistant/components/smhi/strings.json +++ b/homeassistant/components/smhi/strings.json @@ -12,7 +12,7 @@ "longitude": "[%key:common::config_flow::data::longitude%]" } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Reconfigure your location in Sweden", "data": { "latitude": "[%key:common::config_flow::data::latitude%]", diff --git a/homeassistant/components/smhi/weather.py b/homeassistant/components/smhi/weather.py index aac4c5d24be..3d5642a2784 100644 --- a/homeassistant/components/smhi/weather.py +++ b/homeassistant/components/smhi/weather.py @@ -218,9 +218,7 @@ class SmhiWeather(WeatherEntity): data.append( { - ATTR_FORECAST_TIME: forecast.valid_time.replace( - tzinfo=dt_util.UTC - ).isoformat(), + ATTR_FORECAST_TIME: forecast.valid_time.isoformat(), ATTR_FORECAST_NATIVE_TEMP: forecast.temperature_max, ATTR_FORECAST_NATIVE_TEMP_LOW: forecast.temperature_min, ATTR_FORECAST_NATIVE_PRECIPITATION: forecast.total_precipitation, diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py index 16eb60b9c87..cbfb8162d63 100644 --- a/homeassistant/components/smlight/__init__.py +++ b/homeassistant/components/smlight/__init__.py @@ -2,26 +2,59 @@ from __future__ import annotations +from dataclasses import dataclass + +from pysmlight import Api2 + from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .coordinator import SmDataUpdateCoordinator +from .coordinator import SmDataUpdateCoordinator, SmFirmwareUpdateCoordinator PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, Platform.SENSOR, + Platform.SWITCH, + Platform.UPDATE, ] -type SmConfigEntry = ConfigEntry[SmDataUpdateCoordinator] + + +@dataclass(kw_only=True) +class SmlightData: + """Coordinator data class.""" + + data: 
SmDataUpdateCoordinator + firmware: SmFirmwareUpdateCoordinator + + +type SmConfigEntry = ConfigEntry[SmlightData] async def async_setup_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: """Set up SMLIGHT Zigbee from a config entry.""" - coordinator = SmDataUpdateCoordinator(hass, entry.data[CONF_HOST]) - await coordinator.async_config_entry_first_refresh() - entry.runtime_data = coordinator + client = Api2(host=entry.data[CONF_HOST], session=async_get_clientsession(hass)) + + data_coordinator = SmDataUpdateCoordinator(hass, entry.data[CONF_HOST], client) + firmware_coordinator = SmFirmwareUpdateCoordinator( + hass, entry.data[CONF_HOST], client + ) + + await data_coordinator.async_config_entry_first_refresh() + await firmware_coordinator.async_config_entry_first_refresh() + + if data_coordinator.data.info.legacy_api < 2: + entry.async_create_background_task( + hass, client.sse.client(), "smlight-sse-client" + ) + + entry.runtime_data = SmlightData( + data=data_coordinator, firmware=firmware_coordinator + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True diff --git a/homeassistant/components/smlight/binary_sensor.py b/homeassistant/components/smlight/binary_sensor.py new file mode 100644 index 00000000000..b1aba3a52fe --- /dev/null +++ b/homeassistant/components/smlight/binary_sensor.py @@ -0,0 +1,141 @@ +"""Support for SLZB-06 binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from pysmlight import Sensors +from pysmlight.const import Events as SmEvents +from pysmlight.sse import MessageEvent + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import SCAN_INTERNET_INTERVAL +from .coordinator import SmDataUpdateCoordinator +from .entity import SmEntity + +SCAN_INTERVAL = SCAN_INTERNET_INTERVAL + + +@dataclass(frozen=True, kw_only=True) +class SmBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class describing SMLIGHT binary sensor entities.""" + + value_fn: Callable[[Sensors], bool] + + +SENSORS = [ + SmBinarySensorEntityDescription( + key="ethernet", + translation_key="ethernet", + value_fn=lambda x: x.ethernet, + ), + SmBinarySensorEntityDescription( + key="vpn", + translation_key="vpn", + entity_registry_enabled_default=False, + value_fn=lambda x: x.vpn_status, + ), + SmBinarySensorEntityDescription( + key="wifi", + translation_key="wifi", + entity_registry_enabled_default=False, + value_fn=lambda x: x.wifi_connected, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up SMLIGHT sensor based on a config entry.""" + coordinator = entry.runtime_data.data + + async_add_entities( + [ + *( + SmBinarySensorEntity(coordinator, description) + for description in SENSORS + ), + SmInternetSensorEntity(coordinator), + ] + ) + + +class SmBinarySensorEntity(SmEntity, BinarySensorEntity): + """Representation of a slzb binary sensor.""" + + entity_description: SmBinarySensorEntityDescription + _attr_device_class = BinarySensorDeviceClass.CONNECTIVITY + _attr_entity_category = EntityCategory.DIAGNOSTIC + + def __init__( + self, + 
coordinator: SmDataUpdateCoordinator, + description: SmBinarySensorEntityDescription, + ) -> None: + """Initialize slzb binary sensor.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + + @property + def is_on(self) -> bool: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data.sensors) + + +class SmInternetSensorEntity(SmEntity, BinarySensorEntity): + """Representation of the SLZB internet sensor.""" + + _attr_translation_key = "internet" + _attr_device_class = BinarySensorDeviceClass.CONNECTIVITY + _attr_entity_category = EntityCategory.DIAGNOSTIC + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + ) -> None: + """Initialize slzb binary sensor.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.unique_id}_{self._attr_translation_key}" + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + self.async_on_remove( + self.coordinator.client.sse.register_callback( + SmEvents.EVENT_INET_STATE, self.internet_callback + ) + ) + await self.async_update() + + @callback + def internet_callback(self, event: MessageEvent) -> None: + """Update internet state from event.""" + self._attr_is_on = event.data == "ok" + self.async_write_ha_state() + + @property + def should_poll(self) -> bool: + """Poll entity for internet connected updates.""" + return True + + async def async_update(self) -> None: + """Update the sensor. + + This is an async api, device will respond with EVENT_INET_STATE event. + """ + await self.coordinator.client.get_param("inetState") diff --git a/homeassistant/components/smlight/button.py b/homeassistant/components/smlight/button.py new file mode 100644 index 00000000000..d82034b87fb --- /dev/null +++ b/homeassistant/components/smlight/button.py @@ -0,0 +1,115 @@ +"""Support for SLZB-06 buttons.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +import logging + +from pysmlight.web import CmdWrapper + +from homeassistant.components.button import ( + DOMAIN as BUTTON_DOMAIN, + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import SmDataUpdateCoordinator +from .entity import SmEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmButtonDescription(ButtonEntityDescription): + """Class to describe a Button entity.""" + + press_fn: Callable[[CmdWrapper], Awaitable[None]] + + +BUTTONS: list[SmButtonDescription] = [ + SmButtonDescription( + key="core_restart", + translation_key="core_restart", + device_class=ButtonDeviceClass.RESTART, + press_fn=lambda cmd: cmd.reboot(), + ), + SmButtonDescription( + key="zigbee_restart", + translation_key="zigbee_restart", + device_class=ButtonDeviceClass.RESTART, + press_fn=lambda cmd: cmd.zb_restart(), + ), + SmButtonDescription( + key="zigbee_flash_mode", + translation_key="zigbee_flash_mode", + entity_registry_enabled_default=False, + press_fn=lambda cmd: cmd.zb_bootloader(), + ), +] + +ROUTER = SmButtonDescription( + 
key="reconnect_zigbee_router", + translation_key="reconnect_zigbee_router", + entity_registry_enabled_default=False, + press_fn=lambda cmd: cmd.zb_router(), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up SMLIGHT buttons based on a config entry.""" + coordinator = entry.runtime_data.data + + async_add_entities(SmButton(coordinator, button) for button in BUTTONS) + entity_created = False + + @callback + def _check_router(startup: bool = False) -> None: + nonlocal entity_created + + if coordinator.data.info.zb_type == 1 and not entity_created: + async_add_entities([SmButton(coordinator, ROUTER)]) + entity_created = True + elif coordinator.data.info.zb_type != 1 and (startup or entity_created): + entity_registry = er.async_get(hass) + if entity_id := entity_registry.async_get_entity_id( + BUTTON_DOMAIN, DOMAIN, f"{coordinator.unique_id}-{ROUTER.key}" + ): + entity_registry.async_remove(entity_id) + + coordinator.async_add_listener(_check_router) + _check_router(startup=True) + + +class SmButton(SmEntity, ButtonEntity): + """Defines a SLZB-06 button.""" + + coordinator: SmDataUpdateCoordinator + entity_description: SmButtonDescription + _attr_entity_category = EntityCategory.CONFIG + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmButtonDescription, + ) -> None: + """Initialize SLZB-06 button entity.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}-{description.key}" + + async def async_press(self) -> None: + """Trigger button press.""" + await self.entity_description.press_fn(self.coordinator.client.cmds) diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py index 1b8cc4efeb1..92b543e0441 100644 --- a/homeassistant/components/smlight/config_flow.py +++ b/homeassistant/components/smlight/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any from pysmlight import Api2 @@ -33,10 +34,11 @@ STEP_AUTH_DATA_SCHEMA = vol.Schema( class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for SMLIGHT Zigbee.""" + host: str + def __init__(self) -> None: """Initialize the config flow.""" self.client: Api2 - self.host: str | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -45,9 +47,8 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - host = user_input[CONF_HOST] - self.client = Api2(host, session=async_get_clientsession(self.hass)) - self.host = host + self.host = user_input[CONF_HOST] + self.client = Api2(self.host, session=async_get_clientsession(self.hass)) try: if not await self._async_check_auth_required(user_input): @@ -94,8 +95,13 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): mac = discovery_info.properties.get("mac") # fallback for legacy firmware if mac is None: - info = await self.client.get_info() + try: + info = await self.client.get_info() + except SmlightConnectionError: + # User is likely running unsupported ESPHome firmware + return self.async_abort(reason="cannot_connect") mac = info.MAC + await self.async_set_unique_id(format_mac(mac)) self._abort_if_unique_id_configured() @@ -127,6 +133,42 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reauth( + self, entry_data: Mapping[str, 
Any] + ) -> ConfigFlowResult: + """Handle reauth when API Authentication failed.""" + + self.host = entry_data[CONF_HOST] + self.client = Api2(self.host, session=async_get_clientsession(self.hass)) + + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication of an existing config entry.""" + errors = {} + if user_input is not None: + try: + await self.client.authenticate( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD] + ) + except SmlightAuthError: + errors["base"] = "invalid_auth" + except SmlightConnectionError: + return self.async_abort(reason="cannot_connect") + else: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=STEP_AUTH_DATA_SCHEMA, + description_placeholders=self.context["title_placeholders"], + errors=errors, + ) + async def _async_check_auth_required(self, user_input: dict[str, Any]) -> bool: """Check if auth required and attempt to authenticate.""" if await self.client.check_auth_needed(): @@ -148,4 +190,5 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_HOST] = self.host assert info.model is not None - return self.async_create_entry(title=info.model, data=user_input) + title = self.context.get("title_placeholders", {}).get(CONF_NAME) or info.model + return self.async_create_entry(title=title, data=user_input) diff --git a/homeassistant/components/smlight/const.py b/homeassistant/components/smlight/const.py index de3270fe3be..669094b2441 100644 --- a/homeassistant/components/smlight/const.py +++ b/homeassistant/components/smlight/const.py @@ -6,6 +6,11 @@ import logging DOMAIN = "smlight" ATTR_MANUFACTURER = "SMLIGHT" +DATA_COORDINATOR = "data" +FIRMWARE_COORDINATOR = "firmware" +SCAN_FIRMWARE_INTERVAL = timedelta(hours=6) LOGGER = logging.getLogger(__package__) SCAN_INTERVAL = timedelta(seconds=300) +SCAN_INTERNET_INTERVAL = timedelta(minutes=15) +UPTIME_DEVIATION = timedelta(seconds=5) diff --git a/homeassistant/components/smlight/coordinator.py b/homeassistant/components/smlight/coordinator.py index 6a29f14fafd..5b38ec4a89e 100644 --- a/homeassistant/components/smlight/coordinator.py +++ b/homeassistant/components/smlight/coordinator.py @@ -1,19 +1,28 @@ """DataUpdateCoordinator for Smlight.""" +from __future__ import annotations + +from abc import abstractmethod from dataclasses import dataclass +from typing import TYPE_CHECKING from pysmlight import Api2, Info, Sensors +from pysmlight.const import Settings, SettingsProp from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError +from pysmlight.web import Firmware -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError -from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.issue_registry import IssueSeverity from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DOMAIN, LOGGER, SCAN_INTERVAL +from .const import DOMAIN, LOGGER, SCAN_FIRMWARE_INTERVAL, SCAN_INTERVAL + +if TYPE_CHECKING: + from . 
import SmConfigEntry @dataclass @@ -24,12 +33,21 @@ class SmData: info: Info -class SmDataUpdateCoordinator(DataUpdateCoordinator[SmData]): - """Class to manage fetching SMLIGHT data.""" +@dataclass +class SmFwData: + """SMLIGHT firmware data stored in the FirmwareUpdateCoordinator.""" - config_entry: ConfigEntry + info: Info + esp_firmware: list[Firmware] | None + zb_firmware: list[Firmware] | None - def __init__(self, hass: HomeAssistant, host: str) -> None: + +class SmBaseDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): + """Base Coordinator for SMLIGHT.""" + + config_entry: SmConfigEntry + + def __init__(self, hass: HomeAssistant, host: str, client: Api2) -> None: """Initialize the coordinator.""" super().__init__( hass, @@ -38,8 +56,9 @@ class SmDataUpdateCoordinator(DataUpdateCoordinator[SmData]): update_interval=SCAN_INTERVAL, ) + self.client = client self.unique_id: str | None = None - self.client = Api2(host=host, session=async_get_clientsession(hass)) + self.legacy_api: int = 0 async def _async_setup(self) -> None: """Authenticate if needed during initial setup.""" @@ -54,18 +73,82 @@ class SmDataUpdateCoordinator(DataUpdateCoordinator[SmData]): self.config_entry.data[CONF_PASSWORD], ) except SmlightAuthError as err: - LOGGER.error("Failed to authenticate: %s", err) - raise ConfigEntryError from err + raise ConfigEntryAuthFailed from err + else: + # Auth required but no credentials available + raise ConfigEntryAuthFailed info = await self.client.get_info() self.unique_id = format_mac(info.MAC) - - async def _async_update_data(self) -> SmData: - """Fetch data from the SMLIGHT device.""" - try: - return SmData( - sensors=await self.client.get_sensors(), - info=await self.client.get_info(), + self.legacy_api = info.legacy_api + if info.legacy_api == 2: + ir.async_create_issue( + self.hass, + DOMAIN, + "unsupported_firmware", + is_fixable=False, + is_persistent=False, + learn_more_url="https://smlight.tech/flasher/#SLZB-06", + severity=IssueSeverity.ERROR, + translation_key="unsupported_firmware", ) + + async def _async_update_data(self) -> _DataT: + try: + return await self._internal_update_data() + except SmlightAuthError as err: + raise ConfigEntryAuthFailed from err + except SmlightConnectionError as err: raise UpdateFailed(err) from err + + @abstractmethod + async def _internal_update_data(self) -> _DataT: + """Update coordinator data.""" + + +class SmDataUpdateCoordinator(SmBaseDataUpdateCoordinator[SmData]): + """Class to manage fetching SMLIGHT sensor data.""" + + def update_setting(self, setting: Settings, value: bool | int) -> None: + """Update the sensor value from event.""" + + prop = SettingsProp[setting.name].value + setattr(self.data.sensors, prop, value) + + self.async_set_updated_data(self.data) + + async def _internal_update_data(self) -> SmData: + """Fetch sensor data from the SMLIGHT device.""" + sensors = Sensors() + if not self.legacy_api: + sensors = await self.client.get_sensors() + + return SmData( + sensors=sensors, + info=await self.client.get_info(), + ) + + +class SmFirmwareUpdateCoordinator(SmBaseDataUpdateCoordinator[SmFwData]): + """Class to manage fetching SMLIGHT firmware update data from cloud.""" + + def __init__(self, hass: HomeAssistant, host: str, client: Api2) -> None: + """Initialize the coordinator.""" + super().__init__(hass, host, client) + + self.update_interval = SCAN_FIRMWARE_INTERVAL + # only one update can run at a time (core or zibgee) + self.in_progress = False + + async def _internal_update_data(self) -> SmFwData: + 
"""Fetch data from the SMLIGHT device.""" + info = await self.client.get_info() + + return SmFwData( + info=info, + esp_firmware=await self.client.get_firmware_version(info.fw_channel), + zb_firmware=await self.client.get_firmware_version( + info.fw_channel, device=info.model, mode="zigbee" + ), + ) diff --git a/homeassistant/components/smlight/diagnostics.py b/homeassistant/components/smlight/diagnostics.py new file mode 100644 index 00000000000..d303e5803bb --- /dev/null +++ b/homeassistant/components/smlight/diagnostics.py @@ -0,0 +1,25 @@ +"""Collect diagnostics for SMLIGHT devices.""" + +from __future__ import annotations + +from typing import Any + +from pysmlight.const import Actions + +from homeassistant.core import HomeAssistant + +from . import SmConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: SmConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordintator = config_entry.runtime_data.data + info = await coordintator.client.get_info() + log = await coordintator.client.get({"action": Actions.API_GET_LOG.value}) or "none" + + return { + "info": info.to_dict(), + "log": log.split("\n"), + } diff --git a/homeassistant/components/smlight/entity.py b/homeassistant/components/smlight/entity.py index 50767d3bf74..7e6213cbdf1 100644 --- a/homeassistant/components/smlight/entity.py +++ b/homeassistant/components/smlight/entity.py @@ -10,15 +10,15 @@ from homeassistant.helpers.device_registry import ( from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ATTR_MANUFACTURER -from .coordinator import SmDataUpdateCoordinator +from .coordinator import SmBaseDataUpdateCoordinator -class SmEntity(CoordinatorEntity[SmDataUpdateCoordinator]): +class SmEntity(CoordinatorEntity[SmBaseDataUpdateCoordinator]): """Base class for all SMLight entities.""" _attr_has_entity_name = True - def __init__(self, coordinator: SmDataUpdateCoordinator) -> None: + def __init__(self, coordinator: SmBaseDataUpdateCoordinator) -> None: """Initialize entity with device.""" super().__init__(coordinator) mac = format_mac(coordinator.data.info.MAC) diff --git a/homeassistant/components/smlight/icons.json b/homeassistant/components/smlight/icons.json new file mode 100644 index 00000000000..3d086466b4f --- /dev/null +++ b/homeassistant/components/smlight/icons.json @@ -0,0 +1,15 @@ +{ + "entity": { + "sensor": { + "device_mode": { + "default": "mdi:connection" + }, + "firmware_channel": { + "default": "mdi:update" + }, + "zigbee_type": { + "default": "mdi:zigbee" + } + } + } +} diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index 72d915666e5..cb791ac111b 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -5,8 +5,8 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", - "iot_class": "local_polling", - "requirements": ["pysmlight==0.0.13"], + "iot_class": "local_push", + "requirements": ["pysmlight==0.1.4"], "zeroconf": [ { "type": "_slzb-06._tcp.local." 
diff --git a/homeassistant/components/smlight/sensor.py b/homeassistant/components/smlight/sensor.py index d9c03760fb8..1116b99f8c1 100644 --- a/homeassistant/components/smlight/sensor.py +++ b/homeassistant/components/smlight/sensor.py @@ -4,8 +4,10 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from datetime import datetime, timedelta +from itertools import chain -from pysmlight import Sensors +from pysmlight import Info, Sensors from homeassistant.components.sensor import ( SensorDeviceClass, @@ -16,8 +18,11 @@ from homeassistant.components.sensor import ( from homeassistant.const import EntityCategory, UnitOfInformation, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.util.dt import utcnow from . import SmConfigEntry +from .const import UPTIME_DEVIATION from .coordinator import SmDataUpdateCoordinator from .entity import SmEntity @@ -26,11 +31,42 @@ from .entity import SmEntity class SmSensorEntityDescription(SensorEntityDescription): """Class describing SMLIGHT sensor entities.""" - entity_category = EntityCategory.DIAGNOSTIC value_fn: Callable[[Sensors], float | None] -SENSORS = [ +@dataclass(frozen=True, kw_only=True) +class SmInfoEntityDescription(SensorEntityDescription): + """Class describing SMLIGHT information entities.""" + + value_fn: Callable[[Info], StateType] + + +INFO: list[SmInfoEntityDescription] = [ + SmInfoEntityDescription( + key="device_mode", + translation_key="device_mode", + device_class=SensorDeviceClass.ENUM, + options=["eth", "wifi", "usb"], + value_fn=lambda x: x.coord_mode, + ), + SmInfoEntityDescription( + key="firmware_channel", + translation_key="firmware_channel", + device_class=SensorDeviceClass.ENUM, + options=["dev", "release"], + value_fn=lambda x: x.fw_channel, + ), + SmInfoEntityDescription( + key="zigbee_type", + translation_key="zigbee_type", + device_class=SensorDeviceClass.ENUM, + options=["coordinator", "router", "thread"], + value_fn=lambda x: x.zb_type, + ), +] + + +SENSORS: list[SmSensorEntityDescription] = [ SmSensorEntityDescription( key="core_temperature", translation_key="core_temperature", @@ -67,6 +103,23 @@ SENSORS = [ ), ] +UPTIME: list[SmSensorEntityDescription] = [ + SmSensorEntityDescription( + key="core_uptime", + translation_key="core_uptime", + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + value_fn=lambda x: x.uptime, + ), + SmSensorEntityDescription( + key="socket_uptime", + translation_key="socket_uptime", + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + value_fn=lambda x: x.socket_uptime, + ), +] + async def async_setup_entry( hass: HomeAssistant, @@ -74,17 +127,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up SMLIGHT sensor based on a config entry.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.data async_add_entities( - SmSensorEntity(coordinator, description) for description in SENSORS + chain( + (SmInfoSensorEntity(coordinator, description) for description in INFO), + (SmSensorEntity(coordinator, description) for description in SENSORS), + (SmUptimeSensorEntity(coordinator, description) for description in UPTIME), + ) ) class SmSensorEntity(SmEntity, SensorEntity): """Representation of a slzb sensor.""" + coordinator: SmDataUpdateCoordinator 
entity_description: SmSensorEntityDescription + _attr_entity_category = EntityCategory.DIAGNOSTIC def __init__( self, @@ -98,6 +157,78 @@ class SmSensorEntity(SmEntity, SensorEntity): self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" @property - def native_value(self) -> float | None: + def native_value(self) -> datetime | str | float | None: """Return the sensor value.""" return self.entity_description.value_fn(self.coordinator.data.sensors) + + +class SmInfoSensorEntity(SmEntity, SensorEntity): + """Representation of a slzb info sensor.""" + + coordinator: SmDataUpdateCoordinator + entity_description: SmInfoEntityDescription + _attr_entity_category = EntityCategory.DIAGNOSTIC + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmInfoEntityDescription, + ) -> None: + """Initiate slzb sensor.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + + @property + def native_value(self) -> StateType: + """Return the sensor value.""" + value = self.entity_description.value_fn(self.coordinator.data.info) + options = self.entity_description.options + + if isinstance(value, int) and options is not None: + value = options[value] if 0 <= value < len(options) else None + + return value + + +class SmUptimeSensorEntity(SmSensorEntity): + """Representation of a slzb uptime sensor.""" + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmSensorEntityDescription, + ) -> None: + "Initialize uptime sensor instance." + super().__init__(coordinator, description) + self._last_uptime: datetime | None = None + + def get_uptime(self, uptime: float | None) -> datetime | None: + """Return device uptime or zigbee socket uptime. + + Converts uptime from seconds to a datetime value, allow up to 5 + seconds deviation. This avoids unnecessary updates to sensor state, + that may be caused by clock jitter. + """ + if uptime is None: + # reset to unknown state + self._last_uptime = None + return None + + new_uptime = utcnow() - timedelta(seconds=uptime) + + if ( + not self._last_uptime + or abs(new_uptime - self._last_uptime) > UPTIME_DEVIATION + ): + self._last_uptime = new_uptime + + return self._last_uptime + + @property + def native_value(self) -> datetime | None: + """Return the sensor value.""" + value = self.entity_description.value_fn(self.coordinator.data.sensors) + + return self.get_uptime(value) diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json index 02b9ebcc4e8..1e6a533beef 100644 --- a/homeassistant/components/smlight/strings.json +++ b/homeassistant/components/smlight/strings.json @@ -17,6 +17,14 @@ "password": "[%key:common::config_flow::data::password%]" } }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "Please enter the correct username and password", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, "confirm_discovery": { "description": "Do you want to set up SMLIGHT at {host}?" 
} @@ -27,10 +35,27 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "reauth_failed": "[%key:common::config_flow::error::invalid_auth%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { + "binary_sensor": { + "ethernet": { + "name": "Ethernet" + }, + "internet": { + "name": "Internet" + }, + "vpn": { + "name": "VPN" + }, + "wifi": { + "name": "Wi-Fi" + } + }, "sensor": { "zigbee_temperature": { "name": "Zigbee chip temp" @@ -43,7 +68,78 @@ }, "ram_usage": { "name": "RAM usage" + }, + "core_uptime": { + "name": "Core uptime" + }, + "socket_uptime": { + "name": "Zigbee uptime" + }, + "device_mode": { + "name": "Connection mode", + "state": { + "eth": "Ethernet", + "wifi": "Wi-Fi", + "usb": "USB" + } + }, + "firmware_channel": { + "name": "Firmware channel", + "state": { + "dev": "Development", + "release": "Stable" + } + }, + "zigbee_type": { + "name": "Zigbee type", + "state": { + "coordinator": "Coordinator", + "router": "Router", + "thread": "Thread" + } } + }, + "button": { + "core_restart": { + "name": "Core restart" + }, + "zigbee_restart": { + "name": "Zigbee restart" + }, + "zigbee_flash_mode": { + "name": "Zigbee flash mode" + }, + "reconnect_zigbee_router": { + "name": "Reconnect zigbee router" + } + }, + "switch": { + "auto_zigbee_update": { + "name": "Auto Zigbee update" + }, + "disable_led": { + "name": "Disable LEDs" + }, + "night_mode": { + "name": "LED night mode" + }, + "vpn_enabled": { + "name": "VPN enabled" + } + }, + "update": { + "core_update": { + "name": "Core firmware" + }, + "zigbee_update": { + "name": "Zigbee firmware" + } + } + }, + "issues": { + "unsupported_firmware": { + "title": "SLZB core firmware update required", + "description": "Your SMLIGHT SLZB-06x device is running an unsupported core firmware version. Please update it to the latest version to enjoy all the features of this integration." } } } diff --git a/homeassistant/components/smlight/switch.py b/homeassistant/components/smlight/switch.py new file mode 100644 index 00000000000..1c591e3dbe8 --- /dev/null +++ b/homeassistant/components/smlight/switch.py @@ -0,0 +1,130 @@ +"""Support for SLZB-06 switches.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import logging +from typing import Any + +from pysmlight import Sensors, SettingsEvent +from pysmlight.const import Settings + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import SmConfigEntry +from .coordinator import SmDataUpdateCoordinator +from .entity import SmEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmSwitchEntityDescription(SwitchEntityDescription): + """Class to describe a Switch entity.""" + + setting: Settings + state_fn: Callable[[Sensors], bool | None] + + +SWITCHES: list[SmSwitchEntityDescription] = [ + SmSwitchEntityDescription( + key="disable_led", + translation_key="disable_led", + setting=Settings.DISABLE_LEDS, + state_fn=lambda x: x.disable_leds, + ), + SmSwitchEntityDescription( + key="night_mode", + translation_key="night_mode", + setting=Settings.NIGHT_MODE, + state_fn=lambda x: x.night_mode, + ), + SmSwitchEntityDescription( + key="auto_zigbee_update", + translation_key="auto_zigbee_update", + entity_category=EntityCategory.CONFIG, + setting=Settings.ZB_AUTOUPDATE, + entity_registry_enabled_default=False, + state_fn=lambda x: x.auto_zigbee, + ), + SmSwitchEntityDescription( + key="vpn_enabled", + translation_key="vpn_enabled", + setting=Settings.ENABLE_VPN, + entity_registry_enabled_default=False, + state_fn=lambda x: x.vpn_enabled, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Initialize switches for SLZB-06 device.""" + coordinator = entry.runtime_data.data + + async_add_entities(SmSwitch(coordinator, switch) for switch in SWITCHES) + + +class SmSwitch(SmEntity, SwitchEntity): + """Representation of a SLZB-06 switch.""" + + coordinator: SmDataUpdateCoordinator + entity_description: SmSwitchEntityDescription + _attr_device_class = SwitchDeviceClass.SWITCH + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmSwitchEntityDescription, + ) -> None: + """Initialize the switch.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}-{description.key}" + + self._page, self._toggle = description.setting.value + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + self.async_on_remove( + self.coordinator.client.sse.register_settings_cb( + self.entity_description.setting, self.event_callback + ) + ) + + async def set_smlight(self, state: bool) -> None: + """Set the state on SLZB device.""" + await self.coordinator.client.set_toggle(self._page, self._toggle, state) + + @callback + def event_callback(self, event: SettingsEvent) -> None: + """Handle switch events from the SLZB device.""" + if event.setting is not None: + self.coordinator.update_setting( + self.entity_description.setting, event.setting[self._toggle] + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.set_smlight(True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.set_smlight(False) + + @property + def is_on(self) -> bool | None: + """Return the state of the switch.""" + return self.entity_description.state_fn(self.coordinator.data.sensors) diff --git a/homeassistant/components/smlight/update.py b/homeassistant/components/smlight/update.py new file mode 100644 index 00000000000..147b1d766ef --- /dev/null +++ b/homeassistant/components/smlight/update.py @@ -0,0 +1,216 @@ +"""Support updates for SLZB-06 ESP32 and Zigbee firmwares.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +from dataclasses 
import dataclass +from typing import Any, Final + +from pysmlight.const import Events as SmEvents +from pysmlight.models import Firmware, Info +from pysmlight.sse import MessageEvent + +from homeassistant.components.update import ( + UpdateDeviceClass, + UpdateEntity, + UpdateEntityDescription, + UpdateEntityFeature, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SmConfigEntry +from .const import LOGGER +from .coordinator import SmFirmwareUpdateCoordinator, SmFwData +from .entity import SmEntity + + +@dataclass(frozen=True, kw_only=True) +class SmUpdateEntityDescription(UpdateEntityDescription): + """Describes SMLIGHT SLZB-06 update entity.""" + + installed_version: Callable[[Info], str | None] + fw_list: Callable[[SmFwData], list[Firmware] | None] + + +UPDATE_ENTITIES: Final = [ + SmUpdateEntityDescription( + key="core_update", + translation_key="core_update", + installed_version=lambda x: x.sw_version, + fw_list=lambda x: x.esp_firmware, + ), + SmUpdateEntityDescription( + key="zigbee_update", + translation_key="zigbee_update", + installed_version=lambda x: x.zb_version, + fw_list=lambda x: x.zb_firmware, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, entry: SmConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up the SMLIGHT update entities.""" + coordinator = entry.runtime_data.firmware + + async_add_entities( + SmUpdateEntity(coordinator, description) for description in UPDATE_ENTITIES + ) + + +class SmUpdateEntity(SmEntity, UpdateEntity): + """Representation for SLZB-06 update entities.""" + + coordinator: SmFirmwareUpdateCoordinator + entity_description: SmUpdateEntityDescription + _attr_entity_category = EntityCategory.CONFIG + _attr_device_class = UpdateDeviceClass.FIRMWARE + _attr_supported_features = ( + UpdateEntityFeature.INSTALL + | UpdateEntityFeature.PROGRESS + | UpdateEntityFeature.RELEASE_NOTES + ) + + def __init__( + self, + coordinator: SmFirmwareUpdateCoordinator, + description: SmUpdateEntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}-{description.key}" + + self._finished_event = asyncio.Event() + self._firmware: Firmware | None = None + self._unload: list[Callable] = [] + + @property + def installed_version(self) -> str | None: + """Version installed.""" + data = self.coordinator.data + + version = self.entity_description.installed_version(data.info) + return version if version != "-1" else None + + @property + def latest_version(self) -> str | None: + """Latest version available for install.""" + data = self.coordinator.data + if self.coordinator.legacy_api == 2: + return None + + fw = self.entity_description.fw_list(data) + + if fw and self.entity_description.key == "zigbee_update": + fw = [f for f in fw if f.type == data.info.zb_type] + + if fw: + self._firmware = fw[0] + return self._firmware.ver + + return None + + def register_callbacks(self) -> None: + """Register callbacks for SSE update events.""" + self._unload.append( + self.coordinator.client.sse.register_callback( + SmEvents.ZB_FW_prgs, self._update_progress + ) + ) + self._unload.append( + self.coordinator.client.sse.register_callback( + SmEvents.FW_UPD_done, self._update_finished + ) + ) + if self.coordinator.legacy_api == 1:
+ self._unload.append( + self.coordinator.client.sse.register_callback( + SmEvents.ESP_UPD_done, self._update_finished + ) + ) + self._unload.append( + self.coordinator.client.sse.register_callback( + SmEvents.ZB_FW_err, self._update_failed + ) + ) + + def release_notes(self) -> str | None: + """Return release notes for firmware.""" + + if self._firmware and self._firmware.notes: + return self._firmware.notes + + return None + + @callback + def _update_progress(self, progress: MessageEvent) -> None: + """Update install progress on event.""" + + progress = int(progress.data) + self._attr_update_percentage = progress + self.async_write_ha_state() + + def _update_done(self) -> None: + """Handle cleanup for update done.""" + self._finished_event.set() + + for remove_cb in self._unload: + remove_cb() + self._unload.clear() + + self._attr_in_progress = False + self._attr_update_percentage = None + self.async_write_ha_state() + + @callback + def _update_finished(self, event: MessageEvent) -> None: + """Handle event for update finished.""" + + self._update_done() + + @callback + def _update_failed(self, event: MessageEvent) -> None: + self._update_done() + self.coordinator.in_progress = False + raise HomeAssistantError(f"Update failed for {self.name}") + + async def async_install( + self, version: str | None, backup: bool, **kwargs: Any + ) -> None: + """Install firmware update.""" + + if not self.coordinator.in_progress and self._firmware: + self.coordinator.in_progress = True + self._attr_in_progress = True + self._attr_update_percentage = None + self.register_callbacks() + + await self.coordinator.client.fw_update(self._firmware) + + # block until update finished event received + await self._finished_event.wait() + + # allow time for SLZB-06 to reboot before updating coordinator data + try: + async with asyncio.timeout(180): + while ( + self.coordinator.in_progress + and self.installed_version != self._firmware.ver + ): + await self.coordinator.async_refresh() + await asyncio.sleep(1) + except TimeoutError: + LOGGER.warning( + "Timeout waiting for %s to reboot after update", + self.coordinator.data.info.hostname, + ) + + self.coordinator.in_progress = False + self._finished_event.clear() diff --git a/homeassistant/components/sms/config_flow.py b/homeassistant/components/sms/config_flow.py index aec9674da9d..d2188a94632 100644 --- a/homeassistant/components/sms/config_flow.py +++ b/homeassistant/components/sms/config_flow.py @@ -1,11 +1,12 @@ """Config flow for SMS integration.""" import logging +from typing import Any import gammu import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_DEVICE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -26,7 +27,7 @@ DATA_SCHEMA = vol.Schema( ) -async def get_imei_from_config(hass: HomeAssistant, data): +async def get_imei_from_config(hass: HomeAssistant, data: dict[str, Any]) -> str: """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. 
@@ -56,7 +57,9 @@ class SMSFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -79,10 +82,6 @@ class SMSFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input): - """Handle import.""" - return await self.async_step_user(user_input) - class CannotConnect(HomeAssistantError): """Error to indicate we cannot connect.""" diff --git a/homeassistant/components/sms/gateway.py b/homeassistant/components/sms/gateway.py index 60962f198b2..a11996e3dfc 100644 --- a/homeassistant/components/sms/gateway.py +++ b/homeassistant/components/sms/gateway.py @@ -128,7 +128,7 @@ class Gateway: except gammu.ERR_EMPTY: # error is raised if memory is empty (this induces wrong reported # memory status) - _LOGGER.info("Failed to read messages!") + _LOGGER.warning("Failed to read messages!") # Link all SMS when there are concatenated messages return gammu.LinkSMS(entries) diff --git a/homeassistant/components/smtp/icons.json b/homeassistant/components/smtp/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/smtp/icons.json +++ b/homeassistant/components/smtp/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/smtp/manifest.json b/homeassistant/components/smtp/manifest.json index 0e0bba707ac..66954eebccc 100644 --- a/homeassistant/components/smtp/manifest.json +++ b/homeassistant/components/smtp/manifest.json @@ -3,5 +3,6 @@ "name": "SMTP", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/smtp", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/snapcast/__init__.py b/homeassistant/components/snapcast/__init__.py index a4163355944..b853535b525 100644 --- a/homeassistant/components/snapcast/__init__.py +++ b/homeassistant/components/snapcast/__init__.py @@ -1,37 +1,28 @@ """Snapcast Integration.""" -import logging - -import snapcast.control - from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from .const import DOMAIN, PLATFORMS -from .server import HomeAssistantSnapcast - -_LOGGER = logging.getLogger(__name__) +from .coordinator import SnapcastUpdateCoordinator async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Snapcast from a config entry.""" host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] + coordinator = SnapcastUpdateCoordinator(hass, host, port) + try: - server = await snapcast.control.create_server( - hass.loop, host, port, reconnect=True - ) + await coordinator.async_config_entry_first_refresh() except OSError as ex: raise ConfigEntryNotReady( f"Could not connect to Snapcast server at {host}:{port}" ) from ex - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = HomeAssistantSnapcast( - hass, server, f"{host}:{port}", entry.entry_id - ) - + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git 
a/homeassistant/components/snapcast/coordinator.py b/homeassistant/components/snapcast/coordinator.py new file mode 100644 index 00000000000..5bb9ae4e51f --- /dev/null +++ b/homeassistant/components/snapcast/coordinator.py @@ -0,0 +1,72 @@ +"""Data update coordinator for Snapcast server.""" + +from __future__ import annotations + +import logging + +from snapcast.control.server import Snapserver + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + + +class SnapcastUpdateCoordinator(DataUpdateCoordinator[None]): + """Data update coordinator for pushed data from Snapcast server.""" + + def __init__(self, hass: HomeAssistant, host: str, port: int) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + logger=_LOGGER, + name=f"{host}:{port}", + update_interval=None, # Disable update interval as server pushes + ) + + self._server = Snapserver(hass.loop, host, port, True) + self.last_update_success = False + + self._server.set_on_update_callback(self._on_update) + self._server.set_new_client_callback(self._on_update) + self._server.set_on_connect_callback(self._on_connect) + self._server.set_on_disconnect_callback(self._on_disconnect) + + def _on_update(self) -> None: + """Snapserver on_update callback.""" + # Assume availability if an update is received. + self.last_update_success = True + self.async_update_listeners() + + def _on_connect(self) -> None: + """Snapserver on_connect callback.""" + self.last_update_success = True + self.async_update_listeners() + + def _on_disconnect(self, ex): + """Snapserver on_disconnect callback.""" + self.async_set_update_error(ex) + + async def _async_setup(self) -> None: + """Perform async setup for the coordinator.""" + # Start the server + try: + await self._server.start() + except OSError as ex: + raise UpdateFailed from ex + + async def _async_update_data(self) -> None: + """Empty update method since data is pushed.""" + + async def disconnect(self) -> None: + """Disconnect from the server.""" + self._server.set_on_update_callback(None) + self._server.set_on_connect_callback(None) + self._server.set_on_disconnect_callback(None) + self._server.set_new_client_callback(None) + self._server.stop() + + @property + def server(self) -> Snapserver: + """Get the Snapserver object.""" + return self._server diff --git a/homeassistant/components/snapcast/entity.py b/homeassistant/components/snapcast/entity.py new file mode 100644 index 00000000000..cceeb6227fd --- /dev/null +++ b/homeassistant/components/snapcast/entity.py @@ -0,0 +1,11 @@ +"""Coordinator entity for Snapcast server.""" + +from __future__ import annotations + +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import SnapcastUpdateCoordinator + + +class SnapcastCoordinatorEntity(CoordinatorEntity[SnapcastUpdateCoordinator]): + """Coordinator entity for Snapcast.""" diff --git a/homeassistant/components/snapcast/icons.json b/homeassistant/components/snapcast/icons.json index bdc20665282..d6511d768e2 100644 --- a/homeassistant/components/snapcast/icons.json +++ b/homeassistant/components/snapcast/icons.json @@ -1,9 +1,19 @@ { "services": { - "join": "mdi:music-note-plus", - "unjoin": "mdi:music-note-minus", - "snapshot": "mdi:camera", - "restore": "mdi:camera-retake", - "set_latency": "mdi:camera-timer" + "join": { + "service": "mdi:music-note-plus" + }, + "unjoin": { + "service": "mdi:music-note-minus" + }, + "snapshot":
{ + "service": "mdi:camera" + }, + "restore": { + "service": "mdi:camera-retake" + }, + "set_latency": { + "service": "mdi:camera-timer" + } } } diff --git a/homeassistant/components/snapcast/media_player.py b/homeassistant/components/snapcast/media_player.py index bda411acde3..0ec27c1ad9c 100644 --- a/homeassistant/components/snapcast/media_player.py +++ b/homeassistant/components/snapcast/media_player.py @@ -2,18 +2,29 @@ from __future__ import annotations -from snapcast.control.server import Snapserver +from collections.abc import Mapping +import logging +from typing import Any + +from snapcast.control.client import Snapclient +from snapcast.control.group import Snapgroup import voluptuous as vol from homeassistant.components.media_player import ( + DOMAIN as MEDIA_PLAYER_DOMAIN, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv, entity_platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import ( + config_validation as cv, + entity_platform, + entity_registry as er, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( @@ -30,6 +41,8 @@ from .const import ( SERVICE_SNAPSHOT, SERVICE_UNJOIN, ) +from .coordinator import SnapcastUpdateCoordinator +from .entity import SnapcastCoordinatorEntity STREAM_STATUS = { "idle": MediaPlayerState.IDLE, @@ -37,21 +50,23 @@ STREAM_STATUS = { "unknown": None, } +_LOGGER = logging.getLogger(__name__) -def register_services(): + +def register_services() -> None: """Register snapcast services.""" platform = entity_platform.async_get_current_platform() platform.async_register_entity_service(SERVICE_SNAPSHOT, None, "snapshot") platform.async_register_entity_service(SERVICE_RESTORE, None, "async_restore") platform.async_register_entity_service( - SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, handle_async_join + SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join" ) - platform.async_register_entity_service(SERVICE_UNJOIN, None, handle_async_unjoin) + platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin") platform.async_register_entity_service( SERVICE_SET_LATENCY, {vol.Required(ATTR_LATENCY): cv.positive_int}, - handle_set_latency, + "async_set_latency", ) @@ -61,51 +76,103 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the snapcast config entry.""" - snapcast_server: Snapserver = hass.data[DOMAIN][config_entry.entry_id].server + + # Fetch coordinator from global data + coordinator: SnapcastUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + # Create an ID for the Snapserver + host = config_entry.data[CONF_HOST] + port = config_entry.data[CONF_PORT] + host_id = f"{host}:{port}" register_services() - host = config_entry.data[CONF_HOST] - port = config_entry.data[CONF_PORT] - hpid = f"{host}:{port}" + _known_group_ids: set[str] = set() + _known_client_ids: set[str] = set() - groups: list[MediaPlayerEntity] = [ - SnapcastGroupDevice(group, hpid, config_entry.entry_id) - for group in snapcast_server.groups - ] - clients: list[MediaPlayerEntity] = [ - SnapcastClientDevice(client, hpid, config_entry.entry_id) - for client in snapcast_server.clients - ] - async_add_entities(clients + groups) - 
hass.data[DOMAIN][ - config_entry.entry_id - ].hass_async_add_entities = async_add_entities + @callback + def _check_entities() -> None: + nonlocal _known_group_ids, _known_client_ids + + def _update_known_ids(known_ids, ids) -> tuple[set[str], set[str]]: + ids_to_add = ids - known_ids + ids_to_remove = known_ids - ids + + # Update known IDs + known_ids.difference_update(ids_to_remove) + known_ids.update(ids_to_add) + + return ids_to_add, ids_to_remove + + group_ids = {g.identifier for g in coordinator.server.groups} + groups_to_add, groups_to_remove = _update_known_ids(_known_group_ids, group_ids) + + client_ids = {c.identifier for c in coordinator.server.clients} + clients_to_add, clients_to_remove = _update_known_ids( + _known_client_ids, client_ids + ) + + # Exit early if no changes + if not (groups_to_add | groups_to_remove | clients_to_add | clients_to_remove): + return + + _LOGGER.debug( + "New clients: %s", + str([coordinator.server.client(c).friendly_name for c in clients_to_add]), + ) + _LOGGER.debug( + "New groups: %s", + str([coordinator.server.group(g).friendly_name for g in groups_to_add]), + ) + _LOGGER.debug( + "Remove client IDs: %s", + str([list(clients_to_remove)]), + ) + _LOGGER.debug( + "Remove group IDs: %s", + str(list(groups_to_remove)), + ) + + # Add new entities + async_add_entities( + [ + SnapcastGroupDevice( + coordinator, coordinator.server.group(group_id), host_id + ) + for group_id in groups_to_add + ] + + [ + SnapcastClientDevice( + coordinator, coordinator.server.client(client_id), host_id + ) + for client_id in clients_to_add + ] + ) + + # Remove stale entities + entity_registry = er.async_get(hass) + for group_id in groups_to_remove: + if entity_id := entity_registry.async_get_entity_id( + MEDIA_PLAYER_DOMAIN, + DOMAIN, + SnapcastGroupDevice.get_unique_id(host_id, group_id), + ): + entity_registry.async_remove(entity_id) + + for client_id in clients_to_remove: + if entity_id := entity_registry.async_get_entity_id( + MEDIA_PLAYER_DOMAIN, + DOMAIN, + SnapcastClientDevice.get_unique_id(host_id, client_id), + ): + entity_registry.async_remove(entity_id) + + coordinator.async_add_listener(_check_entities) + _check_entities() -async def handle_async_join(entity, service_call): - """Handle the entity service join.""" - if not isinstance(entity, SnapcastClientDevice): - raise TypeError("Entity is not a client. Can only join clients.") - await entity.async_join(service_call.data[ATTR_MASTER]) - - -async def handle_async_unjoin(entity, service_call): - """Handle the entity service unjoin.""" - if not isinstance(entity, SnapcastClientDevice): - raise TypeError("Entity is not a client. 
Can only unjoin clients.") - await entity.async_unjoin() - - -async def handle_set_latency(entity, service_call): - """Handle the entity service set_latency.""" - if not isinstance(entity, SnapcastClientDevice): - raise TypeError("Latency can only be set for a Snapcast client.") - await entity.async_set_latency(service_call.data[ATTR_LATENCY]) - - -class SnapcastGroupDevice(MediaPlayerEntity): - """Representation of a Snapcast group device.""" +class SnapcastBaseDevice(SnapcastCoordinatorEntity, MediaPlayerEntity): + """Base class representing a Snapcast device.""" _attr_should_poll = False _attr_supported_features = ( @@ -114,166 +181,172 @@ class SnapcastGroupDevice(MediaPlayerEntity): | MediaPlayerEntityFeature.SELECT_SOURCE ) - def __init__(self, group, uid_part, entry_id): - """Initialize the Snapcast group device.""" - self._attr_available = True - self._group = group - self._entry_id = entry_id - self._attr_unique_id = f"{GROUP_PREFIX}{uid_part}_{self._group.identifier}" + def __init__( + self, + coordinator: SnapcastUpdateCoordinator, + device: Snapgroup | Snapclient, + host_id: str, + ) -> None: + """Initialize the base device.""" + super().__init__(coordinator) + + self._device = device + self._attr_unique_id = self.get_unique_id(host_id, device.identifier) + + @classmethod + def get_unique_id(cls, host, id) -> str: + """Build a unique ID.""" + raise NotImplementedError + + @property + def _current_group(self) -> Snapgroup: + """Return the group.""" + raise NotImplementedError async def async_added_to_hass(self) -> None: - """Subscribe to group events.""" - self._group.set_callback(self.schedule_update_ha_state) - self.hass.data[DOMAIN][self._entry_id].groups.append(self) + """Subscribe to events.""" + await super().async_added_to_hass() + self._device.set_callback(self.schedule_update_ha_state) async def async_will_remove_from_hass(self) -> None: - """Disconnect group object when removed.""" - self._group.set_callback(None) - self.hass.data[DOMAIN][self._entry_id].groups.remove(self) + """Disconnect object when removed.""" + self._device.set_callback(None) - def set_availability(self, available: bool) -> None: - """Set availability of group.""" - self._attr_available = available - self.schedule_update_ha_state() + @property + def identifier(self) -> str: + """Return the snapcast identifier.""" + return self._device.identifier + + @property + def source(self) -> str | None: + """Return the current input source.""" + return self._current_group.stream + + @property + def source_list(self) -> list[str]: + """List of available input sources.""" + return list(self._current_group.streams_by_name().keys()) + + async def async_select_source(self, source: str) -> None: + """Set input source.""" + streams = self._current_group.streams_by_name() + if source in streams: + await self._current_group.set_stream(streams[source].identifier) + self.async_write_ha_state() + + @property + def is_volume_muted(self) -> bool: + """Volume muted.""" + return self._device.muted + + async def async_mute_volume(self, mute: bool) -> None: + """Send the mute command.""" + await self._device.set_muted(mute) + self.async_write_ha_state() + + @property + def volume_level(self) -> float: + """Return the volume level.""" + return self._device.volume / 100 + + async def async_set_volume_level(self, volume: float) -> None: + """Set the volume level.""" + await self._device.set_volume(round(volume * 100)) + self.async_write_ha_state() + + def snapshot(self) -> None: + """Snapshot the group state.""" + 
self._device.snapshot() + + async def async_restore(self) -> None: + """Restore the group state.""" + await self._device.restore() + self.async_write_ha_state() + + async def async_set_latency(self, latency) -> None: + """Handle the set_latency service.""" + raise NotImplementedError + + async def async_join(self, master) -> None: + """Handle the join service.""" + raise NotImplementedError + + async def async_unjoin(self) -> None: + """Handle the unjoin service.""" + raise NotImplementedError + + +class SnapcastGroupDevice(SnapcastBaseDevice): + """Representation of a Snapcast group device.""" + + _device: Snapgroup + + @classmethod + def get_unique_id(cls, host, id) -> str: + """Get a unique ID for a group.""" + return f"{GROUP_PREFIX}{host}_{id}" + + @property + def _current_group(self) -> Snapgroup: + """Return the group.""" + return self._device + + @property + def name(self) -> str: + """Return the name of the device.""" + return f"{self._device.friendly_name} {GROUP_SUFFIX}" @property def state(self) -> MediaPlayerState | None: """Return the state of the player.""" if self.is_volume_muted: return MediaPlayerState.IDLE - return STREAM_STATUS.get(self._group.stream_status) + return STREAM_STATUS.get(self._device.stream_status) - @property - def identifier(self): - """Return the snapcast identifier.""" - return self._group.identifier + async def async_set_latency(self, latency) -> None: + """Handle the set_latency service.""" + raise ServiceValidationError("Latency can only be set for a Snapcast client.") - @property - def name(self): - """Return the name of the device.""" - return f"{self._group.friendly_name} {GROUP_SUFFIX}" + async def async_join(self, master) -> None: + """Handle the join service.""" + raise ServiceValidationError("Entity is not a client. Can only join clients.") - @property - def source(self): - """Return the current input source.""" - return self._group.stream - - @property - def volume_level(self): - """Return the volume level.""" - return self._group.volume / 100 - - @property - def is_volume_muted(self): - """Volume muted.""" - return self._group.muted - - @property - def source_list(self): - """List of available input sources.""" - return list(self._group.streams_by_name().keys()) - - async def async_select_source(self, source: str) -> None: - """Set input source.""" - streams = self._group.streams_by_name() - if source in streams: - await self._group.set_stream(streams[source].identifier) - self.async_write_ha_state() - - async def async_mute_volume(self, mute: bool) -> None: - """Send the mute command.""" - await self._group.set_muted(mute) - self.async_write_ha_state() - - async def async_set_volume_level(self, volume: float) -> None: - """Set the volume level.""" - await self._group.set_volume(round(volume * 100)) - self.async_write_ha_state() - - def snapshot(self): - """Snapshot the group state.""" - self._group.snapshot() - - async def async_restore(self): - """Restore the group state.""" - await self._group.restore() - self.async_write_ha_state() + async def async_unjoin(self) -> None: + """Handle the unjoin service.""" + raise ServiceValidationError("Entity is not a client. 
Can only unjoin clients.") -class SnapcastClientDevice(MediaPlayerEntity): +class SnapcastClientDevice(SnapcastBaseDevice): """Representation of a Snapcast client device.""" - _attr_should_poll = False - _attr_supported_features = ( - MediaPlayerEntityFeature.VOLUME_MUTE - | MediaPlayerEntityFeature.VOLUME_SET - | MediaPlayerEntityFeature.SELECT_SOURCE - ) + _device: Snapclient - def __init__(self, client, uid_part, entry_id): - """Initialize the Snapcast client device.""" - self._attr_available = True - self._client = client - # Note: Host part is needed, when using multiple snapservers - self._attr_unique_id = f"{CLIENT_PREFIX}{uid_part}_{self._client.identifier}" - self._entry_id = entry_id - - async def async_added_to_hass(self) -> None: - """Subscribe to client events.""" - self._client.set_callback(self.schedule_update_ha_state) - self.hass.data[DOMAIN][self._entry_id].clients.append(self) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect client object when removed.""" - self._client.set_callback(None) - self.hass.data[DOMAIN][self._entry_id].clients.remove(self) - - def set_availability(self, available: bool) -> None: - """Set availability of group.""" - self._attr_available = available - self.schedule_update_ha_state() + @classmethod + def get_unique_id(cls, host, id) -> str: + """Get a unique ID for a client.""" + return f"{CLIENT_PREFIX}{host}_{id}" @property - def identifier(self): - """Return the snapcast identifier.""" - return self._client.identifier + def _current_group(self) -> Snapgroup: + """Return the group the client is associated with.""" + return self._device.group @property - def name(self): + def name(self) -> str: """Return the name of the device.""" - return f"{self._client.friendly_name} {CLIENT_SUFFIX}" - - @property - def source(self): - """Return the current input source.""" - return self._client.group.stream - - @property - def volume_level(self): - """Return the volume level.""" - return self._client.volume / 100 - - @property - def is_volume_muted(self): - """Volume muted.""" - return self._client.muted - - @property - def source_list(self): - """List of available input sources.""" - return list(self._client.group.streams_by_name().keys()) + return f"{self._device.friendly_name} {CLIENT_SUFFIX}" @property def state(self) -> MediaPlayerState | None: """Return the state of the player.""" - if self._client.connected: - if self.is_volume_muted or self._client.group.muted: + if self._device.connected: + if self.is_volume_muted or self._current_group.muted: return MediaPlayerState.IDLE - return STREAM_STATUS.get(self._client.group.stream_status) + return STREAM_STATUS.get(self._current_group.stream_status) return MediaPlayerState.STANDBY @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> Mapping[str, Any]: """Return the state attributes.""" state_attrs = {} if self.latency is not None: @@ -281,60 +354,40 @@ class SnapcastClientDevice(MediaPlayerEntity): return state_attrs @property - def latency(self): + def latency(self) -> float | None: """Latency for Client.""" - return self._client.latency + return self._device.latency - async def async_select_source(self, source: str) -> None: - """Set input source.""" - streams = self._client.group.streams_by_name() - if source in streams: - await self._client.group.set_stream(streams[source].identifier) - self.async_write_ha_state() - - async def async_mute_volume(self, mute: bool) -> None: - """Send the mute command.""" - await self._client.set_muted(mute) + async 
def async_set_latency(self, latency) -> None: + """Set the latency of the client.""" + await self._device.set_latency(latency) self.async_write_ha_state() - async def async_set_volume_level(self, volume: float) -> None: - """Set the volume level.""" - await self._client.set_volume(round(volume * 100)) - self.async_write_ha_state() - - async def async_join(self, master): + async def async_join(self, master) -> None: """Join the group of the master player.""" - master_entity = next( - entity - for entity in self.hass.data[DOMAIN][self._entry_id].clients - if entity.entity_id == master - ) - if not isinstance(master_entity, SnapcastClientDevice): - raise TypeError("Master is not a client device. Can only join clients.") + entity_registry = er.async_get(self.hass) + master_entity = entity_registry.async_get(master) + if master_entity is None: + raise ServiceValidationError(f"Master entity '{master}' not found.") + # Validate master entity is a client + unique_id = master_entity.unique_id + if not unique_id.startswith(CLIENT_PREFIX): + raise ServiceValidationError( + "Master is not a client device. Can only join clients." + ) + + # Extract the client ID and locate its group + identifier = unique_id.split("_")[-1] master_group = next( group - for group in self._client.groups_available() - if master_entity.identifier in group.clients + for group in self._device.groups_available() + if identifier in group.clients ) - await master_group.add_client(self._client.identifier) + await master_group.add_client(self._device.identifier) self.async_write_ha_state() - async def async_unjoin(self): + async def async_unjoin(self) -> None: """Unjoin the group the player is currently in.""" - await self._client.group.remove_client(self._client.identifier) - self.async_write_ha_state() - - def snapshot(self): - """Snapshot the client state.""" - self._client.snapshot() - - async def async_restore(self): - """Restore the client state.""" - await self._client.restore() - self.async_write_ha_state() - - async def async_set_latency(self, latency): - """Set the latency of the client.""" - await self._client.set_latency(latency) + await self._current_group.remove_client(self._device.identifier) self.async_write_ha_state() diff --git a/homeassistant/components/snapcast/server.py b/homeassistant/components/snapcast/server.py deleted file mode 100644 index 4714156c4c2..00000000000 --- a/homeassistant/components/snapcast/server.py +++ /dev/null @@ -1,143 +0,0 @@ -"""Snapcast Integration.""" - -from __future__ import annotations - -import logging - -import snapcast.control -from snapcast.control.client import Snapclient - -from homeassistant.components.media_player import MediaPlayerEntity -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .media_player import SnapcastClientDevice, SnapcastGroupDevice - -_LOGGER = logging.getLogger(__name__) - - -class HomeAssistantSnapcast: - """Snapcast server and data stored in the Home Assistant data object.""" - - hass: HomeAssistant - - def __init__( - self, - hass: HomeAssistant, - server: snapcast.control.Snapserver, - hpid: str, - entry_id: str, - ) -> None: - """Initialize the HomeAssistantSnapcast object.
- - Parameters - ---------- - hass: HomeAssistant - hass object - server : snapcast.control.Snapserver - Snapcast server - hpid : str - host and port - entry_id: str - ConfigEntry entry_id - - Returns - ------- - None - - """ - self.hass: HomeAssistant = hass - self.server: snapcast.control.Snapserver = server - self.hpid: str = hpid - self._entry_id = entry_id - self.clients: list[SnapcastClientDevice] = [] - self.groups: list[SnapcastGroupDevice] = [] - self.hass_async_add_entities: AddEntitiesCallback - # connect callbacks - self.server.set_on_update_callback(self.on_update) - self.server.set_on_connect_callback(self.on_connect) - self.server.set_on_disconnect_callback(self.on_disconnect) - self.server.set_new_client_callback(self.on_add_client) - - async def disconnect(self) -> None: - """Disconnect from server.""" - self.server.set_on_update_callback(None) - self.server.set_on_connect_callback(None) - self.server.set_on_disconnect_callback(None) - self.server.set_new_client_callback(None) - self.server.stop() - - def on_update(self) -> None: - """Update all entities. - - Retrieve all groups/clients from server and add/update/delete entities. - """ - if not self.hass_async_add_entities: - return - new_groups: list[MediaPlayerEntity] = [] - groups: list[MediaPlayerEntity] = [] - hass_groups = {g.identifier: g for g in self.groups} - for group in self.server.groups: - if group.identifier in hass_groups: - groups.append(hass_groups[group.identifier]) - hass_groups[group.identifier].async_schedule_update_ha_state() - else: - new_groups.append(SnapcastGroupDevice(group, self.hpid, self._entry_id)) - new_clients: list[MediaPlayerEntity] = [] - clients: list[MediaPlayerEntity] = [] - hass_clients = {c.identifier: c for c in self.clients} - for client in self.server.clients: - if client.identifier in hass_clients: - clients.append(hass_clients[client.identifier]) - hass_clients[client.identifier].async_schedule_update_ha_state() - else: - new_clients.append( - SnapcastClientDevice(client, self.hpid, self._entry_id) - ) - del_entities: list[MediaPlayerEntity] = [ - x for x in self.groups if x not in groups - ] - del_entities.extend([x for x in self.clients if x not in clients]) - - _LOGGER.debug("New clients: %s", str([c.name for c in new_clients])) - _LOGGER.debug("New groups: %s", str([g.name for g in new_groups])) - _LOGGER.debug("Delete: %s", str(del_entities)) - - ent_reg = er.async_get(self.hass) - for entity in del_entities: - ent_reg.async_remove(entity.entity_id) - self.hass_async_add_entities(new_clients + new_groups) - - def on_connect(self) -> None: - """Activate all entities and update.""" - for client in self.clients: - client.set_availability(True) - for group in self.groups: - group.set_availability(True) - _LOGGER.info("Server connected: %s", self.hpid) - self.on_update() - - def on_disconnect(self, ex: Exception | None) -> None: - """Deactivate all entities.""" - for client in self.clients: - client.set_availability(False) - for group in self.groups: - group.set_availability(False) - _LOGGER.warning( - "Server disconnected: %s. Trying to reconnect. %s", self.hpid, str(ex or "") - ) - - def on_add_client(self, client: Snapclient) -> None: - """Add a Snapcast client. - - Parameters - ---------- - client : Snapclient - Snapcast client to be added to HA. 
- - """ - if not self.hass_async_add_entities: - return - clients = [SnapcastClientDevice(client, self.hpid, self._entry_id)] - self.hass_async_add_entities(clients) diff --git a/homeassistant/components/snips/__init__.py b/homeassistant/components/snips/__init__.py index 4731a0f324a..70837b95ec5 100644 --- a/homeassistant/components/snips/__init__.py +++ b/homeassistant/components/snips/__init__.py @@ -140,7 +140,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: slots = {} for slot in request.get("slots", []): slots[slot["slotName"]] = {"value": resolve_slot_values(slot)} - slots["{}_raw".format(slot["slotName"])] = {"value": slot["rawValue"]} + slots[f"{slot['slotName']}_raw"] = {"value": slot["rawValue"]} slots["site_id"] = {"value": request.get("siteId")} slots["session_id"] = {"value": request.get("sessionId")} slots["confidenceScore"] = {"value": request["intent"]["confidenceScore"]} diff --git a/homeassistant/components/snips/icons.json b/homeassistant/components/snips/icons.json index 0d465465fe4..9c86a7ad5b3 100644 --- a/homeassistant/components/snips/icons.json +++ b/homeassistant/components/snips/icons.json @@ -1,8 +1,16 @@ { "services": { - "feedback_off": "mdi:message-alert", - "feedback_on": "mdi:message-alert", - "say": "mdi:chat", - "say_action": "mdi:account-voice" + "feedback_off": { + "service": "mdi:message-alert" + }, + "feedback_on": { + "service": "mdi:message-alert" + }, + "say": { + "service": "mdi:chat" + }, + "say_action": { + "service": "mdi:account-voice" + } } } diff --git a/homeassistant/components/snips/manifest.json b/homeassistant/components/snips/manifest.json index 16620eb4bfb..ec768b2b3d4 100644 --- a/homeassistant/components/snips/manifest.json +++ b/homeassistant/components/snips/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/snips", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/snmp/device_tracker.py b/homeassistant/components/snmp/device_tracker.py index 9741a48dd9f..3c4a0a0725c 100644 --- a/homeassistant/components/snmp/device_tracker.py +++ b/homeassistant/components/snmp/device_tracker.py @@ -18,7 +18,7 @@ from pysnmp.hlapi.asyncio import ( import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -59,7 +59,7 @@ async def async_get_scanner( hass: HomeAssistant, config: ConfigType ) -> SnmpScanner | None: """Validate the configuration and return an SNMP scanner.""" - scanner = SnmpScanner(config[DOMAIN]) + scanner = SnmpScanner(config[DEVICE_TRACKER_DOMAIN]) await scanner.async_init(hass) return scanner if scanner.success_init else None diff --git a/homeassistant/components/snmp/manifest.json b/homeassistant/components/snmp/manifest.json index c3970e1e00a..a2a4405a1b5 100644 --- a/homeassistant/components/snmp/manifest.json +++ b/homeassistant/components/snmp/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/snmp", "iot_class": "local_polling", "loggers": ["pyasn1", "pysmi", "pysnmp"], - "requirements": ["pysnmp==6.2.5"] + "quality_scale": "legacy", + "requirements": ["pysnmp==6.2.6"] } diff --git a/homeassistant/components/snooz/fan.py b/homeassistant/components/snooz/fan.py index 8c721432709..bfe773b4780 100644 --- a/homeassistant/components/snooz/fan.py +++ 
b/homeassistant/components/snooz/fan.py @@ -83,7 +83,6 @@ class SnoozFan(FanEntity, RestoreEntity): _attr_should_poll = False _is_on: bool | None = None _percentage: int | None = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, data: SnoozConfigurationData) -> None: """Initialize a Snooz fan entity.""" diff --git a/homeassistant/components/snooz/icons.json b/homeassistant/components/snooz/icons.json index d9cccfff4ea..be7d2714a20 100644 --- a/homeassistant/components/snooz/icons.json +++ b/homeassistant/components/snooz/icons.json @@ -1,6 +1,10 @@ { "services": { - "transition_on": "mdi:blur", - "transition_off": "mdi:blur-off" + "transition_on": { + "service": "mdi:blur" + }, + "transition_off": { + "service": "mdi:blur-off" + } } } diff --git a/homeassistant/components/snooz/strings.json b/homeassistant/components/snooz/strings.json index 5a31cea6cac..94ca434e589 100644 --- a/homeassistant/components/snooz/strings.json +++ b/homeassistant/components/snooz/strings.json @@ -12,7 +12,7 @@ "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]" }, "pairing_timeout": { - "description": "The device did not enter pairing mode. Click Submit to try again.\n\n### Troubleshooting\n1. Check that the device isn't connected to the mobile app.\n2. Unplug the device for 5 seconds, then plug it back in." + "description": "The device did not enter pairing mode. Select Submit to try again.\n\n### Troubleshooting\n1. Check that the device isn't connected to the mobile app.\n2. Unplug the device for 5 seconds, then plug it back in." } }, "progress": { diff --git a/homeassistant/components/solaredge/coordinator.py b/homeassistant/components/solaredge/coordinator.py index 0c264c1c514..d37cf355fce 100644 --- a/homeassistant/components/solaredge/coordinator.py +++ b/homeassistant/components/solaredge/coordinator.py @@ -93,7 +93,7 @@ class SolarEdgeOverviewDataService(SolarEdgeDataService): for index, key in enumerate(energy_keys, start=1): # All coming values in list should be larger than the current value. 
if any(self.data[k] > self.data[key] for k in energy_keys[index:]): - LOGGER.info( + LOGGER.warning( "Ignoring invalid energy value %s for %s", self.data[key], key ) self.data.pop(key) diff --git a/homeassistant/components/solaredge_local/manifest.json b/homeassistant/components/solaredge_local/manifest.json index d65aa06ea0a..61c08b3b152 100644 --- a/homeassistant/components/solaredge_local/manifest.json +++ b/homeassistant/components/solaredge_local/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/solaredge_local", "iot_class": "local_polling", "loggers": ["solaredge_local"], + "quality_scale": "legacy", "requirements": ["solaredge-local==0.2.3"] } diff --git a/homeassistant/components/solarlog/__init__.py b/homeassistant/components/solarlog/__init__.py index 962efa4e190..5937c8a496d 100644 --- a/homeassistant/components/solarlog/__init__.py +++ b/homeassistant/components/solarlog/__init__.py @@ -7,17 +7,18 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .coordinator import SolarlogData +from .const import CONF_HAS_PWD +from .coordinator import SolarLogCoordinator _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.SENSOR] -type SolarlogConfigEntry = ConfigEntry[SolarlogData] +type SolarlogConfigEntry = ConfigEntry[SolarLogCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: SolarlogConfigEntry) -> bool: """Set up a config entry for solarlog.""" - coordinator = SolarlogData(hass, entry) + coordinator = SolarLogCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -57,12 +58,13 @@ async def async_migrate_entry( entity.entity_id, new_unique_id=new_uid ) + if config_entry.minor_version < 3: # migrate config_entry new = {**config_entry.data} - new["extended_data"] = False + new[CONF_HAS_PWD] = False hass.config_entries.async_update_entry( - config_entry, data=new, minor_version=2, version=1 + config_entry, data=new, minor_version=3, version=1 ) _LOGGER.debug( diff --git a/homeassistant/components/solarlog/config_flow.py b/homeassistant/components/solarlog/config_flow.py index 7c8401be2b8..767079ea1f8 100644 --- a/homeassistant/components/solarlog/config_flow.py +++ b/homeassistant/components/solarlog/config_flow.py @@ -1,46 +1,33 @@ """Config flow for solarlog integration.""" -import logging -from typing import TYPE_CHECKING, Any +from collections.abc import Mapping +from typing import Any from urllib.parse import ParseResult, urlparse from solarlog_cli.solarlog_connector import SolarLogConnector -from solarlog_cli.solarlog_exceptions import SolarLogConnectionError, SolarLogError +from solarlog_cli.solarlog_exceptions import ( + SolarLogAuthenticationError, + SolarLogConnectionError, + SolarLogError, +) import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_NAME -from homeassistant.core import HomeAssistant, callback -from homeassistant.util import slugify +from homeassistant.const import CONF_HOST, CONF_PASSWORD -from .const import DEFAULT_HOST, DEFAULT_NAME, DOMAIN - -_LOGGER = logging.getLogger(__name__) - - -@callback -def solarlog_entries(hass: HomeAssistant): - """Return the hosts already configured.""" - return { - entry.data[CONF_HOST] for entry in hass.config_entries.async_entries(DOMAIN) - } +from 
.const import CONF_HAS_PWD, DEFAULT_HOST, DOMAIN class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for solarlog.""" VERSION = 1 - MINOR_VERSION = 2 + MINOR_VERSION = 3 def __init__(self) -> None: """Initialize the config flow.""" self._errors: dict = {} - - def _host_in_configuration_exists(self, host) -> bool: - """Return True if host exists in configuration.""" - if host in solarlog_entries(self.hass): - return True - return False + self._user_input: dict = {} def _parse_url(self, host: str) -> str: """Return parsed host url.""" @@ -50,7 +37,7 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): url = ParseResult("http", netloc, path, *url[3:]) return url.geturl() - async def _test_connection(self, host): + async def _test_connection(self, host: str) -> bool: """Check if we can connect to the Solar-Log device.""" solarlog = SolarLogConnector(host) try: @@ -66,82 +53,143 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): return True - async def async_step_user(self, user_input=None) -> ConfigFlowResult: + async def _test_extended_data(self, host: str, pwd: str = "") -> bool: + """Check if we get extended data from Solar-Log device.""" + response: bool = False + solarlog = SolarLogConnector(host, password=pwd) + try: + response = await solarlog.test_extended_data_available() + except SolarLogAuthenticationError: + self._errors = {CONF_HOST: "password_error"} + response = False + except SolarLogError: + self._errors = {CONF_HOST: "unknown"} + response = False + finally: + await solarlog.client.close() + + return response + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Step when user initializes a integration.""" self._errors = {} if user_input is not None: - # set some defaults in case we need to return to the form - user_input[CONF_NAME] = slugify(user_input[CONF_NAME]) user_input[CONF_HOST] = self._parse_url(user_input[CONF_HOST]) - if self._host_in_configuration_exists(user_input[CONF_HOST]): - self._errors[CONF_HOST] = "already_configured" - elif await self._test_connection(user_input[CONF_HOST]): + self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) + + if await self._test_connection(user_input[CONF_HOST]): + if user_input[CONF_HAS_PWD]: + self._user_input = user_input + return await self.async_step_password() + return self.async_create_entry( - title=user_input[CONF_NAME], data=user_input + title=user_input[CONF_HOST], data=user_input ) else: - user_input = {} - user_input[CONF_NAME] = DEFAULT_NAME - user_input[CONF_HOST] = DEFAULT_HOST + user_input = {CONF_HOST: DEFAULT_HOST} return self.async_show_form( step_id="user", data_schema=vol.Schema( { - vol.Required( - CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME) - ): str, - vol.Required( - CONF_HOST, default=user_input.get(CONF_HOST, DEFAULT_HOST) - ): str, - vol.Required("extended_data", default=False): bool, + vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str, + vol.Required(CONF_HAS_PWD, default=False): bool, } ), errors=self._errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry.""" + async def async_step_password( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Step when user sets password .""" + self._errors = {} + if user_input is not None: + if await self._test_extended_data( + self._user_input[CONF_HOST], user_input[CONF_PASSWORD] + ): + self._user_input |= user_input + return 
self.async_create_entry( + title=self._user_input[CONF_HOST], data=self._user_input + ) + else: + user_input = {CONF_PASSWORD: ""} - user_input = { - CONF_HOST: DEFAULT_HOST, - CONF_NAME: DEFAULT_NAME, - "extended_data": False, - **user_input, - } - - user_input[CONF_HOST] = self._parse_url(user_input[CONF_HOST]) - - if self._host_in_configuration_exists(user_input[CONF_HOST]): - return self.async_abort(reason="already_configured") - - return await self.async_step_user(user_input) + return self.async_show_form( + step_id="password", + data_schema=vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } + ), + errors=self._errors, + ) async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" - - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - - if TYPE_CHECKING: - assert entry is not None - + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: - return self.async_update_reload_and_abort( - entry, - reason="reconfigure_successful", - data={**entry.data, **user_input}, - ) + if not user_input[CONF_HAS_PWD] or user_input.get(CONF_PASSWORD, "") == "": + user_input[CONF_PASSWORD] = "" + user_input[CONF_HAS_PWD] = False + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates=user_input + ) + + if await self._test_extended_data( + reconfigure_entry.data[CONF_HOST], user_input.get(CONF_PASSWORD, "") + ): + # if password has been provided, only save if extended data is available + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates=user_input, + ) return self.async_show_form( step_id="reconfigure", data_schema=vol.Schema( { - vol.Required( - "extended_data", default=entry.data["extended_data"] + vol.Optional( + CONF_HAS_PWD, default=reconfigure_entry.data[CONF_HAS_PWD] ): bool, + vol.Optional(CONF_PASSWORD): str, } ), ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle flow upon an API authentication error.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reauthorization flow.""" + reauth_entry = self._get_reauth_entry() + if user_input and await self._test_extended_data( + reauth_entry.data[CONF_HOST], user_input.get(CONF_PASSWORD, "") + ): + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input + ) + + data_schema = vol.Schema( + { + vol.Optional( + CONF_HAS_PWD, default=reauth_entry.data[CONF_HAS_PWD] + ): bool, + vol.Optional(CONF_PASSWORD): str, + } + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=data_schema, + errors=self._errors, + ) diff --git a/homeassistant/components/solarlog/const.py b/homeassistant/components/solarlog/const.py index 31f17af83b5..3e814705589 100644 --- a/homeassistant/components/solarlog/const.py +++ b/homeassistant/components/solarlog/const.py @@ -6,4 +6,5 @@ DOMAIN = "solarlog" # Default config for solarlog. 
DEFAULT_HOST = "http://solar-log" -DEFAULT_NAME = "solarlog" + +CONF_HAS_PWD = "has_password" diff --git a/homeassistant/components/solarlog/coordinator.py b/homeassistant/components/solarlog/coordinator.py index d2963e1950e..11f268db32a 100644 --- a/homeassistant/components/solarlog/coordinator.py +++ b/homeassistant/components/solarlog/coordinator.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable from datetime import timedelta import logging from typing import TYPE_CHECKING @@ -9,14 +10,21 @@ from urllib.parse import ParseResult, urlparse from solarlog_cli.solarlog_connector import SolarLogConnector from solarlog_cli.solarlog_exceptions import ( + SolarLogAuthenticationError, SolarLogConnectionError, SolarLogUpdateError, ) +from solarlog_cli.solarlog_models import SolarlogData from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import update_coordinator +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession +import homeassistant.helpers.device_registry as dr +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util import slugify + +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -24,7 +32,7 @@ if TYPE_CHECKING: from . import SolarlogConfigEntry -class SolarlogData(update_coordinator.DataUpdateCoordinator): +class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]): """Get and update the latest data.""" def __init__(self, hass: HomeAssistant, entry: SolarlogConfigEntry) -> None: @@ -33,33 +41,136 @@ class SolarlogData(update_coordinator.DataUpdateCoordinator): hass, _LOGGER, name="SolarLog", update_interval=timedelta(seconds=60) ) + self.new_device_callbacks: list[Callable[[int], None]] = [] + self._devices_last_update: set[tuple[int, str]] = set() + host_entry = entry.data[CONF_HOST] + password = entry.data.get("password", "") url = urlparse(host_entry, "http") netloc = url.netloc or url.path path = url.path if url.netloc else "" url = ParseResult("http", netloc, path, *url[3:]) self.unique_id = entry.entry_id - self.name = entry.title self.host = url.geturl() - extended_data = entry.data["extended_data"] - self.solarlog = SolarLogConnector( - self.host, extended_data, hass.config.time_zone + self.host, + tz=hass.config.time_zone, + password=password, + session=async_get_clientsession(hass), ) - async def _async_update_data(self): + async def _async_setup(self) -> None: + """Do initialization logic.""" + _LOGGER.debug("Start async_setup") + logged_in = False + if self.solarlog.password != "": + if logged_in := await self.renew_authentication(): + await self.solarlog.test_extended_data_available() + if logged_in or await self.solarlog.test_extended_data_available(): + device_list = await self.solarlog.update_device_list() + self.solarlog.set_enabled_devices({key: True for key in device_list}) + + async def _async_update_data(self) -> SolarlogData: """Update the data from the SolarLog device.""" _LOGGER.debug("Start data update") try: data = await self.solarlog.update_data() - except SolarLogConnectionError as err: - raise ConfigEntryNotReady(err) from err - except SolarLogUpdateError as err: - raise update_coordinator.UpdateFailed(err) from err + if self.solarlog.extended_data: + await self.solarlog.update_device_list() + data.inverter_data = await 
self.solarlog.update_inverter_data() + except SolarLogConnectionError as ex: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from ex + except SolarLogAuthenticationError as ex: + if await self.renew_authentication(): + # login was successful, update availability of extended data, retry data update + await self.solarlog.test_extended_data_available() + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from ex + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from ex + except SolarLogUpdateError as ex: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + ) from ex _LOGGER.debug("Data successfully updated") + if self.solarlog.extended_data: + self._async_add_remove_devices(data) + _LOGGER.debug("Add_remove_devices finished") + return data + + def _async_add_remove_devices(self, data: SolarlogData) -> None: + """Add new devices, remove non-existing devices.""" + if ( + current_devices := { + (k, self.solarlog.device_name(k)) for k in data.inverter_data + } + ) == self._devices_last_update: + return + + # remove old devices + if removed_devices := self._devices_last_update - current_devices: + _LOGGER.debug("Removed device(s): %s", ", ".join(map(str, removed_devices))) + device_registry = dr.async_get(self.hass) + + for removed_device in removed_devices: + device_name = "" + for did, dn in self._devices_last_update: + if did == removed_device[0]: + device_name = dn + break + if device := device_registry.async_get_device( + identifiers={ + ( + DOMAIN, + f"{self.unique_id}_{slugify(device_name)}", + ) + } + ): + device_registry.async_update_device( + device_id=device.id, + remove_config_entry_id=self.unique_id, + ) + _LOGGER.debug("Device removed from device registry: %s", device.id) + + # add new devices + if new_devices := current_devices - self._devices_last_update: + _LOGGER.debug("New device(s) found: %s", ", ".join(map(str, new_devices))) + for device_id in new_devices: + for callback in self.new_device_callbacks: + callback(device_id[0]) + + self._devices_last_update = current_devices + + async def renew_authentication(self) -> bool: + """Renew access token for SolarLog API.""" + logged_in = False + try: + logged_in = await self.solarlog.login() + except SolarLogAuthenticationError as ex: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from ex + except (SolarLogConnectionError, SolarLogUpdateError) as ex: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from ex + + _LOGGER.debug("Credentials successfully updated? %s", logged_in) + + return logged_in diff --git a/homeassistant/components/solarlog/diagnostics.py b/homeassistant/components/solarlog/diagnostics.py new file mode 100644 index 00000000000..02f6c96edc2 --- /dev/null +++ b/homeassistant/components/solarlog/diagnostics.py @@ -0,0 +1,27 @@ +"""Provides diagnostics for Solarlog.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from . 
import SolarlogConfigEntry + +TO_REDACT = [ + CONF_HOST, +] + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: SolarlogConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = config_entry.runtime_data.data + + return { + "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), + "solarlog_data": data.to_dict(), + } diff --git a/homeassistant/components/solarlog/entity.py b/homeassistant/components/solarlog/entity.py new file mode 100644 index 00000000000..bfdc52dccf1 --- /dev/null +++ b/homeassistant/components/solarlog/entity.py @@ -0,0 +1,71 @@ +"""Entities for SolarLog integration.""" + +from __future__ import annotations + +from homeassistant.components.sensor import SensorEntityDescription +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.util import slugify + +from .const import DOMAIN +from .coordinator import SolarLogCoordinator + + +class SolarLogBaseEntity(CoordinatorEntity[SolarLogCoordinator]): + """SolarLog base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: SolarLogCoordinator, + description: SensorEntityDescription, + ) -> None: + """Initialize the SolarLogCoordinator sensor.""" + super().__init__(coordinator) + + self.entity_description = description + + +class SolarLogCoordinatorEntity(SolarLogBaseEntity): + """Base SolarLog Coordinator entity.""" + + def __init__( + self, + coordinator: SolarLogCoordinator, + description: SensorEntityDescription, + ) -> None: + """Initialize the SolarLogCoordinator sensor.""" + super().__init__(coordinator, description) + + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + self._attr_device_info = DeviceInfo( + manufacturer="Solar-Log", + model="Controller", + identifiers={(DOMAIN, coordinator.unique_id)}, + name="SolarLog", + configuration_url=coordinator.host, + ) + + +class SolarLogInverterEntity(SolarLogBaseEntity): + """Base SolarLog inverter entity.""" + + def __init__( + self, + coordinator: SolarLogCoordinator, + description: SensorEntityDescription, + device_id: int, + ) -> None: + """Initialize the SolarLogInverter sensor.""" + super().__init__(coordinator, description) + name = f"{coordinator.unique_id}_{slugify(coordinator.solarlog.device_name(device_id))}" + self._attr_unique_id = f"{name}_{description.key}" + self._attr_device_info = DeviceInfo( + manufacturer="Solar-Log", + model="Inverter", + identifiers={(DOMAIN, name)}, + name=coordinator.solarlog.device_name(device_id), + via_device=(DOMAIN, coordinator.unique_id), + ) + self.device_id = device_id diff --git a/homeassistant/components/solarlog/manifest.json b/homeassistant/components/solarlog/manifest.json index 0c097b7146d..486b30edfd3 100644 --- a/homeassistant/components/solarlog/manifest.json +++ b/homeassistant/components/solarlog/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/solarlog", "iot_class": "local_polling", "loggers": ["solarlog_cli"], - "requirements": ["solarlog_cli==0.1.6"] + "quality_scale": "platinum", + "requirements": ["solarlog_cli==0.4.0"] } diff --git a/homeassistant/components/solarlog/quality_scale.yaml b/homeassistant/components/solarlog/quality_scale.yaml new file mode 100644 index 00000000000..543889ee18c --- /dev/null +++ b/homeassistant/components/solarlog/quality_scale.yaml @@ -0,0 +1,81 @@ +rules: + # Bronze + config-flow: done + test-before-configure: 
done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: No explicit event subscriptions. + dependency-transparency: done + action-setup: + status: exempt + comment: No custom action. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: + status: exempt + comment: No custom action. + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: No custom action. + reauthentication-flow: done + parallel-updates: + status: exempt + comment: Coordinator and sensor only platform. + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: No options flow. + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: + status: exempt + comment: Solar-Log device cannot be discovered. + stale-devices: done + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + dynamic-devices: done + discovery-update-info: + status: exempt + comment: Solar-Log device cannot be discovered. + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: + status: exempt + comment: | + This integration doesn't have known issues that could be resolved by the user. + docs-examples: done + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/solarlog/sensor.py b/homeassistant/components/solarlog/sensor.py index 45961133e8a..bcff5d57e1b 100644 --- a/homeassistant/components/solarlog/sensor.py +++ b/homeassistant/components/solarlog/sensor.py @@ -1,9 +1,13 @@ """Platform for solarlog sensors.""" +from __future__ import annotations + from collections.abc import Callable from dataclasses import dataclass from datetime import datetime +from solarlog_cli.solarlog_models import InverterData, SolarlogData + from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -17,184 +21,254 @@ from homeassistant.const import ( UnitOfPower, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.helpers.typing import StateType -from . import SolarlogConfigEntry, SolarlogData -from .const import DOMAIN +from . 
import SolarlogConfigEntry +from .entity import SolarLogCoordinatorEntity, SolarLogInverterEntity -@dataclass(frozen=True) -class SolarLogSensorEntityDescription(SensorEntityDescription): - """Describes Solarlog sensor entity.""" +@dataclass(frozen=True, kw_only=True) +class SolarLogCoordinatorSensorEntityDescription(SensorEntityDescription): + """Describes Solarlog coordinator sensor entity.""" - value: Callable[[float | int], float] | Callable[[datetime], datetime] | None = None + value_fn: Callable[[SolarlogData], StateType | datetime | None] -SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( - SolarLogSensorEntityDescription( +@dataclass(frozen=True, kw_only=True) +class SolarLogInverterSensorEntityDescription(SensorEntityDescription): + """Describes Solarlog inverter sensor entity.""" + + value_fn: Callable[[InverterData], float | None] + + +SOLARLOG_SENSOR_TYPES: tuple[SolarLogCoordinatorSensorEntityDescription, ...] = ( + SolarLogCoordinatorSensorEntityDescription( key="last_updated", translation_key="last_update", device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.last_updated, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="power_ac", translation_key="power_ac", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.power_ac, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="power_dc", translation_key="power_dc", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.power_dc, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="voltage_ac", translation_key="voltage_ac", native_unit_of_measurement=UnitOfElectricPotential.VOLT, device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.voltage_ac, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="voltage_dc", translation_key="voltage_dc", native_unit_of_measurement=UnitOfElectricPotential.VOLT, device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.voltage_dc, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="yield_day", translation_key="yield_day", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=3, + value_fn=lambda data: data.yield_day, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="yield_yesterday", translation_key="yield_yesterday", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.yield_yesterday, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="yield_month", translation_key="yield_month", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + 
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=3, + value_fn=lambda data: data.yield_month, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="yield_year", translation_key="yield_year", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda data: data.yield_year, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="yield_total", translation_key="yield_total", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL, - value=lambda value: round(value / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.yield_total, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_ac", translation_key="consumption_ac", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.consumption_ac, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_day", translation_key="consumption_day", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=3, + value_fn=lambda data: data.consumption_day, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_yesterday", translation_key="consumption_yesterday", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.consumption_yesterday, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_month", translation_key="consumption_month", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=3, + value_fn=lambda data: data.consumption_month, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_year", translation_key="consumption_year", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: 
round(value / 1000, 3), + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=3, + value_fn=lambda data: data.consumption_year, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_total", translation_key="consumption_total", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL, - value=lambda value: round(value / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.consumption_total, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="self_consumption_year", translation_key="self_consumption_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda data: data.self_consumption_year, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="total_power", translation_key="total_power", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.total_power, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="alternator_loss", translation_key="alternator_loss", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.alternator_loss, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="capacity", translation_key="capacity", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value=lambda value: round(value * 100, 1), + suggested_display_precision=1, + value_fn=lambda data: data.capacity, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="efficiency", translation_key="efficiency", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value=lambda value: round(value * 100, 1), + suggested_display_precision=1, + value_fn=lambda data: data.efficiency, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="power_available", translation_key="power_available", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.power_available, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="usage", translation_key="usage", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value=lambda value: round(value * 100, 1), + suggested_display_precision=1, + value_fn=lambda data: data.usage, + ), +) + +INVERTER_SENSOR_TYPES: tuple[SolarLogInverterSensorEntityDescription, ...] 
= ( + SolarLogInverterSensorEntityDescription( + key="current_power", + translation_key="current_power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + value_fn=( + lambda inverter: None if inverter is None else inverter.current_power + ), + ), + SolarLogInverterSensorEntityDescription( + key="consumption_year", + translation_key="consumption_year", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=3, + value_fn=( + lambda inverter: None if inverter is None else inverter.consumption_year + ), ), ) @@ -206,39 +280,53 @@ async def async_setup_entry( ) -> None: """Add solarlog entry.""" coordinator = entry.runtime_data - async_add_entities( - SolarlogSensor(coordinator, description) for description in SENSOR_TYPES - ) + entities: list[SensorEntity] = [ + SolarLogCoordinatorSensor(coordinator, sensor) + for sensor in SOLARLOG_SENSOR_TYPES + ] -class SolarlogSensor(CoordinatorEntity[SolarlogData], SensorEntity): - """Representation of a Sensor.""" + device_data = coordinator.data.inverter_data - _attr_has_entity_name = True - - entity_description: SolarLogSensorEntityDescription - - def __init__( - self, - coordinator: SolarlogData, - description: SolarLogSensorEntityDescription, - ) -> None: - """Initialize the sensor.""" - super().__init__(coordinator) - self.entity_description = description - self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.unique_id)}, - manufacturer="Solar-Log", - name=coordinator.name, - configuration_url=coordinator.host, + if device_data: + entities.extend( + SolarLogInverterSensor(coordinator, sensor, device_id) + for device_id in device_data + for sensor in INVERTER_SENSOR_TYPES ) - @property - def native_value(self): - """Return the native sensor value.""" - raw_attr = self.coordinator.data.get(self.entity_description.key) + async_add_entities(entities) - if self.entity_description.value: - return self.entity_description.value(raw_attr) - return raw_attr + def _async_add_new_device(device_id: int) -> None: + async_add_entities( + SolarLogInverterSensor(coordinator, sensor, device_id) + for sensor in INVERTER_SENSOR_TYPES + ) + + coordinator.new_device_callbacks.append(_async_add_new_device) + + +class SolarLogCoordinatorSensor(SolarLogCoordinatorEntity, SensorEntity): + """Represents a SolarLog sensor.""" + + entity_description: SolarLogCoordinatorSensorEntityDescription + + @property + def native_value(self) -> StateType | datetime: + """Return the state for this sensor.""" + + return self.entity_description.value_fn(self.coordinator.data) + + +class SolarLogInverterSensor(SolarLogInverterEntity, SensorEntity): + """Represents a SolarLog inverter sensor.""" + + entity_description: SolarLogInverterSensorEntityDescription + + @property + def native_value(self) -> StateType: + """Return the state for this sensor.""" + + return self.entity_description.value_fn( + self.coordinator.data.inverter_data[self.device_id] + ) diff --git a/homeassistant/components/solarlog/strings.json b/homeassistant/components/solarlog/strings.json index f5f5e064294..bf87b0b0938 100644 --- a/homeassistant/components/solarlog/strings.json +++ b/homeassistant/components/solarlog/strings.json @@ -5,27 +5,54 @@ "title": "Define your 
Solar-Log connection", "data": { "host": "[%key:common::config_flow::data::host%]", - "name": "The prefix to be used for your Solar-Log sensors", - "extended_data": "Get additional data from Solar-Log. Extended data is only accessible, if no password is set for the Solar-Log. Use at your own risk!" + "has_password": "I have the password for the Solar-Log user account." }, "data_description": { - "host": "The hostname or IP address of your Solar-Log device." + "host": "The hostname or IP address of your Solar-Log device.", + "has_password": "The password is required, if the open JSON-API is deactivated or if you would like to access additional data provided by your Solar-Log device." + } + }, + "password": { + "title": "Define your Solar-Log connection", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "The password for the general user of your Solar-Log device." + } + }, + "reauth_confirm": { + "description": "Update your credentials for Solar-Log device", + "data": { + "has_password": "[%key:component::solarlog::config::step::user::data::has_password%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "has_password": "[%key:component::solarlog::config::step::user::data_description::has_password%]", + "password": "[%key:component::solarlog::config::step::password::data_description::password%]" } }, "reconfigure": { "title": "Configure SolarLog", "data": { - "extended_data": "[%key:component::solarlog::config::step::user::data::extended_data%]" + "has_password": "[%key:component::solarlog::config::step::user::data::has_password%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "has_password": "[%key:component::solarlog::config::step::user::data_description::has_password%]", + "password": "[%key:component::solarlog::config::step::password::data_description::password%]" } } }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "password_error": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, @@ -101,5 +128,16 @@ "name": "Usage" } } + }, + "exceptions": { + "update_error": { + "message": "Error while updating data from the API." + }, + "config_entry_not_ready": { + "message": "Error while loading the config entry." + }, + "auth_failed": { + "message": "Error while logging in to the API." 
+ } } } diff --git a/homeassistant/components/solax/__init__.py b/homeassistant/components/solax/__init__.py index 253f3b55e0a..3b9df623559 100644 --- a/homeassistant/components/solax/__init__.py +++ b/homeassistant/components/solax/__init__.py @@ -54,6 +54,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SolaxConfigEntry) -> boo coordinator = SolaxDataUpdateCoordinator( hass, logger=_LOGGER, + config_entry=entry, name=f"solax {entry.title}", update_interval=SCAN_INTERVAL, update_method=_async_update, diff --git a/homeassistant/components/solax/manifest.json b/homeassistant/components/solax/manifest.json index 2ca246a4e77..631ace3792f 100644 --- a/homeassistant/components/solax/manifest.json +++ b/homeassistant/components/solax/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/solax", "iot_class": "local_polling", "loggers": ["solax"], - "requirements": ["solax==3.1.1"] + "requirements": ["solax==3.2.1"] } diff --git a/homeassistant/components/soma/__init__.py b/homeassistant/components/soma/__init__.py index 7b14aaa3c81..9ffe5539ff3 100644 --- a/homeassistant/components/soma/__init__.py +++ b/homeassistant/components/soma/__init__.py @@ -2,12 +2,7 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine -import logging -from typing import Any - from api.soma_api import SomaApi -from requests import RequestException import voluptuous as vol from homeassistant import config_entries @@ -15,16 +10,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType -from .const import API, DOMAIN, HOST, PORT -from .utils import is_api_response_success - -_LOGGER = logging.getLogger(__name__) - -DEVICES = "devices" +from .const import API, DEVICES, DOMAIN, HOST, PORT CONFIG_SCHEMA = vol.Schema( vol.All( @@ -72,98 +60,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -def soma_api_call[_SomaEntityT: SomaEntity]( - api_call: Callable[[_SomaEntityT], Coroutine[Any, Any, dict]], -) -> Callable[[_SomaEntityT], Coroutine[Any, Any, dict]]: - """Soma api call decorator.""" - - async def inner(self: _SomaEntityT) -> dict: - response = {} - try: - response_from_api = await api_call(self) - except RequestException: - if self.api_is_available: - _LOGGER.warning("Connection to SOMA Connect failed") - self.api_is_available = False - else: - if not self.api_is_available: - self.api_is_available = True - _LOGGER.info("Connection to SOMA Connect succeeded") - - if not is_api_response_success(response_from_api): - if self.is_available: - self.is_available = False - _LOGGER.warning( - ( - "Device is unreachable (%s). 
Error while fetching the" - " state: %s" - ), - self.name, - response_from_api["msg"], - ) - else: - if not self.is_available: - self.is_available = True - _LOGGER.info("Device %s is now reachable", self.name) - response = response_from_api - return response - - return inner - - -class SomaEntity(Entity): - """Representation of a generic Soma device.""" - - _attr_has_entity_name = True - - def __init__(self, device, api): - """Initialize the Soma device.""" - self.device = device - self.api = api - self.current_position = 50 - self.battery_state = 0 - self.is_available = True - self.api_is_available = True - - @property - def available(self): - """Return true if the last API commands returned successfully.""" - return self.is_available - - @property - def unique_id(self): - """Return the unique id base on the id returned by pysoma API.""" - return self.device["mac"] - - @property - def device_info(self) -> DeviceInfo: - """Return device specific attributes. - - Implemented by platform classes. - """ - return DeviceInfo( - identifiers={(DOMAIN, self.unique_id)}, - manufacturer="Wazombi Labs", - name=self.device["name"], - ) - - def set_position(self, position: int) -> None: - """Set the current device position.""" - self.current_position = position - self.schedule_update_ha_state() - - @soma_api_call - async def get_shade_state_from_api(self) -> dict: - """Return the shade state from the api.""" - return await self.hass.async_add_executor_job( - self.api.get_shade_state, self.device["mac"] - ) - - @soma_api_call - async def get_battery_level_from_api(self) -> dict: - """Return the battery level from the api.""" - return await self.hass.async_add_executor_job( - self.api.get_battery_level, self.device["mac"] - ) diff --git a/homeassistant/components/soma/config_flow.py b/homeassistant/components/soma/config_flow.py index 23aabf5a5e0..346f499c6fa 100644 --- a/homeassistant/components/soma/config_flow.py +++ b/homeassistant/components/soma/config_flow.py @@ -39,7 +39,7 @@ class SomaFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_creation(user_input) - async def async_step_creation(self, user_input=None): + async def async_step_creation(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Finish config flow.""" try: api = await self.hass.async_add_executor_job( @@ -50,7 +50,7 @@ class SomaFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="connection_error") try: result = await self.hass.async_add_executor_job(api.list_devices) - _LOGGER.info("Successfully set up Soma Connect") + _LOGGER.debug("Successfully set up Soma Connect") if result["result"] == "success": return self.async_create_entry( title="Soma Connect", @@ -67,8 +67,8 @@ class SomaFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.error("Connection to SOMA Connect failed with KeyError") return self.async_abort(reason="connection_error") - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle flow start from existing config section.""" if self._async_current_entries(): return self.async_abort(reason="already_setup") - return await self.async_step_creation(user_input) + return await self.async_step_creation(import_data) diff --git a/homeassistant/components/soma/const.py b/homeassistant/components/soma/const.py index 815a0176e7e..b34596abe93 100644 --- a/homeassistant/components/soma/const.py +++ b/homeassistant/components/soma/const.py @@ -4,3 +4,5 @@ DOMAIN = "soma" HOST = "host" PORT = "port" API 
= "api" + +DEVICES = "devices" diff --git a/homeassistant/components/soma/cover.py b/homeassistant/components/soma/cover.py index a5d9507af4a..50f7d34e406 100644 --- a/homeassistant/components/soma/cover.py +++ b/homeassistant/components/soma/cover.py @@ -16,7 +16,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import API, DEVICES, DOMAIN, SomaEntity +from .const import API, DEVICES, DOMAIN +from .entity import SomaEntity from .utils import is_api_response_success diff --git a/homeassistant/components/soma/entity.py b/homeassistant/components/soma/entity.py new file mode 100644 index 00000000000..f9824d107b1 --- /dev/null +++ b/homeassistant/components/soma/entity.py @@ -0,0 +1,112 @@ +"""Support for Soma Smartshades.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +import logging +from typing import Any + +from requests import RequestException + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN +from .utils import is_api_response_success + +_LOGGER = logging.getLogger(__name__) + + +def soma_api_call[_SomaEntityT: SomaEntity]( + api_call: Callable[[_SomaEntityT], Coroutine[Any, Any, dict]], +) -> Callable[[_SomaEntityT], Coroutine[Any, Any, dict]]: + """Soma api call decorator.""" + + async def inner(self: _SomaEntityT) -> dict: + response = {} + try: + response_from_api = await api_call(self) + except RequestException: + if self.api_is_available: + _LOGGER.warning("Connection to SOMA Connect failed") + self.api_is_available = False + else: + if not self.api_is_available: + self.api_is_available = True + _LOGGER.info("Connection to SOMA Connect succeeded") + + if not is_api_response_success(response_from_api): + if self.is_available: + self.is_available = False + _LOGGER.warning( + ( + "Device is unreachable (%s). Error while fetching the" + " state: %s" + ), + self.name, + response_from_api["msg"], + ) + else: + if not self.is_available: + self.is_available = True + _LOGGER.info("Device %s is now reachable", self.name) + response = response_from_api + return response + + return inner + + +class SomaEntity(Entity): + """Representation of a generic Soma device.""" + + _attr_has_entity_name = True + + def __init__(self, device, api): + """Initialize the Soma device.""" + self.device = device + self.api = api + self.current_position = 50 + self.battery_state = 0 + self.is_available = True + self.api_is_available = True + + @property + def available(self): + """Return true if the last API commands returned successfully.""" + return self.is_available + + @property + def unique_id(self): + """Return the unique id base on the id returned by pysoma API.""" + return self.device["mac"] + + @property + def device_info(self) -> DeviceInfo: + """Return device specific attributes. + + Implemented by platform classes. 
+ """ + return DeviceInfo( + identifiers={(DOMAIN, self.unique_id)}, + manufacturer="Wazombi Labs", + name=self.device["name"], + ) + + def set_position(self, position: int) -> None: + """Set the current device position.""" + self.current_position = position + self.schedule_update_ha_state() + + @soma_api_call + async def get_shade_state_from_api(self) -> dict: + """Return the shade state from the api.""" + return await self.hass.async_add_executor_job( + self.api.get_shade_state, self.device["mac"] + ) + + @soma_api_call + async def get_battery_level_from_api(self) -> dict: + """Return the battery level from the api.""" + return await self.hass.async_add_executor_job( + self.api.get_battery_level, self.device["mac"] + ) diff --git a/homeassistant/components/soma/sensor.py b/homeassistant/components/soma/sensor.py index 4992ec5cde4..806886009f3 100644 --- a/homeassistant/components/soma/sensor.py +++ b/homeassistant/components/soma/sensor.py @@ -9,8 +9,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import Throttle -from . import DEVICES, SomaEntity -from .const import API, DOMAIN +from .const import API, DEVICES, DOMAIN +from .entity import SomaEntity MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=30) diff --git a/homeassistant/components/somfy_mylink/config_flow.py b/homeassistant/components/somfy_mylink/config_flow.py index 9a8b5d76d3f..c2d85160175 100644 --- a/homeassistant/components/somfy_mylink/config_flow.py +++ b/homeassistant/components/somfy_mylink/config_flow.py @@ -116,11 +116,6 @@ class SomfyConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input): - """Handle import.""" - self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) - return await self.async_step_user(user_input) - @staticmethod @callback def async_get_options_flow( @@ -135,9 +130,8 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.options = deepcopy(dict(config_entry.options)) - self._target_id = None + self._target_id: str | None = None @callback def _async_callback_targets(self): @@ -155,7 +149,9 @@ class OptionsFlowHandler(OptionsFlow): return cover["name"] raise KeyError - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" if self.config_entry.state is not ConfigEntryState.LOADED: @@ -178,9 +174,13 @@ class OptionsFlowHandler(OptionsFlow): return self.async_show_form(step_id="init", data_schema=data_schema, errors={}) - async def async_step_target_config(self, user_input=None, target_id=None): + async def async_step_target_config( + self, user_input: dict[str, bool] | None = None, target_id: str | None = None + ) -> ConfigFlowResult: """Handle options flow for target.""" - reversed_target_ids = self.options.setdefault(CONF_REVERSED_TARGET_IDS, {}) + reversed_target_ids: dict[str | None, bool] = self.options.setdefault( + CONF_REVERSED_TARGET_IDS, {} + ) if user_input is not None: if user_input[CONF_REVERSE] != reversed_target_ids.get(self._target_id): diff --git a/homeassistant/components/somfy_mylink/cover.py b/homeassistant/components/somfy_mylink/cover.py index 577795d172b..8c64e58362b 100644 --- a/homeassistant/components/somfy_mylink/cover.py +++ b/homeassistant/components/somfy_mylink/cover.py @@ -3,9 +3,8 @@ 
import logging from typing import Any -from homeassistant.components.cover import CoverDeviceClass, CoverEntity +from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_CLOSED, STATE_OPEN from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -52,7 +51,7 @@ async def async_setup_entry( cover_list.append(SomfyShade(somfy_mylink, **cover_config)) - _LOGGER.info( + _LOGGER.debug( "Adding Somfy Cover: %s with targetID %s", cover_config["name"], cover_config["target_id"], @@ -131,7 +130,7 @@ class SomfyShade(RestoreEntity, CoverEntity): last_state = await self.async_get_last_state() if last_state is not None and last_state.state in ( - STATE_OPEN, - STATE_CLOSED, + CoverState.OPEN, + CoverState.CLOSED, ): - self._attr_is_closed = last_state.state == STATE_CLOSED + self._attr_is_closed = last_state.state == CoverState.CLOSED diff --git a/homeassistant/components/sonarr/__init__.py b/homeassistant/components/sonarr/__init__.py index 89c247ebbfb..7718ff799f5 100644 --- a/homeassistant/components/sonarr/__init__.py +++ b/homeassistant/components/sonarr/__init__.py @@ -107,7 +107,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: } hass.config_entries.async_update_entry(entry, data=data, version=2) - LOGGER.info("Migration to version %s successful", entry.version) + LOGGER.debug("Migration to version %s successful", entry.version) return True diff --git a/homeassistant/components/sonarr/config_flow.py b/homeassistant/components/sonarr/config_flow.py index 84bae85571e..e1cedba10e7 100644 --- a/homeassistant/components/sonarr/config_flow.py +++ b/homeassistant/components/sonarr/config_flow.py @@ -13,6 +13,7 @@ import voluptuous as vol import yarl from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -58,22 +59,16 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 2 - def __init__(self) -> None: - """Initialize the flow.""" - self.entry: ConfigEntry | None = None - @staticmethod @callback def async_get_options_flow(config_entry: ConfigEntry) -> SonarrOptionsFlowHandler: """Get the options flow for this handler.""" - return SonarrOptionsFlowHandler(config_entry) + return SonarrOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -81,10 +76,11 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - assert self.entry is not None return self.async_show_form( step_id="reauth_confirm", - description_placeholders={"url": self.entry.data[CONF_URL]}, + description_placeholders={ + "url": self._get_reauth_entry().data[CONF_URL] + }, errors={}, ) @@ -97,8 +93,15 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): errors = {} if user_input is not None: - if self.entry: - user_input = {**self.entry.data, **user_input} + # aiopyarr defaults to the service port if one isn't given + # this is counter to standard practice where http = 80 + # and https = 443. 
+ if CONF_URL in user_input: + url = yarl.URL(user_input[CONF_URL]) + user_input[CONF_URL] = f"{url.scheme}://{url.host}:{url.port}{url.path}" + + if self.source == SOURCE_REAUTH: + user_input = {**self._get_reauth_entry().data, **user_input} if CONF_VERIFY_SSL not in user_input: user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL @@ -113,8 +116,10 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") return self.async_abort(reason="unknown") else: - if self.entry: - return await self._async_reauth_update_entry(user_input) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input + ) parsed = yarl.URL(user_input[CONF_URL]) @@ -129,19 +134,9 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def _async_reauth_update_entry( - self, data: dict[str, Any] - ) -> ConfigFlowResult: - """Update existing config entry.""" - assert self.entry is not None - self.hass.config_entries.async_update_entry(self.entry, data=data) - await self.hass.config_entries.async_reload(self.entry.entry_id) - - return self.async_abort(reason="reauth_successful") - def _get_user_data_schema(self) -> dict[vol.Marker, type]: """Get the data schema to display user form.""" - if self.entry: + if self.source == SOURCE_REAUTH: return {vol.Required(CONF_API_KEY): str} data_schema: dict[vol.Marker, type] = { @@ -160,10 +155,6 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): class SonarrOptionsFlowHandler(OptionsFlow): """Handle Sonarr client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, int] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/sonarr/manifest.json b/homeassistant/components/sonarr/manifest.json index bfc2b6f787f..c81dc9c3972 100644 --- a/homeassistant/components/sonarr/manifest.json +++ b/homeassistant/components/sonarr/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/sonarr", "iot_class": "local_polling", "loggers": ["aiopyarr"], - "quality_scale": "silver", "requirements": ["aiopyarr==23.4.0"] } diff --git a/homeassistant/components/songpal/config_flow.py b/homeassistant/components/songpal/config_flow.py index 0724646a594..41cc0763642 100644 --- a/homeassistant/components/songpal/config_flow.py +++ b/homeassistant/components/songpal/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse from songpal import Device, SongpalException @@ -21,9 +21,11 @@ _LOGGER = logging.getLogger(__name__) class SongpalConfig: """Device Configuration.""" - def __init__(self, name, host, endpoint): + def __init__(self, name: str, host: str | None, endpoint: str) -> None: """Initialize Configuration.""" self.name = name + if TYPE_CHECKING: + assert host is not None self.host = host self.endpoint = endpoint @@ -33,12 +35,10 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the flow.""" - self.conf: SongpalConfig | None = None + conf: SongpalConfig async def async_step_user( - self, user_input: dict[str, Any] | None = None + self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" if user_input is None: @@ -75,7 +75,9 @@ class 
SongpalConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_init(user_input) - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" # Check if already configured self._async_abort_entries_match({CONF_ENDPOINT: self.conf.endpoint}) @@ -106,7 +108,7 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.debug("Discovered: %s", discovery_info) friendly_name = discovery_info.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME] - parsed_url = urlparse(discovery_info.ssdp_location) + hostname = urlparse(discovery_info.ssdp_location).hostname scalarweb_info = discovery_info.upnp["X_ScalarWebAPI_DeviceInfo"] endpoint = scalarweb_info["X_ScalarWebAPI_BaseURL"] service_types = scalarweb_info["X_ScalarWebAPI_ServiceList"][ @@ -117,19 +119,24 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): if "videoScreen" in service_types: return self.async_abort(reason="not_songpal_device") + if TYPE_CHECKING: + # the hostname must be str because the ssdp_location is not bytes and + # not a relative url + assert isinstance(hostname, str) + self.context["title_placeholders"] = { CONF_NAME: friendly_name, - CONF_HOST: parsed_url.hostname, + CONF_HOST: hostname, } - self.conf = SongpalConfig(friendly_name, parsed_url.hostname, endpoint) + self.conf = SongpalConfig(friendly_name, hostname, endpoint) return await self.async_step_init() - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, str]) -> ConfigFlowResult: """Import a config entry.""" - name = user_input.get(CONF_NAME) - endpoint = user_input.get(CONF_ENDPOINT) + name = import_data.get(CONF_NAME) + endpoint = import_data[CONF_ENDPOINT] parsed_url = urlparse(endpoint) # Try to connect to test the endpoint @@ -146,4 +153,4 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): self.conf = SongpalConfig(name, parsed_url.hostname, endpoint) - return await self.async_step_init(user_input) + return await self.async_step_init(import_data) diff --git a/homeassistant/components/songpal/icons.json b/homeassistant/components/songpal/icons.json index 1c831fbbd00..6e7cf359c23 100644 --- a/homeassistant/components/songpal/icons.json +++ b/homeassistant/components/songpal/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_sound_setting": "mdi:volume-high" + "set_sound_setting": { + "service": "mdi:volume-high" + } } } diff --git a/homeassistant/components/songpal/manifest.json b/homeassistant/components/songpal/manifest.json index c4dec6b938d..a04bea0c48d 100644 --- a/homeassistant/components/songpal/manifest.json +++ b/homeassistant/components/songpal/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/songpal", "iot_class": "local_push", "loggers": ["songpal"], - "quality_scale": "gold", "requirements": ["python-songpal==0.16.2"], "ssdp": [ { diff --git a/homeassistant/components/songpal/media_player.py b/homeassistant/components/songpal/media_player.py index 9f828591a08..b4063b09691 100644 --- a/homeassistant/components/songpal/media_player.py +++ b/homeassistant/components/songpal/media_player.py @@ -167,7 +167,7 @@ class SongpalEntity(MediaPlayerEntity): async def async_activate_websocket(self): """Activate websocket for listening if wanted.""" - _LOGGER.info("Activating websocket connection") + _LOGGER.debug("Activating websocket connection") async def _volume_changed(volume: VolumeChange): _LOGGER.debug("Volume changed: %s", volume) diff 
--git a/homeassistant/components/sonos/__init__.py b/homeassistant/components/sonos/__init__.py index 912a8d04f4e..82e4a5ebfba 100644 --- a/homeassistant/components/sonos/__init__.py +++ b/homeassistant/components/sonos/__init__.py @@ -413,7 +413,7 @@ class SonosDiscoveryManager: continue if self.hosts_in_error.pop(ip_addr, None): - _LOGGER.info("Connection reestablished to Sonos device %s", ip_addr) + _LOGGER.warning("Connection reestablished to Sonos device %s", ip_addr) # Each speaker has the topology for other online speakers, so add them in here if they were not # configured. The metadata is already in Soco for these. if new_hosts := { diff --git a/homeassistant/components/sonos/entity.py b/homeassistant/components/sonos/entity.py index bd7256493e8..98dc8b8b752 100644 --- a/homeassistant/components/sonos/entity.py +++ b/homeassistant/components/sonos/entity.py @@ -85,6 +85,7 @@ class SonosEntity(Entity): identifiers={(DOMAIN, self.soco.uid)}, name=self.speaker.zone_name, model=self.speaker.model_name.replace("Sonos ", ""), + model_id=self.speaker.model_number, sw_version=self.speaker.version, connections={ (dr.CONNECTION_NETWORK_MAC, self.speaker.mac_address), diff --git a/homeassistant/components/sonos/exception.py b/homeassistant/components/sonos/exception.py index 6f7483f4188..4fd17d84392 100644 --- a/homeassistant/components/sonos/exception.py +++ b/homeassistant/components/sonos/exception.py @@ -1,6 +1,6 @@ """Sonos specific exceptions.""" -from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_player import BrowseError from homeassistant.exceptions import HomeAssistantError diff --git a/homeassistant/components/sonos/icons.json b/homeassistant/components/sonos/icons.json index e2545358ba6..45027d8eabd 100644 --- a/homeassistant/components/sonos/icons.json +++ b/homeassistant/components/sonos/icons.json @@ -44,12 +44,29 @@ } }, "services": { - "snapshot": "mdi:camera", - "restore": "mdi:camera-retake", - "set_sleep_timer": "mdi:alarm", - "clear_sleep_timer": "mdi:alarm-off", - "play_queue": "mdi:play", - "remove_from_queue": "mdi:playlist-remove", - "update_alarm": "mdi:alarm" + "snapshot": { + "service": "mdi:camera" + }, + "restore": { + "service": "mdi:camera-retake" + }, + "set_sleep_timer": { + "service": "mdi:alarm" + }, + "clear_sleep_timer": { + "service": "mdi:alarm-off" + }, + "play_queue": { + "service": "mdi:play" + }, + "remove_from_queue": { + "service": "mdi:playlist-remove" + }, + "update_alarm": { + "service": "mdi:alarm" + }, + "get_queue": { + "service": "mdi:queue-first-in-last-out" + } } } diff --git a/homeassistant/components/sonos/manifest.json b/homeassistant/components/sonos/manifest.json index d6c5eb298d8..76a7d0bfa91 100644 --- a/homeassistant/components/sonos/manifest.json +++ b/homeassistant/components/sonos/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/sonos", "iot_class": "local_push", "loggers": ["soco"], - "requirements": ["soco==0.30.4", "sonos-websocket==0.1.3"], + "requirements": ["soco==0.30.6", "sonos-websocket==0.1.3"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:ZonePlayer:1" diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index e68d3dfa97a..8d0917c5dba 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -14,7 +14,7 @@ from soco.core import ( PLAY_MODE_BY_MEANING, PLAY_MODES, ) -from soco.data_structures import 
DidlFavorite +from soco.data_structures import DidlFavorite, DidlMusicTrack from soco.ms_data_structures import MusicServiceItem from sonos_websocket.exception import SonosWebsocketError import voluptuous as vol @@ -22,8 +22,12 @@ import voluptuous as vol from homeassistant.components import media_source, spotify from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE, + ATTR_MEDIA_ALBUM_NAME, ATTR_MEDIA_ANNOUNCE, + ATTR_MEDIA_ARTIST, + ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_TITLE, BrowseMedia, MediaPlayerDeviceClass, MediaPlayerEnqueue, @@ -38,7 +42,7 @@ from homeassistant.components.plex import PLEX_URI_SCHEME from homeassistant.components.plex.services import process_plex_payload from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TIME -from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_platform, service from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -80,6 +84,7 @@ REPEAT_TO_SONOS = { SONOS_TO_REPEAT = {meaning: mode for mode, meaning in REPEAT_TO_SONOS.items()} UPNP_ERRORS_TO_IGNORE = ["701", "711", "712"] +ANNOUNCE_NOT_SUPPORTED_ERRORS: list[str] = ["globalError"] SERVICE_SNAPSHOT = "snapshot" SERVICE_RESTORE = "restore" @@ -88,6 +93,7 @@ SERVICE_CLEAR_TIMER = "clear_sleep_timer" SERVICE_UPDATE_ALARM = "update_alarm" SERVICE_PLAY_QUEUE = "play_queue" SERVICE_REMOVE_FROM_QUEUE = "remove_from_queue" +SERVICE_GET_QUEUE = "get_queue" ATTR_SLEEP_TIME = "sleep_time" ATTR_ALARM_ID = "alarm_id" @@ -190,6 +196,13 @@ async def async_setup_entry( "remove_from_queue", ) + platform.async_register_entity_service( + SERVICE_GET_QUEUE, + None, + "get_queue", + supports_response=SupportsResponse.ONLY, + ) + class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): """Representation of a Sonos entity.""" @@ -544,11 +557,24 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): ) from exc if response.get("success"): return - raise HomeAssistantError( - translation_domain=SONOS_DOMAIN, - translation_key="announce_media_error", - translation_placeholders={"media_id": media_id, "response": response}, - ) + if response.get("type") in ANNOUNCE_NOT_SUPPORTED_ERRORS: + # If the speaker does not support announce do not raise and + # fall through to_play_media to play the clip directly. 
+ _LOGGER.debug( + "Speaker %s does not support announce, media_id %s response %s", + self.speaker.zone_name, + media_id, + response, + ) + else: + raise HomeAssistantError( + translation_domain=SONOS_DOMAIN, + translation_key="announce_media_error", + translation_placeholders={ + "media_id": media_id, + "response": response, + }, + ) if spotify.is_spotify_media_type(media_type): media_type = spotify.resolve_spotify_media_type(media_type) @@ -660,14 +686,23 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): soco.play_from_queue(0) elif media_type in PLAYABLE_MEDIA_TYPES: item = media_browser.get_media(self.media.library, media_id, media_type) if not item: - _LOGGER.error('Could not find "%s" in the library', media_id) - return - + raise ServiceValidationError( + translation_domain=SONOS_DOMAIN, + translation_key="invalid_media", + translation_placeholders={ + "media_id": media_id, + }, + ) self._play_media_queue(soco, item, enqueue) else: - _LOGGER.error('Sonos does not support a media type of "%s"', media_type) + raise ServiceValidationError( + translation_domain=SONOS_DOMAIN, + translation_key="invalid_content_type", + translation_placeholders={ + "media_type": media_type, + }, + ) def _play_media_queue( self, soco: SoCo, item: MusicServiceItem, enqueue: MediaPlayerEnqueue @@ -741,6 +776,20 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): """Remove item from the queue.""" self.coordinator.soco.remove_from_queue(queue_position) + @soco_error() + def get_queue(self) -> list[dict]: + """Get the queue.""" + queue: list[DidlMusicTrack] = self.coordinator.soco.get_queue(max_items=0) + return [ + { + ATTR_MEDIA_TITLE: getattr(track, "title", None), + ATTR_MEDIA_ALBUM_NAME: getattr(track, "album", None), + ATTR_MEDIA_ARTIST: getattr(track, "creator", None), + ATTR_MEDIA_CONTENT_ID: track.get_uri(), + } + for track in queue + ] + @property def extra_state_attributes(self) -> dict[str, Any]: """Return entity specific state attributes.""" diff --git a/homeassistant/components/sonos/services.yaml b/homeassistant/components/sonos/services.yaml index f6df83ef6ed..89706428899 100644 --- a/homeassistant/components/sonos/services.yaml +++ b/homeassistant/components/sonos/services.yaml @@ -63,6 +63,12 @@ remove_from_queue: max: 10000 mode: box +get_queue: + target: + entity: + integration: sonos + domain: media_player + update_alarm: target: device: diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json index 7a73378d69b..d3774e85213 100644 --- a/homeassistant/components/sonos/strings.json +++ b/homeassistant/components/sonos/strings.json @@ -172,6 +172,10 @@ "description": "Enable or disable including grouped rooms." } } + }, + "get_queue": { + "name": "Get queue", + "description": "Returns the contents of the queue."
} }, "exceptions": { @@ -181,6 +185,12 @@ "invalid_sonos_playlist": { "message": "Could not find Sonos playlist: {name}" }, + "invalid_media": { + "message": "Could not find media in library: {media_id}" + }, + "invalid_content_type": { + "message": "Sonos does not support media content type: {media_type}" + }, "announce_media_error": { "message": "Announcing clip {media_id} failed {response}" } diff --git a/homeassistant/components/sony_projector/manifest.json b/homeassistant/components/sony_projector/manifest.json index 5cf5df4c96f..f674f6fa56b 100644 --- a/homeassistant/components/sony_projector/manifest.json +++ b/homeassistant/components/sony_projector/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sony_projector", "iot_class": "local_polling", "loggers": ["pysdcp"], + "quality_scale": "legacy", "requirements": ["pySDCP==1"] } diff --git a/homeassistant/components/soundtouch/config_flow.py b/homeassistant/components/soundtouch/config_flow.py index fea63366db9..af45b8f6bdc 100644 --- a/homeassistant/components/soundtouch/config_flow.py +++ b/homeassistant/components/soundtouch/config_flow.py @@ -1,6 +1,5 @@ """Config flow for Bose SoundTouch integration.""" -import logging from typing import Any from libsoundtouch import soundtouch_device @@ -14,8 +13,6 @@ from homeassistant.helpers import config_validation as cv from .const import DOMAIN -_LOGGER = logging.getLogger(__name__) - class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Bose SoundTouch.""" @@ -25,7 +22,7 @@ class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize a new SoundTouch config flow.""" self.host: str | None = None - self.name = None + self.name: str | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -65,17 +62,21 @@ class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): except RequestException: return self.async_abort(reason="cannot_connect") - self.context["title_placeholders"] = {"name": self.name} + if self.name: + # If we have a name, use it as flow title + self.context["title_placeholders"] = {"name": self.name} return await self.async_step_zeroconf_confirm() - async def async_step_zeroconf_confirm(self, user_input=None): + async def async_step_zeroconf_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user-confirmation of discovered node.""" if user_input is not None: return await self._async_create_soundtouch_entry() return self.async_show_form( step_id="zeroconf_confirm", last_step=True, - description_placeholders={"name": self.name}, + description_placeholders={"name": self.name or "?"}, ) async def _async_get_device_id(self, raise_on_progress: bool = True) -> None: @@ -90,10 +91,10 @@ class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): self.name = device.config.name - async def _async_create_soundtouch_entry(self): + async def _async_create_soundtouch_entry(self) -> ConfigFlowResult: """Finish config flow and create a SoundTouch config entry.""" return self.async_create_entry( - title=self.name, + title=self.name or "SoundTouch", data={ CONF_HOST: self.host, }, diff --git a/homeassistant/components/soundtouch/icons.json b/homeassistant/components/soundtouch/icons.json index 0dd41f4f881..721a5c77032 100644 --- a/homeassistant/components/soundtouch/icons.json +++ b/homeassistant/components/soundtouch/icons.json @@ -1,8 +1,16 @@ { "services": { - "play_everywhere": "mdi:play", - "create_zone": "mdi:plus", - 
"add_zone_slave": "mdi:plus", - "remove_zone_slave": "mdi:minus" + "play_everywhere": { + "service": "mdi:play" + }, + "create_zone": { + "service": "mdi:plus" + }, + "add_zone_slave": { + "service": "mdi:plus" + }, + "remove_zone_slave": { + "service": "mdi:minus" + } } } diff --git a/homeassistant/components/soundtouch/media_player.py b/homeassistant/components/soundtouch/media_player.py index c09c4ed72c4..5edd42b931a 100644 --- a/homeassistant/components/soundtouch/media_player.py +++ b/homeassistant/components/soundtouch/media_player.py @@ -289,7 +289,7 @@ class SoundTouchMediaPlayer(MediaPlayerEntity): if not slaves: _LOGGER.warning("Unable to create zone without slaves") else: - _LOGGER.info("Creating zone with master %s", self._device.config.name) + _LOGGER.debug("Creating zone with master %s", self._device.config.name) self._device.create_zone([slave.device for slave in slaves]) def remove_zone_slave(self, slaves): @@ -305,7 +305,7 @@ class SoundTouchMediaPlayer(MediaPlayerEntity): if not slaves: _LOGGER.warning("Unable to find slaves to remove") else: - _LOGGER.info( + _LOGGER.debug( "Removing slaves from zone with master %s", self._device.config.name ) # SoundTouch API seems to have a bug and won't remove slaves if there are @@ -327,7 +327,7 @@ class SoundTouchMediaPlayer(MediaPlayerEntity): if not slaves: _LOGGER.warning("Unable to find slaves to add") else: - _LOGGER.info( + _LOGGER.debug( "Adding slaves to zone with master %s", self._device.config.name ) self._device.add_zone_slave([slave.device for slave in slaves]) diff --git a/homeassistant/components/spaceapi/__init__.py b/homeassistant/components/spaceapi/__init__.py index 93d448bd17f..90281fe311c 100644 --- a/homeassistant/components/spaceapi/__init__.py +++ b/homeassistant/components/spaceapi/__init__.py @@ -1,6 +1,7 @@ """Support for the SpaceAPI.""" from contextlib import suppress +import math import voluptuous as vol @@ -254,7 +255,17 @@ class APISpaceApiView(HomeAssistantView): """Get data from a sensor.""" if not (sensor_state := hass.states.get(sensor)): return None - sensor_data = {ATTR_NAME: sensor_state.name, ATTR_VALUE: sensor_state.state} + + # SpaceAPI sensor values must be numbers + try: + state = float(sensor_state.state) + except ValueError: + state = math.nan + sensor_data = { + ATTR_NAME: sensor_state.name, + ATTR_VALUE: state, + } + if ATTR_SENSOR_LOCATION in sensor_state.attributes: sensor_data[ATTR_LOCATION] = sensor_state.attributes[ATTR_SENSOR_LOCATION] else: diff --git a/homeassistant/components/spaceapi/manifest.json b/homeassistant/components/spaceapi/manifest.json index 84add9bb4ed..798930bbef5 100644 --- a/homeassistant/components/spaceapi/manifest.json +++ b/homeassistant/components/spaceapi/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@fabaff"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/spaceapi", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/spc/alarm_control_panel.py b/homeassistant/components/spc/alarm_control_panel.py index 7e584ff5e63..44e0572c9e9 100644 --- a/homeassistant/components/spc/alarm_control_panel.py +++ b/homeassistant/components/spc/alarm_control_panel.py @@ -9,13 +9,7 @@ from pyspcwebgw.const import AreaMode from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, -) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - 
STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -25,17 +19,17 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import DATA_API, SIGNAL_UPDATE_ALARM -def _get_alarm_state(area: Area) -> str | None: +def _get_alarm_state(area: Area) -> AlarmControlPanelState | None: """Get the alarm state.""" if area.verified_alarm: - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED mode_to_state = { - AreaMode.UNSET: STATE_ALARM_DISARMED, - AreaMode.PART_SET_A: STATE_ALARM_ARMED_HOME, - AreaMode.PART_SET_B: STATE_ALARM_ARMED_NIGHT, - AreaMode.FULL_SET: STATE_ALARM_ARMED_AWAY, + AreaMode.UNSET: AlarmControlPanelState.DISARMED, + AreaMode.PART_SET_A: AlarmControlPanelState.ARMED_HOME, + AreaMode.PART_SET_B: AlarmControlPanelState.ARMED_NIGHT, + AreaMode.FULL_SET: AlarmControlPanelState.ARMED_AWAY, } return mode_to_state.get(area.mode) @@ -91,7 +85,7 @@ class SpcAlarm(AlarmControlPanelEntity): return self._area.last_changed_by @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" return _get_alarm_state(self._area) diff --git a/homeassistant/components/spc/manifest.json b/homeassistant/components/spc/manifest.json index a707e1a7804..b3c37ce2e2b 100644 --- a/homeassistant/components/spc/manifest.json +++ b/homeassistant/components/spc/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/spc", "iot_class": "local_push", "loggers": ["pyspcwebgw"], + "quality_scale": "legacy", "requirements": ["pyspcwebgw==0.7.0"] } diff --git a/homeassistant/components/speedtestdotnet/__init__.py b/homeassistant/components/speedtestdotnet/__init__.py index aed1cce33db..e4c51ab7aa0 100644 --- a/homeassistant/components/speedtestdotnet/__init__.py +++ b/homeassistant/components/speedtestdotnet/__init__.py @@ -6,7 +6,7 @@ from functools import partial import speedtest -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -35,7 +35,10 @@ async def async_setup_entry( async def _async_finish_startup(hass: HomeAssistant) -> None: """Run this only when HA has finished its startup.""" - await coordinator.async_config_entry_first_refresh() + if config_entry.state is ConfigEntryState.LOADED: + await coordinator.async_refresh() + else: + await coordinator.async_config_entry_first_refresh() # Don't start a speedtest during startup async_at_started(hass, _async_finish_startup) diff --git a/homeassistant/components/speedtestdotnet/config_flow.py b/homeassistant/components/speedtestdotnet/config_flow.py index dc64448bbef..3bfd4eb6e4a 100644 --- a/homeassistant/components/speedtestdotnet/config_flow.py +++ b/homeassistant/components/speedtestdotnet/config_flow.py @@ -30,7 +30,7 @@ class SpeedTestFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: SpeedTestConfigEntry, ) -> SpeedTestOptionsFlowHandler: """Get the options flow for this handler.""" - return SpeedTestOptionsFlowHandler(config_entry) + return SpeedTestOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -48,9 +48,8 @@ class SpeedTestFlowHandler(ConfigFlow, domain=DOMAIN): class 
SpeedTestOptionsFlowHandler(OptionsFlow): """Handle SpeedTest options.""" - def __init__(self, config_entry: SpeedTestConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self._servers: dict = {} async def async_step_init( diff --git a/homeassistant/components/spider/__init__.py b/homeassistant/components/spider/__init__.py index 782486de2d8..4b138ec77a8 100644 --- a/homeassistant/components/spider/__init__.py +++ b/homeassistant/components/spider/__init__.py @@ -1,87 +1,39 @@ -"""Support for Spider Smart devices.""" +"""The Spider integration.""" -import logging +from __future__ import annotations -from spiderpy.spiderapi import SpiderApi, SpiderApiException, UnauthorizedException -import voluptuous as vol - -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers import issue_registry as ir -from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, PLATFORMS +DOMAIN = "spider" -_LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Optional( - CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL - ): cv.time_period, - } - ) +async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool: + """Set up Spider from a config entry.""" + ir.async_create_issue( + hass, + DOMAIN, + DOMAIN, + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="integration_removed", + translation_placeholders={ + "link": "https://www.ithodaalderop.nl/additionelespiderproducten", + "entries": "/config/integrations/integration/spider", }, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up a config entry.""" - hass.data[DOMAIN] = {} - if DOMAIN not in config: - return True - - conf = config[DOMAIN] - - if not hass.config_entries.async_entries(DOMAIN): - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=conf - ) - ) - - return True - - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Spider via config entry.""" - try: - api = await hass.async_add_executor_job( - SpiderApi, - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - entry.data[CONF_SCAN_INTERVAL], - ) - except UnauthorizedException: - _LOGGER.error("Authorization failed") - return False - except SpiderApiException as err: - _LOGGER.error("Can't connect to the Spider API: %s", err) - raise ConfigEntryNotReady from err - - hass.data[DOMAIN][entry.entry_id] = api - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload Spider entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if not unload_ok: - return False - - hass.data[DOMAIN].pop(entry.entry_id) + """Unload a config entry.""" + if all( + config_entry.state is ConfigEntryState.NOT_LOADED + for config_entry in 
hass.config_entries.async_entries(DOMAIN) + if config_entry.entry_id != entry.entry_id + ): + ir.async_delete_issue(hass, DOMAIN, DOMAIN) return True diff --git a/homeassistant/components/spider/climate.py b/homeassistant/components/spider/climate.py deleted file mode 100644 index 11e84a942f4..00000000000 --- a/homeassistant/components/spider/climate.py +++ /dev/null @@ -1,144 +0,0 @@ -"""Support for Spider thermostats.""" - -from typing import Any - -from homeassistant.components.climate import ( - ClimateEntity, - ClimateEntityFeature, - HVACMode, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .const import DOMAIN - -HA_STATE_TO_SPIDER = { - HVACMode.COOL: "Cool", - HVACMode.HEAT: "Heat", - HVACMode.OFF: "Idle", -} - -SPIDER_STATE_TO_HA = {value: key for key, value in HA_STATE_TO_SPIDER.items()} - - -async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Initialize a Spider thermostat.""" - api = hass.data[DOMAIN][config.entry_id] - - async_add_entities( - [ - SpiderThermostat(api, entity) - for entity in await hass.async_add_executor_job(api.get_thermostats) - ] - ) - - -class SpiderThermostat(ClimateEntity): - """Representation of a thermostat.""" - - _attr_has_entity_name = True - _attr_name = None - _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False - - def __init__(self, api, thermostat): - """Initialize the thermostat.""" - self.api = api - self.thermostat = thermostat - self.support_fan = thermostat.fan_speed_values - self.support_hvac = [] - for operation_value in thermostat.operation_values: - if operation_value in SPIDER_STATE_TO_HA: - self.support_hvac.append(SPIDER_STATE_TO_HA[operation_value]) - self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE - if len(self.hvac_modes) > 1 and HVACMode.OFF in self.hvac_modes: - self._attr_supported_features |= ( - ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON - ) - if thermostat.has_fan_mode: - self._attr_supported_features |= ClimateEntityFeature.FAN_MODE - - @property - def device_info(self) -> DeviceInfo: - """Return the device_info of the device.""" - return DeviceInfo( - configuration_url="https://mijn.ithodaalderop.nl/", - identifiers={(DOMAIN, self.thermostat.id)}, - manufacturer=self.thermostat.manufacturer, - model=self.thermostat.model, - name=self.thermostat.name, - ) - - @property - def unique_id(self): - """Return the id of the thermostat, if any.""" - return self.thermostat.id - - @property - def current_temperature(self): - """Return the current temperature.""" - return self.thermostat.current_temperature - - @property - def target_temperature(self): - """Return the temperature we try to reach.""" - return self.thermostat.target_temperature - - @property - def target_temperature_step(self): - """Return the supported step of target temperature.""" - return self.thermostat.temperature_steps - - @property - def min_temp(self): - """Return the minimum temperature.""" - return self.thermostat.minimum_temperature - - @property - def max_temp(self): - """Return the maximum temperature.""" - return self.thermostat.maximum_temperature - - @property - def hvac_mode(self) -> HVACMode: - """Return current 
operation ie. heat, cool, idle.""" - return SPIDER_STATE_TO_HA[self.thermostat.operation_mode] - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return the list of available operation modes.""" - return self.support_hvac - - def set_temperature(self, **kwargs: Any) -> None: - """Set new target temperature.""" - if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: - return - - self.thermostat.set_temperature(temperature) - - def set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set new target operation mode.""" - self.thermostat.set_operation_mode(HA_STATE_TO_SPIDER.get(hvac_mode)) - - @property - def fan_mode(self): - """Return the fan setting.""" - return self.thermostat.current_fan_speed - - def set_fan_mode(self, fan_mode: str) -> None: - """Set fan mode.""" - self.thermostat.set_fan_speed(fan_mode) - - @property - def fan_modes(self): - """List of available fan modes.""" - return self.support_fan - - def update(self) -> None: - """Get the latest data.""" - self.thermostat = self.api.get_thermostat(self.unique_id) diff --git a/homeassistant/components/spider/config_flow.py b/homeassistant/components/spider/config_flow.py index f3076c0c28d..d96fb9e88b6 100644 --- a/homeassistant/components/spider/config_flow.py +++ b/homeassistant/components/spider/config_flow.py @@ -1,87 +1,11 @@ -"""Config flow for Spider.""" +"""Config flow for Spider integration.""" -import logging -from typing import Any +from homeassistant.config_entries import ConfigFlow -from spiderpy.spiderapi import SpiderApi, SpiderApiException, UnauthorizedException -import voluptuous as vol - -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME - -from .const import DEFAULT_SCAN_INTERVAL, DOMAIN - -_LOGGER = logging.getLogger(__name__) - -DATA_SCHEMA_USER = vol.Schema( - {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} -) - -RESULT_AUTH_FAILED = "auth_failed" -RESULT_CONN_ERROR = "conn_error" -RESULT_SUCCESS = "success" +from . 
import DOMAIN class SpiderConfigFlow(ConfigFlow, domain=DOMAIN): - """Handle a Spider config flow.""" + """Handle a config flow for Spider.""" VERSION = 1 - - def __init__(self) -> None: - """Initialize the Spider flow.""" - self.data = { - CONF_USERNAME: "", - CONF_PASSWORD: "", - CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, - } - - def _try_connect(self): - """Try to connect and check auth.""" - try: - SpiderApi( - self.data[CONF_USERNAME], - self.data[CONF_PASSWORD], - self.data[CONF_SCAN_INTERVAL], - ) - except SpiderApiException: - return RESULT_CONN_ERROR - except UnauthorizedException: - return RESULT_AUTH_FAILED - - return RESULT_SUCCESS - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a flow initiated by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - - errors = {} - if user_input is not None: - self.data[CONF_USERNAME] = user_input["username"] - self.data[CONF_PASSWORD] = user_input["password"] - - result = await self.hass.async_add_executor_job(self._try_connect) - - if result == RESULT_SUCCESS: - return self.async_create_entry( - title=DOMAIN, - data=self.data, - ) - if result != RESULT_AUTH_FAILED: - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" - return self.async_abort(reason=result) - - errors["base"] = "invalid_auth" - - return self.async_show_form( - step_id="user", - data_schema=DATA_SCHEMA_USER, - errors=errors, - ) - - async def async_step_import(self, import_data): - """Import spider config from configuration.yaml.""" - return await self.async_step_user(import_data) diff --git a/homeassistant/components/spider/const.py b/homeassistant/components/spider/const.py deleted file mode 100644 index 189763f4e98..00000000000 --- a/homeassistant/components/spider/const.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Constants for the Spider integration.""" - -from homeassistant.const import Platform - -DOMAIN = "spider" -DEFAULT_SCAN_INTERVAL = 300 - -PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] diff --git a/homeassistant/components/spider/manifest.json b/homeassistant/components/spider/manifest.json index a80fd178898..76d148954f2 100644 --- a/homeassistant/components/spider/manifest.json +++ b/homeassistant/components/spider/manifest.json @@ -1,10 +1,9 @@ { "domain": "spider", "name": "Itho Daalderop Spider", - "codeowners": ["@peternijssen"], - "config_flow": true, + "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/spider", + "integration_type": "system", "iot_class": "cloud_polling", - "loggers": ["spiderpy"], - "requirements": ["spiderpy==1.6.1"] + "requirements": [] } diff --git a/homeassistant/components/spider/sensor.py b/homeassistant/components/spider/sensor.py deleted file mode 100644 index 70c38a40e15..00000000000 --- a/homeassistant/components/spider/sensor.py +++ /dev/null @@ -1,108 +0,0 @@ -"""Support for Spider Powerplugs (energy & power).""" - -from __future__ import annotations - -from homeassistant.components.sensor import ( - SensorDeviceClass, - SensorEntity, - SensorStateClass, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfEnergy, UnitOfPower -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .const import DOMAIN - - -async def async_setup_entry( - hass: HomeAssistant, config: 
ConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Initialize a Spider Power Plug.""" - api = hass.data[DOMAIN][config.entry_id] - entities: list[SensorEntity] = [] - - for entity in await hass.async_add_executor_job(api.get_power_plugs): - entities.append(SpiderPowerPlugEnergy(api, entity)) - entities.append(SpiderPowerPlugPower(api, entity)) - - async_add_entities(entities) - - -class SpiderPowerPlugEnergy(SensorEntity): - """Representation of a Spider Power Plug (energy).""" - - _attr_has_entity_name = True - _attr_translation_key = "total_energy_today" - _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR - _attr_device_class = SensorDeviceClass.ENERGY - _attr_state_class = SensorStateClass.TOTAL_INCREASING - - def __init__(self, api, power_plug) -> None: - """Initialize the Spider Power Plug.""" - self.api = api - self.power_plug = power_plug - - @property - def device_info(self) -> DeviceInfo: - """Return the device_info of the device.""" - return DeviceInfo( - identifiers={(DOMAIN, self.power_plug.id)}, - manufacturer=self.power_plug.manufacturer, - model=self.power_plug.model, - name=self.power_plug.name, - ) - - @property - def unique_id(self) -> str: - """Return the ID of this sensor.""" - return f"{self.power_plug.id}_total_energy_today" - - @property - def native_value(self) -> float: - """Return todays energy usage in Kwh.""" - return round(self.power_plug.today_energy_consumption / 1000, 2) - - def update(self) -> None: - """Get the latest data.""" - self.power_plug = self.api.get_power_plug(self.power_plug.id) - - -class SpiderPowerPlugPower(SensorEntity): - """Representation of a Spider Power Plug (power).""" - - _attr_has_entity_name = True - _attr_translation_key = "power_consumption" - _attr_device_class = SensorDeviceClass.POWER - _attr_state_class = SensorStateClass.MEASUREMENT - _attr_native_unit_of_measurement = UnitOfPower.WATT - - def __init__(self, api, power_plug) -> None: - """Initialize the Spider Power Plug.""" - self.api = api - self.power_plug = power_plug - - @property - def device_info(self) -> DeviceInfo: - """Return the device_info of the device.""" - return DeviceInfo( - identifiers={(DOMAIN, self.power_plug.id)}, - manufacturer=self.power_plug.manufacturer, - model=self.power_plug.model, - name=self.power_plug.name, - ) - - @property - def unique_id(self) -> str: - """Return the ID of this sensor.""" - return f"{self.power_plug.id}_power_consumption" - - @property - def native_value(self) -> float: - """Return the current power usage in W.""" - return round(self.power_plug.current_energy_consumption) - - def update(self) -> None: - """Get the latest data.""" - self.power_plug = self.api.get_power_plug(self.power_plug.id) diff --git a/homeassistant/components/spider/strings.json b/homeassistant/components/spider/strings.json index c8d67be36ae..338ae3aa762 100644 --- a/homeassistant/components/spider/strings.json +++ b/homeassistant/components/spider/strings.json @@ -1,30 +1,8 @@ { - "config": { - "step": { - "user": { - "title": "Sign-in with mijn.ithodaalderop.nl account", - "data": { - "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]" - } - } - }, - "error": { - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]" - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" - } - }, - "entity": { - "sensor": { - "power_consumption": { 
- "name": "Power consumption" - }, - "total_energy_today": { - "name": "Total energy today" - } + "issues": { + "integration_removed": { + "title": "The Spider integration has been removed", + "description": "The Spider integration has been removed from Home Assistant.\n\nItho daalderop has [discontinued]({link}) the Spider Connect System.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing Spider integration entries]({entries})." } } } diff --git a/homeassistant/components/spider/switch.py b/homeassistant/components/spider/switch.py deleted file mode 100644 index 63f0ec6cb69..00000000000 --- a/homeassistant/components/spider/switch.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Support for Spider switches.""" - -from typing import Any - -from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .const import DOMAIN - - -async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Initialize a Spider Power Plug.""" - api = hass.data[DOMAIN][config.entry_id] - async_add_entities( - [ - SpiderPowerPlug(api, entity) - for entity in await hass.async_add_executor_job(api.get_power_plugs) - ] - ) - - -class SpiderPowerPlug(SwitchEntity): - """Representation of a Spider Power Plug.""" - - _attr_has_entity_name = True - _attr_name = None - - def __init__(self, api, power_plug): - """Initialize the Spider Power Plug.""" - self.api = api - self.power_plug = power_plug - - @property - def device_info(self) -> DeviceInfo: - """Return the device_info of the device.""" - return DeviceInfo( - configuration_url="https://mijn.ithodaalderop.nl/", - identifiers={(DOMAIN, self.power_plug.id)}, - manufacturer=self.power_plug.manufacturer, - model=self.power_plug.model, - name=self.power_plug.name, - ) - - @property - def unique_id(self): - """Return the ID of this switch.""" - return self.power_plug.id - - @property - def is_on(self): - """Return true if switch is on. 
Standby is on.""" - return self.power_plug.is_on - - @property - def available(self) -> bool: - """Return true if switch is available.""" - return self.power_plug.is_available - - def turn_on(self, **kwargs: Any) -> None: - """Turn device on.""" - self.power_plug.turn_on() - - def turn_off(self, **kwargs: Any) -> None: - """Turn device off.""" - self.power_plug.turn_off() - - def update(self) -> None: - """Get the latest data.""" - self.power_plug = self.api.get_power_plug(self.power_plug.id) diff --git a/homeassistant/components/splunk/manifest.json b/homeassistant/components/splunk/manifest.json index 947af317b35..4b287c8950c 100644 --- a/homeassistant/components/splunk/manifest.json +++ b/homeassistant/components/splunk/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/splunk", "iot_class": "local_push", "loggers": ["hass_splunk"], + "quality_scale": "legacy", "requirements": ["hass-splunk==0.1.1"] } diff --git a/homeassistant/components/spotify/__init__.py b/homeassistant/components/spotify/__init__.py index becf90b04cd..37580ac432d 100644 --- a/homeassistant/components/spotify/__init__.py +++ b/homeassistant/components/spotify/__init__.py @@ -3,16 +3,16 @@ from __future__ import annotations from datetime import timedelta -from typing import Any +from typing import TYPE_CHECKING import aiohttp -import requests -from spotipy import Spotify, SpotifyException +from spotifyaio import Device, SpotifyClient, SpotifyConnectionError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.config_entry_oauth2_flow import ( OAuth2Session, async_get_config_entry_implementation, @@ -21,7 +21,8 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .browse_media import async_browse_media from .const import DOMAIN, LOGGER, SPOTIFY_SCOPES -from .models import HomeAssistantSpotifyData +from .coordinator import SpotifyConfigEntry, SpotifyCoordinator +from .models import SpotifyData from .util import ( is_spotify_media_type, resolve_spotify_media_type, @@ -39,9 +40,6 @@ __all__ = [ ] -type SpotifyConfigEntry = ConfigEntry[HomeAssistantSpotifyData] - - async def async_setup_entry(hass: HomeAssistant, entry: SpotifyConfigEntry) -> bool: """Set up Spotify from a config entry.""" implementation = await async_get_config_entry_implementation(hass, entry) @@ -52,52 +50,40 @@ async def async_setup_entry(hass: HomeAssistant, entry: SpotifyConfigEntry) -> b except aiohttp.ClientError as err: raise ConfigEntryNotReady from err - spotify = Spotify(auth=session.token["access_token"]) + spotify = SpotifyClient(async_get_clientsession(hass)) - try: - current_user = await hass.async_add_executor_job(spotify.me) - except SpotifyException as err: - raise ConfigEntryNotReady from err + spotify.authenticate(session.token[CONF_ACCESS_TOKEN]) - if not current_user: - raise ConfigEntryNotReady + async def _refresh_token() -> str: + await session.async_ensure_token_valid() + token = session.token[CONF_ACCESS_TOKEN] + if TYPE_CHECKING: + assert isinstance(token, str) + return token - async def _update_devices() -> list[dict[str, Any]]: - if not session.valid_token: - await session.async_ensure_token_valid() - await 
hass.async_add_executor_job( - spotify.set_auth, session.token["access_token"] - ) + spotify.refresh_token_function = _refresh_token + coordinator = SpotifyCoordinator(hass, spotify) + + await coordinator.async_config_entry_first_refresh() + + async def _update_devices() -> list[Device]: try: - devices: dict[str, Any] | None = await hass.async_add_executor_job( - spotify.devices - ) - except (requests.RequestException, SpotifyException) as err: + return await spotify.get_devices() + except SpotifyConnectionError as err: raise UpdateFailed from err - if devices is None: - return [] - - return devices.get("devices", []) - - device_coordinator: DataUpdateCoordinator[list[dict[str, Any]]] = ( - DataUpdateCoordinator( - hass, - LOGGER, - name=f"{entry.title} Devices", - update_interval=timedelta(minutes=5), - update_method=_update_devices, - ) + device_coordinator: DataUpdateCoordinator[list[Device]] = DataUpdateCoordinator( + hass, + LOGGER, + name=f"{entry.title} Devices", + config_entry=entry, + update_interval=timedelta(minutes=5), + update_method=_update_devices, ) await device_coordinator.async_config_entry_first_refresh() - entry.runtime_data = HomeAssistantSpotifyData( - client=spotify, - current_user=current_user, - devices=device_coordinator, - session=session, - ) + entry.runtime_data = SpotifyData(coordinator, session, device_coordinator) if not set(session.token["scope"].split(" ")).issuperset(SPOTIFY_SCOPES): raise ConfigEntryAuthFailed diff --git a/homeassistant/components/spotify/browse_media.py b/homeassistant/components/spotify/browse_media.py index abcb6df6205..81cdfdfb3cf 100644 --- a/homeassistant/components/spotify/browse_media.py +++ b/homeassistant/components/spotify/browse_media.py @@ -3,11 +3,18 @@ from __future__ import annotations from enum import StrEnum -from functools import partial import logging -from typing import Any +from typing import TYPE_CHECKING, Any, TypedDict -from spotipy import Spotify +from spotifyaio import ( + Artist, + BasePlaylist, + SimplifiedAlbum, + SimplifiedTrack, + SpotifyClient, + Track, +) +from spotifyaio.models import ItemType, SimplifiedEpisode import yarl from homeassistant.components.media_player import ( @@ -16,11 +23,10 @@ from homeassistant.components.media_player import ( MediaClass, MediaType, ) +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session from .const import DOMAIN, MEDIA_PLAYER_PREFIX, MEDIA_TYPE_SHOW, PLAYABLE_MEDIA_TYPES -from .models import HomeAssistantSpotifyData from .util import fetch_image_url BROWSE_LIMIT = 48 @@ -29,6 +35,72 @@ BROWSE_LIMIT = 48 _LOGGER = logging.getLogger(__name__) +class ItemPayload(TypedDict): + """TypedDict for item payload.""" + + name: str + type: str + uri: str + id: str | None + thumbnail: str | None + + +def _get_artist_item_payload(artist: Artist) -> ItemPayload: + return { + "id": artist.artist_id, + "name": artist.name, + "type": MediaType.ARTIST, + "uri": artist.uri, + "thumbnail": fetch_image_url(artist.images), + } + + +def _get_album_item_payload(album: SimplifiedAlbum) -> ItemPayload: + return { + "id": album.album_id, + "name": album.name, + "type": MediaType.ALBUM, + "uri": album.uri, + "thumbnail": fetch_image_url(album.images), + } + + +def _get_playlist_item_payload(playlist: BasePlaylist) -> ItemPayload: + return { + "id": playlist.playlist_id, + "name": playlist.name, + "type": MediaType.PLAYLIST, + "uri": playlist.uri, + "thumbnail": 
fetch_image_url(playlist.images), + } + + +def _get_track_item_payload( + track: SimplifiedTrack, show_thumbnails: bool = True +) -> ItemPayload: + return { + "id": track.track_id, + "name": track.name, + "type": MediaType.TRACK, + "uri": track.uri, + "thumbnail": ( + fetch_image_url(track.album.images) + if show_thumbnails and isinstance(track, Track) + else None + ), + } + + +def _get_episode_item_payload(episode: SimplifiedEpisode) -> ItemPayload: + return { + "id": episode.episode_id, + "name": episode.name, + "type": MediaType.EPISODE, + "uri": episode.uri, + "thumbnail": fetch_image_url(episode.images), + } + + class BrowsableMedia(StrEnum): """Enum of browsable media.""" @@ -40,8 +112,6 @@ class BrowsableMedia(StrEnum): CURRENT_USER_RECENTLY_PLAYED = "current_user_recently_played" CURRENT_USER_TOP_ARTISTS = "current_user_top_artists" CURRENT_USER_TOP_TRACKS = "current_user_top_tracks" - CATEGORIES = "categories" - FEATURED_PLAYLISTS = "featured_playlists" NEW_RELEASES = "new_releases" @@ -54,8 +124,6 @@ LIBRARY_MAP = { BrowsableMedia.CURRENT_USER_RECENTLY_PLAYED.value: "Recently played", BrowsableMedia.CURRENT_USER_TOP_ARTISTS.value: "Top Artists", BrowsableMedia.CURRENT_USER_TOP_TRACKS.value: "Top Tracks", - BrowsableMedia.CATEGORIES.value: "Categories", - BrowsableMedia.FEATURED_PLAYLISTS.value: "Featured Playlists", BrowsableMedia.NEW_RELEASES.value: "New Releases", } @@ -92,18 +160,6 @@ CONTENT_TYPE_MEDIA_CLASS: dict[str, Any] = { "parent": MediaClass.DIRECTORY, "children": MediaClass.TRACK, }, - BrowsableMedia.FEATURED_PLAYLISTS.value: { - "parent": MediaClass.DIRECTORY, - "children": MediaClass.PLAYLIST, - }, - BrowsableMedia.CATEGORIES.value: { - "parent": MediaClass.DIRECTORY, - "children": MediaClass.GENRE, - }, - "category_playlists": { - "parent": MediaClass.DIRECTORY, - "children": MediaClass.PLAYLIST, - }, BrowsableMedia.NEW_RELEASES.value: { "parent": MediaClass.DIRECTORY, "children": MediaClass.ALBUM, @@ -183,7 +239,7 @@ async def async_browse_media( or hass.config_entries.async_get_entry(host.upper()) ) is None - or not isinstance(entry.runtime_data, HomeAssistantSpotifyData) + or entry.state is not ConfigEntryState.LOADED ): raise BrowseError("Invalid Spotify account specified") media_content_id = parsed_url.name @@ -191,15 +247,13 @@ async def async_browse_media( result = await async_browse_media_internal( hass, - info.client, - info.session, - info.current_user, + info.coordinator.client, media_content_type, media_content_id, can_play_artist=can_play_artist, ) - # Build new URLs with config entry specifyers + # Build new URLs with config entry specifiers result.media_content_id = str(parsed_url.with_name(result.media_content_id)) if result.children: for child in result.children: @@ -209,9 +263,7 @@ async def async_browse_media( async def async_browse_media_internal( hass: HomeAssistant, - spotify: Spotify, - session: OAuth2Session, - current_user: dict[str, Any], + spotify: SpotifyClient, media_content_type: str | None, media_content_id: str | None, *, @@ -219,15 +271,7 @@ async def async_browse_media_internal( ) -> BrowseMedia: """Browse spotify media.""" if media_content_type in (None, f"{MEDIA_PLAYER_PREFIX}library"): - return await hass.async_add_executor_job( - partial(library_payload, can_play_artist=can_play_artist) - ) - - if not session.valid_token: - await session.async_ensure_token_valid() - await hass.async_add_executor_job( - spotify.set_auth, session.token["access_token"] - ) + return await library_payload(can_play_artist=can_play_artist) # Strip 
prefix if media_content_type: @@ -237,23 +281,18 @@ async def async_browse_media_internal( "media_content_type": media_content_type, "media_content_id": media_content_id, } - response = await hass.async_add_executor_job( - partial( - build_item_response, - spotify, - current_user, - payload, - can_play_artist=can_play_artist, - ) + response = await build_item_response( + spotify, + payload, + can_play_artist=can_play_artist, ) if response is None: raise BrowseError(f"Media not found: {media_content_type} / {media_content_id}") return response -def build_item_response( # noqa: C901 - spotify: Spotify, - user: dict[str, Any], +async def build_item_response( # noqa: C901 + spotify: SpotifyClient, payload: dict[str, str | None], *, can_play_artist: bool, @@ -265,80 +304,89 @@ def build_item_response( # noqa: C901 if media_content_type is None or media_content_id is None: return None - title = None - image = None - media: dict[str, Any] | None = None - items = [] + title: str | None = None + image: str | None = None + items: list[ItemPayload] = [] if media_content_type == BrowsableMedia.CURRENT_USER_PLAYLISTS: - if media := spotify.current_user_playlists(limit=BROWSE_LIMIT): - items = media.get("items", []) + if playlists := await spotify.get_playlists_for_current_user(): + items = [_get_playlist_item_payload(playlist) for playlist in playlists] elif media_content_type == BrowsableMedia.CURRENT_USER_FOLLOWED_ARTISTS: - if media := spotify.current_user_followed_artists(limit=BROWSE_LIMIT): - items = media.get("artists", {}).get("items", []) + if artists := await spotify.get_followed_artists(): + items = [_get_artist_item_payload(artist) for artist in artists] elif media_content_type == BrowsableMedia.CURRENT_USER_SAVED_ALBUMS: - if media := spotify.current_user_saved_albums(limit=BROWSE_LIMIT): - items = [item["album"] for item in media.get("items", [])] + if saved_albums := await spotify.get_saved_albums(): + items = [ + _get_album_item_payload(saved_album.album) + for saved_album in saved_albums + ] elif media_content_type == BrowsableMedia.CURRENT_USER_SAVED_TRACKS: - if media := spotify.current_user_saved_tracks(limit=BROWSE_LIMIT): - items = [item["track"] for item in media.get("items", [])] + if saved_tracks := await spotify.get_saved_tracks(): + items = [ + _get_track_item_payload(saved_track.track) + for saved_track in saved_tracks + ] elif media_content_type == BrowsableMedia.CURRENT_USER_SAVED_SHOWS: - if media := spotify.current_user_saved_shows(limit=BROWSE_LIMIT): - items = [item["show"] for item in media.get("items", [])] + if saved_shows := await spotify.get_saved_shows(): + items = [ + { + "id": saved_show.show.show_id, + "name": saved_show.show.name, + "type": MEDIA_TYPE_SHOW, + "uri": saved_show.show.uri, + "thumbnail": fetch_image_url(saved_show.show.images), + } + for saved_show in saved_shows + ] elif media_content_type == BrowsableMedia.CURRENT_USER_RECENTLY_PLAYED: - if media := spotify.current_user_recently_played(limit=BROWSE_LIMIT): - items = [item["track"] for item in media.get("items", [])] + if recently_played_tracks := await spotify.get_recently_played_tracks(): + items = [ + _get_track_item_payload(item.track) for item in recently_played_tracks + ] elif media_content_type == BrowsableMedia.CURRENT_USER_TOP_ARTISTS: - if media := spotify.current_user_top_artists(limit=BROWSE_LIMIT): - items = media.get("items", []) + if top_artists := await spotify.get_top_artists(): + items = [_get_artist_item_payload(artist) for artist in top_artists] elif media_content_type == 
BrowsableMedia.CURRENT_USER_TOP_TRACKS: - if media := spotify.current_user_top_tracks(limit=BROWSE_LIMIT): - items = media.get("items", []) - elif media_content_type == BrowsableMedia.FEATURED_PLAYLISTS: - if media := spotify.featured_playlists( - country=user["country"], limit=BROWSE_LIMIT - ): - items = media.get("playlists", {}).get("items", []) - elif media_content_type == BrowsableMedia.CATEGORIES: - if media := spotify.categories(country=user["country"], limit=BROWSE_LIMIT): - items = media.get("categories", {}).get("items", []) - elif media_content_type == "category_playlists": - if ( - media := spotify.category_playlists( - category_id=media_content_id, - country=user["country"], - limit=BROWSE_LIMIT, - ) - ) and (category := spotify.category(media_content_id, country=user["country"])): - title = category.get("name") - image = fetch_image_url(category, key="icons") - items = media.get("playlists", {}).get("items", []) + if top_tracks := await spotify.get_top_tracks(): + items = [_get_track_item_payload(track) for track in top_tracks] elif media_content_type == BrowsableMedia.NEW_RELEASES: - if media := spotify.new_releases(country=user["country"], limit=BROWSE_LIMIT): - items = media.get("albums", {}).get("items", []) + if new_releases := await spotify.get_new_releases(): + items = [_get_album_item_payload(album) for album in new_releases] elif media_content_type == MediaType.PLAYLIST: - if media := spotify.playlist(media_content_id): - items = [item["track"] for item in media.get("tracks", {}).get("items", [])] + if playlist := await spotify.get_playlist(media_content_id): + title = playlist.name + image = playlist.images[0].url if playlist.images else None + for playlist_item in playlist.tracks.items: + if playlist_item.track.type is ItemType.TRACK: + if TYPE_CHECKING: + assert isinstance(playlist_item.track, Track) + items.append(_get_track_item_payload(playlist_item.track)) + elif playlist_item.track.type is ItemType.EPISODE: + if TYPE_CHECKING: + assert isinstance(playlist_item.track, SimplifiedEpisode) + items.append(_get_episode_item_payload(playlist_item.track)) elif media_content_type == MediaType.ALBUM: - if media := spotify.album(media_content_id): - items = media.get("tracks", {}).get("items", []) + if album := await spotify.get_album(media_content_id): + title = album.name + image = album.images[0].url if album.images else None + items = [ + _get_track_item_payload(track, show_thumbnails=False) + for track in album.tracks + ] elif media_content_type == MediaType.ARTIST: - if (media := spotify.artist_albums(media_content_id, limit=BROWSE_LIMIT)) and ( - artist := spotify.artist(media_content_id) + if (artist_albums := await spotify.get_artist_albums(media_content_id)) and ( + artist := await spotify.get_artist(media_content_id) ): - title = artist.get("name") - image = fetch_image_url(artist) - items = media.get("items", []) + title = artist.name + image = artist.images[0].url if artist.images else None + items = [_get_album_item_payload(album) for album in artist_albums] elif media_content_type == MEDIA_TYPE_SHOW: - if (media := spotify.show_episodes(media_content_id, limit=BROWSE_LIMIT)) and ( - show := spotify.show(media_content_id) + if (show_episodes := await spotify.get_show_episodes(media_content_id)) and ( + show := await spotify.get_show(media_content_id) ): - title = show.get("name") - image = fetch_image_url(show) - items = media.get("items", []) - - if media is None: - return None + title = show.name + image = show.images[0].url if show.images else None + 
items = [_get_episode_item_payload(episode) for episode in show_episodes] try: media_class = CONTENT_TYPE_MEDIA_CLASS[media_content_type] @@ -346,47 +394,15 @@ def build_item_response( # noqa: C901 _LOGGER.debug("Unknown media type received: %s", media_content_type) return None - if media_content_type == BrowsableMedia.CATEGORIES: - media_item = BrowseMedia( - can_expand=True, - can_play=False, - children_media_class=media_class["children"], - media_class=media_class["parent"], - media_content_id=media_content_id, - media_content_type=f"{MEDIA_PLAYER_PREFIX}{media_content_type}", - title=LIBRARY_MAP.get(media_content_id, "Unknown"), - ) - - media_item.children = [] - for item in items: - try: - item_id = item["id"] - except KeyError: - _LOGGER.debug("Missing ID for media item: %s", item) - continue - media_item.children.append( - BrowseMedia( - can_expand=True, - can_play=False, - children_media_class=MediaClass.TRACK, - media_class=MediaClass.PLAYLIST, - media_content_id=item_id, - media_content_type=f"{MEDIA_PLAYER_PREFIX}category_playlists", - thumbnail=fetch_image_url(item, key="icons"), - title=item.get("name"), - ) - ) - return media_item - if title is None: title = LIBRARY_MAP.get(media_content_id, "Unknown") - if "name" in media: - title = media["name"] can_play = media_content_type in PLAYABLE_MEDIA_TYPES and ( media_content_type != MediaType.ARTIST or can_play_artist ) + if TYPE_CHECKING: + assert title browse_media = BrowseMedia( can_expand=True, can_play=can_play, @@ -407,23 +423,16 @@ def build_item_response( # noqa: C901 except (MissingMediaInformation, UnknownMediaType): continue - if "images" in media: - browse_media.thumbnail = fetch_image_url(media) - return browse_media -def item_payload(item: dict[str, Any], *, can_play_artist: bool) -> BrowseMedia: +def item_payload(item: ItemPayload, *, can_play_artist: bool) -> BrowseMedia: """Create response payload for a single media item. Used by async_browse_media. """ - try: - media_type = item["type"] - media_id = item["uri"] - except KeyError as err: - _LOGGER.debug("Missing type or URI for media item: %s", item) - raise MissingMediaInformation from err + media_type = item["type"] + media_id = item["uri"] try: media_class = CONTENT_TYPE_MEDIA_CLASS[media_type] @@ -440,25 +449,19 @@ def item_payload(item: dict[str, Any], *, can_play_artist: bool) -> BrowseMedia: media_type != MediaType.ARTIST or can_play_artist ) - browse_media = BrowseMedia( + return BrowseMedia( can_expand=can_expand, can_play=can_play, children_media_class=media_class["children"], media_class=media_class["parent"], media_content_id=media_id, media_content_type=f"{MEDIA_PLAYER_PREFIX}{media_type}", - title=item.get("name", "Unknown"), + title=item["name"], + thumbnail=item["thumbnail"], ) - if "images" in item: - browse_media.thumbnail = fetch_image_url(item) - elif MediaType.ALBUM in item: - browse_media.thumbnail = fetch_image_url(item[MediaType.ALBUM]) - return browse_media - - -def library_payload(*, can_play_artist: bool) -> BrowseMedia: +async def library_payload(*, can_play_artist: bool) -> BrowseMedia: """Create response payload to describe contents of a specific library. Used by async_browse_media. 
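An illustrative sketch, not part of the patch: the Spotify changes in this diff repeatedly swap blocking spotipy calls that were wrapped in hass.async_add_executor_job (with manual set_auth re-authentication) for awaited spotifyaio calls plus a refresh_token_function callback. The following minimal, self-contained Python sketch shows that general shape; LegacyClient, AsyncClient and fetch_token are hypothetical stand-ins, not real library APIs.

# Sketch of the sync-to-async client migration pattern used in this diff.
# LegacyClient, AsyncClient and fetch_token are hypothetical stand-ins.
import asyncio


class LegacyClient:
    """Blocking client in the style of spotipy: sync calls, manual re-auth."""

    def __init__(self, auth: str) -> None:
        self._auth = auth

    def set_auth(self, auth: str) -> None:
        self._auth = auth

    def current_user(self) -> dict:
        return {"id": "user", "display_name": "User"}


class AsyncClient:
    """Async client in the style of spotifyaio: awaitable calls, refresh hook."""

    def __init__(self) -> None:
        self.refresh_token_function = None

    async def get_current_user(self) -> dict:
        # The client keeps its token fresh via the supplied callback,
        # so callers no longer juggle re-authentication themselves.
        if self.refresh_token_function is not None:
            await self.refresh_token_function()
        return {"id": "user", "display_name": "User"}


async def fetch_token() -> str:
    return "new-access-token"


async def main() -> None:
    # Old pattern: push blocking work to the executor and re-auth by hand.
    loop = asyncio.get_running_loop()
    legacy = LegacyClient("token")
    await loop.run_in_executor(None, legacy.set_auth, "new-access-token")
    print(await loop.run_in_executor(None, legacy.current_user))

    # New pattern: await the client directly and hand it a refresh callback.
    client = AsyncClient()
    client.refresh_token_function = fetch_token
    print(await client.get_current_user())


asyncio.run(main())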
@@ -474,10 +477,16 @@ def library_payload(*, can_play_artist: bool) -> BrowseMedia: ) browse_media.children = [] - for item in [{"name": n, "type": t} for t, n in LIBRARY_MAP.items()]: + for item_type, item_name in LIBRARY_MAP.items(): browse_media.children.append( item_payload( - {"name": item["name"], "type": item["type"], "uri": item["type"]}, + { + "name": item_name, + "type": item_type, + "uri": item_type, + "id": None, + "thumbnail": None, + }, can_play_artist=can_play_artist, ) ) diff --git a/homeassistant/components/spotify/config_flow.py b/homeassistant/components/spotify/config_flow.py index 58c7e612a35..d99fa7793df 100644 --- a/homeassistant/components/spotify/config_flow.py +++ b/homeassistant/components/spotify/config_flow.py @@ -6,10 +6,12 @@ from collections.abc import Mapping import logging from typing import Any -from spotipy import Spotify +from spotifyaio import SpotifyClient -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN, SPOTIFY_SCOPES @@ -22,8 +24,6 @@ class SpotifyFlowHandler( DOMAIN = DOMAIN VERSION = 1 - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -36,50 +36,43 @@ class SpotifyFlowHandler( async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for Spotify.""" - spotify = Spotify(auth=data["token"]["access_token"]) + spotify = SpotifyClient(async_get_clientsession(self.hass)) + spotify.authenticate(data[CONF_TOKEN][CONF_ACCESS_TOKEN]) try: - current_user = await self.hass.async_add_executor_job(spotify.current_user) + current_user = await spotify.get_current_user() except Exception: # noqa: BLE001 return self.async_abort(reason="connection_error") - name = data["id"] = current_user["id"] + name = current_user.display_name - if self.reauth_entry and self.reauth_entry.data["id"] != current_user["id"]: - return self.async_abort(reason="reauth_account_mismatch") + await self.async_set_unique_id(current_user.user_id) - if current_user.get("display_name"): - name = current_user["display_name"] - data["name"] = name - - await self.async_set_unique_id(current_user["id"]) - - return self.async_create_entry(title=name, data=data) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="reauth_account_mismatch") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), title=name, data=data + ) + return self.async_create_entry(title=name, data={**data, CONF_NAME: name}) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon migration of old entries.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm reauth dialog.""" - if self.reauth_entry is None: - return self.async_abort(reason="reauth_account_mismatch") - - if user_input is None and self.reauth_entry: + reauth_entry = self._get_reauth_entry() + if user_input is None: return self.async_show_form( step_id="reauth_confirm", - description_placeholders={"account": 
self.reauth_entry.data["id"]}, + description_placeholders={"account": reauth_entry.data["id"]}, errors={}, ) return await self.async_step_pick_implementation( - user_input={"implementation": self.reauth_entry.data["auth_implementation"]} + user_input={"implementation": reauth_entry.data["auth_implementation"]} ) diff --git a/homeassistant/components/spotify/coordinator.py b/homeassistant/components/spotify/coordinator.py new file mode 100644 index 00000000000..099b1cb3ca8 --- /dev/null +++ b/homeassistant/components/spotify/coordinator.py @@ -0,0 +1,128 @@ +"""Coordinator for Spotify.""" + +from dataclasses import dataclass +from datetime import datetime, timedelta +import logging +from typing import TYPE_CHECKING + +from spotifyaio import ( + ContextType, + PlaybackState, + Playlist, + SpotifyClient, + SpotifyConnectionError, + SpotifyNotFoundError, + UserProfile, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +import homeassistant.util.dt as dt_util + +from .const import DOMAIN + +if TYPE_CHECKING: + from .models import SpotifyData + +_LOGGER = logging.getLogger(__name__) + + +type SpotifyConfigEntry = ConfigEntry[SpotifyData] + + +@dataclass +class SpotifyCoordinatorData: + """Class to hold Spotify data.""" + + current_playback: PlaybackState | None + position_updated_at: datetime | None + playlist: Playlist | None + dj_playlist: bool = False + + +# This is a minimal representation of the DJ playlist that Spotify now offers +# The DJ is not fully integrated with the playlist API, so we need to guard +# against trying to fetch it as a regular playlist +SPOTIFY_DJ_PLAYLIST_URI = "spotify:playlist:37i9dQZF1EYkqdzj48dyYq" + + +class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): + """Class to manage fetching Spotify data.""" + + current_user: UserProfile + config_entry: SpotifyConfigEntry + + def __init__(self, hass: HomeAssistant, client: SpotifyClient) -> None: + """Initialize.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=timedelta(seconds=30), + ) + self.client = client + self._playlist: Playlist | None = None + self._checked_playlist_id: str | None = None + + async def _async_setup(self) -> None: + """Set up the coordinator.""" + try: + self.current_user = await self.client.get_current_user() + except SpotifyConnectionError as err: + raise UpdateFailed("Error communicating with Spotify API") from err + + async def _async_update_data(self) -> SpotifyCoordinatorData: + try: + current = await self.client.get_playback() + except SpotifyConnectionError as err: + raise UpdateFailed("Error communicating with Spotify API") from err + if not current: + return SpotifyCoordinatorData( + current_playback=None, + position_updated_at=None, + playlist=None, + ) + # Record the last updated time, because Spotify's timestamp property is unreliable + # and doesn't actually return the fetch time as is mentioned in the API description + position_updated_at = dt_util.utcnow() + + dj_playlist = False + if (context := current.context) is not None: + dj_playlist = context.uri == SPOTIFY_DJ_PLAYLIST_URI + if not ( + context.uri + in ( + self._checked_playlist_id, + SPOTIFY_DJ_PLAYLIST_URI, + ) + or (self._playlist is None and context.uri == self._checked_playlist_id) + ): + self._checked_playlist_id = context.uri + self._playlist = None + if context.context_type == ContextType.PLAYLIST: + # Make sure any playlist 
lookups don't break the current + # playback state update + try: + self._playlist = await self.client.get_playlist(context.uri) + except SpotifyNotFoundError: + _LOGGER.debug( + "Spotify playlist '%s' not found. " + "Most likely a Spotify-created playlist", + context.uri, + ) + self._playlist = None + except SpotifyConnectionError: + _LOGGER.debug( + "Unable to load spotify playlist '%s'. " + "Continuing without playlist data", + context.uri, + ) + self._playlist = None + self._checked_playlist_id = None + return SpotifyCoordinatorData( + current_playback=current, + position_updated_at=position_updated_at, + playlist=self._playlist, + dj_playlist=dj_playlist, + ) diff --git a/homeassistant/components/spotify/diagnostics.py b/homeassistant/components/spotify/diagnostics.py new file mode 100644 index 00000000000..82ce40eb22a --- /dev/null +++ b/homeassistant/components/spotify/diagnostics.py @@ -0,0 +1,21 @@ +"""Diagnostics support for Spotify.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import Any + +from homeassistant.core import HomeAssistant + +from .coordinator import SpotifyConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: SpotifyConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + return { + "playback": asdict(entry.runtime_data.coordinator.data), + "devices": [asdict(dev) for dev in entry.runtime_data.devices.data], + } diff --git a/homeassistant/components/spotify/entity.py b/homeassistant/components/spotify/entity.py new file mode 100644 index 00000000000..6ab82977089 --- /dev/null +++ b/homeassistant/components/spotify/entity.py @@ -0,0 +1,25 @@ +"""Base entity for Spotify.""" + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import SpotifyCoordinator + + +class SpotifyEntity(CoordinatorEntity[SpotifyCoordinator]): + """Defines a base Spotify entity.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: SpotifyCoordinator) -> None: + """Initialize the Spotify entity.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.current_user.user_id)}, + manufacturer="Spotify AB", + model=f"Spotify {coordinator.current_user.product}", + name=f"Spotify {coordinator.config_entry.title}", + entry_type=DeviceEntryType.SERVICE, + configuration_url="https://open.spotify.com", + ) diff --git a/homeassistant/components/spotify/manifest.json b/homeassistant/components/spotify/manifest.json index 84f2bc102e3..27b8da7cecf 100644 --- a/homeassistant/components/spotify/manifest.json +++ b/homeassistant/components/spotify/manifest.json @@ -7,8 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/spotify", "integration_type": "service", "iot_class": "cloud_polling", - "loggers": ["spotipy"], - "quality_scale": "silver", - "requirements": ["spotipy==2.23.0"], + "loggers": ["spotifyaio"], + "requirements": ["spotifyaio==0.8.11"], "zeroconf": ["_spotify-connect._tcp.local."] } diff --git a/homeassistant/components/spotify/media_player.py b/homeassistant/components/spotify/media_player.py index 3653bdb149a..20a634efb42 100644 --- a/homeassistant/components/spotify/media_player.py +++ b/homeassistant/components/spotify/media_player.py @@ -2,14 +2,22 @@ from __future__ import annotations -from asyncio import run_coroutine_threadsafe -from collections.abc 
import Callable -from datetime import timedelta +import asyncio +from collections.abc import Awaitable, Callable, Coroutine +import datetime as dt import logging -from typing import Any, Concatenate +from typing import TYPE_CHECKING, Any, Concatenate -import requests -from spotipy import SpotifyException +from spotifyaio import ( + Device, + Episode, + Item, + ItemType, + PlaybackState, + ProductType, + RepeatMode as SpotifyRepeatMode, + Track, +) from yarl import URL from homeassistant.components.media_player import ( @@ -22,23 +30,17 @@ from homeassistant.components.media_player import ( MediaType, RepeatMode, ) -from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.dt import utcnow +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . import SpotifyConfigEntry from .browse_media import async_browse_media_internal -from .const import DOMAIN, MEDIA_PLAYER_PREFIX, PLAYABLE_MEDIA_TYPES -from .models import HomeAssistantSpotifyData -from .util import fetch_image_url +from .const import MEDIA_PLAYER_PREFIX, PLAYABLE_MEDIA_TYPES +from .coordinator import SpotifyConfigEntry, SpotifyCoordinator +from .entity import SpotifyEntity _LOGGER = logging.getLogger(__name__) -SCAN_INTERVAL = timedelta(seconds=30) - SUPPORT_SPOTIFY = ( MediaPlayerEntityFeature.BROWSE_MEDIA | MediaPlayerEntityFeature.NEXT_TRACK @@ -54,18 +56,15 @@ SUPPORT_SPOTIFY = ( ) REPEAT_MODE_MAPPING_TO_HA = { - "context": RepeatMode.ALL, - "off": RepeatMode.OFF, - "track": RepeatMode.ONE, + SpotifyRepeatMode.CONTEXT: RepeatMode.ALL, + SpotifyRepeatMode.OFF: RepeatMode.OFF, + SpotifyRepeatMode.TRACK: RepeatMode.ONE, } REPEAT_MODE_MAPPING_TO_SPOTIFY = { value: key for key, value in REPEAT_MODE_MAPPING_TO_HA.items() } - -# This is a minimal representation of the DJ playlist that Spotify now offers -# The DJ is not fully integrated with the playlist API, so needs to have the playlist response mocked in order to maintain functionality -SPOTIFY_DJ_PLAYLIST = {"uri": "spotify:playlist:37i9dQZF1EYkqdzj48dyYq", "name": "DJ"} +AFTER_REQUEST_SLEEP = 1 async def async_setup_entry( @@ -74,262 +73,247 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Spotify based on a config entry.""" + data = entry.runtime_data + assert entry.unique_id is not None spotify = SpotifyMediaPlayer( - entry.runtime_data, - entry.data[CONF_ID], - entry.title, + data.coordinator, + data.devices, ) - async_add_entities([spotify], True) + async_add_entities([spotify]) -def spotify_exception_handler[_SpotifyMediaPlayerT: SpotifyMediaPlayer, **_P, _R]( - func: Callable[Concatenate[_SpotifyMediaPlayerT, _P], _R], -) -> Callable[Concatenate[_SpotifyMediaPlayerT, _P], _R | None]: - """Decorate Spotify calls to handle Spotify exception. +def ensure_item[_R]( + func: Callable[[SpotifyMediaPlayer, Item], _R], +) -> Callable[[SpotifyMediaPlayer], _R | None]: + """Ensure that the currently playing item is available.""" - A decorator that wraps the passed in function, catches Spotify errors, - aiohttp exceptions and handles the availability of the media player. 
- """ - - def wrapper( - self: _SpotifyMediaPlayerT, *args: _P.args, **kwargs: _P.kwargs - ) -> _R | None: - try: - result = func(self, *args, **kwargs) - except requests.RequestException: - self._attr_available = False + def wrapper(self: SpotifyMediaPlayer) -> _R | None: + if not self.currently_playing or not self.currently_playing.item: return None - except SpotifyException as exc: - self._attr_available = False - if exc.reason == "NO_ACTIVE_DEVICE": - raise HomeAssistantError("No active playback device found") from None - raise HomeAssistantError(f"Spotify error: {exc.reason}") from exc - self._attr_available = True - return result + return func(self, self.currently_playing.item) return wrapper -class SpotifyMediaPlayer(MediaPlayerEntity): +def async_refresh_after[_T: SpotifyEntity, **_P]( + func: Callable[Concatenate[_T, _P], Awaitable[None]], +) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]: + """Define a wrapper to yield and refresh after.""" + + async def _async_wrap(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None: + await func(self, *args, **kwargs) + await asyncio.sleep(AFTER_REQUEST_SLEEP) + await self.coordinator.async_refresh() + + return _async_wrap + + +class SpotifyMediaPlayer(SpotifyEntity, MediaPlayerEntity): """Representation of a Spotify controller.""" - _attr_has_entity_name = True _attr_media_image_remotely_accessible = False _attr_name = None _attr_translation_key = "spotify" def __init__( self, - data: HomeAssistantSpotifyData, - user_id: str, - name: str, + coordinator: SpotifyCoordinator, + device_coordinator: DataUpdateCoordinator[list[Device]], ) -> None: """Initialize.""" - self._id = user_id - self.data = data + super().__init__(coordinator) + self.devices = device_coordinator + self._attr_unique_id = coordinator.current_user.user_id - self._attr_unique_id = user_id - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, user_id)}, - manufacturer="Spotify AB", - model=f"Spotify {data.current_user['product']}", - name=f"Spotify {name}", - entry_type=DeviceEntryType.SERVICE, - configuration_url="https://open.spotify.com", - ) - self._currently_playing: dict | None = {} - self._playlist: dict | None = None - self._restricted_device: bool = False + @property + def currently_playing(self) -> PlaybackState | None: + """Return the current playback.""" + return self.coordinator.data.current_playback @property def supported_features(self) -> MediaPlayerEntityFeature: """Return the supported features.""" - if self.data.current_user["product"] != "premium": + if self.coordinator.current_user.product != ProductType.PREMIUM: return MediaPlayerEntityFeature(0) - if self._restricted_device or not self._currently_playing: + if not self.currently_playing or self.currently_playing.device.is_restricted: return MediaPlayerEntityFeature.SELECT_SOURCE return SUPPORT_SPOTIFY @property def state(self) -> MediaPlayerState: """Return the playback state.""" - if not self._currently_playing: + if not self.currently_playing: return MediaPlayerState.IDLE - if self._currently_playing["is_playing"]: + if self.currently_playing.is_playing: return MediaPlayerState.PLAYING return MediaPlayerState.PAUSED @property def volume_level(self) -> float | None: """Return the device volume.""" - if not self._currently_playing: + if not self.currently_playing: return None - return self._currently_playing.get("device", {}).get("volume_percent", 0) / 100 + return self.currently_playing.device.volume_percent / 100 @property - def media_content_id(self) -> str | None: + 
@ensure_item + def media_content_id(self, item: Item) -> str: # noqa: PLR0206 """Return the media URL.""" - if not self._currently_playing: - return None - item = self._currently_playing.get("item") or {} - return item.get("uri") + return item.uri @property - def media_content_type(self) -> str | None: + @ensure_item + def media_content_type(self, item: Item) -> str: # noqa: PLR0206 """Return the media type.""" - if not self._currently_playing: - return None - item = self._currently_playing.get("item") or {} - is_episode = item.get("type") == MediaType.EPISODE - return MediaType.PODCAST if is_episode else MediaType.MUSIC + return MediaType.PODCAST if item.type == ItemType.EPISODE else MediaType.MUSIC @property - def media_duration(self) -> int | None: + @ensure_item + def media_duration(self, item: Item) -> int: # noqa: PLR0206 """Duration of current playing media in seconds.""" - if ( - self._currently_playing is None - or self._currently_playing.get("item") is None - ): - return None - return self._currently_playing["item"]["duration_ms"] / 1000 + return round(item.duration_ms / 1000) @property def media_position(self) -> int | None: """Position of current playing media in seconds.""" - if ( - not self._currently_playing - or self._currently_playing.get("progress_ms") is None - ): + if not self.currently_playing or self.currently_playing.progress_ms is None: return None - return self._currently_playing["progress_ms"] / 1000 + return round(self.currently_playing.progress_ms / 1000) @property - def media_image_url(self) -> str | None: + def media_position_updated_at(self) -> dt.datetime | None: + """When was the position of the current playing media valid.""" + if not self.currently_playing: + return None + return self.coordinator.data.position_updated_at + + @property + @ensure_item + def media_image_url(self, item: Item) -> str | None: # noqa: PLR0206 """Return the media image URL.""" - if not self._currently_playing or self._currently_playing.get("item") is None: + if item.type == ItemType.EPISODE: + if TYPE_CHECKING: + assert isinstance(item, Episode) + if item.images: + return item.images[0].url + if item.show and item.show.images: + return item.show.images[0].url return None - - item = self._currently_playing["item"] - if item["type"] == MediaType.EPISODE: - if item["images"]: - return fetch_image_url(item) - if item["show"]["images"]: - return fetch_image_url(item["show"]) + if TYPE_CHECKING: + assert isinstance(item, Track) + if not item.album.images: return None - - if not item["album"]["images"]: - return None - return fetch_image_url(item["album"]) + return item.album.images[0].url @property - def media_title(self) -> str | None: + @ensure_item + def media_title(self, item: Item) -> str: # noqa: PLR0206 """Return the media title.""" - if not self._currently_playing: - return None - item = self._currently_playing.get("item") or {} - return item.get("name") + return item.name @property - def media_artist(self) -> str | None: + @ensure_item + def media_artist(self, item: Item) -> str: # noqa: PLR0206 """Return the media artist.""" - if not self._currently_playing or self._currently_playing.get("item") is None: - return None + if item.type == ItemType.EPISODE: + if TYPE_CHECKING: + assert isinstance(item, Episode) + return item.show.publisher - item = self._currently_playing["item"] - if item["type"] == MediaType.EPISODE: - return item["show"]["publisher"] - - return ", ".join(artist["name"] for artist in item["artists"]) + if TYPE_CHECKING: + assert isinstance(item, Track) + return 
", ".join(artist.name for artist in item.artists) @property - def media_album_name(self) -> str | None: + @ensure_item + def media_album_name(self, item: Item) -> str: # noqa: PLR0206 """Return the media album.""" - if not self._currently_playing or self._currently_playing.get("item") is None: - return None + if item.type == ItemType.EPISODE: + if TYPE_CHECKING: + assert isinstance(item, Episode) + return item.show.name - item = self._currently_playing["item"] - if item["type"] == MediaType.EPISODE: - return item["show"]["name"] - - return item["album"]["name"] + if TYPE_CHECKING: + assert isinstance(item, Track) + return item.album.name @property - def media_track(self) -> int | None: + @ensure_item + def media_track(self, item: Item) -> int | None: # noqa: PLR0206 """Track number of current playing media, music track only.""" - if not self._currently_playing: + if item.type == ItemType.EPISODE: return None - item = self._currently_playing.get("item") or {} - return item.get("track_number") + if TYPE_CHECKING: + assert isinstance(item, Track) + return item.track_number @property - def media_playlist(self): + def media_playlist(self) -> str | None: """Title of Playlist currently playing.""" - if self._playlist is None: + if self.coordinator.data.dj_playlist: + return "DJ" + if self.coordinator.data.playlist is None: return None - return self._playlist["name"] + return self.coordinator.data.playlist.name @property def source(self) -> str | None: """Return the current playback device.""" - if not self._currently_playing: + if not self.currently_playing: return None - return self._currently_playing.get("device", {}).get("name") + return self.currently_playing.device.name @property def source_list(self) -> list[str] | None: """Return a list of source devices.""" - return [device["name"] for device in self.data.devices.data] + return [device.name for device in self.devices.data] @property def shuffle(self) -> bool | None: """Shuffling state.""" - if not self._currently_playing: + if not self.currently_playing: return None - return self._currently_playing.get("shuffle_state") + return self.currently_playing.shuffle @property def repeat(self) -> RepeatMode | None: """Return current repeat mode.""" - if ( - not self._currently_playing - or (repeat_state := self._currently_playing.get("repeat_state")) is None - ): + if not self.currently_playing: return None - return REPEAT_MODE_MAPPING_TO_HA.get(repeat_state) + return REPEAT_MODE_MAPPING_TO_HA.get(self.currently_playing.repeat_mode) - @spotify_exception_handler - def set_volume_level(self, volume: float) -> None: + @async_refresh_after + async def async_set_volume_level(self, volume: float) -> None: """Set the volume level.""" - self.data.client.volume(int(volume * 100)) + await self.coordinator.client.set_volume(int(volume * 100)) - @spotify_exception_handler - def media_play(self) -> None: + @async_refresh_after + async def async_media_play(self) -> None: """Start or resume playback.""" - self.data.client.start_playback() + await self.coordinator.client.start_playback() - @spotify_exception_handler - def media_pause(self) -> None: + @async_refresh_after + async def async_media_pause(self) -> None: """Pause playback.""" - self.data.client.pause_playback() + await self.coordinator.client.pause_playback() - @spotify_exception_handler - def media_previous_track(self) -> None: + @async_refresh_after + async def async_media_previous_track(self) -> None: """Skip to previous track.""" - self.data.client.previous_track() + await 
self.coordinator.client.previous_track() - @spotify_exception_handler - def media_next_track(self) -> None: + @async_refresh_after + async def async_media_next_track(self) -> None: """Skip to next track.""" - self.data.client.next_track() + await self.coordinator.client.next_track() - @spotify_exception_handler - def media_seek(self, position: float) -> None: + @async_refresh_after + async def async_media_seek(self, position: float) -> None: """Send seek command.""" - self.data.client.seek_track(int(position * 1000)) + await self.coordinator.client.seek_track(int(position * 1000)) - @spotify_exception_handler - def play_media( + @async_refresh_after + async def async_play_media( self, media_type: MediaType | str, media_id: str, **kwargs: Any ) -> None: """Play media.""" @@ -353,12 +337,8 @@ class SpotifyMediaPlayer(MediaPlayerEntity): _LOGGER.error("Media type %s is not supported", media_type) return - if ( - self._currently_playing - and not self._currently_playing.get("device") - and self.data.devices.data - ): - kwargs["device_id"] = self.data.devices.data[0].get("id") + if not self.currently_playing and self.devices.data: + kwargs["device_id"] = self.devices.data[0].device_id if enqueue == MediaPlayerEnqueue.ADD: if media_type not in { @@ -369,84 +349,34 @@ class SpotifyMediaPlayer(MediaPlayerEntity): raise ValueError( f"Media type {media_type} is not supported when enqueue is ADD" ) - self.data.client.add_to_queue(media_id, kwargs.get("device_id")) + await self.coordinator.client.add_to_queue( + media_id, kwargs.get("device_id") + ) return - self.data.client.start_playback(**kwargs) + await self.coordinator.client.start_playback(**kwargs) - @spotify_exception_handler - def select_source(self, source: str) -> None: + @async_refresh_after + async def async_select_source(self, source: str) -> None: """Select playback device.""" - for device in self.data.devices.data: - if device["name"] == source: - self.data.client.transfer_playback( - device["id"], self.state == MediaPlayerState.PLAYING - ) + for device in self.devices.data: + if device.name == source: + if TYPE_CHECKING: + assert device.device_id is not None + await self.coordinator.client.transfer_playback(device.device_id) return - @spotify_exception_handler - def set_shuffle(self, shuffle: bool) -> None: + @async_refresh_after + async def async_set_shuffle(self, shuffle: bool) -> None: """Enable/Disable shuffle mode.""" - self.data.client.shuffle(shuffle) + await self.coordinator.client.set_shuffle(state=shuffle) - @spotify_exception_handler - def set_repeat(self, repeat: RepeatMode) -> None: + @async_refresh_after + async def async_set_repeat(self, repeat: RepeatMode) -> None: """Set repeat mode.""" if repeat not in REPEAT_MODE_MAPPING_TO_SPOTIFY: raise ValueError(f"Unsupported repeat mode: {repeat}") - self.data.client.repeat(REPEAT_MODE_MAPPING_TO_SPOTIFY[repeat]) - - @spotify_exception_handler - def update(self) -> None: - """Update state and attributes.""" - if not self.enabled: - return - - if not self.data.session.valid_token or self.data.client is None: - run_coroutine_threadsafe( - self.data.session.async_ensure_token_valid(), self.hass.loop - ).result() - self.data.client.set_auth(auth=self.data.session.token["access_token"]) - - current = self.data.client.current_playback( - additional_types=[MediaType.EPISODE] - ) - self._currently_playing = current or {} - # Record the last updated time, because Spotify's timestamp property is unreliable - # and doesn't actually return the fetch time as is mentioned in the API 
description - self._attr_media_position_updated_at = utcnow() if current is not None else None - - context = self._currently_playing.get("context") or {} - - # For some users in some cases, the uri is formed like - # "spotify:user:{name}:playlist:{id}" and spotipy wants - # the type to be playlist. - uri = context.get("uri") - if uri is not None: - parts = uri.split(":") - if len(parts) == 5 and parts[1] == "user" and parts[3] == "playlist": - uri = ":".join([parts[0], parts[3], parts[4]]) - - if context and (self._playlist is None or self._playlist["uri"] != uri): - self._playlist = None - if context["type"] == MediaType.PLAYLIST: - # The Spotify API does not currently support doing a lookup for the DJ playlist, so just use the minimal mock playlist object - if uri == SPOTIFY_DJ_PLAYLIST["uri"]: - self._playlist = SPOTIFY_DJ_PLAYLIST - else: - # Make sure any playlist lookups don't break the current playback state update - try: - self._playlist = self.data.client.playlist(uri) - except SpotifyException: - _LOGGER.debug( - "Unable to load spotify playlist '%s'. Continuing without playlist data", - uri, - ) - self._playlist = None - - device = self._currently_playing.get("device") - if device is not None: - self._restricted_device = device["is_restricted"] + await self.coordinator.client.set_repeat(REPEAT_MODE_MAPPING_TO_SPOTIFY[repeat]) async def async_browse_media( self, @@ -457,9 +387,7 @@ class SpotifyMediaPlayer(MediaPlayerEntity): return await async_browse_media_internal( self.hass, - self.data.client, - self.data.session, - self.data.current_user, + self.coordinator.client, media_content_type, media_content_id, ) @@ -475,5 +403,5 @@ class SpotifyMediaPlayer(MediaPlayerEntity): """When entity is added to hass.""" await super().async_added_to_hass() self.async_on_remove( - self.data.devices.async_add_listener(self._handle_devices_update) + self.devices.async_add_listener(self._handle_devices_update) ) diff --git a/homeassistant/components/spotify/models.py b/homeassistant/components/spotify/models.py index bbec134d89d..ca323267f79 100644 --- a/homeassistant/components/spotify/models.py +++ b/homeassistant/components/spotify/models.py @@ -1,19 +1,19 @@ """Models for use in Spotify integration.""" from dataclasses import dataclass -from typing import Any -from spotipy import Spotify +from spotifyaio import Device from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from .coordinator import SpotifyCoordinator + @dataclass -class HomeAssistantSpotifyData: - """Spotify data stored in the Home Assistant data object.""" +class SpotifyData: + """Class to hold Spotify data.""" - client: Spotify - current_user: dict[str, Any] - devices: DataUpdateCoordinator[list[dict[str, Any]]] + coordinator: SpotifyCoordinator session: OAuth2Session + devices: DataUpdateCoordinator[list[Device]] diff --git a/homeassistant/components/spotify/strings.json b/homeassistant/components/spotify/strings.json index e58d2098bde..90e573a1706 100644 --- a/homeassistant/components/spotify/strings.json +++ b/homeassistant/components/spotify/strings.json @@ -14,11 +14,13 @@ "missing_configuration": "The Spotify integration is not configured. 
Please follow the documentation.", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", "reauth_account_mismatch": "The Spotify account authenticated with, does not match the account needed re-authentication.", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", - "connection_error": "Could not fetch account information. Is the user registered in the Spotify Developer Dashboard?" + "connection_error": "Could not fetch account information. Is the user registered in the Spotify Developer Dashboard?", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]" }, "create_entry": { "default": "Successfully authenticated with Spotify." diff --git a/homeassistant/components/spotify/system_health.py b/homeassistant/components/spotify/system_health.py index 963c3bfb0ef..5ed6defe090 100644 --- a/homeassistant/components/spotify/system_health.py +++ b/homeassistant/components/spotify/system_health.py @@ -1,5 +1,7 @@ """Provide info to system health.""" +from typing import Any + from homeassistant.components import system_health from homeassistant.core import HomeAssistant, callback @@ -12,7 +14,7 @@ def async_register( register.async_register_info(system_health_info) -async def system_health_info(hass): +async def system_health_info(hass: HomeAssistant) -> dict[str, Any]: """Get info for the info page.""" return { "api_endpoint_reachable": system_health.async_check_can_reach_url( diff --git a/homeassistant/components/spotify/util.py b/homeassistant/components/spotify/util.py index 98bce980e5b..d882e9c58b8 100644 --- a/homeassistant/components/spotify/util.py +++ b/homeassistant/components/spotify/util.py @@ -2,8 +2,7 @@ from __future__ import annotations -from typing import Any - +from spotifyaio import Image import yarl from .const import MEDIA_PLAYER_PREFIX @@ -19,12 +18,11 @@ def resolve_spotify_media_type(media_content_type: str) -> str: return media_content_type.removeprefix(MEDIA_PLAYER_PREFIX) -def fetch_image_url(item: dict[str, Any], key="images") -> str | None: +def fetch_image_url(images: list[Image]) -> str | None: """Fetch image url.""" - source = item.get(key, []) - if isinstance(source, list) and source: - return source[0].get("url") - return None + if not images: + return None + return images[0].url def spotify_uri_from_media_browser_url(media_content_id: str) -> str: diff --git a/homeassistant/components/sql/config_flow.py b/homeassistant/components/sql/config_flow.py index 5537c7ff3b0..4fe04f2401c 100644 --- a/homeassistant/components/sql/config_flow.py +++ b/homeassistant/components/sql/config_flow.py @@ -23,7 +23,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_DEVICE_CLASS, @@ -144,7 +144,7 @@ class SQLConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SQLOptionsFlowHandler: """Get the options flow for this handler.""" - return SQLOptionsFlowHandler(config_entry) + return SQLOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -209,7 +209,7 @@ class SQLConfigFlow(ConfigFlow, domain=DOMAIN): ) -class 
SQLOptionsFlowHandler(OptionsFlowWithConfigEntry): +class SQLOptionsFlowHandler(OptionsFlow): """Handle SQL options.""" async def async_step_init( @@ -223,7 +223,7 @@ class SQLOptionsFlowHandler(OptionsFlowWithConfigEntry): db_url = user_input.get(CONF_DB_URL) query = user_input[CONF_QUERY] column = user_input[CONF_COLUMN_NAME] - name = self.options.get(CONF_NAME, self.config_entry.title) + name = self.config_entry.options.get(CONF_NAME, self.config_entry.title) try: query = validate_sql_select(query) @@ -275,7 +275,7 @@ class SQLOptionsFlowHandler(OptionsFlowWithConfigEntry): return self.async_show_form( step_id="init", data_schema=self.add_suggested_values_to_schema( - OPTIONS_SCHEMA, user_input or self.options + OPTIONS_SCHEMA, user_input or self.config_entry.options ), errors=errors, description_placeholders=description_placeholders, diff --git a/homeassistant/components/sql/manifest.json b/homeassistant/components/sql/manifest.json index dcb5f47829c..01c95d6c5e4 100644 --- a/homeassistant/components/sql/manifest.json +++ b/homeassistant/components/sql/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/sql", "iot_class": "local_polling", - "requirements": ["SQLAlchemy==2.0.31", "sqlparse==0.5.0"] + "requirements": ["SQLAlchemy==2.0.36", "sqlparse==0.5.0"] } diff --git a/homeassistant/components/squeezebox/__init__.py b/homeassistant/components/squeezebox/__init__.py index b6c7f049311..f466f3bcb62 100644 --- a/homeassistant/components/squeezebox/__init__.py +++ b/homeassistant/components/squeezebox/__init__.py @@ -1,9 +1,11 @@ """The Squeezebox integration.""" from asyncio import timeout +from dataclasses import dataclass +from datetime import datetime import logging -from pysqueezebox import Server +from pysqueezebox import Player, Server from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -15,23 +17,56 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import ( + CONNECTION_NETWORK_MAC, + DeviceEntryType, + format_mac, +) +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.event import async_call_later from .const import ( CONF_HTTPS, + DISCOVERY_INTERVAL, DISCOVERY_TASK, DOMAIN, + KNOWN_PLAYERS, + KNOWN_SERVERS, + MANUFACTURER, + SERVER_MODEL, + SIGNAL_PLAYER_DISCOVERED, + SIGNAL_PLAYER_REDISCOVERED, STATUS_API_TIMEOUT, STATUS_QUERY_LIBRARYNAME, + STATUS_QUERY_MAC, STATUS_QUERY_UUID, + STATUS_QUERY_VERSION, +) +from .coordinator import ( + LMSStatusDataUpdateCoordinator, + SqueezeBoxPlayerUpdateCoordinator, ) _LOGGER = logging.getLogger(__name__) -PLATFORMS = [Platform.MEDIA_PLAYER] +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.MEDIA_PLAYER, + Platform.SENSOR, +] -type SqueezeboxConfigEntry = ConfigEntry[Server] +@dataclass +class SqueezeboxData: + """SqueezeboxData data class.""" + + coordinator: LMSStatusDataUpdateCoordinator + server: Server + + +type SqueezeboxConfigEntry = ConfigEntry[SqueezeboxData] async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -> bool: @@ -66,25 +101,91 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) - _LOGGER.debug("LMS Status for setup = %s", status) lms.uuid = status[STATUS_QUERY_UUID] 
+ _LOGGER.debug("LMS %s = '%s' with uuid = %s ", lms.name, host, lms.uuid) lms.name = ( (STATUS_QUERY_LIBRARYNAME in status and status[STATUS_QUERY_LIBRARYNAME]) and status[STATUS_QUERY_LIBRARYNAME] or host ) - _LOGGER.debug("LMS %s = '%s' with uuid = %s ", lms.name, host, lms.uuid) + version = STATUS_QUERY_VERSION in status and status[STATUS_QUERY_VERSION] or None + # mac can be missing + mac_connect = ( + {(CONNECTION_NETWORK_MAC, format_mac(status[STATUS_QUERY_MAC]))} + if STATUS_QUERY_MAC in status + else None + ) - entry.runtime_data = lms + device_registry = dr.async_get(hass) + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, lms.uuid)}, + name=lms.name, + manufacturer=MANUFACTURER, + model=SERVER_MODEL, + sw_version=version, + entry_type=DeviceEntryType.SERVICE, + connections=mac_connect, + ) + _LOGGER.debug("LMS Device %s", device) + server_coordinator = LMSStatusDataUpdateCoordinator(hass, lms) + + entry.runtime_data = SqueezeboxData( + coordinator=server_coordinator, + server=lms, + ) + + # set up player discovery + known_servers = hass.data.setdefault(DOMAIN, {}).setdefault(KNOWN_SERVERS, {}) + known_players = known_servers.setdefault(lms.uuid, {}).setdefault(KNOWN_PLAYERS, []) + + async def _player_discovery(now: datetime | None = None) -> None: + """Discover squeezebox players by polling server.""" + + async def _discovered_player(player: Player) -> None: + """Handle a (re)discovered player.""" + if player.player_id in known_players: + await player.async_update() + async_dispatcher_send( + hass, SIGNAL_PLAYER_REDISCOVERED, player.player_id, player.connected + ) + else: + _LOGGER.debug("Adding new entity: %s", player) + player_coordinator = SqueezeBoxPlayerUpdateCoordinator( + hass, player, lms.uuid + ) + known_players.append(player.player_id) + async_dispatcher_send( + hass, SIGNAL_PLAYER_DISCOVERED, player_coordinator + ) + + if players := await lms.async_get_players(): + for player in players: + hass.async_create_task(_discovered_player(player)) + + entry.async_on_unload( + async_call_later(hass, DISCOVERY_INTERVAL, _player_discovery) + ) + + await server_coordinator.async_config_entry_first_refresh() await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + _LOGGER.debug( + "Adding player discovery job for LMS server: %s", entry.data[CONF_HOST] + ) + entry.async_create_background_task( + hass, _player_discovery(), "squeezebox.media_player.player_discovery" + ) + return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -> bool: """Unload a config entry.""" # Stop player discovery task for this config entry. 
_LOGGER.debug( "Reached async_unload_entry for LMS=%s(%s)", - entry.runtime_data.name or "Unknown", + entry.runtime_data.server.name or "Unknown", entry.entry_id, ) diff --git a/homeassistant/components/squeezebox/binary_sensor.py b/homeassistant/components/squeezebox/binary_sensor.py new file mode 100644 index 00000000000..ec0bac0fe43 --- /dev/null +++ b/homeassistant/components/squeezebox/binary_sensor.py @@ -0,0 +1,54 @@ +"""Binary sensor platform for Squeezebox integration.""" + +from __future__ import annotations + +import logging + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SqueezeboxConfigEntry +from .const import STATUS_SENSOR_NEEDSRESTART, STATUS_SENSOR_RESCAN +from .entity import LMSStatusEntity + +SENSORS: tuple[BinarySensorEntityDescription, ...] = ( + BinarySensorEntityDescription( + key=STATUS_SENSOR_RESCAN, + device_class=BinarySensorDeviceClass.RUNNING, + ), + BinarySensorEntityDescription( + key=STATUS_SENSOR_NEEDSRESTART, + device_class=BinarySensorDeviceClass.UPDATE, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SqueezeboxConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Squeezebox binary sensor platform using common elements.""" + + async_add_entities( + ServerStatusBinarySensor(entry.runtime_data.coordinator, description) + for description in SENSORS + ) + + +class ServerStatusBinarySensor(LMSStatusEntity, BinarySensorEntity): + """LMS status-based binary sensor provided via the coordinator.""" + + @property + def is_on(self) -> bool: + """Return the LMS status directly from coordinator data.""" + return bool(self.coordinator.data[self.entity_description.key]) diff --git a/homeassistant/components/squeezebox/browse_media.py b/homeassistant/components/squeezebox/browse_media.py index bc63bcb7f2f..4d1c98bc4fc 100644 --- a/homeassistant/components/squeezebox/browse_media.py +++ b/homeassistant/components/squeezebox/browse_media.py @@ -1,24 +1,41 @@ """Support for media browsing.""" +from __future__ import annotations + import contextlib +from typing import Any + +from pysqueezebox import Player from homeassistant.components import media_source from homeassistant.components.media_player import ( BrowseError, BrowseMedia, MediaClass, + MediaPlayerEntity, MediaType, ) +from homeassistant.core import HomeAssistant from homeassistant.helpers.network import is_internal_request -LIBRARY = ["Artists", "Albums", "Tracks", "Playlists", "Genres"] +LIBRARY = [ + "Favorites", + "Artists", + "Albums", + "Tracks", + "Playlists", + "Genres", + "New Music", +] MEDIA_TYPE_TO_SQUEEZEBOX = { + "Favorites": "favorites", "Artists": "artists", "Albums": "albums", "Tracks": "titles", "Playlists": "playlists", "Genres": "genres", + "New Music": "new music", MediaType.ALBUM: "album", MediaType.ARTIST: "artist", MediaType.TRACK: "title", @@ -32,14 +49,17 @@ SQUEEZEBOX_ID_BY_TYPE = { MediaType.TRACK: "track_id", MediaType.PLAYLIST: "playlist_id", MediaType.GENRE: "genre_id", + "Favorites": "item_id", } -CONTENT_TYPE_MEDIA_CLASS = { +CONTENT_TYPE_MEDIA_CLASS: dict[str | MediaType, dict[str, MediaClass | None]] = { + "Favorites": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK}, "Artists": {"item": MediaClass.DIRECTORY, 
"children": MediaClass.ARTIST}, "Albums": {"item": MediaClass.DIRECTORY, "children": MediaClass.ALBUM}, "Tracks": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK}, "Playlists": {"item": MediaClass.DIRECTORY, "children": MediaClass.PLAYLIST}, "Genres": {"item": MediaClass.DIRECTORY, "children": MediaClass.GENRE}, + "New Music": {"item": MediaClass.DIRECTORY, "children": MediaClass.ALBUM}, MediaType.ALBUM: {"item": MediaClass.ALBUM, "children": MediaClass.TRACK}, MediaType.ARTIST: {"item": MediaClass.ARTIST, "children": MediaClass.ALBUM}, MediaType.TRACK: {"item": MediaClass.TRACK, "children": None}, @@ -57,20 +77,29 @@ CONTENT_TYPE_TO_CHILD_TYPE = { "Tracks": MediaType.TRACK, "Playlists": MediaType.PLAYLIST, "Genres": MediaType.GENRE, + "Favorites": None, # can only be determined after inspecting the item + "New Music": MediaType.ALBUM, } BROWSE_LIMIT = 1000 -async def build_item_response(entity, player, payload): +async def build_item_response( + entity: MediaPlayerEntity, player: Player, payload: dict[str, str | None] +) -> BrowseMedia: """Create response payload for search described by payload.""" + internal_request = is_internal_request(entity.hass) search_id = payload["search_id"] search_type = payload["search_type"] - + assert ( + search_type is not None + ) # async_browse_media will not call this function if search_type is None media_class = CONTENT_TYPE_MEDIA_CLASS[search_type] + children = None + if search_id and search_id != search_type: browse_id = (SQUEEZEBOX_ID_BY_TYPE[search_type], search_id) else: @@ -82,35 +111,58 @@ async def build_item_response(entity, player, payload): browse_id=browse_id, ) - children = None - if result is not None and result.get("items"): item_type = CONTENT_TYPE_TO_CHILD_TYPE[search_type] - child_media_class = CONTENT_TYPE_MEDIA_CLASS[item_type] children = [] for item in result["items"]: item_id = str(item["id"]) - item_thumbnail = None + item_thumbnail: str | None = None + if item_type: + child_item_type: MediaType | str = item_type + child_media_class = CONTENT_TYPE_MEDIA_CLASS[item_type] + can_expand = child_media_class["children"] is not None + can_play = True + + if search_type == "Favorites": + if "album_id" in item: + item_id = str(item["album_id"]) + child_item_type = MediaType.ALBUM + child_media_class = CONTENT_TYPE_MEDIA_CLASS[MediaType.ALBUM] + can_expand = True + can_play = True + elif item["hasitems"]: + child_item_type = "Favorites" + child_media_class = CONTENT_TYPE_MEDIA_CLASS["Favorites"] + can_expand = True + can_play = False + else: + child_item_type = "Favorites" + child_media_class = CONTENT_TYPE_MEDIA_CLASS[MediaType.TRACK] + can_expand = False + can_play = True if artwork_track_id := item.get("artwork_track_id"): if internal_request: item_thumbnail = player.generate_image_url_from_track_id( artwork_track_id ) - else: + elif item_type is not None: item_thumbnail = entity.get_browse_image_url( item_type, item_id, artwork_track_id ) + else: + item_thumbnail = item.get("image_url") # will not be proxied by HA + assert child_media_class["item"] is not None children.append( BrowseMedia( title=item["title"], media_class=child_media_class["item"], media_content_id=item_id, - media_content_type=item_type, - can_play=True, - can_expand=child_media_class["children"] is not None, + media_content_type=child_item_type, + can_play=can_play, + can_expand=can_expand, thumbnail=item_thumbnail, ) ) @@ -118,21 +170,24 @@ async def build_item_response(entity, player, payload): if children is None: raise BrowseError(f"Media not found: 
{search_type} / {search_id}") + assert media_class["item"] is not None + if not search_id: + search_id = search_type return BrowseMedia( title=result.get("title"), media_class=media_class["item"], children_media_class=media_class["children"], media_content_id=search_id, media_content_type=search_type, - can_play=True, + can_play=search_type != "Favorites", children=children, can_expand=True, ) -async def library_payload(hass, player): +async def library_payload(hass: HomeAssistant, player: Player) -> BrowseMedia: """Create response payload to describe contents of library.""" - library_info = { + library_info: dict[str, Any] = { "title": "Music Library", "media_class": MediaClass.DIRECTORY, "media_content_id": "library", @@ -144,31 +199,33 @@ async def library_payload(hass, player): for item in LIBRARY: media_class = CONTENT_TYPE_MEDIA_CLASS[item] + result = await player.async_browse( MEDIA_TYPE_TO_SQUEEZEBOX[item], limit=1, ) if result is not None and result.get("items") is not None: + assert media_class["children"] is not None library_info["children"].append( BrowseMedia( title=item, media_class=media_class["children"], media_content_id=item, media_content_type=item, - can_play=True, + can_play=item != "Favorites", can_expand=True, ) ) with contextlib.suppress(media_source.BrowseError): - item = await media_source.async_browse_media( + browse = await media_source.async_browse_media( hass, None, content_filter=media_source_content_filter ) # If domain is None, it's overview of available sources - if item.domain is None: - library_info["children"].extend(item.children) + if browse.domain is None: + library_info["children"].extend(browse.children) else: - library_info["children"].append(item) + library_info["children"].append(browse) return BrowseMedia(**library_info) @@ -178,16 +235,19 @@ def media_source_content_filter(item: BrowseMedia) -> bool: return item.media_content_type.startswith("audio/") -async def generate_playlist(player, payload): +async def generate_playlist(player: Player, payload: dict[str, str]) -> list | None: """Generate playlist from browsing payload.""" media_type = payload["search_type"] media_id = payload["search_id"] if media_type not in SQUEEZEBOX_ID_BY_TYPE: - return None + raise BrowseError(f"Media type not supported: {media_type}") browse_id = (SQUEEZEBOX_ID_BY_TYPE[media_type], media_id) result = await player.async_browse( "titles", limit=BROWSE_LIMIT, browse_id=browse_id ) - return result.get("items") + if result and "items" in result: + items: list = result["items"] + return items + raise BrowseError(f"Media not found: {media_type} / {media_id}") diff --git a/homeassistant/components/squeezebox/config_flow.py b/homeassistant/components/squeezebox/config_flow.py index fe57b12516a..c372c7262d4 100644 --- a/homeassistant/components/squeezebox/config_flow.py +++ b/homeassistant/components/squeezebox/config_flow.py @@ -1,5 +1,7 @@ """Config flow for Squeezebox integration.""" +from __future__ import annotations + import asyncio from http import HTTPStatus import logging @@ -24,9 +26,11 @@ _LOGGER = logging.getLogger(__name__) TIMEOUT = 5 -def _base_schema(discovery_info=None): +def _base_schema( + discovery_info: dict[str, Any] | None = None, +) -> vol.Schema: """Generate base schema.""" - base_schema = {} + base_schema: dict[Any, Any] = {} if discovery_info and CONF_HOST in discovery_info: base_schema.update( { @@ -71,14 +75,14 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize an instance of the squeezebox config 
flow.""" self.data_schema = _base_schema() - self.discovery_info = None + self.discovery_info: dict[str, Any] | None = None - async def _discover(self, uuid=None): + async def _discover(self, uuid: str | None = None) -> None: """Discover an unconfigured LMS server.""" self.discovery_info = None discovery_event = asyncio.Event() - def _discovery_callback(server): + def _discovery_callback(server: Server) -> None: if server.uuid: # ignore already configured uuids for entry in self._async_current_entries(): @@ -156,7 +160,9 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_edit(self, user_input=None): + async def async_step_edit( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Edit a discovered or manually inputted server.""" errors = {} if user_input: @@ -171,7 +177,9 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): step_id="edit", data_schema=self.data_schema, errors=errors ) - async def async_step_integration_discovery(self, discovery_info): + async def async_step_integration_discovery( + self, discovery_info: dict[str, Any] + ) -> ConfigFlowResult: """Handle discovery of a server.""" _LOGGER.debug("Reached server discovery flow with info: %s", discovery_info) if "uuid" in discovery_info: diff --git a/homeassistant/components/squeezebox/const.py b/homeassistant/components/squeezebox/const.py index a814cf6ecc4..8bc33214170 100644 --- a/homeassistant/components/squeezebox/const.py +++ b/homeassistant/components/squeezebox/const.py @@ -5,8 +5,30 @@ DISCOVERY_TASK = "discovery_task" DOMAIN = "squeezebox" DEFAULT_PORT = 9000 KNOWN_PLAYERS = "known_players" +KNOWN_SERVERS = "known_servers" +MANUFACTURER = "https://lyrion.org/" +PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub" SENSOR_UPDATE_INTERVAL = 60 +SERVER_MODEL = "Lyrion Music Server" STATUS_API_TIMEOUT = 10 +STATUS_SENSOR_LASTSCAN = "lastscan" +STATUS_SENSOR_NEEDSRESTART = "needsrestart" +STATUS_SENSOR_NEWVERSION = "newversion" +STATUS_SENSOR_NEWPLUGINS = "newplugins" +STATUS_SENSOR_RESCAN = "rescan" +STATUS_SENSOR_INFO_TOTAL_ALBUMS = "info total albums" +STATUS_SENSOR_INFO_TOTAL_ARTISTS = "info total artists" +STATUS_SENSOR_INFO_TOTAL_DURATION = "info total duration" +STATUS_SENSOR_INFO_TOTAL_GENRES = "info total genres" +STATUS_SENSOR_INFO_TOTAL_SONGS = "info total songs" +STATUS_SENSOR_PLAYER_COUNT = "player count" +STATUS_SENSOR_OTHER_PLAYER_COUNT = "other player count" STATUS_QUERY_LIBRARYNAME = "libraryname" +STATUS_QUERY_MAC = "mac" STATUS_QUERY_UUID = "uuid" +STATUS_QUERY_VERSION = "version" SQUEEZEBOX_SOURCE_STRINGS = ("source:", "wavin:", "spotify:") +SIGNAL_PLAYER_DISCOVERED = "squeezebox_player_discovered" +SIGNAL_PLAYER_REDISCOVERED = "squeezebox_player_rediscovered" +DISCOVERY_INTERVAL = 60 +PLAYER_UPDATE_INTERVAL = 5 diff --git a/homeassistant/components/squeezebox/coordinator.py b/homeassistant/components/squeezebox/coordinator.py new file mode 100644 index 00000000000..f3aacbc9833 --- /dev/null +++ b/homeassistant/components/squeezebox/coordinator.py @@ -0,0 +1,120 @@ +"""DataUpdateCoordinator for the Squeezebox integration.""" + +from asyncio import timeout +from collections.abc import Callable +from datetime import timedelta +import logging +import re +from typing import Any + +from pysqueezebox import Player, Server + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed 
+from homeassistant.util import dt as dt_util + +from .const import ( + PLAYER_UPDATE_INTERVAL, + SENSOR_UPDATE_INTERVAL, + SIGNAL_PLAYER_REDISCOVERED, + STATUS_API_TIMEOUT, + STATUS_SENSOR_LASTSCAN, + STATUS_SENSOR_NEEDSRESTART, + STATUS_SENSOR_RESCAN, +) + +_LOGGER = logging.getLogger(__name__) + + +class LMSStatusDataUpdateCoordinator(DataUpdateCoordinator): + """LMS Status custom coordinator.""" + + def __init__(self, hass: HomeAssistant, lms: Server) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + _LOGGER, + name=lms.name, + update_interval=timedelta(seconds=SENSOR_UPDATE_INTERVAL), + always_update=False, + ) + self.lms = lms + self.newversion_regex = re.compile("<.*$") + + async def _async_update_data(self) -> dict: + """Fetch data from the LMS status call. + + Then we process only a subset to make them nice for HA + """ + async with timeout(STATUS_API_TIMEOUT): + data = await self.lms.async_status() + + if not data: + raise UpdateFailed("No data from status poll") + _LOGGER.debug("Raw serverstatus %s=%s", self.lms.name, data) + + return self._prepare_status_data(data) + + def _prepare_status_data(self, data: dict) -> dict: + """Reshape status values that need changing for HA presentation.""" + + # Binary sensors + # rescan: bool, a library rescan is in progress; key not present if false + data[STATUS_SENSOR_RESCAN] = STATUS_SENSOR_RESCAN in data + # needsrestart: bool, pending LMS plugin updates require a restart; key not present if false + data[STATUS_SENSOR_NEEDSRESTART] = STATUS_SENSOR_NEEDSRESTART in data + + # Sensors that need special handling + # 'lastscan': '1718431678', epoch -> UTC datetime; not always present + data[STATUS_SENSOR_LASTSCAN] = ( + dt_util.utc_from_timestamp(int(data[STATUS_SENSOR_LASTSCAN])) + if STATUS_SENSOR_LASTSCAN in data + else None + ) + + _LOGGER.debug("Processed serverstatus %s=%s", self.lms.name, data) + return data + + +class SqueezeBoxPlayerUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Coordinator for Squeezebox players.""" + + def __init__(self, hass: HomeAssistant, player: Player, server_uuid: str) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + _LOGGER, + name=player.name, + update_interval=timedelta(seconds=PLAYER_UPDATE_INTERVAL), + always_update=True, + ) + self.player = player + self.available = True + self._remove_dispatcher: Callable | None = None + self.server_uuid = server_uuid + + async def _async_update_data(self) -> dict[str, Any]: + """Update the player if available, or listen for rediscovery if not.""" + if self.available: + # Only update players available at last update; unavailable players are rediscovered instead + await self.player.async_update() + + if self.player.connected is False: + _LOGGER.debug("Player %s is not available", self.name) + self.available = False + + # start listening for restored players + self._remove_dispatcher = async_dispatcher_connect( + self.hass, SIGNAL_PLAYER_REDISCOVERED, self.rediscovered + ) + return {} + + @callback + def rediscovered(self, unique_id: str, connected: bool) -> None: + """Make a player available again.""" + if unique_id == self.player.player_id and connected: + self.available = True + _LOGGER.debug("Player %s is available again", self.name) + if self._remove_dispatcher: + self._remove_dispatcher() diff --git a/homeassistant/components/squeezebox/entity.py b/homeassistant/components/squeezebox/entity.py new file mode 100644 index 00000000000..027ca68edc6 --- /dev/null +++ b/homeassistant/components/squeezebox/entity.py @@ -0,0 +1,31 @@ +"""Base class for Squeezebox 
Sensor entities.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, STATUS_QUERY_UUID +from .coordinator import LMSStatusDataUpdateCoordinator + + +class LMSStatusEntity(CoordinatorEntity[LMSStatusDataUpdateCoordinator]): + """Defines a base status sensor entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: LMSStatusDataUpdateCoordinator, + description: EntityDescription, + ) -> None: + """Initialize status sensor entity.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_translation_key = description.key.replace(" ", "_") + self._attr_unique_id = ( + f"{coordinator.data[STATUS_QUERY_UUID]}_{description.key}" + ) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.data[STATUS_QUERY_UUID])}, + ) diff --git a/homeassistant/components/squeezebox/icons.json b/homeassistant/components/squeezebox/icons.json index d58f0d5634d..29911ddad77 100644 --- a/homeassistant/components/squeezebox/icons.json +++ b/homeassistant/components/squeezebox/icons.json @@ -1,8 +1,32 @@ { + "entity": { + "sensor": { + "info_total_albums": { + "default": "mdi:album" + }, + "info_total_artists": { + "default": "mdi:account-music" + }, + "info_total_genres": { + "default": "mdi:drama-masks" + }, + "info_total_songs": { + "default": "mdi:file-music" + }, + "player_count": { + "default": "mdi:folder-play" + }, + "other_player_count": { + "default": "mdi:folder-play-outline" + } + } + }, "services": { - "call_method": "mdi:console", - "call_query": "mdi:database", - "sync": "mdi:sync", - "unsync": "mdi:sync-off" + "call_method": { + "service": "mdi:console" + }, + "call_query": { + "service": "mdi:database" + } } } diff --git a/homeassistant/components/squeezebox/manifest.json b/homeassistant/components/squeezebox/manifest.json index 40bc8f36d22..aa595340d56 100644 --- a/homeassistant/components/squeezebox/manifest.json +++ b/homeassistant/components/squeezebox/manifest.json @@ -1,7 +1,7 @@ { "domain": "squeezebox", "name": "Squeezebox (Lyrion Music Server)", - "codeowners": ["@rajlaud"], + "codeowners": ["@rajlaud", "@pssc", "@peteS-UK"], "config_flow": true, "dhcp": [ { @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/squeezebox", "iot_class": "local_polling", "loggers": ["pysqueezebox"], - "requirements": ["pysqueezebox==0.7.1"] + "requirements": ["pysqueezebox==0.10.0"] } diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 552b8ed800c..19cd1e36910 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -6,14 +6,16 @@ from collections.abc import Callable from datetime import datetime import json import logging -from typing import Any +from typing import TYPE_CHECKING, Any -from pysqueezebox import Player, async_discover +from pysqueezebox import Server, async_discover import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( ATTR_MEDIA_ENQUEUE, + BrowseError, + BrowseMedia, MediaPlayerEnqueue, MediaPlayerEntity, MediaPlayerEntityFeature, @@ -23,58 +25,58 @@ from homeassistant.components.media_player import ( async_process_play_media_url, ) from homeassistant.config_entries import SOURCE_INTEGRATION_DISCOVERY -from 
homeassistant.const import ATTR_COMMAND, CONF_HOST, CONF_PORT +from homeassistant.const import ATTR_COMMAND, CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import ( config_validation as cv, discovery_flow, entity_platform, + entity_registry as er, ) from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, DeviceInfo, format_mac, ) -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later from homeassistant.helpers.start import async_at_start +from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow -from . import SqueezeboxConfigEntry from .browse_media import ( build_item_response, generate_playlist, library_payload, media_source_content_filter, ) -from .const import DISCOVERY_TASK, DOMAIN, KNOWN_PLAYERS, SQUEEZEBOX_SOURCE_STRINGS +from .const import ( + DISCOVERY_TASK, + DOMAIN, + KNOWN_PLAYERS, + KNOWN_SERVERS, + SIGNAL_PLAYER_DISCOVERED, + SQUEEZEBOX_SOURCE_STRINGS, +) +from .coordinator import SqueezeBoxPlayerUpdateCoordinator + +if TYPE_CHECKING: + from . import SqueezeboxConfigEntry SERVICE_CALL_METHOD = "call_method" SERVICE_CALL_QUERY = "call_query" -SERVICE_SYNC = "sync" -SERVICE_UNSYNC = "unsync" ATTR_QUERY_RESULT = "query_result" -ATTR_SYNC_GROUP = "sync_group" - -SIGNAL_PLAYER_REDISCOVERED = "squeezebox_player_rediscovered" _LOGGER = logging.getLogger(__name__) -DISCOVERY_INTERVAL = 60 - -KNOWN_SERVERS = "known_servers" ATTR_PARAMETERS = "parameters" ATTR_OTHER_PLAYER = "other_player" ATTR_TO_PROPERTY = [ ATTR_QUERY_RESULT, - ATTR_SYNC_GROUP, ] SQUEEZEBOX_MODE = { @@ -87,7 +89,7 @@ SQUEEZEBOX_MODE = { async def start_server_discovery(hass: HomeAssistant) -> None: """Start a server discovery task.""" - def _discovered_server(server): + def _discovered_server(server: Server) -> None: discovery_flow.async_create_flow( hass, DOMAIN, @@ -113,49 +115,15 @@ async def async_setup_entry( entry: SqueezeboxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - """Set up an player discovery from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - known_players = hass.data[DOMAIN].setdefault(KNOWN_PLAYERS, []) - lms = entry.runtime_data + """Set up the Squeezebox media_player platform from a server config entry.""" - async def _player_discovery(now=None): - """Discover squeezebox players by polling server.""" + # Add media player entities when discovered + async def _player_discovered(player: SqueezeBoxPlayerUpdateCoordinator) -> None: + _LOGGER.debug("Setting up media_player entity for player %s", player) + async_add_entities([SqueezeBoxMediaPlayerEntity(player)]) - async def _discovered_player(player): - """Handle a (re)discovered player.""" - entity = next( - ( - known - for known in known_players - if known.unique_id == player.player_id - ), - None, - ) - if entity: - await player.async_update() - async_dispatcher_send( - hass, SIGNAL_PLAYER_REDISCOVERED, player.player_id, player.connected - ) - - if not entity: - _LOGGER.debug("Adding new entity: %s", player) - entity = SqueezeBoxEntity(player) - known_players.append(entity) - async_add_entities([entity]) - - if players := await lms.async_get_players(): - for player in players: - 
hass.async_create_task(_discovered_player(player)) - - entry.async_on_unload( - async_call_later(hass, DISCOVERY_INTERVAL, _player_discovery) - ) - - _LOGGER.debug( - "Adding player discovery job for LMS server: %s", entry.data[CONF_HOST] - ) - entry.async_create_background_task( - hass, _player_discovery(), "squeezebox.media_player.player_discovery" + entry.async_on_unload( + async_dispatcher_connect(hass, SIGNAL_PLAYER_DISCOVERED, _player_discovered) ) # Register entity services @@ -180,19 +148,15 @@ async def async_setup_entry( }, "async_call_query", ) - platform.async_register_entity_service( - SERVICE_SYNC, - {vol.Required(ATTR_OTHER_PLAYER): cv.string}, - "async_sync", - ) - platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") # Start server discovery task if not already running entry.async_on_unload(async_at_start(hass, start_server_discovery)) -class SqueezeBoxEntity(MediaPlayerEntity): - """Representation of a SqueezeBox device. +class SqueezeBoxMediaPlayerEntity( + CoordinatorEntity[SqueezeBoxPlayerUpdateCoordinator], MediaPlayerEntity +): + """Representation of the media player features of a SqueezeBox device. Wraps a pysqueezebox.Player() object. """ @@ -219,22 +183,53 @@ class SqueezeBoxEntity(MediaPlayerEntity): _attr_has_entity_name = True _attr_name = None _last_update: datetime | None = None - _attr_available = True - def __init__(self, player: Player) -> None: + def __init__( + self, + coordinator: SqueezeBoxPlayerUpdateCoordinator, + ) -> None: """Initialize the SqueezeBox device.""" + super().__init__(coordinator) + player = coordinator.player self._player = player self._query_result: bool | dict = {} self._remove_dispatcher: Callable | None = None + self._previous_media_position = 0 self._attr_unique_id = format_mac(player.player_id) + _manufacturer = None + if player.model == "SqueezeLite" or "SqueezePlay" in player.model: + _manufacturer = "Ralph Irving" + elif ( + "Squeezebox" in player.model + or "Transporter" in player.model + or "Slim" in player.model + ): + _manufacturer = "Logitech" + self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, self._attr_unique_id)}, name=player.name, connections={(CONNECTION_NETWORK_MAC, self._attr_unique_id)}, + via_device=(DOMAIN, coordinator.server_uuid), + model=player.model, + manufacturer=_manufacturer, ) + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if self._previous_media_position != self.media_position: + self._previous_media_position = self.media_position + self._last_update = utcnow() + self.async_write_ha_state() + @property - def extra_state_attributes(self): + def available(self) -> bool: + """Return True if entity is available.""" + return self.coordinator.available and super().available + + @property + def extra_state_attributes(self) -> dict[str, Any]: """Return device-specific attributes.""" return { attr: getattr(self, attr) @@ -242,68 +237,49 @@ class SqueezeBoxEntity(MediaPlayerEntity): if getattr(self, attr) is not None } - @callback - def rediscovered(self, unique_id, connected): - """Make a player available again.""" - if unique_id == self.unique_id and connected: - self._attr_available = True - _LOGGER.debug("Player %s is available again", self.name) - self._remove_dispatcher() - @property def state(self) -> MediaPlayerState | None: """Return the state of the device.""" if not self._player.power: return MediaPlayerState.OFF - if self._player.mode: - return SQUEEZEBOX_MODE.get(self._player.mode) + if 
self._player.mode and self._player.mode in SQUEEZEBOX_MODE: + return SQUEEZEBOX_MODE[self._player.mode] + _LOGGER.error( + "Received unknown mode %s from player %s", self._player.mode, self.name + ) return None - async def async_update(self) -> None: - """Update the Player() object.""" - # only update available players, newly available players will be rediscovered and marked available - if self._attr_available: - last_media_position = self.media_position - await self._player.async_update() - if self.media_position != last_media_position: - self._last_update = utcnow() - if self._player.connected is False: - _LOGGER.debug("Player %s is not available", self.name) - self._attr_available = False - - # start listening for restored players - self._remove_dispatcher = async_dispatcher_connect( - self.hass, SIGNAL_PLAYER_REDISCOVERED, self.rediscovered - ) - async def async_will_remove_from_hass(self) -> None: """Remove from list of known players when removed from hass.""" - self.hass.data[DOMAIN][KNOWN_PLAYERS].remove(self) + known_servers = self.hass.data[DOMAIN][KNOWN_SERVERS] + known_players = known_servers[self.coordinator.server_uuid][KNOWN_PLAYERS] + known_players.remove(self.coordinator.player.player_id) @property def volume_level(self) -> float | None: """Volume level of the media player (0..1).""" if self._player.volume: return int(float(self._player.volume)) / 100.0 + return None @property - def is_volume_muted(self): + def is_volume_muted(self) -> bool: """Return true if volume is muted.""" - return self._player.muting + return bool(self._player.muting) @property - def media_content_id(self): + def media_content_id(self) -> str | None: """Content ID of current playing media.""" if not self._player.playlist: return None if len(self._player.playlist) > 1: urls = [{"url": track["url"]} for track in self._player.playlist] return json.dumps({"index": self._player.current_index, "urls": urls}) - return self._player.url + return str(self._player.url) @property - def media_content_type(self): + def media_content_type(self) -> MediaType | None: """Content type of current playing media.""" if not self._player.playlist: return None @@ -312,47 +288,47 @@ class SqueezeBoxEntity(MediaPlayerEntity): return MediaType.MUSIC @property - def media_duration(self): + def media_duration(self) -> int: """Duration of current playing media in seconds.""" - return self._player.duration + return int(self._player.duration) if self._player.duration else 0 @property - def media_position(self): + def media_position(self) -> int: """Position of current playing media in seconds.""" - return self._player.time + return int(self._player.time) if self._player.time else 0 @property - def media_position_updated_at(self): + def media_position_updated_at(self) -> datetime | None: """Last time status was updated.""" return self._last_update @property - def media_image_url(self): + def media_image_url(self) -> str | None: """Image url of current playing media.""" - return self._player.image_url + return str(self._player.image_url) if self._player.image_url else None @property - def media_title(self): + def media_title(self) -> str | None: """Title of current playing media.""" - return self._player.title + return str(self._player.title) @property - def media_channel(self): + def media_channel(self) -> str | None: """Channel (e.g. 
webradio name) of current playing media.""" - return self._player.remote_title + return str(self._player.remote_title) @property - def media_artist(self): + def media_artist(self) -> str | None: """Artist of current playing media.""" - return self._player.artist + return str(self._player.artist) @property - def media_album_name(self): + def media_album_name(self) -> str | None: """Album of current playing media.""" - return self._player.album + return str(self._player.album) @property - def repeat(self): + def repeat(self) -> RepeatMode: """Repeat setting.""" if self._player.repeat == "song": return RepeatMode.ONE @@ -361,85 +337,95 @@ class SqueezeBoxEntity(MediaPlayerEntity): return RepeatMode.OFF @property - def shuffle(self): + def shuffle(self) -> bool: """Boolean if shuffle is enabled.""" # Squeezebox has a third shuffle mode (album) not recognized by Home Assistant - return self._player.shuffle == "song" + return bool(self._player.shuffle == "song") @property - def group_members(self): + def group_members(self) -> list[str]: """List players we are synced with.""" - player_ids = { - p.unique_id: p.entity_id for p in self.hass.data[DOMAIN][KNOWN_PLAYERS] - } + ent_reg = er.async_get(self.hass) return [ - player_ids[player] + entity_id for player in self._player.sync_group - if player in player_ids + if ( + entity_id := ent_reg.async_get_entity_id( + Platform.MEDIA_PLAYER, DOMAIN, player + ) + ) ] @property - def sync_group(self): - """List players we are synced with. Deprecated.""" - return self.group_members - - @property - def query_result(self): + def query_result(self) -> dict | bool: """Return the result from the call_query service.""" return self._query_result async def async_turn_off(self) -> None: """Turn off media player.""" await self._player.async_set_power(False) + await self.coordinator.async_refresh() async def async_volume_up(self) -> None: """Volume up media player.""" await self._player.async_set_volume("+5") + await self.coordinator.async_refresh() async def async_volume_down(self) -> None: """Volume down media player.""" await self._player.async_set_volume("-5") + await self.coordinator.async_refresh() async def async_set_volume_level(self, volume: float) -> None: """Set volume level, range 0..1.""" volume_percent = str(int(volume * 100)) await self._player.async_set_volume(volume_percent) + await self.coordinator.async_refresh() async def async_mute_volume(self, mute: bool) -> None: """Mute (true) or unmute (false) media player.""" await self._player.async_set_muting(mute) + await self.coordinator.async_refresh() async def async_media_stop(self) -> None: """Send stop command to media player.""" await self._player.async_stop() + await self.coordinator.async_refresh() async def async_media_play_pause(self) -> None: """Send pause command to media player.""" await self._player.async_toggle_pause() + await self.coordinator.async_refresh() async def async_media_play(self) -> None: """Send play command to media player.""" await self._player.async_play() + await self.coordinator.async_refresh() async def async_media_pause(self) -> None: """Send pause command to media player.""" await self._player.async_pause() + await self.coordinator.async_refresh() async def async_media_next_track(self) -> None: """Send next track command.""" await self._player.async_index("+1") + await self.coordinator.async_refresh() async def async_media_previous_track(self) -> None: """Send next track command.""" await self._player.async_index("-1") + await self.coordinator.async_refresh() async def 
async_media_seek(self, position: float) -> None: """Send seek command.""" await self._player.async_time(position) + await self.coordinator.async_refresh() async def async_turn_on(self) -> None: """Turn the media player on.""" await self._player.async_set_power(True) + await self.coordinator.async_refresh() async def async_play_media( self, media_type: MediaType | str, media_id: str, **kwargs: Any @@ -477,11 +463,11 @@ class SqueezeBoxEntity(MediaPlayerEntity): try: # a saved playlist by number payload = { - "search_id": int(media_id), + "search_id": media_id, "search_type": MediaType.PLAYLIST, } playlist = await generate_playlist(self._player, payload) - except ValueError: + except BrowseError: # a list of urls content = json.loads(media_id) playlist = content["urls"] @@ -498,6 +484,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): await self._player.async_load_playlist(playlist, cmd) if index is not None: await self._player.async_index(index) + await self.coordinator.async_refresh() async def async_set_repeat(self, repeat: RepeatMode) -> None: """Set the repeat mode.""" @@ -509,17 +496,22 @@ class SqueezeBoxEntity(MediaPlayerEntity): repeat_mode = "none" await self._player.async_set_repeat(repeat_mode) + await self.coordinator.async_refresh() async def async_set_shuffle(self, shuffle: bool) -> None: """Enable/disable shuffle mode.""" shuffle_mode = "song" if shuffle else "none" await self._player.async_set_shuffle(shuffle_mode) + await self.coordinator.async_refresh() async def async_clear_playlist(self) -> None: """Send the media player the command for clear playlist.""" await self._player.async_clear_playlist() + await self.coordinator.async_refresh() - async def async_call_method(self, command, parameters=None): + async def async_call_method( + self, command: str, parameters: list[str] | None = None + ) -> None: """Call Squeezebox JSON/RPC method. Additional parameters are added to the command to form the list of @@ -530,7 +522,9 @@ class SqueezeBoxEntity(MediaPlayerEntity): all_params.extend(parameters) await self._player.async_query(*all_params) - async def async_call_query(self, command, parameters=None): + async def async_call_query( + self, command: str, parameters: list[str] | None = None + ) -> None: """Call Squeezebox JSON/RPC method where we care about the result. Additional parameters are added to the command to form the list of @@ -541,6 +535,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): all_params.extend(parameters) self._query_result = await self._player.async_query(*all_params) _LOGGER.debug("call_query got result %s", self._query_result) + self.async_write_ha_state() async def async_join_players(self, group_members: list[str]) -> None: """Add other Squeezebox players to this player's sync group. @@ -548,39 +543,30 @@ class SqueezeBoxEntity(MediaPlayerEntity): If the other player is a member of a sync group, it will leave the current sync group without asking. """ - player_ids = { - p.entity_id: p.unique_id for p in self.hass.data[DOMAIN][KNOWN_PLAYERS] - } - - for other_player in group_members: - if other_player_id := player_ids.get(other_player): + ent_reg = er.async_get(self.hass) + for other_player_entity_id in group_members: + other_player = ent_reg.async_get(other_player_entity_id) + if other_player is None: + raise ServiceValidationError( + f"Could not find player with entity_id {other_player_entity_id}" + ) + if other_player_id := other_player.unique_id: await self._player.async_sync(other_player_id) else: - _LOGGER.debug( - "Could not find player_id for %s. 
Not syncing", other_player + raise ServiceValidationError( + f"Could not join unknown player {other_player_entity_id}" ) - async def async_sync(self, other_player): - """Sync this Squeezebox player to another. Deprecated.""" - _LOGGER.warning( - "Service squeezebox.sync is deprecated; use media_player.join_players" - " instead" - ) - await self.async_join_players([other_player]) - async def async_unjoin_player(self) -> None: """Unsync this Squeezebox player.""" await self._player.async_unsync() + await self.coordinator.async_refresh() - async def async_unsync(self): - """Unsync this Squeezebox player. Deprecated.""" - _LOGGER.warning( - "Service squeezebox.unsync is deprecated; use media_player.unjoin_player" - " instead" - ) - await self.async_unjoin_player() - - async def async_browse_media(self, media_content_type=None, media_content_id=None): + async def async_browse_media( + self, + media_content_type: MediaType | str | None = None, + media_content_id: str | None = None, + ) -> BrowseMedia: """Implement the websocket media browsing helper.""" _LOGGER.debug( "Reached async_browse_media with content_type %s and content_id %s", @@ -591,7 +577,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): if media_content_type in [None, "library"]: return await library_payload(self.hass, self._player) - if media_source.is_media_source_id(media_content_id): + if media_content_id and media_source.is_media_source_id(media_content_id): return await media_source.async_browse_media( self.hass, media_content_id, content_filter=media_source_content_filter ) diff --git a/homeassistant/components/squeezebox/sensor.py b/homeassistant/components/squeezebox/sensor.py new file mode 100644 index 00000000000..0ca33179f9f --- /dev/null +++ b/homeassistant/components/squeezebox/sensor.py @@ -0,0 +1,92 @@ +"""Platform for sensor integration for squeezebox.""" + +from __future__ import annotations + +import logging +from typing import cast + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfTime +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import SqueezeboxConfigEntry +from .const import ( + STATUS_SENSOR_INFO_TOTAL_ALBUMS, + STATUS_SENSOR_INFO_TOTAL_ARTISTS, + STATUS_SENSOR_INFO_TOTAL_DURATION, + STATUS_SENSOR_INFO_TOTAL_GENRES, + STATUS_SENSOR_INFO_TOTAL_SONGS, + STATUS_SENSOR_LASTSCAN, + STATUS_SENSOR_OTHER_PLAYER_COUNT, + STATUS_SENSOR_PLAYER_COUNT, +) +from .entity import LMSStatusEntity + +SENSORS: tuple[SensorEntityDescription, ...] 
= ( + SensorEntityDescription( + key=STATUS_SENSOR_INFO_TOTAL_ALBUMS, + state_class=SensorStateClass.TOTAL, + ), + SensorEntityDescription( + key=STATUS_SENSOR_INFO_TOTAL_ARTISTS, + state_class=SensorStateClass.TOTAL, + ), + SensorEntityDescription( + key=STATUS_SENSOR_INFO_TOTAL_DURATION, + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.SECONDS, + ), + SensorEntityDescription( + key=STATUS_SENSOR_INFO_TOTAL_GENRES, + state_class=SensorStateClass.TOTAL, + ), + SensorEntityDescription( + key=STATUS_SENSOR_INFO_TOTAL_SONGS, + state_class=SensorStateClass.TOTAL, + ), + SensorEntityDescription( + key=STATUS_SENSOR_LASTSCAN, + device_class=SensorDeviceClass.TIMESTAMP, + ), + SensorEntityDescription( + key=STATUS_SENSOR_PLAYER_COUNT, + state_class=SensorStateClass.TOTAL, + ), + SensorEntityDescription( + key=STATUS_SENSOR_OTHER_PLAYER_COUNT, + state_class=SensorStateClass.TOTAL, + entity_registry_visible_default=False, + ), +) + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SqueezeboxConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Platform setup using common elements.""" + + async_add_entities( + ServerStatusSensor(entry.runtime_data.coordinator, description) + for description in SENSORS + ) + + +class ServerStatusSensor(LMSStatusEntity, SensorEntity): + """LMS Status based sensor from LMS via coordinator.""" + + @property + def native_value(self) -> StateType: + """LMS Status directly from coordinator data.""" + return cast(StateType, self.coordinator.data[self.entity_description.key]) diff --git a/homeassistant/components/squeezebox/services.yaml b/homeassistant/components/squeezebox/services.yaml index 90f9bf2d769..07885ae5dd6 100644 --- a/homeassistant/components/squeezebox/services.yaml +++ b/homeassistant/components/squeezebox/services.yaml @@ -30,19 +30,3 @@ call_query: advanced: true selector: object: -sync: - target: - entity: - integration: squeezebox - domain: media_player - fields: - other_player: - required: true - example: "media_player.living_room" - selector: - text: -unsync: - target: - entity: - integration: squeezebox - domain: media_player diff --git a/homeassistant/components/squeezebox/strings.json b/homeassistant/components/squeezebox/strings.json index 899d35813aa..406c7243a1a 100644 --- a/homeassistant/components/squeezebox/strings.json +++ b/homeassistant/components/squeezebox/strings.json @@ -60,20 +60,48 @@ "description": "[%key:component::squeezebox::services::call_method::fields::parameters::description%]" } } - }, - "sync": { - "name": "Sync", - "description": "Adds another player to this player's sync group. If the other player is already in a sync group, it will leave it.\n.", - "fields": { - "other_player": { - "name": "Other player", - "description": "Name of the other Squeezebox player to link." - } + } + }, + "entity": { + "binary_sensor": { + "rescan": { + "name": "Library rescan" + }, + "needsrestart": { + "name": "Needs restart" } }, - "unsync": { - "name": "Unsync", - "description": "Removes this player from its sync group."
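A note on the sensor keys used here: entity.py builds each entity's translation_key by replacing spaces in the LMS status key with underscores, which is why strings.json and icons.json refer to identifiers such as "info_total_albums" and "player_count". A minimal standalone sketch of that mapping, with the space-separated key strings assumed for illustration (the real values live in the integration's const.py):

# Illustrative only: how a space-separated LMS status key becomes the
# translation key referenced in strings.json / icons.json.
lms_status_keys = ["info total albums", "info total songs", "player count"]
translation_keys = [key.replace(" ", "_") for key in lms_status_keys]
print(translation_keys)  # ['info_total_albums', 'info_total_songs', 'player_count']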
+ "sensor": { + "lastscan": { + "name": "Last scan" + }, + "info_total_albums": { + "name": "Total albums", + "unit_of_measurement": "albums" + }, + "info_total_artists": { + "name": "Total artists", + "unit_of_measurement": "artists" + }, + "info_total_duration": { + "name": "Total duration" + }, + "info_total_genres": { + "name": "Total genres", + "unit_of_measurement": "genres" + }, + "info_total_songs": { + "name": "Total songs", + "unit_of_measurement": "songs" + }, + "player_count": { + "name": "Player count", + "unit_of_measurement": "players" + }, + "other_player_count": { + "name": "Player count off service", + "unit_of_measurement": "[%key:component::squeezebox::entity::sensor::player_count::unit_of_measurement%]" + } } } } diff --git a/homeassistant/components/srp_energy/strings.json b/homeassistant/components/srp_energy/strings.json index 191d10a70dd..eca4f465435 100644 --- a/homeassistant/components/srp_energy/strings.json +++ b/homeassistant/components/srp_energy/strings.json @@ -17,7 +17,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "unknown": "Unexpected error" } }, "entity": { diff --git a/homeassistant/components/ssdp/__init__.py b/homeassistant/components/ssdp/__init__.py index 7ca2f3e9318..ccd69961975 100644 --- a/homeassistant/components/ssdp/__init__.py +++ b/homeassistant/components/ssdp/__init__.py @@ -12,7 +12,7 @@ from ipaddress import IPv4Address, IPv6Address import logging import socket from time import time -from typing import Any +from typing import TYPE_CHECKING, Any from urllib.parse import urljoin import xml.etree.ElementTree as ET @@ -47,6 +47,7 @@ from homeassistant.core import Event, HassJob, HomeAssistant, callback as core_c from homeassistant.data_entry_flow import BaseServiceInfo from homeassistant.helpers import config_validation as cv, discovery_flow from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.instance_id import async_get as async_get_instance_id from homeassistant.helpers.network import NoURLAvailableError, get_url @@ -284,16 +285,13 @@ class IntegrationMatchers: def async_matching_domains(self, info_with_desc: CaseInsensitiveDict) -> set[str]: """Find domains matching the passed CaseInsensitiveDict.""" assert self._match_by_key is not None - domains = set() - for key, matchers_by_key in self._match_by_key.items(): - if not (match_value := info_with_desc.get(key)): - continue - for domain, matcher in matchers_by_key.get(match_value, []): - if domain in domains: - continue - if all(info_with_desc.get(k) == v for (k, v) in matcher.items()): - domains.add(domain) - return domains + return { + domain + for key, matchers_by_key in self._match_by_key.items() + if (match_value := info_with_desc.get(key)) + for domain, matcher in matchers_by_key.get(match_value, ()) + if info_with_desc.items() >= matcher.items() + } class Scanner: @@ -397,6 +395,12 @@ class Scanner: self.hass, self.async_scan, SCAN_INTERVAL, name="SSDP scanner" ) + async_dispatcher_connect( + self.hass, + config_entries.signal_discovered_config_entry_removed(DOMAIN), + self._handle_config_entry_removed, + ) + # Trigger the initial-scan. 
await self.async_scan() @@ -505,6 +509,7 @@ class Scanner: dst: DeviceOrServiceType, source: SsdpSource, info_desc: Mapping[str, Any], + skip_callbacks: bool = False, ) -> None: """Handle a device/service change.""" matching_domains: set[str] = set() @@ -529,7 +534,7 @@ class Scanner: ) discovery_info.x_homeassistant_matching_domains = matching_domains - if callbacks: + if callbacks and not skip_callbacks: ssdp_change = SSDP_SOURCE_SSDP_CHANGE_MAPPING[source] _async_process_callbacks(self.hass, callbacks, discovery_info, ssdp_change) @@ -540,14 +545,20 @@ class Scanner: _LOGGER.debug("Discovery info: %s", discovery_info) - location = ssdp_device.location + if not matching_domains: + return # avoid creating DiscoveryKey if there are no matches + + discovery_key = discovery_flow.DiscoveryKey( + domain=DOMAIN, key=ssdp_device.udn, version=1 + ) for domain in matching_domains: - _LOGGER.debug("Discovered %s at %s", domain, location) + _LOGGER.debug("Discovered %s at %s", domain, ssdp_device.location) discovery_flow.async_create_flow( self.hass, domain, {"source": config_entries.SOURCE_SSDP}, discovery_info, + discovery_key=discovery_key, ) def _async_dismiss_discoveries( @@ -568,14 +579,13 @@ class Scanner: ) -> Mapping[str, str]: """Get description dict.""" assert self._description_cache is not None + cache = self._description_cache - has_description, description = self._description_cache.peek_description_dict( - location - ) + has_description, description = cache.peek_description_dict(location) if has_description: return description or {} - return await self._description_cache.async_get_description_dict(location) or {} + return await cache.async_get_description_dict(location) or {} async def _async_headers_to_discovery_info( self, ssdp_device: SsdpDevice, headers: CaseInsensitiveDict @@ -584,8 +594,6 @@ class Scanner: Building this is a bit expensive so we only do it on demand. 
""" - assert self._description_cache is not None - location = headers["location"] info_desc = await self._async_get_description_dict(location) return discovery_info_from_headers_and_description( @@ -621,6 +629,37 @@ class Scanner: if ssdp_device.udn == udn ] + @core_callback + def _handle_config_entry_removed( + self, + entry: config_entries.ConfigEntry, + ) -> None: + """Handle config entry changes.""" + if TYPE_CHECKING: + assert self._description_cache is not None + cache = self._description_cache + for discovery_key in entry.discovery_keys[DOMAIN]: + if discovery_key.version != 1 or not isinstance(discovery_key.key, str): + continue + udn = discovery_key.key + _LOGGER.debug("Rediscover service %s", udn) + + for ssdp_device in self._ssdp_devices: + if ssdp_device.udn != udn: + continue + for dst in ssdp_device.all_combined_headers: + has_cached_desc, info_desc = cache.peek_description_dict( + ssdp_device.location + ) + if has_cached_desc and info_desc: + self._ssdp_listener_process_callback( + ssdp_device, + dst, + SsdpSource.SEARCH, + info_desc, + True, # Skip integration callbacks + ) + def discovery_info_from_headers_and_description( ssdp_device: SsdpDevice, diff --git a/homeassistant/components/ssdp/manifest.json b/homeassistant/components/ssdp/manifest.json index 8b94b8c5895..e9d4f57d5fb 100644 --- a/homeassistant/components/ssdp/manifest.json +++ b/homeassistant/components/ssdp/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["async_upnp_client"], "quality_scale": "internal", - "requirements": ["async-upnp-client==0.40.0"] + "requirements": ["async-upnp-client==0.41.0"] } diff --git a/homeassistant/components/starline/binary_sensor.py b/homeassistant/components/starline/binary_sensor.py index 0383fc8ade6..69f0ae06d02 100644 --- a/homeassistant/components/starline/binary_sensor.py +++ b/homeassistant/components/starline/binary_sensor.py @@ -41,6 +41,11 @@ BINARY_SENSOR_TYPES: tuple[BinarySensorEntityDescription, ...] = ( translation_key="doors", device_class=BinarySensorDeviceClass.LOCK, ), + BinarySensorEntityDescription( + key="run", + translation_key="is_running", + device_class=BinarySensorDeviceClass.RUNNING, + ), BinarySensorEntityDescription( key="hfree", translation_key="handsfree", diff --git a/homeassistant/components/starline/button.py b/homeassistant/components/starline/button.py index ea1a27adc15..6fb307cda74 100644 --- a/homeassistant/components/starline/button.py +++ b/homeassistant/components/starline/button.py @@ -16,6 +16,20 @@ BUTTON_TYPES: tuple[ButtonEntityDescription, ...] 
= ( key="poke", translation_key="horn", ), + ButtonEntityDescription( + key="panic", + translation_key="panic", + entity_registry_enabled_default=False, + ), + *[ + ButtonEntityDescription( + key=f"flex_{i}", + translation_key="flex", + translation_placeholders={"num": str(i)}, + entity_registry_enabled_default=False, + ) + for i in range(1, 10) + ], ) diff --git a/homeassistant/components/starline/config_flow.py b/homeassistant/components/starline/config_flow.py index e27885e6c60..a899b562f36 100644 --- a/homeassistant/components/starline/config_flow.py +++ b/homeassistant/components/starline/config_flow.py @@ -2,8 +2,6 @@ from __future__ import annotations -from typing import Any - from starline import StarlineAuth import voluptuous as vol @@ -33,6 +31,11 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _app_code: str + _app_token: str + _captcha_image: str + _phone_number: str + def __init__(self) -> None: """Initialize flow.""" self._app_id: str | None = None @@ -41,59 +44,63 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): self._password: str | None = None self._mfa_code: str | None = None - self._app_code = None - self._app_token = None self._user_slid = None self._user_id = None self._slnet_token = None self._slnet_token_expires = None - self._captcha_image = None - self._captcha_sid = None - self._captcha_code = None - self._phone_number = None + self._captcha_sid: str | None = None + self._captcha_code: str | None = None self._auth = StarlineAuth() async def async_step_user( - self, user_input: dict[str, Any] | None = None + self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" return await self.async_step_auth_app(user_input) - async def async_step_auth_app(self, user_input=None, error=None): + async def async_step_auth_app( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Authenticate application step.""" if user_input is not None: self._app_id = user_input[CONF_APP_ID] self._app_secret = user_input[CONF_APP_SECRET] - return await self._async_authenticate_app(error) - return self._async_form_auth_app(error) + return await self._async_authenticate_app() + return self._async_form_auth_app() - async def async_step_auth_user(self, user_input=None, error=None): + async def async_step_auth_user( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Authenticate user step.""" if user_input is not None: self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] - return await self._async_authenticate_user(error) - return self._async_form_auth_user(error) + return await self._async_authenticate_user() + return self._async_form_auth_user() - async def async_step_auth_mfa(self, user_input=None, error=None): + async def async_step_auth_mfa( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Authenticate mfa step.""" if user_input is not None: self._mfa_code = user_input[CONF_MFA_CODE] - return await self._async_authenticate_user(error) - return self._async_form_auth_mfa(error) + return await self._async_authenticate_user() + return self._async_form_auth_mfa() - async def async_step_auth_captcha(self, user_input=None, error=None): + async def async_step_auth_captcha( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Captcha verification step.""" if user_input is not None: self._captcha_code = user_input[CONF_CAPTCHA_CODE] - return await self._async_authenticate_user(error) 
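The Starline config-flow changes above all follow one pattern: the async_step_* methods keep a uniform (self, user_input) signature and return ConfigFlowResult, while only the @callback form builders accept an optional error code to re-display. A minimal, self-contained sketch of that shape, using plain dicts in place of ConfigFlowResult and a made-up credential check (none of these names are Starline or Home Assistant API):

# Standalone sketch: the step keeps a uniform (user_input) signature and only
# the form helper accepts an error code to show on re-display.
def _form(error: str | None = None) -> dict:
    return {"type": "form", "errors": {"base": error} if error else {}}


def step_auth_user(user_input: dict | None = None) -> dict:
    if user_input is None:
        return _form()  # first display: no error shown
    if user_input.get("password") != "correct-password":  # stand-in for the real auth call
        return _form("error_auth_user")  # re-show the same form with an error code
    return {"type": "create_entry"}


print(step_auth_user())                        # form with no errors
print(step_auth_user({"password": "wrong"}))   # form re-shown with error_auth_user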
- return self._async_form_auth_captcha(error) + return await self._async_authenticate_user() + return self._async_form_auth_captcha() @callback - def _async_form_auth_app(self, error=None): + def _async_form_auth_app(self, error: str | None = None) -> ConfigFlowResult: """Authenticate application form.""" - errors = {} + errors: dict[str, str] = {} if error is not None: errors["base"] = error @@ -113,7 +120,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): ) @callback - def _async_form_auth_user(self, error=None): + def _async_form_auth_user(self, error: str | None = None) -> ConfigFlowResult: """Authenticate user form.""" errors = {} if error is not None: @@ -135,7 +142,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): ) @callback - def _async_form_auth_mfa(self, error=None): + def _async_form_auth_mfa(self, error: str | None = None) -> ConfigFlowResult: """Authenticate mfa form.""" errors = {} if error is not None: @@ -155,7 +162,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): ) @callback - def _async_form_auth_captcha(self, error=None): + def _async_form_auth_captcha(self, error: str | None = None) -> ConfigFlowResult: """Captcha verification form.""" errors = {} if error is not None: @@ -176,7 +183,9 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): }, ) - async def _async_authenticate_app(self, error=None): + async def _async_authenticate_app( + self, error: str | None = None + ) -> ConfigFlowResult: """Authenticate application.""" try: self._app_code = await self.hass.async_add_executor_job( @@ -190,7 +199,9 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.error("Error auth StarLine: %s", err) return self._async_form_auth_app(ERROR_AUTH_APP) - async def _async_authenticate_user(self, error=None): + async def _async_authenticate_user( + self, error: str | None = None + ) -> ConfigFlowResult: """Authenticate user.""" try: state, data = await self.hass.async_add_executor_job( @@ -223,7 +234,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.error("Error auth user: %s", err) return self._async_form_auth_user(ERROR_AUTH_USER) - async def _async_get_entry(self): + async def _async_get_entry(self) -> ConfigFlowResult: """Create entry.""" ( self._slnet_token, diff --git a/homeassistant/components/starline/device_tracker.py b/homeassistant/components/starline/device_tracker.py index 11b0d433787..610317b72c3 100644 --- a/homeassistant/components/starline/device_tracker.py +++ b/homeassistant/components/starline/device_tracker.py @@ -1,6 +1,6 @@ """StarLine device tracker.""" -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -56,8 +56,3 @@ class StarlineDeviceTracker(StarlineEntity, TrackerEntity, RestoreEntity): def longitude(self): """Return longitude value of the device.""" return self._device.position["y"] - - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS diff --git a/homeassistant/components/starline/icons.json b/homeassistant/components/starline/icons.json index b98c4178af1..d7d20ae03bd 100644 --- a/homeassistant/components/starline/icons.json +++ b/homeassistant/components/starline/icons.json @@ -12,11 +12,20 @@ }, "moving_ban": { "default": "mdi:car-off" + }, + 
"is_running": { + "default": "mdi:speedometer" } }, "button": { "horn": { "default": "mdi:bullhorn-outline" + }, + "flex": { + "default": "mdi:star-circle-outline" + }, + "panic": { + "default": "mdi:alarm-note" } }, "device_tracker": { @@ -60,9 +69,6 @@ "on": "mdi:access-point-network" } }, - "horn": { - "default": "mdi:bullhorn-outline" - }, "service_mode": { "default": "mdi:car-wrench", "state": { @@ -72,8 +78,14 @@ } }, "services": { - "update_state": "mdi:reload", - "set_scan_interval": "mdi:timer", - "set_scan_obd_interval": "mdi:timer" + "update_state": { + "service": "mdi:reload" + }, + "set_scan_interval": { + "service": "mdi:timer" + }, + "set_scan_obd_interval": { + "service": "mdi:timer" + } } } diff --git a/homeassistant/components/starline/strings.json b/homeassistant/components/starline/strings.json index 14a8ed5a035..0a30ea5b5be 100644 --- a/homeassistant/components/starline/strings.json +++ b/homeassistant/components/starline/strings.json @@ -63,6 +63,9 @@ }, "moving_ban": { "name": "Moving ban" + }, + "is_running": { + "name": "Running" } }, "device_tracker": { @@ -121,6 +124,12 @@ "button": { "horn": { "name": "Horn" + }, + "flex": { + "name": "Flex logic {num}" + }, + "panic": { + "name": "Panic mode" } } }, diff --git a/homeassistant/components/starline/switch.py b/homeassistant/components/starline/switch.py index 1b48a72c732..05193d98c8a 100644 --- a/homeassistant/components/starline/switch.py +++ b/homeassistant/components/starline/switch.py @@ -78,8 +78,6 @@ class StarlineSwitch(StarlineEntity, SwitchEntity): @property def is_on(self): """Return True if entity is on.""" - if self._key == "poke": - return False return self._device.car_state.get(self._key) def turn_on(self, **kwargs: Any) -> None: @@ -88,6 +86,4 @@ class StarlineSwitch(StarlineEntity, SwitchEntity): def turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - if self._key == "poke": - return self._account.api.set_car_state(self._device.device_id, self._key, False) diff --git a/homeassistant/components/starlingbank/manifest.json b/homeassistant/components/starlingbank/manifest.json index ef9be6d6da8..f7ab72c4379 100644 --- a/homeassistant/components/starlingbank/manifest.json +++ b/homeassistant/components/starlingbank/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/starlingbank", "iot_class": "cloud_polling", "loggers": ["starlingbank"], + "quality_scale": "legacy", "requirements": ["starlingbank==3.2"] } diff --git a/homeassistant/components/starlingbank/sensor.py b/homeassistant/components/starlingbank/sensor.py index fd351416c28..282323d8b7b 100644 --- a/homeassistant/components/starlingbank/sensor.py +++ b/homeassistant/components/starlingbank/sensor.py @@ -92,9 +92,8 @@ class StarlingBalanceSensor(SensorEntity): @property def name(self): """Return the name of the sensor.""" - return "{} {}".format( - self._account_name, self._balance_data_type.replace("_", " ").capitalize() - ) + balance_data_type = self._balance_data_type.replace("_", " ").capitalize() + return f"{self._account_name} {balance_data_type}" @property def native_value(self): diff --git a/homeassistant/components/starlink/coordinator.py b/homeassistant/components/starlink/coordinator.py index a891941fb8e..89d03a4fadc 100644 --- a/homeassistant/components/starlink/coordinator.py +++ b/homeassistant/components/starlink/coordinator.py @@ -14,8 +14,11 @@ from starlink_grpc import ( GrpcError, LocationDict, ObstructionDict, + PowerDict, StatusDict, + UsageDict, get_sleep_config, + 
history_stats, location_data, reboot, set_sleep_config, @@ -39,6 +42,8 @@ class StarlinkData: status: StatusDict obstruction: ObstructionDict alert: AlertDict + usage: UsageDict + consumption: PowerDict class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]): @@ -57,11 +62,14 @@ class StarlinkUpdateCoordinator(DataUpdateCoordinator[StarlinkData]): def _get_starlink_data(self) -> StarlinkData: """Retrieve Starlink data.""" - channel_context = self.channel_context - status = status_data(channel_context) - location = location_data(channel_context) - sleep = get_sleep_config(channel_context) - return StarlinkData(location, sleep, *status) + context = self.channel_context + location = location_data(context) + sleep = get_sleep_config(context) + status, obstruction, alert = status_data(context) + usage, consumption = history_stats(parse_samples=-1, context=context)[-2:] + return StarlinkData( + location, sleep, status, obstruction, alert, usage, consumption + ) async def _async_update_data(self) -> StarlinkData: async with asyncio.timeout(4): diff --git a/homeassistant/components/starlink/device_tracker.py b/homeassistant/components/starlink/device_tracker.py index 34769d687ff..5174be19760 100644 --- a/homeassistant/components/starlink/device_tracker.py +++ b/homeassistant/components/starlink/device_tracker.py @@ -4,10 +4,12 @@ from collections.abc import Callable from dataclasses import dataclass from typing import Any -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import ( + TrackerEntity, + TrackerEntityDescription, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ATTR_ALTITUDE, DOMAIN @@ -28,7 +30,7 @@ async def async_setup_entry( @dataclass(frozen=True, kw_only=True) -class StarlinkDeviceTrackerEntityDescription(EntityDescription): +class StarlinkDeviceTrackerEntityDescription(TrackerEntityDescription): """Describes a Starlink button entity.""" latitude_fn: Callable[[StarlinkData], float] @@ -53,11 +55,6 @@ class StarlinkDeviceTrackerEntity(StarlinkEntity, TrackerEntity): entity_description: StarlinkDeviceTrackerEntityDescription - @property - def source_type(self) -> SourceType | str: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS - @property def latitude(self) -> float | None: """Return latitude value of the device.""" diff --git a/homeassistant/components/starlink/diagnostics.py b/homeassistant/components/starlink/diagnostics.py index 88e6485cf77..c619458b1dd 100644 --- a/homeassistant/components/starlink/diagnostics.py +++ b/homeassistant/components/starlink/diagnostics.py @@ -3,7 +3,7 @@ from dataclasses import asdict from typing import Any -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/starlink/icons.json b/homeassistant/components/starlink/icons.json index 65cb273e24b..02de62aeb8a 100644 --- a/homeassistant/components/starlink/icons.json +++ b/homeassistant/components/starlink/icons.json @@ -18,6 +18,12 @@ }, "last_boot_time": { "default": "mdi:clock" + }, + "upload": { + "default": "mdi:upload" +
}, + "download": { + "default": "mdi:download" } } } diff --git a/homeassistant/components/starlink/manifest.json b/homeassistant/components/starlink/manifest.json index b8733dd2435..15bad3ebc2e 100644 --- a/homeassistant/components/starlink/manifest.json +++ b/homeassistant/components/starlink/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/starlink", "iot_class": "local_polling", - "quality_scale": "silver", - "requirements": ["starlink-grpc-core==1.1.3"] + "requirements": ["starlink-grpc-core==1.2.2"] } diff --git a/homeassistant/components/starlink/sensor.py b/homeassistant/components/starlink/sensor.py index 21f2400022c..5481e310fbd 100644 --- a/homeassistant/components/starlink/sensor.py +++ b/homeassistant/components/starlink/sensor.py @@ -18,6 +18,9 @@ from homeassistant.const import ( PERCENTAGE, EntityCategory, UnitOfDataRate, + UnitOfEnergy, + UnitOfInformation, + UnitOfPower, UnitOfTime, ) from homeassistant.core import HomeAssistant @@ -120,4 +123,36 @@ SENSORS: tuple[StarlinkSensorEntityDescription, ...] = ( native_unit_of_measurement=PERCENTAGE, value_fn=lambda data: data.status["pop_ping_drop_rate"] * 100, ), + StarlinkSensorEntityDescription( + key="upload", + translation_key="upload", + device_class=SensorDeviceClass.DATA_SIZE, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfInformation.BYTES, + suggested_unit_of_measurement=UnitOfInformation.GIGABYTES, + value_fn=lambda data: data.usage["upload_usage"], + ), + StarlinkSensorEntityDescription( + key="download", + translation_key="download", + device_class=SensorDeviceClass.DATA_SIZE, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfInformation.BYTES, + suggested_unit_of_measurement=UnitOfInformation.GIGABYTES, + value_fn=lambda data: data.usage["download_usage"], + ), + StarlinkSensorEntityDescription( + key="power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_fn=lambda data: data.consumption["latest_power"], + ), + StarlinkSensorEntityDescription( + key="energy", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda data: data.consumption["total_energy"], + ), ) diff --git a/homeassistant/components/starlink/strings.json b/homeassistant/components/starlink/strings.json index 36a4f176e70..395b6288c71 100644 --- a/homeassistant/components/starlink/strings.json +++ b/homeassistant/components/starlink/strings.json @@ -70,6 +70,12 @@ }, "ping_drop_rate": { "name": "Ping drop rate" + }, + "upload": { + "name": "Upload" + }, + "download": { + "name": "Download" } }, "switch": { diff --git a/homeassistant/components/startca/manifest.json b/homeassistant/components/startca/manifest.json index 8c74a655ce3..958477c193b 100644 --- a/homeassistant/components/startca/manifest.json +++ b/homeassistant/components/startca/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/startca", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["xmltodict==0.13.0"] } diff --git a/homeassistant/components/statistics/config_flow.py b/homeassistant/components/statistics/config_flow.py index 773c3d1c364..4c78afbde9c 100644 --- a/homeassistant/components/statistics/config_flow.py +++ b/homeassistant/components/statistics/config_flow.py @@ 
-3,14 +3,17 @@ from __future__ import annotations from collections.abc import Mapping +from datetime import timedelta from typing import Any, cast import voluptuous as vol +from homeassistant.components import websocket_api from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import CONF_ENTITY_ID, CONF_NAME -from homeassistant.core import split_entity_id +from homeassistant.core import HomeAssistant, callback, split_entity_id +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.schema_config_entry_flow import ( SchemaCommonFlowHandler, SchemaConfigFlowHandler, @@ -44,6 +47,7 @@ from .sensor import ( DEFAULT_PRECISION, STATS_BINARY_SUPPORT, STATS_NUMERIC_SUPPORT, + StatisticsSensor, ) @@ -53,9 +57,9 @@ async def get_state_characteristics(handler: SchemaCommonFlowHandler) -> vol.Sch split_entity_id(handler.options[CONF_ENTITY_ID])[0] == BINARY_SENSOR_DOMAIN ) if is_binary: - options = STATS_BINARY_SUPPORT + options = list(STATS_BINARY_SUPPORT) else: - options = STATS_NUMERIC_SUPPORT + options = list(STATS_NUMERIC_SUPPORT) return vol.Schema( { @@ -129,12 +133,14 @@ CONFIG_FLOW = { "options": SchemaFlowFormStep( schema=DATA_SCHEMA_OPTIONS, validate_user_input=validate_options, + preview="statistics", ), } OPTIONS_FLOW = { "init": SchemaFlowFormStep( DATA_SCHEMA_OPTIONS, validate_user_input=validate_options, + preview="statistics", ), } @@ -148,3 +154,86 @@ class StatisticsConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): def async_config_entry_title(self, options: Mapping[str, Any]) -> str: """Return config entry title.""" return cast(str, options[CONF_NAME]) + + @staticmethod + async def async_setup_preview(hass: HomeAssistant) -> None: + """Set up preview WS API.""" + websocket_api.async_register_command(hass, ws_start_preview) + + +@websocket_api.websocket_command( + { + vol.Required("type"): "statistics/start_preview", + vol.Required("flow_id"): str, + vol.Required("flow_type"): vol.Any("config_flow", "options_flow"), + vol.Required("user_input"): dict, + } +) +@websocket_api.async_response +async def ws_start_preview( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Generate a preview.""" + + if msg["flow_type"] == "config_flow": + flow_status = hass.config_entries.flow.async_get(msg["flow_id"]) + flow_sets = hass.config_entries.flow._handler_progress_index.get( # noqa: SLF001 + flow_status["handler"] + ) + options = {} + assert flow_sets + for active_flow in flow_sets: + options = active_flow._common_handler.options # type: ignore [attr-defined] # noqa: SLF001 + config_entry = hass.config_entries.async_get_entry(flow_status["handler"]) + entity_id = options[CONF_ENTITY_ID] + name = options[CONF_NAME] + state_characteristic = options[CONF_STATE_CHARACTERISTIC] + else: + flow_status = hass.config_entries.options.async_get(msg["flow_id"]) + config_entry = hass.config_entries.async_get_entry(flow_status["handler"]) + if not config_entry: + raise HomeAssistantError("Config entry not found") + entity_id = config_entry.options[CONF_ENTITY_ID] + name = config_entry.options[CONF_NAME] + state_characteristic = config_entry.options[CONF_STATE_CHARACTERISTIC] + + @callback + def async_preview_updated(state: str, attributes: Mapping[str, Any]) -> None: + """Forward config entry state events to websocket.""" + connection.send_message( + websocket_api.event_message( + msg["id"], 
{"attributes": attributes, "state": state} + ) + ) + + sampling_size = msg["user_input"].get(CONF_SAMPLES_MAX_BUFFER_SIZE) + if sampling_size: + sampling_size = int(sampling_size) + + max_age = None + if max_age_input := msg["user_input"].get(CONF_MAX_AGE): + max_age = timedelta( + hours=max_age_input["hours"], + minutes=max_age_input["minutes"], + seconds=max_age_input["seconds"], + ) + preview_entity = StatisticsSensor( + hass, + entity_id, + name, + None, + state_characteristic, + sampling_size, + max_age, + msg["user_input"].get(CONF_KEEP_LAST_SAMPLE), + msg["user_input"].get(CONF_PRECISION), + msg["user_input"].get(CONF_PERCENTILE), + ) + preview_entity.hass = hass + + connection.send_result(msg["id"]) + connection.subscriptions[msg["id"]] = await preview_entity.async_start_preview( + async_preview_updated + ) diff --git a/homeassistant/components/statistics/icons.json b/homeassistant/components/statistics/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/statistics/icons.json +++ b/homeassistant/components/statistics/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/statistics/manifest.json b/homeassistant/components/statistics/manifest.json index 24d4b4914cb..8eaed552edd 100644 --- a/homeassistant/components/statistics/manifest.json +++ b/homeassistant/components/statistics/manifest.json @@ -2,7 +2,7 @@ "domain": "statistics", "name": "Statistics", "after_dependencies": ["recorder"], - "codeowners": ["@ThomDietrich"], + "codeowners": ["@ThomDietrich", "@gjohansson-ST"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/statistics", "integration_type": "helper", diff --git a/homeassistant/components/statistics/sensor.py b/homeassistant/components/statistics/sensor.py index ca1d75b57ed..5252c23fd3d 100644 --- a/homeassistant/components/statistics/sensor.py +++ b/homeassistant/components/statistics/sensor.py @@ -3,12 +3,13 @@ from __future__ import annotations from collections import deque -from collections.abc import Callable +from collections.abc import Callable, Mapping import contextlib from datetime import datetime, timedelta import logging import math import statistics +import time from typing import Any, cast import voluptuous as vol @@ -17,6 +18,7 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAI from homeassistant.components.recorder import get_instance, history from homeassistant.components.sensor import ( DEVICE_CLASS_STATE_CLASSES, + DEVICE_CLASS_UNITS, PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, @@ -37,6 +39,7 @@ from homeassistant.core import ( CALLBACK_TYPE, Event, EventStateChangedData, + EventStateReportedData, HomeAssistant, State, callback, @@ -48,10 +51,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import ( async_track_point_in_utc_time, async_track_state_change_event, + async_track_state_report_event, ) from homeassistant.helpers.reload import async_setup_reload_service -from homeassistant.helpers.start import async_at_start -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import dt as dt_util from homeassistant.util.enum import try_parse_enum @@ -95,47 +98,373 @@ STAT_VALUE_MAX = "value_max" STAT_VALUE_MIN = "value_min" STAT_VARIANCE = "variance" 
+ +def _callable_characteristic_fn( + characteristic: str, binary: bool +) -> Callable[[deque[bool | float], deque[float], int], float | int | datetime | None]: + """Return the function callable of one characteristic function.""" + Callable[[deque[bool | float], deque[datetime], int], datetime | int | float | None] + if binary: + return STATS_BINARY_SUPPORT[characteristic] + return STATS_NUMERIC_SUPPORT[characteristic] + + +# Statistics for numeric sensor + + +def _stat_average_linear( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) == 1: + return states[0] + if len(states) >= 2: + area: float = 0 + for i in range(1, len(states)): + area += 0.5 * (states[i] + states[i - 1]) * (ages[i] - ages[i - 1]) + age_range_seconds = ages[-1] - ages[0] + return area / age_range_seconds + return None + + +def _stat_average_step( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) == 1: + return states[0] + if len(states) >= 2: + area: float = 0 + for i in range(1, len(states)): + area += states[i - 1] * (ages[i] - ages[i - 1]) + age_range_seconds = ages[-1] - ages[0] + return area / age_range_seconds + return None + + +def _stat_average_timeless( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + return _stat_mean(states, ages, percentile) + + +def _stat_change( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 0: + return states[-1] - states[0] + return None + + +def _stat_change_sample( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 1: + return (states[-1] - states[0]) / (len(states) - 1) + return None + + +def _stat_change_second( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 1: + age_range_seconds = ages[-1] - ages[0] + if age_range_seconds > 0: + return (states[-1] - states[0]) / age_range_seconds + return None + + +def _stat_count( + states: deque[bool | float], ages: deque[float], percentile: int +) -> int | None: + return len(states) + + +def _stat_datetime_newest( + states: deque[bool | float], ages: deque[float], percentile: int +) -> datetime | None: + if len(states) > 0: + return dt_util.utc_from_timestamp(ages[-1]) + return None + + +def _stat_datetime_oldest( + states: deque[bool | float], ages: deque[float], percentile: int +) -> datetime | None: + if len(states) > 0: + return dt_util.utc_from_timestamp(ages[0]) + return None + + +def _stat_datetime_value_max( + states: deque[bool | float], ages: deque[float], percentile: int +) -> datetime | None: + if len(states) > 0: + return dt_util.utc_from_timestamp(ages[states.index(max(states))]) + return None + + +def _stat_datetime_value_min( + states: deque[bool | float], ages: deque[float], percentile: int +) -> datetime | None: + if len(states) > 0: + return dt_util.utc_from_timestamp(ages[states.index(min(states))]) + return None + + +def _stat_distance_95_percent_of_values( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) >= 1: + return ( + 2 * 1.96 * cast(float, _stat_standard_deviation(states, ages, percentile)) + ) + return None + + +def _stat_distance_99_percent_of_values( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) >= 1: + return ( + 2 * 2.58 * cast(float, _stat_standard_deviation(states, ages, percentile)) 
+ ) + return None + + +def _stat_distance_absolute( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 0: + return max(states) - min(states) + return None + + +def _stat_mean( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 0: + return statistics.mean(states) + return None + + +def _stat_mean_circular( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 0: + sin_sum = sum(math.sin(math.radians(x)) for x in states) + cos_sum = sum(math.cos(math.radians(x)) for x in states) + return (math.degrees(math.atan2(sin_sum, cos_sum)) + 360) % 360 + return None + + +def _stat_median( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 0: + return statistics.median(states) + return None + + +def _stat_noisiness( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return cast(float, _stat_sum_differences(states, ages, percentile)) / ( + len(states) - 1 + ) + return None + + +def _stat_percentile( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) == 1: + return states[0] + if len(states) >= 2: + percentiles = statistics.quantiles(states, n=100, method="exclusive") + return percentiles[percentile - 1] + return None + + +def _stat_standard_deviation( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return statistics.stdev(states) + return None + + +def _stat_sum( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 0: + return sum(states) + return None + + +def _stat_sum_differences( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return sum( + abs(j - i) for i, j in zip(list(states), list(states)[1:], strict=False) + ) + return None + + +def _stat_sum_differences_nonnegative( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return sum( + (j - i if j >= i else j - 0) + for i, j in zip(list(states), list(states)[1:], strict=False) + ) + return None + + +def _stat_total( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + return _stat_sum(states, ages, percentile) + + +def _stat_value_max( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 0: + return max(states) + return None + + +def _stat_value_min( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 0: + return min(states) + return None + + +def _stat_variance( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) == 1: + return 0.0 + if len(states) >= 2: + return statistics.variance(states) + return None + + +# Statistics for binary sensor + + +def _stat_binary_average_step( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) == 1: + return 100.0 * int(states[0] is True) + if len(states) >= 2: + on_seconds: float = 0 + for i in range(1, len(states)): + if states[i - 1] is True: + on_seconds += ages[i] 
- ages[i - 1] + age_range_seconds = ages[-1] - ages[0] + return 100 / age_range_seconds * on_seconds + return None + + +def _stat_binary_average_timeless( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + return _stat_binary_mean(states, ages, percentile) + + +def _stat_binary_count( + states: deque[bool | float], ages: deque[float], percentile: int +) -> int | None: + return len(states) + + +def _stat_binary_count_on( + states: deque[bool | float], ages: deque[float], percentile: int +) -> int | None: + return states.count(True) + + +def _stat_binary_count_off( + states: deque[bool | float], ages: deque[float], percentile: int +) -> int | None: + return states.count(False) + + +def _stat_binary_datetime_newest( + states: deque[bool | float], ages: deque[float], percentile: int +) -> datetime | None: + return _stat_datetime_newest(states, ages, percentile) + + +def _stat_binary_datetime_oldest( + states: deque[bool | float], ages: deque[float], percentile: int +) -> datetime | None: + return _stat_datetime_oldest(states, ages, percentile) + + +def _stat_binary_mean( + states: deque[bool | float], ages: deque[float], percentile: int +) -> float | None: + if len(states) > 0: + return 100.0 / len(states) * states.count(True) + return None + + # Statistics supported by a sensor source (numeric) STATS_NUMERIC_SUPPORT = { - STAT_AVERAGE_LINEAR, - STAT_AVERAGE_STEP, - STAT_AVERAGE_TIMELESS, - STAT_CHANGE_SAMPLE, - STAT_CHANGE_SECOND, - STAT_CHANGE, - STAT_COUNT, - STAT_DATETIME_NEWEST, - STAT_DATETIME_OLDEST, - STAT_DATETIME_VALUE_MAX, - STAT_DATETIME_VALUE_MIN, - STAT_DISTANCE_95P, - STAT_DISTANCE_99P, - STAT_DISTANCE_ABSOLUTE, - STAT_MEAN, - STAT_MEAN_CIRCULAR, - STAT_MEDIAN, - STAT_NOISINESS, - STAT_PERCENTILE, - STAT_STANDARD_DEVIATION, - STAT_SUM, - STAT_SUM_DIFFERENCES, - STAT_SUM_DIFFERENCES_NONNEGATIVE, - STAT_TOTAL, - STAT_VALUE_MAX, - STAT_VALUE_MIN, - STAT_VARIANCE, + STAT_AVERAGE_LINEAR: _stat_average_linear, + STAT_AVERAGE_STEP: _stat_average_step, + STAT_AVERAGE_TIMELESS: _stat_average_timeless, + STAT_CHANGE_SAMPLE: _stat_change_sample, + STAT_CHANGE_SECOND: _stat_change_second, + STAT_CHANGE: _stat_change, + STAT_COUNT: _stat_count, + STAT_DATETIME_NEWEST: _stat_datetime_newest, + STAT_DATETIME_OLDEST: _stat_datetime_oldest, + STAT_DATETIME_VALUE_MAX: _stat_datetime_value_max, + STAT_DATETIME_VALUE_MIN: _stat_datetime_value_min, + STAT_DISTANCE_95P: _stat_distance_95_percent_of_values, + STAT_DISTANCE_99P: _stat_distance_99_percent_of_values, + STAT_DISTANCE_ABSOLUTE: _stat_distance_absolute, + STAT_MEAN: _stat_mean, + STAT_MEAN_CIRCULAR: _stat_mean_circular, + STAT_MEDIAN: _stat_median, + STAT_NOISINESS: _stat_noisiness, + STAT_PERCENTILE: _stat_percentile, + STAT_STANDARD_DEVIATION: _stat_standard_deviation, + STAT_SUM: _stat_sum, + STAT_SUM_DIFFERENCES: _stat_sum_differences, + STAT_SUM_DIFFERENCES_NONNEGATIVE: _stat_sum_differences_nonnegative, + STAT_TOTAL: _stat_total, + STAT_VALUE_MAX: _stat_value_max, + STAT_VALUE_MIN: _stat_value_min, + STAT_VARIANCE: _stat_variance, } # Statistics supported by a binary_sensor source STATS_BINARY_SUPPORT = { - STAT_AVERAGE_STEP, - STAT_AVERAGE_TIMELESS, - STAT_COUNT, - STAT_COUNT_BINARY_ON, - STAT_COUNT_BINARY_OFF, - STAT_DATETIME_NEWEST, - STAT_DATETIME_OLDEST, - STAT_MEAN, + STAT_AVERAGE_STEP: _stat_binary_average_step, + STAT_AVERAGE_TIMELESS: _stat_binary_average_timeless, + STAT_COUNT: _stat_binary_count, + STAT_COUNT_BINARY_ON: _stat_binary_count_on, + STAT_COUNT_BINARY_OFF: 
_stat_binary_count_off, + STAT_DATETIME_NEWEST: _stat_binary_datetime_newest, + STAT_DATETIME_OLDEST: _stat_binary_datetime_oldest, + STAT_MEAN: _stat_binary_mean, } STATS_NOT_A_NUMBER = { @@ -296,12 +625,8 @@ async def async_setup_entry( sampling_size = int(sampling_size) max_age = None - if max_age_input := entry.options.get(CONF_MAX_AGE): - max_age = timedelta( - hours=max_age_input["hours"], - minutes=max_age_input["minutes"], - seconds=max_age_input["seconds"], - ) + if max_age := entry.options.get(CONF_MAX_AGE): + max_age = timedelta(**max_age) async_add_entities( [ @@ -354,64 +679,119 @@ class StatisticsSensor(SensorEntity): ) self._state_characteristic: str = state_characteristic self._samples_max_buffer_size: int | None = samples_max_buffer_size - self._samples_max_age: timedelta | None = samples_max_age + self._samples_max_age: float | None = ( + samples_max_age.total_seconds() if samples_max_age else None + ) self.samples_keep_last: bool = samples_keep_last self._precision: int = precision self._percentile: int = percentile - self._value: StateType | datetime = None - self._unit_of_measurement: str | None = None - self._available: bool = False + self._attr_available: bool = False - self.states: deque[float | bool] = deque(maxlen=self._samples_max_buffer_size) - self.ages: deque[datetime] = deque(maxlen=self._samples_max_buffer_size) - self.attributes: dict[str, StateType] = {} + self.states: deque[float | bool] = deque(maxlen=samples_max_buffer_size) + self.ages: deque[float] = deque(maxlen=samples_max_buffer_size) + self._attr_extra_state_attributes = {} - self._state_characteristic_fn: Callable[[], StateType | datetime] = ( - self._callable_characteristic_fn(self._state_characteristic) - ) + self._state_characteristic_fn: Callable[ + [deque[bool | float], deque[float], int], + float | int | datetime | None, + ] = _callable_characteristic_fn(state_characteristic, self.is_binary) self._update_listener: CALLBACK_TYPE | None = None + self._preview_callback: Callable[[str, Mapping[str, Any]], None] | None = None - @callback - def _async_stats_sensor_state_listener( + async def async_start_preview( self, - event: Event[EventStateChangedData], + preview_callback: Callable[[str, Mapping[str, Any]], None], + ) -> CALLBACK_TYPE: + """Render a preview.""" + # abort early if there is no entity_id + # as without we can't track changes + # or either size or max_age is not set + if not self._source_entity_id or ( + self._samples_max_buffer_size is None and self._samples_max_age is None + ): + self._attr_available = False + calculated_state = self._async_calculate_state() + preview_callback(calculated_state.state, calculated_state.attributes) + return self._call_on_remove_callbacks + + self._preview_callback = preview_callback + + await self._async_stats_sensor_startup() + return self._call_on_remove_callbacks + + def _async_handle_new_state( + self, + reported_state: State | None, ) -> None: """Handle the sensor state changes.""" - if (new_state := event.data["new_state"]) is None: + if (new_state := reported_state) is None: return self._add_state_to_queue(new_state) self._async_purge_update_and_schedule() - self.async_write_ha_state() + + if self._preview_callback: + calculated_state = self._async_calculate_state() + self._preview_callback(calculated_state.state, calculated_state.attributes) + # only write state to the state machine if we are not in preview mode + if not self._preview_callback: + self.async_write_ha_state() @callback - def _async_stats_sensor_startup(self, _: HomeAssistant) 
-> None:
-        """Add listener and get recorded state."""
+    def _async_stats_sensor_state_change_listener(
+        self,
+        event: Event[EventStateChangedData],
+    ) -> None:
+        self._async_handle_new_state(event.data["new_state"])
+
+    @callback
+    def _async_stats_sensor_state_report_listener(
+        self,
+        event: Event[EventStateReportedData],
+    ) -> None:
+        self._async_handle_new_state(event.data["new_state"])
+
+    async def _async_stats_sensor_startup(self) -> None:
+        """Add listener and get recorded state.
+
+        Historical data needs to be loaded from the database first before we
+        can start accepting new incoming changes.
+        This is needed to ensure that the buffer is properly sorted by time.
+        """
         _LOGGER.debug("Startup for %s", self.entity_id)
+        if "recorder" in self.hass.config.components:
+            await self._initialize_from_database()
         self.async_on_remove(
             async_track_state_change_event(
                 self.hass,
                 [self._source_entity_id],
-                self._async_stats_sensor_state_listener,
+                self._async_stats_sensor_state_change_listener,
+            )
+        )
+        self.async_on_remove(
+            async_track_state_report_event(
+                self.hass,
+                [self._source_entity_id],
+                self._async_stats_sensor_state_report_listener,
             )
         )
-        if "recorder" in self.hass.config.components:
-            self.hass.async_create_task(self._initialize_from_database())

     async def async_added_to_hass(self) -> None:
         """Register callbacks."""
-        self.async_on_remove(
-            async_at_start(self.hass, self._async_stats_sensor_startup)
-        )
+        await self._async_stats_sensor_startup()

     def _add_state_to_queue(self, new_state: State) -> None:
         """Add the state to the queue."""
-        self._available = new_state.state != STATE_UNAVAILABLE
+
+        # Attention: it is not safe to store the new_state object,
+        # since the "last_reported" value will be updated over time.
+        # Here we make a copy of the current value, which is okay.
+        self._attr_available = new_state.state != STATE_UNAVAILABLE
         if new_state.state == STATE_UNAVAILABLE:
-            self.attributes[STAT_SOURCE_VALUE_VALID] = None
+            self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = None
             return
         if new_state.state in (STATE_UNKNOWN, None, ""):
-            self.attributes[STAT_SOURCE_VALUE_VALID] = False
+            self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = False
             return

         try:
@@ -420,10 +800,10 @@ class StatisticsSensor(SensorEntity):
             self.states.append(new_state.state == "on")
         else:
             self.states.append(float(new_state.state))
-            self.ages.append(new_state.last_updated)
-            self.attributes[STAT_SOURCE_VALUE_VALID] = True
+            self.ages.append(new_state.last_reported_timestamp)
+            self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = True
         except ValueError:
-            self.attributes[STAT_SOURCE_VALUE_VALID] = False
+            self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = False
             _LOGGER.error(
                 "%s: parsing error. Expected number or binary state, but received '%s'",
                 self.entity_id,
@@ -431,136 +811,157 @@ class StatisticsSensor(SensorEntity):
             )
             return

-        self._unit_of_measurement = self._derive_unit_of_measurement(new_state)
+        self._calculate_state_attributes(new_state)
+
+    def _calculate_state_attributes(self, new_state: State) -> None:
+        """Set the entity state attributes."""
+
+        self._attr_native_unit_of_measurement = self._calculate_unit_of_measurement(
+            new_state
+        )
+        self._attr_device_class = self._calculate_device_class(
+            new_state, self._attr_native_unit_of_measurement
+        )
+        self._attr_state_class = self._calculate_state_class(new_state)
+
+    def _calculate_unit_of_measurement(self, new_state: State) -> str | None:
+        """Return the calculated unit of measurement.
+
+        The unit of measurement is that of the source sensor, adjusted based on the
+        selected state characteristic.
+        """
-    def _derive_unit_of_measurement(self, new_state: State) -> str | None:
         base_unit: str | None = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
-        unit: str | None
-        if self.is_binary and self._state_characteristic in STATS_BINARY_PERCENTAGE:
+        unit: str | None = None
+        stat_type = self._state_characteristic
+        if self.is_binary and stat_type in STATS_BINARY_PERCENTAGE:
             unit = PERCENTAGE
         elif not base_unit:
             unit = None
-        elif self._state_characteristic in STATS_NUMERIC_RETAIN_UNIT:
+        elif stat_type in STATS_NUMERIC_RETAIN_UNIT:
             unit = base_unit
-        elif (
-            self._state_characteristic in STATS_NOT_A_NUMBER
-            or self._state_characteristic
-            in (
-                STAT_COUNT,
-                STAT_COUNT_BINARY_ON,
-                STAT_COUNT_BINARY_OFF,
-            )
+        elif stat_type in STATS_NOT_A_NUMBER or stat_type in (
+            STAT_COUNT,
+            STAT_COUNT_BINARY_ON,
+            STAT_COUNT_BINARY_OFF,
         ):
             unit = None
-        elif self._state_characteristic == STAT_VARIANCE:
+        elif stat_type == STAT_VARIANCE:
             unit = base_unit + "²"
-        elif self._state_characteristic == STAT_CHANGE_SAMPLE:
+        elif stat_type == STAT_CHANGE_SAMPLE:
             unit = base_unit + "/sample"
-        elif self._state_characteristic == STAT_CHANGE_SECOND:
+        elif stat_type == STAT_CHANGE_SECOND:
             unit = base_unit + "/s"
+
         return unit

-    @property
-    def device_class(self) -> SensorDeviceClass | None:
-        """Return the class of this device."""
-        if self._state_characteristic in STATS_DATETIME:
-            return SensorDeviceClass.TIMESTAMP
-        if self._state_characteristic in STATS_NUMERIC_RETAIN_UNIT:
-            source_state = self.hass.states.get(self._source_entity_id)
-            if source_state is None:
-                return None
-            source_device_class = source_state.attributes.get(ATTR_DEVICE_CLASS)
-            if source_device_class is None:
-                return None
-            sensor_device_class = try_parse_enum(SensorDeviceClass, source_device_class)
-            if sensor_device_class is None:
-                return None
-            sensor_state_classes = DEVICE_CLASS_STATE_CLASSES.get(
-                sensor_device_class, set()
-            )
-            if SensorStateClass.MEASUREMENT not in sensor_state_classes:
-                return None
-            return sensor_device_class
-        return None
+    def _calculate_device_class(
+        self, new_state: State, unit: str | None
+    ) -> SensorDeviceClass | None:
+        """Return the calculated device class.

-    @property
-    def state_class(self) -> SensorStateClass | None:
-        """Return the state class of this entity."""
+        The device class is calculated from the state characteristic and the
+        source sensor's device class, and is only kept if the unit of
+        measurement is listed for that device class in DEVICE_CLASS_UNITS.
+        """
+
+        device_class: SensorDeviceClass | None = None
+        stat_type = self._state_characteristic
+        if stat_type in STATS_DATETIME:
+            return SensorDeviceClass.TIMESTAMP
+        if stat_type in STATS_NUMERIC_RETAIN_UNIT:
+            device_class = new_state.attributes.get(ATTR_DEVICE_CLASS)
+            if device_class is None:
+                return None
+            if (
+                sensor_device_class := try_parse_enum(SensorDeviceClass, device_class)
+            ) is None:
+                return None
+            if (
+                sensor_device_class
+                and (
+                    sensor_state_classes := DEVICE_CLASS_STATE_CLASSES.get(
+                        sensor_device_class
+                    )
+                )
+                and sensor_state_classes
+                and SensorStateClass.MEASUREMENT not in sensor_state_classes
+            ):
+                return None
+            if device_class not in DEVICE_CLASS_UNITS:
+                return None
+            if (
+                device_class in DEVICE_CLASS_UNITS
+                and unit not in DEVICE_CLASS_UNITS[device_class]
+            ):
+                return None
+
+        return device_class
+
+    def _calculate_state_class(self, new_state: State) -> SensorStateClass | None:
+        """Return the calculated state class.
+ + Will be None if the characteristics is not numerical, otherwise + SensorStateClass.MEASUREMENT. + """ if self._state_characteristic in STATS_NOT_A_NUMBER: return None return SensorStateClass.MEASUREMENT - @property - def native_value(self) -> StateType | datetime: - """Return the state of the sensor.""" - return self._value - - @property - def native_unit_of_measurement(self) -> str | None: - """Return the unit the value is expressed in.""" - return self._unit_of_measurement - - @property - def available(self) -> bool: - """Return the availability of the sensor linked to the source sensor.""" - return self._available - - @property - def extra_state_attributes(self) -> dict[str, StateType] | None: - """Return the state attributes of the sensor.""" - return { - key: value for key, value in self.attributes.items() if value is not None - } - - def _purge_old_states(self, max_age: timedelta) -> None: + def _purge_old_states(self, max_age: float) -> None: """Remove states which are older than a given age.""" - now = dt_util.utcnow() + now_timestamp = time.time() + debug = _LOGGER.isEnabledFor(logging.DEBUG) - _LOGGER.debug( - "%s: purging records older then %s(%s)(keep_last_sample: %s)", - self.entity_id, - dt_util.as_local(now - max_age), - self._samples_max_age, - self.samples_keep_last, - ) + if debug: + _LOGGER.debug( + "%s: purging records older then %s(%s)(keep_last_sample: %s)", + self.entity_id, + dt_util.as_local(dt_util.utc_from_timestamp(now_timestamp - max_age)), + self._samples_max_age, + self.samples_keep_last, + ) - while self.ages and (now - self.ages[0]) > max_age: + while self.ages and (now_timestamp - self.ages[0]) > max_age: if self.samples_keep_last and len(self.ages) == 1: # Under normal circumstance this will not be executed, as a purge will not # be scheduled for the last value if samples_keep_last is enabled. # If this happens to be called outside normal scheduling logic or a # source sensor update, this ensures the last value is preserved. - _LOGGER.debug( - "%s: preserving expired record with datetime %s(%s)", - self.entity_id, - dt_util.as_local(self.ages[0]), - (now - self.ages[0]), - ) + if debug: + _LOGGER.debug( + "%s: preserving expired record with datetime %s(%s)", + self.entity_id, + dt_util.as_local(dt_util.utc_from_timestamp(self.ages[0])), + dt_util.utc_from_timestamp(now_timestamp - self.ages[0]), + ) break - _LOGGER.debug( - "%s: purging record with datetime %s(%s)", - self.entity_id, - dt_util.as_local(self.ages[0]), - (now - self.ages[0]), - ) + if debug: + _LOGGER.debug( + "%s: purging record with datetime %s(%s)", + self.entity_id, + dt_util.as_local(dt_util.utc_from_timestamp(self.ages[0])), + dt_util.utc_from_timestamp(now_timestamp - self.ages[0]), + ) self.ages.popleft() self.states.popleft() @callback - def _async_next_to_purge_timestamp(self) -> datetime | None: + def _async_next_to_purge_timestamp(self) -> float | None: """Find the timestamp when the next purge would occur.""" if self.ages and self._samples_max_age: if self.samples_keep_last and len(self.ages) == 1: # Preserve the most recent entry if it is the only value. # Do not schedule another purge. When a new source # value is inserted it will restart purge cycle. 
- _LOGGER.debug( - "%s: skipping purge cycle for last record with datetime %s(%s)", - self.entity_id, - dt_util.as_local(self.ages[0]), - (dt_util.utcnow() - self.ages[0]), - ) + if _LOGGER.isEnabledFor(logging.DEBUG): + _LOGGER.debug( + "%s: skipping purge cycle for last record with datetime %s(%s)", + self.entity_id, + dt_util.as_local(dt_util.utc_from_timestamp(self.ages[0])), + (dt_util.utcnow() - dt_util.utc_from_timestamp(self.ages[0])), + ) return None # Take the oldest entry from the ages list and add the configured max_age. # If executed after purging old states, the result is the next timestamp @@ -578,17 +979,24 @@ class StatisticsSensor(SensorEntity): if self._samples_max_age is not None: self._purge_old_states(self._samples_max_age) - self._update_attributes() + self._update_extra_state_attributes() self._update_value() # If max_age is set, ensure to update again after the defined interval. # By basing updates off the timestamps of sampled data we avoid updating # when none of the observed entities change. if timestamp := self._async_next_to_purge_timestamp(): - _LOGGER.debug("%s: scheduling update at %s", self.entity_id, timestamp) + if _LOGGER.isEnabledFor(logging.DEBUG): + _LOGGER.debug( + "%s: scheduling update at %s", + self.entity_id, + dt_util.utc_from_timestamp(timestamp), + ) self._async_cancel_update_listener() self._update_listener = async_track_point_in_utc_time( - self.hass, self._async_scheduled_update, timestamp + self.hass, + self._async_scheduled_update, + dt_util.utc_from_timestamp(timestamp), ) @callback @@ -604,15 +1012,19 @@ class StatisticsSensor(SensorEntity): _LOGGER.debug("%s: executing scheduled update", self.entity_id) self._async_cancel_update_listener() self._async_purge_update_and_schedule() - self.async_write_ha_state() + # only write state to the state machine if we are not in preview mode + if not self._preview_callback: + self.async_write_ha_state() def _fetch_states_from_database(self) -> list[State]: """Fetch the states from the database.""" _LOGGER.debug("%s: initializing values from the database", self.entity_id) lower_entity_id = self._source_entity_id.lower() - if self._samples_max_age is not None: + if (max_age := self._samples_max_age) is not None: start_date = ( - dt_util.utcnow() - self._samples_max_age - timedelta(microseconds=1) + dt_util.utcnow() + - timedelta(seconds=max_age) + - timedelta(microseconds=1) ) _LOGGER.debug( "%s: retrieve records not older then %s", @@ -646,27 +1058,32 @@ class StatisticsSensor(SensorEntity): ): for state in reversed(states): self._add_state_to_queue(state) - + self._calculate_state_attributes(state) self._async_purge_update_and_schedule() - self.async_write_ha_state() + + # only write state to the state machine if we are not in preview mode + if self._preview_callback: + calculated_state = self._async_calculate_state() + self._preview_callback(calculated_state.state, calculated_state.attributes) + else: + self.async_write_ha_state() _LOGGER.debug("%s: initializing from database completed", self.entity_id) - def _update_attributes(self) -> None: + def _update_extra_state_attributes(self) -> None: """Calculate and update the various attributes.""" if self._samples_max_buffer_size is not None: - self.attributes[STAT_BUFFER_USAGE_RATIO] = round( + self._attr_extra_state_attributes[STAT_BUFFER_USAGE_RATIO] = round( len(self.states) / self._samples_max_buffer_size, 2 ) - if self._samples_max_age is not None: + if (max_age := self._samples_max_age) is not None: if len(self.states) >= 1: - 
self.attributes[STAT_AGE_COVERAGE_RATIO] = round( - (self.ages[-1] - self.ages[0]).total_seconds() - / self._samples_max_age.total_seconds(), + self._attr_extra_state_attributes[STAT_AGE_COVERAGE_RATIO] = round( + (self.ages[-1] - self.ages[0]) / max_age, 2, ) else: - self.attributes[STAT_AGE_COVERAGE_RATIO] = None + self._attr_extra_state_attributes[STAT_AGE_COVERAGE_RATIO] = 0 def _update_value(self) -> None: """Front to call the right statistical characteristics functions. @@ -674,215 +1091,13 @@ class StatisticsSensor(SensorEntity): One of the _stat_*() functions is represented by self._state_characteristic_fn(). """ - value = self._state_characteristic_fn() - + value = self._state_characteristic_fn(self.states, self.ages, self._percentile) + _LOGGER.debug( + "Updating value: states: %s, ages: %s => %s", self.states, self.ages, value + ) if self._state_characteristic not in STATS_NOT_A_NUMBER: with contextlib.suppress(TypeError): value = round(cast(float, value), self._precision) if self._precision == 0: value = int(value) - self._value = value - - def _callable_characteristic_fn( - self, characteristic: str - ) -> Callable[[], StateType | datetime]: - """Return the function callable of one characteristic function.""" - function: Callable[[], StateType | datetime] = getattr( - self, - f"_stat_binary_{characteristic}" - if self.is_binary - else f"_stat_{characteristic}", - ) - return function - - # Statistics for numeric sensor - - def _stat_average_linear(self) -> StateType: - if len(self.states) >= 2: - area: float = 0 - for i in range(1, len(self.states)): - area += ( - 0.5 - * (self.states[i] + self.states[i - 1]) - * (self.ages[i] - self.ages[i - 1]).total_seconds() - ) - age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds() - return area / age_range_seconds - return None - - def _stat_average_step(self) -> StateType: - if len(self.states) >= 2: - area: float = 0 - for i in range(1, len(self.states)): - area += ( - self.states[i - 1] - * (self.ages[i] - self.ages[i - 1]).total_seconds() - ) - age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds() - return area / age_range_seconds - return None - - def _stat_average_timeless(self) -> StateType: - return self._stat_mean() - - def _stat_change(self) -> StateType: - if len(self.states) > 0: - return self.states[-1] - self.states[0] - return None - - def _stat_change_sample(self) -> StateType: - if len(self.states) > 1: - return (self.states[-1] - self.states[0]) / (len(self.states) - 1) - return None - - def _stat_change_second(self) -> StateType: - if len(self.states) > 1: - age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds() - if age_range_seconds > 0: - return (self.states[-1] - self.states[0]) / age_range_seconds - return None - - def _stat_count(self) -> StateType: - return len(self.states) - - def _stat_datetime_newest(self) -> datetime | None: - if len(self.states) > 0: - return self.ages[-1] - return None - - def _stat_datetime_oldest(self) -> datetime | None: - if len(self.states) > 0: - return self.ages[0] - return None - - def _stat_datetime_value_max(self) -> datetime | None: - if len(self.states) > 0: - return self.ages[self.states.index(max(self.states))] - return None - - def _stat_datetime_value_min(self) -> datetime | None: - if len(self.states) > 0: - return self.ages[self.states.index(min(self.states))] - return None - - def _stat_distance_95_percent_of_values(self) -> StateType: - if len(self.states) >= 2: - return 2 * 1.96 * cast(float, self._stat_standard_deviation()) - 
return None - - def _stat_distance_99_percent_of_values(self) -> StateType: - if len(self.states) >= 2: - return 2 * 2.58 * cast(float, self._stat_standard_deviation()) - return None - - def _stat_distance_absolute(self) -> StateType: - if len(self.states) > 0: - return max(self.states) - min(self.states) - return None - - def _stat_mean(self) -> StateType: - if len(self.states) > 0: - return statistics.mean(self.states) - return None - - def _stat_mean_circular(self) -> StateType: - if len(self.states) > 0: - sin_sum = sum(math.sin(math.radians(x)) for x in self.states) - cos_sum = sum(math.cos(math.radians(x)) for x in self.states) - return (math.degrees(math.atan2(sin_sum, cos_sum)) + 360) % 360 - return None - - def _stat_median(self) -> StateType: - if len(self.states) > 0: - return statistics.median(self.states) - return None - - def _stat_noisiness(self) -> StateType: - if len(self.states) >= 2: - return cast(float, self._stat_sum_differences()) / (len(self.states) - 1) - return None - - def _stat_percentile(self) -> StateType: - if len(self.states) >= 2: - percentiles = statistics.quantiles(self.states, n=100, method="exclusive") - return percentiles[self._percentile - 1] - return None - - def _stat_standard_deviation(self) -> StateType: - if len(self.states) >= 2: - return statistics.stdev(self.states) - return None - - def _stat_sum(self) -> StateType: - if len(self.states) > 0: - return sum(self.states) - return None - - def _stat_sum_differences(self) -> StateType: - if len(self.states) >= 2: - return sum( - abs(j - i) - for i, j in zip(list(self.states), list(self.states)[1:], strict=False) - ) - return None - - def _stat_sum_differences_nonnegative(self) -> StateType: - if len(self.states) >= 2: - return sum( - (j - i if j >= i else j - 0) - for i, j in zip(list(self.states), list(self.states)[1:], strict=False) - ) - return None - - def _stat_total(self) -> StateType: - return self._stat_sum() - - def _stat_value_max(self) -> StateType: - if len(self.states) > 0: - return max(self.states) - return None - - def _stat_value_min(self) -> StateType: - if len(self.states) > 0: - return min(self.states) - return None - - def _stat_variance(self) -> StateType: - if len(self.states) >= 2: - return statistics.variance(self.states) - return None - - # Statistics for binary sensor - - def _stat_binary_average_step(self) -> StateType: - if len(self.states) >= 2: - on_seconds: float = 0 - for i in range(1, len(self.states)): - if self.states[i - 1] is True: - on_seconds += (self.ages[i] - self.ages[i - 1]).total_seconds() - age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds() - return 100 / age_range_seconds * on_seconds - return None - - def _stat_binary_average_timeless(self) -> StateType: - return self._stat_binary_mean() - - def _stat_binary_count(self) -> StateType: - return len(self.states) - - def _stat_binary_count_on(self) -> StateType: - return self.states.count(True) - - def _stat_binary_count_off(self) -> StateType: - return self.states.count(False) - - def _stat_binary_datetime_newest(self) -> datetime | None: - return self._stat_datetime_newest() - - def _stat_binary_datetime_oldest(self) -> datetime | None: - return self._stat_datetime_oldest() - - def _stat_binary_mean(self) -> StateType: - if len(self.states) > 0: - return 100.0 / len(self.states) * self.states.count(True) - return None + self._attr_native_value = value diff --git a/homeassistant/components/statistics/strings.json b/homeassistant/components/statistics/strings.json index 
5f32b203bfd..91aead261ff 100644 --- a/homeassistant/components/statistics/strings.json +++ b/homeassistant/components/statistics/strings.json @@ -1,4 +1,5 @@ { + "title": "Statistics", "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" @@ -9,7 +10,7 @@ }, "step": { "user": { - "description": "Add a statistics sensor", + "description": "Create a statistics sensor", "data": { "name": "[%key:common::config_flow::data::name%]", "entity_id": "Entity" @@ -22,10 +23,10 @@ "state_characteristic": { "description": "Read the documention for further details on available options and how to use them.", "data": { - "state_characteristic": "State_characteristic" + "state_characteristic": "Statistic characteristic" }, "data_description": { - "state_characteristic": "The characteristic that should be used as the state of the statistics sensor." + "state_characteristic": "The statistic characteristic that should be used as the state of the sensor." } }, "options": { diff --git a/homeassistant/components/statsd/__init__.py b/homeassistant/components/statsd/__init__.py index efe1c818025..50b74b20028 100644 --- a/homeassistant/components/statsd/__init__.py +++ b/homeassistant/components/statsd/__init__.py @@ -80,7 +80,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: # Send attribute values for key, value in states.items(): if isinstance(value, (float, int)): - stat = "{}.{}".format(state.entity_id, key.replace(" ", "_")) + stat = f"{state.entity_id}.{key.replace(' ', '_')}" statsd_client.gauge(stat, value, sample_rate) elif isinstance(_state, (float, int)): diff --git a/homeassistant/components/statsd/manifest.json b/homeassistant/components/statsd/manifest.json index 73296a23dd9..4f0ea93eb98 100644 --- a/homeassistant/components/statsd/manifest.json +++ b/homeassistant/components/statsd/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/statsd", "iot_class": "local_push", "loggers": ["statsd"], + "quality_scale": "legacy", "requirements": ["statsd==3.2.1"] } diff --git a/homeassistant/components/steam_online/config_flow.py b/homeassistant/components/steam_online/config_flow.py index 4b99bf7738d..69009fca8c4 100644 --- a/homeassistant/components/steam_online/config_flow.py +++ b/homeassistant/components/steam_online/config_flow.py @@ -36,15 +36,11 @@ def validate_input(user_input: dict[str, str]) -> dict[str, str | int]: class SteamFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Steam.""" - def __init__(self) -> None: - """Initialize the flow.""" - self.entry: SteamConfigEntry | None = None - @staticmethod @callback def async_get_options_flow( config_entry: SteamConfigEntry, - ) -> OptionsFlow: + ) -> SteamOptionsFlowHandler: """Get the options flow for this handler.""" return SteamOptionsFlowHandler(config_entry) @@ -53,8 +49,8 @@ class SteamFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} - if user_input is None and self.entry: - user_input = {CONF_ACCOUNT: self.entry.data[CONF_ACCOUNT]} + if user_input is None and self.source == SOURCE_REAUTH: + user_input = {CONF_ACCOUNT: self._get_reauth_entry().data[CONF_ACCOUNT]} elif user_input is not None: try: res = await self.hass.async_add_executor_job(validate_input, user_input) @@ -102,8 +98,6 @@ class SteamFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a reauthorization flow request.""" - self.entry = 
self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -129,7 +123,6 @@ class SteamOptionsFlowHandler(OptionsFlow): def __init__(self, entry: SteamConfigEntry) -> None: """Initialize options flow.""" - self.entry = entry self.options = dict(entry.options) async def async_step_init( @@ -137,7 +130,7 @@ class SteamOptionsFlowHandler(OptionsFlow): ) -> ConfigFlowResult: """Manage Steam options.""" if user_input is not None: - await self.hass.config_entries.async_unload(self.entry.entry_id) + await self.hass.config_entries.async_unload(self.config_entry.entry_id) for _id in self.options[CONF_ACCOUNTS]: if _id not in user_input[CONF_ACCOUNTS] and ( entity_id := er.async_get(self.hass).async_get_entity_id( @@ -152,7 +145,7 @@ class SteamOptionsFlowHandler(OptionsFlow): if _id in user_input[CONF_ACCOUNTS] } } - await self.hass.config_entries.async_reload(self.entry.entry_id) + await self.hass.config_entries.async_reload(self.config_entry.entry_id) return self.async_create_entry(title="", data=channel_data) error = None try: @@ -182,7 +175,9 @@ class SteamOptionsFlowHandler(OptionsFlow): """Get accounts.""" interface = steam.api.interface("ISteamUser") try: - friends = interface.GetFriendList(steamid=self.entry.data[CONF_ACCOUNT]) + friends = interface.GetFriendList( + steamid=self.config_entry.data[CONF_ACCOUNT] + ) _users_str = [user["steamid"] for user in friends["friendslist"]["friends"]] except steam.api.HTTPError: return [] diff --git a/homeassistant/components/steam_online/coordinator.py b/homeassistant/components/steam_online/coordinator.py index 6e7bdf4b91c..81a3bb0d898 100644 --- a/homeassistant/components/steam_online/coordinator.py +++ b/homeassistant/components/steam_online/coordinator.py @@ -60,9 +60,9 @@ class SteamDataUpdateCoordinator( for player in response["response"]["players"]["player"] if player["steamid"] in _ids } - for k in players: - data = self.player_interface.GetSteamLevel(steamid=players[k]["steamid"]) - players[k]["level"] = data["response"].get("player_level") + for value in players.values(): + data = self.player_interface.GetSteamLevel(steamid=value["steamid"]) + value["level"] = data["response"].get("player_level") return players async def _async_update_data(self) -> dict[str, dict[str, str | int]]: diff --git a/homeassistant/components/steamist/config_flow.py b/homeassistant/components/steamist/config_flow.py index b5cb6527fa3..f22eafc6afd 100644 --- a/homeassistant/components/steamist/config_flow.py +++ b/homeassistant/components/steamist/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import Any, Self from aiosteamist import Steamist from discovery30303 import Device30303, normalize_mac @@ -33,6 +33,8 @@ class SteamistConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + host: str | None = None + def __init__(self) -> None: """Initialize the config flow.""" self._discovered_devices: dict[str, Device30303] = {} @@ -78,10 +80,9 @@ class SteamistConfigFlow(ConfigFlow, domain=DOMAIN): ): self.hass.config_entries.async_schedule_reload(entry.entry_id) return self.async_abort(reason="already_configured") - self.context[CONF_HOST] = host - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == host: - return self.async_abort(reason="already_in_progress") + self.host = host + if self.hass.config_entries.flow.async_has_matching_flow(self): + return 
self.async_abort(reason="already_in_progress") if not device.name: discovery = await async_discover_device(self.hass, device.ipaddress) if not discovery: @@ -92,6 +93,10 @@ class SteamistConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="not_steamist_device") return await self.async_step_discovery_confirm() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow.host == self.host + async def async_step_discovery_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/stiebel_eltron/climate.py b/homeassistant/components/stiebel_eltron/climate.py index 41015ac16a4..676f613f382 100644 --- a/homeassistant/components/stiebel_eltron/climate.py +++ b/homeassistant/components/stiebel_eltron/climate.py @@ -80,7 +80,6 @@ class StiebelEltron(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, name, ste_data): """Initialize the unit.""" diff --git a/homeassistant/components/stiebel_eltron/manifest.json b/homeassistant/components/stiebel_eltron/manifest.json index 6592851d641..9580cd4d4ca 100644 --- a/homeassistant/components/stiebel_eltron/manifest.json +++ b/homeassistant/components/stiebel_eltron/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/stiebel_eltron", "iot_class": "local_polling", "loggers": ["pymodbus", "pystiebeleltron"], + "quality_scale": "legacy", "requirements": ["pystiebeleltron==0.0.1.dev2"] } diff --git a/homeassistant/components/stookalert/__init__.py b/homeassistant/components/stookalert/__init__.py deleted file mode 100644 index 0ef9c7fa845..00000000000 --- a/homeassistant/components/stookalert/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -"""The Stookalert integration.""" - -from __future__ import annotations - -import stookalert - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from .const import CONF_PROVINCE, DOMAIN - -PLATFORMS = [Platform.BINARY_SENSOR] - - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Stookalert from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = stookalert.stookalert(entry.data[CONF_PROVINCE]) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload Stookalert config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - del hass.data[DOMAIN][entry.entry_id] - return unload_ok diff --git a/homeassistant/components/stookalert/binary_sensor.py b/homeassistant/components/stookalert/binary_sensor.py deleted file mode 100644 index a2fff52f2a3..00000000000 --- a/homeassistant/components/stookalert/binary_sensor.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Support for Stookalert Binary Sensor.""" - -from __future__ import annotations - -from datetime import timedelta - -import stookalert - -from homeassistant.components.binary_sensor import ( - BinarySensorDeviceClass, - BinarySensorEntity, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from 
homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .const import CONF_PROVINCE, DOMAIN - -SCAN_INTERVAL = timedelta(minutes=60) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Stookalert binary sensor from a config entry.""" - client = hass.data[DOMAIN][entry.entry_id] - async_add_entities([StookalertBinarySensor(client, entry)], update_before_add=True) - - -class StookalertBinarySensor(BinarySensorEntity): - """Defines a Stookalert binary sensor.""" - - _attr_attribution = "Data provided by rivm.nl" - _attr_device_class = BinarySensorDeviceClass.SAFETY - _attr_has_entity_name = True - _attr_name = None - - def __init__(self, client: stookalert.stookalert, entry: ConfigEntry) -> None: - """Initialize a Stookalert device.""" - self._client = client - self._attr_unique_id = entry.unique_id - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, f"{entry.entry_id}")}, - name=f"Stookalert {entry.data[CONF_PROVINCE]}", - manufacturer="RIVM", - model="Stookalert", - entry_type=DeviceEntryType.SERVICE, - configuration_url="https://www.rivm.nl/stookalert", - ) - - def update(self) -> None: - """Update the data from the Stookalert handler.""" - self._client.get_alerts() - self._attr_is_on = self._client.state == 1 diff --git a/homeassistant/components/stookalert/config_flow.py b/homeassistant/components/stookalert/config_flow.py deleted file mode 100644 index 0d3bc0c1761..00000000000 --- a/homeassistant/components/stookalert/config_flow.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Config flow to configure the Stookalert integration.""" - -from __future__ import annotations - -from typing import Any - -import voluptuous as vol - -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult - -from .const import CONF_PROVINCE, DOMAIN, PROVINCES - - -class StookalertFlowHandler(ConfigFlow, domain=DOMAIN): - """Config flow for Stookalert.""" - - VERSION = 1 - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a flow initialized by the user.""" - if user_input is not None: - await self.async_set_unique_id(user_input[CONF_PROVINCE]) - self._abort_if_unique_id_configured() - return self.async_create_entry( - title=user_input[CONF_PROVINCE], data=user_input - ) - - return self.async_show_form( - step_id="user", - data_schema=vol.Schema({vol.Required(CONF_PROVINCE): vol.In(PROVINCES)}), - ) diff --git a/homeassistant/components/stookalert/const.py b/homeassistant/components/stookalert/const.py deleted file mode 100644 index 9896eea212a..00000000000 --- a/homeassistant/components/stookalert/const.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Constants for the Stookalert integration.""" - -import logging -from typing import Final - -DOMAIN: Final = "stookalert" -LOGGER = logging.getLogger(__package__) - -CONF_PROVINCE: Final = "province" - -PROVINCES: Final = ( - "Drenthe", - "Flevoland", - "Friesland", - "Gelderland", - "Groningen", - "Limburg", - "Noord-Brabant", - "Noord-Holland", - "Overijssel", - "Utrecht", - "Zeeland", - "Zuid-Holland", -) diff --git a/homeassistant/components/stookalert/manifest.json b/homeassistant/components/stookalert/manifest.json deleted file mode 100644 index 2bebc639720..00000000000 --- a/homeassistant/components/stookalert/manifest.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "domain": "stookalert", - "name": "RIVM Stookalert", - "codeowners": ["@fwestenberg", "@frenck"], - "config_flow": true, - 
"documentation": "https://www.home-assistant.io/integrations/stookalert", - "integration_type": "service", - "iot_class": "cloud_polling", - "requirements": ["stookalert==0.1.4"] -} diff --git a/homeassistant/components/stookalert/strings.json b/homeassistant/components/stookalert/strings.json deleted file mode 100644 index a05ae4e61e7..00000000000 --- a/homeassistant/components/stookalert/strings.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "config": { - "step": { - "user": { - "data": { - "province": "Province" - } - } - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" - } - } -} diff --git a/homeassistant/components/stookwijzer/__init__.py b/homeassistant/components/stookwijzer/__init__.py index a714e3bd368..d8b9561bde9 100644 --- a/homeassistant/components/stookwijzer/__init__.py +++ b/homeassistant/components/stookwijzer/__init__.py @@ -2,29 +2,89 @@ from __future__ import annotations +from typing import Any + from stookwijzer import Stookwijzer -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE, Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN +from .const import DOMAIN, LOGGER +from .coordinator import StookwijzerConfigEntry, StookwijzerCoordinator PLATFORMS = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: StookwijzerConfigEntry) -> bool: """Set up Stookwijzer from a config entry.""" - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = Stookwijzer( - entry.data[CONF_LOCATION][CONF_LATITUDE], - entry.data[CONF_LOCATION][CONF_LONGITUDE], - ) + await er.async_migrate_entries(hass, entry.entry_id, async_migrate_entity_entry) + + coordinator = StookwijzerCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: StookwijzerConfigEntry +) -> bool: """Unload Stookwijzer config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - del hass.data[DOMAIN][entry.entry_id] - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_migrate_entry( + hass: HomeAssistant, entry: StookwijzerConfigEntry +) -> bool: + """Migrate old entry.""" + LOGGER.debug("Migrating from version %s", entry.version) + + if entry.version == 1: + latitude, longitude = await Stookwijzer.async_transform_coordinates( + async_get_clientsession(hass), + entry.data[CONF_LOCATION][CONF_LATITUDE], + entry.data[CONF_LOCATION][CONF_LONGITUDE], + ) + + if not latitude or not longitude: + ir.async_create_issue( + hass, + DOMAIN, + "location_migration_failed", + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="location_migration_failed", + translation_placeholders={ + "entry_title": entry.title, + }, + ) + return False + + hass.config_entries.async_update_entry( + entry, + version=2, + data={ + CONF_LATITUDE: latitude, + CONF_LONGITUDE: longitude, + }, + ) + + 
LOGGER.debug("Migration to version %s successful", entry.version) + + return True + + +@callback +def async_migrate_entity_entry(entity_entry: er.RegistryEntry) -> dict[str, Any] | None: + """Migrate Stookwijzer entity entries. + + - Migrates unique ID for the old Stookwijzer sensors to the new unique ID. + """ + if entity_entry.unique_id == entity_entry.config_entry_id: + return {"new_unique_id": f"{entity_entry.config_entry_id}_advice"} + + # No migration needed + return None diff --git a/homeassistant/components/stookwijzer/config_flow.py b/homeassistant/components/stookwijzer/config_flow.py index be53ce56390..32b4836763f 100644 --- a/homeassistant/components/stookwijzer/config_flow.py +++ b/homeassistant/components/stookwijzer/config_flow.py @@ -4,10 +4,12 @@ from __future__ import annotations from typing import Any +from stookwijzer import Stookwijzer import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import LocationSelector from .const import DOMAIN @@ -16,21 +18,29 @@ from .const import DOMAIN class StookwijzerFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for Stookwijzer.""" - VERSION = 1 + VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - + errors = {} if user_input is not None: - return self.async_create_entry( - title="Stookwijzer", - data=user_input, + latitude, longitude = await Stookwijzer.async_transform_coordinates( + async_get_clientsession(self.hass), + user_input[CONF_LOCATION][CONF_LATITUDE], + user_input[CONF_LOCATION][CONF_LONGITUDE], ) + if latitude and longitude: + return self.async_create_entry( + title="Stookwijzer", + data={CONF_LATITUDE: latitude, CONF_LONGITUDE: longitude}, + ) + errors["base"] = "unknown" return self.async_show_form( step_id="user", + errors=errors, data_schema=vol.Schema( { vol.Required( diff --git a/homeassistant/components/stookwijzer/const.py b/homeassistant/components/stookwijzer/const.py index e8cb3d818e6..1b0be86d375 100644 --- a/homeassistant/components/stookwijzer/const.py +++ b/homeassistant/components/stookwijzer/const.py @@ -1,16 +1,7 @@ """Constants for the Stookwijzer integration.""" -from enum import StrEnum import logging from typing import Final DOMAIN: Final = "stookwijzer" LOGGER = logging.getLogger(__package__) - - -class StookwijzerState(StrEnum): - """Stookwijzer states for sensor entity.""" - - BLUE = "blauw" - ORANGE = "oranje" - RED = "rood" diff --git a/homeassistant/components/stookwijzer/coordinator.py b/homeassistant/components/stookwijzer/coordinator.py new file mode 100644 index 00000000000..23092bed66e --- /dev/null +++ b/homeassistant/components/stookwijzer/coordinator.py @@ -0,0 +1,44 @@ +"""Class representing a Stookwijzer update coordinator.""" + +from datetime import timedelta + +from stookwijzer import Stookwijzer + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER + +SCAN_INTERVAL = timedelta(minutes=60) + +type StookwijzerConfigEntry = 
ConfigEntry[StookwijzerCoordinator] + + +class StookwijzerCoordinator(DataUpdateCoordinator[None]): + """Stookwijzer update coordinator.""" + + def __init__(self, hass: HomeAssistant, entry: StookwijzerConfigEntry) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = Stookwijzer( + async_get_clientsession(hass), + entry.data[CONF_LATITUDE], + entry.data[CONF_LONGITUDE], + ) + + async def _async_update_data(self) -> None: + """Fetch data from API endpoint.""" + await self.client.async_update() + if self.client.advice is None: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="no_data_received", + ) diff --git a/homeassistant/components/stookwijzer/diagnostics.py b/homeassistant/components/stookwijzer/diagnostics.py index c7bf4fad14d..2849e0e976a 100644 --- a/homeassistant/components/stookwijzer/diagnostics.py +++ b/homeassistant/components/stookwijzer/diagnostics.py @@ -4,29 +4,18 @@ from __future__ import annotations from typing import Any -from stookwijzer import Stookwijzer - -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN +from .coordinator import StookwijzerConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: StookwijzerConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - client: Stookwijzer = hass.data[DOMAIN][entry.entry_id] - - last_updated = None - if client.last_updated: - last_updated = client.last_updated.isoformat() - + client = entry.runtime_data.client return { - "state": client.state, - "last_updated": last_updated, - "lqi": client.lqi, - "windspeed": client.windspeed, - "weather": client.weather, - "concentrations": client.concentrations, + "advice": client.advice, + "air_quality_index": client.lki, + "windspeed_ms": client.windspeed_ms, } diff --git a/homeassistant/components/stookwijzer/manifest.json b/homeassistant/components/stookwijzer/manifest.json index dbf902b1e1e..3fe16fb3d33 100644 --- a/homeassistant/components/stookwijzer/manifest.json +++ b/homeassistant/components/stookwijzer/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/stookwijzer", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["stookwijzer==1.3.0"] + "requirements": ["stookwijzer==1.5.1"] } diff --git a/homeassistant/components/stookwijzer/quality_scale.yaml b/homeassistant/components/stookwijzer/quality_scale.yaml new file mode 100644 index 00000000000..20e64efaa92 --- /dev/null +++ b/homeassistant/components/stookwijzer/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + The integration doesn't provide any additional service actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + The integration doesn't provide any additional service actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + The integration doesn't subscribe to any events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: todo + test-before-setup: done + unique-config-entry: todo + + # Silver + action-exceptions: + status: exempt + comment: | + This integration is read-only and doesn't provide any actions. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: exempt + comment: | + This integration is read-only and doesn't provide any actions. Querying + the service for data is handled centrally using a data update coordinator. + reauthentication-flow: + status: exempt + comment: | + This integration doesn't require re-authentication. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + The integration cannot be discovered, as it is an external service. + discovery: + status: exempt + comment: | + The integration cannot be discovered, as it is an external service. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration provides a single device entry for the service. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: todo + reconfiguration-flow: todo + repair-issues: done + stale-devices: + status: exempt + comment: | + This integration provides a single device entry for the service. + # Platinum + async-dependency: done + inject-websession: done + strict-typing: + status: todo + comment: | + Requirement 'stookwijzer==1.5.1' appears untyped diff --git a/homeassistant/components/stookwijzer/sensor.py b/homeassistant/components/stookwijzer/sensor.py index b8f9a660598..2660ff2ddb2 100644 --- a/homeassistant/components/stookwijzer/sensor.py +++ b/homeassistant/components/stookwijzer/sensor.py @@ -2,65 +2,95 @@ from __future__ import annotations -from datetime import timedelta +from collections.abc import Callable +from dataclasses import dataclass from stookwijzer import Stookwijzer -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.config_entries import ConfigEntry +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfSpeed from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN, StookwijzerState +from .const import DOMAIN +from .coordinator import StookwijzerConfigEntry, StookwijzerCoordinator -SCAN_INTERVAL = timedelta(minutes=60) + +@dataclass(kw_only=True, frozen=True) +class StookwijzerSensorDescription(SensorEntityDescription): + """Class describing Stookwijzer sensor entities.""" + + value_fn: Callable[[Stookwijzer], int | float | str | None] + + +STOOKWIJZER_SENSORS = [ + StookwijzerSensorDescription( + key="windspeed", + native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, + suggested_unit_of_measurement=UnitOfSpeed.BEAUFORT, + 
device_class=SensorDeviceClass.WIND_SPEED, + suggested_display_precision=0, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda client: client.windspeed_ms, + ), + StookwijzerSensorDescription( + key="air_quality_index", + device_class=SensorDeviceClass.AQI, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda client: client.lki, + ), + StookwijzerSensorDescription( + key="advice", + translation_key="advice", + device_class=SensorDeviceClass.ENUM, + value_fn=lambda client: client.advice, + options=["code_yellow", "code_orange", "code_red"], + ), +] async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: StookwijzerConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Stookwijzer sensor from a config entry.""" - client = hass.data[DOMAIN][entry.entry_id] - async_add_entities([StookwijzerSensor(client, entry)], update_before_add=True) + async_add_entities( + StookwijzerSensor(description, entry) for description in STOOKWIJZER_SENSORS + ) -class StookwijzerSensor(SensorEntity): +class StookwijzerSensor(CoordinatorEntity[StookwijzerCoordinator], SensorEntity): """Defines a Stookwijzer binary sensor.""" - _attr_attribution = "Data provided by stookwijzer.nu" - _attr_device_class = SensorDeviceClass.ENUM + entity_description: StookwijzerSensorDescription + _attr_attribution = "Data provided by atlasleefomgeving.nl" _attr_has_entity_name = True - _attr_name = None - _attr_translation_key = "stookwijzer" - def __init__(self, client: Stookwijzer, entry: ConfigEntry) -> None: + def __init__( + self, + description: StookwijzerSensorDescription, + entry: StookwijzerConfigEntry, + ) -> None: """Initialize a Stookwijzer device.""" - self._client = client - self._attr_options = [cls.value for cls in StookwijzerState] - self._attr_unique_id = entry.entry_id + super().__init__(entry.runtime_data) + self.entity_description = description + self._attr_unique_id = f"{entry.entry_id}_{description.key}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, f"{entry.entry_id}")}, - name="Stookwijzer", - manufacturer="stookwijzer.nu", + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer="Atlas Leefomgeving", entry_type=DeviceEntryType.SERVICE, - configuration_url="https://www.stookwijzer.nu", + configuration_url="https://www.atlasleefomgeving.nl/stookwijzer", ) - def update(self) -> None: - """Update the data from the Stookwijzer handler.""" - self._client.update() - @property - def available(self) -> bool: - """Return if entity is available.""" - return self._client.state is not None - - @property - def native_value(self) -> str | None: + def native_value(self) -> int | float | str | None: """Return the state of the device.""" - if self._client.state is None: - return None - return StookwijzerState(self._client.state).value + return self.entity_description.value_fn(self.coordinator.client) diff --git a/homeassistant/components/stookwijzer/strings.json b/homeassistant/components/stookwijzer/strings.json index 549673165ec..189af89b282 100644 --- a/homeassistant/components/stookwijzer/strings.json +++ b/homeassistant/components/stookwijzer/strings.json @@ -5,19 +5,37 @@ "description": "Select the location you want to recieve the Stookwijzer information for.", "data": { "location": "[%key:common::config_flow::data::location%]" + }, + "data_description": { + "location": "Use the map to set the location for Stookwijzer." 
} } + }, + "error": { + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "entity": { "sensor": { - "stookwijzer": { + "advice": { + "name": "Advice code", "state": { - "blauw": "Blue", - "oranje": "Orange", - "rood": "Red" + "code_yellow": "Yellow", + "code_orange": "Orange", + "code_red": "Red" } } } + }, + "issues": { + "location_migration_failed": { + "description": "The Stookwijzer integration was unable to automatically migrate your location to a new format the updated integrations uses.\n\nMake sure you are connected to the internet and restart Home Assistant to try again.\n\nIf this doesn't resolve the error, remove and re-add the integration.", + "title": "Migration of your location failed" + } + }, + "exceptions": { + "no_data_received": { + "message": "No data received from Stookwijzer." + } } } diff --git a/homeassistant/components/stream/const.py b/homeassistant/components/stream/const.py index a2fa065e019..66455ffad1a 100644 --- a/homeassistant/components/stream/const.py +++ b/homeassistant/components/stream/const.py @@ -1,5 +1,9 @@ """Constants for Stream component.""" +from __future__ import annotations + +from typing import Final + DOMAIN = "stream" ATTR_ENDPOINTS = "endpoints" @@ -11,8 +15,8 @@ RECORDER_PROVIDER = "recorder" OUTPUT_FORMATS = [HLS_PROVIDER] -SEGMENT_CONTAINER_FORMAT = "mp4" # format for segments -RECORDER_CONTAINER_FORMAT = "mp4" # format for recorder output +SEGMENT_CONTAINER_FORMAT: Final = "mp4" # format for segments +RECORDER_CONTAINER_FORMAT: Final = "mp4" # format for recorder output AUDIO_CODECS = {"aac", "mp3"} FORMAT_CONTENT_TYPE = {HLS_PROVIDER: "application/vnd.apple.mpegurl"} diff --git a/homeassistant/components/stream/core.py b/homeassistant/components/stream/core.py index 68c08a4f072..4184b23b9a0 100644 --- a/homeassistant/components/stream/core.py +++ b/homeassistant/components/stream/core.py @@ -9,7 +9,7 @@ from dataclasses import dataclass, field import datetime from enum import IntEnum import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, cast from aiohttp import web import numpy as np @@ -27,7 +27,7 @@ from .const import ( ) if TYPE_CHECKING: - from av import CodecContext, Packet + from av import Packet, VideoCodecContext from homeassistant.components.camera import DynamicStreamSettings @@ -438,17 +438,17 @@ class KeyFrameConverter: """Initialize.""" # Keep import here so that we can import stream integration - # without installingreqs + # without installing reqs # pylint: disable-next=import-outside-toplevel from homeassistant.components.camera.img_util import TurboJPEGSingleton - self._packet: Packet = None + self._packet: Packet | None = None self._event: asyncio.Event = asyncio.Event() self._hass = hass self._image: bytes | None = None self._turbojpeg = TurboJPEGSingleton.instance() self._lock = asyncio.Lock() - self._codec_context: CodecContext | None = None + self._codec_context: VideoCodecContext | None = None self._stream_settings = stream_settings self._dynamic_stream_settings = dynamic_stream_settings @@ -460,7 +460,7 @@ class KeyFrameConverter: self._packet = packet self._hass.loop.call_soon_threadsafe(self._event.set) - def create_codec_context(self, codec_context: CodecContext) -> None: + def create_codec_context(self, codec_context: VideoCodecContext) -> None: """Create a codec context to be used for decoding the keyframes. This is run by the worker thread and will only be called once per worker. 
@@ -474,7 +474,9 @@ class KeyFrameConverter: # pylint: disable-next=import-outside-toplevel from av import CodecContext - self._codec_context = CodecContext.create(codec_context.name, "r") + self._codec_context = cast( + "VideoCodecContext", CodecContext.create(codec_context.name, "r") + ) self._codec_context.extradata = codec_context.extradata self._codec_context.skip_frame = "NONKEY" self._codec_context.thread_type = "NONE" @@ -506,9 +508,8 @@ class KeyFrameConverter: frames = self._codec_context.decode(None) break except EOFError: - _LOGGER.debug("Codec context needs flushing, attempting to reopen") - self._codec_context.close() - self._codec_context.open() + _LOGGER.debug("Codec context needs flushing") + self._codec_context.flush_buffers() else: _LOGGER.debug("Unable to decode keyframe") return diff --git a/homeassistant/components/stream/manifest.json b/homeassistant/components/stream/manifest.json index 37158aa5fe3..b9368565e2f 100644 --- a/homeassistant/components/stream/manifest.json +++ b/homeassistant/components/stream/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["PyTurboJPEG==1.7.1", "ha-av==10.1.1", "numpy==1.26.0"] + "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==2.2.0"] } diff --git a/homeassistant/components/stream/recorder.py b/homeassistant/components/stream/recorder.py index 6dfc09891b7..a24440e6d19 100644 --- a/homeassistant/components/stream/recorder.py +++ b/homeassistant/components/stream/recorder.py @@ -9,6 +9,7 @@ import os from typing import TYPE_CHECKING import av +import av.container from homeassistant.core import HomeAssistant, callback @@ -105,24 +106,23 @@ class RecorderOutput(StreamOutput): # Create output on first segment if not output: + container_options: dict[str, str] = { + "video_track_timescale": str(int(1 / source_v.time_base)), # type: ignore[operator] + "movflags": "frag_keyframe+empty_moov", + "min_frag_duration": str(self.stream_settings.min_segment_duration), + } output = av.open( self.video_path + ".tmp", "w", format=RECORDER_CONTAINER_FORMAT, - container_options={ - "video_track_timescale": str(int(1 / source_v.time_base)), - "movflags": "frag_keyframe+empty_moov", - "min_frag_duration": str( - self.stream_settings.min_segment_duration - ), - }, + container_options=container_options, ) # Add output streams if necessary if not output_v: output_v = output.add_stream(template=source_v) context = output_v.codec_context - context.flags |= "GLOBAL_HEADER" + context.global_header = True if source_a and not output_a: output_a = output.add_stream(template=source_a) @@ -132,21 +132,23 @@ class RecorderOutput(StreamOutput): last_stream_id = segment.stream_id pts_adjuster["video"] = int( (running_duration - source.start_time) - / (av.time_base * source_v.time_base) + / (av.time_base * source_v.time_base) # type: ignore[operator] ) if source_a: pts_adjuster["audio"] = int( (running_duration - source.start_time) - / (av.time_base * source_a.time_base) + / (av.time_base * source_a.time_base) # type: ignore[operator] ) # Remux video for packet in source.demux(): - if packet.dts is None: + if packet.pts is None: continue - packet.pts += pts_adjuster[packet.stream.type] - packet.dts += pts_adjuster[packet.stream.type] - packet.stream = output_v if packet.stream.type == "video" else output_a + packet.pts += pts_adjuster[packet.stream.type] # type: ignore[operator] + packet.dts += pts_adjuster[packet.stream.type] # type: ignore[operator] + stream = output_v if 
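# A minimal sketch that loosely mirrors the PyAV output pattern used in
# recorder.py above: av.open with container_options, add_stream(template=...),
# the boolean codec_context.global_header flag (replacing the old string-based
# flags API), and mux of demuxed packets. Paths, format and options below are
# illustrative assumptions, not the recorder's exact behaviour.
import av


def remux_first_video_stream(source_path: str, dest_path: str) -> None:
    """Copy the first video stream of source_path into a new fragmented MP4."""
    with av.open(source_path) as source:
        source_v = source.streams.video[0]
        output = av.open(
            dest_path,
            "w",
            format="mp4",
            container_options={"movflags": "frag_keyframe+empty_moov"},
        )
        output_v = output.add_stream(template=source_v)
        # Boolean attribute instead of `flags |= "GLOBAL_HEADER"`.
        output_v.codec_context.global_header = True
        for packet in source.demux(source_v):
            if packet.pts is None:
                # Skip flush packets, as the recorder does above.
                continue
            packet.stream = output_v
            output.mux(packet)
        output.close()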
packet.stream.type == "video" else output_a + assert stream + packet.stream = stream output.mux(packet) running_duration += source.duration - source.start_time @@ -169,7 +171,9 @@ class RecorderOutput(StreamOutput): os.remove(video_path + ".tmp") def finish_writing( - segments: deque[Segment], output: av.OutputContainer, video_path: str + segments: deque[Segment], + output: av.container.OutputContainer | None, + video_path: str, ) -> None: """Finish writing output.""" # Should only have 0 or 1 segments, but loop through just in case diff --git a/homeassistant/components/stream/worker.py b/homeassistant/components/stream/worker.py index 354cc476186..8c9bb1b8e9e 100644 --- a/homeassistant/components/stream/worker.py +++ b/homeassistant/components/stream/worker.py @@ -13,6 +13,9 @@ from threading import Event from typing import Any, Self, cast import av +import av.audio +import av.container +import av.stream from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -47,10 +50,10 @@ class StreamWorkerError(Exception): """An exception thrown while processing a stream.""" -def redact_av_error_string(err: av.AVError) -> str: +def redact_av_error_string(err: av.FFmpegError) -> str: """Return an error string with credentials redacted from the url.""" - parts = [str(err.type), err.strerror] - if err.filename is not None: + parts = [str(err.type), err.strerror] # type: ignore[attr-defined] + if err.filename: parts.append(redact_credentials(err.filename)) return ", ".join(parts) @@ -123,30 +126,31 @@ class StreamState: class StreamMuxer: """StreamMuxer re-packages video/audio packets for output.""" + _segment_start_dts: int + _memory_file: BytesIO + _av_output: av.container.OutputContainer + _output_video_stream: av.VideoStream + _output_audio_stream: av.audio.AudioStream | None + _segment: Segment | None + # the following 2 member variables are used for Part formation + _memory_file_pos: int + _part_start_dts: float + def __init__( self, hass: HomeAssistant, - video_stream: av.video.VideoStream, - audio_stream: av.audio.stream.AudioStream | None, - audio_bsf: av.BitStreamFilter | None, + video_stream: av.VideoStream, + audio_stream: av.audio.AudioStream | None, + audio_bsf: str | None, stream_state: StreamState, stream_settings: StreamSettings, ) -> None: """Initialize StreamMuxer.""" self._hass = hass - self._segment_start_dts: int = cast(int, None) - self._memory_file: BytesIO = cast(BytesIO, None) - self._av_output: av.container.OutputContainer = None - self._input_video_stream: av.video.VideoStream = video_stream - self._input_audio_stream: av.audio.stream.AudioStream | None = audio_stream + self._input_video_stream = video_stream + self._input_audio_stream = audio_stream self._audio_bsf = audio_bsf - self._audio_bsf_context: av.BitStreamFilterContext = None - self._output_video_stream: av.video.VideoStream = None - self._output_audio_stream: av.audio.stream.AudioStream | None = None - self._segment: Segment | None = None - # the following 3 member variables are used for Part formation - self._memory_file_pos: int = cast(int, None) - self._part_start_dts: int = cast(int, None) + self._audio_bsf_context: av.BitStreamFilterContext | None = None self._part_has_keyframe = False self._stream_settings = stream_settings self._stream_state = stream_state @@ -156,83 +160,83 @@ class StreamMuxer: self, memory_file: BytesIO, sequence: int, - input_vstream: av.video.VideoStream, - input_astream: av.audio.stream.AudioStream | None, + input_vstream: av.VideoStream, + 
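# A short sketch of the typing change applied to StreamMuxer above: attributes
# that are only assigned later (in reset()) are declared as class-level
# annotations instead of being seeded with `cast(int, None)` placeholders, so
# mypy still knows their types without fake runtime values. The class and
# attribute names below are illustrative.
class _MuxerStateSketch:
    # Declared, but not assigned until the first segment is started.
    _segment_start_dts: int
    _memory_file_pos: int

    def __init__(self, part_target_duration: float) -> None:
        """Only configuration is set at construction time."""
        self._part_target_duration = part_target_duration

    def reset(self, video_dts: int) -> None:
        """Assign the deferred attributes when a new segment begins."""
        self._segment_start_dts = video_dts
        self._memory_file_pos = 0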
input_astream: av.audio.AudioStream | None, ) -> tuple[ av.container.OutputContainer, - av.video.VideoStream, - av.audio.stream.AudioStream | None, + av.VideoStream, + av.audio.AudioStream | None, ]: """Make a new av OutputContainer and add output streams.""" + container_options: dict[str, str] = { + # Removed skip_sidx - see: + # https://github.com/home-assistant/core/pull/39970 + # "cmaf" flag replaces several of the movflags used, + # but too recent to use for now + "movflags": "frag_custom+empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", + # Sometimes the first segment begins with negative timestamps, + # and this setting just + # adjusts the timestamps in the output from that segment to start + # from 0. Helps from having to make some adjustments + # in test_durations + "avoid_negative_ts": "make_non_negative", + "fragment_index": str(sequence + 1), + "video_track_timescale": str(int(1 / input_vstream.time_base)), # type: ignore[operator] + # Only do extra fragmenting if we are using ll_hls + # Let ffmpeg do the work using frag_duration + # Fragment durations may exceed the 15% allowed variance but it seems ok + **( + { + "movflags": "empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", + # Create a fragment every TARGET_PART_DURATION. The data from + # each fragment is stored in a "Part" that can be combined with + # the data from all the other "Part"s, plus an init section, + # to reconstitute the data in a "Segment". + # + # The LL-HLS spec allows for a fragment's duration to be within + # the range [0.85x,1.0x] of the part target duration. We use the + # frag_duration option to tell ffmpeg to try to cut the + # fragments when they reach frag_duration. However, + # the resulting fragments can have variability in their + # durations and can end up being too short or too long. With a + # video track with no audio, the discrete nature of frames means + # that the frame at the end of a fragment will sometimes extend + # slightly beyond the desired frag_duration. + # + # If there are two tracks, as in the case of a video feed with + # audio, there is an added wrinkle as the fragment cut seems to + # be done on the first track that crosses the desired threshold, + # and cutting on the audio track may also result in a shorter + # video fragment than desired. + # + # Given this, our approach is to give ffmpeg a frag_duration + # somewhere in the middle of the range, hoping that the parts + # stay pretty well bounded, and we adjust the part durations + # a bit in the hls metadata so that everything "looks" ok. + "frag_duration": str( + int(self._stream_settings.part_target_duration * 9e5) + ), + } + if self._stream_settings.ll_hls + else {} + ), + } container = av.open( memory_file, mode="w", format=SEGMENT_CONTAINER_FORMAT, - container_options={ - # Removed skip_sidx - see: - # https://github.com/home-assistant/core/pull/39970 - # "cmaf" flag replaces several of the movflags used, - # but too recent to use for now - "movflags": "frag_custom+empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", - # Sometimes the first segment begins with negative timestamps, - # and this setting just - # adjusts the timestamps in the output from that segment to start - # from 0. 
Helps from having to make some adjustments - # in test_durations - "avoid_negative_ts": "make_non_negative", - "fragment_index": str(sequence + 1), - "video_track_timescale": str(int(1 / input_vstream.time_base)), - # Only do extra fragmenting if we are using ll_hls - # Let ffmpeg do the work using frag_duration - # Fragment durations may exceed the 15% allowed variance but it seems ok - **( - { - "movflags": "empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", - # Create a fragment every TARGET_PART_DURATION. The data from - # each fragment is stored in a "Part" that can be combined with - # the data from all the other "Part"s, plus an init section, - # to reconstitute the data in a "Segment". - # - # The LL-HLS spec allows for a fragment's duration to be within - # the range [0.85x,1.0x] of the part target duration. We use the - # frag_duration option to tell ffmpeg to try to cut the - # fragments when they reach frag_duration. However, - # the resulting fragments can have variability in their - # durations and can end up being too short or too long. With a - # video track with no audio, the discrete nature of frames means - # that the frame at the end of a fragment will sometimes extend - # slightly beyond the desired frag_duration. - # - # If there are two tracks, as in the case of a video feed with - # audio, there is an added wrinkle as the fragment cut seems to - # be done on the first track that crosses the desired threshold, - # and cutting on the audio track may also result in a shorter - # video fragment than desired. - # - # Given this, our approach is to give ffmpeg a frag_duration - # somewhere in the middle of the range, hoping that the parts - # stay pretty well bounded, and we adjust the part durations - # a bit in the hls metadata so that everything "looks" ok. 
- "frag_duration": str( - int(self._stream_settings.part_target_duration * 9e5) - ), - } - if self._stream_settings.ll_hls - else {} - ), - }, + container_options=container_options, ) output_vstream = container.add_stream(template=input_vstream) # Check if audio is requested output_astream = None if input_astream: if self._audio_bsf: - self._audio_bsf_context = self._audio_bsf.create() - self._audio_bsf_context.set_input_stream(input_astream) - output_astream = container.add_stream( - template=self._audio_bsf_context or input_astream - ) - return container, output_vstream, output_astream + self._audio_bsf_context = av.BitStreamFilterContext( + self._audio_bsf, input_astream + ) + output_astream = container.add_stream(template=input_astream) + return container, output_vstream, output_astream # type: ignore[return-value] def reset(self, video_dts: int) -> None: """Initialize a new stream segment.""" @@ -251,7 +255,7 @@ class StreamMuxer: input_astream=self._input_audio_stream, ) if self._output_video_stream.name == "hevc": - self._output_video_stream.codec_tag = "hvc1" + self._output_video_stream.codec_context.codec_tag = "hvc1" def mux_packet(self, packet: av.Packet) -> None: """Mux a packet to the appropriate output stream.""" @@ -273,11 +277,11 @@ class StreamMuxer: self._part_has_keyframe |= packet.is_keyframe elif packet.stream == self._input_audio_stream: + assert self._output_audio_stream if self._audio_bsf_context: - self._audio_bsf_context.send(packet) - while packet := self._audio_bsf_context.recv(): - packet.stream = self._output_audio_stream - self._av_output.mux(packet) + for audio_packet in self._audio_bsf_context.filter(packet): + audio_packet.stream = self._output_audio_stream + self._av_output.mux(audio_packet) return packet.stream = self._output_audio_stream self._av_output.mux(packet) @@ -367,12 +371,14 @@ class StreamMuxer: data=self._memory_file.read(), ), ( - segment_duration := float( - (adjusted_dts - self._segment_start_dts) * packet.time_base + ( + segment_duration := float( + (adjusted_dts - self._segment_start_dts) * packet.time_base + ) ) - ) - if last_part - else 0, + if last_part + else 0 + ), ) if last_part: # If we've written the last part, we can close the memory_file. @@ -393,7 +399,7 @@ class StreamMuxer: self._memory_file.close() -class PeekIterator(Iterator): +class PeekIterator(Iterator[av.Packet]): """An Iterator that may allow multiple passes. This may be consumed like a normal Iterator, however also supports a @@ -457,7 +463,7 @@ class TimestampValidator: """Validate the packet timestamp based on ordering within the stream.""" # Discard packets missing DTS. Terminate if too many are missing. if packet.dts is None: - if self._missing_dts >= MAX_MISSING_DTS: + if self._missing_dts >= MAX_MISSING_DTS: # type: ignore[unreachable] raise StreamWorkerError( f"No dts in {MAX_MISSING_DTS+1} consecutive packets" ) @@ -484,7 +490,7 @@ def is_keyframe(packet: av.Packet) -> Any: def get_audio_bitstream_filter( packets: Iterator[av.Packet], audio_stream: Any -) -> av.BitStreamFilterContext | None: +) -> str | None: """Return the aac_adtstoasc bitstream filter if ADTS AAC is detected.""" if not audio_stream: return None @@ -501,7 +507,7 @@ def get_audio_bitstream_filter( _LOGGER.debug( "ADTS AAC detected. 
Adding aac_adtstoaac bitstream filter" ) - return av.BitStreamFilter("aac_adtstoasc") + return "aac_adtstoasc" break return None @@ -522,7 +528,7 @@ def stream_worker( del pyav_options["stimeout"] try: container = av.open(source, options=pyav_options, timeout=SOURCE_TIMEOUT) - except av.AVError as err: + except av.FFmpegError as err: raise StreamWorkerError( f"Error opening stream ({redact_av_error_string(err)})" ) from err @@ -539,7 +545,7 @@ def stream_worker( audio_stream = None # Some audio streams do not have a profile and throw errors when remuxing if audio_stream and audio_stream.profile is None: - audio_stream = None + audio_stream = None # type: ignore[unreachable] # Disable ll-hls for hls inputs if container.format.name == "hls": for field in fields(StreamSettings): @@ -554,8 +560,8 @@ def stream_worker( stream_state.diagnostics.set_value("audio_codec", audio_stream.name) dts_validator = TimestampValidator( - int(1 / video_stream.time_base), - 1 / audio_stream.time_base if audio_stream else 1, + int(1 / video_stream.time_base), # type: ignore[operator] + int(1 / audio_stream.time_base) if audio_stream else 1, # type: ignore[operator] ) container_packets = PeekIterator( filter(dts_validator.is_valid, container.demux((video_stream, audio_stream))) @@ -596,7 +602,7 @@ def stream_worker( except StopIteration as ex: container.close() raise StreamEndedError("Stream ended; no additional packets") from ex - except av.AVError as ex: + except av.FFmpegError as ex: container.close() raise StreamWorkerError( f"Error demuxing stream while finding first packet ({redact_av_error_string(ex)})" @@ -623,7 +629,7 @@ def stream_worker( raise except StopIteration as ex: raise StreamEndedError("Stream ended; no additional packets") from ex - except av.AVError as ex: + except av.FFmpegError as ex: raise StreamWorkerError( f"Error demuxing stream ({redact_av_error_string(ex)})" ) from ex diff --git a/homeassistant/components/streamlabswater/icons.json b/homeassistant/components/streamlabswater/icons.json index aebe224b35e..0cc64fd24cb 100644 --- a/homeassistant/components/streamlabswater/icons.json +++ b/homeassistant/components/streamlabswater/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_away_mode": "mdi:home" + "set_away_mode": { + "service": "mdi:home" + } } } diff --git a/homeassistant/components/stt/__init__.py b/homeassistant/components/stt/__init__.py index f6c38c1e0b7..d3c85aba1e7 100644 --- a/homeassistant/components/stt/__init__.py +++ b/homeassistant/components/stt/__init__.py @@ -30,6 +30,7 @@ from homeassistant.loader import async_suggest_report_issue from homeassistant.util import dt as dt_util, language as language_util from .const import ( + DATA_COMPONENT, DATA_PROVIDERS, DOMAIN, AudioBitRates, @@ -72,11 +73,9 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @callback def async_default_engine(hass: HomeAssistant) -> str | None: """Return the domain or entity id of the default engine.""" - component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN] - default_entity_id: str | None = None - for entity in component.entities: + for entity in hass.data[DATA_COMPONENT].entities: if entity.platform and entity.platform.platform_name == "cloud": return entity.entity_id @@ -91,9 +90,7 @@ def async_get_speech_to_text_entity( hass: HomeAssistant, entity_id: str ) -> SpeechToTextEntity | None: """Return stt entity.""" - component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN] - - return component.get_entity(entity_id) + return hass.data[DATA_COMPONENT].get_entity(entity_id) 
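# A minimal sketch of the HassKey pattern these stt hunks switch to: the key
# carries the stored value's type, so hass.data lookups no longer need per-call
# annotations or casts. The key name and stored type below are illustrative
# only, not part of this change.
from homeassistant.core import HomeAssistant
from homeassistant.util.hass_dict import HassKey

DATA_EXAMPLE: HassKey[dict[str, str]] = HassKey("example_names")


def async_setup_names(hass: HomeAssistant) -> None:
    """Initialise the typed store once."""
    hass.data[DATA_EXAMPLE] = {}


def remember_name(hass: HomeAssistant, entry_id: str, name: str) -> None:
    """Store a value; mypy knows hass.data[DATA_EXAMPLE] is dict[str, str]."""
    hass.data[DATA_EXAMPLE][entry_id] = name


def lookup_name(hass: HomeAssistant, entry_id: str) -> str | None:
    """Read it back with full typing and no cast."""
    return hass.data[DATA_EXAMPLE].get(entry_id)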
@callback @@ -111,13 +108,11 @@ def async_get_speech_to_text_languages(hass: HomeAssistant) -> set[str]: """Return a set with the union of languages supported by stt engines.""" languages = set() - component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN] - legacy_providers: dict[str, Provider] = hass.data[DATA_PROVIDERS] - for entity in component.entities: + for entity in hass.data[DATA_COMPONENT].entities: for language_tag in entity.supported_languages: languages.add(language_tag) - for engine in legacy_providers.values(): + for engine in hass.data[DATA_PROVIDERS].values(): for language_tag in engine.supported_languages: languages.add(language_tag) @@ -128,7 +123,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up STT.""" websocket_api.async_register_command(hass, websocket_list_engines) - component = hass.data[DOMAIN] = EntityComponent[SpeechToTextEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[SpeechToTextEntity]( _LOGGER, DOMAIN, hass ) @@ -150,14 +145,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class SpeechToTextEntity(RestoreEntity): @@ -426,15 +419,12 @@ def websocket_list_engines( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """List speech-to-text engines and, optionally, if they support a given language.""" - component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN] - legacy_providers: dict[str, Provider] = hass.data[DATA_PROVIDERS] - country = msg.get("country") language = msg.get("language") providers = [] provider_info: dict[str, Any] - for entity in component.entities: + for entity in hass.data[DATA_COMPONENT].entities: provider_info = { "engine_id": entity.entity_id, "supported_languages": entity.supported_languages, @@ -445,9 +435,10 @@ def websocket_list_engines( ) providers.append(provider_info) - for engine_id, provider in legacy_providers.items(): + for engine_id, provider in hass.data[DATA_PROVIDERS].items(): provider_info = { "engine_id": engine_id, + "name": provider.name, "supported_languages": provider.supported_languages, } if language: diff --git a/homeassistant/components/stt/const.py b/homeassistant/components/stt/const.py index 2df5bea0316..1c4172cfc89 100644 --- a/homeassistant/components/stt/const.py +++ b/homeassistant/components/stt/const.py @@ -1,9 +1,21 @@ """STT constante.""" +from __future__ import annotations + from enum import Enum +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from . 
import SpeechToTextEntity + from .legacy import Provider DOMAIN = "stt" -DATA_PROVIDERS = f"{DOMAIN}_providers" +DATA_COMPONENT: HassKey[EntityComponent[SpeechToTextEntity]] = HassKey(DOMAIN) +DATA_PROVIDERS: HassKey[dict[str, Provider]] = HassKey(f"{DOMAIN}_providers") class AudioCodecs(str, Enum): diff --git a/homeassistant/components/stt/legacy.py b/homeassistant/components/stt/legacy.py index 7bb0d84c289..13144eae5b4 100644 --- a/homeassistant/components/stt/legacy.py +++ b/homeassistant/components/stt/legacy.py @@ -34,7 +34,8 @@ _LOGGER = logging.getLogger(__name__) @callback def async_default_provider(hass: HomeAssistant) -> str | None: """Return the domain of the default provider.""" - return next(iter(hass.data[DATA_PROVIDERS]), None) + providers = hass.data[DATA_PROVIDERS] + return next(iter(providers), None) @callback @@ -42,7 +43,7 @@ def async_get_provider( hass: HomeAssistant, domain: str | None = None ) -> Provider | None: """Return provider.""" - providers: dict[str, Provider] = hass.data[DATA_PROVIDERS] + providers = hass.data[DATA_PROVIDERS] if domain: return providers.get(domain) diff --git a/homeassistant/components/subaru/__init__.py b/homeassistant/components/subaru/__init__.py index db2ee7fdbbc..3762b16e58b 100644 --- a/homeassistant/components/subaru/__init__.py +++ b/homeassistant/components/subaru/__init__.py @@ -85,6 +85,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=COORDINATOR_NAME, update_method=async_update_data, update_interval=timedelta(seconds=FETCH_INTERVAL), diff --git a/homeassistant/components/subaru/config_flow.py b/homeassistant/components/subaru/config_flow.py index 3d96a89a14f..0ef4ed29941 100644 --- a/homeassistant/components/subaru/config_flow.py +++ b/homeassistant/components/subaru/config_flow.py @@ -106,7 +106,7 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def validate_login_creds(self, data): """Validate the user input allows us to connect. 
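# A short sketch of the options-flow change in this Subaru hunk: the handler no
# longer stores the entry itself because current Home Assistant cores expose it
# as self.config_entry on the OptionsFlow base class. The option key and schema
# below are hypothetical stand-ins, not Subaru's real options schema.
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlowResult, OptionsFlow


class ExampleOptionsFlowHandler(OptionsFlow):
    """Options flow that relies on the framework-provided config_entry."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show the form, seeded from the entry's current options."""
        if user_input is not None:
            return self.async_create_entry(data=user_input)
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        "update_enabled",
                        default=self.config_entry.options.get("update_enabled", False),
                    ): bool
                }
            ),
        )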
@@ -218,10 +218,6 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Subaru.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/subaru/device_tracker.py b/homeassistant/components/subaru/device_tracker.py index 5d25056312e..d406234c36e 100644 --- a/homeassistant/components/subaru/device_tracker.py +++ b/homeassistant/components/subaru/device_tracker.py @@ -6,7 +6,6 @@ from typing import Any from subarulink.const import LATITUDE, LONGITUDE, TIMESTAMP -from homeassistant.components.device_tracker import SourceType from homeassistant.components.device_tracker.config_entry import TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -78,11 +77,6 @@ class SubaruDeviceTracker( """Return longitude value of the vehicle.""" return self.coordinator.data[self.vin][VEHICLE_STATUS].get(LONGITUDE) - @property - def source_type(self) -> SourceType: - """Return the source type of the vehicle.""" - return SourceType.GPS - @property def available(self) -> bool: """Return if entity is available.""" diff --git a/homeassistant/components/subaru/diagnostics.py b/homeassistant/components/subaru/diagnostics.py index 5d95cd0464b..eec5b01ab56 100644 --- a/homeassistant/components/subaru/diagnostics.py +++ b/homeassistant/components/subaru/diagnostics.py @@ -12,7 +12,7 @@ from subarulink.const import ( VEHICLE_NAME, ) -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE_ID, CONF_PASSWORD, CONF_PIN, CONF_USERNAME from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/subaru/icons.json b/homeassistant/components/subaru/icons.json index f6c3597c3c3..ca8648296c7 100644 --- a/homeassistant/components/subaru/icons.json +++ b/homeassistant/components/subaru/icons.json @@ -24,6 +24,8 @@ } }, "services": { - "unlock_specific_door": "mdi:lock-open-variant" + "unlock_specific_door": { + "service": "mdi:lock-open-variant" + } } } diff --git a/homeassistant/components/subaru/strings.json b/homeassistant/components/subaru/strings.json index 78625192e4a..00da729dccd 100644 --- a/homeassistant/components/subaru/strings.json +++ b/homeassistant/components/subaru/strings.json @@ -37,13 +37,13 @@ "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "incorrect_pin": "Incorrect PIN", "bad_pin_format": "PIN should be 4 digits", - "two_factor_request_failed": "Request for 2FA code failed, please try again", "bad_validation_code_format": "Validation code should be 6 digits", "incorrect_validation_code": "Incorrect validation code" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "two_factor_request_failed": "Request for 2FA code failed, please try again" } }, "options": { diff --git a/homeassistant/components/suez_water/__init__.py b/homeassistant/components/suez_water/__init__.py index f5b2880e011..cbaac912642 100644 --- a/homeassistant/components/suez_water/__init__.py +++ 
b/homeassistant/components/suez_water/__init__.py @@ -2,48 +2,27 @@ from __future__ import annotations -from pysuez import SuezClient -from pysuez.client import PySuezError - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady -from .const import CONF_COUNTER_ID, DOMAIN +from .coordinator import SuezWaterConfigEntry, SuezWaterCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: SuezWaterConfigEntry) -> bool: """Set up Suez Water from a config entry.""" - def get_client() -> SuezClient: - try: - client = SuezClient( - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - entry.data[CONF_COUNTER_ID], - provider=None, - ) - if not client.check_credentials(): - raise ConfigEntryError - except PySuezError as ex: - raise ConfigEntryNotReady from ex - return client + coordinator = SuezWaterCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[ - entry.entry_id - ] = await hass.async_add_executor_job(get_client) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SuezWaterConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/suez_water/config_flow.py b/homeassistant/components/suez_water/config_flow.py index 28b211dc808..b24dc1815ee 100644 --- a/homeassistant/components/suez_water/config_flow.py +++ b/homeassistant/components/suez_water/config_flow.py @@ -5,8 +5,7 @@ from __future__ import annotations import logging from typing import Any -from pysuez import SuezClient -from pysuez.client import PySuezError +from pysuez import PySuezError, SuezClient import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -21,27 +20,36 @@ STEP_USER_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, - vol.Required(CONF_COUNTER_ID): str, + vol.Optional(CONF_COUNTER_ID): str, } ) -def validate_input(data: dict[str, Any]) -> None: +async def validate_input(data: dict[str, Any]) -> None: """Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. 
""" try: + counter_id = data.get(CONF_COUNTER_ID) client = SuezClient( data[CONF_USERNAME], data[CONF_PASSWORD], - data[CONF_COUNTER_ID], - provider=None, + counter_id, ) - if not client.check_credentials(): - raise InvalidAuth - except PySuezError as ex: - raise CannotConnect from ex + try: + if not await client.check_credentials(): + raise InvalidAuth + except PySuezError as ex: + raise CannotConnect from ex + + if counter_id is None: + try: + data[CONF_COUNTER_ID] = await client.find_counter() + except PySuezError as ex: + raise CounterNotFound from ex + finally: + await client.close_session() class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): @@ -58,11 +66,13 @@ class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() try: - await self.hass.async_add_executor_job(validate_input, user_input) + await validate_input(user_input) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" + except CounterNotFound: + errors["base"] = "counter_not_found" except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" @@ -72,7 +82,10 @@ class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + description_placeholders={"tout_sur_mon_eau": "Tout sur mon Eau"}, ) @@ -82,3 +95,7 @@ class CannotConnect(HomeAssistantError): class InvalidAuth(HomeAssistantError): """Error to indicate there is invalid auth.""" + + +class CounterNotFound(HomeAssistantError): + """Error to indicate we cannot automatically found the counter id.""" diff --git a/homeassistant/components/suez_water/const.py b/homeassistant/components/suez_water/const.py index 7afc0d3ce3e..cecd779c22c 100644 --- a/homeassistant/components/suez_water/const.py +++ b/homeassistant/components/suez_water/const.py @@ -1,5 +1,9 @@ """Constants for the Suez Water integration.""" +from datetime import timedelta + DOMAIN = "suez_water" CONF_COUNTER_ID = "counter_id" + +DATA_REFRESH_INTERVAL = timedelta(hours=12) diff --git a/homeassistant/components/suez_water/coordinator.py b/homeassistant/components/suez_water/coordinator.py new file mode 100644 index 00000000000..72da68c0f5d --- /dev/null +++ b/homeassistant/components/suez_water/coordinator.py @@ -0,0 +1,91 @@ +"""Suez water update coordinator.""" + +from collections.abc import Mapping +from dataclasses import dataclass +from datetime import date +from typing import Any + +from pysuez import PySuezError, SuezClient + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import _LOGGER, HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import CONF_COUNTER_ID, DATA_REFRESH_INTERVAL, DOMAIN + + +@dataclass +class SuezWaterAggregatedAttributes: + """Class containing aggregated sensor extra attributes.""" + + this_month_consumption: dict[date, float] + previous_month_consumption: dict[date, float] + last_year_overall: dict[str, float] + this_year_overall: dict[str, float] + history: dict[date, float] + highest_monthly_consumption: float + + +@dataclass +class SuezWaterData: + """Class used to hold all fetch data from suez api.""" + + aggregated_value: float + 
aggregated_attr: Mapping[str, Any] + price: float + + +type SuezWaterConfigEntry = ConfigEntry[SuezWaterCoordinator] + + +class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]): + """Suez water coordinator.""" + + _suez_client: SuezClient + config_entry: SuezWaterConfigEntry + + def __init__(self, hass: HomeAssistant, config_entry: SuezWaterConfigEntry) -> None: + """Initialize suez water coordinator.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=DATA_REFRESH_INTERVAL, + always_update=True, + config_entry=config_entry, + ) + + async def _async_setup(self) -> None: + self._suez_client = SuezClient( + username=self.config_entry.data[CONF_USERNAME], + password=self.config_entry.data[CONF_PASSWORD], + counter_id=self.config_entry.data[CONF_COUNTER_ID], + ) + if not await self._suez_client.check_credentials(): + raise ConfigEntryError("Invalid credentials for suez water") + + async def _async_update_data(self) -> SuezWaterData: + """Fetch data from API endpoint.""" + try: + aggregated = await self._suez_client.fetch_aggregated_data() + data = SuezWaterData( + aggregated_value=aggregated.value, + aggregated_attr={ + "this_month_consumption": aggregated.current_month, + "previous_month_consumption": aggregated.previous_month, + "highest_monthly_consumption": aggregated.highest_monthly_consumption, + "last_year_overall": aggregated.previous_year, + "this_year_overall": aggregated.current_year, + "history": aggregated.history, + }, + price=(await self._suez_client.get_price()).price, + ) + except PySuezError as err: + _LOGGER.exception(err) + raise UpdateFailed( + f"Suez coordinator error communicating with API: {err}" + ) from err + _LOGGER.debug("Successfully fetched suez data") + return data diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index 4503d7a1119..f39411e8afa 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -1,10 +1,11 @@ { "domain": "suez_water", "name": "Suez Water", - "codeowners": ["@ooii"], + "codeowners": ["@ooii", "@jb101010-2"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], - "requirements": ["pysuez==0.2.0"] + "quality_scale": "bronze", + "requirements": ["pysuezV2==1.3.5"] } diff --git a/homeassistant/components/suez_water/quality_scale.yaml b/homeassistant/components/suez_water/quality_scale.yaml new file mode 100644 index 00000000000..399c0b73a5a --- /dev/null +++ b/homeassistant/components/suez_water/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: no subscription to api + dependency-transparency: done + action-setup: + status: exempt + comment: no service action + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: + status: exempt + comment: no service action + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: todo + entity-unavailable: done + action-exceptions: + status: exempt + comment: no service action + reauthentication-flow: todo + parallel-updates: + status: exempt + 
comment: no service action and coordinator updates + test-coverage: done + integration-owner: done + docs-installation-parameters: + status: todo + comment: missing user/password + docs-configuration-parameters: + status: exempt + comment: no configuration option + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: todo + entity-disabled-by-default: todo + discovery: + status: exempt + comment: api only, nothing on local network to discover services + stale-devices: + status: exempt + comment: one device only + diagnostics: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: one device only + discovery-update-info: + status: exempt + comment: fixed api + repair-issues: + status: exempt + comment: No repair issues to be raised + docs-use-cases: done + docs-supported-devices: todo + docs-supported-functions: done + docs-data-update: + status: todo + comment: make it clearer + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: todo + strict-typing: done diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index 5b00cbf2dc4..e4e53dd7f6d 100644 --- a/homeassistant/components/suez_water/sensor.py +++ b/homeassistant/components/suez_water/sensor.py @@ -2,94 +2,97 @@ from __future__ import annotations -from datetime import timedelta -import logging +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from typing import Any -from pysuez import SuezClient -from pysuez.client import PySuezError +from pysuez.const import ATTRIBUTION -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfVolume +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.const import CURRENCY_EURO, UnitOfVolume from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONF_COUNTER_ID, DOMAIN +from .coordinator import SuezWaterConfigEntry, SuezWaterCoordinator, SuezWaterData -_LOGGER = logging.getLogger(__name__) -SCAN_INTERVAL = timedelta(hours=12) +@dataclass(frozen=True, kw_only=True) +class SuezWaterSensorEntityDescription(SensorEntityDescription): + """Describes Suez water sensor entity.""" + + value_fn: Callable[[SuezWaterData], float | str | None] + attr_fn: Callable[[SuezWaterData], Mapping[str, Any] | None] = lambda _: None + + +SENSORS: tuple[SuezWaterSensorEntityDescription, ...] 
= ( + SuezWaterSensorEntityDescription( + key="water_usage_yesterday", + translation_key="water_usage_yesterday", + native_unit_of_measurement=UnitOfVolume.LITERS, + device_class=SensorDeviceClass.WATER, + value_fn=lambda suez_data: suez_data.aggregated_value, + attr_fn=lambda suez_data: suez_data.aggregated_attr, + ), + SuezWaterSensorEntityDescription( + key="water_price", + translation_key="water_price", + native_unit_of_measurement=CURRENCY_EURO, + device_class=SensorDeviceClass.MONETARY, + value_fn=lambda suez_data: suez_data.price, + ), +) async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SuezWaterConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Suez Water sensor from a config entry.""" - client = hass.data[DOMAIN][entry.entry_id] - async_add_entities([SuezSensor(client, entry.data[CONF_COUNTER_ID])], True) + coordinator = entry.runtime_data + counter_id = entry.data[CONF_COUNTER_ID] + + async_add_entities( + SuezWaterSensor(coordinator, counter_id, description) for description in SENSORS + ) -class SuezSensor(SensorEntity): - """Representation of a Sensor.""" +class SuezWaterSensor(CoordinatorEntity[SuezWaterCoordinator], SensorEntity): + """Representation of a Suez water sensor.""" _attr_has_entity_name = True - _attr_translation_key = "water_usage_yesterday" - _attr_native_unit_of_measurement = UnitOfVolume.LITERS - _attr_device_class = SensorDeviceClass.WATER + _attr_attribution = ATTRIBUTION + entity_description: SuezWaterSensorEntityDescription - def __init__(self, client: SuezClient, counter_id: int) -> None: - """Initialize the data object.""" - self.client = client - self._attr_extra_state_attributes = {} - self._attr_unique_id = f"{counter_id}_water_usage_yesterday" + def __init__( + self, + coordinator: SuezWaterCoordinator, + counter_id: int, + entity_description: SuezWaterSensorEntityDescription, + ) -> None: + """Initialize the suez water sensor entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{counter_id}_{entity_description.key}" self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, str(counter_id))}, entry_type=DeviceEntryType.SERVICE, manufacturer="Suez", ) + self.entity_description = entity_description - def _fetch_data(self) -> None: - """Fetch latest data from Suez.""" - try: - self.client.update() - # _state holds the volume of consumed water during previous day - self._attr_native_value = self.client.state - self._attr_available = True - self._attr_attribution = self.client.attributes["attribution"] + @property + def native_value(self) -> float | str | None: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) - self._attr_extra_state_attributes["this_month_consumption"] = {} - for item in self.client.attributes["thisMonthConsumption"]: - self._attr_extra_state_attributes["this_month_consumption"][item] = ( - self.client.attributes["thisMonthConsumption"][item] - ) - self._attr_extra_state_attributes["previous_month_consumption"] = {} - for item in self.client.attributes["previousMonthConsumption"]: - self._attr_extra_state_attributes["previous_month_consumption"][ - item - ] = self.client.attributes["previousMonthConsumption"][item] - self._attr_extra_state_attributes["highest_monthly_consumption"] = ( - self.client.attributes["highestMonthlyConsumption"] - ) - self._attr_extra_state_attributes["last_year_overall"] = ( - self.client.attributes["lastYearOverAll"] - ) - self._attr_extra_state_attributes["this_year_overall"] = ( - 
self.client.attributes["thisYearOverAll"] - ) - self._attr_extra_state_attributes["history"] = {} - for item in self.client.attributes["history"]: - self._attr_extra_state_attributes["history"][item] = ( - self.client.attributes["history"][item] - ) - - except PySuezError: - self._attr_available = False - _LOGGER.warning("Unable to fetch data") - - def update(self) -> None: - """Return the latest collected data from Suez.""" - self._fetch_data() - _LOGGER.debug("Suez data state is: %s", self.native_value) + @property + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return extra state of the sensor.""" + return self.entity_description.attr_fn(self.coordinator.data) diff --git a/homeassistant/components/suez_water/strings.json b/homeassistant/components/suez_water/strings.json index f9abd70fc19..be2d4849e76 100644 --- a/homeassistant/components/suez_water/strings.json +++ b/homeassistant/components/suez_water/strings.json @@ -5,14 +5,21 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "counter_id": "Counter id" - } + "counter_id": "Meter id" + }, + "data_description": { + "username": "Enter your login associated with your {tout_sur_mon_eau} account", + "password": "Enter your password associated with your {tout_sur_mon_eau} account", + "counter_id": "Enter your meter id (ex: 12345678). Should be found automatically during setup, if not see integration documentation for more information" + }, + "description": "Connect your suez water {tout_sur_mon_eau} account to retrieve your water consumption" } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "unknown": "[%key:common::config_flow::error::unknown%]", + "counter_not_found": "Could not find meter id automatically" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" @@ -22,6 +29,9 @@ "sensor": { "water_usage_yesterday": { "name": "Water usage yesterday" + }, + "water_price": { + "name": "Water price" } } } diff --git a/homeassistant/components/sun/__init__.py b/homeassistant/components/sun/__init__.py index 8f6f3098ee8..f42f5450462 100644 --- a/homeassistant/components/sun/__init__.py +++ b/homeassistant/components/sun/__init__.py @@ -2,10 +2,13 @@ from __future__ import annotations +import logging + from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType # The sensor platform is pre-imported here to ensure @@ -23,6 +26,8 @@ from .entity import Sun, SunConfigEntry CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +_LOGGER = logging.getLogger(__name__) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track the state of the sun.""" @@ -42,7 +47,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: SunConfigEntry) -> bool: """Set up from a config entry.""" - entry.runtime_data = sun = Sun(hass) + sun = Sun(hass) + component = EntityComponent[Sun](_LOGGER, DOMAIN, hass) + await component.async_add_entities([sun]) + entry.runtime_data = sun 
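# A minimal sketch of the EntityComponent registration the sun setup above now
# uses, so a standalone entity gets a real platform and normal lifecycle hooks
# instead of writing its state directly. The domain and entity class below are
# illustrative assumptions, not part of this change.
import logging

from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent


class ExampleEntity(Entity):
    """A standalone entity without a regular platform setup."""

    _attr_name = "Example"


async def register_example_entity(hass: HomeAssistant) -> ExampleEntity:
    """Add the entity through an EntityComponent so lifecycle hooks run."""
    entity = ExampleEntity()
    component = EntityComponent[ExampleEntity](
        logging.getLogger(__name__), "example_domain", hass
    )
    await component.async_add_entities([entity])
    # Tearing it down later mirrors async_unload_entry above:
    # await entity.async_remove()
    return entity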
entry.async_on_unload(sun.remove_listeners) await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR]) return True @@ -53,6 +61,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: SunConfigEntry) -> bool if unload_ok := await hass.config_entries.async_unload_platforms( entry, [Platform.SENSOR] ): - sun = entry.runtime_data - hass.states.async_remove(sun.entity_id) + await entry.runtime_data.async_remove() return unload_ok diff --git a/homeassistant/components/sun/config_flow.py b/homeassistant/components/sun/config_flow.py index 30b64c60b9f..16c465be8ad 100644 --- a/homeassistant/components/sun/config_flow.py +++ b/homeassistant/components/sun/config_flow.py @@ -23,6 +23,6 @@ class SunConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user") - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from configuration.yaml.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) diff --git a/homeassistant/components/sun/entity.py b/homeassistant/components/sun/entity.py index 10d328afde7..925845c8b4d 100644 --- a/homeassistant/components/sun/entity.py +++ b/homeassistant/components/sun/entity.py @@ -100,9 +100,6 @@ class Sun(Entity): _attr_name = "Sun" entity_id = ENTITY_ID - # This entity is legacy and does not have a platform. - # We can't fix this easily without breaking changes. - _no_platform_reported = True location: Location elevation: Elevation @@ -122,18 +119,16 @@ class Sun(Entity): self.hass = hass self.phase: str | None = None - # This is normally done by async_internal_added_to_hass which is not called - # for sun because sun has no platform - self._state_info = { - "unrecorded_attributes": self._Entity__combined_unrecorded_attributes # type: ignore[attr-defined] - } - self._config_listener: CALLBACK_TYPE | None = None self._update_events_listener: CALLBACK_TYPE | None = None self._update_sun_position_listener: CALLBACK_TYPE | None = None self._config_listener = self.hass.bus.async_listen( EVENT_CORE_CONFIG_UPDATE, self.update_location ) + + async def async_added_to_hass(self) -> None: + """Update after entity has been added.""" + await super().async_added_to_hass() self.update_location(initial=True) @callback diff --git a/homeassistant/components/sunweg/config_flow.py b/homeassistant/components/sunweg/config_flow.py index 2b5e49c2cb9..24df8c02f55 100644 --- a/homeassistant/components/sunweg/config_flow.py +++ b/homeassistant/components/sunweg/config_flow.py @@ -124,12 +124,6 @@ class SunWEGConfigFlow(ConfigFlow, domain=DOMAIN): if conf_result is not None: return conf_result - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - if entry is not None: - data: Mapping[str, Any] = self.data - self.hass.config_entries.async_update_entry(entry, data=data) - self.hass.async_create_task( - self.hass.config_entries.async_reload(entry.entry_id) - ) - - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self.data + ) diff --git a/homeassistant/components/sunweg/manifest.json b/homeassistant/components/sunweg/manifest.json index 998d3610735..3ebe9ef8cb4 100644 --- a/homeassistant/components/sunweg/manifest.json +++ b/homeassistant/components/sunweg/manifest.json @@ -3,7 +3,7 @@ "name": "Sun WEG", "codeowners": ["@rokam"], "config_flow": true, - "documentation": 
"https://www.home-assistant.io/integrations/sunweg/", + "documentation": "https://www.home-assistant.io/integrations/sunweg", "iot_class": "cloud_polling", "loggers": ["sunweg"], "requirements": ["sunweg==3.0.2"] diff --git a/homeassistant/components/sunweg/sensor.py b/homeassistant/components/sunweg/sensor/__init__.py similarity index 93% rename from homeassistant/components/sunweg/sensor.py rename to homeassistant/components/sunweg/sensor/__init__.py index 004dd7276a7..e582b5135d3 100644 --- a/homeassistant/components/sunweg/sensor.py +++ b/homeassistant/components/sunweg/sensor/__init__.py @@ -17,13 +17,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import SunWEGData -from .const import CONF_PLANT_ID, DEFAULT_PLANT_ID, DOMAIN, DeviceType -from .sensor_types.inverter import INVERTER_SENSOR_TYPES -from .sensor_types.phase import PHASE_SENSOR_TYPES -from .sensor_types.sensor_entity_description import SunWEGSensorEntityDescription -from .sensor_types.string import STRING_SENSOR_TYPES -from .sensor_types.total import TOTAL_SENSOR_TYPES +from .. import SunWEGData +from ..const import CONF_PLANT_ID, DEFAULT_PLANT_ID, DOMAIN, DeviceType +from .inverter import INVERTER_SENSOR_TYPES +from .phase import PHASE_SENSOR_TYPES +from .sensor_entity_description import SunWEGSensorEntityDescription +from .string import STRING_SENSOR_TYPES +from .total import TOTAL_SENSOR_TYPES _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/sunweg/sensor_types/inverter.py b/homeassistant/components/sunweg/sensor/inverter.py similarity index 100% rename from homeassistant/components/sunweg/sensor_types/inverter.py rename to homeassistant/components/sunweg/sensor/inverter.py diff --git a/homeassistant/components/sunweg/sensor_types/phase.py b/homeassistant/components/sunweg/sensor/phase.py similarity index 100% rename from homeassistant/components/sunweg/sensor_types/phase.py rename to homeassistant/components/sunweg/sensor/phase.py diff --git a/homeassistant/components/sunweg/sensor_types/sensor_entity_description.py b/homeassistant/components/sunweg/sensor/sensor_entity_description.py similarity index 100% rename from homeassistant/components/sunweg/sensor_types/sensor_entity_description.py rename to homeassistant/components/sunweg/sensor/sensor_entity_description.py diff --git a/homeassistant/components/sunweg/sensor_types/string.py b/homeassistant/components/sunweg/sensor/string.py similarity index 100% rename from homeassistant/components/sunweg/sensor_types/string.py rename to homeassistant/components/sunweg/sensor/string.py diff --git a/homeassistant/components/sunweg/sensor_types/total.py b/homeassistant/components/sunweg/sensor/total.py similarity index 100% rename from homeassistant/components/sunweg/sensor_types/total.py rename to homeassistant/components/sunweg/sensor/total.py diff --git a/homeassistant/components/sunweg/sensor_types/__init__.py b/homeassistant/components/sunweg/sensor_types/__init__.py deleted file mode 100644 index f370fddd16b..00000000000 --- a/homeassistant/components/sunweg/sensor_types/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Sensor types for supported Sun WEG systems.""" diff --git a/homeassistant/components/sunweg/strings.json b/homeassistant/components/sunweg/strings.json index 6033bc314bc..9ab7be053b1 100644 --- a/homeassistant/components/sunweg/strings.json +++ b/homeassistant/components/sunweg/strings.json @@ 
-1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "no_plants": "No plants have been found on this account", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, diff --git a/homeassistant/components/supervisord/manifest.json b/homeassistant/components/supervisord/manifest.json index 7586a435ed7..3cdbdd230aa 100644 --- a/homeassistant/components/supervisord/manifest.json +++ b/homeassistant/components/supervisord/manifest.json @@ -3,5 +3,6 @@ "name": "Supervisord", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/supervisord", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/supla/entity.py b/homeassistant/components/supla/entity.py index fa257e39a06..446d67d19d6 100644 --- a/homeassistant/components/supla/entity.py +++ b/homeassistant/components/supla/entity.py @@ -27,10 +27,9 @@ class SuplaEntity(CoordinatorEntity): @property def unique_id(self) -> str: """Return a unique ID.""" - return "supla-{}-{}".format( - self.channel_data["iodevice"]["gUIDString"].lower(), - self.channel_data["channelNumber"], - ) + uid = self.channel_data["iodevice"]["gUIDString"].lower() + channel_number = self.channel_data["channelNumber"] + return f"supla-{uid}-{channel_number}" @property def name(self) -> str | None: diff --git a/homeassistant/components/supla/manifest.json b/homeassistant/components/supla/manifest.json index 6927c92c6e1..803a321c0d6 100644 --- a/homeassistant/components/supla/manifest.json +++ b/homeassistant/components/supla/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/supla", "iot_class": "cloud_polling", "loggers": ["asyncpysupla"], + "quality_scale": "legacy", "requirements": ["asyncpysupla==0.0.5"] } diff --git a/homeassistant/components/surepetcare/config_flow.py b/homeassistant/components/surepetcare/config_flow.py index 6626b1d6dee..472d7ac10f0 100644 --- a/homeassistant/components/surepetcare/config_flow.py +++ b/homeassistant/components/surepetcare/config_flow.py @@ -12,7 +12,6 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME -from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN, SURE_API_TIMEOUT @@ -27,57 +26,41 @@ USER_DATA_SCHEMA = vol.Schema( ) -async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: - """Validate the user input allows us to connect.""" - surepy_client = surepy.Surepy( - data[CONF_USERNAME], - data[CONF_PASSWORD], - auth_token=None, - api_timeout=SURE_API_TIMEOUT, - session=async_get_clientsession(hass), - ) - - token = await surepy_client.sac.get_token() - - return {CONF_TOKEN: token} - - class SurePetCareConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Sure Petcare.""" VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self._username: str | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if user_input is None: - return self.async_show_form(step_id="user", data_schema=USER_DATA_SCHEMA) - errors = {} - - try: - info = await validate_input(self.hass, user_input) - except SurePetcareAuthenticationError: - errors["base"] = "invalid_auth" - 
except SurePetcareError: - errors["base"] = "cannot_connect" - except Exception: - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" - else: - await self.async_set_unique_id(user_input[CONF_USERNAME].lower()) - self._abort_if_unique_id_configured() - - user_input[CONF_TOKEN] = info[CONF_TOKEN] - return self.async_create_entry( - title="Sure Petcare", - data=user_input, + if user_input is not None: + client = surepy.Surepy( + user_input[CONF_USERNAME], + user_input[CONF_PASSWORD], + auth_token=None, + api_timeout=SURE_API_TIMEOUT, + session=async_get_clientsession(self.hass), ) + try: + token = await client.sac.get_token() + except SurePetcareAuthenticationError: + errors["base"] = "invalid_auth" + except SurePetcareError: + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(user_input[CONF_USERNAME].lower()) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title="Sure Petcare", + data={**user_input, CONF_TOKEN: token}, + ) return self.async_show_form( step_id="user", data_schema=USER_DATA_SCHEMA, errors=errors @@ -87,7 +70,6 @@ class SurePetCareConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._username = entry_data[CONF_USERNAME] return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -95,10 +77,17 @@ class SurePetCareConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input[CONF_USERNAME] = self._username + client = surepy.Surepy( + reauth_entry.data[CONF_USERNAME], + user_input[CONF_PASSWORD], + auth_token=None, + api_timeout=SURE_API_TIMEOUT, + session=async_get_clientsession(self.hass), + ) try: - await validate_input(self.hass, user_input) + token = await client.sac.get_token() except SurePetcareAuthenticationError: errors["base"] = "invalid_auth" except SurePetcareError: @@ -107,16 +96,17 @@ class SurePetCareConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - existing_entry = await self.async_set_unique_id( - user_input[CONF_USERNAME].lower() + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={ + CONF_PASSWORD: user_input[CONF_PASSWORD], + CONF_TOKEN: token, + }, ) - if existing_entry: - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", - description_placeholders={"username": self._username}, + description_placeholders={"username": reauth_entry.data[CONF_USERNAME]}, data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}), errors=errors, ) diff --git a/homeassistant/components/surepetcare/icons.json b/homeassistant/components/surepetcare/icons.json index 1db15b599df..0daad594c48 100644 --- a/homeassistant/components/surepetcare/icons.json +++ b/homeassistant/components/surepetcare/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_lock_state": "mdi:lock", - "set_pet_location": "mdi:dog" + "set_lock_state": { + "service": "mdi:lock" + }, + "set_pet_location": { + "service": "mdi:dog" + } } } diff --git a/homeassistant/components/surepetcare/lock.py b/homeassistant/components/surepetcare/lock.py index cd79e06c5c3..f960400bcbc 
100644 --- a/homeassistant/components/surepetcare/lock.py +++ b/homeassistant/components/surepetcare/lock.py @@ -5,11 +5,10 @@ from __future__ import annotations from typing import Any from surepy.entities import SurepyEntity -from surepy.enums import EntityType, LockState +from surepy.enums import EntityType, LockState as SurepyLockState -from homeassistant.components.lock import LockEntity +from homeassistant.components.lock import LockEntity, LockState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -30,9 +29,9 @@ async def async_setup_entry( for surepy_entity in coordinator.data.values() if surepy_entity.type in [EntityType.CAT_FLAP, EntityType.PET_FLAP] for lock_state in ( - LockState.LOCKED_IN, - LockState.LOCKED_OUT, - LockState.LOCKED_ALL, + SurepyLockState.LOCKED_IN, + SurepyLockState.LOCKED_OUT, + SurepyLockState.LOCKED_ALL, ) ) @@ -44,7 +43,7 @@ class SurePetcareLock(SurePetcareEntity, LockEntity): self, surepetcare_id: int, coordinator: SurePetcareDataCoordinator, - lock_state: LockState, + lock_state: SurepyLockState, ) -> None: """Initialize a Sure Petcare lock.""" self._lock_state = lock_state.name.lower() @@ -66,14 +65,14 @@ class SurePetcareLock(SurePetcareEntity, LockEntity): status = surepy_entity.raw_data()["status"] self._attr_is_locked = ( - LockState(status["locking"]["mode"]).name.lower() == self._lock_state + SurepyLockState(status["locking"]["mode"]).name.lower() == self._lock_state ) self._available = bool(status.get("online")) async def async_lock(self, **kwargs: Any) -> None: """Lock the lock.""" - if self.state != STATE_UNLOCKED: + if self.state != LockState.UNLOCKED: return self._attr_is_locking = True self.async_write_ha_state() @@ -87,7 +86,7 @@ class SurePetcareLock(SurePetcareEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - if self.state != STATE_LOCKED: + if self.state != LockState.LOCKED: return self._attr_is_unlocking = True self.async_write_ha_state() diff --git a/homeassistant/components/surepetcare/strings.json b/homeassistant/components/surepetcare/strings.json index c3b7864f36a..58db669732a 100644 --- a/homeassistant/components/surepetcare/strings.json +++ b/homeassistant/components/surepetcare/strings.json @@ -21,7 +21,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "services": { diff --git a/homeassistant/components/swiss_hydrological_data/manifest.json b/homeassistant/components/swiss_hydrological_data/manifest.json index 14e2882804e..11b49a42e3f 100644 --- a/homeassistant/components/swiss_hydrological_data/manifest.json +++ b/homeassistant/components/swiss_hydrological_data/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/swiss_hydrological_data", "iot_class": "cloud_polling", "loggers": ["swisshydrodata"], + "quality_scale": "legacy", "requirements": ["swisshydrodata==0.1.0"] } diff --git a/homeassistant/components/swiss_hydrological_data/sensor.py b/homeassistant/components/swiss_hydrological_data/sensor.py index c67045521b5..3d88182eaa4 100644 --- 
a/homeassistant/components/swiss_hydrological_data/sensor.py +++ b/homeassistant/components/swiss_hydrological_data/sensor.py @@ -103,7 +103,7 @@ class SwissHydrologicalDataSensor(SensorEntity): @property def name(self): """Return the name of the sensor.""" - return "{} {}".format(self._data["water-body-name"], self._condition) + return f"{self._data['water-body-name']} {self._condition}" @property def unique_id(self) -> str: diff --git a/homeassistant/components/swiss_public_transport/__init__.py b/homeassistant/components/swiss_public_transport/__init__.py index dc1d0eb236c..628f6e95c2a 100644 --- a/homeassistant/components/swiss_public_transport/__init__.py +++ b/homeassistant/components/swiss_public_transport/__init__.py @@ -8,8 +8,8 @@ from opendata_transport.exceptions import ( OpendataTransportError, ) -from homeassistant import config_entries, core from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from homeassistant.helpers import ( config_validation as cv, @@ -19,9 +19,22 @@ from homeassistant.helpers import ( from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType -from .const import CONF_DESTINATION, CONF_START, CONF_VIA, DOMAIN, PLACEHOLDERS -from .coordinator import SwissPublicTransportDataUpdateCoordinator -from .helper import unique_id_from_config +from .const import ( + CONF_DESTINATION, + CONF_START, + CONF_TIME_FIXED, + CONF_TIME_OFFSET, + CONF_TIME_STATION, + CONF_VIA, + DEFAULT_TIME_STATION, + DOMAIN, + PLACEHOLDERS, +) +from .coordinator import ( + SwissPublicTransportConfigEntry, + SwissPublicTransportDataUpdateCoordinator, +) +from .helper import offset_opendata, unique_id_from_config from .services import setup_services _LOGGER = logging.getLogger(__name__) @@ -32,14 +45,14 @@ PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -async def async_setup(hass: core.HomeAssistant, config: ConfigType) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Swiss public transport component.""" setup_services(hass) return True async def async_setup_entry( - hass: core.HomeAssistant, entry: config_entries.ConfigEntry + hass: HomeAssistant, entry: SwissPublicTransportConfigEntry ) -> bool: """Set up Swiss public transport from a config entry.""" config = entry.data @@ -47,8 +60,19 @@ async def async_setup_entry( start = config[CONF_START] destination = config[CONF_DESTINATION] + time_offset: dict[str, int] | None = config.get(CONF_TIME_OFFSET) + session = async_get_clientsession(hass) - opendata = OpendataTransport(start, destination, session, via=config.get(CONF_VIA)) + opendata = OpendataTransport( + start, + destination, + session, + via=config.get(CONF_VIA), + time=config.get(CONF_TIME_FIXED), + isArrivalTime=config.get(CONF_TIME_STATION, DEFAULT_TIME_STATION) == "arrival", + ) + if time_offset: + offset_opendata(opendata, time_offset) try: await opendata.async_get_data() @@ -72,31 +96,28 @@ async def async_setup_entry( }, ) from e - coordinator = SwissPublicTransportDataUpdateCoordinator(hass, opendata) + coordinator = SwissPublicTransportDataUpdateCoordinator(hass, opendata, time_offset) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, 
PLATFORMS) return True async def async_unload_entry( - hass: core.HomeAssistant, entry: config_entries.ConfigEntry + hass: HomeAssistant, entry: SwissPublicTransportConfigEntry ) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def async_migrate_entry( - hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry + hass: HomeAssistant, config_entry: SwissPublicTransportConfigEntry ) -> bool: """Migrate config entry.""" _LOGGER.debug("Migrating from version %s", config_entry.version) - if config_entry.version > 2: + if config_entry.version > 3: # This means the user has downgraded from a future version return False @@ -131,9 +152,9 @@ async def async_migrate_entry( config_entry, unique_id=new_unique_id, minor_version=2 ) - if config_entry.version < 2: - # Via stations now available, which are not backwards compatible if used, changes unique id - hass.config_entries.async_update_entry(config_entry, version=2, minor_version=1) + if config_entry.version < 3: + # Via stations and time/offset settings now available, which are not backwards compatible if used, changes unique id + hass.config_entries.async_update_entry(config_entry, version=3, minor_version=1) _LOGGER.debug( "Migration to version %s.%s successful", diff --git a/homeassistant/components/swiss_public_transport/config_flow.py b/homeassistant/components/swiss_public_transport/config_flow.py index 74c6223f1d9..58d674f0c26 100644 --- a/homeassistant/components/swiss_public_transport/config_flow.py +++ b/homeassistant/components/swiss_public_transport/config_flow.py @@ -14,15 +14,35 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.selector import ( + DurationSelector, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, TextSelector, TextSelectorConfig, TextSelectorType, + TimeSelector, ) -from .const import CONF_DESTINATION, CONF_START, CONF_VIA, DOMAIN, MAX_VIA, PLACEHOLDERS -from .helper import unique_id_from_config +from .const import ( + CONF_DESTINATION, + CONF_START, + CONF_TIME_FIXED, + CONF_TIME_MODE, + CONF_TIME_OFFSET, + CONF_TIME_STATION, + CONF_VIA, + DEFAULT_TIME_MODE, + DEFAULT_TIME_STATION, + DOMAIN, + IS_ARRIVAL_OPTIONS, + MAX_VIA, + PLACEHOLDERS, + TIME_MODE_OPTIONS, +) +from .helper import offset_opendata, unique_id_from_config -DATA_SCHEMA = vol.Schema( +USER_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_START): cv.string, vol.Optional(CONF_VIA): TextSelector( @@ -32,8 +52,25 @@ DATA_SCHEMA = vol.Schema( ), ), vol.Required(CONF_DESTINATION): cv.string, + vol.Optional(CONF_TIME_MODE, default=DEFAULT_TIME_MODE): SelectSelector( + SelectSelectorConfig( + options=TIME_MODE_OPTIONS, + mode=SelectSelectorMode.DROPDOWN, + translation_key="time_mode", + ), + ), + vol.Optional(CONF_TIME_STATION, default=DEFAULT_TIME_STATION): SelectSelector( + SelectSelectorConfig( + options=IS_ARRIVAL_OPTIONS, + mode=SelectSelectorMode.DROPDOWN, + translation_key="time_station", + ), + ), } ) +ADVANCED_TIME_DATA_SCHEMA = {vol.Optional(CONF_TIME_FIXED): TimeSelector()} +ADVANCED_TIME_OFFSET_DATA_SCHEMA = {vol.Optional(CONF_TIME_OFFSET): DurationSelector()} + _LOGGER = logging.getLogger(__name__) @@ -41,39 +78,33 @@ _LOGGER = 
logging.getLogger(__name__) class SwissPublicTransportConfigFlow(ConfigFlow, domain=DOMAIN): """Swiss public transport config flow.""" - VERSION = 2 + VERSION = 3 MINOR_VERSION = 1 + user_input: dict[str, Any] + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Async user step to set up the connection.""" errors: dict[str, str] = {} if user_input is not None: - unique_id = unique_id_from_config(user_input) - await self.async_set_unique_id(unique_id) - self._abort_if_unique_id_configured() - if CONF_VIA in user_input and len(user_input[CONF_VIA]) > MAX_VIA: errors["base"] = "too_many_via_stations" else: - session = async_get_clientsession(self.hass) - opendata = OpendataTransport( - user_input[CONF_START], - user_input[CONF_DESTINATION], - session, - via=user_input.get(CONF_VIA), - ) - try: - await opendata.async_get_data() - except OpendataTransportConnectionError: - errors["base"] = "cannot_connect" - except OpendataTransportError: - errors["base"] = "bad_config" - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unknown error") - errors["base"] = "unknown" + err = await self.fetch_connections(user_input) + if err: + errors["base"] = err else: + self.user_input = user_input + if user_input[CONF_TIME_MODE] == "fixed": + return await self.async_step_time_fixed() + if user_input[CONF_TIME_MODE] == "offset": + return await self.async_step_time_offset() + + unique_id = unique_id_from_config(user_input) + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() return self.async_create_entry( title=unique_id, data=user_input, @@ -81,7 +112,85 @@ class SwissPublicTransportConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", - data_schema=DATA_SCHEMA, + data_schema=self.add_suggested_values_to_schema( + data_schema=USER_DATA_SCHEMA, + suggested_values=user_input, + ), errors=errors, description_placeholders=PLACEHOLDERS, ) + + async def async_step_time_fixed( + self, time_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Async time step to set up the connection.""" + return await self._async_step_time_mode( + CONF_TIME_FIXED, vol.Schema(ADVANCED_TIME_DATA_SCHEMA), time_input + ) + + async def async_step_time_offset( + self, time_offset_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Async time offset step to set up the connection.""" + return await self._async_step_time_mode( + CONF_TIME_OFFSET, + vol.Schema(ADVANCED_TIME_OFFSET_DATA_SCHEMA), + time_offset_input, + ) + + async def _async_step_time_mode( + self, + step_id: str, + time_mode_schema: vol.Schema, + time_mode_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Async time mode step to set up the connection.""" + errors: dict[str, str] = {} + if time_mode_input is not None: + unique_id = unique_id_from_config({**self.user_input, **time_mode_input}) + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + err = await self.fetch_connections( + {**self.user_input, **time_mode_input}, + time_mode_input.get(CONF_TIME_OFFSET), + ) + if err: + errors["base"] = err + else: + return self.async_create_entry( + title=unique_id, + data={**self.user_input, **time_mode_input}, + ) + + return self.async_show_form( + step_id=step_id, + data_schema=time_mode_schema, + errors=errors, + description_placeholders=PLACEHOLDERS, + ) + + async def fetch_connections( + self, input: dict[str, Any], time_offset: dict[str, int] | None = None + ) -> str | None: + 
"""Fetch the connections and advancedly return an error.""" + try: + session = async_get_clientsession(self.hass) + opendata = OpendataTransport( + input[CONF_START], + input[CONF_DESTINATION], + session, + via=input.get(CONF_VIA), + time=input.get(CONF_TIME_FIXED), + ) + if time_offset: + offset_opendata(opendata, time_offset) + await opendata.async_get_data() + except OpendataTransportConnectionError: + return "cannot_connect" + except OpendataTransportError: + return "bad_config" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unknown error") + return "unknown" + return None diff --git a/homeassistant/components/swiss_public_transport/const.py b/homeassistant/components/swiss_public_transport/const.py index c02f36f2f25..10bfc0d0355 100644 --- a/homeassistant/components/swiss_public_transport/const.py +++ b/homeassistant/components/swiss_public_transport/const.py @@ -7,13 +7,21 @@ DOMAIN = "swiss_public_transport" CONF_DESTINATION: Final = "to" CONF_START: Final = "from" CONF_VIA: Final = "via" +CONF_TIME_STATION: Final = "time_station" +CONF_TIME_MODE: Final = "time_mode" +CONF_TIME_FIXED: Final = "time_fixed" +CONF_TIME_OFFSET: Final = "time_offset" DEFAULT_NAME = "Next Destination" DEFAULT_UPDATE_TIME = 90 +DEFAULT_TIME_STATION = "departure" +DEFAULT_TIME_MODE = "now" MAX_VIA = 5 CONNECTIONS_COUNT = 3 CONNECTIONS_MAX = 15 +IS_ARRIVAL_OPTIONS = ["departure", "arrival"] +TIME_MODE_OPTIONS = ["now", "fixed", "offset"] PLACEHOLDERS = { diff --git a/homeassistant/components/swiss_public_transport/coordinator.py b/homeassistant/components/swiss_public_transport/coordinator.py index 114215520ac..59602e7b982 100644 --- a/homeassistant/components/swiss_public_transport/coordinator.py +++ b/homeassistant/components/swiss_public_transport/coordinator.py @@ -16,11 +16,17 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util +from homeassistant.util.json import JsonValueType from .const import CONNECTIONS_COUNT, DEFAULT_UPDATE_TIME, DOMAIN +from .helper import offset_opendata _LOGGER = logging.getLogger(__name__) +type SwissPublicTransportConfigEntry = ConfigEntry[ + SwissPublicTransportDataUpdateCoordinator +] + class DataConnection(TypedDict): """A connection data class.""" @@ -34,6 +40,7 @@ class DataConnection(TypedDict): train_number: str transfers: int delay: int + line: str def calculate_duration_in_seconds(duration_text: str) -> int | None: @@ -49,9 +56,14 @@ class SwissPublicTransportDataUpdateCoordinator( ): """A SwissPublicTransport Data Update Coordinator.""" - config_entry: ConfigEntry + config_entry: SwissPublicTransportConfigEntry - def __init__(self, hass: HomeAssistant, opendata: OpendataTransport) -> None: + def __init__( + self, + hass: HomeAssistant, + opendata: OpendataTransport, + time_offset: dict[str, int] | None, + ) -> None: """Initialize the SwissPublicTransport data coordinator.""" super().__init__( hass, @@ -60,6 +72,7 @@ class SwissPublicTransportDataUpdateCoordinator( update_interval=timedelta(seconds=DEFAULT_UPDATE_TIME), ) self._opendata = opendata + self._time_offset = time_offset def remaining_time(self, departure) -> timedelta | None: """Calculate the remaining time for the departure.""" @@ -69,19 +82,15 @@ class SwissPublicTransportDataUpdateCoordinator( return departure_datetime - dt_util.as_local(dt_util.utcnow()) return None - def nth_departure_time(self, i: 
int) -> datetime | None: - """Get nth departure time.""" - connections = self._opendata.connections - if len(connections) > i and connections[i] is not None: - return dt_util.parse_datetime(connections[i]["departure"]) - return None - async def _async_update_data(self) -> list[DataConnection]: return await self.fetch_connections(limit=CONNECTIONS_COUNT) async def fetch_connections(self, limit: int) -> list[DataConnection]: """Fetch connections using the opendata api.""" self._opendata.limit = limit + if self._time_offset: + offset_opendata(self._opendata, self._time_offset) + try: await self._opendata.async_get_data() except OpendataTransportConnectionError as e: @@ -95,7 +104,7 @@ class SwissPublicTransportDataUpdateCoordinator( connections = self._opendata.connections return [ DataConnection( - departure=self.nth_departure_time(i), + departure=dt_util.parse_datetime(connections[i]["departure"]), train_number=connections[i]["number"], platform=connections[i]["platform"], transfers=connections[i]["transfers"], @@ -104,7 +113,28 @@ class SwissPublicTransportDataUpdateCoordinator( destination=self._opendata.to_name, remaining_time=str(self.remaining_time(connections[i]["departure"])), delay=connections[i]["delay"], + line=connections[i]["line"], ) for i in range(limit) if len(connections) > i and connections[i] is not None ] + + async def fetch_connections_as_json(self, limit: int) -> list[JsonValueType]: + """Fetch connections using the opendata api.""" + return [ + { + "departure": connection["departure"].isoformat() + if connection["departure"] + else None, + "duration": connection["duration"], + "platform": connection["platform"], + "remaining_time": connection["remaining_time"], + "start": connection["start"], + "destination": connection["destination"], + "train_number": connection["train_number"], + "transfers": connection["transfers"], + "delay": connection["delay"], + "line": connection["line"], + } + for connection in await self.fetch_connections(limit) + ] diff --git a/homeassistant/components/swiss_public_transport/helper.py b/homeassistant/components/swiss_public_transport/helper.py index af03f7ad193..704479b77d6 100644 --- a/homeassistant/components/swiss_public_transport/helper.py +++ b/homeassistant/components/swiss_public_transport/helper.py @@ -1,15 +1,59 @@ """Helper functions for swiss_public_transport.""" +from datetime import timedelta from types import MappingProxyType from typing import Any -from .const import CONF_DESTINATION, CONF_START, CONF_VIA +from opendata_transport import OpendataTransport + +import homeassistant.util.dt as dt_util + +from .const import ( + CONF_DESTINATION, + CONF_START, + CONF_TIME_FIXED, + CONF_TIME_OFFSET, + CONF_TIME_STATION, + CONF_VIA, + DEFAULT_TIME_STATION, +) + + +def offset_opendata(opendata: OpendataTransport, offset: dict[str, int]) -> None: + """In place offset the opendata connector.""" + + duration = timedelta(**offset) + if duration: + now_offset = dt_util.as_local(dt_util.now() + duration) + opendata.date = now_offset.date() + opendata.time = now_offset.time() + + +def dict_duration_to_str_duration( + d: dict[str, int], +) -> str: + """Build a string from a dict duration.""" + return f"{d['hours']:02d}:{d['minutes']:02d}:{d['seconds']:02d}" def unique_id_from_config(config: MappingProxyType[str, Any] | dict[str, Any]) -> str: """Build a unique id from a config entry.""" - return f"{config[CONF_START]} {config[CONF_DESTINATION]}" + ( - " via " + ", ".join(config[CONF_VIA]) - if CONF_VIA in config and len(config[CONF_VIA]) > 0 - 
else "" + return ( + f"{config[CONF_START]} {config[CONF_DESTINATION]}" + + ( + " via " + ", ".join(config[CONF_VIA]) + if CONF_VIA in config and len(config[CONF_VIA]) > 0 + else "" + ) + + ( + " arrival" + if config.get(CONF_TIME_STATION, DEFAULT_TIME_STATION) == "arrival" + else "" + ) + + (" at " + config[CONF_TIME_FIXED] if CONF_TIME_FIXED in config else "") + + ( + " in " + dict_duration_to_str_duration(config[CONF_TIME_OFFSET]) + if CONF_TIME_OFFSET in config + else "" + ) ) diff --git a/homeassistant/components/swiss_public_transport/icons.json b/homeassistant/components/swiss_public_transport/icons.json index 7c2e5436834..06a640a06b2 100644 --- a/homeassistant/components/swiss_public_transport/icons.json +++ b/homeassistant/components/swiss_public_transport/icons.json @@ -21,10 +21,15 @@ }, "delay": { "default": "mdi:clock-plus" + }, + "line": { + "default": "mdi:transit-connection-variant" } } }, "services": { - "fetch_connections": "mdi:bus-clock" + "fetch_connections": { + "service": "mdi:bus-clock" + } } } diff --git a/homeassistant/components/swiss_public_transport/manifest.json b/homeassistant/components/swiss_public_transport/manifest.json index 6f8e603bbe7..10509328043 100644 --- a/homeassistant/components/swiss_public_transport/manifest.json +++ b/homeassistant/components/swiss_public_transport/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/swiss_public_transport", "iot_class": "cloud_polling", "loggers": ["opendata_transport"], - "requirements": ["python-opendata-transport==0.4.0"] + "requirements": ["python-opendata-transport==0.5.0"] } diff --git a/homeassistant/components/swiss_public_transport/sensor.py b/homeassistant/components/swiss_public_transport/sensor.py index c186b963705..452ec31972f 100644 --- a/homeassistant/components/swiss_public_transport/sensor.py +++ b/homeassistant/components/swiss_public_transport/sensor.py @@ -8,20 +8,24 @@ from datetime import datetime, timedelta import logging from typing import TYPE_CHECKING -from homeassistant import config_entries, core from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, SensorEntityDescription, ) from homeassistant.const import UnitOfTime +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONNECTIONS_COUNT, DOMAIN -from .coordinator import DataConnection, SwissPublicTransportDataUpdateCoordinator +from .coordinator import ( + DataConnection, + SwissPublicTransportConfigEntry, + SwissPublicTransportDataUpdateCoordinator, +) _LOGGER = logging.getLogger(__name__) @@ -71,24 +75,27 @@ SENSORS: tuple[SwissPublicTransportSensorEntityDescription, ...] 
= ( native_unit_of_measurement=UnitOfTime.MINUTES, value_fn=lambda data_connection: data_connection["delay"], ), + SwissPublicTransportSensorEntityDescription( + key="line", + translation_key="line", + value_fn=lambda data_connection: data_connection["line"], + ), ) async def async_setup_entry( - hass: core.HomeAssistant, - config_entry: config_entries.ConfigEntry, + hass: HomeAssistant, + config_entry: SwissPublicTransportConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the sensor from a config entry created in the integrations UI.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] - unique_id = config_entry.unique_id if TYPE_CHECKING: assert unique_id async_add_entities( - SwissPublicTransportSensor(coordinator, description, unique_id) + SwissPublicTransportSensor(config_entry.runtime_data, description, unique_id) for description in SENSORS ) diff --git a/homeassistant/components/swiss_public_transport/services.py b/homeassistant/components/swiss_public_transport/services.py index e8b7c6bd458..3abf1a14b9f 100644 --- a/homeassistant/components/swiss_public_transport/services.py +++ b/homeassistant/components/swiss_public_transport/services.py @@ -2,7 +2,6 @@ import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntryState from homeassistant.core import ( HomeAssistant, @@ -26,6 +25,7 @@ from .const import ( DOMAIN, SERVICE_FETCH_CONNECTIONS, ) +from .coordinator import SwissPublicTransportConfigEntry SERVICE_FETCH_CONNECTIONS_SCHEMA = vol.Schema( { @@ -41,7 +41,7 @@ SERVICE_FETCH_CONNECTIONS_SCHEMA = vol.Schema( def async_get_entry( hass: HomeAssistant, config_entry_id: str -) -> config_entries.ConfigEntry: +) -> SwissPublicTransportConfigEntry: """Get the Swiss public transport config entry.""" if not (entry := hass.config_entries.async_get_entry(config_entry_id)): raise ServiceValidationError( @@ -66,10 +66,12 @@ def setup_services(hass: HomeAssistant) -> None: ) -> ServiceResponse: """Fetch a set of connections.""" config_entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID]) + limit = call.data.get(ATTR_LIMIT) or CONNECTIONS_COUNT - coordinator = hass.data[DOMAIN][config_entry.entry_id] try: - connections = await coordinator.fetch_connections(limit=int(limit)) + connections = await config_entry.runtime_data.fetch_connections_as_json( + limit=int(limit) + ) except UpdateFailed as e: raise HomeAssistantError( translation_domain=DOMAIN, diff --git a/homeassistant/components/swiss_public_transport/strings.json b/homeassistant/components/swiss_public_transport/strings.json index 29e73978538..91645b2fee4 100644 --- a/homeassistant/components/swiss_public_transport/strings.json +++ b/homeassistant/components/swiss_public_transport/strings.json @@ -17,10 +17,30 @@ "data": { "from": "Start station", "to": "End station", - "via": "List of up to 5 via stations" + "via": "List of up to 5 via stations", + "time_station": "Select the relevant station", + "time_mode": "Select a time mode" + }, + "data_description": { + "time_station": "Usually the departure time of a connection when it leaves the start station is tracked. Alternatively, track the time when the connection arrives at its end station.", + "time_mode": "Time mode lets you change the departure timing and fix it to a specific time (e.g. 7:12:00 AM every morning) or add a moving offset (e.g. +00:05:00 taking into account the time to walk to the station)." 
}, "description": "Provide start and end station for your connection,\nand optionally up to 5 via stations.\n\nCheck the [stationboard]({stationboard_url}) for valid stations.", "title": "Swiss Public Transport" + }, + "time_fixed": { + "data": { + "time_fixed": "Time of day" + }, + "description": "Please select the relevant time for the connection (e.g. 7:12:00 AM every morning).", + "title": "Swiss Public Transport" + }, + "time_offset": { + "data": { + "time_offset": "Time offset" + }, + "description": "Please select the relevant offset to add to the earliest possible connection (e.g. add +00:05:00 offset, taking into account the time to walk to the station)", + "title": "Swiss Public Transport" } } }, @@ -46,6 +66,9 @@ }, "delay": { "name": "Delay" + }, + "line": { + "name": "Line" } } }, @@ -81,5 +104,20 @@ "config_entry_not_found": { "message": "Swiss public transport integration instance \"{target}\" not found." } + }, + "selector": { + "time_station": { + "options": { + "departure": "Show departure time from start station", + "arrival": "Show arrival time at end station" + } + }, + "time_mode": { + "options": { + "now": "Now", + "fixed": "At a fixed time of day", + "offset": "At an offset from now" + } + } } } diff --git a/homeassistant/components/swisscom/device_tracker.py b/homeassistant/components/swisscom/device_tracker.py index c13e5a322aa..66537a4311e 100644 --- a/homeassistant/components/swisscom/device_tracker.py +++ b/homeassistant/components/swisscom/device_tracker.py @@ -9,7 +9,7 @@ import requests import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -31,7 +31,7 @@ def get_scanner( hass: HomeAssistant, config: ConfigType ) -> SwisscomDeviceScanner | None: """Return the Swisscom device scanner.""" - scanner = SwisscomDeviceScanner(config[DOMAIN]) + scanner = SwisscomDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None @@ -70,7 +70,7 @@ class SwisscomDeviceScanner(DeviceScanner): if not self.success_init: return False - _LOGGER.info("Loading data from Swisscom Internet Box") + _LOGGER.debug("Loading data from Swisscom Internet Box") if not (data := self.get_swisscom_data()): return False @@ -95,11 +95,11 @@ class SwisscomDeviceScanner(DeviceScanner): requests.exceptions.Timeout, requests.exceptions.ConnectTimeout, ): - _LOGGER.info("No response from Swisscom Internet Box") + _LOGGER.debug("No response from Swisscom Internet Box") return devices if "status" not in request.json(): - _LOGGER.info("No status in response from Swisscom Internet Box") + _LOGGER.debug("No status in response from Swisscom Internet Box") return devices for device in request.json()["status"]: diff --git a/homeassistant/components/swisscom/manifest.json b/homeassistant/components/swisscom/manifest.json index cb0e674570e..cf1ea01ea9c 100644 --- a/homeassistant/components/swisscom/manifest.json +++ b/homeassistant/components/swisscom/manifest.json @@ -3,5 +3,6 @@ "name": "Swisscom Internet-Box", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/swisscom", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/switch/__init__.py b/homeassistant/components/switch/__init__.py index 43971741e51..61ee2908009 100644 --- a/homeassistant/components/switch/__init__.py +++ b/homeassistant/components/switch/__init__.py @@ -4,9 +4,9 
@@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import cached_property, partial import logging +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -18,21 +18,17 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass +from homeassistant.util.hass_dict import HassKey from .const import DOMAIN _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[SwitchEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -49,16 +45,8 @@ class SwitchDeviceClass(StrEnum): DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.Coerce(SwitchDeviceClass)) - -# DEVICE_CLASS* below are deprecated as of 2021.12 -# use the SwitchDeviceClass enum instead. DEVICE_CLASSES = [cls.value for cls in SwitchDeviceClass] -_DEPRECATED_DEVICE_CLASS_OUTLET = DeprecatedConstantEnum( - SwitchDeviceClass.OUTLET, "2025.1" -) -_DEPRECATED_DEVICE_CLASS_SWITCH = DeprecatedConstantEnum( - SwitchDeviceClass.SWITCH, "2025.1" -) + # mypy: disallow-any-generics @@ -74,7 +62,7 @@ def is_on(hass: HomeAssistant, entity_id: str) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for switches.""" - component = hass.data[DOMAIN] = EntityComponent[SwitchEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[SwitchEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -88,14 +76,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[SwitchEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[SwitchEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class SwitchEntityDescription(ToggleEntityDescription, frozen_or_thawed=True): @@ -123,11 +109,3 @@ class SwitchEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_) if hasattr(self, "entity_description"): return self.entity_description.device_class return None - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/switch/icons.json b/homeassistant/components/switch/icons.json index fbc1af5a126..10299a2ffc8 100644 --- a/homeassistant/components/switch/icons.json +++ 
b/homeassistant/components/switch/icons.json @@ -20,8 +20,14 @@ } }, "services": { - "toggle": "mdi:toggle-switch-variant", - "turn_off": "mdi:toggle-switch-variant-off", - "turn_on": "mdi:toggle-switch-variant" + "toggle": { + "service": "mdi:toggle-switch-variant" + }, + "turn_off": { + "service": "mdi:toggle-switch-variant-off" + }, + "turn_on": { + "service": "mdi:toggle-switch-variant" + } } } diff --git a/homeassistant/components/switch_as_x/config_flow.py b/homeassistant/components/switch_as_x/config_flow.py index 37df3affbad..aa9f1d411ce 100644 --- a/homeassistant/components/switch_as_x/config_flow.py +++ b/homeassistant/components/switch_as_x/config_flow.py @@ -18,12 +18,12 @@ from homeassistant.helpers.schema_config_entry_flow import ( from .const import CONF_INVERT, CONF_TARGET_DOMAIN, DOMAIN TARGET_DOMAIN_OPTIONS = [ - selector.SelectOptionDict(value=Platform.COVER, label="Cover"), - selector.SelectOptionDict(value=Platform.FAN, label="Fan"), - selector.SelectOptionDict(value=Platform.LIGHT, label="Light"), - selector.SelectOptionDict(value=Platform.LOCK, label="Lock"), - selector.SelectOptionDict(value=Platform.SIREN, label="Siren"), - selector.SelectOptionDict(value=Platform.VALVE, label="Valve"), + Platform.COVER, + Platform.FAN, + Platform.LIGHT, + Platform.LOCK, + Platform.SIREN, + Platform.VALVE, ] CONFIG_FLOW = { @@ -35,7 +35,9 @@ CONFIG_FLOW = { ), vol.Optional(CONF_INVERT, default=False): selector.BooleanSelector(), vol.Required(CONF_TARGET_DOMAIN): selector.SelectSelector( - selector.SelectSelectorConfig(options=TARGET_DOMAIN_OPTIONS), + selector.SelectSelectorConfig( + options=TARGET_DOMAIN_OPTIONS, translation_key="target_domain" + ), ), } ) diff --git a/homeassistant/components/switch_as_x/fan.py b/homeassistant/components/switch_as_x/fan.py index 91d3a4d119a..858379e71df 100644 --- a/homeassistant/components/switch_as_x/fan.py +++ b/homeassistant/components/switch_as_x/fan.py @@ -46,7 +46,6 @@ class FanSwitch(BaseToggleEntity, FanEntity): """Represents a Switch as a Fan.""" _attr_supported_features = FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON - _enable_turn_on_off_backwards_compatibility = False @property def is_on(self) -> bool | None: diff --git a/homeassistant/components/switch_as_x/strings.json b/homeassistant/components/switch_as_x/strings.json index 81567ef9e40..9c3db05231b 100644 --- a/homeassistant/components/switch_as_x/strings.json +++ b/homeassistant/components/switch_as_x/strings.json @@ -26,5 +26,17 @@ } } } + }, + "selector": { + "target_domain": { + "options": { + "cover": "[%key:component::cover::title%]", + "fan": "[%key:component::fan::title%]", + "light": "[%key:component::light::title%]", + "lock": "[%key:component::lock::title%]", + "siren": "[%key:component::siren::title%]", + "valve": "[%key:component::valve::title%]" + } + } } } diff --git a/homeassistant/components/switchbee/__init__.py b/homeassistant/components/switchbee/__init__.py index d5e182a31dc..758698a7d67 100644 --- a/homeassistant/components/switchbee/__init__.py +++ b/homeassistant/components/switchbee/__init__.py @@ -115,7 +115,7 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> rf"(?:{old_unique_id})-(?P\d+)", entity_entry.unique_id ): entity_new_unique_id = f'{new_unique_id}-{match.group("id")}' - _LOGGER.info( + _LOGGER.debug( "Migrating entity %s from %s to new id %s", entity_entry.entity_id, entity_entry.unique_id, @@ -141,7 +141,7 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> 
f"{match.group('id')}-{new_unique_id}", ) } - _LOGGER.info( + _LOGGER.debug( "Migrating device %s identifiers from %s to %s", device_entry.name, device_entry.identifiers, @@ -158,6 +158,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> hass.config_entries.async_update_entry(config_entry, version=2) - _LOGGER.info("Migration to version %s successful", config_entry.version) + _LOGGER.debug("Migration to version %s successful", config_entry.version) return True diff --git a/homeassistant/components/switchbee/climate.py b/homeassistant/components/switchbee/climate.py index 7ec0ad4d88b..d946ed1761b 100644 --- a/homeassistant/components/switchbee/climate.py +++ b/homeassistant/components/switchbee/climate.py @@ -90,7 +90,6 @@ class SwitchBeeClimateEntity(SwitchBeeDeviceEntity[SwitchBeeThermostat], Climate _attr_fan_modes = SUPPORTED_FAN_MODES _attr_target_temperature_step = 1 - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/switchbee/entity.py b/homeassistant/components/switchbee/entity.py index 893f052c8a0..d2d58a3ace3 100644 --- a/homeassistant/components/switchbee/entity.py +++ b/homeassistant/components/switchbee/entity.py @@ -88,7 +88,7 @@ class SwitchBeeDeviceEntity[_DeviceTypeT: SwitchBeeBaseDevice]( def _check_if_became_online(self) -> None: """Check if the device was offline (now online) and bring it back.""" if not self._is_online: - _LOGGER.info( + _LOGGER.warning( "%s device is now responding", self.name, ) diff --git a/homeassistant/components/switchbot/__init__.py b/homeassistant/components/switchbot/__init__.py index 75845d3f3ce..c2b4b2ad736 100644 --- a/homeassistant/components/switchbot/__init__.py +++ b/homeassistant/components/switchbot/__init__.py @@ -41,6 +41,7 @@ PLATFORMS_BY_TYPE = { Platform.SENSOR, ], SupportedModels.HYGROMETER.value: [Platform.SENSOR], + SupportedModels.HYGROMETER_CO2.value: [Platform.SENSOR], SupportedModels.CONTACT.value: [Platform.BINARY_SENSOR, Platform.SENSOR], SupportedModels.MOTION.value: [Platform.BINARY_SENSOR, Platform.SENSOR], SupportedModels.HUMIDIFIER.value: [Platform.HUMIDIFIER, Platform.SENSOR], diff --git a/homeassistant/components/switchbot/config_flow.py b/homeassistant/components/switchbot/config_flow.py index a1c947fd611..a0e45169770 100644 --- a/homeassistant/components/switchbot/config_flow.py +++ b/homeassistant/components/switchbot/config_flow.py @@ -38,13 +38,16 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import ( CONF_ENCRYPTION_KEY, CONF_KEY_ID, + CONF_LOCK_NIGHTLATCH, CONF_RETRY_COUNT, CONNECTABLE_SUPPORTED_MODEL_TYPES, + DEFAULT_LOCK_NIGHTLATCH, DEFAULT_RETRY_COUNT, DOMAIN, NON_CONNECTABLE_SUPPORTED_MODEL_TYPES, SUPPORTED_LOCK_MODELS, SUPPORTED_MODEL_TYPES, + SupportedModels, ) _LOGGER = logging.getLogger(__name__) @@ -77,7 +80,7 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SwitchbotOptionsFlowHandler: """Get the options flow for this handler.""" - return SwitchbotOptionsFlowHandler(config_entry) + return SwitchbotOptionsFlowHandler() def __init__(self) -> None: """Initialize the config flow.""" @@ -343,10 +346,6 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): class SwitchbotOptionsFlowHandler(OptionsFlow): """Handle Switchbot options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = 
None ) -> ConfigFlowResult: @@ -355,7 +354,7 @@ class SwitchbotOptionsFlowHandler(OptionsFlow): # Update common entity options for all other entities. return self.async_create_entry(title="", data=user_input) - options = { + options: dict[vol.Optional, Any] = { vol.Optional( CONF_RETRY_COUNT, default=self.config_entry.options.get( @@ -363,5 +362,16 @@ class SwitchbotOptionsFlowHandler(OptionsFlow): ), ): int } + if self.config_entry.data.get(CONF_SENSOR_TYPE) == SupportedModels.LOCK_PRO: + options.update( + { + vol.Optional( + CONF_LOCK_NIGHTLATCH, + default=self.config_entry.options.get( + CONF_LOCK_NIGHTLATCH, DEFAULT_LOCK_NIGHTLATCH + ), + ): bool + } + ) return self.async_show_form(step_id="init", data_schema=vol.Schema(options)) diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py index 0a1ac01e530..b8cf4e8e1ab 100644 --- a/homeassistant/components/switchbot/const.py +++ b/homeassistant/components/switchbot/const.py @@ -20,6 +20,7 @@ class SupportedModels(StrEnum): CEILING_LIGHT = "ceiling_light" CURTAIN = "curtain" HYGROMETER = "hygrometer" + HYGROMETER_CO2 = "hygrometer_co2" LIGHT_STRIP = "light_strip" CONTACT = "contact" PLUG = "plug" @@ -48,6 +49,8 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = { NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = { SwitchbotModel.METER: SupportedModels.HYGROMETER, SwitchbotModel.IO_METER: SupportedModels.HYGROMETER, + SwitchbotModel.METER_PRO: SupportedModels.HYGROMETER, + SwitchbotModel.METER_PRO_C: SupportedModels.HYGROMETER_CO2, SwitchbotModel.CONTACT_SENSOR: SupportedModels.CONTACT, SwitchbotModel.MOTION_SENSOR: SupportedModels.MOTION, } @@ -64,13 +67,10 @@ HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = { # Config Defaults DEFAULT_RETRY_COUNT = 3 +DEFAULT_LOCK_NIGHTLATCH = False # Config Options CONF_RETRY_COUNT = "retry_count" CONF_KEY_ID = "key_id" CONF_ENCRYPTION_KEY = "encryption_key" - -# Deprecated config Entry Options to be removed in 2023.4 -CONF_TIME_BETWEEN_UPDATE_COMMAND = "update_time" -CONF_RETRY_TIMEOUT = "retry_timeout" -CONF_SCAN_TIMEOUT = "scan_timeout" +CONF_LOCK_NIGHTLATCH = "lock_force_nightlatch" diff --git a/homeassistant/components/switchbot/light.py b/homeassistant/components/switchbot/light.py index 836ba1bd4f3..927ad5120c7 100644 --- a/homeassistant/components/switchbot/light.py +++ b/homeassistant/components/switchbot/light.py @@ -8,17 +8,13 @@ from switchbot import ColorMode as SwitchBotColorMode, SwitchbotBaseLight from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ColorMode, LightEntity, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, -) from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator from .entity import SwitchbotEntity @@ -50,8 +46,8 @@ class SwitchbotLightEntity(SwitchbotEntity, LightEntity): """Initialize the Switchbot light.""" super().__init__(coordinator) device = self._device - self._attr_min_mireds = color_temperature_kelvin_to_mired(device.max_temp) - self._attr_max_mireds = color_temperature_kelvin_to_mired(device.min_temp) + self._attr_max_color_temp_kelvin = device.max_temp + self._attr_min_color_temp_kelvin = device.min_temp self._attr_supported_color_modes = { SWITCHBOT_COLOR_MODE_TO_HASS[mode] for mode in device.color_modes } @@ -64,7 +60,7 @@ class SwitchbotLightEntity(SwitchbotEntity, 
LightEntity): self._attr_is_on = self._device.on self._attr_brightness = max(0, min(255, round(device.brightness * 2.55))) if device.color_mode == SwitchBotColorMode.COLOR_TEMP: - self._attr_color_temp = color_temperature_kelvin_to_mired(device.color_temp) + self._attr_color_temp_kelvin = device.color_temp self._attr_color_mode = ColorMode.COLOR_TEMP return self._attr_rgb_color = device.rgb @@ -77,10 +73,9 @@ class SwitchbotLightEntity(SwitchbotEntity, LightEntity): if ( self.supported_color_modes and ColorMode.COLOR_TEMP in self.supported_color_modes - and ATTR_COLOR_TEMP in kwargs + and ATTR_COLOR_TEMP_KELVIN in kwargs ): - color_temp = kwargs[ATTR_COLOR_TEMP] - kelvin = max(2700, min(6500, color_temperature_mired_to_kelvin(color_temp))) + kelvin = max(2700, min(6500, kwargs[ATTR_COLOR_TEMP_KELVIN])) await self._device.set_color_temp(brightness, kelvin) return if ATTR_RGB_COLOR in kwargs: diff --git a/homeassistant/components/switchbot/lock.py b/homeassistant/components/switchbot/lock.py index cb41d14cf66..a3bee5661b2 100644 --- a/homeassistant/components/switchbot/lock.py +++ b/homeassistant/components/switchbot/lock.py @@ -9,6 +9,7 @@ from homeassistant.components.lock import LockEntity, LockEntityFeature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .const import CONF_LOCK_NIGHTLATCH, DEFAULT_LOCK_NIGHTLATCH from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator from .entity import SwitchbotEntity @@ -19,7 +20,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Switchbot lock based on a config entry.""" - async_add_entities([(SwitchBotLock(entry.runtime_data))]) + force_nightlatch = entry.options.get(CONF_LOCK_NIGHTLATCH, DEFAULT_LOCK_NIGHTLATCH) + async_add_entities([SwitchBotLock(entry.runtime_data, force_nightlatch)]) # noinspection PyAbstractClass @@ -30,11 +32,13 @@ class SwitchBotLock(SwitchbotEntity, LockEntity): _attr_name = None _device: switchbot.SwitchbotLock - def __init__(self, coordinator: SwitchbotDataUpdateCoordinator) -> None: + def __init__( + self, coordinator: SwitchbotDataUpdateCoordinator, force_nightlatch + ) -> None: """Initialize the entity.""" super().__init__(coordinator) self._async_update_attrs() - if self._device.is_night_latch_enabled(): + if self._device.is_night_latch_enabled() or force_nightlatch: self._attr_supported_features = LockEntityFeature.OPEN def _async_update_attrs(self) -> None: @@ -55,7 +59,7 @@ class SwitchBotLock(SwitchbotEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - if self._device.is_night_latch_enabled(): + if self._attr_supported_features & (LockEntityFeature.OPEN): self._last_run_success = await self._device.unlock_without_unlatch() else: self._last_run_success = await self._device.unlock() diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 0cbbd70a805..5a328650aca 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.48.1"] + "requirements": ["PySwitchbot==0.54.0"] } diff --git a/homeassistant/components/switchbot/sensor.py b/homeassistant/components/switchbot/sensor.py index e696f21e082..fd3de3e31e9 100644 --- 
a/homeassistant/components/switchbot/sensor.py +++ b/homeassistant/components/switchbot/sensor.py @@ -10,6 +10,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( + CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -50,6 +51,12 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = { state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), + "co2": SensorEntityDescription( + key="co2", + native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CO2, + ), "lightLevel": SensorEntityDescription( key="lightLevel", translation_key="light_level", diff --git a/homeassistant/components/switchbot/strings.json b/homeassistant/components/switchbot/strings.json index a20b4939f8f..80ca32d4826 100644 --- a/homeassistant/components/switchbot/strings.json +++ b/homeassistant/components/switchbot/strings.json @@ -54,7 +54,8 @@ "step": { "init": { "data": { - "retry_count": "Retry count" + "retry_count": "Retry count", + "lock_force_nightlatch": "Force Nightlatch operation mode" } } } diff --git a/homeassistant/components/switchbot_cloud/__init__.py b/homeassistant/components/switchbot_cloud/__init__.py index c79ba41018f..625b4698301 100644 --- a/homeassistant/components/switchbot_cloud/__init__.py +++ b/homeassistant/components/switchbot_cloud/__init__.py @@ -15,7 +15,13 @@ from .const import DOMAIN from .coordinator import SwitchBotCoordinator _LOGGER = getLogger(__name__) -PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] +PLATFORMS: list[Platform] = [ + Platform.CLIMATE, + Platform.LOCK, + Platform.SENSOR, + Platform.SWITCH, + Platform.VACUUM, +] @dataclass @@ -25,6 +31,8 @@ class SwitchbotDevices: climates: list[Remote] = field(default_factory=list) switches: list[Device | Remote] = field(default_factory=list) sensors: list[Device] = field(default_factory=list) + vacuums: list[Device] = field(default_factory=list) + locks: list[Device] = field(default_factory=list) @dataclass @@ -77,10 +85,27 @@ def make_device_data( "Meter", "MeterPlus", "WoIOSensor", + "Hub 2", + "MeterPro", + "MeterPro(CO2)", ]: devices_data.sensors.append( prepare_device(hass, api, device, coordinators_by_id) ) + if isinstance(device, Device) and device.device_type in [ + "K10+", + "K10+ Pro", + "Robot Vacuum Cleaner S1", + "Robot Vacuum Cleaner S1 Plus", + ]: + devices_data.vacuums.append( + prepare_device(hass, api, device, coordinators_by_id) + ) + + if isinstance(device, Device) and device.device_type.startswith("Smart Lock"): + devices_data.locks.append( + prepare_device(hass, api, device, coordinators_by_id) + ) return devices_data diff --git a/homeassistant/components/switchbot_cloud/climate.py b/homeassistant/components/switchbot_cloud/climate.py index e04145933ae..4e05e9e9a1e 100644 --- a/homeassistant/components/switchbot_cloud/climate.py +++ b/homeassistant/components/switchbot_cloud/climate.py @@ -79,8 +79,9 @@ class SwitchBotCloudAirConditioner(SwitchBotCloudEntity, ClimateEntity): _attr_hvac_mode = HVACMode.FAN_ONLY _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_target_temperature = 21 + _attr_target_temperature_step = 1 + _attr_precision = 1 _attr_name = None - _enable_turn_on_off_backwards_compatibility = False async def _do_send_command( self, @@ -95,9 +96,9 @@ class SwitchBotCloudAirConditioner(SwitchBotCloudEntity, ClimateEntity): new_fan_speed = 
_SWITCHBOT_FAN_MODES.get(
             fan_mode or self._attr_fan_mode, _DEFAULT_SWITCHBOT_FAN_MODE
         )
-        await self.send_command(
+        await self.send_api_command(
             AirConditionerCommands.SET_ALL,
-            parameters=f"{new_temperature},{new_mode},{new_fan_speed},on",
+            parameters=f"{int(new_temperature)},{new_mode},{new_fan_speed},on",
         )

     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
diff --git a/homeassistant/components/switchbot_cloud/const.py b/homeassistant/components/switchbot_cloud/const.py
index 66c84b63047..b849194537a 100644
--- a/homeassistant/components/switchbot_cloud/const.py
+++ b/homeassistant/components/switchbot_cloud/const.py
@@ -10,3 +10,8 @@ DEFAULT_SCAN_INTERVAL = timedelta(seconds=600)
 SENSOR_KIND_TEMPERATURE = "temperature"
 SENSOR_KIND_HUMIDITY = "humidity"
 SENSOR_KIND_BATTERY = "battery"
+
+VACUUM_FAN_SPEED_QUIET = "quiet"
+VACUUM_FAN_SPEED_STANDARD = "standard"
+VACUUM_FAN_SPEED_STRONG = "strong"
+VACUUM_FAN_SPEED_MAX = "max"
diff --git a/homeassistant/components/switchbot_cloud/entity.py b/homeassistant/components/switchbot_cloud/entity.py
index 7bb00cda945..f77adb7b192 100644
--- a/homeassistant/components/switchbot_cloud/entity.py
+++ b/homeassistant/components/switchbot_cloud/entity.py
@@ -35,7 +35,7 @@ class SwitchBotCloudEntity(CoordinatorEntity[SwitchBotCoordinator]):
             model=device.device_type,
         )

-    async def send_command(
+    async def send_api_command(
         self,
         command: Commands,
         command_type: str = "command",
diff --git a/homeassistant/components/switchbot_cloud/lock.py b/homeassistant/components/switchbot_cloud/lock.py
new file mode 100644
index 00000000000..2fbd551b919
--- /dev/null
+++ b/homeassistant/components/switchbot_cloud/lock.py
@@ -0,0 +1,53 @@
+"""Support for the Switchbot lock."""
+
+from typing import Any
+
+from switchbot_api import LockCommands
+
+from homeassistant.components.lock import LockEntity
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import SwitchbotCloudData
+from .const import DOMAIN
+from .entity import SwitchBotCloudEntity
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config: ConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up SwitchBot Cloud entry."""
+    data: SwitchbotCloudData = hass.data[DOMAIN][config.entry_id]
+    async_add_entities(
+        SwitchBotCloudLock(data.api, device, coordinator)
+        for device, coordinator in data.devices.locks
+    )
+
+
+class SwitchBotCloudLock(SwitchBotCloudEntity, LockEntity):
+    """Representation of a SwitchBot lock."""
+
+    _attr_name = None
+
+    @callback
+    def _handle_coordinator_update(self) -> None:
+        """Handle updated data from the coordinator."""
+        if coord_data := self.coordinator.data:
+            self._attr_is_locked = coord_data["lockState"] == "locked"
+        self.async_write_ha_state()
+
+    async def async_lock(self, **kwargs: Any) -> None:
+        """Lock the lock."""
+        await self.send_api_command(LockCommands.LOCK)
+        self._attr_is_locked = True
+        self.async_write_ha_state()
+
+    async def async_unlock(self, **kwargs: Any) -> None:
+        """Unlock the lock."""
+
+        await self.send_api_command(LockCommands.UNLOCK)
+        self._attr_is_locked = False
+        self.async_write_ha_state()
diff --git a/homeassistant/components/switchbot_cloud/manifest.json b/homeassistant/components/switchbot_cloud/manifest.json
index 0bafdec9f68..eb08d2183b1 100644
--- a/homeassistant/components/switchbot_cloud/manifest.json
+++ b/homeassistant/components/switchbot_cloud/manifest.json
@@ -1,7 +1,7 @@
 {
   "domain": "switchbot_cloud",
   "name": "SwitchBot Cloud",
-  "codeowners": ["@SeraphicRav", "@laurence-presland"],
+  "codeowners": ["@SeraphicRav", "@laurence-presland", "@Gigatrappeur"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/switchbot_cloud",
   "integration_type": "hub",
diff --git a/homeassistant/components/switchbot_cloud/sensor.py b/homeassistant/components/switchbot_cloud/sensor.py
index ac612aea119..90135ad96b3 100644
--- a/homeassistant/components/switchbot_cloud/sensor.py
+++ b/homeassistant/components/switchbot_cloud/sensor.py
@@ -9,7 +9,11 @@ from homeassistant.components.sensor import (
     SensorStateClass,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import PERCENTAGE, UnitOfTemperature
+from homeassistant.const import (
+    CONCENTRATION_PARTS_PER_MILLION,
+    PERCENTAGE,
+    UnitOfTemperature,
+)
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -21,6 +25,7 @@ from .entity import SwitchBotCloudEntity
 SENSOR_TYPE_TEMPERATURE = "temperature"
 SENSOR_TYPE_HUMIDITY = "humidity"
 SENSOR_TYPE_BATTERY = "battery"
+SENSOR_TYPE_CO2 = "CO2"

 METER_PLUS_SENSOR_DESCRIPTIONS = (
     SensorEntityDescription(
@@ -43,6 +48,16 @@ METER_PLUS_SENSOR_DESCRIPTIONS = (
     ),
 )

+METER_PRO_CO2_SENSOR_DESCRIPTIONS = (
+    *METER_PLUS_SENSOR_DESCRIPTIONS,
+    SensorEntityDescription(
+        key=SENSOR_TYPE_CO2,
+        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
+        state_class=SensorStateClass.MEASUREMENT,
+        device_class=SensorDeviceClass.CO2,
+    ),
+)
+

 async def async_setup_entry(
     hass: HomeAssistant,
@@ -55,7 +70,11 @@ async def async_setup_entry(
     async_add_entities(
         SwitchBotCloudSensor(data.api, device, coordinator, description)
         for device, coordinator in data.devices.sensors
-        for description in METER_PLUS_SENSOR_DESCRIPTIONS
+        for description in (
+            METER_PRO_CO2_SENSOR_DESCRIPTIONS
+            if device.device_type == "MeterPro(CO2)"
+            else METER_PLUS_SENSOR_DESCRIPTIONS
+        )
     )
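The sensor.py hunk above picks a different description tuple per reported device model, so the CO2-capable meter gains one extra entity while every other meter keeps the existing set. A minimal, self-contained sketch of that selection pattern (plain Python with a simplified stand-in for SensorEntityDescription, not the real Home Assistant classes) could look like this:

from dataclasses import dataclass


@dataclass(frozen=True)
class Description:
    """Simplified stand-in for SensorEntityDescription (illustration only)."""

    key: str
    unit: str


# Base set shared by all meters; mirrors METER_PLUS_SENSOR_DESCRIPTIONS above.
METER_PLUS = (
    Description("temperature", "°C"),
    Description("humidity", "%"),
    Description("battery", "%"),
)

# The CO2 model exposes everything the Meter Plus does, plus one CO2 reading.
METER_PRO_CO2 = (*METER_PLUS, Description("CO2", "ppm"))


def descriptions_for(device_type: str) -> tuple[Description, ...]:
    """Pick the description tuple based on the reported device type."""
    return METER_PRO_CO2 if device_type == "MeterPro(CO2)" else METER_PLUS


if __name__ == "__main__":
    print([d.key for d in descriptions_for("MeterPro(CO2)")])  # includes "CO2"
    print([d.key for d in descriptions_for("MeterPlus")])  # base set only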
diff --git a/homeassistant/components/switchbot_cloud/switch.py b/homeassistant/components/switchbot_cloud/switch.py
index fbcd4430f6e..c30e60086fa 100644
--- a/homeassistant/components/switchbot_cloud/switch.py
+++ b/homeassistant/components/switchbot_cloud/switch.py
@@ -36,13 +36,13 @@ class SwitchBotCloudSwitch(SwitchBotCloudEntity, SwitchEntity):

     async def async_turn_on(self, **kwargs: Any) -> None:
         """Turn the device on."""
-        await self.send_command(CommonCommands.ON)
+        await self.send_api_command(CommonCommands.ON)
         self._attr_is_on = True
         self.async_write_ha_state()

     async def async_turn_off(self, **kwargs: Any) -> None:
         """Turn the device off."""
-        await self.send_command(CommonCommands.OFF)
+        await self.send_api_command(CommonCommands.OFF)
         self._attr_is_on = False
         self.async_write_ha_state()

diff --git a/homeassistant/components/switchbot_cloud/vacuum.py b/homeassistant/components/switchbot_cloud/vacuum.py
new file mode 100644
index 00000000000..2d2a1783d73
--- /dev/null
+++ b/homeassistant/components/switchbot_cloud/vacuum.py
@@ -0,0 +1,122 @@
+"""Support for SwitchBot vacuum."""
+
+from typing import Any
+
+from switchbot_api import Device, Remote, SwitchBotAPI, VacuumCommands
+
+from homeassistant.components.vacuum import (
+    StateVacuumEntity,
+    VacuumActivity,
+    VacuumEntityFeature,
+)
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import SwitchbotCloudData
+from .const import (
+    DOMAIN,
+    VACUUM_FAN_SPEED_MAX,
+    VACUUM_FAN_SPEED_QUIET,
+    VACUUM_FAN_SPEED_STANDARD,
+    VACUUM_FAN_SPEED_STRONG,
+)
+from .coordinator import SwitchBotCoordinator
+from .entity import SwitchBotCloudEntity
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config: ConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up SwitchBot Cloud entry."""
+    data: SwitchbotCloudData = hass.data[DOMAIN][config.entry_id]
+    async_add_entities(
+        _async_make_entity(data.api, device, coordinator)
+        for device, coordinator in data.devices.vacuums
+    )
+
+
+VACUUM_SWITCHBOT_STATE_TO_HA_STATE: dict[str, VacuumActivity] = {
+    "StandBy": VacuumActivity.IDLE,
+    "Clearing": VacuumActivity.CLEANING,
+    "Paused": VacuumActivity.PAUSED,
+    "GotoChargeBase": VacuumActivity.RETURNING,
+    "Charging": VacuumActivity.DOCKED,
+    "ChargeDone": VacuumActivity.DOCKED,
+    "Dormant": VacuumActivity.IDLE,
+    "InTrouble": VacuumActivity.ERROR,
+    "InRemoteControl": VacuumActivity.CLEANING,
+    "InDustCollecting": VacuumActivity.DOCKED,
+}
+
+VACUUM_FAN_SPEED_TO_SWITCHBOT_FAN_SPEED: dict[str, str] = {
+    VACUUM_FAN_SPEED_QUIET: "0",
+    VACUUM_FAN_SPEED_STANDARD: "1",
+    VACUUM_FAN_SPEED_STRONG: "2",
+    VACUUM_FAN_SPEED_MAX: "3",
+}
+
+
+# https://github.com/OpenWonderLabs/SwitchBotAPI?tab=readme-ov-file#robot-vacuum-cleaner-s1-plus-1
+class SwitchBotCloudVacuum(SwitchBotCloudEntity, StateVacuumEntity):
+    """Representation of a SwitchBot vacuum."""
+
+    _attr_supported_features: VacuumEntityFeature = (
+        VacuumEntityFeature.BATTERY
+        | VacuumEntityFeature.FAN_SPEED
+        | VacuumEntityFeature.PAUSE
+        | VacuumEntityFeature.RETURN_HOME
+        | VacuumEntityFeature.START
+        | VacuumEntityFeature.STATE
+    )
+
+    _attr_name = None
+    _attr_fan_speed_list: list[str] = list(
+        VACUUM_FAN_SPEED_TO_SWITCHBOT_FAN_SPEED.keys()
+    )
+
+    async def async_set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None:
+        """Set fan speed."""
+        self._attr_fan_speed = fan_speed
+        if fan_speed in VACUUM_FAN_SPEED_TO_SWITCHBOT_FAN_SPEED:
+            await self.send_api_command(
+                VacuumCommands.POW_LEVEL,
+                parameters=VACUUM_FAN_SPEED_TO_SWITCHBOT_FAN_SPEED[fan_speed],
+            )
+        self.async_write_ha_state()
+
+    async def async_pause(self) -> None:
+        """Pause the cleaning task."""
+        await self.send_api_command(VacuumCommands.STOP)
+
+    async def async_return_to_base(self, **kwargs: Any) -> None:
+        """Set the vacuum cleaner to return to the dock."""
+        await self.send_api_command(VacuumCommands.DOCK)
+
+    async def async_start(self) -> None:
+        """Start or resume the cleaning task."""
+        await self.send_api_command(VacuumCommands.START)
+
+    @callback
+    def _handle_coordinator_update(self) -> None:
+        """Handle updated data from the coordinator."""
+        if not self.coordinator.data:
+            return
+
+        self._attr_battery_level = self.coordinator.data.get("battery")
+        self._attr_available = self.coordinator.data.get("onlineStatus") == "online"
+
+        switchbot_state = str(self.coordinator.data.get("workingStatus"))
+        self._attr_activity = VACUUM_SWITCHBOT_STATE_TO_HA_STATE.get(switchbot_state)
+
+        self.async_write_ha_state()
+
+
+@callback
+def _async_make_entity(
+    api: SwitchBotAPI, device: Device | Remote, coordinator: SwitchBotCoordinator
+) -> SwitchBotCloudVacuum:
+    """Make a SwitchBotCloudVacuum."""
+    return SwitchBotCloudVacuum(api, device, coordinator)
diff --git a/homeassistant/components/switcher_kis/__init__.py b/homeassistant/components/switcher_kis/__init__.py
index 555ba951041..840b62252f1 100644
--- a/homeassistant/components/switcher_kis/__init__.py
+++ b/homeassistant/components/switcher_kis/__init__.py
@@ -8,7 +8,7 @@ from aioswitcher.bridge import SwitcherBridge
 from aioswitcher.device import SwitcherBase

 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform
+from homeassistant.const import CONF_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform
 from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.helpers import device_registry as dr

@@ -19,6 +19,7 @@ PLATFORMS = [
     Platform.BUTTON,
     Platform.CLIMATE,
     Platform.COVER,
+    Platform.LIGHT,
     Platform.SENSOR,
     Platform.SWITCH,
 ]
@@ -32,6 +33,8 @@ type SwitcherConfigEntry = ConfigEntry[dict[str, SwitcherDataUpdateCoordinator]]

 async def async_setup_entry(hass: HomeAssistant, entry: SwitcherConfigEntry) -> bool:
     """Set up Switcher from a config entry."""
+    token = entry.data.get(CONF_TOKEN)
+
     @callback
     def on_device_data_callback(device: SwitcherBase) -> None:
         """Use as a callback for device data."""
@@ -45,14 +48,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: SwitcherConfigEntry) ->

         # New device - create device
         _LOGGER.info(
-            "Discovered Switcher device - id: %s, key: %s, name: %s, type: %s (%s)",
+            "Discovered Switcher device - id: %s, key: %s, name: %s, type: %s (%s), is_token_needed: %s",
             device.device_id,
             device.device_key,
             device.name,
             device.device_type.value,
             device.device_type.hex_rep,
+            device.token_needed,
         )

+        if device.token_needed and not token:
+            entry.async_start_reauth(hass)
+            return
+
         coordinator = SwitcherDataUpdateCoordinator(hass, entry, device)
         coordinator.async_setup()
         coordinators[device.device_id] = coordinator
diff --git a/homeassistant/components/switcher_kis/button.py b/homeassistant/components/switcher_kis/button.py
index 2e559ba9f3b..d2686e2e550 100644
--- a/homeassistant/components/switcher_kis/button.py
+++ b/homeassistant/components/switcher_kis/button.py
@@ -10,7 +10,6 @@ from aioswitcher.api import (
     DeviceState,
     SwitcherApi,
SwitcherBaseResponse, - SwitcherType2Api, ThermostatSwing, ) from aioswitcher.api.remotes import SwitcherBreezeRemote @@ -20,15 +19,13 @@ from homeassistant.components.button import ButtonEntity, ButtonEntityDescriptio from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import SwitcherConfigEntry from .const import SIGNAL_DEVICE_ADD from .coordinator import SwitcherDataUpdateCoordinator +from .entity import SwitcherEntity from .utils import get_breeze_remote_manager @@ -106,13 +103,10 @@ async def async_setup_entry( ) -class SwitcherThermostatButtonEntity( - CoordinatorEntity[SwitcherDataUpdateCoordinator], ButtonEntity -): +class SwitcherThermostatButtonEntity(SwitcherEntity, ButtonEntity): """Representation of a Switcher climate entity.""" entity_description: SwitcherThermostatButtonEntityDescription - _attr_has_entity_name = True def __init__( self, @@ -126,9 +120,6 @@ class SwitcherThermostatButtonEntity( self._remote = remote self._attr_unique_id = f"{coordinator.mac_address}-{description.key}" - self._attr_device_info = DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} - ) async def async_press(self) -> None: """Press the button.""" @@ -136,7 +127,7 @@ class SwitcherThermostatButtonEntity( error = None try: - async with SwitcherType2Api( + async with SwitcherApi( self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, diff --git a/homeassistant/components/switcher_kis/climate.py b/homeassistant/components/switcher_kis/climate.py index 511630251f2..5285e7549ef 100644 --- a/homeassistant/components/switcher_kis/climate.py +++ b/homeassistant/components/switcher_kis/climate.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any, cast -from aioswitcher.api import SwitcherBaseResponse, SwitcherType2Api +from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.api.remotes import SwitcherBreezeRemote from aioswitcher.device import ( DeviceCategory, @@ -29,15 +29,13 @@ from homeassistant.components.climate import ( from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import SwitcherConfigEntry from .const import SIGNAL_DEVICE_ADD from .coordinator import SwitcherDataUpdateCoordinator +from .entity import SwitcherEntity from .utils import get_breeze_remote_manager DEVICE_MODE_TO_HA = { @@ -81,14 +79,10 @@ async def async_setup_entry( ) -class SwitcherClimateEntity( - CoordinatorEntity[SwitcherDataUpdateCoordinator], ClimateEntity -): +class SwitcherClimateEntity(SwitcherEntity, ClimateEntity): """Representation of a Switcher climate entity.""" - _attr_has_entity_name = True _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: SwitcherDataUpdateCoordinator, remote: SwitcherBreezeRemote @@ -98,9 +92,6 @@ class SwitcherClimateEntity( self._remote = remote self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" - self._attr_device_info = DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} - ) self._attr_min_temp = remote.min_temperature self._attr_max_temp = remote.max_temperature @@ -168,7 +159,7 @@ class SwitcherClimateEntity( error = None try: - async with SwitcherType2Api( + async with SwitcherApi( self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, diff --git a/homeassistant/components/switcher_kis/config_flow.py b/homeassistant/components/switcher_kis/config_flow.py index 31764ecf390..e6c2e8e8589 100644 --- a/homeassistant/components/switcher_kis/config_flow.py +++ b/homeassistant/components/switcher_kis/config_flow.py @@ -2,9 +2,114 @@ from __future__ import annotations -from homeassistant.helpers import config_entry_flow +from collections.abc import Mapping +import logging +from typing import Any, Final + +from aioswitcher.bridge import SwitcherBase +from aioswitcher.device.tools import validate_token +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_TOKEN, CONF_USERNAME from .const import DOMAIN -from .utils import async_has_devices +from .utils import async_discover_devices -config_entry_flow.register_discovery_flow(DOMAIN, "Switcher", async_has_devices) +_LOGGER = logging.getLogger(__name__) + + +CONFIG_SCHEMA: Final = vol.Schema( + { + vol.Required(CONF_USERNAME, default=""): str, + vol.Required(CONF_TOKEN, default=""): str, + } +) + + +class SwitcherFlowHandler(ConfigFlow, domain=DOMAIN): + """Handle Switcher config flow.""" + + VERSION = 1 + + username: str | None = None + token: str | None = None + discovered_devices: dict[str, SwitcherBase] = {} + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the start of the config flow.""" + self.discovered_devices = await async_discover_devices() + + return self.async_show_form(step_id="confirm") + + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle user-confirmation of the config flow.""" + if len(self.discovered_devices) == 0: + return self.async_abort(reason="no_devices_found") + + for device_id, device in self.discovered_devices.items(): + if device.token_needed: + _LOGGER.debug("Device with ID %s requires a token", device_id) + return await self.async_step_credentials() + return await self._create_entry() + + async def async_step_credentials( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the credentials step.""" + errors: dict[str, str] = {} + if user_input is not None: + self.username = 
user_input.get(CONF_USERNAME) + self.token = user_input.get(CONF_TOKEN) + + token_is_valid = await validate_token( + user_input[CONF_USERNAME], user_input[CONF_TOKEN] + ) + if token_is_valid: + return await self._create_entry() + errors["base"] = "invalid_auth" + + return self.async_show_form( + step_id="credentials", data_schema=CONFIG_SCHEMA, errors=errors + ) + + async def async_step_reauth( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + errors: dict[str, str] = {} + + if user_input is not None: + token_is_valid = await validate_token( + user_input[CONF_USERNAME], user_input[CONF_TOKEN] + ) + if token_is_valid: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) + errors["base"] = "invalid_auth" + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=CONFIG_SCHEMA, + errors=errors, + ) + + async def _create_entry(self) -> ConfigFlowResult: + return self.async_create_entry( + title="Switcher", + data={ + CONF_USERNAME: self.username, + CONF_TOKEN: self.token, + }, + ) diff --git a/homeassistant/components/switcher_kis/coordinator.py b/homeassistant/components/switcher_kis/coordinator.py index 1fdefda23a2..118c86b8d78 100644 --- a/homeassistant/components/switcher_kis/coordinator.py +++ b/homeassistant/components/switcher_kis/coordinator.py @@ -8,6 +8,7 @@ import logging from aioswitcher.device import SwitcherBase from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_TOKEN from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr, update_coordinator from homeassistant.helpers.dispatcher import async_dispatcher_send @@ -22,18 +23,24 @@ class SwitcherDataUpdateCoordinator( ): """Switcher device data update coordinator.""" + config_entry: ConfigEntry + def __init__( - self, hass: HomeAssistant, entry: ConfigEntry, device: SwitcherBase + self, + hass: HomeAssistant, + entry: ConfigEntry, + device: SwitcherBase, ) -> None: """Initialize the Switcher device coordinator.""" super().__init__( hass, _LOGGER, + config_entry=entry, name=device.name, update_interval=timedelta(seconds=MAX_UPDATE_INTERVAL_SEC), ) - self.entry = entry self.data = device + self.token = entry.data.get(CONF_TOKEN) async def _async_update_data(self) -> SwitcherBase: """Mark device offline if no data.""" @@ -62,7 +69,7 @@ class SwitcherDataUpdateCoordinator( """Set up the coordinator.""" dev_reg = dr.async_get(self.hass) dev_reg.async_get_or_create( - config_entry_id=self.entry.entry_id, + config_entry_id=self.config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, self.mac_address)}, identifiers={(DOMAIN, self.device_id)}, manufacturer="Switcher", diff --git a/homeassistant/components/switcher_kis/cover.py b/homeassistant/components/switcher_kis/cover.py index 19c40d05e63..513b786a033 100644 --- a/homeassistant/components/switcher_kis/cover.py +++ b/homeassistant/components/switcher_kis/cover.py @@ -2,10 +2,8 @@ from __future__ import annotations -import logging from typing import Any, cast -from aioswitcher.api import SwitcherBaseResponse, SwitcherType2Api from aioswitcher.device import DeviceCategory, ShutterDirection, SwitcherShutter from homeassistant.components.cover import ( 
@@ -16,17 +14,12 @@ from homeassistant.components.cover import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import SIGNAL_DEVICE_ADD from .coordinator import SwitcherDataUpdateCoordinator - -_LOGGER = logging.getLogger(__name__) +from .entity import SwitcherEntity API_SET_POSITON = "set_position" API_STOP = "stop_shutter" @@ -42,21 +35,31 @@ async def async_setup_entry( @callback def async_add_cover(coordinator: SwitcherDataUpdateCoordinator) -> None: """Add cover from Switcher device.""" - if coordinator.data.device_type.category == DeviceCategory.SHUTTER: - async_add_entities([SwitcherCoverEntity(coordinator)]) + entities: list[CoverEntity] = [] + + if coordinator.data.device_type.category in ( + DeviceCategory.SHUTTER, + DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT, + DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT, + ): + number_of_covers = len(cast(SwitcherShutter, coordinator.data).position) + if number_of_covers == 1: + entities.append(SwitcherSingleCoverEntity(coordinator, 0)) + else: + entities.extend( + SwitcherMultiCoverEntity(coordinator, i) + for i in range(number_of_covers) + ) + async_add_entities(entities) config_entry.async_on_unload( async_dispatcher_connect(hass, SIGNAL_DEVICE_ADD, async_add_cover) ) -class SwitcherCoverEntity( - CoordinatorEntity[SwitcherDataUpdateCoordinator], CoverEntity -): +class SwitcherBaseCoverEntity(SwitcherEntity, CoverEntity): """Representation of a Switcher cover entity.""" - _attr_has_entity_name = True - _attr_name = None _attr_device_class = CoverDeviceClass.SHUTTER _attr_supported_features = ( CoverEntityFeature.OPEN @@ -64,17 +67,7 @@ class SwitcherCoverEntity( | CoverEntityFeature.SET_POSITION | CoverEntityFeature.STOP ) - - def __init__(self, coordinator: SwitcherDataUpdateCoordinator) -> None: - """Initialize the entity.""" - super().__init__(coordinator) - - self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" - self._attr_device_info = DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} - ) - - self._update_data() + _cover_id: int @callback def _handle_coordinator_update(self) -> None: @@ -85,48 +78,70 @@ class SwitcherCoverEntity( def _update_data(self) -> None: """Update data from device.""" data = cast(SwitcherShutter, self.coordinator.data) - self._attr_current_cover_position = data.position - self._attr_is_closed = data.position == 0 - self._attr_is_closing = data.direction == ShutterDirection.SHUTTER_DOWN - self._attr_is_opening = data.direction == ShutterDirection.SHUTTER_UP - - async def _async_call_api(self, api: str, *args: Any) -> None: - """Call Switcher API.""" - _LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) - response: SwitcherBaseResponse | None = None - error = None - - try: - async with SwitcherType2Api( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - ) as swapi: - response = await getattr(swapi, api)(*args) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if 
error or not response or not response.successful: - self.coordinator.last_update_success = False - self.async_write_ha_state() - raise HomeAssistantError( - f"Call api for {self.name} failed, api: '{api}', " - f"args: {args}, response/error: {response or error}" - ) + self._attr_current_cover_position = data.position[self._cover_id] + self._attr_is_closed = data.position[self._cover_id] == 0 + self._attr_is_closing = ( + data.direction[self._cover_id] == ShutterDirection.SHUTTER_DOWN + ) + self._attr_is_opening = ( + data.direction[self._cover_id] == ShutterDirection.SHUTTER_UP + ) async def async_close_cover(self, **kwargs: Any) -> None: """Close cover.""" - await self._async_call_api(API_SET_POSITON, 0) + await self._async_call_api(API_SET_POSITON, 0, self._cover_id) async def async_open_cover(self, **kwargs: Any) -> None: """Open cover.""" - await self._async_call_api(API_SET_POSITON, 100) + await self._async_call_api(API_SET_POSITON, 100, self._cover_id) async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" - await self._async_call_api(API_SET_POSITON, kwargs[ATTR_POSITION]) + await self._async_call_api( + API_SET_POSITON, kwargs[ATTR_POSITION], self._cover_id + ) async def async_stop_cover(self, **kwargs: Any) -> None: """Stop the cover.""" - await self._async_call_api(API_STOP) + await self._async_call_api(API_STOP, self._cover_id) + + +class SwitcherSingleCoverEntity(SwitcherBaseCoverEntity): + """Representation of a Switcher single cover entity.""" + + _attr_name = None + + def __init__( + self, + coordinator: SwitcherDataUpdateCoordinator, + cover_id: int, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._cover_id = cover_id + + self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" + + self._update_data() + + +class SwitcherMultiCoverEntity(SwitcherBaseCoverEntity): + """Representation of a Switcher multiple cover entity.""" + + _attr_translation_key = "cover" + + def __init__( + self, + coordinator: SwitcherDataUpdateCoordinator, + cover_id: int, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._cover_id = cover_id + + self._attr_translation_placeholders = {"cover_id": str(cover_id + 1)} + self._attr_unique_id = ( + f"{coordinator.device_id}-{coordinator.mac_address}-{cover_id}" + ) + + self._update_data() diff --git a/homeassistant/components/switcher_kis/entity.py b/homeassistant/components/switcher_kis/entity.py new file mode 100644 index 00000000000..e24f59a4a1c --- /dev/null +++ b/homeassistant/components/switcher_kis/entity.py @@ -0,0 +1,54 @@ +"""Base class for Switcher entities.""" + +import logging +from typing import Any + +from aioswitcher.api import SwitcherApi, SwitcherBaseResponse + +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import SwitcherDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +class SwitcherEntity(CoordinatorEntity[SwitcherDataUpdateCoordinator]): + """Base class for Switcher entities.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: SwitcherDataUpdateCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} + ) + + 
async def _async_call_api(self, api: str, *args: Any) -> None: + """Call Switcher API.""" + _LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) + response: SwitcherBaseResponse | None = None + error = None + + try: + async with SwitcherApi( + self.coordinator.data.device_type, + self.coordinator.data.ip_address, + self.coordinator.data.device_id, + self.coordinator.data.device_key, + self.coordinator.token, + ) as swapi: + response = await getattr(swapi, api)(*args) + except (TimeoutError, OSError, RuntimeError) as err: + error = repr(err) + + if error or not response or not response.successful: + self.coordinator.last_update_success = False + self.async_write_ha_state() + raise HomeAssistantError( + f"Call api for {self.name} failed, api: '{api}', " + f"args: {args}, response/error: {response or error}" + ) diff --git a/homeassistant/components/switcher_kis/icons.json b/homeassistant/components/switcher_kis/icons.json index 4d3576f1a99..bd770d3e656 100644 --- a/homeassistant/components/switcher_kis/icons.json +++ b/homeassistant/components/switcher_kis/icons.json @@ -20,11 +20,18 @@ }, "auto_shutdown": { "default": "mdi:progress-clock" + }, + "temperature": { + "default": "mdi:thermometer" } } }, "services": { - "set_auto_off": "mdi:progress-clock", - "turn_on_with_timer": "mdi:timer" + "set_auto_off": { + "service": "mdi:progress-clock" + }, + "turn_on_with_timer": { + "service": "mdi:timer" + } } } diff --git a/homeassistant/components/switcher_kis/light.py b/homeassistant/components/switcher_kis/light.py new file mode 100644 index 00000000000..75156044efa --- /dev/null +++ b/homeassistant/components/switcher_kis/light.py @@ -0,0 +1,128 @@ +"""Switcher integration Light platform.""" + +from __future__ import annotations + +from typing import Any, cast + +from aioswitcher.device import DeviceCategory, DeviceState, SwitcherLight + +from homeassistant.components.light import ColorMode, LightEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import SIGNAL_DEVICE_ADD +from .coordinator import SwitcherDataUpdateCoordinator +from .entity import SwitcherEntity + +API_SET_LIGHT = "set_light" + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Switcher light from a config entry.""" + + @callback + def async_add_light(coordinator: SwitcherDataUpdateCoordinator) -> None: + """Add light from Switcher device.""" + entities: list[LightEntity] = [] + + if coordinator.data.device_type.category in ( + DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT, + DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT, + DeviceCategory.LIGHT, + ): + number_of_lights = len(cast(SwitcherLight, coordinator.data).light) + if number_of_lights == 1: + entities.append(SwitcherSingleLightEntity(coordinator, 0)) + else: + entities.extend( + SwitcherMultiLightEntity(coordinator, i) + for i in range(number_of_lights) + ) + async_add_entities(entities) + + config_entry.async_on_unload( + async_dispatcher_connect(hass, SIGNAL_DEVICE_ADD, async_add_light) + ) + + +class SwitcherBaseLightEntity(SwitcherEntity, LightEntity): + """Representation of a Switcher light entity.""" + + _attr_color_mode = ColorMode.ONOFF + _attr_supported_color_modes = {ColorMode.ONOFF} + control_result: bool | None = None + 
_light_id: int + + @callback + def _handle_coordinator_update(self) -> None: + """When device updates, clear control result that overrides state.""" + self.control_result = None + self.async_write_ha_state() + + @property + def is_on(self) -> bool: + """Return True if entity is on.""" + if self.control_result is not None: + return self.control_result + + data = cast(SwitcherLight, self.coordinator.data) + return bool(data.light[self._light_id] == DeviceState.ON) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the light on.""" + await self._async_call_api(API_SET_LIGHT, DeviceState.ON, self._light_id) + self.control_result = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the light off.""" + await self._async_call_api(API_SET_LIGHT, DeviceState.OFF, self._light_id) + self.control_result = False + self.async_write_ha_state() + + +class SwitcherSingleLightEntity(SwitcherBaseLightEntity): + """Representation of a Switcher single light entity.""" + + _attr_name = None + + def __init__( + self, + coordinator: SwitcherDataUpdateCoordinator, + light_id: int, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._light_id = light_id + self.control_result: bool | None = None + + # Entity class attributes + self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" + + +class SwitcherMultiLightEntity(SwitcherBaseLightEntity): + """Representation of a Switcher multiple light entity.""" + + _attr_translation_key = "light" + + def __init__( + self, + coordinator: SwitcherDataUpdateCoordinator, + light_id: int, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._light_id = light_id + self.control_result: bool | None = None + + # Entity class attributes + self._attr_translation_placeholders = {"light_id": str(light_id + 1)} + self._attr_unique_id = ( + f"{coordinator.device_id}-{coordinator.mac_address}-{light_id}" + ) diff --git a/homeassistant/components/switcher_kis/manifest.json b/homeassistant/components/switcher_kis/manifest.json index 75ace60e942..d0731c5ae3b 100644 --- a/homeassistant/components/switcher_kis/manifest.json +++ b/homeassistant/components/switcher_kis/manifest.json @@ -1,12 +1,11 @@ { "domain": "switcher_kis", "name": "Switcher", - "codeowners": ["@thecode"], + "codeowners": ["@thecode", "@YogevBokobza"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/switcher_kis", "iot_class": "local_push", "loggers": ["aioswitcher"], - "quality_scale": "platinum", - "requirements": ["aioswitcher==4.0.2"], + "requirements": ["aioswitcher==5.1.0"], "single_config_entry": true } diff --git a/homeassistant/components/switcher_kis/sensor.py b/homeassistant/components/switcher_kis/sensor.py index ee503dcda95..0ed60e5a721 100644 --- a/homeassistant/components/switcher_kis/sensor.py +++ b/homeassistant/components/switcher_kis/sensor.py @@ -13,15 +13,13 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfElectricCurrent, UnitOfPower from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from 
homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import SIGNAL_DEVICE_ADD from .coordinator import SwitcherDataUpdateCoordinator +from .entity import SwitcherEntity POWER_SENSORS: list[SensorEntityDescription] = [ SensorEntityDescription( @@ -48,9 +46,16 @@ TIME_SENSORS: list[SensorEntityDescription] = [ entity_registry_enabled_default=False, ), ] +TEMPERATURE_SENSORS: list[SensorEntityDescription] = [ + SensorEntityDescription( + key="temperature", + translation_key="temperature", + ), +] POWER_PLUG_SENSORS = POWER_SENSORS WATER_HEATER_SENSORS = [*POWER_SENSORS, *TIME_SENSORS] +THERMOSTAT_SENSORS = TEMPERATURE_SENSORS async def async_setup_entry( @@ -73,19 +78,20 @@ async def async_setup_entry( SwitcherSensorEntity(coordinator, description) for description in WATER_HEATER_SENSORS ) + elif coordinator.data.device_type.category == DeviceCategory.THERMOSTAT: + async_add_entities( + SwitcherSensorEntity(coordinator, description) + for description in THERMOSTAT_SENSORS + ) config_entry.async_on_unload( async_dispatcher_connect(hass, SIGNAL_DEVICE_ADD, async_add_sensors) ) -class SwitcherSensorEntity( - CoordinatorEntity[SwitcherDataUpdateCoordinator], SensorEntity -): +class SwitcherSensorEntity(SwitcherEntity, SensorEntity): """Representation of a Switcher sensor entity.""" - _attr_has_entity_name = True - def __init__( self, coordinator: SwitcherDataUpdateCoordinator, @@ -98,9 +104,6 @@ class SwitcherSensorEntity( self._attr_unique_id = ( f"{coordinator.device_id}-{coordinator.mac_address}-{description.key}" ) - self._attr_device_info = DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} - ) @property def native_value(self) -> StateType: diff --git a/homeassistant/components/switcher_kis/strings.json b/homeassistant/components/switcher_kis/strings.json index e21bdbcdf7a..844cbb4ca98 100644 --- a/homeassistant/components/switcher_kis/strings.json +++ b/homeassistant/components/switcher_kis/strings.json @@ -3,11 +3,29 @@ "step": { "confirm": { "description": "[%key:common::config_flow::description::confirm_setup%]" + }, + "credentials": { + "description": "Found a Switcher device that requires a token\nEnter your username and token\nFor more information see https://www.home-assistant.io/integrations/switcher_kis/#prerequisites", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "token": "[%key:common::config_flow::data::access_token%]" + } + }, + "reauth_confirm": { + "description": "Found a Switcher device that requires a token\nEnter your username and token\nFor more information see https://www.home-assistant.io/integrations/switcher_kis/#prerequisites", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "token": "[%key:common::config_flow::data::access_token%]" + } } }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + }, "abort": { "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { @@ -25,12 +43,25 @@ "name": "Vertical swing off" } }, + "cover": { + "cover": { + "name": "Cover {cover_id}" + } + }, + "light": { + "light": { + "name": "Light {light_id}" + } + }, "sensor": { "remaining_time": { "name": "Remaining time" }, "auto_shutdown": { "name": "Auto shutdown" + }, 
+ "temperature": { + "name": "Current temperature" } } }, diff --git a/homeassistant/components/switcher_kis/switch.py b/homeassistant/components/switcher_kis/switch.py index c667a6dd473..ba0a99b4089 100644 --- a/homeassistant/components/switcher_kis/switch.py +++ b/homeassistant/components/switcher_kis/switch.py @@ -6,23 +6,17 @@ from datetime import timedelta import logging from typing import Any -from aioswitcher.api import Command, SwitcherBaseResponse, SwitcherType1Api +from aioswitcher.api import Command from aioswitcher.device import DeviceCategory, DeviceState import voluptuous as vol from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import ( - config_validation as cv, - device_registry as dr, - entity_platform, -) -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( CONF_AUTO_OFF, @@ -32,6 +26,7 @@ from .const import ( SIGNAL_DEVICE_ADD, ) from .coordinator import SwitcherDataUpdateCoordinator +from .entity import SwitcherEntity _LOGGER = logging.getLogger(__name__) @@ -82,12 +77,9 @@ async def async_setup_entry( ) -class SwitcherBaseSwitchEntity( - CoordinatorEntity[SwitcherDataUpdateCoordinator], SwitchEntity -): +class SwitcherBaseSwitchEntity(SwitcherEntity, SwitchEntity): """Representation of a Switcher switch entity.""" - _attr_has_entity_name = True _attr_name = None def __init__(self, coordinator: SwitcherDataUpdateCoordinator) -> None: @@ -97,9 +89,6 @@ class SwitcherBaseSwitchEntity( # Entity class attributes self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" - self._attr_device_info = DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} - ) @callback def _handle_coordinator_update(self) -> None: @@ -107,35 +96,6 @@ class SwitcherBaseSwitchEntity( self.control_result = None self.async_write_ha_state() - async def _async_call_api(self, api: str, *args: Any) -> None: - """Call Switcher API.""" - _LOGGER.debug( - "Calling api for %s, api: '%s', args: %s", self.coordinator.name, api, args - ) - response: SwitcherBaseResponse | None = None - error = None - - try: - async with SwitcherType1Api( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - ) as swapi: - response = await getattr(swapi, api)(*args) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if error or not response or not response.successful: - _LOGGER.error( - "Call api for %s failed, api: '%s', args: %s, response/error: %s", - self.coordinator.name, - api, - args, - response or error, - ) - self.coordinator.last_update_success = False - @property def is_on(self) -> bool: """Return True if entity is on.""" diff --git a/homeassistant/components/switcher_kis/utils.py b/homeassistant/components/switcher_kis/utils.py index ad23d51e44d..50bfb883e6c 100644 --- a/homeassistant/components/switcher_kis/utils.py +++ b/homeassistant/components/switcher_kis/utils.py @@ -16,7 +16,7 @@ from .const import DISCOVERY_TIME_SEC _LOGGER = 
logging.getLogger(__name__) -async def async_has_devices(hass: HomeAssistant) -> bool: +async def async_discover_devices() -> dict[str, SwitcherBase]: """Discover Switcher devices.""" _LOGGER.debug("Starting discovery") discovered_devices = {} @@ -35,7 +35,7 @@ async def async_has_devices(hass: HomeAssistant) -> bool: await bridge.stop() _LOGGER.debug("Finished discovery, discovered devices: %s", len(discovered_devices)) - return len(discovered_devices) > 0 + return discovered_devices @singleton.singleton("switcher_breeze_remote_manager") diff --git a/homeassistant/components/switchmate/manifest.json b/homeassistant/components/switchmate/manifest.json index 5467dc512c3..f21819e1bc0 100644 --- a/homeassistant/components/switchmate/manifest.json +++ b/homeassistant/components/switchmate/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/switchmate", "iot_class": "local_polling", "loggers": ["switchmate"], + "quality_scale": "legacy", "requirements": ["PySwitchmate==0.5.1"] } diff --git a/homeassistant/components/syncthing/__init__.py b/homeassistant/components/syncthing/__init__.py index 28ec14a1935..8ef63e76825 100644 --- a/homeassistant/components/syncthing/__init__.py +++ b/homeassistant/components/syncthing/__init__.py @@ -124,7 +124,7 @@ class SyncthingClient: while True: if await self._server_available(): if server_was_unavailable: - _LOGGER.info( + _LOGGER.warning( "The syncthing server '%s' is back online", self._client.url ) async_dispatcher_send( @@ -153,7 +153,7 @@ class SyncthingClient: event, ) except aiosyncthing.exceptions.SyncthingError: - _LOGGER.info( + _LOGGER.warning( ( "The syncthing server '%s' is not available. Sleeping %i" " seconds and retrying" diff --git a/homeassistant/components/syncthing/manifest.json b/homeassistant/components/syncthing/manifest.json index f7fd2b7ece6..612665913d0 100644 --- a/homeassistant/components/syncthing/manifest.json +++ b/homeassistant/components/syncthing/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/syncthing", "iot_class": "local_polling", "loggers": ["aiosyncthing"], - "quality_scale": "silver", "requirements": ["aiosyncthing==0.5.1"] } diff --git a/homeassistant/components/syncthru/__init__.py b/homeassistant/components/syncthru/__init__.py index c6764de51a7..2817f4c21ce 100644 --- a/homeassistant/components/syncthru/__init__.py +++ b/homeassistant/components/syncthru/__init__.py @@ -37,7 +37,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await printer.update() except SyncThruAPINotSupported as api_error: # if an exception is thrown, printer does not support syncthru - _LOGGER.info( + _LOGGER.debug( "Configured printer at %s does not provide SyncThru JSON API", printer.url, exc_info=api_error, @@ -52,6 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator[SyncThru]( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update_data, update_interval=timedelta(seconds=30), diff --git a/homeassistant/components/syncthru/config_flow.py b/homeassistant/components/syncthru/config_flow.py index 180ba0d9e34..1fb155a5648 100644 --- a/homeassistant/components/syncthru/config_flow.py +++ b/homeassistant/components/syncthru/config_flow.py @@ -64,7 +64,9 @@ class SyncThruConfigFlow(ConfigFlow, domain=DOMAIN): self.context["title_placeholders"] = {CONF_NAME: self.name} return await self.async_step_confirm() - async def async_step_confirm(self, 
user_input=None): + async def async_step_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle discovery confirmation by user.""" if user_input is not None: return await self._async_check_and_create("confirm", user_input) diff --git a/homeassistant/components/synology_chat/manifest.json b/homeassistant/components/synology_chat/manifest.json index 3ac663ff91e..c9bd3396097 100644 --- a/homeassistant/components/synology_chat/manifest.json +++ b/homeassistant/components/synology_chat/manifest.json @@ -3,5 +3,6 @@ "name": "Synology Chat", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/synology_chat", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/synology_dsm/common.py b/homeassistant/components/synology_dsm/common.py index e2023aa91a1..9a6284eff2b 100644 --- a/homeassistant/components/synology_dsm/common.py +++ b/homeassistant/components/synology_dsm/common.py @@ -138,7 +138,7 @@ class SynoApi: except SYNOLOGY_CONNECTION_EXCEPTIONS: self._with_surveillance_station = False self.dsm.reset(SynoSurveillanceStation.API_KEY) - LOGGER.info( + LOGGER.warning( "Surveillance Station found, but disabled due to missing user" " permissions" ) diff --git a/homeassistant/components/synology_dsm/config_flow.py b/homeassistant/components/synology_dsm/config_flow.py index d019361edad..918a24035f8 100644 --- a/homeassistant/components/synology_dsm/config_flow.py +++ b/homeassistant/components/synology_dsm/config_flow.py @@ -118,7 +118,7 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SynologyDSMOptionsFlowHandler: """Get the options flow for this handler.""" - return SynologyDSMOptionsFlowHandler(config_entry) + return SynologyDSMOptionsFlowHandler() def __init__(self) -> None: """Initialize the synology_dsm config flow.""" @@ -289,7 +289,7 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): and existing_entry.data[CONF_HOST] != host and ip(existing_entry.data[CONF_HOST]).version == ip(host).version ): - _LOGGER.info( + _LOGGER.debug( "Update host from '%s' to '%s' for NAS '%s' via discovery", existing_entry.data[CONF_HOST], host, @@ -326,7 +326,11 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" self.reauth_conf = entry_data - self.context["title_placeholders"][CONF_HOST] = entry_data[CONF_HOST] + placeholders = { + **self.context["title_placeholders"], + CONF_HOST: entry_data[CONF_HOST], + } + self.context["title_placeholders"] = placeholders return await self.async_step_reauth_confirm() @@ -372,10 +376,6 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): class SynologyDSMOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/synology_dsm/icons.json b/homeassistant/components/synology_dsm/icons.json index 8e6d2b17f02..3c4d028dc7a 100644 --- a/homeassistant/components/synology_dsm/icons.json +++ b/homeassistant/components/synology_dsm/icons.json @@ -78,7 +78,11 @@ } }, "services": { - "reboot": "mdi:restart", - "shutdown": "mdi:power" + "reboot": { + "service": "mdi:restart" + }, + "shutdown": { + "service": "mdi:power" + } } } diff --git 
a/homeassistant/components/synology_dsm/manifest.json b/homeassistant/components/synology_dsm/manifest.json index 5d42188357b..b85189715ef 100644 --- a/homeassistant/components/synology_dsm/manifest.json +++ b/homeassistant/components/synology_dsm/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/synology_dsm", "iot_class": "local_polling", "loggers": ["synology_dsm"], - "requirements": ["py-synologydsm-api==2.5.2"], + "requirements": ["py-synologydsm-api==2.5.3"], "ssdp": [ { "manufacturer": "Synology", diff --git a/homeassistant/components/synology_srm/device_tracker.py b/homeassistant/components/synology_srm/device_tracker.py index 7c7343e88f6..3e0e7add185 100644 --- a/homeassistant/components/synology_srm/device_tracker.py +++ b/homeassistant/components/synology_srm/device_tracker.py @@ -8,7 +8,7 @@ import synology_srm import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -75,7 +75,7 @@ def get_scanner( hass: HomeAssistant, config: ConfigType ) -> SynologySrmDeviceScanner | None: """Validate the configuration and return Synology SRM scanner.""" - scanner = SynologySrmDeviceScanner(config[DOMAIN]) + scanner = SynologySrmDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None @@ -100,8 +100,6 @@ class SynologySrmDeviceScanner(DeviceScanner): self.devices = [] self.success_init = self._update_info() - _LOGGER.info("Synology SRM scanner initialized") - def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" self._update_info() diff --git a/homeassistant/components/synology_srm/manifest.json b/homeassistant/components/synology_srm/manifest.json index 9980f37969e..0d712b6742b 100644 --- a/homeassistant/components/synology_srm/manifest.json +++ b/homeassistant/components/synology_srm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/synology_srm", "iot_class": "local_polling", "loggers": ["synology_srm"], + "quality_scale": "legacy", "requirements": ["synology-srm==0.2.0"] } diff --git a/homeassistant/components/syslog/manifest.json b/homeassistant/components/syslog/manifest.json index 380628ffa66..bf327baec10 100644 --- a/homeassistant/components/syslog/manifest.json +++ b/homeassistant/components/syslog/manifest.json @@ -3,5 +3,6 @@ "name": "Syslog", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/syslog", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/system_bridge/config_flow.py b/homeassistant/components/system_bridge/config_flow.py index dc1736ea337..98396e52545 100644 --- a/homeassistant/components/system_bridge/config_flow.py +++ b/homeassistant/components/system_bridge/config_flow.py @@ -17,7 +17,7 @@ from systembridgemodels.modules import GetData, Module import voluptuous as vol from homeassistant.components import zeroconf -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -120,11 +120,11 @@ class SystemBridgeConfigFlow( VERSION = 1 MINOR_VERSION = 2 + _name: str + def __init__(self) -> None: """Initialize 
flow.""" - self._name: str | None = None self._input: dict[str, Any] = {} - self._reauth = False async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -157,15 +157,13 @@ class SystemBridgeConfigFlow( user_input = {**self._input, **user_input} errors, info = await _async_get_info(self.hass, user_input) if not errors and info is not None: - # Check if already configured - existing_entry = await self.async_set_unique_id(info["uuid"]) + await self.async_set_unique_id(info["uuid"]) - if self._reauth and existing_entry: - self.hass.config_entries.async_update_entry( - existing_entry, data=user_input + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input ) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") self._abort_if_unique_id_configured( updates={CONF_HOST: info["hostname"]} @@ -212,7 +210,6 @@ class SystemBridgeConfigFlow( CONF_HOST: entry_data[CONF_HOST], CONF_PORT: entry_data[CONF_PORT], } - self._reauth = True return await self.async_step_authenticate() diff --git a/homeassistant/components/system_bridge/icons.json b/homeassistant/components/system_bridge/icons.json index cc648889f0b..a03f77049a3 100644 --- a/homeassistant/components/system_bridge/icons.json +++ b/homeassistant/components/system_bridge/icons.json @@ -1,11 +1,25 @@ { "services": { - "get_process_by_id": "mdi:console", - "get_processes_by_name": "mdi:console", - "open_path": "mdi:folder-open", - "open_url": "mdi:web", - "send_keypress": "mdi:keyboard", - "send_text": "mdi:keyboard", - "power_command": "mdi:power" + "get_process_by_id": { + "service": "mdi:console" + }, + "get_processes_by_name": { + "service": "mdi:console" + }, + "open_path": { + "service": "mdi:folder-open" + }, + "open_url": { + "service": "mdi:web" + }, + "send_keypress": { + "service": "mdi:keyboard" + }, + "send_text": { + "service": "mdi:keyboard" + }, + "power_command": { + "service": "mdi:power" + } } } diff --git a/homeassistant/components/system_bridge/manifest.json b/homeassistant/components/system_bridge/manifest.json index e886bcad150..2799cf31fdd 100644 --- a/homeassistant/components/system_bridge/manifest.json +++ b/homeassistant/components/system_bridge/manifest.json @@ -9,7 +9,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["systembridgeconnector"], - "quality_scale": "silver", "requirements": ["systembridgeconnector==4.1.5", "systembridgemodels==4.2.4"], "zeroconf": ["_system-bridge._tcp.local."] } diff --git a/homeassistant/components/system_bridge/media_source.py b/homeassistant/components/system_bridge/media_source.py index cd0ef8ee60f..53bc4f32506 100644 --- a/homeassistant/components/system_bridge/media_source.py +++ b/homeassistant/components/system_bridge/media_source.py @@ -7,8 +7,9 @@ from systembridgemodels.media_files import MediaFile, MediaFiles from systembridgemodels.media_get_files import MediaGetFiles from homeassistant.components.media_player import MediaClass -from homeassistant.components.media_source import MEDIA_CLASS_MAP, MEDIA_MIME_TYPES -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( + MEDIA_CLASS_MAP, + MEDIA_MIME_TYPES, BrowseMediaSource, MediaSource, MediaSourceItem, diff --git a/homeassistant/components/system_bridge/strings.json b/homeassistant/components/system_bridge/strings.json index b5ceba9bd84..ef7495ef74f 100644 
--- a/homeassistant/components/system_bridge/strings.json +++ b/homeassistant/components/system_bridge/strings.json @@ -3,6 +3,7 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unique_id_mismatch": "The identifier does not match the previous identifier", "unsupported_version": "Your version of System Bridge is not supported. Please upgrade to the latest version.", "unknown": "[%key:common::config_flow::error::unknown%]" }, diff --git a/homeassistant/components/system_log/__init__.py b/homeassistant/components/system_log/__init__.py index 0749f87a67f..22950aa9f1e 100644 --- a/homeassistant/components/system_log/__init__.py +++ b/homeassistant/components/system_log/__init__.py @@ -299,9 +299,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass_path: str = HOMEASSISTANT_PATH[0] config_dir = hass.config.config_dir - paths_re = re.compile( - r"(?:{})/(.*)".format("|".join([re.escape(x) for x in (hass_path, config_dir)])) - ) + paths_re = re.compile(rf"(?:{re.escape(hass_path)}|{re.escape(config_dir)})/(.*)") handler = LogErrorHandler( hass, conf[CONF_MAX_ENTRIES], conf[CONF_FIRE_EVENT], paths_re ) diff --git a/homeassistant/components/system_log/icons.json b/homeassistant/components/system_log/icons.json index 436a6c34808..fe269c5154d 100644 --- a/homeassistant/components/system_log/icons.json +++ b/homeassistant/components/system_log/icons.json @@ -1,6 +1,10 @@ { "services": { - "clear": "mdi:delete", - "write": "mdi:pencil" + "clear": { + "service": "mdi:delete" + }, + "write": { + "service": "mdi:pencil" + } } } diff --git a/homeassistant/components/system_log/strings.json b/homeassistant/components/system_log/strings.json index ed1ca79fe07..db71cd6ace4 100644 --- a/homeassistant/components/system_log/strings.json +++ b/homeassistant/components/system_log/strings.json @@ -1,8 +1,8 @@ { "services": { "clear": { - "name": "Clear all", - "description": "Clears all log entries." + "name": "Clear", + "description": "Deletes all log entries." 
}, "write": { "name": "Write", diff --git a/homeassistant/components/systemmonitor/coordinator.py b/homeassistant/components/systemmonitor/coordinator.py index d12eddbb14a..32a171a11ca 100644 --- a/homeassistant/components/systemmonitor/coordinator.py +++ b/homeassistant/components/systemmonitor/coordinator.py @@ -37,17 +37,29 @@ class SensorData: def as_dict(self) -> dict[str, Any]: """Return as dict.""" + disk_usage = None + if self.disk_usage: + disk_usage = {k: str(v) for k, v in self.disk_usage.items()} + io_counters = None + if self.io_counters: + io_counters = {k: str(v) for k, v in self.io_counters.items()} + addresses = None + if self.addresses: + addresses = {k: str(v) for k, v in self.addresses.items()} + temperatures = None + if self.temperatures: + temperatures = {k: str(v) for k, v in self.temperatures.items()} return { - "disk_usage": {k: str(v) for k, v in self.disk_usage.items()}, + "disk_usage": disk_usage, "swap": str(self.swap), "memory": str(self.memory), - "io_counters": {k: str(v) for k, v in self.io_counters.items()}, - "addresses": {k: str(v) for k, v in self.addresses.items()}, + "io_counters": io_counters, + "addresses": addresses, "load": str(self.load), "cpu_percent": str(self.cpu_percent), "boot_time": str(self.boot_time), "processes": str(self.processes), - "temperatures": {k: str(v) for k, v in self.temperatures.items()}, + "temperatures": temperatures, } diff --git a/homeassistant/components/systemmonitor/manifest.json b/homeassistant/components/systemmonitor/manifest.json index 236f25bb1ed..4c6ae0653d3 100644 --- a/homeassistant/components/systemmonitor/manifest.json +++ b/homeassistant/components/systemmonitor/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/systemmonitor", "iot_class": "local_push", "loggers": ["psutil"], - "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.0.0"] + "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.1.0"] } diff --git a/homeassistant/components/systemmonitor/strings.json b/homeassistant/components/systemmonitor/strings.json index dde97918bc3..e595e628853 100644 --- a/homeassistant/components/systemmonitor/strings.json +++ b/homeassistant/components/systemmonitor/strings.json @@ -5,7 +5,7 @@ }, "step": { "user": { - "description": "Press submit for initial setup. On the created config entry, press configure to add sensors for selected processes" + "description": "Select **Submit** for initial setup. 
On the created config entry, select configure to add sensors for selected processes" } } }, diff --git a/homeassistant/components/tado/__init__.py b/homeassistant/components/tado/__init__.py index 084819d8e68..cc5dee77617 100644 --- a/homeassistant/components/tado/__init__.py +++ b/homeassistant/components/tado/__init__.py @@ -1,9 +1,7 @@ """Support for the (unofficial) Tado API.""" -from dataclasses import dataclass from datetime import timedelta import logging -from typing import Any import requests.exceptions @@ -22,9 +20,6 @@ from .const import ( CONST_OVERLAY_TADO_MODE, CONST_OVERLAY_TADO_OPTIONS, DOMAIN, - UPDATE_LISTENER, - UPDATE_MOBILE_DEVICE_TRACK, - UPDATE_TRACK, ) from .services import setup_services from .tado_connector import TadoConnector @@ -55,17 +50,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -type TadoConfigEntry = ConfigEntry[TadoRuntimeData] - - -@dataclass -class TadoRuntimeData: - """Dataclass for Tado runtime data.""" - - tadoconnector: TadoConnector - update_track: Any - update_mobile_device_track: Any - update_listener: Any +type TadoConfigEntry = ConfigEntry[TadoConnector] async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: @@ -99,26 +84,25 @@ async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool await hass.async_add_executor_job(tadoconnector.update) # Poll for updates in the background - update_track = async_track_time_interval( - hass, - lambda now: tadoconnector.update(), - SCAN_INTERVAL, + entry.async_on_unload( + async_track_time_interval( + hass, + lambda now: tadoconnector.update(), + SCAN_INTERVAL, + ) ) - update_mobile_devices = async_track_time_interval( - hass, - lambda now: tadoconnector.update_mobile_devices(), - SCAN_MOBILE_DEVICE_INTERVAL, + entry.async_on_unload( + async_track_time_interval( + hass, + lambda now: tadoconnector.update_mobile_devices(), + SCAN_MOBILE_DEVICE_INTERVAL, + ) ) - update_listener = entry.add_update_listener(_async_update_listener) + entry.async_on_unload(entry.add_update_listener(_async_update_listener)) - entry.runtime_data = TadoRuntimeData( - tadoconnector=tadoconnector, - update_track=update_track, - update_mobile_device_track=update_mobile_devices, - update_listener=update_listener, - ) + entry.runtime_data = tadoconnector await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -147,15 +131,6 @@ async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> Non await hass.config_entries.async_reload(entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - hass.data[DOMAIN][entry.entry_id][UPDATE_TRACK]() - hass.data[DOMAIN][entry.entry_id][UPDATE_LISTENER]() - hass.data[DOMAIN][entry.entry_id][UPDATE_MOBILE_DEVICE_TRACK]() - - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/tado/binary_sensor.py b/homeassistant/components/tado/binary_sensor.py index ec8eb9331ac..25c1c801155 100644 --- a/homeassistant/components/tado/binary_sensor.py +++ b/homeassistant/components/tado/binary_sensor.py @@ -121,7 +121,7 @@ async def async_setup_entry( ) -> None: """Set up the Tado sensor platform.""" - tado: TadoConnector = 
entry.runtime_data.tadoconnector + tado = entry.runtime_data devices = tado.devices zones = tado.zones entities: list[BinarySensorEntity] = [] diff --git a/homeassistant/components/tado/climate.py b/homeassistant/components/tado/climate.py index 314a2315d0a..5a81e951293 100644 --- a/homeassistant/components/tado/climate.py +++ b/homeassistant/components/tado/climate.py @@ -16,6 +16,7 @@ from homeassistant.components.climate import ( SWING_BOTH, SWING_HORIZONTAL, SWING_OFF, + SWING_ON, SWING_VERTICAL, ClimateEntity, ClimateEntityFeature, @@ -47,7 +48,6 @@ from .const import ( HA_TO_TADO_FAN_MODE_MAP, HA_TO_TADO_FAN_MODE_MAP_LEGACY, HA_TO_TADO_HVAC_MODE_MAP, - HA_TO_TADO_SWING_MODE_MAP, ORDERED_KNOWN_TADO_MODES, PRESET_AUTO, SIGNAL_TADO_UPDATE_RECEIVED, @@ -55,17 +55,20 @@ from .const import ( SUPPORT_PRESET_MANUAL, TADO_DEFAULT_MAX_TEMP, TADO_DEFAULT_MIN_TEMP, - TADO_FAN_LEVELS, - TADO_FAN_SPEEDS, + TADO_FANLEVEL_SETTING, + TADO_FANSPEED_SETTING, + TADO_HORIZONTAL_SWING_SETTING, TADO_HVAC_ACTION_TO_HA_HVAC_ACTION, TADO_MODES_WITH_NO_TEMP_SETTING, TADO_SWING_OFF, TADO_SWING_ON, + TADO_SWING_SETTING, TADO_TO_HA_FAN_MODE_MAP, TADO_TO_HA_FAN_MODE_MAP_LEGACY, TADO_TO_HA_HVAC_MODE_MAP, TADO_TO_HA_OFFSET_MAP, TADO_TO_HA_SWING_MODE_MAP, + TADO_VERTICAL_SWING_SETTING, TEMP_OFFSET, TYPE_AIR_CONDITIONING, TYPE_HEATING, @@ -102,7 +105,7 @@ async def async_setup_entry( ) -> None: """Set up the Tado climate platform.""" - tado: TadoConnector = entry.runtime_data.tadoconnector + tado = entry.runtime_data entities = await hass.async_add_executor_job(_generate_entities, tado) platform = entity_platform.async_get_current_platform() @@ -166,29 +169,30 @@ def create_climate_entity( supported_hvac_modes.append(TADO_TO_HA_HVAC_MODE_MAP[mode]) if ( - capabilities[mode].get("swings") - or capabilities[mode].get("verticalSwing") - or capabilities[mode].get("horizontalSwing") + TADO_SWING_SETTING in capabilities[mode] + or TADO_VERTICAL_SWING_SETTING in capabilities[mode] + or TADO_HORIZONTAL_SWING_SETTING in capabilities[mode] ): support_flags |= ClimateEntityFeature.SWING_MODE supported_swing_modes = [] - if capabilities[mode].get("swings"): + if TADO_SWING_SETTING in capabilities[mode]: supported_swing_modes.append( TADO_TO_HA_SWING_MODE_MAP[TADO_SWING_ON] ) - if capabilities[mode].get("verticalSwing"): + if TADO_VERTICAL_SWING_SETTING in capabilities[mode]: supported_swing_modes.append(SWING_VERTICAL) - if capabilities[mode].get("horizontalSwing"): + if TADO_HORIZONTAL_SWING_SETTING in capabilities[mode]: supported_swing_modes.append(SWING_HORIZONTAL) if ( SWING_HORIZONTAL in supported_swing_modes - and SWING_HORIZONTAL in supported_swing_modes + and SWING_VERTICAL in supported_swing_modes ): supported_swing_modes.append(SWING_BOTH) supported_swing_modes.append(TADO_TO_HA_SWING_MODE_MAP[TADO_SWING_OFF]) - if not capabilities[mode].get("fanSpeeds") and not capabilities[mode].get( - "fanLevel" + if ( + TADO_FANSPEED_SETTING not in capabilities[mode] + and TADO_FANLEVEL_SETTING not in capabilities[mode] ): continue @@ -197,14 +201,15 @@ def create_climate_entity( if supported_fan_modes: continue - if capabilities[mode].get("fanSpeeds"): + if TADO_FANSPEED_SETTING in capabilities[mode]: supported_fan_modes = generate_supported_fanmodes( - TADO_TO_HA_FAN_MODE_MAP_LEGACY, capabilities[mode]["fanSpeeds"] + TADO_TO_HA_FAN_MODE_MAP_LEGACY, + capabilities[mode][TADO_FANSPEED_SETTING], ) else: supported_fan_modes = generate_supported_fanmodes( - TADO_TO_HA_FAN_MODE_MAP, capabilities[mode]["fanLevel"] + 
TADO_TO_HA_FAN_MODE_MAP, capabilities[mode][TADO_FANLEVEL_SETTING] ) cool_temperatures = capabilities[CONST_MODE_COOL]["temperatures"] @@ -264,7 +269,6 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): _attr_name = None _attr_translation_key = DOMAIN _available = False - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -316,12 +320,16 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._target_temp: float | None = None self._current_tado_fan_speed = CONST_FAN_OFF + self._current_tado_fan_level = CONST_FAN_OFF self._current_tado_hvac_mode = CONST_MODE_OFF self._current_tado_hvac_action = HVACAction.OFF self._current_tado_swing_mode = TADO_SWING_OFF self._current_tado_vertical_swing = TADO_SWING_OFF self._current_tado_horizontal_swing = TADO_SWING_OFF + capabilities = tado.get_capabilities(zone_id) + self._current_tado_capabilities = capabilities + self._tado_zone_data: PyTado.TadoZone = {} self._tado_geofence_data: dict[str, str] | None = None @@ -382,20 +390,23 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): def fan_mode(self) -> str | None: """Return the fan setting.""" if self._ac_device: - return TADO_TO_HA_FAN_MODE_MAP.get( - self._current_tado_fan_speed, - TADO_TO_HA_FAN_MODE_MAP_LEGACY.get( + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + return TADO_TO_HA_FAN_MODE_MAP_LEGACY.get( self._current_tado_fan_speed, FAN_AUTO - ), - ) + ) + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + return TADO_TO_HA_FAN_MODE_MAP.get( + self._current_tado_fan_level, FAN_AUTO + ) + return FAN_AUTO return None def set_fan_mode(self, fan_mode: str) -> None: """Turn fan on/off.""" - if self._current_tado_fan_speed in TADO_FAN_LEVELS: - self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP[fan_mode]) - else: + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP_LEGACY[fan_mode]) + elif self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP[fan_mode]) @property def preset_mode(self) -> str: @@ -555,24 +566,30 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): swing = None if self._attr_swing_modes is None: return - if ( - SWING_VERTICAL in self._attr_swing_modes - or SWING_HORIZONTAL in self._attr_swing_modes - ): - if swing_mode == SWING_VERTICAL: + if swing_mode == SWING_OFF: + if self._is_valid_setting_for_hvac_mode(TADO_SWING_SETTING): + swing = TADO_SWING_OFF + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + horizontal_swing = TADO_SWING_OFF + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + vertical_swing = TADO_SWING_OFF + if swing_mode == SWING_ON: + swing = TADO_SWING_ON + if swing_mode == SWING_VERTICAL: + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): vertical_swing = TADO_SWING_ON - elif swing_mode == SWING_HORIZONTAL: + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + horizontal_swing = TADO_SWING_OFF + if swing_mode == SWING_HORIZONTAL: + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + vertical_swing = TADO_SWING_OFF + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): horizontal_swing = TADO_SWING_ON - elif swing_mode == SWING_BOTH: + if swing_mode == SWING_BOTH: + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): vertical_swing = TADO_SWING_ON + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): horizontal_swing = 
TADO_SWING_ON - elif swing_mode == SWING_OFF: - if SWING_VERTICAL in self._attr_swing_modes: - vertical_swing = TADO_SWING_OFF - if SWING_HORIZONTAL in self._attr_swing_modes: - horizontal_swing = TADO_SWING_OFF - else: - swing = HA_TO_TADO_SWING_MODE_MAP[swing_mode] self._control_hvac( swing_mode=swing, @@ -596,21 +613,23 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._device_id ][TEMP_OFFSET][offset_key] - self._current_tado_fan_speed = ( - self._tado_zone_data.current_fan_level - if self._tado_zone_data.current_fan_level is not None - else self._tado_zone_data.current_fan_speed - ) - self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode self._current_tado_hvac_action = self._tado_zone_data.current_hvac_action - self._current_tado_swing_mode = self._tado_zone_data.current_swing_mode - self._current_tado_vertical_swing = ( - self._tado_zone_data.current_vertical_swing_mode - ) - self._current_tado_horizontal_swing = ( - self._tado_zone_data.current_horizontal_swing_mode - ) + + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._current_tado_fan_level = self._tado_zone_data.current_fan_level + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + self._current_tado_fan_speed = self._tado_zone_data.current_fan_speed + if self._is_valid_setting_for_hvac_mode(TADO_SWING_SETTING): + self._current_tado_swing_mode = self._tado_zone_data.current_swing_mode + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + self._current_tado_vertical_swing = ( + self._tado_zone_data.current_vertical_swing_mode + ) + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + self._current_tado_horizontal_swing = ( + self._tado_zone_data.current_horizontal_swing_mode + ) @callback def _async_update_zone_callback(self) -> None: @@ -665,7 +684,10 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._target_temp = target_temp if fan_mode: - self._current_tado_fan_speed = fan_mode + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + self._current_tado_fan_speed = fan_mode + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._current_tado_fan_level = fan_mode if swing_mode: self._current_tado_swing_mode = swing_mode @@ -735,21 +757,32 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): fan_speed = None fan_level = None if self.supported_features & ClimateEntityFeature.FAN_MODE: - if self._current_tado_fan_speed in TADO_FAN_LEVELS: - fan_level = self._current_tado_fan_speed - elif self._current_tado_fan_speed in TADO_FAN_SPEEDS: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_FANSPEED_SETTING, self._current_tado_fan_speed + ): fan_speed = self._current_tado_fan_speed + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_FANLEVEL_SETTING, self._current_tado_fan_level + ): + fan_level = self._current_tado_fan_level + swing = None vertical_swing = None horizontal_swing = None if ( self.supported_features & ClimateEntityFeature.SWING_MODE ) and self._attr_swing_modes is not None: - if SWING_VERTICAL in self._attr_swing_modes: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_VERTICAL_SWING_SETTING, self._current_tado_vertical_swing + ): vertical_swing = self._current_tado_vertical_swing - if SWING_HORIZONTAL in self._attr_swing_modes: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_HORIZONTAL_SWING_SETTING, self._current_tado_horizontal_swing + ): horizontal_swing = self._current_tado_horizontal_swing - if 
vertical_swing is None and horizontal_swing is None: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_SWING_SETTING, self._current_tado_swing_mode + ): swing = self._current_tado_swing_mode self._tado.set_zone_overlay( @@ -765,3 +798,20 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): vertical_swing=vertical_swing, # api defaults to not sending verticalSwing if swing not None horizontal_swing=horizontal_swing, # api defaults to not sending horizontalSwing if swing not None ) + + def _is_valid_setting_for_hvac_mode(self, setting: str) -> bool: + return ( + self._current_tado_capabilities.get(self._current_tado_hvac_mode, {}).get( + setting + ) + is not None + ) + + def _is_current_setting_supported_by_current_hvac_mode( + self, setting: str, current_state: str | None + ) -> bool: + if self._is_valid_setting_for_hvac_mode(setting): + return current_state in self._current_tado_capabilities[ + self._current_tado_hvac_mode + ].get(setting, []) + return False diff --git a/homeassistant/components/tado/config_flow.py b/homeassistant/components/tado/config_flow.py index d27a8c4b10b..c7bb7684901 100644 --- a/homeassistant/components/tado/config_flow.py +++ b/homeassistant/components/tado/config_flow.py @@ -73,7 +73,6 @@ class TadoConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Tado.""" VERSION = 1 - config_entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -118,22 +117,13 @@ class TadoConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - self.config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors: dict[str, str] = {} - assert self.config_entry + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: - user_input[CONF_USERNAME] = self.config_entry.data[CONF_USERNAME] + user_input[CONF_USERNAME] = reconfigure_entry.data[CONF_USERNAME] try: await validate_input(self.hass, user_input) except CannotConnect: @@ -148,13 +138,11 @@ class TadoConfigFlow(ConfigFlow, domain=DOMAIN): if not errors: return self.async_update_reload_and_abort( - self.config_entry, - data={**self.config_entry.data, **user_input}, - reason="reconfigure_successful", + reconfigure_entry, data_updates=user_input ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_PASSWORD): str, @@ -162,7 +150,7 @@ class TadoConfigFlow(ConfigFlow, domain=DOMAIN): ), errors=errors, description_placeholders={ - CONF_USERNAME: self.config_entry.data[CONF_USERNAME] + CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME] }, ) @@ -172,16 +160,12 @@ class TadoConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle an option flow for Tado.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> 
ConfigFlowResult: diff --git a/homeassistant/components/tado/const.py b/homeassistant/components/tado/const.py index 5c6a80c5beb..bdc4bff1943 100644 --- a/homeassistant/components/tado/const.py +++ b/homeassistant/components/tado/const.py @@ -38,8 +38,6 @@ TADO_HVAC_ACTION_TO_HA_HVAC_ACTION = { CONF_FALLBACK = "fallback" CONF_HOME_ID = "home_id" DATA = "data" -UPDATE_TRACK = "update_track" -UPDATE_MOBILE_DEVICE_TRACK = "update_mobile_device_track" # Weather CONDITIONS_MAP = { @@ -207,8 +205,6 @@ DEFAULT_NAME = "Tado" TADO_HOME = "Home" TADO_ZONE = "Zone" -UPDATE_LISTENER = "update_listener" - # Constants for Temperature Offset INSIDE_TEMPERATURE_MEASUREMENT = "INSIDE_TEMPERATURE_MEASUREMENT" TEMP_OFFSET = "temperatureOffset" @@ -234,3 +230,10 @@ CONF_READING = "reading" ATTR_MESSAGE = "message" WATER_HEATER_FALLBACK_REPAIR = "water_heater_fallback" + +TADO_SWING_SETTING = "swings" +TADO_FANSPEED_SETTING = "fanSpeeds" + +TADO_FANLEVEL_SETTING = "fanLevel" +TADO_VERTICAL_SWING_SETTING = "verticalSwing" +TADO_HORIZONTAL_SWING_SETTING = "horizontalSwing" diff --git a/homeassistant/components/tado/device_tracker.py b/homeassistant/components/tado/device_tracker.py index b4456591b49..95e031329c3 100644 --- a/homeassistant/components/tado/device_tracker.py +++ b/homeassistant/components/tado/device_tracker.py @@ -6,7 +6,6 @@ import logging from homeassistant.components.device_tracker import ( DOMAIN as DEVICE_TRACKER_DOMAIN, - SourceType, TrackerEntity, ) from homeassistant.const import STATE_HOME, STATE_NOT_HOME @@ -29,7 +28,7 @@ async def async_setup_entry( ) -> None: """Set up the Tado device scannery entity.""" _LOGGER.debug("Setting up Tado device scanner entity") - tado: TadoConnector = entry.runtime_data.tadoconnector + tado = entry.runtime_data tracked: set = set() # Fix non-string unique_id for device trackers @@ -101,8 +100,6 @@ class TadoDeviceTrackerEntity(TrackerEntity): self._device_name = device_name self._tado = tado self._active = False - self._latitude = None - self._longitude = None @callback def update_state(self) -> None: @@ -160,18 +157,3 @@ class TadoDeviceTrackerEntity(TrackerEntity): def location_name(self) -> str: """Return the state of the device.""" return STATE_HOME if self._active else STATE_NOT_HOME - - @property - def latitude(self) -> None: - """Return latitude value of the device.""" - return None - - @property - def longitude(self) -> None: - """Return longitude value of the device.""" - return None - - @property - def source_type(self) -> SourceType: - """Return the source type.""" - return SourceType.GPS diff --git a/homeassistant/components/tado/icons.json b/homeassistant/components/tado/icons.json index 83ef6d4b332..c799bef0260 100644 --- a/homeassistant/components/tado/icons.json +++ b/homeassistant/components/tado/icons.json @@ -1,8 +1,16 @@ { "services": { - "set_climate_timer": "mdi:timer", - "set_water_heater_timer": "mdi:timer", - "set_climate_temperature_offset": "mdi:thermometer", - "add_meter_reading": "mdi:counter" + "set_climate_timer": { + "service": "mdi:timer" + }, + "set_water_heater_timer": { + "service": "mdi:timer" + }, + "set_climate_temperature_offset": { + "service": "mdi:thermometer" + }, + "add_meter_reading": { + "service": "mdi:counter" + } } } diff --git a/homeassistant/components/tado/sensor.py b/homeassistant/components/tado/sensor.py index e5e2948b3a9..8bb13a02cd1 100644 --- a/homeassistant/components/tado/sensor.py +++ b/homeassistant/components/tado/sensor.py @@ -71,10 +71,8 @@ def get_automatic_geofencing(data: dict[str, str]) 
-> bool: def get_geofencing_mode(data: dict[str, str]) -> str: """Return Geofencing Mode based on Presence and Presence Locked attributes.""" - tado_mode = "" tado_mode = data.get("presence", "unknown") - geofencing_switch_mode = "" if "presenceLocked" in data: if data["presenceLocked"]: geofencing_switch_mode = "manual" @@ -199,7 +197,7 @@ async def async_setup_entry( ) -> None: """Set up the Tado sensor platform.""" - tado: TadoConnector = entry.runtime_data.tadoconnector + tado = entry.runtime_data zones = tado.zones entities: list[SensorEntity] = [] diff --git a/homeassistant/components/tado/services.py b/homeassistant/components/tado/services.py index 8401f1925eb..89711808066 100644 --- a/homeassistant/components/tado/services.py +++ b/homeassistant/components/tado/services.py @@ -15,7 +15,6 @@ from .const import ( DOMAIN, SERVICE_ADD_METER_READING, ) -from .tado_connector import TadoConnector _LOGGER = logging.getLogger(__name__) SCHEMA_ADD_METER_READING = vol.Schema( @@ -44,7 +43,7 @@ def setup_services(hass: HomeAssistant) -> None: if entry is None: raise ServiceValidationError("Config entry not found") - tadoconnector: TadoConnector = entry.runtime_data.tadoconnector + tadoconnector = entry.runtime_data response: dict = await hass.async_add_executor_job( tadoconnector.set_meter_reading, call.data[CONF_READING] diff --git a/homeassistant/components/tado/strings.json b/homeassistant/components/tado/strings.json index 39453cb5fe1..8124570f9c9 100644 --- a/homeassistant/components/tado/strings.json +++ b/homeassistant/components/tado/strings.json @@ -12,7 +12,7 @@ }, "title": "Connect to your Tado account" }, - "reconfigure_confirm": { + "reconfigure": { "title": "Reconfigure your Tado", "description": "Reconfigure the entry, for your account: `{username}`.", "data": { diff --git a/homeassistant/components/tado/water_heater.py b/homeassistant/components/tado/water_heater.py index 896c10acf67..6c964cfaddd 100644 --- a/homeassistant/components/tado/water_heater.py +++ b/homeassistant/components/tado/water_heater.py @@ -67,7 +67,7 @@ async def async_setup_entry( ) -> None: """Set up the Tado water heater platform.""" - tado: TadoConnector = entry.runtime_data.tadoconnector + tado = entry.runtime_data entities = await hass.async_add_executor_job(_generate_entities, tado) platform = entity_platform.async_get_current_platform() diff --git a/homeassistant/components/tag/__init__.py b/homeassistant/components/tag/__init__.py index 0462c5bec34..47c1d14ce60 100644 --- a/homeassistant/components/tag/__init__.py +++ b/homeassistant/components/tag/__init__.py @@ -84,7 +84,9 @@ def _create_entry( original_name=f"{DEFAULT_NAME} {tag_id}", suggested_object_id=slugify(name) if name else tag_id, ) - return entity_registry.async_update_entity(entry.entity_id, name=name) + if name: + return entity_registry.async_update_entity(entry.entity_id, name=name) + return entry class TagStore(Store[collection.SerializedStorageCollection]): @@ -104,7 +106,6 @@ class TagStore(Store[collection.SerializedStorageCollection]): for tag in data["items"]: # Copy name in tag store to the entity registry _create_entry(entity_registry, tag[CONF_ID], tag.get(CONF_NAME)) - tag["migrated"] = True if old_major_version == 1 and old_minor_version < 3: # Version 1.3 removes tag_id from the store for tag in data["items"]: @@ -176,10 +177,7 @@ class TagStorageCollection(collection.DictStorageCollection): We don't store the name, it's stored in the entity registry. 
""" - # Preserve the name of migrated entries to allow downgrading to 2024.5 - # without losing tag names. This can be removed in HA Core 2025.1. - migrated = item_id in self.data and "migrated" in self.data[item_id] - return {k: v for k, v in item.items() if k != CONF_NAME or migrated} + return {k: v for k, v in item.items() if k != CONF_NAME} class TagDictStorageCollectionWebsocket( diff --git a/homeassistant/components/tailscale/__init__.py b/homeassistant/components/tailscale/__init__.py index 5498687332f..549bf07e181 100644 --- a/homeassistant/components/tailscale/__init__.py +++ b/homeassistant/components/tailscale/__init__.py @@ -2,17 +2,9 @@ from __future__ import annotations -from tailscale import Device as TailscaleDevice - from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) from .const import DOMAIN from .coordinator import TailscaleDataUpdateCoordinator @@ -37,41 +29,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if unload_ok: del hass.data[DOMAIN][entry.entry_id] return unload_ok - - -class TailscaleEntity(CoordinatorEntity): - """Defines a Tailscale base entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - *, - coordinator: DataUpdateCoordinator, - device: TailscaleDevice, - description: EntityDescription, - ) -> None: - """Initialize a Tailscale sensor.""" - super().__init__(coordinator=coordinator) - self.entity_description = description - self.device_id = device.device_id - self._attr_unique_id = f"{device.device_id}_{description.key}" - - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - device: TailscaleDevice = self.coordinator.data[self.device_id] - - configuration_url = "https://login.tailscale.com/admin/machines/" - if device.addresses: - configuration_url += device.addresses[0] - - return DeviceInfo( - configuration_url=configuration_url, - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, device.device_id)}, - manufacturer="Tailscale Inc.", - model=device.os, - name=device.name.split(".")[0], - sw_version=device.client_version, - ) diff --git a/homeassistant/components/tailscale/binary_sensor.py b/homeassistant/components/tailscale/binary_sensor.py index 7803a7eb472..981f871de09 100644 --- a/homeassistant/components/tailscale/binary_sensor.py +++ b/homeassistant/components/tailscale/binary_sensor.py @@ -17,8 +17,8 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import TailscaleEntity from .const import DOMAIN +from .entity import TailscaleEntity @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tailscale/config_flow.py b/homeassistant/components/tailscale/config_flow.py index ef70ed0afcc..ab57e9eadc6 100644 --- a/homeassistant/components/tailscale/config_flow.py +++ b/homeassistant/components/tailscale/config_flow.py @@ -8,13 +8,15 @@ from typing import Any from tailscale import Tailscale, TailscaleAuthenticationError, TailscaleError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_TAILNET, DOMAIN +AUTHKEYS_URL = "https://login.tailscale.com/admin/settings/keys" + async def validate_input(hass: HomeAssistant, *, tailnet: str, api_key: str) -> None: """Try using the give tailnet & api key against the Tailscale API.""" @@ -32,8 +34,6 @@ class TailscaleFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -66,9 +66,7 @@ class TailscaleFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", - description_placeholders={ - "authkeys_url": "https://login.tailscale.com/admin/settings/authkeys" - }, + description_placeholders={"authkeys_url": AUTHKEYS_URL}, data_schema=vol.Schema( { vol.Required( @@ -86,9 +84,6 @@ class TailscaleFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Tailscale.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -97,11 +92,12 @@ class TailscaleFlowHandler(ConfigFlow, domain=DOMAIN): """Handle re-authentication with Tailscale.""" errors = {} - if user_input is not None and self.reauth_entry: + if user_input is not None: + reauth_entry = self._get_reauth_entry() try: await validate_input( self.hass, - tailnet=self.reauth_entry.data[CONF_TAILNET], + tailnet=reauth_entry.data[CONF_TAILNET], api_key=user_input[CONF_API_KEY], ) except TailscaleAuthenticationError: @@ -109,20 +105,14 @@ class TailscaleFlowHandler(ConfigFlow, domain=DOMAIN): except TailscaleError: errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data={ - **self.reauth_entry.data, - CONF_API_KEY: user_input[CONF_API_KEY], - }, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_API_KEY: user_input[CONF_API_KEY]}, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", + description_placeholders={"authkeys_url": AUTHKEYS_URL}, data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), errors=errors, ) diff --git a/homeassistant/components/tailscale/entity.py b/homeassistant/components/tailscale/entity.py new file mode 100644 index 00000000000..a14b873a00f --- /dev/null +++ b/homeassistant/components/tailscale/entity.py @@ -0,0 +1,52 @@ +"""The Tailscale integration.""" + +from __future__ import 
annotations + +from tailscale import Device as TailscaleDevice + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import DOMAIN + + +class TailscaleEntity(CoordinatorEntity): + """Defines a Tailscale base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + *, + coordinator: DataUpdateCoordinator, + device: TailscaleDevice, + description: EntityDescription, + ) -> None: + """Initialize a Tailscale sensor.""" + super().__init__(coordinator=coordinator) + self.entity_description = description + self.device_id = device.device_id + self._attr_unique_id = f"{device.device_id}_{description.key}" + + @property + def device_info(self) -> DeviceInfo: + """Return the device info.""" + device: TailscaleDevice = self.coordinator.data[self.device_id] + + configuration_url = "https://login.tailscale.com/admin/machines/" + if device.addresses: + configuration_url += device.addresses[0] + + return DeviceInfo( + configuration_url=configuration_url, + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, device.device_id)}, + manufacturer="Tailscale Inc.", + model=device.os, + name=device.name.split(".")[0], + sw_version=device.client_version, + ) diff --git a/homeassistant/components/tailscale/manifest.json b/homeassistant/components/tailscale/manifest.json index 24f485fcdbd..7d571fe0675 100644 --- a/homeassistant/components/tailscale/manifest.json +++ b/homeassistant/components/tailscale/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tailscale", "integration_type": "hub", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["tailscale==0.6.1"] } diff --git a/homeassistant/components/tailscale/sensor.py b/homeassistant/components/tailscale/sensor.py index 99b91d17442..fa4c966a7d7 100644 --- a/homeassistant/components/tailscale/sensor.py +++ b/homeassistant/components/tailscale/sensor.py @@ -18,8 +18,8 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import TailscaleEntity from .const import DOMAIN +from .entity import TailscaleEntity @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tailscale/strings.json b/homeassistant/components/tailscale/strings.json index 8d7fcc0c87b..89a1d4554b2 100644 --- a/homeassistant/components/tailscale/strings.json +++ b/homeassistant/components/tailscale/strings.json @@ -2,14 +2,14 @@ "config": { "step": { "user": { - "description": "This integration monitors your Tailscale network, it **DOES NOT** make your Home Assistant accessible via Tailscale VPN. \n\nTo authenticate with Tailscale you'll need to create an API key at {authkeys_url}.\n\nA Tailnet is the name of your Tailscale network. You can find it in the top left corner in the Tailscale Admin Panel (beside the Tailscale logo).", + "description": "This integration monitors your Tailscale network, it **DOES NOT** make your Home Assistant accessible via Tailscale VPN. \n\nTo authenticate with Tailscale you'll need to create an API access token at {authkeys_url}.\n\nA Tailnet is the name of your Tailscale network. 
You can find it in the top left corner in the Tailscale Admin Panel (beside the Tailscale logo).", "data": { "tailnet": "Tailnet", "api_key": "[%key:common::config_flow::data::api_key%]" } }, "reauth_confirm": { - "description": "Tailscale API tokens are valid for 90-days. You can create a fresh Tailscale API key at https://login.tailscale.com/admin/settings/authkeys.", + "description": "Tailscale API access tokens are valid for 90-days. You can create a fresh Tailscale API access token at {authkeys_url}.", "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } diff --git a/homeassistant/components/tailwind/__init__.py b/homeassistant/components/tailwind/__init__.py index 6f1a234e94a..b191d78f2a6 100644 --- a/homeassistant/components/tailwind/__init__.py +++ b/homeassistant/components/tailwind/__init__.py @@ -2,14 +2,12 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from .const import DOMAIN -from .coordinator import TailwindDataUpdateCoordinator -from .typing import TailwindConfigEntry +from .coordinator import TailwindConfigEntry, TailwindDataUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.COVER, Platform.NUMBER] @@ -39,6 +37,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TailwindConfigEntry) -> return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TailwindConfigEntry) -> bool: """Unload Tailwind config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/tailwind/binary_sensor.py b/homeassistant/components/tailwind/binary_sensor.py index 0ce0b4bd964..d2f8e1e2ced 100644 --- a/homeassistant/components/tailwind/binary_sensor.py +++ b/homeassistant/components/tailwind/binary_sensor.py @@ -16,8 +16,8 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .coordinator import TailwindConfigEntry from .entity import TailwindDoorEntity -from .typing import TailwindConfigEntry @dataclass(kw_only=True, frozen=True) diff --git a/homeassistant/components/tailwind/button.py b/homeassistant/components/tailwind/button.py index 2a675bbfdf7..edff3434866 100644 --- a/homeassistant/components/tailwind/button.py +++ b/homeassistant/components/tailwind/button.py @@ -19,8 +19,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN +from .coordinator import TailwindConfigEntry from .entity import TailwindEntity -from .typing import TailwindConfigEntry @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tailwind/config_flow.py b/homeassistant/components/tailwind/config_flow.py index 1cb94625266..48fe2d23727 100644 --- a/homeassistant/components/tailwind/config_flow.py +++ b/homeassistant/components/tailwind/config_flow.py @@ -17,7 +17,7 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import 
CONF_HOST, CONF_TOKEN from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -41,7 +41,6 @@ class TailwindFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 host: str - reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -144,11 +143,10 @@ class TailwindFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_reauth(self, _: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle initiation of re-authentication with a Tailwind device.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -157,10 +155,10 @@ class TailwindFlowHandler(ConfigFlow, domain=DOMAIN): """Handle re-authentication with a Tailwind device.""" errors = {} - if user_input is not None and self.reauth_entry: + if user_input is not None: try: return await self._async_step_create_entry( - host=self.reauth_entry.data[CONF_HOST], + host=self._get_reauth_entry().data[CONF_HOST], token=user_input[CONF_TOKEN], ) except TailwindAuthenticationError: @@ -212,9 +210,9 @@ class TailwindFlowHandler(ConfigFlow, domain=DOMAIN): except TailwindUnsupportedFirmwareVersionError: return self.async_abort(reason="unsupported_firmware") - if self.reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self.reauth_entry, + self._get_reauth_entry(), data={ CONF_HOST: host, CONF_TOKEN: token, diff --git a/homeassistant/components/tailwind/coordinator.py b/homeassistant/components/tailwind/coordinator.py index 4d1b4af74c9..770751ccc3b 100644 --- a/homeassistant/components/tailwind/coordinator.py +++ b/homeassistant/components/tailwind/coordinator.py @@ -18,11 +18,13 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN, LOGGER +type TailwindConfigEntry = ConfigEntry[TailwindDataUpdateCoordinator] + class TailwindDataUpdateCoordinator(DataUpdateCoordinator[TailwindDeviceStatus]): """Class to manage fetching Tailwind data.""" - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: TailwindConfigEntry) -> None: """Initialize the coordinator.""" self.tailwind = Tailwind( host=entry.data[CONF_HOST], @@ -32,6 +34,7 @@ class TailwindDataUpdateCoordinator(DataUpdateCoordinator[TailwindDeviceStatus]) super().__init__( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_{entry.data[CONF_HOST]}", update_interval=timedelta(seconds=5), ) diff --git a/homeassistant/components/tailwind/cover.py b/homeassistant/components/tailwind/cover.py index 8fb0f313480..8ea1c7d4f6d 100644 --- a/homeassistant/components/tailwind/cover.py +++ b/homeassistant/components/tailwind/cover.py @@ -5,6 +5,7 @@ from __future__ import annotations from typing import Any from gotailwind import ( + TailwindDoorAlreadyInStateError, TailwindDoorDisabledError, TailwindDoorLockedOutError, TailwindDoorOperationCommand, @@ -21,9 +22,9 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from .const import DOMAIN, LOGGER +from .coordinator import TailwindConfigEntry from .entity import TailwindDoorEntity -from .typing import 
TailwindConfigEntry async def async_setup_entry( @@ -77,6 +78,8 @@ class TailwindDoorCoverEntity(TailwindDoorEntity, CoverEntity): translation_domain=DOMAIN, translation_key="door_locked_out", ) from exc + except TailwindDoorAlreadyInStateError: + LOGGER.debug("Already in the requested state: %s", self.entity_id) except TailwindError as exc: raise HomeAssistantError( translation_domain=DOMAIN, @@ -109,6 +112,8 @@ class TailwindDoorCoverEntity(TailwindDoorEntity, CoverEntity): translation_domain=DOMAIN, translation_key="door_locked_out", ) from exc + except TailwindDoorAlreadyInStateError: + LOGGER.debug("Already in the requested state: %s", self.entity_id) except TailwindError as exc: raise HomeAssistantError( translation_domain=DOMAIN, diff --git a/homeassistant/components/tailwind/diagnostics.py b/homeassistant/components/tailwind/diagnostics.py index 5d681356647..b7a51b56775 100644 --- a/homeassistant/components/tailwind/diagnostics.py +++ b/homeassistant/components/tailwind/diagnostics.py @@ -6,7 +6,7 @@ from typing import Any from homeassistant.core import HomeAssistant -from .typing import TailwindConfigEntry +from .coordinator import TailwindConfigEntry async def async_get_config_entry_diagnostics( diff --git a/homeassistant/components/tailwind/manifest.json b/homeassistant/components/tailwind/manifest.json index 2cc5f04fd16..7ad43c929a7 100644 --- a/homeassistant/components/tailwind/manifest.json +++ b/homeassistant/components/tailwind/manifest.json @@ -11,8 +11,7 @@ "documentation": "https://www.home-assistant.io/integrations/tailwind", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "platinum", - "requirements": ["gotailwind==0.2.3"], + "requirements": ["gotailwind==0.3.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/tailwind/number.py b/homeassistant/components/tailwind/number.py index 0ff1f444280..b67df9a6a25 100644 --- a/homeassistant/components/tailwind/number.py +++ b/homeassistant/components/tailwind/number.py @@ -15,8 +15,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN +from .coordinator import TailwindConfigEntry from .entity import TailwindEntity -from .typing import TailwindConfigEntry @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tailwind/quality_scale.yaml b/homeassistant/components/tailwind/quality_scale.yaml new file mode 100644 index 00000000000..90c5d0d5837 --- /dev/null +++ b/homeassistant/components/tailwind/quality_scale.yaml @@ -0,0 +1,76 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Integration does not register custom actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: todo + docs-high-level-description: todo + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: done + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: + status: exempt + comment: | + The coordinator needs translation when the update failed. + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/tailwind/typing.py b/homeassistant/components/tailwind/typing.py deleted file mode 100644 index 514a94a8e78..00000000000 --- a/homeassistant/components/tailwind/typing.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Typings for the Tailwind integration.""" - -from homeassistant.config_entries import ConfigEntry - -from .coordinator import TailwindDataUpdateCoordinator - -type TailwindConfigEntry = ConfigEntry[TailwindDataUpdateCoordinator] diff --git a/homeassistant/components/tami4/button.py b/homeassistant/components/tami4/button.py index 2d8af3fcf89..11377a2dcfb 100644 --- a/homeassistant/components/tami4/button.py +++ b/homeassistant/components/tami4/button.py @@ -5,10 +5,12 @@ from dataclasses import dataclass import logging from Tami4EdgeAPI import Tami4EdgeAPI +from Tami4EdgeAPI.drink import Drink from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import API, DOMAIN @@ -24,12 +26,17 @@ class Tami4EdgeButtonEntityDescription(ButtonEntityDescription): press_fn: Callable[[Tami4EdgeAPI], None] -BUTTONS: tuple[Tami4EdgeButtonEntityDescription] = ( - Tami4EdgeButtonEntityDescription( - key="boil_water", - translation_key="boil_water", - press_fn=lambda api: api.boil_water(), - ), +@dataclass(frozen=True, kw_only=True) +class Tami4EdgeDrinkButtonEntityDescription(ButtonEntityDescription): + """A class that describes Tami4Edge Drink button entities.""" + + press_fn: Callable[[Tami4EdgeAPI, Drink], None] + + +BOIL_WATER_BUTTON = Tami4EdgeButtonEntityDescription( + key="boil_water", + translation_key="boil_water", + press_fn=lambda api: api.boil_water(), ) @@ -37,12 +44,29 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Perform the setup for Tami4Edge.""" - api: Tami4EdgeAPI = hass.data[DOMAIN][entry.entry_id][API] - 
async_add_entities( - Tami4EdgeButton(api, entity_description) for entity_description in BUTTONS + api: Tami4EdgeAPI = hass.data[DOMAIN][entry.entry_id][API] + buttons: list[Tami4EdgeBaseEntity] = [Tami4EdgeButton(api, BOIL_WATER_BUTTON)] + + device = await hass.async_add_executor_job(api.get_device) + drinks = device.drinks + + buttons.extend( + Tami4EdgeDrinkButton( + api=api, + entity_description=Tami4EdgeDrinkButtonEntityDescription( + key=drink.id, + translation_key="prepare_drink", + translation_placeholders={"drink_name": drink.name}, + press_fn=lambda api, drink: api.prepare_drink(drink), + ), + drink=drink, + ) + for drink in drinks ) + async_add_entities(buttons) + class Tami4EdgeButton(Tami4EdgeBaseEntity, ButtonEntity): """Button entity for Tami4Edge.""" @@ -52,3 +76,20 @@ class Tami4EdgeButton(Tami4EdgeBaseEntity, ButtonEntity): def press(self) -> None: """Handle the button press.""" self.entity_description.press_fn(self._api) + + +class Tami4EdgeDrinkButton(Tami4EdgeBaseEntity, ButtonEntity): + """Drink Button entity for Tami4Edge.""" + + entity_description: Tami4EdgeDrinkButtonEntityDescription + + def __init__( + self, api: Tami4EdgeAPI, entity_description: EntityDescription, drink: Drink + ) -> None: + """Initialize the drink button.""" + super().__init__(api=api, entity_description=entity_description) + self.drink = drink + + def press(self) -> None: + """Handle the button press.""" + self.entity_description.press_fn(self._api, self.drink) diff --git a/homeassistant/components/tami4/icons.json b/homeassistant/components/tami4/icons.json index d623bdc6007..803ed9a5016 100644 --- a/homeassistant/components/tami4/icons.json +++ b/homeassistant/components/tami4/icons.json @@ -3,6 +3,9 @@ "button": { "boil_water": { "default": "mdi:kettle-steam" + }, + "prepare_drink": { + "default": "mdi:beer" } }, "sensor": { diff --git a/homeassistant/components/tami4/strings.json b/homeassistant/components/tami4/strings.json index 406964a3bff..040c18fc56d 100644 --- a/homeassistant/components/tami4/strings.json +++ b/homeassistant/components/tami4/strings.json @@ -1,18 +1,12 @@ { "entity": { "sensor": { - "uv_last_replacement": { - "name": "UV last replacement" - }, "uv_upcoming_replacement": { "name": "UV upcoming replacement" }, "uv_installed": { "name": "UV installed" }, - "filter_last_replacement": { - "name": "Filter last replacement" - }, "filter_upcoming_replacement": { "name": "Filter upcoming replacement" }, @@ -26,6 +20,9 @@ "button": { "boil_water": { "name": "Boil water" + }, + "prepare_drink": { + "name": "Prepare {drink_name}" } } }, diff --git a/homeassistant/components/tank_utility/manifest.json b/homeassistant/components/tank_utility/manifest.json index d73c62fa5ec..76240252696 100644 --- a/homeassistant/components/tank_utility/manifest.json +++ b/homeassistant/components/tank_utility/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tank_utility", "iot_class": "cloud_polling", "loggers": ["tank_utility"], + "quality_scale": "legacy", "requirements": ["tank-utility==1.5.0"] } diff --git a/homeassistant/components/tank_utility/sensor.py b/homeassistant/components/tank_utility/sensor.py index 9bdcc1b6f4f..6d4327a1d06 100644 --- a/homeassistant/components/tank_utility/sensor.py +++ b/homeassistant/components/tank_utility/sensor.py @@ -125,7 +125,7 @@ class TankUtilitySensor(SensorEntity): requests.codes.unauthorized, requests.codes.bad_request, ): - _LOGGER.info("Getting new token") + _LOGGER.debug("Getting new token") self._token = 
auth.get_token(self._email, self._password, force=True) data = tank_monitor.get_device_data(self._token, self.device) else: diff --git a/homeassistant/components/tankerkoenig/config_flow.py b/homeassistant/components/tankerkoenig/config_flow.py index e5a84374a09..509f293665d 100644 --- a/homeassistant/components/tankerkoenig/config_flow.py +++ b/homeassistant/components/tankerkoenig/config_flow.py @@ -74,7 +74,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -144,9 +144,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): if not user_input: return self._show_form_reauth() - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - user_input = {**entry.data, **user_input} + reauth_entry = self._get_reauth_entry() + user_input = {**reauth_entry.data, **user_input} tankerkoenig = Tankerkoenig( api_key=user_input[CONF_API_KEY], @@ -157,9 +156,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): except TankerkoenigInvalidKeyError: return self._show_form_reauth(user_input, {CONF_API_KEY: "invalid_auth"}) - self.hass.config_entries.async_update_entry(entry, data=user_input) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=user_input) def _show_form_user( self, @@ -239,9 +236,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle an options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self._stations: dict[str, str] = {} async def async_step_init( diff --git a/homeassistant/components/tankerkoenig/manifest.json b/homeassistant/components/tankerkoenig/manifest.json index c754094655d..72248d006e0 100644 --- a/homeassistant/components/tankerkoenig/manifest.json +++ b/homeassistant/components/tankerkoenig/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tankerkoenig", "iot_class": "cloud_polling", "loggers": ["aiotankerkoenig"], - "quality_scale": "platinum", - "requirements": ["aiotankerkoenig==0.4.1"] + "requirements": ["aiotankerkoenig==0.4.2"] } diff --git a/homeassistant/components/tankerkoenig/strings.json b/homeassistant/components/tankerkoenig/strings.json index 7017c6e5fed..29f4f439dd5 100644 --- a/homeassistant/components/tankerkoenig/strings.json +++ b/homeassistant/components/tankerkoenig/strings.json @@ -42,6 +42,9 @@ "show_on_map": "Show stations on map" } } + }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" } }, "entity": { diff --git a/homeassistant/components/tapsaff/manifest.json b/homeassistant/components/tapsaff/manifest.json index 861329827d7..c4853ca1c8d 100644 --- a/homeassistant/components/tapsaff/manifest.json +++ b/homeassistant/components/tapsaff/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tapsaff", "iot_class": "local_polling", "loggers": ["tapsaff"], + "quality_scale": "legacy", "requirements": ["tapsaff==0.2.1"] } diff --git a/homeassistant/components/tasmota/binary_sensor.py b/homeassistant/components/tasmota/binary_sensor.py index 071cce81880..8a4b501af05 100644 --- 
a/homeassistant/components/tasmota/binary_sensor.py +++ b/homeassistant/components/tasmota/binary_sensor.py @@ -20,7 +20,7 @@ import homeassistant.helpers.event as evt from .const import DATA_REMOVE_DISCOVER_COMPONENT from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW -from .mixins import TasmotaAvailability, TasmotaDiscoveryUpdate +from .entity import TasmotaAvailability, TasmotaDiscoveryUpdate async def async_setup_entry( diff --git a/homeassistant/components/tasmota/cover.py b/homeassistant/components/tasmota/cover.py index 4ab9464e9f9..2cb3cfeea25 100644 --- a/homeassistant/components/tasmota/cover.py +++ b/homeassistant/components/tasmota/cover.py @@ -22,7 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DATA_REMOVE_DISCOVER_COMPONENT from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW -from .mixins import TasmotaAvailability, TasmotaDiscoveryUpdate +from .entity import TasmotaAvailability, TasmotaDiscoveryUpdate async def async_setup_entry( diff --git a/homeassistant/components/tasmota/mixins.py b/homeassistant/components/tasmota/entity.py similarity index 100% rename from homeassistant/components/tasmota/mixins.py rename to homeassistant/components/tasmota/entity.py diff --git a/homeassistant/components/tasmota/fan.py b/homeassistant/components/tasmota/fan.py index 340edff3b35..e927bd6ad72 100644 --- a/homeassistant/components/tasmota/fan.py +++ b/homeassistant/components/tasmota/fan.py @@ -24,7 +24,7 @@ from homeassistant.util.percentage import ( from .const import DATA_REMOVE_DISCOVER_COMPONENT from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW -from .mixins import TasmotaAvailability, TasmotaDiscoveryUpdate +from .entity import TasmotaAvailability, TasmotaDiscoveryUpdate ORDERED_NAMED_FAN_SPEEDS = [ tasmota_const.FAN_SPEED_LOW, @@ -72,7 +72,6 @@ class TasmotaFan( ) _fan_speed = tasmota_const.FAN_SPEED_MEDIUM _tasmota_entity: tasmota_fan.TasmotaFan - _enable_turn_on_off_backwards_compatibility = False def __init__(self, **kwds: Any) -> None: """Initialize the Tasmota fan.""" diff --git a/homeassistant/components/tasmota/light.py b/homeassistant/components/tasmota/light.py index 5effc9c4997..a06e77eceb1 100644 --- a/homeassistant/components/tasmota/light.py +++ b/homeassistant/components/tasmota/light.py @@ -18,7 +18,7 @@ from hatasmota.models import DiscoveryHashType from homeassistant.components import light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, @@ -32,10 +32,11 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from .const import DATA_REMOVE_DISCOVER_COMPONENT from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW -from .mixins import TasmotaAvailability, TasmotaDiscoveryUpdate, TasmotaOnOffEntity +from .entity import TasmotaAvailability, TasmotaDiscoveryUpdate, TasmotaOnOffEntity DEFAULT_BRIGHTNESS_MAX = 255 TASMOTA_BRIGHTNESS_MAX = 100 @@ -199,19 +200,27 @@ class TasmotaLight( return self._color_mode @property - def color_temp(self) -> int | None: - """Return the color temperature in mired.""" - return self._color_temp + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return ( + 
color_util.color_temperature_mired_to_kelvin(self._color_temp) + if self._color_temp + else None + ) @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self._tasmota_entity.min_mireds + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self._tasmota_entity.min_mireds + ) @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self._tasmota_entity.max_mireds + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self._tasmota_entity.max_mireds + ) @property def effect(self) -> str | None: @@ -255,8 +264,13 @@ class TasmotaLight( if ATTR_BRIGHTNESS in kwargs and brightness_supported(supported_color_modes): attributes["brightness"] = scale_brightness(kwargs[ATTR_BRIGHTNESS]) - if ATTR_COLOR_TEMP in kwargs and ColorMode.COLOR_TEMP in supported_color_modes: - attributes["color_temp"] = int(kwargs[ATTR_COLOR_TEMP]) + if ( + ATTR_COLOR_TEMP_KELVIN in kwargs + and ColorMode.COLOR_TEMP in supported_color_modes + ): + attributes["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if ATTR_EFFECT in kwargs: attributes["effect"] = kwargs[ATTR_EFFECT] diff --git a/homeassistant/components/tasmota/sensor.py b/homeassistant/components/tasmota/sensor.py index 30649fa38bd..8cc538e706a 100644 --- a/homeassistant/components/tasmota/sensor.py +++ b/homeassistant/components/tasmota/sensor.py @@ -44,7 +44,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DATA_REMOVE_DISCOVER_COMPONENT from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW -from .mixins import TasmotaAvailability, TasmotaDiscoveryUpdate +from .entity import TasmotaAvailability, TasmotaDiscoveryUpdate DEVICE_CLASS = "device_class" STATE_CLASS = "state_class" diff --git a/homeassistant/components/tasmota/switch.py b/homeassistant/components/tasmota/switch.py index 44c45621e09..b5c19fc2431 100644 --- a/homeassistant/components/tasmota/switch.py +++ b/homeassistant/components/tasmota/switch.py @@ -15,7 +15,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DATA_REMOVE_DISCOVER_COMPONENT from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW -from .mixins import TasmotaAvailability, TasmotaDiscoveryUpdate, TasmotaOnOffEntity +from .entity import TasmotaAvailability, TasmotaDiscoveryUpdate, TasmotaOnOffEntity async def async_setup_entry( diff --git a/homeassistant/components/tautulli/__init__.py b/homeassistant/components/tautulli/__init__.py index 7d3efa4f283..a031354ae7d 100644 --- a/homeassistant/components/tautulli/__init__.py +++ b/homeassistant/components/tautulli/__init__.py @@ -2,17 +2,13 @@ from __future__ import annotations -from pytautulli import PyTautulli, PyTautulliApiUser, PyTautulliHostConfiguration +from pytautulli import PyTautulli, PyTautulliHostConfiguration from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from 
homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DEFAULT_NAME, DOMAIN from .coordinator import TautulliDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] @@ -42,29 +38,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: TautulliConfigEntry) -> async def async_unload_entry(hass: HomeAssistant, entry: TautulliConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -class TautulliEntity(CoordinatorEntity[TautulliDataUpdateCoordinator]): - """Defines a base Tautulli entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: TautulliDataUpdateCoordinator, - description: EntityDescription, - user: PyTautulliApiUser | None = None, - ) -> None: - """Initialize the Tautulli entity.""" - super().__init__(coordinator) - entry_id = coordinator.config_entry.entry_id - self._attr_unique_id = f"{entry_id}_{description.key}" - self.entity_description = description - self.user = user - self._attr_device_info = DeviceInfo( - configuration_url=coordinator.host_configuration.base_url, - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, user.user_id if user else entry_id)}, - manufacturer=DEFAULT_NAME, - name=user.username if user else DEFAULT_NAME, - ) diff --git a/homeassistant/components/tautulli/config_flow.py b/homeassistant/components/tautulli/config_flow.py index a8378786d18..369f9ead2f2 100644 --- a/homeassistant/components/tautulli/config_flow.py +++ b/homeassistant/components/tautulli/config_flow.py @@ -60,14 +60,11 @@ class TautulliConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - if user_input is not None and ( - entry := self.hass.config_entries.async_get_entry(self.context["entry_id"]) - ): - _input = {**entry.data, CONF_API_KEY: user_input[CONF_API_KEY]} + if user_input is not None: + reauth_entry = self._get_reauth_entry() + _input = {**reauth_entry.data, CONF_API_KEY: user_input[CONF_API_KEY]} if (error := await self.validate_input(_input)) is None: - self.hass.config_entries.async_update_entry(entry, data=_input) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=_input) errors["base"] = error return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/tautulli/entity.py b/homeassistant/components/tautulli/entity.py new file mode 100644 index 00000000000..692c2141954 --- /dev/null +++ b/homeassistant/components/tautulli/entity.py @@ -0,0 +1,38 @@ +"""The Tautulli integration.""" + +from __future__ import annotations + +from pytautulli import PyTautulliApiUser + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DEFAULT_NAME, DOMAIN +from .coordinator import TautulliDataUpdateCoordinator + + +class TautulliEntity(CoordinatorEntity[TautulliDataUpdateCoordinator]): + """Defines a base Tautulli entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: TautulliDataUpdateCoordinator, + description: EntityDescription, + user: PyTautulliApiUser | None = None, + ) -> None: + """Initialize the Tautulli entity.""" + super().__init__(coordinator) + entry_id = coordinator.config_entry.entry_id + self._attr_unique_id = 
f"{entry_id}_{description.key}" + self.entity_description = description + self.user = user + self._attr_device_info = DeviceInfo( + configuration_url=coordinator.host_configuration.base_url, + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, user.user_id if user else entry_id)}, + manufacturer=DEFAULT_NAME, + name=user.username if user else DEFAULT_NAME, + ) diff --git a/homeassistant/components/tautulli/sensor.py b/homeassistant/components/tautulli/sensor.py index 26b7c602de8..cd21630031a 100644 --- a/homeassistant/components/tautulli/sensor.py +++ b/homeassistant/components/tautulli/sensor.py @@ -26,9 +26,10 @@ from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType -from . import TautulliConfigEntry, TautulliEntity +from . import TautulliConfigEntry from .const import ATTR_TOP_USER, DOMAIN from .coordinator import TautulliDataUpdateCoordinator +from .entity import TautulliEntity def get_top_stats( diff --git a/homeassistant/components/tcp/binary_sensor.py b/homeassistant/components/tcp/binary_sensor.py index 638dfd53de5..13fd0787b5d 100644 --- a/homeassistant/components/tcp/binary_sensor.py +++ b/homeassistant/components/tcp/binary_sensor.py @@ -12,8 +12,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .common import TCP_PLATFORM_SCHEMA, TcpEntity +from .common import TCP_PLATFORM_SCHEMA from .const import CONF_VALUE_ON +from .entity import TcpEntity PLATFORM_SCHEMA: Final = BINARY_SENSOR_PLATFORM_SCHEMA.extend(TCP_PLATFORM_SCHEMA) diff --git a/homeassistant/components/tcp/common.py b/homeassistant/components/tcp/common.py index 263fc416026..a89cd999ddd 100644 --- a/homeassistant/components/tcp/common.py +++ b/homeassistant/components/tcp/common.py @@ -2,10 +2,6 @@ from __future__ import annotations -import logging -import select -import socket -import ssl from typing import Any, Final import voluptuous as vol @@ -21,11 +17,7 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, CONF_VERIFY_SSL, ) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import TemplateError import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.typing import ConfigType from .const import ( CONF_BUFFER_SIZE, @@ -36,10 +28,6 @@ from .const import ( DEFAULT_TIMEOUT, DEFAULT_VERIFY_SSL, ) -from .model import TcpSensorConfig - -_LOGGER: Final = logging.getLogger(__name__) - TCP_PLATFORM_SCHEMA: Final[dict[vol.Marker, Any]] = { vol.Required(CONF_HOST): cv.string, @@ -54,103 +42,3 @@ TCP_PLATFORM_SCHEMA: Final[dict[vol.Marker, Any]] = { vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean, } - - -class TcpEntity(Entity): - """Base entity class for TCP platform.""" - - def __init__(self, hass: HomeAssistant, config: ConfigType) -> None: - """Set all the config values if they exist and get initial state.""" - - self._hass = hass - self._config: TcpSensorConfig = { - CONF_NAME: config[CONF_NAME], - CONF_HOST: config[CONF_HOST], - CONF_PORT: config[CONF_PORT], - CONF_TIMEOUT: config[CONF_TIMEOUT], - CONF_PAYLOAD: config[CONF_PAYLOAD], - CONF_UNIT_OF_MEASUREMENT: config.get(CONF_UNIT_OF_MEASUREMENT), - CONF_VALUE_TEMPLATE: 
config.get(CONF_VALUE_TEMPLATE), - CONF_VALUE_ON: config.get(CONF_VALUE_ON), - CONF_BUFFER_SIZE: config[CONF_BUFFER_SIZE], - CONF_SSL: config[CONF_SSL], - CONF_VERIFY_SSL: config[CONF_VERIFY_SSL], - } - - self._ssl_context: ssl.SSLContext | None = None - if self._config[CONF_SSL]: - self._ssl_context = ssl.create_default_context() - if not self._config[CONF_VERIFY_SSL]: - self._ssl_context.check_hostname = False - self._ssl_context.verify_mode = ssl.CERT_NONE - - self._state: str | None = None - self.update() - - @property - def name(self) -> str: - """Return the name of this sensor.""" - return self._config[CONF_NAME] - - def update(self) -> None: - """Get the latest value for this sensor.""" - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: - sock.settimeout(self._config[CONF_TIMEOUT]) - try: - sock.connect((self._config[CONF_HOST], self._config[CONF_PORT])) - except OSError as err: - _LOGGER.error( - "Unable to connect to %s on port %s: %s", - self._config[CONF_HOST], - self._config[CONF_PORT], - err, - ) - return - - if self._ssl_context is not None: - sock = self._ssl_context.wrap_socket( - sock, server_hostname=self._config[CONF_HOST] - ) - - try: - sock.send(self._config[CONF_PAYLOAD].encode()) - except OSError as err: - _LOGGER.error( - "Unable to send payload %r to %s on port %s: %s", - self._config[CONF_PAYLOAD], - self._config[CONF_HOST], - self._config[CONF_PORT], - err, - ) - return - - readable, _, _ = select.select([sock], [], [], self._config[CONF_TIMEOUT]) - if not readable: - _LOGGER.warning( - ( - "Timeout (%s second(s)) waiting for a response after " - "sending %r to %s on port %s" - ), - self._config[CONF_TIMEOUT], - self._config[CONF_PAYLOAD], - self._config[CONF_HOST], - self._config[CONF_PORT], - ) - return - - value = sock.recv(self._config[CONF_BUFFER_SIZE]).decode() - - value_template = self._config[CONF_VALUE_TEMPLATE] - if value_template is not None: - try: - self._state = value_template.render(parse_result=False, value=value) - except TemplateError: - _LOGGER.error( - "Unable to render template of %r with value: %r", - self._config[CONF_VALUE_TEMPLATE], - value, - ) - return - return - - self._state = value diff --git a/homeassistant/components/tcp/entity.py b/homeassistant/components/tcp/entity.py new file mode 100644 index 00000000000..eaf5cb6963e --- /dev/null +++ b/homeassistant/components/tcp/entity.py @@ -0,0 +1,130 @@ +"""Common code for TCP component.""" + +from __future__ import annotations + +import logging +import select +import socket +import ssl +from typing import Final + +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PAYLOAD, + CONF_PORT, + CONF_SSL, + CONF_TIMEOUT, + CONF_UNIT_OF_MEASUREMENT, + CONF_VALUE_TEMPLATE, + CONF_VERIFY_SSL, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import TemplateError +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.typing import ConfigType + +from .const import CONF_BUFFER_SIZE, CONF_VALUE_ON +from .model import TcpSensorConfig + +_LOGGER: Final = logging.getLogger(__name__) + + +class TcpEntity(Entity): + """Base entity class for TCP platform.""" + + def __init__(self, hass: HomeAssistant, config: ConfigType) -> None: + """Set all the config values if they exist and get initial state.""" + + self._hass = hass + self._config: TcpSensorConfig = { + CONF_NAME: config[CONF_NAME], + CONF_HOST: config[CONF_HOST], + CONF_PORT: config[CONF_PORT], + CONF_TIMEOUT: config[CONF_TIMEOUT], + CONF_PAYLOAD: config[CONF_PAYLOAD], 
+ CONF_UNIT_OF_MEASUREMENT: config.get(CONF_UNIT_OF_MEASUREMENT), + CONF_VALUE_TEMPLATE: config.get(CONF_VALUE_TEMPLATE), + CONF_VALUE_ON: config.get(CONF_VALUE_ON), + CONF_BUFFER_SIZE: config[CONF_BUFFER_SIZE], + CONF_SSL: config[CONF_SSL], + CONF_VERIFY_SSL: config[CONF_VERIFY_SSL], + } + + self._ssl_context: ssl.SSLContext | None = None + if self._config[CONF_SSL]: + self._ssl_context = ssl.create_default_context() + if not self._config[CONF_VERIFY_SSL]: + self._ssl_context.check_hostname = False + self._ssl_context.verify_mode = ssl.CERT_NONE + + self._state: str | None = None + self.update() + + @property + def name(self) -> str: + """Return the name of this sensor.""" + return self._config[CONF_NAME] + + def update(self) -> None: + """Get the latest value for this sensor.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.settimeout(self._config[CONF_TIMEOUT]) + try: + sock.connect((self._config[CONF_HOST], self._config[CONF_PORT])) + except OSError as err: + _LOGGER.error( + "Unable to connect to %s on port %s: %s", + self._config[CONF_HOST], + self._config[CONF_PORT], + err, + ) + return + + if self._ssl_context is not None: + sock = self._ssl_context.wrap_socket( + sock, server_hostname=self._config[CONF_HOST] + ) + + try: + sock.send(self._config[CONF_PAYLOAD].encode()) + except OSError as err: + _LOGGER.error( + "Unable to send payload %r to %s on port %s: %s", + self._config[CONF_PAYLOAD], + self._config[CONF_HOST], + self._config[CONF_PORT], + err, + ) + return + + readable, _, _ = select.select([sock], [], [], self._config[CONF_TIMEOUT]) + if not readable: + _LOGGER.warning( + ( + "Timeout (%s second(s)) waiting for a response after " + "sending %r to %s on port %s" + ), + self._config[CONF_TIMEOUT], + self._config[CONF_PAYLOAD], + self._config[CONF_HOST], + self._config[CONF_PORT], + ) + return + + value = sock.recv(self._config[CONF_BUFFER_SIZE]).decode() + + value_template = self._config[CONF_VALUE_TEMPLATE] + if value_template is not None: + try: + self._state = value_template.render(parse_result=False, value=value) + except TemplateError: + _LOGGER.error( + "Unable to render template of %r with value: %r", + self._config[CONF_VALUE_TEMPLATE], + value, + ) + return + return + + self._state = value diff --git a/homeassistant/components/tcp/manifest.json b/homeassistant/components/tcp/manifest.json index e15200f49f8..7eacff6c50a 100644 --- a/homeassistant/components/tcp/manifest.json +++ b/homeassistant/components/tcp/manifest.json @@ -3,5 +3,6 @@ "name": "TCP", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/tcp", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/tcp/sensor.py b/homeassistant/components/tcp/sensor.py index a3bd4b2c619..1d53b21bc2e 100644 --- a/homeassistant/components/tcp/sensor.py +++ b/homeassistant/components/tcp/sensor.py @@ -13,7 +13,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType -from .common import TCP_PLATFORM_SCHEMA, TcpEntity +from .common import TCP_PLATFORM_SCHEMA +from .entity import TcpEntity PLATFORM_SCHEMA: Final = SENSOR_PLATFORM_SCHEMA.extend(TCP_PLATFORM_SCHEMA) diff --git a/homeassistant/components/technove/manifest.json b/homeassistant/components/technove/manifest.json index ae0e491235f..722aa4004e1 100644 --- 
a/homeassistant/components/technove/manifest.json +++ b/homeassistant/components/technove/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/technove", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "platinum", "requirements": ["python-technove==1.3.1"], "zeroconf": ["_technove-stations._tcp.local."] } diff --git a/homeassistant/components/technove/strings.json b/homeassistant/components/technove/strings.json index 06c93939db8..7175b7c2de5 100644 --- a/homeassistant/components/technove/strings.json +++ b/homeassistant/components/technove/strings.json @@ -93,7 +93,7 @@ }, "issues": { "deprecated_entity_is_session_active": { - "title": "The TechnoVE `{sensor_name}` binary sensor is deprecated", + "title": "The TechnoVE {sensor_name} binary sensor is deprecated", "description": "`{entity}` is deprecated.\nPlease update your automations and scripts to replace the binary sensor entity with the newly added switch entity.\nWhen you are done migrating you can disable `{entity}`." } } diff --git a/homeassistant/components/ted5000/manifest.json b/homeassistant/components/ted5000/manifest.json index b2aa68f884b..3e28d963957 100644 --- a/homeassistant/components/ted5000/manifest.json +++ b/homeassistant/components/ted5000/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ted5000", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["xmltodict==0.13.0"] } diff --git a/homeassistant/components/ted5000/sensor.py b/homeassistant/components/ted5000/sensor.py index 68f4520a7e3..26f469349b4 100644 --- a/homeassistant/components/ted5000/sensor.py +++ b/homeassistant/components/ted5000/sensor.py @@ -136,8 +136,8 @@ class Ted5000Gateway: mtus = int(doc["LiveData"]["System"]["NumberMTU"]) for mtu in range(1, mtus + 1): - power = int(doc["LiveData"]["Power"]["MTU%d" % mtu]["PowerNow"]) - voltage = int(doc["LiveData"]["Voltage"]["MTU%d" % mtu]["VoltageNow"]) + power = int(doc["LiveData"]["Power"][f"MTU{mtu}"]["PowerNow"]) + voltage = int(doc["LiveData"]["Voltage"][f"MTU{mtu}"]["VoltageNow"]) self.data[mtu] = { UnitOfPower.WATT: power, diff --git a/homeassistant/components/tedee/__init__.py b/homeassistant/components/tedee/__init__.py index a1b87cf13a4..95348053805 100644 --- a/homeassistant/components/tedee/__init__.py +++ b/homeassistant/components/tedee/__init__.py @@ -7,7 +7,7 @@ from typing import Any from aiohttp.hdrs import METH_POST from aiohttp.web import Request, Response -from pytedee_async.exception import TedeeDataUpdateException, TedeeWebhookException +from aiotedee.exception import TedeeDataUpdateException, TedeeWebhookException from homeassistant.components.http import HomeAssistantView from homeassistant.components.webhook import ( @@ -16,14 +16,13 @@ from homeassistant.components.webhook import ( async_register as webhook_register, async_unregister as webhook_unregister, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_WEBHOOK_ID, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.network import get_url from .const import DOMAIN, NAME -from .coordinator import TedeeApiCoordinator +from .coordinator import TedeeApiCoordinator, TedeeConfigEntry PLATFORMS = [ Platform.BINARY_SENSOR, @@ -33,13 +32,11 @@ PLATFORMS = [ _LOGGER = logging.getLogger(__name__) -type TedeeConfigEntry = 
ConfigEntry[TedeeApiCoordinator] - async def async_setup_entry(hass: HomeAssistant, entry: TedeeConfigEntry) -> bool: """Integration setup.""" - coordinator = TedeeApiCoordinator(hass) + coordinator = TedeeApiCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() @@ -101,7 +98,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TedeeConfigEntry) -> boo return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TedeeConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @@ -133,7 +130,9 @@ def get_webhook_handler( return async_webhook_handler -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, config_entry: TedeeConfigEntry +) -> bool: """Migrate old entry.""" if config_entry.version > 1: # This means the user has downgraded from a future version diff --git a/homeassistant/components/tedee/binary_sensor.py b/homeassistant/components/tedee/binary_sensor.py index 3a7d1a12f2e..94d3f0b6831 100644 --- a/homeassistant/components/tedee/binary_sensor.py +++ b/homeassistant/components/tedee/binary_sensor.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from pytedee_async import TedeeLock -from pytedee_async.lock import TedeeLockState +from aiotedee import TedeeLock +from aiotedee.lock import TedeeLockState from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, @@ -15,9 +15,12 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import TedeeConfigEntry +from .coordinator import TedeeConfigEntry from .entity import TedeeDescriptionEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class TedeeBinarySensorEntityDescription( diff --git a/homeassistant/components/tedee/config_flow.py b/homeassistant/components/tedee/config_flow.py index b3088bfa2cf..422d818d1b5 100644 --- a/homeassistant/components/tedee/config_flow.py +++ b/homeassistant/components/tedee/config_flow.py @@ -4,7 +4,7 @@ from collections.abc import Mapping import logging from typing import Any -from pytedee_async import ( +from aiotedee import ( TedeeAuthException, TedeeClient, TedeeClientException, @@ -14,7 +14,12 @@ from pytedee_async import ( import voluptuous as vol from homeassistant.components.webhook import async_generate_id as webhook_generate_id -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -29,9 +34,6 @@ class TedeeConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 MINOR_VERSION = 2 - reauth_entry: ConfigEntry | None = None - reconfigure_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -39,8 +41,8 @@ class TedeeConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - if self.reauth_entry: - host = self.reauth_entry.data[CONF_HOST] + if self.source == SOURCE_REAUTH: + host = self._get_reauth_entry().data[CONF_HOST] else: host = user_input[CONF_HOST] local_access_token = user_input[CONF_LOCAL_ACCESS_TOKEN] @@ -59,19 +61,17 @@ class TedeeConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.error("Error during local bridge discovery: %s", exc) errors["base"] = "cannot_connect" else: - if self.reauth_entry: - return self.async_update_reload_and_abort( - self.reauth_entry, - data={**self.reauth_entry.data, **user_input}, - reason="reauth_successful", - ) - if self.reconfigure_entry: - return self.async_update_reload_and_abort( - self.reconfigure_entry, - data={**self.reconfigure_entry.data, **user_input}, - reason="reconfigure_successful", - ) await self.async_set_unique_id(local_bridge.serial) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), data_updates=user_input + ) self._abort_if_unique_id_configured() return self.async_create_entry( title=NAME, @@ -97,17 +97,12 @@ class TedeeConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self.reauth_entry - if not user_input: return self.async_show_form( step_id="reauth_confirm", @@ -115,39 +110,7 @@ class TedeeConfigFlow(ConfigFlow, 
domain=DOMAIN): { vol.Required( CONF_LOCAL_ACCESS_TOKEN, - default=self.reauth_entry.data[CONF_LOCAL_ACCESS_TOKEN], - ): str, - } - ), - ) - return await self.async_step_user(user_input) - - async def async_step_reconfigure( - self, entry_data: Mapping[str, Any] - ) -> ConfigFlowResult: - """Perform a reconfiguration.""" - self.reconfigure_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Add reconfigure step to allow to reconfigure a config entry.""" - assert self.reconfigure_entry - - if not user_input: - return self.async_show_form( - step_id="reconfigure_confirm", - data_schema=vol.Schema( - { - vol.Required( - CONF_HOST, default=self.reconfigure_entry.data[CONF_HOST] - ): str, - vol.Required( - CONF_LOCAL_ACCESS_TOKEN, - default=self.reconfigure_entry.data[ + default=self._get_reauth_entry().data[ CONF_LOCAL_ACCESS_TOKEN ], ): str, @@ -155,3 +118,25 @@ class TedeeConfigFlow(ConfigFlow, domain=DOMAIN): ), ) return await self.async_step_user(user_input) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Perform a reconfiguration.""" + if not user_input: + reconfigure_entry = self._get_reconfigure_entry() + return self.async_show_form( + step_id="reconfigure", + data_schema=vol.Schema( + { + vol.Required( + CONF_HOST, default=reconfigure_entry.data[CONF_HOST] + ): str, + vol.Required( + CONF_LOCAL_ACCESS_TOKEN, + default=reconfigure_entry.data[CONF_LOCAL_ACCESS_TOKEN], + ): str, + } + ), + ) + return await self.async_step_user(user_input) diff --git a/homeassistant/components/tedee/coordinator.py b/homeassistant/components/tedee/coordinator.py index 51dc6a57d90..4012b6d07c5 100644 --- a/homeassistant/components/tedee/coordinator.py +++ b/homeassistant/components/tedee/coordinator.py @@ -1,12 +1,14 @@ """Coordinator for Tedee locks.""" +from __future__ import annotations + from collections.abc import Awaitable, Callable from datetime import timedelta import logging import time from typing import Any -from pytedee_async import ( +from aiotedee import ( TedeeClient, TedeeClientException, TedeeDataUpdateException, @@ -14,7 +16,7 @@ from pytedee_async import ( TedeeLock, TedeeWebhookException, ) -from pytedee_async.bridge import TedeeBridge +from aiotedee.bridge import TedeeBridge from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -31,22 +33,25 @@ GET_LOCKS_INTERVAL_SECONDS = 3600 _LOGGER = logging.getLogger(__name__) +type TedeeConfigEntry = ConfigEntry[TedeeApiCoordinator] + class TedeeApiCoordinator(DataUpdateCoordinator[dict[int, TedeeLock]]): """Class to handle fetching data from the tedee API centrally.""" - config_entry: ConfigEntry + config_entry: TedeeConfigEntry + bridge: TedeeBridge - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, entry: TedeeConfigEntry) -> None: """Initialize coordinator.""" super().__init__( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_interval=SCAN_INTERVAL, ) - self._bridge: TedeeBridge | None = None self.tedee_client = TedeeClient( local_token=self.config_entry.data[CONF_LOCAL_ACCESS_TOKEN], local_ip=self.config_entry.data[CONF_HOST], @@ -58,21 +63,17 @@ class TedeeApiCoordinator(DataUpdateCoordinator[dict[int, TedeeLock]]): self.new_lock_callbacks: list[Callable[[int], None]] = [] 
self.tedee_webhook_id: int | None = None - @property - def bridge(self) -> TedeeBridge: - """Return bridge.""" - assert self._bridge - return self._bridge + async def _async_setup(self) -> None: + """Set up the coordinator.""" + + async def _async_get_bridge() -> None: + self.bridge = await self.tedee_client.get_local_bridge() + + _LOGGER.debug("Update coordinator: Getting bridge from API") + await self._async_update(_async_get_bridge) async def _async_update_data(self) -> dict[int, TedeeLock]: """Fetch data from API endpoint.""" - if self._bridge is None: - - async def _async_get_bridge() -> None: - self._bridge = await self.tedee_client.get_local_bridge() - - _LOGGER.debug("Update coordinator: Getting bridge from API") - await self._async_update(_async_get_bridge) _LOGGER.debug("Update coordinator: Getting locks from API") # once every hours get all lock details, otherwise use the sync endpoint @@ -98,14 +99,19 @@ class TedeeApiCoordinator(DataUpdateCoordinator[dict[int, TedeeLock]]): await update_fn() except TedeeLocalAuthException as ex: raise ConfigEntryAuthFailed( - "Authentication failed. Local access token is invalid" + translation_domain=DOMAIN, + translation_key="authentication_failed", + ) from ex except TedeeDataUpdateException as ex: _LOGGER.debug("Error while updating data: %s", str(ex)) - raise UpdateFailed(f"Error while updating data: {ex!s}") from ex + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="update_failed" + ) from ex except (TedeeClientException, TimeoutError) as ex: - raise UpdateFailed(f"Querying API failed. Error: {ex!s}") from ex + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="api_error" + ) from ex def webhook_received(self, message: dict[str, Any]) -> None: """Handle webhook message.""" diff --git a/homeassistant/components/tedee/entity.py b/homeassistant/components/tedee/entity.py index 59e3354aa1a..96cc6f2b3f5 100644 --- a/homeassistant/components/tedee/entity.py +++ b/homeassistant/components/tedee/entity.py @@ -1,6 +1,6 @@ """Bases for Tedee entities.""" -from pytedee_async.lock import TedeeLock +from aiotedee.lock import TedeeLock from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo @@ -32,6 +32,7 @@ class TedeeEntity(CoordinatorEntity[TedeeApiCoordinator]): name=lock.lock_name, manufacturer="Tedee", model=lock.lock_type, + model_id=lock.lock_type, via_device=(DOMAIN, coordinator.bridge.serial), ) diff --git a/homeassistant/components/tedee/lock.py b/homeassistant/components/tedee/lock.py index 8d5fa028e12..38df85a9cdb 100644 --- a/homeassistant/components/tedee/lock.py +++ b/homeassistant/components/tedee/lock.py @@ -2,17 +2,19 @@ from typing import Any -from pytedee_async import TedeeClientException, TedeeLock, TedeeLockState +from aiotedee import TedeeClientException, TedeeLock, TedeeLockState from homeassistant.components.lock import LockEntity, LockEntityFeature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .
import TedeeConfigEntry -from .coordinator import TedeeApiCoordinator +from .const import DOMAIN +from .coordinator import TedeeApiCoordinator, TedeeConfigEntry from .entity import TedeeEntity +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, @@ -108,7 +110,9 @@ class TedeeLockEntity(TedeeEntity, LockEntity): await self.coordinator.async_request_refresh() except (TedeeClientException, Exception) as ex: raise HomeAssistantError( - f"Failed to unlock the door. Lock {self._lock.lock_id}" + translation_domain=DOMAIN, + translation_key="unlock_failed", + translation_placeholders={"lock_id": str(self._lock.lock_id)}, ) from ex async def async_lock(self, **kwargs: Any) -> None: @@ -121,7 +125,9 @@ class TedeeLockEntity(TedeeEntity, LockEntity): await self.coordinator.async_request_refresh() except (TedeeClientException, Exception) as ex: raise HomeAssistantError( - f"Failed to lock the door. Lock {self._lock.lock_id}" + translation_domain=DOMAIN, + translation_key="lock_failed", + translation_placeholders={"lock_id": str(self._lock.lock_id)}, ) from ex @@ -143,5 +149,7 @@ class TedeeLockWithLatchEntity(TedeeLockEntity): await self.coordinator.async_request_refresh() except (TedeeClientException, Exception) as ex: raise HomeAssistantError( - f"Failed to unlatch the door. Lock {self._lock.lock_id}" + translation_domain=DOMAIN, + translation_key="open_failed", + translation_placeholders={"lock_id": str(self._lock.lock_id)}, ) from ex diff --git a/homeassistant/components/tedee/manifest.json b/homeassistant/components/tedee/manifest.json index 4f071267a25..bca51f08f93 100644 --- a/homeassistant/components/tedee/manifest.json +++ b/homeassistant/components/tedee/manifest.json @@ -6,7 +6,7 @@ "dependencies": ["http", "webhook"], "documentation": "https://www.home-assistant.io/integrations/tedee", "iot_class": "local_push", - "loggers": ["pytedee_async"], + "loggers": ["aiotedee"], "quality_scale": "platinum", - "requirements": ["pytedee-async==0.2.20"] + "requirements": ["aiotedee==0.2.20"] } diff --git a/homeassistant/components/tedee/quality_scale.yaml b/homeassistant/components/tedee/quality_scale.yaml new file mode 100644 index 00000000000..974c8f82ec9 --- /dev/null +++ b/homeassistant/components/tedee/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + No explicit event subscriptions + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + Options flow not documented, doesn't have one + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: done + comment: | + Handled by coordinator + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + No discovery + discovery: + status: 
exempt + comment: | + No discovery supported atm + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + Currently no repairs/issues + stale-devices: done + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/tedee/sensor.py b/homeassistant/components/tedee/sensor.py index c7d14af1f31..d61e7360dc4 100644 --- a/homeassistant/components/tedee/sensor.py +++ b/homeassistant/components/tedee/sensor.py @@ -3,7 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass -from pytedee_async import TedeeLock +from aiotedee import TedeeLock from homeassistant.components.sensor import ( SensorDeviceClass, @@ -15,9 +15,12 @@ from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import TedeeConfigEntry +from .coordinator import TedeeConfigEntry from .entity import TedeeDescriptionEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class TedeeSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/tedee/strings.json b/homeassistant/components/tedee/strings.json index 0668d1370b4..78cacd706d3 100644 --- a/homeassistant/components/tedee/strings.json +++ b/homeassistant/components/tedee/strings.json @@ -22,7 +22,7 @@ "local_access_token": "[%key:component::tedee::config::step::user::data_description::local_access_token%]" } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Reconfigure Tedee", "description": "Update the settings of this integration.", "data": { @@ -38,7 +38,8 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "You selected a different bridge than the one this config entry was configured with, this is not allowed." }, "error": { "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]", @@ -63,5 +64,25 @@ "name": "Pullspring duration" } } + }, + "exceptions": { + "api_error": { + "message": "Error while communicating with the API" + }, + "authentication_failed": { + "message": "Authentication failed. Local access token is invalid" + }, + "lock_failed": { + "message": "Failed to lock the door. Lock {lock_id}" + }, + "unlock_failed": { + "message": "Failed to unlock the door. Lock {lock_id}" + }, + "update_failed": { + "message": "Error while updating data" + }, + "open_failed": { + "message": "Failed to unlatch the door. 
Lock {lock_id}" + } } } diff --git a/homeassistant/components/telegram/icons.json b/homeassistant/components/telegram/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/telegram/icons.json +++ b/homeassistant/components/telegram/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/telegram/manifest.json b/homeassistant/components/telegram/manifest.json index ce4457b3129..9022f357970 100644 --- a/homeassistant/components/telegram/manifest.json +++ b/homeassistant/components/telegram/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["telegram_bot"], "documentation": "https://www.home-assistant.io/integrations/telegram", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index 9d1a5398055..b9a032d7f28 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -37,10 +37,10 @@ from homeassistant.const import ( HTTP_DIGEST_AUTHENTICATION, ) from homeassistant.core import Context, HomeAssistant, ServiceCall -from homeassistant.exceptions import TemplateError from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_loaded_integration +from homeassistant.util.ssl import get_default_context, get_default_no_verify_context _LOGGER = logging.getLogger(__name__) @@ -174,14 +174,14 @@ BASE_SERVICE_SCHEMA = vol.Schema( ) SERVICE_SCHEMA_SEND_MESSAGE = BASE_SERVICE_SCHEMA.extend( - {vol.Required(ATTR_MESSAGE): cv.template, vol.Optional(ATTR_TITLE): cv.template} + {vol.Required(ATTR_MESSAGE): cv.string, vol.Optional(ATTR_TITLE): cv.string} ) SERVICE_SCHEMA_SEND_FILE = BASE_SERVICE_SCHEMA.extend( { - vol.Optional(ATTR_URL): cv.template, - vol.Optional(ATTR_FILE): cv.template, - vol.Optional(ATTR_CAPTION): cv.template, + vol.Optional(ATTR_URL): cv.string, + vol.Optional(ATTR_FILE): cv.string, + vol.Optional(ATTR_CAPTION): cv.string, vol.Optional(ATTR_USERNAME): cv.string, vol.Optional(ATTR_PASSWORD): cv.string, vol.Optional(ATTR_AUTHENTICATION): cv.string, @@ -195,8 +195,8 @@ SERVICE_SCHEMA_SEND_STICKER = SERVICE_SCHEMA_SEND_FILE.extend( SERVICE_SCHEMA_SEND_LOCATION = BASE_SERVICE_SCHEMA.extend( { - vol.Required(ATTR_LONGITUDE): cv.template, - vol.Required(ATTR_LATITUDE): cv.template, + vol.Required(ATTR_LONGITUDE): cv.string, + vol.Required(ATTR_LATITUDE): cv.string, } ) @@ -228,7 +228,7 @@ SERVICE_SCHEMA_EDIT_CAPTION = vol.Schema( cv.positive_int, vol.All(cv.string, "last") ), vol.Required(ATTR_CHAT_ID): vol.Coerce(int), - vol.Required(ATTR_CAPTION): cv.template, + vol.Required(ATTR_CAPTION): cv.string, vol.Optional(ATTR_KEYBOARD_INLINE): cv.ensure_list, }, extra=vol.ALLOW_EXTRA, @@ -247,7 +247,7 @@ SERVICE_SCHEMA_EDIT_REPLYMARKUP = vol.Schema( SERVICE_SCHEMA_ANSWER_CALLBACK_QUERY = vol.Schema( { - vol.Required(ATTR_MESSAGE): cv.template, + vol.Required(ATTR_MESSAGE): cv.string, vol.Required(ATTR_CALLBACK_QUERY_ID): vol.Coerce(int), vol.Optional(ATTR_SHOW_ALERT): cv.boolean, }, @@ -378,12 +378,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: for p_config in domain_config: # Each platform config gets its own bot - bot = initialize_bot(hass, p_config) + bot = await 
hass.async_add_executor_job(initialize_bot, hass, p_config) p_type: str = p_config[CONF_PLATFORM] platform = platforms[p_type] - _LOGGER.info("Setting up %s.%s", DOMAIN, p_type) + _LOGGER.debug("Setting up %s.%s", DOMAIN, p_type) try: receiver_service = await platform.async_setup_platform(hass, bot, p_config) if receiver_service is False: @@ -401,38 +401,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_send_telegram_message(service: ServiceCall) -> None: """Handle sending Telegram Bot message service calls.""" - def _render_template_attr(data, attribute): - if attribute_templ := data.get(attribute): - if any( - isinstance(attribute_templ, vtype) for vtype in (float, int, str) - ): - data[attribute] = attribute_templ - else: - try: - data[attribute] = attribute_templ.async_render( - parse_result=False - ) - except TemplateError as exc: - _LOGGER.error( - "TemplateError in %s: %s -> %s", - attribute, - attribute_templ.template, - exc, - ) - data[attribute] = attribute_templ.template - msgtype = service.service kwargs = dict(service.data) - for attribute in ( - ATTR_MESSAGE, - ATTR_TITLE, - ATTR_URL, - ATTR_FILE, - ATTR_CAPTION, - ATTR_LONGITUDE, - ATTR_LATITUDE, - ): - _render_template_attr(kwargs, attribute) _LOGGER.debug("New telegram message %s: %s", msgtype, kwargs) if msgtype == SERVICE_SEND_MESSAGE: @@ -486,7 +456,7 @@ def initialize_bot(hass: HomeAssistant, p_config: dict) -> Bot: # Auth can actually be stuffed into the URL, but the docs have previously # indicated to put them here. auth = proxy_params.pop("username"), proxy_params.pop("password") - ir.async_create_issue( + ir.create_issue( hass, DOMAIN, "proxy_params_auth_deprecation", @@ -503,7 +473,7 @@ def initialize_bot(hass: HomeAssistant, p_config: dict) -> Bot: learn_more_url="https://github.com/home-assistant/core/pull/112778", ) else: - ir.async_create_issue( + ir.create_issue( hass, DOMAIN, "proxy_params_deprecation", @@ -852,7 +822,11 @@ class TelegramNotificationService: username=kwargs.get(ATTR_USERNAME), password=kwargs.get(ATTR_PASSWORD), authentication=kwargs.get(ATTR_AUTHENTICATION), - verify_ssl=kwargs.get(ATTR_VERIFY_SSL), + verify_ssl=( + get_default_context() + if kwargs.get(ATTR_VERIFY_SSL, False) + else get_default_no_verify_context() + ), ) if file_content: diff --git a/homeassistant/components/telegram_bot/icons.json b/homeassistant/components/telegram_bot/icons.json index f410d387435..0acf20d561a 100644 --- a/homeassistant/components/telegram_bot/icons.json +++ b/homeassistant/components/telegram_bot/icons.json @@ -1,18 +1,46 @@ { "services": { - "send_message": "mdi:send", - "send_photo": "mdi:camera", - "send_sticker": "mdi:sticker", - "send_animation": "mdi:animation", - "send_video": "mdi:video", - "send_voice": "mdi:microphone", - "send_document": "mdi:file-document", - "send_location": "mdi:map-marker", - "send_poll": "mdi:poll", - "edit_message": "mdi:pencil", - "edit_caption": "mdi:pencil", - "edit_replymarkup": "mdi:pencil", - "answer_callback_query": "mdi:check", - "delete_message": "mdi:delete" + "send_message": { + "service": "mdi:send" + }, + "send_photo": { + "service": "mdi:camera" + }, + "send_sticker": { + "service": "mdi:sticker" + }, + "send_animation": { + "service": "mdi:animation" + }, + "send_video": { + "service": "mdi:video" + }, + "send_voice": { + "service": "mdi:microphone" + }, + "send_document": { + "service": "mdi:file-document" + }, + "send_location": { + "service": "mdi:map-marker" + }, + "send_poll": { + "service": "mdi:poll" + }, 
+ "edit_message": { + "service": "mdi:pencil" + }, + "edit_caption": { + "service": "mdi:pencil" + }, + "edit_replymarkup": { + "service": "mdi:pencil" + }, + "answer_callback_query": { + "service": "mdi:check" + }, + "delete_message": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/telegram_bot/manifest.json b/homeassistant/components/telegram_bot/manifest.json index c176e6c2cdf..3474d39b1d6 100644 --- a/homeassistant/components/telegram_bot/manifest.json +++ b/homeassistant/components/telegram_bot/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/telegram_bot", "iot_class": "cloud_push", "loggers": ["telegram"], - "requirements": ["python-telegram-bot[socks]==21.0.1"] + "quality_scale": "legacy", + "requirements": ["python-telegram-bot[socks]==21.5"] } diff --git a/homeassistant/components/telegram_bot/polling.py b/homeassistant/components/telegram_bot/polling.py index 45d2ee65b45..bee7f752f6c 100644 --- a/homeassistant/components/telegram_bot/polling.py +++ b/homeassistant/components/telegram_bot/polling.py @@ -25,14 +25,22 @@ async def async_setup_platform(hass, bot, config): async def process_error(update: Update, context: CallbackContext) -> None: """Telegram bot error handler.""" + if context.error: + error_callback(context.error, update) + + +def error_callback(error: Exception, update: Update | None = None) -> None: + """Log the error.""" try: - if context.error: - raise context.error + raise error except (TimedOut, NetworkError, RetryAfter): # Long polling timeout or connection problem. Nothing serious. pass except TelegramError: - _LOGGER.error('Update "%s" caused error: "%s"', update, context.error) + if update is not None: + _LOGGER.error('Update "%s" caused error: "%s"', update, error) + else: + _LOGGER.error("%s: %s", error.__class__.__name__, error) class PollBot(BaseTelegramBotEntity): @@ -53,7 +61,7 @@ class PollBot(BaseTelegramBotEntity): """Start the polling task.""" _LOGGER.debug("Starting polling") await self.application.initialize() - await self.application.updater.start_polling() + await self.application.updater.start_polling(error_callback=error_callback) await self.application.start() async def stop_polling(self, event=None): diff --git a/homeassistant/components/telegram_bot/webhooks.py b/homeassistant/components/telegram_bot/webhooks.py index 41835f955ed..3eb3c71a0bb 100644 --- a/homeassistant/components/telegram_bot/webhooks.py +++ b/homeassistant/components/telegram_bot/webhooks.py @@ -112,7 +112,7 @@ class PushBot(BaseTelegramBotEntity): if current_status and current_status["url"] != self.webhook_url: result = await self._try_to_set_webhook() if result: - _LOGGER.info("Set new telegram webhook %s", self.webhook_url) + _LOGGER.debug("Set new telegram webhook %s", self.webhook_url) else: _LOGGER.error("Set telegram webhook failed %s", self.webhook_url) return False diff --git a/homeassistant/components/tellduslive/binary_sensor.py b/homeassistant/components/tellduslive/binary_sensor.py index 1eead7b55a5..33f936beb54 100644 --- a/homeassistant/components/tellduslive/binary_sensor.py +++ b/homeassistant/components/tellduslive/binary_sensor.py @@ -7,8 +7,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. 
import tellduslive -from .entry import TelldusLiveEntity +from .const import DOMAIN, TELLDUS_DISCOVERY_NEW +from .entity import TelldusLiveEntity async def async_setup_entry( @@ -20,14 +20,12 @@ async def async_setup_entry( async def async_discover_binary_sensor(device_id): """Discover and add a discovered sensor.""" - client = hass.data[tellduslive.DOMAIN] + client = hass.data[DOMAIN] async_add_entities([TelldusLiveSensor(client, device_id)]) async_dispatcher_connect( hass, - tellduslive.TELLDUS_DISCOVERY_NEW.format( - binary_sensor.DOMAIN, tellduslive.DOMAIN - ), + TELLDUS_DISCOVERY_NEW.format(binary_sensor.DOMAIN, DOMAIN), async_discover_binary_sensor, ) diff --git a/homeassistant/components/tellduslive/config_flow.py b/homeassistant/components/tellduslive/config_flow.py index 6d68c37d821..365a363ca28 100644 --- a/homeassistant/components/tellduslive/config_flow.py +++ b/homeassistant/components/tellduslive/config_flow.py @@ -35,14 +35,15 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _session: Session + def __init__(self) -> None: """Init config flow.""" self._hosts = [CLOUD_NAME] self._host = None - self._session = None self._scan_interval = SCAN_INTERVAL - def _get_auth_url(self): + def _get_auth_url(self) -> str | None: self._session = Session( public_key=PUBLIC_KEY, private_key=NOT_SO_PRIVATE_KEY, @@ -70,7 +71,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_auth(self, user_input=None): + async def async_step_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the submitted configuration.""" errors = {} if user_input is not None: @@ -114,25 +117,28 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_discovery(self, discovery_info): + async def async_step_discovery( + self, + discovery_info: list[str], # type: ignore[override] + ) -> ConfigFlowResult: """Run when a Tellstick is discovered.""" await self._async_handle_discovery_without_unique_id() - _LOGGER.info("Discovered tellstick device: %s", discovery_info) + _LOGGER.debug("Discovered tellstick device: %s", discovery_info) if supports_local_api(discovery_info[1]): - _LOGGER.info("%s support local API", discovery_info[1]) + _LOGGER.debug("%s support local API", discovery_info[1]) self._hosts.append(discovery_info[0]) return await self.async_step_user() - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" if self._async_current_entries(): return self.async_abort(reason="already_setup") - self._scan_interval = user_input[KEY_SCAN_INTERVAL] - if user_input[CONF_HOST] != DOMAIN: - self._hosts.append(user_input[CONF_HOST]) + self._scan_interval = import_data[KEY_SCAN_INTERVAL] + if import_data[CONF_HOST] != DOMAIN: + self._hosts.append(import_data[CONF_HOST]) if not await self.hass.async_add_executor_job( os.path.isfile, self.hass.config.path(TELLDUS_CONFIG_FILE) @@ -144,7 +150,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): ) host = next(iter(conf)) - if user_input[CONF_HOST] != host: + if import_data[CONF_HOST] != host: return await self.async_step_user() host = CLOUD_NAME if host == "tellduslive" else host diff --git a/homeassistant/components/tellduslive/cover.py b/homeassistant/components/tellduslive/cover.py index de962041333..d55a72cd633 100644 --- a/homeassistant/components/tellduslive/cover.py +++ b/homeassistant/components/tellduslive/cover.py @@ -9,9 +9,9 @@ from homeassistant.core import HomeAssistant 
from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import tellduslive from . import TelldusLiveClient -from .entry import TelldusLiveEntity +from .const import DOMAIN, TELLDUS_DISCOVERY_NEW +from .entity import TelldusLiveEntity async def async_setup_entry( @@ -23,12 +23,12 @@ async def async_setup_entry( async def async_discover_cover(device_id): """Discover and add a discovered sensor.""" - client: TelldusLiveClient = hass.data[tellduslive.DOMAIN] + client: TelldusLiveClient = hass.data[DOMAIN] async_add_entities([TelldusLiveCover(client, device_id)]) async_dispatcher_connect( hass, - tellduslive.TELLDUS_DISCOVERY_NEW.format(cover.DOMAIN, tellduslive.DOMAIN), + TELLDUS_DISCOVERY_NEW.format(cover.DOMAIN, DOMAIN), async_discover_cover, ) diff --git a/homeassistant/components/tellduslive/entry.py b/homeassistant/components/tellduslive/entity.py similarity index 100% rename from homeassistant/components/tellduslive/entry.py rename to homeassistant/components/tellduslive/entity.py diff --git a/homeassistant/components/tellduslive/light.py b/homeassistant/components/tellduslive/light.py index 101ccb0dab0..005bf97d8c0 100644 --- a/homeassistant/components/tellduslive/light.py +++ b/homeassistant/components/tellduslive/light.py @@ -10,8 +10,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import tellduslive -from .entry import TelldusLiveEntity +from .const import DOMAIN, TELLDUS_DISCOVERY_NEW +from .entity import TelldusLiveEntity _LOGGER = logging.getLogger(__name__) @@ -25,12 +25,12 @@ async def async_setup_entry( async def async_discover_light(device_id): """Discover and add a discovered sensor.""" - client = hass.data[tellduslive.DOMAIN] + client = hass.data[DOMAIN] async_add_entities([TelldusLiveLight(client, device_id)]) async_dispatcher_connect( hass, - tellduslive.TELLDUS_DISCOVERY_NEW.format(light.DOMAIN, tellduslive.DOMAIN), + TELLDUS_DISCOVERY_NEW.format(light.DOMAIN, DOMAIN), async_discover_light, ) @@ -67,7 +67,7 @@ class TelldusLiveLight(TelldusLiveEntity, LightEntity): brightness = kwargs.get(ATTR_BRIGHTNESS, self._last_brightness) if brightness == 0: fallback_brightness = 100 - _LOGGER.info( + _LOGGER.debug( "Setting brightness to %d%%, because it was 0", fallback_brightness ) brightness = int(fallback_brightness * 255 / 100) diff --git a/homeassistant/components/tellduslive/manifest.json b/homeassistant/components/tellduslive/manifest.json index 929d502971f..4ebf1a334bd 100644 --- a/homeassistant/components/tellduslive/manifest.json +++ b/homeassistant/components/tellduslive/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/tellduslive", "iot_class": "cloud_polling", - "quality_scale": "silver", - "requirements": ["tellduslive==0.10.11"] + "requirements": ["tellduslive==0.10.12"] } diff --git a/homeassistant/components/tellduslive/sensor.py b/homeassistant/components/tellduslive/sensor.py index 36520044101..9bd2b1fe599 100644 --- a/homeassistant/components/tellduslive/sensor.py +++ b/homeassistant/components/tellduslive/sensor.py @@ -25,8 +25,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. 
import tellduslive -from .entry import TelldusLiveEntity +from .const import DOMAIN, TELLDUS_DISCOVERY_NEW +from .entity import TelldusLiveEntity SENSOR_TYPE_TEMPERATURE = "temp" SENSOR_TYPE_HUMIDITY = "humidity" @@ -127,12 +127,12 @@ async def async_setup_entry( async def async_discover_sensor(device_id): """Discover and add a discovered sensor.""" - client = hass.data[tellduslive.DOMAIN] + client = hass.data[DOMAIN] async_add_entities([TelldusLiveSensor(client, device_id)]) async_dispatcher_connect( hass, - tellduslive.TELLDUS_DISCOVERY_NEW.format(sensor.DOMAIN, tellduslive.DOMAIN), + TELLDUS_DISCOVERY_NEW.format(sensor.DOMAIN, DOMAIN), async_discover_sensor, ) @@ -194,4 +194,4 @@ class TelldusLiveSensor(TelldusLiveEntity, SensorEntity): @property def unique_id(self) -> str: """Return a unique ID.""" - return "{}-{}-{}".format(*self._id) + return "-".join(map(str, self._id)) diff --git a/homeassistant/components/tellduslive/strings.json b/homeassistant/components/tellduslive/strings.json index 16c847f0077..e363aced667 100644 --- a/homeassistant/components/tellduslive/strings.json +++ b/homeassistant/components/tellduslive/strings.json @@ -11,7 +11,7 @@ }, "step": { "auth": { - "description": "To link your TelldusLive account:\n 1. Click the link below\n 2. Login to Telldus Live\n 3. Authorize **{app_name}** (click **Yes**).\n 4. Come back here and click **SUBMIT**.\n\n [Link TelldusLive account]({auth_url})", + "description": "To link your TelldusLive account:\n 1. Click the link below\n 2. Login to Telldus Live\n 3. Authorize **{app_name}** (select **Yes**).\n 4. Come back here and select **Submit**.\n\n [Link TelldusLive account]({auth_url})", "title": "Authenticate against TelldusLive" }, "user": { diff --git a/homeassistant/components/tellduslive/switch.py b/homeassistant/components/tellduslive/switch.py index cd28a170442..bd770ab08f5 100644 --- a/homeassistant/components/tellduslive/switch.py +++ b/homeassistant/components/tellduslive/switch.py @@ -9,8 +9,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. 
import tellduslive -from .entry import TelldusLiveEntity +from .const import DOMAIN, TELLDUS_DISCOVERY_NEW +from .entity import TelldusLiveEntity async def async_setup_entry( @@ -22,12 +22,12 @@ async def async_setup_entry( async def async_discover_switch(device_id): """Discover and add a discovered sensor.""" - client = hass.data[tellduslive.DOMAIN] + client = hass.data[DOMAIN] async_add_entities([TelldusLiveSwitch(client, device_id)]) async_dispatcher_connect( hass, - tellduslive.TELLDUS_DISCOVERY_NEW.format(switch.DOMAIN, tellduslive.DOMAIN), + TELLDUS_DISCOVERY_NEW.format(switch.DOMAIN, DOMAIN), async_discover_switch, ) diff --git a/homeassistant/components/tellstick/__init__.py b/homeassistant/components/tellstick/__init__.py index 1a60927e25f..9d120b7aaa8 100644 --- a/homeassistant/components/tellstick/__init__.py +++ b/homeassistant/components/tellstick/__init__.py @@ -1,15 +1,8 @@ """Support for Tellstick.""" import logging -import threading -from tellcore.constants import ( - TELLSTICK_DIM, - TELLSTICK_TURNOFF, - TELLSTICK_TURNON, - TELLSTICK_UP, -) -from tellcore.library import TelldusError +from tellcore.constants import TELLSTICK_DIM, TELLSTICK_UP from tellcore.telldus import AsyncioCallbackDispatcher, TelldusCore from tellcorenet import TellCoreClient import voluptuous as vol @@ -18,29 +11,23 @@ from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType +from .const import ( + ATTR_DISCOVER_CONFIG, + ATTR_DISCOVER_DEVICES, + DATA_TELLSTICK, + DEFAULT_SIGNAL_REPETITIONS, + SIGNAL_TELLCORE_CALLBACK, +) + _LOGGER = logging.getLogger(__name__) -ATTR_DISCOVER_CONFIG = "config" -ATTR_DISCOVER_DEVICES = "devices" CONF_SIGNAL_REPETITIONS = "signal_repetitions" -DEFAULT_SIGNAL_REPETITIONS = 1 DOMAIN = "tellstick" -DATA_TELLSTICK = "tellstick_device" -SIGNAL_TELLCORE_CALLBACK = "tellstick_callback" - -# Use a global tellstick domain lock to avoid getting Tellcore errors when -# calling concurrently. -TELLSTICK_LOCK = threading.RLock() - CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( @@ -64,7 +51,7 @@ def _discover(hass, config, component_name, found_tellcore_devices): if not found_tellcore_devices: return - _LOGGER.info( + _LOGGER.debug( "Discovered %d new %s devices", len(found_tellcore_devices), component_name ) @@ -162,136 +149,3 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, clean_up_callback) return True - - -class TellstickDevice(Entity): - """Representation of a Tellstick device. - - Contains the common logic for all Tellstick devices. 
- """ - - _attr_assumed_state = True - _attr_should_poll = False - - def __init__(self, tellcore_device, signal_repetitions): - """Init the Tellstick device.""" - self._signal_repetitions = signal_repetitions - self._state = None - self._requested_state = None - self._requested_data = None - self._repeats_left = 0 - - # Look up our corresponding tellcore device - self._tellcore_device = tellcore_device - self._attr_name = tellcore_device.name - self._attr_unique_id = tellcore_device.id - - async def async_added_to_hass(self): - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, SIGNAL_TELLCORE_CALLBACK, self.update_from_callback - ) - ) - - @property - def is_on(self): - """Return true if the device is on.""" - return self._state - - def _parse_ha_data(self, kwargs): - """Turn the value from HA into something useful.""" - raise NotImplementedError - - def _parse_tellcore_data(self, tellcore_data): - """Turn the value received from tellcore into something useful.""" - raise NotImplementedError - - def _update_model(self, new_state, data): - """Update the device entity state to match the arguments.""" - raise NotImplementedError - - def _send_device_command(self, requested_state, requested_data): - """Let tellcore update the actual device to the requested state.""" - raise NotImplementedError - - def _send_repeated_command(self): - """Send a tellstick command once and decrease the repeat count.""" - - with TELLSTICK_LOCK: - if self._repeats_left > 0: - self._repeats_left -= 1 - try: - self._send_device_command( - self._requested_state, self._requested_data - ) - except TelldusError as err: - _LOGGER.error(err) - - def _change_device_state(self, new_state, data): - """Turn on or off the device.""" - with TELLSTICK_LOCK: - # Set the requested state and number of repeats before calling - # _send_repeated_command the first time. Subsequent calls will be - # made from the callback. (We don't want to queue a lot of commands - # in case the user toggles the switch the other way before the - # queue is fully processed.) - self._requested_state = new_state - self._requested_data = data - self._repeats_left = self._signal_repetitions - self._send_repeated_command() - - # Sooner or later this will propagate to the model from the - # callback, but for a fluid UI experience update it directly. - self._update_model(new_state, data) - self.schedule_update_ha_state() - - def turn_on(self, **kwargs): - """Turn the switch on.""" - self._change_device_state(True, self._parse_ha_data(kwargs)) - - def turn_off(self, **kwargs): - """Turn the switch off.""" - self._change_device_state(False, None) - - def _update_model_from_command(self, tellcore_command, tellcore_data): - """Update the model, from a sent tellcore command and data.""" - - if tellcore_command not in [TELLSTICK_TURNON, TELLSTICK_TURNOFF, TELLSTICK_DIM]: - _LOGGER.debug("Unhandled tellstick command: %d", tellcore_command) - return - - self._update_model( - tellcore_command != TELLSTICK_TURNOFF, - self._parse_tellcore_data(tellcore_data), - ) - - def update_from_callback(self, tellcore_id, tellcore_command, tellcore_data): - """Handle updates from the tellcore callback.""" - if tellcore_id != self._tellcore_device.id: - return - - self._update_model_from_command(tellcore_command, tellcore_data) - self.schedule_update_ha_state() - - # This is a benign race on _repeats_left -- it's checked with the lock - # in _send_repeated_command. 
- if self._repeats_left > 0: - self._send_repeated_command() - - def _update_from_tellcore(self): - """Read the current state of the device from the tellcore library.""" - - with TELLSTICK_LOCK: - try: - last_command = self._tellcore_device.last_sent_command( - TELLSTICK_TURNON | TELLSTICK_TURNOFF | TELLSTICK_DIM - ) - last_data = self._tellcore_device.last_sent_value() - self._update_model_from_command(last_command, last_data) - except TelldusError as err: - _LOGGER.error(err) - - def update(self): - """Poll the current state of the device.""" - self._update_from_tellcore() diff --git a/homeassistant/components/tellstick/const.py b/homeassistant/components/tellstick/const.py new file mode 100644 index 00000000000..64730a1161d --- /dev/null +++ b/homeassistant/components/tellstick/const.py @@ -0,0 +1,10 @@ +"""Support for Tellstick.""" + +ATTR_DISCOVER_CONFIG = "config" +ATTR_DISCOVER_DEVICES = "devices" + +DATA_TELLSTICK = "tellstick_device" + +DEFAULT_SIGNAL_REPETITIONS = 1 + +SIGNAL_TELLCORE_CALLBACK = "tellstick_callback" diff --git a/homeassistant/components/tellstick/cover.py b/homeassistant/components/tellstick/cover.py index cb49d876e71..255892c1f6c 100644 --- a/homeassistant/components/tellstick/cover.py +++ b/homeassistant/components/tellstick/cover.py @@ -9,13 +9,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ( +from .const import ( ATTR_DISCOVER_CONFIG, ATTR_DISCOVER_DEVICES, DATA_TELLSTICK, DEFAULT_SIGNAL_REPETITIONS, - TellstickDevice, ) +from .entity import TellstickDevice def setup_platform( diff --git a/homeassistant/components/tellstick/entity.py b/homeassistant/components/tellstick/entity.py new file mode 100644 index 00000000000..746c7f4dd4d --- /dev/null +++ b/homeassistant/components/tellstick/entity.py @@ -0,0 +1,151 @@ +"""Support for Tellstick.""" + +import logging +import threading + +from tellcore.constants import TELLSTICK_DIM, TELLSTICK_TURNOFF, TELLSTICK_TURNON +from tellcore.library import TelldusError + +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from .const import SIGNAL_TELLCORE_CALLBACK + +_LOGGER = logging.getLogger(__name__) + +# Use a global tellstick domain lock to avoid getting Tellcore errors when +# calling concurrently. +TELLSTICK_LOCK = threading.RLock() + + +class TellstickDevice(Entity): + """Representation of a Tellstick device. + + Contains the common logic for all Tellstick devices. 
+ """ + + _attr_assumed_state = True + _attr_should_poll = False + + def __init__(self, tellcore_device, signal_repetitions): + """Init the Tellstick device.""" + self._signal_repetitions = signal_repetitions + self._state = None + self._requested_state = None + self._requested_data = None + self._repeats_left = 0 + + # Look up our corresponding tellcore device + self._tellcore_device = tellcore_device + self._attr_name = tellcore_device.name + self._attr_unique_id = tellcore_device.id + + async def async_added_to_hass(self): + """Register callbacks.""" + self.async_on_remove( + async_dispatcher_connect( + self.hass, SIGNAL_TELLCORE_CALLBACK, self.update_from_callback + ) + ) + + @property + def is_on(self): + """Return true if the device is on.""" + return self._state + + def _parse_ha_data(self, kwargs): + """Turn the value from HA into something useful.""" + raise NotImplementedError + + def _parse_tellcore_data(self, tellcore_data): + """Turn the value received from tellcore into something useful.""" + raise NotImplementedError + + def _update_model(self, new_state, data): + """Update the device entity state to match the arguments.""" + raise NotImplementedError + + def _send_device_command(self, requested_state, requested_data): + """Let tellcore update the actual device to the requested state.""" + raise NotImplementedError + + def _send_repeated_command(self): + """Send a tellstick command once and decrease the repeat count.""" + + with TELLSTICK_LOCK: + if self._repeats_left > 0: + self._repeats_left -= 1 + try: + self._send_device_command( + self._requested_state, self._requested_data + ) + except TelldusError as err: + _LOGGER.error(err) + + def _change_device_state(self, new_state, data): + """Turn on or off the device.""" + with TELLSTICK_LOCK: + # Set the requested state and number of repeats before calling + # _send_repeated_command the first time. Subsequent calls will be + # made from the callback. (We don't want to queue a lot of commands + # in case the user toggles the switch the other way before the + # queue is fully processed.) + self._requested_state = new_state + self._requested_data = data + self._repeats_left = self._signal_repetitions + self._send_repeated_command() + + # Sooner or later this will propagate to the model from the + # callback, but for a fluid UI experience update it directly. + self._update_model(new_state, data) + self.schedule_update_ha_state() + + def turn_on(self, **kwargs): + """Turn the switch on.""" + self._change_device_state(True, self._parse_ha_data(kwargs)) + + def turn_off(self, **kwargs): + """Turn the switch off.""" + self._change_device_state(False, None) + + def _update_model_from_command(self, tellcore_command, tellcore_data): + """Update the model, from a sent tellcore command and data.""" + + if tellcore_command not in [TELLSTICK_TURNON, TELLSTICK_TURNOFF, TELLSTICK_DIM]: + _LOGGER.debug("Unhandled tellstick command: %d", tellcore_command) + return + + self._update_model( + tellcore_command != TELLSTICK_TURNOFF, + self._parse_tellcore_data(tellcore_data), + ) + + def update_from_callback(self, tellcore_id, tellcore_command, tellcore_data): + """Handle updates from the tellcore callback.""" + if tellcore_id != self._tellcore_device.id: + return + + self._update_model_from_command(tellcore_command, tellcore_data) + self.schedule_update_ha_state() + + # This is a benign race on _repeats_left -- it's checked with the lock + # in _send_repeated_command. 
+ if self._repeats_left > 0: + self._send_repeated_command() + + def _update_from_tellcore(self): + """Read the current state of the device from the tellcore library.""" + + with TELLSTICK_LOCK: + try: + last_command = self._tellcore_device.last_sent_command( + TELLSTICK_TURNON | TELLSTICK_TURNOFF | TELLSTICK_DIM + ) + last_data = self._tellcore_device.last_sent_value() + self._update_model_from_command(last_command, last_data) + except TelldusError as err: + _LOGGER.error(err) + + def update(self): + """Poll the current state of the device.""" + self._update_from_tellcore() diff --git a/homeassistant/components/tellstick/light.py b/homeassistant/components/tellstick/light.py index acbcf2d6cb5..0b7878cd10e 100644 --- a/homeassistant/components/tellstick/light.py +++ b/homeassistant/components/tellstick/light.py @@ -7,13 +7,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ( +from .const import ( ATTR_DISCOVER_CONFIG, ATTR_DISCOVER_DEVICES, DATA_TELLSTICK, DEFAULT_SIGNAL_REPETITIONS, - TellstickDevice, ) +from .entity import TellstickDevice def setup_platform( diff --git a/homeassistant/components/tellstick/manifest.json b/homeassistant/components/tellstick/manifest.json index c64a51b09e4..40956b06ac6 100644 --- a/homeassistant/components/tellstick/manifest.json +++ b/homeassistant/components/tellstick/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tellstick", "iot_class": "assumed_state", "loggers": ["tellcore"], + "quality_scale": "legacy", "requirements": ["tellcore-net==0.4", "tellcore-py==1.1.2"] } diff --git a/homeassistant/components/tellstick/switch.py b/homeassistant/components/tellstick/switch.py index e3eb4825d91..fc9a44ef66c 100644 --- a/homeassistant/components/tellstick/switch.py +++ b/homeassistant/components/tellstick/switch.py @@ -7,13 +7,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import ( +from .const import ( ATTR_DISCOVER_CONFIG, ATTR_DISCOVER_DEVICES, DATA_TELLSTICK, DEFAULT_SIGNAL_REPETITIONS, - TellstickDevice, ) +from .entity import TellstickDevice def setup_platform( diff --git a/homeassistant/components/telnet/manifest.json b/homeassistant/components/telnet/manifest.json index 48a79afc528..68353104839 100644 --- a/homeassistant/components/telnet/manifest.json +++ b/homeassistant/components/telnet/manifest.json @@ -3,5 +3,6 @@ "name": "Telnet", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/telnet", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/temper/manifest.json b/homeassistant/components/temper/manifest.json index dbad8827877..ad1fcd40525 100644 --- a/homeassistant/components/temper/manifest.json +++ b/homeassistant/components/temper/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/temper", "iot_class": "local_polling", "loggers": ["pyusb", "temperusb"], + "quality_scale": "legacy", "requirements": ["temperusb==1.6.1"] } diff --git a/homeassistant/components/template/__init__.py b/homeassistant/components/template/__init__.py index efa99342699..390a4a31bdb 100644 --- a/homeassistant/components/template/__init__.py +++ b/homeassistant/components/template/__init__.py @@ -3,13 +3,20 @@ from __future__ import annotations import asyncio +from collections.abc import Coroutine import logging +from typing import Any from homeassistant import config as conf_util from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_DEVICE_ID, CONF_UNIQUE_ID, SERVICE_RELOAD +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_NAME, + CONF_UNIQUE_ID, + SERVICE_RELOAD, +) from homeassistant.core import Event, HomeAssistant, ServiceCall -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ConfigEntryError, HomeAssistantError from homeassistant.helpers import discovery from homeassistant.helpers.device import ( async_remove_stale_devices_links_keep_current_device, @@ -18,15 +25,29 @@ from homeassistant.helpers.reload import async_reload_integration_platforms from homeassistant.helpers.service import async_register_admin_service from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_integration +from homeassistant.util.hass_dict import HassKey -from .const import CONF_TRIGGER, DOMAIN, PLATFORMS +from .const import CONF_MAX, CONF_MIN, CONF_STEP, CONF_TRIGGER, DOMAIN, PLATFORMS from .coordinator import TriggerUpdateCoordinator +from .helpers import async_get_blueprints _LOGGER = logging.getLogger(__name__) +DATA_COORDINATORS: HassKey[list[TriggerUpdateCoordinator]] = HassKey(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the template integration.""" + + # Register template as valid domain for Blueprint + blueprints = async_get_blueprints(hass) + + # Add some default blueprints to blueprints/template, does nothing + # if blueprints/template already exists but still has to create + # an executor job to check if the folder exists so we run it in a + # separate task to avoid waiting for it to finish setting up + # since a tracked task will be waited at the end of startup + hass.async_create_task(blueprints.async_populate(), eager_start=True) + if DOMAIN in config: await _process_config(hass, config) @@ -67,6 +88,15 @@ async def async_setup_entry(hass: HomeAssistant, 
entry: ConfigEntry) -> bool: entry.options.get(CONF_DEVICE_ID), ) + for key in (CONF_MAX, CONF_MIN, CONF_STEP): + if key not in entry.options: + continue + if isinstance(entry.options[key], str): + raise ConfigEntryError( + f"The '{entry.options.get(CONF_NAME) or ""}' number template needs to " + f"be reconfigured, {key} must be a number, got '{entry.options[key]}'" + ) + await hass.config_entries.async_forward_entry_setups( entry, (entry.options["template_type"],) ) @@ -88,19 +118,21 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def _process_config(hass: HomeAssistant, hass_config: ConfigType) -> None: """Process config.""" - coordinators: list[TriggerUpdateCoordinator] | None = hass.data.pop(DOMAIN, None) + coordinators = hass.data.pop(DATA_COORDINATORS, None) # Remove old ones if coordinators: for coordinator in coordinators: coordinator.async_remove() - async def init_coordinator(hass, conf_section): + async def init_coordinator( + hass: HomeAssistant, conf_section: dict[str, Any] + ) -> TriggerUpdateCoordinator: coordinator = TriggerUpdateCoordinator(hass, conf_section) await coordinator.async_setup(hass_config) return coordinator - coordinator_tasks = [] + coordinator_tasks: list[Coroutine[Any, Any, TriggerUpdateCoordinator]] = [] for conf_section in hass_config[DOMAIN]: if CONF_TRIGGER in conf_section: @@ -116,7 +148,14 @@ async def _process_config(hass: HomeAssistant, hass_config: ConfigType) -> None: DOMAIN, { "unique_id": conf_section.get(CONF_UNIQUE_ID), - "entities": conf_section[platform_domain], + "entities": [ + { + **entity_conf, + "raw_blueprint_inputs": conf_section.raw_blueprint_inputs, + "raw_configs": conf_section.raw_config, + } + for entity_conf in conf_section[platform_domain] + ], }, hass_config, ), @@ -124,4 +163,4 @@ async def _process_config(hass: HomeAssistant, hass_config: ConfigType) -> None: ) if coordinator_tasks: - hass.data[DOMAIN] = await asyncio.gather(*coordinator_tasks) + hass.data[DATA_COORDINATORS] = await asyncio.gather(*coordinator_tasks) diff --git a/homeassistant/components/template/alarm_control_panel.py b/homeassistant/components/template/alarm_control_panel.py index 7c23fdcebcc..aa1f99f0423 100644 --- a/homeassistant/components/template/alarm_control_panel.py +++ b/homeassistant/components/template/alarm_control_panel.py @@ -4,6 +4,7 @@ from __future__ import annotations from enum import Enum import logging +from typing import Any import voluptuous as vol @@ -12,46 +13,45 @@ from homeassistant.components.alarm_control_panel import ( PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_CODE, + CONF_DEVICE_ID, CONF_NAME, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_UNAVAILABLE, + STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import TemplateError +from homeassistant.helpers import selector import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.device import async_device_info_to_link_from_device_id from homeassistant.helpers.entity import async_generate_entity_id from 
homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util import slugify from .const import DOMAIN from .template_entity import TemplateEntity, rewrite_common_legacy_to_modern_conf _LOGGER = logging.getLogger(__name__) _VALID_STATES = [ - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMING, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.PENDING, + AlarmControlPanelState.TRIGGERED, STATE_UNAVAILABLE, ] @@ -102,8 +102,29 @@ PLATFORM_SCHEMA = ALARM_CONTROL_PANEL_PLATFORM_SCHEMA.extend( } ) +ALARM_CONTROL_PANEL_CONFIG_SCHEMA = vol.Schema( + { + vol.Required(CONF_NAME): cv.template, + vol.Optional(CONF_VALUE_TEMPLATE): cv.template, + vol.Optional(CONF_DISARM_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_ARM_AWAY_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_ARM_CUSTOM_BYPASS_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_ARM_HOME_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_ARM_NIGHT_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_ARM_VACATION_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_TRIGGER_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean, + vol.Optional(CONF_CODE_FORMAT, default=TemplateCodeFormat.number.name): cv.enum( + TemplateCodeFormat + ), + vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), + } +) -async def _async_create_entities(hass, config): + +async def _async_create_entities( + hass: HomeAssistant, config: dict[str, Any] +) -> list[AlarmControlPanelTemplate]: """Create Template Alarm Control Panels.""" alarm_control_panels = [] @@ -123,6 +144,27 @@ async def _async_create_entities(hass, config): return alarm_control_panels +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Initialize config entry.""" + _options = dict(config_entry.options) + _options.pop("template_type") + validated_config = ALARM_CONTROL_PANEL_CONFIG_SCHEMA(_options) + async_add_entities( + [ + AlarmControlPanelTemplate( + hass, + slugify(_options[CONF_NAME]), + validated_config, + config_entry.entry_id, + ) + ] + ) + + async def async_setup_platform( hass: HomeAssistant, config: ConfigType, @@ -133,18 +175,18 @@ async def async_setup_platform( async_add_entities(await _async_create_entities(hass, config)) -class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity): +class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity, RestoreEntity): """Representation of a templated Alarm Control Panel.""" _attr_should_poll = False def __init__( self, - hass, - object_id, - config, - unique_id, - ): + hass: HomeAssistant, + object_id: str, + config: dict, + unique_id: str | None, + ) -> None: """Initialize the panel.""" super().__init__( hass, config=config, fallback_name=object_id, unique_id=unique_id @@ -153,6 +195,7 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity): ENTITY_ID_FORMAT, 
object_id, hass=hass ) name = self._attr_name + assert name is not None self._template = config.get(CONF_VALUE_TEMPLATE) self._disarm_script = None self._attr_code_arm_required: bool = config[CONF_CODE_ARM_REQUIRED] @@ -182,8 +225,11 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity): if (trigger_action := config.get(CONF_TRIGGER_ACTION)) is not None: self._trigger_script = Script(hass, trigger_action, name, DOMAIN) - self._state: str | None = None - + self._state: AlarmControlPanelState | None = None + self._attr_device_info = async_device_info_to_link_from_device_id( + hass, + config.get(CONF_DEVICE_ID), + ) supported_features = AlarmControlPanelEntityFeature(0) if self._arm_night_script is not None: supported_features = ( @@ -216,8 +262,21 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity): ) self._attr_supported_features = supported_features + async def async_added_to_hass(self) -> None: + """Restore last state.""" + await super().async_added_to_hass() + if ( + (last_state := await self.async_get_last_state()) is not None + and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) + and last_state.state in _VALID_STATES + # The trigger might have fired already while we waited for stored data, + # then we should not restore state + and self._state is None + ): + self._state = AlarmControlPanelState(last_state.state) + @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" return self._state @@ -268,31 +327,39 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity): async def async_alarm_arm_away(self, code: str | None = None) -> None: """Arm the panel to Away.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_AWAY, script=self._arm_away_script, code=code + AlarmControlPanelState.ARMED_AWAY, + script=self._arm_away_script, + code=code, ) async def async_alarm_arm_home(self, code: str | None = None) -> None: """Arm the panel to Home.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_HOME, script=self._arm_home_script, code=code + AlarmControlPanelState.ARMED_HOME, + script=self._arm_home_script, + code=code, ) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Arm the panel to Night.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_NIGHT, script=self._arm_night_script, code=code + AlarmControlPanelState.ARMED_NIGHT, + script=self._arm_night_script, + code=code, ) async def async_alarm_arm_vacation(self, code: str | None = None) -> None: """Arm the panel to Vacation.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_VACATION, script=self._arm_vacation_script, code=code + AlarmControlPanelState.ARMED_VACATION, + script=self._arm_vacation_script, + code=code, ) async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None: """Arm the panel to Custom Bypass.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, script=self._arm_custom_bypass_script, code=code, ) @@ -300,11 +367,13 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity): async def async_alarm_disarm(self, code: str | None = None) -> None: """Disarm the panel.""" await self._async_alarm_arm( - STATE_ALARM_DISARMED, script=self._disarm_script, code=code + AlarmControlPanelState.DISARMED, script=self._disarm_script, code=code ) async def async_alarm_trigger(self, code: str | None = None) -> None: """Trigger the panel.""" await 
self._async_alarm_arm( - STATE_ALARM_TRIGGERED, script=self._trigger_script, code=code + AlarmControlPanelState.TRIGGERED, + script=self._trigger_script, + code=code, ) diff --git a/homeassistant/components/template/binary_sensor.py b/homeassistant/components/template/binary_sensor.py index 187c7079f59..922f1d88ffb 100644 --- a/homeassistant/components/template/binary_sensor.py +++ b/homeassistant/components/template/binary_sensor.py @@ -250,7 +250,6 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity): self._attr_device_class = config.get(CONF_DEVICE_CLASS) self._template = config[CONF_STATE] - self._state: bool | None = None self._delay_cancel = None self._delay_on = None self._delay_on_raw = config.get(CONF_DELAY_ON) @@ -268,7 +267,7 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity): and (last_state := await self.async_get_last_state()) is not None and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) ): - self._state = last_state.state == STATE_ON + self._attr_is_on = last_state.state == STATE_ON await super().async_added_to_hass() @callback @@ -308,7 +307,7 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity): else template.result_as_boolean(result) ) - if state == self._state: + if state == self._attr_is_on: return # state without delay @@ -317,24 +316,19 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity, RestoreEntity): or (state and not self._delay_on) or (not state and not self._delay_off) ): - self._state = state + self._attr_is_on = state return @callback def _set_state(_): """Set state of template binary sensor.""" - self._state = state + self._attr_is_on = state self.async_write_ha_state() delay = (self._delay_on if state else self._delay_off).total_seconds() # state with delay. Cancelled if template result changes. 
self._delay_cancel = async_call_later(self.hass, delay, _set_state) - @property - def is_on(self) -> bool | None: - """Return true if sensor is on.""" - return self._state - class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity): """Sensor entity based on trigger data.""" @@ -359,7 +353,6 @@ class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity self._delay_cancel: CALLBACK_TYPE | None = None self._auto_off_cancel: CALLBACK_TYPE | None = None self._auto_off_time: datetime | None = None - self._state: bool | None = None async def async_added_to_hass(self) -> None: """Restore last state.""" @@ -371,9 +364,9 @@ class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) # The trigger might have fired already while we waited for stored data, # then we should not restore state - and self._state is None + and self._attr_is_on is None ): - self._state = last_state.state == STATE_ON + self._attr_is_on = last_state.state == STATE_ON self.restore_attributes(last_state) if CONF_AUTO_OFF not in self._config: @@ -383,16 +376,11 @@ class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity auto_off_time := extra_data.auto_off_time ) is not None and auto_off_time <= dt_util.utcnow(): # It's already past the saved auto off time - self._state = False + self._attr_is_on = False - if self._state and auto_off_time is not None: + if self._attr_is_on and auto_off_time is not None: self._set_auto_off(auto_off_time) - @property - def is_on(self) -> bool | None: - """Return state of the sensor.""" - return self._state - @callback def _handle_coordinator_update(self) -> None: """Handle update of the data.""" @@ -418,7 +406,7 @@ class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity delay = self._rendered.get(key) or self._config.get(key) # state without delay. None means rendering failed. 
- if self._state == state or state is None or delay is None: + if self._attr_is_on == state or state is None or delay is None: self._set_state(state) return @@ -439,7 +427,7 @@ class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity @callback def _set_state(self, state, _=None): """Set up auto off.""" - self._state = state + self._attr_is_on = state self.async_set_context(self.coordinator.data["context"]) self.async_write_ha_state() @@ -469,7 +457,7 @@ class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity @callback def _auto_off(_): """Reset state of template binary sensor.""" - self._state = False + self._attr_is_on = False self.async_write_ha_state() self._auto_off_time = auto_off_time diff --git a/homeassistant/components/template/blueprints/inverted_binary_sensor.yaml b/homeassistant/components/template/blueprints/inverted_binary_sensor.yaml new file mode 100644 index 00000000000..5be18404a36 --- /dev/null +++ b/homeassistant/components/template/blueprints/inverted_binary_sensor.yaml @@ -0,0 +1,27 @@ +blueprint: + name: Invert a binary sensor + description: Creates a binary_sensor which holds the inverted value of a reference binary_sensor + domain: template + source_url: https://github.com/home-assistant/core/blob/dev/homeassistant/components/template/blueprints/inverted_binary_sensor.yaml + input: + reference_entity: + name: Binary sensor to be inverted + description: The binary_sensor which needs to have its value inverted + selector: + entity: + domain: binary_sensor +variables: + reference_entity: !input reference_entity +binary_sensor: + state: > + {% if states(reference_entity) == 'on' %} + off + {% elif states(reference_entity) == 'off' %} + on + {% else %} + {{ states(reference_entity) }} + {% endif %} + # delay_on: not_used in this example + # delay_off: not_used in this example + # auto_off: not_used in this example + availability: "{{ states(reference_entity) not in ('unknown', 'unavailable') }}" diff --git a/homeassistant/components/template/button.py b/homeassistant/components/template/button.py index 52435d88971..67ce7e7a16b 100644 --- a/homeassistant/components/template/button.py +++ b/homeassistant/components/template/button.py @@ -51,7 +51,7 @@ BUTTON_SCHEMA = ( CONFIG_BUTTON_SCHEMA = vol.Schema( { vol.Optional(CONF_NAME): cv.template, - vol.Optional(CONF_PRESS): selector.ActionSelector(), + vol.Optional(CONF_PRESS): cv.SCRIPT_SCHEMA, vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), } diff --git a/homeassistant/components/template/config.py b/homeassistant/components/template/config.py index e2015743a0e..e0c5514def9 100644 --- a/homeassistant/components/template/config.py +++ b/homeassistant/components/template/config.py @@ -1,10 +1,15 @@ """Template config validator.""" +from contextlib import suppress import logging import voluptuous as vol from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.blueprint import ( + BLUEPRINT_INSTANCE_FIELDS, + is_blueprint_instance_config, +) from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN @@ -12,9 +17,16 @@ from homeassistant.components.select import DOMAIN as SELECT_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN 
from homeassistant.config import async_log_schema_error, config_without_domain -from homeassistant.const import CONF_BINARY_SENSORS, CONF_SENSORS, CONF_UNIQUE_ID +from homeassistant.const import ( + CONF_BINARY_SENSORS, + CONF_NAME, + CONF_SENSORS, + CONF_UNIQUE_ID, + CONF_VARIABLES, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.condition import async_validate_conditions_config from homeassistant.helpers.trigger import async_validate_trigger_config from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_notify_setup_error @@ -28,7 +40,15 @@ from . import ( sensor as sensor_platform, weather as weather_platform, ) -from .const import CONF_ACTION, CONF_TRIGGER, DOMAIN +from .const import ( + CONF_ACTION, + CONF_CONDITION, + CONF_TRIGGER, + DOMAIN, + PLATFORMS, + TemplateConfig, +) +from .helpers import async_get_blueprints PACKAGE_MERGE_HINT = "list" @@ -36,7 +56,9 @@ CONFIG_SECTION_SCHEMA = vol.Schema( { vol.Optional(CONF_UNIQUE_ID): cv.string, vol.Optional(CONF_TRIGGER): cv.TRIGGER_SCHEMA, + vol.Optional(CONF_CONDITION): cv.CONDITIONS_SCHEMA, vol.Optional(CONF_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_VARIABLES): cv.SCRIPT_VARIABLES_SCHEMA, vol.Optional(NUMBER_DOMAIN): vol.All( cv.ensure_list, [number_platform.NUMBER_SCHEMA] ), @@ -64,9 +86,73 @@ CONFIG_SECTION_SCHEMA = vol.Schema( vol.Optional(WEATHER_DOMAIN): vol.All( cv.ensure_list, [weather_platform.WEATHER_SCHEMA] ), - } + }, ) +TEMPLATE_BLUEPRINT_INSTANCE_SCHEMA = vol.Schema( + { + vol.Optional(CONF_NAME): cv.string, + vol.Optional(CONF_UNIQUE_ID): cv.string, + } +).extend(BLUEPRINT_INSTANCE_FIELDS.schema) + + +async def _async_resolve_blueprints( + hass: HomeAssistant, + config: ConfigType, +) -> TemplateConfig: + """If a config item requires a blueprint, resolve that item to an actual config.""" + raw_config = None + raw_blueprint_inputs = None + + with suppress(ValueError): # Invalid config + raw_config = dict(config) + + if is_blueprint_instance_config(config): + config = TEMPLATE_BLUEPRINT_INSTANCE_SCHEMA(config) + blueprints = async_get_blueprints(hass) + + blueprint_inputs = await blueprints.async_inputs_from_config(config) + raw_blueprint_inputs = blueprint_inputs.config_with_inputs + + config = blueprint_inputs.async_substitute() + + platforms = [platform for platform in PLATFORMS if platform in config] + if len(platforms) > 1: + raise vol.Invalid("more than one platform defined per blueprint") + if len(platforms) == 1: + platform = platforms.pop() + for prop in (CONF_NAME, CONF_UNIQUE_ID, CONF_VARIABLES): + if prop in config: + config[platform][prop] = config.pop(prop) + raw_config = dict(config) + + template_config = TemplateConfig(CONFIG_SECTION_SCHEMA(config)) + template_config.raw_blueprint_inputs = raw_blueprint_inputs + template_config.raw_config = raw_config + + return template_config + + +async def async_validate_config_section( + hass: HomeAssistant, config: ConfigType +) -> TemplateConfig: + """Validate an entire config section for the template integration.""" + + validated_config = await _async_resolve_blueprints(hass, config) + + if CONF_TRIGGER in validated_config: + validated_config[CONF_TRIGGER] = await async_validate_trigger_config( + hass, validated_config[CONF_TRIGGER] + ) + + if CONF_CONDITION in validated_config: + validated_config[CONF_CONDITION] = await async_validate_conditions_config( + hass, validated_config[CONF_CONDITION] + ) + + return validated_config + async def 
async_validate_config(hass: HomeAssistant, config: ConfigType) -> ConfigType: """Validate config.""" @@ -77,12 +163,9 @@ async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> Conf for cfg in cv.ensure_list(config[DOMAIN]): try: - cfg = CONFIG_SECTION_SCHEMA(cfg) - - if CONF_TRIGGER in cfg: - cfg[CONF_TRIGGER] = await async_validate_trigger_config( - hass, cfg[CONF_TRIGGER] - ) + template_config: TemplateConfig = await async_validate_config_section( + hass, cfg + ) except vol.Invalid as err: async_log_schema_error(err, DOMAIN, cfg, hass) async_notify_setup_error(hass, DOMAIN) @@ -102,7 +185,7 @@ async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> Conf binary_sensor_platform.rewrite_legacy_to_modern_conf, ), ): - if old_key not in cfg: + if old_key not in template_config: continue if not legacy_warn_printed: @@ -114,11 +197,13 @@ async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> Conf "https://www.home-assistant.io/integrations/template#configuration-for-trigger-based-template-sensors" ) - definitions = list(cfg[new_key]) if new_key in cfg else [] - definitions.extend(transform(hass, cfg[old_key])) - cfg = {**cfg, new_key: definitions} + definitions = ( + list(template_config[new_key]) if new_key in template_config else [] + ) + definitions.extend(transform(hass, template_config[old_key])) + template_config = TemplateConfig({**template_config, new_key: definitions}) - config_sections.append(cfg) + config_sections.append(template_config) # Create a copy of the configuration with all config for current # component removed and add validated config back in. diff --git a/homeassistant/components/template/config_flow.py b/homeassistant/components/template/config_flow.py index 2c12a0d03e9..e6cc377bc26 100644 --- a/homeassistant/components/template/config_flow.py +++ b/homeassistant/components/template/config_flow.py @@ -39,6 +39,18 @@ from homeassistant.helpers.schema_config_entry_flow import ( SchemaFlowMenuStep, ) +from .alarm_control_panel import ( + CONF_ARM_AWAY_ACTION, + CONF_ARM_CUSTOM_BYPASS_ACTION, + CONF_ARM_HOME_ACTION, + CONF_ARM_NIGHT_ACTION, + CONF_ARM_VACATION_ACTION, + CONF_CODE_ARM_REQUIRED, + CONF_CODE_FORMAT, + CONF_DISARM_ACTION, + CONF_TRIGGER_ACTION, + TemplateCodeFormat, +) from .binary_sensor import async_create_preview_binary_sensor from .const import CONF_PRESS, CONF_TURN_OFF, CONF_TURN_ON, DOMAIN from .number import ( @@ -68,6 +80,30 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema: if flow_type == "config": schema = {vol.Required(CONF_NAME): selector.TextSelector()} + if domain == Platform.ALARM_CONTROL_PANEL: + schema |= { + vol.Optional(CONF_VALUE_TEMPLATE): selector.TemplateSelector(), + vol.Optional(CONF_DISARM_ACTION): selector.ActionSelector(), + vol.Optional(CONF_ARM_AWAY_ACTION): selector.ActionSelector(), + vol.Optional(CONF_ARM_CUSTOM_BYPASS_ACTION): selector.ActionSelector(), + vol.Optional(CONF_ARM_HOME_ACTION): selector.ActionSelector(), + vol.Optional(CONF_ARM_NIGHT_ACTION): selector.ActionSelector(), + vol.Optional(CONF_ARM_VACATION_ACTION): selector.ActionSelector(), + vol.Optional(CONF_TRIGGER_ACTION): selector.ActionSelector(), + vol.Optional( + CONF_CODE_ARM_REQUIRED, default=True + ): selector.BooleanSelector(), + vol.Optional( + CONF_CODE_FORMAT, default=TemplateCodeFormat.number.name + ): selector.SelectSelector( + selector.SelectSelectorConfig( + options=[e.name for e in TemplateCodeFormat], + mode=selector.SelectSelectorMode.DROPDOWN, + 
translation_key="alarm_control_panel_code_format", + ) + ), + } + if domain == Platform.BINARY_SENSOR: schema |= _SCHEMA_STATE if flow_type == "config": @@ -107,16 +143,21 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema: if domain == Platform.NUMBER: schema |= { vol.Required(CONF_STATE): selector.TemplateSelector(), - vol.Required( - CONF_MIN, default=f"{{{{{DEFAULT_MIN_VALUE}}}}}" - ): selector.TemplateSelector(), - vol.Required( - CONF_MAX, default=f"{{{{{DEFAULT_MAX_VALUE}}}}}" - ): selector.TemplateSelector(), - vol.Required( - CONF_STEP, default=f"{{{{{DEFAULT_STEP}}}}}" - ): selector.TemplateSelector(), - vol.Optional(CONF_SET_VALUE): selector.ActionSelector(), + vol.Required(CONF_MIN, default=DEFAULT_MIN_VALUE): selector.NumberSelector( + selector.NumberSelectorConfig(mode=selector.NumberSelectorMode.BOX), + ), + vol.Required(CONF_MAX, default=DEFAULT_MAX_VALUE): selector.NumberSelector( + selector.NumberSelectorConfig(mode=selector.NumberSelectorMode.BOX), + ), + vol.Required(CONF_STEP, default=DEFAULT_STEP): selector.NumberSelector( + selector.NumberSelectorConfig(mode=selector.NumberSelectorMode.BOX), + ), + vol.Optional(CONF_UNIT_OF_MEASUREMENT): selector.TextSelector( + selector.TextSelectorConfig( + type=selector.TextSelectorType.TEXT, multiline=False + ) + ), + vol.Required(CONF_SET_VALUE): selector.ActionSelector(), } if domain == Platform.SELECT: @@ -194,8 +235,12 @@ def _validate_unit(options: dict[str, Any]) -> None: and (units := DEVICE_CLASS_UNITS.get(device_class)) is not None and (unit := options.get(CONF_UNIT_OF_MEASUREMENT)) not in units ): + # Sort twice to make sure strings with same case-insensitive order of + # letters are sorted consistently still. sorted_units = sorted( - [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + sorted( + [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + ), key=str.casefold, ) if len(sorted_units) == 1: @@ -260,6 +305,7 @@ def validate_user_input( TEMPLATE_TYPES = [ + "alarm_control_panel", "binary_sensor", "button", "image", @@ -271,6 +317,10 @@ TEMPLATE_TYPES = [ CONFIG_FLOW = { "user": SchemaFlowMenuStep(TEMPLATE_TYPES), + Platform.ALARM_CONTROL_PANEL: SchemaFlowFormStep( + config_schema(Platform.ALARM_CONTROL_PANEL), + validate_user_input=validate_user_input(Platform.ALARM_CONTROL_PANEL), + ), Platform.BINARY_SENSOR: SchemaFlowFormStep( config_schema(Platform.BINARY_SENSOR), preview="template", @@ -308,6 +358,10 @@ CONFIG_FLOW = { OPTIONS_FLOW = { "init": SchemaFlowFormStep(next_step=choose_options_step), + Platform.ALARM_CONTROL_PANEL: SchemaFlowFormStep( + options_schema(Platform.ALARM_CONTROL_PANEL), + validate_user_input=validate_user_input(Platform.ALARM_CONTROL_PANEL), + ), Platform.BINARY_SENSOR: SchemaFlowFormStep( options_schema(Platform.BINARY_SENSOR), preview="template", diff --git a/homeassistant/components/template/const.py b/homeassistant/components/template/const.py index 8b4e46ba383..f333d14797e 100644 --- a/homeassistant/components/template/const.py +++ b/homeassistant/components/template/const.py @@ -1,11 +1,24 @@ """Constants for the Template Platform Components.""" +from homeassistant.components.blueprint import BLUEPRINT_SCHEMA from homeassistant.const import Platform +from homeassistant.helpers.typing import ConfigType CONF_ACTION = "action" -CONF_AVAILABILITY_TEMPLATE = "availability_template" CONF_ATTRIBUTE_TEMPLATES = "attribute_templates" +CONF_ATTRIBUTES = "attributes" +CONF_AVAILABILITY = "availability" +CONF_AVAILABILITY_TEMPLATE = 
"availability_template" +CONF_CONDITION = "condition" +CONF_MAX = "max" +CONF_MIN = "min" +CONF_OBJECT_ID = "object_id" +CONF_PICTURE = "picture" +CONF_PRESS = "press" +CONF_STEP = "step" CONF_TRIGGER = "trigger" +CONF_TURN_OFF = "turn_off" +CONF_TURN_ON = "turn_on" DOMAIN = "template" @@ -28,11 +41,11 @@ PLATFORMS = [ Platform.WEATHER, ] -CONF_AVAILABILITY = "availability" -CONF_ATTRIBUTES = "attributes" -CONF_ATTRIBUTE_TEMPLATES = "attribute_templates" -CONF_PICTURE = "picture" -CONF_PRESS = "press" -CONF_OBJECT_ID = "object_id" -CONF_TURN_OFF = "turn_off" -CONF_TURN_ON = "turn_on" +TEMPLATE_BLUEPRINT_SCHEMA = BLUEPRINT_SCHEMA + + +class TemplateConfig(dict): + """Dummy class to allow adding attributes.""" + + raw_config: ConfigType | None = None + raw_blueprint_inputs: ConfigType | None = None diff --git a/homeassistant/components/template/coordinator.py b/homeassistant/components/template/coordinator.py index d2ce44a0ad1..4d8fe78f2b5 100644 --- a/homeassistant/components/template/coordinator.py +++ b/homeassistant/components/template/coordinator.py @@ -1,16 +1,18 @@ """Data update coordinator for trigger based template entities.""" -from collections.abc import Callable +from collections.abc import Callable, Mapping import logging +from typing import TYPE_CHECKING, Any from homeassistant.const import EVENT_HOMEASSISTANT_START -from homeassistant.core import Context, CoreState, callback -from homeassistant.helpers import discovery, trigger as trigger_helper +from homeassistant.core import Context, CoreState, Event, HomeAssistant, callback +from homeassistant.helpers import condition, discovery, trigger as trigger_helper from homeassistant.helpers.script import Script -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers.trace import trace_get +from homeassistant.helpers.typing import ConfigType, TemplateVarsType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import CONF_ACTION, CONF_TRIGGER, DOMAIN, PLATFORMS +from .const import CONF_ACTION, CONF_CONDITION, CONF_TRIGGER, DOMAIN, PLATFORMS _LOGGER = logging.getLogger(__name__) @@ -20,10 +22,13 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): REMOVE_TRIGGER = object() - def __init__(self, hass, config): + def __init__(self, hass: HomeAssistant, config: dict[str, Any]) -> None: """Instantiate trigger data.""" - super().__init__(hass, _LOGGER, name="Trigger Update Coordinator") + super().__init__( + hass, _LOGGER, config_entry=None, name="Trigger Update Coordinator" + ) self.config = config + self._cond_func: Callable[[Mapping[str, Any] | None], bool] | None = None self._unsub_start: Callable[[], None] | None = None self._unsub_trigger: Callable[[], None] | None = None self._script: Script | None = None @@ -34,7 +39,7 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): return self.config.get("unique_id") @callback - def async_remove(self): + def async_remove(self) -> None: """Signal that the entities need to remove themselves.""" if self._unsub_start: self._unsub_start() @@ -63,7 +68,7 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): eager_start=True, ) - async def _attach_triggers(self, start_event=None) -> None: + async def _attach_triggers(self, start_event: Event | None = None) -> None: """Attach the triggers.""" if CONF_ACTION in self.config: self._script = Script( @@ -73,6 +78,11 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): DOMAIN, ) + if CONF_CONDITION in self.config: + self._cond_func = await 
condition.async_conditions_from_config( + self.hass, self.config[CONF_CONDITION], _LOGGER, "template entity" + ) + if start_event is not None: self._unsub_start = None @@ -91,16 +101,43 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): start_event is not None, ) - async def _handle_triggered_with_script(self, run_variables, context=None): + async def _handle_triggered_with_script( + self, run_variables: TemplateVarsType, context: Context | None = None + ) -> None: + if not self._check_condition(run_variables): + return # Create a context referring to the trigger context. trigger_context_id = None if context is None else context.id script_context = Context(parent_id=trigger_context_id) + if TYPE_CHECKING: + # This method is only called if there's a script + assert self._script is not None if script_result := await self._script.async_run(run_variables, script_context): run_variables = script_result.variables - self._handle_triggered(run_variables, context) + self._execute_update(run_variables, context) + + async def _handle_triggered( + self, run_variables: TemplateVarsType, context: Context | None = None + ) -> None: + if not self._check_condition(run_variables): + return + self._execute_update(run_variables, context) + + def _check_condition(self, run_variables: TemplateVarsType) -> bool: + if not self._cond_func: + return True + condition_result = self._cond_func(run_variables) + if condition_result is False: + _LOGGER.debug( + "Conditions not met, aborting template trigger update. Condition summary: %s", + trace_get(clear=False), + ) + return condition_result @callback - def _handle_triggered(self, run_variables, context=None): + def _execute_update( + self, run_variables: TemplateVarsType, context: Context | None = None + ) -> None: self.async_set_updated_data( {"run_variables": run_variables, "context": context} ) diff --git a/homeassistant/components/template/cover.py b/homeassistant/components/template/cover.py index 2c84387ed64..2642ede9c3a 100644 --- a/homeassistant/components/template/cover.py +++ b/homeassistant/components/template/cover.py @@ -24,10 +24,6 @@ from homeassistant.const import ( CONF_OPTIMISTIC, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import TemplateError @@ -45,11 +41,17 @@ from .template_entity import ( ) _LOGGER = logging.getLogger(__name__) + +OPEN_STATE = "open" +OPENING_STATE = "opening" +CLOSED_STATE = "closed" +CLOSING_STATE = "closing" + _VALID_STATES = [ - STATE_OPEN, - STATE_OPENING, - STATE_CLOSED, - STATE_CLOSING, + OPEN_STATE, + OPENING_STATE, + CLOSED_STATE, + CLOSING_STATE, "true", "false", "none", @@ -227,13 +229,13 @@ class CoverTemplate(TemplateEntity, CoverEntity): if state in _VALID_STATES: if not self._position_template: - if state in ("true", STATE_OPEN): + if state in ("true", OPEN_STATE): self._position = 100 else: self._position = 0 - self._is_opening = state == STATE_OPENING - self._is_closing = state == STATE_CLOSING + self._is_opening = state == OPENING_STATE + self._is_closing = state == CLOSING_STATE else: _LOGGER.error( "Received invalid cover is_on state: %s for entity %s. 
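The coordinator above now evaluates an optional condition before the action script runs and before entities are refreshed. A rough standalone sketch of that gating order (simplified; the real callable comes from condition.async_conditions_from_config, and the handler names here are hypothetical):

from collections.abc import Callable, Mapping
from typing import Any

def make_update_handler(
    cond_func: Callable[[Mapping[str, Any] | None], bool] | None,
    execute_update: Callable[[Mapping[str, Any]], None],
) -> Callable[[Mapping[str, Any]], None]:
    """Return a trigger handler that only updates when the condition passes."""

    def handle(run_variables: Mapping[str, Any]) -> None:
        # No condition configured -> always update, mirroring _check_condition().
        if cond_func is not None and not cond_func(run_variables):
            return  # condition failed: skip the script and the entity update
        execute_update(run_variables)

    return handle

updates: list[Mapping[str, Any]] = []
handler = make_update_handler(
    cond_func=lambda variables: bool(variables and variables.get("state") == "on"),
    execute_update=updates.append,
)
handler({"state": "off"})
handler({"state": "on"})
assert len(updates) == 1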
Expected: %s", diff --git a/homeassistant/components/template/fan.py b/homeassistant/components/template/fan.py index cedd7d0d725..7720ef7e1b3 100644 --- a/homeassistant/components/template/fan.py +++ b/homeassistant/components/template/fan.py @@ -124,7 +124,6 @@ class TemplateFan(TemplateEntity, FanEntity): """A template fan component.""" _attr_should_poll = False - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/template/helpers.py b/homeassistant/components/template/helpers.py new file mode 100644 index 00000000000..b320f2128cd --- /dev/null +++ b/homeassistant/components/template/helpers.py @@ -0,0 +1,63 @@ +"""Helpers for template integration.""" + +import logging + +from homeassistant.components import blueprint +from homeassistant.const import SERVICE_RELOAD +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import async_get_platforms +from homeassistant.helpers.singleton import singleton + +from .const import DOMAIN, TEMPLATE_BLUEPRINT_SCHEMA +from .template_entity import TemplateEntity + +DATA_BLUEPRINTS = "template_blueprints" + +LOGGER = logging.getLogger(__name__) + + +@callback +def templates_with_blueprint(hass: HomeAssistant, blueprint_path: str) -> list[str]: + """Return all template entity ids that reference the blueprint.""" + return [ + entity_id + for platform in async_get_platforms(hass, DOMAIN) + for entity_id, template_entity in platform.entities.items() + if isinstance(template_entity, TemplateEntity) + and template_entity.referenced_blueprint == blueprint_path + ] + + +@callback +def blueprint_in_template(hass: HomeAssistant, entity_id: str) -> str | None: + """Return the blueprint the template entity is based on or None.""" + for platform in async_get_platforms(hass, DOMAIN): + if isinstance( + (template_entity := platform.entities.get(entity_id)), TemplateEntity + ): + return template_entity.referenced_blueprint + return None + + +def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool: + """Return True if any template references the blueprint.""" + return len(templates_with_blueprint(hass, blueprint_path)) > 0 + + +async def _reload_blueprint_templates(hass: HomeAssistant, blueprint_path: str) -> None: + """Reload all templates that rely on a specific blueprint.""" + await hass.services.async_call(DOMAIN, SERVICE_RELOAD) + + +@singleton(DATA_BLUEPRINTS) +@callback +def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints: + """Get template blueprints.""" + return blueprint.DomainBlueprints( + hass, + DOMAIN, + LOGGER, + _blueprint_in_use, + _reload_blueprint_templates, + TEMPLATE_BLUEPRINT_SCHEMA, + ) diff --git a/homeassistant/components/template/icons.json b/homeassistant/components/template/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/template/icons.json +++ b/homeassistant/components/template/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index cae6c0cebc1..0654a42406a 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -9,7 +9,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -39,6 +39,7 @@ from homeassistant.helpers.entity 
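The new helpers module above answers one question in two directions: which template entities came from a given blueprint, and which blueprint a given entity came from. A standalone sketch of that lookup, with a plain dict standing in for Home Assistant's entity platforms (illustrative only):

def templates_with_blueprint(
    entities: dict[str, str | None], blueprint_path: str
) -> list[str]:
    """Entity ids whose referenced blueprint matches blueprint_path."""
    return [
        entity_id
        for entity_id, referenced in entities.items()
        if referenced == blueprint_path
    ]

def blueprint_in_use(entities: dict[str, str | None], blueprint_path: str) -> bool:
    """True if any template entity references the blueprint."""
    return bool(templates_with_blueprint(entities, blueprint_path))

# Hypothetical data: entity id -> blueprint path (None means configured inline).
entities = {"sensor.garden": "inverted_binary_sensor.yaml", "sensor.porch": None}
assert templates_with_blueprint(entities, "inverted_binary_sensor.yaml") == ["sensor.garden"]
assert blueprint_in_use(entities, "inverted_binary_sensor.yaml") is True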
import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util import color as color_util from .const import DOMAIN from .template_entity import ( @@ -77,6 +78,9 @@ CONF_TEMPERATURE_TEMPLATE = "temperature_template" CONF_WHITE_VALUE_ACTION = "set_white_value" CONF_WHITE_VALUE_TEMPLATE = "white_value_template" +DEFAULT_MIN_MIREDS = 153 +DEFAULT_MAX_MIREDS = 500 + LIGHT_SCHEMA = vol.All( cv.deprecated(CONF_ENTITY_ID), vol.Schema( @@ -262,25 +266,27 @@ class LightTemplate(TemplateEntity, LightEntity): return self._brightness @property - def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" - return self._temperature + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + if self._temperature is None: + return None + return color_util.color_temperature_mired_to_kelvin(self._temperature) @property - def max_mireds(self) -> int: - """Return the max mireds value in mireds.""" + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" if self._max_mireds is not None: - return self._max_mireds + return color_util.color_temperature_mired_to_kelvin(self._max_mireds) - return super().max_mireds + return super().min_color_temp_kelvin @property - def min_mireds(self) -> int: - """Return the min mireds value in mireds.""" + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" if self._min_mireds is not None: - return self._min_mireds + return color_util.color_temperature_mired_to_kelvin(self._min_mireds) - return super().min_mireds + return super().max_color_temp_kelvin @property def hs_color(self) -> tuple[float, float] | None: @@ -447,13 +453,16 @@ class LightTemplate(TemplateEntity, LightEntity): self._brightness = kwargs[ATTR_BRIGHTNESS] optimistic_set = True - if self._temperature_template is None and ATTR_COLOR_TEMP in kwargs: + if self._temperature_template is None and ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) _LOGGER.debug( "Optimistically setting color temperature to %s", - kwargs[ATTR_COLOR_TEMP], + color_temp, ) self._color_mode = ColorMode.COLOR_TEMP - self._temperature = kwargs[ATTR_COLOR_TEMP] + self._temperature = color_temp if self._hs_template is None and self._color_template is None: self._hs_color = None if self._rgb_template is None: @@ -544,8 +553,10 @@ class LightTemplate(TemplateEntity, LightEntity): if ATTR_TRANSITION in kwargs and self._supports_transition is True: common_params["transition"] = kwargs[ATTR_TRANSITION] - if ATTR_COLOR_TEMP in kwargs and self._temperature_script: - common_params["color_temp"] = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs and self._temperature_script: + common_params["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) await self.async_run_script( self._temperature_script, @@ -756,7 +767,9 @@ class LightTemplate(TemplateEntity, LightEntity): self._temperature = None return temperature = int(render) - if self.min_mireds <= temperature <= self.max_mireds: + min_mireds = self._min_mireds or DEFAULT_MIN_MIREDS + max_mireds = self._max_mireds or DEFAULT_MAX_MIREDS + if min_mireds <= temperature <= max_mireds: self._temperature = temperature 
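The light now reports color temperature in Kelvin while the YAML templates keep working in mireds; the conversion is the usual reciprocal, and the bounds swap because the smallest mired value is the coldest (highest) Kelvin value. A minimal sketch using the DEFAULT_MIN_MIREDS/DEFAULT_MAX_MIREDS values introduced above:

def mired_to_kelvin(mired: float) -> int:
    return round(1_000_000 / mired)

def kelvin_to_mired(kelvin: float) -> int:
    return round(1_000_000 / kelvin)

DEFAULT_MIN_MIREDS = 153
DEFAULT_MAX_MIREDS = 500

assert mired_to_kelvin(DEFAULT_MIN_MIREDS) == 6536  # coldest -> max_color_temp_kelvin
assert mired_to_kelvin(DEFAULT_MAX_MIREDS) == 2000  # warmest -> min_color_temp_kelvin
assert kelvin_to_mired(2000) == DEFAULT_MAX_MIREDS  # round trip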
else: _LOGGER.error( @@ -766,8 +779,8 @@ class LightTemplate(TemplateEntity, LightEntity): ), temperature, self.entity_id, - self.min_mireds, - self.max_mireds, + min_mireds, + max_mireds, ) self._temperature = None except ValueError: diff --git a/homeassistant/components/template/lock.py b/homeassistant/components/template/lock.py index 5c0b67a23dc..f194154a50c 100644 --- a/homeassistant/components/template/lock.py +++ b/homeassistant/components/template/lock.py @@ -2,16 +2,15 @@ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol from homeassistant.components.lock import ( PLATFORM_SCHEMA as LOCK_PLATFORM_SCHEMA, - STATE_JAMMED, - STATE_LOCKING, - STATE_UNLOCKING, LockEntity, + LockEntityFeature, + LockState, ) from homeassistant.const import ( ATTR_CODE, @@ -19,9 +18,6 @@ from homeassistant.const import ( CONF_OPTIMISTIC, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, - STATE_LOCKED, - STATE_ON, - STATE_UNLOCKED, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError, TemplateError @@ -40,6 +36,7 @@ from .template_entity import ( CONF_CODE_FORMAT_TEMPLATE = "code_format_template" CONF_LOCK = "lock" CONF_UNLOCK = "unlock" +CONF_OPEN = "open" DEFAULT_NAME = "Template Lock" DEFAULT_OPTIMISTIC = False @@ -49,6 +46,7 @@ PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend( vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_LOCK): cv.SCRIPT_SCHEMA, vol.Required(CONF_UNLOCK): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_OPEN): cv.SCRIPT_SCHEMA, vol.Required(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_CODE_FORMAT_TEMPLATE): cv.template, vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean, @@ -57,7 +55,9 @@ PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend( ).extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY.schema) -async def _async_create_entities(hass, config): +async def _async_create_entities( + hass: HomeAssistant, config: dict[str, Any] +) -> list[TemplateLock]: """Create the Template lock.""" config = rewrite_common_legacy_to_modern_conf(hass, config) return [TemplateLock(hass, config, config.get(CONF_UNIQUE_ID))] @@ -80,47 +80,56 @@ class TemplateLock(TemplateEntity, LockEntity): def __init__( self, - hass, - config, - unique_id, - ): + hass: HomeAssistant, + config: dict[str, Any], + unique_id: str | None, + ) -> None: """Initialize the lock.""" super().__init__( hass, config=config, fallback_name=DEFAULT_NAME, unique_id=unique_id ) - self._state = None + self._state: LockState | None = None name = self._attr_name + assert name self._state_template = config.get(CONF_VALUE_TEMPLATE) self._command_lock = Script(hass, config[CONF_LOCK], name, DOMAIN) self._command_unlock = Script(hass, config[CONF_UNLOCK], name, DOMAIN) + if CONF_OPEN in config: + self._command_open = Script(hass, config[CONF_OPEN], name, DOMAIN) + self._attr_supported_features |= LockEntityFeature.OPEN self._code_format_template = config.get(CONF_CODE_FORMAT_TEMPLATE) - self._code_format = None - self._code_format_template_error = None + self._code_format: str | None = None + self._code_format_template_error: TemplateError | None = None self._optimistic = config.get(CONF_OPTIMISTIC) self._attr_assumed_state = bool(self._optimistic) @property def is_locked(self) -> bool: """Return true if lock is locked.""" - return self._state in ("true", STATE_ON, STATE_LOCKED) + return self._state == LockState.LOCKED @property def is_jammed(self) -> bool: """Return true if lock is jammed.""" - return 
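Supported lock features are a bit flag, so configuring an open: action only has to OR LockEntityFeature.OPEN into the entity, as the __init__ above does. A standalone sketch with a stand-in flag enum:

from enum import IntFlag

class LockEntityFeature(IntFlag):  # stand-in for homeassistant.components.lock.LockEntityFeature
    OPEN = 1

def supported_features(config: dict) -> LockEntityFeature:
    """Set the OPEN flag only when an `open:` action is configured."""
    features = LockEntityFeature(0)
    if "open" in config:
        features |= LockEntityFeature.OPEN
    return features

assert LockEntityFeature.OPEN not in supported_features({"lock": [], "unlock": []})
assert LockEntityFeature.OPEN in supported_features({"lock": [], "unlock": [], "open": []})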
self._state == STATE_JAMMED + return self._state == LockState.JAMMED @property def is_unlocking(self) -> bool: """Return true if lock is unlocking.""" - return self._state == STATE_UNLOCKING + return self._state == LockState.UNLOCKING @property def is_locking(self) -> bool: """Return true if lock is locking.""" - return self._state == STATE_LOCKING + return self._state == LockState.LOCKING + + @property + def is_open(self) -> bool: + """Return true if lock is open.""" + return self._state == LockState.OPEN @callback - def _update_state(self, result): + def _update_state(self, result: str | TemplateError) -> None: """Update the state from the template.""" super()._update_state(result) if isinstance(result, TemplateError): @@ -128,11 +137,27 @@ class TemplateLock(TemplateEntity, LockEntity): return if isinstance(result, bool): - self._state = STATE_LOCKED if result else STATE_UNLOCKED + self._state = LockState.LOCKED if result else LockState.UNLOCKED return if isinstance(result, str): - self._state = result.lower() + if result.lower() in ( + "true", + "on", + "locked", + ): + self._state = LockState.LOCKED + elif result.lower() in ( + "false", + "off", + "unlocked", + ): + self._state = LockState.UNLOCKED + else: + try: + self._state = LockState(result.lower()) + except ValueError: + self._state = None return self._state = None @@ -145,6 +170,8 @@ class TemplateLock(TemplateEntity, LockEntity): @callback def _async_setup_templates(self) -> None: """Set up templates.""" + if TYPE_CHECKING: + assert self._state_template is not None self.add_template_attribute( "_state", self._state_template, None, self._update_state ) @@ -172,10 +199,12 @@ class TemplateLock(TemplateEntity, LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Lock the device.""" + # Check if we need to raise for incorrect code format + # template before processing the action. self._raise_template_error_if_available() if self._optimistic: - self._state = True + self._state = LockState.LOCKED self.async_write_ha_state() tpl_vars = {ATTR_CODE: kwargs.get(ATTR_CODE) if kwargs else None} @@ -186,10 +215,12 @@ class TemplateLock(TemplateEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock the device.""" + # Check if we need to raise for incorrect code format + # template before processing the action. self._raise_template_error_if_available() if self._optimistic: - self._state = False + self._state = LockState.UNLOCKED self.async_write_ha_state() tpl_vars = {ATTR_CODE: kwargs.get(ATTR_CODE) if kwargs else None} @@ -198,7 +229,24 @@ class TemplateLock(TemplateEntity, LockEntity): self._command_unlock, run_variables=tpl_vars, context=self._context ) + async def async_open(self, **kwargs: Any) -> None: + """Open the device.""" + # Check if we need to raise for incorrect code format + # template before processing the action. 
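The _update_state rewrite above replaces the old pass-through of lowercased strings with an explicit normalisation: boolean-ish values map to LOCKED/UNLOCKED, anything else is tried against LockState, and unknown strings become None rather than leaking into the entity state. A standalone sketch of that mapping (the enum here is a reduced stand-in for homeassistant.components.lock.LockState):

from enum import StrEnum

class LockState(StrEnum):
    LOCKED = "locked"
    UNLOCKED = "unlocked"
    LOCKING = "locking"
    UNLOCKING = "unlocking"
    JAMMED = "jammed"
    OPEN = "open"

def normalize(result: str) -> LockState | None:
    value = result.lower()
    if value in ("true", "on", "locked"):
        return LockState.LOCKED
    if value in ("false", "off", "unlocked"):
        return LockState.UNLOCKED
    try:
        return LockState(value)
    except ValueError:
        return None

assert normalize("On") is LockState.LOCKED
assert normalize("Jammed") is LockState.JAMMED
assert normalize("banana") is None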
+ self._raise_template_error_if_available() + + if self._optimistic: + self._state = LockState.OPEN + self.async_write_ha_state() + + tpl_vars = {ATTR_CODE: kwargs.get(ATTR_CODE) if kwargs else None} + + await self.async_run_script( + self._command_open, run_variables=tpl_vars, context=self._context + ) + def _raise_template_error_if_available(self): + """Raise an error if the rendered code format is not valid.""" if self._code_format_template_error is not None: raise ServiceValidationError( translation_domain=DOMAIN, diff --git a/homeassistant/components/template/manifest.json b/homeassistant/components/template/manifest.json index 4112ca7a73f..f1225f74f06 100644 --- a/homeassistant/components/template/manifest.json +++ b/homeassistant/components/template/manifest.json @@ -2,8 +2,9 @@ "domain": "template", "name": "Template", "after_dependencies": ["group"], - "codeowners": ["@PhracturedBlue", "@tetienne", "@home-assistant/core"], + "codeowners": ["@PhracturedBlue", "@home-assistant/core"], "config_flow": true, + "dependencies": ["blueprint"], "documentation": "https://www.home-assistant.io/integrations/template", "integration_type": "helper", "iot_class": "local_push", diff --git a/homeassistant/components/template/number.py b/homeassistant/components/template/number.py index 955600a9b9e..90dd555ca42 100644 --- a/homeassistant/components/template/number.py +++ b/homeassistant/components/template/number.py @@ -22,6 +22,7 @@ from homeassistant.const import ( CONF_OPTIMISTIC, CONF_STATE, CONF_UNIQUE_ID, + CONF_UNIT_OF_MEASUREMENT, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv, selector @@ -31,7 +32,7 @@ from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import TriggerUpdateCoordinator -from .const import DOMAIN +from .const import CONF_MAX, CONF_MIN, CONF_STEP, DOMAIN from .template_entity import ( TEMPLATE_ENTITY_AVAILABILITY_SCHEMA, TEMPLATE_ENTITY_ICON_SCHEMA, @@ -42,9 +43,6 @@ from .trigger_entity import TriggerEntity _LOGGER = logging.getLogger(__name__) CONF_SET_VALUE = "set_value" -CONF_MIN = "min" -CONF_MAX = "max" -CONF_STEP = "step" DEFAULT_NAME = "Template Number" DEFAULT_OPTIMISTIC = False @@ -58,6 +56,7 @@ NUMBER_SCHEMA = ( vol.Required(CONF_STEP): cv.template, vol.Optional(CONF_MIN, default=DEFAULT_MIN_VALUE): cv.template, vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): cv.template, + vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -70,9 +69,10 @@ NUMBER_CONFIG_SCHEMA = vol.Schema( vol.Required(CONF_NAME): cv.template, vol.Required(CONF_STATE): cv.template, vol.Required(CONF_STEP): cv.template, - vol.Optional(CONF_SET_VALUE): cv.SCRIPT_SCHEMA, + vol.Required(CONF_SET_VALUE): cv.SCRIPT_SCHEMA, vol.Optional(CONF_MIN): cv.template, vol.Optional(CONF_MAX): cv.template, + vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), } ) @@ -154,15 +154,15 @@ class TemplateNumber(TemplateEntity, NumberEntity): super().__init__(hass, config=config, unique_id=unique_id) assert self._attr_name is not None self._value_template = config[CONF_STATE] - self._command_set_value = ( - Script(hass, config[CONF_SET_VALUE], self._attr_name, DOMAIN) - if config.get(CONF_SET_VALUE, None) is not None - else None + self._command_set_value = Script( + hass, config[CONF_SET_VALUE], self._attr_name, DOMAIN ) + self._step_template = config[CONF_STEP] self._min_value_template = config[CONF_MIN] self._max_value_template = config[CONF_MAX] self._attr_assumed_state = self._optimistic = config.get(CONF_OPTIMISTIC) + self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT) self._attr_native_step = DEFAULT_STEP self._attr_native_min_value = DEFAULT_MIN_VALUE self._attr_native_max_value = DEFAULT_MAX_VALUE @@ -234,6 +234,7 @@ class TriggerNumberEntity(TriggerEntity, NumberEntity): ) -> None: """Initialize the entity.""" super().__init__(hass, coordinator, config) + self._command_set_value = Script( hass, config[CONF_SET_VALUE], @@ -241,6 +242,8 @@ class TriggerNumberEntity(TriggerEntity, NumberEntity): DOMAIN, ) + self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT) + @property def native_value(self) -> float | None: """Return the currently selected option.""" diff --git a/homeassistant/components/template/strings.json b/homeassistant/components/template/strings.json index fa365bf3cfd..66864a027ba 100644 --- a/homeassistant/components/template/strings.json +++ b/homeassistant/components/template/strings.json @@ -1,6 +1,26 @@ { "config": { "step": { + "alarm_control_panel": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "value_template": "[%key:component::template::config::step::switch::data::value_template%]", + "name": "[%key:common::config_flow::data::name%]", + "disarm": "Disarm action", + "arm_away": "Arm away action", + "arm_custom_bypass": "Arm custom bypass action", + "arm_home": "Arm home action", + "arm_night": "Arm night action", + "arm_vacation": "Arm vacation action", + "trigger": "Trigger action", + "code_arm_required": "Code arm required", + "code_format": "Code format" + }, + "data_description": { + 
"device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "Template alarm control panel" + }, "binary_sensor": { "data": { "device_id": "[%key:common::config_flow::data::device%]", @@ -45,7 +65,8 @@ "step": "Step value", "set_value": "Actions on set value", "max": "Maximum value", - "min": "Minimum value" + "min": "Minimum value", + "unit_of_measurement": "[%key:component::template::config::step::sensor::data::unit_of_measurement%]" }, "data_description": { "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" @@ -82,9 +103,10 @@ "user": { "description": "This helper allows you to create helper entities that define their state using a template.", "menu_options": { + "alarm_control_panel": "Template an alarm control panel", "binary_sensor": "Template a binary sensor", "button": "Template a button", - "image": "Template a image", + "image": "Template an image", "number": "Template a number", "select": "Template a select", "sensor": "Template a sensor", @@ -110,6 +132,25 @@ }, "options": { "step": { + "alarm_control_panel": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "value_template": "[%key:component::template::config::step::switch::data::value_template%]", + "disarm": "[%key:component::template::config::step::alarm_control_panel::data::disarm%]", + "arm_away": "[%key:component::template::config::step::alarm_control_panel::data::arm_away%]", + "arm_custom_bypass": "[%key:component::template::config::step::alarm_control_panel::data::arm_custom_bypass%]", + "arm_home": "[%key:component::template::config::step::alarm_control_panel::data::arm_home%]", + "arm_night": "[%key:component::template::config::step::alarm_control_panel::data::arm_night%]", + "arm_vacation": "[%key:component::template::config::step::alarm_control_panel::data::arm_vacation%]", + "trigger": "[%key:component::template::config::step::alarm_control_panel::data::trigger%]", + "code_arm_required": "[%key:component::template::config::step::alarm_control_panel::data::code_arm_required%]", + "code_format": "[%key:component::template::config::step::alarm_control_panel::data::code_format%]" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "[%key:component::template::config::step::alarm_control_panel::title%]" + }, "binary_sensor": { "data": { "device_id": "[%key:common::config_flow::data::device%]", @@ -199,6 +240,13 @@ } }, "selector": { + "alarm_control_panel_code_format": { + "options": { + "no_code": "No code format", + "number": "Number", + "text": "Text" + } + }, "binary_sensor_device_class": { "options": { "battery": "[%key:component::binary_sensor::entity_component::battery::name%]", diff --git a/homeassistant/components/template/switch.py b/homeassistant/components/template/switch.py index 9145625f706..bddb51e5e67 100644 --- a/homeassistant/components/template/switch.py +++ b/homeassistant/components/template/switch.py @@ -64,8 +64,8 @@ SWITCH_CONFIG_SCHEMA = vol.Schema( { vol.Required(CONF_NAME): cv.template, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, - vol.Optional(CONF_TURN_ON): selector.ActionSelector(), - vol.Optional(CONF_TURN_OFF): selector.ActionSelector(), + vol.Optional(CONF_TURN_ON): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_TURN_OFF): cv.SCRIPT_SCHEMA, vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), } ) diff --git a/homeassistant/components/template/template_entity.py 
b/homeassistant/components/template/template_entity.py index a074f828284..f5b84b1ad7a 100644 --- a/homeassistant/components/template/template_entity.py +++ b/homeassistant/components/template/template_entity.py @@ -4,19 +4,22 @@ from __future__ import annotations from collections.abc import Callable, Mapping import contextlib -from functools import cached_property import itertools import logging -from typing import Any +from typing import Any, cast +from propcache import under_cached_property import voluptuous as vol +from homeassistant.components.blueprint import CONF_USE_BLUEPRINT from homeassistant.const import ( CONF_ENTITY_PICTURE_TEMPLATE, CONF_FRIENDLY_NAME, CONF_ICON, CONF_ICON_TEMPLATE, CONF_NAME, + CONF_PATH, + CONF_VARIABLES, STATE_UNKNOWN, ) from homeassistant.core import ( @@ -77,6 +80,7 @@ TEMPLATE_ENTITY_COMMON_SCHEMA = vol.Schema( { vol.Optional(CONF_ATTRIBUTES): vol.Schema({cv.string: cv.template}), vol.Optional(CONF_AVAILABILITY): cv.template, + vol.Optional(CONF_VARIABLES): cv.SCRIPT_VARIABLES_SCHEMA, } ).extend(TEMPLATE_ENTITY_BASE_SCHEMA.schema) @@ -244,7 +248,7 @@ class _TemplateAttribute: return -class TemplateEntity(Entity): +class TemplateEntity(Entity): # pylint: disable=hass-enforce-class-module """Entity that uses templates to calculate attributes.""" _attr_available = True @@ -287,12 +291,16 @@ class TemplateEntity(Entity): self._icon_template = icon_template self._entity_picture_template = entity_picture_template self._friendly_name_template = None + self._run_variables = {} + self._blueprint_inputs = None else: self._attribute_templates = config.get(CONF_ATTRIBUTES) self._availability_template = config.get(CONF_AVAILABILITY) self._icon_template = config.get(CONF_ICON) self._entity_picture_template = config.get(CONF_PICTURE) self._friendly_name_template = config.get(CONF_NAME) + self._run_variables = config.get(CONF_VARIABLES, {}) + self._blueprint_inputs = config.get("raw_blueprint_inputs") class DummyState(State): """None-state for template entities not yet added to the state machine.""" @@ -302,7 +310,7 @@ class TemplateEntity(Entity): super().__init__("unknown.unknown", STATE_UNKNOWN) self.entity_id = None # type: ignore[assignment] - @cached_property + @under_cached_property def name(self) -> str: """Name of this state.""" return "" @@ -331,6 +339,18 @@ class TemplateEntity(Entity): variables=variables, parse_result=False ) + @callback + def _render_variables(self) -> dict: + if isinstance(self._run_variables, dict): + return self._run_variables + + return self._run_variables.async_render( + self.hass, + { + "this": TemplateStateFromEntityId(self.hass, self.entity_id), + }, + ) + @callback def _update_available(self, result: str | TemplateError) -> None: if isinstance(result, TemplateError): @@ -360,6 +380,13 @@ class TemplateEntity(Entity): attribute_key, attribute_template, None, _update_attribute ) + @property + def referenced_blueprint(self) -> str | None: + """Return referenced blueprint or None.""" + if self._blueprint_inputs is None: + return None + return cast(str, self._blueprint_inputs[CONF_USE_BLUEPRINT][CONF_PATH]) + def add_template_attribute( self, attribute: str, @@ -459,7 +486,10 @@ class TemplateEntity(Entity): template_var_tups: list[TrackTemplate] = [] has_availability_template = False - variables = {"this": TemplateStateFromEntityId(self.hass, self.entity_id)} + variables = { + "this": TemplateStateFromEntityId(self.hass, self.entity_id), + **self._render_variables(), + } for template, attributes in self._template_attrs.items(): 
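The common schema now accepts a variables: block, and those variables are rendered and merged alongside `this` both for template tracking and for script runs. A simplified standalone sketch of the merge order (ScriptVariables rendering and TemplateStateFromEntityId are replaced by plain values; a reading of the change, not the integration's code):

def build_run_variables(entity_id: str, configured: dict | None, extra: dict | None) -> dict:
    variables = {"this": entity_id, **(configured or {})}
    variables.update(extra or {})
    return variables

merged = build_run_variables(
    "light.porch", {"brightness_step": 25}, {"trigger": {"id": "motion"}}
)
assert merged == {"this": "light.porch", "brightness_step": 25, "trigger": {"id": "motion"}}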
template_var_tup = TrackTemplate(template, variables) @@ -505,13 +535,15 @@ class TemplateEntity(Entity): ) if self._entity_picture_template is not None: self.add_template_attribute( - "_attr_entity_picture", self._entity_picture_template + "_attr_entity_picture", self._entity_picture_template, cv.string ) if ( self._friendly_name_template is not None and not self._friendly_name_template.is_static ): - self.add_template_attribute("_attr_name", self._friendly_name_template) + self.add_template_attribute( + "_attr_name", self._friendly_name_template, cv.string + ) @callback def async_start_preview( @@ -563,6 +595,7 @@ class TemplateEntity(Entity): await script.async_run( run_variables={ "this": TemplateStateFromEntityId(self.hass, self.entity_id), + **self._render_variables(), **run_variables, }, context=context, diff --git a/homeassistant/components/template/trigger_entity.py b/homeassistant/components/template/trigger_entity.py index 697cd827b9e..5130f332d5b 100644 --- a/homeassistant/components/template/trigger_entity.py +++ b/homeassistant/components/template/trigger_entity.py @@ -3,13 +3,16 @@ from __future__ import annotations from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.template import TemplateStateFromEntityId from homeassistant.helpers.trigger_template_entity import TriggerBaseEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import TriggerUpdateCoordinator -class TriggerEntity(TriggerBaseEntity, CoordinatorEntity[TriggerUpdateCoordinator]): +class TriggerEntity( # pylint: disable=hass-enforce-class-module + TriggerBaseEntity, CoordinatorEntity[TriggerUpdateCoordinator] +): """Template entity based on trigger data.""" def __init__( @@ -39,11 +42,11 @@ class TriggerEntity(TriggerBaseEntity, CoordinatorEntity[TriggerUpdateCoordinato def _process_data(self) -> None: """Process new data.""" - this = None - if state := self.hass.states.get(self.entity_id): - this = state.as_dict() run_variables = self.coordinator.data["run_variables"] - variables = {"this": this, **(run_variables or {})} + variables = { + "this": TemplateStateFromEntityId(self.hass, self.entity_id), + **(run_variables or {}), + } self._render_templates(variables) diff --git a/homeassistant/components/template/vacuum.py b/homeassistant/components/template/vacuum.py index 1d021bcb571..19029cc708b 100644 --- a/homeassistant/components/template/vacuum.py +++ b/homeassistant/components/template/vacuum.py @@ -17,13 +17,8 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.const import ( @@ -58,12 +53,12 @@ CONF_FAN_SPEED_TEMPLATE = "fan_speed_template" ENTITY_ID_FORMAT = VACUUM_DOMAIN + ".{}" _VALID_STATES = [ - STATE_CLEANING, - STATE_DOCKED, - STATE_PAUSED, - STATE_IDLE, - STATE_RETURNING, - STATE_ERROR, + VacuumActivity.CLEANING, + VacuumActivity.DOCKED, + VacuumActivity.PAUSED, + VacuumActivity.IDLE, + VacuumActivity.RETURNING, + VacuumActivity.ERROR, ] VACUUM_SCHEMA = vol.All( @@ -202,7 +197,7 @@ class TemplateVacuum(TemplateEntity, StateVacuumEntity): self._attr_fan_speed_list = config[CONF_FAN_SPEED_LIST] @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return the status of the vacuum cleaner.""" return self._state diff --git a/homeassistant/components/template/weather.py 
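The template vacuum above stops overriding state and instead returns a VacuumActivity from the activity property, with _VALID_STATES built from the enum. A standalone sketch of validating a rendered template string against that enum (reduced stand-in enum, not the component's parsing code):

from enum import StrEnum

class VacuumActivity(StrEnum):  # stand-in for homeassistant.components.vacuum.VacuumActivity
    CLEANING = "cleaning"
    DOCKED = "docked"
    IDLE = "idle"
    PAUSED = "paused"
    RETURNING = "returning"
    ERROR = "error"

def parse_activity(rendered: str) -> VacuumActivity | None:
    """Map a rendered template string onto an activity, None when invalid."""
    try:
        return VacuumActivity(rendered.lower())
    except ValueError:
        return None

assert parse_activity("Cleaning") is VacuumActivity.CLEANING
assert parse_activity("warp") is None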
b/homeassistant/components/template/weather.py index ec6d1f08dd3..7f597f1d9a8 100644 --- a/homeassistant/components/template/weather.py +++ b/homeassistant/components/template/weather.py @@ -92,7 +92,6 @@ CONF_WIND_SPEED_TEMPLATE = "wind_speed_template" CONF_WIND_BEARING_TEMPLATE = "wind_bearing_template" CONF_OZONE_TEMPLATE = "ozone_template" CONF_VISIBILITY_TEMPLATE = "visibility_template" -CONF_FORECAST_TEMPLATE = "forecast_template" CONF_FORECAST_DAILY_TEMPLATE = "forecast_daily_template" CONF_FORECAST_HOURLY_TEMPLATE = "forecast_hourly_template" CONF_FORECAST_TWICE_DAILY_TEMPLATE = "forecast_twice_daily_template" @@ -133,10 +132,7 @@ WEATHER_SCHEMA = vol.Schema( } ) -PLATFORM_SCHEMA = vol.All( - cv.deprecated(CONF_FORECAST_TEMPLATE), - WEATHER_PLATFORM_SCHEMA.extend(WEATHER_SCHEMA.schema), -) +PLATFORM_SCHEMA = WEATHER_PLATFORM_SCHEMA.extend(WEATHER_SCHEMA.schema) async def async_setup_platform( diff --git a/homeassistant/components/tensorflow/image_processing.py b/homeassistant/components/tensorflow/image_processing.py index f13c0b24d0b..f4a3a7bfe07 100644 --- a/homeassistant/components/tensorflow/image_processing.py +++ b/homeassistant/components/tensorflow/image_processing.py @@ -324,13 +324,13 @@ class TensorFlowImageProcessor(ImageProcessingEntity): # Draw detected objects for instance in values: - label = "{} {:.1f}%".format(category, instance["score"]) + label = f"{category} {instance['score']:.1f}%" draw_box( draw, instance["box"], img_width, img_height, label, (255, 255, 0) ) for path in paths: - _LOGGER.info("Saving results image to %s", path) + _LOGGER.debug("Saving results image to %s", path) os.makedirs(os.path.dirname(path), exist_ok=True) img.save(path) diff --git a/homeassistant/components/tensorflow/manifest.json b/homeassistant/components/tensorflow/manifest.json index 941ec130db2..16de386b15d 100644 --- a/homeassistant/components/tensorflow/manifest.json +++ b/homeassistant/components/tensorflow/manifest.json @@ -5,11 +5,12 @@ "documentation": "https://www.home-assistant.io/integrations/tensorflow", "iot_class": "local_polling", "loggers": ["tensorflow"], + "quality_scale": "legacy", "requirements": [ "tensorflow==2.5.0", "tf-models-official==2.5.0", "pycocotools==2.0.6", - "numpy==1.26.0", - "Pillow==10.4.0" + "numpy==2.2.0", + "Pillow==11.0.0" ] } diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 47a2a9173a5..bc837aa4cac 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -3,8 +3,14 @@ import asyncio from typing import Final +from aiohttp.client_exceptions import ClientResponseError import jwt -from tesla_fleet_api import EnergySpecific, TeslaFleetApi, VehicleSpecific +from tesla_fleet_api import ( + EnergySpecific, + TeslaFleetApi, + VehicleSigned, + VehicleSpecific, +) from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( InvalidRegion, @@ -28,7 +34,6 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo -from .config_flow import OAuth2FlowHandler from .const import DOMAIN, LOGGER, MODELS from .coordinator import ( TeslaFleetEnergySiteInfoCoordinator, @@ -36,9 +41,20 @@ from .coordinator import ( TeslaFleetVehicleDataCoordinator, ) from .models import TeslaFleetData, TeslaFleetEnergyData, TeslaFleetVehicleData -from .oauth import TeslaSystemImplementation -PLATFORMS: Final = 
[Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, Platform.SENSOR] +PLATFORMS: Final = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CLIMATE, + Platform.COVER, + Platform.DEVICE_TRACKER, + Platform.LOCK, + Platform.MEDIA_PLAYER, + Platform.NUMBER, + Platform.SELECT, + Platform.SENSOR, + Platform.SWITCH, +] type TeslaFleetConfigEntry = ConfigEntry[TeslaFleetData] @@ -52,13 +68,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - session = async_get_clientsession(hass) token = jwt.decode(access_token, options={"verify_signature": False}) - scopes = token["scp"] - region = token["ou_code"].lower() - - OAuth2FlowHandler.async_register_implementation( - hass, - TeslaSystemImplementation(hass), - ) + scopes: list[Scope] = [Scope(s) for s in token["scp"]] + region: str = token["ou_code"].lower() implementation = await async_get_config_entry_implementation(hass, entry) oauth_session = OAuth2Session(hass, entry, implementation) @@ -66,7 +77,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - async def _refresh_token() -> str: async with refresh_lock: - await oauth_session.async_ensure_token_valid() + try: + await oauth_session.async_ensure_token_valid() + except ClientResponseError as e: + if e.status == 401: + raise ConfigEntryAuthFailed from e + raise ConfigEntryNotReady from e token: str = oauth_session.token[CONF_ACCESS_TOKEN] return token @@ -87,7 +103,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - raise ConfigEntryAuthFailed from e except InvalidRegion: try: - LOGGER.info("Region is invalid, trying to find the correct region") + LOGGER.warning("Region is invalid, trying to find the correct region") await tesla.find_server() try: products = (await tesla.products())["response"] @@ -108,7 +124,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] - api = VehicleSpecific(tesla.vehicle, vin) + signing = product["command_signing"] == "required" + if signing: + if not tesla.private_key: + await tesla.get_private_key(hass.config.path("tesla_fleet.key")) + api = VehicleSigned(tesla.vehicle, vin) + else: + api = VehicleSpecific(tesla.vehicle, vin) coordinator = TeslaFleetVehicleDataCoordinator(hass, api, product) await coordinator.async_config_entry_first_refresh() @@ -127,6 +149,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - coordinator=coordinator, vin=vin, device=device, + signing=signing, ) ) elif "energy_site_id" in product and hasattr(tesla, "energy"): diff --git a/homeassistant/components/tesla_fleet/binary_sensor.py b/homeassistant/components/tesla_fleet/binary_sensor.py index 2469092513a..b92ef9233d1 100644 --- a/homeassistant/components/tesla_fleet/binary_sensor.py +++ b/homeassistant/components/tesla_fleet/binary_sensor.py @@ -165,6 +165,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslaFleetBinarySensorEntityDescription, ...] = ( ENERGY_LIVE_DESCRIPTIONS: tuple[BinarySensorEntityDescription, ...] 
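Setup above reads the granted scopes and the region straight out of the OAuth access token payload, without verifying the signature (the token was just issued by the OAuth session). A standalone sketch using PyJWT with a locally signed fake token; the scp/ou_code claim names come from the diff, everything else is made up:

import jwt  # PyJWT

fake_token = jwt.encode(
    {"sub": "abc123", "scp": ["vehicle_device_data", "vehicle_cmds"], "ou_code": "NA"},
    "not-a-real-secret",
    algorithm="HS256",
)

payload = jwt.decode(fake_token, options={"verify_signature": False})
scopes = list(payload["scp"])        # the real code wraps each entry in Scope(...)
region = payload["ou_code"].lower()  # e.g. "na" or "eu"
assert scopes == ["vehicle_device_data", "vehicle_cmds"]
assert region == "na"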
= ( BinarySensorEntityDescription(key="backup_capable"), BinarySensorEntityDescription(key="grid_services_active"), + BinarySensorEntityDescription(key="storm_mode_active"), ) diff --git a/homeassistant/components/tesla_fleet/button.py b/homeassistant/components/tesla_fleet/button.py new file mode 100644 index 00000000000..aea0f91a97c --- /dev/null +++ b/homeassistant/components/tesla_fleet/button.py @@ -0,0 +1,96 @@ +"""Button platform for Tesla Fleet integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from tesla_fleet_api.const import Scope + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TeslaFleetConfigEntry +from .entity import TeslaFleetVehicleEntity +from .helpers import handle_vehicle_command +from .models import TeslaFleetVehicleData + +PARALLEL_UPDATES = 0 + + +async def do_nothing() -> dict[str, dict[str, bool]]: + """Do nothing with a positive result.""" + return {"response": {"result": True}} + + +@dataclass(frozen=True, kw_only=True) +class TeslaFleetButtonEntityDescription(ButtonEntityDescription): + """Describes a TeslaFleet Button entity.""" + + func: Callable[[TeslaFleetButtonEntity], Awaitable[Any]] + + +DESCRIPTIONS: tuple[TeslaFleetButtonEntityDescription, ...] = ( + TeslaFleetButtonEntityDescription( + key="wake", func=lambda self: do_nothing() + ), # Every button runs wakeup, so func does nothing + TeslaFleetButtonEntityDescription( + key="flash_lights", func=lambda self: self.api.flash_lights() + ), + TeslaFleetButtonEntityDescription( + key="honk", func=lambda self: self.api.honk_horn() + ), + TeslaFleetButtonEntityDescription( + key="enable_keyless_driving", func=lambda self: self.api.remote_start_drive() + ), + TeslaFleetButtonEntityDescription( + key="boombox", func=lambda self: self.api.remote_boombox(0) + ), + TeslaFleetButtonEntityDescription( + key="homelink", + func=lambda self: self.api.trigger_homelink( + lat=self.coordinator.data["drive_state_latitude"], + lon=self.coordinator.data["drive_state_longitude"], + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslaFleetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the TeslaFleet Button platform from a config entry.""" + + async_add_entities( + TeslaFleetButtonEntity(vehicle, description) + for vehicle in entry.runtime_data.vehicles + for description in DESCRIPTIONS + if Scope.VEHICLE_CMDS in entry.runtime_data.scopes + ) + + +class TeslaFleetButtonEntity(TeslaFleetVehicleEntity, ButtonEntity): + """Base class for TeslaFleet buttons.""" + + entity_description: TeslaFleetButtonEntityDescription + + def __init__( + self, + data: TeslaFleetVehicleData, + description: TeslaFleetButtonEntityDescription, + ) -> None: + """Initialize the button.""" + self.entity_description = description + super().__init__(data, description.key) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + + async def async_press(self) -> None: + """Press the button.""" + await self.wake_up_if_asleep() + await handle_vehicle_command(self.entity_description.func(self)) diff --git a/homeassistant/components/tesla_fleet/climate.py b/homeassistant/components/tesla_fleet/climate.py new file mode 100644 index 00000000000..06e9c9d7c64 --- /dev/null +++ 
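Every button press above wakes the vehicle first and then awaits the per-button coroutine; the dedicated wake button passes the do_nothing coroutine, so waking is its entire effect. A standalone asyncio sketch of that flow (plain functions instead of the integration's entity and API classes):

import asyncio
from collections.abc import Awaitable, Callable

async def wake_up_if_asleep() -> None:
    print("wake command sent")

async def do_nothing() -> dict:
    return {"response": {"result": True}}

async def flash_lights() -> dict:
    return {"response": {"result": True}}

async def press(func: Callable[[], Awaitable[dict]]) -> None:
    await wake_up_if_asleep()  # always runs, for every button
    result = await func()      # the button-specific command
    assert result["response"]["result"] is True

asyncio.run(press(do_nothing))    # the "wake" button: waking is the whole action
asyncio.run(press(flash_lights))  # any other button: wake, then send its command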
b/homeassistant/components/tesla_fleet/climate.py @@ -0,0 +1,329 @@ +"""Climate platform for Tesla Fleet integration.""" + +from __future__ import annotations + +from itertools import chain +from typing import Any, cast + +from tesla_fleet_api.const import CabinOverheatProtectionTemp, Scope + +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ( + ATTR_TEMPERATURE, + PRECISION_HALVES, + PRECISION_WHOLE, + UnitOfTemperature, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TeslaFleetConfigEntry +from .const import DOMAIN, TeslaFleetClimateSide +from .entity import TeslaFleetVehicleEntity +from .helpers import handle_vehicle_command +from .models import TeslaFleetVehicleData + +DEFAULT_MIN_TEMP = 15 +DEFAULT_MAX_TEMP = 28 + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslaFleetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Tesla Fleet Climate platform from a config entry.""" + + async_add_entities( + chain( + ( + TeslaFleetClimateEntity( + vehicle, TeslaFleetClimateSide.DRIVER, entry.runtime_data.scopes + ) + for vehicle in entry.runtime_data.vehicles + ), + ( + TeslaFleetCabinOverheatProtectionEntity( + vehicle, entry.runtime_data.scopes + ) + for vehicle in entry.runtime_data.vehicles + ), + ) + ) + + +class TeslaFleetClimateEntity(TeslaFleetVehicleEntity, ClimateEntity): + """Tesla Fleet vehicle climate entity.""" + + _attr_precision = PRECISION_HALVES + + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_hvac_modes = [HVACMode.HEAT_COOL, HVACMode.OFF] + _attr_supported_features = ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.PRESET_MODE + ) + _attr_preset_modes = ["off", "keep", "dog", "camp"] + + def __init__( + self, + data: TeslaFleetVehicleData, + side: TeslaFleetClimateSide, + scopes: Scope, + ) -> None: + """Initialize the climate.""" + + self.read_only = Scope.VEHICLE_CMDS not in scopes + + if self.read_only: + self._attr_supported_features = ClimateEntityFeature(0) + self._attr_hvac_modes = [] + + super().__init__( + data, + side, + ) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + value = self.get("climate_state_is_climate_on") + if value is None: + self._attr_hvac_mode = None + elif value: + self._attr_hvac_mode = HVACMode.HEAT_COOL + else: + self._attr_hvac_mode = HVACMode.OFF + + # If not scoped, prevent the user from changing the HVAC mode by making it the only option + if self._attr_hvac_mode and self.read_only: + self._attr_hvac_modes = [self._attr_hvac_mode] + + self._attr_current_temperature = self.get("climate_state_inside_temp") + self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") + self._attr_preset_mode = self.get("climate_state_climate_keeper_mode") + self._attr_min_temp = cast( + float, self.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP) + ) + self._attr_max_temp = cast( + float, self.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP) + ) + + async def async_turn_on(self) -> None: + """Set the climate state to on.""" + + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.auto_conditioning_start()) + + self._attr_hvac_mode = HVACMode.HEAT_COOL + 
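When the token lacks Scope.VEHICLE_CMDS the climate entity above becomes read only: no supported features, and the HVAC mode list is collapsed to whatever the car currently reports so the UI cannot offer a change it is not allowed to send. A small sketch of that mode-list logic (plain strings instead of HVACMode members):

def allowed_hvac_modes(read_only: bool, current_mode: str | None) -> list[str]:
    modes = ["heat_cool", "off"]
    if not read_only:
        return modes
    return [current_mode] if current_mode else []

assert allowed_hvac_modes(False, "off") == ["heat_cool", "off"]
assert allowed_hvac_modes(True, "heat_cool") == ["heat_cool"]  # display only
assert allowed_hvac_modes(True, None) == []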
self.async_write_ha_state() + + async def async_turn_off(self) -> None: + """Set the climate state to off.""" + + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.auto_conditioning_stop()) + + self._attr_hvac_mode = HVACMode.OFF + self._attr_preset_mode = self._attr_preset_modes[0] + self.async_write_ha_state() + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set the climate temperature.""" + + if ATTR_TEMPERATURE not in kwargs: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="missing_temperature", + ) + + temp = kwargs[ATTR_TEMPERATURE] + await self.wake_up_if_asleep() + await handle_vehicle_command( + self.api.set_temps( + driver_temp=temp, + passenger_temp=temp, + ) + ) + self._attr_target_temperature = temp + + if mode := kwargs.get(ATTR_HVAC_MODE): + # Set HVAC mode will call write_ha_state + await self.async_set_hvac_mode(mode) + else: + self.async_write_ha_state() + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set the climate mode and state.""" + if hvac_mode not in self.hvac_modes: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_hvac_mode", + translation_placeholders={"hvac_mode": hvac_mode}, + ) + if hvac_mode == HVACMode.OFF: + await self.async_turn_off() + else: + await self.async_turn_on() + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set the climate preset mode.""" + await self.wake_up_if_asleep() + await handle_vehicle_command( + self.api.set_climate_keeper_mode( + climate_keeper_mode=self._attr_preset_modes.index(preset_mode) + ) + ) + self._attr_preset_mode = preset_mode + if preset_mode != self._attr_preset_modes[0]: + self._attr_hvac_mode = HVACMode.HEAT_COOL + self.async_write_ha_state() + + +COP_MODES = { + "Off": HVACMode.OFF, + "On": HVACMode.COOL, + "FanOnly": HVACMode.FAN_ONLY, +} + +# String to celsius +COP_LEVELS = { + "Low": 30, + "Medium": 35, + "High": 40, +} + +# Celsius to IntEnum +TEMP_LEVELS = { + 30: CabinOverheatProtectionTemp.LOW, + 35: CabinOverheatProtectionTemp.MEDIUM, + 40: CabinOverheatProtectionTemp.HIGH, +} + + +class TeslaFleetCabinOverheatProtectionEntity(TeslaFleetVehicleEntity, ClimateEntity): + """Tesla Fleet vehicle cabin overheat protection entity.""" + + _attr_precision = PRECISION_WHOLE + _attr_target_temperature_step = 5 + _attr_min_temp = COP_LEVELS["Low"] + _attr_max_temp = COP_LEVELS["High"] + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_hvac_modes = list(COP_MODES.values()) + + _attr_entity_registry_enabled_default = False + + def __init__( + self, + data: TeslaFleetVehicleData, + scopes: Scope, + ) -> None: + """Initialize the cabin overheat climate entity.""" + + # Scopes + self.read_only = Scope.VEHICLE_CMDS not in scopes + + # Supported Features + if self.read_only: + self._attr_supported_features = ClimateEntityFeature(0) + self._attr_hvac_modes = [] + else: + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + + super().__init__(data, "climate_state_cabin_overheat_protection") + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + + if (state := self.get("climate_state_cabin_overheat_protection")) is None: + self._attr_hvac_mode = None + else: + self._attr_hvac_mode = COP_MODES.get(state) + + # If not scoped, prevent the user from changing the HVAC mode by making it the only option + if self._attr_hvac_mode and self.read_only: + self._attr_hvac_modes = 
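Cabin overheat protection only has three discrete set points; the string the API reports maps to °C via COP_LEVELS, and the °C a user picks maps back to the API enum via TEMP_LEVELS, with anything else rejected. A sketch of that round trip (plain strings stand in for CabinOverheatProtectionTemp members):

COP_LEVELS = {"Low": 30, "Medium": 35, "High": 40}   # reported string -> °C
TEMP_LEVELS = {30: "LOW", 35: "MEDIUM", 40: "HIGH"}  # °C -> enum member name

def target_temperature(reported_level: str) -> int | None:
    return COP_LEVELS.get(reported_level)

def cop_enum_for(temp: float) -> str | None:
    return TEMP_LEVELS.get(temp)  # None triggers ServiceValidationError in the entity

assert target_temperature("Medium") == 35
assert cop_enum_for(40) == "HIGH"
assert cop_enum_for(37) is None  # only 30/35/40 °C are accepted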
[self._attr_hvac_mode] + + if (level := self.get("climate_state_cop_activation_temperature")) is None: + self._attr_target_temperature = None + else: + self._attr_target_temperature = COP_LEVELS.get(level) + + self._attr_current_temperature = self.get("climate_state_inside_temp") + + @property + def supported_features(self) -> ClimateEntityFeature: + """Return the list of supported features.""" + if not self.read_only and self.get( + "vehicle_config_cop_user_set_temp_supported" + ): + return ( + self._attr_supported_features | ClimateEntityFeature.TARGET_TEMPERATURE + ) + return self._attr_supported_features + + async def async_turn_on(self) -> None: + """Set the climate state to on.""" + await self.async_set_hvac_mode(HVACMode.COOL) + + async def async_turn_off(self) -> None: + """Set the climate state to off.""" + await self.async_set_hvac_mode(HVACMode.OFF) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set the climate temperature.""" + + if ATTR_TEMPERATURE not in kwargs: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="missing_temperature", + ) + + temp = kwargs[ATTR_TEMPERATURE] + if (cop_mode := TEMP_LEVELS.get(temp)) is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_cop_temp", + ) + + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.set_cop_temp(cop_mode)) + self._attr_target_temperature = temp + + if mode := kwargs.get(ATTR_HVAC_MODE): + await self._async_set_cop(mode) + + self.async_write_ha_state() + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set the climate mode and state.""" + await self.wake_up_if_asleep() + await self._async_set_cop(hvac_mode) + self.async_write_ha_state() + + async def _async_set_cop(self, hvac_mode: HVACMode) -> None: + if hvac_mode == HVACMode.OFF: + await handle_vehicle_command( + self.api.set_cabin_overheat_protection(on=False, fan_only=False) + ) + elif hvac_mode == HVACMode.COOL: + await handle_vehicle_command( + self.api.set_cabin_overheat_protection(on=True, fan_only=False) + ) + elif hvac_mode == HVACMode.FAN_ONLY: + await handle_vehicle_command( + self.api.set_cabin_overheat_protection(on=True, fan_only=True) + ) + + self._attr_hvac_mode = hvac_mode diff --git a/homeassistant/components/tesla_fleet/config_flow.py b/homeassistant/components/tesla_fleet/config_flow.py index 0ffdca1aec6..feeb5e74ca6 100644 --- a/homeassistant/components/tesla_fleet/config_flow.py +++ b/homeassistant/components/tesla_fleet/config_flow.py @@ -8,11 +8,10 @@ from typing import Any import jwt -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN, LOGGER -from .oauth import TeslaSystemImplementation class OAuth2FlowHandler( @@ -21,7 +20,6 @@ class OAuth2FlowHandler( """Config flow to handle Tesla Fleet API OAuth2 authentication.""" DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -32,11 +30,6 @@ class OAuth2FlowHandler( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow start.""" - self.async_register_implementation( - self.hass, - TeslaSystemImplementation(self.hass), - ) - return await super().async_step_user() async def async_oauth_create_entry( @@ -50,32 +43,19 @@ class OAuth2FlowHandler( ) uid = token["sub"] - if not self.reauth_entry: - await 
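_async_set_cop above collapses the three HVAC modes onto the two flags the vehicle API takes. A minimal sketch of that dispatch (mode strings stand in for HVACMode members):

def cop_flags(hvac_mode: str) -> tuple[bool, bool]:
    """Return the (on, fan_only) pair passed to set_cabin_overheat_protection."""
    return {
        "off": (False, False),
        "cool": (True, False),
        "fan_only": (True, True),
    }[hvac_mode]

assert cop_flags("off") == (False, False)
assert cop_flags("cool") == (True, False)
assert cop_flags("fan_only") == (True, True)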
self.async_set_unique_id(uid) - self._abort_if_unique_id_configured() - - return self.async_create_entry(title=uid, data=data) - - if self.reauth_entry.unique_id == uid: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data=data, + await self.async_set_unique_id(uid) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="reauth_account_mismatch") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - return self.async_abort( - reason="reauth_account_mismatch", - description_placeholders={"title": self.reauth_entry.title}, - ) + self._abort_if_unique_id_configured() + return self.async_create_entry(title=uid, data=data) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -83,5 +63,8 @@ class OAuth2FlowHandler( ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - return self.async_show_form(step_id="reauth_confirm") + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={"name": "Tesla Fleet"}, + ) return await self.async_step_user() diff --git a/homeassistant/components/tesla_fleet/const.py b/homeassistant/components/tesla_fleet/const.py index 081225c296c..9b3baf49bfb 100644 --- a/homeassistant/components/tesla_fleet/const.py +++ b/homeassistant/components/tesla_fleet/const.py @@ -21,6 +21,7 @@ SCOPES = [ Scope.OPENID, Scope.OFFLINE_ACCESS, Scope.VEHICLE_DEVICE_DATA, + Scope.VEHICLE_LOCATION, Scope.VEHICLE_CMDS, Scope.VEHICLE_CHARGING_CMDS, Scope.ENERGY_DEVICE_DATA, @@ -32,6 +33,8 @@ MODELS = { "3": "Model 3", "X": "Model X", "Y": "Model Y", + "C": "Cybertruck", + "T": "Tesla Semi", } @@ -41,3 +44,10 @@ class TeslaFleetState(StrEnum): ONLINE = "online" ASLEEP = "asleep" OFFLINE = "offline" + + +class TeslaFleetClimateSide(StrEnum): + """Tesla Fleet Climate Keeper Modes.""" + + DRIVER = "driver_temp" + PASSENGER = "passenger_temp" diff --git a/homeassistant/components/tesla_fleet/cover.py b/homeassistant/components/tesla_fleet/cover.py new file mode 100644 index 00000000000..f270734424f --- /dev/null +++ b/homeassistant/components/tesla_fleet/cover.py @@ -0,0 +1,247 @@ +"""Cover platform for Tesla Fleet integration.""" + +from __future__ import annotations + +from typing import Any + +from tesla_fleet_api.const import Scope, SunRoofCommand, Trunk, WindowCommand + +from homeassistant.components.cover import ( + CoverDeviceClass, + CoverEntity, + CoverEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import TeslaFleetConfigEntry +from .entity import TeslaFleetVehicleEntity +from .helpers import handle_vehicle_command +from .models import TeslaFleetVehicleData + +OPEN = 1 +CLOSED = 0 + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslaFleetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the TeslaFleet cover platform from a config entry.""" + + async_add_entities( + klass(vehicle, entry.runtime_data.scopes) + for (klass) in ( + TeslaFleetWindowEntity, + TeslaFleetChargePortEntity, + TeslaFleetFrontTrunkEntity, + TeslaFleetRearTrunkEntity, + TeslaFleetSunroofEntity, + ) + for vehicle in entry.runtime_data.vehicles + ) + + +class TeslaFleetWindowEntity(TeslaFleetVehicleEntity, CoverEntity): + """Cover entity for the windows.""" + + _attr_device_class = CoverDeviceClass.WINDOW + + def __init__(self, data: TeslaFleetVehicleData, scopes: list[Scope]) -> None: + """Initialize the cover.""" + super().__init__(data, "windows") + self.scoped = Scope.VEHICLE_CMDS in scopes + self._attr_supported_features = ( + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + ) + if not self.scoped: + self._attr_supported_features = CoverEntityFeature(0) + + def _async_update_attrs(self) -> None: + """Update the entity attributes.""" + fd = self.get("vehicle_state_fd_window") + fp = self.get("vehicle_state_fp_window") + rd = self.get("vehicle_state_rd_window") + rp = self.get("vehicle_state_rp_window") + + # Any open set to open + if OPEN in (fd, fp, rd, rp): + self._attr_is_closed = False + # All closed set to closed + elif CLOSED == fd == fp == rd == rp: + self._attr_is_closed = True + # Otherwise, set to unknown + else: + self._attr_is_closed = None + + async def async_open_cover(self, **kwargs: Any) -> None: + """Vent windows.""" + await self.wake_up_if_asleep() + await handle_vehicle_command( + self.api.window_control(command=WindowCommand.VENT) + ) + self._attr_is_closed = False + self.async_write_ha_state() + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close windows.""" + await self.wake_up_if_asleep() + await handle_vehicle_command( + self.api.window_control(command=WindowCommand.CLOSE) + ) + self._attr_is_closed = True + self.async_write_ha_state() + + +class TeslaFleetChargePortEntity(TeslaFleetVehicleEntity, CoverEntity): + """Cover entity for the charge port.""" + + _attr_device_class = CoverDeviceClass.DOOR + + def __init__(self, vehicle: TeslaFleetVehicleData, scopes: list[Scope]) -> None: + """Initialize the cover.""" + super().__init__(vehicle, "charge_state_charge_port_door_open") + self.scoped = any( + scope in scopes + for scope in (Scope.VEHICLE_CMDS, Scope.VEHICLE_CHARGING_CMDS) + ) + self._attr_supported_features = ( + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + ) + if not self.scoped: + self._attr_supported_features = CoverEntityFeature(0) + + def _async_update_attrs(self) -> None: + """Update the entity attributes.""" + self._attr_is_closed = not self._value + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open charge port.""" + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.charge_port_door_open()) + self._attr_is_closed = False + self.async_write_ha_state() + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close charge port.""" + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.charge_port_door_close()) + self._attr_is_closed = True + self.async_write_ha_state() + + +class 
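The window cover above aggregates four window sensors into one is_closed value: any open window means open, all closed means closed, anything else (missing telemetry) means unknown. A standalone sketch of that reduction:

OPEN, CLOSED = 1, 0

def windows_closed(
    fd: int | None, fp: int | None, rd: int | None, rp: int | None
) -> bool | None:
    if OPEN in (fd, fp, rd, rp):
        return False  # at least one window is open
    if fd == fp == rd == rp == CLOSED:
        return True   # every window reports closed
    return None       # partial or unknown telemetry

assert windows_closed(0, 0, 0, 0) is True
assert windows_closed(0, 1, 0, 0) is False
assert windows_closed(0, None, 0, 0) is None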
TeslaFleetFrontTrunkEntity(TeslaFleetVehicleEntity, CoverEntity): + """Cover entity for the front trunk.""" + + _attr_device_class = CoverDeviceClass.DOOR + + def __init__(self, vehicle: TeslaFleetVehicleData, scopes: list[Scope]) -> None: + """Initialize the cover.""" + super().__init__(vehicle, "vehicle_state_ft") + + self.scoped = Scope.VEHICLE_CMDS in scopes + self._attr_supported_features = CoverEntityFeature.OPEN + if not self.scoped: + self._attr_supported_features = CoverEntityFeature(0) + + def _async_update_attrs(self) -> None: + """Update the entity attributes.""" + self._attr_is_closed = self._value == CLOSED + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open front trunk.""" + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.actuate_trunk(Trunk.FRONT)) + self._attr_is_closed = False + self.async_write_ha_state() + + +class TeslaFleetRearTrunkEntity(TeslaFleetVehicleEntity, CoverEntity): + """Cover entity for the rear trunk.""" + + _attr_device_class = CoverDeviceClass.DOOR + + def __init__(self, vehicle: TeslaFleetVehicleData, scopes: list[Scope]) -> None: + """Initialize the cover.""" + super().__init__(vehicle, "vehicle_state_rt") + + self.scoped = Scope.VEHICLE_CMDS in scopes + self._attr_supported_features = ( + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + ) + if not self.scoped: + self._attr_supported_features = CoverEntityFeature(0) + + def _async_update_attrs(self) -> None: + """Update the entity attributes.""" + self._attr_is_closed = self._value == CLOSED + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open rear trunk.""" + if self.is_closed is not False: + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.actuate_trunk(Trunk.REAR)) + self._attr_is_closed = False + self.async_write_ha_state() + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close rear trunk.""" + if self.is_closed is not True: + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.actuate_trunk(Trunk.REAR)) + self._attr_is_closed = True + self.async_write_ha_state() + + +class TeslaFleetSunroofEntity(TeslaFleetVehicleEntity, CoverEntity): + """Cover entity for the sunroof.""" + + _attr_device_class = CoverDeviceClass.WINDOW + _attr_supported_features = ( + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE | CoverEntityFeature.STOP + ) + _attr_entity_registry_enabled_default = False + + def __init__(self, vehicle: TeslaFleetVehicleData, scopes: list[Scope]) -> None: + """Initialize the sensor.""" + super().__init__(vehicle, "vehicle_state_sun_roof_state") + + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = CoverEntityFeature(0) + + def _async_update_attrs(self) -> None: + """Update the entity attributes.""" + value = self._value + if value in (None, "unknown"): + self._attr_is_closed = None + else: + self._attr_is_closed = value == "closed" + + self._attr_current_cover_position = self.get( + "vehicle_state_sun_roof_percent_open" + ) + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open sunroof.""" + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.sun_roof_control(SunRoofCommand.VENT)) + self._attr_is_closed = False + self.async_write_ha_state() + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close sunroof.""" + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.sun_roof_control(SunRoofCommand.CLOSE)) + self._attr_is_closed = True + 
self.async_write_ha_state() + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Close sunroof.""" + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.sun_roof_control(SunRoofCommand.STOP)) + self._attr_is_closed = False + self.async_write_ha_state() diff --git a/homeassistant/components/tesla_fleet/device_tracker.py b/homeassistant/components/tesla_fleet/device_tracker.py index 1d396286d7c..d6dcef895a6 100644 --- a/homeassistant/components/tesla_fleet/device_tracker.py +++ b/homeassistant/components/tesla_fleet/device_tracker.py @@ -2,9 +2,9 @@ from __future__ import annotations -from homeassistant.components.device_tracker import SourceType from homeassistant.components.device_tracker.config_entry import TrackerEntity from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_HOME from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity @@ -33,9 +33,6 @@ class TeslaFleetDeviceTrackerEntity( ): """Base class for Tesla Fleet device tracker entities.""" - _attr_latitude: float | None = None - _attr_longitude: float | None = None - def __init__( self, vehicle: TeslaFleetVehicleData, @@ -54,21 +51,6 @@ class TeslaFleetDeviceTrackerEntity( self._attr_latitude = state.attributes.get("latitude") self._attr_longitude = state.attributes.get("longitude") - @property - def latitude(self) -> float | None: - """Return latitude value of the device.""" - return self._attr_latitude - - @property - def longitude(self) -> float | None: - """Return longitude value of the device.""" - return self._attr_longitude - - @property - def source_type(self) -> SourceType | str: - """Return the source type of the device tracker.""" - return SourceType.GPS - class TeslaFleetDeviceTrackerLocationEntity(TeslaFleetDeviceTrackerEntity): """Vehicle Location device tracker Class.""" @@ -103,4 +85,7 @@ class TeslaFleetDeviceTrackerRouteEntity(TeslaFleetDeviceTrackerEntity): @property def location_name(self) -> str | None: """Return a location name for the current location of the device.""" - return self.get("drive_state_active_route_destination") + location = self.get("drive_state_active_route_destination") + if location == "Home": + return STATE_HOME + return location diff --git a/homeassistant/components/tesla_fleet/entity.py b/homeassistant/components/tesla_fleet/entity.py index c853bb798b5..0ee41b5e322 100644 --- a/homeassistant/components/tesla_fleet/entity.py +++ b/homeassistant/components/tesla_fleet/entity.py @@ -4,7 +4,9 @@ from abc import abstractmethod from typing import Any from tesla_fleet_api import EnergySpecific, VehicleSpecific +from tesla_fleet_api.const import Scope +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -14,6 +16,7 @@ from .coordinator import ( TeslaFleetEnergySiteLiveCoordinator, TeslaFleetVehicleDataCoordinator, ) +from .helpers import wake_up_vehicle from .models import TeslaFleetEnergyData, TeslaFleetVehicleData @@ -27,6 +30,8 @@ class TeslaFleetEntity( """Parent class for all TeslaFleet entities.""" _attr_has_entity_name = True + read_only: bool + scoped: bool def __init__( self, @@ -57,6 +62,12 @@ class TeslaFleetEntity( """Return a specific value from coordinator data.""" return self.coordinator.data.get(key, default) + def get_number(self, key: str, default: 
float) -> float:
+        """Return a specific number from coordinator data."""
+        if isinstance(value := self.coordinator.data.get(key), (int, float)):
+            return value
+        return default
+
     @property
     def is_none(self) -> bool:
         """Return if the value is a literal None."""
@@ -76,6 +87,14 @@ class TeslaFleetEntity(
     def _async_update_attrs(self) -> None:
         """Update the attributes of the entity."""
 
+    def raise_for_read_only(self, scope: Scope) -> None:
+        """Raise an error if a scope is not available."""
+        if not self.scoped:
+            raise ServiceValidationError(
+                translation_domain=DOMAIN,
+                translation_key=f"missing_scope_{scope.name.lower()}",
+            )
+
 
 class TeslaFleetVehicleEntity(TeslaFleetEntity):
     """Parent class for TeslaFleet Vehicle entities."""
@@ -100,6 +119,10 @@ class TeslaFleetVehicleEntity(TeslaFleetEntity):
         """Return a specific value from coordinator data."""
         return self.coordinator.data.get(self.key)
 
+    async def wake_up_if_asleep(self) -> None:
+        """Wake up the vehicle if it's asleep."""
+        await wake_up_vehicle(self.vehicle)
+
 
 class TeslaFleetEnergyLiveEntity(TeslaFleetEntity):
     """Parent class for TeslaFleet Energy Site Live entities."""
diff --git a/homeassistant/components/tesla_fleet/helpers.py b/homeassistant/components/tesla_fleet/helpers.py
new file mode 100644
index 00000000000..d554ccce70c
--- /dev/null
+++ b/homeassistant/components/tesla_fleet/helpers.py
@@ -0,0 +1,80 @@
+"""Tesla Fleet helper functions."""
+
+import asyncio
+from collections.abc import Awaitable
+from typing import Any
+
+from tesla_fleet_api.exceptions import TeslaFleetError
+
+from homeassistant.exceptions import HomeAssistantError
+
+from .const import DOMAIN, LOGGER, TeslaFleetState
+from .models import TeslaFleetVehicleData
+
+
+async def wake_up_vehicle(vehicle: TeslaFleetVehicleData) -> None:
+    """Wake up a vehicle."""
+    async with vehicle.wakelock:
+        times = 0
+        while vehicle.coordinator.data["state"] != TeslaFleetState.ONLINE:
+            try:
+                if times == 0:
+                    cmd = await vehicle.api.wake_up()
+                else:
+                    cmd = await vehicle.api.vehicle()
+                state = cmd["response"]["state"]
+            except TeslaFleetError as e:
+                raise HomeAssistantError(str(e)) from e
+            vehicle.coordinator.data["state"] = state
+            if state != TeslaFleetState.ONLINE:
+                times += 1
+                if times >= 4:  # Give up after 30 seconds total
+                    raise HomeAssistantError("Could not wake up vehicle")
+                await asyncio.sleep(times * 5)
+
+
+async def handle_command(command: Awaitable) -> dict[str, Any]:
+    """Handle a command."""
+    try:
+        result = await command
+    except TeslaFleetError as e:
+        raise HomeAssistantError(
+            translation_domain=DOMAIN,
+            translation_key="command_failed",
+            translation_placeholders={"message": e.message},
+        ) from e
+    LOGGER.debug("Command result: %s", result)
+    return result
+
+
+async def handle_vehicle_command(command: Awaitable) -> bool:
+    """Handle a vehicle command."""
+    result = await handle_command(command)
+    if (response := result.get("response")) is None:
+        if error := result.get("error"):
+            # No response with error
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="command_error",
+                translation_placeholders={"error": error},
+            )
+        # No response without error (unexpected)
+        raise HomeAssistantError(f"Unknown response: {response}")
+    if (result := response.get("result")) is not True:
+        if reason := response.get("reason"):
+            if reason in ("already_set", "not_charging", "requested"):
+                # Reason is acceptable
+                return result
+            # Result of false with reason
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
translation_key="command_reason", + translation_placeholders={"reason": reason}, + ) + # Result of false without reason (unexpected) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_no_reason", + ) + # Response with result of true + return result diff --git a/homeassistant/components/tesla_fleet/icons.json b/homeassistant/components/tesla_fleet/icons.json index 2dbde45ee08..449dda93c62 100644 --- a/homeassistant/components/tesla_fleet/icons.json +++ b/homeassistant/components/tesla_fleet/icons.json @@ -7,6 +7,12 @@ "on": "mdi:hvac" } }, + "storm_mode_active": { + "default": "mdi:weather-sunny", + "state": { + "on": "mdi:weather-lightning-rainy" + } + }, "vehicle_state_is_user_present": { "state": { "off": "mdi:account-remove-outline", @@ -38,6 +44,45 @@ } } }, + "button": { + "boombox": { + "default": "mdi:volume-high" + }, + "enable_keyless_driving": { + "default": "mdi:car-key" + }, + "flash_lights": { + "default": "mdi:flashlight" + }, + "homelink": { + "default": "mdi:garage" + }, + "honk": { + "default": "mdi:bullhorn" + }, + "wake": { + "default": "mdi:sleep-off" + } + }, + "climate": { + "driver_temp": { + "state_attributes": { + "preset_mode": { + "state": { + "off": "mdi:power", + "keep": "mdi:fan", + "dog": "mdi:dog", + "camp": "mdi:tent" + } + } + } + } + }, + "cover": { + "charge_state_charge_port_door_open": { + "default": "mdi:ev-plug-ccs2" + } + }, "device_tracker": { "location": { "default": "mdi:map-marker" @@ -46,6 +91,77 @@ "default": "mdi:routes" } }, + "lock": { + "charge_state_charge_port_latch": { + "default": "mdi:ev-plug-tesla" + }, + "vehicle_state_locked": { + "state": { + "locked": "mdi:car-door-lock", + "unlocked": "mdi:car-door-lock-open" + } + } + }, + "select": { + "climate_state_seat_heater_left": { + "default": "mdi:car-seat-heater", + "state": { + "off": "mdi:car-seat" + } + }, + "climate_state_seat_heater_rear_center": { + "default": "mdi:car-seat-heater", + "state": { + "off": "mdi:car-seat" + } + }, + "climate_state_seat_heater_rear_left": { + "default": "mdi:car-seat-heater", + "state": { + "off": "mdi:car-seat" + } + }, + "climate_state_seat_heater_rear_right": { + "default": "mdi:car-seat-heater", + "state": { + "off": "mdi:car-seat" + } + }, + "climate_state_seat_heater_right": { + "default": "mdi:car-seat-heater", + "state": { + "off": "mdi:car-seat" + } + }, + "climate_state_seat_heater_third_row_left": { + "default": "mdi:car-seat-heater", + "state": { + "off": "mdi:car-seat" + } + }, + "climate_state_seat_heater_third_row_right": { + "default": "mdi:car-seat-heater", + "state": { + "off": "mdi:car-seat" + } + }, + "components_customer_preferred_export_rule": { + "default": "mdi:transmission-tower", + "state": { + "battery_ok": "mdi:battery-negative", + "never": "mdi:transmission-tower-off", + "pv_only": "mdi:solar-panel" + } + }, + "default_real_mode": { + "default": "mdi:home-battery", + "state": { + "autonomous": "mdi:auto-fix", + "backup": "mdi:battery-charging-100", + "self_consumption": "mdi:home-battery" + } + } + }, "sensor": { "battery_power": { "default": "mdi:home-battery" @@ -106,6 +222,51 @@ }, "wall_connector_state": { "default": "mdi:ev-station" + }, + "island_status": { + "default": "mdi:help-circle", + "state": { + "on_grid": "mdi:transmission-tower", + "off_grid": "mdi:transmission-tower-off", + "off_grid_unintentional": "mdi:transmission-tower-off", + "island_status_unknown": "mdi:help-circle", + "off_grid_intentional": "mdi:account-cancel" + } + } + }, + "switch": { + 
"charge_state_user_charge_enable_request": { + "default": "mdi:ev-station" + }, + "climate_state_auto_seat_climate_left": { + "default": "mdi:car-seat-heater", + "state": { + "off": "mdi:car-seat" + } + }, + "climate_state_auto_seat_climate_right": { + "default": "mdi:car-seat-heater", + "state": { + "off": "mdi:car-seat" + } + }, + "climate_state_auto_steering_wheel_heat": { + "default": "mdi:steering" + }, + "climate_state_defrost_mode": { + "default": "mdi:snowflake-melt" + }, + "components_disallow_charge_from_grid_with_solar_installed": { + "state": { + "false": "mdi:transmission-tower", + "true": "mdi:solar-power" + } + }, + "vehicle_state_sentry_mode": { + "default": "mdi:shield-car" + }, + "vehicle_state_valet_mode": { + "default": "mdi:speedometer-slow" } } } diff --git a/homeassistant/components/tesla_fleet/lock.py b/homeassistant/components/tesla_fleet/lock.py new file mode 100644 index 00000000000..32998d409be --- /dev/null +++ b/homeassistant/components/tesla_fleet/lock.py @@ -0,0 +1,103 @@ +"""Lock platform for Tesla Fleet integration.""" + +from __future__ import annotations + +from typing import Any + +from tesla_fleet_api.const import Scope + +from homeassistant.components.lock import LockEntity +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TeslaFleetConfigEntry +from .const import DOMAIN +from .entity import TeslaFleetVehicleEntity +from .helpers import handle_vehicle_command +from .models import TeslaFleetVehicleData + +ENGAGED = "Engaged" + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslaFleetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the TeslaFleet lock platform from a config entry.""" + + async_add_entities( + klass(vehicle, Scope.VEHICLE_CMDS in entry.runtime_data.scopes) + for klass in ( + TeslaFleetVehicleLockEntity, + TeslaFleetCableLockEntity, + ) + for vehicle in entry.runtime_data.vehicles + ) + + +class TeslaFleetVehicleLockEntity(TeslaFleetVehicleEntity, LockEntity): + """Lock entity for TeslaFleet.""" + + def __init__(self, data: TeslaFleetVehicleData, scoped: bool) -> None: + """Initialize the lock.""" + super().__init__(data, "vehicle_state_locked") + self.scoped = scoped + + def _async_update_attrs(self) -> None: + """Update entity attributes.""" + self._attr_is_locked = self._value + + async def async_lock(self, **kwargs: Any) -> None: + """Lock the doors.""" + self.raise_for_read_only(Scope.VEHICLE_CMDS) + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.door_lock()) + self._attr_is_locked = True + self.async_write_ha_state() + + async def async_unlock(self, **kwargs: Any) -> None: + """Unlock the doors.""" + self.raise_for_read_only(Scope.VEHICLE_CMDS) + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.door_unlock()) + self._attr_is_locked = False + self.async_write_ha_state() + + +class TeslaFleetCableLockEntity(TeslaFleetVehicleEntity, LockEntity): + """Cable Lock entity for TeslaFleet.""" + + def __init__( + self, + data: TeslaFleetVehicleData, + scoped: bool, + ) -> None: + """Initialize the lock.""" + super().__init__(data, "charge_state_charge_port_latch") + self.scoped = scoped + + def _async_update_attrs(self) -> None: + """Update entity attributes.""" + if self._value is None: + self._attr_is_locked = None + self._attr_is_locked = self._value == ENGAGED + + async def 
async_lock(self, **kwargs: Any) -> None: + """Charge cable Lock cannot be manually locked.""" + raise ServiceValidationError( + "Insert cable to lock", + translation_domain=DOMAIN, + translation_key="no_cable", + ) + + async def async_unlock(self, **kwargs: Any) -> None: + """Unlock charge cable lock.""" + self.raise_for_read_only(Scope.VEHICLE_CMDS) + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.charge_port_door_open()) + self._attr_is_locked = False + self.async_write_ha_state() diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index 29966b3b49c..95062a8f856 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -7,6 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/tesla_fleet", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "quality_scale": "gold", - "requirements": ["tesla-fleet-api==0.7.3"] + "requirements": ["tesla-fleet-api==0.8.5"] } diff --git a/homeassistant/components/tesla_fleet/media_player.py b/homeassistant/components/tesla_fleet/media_player.py new file mode 100644 index 00000000000..455c990077d --- /dev/null +++ b/homeassistant/components/tesla_fleet/media_player.py @@ -0,0 +1,149 @@ +"""Media player platform for Tesla Fleet integration.""" + +from __future__ import annotations + +from tesla_fleet_api.const import Scope + +from homeassistant.components.media_player import ( + MediaPlayerDeviceClass, + MediaPlayerEntity, + MediaPlayerEntityFeature, + MediaPlayerState, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TeslaFleetConfigEntry +from .entity import TeslaFleetVehicleEntity +from .helpers import handle_vehicle_command +from .models import TeslaFleetVehicleData + +STATES = { + "Playing": MediaPlayerState.PLAYING, + "Paused": MediaPlayerState.PAUSED, + "Stopped": MediaPlayerState.IDLE, + "Off": MediaPlayerState.OFF, +} +VOLUME_MAX = 11.0 +VOLUME_STEP = 1.0 / 3 + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslaFleetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Tesla Fleet Media platform from a config entry.""" + + async_add_entities( + TeslaFleetMediaEntity(vehicle, Scope.VEHICLE_CMDS in entry.runtime_data.scopes) + for vehicle in entry.runtime_data.vehicles + ) + + +class TeslaFleetMediaEntity(TeslaFleetVehicleEntity, MediaPlayerEntity): + """Vehicle media player class.""" + + _attr_device_class = MediaPlayerDeviceClass.SPEAKER + _attr_supported_features = ( + MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.VOLUME_SET + ) + _volume_max: float = VOLUME_MAX + + def __init__( + self, + data: TeslaFleetVehicleData, + scoped: bool, + ) -> None: + """Initialize the media player entity.""" + super().__init__(data, "media") + self.scoped = scoped + if not scoped: + self._attr_supported_features = MediaPlayerEntityFeature(0) + + def _async_update_attrs(self) -> None: + """Update entity attributes.""" + self._volume_max = ( + self.get("vehicle_state_media_info_audio_volume_max") or VOLUME_MAX + ) + self._attr_state = STATES.get( + self.get("vehicle_state_media_info_media_playback_status") or "Off", + ) + self._attr_volume_step = ( + 1.0 + / self._volume_max + / ( + 
self.get("vehicle_state_media_info_audio_volume_increment") + or VOLUME_STEP + ) + ) + + if volume := self.get("vehicle_state_media_info_audio_volume"): + self._attr_volume_level = volume / self._volume_max + else: + self._attr_volume_level = None + + if duration := self.get("vehicle_state_media_info_now_playing_duration"): + self._attr_media_duration = duration / 1000 + else: + self._attr_media_duration = None + + if duration and ( + position := self.get("vehicle_state_media_info_now_playing_elapsed") + ): + self._attr_media_position = position / 1000 + else: + self._attr_media_position = None + + self._attr_media_title = self.get("vehicle_state_media_info_now_playing_title") + self._attr_media_artist = self.get( + "vehicle_state_media_info_now_playing_artist" + ) + self._attr_media_album_name = self.get( + "vehicle_state_media_info_now_playing_album" + ) + self._attr_media_playlist = self.get( + "vehicle_state_media_info_now_playing_station" + ) + self._attr_source = self.get("vehicle_state_media_info_now_playing_source") + + async def async_set_volume_level(self, volume: float) -> None: + """Set volume level, range 0..1.""" + await self.wake_up_if_asleep() + await handle_vehicle_command( + self.api.adjust_volume(int(volume * self._volume_max)) + ) + self._attr_volume_level = volume + self.async_write_ha_state() + + async def async_media_play(self) -> None: + """Send play command.""" + if self.state != MediaPlayerState.PLAYING: + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.media_toggle_playback()) + self._attr_state = MediaPlayerState.PLAYING + self.async_write_ha_state() + + async def async_media_pause(self) -> None: + """Send pause command.""" + if self.state == MediaPlayerState.PLAYING: + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.media_toggle_playback()) + self._attr_state = MediaPlayerState.PAUSED + self.async_write_ha_state() + + async def async_media_next_track(self) -> None: + """Send next track command.""" + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.media_next_track()) + + async def async_media_previous_track(self) -> None: + """Send previous track command.""" + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.media_prev_track()) diff --git a/homeassistant/components/tesla_fleet/models.py b/homeassistant/components/tesla_fleet/models.py index 1b1f5f083cd..ae945dd96bf 100644 --- a/homeassistant/components/tesla_fleet/models.py +++ b/homeassistant/components/tesla_fleet/models.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from dataclasses import dataclass from tesla_fleet_api import EnergySpecific, VehicleSpecific @@ -33,6 +34,8 @@ class TeslaFleetVehicleData: coordinator: TeslaFleetVehicleDataCoordinator vin: str device: DeviceInfo + signing: bool + wakelock = asyncio.Lock() @dataclass diff --git a/homeassistant/components/tesla_fleet/number.py b/homeassistant/components/tesla_fleet/number.py new file mode 100644 index 00000000000..b806b4dbc77 --- /dev/null +++ b/homeassistant/components/tesla_fleet/number.py @@ -0,0 +1,206 @@ +"""Number platform for Tesla Fleet integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from itertools import chain +from typing import Any + +from tesla_fleet_api import EnergySpecific, VehicleSpecific +from tesla_fleet_api.const import Scope + +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + 
NumberEntityDescription, + NumberMode, +) +from homeassistant.const import PERCENTAGE, PRECISION_WHOLE, UnitOfElectricCurrent +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.icon import icon_for_battery_level + +from . import TeslaFleetConfigEntry +from .entity import TeslaFleetEnergyInfoEntity, TeslaFleetVehicleEntity +from .helpers import handle_command, handle_vehicle_command +from .models import TeslaFleetEnergyData, TeslaFleetVehicleData + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class TeslaFleetNumberVehicleEntityDescription(NumberEntityDescription): + """Describes TeslaFleet Number entity.""" + + func: Callable[[VehicleSpecific, float], Awaitable[Any]] + native_min_value: float + native_max_value: float + min_key: str | None = None + max_key: str + scopes: list[Scope] + + +VEHICLE_DESCRIPTIONS: tuple[TeslaFleetNumberVehicleEntityDescription, ...] = ( + TeslaFleetNumberVehicleEntityDescription( + key="charge_state_charge_current_request", + native_step=PRECISION_WHOLE, + native_min_value=0, + native_max_value=32, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=NumberDeviceClass.CURRENT, + mode=NumberMode.AUTO, + max_key="charge_state_charge_current_request_max", + func=lambda api, value: api.set_charging_amps(value), + scopes=[Scope.VEHICLE_CHARGING_CMDS], + ), + TeslaFleetNumberVehicleEntityDescription( + key="charge_state_charge_limit_soc", + native_step=PRECISION_WHOLE, + native_min_value=50, + native_max_value=100, + native_unit_of_measurement=PERCENTAGE, + device_class=NumberDeviceClass.BATTERY, + mode=NumberMode.AUTO, + min_key="charge_state_charge_limit_soc_min", + max_key="charge_state_charge_limit_soc_max", + func=lambda api, value: api.set_charge_limit(value), + scopes=[Scope.VEHICLE_CHARGING_CMDS, Scope.VEHICLE_CMDS], + ), +) + + +@dataclass(frozen=True, kw_only=True) +class TeslaFleetNumberBatteryEntityDescription(NumberEntityDescription): + """Describes TeslaFleet Number entity.""" + + func: Callable[[EnergySpecific, float], Awaitable[Any]] + requires: str | None = None + + +ENERGY_INFO_DESCRIPTIONS: tuple[TeslaFleetNumberBatteryEntityDescription, ...] 
= ( + TeslaFleetNumberBatteryEntityDescription( + key="backup_reserve_percent", + func=lambda api, value: api.backup(int(value)), + requires="components_battery", + ), + TeslaFleetNumberBatteryEntityDescription( + key="off_grid_vehicle_charging_reserve_percent", + func=lambda api, value: api.off_grid_vehicle_charging_reserve(int(value)), + requires="components_off_grid_vehicle_charging_reserve_supported", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslaFleetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the TeslaFleet number platform from a config entry.""" + + async_add_entities( + chain( + ( # Add vehicle entities + TeslaFleetVehicleNumberEntity( + vehicle, + description, + entry.runtime_data.scopes, + ) + for vehicle in entry.runtime_data.vehicles + for description in VEHICLE_DESCRIPTIONS + ), + ( # Add energy site entities + TeslaFleetEnergyInfoNumberSensorEntity( + energysite, + description, + entry.runtime_data.scopes, + ) + for energysite in entry.runtime_data.energysites + for description in ENERGY_INFO_DESCRIPTIONS + if description.requires is None + or energysite.info_coordinator.data.get(description.requires) + ), + ) + ) + + +class TeslaFleetVehicleNumberEntity(TeslaFleetVehicleEntity, NumberEntity): + """Vehicle number entity base class.""" + + entity_description: TeslaFleetNumberVehicleEntityDescription + + def __init__( + self, + data: TeslaFleetVehicleData, + description: TeslaFleetNumberVehicleEntityDescription, + scopes: list[Scope], + ) -> None: + """Initialize the number entity.""" + self.scoped = any(scope in scopes for scope in description.scopes) + self.entity_description = description + super().__init__( + data, + description.key, + ) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + self._attr_native_value = self._value + + if (min_key := self.entity_description.min_key) is not None: + self._attr_native_min_value = self.get_number( + min_key, + self.entity_description.native_min_value, + ) + else: + self._attr_native_min_value = self.entity_description.native_min_value + + self._attr_native_max_value = self.get_number( + self.entity_description.max_key, + self.entity_description.native_max_value, + ) + + async def async_set_native_value(self, value: float) -> None: + """Set new value.""" + value = int(value) + self.raise_for_read_only(self.entity_description.scopes[0]) + await self.wake_up_if_asleep() + await handle_vehicle_command(self.entity_description.func(self.api, value)) + self._attr_native_value = value + self.async_write_ha_state() + + +class TeslaFleetEnergyInfoNumberSensorEntity(TeslaFleetEnergyInfoEntity, NumberEntity): + """Energy info number entity base class.""" + + entity_description: TeslaFleetNumberBatteryEntityDescription + _attr_native_step = PRECISION_WHOLE + _attr_native_min_value = 0 + _attr_native_max_value = 100 + _attr_device_class = NumberDeviceClass.BATTERY + _attr_native_unit_of_measurement = PERCENTAGE + + def __init__( + self, + data: TeslaFleetEnergyData, + description: TeslaFleetNumberBatteryEntityDescription, + scopes: list[Scope], + ) -> None: + """Initialize the number entity.""" + self.scoped = Scope.ENERGY_CMDS in scopes + self.entity_description = description + super().__init__(data, description.key) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + self._attr_native_value = self._value + self._attr_icon = icon_for_battery_level(self.native_value) + + async def 
async_set_native_value(self, value: float) -> None: + """Set new value.""" + value = int(value) + self.raise_for_read_only(Scope.ENERGY_CMDS) + await handle_command(self.entity_description.func(self.api, value)) + self._attr_native_value = value + self.async_write_ha_state() diff --git a/homeassistant/components/tesla_fleet/oauth.py b/homeassistant/components/tesla_fleet/oauth.py index 00976abf56f..b25c5216009 100644 --- a/homeassistant/components/tesla_fleet/oauth.py +++ b/homeassistant/components/tesla_fleet/oauth.py @@ -1,8 +1,5 @@ """Provide oauth implementations for the Tesla Fleet integration.""" -import base64 -import hashlib -import secrets from typing import Any from homeassistant.components.application_credentials import ( @@ -11,58 +8,8 @@ from homeassistant.components.application_credentials import ( ClientCredential, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_entry_oauth2_flow -from .const import AUTHORIZE_URL, CLIENT_ID, DOMAIN, SCOPES, TOKEN_URL - - -class TeslaSystemImplementation(config_entry_oauth2_flow.LocalOAuth2Implementation): - """Tesla Fleet API open source Oauth2 implementation.""" - - code_verifier: str - code_challenge: str - - def __init__(self, hass: HomeAssistant) -> None: - """Initialize open source Oauth2 implementation.""" - - # Setup PKCE - self.code_verifier = secrets.token_urlsafe(32) - hashed_verifier = hashlib.sha256(self.code_verifier.encode()).digest() - self.code_challenge = ( - base64.urlsafe_b64encode(hashed_verifier).decode().replace("=", "") - ) - super().__init__( - hass, - DOMAIN, - CLIENT_ID, - "", - AUTHORIZE_URL, - TOKEN_URL, - ) - - @property - def name(self) -> str: - """Name of the implementation.""" - return "Built-in open source client ID" - - @property - def extra_authorize_data(self) -> dict[str, Any]: - """Extra data that needs to be appended to the authorize url.""" - return { - "scope": " ".join(SCOPES), - "code_challenge": self.code_challenge, # PKCE - } - - async def async_resolve_external_data(self, external_data: Any) -> dict: - """Resolve the authorization code to tokens.""" - return await self._token_request( - { - "grant_type": "authorization_code", - "code": external_data["code"], - "redirect_uri": external_data["state"]["redirect_uri"], - "code_verifier": self.code_verifier, # PKCE - } - ) +from .const import AUTHORIZE_URL, SCOPES, TOKEN_URL class TeslaUserImplementation(AuthImplementation): @@ -83,4 +30,4 @@ class TeslaUserImplementation(AuthImplementation): @property def extra_authorize_data(self) -> dict[str, Any]: """Extra data that needs to be appended to the authorize url.""" - return {"scope": " ".join(SCOPES)} + return {"prompt": "login", "scope": " ".join(SCOPES)} diff --git a/homeassistant/components/tesla_fleet/select.py b/homeassistant/components/tesla_fleet/select.py new file mode 100644 index 00000000000..515a0e7c2e7 --- /dev/null +++ b/homeassistant/components/tesla_fleet/select.py @@ -0,0 +1,264 @@ +"""Select platform for Tesla Fleet integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from itertools import chain + +from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode, Scope, Seat + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import TeslaFleetConfigEntry +from .entity import TeslaFleetEnergyInfoEntity, TeslaFleetVehicleEntity +from .helpers import handle_command, handle_vehicle_command +from .models import TeslaFleetEnergyData, TeslaFleetVehicleData + +OFF = "off" +LOW = "low" +MEDIUM = "medium" +HIGH = "high" + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class SeatHeaterDescription(SelectEntityDescription): + """Seat Heater entity description.""" + + position: Seat + available_fn: Callable[[TeslaFleetSeatHeaterSelectEntity], bool] = lambda _: True + + +SEAT_HEATER_DESCRIPTIONS: tuple[SeatHeaterDescription, ...] = ( + SeatHeaterDescription( + key="climate_state_seat_heater_left", + position=Seat.FRONT_LEFT, + ), + SeatHeaterDescription( + key="climate_state_seat_heater_right", + position=Seat.FRONT_RIGHT, + ), + SeatHeaterDescription( + key="climate_state_seat_heater_rear_left", + position=Seat.REAR_LEFT, + available_fn=lambda self: self.get("vehicle_config_rear_seat_heaters") != 0, + entity_registry_enabled_default=False, + ), + SeatHeaterDescription( + key="climate_state_seat_heater_rear_center", + position=Seat.REAR_CENTER, + available_fn=lambda self: self.get("vehicle_config_rear_seat_heaters") != 0, + entity_registry_enabled_default=False, + ), + SeatHeaterDescription( + key="climate_state_seat_heater_rear_right", + position=Seat.REAR_RIGHT, + available_fn=lambda self: self.get("vehicle_config_rear_seat_heaters") != 0, + entity_registry_enabled_default=False, + ), + SeatHeaterDescription( + key="climate_state_seat_heater_third_row_left", + position=Seat.THIRD_LEFT, + available_fn=lambda self: self.get("vehicle_config_third_row_seats") != "None", + entity_registry_enabled_default=False, + ), + SeatHeaterDescription( + key="climate_state_seat_heater_third_row_right", + position=Seat.THIRD_RIGHT, + available_fn=lambda self: self.get("vehicle_config_third_row_seats") != "None", + entity_registry_enabled_default=False, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslaFleetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the TeslaFleet select platform from a config entry.""" + + async_add_entities( + chain( + ( + TeslaFleetSeatHeaterSelectEntity( + vehicle, description, entry.runtime_data.scopes + ) + for description in SEAT_HEATER_DESCRIPTIONS + for vehicle in entry.runtime_data.vehicles + ), + ( + TeslaFleetWheelHeaterSelectEntity(vehicle, entry.runtime_data.scopes) + for vehicle in entry.runtime_data.vehicles + ), + ( + TeslaFleetOperationSelectEntity(energysite, entry.runtime_data.scopes) + for energysite in entry.runtime_data.energysites + if energysite.info_coordinator.data.get("components_battery") + ), + ( + TeslaFleetExportRuleSelectEntity(energysite, entry.runtime_data.scopes) + for energysite in entry.runtime_data.energysites + if energysite.info_coordinator.data.get("components_battery") + and energysite.info_coordinator.data.get("components_solar") + ), + ) + ) + + +class TeslaFleetSeatHeaterSelectEntity(TeslaFleetVehicleEntity, SelectEntity): + """Select entity for vehicle seat heater.""" + + entity_description: SeatHeaterDescription + + _attr_options = [ + OFF, + LOW, + MEDIUM, + HIGH, + ] + + def __init__( + self, + data: TeslaFleetVehicleData, + description: SeatHeaterDescription, + scopes: list[Scope], + ) -> None: + """Initialize the vehicle seat select entity.""" + self.entity_description = description + self.scoped = Scope.VEHICLE_CMDS in scopes + super().__init__(data, description.key) + + def 
_async_update_attrs(self) -> None: + """Handle updated data from the coordinator.""" + self._attr_available = self.entity_description.available_fn(self) + value = self._value + if value is None: + self._attr_current_option = None + else: + self._attr_current_option = self._attr_options[value] + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + self.raise_for_read_only(Scope.VEHICLE_CMDS) + await self.wake_up_if_asleep() + level = self._attr_options.index(option) + # AC must be on to turn on seat heater + if level and not self.get("climate_state_is_climate_on"): + await handle_vehicle_command(self.api.auto_conditioning_start()) + await handle_vehicle_command( + self.api.remote_seat_heater_request(self.entity_description.position, level) + ) + self._attr_current_option = option + self.async_write_ha_state() + + +class TeslaFleetWheelHeaterSelectEntity(TeslaFleetVehicleEntity, SelectEntity): + """Select entity for vehicle steering wheel heater.""" + + _attr_options = [ + OFF, + LOW, + HIGH, + ] + + def __init__( + self, + data: TeslaFleetVehicleData, + scopes: list[Scope], + ) -> None: + """Initialize the vehicle steering wheel select entity.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + super().__init__( + data, + "climate_state_steering_wheel_heat_level", + ) + + def _async_update_attrs(self) -> None: + """Handle updated data from the coordinator.""" + + value = self._value + if value is None: + self._attr_current_option = None + else: + self._attr_current_option = self._attr_options[value] + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + self.raise_for_read_only(Scope.VEHICLE_CMDS) + await self.wake_up_if_asleep() + level = self._attr_options.index(option) + # AC must be on to turn on steering wheel heater + if level and not self.get("climate_state_is_climate_on"): + await handle_vehicle_command(self.api.auto_conditioning_start()) + await handle_vehicle_command( + self.api.remote_steering_wheel_heat_level_request(level) + ) + self._attr_current_option = option + self.async_write_ha_state() + + +class TeslaFleetOperationSelectEntity(TeslaFleetEnergyInfoEntity, SelectEntity): + """Select entity for operation mode select entities.""" + + _attr_options: list[str] = [ + EnergyOperationMode.AUTONOMOUS, + EnergyOperationMode.BACKUP, + EnergyOperationMode.SELF_CONSUMPTION, + ] + + def __init__( + self, + data: TeslaFleetEnergyData, + scopes: list[Scope], + ) -> None: + """Initialize the operation mode select entity.""" + self.scoped = Scope.ENERGY_CMDS in scopes + super().__init__(data, "default_real_mode") + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + self._attr_current_option = self._value + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + self.raise_for_read_only(Scope.ENERGY_CMDS) + await handle_command(self.api.operation(option)) + self._attr_current_option = option + self.async_write_ha_state() + + +class TeslaFleetExportRuleSelectEntity(TeslaFleetEnergyInfoEntity, SelectEntity): + """Select entity for export rules select entities.""" + + _attr_options: list[str] = [ + EnergyExportMode.NEVER, + EnergyExportMode.BATTERY_OK, + EnergyExportMode.PV_ONLY, + ] + + def __init__( + self, + data: TeslaFleetEnergyData, + scopes: list[Scope], + ) -> None: + """Initialize the export rules select entity.""" + self.scoped = Scope.ENERGY_CMDS in scopes + super().__init__(data, 
"components_customer_preferred_export_rule") + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + self._attr_current_option = self.get(self.key, EnergyExportMode.NEVER.value) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + self.raise_for_read_only(Scope.ENERGY_CMDS) + await handle_command( + self.api.grid_import_export(customer_preferred_export_rule=option) + ) + self._attr_current_option = option + self.async_write_ha_state() diff --git a/homeassistant/components/tesla_fleet/sensor.py b/homeassistant/components/tesla_fleet/sensor.py index 4d30a509e1a..b4e7b51faba 100644 --- a/homeassistant/components/tesla_fleet/sensor.py +++ b/homeassistant/components/tesla_fleet/sensor.py @@ -378,6 +378,17 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = ( device_class=SensorDeviceClass.POWER, entity_registry_enabled_default=False, ), + SensorEntityDescription( + key="island_status", + options=[ + "island_status_unknown", + "on_grid", + "off_grid", + "off_grid_unintentional", + "off_grid_intentional", + ], + device_class=SensorDeviceClass.ENUM, + ), ) WALL_CONNECTOR_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = ( @@ -475,7 +486,7 @@ class TeslaFleetVehicleSensorEntity(TeslaFleetVehicleEntity, RestoreSensor): async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" await super().async_added_to_hass() - if self.coordinator.data.get("state") == TeslaFleetState.OFFLINE: + if self.coordinator.data.get("state") != TeslaFleetState.ONLINE: if (sensor_data := await self.async_get_last_sensor_data()) is not None: self._attr_native_value = sensor_data.native_value @@ -513,7 +524,7 @@ class TeslaFleetVehicleTimeSensorEntity(TeslaFleetVehicleEntity, SensorEntity): self._attr_native_value = self._get_timestamp(self._value) -class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, RestoreSensor): +class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, SensorEntity): """Base class for Tesla Fleet energy site metric sensors.""" entity_description: SensorEntityDescription @@ -527,20 +538,13 @@ class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, RestoreSensor self.entity_description = description super().__init__(data, description.key) - async def async_added_to_hass(self) -> None: - """Handle entity which will be added.""" - await super().async_added_to_hass() - if not self.coordinator.updated_once: - if (sensor_data := await self.async_get_last_sensor_data()) is not None: - self._attr_native_value = sensor_data.native_value - def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" self._attr_available = not self.is_none self._attr_native_value = self._value -class TeslaFleetWallConnectorSensorEntity(TeslaFleetWallConnectorEntity, RestoreSensor): +class TeslaFleetWallConnectorSensorEntity(TeslaFleetWallConnectorEntity, SensorEntity): """Base class for Tesla Fleet energy site metric sensors.""" entity_description: SensorEntityDescription @@ -559,20 +563,13 @@ class TeslaFleetWallConnectorSensorEntity(TeslaFleetWallConnectorEntity, Restore description.key, ) - async def async_added_to_hass(self) -> None: - """Handle entity which will be added.""" - await super().async_added_to_hass() - if not self.coordinator.updated_once: - if (sensor_data := await self.async_get_last_sensor_data()) is not None: - self._attr_native_value = sensor_data.native_value - def _async_update_attrs(self) -> None: """Update the attributes of the 
sensor.""" self._attr_available = not self.is_none self._attr_native_value = self._value -class TeslaFleetEnergyInfoSensorEntity(TeslaFleetEnergyInfoEntity, RestoreSensor): +class TeslaFleetEnergyInfoSensorEntity(TeslaFleetEnergyInfoEntity, SensorEntity): """Base class for Tesla Fleet energy site metric sensors.""" entity_description: SensorEntityDescription @@ -586,13 +583,6 @@ class TeslaFleetEnergyInfoSensorEntity(TeslaFleetEnergyInfoEntity, RestoreSensor self.entity_description = description super().__init__(data, description.key) - async def async_added_to_hass(self) -> None: - """Handle entity which will be added.""" - await super().async_added_to_hass() - if not self.coordinator.updated_once: - if (sensor_data := await self.async_get_last_sensor_data()) is not None: - self._attr_native_value = sensor_data.native_value - def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" self._attr_available = not self.is_none diff --git a/homeassistant/components/tesla_fleet/strings.json b/homeassistant/components/tesla_fleet/strings.json index 6e74714ddd5..fe5cd06c1ef 100644 --- a/homeassistant/components/tesla_fleet/strings.json +++ b/homeassistant/components/tesla_fleet/strings.json @@ -8,7 +8,9 @@ "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reauth_account_mismatch": "The reauthentication account does not match the original account" }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" @@ -19,7 +21,7 @@ }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", - "description": "The Withings integration needs to re-authenticate your account" + "description": "The {name} integration needs to re-authenticate your account" } }, "create_entry": { @@ -64,6 +66,9 @@ "state": { "name": "Status" }, + "storm_mode_active": { + "name": "Storm watch active" + }, "vehicle_state_dashcam_state": { "name": "Dashcam" }, @@ -107,6 +112,61 @@ "name": "Tire pressure warning rear right" } }, + "button": { + "boombox": { + "name": "Play fart" + }, + "enable_keyless_driving": { + "name": "Keyless driving" + }, + "flash_lights": { + "name": "Flash lights" + }, + "homelink": { + "name": "Homelink" + }, + "honk": { + "name": "Honk horn" + }, + "wake": { + "name": "Wake" + } + }, + "climate": { + "climate_state_cabin_overheat_protection": { + "name": "Cabin overheat protection" + }, + "driver_temp": { + "name": "[%key:component::climate::title%]", + "state_attributes": { + "preset_mode": { + "state": { + "off": "Normal", + "keep": "Keep mode", + "dog": "Dog mode", + "camp": "Camp mode" + } + } + } + } + }, + "cover": { + "charge_state_charge_port_door_open": { + "name": "Charge port door" + }, + "vehicle_state_ft": { + "name": "Frunk" + }, + "vehicle_state_rt": { + "name": "Trunk" + }, + "vehicle_state_sun_roof_state": { + "name": "Sunroof" + }, + "windows": { + "name": "Windows" + } + }, "device_tracker": { "location": { "name": "Location" @@ -115,6 +175,122 @@ "name": "Route" } }, + "lock": { + "charge_state_charge_port_latch": { + "name": "Charge cable lock" + }, + "vehicle_state_locked": { + "name": "[%key:component::lock::title%]" 
+ } + }, + "media_player": { + "media": { + "name": "[%key:component::media_player::title%]" + } + }, + "number": { + "backup_reserve_percent": { + "name": "Backup reserve" + }, + "charge_state_charge_current_request": { + "name": "Charge current" + }, + "charge_state_charge_limit_soc": { + "name": "Charge limit" + }, + "off_grid_vehicle_charging_reserve_percent": { + "name": "Off grid reserve" + } + }, + "select": { + "climate_state_seat_heater_left": { + "name": "Seat heater front left", + "state": { + "high": "High", + "low": "Low", + "medium": "Medium", + "off": "Off" + } + }, + "climate_state_seat_heater_rear_center": { + "name": "Seat heater rear center", + "state": { + "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", + "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", + "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", + "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + } + }, + "climate_state_seat_heater_rear_left": { + "name": "Seat heater rear left", + "state": { + "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", + "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", + "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", + "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + } + }, + "climate_state_seat_heater_rear_right": { + "name": "Seat heater rear right", + "state": { + "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", + "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", + "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", + "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + } + }, + "climate_state_seat_heater_right": { + "name": "Seat heater front right", + "state": { + "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", + "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", + "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", + "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + } + }, + "climate_state_seat_heater_third_row_left": { + "name": "Seat heater third row left", + "state": { + "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", + "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", + "medium": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", + "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + } + }, + "climate_state_seat_heater_third_row_right": { + "name": "Seat heater third row right", + "state": { + "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", + "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", + "medium": 
"[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::medium%]", + "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + } + }, + "climate_state_steering_wheel_heat_level": { + "name": "Steering wheel heater", + "state": { + "high": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::high%]", + "low": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::low%]", + "off": "[%key:component::tesla_fleet::entity::select::climate_state_seat_heater_left::state::off%]" + } + }, + "components_customer_preferred_export_rule": { + "name": "Allow export", + "state": { + "battery_ok": "Battery", + "never": "Never", + "pv_only": "Solar only" + } + }, + "default_real_mode": { + "name": "Operation mode", + "state": { + "autonomous": "Autonomous", + "backup": "Backup", + "self_consumption": "Self consumption" + } + } + }, "sensor": { "battery_power": { "name": "Battery power" @@ -238,6 +414,16 @@ "vehicle_state_odometer": { "name": "Odometer" }, + "island_status": { + "name": "Grid Status", + "state": { + "island_status_unknown": "Unknown", + "on_grid": "Connected", + "off_grid": "Disconnected", + "off_grid_unintentional": "Disconnected unintentionally", + "off_grid_intentional": "Disconnected intentionally" + } + }, "vehicle_state_tpms_pressure_fl": { "name": "Tire pressure front left" }, @@ -268,11 +454,73 @@ "wall_connector_state": { "name": "State code" } + }, + "switch": { + "charge_state_user_charge_enable_request": { + "name": "Charge" + }, + "climate_state_auto_seat_climate_left": { + "name": "Auto seat climate left" + }, + "climate_state_auto_seat_climate_right": { + "name": "Auto seat climate right" + }, + "climate_state_auto_steering_wheel_heat": { + "name": "Auto steering wheel heater" + }, + "climate_state_defrost_mode": { + "name": "Defrost" + }, + "components_disallow_charge_from_grid_with_solar_installed": { + "name": "Allow charging from grid" + }, + "user_settings_storm_mode_enabled": { + "name": "Storm watch" + }, + "vehicle_state_sentry_mode": { + "name": "Sentry mode" + }, + "vehicle_state_valet_mode": { + "name": "Valet mode" + } } }, "exceptions": { + "no_cable": { + "message": "Charge cable will lock automatically when connected" + }, "update_failed": { - "message": "{endpoint} data request failed. {message}" + "message": "{endpoint} data request failed: {message}" + }, + "command_failed": { + "message": "Command failed: {message}" + }, + "command_error": { + "message": "Command returned an error: {error}" + }, + "command_reason": { + "message": "Command was unsuccessful: {reason}" + }, + "command_no_reason": { + "message": "Command was unsuccessful but did not return a reason why." + }, + "invalid_cop_temp": { + "message": "Cabin overheat protection does not support that temperature." + }, + "invalid_hvac_mode": { + "message": "Climate mode {hvac_mode} is not supported." + }, + "missing_temperature": { + "message": "Temperature is required for this action." + }, + "missing_scope_vehicle_cmds": { + "message": "Missing vehicle commands scope." + }, + "missing_scope_vehicle_charging_cmds": { + "message": "Missing vehicle charging commands scope." + }, + "missing_scope_energy_cmds": { + "message": "Missing energy commands scope." 
} } } diff --git a/homeassistant/components/tesla_fleet/switch.py b/homeassistant/components/tesla_fleet/switch.py new file mode 100644 index 00000000000..d602cff78c0 --- /dev/null +++ b/homeassistant/components/tesla_fleet/switch.py @@ -0,0 +1,262 @@ +"""Switch platform for Tesla Fleet integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from itertools import chain +from typing import Any + +from tesla_fleet_api.const import Scope, Seat + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TeslaFleetConfigEntry +from .entity import TeslaFleetEnergyInfoEntity, TeslaFleetVehicleEntity +from .helpers import handle_command, handle_vehicle_command +from .models import TeslaFleetEnergyData, TeslaFleetVehicleData + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class TeslaFleetSwitchEntityDescription(SwitchEntityDescription): + """Describes TeslaFleet Switch entity.""" + + on_func: Callable + off_func: Callable + scopes: list[Scope] + + +VEHICLE_DESCRIPTIONS: tuple[TeslaFleetSwitchEntityDescription, ...] = ( + TeslaFleetSwitchEntityDescription( + key="vehicle_state_sentry_mode", + on_func=lambda api: api.set_sentry_mode(on=True), + off_func=lambda api: api.set_sentry_mode(on=False), + scopes=[Scope.VEHICLE_CMDS], + ), + TeslaFleetSwitchEntityDescription( + key="climate_state_auto_seat_climate_left", + on_func=lambda api: api.remote_auto_seat_climate_request(Seat.FRONT_LEFT, True), + off_func=lambda api: api.remote_auto_seat_climate_request( + Seat.FRONT_LEFT, False + ), + scopes=[Scope.VEHICLE_CMDS], + ), + TeslaFleetSwitchEntityDescription( + key="climate_state_auto_seat_climate_right", + on_func=lambda api: api.remote_auto_seat_climate_request( + Seat.FRONT_RIGHT, True + ), + off_func=lambda api: api.remote_auto_seat_climate_request( + Seat.FRONT_RIGHT, False + ), + scopes=[Scope.VEHICLE_CMDS], + ), + TeslaFleetSwitchEntityDescription( + key="climate_state_auto_steering_wheel_heat", + on_func=lambda api: api.remote_auto_steering_wheel_heat_climate_request( + on=True + ), + off_func=lambda api: api.remote_auto_steering_wheel_heat_climate_request( + on=False + ), + scopes=[Scope.VEHICLE_CMDS], + ), + TeslaFleetSwitchEntityDescription( + key="climate_state_defrost_mode", + on_func=lambda api: api.set_preconditioning_max(on=True, manual_override=False), + off_func=lambda api: api.set_preconditioning_max( + on=False, manual_override=False + ), + scopes=[Scope.VEHICLE_CMDS], + ), +) + +VEHICLE_CHARGE_DESCRIPTION = TeslaFleetSwitchEntityDescription( + key="charge_state_user_charge_enable_request", + on_func=lambda api: api.charge_start(), + off_func=lambda api: api.charge_stop(), + scopes=[Scope.VEHICLE_CHARGING_CMDS, Scope.VEHICLE_CMDS], +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslaFleetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the TeslaFleet Switch platform from a config entry.""" + + async_add_entities( + chain( + ( + TeslaFleetVehicleSwitchEntity( + vehicle, description, entry.runtime_data.scopes + ) + for vehicle in entry.runtime_data.vehicles + for description in VEHICLE_DESCRIPTIONS + ), + ( + TeslaFleetChargeSwitchEntity( + vehicle, VEHICLE_CHARGE_DESCRIPTION, entry.runtime_data.scopes + ) + for vehicle in entry.runtime_data.vehicles + ), + 
+            (
+                TeslaFleetChargeFromGridSwitchEntity(
+                    energysite,
+                    entry.runtime_data.scopes,
+                )
+                for energysite in entry.runtime_data.energysites
+                if energysite.info_coordinator.data.get("components_battery")
+                and energysite.info_coordinator.data.get("components_solar")
+            ),
+            (
+                TeslaFleetStormModeSwitchEntity(energysite, entry.runtime_data.scopes)
+                for energysite in entry.runtime_data.energysites
+                if energysite.info_coordinator.data.get("components_storm_mode_capable")
+            ),
+        )
+    )
+
+
+class TeslaFleetSwitchEntity(SwitchEntity):
+    """Base class for all TeslaFleet switch entities."""
+
+    _attr_device_class = SwitchDeviceClass.SWITCH
+    entity_description: TeslaFleetSwitchEntityDescription
+
+
+class TeslaFleetVehicleSwitchEntity(TeslaFleetVehicleEntity, TeslaFleetSwitchEntity):
+    """Base class for TeslaFleet vehicle switch entities."""
+
+    def __init__(
+        self,
+        data: TeslaFleetVehicleData,
+        description: TeslaFleetSwitchEntityDescription,
+        scopes: list[Scope],
+    ) -> None:
+        """Initialize the Switch."""
+        super().__init__(data, description.key)
+        self.entity_description = description
+        self.scoped = any(scope in scopes for scope in description.scopes)
+
+    def _async_update_attrs(self) -> None:
+        """Update the attributes of the switch."""
+        if self._value is None:
+            self._attr_is_on = None
+        else:
+            self._attr_is_on = bool(self._value)
+
+    async def async_turn_on(self, **kwargs: Any) -> None:
+        """Turn on the Switch."""
+        self.raise_for_read_only(self.entity_description.scopes[0])
+        await self.wake_up_if_asleep()
+        await handle_vehicle_command(self.entity_description.on_func(self.api))
+        self._attr_is_on = True
+        self.async_write_ha_state()
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn off the Switch."""
+        self.raise_for_read_only(self.entity_description.scopes[0])
+        await self.wake_up_if_asleep()
+        await handle_vehicle_command(self.entity_description.off_func(self.api))
+        self._attr_is_on = False
+        self.async_write_ha_state()
+
+
+class TeslaFleetChargeSwitchEntity(TeslaFleetVehicleSwitchEntity):
+    """Entity class for TeslaFleet charge switch."""
+
+    def _async_update_attrs(self) -> None:
+        """Update the attributes of the entity."""
+        if self._value is None:
+            self._attr_is_on = self.get("charge_state_charge_enable_request")
+        else:
+            self._attr_is_on = self._value
+
+
+class TeslaFleetChargeFromGridSwitchEntity(
+    TeslaFleetEnergyInfoEntity, TeslaFleetSwitchEntity
+):
+    """Entity class for Charge From Grid switch."""
+
+    def __init__(
+        self,
+        data: TeslaFleetEnergyData,
+        scopes: list[Scope],
+    ) -> None:
+        """Initialize the Switch."""
+        self.scoped = Scope.ENERGY_CMDS in scopes
+        super().__init__(
+            data, "components_disallow_charge_from_grid_with_solar_installed"
+        )
+
+    def _async_update_attrs(self) -> None:
+        """Update the attributes of the entity."""
+        # When disallow_charge_from_grid_with_solar_installed is missing, it's off.
+        # But this switch is inverted to match how the Tesla app works.
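+        # So: flag False or missing -> charging from grid is allowed -> switch on;
+        # flag True -> charging from grid is disallowed -> switch off.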
+ self._attr_is_on = not self.get(self.key, False) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the Switch.""" + self.raise_for_read_only(Scope.ENERGY_CMDS) + await handle_command( + self.api.grid_import_export( + disallow_charge_from_grid_with_solar_installed=False + ) + ) + self._attr_is_on = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the Switch.""" + self.raise_for_read_only(Scope.ENERGY_CMDS) + await handle_command( + self.api.grid_import_export( + disallow_charge_from_grid_with_solar_installed=True + ) + ) + self._attr_is_on = False + self.async_write_ha_state() + + +class TeslaFleetStormModeSwitchEntity( + TeslaFleetEnergyInfoEntity, TeslaFleetSwitchEntity +): + """Entity class for Storm Mode switch.""" + + def __init__( + self, + data: TeslaFleetEnergyData, + scopes: list[Scope], + ) -> None: + """Initialize the Switch.""" + super().__init__(data, "user_settings_storm_mode_enabled") + self.scoped = Scope.ENERGY_CMDS in scopes + + def _async_update_attrs(self) -> None: + """Update the attributes of the sensor.""" + self._attr_available = self._value is not None + self._attr_is_on = bool(self._value) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the Switch.""" + self.raise_for_read_only(Scope.ENERGY_CMDS) + await handle_command(self.api.storm_mode(enabled=True)) + self._attr_is_on = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the Switch.""" + self.raise_for_read_only(Scope.ENERGY_CMDS) + await handle_command(self.api.storm_mode(enabled=False)) + self._attr_is_on = False + self.async_write_ha_state() diff --git a/homeassistant/components/tesla_wall_connector/__init__.py b/homeassistant/components/tesla_wall_connector/__init__.py index 28ddc15ade7..01c657fbcaa 100644 --- a/homeassistant/components/tesla_wall_connector/__init__.py +++ b/homeassistant/components/tesla_wall_connector/__init__.py @@ -2,11 +2,9 @@ from __future__ import annotations -from collections.abc import Callable from dataclasses import dataclass from datetime import timedelta import logging -from typing import Any from tesla_wall_connector import WallConnector from tesla_wall_connector.exceptions import ( @@ -20,19 +18,13 @@ from homeassistant.const import CONF_HOST, CONF_SCAN_INTERVAL, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( DEFAULT_SCAN_INTERVAL, DOMAIN, WALLCONNECTOR_DATA_LIFETIME, WALLCONNECTOR_DATA_VITALS, - WALLCONNECTOR_DEVICE_NAME, ) PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] @@ -79,6 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator: DataUpdateCoordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="tesla-wallconnector", update_interval=get_poll_interval(entry), update_method=async_update_data, @@ -123,43 +116,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -def get_unique_id(serial_number: str, key: str) -> str: - """Get a unique entity name.""" - 
return f"{serial_number}-{key}" - - -class WallConnectorEntity(CoordinatorEntity): - """Base class for Wall Connector entities.""" - - _attr_has_entity_name = True - - def __init__(self, wall_connector_data: WallConnectorData) -> None: - """Initialize WallConnector Entity.""" - self.wall_connector_data = wall_connector_data - self._attr_unique_id = get_unique_id( - wall_connector_data.serial_number, self.entity_description.key - ) - super().__init__(wall_connector_data.update_coordinator) - - @property - def device_info(self) -> DeviceInfo: - """Return information about the device.""" - return DeviceInfo( - identifiers={(DOMAIN, self.wall_connector_data.serial_number)}, - name=WALLCONNECTOR_DEVICE_NAME, - model=self.wall_connector_data.part_number, - sw_version=self.wall_connector_data.firmware_version, - manufacturer="Tesla", - ) - - -@dataclass(frozen=True) -class WallConnectorLambdaValueGetterMixin: - """Mixin with a function pointer for getting sensor value.""" - - value_fn: Callable[[dict], Any] - - @dataclass class WallConnectorData: """Data for the Tesla Wall Connector integration.""" diff --git a/homeassistant/components/tesla_wall_connector/binary_sensor.py b/homeassistant/components/tesla_wall_connector/binary_sensor.py index cf8fbf53b52..f7ef385b8ed 100644 --- a/homeassistant/components/tesla_wall_connector/binary_sensor.py +++ b/homeassistant/components/tesla_wall_connector/binary_sensor.py @@ -13,12 +13,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ( - WallConnectorData, - WallConnectorEntity, - WallConnectorLambdaValueGetterMixin, -) +from . import WallConnectorData from .const import DOMAIN, WALLCONNECTOR_DATA_VITALS +from .entity import WallConnectorEntity, WallConnectorLambdaValueGetterMixin _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/tesla_wall_connector/config_flow.py b/homeassistant/components/tesla_wall_connector/config_flow.py index 8390b26b182..3296539f701 100644 --- a/homeassistant/components/tesla_wall_connector/config_flow.py +++ b/homeassistant/components/tesla_wall_connector/config_flow.py @@ -46,7 +46,6 @@ class TeslaWallConnectorConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize config flow.""" super().__init__() self.ip_address: str | None = None - self.serial_number = None async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo @@ -70,23 +69,21 @@ class TeslaWallConnectorConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_abort(reason="cannot_connect") - self.serial_number = version.serial_number + serial_number: str = version.serial_number - await self.async_set_unique_id(self.serial_number) + await self.async_set_unique_id(serial_number) self._abort_if_unique_id_configured(updates={CONF_HOST: self.ip_address}) _LOGGER.debug( "No entry found for wall connector with IP %s. 
Serial nr: %s", self.ip_address, - self.serial_number, + serial_number, ) - placeholders = { + self.context["title_placeholders"] = { CONF_HOST: self.ip_address, - WALLCONNECTOR_SERIAL_NUMBER: self.serial_number, + WALLCONNECTOR_SERIAL_NUMBER: serial_number, } - - self.context["title_placeholders"] = placeholders return await self.async_step_user() async def async_step_user( diff --git a/homeassistant/components/tesla_wall_connector/entity.py b/homeassistant/components/tesla_wall_connector/entity.py new file mode 100644 index 00000000000..ea08a00e791 --- /dev/null +++ b/homeassistant/components/tesla_wall_connector/entity.py @@ -0,0 +1,50 @@ +"""The Tesla Wall Connector integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import WallConnectorData +from .const import DOMAIN, WALLCONNECTOR_DEVICE_NAME + + +@dataclass(frozen=True) +class WallConnectorLambdaValueGetterMixin: + """Mixin with a function pointer for getting sensor value.""" + + value_fn: Callable[[dict], Any] + + +def _get_unique_id(serial_number: str, key: str) -> str: + """Get a unique entity name.""" + return f"{serial_number}-{key}" + + +class WallConnectorEntity(CoordinatorEntity): + """Base class for Wall Connector entities.""" + + _attr_has_entity_name = True + + def __init__(self, wall_connector_data: WallConnectorData) -> None: + """Initialize WallConnector Entity.""" + self.wall_connector_data = wall_connector_data + self._attr_unique_id = _get_unique_id( + wall_connector_data.serial_number, self.entity_description.key + ) + super().__init__(wall_connector_data.update_coordinator) + + @property + def device_info(self) -> DeviceInfo: + """Return information about the device.""" + return DeviceInfo( + identifiers={(DOMAIN, self.wall_connector_data.serial_number)}, + name=WALLCONNECTOR_DEVICE_NAME, + model=self.wall_connector_data.part_number, + sw_version=self.wall_connector_data.firmware_version, + manufacturer="Tesla", + ) diff --git a/homeassistant/components/tesla_wall_connector/sensor.py b/homeassistant/components/tesla_wall_connector/sensor.py index 077f70c5370..a50c81c912e 100644 --- a/homeassistant/components/tesla_wall_connector/sensor.py +++ b/homeassistant/components/tesla_wall_connector/sensor.py @@ -21,12 +21,9 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ( - WallConnectorData, - WallConnectorEntity, - WallConnectorLambdaValueGetterMixin, -) +from . 
import WallConnectorData from .const import DOMAIN, WALLCONNECTOR_DATA_LIFETIME, WALLCONNECTOR_DATA_VITALS +from .entity import WallConnectorEntity, WallConnectorLambdaValueGetterMixin _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index 6308d62f3a1..0b61120877a 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -1,6 +1,7 @@ """Teslemetry integration.""" import asyncio +from collections.abc import Callable from typing import Final from tesla_fleet_api import EnergySpecific, Teslemetry, VehicleSpecific @@ -10,6 +11,7 @@ from tesla_fleet_api.exceptions import ( SubscriptionRequired, TeslaFleetError, ) +from teslemetry_stream import TeslemetryStream from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform @@ -23,10 +25,12 @@ from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, LOGGER, MODELS from .coordinator import ( + TeslemetryEnergyHistoryCoordinator, TeslemetryEnergySiteInfoCoordinator, TeslemetryEnergySiteLiveCoordinator, TeslemetryVehicleDataCoordinator, ) +from .helpers import flatten from .models import TeslemetryData, TeslemetryEnergyData, TeslemetryVehicleData from .services import async_register_services @@ -68,8 +72,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - access_token=access_token, ) try: - scopes = (await teslemetry.metadata())["scopes"] - products = (await teslemetry.products())["response"] + calls = await asyncio.gather( + teslemetry.metadata(), + teslemetry.products(), + ) except InvalidToken as e: raise ConfigEntryAuthFailed from e except SubscriptionRequired as e: @@ -77,11 +83,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - except TeslaFleetError as e: raise ConfigEntryNotReady from e + scopes = calls[0]["scopes"] + region = calls[0]["region"] + products = calls[1]["response"] + device_registry = dr.async_get(hass) # Create array of classes vehicles: list[TeslemetryVehicleData] = [] energysites: list[TeslemetryEnergyData] = [] + + # Create the stream + stream = TeslemetryStream( + session, + access_token, + server=f"{region.lower()}.teslemetry.com", + parse_timestamp=True, + ) + for product in products: if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes: # Remove the protobuff 'cached_data' that we do not use to save memory @@ -98,21 +117,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - serial_number=vin, ) + remove_listener = stream.async_add_listener( + create_handle_vehicle_stream(vin, coordinator), + {"vin": vin}, + ) + vehicles.append( TeslemetryVehicleData( api=api, coordinator=coordinator, + stream=stream, vin=vin, device=device, + remove_listener=remove_listener, ) ) + elif "energy_site_id" in product and Scope.ENERGY_DEVICE_DATA in scopes: site_id = product["energy_site_id"] - if not ( - product["components"]["battery"] - or product["components"]["solar"] - or "wall_connectors" in product["components"] - ): + powerwall = ( + product["components"]["battery"] or product["components"]["solar"] + ) + wall_connector = "wall_connectors" in product["components"] + if not powerwall and not wall_connector: LOGGER.debug( "Skipping Energy Site %s as it has no components", site_id, @@ -120,8 +147,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - continue api = 
EnergySpecific(teslemetry.energy, site_id) - live_coordinator = TeslemetryEnergySiteLiveCoordinator(hass, api) - info_coordinator = TeslemetryEnergySiteInfoCoordinator(hass, api, product) device = DeviceInfo( identifiers={(DOMAIN, str(site_id))}, manufacturer="Tesla", @@ -133,8 +158,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - energysites.append( TeslemetryEnergyData( api=api, - live_coordinator=live_coordinator, - info_coordinator=info_coordinator, + live_coordinator=TeslemetryEnergySiteLiveCoordinator(hass, api), + info_coordinator=TeslemetryEnergySiteInfoCoordinator( + hass, api, product + ), + history_coordinator=( + TeslemetryEnergyHistoryCoordinator(hass, api) + if powerwall + else None + ), id=site_id, device=device, ) @@ -154,6 +186,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - energysite.info_coordinator.async_config_entry_first_refresh() for energysite in energysites ), + *( + energysite.history_coordinator.async_config_entry_first_refresh() + for energysite in energysites + if energysite.history_coordinator + ), ) # Add energy device models @@ -207,3 +244,19 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> config_entry, unique_id=metadata["uid"], version=1, minor_version=2 ) return True + + +def create_handle_vehicle_stream(vin: str, coordinator) -> Callable[[dict], None]: + """Create a handle vehicle stream function.""" + + def handle_vehicle_stream(data: dict) -> None: + """Handle vehicle data from the stream.""" + if "vehicle_data" in data: + LOGGER.debug("Streaming received vehicle data from %s", vin) + coordinator.async_set_updated_data(flatten(data["vehicle_data"])) + elif "state" in data: + LOGGER.debug("Streaming received state from %s", vin) + coordinator.data["state"] = data["state"] + coordinator.async_set_updated_data(coordinator.data) + + return handle_vehicle_stream diff --git a/homeassistant/components/teslemetry/binary_sensor.py b/homeassistant/components/teslemetry/binary_sensor.py index e3f9a5716f6..29ebfea4db1 100644 --- a/homeassistant/components/teslemetry/binary_sensor.py +++ b/homeassistant/components/teslemetry/binary_sensor.py @@ -165,6 +165,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetryBinarySensorEntityDescription, ...] = ( ENERGY_LIVE_DESCRIPTIONS: tuple[BinarySensorEntityDescription, ...] 
= ( BinarySensorEntityDescription(key="backup_capable"), BinarySensorEntityDescription(key="grid_services_active"), + BinarySensorEntityDescription(key="storm_mode_active"), ) @@ -222,15 +223,12 @@ class TeslemetryVehicleBinarySensorEntity(TeslemetryVehicleEntity, BinarySensorE def _async_update_attrs(self) -> None: """Update the attributes of the binary sensor.""" - if self.coordinator.updated_once: - if self._value is None: - self._attr_available = False - self._attr_is_on = None - else: - self._attr_available = True - self._attr_is_on = self.entity_description.is_on(self._value) - else: + if self._value is None: + self._attr_available = False self._attr_is_on = None + else: + self._attr_available = True + self._attr_is_on = self.entity_description.is_on(self._value) class TeslemetryEnergyLiveBinarySensorEntity( diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index bd4fb0eba53..95b769a1c2d 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -74,7 +74,6 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): | ClimateEntityFeature.PRESET_MODE ) _attr_preset_modes = ["off", "keep", "dog", "camp"] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -84,8 +83,10 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): ) -> None: """Initialize the climate.""" self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: self._attr_supported_features = ClimateEntityFeature(0) + self._attr_hvac_modes = [] super().__init__( data, @@ -95,13 +96,15 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): def _async_update_attrs(self) -> None: """Update the attributes of the entity.""" value = self.get("climate_state_is_climate_on") - if value is None: - self._attr_hvac_mode = None - elif value: + if value: self._attr_hvac_mode = HVACMode.HEAT_COOL else: self._attr_hvac_mode = HVACMode.OFF + # If not scoped, prevent the user from changing the HVAC mode by making it the only option + if self._attr_hvac_mode and not self.scoped: + self._attr_hvac_modes = [self._attr_hvac_mode] + self._attr_current_temperature = self.get("climate_state_inside_temp") self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") self._attr_preset_mode = self.get("climate_state_climate_keeper_mode") @@ -115,7 +118,7 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_on(self) -> None: """Set the climate state to on.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.auto_conditioning_start()) @@ -125,7 +128,7 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_off(self) -> None: """Set the climate state to off.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.auto_conditioning_stop()) @@ -135,7 +138,6 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" - if temp := kwargs.get(ATTR_TEMPERATURE): await self.wake_up_if_asleep() await handle_vehicle_command( @@ -180,23 +182,31 @@ COP_MODES = { "FanOnly": HVACMode.FAN_ONLY, } +# String to celsius COP_LEVELS = { "Low": 30, "Medium": 35, "High": 40, } +# Celsius to IntEnum 
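+# (used by async_set_temperature to map a requested Celsius target back to the
+# Fleet API's CabinOverheatProtectionTemp enum)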
+TEMP_LEVELS = { + 30: CabinOverheatProtectionTemp.LOW, + 35: CabinOverheatProtectionTemp.MEDIUM, + 40: CabinOverheatProtectionTemp.HIGH, +} + class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEntity): """Telemetry vehicle cabin overheat protection entity.""" _attr_precision = PRECISION_WHOLE _attr_target_temperature_step = 5 - _attr_min_temp = 30 - _attr_max_temp = 40 + _attr_min_temp = COP_LEVELS["Low"] + _attr_max_temp = COP_LEVELS["High"] _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = list(COP_MODES.values()) - _enable_turn_on_off_backwards_compatibility = False + _attr_entity_registry_enabled_default = False def __init__( @@ -206,20 +216,21 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn ) -> None: """Initialize the climate.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + if self.scoped: + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + else: + self._attr_supported_features = ClimateEntityFeature(0) + self._attr_hvac_modes = [] + super().__init__(data, "climate_state_cabin_overheat_protection") - # Supported Features - self._attr_supported_features = ( - ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF - ) - if self.get("vehicle_config_cop_user_set_temp_supported"): + # Supported Features from data + if self.scoped and self.get("vehicle_config_cop_user_set_temp_supported"): self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE - # Scopes - self.scoped = Scope.VEHICLE_CMDS in scopes - if not self.scoped: - self._attr_supported_features = ClimateEntityFeature(0) - def _async_update_attrs(self) -> None: """Update the attributes of the entity.""" @@ -228,6 +239,10 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn else: self._attr_hvac_mode = COP_MODES.get(state) + # If not scoped, prevent the user from changing the HVAC mode by making it the only option + if self._attr_hvac_mode and not self.scoped: + self._attr_hvac_modes = [self._attr_hvac_mode] + if (level := self.get("climate_state_cop_activation_temperature")) is None: self._attr_target_temperature = None else: @@ -245,18 +260,11 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) - if not (temp := kwargs.get(ATTR_TEMPERATURE)): - return - - if temp == 30: - cop_mode = CabinOverheatProtectionTemp.LOW - elif temp == 35: - cop_mode = CabinOverheatProtectionTemp.MEDIUM - elif temp == 40: - cop_mode = CabinOverheatProtectionTemp.HIGH - else: + if (temp := kwargs.get(ATTR_TEMPERATURE)) is None or ( + cop_mode := TEMP_LEVELS.get(temp) + ) is None: raise ServiceValidationError( translation_domain=DOMAIN, translation_key="invalid_cop_temp", @@ -289,7 +297,7 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set the climate mode and state.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await self._async_set_cop(hvac_mode) self.async_write_ha_state() diff --git a/homeassistant/components/teslemetry/config_flow.py b/homeassistant/components/teslemetry/config_flow.py index 73921986f44..d8cf2bd7945 100644 --- a/homeassistant/components/teslemetry/config_flow.py +++ 
b/homeassistant/components/teslemetry/config_flow.py @@ -14,7 +14,7 @@ from tesla_fleet_api.exceptions import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -22,6 +22,7 @@ from .const import DOMAIN, LOGGER TESLEMETRY_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str}) DESCRIPTION_PLACEHOLDERS = { + "name": "Teslemetry", "short_url": "teslemetry.com/console", "url": "[teslemetry.com/console](https://teslemetry.com/console)", } @@ -32,7 +33,6 @@ class TeslemetryConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 MINOR_VERSION = 2 - _entry: ConfigEntry | None = None async def async_auth(self, user_input: Mapping[str, Any]) -> dict[str, str]: """Reusable Auth Helper.""" @@ -78,7 +78,6 @@ class TeslemetryConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth on failure.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -86,12 +85,11 @@ class TeslemetryConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle users reauth credentials.""" - assert self._entry errors: dict[str, str] = {} if user_input and not (errors := await self.async_auth(user_input)): return self.async_update_reload_and_abort( - self._entry, + self._get_reauth_entry(), data=user_input, ) diff --git a/homeassistant/components/teslemetry/const.py b/homeassistant/components/teslemetry/const.py index 0c2dc68e7c7..01c6c33f505 100644 --- a/homeassistant/components/teslemetry/const.py +++ b/homeassistant/components/teslemetry/const.py @@ -16,6 +16,30 @@ MODELS = { "Y": "Model Y", } +ENERGY_HISTORY_FIELDS = [ + "solar_energy_exported", + "generator_energy_exported", + "grid_energy_imported", + "grid_services_energy_imported", + "grid_services_energy_exported", + "grid_energy_exported_from_solar", + "grid_energy_exported_from_generator", + "grid_energy_exported_from_battery", + "battery_energy_exported", + "battery_energy_imported_from_grid", + "battery_energy_imported_from_solar", + "battery_energy_imported_from_generator", + "consumer_energy_imported_from_grid", + "consumer_energy_imported_from_solar", + "consumer_energy_imported_from_battery", + "consumer_energy_imported_from_generator", + "total_home_usage", + "total_battery_charge", + "total_battery_discharge", + "total_solar_generation", + "total_grid_energy_exported", +] + class TeslemetryState(StrEnum): """Teslemetry Vehicle States.""" diff --git a/homeassistant/components/teslemetry/coordinator.py b/homeassistant/components/teslemetry/coordinator.py index 11fc49e86ee..e7232d0f87c 100644 --- a/homeassistant/components/teslemetry/coordinator.py +++ b/homeassistant/components/teslemetry/coordinator.py @@ -4,25 +4,25 @@ from datetime import datetime, timedelta from typing import Any from tesla_fleet_api import EnergySpecific, VehicleSpecific -from tesla_fleet_api.const import VehicleDataEndpoint +from tesla_fleet_api.const import TeslaEnergyPeriod, VehicleDataEndpoint from tesla_fleet_api.exceptions import ( - Forbidden, InvalidToken, SubscriptionRequired, TeslaFleetError, - VehicleOffline, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from 
homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import LOGGER, TeslemetryState +from .const import ENERGY_HISTORY_FIELDS, LOGGER +from .helpers import flatten VEHICLE_INTERVAL = timedelta(seconds=30) VEHICLE_WAIT = timedelta(minutes=15) ENERGY_LIVE_INTERVAL = timedelta(seconds=30) ENERGY_INFO_INTERVAL = timedelta(seconds=30) +ENERGY_HISTORY_INTERVAL = timedelta(seconds=60) ENDPOINTS = [ VehicleDataEndpoint.CHARGE_STATE, @@ -34,24 +34,9 @@ ENDPOINTS = [ ] -def flatten(data: dict[str, Any], parent: str | None = None) -> dict[str, Any]: - """Flatten the data structure.""" - result = {} - for key, value in data.items(): - if parent: - key = f"{parent}_{key}" - if isinstance(value, dict): - result.update(flatten(value, key)) - else: - result[key] = value - return result - - class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Class to manage fetching data from the Teslemetry API.""" - updated_once: bool - pre2021: bool last_active: datetime def __init__( @@ -66,63 +51,24 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) self.api = api self.data = flatten(product) - self.updated_once = False self.last_active = datetime.now() async def _async_update_data(self) -> dict[str, Any]: """Update vehicle data using Teslemetry API.""" - self.update_interval = VEHICLE_INTERVAL - try: - if self.data["state"] != TeslemetryState.ONLINE: - response = await self.api.vehicle() - self.data["state"] = response["response"]["state"] - - if self.data["state"] != TeslemetryState.ONLINE: - return self.data - - response = await self.api.vehicle_data(endpoints=ENDPOINTS) - data = response["response"] - - except VehicleOffline: - self.data["state"] = TeslemetryState.OFFLINE - return self.data - except InvalidToken as e: - raise ConfigEntryAuthFailed from e - except SubscriptionRequired as e: + data = (await self.api.vehicle_data(endpoints=ENDPOINTS))["response"] + except (InvalidToken, SubscriptionRequired) as e: raise ConfigEntryAuthFailed from e except TeslaFleetError as e: raise UpdateFailed(e.message) from e - self.updated_once = True - - if self.api.pre2021 and data["state"] == TeslemetryState.ONLINE: - # Handle pre-2021 vehicles which cannot sleep by themselves - if ( - data["charge_state"].get("charging_state") == "Charging" - or data["vehicle_state"].get("is_user_present") - or data["vehicle_state"].get("sentry_mode") - ): - # Vehicle is active, reset timer - self.last_active = datetime.now() - else: - elapsed = datetime.now() - self.last_active - if elapsed > timedelta(minutes=20): - # Vehicle didn't sleep, try again in 15 minutes - self.last_active = datetime.now() - elif elapsed > timedelta(minutes=15): - # Let vehicle go to sleep now - self.update_interval = VEHICLE_WAIT - return flatten(data) class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Class to manage fetching energy site live status from the Teslemetry API.""" - updated_once: bool - def __init__(self, hass: HomeAssistant, api: EnergySpecific) -> None: """Initialize Teslemetry Energy Site Live coordinator.""" super().__init__( @@ -138,7 +84,7 @@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) try: data = (await self.api.live_status())["response"] - except (InvalidToken, Forbidden, SubscriptionRequired) as e: + except (InvalidToken, SubscriptionRequired) as e: raise ConfigEntryAuthFailed from e except TeslaFleetError as e: raise UpdateFailed(e.message) from e @@ -154,8 +100,6 
@@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Class to manage fetching energy site info from the Teslemetry API.""" - updated_once: bool - def __init__(self, hass: HomeAssistant, api: EnergySpecific, product: dict) -> None: """Initialize Teslemetry Energy Info coordinator.""" super().__init__( @@ -172,9 +116,41 @@ class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]) try: data = (await self.api.site_info())["response"] - except (InvalidToken, Forbidden, SubscriptionRequired) as e: + except (InvalidToken, SubscriptionRequired) as e: raise ConfigEntryAuthFailed from e except TeslaFleetError as e: raise UpdateFailed(e.message) from e return flatten(data) + + +class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Class to manage fetching energy site info from the Teslemetry API.""" + + def __init__(self, hass: HomeAssistant, api: EnergySpecific) -> None: + """Initialize Teslemetry Energy Info coordinator.""" + super().__init__( + hass, + LOGGER, + name=f"Teslemetry Energy History {api.energy_site_id}", + update_interval=ENERGY_HISTORY_INTERVAL, + ) + self.api = api + + async def _async_update_data(self) -> dict[str, Any]: + """Update energy site data using Teslemetry API.""" + + try: + data = (await self.api.energy_history(TeslaEnergyPeriod.DAY))["response"] + except (InvalidToken, SubscriptionRequired) as e: + raise ConfigEntryAuthFailed from e + except TeslaFleetError as e: + raise UpdateFailed(e.message) from e + + # Add all time periods together + output = {key: 0 for key in ENERGY_HISTORY_FIELDS} + for period in data.get("time_series", []): + for key in ENERGY_HISTORY_FIELDS: + output[key] += period.get(key, 0) + + return output diff --git a/homeassistant/components/teslemetry/cover.py b/homeassistant/components/teslemetry/cover.py index 0b6d30b1faf..d14ef385b9c 100644 --- a/homeassistant/components/teslemetry/cover.py +++ b/homeassistant/components/teslemetry/cover.py @@ -73,13 +73,10 @@ class TeslemetryWindowEntity(TeslemetryVehicleEntity, CoverEntity): # All closed set to closed elif CLOSED == fd == fp == rd == rp: self._attr_is_closed = True - # Otherwise, set to unknown - else: - self._attr_is_closed = None async def async_open_cover(self, **kwargs: Any) -> None: """Vent windows.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command( self.api.window_control(command=WindowCommand.VENT) @@ -89,7 +86,7 @@ class TeslemetryWindowEntity(TeslemetryVehicleEntity, CoverEntity): async def async_close_cover(self, **kwargs: Any) -> None: """Close windows.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command( self.api.window_control(command=WindowCommand.CLOSE) @@ -122,7 +119,7 @@ class TeslemetryChargePortEntity(TeslemetryVehicleEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open charge port.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CHARGING_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.charge_port_door_open()) self._attr_is_closed = False @@ -130,7 +127,7 @@ class TeslemetryChargePortEntity(TeslemetryVehicleEntity, CoverEntity): async def async_close_cover(self, **kwargs: Any) -> None: """Close charge port.""" - self.raise_for_scope() + 
self.raise_for_scope(Scope.VEHICLE_CHARGING_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.charge_port_door_close()) self._attr_is_closed = True @@ -157,7 +154,7 @@ class TeslemetryFrontTrunkEntity(TeslemetryVehicleEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open front trunk.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.actuate_trunk(Trunk.FRONT)) self._attr_is_closed = False @@ -182,18 +179,12 @@ class TeslemetryRearTrunkEntity(TeslemetryVehicleEntity, CoverEntity): def _async_update_attrs(self) -> None: """Update the entity attributes.""" - value = self._value - if value == CLOSED: - self._attr_is_closed = True - elif value == OPEN: - self._attr_is_closed = False - else: - self._attr_is_closed = None + self._attr_is_closed = self._value == CLOSED async def async_open_cover(self, **kwargs: Any) -> None: """Open rear trunk.""" if self.is_closed is not False: - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.actuate_trunk(Trunk.REAR)) self._attr_is_closed = False @@ -202,7 +193,7 @@ class TeslemetryRearTrunkEntity(TeslemetryVehicleEntity, CoverEntity): async def async_close_cover(self, **kwargs: Any) -> None: """Close rear trunk.""" if self.is_closed is not True: - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.actuate_trunk(Trunk.REAR)) self._attr_is_closed = True @@ -240,7 +231,7 @@ class TeslemetrySunroofEntity(TeslemetryVehicleEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open sunroof.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.sun_roof_control(SunRoofCommand.VENT)) self._attr_is_closed = False @@ -248,7 +239,7 @@ class TeslemetrySunroofEntity(TeslemetryVehicleEntity, CoverEntity): async def async_close_cover(self, **kwargs: Any) -> None: """Close sunroof.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.sun_roof_control(SunRoofCommand.CLOSE)) self._attr_is_closed = True @@ -256,7 +247,7 @@ class TeslemetrySunroofEntity(TeslemetryVehicleEntity, CoverEntity): async def async_stop_cover(self, **kwargs: Any) -> None: """Close sunroof.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.sun_roof_control(SunRoofCommand.STOP)) self._attr_is_closed = False diff --git a/homeassistant/components/teslemetry/device_tracker.py b/homeassistant/components/teslemetry/device_tracker.py index 399d28533f1..2b0ffd88cc6 100644 --- a/homeassistant/components/teslemetry/device_tracker.py +++ b/homeassistant/components/teslemetry/device_tracker.py @@ -2,8 +2,8 @@ from __future__ import annotations -from homeassistant.components.device_tracker import SourceType from homeassistant.components.device_tracker.config_entry import TrackerEntity +from homeassistant.const import STATE_HOME from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -62,11 +62,6 @@ class TeslemetryDeviceTrackerEntity(TeslemetryVehicleEntity, TrackerEntity): """Return longitude value of the device.""" return self.get(self.lon_key) - @property - 
def source_type(self) -> SourceType: - """Return the source type of the device tracker.""" - return SourceType.GPS - class TeslemetryDeviceTrackerLocationEntity(TeslemetryDeviceTrackerEntity): """Vehicle location device tracker class.""" @@ -86,4 +81,7 @@ class TeslemetryDeviceTrackerRouteEntity(TeslemetryDeviceTrackerEntity): @property def location_name(self) -> str | None: """Return a location name for the current location of the device.""" - return self.get("drive_state_active_route_destination") + location = self.get("drive_state_active_route_destination") + if location == "Home": + return STATE_HOME + return location diff --git a/homeassistant/components/teslemetry/entity.py b/homeassistant/components/teslemetry/entity.py index 74c1fdd52b1..d14f3a42734 100644 --- a/homeassistant/components/teslemetry/entity.py +++ b/homeassistant/components/teslemetry/entity.py @@ -4,6 +4,7 @@ from abc import abstractmethod from typing import Any from tesla_fleet_api import EnergySpecific, VehicleSpecific +from tesla_fleet_api.const import Scope from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.device_registry import DeviceInfo @@ -11,6 +12,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import ( + TeslemetryEnergyHistoryCoordinator, TeslemetryEnergySiteInfoCoordinator, TeslemetryEnergySiteLiveCoordinator, TeslemetryVehicleDataCoordinator, @@ -22,6 +24,7 @@ from .models import TeslemetryEnergyData, TeslemetryVehicleData class TeslemetryEntity( CoordinatorEntity[ TeslemetryVehicleDataCoordinator + | TeslemetryEnergyHistoryCoordinator | TeslemetryEnergySiteLiveCoordinator | TeslemetryEnergySiteInfoCoordinator ] @@ -29,18 +32,18 @@ class TeslemetryEntity( """Parent class for all Teslemetry entities.""" _attr_has_entity_name = True + scoped: bool def __init__( self, coordinator: TeslemetryVehicleDataCoordinator + | TeslemetryEnergyHistoryCoordinator | TeslemetryEnergySiteLiveCoordinator | TeslemetryEnergySiteInfoCoordinator, - api: VehicleSpecific | EnergySpecific, key: str, ) -> None: """Initialize common aspects of a Teslemetry entity.""" super().__init__(coordinator) - self.api = api self.key = key self._attr_translation_key = self.key self._async_update_attrs() @@ -84,16 +87,22 @@ class TeslemetryEntity( def _async_update_attrs(self) -> None: """Update the attributes of the entity.""" - def raise_for_scope(self): + def raise_for_scope(self, scope: Scope): """Raise an error if a scope is not available.""" if not self.scoped: - raise ServiceValidationError("Missing required scope") + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="missing_scope", + translation_placeholders={"scope": scope}, + ) class TeslemetryVehicleEntity(TeslemetryEntity): """Parent class for Teslemetry Vehicle entities.""" _last_update: int = 0 + api: VehicleSpecific + vehicle: TeslemetryVehicleData def __init__( self, @@ -102,11 +111,11 @@ class TeslemetryVehicleEntity(TeslemetryEntity): ) -> None: """Initialize common aspects of a Teslemetry entity.""" - self._attr_unique_id = f"{data.vin}-{key}" + self.api = data.api self.vehicle = data - + self._attr_unique_id = f"{data.vin}-{key}" self._attr_device_info = data.device - super().__init__(data.coordinator, data.api, key) + super().__init__(data.coordinator, key) @property def _value(self) -> Any | None: @@ -121,39 +130,65 @@ class TeslemetryVehicleEntity(TeslemetryEntity): class TeslemetryEnergyLiveEntity(TeslemetryEntity): """Parent class for 
Teslemetry Energy Site Live entities.""" + api: EnergySpecific + def __init__( self, data: TeslemetryEnergyData, key: str, ) -> None: """Initialize common aspects of a Teslemetry Energy Site Live entity.""" + + self.api = data.api self._attr_unique_id = f"{data.id}-{key}" self._attr_device_info = data.device - super().__init__(data.live_coordinator, data.api, key) + super().__init__(data.live_coordinator, key) class TeslemetryEnergyInfoEntity(TeslemetryEntity): """Parent class for Teslemetry Energy Site Info Entities.""" + api: EnergySpecific + def __init__( self, data: TeslemetryEnergyData, key: str, ) -> None: """Initialize common aspects of a Teslemetry Energy Site Info entity.""" + + self.api = data.api self._attr_unique_id = f"{data.id}-{key}" self._attr_device_info = data.device - super().__init__(data.info_coordinator, data.api, key) + super().__init__(data.info_coordinator, key) -class TeslemetryWallConnectorEntity( - TeslemetryEntity, CoordinatorEntity[TeslemetryEnergySiteLiveCoordinator] -): +class TeslemetryEnergyHistoryEntity(TeslemetryEntity): + """Parent class for Teslemetry Energy History Entities.""" + + def __init__( + self, + data: TeslemetryEnergyData, + key: str, + ) -> None: + """Initialize common aspects of a Teslemetry Energy Site Info entity.""" + + assert data.history_coordinator + + self.api = data.api + self._attr_unique_id = f"{data.id}-{key}" + self._attr_device_info = data.device + + super().__init__(data.history_coordinator, key) + + +class TeslemetryWallConnectorEntity(TeslemetryEntity): """Parent class for Teslemetry Wall Connector Entities.""" _attr_has_entity_name = True + api: EnergySpecific def __init__( self, @@ -162,6 +197,8 @@ class TeslemetryWallConnectorEntity( key: str, ) -> None: """Initialize common aspects of a Teslemetry entity.""" + + self.api = data.api self.din = din self._attr_unique_id = f"{data.id}-{din}-{key}" @@ -182,7 +219,7 @@ class TeslemetryWallConnectorEntity( model=model, ) - super().__init__(data.live_coordinator, data.api, key) + super().__init__(data.live_coordinator, key) @property def _value(self) -> int: @@ -192,3 +229,10 @@ class TeslemetryWallConnectorEntity( .get(self.din, {}) .get(self.key) ) + + @property + def exists(self) -> bool: + """Return True if it exists in the wall connector coordinator data.""" + return self.key in self.coordinator.data.get("wall_connectors", {}).get( + self.din, {} + ) diff --git a/homeassistant/components/teslemetry/helpers.py b/homeassistant/components/teslemetry/helpers.py index a8cfa1051f1..30601feccbc 100644 --- a/homeassistant/components/teslemetry/helpers.py +++ b/homeassistant/components/teslemetry/helpers.py @@ -7,7 +7,20 @@ from tesla_fleet_api.exceptions import TeslaFleetError from homeassistant.exceptions import HomeAssistantError -from .const import LOGGER, TeslemetryState +from .const import DOMAIN, LOGGER, TeslemetryState + + +def flatten(data: dict[str, Any], parent: str | None = None) -> dict[str, Any]: + """Flatten the data structure.""" + result = {} + for key, value in data.items(): + if parent: + key = f"{parent}_{key}" + if isinstance(value, dict): + result.update(flatten(value, key)) + else: + result[key] = value + return result async def wake_up_vehicle(vehicle) -> None: @@ -22,12 +35,19 @@ async def wake_up_vehicle(vehicle) -> None: cmd = await vehicle.api.vehicle() state = cmd["response"]["state"] except TeslaFleetError as e: - raise HomeAssistantError(str(e)) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="wake_up_failed", + 
translation_placeholders={"message": e.message}, + ) from e vehicle.coordinator.data["state"] = state if state != TeslemetryState.ONLINE: times += 1 if times >= 4: # Give up after 30 seconds total - raise HomeAssistantError("Could not wake up vehicle") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="wake_up_timeout", + ) await asyncio.sleep(times * 5) @@ -36,18 +56,26 @@ async def handle_command(command) -> dict[str, Any]: try: result = await command except TeslaFleetError as e: - raise HomeAssistantError(f"Teslemetry command failed, {e.message}") from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_exception", + translation_placeholders={"message": e.message}, + ) from e LOGGER.debug("Command result: %s", result) return result -async def handle_vehicle_command(command) -> dict[str, Any]: +async def handle_vehicle_command(command) -> Any: """Handle a vehicle command.""" result = await handle_command(command) if (response := result.get("response")) is None: if error := result.get("error"): # No response with error - raise HomeAssistantError(error) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_error", + translation_placeholders={"error": error}, + ) # No response without error (unexpected) raise HomeAssistantError(f"Unknown response: {response}") if (result := response.get("result")) is not True: @@ -56,8 +84,14 @@ async def handle_vehicle_command(command) -> dict[str, Any]: # Reason is acceptable return result # Result of false with reason - raise HomeAssistantError(reason) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_reason", + translation_placeholders={"reason": reason}, + ) # Result of false without reason (unexpected) - raise HomeAssistantError("Command failed with no reason") + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="command_no_result" + ) # Response with result of true return result diff --git a/homeassistant/components/teslemetry/icons.json b/homeassistant/components/teslemetry/icons.json index aea98e95e0b..6559acf89dc 100644 --- a/homeassistant/components/teslemetry/icons.json +++ b/homeassistant/components/teslemetry/icons.json @@ -7,6 +7,12 @@ "on": "mdi:hvac" } }, + "storm_mode_active": { + "default": "mdi:weather-sunny", + "state": { + "on": "mdi:weather-lightning-rainy" + } + }, "vehicle_state_is_user_present": { "state": { "off": "mdi:account-remove-outline", @@ -129,7 +135,6 @@ "off": "mdi:car-seat" } }, - "components_customer_preferred_export_rule": { "default": "mdi:transmission-tower", "state": { @@ -220,6 +225,69 @@ }, "wall_connector_state": { "default": "mdi:ev-station" + }, + "total_home_usage": { + "default": "mdi:home-lightning-bolt" + }, + "total_battery_charge": { + "default": "mdi:battery-arrow-up" + }, + "total_battery_discharge": { + "default": "mdi:battery-arrow-down" + }, + "total_solar_production": { + "default": "mdi:solar-power-variant" + }, + "grid_energy_imported": { + "default": "mdi:transmission-tower-import" + }, + "total_grid_energy_exported": { + "default": "mdi:transmission-tower-export" + }, + "solar_energy_exported": { + "default": "mdi:solar-power-variant" + }, + "generator_energy_exported": { + "default": "mdi:generator-stationary" + }, + "grid_services_energy_imported": { + "default": "mdi:transmission-tower-import" + }, + "grid_services_energy_exported": { + "default": "mdi:transmission-tower-export" + }, + "grid_energy_exported_from_solar": { + "default": 
"mdi:solar-power" + }, + "grid_energy_exported_from_generator": { + "default": "mdi:generator-stationary" + }, + "grid_energy_exported_from_battery": { + "default": "mdi:battery-arrow-down" + }, + "battery_energy_exported": { + "default": "mdi:battery-arrow-down" + }, + "battery_energy_imported_from_grid": { + "default": "mdi:transmission-tower-import" + }, + "battery_energy_imported_from_solar": { + "default": "mdi:solar-power" + }, + "battery_energy_imported_from_generator": { + "default": "mdi:generator-stationary" + }, + "consumer_energy_imported_from_grid": { + "default": "mdi:transmission-tower-import" + }, + "consumer_energy_imported_from_solar": { + "default": "mdi:solar-power" + }, + "consumer_energy_imported_from_battery": { + "default": "mdi:home-battery" + }, + "consumer_energy_imported_from_generator": { + "default": "mdi:generator-stationary" } }, "switch": { @@ -259,11 +327,23 @@ } }, "services": { - "navigation_gps_request": "mdi:crosshairs-gps", - "set_scheduled_charging": "mdi:timeline-clock-outline", - "set_scheduled_departure": "mdi:home-clock", - "speed_limit": "mdi:car-speed-limiter", - "valet_mode": "mdi:speedometer-slow", - "time_of_use": "mdi:clock-time-eight-outline" + "navigation_gps_request": { + "service": "mdi:crosshairs-gps" + }, + "set_scheduled_charging": { + "service": "mdi:timeline-clock-outline" + }, + "set_scheduled_departure": { + "service": "mdi:home-clock" + }, + "speed_limit": { + "service": "mdi:car-speed-limiter" + }, + "valet_mode": { + "service": "mdi:speedometer-slow" + }, + "time_of_use": { + "service": "mdi:clock-time-eight-outline" + } } } diff --git a/homeassistant/components/teslemetry/lock.py b/homeassistant/components/teslemetry/lock.py index e23747924f6..4600391145b 100644 --- a/homeassistant/components/teslemetry/lock.py +++ b/homeassistant/components/teslemetry/lock.py @@ -53,7 +53,7 @@ class TeslemetryVehicleLockEntity(TeslemetryVehicleEntity, LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Lock the doors.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.door_lock()) self._attr_is_locked = True @@ -61,7 +61,7 @@ class TeslemetryVehicleLockEntity(TeslemetryVehicleEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock the doors.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.door_unlock()) self._attr_is_locked = False @@ -82,8 +82,6 @@ class TeslemetryCableLockEntity(TeslemetryVehicleEntity, LockEntity): def _async_update_attrs(self) -> None: """Update entity attributes.""" - if self._value is None: - self._attr_is_locked = None self._attr_is_locked = self._value == ENGAGED async def async_lock(self, **kwargs: Any) -> None: @@ -96,7 +94,7 @@ class TeslemetryCableLockEntity(TeslemetryVehicleEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock charge cable lock.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.charge_port_door_open()) self._attr_is_locked = False diff --git a/homeassistant/components/teslemetry/manifest.json b/homeassistant/components/teslemetry/manifest.json index 1780d9f0a10..3736d76bf36 100644 --- a/homeassistant/components/teslemetry/manifest.json +++ b/homeassistant/components/teslemetry/manifest.json @@ -6,6 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/teslemetry", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "quality_scale": "platinum", - "requirements": ["tesla-fleet-api==0.7.3"] + "requirements": ["tesla-fleet-api==0.8.5", "teslemetry-stream==0.4.2"] } diff --git a/homeassistant/components/teslemetry/media_player.py b/homeassistant/components/teslemetry/media_player.py index b21ba0f733d..e0e144ffe3a 100644 --- a/homeassistant/components/teslemetry/media_player.py +++ b/homeassistant/components/teslemetry/media_player.py @@ -115,7 +115,7 @@ class TeslemetryMediaEntity(TeslemetryVehicleEntity, MediaPlayerEntity): async def async_set_volume_level(self, volume: float) -> None: """Set volume level, range 0..1.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command( self.api.adjust_volume(int(volume * self._volume_max)) @@ -126,7 +126,7 @@ class TeslemetryMediaEntity(TeslemetryVehicleEntity, MediaPlayerEntity): async def async_media_play(self) -> None: """Send play command.""" if self.state != MediaPlayerState.PLAYING: - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.media_toggle_playback()) self._attr_state = MediaPlayerState.PLAYING @@ -135,7 +135,7 @@ class TeslemetryMediaEntity(TeslemetryVehicleEntity, MediaPlayerEntity): async def async_media_pause(self) -> None: """Send pause command.""" if self.state == MediaPlayerState.PLAYING: - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.media_toggle_playback()) self._attr_state = MediaPlayerState.PAUSED @@ -143,12 +143,12 @@ class TeslemetryMediaEntity(TeslemetryVehicleEntity, MediaPlayerEntity): async def async_media_next_track(self) -> None: """Send next track command.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.media_next_track()) async def async_media_previous_track(self) -> None: """Send previous track command.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.media_prev_track()) diff --git a/homeassistant/components/teslemetry/models.py b/homeassistant/components/teslemetry/models.py index d05d713c1eb..d3969b30a7c 100644 --- a/homeassistant/components/teslemetry/models.py +++ b/homeassistant/components/teslemetry/models.py @@ -3,14 +3,17 @@ from __future__ import annotations import asyncio +from collections.abc import Callable from dataclasses import dataclass from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from teslemetry_stream import TeslemetryStream from homeassistant.helpers.device_registry import DeviceInfo from .coordinator import ( + TeslemetryEnergyHistoryCoordinator, TeslemetryEnergySiteInfoCoordinator, TeslemetryEnergySiteLiveCoordinator, TeslemetryVehicleDataCoordinator, @@ -32,9 +35,11 @@ class TeslemetryVehicleData: api: VehicleSpecific coordinator: TeslemetryVehicleDataCoordinator + stream: TeslemetryStream vin: str wakelock = asyncio.Lock() device: DeviceInfo + remove_listener: Callable @dataclass @@ -44,5 +49,6 @@ class TeslemetryEnergyData: api: EnergySpecific live_coordinator: TeslemetryEnergySiteLiveCoordinator info_coordinator: TeslemetryEnergySiteInfoCoordinator + history_coordinator: TeslemetryEnergyHistoryCoordinator | 
None id: int device: DeviceInfo diff --git a/homeassistant/components/teslemetry/number.py b/homeassistant/components/teslemetry/number.py index 8c14c8e4186..9ba9c28b199 100644 --- a/homeassistant/components/teslemetry/number.py +++ b/homeassistant/components/teslemetry/number.py @@ -164,7 +164,7 @@ class TeslemetryVehicleNumberEntity(TeslemetryVehicleEntity, NumberEntity): async def async_set_native_value(self, value: float) -> None: """Set new value.""" value = int(value) - self.raise_for_scope() + self.raise_for_scope(self.entity_description.scopes[0]) await self.wake_up_if_asleep() await handle_vehicle_command(self.entity_description.func(self.api, value)) self._attr_native_value = value @@ -200,7 +200,7 @@ class TeslemetryEnergyInfoNumberSensorEntity(TeslemetryEnergyInfoEntity, NumberE async def async_set_native_value(self, value: float) -> None: """Set new value.""" value = int(value) - self.raise_for_scope() + self.raise_for_scope(Scope.ENERGY_CMDS) await handle_command(self.entity_description.func(self.api, value)) self._attr_native_value = value self.async_write_ha_state() diff --git a/homeassistant/components/teslemetry/select.py b/homeassistant/components/teslemetry/select.py index 7cbdd4e31d2..baf1d80ac6c 100644 --- a/homeassistant/components/teslemetry/select.py +++ b/homeassistant/components/teslemetry/select.py @@ -90,10 +90,12 @@ async def async_setup_entry( ) for description in SEAT_HEATER_DESCRIPTIONS for vehicle in entry.runtime_data.vehicles + if description.key in vehicle.coordinator.data ), ( TeslemetryWheelHeaterSelectEntity(vehicle, entry.runtime_data.scopes) for vehicle in entry.runtime_data.vehicles + if vehicle.coordinator.data.get("climate_state_steering_wheel_heater") ), ( TeslemetryOperationSelectEntity(energysite, entry.runtime_data.scopes) @@ -137,14 +139,14 @@ class TeslemetrySeatHeaterSelectEntity(TeslemetryVehicleEntity, SelectEntity): """Handle updated data from the coordinator.""" self._attr_available = self.entity_description.available_fn(self) value = self._value - if value is None: + if not isinstance(value, int): self._attr_current_option = None else: self._attr_current_option = self._attr_options[value] async def async_select_option(self, option: str) -> None: """Change the selected option.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() level = self._attr_options.index(option) # AC must be on to turn on seat heater @@ -182,14 +184,14 @@ class TeslemetryWheelHeaterSelectEntity(TeslemetryVehicleEntity, SelectEntity): """Handle updated data from the coordinator.""" value = self._value - if value is None: + if not isinstance(value, int): self._attr_current_option = None else: self._attr_current_option = self._attr_options[value] async def async_select_option(self, option: str) -> None: """Change the selected option.""" - self.raise_for_scope() + self.raise_for_scope(Scope.VEHICLE_CMDS) await self.wake_up_if_asleep() level = self._attr_options.index(option) # AC must be on to turn on steering wheel heater @@ -226,7 +228,7 @@ class TeslemetryOperationSelectEntity(TeslemetryEnergyInfoEntity, SelectEntity): async def async_select_option(self, option: str) -> None: """Change the selected option.""" - self.raise_for_scope() + self.raise_for_scope(Scope.ENERGY_CMDS) await handle_command(self.api.operation(option)) self._attr_current_option = option self.async_write_ha_state() @@ -256,7 +258,7 @@ class TeslemetryExportRuleSelectEntity(TeslemetryEnergyInfoEntity, SelectEntity) async def 
async_select_option(self, option: str) -> None: """Change the selected option.""" - self.raise_for_scope() + self.raise_for_scope(Scope.ENERGY_CMDS) await handle_command( self.api.grid_import_export(customer_preferred_export_rule=option) ) diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index 90b37cc1dac..95876cc2cf9 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -34,7 +34,9 @@ from homeassistant.util import dt as dt_util from homeassistant.util.variance import ignore_variance from . import TeslemetryConfigEntry +from .const import ENERGY_HISTORY_FIELDS from .entity import ( + TeslemetryEnergyHistoryEntity, TeslemetryEnergyInfoEntity, TeslemetryEnergyLiveEntity, TeslemetryVehicleEntity, @@ -376,21 +378,31 @@ ENERGY_LIVE_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = ( device_class=SensorDeviceClass.POWER, entity_registry_enabled_default=False, ), - SensorEntityDescription(key="island_status", device_class=SensorDeviceClass.ENUM), + SensorEntityDescription( + key="island_status", + device_class=SensorDeviceClass.ENUM, + options=[ + "on_grid", + "off_grid", + "off_grid_intentional", + "off_grid_unintentional", + "island_status_unknown", + ], + ), ) -WALL_CONNECTOR_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = ( - SensorEntityDescription( +WALL_CONNECTOR_DESCRIPTIONS: tuple[TeslemetrySensorEntityDescription, ...] = ( + TeslemetrySensorEntityDescription( key="wall_connector_state", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - SensorEntityDescription( + TeslemetrySensorEntityDescription( key="wall_connector_fault_state", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - SensorEntityDescription( + TeslemetrySensorEntityDescription( key="wall_connector_power", state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfPower.WATT, @@ -398,8 +410,9 @@ WALL_CONNECTOR_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = ( suggested_display_precision=2, device_class=SensorDeviceClass.POWER, ), - SensorEntityDescription( + TeslemetrySensorEntityDescription( key="vin", + value_fn=lambda vin: vin or "disconnected", ), ) @@ -413,6 +426,21 @@ ENERGY_INFO_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription(key="version"), ) +ENERGY_HISTORY_DESCRIPTIONS: tuple[SensorEntityDescription, ...] 
= tuple( + SensorEntityDescription( + key=key, + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + suggested_display_precision=2, + state_class=SensorStateClass.TOTAL_INCREASING, + entity_registry_enabled_default=( + key.startswith("total") or key == "grid_energy_imported" + ), + ) + for key in ENERGY_HISTORY_FIELDS +) + async def async_setup_entry( hass: HomeAssistant, @@ -450,6 +478,12 @@ async def async_setup_entry( for description in ENERGY_INFO_DESCRIPTIONS if description.key in energysite.info_coordinator.data ), + ( # Add energy history sensor + TeslemetryEnergyHistorySensorEntity(energysite, description) + for energysite in entry.runtime_data.energysites + for description in ENERGY_HISTORY_DESCRIPTIONS + if energysite.history_coordinator + ), ) ) @@ -525,13 +559,13 @@ class TeslemetryEnergyLiveSensorEntity(TeslemetryEnergyLiveEntity, SensorEntity) class TeslemetryWallConnectorSensorEntity(TeslemetryWallConnectorEntity, SensorEntity): """Base class for Teslemetry energy site metric sensors.""" - entity_description: SensorEntityDescription + entity_description: TeslemetrySensorEntityDescription def __init__( self, data: TeslemetryEnergyData, din: str, - description: SensorEntityDescription, + description: TeslemetrySensorEntityDescription, ) -> None: """Initialize the sensor.""" self.entity_description = description @@ -543,8 +577,8 @@ class TeslemetryWallConnectorSensorEntity(TeslemetryWallConnectorEntity, SensorE def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" - self._attr_available = not self.is_none - self._attr_native_value = self._value + if self.exists: + self._attr_native_value = self.entity_description.value_fn(self._value) class TeslemetryEnergyInfoSensorEntity(TeslemetryEnergyInfoEntity, SensorEntity): @@ -565,3 +599,22 @@ class TeslemetryEnergyInfoSensorEntity(TeslemetryEnergyInfoEntity, SensorEntity) """Update the attributes of the sensor.""" self._attr_available = not self.is_none self._attr_native_value = self._value + + +class TeslemetryEnergyHistorySensorEntity(TeslemetryEnergyHistoryEntity, SensorEntity): + """Base class for Tesla Fleet energy site metric sensors.""" + + entity_description: SensorEntityDescription + + def __init__( + self, + data: TeslemetryEnergyData, + description: SensorEntityDescription, + ) -> None: + """Initialize the sensor.""" + self.entity_description = description + super().__init__(data, description.key) + + def _async_update_attrs(self) -> None: + """Update the attributes of the sensor.""" + self._attr_native_value = self._value diff --git a/homeassistant/components/teslemetry/strings.json b/homeassistant/components/teslemetry/strings.json index 48eb4aae8bc..4f4bc2ae60c 100644 --- a/homeassistant/components/teslemetry/strings.json +++ b/homeassistant/components/teslemetry/strings.json @@ -1,7 +1,9 @@ { "config": { "abort": { - "already_configured": "Account is already configured" + "already_configured": "Account is already configured", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reauth_account_mismatch": "The reauthentication account does not match the original account" }, "error": { "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]", @@ -15,6 +17,13 @@ "access_token": "[%key:common::config_flow::data::access_token%]" }, "description": "Enter an access token from {url}." 
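Throughout the Teslemetry platform diffs above, the bare self.raise_for_scope() calls gain an explicit scope argument (Scope.VEHICLE_CMDS for vehicle commands, Scope.ENERGY_CMDS for energy-site commands), and the translations further down add a matching "missing_scope" error message. A minimal sketch of what such a helper could look like, assuming an entity mixin that stores the granted scopes (the actual entity.py implementation is not part of this diff, so the names below are illustrative):

# Illustrative sketch only; not the Teslemetry entity base class itself.
from tesla_fleet_api.const import Scope

from homeassistant.exceptions import ServiceValidationError

DOMAIN = "teslemetry"  # assumed here for the sketch


class ScopedEntityMixin:
    """Minimal mixin holding the OAuth scopes granted to the config entry."""

    def __init__(self, scopes: list[Scope]) -> None:
        self.scopes = scopes

    def raise_for_scope(self, scope: Scope) -> None:
        """Raise a translated error if the required scope was not granted."""
        if scope not in self.scopes:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="missing_scope",
                translation_placeholders={"scope": str(scope)},
            )

Passing the scope explicitly lets each command report exactly which permission is missing instead of a generic authorization failure.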
+ }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The {name} integration needs to re-authenticate your account, please enter an access token from {url}", + "data": { + "access_token": "[%key:common::config_flow::data::access_token%]" + } } } }, @@ -56,6 +65,9 @@ "state": { "name": "Status" }, + "storm_mode_active": { + "name": "Storm watch active" + }, "vehicle_state_dashcam_state": { "name": "Dashcam" }, @@ -389,6 +401,16 @@ "grid_services_power": { "name": "Grid services power" }, + "island_status": { + "name": "Island status", + "state": { + "island_status_unknown": "Unknown", + "on_grid": "On grid", + "off_grid": "Off grid", + "off_grid_intentional": "Off grid intentional", + "off_grid_unintentional": "Off grid unintentional" + } + }, "load_power": { "name": "Load power" }, @@ -420,7 +442,10 @@ "name": "version" }, "vin": { - "name": "Vehicle" + "name": "Vehicle", + "state": { + "disconnected": "Disconnected" + } }, "vpp_backup_reserve_percent": { "name": "VPP backup reserve" @@ -433,6 +458,69 @@ }, "wall_connector_state": { "name": "State code" + }, + "solar_energy_exported": { + "name": "Solar exported" + }, + "generator_energy_exported": { + "name": "Generator exported" + }, + "grid_energy_imported": { + "name": "Grid imported" + }, + "grid_services_energy_imported": { + "name": "Grid services imported" + }, + "grid_services_energy_exported": { + "name": "Grid services exported" + }, + "grid_energy_exported_from_solar": { + "name": "Grid exported from solar" + }, + "grid_energy_exported_from_generator": { + "name": "Grid exported from generator" + }, + "grid_energy_exported_from_battery": { + "name": "Grid exported from battery" + }, + "battery_energy_exported": { + "name": "Battery exported" + }, + "battery_energy_imported_from_grid": { + "name": "Battery imported from grid" + }, + "battery_energy_imported_from_solar": { + "name": "Battery imported from solar" + }, + "battery_energy_imported_from_generator": { + "name": "Battery imported from generator" + }, + "consumer_energy_imported_from_grid": { + "name": "Consumer imported from grid" + }, + "consumer_energy_imported_from_solar": { + "name": "Consumer imported from solar" + }, + "consumer_energy_imported_from_battery": { + "name": "Consumer imported from battery" + }, + "consumer_energy_imported_from_generator": { + "name": "Consumer imported from generator" + }, + "total_home_usage": { + "name": "Home usage" + }, + "total_battery_charge": { + "name": "Battery charged" + }, + "total_battery_discharge": { + "name": "Battery discharged" + }, + "total_solar_generation": { + "name": "Solar generated" + }, + "total_grid_energy_exported": { + "name": "Grid exported" } }, "switch": { @@ -498,8 +586,26 @@ "no_energy_site_data_for_device": { "message": "No energy site data for device ID: {device_id}" }, + "command_exception": { + "message": "Command returned exception: {message}" + }, "command_error": { "message": "Command returned error: {error}" + }, + "command_reason": { + "message": "Command was rejected: {reason}" + }, + "command_no_result": { + "message": "Command had no result" + }, + "wake_up_failed": { + "message": "Failed to wake up vehicle: {message}" + }, + "wake_up_timeout": { + "message": "Timed out trying to wake up vehicle" + }, + "missing_scope": { + "message": "Missing required scope: {scope}" } }, "services": { diff --git a/homeassistant/components/teslemetry/switch.py b/homeassistant/components/teslemetry/switch.py index 3204d73410f..6a1cff4c5da 100644 --- 
a/homeassistant/components/teslemetry/switch.py +++ b/homeassistant/components/teslemetry/switch.py @@ -102,6 +102,7 @@ async def async_setup_entry( ) for vehicle in entry.runtime_data.vehicles for description in VEHICLE_DESCRIPTIONS + if description.key in vehicle.coordinator.data ), ( TeslemetryChargeSwitchEntity( @@ -150,14 +151,11 @@ class TeslemetryVehicleSwitchEntity(TeslemetryVehicleEntity, TeslemetrySwitchEnt def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" - if self._value is None: - self._attr_is_on = None - else: - self._attr_is_on = bool(self._value) + self._attr_is_on = bool(self._value) async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the Switch.""" - self.raise_for_scope() + self.raise_for_scope(self.entity_description.scopes[0]) await self.wake_up_if_asleep() await handle_vehicle_command(self.entity_description.on_func(self.api)) self._attr_is_on = True @@ -165,7 +163,7 @@ class TeslemetryVehicleSwitchEntity(TeslemetryVehicleEntity, TeslemetrySwitchEnt async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the Switch.""" - self.raise_for_scope() + self.raise_for_scope(self.entity_description.scopes[0]) await self.wake_up_if_asleep() await handle_vehicle_command(self.entity_description.off_func(self.api)) self._attr_is_on = False @@ -207,7 +205,7 @@ class TeslemetryChargeFromGridSwitchEntity( async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the Switch.""" - self.raise_for_scope() + self.raise_for_scope(Scope.ENERGY_CMDS) await handle_command( self.api.grid_import_export( disallow_charge_from_grid_with_solar_installed=False @@ -218,7 +216,7 @@ class TeslemetryChargeFromGridSwitchEntity( async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the Switch.""" - self.raise_for_scope() + self.raise_for_scope(Scope.ENERGY_CMDS) await handle_command( self.api.grid_import_export( disallow_charge_from_grid_with_solar_installed=True @@ -249,14 +247,14 @@ class TeslemetryStormModeSwitchEntity( async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the Switch.""" - self.raise_for_scope() + self.raise_for_scope(Scope.ENERGY_CMDS) await handle_command(self.api.storm_mode(enabled=True)) self._attr_is_on = True self.async_write_ha_state() async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the Switch.""" - self.raise_for_scope() + self.raise_for_scope(Scope.ENERGY_CMDS) await handle_command(self.api.storm_mode(enabled=False)) self._attr_is_on = False self.async_write_ha_state() diff --git a/homeassistant/components/teslemetry/update.py b/homeassistant/components/teslemetry/update.py index de508fa58d4..670cd0e0eda 100644 --- a/homeassistant/components/teslemetry/update.py +++ b/homeassistant/components/teslemetry/update.py @@ -92,19 +92,20 @@ class TeslemetryUpdateEntity(TeslemetryVehicleEntity, UpdateEntity): SCHEDULED, INSTALLING, ): - self._attr_in_progress = ( - cast(int, self.get("vehicle_state_software_update_install_perc")) - or True - ) + self._attr_in_progress = True + if install_perc := self.get("vehicle_state_software_update_install_perc"): + self._attr_update_percentage = cast(int, install_perc) else: self._attr_in_progress = False + self._attr_update_percentage = None async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: """Install an update.""" - self.raise_for_scope() + self.raise_for_scope(Scope.ENERGY_CMDS) await self.wake_up_if_asleep() await handle_vehicle_command(self.api.schedule_software_update(offset_sec=60)) 
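The update.py hunk above stops folding the install percentage into in_progress and instead reports it through the separate update_percentage attribute, leaving in_progress as a plain boolean. A minimal sketch of that pattern, with the status strings and helper calls being illustrative assumptions rather than the integration's real constants:

from homeassistant.components.update import UpdateEntity


class ExampleUpdateEntity(UpdateEntity):
    """Illustrative update entity splitting the progress flag from the progress value."""

    def _handle_status(self, status: str, install_perc: int | None) -> None:
        # in_progress is now only a flag; the numeric progress lives in
        # update_percentage and must be cleared when no install is running.
        if status in ("scheduled", "installing"):
            self._attr_in_progress = True
            self._attr_update_percentage = install_perc
        else:
            self._attr_in_progress = False
            self._attr_update_percentage = None
        self.async_write_ha_state()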
self._attr_in_progress = True + self._attr_update_percentage = None self.async_write_ha_state() diff --git a/homeassistant/components/tessie/binary_sensor.py b/homeassistant/components/tessie/binary_sensor.py index f425cd10134..fd6565b62b7 100644 --- a/homeassistant/components/tessie/binary_sensor.py +++ b/homeassistant/components/tessie/binary_sensor.py @@ -163,6 +163,7 @@ VEHICLE_DESCRIPTIONS: tuple[TessieBinarySensorEntityDescription, ...] = ( ENERGY_LIVE_DESCRIPTIONS: tuple[BinarySensorEntityDescription, ...] = ( BinarySensorEntityDescription(key="backup_capable"), BinarySensorEntityDescription(key="grid_services_active"), + BinarySensorEntityDescription(key="storm_mode_active"), ) diff --git a/homeassistant/components/tessie/climate.py b/homeassistant/components/tessie/climate.py index e0649432e05..1d26926aeaa 100644 --- a/homeassistant/components/tessie/climate.py +++ b/homeassistant/components/tessie/climate.py @@ -60,7 +60,6 @@ class TessieClimateEntity(TessieEntity, ClimateEntity): TessieClimateKeeper.DOG, TessieClimateKeeper.CAMP, ] - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tessie/config_flow.py b/homeassistant/components/tessie/config_flow.py index 1cbc070e463..14c6b93fdfd 100644 --- a/homeassistant/components/tessie/config_flow.py +++ b/homeassistant/components/tessie/config_flow.py @@ -14,12 +14,12 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . import TessieConfigEntry from .const import DOMAIN TESSIE_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str}) DESCRIPTION_PLACEHOLDERS = { - "url": "[my.tessie.com/settings/api](https://my.tessie.com/settings/api)" + "name": "Tessie", + "url": "[my.tessie.com/settings/api](https://my.tessie.com/settings/api)", } @@ -28,10 +28,6 @@ class TessieConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self._reauth_entry: TessieConfigEntry | None = None - async def async_step_user( self, user_input: Mapping[str, Any] | None = None ) -> ConfigFlowResult: @@ -66,12 +62,9 @@ class TessieConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -79,7 +72,7 @@ class TessieConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Get update API Key from the user.""" errors: dict[str, str] = {} - assert self._reauth_entry + if user_input: try: await get_state_of_all_vehicles( @@ -95,7 +88,7 @@ class TessieConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( - self._reauth_entry, data=user_input + self._get_reauth_entry(), data=user_input ) return self.async_show_form( diff --git a/homeassistant/components/tessie/const.py b/homeassistant/components/tessie/const.py index 90862eff969..4731f5168a2 100644 --- a/homeassistant/components/tessie/const.py +++ b/homeassistant/components/tessie/const.py @@ -13,6 +13,16 @@ MODELS = { "models": "Model S", } +TRANSLATED_ERRORS = { + "unknown": "unknown", + "not supported": "not_supported", + "cable connected": "cable_connected", + "already active": 
"already_active", + "already inactive": "already_inactive", + "incorrect pin": "incorrect_pin", + "no cable": "no_cable", +} + class TessieState(StrEnum): """Tessie status.""" diff --git a/homeassistant/components/tessie/device_tracker.py b/homeassistant/components/tessie/device_tracker.py index d90222bf821..df74cd2a7a7 100644 --- a/homeassistant/components/tessie/device_tracker.py +++ b/homeassistant/components/tessie/device_tracker.py @@ -2,7 +2,6 @@ from __future__ import annotations -from homeassistant.components.device_tracker import SourceType from homeassistant.components.device_tracker.config_entry import TrackerEntity from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -43,11 +42,6 @@ class TessieDeviceTrackerEntity(TessieEntity, TrackerEntity): """Initialize the device tracker.""" super().__init__(vehicle, self.key) - @property - def source_type(self) -> SourceType | str: - """Return the source type of the device tracker.""" - return SourceType.GPS - class TessieDeviceTrackerLocationEntity(TessieDeviceTrackerEntity): """Vehicle Location Device Tracker Class.""" diff --git a/homeassistant/components/tessie/entity.py b/homeassistant/components/tessie/entity.py index 42a3c92b2be..a2b6d3c9761 100644 --- a/homeassistant/components/tessie/entity.py +++ b/homeassistant/components/tessie/entity.py @@ -10,7 +10,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN +from .const import DOMAIN, TRANSLATED_ERRORS from .coordinator import ( TessieEnergySiteInfoCoordinator, TessieEnergySiteLiveCoordinator, @@ -107,10 +107,11 @@ class TessieEntity(TessieBaseEntity): if response["result"] is False: name: str = getattr(self, "name", self.entity_id) reason: str = response.get("reason", "unknown") + translation_key = TRANSLATED_ERRORS.get(reason, "command_failed") raise HomeAssistantError( translation_domain=DOMAIN, - translation_key=reason.replace(" ", "_"), - translation_placeholders={"name": name}, + translation_key=translation_key, + translation_placeholders={"name": name, "message": reason}, ) def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/tessie/icons.json b/homeassistant/components/tessie/icons.json index a967c70e285..0ae087f98e2 100644 --- a/homeassistant/components/tessie/icons.json +++ b/homeassistant/components/tessie/icons.json @@ -22,6 +22,12 @@ "climate_state_auto_steering_wheel_heat": { "default": "mdi:steering" }, + "storm_mode_active": { + "default": "mdi:weather-sunny", + "state": { + "on": "mdi:weather-lightning-rainy" + } + }, "grid_services_power": { "default": "mdi:transmission-tower" }, diff --git a/homeassistant/components/tessie/lock.py b/homeassistant/components/tessie/lock.py index 4f6ce3800e3..76d58a9070c 100644 --- a/homeassistant/components/tessie/lock.py +++ b/homeassistant/components/tessie/lock.py @@ -4,21 +4,11 @@ from __future__ import annotations from typing import Any -from tessie_api import ( - disable_speed_limit, - enable_speed_limit, - lock, - open_unlock_charge_port, - unlock, -) +from tessie_api import lock, open_unlock_charge_port, unlock -from homeassistant.components.automation import automations_with_entity -from homeassistant.components.lock import ATTR_CODE, LockEntity -from homeassistant.components.script import scripts_with_entity -from homeassistant.const import Platform +from 
homeassistant.components.lock import LockEntity from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TessieConfigEntry @@ -37,46 +27,11 @@ async def async_setup_entry( """Set up the Tessie sensor platform from a config entry.""" data = entry.runtime_data - entities: list[TessieEntity] = [ + async_add_entities( klass(vehicle) for klass in (TessieLockEntity, TessieCableLockEntity) for vehicle in data.vehicles - ] - - ent_reg = er.async_get(hass) - - for vehicle in data.vehicles: - entity_id = ent_reg.async_get_entity_id( - Platform.LOCK, - DOMAIN, - f"{vehicle.vin}-vehicle_state_speed_limit_mode_active", - ) - if entity_id: - entity_entry = ent_reg.async_get(entity_id) - assert entity_entry - if entity_entry.disabled: - ent_reg.async_remove(entity_id) - else: - entities.append(TessieSpeedLimitEntity(vehicle)) - - entity_automations = automations_with_entity(hass, entity_id) - entity_scripts = scripts_with_entity(hass, entity_id) - for item in entity_automations + entity_scripts: - ir.async_create_issue( - hass, - DOMAIN, - f"deprecated_speed_limit_{entity_id}_{item}", - breaks_in_ha_version="2024.11.0", - is_fixable=True, - is_persistent=False, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_speed_limit_entity", - translation_placeholders={ - "entity": entity_id, - "info": item, - }, - ) - async_add_entities(entities) + ) class TessieLockEntity(TessieEntity, LockEntity): @@ -105,58 +60,6 @@ class TessieLockEntity(TessieEntity, LockEntity): self.set((self.key, False)) -class TessieSpeedLimitEntity(TessieEntity, LockEntity): - """Speed Limit with PIN entity for Tessie.""" - - _attr_code_format = r"^\d\d\d\d$" - - def __init__( - self, - vehicle: TessieVehicleData, - ) -> None: - """Initialize the sensor.""" - super().__init__(vehicle, "vehicle_state_speed_limit_mode_active") - - @property - def is_locked(self) -> bool | None: - """Return the state of the Lock.""" - return self._value - - async def async_lock(self, **kwargs: Any) -> None: - """Enable speed limit with pin.""" - ir.async_create_issue( - self.coordinator.hass, - DOMAIN, - "deprecated_speed_limit_locked", - breaks_in_ha_version="2024.11.0", - is_fixable=True, - is_persistent=False, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_speed_limit_locked", - ) - code: str | None = kwargs.get(ATTR_CODE) - if code: - await self.run(enable_speed_limit, pin=code) - self.set((self.key, True)) - - async def async_unlock(self, **kwargs: Any) -> None: - """Disable speed limit with pin.""" - ir.async_create_issue( - self.coordinator.hass, - DOMAIN, - "deprecated_speed_limit_unlocked", - breaks_in_ha_version="2024.11.0", - is_fixable=True, - is_persistent=False, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_speed_limit_unlocked", - ) - code: str | None = kwargs.get(ATTR_CODE) - if code: - await self.run(disable_speed_limit, pin=code) - self.set((self.key, False)) - - class TessieCableLockEntity(TessieEntity, LockEntity): """Cable Lock entity for Tessie.""" diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index c921921a0ca..2b8ae924fe3 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -6,6 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/tessie", "iot_class": "cloud_polling", "loggers": ["tessie", "tesla-fleet-api"], - "quality_scale": "platinum", - "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.7.3"] + "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.8.5"] } diff --git a/homeassistant/components/tessie/strings.json b/homeassistant/components/tessie/strings.json index df488523900..5b677594b42 100644 --- a/homeassistant/components/tessie/strings.json +++ b/homeassistant/components/tessie/strings.json @@ -1,7 +1,8 @@ { "config": { "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]", @@ -62,9 +63,6 @@ }, "charge_state_charge_port_latch": { "name": "Charge cable lock" - }, - "vehicle_state_speed_limit_mode_active": { - "name": "Speed limit" } }, "media_player": { @@ -391,6 +389,9 @@ "components_grid_services_enabled": { "name": "Grid services enabled" }, + "storm_mode_active": { + "name": "Storm watch active" + }, "grid_services_active": { "name": "Grid services active" }, @@ -528,40 +529,5 @@ "command_failed": { "message": "Command failed, {message}" } - }, - "issues": { - "deprecated_speed_limit_entity": { - "title": "Detected Tessie speed limit lock entity usage", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::tessie::issues::deprecated_speed_limit_entity::title%]", - "description": "The Tessie integration's speed limit lock entity has been deprecated and will be remove in 2024.11.0.\nHome Assistant detected that entity `{entity}` is being used in `{info}`\n\nYou should remove the speed limit lock entity from `{info}` then click submit to fix this issue." - } - } - } - }, - "deprecated_speed_limit_locked": { - "title": "Detected Tessie speed limit lock entity locked", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::tessie::issues::deprecated_speed_limit_locked::title%]", - "description": "The Tessie integration's speed limit lock entity has been deprecated and will be remove in 2024.11.0.\n\nPlease remove this entity from any automation or script, disable the entity then click submit to fix this issue." - } - } - } - }, - "deprecated_speed_limit_unlocked": { - "title": "Detected Tessie speed limit lock entity unlocked", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::tessie::issues::deprecated_speed_limit_unlocked::title%]", - "description": "The Tessie integration's speed limit lock entity has been deprecated and will be remove in 2024.11.0.\n\nPlease remove this entity from any automation or script, disable the entity then click submit to fix this issue." 
- } - } - } - } } } diff --git a/homeassistant/components/tessie/update.py b/homeassistant/components/tessie/update.py index 959a713047f..f6198fa6c03 100644 --- a/homeassistant/components/tessie/update.py +++ b/homeassistant/components/tessie/update.py @@ -71,14 +71,22 @@ class TessieUpdateEntity(TessieEntity, UpdateEntity): return self.installed_version @property - def in_progress(self) -> bool | int | None: + def in_progress(self) -> bool: + """Update installation progress.""" + return ( + self.get("vehicle_state_software_update_status") + == TessieUpdateStatus.INSTALLING + ) + + @property + def update_percentage(self) -> int | None: """Update installation progress.""" if ( self.get("vehicle_state_software_update_status") == TessieUpdateStatus.INSTALLING ): return self.get("vehicle_state_software_update_install_perc") - return False + return None async def async_install( self, version: str | None, backup: bool, **kwargs: Any diff --git a/homeassistant/components/text/__init__.py b/homeassistant/components/text/__init__.py index 33589be8f41..d0f5ac7d3b7 100644 --- a/homeassistant/components/text/__init__.py +++ b/homeassistant/components/text/__init__.py @@ -5,11 +5,11 @@ from __future__ import annotations from dataclasses import asdict, dataclass from datetime import timedelta from enum import StrEnum -from functools import cached_property import logging import re from typing import Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -20,6 +20,7 @@ from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import ExtraStoredData, RestoreEntity from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import ( ATTR_MAX, @@ -33,6 +34,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[TextEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -46,7 +48,7 @@ __all__ = ["DOMAIN", "TextEntity", "TextEntityDescription", "TextMode"] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Text entities.""" - component = hass.data[DOMAIN] = EntityComponent[TextEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[TextEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -81,14 +83,12 @@ async def _async_set_value(entity: TextEntity, service_call: ServiceCall) -> Non async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[TextEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[TextEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class TextMode(StrEnum): diff --git a/homeassistant/components/text/icons.json b/homeassistant/components/text/icons.json index 355c439ec33..9448c9a7325 100644 --- a/homeassistant/components/text/icons.json +++ b/homeassistant/components/text/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": 
"mdi:form-textbox" + "set_value": { + "service": "mdi:form-textbox" + } } } diff --git a/homeassistant/components/tfiac/climate.py b/homeassistant/components/tfiac/climate.py index 81517a6f1f5..e3aa9060787 100644 --- a/homeassistant/components/tfiac/climate.py +++ b/homeassistant/components/tfiac/climate.py @@ -88,7 +88,6 @@ class TfiacClimate(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.FAHRENHEIT - _enable_turn_on_off_backwards_compatibility = False def __init__(self, hass, client): """Init class.""" diff --git a/homeassistant/components/tfiac/manifest.json b/homeassistant/components/tfiac/manifest.json index 4cac4807ea4..94f82c99d21 100644 --- a/homeassistant/components/tfiac/manifest.json +++ b/homeassistant/components/tfiac/manifest.json @@ -2,7 +2,9 @@ "domain": "tfiac", "name": "Tfiac", "codeowners": ["@fredrike", "@mellado"], + "disabled": "This integration is disabled because we cannot build a valid wheel.", "documentation": "https://www.home-assistant.io/integrations/tfiac", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pytfiac==0.4"] } diff --git a/homeassistant/components/thermoworks_smoke/manifest.json b/homeassistant/components/thermoworks_smoke/manifest.json index 43ce96dd012..f67b041b1e5 100644 --- a/homeassistant/components/thermoworks_smoke/manifest.json +++ b/homeassistant/components/thermoworks_smoke/manifest.json @@ -2,8 +2,10 @@ "domain": "thermoworks_smoke", "name": "ThermoWorks Smoke", "codeowners": [], + "disabled": "This integration is disabled because it creates an unresolvable dependency conflict.", "documentation": "https://www.home-assistant.io/integrations/thermoworks_smoke", "iot_class": "cloud_polling", "loggers": ["thermoworks_smoke"], + "quality_scale": "legacy", "requirements": ["stringcase==1.2.0", "thermoworks-smoke==0.1.8"] } diff --git a/homeassistant/components/thethingsnetwork/__init__.py b/homeassistant/components/thethingsnetwork/__init__.py index 253ce7a052e..d3c6c8356cb 100644 --- a/homeassistant/components/thethingsnetwork/__init__.py +++ b/homeassistant/components/thethingsnetwork/__init__.py @@ -2,55 +2,15 @@ import logging -import voluptuous as vol - from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType -from .const import CONF_APP_ID, DOMAIN, PLATFORMS, TTN_API_HOST +from .const import DOMAIN, PLATFORMS, TTN_API_HOST from .coordinator import TTNCoordinator _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = vol.Schema( - { - # Configuration via yaml not longer supported - keeping to warn about migration - DOMAIN: vol.Schema( - { - vol.Required(CONF_APP_ID): cv.string, - vol.Required("access_key"): cv.string, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Initialize of The Things Network component.""" - - if DOMAIN in config: - ir.async_create_issue( - hass, - DOMAIN, - "manual_migration", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - severity=ir.IssueSeverity.ERROR, - translation_key="manual_migration", - translation_placeholders={ - "domain": DOMAIN, - "v2_v3_migration_url": "https://www.thethingsnetwork.org/forum/c/v2-to-v3-upgrade/102", - "v2_deprecation_url": 
"https://www.thethingsnetwork.org/forum/t/the-things-network-v2-is-permanently-shutting-down-completed/50710", - }, - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Establish connection with The Things Network.""" diff --git a/homeassistant/components/thethingsnetwork/config_flow.py b/homeassistant/components/thethingsnetwork/config_flow.py index cbb780e7064..412c5da4ef9 100644 --- a/homeassistant/components/thethingsnetwork/config_flow.py +++ b/homeassistant/components/thethingsnetwork/config_flow.py @@ -7,7 +7,7 @@ from typing import Any from ttn_client import TTNAuthError, TTNClient import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.helpers.selector import ( TextSelector, @@ -25,8 +25,6 @@ class TTNFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: Mapping[str, Any] | None = None ) -> ConfigFlowResult: @@ -51,11 +49,9 @@ class TTNFlowHandler(ConfigFlow, domain=DOMAIN): if not errors: # Create entry - if self._reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self._reauth_entry, - data=user_input, - reason="reauth_successful", + self._get_reauth_entry(), data=user_input ) await self.async_set_unique_id(user_input[CONF_APP_ID]) self._abort_if_unique_id_configured() @@ -67,8 +63,8 @@ class TTNFlowHandler(ConfigFlow, domain=DOMAIN): # Show form for user to provide settings if not user_input: - if self._reauth_entry: - user_input = self._reauth_entry.data + if self.source == SOURCE_REAUTH: + user_input = self._get_reauth_entry().data else: user_input = {CONF_HOST: TTN_API_HOST} @@ -89,14 +85,9 @@ class TTNFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow initialized by a reauth event.""" - - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/thethingsnetwork/sensor.py b/homeassistant/components/thethingsnetwork/sensor.py index 82dd169a52d..25dd2f1e1eb 100644 --- a/homeassistant/components/thethingsnetwork/sensor.py +++ b/homeassistant/components/thethingsnetwork/sensor.py @@ -4,10 +4,11 @@ import logging from ttn_client import TTNSensorValue -from homeassistant.components.sensor import SensorEntity, StateType +from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from .const import CONF_APP_ID, DOMAIN from .entity import TTNEntity diff --git a/homeassistant/components/thethingsnetwork/strings.json b/homeassistant/components/thethingsnetwork/strings.json index 98572cb318c..f5a4fcef8fd 100644 --- a/homeassistant/components/thethingsnetwork/strings.json +++ b/homeassistant/components/thethingsnetwork/strings.json @@ -22,11 +22,5 @@ "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", 
"unknown": "[%key:common::config_flow::error::unknown%]" } - }, - "issues": { - "manual_migration": { - "description": "Configuring {domain} using YAML was removed as part of migrating to [The Things Network v3]({v2_v3_migration_url}). [The Things Network v2 has shutted down]({v2_deprecation_url}).\n\nPlease remove the {domain} entry from the configuration.yaml and add re-add the integration using the config_flow", - "title": "The {domain} YAML configuration is not supported" - } } } diff --git a/homeassistant/components/thingspeak/manifest.json b/homeassistant/components/thingspeak/manifest.json index ffdc11d9214..aac0ca06426 100644 --- a/homeassistant/components/thingspeak/manifest.json +++ b/homeassistant/components/thingspeak/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/thingspeak", "iot_class": "cloud_push", "loggers": ["thingspeak"], + "quality_scale": "legacy", "requirements": ["thingspeak==1.0.0"] } diff --git a/homeassistant/components/thinkingcleaner/manifest.json b/homeassistant/components/thinkingcleaner/manifest.json index f480340fcf8..048fcfffa05 100644 --- a/homeassistant/components/thinkingcleaner/manifest.json +++ b/homeassistant/components/thinkingcleaner/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/thinkingcleaner", "iot_class": "local_polling", "loggers": ["pythinkingcleaner"], + "quality_scale": "legacy", "requirements": ["pythinkingcleaner==0.0.3"] } diff --git a/homeassistant/components/thomson/device_tracker.py b/homeassistant/components/thomson/device_tracker.py index 339b12f0dc9..abf3e604472 100644 --- a/homeassistant/components/thomson/device_tracker.py +++ b/homeassistant/components/thomson/device_tracker.py @@ -9,7 +9,7 @@ import telnetlib # pylint: disable=deprecated-module import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -41,7 +41,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> ThomsonDeviceScanner | None: """Validate the configuration and return a THOMSON scanner.""" - scanner = ThomsonDeviceScanner(config[DOMAIN]) + scanner = ThomsonDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None @@ -82,7 +82,7 @@ class ThomsonDeviceScanner(DeviceScanner): if not self.success_init: return False - _LOGGER.info("Checking ARP") + _LOGGER.debug("Checking ARP") if not (data := self.get_thomson_data()): return False diff --git a/homeassistant/components/thomson/manifest.json b/homeassistant/components/thomson/manifest.json index 08961cb2746..7f49b57d724 100644 --- a/homeassistant/components/thomson/manifest.json +++ b/homeassistant/components/thomson/manifest.json @@ -3,5 +3,6 @@ "name": "Thomson", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/thomson", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/thread/config_flow.py b/homeassistant/components/thread/config_flow.py index b4b6eac0fc8..568b76d4999 100644 --- a/homeassistant/components/thread/config_flow.py +++ b/homeassistant/components/thread/config_flow.py @@ -15,9 +15,7 @@ class ThreadConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, import_data: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def 
async_step_import(self, import_data: None) -> ConfigFlowResult: """Set up by import from async_setup.""" await self._async_handle_discovery_without_unique_id() return self.async_create_entry(title="Thread", data={}) diff --git a/homeassistant/components/thread/dataset_store.py b/homeassistant/components/thread/dataset_store.py index b880be801a4..fc95e524181 100644 --- a/homeassistant/components/thread/dataset_store.py +++ b/homeassistant/components/thread/dataset_store.py @@ -5,10 +5,10 @@ from __future__ import annotations from asyncio import Event, Task, wait import dataclasses from datetime import datetime -from functools import cached_property import logging from typing import Any, cast +from propcache import cached_property from python_otbr_api import tlv_parser from python_otbr_api.tlv_parser import MeshcopTLVType diff --git a/homeassistant/components/thread/discovery.py b/homeassistant/components/thread/discovery.py index 4f0df6b1533..d4e47c31dd2 100644 --- a/homeassistant/components/thread/discovery.py +++ b/homeassistant/components/thread/discovery.py @@ -8,7 +8,13 @@ import logging from typing import cast from python_otbr_api.mdns import StateBitmap -from zeroconf import BadTypeInNameException, DNSPointer, ServiceListener, Zeroconf +from zeroconf import ( + BadTypeInNameException, + DNSPointer, + ServiceListener, + Zeroconf, + instance_name_from_service_info, +) from zeroconf.asyncio import AsyncServiceInfo, AsyncZeroconf from homeassistant.components import zeroconf @@ -37,6 +43,7 @@ TYPE_PTR = 12 class ThreadRouterDiscoveryData: """Thread router discovery data.""" + instance_name: str addresses: list[str] border_agent_id: str | None brand: str | None @@ -89,6 +96,7 @@ def async_discovery_data_from_service( unconfigured = True return ThreadRouterDiscoveryData( + instance_name=instance_name_from_service_info(service), addresses=service.parsed_addresses(), border_agent_id=border_agent_id.hex() if border_agent_id is not None else None, brand=brand, diff --git a/homeassistant/components/threshold/binary_sensor.py b/homeassistant/components/threshold/binary_sensor.py index a791658f049..3d52d2225be 100644 --- a/homeassistant/components/threshold/binary_sensor.py +++ b/homeassistant/components/threshold/binary_sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable, Mapping import logging -from typing import Any +from typing import Any, Final import voluptuous as vol @@ -37,38 +37,53 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_state_change_event from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import CONF_HYSTERESIS, CONF_LOWER, CONF_UPPER +from .const import ( + ATTR_HYSTERESIS, + ATTR_LOWER, + ATTR_POSITION, + ATTR_SENSOR_VALUE, + ATTR_TYPE, + ATTR_UPPER, + CONF_HYSTERESIS, + CONF_LOWER, + CONF_UPPER, + DEFAULT_HYSTERESIS, + POSITION_ABOVE, + POSITION_BELOW, + POSITION_IN_RANGE, + POSITION_UNKNOWN, + TYPE_LOWER, + TYPE_RANGE, + TYPE_UPPER, +) _LOGGER = logging.getLogger(__name__) -ATTR_HYSTERESIS = "hysteresis" -ATTR_LOWER = "lower" -ATTR_POSITION = "position" -ATTR_SENSOR_VALUE = "sensor_value" -ATTR_TYPE = "type" -ATTR_UPPER = "upper" +DEFAULT_NAME: Final = "Threshold" -DEFAULT_NAME = "Threshold" -DEFAULT_HYSTERESIS = 0.0 -POSITION_ABOVE = "above" -POSITION_BELOW = "below" -POSITION_IN_RANGE = "in_range" -POSITION_UNKNOWN = "unknown" +def no_missing_threshold(value: dict) -> dict: + """Validate data point list is greater than 
polynomial degrees.""" + if value.get(CONF_LOWER) is None and value.get(CONF_UPPER) is None: + raise vol.Invalid("Lower or Upper thresholds are not provided") -TYPE_LOWER = "lower" -TYPE_RANGE = "range" -TYPE_UPPER = "upper" + return value -PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_ENTITY_ID): cv.entity_id, - vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, - vol.Optional(CONF_HYSTERESIS, default=DEFAULT_HYSTERESIS): vol.Coerce(float), - vol.Optional(CONF_LOWER): vol.Coerce(float), - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_UPPER): vol.Coerce(float), - } + +PLATFORM_SCHEMA = vol.All( + BINARY_SENSOR_PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_ENTITY_ID): cv.entity_id, + vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, + vol.Optional(CONF_HYSTERESIS, default=DEFAULT_HYSTERESIS): vol.Coerce( + float + ), + vol.Optional(CONF_LOWER): vol.Coerce(float), + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_UPPER): vol.Coerce(float), + } + ), + no_missing_threshold, ) @@ -125,9 +140,6 @@ async def async_setup_platform( hysteresis: float = config[CONF_HYSTERESIS] device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS) - if lower is None and upper is None: - raise ValueError("Lower or Upper thresholds not provided") - async_add_entities( [ ThresholdSensor( @@ -150,6 +162,9 @@ class ThresholdSensor(BinarySensorEntity): """Representation of a Threshold sensor.""" _attr_should_poll = False + _unrecorded_attributes = frozenset( + {ATTR_ENTITY_ID, ATTR_HYSTERESIS, ATTR_LOWER, ATTR_TYPE, ATTR_UPPER} + ) def __init__( self, @@ -176,7 +191,6 @@ class ThresholdSensor(BinarySensorEntity): self._hysteresis: float = hysteresis self._attr_device_class = device_class self._state_position = POSITION_UNKNOWN - self._state: bool | None = None self.sensor_value: float | None = None async def async_added_to_hass(self) -> None: @@ -228,11 +242,6 @@ class ThresholdSensor(BinarySensorEntity): ) _update_sensor_state() - @property - def is_on(self) -> bool | None: - """Return true if sensor is on.""" - return self._state - @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes of the sensor.""" @@ -260,53 +269,53 @@ class ThresholdSensor(BinarySensorEntity): if self.sensor_value is None: self._state_position = POSITION_UNKNOWN - self._state = None + self._attr_is_on = None return if self.threshold_type == TYPE_LOWER: - if self._state is None: - self._state = False + if self._attr_is_on is None: + self._attr_is_on = False self._state_position = POSITION_ABOVE if below(self.sensor_value, self._threshold_lower): self._state_position = POSITION_BELOW - self._state = True + self._attr_is_on = True elif above(self.sensor_value, self._threshold_lower): self._state_position = POSITION_ABOVE - self._state = False + self._attr_is_on = False return if self.threshold_type == TYPE_UPPER: assert self._threshold_upper is not None - if self._state is None: - self._state = False + if self._attr_is_on is None: + self._attr_is_on = False self._state_position = POSITION_BELOW if above(self.sensor_value, self._threshold_upper): self._state_position = POSITION_ABOVE - self._state = True + self._attr_is_on = True elif below(self.sensor_value, self._threshold_upper): self._state_position = POSITION_BELOW - self._state = False + self._attr_is_on = False return if self.threshold_type == TYPE_RANGE: - if self._state is None: - self._state = True + if self._attr_is_on is 
None: + self._attr_is_on = True self._state_position = POSITION_IN_RANGE if below(self.sensor_value, self._threshold_lower): self._state_position = POSITION_BELOW - self._state = False + self._attr_is_on = False if above(self.sensor_value, self._threshold_upper): self._state_position = POSITION_ABOVE - self._state = False + self._attr_is_on = False elif above(self.sensor_value, self._threshold_lower) and below( self.sensor_value, self._threshold_upper ): self._state_position = POSITION_IN_RANGE - self._state = True + self._attr_is_on = True return @callback diff --git a/homeassistant/components/threshold/const.py b/homeassistant/components/threshold/const.py index 2cb9dc88f0f..7dd44a950ed 100644 --- a/homeassistant/components/threshold/const.py +++ b/homeassistant/components/threshold/const.py @@ -1,9 +1,27 @@ """Constants for the Threshold integration.""" -DOMAIN = "threshold" +from typing import Final -CONF_HYSTERESIS = "hysteresis" -CONF_LOWER = "lower" -CONF_UPPER = "upper" +DOMAIN: Final = "threshold" -DEFAULT_HYSTERESIS = 0.0 +DEFAULT_HYSTERESIS: Final = 0.0 + +ATTR_HYSTERESIS: Final = "hysteresis" +ATTR_LOWER: Final = "lower" +ATTR_POSITION: Final = "position" +ATTR_SENSOR_VALUE: Final = "sensor_value" +ATTR_TYPE: Final = "type" +ATTR_UPPER: Final = "upper" + +CONF_HYSTERESIS: Final = "hysteresis" +CONF_LOWER: Final = "lower" +CONF_UPPER: Final = "upper" + +POSITION_ABOVE: Final = "above" +POSITION_BELOW: Final = "below" +POSITION_IN_RANGE: Final = "in_range" +POSITION_UNKNOWN: Final = "unknown" + +TYPE_LOWER: Final = "lower" +TYPE_RANGE: Final = "range" +TYPE_UPPER: Final = "upper" diff --git a/homeassistant/components/threshold/strings.json b/homeassistant/components/threshold/strings.json index fc9ee8fb7bf..94a1932cbbc 100644 --- a/homeassistant/components/threshold/strings.json +++ b/homeassistant/components/threshold/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Threshold Sensor", + "title": "Create Threshold Sensor", "description": "Create a binary sensor that turns on and off depending on the value of a sensor\n\nOnly lower limit configured - Turn on when the input sensor's value is less than the lower limit.\nOnly upper limit configured - Turn on when the input sensor's value is greater than the upper limit.\nBoth lower and upper limit configured - Turn on when the input sensor's value is in the range [lower limit .. 
upper limit].", "data": { "entity_id": "Input sensor", diff --git a/homeassistant/components/tibber/__init__.py b/homeassistant/components/tibber/__init__.py index 7c44e797780..9b5c7ee1168 100644 --- a/homeassistant/components/tibber/__init__.py +++ b/homeassistant/components/tibber/__init__.py @@ -6,19 +6,13 @@ import aiohttp import tibber from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_ACCESS_TOKEN, - CONF_NAME, - EVENT_HOMEASSISTANT_STOP, - Platform, -) +from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import discovery from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType -from homeassistant.util import dt as dt_util +from homeassistant.util import dt as dt_util, ssl as ssl_util from .const import DATA_HASS_CONFIG, DOMAIN from .services import async_setup_services @@ -47,6 +41,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: access_token=entry.data[CONF_ACCESS_TOKEN], websession=async_get_clientsession(hass), time_zone=dt_util.get_default_time_zone(), + ssl=ssl_util.get_default_context(), ) hass.data[DOMAIN] = tibber_connection @@ -61,30 +56,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except ( TimeoutError, aiohttp.ClientError, - tibber.RetryableHttpException, + tibber.RetryableHttpExceptionError, ) as err: raise ConfigEntryNotReady("Unable to connect") from err - except tibber.InvalidLogin as exp: + except tibber.InvalidLoginError as exp: _LOGGER.error("Failed to login. 
%s", exp) return False - except tibber.FatalHttpException: + except tibber.FatalHttpExceptionError: return False await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - # Use discovery to load platform legacy notify platform - # The use of the legacy notify service was deprecated with HA Core 2024.6 - # Support will be removed with HA Core 2024.12 - hass.async_create_task( - discovery.async_load_platform( - hass, - Platform.NOTIFY, - DOMAIN, - {CONF_NAME: DOMAIN}, - hass.data[DATA_HASS_CONFIG], - ) - ) - return True diff --git a/homeassistant/components/tibber/config_flow.py b/homeassistant/components/tibber/config_flow.py index abee3ea50bc..2d4df5107a2 100644 --- a/homeassistant/components/tibber/config_flow.py +++ b/homeassistant/components/tibber/config_flow.py @@ -47,12 +47,12 @@ class TibberConfigFlow(ConfigFlow, domain=DOMAIN): await tibber_connection.update_info() except TimeoutError: errors[CONF_ACCESS_TOKEN] = ERR_TIMEOUT - except tibber.InvalidLogin: + except tibber.InvalidLoginError: errors[CONF_ACCESS_TOKEN] = ERR_TOKEN except ( aiohttp.ClientError, - tibber.RetryableHttpException, - tibber.FatalHttpException, + tibber.RetryableHttpExceptionError, + tibber.FatalHttpExceptionError, ): errors[CONF_ACCESS_TOKEN] = ERR_CLIENT diff --git a/homeassistant/components/tibber/coordinator.py b/homeassistant/components/tibber/coordinator.py index c3746cb9a58..78841f9db91 100644 --- a/homeassistant/components/tibber/coordinator.py +++ b/homeassistant/components/tibber/coordinator.py @@ -49,9 +49,9 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]): await self._tibber_connection.fetch_consumption_data_active_homes() await self._tibber_connection.fetch_production_data_active_homes() await self._insert_statistics() - except tibber.RetryableHttpException as err: + except tibber.RetryableHttpExceptionError as err: raise UpdateFailed(f"Error communicating with API ({err.status})") from err - except tibber.FatalHttpException: + except tibber.FatalHttpExceptionError: # Fatal error. Reload config entry to show correct error. 
self.hass.async_create_task( self.hass.config_entries.async_reload(self.config_entry.entry_id) diff --git a/homeassistant/components/tibber/icons.json b/homeassistant/components/tibber/icons.json index c6cdd9b0e25..ddc8c735145 100644 --- a/homeassistant/components/tibber/icons.json +++ b/homeassistant/components/tibber/icons.json @@ -1,5 +1,7 @@ { "services": { - "get_prices": "mdi:cash" + "get_prices": { + "service": "mdi:cash" + } } } diff --git a/homeassistant/components/tibber/manifest.json b/homeassistant/components/tibber/manifest.json index 1d8120a4321..3a3a772a934 100644 --- a/homeassistant/components/tibber/manifest.json +++ b/homeassistant/components/tibber/manifest.json @@ -7,6 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/tibber", "iot_class": "cloud_polling", "loggers": ["tibber"], - "quality_scale": "silver", - "requirements": ["pyTibber==0.28.2"] + "requirements": ["pyTibber==0.30.8"] } diff --git a/homeassistant/components/tibber/notify.py b/homeassistant/components/tibber/notify.py index 1c9f86ed502..fdeeeba68ef 100644 --- a/homeassistant/components/tibber/notify.py +++ b/homeassistant/components/tibber/notify.py @@ -2,38 +2,21 @@ from __future__ import annotations -from collections.abc import Callable -from typing import Any - from tibber import Tibber from homeassistant.components.notify import ( - ATTR_TITLE, ATTR_TITLE_DEFAULT, - BaseNotificationService, NotifyEntity, NotifyEntityFeature, - migrate_notify_issue, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import DOMAIN as TIBBER_DOMAIN -async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> TibberNotificationService: - """Get the Tibber notification service.""" - tibber_connection: Tibber = hass.data[TIBBER_DOMAIN] - return TibberNotificationService(tibber_connection.send_notification) - - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: @@ -41,31 +24,6 @@ async def async_setup_entry( async_add_entities([TibberNotificationEntity(entry.entry_id)]) -class TibberNotificationService(BaseNotificationService): - """Implement the notification service for Tibber.""" - - def __init__(self, notify: Callable) -> None: - """Initialize the service.""" - self._notify = notify - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message to Tibber devices.""" - migrate_notify_issue( - self.hass, - TIBBER_DOMAIN, - "Tibber", - "2024.12.0", - service_name=self._service_name, - ) - title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) - try: - await self._notify(title=title, message=message) - except TimeoutError as exc: - raise HomeAssistantError( - translation_domain=TIBBER_DOMAIN, translation_key="send_message_timeout" - ) from exc - - class TibberNotificationEntity(NotifyEntity): """Implement the notification entity service for Tibber.""" diff --git a/homeassistant/components/tibber/sensor.py b/homeassistant/components/tibber/sensor.py index a9090add49b..c1ec7bf2a9e 100644 --- a/homeassistant/components/tibber/sensor.py +++ b/homeassistant/components/tibber/sensor.py @@ -50,7 +50,7 @@ ICON = "mdi:currency-usd" SCAN_INTERVAL = timedelta(minutes=1) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5) PARALLEL_UPDATES = 0 - +TWENTY_MINUTES = 20 * 60 RT_SENSORS_UNIQUE_ID_MIGRATION = { "accumulated_consumption_last_hour": "accumulated consumption current hour", @@ -369,7 +369,7 @@ class TibberSensorElPrice(TibberSensor): """Initialize the sensor.""" super().__init__(tibber_home=tibber_home) self._last_updated: datetime.datetime | None = None - self._spread_load_constant = randrange(5000) + self._spread_load_constant = randrange(TWENTY_MINUTES) self._attr_available = False self._attr_extra_state_attributes = { @@ -383,6 +383,7 @@ class TibberSensorElPrice(TibberSensor): "off_peak_1": None, "peak": None, "off_peak_2": None, + "intraday_price_ranking": None, } self._attr_icon = ICON self._attr_unique_id = self._tibber_home.home_id @@ -396,7 +397,7 @@ class TibberSensorElPrice(TibberSensor): if ( not self._tibber_home.last_data_timestamp or (self._tibber_home.last_data_timestamp - now).total_seconds() - < 5 * 3600 + self._spread_load_constant + < 10 * 3600 - self._spread_load_constant or not self.available ): _LOGGER.debug("Asking for new data") @@ -411,8 +412,9 @@ class TibberSensorElPrice(TibberSensor): return res = self._tibber_home.current_price_data() - self._attr_native_value, price_level, self._last_updated = res + self._attr_native_value, price_level, self._last_updated, price_rank = res self._attr_extra_state_attributes["price_level"] = price_level + self._attr_extra_state_attributes["intraday_price_ranking"] = price_rank attrs = self._tibber_home.current_attributes() self._attr_extra_state_attributes.update(attrs) @@ -608,7 +610,7 @@ class TibberRtEntityCreator: self._async_add_entities(new_entities) -class TibberRtDataCoordinator(DataUpdateCoordinator): # pylint: disable=hass-enforce-coordinator-module +class 
TibberRtDataCoordinator(DataUpdateCoordinator): # pylint: disable=hass-enforce-class-module """Handle Tibber realtime data.""" def __init__( diff --git a/homeassistant/components/tibber/services.py b/homeassistant/components/tibber/services.py index 82353bb78d7..938e96b9917 100644 --- a/homeassistant/components/tibber/services.py +++ b/homeassistant/components/tibber/services.py @@ -3,8 +3,7 @@ from __future__ import annotations import datetime as dt -from datetime import date, datetime -from functools import partial +from datetime import datetime from typing import Any, Final import voluptuous as vol @@ -33,8 +32,8 @@ SERVICE_SCHEMA: Final = vol.Schema( ) -async def __get_prices(call: ServiceCall, *, hass: HomeAssistant) -> ServiceResponse: - tibber_connection = hass.data[DOMAIN] +async def __get_prices(call: ServiceCall) -> ServiceResponse: + tibber_connection = call.hass.data[DOMAIN] start = __get_date(call.data.get(ATTR_START), "start") end = __get_date(call.data.get(ATTR_END), "end") @@ -47,44 +46,38 @@ async def __get_prices(call: ServiceCall, *, hass: HomeAssistant) -> ServiceResp for tibber_home in tibber_connection.get_homes(only_active=True): home_nickname = tibber_home.name - price_info = tibber_home.info["viewer"]["home"]["currentSubscription"][ - "priceInfo" - ] price_data = [ { - "start_time": dt.datetime.fromisoformat(price["startsAt"]), - "price": price["total"], - "level": price["level"], + "start_time": starts_at, + "price": price, + "level": tibber_home.price_level.get(starts_at), } - for key in ("today", "tomorrow") - for price in price_info[key] + for starts_at, price in tibber_home.price_total.items() ] selected_data = [ price for price in price_data - if price["start_time"].replace(tzinfo=None) >= start - and price["start_time"].replace(tzinfo=None) < end + if start <= dt.datetime.fromisoformat(price["start_time"]) < end ] tibber_prices[home_nickname] = selected_data return {"prices": tibber_prices} -def __get_date(date_input: str | None, mode: str | None) -> date | datetime: +def __get_date(date_input: str | None, mode: str | None) -> datetime: """Get date.""" if not date_input: if mode == "end": increment = dt.timedelta(days=1) else: increment = dt.timedelta() - return datetime.fromisoformat(dt_util.now().date().isoformat()) + increment + return dt_util.start_of_local_day() + increment if value := dt_util.parse_datetime(date_input): - return value + return dt_util.as_local(value) raise ServiceValidationError( - "Invalid datetime provided.", translation_domain=DOMAIN, translation_key="invalid_date", translation_placeholders={ @@ -100,7 +93,7 @@ def async_setup_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, PRICE_SERVICE_NAME, - partial(__get_prices, hass=hass), + __get_prices, schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) diff --git a/homeassistant/components/tibber/strings.json b/homeassistant/components/tibber/strings.json index 8d73d435c8c..05b98b97995 100644 --- a/homeassistant/components/tibber/strings.json +++ b/homeassistant/components/tibber/strings.json @@ -119,6 +119,9 @@ } }, "exceptions": { + "invalid_date": { + "message": "Invalid datetime provided {date}" + }, "send_message_timeout": { "message": "Timeout sending message with Tibber" } diff --git a/homeassistant/components/tikteck/manifest.json b/homeassistant/components/tikteck/manifest.json index 067dd6f92cf..57e5269d3b0 100644 --- a/homeassistant/components/tikteck/manifest.json +++ b/homeassistant/components/tikteck/manifest.json @@ -5,5 +5,6 @@ 
"documentation": "https://www.home-assistant.io/integrations/tikteck", "iot_class": "local_polling", "loggers": ["tikteck"], + "quality_scale": "legacy", "requirements": ["tikteck==0.4"] } diff --git a/homeassistant/components/tile/__init__.py b/homeassistant/components/tile/__init__.py index 7dbeea1a4f3..594c4e7bdcb 100644 --- a/homeassistant/components/tile/__init__.py +++ b/homeassistant/components/tile/__init__.py @@ -89,7 +89,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except InvalidAuthError as err: raise ConfigEntryAuthFailed("Invalid credentials") from err except SessionExpiredError: - LOGGER.info("Tile session expired; creating a new one") + LOGGER.debug("Tile session expired; creating a new one") await client.async_init() except TileError as err: raise UpdateFailed(f"Error while retrieving data: {err}") from err @@ -101,6 +101,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = coordinators[tile_uuid] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=tile.name, update_interval=DEFAULT_UPDATE_INTERVAL, update_method=partial(async_update_tile, tile), diff --git a/homeassistant/components/tile/config_flow.py b/homeassistant/components/tile/config_flow.py index 108d9b1b300..53425958341 100644 --- a/homeassistant/components/tile/config_flow.py +++ b/homeassistant/components/tile/config_flow.py @@ -71,11 +71,9 @@ class TileFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=self._username, data=data) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/tile/device_tracker.py b/homeassistant/components/tile/device_tracker.py index b33c2c592b8..71abbbef2c7 100644 --- a/homeassistant/components/tile/device_tracker.py +++ b/homeassistant/components/tile/device_tracker.py @@ -6,11 +6,7 @@ import logging from pytile.tile import Tile -from homeassistant.components.device_tracker import ( - AsyncSeeCallback, - SourceType, - TrackerEntity, -) +from homeassistant.components.device_tracker import AsyncSeeCallback, TrackerEntity from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback @@ -71,7 +67,7 @@ async def async_setup_scanner( ) ) - _LOGGER.info( + _LOGGER.debug( "Your Tile configuration has been imported into the UI; " "please remove it from configuration.yaml" ) @@ -102,40 +98,11 @@ class TileDeviceTracker(CoordinatorEntity[DataUpdateCoordinator[None]], TrackerE """Return if entity is available.""" return super().available and not self._tile.dead - @property - def location_accuracy(self) -> int: - """Return the location accuracy of the device. - - Value in meters. 
- """ - if not self._tile.accuracy: - return super().location_accuracy - return int(self._tile.accuracy) - @property def device_info(self) -> DeviceInfo: """Return device info.""" return DeviceInfo(identifiers={(DOMAIN, self._tile.uuid)}, name=self._tile.name) - @property - def latitude(self) -> float | None: - """Return latitude value of the device.""" - if not self._tile.latitude: - return None - return self._tile.latitude - - @property - def longitude(self) -> float | None: - """Return longitude value of the device.""" - if not self._tile.longitude: - return None - return self._tile.longitude - - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS - @callback def _handle_coordinator_update(self) -> None: """Respond to a DataUpdateCoordinator update.""" @@ -145,6 +112,14 @@ class TileDeviceTracker(CoordinatorEntity[DataUpdateCoordinator[None]], TrackerE @callback def _update_from_latest_data(self) -> None: """Update the entity from the latest data.""" + self._attr_longitude = ( + None if not self._tile.longitude else self._tile.longitude + ) + self._attr_latitude = None if not self._tile.latitude else self._tile.latitude + self._attr_location_accuracy = ( + 0 if not self._tile.accuracy else int(self._tile.accuracy) + ) + self._attr_extra_state_attributes = { ATTR_ALTITUDE: self._tile.altitude, ATTR_IS_LOST: self._tile.lost, diff --git a/homeassistant/components/tile/strings.json b/homeassistant/components/tile/strings.json index 504823c4d16..2d34d13c436 100644 --- a/homeassistant/components/tile/strings.json +++ b/homeassistant/components/tile/strings.json @@ -16,7 +16,8 @@ } }, "error": { - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", diff --git a/homeassistant/components/time/__init__.py b/homeassistant/components/time/__init__.py index 23c9796ec2e..473472356d4 100644 --- a/homeassistant/components/time/__init__.py +++ b/homeassistant/components/time/__init__.py @@ -3,10 +3,10 @@ from __future__ import annotations from datetime import time, timedelta -from functools import cached_property import logging from typing import final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -16,11 +16,13 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import DOMAIN, SERVICE_SET_VALUE _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[TimeEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -37,7 +39,7 @@ async def _async_set_value(entity: TimeEntity, service_call: ServiceCall) -> Non async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Time entities.""" - component = hass.data[DOMAIN] = EntityComponent[TimeEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[TimeEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -51,14 +53,12 @@ async def 
async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[TimeEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[TimeEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class TimeEntityDescription(EntityDescription, frozen_or_thawed=True): diff --git a/homeassistant/components/time/icons.json b/homeassistant/components/time/icons.json index c08e457e04d..f172c28ae0d 100644 --- a/homeassistant/components/time/icons.json +++ b/homeassistant/components/time/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": "mdi:clock-edit" + "set_value": { + "service": "mdi:clock-edit" + } } } diff --git a/homeassistant/components/timer/__init__.py b/homeassistant/components/timer/__init__.py index c2057551239..19b1de427ef 100644 --- a/homeassistant/components/timer/__init__.py +++ b/homeassistant/components/timer/__init__.py @@ -338,7 +338,9 @@ class Timer(collection.CollectionEntity, RestoreEntity): raise HomeAssistantError( f"Timer {self.entity_id} is not running, only active timers can be changed" ) - if self._remaining and (self._remaining + duration) > self._running_duration: + # Check against new remaining time before checking boundaries + new_remaining = (self._end + duration) - dt_util.utcnow().replace(microsecond=0) + if self._remaining and new_remaining > self._running_duration: raise HomeAssistantError( f"Not possible to change timer {self.entity_id} beyond duration" ) @@ -349,7 +351,7 @@ class Timer(collection.CollectionEntity, RestoreEntity): self._listener() self._end += duration - self._remaining = self._end - dt_util.utcnow().replace(microsecond=0) + self._remaining = new_remaining self.async_write_ha_state() self.hass.bus.async_fire(EVENT_TIMER_CHANGED, {ATTR_ENTITY_ID: self.entity_id}) self._listener = async_track_point_in_utc_time( diff --git a/homeassistant/components/timer/icons.json b/homeassistant/components/timer/icons.json index 1e352f7280b..a5319688646 100644 --- a/homeassistant/components/timer/icons.json +++ b/homeassistant/components/timer/icons.json @@ -1,10 +1,22 @@ { "services": { - "start": "mdi:play", - "pause": "mdi:pause", - "cancel": "mdi:cancel", - "finish": "mdi:check", - "change": "mdi:pencil", - "reload": "mdi:reload" + "start": { + "service": "mdi:play" + }, + "pause": { + "service": "mdi:pause" + }, + "cancel": { + "service": "mdi:cancel" + }, + "finish": { + "service": "mdi:check" + }, + "change": { + "service": "mdi:pencil" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/timer/strings.json b/homeassistant/components/timer/strings.json index 1ebf0c6f50a..4fd80f565a2 100644 --- a/homeassistant/components/timer/strings.json +++ b/homeassistant/components/timer/strings.json @@ -1,4 +1,5 @@ { + "title": "Timer", "entity_component": { "_": { "name": "Timer", @@ -33,33 +34,33 @@ "services": { "start": { "name": "[%key:common::action::start%]", - "description": "Starts a timer.", + "description": "Starts a timer or restarts it with a provided duration.", "fields": { "duration": { "name": "Duration", - "description": "Duration the timer requires to finish. 
[optional]." + "description": "Custom duration to restart the timer with." } } }, "pause": { "name": "[%key:common::action::pause%]", - "description": "Pauses a timer." + "description": "Pauses a running timer, retaining the remaining duration for later continuation." }, "cancel": { "name": "Cancel", - "description": "Cancels a timer." + "description": "Resets a timer's duration to the last known initial value without firing the timer finished event." }, "finish": { "name": "Finish", - "description": "Finishes a timer." + "description": "Finishes a running timer earlier than scheduled." }, "change": { "name": "Change", - "description": "Changes a timer.", + "description": "Changes a timer by adding or subtracting a given duration.", "fields": { "duration": { "name": "Duration", - "description": "Duration to add or subtract to the running timer." + "description": "Duration to add to or subtract from the running timer." } } }, diff --git a/homeassistant/components/tmb/manifest.json b/homeassistant/components/tmb/manifest.json index 16efc870504..0e0324a62f4 100644 --- a/homeassistant/components/tmb/manifest.json +++ b/homeassistant/components/tmb/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tmb", "iot_class": "local_polling", "loggers": ["tmb"], + "quality_scale": "legacy", "requirements": ["tmb==0.0.4"] } diff --git a/homeassistant/components/tod/binary_sensor.py b/homeassistant/components/tod/binary_sensor.py index 907df849ea1..3ac90b5578c 100644 --- a/homeassistant/components/tod/binary_sensor.py +++ b/homeassistant/components/tod/binary_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from datetime import datetime, time, timedelta import logging -from typing import TYPE_CHECKING, Any, Literal, TypeGuard +from typing import Any, Literal, TypeGuard import voluptuous as vol @@ -109,6 +109,9 @@ class TodSensor(BinarySensorEntity): """Time of the Day Sensor.""" _attr_should_poll = False + _time_before: datetime + _time_after: datetime + _next_update: datetime def __init__( self, @@ -122,9 +125,6 @@ class TodSensor(BinarySensorEntity): """Init the ToD Sensor...""" self._attr_unique_id = unique_id self._attr_name = name - self._time_before: datetime | None = None - self._time_after: datetime | None = None - self._next_update: datetime | None = None self._after_offset = after_offset self._before_offset = before_offset self._before = before @@ -134,9 +134,6 @@ class TodSensor(BinarySensorEntity): @property def is_on(self) -> bool: """Return True is sensor is on.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None if self._time_after < self._time_before: return self._time_after <= dt_util.utcnow() < self._time_before return False @@ -144,10 +141,6 @@ class TodSensor(BinarySensorEntity): @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the sensor.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None - assert self._next_update is not None if time_zone := dt_util.get_default_time_zone(): return { ATTR_AFTER: self._time_after.astimezone(time_zone).isoformat(), @@ -244,9 +237,6 @@ class TodSensor(BinarySensorEntity): def _turn_to_next_day(self) -> None: """Turn to to the next day.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None if _is_sun_event(self._after): self._time_after = get_astral_event_next( self.hass, 
self._after, self._time_after - self._after_offset @@ -282,17 +272,12 @@ class TodSensor(BinarySensorEntity): self.async_on_remove(_clean_up_listener) - if TYPE_CHECKING: - assert self._next_update is not None self._unsub_update = event.async_track_point_in_utc_time( self.hass, self._point_in_time_listener, self._next_update ) def _calculate_next_update(self) -> None: """Datetime when the next update to the state.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None now = dt_util.utcnow() if now < self._time_after: self._next_update = self._time_after @@ -309,9 +294,6 @@ class TodSensor(BinarySensorEntity): self._calculate_next_update() self.async_write_ha_state() - if TYPE_CHECKING: - assert self._next_update is not None - self._unsub_update = event.async_track_point_in_utc_time( self.hass, self._point_in_time_listener, self._next_update ) diff --git a/homeassistant/components/tod/strings.json b/homeassistant/components/tod/strings.json index bd4a48df915..c32b996c29a 100644 --- a/homeassistant/components/tod/strings.json +++ b/homeassistant/components/tod/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Times of the Day Sensor", + "title": "Create Times of the Day Sensor", "description": "Create a binary sensor that turns on or off depending on the time.", "data": { "after_time": "On time", diff --git a/homeassistant/components/todo/__init__.py b/homeassistant/components/todo/__init__.py index d35d9d6bbea..e4bc549a16b 100644 --- a/homeassistant/components/todo/__init__.py +++ b/homeassistant/components/todo/__init__.py @@ -1,12 +1,14 @@ """The todo integration.""" +from __future__ import annotations + from collections.abc import Callable, Iterable import dataclasses import datetime -from functools import cached_property import logging from typing import Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.components import frontend, websocket_api @@ -36,6 +38,7 @@ from .const import ( ATTR_ITEM, ATTR_RENAME, ATTR_STATUS, + DATA_COMPONENT, DOMAIN, TodoItemStatus, TodoListEntityFeature, @@ -111,7 +114,7 @@ def _validate_supported_features( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Todo entities.""" - component = hass.data[DOMAIN] = EntityComponent[TodoListEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[TodoListEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -194,14 +197,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[TodoListEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[TodoListEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) @dataclasses.dataclass @@ -331,10 +332,9 @@ async def websocket_handle_subscribe_todo_items( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Subscribe to To-do list item updates.""" - component: EntityComponent[TodoListEntity] = hass.data[DOMAIN] entity_id: str = msg["entity_id"] - if not (entity := component.get_entity(entity_id)): + if not (entity := 
hass.data[DATA_COMPONENT].get_entity(entity_id)): connection.send_error( msg["id"], "invalid_entity_id", @@ -387,10 +387,9 @@ async def websocket_handle_todo_item_list( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Handle the list of To-do items in a To-do- list.""" - component: EntityComponent[TodoListEntity] = hass.data[DOMAIN] if ( not (entity_id := msg[CONF_ENTITY_ID]) - or not (entity := component.get_entity(entity_id)) + or not (entity := hass.data[DATA_COMPONENT].get_entity(entity_id)) or not isinstance(entity, TodoListEntity) ): connection.send_error(msg["id"], ERR_NOT_FOUND, "Entity not found") @@ -423,8 +422,7 @@ async def websocket_handle_todo_item_move( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Handle move of a To-do item within a To-do list.""" - component: EntityComponent[TodoListEntity] = hass.data[DOMAIN] - if not (entity := component.get_entity(msg["entity_id"])): + if not (entity := hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])): connection.send_error(msg["id"], ERR_NOT_FOUND, "Entity not found") return diff --git a/homeassistant/components/todo/const.py b/homeassistant/components/todo/const.py index ee7ef53715d..3b0aa37fa7b 100644 --- a/homeassistant/components/todo/const.py +++ b/homeassistant/components/todo/const.py @@ -1,8 +1,19 @@ """Constants for the To-do integration.""" +from __future__ import annotations + from enum import IntFlag, StrEnum +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from . import TodoListEntity DOMAIN = "todo" +DATA_COMPONENT: HassKey[EntityComponent[TodoListEntity]] = HassKey(DOMAIN) ATTR_DUE = "due" ATTR_DUE_DATE = "due_date" diff --git a/homeassistant/components/todo/icons.json b/homeassistant/components/todo/icons.json index 05c9af74630..4040a0c6b8f 100644 --- a/homeassistant/components/todo/icons.json +++ b/homeassistant/components/todo/icons.json @@ -5,10 +5,20 @@ } }, "services": { - "add_item": "mdi:clipboard-plus", - "get_items": "mdi:clipboard-arrow-down", - "remove_completed_items": "mdi:clipboard-remove", - "remove_item": "mdi:clipboard-minus", - "update_item": "mdi:clipboard-edit" + "add_item": { + "service": "mdi:clipboard-plus" + }, + "get_items": { + "service": "mdi:clipboard-arrow-down" + }, + "remove_completed_items": { + "service": "mdi:clipboard-remove" + }, + "remove_item": { + "service": "mdi:clipboard-minus" + }, + "update_item": { + "service": "mdi:clipboard-edit" + } } } diff --git a/homeassistant/components/todo/intent.py b/homeassistant/components/todo/intent.py index cd8ad7f02ab..c678408a576 100644 --- a/homeassistant/components/todo/intent.py +++ b/homeassistant/components/todo/intent.py @@ -6,9 +6,9 @@ import voluptuous as vol from homeassistant.core import HomeAssistant from homeassistant.helpers import intent -from homeassistant.helpers.entity_component import EntityComponent -from . import DOMAIN, TodoItem, TodoItemStatus, TodoListEntity +from . 
import TodoItem, TodoItemStatus, TodoListEntity +from .const import DATA_COMPONENT, DOMAIN INTENT_LIST_ADD_ITEM = "HassListAddItem" @@ -34,10 +34,9 @@ class ListAddItemIntent(intent.IntentHandler): hass = intent_obj.hass slots = self.async_validate_slots(intent_obj.slots) - item = slots["item"]["value"] + item = slots["item"]["value"].strip() list_name = slots["name"]["value"] - component: EntityComponent[TodoListEntity] = hass.data[DOMAIN] target_list: TodoListEntity | None = None # Find matching list @@ -50,7 +49,9 @@ class ListAddItemIntent(intent.IntentHandler): result=match_result, constraints=match_constraints ) - target_list = component.get_entity(match_result.states[0].entity_id) + target_list = hass.data[DATA_COMPONENT].get_entity( + match_result.states[0].entity_id + ) if target_list is None: raise intent.IntentHandleError(f"No to-do list: {list_name}") @@ -61,4 +62,13 @@ class ListAddItemIntent(intent.IntentHandler): response = intent_obj.create_response() response.response_type = intent.IntentResponseType.ACTION_DONE + response.async_set_results( + [ + intent.IntentResponseTarget( + type=intent.IntentResponseTargetType.ENTITY, + name=list_name, + id=match_result.states[0].entity_id, + ) + ] + ) return response diff --git a/homeassistant/components/todo/strings.json b/homeassistant/components/todo/strings.json index 717aa310ecd..245e5c82fc8 100644 --- a/homeassistant/components/todo/strings.json +++ b/homeassistant/components/todo/strings.json @@ -44,11 +44,11 @@ "fields": { "item": { "name": "Item name", - "description": "The name for the to-do list item." + "description": "The current name of the to-do item." }, "rename": { "name": "Rename item", - "description": "The new name of the to-do item" + "description": "The new name for the to-do item" }, "status": { "name": "Set status", @@ -78,7 +78,7 @@ "fields": { "item": { "name": "Item name", - "description": "The name for the to-do list items." + "description": "The name for the to-do list item." 
} } } diff --git a/homeassistant/components/todoist/__init__.py b/homeassistant/components/todoist/__init__.py index 60c40b1c03c..2e30856d0df 100644 --- a/homeassistant/components/todoist/__init__.py +++ b/homeassistant/components/todoist/__init__.py @@ -25,7 +25,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: token = entry.data[CONF_TOKEN] api = TodoistAPIAsync(token) - coordinator = TodoistCoordinator(hass, _LOGGER, SCAN_INTERVAL, api, token) + coordinator = TodoistCoordinator(hass, _LOGGER, entry, SCAN_INTERVAL, api, token) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {}) diff --git a/homeassistant/components/todoist/calendar.py b/homeassistant/components/todoist/calendar.py index 2acd4ea6dc6..62f9fafc02a 100644 --- a/homeassistant/components/todoist/calendar.py +++ b/homeassistant/components/todoist/calendar.py @@ -142,7 +142,7 @@ async def async_setup_platform( project_id_lookup = {} api = TodoistAPIAsync(token) - coordinator = TodoistCoordinator(hass, _LOGGER, SCAN_INTERVAL, api, token) + coordinator = TodoistCoordinator(hass, _LOGGER, None, SCAN_INTERVAL, api, token) await coordinator.async_refresh() async def _shutdown_coordinator(_: Event) -> None: @@ -331,7 +331,11 @@ def async_register_services( # noqa: C901 "type": "reminder_add", "temp_id": str(uuid.uuid1()), "uuid": str(uuid.uuid1()), - "args": {"item_id": api_task.id, "due": reminder_due}, + "args": { + "item_id": api_task.id, + "type": "absolute", + "due": reminder_due, + }, } ] } diff --git a/homeassistant/components/todoist/coordinator.py b/homeassistant/components/todoist/coordinator.py index b55680907ac..2f35741c5ab 100644 --- a/homeassistant/components/todoist/coordinator.py +++ b/homeassistant/components/todoist/coordinator.py @@ -6,6 +6,7 @@ import logging from todoist_api_python.api_async import TodoistAPIAsync from todoist_api_python.models import Label, Project, Section, Task +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -17,12 +18,19 @@ class TodoistCoordinator(DataUpdateCoordinator[list[Task]]): self, hass: HomeAssistant, logger: logging.Logger, + entry: ConfigEntry | None, update_interval: timedelta, api: TodoistAPIAsync, token: str, ) -> None: """Initialize the Todoist coordinator.""" - super().__init__(hass, logger, name="Todoist", update_interval=update_interval) + super().__init__( + hass, + logger, + config_entry=entry, + name="Todoist", + update_interval=update_interval, + ) self.api = api self._projects: list[Project] | None = None self._labels: list[Label] | None = None diff --git a/homeassistant/components/todoist/icons.json b/homeassistant/components/todoist/icons.json index d3b881d480c..73778f1ca23 100644 --- a/homeassistant/components/todoist/icons.json +++ b/homeassistant/components/todoist/icons.json @@ -1,5 +1,7 @@ { "services": { - "new_task": "mdi:checkbox-marked-circle-plus-outline" + "new_task": { + "service": "mdi:checkbox-marked-circle-plus-outline" + } } } diff --git a/homeassistant/components/todoist/strings.json b/homeassistant/components/todoist/strings.json index 5b083ac58bf..721b491bbf5 100644 --- a/homeassistant/components/todoist/strings.json +++ b/homeassistant/components/todoist/strings.json @@ -78,7 +78,7 @@ "description": "When should user be reminded of this task, in natural language." 
}, "reminder_date_lang": { - "name": "Reminder data language", + "name": "Reminder date language", "description": "The language of reminder_date_string." }, "reminder_date": { diff --git a/homeassistant/components/tolo/__init__.py b/homeassistant/components/tolo/__init__.py index ed53015ccb4..d2a43ef525b 100644 --- a/homeassistant/components/tolo/__init__.py +++ b/homeassistant/components/tolo/__init__.py @@ -2,23 +2,12 @@ from __future__ import annotations -from datetime import timedelta -import logging -from typing import NamedTuple - -from tololib import ToloClient, ToloSettings, ToloStatus - from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) -from .const import DEFAULT_RETRY_COUNT, DEFAULT_RETRY_TIMEOUT, DOMAIN +from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator PLATFORMS = [ Platform.BINARY_SENSOR, @@ -32,8 +21,6 @@ PLATFORMS = [ Platform.SWITCH, ] -_LOGGER = logging.getLogger(__name__) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up tolo from a config entry.""" @@ -53,57 +40,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class ToloSaunaData(NamedTuple): - """Compound class for reflecting full state (status and info) of a TOLO Sauna.""" - - status: ToloStatus - settings: ToloSettings - - -class ToloSaunaUpdateCoordinator(DataUpdateCoordinator[ToloSaunaData]): # pylint: disable=hass-enforce-coordinator-module - """DataUpdateCoordinator for TOLO Sauna.""" - - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: - """Initialize ToloSaunaUpdateCoordinator.""" - self.client = ToloClient( - address=entry.data[CONF_HOST], - retry_timeout=DEFAULT_RETRY_TIMEOUT, - retry_count=DEFAULT_RETRY_COUNT, - ) - super().__init__( - hass=hass, - logger=_LOGGER, - name=f"{entry.title} ({entry.data[CONF_HOST]}) Data Update Coordinator", - update_interval=timedelta(seconds=5), - ) - - async def _async_update_data(self) -> ToloSaunaData: - return await self.hass.async_add_executor_job(self._get_tolo_sauna_data) - - def _get_tolo_sauna_data(self) -> ToloSaunaData: - try: - status = self.client.get_status() - settings = self.client.get_settings() - except TimeoutError as error: - raise UpdateFailed("communication timeout") from error - return ToloSaunaData(status, settings) - - -class ToloSaunaCoordinatorEntity(CoordinatorEntity[ToloSaunaUpdateCoordinator]): - """CoordinatorEntity for TOLO Sauna.""" - - _attr_has_entity_name = True - - def __init__( - self, coordinator: ToloSaunaUpdateCoordinator, entry: ConfigEntry - ) -> None: - """Initialize ToloSaunaCoordinatorEntity.""" - super().__init__(coordinator) - self._attr_device_info = DeviceInfo( - name="TOLO Sauna", - identifiers={(DOMAIN, entry.entry_id)}, - manufacturer="SteamTec", - model=self.coordinator.data.status.model.name.capitalize(), - ) diff --git a/homeassistant/components/tolo/binary_sensor.py b/homeassistant/components/tolo/binary_sensor.py index f8cb442c92f..845f8ed22e3 100644 --- a/homeassistant/components/tolo/binary_sensor.py +++ b/homeassistant/components/tolo/binary_sensor.py @@ -9,8 +9,9 @@ from homeassistant.const import EntityCategory from 
homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ToloSaunaCoordinatorEntity, ToloSaunaUpdateCoordinator from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator +from .entity import ToloSaunaCoordinatorEntity async def async_setup_entry( diff --git a/homeassistant/components/tolo/button.py b/homeassistant/components/tolo/button.py index 9a8ac67b9fe..b7c4362ca7b 100644 --- a/homeassistant/components/tolo/button.py +++ b/homeassistant/components/tolo/button.py @@ -8,8 +8,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ToloSaunaCoordinatorEntity, ToloSaunaUpdateCoordinator from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator +from .entity import ToloSaunaCoordinatorEntity async def async_setup_entry( diff --git a/homeassistant/components/tolo/climate.py b/homeassistant/components/tolo/climate.py index 2994d97d54a..5e6428525c1 100644 --- a/homeassistant/components/tolo/climate.py +++ b/homeassistant/components/tolo/climate.py @@ -25,8 +25,9 @@ from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTempera from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ToloSaunaCoordinatorEntity, ToloSaunaUpdateCoordinator from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator +from .entity import ToloSaunaCoordinatorEntity async def async_setup_entry( @@ -59,7 +60,6 @@ class SaunaClimate(ToloSaunaCoordinatorEntity, ClimateEntity): ) _attr_target_temperature_step = 1 _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: ToloSaunaUpdateCoordinator, entry: ConfigEntry diff --git a/homeassistant/components/tolo/config_flow.py b/homeassistant/components/tolo/config_flow.py index 5cf91bdc3a8..d5d7e33a5e0 100644 --- a/homeassistant/components/tolo/config_flow.py +++ b/homeassistant/components/tolo/config_flow.py @@ -23,7 +23,7 @@ class ToloSaunaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _discovered_host: str | None = None + _discovered_host: str @staticmethod def _check_device_availability(host: str) -> bool: diff --git a/homeassistant/components/tolo/coordinator.py b/homeassistant/components/tolo/coordinator.py new file mode 100644 index 00000000000..632cc819f5a --- /dev/null +++ b/homeassistant/components/tolo/coordinator.py @@ -0,0 +1,54 @@ +"""Component to control TOLO Sauna/Steam Bath.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import NamedTuple + +from tololib import ToloClient, ToloSettings, ToloStatus + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DEFAULT_RETRY_COUNT, DEFAULT_RETRY_TIMEOUT + +_LOGGER = logging.getLogger(__name__) + + +class ToloSaunaData(NamedTuple): + """Compound class for reflecting full state (status and info) of a TOLO Sauna.""" + + status: ToloStatus + settings: ToloSettings + + +class ToloSaunaUpdateCoordinator(DataUpdateCoordinator[ToloSaunaData]): + """DataUpdateCoordinator for TOLO Sauna.""" + + def __init__(self, hass: HomeAssistant, entry: 
ConfigEntry) -> None: + """Initialize ToloSaunaUpdateCoordinator.""" + self.client = ToloClient( + address=entry.data[CONF_HOST], + retry_timeout=DEFAULT_RETRY_TIMEOUT, + retry_count=DEFAULT_RETRY_COUNT, + ) + super().__init__( + hass=hass, + logger=_LOGGER, + name=f"{entry.title} ({entry.data[CONF_HOST]}) Data Update Coordinator", + update_interval=timedelta(seconds=5), + ) + + async def _async_update_data(self) -> ToloSaunaData: + return await self.hass.async_add_executor_job(self._get_tolo_sauna_data) + + def _get_tolo_sauna_data(self) -> ToloSaunaData: + try: + status = self.client.get_status() + settings = self.client.get_settings() + except TimeoutError as error: + raise UpdateFailed("communication timeout") from error + return ToloSaunaData(status, settings) diff --git a/homeassistant/components/tolo/entity.py b/homeassistant/components/tolo/entity.py new file mode 100644 index 00000000000..261cfc7cb0c --- /dev/null +++ b/homeassistant/components/tolo/entity.py @@ -0,0 +1,28 @@ +"""Component to control TOLO Sauna/Steam Bath.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator + + +class ToloSaunaCoordinatorEntity(CoordinatorEntity[ToloSaunaUpdateCoordinator]): + """CoordinatorEntity for TOLO Sauna.""" + + _attr_has_entity_name = True + + def __init__( + self, coordinator: ToloSaunaUpdateCoordinator, entry: ConfigEntry + ) -> None: + """Initialize ToloSaunaCoordinatorEntity.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + name="TOLO Sauna", + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer="SteamTec", + model=self.coordinator.data.status.model.name.capitalize(), + ) diff --git a/homeassistant/components/tolo/fan.py b/homeassistant/components/tolo/fan.py index 034bdb0b6a6..9e48778b507 100644 --- a/homeassistant/components/tolo/fan.py +++ b/homeassistant/components/tolo/fan.py @@ -9,8 +9,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ToloSaunaCoordinatorEntity, ToloSaunaUpdateCoordinator from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator +from .entity import ToloSaunaCoordinatorEntity async def async_setup_entry( @@ -28,7 +29,6 @@ class ToloFan(ToloSaunaCoordinatorEntity, FanEntity): _attr_translation_key = "fan" _attr_supported_features = FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON - _enable_turn_on_off_backwards_compatibility = False def __init__( self, coordinator: ToloSaunaUpdateCoordinator, entry: ConfigEntry diff --git a/homeassistant/components/tolo/light.py b/homeassistant/components/tolo/light.py index 809bb367072..eeb37305fe8 100644 --- a/homeassistant/components/tolo/light.py +++ b/homeassistant/components/tolo/light.py @@ -9,8 +9,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ToloSaunaCoordinatorEntity, ToloSaunaUpdateCoordinator from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator +from .entity import ToloSaunaCoordinatorEntity async def async_setup_entry( diff --git a/homeassistant/components/tolo/number.py b/homeassistant/components/tolo/number.py index 2d2c20715fa..73505c5b251 100644 --- a/homeassistant/components/tolo/number.py +++ b/homeassistant/components/tolo/number.py @@ -20,8 +20,9 @@ from homeassistant.const import EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ToloSaunaCoordinatorEntity, ToloSaunaUpdateCoordinator from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator +from .entity import ToloSaunaCoordinatorEntity @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tolo/select.py b/homeassistant/components/tolo/select.py index 96335cecc68..fee1ac1774e 100644 --- a/homeassistant/components/tolo/select.py +++ b/homeassistant/components/tolo/select.py @@ -13,8 +13,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ToloSaunaCoordinatorEntity, ToloSaunaUpdateCoordinator from .const import DOMAIN, AromaTherapySlot, LampMode +from .coordinator import ToloSaunaUpdateCoordinator +from .entity import ToloSaunaCoordinatorEntity @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tolo/sensor.py b/homeassistant/components/tolo/sensor.py index bee01cc283f..0e94ec0ae1e 100644 --- a/homeassistant/components/tolo/sensor.py +++ b/homeassistant/components/tolo/sensor.py @@ -23,8 +23,9 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ToloSaunaCoordinatorEntity, ToloSaunaUpdateCoordinator from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator +from .entity import ToloSaunaCoordinatorEntity @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tolo/switch.py b/homeassistant/components/tolo/switch.py index b90f548ee76..d39dd17f0f3 100644 --- a/homeassistant/components/tolo/switch.py +++ b/homeassistant/components/tolo/switch.py @@ -13,8 +13,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ToloSaunaCoordinatorEntity, ToloSaunaUpdateCoordinator from .const import DOMAIN +from .coordinator import ToloSaunaUpdateCoordinator +from .entity import ToloSaunaCoordinatorEntity @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tomato/device_tracker.py b/homeassistant/components/tomato/device_tracker.py index aaa1d10d08d..dfa8d2bd4e1 100644 --- a/homeassistant/components/tomato/device_tracker.py +++ b/homeassistant/components/tomato/device_tracker.py @@ -11,7 +11,7 @@ import requests import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -46,7 +46,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> TomatoDeviceScanner: """Validate the configuration and returns a Tomato scanner.""" - return TomatoDeviceScanner(config[DOMAIN]) + return TomatoDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) class TomatoDeviceScanner(DeviceScanner): @@ -61,9 +61,10 @@ class TomatoDeviceScanner(DeviceScanner): if port is None: port = 443 if self.ssl else 80 + protocol = "https" if self.ssl else "http" self.req = requests.Request( "POST", - "http{}://{}:{}/update.cgi".format("s" if self.ssl else "", host, port), + f"{protocol}://{host}:{port}/update.cgi", data={"_http_id": http_id, "exec": "devlist"}, auth=requests.auth.HTTPBasicAuth(username, password), ).prepare() @@ -96,7 +97,7 @@ class TomatoDeviceScanner(DeviceScanner): Return boolean if scanning successful. """ - _LOGGER.info("Scanning") + _LOGGER.debug("Scanning") try: if self.ssl: diff --git a/homeassistant/components/tomato/manifest.json b/homeassistant/components/tomato/manifest.json index 6db69d50d82..081d55bc46d 100644 --- a/homeassistant/components/tomato/manifest.json +++ b/homeassistant/components/tomato/manifest.json @@ -3,5 +3,6 @@ "name": "Tomato", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/tomato", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/tomorrowio/__init__.py b/homeassistant/components/tomorrowio/__init__.py index 5fd99e86cb4..73f62735e06 100644 --- a/homeassistant/components/tomorrowio/__init__.py +++ b/homeassistant/components/tomorrowio/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations from pytomorrowio import TomorrowioV4 -from pytomorrowio.const import CURRENT from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN @@ -11,10 +10,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import ATTRIBUTION, DOMAIN, INTEGRATION_NAME +from .const import DOMAIN from .coordinator import TomorrowioDataUpdateCoordinator PLATFORMS = [SENSOR_DOMAIN, WEATHER_DOMAIN] @@ -57,35 +54,3 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> hass.data.pop(DOMAIN) return unload_ok - - -class TomorrowioEntity(CoordinatorEntity[TomorrowioDataUpdateCoordinator]): - """Base Tomorrow.io Entity.""" - - _attr_attribution = ATTRIBUTION - _attr_has_entity_name 
= True - - def __init__( - self, - config_entry: ConfigEntry, - coordinator: TomorrowioDataUpdateCoordinator, - api_version: int, - ) -> None: - """Initialize Tomorrow.io Entity.""" - super().__init__(coordinator) - self.api_version = api_version - self._config_entry = config_entry - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._config_entry.data[CONF_API_KEY])}, - manufacturer=INTEGRATION_NAME, - sw_version=f"v{self.api_version}", - entry_type=DeviceEntryType.SERVICE, - ) - - def _get_current_property(self, property_name: str) -> int | str | float | None: - """Get property from current conditions. - - Used for V4 API. - """ - entry_id = self._config_entry.entry_id - return self.coordinator.data[entry_id].get(CURRENT, {}).get(property_name) diff --git a/homeassistant/components/tomorrowio/config_flow.py b/homeassistant/components/tomorrowio/config_flow.py index 90bb488a7c2..cce41b17498 100644 --- a/homeassistant/components/tomorrowio/config_flow.py +++ b/homeassistant/components/tomorrowio/config_flow.py @@ -91,10 +91,6 @@ def _get_unique_id(hass: HomeAssistant, input_dict: dict[str, Any]): class TomorrowioOptionsConfigFlow(OptionsFlow): """Handle Tomorrow.io options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Tomorrow.io options flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -105,7 +101,7 @@ class TomorrowioOptionsConfigFlow(OptionsFlow): options_schema = { vol.Required( CONF_TIMESTEP, - default=self._config_entry.options[CONF_TIMESTEP], + default=self.config_entry.options[CONF_TIMESTEP], ): vol.In([1, 5, 15, 30, 60]), } @@ -125,7 +121,7 @@ class TomorrowioConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> TomorrowioOptionsConfigFlow: """Get the options flow for this handler.""" - return TomorrowioOptionsConfigFlow(config_entry) + return TomorrowioOptionsConfigFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/tomorrowio/entity.py b/homeassistant/components/tomorrowio/entity.py new file mode 100644 index 00000000000..6560ac58724 --- /dev/null +++ b/homeassistant/components/tomorrowio/entity.py @@ -0,0 +1,45 @@ +"""The Tomorrow.io integration.""" + +from __future__ import annotations + +from pytomorrowio.const import CURRENT + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import ATTRIBUTION, DOMAIN, INTEGRATION_NAME +from .coordinator import TomorrowioDataUpdateCoordinator + + +class TomorrowioEntity(CoordinatorEntity[TomorrowioDataUpdateCoordinator]): + """Base Tomorrow.io Entity.""" + + _attr_attribution = ATTRIBUTION + _attr_has_entity_name = True + + def __init__( + self, + config_entry: ConfigEntry, + coordinator: TomorrowioDataUpdateCoordinator, + api_version: int, + ) -> None: + """Initialize Tomorrow.io Entity.""" + super().__init__(coordinator) + self.api_version = api_version + self._config_entry = config_entry + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._config_entry.data[CONF_API_KEY])}, + manufacturer=INTEGRATION_NAME, + sw_version=f"v{self.api_version}", + entry_type=DeviceEntryType.SERVICE, + ) + + def _get_current_property(self, property_name: str) -> int | str | float | None: + """Get property 
from current conditions. + + Used for V4 API. + """ + entry_id = self._config_entry.entry_id + return self.coordinator.data[entry_id].get(CURRENT, {}).get(property_name) diff --git a/homeassistant/components/tomorrowio/sensor.py b/homeassistant/components/tomorrowio/sensor.py index cfe2d870ccb..7ff17961b58 100644 --- a/homeassistant/components/tomorrowio/sensor.py +++ b/homeassistant/components/tomorrowio/sensor.py @@ -38,7 +38,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.unit_conversion import DistanceConverter, SpeedConverter from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM -from . import TomorrowioEntity from .const import ( DOMAIN, TMRW_ATTR_CARBON_MONOXIDE, @@ -70,6 +69,7 @@ from .const import ( TMRW_ATTR_WIND_GUST, ) from .coordinator import TomorrowioDataUpdateCoordinator +from .entity import TomorrowioEntity @dataclass(frozen=True) diff --git a/homeassistant/components/tomorrowio/weather.py b/homeassistant/components/tomorrowio/weather.py index e77a798f1e4..92b09500e7b 100644 --- a/homeassistant/components/tomorrowio/weather.py +++ b/homeassistant/components/tomorrowio/weather.py @@ -37,7 +37,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sun import is_up from homeassistant.util import dt as dt_util -from . import TomorrowioEntity from .const import ( CLEAR_CONDITIONS, CONDITIONS, @@ -61,6 +60,7 @@ from .const import ( TMRW_ATTR_WIND_SPEED, ) from .coordinator import TomorrowioDataUpdateCoordinator +from .entity import TomorrowioEntity async def async_setup_entry( diff --git a/homeassistant/components/toon/binary_sensor.py b/homeassistant/components/toon/binary_sensor.py index b184e5aacb7..11b13a32ee5 100644 --- a/homeassistant/components/toon/binary_sensor.py +++ b/homeassistant/components/toon/binary_sensor.py @@ -15,7 +15,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .coordinator import ToonDataUpdateCoordinator -from .models import ( +from .entity import ( ToonBoilerDeviceEntity, ToonBoilerModuleDeviceEntity, ToonDisplayDeviceEntity, diff --git a/homeassistant/components/toon/climate.py b/homeassistant/components/toon/climate.py index 1570a637f95..0c2e5b9b232 100644 --- a/homeassistant/components/toon/climate.py +++ b/homeassistant/components/toon/climate.py @@ -28,8 +28,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import ToonDataUpdateCoordinator from .const import DEFAULT_MAX_TEMP, DEFAULT_MIN_TEMP, DOMAIN +from .entity import ToonDisplayDeviceEntity from .helpers import toon_exception_handler -from .models import ToonDisplayDeviceEntity async def async_setup_entry( @@ -52,7 +52,6 @@ class ToonThermostatDevice(ToonDisplayDeviceEntity, ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/toon/config_flow.py b/homeassistant/components/toon/config_flow.py index 40e83c3c9be..450d2472a6c 100644 --- a/homeassistant/components/toon/config_flow.py +++ b/homeassistant/components/toon/config_flow.py @@ -23,6 +23,7 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): agreements: list[Agreement] data: dict[str, Any] + migrate_entry: str | None = None @property def logger(self) -> logging.Logger: @@ -48,7 +49,7 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): return await self.async_step_agreement() async def async_step_import( - self, config: dict[str, Any] | None = None + self, import_data: dict[str, Any] | None ) -> ConfigFlowResult: """Start a configuration flow based on imported data. @@ -57,8 +58,8 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): the version 1 schema. """ - if config is not None and CONF_MIGRATE in config: - self.context.update({CONF_MIGRATE: config[CONF_MIGRATE]}) + if import_data is not None and CONF_MIGRATE in import_data: + self.migrate_entry = import_data[CONF_MIGRATE] else: await self._async_handle_discovery_without_unique_id() @@ -88,8 +89,8 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): return await self._create_entry(self.agreements[agreement_index]) async def _create_entry(self, agreement: Agreement) -> ConfigFlowResult: - if CONF_MIGRATE in self.context: - await self.hass.config_entries.async_remove(self.context[CONF_MIGRATE]) + if self.migrate_entry: + await self.hass.config_entries.async_remove(self.migrate_entry) await self.async_set_unique_id(agreement.agreement_id) self._abort_if_unique_id_configured() diff --git a/homeassistant/components/toon/coordinator.py b/homeassistant/components/toon/coordinator.py index 85ea53de705..586eca34959 100644 --- a/homeassistant/components/toon/coordinator.py +++ b/homeassistant/components/toon/coordinator.py @@ -90,7 +90,7 @@ class ToonDataUpdateCoordinator(DataUpdateCoordinator[Status]): await self.toon.subscribe_webhook( application_id=self.entry.entry_id, url=webhook_url ) - _LOGGER.info("Registered Toon webhook: %s", webhook_url) + _LOGGER.debug("Registered Toon webhook: %s", webhook_url) except ToonError as err: _LOGGER.error("Error during webhook registration - %s", err) diff --git a/homeassistant/components/toon/models.py b/homeassistant/components/toon/entity.py similarity index 100% rename from homeassistant/components/toon/models.py rename to homeassistant/components/toon/entity.py diff --git a/homeassistant/components/toon/helpers.py b/homeassistant/components/toon/helpers.py index 0dd740544df..d65a6d76676 100644 --- a/homeassistant/components/toon/helpers.py +++ b/homeassistant/components/toon/helpers.py @@ -8,7 +8,7 @@ from typing import Any, Concatenate from toonapi import ToonConnectionError, ToonError -from .models import ToonEntity +from .entity import ToonEntity _LOGGER = logging.getLogger(__name__) diff --git 
a/homeassistant/components/toon/icons.json b/homeassistant/components/toon/icons.json index 650bf0b6d19..217f1240893 100644 --- a/homeassistant/components/toon/icons.json +++ b/homeassistant/components/toon/icons.json @@ -1,5 +1,7 @@ { "services": { - "update": "mdi:update" + "update": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/toon/sensor.py b/homeassistant/components/toon/sensor.py index 09fdcb4e4ab..09f36c88079 100644 --- a/homeassistant/components/toon/sensor.py +++ b/homeassistant/components/toon/sensor.py @@ -23,7 +23,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import CURRENCY_EUR, DOMAIN, VOLUME_CM3, VOLUME_LMIN from .coordinator import ToonDataUpdateCoordinator -from .models import ( +from .entity import ( ToonBoilerDeviceEntity, ToonDisplayDeviceEntity, ToonElectricityMeterDeviceEntity, diff --git a/homeassistant/components/toon/strings.json b/homeassistant/components/toon/strings.json index ed29e77a58c..3072896653d 100644 --- a/homeassistant/components/toon/strings.json +++ b/homeassistant/components/toon/strings.json @@ -16,6 +16,7 @@ "already_configured": "The selected agreement is already configured.", "unknown_authorize_url_generation": "[%key:common::config_flow::abort::unknown_authorize_url_generation%]", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "connection_error": "[%key:common::config_flow::error::cannot_connect%]", "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", "no_agreements": "This account has no Toon displays.", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", diff --git a/homeassistant/components/toon/switch.py b/homeassistant/components/toon/switch.py index b491505a8a5..deb2a12f2d0 100644 --- a/homeassistant/components/toon/switch.py +++ b/homeassistant/components/toon/switch.py @@ -19,8 +19,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .coordinator import ToonDataUpdateCoordinator +from .entity import ToonDisplayDeviceEntity, ToonEntity, ToonRequiredKeysMixin from .helpers import toon_exception_handler -from .models import ToonDisplayDeviceEntity, ToonEntity, ToonRequiredKeysMixin async def async_setup_entry( diff --git a/homeassistant/components/torque/manifest.json b/homeassistant/components/torque/manifest.json index b966365bdd4..44047c67dd2 100644 --- a/homeassistant/components/torque/manifest.json +++ b/homeassistant/components/torque/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/torque", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index edbbbb06e70..bc33129a741 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -9,24 +9,16 @@ from total_connect_client.location import TotalConnectLocation from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, + CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, 
- STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from .const import CODE_REQUIRED, DOMAIN from .coordinator import TotalConnectDataUpdateCoordinator from .entity import TotalConnectLocationEntity @@ -39,13 +31,10 @@ async def async_setup_entry( ) -> None: """Set up TotalConnect alarm panels based on a config entry.""" coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + code_required = entry.options.get(CODE_REQUIRED, False) async_add_entities( - TotalConnectAlarm( - coordinator, - location, - partition_id, - ) + TotalConnectAlarm(coordinator, location, partition_id, code_required) for location in coordinator.client.locations.values() for partition_id in location.partitions ) @@ -74,13 +63,13 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): | AlarmControlPanelEntityFeature.ARM_AWAY | AlarmControlPanelEntityFeature.ARM_NIGHT ) - _attr_code_arm_required = False def __init__( self, coordinator: TotalConnectDataUpdateCoordinator, location: TotalConnectLocation, partition_id: int, + require_code: bool, ) -> None: """Initialize the TotalConnect status.""" super().__init__(coordinator, location) @@ -100,8 +89,12 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): self._attr_translation_placeholders = {"partition_id": str(partition_id)} self._attr_unique_id = f"{location.location_id}_{partition_id}" + self._attr_code_arm_required = require_code + if require_code: + self._attr_code_format = CodeFormat.NUMBER + @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" # State attributes can be removed in 2025.3 attr = { @@ -119,29 +112,29 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): else: attr["location_name"] = f"{self.device.name} partition {self._partition_id}" - state: str | None = None + state: AlarmControlPanelState | None = None if self._partition.arming_state.is_disarmed(): - state = STATE_ALARM_DISARMED + state = AlarmControlPanelState.DISARMED elif self._partition.arming_state.is_armed_night(): - state = STATE_ALARM_ARMED_NIGHT + state = AlarmControlPanelState.ARMED_NIGHT elif self._partition.arming_state.is_armed_home(): - state = STATE_ALARM_ARMED_HOME + state = AlarmControlPanelState.ARMED_HOME elif self._partition.arming_state.is_armed_away(): - state = STATE_ALARM_ARMED_AWAY + state = AlarmControlPanelState.ARMED_AWAY elif self._partition.arming_state.is_armed_custom_bypass(): - state = STATE_ALARM_ARMED_CUSTOM_BYPASS + state = AlarmControlPanelState.ARMED_CUSTOM_BYPASS elif self._partition.arming_state.is_arming(): - state = STATE_ALARM_ARMING + state = AlarmControlPanelState.ARMING elif self._partition.arming_state.is_disarming(): - state = STATE_ALARM_DISARMING + state = AlarmControlPanelState.DISARMING elif self._partition.arming_state.is_triggered_police(): - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED attr["triggered_source"] = "Police/Medical" elif self._partition.arming_state.is_triggered_fire(): - state = STATE_ALARM_TRIGGERED + state = 
AlarmControlPanelState.TRIGGERED attr["triggered_source"] = "Fire/Smoke" elif self._partition.arming_state.is_triggered_gas(): - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED attr["triggered_source"] = "Carbon Monoxide" self._attr_extra_state_attributes = attr @@ -150,92 +143,111 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" + self._check_usercode(code) try: await self.hass.async_add_executor_job(self._disarm) except UsercodeInvalid as error: self.coordinator.config_entry.async_start_reauth(self.hass) raise HomeAssistantError( - "TotalConnect usercode is invalid. Did not disarm" + translation_domain=DOMAIN, + translation_key="disarm_invalid_code", ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to disarm {self.device.name}." + translation_domain=DOMAIN, + translation_key="disarm_failed", + translation_placeholders={"device": self.device.name}, ) from error await self.coordinator.async_request_refresh() - def _disarm(self, code=None): + def _disarm(self) -> None: """Disarm synchronous.""" ArmingHelper(self._partition).disarm() async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" + self._check_usercode(code) try: await self.hass.async_add_executor_job(self._arm_home) except UsercodeInvalid as error: self.coordinator.config_entry.async_start_reauth(self.hass) raise HomeAssistantError( - "TotalConnect usercode is invalid. Did not arm home" + translation_domain=DOMAIN, + translation_key="arm_home_invalid_code", ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm home {self.device.name}." + translation_domain=DOMAIN, + translation_key="arm_home_failed", + translation_placeholders={"device": self.device.name}, ) from error await self.coordinator.async_request_refresh() - def _arm_home(self): + def _arm_home(self) -> None: """Arm home synchronous.""" ArmingHelper(self._partition).arm_stay() async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" + self._check_usercode(code) try: await self.hass.async_add_executor_job(self._arm_away) except UsercodeInvalid as error: self.coordinator.config_entry.async_start_reauth(self.hass) raise HomeAssistantError( - "TotalConnect usercode is invalid. Did not arm away" + translation_domain=DOMAIN, + translation_key="arm_away_invalid_code", ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm away {self.device.name}." + translation_domain=DOMAIN, + translation_key="arm_away_failed", + translation_placeholders={"device": self.device.name}, ) from error await self.coordinator.async_request_refresh() - def _arm_away(self, code=None): + def _arm_away(self) -> None: """Arm away synchronous.""" ArmingHelper(self._partition).arm_away() async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" + self._check_usercode(code) try: await self.hass.async_add_executor_job(self._arm_night) except UsercodeInvalid as error: self.coordinator.config_entry.async_start_reauth(self.hass) raise HomeAssistantError( - "TotalConnect usercode is invalid. 
Did not arm night" + translation_domain=DOMAIN, + translation_key="arm_night_invalid_code", ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm night {self.device.name}." + translation_domain=DOMAIN, + translation_key="arm_night_failed", + translation_placeholders={"device": self.device.name}, ) from error await self.coordinator.async_request_refresh() - def _arm_night(self, code=None): + def _arm_night(self) -> None: """Arm night synchronous.""" ArmingHelper(self._partition).arm_stay_night() - async def async_alarm_arm_home_instant(self, code: str | None = None) -> None: + async def async_alarm_arm_home_instant(self) -> None: """Send arm home instant command.""" try: await self.hass.async_add_executor_job(self._arm_home_instant) except UsercodeInvalid as error: self.coordinator.config_entry.async_start_reauth(self.hass) raise HomeAssistantError( - "TotalConnect usercode is invalid. Did not arm home instant" + translation_domain=DOMAIN, + translation_key="arm_home_instant_invalid_code", ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm home instant {self.device.name}." + translation_domain=DOMAIN, + translation_key="arm_home_instant_failed", + translation_placeholders={"device": self.device.name}, ) from error await self.coordinator.async_request_refresh() @@ -243,21 +255,34 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): """Arm home instant synchronous.""" ArmingHelper(self._partition).arm_stay_instant() - async def async_alarm_arm_away_instant(self, code: str | None = None) -> None: + async def async_alarm_arm_away_instant(self) -> None: """Send arm away instant command.""" try: await self.hass.async_add_executor_job(self._arm_away_instant) except UsercodeInvalid as error: self.coordinator.config_entry.async_start_reauth(self.hass) raise HomeAssistantError( - "TotalConnect usercode is invalid. Did not arm away instant" + translation_domain=DOMAIN, + translation_key="arm_away_instant_invalid_code", ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm away instant {self.device.name}." 
+ translation_domain=DOMAIN, + translation_key="arm_away_instant_failed", + translation_placeholders={"device": self.device.name}, ) from error await self.coordinator.async_request_refresh() - def _arm_away_instant(self, code=None): + def _arm_away_instant(self): """Arm away instant synchronous.""" ArmingHelper(self._partition).arm_away_instant() + + def _check_usercode(self, code): + """Check if the run-time entered code matches configured code.""" + if ( + self._attr_code_arm_required + and self.coordinator.client.usercodes[self._location.location_id] != code + ): + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="invalid_pin" + ) diff --git a/homeassistant/components/totalconnect/config_flow.py b/homeassistant/components/totalconnect/config_flow.py index 63973fd44e9..3f5d05fda13 100644 --- a/homeassistant/components/totalconnect/config_flow.py +++ b/homeassistant/components/totalconnect/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any +from typing import TYPE_CHECKING, Any from total_connect_client.client import TotalConnectClient from total_connect_client.exceptions import AuthenticationError @@ -17,8 +17,9 @@ from homeassistant.config_entries import ( ) from homeassistant.const import CONF_LOCATION, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback +from homeassistant.helpers.typing import VolDictType -from .const import AUTO_BYPASS, CONF_USERCODES, DOMAIN +from .const import AUTO_BYPASS, CODE_REQUIRED, CONF_USERCODES, DOMAIN PASSWORD_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) @@ -28,15 +29,16 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + client: TotalConnectClient + def __init__(self) -> None: """Initialize the config flow.""" - self.username = None - self.password = None - self.usercodes: dict[str, Any] = {} - self.client = None + self.username: str | None = None + self.password: str | None = None + self.usercodes: dict[int, str | None] = {} async def async_step_user( - self, user_input: dict[str, Any] | None = None + self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} @@ -70,18 +72,20 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=data_schema, errors=errors ) - async def async_step_locations(self, user_entry=None): + async def async_step_locations( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle the user locations and associated usercodes.""" errors = {} - if user_entry is not None: + if user_input is not None: for location_id in self.usercodes: if self.usercodes[location_id] is None: valid = await self.hass.async_add_executor_job( self.client.locations[location_id].set_usercode, - user_entry[CONF_USERCODES], + user_input[CONF_USERCODES], ) if valid: - self.usercodes[location_id] = user_entry[CONF_USERCODES] + self.usercodes[location_id] = user_input[CONF_USERCODES] else: errors[CONF_LOCATION] = "usercode" break @@ -111,11 +115,11 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): self.usercodes[location_id] = None # show the next location that needs a usercode - location_codes = {} + location_codes: VolDictType = {} location_for_user = "" for location_id in self.usercodes: if self.usercodes[location_id] is None: - location_for_user = location_id + location_for_user = str(location_id) location_codes[ vol.Required( CONF_USERCODES, @@ -141,7 +145,9 @@ class 
TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors = {} if user_input is None: @@ -166,6 +172,8 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): ) existing_entry = await self.async_set_unique_id(self.username) + if TYPE_CHECKING: + assert existing_entry is not None new_entry = { CONF_USERNAME: self.username, CONF_PASSWORD: user_input[CONF_PASSWORD], @@ -185,17 +193,15 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> TotalConnectOptionsFlowHandler: """Get options flow.""" - return TotalConnectOptionsFlowHandler(config_entry) + return TotalConnectOptionsFlowHandler() class TotalConnectOptionsFlowHandler(OptionsFlow): """TotalConnect options flow handler.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, bool] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) @@ -207,7 +213,11 @@ class TotalConnectOptionsFlowHandler(OptionsFlow): vol.Required( AUTO_BYPASS, default=self.config_entry.options.get(AUTO_BYPASS, False), - ): bool + ): bool, + vol.Required( + CODE_REQUIRED, + default=self.config_entry.options.get(CODE_REQUIRED, False), + ): bool, } ), ) diff --git a/homeassistant/components/totalconnect/const.py b/homeassistant/components/totalconnect/const.py index 1e98adaaa70..005d21a9376 100644 --- a/homeassistant/components/totalconnect/const.py +++ b/homeassistant/components/totalconnect/const.py @@ -3,6 +3,7 @@ DOMAIN = "totalconnect" CONF_USERCODES = "usercodes" AUTO_BYPASS = "auto_bypass_low_battery" +CODE_REQUIRED = "code_required" # Most TotalConnect alarms will work passing '-1' as usercode DEFAULT_USERCODE = "-1" diff --git a/homeassistant/components/totalconnect/icons.json b/homeassistant/components/totalconnect/icons.json index cb62a79c7bb..a21df03e15d 100644 --- a/homeassistant/components/totalconnect/icons.json +++ b/homeassistant/components/totalconnect/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "arm_away_instant": "mdi:shield-lock", - "arm_home_instant": "mdi:shield-home" + "arm_away_instant": { + "service": "mdi:shield-lock" + }, + "arm_home_instant": { + "service": "mdi:shield-home" + } } } diff --git a/homeassistant/components/totalconnect/manifest.json b/homeassistant/components/totalconnect/manifest.json index 87ec14621d9..33306a7adba 100644 --- a/homeassistant/components/totalconnect/manifest.json +++ b/homeassistant/components/totalconnect/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/totalconnect", "iot_class": "cloud_polling", "loggers": ["total_connect_client"], - "requirements": ["total-connect-client==2024.5"] + "requirements": ["total-connect-client==2024.12"] } diff --git a/homeassistant/components/totalconnect/quality_scale.yaml b/homeassistant/components/totalconnect/quality_scale.yaml new file mode 100644 index 00000000000..a8e5b60f7ee --- /dev/null +++ b/homeassistant/components/totalconnect/quality_scale.yaml @@ -0,0 +1,62 @@ +rules: + # Bronze + config-flow: todo + test-before-configure: 
done + unique-config-entry: done + config-flow-test-coverage: todo + runtime-data: todo + test-before-setup: todo + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: todo + dependency-transparency: done + action-setup: todo + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + docs-actions: done + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: todo + entity-unavailable: todo + action-exceptions: todo + reauthentication-flow: done + parallel-updates: todo + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: done + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: todo + stale-devices: todo + diagnostics: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + dynamic-devices: todo + discovery-update-info: todo + repair-issues: todo + docs-use-cases: done + + # stopped here.... + docs-supported-devices: todo + docs-supported-functions: todo + docs-data-update: todo + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: done + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/totalconnect/strings.json b/homeassistant/components/totalconnect/strings.json index faa136137db..004056ef9ac 100644 --- a/homeassistant/components/totalconnect/strings.json +++ b/homeassistant/components/totalconnect/strings.json @@ -33,9 +33,9 @@ "step": { "init": { "title": "TotalConnect Options", - "description": "Automatically bypass zones the moment they report a low battery.", "data": { - "auto_bypass_low_battery": "Auto bypass low battery" + "auto_bypass_low_battery": "Auto bypass low battery", + "code_required": "Require user to enter code for alarm actions" } } } @@ -76,5 +76,46 @@ "name": "Bypass" } } + }, + "exceptions": { + "invalid_pin": { + "message": "Incorrect code entered" + }, + "disarm_failed": { + "message": "Failed to disarm {device}" + }, + "disarm_invalid_code": { + "message": "Usercode is invalid, did not disarm" + }, + "arm_home_failed": { + "message": "Failed to arm home {device}" + }, + "arm_home_invalid_code": { + "message": "Usercode is invalid, did not arm home" + }, + "arm_away_failed": { + "message": "Failed to arm away {device}" + }, + "arm_away_invalid_code": { + "message": "Usercode is invalid, did not arm away" + }, + "arm_night_failed": { + "message": "Failed to arm night {device}" + }, + "arm_night_invalid_code": { + "message": "Usercode is invalid, did not arm night" + }, + "arm_home_instant_failed": { + "message": "Failed to arm home instant {device}" + }, + "arm_home_instant_invalid_code": { + "message": "Usercode is invalid, did not arm home instant" + }, + "arm_away_instant_failed": { + "message": "Failed to arm away instant {device}" + }, + "arm_away_instant_invalid_code": { + "message": "Usercode is invalid, did not arm away instant" + } } } diff --git a/homeassistant/components/touchline/climate.py b/homeassistant/components/touchline/climate.py index 7b14404ee34..e9d27341cb7 100644 --- a/homeassistant/components/touchline/climate.py +++ b/homeassistant/components/touchline/climate.py @@ -70,7 +70,6 @@ class Touchline(ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) 
_attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, touchline_thermostat): """Initialize the Touchline device.""" diff --git a/homeassistant/components/touchline/manifest.json b/homeassistant/components/touchline/manifest.json index 340edb8381a..c003cca97a4 100644 --- a/homeassistant/components/touchline/manifest.json +++ b/homeassistant/components/touchline/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/touchline", "iot_class": "local_polling", "loggers": ["pytouchline"], + "quality_scale": "legacy", "requirements": ["pytouchline==0.7"] } diff --git a/homeassistant/components/touchline_sl/__init__.py b/homeassistant/components/touchline_sl/__init__.py new file mode 100644 index 00000000000..45a85185673 --- /dev/null +++ b/homeassistant/components/touchline_sl/__init__.py @@ -0,0 +1,63 @@ +"""The Roth Touchline SL integration.""" + +from __future__ import annotations + +import asyncio + +from pytouchlinesl import TouchlineSL + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .const import DOMAIN +from .coordinator import TouchlineSLModuleCoordinator + +PLATFORMS: list[Platform] = [Platform.CLIMATE] + +type TouchlineSLConfigEntry = ConfigEntry[list[TouchlineSLModuleCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: TouchlineSLConfigEntry) -> bool: + """Set up Roth Touchline SL from a config entry.""" + account = TouchlineSL( + username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD] + ) + + coordinators: list[TouchlineSLModuleCoordinator] = [ + TouchlineSLModuleCoordinator(hass, module) for module in await account.modules() + ] + + await asyncio.gather( + *[ + coordinator.async_config_entry_first_refresh() + for coordinator in coordinators + ] + ) + + device_registry = dr.async_get(hass) + + # Create a new Device for each coordinator to represent each module + for c in coordinators: + module = c.data.module + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, module.id)}, + name=module.name, + manufacturer="Roth", + model=module.type, + sw_version=module.version, + ) + + entry.runtime_data = coordinators + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: TouchlineSLConfigEntry +) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/touchline_sl/climate.py b/homeassistant/components/touchline_sl/climate.py new file mode 100644 index 00000000000..8a0ffc4cd86 --- /dev/null +++ b/homeassistant/components/touchline_sl/climate.py @@ -0,0 +1,116 @@ +"""Roth Touchline SL climate integration implementation for Home Assistant.""" + +from typing import Any + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACAction, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import TouchlineSLConfigEntry +from .coordinator import TouchlineSLModuleCoordinator +from .entity import TouchlineSLZoneEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TouchlineSLConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Touchline devices.""" + coordinators = entry.runtime_data + async_add_entities( + TouchlineSLZone(coordinator=coordinator, zone_id=zone_id) + for coordinator in coordinators + for zone_id in coordinator.data.zones + ) + + +CONSTANT_TEMPERATURE = "constant_temperature" + + +class TouchlineSLZone(TouchlineSLZoneEntity, ClimateEntity): + """Roth Touchline SL Zone.""" + + _attr_hvac_action = HVACAction.IDLE + _attr_hvac_mode = HVACMode.HEAT + _attr_hvac_modes = [HVACMode.HEAT] + _attr_name = None + _attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE + ) + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = "zone" + + def __init__(self, coordinator: TouchlineSLModuleCoordinator, zone_id: int) -> None: + """Construct a Touchline SL climate zone.""" + super().__init__(coordinator, zone_id) + + self._attr_unique_id = ( + f"module-{self.coordinator.data.module.id}-zone-{self.zone_id}" + ) + + # Call this in __init__ so data is populated right away, since it's + # already available in the coordinator data. + self.set_attr() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self.set_attr() + super()._handle_coordinator_update() + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: + return + + await self.zone.set_temperature(temperature) + await self.coordinator.async_request_refresh() + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Assign the zone to a particular global schedule.""" + if not self.zone: + return + + if preset_mode == CONSTANT_TEMPERATURE and self._attr_target_temperature: + await self.zone.set_temperature(temperature=self._attr_target_temperature) + await self.coordinator.async_request_refresh() + return + + if schedule := self.coordinator.data.schedules[preset_mode]: + await self.zone.set_schedule(schedule_id=schedule.id) + await self.coordinator.async_request_refresh() + + def set_attr(self) -> None: + """Populate attributes with data from the coordinator.""" + schedule_names = self.coordinator.data.schedules.keys() + + self._attr_current_temperature = self.zone.temperature + self._attr_target_temperature = self.zone.target_temperature + self._attr_current_humidity = int(self.zone.humidity) + self._attr_preset_modes = [*schedule_names, CONSTANT_TEMPERATURE] + + if self.zone.mode == "constantTemp": + self._attr_preset_mode = CONSTANT_TEMPERATURE + elif self.zone.mode == "globalSchedule": + schedule = self.zone.schedule + self._attr_preset_mode = schedule.name + + if self.zone.algorithm == "heating": + self._attr_hvac_action = ( + HVACAction.HEATING if self.zone.relay_on else HVACAction.IDLE + ) + self._attr_hvac_mode = HVACMode.HEAT + self._attr_hvac_modes = [HVACMode.HEAT] + elif self.zone.algorithm == "cooling": + self._attr_hvac_action = ( + HVACAction.COOLING if self.zone.relay_on else HVACAction.IDLE + ) + self._attr_hvac_mode = HVACMode.COOL + self._attr_hvac_modes = [HVACMode.COOL] diff --git a/homeassistant/components/touchline_sl/config_flow.py b/homeassistant/components/touchline_sl/config_flow.py new file mode 100644 
index 00000000000..91d959b5a0a --- /dev/null +++ b/homeassistant/components/touchline_sl/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Roth Touchline SL integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from pytouchlinesl import TouchlineSL +from pytouchlinesl.client import RothAPIError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class TouchlineSLConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Roth Touchline SL.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step that gathers username and password.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + account = TouchlineSL( + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + ) + await account.user_id() + except RothAPIError as e: + if e.status == 401: + errors["base"] = "invalid_auth" + else: + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + unique_account_id = await account.user_id() + await self.async_set_unique_id(str(unique_account_id)) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=user_input[CONF_USERNAME], data=user_input + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/touchline_sl/const.py b/homeassistant/components/touchline_sl/const.py new file mode 100644 index 00000000000..e441e3721b3 --- /dev/null +++ b/homeassistant/components/touchline_sl/const.py @@ -0,0 +1,3 @@ +"""Constants for the Roth Touchline SL integration.""" + +DOMAIN = "touchline_sl" diff --git a/homeassistant/components/touchline_sl/coordinator.py b/homeassistant/components/touchline_sl/coordinator.py new file mode 100644 index 00000000000..cd74ba6130f --- /dev/null +++ b/homeassistant/components/touchline_sl/coordinator.py @@ -0,0 +1,59 @@ +"""Define an object to manage fetching Touchline SL data.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import timedelta +import logging + +from pytouchlinesl import Module, Zone +from pytouchlinesl.client import RothAPIError +from pytouchlinesl.client.models import GlobalScheduleModel + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class TouchlineSLModuleData: + """Provide type safe way of accessing module data from the coordinator.""" + + module: Module + zones: dict[int, Zone] + schedules: dict[str, GlobalScheduleModel] + + +class TouchlineSLModuleCoordinator(DataUpdateCoordinator[TouchlineSLModuleData]): + """A coordinator to manage the fetching of Touchline SL data.""" + + def __init__(self, hass: HomeAssistant, module: Module) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + logger=_LOGGER, + name=f"Touchline SL ({module.name})", + update_interval=timedelta(seconds=30), + ) + + self.module = module + + async def 
_async_update_data(self) -> TouchlineSLModuleData: + """Fetch data from the upstream API and pre-process into the right format.""" + try: + zones = await self.module.zones() + schedules = await self.module.schedules() + except RothAPIError as error: + if error.status == 401: + # Trigger a reauthentication if the data update fails due to + # bad authentication. + raise ConfigEntryAuthFailed from error + raise UpdateFailed(error) from error + + return TouchlineSLModuleData( + module=self.module, + zones={z.id: z for z in zones}, + schedules={s.name: s for s in schedules}, + ) diff --git a/homeassistant/components/touchline_sl/entity.py b/homeassistant/components/touchline_sl/entity.py new file mode 100644 index 00000000000..637ad8955eb --- /dev/null +++ b/homeassistant/components/touchline_sl/entity.py @@ -0,0 +1,38 @@ +"""Base class for Touchline SL zone entities.""" + +from pytouchlinesl import Zone + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import TouchlineSLModuleCoordinator + + +class TouchlineSLZoneEntity(CoordinatorEntity[TouchlineSLModuleCoordinator]): + """Defines a base Touchline SL zone entity.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: TouchlineSLModuleCoordinator, zone_id: int) -> None: + """Initialize touchline entity.""" + super().__init__(coordinator) + self.zone_id = zone_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, str(zone_id))}, + name=self.zone.name, + manufacturer="Roth", + via_device=(DOMAIN, coordinator.data.module.id), + model="zone", + suggested_area=self.zone.name, + ) + + @property + def zone(self) -> Zone: + """Return the device object from the coordinator data.""" + return self.coordinator.data.zones[self.zone_id] + + @property + def available(self) -> bool: + """Return if the device is available.""" + return super().available and self.zone_id in self.coordinator.data.zones diff --git a/homeassistant/components/touchline_sl/manifest.json b/homeassistant/components/touchline_sl/manifest.json new file mode 100644 index 00000000000..ab07ae770fd --- /dev/null +++ b/homeassistant/components/touchline_sl/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "touchline_sl", + "name": "Roth Touchline SL", + "codeowners": ["@jnsgruk"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/touchline_sl", + "integration_type": "hub", + "iot_class": "cloud_polling", + "requirements": ["pytouchlinesl==0.3.0"] +} diff --git a/homeassistant/components/touchline_sl/strings.json b/homeassistant/components/touchline_sl/strings.json new file mode 100644 index 00000000000..e3a0ef5a741 --- /dev/null +++ b/homeassistant/components/touchline_sl/strings.json @@ -0,0 +1,36 @@ +{ + "config": { + "flow_title": "Touchline SL Setup Flow", + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "step": { + "user": { + "title": "Login to Touchline SL", + "description": "Your credentials for the Roth Touchline SL mobile app/web service", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "climate": { + 
"zone": { + "state_attributes": { + "preset_mode": { + "state": { + "constant_temperature": "Constant temperature" + } + } + } + } + } + } +} diff --git a/homeassistant/components/tplink/__init__.py b/homeassistant/components/tplink/__init__.py index 83cfc733716..a7ffce686be 100644 --- a/homeassistant/components/tplink/__init__.py +++ b/homeassistant/components/tplink/__init__.py @@ -26,10 +26,12 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_ALIAS, CONF_AUTHENTICATION, + CONF_DEVICE, CONF_HOST, CONF_MAC, CONF_MODEL, CONF_PASSWORD, + CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant, callback @@ -44,8 +46,12 @@ from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import ConfigType from .const import ( + CONF_AES_KEYS, + CONF_CONFIG_ENTRY_MINOR_VERSION, + CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, + CONF_USES_HTTP, CONNECT_TIMEOUT, DISCOVERY_TIMEOUT, DOMAIN, @@ -85,9 +91,7 @@ def async_trigger_discovery( CONF_ALIAS: device.alias or mac_alias(device.mac), CONF_HOST: device.host, CONF_MAC: formatted_mac, - CONF_DEVICE_CONFIG: device.config.to_dict( - exclude_credentials=True, - ), + CONF_DEVICE: device, }, ) @@ -136,25 +140,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo host: str = entry.data[CONF_HOST] credentials = await get_credentials(hass) entry_credentials_hash = entry.data.get(CONF_CREDENTIALS_HASH) + entry_use_http = entry.data.get(CONF_USES_HTTP, False) + entry_aes_keys = entry.data.get(CONF_AES_KEYS) + port_override = entry.data.get(CONF_PORT) - config: DeviceConfig | None = None - if config_dict := entry.data.get(CONF_DEVICE_CONFIG): + conn_params: Device.ConnectionParameters | None = None + if conn_params_dict := entry.data.get(CONF_CONNECTION_PARAMETERS): try: - config = DeviceConfig.from_dict(config_dict) - except KasaException: + conn_params = Device.ConnectionParameters.from_dict(conn_params_dict) + except (KasaException, TypeError, ValueError, LookupError): _LOGGER.warning( - "Invalid connection type dict for %s: %s", host, config_dict + "Invalid connection parameters dict for %s: %s", host, conn_params_dict ) - if not config: - config = DeviceConfig(host) - else: - config.host = host - - config.timeout = CONNECT_TIMEOUT - if config.uses_http is True: - config.http_client = create_async_tplink_clientsession(hass) - + client = create_async_tplink_clientsession(hass) if entry_use_http else None + config = DeviceConfig( + host, + timeout=CONNECT_TIMEOUT, + http_client=client, + aes_keys=entry_aes_keys, + port_override=port_override, + ) + if conn_params: + config.connection_type = conn_params # If we have in memory credentials use them otherwise check for credentials_hash if credentials: config.credentials = credentials @@ -173,14 +181,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo raise ConfigEntryNotReady from ex device_credentials_hash = device.credentials_hash - device_config_dict = device.config.to_dict(exclude_credentials=True) - # Do not store the credentials hash inside the device_config - device_config_dict.pop(CONF_CREDENTIALS_HASH, None) + + # We not need to update the connection parameters or the use_http here + # because if they were wrong we would have failed to connect. + # Discovery will update those if necessary. 
updates: dict[str, Any] = {} if device_credentials_hash and device_credentials_hash != entry_credentials_hash: updates[CONF_CREDENTIALS_HASH] = device_credentials_hash - if device_config_dict != config_dict: - updates[CONF_DEVICE_CONFIG] = device_config_dict + if entry_aes_keys != device.config.aes_keys: + updates[CONF_AES_KEYS] = device.config.aes_keys if entry.data.get(CONF_ALIAS) != device.alias: updates[CONF_ALIAS] = device.alias if entry.data.get(CONF_MODEL) != device.model: @@ -307,12 +316,20 @@ def _device_id_is_mac_or_none(mac: str, device_ids: Iterable[str]) -> str | None async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Migrate old entry.""" - version = config_entry.version - minor_version = config_entry.minor_version + entry_version = config_entry.version + entry_minor_version = config_entry.minor_version + # having a condition to check for the current version allows + # tests to be written per migration step. + config_flow_minor_version = CONF_CONFIG_ENTRY_MINOR_VERSION - _LOGGER.debug("Migrating from version %s.%s", version, minor_version) - - if version == 1 and minor_version < 3: + new_minor_version = 3 + if ( + entry_version == 1 + and entry_minor_version < new_minor_version <= config_flow_minor_version + ): + _LOGGER.debug( + "Migrating from version %s.%s", entry_version, entry_minor_version + ) # Previously entities on child devices added themselves to the parent # device and set their device id as identifiers along with mac # as a connection which creates a single device entry linked by all @@ -359,12 +376,19 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> new_identifiers, ) - minor_version = 3 - hass.config_entries.async_update_entry(config_entry, minor_version=3) + hass.config_entries.async_update_entry( + config_entry, minor_version=new_minor_version + ) - _LOGGER.debug("Migration to version %s.%s complete", version, minor_version) + _LOGGER.debug( + "Migration to version %s.%s complete", entry_version, new_minor_version + ) - if version == 1 and minor_version == 3: + new_minor_version = 4 + if ( + entry_version == 1 + and entry_minor_version < new_minor_version <= config_flow_minor_version + ): # credentials_hash stored in the device_config should be moved to data. 
updates: dict[str, Any] = {} if config_dict := config_entry.data.get(CONF_DEVICE_CONFIG): @@ -372,15 +396,44 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> if credentials_hash := config_dict.pop(CONF_CREDENTIALS_HASH, None): updates[CONF_CREDENTIALS_HASH] = credentials_hash updates[CONF_DEVICE_CONFIG] = config_dict - minor_version = 4 hass.config_entries.async_update_entry( config_entry, data={ **config_entry.data, **updates, }, - minor_version=minor_version, + minor_version=new_minor_version, + ) + _LOGGER.debug( + "Migration to version %s.%s complete", entry_version, new_minor_version ) - _LOGGER.debug("Migration to version %s.%s complete", version, minor_version) + new_minor_version = 5 + if ( + entry_version == 1 + and entry_minor_version < new_minor_version <= config_flow_minor_version + ): + # complete device config no longer to be stored, only required + # attributes like connection parameters and aes_keys + updates = {} + entry_data = { + k: v for k, v in config_entry.data.items() if k != CONF_DEVICE_CONFIG + } + if config_dict := config_entry.data.get(CONF_DEVICE_CONFIG): + assert isinstance(config_dict, dict) + if connection_parameters := config_dict.get("connection_type"): + updates[CONF_CONNECTION_PARAMETERS] = connection_parameters + if (use_http := config_dict.get(CONF_USES_HTTP)) is not None: + updates[CONF_USES_HTTP] = use_http + hass.config_entries.async_update_entry( + config_entry, + data={ + **entry_data, + **updates, + }, + minor_version=new_minor_version, + ) + _LOGGER.debug( + "Migration to version %s.%s complete", entry_version, new_minor_version + ) return True diff --git a/homeassistant/components/tplink/binary_sensor.py b/homeassistant/components/tplink/binary_sensor.py index 97bb794a8f9..e14ecf01749 100644 --- a/homeassistant/components/tplink/binary_sensor.py +++ b/homeassistant/components/tplink/binary_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Final +from typing import Final, cast from kasa import Feature @@ -58,6 +58,10 @@ BINARY_SENSOR_DESCRIPTIONS: Final = ( key="water_alert", device_class=BinarySensorDeviceClass.MOISTURE, ), + TPLinkBinarySensorEntityDescription( + key="motion_detected", + device_class=BinarySensorDeviceClass.MOTION, + ), ) BINARYSENSOR_DESCRIPTIONS_MAP = {desc.key: desc for desc in BINARY_SENSOR_DESCRIPTIONS} @@ -75,6 +79,7 @@ async def async_setup_entry( device = parent_coordinator.device entities = CoordinatedTPLinkFeatureEntity.entities_for_device_and_its_children( + hass=hass, device=device, coordinator=parent_coordinator, feature_type=Feature.Type.BinarySensor, @@ -93,4 +98,4 @@ class TPLinkBinarySensorEntity(CoordinatedTPLinkFeatureEntity, BinarySensorEntit @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_is_on = self._feature.value + self._attr_is_on = cast(bool | None, self._feature.value) diff --git a/homeassistant/components/tplink/button.py b/homeassistant/components/tplink/button.py index 4dcc27858a8..131325e489d 100644 --- a/homeassistant/components/tplink/button.py +++ b/homeassistant/components/tplink/button.py @@ -7,11 +7,18 @@ from typing import Final from kasa import Feature -from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.components.button import ( + DOMAIN as BUTTON_DOMAIN, + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.components.siren import DOMAIN as SIREN_DOMAIN 
from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TPLinkConfigEntry +from .deprecate import DeprecatedInfo, async_cleanup_deprecated from .entity import CoordinatedTPLinkFeatureEntity, TPLinkFeatureEntityDescription @@ -25,9 +32,23 @@ class TPLinkButtonEntityDescription( BUTTON_DESCRIPTIONS: Final = [ TPLinkButtonEntityDescription( key="test_alarm", + deprecated_info=DeprecatedInfo( + platform=BUTTON_DOMAIN, + new_platform=SIREN_DOMAIN, + breaks_in_ha_version="2025.4.0", + ), ), TPLinkButtonEntityDescription( key="stop_alarm", + deprecated_info=DeprecatedInfo( + platform=BUTTON_DOMAIN, + new_platform=SIREN_DOMAIN, + breaks_in_ha_version="2025.4.0", + ), + ), + TPLinkButtonEntityDescription( + key="reboot", + device_class=ButtonDeviceClass.RESTART, ), ] @@ -46,6 +67,7 @@ async def async_setup_entry( device = parent_coordinator.device entities = CoordinatedTPLinkFeatureEntity.entities_for_device_and_its_children( + hass=hass, device=device, coordinator=parent_coordinator, feature_type=Feature.Type.Action, @@ -53,6 +75,7 @@ async def async_setup_entry( descriptions=BUTTON_DESCRIPTIONS_MAP, child_coordinators=children_coordinators, ) + async_cleanup_deprecated(hass, BUTTON_DOMAIN, config_entry.entry_id, entities) async_add_entities(entities) diff --git a/homeassistant/components/tplink/climate.py b/homeassistant/components/tplink/climate.py index 3bd6aba5c26..75a6599959d 100644 --- a/homeassistant/components/tplink/climate.py +++ b/homeassistant/components/tplink/climate.py @@ -15,7 +15,7 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.const import PRECISION_WHOLE +from homeassistant.const import PRECISION_TENTHS from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -64,10 +64,9 @@ class TPLinkClimateEntity(CoordinatedTPLinkEntity, ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF] - _attr_precision = PRECISION_WHOLE + _attr_precision = PRECISION_TENTHS # This disables the warning for async_turn_{on,off}, can be removed later. 
- _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -116,8 +115,8 @@ class TPLinkClimateEntity(CoordinatedTPLinkEntity, ClimateEntity): @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_current_temperature = self._temp_feature.value - self._attr_target_temperature = self._target_feature.value + self._attr_current_temperature = cast(float | None, self._temp_feature.value) + self._attr_target_temperature = cast(float | None, self._target_feature.value) self._attr_hvac_mode = ( HVACMode.HEAT if self._state_feature.value else HVACMode.OFF @@ -134,7 +133,9 @@ class TPLinkClimateEntity(CoordinatedTPLinkEntity, ClimateEntity): self._attr_hvac_action = HVACAction.OFF return - self._attr_hvac_action = STATE_TO_ACTION[self._mode_feature.value] + self._attr_hvac_action = STATE_TO_ACTION[ + cast(ThermostatState, self._mode_feature.value) + ] def _get_unique_id(self) -> str: """Return unique id.""" diff --git a/homeassistant/components/tplink/config_flow.py b/homeassistant/components/tplink/config_flow.py index 1c02466aef1..63f1b4e125b 100644 --- a/homeassistant/components/tplink/config_flow.py +++ b/homeassistant/components/tplink/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from typing import Any +from typing import TYPE_CHECKING, Any, Self from kasa import ( AuthenticationError, @@ -32,6 +32,7 @@ from homeassistant.const import ( CONF_MAC, CONF_MODEL, CONF_PASSWORD, + CONF_PORT, CONF_USERNAME, ) from homeassistant.core import callback @@ -46,9 +47,11 @@ from . import ( set_credentials, ) from .const import ( - CONF_CONNECTION_TYPE, + CONF_AES_KEYS, + CONF_CONFIG_ENTRY_MINOR_VERSION, + CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, - CONF_DEVICE_CONFIG, + CONF_USES_HTTP, CONNECT_TIMEOUT, DOMAIN, ) @@ -64,8 +67,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for tplink.""" VERSION = 1 - MINOR_VERSION = 4 - reauth_entry: ConfigEntry | None = None + MINOR_VERSION = CONF_CONFIG_ENTRY_MINOR_VERSION + + host: str | None = None + port: int | None = None def __init__(self) -> None: """Initialize the config flow.""" @@ -87,38 +92,43 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): return await self._async_handle_discovery( discovery_info[CONF_HOST], discovery_info[CONF_MAC], - discovery_info[CONF_DEVICE_CONFIG], + discovery_info[CONF_DEVICE], ) @callback def _get_config_updates( - self, entry: ConfigEntry, host: str, config: dict + self, entry: ConfigEntry, host: str, device: Device | None ) -> dict | None: """Return updates if the host or device config has changed.""" entry_data = entry.data - entry_config_dict = entry_data.get(CONF_DEVICE_CONFIG) - if entry_config_dict == config and entry_data[CONF_HOST] == host: + updates: dict[str, Any] = {} + new_connection_params = False + if entry_data[CONF_HOST] != host: + updates[CONF_HOST] = host + if device: + device_conn_params_dict = device.config.connection_type.to_dict() + entry_conn_params_dict = entry_data.get(CONF_CONNECTION_PARAMETERS) + if device_conn_params_dict != entry_conn_params_dict: + new_connection_params = True + updates[CONF_CONNECTION_PARAMETERS] = device_conn_params_dict + updates[CONF_USES_HTTP] = device.config.uses_http + if not updates: return None - updates = {**entry.data, CONF_DEVICE_CONFIG: config, CONF_HOST: host} + updates = {**entry.data, **updates} # If the connection parameters have changed the credentials_hash will be invalid. 
- if ( - entry_config_dict - and isinstance(entry_config_dict, dict) - and entry_config_dict.get(CONF_CONNECTION_TYPE) - != config.get(CONF_CONNECTION_TYPE) - ): + if new_connection_params: updates.pop(CONF_CREDENTIALS_HASH, None) _LOGGER.debug( "Connection type changed for %s from %s to: %s", host, - entry_config_dict.get(CONF_CONNECTION_TYPE), - config.get(CONF_CONNECTION_TYPE), + entry_conn_params_dict, + device_conn_params_dict, ) return updates @callback def _update_config_if_entry_in_setup_error( - self, entry: ConfigEntry, host: str, config: dict + self, entry: ConfigEntry, host: str, device: Device | None ) -> ConfigFlowResult | None: """If discovery encounters a device that is in SETUP_ERROR or SETUP_RETRY update the device config.""" if entry.state not in ( @@ -126,7 +136,7 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): ConfigEntryState.SETUP_RETRY, ): return None - if updates := self._get_config_updates(entry, host, config): + if updates := self._get_config_updates(entry, host, device): return self.async_update_reload_and_abort( entry, data=updates, @@ -135,33 +145,36 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): return None async def _async_handle_discovery( - self, host: str, formatted_mac: str, config: dict | None = None + self, host: str, formatted_mac: str, device: Device | None = None ) -> ConfigFlowResult: """Handle any discovery.""" current_entry = await self.async_set_unique_id( formatted_mac, raise_on_progress=False ) - if ( - config - and current_entry - and ( - result := self._update_config_if_entry_in_setup_error( - current_entry, host, config - ) + if current_entry and ( + result := self._update_config_if_entry_in_setup_error( + current_entry, host, device ) ): return result self._abort_if_unique_id_configured(updates={CONF_HOST: host}) self._async_abort_entries_match({CONF_HOST: host}) - self.context[CONF_HOST] = host - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == host: - return self.async_abort(reason="already_in_progress") + self.host = host + if self.hass.config_entries.flow.async_has_matching_flow(self): + return self.async_abort(reason="already_in_progress") credentials = await get_credentials(self.hass) try: - await self._async_try_discover_and_update( - host, credentials, raise_on_progress=True - ) + # If integration discovery there will be a device or None for dhcp + if device: + self._discovered_device = device + await self._async_try_connect(device, credentials) + else: + await self._async_try_discover_and_update( + host, + credentials, + raise_on_progress=True, + raise_on_timeout=True, + ) except AuthenticationError: return await self.async_step_discovery_auth_confirm() except KasaException: @@ -169,6 +182,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_discovery_confirm() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow.host == self.host + async def async_step_discovery_auth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -245,6 +262,26 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): step_id="discovery_confirm", description_placeholders=placeholders ) + @staticmethod + def _async_get_host_port(host_str: str) -> tuple[str, int | None]: + """Parse the host string for host and port.""" + if "[" in host_str: + _, _, bracketed = host_str.partition("[") + host, _, port_str = bracketed.partition("]") + _, _, port_str = port_str.partition(":") 
+ else: + host, _, port_str = host_str.partition(":") + + if not port_str: + return host, None + + try: + port = int(port_str) + except ValueError: + return host, None + + return host, port + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -255,12 +292,29 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: if not (host := user_input[CONF_HOST]): return await self.async_step_pick_device() - self._async_abort_entries_match({CONF_HOST: host}) - self.context[CONF_HOST] = host + + host, port = self._async_get_host_port(host) + + match_dict = {CONF_HOST: host} + if port: + self.port = port + match_dict[CONF_PORT] = port + self._async_abort_entries_match(match_dict) + + self.host = host credentials = await get_credentials(self.hass) try: device = await self._async_try_discover_and_update( - host, credentials, raise_on_progress=False + host, + credentials, + raise_on_progress=False, + raise_on_timeout=False, + port=port, + ) or await self._async_try_connect_all( + host, + credentials=credentials, + raise_on_progress=False, + port=port, ) except AuthenticationError: return await self.async_step_user_auth_confirm() @@ -268,6 +322,8 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" placeholders["error"] = str(ex) else: + if not device: + return await self.async_step_user_auth_confirm() return self._async_create_entry_from_device(device) return self.async_show_form( @@ -282,18 +338,28 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Dialog that informs the user that auth is required.""" errors: dict[str, str] = {} - host = self.context[CONF_HOST] - placeholders: dict[str, str] = {CONF_HOST: host} + if TYPE_CHECKING: + # self.host is set by async_step_user and async_step_pick_device + assert self.host is not None + placeholders: dict[str, str] = {CONF_HOST: self.host} - assert self._discovered_device is not None if user_input: username = user_input[CONF_USERNAME] password = user_input[CONF_PASSWORD] credentials = Credentials(username, password) + device: Device | None try: - device = await self._async_try_connect( - self._discovered_device, credentials - ) + if self._discovered_device: + device = await self._async_try_connect( + self._discovered_device, credentials + ) + else: + device = await self._async_try_connect_all( + self.host, + credentials=credentials, + raise_on_progress=False, + port=self.port, + ) except AuthenticationError as ex: errors[CONF_PASSWORD] = "invalid_auth" placeholders["error"] = str(ex) @@ -301,11 +367,15 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" placeholders["error"] = str(ex) else: - await set_credentials(self.hass, username, password) - self.hass.async_create_task( - self._async_reload_requires_auth_entries(), eager_start=False - ) - return self._async_create_entry_from_device(device) + if not device: + errors["base"] = "cannot_connect" + placeholders["error"] = "try_connect_all failed" + else: + await set_credentials(self.hass, username, password) + self.hass.async_create_task( + self._async_reload_requires_auth_entries(), eager_start=False + ) + return self._async_create_entry_from_device(device) return self.async_show_form( step_id="user_auth_confirm", @@ -322,9 +392,7 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): mac = user_input[CONF_DEVICE] await self.async_set_unique_id(mac, raise_on_progress=False) self._discovered_device = self._discovered_devices[mac] - host = 
self._discovered_device.host - - self.context[CONF_HOST] = host + self.host = self._discovered_device.host credentials = await get_credentials(self.hass) try: @@ -360,13 +428,13 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): """Reload any in progress config flow that now have credentials.""" _config_entries = self.hass.config_entries - if reauth_entry := self.reauth_entry: - await _config_entries.async_reload(reauth_entry.entry_id) + if self.source == SOURCE_REAUTH: + await _config_entries.async_reload(self._get_reauth_entry().entry_id) for flow in _config_entries.flow.async_progress_by_handler( DOMAIN, include_uninitialized=True ): - context: dict[str, Any] = flow["context"] + context = flow["context"] if context.get("source") != SOURCE_REAUTH: continue entry_id: str = context["entry_id"] @@ -381,57 +449,95 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): # This is only ever called after a successful device update so we know that # the credential_hash is correct and should be saved. self._abort_if_unique_id_configured(updates={CONF_HOST: device.host}) - data = { + data: dict[str, Any] = { CONF_HOST: device.host, CONF_ALIAS: device.alias, CONF_MODEL: device.model, - CONF_DEVICE_CONFIG: device.config.to_dict( - exclude_credentials=True, - ), + CONF_CONNECTION_PARAMETERS: device.config.connection_type.to_dict(), + CONF_USES_HTTP: device.config.uses_http, } + if device.config.aes_keys: + data[CONF_AES_KEYS] = device.config.aes_keys if device.credentials_hash: data[CONF_CREDENTIALS_HASH] = device.credentials_hash + if port := device.config.port_override: + data[CONF_PORT] = port return self.async_create_entry( title=f"{device.alias} {device.model}", data=data, ) + async def _async_try_connect_all( + self, + host: str, + credentials: Credentials | None, + raise_on_progress: bool, + *, + port: int | None = None, + ) -> Device | None: + """Try to connect to the device speculatively. + + The connection parameters aren't known but discovery has failed so try + to connect with tcp. + """ + if credentials: + device = await Discover.try_connect_all( + host, + credentials=credentials, + http_client=create_async_tplink_clientsession(self.hass), + port=port, + ) + else: + # This will just try the legacy protocol that doesn't require auth + # and doesn't use http + try: + device = await Device.connect( + config=DeviceConfig(host, port_override=port) + ) + except Exception: # noqa: BLE001 + return None + if device: + await self.async_set_unique_id( + dr.format_mac(device.mac), + raise_on_progress=raise_on_progress, + ) + return device + async def _async_try_discover_and_update( self, host: str, credentials: Credentials | None, raise_on_progress: bool, - ) -> Device: + raise_on_timeout: bool, + *, + port: int | None = None, + ) -> Device | None: """Try to discover the device and call update. - Will try to connect to legacy devices if discovery fails. + Will try to connect directly if discovery fails. """ + self._discovered_device = None try: self._discovered_device = await Discover.discover_single( - host, credentials=credentials + host, + credentials=credentials, + port=port, ) except TimeoutError as ex: - # Try connect() to legacy devices if discovery fails. This is a - # fallback mechanism for legacy that can handle connections without - # discovery info but if it fails raise the original error which is - # applicable for newer devices. 
- try: - self._discovered_device = await Device.connect( - config=DeviceConfig(host) - ) - except Exception: # noqa: BLE001 - # Raise the original error instead of the fallback error + if raise_on_timeout: raise ex from ex - else: - if self._discovered_device.config.uses_http: - self._discovered_device.config.http_client = ( - create_async_tplink_clientsession(self.hass) - ) - await self._discovered_device.update() + return None + if TYPE_CHECKING: + assert self._discovered_device await self.async_set_unique_id( dr.format_mac(self._discovered_device.mac), raise_on_progress=raise_on_progress, ) + if self._discovered_device.config.uses_http: + self._discovered_device.config.http_client = ( + create_async_tplink_clientsession(self.hass) + ) + await self._discovered_device.update() return self._discovered_device async def _async_try_connect( @@ -460,9 +566,6 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Start the reauthentication flow if the device needs updated credentials.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -471,10 +574,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} placeholders: dict[str, str] = {} - reauth_entry = self.reauth_entry - assert reauth_entry is not None + reauth_entry = self._get_reauth_entry() entry_data = reauth_entry.data host = entry_data[CONF_HOST] + port = entry_data.get(CONF_PORT) if user_input: username = user_input[CONF_USERNAME] @@ -484,7 +587,14 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): device = await self._async_try_discover_and_update( host, credentials=credentials, - raise_on_progress=True, + raise_on_progress=False, + raise_on_timeout=False, + port=port, + ) or await self._async_try_connect_all( + host, + credentials=credentials, + raise_on_progress=False, + port=port, ) except AuthenticationError as ex: errors[CONF_PASSWORD] = "invalid_auth" @@ -493,16 +603,23 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" placeholders["error"] = str(ex) else: - await set_credentials(self.hass, username, password) - config = device.config.to_dict(exclude_credentials=True) - if updates := self._get_config_updates(reauth_entry, host, config): - self.hass.config_entries.async_update_entry( - reauth_entry, data=updates + if not device: + errors["base"] = "cannot_connect" + placeholders["error"] = "try_connect_all failed" + else: + await self.async_set_unique_id( + dr.format_mac(device.mac), + raise_on_progress=False, ) - self.hass.async_create_task( - self._async_reload_requires_auth_entries(), eager_start=False - ) - return self.async_abort(reason="reauth_successful") + await set_credentials(self.hass, username, password) + if updates := self._get_config_updates(reauth_entry, host, device): + self.hass.config_entries.async_update_entry( + reauth_entry, data=updates + ) + self.hass.async_create_task( + self._async_reload_requires_auth_entries(), eager_start=False + ) + return self.async_abort(reason="reauth_successful") # Old config entries will not have these values. 
alias = entry_data.get(CONF_ALIAS) or "unknown" diff --git a/homeassistant/components/tplink/const.py b/homeassistant/components/tplink/const.py index babd92e2c34..28e4b04bcf9 100644 --- a/homeassistant/components/tplink/const.py +++ b/homeassistant/components/tplink/const.py @@ -21,7 +21,11 @@ ATTR_TOTAL_ENERGY_KWH: Final = "total_energy_kwh" CONF_DEVICE_CONFIG: Final = "device_config" CONF_CREDENTIALS_HASH: Final = "credentials_hash" -CONF_CONNECTION_TYPE: Final = "connection_type" +CONF_CONNECTION_PARAMETERS: Final = "connection_parameters" +CONF_USES_HTTP: Final = "uses_http" +CONF_AES_KEYS: Final = "aes_keys" + +CONF_CONFIG_ENTRY_MINOR_VERSION: Final = 5 PLATFORMS: Final = [ Platform.BINARY_SENSOR, @@ -32,6 +36,7 @@ PLATFORMS: Final = [ Platform.NUMBER, Platform.SELECT, Platform.SENSOR, + Platform.SIREN, Platform.SWITCH, ] diff --git a/homeassistant/components/tplink/deprecate.py b/homeassistant/components/tplink/deprecate.py new file mode 100644 index 00000000000..738f3d24c38 --- /dev/null +++ b/homeassistant/components/tplink/deprecate.py @@ -0,0 +1,111 @@ +"""Helper class for deprecating entities.""" + +from __future__ import annotations + +from collections.abc import Sequence +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue + +from .const import DOMAIN + +if TYPE_CHECKING: + from .entity import CoordinatedTPLinkFeatureEntity, TPLinkFeatureEntityDescription + + +@dataclass(slots=True) +class DeprecatedInfo: + """Class to define deprecation info for deprecated entities.""" + + platform: str + new_platform: str + breaks_in_ha_version: str + + +def async_check_create_deprecated( + hass: HomeAssistant, + unique_id: str, + entity_description: TPLinkFeatureEntityDescription, +) -> bool: + """Return true if the entity should be created based on the deprecated_info. + + If deprecated_info is not defined will return true. + If entity not yet created will return false. + If entity disabled will return false. 
+ """ + if not entity_description.deprecated_info: + return True + + deprecated_info = entity_description.deprecated_info + platform = deprecated_info.platform + + ent_reg = er.async_get(hass) + entity_id = ent_reg.async_get_entity_id( + platform, + DOMAIN, + unique_id, + ) + if not entity_id: + return False + + entity_entry = ent_reg.async_get(entity_id) + assert entity_entry + return not entity_entry.disabled + + +def async_cleanup_deprecated( + hass: HomeAssistant, + platform: str, + entry_id: str, + entities: Sequence[CoordinatedTPLinkFeatureEntity], +) -> None: + """Remove disabled deprecated entities or create issues if necessary.""" + ent_reg = er.async_get(hass) + for entity in entities: + if not (deprecated_info := entity.entity_description.deprecated_info): + continue + + assert entity.unique_id + entity_id = ent_reg.async_get_entity_id( + platform, + DOMAIN, + entity.unique_id, + ) + assert entity_id + # Check for issues that need to be created + entity_automations = automations_with_entity(hass, entity_id) + entity_scripts = scripts_with_entity(hass, entity_id) + + for item in entity_automations + entity_scripts: + async_create_issue( + hass, + DOMAIN, + f"deprecated_entity_{entity_id}_{item}", + breaks_in_ha_version=deprecated_info.breaks_in_ha_version, + is_fixable=False, + is_persistent=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_entity", + translation_placeholders={ + "entity": entity_id, + "info": item, + "platform": platform, + "new_platform": deprecated_info.new_platform, + }, + ) + + # Remove entities that are no longer provided and have been disabled. + unique_ids = {entity.unique_id for entity in entities} + for entity_entry in er.async_entries_for_config_entry(ent_reg, entry_id): + if ( + entity_entry.domain == platform + and entity_entry.disabled + and entity_entry.unique_id not in unique_ids + ): + ent_reg.async_remove(entity_entry.entity_id) + continue diff --git a/homeassistant/components/tplink/entity.py b/homeassistant/components/tplink/entity.py index 4ec0480cf82..ef9e2ad5eee 100644 --- a/homeassistant/components/tplink/entity.py +++ b/homeassistant/components/tplink/entity.py @@ -18,7 +18,7 @@ from kasa import ( ) from homeassistant.const import EntityCategory -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo @@ -36,6 +36,7 @@ from .const import ( PRIMARY_STATE_ID, ) from .coordinator import TPLinkDataUpdateCoordinator +from .deprecate import DeprecatedInfo, async_check_create_deprecated _LOGGER = logging.getLogger(__name__) @@ -68,6 +69,10 @@ EXCLUDED_FEATURES = { # update "current_firmware_version", "available_firmware_version", + "update_available", + "check_latest_firmware", + # siren + "alarm", } @@ -83,6 +88,8 @@ LEGACY_KEY_MAPPING = { class TPLinkFeatureEntityDescription(EntityDescription): """Base class for a TPLink feature based entity description.""" + deprecated_info: DeprecatedInfo | None = None + def async_refresh_after[_T: CoordinatedTPLinkEntity, **_P]( func: Callable[Concatenate[_T, _P], Awaitable[None]], @@ -247,18 +254,25 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): def _get_unique_id(self) -> str: """Return unique ID for the entity.""" - key = self.entity_description.key + return self._get_feature_unique_id(self._device, self.entity_description) + + @staticmethod + def 
_get_feature_unique_id( + device: Device, entity_description: TPLinkFeatureEntityDescription + ) -> str: + """Return unique ID for the entity.""" + key = entity_description.key # The unique id for the state feature in the switch platform is the # device_id if key == PRIMARY_STATE_ID: - return legacy_device_id(self._device) + return legacy_device_id(device) # Historically the legacy device emeter attributes which are now # replaced with features used slightly different keys. This ensures # that those entities are not orphaned. Returns the mapped key or the # provided key if not mapped. key = LEGACY_KEY_MAPPING.get(key, key) - return f"{legacy_device_id(self._device)}_{key}" + return f"{legacy_device_id(device)}_{key}" @classmethod def _category_for_feature(cls, feature: Feature | None) -> EntityCategory | None: @@ -317,7 +331,7 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): and desc.entity_registry_enabled_default, ) - _LOGGER.info( + _LOGGER.debug( "Device feature: %s (%s) needs an entity description defined in HA", feature.name, feature.id, @@ -330,6 +344,7 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): _D: TPLinkFeatureEntityDescription, ]( cls, + hass: HomeAssistant, device: Device, coordinator: TPLinkDataUpdateCoordinator, *, @@ -364,6 +379,11 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): feat, descriptions, device=device, parent=parent ) ) + and async_check_create_deprecated( + hass, + cls._get_feature_unique_id(device, desc), + desc, + ) ] return entities @@ -373,6 +393,7 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): _D: TPLinkFeatureEntityDescription, ]( cls, + hass: HomeAssistant, device: Device, coordinator: TPLinkDataUpdateCoordinator, *, @@ -389,6 +410,7 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): # Add parent entities before children so via_device id works. 
entities.extend( cls._entities_for_device( + hass, device, coordinator=coordinator, feature_type=feature_type, @@ -408,6 +430,7 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): child_coordinator = coordinator entities.extend( cls._entities_for_device( + hass, child, coordinator=child_coordinator, feature_type=feature_type, diff --git a/homeassistant/components/tplink/fan.py b/homeassistant/components/tplink/fan.py index f90eadbc531..64ad01eb671 100644 --- a/homeassistant/components/tplink/fan.py +++ b/homeassistant/components/tplink/fan.py @@ -64,7 +64,6 @@ class TPLinkFanEntity(CoordinatedTPLinkEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index 3da3b4806d3..0abd68543c5 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -68,6 +68,15 @@ "state": { "on": "mdi:sleep" } + }, + "child_lock": { + "default": "mdi:account-lock" + }, + "pir_enabled": { + "default": "mdi:motion-sensor-off", + "state": { + "on": "mdi:motion-sensor" + } } }, "sensor": { @@ -88,6 +97,9 @@ }, "alarm_source": { "default": "mdi:bell" + }, + "water_alert_timestamp": { + "default": "mdi:clock-alert-outline" } }, "number": { @@ -109,7 +121,11 @@ } }, "services": { - "sequence_effect": "mdi:playlist-play", - "random_effect": "mdi:shuffle-variant" + "sequence_effect": { + "service": "mdi:playlist-play" + }, + "random_effect": { + "service": "mdi:shuffle-variant" + } } } diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 10b0ef61153..6ce46c0d488 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -300,6 +300,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink", "iot_class": "local_polling", "loggers": ["kasa"], - "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.1"] + "requirements": ["python-kasa[speedups]==0.8.1"] } diff --git a/homeassistant/components/tplink/number.py b/homeassistant/components/tplink/number.py index 4b273800e6a..b51c00db7c0 100644 --- a/homeassistant/components/tplink/number.py +++ b/homeassistant/components/tplink/number.py @@ -2,8 +2,9 @@ from __future__ import annotations +from dataclasses import dataclass import logging -from typing import Final +from typing import Final, cast from kasa import Device, Feature @@ -26,6 +27,7 @@ from .entity import ( _LOGGER = logging.getLogger(__name__) +@dataclass(frozen=True, kw_only=True) class TPLinkNumberEntityDescription( NumberEntityDescription, TPLinkFeatureEntityDescription ): @@ -65,6 +67,7 @@ async def async_setup_entry( children_coordinators = data.children_coordinators device = parent_coordinator.device entities = CoordinatedTPLinkFeatureEntity.entities_for_device_and_its_children( + hass=hass, device=device, coordinator=parent_coordinator, feature_type=Feature.Type.Number, @@ -105,4 +108,4 @@ class TPLinkNumberEntity(CoordinatedTPLinkFeatureEntity, NumberEntity): @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_native_value = self._feature.value + self._attr_native_value = cast(float | None, self._feature.value) diff --git a/homeassistant/components/tplink/select.py b/homeassistant/components/tplink/select.py index 41703b27e5a..3755a1d0be2 100644 --- 
a/homeassistant/components/tplink/select.py +++ b/homeassistant/components/tplink/select.py @@ -54,6 +54,7 @@ async def async_setup_entry( device = parent_coordinator.device entities = CoordinatedTPLinkFeatureEntity.entities_for_device_and_its_children( + hass=hass, device=device, coordinator=parent_coordinator, feature_type=Feature.Type.Choice, @@ -92,4 +93,4 @@ class TPLinkSelectEntity(CoordinatedTPLinkFeatureEntity, SelectEntity): @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_current_option = self._feature.value + self._attr_current_option = cast(str | None, self._feature.value) diff --git a/homeassistant/components/tplink/sensor.py b/homeassistant/components/tplink/sensor.py index 1307079937f..8b7351f8d7d 100644 --- a/homeassistant/components/tplink/sensor.py +++ b/homeassistant/components/tplink/sensor.py @@ -3,11 +3,12 @@ from __future__ import annotations from dataclasses import dataclass -from typing import cast +from typing import TYPE_CHECKING, cast from kasa import Feature from homeassistant.components.sensor import ( + DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -18,6 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TPLinkConfigEntry from .const import UNIT_MAPPING +from .deprecate import async_cleanup_deprecated from .entity import CoordinatedTPLinkFeatureEntity, TPLinkFeatureEntityDescription @@ -95,6 +97,10 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = ( key="device_time", device_class=SensorDeviceClass.TIMESTAMP, ), + TPLinkSensorEntityDescription( + key="water_alert_timestamp", + device_class=SensorDeviceClass.TIMESTAMP, + ), TPLinkSensorEntityDescription( key="humidity", device_class=SensorDeviceClass.HUMIDITY, @@ -110,6 +116,7 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = ( TPLinkSensorEntityDescription( key="temperature", device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, ), ) @@ -128,6 +135,7 @@ async def async_setup_entry( device = parent_coordinator.device entities = CoordinatedTPLinkFeatureEntity.entities_for_device_and_its_children( + hass=hass, device=device, coordinator=parent_coordinator, feature_type=Feature.Type.Sensor, @@ -135,6 +143,7 @@ async def async_setup_entry( descriptions=SENSOR_DESCRIPTIONS_MAP, child_coordinators=children_coordinators, ) + async_cleanup_deprecated(hass, SENSOR_DOMAIN, config_entry.entry_id, entities) async_add_entities(entities) @@ -152,6 +161,12 @@ class TPLinkSensorEntity(CoordinatedTPLinkFeatureEntity, SensorEntity): # We probably do not need this, when we are rounding already? 
self._attr_suggested_display_precision = self._feature.precision_hint + if TYPE_CHECKING: + # pylint: disable-next=import-outside-toplevel + from datetime import date, datetime + + assert isinstance(value, str | int | float | date | datetime | None) + self._attr_native_value = value # Map to homeassistant units and fallback to upstream one if none found if (unit := self._feature.unit) is not None: diff --git a/homeassistant/components/tplink/siren.py b/homeassistant/components/tplink/siren.py new file mode 100644 index 00000000000..c4ece56f0f6 --- /dev/null +++ b/homeassistant/components/tplink/siren.py @@ -0,0 +1,61 @@ +"""Support for TPLink hub alarm.""" + +from __future__ import annotations + +from typing import Any + +from kasa import Device, Module +from kasa.smart.modules.alarm import Alarm + +from homeassistant.components.siren import SirenEntity, SirenEntityFeature +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TPLinkConfigEntry +from .coordinator import TPLinkDataUpdateCoordinator +from .entity import CoordinatedTPLinkEntity, async_refresh_after + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: TPLinkConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up siren entities.""" + data = config_entry.runtime_data + parent_coordinator = data.parent_coordinator + device = parent_coordinator.device + + if Module.Alarm in device.modules: + async_add_entities([TPLinkSirenEntity(device, parent_coordinator)]) + + +class TPLinkSirenEntity(CoordinatedTPLinkEntity, SirenEntity): + """Representation of a tplink hub alarm.""" + + _attr_name = None + _attr_supported_features = SirenEntityFeature.TURN_OFF | SirenEntityFeature.TURN_ON + + def __init__( + self, + device: Device, + coordinator: TPLinkDataUpdateCoordinator, + ) -> None: + """Initialize the siren entity.""" + self._alarm_module: Alarm = device.modules[Module.Alarm] + super().__init__(device, coordinator) + + @async_refresh_after + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the siren on.""" + await self._alarm_module.play() + + @async_refresh_after + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the siren off.""" + await self._alarm_module.stop() + + @callback + def _async_update_attrs(self) -> None: + """Update the entity's attributes.""" + self._attr_is_on = self._alarm_module.active diff --git a/homeassistant/components/tplink/strings.json b/homeassistant/components/tplink/strings.json index 34ce96612f5..8e5118c2720 100644 --- a/homeassistant/components/tplink/strings.json +++ b/homeassistant/components/tplink/strings.json @@ -35,10 +35,6 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reauth": { - "title": "[%key:common::config_flow::title::reauth%]", - "description": "[%key:component::tplink::config::step::user_auth_confirm::description%]" - }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", "description": "[%key:component::tplink::config::step::user_auth_confirm::description%]", @@ -55,7 +51,8 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, 
"entity": { @@ -162,6 +159,9 @@ "device_time": { "name": "Device time" }, + "water_alert_timestamp": { + "name": "Last water leak alert" + }, "auto_off_at": { "name": "Auto off at" }, @@ -190,6 +190,12 @@ }, "fan_sleep_mode": { "name": "Fan sleep mode" + }, + "child_lock": { + "name": "Child lock" + }, + "pir_enabled": { + "name": "Motion sensor" } }, "number": { @@ -311,5 +317,11 @@ "device_authentication": { "message": "Device authentication error {func}: {exc}" } + }, + "issues": { + "deprecated_entity": { + "title": "Detected deprecated {platform} entity usage", + "description": "We detected that entity `{entity}` is being used in `{info}`\n\nWe have created a new `{new_platform}` entity and you should migrate `{info}` to use this new entity.\n\nWhen you are done migrating `{info}` and are ready to have the deprecated `{entity}` entity removed, disable the entity and restart Home Assistant." + } } } diff --git a/homeassistant/components/tplink/switch.py b/homeassistant/components/tplink/switch.py index 62957d48ac4..7e223752665 100644 --- a/homeassistant/components/tplink/switch.py +++ b/homeassistant/components/tplink/switch.py @@ -4,7 +4,7 @@ from __future__ import annotations from dataclasses import dataclass import logging -from typing import Any +from typing import Any, cast from kasa import Feature @@ -48,6 +48,12 @@ SWITCH_DESCRIPTIONS: tuple[TPLinkSwitchEntityDescription, ...] = ( TPLinkSwitchEntityDescription( key="fan_sleep_mode", ), + TPLinkSwitchEntityDescription( + key="child_lock", + ), + TPLinkSwitchEntityDescription( + key="pir_enabled", + ), ) SWITCH_DESCRIPTIONS_MAP = {desc.key: desc for desc in SWITCH_DESCRIPTIONS} @@ -64,7 +70,8 @@ async def async_setup_entry( device = parent_coordinator.device entities = CoordinatedTPLinkFeatureEntity.entities_for_device_and_its_children( - device, + hass=hass, + device=device, coordinator=parent_coordinator, feature_type=Feature.Switch, entity_class=TPLinkSwitch, @@ -92,4 +99,4 @@ class TPLinkSwitch(CoordinatedTPLinkFeatureEntity, SwitchEntity): @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_is_on = self._feature.value + self._attr_is_on = cast(bool | None, self._feature.value) diff --git a/homeassistant/components/tplink_lte/manifest.json b/homeassistant/components/tplink_lte/manifest.json index 63640628e35..a880594e683 100644 --- a/homeassistant/components/tplink_lte/manifest.json +++ b/homeassistant/components/tplink_lte/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tplink_lte", "iot_class": "local_polling", "loggers": ["tp_connected"], + "quality_scale": "legacy", "requirements": ["tp-connected==0.0.4"] } diff --git a/homeassistant/components/tplink_omada/__init__.py b/homeassistant/components/tplink_omada/__init__.py index 19b3d58dbd4..2d33a890510 100644 --- a/homeassistant/components/tplink_omada/__init__.py +++ b/homeassistant/components/tplink_omada/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations from tplink_omada_client import OmadaSite +from tplink_omada_client.devices import OmadaListDevice from tplink_omada_client.exceptions import ( ConnectionFailed, LoginFailed, @@ -10,10 +11,11 @@ from tplink_omada_client.exceptions import ( UnsupportedControllerVersion, ) -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import 
HomeAssistant, ServiceCall from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr from .config_flow import CONF_SITE, create_omada_client from .const import DOMAIN @@ -22,15 +24,17 @@ from .controller import OmadaSiteController PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, + Platform.SENSOR, Platform.SWITCH, Platform.UPDATE, ] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up TP-Link Omada from a config entry.""" +type OmadaConfigEntry = ConfigEntry[OmadaSiteController] - hass.data.setdefault(DOMAIN, {}) + +async def async_setup_entry(hass: HomeAssistant, entry: OmadaConfigEntry) -> bool: + """Set up TP-Link Omada from a config entry.""" try: client = await create_omada_client(hass, entry.data) @@ -52,21 +56,56 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: site_client = await client.get_site_client(OmadaSite("", entry.data[CONF_SITE])) controller = OmadaSiteController(hass, site_client) - gateway_coordinator = await controller.get_gateway_coordinator() - if gateway_coordinator: - await gateway_coordinator.async_config_entry_first_refresh() - await controller.get_clients_coordinator().async_config_entry_first_refresh() + await controller.initialize_first_refresh() - hass.data[DOMAIN][entry.entry_id] = controller + entry.runtime_data = controller + + async def handle_reconnect_client(call: ServiceCall) -> None: + """Handle the service action call.""" + mac: str | None = call.data.get("mac") + if not mac: + return + + await site_client.reconnect_client(mac) + + hass.services.async_register(DOMAIN, "reconnect_client", handle_reconnect_client) + + _remove_old_devices(hass, entry, controller.devices_coordinator.data) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: OmadaConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + loaded_entries = [ + entry + for entry in hass.config_entries.async_entries(DOMAIN) + if entry.state == ConfigEntryState.LOADED + ] + if len(loaded_entries) == 1: + # This is the last loaded instance of Omada, deregister any services + hass.services.async_remove(DOMAIN, "reconnect_client") return unload_ok + + +def _remove_old_devices( + hass: HomeAssistant, + entry: OmadaConfigEntry, + omada_devices: dict[str, OmadaListDevice], +) -> None: + device_registry = dr.async_get(hass) + + for registered_device in device_registry.devices.get_devices_for_config_entry_id( + entry.entry_id + ): + mac = next( + (i[1] for i in registered_device.identifiers if i[0] == DOMAIN), None + ) + if mac and mac not in omada_devices: + device_registry.async_update_device( + registered_device.id, remove_config_entry_id=entry.entry_id + ) diff --git a/homeassistant/components/tplink_omada/binary_sensor.py b/homeassistant/components/tplink_omada/binary_sensor.py index c0304c4d1b2..73d5f54b8b3 100644 --- a/homeassistant/components/tplink_omada/binary_sensor.py +++ b/homeassistant/components/tplink_omada/binary_sensor.py @@ -17,24 +17,23 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) 
-from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .controller import OmadaGatewayCoordinator, OmadaSiteController +from . import OmadaConfigEntry +from .controller import OmadaGatewayCoordinator from .entity import OmadaDeviceEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: OmadaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up binary sensors.""" - controller: OmadaSiteController = hass.data[DOMAIN][config_entry.entry_id] + controller = config_entry.runtime_data - gateway_coordinator = await controller.get_gateway_coordinator() + gateway_coordinator = controller.gateway_coordinator if not gateway_coordinator: return @@ -100,7 +99,6 @@ class OmadaGatewayPortBinarySensor( """Binary status of a property on an internet gateway.""" entity_description: GatewayPortBinarySensorEntityDescription - _attr_has_entity_name = True def __init__( self, diff --git a/homeassistant/components/tplink_omada/config_flow.py b/homeassistant/components/tplink_omada/config_flow.py index 5ea56a9ad9f..eeeddb62495 100644 --- a/homeassistant/components/tplink_omada/config_flow.py +++ b/homeassistant/components/tplink_omada/config_flow.py @@ -179,15 +179,9 @@ class TpLinkOmadaConfigFlow(ConfigFlow, domain=DOMAIN): if info is not None: # Auth successful - update the config entry with the new credentials - entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self._omada_opts ) - assert entry is not None - self.hass.config_entries.async_update_entry( - entry, data=self._omada_opts - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/tplink_omada/const.py b/homeassistant/components/tplink_omada/const.py index f63d82c6bb4..bc55c76c931 100644 --- a/homeassistant/components/tplink_omada/const.py +++ b/homeassistant/components/tplink_omada/const.py @@ -1,3 +1,17 @@ """Constants for the TP-Link Omada integration.""" +from enum import StrEnum + DOMAIN = "tplink_omada" + + +class OmadaDeviceStatus(StrEnum): + """Possible composite status values for Omada devices.""" + + DISCONNECTED = "disconnected" + CONNECTED = "connected" + PENDING = "pending" + HEARTBEAT_MISSED = "heartbeat_missed" + ISOLATED = "isolated" + ADOPT_FAILED = "adopt_failed" + MANAGED_EXTERNALLY = "managed_externally" diff --git a/homeassistant/components/tplink_omada/controller.py b/homeassistant/components/tplink_omada/controller.py index d92a6f37e24..658286981f9 100644 --- a/homeassistant/components/tplink_omada/controller.py +++ b/homeassistant/components/tplink_omada/controller.py @@ -7,6 +7,7 @@ from homeassistant.core import HomeAssistant from .coordinator import ( OmadaClientsCoordinator, + OmadaDevicesCoordinator, OmadaGatewayCoordinator, OmadaSwitchPortCoordinator, ) @@ -16,15 +17,33 @@ class OmadaSiteController: """Controller for the Omada SDN site.""" _gateway_coordinator: OmadaGatewayCoordinator | None = None - _initialized_gateway_coordinator = False - _clients_coordinator: OmadaClientsCoordinator | None = None - def __init__(self, hass: HomeAssistant, omada_client: OmadaSiteClient) -> None: + def __init__( + self, + hass: HomeAssistant, + omada_client: 
OmadaSiteClient, + ) -> None: """Create the controller.""" self._hass = hass self._omada_client = omada_client self._switch_port_coordinators: dict[str, OmadaSwitchPortCoordinator] = {} + self._devices_coordinator = OmadaDevicesCoordinator(hass, omada_client) + self._clients_coordinator = OmadaClientsCoordinator(hass, omada_client) + + async def initialize_first_refresh(self) -> None: + """Initialize the all coordinators, and perform first refresh.""" + await self._devices_coordinator.async_config_entry_first_refresh() + + devices = self._devices_coordinator.data.values() + gateway = next((d for d in devices if d.type == "gateway"), None) + if gateway: + self._gateway_coordinator = OmadaGatewayCoordinator( + self._hass, self._omada_client, gateway.mac + ) + await self._gateway_coordinator.async_config_entry_first_refresh() + + await self.clients_coordinator.async_config_entry_first_refresh() @property def omada_client(self) -> OmadaSiteClient: @@ -42,26 +61,17 @@ class OmadaSiteController: return self._switch_port_coordinators[switch.mac] - async def get_gateway_coordinator(self) -> OmadaGatewayCoordinator | None: - """Get coordinator for site's gateway, or None if there is no gateway.""" - if not self._initialized_gateway_coordinator: - self._initialized_gateway_coordinator = True - devices = await self._omada_client.get_devices() - gateway = next((d for d in devices if d.type == "gateway"), None) - if not gateway: - return None - - self._gateway_coordinator = OmadaGatewayCoordinator( - self._hass, self._omada_client, gateway.mac - ) - + @property + def gateway_coordinator(self) -> OmadaGatewayCoordinator | None: + """Gets the coordinator for site's gateway, or None if there is no gateway.""" return self._gateway_coordinator - def get_clients_coordinator(self) -> OmadaClientsCoordinator: - """Get coordinator for site's clients.""" - if not self._clients_coordinator: - self._clients_coordinator = OmadaClientsCoordinator( - self._hass, self._omada_client - ) + @property + def devices_coordinator(self) -> OmadaDevicesCoordinator: + """Gets the coordinator for site's devices.""" + return self._devices_coordinator + @property + def clients_coordinator(self) -> OmadaClientsCoordinator: + """Gets the coordinator for site's clients.""" return self._clients_coordinator diff --git a/homeassistant/components/tplink_omada/coordinator.py b/homeassistant/components/tplink_omada/coordinator.py index da0a79ef991..a80bedeb65e 100644 --- a/homeassistant/components/tplink_omada/coordinator.py +++ b/homeassistant/components/tplink_omada/coordinator.py @@ -6,7 +6,7 @@ import logging from tplink_omada_client import OmadaSiteClient, OmadaSwitchPortDetails from tplink_omada_client.clients import OmadaWirelessClient -from tplink_omada_client.devices import OmadaGateway, OmadaSwitch +from tplink_omada_client.devices import OmadaGateway, OmadaListDevice, OmadaSwitch from tplink_omada_client.exceptions import OmadaClientException from homeassistant.core import HomeAssistant @@ -17,6 +17,7 @@ _LOGGER = logging.getLogger(__name__) POLL_SWITCH_PORT = 300 POLL_GATEWAY = 300 POLL_CLIENTS = 300 +POLL_DEVICES = 300 class OmadaCoordinator[_T](DataUpdateCoordinator[dict[str, _T]]): @@ -27,14 +28,14 @@ class OmadaCoordinator[_T](DataUpdateCoordinator[dict[str, _T]]): hass: HomeAssistant, omada_client: OmadaSiteClient, name: str, - poll_delay: int = 300, + poll_delay: int | None = 300, ) -> None: """Initialize my coordinator.""" super().__init__( hass, _LOGGER, name=f"Omada API Data - {name}", - 
update_interval=timedelta(seconds=poll_delay), + update_interval=timedelta(seconds=poll_delay) if poll_delay else None, ) self.omada_client = omada_client @@ -91,6 +92,22 @@ class OmadaGatewayCoordinator(OmadaCoordinator[OmadaGateway]): return {self.mac: gateway} +class OmadaDevicesCoordinator(OmadaCoordinator[OmadaListDevice]): + """Coordinator for generic device lists from the controller.""" + + def __init__( + self, + hass: HomeAssistant, + omada_client: OmadaSiteClient, + ) -> None: + """Initialize my coordinator.""" + super().__init__(hass, omada_client, "DeviceList", POLL_CLIENTS) + + async def poll_update(self) -> dict[str, OmadaListDevice]: + """Poll the site's current registered Omada devices.""" + return {d.mac: d for d in await self.omada_client.get_devices()} + + class OmadaClientsCoordinator(OmadaCoordinator[OmadaWirelessClient]): """Coordinator for getting details about the site's connected clients.""" diff --git a/homeassistant/components/tplink_omada/device_tracker.py b/homeassistant/components/tplink_omada/device_tracker.py index be734592d11..fe78adf8847 100644 --- a/homeassistant/components/tplink_omada/device_tracker.py +++ b/homeassistant/components/tplink_omada/device_tracker.py @@ -4,29 +4,27 @@ import logging from tplink_omada_client.clients import OmadaWirelessClient -from homeassistant.components.device_tracker import ScannerEntity, SourceType -from homeassistant.config_entries import ConfigEntry +from homeassistant.components.device_tracker import ScannerEntity from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . import OmadaConfigEntry from .config_flow import CONF_SITE -from .const import DOMAIN -from .controller import OmadaClientsCoordinator, OmadaSiteController +from .controller import OmadaClientsCoordinator _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: OmadaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up device trackers and scanners.""" - controller: OmadaSiteController = hass.data[DOMAIN][config_entry.entry_id] + controller = config_entry.runtime_data - clients_coordinator = controller.get_clients_coordinator() site_id = config_entry.data[CONF_SITE] # Add all known WiFi devices as potentially tracked devices. 
They will only be @@ -34,7 +32,7 @@ async def async_setup_entry( async_add_entities( [ OmadaClientScannerEntity( - site_id, client.mac, client.name, clients_coordinator + site_id, client.mac, client.name, controller.clients_coordinator ) async for client in controller.omada_client.get_known_clients() if isinstance(client, OmadaWirelessClient) @@ -62,11 +60,6 @@ class OmadaClientScannerEntity( self._client_id = client_id self._attr_name = display_name - @property - def source_type(self) -> SourceType: - """Return the source type of the device.""" - return SourceType.ROUTER - def _do_update(self) -> None: self._client_details = self.coordinator.data.get(self._client_id) diff --git a/homeassistant/components/tplink_omada/entity.py b/homeassistant/components/tplink_omada/entity.py index 13ec7b3c6cb..54021a2ef86 100644 --- a/homeassistant/components/tplink_omada/entity.py +++ b/homeassistant/components/tplink_omada/entity.py @@ -5,7 +5,6 @@ from typing import Any from tplink_omada_client.devices import OmadaDevice from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN @@ -15,11 +14,13 @@ from .coordinator import OmadaCoordinator class OmadaDeviceEntity[_T: OmadaCoordinator[Any]](CoordinatorEntity[_T]): """Common base class for all entities associated with Omada SDN Devices.""" + _attr_has_entity_name = True + def __init__(self, coordinator: _T, device: OmadaDevice) -> None: """Initialize the device.""" super().__init__(coordinator) self.device = device - self._attr_device_info = DeviceInfo( + self._attr_device_info = dr.DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, device.mac)}, identifiers={(DOMAIN, device.mac)}, manufacturer="TP-Link", diff --git a/homeassistant/components/tplink_omada/icons.json b/homeassistant/components/tplink_omada/icons.json index d0c407a9326..94f0a6b9764 100644 --- a/homeassistant/components/tplink_omada/icons.json +++ b/homeassistant/components/tplink_omada/icons.json @@ -18,6 +18,19 @@ "off": "mdi:cloud-cancel" } } + }, + "sensor": { + "cpu_usage": { + "default": "mdi:cpu-32-bit" + }, + "mem_usage": { + "default": "mdi:memory" + } + } + }, + "services": { + "reconnect_client": { + "service": "mdi:sync" } } } diff --git a/homeassistant/components/tplink_omada/manifest.json b/homeassistant/components/tplink_omada/manifest.json index 6bde656dc30..af20b54675b 100644 --- a/homeassistant/components/tplink_omada/manifest.json +++ b/homeassistant/components/tplink_omada/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink_omada", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["tplink-omada-client==1.4.2"] + "requirements": ["tplink-omada-client==1.4.3"] } diff --git a/homeassistant/components/tplink_omada/sensor.py b/homeassistant/components/tplink_omada/sensor.py new file mode 100644 index 00000000000..272334d1b52 --- /dev/null +++ b/homeassistant/components/tplink_omada/sensor.py @@ -0,0 +1,132 @@ +"""Support for TPLink Omada binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from tplink_omada_client.definitions import DeviceStatus, DeviceStatusCategory +from tplink_omada_client.devices import OmadaListDevice + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from 
homeassistant.const import PERCENTAGE, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import OmadaConfigEntry +from .const import OmadaDeviceStatus +from .coordinator import OmadaDevicesCoordinator +from .entity import OmadaDeviceEntity + +# Useful low level status categories, mapped to a more descriptive status. +DEVICE_STATUS_MAP = { + DeviceStatus.PROVISIONING: OmadaDeviceStatus.PENDING, + DeviceStatus.CONFIGURING: OmadaDeviceStatus.PENDING, + DeviceStatus.UPGRADING: OmadaDeviceStatus.PENDING, + DeviceStatus.REBOOTING: OmadaDeviceStatus.PENDING, + DeviceStatus.ADOPT_FAILED: OmadaDeviceStatus.ADOPT_FAILED, + DeviceStatus.ADOPT_FAILED_WIRELESS: OmadaDeviceStatus.ADOPT_FAILED, + DeviceStatus.MANAGED_EXTERNALLY: OmadaDeviceStatus.MANAGED_EXTERNALLY, + DeviceStatus.MANAGED_EXTERNALLY_WIRELESS: OmadaDeviceStatus.MANAGED_EXTERNALLY, +} + +# High level status categories, suitable for most device statuses. +DEVICE_STATUS_CATEGORY_MAP = { + DeviceStatusCategory.DISCONNECTED: OmadaDeviceStatus.DISCONNECTED, + DeviceStatusCategory.CONNECTED: OmadaDeviceStatus.CONNECTED, + DeviceStatusCategory.PENDING: OmadaDeviceStatus.PENDING, + DeviceStatusCategory.HEARTBEAT_MISSED: OmadaDeviceStatus.HEARTBEAT_MISSED, + DeviceStatusCategory.ISOLATED: OmadaDeviceStatus.ISOLATED, +} + + +def _map_device_status(device: OmadaListDevice) -> str | None: + """Map the API device status to the best available descriptive device status.""" + display_status = DEVICE_STATUS_MAP.get( + device.status + ) or DEVICE_STATUS_CATEGORY_MAP.get(device.status_category) + return display_status.value if display_status else None + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OmadaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up sensors.""" + controller = config_entry.runtime_data + + devices_coordinator = controller.devices_coordinator + + async_add_entities( + OmadaDeviceSensor(devices_coordinator, device, desc) + for device in devices_coordinator.data.values() + for desc in OMADA_DEVICE_SENSORS + if desc.exists_func(device) + ) + + +@dataclass(frozen=True, kw_only=True) +class OmadaDeviceSensorEntityDescription(SensorEntityDescription): + """Entity description for a status derived from an Omada device in the device list.""" + + exists_func: Callable[[OmadaListDevice], bool] = lambda _: True + update_func: Callable[[OmadaListDevice], StateType] + + +OMADA_DEVICE_SENSORS: list[OmadaDeviceSensorEntityDescription] = [ + OmadaDeviceSensorEntityDescription( + key="device_status", + translation_key="device_status", + device_class=SensorDeviceClass.ENUM, + entity_category=EntityCategory.DIAGNOSTIC, + update_func=_map_device_status, + options=[v.value for v in OmadaDeviceStatus], + ), + OmadaDeviceSensorEntityDescription( + key="cpu_usage", + translation_key="cpu_usage", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + update_func=lambda device: device.cpu_usage, + ), + OmadaDeviceSensorEntityDescription( + key="mem_usage", + translation_key="mem_usage", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + update_func=lambda device: device.mem_usage, + ), +] + + +class OmadaDeviceSensor(OmadaDeviceEntity[OmadaDevicesCoordinator], SensorEntity): + """Sensor for property of a generic 
Omada device.""" + + entity_description: OmadaDeviceSensorEntityDescription + + def __init__( + self, + coordinator: OmadaDevicesCoordinator, + device: OmadaListDevice, + entity_description: OmadaDeviceSensorEntityDescription, + ) -> None: + """Initialize the device sensor.""" + super().__init__(coordinator, device) + self.entity_description = entity_description + self._attr_unique_id = f"{device.mac}_{entity_description.key}" + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.update_func( + self.coordinator.data[self.device.mac] + ) diff --git a/homeassistant/components/tplink_omada/services.yaml b/homeassistant/components/tplink_omada/services.yaml new file mode 100644 index 00000000000..19a64ea8625 --- /dev/null +++ b/homeassistant/components/tplink_omada/services.yaml @@ -0,0 +1,7 @@ +reconnect_client: + fields: + mac: + required: true + example: "01-23-45-67-89-AB" + selector: + text: diff --git a/homeassistant/components/tplink_omada/strings.json b/homeassistant/components/tplink_omada/strings.json index 49873b7d088..73cea692dbf 100644 --- a/homeassistant/components/tplink_omada/strings.json +++ b/homeassistant/components/tplink_omada/strings.json @@ -65,6 +65,39 @@ "poe_delivery": { "name": "Port {port_name} PoE Delivery" } + }, + "sensor": { + "device_status": { + "name": "Device status", + "state": { + "error": "Error", + "disconnected": "[%key:common::state::disconnected%]", + "connected": "[%key:common::state::connected%]", + "pending": "Pending", + "heartbeat_missed": "Heartbeat missed", + "isolated": "Isolated", + "adopt_failed": "Adopt failed", + "managed_externally": "Managed externally" + } + }, + "cpu_usage": { + "name": "CPU usage" + }, + "mem_usage": { + "name": "Memory usage" + } + } + }, + "services": { + "reconnect_client": { + "name": "Reconnect wireless client", + "description": "Tries to get wireless client to reconnect to Omada Network.", + "fields": { + "mac": { + "name": "MAC address", + "description": "MAC address of the device." + } + } } } } diff --git a/homeassistant/components/tplink_omada/switch.py b/homeassistant/components/tplink_omada/switch.py index 9f9eeceb866..f99d8aaedde 100644 --- a/homeassistant/components/tplink_omada/switch.py +++ b/homeassistant/components/tplink_omada/switch.py @@ -20,17 +20,12 @@ from tplink_omada_client.devices import ( from tplink_omada_client.omadasiteclient import GatewayPortSettings from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .controller import ( - OmadaGatewayCoordinator, - OmadaSiteController, - OmadaSwitchPortCoordinator, -) +from . import OmadaConfigEntry +from .controller import OmadaGatewayCoordinator, OmadaSwitchPortCoordinator from .coordinator import OmadaCoordinator from .entity import OmadaDeviceEntity @@ -41,11 +36,11 @@ TCoordinator = TypeVar("TCoordinator", bound="OmadaCoordinator[Any]") async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: OmadaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up switches.""" - controller: OmadaSiteController = hass.data[DOMAIN][config_entry.entry_id] + controller = config_entry.runtime_data omada_client = controller.omada_client # Naming fun. 
Omada switches, as in the network hardware @@ -74,7 +69,7 @@ async def async_setup_entry( if desc.exists_func(switch, port) ) - gateway_coordinator = await controller.get_gateway_coordinator() + gateway_coordinator = controller.gateway_coordinator if gateway_coordinator: for gateway in gateway_coordinator.data.values(): entities.extend( @@ -234,7 +229,6 @@ class OmadaDevicePortSwitchEntity( ): """Generic toggle switch entity for a Netork Port of an Omada Device.""" - _attr_has_entity_name = True entity_description: OmadaDevicePortSwitchEntityDescription[ TCoordinator, TDevice, TPort ] diff --git a/homeassistant/components/tplink_omada/update.py b/homeassistant/components/tplink_omada/update.py index 5e87d11474b..54b586794be 100644 --- a/homeassistant/components/tplink_omada/update.py +++ b/homeassistant/components/tplink_omada/update.py @@ -14,17 +14,14 @@ from homeassistant.components.update import ( UpdateEntity, UpdateEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .controller import OmadaSiteController -from .coordinator import OmadaCoordinator +from . import OmadaConfigEntry +from .coordinator import POLL_DEVICES, OmadaCoordinator, OmadaDevicesCoordinator from .entity import OmadaDeviceEntity -POLL_DELAY_IDLE = 6 * 60 * 60 POLL_DELAY_UPGRADE = 60 @@ -35,15 +32,28 @@ class FirmwareUpdateStatus(NamedTuple): firmware: OmadaFirmwareUpdate | None -class OmadaFirmwareUpdateCoodinator(OmadaCoordinator[FirmwareUpdateStatus]): # pylint: disable=hass-enforce-coordinator-module - """Coordinator for getting details about ports on a switch.""" +class OmadaFirmwareUpdateCoordinator(OmadaCoordinator[FirmwareUpdateStatus]): # pylint: disable=hass-enforce-class-module + """Coordinator for getting details about available firmware updates for Omada devices.""" - def __init__(self, hass: HomeAssistant, omada_client: OmadaSiteClient) -> None: + def __init__( + self, + hass: HomeAssistant, + config_entry: OmadaConfigEntry, + omada_client: OmadaSiteClient, + devices_coordinator: OmadaDevicesCoordinator, + ) -> None: """Initialize my coordinator.""" - super().__init__(hass, omada_client, "Firmware Updates", POLL_DELAY_IDLE) + super().__init__(hass, omada_client, "Firmware Updates", poll_delay=None) + + self._devices_coordinator = devices_coordinator + self._config_entry = config_entry + + config_entry.async_on_unload( + devices_coordinator.async_add_listener(self._handle_devices_update) + ) async def _get_firmware_updates(self) -> list[FirmwareUpdateStatus]: - devices = await self.omada_client.get_devices() + devices = self._devices_coordinator.data.values() updates = [ FirmwareUpdateStatus( @@ -55,12 +65,12 @@ class OmadaFirmwareUpdateCoodinator(OmadaCoordinator[FirmwareUpdateStatus]): # for d in devices ] - # During a firmware upgrade, poll more frequently - self.update_interval = timedelta( + # During a firmware upgrade, poll device list more frequently + self._devices_coordinator.update_interval = timedelta( seconds=( POLL_DELAY_UPGRADE if any(u.device.fw_download for u in updates) - else POLL_DELAY_IDLE + else POLL_DEVICES ) ) return updates @@ -69,26 +79,37 @@ class OmadaFirmwareUpdateCoodinator(OmadaCoordinator[FirmwareUpdateStatus]): # """Poll the state of Omada Devices firmware update availability.""" return {d.device.mac: d for d in await 
self._get_firmware_updates()} + @callback + def _handle_devices_update(self) -> None: + """Handle updated data from the devices coordinator.""" + # Trigger a refresh of our data, based on the updated device list + self._config_entry.async_create_background_task( + self.hass, self.async_request_refresh(), "Omada Firmware Update Refresh" + ) + async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: OmadaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up switches.""" - controller: OmadaSiteController = hass.data[DOMAIN][config_entry.entry_id] - omada_client = controller.omada_client + controller = config_entry.runtime_data - devices = await omada_client.get_devices() + devices = controller.devices_coordinator.data - coordinator = OmadaFirmwareUpdateCoodinator(hass, omada_client) + coordinator = OmadaFirmwareUpdateCoordinator( + hass, config_entry, controller.omada_client, controller.devices_coordinator + ) - async_add_entities(OmadaDeviceUpdate(coordinator, device) for device in devices) + async_add_entities( + OmadaDeviceUpdate(coordinator, device) for device in devices.values() + ) await coordinator.async_request_refresh() class OmadaDeviceUpdate( - OmadaDeviceEntity[OmadaFirmwareUpdateCoodinator], + OmadaDeviceEntity[OmadaFirmwareUpdateCoordinator], UpdateEntity, ): """Firmware update status for Omada SDN devices.""" @@ -98,12 +119,11 @@ class OmadaDeviceUpdate( | UpdateEntityFeature.PROGRESS | UpdateEntityFeature.RELEASE_NOTES ) - _attr_has_entity_name = True _attr_device_class = UpdateDeviceClass.FIRMWARE def __init__( self, - coordinator: OmadaFirmwareUpdateCoodinator, + coordinator: OmadaFirmwareUpdateCoordinator, device: OmadaListDevice, ) -> None: """Initialize the update entity.""" diff --git a/homeassistant/components/traccar/device_tracker.py b/homeassistant/components/traccar/device_tracker.py index 468d2fd4d05..0fa7fc344ea 100644 --- a/homeassistant/components/traccar/device_tracker.py +++ b/homeassistant/components/traccar/device_tracker.py @@ -4,50 +4,15 @@ from __future__ import annotations from datetime import timedelta import logging -from typing import Any -from pytraccar import ApiClient, TraccarException -import voluptuous as vol - -from homeassistant.components.device_tracker import ( - PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, - AsyncSeeCallback, - SourceType, - TrackerEntity, -) -from homeassistant.components.device_tracker.legacy import ( - YAML_DEVICES, - remove_device_from_config, -) -from homeassistant.config import load_yaml_config_file -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_EVENT, - CONF_HOST, - CONF_MONITORED_CONDITIONS, - CONF_PASSWORD, - CONF_PORT, - CONF_SSL, - CONF_USERNAME, - CONF_VERIFY_SSL, - EVENT_HOMEASSISTANT_STARTED, -) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - Event, - HomeAssistant, - callback, -) -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.components.device_tracker import TrackerEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from 
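# Editor's note: the update.py hunk above stops polling firmware on its own timer
# and instead refreshes whenever the devices coordinator publishes new data. A
# minimal sketch of that chaining pattern with plain DataUpdateCoordinator objects
# (simplified, not the integration's actual classes):

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator


class DerivedCoordinator(DataUpdateCoordinator[dict]):
    """Coordinator that refreshes whenever a source coordinator updates."""

    def __init__(
        self,
        hass: HomeAssistant,
        entry: ConfigEntry,
        source: DataUpdateCoordinator[dict],
    ) -> None:
        # No update_interval: refreshes are driven entirely by the source.
        super().__init__(
            hass,
            source.logger,
            config_entry=entry,
            name="derived",
            update_interval=None,
        )
        self._entry = entry
        self._source = source
        # async_add_listener returns an unsubscribe callable; tie it to unload.
        entry.async_on_unload(source.async_add_listener(self._handle_source_update))

    @callback
    def _handle_source_update(self) -> None:
        # Listeners must not await, so schedule the refresh as a background task.
        self._entry.async_create_background_task(
            self.hass, self.async_request_refresh(), "derived coordinator refresh"
        )

    async def _async_update_data(self) -> dict:
        # Derive this coordinator's data from the source's latest snapshot.
        return dict(self._source.data or {})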
homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.restore_state import RestoreEntity -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import slugify from . import DOMAIN, TRACKER_UPDATE from .const import ( @@ -58,8 +23,6 @@ from .const import ( ATTR_LATITUDE, ATTR_LONGITUDE, ATTR_SPEED, - CONF_MAX_ACCURACY, - CONF_SKIP_ACCURACY_ON, EVENT_ALARM, EVENT_ALL_EVENTS, EVENT_COMMAND_RESULT, @@ -104,28 +67,6 @@ EVENTS = [ EVENT_ALL_EVENTS, ] -PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_HOST): cv.string, - vol.Optional(CONF_PORT, default=8082): cv.port, - vol.Optional(CONF_SSL, default=False): cv.boolean, - vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean, - vol.Required(CONF_MAX_ACCURACY, default=0): cv.positive_int, - vol.Optional(CONF_SKIP_ACCURACY_ON, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(CONF_MONITORED_CONDITIONS, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(CONF_EVENT, default=[]): vol.All( - cv.ensure_list, - [vol.In(EVENTS)], - ), - } -) - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback @@ -167,80 +108,6 @@ async def async_setup_entry( async_add_entities(entities) -async def async_setup_scanner( - hass: HomeAssistant, - config: ConfigType, - async_see: AsyncSeeCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> bool: - """Import configuration to the new integration.""" - api = ApiClient( - host=config[CONF_HOST], - port=config[CONF_PORT], - ssl=config[CONF_SSL], - username=config[CONF_USERNAME], - password=config[CONF_PASSWORD], - client_session=async_get_clientsession(hass, config[CONF_VERIFY_SSL]), - ) - - async def _run_import(_: Event): - known_devices: dict[str, dict[str, Any]] = {} - try: - known_devices = await hass.async_add_executor_job( - load_yaml_config_file, hass.config.path(YAML_DEVICES) - ) - except (FileNotFoundError, HomeAssistantError): - _LOGGER.debug( - "No valid known_devices.yaml found, " - "skip removal of devices from known_devices.yaml" - ) - - if known_devices: - traccar_devices: list[str] = [] - try: - resp = await api.get_devices() - traccar_devices = [slugify(device["name"]) for device in resp] - except TraccarException as exception: - _LOGGER.error("Error while getting device data: %s", exception) - return - - for dev_name in traccar_devices: - if dev_name in known_devices: - await hass.async_add_executor_job( - remove_device_from_config, hass, dev_name - ) - _LOGGER.debug("Removed device %s from known_devices.yaml", dev_name) - - if not hass.states.async_available(f"device_tracker.{dev_name}"): - hass.states.async_remove(f"device_tracker.{dev_name}") - - hass.async_create_task( - hass.config_entries.flow.async_init( - "traccar_server", - context={"source": SOURCE_IMPORT}, - data=config, - ) - ) - - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Traccar", - }, - ) - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _run_import) - return True - - class 
TraccarEntity(TrackerEntity, RestoreEntity): """Represent a tracked device.""" @@ -249,58 +116,24 @@ class TraccarEntity(TrackerEntity, RestoreEntity): def __init__(self, device, latitude, longitude, battery, accuracy, attributes): """Set up Traccar entity.""" - self._accuracy = accuracy - self._attributes = attributes - self._name = device + self._attr_location_accuracy = accuracy + self._attr_extra_state_attributes = attributes + self._device = device self._battery = battery - self._latitude = latitude - self._longitude = longitude + self._attr_latitude = latitude + self._attr_longitude = longitude self._unsub_dispatcher = None - self._unique_id = device + self._attr_unique_id = device + self._attr_device_info = DeviceInfo( + name=device, + identifiers={(DOMAIN, device)}, + ) @property def battery_level(self): """Return battery value of the device.""" return self._battery - @property - def extra_state_attributes(self): - """Return device specific attributes.""" - return self._attributes - - @property - def latitude(self): - """Return latitude value of the device.""" - return self._latitude - - @property - def longitude(self): - """Return longitude value of the device.""" - return self._longitude - - @property - def location_accuracy(self): - """Return the gps accuracy of the device.""" - return self._accuracy - - @property - def unique_id(self): - """Return the unique ID.""" - return self._unique_id - - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - return DeviceInfo( - name=self._name, - identifiers={(DOMAIN, self._unique_id)}, - ) - - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS - async def async_added_to_hass(self) -> None: """Register state update callback.""" await super().async_added_to_hass() @@ -309,14 +142,14 @@ class TraccarEntity(TrackerEntity, RestoreEntity): ) # don't restore if we got created with data - if self._latitude is not None or self._longitude is not None: + if self.latitude is not None or self.longitude is not None: return if (state := await self.async_get_last_state()) is None: - self._latitude = None - self._longitude = None - self._accuracy = None - self._attributes = { + self._attr_latitude = None + self._attr_longitude = None + self._attr_location_accuracy = 0 + self._attr_extra_state_attributes = { ATTR_ALTITUDE: None, ATTR_BEARING: None, ATTR_SPEED: None, @@ -325,10 +158,10 @@ class TraccarEntity(TrackerEntity, RestoreEntity): return attr = state.attributes - self._latitude = attr.get(ATTR_LATITUDE) - self._longitude = attr.get(ATTR_LONGITUDE) - self._accuracy = attr.get(ATTR_ACCURACY) - self._attributes = { + self._attr_latitude = attr.get(ATTR_LATITUDE) + self._attr_longitude = attr.get(ATTR_LONGITUDE) + self._attr_location_accuracy = attr.get(ATTR_ACCURACY, 0) + self._attr_extra_state_attributes = { ATTR_ALTITUDE: attr.get(ATTR_ALTITUDE), ATTR_BEARING: attr.get(ATTR_BEARING), ATTR_SPEED: attr.get(ATTR_SPEED), @@ -345,12 +178,12 @@ class TraccarEntity(TrackerEntity, RestoreEntity): self, device, latitude, longitude, battery, accuracy, attributes ): """Mark the device as seen.""" - if device != self._name: + if device != self._device: return - self._latitude = latitude - self._longitude = longitude + self._attr_latitude = latitude + self._attr_longitude = longitude self._battery = battery - self._accuracy = accuracy - self._attributes.update(attributes) + self._attr_location_accuracy = accuracy + 
self._attr_extra_state_attributes.update(attributes) self.async_write_ha_state() diff --git a/homeassistant/components/traccar_server/config_flow.py b/homeassistant/components/traccar_server/config_flow.py index 45a43c08685..b186424d32c 100644 --- a/homeassistant/components/traccar_server/config_flow.py +++ b/homeassistant/components/traccar_server/config_flow.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Mapping from typing import Any from pytraccar import ApiClient, ServerModel, TraccarException @@ -161,41 +160,6 @@ class TraccarServerConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, import_info: Mapping[str, Any] - ) -> ConfigFlowResult: - """Import an entry.""" - configured_port = str(import_info[CONF_PORT]) - self._async_abort_entries_match( - { - CONF_HOST: import_info[CONF_HOST], - CONF_PORT: configured_port, - } - ) - if "all_events" in (imported_events := import_info.get("event", [])): - events = list(EVENTS.values()) - else: - events = imported_events - return self.async_create_entry( - title=f"{import_info[CONF_HOST]}:{configured_port}", - data={ - CONF_HOST: import_info[CONF_HOST], - CONF_PORT: configured_port, - CONF_SSL: import_info.get(CONF_SSL, False), - CONF_VERIFY_SSL: import_info.get(CONF_VERIFY_SSL, True), - CONF_USERNAME: import_info[CONF_USERNAME], - CONF_PASSWORD: import_info[CONF_PASSWORD], - }, - options={ - CONF_MAX_ACCURACY: import_info[CONF_MAX_ACCURACY], - CONF_EVENTS: events, - CONF_CUSTOM_ATTRIBUTES: import_info.get("monitored_conditions", []), - CONF_SKIP_ACCURACY_FILTER_FOR: import_info.get( - "skip_accuracy_filter_on", [] - ), - }, - ) - @staticmethod @callback def async_get_options_flow( diff --git a/homeassistant/components/traccar_server/device_tracker.py b/homeassistant/components/traccar_server/device_tracker.py index e7dba3ad99d..9e5a3c0ee9f 100644 --- a/homeassistant/components/traccar_server/device_tracker.py +++ b/homeassistant/components/traccar_server/device_tracker.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -57,8 +57,3 @@ class TraccarServerDeviceTracker(TraccarServerEntity, TrackerEntity): def location_accuracy(self) -> int: """Return the gps accuracy of the device.""" return self.traccar_position["accuracy"] - - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.GPS diff --git a/homeassistant/components/trace/__init__.py b/homeassistant/components/trace/__init__.py index 79830e0b63f..9ff645ce4d6 100644 --- a/homeassistant/components/trace/__init__.py +++ b/homeassistant/components/trace/__init__.py @@ -2,30 +2,27 @@ from __future__ import annotations -from collections.abc import Mapping import logging -from typing import Any import voluptuous as vol from homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.json import ExtendedJSONEncoder from homeassistant.helpers.storage import Store from 
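# Editor's note: the Traccar hunks above drop per-property overrides in favor of
# the TrackerEntity _attr_* shorthand attributes, and remove the source_type
# property (the removals suggest SourceType.GPS is now the default). A minimal
# sketch of the shorthand style with illustrative values:

from homeassistant.components.device_tracker import TrackerEntity


class SketchTracker(TrackerEntity):
    """GPS tracker relying on shorthand attributes instead of properties."""

    _attr_has_entity_name = True

    def __init__(self, name: str, lat: float, lon: float, accuracy: int) -> None:
        """Seed the state once; no latitude/longitude property overrides needed."""
        self._attr_name = name
        self._attr_unique_id = name
        self._attr_latitude = lat
        self._attr_longitude = lon
        self._attr_location_accuracy = accuracy
        self._attr_extra_state_attributes: dict[str, object] = {}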
homeassistant.helpers.typing import ConfigType -from homeassistant.util.limited_size_dict import LimitedSizeDict from . import websocket_api from .const import ( CONF_STORED_TRACES, DATA_TRACE, DATA_TRACE_STORE, - DATA_TRACES_RESTORED, DEFAULT_STORED_TRACES, ) -from .models import ActionTrace, BaseTrace, RestoredTrace +from .models import ActionTrace +from .util import async_store_trace _LOGGER = logging.getLogger(__name__) @@ -40,12 +37,12 @@ TRACE_CONFIG_SCHEMA = { CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -type TraceData = dict[str, LimitedSizeDict[str, BaseTrace]] - - -@callback -def _get_data(hass: HomeAssistant) -> TraceData: - return hass.data[DATA_TRACE] # type: ignore[no-any-return] +__all__ = [ + "CONF_STORED_TRACES", + "TRACE_CONFIG_SCHEMA", + "ActionTrace", + "async_store_trace", +] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @@ -62,7 +59,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: _LOGGER.debug("Storing traces") try: await store.async_save( - {key: list(traces.values()) for key, traces in _get_data(hass).items()} + { + key: list(traces.values()) + for key, traces in hass.data[DATA_TRACE].items() + } ) except HomeAssistantError as exc: _LOGGER.error("Error storing traces", exc_info=exc) @@ -71,121 +71,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_store_traces_at_stop) return True - - -async def async_get_trace( - hass: HomeAssistant, key: str, run_id: str -) -> dict[str, BaseTrace]: - """Return the requested trace.""" - # Restore saved traces if not done - await async_restore_traces(hass) - - return _get_data(hass)[key][run_id].as_extended_dict() - - -async def async_list_contexts( - hass: HomeAssistant, key: str | None -) -> dict[str, dict[str, str]]: - """List contexts for which we have traces.""" - # Restore saved traces if not done - await async_restore_traces(hass) - - values: Mapping[str, LimitedSizeDict[str, BaseTrace] | None] - if key is not None: - values = {key: _get_data(hass).get(key)} - else: - values = _get_data(hass) - - def _trace_id(run_id: str, key: str) -> dict[str, str]: - """Make trace_id for the response.""" - domain, item_id = key.split(".", 1) - return {"run_id": run_id, "domain": domain, "item_id": item_id} - - return { - trace.context.id: _trace_id(trace.run_id, key) - for key, traces in values.items() - if traces is not None - for trace in traces.values() - } - - -def _get_debug_traces(hass: HomeAssistant, key: str) -> list[dict[str, Any]]: - """Return a serializable list of debug traces for a script or automation.""" - if traces_for_key := _get_data(hass).get(key): - return [trace.as_short_dict() for trace in traces_for_key.values()] - return [] - - -async def async_list_traces( - hass: HomeAssistant, wanted_domain: str, wanted_key: str | None -) -> list[dict[str, Any]]: - """List traces for a domain.""" - # Restore saved traces if not done already - await async_restore_traces(hass) - - if not wanted_key: - traces: list[dict[str, Any]] = [] - for key in _get_data(hass): - domain = key.split(".", 1)[0] - if domain == wanted_domain: - traces.extend(_get_debug_traces(hass, key)) - else: - traces = _get_debug_traces(hass, wanted_key) - - return traces - - -def async_store_trace( - hass: HomeAssistant, trace: ActionTrace, stored_traces: int -) -> None: - """Store a trace if its key is valid.""" - if key := trace.key: - traces = _get_data(hass) - if key not in traces: - traces[key] = 
LimitedSizeDict(size_limit=stored_traces) - else: - traces[key].size_limit = stored_traces - traces[key][trace.run_id] = trace - - -def _async_store_restored_trace(hass: HomeAssistant, trace: RestoredTrace) -> None: - """Store a restored trace and move it to the end of the LimitedSizeDict.""" - key = trace.key - traces = _get_data(hass) - if key not in traces: - traces[key] = LimitedSizeDict() - traces[key][trace.run_id] = trace - traces[key].move_to_end(trace.run_id, last=False) - - -async def async_restore_traces(hass: HomeAssistant) -> None: - """Restore saved traces.""" - if DATA_TRACES_RESTORED in hass.data: - return - - hass.data[DATA_TRACES_RESTORED] = True - - store: Store[dict[str, list]] = hass.data[DATA_TRACE_STORE] - try: - restored_traces = await store.async_load() or {} - except HomeAssistantError: - _LOGGER.exception("Error loading traces") - restored_traces = {} - - for key, traces in restored_traces.items(): - # Add stored traces in reversed order to prioritize the newest traces - for json_trace in reversed(traces): - if ( - (stored_traces := _get_data(hass).get(key)) - and stored_traces.size_limit is not None - and len(stored_traces) >= stored_traces.size_limit - ): - break - - try: - trace = RestoredTrace(json_trace) - # Catch any exception to not blow up if the stored trace is invalid - except Exception: - _LOGGER.exception("Failed to restore trace") - continue - _async_store_restored_trace(hass, trace) diff --git a/homeassistant/components/trace/const.py b/homeassistant/components/trace/const.py index f17328325c6..fedbdb71d3a 100644 --- a/homeassistant/components/trace/const.py +++ b/homeassistant/components/trace/const.py @@ -1,7 +1,19 @@ """Shared constants for script and automation tracing and debugging.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.storage import Store + + from .models import TraceData + + CONF_STORED_TRACES = "stored_traces" -DATA_TRACE = "trace" -DATA_TRACE_STORE = "trace_store" -DATA_TRACES_RESTORED = "trace_traces_restored" +DATA_TRACE: HassKey[TraceData] = HassKey("trace") +DATA_TRACE_STORE: HassKey[Store[dict[str, list]]] = HassKey("trace_store") +DATA_TRACES_RESTORED: HassKey[bool] = HassKey("trace_traces_restored") DEFAULT_STORED_TRACES = 5 # Stored traces per script or automation diff --git a/homeassistant/components/trace/models.py b/homeassistant/components/trace/models.py index 9f65b05dcd5..e8ef417ca5f 100644 --- a/homeassistant/components/trace/models.py +++ b/homeassistant/components/trace/models.py @@ -16,8 +16,11 @@ from homeassistant.helpers.trace import ( trace_set_child_id, ) import homeassistant.util.dt as dt_util +from homeassistant.util.limited_size_dict import LimitedSizeDict import homeassistant.util.uuid as uuid_util +type TraceData = dict[str, LimitedSizeDict[str, BaseTrace]] + class BaseTrace(abc.ABC): """Base container for a script or automation trace.""" diff --git a/homeassistant/components/trace/util.py b/homeassistant/components/trace/util.py new file mode 100644 index 00000000000..73e65dd3998 --- /dev/null +++ b/homeassistant/components/trace/util.py @@ -0,0 +1,134 @@ +"""Support for script and automation tracing and debugging.""" + +from __future__ import annotations + +from collections.abc import Mapping +import logging +from typing import Any + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from 
homeassistant.util.limited_size_dict import LimitedSizeDict + +from .const import DATA_TRACE, DATA_TRACE_STORE, DATA_TRACES_RESTORED +from .models import ActionTrace, BaseTrace, RestoredTrace, TraceData + +_LOGGER = logging.getLogger(__name__) + + +async def async_get_trace( + hass: HomeAssistant, key: str, run_id: str +) -> dict[str, BaseTrace]: + """Return the requested trace.""" + # Restore saved traces if not done + await async_restore_traces(hass) + + return hass.data[DATA_TRACE][key][run_id].as_extended_dict() + + +async def async_list_contexts( + hass: HomeAssistant, key: str | None +) -> dict[str, dict[str, str]]: + """List contexts for which we have traces.""" + # Restore saved traces if not done + await async_restore_traces(hass) + + values: Mapping[str, LimitedSizeDict[str, BaseTrace] | None] | TraceData + if key is not None: + values = {key: hass.data[DATA_TRACE].get(key)} + else: + values = hass.data[DATA_TRACE] + + def _trace_id(run_id: str, key: str) -> dict[str, str]: + """Make trace_id for the response.""" + domain, item_id = key.split(".", 1) + return {"run_id": run_id, "domain": domain, "item_id": item_id} + + return { + trace.context.id: _trace_id(trace.run_id, key) + for key, traces in values.items() + if traces is not None + for trace in traces.values() + } + + +def _get_debug_traces(hass: HomeAssistant, key: str) -> list[dict[str, Any]]: + """Return a serializable list of debug traces for a script or automation.""" + if traces_for_key := hass.data[DATA_TRACE].get(key): + return [trace.as_short_dict() for trace in traces_for_key.values()] + return [] + + +async def async_list_traces( + hass: HomeAssistant, wanted_domain: str, wanted_key: str | None +) -> list[dict[str, Any]]: + """List traces for a domain.""" + # Restore saved traces if not done already + await async_restore_traces(hass) + + if not wanted_key: + traces: list[dict[str, Any]] = [] + for key in hass.data[DATA_TRACE]: + domain = key.split(".", 1)[0] + if domain == wanted_domain: + traces.extend(_get_debug_traces(hass, key)) + else: + traces = _get_debug_traces(hass, wanted_key) + + return traces + + +def async_store_trace( + hass: HomeAssistant, trace: ActionTrace, stored_traces: int +) -> None: + """Store a trace if its key is valid.""" + if key := trace.key: + traces = hass.data[DATA_TRACE] + if key not in traces: + traces[key] = LimitedSizeDict(size_limit=stored_traces) + else: + traces[key].size_limit = stored_traces + traces[key][trace.run_id] = trace + + +def _async_store_restored_trace(hass: HomeAssistant, trace: RestoredTrace) -> None: + """Store a restored trace and move it to the end of the LimitedSizeDict.""" + key = trace.key + traces = hass.data[DATA_TRACE] + if key not in traces: + traces[key] = LimitedSizeDict() + traces[key][trace.run_id] = trace + traces[key].move_to_end(trace.run_id, last=False) + + +async def async_restore_traces(hass: HomeAssistant) -> None: + """Restore saved traces.""" + if DATA_TRACES_RESTORED in hass.data: + return + + hass.data[DATA_TRACES_RESTORED] = True + + store = hass.data[DATA_TRACE_STORE] + try: + restored_traces = await store.async_load() or {} + except HomeAssistantError: + _LOGGER.exception("Error loading traces") + restored_traces = {} + + for key, traces in restored_traces.items(): + # Add stored traces in reversed order to prioritize the newest traces + for json_trace in reversed(traces): + if ( + (stored_traces := hass.data[DATA_TRACE].get(key)) + and stored_traces.size_limit is not None + and len(stored_traces) >= stored_traces.size_limit + ): + 
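# Editor's note: trace/const.py above turns plain string keys into typed HassKey
# objects so hass.data lookups are type checked. A self-contained sketch of the
# pattern with an illustrative key and value type (not the trace component's own
# keys):

from homeassistant.core import HomeAssistant
from homeassistant.util.hass_dict import HassKey

# The HassKey annotation carries the type of the value stored under the key.
DATA_COUNTERS: HassKey[dict[str, int]] = HassKey("sketch_counters")


def bump_counter(hass: HomeAssistant, name: str) -> int:
    """hass.data[DATA_COUNTERS] is typed as dict[str, int] thanks to the key."""
    if DATA_COUNTERS not in hass.data:
        hass.data[DATA_COUNTERS] = {}
    counters = hass.data[DATA_COUNTERS]
    counters[name] = counters.get(name, 0) + 1
    return counters[name]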
break + + try: + trace = RestoredTrace(json_trace) + # Catch any exception to not blow up if the stored trace is invalid + except Exception: + _LOGGER.exception("Failed to restore trace") + continue + _async_store_restored_trace(hass, trace) diff --git a/homeassistant/components/trace/websocket_api.py b/homeassistant/components/trace/websocket_api.py index f1ea6133d43..d75fff1a466 100644 --- a/homeassistant/components/trace/websocket_api.py +++ b/homeassistant/components/trace/websocket_api.py @@ -26,7 +26,7 @@ from homeassistant.helpers.script import ( debug_stop, ) -from .. import trace +from .util import async_get_trace, async_list_contexts, async_list_traces TRACE_DOMAINS = ("automation", "script") @@ -66,7 +66,7 @@ async def websocket_trace_get( run_id = msg["run_id"] try: - requested_trace = await trace.async_get_trace(hass, key, run_id) + requested_trace = await async_get_trace(hass, key, run_id) except KeyError: connection.send_error( msg["id"], websocket_api.ERR_NOT_FOUND, "The trace could not be found" @@ -98,7 +98,7 @@ async def websocket_trace_list( wanted_domain = msg["domain"] key = f"{msg['domain']}.{msg['item_id']}" if "item_id" in msg else None - traces = await trace.async_list_traces(hass, wanted_domain, key) + traces = await async_list_traces(hass, wanted_domain, key) connection.send_result(msg["id"], traces) @@ -120,7 +120,7 @@ async def websocket_trace_contexts( """Retrieve contexts we have traces for.""" key = f"{msg['domain']}.{msg['item_id']}" if "item_id" in msg else None - contexts = await trace.async_list_contexts(hass, key) + contexts = await async_list_contexts(hass, key) connection.send_result(msg["id"], contexts) diff --git a/homeassistant/components/tractive/__init__.py b/homeassistant/components/tractive/__init__.py index 4f0de7b14cd..8bc2d11d047 100644 --- a/homeassistant/components/tractive/__init__.py +++ b/homeassistant/components/tractive/__init__.py @@ -136,7 +136,7 @@ async def _generate_trackables( return None if "details" not in trackable: - _LOGGER.info( + _LOGGER.warning( "Tracker %s has no details and will be skipped. 
This happens for shared trackers", trackable["device_id"], ) diff --git a/homeassistant/components/tractive/device_tracker.py b/homeassistant/components/tractive/device_tracker.py index d5d6f5f541c..f31afaf92f6 100644 --- a/homeassistant/components/tractive/device_tracker.py +++ b/homeassistant/components/tractive/device_tracker.py @@ -47,9 +47,9 @@ class TractiveDeviceTracker(TractiveEntity, TrackerEntity): ) self._battery_level: int | None = item.hw_info.get("battery_level") - self._latitude: float = item.pos_report["latlong"][0] - self._longitude: float = item.pos_report["latlong"][1] - self._accuracy: int = item.pos_report["pos_uncertainty"] + self._attr_latitude = item.pos_report["latlong"][0] + self._attr_longitude = item.pos_report["latlong"][1] + self._attr_location_accuracy: int = item.pos_report["pos_uncertainty"] self._source_type: str = item.pos_report["sensor_used"] self._attr_unique_id = item.trackable["_id"] @@ -62,21 +62,6 @@ class TractiveDeviceTracker(TractiveEntity, TrackerEntity): return SourceType.ROUTER return SourceType.GPS - @property - def latitude(self) -> float: - """Return latitude value of the device.""" - return self._latitude - - @property - def longitude(self) -> float: - """Return longitude value of the device.""" - return self._longitude - - @property - def location_accuracy(self) -> int: - """Return the gps accuracy of the device.""" - return self._accuracy - @property def battery_level(self) -> int | None: """Return the battery level of the device.""" @@ -90,9 +75,9 @@ class TractiveDeviceTracker(TractiveEntity, TrackerEntity): @callback def _handle_position_update(self, event: dict[str, Any]) -> None: - self._latitude = event["latitude"] - self._longitude = event["longitude"] - self._accuracy = event["accuracy"] + self._attr_latitude = event["latitude"] + self._attr_longitude = event["longitude"] + self._attr_location_accuracy = event["accuracy"] self._source_type = event["sensor_used"] self._attr_available = True self.async_write_ha_state() diff --git a/homeassistant/components/tractive/sensor.py b/homeassistant/components/tractive/sensor.py index a92efa660b6..a3c1893267c 100644 --- a/homeassistant/components/tractive/sensor.py +++ b/homeassistant/components/tractive/sensor.py @@ -16,6 +16,7 @@ from homeassistant.const import ( ATTR_BATTERY_LEVEL, PERCENTAGE, EntityCategory, + UnitOfEnergy, UnitOfTime, ) from homeassistant.core import HomeAssistant, callback @@ -127,7 +128,7 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] 
= ( TractiveSensorEntityDescription( key=ATTR_CALORIES, translation_key="calories", - native_unit_of_measurement="kcal", + native_unit_of_measurement=UnitOfEnergy.KILO_CALORIE, signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED, state_class=SensorStateClass.TOTAL, ), diff --git a/homeassistant/components/tradfri/config_flow.py b/homeassistant/components/tradfri/config_flow.py index 8de40140339..d9911472a67 100644 --- a/homeassistant/components/tradfri/config_flow.py +++ b/homeassistant/components/tradfri/config_flow.py @@ -60,10 +60,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): return await self._entry_from_data(auth) except AuthError as err: - if err.code == "invalid_security_code": - errors[KEY_SECURITY_CODE] = err.code - else: - errors["base"] = err.code + errors["base"] = err.code else: user_input = {} diff --git a/homeassistant/components/tradfri/cover.py b/homeassistant/components/tradfri/cover.py index 873b5f3cd07..92d10320327 100644 --- a/homeassistant/components/tradfri/cover.py +++ b/homeassistant/components/tradfri/cover.py @@ -12,9 +12,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .base_class import TradfriBaseEntity from .const import CONF_GATEWAY_ID, COORDINATOR, COORDINATOR_LIST, DOMAIN, KEY_API from .coordinator import TradfriDeviceDataUpdateCoordinator +from .entity import TradfriBaseEntity async def async_setup_entry( diff --git a/homeassistant/components/tradfri/base_class.py b/homeassistant/components/tradfri/entity.py similarity index 100% rename from homeassistant/components/tradfri/base_class.py rename to homeassistant/components/tradfri/entity.py diff --git a/homeassistant/components/tradfri/fan.py b/homeassistant/components/tradfri/fan.py index 6561fc166dc..3f45ee3e1eb 100644 --- a/homeassistant/components/tradfri/fan.py +++ b/homeassistant/components/tradfri/fan.py @@ -12,9 +12,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .base_class import TradfriBaseEntity from .const import CONF_GATEWAY_ID, COORDINATOR, COORDINATOR_LIST, DOMAIN, KEY_API from .coordinator import TradfriDeviceDataUpdateCoordinator +from .entity import TradfriBaseEntity ATTR_AUTO = "Auto" ATTR_MAX_FAN_STEPS = 49 @@ -69,7 +69,6 @@ class TradfriAirPurifierFan(TradfriBaseEntity, FanEntity): # ... 
with step size 1 # 50 = Max _attr_speed_count = ATTR_MAX_FAN_STEPS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tradfri/light.py b/homeassistant/components/tradfri/light.py index ef65c6bf957..a71691e6e90 100644 --- a/homeassistant/components/tradfri/light.py +++ b/homeassistant/components/tradfri/light.py @@ -9,7 +9,7 @@ from pytradfri.command import Command from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, ColorMode, @@ -22,9 +22,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util -from .base_class import TradfriBaseEntity from .const import CONF_GATEWAY_ID, COORDINATOR, COORDINATOR_LIST, DOMAIN, KEY_API from .coordinator import TradfriDeviceDataUpdateCoordinator +from .entity import TradfriBaseEntity async def async_setup_entry( @@ -87,8 +87,16 @@ class TradfriLight(TradfriBaseEntity, LightEntity): self._fixed_color_mode = next(iter(self._attr_supported_color_modes)) if self._device_control: - self._attr_min_mireds = self._device_control.min_mireds - self._attr_max_mireds = self._device_control.max_mireds + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + self._device_control.min_mireds + ) + ) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + self._device_control.max_mireds + ) + ) def _refresh(self) -> None: """Refresh the device.""" @@ -118,11 +126,11 @@ class TradfriLight(TradfriBaseEntity, LightEntity): return cast(int, self._device_data.dimmer) @property - def color_temp(self) -> int | None: - """Return the color temp value in mireds.""" - if not self._device_data: + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + if not self._device_data or not (color_temp := self._device_data.color_temp): return None - return cast(int, self._device_data.color_temp) + return color_util.color_temperature_mired_to_kelvin(color_temp) @property def hs_color(self) -> tuple[float, float] | None: @@ -191,18 +199,19 @@ class TradfriLight(TradfriBaseEntity, LightEntity): transition_time = None temp_command = None - if ATTR_COLOR_TEMP in kwargs and ( + if ATTR_COLOR_TEMP_KELVIN in kwargs and ( self._device_control.can_set_temp or self._device_control.can_set_color ): - temp = kwargs[ATTR_COLOR_TEMP] + temp_k = kwargs[ATTR_COLOR_TEMP_KELVIN] # White Spectrum bulb if self._device_control.can_set_temp: - if temp > self.max_mireds: - temp = self.max_mireds - elif temp < self.min_mireds: - temp = self.min_mireds + temp = color_util.color_temperature_kelvin_to_mired(temp_k) + if temp < (min_mireds := self._device_control.min_mireds): + temp = min_mireds + elif temp > (max_mireds := self._device_control.max_mireds): + temp = max_mireds temp_data = { - ATTR_COLOR_TEMP: temp, + "color_temp": temp, "transition_time": transition_time, } temp_command = self._device_control.set_color_temp(**temp_data) @@ -210,7 +219,6 @@ class TradfriLight(TradfriBaseEntity, LightEntity): # Color bulb (CWS) # color_temp needs to be set with hue/saturation elif self._device_control.can_set_color: - temp_k = color_util.color_temperature_mired_to_kelvin(temp) hs_color = color_util.color_temperature_to_hs(temp_k) hue = int(hs_color[0] * (self._device_control.max_hue / 360)) sat = int(hs_color[1] * (self._device_control.max_saturation / 100)) diff 
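# Editor's note: the Tradfri light hunk above migrates from mireds (ATTR_COLOR_TEMP,
# min/max_mireds) to Kelvin (ATTR_COLOR_TEMP_KELVIN, min/max_color_temp_kelvin). A
# minimal sketch of the conversion helpers involved, assuming a device API that
# still reports its limits in mireds (the limit values below are illustrative):

from homeassistant.util import color as color_util

DEVICE_MIN_MIREDS = 250
DEVICE_MAX_MIREDS = 454

# Note the inversion: the smallest mired value maps to the largest Kelvin value.
max_color_temp_kelvin = color_util.color_temperature_mired_to_kelvin(DEVICE_MIN_MIREDS)
min_color_temp_kelvin = color_util.color_temperature_mired_to_kelvin(DEVICE_MAX_MIREDS)


def kelvin_to_device_mireds(kelvin: int) -> int:
    """Convert a requested Kelvin value and clamp it to the device's mired range."""
    mireds = color_util.color_temperature_kelvin_to_mired(kelvin)
    return int(min(max(mireds, DEVICE_MIN_MIREDS), DEVICE_MAX_MIREDS))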
--git a/homeassistant/components/tradfri/sensor.py b/homeassistant/components/tradfri/sensor.py index 5d3e63d3a5d..4e560f0e7b5 100644 --- a/homeassistant/components/tradfri/sensor.py +++ b/homeassistant/components/tradfri/sensor.py @@ -26,7 +26,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .base_class import TradfriBaseEntity from .const import ( CONF_GATEWAY_ID, COORDINATOR, @@ -36,6 +35,7 @@ from .const import ( LOGGER, ) from .coordinator import TradfriDeviceDataUpdateCoordinator +from .entity import TradfriBaseEntity @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tradfri/strings.json b/homeassistant/components/tradfri/strings.json index 69a28a567ab..9ed7e167e71 100644 --- a/homeassistant/components/tradfri/strings.json +++ b/homeassistant/components/tradfri/strings.json @@ -14,7 +14,7 @@ } }, "error": { - "invalid_key": "Failed to register with provided key. If this keeps happening, try restarting the gateway.", + "invalid_security_code": "Failed to register with provided key. If this keeps happening, try restarting the gateway.", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "timeout": "Timeout validating the code.", "cannot_authenticate": "Cannot authenticate, is Gateway paired with another server like e.g. Homekit?" diff --git a/homeassistant/components/tradfri/switch.py b/homeassistant/components/tradfri/switch.py index 20695f26500..088b775b9fd 100644 --- a/homeassistant/components/tradfri/switch.py +++ b/homeassistant/components/tradfri/switch.py @@ -12,9 +12,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .base_class import TradfriBaseEntity from .const import CONF_GATEWAY_ID, COORDINATOR, COORDINATOR_LIST, DOMAIN, KEY_API from .coordinator import TradfriDeviceDataUpdateCoordinator +from .entity import TradfriBaseEntity async def async_setup_entry( diff --git a/homeassistant/components/trafikverket_camera/__init__.py b/homeassistant/components/trafikverket_camera/__init__.py index 938bfce2318..614072cc706 100644 --- a/homeassistant/components/trafikverket_camera/__init__.py +++ b/homeassistant/components/trafikverket_camera/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging -from pytrafikverket.trafikverket_camera import TrafikverketCamera +from pytrafikverket import TrafikverketCamera from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_LOCATION @@ -25,7 +25,7 @@ TVCameraConfigEntry = ConfigEntry[TVDataUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: TVCameraConfigEntry) -> bool: """Set up Trafikverket Camera from a config entry.""" - coordinator = TVDataUpdateCoordinator(hass) + coordinator = TVDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/trafikverket_camera/config_flow.py b/homeassistant/components/trafikverket_camera/config_flow.py index 501ccb7e0e0..29f3db7beac 100644 --- a/homeassistant/components/trafikverket_camera/config_flow.py +++ b/homeassistant/components/trafikverket_camera/config_flow.py @@ -5,12 +5,20 @@ from __future__ import annotations from collections.abc import Mapping from typing import Any -from 
pytrafikverket.exceptions import InvalidAuthentication, NoCameraFound, UnknownError -from pytrafikverket.models import CameraInfoModel -from pytrafikverket.trafikverket_camera import TrafikverketCamera +from pytrafikverket import ( + CameraInfoModel, + InvalidAuthentication, + NoCameraFound, + TrafikverketCamera, + UnknownError, +) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_LOCATION from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import ( @@ -29,7 +37,6 @@ class TVCameraConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 3 - entry: ConfigEntry | None cameras: list[CameraInfoModel] api_key: str @@ -58,7 +65,6 @@ class TVCameraConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle re-authentication with Trafikverket.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -70,19 +76,13 @@ class TVCameraConfigFlow(ConfigFlow, domain=DOMAIN): if user_input: api_key = user_input[CONF_API_KEY] - assert self.entry is not None - errors, _ = await self.validate_input(api_key, self.entry.data[CONF_ID]) + reauth_entry = self._get_reauth_entry() + errors, _ = await self.validate_input(api_key, reauth_entry.data[CONF_ID]) if not errors: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, - CONF_API_KEY: api_key, - }, + return self.async_update_reload_and_abort( + reauth_entry, data_updates={CONF_API_KEY: api_key} ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", @@ -94,6 +94,49 @@ class TVCameraConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-configuration with Trafikverket.""" + errors: dict[str, str] = {} + reconfigure_entry = self._get_reconfigure_entry() + + if user_input: + api_key = user_input[CONF_API_KEY] + location = user_input[CONF_LOCATION] + + errors, cameras = await self.validate_input(api_key, location) + + if not errors and cameras: + if len(cameras) > 1: + self.cameras = cameras + self.api_key = api_key + return await self.async_step_multiple_cameras() + await self.async_set_unique_id(f"{DOMAIN}-{cameras[0].camera_id}") + self._abort_if_unique_id_configured() + return self.async_update_reload_and_abort( + reconfigure_entry, + unique_id=f"{DOMAIN}-{cameras[0].camera_id}", + title=cameras[0].camera_name or "Trafikverket Camera", + data={CONF_API_KEY: api_key, CONF_ID: cameras[0].camera_id}, + ) + + schema = self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_API_KEY): TextSelector(), + vol.Required(CONF_LOCATION): TextSelector(), + } + ), + {**reconfigure_entry.data, **(user_input or {})}, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=schema, + errors=errors, + ) + async def async_step_user( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: @@ -140,6 +183,16 @@ class TVCameraConfigFlow(ConfigFlow, domain=DOMAIN): ) if not errors and cameras: + if self.source == SOURCE_RECONFIGURE: + return 
self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + unique_id=f"{DOMAIN}-{cameras[0].camera_id}", + title=cameras[0].camera_name or "Trafikverket Camera", + data={ + CONF_API_KEY: self.api_key, + CONF_ID: cameras[0].camera_id, + }, + ) await self.async_set_unique_id(f"{DOMAIN}-{cameras[0].camera_id}") self._abort_if_unique_id_configured() return self.async_create_entry( diff --git a/homeassistant/components/trafikverket_camera/coordinator.py b/homeassistant/components/trafikverket_camera/coordinator.py index 7bc5c556c00..649eb102575 100644 --- a/homeassistant/components/trafikverket_camera/coordinator.py +++ b/homeassistant/components/trafikverket_camera/coordinator.py @@ -9,14 +9,14 @@ import logging from typing import TYPE_CHECKING import aiohttp -from pytrafikverket.exceptions import ( +from pytrafikverket import ( + CameraInfoModel, InvalidAuthentication, MultipleCamerasFound, NoCameraFound, + TrafikverketCamera, UnknownError, ) -from pytrafikverket.models import CameraInfoModel -from pytrafikverket.trafikverket_camera import TrafikverketCamera from homeassistant.const import CONF_API_KEY, CONF_ID from homeassistant.core import HomeAssistant @@ -44,21 +44,20 @@ class CameraData: class TVDataUpdateCoordinator(DataUpdateCoordinator[CameraData]): """A Trafikverket Data Update Coordinator.""" - config_entry: TVCameraConfigEntry - - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, config_entry: TVCameraConfigEntry) -> None: """Initialize the Trafikverket coordinator.""" super().__init__( hass, _LOGGER, + config_entry=config_entry, name=DOMAIN, update_interval=TIME_BETWEEN_UPDATES, ) self.session = async_get_clientsession(hass) self._camera_api = TrafikverketCamera( - self.session, self.config_entry.data[CONF_API_KEY] + self.session, config_entry.data[CONF_API_KEY] ) - self._id = self.config_entry.data[CONF_ID] + self._id = config_entry.data[CONF_ID] async def _async_update_data(self) -> CameraData: """Fetch data from Trafikverket.""" diff --git a/homeassistant/components/trafikverket_camera/manifest.json b/homeassistant/components/trafikverket_camera/manifest.json index f424f47f7c5..08d945e0a0c 100644 --- a/homeassistant/components/trafikverket_camera/manifest.json +++ b/homeassistant/components/trafikverket_camera/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_camera", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==1.0.0"] + "requirements": ["pytrafikverket==1.1.1"] } diff --git a/homeassistant/components/trafikverket_camera/strings.json b/homeassistant/components/trafikverket_camera/strings.json index e3a1ceec4c0..b6e2209fc57 100644 --- a/homeassistant/components/trafikverket_camera/strings.json +++ b/homeassistant/components/trafikverket_camera/strings.json @@ -2,7 +2,8 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -25,6 +26,20 @@ "data": { "id": "Choose camera" } + }, + "reauth_confirm": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + } + }, + "reconfigure": { + "data": { + "api_key": 
"[%key:common::config_flow::data::api_key%]", + "location": "[%key:common::config_flow::data::location%]" + }, + "data_description": { + "location": "[%key:component::trafikverket_camera::config::step::user::data_description::location%]" + } } } }, diff --git a/homeassistant/components/trafikverket_ferry/config_flow.py b/homeassistant/components/trafikverket_ferry/config_flow.py index 1f82a535f16..002dc421273 100644 --- a/homeassistant/components/trafikverket_ferry/config_flow.py +++ b/homeassistant/components/trafikverket_ferry/config_flow.py @@ -9,7 +9,7 @@ from pytrafikverket import TrafikverketFerry from pytrafikverket.exceptions import InvalidAuthentication, NoFerryFound import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_WEEKDAY, WEEKDAYS from homeassistant.helpers import selector from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -49,8 +49,6 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry | None - async def validate_input( self, api_key: str, ferry_from: str, ferry_to: str ) -> None: @@ -63,8 +61,6 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Trafikverket.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -76,10 +72,10 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): if user_input: api_key = user_input[CONF_API_KEY] - assert self.entry is not None + reauth_entry = self._get_reauth_entry() try: await self.validate_input( - api_key, self.entry.data[CONF_FROM], self.entry.data[CONF_TO] + api_key, reauth_entry.data[CONF_FROM], reauth_entry.data[CONF_TO] ) except InvalidAuthentication: errors["base"] = "invalid_auth" @@ -88,15 +84,10 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): except Exception: # noqa: BLE001 errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, - CONF_API_KEY: api_key, - }, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_API_KEY: api_key}, ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/trafikverket_ferry/manifest.json b/homeassistant/components/trafikverket_ferry/manifest.json index 0b7b056754c..4177587db7e 100644 --- a/homeassistant/components/trafikverket_ferry/manifest.json +++ b/homeassistant/components/trafikverket_ferry/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_ferry", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==1.0.0"] + "requirements": ["pytrafikverket==1.1.1"] } diff --git a/homeassistant/components/trafikverket_train/__init__.py b/homeassistant/components/trafikverket_train/__init__.py index 3e807df9301..23aee50d816 100644 --- a/homeassistant/components/trafikverket_train/__init__.py +++ b/homeassistant/components/trafikverket_train/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import logging + from homeassistant.config_entries import ConfigEntry from 
homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -11,6 +13,8 @@ from .coordinator import TVDataUpdateCoordinator TVTrainConfigEntry = ConfigEntry[TVDataUpdateCoordinator] +_LOGGER = logging.getLogger(__name__) + async def async_setup_entry(hass: HomeAssistant, entry: TVTrainConfigEntry) -> bool: """Set up Trafikverket Train from a config entry.""" @@ -42,3 +46,24 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) + + +async def async_migrate_entry(hass: HomeAssistant, entry: TVTrainConfigEntry) -> bool: + """Migrate config entry.""" + _LOGGER.debug("Migrating from version %s", entry.version) + + if entry.version > 1: + # This means the user has downgraded from a future version + return False + + if entry.version == 1 and entry.minor_version == 1: + # Remove unique id + hass.config_entries.async_update_entry(entry, unique_id=None, minor_version=2) + + _LOGGER.debug( + "Migration to version %s.%s successful", + entry.version, + entry.minor_version, + ) + + return True diff --git a/homeassistant/components/trafikverket_train/config_flow.py b/homeassistant/components/trafikverket_train/config_flow.py index d03eeca8f65..363b9bb2542 100644 --- a/homeassistant/components/trafikverket_train/config_flow.py +++ b/homeassistant/components/trafikverket_train/config_flow.py @@ -21,7 +21,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_WEEKDAY, WEEKDAYS from homeassistant.core import HomeAssistant, callback @@ -37,7 +37,7 @@ from homeassistant.helpers.selector import ( import homeassistant.util.dt as dt_util from .const import CONF_FILTER_PRODUCT, CONF_FROM, CONF_TIME, CONF_TO, DOMAIN -from .util import create_unique_id, next_departuredate +from .util import next_departuredate _LOGGER = logging.getLogger(__name__) @@ -93,8 +93,8 @@ async def validate_input( try: web_session = async_get_clientsession(hass) train_api = TrafikverketTrain(web_session, api_key) - from_station = await train_api.async_get_train_station(train_from) - to_station = await train_api.async_get_train_station(train_to) + from_station = await train_api.async_search_train_station(train_from) + to_station = await train_api.async_search_train_station(train_to) if train_time: await train_api.async_get_train_stop( from_station, to_station, when, product_filter @@ -125,8 +125,7 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Trafikverket Train integration.""" VERSION = 1 - - entry: ConfigEntry | None + MINOR_VERSION = 2 @staticmethod @callback @@ -134,14 +133,12 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> TVTrainOptionsFlowHandler: """Get the options flow for this handler.""" - return TVTrainOptionsFlowHandler(config_entry) + return TVTrainOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Trafikverket.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -153,26 +150,21 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): if user_input: api_key = user_input[CONF_API_KEY] 
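# Editor's note: the async_migrate_entry added above clears the entry's unique_id
# and bumps the minor version; duplicates are instead prevented in the flow with
# _async_abort_entries_match (shown further below). A minimal self-contained sketch
# of the version-gated migration shape (version numbers are illustrative):

import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

_LOGGER = logging.getLogger(__name__)


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate an old config entry to the current schema."""
    if entry.version > 1:
        # Entry was created by a newer version of the integration; refuse it.
        return False

    if entry.version == 1 and entry.minor_version == 1:
        # Minor-version bumps may not need data changes at all; here we only
        # drop a unique_id that is no longer used.
        hass.config_entries.async_update_entry(entry, unique_id=None, minor_version=2)

    _LOGGER.debug("Migrated entry to %s.%s", entry.version, entry.minor_version)
    return True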
- assert self.entry is not None + reauth_entry = self._get_reauth_entry() errors = await validate_input( self.hass, api_key, - self.entry.data[CONF_FROM], - self.entry.data[CONF_TO], - self.entry.data.get(CONF_TIME), - self.entry.data[CONF_WEEKDAY], - self.entry.options.get(CONF_FILTER_PRODUCT), + reauth_entry.data[CONF_FROM], + reauth_entry.data[CONF_TO], + reauth_entry.data.get(CONF_TIME), + reauth_entry.data[CONF_WEEKDAY], + reauth_entry.options.get(CONF_FILTER_PRODUCT), ) if not errors: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, - CONF_API_KEY: api_key, - }, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_API_KEY: api_key}, ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", @@ -211,11 +203,16 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): filter_product, ) if not errors: - unique_id = create_unique_id( - train_from, train_to, train_time, train_days + self._async_abort_entries_match( + { + CONF_API_KEY: api_key, + CONF_FROM: train_from, + CONF_TO: train_to, + CONF_TIME: train_time, + CONF_WEEKDAY: train_days, + CONF_FILTER_PRODUCT: filter_product, + } ) - await self.async_set_unique_id(unique_id) - self._abort_if_unique_id_configured() return self.async_create_entry( title=name, data={ @@ -238,7 +235,7 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): ) -class TVTrainOptionsFlowHandler(OptionsFlowWithConfigEntry): +class TVTrainOptionsFlowHandler(OptionsFlow): """Handle Trafikverket Train options.""" async def async_step_init( @@ -256,7 +253,7 @@ class TVTrainOptionsFlowHandler(OptionsFlowWithConfigEntry): step_id="init", data_schema=self.add_suggested_values_to_schema( vol.Schema(OPTION_SCHEMA), - user_input or self.options, + user_input or self.config_entry.options, ), errors=errors, ) diff --git a/homeassistant/components/trafikverket_train/coordinator.py b/homeassistant/components/trafikverket_train/coordinator.py index 16a7a649b85..49d4e1ded74 100644 --- a/homeassistant/components/trafikverket_train/coordinator.py +++ b/homeassistant/components/trafikverket_train/coordinator.py @@ -94,10 +94,10 @@ class TVDataUpdateCoordinator(DataUpdateCoordinator[TrainData]): async def _async_setup(self) -> None: """Initiate stations.""" try: - self.to_station = await self._train_api.async_get_train_station( + self.to_station = await self._train_api.async_search_train_station( self.config_entry.data[CONF_TO] ) - self.from_station = await self._train_api.async_get_train_station( + self.from_station = await self._train_api.async_search_train_station( self.config_entry.data[CONF_FROM] ) except InvalidAuthentication as error: diff --git a/homeassistant/components/trafikverket_train/manifest.json b/homeassistant/components/trafikverket_train/manifest.json index 222b23dbe9a..40f3a39a2bb 100644 --- a/homeassistant/components/trafikverket_train/manifest.json +++ b/homeassistant/components/trafikverket_train/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_train", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==1.0.0"] + "requirements": ["pytrafikverket==1.1.1"] } diff --git a/homeassistant/components/trafikverket_train/util.py b/homeassistant/components/trafikverket_train/util.py index 9648436f1e5..9a8dd9ea237 100644 --- a/homeassistant/components/trafikverket_train/util.py +++ 
b/homeassistant/components/trafikverket_train/util.py @@ -2,22 +2,11 @@ from __future__ import annotations -from datetime import date, time, timedelta +from datetime import date, timedelta from homeassistant.const import WEEKDAYS -def create_unique_id( - from_station: str, to_station: str, depart_time: time | str | None, weekdays: list -) -> str: - """Create unique id.""" - timestr = str(depart_time) if depart_time else "" - return ( - f"{from_station.casefold().replace(' ', '')}-{to_station.casefold().replace(' ', '')}" - f"-{timestr.casefold().replace(' ', '')}-{weekdays!s}" - ) - - def next_weekday(fromdate: date, weekday: int) -> date: """Return the date of the next time a specific weekday happen.""" days_ahead = weekday - fromdate.weekday() diff --git a/homeassistant/components/trafikverket_weatherstation/config_flow.py b/homeassistant/components/trafikverket_weatherstation/config_flow.py index cf7ca905acb..28b9a124fc6 100644 --- a/homeassistant/components/trafikverket_weatherstation/config_flow.py +++ b/homeassistant/components/trafikverket_weatherstation/config_flow.py @@ -13,10 +13,15 @@ from pytrafikverket.exceptions import ( from pytrafikverket.trafikverket_weather import TrafikverketWeather import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) from .const import CONF_STATION, DOMAIN @@ -26,8 +31,6 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry | None = None - async def validate_input(self, sensor_api: str, station: str) -> None: """Validate input from user input.""" web_session = async_get_clientsession(self.hass) @@ -79,8 +82,6 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Trafikverket.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -88,14 +89,13 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm re-authentication with Trafikverket.""" errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() if user_input: api_key = user_input[CONF_API_KEY] - assert self.entry is not None - try: - await self.validate_input(api_key, self.entry.data[CONF_STATION]) + await self.validate_input(api_key, reauth_entry.data[CONF_STATION]) except InvalidAuthentication: errors["base"] = "invalid_auth" except NoWeatherStationFound: @@ -105,18 +105,56 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): except Exception: # noqa: BLE001 errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, - CONF_API_KEY: api_key, - }, + return self.async_update_reload_and_abort( + reauth_entry, data_updates={CONF_API_KEY: api_key} ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema({vol.Required(CONF_API_KEY): cv.string}), errors=errors, ) + + async def async_step_reconfigure( + 
self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-configuration with Trafikverket.""" + errors: dict[str, str] = {} + + if user_input: + try: + await self.validate_input( + user_input[CONF_API_KEY], user_input[CONF_STATION] + ) + except InvalidAuthentication: + errors["base"] = "invalid_auth" + except NoWeatherStationFound: + errors["base"] = "invalid_station" + except MultipleWeatherStationsFound: + errors["base"] = "more_stations" + except Exception: # noqa: BLE001 + errors["base"] = "cannot_connect" + else: + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + title=user_input[CONF_STATION], + data=user_input, + ) + + schema = self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_API_KEY): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + vol.Required(CONF_STATION): TextSelector(), + } + ), + {**self._get_reconfigure_entry().data, **(user_input or {})}, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=schema, + errors=errors, + ) diff --git a/homeassistant/components/trafikverket_weatherstation/manifest.json b/homeassistant/components/trafikverket_weatherstation/manifest.json index 85838726178..3996379540f 100644 --- a/homeassistant/components/trafikverket_weatherstation/manifest.json +++ b/homeassistant/components/trafikverket_weatherstation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_weatherstation", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==1.0.0"] + "requirements": ["pytrafikverket==1.1.1"] } diff --git a/homeassistant/components/trafikverket_weatherstation/strings.json b/homeassistant/components/trafikverket_weatherstation/strings.json index a4838dab0e2..90a9f9ba7c1 100644 --- a/homeassistant/components/trafikverket_weatherstation/strings.json +++ b/homeassistant/components/trafikverket_weatherstation/strings.json @@ -2,7 +2,8 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -21,6 +22,12 @@ "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } + }, + "reconfigure": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]", + "station": "[%key:component::trafikverket_weatherstation::config::step::user::data::station%]" + } } } }, diff --git a/homeassistant/components/transmission/__init__.py b/homeassistant/components/transmission/__init__.py index 1c108831acf..1a8ffdea0c2 100644 --- a/homeassistant/components/transmission/__init__.py +++ b/homeassistant/components/transmission/__init__.py @@ -42,6 +42,7 @@ from homeassistant.helpers.typing import ConfigType from .const import ( ATTR_DELETE_DATA, + ATTR_DOWNLOAD_PATH, ATTR_TORRENT, CONF_ENTRY_ID, DEFAULT_DELETE_DATA, @@ -82,7 +83,12 @@ SERVICE_BASE_SCHEMA = vol.Schema( ) SERVICE_ADD_TORRENT_SCHEMA = vol.All( - SERVICE_BASE_SCHEMA.extend({vol.Required(ATTR_TORRENT): cv.string}), + SERVICE_BASE_SCHEMA.extend( + { + vol.Required(ATTR_TORRENT): cv.string, + vol.Optional(ATTR_DOWNLOAD_PATH, default=None): cv.string, + } + ), ) @@ -213,10 +219,18 @@ def 
setup_hass_services(hass: HomeAssistant) -> None: entry_id: str = service.data[CONF_ENTRY_ID] coordinator = _get_coordinator_from_service_data(hass, entry_id) torrent: str = service.data[ATTR_TORRENT] + download_path: str | None = service.data.get(ATTR_DOWNLOAD_PATH) if torrent.startswith( ("http", "ftp:", "magnet:") ) or hass.config.is_allowed_path(torrent): - await hass.async_add_executor_job(coordinator.api.add_torrent, torrent) + if download_path: + await hass.async_add_executor_job( + partial( + coordinator.api.add_torrent, torrent, download_dir=download_path + ) + ) + else: + await hass.async_add_executor_job(coordinator.api.add_torrent, torrent) await coordinator.async_request_refresh() else: _LOGGER.warning("Could not add torrent: unsupported type or no permission") diff --git a/homeassistant/components/transmission/config_flow.py b/homeassistant/components/transmission/config_flow.py index 2a4fd5aae0b..30e9f5a146b 100644 --- a/homeassistant/components/transmission/config_flow.py +++ b/homeassistant/components/transmission/config_flow.py @@ -15,6 +15,7 @@ from homeassistant.config_entries import ( ) from homeassistant.const import ( CONF_HOST, + CONF_NAME, CONF_PASSWORD, CONF_PATH, CONF_PORT, @@ -55,7 +56,6 @@ class TransmissionFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 MINOR_VERSION = 2 - _reauth_entry: ConfigEntry | None @staticmethod @callback @@ -63,7 +63,7 @@ class TransmissionFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> TransmissionOptionsFlowHandler: """Get the options flow for this handler.""" - return TransmissionOptionsFlowHandler(config_entry) + return TransmissionOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -100,9 +100,6 @@ class TransmissionFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -110,9 +107,9 @@ class TransmissionFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input = {**self._reauth_entry.data, **user_input} + user_input = {**reauth_entry.data, **user_input} try: await get_api(self.hass, user_input) @@ -121,15 +118,12 @@ class TransmissionFlowHandler(ConfigFlow, domain=DOMAIN): except (CannotConnect, UnknownError): errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input - ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=user_input) return self.async_show_form( description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], + CONF_NAME: reauth_entry.title, }, step_id="reauth_confirm", data_schema=vol.Schema( @@ -144,10 +138,6 @@ class TransmissionFlowHandler(ConfigFlow, domain=DOMAIN): class TransmissionOptionsFlowHandler(OptionsFlow): """Handle Transmission client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Transmission options flow.""" - self.config_entry = config_entry - async def async_step_init( self, 
user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/transmission/const.py b/homeassistant/components/transmission/const.py index 120918b24a2..c232f26cefd 100644 --- a/homeassistant/components/transmission/const.py +++ b/homeassistant/components/transmission/const.py @@ -40,6 +40,7 @@ STATE_ATTR_TORRENT_INFO = "torrent_info" ATTR_DELETE_DATA = "delete_data" ATTR_TORRENT = "torrent" +ATTR_DOWNLOAD_PATH = "download_path" SERVICE_ADD_TORRENT = "add_torrent" SERVICE_REMOVE_TORRENT = "remove_torrent" diff --git a/homeassistant/components/transmission/coordinator.py b/homeassistant/components/transmission/coordinator.py index e0930bd9e9e..b998ab6fbdd 100644 --- a/homeassistant/components/transmission/coordinator.py +++ b/homeassistant/components/transmission/coordinator.py @@ -102,7 +102,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): for torrent in current_completed_torrents: if torrent.id not in old_completed_torrents: self.hass.bus.fire( - EVENT_DOWNLOADED_TORRENT, {"name": torrent.name, "id": torrent.id} + EVENT_DOWNLOADED_TORRENT, + { + "name": torrent.name, + "id": torrent.id, + "download_path": torrent.download_dir, + }, ) self._completed_torrents = current_completed_torrents @@ -118,7 +123,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): for torrent in current_started_torrents: if torrent.id not in old_started_torrents: self.hass.bus.fire( - EVENT_STARTED_TORRENT, {"name": torrent.name, "id": torrent.id} + EVENT_STARTED_TORRENT, + { + "name": torrent.name, + "id": torrent.id, + "download_path": torrent.download_dir, + }, ) self._started_torrents = current_started_torrents @@ -130,7 +140,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): for torrent in self._all_torrents: if torrent.id not in current_torrents: self.hass.bus.fire( - EVENT_REMOVED_TORRENT, {"name": torrent.name, "id": torrent.id} + EVENT_REMOVED_TORRENT, + { + "name": torrent.name, + "id": torrent.id, + "download_path": torrent.download_dir, + }, ) self._all_torrents = self.torrents.copy() diff --git a/homeassistant/components/transmission/icons.json b/homeassistant/components/transmission/icons.json index 56ae46f933d..4458f510951 100644 --- a/homeassistant/components/transmission/icons.json +++ b/homeassistant/components/transmission/icons.json @@ -1,8 +1,16 @@ { "services": { - "add_torrent": "mdi:download", - "remove_torrent": "mdi:download-off", - "start_torrent": "mdi:play", - "stop_torrent": "mdi:stop" + "add_torrent": { + "service": "mdi:download" + }, + "remove_torrent": { + "service": "mdi:download-off" + }, + "start_torrent": { + "service": "mdi:play" + }, + "stop_torrent": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/transmission/sensor.py b/homeassistant/components/transmission/sensor.py index 737520adb5f..652f5d51fbb 100644 --- a/homeassistant/components/transmission/sensor.py +++ b/homeassistant/components/transmission/sensor.py @@ -83,7 +83,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="active_torrents", translation_key="active_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: coordinator.data.active_torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( coordinator=coordinator, key="active_torrents" @@ -92,7 +91,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] 
= ( TransmissionSensorEntityDescription( key="paused_torrents", translation_key="paused_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: coordinator.data.paused_torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( coordinator=coordinator, key="paused_torrents" @@ -101,7 +99,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="total_torrents", translation_key="total_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: coordinator.data.torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( coordinator=coordinator, key="total_torrents" @@ -110,7 +107,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="completed_torrents", translation_key="completed_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: len( _filter_torrents(coordinator.torrents, MODES["completed_torrents"]) ), @@ -121,7 +117,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="started_torrents", translation_key="started_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: len( _filter_torrents(coordinator.torrents, MODES["started_torrents"]) ), diff --git a/homeassistant/components/transmission/services.yaml b/homeassistant/components/transmission/services.yaml index 2d61bda442f..8f9aadd5009 100644 --- a/homeassistant/components/transmission/services.yaml +++ b/homeassistant/components/transmission/services.yaml @@ -9,6 +9,11 @@ add_torrent: example: http://releases.ubuntu.com/19.04/ubuntu-19.04-desktop-amd64.iso.torrent selector: text: + download_path: + required: false + example: "/path/to/download/directory" + selector: + text: remove_torrent: fields: diff --git a/homeassistant/components/transmission/strings.json b/homeassistant/components/transmission/strings.json index 20ae6ca723d..aabc5827a88 100644 --- a/homeassistant/components/transmission/strings.json +++ b/homeassistant/components/transmission/strings.json @@ -60,19 +60,24 @@ } }, "active_torrents": { - "name": "Active torrents" + "name": "Active torrents", + "unit_of_measurement": "torrents" }, "paused_torrents": { - "name": "Paused torrents" + "name": "Paused torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" }, "total_torrents": { - "name": "Total torrents" + "name": "Total torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" }, "completed_torrents": { - "name": "Completed torrents" + "name": "Completed torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" }, "started_torrents": { - "name": "Started torrents" + "name": "Started torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" } }, "switch": { @@ -96,6 +101,10 @@ "torrent": { "name": "Torrent", "description": "URL, magnet link or Base64 encoded file." + }, + "download_path": { + "name": "Download path", + "description": "Optional path to specify where the torrent should be downloaded. If not specified, the default download directory is used." 
} } }, diff --git a/homeassistant/components/transport_nsw/manifest.json b/homeassistant/components/transport_nsw/manifest.json index 9d535b99aa1..83c138a4f91 100644 --- a/homeassistant/components/transport_nsw/manifest.json +++ b/homeassistant/components/transport_nsw/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/transport_nsw", "iot_class": "cloud_polling", "loggers": ["TransportNSW"], + "quality_scale": "legacy", "requirements": ["PyTransportNSW==0.1.1"] } diff --git a/homeassistant/components/travisci/manifest.json b/homeassistant/components/travisci/manifest.json index e61a987c86f..be30cf8e1f9 100644 --- a/homeassistant/components/travisci/manifest.json +++ b/homeassistant/components/travisci/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/travisci", "iot_class": "cloud_polling", "loggers": ["travispy"], + "quality_scale": "legacy", "requirements": ["TravisPy==0.3.5"] } diff --git a/homeassistant/components/trend/binary_sensor.py b/homeassistant/components/trend/binary_sensor.py index 693c080e86e..9691ecf0744 100644 --- a/homeassistant/components/trend/binary_sensor.py +++ b/homeassistant/components/trend/binary_sensor.py @@ -199,11 +199,6 @@ class SensorTrend(BinarySensorEntity, RestoreEntity): if sensor_entity_id: self.entity_id = sensor_entity_id - @property - def is_on(self) -> bool | None: - """Return true if sensor is on.""" - return self._state - @property def extra_state_attributes(self) -> Mapping[str, Any]: """Return the state attributes of the sensor.""" @@ -232,10 +227,15 @@ class SensorTrend(BinarySensorEntity, RestoreEntity): state = new_state.attributes.get(self._attribute) else: state = new_state.state - if state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): + + if state in (STATE_UNKNOWN, STATE_UNAVAILABLE): + self._attr_available = False + else: + self._attr_available = True sample = (new_state.last_updated.timestamp(), float(state)) # type: ignore[arg-type] self.samples.append(sample) - self.async_schedule_update_ha_state(True) + + self.async_schedule_update_ha_state(True) except (ValueError, TypeError) as ex: _LOGGER.error(ex) @@ -247,9 +247,9 @@ class SensorTrend(BinarySensorEntity, RestoreEntity): if not (state := await self.async_get_last_state()): return - if state.state == STATE_UNKNOWN: + if state.state in {STATE_UNKNOWN, STATE_UNAVAILABLE}: return - self._state = state.state == STATE_ON + self._attr_is_on = state.state == STATE_ON async def async_update(self) -> None: """Get the latest data and update the states.""" @@ -266,13 +266,13 @@ class SensorTrend(BinarySensorEntity, RestoreEntity): await self.hass.async_add_executor_job(self._calculate_gradient) # Update state - self._state = ( + self._attr_is_on = ( abs(self._gradient) > abs(self._min_gradient) and math.copysign(self._gradient, self._min_gradient) == self._gradient ) if self._invert: - self._state = not self._state + self._attr_is_on = not self._attr_is_on def _calculate_gradient(self) -> None: """Compute the linear trend gradient of the current samples. 
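The trend binary_sensor hunk above switches from a hand-rolled is_on property backed by self._state to the stock _attr_is_on / _attr_available attributes, and it now skips state restore when the saved state was unknown or unavailable. A minimal sketch of that pattern follows, assuming only core Home Assistant helpers; the class name and the _handle_source_state method are hypothetical illustrations and are not part of this patch.

from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers.restore_state import RestoreEntity


class ExampleAttrStateSensor(BinarySensorEntity, RestoreEntity):
    """Hypothetical sensor using attribute-based state, as in the hunk above."""

    _attr_is_on = False

    async def async_added_to_hass(self) -> None:
        """Restore the previous on/off state, ignoring unknown/unavailable."""
        await super().async_added_to_hass()
        if (last := await self.async_get_last_state()) is None:
            return
        if last.state in {STATE_UNKNOWN, STATE_UNAVAILABLE}:
            return
        self._attr_is_on = last.state == STATE_ON

    def _handle_source_state(self, raw_state: str) -> None:
        """Mirror the source's availability instead of silently dropping bad samples."""
        if raw_state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
            self._attr_available = False
            return
        self._attr_available = True
        # ...collect the sample and schedule an update here, as the patch does...

Setting _attr_available this way lets the derived sensor show as unavailable whenever its source is, rather than freezing on the last computed trend.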
diff --git a/homeassistant/components/trend/icons.json b/homeassistant/components/trend/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/trend/icons.json +++ b/homeassistant/components/trend/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/trend/manifest.json b/homeassistant/components/trend/manifest.json index 110bab99e52..85012939fc1 100644 --- a/homeassistant/components/trend/manifest.json +++ b/homeassistant/components/trend/manifest.json @@ -7,5 +7,5 @@ "integration_type": "helper", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["numpy==1.26.0"] + "requirements": ["numpy==2.2.0"] } diff --git a/homeassistant/components/trend/strings.json b/homeassistant/components/trend/strings.json index 2fe0b35ee3c..fb70a6e7032 100644 --- a/homeassistant/components/trend/strings.json +++ b/homeassistant/components/trend/strings.json @@ -1,4 +1,5 @@ { + "title": "Trend", "services": { "reload": { "name": "[%key:common::action::reload%]", diff --git a/homeassistant/components/triggercmd/__init__.py b/homeassistant/components/triggercmd/__init__.py new file mode 100644 index 00000000000..f58b2b481d4 --- /dev/null +++ b/homeassistant/components/triggercmd/__init__.py @@ -0,0 +1,36 @@ +"""The TRIGGERcmd component.""" + +from __future__ import annotations + +from triggercmd import client, ha + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +from .const import CONF_TOKEN + +PLATFORMS = [ + Platform.SWITCH, +] + +type TriggercmdConfigEntry = ConfigEntry[ha.Hub] + + +async def async_setup_entry(hass: HomeAssistant, entry: TriggercmdConfigEntry) -> bool: + """Set up TRIGGERcmd from a config entry.""" + hub = ha.Hub(entry.data[CONF_TOKEN]) + + status_code = await client.async_connection_test(entry.data[CONF_TOKEN]) + if status_code != 200: + raise ConfigEntryNotReady + + entry.runtime_data = hub + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: TriggercmdConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/triggercmd/config_flow.py b/homeassistant/components/triggercmd/config_flow.py new file mode 100644 index 00000000000..fc02dd0b2fc --- /dev/null +++ b/homeassistant/components/triggercmd/config_flow.py @@ -0,0 +1,75 @@ +"""Config flow for TRIGGERcmd integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +import jwt +from triggercmd import TRIGGERcmdConnectionError, client +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .const import CONF_TOKEN, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +DATA_SCHEMA = vol.Schema({(CONF_TOKEN): str}) + + +async def validate_input(hass: HomeAssistant, data: dict) -> str: + """Validate the user input allows us to connect. + + Data has the keys from DATA_SCHEMA with values provided by the user. 
+ """ + if len(data[CONF_TOKEN]) < 100: + raise InvalidToken + + token_data = jwt.decode(data[CONF_TOKEN], options={"verify_signature": False}) + if not token_data["id"]: + raise InvalidToken + + try: + await client.async_connection_test(data[CONF_TOKEN]) + except Exception as e: + raise TRIGGERcmdConnectionError from e + else: + return token_data["id"] + + +class TriggerCMDConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors = {} + if user_input is not None: + try: + identifier = await validate_input(self.hass, user_input) + except InvalidToken: + errors[CONF_TOKEN] = "invalid_token" + except TRIGGERcmdConnectionError: + errors["base"] = "cannot_connect" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(identifier) + self._abort_if_unique_id_configured() + + return self.async_create_entry(title="TRIGGERcmd Hub", data=user_input) + + return self.async_show_form( + step_id="user", data_schema=DATA_SCHEMA, errors=errors + ) + + +class InvalidToken(HomeAssistantError): + """Invalid token.""" diff --git a/homeassistant/components/triggercmd/const.py b/homeassistant/components/triggercmd/const.py new file mode 100644 index 00000000000..0fc15b2b806 --- /dev/null +++ b/homeassistant/components/triggercmd/const.py @@ -0,0 +1,4 @@ +"""Constants for the TRIGGERcmd integration.""" + +DOMAIN = "triggercmd" +CONF_TOKEN = "token" diff --git a/homeassistant/components/triggercmd/manifest.json b/homeassistant/components/triggercmd/manifest.json new file mode 100644 index 00000000000..a0ee4eaf63e --- /dev/null +++ b/homeassistant/components/triggercmd/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "triggercmd", + "name": "TRIGGERcmd", + "codeowners": ["@rvmey"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/triggercmd", + "integration_type": "hub", + "iot_class": "cloud_polling", + "requirements": ["triggercmd==0.0.27"] +} diff --git a/homeassistant/components/triggercmd/strings.json b/homeassistant/components/triggercmd/strings.json new file mode 100644 index 00000000000..6725b92f59f --- /dev/null +++ b/homeassistant/components/triggercmd/strings.json @@ -0,0 +1,23 @@ +{ + "config": { + "step": { + "user": { + "data": { + "token": "[%key:common::config_flow::data::access_token%]" + }, + "data_description": { + "token": "The token from the TRIGGERcmd instructions page" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "invalid_token": "Invalid token", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/components/triggercmd/switch.py b/homeassistant/components/triggercmd/switch.py new file mode 100644 index 00000000000..94566fe301d --- /dev/null +++ b/homeassistant/components/triggercmd/switch.py @@ -0,0 +1,85 @@ +"""Platform for switch integration.""" + +from __future__ import annotations + +import logging + +from triggercmd import client, ha + +from homeassistant.components.switch import SwitchEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo 
+from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TriggercmdConfigEntry +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: TriggercmdConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Add switch for passed config_entry in HA.""" + hub = config_entry.runtime_data + async_add_entities(TRIGGERcmdSwitch(trigger) for trigger in hub.triggers) + + +class TRIGGERcmdSwitch(SwitchEntity): + """Representation of a Switch.""" + + _attr_has_entity_name = True + _attr_assumed_state = True + _attr_should_poll = False + + computer_id: str + trigger_id: str + firmware_version: str + model: str + hub: ha.Hub + + def __init__(self, trigger: TRIGGERcmdSwitch) -> None: + """Initialize the switch.""" + self._switch = trigger + self._attr_is_on = False + self._attr_unique_id = f"{trigger.computer_id}.{trigger.trigger_id}" + self._attr_name = trigger.trigger_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, trigger.computer_id)}, + name=trigger.computer_id.capitalize(), + sw_version=trigger.firmware_version, + model=trigger.model, + manufacturer=trigger.hub.manufacturer, + ) + + @property + def available(self) -> bool: + """Return True if hub is available.""" + return self._switch.hub.online + + async def async_turn_on(self, **kwargs): + """Turn the switch on.""" + await self.trigger("on") + self._attr_is_on = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs): + """Turn the switch off.""" + await self.trigger("off") + self._attr_is_on = False + self.async_write_ha_state() + + async def trigger(self, params: str): + """Trigger the command.""" + r = await client.async_trigger( + self._switch.hub.token, + { + "computer": self._switch.computer_id, + "trigger": self._switch.trigger_id, + "params": params, + "sender": "Home Assistant", + }, + ) + _LOGGER.debug("TRIGGERcmd trigger response: %s", r.json()) diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index 583db4472d4..e7d1091719b 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -5,7 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Mapping from datetime import datetime -from functools import cached_property, partial +from functools import partial import hashlib from http import HTTPStatus import io @@ -13,6 +13,7 @@ import logging import mimetypes import os import re +import secrets import subprocess import tempfile from typing import Any, Final, TypedDict, final @@ -20,6 +21,7 @@ from typing import Any, Final, TypedDict, final from aiohttp import web import mutagen from mutagen.id3 import ID3, TextFrame as ID3Text +from propcache import cached_property import voluptuous as vol from homeassistant.components import ffmpeg, websocket_api @@ -57,6 +59,7 @@ from .const import ( CONF_CACHE, CONF_CACHE_DIR, CONF_TIME_MEMORY, + DATA_COMPONENT, DATA_TTS_MANAGER, DEFAULT_CACHE, DEFAULT_CACHE_DIR, @@ -77,6 +80,7 @@ __all__ = [ "ATTR_PREFERRED_FORMAT", "ATTR_PREFERRED_SAMPLE_RATE", "ATTR_PREFERRED_SAMPLE_CHANNELS", + "ATTR_PREFERRED_SAMPLE_BYTES", "CONF_LANG", "DEFAULT_CACHE_DIR", "generate_media_source_id", @@ -95,6 +99,7 @@ ATTR_AUDIO_OUTPUT = "audio_output" ATTR_PREFERRED_FORMAT = "preferred_format" ATTR_PREFERRED_SAMPLE_RATE = "preferred_sample_rate" ATTR_PREFERRED_SAMPLE_CHANNELS = "preferred_sample_channels" +ATTR_PREFERRED_SAMPLE_BYTES = 
"preferred_sample_bytes" ATTR_MEDIA_PLAYER_ENTITY_ID = "media_player_entity_id" ATTR_VOICE = "voice" @@ -103,6 +108,7 @@ _PREFFERED_FORMAT_OPTIONS: Final[set[str]] = { ATTR_PREFERRED_FORMAT, ATTR_PREFERRED_SAMPLE_RATE, ATTR_PREFERRED_SAMPLE_CHANNELS, + ATTR_PREFERRED_SAMPLE_BYTES, } CONF_LANG = "language" @@ -134,19 +140,16 @@ def async_default_engine(hass: HomeAssistant) -> str | None: Returns None if no engines found. """ - component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] - manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - default_entity_id: str | None = None - for entity in component.entities: + for entity in hass.data[DATA_COMPONENT].entities: if entity.platform and entity.platform.platform_name == "cloud": return entity.entity_id if default_entity_id is None: default_entity_id = entity.entity_id - return default_entity_id or next(iter(manager.providers), None) + return default_entity_id or next(iter(hass.data[DATA_TTS_MANAGER].providers), None) @callback @@ -155,11 +158,11 @@ def async_resolve_engine(hass: HomeAssistant, engine: str | None) -> str | None: Returns None if no engines found or invalid engine passed in. """ - component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] - manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - if engine is not None: - if not component.get_entity(engine) and engine not in manager.providers: + if ( + not hass.data[DATA_COMPONENT].get_entity(engine) + and engine not in hass.data[DATA_TTS_MANAGER].providers + ): return None return engine @@ -176,10 +179,8 @@ async def async_support_options( if (engine_instance := get_engine_instance(hass, engine)) is None: raise HomeAssistantError(f"Provider {engine} not found") - manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - try: - manager.process_options(engine_instance, language, options) + hass.data[DATA_TTS_MANAGER].process_options(engine_instance, language, options) except HomeAssistantError: return False @@ -191,8 +192,7 @@ async def async_get_media_source_audio( media_source_id: str, ) -> tuple[str, bytes]: """Get TTS audio as extension, data.""" - manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - return await manager.async_get_tts_audio( + return await hass.data[DATA_TTS_MANAGER].async_get_tts_audio( **media_source_id_to_kwargs(media_source_id), ) @@ -202,14 +202,11 @@ def async_get_text_to_speech_languages(hass: HomeAssistant) -> set[str]: """Return a set with the union of languages supported by tts engines.""" languages = set() - component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] - manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - - for entity in component.entities: + for entity in hass.data[DATA_COMPONENT].entities: for language_tag in entity.supported_languages: languages.add(language_tag) - for tts_engine in manager.providers.values(): + for tts_engine in hass.data[DATA_TTS_MANAGER].providers.values(): for language_tag in tts_engine.supported_languages: languages.add(language_tag) @@ -223,6 +220,7 @@ async def async_convert_audio( to_extension: str, to_sample_rate: int | None = None, to_sample_channels: int | None = None, + to_sample_bytes: int | None = None, ) -> bytes: """Convert audio to a preferred format using ffmpeg.""" ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) @@ -234,6 +232,7 @@ async def async_convert_audio( to_extension, to_sample_rate=to_sample_rate, to_sample_channels=to_sample_channels, + to_sample_bytes=to_sample_bytes, ) ) @@ -245,6 +244,7 @@ def _convert_audio( to_extension: str, to_sample_rate: int | None = 
None, to_sample_channels: int | None = None, + to_sample_bytes: int | None = None, ) -> bytes: """Convert audio to a preferred format using ffmpeg.""" @@ -277,6 +277,10 @@ def _convert_audio( # Max quality for MP3 command.extend(["-q:a", "0"]) + if to_sample_bytes == 2: + # 16-bit samples + command.extend(["-sample_fmt", "s16"]) + command.append(output_file.name) with subprocess.Popen( @@ -315,7 +319,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return False hass.data[DATA_TTS_MANAGER] = tts - component = hass.data[DOMAIN] = EntityComponent[TextToSpeechEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[TextToSpeechEntity]( _LOGGER, DOMAIN, hass ) @@ -363,14 +367,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) CACHED_PROPERTIES_WITH_ATTR_ = { @@ -539,6 +541,10 @@ class SpeechManager: self.file_cache: dict[str, str] = {} self.mem_cache: dict[str, TTSCache] = {} + # filename <-> token + self.filename_to_token: dict[str, str] = {} + self.token_to_filename: dict[str, str] = {} + def _init_cache(self) -> dict[str, str]: """Init cache folder and fetch files.""" try: @@ -655,7 +661,17 @@ class SpeechManager: engine_instance, cache_key, message, use_cache, language, options ) - return f"/api/tts_proxy/{filename}" + # Use a randomly generated token instead of exposing the filename + token = self.filename_to_token.get(filename) + if not token: + # Keep extension (.mp3, etc.) 
+ token = secrets.token_urlsafe(16) + os.path.splitext(filename)[1] + + # Map token <-> filename + self.filename_to_token[filename] = token + self.token_to_filename[token] = filename + + return f"/api/tts_proxy/{token}" async def async_get_tts_audio( self, @@ -738,11 +754,25 @@ class SpeechManager: else: sample_rate = options.pop(ATTR_PREFERRED_SAMPLE_RATE, None) + if sample_rate is not None: + sample_rate = int(sample_rate) + if ATTR_PREFERRED_SAMPLE_CHANNELS in supported_options: sample_channels = options.get(ATTR_PREFERRED_SAMPLE_CHANNELS) else: sample_channels = options.pop(ATTR_PREFERRED_SAMPLE_CHANNELS, None) + if sample_channels is not None: + sample_channels = int(sample_channels) + + if ATTR_PREFERRED_SAMPLE_BYTES in supported_options: + sample_bytes = options.get(ATTR_PREFERRED_SAMPLE_BYTES) + else: + sample_bytes = options.pop(ATTR_PREFERRED_SAMPLE_BYTES, None) + + if sample_bytes is not None: + sample_bytes = int(sample_bytes) + async def get_tts_data() -> str: """Handle data available.""" if engine_instance.name is None or engine_instance.name is UNDEFINED: @@ -769,6 +799,7 @@ class SpeechManager: (final_extension != extension) or (sample_rate is not None) or (sample_channels is not None) + or (sample_bytes is not None) ) if needs_conversion: @@ -779,6 +810,7 @@ class SpeechManager: to_extension=final_extension, to_sample_rate=sample_rate, to_sample_channels=sample_channels, + to_sample_bytes=sample_bytes, ) # Create file infos @@ -893,11 +925,15 @@ class SpeechManager: ), ) - async def async_read_tts(self, filename: str) -> tuple[str | None, bytes]: + async def async_read_tts(self, token: str) -> tuple[str | None, bytes]: """Read a voice file and return binary. This method is a coroutine. """ + filename = self.token_to_filename.get(token) + if not filename: + raise HomeAssistantError(f"{token} was not recognized!") + if not (record := _RE_VOICE_FILE.match(filename.lower())) and not ( record := _RE_LEGACY_VOICE_FILE.match(filename.lower()) ): @@ -1059,6 +1095,7 @@ class TextToSpeechView(HomeAssistantView): async def get(self, request: web.Request, filename: str) -> web.Response: """Start a get request.""" try: + # filename is actually token, but we keep its name for compatibility content, data = await self.tts.async_read_tts(filename) except HomeAssistantError as err: _LOGGER.error("Error on load tts: %s", err) @@ -1079,15 +1116,13 @@ def websocket_list_engines( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """List text to speech engines and, optionally, if they support a given language.""" - component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] - manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - country = msg.get("country") language = msg.get("language") providers = [] provider_info: dict[str, Any] + entity_domains: set[str] = set() - for entity in component.entities: + for entity in hass.data[DATA_COMPONENT].entities: provider_info = { "engine_id": entity.entity_id, "supported_languages": entity.supported_languages, @@ -1097,15 +1132,20 @@ def websocket_list_engines( language, entity.supported_languages, country ) providers.append(provider_info) - for engine_id, provider in manager.providers.items(): + if entity.platform: + entity_domains.add(entity.platform.platform_name) + for engine_id, provider in hass.data[DATA_TTS_MANAGER].providers.items(): provider_info = { "engine_id": engine_id, + "name": provider.name, "supported_languages": provider.supported_languages, } if language: provider_info["supported_languages"] = 
language_util.matches( language, provider.supported_languages, country ) + if engine_id in entity_domains: + provider_info["deprecated"] = True providers.append(provider_info) connection.send_message( @@ -1124,17 +1164,19 @@ def websocket_get_engine( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Get text to speech engine info.""" - component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] - manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - engine_id = msg["engine_id"] provider_info: dict[str, Any] provider: TextToSpeechEntity | Provider | None = next( - (entity for entity in component.entities if entity.entity_id == engine_id), None + ( + entity + for entity in hass.data[DATA_COMPONENT].entities + if entity.entity_id == engine_id + ), + None, ) if not provider: - provider = manager.providers.get(engine_id) + provider = hass.data[DATA_TTS_MANAGER].providers.get(engine_id) if not provider: connection.send_error( @@ -1148,6 +1190,8 @@ def websocket_get_engine( "engine_id": engine_id, "supported_languages": provider.supported_languages, } + if isinstance(provider, Provider): + provider_info["name"] = provider.name connection.send_message( websocket_api.result_message(msg["id"], {"provider": provider_info}) diff --git a/homeassistant/components/tts/const.py b/homeassistant/components/tts/const.py index ab22a44cab6..42c7d710ad4 100644 --- a/homeassistant/components/tts/const.py +++ b/homeassistant/components/tts/const.py @@ -1,5 +1,16 @@ """Text-to-speech constants.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from . import SpeechManager, TextToSpeechEntity + ATTR_CACHE = "cache" ATTR_LANGUAGE = "language" ATTR_MESSAGE = "message" @@ -15,7 +26,8 @@ DEFAULT_CACHE_DIR = "tts" DEFAULT_TIME_MEMORY = 300 DOMAIN = "tts" +DATA_COMPONENT: HassKey[EntityComponent[TextToSpeechEntity]] = HassKey(DOMAIN) -DATA_TTS_MANAGER = "tts_manager" +DATA_TTS_MANAGER: HassKey[SpeechManager] = HassKey("tts_manager") type TtsAudioType = tuple[str | None, bytes | None] diff --git a/homeassistant/components/tts/helper.py b/homeassistant/components/tts/helper.py index 4b5ef168550..614d848ea6a 100644 --- a/homeassistant/components/tts/helper.py +++ b/homeassistant/components/tts/helper.py @@ -5,12 +5,11 @@ from __future__ import annotations from typing import TYPE_CHECKING from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_component import EntityComponent -from .const import DATA_TTS_MANAGER, DOMAIN +from .const import DATA_COMPONENT, DATA_TTS_MANAGER if TYPE_CHECKING: - from . import SpeechManager, TextToSpeechEntity + from . 
import TextToSpeechEntity from .legacy import Provider @@ -18,10 +17,7 @@ def get_engine_instance( hass: HomeAssistant, engine: str ) -> TextToSpeechEntity | Provider | None: """Get engine instance.""" - component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] - - if entity := component.get_entity(engine): + if entity := hass.data[DATA_COMPONENT].get_entity(engine): return entity - manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - return manager.providers.get(engine) + return hass.data[DATA_TTS_MANAGER].providers.get(engine) diff --git a/homeassistant/components/tts/icons.json b/homeassistant/components/tts/icons.json index cda5f877b25..8cfae7cc8e9 100644 --- a/homeassistant/components/tts/icons.json +++ b/homeassistant/components/tts/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "clear_cache": "mdi:delete", - "say": "mdi:speaker-message", - "speak": "mdi:speaker-message" + "clear_cache": { + "service": "mdi:delete" + }, + "say": { + "service": "mdi:speaker-message" + }, + "speak": { + "service": "mdi:speaker-message" + } } } diff --git a/homeassistant/components/tts/legacy.py b/homeassistant/components/tts/legacy.py index e36a1227603..54ea89cb674 100644 --- a/homeassistant/components/tts/legacy.py +++ b/homeassistant/components/tts/legacy.py @@ -57,9 +57,6 @@ from .const import ( from .media_source import generate_media_source_id from .models import Voice -if TYPE_CHECKING: - from . import SpeechManager - _LOGGER = logging.getLogger(__name__) CONF_SERVICE_NAME = "service_name" @@ -105,8 +102,6 @@ async def async_setup_legacy( hass: HomeAssistant, config: ConfigType ) -> list[Coroutine[Any, Any, None]]: """Set up legacy text-to-speech providers.""" - tts: SpeechManager = hass.data[DATA_TTS_MANAGER] - # Load service descriptions from tts/services.yaml services_yaml = Path(__file__).parent / "services.yaml" services_dict = await hass.async_add_executor_job( @@ -147,7 +142,9 @@ async def async_setup_legacy( _LOGGER.error("Error setting up platform: %s", p_type) return - tts.async_register_legacy_engine(p_type, provider, p_config) + hass.data[DATA_TTS_MANAGER].async_register_legacy_engine( + p_type, provider, p_config + ) except Exception: _LOGGER.exception("Error setting up platform: %s", p_type) return diff --git a/homeassistant/components/tts/media_source.py b/homeassistant/components/tts/media_source.py index a907fc485c9..4f1fa59f001 100644 --- a/homeassistant/components/tts/media_source.py +++ b/homeassistant/components/tts/media_source.py @@ -2,8 +2,9 @@ from __future__ import annotations +import json import mimetypes -from typing import TYPE_CHECKING, TypedDict +from typing import TypedDict from yarl import URL @@ -18,13 +19,11 @@ from homeassistant.components.media_source import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.entity_component import EntityComponent -from .const import DATA_TTS_MANAGER, DOMAIN +from .const import DATA_COMPONENT, DATA_TTS_MANAGER, DOMAIN from .helper import get_engine_instance -if TYPE_CHECKING: - from . import SpeechManager, TextToSpeechEntity +URL_QUERY_TTS_OPTIONS = "tts_options" async def async_get_media_source(hass: HomeAssistant) -> TTSMediaSource: @@ -44,8 +43,6 @@ def generate_media_source_id( """Generate a media source ID for text-to-speech.""" from . 
import async_resolve_engine # pylint: disable=import-outside-toplevel - manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - if (engine := async_resolve_engine(hass, engine)) is None: raise HomeAssistantError("Invalid TTS provider selected") @@ -53,7 +50,7 @@ def generate_media_source_id( # We raise above if the engine is not resolved, so engine_instance can't be None assert engine_instance is not None - manager.process_options(engine_instance, language, options) + hass.data[DATA_TTS_MANAGER].process_options(engine_instance, language, options) params = { "message": message, } @@ -61,8 +58,7 @@ def generate_media_source_id( params["cache"] = "true" if cache else "false" if language is not None: params["language"] = language - if options is not None: - params.update(options) + params[URL_QUERY_TTS_OPTIONS] = json.dumps(options, separators=(",", ":")) return ms_generate_media_source_id( DOMAIN, @@ -84,19 +80,28 @@ class MediaSourceOptions(TypedDict): def media_source_id_to_kwargs(media_source_id: str) -> MediaSourceOptions: """Turn a media source ID into options.""" parsed = URL(media_source_id) + if URL_QUERY_TTS_OPTIONS in parsed.query: + try: + options = json.loads(parsed.query[URL_QUERY_TTS_OPTIONS]) + except json.JSONDecodeError as err: + raise Unresolvable(f"Invalid TTS options: {err.msg}") from err + else: + options = { + k: v + for k, v in parsed.query.items() + if k not in ("message", "language", "cache") + } if "message" not in parsed.query: raise Unresolvable("No message specified.") - - options = dict(parsed.query) kwargs: MediaSourceOptions = { "engine": parsed.name, - "message": options.pop("message"), - "language": options.pop("language", None), + "message": parsed.query["message"], + "language": parsed.query.get("language"), "options": options, "cache": None, } - if "cache" in options: - kwargs["cache"] = options.pop("cache") == "true" + if "cache" in parsed.query: + kwargs["cache"] = parsed.query["cache"] == "true" return kwargs @@ -113,12 +118,12 @@ class TTSMediaSource(MediaSource): async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: """Resolve media to a url.""" - manager: SpeechManager = self.hass.data[DATA_TTS_MANAGER] - try: - url = await manager.async_get_url_path( + url = await self.hass.data[DATA_TTS_MANAGER].async_get_url_path( **media_source_id_to_kwargs(item.identifier) ) + except Unresolvable: + raise except HomeAssistantError as err: raise Unresolvable(str(err)) from err @@ -136,10 +141,12 @@ class TTSMediaSource(MediaSource): return self._engine_item(engine, params) # Root. List providers. 
- manager: SpeechManager = self.hass.data[DATA_TTS_MANAGER] - component: EntityComponent[TextToSpeechEntity] = self.hass.data[DOMAIN] - children = [self._engine_item(engine) for engine in manager.providers] + [ - self._engine_item(entity.entity_id) for entity in component.entities + children = [ + self._engine_item(engine) + for engine in self.hass.data[DATA_TTS_MANAGER].providers + ] + [ + self._engine_item(entity.entity_id) + for entity in self.hass.data[DATA_COMPONENT].entities ] return BrowseMediaSource( domain=DOMAIN, diff --git a/homeassistant/components/tuya/__init__.py b/homeassistant/components/tuya/__init__.py index 47143f3595c..c8a639cd239 100644 --- a/homeassistant/components/tuya/__init__.py +++ b/homeassistant/components/tuya/__init__.py @@ -146,14 +146,21 @@ class DeviceListener(SharingDeviceListener): self.hass = hass self.manager = manager - def update_device(self, device: CustomerDevice) -> None: + def update_device( + self, device: CustomerDevice, updated_status_properties: list[str] | None + ) -> None: """Update device status.""" LOGGER.debug( - "Received update for device %s: %s", + "Received update for device %s: %s (updated properties: %s)", device.id, self.manager.device_map[device.id].status, + updated_status_properties, + ) + dispatcher_send( + self.hass, + f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device.id}", + updated_status_properties, ) - dispatcher_send(self.hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device.id}") def add_device(self, device: CustomerDevice) -> None: """Add device added listener.""" diff --git a/homeassistant/components/tuya/alarm_control_panel.py b/homeassistant/components/tuya/alarm_control_panel.py index 29da625a990..56bccc73581 100644 --- a/homeassistant/components/tuya/alarm_control_panel.py +++ b/homeassistant/components/tuya/alarm_control_panel.py @@ -10,20 +10,15 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityDescription, AlarmControlPanelEntityFeature, -) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import TuyaConfigEntry -from .base import TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode, DPType +from .entity import TuyaEntity class Mode(StrEnum): @@ -35,11 +30,11 @@ class Mode(StrEnum): SOS = "sos" -STATE_MAPPING: dict[str, str] = { - Mode.DISARMED: STATE_ALARM_DISARMED, - Mode.ARM: STATE_ALARM_ARMED_AWAY, - Mode.HOME: STATE_ALARM_ARMED_HOME, - Mode.SOS: STATE_ALARM_TRIGGERED, +STATE_MAPPING: dict[str, AlarmControlPanelState] = { + Mode.DISARMED: AlarmControlPanelState.DISARMED, + Mode.ARM: AlarmControlPanelState.ARMED_AWAY, + Mode.HOME: AlarmControlPanelState.ARMED_HOME, + Mode.SOS: AlarmControlPanelState.TRIGGERED, } @@ -115,7 +110,7 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity): self._attr_supported_features |= AlarmControlPanelEntityFeature.TRIGGER @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" if not (status := self.device.status.get(self.entity_description.key)): return None diff --git a/homeassistant/components/tuya/binary_sensor.py b/homeassistant/components/tuya/binary_sensor.py index 2d6d9b478c8..12661a26fd1 100644 --- a/homeassistant/components/tuya/binary_sensor.py +++ b/homeassistant/components/tuya/binary_sensor.py @@ -17,8 +17,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TuyaConfigEntry -from .base import TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode +from .entity import TuyaEntity @dataclass(frozen=True) @@ -150,8 +150,8 @@ BINARY_SENSORS: dict[str, tuple[TuyaBinarySensorEntityDescription, ...]] = { "hps": ( TuyaBinarySensorEntityDescription( key=DPCode.PRESENCE_STATE, - device_class=BinarySensorDeviceClass.MOTION, - on_value="presence", + device_class=BinarySensorDeviceClass.OCCUPANCY, + on_value={"presence", "small_move", "large_move", "peaceful"}, ), ), # Formaldehyde Detector diff --git a/homeassistant/components/tuya/button.py b/homeassistant/components/tuya/button.py index f62bba928b4..f77fed776b0 100644 --- a/homeassistant/components/tuya/button.py +++ b/homeassistant/components/tuya/button.py @@ -11,8 +11,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TuyaConfigEntry -from .base import TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode +from .entity import TuyaEntity # All descriptions can be found here. # https://developer.tuya.com/en/docs/iot/standarddescription?id=K9i5ql6waswzq diff --git a/homeassistant/components/tuya/camera.py b/homeassistant/components/tuya/camera.py index f3913611b07..9e66531dd51 100644 --- a/homeassistant/components/tuya/camera.py +++ b/homeassistant/components/tuya/camera.py @@ -11,8 +11,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import TuyaConfigEntry -from .base import TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode +from .entity import TuyaEntity # All descriptions can be found here: # https://developer.tuya.com/en/docs/iot/standarddescription?id=K9i5ql6waswzq diff --git a/homeassistant/components/tuya/climate.py b/homeassistant/components/tuya/climate.py index d47c71532a4..62aa29494e9 100644 --- a/homeassistant/components/tuya/climate.py +++ b/homeassistant/components/tuya/climate.py @@ -24,8 +24,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TuyaConfigEntry -from .base import IntegerTypeData, TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode, DPType +from .entity import IntegerTypeData, TuyaEntity TUYA_HVAC_TO_HA = { "auto": HVACMode.HEAT_COOL, @@ -120,7 +120,6 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity): _set_temperature: IntegerTypeData | None = None entity_description: TuyaClimateEntityDescription _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tuya/config_flow.py b/homeassistant/components/tuya/config_flow.py index bdef321de7a..30d04eb61e2 100644 --- a/homeassistant/components/tuya/config_flow.py +++ b/homeassistant/components/tuya/config_flow.py @@ -8,7 +8,7 @@ from typing import Any from tuya_sharing import LoginControl import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.helpers import selector from .const import ( @@ -32,7 +32,6 @@ class TuyaConfigFlow(ConfigFlow, domain=DOMAIN): __user_code: str __qr_code: str - __reauth_entry: ConfigEntry | None = None def __init__(self) -> None: """Initialize the config flow.""" @@ -135,9 +134,9 @@ class TuyaConfigFlow(ConfigFlow, domain=DOMAIN): CONF_ENDPOINT: info[CONF_ENDPOINT], } - if self.__reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self.__reauth_entry, + self._get_reauth_entry(), data=entry_data, ) @@ -146,16 +145,12 @@ class TuyaConfigFlow(ConfigFlow, domain=DOMAIN): data=entry_data, ) - async def async_step_reauth(self, _: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle initiation of re-authentication with Tuya.""" - self.__reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - - if self.__reauth_entry and CONF_USER_CODE in self.__reauth_entry.data: - success, _ = await self.__async_get_qr_code( - self.__reauth_entry.data[CONF_USER_CODE] - ) + if CONF_USER_CODE in entry_data: + success, _ = await self.__async_get_qr_code(entry_data[CONF_USER_CODE]) if success: return await self.async_step_scan() diff --git a/homeassistant/components/tuya/const.py b/homeassistant/components/tuya/const.py index 55af95f0d34..08bdef474ef 100644 --- a/homeassistant/components/tuya/const.py +++ b/homeassistant/components/tuya/const.py @@ -96,6 +96,7 @@ class DPCode(StrEnum): """ AIR_QUALITY = "air_quality" + AIR_QUALITY_INDEX = "air_quality_index" ALARM_SWITCH = "alarm_switch" # Alarm switch ALARM_TIME = "alarm_time" # Alarm time ALARM_VOLUME = "alarm_volume" # Alarm volume @@ -325,6 +326,7 @@ class DPCode(StrEnum): SWITCH_USB6 = "switch_usb6" # USB 6 SWITCH_VERTICAL = "switch_vertical" # Vertical swing flap switch SWITCH_VOICE 
= "switch_voice" # Voice switch + TARGET_DIS_CLOSEST = "target_dis_closest" # Closest target distance TEMP = "temp" # Temperature setting TEMP_BOILING_C = "temp_boiling_c" TEMP_BOILING_F = "temp_boiling_f" diff --git a/homeassistant/components/tuya/cover.py b/homeassistant/components/tuya/cover.py index e92c6f5c5f2..9c3269c27f2 100644 --- a/homeassistant/components/tuya/cover.py +++ b/homeassistant/components/tuya/cover.py @@ -20,8 +20,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TuyaConfigEntry -from .base import IntegerTypeData, TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode, DPType +from .entity import IntegerTypeData, TuyaEntity @dataclass(frozen=True) diff --git a/homeassistant/components/tuya/base.py b/homeassistant/components/tuya/entity.py similarity index 90% rename from homeassistant/components/tuya/base.py rename to homeassistant/components/tuya/entity.py index 99d81848a91..cc258560067 100644 --- a/homeassistant/components/tuya/base.py +++ b/homeassistant/components/tuya/entity.py @@ -17,6 +17,17 @@ from homeassistant.helpers.entity import Entity from .const import DOMAIN, LOGGER, TUYA_HA_SIGNAL_UPDATE_ENTITY, DPCode, DPType from .util import remap_value +_DPTYPE_MAPPING: dict[str, DPType] = { + "Bitmap": DPType.RAW, + "bitmap": DPType.RAW, + "bool": DPType.BOOLEAN, + "enum": DPType.ENUM, + "json": DPType.JSON, + "raw": DPType.RAW, + "string": DPType.STRING, + "value": DPType.INTEGER, +} + @dataclass class IntegerTypeData: @@ -256,7 +267,13 @@ class TuyaEntity(Entity): order = ["function", "status_range"] for key in order: if dpcode in getattr(self.device, key): - return DPType(getattr(self.device, key)[dpcode].type) + current_type = getattr(self.device, key)[dpcode].type + try: + return DPType(current_type) + except ValueError: + # Sometimes, we get ill-formed DPTypes from the cloud, + # this fixes them and maps them to the correct DPType. + return _DPTYPE_MAPPING.get(current_type) return None @@ -266,10 +283,15 @@ class TuyaEntity(Entity): async_dispatcher_connect( self.hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{self.device.id}", - self.async_write_ha_state, + self._handle_state_update, ) ) + async def _handle_state_update( + self, updated_status_properties: list[str] | None + ) -> None: + self.async_write_ha_state() + def _send_command(self, commands: list[dict[str, Any]]) -> None: """Send command to the device.""" LOGGER.debug("Sending commands for device %s: %s", self.device.id, commands) diff --git a/homeassistant/components/tuya/fan.py b/homeassistant/components/tuya/fan.py index 01a7ccf5083..ffab9efdde8 100644 --- a/homeassistant/components/tuya/fan.py +++ b/homeassistant/components/tuya/fan.py @@ -21,8 +21,8 @@ from homeassistant.util.percentage import ( ) from . 
import TuyaConfigEntry -from .base import EnumTypeData, IntegerTypeData, TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode, DPType +from .entity import EnumTypeData, IntegerTypeData, TuyaEntity TUYA_SUPPORT_TYPE = { "fs", # Fan @@ -66,7 +66,6 @@ class TuyaFanEntity(TuyaEntity, FanEntity): _speeds: EnumTypeData | None = None _switch: DPCode | None = None _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/tuya/humidifier.py b/homeassistant/components/tuya/humidifier.py index 3d16b0dfbbb..cb872d67719 100644 --- a/homeassistant/components/tuya/humidifier.py +++ b/homeassistant/components/tuya/humidifier.py @@ -17,8 +17,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TuyaConfigEntry -from .base import IntegerTypeData, TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode, DPType +from .entity import IntegerTypeData, TuyaEntity @dataclass(frozen=True) diff --git a/homeassistant/components/tuya/icons.json b/homeassistant/components/tuya/icons.json index 48ae61f36fd..e28371f2b3d 100644 --- a/homeassistant/components/tuya/icons.json +++ b/homeassistant/components/tuya/icons.json @@ -236,6 +236,9 @@ }, "air_quality": { "default": "mdi:air-filter" + }, + "air_quality_index": { + "default": "mdi:air-filter" } }, "switch": { diff --git a/homeassistant/components/tuya/light.py b/homeassistant/components/tuya/light.py index 0c07eb05aac..d7dffc16b58 100644 --- a/homeassistant/components/tuya/light.py +++ b/homeassistant/components/tuya/light.py @@ -10,7 +10,7 @@ from tuya_sharing import CustomerDevice, Manager from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -21,10 +21,11 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from . 
import TuyaConfigEntry -from .base import IntegerTypeData, TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode, DPType, WorkMode +from .entity import IntegerTypeData, TuyaEntity from .util import remap_value @@ -49,6 +50,9 @@ DEFAULT_COLOR_TYPE_DATA_V2 = ColorTypeData( v_type=IntegerTypeData(DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=1000, step=1), ) +MAX_MIREDS = 500 # 2000 K +MIN_MIREDS = 153 # 6500 K + @dataclass(frozen=True) class TuyaLightEntityDescription(LightEntityDescription): @@ -457,6 +461,8 @@ class TuyaLightEntity(TuyaEntity, LightEntity): _color_mode: DPCode | None = None _color_temp: IntegerTypeData | None = None _fixed_color_mode: ColorMode | None = None + _attr_min_color_temp_kelvin = 2000 # 500 Mireds + _attr_max_color_temp_kelvin = 6500 # 153 Mireds def __init__( self, @@ -532,7 +538,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity): """Turn on or control the light.""" commands = [{"code": self.entity_description.key, "value": True}] - if self._color_temp and ATTR_COLOR_TEMP in kwargs: + if self._color_temp and ATTR_COLOR_TEMP_KELVIN in kwargs: if self._color_mode_dpcode: commands += [ { @@ -546,9 +552,11 @@ class TuyaLightEntity(TuyaEntity, LightEntity): "code": self._color_temp.dpcode, "value": round( self._color_temp.remap_value_from( - kwargs[ATTR_COLOR_TEMP], - self.min_mireds, - self.max_mireds, + color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ), + MIN_MIREDS, + MAX_MIREDS, reverse=True, ) ), @@ -560,7 +568,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity): or ( ATTR_BRIGHTNESS in kwargs and self.color_mode == ColorMode.HS - and ATTR_COLOR_TEMP not in kwargs + and ATTR_COLOR_TEMP_KELVIN not in kwargs ) ): if self._color_mode_dpcode: @@ -688,8 +696,8 @@ class TuyaLightEntity(TuyaEntity, LightEntity): return round(brightness) @property - def color_temp(self) -> int | None: - """Return the color_temp of the light.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" if not self._color_temp: return None @@ -697,9 +705,9 @@ class TuyaLightEntity(TuyaEntity, LightEntity): if temperature is None: return None - return round( + return color_util.color_temperature_mired_to_kelvin( self._color_temp.remap_value_to( - temperature, self.min_mireds, self.max_mireds, reverse=True + temperature, MIN_MIREDS, MAX_MIREDS, reverse=True ) ) diff --git a/homeassistant/components/tuya/manifest.json b/homeassistant/components/tuya/manifest.json index 305a74160de..b53e6fa27d8 100644 --- a/homeassistant/components/tuya/manifest.json +++ b/homeassistant/components/tuya/manifest.json @@ -43,5 +43,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["tuya_iot"], - "requirements": ["tuya-device-sharing-sdk==0.1.9"] + "requirements": ["tuya-device-sharing-sdk==0.2.1"] } diff --git a/homeassistant/components/tuya/number.py b/homeassistant/components/tuya/number.py index d7614fb837a..8d5b5dbfa19 100644 --- a/homeassistant/components/tuya/number.py +++ b/homeassistant/components/tuya/number.py @@ -15,8 +15,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TuyaConfigEntry -from .base import IntegerTypeData, TuyaEntity from .const import DEVICE_CLASS_UNITS, DOMAIN, TUYA_DISCOVERY_NEW, DPCode, DPType +from .entity import IntegerTypeData, TuyaEntity # All descriptions can be found here. 
Mostly the Integer data types in the # default instructions set of each category end up being a number. @@ -87,13 +87,20 @@ NUMBERS: dict[str, tuple[NumberEntityDescription, ...]] = { NumberEntityDescription( key=DPCode.NEAR_DETECTION, translation_key="near_detection", + device_class=NumberDeviceClass.DISTANCE, entity_category=EntityCategory.CONFIG, ), NumberEntityDescription( key=DPCode.FAR_DETECTION, translation_key="far_detection", + device_class=NumberDeviceClass.DISTANCE, entity_category=EntityCategory.CONFIG, ), + NumberEntityDescription( + key=DPCode.TARGET_DIS_CLOSEST, + translation_key="target_dis_closest", + device_class=NumberDeviceClass.DISTANCE, + ), ), # Coffee maker # https://developer.tuya.com/en/docs/iot/categorykfj?id=Kaiuz2p12pc7f @@ -285,6 +292,17 @@ NUMBERS: dict[str, tuple[NumberEntityDescription, ...]] = { device_class=NumberDeviceClass.TEMPERATURE, ), ), + # CO2 Detector + # https://developer.tuya.com/en/docs/iot/categoryco2bj?id=Kaiuz3wes7yuy + "co2bj": ( + NumberEntityDescription( + key=DPCode.ALARM_TIME, + translation_key="alarm_duration", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=NumberDeviceClass.DURATION, + entity_category=EntityCategory.CONFIG, + ), + ), } diff --git a/homeassistant/components/tuya/select.py b/homeassistant/components/tuya/select.py index 111b9e40918..831d3cb3e0c 100644 --- a/homeassistant/components/tuya/select.py +++ b/homeassistant/components/tuya/select.py @@ -11,8 +11,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TuyaConfigEntry -from .base import TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode, DPType +from .entity import TuyaEntity # All descriptions can be found here. Mostly the Enum data types in the # default instructions set of each category end up being a select. @@ -307,6 +307,15 @@ SELECTS: dict[str, tuple[SelectEntityDescription, ...]] = { entity_category=EntityCategory.CONFIG, ), ), + # CO2 Detector + # https://developer.tuya.com/en/docs/iot/categoryco2bj?id=Kaiuz3wes7yuy + "co2bj": ( + SelectEntityDescription( + key=DPCode.ALARM_VOLUME, + translation_key="volume", + entity_category=EntityCategory.CONFIG, + ), + ), } # Socket (duplicate of `kg`) diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py index 1ab3ea700d7..f766c744998 100644 --- a/homeassistant/components/tuya/sensor.py +++ b/homeassistant/components/tuya/sensor.py @@ -27,7 +27,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from . 
import TuyaConfigEntry -from .base import ElectricityTypeData, EnumTypeData, IntegerTypeData, TuyaEntity from .const import ( DEVICE_CLASS_UNITS, DOMAIN, @@ -36,6 +35,7 @@ from .const import ( DPType, UnitOfMeasurement, ) +from .entity import ElectricityTypeData, EnumTypeData, IntegerTypeData, TuyaEntity @dataclass(frozen=True) @@ -203,6 +203,23 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { device_class=SensorDeviceClass.CO2, state_class=SensorStateClass.MEASUREMENT, ), + TuyaSensorEntityDescription( + key=DPCode.CH2O_VALUE, + translation_key="formaldehyde", + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.VOC_VALUE, + translation_key="voc", + device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.PM25_VALUE, + translation_key="pm25", + device_class=SensorDeviceClass.PM25, + state_class=SensorStateClass.MEASUREMENT, + ), *BATTERY_SENSORS, ), # Two-way temperature and humidity switch @@ -243,6 +260,31 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { entity_registry_enabled_default=False, ), ), + # Single Phase power meter + # Note: Undocumented + "aqcz": ( + TuyaSensorEntityDescription( + key=DPCode.CUR_CURRENT, + translation_key="current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TuyaSensorEntityDescription( + key=DPCode.CUR_POWER, + translation_key="power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TuyaSensorEntityDescription( + key=DPCode.CUR_VOLTAGE, + translation_key="voltage", + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + ), # CO Detector # https://developer.tuya.com/en/docs/iot/categorycobj?id=Kaiuz3u1j6q1v "cobj": ( @@ -264,8 +306,12 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { ), ), # Air Quality Monitor - # No specification on Tuya portal + # https://developer.tuya.com/en/docs/iot/hjjcy?id=Kbeoad8y1nnlv "hjjcy": ( + TuyaSensorEntityDescription( + key=DPCode.AIR_QUALITY_INDEX, + translation_key="air_quality_index", + ), TuyaSensorEntityDescription( key=DPCode.TEMP_CURRENT, translation_key="temperature", @@ -301,6 +347,13 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { device_class=SensorDeviceClass.PM25, state_class=SensorStateClass.MEASUREMENT, ), + TuyaSensorEntityDescription( + key=DPCode.PM10, + translation_key="pm10", + device_class=SensorDeviceClass.PM10, + state_class=SensorStateClass.MEASUREMENT, + ), + *BATTERY_SENSORS, ), # Formaldehyde Detector # Note: Not documented diff --git a/homeassistant/components/tuya/siren.py b/homeassistant/components/tuya/siren.py index 683705c6546..6f7dfe4c96c 100644 --- a/homeassistant/components/tuya/siren.py +++ b/homeassistant/components/tuya/siren.py @@ -11,13 +11,14 @@ from homeassistant.components.siren import ( SirenEntityDescription, SirenEntityFeature, ) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import TuyaConfigEntry -from .base import TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode +from .entity import TuyaEntity # All descriptions can be found here: # https://developer.tuya.com/en/docs/iot/standarddescription?id=K9i5ql6waswzq @@ -43,6 +44,14 @@ SIRENS: dict[str, tuple[SirenEntityDescription, ...]] = { key=DPCode.SIREN_SWITCH, ), ), + # CO2 Detector + # https://developer.tuya.com/en/docs/iot/categoryco2bj?id=Kaiuz3wes7yuy + "co2bj": ( + SirenEntityDescription( + key=DPCode.ALARM_SWITCH, + entity_category=EntityCategory.CONFIG, + ), + ), } diff --git a/homeassistant/components/tuya/strings.json b/homeassistant/components/tuya/strings.json index 6b699c0ffc0..8ec61cc8aa5 100644 --- a/homeassistant/components/tuya/strings.json +++ b/homeassistant/components/tuya/strings.json @@ -119,6 +119,9 @@ } }, "number": { + "alarm_duration": { + "name": "Alarm duration" + }, "temperature": { "name": "[%key:component::sensor::entity_component::temperature::name%]" }, @@ -146,6 +149,9 @@ "far_detection": { "name": "Far detection" }, + "target_dis_closest": { + "name": "Closest target distance" + }, "water_level": { "name": "Water level" }, @@ -620,6 +626,17 @@ "good": "Good", "severe": "Severe" } + }, + "air_quality_index": { + "name": "Air quality index", + "state": { + "level_1": "Level 1", + "level_2": "Level 2", + "level_3": "Level 3", + "level_4": "Level 4", + "level_5": "Level 5", + "level_6": "Level 6" + } } }, "switch": { diff --git a/homeassistant/components/tuya/switch.py b/homeassistant/components/tuya/switch.py index 8af9a00ab45..2b5e6fec4a6 100644 --- a/homeassistant/components/tuya/switch.py +++ b/homeassistant/components/tuya/switch.py @@ -17,8 +17,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TuyaConfigEntry -from .base import TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode +from .entity import TuyaEntity # All descriptions can be found here. Mostly the Boolean data types in the # default instruction set of each category end up being a Switch. @@ -528,6 +528,13 @@ SWITCHES: dict[str, tuple[SwitchEntityDescription, ...]] = { translation_key="switch", ), ), + # Hejhome whitelabel Fingerbot + "znjxs": ( + SwitchEntityDescription( + key=DPCode.SWITCH, + translation_key="switch", + ), + ), # IoT Switch? # Note: Undocumented "tdq": ( diff --git a/homeassistant/components/tuya/vacuum.py b/homeassistant/components/tuya/vacuum.py index 360d6d4f5c3..738492102a1 100644 --- a/homeassistant/components/tuya/vacuum.py +++ b/homeassistant/components/tuya/vacuum.py @@ -7,46 +7,43 @@ from typing import Any from tuya_sharing import CustomerDevice, Manager from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) -from homeassistant.const import STATE_IDLE, STATE_PAUSED from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import TuyaConfigEntry -from .base import EnumTypeData, IntegerTypeData, TuyaEntity from .const import TUYA_DISCOVERY_NEW, DPCode, DPType +from .entity import EnumTypeData, IntegerTypeData, TuyaEntity TUYA_MODE_RETURN_HOME = "chargego" TUYA_STATUS_TO_HA = { - "charge_done": STATE_DOCKED, - "chargecompleted": STATE_DOCKED, - "chargego": STATE_DOCKED, - "charging": STATE_DOCKED, - "cleaning": STATE_CLEANING, - "docking": STATE_RETURNING, - "goto_charge": STATE_RETURNING, - "goto_pos": STATE_CLEANING, - "mop_clean": STATE_CLEANING, - "part_clean": STATE_CLEANING, - "paused": STATE_PAUSED, - "pick_zone_clean": STATE_CLEANING, - "pos_arrived": STATE_CLEANING, - "pos_unarrive": STATE_CLEANING, - "random": STATE_CLEANING, - "sleep": STATE_IDLE, - "smart_clean": STATE_CLEANING, - "smart": STATE_CLEANING, - "spot_clean": STATE_CLEANING, - "standby": STATE_IDLE, - "wall_clean": STATE_CLEANING, - "wall_follow": STATE_CLEANING, - "zone_clean": STATE_CLEANING, + "charge_done": VacuumActivity.DOCKED, + "chargecompleted": VacuumActivity.DOCKED, + "chargego": VacuumActivity.DOCKED, + "charging": VacuumActivity.DOCKED, + "cleaning": VacuumActivity.CLEANING, + "docking": VacuumActivity.RETURNING, + "goto_charge": VacuumActivity.RETURNING, + "goto_pos": VacuumActivity.CLEANING, + "mop_clean": VacuumActivity.CLEANING, + "part_clean": VacuumActivity.CLEANING, + "paused": VacuumActivity.PAUSED, + "pick_zone_clean": VacuumActivity.CLEANING, + "pos_arrived": VacuumActivity.CLEANING, + "pos_unarrive": VacuumActivity.CLEANING, + "random": VacuumActivity.CLEANING, + "sleep": VacuumActivity.IDLE, + "smart_clean": VacuumActivity.CLEANING, + "smart": VacuumActivity.CLEANING, + "spot_clean": VacuumActivity.CLEANING, + "standby": VacuumActivity.IDLE, + "wall_clean": VacuumActivity.CLEANING, + "wall_follow": VacuumActivity.CLEANING, + "zone_clean": VacuumActivity.CLEANING, } @@ -137,12 +134,12 @@ class TuyaVacuumEntity(TuyaEntity, StateVacuumEntity): return self.device.status.get(DPCode.SUCTION) @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return Tuya vacuum device state.""" if self.device.status.get(DPCode.PAUSE) and not ( self.device.status.get(DPCode.STATUS) ): - return STATE_PAUSED + return VacuumActivity.PAUSED if not (status := self.device.status.get(DPCode.STATUS)): return None return TUYA_STATUS_TO_HA.get(status) diff --git a/homeassistant/components/twentemilieu/__init__.py b/homeassistant/components/twentemilieu/__init__.py index f447ef6257d..1359e707601 100644 --- a/homeassistant/components/twentemilieu/__init__.py +++ b/homeassistant/components/twentemilieu/__init__.py @@ -2,64 +2,35 @@ from __future__ import annotations -from datetime import date, timedelta - -from twentemilieu import TwenteMilieu, WasteType import voluptuous as vol -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import CONF_HOUSE_LETTER, CONF_HOUSE_NUMBER, CONF_POST_CODE, DOMAIN, LOGGER - -SCAN_INTERVAL = timedelta(seconds=3600) +from .coordinator import TwenteMilieuConfigEntry, TwenteMilieuDataUpdateCoordinator SERVICE_UPDATE = "update" SERVICE_SCHEMA = vol.Schema({vol.Optional(CONF_ID): cv.string}) PLATFORMS = [Platform.CALENDAR, Platform.SENSOR] -type 
TwenteMilieuDataUpdateCoordinator = DataUpdateCoordinator[ - dict[WasteType, list[date]] -] -type TwenteMilieuConfigEntry = ConfigEntry[TwenteMilieuDataUpdateCoordinator] - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: TwenteMilieuConfigEntry +) -> bool: """Set up Twente Milieu from a config entry.""" - session = async_get_clientsession(hass) - twentemilieu = TwenteMilieu( - post_code=entry.data[CONF_POST_CODE], - house_number=entry.data[CONF_HOUSE_NUMBER], - house_letter=entry.data[CONF_HOUSE_LETTER], - session=session, - ) - - coordinator: TwenteMilieuDataUpdateCoordinator = DataUpdateCoordinator( - hass, - LOGGER, - name=DOMAIN, - update_interval=SCAN_INTERVAL, - update_method=twentemilieu.update, - ) + coordinator = TwenteMilieuDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - # For backwards compat, set unique ID - if entry.unique_id is None: - hass.config_entries.async_update_entry( - entry, unique_id=str(entry.data[CONF_ID]) - ) - entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: TwenteMilieuConfigEntry +) -> bool: """Unload Twente Milieu config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/twentemilieu/calendar.py b/homeassistant/components/twentemilieu/calendar.py index 8e7452823b7..d163ae4e564 100644 --- a/homeassistant/components/twentemilieu/calendar.py +++ b/homeassistant/components/twentemilieu/calendar.py @@ -10,8 +10,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . 
import TwenteMilieuConfigEntry from .const import WASTE_TYPE_TO_DESCRIPTION +from .coordinator import TwenteMilieuConfigEntry from .entity import TwenteMilieuEntity diff --git a/homeassistant/components/twentemilieu/coordinator.py b/homeassistant/components/twentemilieu/coordinator.py new file mode 100644 index 00000000000..d2cf5a887ef --- /dev/null +++ b/homeassistant/components/twentemilieu/coordinator.py @@ -0,0 +1,49 @@ +"""Data update coordinator for Twente Milieu.""" + +from __future__ import annotations + +from datetime import date + +from twentemilieu import TwenteMilieu, WasteType + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import ( + CONF_HOUSE_LETTER, + CONF_HOUSE_NUMBER, + CONF_POST_CODE, + DOMAIN, + LOGGER, + SCAN_INTERVAL, +) + +type TwenteMilieuConfigEntry = ConfigEntry[TwenteMilieuDataUpdateCoordinator] + + +class TwenteMilieuDataUpdateCoordinator( + DataUpdateCoordinator[dict[WasteType, list[date]]] +): + """Class to manage fetching Twente Milieu data.""" + + def __init__(self, hass: HomeAssistant, entry: TwenteMilieuConfigEntry) -> None: + """Initialize Twente Milieu data update coordinator.""" + self.twentemilieu = TwenteMilieu( + post_code=entry.data[CONF_POST_CODE], + house_number=entry.data[CONF_HOUSE_NUMBER], + house_letter=entry.data[CONF_HOUSE_LETTER], + session=async_get_clientsession(hass), + ) + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + config_entry=entry, + ) + + async def _async_update_data(self) -> dict[WasteType, list[date]]: + """Fetch Twente Milieu data.""" + return await self.twentemilieu.update() diff --git a/homeassistant/components/twentemilieu/diagnostics.py b/homeassistant/components/twentemilieu/diagnostics.py index 9de3f9bfaff..cb3b411c530 100644 --- a/homeassistant/components/twentemilieu/diagnostics.py +++ b/homeassistant/components/twentemilieu/diagnostics.py @@ -4,12 +4,13 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from .coordinator import TwenteMilieuConfigEntry + async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: TwenteMilieuConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" return { diff --git a/homeassistant/components/twentemilieu/entity.py b/homeassistant/components/twentemilieu/entity.py index 896a8e32de9..660dd16288c 100644 --- a/homeassistant/components/twentemilieu/entity.py +++ b/homeassistant/components/twentemilieu/entity.py @@ -2,14 +2,13 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import Entity from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . 
import TwenteMilieuDataUpdateCoordinator from .const import DOMAIN +from .coordinator import TwenteMilieuConfigEntry, TwenteMilieuDataUpdateCoordinator class TwenteMilieuEntity(CoordinatorEntity[TwenteMilieuDataUpdateCoordinator], Entity): @@ -17,7 +16,7 @@ class TwenteMilieuEntity(CoordinatorEntity[TwenteMilieuDataUpdateCoordinator], E _attr_has_entity_name = True - def __init__(self, entry: ConfigEntry) -> None: + def __init__(self, entry: TwenteMilieuConfigEntry) -> None: """Initialize the Twente Milieu entity.""" super().__init__(coordinator=entry.runtime_data) self._attr_device_info = DeviceInfo( diff --git a/homeassistant/components/twentemilieu/manifest.json b/homeassistant/components/twentemilieu/manifest.json index aef70aa6a10..c04c5492a40 100644 --- a/homeassistant/components/twentemilieu/manifest.json +++ b/homeassistant/components/twentemilieu/manifest.json @@ -7,6 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["twentemilieu"], - "quality_scale": "platinum", - "requirements": ["twentemilieu==2.0.1"] + "quality_scale": "silver", + "requirements": ["twentemilieu==2.2.0"] } diff --git a/homeassistant/components/twentemilieu/quality_scale.yaml b/homeassistant/components/twentemilieu/quality_scale.yaml new file mode 100644 index 00000000000..42ff152cb4d --- /dev/null +++ b/homeassistant/components/twentemilieu/quality_scale.yaml @@ -0,0 +1,109 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + parallel-updates: + status: exempt + comment: | + This integration only polls data using a coordinator. + Since the integration is read-only and poll-only (only provides sensor + data), there is no need to implement parallel updates. + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + + # Gold + entity-translations: + status: todo + comment: | + The calendar entity name isn't translated yet. + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: + status: exempt + comment: | + This integration cannot be discovered; it connects to a service + provider, which uses the user's home address to get the data. + stale-devices: + status: exempt + comment: | + This integration has a fixed single device which represents the service. 
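A minimal sketch of how a platform consumes the typed config entry introduced in the Twente Milieu hunks above (entry.runtime_data holding the TwenteMilieuDataUpdateCoordinator, whose data is dict[WasteType, list[date]]); the helper itself and the choice of WasteType.ORGANIC are illustrative assumptions, not code from this patch:

from __future__ import annotations

from datetime import date

from twentemilieu import WasteType

from .coordinator import TwenteMilieuConfigEntry


def next_organic_pickup(entry: TwenteMilieuConfigEntry) -> date | None:
    """Return the next organic waste pickup date, if known (illustrative helper)."""
    # runtime_data is typed as TwenteMilieuDataUpdateCoordinator, so no cast is needed.
    coordinator = entry.runtime_data
    # coordinator.data maps each WasteType to its upcoming pickup dates.
    upcoming = coordinator.data.get(WasteType.ORGANIC, [])
    return min(upcoming) if upcoming else None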
+ diagnostics: done + exception-translations: + status: todo + comment: | + The coordinator raises an exception and currently doesn't provide a translation for it. + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: | + This integration has a fixed single device which represents the service. + discovery-update-info: + status: exempt + comment: | + This integration cannot be discovered; it connects to a service + provider, which uses the user's home address to get the data. + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + docs-use-cases: done + docs-supported-devices: + status: exempt + comment: | + This is a service, which doesn't integrate with any devices. + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/twentemilieu/sensor.py b/homeassistant/components/twentemilieu/sensor.py index 2d2e3de0f0e..4605ede1f87 100644 --- a/homeassistant/components/twentemilieu/sensor.py +++ b/homeassistant/components/twentemilieu/sensor.py @@ -12,12 +12,12 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN +from .coordinator import TwenteMilieuConfigEntry from .entity import TwenteMilieuEntity @@ -64,7 +64,7 @@ SENSORS: tuple[TwenteMilieuSensorDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: TwenteMilieuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Twente Milieu sensor based on a config entry.""" @@ -80,7 +80,7 @@ class TwenteMilieuSensor(TwenteMilieuEntity, SensorEntity): def __init__( self, - entry: ConfigEntry, + entry: TwenteMilieuConfigEntry, description: TwenteMilieuSensorDescription, ) -> None: """Initialize the Twente Milieu entity.""" diff --git a/homeassistant/components/twentemilieu/strings.json b/homeassistant/components/twentemilieu/strings.json index 7797167ea0b..5c40df1b0c2 100644 --- a/homeassistant/components/twentemilieu/strings.json +++ b/homeassistant/components/twentemilieu/strings.json @@ -7,6 +7,11 @@ "post_code": "Postal code", "house_number": "House number", "house_letter": "House letter/additional" + }, + "data_description": { + "post_code": "The postal code of the address, for example 7500AA", + "house_number": "The house number of the address", + "house_letter": "The house letter or additional information of the address" + } } }, diff --git a/homeassistant/components/twilio_call/manifest.json b/homeassistant/components/twilio_call/manifest.json index 88f09efdeed..f4389e1c7d7 100644 --- a/homeassistant/components/twilio_call/manifest.json +++ b/homeassistant/components/twilio_call/manifest.json @@ -5,5 +5,6 @@ "dependencies": ["twilio"], "documentation": "https://www.home-assistant.io/integrations/twilio_call", "iot_class": "cloud_push", - "loggers": ["twilio"] + "loggers": ["twilio"], + "quality_scale": "legacy" } diff --git a/homeassistant/components/twilio_call/notify.py b/homeassistant/components/twilio_call/notify.py index 5338bb59a79..ab79ea9692d 100644 --- 
a/homeassistant/components/twilio_call/notify.py +++ b/homeassistant/components/twilio_call/notify.py @@ -53,7 +53,7 @@ class TwilioCallNotificationService(BaseNotificationService): def send_message(self, message="", **kwargs): """Call to specified target users.""" if not (targets := kwargs.get(ATTR_TARGET)): - _LOGGER.info("At least 1 target is required") + _LOGGER.warning("At least 1 target is required") return if message.startswith(("http://", "https://")): diff --git a/homeassistant/components/twilio_sms/manifest.json b/homeassistant/components/twilio_sms/manifest.json index 8736d58c0da..eed5a1113c6 100644 --- a/homeassistant/components/twilio_sms/manifest.json +++ b/homeassistant/components/twilio_sms/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["twilio"], "documentation": "https://www.home-assistant.io/integrations/twilio_sms", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/twilio_sms/notify.py b/homeassistant/components/twilio_sms/notify.py index d1e2ca2888f..531fadcf259 100644 --- a/homeassistant/components/twilio_sms/notify.py +++ b/homeassistant/components/twilio_sms/notify.py @@ -66,7 +66,7 @@ class TwilioSMSNotificationService(BaseNotificationService): twilio_args[ATTR_MEDIAURL] = data[ATTR_MEDIAURL] if not targets: - _LOGGER.info("At least 1 target is required") + _LOGGER.warning("At least 1 target is required") return for target in targets: diff --git a/homeassistant/components/twinkly/light.py b/homeassistant/components/twinkly/light.py index 2749c9a7764..6f6dffe63d2 100644 --- a/homeassistant/components/twinkly/light.py +++ b/homeassistant/components/twinkly/light.py @@ -280,7 +280,7 @@ class TwinklyLight(LightEntity): await self.async_update_current_movie() if not self._attr_available: - _LOGGER.info("Twinkly '%s' is now available", self._client.host) + _LOGGER.warning("Twinkly '%s' is now available", self._client.host) # We don't use the echo API to track the availability since # we already have to pull the device to get its state. 
@@ -289,7 +289,7 @@ class TwinklyLight(LightEntity): # We log this as "info" as it's pretty common that the Christmas # light are not reachable in July if self._attr_available: - _LOGGER.info( + _LOGGER.warning( "Twinkly '%s' is not reachable (client error)", self._client.host ) self._attr_available = False diff --git a/homeassistant/components/twitch/__init__.py b/homeassistant/components/twitch/__init__.py index 40a744684b9..22a1782f594 100644 --- a/homeassistant/components/twitch/__init__.py +++ b/homeassistant/components/twitch/__init__.py @@ -7,7 +7,6 @@ from typing import cast from aiohttp.client_exceptions import ClientError, ClientResponseError from twitchAPI.twitch import Twitch -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady @@ -17,10 +16,11 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( async_get_config_entry_implementation, ) -from .const import CLIENT, DOMAIN, OAUTH_SCOPES, PLATFORMS, SESSION +from .const import OAUTH_SCOPES, PLATFORMS +from .coordinator import TwitchConfigEntry, TwitchCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: TwitchConfigEntry) -> bool: """Set up Twitch from a config entry.""" implementation = cast( LocalOAuth2Implementation, @@ -46,17 +46,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: client.auto_refresh_auth = False await client.set_user_authentication(access_token, scope=OAUTH_SCOPES) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { - CLIENT: client, - SESSION: session, - } + coordinator = TwitchCoordinator(hass, client, session, entry) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TwitchConfigEntry) -> bool: """Unload Twitch config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/twitch/config_flow.py b/homeassistant/components/twitch/config_flow.py index 7f006f194f5..ed196897c11 100644 --- a/homeassistant/components/twitch/config_flow.py +++ b/homeassistant/components/twitch/config_flow.py @@ -9,7 +9,7 @@ from typing import Any, cast from twitchAPI.helper import first from twitchAPI.twitch import Twitch -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.config_entry_oauth2_flow import LocalOAuth2Implementation @@ -23,7 +23,6 @@ class OAuth2FlowHandler( """Config flow to handle Twitch OAuth2 authentication.""" DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None def __init__(self) -> None: """Initialize flow.""" @@ -63,8 +62,8 @@ class OAuth2FlowHandler( user_id = user.id - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() channels = [ @@ -76,38 +75,36 @@ class OAuth2FlowHandler( 
title=user.display_name, data=data, options={CONF_CHANNELS: channels} ) - if self.reauth_entry.unique_id == user_id: - new_channels = self.reauth_entry.options[CONF_CHANNELS] - # Since we could not get all channels at import, we do it at the reauth - # immediately after. - if "imported" in self.reauth_entry.data: - channels = [ - channel.broadcaster_login - async for channel in await client.get_followed_channels(user_id) - ] - options = list(set(channels) - set(new_channels)) - new_channels = [*new_channels, *options] - - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data=data, - options={CONF_CHANNELS: new_channels}, - ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - return self.async_abort( + reauth_entry = self._get_reauth_entry() + self._abort_if_unique_id_mismatch( reason="wrong_account", - description_placeholders={"title": self.reauth_entry.title}, + description_placeholders={ + "title": reauth_entry.title, + "username": str(reauth_entry.unique_id), + }, + ) + + new_channels = reauth_entry.options[CONF_CHANNELS] + # Since we could not get all channels at import, we do it at the reauth + # immediately after. + if "imported" in reauth_entry.data: + channels = [ + channel.broadcaster_login + async for channel in await client.get_followed_channels(user_id) + ] + options = list(set(channels) - set(new_channels)) + new_channels = [*new_channels, *options] + + return self.async_update_reload_and_abort( + reauth_entry, + data=data, + options={CONF_CHANNELS: new_channels}, ) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/twitch/const.py b/homeassistant/components/twitch/const.py index b46bf8113b4..fc7c2f73487 100644 --- a/homeassistant/components/twitch/const.py +++ b/homeassistant/components/twitch/const.py @@ -17,7 +17,5 @@ CONF_REFRESH_TOKEN = "refresh_token" DOMAIN = "twitch" CONF_CHANNELS = "channels" -CLIENT = "client" -SESSION = "session" OAUTH_SCOPES = [AuthScope.USER_READ_SUBSCRIPTIONS, AuthScope.USER_READ_FOLLOWS] diff --git a/homeassistant/components/twitch/coordinator.py b/homeassistant/components/twitch/coordinator.py new file mode 100644 index 00000000000..c61e80bd2b8 --- /dev/null +++ b/homeassistant/components/twitch/coordinator.py @@ -0,0 +1,134 @@ +"""Define a class to manage fetching Twitch data.""" + +from dataclasses import dataclass +from datetime import datetime, timedelta + +from twitchAPI.helper import first +from twitchAPI.object.api import FollowedChannel, Stream, TwitchUser, UserSubscription +from twitchAPI.twitch import Twitch +from twitchAPI.type import TwitchAPIException, TwitchResourceNotFound + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import CONF_CHANNELS, DOMAIN, LOGGER, OAUTH_SCOPES + +type TwitchConfigEntry = ConfigEntry[TwitchCoordinator] + + +def chunk_list(lst: list, chunk_size: int) -> list[list]: + """Split a list into chunks of chunk_size.""" + return [lst[i : i + chunk_size] for i in range(0, len(lst), 
chunk_size)] + + +@dataclass +class TwitchUpdate: + """Class for holding Twitch data.""" + + name: str + followers: int + is_streaming: bool + game: str | None + title: str | None + started_at: datetime | None + stream_picture: str | None + picture: str + subscribed: bool | None + subscription_gifted: bool | None + subscription_tier: int | None + follows: bool + following_since: datetime | None + viewers: int | None + + +class TwitchCoordinator(DataUpdateCoordinator[dict[str, TwitchUpdate]]): + """Class to manage fetching Twitch data.""" + + config_entry: TwitchConfigEntry + users: list[TwitchUser] + current_user: TwitchUser + + def __init__( + self, + hass: HomeAssistant, + twitch: Twitch, + session: OAuth2Session, + entry: TwitchConfigEntry, + ) -> None: + """Initialize the coordinator.""" + self.twitch = twitch + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=timedelta(minutes=5), + config_entry=entry, + ) + self.session = session + + async def _async_setup(self) -> None: + channels = self.config_entry.options[CONF_CHANNELS] + self.users = [] + # Split channels into chunks of 100 to avoid hitting the rate limit + for chunk in chunk_list(channels, 100): + self.users.extend( + [channel async for channel in self.twitch.get_users(logins=chunk)] + ) + if not (user := await first(self.twitch.get_users())): + raise UpdateFailed("Logged in user not found") + self.current_user = user + + async def _async_update_data(self) -> dict[str, TwitchUpdate]: + await self.session.async_ensure_token_valid() + await self.twitch.set_user_authentication( + self.session.token["access_token"], + OAUTH_SCOPES, + self.session.token["refresh_token"], + False, + ) + data: dict[str, TwitchUpdate] = {} + streams: dict[str, Stream] = { + s.user_id: s + async for s in self.twitch.get_followed_streams( + user_id=self.current_user.id, first=100 + ) + } + follows: dict[str, FollowedChannel] = { + f.broadcaster_id: f + async for f in await self.twitch.get_followed_channels( + user_id=self.current_user.id, first=100 + ) + } + for channel in self.users: + followers = await self.twitch.get_channel_followers(channel.id) + stream = streams.get(channel.id) + follow = follows.get(channel.id) + sub: UserSubscription | None = None + try: + sub = await self.twitch.check_user_subscription( + user_id=self.current_user.id, broadcaster_id=channel.id + ) + except TwitchResourceNotFound: + LOGGER.debug("User is not subscribed to %s", channel.display_name) + except TwitchAPIException as exc: + LOGGER.error("Error response on check_user_subscription: %s", exc) + + data[channel.id] = TwitchUpdate( + channel.display_name, + followers.total, + bool(stream), + stream.game_name if stream else None, + stream.title if stream else None, + stream.started_at if stream else None, + stream.thumbnail_url if stream else None, + channel.profile_image_url, + bool(sub), + sub.is_gift if sub else None, + {"1000": 1, "2000": 2, "3000": 3}.get(sub.tier) if sub else None, + bool(follow), + follow.followed_at if follow else None, + stream.viewer_count if stream else None, + ) + return data diff --git a/homeassistant/components/twitch/sensor.py b/homeassistant/components/twitch/sensor.py index a6e2f4e04af..b407eae0319 100644 --- a/homeassistant/components/twitch/sensor.py +++ b/homeassistant/components/twitch/sensor.py @@ -2,32 +2,25 @@ from __future__ import annotations -from twitchAPI.helper import first -from twitchAPI.twitch import ( - AuthType, - Twitch, - TwitchAPIException, - TwitchResourceNotFound, - TwitchUser, -) +from typing 
import Any -from homeassistant.components.sensor import SensorEntity -from homeassistant.config_entries import ConfigEntry +from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.core import HomeAssistant -from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CLIENT, CONF_CHANNELS, DOMAIN, LOGGER, OAUTH_SCOPES, SESSION +from .coordinator import TwitchConfigEntry, TwitchCoordinator, TwitchUpdate ATTR_GAME = "game" ATTR_TITLE = "title" ATTR_SUBSCRIPTION = "subscribed" -ATTR_SUBSCRIPTION_SINCE = "subscribed_since" ATTR_SUBSCRIPTION_GIFTED = "subscription_is_gifted" +ATTR_SUBSCRIPTION_TIER = "subscription_tier" ATTR_FOLLOW = "following" ATTR_FOLLOW_SINCE = "following_since" ATTR_FOLLOWING = "followers" -ATTR_VIEWS = "views" +ATTR_VIEWERS = "viewers" ATTR_STARTED_AT = "started_at" STATE_OFFLINE = "offline" @@ -36,109 +29,73 @@ STATE_STREAMING = "streaming" PARALLEL_UPDATES = 1 -def chunk_list(lst: list, chunk_size: int) -> list[list]: - """Split a list into chunks of chunk_size.""" - return [lst[i : i + chunk_size] for i in range(0, len(lst), chunk_size)] - - async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: TwitchConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Initialize entries.""" - client = hass.data[DOMAIN][entry.entry_id][CLIENT] - session = hass.data[DOMAIN][entry.entry_id][SESSION] + coordinator = entry.runtime_data - channels = entry.options[CONF_CHANNELS] - - entities: list[TwitchSensor] = [] - - # Split channels into chunks of 100 to avoid hitting the rate limit - for chunk in chunk_list(channels, 100): - entities.extend( - [ - TwitchSensor(channel, session, client) - async for channel in client.get_users(logins=chunk) - ] - ) - - async_add_entities(entities, True) + async_add_entities( + TwitchSensor(coordinator, channel_id) for channel_id in coordinator.data + ) -class TwitchSensor(SensorEntity): +class TwitchSensor(CoordinatorEntity[TwitchCoordinator], SensorEntity): """Representation of a Twitch channel.""" _attr_translation_key = "channel" + _attr_device_class = SensorDeviceClass.ENUM + _attr_options = [STATE_OFFLINE, STATE_STREAMING] - def __init__( - self, channel: TwitchUser, session: OAuth2Session, client: Twitch - ) -> None: + def __init__(self, coordinator: TwitchCoordinator, channel_id: str) -> None: """Initialize the sensor.""" - self._session = session - self._client = client - self._channel = channel - self._enable_user_auth = client.has_required_auth(AuthType.USER, OAUTH_SCOPES) - self._attr_name = channel.display_name - self._attr_unique_id = channel.id + super().__init__(coordinator) + self.channel_id = channel_id + self._attr_unique_id = channel_id + self._attr_name = self.channel.name - async def async_update(self) -> None: - """Update device state.""" - await self._session.async_ensure_token_valid() - await self._client.set_user_authentication( - self._session.token["access_token"], - OAUTH_SCOPES, - self._session.token["refresh_token"], - False, - ) - followers = await self._client.get_channel_followers(self._channel.id) + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.channel_id in self.coordinator.data - self._attr_extra_state_attributes = { - ATTR_FOLLOWING: 
followers.total, - ATTR_VIEWS: self._channel.view_count, + @property + def channel(self) -> TwitchUpdate: + """Return the channel data.""" + return self.coordinator.data[self.channel_id] + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return STATE_STREAMING if self.channel.is_streaming else STATE_OFFLINE + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the state attributes.""" + channel = self.channel + resp = { + ATTR_FOLLOWING: channel.followers, + ATTR_GAME: channel.game, + ATTR_TITLE: channel.title, + ATTR_STARTED_AT: channel.started_at, + ATTR_VIEWERS: channel.viewers, + ATTR_SUBSCRIPTION: False, } - if self._enable_user_auth: - await self._async_add_user_attributes() - if stream := ( - await first(self._client.get_streams(user_id=[self._channel.id], first=1)) - ): - self._attr_native_value = STATE_STREAMING - self._attr_extra_state_attributes[ATTR_GAME] = stream.game_name - self._attr_extra_state_attributes[ATTR_TITLE] = stream.title - self._attr_extra_state_attributes[ATTR_STARTED_AT] = stream.started_at - self._attr_entity_picture = stream.thumbnail_url - if self._attr_entity_picture is not None: - self._attr_entity_picture = self._attr_entity_picture.format( - height=24, - width=24, - ) - else: - self._attr_native_value = STATE_OFFLINE - self._attr_extra_state_attributes[ATTR_GAME] = None - self._attr_extra_state_attributes[ATTR_TITLE] = None - self._attr_extra_state_attributes[ATTR_STARTED_AT] = None - self._attr_entity_picture = self._channel.profile_image_url + if channel.subscribed is not None: + resp[ATTR_SUBSCRIPTION] = channel.subscribed + resp[ATTR_SUBSCRIPTION_GIFTED] = channel.subscription_gifted + resp[ATTR_SUBSCRIPTION_TIER] = channel.subscription_tier + resp[ATTR_FOLLOW] = channel.follows + if channel.follows: + resp[ATTR_FOLLOW_SINCE] = channel.following_since + return resp - async def _async_add_user_attributes(self) -> None: - if not (user := await first(self._client.get_users())): - return - self._attr_extra_state_attributes[ATTR_SUBSCRIPTION] = False - try: - sub = await self._client.check_user_subscription( - user_id=user.id, broadcaster_id=self._channel.id - ) - self._attr_extra_state_attributes[ATTR_SUBSCRIPTION] = True - self._attr_extra_state_attributes[ATTR_SUBSCRIPTION_GIFTED] = sub.is_gift - except TwitchResourceNotFound: - LOGGER.debug("User is not subscribed to %s", self._channel.display_name) - except TwitchAPIException as exc: - LOGGER.error("Error response on check_user_subscription: %s", exc) - - follows = await self._client.get_followed_channels( - user.id, broadcaster_id=self._channel.id - ) - self._attr_extra_state_attributes[ATTR_FOLLOW] = follows.total > 0 - if follows.total: - self._attr_extra_state_attributes[ATTR_FOLLOW_SINCE] = follows.data[ - 0 - ].followed_at + @property + def entity_picture(self) -> str | None: + """Return the picture of the sensor.""" + if self.channel.is_streaming: + assert self.channel.stream_picture is not None + return self.channel.stream_picture + return self.channel.picture diff --git a/homeassistant/components/twitch/strings.json b/homeassistant/components/twitch/strings.json index bbe46526c36..7271b81e924 100644 --- a/homeassistant/components/twitch/strings.json +++ b/homeassistant/components/twitch/strings.json @@ -16,5 +16,47 @@ "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" } + }, + "entity": { + "sensor": { + "channel": { + 
"state": { + "streaming": "Streaming", + "offline": "Offline" + }, + "state_attributes": { + "followers": { + "name": "Followers" + }, + "game": { + "name": "Game" + }, + "title": { + "name": "Title" + }, + "started_at": { + "name": "Started at" + }, + "viewers": { + "name": "Viewers" + }, + "subscribed": { + "name": "Subscribed" + }, + "subscription_is_gifted": { + "name": "Subscription is gifted" + }, + "subscription_tier": { + "name": "Subscription tier" + }, + "following": { + "name": "Following" + }, + "following_since": { + "name": "Following since" + } + } + } + } } } diff --git a/homeassistant/components/twitter/manifest.json b/homeassistant/components/twitter/manifest.json index 44e8712b029..af4dff4486d 100644 --- a/homeassistant/components/twitter/manifest.json +++ b/homeassistant/components/twitter/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/twitter", "iot_class": "cloud_push", "loggers": ["TwitterAPI"], + "quality_scale": "legacy", "requirements": ["TwitterAPI==2.7.12"] } diff --git a/homeassistant/components/ubus/device_tracker.py b/homeassistant/components/ubus/device_tracker.py index 6170ad213a3..285a176af0a 100644 --- a/homeassistant/components/ubus/device_tracker.py +++ b/homeassistant/components/ubus/device_tracker.py @@ -9,7 +9,7 @@ from openwrt.ubus import Ubus import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -38,14 +38,16 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> DeviceScanner | None: """Validate the configuration and return an ubus scanner.""" - dhcp_sw = config[DOMAIN][CONF_DHCP_SOFTWARE] + config = config[DEVICE_TRACKER_DOMAIN] + + dhcp_sw = config[CONF_DHCP_SOFTWARE] scanner: DeviceScanner if dhcp_sw == "dnsmasq": - scanner = DnsmasqUbusDeviceScanner(config[DOMAIN]) + scanner = DnsmasqUbusDeviceScanner(config) elif dhcp_sw == "odhcpd": - scanner = OdhcpdUbusDeviceScanner(config[DOMAIN]) + scanner = OdhcpdUbusDeviceScanner(config) else: - scanner = UbusDeviceScanner(config[DOMAIN]) + scanner = UbusDeviceScanner(config) return scanner if scanner.success_init else None @@ -121,7 +123,7 @@ class UbusDeviceScanner(DeviceScanner): if not self.success_init: return False - _LOGGER.info("Checking hostapd") + _LOGGER.debug("Checking hostapd") if not self.hostapd: hostapd = self.ubus.get_hostapd() diff --git a/homeassistant/components/ubus/manifest.json b/homeassistant/components/ubus/manifest.json index 902b7c9bb82..6053199b4ce 100644 --- a/homeassistant/components/ubus/manifest.json +++ b/homeassistant/components/ubus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ubus", "iot_class": "local_polling", "loggers": ["openwrt"], + "quality_scale": "legacy", "requirements": ["openwrt-ubus-rpc==0.0.2"] } diff --git a/homeassistant/components/uk_transport/manifest.json b/homeassistant/components/uk_transport/manifest.json index f3511e71bfa..d855a04ee29 100644 --- a/homeassistant/components/uk_transport/manifest.json +++ b/homeassistant/components/uk_transport/manifest.json @@ -3,5 +3,6 @@ "name": "UK Transport", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/uk_transport", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/ukraine_alarm/__init__.py 
b/homeassistant/components/ukraine_alarm/__init__.py index b90fb20af75..d850ed6eba8 100644 --- a/homeassistant/components/ukraine_alarm/__init__.py +++ b/homeassistant/components/ukraine_alarm/__init__.py @@ -2,25 +2,13 @@ from __future__ import annotations -from datetime import timedelta -import logging -from typing import Any - -import aiohttp -from aiohttp import ClientSession -from uasiren.client import Client - from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_REGION from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import ALERT_TYPES, DOMAIN, PLATFORMS - -_LOGGER = logging.getLogger(__name__) - -UPDATE_INTERVAL = timedelta(seconds=10) +from .const import DOMAIN, PLATFORMS +from .coordinator import UkraineAlarmDataUpdateCoordinator async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -45,32 +33,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class UkraineAlarmDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching Ukraine Alarm API.""" - - def __init__( - self, - hass: HomeAssistant, - session: ClientSession, - region_id: str, - ) -> None: - """Initialize.""" - self.region_id = region_id - self.uasiren = Client(session) - - super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=UPDATE_INTERVAL) - - async def _async_update_data(self) -> dict[str, Any]: - """Update data via library.""" - try: - res = await self.uasiren.get_alerts(self.region_id) - except aiohttp.ClientError as error: - raise UpdateFailed(f"Error fetching alerts from API: {error}") from error - - current = {alert_type: False for alert_type in ALERT_TYPES} - for alert in res[0]["activeAlerts"]: - current[alert["type"]] = True - - return current diff --git a/homeassistant/components/ukraine_alarm/binary_sensor.py b/homeassistant/components/ukraine_alarm/binary_sensor.py index 0eb8bd7b43c..30cb8e0f553 100644 --- a/homeassistant/components/ukraine_alarm/binary_sensor.py +++ b/homeassistant/components/ukraine_alarm/binary_sensor.py @@ -14,7 +14,6 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import UkraineAlarmDataUpdateCoordinator from .const import ( ALERT_TYPE_AIR, ALERT_TYPE_ARTILLERY, @@ -26,6 +25,7 @@ from .const import ( DOMAIN, MANUFACTURER, ) +from .coordinator import UkraineAlarmDataUpdateCoordinator BINARY_SENSOR_TYPES: tuple[BinarySensorEntityDescription, ...] 
= ( BinarySensorEntityDescription( diff --git a/homeassistant/components/ukraine_alarm/config_flow.py b/homeassistant/components/ukraine_alarm/config_flow.py index faaa9240df3..12059124fa2 100644 --- a/homeassistant/components/ukraine_alarm/config_flow.py +++ b/homeassistant/components/ukraine_alarm/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any import aiohttp from uasiren.client import Client @@ -25,7 +25,7 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize a new UkraineAlarmConfigFlow.""" - self.states = None + self.states: list[dict[str, Any]] | None = None self.selected_region: dict[str, Any] | None = None async def async_step_user( @@ -69,17 +69,25 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): return await self._handle_pick_region("user", "district", user_input) - async def async_step_district(self, user_input=None): + async def async_step_district( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle user-chosen district.""" return await self._handle_pick_region("district", "community", user_input) - async def async_step_community(self, user_input=None): + async def async_step_community( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle user-chosen community.""" return await self._handle_pick_region("community", None, user_input, True) async def _handle_pick_region( - self, step_id: str, next_step: str | None, user_input, last_step=False - ): + self, + step_id: str, + next_step: str | None, + user_input: dict[str, str] | None, + last_step: bool = False, + ) -> ConfigFlowResult: """Handle picking a (sub)region.""" if self.selected_region: source = self.selected_region["regionChildIds"] @@ -121,8 +129,10 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): step_id=step_id, data_schema=schema, last_step=last_step ) - async def _async_finish_flow(self): + async def _async_finish_flow(self) -> ConfigFlowResult: """Finish the setup.""" + if TYPE_CHECKING: + assert self.selected_region is not None await self.async_set_unique_id(self.selected_region["regionId"]) self._abort_if_unique_id_configured() @@ -135,10 +145,10 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): ) -def _find(regions, region_id): +def _find(regions: list[dict[str, Any]], region_id): return next((region for region in regions if region["regionId"] == region_id), None) -def _make_regions_object(regions): +def _make_regions_object(regions: list[dict[str, Any]]) -> dict[str, str]: regions = sorted(regions, key=lambda region: region["regionName"].lower()) return {region["regionId"]: region["regionName"] for region in regions} diff --git a/homeassistant/components/ukraine_alarm/coordinator.py b/homeassistant/components/ukraine_alarm/coordinator.py new file mode 100644 index 00000000000..fbf7c9f81c2 --- /dev/null +++ b/homeassistant/components/ukraine_alarm/coordinator.py @@ -0,0 +1,49 @@ +"""The ukraine_alarm component.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import Any + +import aiohttp +from aiohttp import ClientSession +from uasiren.client import Client + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import ALERT_TYPES, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +UPDATE_INTERVAL = timedelta(seconds=10) + + +class 
UkraineAlarmDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Class to manage fetching Ukraine Alarm API.""" + + def __init__( + self, + hass: HomeAssistant, + session: ClientSession, + region_id: str, + ) -> None: + """Initialize.""" + self.region_id = region_id + self.uasiren = Client(session) + + super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=UPDATE_INTERVAL) + + async def _async_update_data(self) -> dict[str, Any]: + """Update data via library.""" + try: + res = await self.uasiren.get_alerts(self.region_id) + except aiohttp.ClientError as error: + raise UpdateFailed(f"Error fetching alerts from API: {error}") from error + + current = {alert_type: False for alert_type in ALERT_TYPES} + for alert in res[0]["activeAlerts"]: + current[alert["type"]] = True + + return current diff --git a/homeassistant/components/unifi/button.py b/homeassistant/components/unifi/button.py index c53f8be147f..25c6816d794 100644 --- a/homeassistant/components/unifi/button.py +++ b/homeassistant/components/unifi/button.py @@ -117,6 +117,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiButtonEntityDescription, ...] = ( ), UnifiButtonEntityDescription[Wlans, Wlan]( key="WLAN regenerate password", + translation_key="wlan_regenerate_password", device_class=ButtonDeviceClass.UPDATE, entity_category=EntityCategory.CONFIG, entity_registry_enabled_default=False, diff --git a/homeassistant/components/unifi/config_flow.py b/homeassistant/components/unifi/config_flow.py index b5ad1ea2ff0..63c8533aa2e 100644 --- a/homeassistant/components/unifi/config_flow.py +++ b/homeassistant/components/unifi/config_flow.py @@ -20,7 +20,7 @@ import voluptuous as vol from homeassistant.components import ssdp from homeassistant.config_entries import ( - ConfigEntry, + SOURCE_REAUTH, ConfigEntryState, ConfigFlow, ConfigFlowResult, @@ -78,7 +78,7 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: UnifiConfigEntry, ) -> UnifiOptionsFlowHandler: """Get the options flow for this handler.""" return UnifiOptionsFlowHandler(config_entry) @@ -86,7 +86,6 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): def __init__(self) -> None: """Initialize the UniFi Network flow.""" self.config: dict[str, Any] = {} - self.reauth_config_entry: ConfigEntry | None = None self.reauth_schema: dict[vol.Marker, Any] = {} async def async_step_user( @@ -118,13 +117,14 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): else: if ( - self.reauth_config_entry - and self.reauth_config_entry.unique_id is not None - and self.reauth_config_entry.unique_id in self.sites - ): - return await self.async_step_site( - {CONF_SITE_ID: self.reauth_config_entry.unique_id} + self.source == SOURCE_REAUTH + and ( + (reauth_unique_id := self._get_reauth_entry().unique_id) + is not None ) + and reauth_unique_id in self.sites + ): + return await self.async_step_site({CONF_SITE_ID: reauth_unique_id}) return await self.async_step_site() @@ -160,8 +160,8 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): config_entry = await self.async_set_unique_id(unique_id) abort_reason = "configuration_updated" - if self.reauth_config_entry: - config_entry = self.reauth_config_entry + if self.source == SOURCE_REAUTH: + config_entry = self._get_reauth_entry() abort_reason = "reauth_successful" if config_entry: @@ -192,24 +192,20 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Trigger a 
reauthentication flow.""" - config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert config_entry - self.reauth_config_entry = config_entry + reauth_entry = self._get_reauth_entry() self.context["title_placeholders"] = { - CONF_HOST: config_entry.data[CONF_HOST], - CONF_SITE_ID: config_entry.title, + CONF_HOST: reauth_entry.data[CONF_HOST], + CONF_SITE_ID: reauth_entry.title, } self.reauth_schema = { - vol.Required(CONF_HOST, default=config_entry.data[CONF_HOST]): str, - vol.Required(CONF_USERNAME, default=config_entry.data[CONF_USERNAME]): str, + vol.Required(CONF_HOST, default=reauth_entry.data[CONF_HOST]): str, + vol.Required(CONF_USERNAME, default=reauth_entry.data[CONF_USERNAME]): str, vol.Required(CONF_PASSWORD): str, - vol.Required(CONF_PORT, default=config_entry.data[CONF_PORT]): int, + vol.Required(CONF_PORT, default=reauth_entry.data[CONF_PORT]): int, vol.Required( - CONF_VERIFY_SSL, default=config_entry.data[CONF_VERIFY_SSL] + CONF_VERIFY_SSL, default=reauth_entry.data[CONF_VERIFY_SSL] ): bool, } @@ -253,7 +249,6 @@ class UnifiOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: UnifiConfigEntry) -> None: """Initialize UniFi Network options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) async def async_step_init( diff --git a/homeassistant/components/unifi/device_tracker.py b/homeassistant/components/unifi/device_tracker.py index aae1194b70d..735f76a73bf 100644 --- a/homeassistant/components/unifi/device_tracker.py +++ b/homeassistant/components/unifi/device_tracker.py @@ -5,7 +5,6 @@ from __future__ import annotations from collections.abc import Callable, Mapping from dataclasses import dataclass from datetime import timedelta -from functools import cached_property import logging from typing import Any @@ -17,8 +16,13 @@ from aiounifi.models.api import ApiItemT from aiounifi.models.client import Client from aiounifi.models.device import Device from aiounifi.models.event import Event, EventKey +from propcache import cached_property -from homeassistant.components.device_tracker import DOMAIN, ScannerEntity, SourceType +from homeassistant.components.device_tracker import ( + DOMAIN as DEVICE_TRACKER_DOMAIN, + ScannerEntity, + ScannerEntityDescription, +) from homeassistant.core import Event as core_Event, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -138,7 +142,9 @@ def async_device_heartbeat_timedelta_fn(hub: UnifiHub, obj_id: str) -> timedelta @dataclass(frozen=True, kw_only=True) -class UnifiTrackerEntityDescription(UnifiEntityDescription[HandlerT, ApiItemT]): +class UnifiTrackerEntityDescription( + UnifiEntityDescription[HandlerT, ApiItemT], ScannerEntityDescription +): """Class describing UniFi device tracker entity.""" heartbeat_timedelta_fn: Callable[[UnifiHub, str], timedelta] @@ -198,11 +204,15 @@ def async_update_unique_id(hass: HomeAssistant, config_entry: UnifiConfigEntry) def update_unique_id(obj_id: str) -> None: """Rework unique ID.""" new_unique_id = f"{hub.site}-{obj_id}" - if ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, new_unique_id): + if ent_reg.async_get_entity_id( + DEVICE_TRACKER_DOMAIN, UNIFI_DOMAIN, new_unique_id + ): return unique_id = f"{obj_id}-{hub.site}" - if entity_id := ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, unique_id): + if entity_id := ent_reg.async_get_entity_id( + DEVICE_TRACKER_DOMAIN, UNIFI_DOMAIN, unique_id + ): 
ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) for obj_id in list(hub.api.clients) + list(hub.api.clients_all): @@ -267,11 +277,6 @@ class UnifiScannerEntity(UnifiEntity[HandlerT, ApiItemT], ScannerEntity): """Return the mac address of the device.""" return self._obj_id - @cached_property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.ROUTER - @cached_property def unique_id(self) -> str: """Return a unique ID.""" diff --git a/homeassistant/components/unifi/hub/__init__.py b/homeassistant/components/unifi/hub/__init__.py index b8ed15d46f4..dc307206d79 100644 --- a/homeassistant/components/unifi/hub/__init__.py +++ b/homeassistant/components/unifi/hub/__init__.py @@ -1,4 +1,6 @@ """Internal functionality not part of HA infrastructure.""" -from .api import get_unifi_api # noqa: F401 -from .hub import UnifiHub # noqa: F401 +from .api import get_unifi_api +from .hub import UnifiHub + +__all__ = ["UnifiHub", "get_unifi_api"] diff --git a/homeassistant/components/unifi/icons.json b/homeassistant/components/unifi/icons.json index 2d5017a3187..76990c1c4a1 100644 --- a/homeassistant/components/unifi/icons.json +++ b/homeassistant/components/unifi/icons.json @@ -1,6 +1,86 @@ { + "entity": { + "button": { + "wlan_regenerate_password": { + "default": "mdi:form-textbox-password" + } + }, + "image": { + "wlan_qr_code": { + "default": "mdi:qrcode" + } + }, + "sensor": { + "client_bandwidth_rx": { + "default": "mdi:download" + }, + "client_bandwidth_tx": { + "default": "mdi:upload" + }, + "port_bandwidth_rx": { + "default": "mdi:download" + }, + "port_bandwidth_tx": { + "default": "mdi:upload" + }, + "wlan_clients": { + "default": "mdi:account-multiple" + }, + "device_clients": { + "default": "mdi:account-multiple" + }, + "device_uplink_mac": { + "default": "mdi:ethernet" + }, + "device_state": { + "default": "mdi:lan-connect" + }, + "device_cpu_utilization": { + "default": "mdi:chip" + }, + "device_memory_utilization": { + "default": "mdi:memory" + } + }, + "switch": { + "block_client": { + "default": "mdi:ethernet", + "state": { + "off": "mdi:ethernet-off" + } + }, + "dpi_restriction": { + "default": "mdi:network", + "state": { + "off": "mdi:network-off" + } + }, + "port_forward_control": { + "default": "mdi:upload-network" + }, + "traffic_rule_control": { + "default": "mdi:security-network" + }, + "poe_port_control": { + "default": "mdi:ethernet", + "state": { + "off": "mdi:ethernet-off" + } + }, + "wlan_control": { + "default": "mdi:wifi-check", + "state": { + "off": "mdi:wifi-off" + } + } + } + }, "services": { - "reconnect_client": "mdi:sync", - "remove_clients": "mdi:delete" + "reconnect_client": { + "service": "mdi:sync" + }, + "remove_clients": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/unifi/image.py b/homeassistant/components/unifi/image.py index 426f2ce2884..1f54f56b194 100644 --- a/homeassistant/components/unifi/image.py +++ b/homeassistant/components/unifi/image.py @@ -49,6 +49,7 @@ class UnifiImageEntityDescription( ENTITY_DESCRIPTIONS: tuple[UnifiImageEntityDescription, ...] 
= ( UnifiImageEntityDescription[Wlans, Wlan]( key="WLAN QR Code", + translation_key="wlan_qr_code", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, api_handler_fn=lambda api: api.wlans, diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index 6f92dec5361..ce573592153 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -7,8 +7,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["aiounifi"], - "quality_scale": "platinum", - "requirements": ["aiounifi==80"], + "requirements": ["aiounifi==81"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 697df00fe55..74d49db6e4e 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -377,11 +377,11 @@ class UnifiSensorEntityDescription( ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( UnifiSensorEntityDescription[Clients, Client]( key="Bandwidth sensor RX", + translation_key="client_bandwidth_rx", device_class=SensorDeviceClass.DATA_RATE, entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfDataRate.MEGABYTES_PER_SECOND, - icon="mdi:upload", allowed_fn=async_bandwidth_sensor_allowed_fn, api_handler_fn=lambda api: api.clients, device_info_fn=async_client_device_info_fn, @@ -394,11 +394,11 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), UnifiSensorEntityDescription[Clients, Client]( key="Bandwidth sensor TX", + translation_key="client_bandwidth_tx", device_class=SensorDeviceClass.DATA_RATE, entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfDataRate.MEGABYTES_PER_SECOND, - icon="mdi:download", allowed_fn=async_bandwidth_sensor_allowed_fn, api_handler_fn=lambda api: api.clients, device_info_fn=async_client_device_info_fn, @@ -427,13 +427,13 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), UnifiSensorEntityDescription[Ports, Port]( key="Port Bandwidth sensor RX", + translation_key="port_bandwidth_rx", device_class=SensorDeviceClass.DATA_RATE, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND, - icon="mdi:download", allowed_fn=lambda hub, _: hub.config.option_allow_bandwidth_sensors, api_handler_fn=lambda api: api.ports, available_fn=async_device_available_fn, @@ -445,13 +445,13 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), UnifiSensorEntityDescription[Ports, Port]( key="Port Bandwidth sensor TX", + translation_key="port_bandwidth_tx", device_class=SensorDeviceClass.DATA_RATE, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND, - icon="mdi:upload", allowed_fn=lambda hub, _: hub.config.option_allow_bandwidth_sensors, api_handler_fn=lambda api: api.ports, available_fn=async_device_available_fn, @@ -478,6 +478,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] 
= ( ), UnifiSensorEntityDescription[Wlans, Wlan]( key="WLAN clients", + translation_key="wlan_clients", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, api_handler_fn=lambda api: api.wlans, @@ -490,6 +491,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), UnifiSensorEntityDescription[Devices, Device]( key="Device clients", + translation_key="device_clients", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -579,6 +581,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), UnifiSensorEntityDescription[Devices, Device]( key="Device Uplink MAC", + translation_key="device_uplink_mac", entity_category=EntityCategory.DIAGNOSTIC, api_handler_fn=lambda api: api.devices, available_fn=async_device_available_fn, @@ -592,6 +595,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), UnifiSensorEntityDescription[Devices, Device]( key="Device State", + translation_key="device_state", device_class=SensorDeviceClass.ENUM, entity_category=EntityCategory.DIAGNOSTIC, api_handler_fn=lambda api: api.devices, @@ -605,6 +609,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), UnifiSensorEntityDescription[Devices, Device]( key="Device CPU utilization", + translation_key="device_cpu_utilization", entity_category=EntityCategory.DIAGNOSTIC, native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, @@ -619,6 +624,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), UnifiSensorEntityDescription[Devices, Device]( key="Device memory utilization", + translation_key="device_memory_utilization", entity_category=EntityCategory.DIAGNOSTIC, native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, diff --git a/homeassistant/components/unifi/strings.json b/homeassistant/components/unifi/strings.json index ba426c2f08a..1c7317c4267 100644 --- a/homeassistant/components/unifi/strings.json +++ b/homeassistant/components/unifi/strings.json @@ -2,6 +2,11 @@ "config": { "flow_title": "{site} ({host})", "step": { + "site": { + "data": { + "site": "Site ID" + } + }, "user": { "title": "Set up UniFi Network", "data": { diff --git a/homeassistant/components/unifi/switch.py b/homeassistant/components/unifi/switch.py index 93a0c81a24e..01843a8a95b 100644 --- a/homeassistant/components/unifi/switch.py +++ b/homeassistant/components/unifi/switch.py @@ -35,7 +35,7 @@ from aiounifi.models.traffic_rule import TrafficRule, TrafficRuleEnableRequest from aiounifi.models.wlan import Wlan, WlanEnableRequest from homeassistant.components.switch import ( - DOMAIN, + DOMAIN as SWITCH_DOMAIN, SwitchDeviceClass, SwitchEntity, SwitchEntityDescription, @@ -88,7 +88,7 @@ def async_dpi_group_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo: """Create device registry entry for DPI group.""" return DeviceInfo( entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, f"unifi_controller_{obj_id}")}, + identifiers={(SWITCH_DOMAIN, f"unifi_controller_{obj_id}")}, manufacturer=ATTR_MANUFACTURER, model="UniFi Network", name="UniFi Network", @@ -102,7 +102,7 @@ def async_unifi_network_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo assert unique_id is not None return DeviceInfo( entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, unique_id)}, + identifiers={(SWITCH_DOMAIN, unique_id)}, manufacturer=ATTR_MANUFACTURER, model="UniFi Network", name="UniFi Network", @@ -194,9 +194,9 @@ 
class UnifiSwitchEntityDescription( ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] = ( UnifiSwitchEntityDescription[Clients, Client]( key="Block client", + translation_key="block_client", device_class=SwitchDeviceClass.SWITCH, entity_category=EntityCategory.CONFIG, - icon="mdi:ethernet", allowed_fn=async_block_client_allowed_fn, api_handler_fn=lambda api: api.clients, control_fn=async_block_client_control_fn, @@ -210,9 +210,9 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] = ( ), UnifiSwitchEntityDescription[DPIRestrictionGroups, DPIRestrictionGroup]( key="DPI restriction", + translation_key="dpi_restriction", has_entity_name=False, entity_category=EntityCategory.CONFIG, - icon="mdi:network", allowed_fn=lambda hub, obj_id: hub.config.option_dpi_restrictions, api_handler_fn=lambda api: api.dpi_groups, control_fn=async_dpi_group_control_fn, @@ -239,9 +239,9 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] = ( ), UnifiSwitchEntityDescription[PortForwarding, PortForward]( key="Port forward control", + translation_key="port_forward_control", device_class=SwitchDeviceClass.SWITCH, entity_category=EntityCategory.CONFIG, - icon="mdi:upload-network", api_handler_fn=lambda api: api.port_forwarding, control_fn=async_port_forward_control_fn, device_info_fn=async_unifi_network_device_info_fn, @@ -252,9 +252,9 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] = ( ), UnifiSwitchEntityDescription[TrafficRules, TrafficRule]( key="Traffic rule control", + translation_key="traffic_rule_control", device_class=SwitchDeviceClass.SWITCH, entity_category=EntityCategory.CONFIG, - icon="mdi:security-network", api_handler_fn=lambda api: api.traffic_rules, control_fn=async_traffic_rule_control_fn, device_info_fn=async_unifi_network_device_info_fn, @@ -265,10 +265,10 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] = ( ), UnifiSwitchEntityDescription[Ports, Port]( key="PoE port control", + translation_key="poe_port_control", device_class=SwitchDeviceClass.OUTLET, entity_category=EntityCategory.CONFIG, entity_registry_enabled_default=False, - icon="mdi:ethernet", api_handler_fn=lambda api: api.ports, available_fn=async_device_available_fn, control_fn=async_poe_port_control_fn, @@ -281,9 +281,9 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] 
= ( ), UnifiSwitchEntityDescription[Wlans, Wlan]( key="WLAN control", + translation_key="wlan_control", device_class=SwitchDeviceClass.SWITCH, entity_category=EntityCategory.CONFIG, - icon="mdi:wifi-check", api_handler_fn=lambda api: api.wlans, control_fn=async_wlan_control_fn, device_info_fn=async_wlan_device_info_fn, @@ -307,12 +307,14 @@ def async_update_unique_id(hass: HomeAssistant, config_entry: UnifiConfigEntry) def update_unique_id(obj_id: str, type_name: str) -> None: """Rework unique ID.""" new_unique_id = f"{type_name}-{obj_id}" - if ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, new_unique_id): + if ent_reg.async_get_entity_id(SWITCH_DOMAIN, UNIFI_DOMAIN, new_unique_id): return prefix, _, suffix = obj_id.partition("_") unique_id = f"{prefix}-{type_name}-{suffix}" - if entity_id := ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, unique_id): + if entity_id := ent_reg.async_get_entity_id( + SWITCH_DOMAIN, UNIFI_DOMAIN, unique_id + ): ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) for obj_id in hub.api.outlets: diff --git a/homeassistant/components/unifi_direct/device_tracker.py b/homeassistant/components/unifi_direct/device_tracker.py index c2cb9eba632..d5e2e926114 100644 --- a/homeassistant/components/unifi_direct/device_tracker.py +++ b/homeassistant/components/unifi_direct/device_tracker.py @@ -9,7 +9,7 @@ from unifi_ap import UniFiAP, UniFiAPConnectionException, UniFiAPDataException import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -34,7 +34,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> UnifiDeviceScanner | None: """Validate the configuration and return a Unifi direct scanner.""" - scanner = UnifiDeviceScanner(config[DOMAIN]) + scanner = UnifiDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.update_clients() else None @@ -67,11 +67,11 @@ class UnifiDeviceScanner(DeviceScanner): """Update the client info from AP.""" try: self.clients = self.ap.get_clients() - except UniFiAPConnectionException: - _LOGGER.error("Failed to connect to accesspoint") + except UniFiAPConnectionException as e: + _LOGGER.error("Failed to connect to accesspoint: %s", str(e)) return False - except UniFiAPDataException: - _LOGGER.error("Failed to get proper response from accesspoint") + except UniFiAPDataException as e: + _LOGGER.error("Failed to get proper response from accesspoint: %s", str(e)) return False return True diff --git a/homeassistant/components/unifi_direct/manifest.json b/homeassistant/components/unifi_direct/manifest.json index 8ca8ef27bb2..aa696985dbe 100644 --- a/homeassistant/components/unifi_direct/manifest.json +++ b/homeassistant/components/unifi_direct/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/unifi_direct", "iot_class": "local_polling", "loggers": ["unifi_ap"], - "requirements": ["unifi_ap==0.0.1"] + "quality_scale": "legacy", + "requirements": ["unifi_ap==0.0.2"] } diff --git a/homeassistant/components/unifiled/manifest.json b/homeassistant/components/unifiled/manifest.json index c75efb2053b..a2179c76fd9 100644 --- a/homeassistant/components/unifiled/manifest.json +++ b/homeassistant/components/unifiled/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/unifiled", "iot_class": "local_polling", "loggers": ["unifiled"], + 
"quality_scale": "legacy", "requirements": ["unifiled==0.11"] } diff --git a/homeassistant/components/unifiprotect/__init__.py b/homeassistant/components/unifiprotect/__init__.py index 394a7f43329..ed409a6eea0 100644 --- a/homeassistant/components/unifiprotect/__init__.py +++ b/homeassistant/components/unifiprotect/__init__.py @@ -45,7 +45,7 @@ from .utils import ( async_create_api_client, async_get_devices, ) -from .views import ThumbnailProxyView, VideoProxyView +from .views import ThumbnailProxyView, VideoEventProxyView, VideoProxyView _LOGGER = logging.getLogger(__name__) @@ -174,6 +174,7 @@ async def _async_setup_entry( await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) hass.http.register_view(ThumbnailProxyView(hass)) hass.http.register_view(VideoProxyView(hass)) + hass.http.register_view(VideoEventProxyView(hass)) async def _async_options_updated(hass: HomeAssistant, entry: UFPConfigEntry) -> None: diff --git a/homeassistant/components/unifiprotect/binary_sensor.py b/homeassistant/components/unifiprotect/binary_sensor.py index 82b2deeae56..a88d4b65678 100644 --- a/homeassistant/components/unifiprotect/binary_sensor.py +++ b/homeassistant/components/unifiprotect/binary_sensor.py @@ -29,12 +29,14 @@ from .data import ProtectData, ProtectDeviceType, UFPConfigEntry from .entity import ( BaseProtectEntity, EventEntityMixin, + PermRequired, ProtectDeviceEntity, + ProtectEntityDescription, + ProtectEventMixin, ProtectIsOnEntity, ProtectNVREntity, async_all_device_entities, ) -from .models import PermRequired, ProtectEntityDescription, ProtectEventMixin _KEY_DOOR = "door" diff --git a/homeassistant/components/unifiprotect/button.py b/homeassistant/components/unifiprotect/button.py index 79985b9c7b2..b24c90be3ec 100644 --- a/homeassistant/components/unifiprotect/button.py +++ b/homeassistant/components/unifiprotect/button.py @@ -23,8 +23,14 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICES_THAT_ADOPT, DOMAIN from .data import ProtectDeviceType, UFPConfigEntry -from .entity import ProtectDeviceEntity, async_all_device_entities -from .models import PermRequired, ProtectEntityDescription, ProtectSetableKeysMixin, T +from .entity import ( + PermRequired, + ProtectDeviceEntity, + ProtectEntityDescription, + ProtectSetableKeysMixin, + T, + async_all_device_entities, +) _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/unifiprotect/camera.py b/homeassistant/components/unifiprotect/camera.py index 62c35d00171..0b1c03b8dd6 100644 --- a/homeassistant/components/unifiprotect/camera.py +++ b/homeassistant/components/unifiprotect/camera.py @@ -90,7 +90,7 @@ def _get_camera_channels( is_default = False # no RTSP enabled use first channel with no stream - if is_default: + if is_default and not camera.is_third_party_camera: _create_rtsp_repair(hass, entry, data, camera) yield camera, camera.channels[0], True else: @@ -156,7 +156,8 @@ async def async_setup_entry( async_add_entities(_async_camera_entities(hass, entry, data)) -_EMPTY_CAMERA_FEATURES = CameraEntityFeature(0) +_DISABLE_FEATURE = CameraEntityFeature(0) +_ENABLE_FEATURE = CameraEntityFeature.STREAM class ProtectCamera(ProtectDeviceEntity, Camera): @@ -195,24 +196,22 @@ class ProtectCamera(ProtectDeviceEntity, Camera): self._attr_name = f"{camera_name} (insecure)" # only the default (first) channel is enabled by default self._attr_entity_registry_enabled_default = is_default and secure + # Set the stream source before finishing the init + # because 
async_added_to_hass is too late and camera + # integration uses async_internal_added_to_hass to access + # the stream source which is called before async_added_to_hass + self._async_set_stream_source() @callback def _async_set_stream_source(self) -> None: - disable_stream = self._disable_stream channel = self.channel - - if not channel.is_rtsp_enabled: - disable_stream = False - - rtsp_url = channel.rtsps_url if self._secure else channel.rtsp_url - - # _async_set_stream_source called by __init__ - # pylint: disable-next=attribute-defined-outside-init - self._stream_source = None if disable_stream else rtsp_url - if self._stream_source: - self._attr_supported_features = CameraEntityFeature.STREAM - else: - self._attr_supported_features = _EMPTY_CAMERA_FEATURES + enable_stream = not self._disable_stream and channel.is_rtsp_enabled + # SRTP disabled because go2rtc does not support it + # https://github.com/AlexxIT/go2rtc/#source-rtsp + rtsp_url = channel.rtsps_no_srtp_url if self._secure else channel.rtsp_url + source = rtsp_url if enable_stream else None + self._attr_supported_features = _ENABLE_FEATURE if source else _DISABLE_FEATURE + self._stream_source = source @callback def _async_update_device_from_protect(self, device: ProtectDeviceType) -> None: diff --git a/homeassistant/components/unifiprotect/config_flow.py b/homeassistant/components/unifiprotect/config_flow.py index 284b7003485..31950f8f7e4 100644 --- a/homeassistant/components/unifiprotect/config_flow.py +++ b/homeassistant/components/unifiprotect/config_flow.py @@ -104,7 +104,6 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Init the config flow.""" super().__init__() - self.entry: ConfigEntry | None = None self._discovered_device: dict[str, str] = {} async def async_step_dhcp( @@ -226,7 +225,7 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() @callback def _async_create_entry(self, title: str, data: dict[str, Any]) -> ConfigFlowResult: @@ -295,8 +294,6 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -304,21 +301,21 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm reauth.""" errors: dict[str, str] = {} - assert self.entry is not None # prepopulate fields - form_data = {**self.entry.data} + reauth_entry = self._get_reauth_entry() + form_data = {**reauth_entry.data} if user_input is not None: form_data.update(user_input) # validate login data _, errors = await self._async_get_nvr_data(form_data) if not errors: - return self.async_update_reload_and_abort(self.entry, data=form_data) + return self.async_update_reload_and_abort(reauth_entry, data=form_data) self.context["title_placeholders"] = { - "name": self.entry.title, - "ip_address": self.entry.data[CONF_HOST], + "name": reauth_entry.title, + "ip_address": reauth_entry.data[CONF_HOST], } return self.async_show_form( step_id="reauth_confirm", @@ -379,10 +376,6 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize 
options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/unifiprotect/const.py b/homeassistant/components/unifiprotect/const.py index ad251ba6153..7d1e5b55d3f 100644 --- a/homeassistant/components/unifiprotect/const.py +++ b/homeassistant/components/unifiprotect/const.py @@ -1,5 +1,7 @@ """Constant definitions for UniFi Protect Integration.""" +from typing import Final + from uiprotect.data import ModelType, Version from homeassistant.const import Platform @@ -75,3 +77,8 @@ PLATFORMS = [ DISPATCH_ADD = "add_device" DISPATCH_ADOPT = "adopt_device" DISPATCH_CHANNELS = "new_camera_channels" + +EVENT_TYPE_FINGERPRINT_IDENTIFIED: Final = "identified" +EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED: Final = "not_identified" +EVENT_TYPE_NFC_SCANNED: Final = "scanned" +EVENT_TYPE_DOORBELL_RING: Final = "ring" diff --git a/homeassistant/components/unifiprotect/data.py b/homeassistant/components/unifiprotect/data.py index b8e47e0e0f1..baecc7f8323 100644 --- a/homeassistant/components/unifiprotect/data.py +++ b/homeassistant/components/unifiprotect/data.py @@ -164,7 +164,7 @@ class ProtectData: self._auth_failures = 0 if not was_success: - _LOGGER.info("%s: Connection restored", self._entry.title) + _LOGGER.warning("%s: Connection restored", self._entry.title) self._async_process_updates() elif force_update: self._async_process_updates() @@ -349,6 +349,7 @@ def async_ufp_instance_for_config_entry_ids( entry.runtime_data.api for entry_id in config_entry_ids if (entry := hass.config_entries.async_get_entry(entry_id)) + and entry.domain == DOMAIN and hasattr(entry, "runtime_data") ), None, diff --git a/homeassistant/components/unifiprotect/entity.py b/homeassistant/components/unifiprotect/entity.py index 17b9f7c4fe9..1d68b18f1de 100644 --- a/homeassistant/components/unifiprotect/entity.py +++ b/homeassistant/components/unifiprotect/entity.py @@ -2,14 +2,24 @@ from __future__ import annotations -from collections.abc import Callable, Sequence +from collections.abc import Callable, Coroutine, Sequence +from dataclasses import dataclass from datetime import datetime +from enum import Enum from functools import partial import logging from operator import attrgetter -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Generic, TypeVar -from uiprotect.data import NVR, Event, ModelType, ProtectAdoptableDeviceModel, StateType +from uiprotect import make_enabled_getter, make_required_getter, make_value_getter +from uiprotect.data import ( + NVR, + Event, + ModelType, + ProtectAdoptableDeviceModel, + SmartDetectObjectType, + StateType, +) from homeassistant.core import callback import homeassistant.helpers.device_registry as dr @@ -24,10 +34,19 @@ from .const import ( DOMAIN, ) from .data import ProtectData, ProtectDeviceType -from .models import PermRequired, ProtectEntityDescription, ProtectEventMixin _LOGGER = logging.getLogger(__name__) +T = TypeVar("T", bound=ProtectAdoptableDeviceModel | NVR) + + +class PermRequired(int, Enum): + """Type of permission level required for entity.""" + + NO_WRITE = 1 + WRITE = 2 + DELETE = 3 + @callback def _async_device_entities( @@ -278,7 +297,8 @@ class ProtectDeviceEntity(BaseProtectEntity): self._attr_device_info = DeviceInfo( name=self.device.display_name, manufacturer=DEFAULT_BRAND, - model=self.device.type, + model=self.device.market_name or self.device.type, + model_id=self.device.type, via_device=(DOMAIN, 
self.data.api.bootstrap.nvr.mac), sw_version=self.device.firmware_version, connections={(dr.CONNECTION_NETWORK_MAC, self.device.mac)}, @@ -351,3 +371,82 @@ class EventEntityMixin(ProtectDeviceEntity): and prev_event_end and prev_event.id == event.id ) + + +@dataclass(frozen=True, kw_only=True) +class ProtectEntityDescription(EntityDescription, Generic[T]): + """Base class for protect entity descriptions.""" + + ufp_required_field: str | None = None + ufp_value: str | None = None + ufp_value_fn: Callable[[T], Any] | None = None + ufp_enabled: str | None = None + ufp_perm: PermRequired | None = None + + # The below are set in __post_init__ + has_required: Callable[[T], bool] = bool + get_ufp_enabled: Callable[[T], bool] | None = None + + def get_ufp_value(self, obj: T) -> Any: + """Return value from UniFi Protect device; overridden in __post_init__.""" + # ufp_value or ufp_value_fn are required, the + # RuntimeError is to catch any issues in the code + # with new descriptions. + raise RuntimeError( # pragma: no cover + f"`ufp_value` or `ufp_value_fn` is required for {self}" + ) + + def __post_init__(self) -> None: + """Override get_ufp_value, has_required, and get_ufp_enabled if required.""" + _setter = partial(object.__setattr__, self) + + if (ufp_value := self.ufp_value) is not None: + _setter("get_ufp_value", make_value_getter(ufp_value)) + elif (ufp_value_fn := self.ufp_value_fn) is not None: + _setter("get_ufp_value", ufp_value_fn) + + if (ufp_enabled := self.ufp_enabled) is not None: + _setter("get_ufp_enabled", make_enabled_getter(ufp_enabled)) + + if (ufp_required_field := self.ufp_required_field) is not None: + _setter("has_required", make_required_getter(ufp_required_field)) + + +@dataclass(frozen=True, kw_only=True) +class ProtectEventMixin(ProtectEntityDescription[T]): + """Mixin for events.""" + + ufp_event_obj: str | None = None + ufp_obj_type: SmartDetectObjectType | None = None + + def get_event_obj(self, obj: T) -> Event | None: + """Return value from UniFi Protect device.""" + return None + + def has_matching_smart(self, event: Event) -> bool: + """Determine if the detection type is a match.""" + return ( + not (obj_type := self.ufp_obj_type) or obj_type in event.smart_detect_types + ) + + def __post_init__(self) -> None: + """Override get_event_obj if ufp_event_obj is set.""" + if (_ufp_event_obj := self.ufp_event_obj) is not None: + object.__setattr__(self, "get_event_obj", attrgetter(_ufp_event_obj)) + super().__post_init__() + + +@dataclass(frozen=True, kw_only=True) +class ProtectSetableKeysMixin(ProtectEntityDescription[T]): + """Mixin for settable values.""" + + ufp_set_method: str | None = None + ufp_set_method_fn: Callable[[T, Any], Coroutine[Any, Any, None]] | None = None + + async def ufp_set(self, obj: T, value: Any) -> None: + """Set value for UniFi Protect device.""" + _LOGGER.debug("Setting %s to %s for %s", self.name, value, obj.display_name) + if self.ufp_set_method is not None: + await getattr(obj, self.ufp_set_method)(value) + elif self.ufp_set_method_fn is not None: + await self.ufp_set_method_fn(obj, value) diff --git a/homeassistant/components/unifiprotect/event.py b/homeassistant/components/unifiprotect/event.py index c8269e36326..f126920fb18 100644 --- a/homeassistant/components/unifiprotect/event.py +++ b/homeassistant/components/unifiprotect/event.py @@ -14,32 +14,25 @@ from homeassistant.components.event import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from 
.const import ATTR_EVENT_ID +from .const import ( + ATTR_EVENT_ID, + EVENT_TYPE_DOORBELL_RING, + EVENT_TYPE_FINGERPRINT_IDENTIFIED, + EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED, + EVENT_TYPE_NFC_SCANNED, +) from .data import ProtectData, ProtectDeviceType, UFPConfigEntry -from .entity import EventEntityMixin, ProtectDeviceEntity -from .models import ProtectEventMixin +from .entity import EventEntityMixin, ProtectDeviceEntity, ProtectEventMixin @dataclasses.dataclass(frozen=True, kw_only=True) class ProtectEventEntityDescription(ProtectEventMixin, EventEntityDescription): """Describes UniFi Protect event entity.""" - -EVENT_DESCRIPTIONS: tuple[ProtectEventEntityDescription, ...] = ( - ProtectEventEntityDescription( - key="doorbell", - translation_key="doorbell", - name="Doorbell", - device_class=EventDeviceClass.DOORBELL, - icon="mdi:doorbell-video", - ufp_required_field="feature_flags.is_doorbell", - ufp_event_obj="last_ring_event", - event_types=[EventType.RING], - ), -) + entity_class: type[ProtectDeviceEntity] -class ProtectDeviceEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEntity): +class ProtectDeviceRingEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEntity): """A UniFi Protect event entity.""" entity_description: ProtectEventEntityDescription @@ -58,26 +51,128 @@ class ProtectDeviceEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEntit if ( event and not self._event_already_ended(prev_event, prev_event_end) - and (event_types := description.event_types) - and (event_type := event.type) in event_types + and event.type is EventType.RING ): - self._trigger_event(event_type, {ATTR_EVENT_ID: event.id}) + self._trigger_event(EVENT_TYPE_DOORBELL_RING, {ATTR_EVENT_ID: event.id}) self.async_write_ha_state() +class ProtectDeviceNFCEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEntity): + """A UniFi Protect NFC event entity.""" + + entity_description: ProtectEventEntityDescription + + @callback + def _async_update_device_from_protect(self, device: ProtectDeviceType) -> None: + description = self.entity_description + + prev_event = self._event + prev_event_end = self._event_end + super()._async_update_device_from_protect(device) + if event := description.get_event_obj(device): + self._event = event + self._event_end = event.end if event else None + + if ( + event + and not self._event_already_ended(prev_event, prev_event_end) + and event.type is EventType.NFC_CARD_SCANNED + ): + event_data = {ATTR_EVENT_ID: event.id} + if event.metadata and event.metadata.nfc and event.metadata.nfc.nfc_id: + event_data["nfc_id"] = event.metadata.nfc.nfc_id + + self._trigger_event(EVENT_TYPE_NFC_SCANNED, event_data) + self.async_write_ha_state() + + +class ProtectDeviceFingerprintEventEntity( + EventEntityMixin, ProtectDeviceEntity, EventEntity +): + """A UniFi Protect fingerprint event entity.""" + + entity_description: ProtectEventEntityDescription + + @callback + def _async_update_device_from_protect(self, device: ProtectDeviceType) -> None: + description = self.entity_description + + prev_event = self._event + prev_event_end = self._event_end + super()._async_update_device_from_protect(device) + if event := description.get_event_obj(device): + self._event = event + self._event_end = event.end if event else None + + if ( + event + and not self._event_already_ended(prev_event, prev_event_end) + and event.type is EventType.FINGERPRINT_IDENTIFIED + ): + event_data = {ATTR_EVENT_ID: event.id} + if ( + event.metadata + and event.metadata.fingerprint + and 
event.metadata.fingerprint.ulp_id + ): + event_data["ulp_id"] = event.metadata.fingerprint.ulp_id + event_identified = EVENT_TYPE_FINGERPRINT_IDENTIFIED + else: + event_data["ulp_id"] = "" + event_identified = EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED + + self._trigger_event(event_identified, event_data) + self.async_write_ha_state() + + +EVENT_DESCRIPTIONS: tuple[ProtectEventEntityDescription, ...] = ( + ProtectEventEntityDescription( + key="doorbell", + translation_key="doorbell", + device_class=EventDeviceClass.DOORBELL, + icon="mdi:doorbell-video", + ufp_required_field="feature_flags.is_doorbell", + ufp_event_obj="last_ring_event", + event_types=[EVENT_TYPE_DOORBELL_RING], + entity_class=ProtectDeviceRingEventEntity, + ), + ProtectEventEntityDescription( + key="nfc", + translation_key="nfc", + device_class=EventDeviceClass.DOORBELL, + icon="mdi:nfc", + ufp_required_field="feature_flags.support_nfc", + ufp_event_obj="last_nfc_card_scanned_event", + event_types=[EVENT_TYPE_NFC_SCANNED], + entity_class=ProtectDeviceNFCEventEntity, + ), + ProtectEventEntityDescription( + key="fingerprint", + translation_key="fingerprint", + device_class=EventDeviceClass.DOORBELL, + icon="mdi:fingerprint", + ufp_required_field="feature_flags.has_fingerprint_sensor", + ufp_event_obj="last_fingerprint_identified_event", + event_types=[ + EVENT_TYPE_FINGERPRINT_IDENTIFIED, + EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED, + ], + entity_class=ProtectDeviceFingerprintEventEntity, + ), +) + + @callback def _async_event_entities( data: ProtectData, ufp_device: ProtectAdoptableDeviceModel | None = None, ) -> list[ProtectDeviceEntity]: - entities: list[ProtectDeviceEntity] = [] - for device in data.get_cameras() if ufp_device is None else [ufp_device]: - entities.extend( - ProtectDeviceEventEntity(data, device, description) - for description in EVENT_DESCRIPTIONS - if description.has_required(device) - ) - return entities + return [ + description.entity_class(data, device, description) + for device in (data.get_cameras() if ufp_device is None else [ufp_device]) + for description in EVENT_DESCRIPTIONS + if description.has_required(device) + ] async def async_setup_entry( diff --git a/homeassistant/components/unifiprotect/icons.json b/homeassistant/components/unifiprotect/icons.json index bb713d4ee79..5e80e3095b3 100644 --- a/homeassistant/components/unifiprotect/icons.json +++ b/homeassistant/components/unifiprotect/icons.json @@ -1,8 +1,16 @@ { "services": { - "add_doorbell_text": "mdi:message-plus", - "remove_doorbell_text": "mdi:message-minus", - "set_chime_paired_doorbells": "mdi:bell-cog", - "remove_privacy_zone": "mdi:eye-minus" + "add_doorbell_text": { + "service": "mdi:message-plus" + }, + "remove_doorbell_text": { + "service": "mdi:message-minus" + }, + "set_chime_paired_doorbells": { + "service": "mdi:bell-cog" + }, + "remove_privacy_zone": { + "service": "mdi:eye-minus" + } } } diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 4483a5990eb..81ef72ec50d 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -1,7 +1,7 @@ { "domain": "unifiprotect", "name": "UniFi Protect", - "codeowners": [], + "codeowners": ["@RaHehl"], "config_flow": true, "dependencies": ["http", "repairs"], "dhcp": [ @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.0.2", "unifi-discovery==1.2.0"], + 
"requirements": ["uiprotect==7.0.2", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifiprotect/media_source.py b/homeassistant/components/unifiprotect/media_source.py index a646c037d62..1e36b59d641 100644 --- a/homeassistant/components/unifiprotect/media_source.py +++ b/homeassistant/components/unifiprotect/media_source.py @@ -14,7 +14,7 @@ from yarl import URL from homeassistant.components.camera import CameraImageView from homeassistant.components.media_player import BrowseError, MediaClass -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, MediaSourceItem, diff --git a/homeassistant/components/unifiprotect/models.py b/homeassistant/components/unifiprotect/models.py deleted file mode 100644 index 23106a4e5d7..00000000000 --- a/homeassistant/components/unifiprotect/models.py +++ /dev/null @@ -1,112 +0,0 @@ -"""The unifiprotect integration models.""" - -from __future__ import annotations - -from collections.abc import Callable, Coroutine -from dataclasses import dataclass -from enum import Enum -from functools import partial -import logging -from operator import attrgetter -from typing import Any, Generic, TypeVar - -from uiprotect import make_enabled_getter, make_required_getter, make_value_getter -from uiprotect.data import ( - NVR, - Event, - ProtectAdoptableDeviceModel, - SmartDetectObjectType, -) - -from homeassistant.helpers.entity import EntityDescription - -_LOGGER = logging.getLogger(__name__) - -T = TypeVar("T", bound=ProtectAdoptableDeviceModel | NVR) - - -class PermRequired(int, Enum): - """Type of permission level required for entity.""" - - NO_WRITE = 1 - WRITE = 2 - DELETE = 3 - - -@dataclass(frozen=True, kw_only=True) -class ProtectEntityDescription(EntityDescription, Generic[T]): - """Base class for protect entity descriptions.""" - - ufp_required_field: str | None = None - ufp_value: str | None = None - ufp_value_fn: Callable[[T], Any] | None = None - ufp_enabled: str | None = None - ufp_perm: PermRequired | None = None - - # The below are set in __post_init__ - has_required: Callable[[T], bool] = bool - get_ufp_enabled: Callable[[T], bool] | None = None - - def get_ufp_value(self, obj: T) -> Any: - """Return value from UniFi Protect device; overridden in __post_init__.""" - # ufp_value or ufp_value_fn are required, the - # RuntimeError is to catch any issues in the code - # with new descriptions. 
- raise RuntimeError( # pragma: no cover - f"`ufp_value` or `ufp_value_fn` is required for {self}" - ) - - def __post_init__(self) -> None: - """Override get_ufp_value, has_required, and get_ufp_enabled if required.""" - _setter = partial(object.__setattr__, self) - - if (ufp_value := self.ufp_value) is not None: - _setter("get_ufp_value", make_value_getter(ufp_value)) - elif (ufp_value_fn := self.ufp_value_fn) is not None: - _setter("get_ufp_value", ufp_value_fn) - - if (ufp_enabled := self.ufp_enabled) is not None: - _setter("get_ufp_enabled", make_enabled_getter(ufp_enabled)) - - if (ufp_required_field := self.ufp_required_field) is not None: - _setter("has_required", make_required_getter(ufp_required_field)) - - -@dataclass(frozen=True, kw_only=True) -class ProtectEventMixin(ProtectEntityDescription[T]): - """Mixin for events.""" - - ufp_event_obj: str | None = None - ufp_obj_type: SmartDetectObjectType | None = None - - def get_event_obj(self, obj: T) -> Event | None: - """Return value from UniFi Protect device.""" - return None - - def has_matching_smart(self, event: Event) -> bool: - """Determine if the detection type is a match.""" - return ( - not (obj_type := self.ufp_obj_type) or obj_type in event.smart_detect_types - ) - - def __post_init__(self) -> None: - """Override get_event_obj if ufp_event_obj is set.""" - if (_ufp_event_obj := self.ufp_event_obj) is not None: - object.__setattr__(self, "get_event_obj", attrgetter(_ufp_event_obj)) - super().__post_init__() - - -@dataclass(frozen=True, kw_only=True) -class ProtectSetableKeysMixin(ProtectEntityDescription[T]): - """Mixin for settable values.""" - - ufp_set_method: str | None = None - ufp_set_method_fn: Callable[[T, Any], Coroutine[Any, Any, None]] | None = None - - async def ufp_set(self, obj: T, value: Any) -> None: - """Set value for UniFi Protect device.""" - _LOGGER.debug("Setting %s to %s for %s", self.name, value, obj.display_name) - if self.ufp_set_method is not None: - await getattr(obj, self.ufp_set_method)(value) - elif self.ufp_set_method_fn is not None: - await self.ufp_set_method_fn(obj, value) diff --git a/homeassistant/components/unifiprotect/number.py b/homeassistant/components/unifiprotect/number.py index 2de3ef9f2cd..767128337ba 100644 --- a/homeassistant/components/unifiprotect/number.py +++ b/homeassistant/components/unifiprotect/number.py @@ -20,8 +20,14 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from .data import ProtectData, ProtectDeviceType, UFPConfigEntry -from .entity import ProtectDeviceEntity, async_all_device_entities -from .models import PermRequired, ProtectEntityDescription, ProtectSetableKeysMixin, T +from .entity import ( + PermRequired, + ProtectDeviceEntity, + ProtectEntityDescription, + ProtectSetableKeysMixin, + T, + async_all_device_entities, +) @dataclass(frozen=True, kw_only=True) @@ -118,7 +124,7 @@ CAMERA_NUMBERS: tuple[ProtectNumberEntityDescription, ...] 
= ( name="Infrared custom lux trigger", icon="mdi:white-balance-sunny", entity_category=EntityCategory.CONFIG, - ufp_min=1, + ufp_min=0, ufp_max=30, ufp_step=1, ufp_required_field="feature_flags.has_led_ir", diff --git a/homeassistant/components/unifiprotect/select.py b/homeassistant/components/unifiprotect/select.py index e06ae7bfbec..00c277c957e 100644 --- a/homeassistant/components/unifiprotect/select.py +++ b/homeassistant/components/unifiprotect/select.py @@ -33,8 +33,14 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import TYPE_EMPTY_VALUE from .data import ProtectData, ProtectDeviceType, UFPConfigEntry -from .entity import ProtectDeviceEntity, async_all_device_entities -from .models import PermRequired, ProtectEntityDescription, ProtectSetableKeysMixin, T +from .entity import ( + PermRequired, + ProtectDeviceEntity, + ProtectEntityDescription, + ProtectSetableKeysMixin, + T, + async_all_device_entities, +) from .utils import async_get_light_motion_current _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/unifiprotect/sensor.py b/homeassistant/components/unifiprotect/sensor.py index 786c5bd66c8..09187e023a1 100644 --- a/homeassistant/components/unifiprotect/sensor.py +++ b/homeassistant/components/unifiprotect/sensor.py @@ -44,11 +44,14 @@ from .data import ProtectData, ProtectDeviceType, UFPConfigEntry from .entity import ( BaseProtectEntity, EventEntityMixin, + PermRequired, ProtectDeviceEntity, + ProtectEntityDescription, + ProtectEventMixin, ProtectNVREntity, + T, async_all_device_entities, ) -from .models import PermRequired, ProtectEntityDescription, ProtectEventMixin, T from .utils import async_get_light_motion_current _LOGGER = logging.getLogger(__name__) @@ -242,7 +245,7 @@ CAMERA_SENSORS: tuple[ProtectSensorEntityDescription, ...] = ( name="Recording mode", icon="mdi:video-outline", entity_category=EntityCategory.DIAGNOSTIC, - ufp_value="recording_settings.mode", + ufp_value="recording_settings.mode.value", ufp_perm=PermRequired.NO_WRITE, ), ProtectSensorEntityDescription( @@ -251,7 +254,7 @@ CAMERA_SENSORS: tuple[ProtectSensorEntityDescription, ...] 
= ( icon="mdi:circle-opacity", entity_category=EntityCategory.DIAGNOSTIC, ufp_required_field="feature_flags.has_led_ir", - ufp_value="isp_settings.ir_led_mode", + ufp_value="isp_settings.ir_led_mode.value", ufp_perm=PermRequired.NO_WRITE, ), ProtectSensorEntityDescription( diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index 119fe52756c..35713efdf3d 100644 --- a/homeassistant/components/unifiprotect/services.py +++ b/homeassistant/components/unifiprotect/services.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -import functools from typing import Any, cast from pydantic import ValidationError @@ -88,9 +87,9 @@ def _async_get_ufp_instance(hass: HomeAssistant, device_id: str) -> ProtectApiCl @callback -def _async_get_ufp_camera(hass: HomeAssistant, call: ServiceCall) -> Camera: - ref = async_extract_referenced_entity_ids(hass, call) - entity_registry = er.async_get(hass) +def _async_get_ufp_camera(call: ServiceCall) -> Camera: + ref = async_extract_referenced_entity_ids(call.hass, call) + entity_registry = er.async_get(call.hass) entity_id = ref.indirectly_referenced.pop() camera_entity = entity_registry.async_get(entity_id) @@ -98,30 +97,27 @@ def _async_get_ufp_camera(hass: HomeAssistant, call: ServiceCall) -> Camera: assert camera_entity.device_id is not None camera_mac = _async_unique_id_to_mac(camera_entity.unique_id) - instance = _async_get_ufp_instance(hass, camera_entity.device_id) + instance = _async_get_ufp_instance(call.hass, camera_entity.device_id) return cast(Camera, instance.bootstrap.get_device_from_mac(camera_mac)) @callback -def _async_get_protect_from_call( - hass: HomeAssistant, call: ServiceCall -) -> set[ProtectApiClient]: +def _async_get_protect_from_call(call: ServiceCall) -> set[ProtectApiClient]: return { - _async_get_ufp_instance(hass, device_id) + _async_get_ufp_instance(call.hass, device_id) for device_id in async_extract_referenced_entity_ids( - hass, call + call.hass, call ).referenced_devices } async def _async_service_call_nvr( - hass: HomeAssistant, call: ServiceCall, method: str, *args: Any, **kwargs: Any, ) -> None: - instances = _async_get_protect_from_call(hass, call) + instances = _async_get_protect_from_call(call) try: await asyncio.gather( *(getattr(i.bootstrap.nvr, method)(*args, **kwargs) for i in instances) @@ -130,23 +126,23 @@ async def _async_service_call_nvr( raise HomeAssistantError(str(err)) from err -async def add_doorbell_text(hass: HomeAssistant, call: ServiceCall) -> None: +async def add_doorbell_text(call: ServiceCall) -> None: """Add a custom doorbell text message.""" message: str = call.data[ATTR_MESSAGE] - await _async_service_call_nvr(hass, call, "add_custom_doorbell_message", message) + await _async_service_call_nvr(call, "add_custom_doorbell_message", message) -async def remove_doorbell_text(hass: HomeAssistant, call: ServiceCall) -> None: +async def remove_doorbell_text(call: ServiceCall) -> None: """Remove a custom doorbell text message.""" message: str = call.data[ATTR_MESSAGE] - await _async_service_call_nvr(hass, call, "remove_custom_doorbell_message", message) + await _async_service_call_nvr(call, "remove_custom_doorbell_message", message) -async def remove_privacy_zone(hass: HomeAssistant, call: ServiceCall) -> None: +async def remove_privacy_zone(call: ServiceCall) -> None: """Remove privacy zone from camera.""" name: str = call.data[ATTR_NAME] - camera = _async_get_ufp_camera(hass, call) + camera = _async_get_ufp_camera(call) 
remove_index: int | None = None for index, zone in enumerate(camera.privacy_zones): @@ -171,10 +167,10 @@ def _async_unique_id_to_mac(unique_id: str) -> str: return unique_id.split("_")[0] -async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) -> None: +async def set_chime_paired_doorbells(call: ServiceCall) -> None: """Set paired doorbells on chime.""" - ref = async_extract_referenced_entity_ids(hass, call) - entity_registry = er.async_get(hass) + ref = async_extract_referenced_entity_ids(call.hass, call) + entity_registry = er.async_get(call.hass) entity_id = ref.indirectly_referenced.pop() chime_button = entity_registry.async_get(entity_id) @@ -182,13 +178,13 @@ async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) -> assert chime_button.device_id is not None chime_mac = _async_unique_id_to_mac(chime_button.unique_id) - instance = _async_get_ufp_instance(hass, chime_button.device_id) + instance = _async_get_ufp_instance(call.hass, chime_button.device_id) chime = instance.bootstrap.get_device_from_mac(chime_mac) chime = cast(Chime, chime) assert chime is not None call.data = ReadOnlyDict(call.data.get("doorbells") or {}) - doorbell_refs = async_extract_referenced_entity_ids(hass, call) + doorbell_refs = async_extract_referenced_entity_ids(call.hass, call) doorbell_ids: set[str] = set() for camera_id in doorbell_refs.referenced | doorbell_refs.indirectly_referenced: doorbell_sensor = entity_registry.async_get(camera_id) @@ -209,31 +205,32 @@ async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) -> await chime.save_device(data_before_changed) +SERVICES = [ + ( + SERVICE_ADD_DOORBELL_TEXT, + add_doorbell_text, + DOORBELL_TEXT_SCHEMA, + ), + ( + SERVICE_REMOVE_DOORBELL_TEXT, + remove_doorbell_text, + DOORBELL_TEXT_SCHEMA, + ), + ( + SERVICE_SET_CHIME_PAIRED, + set_chime_paired_doorbells, + CHIME_PAIRED_SCHEMA, + ), + ( + SERVICE_REMOVE_PRIVACY_ZONE, + remove_privacy_zone, + REMOVE_PRIVACY_ZONE_SCHEMA, + ), +] + + def async_setup_services(hass: HomeAssistant) -> None: """Set up the global UniFi Protect services.""" - services = [ - ( - SERVICE_ADD_DOORBELL_TEXT, - functools.partial(add_doorbell_text, hass), - DOORBELL_TEXT_SCHEMA, - ), - ( - SERVICE_REMOVE_DOORBELL_TEXT, - functools.partial(remove_doorbell_text, hass), - DOORBELL_TEXT_SCHEMA, - ), - ( - SERVICE_SET_CHIME_PAIRED, - functools.partial(set_chime_paired_doorbells, hass), - CHIME_PAIRED_SCHEMA, - ), - ( - SERVICE_REMOVE_PRIVACY_ZONE, - functools.partial(remove_privacy_zone, hass), - REMOVE_PRIVACY_ZONE_SCHEMA, - ), - ] - for name, method, schema in services: - if hass.services.has_service(DOMAIN, name): - continue + + for name, method, schema in SERVICES: hass.services.async_register(DOMAIN, name, method, schema=schema) diff --git a/homeassistant/components/unifiprotect/strings.json b/homeassistant/components/unifiprotect/strings.json index aaef111a351..8ecb4076409 100644 --- a/homeassistant/components/unifiprotect/strings.json +++ b/homeassistant/components/unifiprotect/strings.json @@ -42,7 +42,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "discovery_started": "Discovery started" + "discovery_started": "Discovery started", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "options": { @@ -136,6 +137,7 @@ }, "event": { "doorbell": { + "name": "Doorbell", "state_attributes": { "event_type": { "state": { @@ -143,6 +145,27 @@ } } } + }, + "nfc": { + "name": "NFC", + 
"state_attributes": { + "event_type": { + "state": { + "scanned": "Scanned" + } + } + } + }, + "fingerprint": { + "name": "Fingerprint", + "state_attributes": { + "event_type": { + "state": { + "identified": "Identified", + "not_identified": "Not identified" + } + } + } } } }, @@ -181,7 +204,7 @@ "fields": { "device_id": { "name": "Chime", - "description": "The chimes to link to the doorbells to." + "description": "The chimes to link to the doorbells." }, "doorbells": { "name": "Doorbells", diff --git a/homeassistant/components/unifiprotect/switch.py b/homeassistant/components/unifiprotect/switch.py index 9e1e0fa35d0..fa960261cf2 100644 --- a/homeassistant/components/unifiprotect/switch.py +++ b/homeassistant/components/unifiprotect/switch.py @@ -24,12 +24,15 @@ from homeassistant.helpers.restore_state import RestoreEntity from .data import ProtectData, ProtectDeviceType, UFPConfigEntry from .entity import ( BaseProtectEntity, + PermRequired, ProtectDeviceEntity, + ProtectEntityDescription, ProtectIsOnEntity, ProtectNVREntity, + ProtectSetableKeysMixin, + T, async_all_device_entities, ) -from .models import PermRequired, ProtectEntityDescription, ProtectSetableKeysMixin, T ATTR_PREV_MIC = "prev_mic_level" ATTR_PREV_RECORD = "prev_record_mode" diff --git a/homeassistant/components/unifiprotect/text.py b/homeassistant/components/unifiprotect/text.py index 9af946a7e11..0c7e1322f23 100644 --- a/homeassistant/components/unifiprotect/text.py +++ b/homeassistant/components/unifiprotect/text.py @@ -18,8 +18,14 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from .data import ProtectDeviceType, UFPConfigEntry -from .entity import ProtectDeviceEntity, async_all_device_entities -from .models import PermRequired, ProtectEntityDescription, ProtectSetableKeysMixin, T +from .entity import ( + PermRequired, + ProtectDeviceEntity, + ProtectEntityDescription, + ProtectSetableKeysMixin, + T, + async_all_device_entities, +) @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/unifiprotect/views.py b/homeassistant/components/unifiprotect/views.py index 00128492c67..9bf6ed024f5 100644 --- a/homeassistant/components/unifiprotect/views.py +++ b/homeassistant/components/unifiprotect/views.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import datetime from http import HTTPStatus import logging -from typing import Any +from typing import TYPE_CHECKING, Any from urllib.parse import urlencode from aiohttp import web @@ -30,7 +30,9 @@ def async_generate_thumbnail_url( ) -> str: """Generate URL for event thumbnail.""" - url_format = ThumbnailProxyView.url or "{nvr_id}/{event_id}" + url_format = ThumbnailProxyView.url + if TYPE_CHECKING: + assert url_format is not None url = url_format.format(nvr_id=nvr_id, event_id=event_id) params = {} @@ -50,7 +52,9 @@ def async_generate_event_video_url(event: Event) -> str: if event.start is None or event.end is None: raise ValueError("Event is ongoing") - url_format = VideoProxyView.url or "{nvr_id}/{camera_id}/{start}/{end}" + url_format = VideoProxyView.url + if TYPE_CHECKING: + assert url_format is not None return url_format.format( nvr_id=event.api.bootstrap.nvr.id, camera_id=event.camera_id, @@ -59,6 +63,19 @@ def async_generate_event_video_url(event: Event) -> str: ) +@callback +def async_generate_proxy_event_video_url( + nvr_id: str, + event_id: str, +) -> str: + """Generate proxy URL for event video.""" + + url_format = VideoEventProxyView.url + if 
TYPE_CHECKING: + assert url_format is not None + return url_format.format(nvr_id=nvr_id, event_id=event_id) + + @callback def _client_error(message: Any, code: HTTPStatus) -> web.Response: _LOGGER.warning("Client error (%s): %s", code.value, message) @@ -107,6 +124,27 @@ class ProtectProxyView(HomeAssistantView): return data return _404("Invalid NVR ID") + @callback + def _async_get_camera(self, data: ProtectData, camera_id: str) -> Camera | None: + if (camera := data.api.bootstrap.cameras.get(camera_id)) is not None: + return camera + + entity_registry = er.async_get(self.hass) + device_registry = dr.async_get(self.hass) + + if (entity := entity_registry.async_get(camera_id)) is None or ( + device := device_registry.async_get(entity.device_id or "") + ) is None: + return None + + macs = [c[1] for c in device.connections if c[0] == dr.CONNECTION_NETWORK_MAC] + for mac in macs: + if (ufp_device := data.api.bootstrap.get_device_from_mac(mac)) is not None: + if isinstance(ufp_device, Camera): + camera = ufp_device + break + return camera + class ThumbnailProxyView(ProtectProxyView): """View to proxy event thumbnails from UniFi Protect.""" @@ -156,27 +194,6 @@ class VideoProxyView(ProtectProxyView): url = "/api/unifiprotect/video/{nvr_id}/{camera_id}/{start}/{end}" name = "api:unifiprotect_thumbnail" - @callback - def _async_get_camera(self, data: ProtectData, camera_id: str) -> Camera | None: - if (camera := data.api.bootstrap.cameras.get(camera_id)) is not None: - return camera - - entity_registry = er.async_get(self.hass) - device_registry = dr.async_get(self.hass) - - if (entity := entity_registry.async_get(camera_id)) is None or ( - device := device_registry.async_get(entity.device_id or "") - ) is None: - return None - - macs = [c[1] for c in device.connections if c[0] == dr.CONNECTION_NETWORK_MAC] - for mac in macs: - if (ufp_device := data.api.bootstrap.get_device_from_mac(mac)) is not None: - if isinstance(ufp_device, Camera): - camera = ufp_device - break - return camera - async def get( self, request: web.Request, nvr_id: str, camera_id: str, start: str, end: str ) -> web.StreamResponse: @@ -226,3 +243,56 @@ class VideoProxyView(ProtectProxyView): if response.prepared: await response.write_eof() return response + + +class VideoEventProxyView(ProtectProxyView): + """View to proxy video clips for events from UniFi Protect.""" + + url = "/api/unifiprotect/video/{nvr_id}/{event_id}" + name = "api:unifiprotect_videoEventView" + + async def get( + self, request: web.Request, nvr_id: str, event_id: str + ) -> web.StreamResponse: + """Get Camera Video clip for an event.""" + + data = self._get_data_or_404(nvr_id) + if isinstance(data, web.Response): + return data + + try: + event = await data.api.get_event(event_id) + except ClientError: + return _404(f"Invalid event ID: {event_id}") + if event.start is None or event.end is None: + return _400("Event is still ongoing") + camera = self._async_get_camera(data, str(event.camera_id)) + if camera is None: + return _404(f"Invalid camera ID: {event.camera_id}") + if not camera.can_read_media(data.api.bootstrap.auth_user): + return _403(f"User cannot read media from camera: {camera.id}") + + response = web.StreamResponse( + status=200, + reason="OK", + headers={ + "Content-Type": "video/mp4", + }, + ) + + async def iterator(total: int, chunk: bytes | None) -> None: + if not response.prepared: + response.content_length = total + await response.prepare(request) + + if chunk is not None: + await response.write(chunk) + + try: + await 
camera.get_video(event.start, event.end, iterator_callback=iterator) + except ClientError as err: + return _404(err) + + if response.prepared: + await response.write_eof() + return response diff --git a/homeassistant/components/universal/icons.json b/homeassistant/components/universal/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/universal/icons.json +++ b/homeassistant/components/universal/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/universal/media_player.py b/homeassistant/components/universal/media_player.py index c5bd9fb50c4..25188eb3a5d 100644 --- a/homeassistant/components/universal/media_player.py +++ b/homeassistant/components/universal/media_player.py @@ -35,19 +35,19 @@ from homeassistant.components.media_player import ( ATTR_SOUND_MODE, ATTR_SOUND_MODE_LIST, DEVICE_CLASSES_SCHEMA, - DOMAIN, + DOMAIN as MEDIA_PLAYER_DOMAIN, PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOUND_MODE, SERVICE_SELECT_SOURCE, + BrowseMedia, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, MediaType, RepeatMode, ) -from homeassistant.components.media_player.browse_media import BrowseMedia from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, @@ -292,7 +292,11 @@ class UniversalMediaPlayer(MediaPlayerEntity): service_data[ATTR_ENTITY_ID] = active_child.entity_id await self.hass.services.async_call( - DOMAIN, service_name, service_data, blocking=True, context=self._context + MEDIA_PLAYER_DOMAIN, + service_name, + service_data, + blocking=True, + context=self._context, ) @property @@ -651,7 +655,9 @@ class UniversalMediaPlayer(MediaPlayerEntity): entity_id = self._browse_media_entity if not entity_id and self._child_state: entity_id = self._child_state.entity_id - component: EntityComponent[MediaPlayerEntity] = self.hass.data[DOMAIN] + component: EntityComponent[MediaPlayerEntity] = self.hass.data[ + MEDIA_PLAYER_DOMAIN + ] if entity_id and (entity := component.get_entity(entity_id)): return await entity.async_browse_media(media_content_type, media_content_id) raise NotImplementedError diff --git a/homeassistant/components/upb/__init__.py b/homeassistant/components/upb/__init__.py index 2e5a69393d4..c9f3a2df105 100644 --- a/homeassistant/components/upb/__init__.py +++ b/homeassistant/components/upb/__init__.py @@ -1,12 +1,12 @@ """Support the UPB PIM.""" +import logging + import upb_lib from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_COMMAND, CONF_FILE_PATH, CONF_HOST, Platform -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity +from homeassistant.core import HomeAssistant from .const import ( ATTR_ADDRESS, @@ -16,6 +16,7 @@ from .const import ( EVENT_UPB_SCENE_CHANGED, ) +_LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.LIGHT, Platform.SCENE] @@ -67,58 +68,19 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> return unload_ok -class UpbEntity(Entity): - """Base class for all UPB entities.""" +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate entry.""" - _attr_should_poll = False + _LOGGER.debug("Migrating from version %s", entry.version) - def __init__(self, element, unique_id, upb): - """Initialize the base of all UPB devices.""" - 
self._upb = upb - self._element = element - element_type = "link" if element.addr.is_link else "device" - self._unique_id = f"{unique_id}_{element_type}_{element.addr}" + if entry.version == 1: + # 1 -> 2: Unique ID from integer to string + if entry.minor_version == 1: + minor_version = 2 + hass.config_entries.async_update_entry( + entry, unique_id=str(entry.unique_id), minor_version=minor_version + ) - @property - def unique_id(self): - """Return unique id of the element.""" - return self._unique_id + _LOGGER.debug("Migration successful") - @property - def extra_state_attributes(self): - """Return the default attributes of the element.""" - return self._element.as_dict() - - @property - def available(self): - """Is the entity available to be updated.""" - return self._upb.is_connected() - - def _element_changed(self, element, changeset): - pass - - @callback - def _element_callback(self, element, changeset): - """Handle callback from an UPB element that has changed.""" - self._element_changed(element, changeset) - self.async_write_ha_state() - - async def async_added_to_hass(self): - """Register callback for UPB changes and update entity state.""" - self._element.add_callback(self._element_callback) - self._element_callback(self._element, {}) - - -class UpbAttachedEntity(UpbEntity): - """Base class for UPB attached entities.""" - - @property - def device_info(self) -> DeviceInfo: - """Device info for the entity.""" - return DeviceInfo( - identifiers={(DOMAIN, self._element.index)}, - manufacturer=self._element.manufacturer, - model=self._element.product, - name=self._element.name, - sw_version=self._element.version, - ) + return True diff --git a/homeassistant/components/upb/config_flow.py b/homeassistant/components/upb/config_flow.py index 6efd3a685ed..788a0336d73 100644 --- a/homeassistant/components/upb/config_flow.py +++ b/homeassistant/components/upb/config_flow.py @@ -78,10 +78,7 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for UPB PIM.""" VERSION = 1 - - def __init__(self) -> None: - """Initialize the UPB config flow.""" - self.importing = False + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -102,12 +99,9 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" if "base" not in errors: - await self.async_set_unique_id(network_id) + await self.async_set_unique_id(str(network_id)) self._abort_if_unique_id_configured() - if self.importing: - return self.async_create_entry(title=info["title"], data=user_input) - return self.async_create_entry( title=info["title"], data={ @@ -120,11 +114,6 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input): - """Handle import.""" - self.importing = True - return await self.async_step_user(user_input) - def _url_already_configured(self, url): """See if we already have a UPB PIM matching user input configured.""" existing_hosts = { diff --git a/homeassistant/components/upb/entity.py b/homeassistant/components/upb/entity.py new file mode 100644 index 00000000000..13037adf680 --- /dev/null +++ b/homeassistant/components/upb/entity.py @@ -0,0 +1,64 @@ +"""Support the UPB PIM.""" + +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + + +class UpbEntity(Entity): + """Base class for all UPB entities.""" + + _attr_should_poll = 
False + + def __init__(self, element, unique_id, upb): + """Initialize the base of all UPB devices.""" + self._upb = upb + self._element = element + element_type = "link" if element.addr.is_link else "device" + self._unique_id = f"{unique_id}_{element_type}_{element.addr}" + + @property + def unique_id(self): + """Return unique id of the element.""" + return self._unique_id + + @property + def extra_state_attributes(self): + """Return the default attributes of the element.""" + return self._element.as_dict() + + @property + def available(self): + """Is the entity available to be updated.""" + return self._upb.is_connected() + + def _element_changed(self, element, changeset): + pass + + @callback + def _element_callback(self, element, changeset): + """Handle callback from an UPB element that has changed.""" + self._element_changed(element, changeset) + self.async_write_ha_state() + + async def async_added_to_hass(self): + """Register callback for UPB changes and update entity state.""" + self._element.add_callback(self._element_callback) + self._element_callback(self._element, {}) + + +class UpbAttachedEntity(UpbEntity): + """Base class for UPB attached entities.""" + + @property + def device_info(self) -> DeviceInfo: + """Device info for the entity.""" + return DeviceInfo( + identifiers={(DOMAIN, self._element.index)}, + manufacturer=self._element.manufacturer, + model=self._element.product, + name=self._element.name, + sw_version=self._element.version, + ) diff --git a/homeassistant/components/upb/icons.json b/homeassistant/components/upb/icons.json index 187f0f60970..0274233da52 100644 --- a/homeassistant/components/upb/icons.json +++ b/homeassistant/components/upb/icons.json @@ -1,12 +1,28 @@ { "services": { - "light_fade_start": "mdi:transition", - "light_fade_stop": "mdi:transition-masked", - "light_blink": "mdi:eye", - "link_deactivate": "mdi:link-off", - "link_goto": "mdi:link-variant", - "link_fade_start": "mdi:transition", - "link_fade_stop": "mdi:transition-masked", - "link_blink": "mdi:eye" + "light_fade_start": { + "service": "mdi:transition" + }, + "light_fade_stop": { + "service": "mdi:transition-masked" + }, + "light_blink": { + "service": "mdi:eye" + }, + "link_deactivate": { + "service": "mdi:link-off" + }, + "link_goto": { + "service": "mdi:link-variant" + }, + "link_fade_start": { + "service": "mdi:transition" + }, + "link_fade_stop": { + "service": "mdi:transition-masked" + }, + "link_blink": { + "service": "mdi:eye" + } } } diff --git a/homeassistant/components/upb/light.py b/homeassistant/components/upb/light.py index 881eda3525f..07bd50b7d9f 100644 --- a/homeassistant/components/upb/light.py +++ b/homeassistant/components/upb/light.py @@ -15,8 +15,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import UpbAttachedEntity from .const import DOMAIN, UPB_BLINK_RATE_SCHEMA, UPB_BRIGHTNESS_RATE_SCHEMA +from .entity import UpbAttachedEntity SERVICE_LIGHT_FADE_START = "light_fade_start" SERVICE_LIGHT_FADE_STOP = "light_fade_stop" diff --git a/homeassistant/components/upb/manifest.json b/homeassistant/components/upb/manifest.json index 6b49c859771..1e61747b3f1 100644 --- a/homeassistant/components/upb/manifest.json +++ b/homeassistant/components/upb/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/upb", "iot_class": "local_push", "loggers": ["upb_lib"], - "requirements": ["upb-lib==0.5.8"] + "requirements": ["upb-lib==0.5.9"] } diff --git a/homeassistant/components/upb/scene.py b/homeassistant/components/upb/scene.py index 276b620d5b5..5a5e17b3e4c 100644 --- a/homeassistant/components/upb/scene.py +++ b/homeassistant/components/upb/scene.py @@ -8,8 +8,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import UpbEntity from .const import DOMAIN, UPB_BLINK_RATE_SCHEMA, UPB_BRIGHTNESS_RATE_SCHEMA +from .entity import UpbEntity SERVICE_LINK_DEACTIVATE = "link_deactivate" SERVICE_LINK_FADE_STOP = "link_fade_stop" diff --git a/homeassistant/components/upc_connect/device_tracker.py b/homeassistant/components/upc_connect/device_tracker.py index 1ec6dcd3107..c279be78666 100644 --- a/homeassistant/components/upc_connect/device_tracker.py +++ b/homeassistant/components/upc_connect/device_tracker.py @@ -9,7 +9,7 @@ from connect_box.exceptions import ConnectBoxError, ConnectBoxLoginError import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -35,7 +35,7 @@ async def async_get_scanner( hass: HomeAssistant, config: ConfigType ) -> UPCDeviceScanner | None: """Return the UPC device scanner.""" - conf = config[DOMAIN] + conf = config[DEVICE_TRACKER_DOMAIN] session = async_get_clientsession(hass) connect_box = ConnectBox(session, conf[CONF_PASSWORD], host=conf[CONF_HOST]) diff --git a/homeassistant/components/upc_connect/manifest.json b/homeassistant/components/upc_connect/manifest.json index 02b852ec3a6..1874e5db028 100644 --- a/homeassistant/components/upc_connect/manifest.json +++ b/homeassistant/components/upc_connect/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/upc_connect", "iot_class": "local_polling", "loggers": ["connect_box"], + "quality_scale": "legacy", "requirements": ["connect-box==0.3.1"] } diff --git a/homeassistant/components/upcloud/__init__.py b/homeassistant/components/upcloud/__init__.py index 4b65406f312..30d7cacba8e 100644 --- a/homeassistant/components/upcloud/__init__.py +++ b/homeassistant/components/upcloud/__init__.py @@ -5,7 +5,6 @@ from __future__ import annotations import dataclasses from datetime import timedelta import logging -from typing import Any import requests.exceptions import upcloud_api @@ -15,44 +14,26 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME, - STATE_OFF, - STATE_ON, - STATE_PROBLEM, Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) 
-from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CONFIG_ENTRY_UPDATE_SIGNAL_TEMPLATE, DEFAULT_SCAN_INTERVAL, DOMAIN +from .const import ( + CONFIG_ENTRY_UPDATE_SIGNAL_TEMPLATE, + DATA_UPCLOUD, + DEFAULT_SCAN_INTERVAL, +) from .coordinator import UpCloudDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) -ATTR_CORE_NUMBER = "core_number" -ATTR_HOSTNAME = "hostname" -ATTR_MEMORY_AMOUNT = "memory_amount" -ATTR_TITLE = "title" -ATTR_UUID = "uuid" -ATTR_ZONE = "zone" - -CONF_SERVERS = "servers" - -DATA_UPCLOUD = "data_upcloud" - -DEFAULT_COMPONENT_NAME = "UpCloud {}" - PLATFORMS = [Platform.BINARY_SENSOR, Platform.SWITCH] -SIGNAL_UPDATE_UPCLOUD = "upcloud_update" - -STATE_MAP = {"error": STATE_PROBLEM, "started": STATE_ON, "stopped": STATE_OFF} - @dataclasses.dataclass class UpCloudHassData: @@ -136,82 +117,3 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> hass.data[DATA_UPCLOUD].coordinators.pop(config_entry.data[CONF_USERNAME]) return unload_ok - - -class UpCloudServerEntity(CoordinatorEntity[UpCloudDataUpdateCoordinator]): - """Entity class for UpCloud servers.""" - - def __init__( - self, - coordinator: UpCloudDataUpdateCoordinator, - uuid: str, - ) -> None: - """Initialize the UpCloud server entity.""" - super().__init__(coordinator) - self.uuid = uuid - - @property - def _server(self) -> upcloud_api.Server: - return self.coordinator.data[self.uuid] - - @property - def unique_id(self) -> str: - """Return unique ID for the entity.""" - return self.uuid - - @property - def name(self) -> str: - """Return the name of the component.""" - try: - return DEFAULT_COMPONENT_NAME.format(self._server.title) - except (AttributeError, KeyError, TypeError): - return DEFAULT_COMPONENT_NAME.format(self.uuid) - - @property - def icon(self) -> str: - """Return the icon of this server.""" - return "mdi:server" if self.is_on else "mdi:server-off" - - @property - def is_on(self) -> bool: - """Return true if the server is on.""" - try: - return STATE_MAP.get(self._server.state, self._server.state) == STATE_ON # type: ignore[no-any-return] - except AttributeError: - return False - - @property - def available(self) -> bool: - """Return True if entity is available.""" - return super().available and STATE_MAP.get( - self._server.state, self._server.state - ) in (STATE_ON, STATE_OFF) - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return the state attributes of the UpCloud server.""" - return { - x: getattr(self._server, x, None) - for x in ( - ATTR_UUID, - ATTR_TITLE, - ATTR_HOSTNAME, - ATTR_ZONE, - ATTR_CORE_NUMBER, - ATTR_MEMORY_AMOUNT, - ) - } - - @property - def device_info(self) -> DeviceInfo: - """Return info for device registry.""" - assert self.coordinator.config_entry is not None - return DeviceInfo( - configuration_url="https://hub.upcloud.com", - model="Control Panel", - entry_type=DeviceEntryType.SERVICE, - identifiers={ - (DOMAIN, f"{self.coordinator.config_entry.data[CONF_USERNAME]}@hub") - }, - manufacturer="UpCloud Ltd", - ) diff --git a/homeassistant/components/upcloud/binary_sensor.py b/homeassistant/components/upcloud/binary_sensor.py index 691edde8473..f135eea24b1 100644 --- a/homeassistant/components/upcloud/binary_sensor.py +++ b/homeassistant/components/upcloud/binary_sensor.py @@ -9,7 +9,8 @@ from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DATA_UPCLOUD, UpCloudServerEntity +from .const import DATA_UPCLOUD +from .entity import UpCloudServerEntity async def async_setup_entry( diff --git a/homeassistant/components/upcloud/config_flow.py b/homeassistant/components/upcloud/config_flow.py index 20860df5553..bb988726ba5 100644 --- a/homeassistant/components/upcloud/config_flow.py +++ b/homeassistant/components/upcloud/config_flow.py @@ -95,16 +95,12 @@ class UpCloudConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> UpCloudOptionsFlow: """Get options flow.""" - return UpCloudOptionsFlow(config_entry) + return UpCloudOptionsFlow() class UpCloudOptionsFlow(OptionsFlow): """UpCloud options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/upcloud/const.py b/homeassistant/components/upcloud/const.py index 763462c37f4..a967a43c46e 100644 --- a/homeassistant/components/upcloud/const.py +++ b/homeassistant/components/upcloud/const.py @@ -3,5 +3,6 @@ from datetime import timedelta DOMAIN = "upcloud" +DATA_UPCLOUD = "data_upcloud" DEFAULT_SCAN_INTERVAL = timedelta(seconds=60) CONFIG_ENTRY_UPDATE_SIGNAL_TEMPLATE = f"{DOMAIN}_config_entry_update:{{}}" diff --git a/homeassistant/components/upcloud/entity.py b/homeassistant/components/upcloud/entity.py new file mode 100644 index 00000000000..c64ca7be2ea --- /dev/null +++ b/homeassistant/components/upcloud/entity.py @@ -0,0 +1,107 @@ +"""Support for UpCloud.""" + +from __future__ import annotations + +import logging +from typing import Any + +import upcloud_api + +from homeassistant.const import CONF_USERNAME, STATE_OFF, STATE_ON, STATE_PROBLEM +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import UpCloudDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + +ATTR_CORE_NUMBER = "core_number" +ATTR_HOSTNAME = "hostname" +ATTR_MEMORY_AMOUNT = "memory_amount" +ATTR_TITLE = "title" +ATTR_UUID = "uuid" +ATTR_ZONE = "zone" + +DEFAULT_COMPONENT_NAME = "UpCloud {}" + +STATE_MAP = {"error": STATE_PROBLEM, "started": STATE_ON, "stopped": STATE_OFF} + + +class UpCloudServerEntity(CoordinatorEntity[UpCloudDataUpdateCoordinator]): + """Entity class for UpCloud servers.""" + + def __init__( + self, + coordinator: UpCloudDataUpdateCoordinator, + uuid: str, + ) -> None: + """Initialize the UpCloud server entity.""" + super().__init__(coordinator) + self.uuid = uuid + + @property + def _server(self) -> upcloud_api.Server: + return self.coordinator.data[self.uuid] + + @property + def unique_id(self) -> str: + """Return unique ID for the entity.""" + return self.uuid + + @property + def name(self) -> str: + """Return the name of the component.""" + try: + return DEFAULT_COMPONENT_NAME.format(self._server.title) + except (AttributeError, KeyError, TypeError): + return DEFAULT_COMPONENT_NAME.format(self.uuid) + + @property + def icon(self) -> str: + """Return the icon of this server.""" + return "mdi:server" if self.is_on else "mdi:server-off" + + @property + def is_on(self) -> bool: + """Return true if the server is on.""" + try: + return STATE_MAP.get(self._server.state, self._server.state) == STATE_ON # type: ignore[no-any-return] + except AttributeError: + return False + + @property + def available(self) -> 
bool: + """Return True if entity is available.""" + return super().available and STATE_MAP.get( + self._server.state, self._server.state + ) in (STATE_ON, STATE_OFF) + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the state attributes of the UpCloud server.""" + return { + x: getattr(self._server, x, None) + for x in ( + ATTR_UUID, + ATTR_TITLE, + ATTR_HOSTNAME, + ATTR_ZONE, + ATTR_CORE_NUMBER, + ATTR_MEMORY_AMOUNT, + ) + } + + @property + def device_info(self) -> DeviceInfo: + """Return info for device registry.""" + assert self.coordinator.config_entry is not None + return DeviceInfo( + configuration_url="https://hub.upcloud.com", + model="Control Panel", + entry_type=DeviceEntryType.SERVICE, + identifiers={ + (DOMAIN, f"{self.coordinator.config_entry.data[CONF_USERNAME]}@hub") + }, + manufacturer="UpCloud Ltd", + ) diff --git a/homeassistant/components/upcloud/manifest.json b/homeassistant/components/upcloud/manifest.json index cd829f6dd9d..38581d31709 100644 --- a/homeassistant/components/upcloud/manifest.json +++ b/homeassistant/components/upcloud/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/upcloud", "iot_class": "cloud_polling", - "requirements": ["upcloud-api==2.5.1"] + "requirements": ["upcloud-api==2.6.0"] } diff --git a/homeassistant/components/upcloud/switch.py b/homeassistant/components/upcloud/switch.py index 484b6875d8f..7495357ca9e 100644 --- a/homeassistant/components/upcloud/switch.py +++ b/homeassistant/components/upcloud/switch.py @@ -9,7 +9,10 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DATA_UPCLOUD, SIGNAL_UPDATE_UPCLOUD, UpCloudServerEntity +from .const import DATA_UPCLOUD +from .entity import UpCloudServerEntity + +SIGNAL_UPDATE_UPCLOUD = "upcloud_update" async def async_setup_entry( diff --git a/homeassistant/components/update/__init__.py b/homeassistant/components/update/__init__.py index cd52de6550f..8ef9f44237f 100644 --- a/homeassistant/components/update/__init__.py +++ b/homeassistant/components/update/__init__.py @@ -4,11 +4,12 @@ from __future__ import annotations from datetime import timedelta from enum import StrEnum -from functools import cached_property, lru_cache +from functools import lru_cache import logging from typing import Any, Final, final from awesomeversion import AwesomeVersion, AwesomeVersionCompareException +from propcache import cached_property import voluptuous as vol from homeassistant.components import websocket_api @@ -21,10 +22,12 @@ from homeassistant.helpers.entity import ABCCachedProperties, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import ( ATTR_AUTO_UPDATE, ATTR_BACKUP, + ATTR_DISPLAY_PRECISION, ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, @@ -32,6 +35,7 @@ from .const import ( ATTR_RELEASE_URL, ATTR_SKIPPED_VERSION, ATTR_TITLE, + ATTR_UPDATE_PERCENTAGE, ATTR_VERSION, DOMAIN, SERVICE_INSTALL, @@ -41,6 +45,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[UpdateEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT: Final = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = 
cv.PLATFORM_SCHEMA_BASE @@ -78,7 +83,7 @@ __all__ = [ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Select entities.""" - component = hass.data[DOMAIN] = EntityComponent[UpdateEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[UpdateEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -111,14 +116,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[UpdateEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[UpdateEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) async def async_install(entity: UpdateEntity, service_call: ServiceCall) -> None: @@ -133,7 +136,7 @@ async def async_install(entity: UpdateEntity, service_call: ServiceCall) -> None # If version is specified, but not supported by the entity. if ( version is not None - and UpdateEntityFeature.SPECIFIC_VERSION not in entity.supported_features_compat + and UpdateEntityFeature.SPECIFIC_VERSION not in entity.supported_features ): raise HomeAssistantError( f"Installing a specific version is not supported for {entity.entity_id}" @@ -142,7 +145,7 @@ async def async_install(entity: UpdateEntity, service_call: ServiceCall) -> None # If backup is requested, but not supported by the entity. if ( backup := service_call.data[ATTR_BACKUP] - ) and UpdateEntityFeature.BACKUP not in entity.supported_features_compat: + ) and UpdateEntityFeature.BACKUP not in entity.supported_features: raise HomeAssistantError(f"Backup is not supported for {entity.entity_id}") # Update is already in progress. 
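# [Editor's note, not part of the diff] The hunk above changes the update
# component's `async_install` service handler to check the entity's
# `supported_features` directly, dropping the `supported_features_compat`
# shim. Below is a minimal sketch of that validation pattern, assuming the
# names visible in the diff (`UpdateEntity`, `UpdateEntityFeature`,
# `HomeAssistantError`); the standalone helper `_validate_install_options`
# is hypothetical and only illustrates the check, it is not part of the PR.
from __future__ import annotations

from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
from homeassistant.exceptions import HomeAssistantError


def _validate_install_options(
    entity: UpdateEntity, version: str | None, backup: bool
) -> None:
    """Reject install options that the given update entity does not support."""
    features = entity.supported_features
    if version is not None and UpdateEntityFeature.SPECIFIC_VERSION not in features:
        raise HomeAssistantError(
            f"Installing a specific version is not supported for {entity.entity_id}"
        )
    if backup and UpdateEntityFeature.BACKUP not in features:
        raise HomeAssistantError(f"Backup is not supported for {entity.entity_id}")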
@@ -176,12 +179,13 @@ class UpdateEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes update entities.""" device_class: UpdateDeviceClass | None = None + display_precision: int = 0 entity_category: EntityCategory | None = EntityCategory.CONFIG @lru_cache(maxsize=256) def _version_is_newer(latest_version: str, installed_version: str) -> bool: - """Return True if version is newer.""" + """Return True if latest_version is newer than installed_version.""" return AwesomeVersion(latest_version) > installed_version @@ -189,12 +193,14 @@ CACHED_PROPERTIES_WITH_ATTR_ = { "auto_update", "installed_version", "device_class", + "display_precision", "in_progress", "latest_version", "release_summary", "release_url", "supported_features", "title", + "update_percentage", } @@ -206,13 +212,20 @@ class UpdateEntity( """Representation of an update entity.""" _entity_component_unrecorded_attributes = frozenset( - {ATTR_ENTITY_PICTURE, ATTR_IN_PROGRESS, ATTR_RELEASE_SUMMARY} + { + ATTR_DISPLAY_PRECISION, + ATTR_ENTITY_PICTURE, + ATTR_IN_PROGRESS, + ATTR_RELEASE_SUMMARY, + ATTR_UPDATE_PERCENTAGE, + } ) entity_description: UpdateEntityDescription _attr_auto_update: bool = False _attr_installed_version: str | None = None _attr_device_class: UpdateDeviceClass | None + _attr_display_precision: int _attr_in_progress: bool | int = False _attr_latest_version: str | None = None _attr_release_summary: str | None = None @@ -220,6 +233,7 @@ class UpdateEntity( _attr_state: None = None _attr_supported_features: UpdateEntityFeature = UpdateEntityFeature(0) _attr_title: str | None = None + _attr_update_percentage: int | float | None = None __skipped_version: str | None = None __in_progress: bool = False @@ -249,6 +263,15 @@ class UpdateEntity( return self.entity_description.device_class return None + @cached_property + def display_precision(self) -> int: + """Return number of decimal digits for display of update progress.""" + if hasattr(self, "_attr_display_precision"): + return self._attr_display_precision + if hasattr(self, "entity_description"): + return self.entity_description.display_precision + return 0 + @property def entity_category(self) -> EntityCategory | None: """Return the category of the entity, if any.""" @@ -256,7 +279,7 @@ class UpdateEntity( return self._attr_entity_category if hasattr(self, "entity_description"): return self.entity_description.entity_category - if UpdateEntityFeature.INSTALL in self.supported_features_compat: + if UpdateEntityFeature.INSTALL in self.supported_features: return EntityCategory.CONFIG return EntityCategory.DIAGNOSTIC @@ -277,8 +300,7 @@ class UpdateEntity( Needs UpdateEntityFeature.PROGRESS flag to be set for it to be used. - Can either return a boolean (True if in progress, False if not) - or an integer to indicate the progress in from 0 to 100%. + Should return a boolean (True if in progress, False if not). """ return self._attr_in_progress @@ -315,18 +337,15 @@ class UpdateEntity( """ return self._attr_title - @property - def supported_features_compat(self) -> UpdateEntityFeature: - """Return the supported features as UpdateEntityFeature. + @cached_property + def update_percentage(self) -> int | float | None: + """Update installation progress. - Remove this compatibility shim in 2025.1 or later. + Needs UpdateEntityFeature.PROGRESS flag to be set for it to be used. + + Can either return a number to indicate the progress from 0 to 100% or None. 
""" - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = UpdateEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features + return self._attr_update_percentage @final async def async_skip(self) -> None: @@ -384,6 +403,11 @@ class UpdateEntity( """ raise NotImplementedError + def version_is_newer(self, latest_version: str, installed_version: str) -> bool: + """Return True if latest_version is newer than installed_version.""" + # We don't inline the `_version_is_newer` function because of caching + return _version_is_newer(latest_version, installed_version) + @property @final def state(self) -> str | None: @@ -399,7 +423,7 @@ class UpdateEntity( return STATE_OFF try: - newer = _version_is_newer(latest_version, installed_version) + newer = self.version_is_newer(latest_version, installed_version) except AwesomeVersionCompareException: # Can't compare versions, already tried exact match return STATE_ON @@ -414,10 +438,15 @@ class UpdateEntity( # If entity supports progress, return the in_progress value. # Otherwise, we use the internal progress value. - if UpdateEntityFeature.PROGRESS in self.supported_features_compat: + if UpdateEntityFeature.PROGRESS in self.supported_features: in_progress = self.in_progress + update_percentage = self.update_percentage if in_progress else None + if type(in_progress) is not bool and isinstance(in_progress, int): + update_percentage = in_progress + in_progress = True else: in_progress = self.__in_progress + update_percentage = None installed_version = self.installed_version latest_version = self.latest_version @@ -432,6 +461,7 @@ class UpdateEntity( return { ATTR_AUTO_UPDATE: self.auto_update, + ATTR_DISPLAY_PRECISION: self.display_precision, ATTR_INSTALLED_VERSION: installed_version, ATTR_IN_PROGRESS: in_progress, ATTR_LATEST_VERSION: latest_version, @@ -439,6 +469,7 @@ class UpdateEntity( ATTR_RELEASE_URL: self.release_url, ATTR_SKIPPED_VERSION: skipped_version, ATTR_TITLE: self.title, + ATTR_UPDATE_PERCENTAGE: update_percentage, } @final @@ -450,7 +481,7 @@ class UpdateEntity( Handles setting the in_progress state in case the entity doesn't support it natively. 
""" - if UpdateEntityFeature.PROGRESS not in self.supported_features_compat: + if UpdateEntityFeature.PROGRESS not in self.supported_features: self.__in_progress = True self.async_write_ha_state() @@ -487,8 +518,7 @@ async def websocket_release_notes( msg: dict[str, Any], ) -> None: """Get the full release notes for a entity.""" - component: EntityComponent[UpdateEntity] = hass.data[DOMAIN] - entity = component.get_entity(msg["entity_id"]) + entity = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"]) if entity is None: connection.send_error( @@ -496,7 +526,7 @@ async def websocket_release_notes( ) return - if UpdateEntityFeature.RELEASE_NOTES not in entity.supported_features_compat: + if UpdateEntityFeature.RELEASE_NOTES not in entity.supported_features: connection.send_error( msg["id"], websocket_api.ERR_NOT_SUPPORTED, diff --git a/homeassistant/components/update/const.py b/homeassistant/components/update/const.py index 0d7da94f656..83a74ef6789 100644 --- a/homeassistant/components/update/const.py +++ b/homeassistant/components/update/const.py @@ -23,6 +23,7 @@ SERVICE_SKIP: Final = "skip" ATTR_AUTO_UPDATE: Final = "auto_update" ATTR_BACKUP: Final = "backup" +ATTR_DISPLAY_PRECISION: Final = "display_precision" ATTR_INSTALLED_VERSION: Final = "installed_version" ATTR_IN_PROGRESS: Final = "in_progress" ATTR_LATEST_VERSION: Final = "latest_version" @@ -30,4 +31,5 @@ ATTR_RELEASE_SUMMARY: Final = "release_summary" ATTR_RELEASE_URL: Final = "release_url" ATTR_SKIPPED_VERSION: Final = "skipped_version" ATTR_TITLE: Final = "title" +ATTR_UPDATE_PERCENTAGE: Final = "update_percentage" ATTR_VERSION: Final = "version" diff --git a/homeassistant/components/update/icons.json b/homeassistant/components/update/icons.json index 96920c96253..89af07de67f 100644 --- a/homeassistant/components/update/icons.json +++ b/homeassistant/components/update/icons.json @@ -8,8 +8,14 @@ } }, "services": { - "clear_skipped": "mdi:package", - "install": "mdi:package-down", - "skip": "mdi:package-check" + "clear_skipped": { + "service": "mdi:package" + }, + "install": { + "service": "mdi:package-down" + }, + "skip": { + "service": "mdi:package-check" + } } } diff --git a/homeassistant/components/update/strings.json b/homeassistant/components/update/strings.json index eb6db257bb2..5194965cf69 100644 --- a/homeassistant/components/update/strings.json +++ b/homeassistant/components/update/strings.json @@ -56,7 +56,7 @@ "services": { "install": { "name": "Install update", - "description": "Installs an update for this device or service.", + "description": "Installs an update for a device or service.", "fields": { "version": { "name": "Version", @@ -64,7 +64,7 @@ }, "backup": { "name": "Backup", - "description": "If supported by the integration, this creates a backup before starting the update ." + "description": "If supported by the integration, this creates a backup before starting the update." 
} } }, diff --git a/homeassistant/components/upnp/config_flow.py b/homeassistant/components/upnp/config_flow.py index 1a40d4b3442..41e481fa58c 100644 --- a/homeassistant/components/upnp/config_flow.py +++ b/homeassistant/components/upnp/config_flow.py @@ -16,7 +16,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import HomeAssistant, callback @@ -94,9 +93,11 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> UpnpOptionsFlowHandler: """Get the options flow for this handler.""" - return UpnpOptionsFlowHandler(config_entry) + return UpnpOptionsFlowHandler() @property def _discoveries(self) -> dict[str, SsdpServiceInfo]: @@ -299,7 +300,7 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=title, data=data, options=options) -class UpnpOptionsFlowHandler(OptionsFlowWithConfigEntry): +class UpnpOptionsFlowHandler(OptionsFlow): """Handle an options flow.""" async def async_step_init( @@ -313,7 +314,7 @@ class UpnpOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONFIG_ENTRY_FORCE_POLL, - default=self.options.get( + default=self.config_entry.options.get( CONFIG_ENTRY_FORCE_POLL, DEFAULT_CONFIG_ENTRY_FORCE_POLL ), ): bool, diff --git a/homeassistant/components/upnp/device.py b/homeassistant/components/upnp/device.py index 923d4828879..7067d1d2e1a 100644 --- a/homeassistant/components/upnp/device.py +++ b/homeassistant/components/upnp/device.py @@ -11,7 +11,7 @@ from urllib.parse import urlparse from async_upnp_client.aiohttp import AiohttpNotifyServer, AiohttpSessionRequester from async_upnp_client.client_factory import UpnpFactory from async_upnp_client.const import AddressTupleVXType -from async_upnp_client.exceptions import UpnpConnectionError +from async_upnp_client.exceptions import UpnpCommunicationError from async_upnp_client.profiles.igd import IgdDevice, IgdStateItem from async_upnp_client.utils import async_get_local_ip from getmac import get_mac_address @@ -206,7 +206,7 @@ class Device: """Subscribe to services.""" try: await self._igd_device.async_subscribe_services(auto_resubscribe=True) - except UpnpConnectionError as ex: + except UpnpCommunicationError as ex: _LOGGER.debug( "Error subscribing to services, falling back to forced polling: %s", ex ) @@ -214,7 +214,10 @@ class Device: async def async_unsubscribe_services(self) -> None: """Unsubscribe from services.""" - await self._igd_device.async_unsubscribe_services() + try: + await self._igd_device.async_unsubscribe_services() + except UpnpCommunicationError as ex: + _LOGGER.debug("Error unsubscribing to services: %s", ex) async def async_get_data( self, entity_description_keys: list[str] | None diff --git a/homeassistant/components/upnp/manifest.json b/homeassistant/components/upnp/manifest.json index 30054af0512..b0b4fe35b39 100644 --- a/homeassistant/components/upnp/manifest.json +++ b/homeassistant/components/upnp/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.40.0", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.41.0", "getmac==0.9.4"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" diff --git a/homeassistant/components/upnp/sensor.py 
b/homeassistant/components/upnp/sensor.py index d6da50c877d..aae2f8308c1 100644 --- a/homeassistant/components/upnp/sensor.py +++ b/homeassistant/components/upnp/sensor.py @@ -89,6 +89,7 @@ SENSOR_DESCRIPTIONS: tuple[UpnpSensorEntityDescription, ...] = ( UpnpSensorEntityDescription( key=ROUTER_UPTIME, translation_key="uptime", + device_class=SensorDeviceClass.DURATION, native_unit_of_measurement=UnitOfTime.SECONDS, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, diff --git a/homeassistant/components/uptimerobot/manifest.json b/homeassistant/components/uptimerobot/manifest.json index 254409cff7e..67e57f46986 100644 --- a/homeassistant/components/uptimerobot/manifest.json +++ b/homeassistant/components/uptimerobot/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/uptimerobot", "iot_class": "cloud_polling", "loggers": ["pyuptimerobot"], - "quality_scale": "platinum", "requirements": ["pyuptimerobot==22.2.0"] } diff --git a/homeassistant/components/usb/__init__.py b/homeassistant/components/usb/__init__.py index d4201d7f284..2da72d16ac6 100644 --- a/homeassistant/components/usb/__init__.py +++ b/homeassistant/components/usb/__init__.py @@ -16,7 +16,7 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components import websocket_api -from homeassistant.components.websocket_api.connection import ActiveConnection +from homeassistant.components.websocket_api import ActiveConnection from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP from homeassistant.core import ( CALLBACK_TYPE, diff --git a/homeassistant/components/usgs_earthquakes_feed/manifest.json b/homeassistant/components/usgs_earthquakes_feed/manifest.json index ffb9412703f..ea68d00e2a9 100644 --- a/homeassistant/components/usgs_earthquakes_feed/manifest.json +++ b/homeassistant/components/usgs_earthquakes_feed/manifest.json @@ -6,5 +6,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_geojson_usgs_earthquakes"], + "quality_scale": "legacy", "requirements": ["aio-geojson-usgs-earthquakes==0.3"] } diff --git a/homeassistant/components/utility_meter/__init__.py b/homeassistant/components/utility_meter/__init__.py index c6a8635f831..aac31e085a0 100644 --- a/homeassistant/components/utility_meter/__init__.py +++ b/homeassistant/components/utility_meter/__init__.py @@ -1,9 +1,9 @@ """Support for tracking consumption over given periods of time.""" -from datetime import timedelta +from datetime import datetime, timedelta import logging -from croniter import croniter +from cronsim import CronSim, CronSimError import voluptuous as vol from homeassistant.components.select import DOMAIN as SELECT_DOMAIN @@ -47,9 +47,12 @@ DEFAULT_OFFSET = timedelta(hours=0) def validate_cron_pattern(pattern): """Check that the pattern is well-formed.""" - if croniter.is_valid(pattern): - return pattern - raise vol.Invalid("Invalid pattern") + try: + CronSim(pattern, datetime(2020, 1, 1)) # any date will do + except CronSimError as err: + _LOGGER.error("Invalid cron pattern %s: %s", pattern, err) + raise vol.Invalid("Invalid pattern") from err + return pattern def period_or_cron(config): diff --git a/homeassistant/components/utility_meter/icons.json b/homeassistant/components/utility_meter/icons.json index 3c447b4a810..2539b73d168 100644 --- a/homeassistant/components/utility_meter/icons.json +++ b/homeassistant/components/utility_meter/icons.json @@ -12,7 +12,11 @@ } }, "services": { - 
"reset": "mdi:numeric-0-box-outline", - "calibrate": "mdi:auto-fix" + "reset": { + "service": "mdi:numeric-0-box-outline" + }, + "calibrate": { + "service": "mdi:auto-fix" + } } } diff --git a/homeassistant/components/utility_meter/manifest.json b/homeassistant/components/utility_meter/manifest.json index 25e803e6a2d..5167c51469d 100644 --- a/homeassistant/components/utility_meter/manifest.json +++ b/homeassistant/components/utility_meter/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/utility_meter", "integration_type": "helper", "iot_class": "local_push", - "loggers": ["croniter"], "quality_scale": "internal", - "requirements": ["croniter==2.0.2"] + "requirements": ["cronsim==2.6"] } diff --git a/homeassistant/components/utility_meter/select.py b/homeassistant/components/utility_meter/select.py index d5b1206d046..5815ce7ec95 100644 --- a/homeassistant/components/utility_meter/select.py +++ b/homeassistant/components/utility_meter/select.py @@ -6,7 +6,7 @@ import logging from homeassistant.components.select import SelectEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_UNIQUE_ID +from homeassistant.const import CONF_NAME, CONF_UNIQUE_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.device import async_device_info_to_link_from_entity from homeassistant.helpers.device_registry import DeviceInfo @@ -36,9 +36,9 @@ async def async_setup_entry( ) tariff_select = TariffSelect( - name, - tariffs, - unique_id, + name=name, + tariffs=tariffs, + unique_id=unique_id, device_info=device_info, ) async_add_entities([tariff_select]) @@ -62,13 +62,15 @@ async def async_setup_platform( conf_meter_unique_id: str | None = hass.data[DATA_UTILITY][meter].get( CONF_UNIQUE_ID ) + conf_meter_name = hass.data[DATA_UTILITY][meter].get(CONF_NAME, meter) async_add_entities( [ TariffSelect( - meter, - discovery_info[CONF_TARIFFS], - conf_meter_unique_id, + name=conf_meter_name, + tariffs=discovery_info[CONF_TARIFFS], + yaml_slug=meter, + unique_id=conf_meter_unique_id, ) ] ) @@ -82,12 +84,16 @@ class TariffSelect(SelectEntity, RestoreEntity): def __init__( self, name, - tariffs, - unique_id, + tariffs: list[str], + *, + yaml_slug: str | None = None, + unique_id: str | None = None, device_info: DeviceInfo | None = None, ) -> None: """Initialize a tariff selector.""" self._attr_name = name + if yaml_slug: # Backwards compatibility with YAML configuration entries + self.entity_id = f"select.{yaml_slug}" self._attr_unique_id = unique_id self._attr_device_info = device_info self._current_tariff: str | None = None diff --git a/homeassistant/components/utility_meter/sensor.py b/homeassistant/components/utility_meter/sensor.py index 6b8c07c7ef7..9c13aa1984a 100644 --- a/homeassistant/components/utility_meter/sensor.py +++ b/homeassistant/components/utility_meter/sensor.py @@ -9,7 +9,7 @@ from decimal import Decimal, DecimalException, InvalidOperation import logging from typing import Any, Self -from croniter import croniter +from cronsim import CronSim import voluptuous as vol from homeassistant.components.sensor import ( @@ -27,6 +27,7 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, CONF_NAME, CONF_UNIQUE_ID, + EVENT_CORE_CONFIG_UPDATE, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -379,14 +380,13 @@ class UtilityMeterSensor(RestoreSensor): self.entity_id = suggested_entity_id self._parent_meter = parent_meter self._sensor_source_id = source_entity - self._state = None self._last_period = Decimal(0) 
self._last_reset = dt_util.utcnow() self._last_valid_state = None self._collecting = None - self._name = name + self._attr_name = name self._input_device_class = None - self._unit_of_measurement = None + self._attr_native_unit_of_measurement = None self._period = meter_type if meter_type is not None: # For backwards compatibility reasons we convert the period and offset into a cron pattern @@ -405,12 +405,26 @@ class UtilityMeterSensor(RestoreSensor): self._tariff = tariff self._tariff_entity = tariff_entity self._next_reset = None + self._current_tz = None + self._config_scheduler() + + def _config_scheduler(self): + self.scheduler = ( + CronSim( + self._cron_pattern, + dt_util.now( + dt_util.get_default_time_zone() + ), # we need timezone for DST purposes (see issue #102984) + ) + if self._cron_pattern + else None + ) def start(self, attributes: Mapping[str, Any]) -> None: """Initialize unit and state upon source initial update.""" self._input_device_class = attributes.get(ATTR_DEVICE_CLASS) - self._unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) - self._state = 0 + self._attr_native_unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) + self._attr_native_value = 0 self.async_write_ha_state() @staticmethod @@ -485,13 +499,13 @@ class UtilityMeterSensor(RestoreSensor): ) return - if self._state is None: + if self.native_value is None: # First state update initializes the utility_meter sensors for sensor in self.hass.data[DATA_UTILITY][self._parent_meter][ DATA_TARIFF_SENSORS ]: sensor.start(new_state_attributes) - if self._unit_of_measurement is None: + if self.native_unit_of_measurement is None: _LOGGER.warning( "Source sensor %s has no unit of measurement. Please %s", self._sensor_source_id, @@ -502,10 +516,12 @@ class UtilityMeterSensor(RestoreSensor): adjustment := self.calculate_adjustment(old_state, new_state) ) is not None and (self._sensor_net_consumption or adjustment >= 0): # If net_consumption is off, the adjustment must be non-negative - self._state += adjustment # type: ignore[operator] # self._state will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line + self._attr_native_value += adjustment # type: ignore[operator] # self._attr_native_value will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line self._input_device_class = new_state_attributes.get(ATTR_DEVICE_CLASS) - self._unit_of_measurement = new_state_attributes.get(ATTR_UNIT_OF_MEASUREMENT) + self._attr_native_unit_of_measurement = new_state_attributes.get( + ATTR_UNIT_OF_MEASUREMENT + ) self._last_valid_state = new_state_val self.async_write_ha_state() @@ -534,7 +550,7 @@ class UtilityMeterSensor(RestoreSensor): _LOGGER.debug( "%s - %s - source <%s>", - self._name, + self.name, COLLECTING if self._collecting is not None else PAUSED, self._sensor_source_id, ) @@ -543,11 +559,10 @@ class UtilityMeterSensor(RestoreSensor): async def _program_reset(self): """Program the reset of the utility meter.""" - if self._cron_pattern is not None: - tz = dt_util.get_default_time_zone() - self._next_reset = croniter(self._cron_pattern, dt_util.now(tz)).get_next( - datetime - ) # we need timezone for DST purposes (see issue #102984) + if self.scheduler: + self._next_reset = next(self.scheduler) + + _LOGGER.debug("Next reset of %s is %s", self.entity_id, self._next_reset) self.async_on_remove( async_track_point_in_time( self.hass, @@ -555,6 +570,7 @@ class UtilityMeterSensor(RestoreSensor): 
self._next_reset, ) ) + self.async_write_ha_state() async def _async_reset_meter(self, event): """Reset the utility meter status.""" @@ -575,20 +591,26 @@ class UtilityMeterSensor(RestoreSensor): return _LOGGER.debug("Reset utility meter <%s>", self.entity_id) self._last_reset = dt_util.utcnow() - self._last_period = Decimal(self._state) if self._state else Decimal(0) - self._state = 0 + self._last_period = ( + Decimal(self.native_value) if self.native_value else Decimal(0) + ) + self._attr_native_value = 0 self.async_write_ha_state() async def async_calibrate(self, value): """Calibrate the Utility Meter with a given value.""" - _LOGGER.debug("Calibrate %s = %s type(%s)", self._name, value, type(value)) - self._state = Decimal(str(value)) + _LOGGER.debug("Calibrate %s = %s type(%s)", self.name, value, type(value)) + self._attr_native_value = Decimal(str(value)) self.async_write_ha_state() async def async_added_to_hass(self): """Handle entity which will be added.""" await super().async_added_to_hass() + # track current timezone in case it changes + # and we need to reconfigure the scheduler + self._current_tz = self.hass.config.time_zone + await self._program_reset() self.async_on_remove( @@ -598,10 +620,11 @@ class UtilityMeterSensor(RestoreSensor): ) if (last_sensor_data := await self.async_get_last_sensor_data()) is not None: - # new introduced in 2022.04 - self._state = last_sensor_data.native_value + self._attr_native_value = last_sensor_data.native_value self._input_device_class = last_sensor_data.input_device_class - self._unit_of_measurement = last_sensor_data.native_unit_of_measurement + self._attr_native_unit_of_measurement = ( + last_sensor_data.native_unit_of_measurement + ) self._last_period = last_sensor_data.last_period self._last_reset = last_sensor_data.last_reset self._last_valid_state = last_sensor_data.last_valid_state @@ -609,39 +632,6 @@ class UtilityMeterSensor(RestoreSensor): # Null lambda to allow cancelling the collection on tariff change self._collecting = lambda: None - elif state := await self.async_get_last_state(): - # legacy to be removed on 2022.10 (we are keeping this to avoid utility_meter counter losses) - try: - self._state = Decimal(state.state) - except InvalidOperation: - _LOGGER.error( - "Could not restore state <%s>. 
Resetting utility_meter.%s", - state.state, - self.name, - ) - else: - self._unit_of_measurement = state.attributes.get( - ATTR_UNIT_OF_MEASUREMENT - ) - self._last_period = ( - Decimal(state.attributes[ATTR_LAST_PERIOD]) - if state.attributes.get(ATTR_LAST_PERIOD) - and is_number(state.attributes[ATTR_LAST_PERIOD]) - else Decimal(0) - ) - self._last_valid_state = ( - Decimal(state.attributes[ATTR_LAST_VALID_STATE]) - if state.attributes.get(ATTR_LAST_VALID_STATE) - and is_number(state.attributes[ATTR_LAST_VALID_STATE]) - else None - ) - self._last_reset = dt_util.as_utc( - dt_util.parse_datetime(state.attributes.get(ATTR_LAST_RESET)) - ) - if state.attributes.get(ATTR_STATUS) == COLLECTING: - # Null lambda to allow cancelling the collection on tariff change - self._collecting = lambda: None - @callback def async_source_tracking(event): """Wait for source to be ready, then start meter.""" @@ -666,7 +656,7 @@ class UtilityMeterSensor(RestoreSensor): _LOGGER.debug( "<%s> collecting %s from %s", self.name, - self._unit_of_measurement, + self.native_unit_of_measurement, self._sensor_source_id, ) self._collecting = async_track_state_change_event( @@ -675,28 +665,34 @@ class UtilityMeterSensor(RestoreSensor): self.async_on_remove(async_at_started(self.hass, async_source_tracking)) + async def async_track_time_zone(event): + """Reconfigure Scheduler after time zone changes.""" + + if self._current_tz != self.hass.config.time_zone: + self._current_tz = self.hass.config.time_zone + + self._config_scheduler() + await self._program_reset() + + self.async_on_remove( + self.hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, async_track_time_zone) + ) + async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" if self._collecting: self._collecting() self._collecting = None - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def native_value(self): - """Return the state of the sensor.""" - return self._state - @property def device_class(self): """Return the device class of the sensor.""" if self._input_device_class is not None: return self._input_device_class - if self._unit_of_measurement in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY]: + if ( + self.native_unit_of_measurement + in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY] + ): return SensorDeviceClass.ENERGY return None @@ -709,11 +705,6 @@ class UtilityMeterSensor(RestoreSensor): else SensorStateClass.TOTAL_INCREASING ) - @property - def native_unit_of_measurement(self): - """Return the unit the value is expressed in.""" - return self._unit_of_measurement - @property def extra_state_attributes(self): """Return the state attributes of the sensor.""" diff --git a/homeassistant/components/utility_meter/strings.json b/homeassistant/components/utility_meter/strings.json index fc1c727fb0a..4a8ae415a83 100644 --- a/homeassistant/components/utility_meter/strings.json +++ b/homeassistant/components/utility_meter/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Utility Meter", + "title": "Create Utility Meter", "description": "Create a sensor which tracks consumption of various utilities (e.g., energy, gas, water, heating) over a configured period of time, typically monthly. 
The utility meter sensor optionally supports splitting the consumption by tariffs, in that case one sensor for each tariff is created as well as a select entity to choose the current tariff.", "data": { "always_available": "Sensor always available", @@ -25,6 +25,9 @@ "tariffs": "A list of supported tariffs, leave empty if only a single tariff is needed." } } + }, + "error": { + "tariffs_not_unique": "Tariffs must be unique" } }, "options": { diff --git a/homeassistant/components/uvc/camera.py b/homeassistant/components/uvc/camera.py index cd9594c7d31..a6f0202ee25 100644 --- a/homeassistant/components/uvc/camera.py +++ b/homeassistant/components/uvc/camera.py @@ -5,9 +5,11 @@ from __future__ import annotations from datetime import datetime import logging import re +from typing import Any, cast -import requests from uvcclient import camera as uvc_camera, nvr +from uvcclient.camera import UVCCameraClient +from uvcclient.nvr import UVCRemote import voluptuous as vol from homeassistant.components.camera import ( @@ -57,11 +59,11 @@ def setup_platform( ssl = config[CONF_SSL] try: - # Exceptions may be raised in all method calls to the nvr library. nvrconn = nvr.UVCRemote(addr, port, key, ssl=ssl) + # Exceptions may be raised in all method calls to the nvr library. cameras = nvrconn.index() - identifier = "id" if nvrconn.server_version >= (3, 2, 0) else "uuid" + identifier = nvrconn.camera_identifier # Filter out airCam models, which are not supported in the latest # version of UnifiVideo and which are EOL by Ubiquiti cameras = [ @@ -75,15 +77,12 @@ def setup_platform( except nvr.NvrError as ex: _LOGGER.error("NVR refuses to talk to me: %s", str(ex)) raise PlatformNotReady from ex - except requests.exceptions.ConnectionError as ex: - _LOGGER.error("Unable to connect to NVR: %s", str(ex)) - raise PlatformNotReady from ex add_entities( - [ + ( UnifiVideoCamera(nvrconn, camera[identifier], camera["name"], password) for camera in cameras - ], + ), True, ) @@ -92,24 +91,19 @@ class UnifiVideoCamera(Camera): """A Ubiquiti Unifi Video Camera.""" _attr_should_poll = True # Cameras default to False + _attr_brand = "Ubiquiti" + _attr_is_streaming = False + _caminfo: dict[str, Any] - def __init__(self, camera, uuid, name, password): + def __init__(self, camera: UVCRemote, uuid: str, name: str, password: str) -> None: """Initialize an Unifi camera.""" super().__init__() self._nvr = camera - self._uuid = uuid - self._name = name + self._uuid = self._attr_unique_id = uuid + self._attr_name = name self._password = password - self._attr_is_streaming = False - self._connect_addr = None - self._camera = None - self._motion_status = False - self._caminfo = None - - @property - def name(self): - """Return the name of this camera.""" - return self._name + self._connect_addr: str | None = None + self._camera: UVCCameraClient | None = None @property def supported_features(self) -> CameraEntityFeature: @@ -122,7 +116,7 @@ class UnifiVideoCamera(Camera): return CameraEntityFeature(0) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the camera state attributes.""" attr = {} if self.motion_detection_enabled: @@ -145,24 +139,14 @@ class UnifiVideoCamera(Camera): @property def motion_detection_enabled(self) -> bool: """Camera Motion Detection Status.""" - return self._caminfo["recordingSettings"]["motionRecordEnabled"] + return bool(self._caminfo["recordingSettings"]["motionRecordEnabled"]) @property - def unique_id(self) -> str: - """Return a unique identifier for 
this client.""" - return self._uuid - - @property - def brand(self): - """Return the brand of this camera.""" - return "Ubiquiti" - - @property - def model(self): + def model(self) -> str: """Return the model of this camera.""" - return self._caminfo["model"] + return cast(str, self._caminfo["model"]) - def _login(self): + def _login(self) -> bool: """Login to the camera.""" caminfo = self._caminfo if self._connect_addr: @@ -170,6 +154,7 @@ class UnifiVideoCamera(Camera): else: addrs = [caminfo["host"], caminfo["internalHost"]] + client_cls: type[uvc_camera.UVCCameraClient] if self._nvr.server_version >= (3, 2, 0): client_cls = uvc_camera.UVCCameraClientV320 else: @@ -178,15 +163,14 @@ class UnifiVideoCamera(Camera): if caminfo["username"] is None: caminfo["username"] = "ubnt" + assert isinstance(caminfo["username"], str) + camera = None for addr in addrs: try: camera = client_cls(addr, caminfo["username"], self._password) camera.login() - _LOGGER.debug( - "Logged into UVC camera %(name)s via %(addr)s", - {"name": self._name, "addr": addr}, - ) + _LOGGER.debug("Logged into UVC camera %s via %s", self._attr_name, addr) self._connect_addr = addr break except OSError: @@ -197,7 +181,7 @@ class UnifiVideoCamera(Camera): pass if not self._connect_addr: _LOGGER.error("Unable to login to camera") - return None + return False self._camera = camera self._caminfo = caminfo @@ -210,11 +194,13 @@ class UnifiVideoCamera(Camera): if not self._camera and not self._login(): return None - def _get_image(retry=True): + def _get_image(retry: bool = True) -> bytes | None: + assert self._camera is not None try: return self._camera.get_snapshot() except uvc_camera.CameraConnectError: _LOGGER.error("Unable to contact camera") + return None except uvc_camera.CameraAuthError: if retry: self._login() @@ -224,13 +210,12 @@ class UnifiVideoCamera(Camera): return _get_image() - def set_motion_detection(self, mode): + def set_motion_detection(self, mode: bool) -> None: """Set motion detection on or off.""" set_mode = "motion" if mode is True else "none" try: self._nvr.set_recordmode(self._uuid, set_mode) - self._motion_status = mode except nvr.NvrError as err: _LOGGER.error("Unable to set recordmode to %s", set_mode) _LOGGER.debug(err) @@ -243,16 +228,19 @@ class UnifiVideoCamera(Camera): """Disable motion detection in camera.""" self.set_motion_detection(False) - async def stream_source(self): + async def stream_source(self) -> str | None: """Return the source of the stream.""" for channel in self._caminfo["channels"]: if channel["isRtspEnabled"]: - return next( - ( - uri - for i, uri in enumerate(channel["rtspUris"]) - if re.search(self._nvr._host, uri) # noqa: SLF001 - ) + return cast( + str, + next( + ( + uri + for i, uri in enumerate(channel["rtspUris"]) + if re.search(self._nvr._host, uri) # noqa: SLF001 + ) + ), ) return None diff --git a/homeassistant/components/uvc/manifest.json b/homeassistant/components/uvc/manifest.json index c72b865b5ef..aeb9b6068ea 100644 --- a/homeassistant/components/uvc/manifest.json +++ b/homeassistant/components/uvc/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/uvc", "iot_class": "local_polling", "loggers": ["uvcclient"], + "quality_scale": "legacy", "requirements": ["uvcclient==0.12.1"] } diff --git a/homeassistant/components/vacuum/__init__.py b/homeassistant/components/vacuum/__init__.py index 867e25d4b2a..46e35bb3e11 100644 --- a/homeassistant/components/vacuum/__init__.py +++ b/homeassistant/components/vacuum/__init__.py @@ -2,12 
+2,14 @@ from __future__ import annotations +import asyncio from datetime import timedelta from enum import IntFlag -from functools import cached_property, partial +from functools import partial import logging -from typing import Any +from typing import TYPE_CHECKING, Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -17,22 +19,37 @@ from homeassistant.const import ( # noqa: F401 # STATE_PAUSED/IDLE are API SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_IDLE, STATE_ON, - STATE_PAUSED, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.deprecation import ( + DeprecatedConstantEnum, + all_with_deprecated_constants, + check_if_deprecated_constant, + dir_with_deprecated_constants, +) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.icon import icon_for_battery_level from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass +from homeassistant.util.hass_dict import HassKey -from .const import DOMAIN, STATE_CLEANING, STATE_DOCKED, STATE_ERROR, STATE_RETURNING +from .const import ( # noqa: F401 + _DEPRECATED_STATE_CLEANING, + _DEPRECATED_STATE_DOCKED, + _DEPRECATED_STATE_ERROR, + _DEPRECATED_STATE_RETURNING, + DOMAIN, + VacuumActivity, +) _LOGGER = logging.getLogger(__name__) +DATA_COMPONENT: HassKey[EntityComponent[StateVacuumEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -55,11 +72,13 @@ SERVICE_START = "start" SERVICE_PAUSE = "pause" SERVICE_STOP = "stop" - -STATES = [STATE_CLEANING, STATE_DOCKED, STATE_RETURNING, STATE_ERROR] - DEFAULT_NAME = "Vacuum cleaner robot" +# These STATE_* constants are deprecated as of Home Assistant 2025.1. +# Please use the VacuumActivity enum instead. +_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(VacuumActivity.IDLE, "2026.1") +_DEPRECATED_STATE_PAUSED = DeprecatedConstantEnum(VacuumActivity.PAUSED, "2026.1") + class VacuumEntityFeature(IntFlag): """Supported features of the vacuum entity.""" @@ -82,20 +101,38 @@ class VacuumEntityFeature(IntFlag): # These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. # Please use the VacuumEntityFeature enum instead. 
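# [Editor's note, not part of the patch] A minimal sketch of what the comment
# above asks integrations to do: declare capabilities by combining
# VacuumEntityFeature flags instead of the removed SUPPORT_* integers.
# "MyVacuum" is a hypothetical entity class used only for illustration.
from homeassistant.components.vacuum import StateVacuumEntity, VacuumEntityFeature


class MyVacuum(StateVacuumEntity):
    """Hypothetical vacuum entity showing the VacuumEntityFeature flags."""

    _attr_supported_features = (
        VacuumEntityFeature.START
        | VacuumEntityFeature.STOP
        | VacuumEntityFeature.RETURN_HOME
        | VacuumEntityFeature.BATTERY
    )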
-SUPPORT_TURN_ON = 1 -SUPPORT_TURN_OFF = 2 -SUPPORT_PAUSE = 4 -SUPPORT_STOP = 8 -SUPPORT_RETURN_HOME = 16 -SUPPORT_FAN_SPEED = 32 -SUPPORT_BATTERY = 64 -SUPPORT_STATUS = 128 -SUPPORT_SEND_COMMAND = 256 -SUPPORT_LOCATE = 512 -SUPPORT_CLEAN_SPOT = 1024 -SUPPORT_MAP = 2048 -SUPPORT_STATE = 4096 -SUPPORT_START = 8192 +_DEPRECATED_SUPPORT_TURN_ON = DeprecatedConstantEnum( + VacuumEntityFeature.TURN_ON, "2025.10" +) +_DEPRECATED_SUPPORT_TURN_OFF = DeprecatedConstantEnum( + VacuumEntityFeature.TURN_OFF, "2025.10" +) +_DEPRECATED_SUPPORT_PAUSE = DeprecatedConstantEnum(VacuumEntityFeature.PAUSE, "2025.10") +_DEPRECATED_SUPPORT_STOP = DeprecatedConstantEnum(VacuumEntityFeature.STOP, "2025.10") +_DEPRECATED_SUPPORT_RETURN_HOME = DeprecatedConstantEnum( + VacuumEntityFeature.RETURN_HOME, "2025.10" +) +_DEPRECATED_SUPPORT_FAN_SPEED = DeprecatedConstantEnum( + VacuumEntityFeature.FAN_SPEED, "2025.10" +) +_DEPRECATED_SUPPORT_BATTERY = DeprecatedConstantEnum( + VacuumEntityFeature.BATTERY, "2025.10" +) +_DEPRECATED_SUPPORT_STATUS = DeprecatedConstantEnum( + VacuumEntityFeature.STATUS, "2025.10" +) +_DEPRECATED_SUPPORT_SEND_COMMAND = DeprecatedConstantEnum( + VacuumEntityFeature.SEND_COMMAND, "2025.10" +) +_DEPRECATED_SUPPORT_LOCATE = DeprecatedConstantEnum( + VacuumEntityFeature.LOCATE, "2025.10" +) +_DEPRECATED_SUPPORT_CLEAN_SPOT = DeprecatedConstantEnum( + VacuumEntityFeature.CLEAN_SPOT, "2025.10" +) +_DEPRECATED_SUPPORT_MAP = DeprecatedConstantEnum(VacuumEntityFeature.MAP, "2025.10") +_DEPRECATED_SUPPORT_STATE = DeprecatedConstantEnum(VacuumEntityFeature.STATE, "2025.10") +_DEPRECATED_SUPPORT_START = DeprecatedConstantEnum(VacuumEntityFeature.START, "2025.10") # mypy: disallow-any-generics @@ -108,7 +145,7 @@ def is_on(hass: HomeAssistant, entity_id: str) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the vacuum component.""" - component = hass.data[DOMAIN] = EntityComponent[StateVacuumEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[StateVacuumEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -171,14 +208,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[StateVacuumEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[StateVacuumEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class StateVacuumEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -191,7 +226,7 @@ STATE_VACUUM_CACHED_PROPERTIES_WITH_ATTR_ = { "battery_icon", "fan_speed", "fan_speed_list", - "state", + "activity", } @@ -208,9 +243,58 @@ class StateVacuumEntity( _attr_battery_level: int | None = None _attr_fan_speed: str | None = None _attr_fan_speed_list: list[str] - _attr_state: str | None = None + _attr_activity: VacuumActivity | None = None _attr_supported_features: VacuumEntityFeature = VacuumEntityFeature(0) + __vacuum_legacy_state: bool = False + + def __init_subclass__(cls, **kwargs: Any) -> None: + """Post initialisation processing.""" + super().__init_subclass__(**kwargs) + if any(method in cls.__dict__ for method in ("_attr_state", "state")): + # Integrations should use the 
'activity' property instead of + # setting the state directly. + cls.__vacuum_legacy_state = True + + def __setattr__(self, name: str, value: Any) -> None: + """Set attribute. + + Deprecation warning if setting '_attr_state' directly + unless already reported. + """ + if name == "_attr_state": + self._report_deprecated_activity_handling() + return super().__setattr__(name, value) + + @callback + def add_to_platform_start( + self, + hass: HomeAssistant, + platform: EntityPlatform, + parallel_updates: asyncio.Semaphore | None, + ) -> None: + """Start adding an entity to a platform.""" + super().add_to_platform_start(hass, platform, parallel_updates) + if self.__vacuum_legacy_state: + self._report_deprecated_activity_handling() + + @callback + def _report_deprecated_activity_handling(self) -> None: + """Report on deprecated handling of vacuum state. + + Integrations should implement activity instead of using state directly. + """ + report_usage( + "is setting state directly." + f" Entity {self.entity_id} ({type(self)}) should implement the 'activity'" + " property and return its state using the VacuumActivity enum", + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2026.1", + integration_domain=self.platform.platform_name if self.platform else None, + exclude_integrations={DOMAIN}, + ) + @cached_property def battery_level(self) -> int | None: """Return the battery level of the vacuum cleaner.""" @@ -219,7 +303,7 @@ class StateVacuumEntity( @property def battery_icon(self) -> str: """Return the battery icon for the vacuum cleaner.""" - charging = bool(self.state == STATE_DOCKED) + charging = bool(self.activity == VacuumActivity.DOCKED) return icon_for_battery_level( battery_level=self.battery_level, charging=charging @@ -228,7 +312,7 @@ class StateVacuumEntity( @property def capability_attributes(self) -> dict[str, Any] | None: """Return capability attributes.""" - if VacuumEntityFeature.FAN_SPEED in self.supported_features_compat: + if VacuumEntityFeature.FAN_SPEED in self.supported_features: return {ATTR_FAN_SPEED_LIST: self.fan_speed_list} return None @@ -246,7 +330,7 @@ class StateVacuumEntity( def state_attributes(self) -> dict[str, Any]: """Return the state attributes of the vacuum cleaner.""" data: dict[str, Any] = {} - supported_features = self.supported_features_compat + supported_features = self.supported_features if VacuumEntityFeature.BATTERY in supported_features: data[ATTR_BATTERY_LEVEL] = self.battery_level @@ -257,29 +341,34 @@ class StateVacuumEntity( return data - @cached_property + @final + @property def state(self) -> str | None: """Return the state of the vacuum cleaner.""" - return self._attr_state + if (activity := self.activity) is not None: + return activity + if self._attr_state is not None: + # Backwards compatibility for integrations that set state directly + # Should be removed in 2026.1 + if TYPE_CHECKING: + assert isinstance(self._attr_state, str) + return self._attr_state + return None + + @cached_property + def activity(self) -> VacuumActivity | None: + """Return the current vacuum activity. + + Integrations should overwrite this or use the '_attr_activity' + attribute to set the vacuum activity using the 'VacuumActivity' enum. 
+ """ + return self._attr_activity @cached_property def supported_features(self) -> VacuumEntityFeature: """Flag vacuum cleaner features that are supported.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> VacuumEntityFeature: - """Return the supported features as VacuumEntityFeature. - - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = VacuumEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - def stop(self, **kwargs: Any) -> None: """Stop the vacuum cleaner.""" raise NotImplementedError @@ -381,3 +470,13 @@ class StateVacuumEntity( This method must be run in the event loop. """ await self.hass.async_add_executor_job(self.pause) + + +# As we import deprecated constants from the const module, we need to add these two functions +# otherwise this module will be logged for using deprecated constants and not the custom component +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/vacuum/const.py b/homeassistant/components/vacuum/const.py index af1558f8570..f153a11dcb9 100644 --- a/homeassistant/components/vacuum/const.py +++ b/homeassistant/components/vacuum/const.py @@ -1,10 +1,42 @@ """Support for vacuum cleaner robots (botvacs).""" +from __future__ import annotations + +from enum import StrEnum +from functools import partial + +from homeassistant.helpers.deprecation import ( + DeprecatedConstantEnum, + all_with_deprecated_constants, + check_if_deprecated_constant, + dir_with_deprecated_constants, +) + DOMAIN = "vacuum" -STATE_CLEANING = "cleaning" -STATE_DOCKED = "docked" -STATE_RETURNING = "returning" -STATE_ERROR = "error" -STATES = [STATE_CLEANING, STATE_DOCKED, STATE_RETURNING, STATE_ERROR] +class VacuumActivity(StrEnum): + """Vacuum activity states.""" + + CLEANING = "cleaning" + DOCKED = "docked" + IDLE = "idle" + PAUSED = "paused" + RETURNING = "returning" + ERROR = "error" + + +# These STATE_* constants are deprecated as of Home Assistant 2025.1. +# Please use the VacuumActivity enum instead. 
+_DEPRECATED_STATE_CLEANING = DeprecatedConstantEnum(VacuumActivity.CLEANING, "2026.1") +_DEPRECATED_STATE_DOCKED = DeprecatedConstantEnum(VacuumActivity.DOCKED, "2026.1") +_DEPRECATED_STATE_RETURNING = DeprecatedConstantEnum(VacuumActivity.RETURNING, "2026.1") +_DEPRECATED_STATE_ERROR = DeprecatedConstantEnum(VacuumActivity.ERROR, "2026.1") + + +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/vacuum/device_condition.py b/homeassistant/components/vacuum/device_condition.py index f528b0918a1..4da64484bf7 100644 --- a/homeassistant/components/vacuum/device_condition.py +++ b/homeassistant/components/vacuum/device_condition.py @@ -20,7 +20,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from . import DOMAIN, STATE_CLEANING, STATE_DOCKED, STATE_RETURNING +from . import DOMAIN, VacuumActivity CONDITION_TYPES = {"is_cleaning", "is_docked"} @@ -62,9 +62,9 @@ def async_condition_from_config( ) -> condition.ConditionCheckerType: """Create a function to test a device condition.""" if config[CONF_TYPE] == "is_docked": - test_states = [STATE_DOCKED] + test_states = [VacuumActivity.DOCKED] else: - test_states = [STATE_CLEANING, STATE_RETURNING] + test_states = [VacuumActivity.CLEANING, VacuumActivity.RETURNING] registry = er.async_get(hass) entity_id = er.async_resolve_entity_id(registry, config[CONF_ENTITY_ID]) diff --git a/homeassistant/components/vacuum/device_trigger.py b/homeassistant/components/vacuum/device_trigger.py index 45b0696f871..fe682ef21d3 100644 --- a/homeassistant/components/vacuum/device_trigger.py +++ b/homeassistant/components/vacuum/device_trigger.py @@ -19,7 +19,7 @@ from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN, STATE_CLEANING, STATE_DOCKED +from . 
import DOMAIN, VacuumActivity TRIGGER_TYPES = {"cleaning", "docked"} @@ -77,9 +77,9 @@ async def async_attach_trigger( ) -> CALLBACK_TYPE: """Attach a trigger.""" if config[CONF_TYPE] == "cleaning": - to_state = STATE_CLEANING + to_state = VacuumActivity.CLEANING else: - to_state = STATE_DOCKED + to_state = VacuumActivity.DOCKED state_config = { CONF_PLATFORM: "state", diff --git a/homeassistant/components/vacuum/icons.json b/homeassistant/components/vacuum/icons.json index 25f0cfd03ef..4169729efec 100644 --- a/homeassistant/components/vacuum/icons.json +++ b/homeassistant/components/vacuum/icons.json @@ -5,17 +5,41 @@ } }, "services": { - "clean_spot": "mdi:target-variant", - "locate": "mdi:map-marker", - "pause": "mdi:pause", - "return_to_base": "mdi:home-import-outline", - "send_command": "mdi:send", - "set_fan_speed": "mdi:fan", - "start": "mdi:play", - "start_pause": "mdi:play-pause", - "stop": "mdi:stop", - "toggle": "mdi:play-pause", - "turn_off": "mdi:stop", - "turn_on": "mdi:play" + "clean_spot": { + "service": "mdi:target-variant" + }, + "locate": { + "service": "mdi:map-marker" + }, + "pause": { + "service": "mdi:pause" + }, + "return_to_base": { + "service": "mdi:home-import-outline" + }, + "send_command": { + "service": "mdi:send" + }, + "set_fan_speed": { + "service": "mdi:fan" + }, + "start": { + "service": "mdi:play" + }, + "start_pause": { + "service": "mdi:play-pause" + }, + "stop": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:play-pause" + }, + "turn_off": { + "service": "mdi:stop" + }, + "turn_on": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/vacuum/intent.py b/homeassistant/components/vacuum/intent.py index 8952c13875d..48340252b6e 100644 --- a/homeassistant/components/vacuum/intent.py +++ b/homeassistant/components/vacuum/intent.py @@ -18,6 +18,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: DOMAIN, SERVICE_START, description="Starts a vacuum", + required_domains={DOMAIN}, platforms={DOMAIN}, ), ) @@ -28,6 +29,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: DOMAIN, SERVICE_RETURN_TO_BASE, description="Returns a vacuum to base", + required_domains={DOMAIN}, platforms={DOMAIN}, ), ) diff --git a/homeassistant/components/vacuum/reproduce_state.py b/homeassistant/components/vacuum/reproduce_state.py index 762cd6f2e90..ef3fb329686 100644 --- a/homeassistant/components/vacuum/reproduce_state.py +++ b/homeassistant/components/vacuum/reproduce_state.py @@ -11,10 +11,8 @@ from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_IDLE, STATE_OFF, STATE_ON, - STATE_PAUSED, ) from homeassistant.core import Context, HomeAssistant, State @@ -26,20 +24,18 @@ from . 
import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_DOCKED, - STATE_RETURNING, + VacuumActivity, ) _LOGGER = logging.getLogger(__name__) VALID_STATES_TOGGLE = {STATE_ON, STATE_OFF} VALID_STATES_STATE = { - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, + VacuumActivity.CLEANING, + VacuumActivity.DOCKED, + VacuumActivity.IDLE, + VacuumActivity.PAUSED, + VacuumActivity.RETURNING, } @@ -75,13 +71,13 @@ async def _async_reproduce_state( service = SERVICE_TURN_ON elif state.state == STATE_OFF: service = SERVICE_TURN_OFF - elif state.state == STATE_CLEANING: + elif state.state == VacuumActivity.CLEANING: service = SERVICE_START - elif state.state in [STATE_DOCKED, STATE_RETURNING]: + elif state.state in [VacuumActivity.DOCKED, VacuumActivity.RETURNING]: service = SERVICE_RETURN_TO_BASE - elif state.state == STATE_IDLE: + elif state.state == VacuumActivity.IDLE: service = SERVICE_STOP - elif state.state == STATE_PAUSED: + elif state.state == VacuumActivity.PAUSED: service = SERVICE_PAUSE await hass.services.async_call( diff --git a/homeassistant/components/vallox/__init__.py b/homeassistant/components/vallox/__init__.py index 292786e4c0e..ceb34bc6ff9 100644 --- a/homeassistant/components/vallox/__init__.py +++ b/homeassistant/components/vallox/__init__.py @@ -13,8 +13,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_NAME, Platform from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( DEFAULT_FAN_SPEED_AWAY, @@ -22,6 +20,7 @@ from .const import ( DEFAULT_FAN_SPEED_HOME, DEFAULT_NAME, DOMAIN, + I18N_KEY_TO_VALLOX_PROFILE, ) from .coordinator import ValloxDataUpdateCoordinator @@ -61,6 +60,18 @@ SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED = vol.Schema( } ) +ATTR_PROFILE = "profile" +ATTR_DURATION = "duration" + +SERVICE_SCHEMA_SET_PROFILE = vol.Schema( + { + vol.Required(ATTR_PROFILE): vol.In(I18N_KEY_TO_VALLOX_PROFILE), + vol.Optional(ATTR_DURATION): vol.All( + vol.Coerce(int), vol.Clamp(min=1, max=65535) + ), + } +) + class ServiceMethodDetails(NamedTuple): """Details for SERVICE_TO_METHOD mapping.""" @@ -72,6 +83,7 @@ class ServiceMethodDetails(NamedTuple): SERVICE_SET_PROFILE_FAN_SPEED_HOME = "set_profile_fan_speed_home" SERVICE_SET_PROFILE_FAN_SPEED_AWAY = "set_profile_fan_speed_away" SERVICE_SET_PROFILE_FAN_SPEED_BOOST = "set_profile_fan_speed_boost" +SERVICE_SET_PROFILE = "set_profile" SERVICE_TO_METHOD = { SERVICE_SET_PROFILE_FAN_SPEED_HOME: ServiceMethodDetails( @@ -86,6 +98,9 @@ SERVICE_TO_METHOD = { method="async_set_profile_fan_speed_boost", schema=SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED, ), + SERVICE_SET_PROFILE: ServiceMethodDetails( + method="async_set_profile", schema=SERVICE_SCHEMA_SET_PROFILE + ), } @@ -183,6 +198,22 @@ class ValloxServiceHandler: return False return True + async def async_set_profile( + self, profile: str, duration: int | None = None + ) -> bool: + """Activate profile for given duration.""" + _LOGGER.debug("Activating profile %s for %s min", profile, duration) + try: + await self._client.set_profile( + I18N_KEY_TO_VALLOX_PROFILE[profile], duration + ) + except ValloxApiException as err: + _LOGGER.error( + "Error setting profile %s for duration %s: %s", profile, duration, err + ) + return False + return True + async def async_handle(self,
call: ServiceCall) -> None: """Dispatch a service call.""" service_details = SERVICE_TO_METHOD.get(call.service) @@ -201,24 +232,3 @@ class ValloxServiceHandler: # be observed by all parties involved. if result: await self._coordinator.async_request_refresh() - - -class ValloxEntity(CoordinatorEntity[ValloxDataUpdateCoordinator]): - """Representation of a Vallox entity.""" - - _attr_has_entity_name = True - - def __init__(self, name: str, coordinator: ValloxDataUpdateCoordinator) -> None: - """Initialize a Vallox entity.""" - super().__init__(coordinator) - - self._device_uuid = self.coordinator.data.uuid - assert self.coordinator.config_entry is not None - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, str(self._device_uuid))}, - manufacturer=DEFAULT_NAME, - model=self.coordinator.data.model, - name=name, - sw_version=self.coordinator.data.sw_version, - configuration_url=f"http://{self.coordinator.config_entry.data[CONF_HOST]}", - ) diff --git a/homeassistant/components/vallox/binary_sensor.py b/homeassistant/components/vallox/binary_sensor.py index 20593fa4402..4a0efc7b101 100644 --- a/homeassistant/components/vallox/binary_sensor.py +++ b/homeassistant/components/vallox/binary_sensor.py @@ -13,9 +13,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ValloxEntity from .const import DOMAIN from .coordinator import ValloxDataUpdateCoordinator +from .entity import ValloxEntity class ValloxBinarySensorEntity(ValloxEntity, BinarySensorEntity): diff --git a/homeassistant/components/vallox/config_flow.py b/homeassistant/components/vallox/config_flow.py index 3660c641b7c..30d1d153d9e 100644 --- a/homeassistant/components/vallox/config_flow.py +++ b/homeassistant/components/vallox/config_flow.py @@ -86,20 +86,18 @@ class ValloxConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration of the Vallox device host address.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - + reconfigure_entry = self._get_reconfigure_entry() if not user_input: return self.async_show_form( step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( - CONFIG_SCHEMA, {CONF_HOST: entry.data.get(CONF_HOST)} + CONFIG_SCHEMA, {CONF_HOST: reconfigure_entry.data.get(CONF_HOST)} ), ) updated_host = user_input[CONF_HOST] - if entry.data.get(CONF_HOST) != updated_host: + if reconfigure_entry.data.get(CONF_HOST) != updated_host: self._async_abort_entries_match({CONF_HOST: updated_host}) errors: dict[str, str] = {} @@ -115,9 +113,7 @@ class ValloxConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_HOST] = "unknown" else: return self.async_update_reload_and_abort( - entry, - data={**entry.data, CONF_HOST: updated_host}, - reason="reconfigure_successful", + reconfigure_entry, data_updates={CONF_HOST: updated_host} ) return self.async_show_form( diff --git a/homeassistant/components/vallox/const.py b/homeassistant/components/vallox/const.py index a2494c594f5..418f57a22c8 100644 --- a/homeassistant/components/vallox/const.py +++ b/homeassistant/components/vallox/const.py @@ -22,14 +22,15 @@ DEFAULT_FAN_SPEED_HOME = 50 DEFAULT_FAN_SPEED_AWAY = 25 DEFAULT_FAN_SPEED_BOOST = 65 -VALLOX_PROFILE_TO_PRESET_MODE_SETTABLE = { - VALLOX_PROFILE.HOME: "Home", - VALLOX_PROFILE.AWAY: "Away", - VALLOX_PROFILE.BOOST: "Boost", - VALLOX_PROFILE.FIREPLACE: "Fireplace", 
+I18N_KEY_TO_VALLOX_PROFILE = { + "home": VALLOX_PROFILE.HOME, + "away": VALLOX_PROFILE.AWAY, + "boost": VALLOX_PROFILE.BOOST, + "fireplace": VALLOX_PROFILE.FIREPLACE, + "extra": VALLOX_PROFILE.EXTRA, } -VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE = { +VALLOX_PROFILE_TO_PRESET_MODE = { VALLOX_PROFILE.HOME: "Home", VALLOX_PROFILE.AWAY: "Away", VALLOX_PROFILE.BOOST: "Boost", @@ -37,8 +38,8 @@ VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE = { VALLOX_PROFILE.EXTRA: "Extra", } -PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE = { - value: key for (key, value) in VALLOX_PROFILE_TO_PRESET_MODE_SETTABLE.items() +PRESET_MODE_TO_VALLOX_PROFILE = { + value: key for (key, value) in VALLOX_PROFILE_TO_PRESET_MODE.items() } VALLOX_CELL_STATE_TO_STR = { diff --git a/homeassistant/components/vallox/date.py b/homeassistant/components/vallox/date.py index 0236117fd0f..33c3ebb253c 100644 --- a/homeassistant/components/vallox/date.py +++ b/homeassistant/components/vallox/date.py @@ -12,9 +12,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ValloxEntity from .const import DOMAIN from .coordinator import ValloxDataUpdateCoordinator +from .entity import ValloxEntity class ValloxFilterChangeDateEntity(ValloxEntity, DateEntity): diff --git a/homeassistant/components/vallox/entity.py b/homeassistant/components/vallox/entity.py new file mode 100644 index 00000000000..b0657c561a8 --- /dev/null +++ b/homeassistant/components/vallox/entity.py @@ -0,0 +1,31 @@ +"""Support for Vallox ventilation units.""" + +from __future__ import annotations + +from homeassistant.const import CONF_HOST +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DEFAULT_NAME, DOMAIN +from .coordinator import ValloxDataUpdateCoordinator + + +class ValloxEntity(CoordinatorEntity[ValloxDataUpdateCoordinator]): + """Representation of a Vallox entity.""" + + _attr_has_entity_name = True + + def __init__(self, name: str, coordinator: ValloxDataUpdateCoordinator) -> None: + """Initialize a Vallox entity.""" + super().__init__(coordinator) + + self._device_uuid = self.coordinator.data.uuid + assert self.coordinator.config_entry is not None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, str(self._device_uuid))}, + manufacturer=DEFAULT_NAME, + model=self.coordinator.data.model, + name=name, + sw_version=self.coordinator.data.sw_version, + configuration_url=f"http://{self.coordinator.config_entry.data[CONF_HOST]}", + ) diff --git a/homeassistant/components/vallox/fan.py b/homeassistant/components/vallox/fan.py index 4fe2cfd45d4..3a21ef060a7 100644 --- a/homeassistant/components/vallox/fan.py +++ b/homeassistant/components/vallox/fan.py @@ -14,7 +14,6 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . 
import ValloxEntity from .const import ( DOMAIN, METRIC_KEY_MODE, @@ -23,10 +22,11 @@ from .const import ( METRIC_KEY_PROFILE_FAN_SPEED_HOME, MODE_OFF, MODE_ON, - PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE, - VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE, + PRESET_MODE_TO_VALLOX_PROFILE, + VALLOX_PROFILE_TO_PRESET_MODE, ) from .coordinator import ValloxDataUpdateCoordinator +from .entity import ValloxEntity class ExtraStateAttributeDetails(NamedTuple): @@ -83,7 +83,6 @@ class ValloxFanEntity(ValloxEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, @@ -97,7 +96,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): self._client = client self._attr_unique_id = str(self._device_uuid) - self._attr_preset_modes = list(PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE) + self._attr_preset_modes = list(PRESET_MODE_TO_VALLOX_PROFILE) @property def is_on(self) -> bool: @@ -108,7 +107,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): def preset_mode(self) -> str | None: """Return the current preset mode.""" vallox_profile = self.coordinator.data.profile - return VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE.get(vallox_profile) + return VALLOX_PROFILE_TO_PRESET_MODE.get(vallox_profile) @property def percentage(self) -> int | None: @@ -204,7 +203,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): return False try: - profile = PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE[preset_mode] + profile = PRESET_MODE_TO_VALLOX_PROFILE[preset_mode] await self._client.set_profile(profile) except ValloxApiException as err: @@ -220,7 +219,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): Returns true if speed has been changed, false otherwise. """ vallox_profile = ( - PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE[preset_mode] + PRESET_MODE_TO_VALLOX_PROFILE[preset_mode] if preset_mode is not None else self.coordinator.data.profile ) diff --git a/homeassistant/components/vallox/icons.json b/homeassistant/components/vallox/icons.json index 67b41d216d2..9123d1bfe9b 100644 --- a/homeassistant/components/vallox/icons.json +++ b/homeassistant/components/vallox/icons.json @@ -37,8 +37,17 @@ } }, "services": { - "set_profile_fan_speed_home": "mdi:home", - "set_profile_fan_speed_away": "mdi:walk", - "set_profile_fan_speed_boost": "mdi:speedometer" + "set_profile_fan_speed_home": { + "service": "mdi:home" + }, + "set_profile_fan_speed_away": { + "service": "mdi:walk" + }, + "set_profile_fan_speed_boost": { + "service": "mdi:speedometer" + }, + "set_profile": { + "service": "mdi:fan" + } } } diff --git a/homeassistant/components/vallox/number.py b/homeassistant/components/vallox/number.py index 93190da1f16..96bc07b5a93 100644 --- a/homeassistant/components/vallox/number.py +++ b/homeassistant/components/vallox/number.py @@ -16,9 +16,9 @@ from homeassistant.const import EntityCategory, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ValloxEntity from .const import DOMAIN from .coordinator import ValloxDataUpdateCoordinator +from .entity import ValloxEntity class ValloxNumberEntity(ValloxEntity, NumberEntity): diff --git a/homeassistant/components/vallox/sensor.py b/homeassistant/components/vallox/sensor.py index 0bb509a9c5a..7165947861a 100644 --- a/homeassistant/components/vallox/sensor.py +++ b/homeassistant/components/vallox/sensor.py @@ -25,15 +25,15 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.util import dt as dt_util -from . import ValloxEntity from .const import ( DOMAIN, METRIC_KEY_MODE, MODE_ON, VALLOX_CELL_STATE_TO_STR, - VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE, + VALLOX_PROFILE_TO_PRESET_MODE, ) from .coordinator import ValloxDataUpdateCoordinator +from .entity import ValloxEntity class ValloxSensorEntity(ValloxEntity, SensorEntity): @@ -78,7 +78,7 @@ class ValloxProfileSensor(ValloxSensorEntity): def native_value(self) -> StateType: """Return the value reported by the sensor.""" vallox_profile = self.coordinator.data.profile - return VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE.get(vallox_profile) + return VALLOX_PROFILE_TO_PRESET_MODE.get(vallox_profile) # There is a quirk with respect to the fan speed reporting. The device keeps on reporting the last diff --git a/homeassistant/components/vallox/services.yaml b/homeassistant/components/vallox/services.yaml index e6bd3edad11..f2a55032b93 100644 --- a/homeassistant/components/vallox/services.yaml +++ b/homeassistant/components/vallox/services.yaml @@ -27,3 +27,24 @@ set_profile_fan_speed_boost: min: 0 max: 100 unit_of_measurement: "%" + +set_profile: + fields: + profile: + required: true + selector: + select: + translation_key: "profile" + options: + - "home" + - "away" + - "boost" + - "fireplace" + - "extra" + duration: + required: false + selector: + number: + min: 1 + max: 65535 + unit_of_measurement: "minutes" diff --git a/homeassistant/components/vallox/strings.json b/homeassistant/components/vallox/strings.json index 4df57b81bb5..8a30ed4ad01 100644 --- a/homeassistant/components/vallox/strings.json +++ b/homeassistant/components/vallox/strings.json @@ -133,6 +133,31 @@ "description": "[%key:component::vallox::services::set_profile_fan_speed_home::fields::fan_speed::description%]" } } + }, + "set_profile": { + "name": "Activate profile for duration", + "description": "Activate a profile and optionally set duration.", + "fields": { + "profile": { + "name": "Profile", + "description": "Profile to activate" + }, + "duration": { + "name": "Duration", + "description": "Activation duration, if omitted device uses stored duration. Duration of 65535 activates profile without timeout. Duration only applies to Boost, Fireplace and Extra profiles." + } + } + } + }, + "selector": { + "profile": { + "options": { + "home": "Home", + "away": "Away", + "boost": "Boost", + "fireplace": "Fireplace", + "extra": "Extra" + } } } } diff --git a/homeassistant/components/vallox/switch.py b/homeassistant/components/vallox/switch.py index d70de89606d..20b270f8f18 100644 --- a/homeassistant/components/vallox/switch.py +++ b/homeassistant/components/vallox/switch.py @@ -13,9 +13,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ValloxEntity from .const import DOMAIN from .coordinator import ValloxDataUpdateCoordinator +from .entity import ValloxEntity class ValloxSwitchEntity(ValloxEntity, SwitchEntity): diff --git a/homeassistant/components/valve/__init__.py b/homeassistant/components/valve/__init__.py index 04ce12e8a8f..7df6f8eac51 100644 --- a/homeassistant/components/valve/__init__.py +++ b/homeassistant/components/valve/__init__.py @@ -11,7 +11,7 @@ from typing import Any, final import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( +from homeassistant.const import ( # noqa: F401 SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE, SERVICE_SET_VALVE_POSITION, @@ -27,10 +27,13 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey + +from .const import DOMAIN, ValveState _LOGGER = logging.getLogger(__name__) -DOMAIN = "valve" +DATA_COMPONENT: HassKey[EntityComponent[ValveEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -64,7 +67,7 @@ ATTR_POSITION = "position" async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for valves.""" - component = hass.data[DOMAIN] = EntityComponent[ValveEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[ValveEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -108,14 +111,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[ValveEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[ValveEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) @dataclass(frozen=True, kw_only=True) @@ -173,18 +174,18 @@ class ValveEntity(Entity): reports_position = self.reports_position if self.is_opening: self.__is_last_toggle_direction_open = True - return STATE_OPENING + return ValveState.OPENING if self.is_closing: self.__is_last_toggle_direction_open = False - return STATE_CLOSING + return ValveState.CLOSING if reports_position is True: if (current_valve_position := self.current_valve_position) is None: return None position_zero = current_valve_position == 0 - return STATE_CLOSED if position_zero else STATE_OPEN + return ValveState.CLOSED if position_zero else ValveState.OPEN if (closed := self.is_closed) is None: return None - return STATE_CLOSED if closed else STATE_OPEN + return ValveState.CLOSED if closed else ValveState.OPEN @final @property diff --git a/homeassistant/components/valve/const.py b/homeassistant/components/valve/const.py new file mode 100644 index 00000000000..5f590b5015a --- /dev/null +++ b/homeassistant/components/valve/const.py @@ -0,0 +1,14 @@ +"""Constants for the Valve entity platform.""" + +from enum import StrEnum + +DOMAIN = "valve" + + +class ValveState(StrEnum): + """State of Valve entities.""" + + OPENING = "opening" + CLOSING = "closing" + 
CLOSED = "closed" + OPEN = "open" diff --git a/homeassistant/components/valve/icons.json b/homeassistant/components/valve/icons.json index 2c887ebf273..c9c6b632dcb 100644 --- a/homeassistant/components/valve/icons.json +++ b/homeassistant/components/valve/icons.json @@ -17,10 +17,20 @@ } }, "services": { - "close_valve": "mdi:valve-closed", - "open_valve": "mdi:valve-open", - "set_valve_position": "mdi:valve", - "stop_valve": "mdi:stop", - "toggle": "mdi:valve-open" + "close_valve": { + "service": "mdi:valve-closed" + }, + "open_valve": { + "service": "mdi:valve-open" + }, + "set_valve_position": { + "service": "mdi:valve" + }, + "stop_valve": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:valve-open" + } } } diff --git a/homeassistant/components/vasttrafik/manifest.json b/homeassistant/components/vasttrafik/manifest.json index 336d06e182c..73b773720ad 100644 --- a/homeassistant/components/vasttrafik/manifest.json +++ b/homeassistant/components/vasttrafik/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/vasttrafik", "iot_class": "cloud_polling", "loggers": ["vasttrafik"], + "quality_scale": "legacy", "requirements": ["vtjp==0.2.1"] } diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index 685f8b49500..ad1c35a124b 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -2,30 +2,25 @@ from __future__ import annotations -from contextlib import suppress +import asyncio +from dataclasses import dataclass import logging import os import shutil from velbusaio.controller import Velbus -import voluptuous as vol +from velbusaio.exceptions import VelbusConnectionFailed from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ADDRESS, CONF_PORT, Platform -from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.exceptions import PlatformNotReady +from homeassistant.const import CONF_PORT, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady, PlatformNotReady from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.storage import STORAGE_DIR +from homeassistant.helpers.typing import ConfigType -from .const import ( - CONF_INTERFACE, - CONF_MEMO_TEXT, - DOMAIN, - SERVICE_CLEAR_CACHE, - SERVICE_SCAN, - SERVICE_SET_MEMO_TEXT, - SERVICE_SYNC, -) +from .const import DOMAIN +from .services import setup_services _LOGGER = logging.getLogger(__name__) @@ -40,13 +35,25 @@ PLATFORMS = [ Platform.SWITCH, ] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -async def velbus_connect_task( +type VelbusConfigEntry = ConfigEntry[VelbusData] + + +@dataclass +class VelbusData: + """Runtime data for the Velbus config entry.""" + + controller: Velbus + scan_task: asyncio.Task + + +async def velbus_scan_task( controller: Velbus, hass: HomeAssistant, entry_id: str ) -> None: - """Task to offload the long running connect.""" + """Task to offload the long running scan.""" try: - await controller.connect() + await controller.start() except ConnectionError as ex: raise PlatformNotReady( f"Connection error while connecting to Velbus {entry_id}: {ex}" @@ -67,133 +74,41 @@ def _migrate_device_identifiers(hass: HomeAssistant, entry_id: str) -> None: dev_reg.async_update_device(device.id, new_identifiers=new_identifier) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - 
"""Establish connection with velbus.""" - hass.data.setdefault(DOMAIN, {}) +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the actions for the Velbus component.""" + setup_services(hass) + return True + +async def async_setup_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bool: + """Establish connection with velbus.""" controller = Velbus( entry.data[CONF_PORT], cache_dir=hass.config.path(STORAGE_DIR, f"velbuscache-{entry.entry_id}"), ) - hass.data[DOMAIN][entry.entry_id] = {} - hass.data[DOMAIN][entry.entry_id]["cntrl"] = controller - hass.data[DOMAIN][entry.entry_id]["tsk"] = hass.async_create_task( - velbus_connect_task(controller, hass, entry.entry_id) - ) + try: + await controller.connect() + except VelbusConnectionFailed as error: + raise ConfigEntryNotReady("Cannot connect to Velbus") from error + + task = hass.async_create_task(velbus_scan_task(controller, hass, entry.entry_id)) + entry.runtime_data = VelbusData(controller=controller, scan_task=task) _migrate_device_identifiers(hass, entry.entry_id) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - if hass.services.has_service(DOMAIN, SERVICE_SCAN): - return True - - def check_entry_id(interface: str) -> str: - for config_entry in hass.config_entries.async_entries(DOMAIN): - if "port" in config_entry.data and config_entry.data["port"] == interface: - return config_entry.entry_id - raise vol.Invalid( - "The interface provided is not defined as a port in a Velbus integration" - ) - - async def scan(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].scan() - - hass.services.async_register( - DOMAIN, - SERVICE_SCAN, - scan, - vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), - ) - - async def syn_clock(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].sync_clock() - - hass.services.async_register( - DOMAIN, - SERVICE_SYNC, - syn_clock, - vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), - ) - - async def set_memo_text(call: ServiceCall) -> None: - """Handle Memo Text service call.""" - memo_text = call.data[CONF_MEMO_TEXT] - await ( - hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] - .get_module(call.data[CONF_ADDRESS]) - .set_memo_text(memo_text.async_render()) - ) - - hass.services.async_register( - DOMAIN, - SERVICE_SET_MEMO_TEXT, - set_memo_text, - vol.Schema( - { - vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), - vol.Required(CONF_ADDRESS): vol.All( - vol.Coerce(int), vol.Range(min=0, max=255) - ), - vol.Optional(CONF_MEMO_TEXT, default=""): cv.template, - } - ), - ) - - async def clear_cache(call: ServiceCall) -> None: - """Handle a clear cache service call.""" - # clear the cache - with suppress(FileNotFoundError): - if call.data.get(CONF_ADDRESS): - await hass.async_add_executor_job( - os.unlink, - hass.config.path( - STORAGE_DIR, - f"velbuscache-{call.data[CONF_INTERFACE]}/{call.data[CONF_ADDRESS]}.p", - ), - ) - else: - await hass.async_add_executor_job( - shutil.rmtree, - hass.config.path( - STORAGE_DIR, f"velbuscache-{call.data[CONF_INTERFACE]}/" - ), - ) - # call a scan to repopulate - await scan(call) - - hass.services.async_register( - DOMAIN, - SERVICE_CLEAR_CACHE, - clear_cache, - vol.Schema( - { - vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), - vol.Optional(CONF_ADDRESS): vol.All( - vol.Coerce(int), vol.Range(min=0, max=255) - ), - } - ), - ) - return True -async def 
async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bool: """Unload (close) the velbus connection.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - await hass.data[DOMAIN][entry.entry_id]["cntrl"].stop() - hass.data[DOMAIN].pop(entry.entry_id) - if not hass.data[DOMAIN]: - hass.data.pop(DOMAIN) - hass.services.async_remove(DOMAIN, SERVICE_SCAN) - hass.services.async_remove(DOMAIN, SERVICE_SYNC) - hass.services.async_remove(DOMAIN, SERVICE_SET_MEMO_TEXT) - hass.services.async_remove(DOMAIN, SERVICE_CLEAR_CACHE) + await entry.runtime_data.controller.stop() return unload_ok -async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_remove_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> None: """Remove the velbus entry, so we also have to cleanup the cache dir.""" await hass.async_add_executor_job( shutil.rmtree, @@ -201,7 +116,9 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: ) -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, config_entry: VelbusConfigEntry +) -> bool: """Migrate old entry.""" _LOGGER.debug("Migrating from version %s", config_entry.version) cache_path = hass.config.path(STORAGE_DIR, f"velbuscache-{config_entry.entry_id}/") diff --git a/homeassistant/components/velbus/binary_sensor.py b/homeassistant/components/velbus/binary_sensor.py index 5f363c1a035..88dc994efe8 100644 --- a/homeassistant/components/velbus/binary_sensor.py +++ b/homeassistant/components/velbus/binary_sensor.py @@ -3,24 +3,25 @@ from velbusaio.channels import Button as VelbusButton from homeassistant.components.binary_sensor import BinarySensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] + await entry.runtime_data.scan_task async_add_entities( - VelbusBinarySensor(channel) for channel in cntrl.get_all("binary_sensor") + VelbusBinarySensor(channel) + for channel in entry.runtime_data.controller.get_all_binary_sensor() ) diff --git a/homeassistant/components/velbus/button.py b/homeassistant/components/velbus/button.py index bd5b81d67a0..fc943159123 100644 --- a/homeassistant/components/velbus/button.py +++ b/homeassistant/components/velbus/button.py @@ -8,24 +8,27 @@ from velbusaio.channels import ( ) from homeassistant.components.button import ButtonEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusButton(channel) for channel in cntrl.get_all("button")) + await entry.runtime_data.scan_task + async_add_entities( + VelbusButton(channel) + for channel in entry.runtime_data.controller.get_all_button() + ) class VelbusButton(VelbusEntity, ButtonEntity): diff --git a/homeassistant/components/velbus/climate.py b/homeassistant/components/velbus/climate.py index 34a565c2b37..b2f3077ecee 100644 --- a/homeassistant/components/velbus/climate.py +++ b/homeassistant/components/velbus/climate.py @@ -11,24 +11,29 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import VelbusConfigEntry from .const import DOMAIN, PRESET_MODES from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusClimate(channel) for channel in cntrl.get_all("climate")) + await entry.runtime_data.scan_task + async_add_entities( + VelbusClimate(channel) + for channel in entry.runtime_data.controller.get_all_climate() + ) class VelbusClimate(VelbusEntity, ClimateEntity): @@ -39,10 +44,8 @@ class VelbusClimate(VelbusEntity, ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _attr_hvac_mode = HVACMode.HEAT - _attr_hvac_modes = [HVACMode.HEAT] + _attr_hvac_modes = [HVACMode.HEAT, HVACMode.COOL] _attr_preset_modes = list(PRESET_MODES) - _enable_turn_on_off_backwards_compatibility = False @property def target_temperature(self) -> float | None: @@ -66,6 +69,11 @@ class VelbusClimate(VelbusEntity, ClimateEntity): """Return the current temperature.""" return self._channel.get_state() + @property + def hvac_mode(self) -> HVACMode: + """Return the current hvac mode based on cool_mode message.""" + return HVACMode.COOL if self._channel.get_cool_mode() else HVACMode.HEAT + @api_call async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperatures.""" @@ -79,3 +87,15 @@ class VelbusClimate(VelbusEntity, ClimateEntity): """Set the new preset mode.""" await self._channel.set_preset(PRESET_MODES[preset_mode]) self.async_write_ha_state() + + @api_call + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set the new hvac mode.""" + if hvac_mode not in self._attr_hvac_modes: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_hvac_mode", + translation_placeholders={"hvac_mode": str(hvac_mode)}, + ) + await self._channel.set_mode(hvac_mode) + self.async_write_ha_state() diff --git 
a/homeassistant/components/velbus/config_flow.py b/homeassistant/components/velbus/config_flow.py index 0b47dfe6498..26e2fafabbc 100644 --- a/homeassistant/components/velbus/config_flow.py +++ b/homeassistant/components/velbus/config_flow.py @@ -35,7 +35,7 @@ class VelbusConfigFlow(ConfigFlow, domain=DOMAIN): """Try to connect to the velbus with the port specified.""" try: controller = velbusaio.controller.Velbus(prt) - await controller.connect(True) + await controller.connect() await controller.stop() except VelbusConnectionFailed: self._errors[CONF_PORT] = "cannot_connect" diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 8b9d927f3d7..2ddea37f2d6 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -11,23 +11,26 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusCover(channel) for channel in cntrl.get_all("cover")) + await entry.runtime_data.scan_task + async_add_entities( + VelbusCover(channel) + for channel in entry.runtime_data.controller.get_all_cover() + ) class VelbusCover(VelbusEntity, CoverEntity): diff --git a/homeassistant/components/velbus/diagnostics.py b/homeassistant/components/velbus/diagnostics.py index f7e29e2f57e..75b7669edec 100644 --- a/homeassistant/components/velbus/diagnostics.py +++ b/homeassistant/components/velbus/diagnostics.py @@ -7,18 +7,17 @@ from typing import Any from velbusaio.channels import Channel as VelbusChannel from velbusaio.module import Module as VelbusModule -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry -from .const import DOMAIN +from . 
import VelbusConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: VelbusConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - controller = hass.data[DOMAIN][entry.entry_id]["cntrl"] + controller = entry.runtime_data.controller data: dict[str, Any] = {"entry": entry.as_dict(), "modules": []} for module in controller.get_modules().values(): data["modules"].append(_build_module_diagnostics_info(module)) @@ -26,10 +25,10 @@ async def async_get_config_entry_diagnostics( async def async_get_device_diagnostics( - hass: HomeAssistant, entry: ConfigEntry, device: DeviceEntry + hass: HomeAssistant, entry: VelbusConfigEntry, device: DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device entry.""" - controller = hass.data[DOMAIN][entry.entry_id]["cntrl"] + controller = entry.runtime_data.controller channel = list(next(iter(device.identifiers)))[1] modules = controller.get_modules() return _build_module_diagnostics_info(modules[int(channel)]) diff --git a/homeassistant/components/velbus/icons.json b/homeassistant/components/velbus/icons.json index a806782d189..a46f5e5fbf1 100644 --- a/homeassistant/components/velbus/icons.json +++ b/homeassistant/components/velbus/icons.json @@ -1,8 +1,16 @@ { "services": { - "sync_clock": "mdi:clock", - "scan": "mdi:magnify", - "clear_cache": "mdi:delete", - "set_memo_text": "mdi:note-text" + "sync_clock": { + "service": "mdi:clock" + }, + "scan": { + "service": "mdi:magnify" + }, + "clear_cache": { + "service": "mdi:delete" + }, + "set_memo_text": { + "service": "mdi:note-text" + } } } diff --git a/homeassistant/components/velbus/light.py b/homeassistant/components/velbus/light.py index 7145576be6a..1adf52a8198 100644 --- a/homeassistant/components/velbus/light.py +++ b/homeassistant/components/velbus/light.py @@ -20,28 +20,32 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] + await entry.runtime_data.scan_task entities: list[Entity] = [ - VelbusLight(channel) for channel in cntrl.get_all("light") + VelbusLight(channel) + for channel in entry.runtime_data.controller.get_all_light() ] - entities.extend(VelbusButtonLight(channel) for channel in cntrl.get_all("led")) + entities.extend( + VelbusButtonLight(channel) + for channel in entry.runtime_data.controller.get_all_led() + ) async_add_entities(entities) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index c1cf2951bbd..90981c426f9 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.7.6"], + "requirements": ["velbus-aio==2024.12.2"], "usb": [ { "vid": "10CF", diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml new file mode 100644 index 00000000000..477b6768e71 --- /dev/null +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -0,0 +1,79 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: + status: exempt + comment: | + This integration does not poll. + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: + status: todo + comment: | + Dynamically build up the port parameter based on inputs provided by the user, do not fill-in a name parameter, build it up in the config flow + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: todo + entity-unique-id: done + has-entity-name: todo + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: + status: todo + comment: | + Manual step does not generate an unique-id + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: todo + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: todo + comment: | + Dynamic devices are discovered, but no entities are created for them + entity-category: done + entity-device-class: todo + entity-disabled-by-default: done + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. 
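The quality scale above records `runtime-data: done`; the `__init__.py` hunk at the start of this Velbus block is what earns it, storing a `VelbusData` object on `entry.runtime_data` that every platform awaits before adding entities. The declarations of `VelbusData` and `VelbusConfigEntry` sit outside this excerpt, so the following is only a sketch of how they could look, with the field names taken from the way the hunks use them:

```python
# Sketch only: the real declarations are not part of this diff excerpt.
# Field names follow the usage above (runtime_data.controller, runtime_data.scan_task).
from __future__ import annotations

import asyncio
from dataclasses import dataclass

from velbusaio.controller import Velbus

from homeassistant.config_entries import ConfigEntry


@dataclass
class VelbusData:
    """Runtime data kept on a Velbus config entry."""

    controller: Velbus
    scan_task: asyncio.Task[None]


type VelbusConfigEntry = ConfigEntry[VelbusData]
```

Typing the entry this way is what lets the platform modules drop the `hass.data[DOMAIN][entry.entry_id]` lookups in favor of `entry.runtime_data`.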
+ stale-devices: todo + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration communicates via serial/usb/tcp and does not require a web session. + strict-typing: done diff --git a/homeassistant/components/velbus/select.py b/homeassistant/components/velbus/select.py index 7eecb85fc47..6c2dfe0a3b1 100644 --- a/homeassistant/components/velbus/select.py +++ b/homeassistant/components/velbus/select.py @@ -3,24 +3,27 @@ from velbusaio.channels import SelectedProgram from homeassistant.components.select import SelectEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus select based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusSelect(channel) for channel in cntrl.get_all("select")) + await entry.runtime_data.scan_task + async_add_entities( + VelbusSelect(channel) + for channel in entry.runtime_data.controller.get_all_select() + ) class VelbusSelect(VelbusEntity, SelectEntity): diff --git a/homeassistant/components/velbus/sensor.py b/homeassistant/components/velbus/sensor.py index b765eebcddc..77833da3ee1 100644 --- a/homeassistant/components/velbus/sensor.py +++ b/homeassistant/components/velbus/sensor.py @@ -9,24 +9,24 @@ from homeassistant.components.sensor import ( SensorEntity, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] + await entry.runtime_data.scan_task entities = [] - for channel in cntrl.get_all("sensor"): + for channel in entry.runtime_data.controller.get_all_sensor(): entities.append(VelbusSensor(channel)) if channel.is_counter_channel(): entities.append(VelbusSensor(channel, True)) diff --git a/homeassistant/components/velbus/services.py b/homeassistant/components/velbus/services.py new file mode 100644 index 00000000000..3f0b1bd6cdb --- /dev/null +++ b/homeassistant/components/velbus/services.py @@ -0,0 +1,132 @@ +"""Support for Velbus devices.""" + +from __future__ import annotations + +from contextlib import suppress +import os +import shutil +from typing import TYPE_CHECKING + +import voluptuous as vol + +from homeassistant.const import CONF_ADDRESS +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.storage import STORAGE_DIR + +if TYPE_CHECKING: + from . 
import VelbusConfigEntry + +from .const import ( + CONF_INTERFACE, + CONF_MEMO_TEXT, + DOMAIN, + SERVICE_CLEAR_CACHE, + SERVICE_SCAN, + SERVICE_SET_MEMO_TEXT, + SERVICE_SYNC, +) + + +def setup_services(hass: HomeAssistant) -> None: + """Register the velbus services.""" + + def check_entry_id(interface: str) -> str: + for config_entry in hass.config_entries.async_entries(DOMAIN): + if "port" in config_entry.data and config_entry.data["port"] == interface: + return config_entry.entry_id + raise vol.Invalid( + "The interface provided is not defined as a port in a Velbus integration" + ) + + def get_config_entry(interface: str) -> VelbusConfigEntry | None: + for config_entry in hass.config_entries.async_entries(DOMAIN): + if "port" in config_entry.data and config_entry.data["port"] == interface: + return config_entry + return None + + async def scan(call: ServiceCall) -> None: + """Handle a scan service call.""" + entry = get_config_entry(call.data[CONF_INTERFACE]) + if entry: + await entry.runtime_data.controller.scan() + + async def syn_clock(call: ServiceCall) -> None: + """Handle a sync clock service call.""" + entry = get_config_entry(call.data[CONF_INTERFACE]) + if entry: + await entry.runtime_data.controller.sync_clock() + + async def set_memo_text(call: ServiceCall) -> None: + """Handle Memo Text service call.""" + entry = get_config_entry(call.data[CONF_INTERFACE]) + if entry: + memo_text = call.data[CONF_MEMO_TEXT] + module = entry.runtime_data.controller.get_module(call.data[CONF_ADDRESS]) + if module: + await module.set_memo_text(memo_text.async_render()) + + async def clear_cache(call: ServiceCall) -> None: + """Handle a clear cache service call.""" + # clear the cache + with suppress(FileNotFoundError): + if call.data.get(CONF_ADDRESS): + await hass.async_add_executor_job( + os.unlink, + hass.config.path( + STORAGE_DIR, + f"velbuscache-{call.data[CONF_INTERFACE]}/{call.data[CONF_ADDRESS]}.p", + ), + ) + else: + await hass.async_add_executor_job( + shutil.rmtree, + hass.config.path( + STORAGE_DIR, f"velbuscache-{call.data[CONF_INTERFACE]}/" + ), + ) + # call a scan to repopulate + await scan(call) + + hass.services.async_register( + DOMAIN, + SERVICE_SCAN, + scan, + vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_SYNC, + syn_clock, + vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_SET_MEMO_TEXT, + set_memo_text, + vol.Schema( + { + vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), + vol.Required(CONF_ADDRESS): vol.All( + vol.Coerce(int), vol.Range(min=0, max=255) + ), + vol.Optional(CONF_MEMO_TEXT, default=""): cv.template, + } + ), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_CLEAR_CACHE, + clear_cache, + vol.Schema( + { + vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), + vol.Optional(CONF_ADDRESS): vol.All( + vol.Coerce(int), vol.Range(min=0, max=255) + ), + } + ), + ) diff --git a/homeassistant/components/velbus/strings.json b/homeassistant/components/velbus/strings.json index 948c079444d..55c7fda84ac 100644 --- a/homeassistant/components/velbus/strings.json +++ b/homeassistant/components/velbus/strings.json @@ -17,6 +17,11 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, + "exceptions": { + "invalid_hvac_mode": { + "message": "Climate mode {hvac_mode} is not supported." 
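The new `services.py` above registers the four actions once, from `async_setup`, and resolves the target config entry from the supplied interface, so the services no longer disappear when the last entry unloads. A minimal, hypothetical caller (the helper name and the port value are illustrative, not part of the diff):

```python
# Hypothetical helper: trigger the Velbus scan action for one configured
# interface. The constants come from the integration's const module; the
# port string must match the port of an existing config entry.
from homeassistant.components.velbus.const import CONF_INTERFACE, DOMAIN, SERVICE_SCAN
from homeassistant.core import HomeAssistant


async def trigger_velbus_scan(hass: HomeAssistant, port: str) -> None:
    """Ask a configured Velbus interface to rescan its modules."""
    await hass.services.async_call(
        DOMAIN,
        SERVICE_SCAN,
        {CONF_INTERFACE: port},
        blocking=True,
    )
```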
+ } + }, "services": { "sync_clock": { "name": "Sync clock", diff --git a/homeassistant/components/velbus/switch.py b/homeassistant/components/velbus/switch.py index 1e6014b8d90..8256e716d4f 100644 --- a/homeassistant/components/velbus/switch.py +++ b/homeassistant/components/velbus/switch.py @@ -5,23 +5,26 @@ from typing import Any from velbusaio.channels import Relay as VelbusRelay from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusSwitch(channel) for channel in cntrl.get_all("switch")) + await entry.runtime_data.scan_task + async_add_entities( + VelbusSwitch(channel) + for channel in entry.runtime_data.controller.get_all_switch() + ) class VelbusSwitch(VelbusEntity, SwitchEntity): diff --git a/homeassistant/components/velux/__init__.py b/homeassistant/components/velux/__init__.py index 614ed810429..2f1cab67c16 100644 --- a/homeassistant/components/velux/__init__.py +++ b/homeassistant/components/velux/__init__.py @@ -1,11 +1,10 @@ """Support for VELUX KLF 200 devices.""" -from pyvlx import Node, PyVLX, PyVLXException +from pyvlx import PyVLX, PyVLXException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP -from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.helpers.entity import Entity +from homeassistant.core import HomeAssistant, ServiceCall from .const import DOMAIN, LOGGER, PLATFORMS @@ -67,33 +66,3 @@ class VeluxModule: LOGGER.debug("Velux interface started") await self.pyvlx.load_scenes() await self.pyvlx.load_nodes() - - -class VeluxEntity(Entity): - """Abstraction for al Velux entities.""" - - _attr_should_poll = False - - def __init__(self, node: Node, config_entry_id: str) -> None: - """Initialize the Velux device.""" - self.node = node - self._attr_unique_id = ( - node.serial_number - if node.serial_number - else f"{config_entry_id}_{node.node_id}" - ) - self._attr_name = node.name if node.name else f"#{node.node_id}" - - @callback - def async_register_callbacks(self): - """Register callbacks to update hass after device was changed.""" - - async def after_update_callback(device): - """Call after device was updated.""" - self.async_write_ha_state() - - self.node.register_device_updated_cb(after_update_callback) - - async def async_added_to_hass(self): - """Store register state change callback.""" - self.async_register_callbacks() diff --git a/homeassistant/components/velux/cover.py b/homeassistant/components/velux/cover.py index cd7564eee81..90745f601b4 100644 --- a/homeassistant/components/velux/cover.py +++ b/homeassistant/components/velux/cover.py @@ -18,7 +18,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN, VeluxEntity +from .const import DOMAIN +from .entity import VeluxEntity PARALLEL_UPDATES = 1 @@ -94,6 +95,16 @@ class VeluxCover(VeluxEntity, CoverEntity): """Return if the cover is closed.""" return self.node.position.closed + @property + def is_opening(self) -> bool: + """Return if the cover is opening or not.""" + return self.node.is_opening + + @property + def is_closing(self) -> bool: + """Return if the cover is closing or not.""" + return self.node.is_closing + async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self.node.close(wait_for_completion=False) diff --git a/homeassistant/components/velux/entity.py b/homeassistant/components/velux/entity.py new file mode 100644 index 00000000000..674ba5dde45 --- /dev/null +++ b/homeassistant/components/velux/entity.py @@ -0,0 +1,36 @@ +"""Support for VELUX KLF 200 devices.""" + +from pyvlx import Node + +from homeassistant.core import callback +from homeassistant.helpers.entity import Entity + + +class VeluxEntity(Entity): + """Abstraction for al Velux entities.""" + + _attr_should_poll = False + + def __init__(self, node: Node, config_entry_id: str) -> None: + """Initialize the Velux device.""" + self.node = node + self._attr_unique_id = ( + node.serial_number + if node.serial_number + else f"{config_entry_id}_{node.node_id}" + ) + self._attr_name = node.name if node.name else f"#{node.node_id}" + + @callback + def async_register_callbacks(self): + """Register callbacks to update hass after device was changed.""" + + async def after_update_callback(device): + """Call after device was updated.""" + self.async_write_ha_state() + + self.node.register_device_updated_cb(after_update_callback) + + async def async_added_to_hass(self): + """Store register state change callback.""" + self.async_register_callbacks() diff --git a/homeassistant/components/velux/icons.json b/homeassistant/components/velux/icons.json index a16e7b50093..78cb5b14838 100644 --- a/homeassistant/components/velux/icons.json +++ b/homeassistant/components/velux/icons.json @@ -1,5 +1,7 @@ { "services": { - "reboot_gateway": "mdi:restart" + "reboot_gateway": { + "service": "mdi:restart" + } } } diff --git a/homeassistant/components/velux/light.py b/homeassistant/components/velux/light.py index e98632701f3..14f12a01060 100644 --- a/homeassistant/components/velux/light.py +++ b/homeassistant/components/velux/light.py @@ -11,7 +11,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN, VeluxEntity +from .const import DOMAIN +from .entity import VeluxEntity PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/velux/scene.py b/homeassistant/components/velux/scene.py index 30858b25002..54888413613 100644 --- a/homeassistant/components/velux/scene.py +++ b/homeassistant/components/velux/scene.py @@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN +from .const import DOMAIN PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/venstar/__init__.py b/homeassistant/components/venstar/__init__.py index cbcfd3dff90..3243c7a6f47 100644 --- a/homeassistant/components/venstar/__init__.py +++ b/homeassistant/components/venstar/__init__.py @@ -13,9 +13,7 @@ from homeassistant.const import ( CONF_USERNAME, Platform, ) -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.core import HomeAssistant from .const import DOMAIN, VENSTAR_TIMEOUT from .coordinator import VenstarDataUpdateCoordinator @@ -59,35 +57,3 @@ async def async_unload_entry(hass: HomeAssistant, config: ConfigEntry) -> bool: if unload_ok: hass.data[DOMAIN].pop(config.entry_id) return unload_ok - - -class VenstarEntity(CoordinatorEntity[VenstarDataUpdateCoordinator]): - """Representation of a Venstar entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - venstar_data_coordinator: VenstarDataUpdateCoordinator, - config: ConfigEntry, - ) -> None: - """Initialize the data object.""" - super().__init__(venstar_data_coordinator) - self._config = config - self._client = venstar_data_coordinator.client - - @callback - def _handle_coordinator_update(self) -> None: - """Handle updated data from the coordinator.""" - self.async_write_ha_state() - - @property - def device_info(self) -> DeviceInfo: - """Return the device information for this entity.""" - return DeviceInfo( - identifiers={(DOMAIN, self._config.entry_id)}, - name=self._client.name, - manufacturer="Venstar", - model=f"{self._client.model}-{self._client.get_type()}", - sw_version="{}.{}".format(*(self._client.get_firmware_ver())), - ) diff --git a/homeassistant/components/venstar/binary_sensor.py b/homeassistant/components/venstar/binary_sensor.py index 38bdc208d15..315df09b625 100644 --- a/homeassistant/components/venstar/binary_sensor.py +++ b/homeassistant/components/venstar/binary_sensor.py @@ -8,8 +8,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import VenstarEntity from .const import DOMAIN +from .entity import VenstarEntity async def async_setup_entry( diff --git a/homeassistant/components/venstar/climate.py b/homeassistant/components/venstar/climate.py index ea833dc3183..c5323e1e9a8 100644 --- a/homeassistant/components/venstar/climate.py +++ b/homeassistant/components/venstar/climate.py @@ -36,7 +36,6 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import VenstarEntity from .const import ( _LOGGER, ATTR_FAN_STATE, @@ -47,6 +46,7 @@ from .const import ( HOLD_MODE_TEMPERATURE, ) from .coordinator import VenstarDataUpdateCoordinator +from .entity import VenstarEntity PLATFORM_SCHEMA = CLIMATE_PLATFORM_SCHEMA.extend( { @@ -110,7 +110,6 @@ class VenstarThermostat(VenstarEntity, ClimateEntity): _attr_hvac_modes = [HVACMode.HEAT, HVACMode.COOL, HVACMode.OFF, HVACMode.AUTO] _attr_precision = PRECISION_HALVES _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/venstar/config_flow.py b/homeassistant/components/venstar/config_flow.py index 289f7936676..929f5718c19 100644 --- a/homeassistant/components/venstar/config_flow.py +++ b/homeassistant/components/venstar/config_flow.py @@ -15,7 +15,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.typing import ConfigType from .const import _LOGGER, DOMAIN, VENSTAR_TIMEOUT @@ -85,7 +84,7 @@ class VenstarConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_data: ConfigType) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import entry from configuration.yaml.""" self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) return await self.async_step_user( diff --git a/homeassistant/components/venstar/entity.py b/homeassistant/components/venstar/entity.py new file mode 100644 index 00000000000..b8a4b971a7f --- /dev/null +++ b/homeassistant/components/venstar/entity.py @@ -0,0 +1,44 @@ +"""The venstar component.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import VenstarDataUpdateCoordinator + + +class VenstarEntity(CoordinatorEntity[VenstarDataUpdateCoordinator]): + """Representation of a Venstar entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + venstar_data_coordinator: VenstarDataUpdateCoordinator, + config: ConfigEntry, + ) -> None: + """Initialize the data object.""" + super().__init__(venstar_data_coordinator) + self._config = config + self._client = venstar_data_coordinator.client + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self.async_write_ha_state() + + @property + def device_info(self) -> DeviceInfo: + """Return the device information for this entity.""" + firmware_version = self._client.get_firmware_ver() + return DeviceInfo( + identifiers={(DOMAIN, self._config.entry_id)}, + name=self._client.name, + manufacturer="Venstar", + model=f"{self._client.model}-{self._client.get_type()}", + sw_version=f"{firmware_version[0]}.{firmware_version[1]}", + ) diff --git a/homeassistant/components/venstar/sensor.py b/homeassistant/components/venstar/sensor.py index 484aa711c1e..94180f6ad79 100644 --- a/homeassistant/components/venstar/sensor.py +++ b/homeassistant/components/venstar/sensor.py @@ -23,9 +23,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import VenstarEntity from .const import DOMAIN from .coordinator import VenstarDataUpdateCoordinator +from .entity import VenstarEntity RUNTIME_HEAT1 = "heat1" RUNTIME_HEAT2 = "heat2" diff --git a/homeassistant/components/vera/__init__.py b/homeassistant/components/vera/__init__.py index 722a6b86d4b..b8f0b702ebe 100644 --- a/homeassistant/components/vera/__init__.py +++ b/homeassistant/components/vera/__init__.py @@ -5,7 +5,6 @@ from __future__ import annotations import asyncio from collections import defaultdict import logging -from typing import Any import pyvera as veraApi from requests.exceptions import RequestException @@ -14,10 +13,6 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - ATTR_ARMED, - ATTR_BATTERY_LEVEL, - ATTR_LAST_TRIP_TIME, - ATTR_TRIPPED, CONF_EXCLUDE, CONF_LIGHTS, EVENT_HOMEASSISTANT_STOP, @@ -26,10 +21,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType -from homeassistant.util import slugify -from homeassistant.util.dt import utc_from_timestamp from .common import ( ControllerData, @@ -39,7 +31,7 @@ from .common import ( set_controller_data, ) from .config_flow import fix_device_id_list, new_options -from .const import CONF_CONTROLLER, CONF_LEGACY_UNIQUE_ID, DOMAIN, VERA_ID_FORMAT +from .const import CONF_CONTROLLER, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -204,83 +196,3 @@ def map_vera_device( ), None, ) - - -class VeraDevice[_DeviceTypeT: veraApi.VeraDevice](Entity): - """Representation of a Vera device entity.""" - - def __init__( - self, vera_device: _DeviceTypeT, controller_data: ControllerData - ) -> None: - """Initialize the device.""" - self.vera_device = vera_device - self.controller = controller_data.controller - - self._name = self.vera_device.name - # Append device id to prevent name clashes in HA. 
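The block being removed here is the Vera base entity; as the hunks further down show, it comes back unchanged as `VeraEntity` in a new `entity.py`, and every platform swaps its import and base class accordingly. A sketch of a platform subclass after the move (the class is hypothetical and assumes it lives inside the `vera` package; the real platforms below follow the same shape):

```python
# Hypothetical platform entity mirroring the pattern the later hunks apply to
# the real Vera platforms: subclass the relocated VeraEntity with a concrete
# pyvera device type and keep the legacy entity_id built from vera_id.
import pyvera as veraApi

from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity

from .common import ControllerData
from .entity import VeraEntity


class ExampleVeraSwitch(VeraEntity[veraApi.VeraSwitch], SwitchEntity):
    """Illustrative switch entity built on the relocated base class."""

    def __init__(
        self, vera_device: veraApi.VeraSwitch, controller_data: ControllerData
    ) -> None:
        VeraEntity.__init__(self, vera_device, controller_data)
        self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id)
```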
- self.vera_id = VERA_ID_FORMAT.format( - slugify(vera_device.name), vera_device.vera_device_id - ) - - if controller_data.config_entry.data.get(CONF_LEGACY_UNIQUE_ID): - self._unique_id = str(self.vera_device.vera_device_id) - else: - self._unique_id = f"vera_{controller_data.config_entry.unique_id}_{self.vera_device.vera_device_id}" - - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - self.controller.register(self.vera_device, self._update_callback) - - def _update_callback(self, _device: _DeviceTypeT) -> None: - """Update the state.""" - self.schedule_update_ha_state(True) - - def update(self): - """Force a refresh from the device if the device is unavailable.""" - refresh_needed = self.vera_device.should_poll or not self.available - _LOGGER.debug("%s: update called (refresh=%s)", self._name, refresh_needed) - if refresh_needed: - self.vera_device.refresh() - - @property - def name(self) -> str: - """Return the name of the device.""" - return self._name - - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes of the device.""" - attr = {} - - if self.vera_device.has_battery: - attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level - - if self.vera_device.is_armable: - armed = self.vera_device.is_armed - attr[ATTR_ARMED] = "True" if armed else "False" - - if self.vera_device.is_trippable: - if (last_tripped := self.vera_device.last_trip) is not None: - utc_time = utc_from_timestamp(int(last_tripped)) - attr[ATTR_LAST_TRIP_TIME] = utc_time.isoformat() - else: - attr[ATTR_LAST_TRIP_TIME] = None - tripped = self.vera_device.is_tripped - attr[ATTR_TRIPPED] = "True" if tripped else "False" - - attr["Vera Device Id"] = self.vera_device.vera_device_id - - return attr - - @property - def available(self): - """If device communications have failed return false.""" - return not self.vera_device.comm_failure - - @property - def unique_id(self) -> str: - """Return a unique ID. - - The Vera assigns a unique and immutable ID number to each device. - """ - return self._unique_id diff --git a/homeassistant/components/vera/binary_sensor.py b/homeassistant/components/vera/binary_sensor.py index d90f6a78858..3438ee81d4a 100644 --- a/homeassistant/components/vera/binary_sensor.py +++ b/homeassistant/components/vera/binary_sensor.py @@ -10,8 +10,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import VeraDevice from .common import ControllerData, get_controller_data +from .entity import VeraEntity async def async_setup_entry( @@ -30,7 +30,7 @@ async def async_setup_entry( ) -class VeraBinarySensor(VeraDevice[veraApi.VeraBinarySensor], BinarySensorEntity): +class VeraBinarySensor(VeraEntity[veraApi.VeraBinarySensor], BinarySensorEntity): """Representation of a Vera Binary Sensor.""" _attr_is_on = False @@ -39,7 +39,7 @@ class VeraBinarySensor(VeraDevice[veraApi.VeraBinarySensor], BinarySensorEntity) self, vera_device: veraApi.VeraBinarySensor, controller_data: ControllerData ) -> None: """Initialize the binary_sensor.""" - VeraDevice.__init__(self, vera_device, controller_data) + VeraEntity.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) def update(self) -> None: diff --git a/homeassistant/components/vera/climate.py b/homeassistant/components/vera/climate.py index 79a6c2566e0..eb2a5206f30 100644 --- a/homeassistant/components/vera/climate.py +++ b/homeassistant/components/vera/climate.py @@ -19,8 +19,8 @@ from homeassistant.const import ATTR_TEMPERATURE, Platform, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import VeraDevice from .common import ControllerData, get_controller_data +from .entity import VeraEntity FAN_OPERATION_LIST = [FAN_ON, FAN_AUTO] @@ -43,7 +43,7 @@ async def async_setup_entry( ) -class VeraThermostat(VeraDevice[veraApi.VeraThermostat], ClimateEntity): +class VeraThermostat(VeraEntity[veraApi.VeraThermostat], ClimateEntity): """Representation of a Vera Thermostat.""" _attr_hvac_modes = SUPPORT_HVAC @@ -54,13 +54,12 @@ class VeraThermostat(VeraDevice[veraApi.VeraThermostat], ClimateEntity): | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, vera_device: veraApi.VeraThermostat, controller_data: ControllerData ) -> None: """Initialize the Vera device.""" - VeraDevice.__init__(self, vera_device, controller_data) + VeraEntity.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) @property diff --git a/homeassistant/components/vera/config_flow.py b/homeassistant/components/vera/config_flow.py index 181849f46a1..f2b182cc270 100644 --- a/homeassistant/components/vera/config_flow.py +++ b/homeassistant/components/vera/config_flow.py @@ -76,10 +76,6 @@ def options_data(user_input: dict[str, str]) -> dict[str, list[int]]: class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, str] | None = None, @@ -104,7 +100,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -127,7 +123,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initialized by import.""" # If there are entities with the legacy unique_id, then this imported config @@ -146,7 +142,7 @@ class 
VeraFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_finish( { - **config, + **import_data, CONF_SOURCE: SOURCE_IMPORT, CONF_LEGACY_UNIQUE_ID: use_legacy_unique_id, } diff --git a/homeassistant/components/vera/cover.py b/homeassistant/components/vera/cover.py index 25ffe987d5e..b5b57f43c0c 100644 --- a/homeassistant/components/vera/cover.py +++ b/homeassistant/components/vera/cover.py @@ -12,8 +12,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import VeraDevice from .common import ControllerData, get_controller_data +from .entity import VeraEntity async def async_setup_entry( @@ -32,14 +32,14 @@ async def async_setup_entry( ) -class VeraCover(VeraDevice[veraApi.VeraCurtain], CoverEntity): +class VeraCover(VeraEntity[veraApi.VeraCurtain], CoverEntity): """Representation a Vera Cover.""" def __init__( self, vera_device: veraApi.VeraCurtain, controller_data: ControllerData ) -> None: """Initialize the Vera device.""" - VeraDevice.__init__(self, vera_device, controller_data) + VeraEntity.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) @property diff --git a/homeassistant/components/vera/entity.py b/homeassistant/components/vera/entity.py new file mode 100644 index 00000000000..84e21e54983 --- /dev/null +++ b/homeassistant/components/vera/entity.py @@ -0,0 +1,103 @@ +"""Support for Vera devices.""" + +from __future__ import annotations + +import logging +from typing import Any + +import pyvera as veraApi + +from homeassistant.const import ( + ATTR_ARMED, + ATTR_BATTERY_LEVEL, + ATTR_LAST_TRIP_TIME, + ATTR_TRIPPED, +) +from homeassistant.helpers.entity import Entity +from homeassistant.util import slugify +from homeassistant.util.dt import utc_from_timestamp + +from .common import ControllerData +from .const import CONF_LEGACY_UNIQUE_ID, VERA_ID_FORMAT + +_LOGGER = logging.getLogger(__name__) + + +class VeraEntity[_DeviceTypeT: veraApi.VeraDevice](Entity): + """Representation of a Vera device entity.""" + + def __init__( + self, vera_device: _DeviceTypeT, controller_data: ControllerData + ) -> None: + """Initialize the device.""" + self.vera_device = vera_device + self.controller = controller_data.controller + + self._name = self.vera_device.name + # Append device id to prevent name clashes in HA. 
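The `config_flow.py` hunk above drops the `OptionsFlowHandler.__init__` that used to stash the config entry, relying instead on the `self.config_entry` property the base `OptionsFlow` provides; the Verisure options flow further down gets the same treatment. A trimmed, hypothetical handler in that style (the option key is made up):

```python
# Hypothetical options flow in the modernized shape: no __init__, no stored
# entry; defaults are read from self.config_entry.options directly.
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlowResult, OptionsFlow


class ExampleOptionsFlowHandler(OptionsFlow):
    """Illustrative options flow, not part of the diff."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            return self.async_create_entry(data=user_input)
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        "example_option",
                        default=self.config_entry.options.get("example_option", 0),
                    ): int
                }
            ),
        )
```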
+ self.vera_id = VERA_ID_FORMAT.format( + slugify(vera_device.name), vera_device.vera_device_id + ) + + if controller_data.config_entry.data.get(CONF_LEGACY_UNIQUE_ID): + self._unique_id = str(self.vera_device.vera_device_id) + else: + self._unique_id = f"vera_{controller_data.config_entry.unique_id}_{self.vera_device.vera_device_id}" + + async def async_added_to_hass(self) -> None: + """Subscribe to updates.""" + self.controller.register(self.vera_device, self._update_callback) + + def _update_callback(self, _device: _DeviceTypeT) -> None: + """Update the state.""" + self.schedule_update_ha_state(True) + + def update(self): + """Force a refresh from the device if the device is unavailable.""" + refresh_needed = self.vera_device.should_poll or not self.available + _LOGGER.debug("%s: update called (refresh=%s)", self._name, refresh_needed) + if refresh_needed: + self.vera_device.refresh() + + @property + def name(self) -> str: + """Return the name of the device.""" + return self._name + + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return the state attributes of the device.""" + attr = {} + + if self.vera_device.has_battery: + attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level + + if self.vera_device.is_armable: + armed = self.vera_device.is_armed + attr[ATTR_ARMED] = "True" if armed else "False" + + if self.vera_device.is_trippable: + if (last_tripped := self.vera_device.last_trip) is not None: + utc_time = utc_from_timestamp(int(last_tripped)) + attr[ATTR_LAST_TRIP_TIME] = utc_time.isoformat() + else: + attr[ATTR_LAST_TRIP_TIME] = None + tripped = self.vera_device.is_tripped + attr[ATTR_TRIPPED] = "True" if tripped else "False" + + attr["Vera Device Id"] = self.vera_device.vera_device_id + + return attr + + @property + def available(self): + """If device communications have failed return false.""" + return not self.vera_device.comm_failure + + @property + def unique_id(self) -> str: + """Return a unique ID. + + The Vera assigns a unique and immutable ID number to each device. + """ + return self._unique_id diff --git a/homeassistant/components/vera/light.py b/homeassistant/components/vera/light.py index 86e5dfa6a91..e512676de9a 100644 --- a/homeassistant/components/vera/light.py +++ b/homeassistant/components/vera/light.py @@ -19,8 +19,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util -from . 
import VeraDevice from .common import ControllerData, get_controller_data +from .entity import VeraEntity async def async_setup_entry( @@ -39,7 +39,7 @@ async def async_setup_entry( ) -class VeraLight(VeraDevice[veraApi.VeraDimmer], LightEntity): +class VeraLight(VeraEntity[veraApi.VeraDimmer], LightEntity): """Representation of a Vera Light, including dimmable.""" _attr_is_on = False @@ -50,7 +50,7 @@ class VeraLight(VeraDevice[veraApi.VeraDimmer], LightEntity): self, vera_device: veraApi.VeraDimmer, controller_data: ControllerData ) -> None: """Initialize the light.""" - VeraDevice.__init__(self, vera_device, controller_data) + VeraEntity.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) @property diff --git a/homeassistant/components/vera/lock.py b/homeassistant/components/vera/lock.py index 01509aa8388..18f0b9de3e2 100644 --- a/homeassistant/components/vera/lock.py +++ b/homeassistant/components/vera/lock.py @@ -12,8 +12,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import VeraDevice from .common import ControllerData, get_controller_data +from .entity import VeraEntity ATTR_LAST_USER_NAME = "changed_by_name" ATTR_LOW_BATTERY = "low_battery" @@ -35,14 +35,14 @@ async def async_setup_entry( ) -class VeraLock(VeraDevice[veraApi.VeraLock], LockEntity): +class VeraLock(VeraEntity[veraApi.VeraLock], LockEntity): """Representation of a Vera lock.""" def __init__( self, vera_device: veraApi.VeraLock, controller_data: ControllerData ) -> None: """Initialize the Vera device.""" - VeraDevice.__init__(self, vera_device, controller_data) + VeraEntity.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) def lock(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/vera/manifest.json b/homeassistant/components/vera/manifest.json index 17b7144fc3d..211162bcbdc 100644 --- a/homeassistant/components/vera/manifest.json +++ b/homeassistant/components/vera/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/vera", "iot_class": "local_polling", "loggers": ["pyvera"], - "requirements": ["pyvera==0.3.13"] + "requirements": ["pyvera==0.3.15"] } diff --git a/homeassistant/components/vera/sensor.py b/homeassistant/components/vera/sensor.py index 97e6d6d6314..95f1fa0bd89 100644 --- a/homeassistant/components/vera/sensor.py +++ b/homeassistant/components/vera/sensor.py @@ -23,8 +23,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import VeraDevice from .common import ControllerData, get_controller_data +from .entity import VeraEntity SCAN_INTERVAL = timedelta(seconds=5) @@ -45,7 +45,7 @@ async def async_setup_entry( ) -class VeraSensor(VeraDevice[veraApi.VeraSensor], SensorEntity): +class VeraSensor(VeraEntity[veraApi.VeraSensor], SensorEntity): """Representation of a Vera Sensor.""" def __init__( @@ -54,7 +54,7 @@ class VeraSensor(VeraDevice[veraApi.VeraSensor], SensorEntity): """Initialize the sensor.""" self._temperature_units: str | None = None self.last_changed_time = None - VeraDevice.__init__(self, vera_device, controller_data) + VeraEntity.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) if self.vera_device.category == veraApi.CATEGORY_TEMPERATURE_SENSOR: self._attr_device_class = SensorDeviceClass.TEMPERATURE diff --git a/homeassistant/components/vera/switch.py b/homeassistant/components/vera/switch.py index 3e594685d6b..ad7fbe68458 100644 --- a/homeassistant/components/vera/switch.py +++ b/homeassistant/components/vera/switch.py @@ -12,8 +12,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import VeraDevice from .common import ControllerData, get_controller_data +from .entity import VeraEntity async def async_setup_entry( @@ -32,7 +32,7 @@ async def async_setup_entry( ) -class VeraSwitch(VeraDevice[veraApi.VeraSwitch], SwitchEntity): +class VeraSwitch(VeraEntity[veraApi.VeraSwitch], SwitchEntity): """Representation of a Vera Switch.""" _attr_is_on = False @@ -41,7 +41,7 @@ class VeraSwitch(VeraDevice[veraApi.VeraSwitch], SwitchEntity): self, vera_device: veraApi.VeraSwitch, controller_data: ControllerData ) -> None: """Initialize the Vera device.""" - VeraDevice.__init__(self, vera_device, controller_data) + VeraEntity.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) def turn_on(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/verisure/__init__.py b/homeassistant/components/verisure/__init__.py index 0f8c8d936ef..e635ab712be 100644 --- a/homeassistant/components/verisure/__init__.py +++ b/homeassistant/components/verisure/__init__.py @@ -108,6 +108,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.config_entries.async_update_entry(entry, version=2) - LOGGER.info("Migration to version %s successful", entry.version) + LOGGER.debug("Migration to version %s successful", entry.version) return True diff --git a/homeassistant/components/verisure/alarm_control_panel.py b/homeassistant/components/verisure/alarm_control_panel.py index fc7e7551145..5f34b587163 100644 --- a/homeassistant/components/verisure/alarm_control_panel.py +++ b/homeassistant/components/verisure/alarm_control_panel.py @@ -7,10 +7,10 @@ import asyncio from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_ALARM_ARMING, STATE_ALARM_DISARMING from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -86,7 +86,7 @@ class VerisureAlarm( async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - self._attr_state = 
STATE_ALARM_DISARMING + self._attr_alarm_state = AlarmControlPanelState.DISARMING self.async_write_ha_state() await self._async_set_arm_state( "DISARMED", self.coordinator.verisure.disarm(code) @@ -94,7 +94,7 @@ class VerisureAlarm( async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING self.async_write_ha_state() await self._async_set_arm_state( "ARMED_HOME", self.coordinator.verisure.arm_home(code) @@ -102,7 +102,7 @@ class VerisureAlarm( async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING self.async_write_ha_state() await self._async_set_arm_state( "ARMED_AWAY", self.coordinator.verisure.arm_away(code) @@ -111,7 +111,7 @@ class VerisureAlarm( @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - self._attr_state = ALARM_STATE_TO_HA.get( + self._attr_alarm_state = ALARM_STATE_TO_HA.get( self.coordinator.data["alarm"]["statusType"] ) self._attr_changed_by = self.coordinator.data["alarm"].get("name") diff --git a/homeassistant/components/verisure/camera.py b/homeassistant/components/verisure/camera.py index 50606a49eab..70cd436d24c 100644 --- a/homeassistant/components/verisure/camera.py +++ b/homeassistant/components/verisure/camera.py @@ -110,9 +110,7 @@ class VerisureSmartcam(CoordinatorEntity[VerisureDataUpdateCoordinator], Camera) return LOGGER.debug("Download new image %s", new_image_id) - new_image_path = os.path.join( - self._directory_path, "{}{}".format(new_image_id, ".jpg") - ) + new_image_path = os.path.join(self._directory_path, f"{new_image_id}.jpg") new_image_url = new_image["contentUrl"] self.coordinator.verisure.download_image(new_image_url, new_image_path) LOGGER.debug("Old image_id=%s", self._image_id) @@ -123,9 +121,7 @@ class VerisureSmartcam(CoordinatorEntity[VerisureDataUpdateCoordinator], Camera) def delete_image(self, _=None) -> None: """Delete an old image.""" - remove_image = os.path.join( - self._directory_path, "{}{}".format(self._image_id, ".jpg") - ) + remove_image = os.path.join(self._directory_path, f"{self._image_id}.jpg") try: os.remove(remove_image) LOGGER.debug("Deleting old image %s", remove_image) diff --git a/homeassistant/components/verisure/config_flow.py b/homeassistant/components/verisure/config_flow.py index ccf74cd6791..0f1088ccb80 100644 --- a/homeassistant/components/verisure/config_flow.py +++ b/homeassistant/components/verisure/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, cast +from typing import Any from verisure import ( Error as VerisureError, @@ -38,15 +38,16 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 2 email: str - entry: ConfigEntry password: str verisure: Verisure @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> VerisureOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> VerisureOptionsFlowHandler: """Get the options flow for this handler.""" - return VerisureOptionsFlowHandler(config_entry) + return VerisureOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -179,10 +180,6 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> 
ConfigFlowResult: """Handle initiation of re-authentication with Verisure.""" - self.entry = cast( - ConfigEntry, - self.hass.config_entries.async_get_entry(self.context["entry_id"]), - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -230,25 +227,21 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): LOGGER.debug("Unexpected response from Verisure, %s", ex) errors["base"] = "unknown" else: - data = self.entry.data.copy() - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **data, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={ CONF_EMAIL: user_input[CONF_EMAIL], CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema( { - vol.Required(CONF_EMAIL, default=self.entry.data[CONF_EMAIL]): str, + vol.Required( + CONF_EMAIL, default=self._get_reauth_entry().data[CONF_EMAIL] + ): str, vol.Required(CONF_PASSWORD): str, } ), @@ -274,18 +267,13 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): LOGGER.debug("Unexpected response from Verisure, %s", ex) errors["base"] = "unknown" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={ CONF_EMAIL: self.email, CONF_PASSWORD: self.password, }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_mfa", @@ -304,10 +292,6 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): class VerisureOptionsFlowHandler(OptionsFlow): """Handle Verisure options.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize Verisure options flow.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -324,7 +308,7 @@ class VerisureOptionsFlowHandler(OptionsFlow): vol.Optional( CONF_LOCK_CODE_DIGITS, description={ - "suggested_value": self.entry.options.get( + "suggested_value": self.config_entry.options.get( CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS ) }, diff --git a/homeassistant/components/verisure/const.py b/homeassistant/components/verisure/const.py index 5b1aa1a0740..4afb93d957f 100644 --- a/homeassistant/components/verisure/const.py +++ b/homeassistant/components/verisure/const.py @@ -3,12 +3,7 @@ from datetime import timedelta import logging -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState DOMAIN = "verisure" @@ -43,8 +38,8 @@ DEVICE_TYPE_NAME = { } ALARM_STATE_TO_HA = { - "DISARMED": STATE_ALARM_DISARMED, - "ARMED_HOME": STATE_ALARM_ARMED_HOME, - "ARMED_AWAY": STATE_ALARM_ARMED_AWAY, - "PENDING": STATE_ALARM_PENDING, + "DISARMED": AlarmControlPanelState.DISARMED, + "ARMED_HOME": AlarmControlPanelState.ARMED_HOME, + "ARMED_AWAY": AlarmControlPanelState.ARMED_AWAY, + "PENDING": AlarmControlPanelState.PENDING, } diff --git a/homeassistant/components/verisure/icons.json b/homeassistant/components/verisure/icons.json index 35f6960b1e8..809cf004a3f 100644 --- a/homeassistant/components/verisure/icons.json 
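The Verisure config-flow hunks above replace the stored `self.entry`, the manual `async_update_entry` call, and the explicit reload task with `self._get_reauth_entry()` plus `async_update_reload_and_abort`, which updates the entry, reloads it, and aborts the flow in one step. A trimmed, hypothetical reauth step using the same helpers:

```python
# Hypothetical, trimmed reauth step: _get_reauth_entry() returns the entry
# being re-authenticated; async_update_reload_and_abort() applies the new
# data, reloads the entry and ends the flow.
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_PASSWORD


class ExampleFlowHandler(ConfigFlow, domain="example"):
    """Illustrative flow, not part of the diff."""

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            return self.async_update_reload_and_abort(
                self._get_reauth_entry(),
                data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
            )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}),
        )
```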
+++ b/homeassistant/components/verisure/icons.json @@ -1,7 +1,13 @@ { "services": { - "capture_smartcam": "mdi:camera", - "enable_autolock": "mdi:lock", - "disable_autolock": "mdi:lock-off" + "capture_smartcam": { + "service": "mdi:camera" + }, + "enable_autolock": { + "service": "mdi:lock" + }, + "disable_autolock": { + "service": "mdi:lock-off" + } } } diff --git a/homeassistant/components/verisure/lock.py b/homeassistant/components/verisure/lock.py index 5c56fc0df2c..87f5c53880e 100644 --- a/homeassistant/components/verisure/lock.py +++ b/homeassistant/components/verisure/lock.py @@ -7,9 +7,9 @@ from typing import Any from verisure import Error as VerisureError -from homeassistant.components.lock import LockEntity +from homeassistant.components.lock import LockEntity, LockState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_CODE, STATE_LOCKED, STATE_UNLOCKED +from homeassistant.const import ATTR_CODE from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import ( @@ -130,19 +130,19 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt """Send unlock command.""" code = kwargs.get(ATTR_CODE) if code: - await self.async_set_lock_state(code, STATE_UNLOCKED) + await self.async_set_lock_state(code, LockState.UNLOCKED) async def async_lock(self, **kwargs: Any) -> None: """Send lock command.""" code = kwargs.get(ATTR_CODE) if code: - await self.async_set_lock_state(code, STATE_LOCKED) + await self.async_set_lock_state(code, LockState.LOCKED) - async def async_set_lock_state(self, code: str, state: str) -> None: + async def async_set_lock_state(self, code: str, state: LockState) -> None: """Send set lock state command.""" command = ( self.coordinator.verisure.door_lock(self.serial_number, code) - if state == STATE_LOCKED + if state == LockState.LOCKED else self.coordinator.verisure.door_unlock(self.serial_number, code) ) lock_request = await self.hass.async_add_executor_job( @@ -151,7 +151,7 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt ) LOGGER.debug("Verisure doorlock %s", state) transaction_id = lock_request.get("data", {}).get(command["operationName"]) - target_state = "LOCKED" if state == STATE_LOCKED else "UNLOCKED" + target_state = "LOCKED" if state == LockState.LOCKED else "UNLOCKED" lock_status = None attempts = 0 while lock_status != "OK": diff --git a/homeassistant/components/verisure/manifest.json b/homeassistant/components/verisure/manifest.json index f6630f0c6e5..153b2ba4006 100644 --- a/homeassistant/components/verisure/manifest.json +++ b/homeassistant/components/verisure/manifest.json @@ -12,5 +12,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["verisure"], - "requirements": ["vsure==2.6.6"] + "requirements": ["vsure==2.6.7"] } diff --git a/homeassistant/components/versasense/__init__.py b/homeassistant/components/versasense/__init__.py index f209234f8c2..ed4a8edf32c 100644 --- a/homeassistant/components/versasense/__init__.py +++ b/homeassistant/components/versasense/__init__.py @@ -55,7 +55,7 @@ async def _configure_entities(hass, config, consumer): switch_info = {} for mac, device in devices.items(): - _LOGGER.info("Device connected: %s %s", device.name, mac) + _LOGGER.debug("Device connected: %s %s", device.name, mac) hass.data[DOMAIN][mac] = {} for peripheral_id, peripheral in device.peripherals.items(): diff --git 
a/homeassistant/components/versasense/manifest.json b/homeassistant/components/versasense/manifest.json index 421a46bc2f6..1f1ee9e6b9c 100644 --- a/homeassistant/components/versasense/manifest.json +++ b/homeassistant/components/versasense/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/versasense", "iot_class": "local_polling", "loggers": ["pyversasense"], + "quality_scale": "legacy", "requirements": ["pyversasense==0.0.6"] } diff --git a/homeassistant/components/vesync/__init__.py b/homeassistant/components/vesync/__init__.py index 04547d33dea..b6f263f3037 100644 --- a/homeassistant/components/vesync/__init__.py +++ b/homeassistant/components/vesync/__init__.py @@ -137,6 +137,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) + hass.data.pop(DOMAIN) return unload_ok diff --git a/homeassistant/components/vesync/common.py b/homeassistant/components/vesync/common.py index 33fc88f32d6..5f7b2a3a29e 100644 --- a/homeassistant/components/vesync/common.py +++ b/homeassistant/components/vesync/common.py @@ -1,14 +1,8 @@ """Common utilities for VeSync Component.""" import logging -from typing import Any -from pyvesync.vesyncbasedevice import VeSyncBaseDevice - -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity, ToggleEntity - -from .const import DOMAIN, VS_FANS, VS_LIGHTS, VS_SENSORS, VS_SWITCHES +from .const import VS_FANS, VS_LIGHTS, VS_SENSORS, VS_SWITCHES _LOGGER = logging.getLogger(__name__) @@ -27,17 +21,17 @@ async def async_process_devices(hass, manager): devices[VS_FANS].extend(manager.fans) # Expose fan sensors separately devices[VS_SENSORS].extend(manager.fans) - _LOGGER.info("%d VeSync fans found", len(manager.fans)) + _LOGGER.debug("%d VeSync fans found", len(manager.fans)) if manager.bulbs: devices[VS_LIGHTS].extend(manager.bulbs) - _LOGGER.info("%d VeSync lights found", len(manager.bulbs)) + _LOGGER.debug("%d VeSync lights found", len(manager.bulbs)) if manager.outlets: devices[VS_SWITCHES].extend(manager.outlets) # Expose outlets' voltage, power & energy usage as separate sensors devices[VS_SENSORS].extend(manager.outlets) - _LOGGER.info("%d VeSync outlets found", len(manager.outlets)) + _LOGGER.debug("%d VeSync outlets found", len(manager.outlets)) if manager.switches: for switch in manager.switches: @@ -45,65 +39,6 @@ async def async_process_devices(hass, manager): devices[VS_SWITCHES].append(switch) else: devices[VS_LIGHTS].append(switch) - _LOGGER.info("%d VeSync switches found", len(manager.switches)) + _LOGGER.debug("%d VeSync switches found", len(manager.switches)) return devices - - -class VeSyncBaseEntity(Entity): - """Base class for VeSync Entity Representations.""" - - _attr_has_entity_name = True - - def __init__(self, device: VeSyncBaseDevice) -> None: - """Initialize the VeSync device.""" - self.device = device - self._attr_unique_id = self.base_unique_id - - @property - def base_unique_id(self): - """Return the ID of this device.""" - # The unique_id property may be overridden in subclasses, such as in - # sensors. Maintaining base_unique_id allows us to group related - # entities under a single device. 
- if isinstance(self.device.sub_device_no, int): - return f"{self.device.cid}{self.device.sub_device_no!s}" - return self.device.cid - - @property - def available(self) -> bool: - """Return True if device is available.""" - return self.device.connection_status == "online" - - @property - def device_info(self) -> DeviceInfo: - """Return device information.""" - return DeviceInfo( - identifiers={(DOMAIN, self.base_unique_id)}, - name=self.device.device_name, - model=self.device.device_type, - manufacturer="VeSync", - sw_version=self.device.current_firm_version, - ) - - def update(self) -> None: - """Update vesync device.""" - self.device.update() - - -class VeSyncDevice(VeSyncBaseEntity, ToggleEntity): - """Base class for VeSync Device Representations.""" - - @property - def details(self): - """Provide access to the device details dictionary.""" - return self.device.details - - @property - def is_on(self) -> bool: - """Return True if device is on.""" - return self.device.device_status == "on" - - def turn_off(self, **kwargs: Any) -> None: - """Turn the device off.""" - self.device.turn_off() diff --git a/homeassistant/components/vesync/config_flow.py b/homeassistant/components/vesync/config_flow.py index 15f9f548e35..6115cb9ee76 100644 --- a/homeassistant/components/vesync/config_flow.py +++ b/homeassistant/components/vesync/config_flow.py @@ -1,40 +1,42 @@ """Config flow utilities.""" -from collections import OrderedDict +from typing import Any from pyvesync import VeSync import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback +import homeassistant.helpers.config_validation as cv from .const import DOMAIN +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + } +) + class VeSyncFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 - def __init__(self) -> None: - """Instantiate config flow.""" - self._username = None - self._password = None - self.data_schema = OrderedDict() - self.data_schema[vol.Required(CONF_USERNAME)] = str - self.data_schema[vol.Required(CONF_PASSWORD)] = str - @callback - def _show_form(self, errors=None): + def _show_form(self, errors: dict[str, str] | None = None) -> ConfigFlowResult: """Show form to the user.""" return self.async_show_form( step_id="user", - data_schema=vol.Schema(self.data_schema), + data_schema=DATA_SCHEMA, errors=errors if errors else {}, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -42,15 +44,15 @@ class VeSyncFlowHandler(ConfigFlow, domain=DOMAIN): if not user_input: return self._show_form() - self._username = user_input[CONF_USERNAME] - self._password = user_input[CONF_PASSWORD] + username = user_input[CONF_USERNAME] + password = user_input[CONF_PASSWORD] - manager = VeSync(self._username, self._password) + manager = VeSync(username, password) login = await self.hass.async_add_executor_job(manager.login) if not login: return self._show_form(errors={"base": "invalid_auth"}) return self.async_create_entry( - title=self._username, - data={CONF_USERNAME: self._username, CONF_PASSWORD: self._password}, + title=username, + data={CONF_USERNAME: 
username, CONF_PASSWORD: password}, ) diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index 50dce95e42a..48215819ce5 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -56,6 +56,7 @@ SKU_TO_BASE_DEVICE = { "LAP-V201S-WEU": "Vital200S", # Alt ID Model Vital200S "LAP-V201S-WUS": "Vital200S", # Alt ID Model Vital200S "LAP-V201-AUSR": "Vital200S", # Alt ID Model Vital200S + "LAP-V201S-AUSR": "Vital200S", # Alt ID Model Vital200S "Vital100S": "Vital100S", "LAP-V102S-WUS": "Vital100S", # Alt ID Model Vital100S "LAP-V102S-AASR": "Vital100S", # Alt ID Model Vital100S diff --git a/homeassistant/components/vesync/diagnostics.py b/homeassistant/components/vesync/diagnostics.py index 9af8a7fed67..e1c092b1e32 100644 --- a/homeassistant/components/vesync/diagnostics.py +++ b/homeassistant/components/vesync/diagnostics.py @@ -12,8 +12,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceEntry -from .common import VeSyncBaseDevice from .const import DOMAIN, VS_MANAGER +from .entity import VeSyncBaseDevice KEYS_TO_REDACT = {"manager", "uuid", "mac_id"} diff --git a/homeassistant/components/vesync/entity.py b/homeassistant/components/vesync/entity.py new file mode 100644 index 00000000000..fd636561e9e --- /dev/null +++ b/homeassistant/components/vesync/entity.py @@ -0,0 +1,69 @@ +"""Common entity for VeSync Component.""" + +from typing import Any + +from pyvesync.vesyncbasedevice import VeSyncBaseDevice + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity, ToggleEntity + +from .const import DOMAIN + + +class VeSyncBaseEntity(Entity): + """Base class for VeSync Entity Representations.""" + + _attr_has_entity_name = True + + def __init__(self, device: VeSyncBaseDevice) -> None: + """Initialize the VeSync device.""" + self.device = device + self._attr_unique_id = self.base_unique_id + + @property + def base_unique_id(self): + """Return the ID of this device.""" + # The unique_id property may be overridden in subclasses, such as in + # sensors. Maintaining base_unique_id allows us to group related + # entities under a single device. 
+ if isinstance(self.device.sub_device_no, int): + return f"{self.device.cid}{self.device.sub_device_no!s}" + return self.device.cid + + @property + def available(self) -> bool: + """Return True if device is available.""" + return self.device.connection_status == "online" + + @property + def device_info(self) -> DeviceInfo: + """Return device information.""" + return DeviceInfo( + identifiers={(DOMAIN, self.base_unique_id)}, + name=self.device.device_name, + model=self.device.device_type, + manufacturer="VeSync", + sw_version=self.device.current_firm_version, + ) + + def update(self) -> None: + """Update vesync device.""" + self.device.update() + + +class VeSyncDevice(VeSyncBaseEntity, ToggleEntity): + """Base class for VeSync Device Representations.""" + + @property + def details(self): + """Provide access to the device details dictionary.""" + return self.device.details + + @property + def is_on(self) -> bool: + """Return True if device is on.""" + return self.device.device_status == "on" + + def turn_off(self, **kwargs: Any) -> None: + """Turn the device off.""" + self.device.turn_off() diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 6ef9e41eb43..5be6a06e1d0 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -17,8 +17,8 @@ from homeassistant.util.percentage import ( ) from homeassistant.util.scaling import int_states_in_range -from .common import VeSyncDevice from .const import DEV_TYPE_TO_HA, DOMAIN, SKU_TO_BASE_DEVICE, VS_DISCOVERY, VS_FANS +from .entity import VeSyncDevice _LOGGER = logging.getLogger(__name__) @@ -94,7 +94,7 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): | FanEntityFeature.TURN_ON ) _attr_name = None - _enable_turn_on_off_backwards_compatibility = False + _attr_translation_key = "vesync" def __init__(self, fan) -> None: """Initialize the VeSync fan device.""" diff --git a/homeassistant/components/vesync/icons.json b/homeassistant/components/vesync/icons.json index a4bf4afd410..e4769acc9a5 100644 --- a/homeassistant/components/vesync/icons.json +++ b/homeassistant/components/vesync/icons.json @@ -1,5 +1,23 @@ { + "entity": { + "fan": { + "vesync": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "mdi:fan-auto", + "sleep": "mdi:sleep", + "pet": "mdi:paw", + "turbo": "mdi:weather-tornado" + } + } + } + } + } + }, "services": { - "update_devices": "mdi:update" + "update_devices": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/vesync/light.py b/homeassistant/components/vesync/light.py index 9b15e635903..5b08b92f75a 100644 --- a/homeassistant/components/vesync/light.py +++ b/homeassistant/components/vesync/light.py @@ -5,7 +5,7 @@ from typing import Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ColorMode, LightEntity, ) @@ -13,11 +13,14 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util -from .common import VeSyncDevice from .const import DEV_TYPE_TO_HA, DOMAIN, VS_DISCOVERY, VS_LIGHTS +from .entity import VeSyncDevice _LOGGER = logging.getLogger(__name__) +MAX_MIREDS = 370 # 1,000,000 divided by 2700 Kelvin = 370 Mireds +MIN_MIREDS = 153 # 1,000,000 divided by 6500 Kelvin = 153 Mireds async def async_setup_entry( @@ 
-84,15 +87,16 @@ class VeSyncBaseLight(VeSyncDevice, LightEntity): """Turn the device on.""" attribute_adjustment_only = False # set white temperature - if self.color_mode == ColorMode.COLOR_TEMP and ATTR_COLOR_TEMP in kwargs: + if self.color_mode == ColorMode.COLOR_TEMP and ATTR_COLOR_TEMP_KELVIN in kwargs: # get white temperature from HA data - color_temp = int(kwargs[ATTR_COLOR_TEMP]) + color_temp = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) # ensure value between min-max supported Mireds - color_temp = max(self.min_mireds, min(color_temp, self.max_mireds)) + color_temp = max(MIN_MIREDS, min(color_temp, MAX_MIREDS)) # convert Mireds to Percent value that api expects color_temp = round( - ((color_temp - self.min_mireds) / (self.max_mireds - self.min_mireds)) - * 100 + ((color_temp - MIN_MIREDS) / (MAX_MIREDS - MIN_MIREDS)) * 100 ) # flip cold/warm to what pyvesync api expects color_temp = 100 - color_temp @@ -138,13 +142,13 @@ class VeSyncTunableWhiteLightHA(VeSyncBaseLight, LightEntity): """Representation of a VeSync Tunable White Light device.""" _attr_color_mode = ColorMode.COLOR_TEMP - _attr_max_mireds = 370 # 1,000,000 divided by 2700 Kelvin = 370 Mireds - _attr_min_mireds = 154 # 1,000,000 divided by 6500 Kelvin = 154 Mireds + _attr_min_color_temp_kelvin = 2700 # 370 Mireds + _attr_max_color_temp_kelvin = 6500 # 153 Mireds _attr_supported_color_modes = {ColorMode.COLOR_TEMP} @property - def color_temp(self) -> int: - """Get device white temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" # get value from pyvesync library api, result = self.device.color_temp_pct try: @@ -159,15 +163,16 @@ class VeSyncTunableWhiteLightHA(VeSyncBaseLight, LightEntity): ), result, ) - return 0 + return None # flip cold/warm color_temp_value = 100 - color_temp_value # ensure value between 0-100 color_temp_value = max(0, min(color_temp_value, 100)) # convert percent value to Mireds color_temp_value = round( - self.min_mireds - + ((self.max_mireds - self.min_mireds) / 100 * color_temp_value) + MIN_MIREDS + ((MAX_MIREDS - MIN_MIREDS) / 100 * color_temp_value) ) # ensure value between minimum and maximum Mireds - return max(self.min_mireds, min(color_temp_value, self.max_mireds)) + return color_util.color_temperature_mired_to_kelvin( + max(MIN_MIREDS, min(color_temp_value, MAX_MIREDS)) + ) diff --git a/homeassistant/components/vesync/sensor.py b/homeassistant/components/vesync/sensor.py index 8939295a2db..79061ec0c4c 100644 --- a/homeassistant/components/vesync/sensor.py +++ b/homeassistant/components/vesync/sensor.py @@ -30,8 +30,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .common import VeSyncBaseEntity from .const import DEV_TYPE_TO_HA, DOMAIN, SKU_TO_BASE_DEVICE, VS_DISCOVERY, VS_SENSORS +from .entity import VeSyncBaseEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/vesync/strings.json b/homeassistant/components/vesync/strings.json index 5ff0aa58722..b6e4e2fd957 100644 --- a/homeassistant/components/vesync/strings.json +++ b/homeassistant/components/vesync/strings.json @@ -42,6 +42,20 @@ "current_voltage": { "name": "Current voltage" } + }, + "fan": { + "vesync": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "Auto", + "sleep": "Sleep", + "pet": "Pet", + "turbo": "Turbo" + } + } + } + } } }, 
"services": { diff --git a/homeassistant/components/vesync/switch.py b/homeassistant/components/vesync/switch.py index 1d0c3472d53..a162a648ad7 100644 --- a/homeassistant/components/vesync/switch.py +++ b/homeassistant/components/vesync/switch.py @@ -9,8 +9,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .common import VeSyncDevice from .const import DEV_TYPE_TO_HA, DOMAIN, VS_DISCOVERY, VS_SWITCHES +from .entity import VeSyncDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/viaggiatreno/manifest.json b/homeassistant/components/viaggiatreno/manifest.json index 904f9c0bebf..584742c8c59 100644 --- a/homeassistant/components/viaggiatreno/manifest.json +++ b/homeassistant/components/viaggiatreno/manifest.json @@ -3,5 +3,6 @@ "name": "Trenitalia ViaggiaTreno", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/viaggiatreno", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/viaggiatreno/sensor.py b/homeassistant/components/viaggiatreno/sensor.py index 1ea12ed6a41..cb652270c69 100644 --- a/homeassistant/components/viaggiatreno/sensor.py +++ b/homeassistant/components/viaggiatreno/sensor.py @@ -174,7 +174,7 @@ class ViaggiaTrenoSensor(SensorEntity): self._state = NO_INFORMATION_STRING self._unit = "" else: - self._state = "Error: {}".format(res["error"]) + self._state = f"Error: {res['error']}" self._unit = "" else: for i in MONITORED_INFO: diff --git a/homeassistant/components/vicare/__init__.py b/homeassistant/components/vicare/__init__.py index 0c87cd6f4fe..d6b9e4b923a 100644 --- a/homeassistant/components/vicare/__init__.py +++ b/homeassistant/components/vicare/__init__.py @@ -15,10 +15,12 @@ from PyViCare.PyViCareUtils import ( PyViCareInvalidCredentialsError, ) +from homeassistant.components.climate import DOMAIN as DOMAIN_CLIMATE from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.storage import STORAGE_DIR from .const import ( @@ -29,7 +31,7 @@ from .const import ( UNSUPPORTED_DEVICES, ) from .types import ViCareDevice -from .utils import get_device +from .utils import get_device, get_device_serial _LOGGER = logging.getLogger(__name__) _TOKEN_FILENAME = "vicare_token.save" @@ -47,6 +49,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError) as err: raise ConfigEntryAuthFailed("Authentication failed") from err + for device in hass.data[DOMAIN][entry.entry_id][DEVICE_LIST]: + # Migration can be removed in 2025.4.0 + await async_migrate_devices_and_entities(hass, entry, device) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -109,6 +115,74 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok +async def async_migrate_devices_and_entities( + hass: HomeAssistant, entry: ConfigEntry, device: ViCareDevice +) -> None: + """Migrate old entry.""" + device_registry = dr.async_get(hass) + entity_registry = er.async_get(hass) + + 
gateway_serial: str = device.config.getConfig().serial + device_id = device.config.getId() + device_serial: str | None = await hass.async_add_executor_job( + get_device_serial, device.api + ) + device_model = device.config.getModel() + + old_identifier = gateway_serial + new_identifier = ( + f"{gateway_serial}_{device_serial if device_serial is not None else device_id}" + ) + + # Migrate devices + for device_entry in dr.async_entries_for_config_entry( + device_registry, entry.entry_id + ): + if ( + device_entry.identifiers == {(DOMAIN, old_identifier)} + and device_entry.model == device_model + ): + _LOGGER.debug( + "Migrating device %s to new identifier %s", + device_entry.name, + new_identifier, + ) + device_registry.async_update_device( + device_entry.id, + serial_number=device_serial, + new_identifiers={(DOMAIN, new_identifier)}, + ) + + # Migrate entities + for entity_entry in er.async_entries_for_device( + entity_registry, device_entry.id, True + ): + if entity_entry.unique_id.startswith(new_identifier): + # already correct, nothing to do + continue + unique_id_parts = entity_entry.unique_id.split("-") + # replace old prefix `<gateway_serial>` + # with `<gateway_serial>_<device_serial>` + unique_id_parts[0] = new_identifier + # convert climate entity unique id + # from `<identifier>-<circuit_id>` + # to `<identifier>-heating-<circuit_id>` + if entity_entry.domain == DOMAIN_CLIMATE: + unique_id_parts[len(unique_id_parts) - 1] = ( + f"{entity_entry.translation_key}-{unique_id_parts[len(unique_id_parts)-1]}" + ) + entity_new_unique_id = "-".join(unique_id_parts) + + _LOGGER.debug( + "Migrating entity %s to new unique id %s", + entity_entry.name, + entity_new_unique_id, + ) + entity_registry.async_update_entity( + entity_id=entity_entry.entity_id, new_unique_id=entity_new_unique_id + ) + + def get_supported_devices( devices: list[PyViCareDeviceConfig], ) -> list[PyViCareDeviceConfig]: diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py index 2c114d15b85..55f0ab96ed0 100644 --- a/homeassistant/components/vicare/binary_sensor.py +++ b/homeassistant/components/vicare/binary_sensor.py @@ -31,7 +31,13 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity from .types import ViCareDevice, ViCareRequiredKeysMixin -from .utils import get_burners, get_circuits, get_compressors, is_supported +from .utils import ( + get_burners, + get_circuits, + get_compressors, + get_device_serial, + is_supported, +) _LOGGER = logging.getLogger(__name__) @@ -112,61 +118,38 @@ def _build_entities( entities: list[ViCareBinarySensor] = [] for device in device_list: - entities.extend(_build_entities_for_device(device.api, device.config)) + # add device entities entities.extend( - _build_entities_for_component( - get_circuits(device.api), device.config, CIRCUIT_SENSORS + ViCareBinarySensor( + description, + get_device_serial(device.api), + device.config, + device.api, + ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, device.api) ) - entities.extend( - _build_entities_for_component( - get_burners(device.api), device.config, BURNER_SENSORS + # add component entities + for component_list, entity_description_list in ( + (get_circuits(device.api), CIRCUIT_SENSORS), + (get_burners(device.api), BURNER_SENSORS), + (get_compressors(device.api), COMPRESSOR_SENSORS), + ): + entities.extend( + ViCareBinarySensor( + description, + get_device_serial(device.api), + device.config, + device.api, + component, + ) + for component in
component_list + for description in entity_description_list + if is_supported(description.key, description, component) ) - ) - entities.extend( - _build_entities_for_component( - get_compressors(device.api), device.config, COMPRESSOR_SENSORS - ) - ) return entities -def _build_entities_for_device( - device: PyViCareDevice, - device_config: PyViCareDeviceConfig, -) -> list[ViCareBinarySensor]: - """Create device specific ViCare binary sensor entities.""" - - return [ - ViCareBinarySensor( - device_config, - device, - description, - ) - for description in GLOBAL_SENSORS - if is_supported(description.key, description, device) - ] - - -def _build_entities_for_component( - components: list[PyViCareHeatingDeviceComponent], - device_config: PyViCareDeviceConfig, - entity_descriptions: tuple[ViCareBinarySensorEntityDescription, ...], -) -> list[ViCareBinarySensor]: - """Create component specific ViCare binary sensor entities.""" - - return [ - ViCareBinarySensor( - device_config, - component, - description, - ) - for component in components - for description in entity_descriptions - if is_supported(description.key, description, component) - ] - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -190,12 +173,16 @@ class ViCareBinarySensor(ViCareEntity, BinarySensorEntity): def __init__( self, - device_config: PyViCareDeviceConfig, - api: PyViCareDevice | PyViCareHeatingDeviceComponent, description: ViCareBinarySensorEntityDescription, + device_serial: str | None, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(device_config, api, description.key) + super().__init__( + description.key, device_serial, device_config, device, component + ) self.entity_description = description @property diff --git a/homeassistant/components/vicare/button.py b/homeassistant/components/vicare/button.py index f880c39ddea..49d142c1edb 100644 --- a/homeassistant/components/vicare/button.py +++ b/homeassistant/components/vicare/button.py @@ -24,7 +24,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity from .types import ViCareDevice, ViCareRequiredKeysMixinWithSet -from .utils import is_supported +from .utils import get_device_serial, is_supported _LOGGER = logging.getLogger(__name__) @@ -54,9 +54,10 @@ def _build_entities( return [ ViCareButton( + description, + get_device_serial(device.api), device.config, device.api, - description, ) for device in device_list for description in BUTTON_DESCRIPTIONS @@ -87,12 +88,13 @@ class ViCareButton(ViCareEntity, ButtonEntity): def __init__( self, + description: ViCareButtonEntityDescription, + device_serial: str | None, device_config: PyViCareDeviceConfig, device: PyViCareDevice, - description: ViCareButtonEntityDescription, ) -> None: """Initialize the button.""" - super().__init__(device_config, device, description.key) + super().__init__(description.key, device_serial, device_config, device) self.entity_description = description def press(self) -> None: diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index df1cde2abca..67330bf201d 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -40,7 +40,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity 
import ViCareEntity from .types import HeatingProgram, ViCareDevice -from .utils import get_burners, get_circuits, get_compressors +from .utils import get_burners, get_circuits, get_compressors, get_device_serial _LOGGER = logging.getLogger(__name__) @@ -87,6 +87,7 @@ def _build_entities( """Create ViCare climate entities for a device.""" return [ ViCareClimate( + get_device_serial(device.api), device.config, device.api, circuit, @@ -139,19 +140,21 @@ class ViCareClimate(ViCareEntity, ClimateEntity): _current_action: bool | None = None _current_mode: str | None = None _current_program: str | None = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, + device_serial: str | None, device_config: PyViCareDeviceConfig, device: PyViCareDevice, circuit: PyViCareHeatingCircuit, ) -> None: """Initialize the climate device.""" - super().__init__(device_config, device, circuit.id) - self._circuit = circuit + super().__init__( + self._attr_translation_key, device_serial, device_config, device, circuit + ) + self._device = device self._attributes: dict[str, Any] = {} - self._attributes["vicare_programs"] = self._circuit.getPrograms() + self._attributes["vicare_programs"] = self._api.getPrograms() self._attr_preset_modes = [ preset for heating_program in self._attributes["vicare_programs"] @@ -163,11 +166,13 @@ class ViCareClimate(ViCareEntity, ClimateEntity): try: _room_temperature = None with suppress(PyViCareNotSupportedFeatureError): - _room_temperature = self._circuit.getRoomTemperature() + self._attributes["room_temperature"] = _room_temperature = ( + self._api.getRoomTemperature() + ) _supply_temperature = None with suppress(PyViCareNotSupportedFeatureError): - _supply_temperature = self._circuit.getSupplyTemperature() + _supply_temperature = self._api.getSupplyTemperature() if _room_temperature is not None: self._attr_current_temperature = _room_temperature @@ -177,44 +182,39 @@ class ViCareClimate(ViCareEntity, ClimateEntity): self._attr_current_temperature = None with suppress(PyViCareNotSupportedFeatureError): - self._current_program = self._circuit.getActiveProgram() - - with suppress(PyViCareNotSupportedFeatureError): - self._attr_target_temperature = ( - self._circuit.getCurrentDesiredTemperature() + self._attributes["active_vicare_program"] = self._current_program = ( + self._api.getActiveProgram() ) with suppress(PyViCareNotSupportedFeatureError): - self._current_mode = self._circuit.getActiveMode() + self._attr_target_temperature = self._api.getCurrentDesiredTemperature() - # Update the generic device attributes - self._attributes = { - "room_temperature": _room_temperature, - "active_vicare_program": self._current_program, - "active_vicare_mode": self._current_mode, - } + with suppress(PyViCareNotSupportedFeatureError): + self._attributes["active_vicare_mode"] = self._current_mode = ( + self._api.getActiveMode() + ) with suppress(PyViCareNotSupportedFeatureError): self._attributes["heating_curve_slope"] = ( - self._circuit.getHeatingCurveSlope() + self._api.getHeatingCurveSlope() ) with suppress(PyViCareNotSupportedFeatureError): self._attributes["heating_curve_shift"] = ( - self._circuit.getHeatingCurveShift() + self._api.getHeatingCurveShift() ) with suppress(PyViCareNotSupportedFeatureError): - self._attributes["vicare_modes"] = self._circuit.getModes() + self._attributes["vicare_modes"] = self._api.getModes() self._current_action = False # Update the specific device attributes with suppress(PyViCareNotSupportedFeatureError): - for burner in 
get_burners(self._api): + for burner in get_burners(self._device): self._current_action = self._current_action or burner.getActive() with suppress(PyViCareNotSupportedFeatureError): - for compressor in get_compressors(self._api): + for compressor in get_compressors(self._device): self._current_action = ( self._current_action or compressor.getActive() ) @@ -245,9 +245,9 @@ class ViCareClimate(ViCareEntity, ClimateEntity): raise ValueError(f"Cannot set invalid hvac mode: {hvac_mode}") _LOGGER.debug("Setting hvac mode to %s / %s", hvac_mode, vicare_mode) - self._circuit.setMode(vicare_mode) + self._api.setMode(vicare_mode) - def vicare_mode_from_hvac_mode(self, hvac_mode): + def vicare_mode_from_hvac_mode(self, hvac_mode) -> str | None: """Return the corresponding vicare mode for an hvac_mode.""" if "vicare_modes" not in self._attributes: return None @@ -283,7 +283,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): def set_temperature(self, **kwargs: Any) -> None: """Set new target temperatures.""" if (temp := kwargs.get(ATTR_TEMPERATURE)) is not None: - self._circuit.setProgramTemperature(self._current_program, temp) + self._api.setProgramTemperature(self._current_program, temp) self._attr_target_temperature = temp @property @@ -312,7 +312,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): ): _LOGGER.debug("deactivating %s", self._current_program) try: - self._circuit.deactivateProgram(self._current_program) + self._api.deactivateProgram(self._current_program) except PyViCareCommandError as err: raise ServiceValidationError( translation_domain=DOMAIN, @@ -326,7 +326,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): if target_program in CHANGABLE_HEATING_PROGRAMS: _LOGGER.debug("activating %s", target_program) try: - self._circuit.activateProgram(target_program) + self._api.activateProgram(target_program) except PyViCareCommandError as err: raise ServiceValidationError( translation_domain=DOMAIN, @@ -341,9 +341,9 @@ class ViCareClimate(ViCareEntity, ClimateEntity): """Show Device Attributes.""" return self._attributes - def set_vicare_mode(self, vicare_mode): + def set_vicare_mode(self, vicare_mode) -> None: """Service function to set vicare modes directly.""" if vicare_mode not in self._attributes["vicare_modes"]: raise ValueError(f"Cannot set invalid vicare mode: {vicare_mode}.") - self._circuit.setMode(vicare_mode) + self._api.setMode(vicare_mode) diff --git a/homeassistant/components/vicare/config_flow.py b/homeassistant/components/vicare/config_flow.py index 67ce4f2c186..c711cc06074 100644 --- a/homeassistant/components/vicare/config_flow.py +++ b/homeassistant/components/vicare/config_flow.py @@ -13,7 +13,7 @@ from PyViCare.PyViCareUtils import ( import voluptuous as vol from homeassistant.components import dhcp -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import format_mac @@ -50,7 +50,6 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for ViCare.""" VERSION = 1 - entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -81,7 +80,6 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with ViCare.""" - self.entry = 
self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -89,11 +87,11 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm re-authentication with ViCare.""" errors: dict[str, str] = {} - assert self.entry is not None + reauth_entry = self._get_reauth_entry() if user_input: data = { - **self.entry.data, + **reauth_entry.data, **user_input, } @@ -102,17 +100,12 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError): errors["base"] = "invalid_auth" else: - self.hass.config_entries.async_update_entry( - self.entry, - data=data, - ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) return self.async_show_form( step_id="reauth_confirm", data_schema=self.add_suggested_values_to_schema( - REAUTH_SCHEMA, self.entry.data + REAUTH_SCHEMA, reauth_entry.data ), errors=errors, ) diff --git a/homeassistant/components/vicare/const.py b/homeassistant/components/vicare/const.py index 8f8ae3c94e3..828a879927d 100644 --- a/homeassistant/components/vicare/const.py +++ b/homeassistant/components/vicare/const.py @@ -23,7 +23,6 @@ UNSUPPORTED_DEVICES = [ "E3_FloorHeatingCircuitChannel", "E3_FloorHeatingCircuitDistributorBox", "E3_RoomControl_One_522", - "E3_RoomSensor", ] DEVICE_LIST = "device_list" diff --git a/homeassistant/components/vicare/entity.py b/homeassistant/components/vicare/entity.py index 1bb2993cd3a..2d858185b9f 100644 --- a/homeassistant/components/vicare/entity.py +++ b/homeassistant/components/vicare/entity.py @@ -2,6 +2,9 @@ from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareHeatingDevice import ( + HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, +) from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity @@ -16,21 +19,28 @@ class ViCareEntity(Entity): def __init__( self, + unique_id_suffix: str, + device_serial: str | None, device_config: PyViCareDeviceConfig, device: PyViCareDevice, - unique_id_suffix: str, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the entity.""" - self._api = device + gateway_serial = device_config.getConfig().serial + device_id = device_config.getId() - self._attr_unique_id = f"{device_config.getConfig().serial}-{unique_id_suffix}" - # valid for compressors, circuits, burners (HeatingDeviceWithComponent) - if hasattr(device, "id"): - self._attr_unique_id += f"-{device.id}" + identifier = f"{gateway_serial}_{device_serial.replace("zigbee-", "zigbee_") if device_serial is not None else device_id}" + + self._api: PyViCareDevice | PyViCareHeatingDeviceComponent = ( + component if component else device + ) + self._attr_unique_id = f"{identifier}-{unique_id_suffix}" + if component: + self._attr_unique_id += f"-{component.id}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, device_config.getConfig().serial)}, - serial_number=device_config.getConfig().serial, + identifiers={(DOMAIN, identifier)}, + serial_number=device_serial, name=device_config.getModel(), manufacturer="Viessmann", model=device_config.getModel(), diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py index 
5b9dd2787e8..6e8513a1f7e 100644 --- a/homeassistant/components/vicare/fan.py +++ b/homeassistant/components/vicare/fan.py @@ -29,6 +29,8 @@ from homeassistant.util.percentage import ( from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity +from .types import ViCareDevice +from .utils import get_device_serial _LOGGER = logging.getLogger(__name__) @@ -89,6 +91,17 @@ ORDERED_NAMED_FAN_SPEEDS = [ ] +def _build_entities( + device_list: list[ViCareDevice], +) -> list[ViCareFan]: + """Create ViCare climate entities for a device.""" + return [ + ViCareFan(get_device_serial(device.api), device.config, device.api) + for device in device_list + if isinstance(device.api, PyViCareVentilationDevice) + ] + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -99,37 +112,39 @@ async def async_setup_entry( device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] async_add_entities( - [ - ViCareFan(device.config, device.api) - for device in device_list - if isinstance(device.api, PyViCareVentilationDevice) - ] + await hass.async_add_executor_job( + _build_entities, + device_list, + ) ) class ViCareFan(ViCareEntity, FanEntity): """Representation of the ViCare ventilation device.""" - _attr_preset_modes = list[str]( - [ - VentilationMode.PERMANENT, - VentilationMode.VENTILATION, - VentilationMode.SENSOR_DRIVEN, - VentilationMode.SENSOR_OVERRIDE, - ] - ) _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) - _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + _attr_supported_features = FanEntityFeature.SET_SPEED _attr_translation_key = "ventilation" - _enable_turn_on_off_backwards_compatibility = False def __init__( self, + device_serial: str | None, device_config: PyViCareDeviceConfig, device: PyViCareDevice, ) -> None: """Initialize the fan entity.""" - super().__init__(device_config, device, self._attr_translation_key) + super().__init__( + self._attr_translation_key, device_serial, device_config, device + ) + # init presets + supported_modes = list[str](self._api.getAvailableModes()) + self._attr_preset_modes = [ + mode + for mode in VentilationMode + if VentilationMode.to_vicare_mode(mode) in supported_modes + ] + if len(self._attr_preset_modes) > 0: + self._attr_supported_features |= FanEntityFeature.PRESET_MODE def update(self) -> None: """Update state of fan.""" @@ -157,6 +172,30 @@ class ViCareFan(ViCareEntity, FanEntity): # Viessmann ventilation unit cannot be turned off return True + @property + def icon(self) -> str | None: + """Return the icon to use in the frontend.""" + if hasattr(self, "_attr_preset_mode"): + if self._attr_preset_mode == VentilationMode.VENTILATION: + return "mdi:fan-clock" + if self._attr_preset_mode in [ + VentilationMode.SENSOR_DRIVEN, + VentilationMode.SENSOR_OVERRIDE, + ]: + return "mdi:fan-auto" + if self._attr_preset_mode == VentilationMode.PERMANENT: + if self._attr_percentage == 0: + return "mdi:fan-off" + if self._attr_percentage is not None: + level = 1 + ORDERED_NAMED_FAN_SPEEDS.index( + percentage_to_ordered_list_item( + ORDERED_NAMED_FAN_SPEEDS, self._attr_percentage + ) + ) + if level < 4: # fan-speed- only supports 1-3 + return f"mdi:fan-speed-{level}" + return "mdi:fan" + def set_percentage(self, percentage: int) -> None: """Set the speed of the fan, as a percentage.""" if self._attr_preset_mode != str(VentilationMode.PERMANENT): diff --git a/homeassistant/components/vicare/icons.json b/homeassistant/components/vicare/icons.json index 2f40d8a8822..9d0f27a863c 100644 --- 
a/homeassistant/components/vicare/icons.json +++ b/homeassistant/components/vicare/icons.json @@ -88,6 +88,8 @@ } }, "services": { - "set_vicare_mode": "mdi:cog" + "set_vicare_mode": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 186e9ef6289..0bb5594e829 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare-neo==0.2.1"] + "requirements": ["PyViCare==2.38.0"] } diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index d53b7183327..f9af9636941 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -33,7 +33,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity from .types import HeatingProgram, ViCareDevice, ViCareRequiredKeysMixin -from .utils import get_circuits, is_supported +from .utils import get_circuits, get_device_serial, is_supported _LOGGER = logging.getLogger(__name__) @@ -50,6 +50,18 @@ class ViCareNumberEntityDescription(NumberEntityDescription, ViCareRequiredKeysM DEVICE_ENTITY_DESCRIPTIONS: tuple[ViCareNumberEntityDescription, ...] = ( + ViCareNumberEntityDescription( + key="dhw_temperature", + translation_key="dhw_temperature", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getDomesticHotWaterConfiguredTemperature(), + value_setter=lambda api, value: api.setDomesticHotWaterTemperature(value), + min_value_getter=lambda api: api.getDomesticHotWaterMinTemperature(), + max_value_getter=lambda api: api.getDomesticHotWaterMaxTemperature(), + native_step=1, + ), ViCareNumberEntityDescription( key="dhw_secondary_temperature", translation_key="dhw_secondary_temperature", @@ -63,6 +75,34 @@ DEVICE_ENTITY_DESCRIPTIONS: tuple[ViCareNumberEntityDescription, ...] 
= ( native_max_value=60, native_step=1, ), + ViCareNumberEntityDescription( + key="dhw_hysteresis_switch_on", + translation_key="dhw_hysteresis_switch_on", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.KELVIN, + value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOn(), + value_setter=lambda api, value: api.setDomesticHotWaterHysteresisSwitchOn( + value + ), + min_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOnMin(), + max_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOnMax(), + stepping_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOnStepping(), + ), + ViCareNumberEntityDescription( + key="dhw_hysteresis_switch_off", + translation_key="dhw_hysteresis_switch_off", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.KELVIN, + value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOff(), + value_setter=lambda api, value: api.setDomesticHotWaterHysteresisSwitchOff( + value + ), + min_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOffMin(), + max_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOffMax(), + stepping_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOffStepping(), + ), ) @@ -225,6 +265,72 @@ CIRCUIT_ENTITY_DESCRIPTIONS: tuple[ViCareNumberEntityDescription, ...] = ( HeatingProgram.COMFORT_HEATING ), ), + ViCareNumberEntityDescription( + key="normal_cooling_temperature", + translation_key="normal_cooling_temperature", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getDesiredTemperatureForProgram( + HeatingProgram.NORMAL_COOLING + ), + value_setter=lambda api, value: api.setProgramTemperature( + HeatingProgram.NORMAL_COOLING, value + ), + min_value_getter=lambda api: api.getProgramMinTemperature( + HeatingProgram.NORMAL_COOLING + ), + max_value_getter=lambda api: api.getProgramMaxTemperature( + HeatingProgram.NORMAL_COOLING + ), + stepping_getter=lambda api: api.getProgramStepping( + HeatingProgram.NORMAL_COOLING + ), + ), + ViCareNumberEntityDescription( + key="reduced_cooling_temperature", + translation_key="reduced_cooling_temperature", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getDesiredTemperatureForProgram( + HeatingProgram.REDUCED_COOLING + ), + value_setter=lambda api, value: api.setProgramTemperature( + HeatingProgram.REDUCED_COOLING, value + ), + min_value_getter=lambda api: api.getProgramMinTemperature( + HeatingProgram.REDUCED_COOLING + ), + max_value_getter=lambda api: api.getProgramMaxTemperature( + HeatingProgram.REDUCED_COOLING + ), + stepping_getter=lambda api: api.getProgramStepping( + HeatingProgram.REDUCED_COOLING + ), + ), + ViCareNumberEntityDescription( + key="comfort_cooling_temperature", + translation_key="comfort_cooling_temperature", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getDesiredTemperatureForProgram( + HeatingProgram.COMFORT_COOLING + ), + value_setter=lambda api, value: api.setProgramTemperature( + HeatingProgram.COMFORT_COOLING, value + ), + min_value_getter=lambda api: 
api.getProgramMinTemperature( + HeatingProgram.COMFORT_COOLING + ), + max_value_getter=lambda api: api.getProgramMaxTemperature( + HeatingProgram.COMFORT_COOLING + ), + stepping_getter=lambda api: api.getProgramStepping( + HeatingProgram.COMFORT_COOLING + ), + ), ) @@ -233,30 +339,32 @@ def _build_entities( ) -> list[ViCareNumber]: """Create ViCare number entities for a device.""" - entities: list[ViCareNumber] = [ - ViCareNumber( - device.config, - device.api, - description, - ) - for device in device_list - for description in DEVICE_ENTITY_DESCRIPTIONS - if is_supported(description.key, description, device.api) - ] - - entities.extend( - [ + entities: list[ViCareNumber] = [] + for device in device_list: + # add device entities + entities.extend( ViCareNumber( - device.config, - circuit, description, + get_device_serial(device.api), + device.config, + device.api, + ) + for description in DEVICE_ENTITY_DESCRIPTIONS + if is_supported(description.key, description, device.api) + ) + # add component entities + entities.extend( + ViCareNumber( + description, + get_device_serial(device.api), + device.config, + device.api, + circuit, ) - for device in device_list for circuit in get_circuits(device.api) for description in CIRCUIT_ENTITY_DESCRIPTIONS if is_supported(description.key, description, circuit) - ] - ) + ) return entities @@ -283,12 +391,16 @@ class ViCareNumber(ViCareEntity, NumberEntity): def __init__( self, - device_config: PyViCareDeviceConfig, - api: PyViCareDevice | PyViCareHeatingDeviceComponent, description: ViCareNumberEntityDescription, + device_serial: str | None, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the number.""" - super().__init__(device_config, api, description.key) + super().__init__( + description.key, device_serial, device_config, device, component + ) self.entity_description = description @property diff --git a/homeassistant/components/vicare/quality_scale.yaml b/homeassistant/components/vicare/quality_scale.yaml new file mode 100644 index 00000000000..436e046204f --- /dev/null +++ b/homeassistant/components/vicare/quality_scale.yaml @@ -0,0 +1,49 @@ +rules: + # Bronze + config-flow: + status: todo + comment: data_description is missing. + test-before-configure: done + unique-config-entry: + status: todo + comment: Uniqueness is not checked yet. + config-flow-test-coverage: done + runtime-data: + status: todo + comment: runtime_data is not used yet. + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: Entities of this integration does not explicitly subscribe to events. + dependency-transparency: done + action-setup: + status: todo + comment: service registered in climate async_setup_entry. + common-modules: + status: done + comment: No coordinator is used, data update is centrally handled by the library. 
+ docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: + status: todo + comment: removal instructions missing + docs-actions: done + brands: done + # Silver + integration-owner: done + reauthentication-flow: done + config-entry-unloading: done + # Gold + devices: done + diagnostics: done + entity-category: done + dynamic-devices: done + entity-device-class: done + entity-translations: done + entity-disabled-by-default: done + repair-issues: + status: exempt + comment: This integration does not raise any repairable issues. diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 5d51abfbbf6..57b7c0bec9a 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -51,7 +51,13 @@ from .const import ( ) from .entity import ViCareEntity from .types import ViCareDevice, ViCareRequiredKeysMixin -from .utils import get_burners, get_circuits, get_compressors, is_supported +from .utils import ( + get_burners, + get_circuits, + get_compressors, + get_device_serial, + is_supported, +) _LOGGER = logging.getLogger(__name__) @@ -171,6 +177,30 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), + ViCareSensorEntityDescription( + key="dhw_storage_temperature", + translation_key="dhw_storage_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getDomesticHotWaterStorageTemperature(), + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + ViCareSensorEntityDescription( + key="dhw_storage_top_temperature", + translation_key="dhw_storage_top_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getHotWaterStorageTemperatureTop(), + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + ViCareSensorEntityDescription( + key="dhw_storage_bottom_temperature", + translation_key="dhw_storage_bottom_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getHotWaterStorageTemperatureBottom(), + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), ViCareSensorEntityDescription( key="hotwater_gas_consumption_today", translation_key="hotwater_gas_consumption_today", @@ -400,6 +430,32 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( state_class=SensorStateClass.TOTAL_INCREASING, entity_registry_enabled_default=False, ), + ViCareSensorEntityDescription( + key="energy_consumption_cooling_today", + translation_key="energy_consumption_cooling_today", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_getter=lambda api: api.getPowerConsumptionCoolingToday(), + unit_getter=lambda api: api.getPowerConsumptionCoolingUnit(), + state_class=SensorStateClass.TOTAL_INCREASING, + ), + ViCareSensorEntityDescription( + key="energy_consumption_cooling_this_month", + translation_key="energy_consumption_cooling_this_month", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_getter=lambda api: api.getPowerConsumptionCoolingThisMonth(), + unit_getter=lambda api: api.getPowerConsumptionCoolingUnit(), + state_class=SensorStateClass.TOTAL_INCREASING, + entity_registry_enabled_default=False, + ), + ViCareSensorEntityDescription( + key="energy_consumption_cooling_this_year", + translation_key="energy_consumption_cooling_this_year", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_getter=lambda api: api.getPowerConsumptionCoolingThisYear(), + unit_getter=lambda api: api.getPowerConsumptionCoolingUnit(), + state_class=SensorStateClass.TOTAL_INCREASING, + entity_registry_enabled_default=False, + ), ViCareSensorEntityDescription( key="energy_dhw_summary_consumption_heating_currentday", translation_key="energy_dhw_summary_consumption_heating_currentday", @@ -745,9 +801,22 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( options=["ready", "production"], value_getter=lambda api: _filter_pv_states(api.getPhotovoltaicStatus()), ), + ViCareSensorEntityDescription( + key="room_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + value_getter=lambda api: api.getTemperature(), + ), + ViCareSensorEntityDescription( + key="room_humidity", + device_class=SensorDeviceClass.HUMIDITY, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + value_getter=lambda api: api.getHumidity(), + ), ) - CIRCUIT_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ViCareSensorEntityDescription( key="supply_temperature", @@ -865,61 +934,38 @@ def _build_entities( entities: list[ViCareSensor] = [] for device in device_list: - entities.extend(_build_entities_for_device(device.api, device.config)) + # add device entities entities.extend( - _build_entities_for_component( - get_circuits(device.api), device.config, CIRCUIT_SENSORS + ViCareSensor( + description, + get_device_serial(device.api), + device.config, + device.api, ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, device.api) ) - entities.extend( - _build_entities_for_component( - get_burners(device.api), device.config, BURNER_SENSORS + # add component entities + for component_list, entity_description_list in ( + (get_circuits(device.api), CIRCUIT_SENSORS), + (get_burners(device.api), BURNER_SENSORS), + (get_compressors(device.api), COMPRESSOR_SENSORS), + ): + entities.extend( + ViCareSensor( + description, + get_device_serial(device.api), + device.config, + device.api, + component, + ) + for component in component_list + for description in entity_description_list + if is_supported(description.key, description, component) ) - ) - entities.extend( - _build_entities_for_component( - get_compressors(device.api), device.config, COMPRESSOR_SENSORS - ) - ) return entities -def _build_entities_for_device( - device: PyViCareDevice, - device_config: PyViCareDeviceConfig, -) -> list[ViCareSensor]: - """Create device specific ViCare sensor entities.""" - - return [ - ViCareSensor( - device_config, - device, - description, - ) - for description in GLOBAL_SENSORS - if is_supported(description.key, description, device) - ] - - -def _build_entities_for_component( - components: list[PyViCareHeatingDeviceComponent], - device_config: PyViCareDeviceConfig, - entity_descriptions: tuple[ViCareSensorEntityDescription, ...], -) -> list[ViCareSensor]: - """Create component specific ViCare sensor entities.""" - - return [ - ViCareSensor( - device_config, - component, - description, - ) - for component in components - for description in entity_descriptions - if is_supported(description.key, description, component) - ] - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -932,7 +978,9 @@ async def async_setup_entry( await hass.async_add_executor_job( _build_entities, device_list, - ) + ), + # run update to have device_class set depending on unit_of_measurement + True, ) @@ -943,15 +991,17 @@ class ViCareSensor(ViCareEntity, SensorEntity): def __init__( self, - device_config: PyViCareDeviceConfig, - api: PyViCareDevice | PyViCareHeatingDeviceComponent, description: ViCareSensorEntityDescription, + device_serial: str | None, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(device_config, api, description.key) + super().__init__( + description.key, device_serial, device_config, device, component + ) self.entity_description = description - # run update to have device_class set depending on unit_of_measurement - self.update() @property def available(self) -> bool: diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 0452a560cb8..77e570da779 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -97,16 +97,34 @@ "name": "Comfort temperature" }, "normal_heating_temperature": { - "name": 
"[%key:component::vicare::entity::number::normal_temperature::name%]" + "name": "Normal heating temperature" }, "reduced_heating_temperature": { - "name": "[%key:component::vicare::entity::number::reduced_temperature::name%]" + "name": "Reduced heating temperature" }, "comfort_heating_temperature": { - "name": "[%key:component::vicare::entity::number::comfort_temperature::name%]" + "name": "Comfort heating temperature" + }, + "normal_cooling_temperature": { + "name": "Normal cooling temperature" + }, + "reduced_cooling_temperature": { + "name": "Reduced cooling temperature" + }, + "comfort_cooling_temperature": { + "name": "Comfort cooling temperature" + }, + "dhw_temperature": { + "name": "DHW temperature" }, "dhw_secondary_temperature": { "name": "DHW secondary temperature" + }, + "dhw_hysteresis_switch_on": { + "name": "DHW hysteresis switch on" + }, + "dhw_hysteresis_switch_off": { + "name": "DHW hysteresis switch off" } }, "sensor": { @@ -143,6 +161,15 @@ "hotwater_min_temperature": { "name": "DHW min temperature" }, + "dhw_storage_temperature": { + "name": "DHW storage temperature" + }, + "dhw_storage_top_temperature": { + "name": "DHW storage top temperature" + }, + "dhw_storage_bottom_temperature": { + "name": "DHW storage bottom temperature" + }, "hotwater_gas_consumption_today": { "name": "DHW gas consumption today" }, @@ -216,28 +243,49 @@ "name": "DHW gas consumption last seven days" }, "energy_summary_consumption_heating_currentday": { - "name": "Heating energy consumption today" + "name": "Heating electricity consumption today" }, "energy_summary_consumption_heating_currentmonth": { - "name": "Heating energy consumption this month" + "name": "Heating electricity consumption this month" }, "energy_summary_consumption_heating_currentyear": { - "name": "Heating energy consumption this year" + "name": "Heating electricity consumption this year" }, "energy_summary_consumption_heating_lastsevendays": { - "name": "Heating energy consumption last seven days" + "name": "Heating electricity consumption last seven days" + }, + "energy_consumption_cooling_today": { + "name": "Cooling electricity consumption today" + }, + "energy_consumption_cooling_this_month": { + "name": "Cooling electricity consumption this month" + }, + "energy_consumption_cooling_this_year": { + "name": "Cooling electricity consumption this year" }, "energy_dhw_summary_consumption_heating_currentday": { - "name": "DHW energy consumption today" + "name": "DHW electricity consumption today" }, "energy_dhw_summary_consumption_heating_currentmonth": { - "name": "DHW energy consumption this month" + "name": "DHW electricity consumption this month" }, "energy_dhw_summary_consumption_heating_currentyear": { - "name": "DHW energy consumption this year" + "name": "DHW electricity consumption this year" }, "energy_summary_dhw_consumption_heating_lastsevendays": { - "name": "DHW energy consumption last seven days" + "name": "DHW electricity consumption last seven days" + }, + "power_consumption_today": { + "name": "Electricity consumption today" + }, + "power_consumption_this_week": { + "name": "Electricity consumption this week" + }, + "power_consumption_this_month": { + "name": "Electricity consumption this month" + }, + "power_consumption_this_year": { + "name": "Electricity consumption this year" }, "power_production_current": { "name": "Power production current" @@ -272,18 +320,6 @@ "solar_power_production_this_year": { "name": "Solar energy production this year" }, - "power_consumption_today": { - "name": "Energy 
consumption today" - }, - "power_consumption_this_week": { - "name": "Power consumption this week" - }, - "power_consumption_this_month": { - "name": "Energy consumption this month" - }, - "power_consumption_this_year": { - "name": "Energy consumption this year" - }, "buffer_top_temperature": { "name": "Buffer top temperature" }, diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 7e1ec7f8bee..98d1c0566ce 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -1,6 +1,7 @@ """Types for the ViCare integration.""" from collections.abc import Callable +from contextlib import suppress from dataclasses import dataclass import enum from typing import Any @@ -24,11 +25,14 @@ class HeatingProgram(enum.StrEnum): COMFORT = "comfort" COMFORT_HEATING = "comfortHeating" + COMFORT_COOLING = "comfortCooling" ECO = "eco" NORMAL = "normal" NORMAL_HEATING = "normalHeating" + NORMAL_COOLING = "normalCooling" REDUCED = "reduced" REDUCED_HEATING = "reducedHeating" + REDUCED_COOLING = "reducedCooling" STANDBY = "standby" @staticmethod @@ -48,8 +52,12 @@ class HeatingProgram(enum.StrEnum): ) -> str | None: """Return the mapped ViCare heating program for the Home Assistant preset.""" for program in supported_heating_programs: - if VICARE_TO_HA_PRESET_HEATING.get(HeatingProgram(program)) == ha_preset: - return program + with suppress(ValueError): + if ( + VICARE_TO_HA_PRESET_HEATING.get(HeatingProgram(program)) + == ha_preset + ): + return program return None diff --git a/homeassistant/components/vicare/utils.py b/homeassistant/components/vicare/utils.py index 2ba5ddbfb0a..5156ea4a41e 100644 --- a/homeassistant/components/vicare/utils.py +++ b/homeassistant/components/vicare/utils.py @@ -7,7 +7,12 @@ from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, ) -from PyViCare.PyViCareUtils import PyViCareNotSupportedFeatureError +from PyViCare.PyViCareUtils import ( + PyViCareInvalidDataError, + PyViCareNotSupportedFeatureError, + PyViCareRateLimitError, +) +import requests from homeassistant.config_entries import ConfigEntry @@ -27,6 +32,23 @@ def get_device( )() +def get_device_serial(device: PyViCareDevice) -> str | None: + """Get device serial for device if supported.""" + try: + return device.getSerial() + except PyViCareNotSupportedFeatureError: + _LOGGER.debug("Device does not offer a 'device.serial' data point") + except PyViCareRateLimitError as limit_exception: + _LOGGER.debug("Vicare API rate limit exceeded: %s", limit_exception) + except PyViCareInvalidDataError as invalid_data_exception: + _LOGGER.debug("Invalid data from Vicare server: %s", invalid_data_exception) + except requests.exceptions.ConnectionError: + _LOGGER.debug("Unable to retrieve data from ViCare server") + except ValueError: + _LOGGER.debug("Unable to decode data from ViCare server") + return None + + def is_supported( name: str, entity_description: ViCareRequiredKeysMixin, diff --git a/homeassistant/components/vicare/water_heater.py b/homeassistant/components/vicare/water_heater.py index c76c6ea81aa..5e241c9a3be 100644 --- a/homeassistant/components/vicare/water_heater.py +++ b/homeassistant/components/vicare/water_heater.py @@ -28,7 +28,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity from .types import ViCareDevice -from .utils 
import get_circuits +from .utils import get_circuits, get_device_serial _LOGGER = logging.getLogger(__name__) @@ -69,6 +69,7 @@ def _build_entities( return [ ViCareWater( + get_device_serial(device.api), device.config, device.api, circuit, @@ -108,12 +109,13 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity): def __init__( self, + device_serial: str | None, device_config: PyViCareDeviceConfig, device: PyViCareDevice, circuit: PyViCareHeatingCircuit, ) -> None: """Initialize the DHW water_heater device.""" - super().__init__(device_config, device, circuit.id) + super().__init__(circuit.id, device_serial, device_config, device) self._circuit = circuit self._attributes: dict[str, Any] = {} diff --git a/homeassistant/components/vilfo/__init__.py b/homeassistant/components/vilfo/__init__.py index fe00fa494b5..ca74e74f37a 100644 --- a/homeassistant/components/vilfo/__init__.py +++ b/homeassistant/components/vilfo/__init__.py @@ -105,5 +105,5 @@ class VilfoRouterData: return if self.available and self._unavailable_logged: - _LOGGER.info("Vilfo Router %s is available again", self.host) + _LOGGER.warning("Vilfo Router %s is available again", self.host) self._unavailable_logged = False diff --git a/homeassistant/components/vilfo/config_flow.py b/homeassistant/components/vilfo/config_flow.py index a6cff506f79..cdba7f1b8c2 100644 --- a/homeassistant/components/vilfo/config_flow.py +++ b/homeassistant/components/vilfo/config_flow.py @@ -109,7 +109,7 @@ class DomainConfigFlow(ConfigFlow, domain=DOMAIN): try: info = await validate_input(self.hass, user_input) except InvalidHost: - errors[CONF_HOST] = "wrong_host" + errors["base"] = "invalid_host" except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: diff --git a/homeassistant/components/vilfo/strings.json b/homeassistant/components/vilfo/strings.json index f2c4c38780b..55c996d4a3d 100644 --- a/homeassistant/components/vilfo/strings.json +++ b/homeassistant/components/vilfo/strings.json @@ -14,6 +14,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "invalid_host": "[%key:common::config_flow::error::invalid_host%]", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { diff --git a/homeassistant/components/vivotek/manifest.json b/homeassistant/components/vivotek/manifest.json index 5a33ca09908..f0b622afcad 100644 --- a/homeassistant/components/vivotek/manifest.json +++ b/homeassistant/components/vivotek/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/vivotek", "iot_class": "local_polling", "loggers": ["libpyvivotek"], + "quality_scale": "legacy", "requirements": ["libpyvivotek==0.4.0"] } diff --git a/homeassistant/components/vizio/__init__.py b/homeassistant/components/vizio/__init__.py index 09d6f3be090..4af42d76b62 100644 --- a/homeassistant/components/vizio/__init__.py +++ b/homeassistant/components/vizio/__init__.py @@ -4,55 +4,18 @@ from __future__ import annotations from typing import Any -import voluptuous as vol - from homeassistant.components.media_player import MediaPlayerDeviceClass -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry, ConfigEntryState -from homeassistant.const import Platform +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import CONF_DEVICE_CLASS, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from 
homeassistant.helpers.storage import Store -from homeassistant.helpers.typing import ConfigType -from .const import CONF_APPS, CONF_DEVICE_CLASS, DOMAIN, VIZIO_SCHEMA +from .const import CONF_APPS, DOMAIN from .coordinator import VizioAppsDataUpdateCoordinator - -def validate_apps(config: ConfigType) -> ConfigType: - """Validate CONF_APPS is only used when CONF_DEVICE_CLASS is MediaPlayerDeviceClass.TV.""" - if ( - config.get(CONF_APPS) is not None - and config[CONF_DEVICE_CLASS] != MediaPlayerDeviceClass.TV - ): - raise vol.Invalid( - f"'{CONF_APPS}' can only be used if {CONF_DEVICE_CLASS}' is" - f" '{MediaPlayerDeviceClass.TV}'" - ) - - return config - - -CONFIG_SCHEMA = vol.Schema( - {DOMAIN: vol.All(cv.ensure_list, [vol.All(VIZIO_SCHEMA, validate_apps)])}, - extra=vol.ALLOW_EXTRA, -) - PLATFORMS = [Platform.MEDIA_PLAYER] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Component setup, run import config flow for each entry in config.""" - if DOMAIN in config: - for entry in config[DOMAIN]: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=entry - ) - ) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Load the saved entities.""" diff --git a/homeassistant/components/vizio/config_flow.py b/homeassistant/components/vizio/config_flow.py index d8b99595f54..d3921061d8e 100644 --- a/homeassistant/components/vizio/config_flow.py +++ b/homeassistant/components/vizio/config_flow.py @@ -14,8 +14,6 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.components.media_player import MediaPlayerDeviceClass from homeassistant.config_entries import ( - SOURCE_IGNORE, - SOURCE_IMPORT, SOURCE_ZEROCONF, ConfigEntry, ConfigFlow, @@ -108,10 +106,6 @@ def _host_is_same(host1: str, host2: str) -> bool: class VizioOptionsConfigFlow(OptionsFlow): """Handle Vizio options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize vizio options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -184,7 +178,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> VizioOptionsConfigFlow: """Get the options flow for this handler.""" - return VizioOptionsConfigFlow(config_entry) + return VizioOptionsConfigFlow() def __init__(self) -> None: """Initialize config flow.""" @@ -255,100 +249,13 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): if not errors: return await self._create_entry(user_input) - elif self._must_show_form and self.context["source"] == SOURCE_IMPORT: - # Import should always display the config form if CONF_ACCESS_TOKEN - # wasn't included but is needed so that the user can choose to update - # their configuration.yaml or to proceed with config flow pairing. We - # will also provide contextual message to user explaining why - _LOGGER.warning( - ( - "Couldn't complete configuration.yaml import: '%s' key is " - "missing. 
Either provide '%s' key in configuration.yaml or " - "finish setup by completing configuration via frontend" - ), - CONF_ACCESS_TOKEN, - CONF_ACCESS_TOKEN, - ) - self._must_show_form = False else: self._data = copy.deepcopy(user_input) return await self.async_step_pair_tv() schema = self._user_schema or _get_config_schema() - - if errors and self.context["source"] == SOURCE_IMPORT: - # Log an error message if import config flow fails since otherwise failure is silent - _LOGGER.error( - "Importing from configuration.yaml failed: %s", - ", ".join(errors.values()), - ) - return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" - # Check if new config entry matches any existing config entries - for entry in self._async_current_entries(): - # If source is ignore bypass host check and continue through loop - if entry.source == SOURCE_IGNORE: - continue - - if await self.hass.async_add_executor_job( - _host_is_same, entry.data[CONF_HOST], import_config[CONF_HOST] - ): - updated_options: dict[str, Any] = {} - updated_data: dict[str, Any] = {} - remove_apps = False - - if entry.data[CONF_HOST] != import_config[CONF_HOST]: - updated_data[CONF_HOST] = import_config[CONF_HOST] - - if entry.data[CONF_NAME] != import_config[CONF_NAME]: - updated_data[CONF_NAME] = import_config[CONF_NAME] - - # Update entry.data[CONF_APPS] if import_config[CONF_APPS] differs, and - # pop entry.data[CONF_APPS] if import_config[CONF_APPS] is not specified - if entry.data.get(CONF_APPS) != import_config.get(CONF_APPS): - if not import_config.get(CONF_APPS): - remove_apps = True - else: - updated_options[CONF_APPS] = import_config[CONF_APPS] - - if entry.data.get(CONF_VOLUME_STEP) != import_config[CONF_VOLUME_STEP]: - updated_options[CONF_VOLUME_STEP] = import_config[CONF_VOLUME_STEP] - - if updated_options or updated_data or remove_apps: - new_data = entry.data.copy() - new_options = entry.options.copy() - - if remove_apps: - new_data.pop(CONF_APPS) - new_options.pop(CONF_APPS) - - if updated_data: - new_data.update(updated_data) - - # options are stored in entry options and data so update both - if updated_options: - new_data.update(updated_options) - new_options.update(updated_options) - - self.hass.config_entries.async_update_entry( - entry=entry, data=new_data, options=new_options - ) - return self.async_abort(reason="updated_entry") - - return self.async_abort(reason="already_configured_device") - - self._must_show_form = True - # Store config key/value pairs that are not configurable in user step so they - # don't get lost on user step - if import_config.get(CONF_APPS): - self._apps = copy.deepcopy(import_config[CONF_APPS]) - return await self.async_step_user(user_input=import_config) - async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: @@ -439,11 +346,6 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): if pair_data: self._data[CONF_ACCESS_TOKEN] = pair_data.auth_token self._must_show_form = True - - if self.context["source"] == SOURCE_IMPORT: - # If user is pairing via config import, show different message - return await self.async_step_pairing_complete_import() - return await self.async_step_pairing_complete() # If no data was retrieved, it's assumed that the pairing attempt was not diff --git a/homeassistant/components/vizio/const.py b/homeassistant/components/vizio/const.py index 
4eb96256d2e..8451ae747de 100644 --- a/homeassistant/components/vizio/const.py +++ b/homeassistant/components/vizio/const.py @@ -10,14 +10,6 @@ from homeassistant.components.media_player import ( MediaPlayerDeviceClass, MediaPlayerEntityFeature, ) -from homeassistant.const import ( - CONF_ACCESS_TOKEN, - CONF_DEVICE_CLASS, - CONF_EXCLUDE, - CONF_HOST, - CONF_INCLUDE, - CONF_NAME, -) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import VolDictType @@ -84,43 +76,3 @@ VIZIO_DEVICE_CLASSES = { MediaPlayerDeviceClass.SPEAKER: VIZIO_DEVICE_CLASS_SPEAKER, MediaPlayerDeviceClass.TV: VIZIO_DEVICE_CLASS_TV, } - -VIZIO_SCHEMA = { - vol.Required(CONF_HOST): cv.string, - vol.Optional(CONF_ACCESS_TOKEN): cv.string, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.All( - cv.string, - vol.Lower, - vol.In([MediaPlayerDeviceClass.TV, MediaPlayerDeviceClass.SPEAKER]), - ), - vol.Optional(CONF_VOLUME_STEP, default=DEFAULT_VOLUME_STEP): vol.All( - vol.Coerce(int), vol.Range(min=1, max=10) - ), - vol.Optional(CONF_APPS): vol.All( - { - vol.Exclusive(CONF_INCLUDE, "apps_filter"): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Exclusive(CONF_EXCLUDE, "apps_filter"): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(CONF_ADDITIONAL_CONFIGS): vol.All( - cv.ensure_list, - [ - { - vol.Required(CONF_NAME): cv.string, - vol.Required(CONF_CONFIG): { - vol.Required(CONF_APP_ID): cv.string, - vol.Required(CONF_NAME_SPACE): vol.Coerce(int), - vol.Optional(CONF_MESSAGE, default=None): vol.Or( - cv.string, None - ), - }, - }, - ], - ), - }, - cv.has_at_least_one_key(CONF_INCLUDE, CONF_EXCLUDE, CONF_ADDITIONAL_CONFIGS), - ), -} diff --git a/homeassistant/components/vizio/coordinator.py b/homeassistant/components/vizio/coordinator.py index 1930828b595..a7ca7d7f9ed 100644 --- a/homeassistant/components/vizio/coordinator.py +++ b/homeassistant/components/vizio/coordinator.py @@ -34,10 +34,9 @@ class VizioAppsDataUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]] self.fail_threshold = 10 self.store = store - async def async_config_entry_first_refresh(self) -> None: + async def _async_setup(self) -> None: """Refresh data for the first time when a config entry is setup.""" self.data = await self.store.async_load() or APPS - await super().async_config_entry_first_refresh() async def _async_update_data(self) -> list[dict[str, Any]]: """Update data via library.""" diff --git a/homeassistant/components/vizio/icons.json b/homeassistant/components/vizio/icons.json index ccdaf816bb0..be6f727de6f 100644 --- a/homeassistant/components/vizio/icons.json +++ b/homeassistant/components/vizio/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_setting": "mdi:cog" + "update_setting": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/vizio/manifest.json b/homeassistant/components/vizio/manifest.json index e6812ed58b1..91b2ff46495 100644 --- a/homeassistant/components/vizio/manifest.json +++ b/homeassistant/components/vizio/manifest.json @@ -7,7 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["pyvizio"], - "quality_scale": "platinum", "requirements": ["pyvizio==0.1.61"], "zeroconf": ["_viziocast._tcp.local."] } diff --git a/homeassistant/components/vizio/media_player.py b/homeassistant/components/vizio/media_player.py index ba9c92f94f1..5711d8fbac9 100644 --- a/homeassistant/components/vizio/media_player.py +++ 
b/homeassistant/components/vizio/media_player.py @@ -200,7 +200,7 @@ class VizioDevice(MediaPlayerEntity): return if not self._attr_available: - _LOGGER.info( + _LOGGER.warning( "Restored connection to %s", self._config_entry.data[CONF_HOST] ) self._attr_available = True diff --git a/homeassistant/components/vlc/manifest.json b/homeassistant/components/vlc/manifest.json index 7e4fb7b2a4f..a31fe49859c 100644 --- a/homeassistant/components/vlc/manifest.json +++ b/homeassistant/components/vlc/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/vlc", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["python-vlc==3.0.18122"] } diff --git a/homeassistant/components/vlc_telnet/__init__.py b/homeassistant/components/vlc_telnet/__init__.py index a61fcafd2cb..c327b58a644 100644 --- a/homeassistant/components/vlc_telnet/__init__.py +++ b/homeassistant/components/vlc_telnet/__init__.py @@ -5,6 +5,9 @@ from dataclasses import dataclass from aiovlc.client import Client from aiovlc.exceptions import AuthError, ConnectError +from homeassistant.components.media_player import ( + SCAN_INTERVAL as MEDIAPLAYER_SCAN_INTERVAL, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform from homeassistant.core import HomeAssistant @@ -33,7 +36,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: VlcConfigEntry) -> bool: port = config[CONF_PORT] password = config[CONF_PASSWORD] - vlc = Client(password=password, host=host, port=port) + vlc = Client( + password=password, + host=host, + port=port, + timeout=int(MEDIAPLAYER_SCAN_INTERVAL.total_seconds() - 1), + ) available = True diff --git a/homeassistant/components/vlc_telnet/config_flow.py b/homeassistant/components/vlc_telnet/config_flow.py index 6ccb92e5b8b..08564937959 100644 --- a/homeassistant/components/vlc_telnet/config_flow.py +++ b/homeassistant/components/vlc_telnet/config_flow.py @@ -10,11 +10,11 @@ from aiovlc.client import Client from aiovlc.exceptions import AuthError, ConnectError import voluptuous as vol -from homeassistant.components.hassio import HassioServiceInfo -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import DEFAULT_PORT, DOMAIN @@ -70,7 +70,6 @@ class VLCTelnetConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for VLC media player Telnet.""" VERSION = 1 - entry: ConfigEntry | None = None hassio_discovery: dict[str, Any] | None = None async def async_step_user( @@ -108,21 +107,19 @@ class VLCTelnetConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth flow.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert self.entry - self.context["title_placeholders"] = {"host": self.entry.data[CONF_HOST]} + self.context["title_placeholders"] = {"host": entry_data[CONF_HOST]} return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauth confirm.""" - assert self.entry errors = {} + reauth_entry = 
self._get_reauth_entry() if user_input is not None: try: - await validate_input(self.hass, {**self.entry.data, **user_input}) + await validate_input(self.hass, {**reauth_entry.data, **user_input}) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: @@ -131,21 +128,14 @@ class VLCTelnetConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, - CONF_PASSWORD: user_input[CONF_PASSWORD], - }, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]}, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", - description_placeholders={CONF_HOST: self.entry.data[CONF_HOST]}, + description_placeholders={CONF_HOST: reauth_entry.data[CONF_HOST]}, data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/vlc_telnet/manifest.json b/homeassistant/components/vlc_telnet/manifest.json index 7a5e00cff21..5041619e84f 100644 --- a/homeassistant/components/vlc_telnet/manifest.json +++ b/homeassistant/components/vlc_telnet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/vlc_telnet", "iot_class": "local_polling", "loggers": ["aiovlc"], - "requirements": ["aiovlc==0.3.2"] + "requirements": ["aiovlc==0.5.1"] } diff --git a/homeassistant/components/vlc_telnet/media_player.py b/homeassistant/components/vlc_telnet/media_player.py index bd58b2ad23a..b95e987aef8 100644 --- a/homeassistant/components/vlc_telnet/media_player.py +++ b/homeassistant/components/vlc_telnet/media_player.py @@ -131,7 +131,7 @@ class VlcDevice(MediaPlayerEntity): self._attr_state = MediaPlayerState.IDLE self._attr_available = True - LOGGER.info("Connected to vlc host: %s", self._vlc.host) + LOGGER.debug("Connected to vlc host: %s", self._vlc.host) status = await self._vlc.status() LOGGER.debug("Status: %s", status) @@ -175,13 +175,13 @@ class VlcDevice(MediaPlayerEntity): # Fall back to filename. 
if data_info := data.get("data"): - self._attr_media_title = _get_str(data_info, "filename") + media_title = _get_str(data_info, "filename") # Strip out auth signatures if streaming local media - if (media_title := self.media_title) and ( - pos := media_title.find("?authSig=") - ) != -1: + if media_title and (pos := media_title.find("?authSig=")) != -1: self._attr_media_title = media_title[:pos] + else: + self._attr_media_title = media_title @catch_vlc_errors async def async_media_seek(self, position: float) -> None: diff --git a/homeassistant/components/vodafone_station/config_flow.py b/homeassistant/components/vodafone_station/config_flow.py index 6b6adb6a18d..7a80244f8d6 100644 --- a/homeassistant/components/vodafone_station/config_flow.py +++ b/homeassistant/components/vodafone_station/config_flow.py @@ -17,7 +17,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback @@ -60,13 +59,14 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Vodafone Station.""" VERSION = 1 - entry: ConfigEntry | None = None @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> VodafoneStationOptionsFlowHandler: """Get the options flow for this handler.""" - return VodafoneStationOptionsFlowHandler(config_entry) + return VodafoneStationOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -106,21 +106,19 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth flow.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert self.entry - self.context["title_placeholders"] = {"host": self.entry.data[CONF_HOST]} + self.context["title_placeholders"] = {"host": entry_data[CONF_HOST]} return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauth confirm.""" - assert self.entry errors = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: try: - await validate_input(self.hass, {**self.entry.data, **user_input}) + await validate_input(self.hass, {**reauth_entry.data, **user_input}) except aiovodafone_exceptions.AlreadyLogged: errors["base"] = "already_logged" except aiovodafone_exceptions.CannotConnect: @@ -131,27 +129,22 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={ CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", - description_placeholders={CONF_HOST: self.entry.data[CONF_HOST]}, + description_placeholders={CONF_HOST: reauth_entry.data[CONF_HOST]}, data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors, ) -class VodafoneStationOptionsFlowHandler(OptionsFlowWithConfigEntry): +class VodafoneStationOptionsFlowHandler(OptionsFlow): """Handle a option 
flow.""" async def async_step_init( @@ -166,7 +159,7 @@ class VodafoneStationOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONF_CONSIDER_HOME, - default=self.options.get( + default=self.config_entry.options.get( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds() ), ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)) diff --git a/homeassistant/components/vodafone_station/coordinator.py b/homeassistant/components/vodafone_station/coordinator.py index d2f408e355b..e95ca2b5976 100644 --- a/homeassistant/components/vodafone_station/coordinator.py +++ b/homeassistant/components/vodafone_station/coordinator.py @@ -2,6 +2,7 @@ from dataclasses import dataclass from datetime import datetime, timedelta +from json.decoder import JSONDecodeError from typing import Any from aiovodafone import VodafoneStationDevice, VodafoneStationSercommApi, exceptions @@ -107,6 +108,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): exceptions.CannotConnect, exceptions.AlreadyLogged, exceptions.GenericLoginError, + JSONDecodeError, ) as err: raise UpdateFailed(f"Error fetching data: {err!r}") from err except (ConfigEntryAuthFailed, UpdateFailed): diff --git a/homeassistant/components/vodafone_station/device_tracker.py b/homeassistant/components/vodafone_station/device_tracker.py index 85ad834cd23..3e4d7763bff 100644 --- a/homeassistant/components/vodafone_station/device_tracker.py +++ b/homeassistant/components/vodafone_station/device_tracker.py @@ -2,9 +2,7 @@ from __future__ import annotations -from aiovodafone import VodafoneStationDevice - -from homeassistant.components.device_tracker import ScannerEntity, SourceType +from homeassistant.components.device_tracker import ScannerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -63,6 +61,7 @@ class VodafoneStationTracker(CoordinatorEntity[VodafoneStationRouter], ScannerEn """Representation of a Vodafone Station device.""" _attr_translation_key = "device_tracker" + mac_address: str def __init__( self, coordinator: VodafoneStationRouter, device_info: VodafoneStationDeviceInfo @@ -70,43 +69,22 @@ class VodafoneStationTracker(CoordinatorEntity[VodafoneStationRouter], ScannerEn """Initialize a Vodafone Station device.""" super().__init__(coordinator) self._coordinator = coordinator - device = device_info.device - mac = device.mac - self._device_mac = mac + mac = device_info.device.mac + self._attr_mac_address = mac self._attr_unique_id = mac - self._attr_name = device.name or mac.replace(":", "_") + self._attr_hostname = device_info.device.name or mac.replace(":", "_") @property def _device_info(self) -> VodafoneStationDeviceInfo: """Return fresh data for the device.""" - return self.coordinator.data.devices[self._device_mac] - - @property - def _device(self) -> VodafoneStationDevice: - """Return fresh data for the device.""" - return self.coordinator.data.devices[self._device_mac].device + return self.coordinator.data.devices[self.mac_address] @property def is_connected(self) -> bool: """Return true if the device is connected to the network.""" return self._device_info.home - @property - def source_type(self) -> SourceType: - """Return the source type.""" - return SourceType.ROUTER - - @property - def hostname(self) -> str | None: - """Return the hostname of device.""" - return self._attr_name - @property def ip_address(self) -> str | None: """Return the primary ip address of 
the device.""" - return self._device.ip_address - - @property - def mac_address(self) -> str: - """Return the mac address of the device.""" - return self._device_mac + return self._device_info.device.ip_address diff --git a/homeassistant/components/vodafone_station/diagnostics.py b/homeassistant/components/vodafone_station/diagnostics.py new file mode 100644 index 00000000000..e306d6caca2 --- /dev/null +++ b/homeassistant/components/vodafone_station/diagnostics.py @@ -0,0 +1,47 @@ +"""Diagnostics support for Vodafone Station.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import VodafoneStationRouter + +TO_REDACT = {CONF_USERNAME, CONF_PASSWORD} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + + sensors_data = coordinator.data.sensors + return { + "entry": async_redact_data(entry.as_dict(), TO_REDACT), + "device_info": { + "sys_model_name": sensors_data.get("sys_model_name"), + "sys_firmware_version": sensors_data["sys_firmware_version"], + "sys_hardware_version": sensors_data["sys_hardware_version"], + "sys_cpu_usage": sensors_data["sys_cpu_usage"][:-1], + "sys_memory_usage": sensors_data["sys_memory_usage"][:-1], + "sys_reboot_cause": sensors_data["sys_reboot_cause"], + "last_update success": coordinator.last_update_success, + "last_exception": coordinator.last_exception, + "client_devices": [ + { + "hostname": device_info.device.name, + "connection_type": device_info.device.connection_type, + "connected": device_info.device.connected, + "type": device_info.device.type, + } + for _, device_info in coordinator.data.devices.items() + ], + }, + } diff --git a/homeassistant/components/vodafone_station/manifest.json b/homeassistant/components/vodafone_station/manifest.json index 47137fff26c..4acafc8df3a 100644 --- a/homeassistant/components/vodafone_station/manifest.json +++ b/homeassistant/components/vodafone_station/manifest.json @@ -7,6 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiovodafone"], - "quality_scale": "silver", - "requirements": ["aiovodafone==0.6.0"] + "requirements": ["aiovodafone==0.6.1"] } diff --git a/homeassistant/components/vodafone_station/sensor.py b/homeassistant/components/vodafone_station/sensor.py index 2a08a9b2ebe..307fcaf0ea8 100644 --- a/homeassistant/components/vodafone_station/sensor.py +++ b/homeassistant/components/vodafone_station/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass from datetime import datetime -from typing import Any, Final +from typing import Final from homeassistant.components.sensor import ( SensorDeviceClass, @@ -16,32 +16,49 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfDataRate from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import _LOGGER, DOMAIN, LINE_TYPES from 
.coordinator import VodafoneStationRouter NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"] +UPTIME_DEVIATION = 60 @dataclass(frozen=True, kw_only=True) class VodafoneStationEntityDescription(SensorEntityDescription): """Vodafone Station entity description.""" - value: Callable[[Any, Any], Any] = ( - lambda coordinator, key: coordinator.data.sensors[key] - ) + value: Callable[ + [VodafoneStationRouter, str | datetime | float | None, str], + str | datetime | float | None, + ] = lambda coordinator, last_value, key: coordinator.data.sensors[key] is_suitable: Callable[[dict], bool] = lambda val: True -def _calculate_uptime(coordinator: VodafoneStationRouter, key: str) -> datetime: +def _calculate_uptime( + coordinator: VodafoneStationRouter, + last_value: str | datetime | float | None, + key: str, +) -> datetime: """Calculate device uptime.""" - return coordinator.api.convert_uptime(coordinator.data.sensors[key]) + delta_uptime = coordinator.api.convert_uptime(coordinator.data.sensors[key]) + + if ( + not isinstance(last_value, datetime) + or abs((delta_uptime - last_value).total_seconds()) > UPTIME_DEVIATION + ): + return delta_uptime + + return last_value -def _line_connection(coordinator: VodafoneStationRouter, key: str) -> str | None: +def _line_connection( + coordinator: VodafoneStationRouter, + last_value: str | datetime | float | None, + key: str, +) -> str | None: """Identify line type.""" value = coordinator.data.sensors @@ -126,14 +143,18 @@ SENSOR_TYPES: Final = ( translation_key="sys_cpu_usage", native_unit_of_measurement=PERCENTAGE, entity_category=EntityCategory.DIAGNOSTIC, - value=lambda coordinator, key: float(coordinator.data.sensors[key][:-1]), + value=lambda coordinator, last_value, key: float( + coordinator.data.sensors[key][:-1] + ), ), VodafoneStationEntityDescription( key="sys_memory_usage", translation_key="sys_memory_usage", native_unit_of_measurement=PERCENTAGE, entity_category=EntityCategory.DIAGNOSTIC, - value=lambda coordinator, key: float(coordinator.data.sensors[key][:-1]), + value=lambda coordinator, last_value, key: float( + coordinator.data.sensors[key][:-1] + ), ), VodafoneStationEntityDescription( key="sys_reboot_cause", @@ -178,10 +199,12 @@ class VodafoneStationSensorEntity( self.entity_description = description self._attr_device_info = coordinator.device_info self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + self._old_state: str | datetime | float | None = None @property - def native_value(self) -> StateType: + def native_value(self) -> str | datetime | float | None: """Sensor value.""" - return self.entity_description.value( - self.coordinator, self.entity_description.key + self._old_state = self.entity_description.value( + self.coordinator, self._old_state, self.entity_description.key ) + return self._old_state diff --git a/homeassistant/components/voicerss/manifest.json b/homeassistant/components/voicerss/manifest.json index bfc61365dc0..1e7da9d220d 100644 --- a/homeassistant/components/voicerss/manifest.json +++ b/homeassistant/components/voicerss/manifest.json @@ -3,5 +3,6 @@ "name": "VoiceRSS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/voicerss", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/voip/__init__.py b/homeassistant/components/voip/__init__.py index 9ab6a8bf0e8..cee0cbb0766 100644 --- a/homeassistant/components/voip/__init__.py +++ b/homeassistant/components/voip/__init__.py @@ -20,6 +20,7 @@ from .devices 
import VoIPDevices from .voip import HassVoipDatagramProtocol PLATFORMS = ( + Platform.ASSIST_SATELLITE, Platform.BINARY_SENSOR, Platform.SELECT, Platform.SWITCH, diff --git a/homeassistant/components/voip/assist_satellite.py b/homeassistant/components/voip/assist_satellite.py new file mode 100644 index 00000000000..0100435d6dc --- /dev/null +++ b/homeassistant/components/voip/assist_satellite.py @@ -0,0 +1,326 @@ +"""Assist satellite entity for VoIP integration.""" + +from __future__ import annotations + +import asyncio +from enum import IntFlag +from functools import partial +import io +import logging +from pathlib import Path +from typing import TYPE_CHECKING, Any, Final +import wave + +from voip_utils import RtpDatagramProtocol + +from homeassistant.components import tts +from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType +from homeassistant.components.assist_satellite import ( + AssistSatelliteConfiguration, + AssistSatelliteEntity, + AssistSatelliteEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import Context, HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import CHANNELS, DOMAIN, RATE, RTP_AUDIO_SETTINGS, WIDTH +from .devices import VoIPDevice +from .entity import VoIPEntity + +if TYPE_CHECKING: + from . import DomainData + +_LOGGER = logging.getLogger(__name__) + +_PIPELINE_TIMEOUT_SEC: Final = 30 + + +class Tones(IntFlag): + """Feedback tones for specific events.""" + + LISTENING = 1 + PROCESSING = 2 + ERROR = 4 + + +_TONE_FILENAMES: dict[Tones, str] = { + Tones.LISTENING: "tone.pcm", + Tones.PROCESSING: "processing.pcm", + Tones.ERROR: "error.pcm", +} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up VoIP Assist satellite entity.""" + domain_data: DomainData = hass.data[DOMAIN] + + @callback + def async_add_device(device: VoIPDevice) -> None: + """Add device.""" + async_add_entities([VoipAssistSatellite(hass, device, config_entry)]) + + domain_data.devices.async_add_new_device_listener(async_add_device) + + entities: list[VoIPEntity] = [ + VoipAssistSatellite(hass, device, config_entry) + for device in domain_data.devices + ] + + async_add_entities(entities) + + +class VoipAssistSatellite(VoIPEntity, AssistSatelliteEntity, RtpDatagramProtocol): + """Assist satellite for VoIP devices.""" + + entity_description = AssistSatelliteEntityDescription(key="assist_satellite") + _attr_translation_key = "assist_satellite" + _attr_name = None + + def __init__( + self, + hass: HomeAssistant, + voip_device: VoIPDevice, + config_entry: ConfigEntry, + tones=Tones.LISTENING | Tones.PROCESSING | Tones.ERROR, + ) -> None: + """Initialize an Assist satellite.""" + VoIPEntity.__init__(self, voip_device) + AssistSatelliteEntity.__init__(self) + RtpDatagramProtocol.__init__(self) + + self.config_entry = config_entry + + self._audio_queue: asyncio.Queue[bytes | None] = asyncio.Queue() + self._audio_chunk_timeout: float = 2.0 + self._run_pipeline_task: asyncio.Task | None = None + self._pipeline_had_error: bool = False + self._tts_done = asyncio.Event() + self._tts_extra_timeout: float = 1.0 + self._tone_bytes: dict[Tones, bytes] = {} + self._tones = tones + self._processing_tone_done = asyncio.Event() + + @property + def pipeline_entity_id(self) -> str | None: + """Return the entity ID of the pipeline to use for the next conversation.""" + return 
self.voip_device.get_pipeline_entity_id(self.hass) + + @property + def vad_sensitivity_entity_id(self) -> str | None: + """Return the entity ID of the VAD sensitivity to use for the next conversation.""" + return self.voip_device.get_vad_sensitivity_entity_id(self.hass) + + @property + def tts_options(self) -> dict[str, Any] | None: + """Options passed for text-to-speech.""" + return { + tts.ATTR_PREFERRED_FORMAT: "wav", + tts.ATTR_PREFERRED_SAMPLE_RATE: 16000, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + } + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + self.voip_device.protocol = self + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + assert self.voip_device.protocol == self + self.voip_device.protocol = None + + @callback + def async_get_configuration( + self, + ) -> AssistSatelliteConfiguration: + """Get the current satellite configuration.""" + raise NotImplementedError + + async def async_set_configuration( + self, config: AssistSatelliteConfiguration + ) -> None: + """Set the current satellite configuration.""" + raise NotImplementedError + + # ------------------------------------------------------------------------- + # VoIP + # ------------------------------------------------------------------------- + + def on_chunk(self, audio_bytes: bytes) -> None: + """Handle raw audio chunk.""" + if self._run_pipeline_task is None: + # Run pipeline until voice command finishes, then start over + self._clear_audio_queue() + self._tts_done.clear() + self._run_pipeline_task = self.config_entry.async_create_background_task( + self.hass, + self._run_pipeline(), + "voip_pipeline_run", + ) + + self._audio_queue.put_nowait(audio_bytes) + + async def _run_pipeline(self) -> None: + _LOGGER.debug("Starting pipeline") + + self.async_set_context(Context(user_id=self.config_entry.data["user"])) + self.voip_device.set_is_active(True) + + async def stt_stream(): + while True: + async with asyncio.timeout(self._audio_chunk_timeout): + chunk = await self._audio_queue.get() + if not chunk: + break + + yield chunk + + # Play listening tone at the start of each cycle + await self._play_tone(Tones.LISTENING, silence_before=0.2) + + try: + await self.async_accept_pipeline_from_satellite( + audio_stream=stt_stream(), + ) + + if self._pipeline_had_error: + self._pipeline_had_error = False + await self._play_tone(Tones.ERROR) + else: + # Block until TTS is done speaking. + # + # This is set in _send_tts and has a timeout that's based on the + # length of the TTS audio. 
+ await self._tts_done.wait() + except TimeoutError: + self.disconnect() # caller hung up + finally: + # Stop audio stream + await self._audio_queue.put(None) + + self.voip_device.set_is_active(False) + self._run_pipeline_task = None + _LOGGER.debug("Pipeline finished") + + def _clear_audio_queue(self) -> None: + """Ensure audio queue is empty.""" + while not self._audio_queue.empty(): + self._audio_queue.get_nowait() + + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Set state based on pipeline stage.""" + if event.type == PipelineEventType.STT_END: + if (self._tones & Tones.PROCESSING) == Tones.PROCESSING: + self._processing_tone_done.clear() + self.config_entry.async_create_background_task( + self.hass, self._play_tone(Tones.PROCESSING), "voip_process_tone" + ) + elif event.type == PipelineEventType.TTS_END: + # Send TTS audio to caller over RTP + if event.data and (tts_output := event.data["tts_output"]): + media_id = tts_output["media_id"] + self.config_entry.async_create_background_task( + self.hass, + self._send_tts(media_id), + "voip_pipeline_tts", + ) + else: + # Empty TTS response + self._tts_done.set() + elif event.type == PipelineEventType.ERROR: + # Play error tone instead of wait for TTS when pipeline is finished. + self._pipeline_had_error = True + _LOGGER.warning(event) + + async def _send_tts(self, media_id: str) -> None: + """Send TTS audio to caller via RTP.""" + try: + if self.transport is None: + return # not connected + + extension, data = await tts.async_get_media_source_audio( + self.hass, + media_id, + ) + + if extension != "wav": + raise ValueError(f"Only WAV audio can be streamed, got {extension}") + + if (self._tones & Tones.PROCESSING) == Tones.PROCESSING: + # Don't overlap TTS and processing beep + _LOGGER.debug("Waiting for processing tone") + await self._processing_tone_done.wait() + + with io.BytesIO(data) as wav_io: + with wave.open(wav_io, "rb") as wav_file: + sample_rate = wav_file.getframerate() + sample_width = wav_file.getsampwidth() + sample_channels = wav_file.getnchannels() + + if ( + (sample_rate != RATE) + or (sample_width != WIDTH) + or (sample_channels != CHANNELS) + ): + raise ValueError( + f"Expected rate/width/channels as {RATE}/{WIDTH}/{CHANNELS}," + f" got {sample_rate}/{sample_width}/{sample_channels}" + ) + + audio_bytes = wav_file.readframes(wav_file.getnframes()) + + _LOGGER.debug("Sending %s byte(s) of audio", len(audio_bytes)) + + # Time out 1 second after TTS audio should be finished + tts_samples = len(audio_bytes) / (WIDTH * CHANNELS) + tts_seconds = tts_samples / RATE + + async with asyncio.timeout(tts_seconds + self._tts_extra_timeout): + # TTS audio is 16Khz 16-bit mono + await self._async_send_audio(audio_bytes) + except TimeoutError: + _LOGGER.warning("TTS timeout") + raise + finally: + # Update satellite state + self.tts_response_finished() + + # Signal pipeline to restart + self._tts_done.set() + + async def _async_send_audio(self, audio_bytes: bytes, **kwargs): + """Send audio in executor.""" + await self.hass.async_add_executor_job( + partial(self.send_audio, audio_bytes, **RTP_AUDIO_SETTINGS, **kwargs) + ) + + async def _play_tone(self, tone: Tones, silence_before: float = 0.0) -> None: + """Play a tone as feedback to the user if it's enabled.""" + if (self._tones & tone) != tone: + return # not enabled + + if tone not in self._tone_bytes: + # Do I/O in executor + self._tone_bytes[tone] = await self.hass.async_add_executor_job( + self._load_pcm, + _TONE_FILENAMES[tone], + ) + + await 
self._async_send_audio( + self._tone_bytes[tone], + silence_before=silence_before, + ) + + if tone == Tones.PROCESSING: + self._processing_tone_done.set() + + def _load_pcm(self, file_name: str) -> bytes: + """Load raw audio (16Khz, 16-bit mono).""" + return (Path(__file__).parent / file_name).read_bytes() diff --git a/homeassistant/components/voip/binary_sensor.py b/homeassistant/components/voip/binary_sensor.py index 8eeefbd5d94..f38b228c46c 100644 --- a/homeassistant/components/voip/binary_sensor.py +++ b/homeassistant/components/voip/binary_sensor.py @@ -10,6 +10,7 @@ from homeassistant.components.binary_sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -42,6 +43,7 @@ class VoIPCallInProgress(VoIPEntity, BinarySensorEntity): """Entity to represent voip call is in progress.""" entity_description = BinarySensorEntityDescription( + entity_registry_enabled_default=False, key="call_in_progress", translation_key="call_in_progress", ) @@ -51,10 +53,44 @@ class VoIPCallInProgress(VoIPEntity, BinarySensorEntity): """Call when entity about to be added to hass.""" await super().async_added_to_hass() - self.async_on_remove(self._device.async_listen_update(self._is_active_changed)) + self.async_on_remove( + self.voip_device.async_listen_update(self._is_active_changed) + ) + + await super().async_added_to_hass() + if TYPE_CHECKING: + assert self.registry_entry is not None + ir.async_create_issue( + self.hass, + DOMAIN, + f"assist_in_progress_deprecated_{self.registry_entry.id}", + breaks_in_ha_version="2025.4", + data={ + "entity_id": self.entity_id, + "entity_uuid": self.registry_entry.id, + "integration_name": "VoIP", + }, + is_fixable=True, + severity=ir.IssueSeverity.WARNING, + translation_key="assist_in_progress_deprecated", + translation_placeholders={ + "integration_name": "VoIP", + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Remove issue.""" + await super().async_will_remove_from_hass() + if TYPE_CHECKING: + assert self.registry_entry is not None + ir.async_delete_issue( + self.hass, + DOMAIN, + f"assist_in_progress_deprecated_{self.registry_entry.id}", + ) @callback def _is_active_changed(self, device: VoIPDevice) -> None: """Call when active state changed.""" - self._attr_is_on = self._device.is_active + self._attr_is_on = self.voip_device.is_active self.async_write_ha_state() diff --git a/homeassistant/components/voip/config_flow.py b/homeassistant/components/voip/config_flow.py index 821c7f29a1e..63dcb8f86ee 100644 --- a/homeassistant/components/voip/config_flow.py +++ b/homeassistant/components/voip/config_flow.py @@ -47,16 +47,12 @@ class VoIPConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return VoipOptionsFlowHandler(config_entry) + return VoipOptionsFlowHandler() class VoipOptionsFlowHandler(OptionsFlow): """Handle VoIP options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/voip/devices.py b/homeassistant/components/voip/devices.py index 4e2dca15308..613d05fc614 100644 --- a/homeassistant/components/voip/devices.py +++ 
b/homeassistant/components/voip/devices.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable, Iterator from dataclasses import dataclass, field -from voip_utils import CallInfo +from voip_utils import CallInfo, VoipDatagramProtocol from homeassistant.config_entries import ConfigEntry from homeassistant.core import Event, HomeAssistant, callback @@ -22,6 +22,7 @@ class VoIPDevice: device_id: str is_active: bool = False update_listeners: list[Callable[[VoIPDevice], None]] = field(default_factory=list) + protocol: VoipDatagramProtocol | None = None @callback def set_is_active(self, active: bool) -> None: @@ -56,6 +57,18 @@ class VoIPDevice: return False + def get_pipeline_entity_id(self, hass: HomeAssistant) -> str | None: + """Return entity id for pipeline select.""" + ent_reg = er.async_get(hass) + return ent_reg.async_get_entity_id("select", DOMAIN, f"{self.voip_id}-pipeline") + + def get_vad_sensitivity_entity_id(self, hass: HomeAssistant) -> str | None: + """Return entity id for VAD sensitivity.""" + ent_reg = er.async_get(hass) + return ent_reg.async_get_entity_id( + "select", DOMAIN, f"{self.voip_id}-vad_sensitivity" + ) + class VoIPDevices: """Class to store devices.""" diff --git a/homeassistant/components/voip/entity.py b/homeassistant/components/voip/entity.py index 9e1e067b195..e96784bc218 100644 --- a/homeassistant/components/voip/entity.py +++ b/homeassistant/components/voip/entity.py @@ -15,10 +15,10 @@ class VoIPEntity(entity.Entity): _attr_has_entity_name = True _attr_should_poll = False - def __init__(self, device: VoIPDevice) -> None: + def __init__(self, voip_device: VoIPDevice) -> None: """Initialize VoIP entity.""" - self._device = device - self._attr_unique_id = f"{device.voip_id}-{self.entity_description.key}" + self.voip_device = voip_device + self._attr_unique_id = f"{voip_device.voip_id}-{self.entity_description.key}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, device.voip_id)}, + identifiers={(DOMAIN, voip_device.voip_id)}, ) diff --git a/homeassistant/components/voip/manifest.json b/homeassistant/components/voip/manifest.json index 594abc69c13..7dd2e797058 100644 --- a/homeassistant/components/voip/manifest.json +++ b/homeassistant/components/voip/manifest.json @@ -3,9 +3,9 @@ "name": "Voice over IP", "codeowners": ["@balloob", "@synesthesiam"], "config_flow": true, - "dependencies": ["assist_pipeline"], + "dependencies": ["assist_pipeline", "assist_satellite"], "documentation": "https://www.home-assistant.io/integrations/voip", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["voip-utils==0.1.0"] + "requirements": ["voip-utils==0.2.1"] } diff --git a/homeassistant/components/voip/repairs.py b/homeassistant/components/voip/repairs.py new file mode 100644 index 00000000000..11cacbb7486 --- /dev/null +++ b/homeassistant/components/voip/repairs.py @@ -0,0 +1,22 @@ +"""Repairs implementation for the VoIP integration.""" + +from __future__ import annotations + +from homeassistant.components.assist_pipeline.repair_flows import ( + AssistInProgressDeprecatedRepairFlow, +) +from homeassistant.components.repairs import RepairsFlow +from homeassistant.core import HomeAssistant + + +async def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, +) -> RepairsFlow: + """Create flow.""" + if issue_id.startswith("assist_in_progress_deprecated"): + return AssistInProgressDeprecatedRepairFlow(data) + # If VoIP adds confirm-only repairs in 
the future, this should be changed + # to return a ConfirmRepairFlow instead of raising a ValueError + raise ValueError(f"unknown repair {issue_id}") diff --git a/homeassistant/components/voip/strings.json b/homeassistant/components/voip/strings.json index 8bcbb06d4e2..c25c22f3f80 100644 --- a/homeassistant/components/voip/strings.json +++ b/homeassistant/components/voip/strings.json @@ -37,6 +37,18 @@ } } }, + "issues": { + "assist_in_progress_deprecated": { + "title": "[%key:component::assist_pipeline::issues::assist_in_progress_deprecated::title%]", + "fix_flow": { + "step": { + "confirm_disable_entity": { + "description": "[%key:component::assist_pipeline::issues::assist_in_progress_deprecated::fix_flow::step::confirm_disable_entity::description%]" + } + } + } + } + }, "options": { "step": { "init": { diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index be1e58b6eec..6f6cf989d3b 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -3,15 +3,11 @@ from __future__ import annotations import asyncio -from collections import deque -from collections.abc import AsyncIterable, MutableSequence, Sequence from functools import partial -import io import logging from pathlib import Path import time from typing import TYPE_CHECKING -import wave from voip_utils import ( CallInfo, @@ -21,33 +17,19 @@ from voip_utils import ( VoipDatagramProtocol, ) -from homeassistant.components import assist_pipeline, stt, tts from homeassistant.components.assist_pipeline import ( Pipeline, - PipelineEvent, - PipelineEventType, PipelineNotFound, async_get_pipeline, - async_pipeline_from_audio_stream, select as pipeline_select, ) -from homeassistant.components.assist_pipeline.audio_enhancer import ( - AudioEnhancer, - MicroVadSpeexEnhancer, -) -from homeassistant.components.assist_pipeline.vad import ( - AudioBuffer, - VadSensitivity, - VoiceCommandSegmenter, -) from homeassistant.const import __version__ -from homeassistant.core import Context, HomeAssistant -from homeassistant.util.ulid import ulid_now +from homeassistant.core import HomeAssistant from .const import CHANNELS, DOMAIN, RATE, RTP_AUDIO_SETTINGS, WIDTH if TYPE_CHECKING: - from .devices import VoIPDevice, VoIPDevices + from .devices import VoIPDevices _LOGGER = logging.getLogger(__name__) @@ -60,11 +42,8 @@ def make_protocol( ) -> VoipDatagramProtocol: """Plays a pre-recorded message if pipeline is misconfigured.""" voip_device = devices.async_get_or_create(call_info) - pipeline_id = pipeline_select.get_chosen_pipeline( - hass, - DOMAIN, - voip_device.voip_id, - ) + + pipeline_id = pipeline_select.get_chosen_pipeline(hass, DOMAIN, voip_device.voip_id) try: pipeline: Pipeline | None = async_get_pipeline(hass, pipeline_id) except PipelineNotFound: @@ -83,22 +62,18 @@ def make_protocol( rtcp_state=rtcp_state, ) - vad_sensitivity = pipeline_select.get_vad_sensitivity( - hass, - DOMAIN, - voip_device.voip_id, - ) + if (protocol := voip_device.protocol) is None: + raise ValueError("VoIP satellite not found") - # Pipeline is properly configured - return PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(user_id=devices.config_entry.data["user"]), - opus_payload_type=call_info.opus_payload_type, - silence_seconds=VadSensitivity.to_seconds(vad_sensitivity), - rtcp_state=rtcp_state, - ) + protocol._rtp_input.opus_payload_type = call_info.opus_payload_type # noqa: SLF001 + protocol._rtp_output.opus_payload_type = call_info.opus_payload_type # 
noqa: SLF001 + + protocol.rtcp_state = rtcp_state + if protocol.rtcp_state is not None: + # Automatically disconnect when BYE is received over RTCP + protocol.rtcp_state.bye_callback = protocol.disconnect + + return protocol class HassVoipDatagramProtocol(VoipDatagramProtocol): @@ -143,372 +118,6 @@ class HassVoipDatagramProtocol(VoipDatagramProtocol): await self._closed_event.wait() -class PipelineRtpDatagramProtocol(RtpDatagramProtocol): - """Run a voice assistant pipeline in a loop for a VoIP call.""" - - def __init__( - self, - hass: HomeAssistant, - language: str, - voip_device: VoIPDevice, - context: Context, - opus_payload_type: int, - pipeline_timeout: float = 30.0, - audio_timeout: float = 2.0, - buffered_chunks_before_speech: int = 100, - listening_tone_enabled: bool = True, - processing_tone_enabled: bool = True, - error_tone_enabled: bool = True, - tone_delay: float = 0.2, - tts_extra_timeout: float = 1.0, - silence_seconds: float = 1.0, - rtcp_state: RtcpState | None = None, - ) -> None: - """Set up pipeline RTP server.""" - super().__init__( - rate=RATE, - width=WIDTH, - channels=CHANNELS, - opus_payload_type=opus_payload_type, - rtcp_state=rtcp_state, - ) - - self.hass = hass - self.language = language - self.voip_device = voip_device - self.pipeline: Pipeline | None = None - self.pipeline_timeout = pipeline_timeout - self.audio_timeout = audio_timeout - self.buffered_chunks_before_speech = buffered_chunks_before_speech - self.listening_tone_enabled = listening_tone_enabled - self.processing_tone_enabled = processing_tone_enabled - self.error_tone_enabled = error_tone_enabled - self.tone_delay = tone_delay - self.tts_extra_timeout = tts_extra_timeout - self.silence_seconds = silence_seconds - - self._audio_queue: asyncio.Queue[bytes] = asyncio.Queue() - self._context = context - self._conversation_id: str | None = None - self._pipeline_task: asyncio.Task | None = None - self._tts_done = asyncio.Event() - self._session_id: str | None = None - self._tone_bytes: bytes | None = None - self._processing_bytes: bytes | None = None - self._error_bytes: bytes | None = None - self._pipeline_error: bool = False - - def connection_made(self, transport): - """Server is ready.""" - super().connection_made(transport) - self.voip_device.set_is_active(True) - - def connection_lost(self, exc): - """Handle connection is lost or closed.""" - super().connection_lost(exc) - self.voip_device.set_is_active(False) - - def on_chunk(self, audio_bytes: bytes) -> None: - """Handle raw audio chunk.""" - if self._pipeline_task is None: - self._clear_audio_queue() - - # Run pipeline until voice command finishes, then start over - self._pipeline_task = self.hass.async_create_background_task( - self._run_pipeline(), - "voip_pipeline_run", - ) - - self._audio_queue.put_nowait(audio_bytes) - - async def _run_pipeline( - self, - ) -> None: - """Forward audio to pipeline STT and handle TTS.""" - if self._session_id is None: - self._session_id = ulid_now() - - # Play listening tone at the start of each cycle - if self.listening_tone_enabled: - await self._play_listening_tone() - - try: - # Wait for speech before starting pipeline - segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) - audio_enhancer = MicroVadSpeexEnhancer(0, 0, True) - chunk_buffer: deque[bytes] = deque( - maxlen=self.buffered_chunks_before_speech, - ) - speech_detected = await self._wait_for_speech( - segmenter, - audio_enhancer, - chunk_buffer, - ) - if not speech_detected: - _LOGGER.debug("No speech detected") - return - - 
_LOGGER.debug("Starting pipeline") - self._tts_done.clear() - - async def stt_stream(): - try: - async for chunk in self._segment_audio( - segmenter, - audio_enhancer, - chunk_buffer, - ): - yield chunk - - if self.processing_tone_enabled: - await self._play_processing_tone() - except TimeoutError: - # Expected after caller hangs up - _LOGGER.debug("Audio timeout") - self._session_id = None - self.disconnect() - finally: - self._clear_audio_queue() - - # Run pipeline with a timeout - async with asyncio.timeout(self.pipeline_timeout): - await async_pipeline_from_audio_stream( - self.hass, - context=self._context, - event_callback=self._event_callback, - stt_metadata=stt.SpeechMetadata( - language="", # set in async_pipeline_from_audio_stream - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=stt_stream(), - pipeline_id=pipeline_select.get_chosen_pipeline( - self.hass, DOMAIN, self.voip_device.voip_id - ), - conversation_id=self._conversation_id, - device_id=self.voip_device.device_id, - tts_audio_output="wav", - ) - - if self._pipeline_error: - self._pipeline_error = False - if self.error_tone_enabled: - await self._play_error_tone() - else: - # Block until TTS is done speaking. - # - # This is set in _send_tts and has a timeout that's based on the - # length of the TTS audio. - await self._tts_done.wait() - - _LOGGER.debug("Pipeline finished") - except PipelineNotFound: - _LOGGER.warning("Pipeline not found") - except TimeoutError: - # Expected after caller hangs up - _LOGGER.debug("Pipeline timeout") - self._session_id = None - self.disconnect() - finally: - # Allow pipeline to run again - self._pipeline_task = None - - async def _wait_for_speech( - self, - segmenter: VoiceCommandSegmenter, - audio_enhancer: AudioEnhancer, - chunk_buffer: MutableSequence[bytes], - ): - """Buffer audio chunks until speech is detected. - - Returns True if speech was detected, False otherwise. - """ - # Timeout if no audio comes in for a while. - # This means the caller hung up. - async with asyncio.timeout(self.audio_timeout): - chunk = await self._audio_queue.get() - - vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) - - while chunk: - chunk_buffer.append(chunk) - - segmenter.process_with_vad( - chunk, - assist_pipeline.SAMPLES_PER_CHUNK, - lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, - vad_buffer, - ) - if segmenter.in_command: - # Buffer until command starts - if len(vad_buffer) > 0: - chunk_buffer.append(vad_buffer.bytes()) - - return True - - async with asyncio.timeout(self.audio_timeout): - chunk = await self._audio_queue.get() - - return False - - async def _segment_audio( - self, - segmenter: VoiceCommandSegmenter, - audio_enhancer: AudioEnhancer, - chunk_buffer: Sequence[bytes], - ) -> AsyncIterable[bytes]: - """Yield audio chunks until voice command has finished.""" - # Buffered chunks first - for buffered_chunk in chunk_buffer: - yield buffered_chunk - - # Timeout if no audio comes in for a while. - # This means the caller hung up. 
- async with asyncio.timeout(self.audio_timeout): - chunk = await self._audio_queue.get() - - vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) - - while chunk: - if not segmenter.process_with_vad( - chunk, - assist_pipeline.SAMPLES_PER_CHUNK, - lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, - vad_buffer, - ): - # Voice command is finished - break - - yield chunk - - async with asyncio.timeout(self.audio_timeout): - chunk = await self._audio_queue.get() - - def _clear_audio_queue(self) -> None: - while not self._audio_queue.empty(): - self._audio_queue.get_nowait() - - def _event_callback(self, event: PipelineEvent): - if not event.data: - return - - if event.type == PipelineEventType.INTENT_END: - # Capture conversation id - self._conversation_id = event.data["intent_output"]["conversation_id"] - elif event.type == PipelineEventType.TTS_END: - # Send TTS audio to caller over RTP - tts_output = event.data["tts_output"] - if tts_output: - media_id = tts_output["media_id"] - self.hass.async_create_background_task( - self._send_tts(media_id), - "voip_pipeline_tts", - ) - else: - # Empty TTS response - self._tts_done.set() - elif event.type == PipelineEventType.ERROR: - # Play error tone instead of wait for TTS - self._pipeline_error = True - - async def _send_tts(self, media_id: str) -> None: - """Send TTS audio to caller via RTP.""" - try: - if self.transport is None: - return - - extension, data = await tts.async_get_media_source_audio( - self.hass, - media_id, - ) - - if extension != "wav": - raise ValueError(f"Only WAV audio can be streamed, got {extension}") - - with io.BytesIO(data) as wav_io: - with wave.open(wav_io, "rb") as wav_file: - sample_rate = wav_file.getframerate() - sample_width = wav_file.getsampwidth() - sample_channels = wav_file.getnchannels() - - if ( - (sample_rate != RATE) - or (sample_width != WIDTH) - or (sample_channels != CHANNELS) - ): - raise ValueError( - f"Expected rate/width/channels as {RATE}/{WIDTH}/{CHANNELS}," - f" got {sample_rate}/{sample_width}/{sample_channels}" - ) - - audio_bytes = wav_file.readframes(wav_file.getnframes()) - - _LOGGER.debug("Sending %s byte(s) of audio", len(audio_bytes)) - - # Time out 1 second after TTS audio should be finished - tts_samples = len(audio_bytes) / (WIDTH * CHANNELS) - tts_seconds = tts_samples / RATE - - async with asyncio.timeout(tts_seconds + self.tts_extra_timeout): - # TTS audio is 16Khz 16-bit mono - await self._async_send_audio(audio_bytes) - except TimeoutError: - _LOGGER.warning("TTS timeout") - raise - finally: - # Signal pipeline to restart - self._tts_done.set() - - async def _async_send_audio(self, audio_bytes: bytes, **kwargs): - """Send audio in executor.""" - await self.hass.async_add_executor_job( - partial(self.send_audio, audio_bytes, **RTP_AUDIO_SETTINGS, **kwargs) - ) - - async def _play_listening_tone(self) -> None: - """Play a tone to indicate that Home Assistant is listening.""" - if self._tone_bytes is None: - # Do I/O in executor - self._tone_bytes = await self.hass.async_add_executor_job( - self._load_pcm, - "tone.pcm", - ) - - await self._async_send_audio( - self._tone_bytes, - silence_before=self.tone_delay, - ) - - async def _play_processing_tone(self) -> None: - """Play a tone to indicate that Home Assistant is processing the voice command.""" - if self._processing_bytes is None: - # Do I/O in executor - self._processing_bytes = await self.hass.async_add_executor_job( - self._load_pcm, - "processing.pcm", - ) - - await 
self._async_send_audio(self._processing_bytes) - - async def _play_error_tone(self) -> None: - """Play a tone to indicate a pipeline error occurred.""" - if self._error_bytes is None: - # Do I/O in executor - self._error_bytes = await self.hass.async_add_executor_job( - self._load_pcm, - "error.pcm", - ) - - await self._async_send_audio(self._error_bytes) - - def _load_pcm(self, file_name: str) -> bytes: - """Load raw audio (16Khz, 16-bit mono).""" - return (Path(__file__).parent / file_name).read_bytes() - - class PreRecordMessageProtocol(RtpDatagramProtocol): """Plays a pre-recorded message on a loop.""" diff --git a/homeassistant/components/volkszaehler/manifest.json b/homeassistant/components/volkszaehler/manifest.json index e9070d0fa87..1427f330e77 100644 --- a/homeassistant/components/volkszaehler/manifest.json +++ b/homeassistant/components/volkszaehler/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/volkszaehler", "iot_class": "local_polling", "loggers": ["volkszaehler"], + "quality_scale": "legacy", "requirements": ["volkszaehler==0.4.0"] } diff --git a/homeassistant/components/volumio/config_flow.py b/homeassistant/components/volumio/config_flow.py index 4c7a48f36c7..7cc58556f3e 100644 --- a/homeassistant/components/volumio/config_flow.py +++ b/homeassistant/components/volumio/config_flow.py @@ -11,7 +11,7 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_ID, CONF_NAME, CONF_PORT -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -25,7 +25,7 @@ DATA_SCHEMA = vol.Schema( ) -async def validate_input(hass, host, port): +async def validate_input(hass: HomeAssistant, host: str, port: int) -> dict[str, Any]: """Validate the user input allows us to connect.""" volumio = Volumio(host, port, async_get_clientsession(hass)) @@ -40,15 +40,13 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize flow.""" - self._host: str | None = None - self._port: int | None = None - self._name: str | None = None - self._uuid: str | None = None + _host: str + _port: int + _name: str + _uuid: str | None @callback - def _async_get_entry(self): + def _async_get_entry(self) -> ConfigFlowResult: return self.async_create_entry( title=self._name, data={ @@ -103,7 +101,7 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle zeroconf discovery.""" self._host = discovery_info.host - self._port = discovery_info.port + self._port = discovery_info.port or 3000 self._name = discovery_info.properties["volumioName"] self._uuid = discovery_info.properties["UUID"] @@ -111,7 +109,9 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_discovery_confirm() - async def async_step_discovery_confirm(self, user_input=None): + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user-confirmation of discovered node.""" if user_input is not None: try: diff --git a/homeassistant/components/volvooncall/__init__.py b/homeassistant/components/volvooncall/__init__.py index 8bade56fa97..9fc07dd92b0 100644 --- a/homeassistant/components/volvooncall/__init__.py +++ 
b/homeassistant/components/volvooncall/__init__.py @@ -1,11 +1,6 @@ """Support for Volvo On Call.""" -import asyncio -import logging - -from aiohttp.client_exceptions import ClientResponseError from volvooncall import Connection -from volvooncall.dashboard import Instrument from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -15,30 +10,17 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) from .const import ( - CONF_MUTABLE, CONF_SCANDINAVIAN_MILES, - DEFAULT_UPDATE_INTERVAL, DOMAIN, PLATFORMS, - UNIT_SYSTEM_IMPERIAL, UNIT_SYSTEM_METRIC, UNIT_SYSTEM_SCANDINAVIAN_MILES, - VOLVO_DISCOVERY_NEW, ) -from .errors import InvalidAuth - -_LOGGER = logging.getLogger(__name__) +from .coordinator import VolvoUpdateCoordinator +from .models import VolvoData async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -87,185 +69,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class VolvoData: - """Hold component state.""" - - def __init__( - self, - hass: HomeAssistant, - connection: Connection, - entry: ConfigEntry, - ) -> None: - """Initialize the component state.""" - self.hass = hass - self.vehicles: set[str] = set() - self.instruments: set[Instrument] = set() - self.config_entry = entry - self.connection = connection - - def instrument(self, vin, component, attr, slug_attr): - """Return corresponding instrument.""" - return next( - instrument - for instrument in self.instruments - if instrument.vehicle.vin == vin - and instrument.component == component - and instrument.attr == attr - and instrument.slug_attr == slug_attr - ) - - def vehicle_name(self, vehicle): - """Provide a friendly name for a vehicle.""" - if vehicle.registration_number and vehicle.registration_number != "UNKNOWN": - return vehicle.registration_number - if vehicle.vin: - return vehicle.vin - return "Volvo" - - def discover_vehicle(self, vehicle): - """Load relevant platforms.""" - self.vehicles.add(vehicle.vin) - - dashboard = vehicle.dashboard( - mutable=self.config_entry.data[CONF_MUTABLE], - scandinavian_miles=( - self.config_entry.data[CONF_UNIT_SYSTEM] - == UNIT_SYSTEM_SCANDINAVIAN_MILES - ), - usa_units=( - self.config_entry.data[CONF_UNIT_SYSTEM] == UNIT_SYSTEM_IMPERIAL - ), - ) - - for instrument in ( - instrument - for instrument in dashboard.instruments - if instrument.component in PLATFORMS - ): - self.instruments.add(instrument) - async_dispatcher_send(self.hass, VOLVO_DISCOVERY_NEW, [instrument]) - - async def update(self): - """Update status from the online service.""" - try: - await self.connection.update(journal=True) - except ClientResponseError as ex: - if ex.status == 401: - raise ConfigEntryAuthFailed(ex) from ex - raise UpdateFailed(ex) from ex - - for vehicle in self.connection.vehicles: - if vehicle.vin not in self.vehicles: - self.discover_vehicle(vehicle) - - async def auth_is_valid(self): - """Check if provided username/password/region authenticate.""" - try: - await self.connection.get("customeraccounts") - except 
ClientResponseError as exc: - raise InvalidAuth from exc - - -class VolvoUpdateCoordinator(DataUpdateCoordinator[None]): # pylint: disable=hass-enforce-coordinator-module - """Volvo coordinator.""" - - def __init__(self, hass: HomeAssistant, volvo_data: VolvoData) -> None: - """Initialize the data update coordinator.""" - - super().__init__( - hass, - _LOGGER, - name="volvooncall", - update_interval=DEFAULT_UPDATE_INTERVAL, - ) - - self.volvo_data = volvo_data - - async def _async_update_data(self) -> None: - """Fetch data from API endpoint.""" - - async with asyncio.timeout(10): - await self.volvo_data.update() - - -class VolvoEntity(CoordinatorEntity[VolvoUpdateCoordinator]): - """Base class for all VOC entities.""" - - def __init__( - self, - vin: str, - component: str, - attribute: str, - slug_attr: str, - coordinator: VolvoUpdateCoordinator, - ) -> None: - """Initialize the entity.""" - super().__init__(coordinator) - - self.vin = vin - self.component = component - self.attribute = attribute - self.slug_attr = slug_attr - - @property - def instrument(self): - """Return corresponding instrument.""" - return self.coordinator.volvo_data.instrument( - self.vin, self.component, self.attribute, self.slug_attr - ) - - @property - def icon(self): - """Return the icon.""" - return self.instrument.icon - - @property - def vehicle(self): - """Return vehicle.""" - return self.instrument.vehicle - - @property - def _entity_name(self): - return self.instrument.name - - @property - def _vehicle_name(self): - return self.coordinator.volvo_data.vehicle_name(self.vehicle) - - @property - def name(self): - """Return full name of the entity.""" - return f"{self._vehicle_name} {self._entity_name}" - - @property - def assumed_state(self): - """Return true if unable to access real state of entity.""" - return True - - @property - def device_info(self) -> DeviceInfo: - """Return a inique set of attributes for each vehicle.""" - return DeviceInfo( - identifiers={(DOMAIN, self.vehicle.vin)}, - name=self._vehicle_name, - model=self.vehicle.vehicle_type, - manufacturer="Volvo", - ) - - @property - def extra_state_attributes(self): - """Return device specific state attributes.""" - return dict( - self.instrument.attributes, - model=f"{self.vehicle.vehicle_type}/{self.vehicle.model_year}", - ) - - @property - def unique_id(self) -> str: - """Return a unique ID.""" - slug_override = "" - if self.instrument.slug_override is not None: - slug_override = f"-{self.instrument.slug_override}" - return f"{self.vin}-{self.component}-{self.attribute}{slug_override}" diff --git a/homeassistant/components/volvooncall/binary_sensor.py b/homeassistant/components/volvooncall/binary_sensor.py index 604dc2313bf..e6104f8d87c 100644 --- a/homeassistant/components/volvooncall/binary_sensor.py +++ b/homeassistant/components/volvooncall/binary_sensor.py @@ -16,8 +16,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW +from .coordinator import VolvoUpdateCoordinator +from .entity import VolvoEntity async def async_setup_entry( diff --git a/homeassistant/components/volvooncall/config_flow.py b/homeassistant/components/volvooncall/config_flow.py index 80358a28ced..ccb0a7f62e1 100644 --- a/homeassistant/components/volvooncall/config_flow.py +++ b/homeassistant/components/volvooncall/config_flow.py @@ -9,7 +9,7 @@ from typing import Any import voluptuous as vol from volvooncall import Connection -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_PASSWORD, CONF_REGION, @@ -18,7 +18,6 @@ from homeassistant.const import ( ) from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . import VolvoData from .const import ( CONF_MUTABLE, DOMAIN, @@ -27,6 +26,7 @@ from .const import ( UNIT_SYSTEM_SCANDINAVIAN_MILES, ) from .errors import InvalidAuth +from .models import VolvoData _LOGGER = logging.getLogger(__name__) @@ -35,7 +35,6 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): """VolvoOnCall config flow.""" VERSION = 1 - _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -53,7 +52,7 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: await self.async_set_unique_id(user_input[CONF_USERNAME]) - if not self._reauth_entry: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() try: @@ -64,21 +63,18 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unhandled exception in user step") errors["base"] = "unknown" if not errors: - if self._reauth_entry: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=self._reauth_entry.data | user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - await self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=user_input[CONF_USERNAME], data=user_input ) - elif self._reauth_entry: + elif self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() for key in defaults: - defaults[key] = self._reauth_entry.data.get(key) + defaults[key] = reauth_entry.data.get(key) user_schema = vol.Schema( { @@ -107,12 +103,9 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() async def is_valid(self, user_input): diff --git a/homeassistant/components/volvooncall/coordinator.py b/homeassistant/components/volvooncall/coordinator.py new file mode 100644 index 00000000000..5ac6a58acb0 --- /dev/null +++ b/homeassistant/components/volvooncall/coordinator.py @@ -0,0 +1,34 @@ +"""Support for Volvo On Call.""" + +import asyncio +import logging + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DEFAULT_UPDATE_INTERVAL +from .models import VolvoData + 
+_LOGGER = logging.getLogger(__name__) + + +class VolvoUpdateCoordinator(DataUpdateCoordinator[None]): + """Volvo coordinator.""" + + def __init__(self, hass: HomeAssistant, volvo_data: VolvoData) -> None: + """Initialize the data update coordinator.""" + + super().__init__( + hass, + _LOGGER, + name="volvooncall", + update_interval=DEFAULT_UPDATE_INTERVAL, + ) + + self.volvo_data = volvo_data + + async def _async_update_data(self) -> None: + """Fetch data from API endpoint.""" + + async with asyncio.timeout(10): + await self.volvo_data.update() diff --git a/homeassistant/components/volvooncall/device_tracker.py b/homeassistant/components/volvooncall/device_tracker.py index 51c2f08130b..96fe5a644bb 100644 --- a/homeassistant/components/volvooncall/device_tracker.py +++ b/homeassistant/components/volvooncall/device_tracker.py @@ -4,14 +4,15 @@ from __future__ import annotations from volvooncall.dashboard import Instrument -from homeassistant.components.device_tracker import SourceType, TrackerEntity +from homeassistant.components.device_tracker import TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW +from .coordinator import VolvoUpdateCoordinator +from .entity import VolvoEntity async def async_setup_entry( @@ -60,11 +61,6 @@ class VolvoTrackerEntity(VolvoEntity, TrackerEntity): _, longitude = self._get_pos() return longitude - @property - def source_type(self) -> SourceType | str: - """Return the source type (GPS).""" - return SourceType.GPS - def _get_pos(self) -> tuple[float, float]: volvo_data = self.coordinator.volvo_data instrument = volvo_data.instrument( diff --git a/homeassistant/components/volvooncall/entity.py b/homeassistant/components/volvooncall/entity.py new file mode 100644 index 00000000000..6ebc4bdc754 --- /dev/null +++ b/homeassistant/components/volvooncall/entity.py @@ -0,0 +1,88 @@ +"""Support for Volvo On Call.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import VolvoUpdateCoordinator + + +class VolvoEntity(CoordinatorEntity[VolvoUpdateCoordinator]): + """Base class for all VOC entities.""" + + def __init__( + self, + vin: str, + component: str, + attribute: str, + slug_attr: str, + coordinator: VolvoUpdateCoordinator, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.vin = vin + self.component = component + self.attribute = attribute + self.slug_attr = slug_attr + + @property + def instrument(self): + """Return corresponding instrument.""" + return self.coordinator.volvo_data.instrument( + self.vin, self.component, self.attribute, self.slug_attr + ) + + @property + def icon(self): + """Return the icon.""" + return self.instrument.icon + + @property + def vehicle(self): + """Return vehicle.""" + return self.instrument.vehicle + + @property + def _entity_name(self): + return self.instrument.name + + @property + def _vehicle_name(self): + return self.coordinator.volvo_data.vehicle_name(self.vehicle) + + @property + def name(self): + """Return full name of the entity.""" + return f"{self._vehicle_name} {self._entity_name}" + + @property + def assumed_state(self): + """Return true if unable to 
access real state of entity.""" + return True + + @property + def device_info(self) -> DeviceInfo: + """Return a inique set of attributes for each vehicle.""" + return DeviceInfo( + identifiers={(DOMAIN, self.vehicle.vin)}, + name=self._vehicle_name, + model=self.vehicle.vehicle_type, + manufacturer="Volvo", + ) + + @property + def extra_state_attributes(self): + """Return device specific state attributes.""" + return dict( + self.instrument.attributes, + model=f"{self.vehicle.vehicle_type}/{self.vehicle.model_year}", + ) + + @property + def unique_id(self) -> str: + """Return a unique ID.""" + slug_override = "" + if self.instrument.slug_override is not None: + slug_override = f"-{self.instrument.slug_override}" + return f"{self.vin}-{self.component}-{self.attribute}{slug_override}" diff --git a/homeassistant/components/volvooncall/lock.py b/homeassistant/components/volvooncall/lock.py index cccd64bce05..cff5df35750 100644 --- a/homeassistant/components/volvooncall/lock.py +++ b/homeassistant/components/volvooncall/lock.py @@ -12,8 +12,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW +from .coordinator import VolvoUpdateCoordinator +from .entity import VolvoEntity async def async_setup_entry( diff --git a/homeassistant/components/volvooncall/models.py b/homeassistant/components/volvooncall/models.py new file mode 100644 index 00000000000..159379a908b --- /dev/null +++ b/homeassistant/components/volvooncall/models.py @@ -0,0 +1,100 @@ +"""Support for Volvo On Call.""" + +from aiohttp.client_exceptions import ClientResponseError +from volvooncall import Connection +from volvooncall.dashboard import Instrument + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_UNIT_SYSTEM +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.update_coordinator import UpdateFailed + +from .const import ( + CONF_MUTABLE, + PLATFORMS, + UNIT_SYSTEM_IMPERIAL, + UNIT_SYSTEM_SCANDINAVIAN_MILES, + VOLVO_DISCOVERY_NEW, +) +from .errors import InvalidAuth + + +class VolvoData: + """Hold component state.""" + + def __init__( + self, + hass: HomeAssistant, + connection: Connection, + entry: ConfigEntry, + ) -> None: + """Initialize the component state.""" + self.hass = hass + self.vehicles: set[str] = set() + self.instruments: set[Instrument] = set() + self.config_entry = entry + self.connection = connection + + def instrument(self, vin, component, attr, slug_attr): + """Return corresponding instrument.""" + return next( + instrument + for instrument in self.instruments + if instrument.vehicle.vin == vin + and instrument.component == component + and instrument.attr == attr + and instrument.slug_attr == slug_attr + ) + + def vehicle_name(self, vehicle): + """Provide a friendly name for a vehicle.""" + if vehicle.registration_number and vehicle.registration_number != "UNKNOWN": + return vehicle.registration_number + if vehicle.vin: + return vehicle.vin + return "Volvo" + + def discover_vehicle(self, vehicle): + """Load relevant platforms.""" + self.vehicles.add(vehicle.vin) + + dashboard = vehicle.dashboard( + mutable=self.config_entry.data[CONF_MUTABLE], + scandinavian_miles=( + 
self.config_entry.data[CONF_UNIT_SYSTEM] + == UNIT_SYSTEM_SCANDINAVIAN_MILES + ), + usa_units=( + self.config_entry.data[CONF_UNIT_SYSTEM] == UNIT_SYSTEM_IMPERIAL + ), + ) + + for instrument in ( + instrument + for instrument in dashboard.instruments + if instrument.component in PLATFORMS + ): + self.instruments.add(instrument) + async_dispatcher_send(self.hass, VOLVO_DISCOVERY_NEW, [instrument]) + + async def update(self): + """Update status from the online service.""" + try: + await self.connection.update(journal=True) + except ClientResponseError as ex: + if ex.status == 401: + raise ConfigEntryAuthFailed(ex) from ex + raise UpdateFailed(ex) from ex + + for vehicle in self.connection.vehicles: + if vehicle.vin not in self.vehicles: + self.discover_vehicle(vehicle) + + async def auth_is_valid(self): + """Check if provided username/password/region authenticate.""" + try: + await self.connection.get("customeraccounts") + except ClientResponseError as exc: + raise InvalidAuth from exc diff --git a/homeassistant/components/volvooncall/sensor.py b/homeassistant/components/volvooncall/sensor.py index a46c8671929..9916d37197b 100644 --- a/homeassistant/components/volvooncall/sensor.py +++ b/homeassistant/components/volvooncall/sensor.py @@ -10,8 +10,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW +from .coordinator import VolvoUpdateCoordinator +from .entity import VolvoEntity async def async_setup_entry( diff --git a/homeassistant/components/volvooncall/switch.py b/homeassistant/components/volvooncall/switch.py index 23bc452ef66..7e60f47fb44 100644 --- a/homeassistant/components/volvooncall/switch.py +++ b/homeassistant/components/volvooncall/switch.py @@ -12,8 +12,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW +from .coordinator import VolvoUpdateCoordinator +from .entity import VolvoEntity async def async_setup_entry( diff --git a/homeassistant/components/vulcan/calendar.py b/homeassistant/components/vulcan/calendar.py index e068a772345..a89b6b4a116 100644 --- a/homeassistant/components/vulcan/calendar.py +++ b/homeassistant/components/vulcan/calendar.py @@ -133,7 +133,7 @@ class VulcanCalendarEntity(CalendarEntity): events = await get_lessons(self.client) if not self.available: - _LOGGER.info("Restored connection with API") + _LOGGER.warning("Restored connection with API") self._attr_available = True if events == []: diff --git a/homeassistant/components/vulcan/config_flow.py b/homeassistant/components/vulcan/config_flow.py index 5938e4ce690..f02adba9f75 100644 --- a/homeassistant/components/vulcan/config_flow.py +++ b/homeassistant/components/vulcan/config_flow.py @@ -2,7 +2,7 @@ from collections.abc import Mapping import logging -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp import ClientConnectionError import voluptuous as vol @@ -16,6 +16,7 @@ from vulcan import ( UnauthorizedCertificateException, Vulcan, ) +from vulcan.model import Student from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PIN, CONF_REGION, CONF_TOKEN @@ -38,11 +39,12 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + account: Account + keystore: Keystore + def __init__(self) -> None: """Initialize config flow.""" - self.account = None - self.keystore = None - self.students = None + self.students: list[Student] | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -53,13 +55,16 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_auth() - async def async_step_auth(self, user_input=None, errors=None): + async def async_step_auth( + self, + user_input: dict[str, str] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Authorize integration.""" if user_input is not None: try: credentials = await register( - self.hass, user_input[CONF_TOKEN], user_input[CONF_REGION], user_input[CONF_PIN], @@ -107,16 +112,20 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_select_student(self, user_input=None): + async def async_step_select_student( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Allow user to select student.""" - errors = {} - students = {} + errors: dict[str, str] = {} + students: dict[str, str] = {} if self.students is not None: for student in self.students: students[str(student.pupil.id)] = ( f"{student.pupil.first_name} {student.pupil.last_name}" ) if user_input is not None: + if TYPE_CHECKING: + assert self.keystore is not None student_id = user_input["student"] await self.async_set_unique_id(str(student_id)) self._abort_if_unique_id_configured() @@ -135,17 +144,25 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_select_saved_credentials(self, user_input=None, errors=None): + async def async_step_select_saved_credentials( + self, + user_input: dict[str, str] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Allow user to select saved credentials.""" - credentials = {} + credentials: dict[str, Any] = {} for entry in self.hass.config_entries.async_entries(DOMAIN): 
credentials[entry.entry_id] = entry.data["account"]["UserName"] if user_input is not None: - entry = self.hass.config_entries.async_get_entry(user_input["credentials"]) - keystore = Keystore.load(entry.data["keystore"]) - account = Account.load(entry.data["account"]) + existing_entry = self.hass.config_entries.async_get_entry( + user_input["credentials"] + ) + if TYPE_CHECKING: + assert existing_entry is not None + keystore = Keystore.load(existing_entry.data["keystore"]) + account = Account.load(existing_entry.data["account"]) client = Vulcan(keystore, account, async_get_clientsession(self.hass)) try: students = await client.get_students() @@ -189,12 +206,14 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_add_next_config_entry(self, user_input=None): + async def async_step_add_next_config_entry( + self, user_input: dict[str, bool] | None = None + ) -> ConfigFlowResult: """Flow initialized when user is adding next entry of that integration.""" existing_entries = self.hass.config_entries.async_entries(DOMAIN) - errors = {} + errors: dict[str, str] = {} if user_input is not None: if not user_input["use_saved_credentials"]: @@ -248,13 +267,14 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): """Perform reauth upon an API authentication error.""" return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Reauthorize integration.""" errors = {} if user_input is not None: try: credentials = await register( - self.hass, user_input[CONF_TOKEN], user_input[CONF_REGION], user_input[CONF_PIN], diff --git a/homeassistant/components/vulcan/manifest.json b/homeassistant/components/vulcan/manifest.json index 47ab7ec53cb..554a82e9c2c 100644 --- a/homeassistant/components/vulcan/manifest.json +++ b/homeassistant/components/vulcan/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/vulcan", "iot_class": "cloud_polling", - "quality_scale": "silver", "requirements": ["vulcan-api==2.3.2"] } diff --git a/homeassistant/components/vulcan/register.py b/homeassistant/components/vulcan/register.py index 67cceb8d7b8..a3dec97f622 100644 --- a/homeassistant/components/vulcan/register.py +++ b/homeassistant/components/vulcan/register.py @@ -1,9 +1,11 @@ """Support for register Vulcan account.""" +from typing import Any + from vulcan import Account, Keystore -async def register(hass, token, symbol, pin): +async def register(token: str, symbol: str, pin: str) -> dict[str, Any]: """Register integration and save credentials.""" keystore = await Keystore.create(device_model="Home Assistant") account = await Account.register(keystore, token, symbol, pin) diff --git a/homeassistant/components/vultr/manifest.json b/homeassistant/components/vultr/manifest.json index dc3cd3571eb..713485e7931 100644 --- a/homeassistant/components/vultr/manifest.json +++ b/homeassistant/components/vultr/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/vultr", "iot_class": "cloud_polling", "loggers": ["vultr"], + "quality_scale": "legacy", "requirements": ["vultr==0.1.2"] } diff --git a/homeassistant/components/w800rf32/manifest.json b/homeassistant/components/w800rf32/manifest.json index 769eb96b3c0..4d5074e72c2 100644 --- a/homeassistant/components/w800rf32/manifest.json +++ b/homeassistant/components/w800rf32/manifest.json @@ -5,5 
+5,6 @@ "documentation": "https://www.home-assistant.io/integrations/w800rf32", "iot_class": "local_push", "loggers": ["W800rf32"], + "quality_scale": "legacy", "requirements": ["pyW800rf32==0.4"] } diff --git a/homeassistant/components/wake_on_lan/button.py b/homeassistant/components/wake_on_lan/button.py index 87135a61380..4d6b19bdd8e 100644 --- a/homeassistant/components/wake_on_lan/button.py +++ b/homeassistant/components/wake_on_lan/button.py @@ -60,7 +60,6 @@ class WolButton(ButtonEntity): self._attr_unique_id = dr.format_mac(mac_address) self._attr_device_info = dr.DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, self._attr_unique_id)}, - default_manufacturer="Wake on LAN", default_name=name, ) diff --git a/homeassistant/components/wake_on_lan/icons.json b/homeassistant/components/wake_on_lan/icons.json index 6426c478157..f083b0342f4 100644 --- a/homeassistant/components/wake_on_lan/icons.json +++ b/homeassistant/components/wake_on_lan/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_magic_packet": "mdi:cube-send" + "send_magic_packet": { + "service": "mdi:cube-send" + } } } diff --git a/homeassistant/components/wake_on_lan/switch.py b/homeassistant/components/wake_on_lan/switch.py index f4949ec6901..fcf8936d498 100644 --- a/homeassistant/components/wake_on_lan/switch.py +++ b/homeassistant/components/wake_on_lan/switch.py @@ -113,7 +113,7 @@ class WolSwitch(SwitchEntity): if self._broadcast_port is not None: service_kwargs["port"] = self._broadcast_port - _LOGGER.info( + _LOGGER.debug( "Send magic packet to mac %s (broadcast: %s, port: %s)", self._mac_address, self._broadcast_address, diff --git a/homeassistant/components/wake_word/__init__.py b/homeassistant/components/wake_word/__init__.py index 5ce592aacd8..8b3a5bbf331 100644 --- a/homeassistant/components/wake_word/__init__.py +++ b/homeassistant/components/wake_word/__init__.py @@ -19,6 +19,7 @@ from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util +from homeassistant.util.hass_dict import HassKey from .const import DOMAIN from .models import DetectionResult, WakeWord @@ -35,6 +36,7 @@ __all__ = [ _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +DATA_COMPONENT: HassKey[EntityComponent[WakeWordDetectionEntity]] = HassKey(DOMAIN) TIMEOUT_FETCH_WAKE_WORDS = 10 @@ -50,16 +52,16 @@ def async_get_wake_word_detection_entity( hass: HomeAssistant, entity_id: str ) -> WakeWordDetectionEntity | None: """Return wake word entity.""" - component: EntityComponent[WakeWordDetectionEntity] = hass.data[DOMAIN] - - return component.get_entity(entity_id) + return hass.data[DATA_COMPONENT].get_entity(entity_id) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up wake word.""" websocket_api.async_register_command(hass, websocket_entity_info) - component = hass.data[DOMAIN] = EntityComponent(_LOGGER, DOMAIN, hass) + component = hass.data[DATA_COMPONENT] = EntityComponent[WakeWordDetectionEntity]( + _LOGGER, DOMAIN, hass + ) component.register_shutdown() return True @@ -67,14 +69,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await 
hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class WakeWordDetectionEntity(RestoreEntity): @@ -137,13 +137,11 @@ class WakeWordDetectionEntity(RestoreEntity): } ) @websocket_api.async_response -@callback async def websocket_entity_info( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Get info about wake word entity.""" - component: EntityComponent[WakeWordDetectionEntity] = hass.data[DOMAIN] - entity = component.get_entity(msg["entity_id"]) + entity = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"]) if entity is None: connection.send_error( diff --git a/homeassistant/components/wallbox/__init__.py b/homeassistant/components/wallbox/__init__.py index 4ea2cf98be1..b2f8ac7fd5d 100644 --- a/homeassistant/components/wallbox/__init__.py +++ b/homeassistant/components/wallbox/__init__.py @@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from .const import CONF_STATION, DOMAIN, UPDATE_INTERVAL -from .coordinator import InvalidAuth, WallboxCoordinator +from .coordinator import InvalidAuth, WallboxCoordinator, async_validate_input PLATFORMS = [Platform.LOCK, Platform.NUMBER, Platform.SENSOR, Platform.SWITCH] @@ -22,18 +22,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.data[CONF_PASSWORD], jwtTokenDrift=UPDATE_INTERVAL, ) + try: + await async_validate_input(hass, wallbox) + except InvalidAuth as ex: + raise ConfigEntryAuthFailed from ex + wallbox_coordinator = WallboxCoordinator( entry.data[CONF_STATION], wallbox, hass, ) - - try: - await wallbox_coordinator.async_validate_input() - - except InvalidAuth as ex: - raise ConfigEntryAuthFailed from ex - await wallbox_coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = wallbox_coordinator diff --git a/homeassistant/components/wallbox/config_flow.py b/homeassistant/components/wallbox/config_flow.py index 44c47149554..bdc51eef963 100644 --- a/homeassistant/components/wallbox/config_flow.py +++ b/homeassistant/components/wallbox/config_flow.py @@ -8,12 +8,12 @@ from typing import Any import voluptuous as vol from wallbox import Wallbox -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from .const import CONF_STATION, DOMAIN -from .coordinator import InvalidAuth, WallboxCoordinator +from .coordinator import InvalidAuth, async_validate_input COMPONENT_DOMAIN = DOMAIN @@ -32,9 +32,8 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ wallbox = Wallbox(data["username"], data["password"]) - wallbox_coordinator = WallboxCoordinator(data["station"], wallbox, hass) - await wallbox_coordinator.async_validate_input() + await async_validate_input(hass, wallbox) # Return info that you want to store in the config entry. 
return {"title": "Wallbox Portal"} @@ -43,18 +42,10 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, class WallboxConfigFlow(ConfigFlow, domain=COMPONENT_DOMAIN): """Handle a config flow for Wallbox.""" - def __init__(self) -> None: - """Start the Wallbox config flow.""" - self._reauth_entry: ConfigEntry | None = None - async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_user() async def async_step_user( @@ -71,18 +62,13 @@ class WallboxConfigFlow(ConfigFlow, domain=COMPONENT_DOMAIN): try: await self.async_set_unique_id(user_input["station"]) - if not self._reauth_entry: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() info = await validate_input(self.hass, user_input) return self.async_create_entry(title=info["title"], data=user_input) - if user_input["station"] == self._reauth_entry.data[CONF_STATION]: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input, unique_id=user_input["station"] - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") + reauth_entry = self._get_reauth_entry() + if user_input["station"] == reauth_entry.data[CONF_STATION]: + return self.async_update_reload_and_abort(reauth_entry, data=user_input) errors["base"] = "reauth_invalid" except ConnectionError: errors["base"] = "cannot_connect" diff --git a/homeassistant/components/wallbox/const.py b/homeassistant/components/wallbox/const.py index 69633cbda22..c38b8967776 100644 --- a/homeassistant/components/wallbox/const.py +++ b/homeassistant/components/wallbox/const.py @@ -22,11 +22,15 @@ CHARGER_CURRENCY_KEY = "currency" CHARGER_DATA_KEY = "config_data" CHARGER_DEPOT_PRICE_KEY = "depot_price" CHARGER_ENERGY_PRICE_KEY = "energy_price" +CHARGER_FEATURES_KEY = "features" CHARGER_SERIAL_NUMBER_KEY = "serial_number" CHARGER_PART_NUMBER_KEY = "part_number" +CHARGER_PLAN_KEY = "plan" +CHARGER_POWER_BOOST_KEY = "POWER_BOOST" CHARGER_SOFTWARE_KEY = "software" CHARGER_MAX_AVAILABLE_POWER_KEY = "max_available_power" CHARGER_MAX_CHARGING_CURRENT_KEY = "max_charging_current" +CHARGER_MAX_ICP_CURRENT_KEY = "icp_max_current" CHARGER_PAUSE_RESUME_KEY = "paused" CHARGER_LOCKED_UNLOCKED_KEY = "locked" CHARGER_NAME_KEY = "name" diff --git a/homeassistant/components/wallbox/coordinator.py b/homeassistant/components/wallbox/coordinator.py index e24ccd28440..99c565d9c0c 100644 --- a/homeassistant/components/wallbox/coordinator.py +++ b/homeassistant/components/wallbox/coordinator.py @@ -19,8 +19,12 @@ from .const import ( CHARGER_CURRENCY_KEY, CHARGER_DATA_KEY, CHARGER_ENERGY_PRICE_KEY, + CHARGER_FEATURES_KEY, CHARGER_LOCKED_UNLOCKED_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, + CHARGER_PLAN_KEY, + CHARGER_POWER_BOOST_KEY, CHARGER_STATUS_DESCRIPTION_KEY, CHARGER_STATUS_ID_KEY, CODE_KEY, @@ -85,6 +89,21 @@ def _require_authentication[_WallboxCoordinatorT: WallboxCoordinator, **_P]( return require_authentication +def _validate(wallbox: Wallbox) -> None: + """Authenticate using Wallbox API.""" + try: + wallbox.authenticate() + except requests.exceptions.HTTPError as wallbox_connection_error: + if wallbox_connection_error.response.status_code == 403: + raise InvalidAuth from wallbox_connection_error + raise 
ConnectionError from wallbox_connection_error + + +async def async_validate_input(hass: HomeAssistant, wallbox: Wallbox) -> None: + """Get new sensor data for Wallbox component.""" + await hass.async_add_executor_job(_validate, wallbox) + + class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Wallbox Coordinator class.""" @@ -104,19 +123,6 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Authenticate using Wallbox API.""" self._wallbox.authenticate() - def _validate(self) -> None: - """Authenticate using Wallbox API.""" - try: - self._wallbox.authenticate() - except requests.exceptions.HTTPError as wallbox_connection_error: - if wallbox_connection_error.response.status_code == 403: - raise InvalidAuth from wallbox_connection_error - raise ConnectionError from wallbox_connection_error - - async def async_validate_input(self) -> None: - """Get new sensor data for Wallbox component.""" - await self.hass.async_add_executor_job(self._validate) - @_require_authentication def _get_data(self) -> dict[str, Any]: """Get new sensor data for Wallbox component.""" @@ -130,6 +136,16 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): data[CHARGER_ENERGY_PRICE_KEY] = data[CHARGER_DATA_KEY][ CHARGER_ENERGY_PRICE_KEY ] + # Only show max_icp_current if power_boost is available in the wallbox unit: + if ( + data[CHARGER_DATA_KEY].get(CHARGER_MAX_ICP_CURRENT_KEY, 0) > 0 + and CHARGER_POWER_BOOST_KEY + in data[CHARGER_DATA_KEY][CHARGER_PLAN_KEY][CHARGER_FEATURES_KEY] + ): + data[CHARGER_MAX_ICP_CURRENT_KEY] = data[CHARGER_DATA_KEY][ + CHARGER_MAX_ICP_CURRENT_KEY + ] + data[CHARGER_CURRENCY_KEY] = ( f"{data[CHARGER_DATA_KEY][CHARGER_CURRENCY_KEY][CODE_KEY]}/kWh" ) @@ -160,6 +176,21 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) await self.async_request_refresh() + @_require_authentication + def _set_icp_current(self, icp_current: float) -> None: + """Set maximum icp current for Wallbox.""" + try: + self._wallbox.setIcpMaxCurrent(self._station, icp_current) + except requests.exceptions.HTTPError as wallbox_connection_error: + if wallbox_connection_error.response.status_code == 403: + raise InvalidAuth from wallbox_connection_error + raise + + async def async_set_icp_current(self, icp_current: float) -> None: + """Set maximum icp current for Wallbox.""" + await self.hass.async_add_executor_job(self._set_icp_current, icp_current) + await self.async_request_refresh() + @_require_authentication def _set_energy_cost(self, energy_cost: float) -> None: """Set energy cost for Wallbox.""" diff --git a/homeassistant/components/wallbox/entity.py b/homeassistant/components/wallbox/entity.py index 489e81ed6b0..3fe1865af4a 100644 --- a/homeassistant/components/wallbox/entity.py +++ b/homeassistant/components/wallbox/entity.py @@ -34,7 +34,8 @@ class WallboxEntity(CoordinatorEntity[WallboxCoordinator]): }, name=f"Wallbox {self.coordinator.data[CHARGER_NAME_KEY]}", manufacturer="Wallbox", - model=self.coordinator.data[CHARGER_DATA_KEY][CHARGER_PART_NUMBER_KEY], + model=self.coordinator.data[CHARGER_NAME_KEY].split(" SN")[0], + model_id=self.coordinator.data[CHARGER_DATA_KEY][CHARGER_PART_NUMBER_KEY], sw_version=self.coordinator.data[CHARGER_DATA_KEY][CHARGER_SOFTWARE_KEY][ CHARGER_CURRENT_VERSION_KEY ], diff --git a/homeassistant/components/wallbox/number.py b/homeassistant/components/wallbox/number.py index 8ae4c473299..24cdd16f99d 100644 --- a/homeassistant/components/wallbox/number.py +++ b/homeassistant/components/wallbox/number.py @@ -21,6 +21,7 @@ 
from .const import ( CHARGER_ENERGY_PRICE_KEY, CHARGER_MAX_AVAILABLE_POWER_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, CHARGER_PART_NUMBER_KEY, CHARGER_SERIAL_NUMBER_KEY, DOMAIN, @@ -67,6 +68,16 @@ NUMBER_TYPES: dict[str, WallboxNumberEntityDescription] = { set_value_fn=lambda coordinator: coordinator.async_set_energy_cost, native_step=0.01, ), + CHARGER_MAX_ICP_CURRENT_KEY: WallboxNumberEntityDescription( + key=CHARGER_MAX_ICP_CURRENT_KEY, + translation_key="maximum_icp_current", + max_value_fn=lambda coordinator: cast( + float, coordinator.data[CHARGER_MAX_AVAILABLE_POWER_KEY] + ), + min_value_fn=lambda _: 6, + set_value_fn=lambda coordinator: coordinator.async_set_icp_current, + native_step=1, + ), } diff --git a/homeassistant/components/wallbox/sensor.py b/homeassistant/components/wallbox/sensor.py index eadbc04dca2..18d8afb5612 100644 --- a/homeassistant/components/wallbox/sensor.py +++ b/homeassistant/components/wallbox/sensor.py @@ -38,6 +38,7 @@ from .const import ( CHARGER_ENERGY_PRICE_KEY, CHARGER_MAX_AVAILABLE_POWER_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, CHARGER_SERIAL_NUMBER_KEY, CHARGER_STATE_OF_CHARGE_KEY, CHARGER_STATUS_DESCRIPTION_KEY, @@ -145,6 +146,13 @@ SENSOR_TYPES: dict[str, WallboxSensorEntityDescription] = { device_class=SensorDeviceClass.CURRENT, state_class=SensorStateClass.MEASUREMENT, ), + CHARGER_MAX_ICP_CURRENT_KEY: WallboxSensorEntityDescription( + key=CHARGER_MAX_ICP_CURRENT_KEY, + translation_key=CHARGER_MAX_ICP_CURRENT_KEY, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + ), } diff --git a/homeassistant/components/wallbox/strings.json b/homeassistant/components/wallbox/strings.json index dd96cebf605..f4378b328d8 100644 --- a/homeassistant/components/wallbox/strings.json +++ b/homeassistant/components/wallbox/strings.json @@ -38,6 +38,9 @@ }, "energy_price": { "name": "Energy price" + }, + "maximum_icp_current": { + "name": "Maximum ICP current" } }, "sensor": { @@ -79,6 +82,9 @@ }, "max_charging_current": { "name": "Max charging current" + }, + "icp_max_current": { + "name": "Max ICP current" } }, "switch": { diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index e6e424329fb..cac0a365f74 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -5,10 +5,10 @@ from __future__ import annotations from datetime import timedelta from enum import IntFlag import functools as ft -from functools import cached_property import logging from typing import Any, final +from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -25,20 +25,16 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.temperature import display_temp as show_temp from homeassistant.helpers.typing import ConfigType, VolDictType +from homeassistant.util.hass_dict import 
HassKey from homeassistant.util.unit_conversion import TemperatureConverter from .const import DOMAIN +DATA_COMPONENT: HassKey[EntityComponent[WaterHeaterEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -60,7 +56,7 @@ STATE_GAS = "gas" class WaterHeaterEntityFeature(IntFlag): - """Supported features of the fan entity.""" + """Supported features of the water heater entity.""" TARGET_TEMPERATURE = 1 OPERATION_MODE = 2 @@ -68,18 +64,6 @@ class WaterHeaterEntityFeature(IntFlag): ON_OFF = 8 -# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. -# Please use the WaterHeaterEntityFeature enum instead. -_DEPRECATED_SUPPORT_TARGET_TEMPERATURE = DeprecatedConstantEnum( - WaterHeaterEntityFeature.TARGET_TEMPERATURE, "2025.1" -) -_DEPRECATED_SUPPORT_OPERATION_MODE = DeprecatedConstantEnum( - WaterHeaterEntityFeature.OPERATION_MODE, "2025.1" -) -_DEPRECATED_SUPPORT_AWAY_MODE = DeprecatedConstantEnum( - WaterHeaterEntityFeature.AWAY_MODE, "2025.1" -) - ATTR_MAX_TEMP = "max_temp" ATTR_MIN_TEMP = "min_temp" ATTR_AWAY_MODE = "away_mode" @@ -109,7 +93,7 @@ SET_OPERATION_MODE_SCHEMA: VolDictType = { async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up water_heater devices.""" - component = hass.data[DOMAIN] = EntityComponent[WaterHeaterEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[WaterHeaterEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) @@ -137,17 +121,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[WaterHeaterEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[WaterHeaterEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) -class WaterHeaterEntityEntityDescription(EntityDescription, frozen_or_thawed=True): +class WaterHeaterEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes water heater entities.""" @@ -170,7 +152,7 @@ class WaterHeaterEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): {ATTR_OPERATION_LIST, ATTR_MIN_TEMP, ATTR_MAX_TEMP} ) - entity_description: WaterHeaterEntityEntityDescription + entity_description: WaterHeaterEntityDescription _attr_current_operation: str | None = None _attr_current_temperature: float | None = None _attr_is_away_mode_on: bool | None = None @@ -212,7 +194,7 @@ class WaterHeaterEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ), } - if WaterHeaterEntityFeature.OPERATION_MODE in self.supported_features_compat: + if WaterHeaterEntityFeature.OPERATION_MODE in self.supported_features: data[ATTR_OPERATION_LIST] = self.operation_list return data @@ -248,7 +230,7 @@ class WaterHeaterEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ), } - supported_features = self.supported_features_compat + supported_features = self.supported_features if WaterHeaterEntityFeature.OPERATION_MODE in supported_features: data[ATTR_OPERATION_MODE] = self.current_operation @@ -397,19 +379,6 @@ class WaterHeaterEntity(Entity, 
cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Return the list of supported features.""" return self._attr_supported_features - @property - def supported_features_compat(self) -> WaterHeaterEntityFeature: - """Return the supported features as WaterHeaterEntityFeature. - - Remove this compatibility shim in 2025.1 or later. - """ - features = self.supported_features - if type(features) is int: # noqa: E721 - new_features = WaterHeaterEntityFeature(features) - self._report_deprecated_supported_features_values(new_features) - return new_features - return features - async def async_service_away_mode( entity: WaterHeaterEntity, service: ServiceCall @@ -437,11 +406,3 @@ async def async_service_temperature_set( kwargs[value] = temp await entity.async_set_temperature(**kwargs) - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = ft.partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/water_heater/icons.json b/homeassistant/components/water_heater/icons.json index af6996374c5..bc80128c6a3 100644 --- a/homeassistant/components/water_heater/icons.json +++ b/homeassistant/components/water_heater/icons.json @@ -22,10 +22,20 @@ } }, "services": { - "set_away_mode": "mdi:account-arrow-right", - "set_operation_mode": "mdi:water-boiler", - "set_temperature": "mdi:thermometer", - "turn_off": "mdi:water-boiler-off", - "turn_on": "mdi:water-boiler" + "set_away_mode": { + "service": "mdi:account-arrow-right" + }, + "set_operation_mode": { + "service": "mdi:water-boiler" + }, + "set_temperature": { + "service": "mdi:thermometer" + }, + "turn_off": { + "service": "mdi:water-boiler-off" + }, + "turn_on": { + "service": "mdi:water-boiler" + } } } diff --git a/homeassistant/components/water_heater/strings.json b/homeassistant/components/water_heater/strings.json index 741b277d84d..07e132a0b5b 100644 --- a/homeassistant/components/water_heater/strings.json +++ b/homeassistant/components/water_heater/strings.json @@ -1,4 +1,5 @@ { + "title": "Water heater", "device_automation": { "action_type": { "turn_on": "[%key:common::device_automation::action_type::turn_on%]", @@ -7,7 +8,7 @@ }, "entity_component": { "_": { - "name": "Water heater", + "name": "[%key:component::water_heater::title%]", "state": { "off": "[%key:common::state::off%]", "eco": "Eco", diff --git a/homeassistant/components/waterfurnace/manifest.json b/homeassistant/components/waterfurnace/manifest.json index 9e01f7e6a05..2bf72acb047 100644 --- a/homeassistant/components/waterfurnace/manifest.json +++ b/homeassistant/components/waterfurnace/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/waterfurnace", "iot_class": "cloud_polling", "loggers": ["waterfurnace"], + "quality_scale": "legacy", "requirements": ["waterfurnace==1.1.0"] } diff --git a/homeassistant/components/watergate/__init__.py b/homeassistant/components/watergate/__init__.py new file mode 100644 index 00000000000..1cf38876556 --- /dev/null +++ b/homeassistant/components/watergate/__init__.py @@ -0,0 +1,107 @@ +"""The Watergate integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from http import HTTPStatus +import logging + +from watergate_local_api import WatergateLocalApiClient +from watergate_local_api.models import WebhookEvent + +from 
homeassistant.components.http import HomeAssistantView +from homeassistant.components.webhook import ( + Request, + Response, + async_generate_url, + async_register, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID, Platform +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import WatergateDataCoordinator + +_LOGGER = logging.getLogger(__name__) + + +PLATFORMS: list[Platform] = [ + Platform.VALVE, +] + +type WatergateConfigEntry = ConfigEntry[WatergateDataCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: WatergateConfigEntry) -> bool: + """Set up Watergate from a config entry.""" + sonic_address = entry.data[CONF_IP_ADDRESS] + webhook_id = entry.data[CONF_WEBHOOK_ID] + + _LOGGER.debug( + "Setting up watergate local api integration for device: IP: %s)", + sonic_address, + ) + + watergate_client = WatergateLocalApiClient( + sonic_address if sonic_address.startswith("http") else f"http://{sonic_address}" + ) + + coordinator = WatergateDataCoordinator(hass, watergate_client) + entry.runtime_data = coordinator + + async_register( + hass, DOMAIN, "Watergate", webhook_id, get_webhook_handler(coordinator) + ) + + _LOGGER.debug("Registered webhook: %s", webhook_id) + + await coordinator.async_config_entry_first_refresh() + + await watergate_client.async_set_webhook_url( + async_generate_url(hass, webhook_id, allow_ip=True, prefer_external=False) + ) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: WatergateConfigEntry) -> bool: + """Unload a config entry.""" + webhook_id = entry.data[CONF_WEBHOOK_ID] + hass.components.webhook.async_unregister(webhook_id) + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +def get_webhook_handler( + coordinator: WatergateDataCoordinator, +) -> Callable[[HomeAssistant, str, Request], Awaitable[Response | None]]: + """Return webhook handler.""" + + async def async_webhook_handler( + hass: HomeAssistant, webhook_id: str, request: Request + ) -> Response | None: + # Handle http post calls to the path. 
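+        # The Sonic device POSTs JSON events to this webhook: reject empty
+        # requests with a 400, parse the payload into a WebhookEvent, and for
+        # valve events copy the reported state into the coordinator's cached
+        # data so entities update immediately instead of waiting for the next
+        # poll.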
+ if not request.body_exists: + return HomeAssistantView.json( + result="No Body", status_code=HTTPStatus.BAD_REQUEST + ) + + body = await request.json() + + _LOGGER.debug("Received webhook: %s", body) + + data = WebhookEvent.parse_webhook_event(body) + + body_type = body.get("type") + + coordinator_data = coordinator.data + if body_type == Platform.VALVE and coordinator_data: + coordinator_data.valve_state = data.state + + coordinator.async_set_updated_data(coordinator_data) + + return HomeAssistantView.json(result="OK", status_code=HTTPStatus.OK) + + return async_webhook_handler diff --git a/homeassistant/components/watergate/config_flow.py b/homeassistant/components/watergate/config_flow.py new file mode 100644 index 00000000000..de8494053a3 --- /dev/null +++ b/homeassistant/components/watergate/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Watergate.""" + +import logging + +import voluptuous as vol +from watergate_local_api.watergate_api import ( + WatergateApiException, + WatergateLocalApiClient, +) + +from homeassistant.components.webhook import async_generate_id as webhook_generate_id +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +SONIC = "Sonic" +WATERGATE_SCHEMA = vol.Schema( + { + vol.Required(CONF_IP_ADDRESS): str, + } +) + + +class WatergateConfigFlow(ConfigFlow, domain=DOMAIN): + """Watergate config flow.""" + + async def async_step_user( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors: dict[str, str] = {} + if user_input is not None: + watergate_client = WatergateLocalApiClient( + self.prepare_ip_address(user_input[CONF_IP_ADDRESS]) + ) + try: + state = await watergate_client.async_get_device_state() + except WatergateApiException as exception: + _LOGGER.error("Error connecting to Watergate device: %s", exception) + errors[CONF_IP_ADDRESS] = "cannot_connect" + else: + if state is None: + _LOGGER.error("Device state returned as None") + errors[CONF_IP_ADDRESS] = "cannot_connect" + else: + await self.async_set_unique_id(state.serial_number) + self._abort_if_unique_id_configured() + return self.async_create_entry( + data={**user_input, CONF_WEBHOOK_ID: webhook_generate_id()}, + title=SONIC, + ) + + return self.async_show_form( + step_id="user", data_schema=WATERGATE_SCHEMA, errors=errors + ) + + def prepare_ip_address(self, ip_address: str) -> str: + """Prepare the IP address for the Watergate device.""" + return ip_address if ip_address.startswith("http") else f"http://{ip_address}" diff --git a/homeassistant/components/watergate/const.py b/homeassistant/components/watergate/const.py new file mode 100644 index 00000000000..22a14330af9 --- /dev/null +++ b/homeassistant/components/watergate/const.py @@ -0,0 +1,5 @@ +"""Constants for the Watergate integration.""" + +DOMAIN = "watergate" + +MANUFACTURER = "Watergate" diff --git a/homeassistant/components/watergate/coordinator.py b/homeassistant/components/watergate/coordinator.py new file mode 100644 index 00000000000..c0b87feed30 --- /dev/null +++ b/homeassistant/components/watergate/coordinator.py @@ -0,0 +1,35 @@ +"""Coordinator for Watergate API.""" + +from datetime import timedelta +import logging + +from watergate_local_api import WatergateApiException, WatergateLocalApiClient +from watergate_local_api.models import DeviceState + +from homeassistant.core import HomeAssistant +from 
homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class WatergateDataCoordinator(DataUpdateCoordinator[DeviceState]): + """Class to manage fetching watergate data.""" + + def __init__(self, hass: HomeAssistant, api: WatergateLocalApiClient) -> None: + """Initialize.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=timedelta(minutes=2), + ) + self.api = api + + async def _async_update_data(self) -> DeviceState: + try: + state = await self.api.async_get_device_state() + except WatergateApiException as exc: + raise UpdateFailed from exc + return state diff --git a/homeassistant/components/watergate/entity.py b/homeassistant/components/watergate/entity.py new file mode 100644 index 00000000000..977a7fbedb4 --- /dev/null +++ b/homeassistant/components/watergate/entity.py @@ -0,0 +1,30 @@ +"""Watergate Base Entity Definition.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER +from .coordinator import WatergateDataCoordinator + + +class WatergateEntity(CoordinatorEntity[WatergateDataCoordinator]): + """Define a base Watergate entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: WatergateDataCoordinator, + entity_name: str, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._api_client = coordinator.api + self._attr_unique_id = f"{coordinator.data.serial_number}.{entity_name}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.data.serial_number)}, + name="Sonic", + serial_number=coordinator.data.serial_number, + manufacturer=MANUFACTURER, + sw_version=coordinator.data.firmware_version if coordinator.data else None, + ) diff --git a/homeassistant/components/watergate/manifest.json b/homeassistant/components/watergate/manifest.json new file mode 100644 index 00000000000..46a80e15671 --- /dev/null +++ b/homeassistant/components/watergate/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "watergate", + "name": "Watergate", + "codeowners": ["@adam-the-hero"], + "config_flow": true, + "dependencies": ["http", "webhook"], + "documentation": "https://www.home-assistant.io/integrations/watergate", + "iot_class": "local_push", + "quality_scale": "bronze", + "requirements": ["watergate-local-api==2024.4.1"] +} diff --git a/homeassistant/components/watergate/quality_scale.yaml b/homeassistant/components/watergate/quality_scale.yaml new file mode 100644 index 00000000000..c6027f6a548 --- /dev/null +++ b/homeassistant/components/watergate/quality_scale.yaml @@ -0,0 +1,43 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + config-entry-unloading: done + log-when-unavailable: todo + entity-unavailable: done + action-exceptions: done + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: todo diff --git a/homeassistant/components/watergate/strings.json b/homeassistant/components/watergate/strings.json new file mode 100644 index 00000000000..2a75c4d103d --- /dev/null +++ b/homeassistant/components/watergate/strings.json @@ -0,0 +1,21 @@ +{ + "config": { + "step": { + "user": { + "data": { + "ip_address": "[%key:common::config_flow::data::ip%]" + }, + "title": "Configure Watergate device", + "data_description": { + "ip_address": "Provide an IP address of your Watergate device." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/components/watergate/valve.py b/homeassistant/components/watergate/valve.py new file mode 100644 index 00000000000..aecaf3fbca9 --- /dev/null +++ b/homeassistant/components/watergate/valve.py @@ -0,0 +1,82 @@ +"""Support for Watergate Valve.""" + +from homeassistant.components.sensor import Any, HomeAssistant +from homeassistant.components.valve import ( + ValveDeviceClass, + ValveEntity, + ValveEntityFeature, + ValveState, +) +from homeassistant.core import callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import WatergateConfigEntry +from .coordinator import WatergateDataCoordinator +from .entity import WatergateEntity + +ENTITY_NAME = "valve" +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WatergateConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up all entries for Watergate Platform.""" + + async_add_entities([SonicValve(config_entry.runtime_data)]) + + +class SonicValve(WatergateEntity, ValveEntity): + """Define a Sonic Valve entity.""" + + _attr_supported_features = ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE + _attr_reports_position = False + _valve_state: str | None = None + _attr_device_class = ValveDeviceClass.WATER + _attr_name = None + + def __init__( + self, + coordinator: WatergateDataCoordinator, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator, ENTITY_NAME) + self._valve_state = coordinator.data.valve_state if coordinator.data else None + + @property + def is_closed(self) -> bool: + """Return if the valve is closed or not.""" + return self._valve_state == ValveState.CLOSED + + @property + def is_opening(self) -> bool | None: + """Return if the valve is opening or not.""" + return self._valve_state == ValveState.OPENING + + @property + def is_closing(self) -> bool | None: + """Return if the valve is closing or not.""" + return self._valve_state == ValveState.CLOSING + + @callback + def _handle_coordinator_update(self) -> None: + """Handle data update.""" + self._attr_available = self.coordinator.data is not None + self._valve_state = ( + self.coordinator.data.valve_state if self.coordinator.data else None + ) + self.async_write_ha_state() + + async def async_open_valve(self, **kwargs: Any) -> None: + """Open the valve.""" + await self._api_client.async_set_valve_state(ValveState.OPEN) + self._valve_state = ValveState.OPENING + self.async_write_ha_state() + + async def async_close_valve(self, **kwargs: Any) -> None: + """Close the valve.""" + await self._api_client.async_set_valve_state(ValveState.CLOSED) + self._valve_state = ValveState.CLOSING + self.async_write_ha_state() diff --git a/homeassistant/components/watson_iot/manifest.json b/homeassistant/components/watson_iot/manifest.json index 702c5492246..a457dcc44b1 100644 --- a/homeassistant/components/watson_iot/manifest.json +++ b/homeassistant/components/watson_iot/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/watson_iot", "iot_class": "cloud_push", "loggers": ["ibmiotf", "paho_mqtt"], + "quality_scale": "legacy", "requirements": ["ibmiotf==0.3.4"] } diff --git a/homeassistant/components/watson_tts/manifest.json b/homeassistant/components/watson_tts/manifest.json index f26fc006561..ecc3d97be46 100644 --- a/homeassistant/components/watson_tts/manifest.json +++ b/homeassistant/components/watson_tts/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/watson_tts", "iot_class": "cloud_push", "loggers": ["ibm_cloud_sdk_core", "ibm_watson"], + "quality_scale": "legacy", "requirements": ["ibm-watson==5.2.2"] } diff --git a/homeassistant/components/watttime/__init__.py b/homeassistant/components/watttime/__init__.py index 6b32cf723a3..ed2bdd4ebac 100644 --- a/homeassistant/components/watttime/__init__.py +++ b/homeassistant/components/watttime/__init__.py @@ -58,6 +58,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=entry.title, 
update_interval=DEFAULT_UPDATE_INTERVAL, update_method=async_update_data, diff --git a/homeassistant/components/watttime/config_flow.py b/homeassistant/components/watttime/config_flow.py index db68738b302..ad676e166c5 100644 --- a/homeassistant/components/watttime/config_flow.py +++ b/homeassistant/components/watttime/config_flow.py @@ -126,9 +126,11 @@ class WattTimeConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> WattTimeOptionsFlowHandler: """Define the config flow to handle options.""" - return WattTimeOptionsFlowHandler(config_entry) + return WattTimeOptionsFlowHandler() async def async_step_coordinates( self, user_input: dict[str, Any] | None = None @@ -241,10 +243,6 @@ class WattTimeConfigFlow(ConfigFlow, domain=DOMAIN): class WattTimeOptionsFlowHandler(OptionsFlow): """Handle a WattTime options flow.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -258,7 +256,7 @@ class WattTimeOptionsFlowHandler(OptionsFlow): { vol.Required( CONF_SHOW_ON_MAP, - default=self.entry.options.get(CONF_SHOW_ON_MAP, True), + default=self.config_entry.options.get(CONF_SHOW_ON_MAP, True), ): bool } ), diff --git a/homeassistant/components/waze_travel_time/__init__.py b/homeassistant/components/waze_travel_time/__init__.py index 83b2e2aa7c7..34f22c9218f 100644 --- a/homeassistant/components/waze_travel_time/__init__.py +++ b/homeassistant/components/waze_travel_time/__init__.py @@ -1,13 +1,15 @@ """The waze_travel_time component.""" import asyncio +from collections.abc import Collection import logging +from typing import Literal from pywaze.route_calculator import CalcRoutesResponse, WazeRouteCalculator, WRCError import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_REGION, Platform +from homeassistant.const import CONF_REGION, Platform, UnitOfLength from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -21,19 +23,26 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, SelectSelectorMode, TextSelector, + TextSelectorConfig, + TextSelectorType, ) +from homeassistant.util.unit_conversion import DistanceConverter from .const import ( CONF_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS, CONF_AVOID_TOLL_ROADS, CONF_DESTINATION, + CONF_EXCL_FILTER, + CONF_INCL_FILTER, CONF_ORIGIN, CONF_REALTIME, CONF_UNITS, CONF_VEHICLE_TYPE, + DEFAULT_FILTER, DEFAULT_VEHICLE_TYPE, DOMAIN, + IMPERIAL_UNITS, METRIC_UNITS, REGIONS, SEMAPHORE, @@ -76,6 +85,18 @@ SERVICE_GET_TRAVEL_TIMES_SCHEMA = vol.Schema( vol.Optional(CONF_AVOID_TOLL_ROADS, default=False): BooleanSelector(), vol.Optional(CONF_AVOID_SUBSCRIPTION_ROADS, default=False): BooleanSelector(), vol.Optional(CONF_AVOID_FERRIES, default=False): BooleanSelector(), + vol.Optional(CONF_INCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), + vol.Optional(CONF_EXCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), } ) @@ -86,6 +107,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b """Load the saved entities.""" if SEMAPHORE not in hass.data.setdefault(DOMAIN, {}): hass.data.setdefault(DOMAIN, {})[SEMAPHORE] = asyncio.Semaphore(1) + await 
hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) async def async_get_travel_times_service(service: ServiceCall) -> ServiceResponse: @@ -102,6 +124,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b avoid_subscription_roads=service.data[CONF_AVOID_SUBSCRIPTION_ROADS], avoid_ferries=service.data[CONF_AVOID_FERRIES], realtime=service.data[CONF_REALTIME], + units=service.data[CONF_UNITS], + incl_filters=service.data.get(CONF_INCL_FILTER, DEFAULT_FILTER), + excl_filters=service.data.get(CONF_EXCL_FILTER, DEFAULT_FILTER), ) return {"routes": [vars(route) for route in response]} if response else None @@ -124,11 +149,15 @@ async def async_get_travel_times( avoid_subscription_roads: bool, avoid_ferries: bool, realtime: bool, - incl_filter: str | None = None, - excl_filter: str | None = None, + units: Literal["metric", "imperial"] = "metric", + incl_filters: Collection[str] | None = None, + excl_filters: Collection[str] | None = None, ) -> list[CalcRoutesResponse] | None: """Get all available routes.""" + incl_filters = incl_filters or () + excl_filters = excl_filters or () + _LOGGER.debug( "Getting update for origin: %s destination: %s", origin, @@ -147,28 +176,60 @@ async def async_get_travel_times( real_time=realtime, alternatives=3, ) + _LOGGER.debug("Got routes: %s", routes) - if incl_filter not in {None, ""}: - routes = [ - r - for r in routes - if any( - incl_filter.lower() == street_name.lower() # type: ignore[union-attr] - for street_name in r.street_names + incl_routes: list[CalcRoutesResponse] = [] + + def should_include_route(route: CalcRoutesResponse) -> bool: + if len(incl_filters) < 1: + return True + should_include = any( + street_name in incl_filters or "" in incl_filters + for street_name in route.street_names + ) + if not should_include: + _LOGGER.debug( + "Excluding route [%s], because no inclusive filter matched any streetname", + route.name, ) + return False + return True + + incl_routes = [route for route in routes if should_include_route(route)] + + filtered_routes: list[CalcRoutesResponse] = [] + + def should_exclude_route(route: CalcRoutesResponse) -> bool: + for street_name in route.street_names: + for excl_filter in excl_filters: + if excl_filter == street_name: + _LOGGER.debug( + "Excluding route, because exclusive filter [%s] matched streetname: %s", + excl_filter, + route.name, + ) + return True + return False + + filtered_routes = [ + route for route in incl_routes if not should_exclude_route(route) + ] + + if units == IMPERIAL_UNITS: + filtered_routes = [ + CalcRoutesResponse( + name=route.name, + distance=DistanceConverter.convert( + route.distance, UnitOfLength.KILOMETERS, UnitOfLength.MILES + ), + duration=route.duration, + street_names=route.street_names, + ) + for route in filtered_routes + if route.distance is not None ] - if excl_filter not in {None, ""}: - routes = [ - r - for r in routes - if not any( - excl_filter.lower() == street_name.lower() # type: ignore[union-attr] - for street_name in r.street_names - ) - ] - - if len(routes) < 1: + if len(filtered_routes) < 1: _LOGGER.warning("No routes found") return None except WRCError as exp: @@ -176,9 +237,36 @@ async def async_get_travel_times( return None else: - return routes + return filtered_routes async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) + + +async def async_migrate_entry(hass: HomeAssistant, 
config_entry: ConfigEntry) -> bool: + """Migrate an old config entry.""" + + if config_entry.version == 1: + _LOGGER.debug( + "Migrating from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + options = dict(config_entry.options) + if (incl_filters := options.pop(CONF_INCL_FILTER, None)) not in {None, ""}: + options[CONF_INCL_FILTER] = [incl_filters] + else: + options[CONF_INCL_FILTER] = DEFAULT_FILTER + if (excl_filters := options.pop(CONF_EXCL_FILTER, None)) not in {None, ""}: + options[CONF_EXCL_FILTER] = [excl_filters] + else: + options[CONF_EXCL_FILTER] = DEFAULT_FILTER + hass.config_entries.async_update_entry(config_entry, options=options, version=2) + _LOGGER.debug( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + return True diff --git a/homeassistant/components/waze_travel_time/config_flow.py b/homeassistant/components/waze_travel_time/config_flow.py index 12dc8336f92..6ab6a4b121c 100644 --- a/homeassistant/components/waze_travel_time/config_flow.py +++ b/homeassistant/components/waze_travel_time/config_flow.py @@ -7,6 +7,7 @@ from typing import Any import voluptuous as vol from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -20,6 +21,8 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, SelectSelectorMode, TextSelector, + TextSelectorConfig, + TextSelectorType, ) from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM @@ -34,6 +37,7 @@ from .const import ( CONF_REALTIME, CONF_UNITS, CONF_VEHICLE_TYPE, + DEFAULT_FILTER, DEFAULT_NAME, DEFAULT_OPTIONS, DOMAIN, @@ -46,8 +50,18 @@ from .helpers import is_valid_config_entry OPTIONS_SCHEMA = vol.Schema( { - vol.Optional(CONF_INCL_FILTER, default=""): TextSelector(), - vol.Optional(CONF_EXCL_FILTER, default=""): TextSelector(), + vol.Optional(CONF_INCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), + vol.Optional(CONF_EXCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), vol.Optional(CONF_REALTIME): BooleanSelector(), vol.Required(CONF_VEHICLE_TYPE): SelectSelector( SelectSelectorConfig( @@ -88,7 +102,7 @@ CONFIG_SCHEMA = vol.Schema( ) -def default_options(hass: HomeAssistant) -> dict[str, str | bool]: +def default_options(hass: HomeAssistant) -> dict[str, str | bool | list[str]]: """Get the default options.""" defaults = DEFAULT_OPTIONS.copy() if hass.config.units is US_CUSTOMARY_SYSTEM: @@ -99,13 +113,13 @@ def default_options(hass: HomeAssistant) -> dict[str, str | bool]: class WazeOptionsFlow(OptionsFlow): """Handle an options flow for Waze Travel Time.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize waze options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: + if user_input.get(CONF_INCL_FILTER) is None: + user_input[CONF_INCL_FILTER] = DEFAULT_FILTER + if user_input.get(CONF_EXCL_FILTER) is None: + user_input[CONF_EXCL_FILTER] = DEFAULT_FILTER return self.async_create_entry( title="", data=user_input, @@ -122,11 +136,7 @@ class WazeOptionsFlow(OptionsFlow): class WazeConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Waze Travel Time.""" - VERSION = 1 - - def __init__(self) -> None: - """Init Config Flow.""" - self._entry: ConfigEntry | None = None + VERSION = 2 @staticmethod @callback @@ -134,7 
+144,7 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> WazeOptionsFlow: """Get the options flow for this handler.""" - return WazeOptionsFlow(config_entry) + return WazeOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -151,12 +161,11 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_DESTINATION], user_input[CONF_REGION], ): - if self._entry: + if self.source == SOURCE_RECONFIGURE: return self.async_update_reload_and_abort( - self._entry, + self._get_reconfigure_entry(), title=user_input[CONF_NAME], data=user_input, - reason="reconfigure_successful", ) return self.async_create_entry( title=user_input.get(CONF_NAME, DEFAULT_NAME), @@ -175,13 +184,10 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reconfigure( - self, _: dict[str, Any] | None = None + self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert self._entry - - data = self._entry.data.copy() + data = self._get_reconfigure_entry().data.copy() data[CONF_REGION] = data[CONF_REGION].lower() return self.async_show_form( diff --git a/homeassistant/components/waze_travel_time/const.py b/homeassistant/components/waze_travel_time/const.py index 84e41c3963f..7c77f43574d 100644 --- a/homeassistant/components/waze_travel_time/const.py +++ b/homeassistant/components/waze_travel_time/const.py @@ -22,6 +22,7 @@ DEFAULT_VEHICLE_TYPE = "car" DEFAULT_AVOID_TOLL_ROADS = False DEFAULT_AVOID_SUBSCRIPTION_ROADS = False DEFAULT_AVOID_FERRIES = False +DEFAULT_FILTER = [""] IMPERIAL_UNITS = "imperial" METRIC_UNITS = "metric" @@ -30,11 +31,13 @@ UNITS = [METRIC_UNITS, IMPERIAL_UNITS] REGIONS = ["us", "na", "eu", "il", "au"] VEHICLE_TYPES = ["car", "taxi", "motorcycle"] -DEFAULT_OPTIONS: dict[str, str | bool] = { +DEFAULT_OPTIONS: dict[str, str | bool | list[str]] = { CONF_REALTIME: DEFAULT_REALTIME, CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, CONF_UNITS: METRIC_UNITS, CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, + CONF_INCL_FILTER: DEFAULT_FILTER, + CONF_EXCL_FILTER: DEFAULT_FILTER, } diff --git a/homeassistant/components/waze_travel_time/icons.json b/homeassistant/components/waze_travel_time/icons.json index fa95e8fdd8a..98e6f26774c 100644 --- a/homeassistant/components/waze_travel_time/icons.json +++ b/homeassistant/components/waze_travel_time/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "get_travel_times": "mdi:timelapse" + "get_travel_times": { + "service": "mdi:timelapse" + } } } diff --git a/homeassistant/components/waze_travel_time/sensor.py b/homeassistant/components/waze_travel_time/sensor.py index 7663b4a102e..a216a02f61e 100644 --- a/homeassistant/components/waze_travel_time/sensor.py +++ b/homeassistant/components/waze_travel_time/sensor.py @@ -20,7 +20,6 @@ from homeassistant.const import ( CONF_NAME, CONF_REGION, EVENT_HOMEASSISTANT_STARTED, - UnitOfLength, UnitOfTime, ) from homeassistant.core import CoreState, HomeAssistant @@ -28,7 +27,6 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.location import find_coordinates -from homeassistant.util.unit_conversion import DistanceConverter from 
. import async_get_travel_times from .const import ( @@ -44,7 +42,6 @@ from .const import ( CONF_VEHICLE_TYPE, DEFAULT_NAME, DOMAIN, - IMPERIAL_UNITS, SEMAPHORE, ) @@ -183,8 +180,8 @@ class WazeTravelTimeData: ) if self.origin is not None and self.destination is not None: # Grab options on every update - incl_filter = self.config_entry.options.get(CONF_INCL_FILTER) - excl_filter = self.config_entry.options.get(CONF_EXCL_FILTER) + incl_filter = self.config_entry.options[CONF_INCL_FILTER] + excl_filter = self.config_entry.options[CONF_EXCL_FILTER] realtime = self.config_entry.options[CONF_REALTIME] vehicle_type = self.config_entry.options[CONF_VEHICLE_TYPE] avoid_toll_roads = self.config_entry.options[CONF_AVOID_TOLL_ROADS] @@ -201,6 +198,7 @@ class WazeTravelTimeData: avoid_subscription_roads, avoid_ferries, realtime, + self.config_entry.options[CONF_UNITS], incl_filter, excl_filter, ) @@ -211,14 +209,5 @@ class WazeTravelTimeData: return self.duration = route.duration - distance = route.distance - - if self.config_entry.options[CONF_UNITS] == IMPERIAL_UNITS: - # Convert to miles. - self.distance = DistanceConverter.convert( - distance, UnitOfLength.KILOMETERS, UnitOfLength.MILES - ) - else: - self.distance = distance - + self.distance = route.distance self.route = route.name diff --git a/homeassistant/components/waze_travel_time/services.yaml b/homeassistant/components/waze_travel_time/services.yaml index 7fba565dd47..fd5f2e9adea 100644 --- a/homeassistant/components/waze_travel_time/services.yaml +++ b/homeassistant/components/waze_travel_time/services.yaml @@ -55,3 +55,13 @@ get_travel_times: required: false selector: boolean: + incl_filter: + required: false + selector: + text: + multiple: true + excl_filter: + required: false + selector: + text: + multiple: true diff --git a/homeassistant/components/waze_travel_time/strings.json b/homeassistant/components/waze_travel_time/strings.json index 6b0b4184af7..cca1789bf7e 100644 --- a/homeassistant/components/waze_travel_time/strings.json +++ b/homeassistant/components/waze_travel_time/strings.json @@ -23,12 +23,12 @@ "options": { "step": { "init": { - "description": "The `substring` inputs will allow you to force the integration to use a particular route or avoid a particular route in its time travel calculation.", + "description": "Some options will allow you to force the integration to use a particular route or avoid a particular route in its time travel calculation.", "data": { "units": "Units", "vehicle_type": "Vehicle Type", - "incl_filter": "Streetname which must be part of the Selected Route", - "excl_filter": "Streetname which must NOT be part of the Selected Route", + "incl_filter": "Exact streetname which must be part of the selected route", + "excl_filter": "Exact streetname which must NOT be part of the selected route", "realtime": "Realtime Travel Time?", "avoid_toll_roads": "Avoid Toll Roads?", "avoid_ferries": "Avoid Ferries?", @@ -100,7 +100,15 @@ }, "avoid_subscription_roads": { "name": "[%key:component::waze_travel_time::options::step::init::data::avoid_subscription_roads%]", - "description": "Whether to avoid subscription roads. " + "description": "Whether to avoid subscription roads." + }, + "incl_filter": { + "name": "[%key:component::waze_travel_time::options::step::init::data::incl_filter%]", + "description": "Exact streetname which must be part of the selected route." 
+ }, + "excl_filter": { + "name": "[%key:component::waze_travel_time::options::step::init::data::excl_filter%]", + "description": "Exact streetname which must NOT be part of the selected route." } } } diff --git a/homeassistant/components/weather/__init__.py b/homeassistant/components/weather/__init__.py index dab3394426e..557765795ee 100644 --- a/homeassistant/components/weather/__init__.py +++ b/homeassistant/components/weather/__init__.py @@ -6,10 +6,11 @@ import abc from collections.abc import Callable, Iterable from contextlib import suppress from datetime import timedelta -from functools import cached_property, partial +from functools import partial import logging from typing import Any, Final, Generic, Literal, Required, TypedDict, cast, final +from propcache import cached_property from typing_extensions import TypeVar import voluptuous as vol @@ -44,7 +45,7 @@ from homeassistant.util.dt import utcnow from homeassistant.util.json import JsonValueType from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM -from .const import ( +from .const import ( # noqa: F401 ATTR_WEATHER_APPARENT_TEMPERATURE, ATTR_WEATHER_CLOUD_COVERAGE, ATTR_WEATHER_DEW_POINT, @@ -62,7 +63,9 @@ from .const import ( ATTR_WEATHER_WIND_GUST_SPEED, ATTR_WEATHER_WIND_SPEED, ATTR_WEATHER_WIND_SPEED_UNIT, + DATA_COMPONENT, DOMAIN, + INTENT_GET_WEATHER, UNIT_CONVERSIONS, VALID_UNITS, WeatherEntityFeature, @@ -195,7 +198,7 @@ class Forecast(TypedDict, total=False): async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the weather component.""" - component = hass.data[DOMAIN] = EntityComponent[WeatherEntity]( + component = hass.data[DATA_COMPONENT] = EntityComponent[WeatherEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) component.async_register_entity_service( @@ -216,14 +219,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - component: EntityComponent[WeatherEntity] = hass.data[DOMAIN] - return await component.async_setup_entry(entry) + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - component: EntityComponent[WeatherEntity] = hass.data[DOMAIN] - return await component.async_unload_entry(entry) + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) class WeatherEntityDescription(EntityDescription, frozen_or_thawed=True): diff --git a/homeassistant/components/weather/const.py b/homeassistant/components/weather/const.py index 0b5246ab31c..f532b891e3e 100644 --- a/homeassistant/components/weather/const.py +++ b/homeassistant/components/weather/const.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from enum import IntFlag -from typing import Final +from typing import TYPE_CHECKING, Final from homeassistant.const import ( UnitOfLength, @@ -13,6 +13,7 @@ from homeassistant.const import ( UnitOfSpeed, UnitOfTemperature, ) +from homeassistant.util.hass_dict import HassKey from homeassistant.util.unit_conversion import ( DistanceConverter, PressureConverter, @@ -20,6 +21,11 @@ from homeassistant.util.unit_conversion import ( TemperatureConverter, ) +if TYPE_CHECKING: + from homeassistant.helpers.entity_component import EntityComponent + + from . 
import WeatherEntity + class WeatherEntityFeature(IntFlag): """Supported features of the update entity.""" @@ -48,6 +54,9 @@ ATTR_WEATHER_CLOUD_COVERAGE = "cloud_coverage" ATTR_WEATHER_UV_INDEX = "uv_index" DOMAIN: Final = "weather" +DATA_COMPONENT: HassKey[EntityComponent[WeatherEntity]] = HassKey(DOMAIN) + +INTENT_GET_WEATHER = "HassGetWeather" VALID_UNITS_PRESSURE: set[str] = { UnitOfPressure.HPA, diff --git a/homeassistant/components/weather/icons.json b/homeassistant/components/weather/icons.json index cc53861e700..04b3c1d3df8 100644 --- a/homeassistant/components/weather/icons.json +++ b/homeassistant/components/weather/icons.json @@ -21,7 +21,11 @@ } }, "services": { - "get_forecast": "mdi:weather-cloudy-clock", - "get_forecasts": "mdi:weather-cloudy-clock" + "get_forecast": { + "service": "mdi:weather-cloudy-clock" + }, + "get_forecasts": { + "service": "mdi:weather-cloudy-clock" + } } } diff --git a/homeassistant/components/weather/intent.py b/homeassistant/components/weather/intent.py index e00a386b619..078108d7afe 100644 --- a/homeassistant/components/weather/intent.py +++ b/homeassistant/components/weather/intent.py @@ -7,9 +7,7 @@ import voluptuous as vol from homeassistant.core import HomeAssistant, State from homeassistant.helpers import intent -from . import DOMAIN - -INTENT_GET_WEATHER = "HassGetWeather" +from . import DOMAIN, INTENT_GET_WEATHER async def async_setup_intents(hass: HomeAssistant) -> None: diff --git a/homeassistant/components/weather/strings.json b/homeassistant/components/weather/strings.json index 77c9cce864b..85d331f5bd0 100644 --- a/homeassistant/components/weather/strings.json +++ b/homeassistant/components/weather/strings.json @@ -111,12 +111,12 @@ }, "issues": { "deprecated_service_weather_get_forecast": { - "title": "Detected use of deprecated service `weather.get_forecast`", + "title": "Detected use of deprecated service weather.get_forecast", "fix_flow": { "step": { "confirm": { "title": "[%key:component::weather::issues::deprecated_service_weather_get_forecast::title%]", - "description": "Use `weather.get_forecasts` instead which supports multiple entities.\n\nPlease replace this service and adjust your automations and scripts and select **submit** to close this issue." + "description": "Use `weather.get_forecasts` instead which supports multiple entities.\n\nPlease replace this service and adjust your automations and scripts and select **Submit** to close this issue." } } } diff --git a/homeassistant/components/weather/websocket_api.py b/homeassistant/components/weather/websocket_api.py index 98adbd1bd02..a96c4fa9973 100644 --- a/homeassistant/components/weather/websocket_api.py +++ b/homeassistant/components/weather/websocket_api.py @@ -9,10 +9,9 @@ import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.entity_component import EntityComponent from homeassistant.util.json import JsonValueType -from .const import DOMAIN, VALID_UNITS, WeatherEntityFeature +from .const import DATA_COMPONENT, DOMAIN, VALID_UNITS, WeatherEntityFeature FORECAST_TYPE_TO_FLAG = { "daily": WeatherEntityFeature.FORECAST_DAILY, @@ -56,13 +55,10 @@ async def ws_subscribe_forecast( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Subscribe to weather forecasts.""" - from . 
import WeatherEntity # pylint: disable=import-outside-toplevel - - component: EntityComponent[WeatherEntity] = hass.data[DOMAIN] entity_id: str = msg["entity_id"] forecast_type: Literal["daily", "hourly", "twice_daily"] = msg["forecast_type"] - if not (entity := component.get_entity(msg["entity_id"])): + if not (entity := hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])): connection.send_error( msg["id"], "invalid_entity_id", diff --git a/homeassistant/components/weatherflow/strings.json b/homeassistant/components/weatherflow/strings.json index d075ee34a05..cf23f02d781 100644 --- a/homeassistant/components/weatherflow/strings.json +++ b/homeassistant/components/weatherflow/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "user": { - "description": "Unable to discover Tempest WeatherFlow devices. Click submit to try again.", + "description": "Unable to discover Tempest WeatherFlow devices. Select **Submit** to try again.", "data": { "host": "[%key:common::config_flow::data::host%]" }, @@ -13,11 +13,11 @@ }, "error": { "address_in_use": "Unable to open local UDP port 50222.", - "cannot_connect": "UDP discovery error." + "cannot_connect": "UDP discovery error.", + "no_device_found": "[%key:common::config_flow::abort::no_devices_found%]" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, "entity": { diff --git a/homeassistant/components/weatherflow_cloud/config_flow.py b/homeassistant/components/weatherflow_cloud/config_flow.py index e8972c320ed..bdd3003e6b6 100644 --- a/homeassistant/components/weatherflow_cloud/config_flow.py +++ b/homeassistant/components/weatherflow_cloud/config_flow.py @@ -33,9 +33,15 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow for reauth.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by reauthentication.""" errors = {} if user_input is not None: @@ -43,18 +49,14 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): errors = await _validate_api_token(api_token) if not errors: # Update the existing entry and abort - if existing_entry := self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ): - return self.async_update_reload_and_abort( - existing_entry, - data={CONF_API_TOKEN: api_token}, - reason="reauth_successful", - reload_even_if_entry_is_unchanged=False, - ) + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data={CONF_API_TOKEN: api_token}, + reload_even_if_entry_is_unchanged=False, + ) return self.async_show_form( - step_id="reauth", + step_id="reauth_confirm", data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}), errors=errors, ) diff --git a/homeassistant/components/weatherflow_cloud/manifest.json b/homeassistant/components/weatherflow_cloud/manifest.json index 354b9642c06..98c98cfbac7 100644 --- a/homeassistant/components/weatherflow_cloud/manifest.json +++ b/homeassistant/components/weatherflow_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/weatherflow_cloud", "iot_class": "cloud_polling", "loggers": 
["weatherflow4py"], - "requirements": ["weatherflow4py==0.2.21"] + "requirements": ["weatherflow4py==1.0.6"] } diff --git a/homeassistant/components/weatherflow_cloud/sensor.py b/homeassistant/components/weatherflow_cloud/sensor.py index 1c7fa5fb377..aeab955878f 100644 --- a/homeassistant/components/weatherflow_cloud/sensor.py +++ b/homeassistant/components/weatherflow_cloud/sensor.py @@ -180,11 +180,9 @@ async def async_setup_entry( entry.entry_id ] - stations = coordinator.data.keys() - async_add_entities( WeatherFlowCloudSensor(coordinator, sensor_description, station_id) - for station_id in stations + for station_id in coordinator.data for sensor_description in WF_SENSORS ) diff --git a/homeassistant/components/weatherflow_cloud/strings.json b/homeassistant/components/weatherflow_cloud/strings.json index df561c8b753..f707cbb0353 100644 --- a/homeassistant/components/weatherflow_cloud/strings.json +++ b/homeassistant/components/weatherflow_cloud/strings.json @@ -7,7 +7,7 @@ "api_token": "Personal api token" } }, - "reauth": { + "reauth_confirm": { "description": "Reauthenticate with WeatherFlow", "data": { "api_token": "[%key:component::weatherflow_cloud::config::step::user::data::api_token%]" diff --git a/homeassistant/components/weatherkit/manifest.json b/homeassistant/components/weatherkit/manifest.json index a6dd40d5993..f86745f330f 100644 --- a/homeassistant/components/weatherkit/manifest.json +++ b/homeassistant/components/weatherkit/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/weatherkit", "iot_class": "cloud_polling", - "requirements": ["apple_weatherkit==1.1.2"] + "requirements": ["apple_weatherkit==1.1.3"] } diff --git a/homeassistant/components/webmin/config_flow.py b/homeassistant/components/webmin/config_flow.py index 3f55bbd9110..64f8c684dfa 100644 --- a/homeassistant/components/webmin/config_flow.py +++ b/homeassistant/components/webmin/config_flow.py @@ -26,7 +26,7 @@ from homeassistant.helpers.schema_config_entry_flow import ( SchemaFlowFormStep, ) -from .const import DEFAULT_PORT, DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN +from .const import DEFAULT_PORT, DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, LOGGER from .helpers import get_instance_from_options, get_sorted_mac_addresses @@ -45,9 +45,8 @@ async def validate_user_input( raise SchemaFlowError("invalid_auth") from err raise SchemaFlowError("cannot_connect") from err except Fault as fault: - raise SchemaFlowError( - f"Fault {fault.faultCode}: {fault.faultString}" - ) from fault + LOGGER.exception(f"Fault {fault.faultCode}: {fault.faultString}") + raise SchemaFlowError("unknown") from fault except ClientConnectionError as err: raise SchemaFlowError("cannot_connect") from err except Exception as err: diff --git a/homeassistant/components/webostv/config_flow.py b/homeassistant/components/webostv/config_flow.py index f380e49f8a3..45395bd282a 100644 --- a/homeassistant/components/webostv/config_flow.py +++ b/homeassistant/components/webostv/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from typing import Any +from typing import Any, Self from urllib.parse import urlparse from aiowebostv import WebOsTvPairError @@ -47,7 +47,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self._host: str = "" self._name: str = "" self._uuid: str | None = None - self._entry: ConfigEntry | None = None @staticmethod @callback @@ -92,7 +91,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Display pairing 
form.""" self._async_check_configured_entry() - self.context[CONF_HOST] = self._host self.context["title_placeholders"] = {"name": self._name} errors = {} @@ -130,27 +128,27 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(uuid) self._abort_if_unique_id_configured({CONF_HOST: self._host}) - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == self._host: - return self.async_abort(reason="already_in_progress") + if self.hass.config_entries.flow.async_has_matching_flow(self): + return self.async_abort(reason="already_in_progress") self._uuid = uuid return await self.async_step_pairing() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow._host == self._host # noqa: SLF001 + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an WebOsTvPairError.""" self._host = entry_data[CONF_HOST] - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self._entry is not None - if user_input is not None: try: client = await async_control_connect(self._host, None) @@ -159,8 +157,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): except WEBOSTV_EXCEPTIONS: return self.async_abort(reason="reauth_unsuccessful") - update_client_key(self.hass, self._entry, client) - await self.hass.config_entries.async_reload(self._entry.entry_id) + reauth_entry = self._get_reauth_entry() + update_client_key(self.hass, reauth_entry, client) + await self.hass.config_entries.async_reload(reauth_entry.entry_id) return self.async_abort(reason="reauth_successful") return self.async_show_form(step_id="reauth_confirm") @@ -171,8 +170,6 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry - self.options = config_entry.options self.host = config_entry.data[CONF_HOST] self.key = config_entry.data[CONF_CLIENT_SECRET] @@ -189,7 +186,8 @@ class OptionsFlowHandler(OptionsFlow): if not sources_list: errors["base"] = "cannot_retrieve" - sources = [s for s in self.options.get(CONF_SOURCES, []) if s in sources_list] + option_sources = self.config_entry.options.get(CONF_SOURCES, []) + sources = [s for s in option_sources if s in sources_list] if not sources: sources = sources_list diff --git a/homeassistant/components/webostv/device_trigger.py b/homeassistant/components/webostv/device_trigger.py index 17d92b1abf3..f16b1cec4f5 100644 --- a/homeassistant/components/webostv/device_trigger.py +++ b/homeassistant/components/webostv/device_trigger.py @@ -4,8 +4,8 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.const import CONF_DEVICE_ID, CONF_PLATFORM, CONF_TYPE diff --git a/homeassistant/components/webostv/icons.json b/homeassistant/components/webostv/icons.json index deb9729a99f..edc058d099f 100644 --- a/homeassistant/components/webostv/icons.json +++ 
b/homeassistant/components/webostv/icons.json @@ -1,7 +1,13 @@ { "services": { - "button": "mdi:button-pointer", - "command": "mdi:console", - "select_sound_output": "mdi:volume-source" + "button": { + "service": "mdi:button-pointer" + }, + "command": { + "service": "mdi:console" + }, + "select_sound_output": { + "service": "mdi:volume-source" + } } } diff --git a/homeassistant/components/webostv/manifest.json b/homeassistant/components/webostv/manifest.json index 679bad9b9f5..6c826c2f997 100644 --- a/homeassistant/components/webostv/manifest.json +++ b/homeassistant/components/webostv/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/webostv", "iot_class": "local_push", "loggers": ["aiowebostv"], - "quality_scale": "platinum", "requirements": ["aiowebostv==0.4.2"], "ssdp": [ { diff --git a/homeassistant/components/webostv/media_player.py b/homeassistant/components/webostv/media_player.py index 099b5a73784..239780e3f01 100644 --- a/homeassistant/components/webostv/media_player.py +++ b/homeassistant/components/webostv/media_player.py @@ -422,13 +422,13 @@ class LgWebOSMediaPlayerEntity(RestoreEntity, MediaPlayerEntity): partial_match_channel_id = channel["channelId"] if perfect_match_channel_id is not None: - _LOGGER.info( + _LOGGER.debug( "Switching to channel <%s> with perfect match", perfect_match_channel_id, ) await self._client.set_channel(perfect_match_channel_id) elif partial_match_channel_id is not None: - _LOGGER.info( + _LOGGER.debug( "Switching to channel <%s> with partial match", partial_match_channel_id, ) diff --git a/homeassistant/components/webostv/strings.json b/homeassistant/components/webostv/strings.json index 1d045d48ba5..3ceab5f50a3 100644 --- a/homeassistant/components/webostv/strings.json +++ b/homeassistant/components/webostv/strings.json @@ -3,7 +3,7 @@ "flow_title": "LG webOS Smart TV", "step": { "user": { - "description": "Turn on TV, fill the following fields click submit", + "description": "Turn on TV, fill the following fields and select **Submit**", "data": { "host": "[%key:common::config_flow::data::host%]", "name": "[%key:common::config_flow::data::name%]" @@ -14,7 +14,7 @@ }, "pairing": { "title": "webOS TV Pairing", - "description": "Click submit and accept the pairing request on your TV.\n\n![Image](/static/images/config_webos.png)" + "description": "Select **Submit** and accept the pairing request on your TV.\n\n![Image](/static/images/config_webos.png)" }, "reauth_confirm": { "title": "[%key:component::webostv::config::step::pairing::title%]", @@ -22,7 +22,7 @@ } }, "error": { - "cannot_connect": "Failed to connect, please turn on your TV or check ip address" + "cannot_connect": "Failed to connect, please turn on your TV or check the IP address" }, "abort": { "error_pairing": "Connected to LG webOS TV but not paired", diff --git a/homeassistant/components/websocket_api/commands.py b/homeassistant/components/websocket_api/commands.py index f66930c8d00..cfa132b71eb 100644 --- a/homeassistant/components/websocket_api/commands.py +++ b/homeassistant/components/websocket_api/commands.py @@ -36,6 +36,10 @@ from homeassistant.exceptions import ( ) from homeassistant.helpers import config_validation as cv, entity, template from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entityfilter import ( + INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, + convert_include_exclude_filter, +) from homeassistant.helpers.event import ( TrackTemplate, TrackTemplateResult, @@ -366,14 +370,17 @@ def 
_send_handle_get_states_response( @callback def _forward_entity_changes( send_message: Callable[[str | bytes | dict[str, Any]], None], - entity_ids: set[str], + entity_ids: set[str] | None, + entity_filter: Callable[[str], bool] | None, user: User, message_id_as_bytes: bytes, event: Event[EventStateChangedData], ) -> None: """Forward entity state changed events to websocket.""" entity_id = event.data["entity_id"] - if entity_ids and entity_id not in entity_ids: + if (entity_ids and entity_id not in entity_ids) or ( + entity_filter and not entity_filter(entity_id) + ): return # We have to lookup the permissions again because the user might have # changed since the subscription was created. @@ -381,7 +388,7 @@ def _forward_entity_changes( if ( not user.is_admin and not permissions.access_all_entities(POLICY_READ) - and not permissions.check_entity(event.data["entity_id"], POLICY_READ) + and not permissions.check_entity(entity_id, POLICY_READ) ): return send_message(messages.cached_state_diff_message(message_id_as_bytes, event)) @@ -392,43 +399,55 @@ def _forward_entity_changes( { vol.Required("type"): "subscribe_entities", vol.Optional("entity_ids"): cv.entity_ids, + **INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.schema, } ) def handle_subscribe_entities( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Handle subscribe entities command.""" - entity_ids = set(msg.get("entity_ids", [])) + entity_ids = set(msg.get("entity_ids", [])) or None + _filter = convert_include_exclude_filter(msg) + entity_filter = None if _filter.empty_filter else _filter.get_filter() # We must never await between sending the states and listening for # state changed events or we will introduce a race condition # where some states are missed states = _async_get_allowed_states(hass, connection) - message_id_as_bytes = str(msg["id"]).encode() - connection.subscriptions[msg["id"]] = hass.bus.async_listen( + msg_id = msg["id"] + message_id_as_bytes = str(msg_id).encode() + connection.subscriptions[msg_id] = hass.bus.async_listen( EVENT_STATE_CHANGED, partial( _forward_entity_changes, connection.send_message, entity_ids, + entity_filter, connection.user, message_id_as_bytes, ), ) - connection.send_result(msg["id"]) + connection.send_result(msg_id) # JSON serialize here so we can recover if it blows up due to the # state machine containing unserializable data. This command is required # to succeed for the UI to show. 
try: - serialized_states = [ - state.as_compressed_state_json - for state in states - if not entity_ids or state.entity_id in entity_ids - ] + if entity_ids or entity_filter: + serialized_states = [ + state.as_compressed_state_json + for state in states + if (not entity_ids or state.entity_id in entity_ids) + and (not entity_filter or entity_filter(state.entity_id)) + ] + else: + # Fast path when not filtering + serialized_states = [state.as_compressed_state_json for state in states] except (ValueError, TypeError): pass else: - _send_handle_entities_init_response(connection, msg["id"], serialized_states) + _send_handle_entities_init_response( + connection, message_id_as_bytes, serialized_states + ) return serialized_states = [] @@ -443,18 +462,22 @@ def handle_subscribe_entities( ), ) - _send_handle_entities_init_response(connection, msg["id"], serialized_states) + _send_handle_entities_init_response( + connection, message_id_as_bytes, serialized_states + ) def _send_handle_entities_init_response( - connection: ActiveConnection, msg_id: int, serialized_states: list[bytes] + connection: ActiveConnection, + message_id_as_bytes: bytes, + serialized_states: list[bytes], ) -> None: """Send handle entities init response.""" connection.send_message( b"".join( ( b'{"id":', - str(msg_id).encode(), + message_id_as_bytes, b',"type":"event","event":{"a":{', b",".join(serialized_states), b"}}}", @@ -836,9 +859,9 @@ def handle_fire_event( @decorators.websocket_command( { vol.Required("type"): "validate_config", - vol.Optional("trigger"): cv.match_all, - vol.Optional("condition"): cv.match_all, - vol.Optional("action"): cv.match_all, + vol.Optional("triggers"): cv.match_all, + vol.Optional("conditions"): cv.match_all, + vol.Optional("actions"): cv.match_all, } ) @decorators.async_response @@ -853,9 +876,13 @@ async def handle_validate_config( result = {} for key, schema, validator in ( - ("trigger", cv.TRIGGER_SCHEMA, trigger.async_validate_trigger_config), - ("condition", cv.CONDITIONS_SCHEMA, condition.async_validate_conditions_config), - ("action", cv.SCRIPT_SCHEMA, script.async_validate_actions_config), + ("triggers", cv.TRIGGER_SCHEMA, trigger.async_validate_trigger_config), + ( + "conditions", + cv.CONDITIONS_SCHEMA, + condition.async_validate_conditions_config, + ), + ("actions", cv.SCRIPT_SCHEMA, script.async_validate_actions_config), ): if key not in msg: continue diff --git a/homeassistant/components/websocket_api/connection.py b/homeassistant/components/websocket_api/connection.py index 6c0c6f0c587..62f1adc39b9 100644 --- a/homeassistant/components/websocket_api/connection.py +++ b/homeassistant/components/websocket_api/connection.py @@ -16,6 +16,12 @@ from homeassistant.helpers.http import current_request from homeassistant.util.json import JsonValueType from . 
import const, messages +from .messages import ( + error_message, + event_message, + message_to_json_bytes, + result_message, +) from .util import describe_request if TYPE_CHECKING: @@ -126,12 +132,12 @@ class ActiveConnection: @callback def send_result(self, msg_id: int, result: Any | None = None) -> None: """Send a result message.""" - self.send_message(messages.result_message(msg_id, result)) + self.send_message(message_to_json_bytes(result_message(msg_id, result))) @callback def send_event(self, msg_id: int, event: Any | None = None) -> None: """Send a event message.""" - self.send_message(messages.event_message(msg_id, event)) + self.send_message(message_to_json_bytes(event_message(msg_id, event))) @callback def send_error( @@ -145,13 +151,15 @@ class ActiveConnection: ) -> None: """Send an error message.""" self.send_message( - messages.error_message( - msg_id, - code, - message, - translation_key=translation_key, - translation_domain=translation_domain, - translation_placeholders=translation_placeholders, + message_to_json_bytes( + error_message( + msg_id, + code, + message, + translation_key=translation_key, + translation_domain=translation_domain, + translation_placeholders=translation_placeholders, + ) ) ) diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index 1ad8d909ce8..e7d57aebab6 100644 --- a/homeassistant/components/websocket_api/http.py +++ b/homeassistant/components/websocket_api/http.py @@ -36,6 +36,8 @@ from .error import Disconnect from .messages import message_to_json_bytes from .util import describe_request +CLOSE_MSG_TYPES = {WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING} + if TYPE_CHECKING: from .connection import ActiveConnection @@ -328,7 +330,7 @@ class WebSocketHandler: if TYPE_CHECKING: assert writer is not None - send_bytes_text = partial(writer.send, binary=False) + send_bytes_text = partial(writer.send_frame, opcode=WSMsgType.TEXT) auth = AuthPhase( logger, hass, self._send_message, self._cancel, request, send_bytes_text ) @@ -338,7 +340,7 @@ class WebSocketHandler: try: connection = await self._async_handle_auth_phase(auth, send_bytes_text) self._async_increase_writer_limit(writer) - await self._async_websocket_command_phase(connection, send_bytes_text) + await self._async_websocket_command_phase(connection) except asyncio.CancelledError: logger.debug("%s: Connection cancelled", self.description) raise @@ -448,9 +450,7 @@ class WebSocketHandler: writer._limit = 2**20 # noqa: SLF001 async def _async_websocket_command_phase( - self, - connection: ActiveConnection, - send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], + self, connection: ActiveConnection ) -> None: """Handle the command phase of the websocket connection.""" wsock = self._wsock @@ -461,24 +461,26 @@ class WebSocketHandler: # Command phase while not wsock.closed: msg = await wsock.receive() + msg_type = msg.type + msg_data = msg.data - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): + if msg_type in CLOSE_MSG_TYPES: break - if msg.type is WSMsgType.BINARY: - if len(msg.data) < 1: + if msg_type is WSMsgType.BINARY: + if len(msg_data) < 1: raise Disconnect("Received invalid binary message.") - handler = msg.data[0] - payload = msg.data[1:] + handler = msg_data[0] + payload = msg_data[1:] async_handle_binary(handler, payload) continue - if msg.type is not WSMsgType.TEXT: + if msg_type is not WSMsgType.TEXT: raise Disconnect("Received non-Text message.") try: - command_msg_data = json_loads(msg.data) 
+ command_msg_data = json_loads(msg_data) except ValueError as ex: raise Disconnect("Received invalid JSON.") from ex diff --git a/homeassistant/components/weheat/__init__.py b/homeassistant/components/weheat/__init__.py new file mode 100644 index 00000000000..d924d6ceaab --- /dev/null +++ b/homeassistant/components/weheat/__init__.py @@ -0,0 +1,56 @@ +"""The Weheat integration.""" + +from __future__ import annotations + +from weheat.abstractions.discovery import HeatPumpDiscovery +from weheat.exceptions import UnauthorizedException + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ACCESS_TOKEN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.config_entry_oauth2_flow import ( + OAuth2Session, + async_get_config_entry_implementation, +) + +from .const import API_URL, LOGGER +from .coordinator import WeheatDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type WeheatConfigEntry = ConfigEntry[list[WeheatDataUpdateCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: WeheatConfigEntry) -> bool: + """Set up Weheat from a config entry.""" + implementation = await async_get_config_entry_implementation(hass, entry) + + session = OAuth2Session(hass, entry, implementation) + + token = session.token[CONF_ACCESS_TOKEN] + entry.runtime_data = [] + + # fetch a list of the heat pumps the entry can access + try: + discovered_heat_pumps = await HeatPumpDiscovery.discover_active(API_URL, token) + except UnauthorizedException as error: + raise ConfigEntryAuthFailed from error + + for pump_info in discovered_heat_pumps: + LOGGER.debug("Adding %s", pump_info) + # for each pump, add a coordinator + new_coordinator = WeheatDataUpdateCoordinator(hass, session, pump_info) + + await new_coordinator.async_config_entry_first_refresh() + + entry.runtime_data.append(new_coordinator) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: WeheatConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/weheat/api.py b/homeassistant/components/weheat/api.py new file mode 100644 index 00000000000..b1f5c0b3eff --- /dev/null +++ b/homeassistant/components/weheat/api.py @@ -0,0 +1,28 @@ +"""API for Weheat bound to Home Assistant OAuth.""" + +from aiohttp import ClientSession +from weheat.abstractions import AbstractAuth + +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session + +from .const import API_URL + + +class AsyncConfigEntryAuth(AbstractAuth): + """Provide Weheat authentication tied to an OAuth2 based config entry.""" + + def __init__( + self, + websession: ClientSession, + oauth_session: OAuth2Session, + ) -> None: + """Initialize Weheat auth.""" + super().__init__(websession, host=API_URL) + self._oauth_session = oauth_session + + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + await self._oauth_session.async_ensure_token_valid() + + return self._oauth_session.token[CONF_ACCESS_TOKEN] diff --git a/homeassistant/components/weheat/application_credentials.py b/homeassistant/components/weheat/application_credentials.py new file mode 100644 index 00000000000..3f85d4b0558 --- /dev/null +++ 
b/homeassistant/components/weheat/application_credentials.py @@ -0,0 +1,11 @@ +"""application_credentials platform the Weheat integration.""" + +from homeassistant.components.application_credentials import AuthorizationServer +from homeassistant.core import HomeAssistant + +from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN + + +async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: + """Return authorization server.""" + return AuthorizationServer(authorize_url=OAUTH2_AUTHORIZE, token_url=OAUTH2_TOKEN) diff --git a/homeassistant/components/weheat/config_flow.py b/homeassistant/components/weheat/config_flow.py new file mode 100644 index 00000000000..b1a0b5dd4ea --- /dev/null +++ b/homeassistant/components/weheat/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Weheat.""" + +from collections.abc import Mapping +import logging +from typing import Any + +from weheat.abstractions.user import get_user_id_from_token + +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN +from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler + +from .const import API_URL, DOMAIN, ENTRY_TITLE, OAUTH2_SCOPES + + +class OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): + """Config flow to handle Weheat OAuth2 authentication.""" + + DOMAIN = DOMAIN + + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return logging.getLogger(__name__) + + @property + def extra_authorize_data(self) -> dict[str, str]: + """Extra data that needs to be appended to the authorize url.""" + return { + "scope": " ".join(OAUTH2_SCOPES), + } + + async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: + """Override the create entry method to change to the step to find the heat pumps.""" + # get the user id and use that as unique id for this entry + user_id = await get_user_id_from_token( + API_URL, data[CONF_TOKEN][CONF_ACCESS_TOKEN] + ) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: + self._abort_if_unique_id_configured() + + return self.async_create_entry(title=ENTRY_TITLE, data=data) + + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=data + ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reauth dialog.""" + if user_input is None: + return self.async_show_form(step_id="reauth_confirm") + return await self.async_step_user() diff --git a/homeassistant/components/weheat/const.py b/homeassistant/components/weheat/const.py new file mode 100644 index 00000000000..e33fd983572 --- /dev/null +++ b/homeassistant/components/weheat/const.py @@ -0,0 +1,26 @@ +"""Constants for the Weheat integration.""" + +from logging import Logger, getLogger + +DOMAIN = "weheat" +MANUFACTURER = "Weheat" +ENTRY_TITLE = "Weheat cloud" +ERROR_DESCRIPTION = "error_description" + +OAUTH2_AUTHORIZE = ( + "https://auth.weheat.nl/auth/realms/Weheat/protocol/openid-connect/auth/" +) +OAUTH2_TOKEN = ( + "https://auth.weheat.nl/auth/realms/Weheat/protocol/openid-connect/token/" +) +API_URL = "https://api.weheat.nl" +OAUTH2_SCOPES = ["openid", "offline_access"] + + +UPDATE_INTERVAL = 
30 + +LOGGER: Logger = getLogger(__package__) + +DISPLAY_PRECISION_WATTS = 0 +DISPLAY_PRECISION_COP = 1 +DISPLAY_PRECISION_WATER_TEMP = 1 diff --git a/homeassistant/components/weheat/coordinator.py b/homeassistant/components/weheat/coordinator.py new file mode 100644 index 00000000000..a50e9daec18 --- /dev/null +++ b/homeassistant/components/weheat/coordinator.py @@ -0,0 +1,86 @@ +"""Define a custom coordinator for the Weheat heatpump integration.""" + +from datetime import timedelta + +from weheat.abstractions.discovery import HeatPumpDiscovery +from weheat.abstractions.heat_pump import HeatPump +from weheat.exceptions import ( + ApiException, + BadRequestException, + ForbiddenException, + NotFoundException, + ServiceException, + UnauthorizedException, +) + +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import API_URL, DOMAIN, LOGGER, UPDATE_INTERVAL + +EXCEPTIONS = ( + ServiceException, + NotFoundException, + ForbiddenException, + BadRequestException, + ApiException, +) + + +class WeheatDataUpdateCoordinator(DataUpdateCoordinator[HeatPump]): + """A custom coordinator for the Weheat heatpump integration.""" + + def __init__( + self, + hass: HomeAssistant, + session: OAuth2Session, + heat_pump: HeatPumpDiscovery.HeatPumpInfo, + ) -> None: + """Initialize the data coordinator.""" + super().__init__( + hass, + logger=LOGGER, + name=DOMAIN, + update_interval=timedelta(seconds=UPDATE_INTERVAL), + ) + self.heat_pump_info = heat_pump + self._heat_pump_data = HeatPump(API_URL, heat_pump.uuid) + + self.session = session + + @property + def heatpump_id(self) -> str: + """Return the heat pump id.""" + return self.heat_pump_info.uuid + + @property + def readable_name(self) -> str | None: + """Return the readable name of the heat pump.""" + if self.heat_pump_info.name: + return self.heat_pump_info.name + return self.heat_pump_info.model + + @property + def model(self) -> str: + """Return the model of the heat pump.""" + return self.heat_pump_info.model + + def fetch_data(self) -> HeatPump: + """Get the data from the API.""" + try: + self._heat_pump_data.get_status(self.session.token[CONF_ACCESS_TOKEN]) + except UnauthorizedException as error: + raise ConfigEntryAuthFailed from error + except EXCEPTIONS as error: + raise UpdateFailed(error) from error + + return self._heat_pump_data + + async def _async_update_data(self) -> HeatPump: + """Fetch data from the API.""" + await self.session.async_ensure_token_valid() + + return await self.hass.async_add_executor_job(self.fetch_data) diff --git a/homeassistant/components/weheat/entity.py b/homeassistant/components/weheat/entity.py new file mode 100644 index 00000000000..079db596e19 --- /dev/null +++ b/homeassistant/components/weheat/entity.py @@ -0,0 +1,27 @@ +"""Base entity for Weheat.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER +from .coordinator import WeheatDataUpdateCoordinator + + +class WeheatEntity(CoordinatorEntity[WeheatDataUpdateCoordinator]): + """Defines a base Weheat entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: WeheatDataUpdateCoordinator, + ) -> None: + """Initialize the Weheat 
entity.""" + super().__init__(coordinator) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.heatpump_id)}, + name=coordinator.readable_name, + manufacturer=MANUFACTURER, + model=coordinator.model, + ) diff --git a/homeassistant/components/weheat/icons.json b/homeassistant/components/weheat/icons.json new file mode 100644 index 00000000000..6fdae84cfff --- /dev/null +++ b/homeassistant/components/weheat/icons.json @@ -0,0 +1,33 @@ +{ + "entity": { + "sensor": { + "power_output": { + "default": "mdi:heat-wave" + }, + "power_input": { + "default": "mdi:lightning-bolt" + }, + "cop": { + "default": "mdi:speedometer" + }, + "ch_inlet_temperature": { + "default": "mdi:radiator" + }, + "outside_temperature": { + "default": "mdi:home-thermometer-outline" + }, + "thermostat_room_temperature": { + "default": "mdi:home-thermometer" + }, + "thermostat_room_temperature_setpoint": { + "default": "mdi:home-thermometer" + }, + "heat_pump_state": { + "default": "mdi:state-machine" + }, + "electricity_used": { + "default": "mdi:flash" + } + } + } +} diff --git a/homeassistant/components/weheat/manifest.json b/homeassistant/components/weheat/manifest.json new file mode 100644 index 00000000000..61d6a110dbd --- /dev/null +++ b/homeassistant/components/weheat/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "weheat", + "name": "Weheat", + "codeowners": ["@jesperraemaekers"], + "config_flow": true, + "dependencies": ["application_credentials"], + "documentation": "https://www.home-assistant.io/integrations/weheat", + "iot_class": "cloud_polling", + "requirements": ["weheat==2024.11.26"] +} diff --git a/homeassistant/components/weheat/sensor.py b/homeassistant/components/weheat/sensor.py new file mode 100644 index 00000000000..ef5be9030b9 --- /dev/null +++ b/homeassistant/components/weheat/sensor.py @@ -0,0 +1,212 @@ +"""Platform for sensor integration.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from weheat.abstractions.heat_pump import HeatPump + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . 
import WeheatConfigEntry +from .const import ( + DISPLAY_PRECISION_COP, + DISPLAY_PRECISION_WATER_TEMP, + DISPLAY_PRECISION_WATTS, +) +from .coordinator import WeheatDataUpdateCoordinator +from .entity import WeheatEntity + + +@dataclass(frozen=True, kw_only=True) +class WeHeatSensorEntityDescription(SensorEntityDescription): + """Describes Weheat sensor entity.""" + + value_fn: Callable[[HeatPump], StateType] + + +SENSORS = [ + WeHeatSensorEntityDescription( + translation_key="power_output", + key="power_output", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATTS, + value_fn=lambda status: status.power_output, + ), + WeHeatSensorEntityDescription( + translation_key="power_input", + key="power_input", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATTS, + value_fn=lambda status: status.power_input, + ), + WeHeatSensorEntityDescription( + translation_key="cop", + key="cop", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_COP, + value_fn=lambda status: status.cop, + ), + WeHeatSensorEntityDescription( + translation_key="water_inlet_temperature", + key="water_inlet_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.water_inlet_temperature, + ), + WeHeatSensorEntityDescription( + translation_key="water_outlet_temperature", + key="water_outlet_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.water_outlet_temperature, + ), + WeHeatSensorEntityDescription( + translation_key="ch_inlet_temperature", + key="ch_inlet_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.water_house_in_temperature, + ), + WeHeatSensorEntityDescription( + translation_key="outside_temperature", + key="outside_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.air_inlet_temperature, + ), + WeHeatSensorEntityDescription( + translation_key="thermostat_water_setpoint", + key="thermostat_water_setpoint", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.thermostat_water_setpoint, + ), + WeHeatSensorEntityDescription( + translation_key="thermostat_room_temperature", + key="thermostat_room_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: 
status.thermostat_room_temperature, + ), + WeHeatSensorEntityDescription( + translation_key="thermostat_room_temperature_setpoint", + key="thermostat_room_temperature_setpoint", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.thermostat_room_temperature_setpoint, + ), + WeHeatSensorEntityDescription( + translation_key="heat_pump_state", + key="heat_pump_state", + name=None, + device_class=SensorDeviceClass.ENUM, + options=[s.name.lower() for s in HeatPump.State], + value_fn=( + lambda status: status.heat_pump_state.name.lower() + if status.heat_pump_state + else None + ), + ), + WeHeatSensorEntityDescription( + translation_key="electricity_used", + key="electricity_used", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda status: status.energy_total, + ), +] + + +DHW_SENSORS = [ + WeHeatSensorEntityDescription( + translation_key="dhw_top_temperature", + key="dhw_top_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.dhw_top_temperature, + ), + WeHeatSensorEntityDescription( + translation_key="dhw_bottom_temperature", + key="dhw_bottom_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.dhw_bottom_temperature, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: WeheatConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the sensors for weheat heat pump.""" + entities = [ + WeheatHeatPumpSensor(coordinator, entity_description) + for entity_description in SENSORS + for coordinator in entry.runtime_data + ] + entities.extend( + WeheatHeatPumpSensor(coordinator, entity_description) + for entity_description in DHW_SENSORS + for coordinator in entry.runtime_data + if coordinator.heat_pump_info.has_dhw + ) + + async_add_entities(entities) + + +class WeheatHeatPumpSensor(WeheatEntity, SensorEntity): + """Defines a Weheat heat pump sensor.""" + + coordinator: WeheatDataUpdateCoordinator + entity_description: WeHeatSensorEntityDescription + + def __init__( + self, + coordinator: WeheatDataUpdateCoordinator, + entity_description: WeHeatSensorEntityDescription, + ) -> None: + """Pass coordinator to CoordinatorEntity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + + self._attr_unique_id = f"{coordinator.heatpump_id}_{entity_description.key}" + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/weheat/strings.json b/homeassistant/components/weheat/strings.json new file mode 100644 index 00000000000..0733024cbed --- /dev/null +++ b/homeassistant/components/weheat/strings.json @@ -0,0 +1,90 @@ +{ + "config": { + "step": { + "pick_implementation": { + "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" + }, + "find_devices": { + "title": "Select your heat pump" + 
}, + "reauth_confirm": { + "title": "Re-authenticate with WeHeat", + "description": "You need to re-authenticate with WeHeat to continue" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", + "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", + "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "no_devices_found": "Could not find any heat pumps on this account", + "wrong_account": "You can only reauthenticate this account with the same user." + }, + "create_entry": { + "default": "[%key:common::config_flow::create_entry::authenticated%]" + } + }, + "entity": { + "sensor": { + "power_output": { + "name": "Output power" + }, + "power_input": { + "name": "Input power" + }, + "cop": { + "name": "COP" + }, + "water_inlet_temperature": { + "name": "Water inlet temperature" + }, + "water_outlet_temperature": { + "name": "Water outlet temperature" + }, + "ch_inlet_temperature": { + "name": "Central heating inlet temperature" + }, + "outside_temperature": { + "name": "Outside temperature" + }, + "thermostat_water_setpoint": { + "name": "Water target temperature" + }, + "thermostat_room_temperature": { + "name": "Current room temperature" + }, + "thermostat_room_temperature_setpoint": { + "name": "Room temperature setpoint" + }, + "dhw_top_temperature": { + "name": "DHW top temperature" + }, + "dhw_bottom_temperature": { + "name": "DHW bottom temperature" + }, + "heat_pump_state": { + "state": { + "standby": "[%key:common::state::standby%]", + "water_check": "Checking water temperature", + "heating": "Heating", + "cooling": "Cooling", + "dhw": "Heating DHW", + "legionella_prevention": "Legionella prevention", + "defrosting": "Defrosting", + "self_test": "Self test", + "manual_control": "Manual control" + } + }, + "electricity_used": { + "name": "Electricity used" + } + } + } +} diff --git a/homeassistant/components/wemo/config_flow.py b/homeassistant/components/wemo/config_flow.py index 10a9bf5604b..361c58953c5 100644 --- a/homeassistant/components/wemo/config_flow.py +++ b/homeassistant/components/wemo/config_flow.py @@ -32,16 +32,12 @@ class WemoFlow(DiscoveryFlowHandler, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return WemoOptionsFlow(config_entry) + return WemoOptionsFlow() class WemoOptionsFlow(OptionsFlow): """Options flow for the WeMo component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/wemo/coordinator.py b/homeassistant/components/wemo/coordinator.py index a186b666470..1f25c12f7ca 100644 --- 
a/homeassistant/components/wemo/coordinator.py +++ b/homeassistant/components/wemo/coordinator.py @@ -275,6 +275,7 @@ def _device_info(wemo: WeMoDevice) -> DeviceInfo: identifiers={(DOMAIN, wemo.serial_number)}, manufacturer="Belkin", model=wemo.model_name, + model_id=wemo.model, name=wemo.name, sw_version=wemo.firmware_version, ) diff --git a/homeassistant/components/wemo/fan.py b/homeassistant/components/wemo/fan.py index f9d3270aaa0..42dae679aa5 100644 --- a/homeassistant/components/wemo/fan.py +++ b/homeassistant/components/wemo/fan.py @@ -81,7 +81,6 @@ class WemoHumidifier(WemoBinaryStateEntity, FanEntity): ) wemo: Humidifier _last_fan_on_mode: FanMode - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: DeviceCoordinator) -> None: """Initialize the WeMo switch.""" diff --git a/homeassistant/components/wemo/icons.json b/homeassistant/components/wemo/icons.json index c5ddf5912d6..af5024afcff 100644 --- a/homeassistant/components/wemo/icons.json +++ b/homeassistant/components/wemo/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_humidity": "mdi:water-percent", - "reset_filter_life": "mdi:refresh" + "set_humidity": { + "service": "mdi:water-percent" + }, + "reset_filter_life": { + "service": "mdi:refresh" + } } } diff --git a/homeassistant/components/wemo/light.py b/homeassistant/components/wemo/light.py index 26dec417631..6068cd3ff0b 100644 --- a/homeassistant/components/wemo/light.py +++ b/homeassistant/components/wemo/light.py @@ -8,9 +8,11 @@ from pywemo import Bridge, BridgeLight, Dimmer from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, LightEntityFeature, @@ -77,6 +79,8 @@ def async_setup_bridge( class WemoLight(WemoEntity, LightEntity): """Representation of a WeMo light.""" + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN _attr_supported_features = LightEntityFeature.TRANSITION def __init__(self, coordinator: DeviceCoordinator, light: BridgeLight) -> None: @@ -123,9 +127,11 @@ class WemoLight(WemoEntity, LightEntity): return self.light.state.get("color_xy") @property - def color_temp(self) -> int | None: - """Return the color temperature of this light in mireds.""" - return self.light.state.get("temperature_mireds") + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + if not (mireds := self.light.state.get("temperature_mireds")): + return None + return color_util.color_temperature_mired_to_kelvin(mireds) @property def color_mode(self) -> ColorMode: @@ -165,7 +171,7 @@ class WemoLight(WemoEntity, LightEntity): xy_color = None brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness or 255) - color_temp = kwargs.get(ATTR_COLOR_TEMP) + color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN) hs_color = kwargs.get(ATTR_HS_COLOR) transition_time = int(kwargs.get(ATTR_TRANSITION, 0)) @@ -182,9 +188,9 @@ class WemoLight(WemoEntity, LightEntity): if xy_color is not None: self.light.set_color(xy_color, transition=transition_time) - if color_temp is not None: + if color_temp_kelvin is not None: self.light.set_temperature( - mireds=color_temp, transition=transition_time + kelvin=color_temp_kelvin, transition=transition_time ) self.light.turn_on(**turn_on_kwargs) diff --git a/homeassistant/components/whirlpool/__init__.py b/homeassistant/components/whirlpool/__init__.py index 36f8fbec59d..64adcda4742 100644 
--- a/homeassistant/components/whirlpool/__init__.py +++ b/homeassistant/components/whirlpool/__init__.py @@ -20,8 +20,10 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.CLIMATE, Platform.SENSOR] +type WhirlpoolConfigEntry = ConfigEntry[WhirlpoolData] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: WhirlpoolConfigEntry) -> bool: """Set up Whirlpool Sixth Sense from a config entry.""" hass.data.setdefault(DOMAIN, {}) @@ -47,21 +49,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.error("Cannot fetch appliances") return False - hass.data[DOMAIN][entry.entry_id] = WhirlpoolData( - appliances_manager, auth, backend_selector - ) + entry.runtime_data = WhirlpoolData(appliances_manager, auth, backend_selector) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: WhirlpoolConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @dataclass diff --git a/homeassistant/components/whirlpool/climate.py b/homeassistant/components/whirlpool/climate.py index aa399746006..943c5d1c956 100644 --- a/homeassistant/components/whirlpool/climate.py +++ b/homeassistant/components/whirlpool/climate.py @@ -23,7 +23,6 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -31,7 +30,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WhirlpoolData +from . 
import WhirlpoolConfigEntry from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -70,11 +69,11 @@ SUPPORTED_TARGET_TEMPERATURE_STEP = 1 async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: WhirlpoolConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry.""" - whirlpool_data: WhirlpoolData = hass.data[DOMAIN][config_entry.entry_id] + whirlpool_data = config_entry.runtime_data aircons = [ AirConEntity( @@ -110,7 +109,6 @@ class AirConEntity(ClimateEntity): _attr_swing_modes = SUPPORTED_SWING_MODES _attr_target_temperature_step = SUPPORTED_TARGET_TEMPERATURE_STEP _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/whirlpool/config_flow.py b/homeassistant/components/whirlpool/config_flow.py index 7c39b1fbb29..069a5ca1e4f 100644 --- a/homeassistant/components/whirlpool/config_flow.py +++ b/homeassistant/components/whirlpool/config_flow.py @@ -12,7 +12,7 @@ from whirlpool.appliancesmanager import AppliancesManager from whirlpool.auth import Auth from whirlpool.backendselector import BackendSelector -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -71,14 +71,11 @@ class WhirlpoolConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Whirlpool Sixth Sense.""" VERSION = 1 - entry: ConfigEntry | None async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Whirlpool Sixth Sense.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -88,10 +85,10 @@ class WhirlpoolConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input: - assert self.entry is not None + reauth_entry = self._get_reauth_entry() password = user_input[CONF_PASSWORD] brand = user_input[CONF_BRAND] - data = {**self.entry.data, CONF_PASSWORD: password, CONF_BRAND: brand} + data = {**reauth_entry.data, CONF_PASSWORD: password, CONF_BRAND: brand} try: await validate_input(self.hass, data) @@ -100,9 +97,7 @@ class WhirlpoolConfigFlow(ConfigFlow, domain=DOMAIN): except (CannotConnect, TimeoutError): errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry(self.entry, data=data) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/whirlpool/diagnostics.py b/homeassistant/components/whirlpool/diagnostics.py index 9b1dd00e7bd..87d6ea827e2 100644 --- a/homeassistant/components/whirlpool/diagnostics.py +++ b/homeassistant/components/whirlpool/diagnostics.py @@ -5,11 +5,9 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import WhirlpoolData -from .const import DOMAIN +from . 
import WhirlpoolConfigEntry TO_REDACT = { "SERIAL_NUMBER", @@ -24,11 +22,11 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: WhirlpoolConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - whirlpool: WhirlpoolData = hass.data[DOMAIN][config_entry.entry_id] + whirlpool = config_entry.runtime_data diagnostics_data = { "Washer_dryers": { wd["NAME"]: dict(wd.items()) diff --git a/homeassistant/components/whirlpool/sensor.py b/homeassistant/components/whirlpool/sensor.py index 8c74f01298e..b84518cedf1 100644 --- a/homeassistant/components/whirlpool/sensor.py +++ b/homeassistant/components/whirlpool/sensor.py @@ -15,7 +15,6 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceInfo @@ -23,7 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.util.dt import utcnow -from . import WhirlpoolData +from . import WhirlpoolConfigEntry from .const import DOMAIN TANK_FILL = { @@ -132,12 +131,12 @@ SENSOR_TIMER: tuple[SensorEntityDescription] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: WhirlpoolConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Config flow entry for Whrilpool Laundry.""" entities: list = [] - whirlpool_data: WhirlpoolData = hass.data[DOMAIN][config_entry.entry_id] + whirlpool_data = config_entry.runtime_data for appliance in whirlpool_data.appliances_manager.washer_dryers: _wd = WasherDryer( whirlpool_data.backend_selector, diff --git a/homeassistant/components/whirlpool/strings.json b/homeassistant/components/whirlpool/strings.json index 4b4673b771e..09257652ece 100644 --- a/homeassistant/components/whirlpool/strings.json +++ b/homeassistant/components/whirlpool/strings.json @@ -27,7 +27,8 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", diff --git a/homeassistant/components/whois/__init__.py b/homeassistant/components/whois/__init__.py index b9f5938d93b..07116825f29 100644 --- a/homeassistant/components/whois/__init__.py +++ b/homeassistant/components/whois/__init__.py @@ -35,6 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator: DataUpdateCoordinator[Domain | None] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_APK", update_interval=SCAN_INTERVAL, update_method=_async_query_domain, diff --git a/homeassistant/components/wiffi/__init__.py b/homeassistant/components/wiffi/__init__.py index c465bc0d2ca..6cf216011f2 100644 --- a/homeassistant/components/wiffi/__init__.py +++ b/homeassistant/components/wiffi/__init__.py @@ -7,26 +7,19 @@ import logging from wiffi import WiffiTcpServer from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PORT, CONF_TIMEOUT, Platform +from homeassistant.const import CONF_PORT, Platform from 
homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval -from homeassistant.util.dt import utcnow from .const import ( CHECK_ENTITIES_SIGNAL, CREATE_ENTITY_SIGNAL, - DEFAULT_TIMEOUT, DOMAIN, UPDATE_ENTITY_SIGNAL, ) +from .entity import generate_unique_id _LOGGER = logging.getLogger(__name__) @@ -78,11 +71,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -def generate_unique_id(device, metric): - """Generate a unique string for the entity.""" - return f"{device.mac_address.replace(':', '')}-{metric.name}" - - class WiffiIntegrationApi: """API object for wiffi handling. Stored in hass.data.""" @@ -135,78 +123,3 @@ class WiffiIntegrationApi: def _periodic_tick(self, now=None): """Check if any entity has timed out because it has not been updated.""" async_dispatcher_send(self._hass, CHECK_ENTITIES_SIGNAL) - - -class WiffiEntity(Entity): - """Common functionality for all wiffi entities.""" - - _attr_should_poll = False - - def __init__(self, device, metric, options): - """Initialize the base elements of a wiffi entity.""" - self._id = generate_unique_id(device, metric) - self._attr_unique_id = self._id - self._attr_device_info = DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, device.mac_address)}, - identifiers={(DOMAIN, device.mac_address)}, - manufacturer="stall.biz", - model=device.moduletype, - name=f"{device.moduletype} {device.mac_address}", - sw_version=device.sw_version, - configuration_url=device.configuration_url, - ) - self._attr_name = metric.description - self._expiration_date = None - self._value = None - self._timeout = options.get(CONF_TIMEOUT, DEFAULT_TIMEOUT) - - async def async_added_to_hass(self): - """Entity has been added to hass.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{UPDATE_ENTITY_SIGNAL}-{self._id}", - self._update_value_callback, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, CHECK_ENTITIES_SIGNAL, self._check_expiration_date - ) - ) - - def reset_expiration_date(self): - """Reset value expiration date. - - Will be called by derived classes after a value update has been received. - """ - self._expiration_date = utcnow() + timedelta(minutes=self._timeout) - - @callback - def _update_value_callback(self, device, metric): - """Update the value of the entity.""" - - @callback - def _check_expiration_date(self): - """Periodically check if entity value has been updated. - - If there are no more updates from the wiffi device, the value will be - set to unavailable. 
- """ - if ( - self._value is not None - and self._expiration_date is not None - and utcnow() > self._expiration_date - ): - self._value = None - self.async_write_ha_state() - - def _is_measurement_entity(self): - """Measurement entities have a value in present time.""" - return ( - not self._attr_name.endswith("_gestern") and not self._is_metered_entity() - ) - - def _is_metered_entity(self): - """Metered entities have a value that keeps increasing until reset.""" - return self._attr_name.endswith("_pro_h") or self._attr_name.endswith("_heute") diff --git a/homeassistant/components/wiffi/binary_sensor.py b/homeassistant/components/wiffi/binary_sensor.py index 80088f373b4..b7431b2555c 100644 --- a/homeassistant/components/wiffi/binary_sensor.py +++ b/homeassistant/components/wiffi/binary_sensor.py @@ -6,8 +6,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WiffiEntity from .const import CREATE_ENTITY_SIGNAL +from .entity import WiffiEntity async def async_setup_entry( diff --git a/homeassistant/components/wiffi/config_flow.py b/homeassistant/components/wiffi/config_flow.py index 6e4872ea400..308923597cd 100644 --- a/homeassistant/components/wiffi/config_flow.py +++ b/homeassistant/components/wiffi/config_flow.py @@ -34,7 +34,7 @@ class WiffiFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Create Wiffi server setup option flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -79,11 +79,9 @@ class WiffiFlowHandler(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Wiffi server setup option flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, int] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/wiffi/entity.py b/homeassistant/components/wiffi/entity.py new file mode 100644 index 00000000000..fd774c930c8 --- /dev/null +++ b/homeassistant/components/wiffi/entity.py @@ -0,0 +1,93 @@ +"""Component for wiffi support.""" + +from datetime import timedelta + +from homeassistant.const import CONF_TIMEOUT +from homeassistant.core import callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity +from homeassistant.util.dt import utcnow + +from .const import CHECK_ENTITIES_SIGNAL, DEFAULT_TIMEOUT, DOMAIN, UPDATE_ENTITY_SIGNAL + + +def generate_unique_id(device, metric): + """Generate a unique string for the entity.""" + return f"{device.mac_address.replace(':', '')}-{metric.name}" + + +class WiffiEntity(Entity): + """Common functionality for all wiffi entities.""" + + _attr_should_poll = False + + def __init__(self, device, metric, options): + """Initialize the base elements of a wiffi entity.""" + self._id = generate_unique_id(device, metric) + self._attr_unique_id = self._id + self._attr_device_info = DeviceInfo( + connections={(dr.CONNECTION_NETWORK_MAC, 
device.mac_address)}, + identifiers={(DOMAIN, device.mac_address)}, + manufacturer="stall.biz", + model=device.moduletype, + name=f"{device.moduletype} {device.mac_address}", + sw_version=device.sw_version, + configuration_url=device.configuration_url, + ) + self._attr_name = metric.description + self._expiration_date = None + self._value = None + self._timeout = options.get(CONF_TIMEOUT, DEFAULT_TIMEOUT) + + async def async_added_to_hass(self): + """Entity has been added to hass.""" + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{UPDATE_ENTITY_SIGNAL}-{self._id}", + self._update_value_callback, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, CHECK_ENTITIES_SIGNAL, self._check_expiration_date + ) + ) + + def reset_expiration_date(self): + """Reset value expiration date. + + Will be called by derived classes after a value update has been received. + """ + self._expiration_date = utcnow() + timedelta(minutes=self._timeout) + + @callback + def _update_value_callback(self, device, metric): + """Update the value of the entity.""" + + @callback + def _check_expiration_date(self): + """Periodically check if entity value has been updated. + + If there are no more updates from the wiffi device, the value will be + set to unavailable. + """ + if ( + self._value is not None + and self._expiration_date is not None + and utcnow() > self._expiration_date + ): + self._value = None + self.async_write_ha_state() + + def _is_measurement_entity(self): + """Measurement entities have a value in present time.""" + return ( + not self._attr_name.endswith("_gestern") and not self._is_metered_entity() + ) + + def _is_metered_entity(self): + """Metered entities have a value that keeps increasing until reset.""" + return self._attr_name.endswith("_pro_h") or self._attr_name.endswith("_heute") diff --git a/homeassistant/components/wiffi/sensor.py b/homeassistant/components/wiffi/sensor.py index cf8cf8719c3..699a760685a 100644 --- a/homeassistant/components/wiffi/sensor.py +++ b/homeassistant/components/wiffi/sensor.py @@ -11,8 +11,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WiffiEntity from .const import CREATE_ENTITY_SIGNAL +from .entity import WiffiEntity from .wiffi_strings import ( WIFFI_UOM_DEGREE, WIFFI_UOM_LUX, diff --git a/homeassistant/components/wilight/__init__.py b/homeassistant/components/wilight/__init__.py index 067197c8a14..5242f84ab93 100644 --- a/homeassistant/components/wilight/__init__.py +++ b/homeassistant/components/wilight/__init__.py @@ -1,20 +1,13 @@ """The WiLight integration.""" -from typing import Any - -from pywilight.wilight_device import PyWiLightDevice - from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity +from .const import DOMAIN from .parent_device import WiLightParent -DOMAIN = "wilight" - # List the platforms that you want to support. 
PLATFORMS = [Platform.COVER, Platform.FAN, Platform.LIGHT, Platform.SWITCH] @@ -48,51 +41,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: del hass.data[DOMAIN][entry.entry_id] return unload_ok - - -class WiLightDevice(Entity): - """Representation of a WiLight device. - - Contains the common logic for WiLight entities. - """ - - _attr_should_poll = False - _attr_has_entity_name = True - - def __init__(self, api_device: PyWiLightDevice, index: str, item_name: str) -> None: - """Initialize the device.""" - # WiLight specific attributes for every component type - self._device_id = api_device.device_id - self._client = api_device.client - self._index = index - self._status: dict[str, Any] = {} - - self._attr_unique_id = f"{self._device_id}_{index}" - self._attr_device_info = DeviceInfo( - name=item_name, - identifiers={(DOMAIN, self._attr_unique_id)}, - model=api_device.model, - manufacturer="WiLight", - sw_version=api_device.swversion, - via_device=(DOMAIN, self._device_id), - ) - - @property - def available(self) -> bool: - """Return True if entity is available.""" - return bool(self._client.is_connected) - - @callback - def handle_event_callback(self, states: dict[str, Any]) -> None: - """Propagate changes through ha.""" - self._status = states - self.async_write_ha_state() - - async def async_update(self) -> None: - """Synchronize state with api_device.""" - await self._client.status(self._index) - - async def async_added_to_hass(self) -> None: - """Register update callback.""" - self._client.register_status_callback(self.handle_event_callback, self._index) - await self._client.status(self._index) diff --git a/homeassistant/components/wilight/config_flow.py b/homeassistant/components/wilight/config_flow.py index babc011fc35..74663d61d8f 100644 --- a/homeassistant/components/wilight/config_flow.py +++ b/homeassistant/components/wilight/config_flow.py @@ -1,5 +1,6 @@ """Config flow to configure WiLight.""" +from typing import Any from urllib.parse import urlparse import pywilight @@ -8,7 +9,7 @@ from homeassistant.components import ssdp from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST -from . 
import DOMAIN +from .const import DOMAIN CONF_SERIAL_NUMBER = "serial_number" CONF_MODEL_NAME = "model_name" @@ -24,11 +25,12 @@ class WiLightFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _title: str + def __init__(self) -> None: """Initialize the WiLight flow.""" self._host = None self._serial_number = None - self._title = None self._model_name = None self._wilight_components: list[str] = [] self._components_text = "" @@ -89,7 +91,9 @@ class WiLightFlowHandler(ConfigFlow, domain=DOMAIN): self.context["title_placeholders"] = {"name": self._title} return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user-confirmation of discovered WiLight.""" if user_input is not None: return self._get_entry() diff --git a/homeassistant/components/wilight/const.py b/homeassistant/components/wilight/const.py new file mode 100644 index 00000000000..29de5093b70 --- /dev/null +++ b/homeassistant/components/wilight/const.py @@ -0,0 +1,3 @@ +"""The WiLight integration.""" + +DOMAIN = "wilight" diff --git a/homeassistant/components/wilight/cover.py b/homeassistant/components/wilight/cover.py index 4ae4692db40..8a5cb45d909 100644 --- a/homeassistant/components/wilight/cover.py +++ b/homeassistant/components/wilight/cover.py @@ -20,7 +20,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN, WiLightDevice +from .const import DOMAIN +from .entity import WiLightDevice from .parent_device import WiLightParent diff --git a/homeassistant/components/wilight/entity.py b/homeassistant/components/wilight/entity.py new file mode 100644 index 00000000000..b8edf44b495 --- /dev/null +++ b/homeassistant/components/wilight/entity.py @@ -0,0 +1,59 @@ +"""The WiLight integration.""" + +from typing import Any + +from pywilight.wilight_device import PyWiLightDevice + +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + + +class WiLightDevice(Entity): + """Representation of a WiLight device. + + Contains the common logic for WiLight entities. 
+ """ + + _attr_should_poll = False + _attr_has_entity_name = True + + def __init__(self, api_device: PyWiLightDevice, index: str, item_name: str) -> None: + """Initialize the device.""" + # WiLight specific attributes for every component type + self._device_id = api_device.device_id + self._client = api_device.client + self._index = index + self._status: dict[str, Any] = {} + + self._attr_unique_id = f"{self._device_id}_{index}" + self._attr_device_info = DeviceInfo( + name=item_name, + identifiers={(DOMAIN, self._attr_unique_id)}, + model=api_device.model, + manufacturer="WiLight", + sw_version=api_device.swversion, + via_device=(DOMAIN, self._device_id), + ) + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return bool(self._client.is_connected) + + @callback + def handle_event_callback(self, states: dict[str, Any]) -> None: + """Propagate changes through ha.""" + self._status = states + self.async_write_ha_state() + + async def async_update(self) -> None: + """Synchronize state with api_device.""" + await self._client.status(self._index) + + async def async_added_to_hass(self) -> None: + """Register update callback.""" + self._client.register_status_callback(self.handle_event_callback, self._index) + await self._client.status(self._index) diff --git a/homeassistant/components/wilight/fan.py b/homeassistant/components/wilight/fan.py index 71559658c35..a14198e3b5d 100644 --- a/homeassistant/components/wilight/fan.py +++ b/homeassistant/components/wilight/fan.py @@ -25,7 +25,8 @@ from homeassistant.util.percentage import ( percentage_to_ordered_list_item, ) -from . import DOMAIN, WiLightDevice +from .const import DOMAIN +from .entity import WiLightDevice from .parent_device import WiLightParent ORDERED_NAMED_FAN_SPEEDS = [WL_SPEED_LOW, WL_SPEED_MEDIUM, WL_SPEED_HIGH] @@ -63,7 +64,6 @@ class WiLightFan(WiLightDevice, FanEntity): | FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF ) - _enable_turn_on_off_backwards_compatibility = False def __init__(self, api_device: PyWiLightDevice, index: str, item_name: str) -> None: """Initialize the device.""" diff --git a/homeassistant/components/wilight/icons.json b/homeassistant/components/wilight/icons.json index 3c5d0112de1..48bcae2a301 100644 --- a/homeassistant/components/wilight/icons.json +++ b/homeassistant/components/wilight/icons.json @@ -10,8 +10,14 @@ } }, "services": { - "set_watering_time": "mdi:timer", - "set_pause_time": "mdi:timer-pause", - "set_trigger": "mdi:gesture-tap-button" + "set_watering_time": { + "service": "mdi:timer" + }, + "set_pause_time": { + "service": "mdi:timer-pause" + }, + "set_trigger": { + "service": "mdi:gesture-tap-button" + } } } diff --git a/homeassistant/components/wilight/light.py b/homeassistant/components/wilight/light.py index 1a51ecd884e..fbe2499798d 100644 --- a/homeassistant/components/wilight/light.py +++ b/homeassistant/components/wilight/light.py @@ -17,7 +17,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN, WiLightDevice +from .const import DOMAIN +from .entity import WiLightDevice from .parent_device import WiLightParent diff --git a/homeassistant/components/wilight/manifest.json b/homeassistant/components/wilight/manifest.json index 8da0ffd9241..7f7e16d55fb 100644 --- a/homeassistant/components/wilight/manifest.json +++ b/homeassistant/components/wilight/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/wilight", "iot_class": "local_polling", "loggers": ["pywilight"], - "quality_scale": "silver", "requirements": ["pywilight==0.0.74"], "ssdp": [ { diff --git a/homeassistant/components/wilight/parent_device.py b/homeassistant/components/wilight/parent_device.py index 6e96274f0a4..6e71649d8fc 100644 --- a/homeassistant/components/wilight/parent_device.py +++ b/homeassistant/components/wilight/parent_device.py @@ -78,7 +78,7 @@ class WiLightParent: EVENT_HOMEASSISTANT_STOP, lambda x: client.stop() ) - _LOGGER.info("Connected to WiLight device: %s", api_device.device_id) + _LOGGER.debug("Connected to WiLight device: %s", api_device.device_id) await connect(api_device) diff --git a/homeassistant/components/wilight/switch.py b/homeassistant/components/wilight/switch.py index 94e39492626..f2a1ce8b0c5 100644 --- a/homeassistant/components/wilight/switch.py +++ b/homeassistant/components/wilight/switch.py @@ -14,7 +14,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN, WiLightDevice +from .const import DOMAIN +from .entity import WiLightDevice from .parent_device import WiLightParent from .support import wilight_to_hass_trigger, wilight_trigger as wl_trigger diff --git a/homeassistant/components/wirelesstag/__init__.py b/homeassistant/components/wirelesstag/__init__.py index 710255153c2..a32e940073b 100644 --- a/homeassistant/components/wirelesstag/__init__.py +++ b/homeassistant/components/wirelesstag/__init__.py @@ -6,50 +6,23 @@ from requests.exceptions import ConnectTimeout, HTTPError import voluptuous as vol from wirelesstagpy import WirelessTags from wirelesstagpy.exceptions import WirelessTagsException -from wirelesstagpy.sensortag import SensorTag from homeassistant.components import persistent_notification -from homeassistant.const import ( - ATTR_BATTERY_LEVEL, - ATTR_VOLTAGE, - CONF_PASSWORD, - CONF_USERNAME, - PERCENTAGE, - SIGNAL_STRENGTH_DECIBELS_MILLIWATT, - UnitOfElectricPotential, -) +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import dispatcher_send -from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType +from .const import DOMAIN, SIGNAL_BINARY_EVENT_UPDATE, SIGNAL_TAG_UPDATE + _LOGGER = logging.getLogger(__name__) - -# Strength of signal in dBm -ATTR_TAG_SIGNAL_STRENGTH = "signal_strength" -# Indicates if tag is out of range or not -ATTR_TAG_OUT_OF_RANGE = "out_of_range" -# Number in percents from max power of tag receiver -ATTR_TAG_POWER_CONSUMPTION = "power_consumption" - - NOTIFICATION_ID = "wirelesstag_notification" NOTIFICATION_TITLE = "Wireless Sensor Tag Setup" -DOMAIN = "wirelesstag" DEFAULT_ENTITY_NAMESPACE = "wirelesstag" -# Template for signal - first parameter is tag_id, -# second, tag manager mac address -SIGNAL_TAG_UPDATE = 
"wirelesstag.tag_info_updated_{}_{}" - -# Template for signal - tag_id, sensor type and -# tag manager mac address -SIGNAL_BINARY_EVENT_UPDATE = "wirelesstag.binary_event_updated_{}_{}_{}" - CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( @@ -129,22 +102,6 @@ class WirelessTagPlatform: self.api.start_monitoring(push_callback) -def async_migrate_unique_id( - hass: HomeAssistant, tag: SensorTag, domain: str, key: str -) -> None: - """Migrate old unique id to new one with use of tag's uuid.""" - registry = er.async_get(hass) - new_unique_id = f"{tag.uuid}_{key}" - - if registry.async_get_entity_id(domain, DOMAIN, new_unique_id): - return - - old_unique_id = f"{tag.tag_id}_{key}" - if entity_id := registry.async_get_entity_id(domain, DOMAIN, old_unique_id): - _LOGGER.debug("Updating unique id for %s %s", key, entity_id) - registry.async_update_entity(entity_id, new_unique_id=new_unique_id) - - def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Wireless Sensor Tag component.""" conf = config[DOMAIN] @@ -169,76 +126,3 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: return False return True - - -class WirelessTagBaseSensor(Entity): - """Base class for HA implementation for Wireless Sensor Tag.""" - - def __init__(self, api, tag): - """Initialize a base sensor for Wireless Sensor Tag platform.""" - self._api = api - self._tag = tag - self._uuid = self._tag.uuid - self.tag_id = self._tag.tag_id - self.tag_manager_mac = self._tag.tag_manager_mac - self._name = self._tag.name - self._state = None - - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def principal_value(self): - """Return base value. - - Subclasses need override based on type of sensor. - """ - return 0 - - def updated_state_value(self): - """Return formatted value. - - The default implementation formats principal value. - """ - return self.decorate_value(self.principal_value) - - def decorate_value(self, value): - """Decorate input value to be well presented for end user.""" - return f"{value:.1f}" - - @property - def available(self): - """Return True if entity is available.""" - return self._tag.is_alive - - def update(self): - """Update state.""" - if not self.should_poll: - return - - updated_tags = self._api.load_tags() - if (updated_tag := updated_tags[self._uuid]) is None: - _LOGGER.error('Unable to update tag: "%s"', self.name) - return - - self._tag = updated_tag - self._state = self.updated_state_value() - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - return { - ATTR_BATTERY_LEVEL: int(self._tag.battery_remaining * 100), - ATTR_VOLTAGE: ( - f"{self._tag.battery_volts:.2f}{UnitOfElectricPotential.VOLT}" - ), - ATTR_TAG_SIGNAL_STRENGTH: ( - f"{self._tag.signal_strength}{SIGNAL_STRENGTH_DECIBELS_MILLIWATT}" - ), - ATTR_TAG_OUT_OF_RANGE: not self._tag.is_in_range, - ATTR_TAG_POWER_CONSUMPTION: ( - f"{self._tag.power_consumption:.2f}{PERCENTAGE}" - ), - } diff --git a/homeassistant/components/wirelesstag/binary_sensor.py b/homeassistant/components/wirelesstag/binary_sensor.py index 052f6547dd2..9e8075dd874 100644 --- a/homeassistant/components/wirelesstag/binary_sensor.py +++ b/homeassistant/components/wirelesstag/binary_sensor.py @@ -15,12 +15,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import ( - DOMAIN as WIRELESSTAG_DOMAIN, - SIGNAL_BINARY_EVENT_UPDATE, - WirelessTagBaseSensor, - async_migrate_unique_id, -) +from .const import DOMAIN, SIGNAL_BINARY_EVENT_UPDATE +from .entity import WirelessTagBaseSensor +from .util import async_migrate_unique_id # On means in range, Off means out of range SENSOR_PRESENCE = "presence" @@ -84,7 +81,7 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the platform for a WirelessTags.""" - platform = hass.data[WIRELESSTAG_DOMAIN] + platform = hass.data[DOMAIN] sensors = [] tags = platform.tags diff --git a/homeassistant/components/wirelesstag/const.py b/homeassistant/components/wirelesstag/const.py new file mode 100644 index 00000000000..c1384606bf1 --- /dev/null +++ b/homeassistant/components/wirelesstag/const.py @@ -0,0 +1,11 @@ +"""Support for Wireless Sensor Tags.""" + +DOMAIN = "wirelesstag" + +# Template for signal - first parameter is tag_id, +# second, tag manager mac address +SIGNAL_TAG_UPDATE = "wirelesstag.tag_info_updated_{}_{}" + +# Template for signal - tag_id, sensor type and +# tag manager mac address +SIGNAL_BINARY_EVENT_UPDATE = "wirelesstag.binary_event_updated_{}_{}_{}" diff --git a/homeassistant/components/wirelesstag/entity.py b/homeassistant/components/wirelesstag/entity.py new file mode 100644 index 00000000000..31f8ee99d0d --- /dev/null +++ b/homeassistant/components/wirelesstag/entity.py @@ -0,0 +1,95 @@ +"""Support for Wireless Sensor Tags.""" + +import logging + +from homeassistant.const import ( + ATTR_BATTERY_LEVEL, + ATTR_VOLTAGE, + PERCENTAGE, + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + UnitOfElectricPotential, +) +from homeassistant.helpers.entity import Entity + +_LOGGER = logging.getLogger(__name__) + + +# Strength of signal in dBm +ATTR_TAG_SIGNAL_STRENGTH = "signal_strength" +# Indicates if tag is out of range or not +ATTR_TAG_OUT_OF_RANGE = "out_of_range" +# Number in percents from max power of tag receiver +ATTR_TAG_POWER_CONSUMPTION = "power_consumption" + + +class WirelessTagBaseSensor(Entity): + """Base class for HA implementation for Wireless Sensor Tag.""" + + def __init__(self, api, tag): + """Initialize a base sensor for Wireless Sensor Tag platform.""" + self._api = api + self._tag = tag + self._uuid = self._tag.uuid + self.tag_id = self._tag.tag_id + self.tag_manager_mac = self._tag.tag_manager_mac + self._name = self._tag.name + self._state = None + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def principal_value(self): + """Return base value. + + Subclasses need override based on type of sensor. + """ + return 0 + + def updated_state_value(self): + """Return formatted value. + + The default implementation formats principal value. 
+ """ + return self.decorate_value(self.principal_value) + + def decorate_value(self, value): + """Decorate input value to be well presented for end user.""" + return f"{value:.1f}" + + @property + def available(self): + """Return True if entity is available.""" + return self._tag.is_alive + + def update(self): + """Update state.""" + if not self.should_poll: + return + + updated_tags = self._api.load_tags() + if (updated_tag := updated_tags[self._uuid]) is None: + _LOGGER.error('Unable to update tag: "%s"', self.name) + return + + self._tag = updated_tag + self._state = self.updated_state_value() + + @property + def extra_state_attributes(self): + """Return the state attributes.""" + return { + ATTR_BATTERY_LEVEL: int(self._tag.battery_remaining * 100), + ATTR_VOLTAGE: ( + f"{self._tag.battery_volts:.2f}{UnitOfElectricPotential.VOLT}" + ), + ATTR_TAG_SIGNAL_STRENGTH: ( + f"{self._tag.signal_strength}{SIGNAL_STRENGTH_DECIBELS_MILLIWATT}" + ), + ATTR_TAG_OUT_OF_RANGE: not self._tag.is_in_range, + ATTR_TAG_POWER_CONSUMPTION: ( + f"{self._tag.power_consumption:.2f}{PERCENTAGE}" + ), + } diff --git a/homeassistant/components/wirelesstag/manifest.json b/homeassistant/components/wirelesstag/manifest.json index 9735c833453..1ff9403d3bc 100644 --- a/homeassistant/components/wirelesstag/manifest.json +++ b/homeassistant/components/wirelesstag/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/wirelesstag", "iot_class": "cloud_push", "loggers": ["wirelesstagpy"], + "quality_scale": "legacy", "requirements": ["wirelesstagpy==0.8.1"] } diff --git a/homeassistant/components/wirelesstag/sensor.py b/homeassistant/components/wirelesstag/sensor.py index 87906bdc2ae..7a3cbe5efe2 100644 --- a/homeassistant/components/wirelesstag/sensor.py +++ b/homeassistant/components/wirelesstag/sensor.py @@ -20,12 +20,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import ( - DOMAIN as WIRELESSTAG_DOMAIN, - SIGNAL_TAG_UPDATE, - WirelessTagBaseSensor, - async_migrate_unique_id, -) +from .const import DOMAIN, SIGNAL_TAG_UPDATE +from .entity import WirelessTagBaseSensor +from .util import async_migrate_unique_id _LOGGER = logging.getLogger(__name__) @@ -81,7 +78,7 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the sensor platform.""" - platform = hass.data[WIRELESSTAG_DOMAIN] + platform = hass.data[DOMAIN] sensors = [] tags = platform.tags for tag in tags.values(): @@ -113,9 +110,7 @@ class WirelessTagSensor(WirelessTagBaseSensor, SensorEntity): # sensor.wirelesstag_bedroom_temperature # and not as sensor.bedroom for temperature and # sensor.bedroom_2 for humidity - self.entity_id = ( - f"sensor.{WIRELESSTAG_DOMAIN}_{self.underscored_name}_{self._sensor_type}" - ) + self.entity_id = f"sensor.{DOMAIN}_{self.underscored_name}_{self._sensor_type}" async def async_added_to_hass(self) -> None: """Register callbacks.""" diff --git a/homeassistant/components/wirelesstag/switch.py b/homeassistant/components/wirelesstag/switch.py index 239461df4ea..cae5d63988c 100644 --- a/homeassistant/components/wirelesstag/switch.py +++ b/homeassistant/components/wirelesstag/switch.py @@ -17,11 +17,9 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ( - DOMAIN as WIRELESSTAG_DOMAIN, - WirelessTagBaseSensor, - async_migrate_unique_id, -) +from .const import DOMAIN +from .entity import WirelessTagBaseSensor +from .util import async_migrate_unique_id SWITCH_TYPES: tuple[SwitchEntityDescription, ...] = ( SwitchEntityDescription( @@ -64,7 +62,7 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up switches for a Wireless Sensor Tags.""" - platform = hass.data[WIRELESSTAG_DOMAIN] + platform = hass.data[DOMAIN] tags = platform.load_tags() monitored_conditions = config[CONF_MONITORED_CONDITIONS] diff --git a/homeassistant/components/wirelesstag/util.py b/homeassistant/components/wirelesstag/util.py new file mode 100644 index 00000000000..1b5d6551fc4 --- /dev/null +++ b/homeassistant/components/wirelesstag/util.py @@ -0,0 +1,28 @@ +"""Support for Wireless Sensor Tags.""" + +import logging + +from wirelesstagpy.sensortag import SensorTag + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +def async_migrate_unique_id( + hass: HomeAssistant, tag: SensorTag, domain: str, key: str +) -> None: + """Migrate old unique id to new one with use of tag's uuid.""" + registry = er.async_get(hass) + new_unique_id = f"{tag.uuid}_{key}" + + if registry.async_get_entity_id(domain, DOMAIN, new_unique_id): + return + + old_unique_id = f"{tag.tag_id}_{key}" + if entity_id := registry.async_get_entity_id(domain, DOMAIN, old_unique_id): + _LOGGER.debug("Updating unique id for %s %s", key, entity_id) + registry.async_update_entity(entity_id, new_unique_id=new_unique_id) diff --git a/homeassistant/components/withings/__init__.py b/homeassistant/components/withings/__init__.py index 908548084ae..1c196bd4b92 100644 --- a/homeassistant/components/withings/__init__.py +++ b/homeassistant/components/withings/__init__.py @@ -48,6 +48,7 @@ from .coordinator import ( WithingsActivityDataUpdateCoordinator, 
WithingsBedPresenceDataUpdateCoordinator, WithingsDataUpdateCoordinator, + WithingsDeviceDataUpdateCoordinator, WithingsGoalsDataUpdateCoordinator, WithingsMeasurementDataUpdateCoordinator, WithingsSleepDataUpdateCoordinator, @@ -73,6 +74,7 @@ class WithingsData: goals_coordinator: WithingsGoalsDataUpdateCoordinator activity_coordinator: WithingsActivityDataUpdateCoordinator workout_coordinator: WithingsWorkoutDataUpdateCoordinator + device_coordinator: WithingsDeviceDataUpdateCoordinator coordinators: set[WithingsDataUpdateCoordinator] = field(default_factory=set) def __post_init__(self) -> None: @@ -84,6 +86,7 @@ class WithingsData: self.goals_coordinator, self.activity_coordinator, self.workout_coordinator, + self.device_coordinator, } @@ -122,6 +125,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: WithingsConfigEntry) -> goals_coordinator=WithingsGoalsDataUpdateCoordinator(hass, client), activity_coordinator=WithingsActivityDataUpdateCoordinator(hass, client), workout_coordinator=WithingsWorkoutDataUpdateCoordinator(hass, client), + device_coordinator=WithingsDeviceDataUpdateCoordinator(hass, client), ) for coordinator in withings_data.coordinators: diff --git a/homeassistant/components/withings/config_flow.py b/homeassistant/components/withings/config_flow.py index 5eb4e08595a..d7f07ccc184 100644 --- a/homeassistant/components/withings/config_flow.py +++ b/homeassistant/components/withings/config_flow.py @@ -9,8 +9,8 @@ from typing import Any from aiowithings import AuthScope from homeassistant.components.webhook import async_generate_id -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult -from homeassistant.const import CONF_TOKEN, CONF_WEBHOOK_ID +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.const import CONF_NAME, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.helpers import config_entry_oauth2_flow from .const import DEFAULT_TITLE, DOMAIN @@ -23,8 +23,6 @@ class WithingsFlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -42,9 +40,6 @@ class WithingsFlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -52,14 +47,17 @@ class WithingsFlowHandler( ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - return self.async_show_form(step_id="reauth_confirm") + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={CONF_NAME: self._get_reauth_entry().title}, + ) return await self.async_step_user() async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow, or update existing entry.""" user_id = str(data[CONF_TOKEN]["userid"]) - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( @@ -67,9 +65,7 @@ class WithingsFlowHandler( data={**data, CONF_WEBHOOK_ID: async_generate_id()}, ) - if self.reauth_entry.unique_id == user_id: - return self.async_update_reload_and_abort( - self.reauth_entry, data={**self.reauth_entry.data, **data} - ) - - return self.async_abort(reason="wrong_account") + 
self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=data + ) diff --git a/homeassistant/components/withings/coordinator.py b/homeassistant/components/withings/coordinator.py index 361a20acafd..79419ae23ff 100644 --- a/homeassistant/components/withings/coordinator.py +++ b/homeassistant/components/withings/coordinator.py @@ -8,6 +8,7 @@ from typing import TYPE_CHECKING from aiowithings import ( Activity, + Device, Goals, MeasurementPosition, MeasurementType, @@ -291,3 +292,17 @@ class WithingsWorkoutDataUpdateCoordinator( self._previous_data = latest_workout self._last_valid_update = latest_workout.end_date return self._previous_data + + +class WithingsDeviceDataUpdateCoordinator( + WithingsDataUpdateCoordinator[dict[str, Device]] +): + """Withings device coordinator.""" + + coordinator_name: str = "device" + _default_update_interval = timedelta(hours=1) + + async def _internal_update_data(self) -> dict[str, Device]: + """Update coordinator data.""" + devices = await self._client.get_devices() + return {device.device_id: device for device in devices} diff --git a/homeassistant/components/withings/entity.py b/homeassistant/components/withings/entity.py index a5cb62b72a2..5c548fdb260 100644 --- a/homeassistant/components/withings/entity.py +++ b/homeassistant/components/withings/entity.py @@ -4,11 +4,16 @@ from __future__ import annotations from typing import Any +from aiowithings import Device + from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .coordinator import WithingsDataUpdateCoordinator +from .coordinator import ( + WithingsDataUpdateCoordinator, + WithingsDeviceDataUpdateCoordinator, +) class WithingsEntity[_T: WithingsDataUpdateCoordinator[Any]](CoordinatorEntity[_T]): @@ -28,3 +33,35 @@ class WithingsEntity[_T: WithingsDataUpdateCoordinator[Any]](CoordinatorEntity[_ identifiers={(DOMAIN, str(coordinator.config_entry.unique_id))}, manufacturer="Withings", ) + + +class WithingsDeviceEntity(WithingsEntity[WithingsDeviceDataUpdateCoordinator]): + """Base class for withings device entities.""" + + def __init__( + self, + coordinator: WithingsDeviceDataUpdateCoordinator, + device_id: str, + key: str, + ) -> None: + """Initialize the Withings entity.""" + super().__init__(coordinator, key) + self._attr_unique_id = f"{device_id}_{key}" + self.device_id = device_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device_id)}, + manufacturer="Withings", + name=self.device.raw_model, + model=self.device.raw_model, + via_device=(DOMAIN, str(coordinator.config_entry.unique_id)), + ) + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return super().available and self.device_id in self.coordinator.data + + @property + def device(self) -> Device: + """Return the Withings device.""" + return self.coordinator.data[self.device_id] diff --git a/homeassistant/components/withings/icons.json b/homeassistant/components/withings/icons.json index f6fb5e74136..8123337dc82 100644 --- a/homeassistant/components/withings/icons.json +++ b/homeassistant/components/withings/icons.json @@ -16,6 +16,9 @@ "heart_pulse": { "default": "mdi:heart-pulse" }, + "height": { + "default": "mdi:human-male-height-variant" + }, "hydration": { "default": "mdi:water" }, @@ -136,6 +139,14 @@ }, "workout_duration": { "default": "mdi:timer" + }, + "battery": { + "default": 
"mdi:battery-off", + "state": { + "low": "mdi:battery-20", + "medium": "mdi:battery-50", + "high": "mdi:battery" + } } } } diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index a7f632325a0..886eb66f5e0 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -8,6 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/withings", "iot_class": "cloud_push", "loggers": ["aiowithings"], - "quality_scale": "platinum", - "requirements": ["aiowithings==3.0.3"] + "requirements": ["aiowithings==3.1.4"] } diff --git a/homeassistant/components/withings/sensor.py b/homeassistant/components/withings/sensor.py index 20fd72845ae..1005b5995a5 100644 --- a/homeassistant/components/withings/sensor.py +++ b/homeassistant/components/withings/sensor.py @@ -9,6 +9,7 @@ from typing import Any from aiowithings import ( Activity, + Device, Goals, MeasurementPosition, MeasurementType, @@ -23,6 +24,7 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( PERCENTAGE, Platform, @@ -33,8 +35,8 @@ from homeassistant.const import ( UnitOfTime, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -import homeassistant.helpers.entity_registry as er from homeassistant.helpers.typing import StateType from homeassistant.util import dt as dt_util @@ -51,12 +53,13 @@ from .const import ( from .coordinator import ( WithingsActivityDataUpdateCoordinator, WithingsDataUpdateCoordinator, + WithingsDeviceDataUpdateCoordinator, WithingsGoalsDataUpdateCoordinator, WithingsMeasurementDataUpdateCoordinator, WithingsSleepDataUpdateCoordinator, WithingsWorkoutDataUpdateCoordinator, ) -from .entity import WithingsEntity +from .entity import WithingsDeviceEntity, WithingsEntity @dataclass(frozen=True, kw_only=True) @@ -650,6 +653,24 @@ WORKOUT_SENSORS = [ ] +@dataclass(frozen=True, kw_only=True) +class WithingsDeviceSensorEntityDescription(SensorEntityDescription): + """Immutable class for describing withings data.""" + + value_fn: Callable[[Device], StateType] + + +DEVICE_SENSORS = [ + WithingsDeviceSensorEntityDescription( + key="battery", + translation_key="battery", + options=["low", "medium", "high"], + device_class=SensorDeviceClass.ENUM, + value_fn=lambda device: device.battery, + ) +] + + def get_current_goals(goals: Goals) -> set[str]: """Return a list of present goals.""" result = set() @@ -800,9 +821,52 @@ async def async_setup_entry( _async_add_workout_entities ) + device_coordinator = withings_data.device_coordinator + + current_devices: set[str] = set() + + def _async_device_listener() -> None: + """Add device entities.""" + received_devices = set(device_coordinator.data) + new_devices = received_devices - current_devices + old_devices = current_devices - received_devices + if new_devices: + device_registry = dr.async_get(hass) + for device_id in new_devices: + if device := device_registry.async_get_device({(DOMAIN, device_id)}): + if any( + ( + config_entry := hass.config_entries.async_get_entry( + config_entry_id + ) + ) + and config_entry.state == ConfigEntryState.LOADED + for config_entry_id in device.config_entries + ): + continue + async_add_entities( + WithingsDeviceSensor(device_coordinator, description, device_id) 
+ for description in DEVICE_SENSORS + ) + current_devices.add(device_id) + + if old_devices: + device_registry = dr.async_get(hass) + for device_id in old_devices: + if device := device_registry.async_get_device({(DOMAIN, device_id)}): + device_registry.async_update_device( + device.id, remove_config_entry_id=entry.entry_id + ) + current_devices.remove(device_id) + + device_coordinator.async_add_listener(_async_device_listener) + + _async_device_listener() + if not entities: LOGGER.warning( - "No data found for Withings entry %s, sensors will be added when new data is available" + "No data found for Withings entry %s, sensors will be added when new data is available", + entry.title, ) async_add_entities(entities) @@ -923,3 +987,24 @@ class WithingsWorkoutSensor( if not self.coordinator.data: return None return self.entity_description.value_fn(self.coordinator.data) + + +class WithingsDeviceSensor(WithingsDeviceEntity, SensorEntity): + """Implementation of a Withings device sensor.""" + + entity_description: WithingsDeviceSensorEntityDescription + + def __init__( + self, + coordinator: WithingsDeviceDataUpdateCoordinator, + entity_description: WithingsDeviceSensorEntityDescription, + device_id: str, + ) -> None: + """Initialize sensor.""" + super().__init__(coordinator, device_id, entity_description.key) + self.entity_description = entity_description + + @property + def native_value(self) -> StateType: + """Return the state of the entity.""" + return self.entity_description.value_fn(self.device) diff --git a/homeassistant/components/withings/strings.json b/homeassistant/components/withings/strings.json index fb86b16c3be..775ef5cdaab 100644 --- a/homeassistant/components/withings/strings.json +++ b/homeassistant/components/withings/strings.json @@ -20,7 +20,9 @@ "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "wrong_account": "Authenticated account does not match the account to be reauthenticated. Please log in with the correct account." }, "create_entry": { "default": "Successfully authenticated with Withings."
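The device listener added to the Withings sensor platform above reduces to set bookkeeping: compare the device IDs reported by the device coordinator with the IDs already tracked, add entities for newly seen devices, and detach devices that stopped reporting. A minimal, framework-free sketch of that bookkeeping (the function name and device IDs below are illustrative, not part of the integration):

```python
def diff_devices(tracked: set[str], received: set[str]) -> tuple[set[str], set[str]]:
    """Return (new_ids, stale_ids) relative to the currently tracked device IDs."""
    return received - tracked, tracked - received


# Example: one new scale appears, one previously seen tracker disappears.
tracked = {"scale-1", "tracker-9"}
received = {"scale-1", "scale-2"}

new_ids, stale_ids = diff_devices(tracked, received)
assert new_ids == {"scale-2"}      # would get battery sensor entities added
assert stale_ids == {"tracker-9"}  # would get its device entry detached from the config entry

tracked |= new_ids
tracked -= stale_ids
```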
@@ -307,6 +309,14 @@ }, "workout_duration": { "name": "Last workout duration" + }, + "battery": { + "name": "[%key:component::sensor::entity_component::battery::name%]", + "state": { + "low": "Low", + "medium": "Medium", + "high": "High" + } } } } diff --git a/homeassistant/components/wiz/__init__.py b/homeassistant/components/wiz/__init__.py index 1bf3188e9e9..0e986aaefa2 100644 --- a/homeassistant/components/wiz/__init__.py +++ b/homeassistant/components/wiz/__init__.py @@ -103,6 +103,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass=hass, logger=_LOGGER, + config_entry=entry, name=entry.title, update_interval=timedelta(seconds=15), update_method=_async_update, diff --git a/homeassistant/components/wiz/light.py b/homeassistant/components/wiz/light.py index a3f36d580d2..9ef4cd57b3d 100644 --- a/homeassistant/components/wiz/light.py +++ b/homeassistant/components/wiz/light.py @@ -10,7 +10,7 @@ from pywizlight.scenes import get_id_from_scene_name from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -21,10 +21,6 @@ from homeassistant.components.light import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, -) from . import WizConfigEntry from .entity import WizToggleEntity @@ -43,10 +39,10 @@ def _async_pilot_builder(**kwargs: Any) -> PilotBuilder: if ATTR_RGBW_COLOR in kwargs: return PilotBuilder(brightness=brightness, rgbw=kwargs[ATTR_RGBW_COLOR]) - if ATTR_COLOR_TEMP in kwargs: + if ATTR_COLOR_TEMP_KELVIN in kwargs: return PilotBuilder( brightness=brightness, - colortemp=color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]), + colortemp=kwargs[ATTR_COLOR_TEMP_KELVIN], ) if ATTR_EFFECT in kwargs: @@ -93,8 +89,8 @@ class WizBulbEntity(WizToggleEntity, LightEntity): self._attr_effect_list = wiz_data.scenes if bulb_type.bulb_type != BulbClass.DW: kelvin = bulb_type.kelvin_range - self._attr_min_mireds = color_temperature_kelvin_to_mired(kelvin.max) - self._attr_max_mireds = color_temperature_kelvin_to_mired(kelvin.min) + self._attr_max_color_temp_kelvin = kelvin.max + self._attr_min_color_temp_kelvin = kelvin.min if bulb_type.features.effect: self._attr_supported_features = LightEntityFeature.EFFECT self._async_update_attrs() @@ -111,7 +107,7 @@ class WizBulbEntity(WizToggleEntity, LightEntity): color_temp := state.get_colortemp() ): self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = color_temperature_kelvin_to_mired(color_temp) + self._attr_color_temp_kelvin = color_temp elif ( ColorMode.RGBWW in color_modes and (rgbww := state.get_rgbww()) is not None ): diff --git a/homeassistant/components/wiz/manifest.json b/homeassistant/components/wiz/manifest.json index bb5527bc467..7b1ecdcdb6b 100644 --- a/homeassistant/components/wiz/manifest.json +++ b/homeassistant/components/wiz/manifest.json @@ -26,6 +26,5 @@ ], "documentation": "https://www.home-assistant.io/integrations/wiz", "iot_class": "local_push", - "quality_scale": "platinum", "requirements": ["pywizlight==0.5.14"] } diff --git a/homeassistant/components/wled/config_flow.py b/homeassistant/components/wled/config_flow.py index 2798e0d46d1..812a0500d1a 100644 --- a/homeassistant/components/wled/config_flow.py +++ 
b/homeassistant/components/wled/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import callback @@ -30,9 +30,11 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> WLEDOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> WLEDOptionsFlowHandler: """Get the options flow for this handler.""" - return WLEDOptionsFlowHandler(config_entry) + return WLEDOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -117,7 +119,7 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): return await wled.update() -class WLEDOptionsFlowHandler(OptionsFlowWithConfigEntry): +class WLEDOptionsFlowHandler(OptionsFlow): """Handle WLED options.""" async def async_step_init( @@ -133,7 +135,7 @@ class WLEDOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONF_KEEP_MAIN_LIGHT, - default=self.options.get( + default=self.config_entry.options.get( CONF_KEEP_MAIN_LIGHT, DEFAULT_KEEP_MAIN_LIGHT ), ): bool, diff --git a/homeassistant/components/wled/coordinator.py b/homeassistant/components/wled/coordinator.py index cb39fde5e5a..8e2855e9f05 100644 --- a/homeassistant/components/wled/coordinator.py +++ b/homeassistant/components/wled/coordinator.py @@ -49,6 +49,7 @@ class WLEDDataUpdateCoordinator(DataUpdateCoordinator[WLEDDevice]): super().__init__( hass, LOGGER, + config_entry=entry, name=DOMAIN, update_interval=SCAN_INTERVAL, ) @@ -133,6 +134,7 @@ class WLEDReleasesDataUpdateCoordinator(DataUpdateCoordinator[Releases]): super().__init__( hass, LOGGER, + config_entry=None, name=DOMAIN, update_interval=RELEASES_SCAN_INTERVAL, ) diff --git a/homeassistant/components/wled/manifest.json b/homeassistant/components/wled/manifest.json index 71939127356..326008ae1af 100644 --- a/homeassistant/components/wled/manifest.json +++ b/homeassistant/components/wled/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/wled", "integration_type": "device", "iot_class": "local_push", - "quality_scale": "platinum", - "requirements": ["wled==0.20.2"], + "requirements": ["wled==0.21.0"], "zeroconf": ["_wled._tcp.local."] } diff --git a/homeassistant/components/wmspro/__init__.py b/homeassistant/components/wmspro/__init__.py new file mode 100644 index 00000000000..37bf1495a56 --- /dev/null +++ b/homeassistant/components/wmspro/__init__.py @@ -0,0 +1,66 @@ +"""The WMS WebControl pro API integration.""" + +from __future__ import annotations + +import aiohttp +from wmspro.webcontrol import WebControlPro + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import UNDEFINED + +from .const import DOMAIN, MANUFACTURER + +PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT, Platform.SCENE] + +type WebControlProConfigEntry = ConfigEntry[WebControlPro] + + +async def async_setup_entry( + hass: HomeAssistant, entry: WebControlProConfigEntry +) -> bool: + """Set up wmspro from a config entry.""" + host = entry.data[CONF_HOST] + 
session = async_get_clientsession(hass) + hub = WebControlPro(host, session) + + try: + await hub.ping() + except aiohttp.ClientError as err: + raise ConfigEntryNotReady(f"Error while connecting to {host}") from err + + entry.runtime_data = hub + + device_registry = dr.async_get(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, entry.unique_id)} + if entry.unique_id + else UNDEFINED, + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer=MANUFACTURER, + model="WMS WebControl pro", + configuration_url=f"http://{hub.host}/system", + ) + + try: + await hub.refresh() + for dest in hub.dests.values(): + await dest.refresh() + except aiohttp.ClientError as err: + raise ConfigEntryNotReady(f"Error while refreshing from {host}") from err + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: WebControlProConfigEntry +) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/wmspro/config_flow.py b/homeassistant/components/wmspro/config_flow.py new file mode 100644 index 00000000000..2ce58ec9eca --- /dev/null +++ b/homeassistant/components/wmspro/config_flow.py @@ -0,0 +1,111 @@ +"""Config flow for WMS WebControl pro API integration.""" + +from __future__ import annotations + +import ipaddress +import logging +from typing import Any + +import aiohttp +import voluptuous as vol +from wmspro.webcontrol import WebControlPro + +from homeassistant.components import dhcp +from homeassistant.components.dhcp import DhcpServiceInfo +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import format_mac + +from .const import DOMAIN, SUGGESTED_HOST + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + } +) + + +class WebControlProConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for wmspro.""" + + VERSION = 1 + + async def async_step_dhcp( + self, discovery_info: dhcp.DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle the DHCP discovery step.""" + unique_id = format_mac(discovery_info.macaddress) + await self.async_set_unique_id(unique_id) + + entry = self.hass.config_entries.async_entry_for_domain_unique_id( + DOMAIN, unique_id + ) + if entry: + try: # Check if current host is a valid IP address + ipaddress.ip_address(entry.data[CONF_HOST]) + except ValueError: # Do not touch name-based host + return self.async_abort(reason="already_configured") + else: # Update existing host with new IP address + self._abort_if_unique_id_configured( + updates={CONF_HOST: discovery_info.ip} + ) + + for entry in self.hass.config_entries.async_entries(DOMAIN): + if not entry.unique_id and entry.data[CONF_HOST] in ( + discovery_info.hostname, + discovery_info.ip, + ): + self.hass.config_entries.async_update_entry(entry, unique_id=unique_id) + return self.async_abort(reason="already_configured") + + return await self.async_step_user() + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user-based step.""" + errors: dict[str, str] = {} + if user_input is not None: + self._async_abort_entries_match(user_input) + host = user_input[CONF_HOST] + 
session = async_get_clientsession(self.hass) + hub = WebControlPro(host, session) + try: + pong = await hub.ping() + except aiohttp.ClientError: + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + if not pong: + errors["base"] = "cannot_connect" + else: + await hub.refresh() + rooms = set(hub.rooms.keys()) + for entry in self.hass.config_entries.async_loaded_entries(DOMAIN): + if ( + entry.runtime_data + and entry.runtime_data.rooms + and set(entry.runtime_data.rooms.keys()) == rooms + ): + return self.async_abort(reason="already_configured") + return self.async_create_entry(title=host, data=user_input) + + if self.source == dhcp.DOMAIN: + discovery_info: DhcpServiceInfo = self.init_data + data_values = {CONF_HOST: discovery_info.ip} + else: + data_values = {CONF_HOST: SUGGESTED_HOST} + + self.context["title_placeholders"] = data_values + data_schema = self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, data_values + ) + + return self.async_show_form( + step_id="user", data_schema=data_schema, errors=errors + ) diff --git a/homeassistant/components/wmspro/const.py b/homeassistant/components/wmspro/const.py new file mode 100644 index 00000000000..d92534d9e46 --- /dev/null +++ b/homeassistant/components/wmspro/const.py @@ -0,0 +1,9 @@ +"""Constants for the WMS WebControl pro API integration.""" + +DOMAIN = "wmspro" +SUGGESTED_HOST = "webcontrol" + +ATTRIBUTION = "Data provided by WMS WebControl pro API" +MANUFACTURER = "WAREMA Renkhoff SE" + +BRIGHTNESS_SCALE = (1, 100) diff --git a/homeassistant/components/wmspro/cover.py b/homeassistant/components/wmspro/cover.py new file mode 100644 index 00000000000..a36b34642b7 --- /dev/null +++ b/homeassistant/components/wmspro/cover.py @@ -0,0 +1,77 @@ +"""Support for covers connected with WMS WebControl pro.""" + +from __future__ import annotations + +from datetime import timedelta +from typing import Any + +from wmspro.const import ( + WMS_WebControl_pro_API_actionDescription, + WMS_WebControl_pro_API_actionType, +) + +from homeassistant.components.cover import ATTR_POSITION, CoverDeviceClass, CoverEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import WebControlProConfigEntry +from .entity import WebControlProGenericEntity + +SCAN_INTERVAL = timedelta(seconds=5) +PARALLEL_UPDATES = 1 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WebControlProConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the WMS based covers from a config entry.""" + hub = config_entry.runtime_data + + entities: list[WebControlProGenericEntity] = [] + for dest in hub.dests.values(): + if dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive): + entities.append(WebControlProAwning(config_entry.entry_id, dest)) # noqa: PERF401 + + async_add_entities(entities) + + +class WebControlProAwning(WebControlProGenericEntity, CoverEntity): + """Representation of a WMS based awning.""" + + _attr_device_class = CoverDeviceClass.AWNING + + @property + def current_cover_position(self) -> int | None: + """Return current position of cover.""" + action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) + return 100 - action["percentage"] + + async def async_set_cover_position(self, **kwargs: Any) -> None: + """Move the cover to a specific position.""" + action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) + await action(percentage=100 - kwargs[ATTR_POSITION]) + + @property + def is_closed(self) -> bool | None: + """Return if the cover is closed.""" + return self.current_cover_position == 0 + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) + await action(percentage=0) + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the cover.""" + action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) + await action(percentage=100) + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the device if in motion.""" + action = self._dest.action( + WMS_WebControl_pro_API_actionDescription.ManualCommand, + WMS_WebControl_pro_API_actionType.Stop, + ) + await action() diff --git a/homeassistant/components/wmspro/diagnostics.py b/homeassistant/components/wmspro/diagnostics.py new file mode 100644 index 00000000000..c35cecc5ab5 --- /dev/null +++ b/homeassistant/components/wmspro/diagnostics.py @@ -0,0 +1,16 @@ +"""Diagnostics support for WMS WebControl pro API integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
import WebControlProConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: WebControlProConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + return entry.runtime_data.diag() diff --git a/homeassistant/components/wmspro/entity.py b/homeassistant/components/wmspro/entity.py new file mode 100644 index 00000000000..0bbbc69a294 --- /dev/null +++ b/homeassistant/components/wmspro/entity.py @@ -0,0 +1,43 @@ +"""Generic entity for the WMS WebControl pro API integration.""" + +from __future__ import annotations + +from wmspro.destination import Destination + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import ATTRIBUTION, DOMAIN, MANUFACTURER + + +class WebControlProGenericEntity(Entity): + """Foundation of all WMS based entities.""" + + _attr_attribution = ATTRIBUTION + _attr_has_entity_name = True + _attr_name = None + + def __init__(self, config_entry_id: str, dest: Destination) -> None: + """Initialize the entity with destination channel.""" + dest_id_str = str(dest.id) + self._dest = dest + self._attr_unique_id = dest_id_str + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, dest_id_str)}, + manufacturer=MANUFACTURER, + model=dest.animationType.name, + name=dest.name, + serial_number=dest_id_str, + suggested_area=dest.room.name, + via_device=(DOMAIN, config_entry_id), + configuration_url=f"http://{dest.host}/control", + ) + + async def async_update(self) -> None: + """Update the entity.""" + await self._dest.refresh() + + @property + def available(self) -> bool: + """Return if entity is available.""" + return self._dest.available diff --git a/homeassistant/components/wmspro/light.py b/homeassistant/components/wmspro/light.py new file mode 100644 index 00000000000..9242982bcf9 --- /dev/null +++ b/homeassistant/components/wmspro/light.py @@ -0,0 +1,89 @@ +"""Support for lights connected with WMS WebControl pro.""" + +from __future__ import annotations + +from datetime import timedelta +from typing import Any + +from wmspro.const import WMS_WebControl_pro_API_actionDescription + +from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.color import brightness_to_value, value_to_brightness + +from . 
import WebControlProConfigEntry +from .const import BRIGHTNESS_SCALE +from .entity import WebControlProGenericEntity + +SCAN_INTERVAL = timedelta(seconds=5) +PARALLEL_UPDATES = 1 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WebControlProConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the WMS based lights from a config entry.""" + hub = config_entry.runtime_data + + entities: list[WebControlProGenericEntity] = [] + for dest in hub.dests.values(): + if dest.action(WMS_WebControl_pro_API_actionDescription.LightDimming): + entities.append(WebControlProDimmer(config_entry.entry_id, dest)) + elif dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch): + entities.append(WebControlProLight(config_entry.entry_id, dest)) + + async_add_entities(entities) + + +class WebControlProLight(WebControlProGenericEntity, LightEntity): + """Representation of a WMS based light.""" + + _attr_color_mode = ColorMode.ONOFF + _attr_supported_color_modes = {ColorMode.ONOFF} + + @property + def is_on(self) -> bool: + """Return true if light is on.""" + action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch) + return action["onOffState"] + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the light on.""" + action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch) + await action(onOffState=True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the light off.""" + action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch) + await action(onOffState=False) + + +class WebControlProDimmer(WebControlProLight): + """Representation of a WMS-based dimmable light.""" + + _attr_color_mode = ColorMode.BRIGHTNESS + _attr_supported_color_modes = {ColorMode.BRIGHTNESS} + + @property + def brightness(self) -> int: + """Return the brightness of this light between 1..255.""" + action = self._dest.action( + WMS_WebControl_pro_API_actionDescription.LightDimming + ) + return value_to_brightness(BRIGHTNESS_SCALE, action["percentage"]) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the dimmer on.""" + if ATTR_BRIGHTNESS not in kwargs: + await super().async_turn_on(**kwargs) + return + + action = self._dest.action( + WMS_WebControl_pro_API_actionDescription.LightDimming + ) + await action( + percentage=brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS]) + ) diff --git a/homeassistant/components/wmspro/manifest.json b/homeassistant/components/wmspro/manifest.json new file mode 100644 index 00000000000..dd65be3e7e7 --- /dev/null +++ b/homeassistant/components/wmspro/manifest.json @@ -0,0 +1,18 @@ +{ + "domain": "wmspro", + "name": "WMS WebControl pro", + "codeowners": ["@mback2k"], + "config_flow": true, + "dhcp": [ + { + "macaddress": "0023D5*" + }, + { + "registered_devices": true + } + ], + "documentation": "https://www.home-assistant.io/integrations/wmspro", + "integration_type": "hub", + "iot_class": "local_polling", + "requirements": ["pywmspro==0.2.1"] +} diff --git a/homeassistant/components/wmspro/scene.py b/homeassistant/components/wmspro/scene.py new file mode 100644 index 00000000000..de18106b7f0 --- /dev/null +++ b/homeassistant/components/wmspro/scene.py @@ -0,0 +1,64 @@ +"""Support for scenes provided by WMS WebControl pro.""" + +from __future__ import annotations + +from typing import Any + +from wmspro.scene import Scene as WMS_Scene + +from homeassistant.components.scene import Scene +from homeassistant.core import 
HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import WebControlProConfigEntry +from .const import ATTRIBUTION, DOMAIN, MANUFACTURER + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WebControlProConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the WMS based scenes from a config entry.""" + hub = config_entry.runtime_data + + async_add_entities( + WebControlProScene(config_entry.entry_id, scene) + for scene in hub.scenes.values() + ) + + +class WebControlProScene(Scene): + """Representation of a WMS based scene.""" + + _attr_attribution = ATTRIBUTION + _attr_has_entity_name = True + + def __init__(self, config_entry_id: str, scene: WMS_Scene) -> None: + """Initialize the entity with the configured scene.""" + super().__init__() + + # Scene information + self._scene = scene + self._attr_name = scene.name + self._attr_unique_id = str(scene.id) + + # Room information + room = scene.room + room_name = room.name + room_id_str = str(room.id) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, room_id_str)}, + manufacturer=MANUFACTURER, + model="Room", + name=room_name, + serial_number=room_id_str, + suggested_area=room_name, + via_device=(DOMAIN, config_entry_id), + configuration_url=f"http://{scene.host}/control", + ) + + async def async_activate(self, **kwargs: Any) -> None: + """Activate scene. Try to get entities into requested state.""" + await self._scene() diff --git a/homeassistant/components/wmspro/strings.json b/homeassistant/components/wmspro/strings.json new file mode 100644 index 00000000000..9b6d129905b --- /dev/null +++ b/homeassistant/components/wmspro/strings.json @@ -0,0 +1,25 @@ +{ + "config": { + "flow_title": "{host}", + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your WMS WebControl pro." 
+ } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + } +} diff --git a/homeassistant/components/wolflink/__init__.py b/homeassistant/components/wolflink/__init__.py index ad1759ba2cb..49197ed7d26 100644 --- a/homeassistant/components/wolflink/__init__.py +++ b/homeassistant/components/wolflink/__init__.py @@ -11,6 +11,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -30,6 +31,7 @@ PLATFORMS = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Wolf SmartSet Service from a config entry.""" + username = entry.data[CONF_USERNAME] password = entry.data[CONF_PASSWORD] device_name = entry.data[DEVICE_NAME] @@ -98,6 +100,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update_data, update_interval=timedelta(seconds=60), @@ -125,6 +128,32 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate old entry.""" + # convert unique_id to string + if entry.version == 1 and entry.minor_version == 1: + if isinstance(entry.unique_id, int): + hass.config_entries.async_update_entry( + entry, unique_id=str(entry.unique_id) + ) + device_registry = dr.async_get(hass) + for device in dr.async_entries_for_config_entry( + device_registry, entry.entry_id + ): + new_identifiers = set() + for identifier in device.identifiers: + if identifier[0] == DOMAIN: + new_identifiers.add((DOMAIN, str(identifier[1]))) + else: + new_identifiers.add(identifier) + device_registry.async_update_device( + device.id, new_identifiers=new_identifiers + ) + hass.config_entries.async_update_entry(entry, minor_version=2) + + return True + + async def fetch_parameters(client: WolfClient, gateway_id: int, device_id: int): """Fetch all available parameters with usage of WolfClient. 
diff --git a/homeassistant/components/wolflink/config_flow.py b/homeassistant/components/wolflink/config_flow.py index a2678580a23..54c6db4cb07 100644 --- a/homeassistant/components/wolflink/config_flow.py +++ b/homeassistant/components/wolflink/config_flow.py @@ -1,10 +1,10 @@ """Config flow for Wolf SmartSet Service integration.""" import logging -from typing import Any from httpcore import ConnectError import voluptuous as vol +from wolf_comm.models import Device from wolf_comm.token_auth import InvalidAuth from wolf_comm.wolf_client import WolfClient @@ -24,15 +24,17 @@ class WolfLinkConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Wolf SmartSet Service.""" VERSION = 1 + MINOR_VERSION = 2 + + fetched_systems: list[Device] def __init__(self) -> None: """Initialize with empty username and password.""" - self.username = None - self.password = None - self.fetched_systems = None + self.username: str | None = None + self.password: str | None = None async def async_step_user( - self, user_input: dict[str, Any] | None = None + self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Handle the initial step to get connection parameters.""" errors = {} @@ -57,16 +59,18 @@ class WolfLinkConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=USER_SCHEMA, errors=errors ) - async def async_step_device(self, user_input=None): + async def async_step_device( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Allow user to select device from devices connected to specified account.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: device_name = user_input[DEVICE_NAME] system = [ device for device in self.fetched_systems if device.name == device_name ] device_id = system[0].id - await self.async_set_unique_id(device_id) + await self.async_set_unique_id(str(device_id)) self._abort_if_unique_id_configured() return self.async_create_entry( title=user_input[DEVICE_NAME], diff --git a/homeassistant/components/wolflink/manifest.json b/homeassistant/components/wolflink/manifest.json index 6a98dcd6ca4..4bfc0e6dd83 100644 --- a/homeassistant/components/wolflink/manifest.json +++ b/homeassistant/components/wolflink/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/wolflink", "iot_class": "cloud_polling", "loggers": ["wolf_comm"], - "requirements": ["wolf-comm==0.0.9"] + "requirements": ["wolf-comm==0.0.15"] } diff --git a/homeassistant/components/wolflink/sensor.py b/homeassistant/components/wolflink/sensor.py index 3179a9ff6bd..1f6e6c42464 100644 --- a/homeassistant/components/wolflink/sensor.py +++ b/homeassistant/components/wolflink/sensor.py @@ -63,7 +63,7 @@ class WolfLinkSensor(CoordinatorEntity, SensorEntity): self._attr_unique_id = f"{device_id}:{wolf_object.parameter_id}" self._state = None self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, device_id)}, + identifiers={(DOMAIN, str(device_id))}, configuration_url="https://www.wolf-smartset.com/", manufacturer=MANUFACTURER, ) diff --git a/homeassistant/components/workday/binary_sensor.py b/homeassistant/components/workday/binary_sensor.py index 33c2e249024..f4a2541a1d7 100644 --- a/homeassistant/components/workday/binary_sensor.py +++ b/homeassistant/components/workday/binary_sensor.py @@ -90,7 +90,7 @@ def _get_obj_holidays( obj_holidays: HolidayBase = country_holidays( country, subdiv=province, - years=year, + years=[year, year + 1], language=language, categories=set_categories, ) @@ -129,6 +129,7 @@ async def 
async_setup_entry( ) calc_add_holidays: list[str] = validate_dates(add_holidays) calc_remove_holidays: list[str] = validate_dates(remove_holidays) + next_year = dt_util.now().year + 1 # Add custom holidays try: @@ -152,26 +153,28 @@ async def async_setup_entry( LOGGER.debug("Removed %s by name '%s'", holiday, remove_holiday) except KeyError as unmatched: LOGGER.warning("No holiday found matching %s", unmatched) - if dt_util.parse_date(remove_holiday): - async_create_issue( - hass, - DOMAIN, - f"bad_date_holiday-{entry.entry_id}-{slugify(remove_holiday)}", - is_fixable=True, - is_persistent=False, - severity=IssueSeverity.WARNING, - translation_key="bad_date_holiday", - translation_placeholders={ - CONF_COUNTRY: country if country else "-", - "title": entry.title, - CONF_REMOVE_HOLIDAYS: remove_holiday, - }, - data={ - "entry_id": entry.entry_id, - "country": country, - "named_holiday": remove_holiday, - }, - ) + if _date := dt_util.parse_date(remove_holiday): + if _date.year <= next_year: + # Only check and raise issues for current and next year + async_create_issue( + hass, + DOMAIN, + f"bad_date_holiday-{entry.entry_id}-{slugify(remove_holiday)}", + is_fixable=True, + is_persistent=False, + severity=IssueSeverity.WARNING, + translation_key="bad_date_holiday", + translation_placeholders={ + CONF_COUNTRY: country if country else "-", + "title": entry.title, + CONF_REMOVE_HOLIDAYS: remove_holiday, + }, + data={ + "entry_id": entry.entry_id, + "country": country, + "named_holiday": remove_holiday, + }, + ) else: async_create_issue( hass, diff --git a/homeassistant/components/workday/config_flow.py b/homeassistant/components/workday/config_flow.py index ebbc8fb0b99..2036d685d31 100644 --- a/homeassistant/components/workday/config_flow.py +++ b/homeassistant/components/workday/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE, CONF_NAME from homeassistant.core import callback @@ -67,12 +67,14 @@ def add_province_and_language_to_schema( _country = country_holidays(country=country) if country_default_language := (_country.default_language): selectable_languages = _country.supported_languages - new_selectable_languages = [lang[:2] for lang in selectable_languages] + new_selectable_languages = list(selectable_languages) language_schema = { vol.Optional( CONF_LANGUAGE, default=country_default_language ): LanguageSelector( - LanguageSelectorConfig(languages=new_selectable_languages) + LanguageSelectorConfig( + languages=new_selectable_languages, native_name=True + ) ) } @@ -219,7 +221,7 @@ class WorkdayConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> WorkdayOptionsFlowHandler: """Get the options flow for this handler.""" - return WorkdayOptionsFlowHandler(config_entry) + return WorkdayOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -305,12 +307,12 @@ class WorkdayConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, description_placeholders={ "name": self.data[CONF_NAME], - "country": self.data.get(CONF_COUNTRY), + "country": self.data.get(CONF_COUNTRY, "-"), }, ) -class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): +class WorkdayOptionsFlowHandler(OptionsFlow): """Handle Workday options.""" async def async_step_init( @@ -320,7 +322,7 @@ class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): errors: dict[str, str] = {} if user_input is not 
None: - combined_input: dict[str, Any] = {**self.options, **user_input} + combined_input: dict[str, Any] = {**self.config_entry.options, **user_input} if CONF_PROVINCE not in user_input: # Province not present, delete old value (if present) too combined_input.pop(CONF_PROVINCE, None) @@ -340,7 +342,7 @@ class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): else: LOGGER.debug("abort_check in options with %s", combined_input) abort_match = { - CONF_COUNTRY: self._config_entry.options.get(CONF_COUNTRY), + CONF_COUNTRY: self.config_entry.options.get(CONF_COUNTRY), CONF_EXCLUDES: combined_input[CONF_EXCLUDES], CONF_OFFSET: combined_input[CONF_OFFSET], CONF_WORKDAYS: combined_input[CONF_WORKDAYS], @@ -357,23 +359,22 @@ class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): else: return self.async_create_entry(data=combined_input) + options = self.config_entry.options schema: vol.Schema = await self.hass.async_add_executor_job( add_province_and_language_to_schema, DATA_SCHEMA_OPT, - self.options.get(CONF_COUNTRY), + options.get(CONF_COUNTRY), ) - new_schema = self.add_suggested_values_to_schema( - schema, user_input or self.options - ) + new_schema = self.add_suggested_values_to_schema(schema, user_input or options) LOGGER.debug("Errors have occurred in options %s", errors) return self.async_show_form( step_id="init", data_schema=new_schema, errors=errors, description_placeholders={ - "name": self.options[CONF_NAME], - "country": self.options.get(CONF_COUNTRY), + "name": options[CONF_NAME], + "country": options.get(CONF_COUNTRY, "-"), }, ) diff --git a/homeassistant/components/stookalert/diagnostics.py b/homeassistant/components/workday/diagnostics.py similarity index 62% rename from homeassistant/components/stookalert/diagnostics.py rename to homeassistant/components/workday/diagnostics.py index c15e808ae19..84e5073ca5b 100644 --- a/homeassistant/components/stookalert/diagnostics.py +++ b/homeassistant/components/workday/diagnostics.py @@ -1,20 +1,18 @@ -"""Diagnostics support for Stookalert.""" +"""Diagnostics support for Workday.""" from __future__ import annotations from typing import Any -import stookalert - from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN - async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - client: stookalert.stookalert = hass.data[DOMAIN][entry.entry_id] - return {"state": client.state} + + return { + "config_entry": entry, + } diff --git a/homeassistant/components/workday/icons.json b/homeassistant/components/workday/icons.json index 10d3c93a288..ec5c64dce97 100644 --- a/homeassistant/components/workday/icons.json +++ b/homeassistant/components/workday/icons.json @@ -1,5 +1,7 @@ { "services": { - "check_date": "mdi:calendar-check" + "check_date": { + "service": "mdi:calendar-check" + } } } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index fafa870d00a..de9cbe694d8 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.55"] + "requirements": ["holidays==0.63"] } diff --git a/homeassistant/components/workday/strings.json b/homeassistant/components/workday/strings.json index f3b966e28ea..87fa294dbba 100644 --- 
a/homeassistant/components/workday/strings.json +++ b/homeassistant/components/workday/strings.json @@ -14,9 +14,9 @@ "options": { "description": "Set additional options for {name} configured for country {country}", "data": { - "excludes": "Excludes", + "excludes": "Days to exclude", "days_offset": "Offset", - "workdays": "Workdays", + "workdays": "Days to include", "add_holidays": "Add holidays", "remove_holidays": "Remove Holidays", "province": "Subdivision of country", @@ -24,9 +24,9 @@ "category": "Additional category as holiday" }, "data_description": { - "excludes": "List of workdays to exclude, notice the keyword `holiday` and read the documentation on how to use it correctly", + "excludes": "Select which weekdays to exclude as workdays.\nThe key `holidays` adds those for the configured country, customizable by all the settings below. Read the documentation on how to use them correctly.", "days_offset": "Days offset from current day", - "workdays": "List of working days", + "workdays": "Select which weekdays to include as possible workdays.", "add_holidays": "Add custom holidays as YYYY-MM-DD or as range using `,` as separator", "remove_holidays": "Remove holidays as YYYY-MM-DD, as range using `,` as separator or by using partial of name", "province": "State, territory, province or region of country", @@ -86,18 +86,19 @@ "options": { "armed_forces": "Armed forces", "bank": "Bank", + "catholic": "Catholic", + "chinese": "Chinese", + "christian": "Christian", "government": "Government", "half_day": "Half day", + "hebrew": "Hebrew", + "hindu": "Hindu", + "islamic": "Islamic", "optional": "Optional", "public": "Public", "school": "School", "unofficial": "Unofficial", - "workday": "Workday", - "chinese": "Chinese", - "christian": "Christian", - "hebrew": "Hebrew", - "hindu": "Hindu", - "islamic": "Islamic" + "workday": "Workday" } }, "days": { diff --git a/homeassistant/components/worldclock/config_flow.py b/homeassistant/components/worldclock/config_flow.py index a9598c049aa..eebf0d59dcb 100644 --- a/homeassistant/components/worldclock/config_flow.py +++ b/homeassistant/components/worldclock/config_flow.py @@ -28,11 +28,11 @@ TIME_STR_OPTIONS = [ SelectOptionDict( value=DEFAULT_TIME_STR_FORMAT, label=f"14:05 ({DEFAULT_TIME_STR_FORMAT})" ), - SelectOptionDict(value="%I:%M %p", label="11:05 am (%I:%M %p)"), + SelectOptionDict(value="%I:%M %p", label="11:05 AM (%I:%M %p)"), SelectOptionDict(value="%Y-%m-%d %H:%M", label="2024-01-01 14:05 (%Y-%m-%d %H:%M)"), SelectOptionDict( value="%a, %b %d, %Y %I:%M %p", - label="Monday, Jan 01, 2024 11:05 am (%a, %b %d, %Y %I:%M %p)", + label="Mon, Jan 01, 2024 11:05 AM (%a, %b %d, %Y %I:%M %p)", ), ] diff --git a/homeassistant/components/worldtidesinfo/manifest.json b/homeassistant/components/worldtidesinfo/manifest.json index 962e63617f4..c873f2f08f3 100644 --- a/homeassistant/components/worldtidesinfo/manifest.json +++ b/homeassistant/components/worldtidesinfo/manifest.json @@ -3,5 +3,6 @@ "name": "World Tides", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/worldtidesinfo", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/worxlandroid/manifest.json b/homeassistant/components/worxlandroid/manifest.json index a74228295c8..7a65b3b91b6 100644 --- a/homeassistant/components/worxlandroid/manifest.json +++ b/homeassistant/components/worxlandroid/manifest.json @@ -3,5 +3,6 @@ "name": "Worx Landroid", "codeowners": [], "documentation": 
"https://www.home-assistant.io/integrations/worxlandroid", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/ws66i/__init__.py b/homeassistant/components/ws66i/__init__.py index 1993f38e0ab..83ad7bbf070 100644 --- a/homeassistant/components/ws66i/__init__.py +++ b/homeassistant/components/ws66i/__init__.py @@ -52,7 +52,7 @@ def _find_zones(hass: HomeAssistant, ws66i: WS66i) -> list[int]: zone_id = (amp_num * 10) + zone_num zone_list.append(zone_id) - _LOGGER.info("Detected %d amp(s)", amp_num - 1) + _LOGGER.debug("Detected %d amp(s)", amp_num - 1) return zone_list diff --git a/homeassistant/components/ws66i/config_flow.py b/homeassistant/components/ws66i/config_flow.py index 330e9963f95..120b7738d2e 100644 --- a/homeassistant/components/ws66i/config_flow.py +++ b/homeassistant/components/ws66i/config_flow.py @@ -49,7 +49,7 @@ FIRST_ZONE = 11 @callback -def _sources_from_config(data): +def _sources_from_config(data: dict[str, str]) -> dict[str, str]: sources_config = { str(idx + 1): data.get(source) for idx, source in enumerate(SOURCES) } @@ -130,11 +130,13 @@ class WS66iConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> Ws66iOptionsFlowHandler: """Define the config flow to handle options.""" - return Ws66iOptionsFlowHandler(config_entry) + return Ws66iOptionsFlowHandler() @callback -def _key_for_source(index, source, previous_sources): +def _key_for_source( + index: int, source: str, previous_sources: dict[str, str] +) -> vol.Required: return vol.Required( source, description={"suggested_value": previous_sources[str(index)]} ) @@ -143,11 +145,9 @@ def _key_for_source(index, source, previous_sources): class Ws66iOptionsFlowHandler(OptionsFlow): """Handle a WS66i options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry( diff --git a/homeassistant/components/ws66i/manifest.json b/homeassistant/components/ws66i/manifest.json index d259823d5af..c465a9f9f37 100644 --- a/homeassistant/components/ws66i/manifest.json +++ b/homeassistant/components/ws66i/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/ws66i", "iot_class": "local_polling", - "quality_scale": "silver", "requirements": ["pyws66i==1.1"] } diff --git a/homeassistant/components/wsdot/manifest.json b/homeassistant/components/wsdot/manifest.json index 4444cfbac4a..9b7746eea74 100644 --- a/homeassistant/components/wsdot/manifest.json +++ b/homeassistant/components/wsdot/manifest.json @@ -3,5 +3,6 @@ "name": "Washington State Department of Transportation (WSDOT)", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/wsdot", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/wyoming/__init__.py b/homeassistant/components/wyoming/__init__.py index 00d587e2bb4..d639933ece6 100644 --- a/homeassistant/components/wyoming/__init__.py +++ b/homeassistant/components/wyoming/__init__.py @@ -14,11 +14,11 @@ from .const import ATTR_SPEAKER, DOMAIN from .data import WyomingService from .devices import SatelliteDevice from .models import DomainDataItem -from .satellite import 
WyomingSatellite _LOGGER = logging.getLogger(__name__) SATELLITE_PLATFORMS = [ + Platform.ASSIST_SATELLITE, Platform.BINARY_SENSOR, Platform.SELECT, Platform.SWITCH, @@ -47,51 +47,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_unload(entry.add_update_listener(update_listener)) if (satellite_info := service.info.satellite) is not None: - # Create satellite device, etc. - item.satellite = _make_satellite(hass, entry, service) + # Create satellite device + dev_reg = dr.async_get(hass) - # Set up satellite sensors, switches, etc. - await hass.config_entries.async_forward_entry_setups(entry, SATELLITE_PLATFORMS) - - # Start satellite communication - entry.async_create_background_task( - hass, - item.satellite.run(), - f"Satellite {satellite_info.name}", + # Use config entry id since only one satellite per entry is supported + satellite_id = entry.entry_id + device = dev_reg.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, satellite_id)}, + name=satellite_info.name, + suggested_area=satellite_info.area, ) - entry.async_on_unload(item.satellite.stop) + item.device = SatelliteDevice( + satellite_id=satellite_id, + device_id=device.id, + ) + + # Set up satellite entity, sensors, switches, etc. + await hass.config_entries.async_forward_entry_setups(entry, SATELLITE_PLATFORMS) return True -def _make_satellite( - hass: HomeAssistant, config_entry: ConfigEntry, service: WyomingService -) -> WyomingSatellite: - """Create Wyoming satellite/device from config entry and Wyoming service.""" - satellite_info = service.info.satellite - assert satellite_info is not None - - dev_reg = dr.async_get(hass) - - # Use config entry id since only one satellite per entry is supported - satellite_id = config_entry.entry_id - - device = dev_reg.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, satellite_id)}, - name=satellite_info.name, - suggested_area=satellite_info.area, - ) - - satellite_device = SatelliteDevice( - satellite_id=satellite_id, - device_id=device.id, - ) - - return WyomingSatellite(hass, config_entry, service, satellite_device) - - async def update_listener(hass: HomeAssistant, entry: ConfigEntry): """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) @@ -102,7 +80,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: item: DomainDataItem = hass.data[DOMAIN][entry.entry_id] platforms = list(item.service.platforms) - if item.satellite is not None: + if item.device is not None: platforms += SATELLITE_PLATFORMS unload_ok = await hass.config_entries.async_unload_platforms(entry, platforms) diff --git a/homeassistant/components/wyoming/satellite.py b/homeassistant/components/wyoming/assist_satellite.py similarity index 82% rename from homeassistant/components/wyoming/satellite.py rename to homeassistant/components/wyoming/assist_satellite.py index 781f0706c68..615084bcbf3 100644 --- a/homeassistant/components/wyoming/satellite.py +++ b/homeassistant/components/wyoming/assist_satellite.py @@ -1,12 +1,12 @@ -"""Support for Wyoming satellite services.""" +"""Assist satellite entity for Wyoming integration.""" + +from __future__ import annotations import asyncio from collections.abc import AsyncGenerator import io import logging -import time -from typing import Final -from uuid import uuid4 +from typing import Any, Final import wave from wyoming.asr import Transcribe, Transcript @@ -18,20 +18,28 @@ from wyoming.info import Describe, Info 
from wyoming.ping import Ping, Pong from wyoming.pipeline import PipelineStage, RunPipeline from wyoming.satellite import PauseSatellite, RunSatellite +from wyoming.snd import Played from wyoming.timer import TimerCancelled, TimerFinished, TimerStarted, TimerUpdated from wyoming.tts import Synthesize, SynthesizeVoice from wyoming.vad import VoiceStarted, VoiceStopped from wyoming.wake import Detect, Detection -from homeassistant.components import assist_pipeline, intent, stt, tts -from homeassistant.components.assist_pipeline import select as pipeline_select -from homeassistant.components.assist_pipeline.vad import VadSensitivity +from homeassistant.components import assist_pipeline, intent, tts +from homeassistant.components.assist_pipeline import PipelineEvent +from homeassistant.components.assist_satellite import ( + AssistSatelliteConfiguration, + AssistSatelliteEntity, + AssistSatelliteEntityDescription, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.core import Context, HomeAssistant, callback +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .data import WyomingService from .devices import SatelliteDevice +from .entity import WyomingSatelliteEntity +from .models import DomainDataItem _LOGGER = logging.getLogger(__name__) @@ -41,7 +49,6 @@ _RESTART_SECONDS: Final = 3 _PING_TIMEOUT: Final = 5 _PING_SEND_DELAY: Final = 2 _PIPELINE_FINISH_TIMEOUT: Final = 1 -_CONVERSATION_TIMEOUT_SEC: Final = 5 * 60 # 5 minutes # Wyoming stage -> Assist stage _STAGES: dict[PipelineStage, assist_pipeline.PipelineStage] = { @@ -52,21 +59,46 @@ _STAGES: dict[PipelineStage, assist_pipeline.PipelineStage] = { } -class WyomingSatellite: - """Remove voice satellite running the Wyoming protocol.""" +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Wyoming Assist satellite entity.""" + domain_data: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] + assert domain_data.device is not None + + async_add_entities( + [ + WyomingAssistSatellite( + hass, domain_data.service, domain_data.device, config_entry + ) + ] + ) + + +class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): + """Assist satellite for Wyoming devices.""" + + entity_description = AssistSatelliteEntityDescription(key="assist_satellite") + _attr_translation_key = "assist_satellite" + _attr_name = None def __init__( self, hass: HomeAssistant, - config_entry: ConfigEntry, service: WyomingService, device: SatelliteDevice, + config_entry: ConfigEntry, ) -> None: - """Initialize satellite.""" - self.hass = hass - self.config_entry = config_entry + """Initialize an Assist satellite.""" + WyomingSatelliteEntity.__init__(self, device) + AssistSatelliteEntity.__init__(self) + self.service = service self.device = device + self.config_entry = config_entry + self.is_running = True self._client: AsyncTcpClient | None = None @@ -84,6 +116,160 @@ class WyomingSatellite: self.device.set_pipeline_listener(self._pipeline_changed) self.device.set_audio_settings_listener(self._audio_settings_changed) + @property + def pipeline_entity_id(self) -> str | None: + """Return the entity ID of the pipeline to use for the next conversation.""" + return self.device.get_pipeline_entity_id(self.hass) + + @property + def vad_sensitivity_entity_id(self) -> str | None: + """Return the entity ID of the VAD sensitivity to use 
for the next conversation.""" + return self.device.get_vad_sensitivity_entity_id(self.hass) + + @property + def tts_options(self) -> dict[str, Any] | None: + """Options passed for text-to-speech.""" + return { + tts.ATTR_PREFERRED_FORMAT: "wav", + tts.ATTR_PREFERRED_SAMPLE_RATE: 16000, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + } + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + self.start_satellite() + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + self.stop_satellite() + + @callback + def async_get_configuration( + self, + ) -> AssistSatelliteConfiguration: + """Get the current satellite configuration.""" + raise NotImplementedError + + async def async_set_configuration( + self, config: AssistSatelliteConfiguration + ) -> None: + """Set the current satellite configuration.""" + raise NotImplementedError + + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Set state based on pipeline stage.""" + assert self._client is not None + + if event.type == assist_pipeline.PipelineEventType.RUN_END: + # Pipeline run is complete + self._is_pipeline_running = False + self._pipeline_ended_event.set() + self.device.set_is_active(False) + elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_START: + self.hass.add_job(self._client.write_event(Detect().event())) + elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_END: + # Wake word detection + # Inform client of wake word detection + if event.data and (wake_word_output := event.data.get("wake_word_output")): + detection = Detection( + name=wake_word_output["wake_word_id"], + timestamp=wake_word_output.get("timestamp"), + ) + self.hass.add_job(self._client.write_event(detection.event())) + elif event.type == assist_pipeline.PipelineEventType.STT_START: + # Speech-to-text + self.device.set_is_active(True) + + if event.data: + self.hass.add_job( + self._client.write_event( + Transcribe(language=event.data["metadata"]["language"]).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.STT_VAD_START: + # User started speaking + if event.data: + self.hass.add_job( + self._client.write_event( + VoiceStarted(timestamp=event.data["timestamp"]).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.STT_VAD_END: + # User stopped speaking + if event.data: + self.hass.add_job( + self._client.write_event( + VoiceStopped(timestamp=event.data["timestamp"]).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.STT_END: + # Speech-to-text transcript + if event.data: + # Inform client of transcript + stt_text = event.data["stt_output"]["text"] + self.hass.add_job( + self._client.write_event(Transcript(text=stt_text).event()) + ) + elif event.type == assist_pipeline.PipelineEventType.TTS_START: + # Text-to-speech text + if event.data: + # Inform client of text + self.hass.add_job( + self._client.write_event( + Synthesize( + text=event.data["tts_input"], + voice=SynthesizeVoice( + name=event.data.get("voice"), + language=event.data.get("language"), + ), + ).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.TTS_END: + # TTS stream + if event.data and (tts_output := event.data["tts_output"]): + media_id = tts_output["media_id"] + self.hass.add_job(self._stream_tts(media_id)) + elif event.type == assist_pipeline.PipelineEventType.ERROR: + #
Pipeline error + if event.data: + self.hass.add_job( + self._client.write_event( + Error( + text=event.data["message"], code=event.data["code"] + ).event() + ) + ) + + # ------------------------------------------------------------------------- + + def start_satellite(self) -> None: + """Start satellite task.""" + self.is_running = True + + self.config_entry.async_create_background_task( + self.hass, self.run(), "wyoming satellite run" + ) + + def stop_satellite(self) -> None: + """Signal satellite task to stop running.""" + # Stop existing pipeline + self._audio_queue.put_nowait(None) + + # Tell satellite to stop running + self._send_pause() + + # Stop task loop + self.is_running = False + + # Unblock waiting for unmuted + self._muted_changed_event.set() + + # ------------------------------------------------------------------------- + async def run(self) -> None: """Run and maintain a connection to satellite.""" _LOGGER.debug("Running satellite task") @@ -110,6 +296,9 @@ class WyomingSatellite: except Exception as err: # noqa: BLE001 _LOGGER.debug("%s: %s", err.__class__.__name__, str(err)) + # Stop any existing pipeline + self._audio_queue.put_nowait(None) + # Ensure sensor is off (before restart) self.device.set_is_active(False) @@ -123,17 +312,6 @@ class WyomingSatellite: await self.on_stopped() - def stop(self) -> None: - """Signal satellite task to stop running.""" - # Tell satellite to stop running - self._send_pause() - - # Stop task loop - self.is_running = False - - # Unblock waiting for unmuted - self._muted_changed_event.set() - async def on_restart(self) -> None: """Block until pipeline loop will be restarted.""" _LOGGER.warning( @@ -151,7 +329,7 @@ class WyomingSatellite: await asyncio.sleep(_RECONNECT_SECONDS) async def on_muted(self) -> None: - """Block until device may be unmated again.""" + """Block until device may be unmuted again.""" await self._muted_changed_event.wait() async def on_stopped(self) -> None: @@ -252,6 +430,7 @@ class WyomingSatellite: done, pending = await asyncio.wait( pending, return_when=asyncio.FIRST_COMPLETED ) + if pipeline_ended_task in done: # Pipeline run end event was received _LOGGER.debug("Pipeline finished") @@ -302,7 +481,7 @@ class WyomingSatellite: elif AudioStop.is_type(client_event.type) and self._is_pipeline_running: # Stop pipeline _LOGGER.debug("Client requested pipeline to stop") - self._audio_queue.put_nowait(b"") + self._audio_queue.put_nowait(None) elif Info.is_type(client_event.type): client_info = Info.from_event(client_event) _LOGGER.debug("Updated client info: %s", client_info) @@ -329,6 +508,9 @@ class WyomingSatellite: break _LOGGER.debug("Client detected wake word: %s", wake_word_phrase) + elif Played.is_type(client_event.type): + # TTS response has finished playing on satellite + self.tts_response_finished() else: _LOGGER.debug("Unexpected event from satellite: %s", client_event) @@ -353,72 +535,20 @@ class WyomingSatellite: if end_stage is None: raise ValueError(f"Invalid end stage: {end_stage}") - pipeline_id = pipeline_select.get_chosen_pipeline( - self.hass, - DOMAIN, - self.device.satellite_id, - ) - pipeline = assist_pipeline.async_get_pipeline(self.hass, pipeline_id) - assert pipeline is not None - # We will push audio in through a queue self._audio_queue = asyncio.Queue() - stt_stream = self._stt_stream() - - # Start pipeline running - _LOGGER.debug( - "Starting pipeline %s from %s to %s", - pipeline.name, - start_stage, - end_stage, - ) - - # Reset conversation id, if necessary - if (self._conversation_id_time is 
None) or ( - (time.monotonic() - self._conversation_id_time) > _CONVERSATION_TIMEOUT_SEC - ): - self._conversation_id = None - - if self._conversation_id is None: - self._conversation_id = str(uuid4()) - - # Update timeout - self._conversation_id_time = time.monotonic() self._is_pipeline_running = True self._pipeline_ended_event.clear() self.config_entry.async_create_background_task( self.hass, - assist_pipeline.async_pipeline_from_audio_stream( - self.hass, - context=Context(), - event_callback=self._event_callback, - stt_metadata=stt.SpeechMetadata( - language=pipeline.language, - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=stt_stream, + self.async_accept_pipeline_from_satellite( + audio_stream=self._stt_stream(), start_stage=start_stage, end_stage=end_stage, - tts_audio_output="wav", - pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - noise_suppression_level=self.device.noise_suppression_level, - auto_gain_dbfs=self.device.auto_gain, - volume_multiplier=self.device.volume_multiplier, - silence_seconds=VadSensitivity.to_seconds( - self.device.vad_sensitivity - ), - ), - device_id=self.device.device_id, wake_word_phrase=wake_word_phrase, - conversation_id=self._conversation_id, ), - name="wyoming satellite pipeline", + "wyoming satellite pipeline", ) async def _send_delayed_ping(self) -> None: @@ -431,91 +561,6 @@ class WyomingSatellite: except ConnectionError: pass # handled with timeout - def _event_callback(self, event: assist_pipeline.PipelineEvent) -> None: - """Translate pipeline events into Wyoming events.""" - assert self._client is not None - - if event.type == assist_pipeline.PipelineEventType.RUN_END: - # Pipeline run is complete - self._is_pipeline_running = False - self._pipeline_ended_event.set() - self.device.set_is_active(False) - elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_START: - self.hass.add_job(self._client.write_event(Detect().event())) - elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_END: - # Wake word detection - # Inform client of wake word detection - if event.data and (wake_word_output := event.data.get("wake_word_output")): - detection = Detection( - name=wake_word_output["wake_word_id"], - timestamp=wake_word_output.get("timestamp"), - ) - self.hass.add_job(self._client.write_event(detection.event())) - elif event.type == assist_pipeline.PipelineEventType.STT_START: - # Speech-to-text - self.device.set_is_active(True) - - if event.data: - self.hass.add_job( - self._client.write_event( - Transcribe(language=event.data["metadata"]["language"]).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.STT_VAD_START: - # User started speaking - if event.data: - self.hass.add_job( - self._client.write_event( - VoiceStarted(timestamp=event.data["timestamp"]).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.STT_VAD_END: - # User stopped speaking - if event.data: - self.hass.add_job( - self._client.write_event( - VoiceStopped(timestamp=event.data["timestamp"]).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.STT_END: - # Speech-to-text transcript - if event.data: - # Inform client of transript - stt_text = event.data["stt_output"]["text"] - self.hass.add_job( - self._client.write_event(Transcript(text=stt_text).event()) - ) - elif event.type == 
assist_pipeline.PipelineEventType.TTS_START: - # Text-to-speech text - if event.data: - # Inform client of text - self.hass.add_job( - self._client.write_event( - Synthesize( - text=event.data["tts_input"], - voice=SynthesizeVoice( - name=event.data.get("voice"), - language=event.data.get("language"), - ), - ).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.TTS_END: - # TTS stream - if event.data and (tts_output := event.data["tts_output"]): - media_id = tts_output["media_id"] - self.hass.add_job(self._stream_tts(media_id)) - elif event.type == assist_pipeline.PipelineEventType.ERROR: - # Pipeline error - if event.data: - self.hass.add_job( - self._client.write_event( - Error( - text=event.data["message"], code=event.data["code"] - ).event() - ) - ) - async def _connect(self) -> None: """Connect to satellite over TCP.""" await self._disconnect() @@ -576,16 +621,16 @@ class WyomingSatellite: async def _stt_stream(self) -> AsyncGenerator[bytes]: """Yield audio chunks from a queue.""" - try: - is_first_chunk = True - while chunk := await self._audio_queue.get(): - if is_first_chunk: - is_first_chunk = False - _LOGGER.debug("Receiving audio from satellite") + is_first_chunk = True + while chunk := await self._audio_queue.get(): + if chunk is None: + break - yield chunk - except asyncio.CancelledError: - pass # ignore + if is_first_chunk: + is_first_chunk = False + _LOGGER.debug("Receiving audio from satellite") + + yield chunk @callback def _handle_timer( diff --git a/homeassistant/components/wyoming/binary_sensor.py b/homeassistant/components/wyoming/binary_sensor.py index 4f2c0bb170a..24ee073ec4d 100644 --- a/homeassistant/components/wyoming/binary_sensor.py +++ b/homeassistant/components/wyoming/binary_sensor.py @@ -28,15 +28,16 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.satellite is not None + assert item.device is not None - async_add_entities([WyomingSatelliteAssistInProgress(item.satellite.device)]) + async_add_entities([WyomingSatelliteAssistInProgress(item.device)]) class WyomingSatelliteAssistInProgress(WyomingSatelliteEntity, BinarySensorEntity): """Entity to represent Assist is in progress for satellite.""" entity_description = BinarySensorEntityDescription( + entity_registry_enabled_default=False, key="assist_in_progress", translation_key="assist_in_progress", ) diff --git a/homeassistant/components/wyoming/config_flow.py b/homeassistant/components/wyoming/config_flow.py index 8461d9e83ac..5fdcb1a5484 100644 --- a/homeassistant/components/wyoming/config_flow.py +++ b/homeassistant/components/wyoming/config_flow.py @@ -8,9 +8,10 @@ from urllib.parse import urlparse import voluptuous as vol -from homeassistant.components import hassio, zeroconf +from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import DOMAIN from .data import WyomingService @@ -30,7 +31,7 @@ class WyomingConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _hassio_discovery: hassio.HassioServiceInfo + _hassio_discovery: HassioServiceInfo _service: WyomingService | None = None _name: str | None = None @@ -61,7 +62,7 @@ class WyomingConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="no_services") async 
def async_step_hassio( - self, discovery_info: hassio.HassioServiceInfo + self, discovery_info: HassioServiceInfo ) -> ConfigFlowResult: """Handle Supervisor add-on discovery.""" _LOGGER.debug("Supervisor discovery info: %s", discovery_info) @@ -123,7 +124,6 @@ class WyomingConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() - self.context[CONF_NAME] = self._name self.context["title_placeholders"] = {"name": self._name} self._service = service diff --git a/homeassistant/components/wyoming/conversation.py b/homeassistant/components/wyoming/conversation.py new file mode 100644 index 00000000000..9a17559c1f8 --- /dev/null +++ b/homeassistant/components/wyoming/conversation.py @@ -0,0 +1,194 @@ +"""Support for Wyoming intent recognition services.""" + +import logging + +from wyoming.asr import Transcript +from wyoming.client import AsyncTcpClient +from wyoming.handle import Handled, NotHandled +from wyoming.info import HandleProgram, IntentProgram +from wyoming.intent import Intent, NotRecognized + +from homeassistant.components import conversation +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers import intent +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import ulid + +from .const import DOMAIN +from .data import WyomingService +from .error import WyomingError +from .models import DomainDataItem + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Wyoming conversation.""" + item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] + async_add_entities( + [ + WyomingConversationEntity(config_entry, item.service), + ] + ) + + +class WyomingConversationEntity( + conversation.ConversationEntity, conversation.AbstractConversationAgent +): + """Wyoming conversation agent.""" + + _attr_has_entity_name = True + + def __init__( + self, + config_entry: ConfigEntry, + service: WyomingService, + ) -> None: + """Set up provider.""" + super().__init__() + + self.service = service + + self._intent_service: IntentProgram | None = None + self._handle_service: HandleProgram | None = None + + for maybe_intent in self.service.info.intent: + if maybe_intent.installed: + self._intent_service = maybe_intent + break + + for maybe_handle in self.service.info.handle: + if maybe_handle.installed: + self._handle_service = maybe_handle + break + + model_languages: set[str] = set() + + if self._intent_service is not None: + for intent_model in self._intent_service.models: + if intent_model.installed: + model_languages.update(intent_model.languages) + + self._attr_name = self._intent_service.name + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) + elif self._handle_service is not None: + for handle_model in self._handle_service.models: + if handle_model.installed: + model_languages.update(handle_model.languages) + + self._attr_name = self._handle_service.name + + self._supported_languages = list(model_languages) + self._attr_unique_id = f"{config_entry.entry_id}-conversation" + + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return self._supported_languages + + async def async_process( + self, user_input: conversation.ConversationInput + ) -> conversation.ConversationResult: + """Process a 
sentence.""" + conversation_id = user_input.conversation_id or ulid.ulid_now() + intent_response = intent.IntentResponse(language=user_input.language) + + try: + async with AsyncTcpClient(self.service.host, self.service.port) as client: + await client.write_event( + Transcript( + user_input.text, context={"conversation_id": conversation_id} + ).event() + ) + + while True: + event = await client.read_event() + if event is None: + _LOGGER.debug("Connection lost") + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + "Connection to service was lost", + ) + return conversation.ConversationResult( + response=intent_response, + conversation_id=user_input.conversation_id, + ) + + if Intent.is_type(event.type): + # Success + recognized_intent = Intent.from_event(event) + _LOGGER.debug("Recognized intent: %s", recognized_intent) + + intent_type = recognized_intent.name + intent_slots = { + e.name: {"value": e.value} + for e in recognized_intent.entities + } + intent_response = await intent.async_handle( + self.hass, + DOMAIN, + intent_type, + intent_slots, + text_input=user_input.text, + language=user_input.language, + ) + + if (not intent_response.speech) and recognized_intent.text: + intent_response.async_set_speech(recognized_intent.text) + + break + + if NotRecognized.is_type(event.type): + not_recognized = NotRecognized.from_event(event) + intent_response.async_set_error( + intent.IntentResponseErrorCode.NO_INTENT_MATCH, + not_recognized.text, + ) + break + + if Handled.is_type(event.type): + # Success + handled = Handled.from_event(event) + intent_response.async_set_speech(handled.text) + break + + if NotHandled.is_type(event.type): + not_handled = NotHandled.from_event(event) + intent_response.async_set_error( + intent.IntentResponseErrorCode.FAILED_TO_HANDLE, + not_handled.text, + ) + break + + except (OSError, WyomingError) as err: + _LOGGER.exception("Unexpected error while communicating with service") + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + f"Error communicating with service: {err}", + ) + return conversation.ConversationResult( + response=intent_response, + conversation_id=user_input.conversation_id, + ) + except intent.IntentError as err: + _LOGGER.exception("Unexpected error while handling intent") + intent_response.async_set_error( + intent.IntentResponseErrorCode.FAILED_TO_HANDLE, + f"Error handling intent: {err}", + ) + return conversation.ConversationResult( + response=intent_response, + conversation_id=user_input.conversation_id, + ) + + # Success + return conversation.ConversationResult( + response=intent_response, conversation_id=conversation_id + ) diff --git a/homeassistant/components/wyoming/data.py b/homeassistant/components/wyoming/data.py index 1ee0f24f805..a16062ab058 100644 --- a/homeassistant/components/wyoming/data.py +++ b/homeassistant/components/wyoming/data.py @@ -37,6 +37,10 @@ class WyomingService: self.platforms.append(Platform.TTS) if any(wake.installed for wake in info.wake): self.platforms.append(Platform.WAKE_WORD) + if any(intent.installed for intent in info.intent) or any( + handle.installed for handle in info.handle + ): + self.platforms.append(Platform.CONVERSATION) def has_services(self) -> bool: """Return True if services are installed that Home Assistant can use.""" @@ -44,6 +48,8 @@ class WyomingService: any(asr for asr in self.info.asr if asr.installed) or any(tts for tts in self.info.tts if tts.installed) or any(wake for wake in self.info.wake if wake.installed) + or any(intent for 
intent in self.info.intent if intent.installed) + or any(handle for handle in self.info.handle if handle.installed) or ((self.info.satellite is not None) and self.info.satellite.installed) ) @@ -70,6 +76,16 @@ class WyomingService: if wake_installed: return wake_installed[0].name + # intent recognition (text -> intent) + intent_installed = [intent for intent in self.info.intent if intent.installed] + if intent_installed: + return intent_installed[0].name + + # intent handling (text -> text) + handle_installed = [handle for handle in self.info.handle if handle.installed] + if handle_installed: + return handle_installed[0].name + return None @classmethod diff --git a/homeassistant/components/wyoming/entity.py b/homeassistant/components/wyoming/entity.py index 4591283036f..1ce105fb860 100644 --- a/homeassistant/components/wyoming/entity.py +++ b/homeassistant/components/wyoming/entity.py @@ -6,7 +6,7 @@ from homeassistant.helpers import entity from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from .const import DOMAIN -from .satellite import SatelliteDevice +from .devices import SatelliteDevice class WyomingSatelliteEntity(entity.Entity): diff --git a/homeassistant/components/wyoming/manifest.json b/homeassistant/components/wyoming/manifest.json index 30104a88dce..b837d2a9e76 100644 --- a/homeassistant/components/wyoming/manifest.json +++ b/homeassistant/components/wyoming/manifest.json @@ -3,7 +3,12 @@ "name": "Wyoming Protocol", "codeowners": ["@balloob", "@synesthesiam"], "config_flow": true, - "dependencies": ["assist_pipeline", "intent", "conversation"], + "dependencies": [ + "assist_satellite", + "assist_pipeline", + "intent", + "conversation" + ], "documentation": "https://www.home-assistant.io/integrations/wyoming", "integration_type": "service", "iot_class": "local_push", diff --git a/homeassistant/components/wyoming/models.py b/homeassistant/components/wyoming/models.py index 066af144d78..b819d06f916 100644 --- a/homeassistant/components/wyoming/models.py +++ b/homeassistant/components/wyoming/models.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from .data import WyomingService -from .satellite import WyomingSatellite +from .devices import SatelliteDevice @dataclass @@ -11,4 +11,4 @@ class DomainDataItem: """Domain data item.""" service: WyomingService - satellite: WyomingSatellite | None = None + device: SatelliteDevice | None = None diff --git a/homeassistant/components/wyoming/number.py b/homeassistant/components/wyoming/number.py index 5e769eeb06d..d9a58cc3333 100644 --- a/homeassistant/components/wyoming/number.py +++ b/homeassistant/components/wyoming/number.py @@ -30,13 +30,12 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.satellite is not None + assert item.device is not None - device = item.satellite.device async_add_entities( [ - WyomingSatelliteAutoGainNumber(device), - WyomingSatelliteVolumeMultiplierNumber(device), + WyomingSatelliteAutoGainNumber(item.device), + WyomingSatelliteVolumeMultiplierNumber(item.device), ] ) diff --git a/homeassistant/components/wyoming/select.py b/homeassistant/components/wyoming/select.py index f852b4d0434..bbcaab81710 100644 --- a/homeassistant/components/wyoming/select.py +++ b/homeassistant/components/wyoming/select.py @@ -42,14 +42,13 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.satellite is not 
None + assert item.device is not None - device = item.satellite.device async_add_entities( [ - WyomingSatellitePipelineSelect(hass, device), - WyomingSatelliteNoiseSuppressionLevelSelect(device), - WyomingSatelliteVadSensitivitySelect(hass, device), + WyomingSatellitePipelineSelect(hass, item.device), + WyomingSatelliteNoiseSuppressionLevelSelect(item.device), + WyomingSatelliteVadSensitivitySelect(hass, item.device), ] ) diff --git a/homeassistant/components/wyoming/switch.py b/homeassistant/components/wyoming/switch.py index c012c60bc5a..308429331c3 100644 --- a/homeassistant/components/wyoming/switch.py +++ b/homeassistant/components/wyoming/switch.py @@ -27,9 +27,9 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.satellite is not None + assert item.device is not None - async_add_entities([WyomingSatelliteMuteSwitch(item.satellite.device)]) + async_add_entities([WyomingSatelliteMuteSwitch(item.device)]) class WyomingSatelliteMuteSwitch( @@ -51,7 +51,7 @@ class WyomingSatelliteMuteSwitch( # Default to off self._attr_is_on = (state is not None) and (state.state == STATE_ON) - self._device.is_muted = self._attr_is_on + self._device.set_is_muted(self._attr_is_on) async def async_turn_on(self, **kwargs: Any) -> None: """Turn on.""" diff --git a/homeassistant/components/x10/light.py b/homeassistant/components/x10/light.py index 29c15f66993..23343cb0f8d 100644 --- a/homeassistant/components/x10/light.py +++ b/homeassistant/components/x10/light.py @@ -54,7 +54,7 @@ def setup_platform( try: x10_command("info") except CalledProcessError as err: - _LOGGER.info("Assuming that the device is CM17A: %s", err.output) + _LOGGER.warning("Assuming that the device is CM17A: %s", err.output) is_cm11a = False add_entities(X10Light(light, is_cm11a) for light in config[CONF_DEVICES]) diff --git a/homeassistant/components/x10/manifest.json b/homeassistant/components/x10/manifest.json index 258080dc374..517bab07f6c 100644 --- a/homeassistant/components/x10/manifest.json +++ b/homeassistant/components/x10/manifest.json @@ -3,5 +3,6 @@ "name": "Heyu X10", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/x10", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/xbox/binary_sensor.py b/homeassistant/components/xbox/binary_sensor.py index 0f0b9799d3d..af95834425a 100644 --- a/homeassistant/components/xbox/binary_sensor.py +++ b/homeassistant/components/xbox/binary_sensor.py @@ -10,9 +10,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .base_sensor import XboxBaseSensorEntity from .const import DOMAIN from .coordinator import XboxUpdateCoordinator +from .entity import XboxBaseEntity PRESENCE_ATTRIBUTES = ["online", "in_party", "in_game", "in_multiplayer"] @@ -32,7 +32,7 @@ async def async_setup_entry( update_friends() -class XboxBinarySensorEntity(XboxBaseSensorEntity, BinarySensorEntity): +class XboxBinarySensorEntity(XboxBaseEntity, BinarySensorEntity): """Representation of a Xbox presence state.""" @property diff --git a/homeassistant/components/xbox/base_sensor.py b/homeassistant/components/xbox/entity.py similarity index 97% rename from homeassistant/components/xbox/base_sensor.py rename to homeassistant/components/xbox/entity.py index f252385d4ca..d4a63b71b39 
100644 --- a/homeassistant/components/xbox/base_sensor.py +++ b/homeassistant/components/xbox/entity.py @@ -11,7 +11,7 @@ from .const import DOMAIN from .coordinator import PresenceData, XboxUpdateCoordinator -class XboxBaseSensorEntity(CoordinatorEntity[XboxUpdateCoordinator]): +class XboxBaseEntity(CoordinatorEntity[XboxUpdateCoordinator]): """Base Sensor for the Xbox Integration.""" def __init__( diff --git a/homeassistant/components/xbox/manifest.json b/homeassistant/components/xbox/manifest.json index 30a6c3bc700..3fc2071e66b 100644 --- a/homeassistant/components/xbox/manifest.json +++ b/homeassistant/components/xbox/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/xbox", "iot_class": "cloud_polling", - "requirements": ["xbox-webapi==2.0.11"] + "requirements": ["xbox-webapi==2.1.0"] } diff --git a/homeassistant/components/xbox/media_source.py b/homeassistant/components/xbox/media_source.py index a63f3b2027b..4478502b4ca 100644 --- a/homeassistant/components/xbox/media_source.py +++ b/homeassistant/components/xbox/media_source.py @@ -13,7 +13,7 @@ from xbox.webapi.api.provider.screenshots.models import ScreenshotResponse from xbox.webapi.api.provider.smartglass.models import InstalledPackage from homeassistant.components.media_player import MediaClass -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSource, MediaSourceItem, diff --git a/homeassistant/components/xbox/sensor.py b/homeassistant/components/xbox/sensor.py index ff6591d5b3e..f269e0a5bb9 100644 --- a/homeassistant/components/xbox/sensor.py +++ b/homeassistant/components/xbox/sensor.py @@ -10,9 +10,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .base_sensor import XboxBaseSensorEntity from .const import DOMAIN from .coordinator import XboxUpdateCoordinator +from .entity import XboxBaseEntity SENSOR_ATTRIBUTES = ["status", "gamer_score", "account_tier", "gold_tenure"] @@ -34,7 +34,7 @@ async def async_setup_entry( update_friends() -class XboxSensorEntity(XboxBaseSensorEntity, SensorEntity): +class XboxSensorEntity(XboxBaseEntity, SensorEntity): """Representation of a Xbox presence state.""" @property diff --git a/homeassistant/components/xeoma/manifest.json b/homeassistant/components/xeoma/manifest.json index a73b4bb8671..839724cc781 100644 --- a/homeassistant/components/xeoma/manifest.json +++ b/homeassistant/components/xeoma/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/xeoma", "iot_class": "local_polling", "loggers": ["pyxeoma"], - "requirements": ["pyxeoma==1.4.1"] + "quality_scale": "legacy", + "requirements": ["pyxeoma==1.4.2"] } diff --git a/homeassistant/components/xiaomi/camera.py b/homeassistant/components/xiaomi/camera.py index 8ab15f85147..cb8d5f39dec 100644 --- a/homeassistant/components/xiaomi/camera.py +++ b/homeassistant/components/xiaomi/camera.py @@ -140,7 +140,7 @@ class XiaomiCamera(Camera): videos = [v for v in ftp.nlst() if ".tmp" not in v] if not videos: - _LOGGER.info('Video folder "%s" is empty; delaying', latest_dir) + _LOGGER.debug('Video folder "%s" is empty; delaying', latest_dir) return False if self._model == MODEL_XIAOFANG: diff --git a/homeassistant/components/xiaomi/device_tracker.py 
b/homeassistant/components/xiaomi/device_tracker.py index b14ec073938..9d4a29d2c78 100644 --- a/homeassistant/components/xiaomi/device_tracker.py +++ b/homeassistant/components/xiaomi/device_tracker.py @@ -9,7 +9,7 @@ import requests import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -31,7 +31,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> XiaomiDeviceScanner | None: """Validate the configuration and return a Xiaomi Device Scanner.""" - scanner = XiaomiDeviceScanner(config[DOMAIN]) + scanner = XiaomiDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) return scanner if scanner.success_init else None @@ -139,7 +139,7 @@ def _retrieve_list(host, token, **kwargs): _LOGGER.exception("No list in response from mi router. %s", result) return None else: - _LOGGER.info( + _LOGGER.warning( "Receive wrong Xiaomi code %s, expected 0 in response %s", xiaomi_code, result, diff --git a/homeassistant/components/xiaomi/manifest.json b/homeassistant/components/xiaomi/manifest.json index ef7085f2aa4..45540db47f3 100644 --- a/homeassistant/components/xiaomi/manifest.json +++ b/homeassistant/components/xiaomi/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["ffmpeg"], "documentation": "https://www.home-assistant.io/integrations/xiaomi", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/xiaomi_aqara/__init__.py b/homeassistant/components/xiaomi_aqara/__init__.py index ee7948a237e..b7f4aa1942e 100644 --- a/homeassistant/components/xiaomi_aqara/__init__.py +++ b/homeassistant/components/xiaomi_aqara/__init__.py @@ -1,9 +1,7 @@ """Support for Xiaomi Gateways.""" import asyncio -from datetime import timedelta import logging -from typing import Any import voluptuous as vol from xiaomi_gateway import AsyncXiaomiGatewayMulticast, XiaomiGateway @@ -11,11 +9,8 @@ from xiaomi_gateway import AsyncXiaomiGatewayMulticast, XiaomiGateway from homeassistant.components import persistent_notification from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import ( - ATTR_BATTERY_LEVEL, ATTR_DEVICE_ID, - ATTR_VOLTAGE, CONF_HOST, - CONF_MAC, CONF_PORT, CONF_PROTOCOL, EVENT_HOMEASSISTANT_STOP, @@ -24,11 +19,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.device_registry import DeviceInfo, format_mac -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.helpers.typing import ConfigType -from homeassistant.util.dt import utcnow from .const import ( CONF_INTERFACE, @@ -58,8 +49,6 @@ ATTR_GW_MAC = "gw_mac" ATTR_RINGTONE_ID = "ringtone_id" ATTR_RINGTONE_VOL = "ringtone_vol" -TIME_TILL_UNAVAILABLE = timedelta(minutes=150) - SERVICE_PLAY_RINGTONE = "play_ringtone" SERVICE_STOP_RINGTONE = "stop_ringtone" SERVICE_ADD_DEVICE = "add_device" @@ -245,152 +234,6 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> return unload_ok -class XiaomiDevice(Entity): - """Representation a base Xiaomi device.""" - - _attr_should_poll = False - - def __init__(self, device, device_type, xiaomi_hub, 
config_entry): - """Initialize the Xiaomi device.""" - self._state = None - self._is_available = True - self._sid = device["sid"] - self._model = device["model"] - self._protocol = device["proto"] - self._name = f"{device_type}_{self._sid}" - self._device_name = f"{self._model}_{self._sid}" - self._type = device_type - self._write_to_hub = xiaomi_hub.write_to_hub - self._get_from_hub = xiaomi_hub.get_from_hub - self._extra_state_attributes = {} - self._remove_unavailability_tracker = None - self._xiaomi_hub = xiaomi_hub - self.parse_data(device["data"], device["raw_data"]) - self.parse_voltage(device["data"]) - - if hasattr(self, "_data_key") and self._data_key: - self._unique_id = f"{self._data_key}{self._sid}" - else: - self._unique_id = f"{self._type}{self._sid}" - - self._gateway_id = config_entry.unique_id - if config_entry.data[CONF_MAC] == format_mac(self._sid): - # this entity belongs to the gateway itself - self._is_gateway = True - self._device_id = config_entry.unique_id - else: - # this entity is connected through zigbee - self._is_gateway = False - self._device_id = self._sid - - async def async_added_to_hass(self): - """Start unavailability tracking.""" - self._xiaomi_hub.callbacks[self._sid].append(self.push_data) - self._async_track_unavailable() - - @property - def name(self): - """Return the name of the device.""" - return self._name - - @property - def unique_id(self) -> str: - """Return a unique ID.""" - return self._unique_id - - @property - def device_id(self): - """Return the device id of the Xiaomi Aqara device.""" - return self._device_id - - @property - def device_info(self) -> DeviceInfo: - """Return the device info of the Xiaomi Aqara device.""" - if self._is_gateway: - device_info = DeviceInfo( - identifiers={(DOMAIN, self._device_id)}, - model=self._model, - ) - else: - device_info = DeviceInfo( - connections={(dr.CONNECTION_ZIGBEE, self._device_id)}, - identifiers={(DOMAIN, self._device_id)}, - manufacturer="Xiaomi Aqara", - model=self._model, - name=self._device_name, - sw_version=self._protocol, - via_device=(DOMAIN, self._gateway_id), - ) - - return device_info - - @property - def available(self): - """Return True if entity is available.""" - return self._is_available - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - return self._extra_state_attributes - - @callback - def _async_set_unavailable(self, now): - """Set state to UNAVAILABLE.""" - self._remove_unavailability_tracker = None - self._is_available = False - self.async_write_ha_state() - - @callback - def _async_track_unavailable(self): - if self._remove_unavailability_tracker: - self._remove_unavailability_tracker() - self._remove_unavailability_tracker = async_track_point_in_utc_time( - self.hass, self._async_set_unavailable, utcnow() + TIME_TILL_UNAVAILABLE - ) - if not self._is_available: - self._is_available = True - return True - return False - - def push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None: - """Push from Hub running in another thread.""" - self.hass.loop.call_soon_threadsafe(self.async_push_data, data, raw_data) - - @callback - def async_push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None: - """Push from Hub handled in the event loop.""" - _LOGGER.debug("PUSH >> %s: %s", self, data) - was_unavailable = self._async_track_unavailable() - is_data = self.parse_data(data, raw_data) - is_voltage = self.parse_voltage(data) - if is_data or is_voltage or was_unavailable: - self.async_write_ha_state() - - def 
parse_voltage(self, data): - """Parse battery level data sent by gateway.""" - if "voltage" in data: - voltage_key = "voltage" - elif "battery_voltage" in data: - voltage_key = "battery_voltage" - else: - return False - - max_volt = 3300 - min_volt = 2800 - voltage = data[voltage_key] - self._extra_state_attributes[ATTR_VOLTAGE] = round(voltage / 1000.0, 2) - voltage = min(voltage, max_volt) - voltage = max(voltage, min_volt) - percent = ((voltage - min_volt) / (max_volt - min_volt)) * 100 - self._extra_state_attributes[ATTR_BATTERY_LEVEL] = round(percent, 1) - return True - - def parse_data(self, data, raw_data): - """Parse data sent by gateway.""" - raise NotImplementedError - - def _add_gateway_to_schema(hass, schema): """Extend a voluptuous schema with a gateway validator.""" diff --git a/homeassistant/components/xiaomi_aqara/binary_sensor.py b/homeassistant/components/xiaomi_aqara/binary_sensor.py index 75208b142dd..ad91dda2173 100644 --- a/homeassistant/components/xiaomi_aqara/binary_sensor.py +++ b/homeassistant/components/xiaomi_aqara/binary_sensor.py @@ -12,8 +12,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later from homeassistant.helpers.restore_state import RestoreEntity -from . import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY +from .entity import XiaomiDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_aqara/config_flow.py b/homeassistant/components/xiaomi_aqara/config_flow.py index a89bb8447a3..6252e6849d0 100644 --- a/homeassistant/components/xiaomi_aqara/config_flow.py +++ b/homeassistant/components/xiaomi_aqara/config_flow.py @@ -2,7 +2,7 @@ import logging from socket import gaierror -from typing import TYPE_CHECKING, Any +from typing import Any import voluptuous as vol from xiaomi_gateway import MULTICAST_PORT, XiaomiGateway, XiaomiGatewayDiscovery @@ -50,13 +50,14 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + selected_gateway: XiaomiGateway + gateways: dict[str, XiaomiGateway] + def __init__(self) -> None: """Initialize.""" self.host: str | None = None self.interface = DEFAULT_INTERFACE self.sid: str | None = None - self.gateways: dict[str, XiaomiGateway] | None = None - self.selected_gateway: XiaomiGateway | None = None @callback def async_show_form_step_user(self, errors): @@ -99,8 +100,6 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): None, ) - if TYPE_CHECKING: - assert self.selected_gateway if self.selected_gateway.connection_error: errors[CONF_HOST] = "invalid_host" if self.selected_gateway.mac_error: @@ -120,8 +119,6 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): self.gateways = xiaomi.gateways - if TYPE_CHECKING: - assert self.gateways is not None if len(self.gateways) == 1: self.selected_gateway = list(self.gateways.values())[0] self.sid = self.selected_gateway.sid @@ -132,9 +129,11 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "discovery_error" return self.async_show_form_step_user(errors) - async def async_step_select(self, user_input=None): + async def async_step_select( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle multiple aqara gateways found.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: ip_adress = user_input["select_ip"] self.selected_gateway = self.gateways[ip_adress] @@ -192,7 +191,9 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): return await 
self.async_step_user() - async def async_step_settings(self, user_input=None): + async def async_step_settings( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Specify settings and connect aqara gateway.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/xiaomi_aqara/cover.py b/homeassistant/components/xiaomi_aqara/cover.py index 64c9f6f208a..e073ef6b683 100644 --- a/homeassistant/components/xiaomi_aqara/cover.py +++ b/homeassistant/components/xiaomi_aqara/cover.py @@ -7,8 +7,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY +from .entity import XiaomiDevice ATTR_CURTAIN_LEVEL = "curtain_level" diff --git a/homeassistant/components/xiaomi_aqara/entity.py b/homeassistant/components/xiaomi_aqara/entity.py new file mode 100644 index 00000000000..db47015c0cf --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/entity.py @@ -0,0 +1,166 @@ +"""Support for Xiaomi Gateways.""" + +from datetime import timedelta +import logging +from typing import Any + +from homeassistant.const import ATTR_BATTERY_LEVEL, ATTR_VOLTAGE, CONF_MAC +from homeassistant.core import callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo, format_mac +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.event import async_track_point_in_utc_time +from homeassistant.util.dt import utcnow + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +TIME_TILL_UNAVAILABLE = timedelta(minutes=150) + + +class XiaomiDevice(Entity): + """Representation a base Xiaomi device.""" + + _attr_should_poll = False + + def __init__(self, device, device_type, xiaomi_hub, config_entry): + """Initialize the Xiaomi device.""" + self._state = None + self._is_available = True + self._sid = device["sid"] + self._model = device["model"] + self._protocol = device["proto"] + self._name = f"{device_type}_{self._sid}" + self._device_name = f"{self._model}_{self._sid}" + self._type = device_type + self._write_to_hub = xiaomi_hub.write_to_hub + self._get_from_hub = xiaomi_hub.get_from_hub + self._extra_state_attributes = {} + self._remove_unavailability_tracker = None + self._xiaomi_hub = xiaomi_hub + self.parse_data(device["data"], device["raw_data"]) + self.parse_voltage(device["data"]) + + if hasattr(self, "_data_key") and self._data_key: + self._unique_id = f"{self._data_key}{self._sid}" + else: + self._unique_id = f"{self._type}{self._sid}" + + self._gateway_id = config_entry.unique_id + if config_entry.data[CONF_MAC] == format_mac(self._sid): + # this entity belongs to the gateway itself + self._is_gateway = True + self._device_id = config_entry.unique_id + else: + # this entity is connected through zigbee + self._is_gateway = False + self._device_id = self._sid + + async def async_added_to_hass(self): + """Start unavailability tracking.""" + self._xiaomi_hub.callbacks[self._sid].append(self.push_data) + self._async_track_unavailable() + + @property + def name(self): + """Return the name of the device.""" + return self._name + + @property + def unique_id(self) -> str: + """Return a unique ID.""" + return self._unique_id + + @property + def device_id(self): + """Return the device id of the Xiaomi Aqara device.""" + return self._device_id + + @property + def device_info(self) -> DeviceInfo: + 
"""Return the device info of the Xiaomi Aqara device.""" + if self._is_gateway: + device_info = DeviceInfo( + identifiers={(DOMAIN, self._device_id)}, + connections={(dr.CONNECTION_NETWORK_MAC, self._device_id)}, + model=self._model, + ) + else: + device_info = DeviceInfo( + connections={(dr.CONNECTION_ZIGBEE, self._device_id)}, + identifiers={(DOMAIN, self._device_id)}, + manufacturer="Xiaomi Aqara", + model=self._model, + name=self._device_name, + sw_version=self._protocol, + via_device=(DOMAIN, self._gateway_id), + ) + + return device_info + + @property + def available(self): + """Return True if entity is available.""" + return self._is_available + + @property + def extra_state_attributes(self): + """Return the state attributes.""" + return self._extra_state_attributes + + @callback + def _async_set_unavailable(self, now): + """Set state to UNAVAILABLE.""" + self._remove_unavailability_tracker = None + self._is_available = False + self.async_write_ha_state() + + @callback + def _async_track_unavailable(self): + if self._remove_unavailability_tracker: + self._remove_unavailability_tracker() + self._remove_unavailability_tracker = async_track_point_in_utc_time( + self.hass, self._async_set_unavailable, utcnow() + TIME_TILL_UNAVAILABLE + ) + if not self._is_available: + self._is_available = True + return True + return False + + def push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None: + """Push from Hub running in another thread.""" + self.hass.loop.call_soon_threadsafe(self.async_push_data, data, raw_data) + + @callback + def async_push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None: + """Push from Hub handled in the event loop.""" + _LOGGER.debug("PUSH >> %s: %s", self, data) + was_unavailable = self._async_track_unavailable() + is_data = self.parse_data(data, raw_data) + is_voltage = self.parse_voltage(data) + if is_data or is_voltage or was_unavailable: + self.async_write_ha_state() + + def parse_voltage(self, data): + """Parse battery level data sent by gateway.""" + if "voltage" in data: + voltage_key = "voltage" + elif "battery_voltage" in data: + voltage_key = "battery_voltage" + else: + return False + + max_volt = 3300 + min_volt = 2800 + voltage = data[voltage_key] + self._extra_state_attributes[ATTR_VOLTAGE] = round(voltage / 1000.0, 2) + voltage = min(voltage, max_volt) + voltage = max(voltage, min_volt) + percent = ((voltage - min_volt) / (max_volt - min_volt)) * 100 + self._extra_state_attributes[ATTR_BATTERY_LEVEL] = round(percent, 1) + return True + + def parse_data(self, data, raw_data): + """Parse data sent by gateway.""" + raise NotImplementedError diff --git a/homeassistant/components/xiaomi_aqara/icons.json b/homeassistant/components/xiaomi_aqara/icons.json index 4975414833d..62149b0dd40 100644 --- a/homeassistant/components/xiaomi_aqara/icons.json +++ b/homeassistant/components/xiaomi_aqara/icons.json @@ -1,8 +1,16 @@ { "services": { - "add_device": "mdi:cellphone-link", - "play_ringtone": "mdi:music", - "remove_device": "mdi:cellphone-link", - "stop_ringtone": "mdi:music-off" + "add_device": { + "service": "mdi:cellphone-link" + }, + "play_ringtone": { + "service": "mdi:music" + }, + "remove_device": { + "service": "mdi:cellphone-link" + }, + "stop_ringtone": { + "service": "mdi:music-off" + } } } diff --git a/homeassistant/components/xiaomi_aqara/light.py b/homeassistant/components/xiaomi_aqara/light.py index fc19a22eb5f..c8057f1df4a 100644 --- a/homeassistant/components/xiaomi_aqara/light.py +++ 
b/homeassistant/components/xiaomi_aqara/light.py @@ -16,8 +16,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util -from . import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY +from .entity import XiaomiDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_aqara/lock.py b/homeassistant/components/xiaomi_aqara/lock.py index 8499864576a..5e538f25699 100644 --- a/homeassistant/components/xiaomi_aqara/lock.py +++ b/homeassistant/components/xiaomi_aqara/lock.py @@ -2,15 +2,14 @@ from __future__ import annotations -from homeassistant.components.lock import LockEntity +from homeassistant.components.lock import LockEntity, LockState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later -from . import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY +from .entity import XiaomiDevice FINGER_KEY = "fing_verified" PASSWORD_KEY = "psw_verified" @@ -50,7 +49,7 @@ class XiaomiAqaraLock(LockEntity, XiaomiDevice): def is_locked(self) -> bool | None: """Return true if lock is locked.""" if self._state is not None: - return self._state == STATE_LOCKED + return self._state == LockState.LOCKED return None @property @@ -66,7 +65,7 @@ class XiaomiAqaraLock(LockEntity, XiaomiDevice): @callback def clear_unlock_state(self, _): """Clear unlock state automatically.""" - self._state = STATE_LOCKED + self._state = LockState.LOCKED self.async_write_ha_state() def parse_data(self, data, raw_data): @@ -79,7 +78,7 @@ class XiaomiAqaraLock(LockEntity, XiaomiDevice): if (value := data.get(key)) is not None: self._changed_by = int(value) self._verified_wrong_times = 0 - self._state = STATE_UNLOCKED + self._state = LockState.UNLOCKED async_call_later( self.hass, UNLOCK_MAINTAIN_TIME, self.clear_unlock_state ) diff --git a/homeassistant/components/xiaomi_aqara/sensor.py b/homeassistant/components/xiaomi_aqara/sensor.py index 4b354a6e730..49358276a48 100644 --- a/homeassistant/components/xiaomi_aqara/sensor.py +++ b/homeassistant/components/xiaomi_aqara/sensor.py @@ -22,8 +22,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import XiaomiDevice from .const import BATTERY_MODELS, DOMAIN, GATEWAYS_KEY, POWER_MODELS +from .entity import XiaomiDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_aqara/switch.py b/homeassistant/components/xiaomi_aqara/switch.py index b6bd2ca1e6a..f66cf8c7603 100644 --- a/homeassistant/components/xiaomi_aqara/switch.py +++ b/homeassistant/components/xiaomi_aqara/switch.py @@ -8,8 +8,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY +from .entity import XiaomiDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_ble/binary_sensor.py b/homeassistant/components/xiaomi_ble/binary_sensor.py index 5336c4d8f7f..b853f83b967 100644 --- a/homeassistant/components/xiaomi_ble/binary_sensor.py +++ b/homeassistant/components/xiaomi_ble/binary_sensor.py @@ -50,6 +50,10 @@ BINARY_SENSOR_DESCRIPTIONS = { key=XiaomiBinarySensorDeviceClass.MOTION, device_class=BinarySensorDeviceClass.MOTION, ), + XiaomiBinarySensorDeviceClass.OCCUPANCY: BinarySensorEntityDescription( + key=XiaomiBinarySensorDeviceClass.OCCUPANCY, + device_class=BinarySensorDeviceClass.OCCUPANCY, + ), XiaomiBinarySensorDeviceClass.OPENING: BinarySensorEntityDescription( key=XiaomiBinarySensorDeviceClass.OPENING, device_class=BinarySensorDeviceClass.OPENING, diff --git a/homeassistant/components/xiaomi_ble/config_flow.py b/homeassistant/components/xiaomi_ble/config_flow.py index 8209c9565bd..df2de381d39 100644 --- a/homeassistant/components/xiaomi_ble/config_flow.py +++ b/homeassistant/components/xiaomi_ble/config_flow.py @@ -4,10 +4,16 @@ from __future__ import annotations from collections.abc import Mapping import dataclasses +import logging from typing import Any import voluptuous as vol -from xiaomi_ble import XiaomiBluetoothDeviceData as DeviceData +from xiaomi_ble import ( + XiaomiBluetoothDeviceData as DeviceData, + XiaomiCloudException, + XiaomiCloudInvalidAuthenticationException, + XiaomiCloudTokenFetch, +) from xiaomi_ble.parser import EncryptionScheme from homeassistant.components import onboarding @@ -17,14 +23,18 @@ from homeassistant.components.bluetooth import ( async_discovered_service_info, async_process_advertisements, ) -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_ADDRESS +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ADDRESS, CONF_PASSWORD, CONF_USERNAME +from homeassistant.data_entry_flow import AbortFlow +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN # How long to wait for additional advertisement packets if we don't have the right ones ADDITIONAL_DISCOVERY_TIMEOUT = 60 +_LOGGER = logging.getLogger(__name__) + @dataclasses.dataclass class Discovery: @@ -104,7 +114,7 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): if device.encryption_scheme == EncryptionScheme.MIBEACON_LEGACY: return await self.async_step_get_encryption_key_legacy() if device.encryption_scheme == EncryptionScheme.MIBEACON_4_5: - return await self.async_step_get_encryption_key_4_5() + return await self.async_step_get_encryption_key_4_5_choose_method() return await self.async_step_bluetooth_confirm() async def async_step_get_encryption_key_legacy( @@ -175,6 +185,67 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_cloud_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the cloud auth step.""" + assert self._discovery_info + + errors: dict[str, str] = {} + description_placeholders: dict[str, str] = {} + if user_input is not None: + session = async_get_clientsession(self.hass) + fetcher = XiaomiCloudTokenFetch( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD], session + ) + try: + device_details = await fetcher.get_device_info( + self._discovery_info.address + ) + except 
XiaomiCloudInvalidAuthenticationException as ex: + _LOGGER.debug("Authentication failed: %s", ex, exc_info=True) + errors = {"base": "auth_failed"} + description_placeholders = {"error_detail": str(ex)} + except XiaomiCloudException as ex: + _LOGGER.debug("Failed to connect to MI API: %s", ex, exc_info=True) + raise AbortFlow( + "api_error", description_placeholders={"error_detail": str(ex)} + ) from ex + else: + if device_details: + return await self.async_step_get_encryption_key_4_5( + {"bindkey": device_details.bindkey} + ) + errors = {"base": "api_device_not_found"} + + user_input = user_input or {} + return self.async_show_form( + step_id="cloud_auth", + errors=errors, + data_schema=vol.Schema( + { + vol.Required( + CONF_USERNAME, default=user_input.get(CONF_USERNAME) + ): str, + vol.Required(CONF_PASSWORD): str, + } + ), + description_placeholders={ + **self.context["title_placeholders"], + **description_placeholders, + }, + ) + + async def async_step_get_encryption_key_4_5_choose_method( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Choose method to get the bind key for a version 4/5 device.""" + return self.async_show_menu( + step_id="get_encryption_key_4_5_choose_method", + menu_options=["cloud_auth", "get_encryption_key_4_5"], + description_placeholders=self.context["title_placeholders"], + ) + async def async_step_bluetooth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -231,7 +302,7 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_get_encryption_key_legacy() if discovery.device.encryption_scheme == EncryptionScheme.MIBEACON_4_5: - return await self.async_step_get_encryption_key_4_5() + return await self.async_step_get_encryption_key_4_5_choose_method() return self._async_get_or_create_entry() @@ -264,9 +335,6 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow initialized by a reauth event.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry is not None - device: DeviceData = entry_data["device"] self._discovered_device = device @@ -276,7 +344,7 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_get_encryption_key_legacy() if device.encryption_scheme == EncryptionScheme.MIBEACON_4_5: - return await self.async_step_get_encryption_key_4_5() + return await self.async_step_get_encryption_key_4_5_choose_method() # Otherwise there wasn't actually encryption so abort return self.async_abort(reason="reauth_successful") @@ -289,10 +357,10 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): if bindkey: data["bindkey"] = bindkey - if entry_id := self.context.get("entry_id"): - entry = self.hass.config_entries.async_get_entry(entry_id) - assert entry is not None - return self.async_update_reload_and_abort(entry, data=data) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) return self.async_create_entry( title=self.context["title_placeholders"]["name"], diff --git a/homeassistant/components/xiaomi_ble/manifest.json b/homeassistant/components/xiaomi_ble/manifest.json index 21e9bc45bb8..26dd82c73bc 100644 --- a/homeassistant/components/xiaomi_ble/manifest.json +++ b/homeassistant/components/xiaomi_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble", "iot_class": 
"local_push", - "requirements": ["xiaomi-ble==0.30.2"] + "requirements": ["xiaomi-ble==0.33.0"] } diff --git a/homeassistant/components/xiaomi_ble/sensor.py b/homeassistant/components/xiaomi_ble/sensor.py index 3108c285dbe..ba8f64383ee 100644 --- a/homeassistant/components/xiaomi_ble/sensor.py +++ b/homeassistant/components/xiaomi_ble/sensor.py @@ -48,8 +48,8 @@ SENSOR_DESCRIPTIONS = { ), (DeviceClass.CONDUCTIVITY, Units.CONDUCTIVITY): SensorEntityDescription( key=str(Units.CONDUCTIVITY), - device_class=None, - native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS, + device_class=SensorDeviceClass.CONDUCTIVITY, + native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM, state_class=SensorStateClass.MEASUREMENT, ), ( @@ -155,6 +155,24 @@ SENSOR_DESCRIPTIONS = { (ExtendedSensorDeviceClass.LOCK_METHOD, None): SensorEntityDescription( key=str(ExtendedSensorDeviceClass.LOCK_METHOD), icon="mdi:key-variant" ), + # Duration of detected status (in minutes) for Occpancy Sensor + ( + ExtendedSensorDeviceClass.DURATION_DETECTED, + Units.TIME_MINUTES, + ): SensorEntityDescription( + key=str(ExtendedSensorDeviceClass.DURATION_DETECTED), + native_unit_of_measurement=UnitOfTime.MINUTES, + state_class=SensorStateClass.MEASUREMENT, + ), + # Duration of cleared status (in minutes) for Occpancy Sensor + ( + ExtendedSensorDeviceClass.DURATION_CLEARED, + Units.TIME_MINUTES, + ): SensorEntityDescription( + key=str(ExtendedSensorDeviceClass.DURATION_CLEARED), + native_unit_of_measurement=UnitOfTime.MINUTES, + state_class=SensorStateClass.MEASUREMENT, + ), } diff --git a/homeassistant/components/xiaomi_ble/strings.json b/homeassistant/components/xiaomi_ble/strings.json index 048c9bd92e2..4ea4a47c61e 100644 --- a/homeassistant/components/xiaomi_ble/strings.json +++ b/homeassistant/components/xiaomi_ble/strings.json @@ -25,18 +25,35 @@ "data": { "bindkey": "Bindkey" } + }, + "cloud_auth": { + "description": "Please provide your Mi app username and password. This data won't be saved and only used to retrieve the device encryption key. Usernames and passwords are case sensitive.", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "get_encryption_key_4_5_choose_method": { + "description": "A Mi device can be set up in Home Assistant in two different ways.\n\nYou can enter the bindkey yourself, or Home Assistant can import them from your Mi account.", + "menu_options": { + "cloud_auth": "Mi account (recommended)", + "get_encryption_key_4_5": "Enter encryption key manually" + } } }, "error": { "decryption_failed": "The provided bindkey did not work, sensor data could not be decrypted. Please check it and try again.", "expected_24_characters": "Expected a 24 character hexadecimal bindkey.", - "expected_32_characters": "Expected a 32 character hexadecimal bindkey." + "expected_32_characters": "Expected a 32 character hexadecimal bindkey.", + "auth_failed": "Authentication failed: {error_detail}", + "api_device_not_found": "The device was not found in your Mi account." 
}, "abort": { "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "api_error": "Error while communicating with Mi API: {error_detail}" } }, "device_automation": { diff --git a/homeassistant/components/xiaomi_miio/__init__.py b/homeassistant/components/xiaomi_miio/__init__.py index bea8d9b402f..d841045d235 100644 --- a/homeassistant/components/xiaomi_miio/__init__.py +++ b/homeassistant/components/xiaomi_miio/__init__.py @@ -56,6 +56,7 @@ from .const import ( MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_ZA5, MODELS_AIR_MONITOR, MODELS_FAN, @@ -118,6 +119,7 @@ MODEL_TO_CLASS_MAP = { MODEL_FAN_P9: FanMiot, MODEL_FAN_P10: FanMiot, MODEL_FAN_P11: FanMiot, + MODEL_FAN_P18: FanMiot, MODEL_FAN_P5: FanP5, MODEL_FAN_ZA5: FanZA5, } @@ -186,7 +188,9 @@ def _async_update_data_default(hass, device): except DeviceException as ex: if getattr(ex, "code", None) != -9999: raise UpdateFailed(ex) from ex - _LOGGER.info("Got exception while fetching the state, trying again: %s", ex) + _LOGGER.error( + "Got exception while fetching the state, trying again: %s", ex + ) # Try to fetch the data a second time after error code -9999 try: return await _async_fetch_data() @@ -273,7 +277,9 @@ def _async_update_data_vacuum( except DeviceException as ex: if getattr(ex, "code", None) != -9999: raise UpdateFailed(ex) from ex - _LOGGER.info("Got exception while fetching the state, trying again: %s", ex) + _LOGGER.error( + "Got exception while fetching the state, trying again: %s", ex + ) # Try to fetch the data a second time after error code -9999 try: @@ -302,6 +308,7 @@ async def async_create_miio_device_and_coordinator( "zhimi.fan.za3": True, "zhimi.fan.za5": True, "zhimi.airpurifier.za1": True, + "dmaker.fan.1c": True, } lazy_discover = LAZY_DISCOVER_FOR_MODEL.get(model, False) @@ -381,6 +388,7 @@ async def async_create_miio_device_and_coordinator( coordinator = coordinator_class( hass, _LOGGER, + config_entry=entry, name=name, update_method=update_method(hass, device), # Polling interval. Will only be polled if there are subscribers. @@ -446,6 +454,7 @@ async def async_setup_gateway_entry(hass: HomeAssistant, entry: ConfigEntry) -> coordinator_dict[sub_device.sid] = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=name, update_method=update_data_factory(sub_device), # Polling interval. Will only be polled if there are subscribers. 
diff --git a/homeassistant/components/xiaomi_miio/air_quality.py b/homeassistant/components/xiaomi_miio/air_quality.py index 80dd751a98c..199d9161353 100644 --- a/homeassistant/components/xiaomi_miio/air_quality.py +++ b/homeassistant/components/xiaomi_miio/air_quality.py @@ -18,7 +18,7 @@ from .const import ( MODEL_AIRQUALITYMONITOR_S1, MODEL_AIRQUALITYMONITOR_V1, ) -from .device import XiaomiMiioEntity +from .entity import XiaomiMiioEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_miio/alarm_control_panel.py b/homeassistant/components/xiaomi_miio/alarm_control_panel.py index 58d5ed247ad..9c06198bc7e 100644 --- a/homeassistant/components/xiaomi_miio/alarm_control_panel.py +++ b/homeassistant/components/xiaomi_miio/alarm_control_panel.py @@ -10,13 +10,9 @@ from miio import DeviceException from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -106,11 +102,11 @@ class XiaomiGatewayAlarm(AlarmControlPanelEntity): self._attr_available = True if state == XIAOMI_STATE_ARMED_VALUE: - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY elif state == XIAOMI_STATE_DISARMED_VALUE: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED elif state == XIAOMI_STATE_ARMING_VALUE: - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING else: _LOGGER.warning( "New state (%s) doesn't match expected values: %s/%s/%s", @@ -119,6 +115,6 @@ class XiaomiGatewayAlarm(AlarmControlPanelEntity): XIAOMI_STATE_DISARMED_VALUE, XIAOMI_STATE_ARMING_VALUE, ) - self._attr_state = None + self._attr_alarm_state = None - _LOGGER.debug("State value: %s", self._attr_state) + _LOGGER.debug("State value: %s", self._attr_alarm_state) diff --git a/homeassistant/components/xiaomi_miio/binary_sensor.py b/homeassistant/components/xiaomi_miio/binary_sensor.py index 6d1a81007dc..a5ab7e56e6b 100644 --- a/homeassistant/components/xiaomi_miio/binary_sensor.py +++ b/homeassistant/components/xiaomi_miio/binary_sensor.py @@ -32,7 +32,7 @@ from .const import ( MODELS_VACUUM_WITH_MOP, MODELS_VACUUM_WITH_SEPARATE_MOP, ) -from .device import XiaomiCoordinatedMiioEntity +from .entity import XiaomiCoordinatedMiioEntity _LOGGER = logging.getLogger(__name__) @@ -56,13 +56,13 @@ class XiaomiMiioBinarySensorDescription(BinarySensorEntityDescription): BINARY_SENSOR_TYPES = ( XiaomiMiioBinarySensorDescription( key=ATTR_NO_WATER, - name="Water tank empty", + translation_key=ATTR_NO_WATER, icon="mdi:water-off-outline", entity_category=EntityCategory.DIAGNOSTIC, ), XiaomiMiioBinarySensorDescription( key=ATTR_WATER_TANK_DETACHED, - name="Water tank", + translation_key=ATTR_WATER_TANK_DETACHED, icon="mdi:car-coolant-level", device_class=BinarySensorDeviceClass.CONNECTIVITY, value=lambda value: not value, @@ -70,13 +70,13 @@ BINARY_SENSOR_TYPES = ( ), XiaomiMiioBinarySensorDescription( key=ATTR_PTC_STATUS, - name="Auxiliary heat status", + translation_key=ATTR_PTC_STATUS, device_class=BinarySensorDeviceClass.POWER, entity_category=EntityCategory.DIAGNOSTIC, ), 
XiaomiMiioBinarySensorDescription( key=ATTR_POWERSUPPLY_ATTACHED, - name="Power supply", + translation_key=ATTR_POWERSUPPLY_ATTACHED, device_class=BinarySensorDeviceClass.PLUG, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -88,7 +88,7 @@ FAN_ZA5_BINARY_SENSORS = (ATTR_POWERSUPPLY_ATTACHED,) VACUUM_SENSORS = { ATTR_MOP_ATTACHED: XiaomiMiioBinarySensorDescription( key=ATTR_WATER_BOX_ATTACHED, - name="Mop attached", + translation_key=ATTR_WATER_BOX_ATTACHED, icon="mdi:square-rounded", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -97,7 +97,7 @@ VACUUM_SENSORS = { ), ATTR_WATER_BOX_ATTACHED: XiaomiMiioBinarySensorDescription( key=ATTR_WATER_BOX_ATTACHED, - name="Water box attached", + translation_key=ATTR_WATER_BOX_ATTACHED, icon="mdi:water", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -106,7 +106,7 @@ VACUUM_SENSORS = { ), ATTR_WATER_SHORTAGE: XiaomiMiioBinarySensorDescription( key=ATTR_WATER_SHORTAGE, - name="Water shortage", + translation_key=ATTR_WATER_SHORTAGE, icon="mdi:water", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -119,7 +119,7 @@ VACUUM_SENSORS_SEPARATE_MOP = { **VACUUM_SENSORS, ATTR_MOP_ATTACHED: XiaomiMiioBinarySensorDescription( key=ATTR_MOP_ATTACHED, - name="Mop attached", + translation_key=ATTR_MOP_ATTACHED, icon="mdi:square-rounded", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, diff --git a/homeassistant/components/xiaomi_miio/button.py b/homeassistant/components/xiaomi_miio/button.py index 38e6afa5ffb..9a64941f398 100644 --- a/homeassistant/components/xiaomi_miio/button.py +++ b/homeassistant/components/xiaomi_miio/button.py @@ -24,7 +24,7 @@ from .const import ( MODEL_AIRFRESH_T2017, MODELS_VACUUM, ) -from .device import XiaomiCoordinatedMiioEntity +from .entity import XiaomiCoordinatedMiioEntity # Fans ATTR_RESET_DUST_FILTER = "reset_dust_filter" @@ -51,7 +51,7 @@ BUTTON_TYPES = ( # Fans XiaomiMiioButtonDescription( key=ATTR_RESET_DUST_FILTER, - name="Reset dust filter", + translation_key=ATTR_RESET_DUST_FILTER, icon="mdi:air-filter", method_press="reset_dust_filter", method_press_error_message="Resetting the dust filter lifetime failed", @@ -59,7 +59,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_UPPER_FILTER, - name="Reset upper filter", + translation_key=ATTR_RESET_UPPER_FILTER, icon="mdi:air-filter", method_press="reset_upper_filter", method_press_error_message="Resetting the upper filter lifetime failed.", @@ -68,7 +68,7 @@ BUTTON_TYPES = ( # Vacuums XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_MAIN_BRUSH, - name="Reset main brush", + translation_key=ATTR_RESET_VACUUM_MAIN_BRUSH, icon="mdi:brush", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.MainBrush, @@ -77,7 +77,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_SIDE_BRUSH, - name="Reset side brush", + translation_key=ATTR_RESET_VACUUM_SIDE_BRUSH, icon="mdi:brush", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.SideBrush, @@ -86,7 +86,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_FILTER, - name="Reset filter", + translation_key=ATTR_RESET_VACUUM_FILTER, icon="mdi:air-filter", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.Filter, @@ -95,7 +95,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_SENSOR_DIRTY, - name="Reset sensor dirty", 
+ translation_key=ATTR_RESET_VACUUM_SENSOR_DIRTY, icon="mdi:eye-outline", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.SensorDirty, diff --git a/homeassistant/components/xiaomi_miio/config_flow.py b/homeassistant/components/xiaomi_miio/config_flow.py index c689ede27eb..b068f4a1e61 100644 --- a/homeassistant/components/xiaomi_miio/config_flow.py +++ b/homeassistant/components/xiaomi_miio/config_flow.py @@ -13,7 +13,6 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ( - SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -64,10 +63,6 @@ DEVICE_CLOUD_CONFIG = vol.Schema( class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -83,14 +78,7 @@ class OptionsFlowHandler(OptionsFlow): not cloud_username or not cloud_password or not cloud_country ): errors["base"] = "cloud_credentials_incomplete" - # trigger re-auth flow - self.hass.async_create_task( - self.hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=self.config_entry.data, - ) - ) + self.config_entry.async_start_reauth(self.hass) if not errors: return self.async_create_entry(title="", data=user_input) @@ -130,7 +118,7 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -237,7 +225,9 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN): step_id="cloud", data_schema=DEVICE_CLOUD_CONFIG, errors=errors ) - miio_cloud = MiCloud(cloud_username, cloud_password) + miio_cloud = await self.hass.async_add_executor_job( + MiCloud, cloud_username, cloud_password + ) try: if not await self.hass.async_add_executor_job(miio_cloud.login): errors["base"] = "cloud_login_error" diff --git a/homeassistant/components/xiaomi_miio/const.py b/homeassistant/components/xiaomi_miio/const.py index a8b1f8d4ba5..2b9cdb2ffdd 100644 --- a/homeassistant/components/xiaomi_miio/const.py +++ b/homeassistant/components/xiaomi_miio/const.py @@ -60,8 +60,8 @@ MODEL_AIRPURIFIER_2H = "zhimi.airpurifier.mc2" MODEL_AIRPURIFIER_2S = "zhimi.airpurifier.mc1" MODEL_AIRPURIFIER_3 = "zhimi.airpurifier.ma4" MODEL_AIRPURIFIER_3C = "zhimi.airpurifier.mb4" +MODEL_AIRPURIFIER_3C_REV_A = "zhimi.airp.mb4a" MODEL_AIRPURIFIER_3H = "zhimi.airpurifier.mb3" -MODEL_AIRPURIFIER_COMPACT = "xiaomi.airp.cpa4" MODEL_AIRPURIFIER_M1 = "zhimi.airpurifier.m1" MODEL_AIRPURIFIER_M2 = "zhimi.airpurifier.m2" MODEL_AIRPURIFIER_MA1 = "zhimi.airpurifier.ma1" @@ -84,7 +84,6 @@ MODEL_AIRHUMIDIFIER_CA4 = "zhimi.humidifier.ca4" MODEL_AIRHUMIDIFIER_CB1 = "zhimi.humidifier.cb1" MODEL_AIRHUMIDIFIER_JSQ = "deerma.humidifier.jsq" MODEL_AIRHUMIDIFIER_JSQ1 = "deerma.humidifier.jsq1" -MODEL_AIRHUMIDIFIER_JSQ2W = "deerma.humidifier.jsq2w" MODEL_AIRHUMIDIFIER_MJJSQ = "deerma.humidifier.mjjsq" MODEL_AIRFRESH_A1 = "dmaker.airfresh.a1" @@ -95,6 +94,7 @@ MODEL_AIRFRESH_T2017 = "dmaker.airfresh.t2017" MODEL_FAN_1C = "dmaker.fan.1c" MODEL_FAN_P10 = "dmaker.fan.p10" MODEL_FAN_P11 = "dmaker.fan.p11" +MODEL_FAN_P18 = "dmaker.fan.p18" MODEL_FAN_P5 = "dmaker.fan.p5" MODEL_FAN_P9 = "dmaker.fan.p9" MODEL_FAN_SA1 
= "zhimi.fan.sa1" @@ -119,6 +119,7 @@ MODELS_FAN_MIOT = [ MODEL_FAN_1C, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_P9, MODEL_FAN_ZA5, ] @@ -126,6 +127,7 @@ MODELS_FAN_MIOT = [ MODELS_PURIFIER_MIOT = [ MODEL_AIRPURIFIER_3, MODEL_AIRPURIFIER_3C, + MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_3H, MODEL_AIRPURIFIER_PROH, MODEL_AIRPURIFIER_PROH_EU, @@ -150,7 +152,6 @@ MODELS_PURIFIER_MIIO = [ MODEL_AIRPURIFIER_SA2, MODEL_AIRPURIFIER_2S, MODEL_AIRPURIFIER_2H, - MODEL_AIRPURIFIER_COMPACT, MODEL_AIRFRESH_A1, MODEL_AIRFRESH_VA2, MODEL_AIRFRESH_VA4, @@ -165,7 +166,6 @@ MODELS_HUMIDIFIER_MIOT = [MODEL_AIRHUMIDIFIER_CA4] MODELS_HUMIDIFIER_MJJSQ = [ MODEL_AIRHUMIDIFIER_JSQ, MODEL_AIRHUMIDIFIER_JSQ1, - MODEL_AIRHUMIDIFIER_JSQ2W, MODEL_AIRHUMIDIFIER_MJJSQ, ] @@ -493,7 +493,7 @@ FEATURE_FLAGS_FAN_P9 = ( | FEATURE_SET_DELAY_OFF_COUNTDOWN ) -FEATURE_FLAGS_FAN_P10_P11 = ( +FEATURE_FLAGS_FAN_P10_P11_P18 = ( FEATURE_SET_BUZZER | FEATURE_SET_CHILD_LOCK | FEATURE_SET_OSCILLATION_ANGLE diff --git a/homeassistant/components/xiaomi_miio/device.py b/homeassistant/components/xiaomi_miio/device.py index e90a86ab7e9..beeb7e95e54 100644 --- a/homeassistant/components/xiaomi_miio/device.py +++ b/homeassistant/components/xiaomi_miio/device.py @@ -1,24 +1,11 @@ """Code to handle a Xiaomi Device.""" -import datetime -from enum import Enum -from functools import partial import logging -from typing import Any from construct.core import ChecksumError from miio import Device, DeviceException -from homeassistant.const import ATTR_CONNECTIONS, CONF_MAC, CONF_MODEL -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) - -from .const import DOMAIN, AuthException, SetupException +from .const import AuthException, SetupException _LOGGER = logging.getLogger(__name__) @@ -66,131 +53,3 @@ class ConnectXiaomiDevice: self._device_info.firmware_version, self._device_info.hardware_version, ) - - -class XiaomiMiioEntity(Entity): - """Representation of a base Xiaomi Miio Entity.""" - - def __init__(self, name, device, entry, unique_id): - """Initialize the Xiaomi Miio Device.""" - self._device = device - self._model = entry.data[CONF_MODEL] - self._mac = entry.data[CONF_MAC] - self._device_id = entry.unique_id - self._unique_id = unique_id - self._name = name - self._available = None - - @property - def unique_id(self): - """Return an unique ID.""" - return self._unique_id - - @property - def name(self): - """Return the name of this entity, if any.""" - return self._name - - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - device_info = DeviceInfo( - identifiers={(DOMAIN, self._device_id)}, - manufacturer="Xiaomi", - model=self._model, - name=self._name, - ) - - if self._mac is not None: - device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, self._mac)} - - return device_info - - -class XiaomiCoordinatedMiioEntity[_T: DataUpdateCoordinator[Any]]( - CoordinatorEntity[_T] -): - """Representation of a base a coordinated Xiaomi Miio Entity.""" - - _attr_has_entity_name = True - - def __init__(self, device, entry, unique_id, coordinator): - """Initialize the coordinated Xiaomi Miio Device.""" - super().__init__(coordinator) - self._device = device - self._model = entry.data[CONF_MODEL] - self._mac = entry.data[CONF_MAC] - self._device_id = entry.unique_id - self._device_name = 
entry.title - self._unique_id = unique_id - - @property - def unique_id(self): - """Return an unique ID.""" - return self._unique_id - - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - device_info = DeviceInfo( - identifiers={(DOMAIN, self._device_id)}, - manufacturer="Xiaomi", - model=self._model, - name=self._device_name, - ) - - if self._mac is not None: - device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, self._mac)} - - return device_info - - async def _try_command(self, mask_error, func, *args, **kwargs): - """Call a miio device command handling error messages.""" - try: - result = await self.hass.async_add_executor_job( - partial(func, *args, **kwargs) - ) - except DeviceException as exc: - if self.available: - _LOGGER.error(mask_error, exc) - - return False - - _LOGGER.debug("Response received from miio device: %s", result) - return True - - @classmethod - def _extract_value_from_attribute(cls, state, attribute): - value = getattr(state, attribute) - if isinstance(value, Enum): - return value.value - if isinstance(value, datetime.timedelta): - return cls._parse_time_delta(value) - if isinstance(value, datetime.time): - return cls._parse_datetime_time(value) - if isinstance(value, datetime.datetime): - return cls._parse_datetime_datetime(value) - - if value is None: - _LOGGER.debug("Attribute %s is None, this is unexpected", attribute) - - return value - - @staticmethod - def _parse_time_delta(timedelta: datetime.timedelta) -> int: - return int(timedelta.total_seconds()) - - @staticmethod - def _parse_datetime_time(initial_time: datetime.time) -> str: - time = datetime.datetime.now().replace( - hour=initial_time.hour, minute=initial_time.minute, second=0, microsecond=0 - ) - - if time < datetime.datetime.now(): - time += datetime.timedelta(days=1) - - return time.isoformat() - - @staticmethod - def _parse_datetime_datetime(time: datetime.datetime) -> str: - return time.isoformat() diff --git a/homeassistant/components/xiaomi_miio/device_tracker.py b/homeassistant/components/xiaomi_miio/device_tracker.py index 4a7e447b8a5..1dfc5e53410 100644 --- a/homeassistant/components/xiaomi_miio/device_tracker.py +++ b/homeassistant/components/xiaomi_miio/device_tracker.py @@ -8,7 +8,7 @@ from miio import DeviceException, WifiRepeater import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -32,15 +32,17 @@ def get_scanner( ) -> XiaomiMiioDeviceScanner | None: """Return a Xiaomi MiIO device scanner.""" scanner = None - host = config[DOMAIN][CONF_HOST] - token = config[DOMAIN][CONF_TOKEN] + config = config[DEVICE_TRACKER_DOMAIN] - _LOGGER.info("Initializing with host %s (token %s...)", host, token[:5]) + host = config[CONF_HOST] + token = config[CONF_TOKEN] + + _LOGGER.debug("Initializing with host %s (token %s...)", host, token[:5]) try: device = WifiRepeater(host, token) device_info = device.info() - _LOGGER.info( + _LOGGER.debug( "%s %s %s detected", device_info.model, device_info.firmware_version, diff --git a/homeassistant/components/xiaomi_miio/entity.py b/homeassistant/components/xiaomi_miio/entity.py new file mode 100644 index 00000000000..0343a7526d7 --- /dev/null +++ b/homeassistant/components/xiaomi_miio/entity.py @@ -0,0 +1,193 @@ +"""Code to handle a Xiaomi Device.""" + +import datetime +from enum import Enum +from functools import partial +import logging +from typing import Any + +from miio import 
DeviceException + +from homeassistant.const import ATTR_CONNECTIONS, CONF_MAC, CONF_MODEL +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import ATTR_AVAILABLE, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class XiaomiMiioEntity(Entity): + """Representation of a base Xiaomi Miio Entity.""" + + def __init__(self, name, device, entry, unique_id): + """Initialize the Xiaomi Miio Device.""" + self._device = device + self._model = entry.data[CONF_MODEL] + self._mac = entry.data[CONF_MAC] + self._device_id = entry.unique_id + self._unique_id = unique_id + self._name = name + self._available = None + + @property + def unique_id(self): + """Return an unique ID.""" + return self._unique_id + + @property + def name(self): + """Return the name of this entity, if any.""" + return self._name + + @property + def device_info(self) -> DeviceInfo: + """Return the device info.""" + device_info = DeviceInfo( + identifiers={(DOMAIN, self._device_id)}, + manufacturer="Xiaomi", + model=self._model, + name=self._name, + ) + + if self._mac is not None: + device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, self._mac)} + + return device_info + + +class XiaomiCoordinatedMiioEntity[_T: DataUpdateCoordinator[Any]]( + CoordinatorEntity[_T] +): + """Representation of a base a coordinated Xiaomi Miio Entity.""" + + _attr_has_entity_name = True + + def __init__(self, device, entry, unique_id, coordinator): + """Initialize the coordinated Xiaomi Miio Device.""" + super().__init__(coordinator) + self._device = device + self._model = entry.data[CONF_MODEL] + self._mac = entry.data[CONF_MAC] + self._device_id = entry.unique_id + self._device_name = entry.title + self._unique_id = unique_id + + @property + def unique_id(self): + """Return an unique ID.""" + return self._unique_id + + @property + def device_info(self) -> DeviceInfo: + """Return the device info.""" + device_info = DeviceInfo( + identifiers={(DOMAIN, self._device_id)}, + manufacturer="Xiaomi", + model=self._model, + name=self._device_name, + ) + + if self._mac is not None: + device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, self._mac)} + + return device_info + + async def _try_command(self, mask_error, func, *args, **kwargs): + """Call a miio device command handling error messages.""" + try: + result = await self.hass.async_add_executor_job( + partial(func, *args, **kwargs) + ) + except DeviceException as exc: + if self.available: + _LOGGER.error(mask_error, exc) + + return False + + _LOGGER.debug("Response received from miio device: %s", result) + return True + + @classmethod + def _extract_value_from_attribute(cls, state, attribute): + value = getattr(state, attribute) + if isinstance(value, Enum): + return value.value + if isinstance(value, datetime.timedelta): + return cls._parse_time_delta(value) + if isinstance(value, datetime.time): + return cls._parse_datetime_time(value) + if isinstance(value, datetime.datetime): + return cls._parse_datetime_datetime(value) + + if value is None: + _LOGGER.debug("Attribute %s is None, this is unexpected", attribute) + + return value + + @staticmethod + def _parse_time_delta(timedelta: datetime.timedelta) -> int: + return int(timedelta.total_seconds()) + + @staticmethod + def _parse_datetime_time(initial_time: datetime.time) -> str: + time = 
datetime.datetime.now().replace( + hour=initial_time.hour, minute=initial_time.minute, second=0, microsecond=0 + ) + + if time < datetime.datetime.now(): + time += datetime.timedelta(days=1) + + return time.isoformat() + + @staticmethod + def _parse_datetime_datetime(time: datetime.datetime) -> str: + return time.isoformat() + + +class XiaomiGatewayDevice(CoordinatorEntity, Entity): + """Representation of a base Xiaomi Gateway Device.""" + + def __init__(self, coordinator, sub_device, entry): + """Initialize the Xiaomi Gateway Device.""" + super().__init__(coordinator) + self._sub_device = sub_device + self._entry = entry + self._unique_id = sub_device.sid + self._name = f"{sub_device.name} ({sub_device.sid})" + + @property + def unique_id(self): + """Return an unique ID.""" + return self._unique_id + + @property + def name(self): + """Return the name of this entity, if any.""" + return self._name + + @property + def device_info(self) -> DeviceInfo: + """Return the device info of the gateway.""" + return DeviceInfo( + identifiers={(DOMAIN, self._sub_device.sid)}, + via_device=(DOMAIN, self._entry.unique_id), + manufacturer="Xiaomi", + name=self._sub_device.name, + model=self._sub_device.model, + sw_version=self._sub_device.firmware_version, + hw_version=self._sub_device.zigbee_model, + ) + + @property + def available(self): + """Return if entity is available.""" + if self.coordinator.data is None: + return False + + return self.coordinator.data[ATTR_AVAILABLE] diff --git a/homeassistant/components/xiaomi_miio/fan.py b/homeassistant/components/xiaomi_miio/fan.py index f075ff8816f..e1de3f56252 100644 --- a/homeassistant/components/xiaomi_miio/fan.py +++ b/homeassistant/components/xiaomi_miio/fan.py @@ -60,7 +60,7 @@ from .const import ( FEATURE_FLAGS_FAN_1C, FEATURE_FLAGS_FAN_P5, FEATURE_FLAGS_FAN_P9, - FEATURE_FLAGS_FAN_P10_P11, + FEATURE_FLAGS_FAN_P10_P11_P18, FEATURE_FLAGS_FAN_ZA5, FEATURE_RESET_FILTER, FEATURE_SET_EXTRA_FEATURES, @@ -71,6 +71,7 @@ from .const import ( MODEL_AIRPURIFIER_2H, MODEL_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C, + MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_LITE_RMA1, MODEL_AIRPURIFIER_4_LITE_RMB1, @@ -84,6 +85,7 @@ from .const import ( MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_ZA5, MODELS_FAN_MIIO, MODELS_FAN_MIOT, @@ -91,7 +93,7 @@ from .const import ( SERVICE_RESET_FILTER, SERVICE_SET_EXTRA_FEATURES, ) -from .device import XiaomiCoordinatedMiioEntity +from .entity import XiaomiCoordinatedMiioEntity from .typing import ServiceMethodDetails _LOGGER = logging.getLogger(__name__) @@ -116,6 +118,10 @@ ATTR_BUTTON_PRESSED = "button_pressed" # Air Fresh A1 ATTR_FAVORITE_SPEED = "favorite_speed" +# Air Purifier 3C +ATTR_FAVORITE_RPM = "favorite_rpm" +ATTR_MOTOR_SPEED = "motor_speed" + # Map attributes to properties of the state object AVAILABLE_ATTRIBUTES_AIRPURIFIER_COMMON = { ATTR_EXTRA_FEATURES: "extra_features", @@ -215,7 +221,7 @@ async def async_setup_entry( coordinator = hass.data[DOMAIN][config_entry.entry_id][KEY_COORDINATOR] device = hass.data[DOMAIN][config_entry.entry_id][KEY_DEVICE] - if model == MODEL_AIRPURIFIER_3C: + if model in (MODEL_AIRPURIFIER_3C, MODEL_AIRPURIFIER_3C_REV_A): entity = XiaomiAirPurifierMB4( device, config_entry, @@ -294,7 +300,6 @@ class XiaomiGenericDevice(XiaomiCoordinatedMiioEntity, FanEntity): """Representation of a generic Xiaomi device.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device, entry, unique_id, coordinator): 
"""Initialize the generic Xiaomi device.""" @@ -607,28 +612,68 @@ class XiaomiAirPurifierMiot(XiaomiAirPurifier): class XiaomiAirPurifierMB4(XiaomiGenericAirPurifier): """Representation of a Xiaomi Air Purifier MB4.""" - def __init__(self, device, entry, unique_id, coordinator): + def __init__(self, device, entry, unique_id, coordinator) -> None: """Initialize Air Purifier MB4.""" super().__init__(device, entry, unique_id, coordinator) self._device_features = FEATURE_FLAGS_AIRPURIFIER_3C self._preset_modes = PRESET_MODES_AIRPURIFIER_3C self._attr_supported_features = ( - FanEntityFeature.PRESET_MODE + FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) self._state = self.coordinator.data.is_on self._mode = self.coordinator.data.mode.value + self._favorite_rpm: int | None = None + self._speed_range = (300, 2200) + self._motor_speed = 0 @property def operation_mode_class(self): """Hold operation mode class.""" return AirpurifierMiotOperationMode + @property + def percentage(self) -> int | None: + """Return the current percentage based speed.""" + # show the actual fan speed in silent or auto preset mode + if self._mode != self.operation_mode_class["Favorite"].value: + return ranged_value_to_percentage(self._speed_range, self._motor_speed) + if self._favorite_rpm is None: + return None + if self._state: + return ranged_value_to_percentage(self._speed_range, self._favorite_rpm) + + return None + + async def async_set_percentage(self, percentage: int) -> None: + """Set the percentage of the fan. This method is a coroutine.""" + if percentage == 0: + await self.async_turn_off() + return + + favorite_rpm = int( + round(percentage_to_ranged_value(self._speed_range, percentage), -1) + ) + if not favorite_rpm: + return + if await self._try_command( + "Setting fan level of the miio device failed.", + self._device.set_favorite_rpm, + favorite_rpm, + ): + self._favorite_rpm = favorite_rpm + self._mode = self.operation_mode_class["Favorite"].value + self.async_write_ha_state() + async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan.""" + if not self._state: + await self.async_turn_on() + if await self._try_command( "Setting operation mode of the miio device failed.", self._device.set_mode, @@ -642,6 +687,14 @@ class XiaomiAirPurifierMB4(XiaomiGenericAirPurifier): """Fetch state from the device.""" self._state = self.coordinator.data.is_on self._mode = self.coordinator.data.mode.value + self._favorite_rpm = getattr(self.coordinator.data, ATTR_FAVORITE_RPM, None) + self._motor_speed = min( + self._speed_range[1], + max( + self._speed_range[0], + getattr(self.coordinator.data, ATTR_MOTOR_SPEED, 0), + ), + ) self.async_write_ha_state() @@ -859,8 +912,8 @@ class XiaomiGenericFan(XiaomiGenericDevice): self._device_features = FEATURE_FLAGS_FAN_1C elif self._model == MODEL_FAN_P9: self._device_features = FEATURE_FLAGS_FAN_P9 - elif self._model in (MODEL_FAN_P10, MODEL_FAN_P11): - self._device_features = FEATURE_FLAGS_FAN_P10_P11 + elif self._model in (MODEL_FAN_P10, MODEL_FAN_P11, MODEL_FAN_P18): + self._device_features = FEATURE_FLAGS_FAN_P10_P11_P18 else: self._device_features = FEATURE_FLAGS_FAN self._attr_supported_features = ( diff --git a/homeassistant/components/xiaomi_miio/gateway.py b/homeassistant/components/xiaomi_miio/gateway.py index 39e8ce503a4..dd5deec2296 100644 --- a/homeassistant/components/xiaomi_miio/gateway.py +++ b/homeassistant/components/xiaomi_miio/gateway.py @@ -8,17 +8,11 @@ 
from micloud.micloudexception import MiCloudAccessDenied from miio import DeviceException, gateway from miio.gateway.gateway import GATEWAY_MODEL_EU -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.update_coordinator import CoordinatorEntity - from .const import ( - ATTR_AVAILABLE, CONF_CLOUD_COUNTRY, CONF_CLOUD_PASSWORD, CONF_CLOUD_SUBDEVICES, CONF_CLOUD_USERNAME, - DOMAIN, AuthException, SetupException, ) @@ -93,7 +87,7 @@ class ConnectXiaomiGateway: try: self._gateway_device.discover_devices() except DeviceException as error: - _LOGGER.info( + _LOGGER.error( ( "DeviceException during getting subdevices of xiaomi gateway" " with host %s, trying cloud to obtain subdevices: %s" @@ -134,46 +128,3 @@ class ConnectXiaomiGateway: "DeviceException during setup of xiaomi gateway with host" f" {self._host}" ) from error - - -class XiaomiGatewayDevice(CoordinatorEntity, Entity): - """Representation of a base Xiaomi Gateway Device.""" - - def __init__(self, coordinator, sub_device, entry): - """Initialize the Xiaomi Gateway Device.""" - super().__init__(coordinator) - self._sub_device = sub_device - self._entry = entry - self._unique_id = sub_device.sid - self._name = f"{sub_device.name} ({sub_device.sid})" - - @property - def unique_id(self): - """Return an unique ID.""" - return self._unique_id - - @property - def name(self): - """Return the name of this entity, if any.""" - return self._name - - @property - def device_info(self) -> DeviceInfo: - """Return the device info of the gateway.""" - return DeviceInfo( - identifiers={(DOMAIN, self._sub_device.sid)}, - via_device=(DOMAIN, self._entry.unique_id), - manufacturer="Xiaomi", - name=self._sub_device.name, - model=self._sub_device.model, - sw_version=self._sub_device.firmware_version, - hw_version=self._sub_device.zigbee_model, - ) - - @property - def available(self): - """Return if entity is available.""" - if self.coordinator.data is None: - return False - - return self.coordinator.data[ATTR_AVAILABLE] diff --git a/homeassistant/components/xiaomi_miio/humidifier.py b/homeassistant/components/xiaomi_miio/humidifier.py index 8367b063102..4701345756a 100644 --- a/homeassistant/components/xiaomi_miio/humidifier.py +++ b/homeassistant/components/xiaomi_miio/humidifier.py @@ -37,7 +37,7 @@ from .const import ( MODELS_HUMIDIFIER_MIOT, MODELS_HUMIDIFIER_MJJSQ, ) -from .device import XiaomiCoordinatedMiioEntity +from .entity import XiaomiCoordinatedMiioEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_miio/icons.json b/homeassistant/components/xiaomi_miio/icons.json index 2e5084a1f6c..cc0800f1d9d 100644 --- a/homeassistant/components/xiaomi_miio/icons.json +++ b/homeassistant/components/xiaomi_miio/icons.json @@ -14,29 +14,77 @@ } }, "services": { - "fan_reset_filter": "mdi:refresh", - "fan_set_extra_features": "mdi:cog", - "light_set_scene": "mdi:palette", - "light_set_delayed_turn_off": "mdi:timer", - "light_reminder_on": "mdi:alarm", - "light_reminder_off": "mdi:alarm-off", - "light_night_light_mode_on": "mdi:weather-night", - "light_night_light_mode_off": "mdi:weather-sunny", - "light_eyecare_mode_on": "mdi:eye", - "light_eyecare_mode_off": "mdi:eye-off", - "remote_learn_command": "mdi:remote", - "remote_set_led_on": "mdi:led-on", - "remote_set_led_off": "mdi:led-off", - "switch_set_wifi_led_on": "mdi:wifi", - "switch_set_wifi_led_off": "mdi:wifi-off", - "switch_set_power_price": "mdi:currency-usd", - 
"switch_set_power_mode": "mdi:power", - "vacuum_remote_control_start": "mdi:play", - "vacuum_remote_control_stop": "mdi:stop", - "vacuum_remote_control_move": "mdi:remote", - "vacuum_remote_control_move_step": "mdi:remote", - "vacuum_clean_zone": "mdi:map-marker", - "vacuum_goto": "mdi:map-marker", - "vacuum_clean_segment": "mdi:map-marker" + "fan_reset_filter": { + "service": "mdi:refresh" + }, + "fan_set_extra_features": { + "service": "mdi:cog" + }, + "light_set_scene": { + "service": "mdi:palette" + }, + "light_set_delayed_turn_off": { + "service": "mdi:timer" + }, + "light_reminder_on": { + "service": "mdi:alarm" + }, + "light_reminder_off": { + "service": "mdi:alarm-off" + }, + "light_night_light_mode_on": { + "service": "mdi:weather-night" + }, + "light_night_light_mode_off": { + "service": "mdi:weather-sunny" + }, + "light_eyecare_mode_on": { + "service": "mdi:eye" + }, + "light_eyecare_mode_off": { + "service": "mdi:eye-off" + }, + "remote_learn_command": { + "service": "mdi:remote" + }, + "remote_set_led_on": { + "service": "mdi:led-on" + }, + "remote_set_led_off": { + "service": "mdi:led-off" + }, + "switch_set_wifi_led_on": { + "service": "mdi:wifi" + }, + "switch_set_wifi_led_off": { + "service": "mdi:wifi-off" + }, + "switch_set_power_price": { + "service": "mdi:currency-usd" + }, + "switch_set_power_mode": { + "service": "mdi:power" + }, + "vacuum_remote_control_start": { + "service": "mdi:play" + }, + "vacuum_remote_control_stop": { + "service": "mdi:stop" + }, + "vacuum_remote_control_move": { + "service": "mdi:remote" + }, + "vacuum_remote_control_move_step": { + "service": "mdi:remote" + }, + "vacuum_clean_zone": { + "service": "mdi:map-marker" + }, + "vacuum_goto": { + "service": "mdi:map-marker" + }, + "vacuum_clean_segment": { + "service": "mdi:map-marker" + } } } diff --git a/homeassistant/components/xiaomi_miio/light.py b/homeassistant/components/xiaomi_miio/light.py index 35537e82b2e..3f1f8b926b3 100644 --- a/homeassistant/components/xiaomi_miio/light.py +++ b/homeassistant/components/xiaomi_miio/light.py @@ -28,7 +28,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ColorMode, LightEntity, @@ -45,7 +45,7 @@ from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util import color, dt as dt_util +from homeassistant.util import color as color_util, dt as dt_util from .const import ( CONF_FLOW_TYPE, @@ -66,8 +66,7 @@ from .const import ( SERVICE_SET_DELAYED_TURN_OFF, SERVICE_SET_SCENE, ) -from .device import XiaomiMiioEntity -from .gateway import XiaomiGatewayDevice +from .entity import XiaomiGatewayDevice, XiaomiMiioEntity from .typing import ServiceMethodDetails _LOGGER = logging.getLogger(__name__) @@ -431,33 +430,54 @@ class XiaomiPhilipsBulb(XiaomiPhilipsGenericLight): self._color_temp = None @property - def color_temp(self): + def _current_mireds(self): """Return the color temperature.""" return self._color_temp @property - def min_mireds(self): + def _min_mireds(self): """Return the coldest color_temp that this light supports.""" return 175 @property - def max_mireds(self): + def _max_mireds(self): """Return the warmest color_temp that this light supports.""" return 333 + @property + def color_temp_kelvin(self) -> int | None: + """Return the color 
temperature value in Kelvin.""" + return ( + color_util.color_temperature_mired_to_kelvin(self._color_temp) + if self._color_temp + else None + ) + + @property + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin(self._max_mireds) + + @property + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin(self._min_mireds) + async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" - if ATTR_COLOR_TEMP in kwargs: - color_temp = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) percent_color_temp = self.translate( - color_temp, self.max_mireds, self.min_mireds, CCT_MIN, CCT_MAX + color_temp, self._max_mireds, self._min_mireds, CCT_MIN, CCT_MAX ) if ATTR_BRIGHTNESS in kwargs: brightness = kwargs[ATTR_BRIGHTNESS] percent_brightness = ceil(100 * brightness / 255.0) - if ATTR_BRIGHTNESS in kwargs and ATTR_COLOR_TEMP in kwargs: + if ATTR_BRIGHTNESS in kwargs and ATTR_COLOR_TEMP_KELVIN in kwargs: _LOGGER.debug( "Setting brightness and color temperature: %s %s%%, %s mireds, %s%% cct", brightness, @@ -477,7 +497,7 @@ class XiaomiPhilipsBulb(XiaomiPhilipsGenericLight): self._color_temp = color_temp self._brightness = brightness - elif ATTR_COLOR_TEMP in kwargs: + elif ATTR_COLOR_TEMP_KELVIN in kwargs: _LOGGER.debug( "Setting color temperature: %s mireds, %s%% cct", color_temp, @@ -527,7 +547,11 @@ class XiaomiPhilipsBulb(XiaomiPhilipsGenericLight): self._state = state.is_on self._brightness = ceil((255 / 100.0) * state.brightness) self._color_temp = self.translate( - state.color_temperature, CCT_MIN, CCT_MAX, self.max_mireds, self.min_mireds + state.color_temperature, + CCT_MIN, + CCT_MAX, + self._max_mireds, + self._min_mireds, ) delayed_turn_off = self.delayed_turn_off_timestamp( @@ -561,12 +585,12 @@ class XiaomiPhilipsCeilingLamp(XiaomiPhilipsBulb): ) @property - def min_mireds(self): + def _min_mireds(self): """Return the coldest color_temp that this light supports.""" return 175 @property - def max_mireds(self): + def _max_mireds(self): """Return the warmest color_temp that this light supports.""" return 370 @@ -586,7 +610,11 @@ class XiaomiPhilipsCeilingLamp(XiaomiPhilipsBulb): self._state = state.is_on self._brightness = ceil((255 / 100.0) * state.brightness) self._color_temp = self.translate( - state.color_temperature, CCT_MIN, CCT_MAX, self.max_mireds, self.min_mireds + state.color_temperature, + CCT_MIN, + CCT_MAX, + self._max_mireds, + self._min_mireds, ) delayed_turn_off = self.delayed_turn_off_timestamp( @@ -798,12 +826,12 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): ) @property - def min_mireds(self): + def _min_mireds(self): """Return the coldest color_temp that this light supports.""" return 153 @property - def max_mireds(self): + def _max_mireds(self): """Return the warmest color_temp that this light supports.""" return 588 @@ -821,10 +849,12 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" - if ATTR_COLOR_TEMP in kwargs: - color_temp = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) percent_color_temp = self.translate( - 
color_temp, self.max_mireds, self.min_mireds, CCT_MIN, CCT_MAX + color_temp, self._max_mireds, self._min_mireds, CCT_MIN, CCT_MAX ) if ATTR_BRIGHTNESS in kwargs: @@ -833,7 +863,7 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): if ATTR_HS_COLOR in kwargs: hs_color = kwargs[ATTR_HS_COLOR] - rgb = color.color_hs_to_RGB(*hs_color) + rgb = color_util.color_hs_to_RGB(*hs_color) if ATTR_BRIGHTNESS in kwargs and ATTR_HS_COLOR in kwargs: _LOGGER.debug( @@ -854,7 +884,7 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): self._hs_color = hs_color self._brightness = brightness - elif ATTR_BRIGHTNESS in kwargs and ATTR_COLOR_TEMP in kwargs: + elif ATTR_BRIGHTNESS in kwargs and ATTR_COLOR_TEMP_KELVIN in kwargs: _LOGGER.debug( ( "Setting brightness and color temperature: " @@ -887,7 +917,7 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): if result: self._hs_color = hs_color - elif ATTR_COLOR_TEMP in kwargs: + elif ATTR_COLOR_TEMP_KELVIN in kwargs: _LOGGER.debug( "Setting color temperature: %s mireds, %s%% cct", color_temp, @@ -937,9 +967,13 @@ class XiaomiPhilipsMoonlightLamp(XiaomiPhilipsBulb): self._state = state.is_on self._brightness = ceil((255 / 100.0) * state.brightness) self._color_temp = self.translate( - state.color_temperature, CCT_MIN, CCT_MAX, self.max_mireds, self.min_mireds + state.color_temperature, + CCT_MIN, + CCT_MAX, + self._max_mireds, + self._min_mireds, ) - self._hs_color = color.color_RGB_to_hs(*state.rgb) + self._hs_color = color_util.color_RGB_to_hs(*state.rgb) self._state_attrs.update( { @@ -1015,7 +1049,7 @@ class XiaomiGatewayLight(LightEntity): def turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" if ATTR_HS_COLOR in kwargs: - rgb = color.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR]) + rgb = color_util.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR]) else: rgb = self._rgb @@ -1053,7 +1087,7 @@ class XiaomiGatewayLight(LightEntity): if self._is_on: self._brightness_pct = state_dict["brightness"] self._rgb = state_dict["rgb"] - self._hs = color.color_RGB_to_hs(*self._rgb) + self._hs = color_util.color_RGB_to_hs(*self._rgb) class XiaomiGatewayBulb(XiaomiGatewayDevice, LightEntity): @@ -1068,7 +1102,7 @@ class XiaomiGatewayBulb(XiaomiGatewayDevice, LightEntity): return round((self._sub_device.status["brightness"] * 255) / 100) @property - def color_temp(self): + def _current_mireds(self): """Return current color temperature.""" return self._sub_device.status["color_temp"] @@ -1078,12 +1112,12 @@ class XiaomiGatewayBulb(XiaomiGatewayDevice, LightEntity): return self._sub_device.status["status"] == "on" @property - def min_mireds(self): + def _min_mireds(self): """Return min cct.""" return self._sub_device.status["cct_min"] @property - def max_mireds(self): + def _max_mireds(self): """Return max cct.""" return self._sub_device.status["cct_max"] @@ -1091,8 +1125,10 @@ class XiaomiGatewayBulb(XiaomiGatewayDevice, LightEntity): """Instruct the light to turn on.""" await self.hass.async_add_executor_job(self._sub_device.on) - if ATTR_COLOR_TEMP in kwargs: - color_temp = kwargs[ATTR_COLOR_TEMP] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + color_temp = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) await self.hass.async_add_executor_job( self._sub_device.set_color_temp, color_temp ) diff --git a/homeassistant/components/xiaomi_miio/number.py b/homeassistant/components/xiaomi_miio/number.py index a0ae0ea5078..a3c501aad3f 100644 --- a/homeassistant/components/xiaomi_miio/number.py +++ b/homeassistant/components/xiaomi_miio/number.py @@ 
-50,7 +50,7 @@ from .const import ( FEATURE_FLAGS_FAN_1C, FEATURE_FLAGS_FAN_P5, FEATURE_FLAGS_FAN_P9, - FEATURE_FLAGS_FAN_P10_P11, + FEATURE_FLAGS_FAN_P10_P11_P18, FEATURE_FLAGS_FAN_ZA5, FEATURE_SET_DELAY_OFF_COUNTDOWN, FEATURE_SET_FAN_LEVEL, @@ -72,6 +72,7 @@ from .const import ( MODEL_AIRHUMIDIFIER_CB1, MODEL_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C, + MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_LITE_RMA1, MODEL_AIRPURIFIER_4_LITE_RMB1, @@ -86,6 +87,7 @@ from .const import ( MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_SA1, MODEL_FAN_V2, MODEL_FAN_V3, @@ -96,7 +98,7 @@ from .const import ( MODELS_PURIFIER_MIIO, MODELS_PURIFIER_MIOT, ) -from .device import XiaomiCoordinatedMiioEntity +from .entity import XiaomiCoordinatedMiioEntity ATTR_DELAY_OFF_COUNTDOWN = "delay_off_countdown" ATTR_FAN_LEVEL = "fan_level" @@ -139,7 +141,7 @@ class FavoriteLevelValues: NUMBER_TYPES = { FEATURE_SET_MOTOR_SPEED: XiaomiMiioNumberDescription( key=ATTR_MOTOR_SPEED, - name="Motor speed", + translation_key=ATTR_MOTOR_SPEED, icon="mdi:fast-forward-outline", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, native_min_value=200, @@ -151,7 +153,7 @@ NUMBER_TYPES = { ), FEATURE_SET_FAVORITE_LEVEL: XiaomiMiioNumberDescription( key=ATTR_FAVORITE_LEVEL, - name="Favorite level", + translation_key=ATTR_FAVORITE_LEVEL, icon="mdi:star-cog", native_min_value=0, native_max_value=17, @@ -161,7 +163,7 @@ NUMBER_TYPES = { ), FEATURE_SET_FAN_LEVEL: XiaomiMiioNumberDescription( key=ATTR_FAN_LEVEL, - name="Fan level", + translation_key=ATTR_FAN_LEVEL, icon="mdi:fan", native_min_value=1, native_max_value=3, @@ -171,7 +173,7 @@ NUMBER_TYPES = { ), FEATURE_SET_VOLUME: XiaomiMiioNumberDescription( key=ATTR_VOLUME, - name="Volume", + translation_key=ATTR_VOLUME, icon="mdi:volume-high", native_min_value=0, native_max_value=100, @@ -181,7 +183,7 @@ NUMBER_TYPES = { ), FEATURE_SET_OSCILLATION_ANGLE: XiaomiMiioNumberDescription( key=ATTR_OSCILLATION_ANGLE, - name="Oscillation angle", + translation_key=ATTR_OSCILLATION_ANGLE, icon="mdi:angle-acute", native_unit_of_measurement=DEGREE, native_min_value=1, @@ -192,7 +194,7 @@ NUMBER_TYPES = { ), FEATURE_SET_DELAY_OFF_COUNTDOWN: XiaomiMiioNumberDescription( key=ATTR_DELAY_OFF_COUNTDOWN, - name="Delay off countdown", + translation_key=ATTR_DELAY_OFF_COUNTDOWN, icon="mdi:fan-off", native_unit_of_measurement=UnitOfTime.MINUTES, native_min_value=0, @@ -203,7 +205,7 @@ NUMBER_TYPES = { ), FEATURE_SET_LED_BRIGHTNESS: XiaomiMiioNumberDescription( key=ATTR_LED_BRIGHTNESS, - name="LED brightness", + translation_key=ATTR_LED_BRIGHTNESS, icon="mdi:brightness-6", native_min_value=0, native_max_value=100, @@ -213,7 +215,7 @@ NUMBER_TYPES = { ), FEATURE_SET_LED_BRIGHTNESS_LEVEL: XiaomiMiioNumberDescription( key=ATTR_LED_BRIGHTNESS_LEVEL, - name="LED brightness", + translation_key=ATTR_LED_BRIGHTNESS_LEVEL, icon="mdi:brightness-6", native_min_value=0, native_max_value=8, @@ -223,7 +225,7 @@ NUMBER_TYPES = { ), FEATURE_SET_FAVORITE_RPM: XiaomiMiioNumberDescription( key=ATTR_FAVORITE_RPM, - name="Favorite motor speed", + translation_key=ATTR_FAVORITE_RPM, icon="mdi:star-cog", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, native_min_value=300, @@ -244,6 +246,7 @@ MODEL_TO_FEATURES_MAP = { MODEL_AIRHUMIDIFIER_CB1: FEATURE_FLAGS_AIRHUMIDIFIER_CA_AND_CB, MODEL_AIRPURIFIER_2S: FEATURE_FLAGS_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C: FEATURE_FLAGS_AIRPURIFIER_3C, + MODEL_AIRPURIFIER_3C_REV_A: FEATURE_FLAGS_AIRPURIFIER_3C, MODEL_AIRPURIFIER_PRO: 
FEATURE_FLAGS_AIRPURIFIER_PRO, MODEL_AIRPURIFIER_PRO_V7: FEATURE_FLAGS_AIRPURIFIER_PRO_V7, MODEL_AIRPURIFIER_V1: FEATURE_FLAGS_AIRPURIFIER_V1, @@ -254,8 +257,9 @@ MODEL_TO_FEATURES_MAP = { MODEL_AIRPURIFIER_4_PRO: FEATURE_FLAGS_AIRPURIFIER_4, MODEL_AIRPURIFIER_ZA1: FEATURE_FLAGS_AIRPURIFIER_ZA1, MODEL_FAN_1C: FEATURE_FLAGS_FAN_1C, - MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11, - MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11, + MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P18: FEATURE_FLAGS_FAN_P10_P11_P18, MODEL_FAN_P5: FEATURE_FLAGS_FAN_P5, MODEL_FAN_P9: FEATURE_FLAGS_FAN_P9, MODEL_FAN_SA1: FEATURE_FLAGS_FAN, @@ -273,6 +277,7 @@ OSCILLATION_ANGLE_VALUES = { MODEL_FAN_P9: OscillationAngleValues(max_value=150, min_value=30, step=30), MODEL_FAN_P10: OscillationAngleValues(max_value=140, min_value=30, step=30), MODEL_FAN_P11: OscillationAngleValues(max_value=140, min_value=30, step=30), + MODEL_FAN_P18: OscillationAngleValues(max_value=140, min_value=30, step=30), } FAVORITE_LEVEL_VALUES = { diff --git a/homeassistant/components/xiaomi_miio/remote.py b/homeassistant/components/xiaomi_miio/remote.py index 72707109ad6..9c83f3f4674 100644 --- a/homeassistant/components/xiaomi_miio/remote.py +++ b/homeassistant/components/xiaomi_miio/remote.py @@ -77,7 +77,7 @@ async def async_setup_platform( token = config[CONF_TOKEN] # Create handler - _LOGGER.info("Initializing with host %s (token %s...)", host, token[:5]) + _LOGGER.debug("Initializing with host %s (token %s...)", host, token[:5]) # The Chuang Mi IR Remote Controller wants to be re-discovered every # 5 minutes. As long as polling is disabled the device should be @@ -89,7 +89,7 @@ async def async_setup_platform( device_info = await hass.async_add_executor_job(device.info) model = device_info.model unique_id = f"{model}-{device_info.mac_address}" - _LOGGER.info( + _LOGGER.debug( "%s %s %s detected", model, device_info.firmware_version, diff --git a/homeassistant/components/xiaomi_miio/select.py b/homeassistant/components/xiaomi_miio/select.py index a8e936aaf8f..eb0d6bca205 100644 --- a/homeassistant/components/xiaomi_miio/select.py +++ b/homeassistant/components/xiaomi_miio/select.py @@ -50,7 +50,6 @@ from .const import ( MODEL_AIRPURIFIER_3H, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_PRO, - MODEL_AIRPURIFIER_COMPACT, MODEL_AIRPURIFIER_M1, MODEL_AIRPURIFIER_M2, MODEL_AIRPURIFIER_MA2, @@ -64,7 +63,7 @@ from .const import ( MODEL_FAN_ZA3, MODEL_FAN_ZA4, ) -from .device import XiaomiCoordinatedMiioEntity +from .entity import XiaomiCoordinatedMiioEntity ATTR_DISPLAY_ORIENTATION = "display_orientation" ATTR_LED_BRIGHTNESS = "led_brightness" @@ -130,9 +129,6 @@ MODEL_TO_ATTR_MAP: dict[str, list] = { MODEL_AIRPURIFIER_4_PRO: [ AttributeEnumMapping(ATTR_LED_BRIGHTNESS, AirpurifierMiotLedBrightness) ], - MODEL_AIRPURIFIER_COMPACT: [ - AttributeEnumMapping(ATTR_LED_BRIGHTNESS, AirpurifierMiotLedBrightness) - ], MODEL_AIRPURIFIER_M1: [ AttributeEnumMapping(ATTR_LED_BRIGHTNESS, AirpurifierLedBrightness) ], diff --git a/homeassistant/components/xiaomi_miio/sensor.py b/homeassistant/components/xiaomi_miio/sensor.py index ab992a8fe96..aafcba97487 100644 --- a/homeassistant/components/xiaomi_miio/sensor.py +++ b/homeassistant/components/xiaomi_miio/sensor.py @@ -24,7 +24,6 @@ from homeassistant.components.sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - AREA_SQUARE_METERS, ATTR_BATTERY_LEVEL, ATTR_TEMPERATURE, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, 
@@ -37,6 +36,7 @@ from homeassistant.const import ( PERCENTAGE, REVOLUTIONS_PER_MINUTE, EntityCategory, + UnitOfArea, UnitOfPower, UnitOfPressure, UnitOfTemperature, @@ -62,6 +62,7 @@ from .const import ( MODEL_AIRHUMIDIFIER_CA1, MODEL_AIRHUMIDIFIER_CB1, MODEL_AIRPURIFIER_3C, + MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_LITE_RMA1, MODEL_AIRPURIFIER_4_LITE_RMB1, @@ -89,8 +90,7 @@ from .const import ( ROBOROCK_GENERIC, ROCKROBO_GENERIC, ) -from .device import XiaomiCoordinatedMiioEntity, XiaomiMiioEntity -from .gateway import XiaomiGatewayDevice +from .entity import XiaomiCoordinatedMiioEntity, XiaomiGatewayDevice, XiaomiMiioEntity _LOGGER = logging.getLogger(__name__) @@ -162,34 +162,31 @@ class XiaomiMiioSensorDescription(SensorEntityDescription): SENSOR_TYPES = { ATTR_TEMPERATURE: XiaomiMiioSensorDescription( key=ATTR_TEMPERATURE, - name="Temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, ), ATTR_HUMIDITY: XiaomiMiioSensorDescription( key=ATTR_HUMIDITY, - name="Humidity", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PRESSURE: XiaomiMiioSensorDescription( key=ATTR_PRESSURE, - name="Pressure", native_unit_of_measurement=UnitOfPressure.HPA, device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, state_class=SensorStateClass.MEASUREMENT, ), ATTR_LOAD_POWER: XiaomiMiioSensorDescription( key=ATTR_LOAD_POWER, - name="Load power", + translation_key=ATTR_LOAD_POWER, native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, ), ATTR_WATER_LEVEL: XiaomiMiioSensorDescription( key=ATTR_WATER_LEVEL, - name="Water level", + translation_key=ATTR_WATER_LEVEL, native_unit_of_measurement=PERCENTAGE, icon="mdi:water-check", state_class=SensorStateClass.MEASUREMENT, @@ -197,7 +194,7 @@ SENSOR_TYPES = { ), ATTR_ACTUAL_SPEED: XiaomiMiioSensorDescription( key=ATTR_ACTUAL_SPEED, - name="Actual speed", + translation_key=ATTR_ACTUAL_SPEED, native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -205,7 +202,7 @@ SENSOR_TYPES = { ), ATTR_CONTROL_SPEED: XiaomiMiioSensorDescription( key=ATTR_CONTROL_SPEED, - name="Control speed", + translation_key=ATTR_CONTROL_SPEED, native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -213,7 +210,7 @@ SENSOR_TYPES = { ), ATTR_FAVORITE_SPEED: XiaomiMiioSensorDescription( key=ATTR_FAVORITE_SPEED, - name="Favorite speed", + translation_key=ATTR_FAVORITE_SPEED, native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -221,7 +218,7 @@ SENSOR_TYPES = { ), ATTR_MOTOR_SPEED: XiaomiMiioSensorDescription( key=ATTR_MOTOR_SPEED, - name="Motor speed", + translation_key=ATTR_MOTOR_SPEED, native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -229,7 +226,7 @@ SENSOR_TYPES = { ), ATTR_MOTOR2_SPEED: XiaomiMiioSensorDescription( key=ATTR_MOTOR2_SPEED, - name="Second motor speed", + translation_key=ATTR_MOTOR2_SPEED, native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -237,7 +234,7 @@ SENSOR_TYPES = { ), ATTR_USE_TIME: XiaomiMiioSensorDescription( key=ATTR_USE_TIME, - name="Use time", + translation_key=ATTR_USE_TIME, 
native_unit_of_measurement=UnitOfTime.SECONDS, icon="mdi:progress-clock", device_class=SensorDeviceClass.DURATION, @@ -247,54 +244,52 @@ SENSOR_TYPES = { ), ATTR_ILLUMINANCE: XiaomiMiioSensorDescription( key=ATTR_ILLUMINANCE, - name="Illuminance", + translation_key=ATTR_ILLUMINANCE, native_unit_of_measurement=UNIT_LUMEN, state_class=SensorStateClass.MEASUREMENT, ), ATTR_ILLUMINANCE_LUX: XiaomiMiioSensorDescription( key=ATTR_ILLUMINANCE, - name="Illuminance", native_unit_of_measurement=LIGHT_LUX, device_class=SensorDeviceClass.ILLUMINANCE, state_class=SensorStateClass.MEASUREMENT, ), ATTR_AIR_QUALITY: XiaomiMiioSensorDescription( key=ATTR_AIR_QUALITY, + translation_key=ATTR_AIR_QUALITY, native_unit_of_measurement="AQI", icon="mdi:cloud", state_class=SensorStateClass.MEASUREMENT, ), ATTR_TVOC: XiaomiMiioSensorDescription( key=ATTR_TVOC, - name="TVOC", + translation_key=ATTR_TVOC, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, ), ATTR_PM10: XiaomiMiioSensorDescription( key=ATTR_PM10, - name="PM10", native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.PM10, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PM25: XiaomiMiioSensorDescription( key=ATTR_AQI, - name="PM2.5", + translation_key=ATTR_AQI, native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.PM25, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PM25_2: XiaomiMiioSensorDescription( key=ATTR_PM25, - name="PM2.5", native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.PM25, state_class=SensorStateClass.MEASUREMENT, ), ATTR_FILTER_LIFE_REMAINING: XiaomiMiioSensorDescription( key=ATTR_FILTER_LIFE_REMAINING, - name="Filter lifetime remaining", + translation_key=ATTR_FILTER_LIFE_REMAINING, native_unit_of_measurement=PERCENTAGE, icon="mdi:air-filter", state_class=SensorStateClass.MEASUREMENT, @@ -303,7 +298,7 @@ SENSOR_TYPES = { ), ATTR_FILTER_USE: XiaomiMiioSensorDescription( key=ATTR_FILTER_HOURS_USED, - name="Filter use", + translation_key=ATTR_FILTER_HOURS_USED, native_unit_of_measurement=UnitOfTime.HOURS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -312,7 +307,7 @@ SENSOR_TYPES = { ), ATTR_FILTER_LEFT_TIME: XiaomiMiioSensorDescription( key=ATTR_FILTER_LEFT_TIME, - name="Filter lifetime left", + translation_key=ATTR_FILTER_LEFT_TIME, native_unit_of_measurement=UnitOfTime.DAYS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -321,7 +316,7 @@ SENSOR_TYPES = { ), ATTR_DUST_FILTER_LIFE_REMAINING: XiaomiMiioSensorDescription( key=ATTR_DUST_FILTER_LIFE_REMAINING, - name="Dust filter lifetime remaining", + translation_key=ATTR_DUST_FILTER_LIFE_REMAINING, native_unit_of_measurement=PERCENTAGE, icon="mdi:air-filter", state_class=SensorStateClass.MEASUREMENT, @@ -330,7 +325,7 @@ SENSOR_TYPES = { ), ATTR_DUST_FILTER_LIFE_REMAINING_DAYS: XiaomiMiioSensorDescription( key=ATTR_DUST_FILTER_LIFE_REMAINING_DAYS, - name="Dust filter lifetime remaining days", + translation_key=ATTR_DUST_FILTER_LIFE_REMAINING_DAYS, native_unit_of_measurement=UnitOfTime.DAYS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -339,7 +334,7 @@ SENSOR_TYPES = { ), ATTR_UPPER_FILTER_LIFE_REMAINING: XiaomiMiioSensorDescription( key=ATTR_UPPER_FILTER_LIFE_REMAINING, - name="Upper filter lifetime remaining", + 
translation_key=ATTR_UPPER_FILTER_LIFE_REMAINING, native_unit_of_measurement=PERCENTAGE, icon="mdi:air-filter", state_class=SensorStateClass.MEASUREMENT, @@ -348,7 +343,7 @@ SENSOR_TYPES = { ), ATTR_UPPER_FILTER_LIFE_REMAINING_DAYS: XiaomiMiioSensorDescription( key=ATTR_UPPER_FILTER_LIFE_REMAINING_DAYS, - name="Upper filter lifetime remaining days", + translation_key=ATTR_UPPER_FILTER_LIFE_REMAINING_DAYS, native_unit_of_measurement=UnitOfTime.DAYS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -357,14 +352,13 @@ SENSOR_TYPES = { ), ATTR_CARBON_DIOXIDE: XiaomiMiioSensorDescription( key=ATTR_CARBON_DIOXIDE, - name="Carbon dioxide", native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, device_class=SensorDeviceClass.CO2, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PURIFY_VOLUME: XiaomiMiioSensorDescription( key=ATTR_PURIFY_VOLUME, - name="Purify volume", + translation_key=ATTR_PURIFY_VOLUME, native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, device_class=SensorDeviceClass.VOLUME, state_class=SensorStateClass.TOTAL_INCREASING, @@ -373,7 +367,6 @@ SENSOR_TYPES = { ), ATTR_BATTERY: XiaomiMiioSensorDescription( key=ATTR_BATTERY, - name="Battery", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.BATTERY, state_class=SensorStateClass.MEASUREMENT, @@ -568,6 +561,7 @@ MODEL_TO_SENSORS_MAP: dict[str, tuple[str, ...]] = { MODEL_AIRHUMIDIFIER_CA1: HUMIDIFIER_CA1_CB1_SENSORS, MODEL_AIRHUMIDIFIER_CB1: HUMIDIFIER_CA1_CB1_SENSORS, MODEL_AIRPURIFIER_3C: PURIFIER_3C_SENSORS, + MODEL_AIRPURIFIER_3C_REV_A: PURIFIER_3C_SENSORS, MODEL_AIRPURIFIER_4_LITE_RMA1: PURIFIER_4_LITE_SENSORS, MODEL_AIRPURIFIER_4_LITE_RMB1: PURIFIER_4_LITE_SENSORS, MODEL_AIRPURIFIER_4: PURIFIER_4_SENSORS, @@ -587,7 +581,7 @@ VACUUM_SENSORS = { f"dnd_{ATTR_DND_START}": XiaomiMiioSensorDescription( key=ATTR_DND_START, icon="mdi:minus-circle-off", - name="DnD start", + translation_key="dnd_start", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.dnd_status, entity_registry_enabled_default=False, @@ -596,7 +590,7 @@ VACUUM_SENSORS = { f"dnd_{ATTR_DND_END}": XiaomiMiioSensorDescription( key=ATTR_DND_END, icon="mdi:minus-circle-off", - name="DnD end", + translation_key="dnd_end", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.dnd_status, entity_registry_enabled_default=False, @@ -605,7 +599,7 @@ VACUUM_SENSORS = { f"last_clean_{ATTR_LAST_CLEAN_START}": XiaomiMiioSensorDescription( key=ATTR_LAST_CLEAN_START, icon="mdi:clock-time-twelve", - name="Last clean start", + translation_key="last_clean_start", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, entity_category=EntityCategory.DIAGNOSTIC, @@ -615,7 +609,7 @@ VACUUM_SENSORS = { icon="mdi:clock-time-twelve", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, - name="Last clean end", + translation_key="last_clean_end", entity_category=EntityCategory.DIAGNOSTIC, ), f"last_clean_{ATTR_LAST_CLEAN_TIME}": XiaomiMiioSensorDescription( @@ -624,15 +618,15 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_LAST_CLEAN_TIME, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, - name="Last clean duration", + translation_key=ATTR_LAST_CLEAN_TIME, entity_category=EntityCategory.DIAGNOSTIC, ), f"last_clean_{ATTR_LAST_CLEAN_AREA}": XiaomiMiioSensorDescription( - native_unit_of_measurement=AREA_SQUARE_METERS, + 
native_unit_of_measurement=UnitOfArea.SQUARE_METERS, icon="mdi:texture-box", key=ATTR_LAST_CLEAN_AREA, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, - name="Last clean area", + translation_key=ATTR_LAST_CLEAN_AREA, entity_category=EntityCategory.DIAGNOSTIC, ), f"current_{ATTR_STATUS_CLEAN_TIME}": XiaomiMiioSensorDescription( @@ -641,16 +635,16 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_STATUS_CLEAN_TIME, parent_key=VacuumCoordinatorDataAttributes.status, - name="Current clean duration", + translation_key=ATTR_STATUS_CLEAN_TIME, entity_category=EntityCategory.DIAGNOSTIC, ), f"current_{ATTR_LAST_CLEAN_AREA}": XiaomiMiioSensorDescription( - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, icon="mdi:texture-box", key=ATTR_STATUS_CLEAN_AREA, parent_key=VacuumCoordinatorDataAttributes.status, entity_category=EntityCategory.DIAGNOSTIC, - name="Current clean area", + translation_key=ATTR_STATUS_CLEAN_AREA, ), f"clean_history_{ATTR_CLEAN_HISTORY_TOTAL_DURATION}": XiaomiMiioSensorDescription( native_unit_of_measurement=UnitOfTime.SECONDS, @@ -658,16 +652,16 @@ VACUUM_SENSORS = { icon="mdi:timer-sand", key=ATTR_CLEAN_HISTORY_TOTAL_DURATION, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - name="Total duration", + translation_key=ATTR_CLEAN_HISTORY_TOTAL_DURATION, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), f"clean_history_{ATTR_CLEAN_HISTORY_TOTAL_AREA}": XiaomiMiioSensorDescription( - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, icon="mdi:texture-box", key=ATTR_CLEAN_HISTORY_TOTAL_AREA, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - name="Total clean area", + translation_key=ATTR_CLEAN_HISTORY_TOTAL_AREA, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -677,7 +671,7 @@ VACUUM_SENSORS = { state_class=SensorStateClass.TOTAL_INCREASING, key=ATTR_CLEAN_HISTORY_COUNT, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - name="Total clean count", + translation_key=ATTR_CLEAN_HISTORY_COUNT, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -687,7 +681,7 @@ VACUUM_SENSORS = { state_class=SensorStateClass.TOTAL_INCREASING, key=ATTR_CLEAN_HISTORY_DUST_COLLECTION_COUNT, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - name="Total dust collection count", + translation_key=ATTR_CLEAN_HISTORY_DUST_COLLECTION_COUNT, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -697,7 +691,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_MAIN_BRUSH_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - name="Main brush left", + translation_key=ATTR_CONSUMABLE_STATUS_MAIN_BRUSH_LEFT, entity_category=EntityCategory.DIAGNOSTIC, ), f"consumable_{ATTR_CONSUMABLE_STATUS_SIDE_BRUSH_LEFT}": XiaomiMiioSensorDescription( @@ -706,7 +700,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_SIDE_BRUSH_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - name="Side brush left", + translation_key=ATTR_CONSUMABLE_STATUS_SIDE_BRUSH_LEFT, entity_category=EntityCategory.DIAGNOSTIC, ), f"consumable_{ATTR_CONSUMABLE_STATUS_FILTER_LEFT}": XiaomiMiioSensorDescription( @@ -715,7 +709,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, 
key=ATTR_CONSUMABLE_STATUS_FILTER_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - name="Filter left", + translation_key=ATTR_CONSUMABLE_STATUS_FILTER_LEFT, entity_category=EntityCategory.DIAGNOSTIC, ), f"consumable_{ATTR_CONSUMABLE_STATUS_SENSOR_DIRTY_LEFT}": XiaomiMiioSensorDescription( @@ -724,7 +718,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_SENSOR_DIRTY_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - name="Sensor dirty left", + translation_key=ATTR_CONSUMABLE_STATUS_SENSOR_DIRTY_LEFT, entity_category=EntityCategory.DIAGNOSTIC, ), } diff --git a/homeassistant/components/xiaomi_miio/strings.json b/homeassistant/components/xiaomi_miio/strings.json index bbdc3f5737d..bafc1ec543b 100644 --- a/homeassistant/components/xiaomi_miio/strings.json +++ b/homeassistant/components/xiaomi_miio/strings.json @@ -105,6 +105,223 @@ } } } + }, + "binary_sensor": { + "no_water": { + "name": "Water tank empty" + }, + "water_tank_detached": { + "name": "Water tank" + }, + "ptc_status": { + "name": "Auxiliary heat status" + }, + "powersupply_attached": { + "name": "Power supply" + }, + "is_water_box_attached": { + "name": "Mop attached" + }, + "is_water_shortage": { + "name": "Water shortage" + }, + "is_water_box_carriage_attached": { + "name": "[%key:component::xiaomi_miio::entity::binary_sensor::is_water_box_attached::name%]" + } + }, + "button": { + "reset_dust_filter": { + "name": "Reset dust filter" + }, + "reset_upper_filter": { + "name": "Reset upper filter" + }, + "reset_vacuum_main_brush": { + "name": "Reset main brush" + }, + "reset_vacuum_side_brush": { + "name": "Reset side brush" + }, + "reset_vacuum_filter": { + "name": "Reset filter" + }, + "reset_vacuum_sensor_dirty": { + "name": "Reset sensor dirty" + } + }, + "number": { + "motor_speed": { + "name": "Motor speed" + }, + "favorite_level": { + "name": "Favorite level" + }, + "fan_level": { + "name": "Fan level" + }, + "volume": { + "name": "Volume" + }, + "angle": { + "name": "Oscillation angle" + }, + "delay_off_countdown": { + "name": "Delay off countdown" + }, + "led_brightness": { + "name": "LED brightness" + }, + "led_brightness_level": { + "name": "LED brightness" + }, + "favorite_rpm": { + "name": "Favorite motor speed" + } + }, + "sensor": { + "load_power": { + "name": "Load power" + }, + "water_level": { + "name": "Water level" + }, + "actual_speed": { + "name": "Actual speed" + }, + "control_speed": { + "name": "Control speed" + }, + "favorite_speed": { + "name": "Favorite speed" + }, + "motor_speed": { + "name": "[%key:component::xiaomi_miio::entity::number::motor_speed::name%]" + }, + "motor2_speed": { + "name": "Second motor speed" + }, + "use_time": { + "name": "Use time" + }, + "illuminance": { + "name": "[%key:component::sensor::entity_component::illuminance::name%]" + }, + "air_quality": { + "name": "Air quality" + }, + "tvoc": { + "name": "TVOC" + }, + "air_quality_index": { + "name": "Air quality index" + }, + "filter_life_remaining": { + "name": "Filter life remaining" + }, + "filter_hours_used": { + "name": "Filter use" + }, + "filter_left_time": { + "name": "Filter lifetime remaining" + }, + "dust_filter_life_remaining": { + "name": "Dust filter life remaining" + }, + "dust_filter_life_remaining_days": { + "name": "Dust filter lifetime remaining days" + }, + "upper_filter_life_remaining": { + "name": "Upper filter life remaining" + }, + "upper_filter_life_remaining_days": { + "name": "Upper filter lifetime remaining days" + }, + 
"purify_volume": { + "name": "Purify volume" + }, + "dnd_start": { + "name": "DnD start" + }, + "dnd_end": { + "name": "DnD end" + }, + "last_clean_start": { + "name": "Last clean start" + }, + "last_clean_end": { + "name": "Last clean end" + }, + "duration": { + "name": "Last clean duration" + }, + "area": { + "name": "Last clean area" + }, + "clean_time": { + "name": "Current clean duration" + }, + "clean_area": { + "name": "Current clean area" + }, + "total_duration": { + "name": "Total duration" + }, + "total_area": { + "name": "Total clean area" + }, + "count": { + "name": "Total clean count" + }, + "dust_collection_count": { + "name": "Total dust collection count" + }, + "main_brush_left": { + "name": "Main brush remaining" + }, + "side_brush_left": { + "name": "Side brush remaining" + }, + "filter_left": { + "name": "Filter remaining" + }, + "sensor_dirty_left": { + "name": "Sensor dirty remaining" + } + }, + "switch": { + "buzzer": { + "name": "Buzzer" + }, + "child_lock": { + "name": "Child lock" + }, + "display": { + "name": "Display" + }, + "dry": { + "name": "Dry mode" + }, + "clean_mode": { + "name": "Clean mode" + }, + "led": { + "name": "LED" + }, + "learn_mode": { + "name": "Learn mode" + }, + "auto_detect": { + "name": "Auto detect" + }, + "ionizer": { + "name": "Ionizer" + }, + "anion": { + "name": "[%key:component::xiaomi_miio::entity::switch::ionizer::name%]" + }, + "ptc": { + "name": "Auxiliary heat" + } } }, "services": { @@ -222,7 +439,7 @@ }, "remote_learn_command": { "name": "Remote learn command", - "description": "Learns an IR command, press \"Perform action\", point the remote at the IR device, and the learned command will be shown as a notification in Overview.", + "description": "Learns an IR command, select **Perform action**, point the remote at the IR device, and the learned command will be shown as a notification in Overview.", "fields": { "slot": { "name": "Slot", diff --git a/homeassistant/components/xiaomi_miio/switch.py b/homeassistant/components/xiaomi_miio/switch.py index 797a98d9fa1..02f4d4e94e5 100644 --- a/homeassistant/components/xiaomi_miio/switch.py +++ b/homeassistant/components/xiaomi_miio/switch.py @@ -59,7 +59,7 @@ from .const import ( FEATURE_FLAGS_FAN_1C, FEATURE_FLAGS_FAN_P5, FEATURE_FLAGS_FAN_P9, - FEATURE_FLAGS_FAN_P10_P11, + FEATURE_FLAGS_FAN_P10_P11_P18, FEATURE_FLAGS_FAN_ZA5, FEATURE_SET_ANION, FEATURE_SET_AUTO_DETECT, @@ -84,6 +84,7 @@ from .const import ( MODEL_AIRPURIFIER_2H, MODEL_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C, + MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_LITE_RMA1, MODEL_AIRPURIFIER_4_LITE_RMB1, @@ -98,6 +99,7 @@ from .const import ( MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_ZA1, MODEL_FAN_ZA3, MODEL_FAN_ZA4, @@ -113,8 +115,7 @@ from .const import ( SERVICE_SET_WIFI_LED_ON, SUCCESS, ) -from .device import XiaomiCoordinatedMiioEntity, XiaomiMiioEntity -from .gateway import XiaomiGatewayDevice +from .entity import XiaomiCoordinatedMiioEntity, XiaomiGatewayDevice, XiaomiMiioEntity from .typing import ServiceMethodDetails _LOGGER = logging.getLogger(__name__) @@ -200,6 +201,7 @@ MODEL_TO_FEATURES_MAP = { MODEL_AIRPURIFIER_2H: FEATURE_FLAGS_AIRPURIFIER_2S, MODEL_AIRPURIFIER_2S: FEATURE_FLAGS_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C: FEATURE_FLAGS_AIRPURIFIER_3C, + MODEL_AIRPURIFIER_3C_REV_A: FEATURE_FLAGS_AIRPURIFIER_3C, MODEL_AIRPURIFIER_PRO: FEATURE_FLAGS_AIRPURIFIER_PRO, MODEL_AIRPURIFIER_PRO_V7: FEATURE_FLAGS_AIRPURIFIER_PRO_V7, MODEL_AIRPURIFIER_V1: 
FEATURE_FLAGS_AIRPURIFIER_V1, @@ -210,8 +212,9 @@ MODEL_TO_FEATURES_MAP = { MODEL_AIRPURIFIER_4_PRO: FEATURE_FLAGS_AIRPURIFIER_4, MODEL_AIRPURIFIER_ZA1: FEATURE_FLAGS_AIRPURIFIER_ZA1, MODEL_FAN_1C: FEATURE_FLAGS_FAN_1C, - MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11, - MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11, + MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P18: FEATURE_FLAGS_FAN_P10_P11_P18, MODEL_FAN_P5: FEATURE_FLAGS_FAN_P5, MODEL_FAN_P9: FEATURE_FLAGS_FAN_P9, MODEL_FAN_ZA1: FEATURE_FLAGS_FAN, @@ -236,7 +239,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_BUZZER, feature=FEATURE_SET_BUZZER, - name="Buzzer", + translation_key=ATTR_BUZZER, icon="mdi:volume-high", method_on="async_set_buzzer_on", method_off="async_set_buzzer_off", @@ -245,7 +248,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_CHILD_LOCK, feature=FEATURE_SET_CHILD_LOCK, - name="Child lock", + translation_key=ATTR_CHILD_LOCK, icon="mdi:lock", method_on="async_set_child_lock_on", method_off="async_set_child_lock_off", @@ -254,7 +257,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_DISPLAY, feature=FEATURE_SET_DISPLAY, - name="Display", + translation_key=ATTR_DISPLAY, icon="mdi:led-outline", method_on="async_set_display_on", method_off="async_set_display_off", @@ -263,7 +266,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_DRY, feature=FEATURE_SET_DRY, - name="Dry mode", + translation_key=ATTR_DRY, icon="mdi:hair-dryer", method_on="async_set_dry_on", method_off="async_set_dry_off", @@ -272,7 +275,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_CLEAN, feature=FEATURE_SET_CLEAN, - name="Clean mode", + translation_key=ATTR_CLEAN, icon="mdi:shimmer", method_on="async_set_clean_on", method_off="async_set_clean_off", @@ -282,7 +285,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_LED, feature=FEATURE_SET_LED, - name="LED", + translation_key=ATTR_LED, icon="mdi:led-outline", method_on="async_set_led_on", method_off="async_set_led_off", @@ -291,7 +294,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_LEARN_MODE, feature=FEATURE_SET_LEARN_MODE, - name="Learn mode", + translation_key=ATTR_LEARN_MODE, icon="mdi:school-outline", method_on="async_set_learn_mode_on", method_off="async_set_learn_mode_off", @@ -300,7 +303,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_AUTO_DETECT, feature=FEATURE_SET_AUTO_DETECT, - name="Auto detect", + translation_key=ATTR_AUTO_DETECT, method_on="async_set_auto_detect_on", method_off="async_set_auto_detect_off", entity_category=EntityCategory.CONFIG, @@ -308,7 +311,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_IONIZER, feature=FEATURE_SET_IONIZER, - name="Ionizer", + translation_key=ATTR_IONIZER, icon="mdi:shimmer", method_on="async_set_ionizer_on", method_off="async_set_ionizer_off", @@ -317,7 +320,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_ANION, feature=FEATURE_SET_ANION, - name="Ionizer", + translation_key=ATTR_ANION, icon="mdi:shimmer", method_on="async_set_anion_on", method_off="async_set_anion_off", @@ -326,7 +329,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_PTC, feature=FEATURE_SET_PTC, - name="Auxiliary heat", + translation_key=ATTR_PTC, icon="mdi:radiator", method_on="async_set_ptc_on", method_off="async_set_ptc_off", diff --git a/homeassistant/components/xiaomi_miio/vacuum.py b/homeassistant/components/xiaomi_miio/vacuum.py index ac833f7646c..532eb9581cd 100644 --- 
a/homeassistant/components/xiaomi_miio/vacuum.py +++ b/homeassistant/components/xiaomi_miio/vacuum.py @@ -10,13 +10,8 @@ from miio import DeviceException import voluptuous as vol from homeassistant.components.vacuum import ( - STATE_CLEANING, - STATE_DOCKED, - STATE_ERROR, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -41,7 +36,7 @@ from .const import ( SERVICE_START_REMOTE_CONTROL, SERVICE_STOP_REMOTE_CONTROL, ) -from .device import XiaomiCoordinatedMiioEntity +from .entity import XiaomiCoordinatedMiioEntity _LOGGER = logging.getLogger(__name__) @@ -55,29 +50,29 @@ ATTR_ZONE_REPEATER = "repeats" ATTR_TIMERS = "timers" STATE_CODE_TO_STATE = { - 1: STATE_IDLE, # "Starting" - 2: STATE_IDLE, # "Charger disconnected" - 3: STATE_IDLE, # "Idle" - 4: STATE_CLEANING, # "Remote control active" - 5: STATE_CLEANING, # "Cleaning" - 6: STATE_RETURNING, # "Returning home" - 7: STATE_CLEANING, # "Manual mode" - 8: STATE_DOCKED, # "Charging" - 9: STATE_ERROR, # "Charging problem" - 10: STATE_PAUSED, # "Paused" - 11: STATE_CLEANING, # "Spot cleaning" - 12: STATE_ERROR, # "Error" - 13: STATE_IDLE, # "Shutting down" - 14: STATE_DOCKED, # "Updating" - 15: STATE_RETURNING, # "Docking" - 16: STATE_CLEANING, # "Going to target" - 17: STATE_CLEANING, # "Zoned cleaning" - 18: STATE_CLEANING, # "Segment cleaning" - 22: STATE_DOCKED, # "Emptying the bin" on s7+ - 23: STATE_DOCKED, # "Washing the mop" on s7maxV - 26: STATE_RETURNING, # "Going to wash the mop" on s7maxV - 100: STATE_DOCKED, # "Charging complete" - 101: STATE_ERROR, # "Device offline" + 1: VacuumActivity.IDLE, # "Starting" + 2: VacuumActivity.IDLE, # "Charger disconnected" + 3: VacuumActivity.IDLE, # "Idle" + 4: VacuumActivity.CLEANING, # "Remote control active" + 5: VacuumActivity.CLEANING, # "Cleaning" + 6: VacuumActivity.RETURNING, # "Returning home" + 7: VacuumActivity.CLEANING, # "Manual mode" + 8: VacuumActivity.DOCKED, # "Charging" + 9: VacuumActivity.ERROR, # "Charging problem" + 10: VacuumActivity.PAUSED, # "Paused" + 11: VacuumActivity.CLEANING, # "Spot cleaning" + 12: VacuumActivity.ERROR, # "Error" + 13: VacuumActivity.IDLE, # "Shutting down" + 14: VacuumActivity.DOCKED, # "Updating" + 15: VacuumActivity.RETURNING, # "Docking" + 16: VacuumActivity.CLEANING, # "Going to target" + 17: VacuumActivity.CLEANING, # "Zoned cleaning" + 18: VacuumActivity.CLEANING, # "Segment cleaning" + 22: VacuumActivity.DOCKED, # "Emptying the bin" on s7+ + 23: VacuumActivity.DOCKED, # "Washing the mop" on s7maxV + 26: VacuumActivity.RETURNING, # "Going to wash the mop" on s7maxV + 100: VacuumActivity.DOCKED, # "Charging complete" + 101: VacuumActivity.ERROR, # "Device offline" } @@ -211,7 +206,7 @@ class MiroboVacuum( ) -> None: """Initialize the Xiaomi vacuum cleaner robot handler.""" super().__init__(device, entry, unique_id, coordinator) - self._state: str | None = None + self._state: VacuumActivity | None = None async def async_added_to_hass(self) -> None: """Run when entity is about to be added to hass.""" @@ -219,12 +214,12 @@ class MiroboVacuum( self._handle_coordinator_update() @property - def state(self) -> str | None: + def activity(self) -> VacuumActivity | None: """Return the status of the vacuum cleaner.""" # The vacuum reverts back to an idle state after erroring out. # We want to keep returning an error until it has been cleared. 
if self.coordinator.data.status.got_error: - return STATE_ERROR + return VacuumActivity.ERROR return self._state diff --git a/homeassistant/components/xiaomi_tv/manifest.json b/homeassistant/components/xiaomi_tv/manifest.json index 2e913e80fdc..8335adff333 100644 --- a/homeassistant/components/xiaomi_tv/manifest.json +++ b/homeassistant/components/xiaomi_tv/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/xiaomi_tv", "iot_class": "assumed_state", "loggers": ["pymitv"], + "quality_scale": "legacy", "requirements": ["pymitv==1.4.3"] } diff --git a/homeassistant/components/xmpp/manifest.json b/homeassistant/components/xmpp/manifest.json index 308c3d70978..d77d70ff86c 100644 --- a/homeassistant/components/xmpp/manifest.json +++ b/homeassistant/components/xmpp/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/xmpp", "iot_class": "cloud_push", "loggers": ["pyasn1", "slixmpp"], + "quality_scale": "legacy", "requirements": ["slixmpp==1.8.5", "emoji==2.8.0"] } diff --git a/homeassistant/components/xmpp/notify.py b/homeassistant/components/xmpp/notify.py index c73248f2524..3fb5dd166a1 100644 --- a/homeassistant/components/xmpp/notify.py +++ b/homeassistant/components/xmpp/notify.py @@ -190,13 +190,13 @@ async def async_send_message( # noqa: C901 _LOGGER.debug("Timeout set to %ss", timeout) url = await self.upload_file(timeout=timeout) - _LOGGER.info("Upload success") + _LOGGER.debug("Upload success") for recipient in recipients: if room: - _LOGGER.info("Sending file to %s", room) + _LOGGER.debug("Sending file to %s", room) message = self.Message(sto=room, stype="groupchat") else: - _LOGGER.info("Sending file to %s", recipient) + _LOGGER.debug("Sending file to %s", recipient) message = self.Message(sto=recipient, stype="chat") message["body"] = url message["oob"]["url"] = url @@ -264,7 +264,7 @@ async def async_send_message( # noqa: C901 uploaded via XEP_0363 and HTTP and returns the resulting URL """ - _LOGGER.info("Getting file from %s", url) + _LOGGER.debug("Getting file from %s", url) def get_url(url): """Return result for GET request to url.""" @@ -295,7 +295,7 @@ async def async_send_message( # noqa: C901 _LOGGER.debug("Got %s extension", extension) filename = self.get_random_filename(None, extension=extension) - _LOGGER.info("Uploading file from URL, %s", filename) + _LOGGER.debug("Uploading file from URL, %s", filename) return await self["xep_0363"].upload_file( filename, @@ -313,7 +313,7 @@ async def async_send_message( # noqa: C901 async def upload_file_from_path(self, path: str, timeout=None): """Upload a file from a local file path via XEP_0363.""" - _LOGGER.info("Uploading file from path, %s", path) + _LOGGER.debug("Uploading file from path, %s", path) if not hass.config.is_allowed_path(path): raise PermissionError("Could not access file. 
Path not allowed") @@ -374,6 +374,6 @@ async def async_send_message( # noqa: C901 @staticmethod def discard_ssl_invalid_cert(event): """Do nothing if ssl certificate is invalid.""" - _LOGGER.info("Ignoring invalid SSL certificate as requested") + _LOGGER.debug("Ignoring invalid SSL certificate as requested") SendNotificationBot() diff --git a/homeassistant/components/xs1/__init__.py b/homeassistant/components/xs1/__init__.py index e24fbc0181e..6f7197817d7 100644 --- a/homeassistant/components/xs1/__init__.py +++ b/homeassistant/components/xs1/__init__.py @@ -1,6 +1,5 @@ """Support for the EZcontrol XS1 gateway.""" -import asyncio import logging import voluptuous as vol @@ -17,7 +16,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType _LOGGER = logging.getLogger(__name__) @@ -44,11 +42,6 @@ CONFIG_SCHEMA = vol.Schema( PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] -# Lock used to limit the amount of concurrent update requests -# as the XS1 Gateway can only handle a very -# small amount of concurrent requests -UPDATE_LOCK = asyncio.Lock() - def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up XS1 integration.""" @@ -88,16 +81,3 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: discovery.load_platform(hass, platform, DOMAIN, {}, config) return True - - -class XS1DeviceEntity(Entity): - """Representation of a base XS1 device.""" - - def __init__(self, device): - """Initialize the XS1 device.""" - self.device = device - - async def async_update(self): - """Retrieve latest device state.""" - async with UPDATE_LOCK: - await self.hass.async_add_executor_job(self.device.update) diff --git a/homeassistant/components/xs1/climate.py b/homeassistant/components/xs1/climate.py index e594f32adff..3bb80df25b2 100644 --- a/homeassistant/components/xs1/climate.py +++ b/homeassistant/components/xs1/climate.py @@ -16,7 +16,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, SENSORS, XS1DeviceEntity +from . 
import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, SENSORS +from .entity import XS1DeviceEntity MIN_TEMP = 8 MAX_TEMP = 25 @@ -55,7 +56,6 @@ class XS1ThermostatEntity(XS1DeviceEntity, ClimateEntity): _attr_hvac_mode = HVACMode.HEAT _attr_hvac_modes = [HVACMode.HEAT] _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device, sensor): """Initialize the actuator.""" diff --git a/homeassistant/components/xs1/entity.py b/homeassistant/components/xs1/entity.py new file mode 100644 index 00000000000..7239a6fd446 --- /dev/null +++ b/homeassistant/components/xs1/entity.py @@ -0,0 +1,23 @@ +"""Support for the EZcontrol XS1 gateway.""" + +import asyncio + +from homeassistant.helpers.entity import Entity + +# Lock used to limit the amount of concurrent update requests +# as the XS1 Gateway can only handle a very +# small amount of concurrent requests +UPDATE_LOCK = asyncio.Lock() + + +class XS1DeviceEntity(Entity): + """Representation of a base XS1 device.""" + + def __init__(self, device): + """Initialize the XS1 device.""" + self.device = device + + async def async_update(self): + """Retrieve latest device state.""" + async with UPDATE_LOCK: + await self.hass.async_add_executor_job(self.device.update) diff --git a/homeassistant/components/xs1/manifest.json b/homeassistant/components/xs1/manifest.json index 9f4c921642d..88a5e4427ae 100644 --- a/homeassistant/components/xs1/manifest.json +++ b/homeassistant/components/xs1/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/xs1", "iot_class": "local_polling", "loggers": ["xs1_api_client"], + "quality_scale": "legacy", "requirements": ["xs1-api-client==3.0.0"] } diff --git a/homeassistant/components/xs1/sensor.py b/homeassistant/components/xs1/sensor.py index e98fd33743b..b3895d67d82 100644 --- a/homeassistant/components/xs1/sensor.py +++ b/homeassistant/components/xs1/sensor.py @@ -9,7 +9,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, SENSORS, XS1DeviceEntity +from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, SENSORS +from .entity import XS1DeviceEntity def setup_platform( diff --git a/homeassistant/components/xs1/switch.py b/homeassistant/components/xs1/switch.py index c2af652d6ad..a8f66390a6d 100644 --- a/homeassistant/components/xs1/switch.py +++ b/homeassistant/components/xs1/switch.py @@ -11,7 +11,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, XS1DeviceEntity +from . 
import ACTUATORS, DOMAIN as COMPONENT_DOMAIN +from .entity import XS1DeviceEntity def setup_platform( diff --git a/homeassistant/components/yale/__init__.py b/homeassistant/components/yale/__init__.py new file mode 100644 index 00000000000..1cbd9c87b57 --- /dev/null +++ b/homeassistant/components/yale/__init__.py @@ -0,0 +1,81 @@ +"""Support for Yale devices.""" + +from __future__ import annotations + +from pathlib import Path +from typing import cast + +from aiohttp import ClientResponseError +from yalexs.const import Brand +from yalexs.exceptions import YaleApiError +from yalexs.manager.const import CONF_BRAND +from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation +from yalexs.manager.gateway import Config as YaleXSConfig + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EVENT_HOMEASSISTANT_STOP +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import config_entry_oauth2_flow, device_registry as dr + +from .const import DOMAIN, PLATFORMS +from .data import YaleData +from .gateway import YaleGateway +from .util import async_create_yale_clientsession + +type YaleConfigEntry = ConfigEntry[YaleData] + + +async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: + """Set up yale from a config entry.""" + session = async_create_yale_clientsession(hass) + implementation = ( + await config_entry_oauth2_flow.async_get_config_entry_implementation( + hass, entry + ) + ) + oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) + yale_gateway = YaleGateway(Path(hass.config.config_dir), session, oauth_session) + try: + await async_setup_yale(hass, entry, yale_gateway) + except (RequireValidation, InvalidAuth) as err: + raise ConfigEntryAuthFailed from err + except TimeoutError as err: + raise ConfigEntryNotReady("Timed out connecting to yale api") from err + except (YaleApiError, ClientResponseError, CannotConnect) as err: + raise ConfigEntryNotReady from err + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_setup_yale( + hass: HomeAssistant, entry: YaleConfigEntry, yale_gateway: YaleGateway +) -> None: + """Set up the yale component.""" + config = cast(YaleXSConfig, entry.data) + await yale_gateway.async_setup({**config, CONF_BRAND: Brand.YALE_GLOBAL}) + await yale_gateway.async_authenticate() + await yale_gateway.async_refresh_access_token_if_needed() + data = entry.runtime_data = YaleData(hass, yale_gateway) + entry.async_on_unload( + hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, data.async_stop) + ) + entry.async_on_unload(data.async_stop) + await data.async_setup() + + +async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: YaleConfigEntry, device_entry: dr.DeviceEntry +) -> bool: + """Remove yale config entry from a device if its no longer present.""" + return not any( + identifier + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + and config_entry.runtime_data.get_device(identifier[1]) + ) diff --git a/homeassistant/components/yale/application_credentials.py b/homeassistant/components/yale/application_credentials.py new file mode 100644 index 00000000000..31b5b7a92c7 --- 
/dev/null +++ b/homeassistant/components/yale/application_credentials.py @@ -0,0 +1,15 @@ +"""application_credentials platform the yale integration.""" + +from homeassistant.components.application_credentials import AuthorizationServer +from homeassistant.core import HomeAssistant + +OAUTH2_AUTHORIZE = "https://oauth.aaecosystem.com/authorize" +OAUTH2_TOKEN = "https://oauth.aaecosystem.com/access_token" + + +async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: + """Return authorization server.""" + return AuthorizationServer( + authorize_url=OAUTH2_AUTHORIZE, + token_url=OAUTH2_TOKEN, + ) diff --git a/homeassistant/components/yale/binary_sensor.py b/homeassistant/components/yale/binary_sensor.py new file mode 100644 index 00000000000..dbb00ad7d42 --- /dev/null +++ b/homeassistant/components/yale/binary_sensor.py @@ -0,0 +1,188 @@ +"""Support for Yale binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime, timedelta +from functools import partial +import logging + +from yalexs.activity import Activity, ActivityType +from yalexs.doorbell import DoorbellDetail +from yalexs.lock import LockDetail, LockDoorStatus +from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL +from yalexs.util import update_lock_detail_from_activity + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_call_later + +from . import YaleConfigEntry, YaleData +from .entity import YaleDescriptionEntity +from .util import ( + retrieve_ding_activity, + retrieve_doorbell_motion_activity, + retrieve_online_state, + retrieve_time_based_activity, +) + +_LOGGER = logging.getLogger(__name__) + +TIME_TO_RECHECK_DETECTION = timedelta( + seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds() * 3 +) + + +@dataclass(frozen=True, kw_only=True) +class YaleDoorbellBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Yale binary_sensor entity.""" + + value_fn: Callable[[YaleData, DoorbellDetail | LockDetail], Activity | None] + is_time_based: bool + + +SENSOR_TYPE_DOOR = BinarySensorEntityDescription( + key="open", + device_class=BinarySensorDeviceClass.DOOR, +) + +SENSOR_TYPES_VIDEO_DOORBELL = ( + YaleDoorbellBinarySensorEntityDescription( + key="motion", + device_class=BinarySensorDeviceClass.MOTION, + value_fn=retrieve_doorbell_motion_activity, + is_time_based=True, + ), + YaleDoorbellBinarySensorEntityDescription( + key="image capture", + translation_key="image_capture", + value_fn=partial( + retrieve_time_based_activity, {ActivityType.DOORBELL_IMAGE_CAPTURE} + ), + is_time_based=True, + ), + YaleDoorbellBinarySensorEntityDescription( + key="online", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=retrieve_online_state, + is_time_based=False, + ), +) + + +SENSOR_TYPES_DOORBELL: tuple[YaleDoorbellBinarySensorEntityDescription, ...] 
= ( + YaleDoorbellBinarySensorEntityDescription( + key="ding", + translation_key="ding", + device_class=BinarySensorDeviceClass.OCCUPANCY, + value_fn=retrieve_ding_activity, + is_time_based=True, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Yale binary sensors.""" + data = config_entry.runtime_data + entities: list[BinarySensorEntity] = [] + + for lock in data.locks: + detail = data.get_device_detail(lock.device_id) + if detail.doorsense: + entities.append(YaleDoorBinarySensor(data, lock, SENSOR_TYPE_DOOR)) + + if detail.doorbell: + entities.extend( + YaleDoorbellBinarySensor(data, lock, description) + for description in SENSOR_TYPES_DOORBELL + ) + + entities.extend( + YaleDoorbellBinarySensor(data, doorbell, description) + for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) + async_add_entities(entities) + + +class YaleDoorBinarySensor(YaleDescriptionEntity, BinarySensorEntity): + """Representation of an Yale Door binary sensor.""" + + _attr_device_class = BinarySensorDeviceClass.DOOR + description: BinarySensorEntityDescription + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor and update activity.""" + if door_activity := self._get_latest({ActivityType.DOOR_OPERATION}): + update_lock_detail_from_activity(self._detail, door_activity) + if door_activity.was_pushed: + self._detail.set_online(True) + + if bridge_activity := self._get_latest({ActivityType.BRIDGE_OPERATION}): + update_lock_detail_from_activity(self._detail, bridge_activity) + self._attr_available = self._detail.bridge_is_online + self._attr_is_on = self._detail.door_state == LockDoorStatus.OPEN + + +class YaleDoorbellBinarySensor(YaleDescriptionEntity, BinarySensorEntity): + """Representation of an Yale binary sensor.""" + + entity_description: YaleDoorbellBinarySensorEntityDescription + _check_for_off_update_listener: Callable[[], None] | None = None + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor.""" + self._cancel_any_pending_updates() + self._attr_is_on = bool( + self.entity_description.value_fn(self._data, self._detail) + ) + + if self.entity_description.is_time_based: + self._attr_available = retrieve_online_state(self._data, self._detail) + self._schedule_update_to_recheck_turn_off_sensor() + else: + self._attr_available = True + + @callback + def _async_scheduled_update(self, now: datetime) -> None: + """Timer callback for sensor update.""" + self._check_for_off_update_listener = None + self._update_from_data() + if not self.is_on: + self.async_write_ha_state() + + def _schedule_update_to_recheck_turn_off_sensor(self) -> None: + """Schedule an update to recheck the sensor to see if it is ready to turn off.""" + # If the sensor is already off there is nothing to do + if not self.is_on: + return + self._check_for_off_update_listener = async_call_later( + self.hass, TIME_TO_RECHECK_DETECTION, self._async_scheduled_update + ) + + def _cancel_any_pending_updates(self) -> None: + """Cancel any updates to recheck a sensor to see if it is ready to turn off.""" + if not self._check_for_off_update_listener: + return + _LOGGER.debug("%s: canceled pending update", self.entity_id) + self._check_for_off_update_listener() + self._check_for_off_update_listener = None + + async def async_will_remove_from_hass(self) -> None: + """When removing cancel any scheduled 
updates.""" + self._cancel_any_pending_updates() + await super().async_will_remove_from_hass() diff --git a/homeassistant/components/yale/button.py b/homeassistant/components/yale/button.py new file mode 100644 index 00000000000..b04ad638f0c --- /dev/null +++ b/homeassistant/components/yale/button.py @@ -0,0 +1,32 @@ +"""Support for Yale buttons.""" + +from homeassistant.components.button import ButtonEntity +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import YaleConfigEntry +from .entity import YaleEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Yale lock wake buttons.""" + data = config_entry.runtime_data + async_add_entities(YaleWakeLockButton(data, lock, "wake") for lock in data.locks) + + +class YaleWakeLockButton(YaleEntity, ButtonEntity): + """Representation of an Yale lock wake button.""" + + _attr_translation_key = "wake" + + async def async_press(self) -> None: + """Wake the device.""" + await self._data.async_status_async(self._device_id, self._hyper_bridge) + + @callback + def _update_from_data(self) -> None: + """Nothing to update as buttons are stateless.""" diff --git a/homeassistant/components/yale/camera.py b/homeassistant/components/yale/camera.py new file mode 100644 index 00000000000..217e8f5f6fd --- /dev/null +++ b/homeassistant/components/yale/camera.py @@ -0,0 +1,90 @@ +"""Support for Yale doorbell camera.""" + +from __future__ import annotations + +import logging + +from aiohttp import ClientSession +from yalexs.activity import ActivityType +from yalexs.doorbell import Doorbell +from yalexs.util import update_doorbell_image_from_activity + +from homeassistant.components.camera import Camera +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import YaleConfigEntry, YaleData +from .const import DEFAULT_NAME, DEFAULT_TIMEOUT +from .entity import YaleEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Yale cameras.""" + data = config_entry.runtime_data + # Create an aiohttp session instead of using the default one since the + # default one is likely to trigger yale's WAF if another integration + # is also using Cloudflare + session = aiohttp_client.async_create_clientsession(hass) + async_add_entities( + YaleCamera(data, doorbell, session, DEFAULT_TIMEOUT) + for doorbell in data.doorbells + ) + + +class YaleCamera(YaleEntity, Camera): + """An implementation of an Yale security camera.""" + + _attr_translation_key = "camera" + _attr_motion_detection_enabled = True + _attr_brand = DEFAULT_NAME + _image_url: str | None = None + _image_content: bytes | None = None + + def __init__( + self, data: YaleData, device: Doorbell, session: ClientSession, timeout: int + ) -> None: + """Initialize an Yale security camera.""" + super().__init__(data, device, "camera") + self._timeout = timeout + self._session = session + self._attr_model = self._detail.model + + @property + def is_recording(self) -> bool: + """Return true if the device is recording.""" + return self._device.has_subscription + + async def _async_update(self): + """Update device.""" + _LOGGER.debug("async_update called %s", self._detail.device_name) + await self._data.refresh_camera_by_id(self._device_id) + self._update_from_data() + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor.""" + if doorbell_activity := self._get_latest( + {ActivityType.DOORBELL_MOTION, ActivityType.DOORBELL_IMAGE_CAPTURE} + ): + update_doorbell_image_from_activity(self._detail, doorbell_activity) + + async def async_camera_image( + self, width: int | None = None, height: int | None = None + ) -> bytes | None: + """Return bytes of camera image.""" + self._update_from_data() + + if self._image_url is not self._detail.image_url: + self._image_content = await self._data.async_get_doorbell_image( + self._device_id, self._session, timeout=self._timeout + ) + self._image_url = self._detail.image_url + + return self._image_content diff --git a/homeassistant/components/yale/config_flow.py b/homeassistant/components/yale/config_flow.py new file mode 100644 index 00000000000..fecf286fdd6 --- /dev/null +++ b/homeassistant/components/yale/config_flow.py @@ -0,0 +1,57 @@ +"""Config flow for Yale integration.""" + +from collections.abc import Mapping +import logging +from typing import Any + +import jwt + +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.helpers import config_entry_oauth2_flow + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class YaleConfigFlow(config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN): + """Handle a config flow for Yale.""" + + VERSION = 1 + DOMAIN = DOMAIN + + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return _LOGGER + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + return await self.async_step_user() + + def _async_get_user_id_from_access_token(self, encoded: str) -> str: + """Get user ID from access token.""" + decoded = jwt.decode( + encoded, + "", + verify=False, + options={"verify_signature": 
False}, + algorithms=["HS256"], + ) + return decoded["userId"] + + async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: + """Create an entry for the flow.""" + user_id = self._async_get_user_id_from_access_token( + data["token"]["access_token"] + ) + await self.async_set_unique_id(user_id) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="reauth_invalid_user") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) + self._abort_if_unique_id_configured() + return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/yale/const.py b/homeassistant/components/yale/const.py new file mode 100644 index 00000000000..3da4fb1dfb4 --- /dev/null +++ b/homeassistant/components/yale/const.py @@ -0,0 +1,43 @@ +"""Constants for Yale devices.""" + +from homeassistant.const import Platform + +DEFAULT_TIMEOUT = 25 + +CONF_ACCESS_TOKEN_CACHE_FILE = "access_token_cache_file" +CONF_BRAND = "brand" +CONF_LOGIN_METHOD = "login_method" +CONF_INSTALL_ID = "install_id" + +VERIFICATION_CODE_KEY = "verification_code" + +MANUFACTURER = "Yale Home Inc." + +DEFAULT_NAME = "Yale" +DOMAIN = "yale" + +OPERATION_METHOD_AUTORELOCK = "autorelock" +OPERATION_METHOD_REMOTE = "remote" +OPERATION_METHOD_KEYPAD = "keypad" +OPERATION_METHOD_MANUAL = "manual" +OPERATION_METHOD_TAG = "tag" +OPERATION_METHOD_MOBILE_DEVICE = "mobile" + +ATTR_OPERATION_AUTORELOCK = "autorelock" +ATTR_OPERATION_METHOD = "method" +ATTR_OPERATION_REMOTE = "remote" +ATTR_OPERATION_KEYPAD = "keypad" +ATTR_OPERATION_MANUAL = "manual" +ATTR_OPERATION_TAG = "tag" + +LOGIN_METHODS = ["phone", "email"] +DEFAULT_LOGIN_METHOD = "email" + +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CAMERA, + Platform.EVENT, + Platform.LOCK, + Platform.SENSOR, +] diff --git a/homeassistant/components/yale/data.py b/homeassistant/components/yale/data.py new file mode 100644 index 00000000000..12736f7733d --- /dev/null +++ b/homeassistant/components/yale/data.py @@ -0,0 +1,52 @@ +"""Support for Yale devices.""" + +from __future__ import annotations + +from yalexs.lock import LockDetail +from yalexs.manager.data import YaleXSData +from yalexs_ble import YaleXSBLEDiscovery + +from homeassistant.config_entries import SOURCE_INTEGRATION_DISCOVERY +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import discovery_flow + +from .gateway import YaleGateway + +YALEXS_BLE_DOMAIN = "yalexs_ble" + + +@callback +def _async_trigger_ble_lock_discovery( + hass: HomeAssistant, locks_with_offline_keys: list[LockDetail] +) -> None: + """Update keys for the yalexs-ble integration if available.""" + for lock_detail in locks_with_offline_keys: + discovery_flow.async_create_flow( + hass, + YALEXS_BLE_DOMAIN, + context={"source": SOURCE_INTEGRATION_DISCOVERY}, + data=YaleXSBLEDiscovery( + { + "name": lock_detail.device_name, + "address": lock_detail.mac_address, + "serial": lock_detail.serial_number, + "key": lock_detail.offline_key, + "slot": lock_detail.offline_slot, + } + ), + ) + + +class YaleData(YaleXSData): + """yale data object.""" + + def __init__(self, hass: HomeAssistant, yale_gateway: YaleGateway) -> None: + """Init yale data object.""" + self._hass = hass + super().__init__(yale_gateway, HomeAssistantError) + + @callback + def async_offline_key_discovered(self, detail: LockDetail) -> None: + """Handle offline key discovery.""" + 
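# Hand the lock's offline (BLE) key to the yalexs_ble integration via a
# discovery flow so the local Bluetooth integration can be set up automatically.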
_async_trigger_ble_lock_discovery(self._hass, [detail]) diff --git a/homeassistant/components/yale/diagnostics.py b/homeassistant/components/yale/diagnostics.py new file mode 100644 index 00000000000..7e7f6179e7a --- /dev/null +++ b/homeassistant/components/yale/diagnostics.py @@ -0,0 +1,50 @@ +"""Diagnostics support for yale.""" + +from __future__ import annotations + +from typing import Any + +from yalexs.const import Brand + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.core import HomeAssistant + +from . import YaleConfigEntry + +TO_REDACT = { + "HouseID", + "OfflineKeys", + "installUserID", + "invitations", + "key", + "pins", + "pubsubChannel", + "recentImage", + "remoteOperateSecret", + "users", + "zWaveDSK", + "contentToken", +} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: YaleConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = entry.runtime_data + + return { + "locks": { + lock.device_id: async_redact_data( + data.get_device_detail(lock.device_id).raw, TO_REDACT + ) + for lock in data.locks + }, + "doorbells": { + doorbell.device_id: async_redact_data( + data.get_device_detail(doorbell.device_id).raw, TO_REDACT + ) + for doorbell in data.doorbells + }, + "brand": Brand.YALE_GLOBAL.value, + } diff --git a/homeassistant/components/yale/entity.py b/homeassistant/components/yale/entity.py new file mode 100644 index 00000000000..152070c0be3 --- /dev/null +++ b/homeassistant/components/yale/entity.py @@ -0,0 +1,115 @@ +"""Base class for Yale entity.""" + +from abc import abstractmethod + +from yalexs.activity import Activity, ActivityType +from yalexs.doorbell import Doorbell, DoorbellDetail +from yalexs.keypad import KeypadDetail +from yalexs.lock import Lock, LockDetail +from yalexs.util import get_configuration_url + +from homeassistant.const import ATTR_CONNECTIONS +from homeassistant.core import callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity, EntityDescription + +from . 
import DOMAIN, YaleData +from .const import MANUFACTURER + +DEVICE_TYPES = ["keypad", "lock", "camera", "doorbell", "door", "bell"] + + +class YaleEntity(Entity): + """Base implementation for Yale device.""" + + _attr_should_poll = False + _attr_has_entity_name = True + + def __init__( + self, data: YaleData, device: Doorbell | Lock | KeypadDetail, unique_id: str + ) -> None: + """Initialize an Yale device.""" + super().__init__() + self._data = data + self._stream = data.activity_stream + self._device = device + detail = self._detail + self._device_id = device.device_id + self._attr_unique_id = f"{device.device_id}_{unique_id}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._device_id)}, + manufacturer=MANUFACTURER, + model=detail.model, + name=device.device_name, + sw_version=detail.firmware_version, + suggested_area=_remove_device_types(device.device_name, DEVICE_TYPES), + configuration_url=get_configuration_url(data.brand), + ) + if isinstance(detail, LockDetail) and (mac := detail.mac_address): + self._attr_device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_BLUETOOTH, mac)} + + @property + def _detail(self) -> DoorbellDetail | LockDetail: + return self._data.get_device_detail(self._device.device_id) + + @property + def _hyper_bridge(self) -> bool: + """Check if the lock has a paired hyper bridge.""" + return bool(self._detail.bridge and self._detail.bridge.hyper_bridge) + + @callback + def _get_latest(self, activity_types: set[ActivityType]) -> Activity | None: + """Get the latest activity for the device.""" + return self._stream.get_latest_device_activity(self._device_id, activity_types) + + @callback + def _update_from_data_and_write_state(self) -> None: + self._update_from_data() + self.async_write_ha_state() + + @abstractmethod + def _update_from_data(self) -> None: + """Update the entity state from the data object.""" + + async def async_added_to_hass(self) -> None: + """Subscribe to updates.""" + self.async_on_remove( + self._data.async_subscribe_device_id( + self._device_id, self._update_from_data_and_write_state + ) + ) + self.async_on_remove( + self._stream.async_subscribe_device_id( + self._device_id, self._update_from_data_and_write_state + ) + ) + self._update_from_data() + + +class YaleDescriptionEntity(YaleEntity): + """An Yale entity with a description.""" + + def __init__( + self, + data: YaleData, + device: Doorbell | Lock | KeypadDetail, + description: EntityDescription, + ) -> None: + """Initialize an Yale entity with a description.""" + super().__init__(data, device, description.key) + self.entity_description = description + + +def _remove_device_types(name: str, device_types: list[str]) -> str: + """Strip device types from a string. + + Yale stores the name as Master Bed Lock + or Master Bed Door. We can come up with a + reasonable suggestion by removing the supported + device types from the string. 
+ """ + lower_name = name.lower() + for device_type in device_types: + lower_name = lower_name.removesuffix(f" {device_type}") + return name[: len(lower_name)] diff --git a/homeassistant/components/yale/event.py b/homeassistant/components/yale/event.py new file mode 100644 index 00000000000..935ba7376f8 --- /dev/null +++ b/homeassistant/components/yale/event.py @@ -0,0 +1,98 @@ +"""Support for yale events.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from yalexs.activity import Activity +from yalexs.doorbell import DoorbellDetail +from yalexs.lock import LockDetail + +from homeassistant.components.event import ( + EventDeviceClass, + EventEntity, + EventEntityDescription, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import YaleConfigEntry, YaleData +from .entity import YaleDescriptionEntity +from .util import ( + retrieve_ding_activity, + retrieve_doorbell_motion_activity, + retrieve_online_state, +) + + +@dataclass(kw_only=True, frozen=True) +class YaleEventEntityDescription(EventEntityDescription): + """Describe yale event entities.""" + + value_fn: Callable[[YaleData, DoorbellDetail | LockDetail], Activity | None] + + +TYPES_VIDEO_DOORBELL: tuple[YaleEventEntityDescription, ...] = ( + YaleEventEntityDescription( + key="motion", + translation_key="motion", + device_class=EventDeviceClass.MOTION, + event_types=["motion"], + value_fn=retrieve_doorbell_motion_activity, + ), +) + + +TYPES_DOORBELL: tuple[YaleEventEntityDescription, ...] = ( + YaleEventEntityDescription( + key="doorbell", + translation_key="doorbell", + device_class=EventDeviceClass.DOORBELL, + event_types=["ring"], + value_fn=retrieve_ding_activity, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the yale event platform.""" + data = config_entry.runtime_data + entities: list[YaleEventEntity] = [ + YaleEventEntity(data, lock, description) + for description in TYPES_DOORBELL + for lock in data.locks + if (detail := data.get_device_detail(lock.device_id)) and detail.doorbell + ] + entities.extend( + YaleEventEntity(data, doorbell, description) + for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) + async_add_entities(entities) + + +class YaleEventEntity(YaleDescriptionEntity, EventEntity): + """An yale event entity.""" + + entity_description: YaleEventEntityDescription + _last_activity: Activity | None = None + + @callback + def _update_from_data(self) -> None: + """Update from data.""" + self._attr_available = retrieve_online_state(self._data, self._detail) + current_activity = self.entity_description.value_fn(self._data, self._detail) + if not current_activity or current_activity == self._last_activity: + return + self._last_activity = current_activity + event_types = self.entity_description.event_types + if TYPE_CHECKING: + assert event_types is not None + self._trigger_event(event_type=event_types[0]) + self.async_write_ha_state() diff --git a/homeassistant/components/yale/gateway.py b/homeassistant/components/yale/gateway.py new file mode 100644 index 00000000000..cd7796182d2 --- /dev/null +++ b/homeassistant/components/yale/gateway.py @@ -0,0 +1,43 @@ +"""Handle Yale connection setup and authentication.""" + +import logging +from pathlib import Path + +from aiohttp 
import ClientSession +from yalexs.authenticator_common import Authentication, AuthenticationState +from yalexs.manager.gateway import Gateway + +from homeassistant.helpers import config_entry_oauth2_flow + +_LOGGER = logging.getLogger(__name__) + + +class YaleGateway(Gateway): + """Handle the connection to Yale.""" + + def __init__( + self, + config_path: Path, + aiohttp_session: ClientSession, + oauth_session: config_entry_oauth2_flow.OAuth2Session, + ) -> None: + """Init the connection.""" + super().__init__(config_path, aiohttp_session) + self._oauth_session = oauth_session + + async def async_get_access_token(self) -> str: + """Get access token.""" + await self._oauth_session.async_ensure_token_valid() + return self._oauth_session.token["access_token"] + + async def async_refresh_access_token_if_needed(self) -> None: + """Refresh the access token if needed.""" + await self._oauth_session.async_ensure_token_valid() + + async def async_authenticate(self) -> Authentication: + """Authenticate with the details provided to setup.""" + await self._oauth_session.async_ensure_token_valid() + self.authentication = Authentication( + AuthenticationState.AUTHENTICATED, None, None, None + ) + return self.authentication diff --git a/homeassistant/components/yale/icons.json b/homeassistant/components/yale/icons.json new file mode 100644 index 00000000000..b654b6d912a --- /dev/null +++ b/homeassistant/components/yale/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "binary_sensor": { + "image_capture": { + "default": "mdi:file-image" + } + } + } +} diff --git a/homeassistant/components/yale/lock.py b/homeassistant/components/yale/lock.py new file mode 100644 index 00000000000..b911c92ba0f --- /dev/null +++ b/homeassistant/components/yale/lock.py @@ -0,0 +1,147 @@ +"""Support for Yale lock.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +import logging +from typing import Any + +from aiohttp import ClientResponseError +from yalexs.activity import ActivityType, ActivityTypes +from yalexs.lock import Lock, LockStatus +from yalexs.util import get_latest_activity, update_lock_detail_from_activity + +from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature +from homeassistant.const import ATTR_BATTERY_LEVEL +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity +import homeassistant.util.dt as dt_util + +from . 
import YaleConfigEntry, YaleData +from .entity import YaleEntity + +_LOGGER = logging.getLogger(__name__) + +LOCK_JAMMED_ERR = 531 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Yale locks.""" + data = config_entry.runtime_data + async_add_entities(YaleLock(data, lock) for lock in data.locks) + + +class YaleLock(YaleEntity, RestoreEntity, LockEntity): + """Representation of an Yale lock.""" + + _attr_name = None + _lock_status: LockStatus | None = None + + def __init__(self, data: YaleData, device: Lock) -> None: + """Initialize the lock.""" + super().__init__(data, device, "lock") + if self._detail.unlatch_supported: + self._attr_supported_features = LockEntityFeature.OPEN + + async def async_lock(self, **kwargs: Any) -> None: + """Lock the device.""" + if self._data.push_updates_connected: + await self._data.async_lock_async(self._device_id, self._hyper_bridge) + return + await self._call_lock_operation(self._data.async_lock) + + async def async_open(self, **kwargs: Any) -> None: + """Open/unlatch the device.""" + if self._data.push_updates_connected: + await self._data.async_unlatch_async(self._device_id, self._hyper_bridge) + return + await self._call_lock_operation(self._data.async_unlatch) + + async def async_unlock(self, **kwargs: Any) -> None: + """Unlock the device.""" + if self._data.push_updates_connected: + await self._data.async_unlock_async(self._device_id, self._hyper_bridge) + return + await self._call_lock_operation(self._data.async_unlock) + + async def _call_lock_operation( + self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]] + ) -> None: + try: + activities = await lock_operation(self._device_id) + except ClientResponseError as err: + if err.status == LOCK_JAMMED_ERR: + self._detail.lock_status = LockStatus.JAMMED + self._detail.lock_status_datetime = dt_util.utcnow() + else: + raise + else: + for lock_activity in activities: + update_lock_detail_from_activity(self._detail, lock_activity) + + if self._update_lock_status_from_detail(): + _LOGGER.debug( + "async_signal_device_id_update (from lock operation): %s", + self._device_id, + ) + self._data.async_signal_device_id_update(self._device_id) + + def _update_lock_status_from_detail(self) -> bool: + self._attr_available = self._detail.bridge_is_online + + if self._lock_status != self._detail.lock_status: + self._lock_status = self._detail.lock_status + return True + return False + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor and update activity.""" + detail = self._detail + if lock_activity := self._get_latest({ActivityType.LOCK_OPERATION}): + self._attr_changed_by = lock_activity.operated_by + lock_activity_without_operator = self._get_latest( + {ActivityType.LOCK_OPERATION_WITHOUT_OPERATOR} + ) + if latest_activity := get_latest_activity( + lock_activity_without_operator, lock_activity + ): + if latest_activity.was_pushed: + self._detail.set_online(True) + update_lock_detail_from_activity(detail, latest_activity) + + if bridge_activity := self._get_latest({ActivityType.BRIDGE_OPERATION}): + update_lock_detail_from_activity(detail, bridge_activity) + + self._update_lock_status_from_detail() + lock_status = self._lock_status + if lock_status is None or lock_status is LockStatus.UNKNOWN: + self._attr_is_locked = None + else: + self._attr_is_locked = lock_status is LockStatus.LOCKED + self._attr_is_jammed = lock_status is LockStatus.JAMMED + 
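# Transitional statuses: LOCKING maps to is_locking, while both UNLOCKING
# and UNLATCHING are reported as is_unlocking below.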
self._attr_is_locking = lock_status is LockStatus.LOCKING + self._attr_is_unlocking = lock_status in ( + LockStatus.UNLOCKING, + LockStatus.UNLATCHING, + ) + self._attr_extra_state_attributes = {ATTR_BATTERY_LEVEL: detail.battery_level} + if keypad := detail.keypad: + self._attr_extra_state_attributes["keypad_battery_level"] = ( + keypad.battery_level + ) + + async def async_added_to_hass(self) -> None: + """Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log.""" + await super().async_added_to_hass() + + if not (last_state := await self.async_get_last_state()): + return + + if ATTR_CHANGED_BY in last_state.attributes: + self._attr_changed_by = last_state.attributes[ATTR_CHANGED_BY] diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json new file mode 100644 index 00000000000..7b7edfac77b --- /dev/null +++ b/homeassistant/components/yale/manifest.json @@ -0,0 +1,17 @@ +{ + "domain": "yale", + "name": "Yale", + "codeowners": ["@bdraco"], + "config_flow": true, + "dependencies": ["application_credentials", "cloud"], + "dhcp": [ + { + "hostname": "yale-connect-plus", + "macaddress": "00177A*" + } + ], + "documentation": "https://www.home-assistant.io/integrations/yale", + "iot_class": "cloud_push", + "loggers": ["socketio", "engineio", "yalexs"], + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] +} diff --git a/homeassistant/components/yale/sensor.py b/homeassistant/components/yale/sensor.py new file mode 100644 index 00000000000..bb3d4317277 --- /dev/null +++ b/homeassistant/components/yale/sensor.py @@ -0,0 +1,211 @@ +"""Support for Yale sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any, cast + +from yalexs.activity import ActivityType, LockOperationActivity +from yalexs.doorbell import Doorbell +from yalexs.keypad import KeypadDetail +from yalexs.lock import LockDetail + +from homeassistant.components.sensor import ( + RestoreSensor, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + ATTR_ENTITY_PICTURE, + PERCENTAGE, + STATE_UNAVAILABLE, + EntityCategory, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import YaleConfigEntry +from .const import ( + ATTR_OPERATION_AUTORELOCK, + ATTR_OPERATION_KEYPAD, + ATTR_OPERATION_MANUAL, + ATTR_OPERATION_METHOD, + ATTR_OPERATION_REMOTE, + ATTR_OPERATION_TAG, + OPERATION_METHOD_AUTORELOCK, + OPERATION_METHOD_KEYPAD, + OPERATION_METHOD_MANUAL, + OPERATION_METHOD_MOBILE_DEVICE, + OPERATION_METHOD_REMOTE, + OPERATION_METHOD_TAG, +) +from .entity import YaleDescriptionEntity, YaleEntity + + +def _retrieve_device_battery_state(detail: LockDetail) -> int: + """Get the latest state of the sensor.""" + return detail.battery_level + + +def _retrieve_linked_keypad_battery_state(detail: KeypadDetail) -> int | None: + """Get the latest state of the sensor.""" + return detail.battery_percentage + + +@dataclass(frozen=True, kw_only=True) +class YaleSensorEntityDescription[T: LockDetail | KeypadDetail]( + SensorEntityDescription +): + """Mixin for required keys.""" + + value_fn: Callable[[T], int | None] + + +SENSOR_TYPE_DEVICE_BATTERY = YaleSensorEntityDescription[LockDetail]( + key="device_battery", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + value_fn=_retrieve_device_battery_state, +) + +SENSOR_TYPE_KEYPAD_BATTERY = YaleSensorEntityDescription[KeypadDetail]( + key="linked_keypad_battery", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + value_fn=_retrieve_linked_keypad_battery_state, +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Yale sensors.""" + data = config_entry.runtime_data + entities: list[SensorEntity] = [] + + for device in data.locks: + detail = data.get_device_detail(device.device_id) + entities.append(YaleOperatorSensor(data, device, "lock_operator")) + if SENSOR_TYPE_DEVICE_BATTERY.value_fn(detail): + entities.append( + YaleBatterySensor[LockDetail](data, device, SENSOR_TYPE_DEVICE_BATTERY) + ) + if keypad := detail.keypad: + entities.append( + YaleBatterySensor[KeypadDetail]( + data, keypad, SENSOR_TYPE_KEYPAD_BATTERY + ) + ) + + entities.extend( + YaleBatterySensor[Doorbell](data, device, SENSOR_TYPE_DEVICE_BATTERY) + for device in data.doorbells + if SENSOR_TYPE_DEVICE_BATTERY.value_fn(data.get_device_detail(device.device_id)) + ) + + async_add_entities(entities) + + +class YaleOperatorSensor(YaleEntity, RestoreSensor): + """Representation of an Yale lock operation sensor.""" + + _attr_translation_key = "operator" + _operated_remote: bool | None = None + _operated_keypad: bool | None = None + _operated_manual: bool | None = None + _operated_tag: bool | None = None + _operated_autorelock: bool | None = None + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor and update activity.""" + self._attr_available = True + if lock_activity := self._get_latest({ActivityType.LOCK_OPERATION}): + lock_activity = cast(LockOperationActivity, lock_activity) + self._attr_native_value = lock_activity.operated_by + self._operated_remote = lock_activity.operated_remote + self._operated_keypad = lock_activity.operated_keypad + self._operated_manual = lock_activity.operated_manual + self._operated_tag = lock_activity.operated_tag + self._operated_autorelock = lock_activity.operated_autorelock + self._attr_entity_picture = lock_activity.operator_thumbnail_url + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the device specific state attributes.""" + attributes: dict[str, Any] = {} + + if 
self._operated_remote is not None: + attributes[ATTR_OPERATION_REMOTE] = self._operated_remote + if self._operated_keypad is not None: + attributes[ATTR_OPERATION_KEYPAD] = self._operated_keypad + if self._operated_manual is not None: + attributes[ATTR_OPERATION_MANUAL] = self._operated_manual + if self._operated_tag is not None: + attributes[ATTR_OPERATION_TAG] = self._operated_tag + if self._operated_autorelock is not None: + attributes[ATTR_OPERATION_AUTORELOCK] = self._operated_autorelock + + if self._operated_remote: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_REMOTE + elif self._operated_keypad: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_KEYPAD + elif self._operated_manual: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MANUAL + elif self._operated_tag: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_TAG + elif self._operated_autorelock: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_AUTORELOCK + else: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MOBILE_DEVICE + + return attributes + + async def async_added_to_hass(self) -> None: + """Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log.""" + await super().async_added_to_hass() + + last_state = await self.async_get_last_state() + last_sensor_state = await self.async_get_last_sensor_data() + if ( + not last_state + or not last_sensor_state + or last_state.state == STATE_UNAVAILABLE + ): + return + + self._attr_native_value = last_sensor_state.native_value + last_attrs = last_state.attributes + if ATTR_ENTITY_PICTURE in last_attrs: + self._attr_entity_picture = last_attrs[ATTR_ENTITY_PICTURE] + if ATTR_OPERATION_REMOTE in last_attrs: + self._operated_remote = last_attrs[ATTR_OPERATION_REMOTE] + if ATTR_OPERATION_KEYPAD in last_attrs: + self._operated_keypad = last_attrs[ATTR_OPERATION_KEYPAD] + if ATTR_OPERATION_MANUAL in last_attrs: + self._operated_manual = last_attrs[ATTR_OPERATION_MANUAL] + if ATTR_OPERATION_TAG in last_attrs: + self._operated_tag = last_attrs[ATTR_OPERATION_TAG] + if ATTR_OPERATION_AUTORELOCK in last_attrs: + self._operated_autorelock = last_attrs[ATTR_OPERATION_AUTORELOCK] + + +class YaleBatterySensor[T: LockDetail | KeypadDetail]( + YaleDescriptionEntity, SensorEntity +): + """Representation of an Yale sensor.""" + + entity_description: YaleSensorEntityDescription[T] + _attr_device_class = SensorDeviceClass.BATTERY + _attr_native_unit_of_measurement = PERCENTAGE + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor.""" + self._attr_native_value = self.entity_description.value_fn(self._detail) + self._attr_available = self._attr_native_value is not None diff --git a/homeassistant/components/yale/strings.json b/homeassistant/components/yale/strings.json new file mode 100644 index 00000000000..3fb1345a3b0 --- /dev/null +++ b/homeassistant/components/yale/strings.json @@ -0,0 +1,71 @@ +{ + "config": { + "step": { + "pick_implementation": { + "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", + "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "no_url_available": 
"[%key:common::config_flow::abort::oauth2_no_url_available%]", + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", + "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_invalid_user": "Reauthenticate must use the same account." + }, + "create_entry": { + "default": "[%key:common::config_flow::create_entry::authenticated%]" + } + }, + "entity": { + "binary_sensor": { + "ding": { + "name": "Doorbell ding" + }, + "image_capture": { + "name": "Image capture" + } + }, + "button": { + "wake": { + "name": "Wake" + } + }, + "camera": { + "camera": { + "name": "[%key:component::camera::title%]" + } + }, + "sensor": { + "operator": { + "name": "Operator" + } + }, + "event": { + "doorbell": { + "state_attributes": { + "event_type": { + "state": { + "ring": "Ring" + } + } + } + }, + "motion": { + "state_attributes": { + "event_type": { + "state": { + "motion": "Motion" + } + } + } + } + } + } +} diff --git a/homeassistant/components/yale/util.py b/homeassistant/components/yale/util.py new file mode 100644 index 00000000000..3462c576fd9 --- /dev/null +++ b/homeassistant/components/yale/util.py @@ -0,0 +1,78 @@ +"""Yale util functions.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from functools import partial + +import aiohttp +from yalexs.activity import ACTION_DOORBELL_CALL_MISSED, Activity, ActivityType +from yalexs.doorbell import DoorbellDetail +from yalexs.lock import LockDetail +from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import aiohttp_client + +from . 
import YaleData + TIME_TO_DECLARE_DETECTION = timedelta(seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds()) + + +@callback +def async_create_yale_clientsession(hass: HomeAssistant) -> aiohttp.ClientSession: + """Create an aiohttp session for the yale integration.""" + # Create an aiohttp session instead of using the default one since the + # default one is likely to trigger yale's WAF if another integration + # is also using Cloudflare + return aiohttp_client.async_create_clientsession(hass) + + +def retrieve_time_based_activity( + activities: set[ActivityType], data: YaleData, detail: DoorbellDetail | LockDetail +) -> Activity | None: + """Get the latest state of the sensor.""" + stream = data.activity_stream + if latest := stream.get_latest_device_activity(detail.device_id, activities): + return _activity_time_based(latest) + return None + + +_RING_ACTIVITIES = {ActivityType.DOORBELL_DING} + + +def retrieve_ding_activity( + data: YaleData, detail: DoorbellDetail | LockDetail +) -> Activity | None: + """Get the ring/ding state.""" + stream = data.activity_stream + latest = stream.get_latest_device_activity(detail.device_id, _RING_ACTIVITIES) + if latest is None or ( + data.push_updates_connected and latest.action == ACTION_DOORBELL_CALL_MISSED + ): + return None + return _activity_time_based(latest) + + +retrieve_doorbell_motion_activity = partial( + retrieve_time_based_activity, {ActivityType.DOORBELL_MOTION} +) + + +def _activity_time_based(latest: Activity) -> Activity | None: + """Get the latest state of the sensor.""" + start = latest.activity_start_time + end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION + if start <= datetime.now() <= end: + return latest + return None + + +def retrieve_online_state(data: YaleData, detail: DoorbellDetail | LockDetail) -> bool: + """Get the latest state of the sensor.""" + # The doorbell will go into standby mode when there is no motion + # for a short while. 
It will wake by itself when needed so we need + # to consider is available or we will not report motion or dings + if isinstance(detail, DoorbellDetail): + return detail.is_online or detail.is_standby + return detail.bridge_is_online diff --git a/homeassistant/components/yale_home/manifest.json b/homeassistant/components/yale_home/manifest.json index 0e45b0da7d0..c497fa3fe34 100644 --- a/homeassistant/components/yale_home/manifest.json +++ b/homeassistant/components/yale_home/manifest.json @@ -2,5 +2,5 @@ "domain": "yale_home", "name": "Yale Home", "integration_type": "virtual", - "supported_by": "august" + "supported_by": "yale" } diff --git a/homeassistant/components/yale_smart_alarm/__init__.py b/homeassistant/components/yale_smart_alarm/__init__.py index 3c853afb6fd..d67e136be4a 100644 --- a/homeassistant/components/yale_smart_alarm/__init__.py +++ b/homeassistant/components/yale_smart_alarm/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations from homeassistant.components.lock import CONF_DEFAULT_CODE, DOMAIN as LOCK_DOMAIN from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_CODE +from homeassistant.const import CONF_CODE, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -27,21 +27,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool return True -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: YaleConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: """Migrate old entry.""" LOGGER.debug("Migrating from version %s", entry.version) if entry.version == 1: + new_options = entry.options.copy() if config_entry_default_code := entry.options.get(CONF_CODE): entity_reg = er.async_get(hass) entries = er.async_entries_for_config_entry(entity_reg, entry.entry_id) @@ -52,13 +53,16 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: LOCK_DOMAIN, {CONF_DEFAULT_CODE: config_entry_default_code}, ) - new_options = entry.options.copy() del new_options[CONF_CODE] - hass.config_entries.async_update_entry(entry, options=new_options) + hass.config_entries.async_update_entry(entry, options=new_options, version=2) - hass.config_entries.async_update_entry(entry, version=2) + if entry.version == 2 and entry.minor_version == 1: + # Removes name from entry data + new_data = entry.data.copy() + del new_data[CONF_NAME] + hass.config_entries.async_update_entry(entry, data=new_data, minor_version=2) - LOGGER.info("Migration to version %s successful", entry.version) + LOGGER.debug("Migration to version %s successful", entry.version) return True diff --git a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py index 2fc56a9e5dd..8244d96064a 100644 --- a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py +++ b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py @@ -13,12 +13,11 @@ 
from yalesmartalarmclient.const import ( from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) -from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType from . import YaleConfigEntry from .const import DOMAIN, STATE_MAP, YALE_ALL_ERRORS @@ -47,7 +46,7 @@ class YaleAlarmDevice(YaleAlarmEntity, AlarmControlPanelEntity): def __init__(self, coordinator: YaleDataUpdateCoordinator) -> None: """Initialize the Yale Alarm Device.""" super().__init__(coordinator) - self._attr_unique_id = coordinator.entry.entry_id + self._attr_unique_id = coordinator.config_entry.entry_id async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" @@ -84,7 +83,7 @@ class YaleAlarmDevice(YaleAlarmEntity, AlarmControlPanelEntity): translation_domain=DOMAIN, translation_key="set_alarm", translation_placeholders={ - "name": self.coordinator.entry.data[CONF_NAME], + "name": self.coordinator.config_entry.title, "error": str(error), }, ) from error @@ -106,6 +105,6 @@ class YaleAlarmDevice(YaleAlarmEntity, AlarmControlPanelEntity): return super().available @property - def state(self) -> StateType: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the alarm.""" return STATE_MAP.get(self.coordinator.data["alarm"]) diff --git a/homeassistant/components/yale_smart_alarm/binary_sensor.py b/homeassistant/components/yale_smart_alarm/binary_sensor.py index a1b94b907de..17b6035321a 100644 --- a/homeassistant/components/yale_smart_alarm/binary_sensor.py +++ b/homeassistant/components/yale_smart_alarm/binary_sensor.py @@ -49,9 +49,13 @@ async def async_setup_entry( """Set up the Yale binary sensor entry.""" coordinator = entry.runtime_data - sensors: list[YaleDoorSensor | YaleProblemSensor] = [ + sensors: list[YaleDoorSensor | YaleDoorBatterySensor | YaleProblemSensor] = [ YaleDoorSensor(coordinator, data) for data in coordinator.data["door_windows"] ] + sensors.extend( + YaleDoorBatterySensor(coordinator, data) + for data in coordinator.data["door_windows"] + ) sensors.extend( YaleProblemSensor(coordinator, description) for description in SENSOR_TYPES ) @@ -70,6 +74,27 @@ class YaleDoorSensor(YaleEntity, BinarySensorEntity): return bool(self.coordinator.data["sensor_map"][self._attr_unique_id] == "open") +class YaleDoorBatterySensor(YaleEntity, BinarySensorEntity): + """Representation of a Yale door sensor battery status.""" + + _attr_device_class = BinarySensorDeviceClass.BATTERY + + def __init__( + self, + coordinator: YaleDataUpdateCoordinator, + data: dict, + ) -> None: + """Initiate Yale door battery Sensor.""" + super().__init__(coordinator, data) + self._attr_unique_id = f"{data["address"]}-battery" + + @property + def is_on(self) -> bool: + """Return true if the battery is low.""" + state: bool = self.coordinator.data["sensor_battery_map"][self._attr_unique_id] + return state + + class YaleProblemSensor(YaleAlarmEntity, BinarySensorEntity): """Representation of a Yale problem sensor.""" @@ -83,7 +108,9 @@ class YaleProblemSensor(YaleAlarmEntity, BinarySensorEntity): """Initiate Yale Problem Sensor.""" super().__init__(coordinator) self.entity_description = entity_description - self._attr_unique_id = f"{coordinator.entry.entry_id}-{entity_description.key}" + 
self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}-{entity_description.key}" + ) @property def is_on(self) -> bool: diff --git a/homeassistant/components/yale_smart_alarm/config_flow.py b/homeassistant/components/yale_smart_alarm/config_flow.py index 644160a8d93..3ceee367284 100644 --- a/homeassistant/components/yale_smart_alarm/config_flow.py +++ b/homeassistant/components/yale_smart_alarm/config_flow.py @@ -15,7 +15,7 @@ from homeassistant.config_entries import ( ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback import homeassistant.helpers.config_validation as cv @@ -23,10 +23,7 @@ from .const import ( CONF_AREA_ID, CONF_LOCK_CODE_DIGITS, DEFAULT_AREA_ID, - DEFAULT_LOCK_CODE_DIGITS, - DEFAULT_NAME, DOMAIN, - LOGGER, YALE_BASE_ERRORS, ) @@ -40,66 +37,68 @@ DATA_SCHEMA = vol.Schema( DATA_SCHEMA_AUTH = vol.Schema( { - vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, } ) +OPTIONS_SCHEMA = vol.Schema( + { + vol.Optional( + CONF_LOCK_CODE_DIGITS, + ): int, + } +) + + +def validate_credentials(username: str, password: str) -> dict[str, Any]: + """Validate credentials.""" + errors: dict[str, str] = {} + try: + YaleSmartAlarmClient(username, password) + except AuthenticationError: + errors = {"base": "invalid_auth"} + except YALE_BASE_ERRORS: + errors = {"base": "cannot_connect"} + return errors + class YaleConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Yale integration.""" VERSION = 2 - - entry: ConfigEntry | None + MINOR_VERSION = 2 @staticmethod @callback def async_get_options_flow(config_entry: ConfigEntry) -> YaleOptionsFlowHandler: """Get the options flow for this handler.""" - return YaleOptionsFlowHandler(config_entry) + return YaleOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Yale.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: - username = user_input[CONF_USERNAME] + reauth_entry = self._get_reauth_entry() + username = reauth_entry.data[CONF_USERNAME] password = user_input[CONF_PASSWORD] - try: - await self.hass.async_add_executor_job( - YaleSmartAlarmClient, username, password - ) - except AuthenticationError as error: - LOGGER.error("Authentication failed. 
Check credentials %s", error) - errors = {"base": "invalid_auth"} - except YALE_BASE_ERRORS as error: - LOGGER.error("Connection to API failed %s", error) - errors = {"base": "cannot_connect"} - + errors = await self.hass.async_add_executor_job( + validate_credentials, username, password + ) if not errors: - existing_entry = await self.async_set_unique_id(username) - if existing_entry and self.entry: - self.hass.config_entries.async_update_entry( - existing_entry, - data={ - **self.entry.data, - CONF_USERNAME: username, - CONF_PASSWORD: password, - }, - ) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_PASSWORD: password}, + ) return self.async_show_form( step_id="reauth_confirm", @@ -107,29 +106,51 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of existing entry.""" + errors: dict[str, str] = {} + + if user_input is not None: + reconfigure_entry = self._get_reconfigure_entry() + username = user_input[CONF_USERNAME] + + errors = await self.hass.async_add_executor_job( + validate_credentials, username, user_input[CONF_PASSWORD] + ) + if ( + username != reconfigure_entry.unique_id + and await self.async_set_unique_id(username) + ): + errors["base"] = "unique_id_exists" + if not errors: + return self.async_update_reload_and_abort( + reconfigure_entry, + unique_id=username, + data_updates=user_input, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + errors=errors, + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: username = user_input[CONF_USERNAME] password = user_input[CONF_PASSWORD] - name = DEFAULT_NAME area = user_input.get(CONF_AREA_ID, DEFAULT_AREA_ID) - try: - await self.hass.async_add_executor_job( - YaleSmartAlarmClient, username, password - ) - except AuthenticationError as error: - LOGGER.error("Authentication failed. 
Check credentials %s", error) - errors = {"base": "invalid_auth"} - except YALE_BASE_ERRORS as error: - LOGGER.error("Connection to API failed %s", error) - errors = {"base": "cannot_connect"} - + errors = await self.hass.async_add_executor_job( + validate_credentials, username, password + ) if not errors: await self.async_set_unique_id(username) self._abort_if_unique_id_configured() @@ -139,7 +160,6 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): data={ CONF_USERNAME: username, CONF_PASSWORD: password, - CONF_NAME: name, CONF_AREA_ID: area, }, ) @@ -154,32 +174,18 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): class YaleOptionsFlowHandler(OptionsFlow): """Handle Yale options.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize Yale options flow.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage Yale options.""" - errors: dict[str, Any] = {} - if user_input: + if user_input is not None: return self.async_create_entry(data=user_input) return self.async_show_form( step_id="init", - data_schema=vol.Schema( - { - vol.Optional( - CONF_LOCK_CODE_DIGITS, - description={ - "suggested_value": self.entry.options.get( - CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS - ) - }, - ): int, - } + data_schema=self.add_suggested_values_to_schema( + OPTIONS_SCHEMA, + self.config_entry.options, ), - errors=errors, ) diff --git a/homeassistant/components/yale_smart_alarm/const.py b/homeassistant/components/yale_smart_alarm/const.py index e7b732c6cf9..14e31268ec9 100644 --- a/homeassistant/components/yale_smart_alarm/const.py +++ b/homeassistant/components/yale_smart_alarm/const.py @@ -9,12 +9,8 @@ from yalesmartalarmclient.client import ( ) from yalesmartalarmclient.exceptions import AuthenticationError, UnknownError -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - Platform, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState +from homeassistant.const import Platform CONF_AREA_ID = "area_id" CONF_LOCK_CODE_DIGITS = "lock_code_digits" @@ -39,13 +35,15 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, Platform.LOCK, + Platform.SELECT, Platform.SENSOR, + Platform.SWITCH, ] STATE_MAP = { - YALE_STATE_DISARM: STATE_ALARM_DISARMED, - YALE_STATE_ARM_PARTIAL: STATE_ALARM_ARMED_HOME, - YALE_STATE_ARM_FULL: STATE_ALARM_ARMED_AWAY, + YALE_STATE_DISARM: AlarmControlPanelState.DISARMED, + YALE_STATE_ARM_PARTIAL: AlarmControlPanelState.ARMED_HOME, + YALE_STATE_ARM_FULL: AlarmControlPanelState.ARMED_AWAY, } YALE_BASE_ERRORS = ( diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py index 1067b9279a4..7ece2a3448b 100644 --- a/homeassistant/components/yale_smart_alarm/coordinator.py +++ b/homeassistant/components/yale_smart_alarm/coordinator.py @@ -3,17 +3,20 @@ from __future__ import annotations from datetime import timedelta -from typing import Any +from typing import TYPE_CHECKING, Any +from yalesmartalarmclient import YaleLock from yalesmartalarmclient.client import YaleSmartAlarmClient from yalesmartalarmclient.exceptions import AuthenticationError -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, 
UpdateFailed +if TYPE_CHECKING: + from . import YaleConfigEntry + from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER, YALE_BASE_ERRORS @@ -21,24 +24,29 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """A Yale Data Update Coordinator.""" yale: YaleSmartAlarmClient + config_entry: YaleConfigEntry - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, config_entry: YaleConfigEntry) -> None: """Initialize the Yale hub.""" - self.entry = entry super().__init__( hass, LOGGER, + config_entry=config_entry, name=DOMAIN, update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), always_update=False, ) + self.locks: list[YaleLock] = [] async def _async_setup(self) -> None: """Set up connection to Yale.""" try: - self.yale = YaleSmartAlarmClient( - self.entry.data[CONF_USERNAME], self.entry.data[CONF_PASSWORD] + self.yale = await self.hass.async_add_executor_job( + YaleSmartAlarmClient, + self.config_entry.data[CONF_USERNAME], + self.config_entry.data[CONF_PASSWORD], ) + self.locks = await self.hass.async_add_executor_job(self.yale.get_locks) except AuthenticationError as error: raise ConfigEntryAuthFailed from error except YALE_BASE_ERRORS as error: @@ -49,66 +57,15 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): updates = await self.hass.async_add_executor_job(self.get_updates) - locks = [] door_windows = [] temp_sensors = [] for device in updates["cycle"]["device_status"]: state = device["status1"] - if device["type"] == "device_type.door_lock": - lock_status_str = device["minigw_lock_status"] - lock_status = int(str(lock_status_str or 0), 16) - closed = (lock_status & 16) == 16 - locked = (lock_status & 1) == 1 - if not lock_status and "device_status.lock" in state: - device["_state"] = "locked" - device["_state2"] = "unknown" - locks.append(device) - continue - if not lock_status and "device_status.unlock" in state: - device["_state"] = "unlocked" - device["_state2"] = "unknown" - locks.append(device) - continue - if ( - lock_status - and ( - "device_status.lock" in state or "device_status.unlock" in state - ) - and closed - and locked - ): - device["_state"] = "locked" - device["_state2"] = "closed" - locks.append(device) - continue - if ( - lock_status - and ( - "device_status.lock" in state or "device_status.unlock" in state - ) - and closed - and not locked - ): - device["_state"] = "unlocked" - device["_state2"] = "closed" - locks.append(device) - continue - if ( - lock_status - and ( - "device_status.lock" in state or "device_status.unlock" in state - ) - and not closed - ): - device["_state"] = "unlocked" - device["_state2"] = "open" - locks.append(device) - continue - device["_state"] = "unavailable" - locks.append(device) - continue if device["type"] == "device_type.door_contact": + device["_battery"] = False + if "device_status.low_battery" in state: + device["_battery"] = True if "device_status.dc_close" in state: device["_state"] = "closed" door_windows.append(device) @@ -126,19 +83,21 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): _sensor_map = { contact["address"]: contact["_state"] for contact in door_windows } - _lock_map = {lock["address"]: lock["_state"] for lock in locks} + _sensor_battery_map = { + f"{contact["address"]}-battery": contact["_battery"] + for contact in door_windows + } _temp_map = {temp["address"]: temp["status_temp"] for temp in temp_sensors} return { "alarm": updates["arm_status"], - "locks": locks, "door_windows": 
door_windows, "temp_sensors": temp_sensors, "status": updates["status"], "online": updates["online"], "sensor_map": _sensor_map, + "sensor_battery_map": _sensor_battery_map, "temp_map": _temp_map, - "lock_map": _lock_map, "panel_info": updates["panel_info"], } @@ -147,15 +106,27 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): try: arm_status = self.yale.get_armed_status() data = self.yale.get_information() + if TYPE_CHECKING: + assert data.cycle + for device in data.cycle["data"]["device_status"]: + if device["type"] == YaleLock.DEVICE_TYPE: + for lock in self.locks: + if lock.name == device["name"]: + lock.update(device) except AuthenticationError as error: raise ConfigEntryAuthFailed from error except YALE_BASE_ERRORS as error: raise UpdateFailed from error + cycle = data.cycle["data"] if data.cycle else None + status = data.status["data"] if data.status else None + online = data.online["data"] if data.online else None + panel_info = data.panel_info["data"] if data.panel_info else None + return { "arm_status": arm_status, - "cycle": data.cycle, - "status": data.status, - "online": data.online, - "panel_info": data.panel_info, + "cycle": cycle, + "status": status, + "online": online, + "panel_info": panel_info, } diff --git a/homeassistant/components/yale_smart_alarm/entity.py b/homeassistant/components/yale_smart_alarm/entity.py index 179e20d509d..2610f54f0a9 100644 --- a/homeassistant/components/yale_smart_alarm/entity.py +++ b/homeassistant/components/yale_smart_alarm/entity.py @@ -1,6 +1,8 @@ """Base class for yale_smart_alarm entity.""" -from homeassistant.const import CONF_NAME, CONF_USERNAME +from yalesmartalarmclient import YaleLock + +from homeassistant.const import CONF_USERNAME from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import Entity from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -9,7 +11,7 @@ from .const import DOMAIN, MANUFACTURER, MODEL from .coordinator import YaleDataUpdateCoordinator -class YaleEntity(CoordinatorEntity[YaleDataUpdateCoordinator], Entity): +class YaleEntity(CoordinatorEntity[YaleDataUpdateCoordinator]): """Base implementation for Yale device.""" _attr_has_entity_name = True @@ -23,10 +25,29 @@ class YaleEntity(CoordinatorEntity[YaleDataUpdateCoordinator], Entity): manufacturer=MANUFACTURER, model=MODEL, identifiers={(DOMAIN, data["address"])}, - via_device=(DOMAIN, self.coordinator.entry.data[CONF_USERNAME]), + via_device=(DOMAIN, coordinator.config_entry.data[CONF_USERNAME]), ) +class YaleLockEntity(CoordinatorEntity[YaleDataUpdateCoordinator]): + """Base implementation for Yale lock device.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: YaleDataUpdateCoordinator, lock: YaleLock) -> None: + """Initialize a Yale device.""" + super().__init__(coordinator) + self._attr_unique_id: str = lock.sid() + self._attr_device_info = DeviceInfo( + name=lock.name, + manufacturer=MANUFACTURER, + model=MODEL, + identifiers={(DOMAIN, lock.sid())}, + via_device=(DOMAIN, coordinator.config_entry.data[CONF_USERNAME]), + ) + self.lock_data = lock + + class YaleAlarmEntity(CoordinatorEntity[YaleDataUpdateCoordinator], Entity): """Base implementation for Yale Alarm device.""" @@ -37,10 +58,10 @@ class YaleAlarmEntity(CoordinatorEntity[YaleDataUpdateCoordinator], Entity): super().__init__(coordinator) panel_info = coordinator.data["panel_info"] self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN,
coordinator.entry.data[CONF_USERNAME])}, + identifiers={(DOMAIN, coordinator.config_entry.data[CONF_USERNAME])}, manufacturer=MANUFACTURER, model=MODEL, - name=coordinator.entry.data[CONF_NAME], + name=coordinator.config_entry.title, connections={(CONNECTION_NETWORK_MAC, panel_info["mac"])}, sw_version=panel_info["version"], ) diff --git a/homeassistant/components/yale_smart_alarm/icons.json b/homeassistant/components/yale_smart_alarm/icons.json index 4cb5888a406..fb83ea88f97 100644 --- a/homeassistant/components/yale_smart_alarm/icons.json +++ b/homeassistant/components/yale_smart_alarm/icons.json @@ -4,6 +4,16 @@ "panic": { "default": "mdi:alarm-light" } + }, + "select": { + "volume": { + "default": "mdi:volume-high", + "state": { + "high": "mdi:volume-high", + "low": "mdi:volume-low", + "off": "mdi:volume-off" + } + } } } } diff --git a/homeassistant/components/yale_smart_alarm/lock.py b/homeassistant/components/yale_smart_alarm/lock.py index 386e546afbf..7a93baf0827 100644 --- a/homeassistant/components/yale_smart_alarm/lock.py +++ b/homeassistant/components/yale_smart_alarm/lock.py @@ -2,12 +2,14 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import Any -from homeassistant.components.lock import LockEntity +from yalesmartalarmclient import YaleLock, YaleLockState + +from homeassistant.components.lock import LockEntity, LockState from homeassistant.const import ATTR_CODE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import YaleConfigEntry @@ -18,7 +20,13 @@ from .const import ( YALE_ALL_ERRORS, ) from .coordinator import YaleDataUpdateCoordinator -from .entity import YaleEntity +from .entity import YaleLockEntity + +LOCK_STATE_MAP = { + YaleLockState.LOCKED: LockState.LOCKED, + YaleLockState.UNLOCKED: LockState.UNLOCKED, + YaleLockState.DOOR_OPEN: LockState.OPEN, +} async def async_setup_entry( @@ -30,68 +38,55 @@ async def async_setup_entry( code_format = entry.options.get(CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS) async_add_entities( - YaleDoorlock(coordinator, data, code_format) - for data in coordinator.data["locks"] + YaleDoorlock(coordinator, lock, code_format) for lock in coordinator.locks ) -class YaleDoorlock(YaleEntity, LockEntity): +class YaleDoorlock(YaleLockEntity, LockEntity): """Representation of a Yale doorlock.""" _attr_name = None def __init__( - self, coordinator: YaleDataUpdateCoordinator, data: dict, code_format: int + self, coordinator: YaleDataUpdateCoordinator, lock: YaleLock, code_format: int ) -> None: """Initialize the Yale Lock Device.""" - super().__init__(coordinator, data) + super().__init__(coordinator, lock) self._attr_code_format = rf"^\d{{{code_format}}}$" - self.lock_name: str = data["name"] async def async_unlock(self, **kwargs: Any) -> None: """Send unlock command.""" code: str | None = kwargs.get(ATTR_CODE) - return await self.async_set_lock("unlocked", code) + return await self.async_set_lock(YaleLockState.UNLOCKED, code) async def async_lock(self, **kwargs: Any) -> None: """Send lock command.""" - return await self.async_set_lock("locked", None) + return await self.async_set_lock(YaleLockState.LOCKED, None) - async def async_set_lock(self, command: str, code: str | None) -> None: + async def async_set_lock(self, state: YaleLockState, code: str | None) -> None: """Set lock.""" - if 
TYPE_CHECKING: - assert self.coordinator.yale, "Connection to API is missing" - if command == "unlocked" and not code: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="no_code", - ) - + lock_state = False try: - get_lock = await self.hass.async_add_executor_job( - self.coordinator.yale.lock_api.get, self.lock_name - ) - if get_lock and command == "locked": + if state is YaleLockState.LOCKED: lock_state = await self.hass.async_add_executor_job( - self.coordinator.yale.lock_api.close_lock, - get_lock, + self.lock_data.close ) - if code and get_lock and command == "unlocked": + if code and state is YaleLockState.UNLOCKED: lock_state = await self.hass.async_add_executor_job( - self.coordinator.yale.lock_api.open_lock, get_lock, code + self.lock_data.open, code ) except YALE_ALL_ERRORS as error: raise HomeAssistantError( translation_domain=DOMAIN, translation_key="set_lock", translation_placeholders={ - "name": self.lock_name, + "name": self.lock_data.name, "error": str(error), }, ) from error if lock_state: - self.coordinator.data["lock_map"][self._attr_unique_id] = command + self.lock_data.set_state(state) self.async_write_ha_state() return raise HomeAssistantError( @@ -102,4 +97,9 @@ class YaleDoorlock(YaleEntity, LockEntity): @property def is_locked(self) -> bool | None: """Return true if the lock is locked.""" - return bool(self.coordinator.data["lock_map"][self._attr_unique_id] == "locked") + return LOCK_STATE_MAP.get(self.lock_data.state()) == LockState.LOCKED + + @property + def is_open(self) -> bool | None: + """Return true if the lock is open.""" + return LOCK_STATE_MAP.get(self.lock_data.state()) == LockState.OPEN diff --git a/homeassistant/components/yale_smart_alarm/manifest.json b/homeassistant/components/yale_smart_alarm/manifest.json index 92dd774d1d9..9a13cf72db9 100644 --- a/homeassistant/components/yale_smart_alarm/manifest.json +++ b/homeassistant/components/yale_smart_alarm/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale_smart_alarm", "iot_class": "cloud_polling", "loggers": ["yalesmartalarmclient"], - "requirements": ["yalesmartalarmclient==0.4.0"] + "requirements": ["yalesmartalarmclient==0.4.3"] } diff --git a/homeassistant/components/yale_smart_alarm/select.py b/homeassistant/components/yale_smart_alarm/select.py new file mode 100644 index 00000000000..55b56dd8e54 --- /dev/null +++ b/homeassistant/components/yale_smart_alarm/select.py @@ -0,0 +1,58 @@ +"""Select for Yale Alarm.""" + +from __future__ import annotations + +from yalesmartalarmclient import YaleLock, YaleLockVolume + +from homeassistant.components.select import SelectEntity +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import YaleConfigEntry +from .coordinator import YaleDataUpdateCoordinator +from .entity import YaleLockEntity + +VOLUME_OPTIONS = {value.name.lower(): str(value.value) for value in YaleLockVolume} + + +async def async_setup_entry( + hass: HomeAssistant, entry: YaleConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up the Yale select entry.""" + + coordinator = entry.runtime_data + + async_add_entities( + YaleLockVolumeSelect(coordinator, lock) + for lock in coordinator.locks + if lock.supports_lock_config() + ) + + +class YaleLockVolumeSelect(YaleLockEntity, SelectEntity): + """Representation of a Yale lock volume select.""" + + _attr_translation_key = "volume" + + def __init__(self, coordinator: YaleDataUpdateCoordinator, lock: YaleLock) -> None: + """Initialize the Yale volume select.""" + super().__init__(coordinator, lock) + self._attr_unique_id = f"{lock.sid()}-volume" + self._attr_current_option = self.lock_data.volume().name.lower() + self._attr_options = [volume.name.lower() for volume in YaleLockVolume] + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + convert_to_value = VOLUME_OPTIONS[option] + option_enum = YaleLockVolume(convert_to_value) + if await self.hass.async_add_executor_job( + self.lock_data.set_volume, option_enum + ): + self._attr_current_option = self.lock_data.volume().name.lower() + self.async_write_ha_state() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._attr_current_option = self.lock_data.volume().name.lower() + super()._handle_coordinator_update() diff --git a/homeassistant/components/yale_smart_alarm/strings.json b/homeassistant/components/yale_smart_alarm/strings.json index 63260c03e7f..bd3ba0f0186 100644 --- a/homeassistant/components/yale_smart_alarm/strings.json +++ b/homeassistant/components/yale_smart_alarm/strings.json @@ -2,11 +2,13 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unique_id_exists": "Another config entry with this username already exists" }, "step": { "user": { @@ -18,10 +20,14 @@ } }, "reauth_confirm": { + "data": { + "password": "[%key:common::config_flow::data::password%]" + } + }, + "reconfigure": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "name": "[%key:common::config_flow::data::name%]", "area_id": "[%key:component::yale_smart_alarm::config::step::user::data::area_id%]" } } @@ -55,6 +61,21 @@ "panic": { "name": "Panic button" } + }, + "switch": { + "autolock": { + "name": "Autolock" + } + }, + "select": { + "volume": { + "name": "Volume", + "state": { + "high": "High", + "low": "Low", + "off": "[%key:common::state::off%]" + } + } } }, "exceptions": { @@ -67,9 +88,6 @@ "set_lock": { "message": "Could not set lock for {name}: {error}" }, - "no_code": { - "message": "Can not unlock without code" - }, "could_not_change_lock": { "message": "Could not set lock, check
system ready for lock" }, diff --git a/homeassistant/components/yale_smart_alarm/switch.py b/homeassistant/components/yale_smart_alarm/switch.py new file mode 100644 index 00000000000..e8c0817c2de --- /dev/null +++ b/homeassistant/components/yale_smart_alarm/switch.py @@ -0,0 +1,59 @@ +"""Switches for Yale Alarm.""" + +from __future__ import annotations + +from typing import Any + +from yalesmartalarmclient import YaleLock + +from homeassistant.components.switch import SwitchEntity +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import YaleConfigEntry +from .coordinator import YaleDataUpdateCoordinator +from .entity import YaleLockEntity + + +async def async_setup_entry( + hass: HomeAssistant, entry: YaleConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up the Yale switch entry.""" + + coordinator = entry.runtime_data + + async_add_entities( + YaleAutolockSwitch(coordinator, lock) + for lock in coordinator.locks + if lock.supports_lock_config() + ) + + +class YaleAutolockSwitch(YaleLockEntity, SwitchEntity): + """Representation of a Yale autolock switch.""" + + _attr_translation_key = "autolock" + + def __init__(self, coordinator: YaleDataUpdateCoordinator, lock: YaleLock) -> None: + """Initialize the Yale Autolock Switch.""" + super().__init__(coordinator, lock) + self._attr_unique_id = f"{lock.sid()}-autolock" + self._attr_is_on = self.lock_data.autolock() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + if await self.hass.async_add_executor_job(self.lock_data.set_autolock, True): + self._attr_is_on = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + if await self.hass.async_add_executor_job(self.lock_data.set_autolock, False): + self._attr_is_on = False + self.async_write_ha_state() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._attr_is_on = self.lock_data.autolock() + super()._handle_coordinator_update() diff --git a/homeassistant/components/yalexs_ble/config_flow.py b/homeassistant/components/yalexs_ble/config_flow.py index c0df4e26821..6de74759686 100644 --- a/homeassistant/components/yalexs_ble/config_flow.py +++ b/homeassistant/components/yalexs_ble/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from typing import Any +from typing import Any, Self from bleak_retry_connector import BleakError, BLEDevice import voluptuous as vol @@ -68,12 +68,16 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + _address: str | None = None + _local_name_is_unique = False + active = False + local_name: str | None = None + def __init__(self) -> None: """Initialize the config flow.""" self._discovery_info: BluetoothServiceInfoBleak | None = None self._discovered_devices: dict[str, BluetoothServiceInfoBleak] = {} self._lock_cfg: ValidatedLockConfig | None = None - self._reauth_entry: ConfigEntry | None = None async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfoBleak @@ -81,7 +85,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the bluetooth discovery step.""" await self.async_set_unique_id(discovery_info.address) self._abort_if_unique_id_configured() - self.context["local_name"] = discovery_info.name + self.local_name = discovery_info.name self._discovery_info = discovery_info 
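# --- Editor's aside: hedged illustration, not part of the original diff --------
# The instance attributes introduced in this file (local_name, _address,
# _local_name_is_unique, active) feed the new is_matching() hook shown further
# down, which hass.config_entries.flow.async_has_matching_flow(self) evaluates
# against other in-progress flows. Below is a standalone sketch of that matching
# condition only (the real hook additionally aborts or keeps the other flow
# depending on other_flow.active); all names and values here are hypothetical.
def flows_target_same_lock(
    local_name_is_unique: bool,
    local_name: str | None,
    address: str | None,
    other_local_name: str | None,
    other_unique_id: str | None,
) -> bool:
    """Return True when the other flow appears to target the same device."""
    return (local_name_is_unique and other_local_name == local_name) or (
        other_unique_id == address
    )


# Same unique local name -> match; different BLE addresses alone -> no match.
assert flows_target_same_lock(True, "LOCK-1234", None, "LOCK-1234", None)
assert not flows_target_same_lock(False, "Yale", "AA:BB:CC:DD:EE:FF", "Yale", "11:22:33:44:55:66")
# --------------------------------------------------------------------------------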
self.context["title_placeholders"] = { "name": human_readable_name( @@ -103,8 +107,8 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): ) address = lock_cfg.address - local_name = lock_cfg.local_name - hass = self.hass + self.local_name = lock_cfg.local_name + self._local_name_is_unique = local_name_is_unique(self.local_name) # We do not want to raise on progress as integration_discovery takes # precedence over other discovery flows since we already have the keys. @@ -116,7 +120,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured(updates=new_data) for entry in self._async_current_entries(): if ( - local_name_is_unique(lock_cfg.local_name) + self._local_name_is_unique and entry.data.get(CONF_LOCAL_NAME) == lock_cfg.local_name ): return self.async_update_reload_and_abort( @@ -124,27 +128,14 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): ) self._discovery_info = async_find_existing_service_info( - hass, local_name, address + self.hass, self.local_name, address ) if not self._discovery_info: return self.async_abort(reason="no_devices_found") - # Integration discovery should abort other flows unless they - # are already in the process of being set up since this discovery - # will already have all the keys and the user can simply confirm. - for progress in self._async_in_progress(include_uninitialized=True): - context = progress["context"] - if ( - local_name_is_unique(local_name) - and context.get("local_name") == local_name - ) or context.get("unique_id") == address: - if context.get("active"): - # The user has already started interacting with this flow - # and entered the keys. We abort the discovery flow since - # we assume they do not want to use the discovered keys for - # some reason. - raise AbortFlow("already_in_progress") - hass.config_entries.flow.async_abort(progress["flow_id"]) + self._address = address + if self.hass.config_entries.flow.async_has_matching_flow(self): + raise AbortFlow("already_in_progress") self._lock_cfg = lock_cfg self.context["title_placeholders"] = { @@ -154,6 +145,24 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): } return await self.async_step_integration_discovery_confirm() + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + # Integration discovery should abort other flows unless they + # are already in the process of being set up since this discovery + # will already have all the keys and the user can simply confirm. + if ( + self._local_name_is_unique and other_flow.local_name == self.local_name + ) or other_flow.unique_id == self._address: + if other_flow.active: + # The user has already started interacting with this flow + # and entered the keys. We abort the discovery flow since + # we assume they do not want to use the discovered keys for + # some reason. 
+ return True + self.hass.config_entries.flow.async_abort(other_flow.flow_id) + + return False + async def async_step_integration_discovery_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -184,9 +193,6 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_validate() async def async_step_reauth_validate( @@ -194,8 +200,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle reauth and validation.""" errors = {} - reauth_entry = self._reauth_entry - assert reauth_entry is not None + reauth_entry = self._get_reauth_entry() if user_input is not None: if ( device := async_ble_device_from_address( @@ -212,7 +217,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): ) ): return self.async_update_reload_and_abort( - reauth_entry, data={**reauth_entry.data, **user_input} + reauth_entry, data_updates=user_input ) return self.async_show_form( @@ -234,7 +239,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - self.context["active"] = True + self.active = True address = user_input[CONF_ADDRESS] discovery_info = self._discovered_devices[address] local_name = discovery_info.name @@ -307,16 +312,12 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> YaleXSBLEOptionsFlowHandler: """Get the options flow for this handler.""" - return YaleXSBLEOptionsFlowHandler(config_entry) + return YaleXSBLEOptionsFlowHandler() class YaleXSBLEOptionsFlowHandler(OptionsFlow): """Handle YaleXSBLE options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize YaleXSBLE options flow.""" - self.entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -338,7 +339,9 @@ class YaleXSBLEOptionsFlowHandler(OptionsFlow): { vol.Optional( CONF_ALWAYS_CONNECTED, - default=self.entry.options.get(CONF_ALWAYS_CONNECTED, False), + default=self.config_entry.options.get( + CONF_ALWAYS_CONNECTED, False + ), ): bool, } ), diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 293ba87df86..b2c331397b3 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.4.3"] + "requirements": ["yalexs-ble==2.5.5"] } diff --git a/homeassistant/components/yamaha/icons.json b/homeassistant/components/yamaha/icons.json index f7075508b0d..40eceda3b3e 100644 --- a/homeassistant/components/yamaha/icons.json +++ b/homeassistant/components/yamaha/icons.json @@ -1,7 +1,13 @@ { "services": { - "enable_output": "mdi:audio-input-stereo-minijack", - "menu_cursor": "mdi:cursor-default", - "select_scene": "mdi:palette" + "enable_output": { + "service": "mdi:audio-input-stereo-minijack" + }, + "menu_cursor": { + "service": "mdi:cursor-default" + }, + "select_scene": { + "service": "mdi:palette" + } } } diff --git a/homeassistant/components/yamaha/manifest.json b/homeassistant/components/yamaha/manifest.json index 8e6ba0b8854..936028330a5 100644 --- 
a/homeassistant/components/yamaha/manifest.json +++ b/homeassistant/components/yamaha/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/yamaha", "iot_class": "local_polling", "loggers": ["rxv"], + "quality_scale": "legacy", "requirements": ["rxv==0.7.0"] } diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index 58f501b99be..c16433b3c37 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -2,12 +2,12 @@ from __future__ import annotations -import contextlib import logging from typing import Any import requests import rxv +from rxv import RXV import voluptuous as vol from homeassistant.components.media_player import ( @@ -113,7 +113,7 @@ class YamahaConfigInfo: self.from_discovery = True -def _discovery(config_info): +def _discovery(config_info: YamahaConfigInfo) -> list[RXV]: """Discover list of zone controllers from configuration in the network.""" if config_info.from_discovery: _LOGGER.debug("Discovery Zones") @@ -130,34 +130,7 @@ def _discovery(config_info): zones.extend(recv.zone_controllers()) else: _LOGGER.debug("Config Zones") - zones = None - - # Fix for upstream issues in rxv.find() with some hardware. - with contextlib.suppress(AttributeError, ValueError): - for recv in rxv.find(DISCOVER_TIMEOUT): - _LOGGER.debug( - "Found Serial %s %s %s", - recv.serial_number, - recv.ctrl_url, - recv.zone, - ) - if recv.ctrl_url == config_info.ctrl_url: - _LOGGER.debug( - "Config Zones Matched Serial %s: %s", - recv.ctrl_url, - recv.serial_number, - ) - zones = rxv.RXV( - config_info.ctrl_url, - friendly_name=config_info.name, - serial_number=recv.serial_number, - model_name=recv.model_name, - ).zone_controllers() - break - - if not zones: - _LOGGER.debug("Config Zones Fallback") - zones = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() + zones = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() _LOGGER.debug("Returned _discover zones: %s", zones) return zones @@ -191,6 +164,7 @@ async def async_setup_platform( _LOGGER.debug("Ignore receiver zone: %s %s", config_info.name, zctrl.zone) continue + assert config_info.name entity = YamahaDeviceZone( config_info.name, zctrl, @@ -234,16 +208,24 @@ async def async_setup_platform( class YamahaDeviceZone(MediaPlayerEntity): """Representation of a Yamaha device zone.""" - def __init__(self, name, zctrl, source_ignore, source_names, zone_names): + _reverse_mapping: dict[str, str] + + def __init__( + self, + name: str, + zctrl: RXV, + source_ignore: list[str] | None, + source_names: dict[str, str] | None, + zone_names: dict[str, str] | None, + ) -> None: """Initialize the Yamaha Receiver.""" self.zctrl = zctrl self._attr_is_volume_muted = False self._attr_volume_level = 0 self._attr_state = MediaPlayerState.OFF - self._source_ignore = source_ignore or [] - self._source_names = source_names or {} - self._zone_names = zone_names or {} - self._reverse_mapping = None + self._source_ignore: list[str] = source_ignore or [] + self._source_names: dict[str, str] = source_names or {} + self._zone_names: dict[str, str] = zone_names or {} self._playback_support = None self._is_playback_supported = False self._play_status = None @@ -295,7 +277,7 @@ class YamahaDeviceZone(MediaPlayerEntity): self._attr_sound_mode = None self._attr_sound_mode_list = None - def build_source_list(self): + def build_source_list(self) -> None: """Build the source list.""" self._reverse_mapping 
= { alias: source for source, alias in self._source_names.items() @@ -308,7 +290,7 @@ class YamahaDeviceZone(MediaPlayerEntity): ) @property - def name(self): + def name(self) -> str: """Return the name of the device.""" name = self._name zone_name = self._zone_names.get(self._zone, self._zone) @@ -318,7 +300,7 @@ class YamahaDeviceZone(MediaPlayerEntity): return name @property - def zone_id(self): + def zone_id(self) -> str: """Return a zone_id to ensure 1 media player per zone.""" return f"{self.zctrl.ctrl_url}:{self._zone}" @@ -415,15 +397,15 @@ class YamahaDeviceZone(MediaPlayerEntity): if media_type == "NET RADIO": self.zctrl.net_radio(media_id) - def enable_output(self, port, enabled): + def enable_output(self, port: str, enabled: bool) -> None: """Enable or disable an output port..""" self.zctrl.enable_output(port, enabled) - def menu_cursor(self, cursor): + def menu_cursor(self, cursor: str) -> None: """Press a menu cursor button.""" getattr(self.zctrl, CURSOR_TYPE_MAP[cursor])() - def set_scene(self, scene): + def set_scene(self, scene: str) -> None: """Set the current scene.""" try: self.zctrl.scene = scene diff --git a/homeassistant/components/yamaha_musiccast/__init__.py b/homeassistant/components/yamaha_musiccast/__init__.py index 667b411e6c4..a2ce98dde56 100644 --- a/homeassistant/components/yamaha_musiccast/__init__.py +++ b/homeassistant/components/yamaha_musiccast/__init__.py @@ -2,42 +2,22 @@ from __future__ import annotations -from datetime import timedelta import logging -from aiomusiccast import MusicCastConnectionException -from aiomusiccast.capabilities import Capability -from aiomusiccast.musiccast_device import MusicCastData, MusicCastDevice +from aiomusiccast.musiccast_device import MusicCastDevice from homeassistant.components import ssdp from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_CONNECTIONS, ATTR_VIA_DEVICE, CONF_HOST, Platform +from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import ( - CONNECTION_NETWORK_MAC, - DeviceInfo, - format_mac, -) -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) -from .const import ( - BRAND, - CONF_SERIAL, - CONF_UPNP_DESC, - DEFAULT_ZONE, - DOMAIN, - ENTITY_CATEGORY_MAPPING, -) +from .const import CONF_SERIAL, CONF_UPNP_DESC, DOMAIN +from .coordinator import MusicCastDataUpdateCoordinator PLATFORMS = [Platform.MEDIA_PLAYER, Platform.NUMBER, Platform.SELECT, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) -SCAN_INTERVAL = timedelta(seconds=60) async def get_upnp_desc(hass: HomeAssistant, host: str): @@ -103,118 +83,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Reload config entry.""" await hass.config_entries.async_reload(entry.entry_id) - - -class MusicCastDataUpdateCoordinator(DataUpdateCoordinator[MusicCastData]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching data from the API.""" - - def __init__(self, hass: HomeAssistant, client: MusicCastDevice) -> None: - """Initialize.""" - self.musiccast = client - - super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) - self.entities: list[MusicCastDeviceEntity] = [] - - async def _async_update_data(self) -> MusicCastData: - 
"""Update data via library.""" - try: - await self.musiccast.fetch() - except MusicCastConnectionException as exception: - raise UpdateFailed from exception - return self.musiccast.data - - -class MusicCastEntity(CoordinatorEntity[MusicCastDataUpdateCoordinator]): - """Defines a base MusicCast entity.""" - - def __init__( - self, - *, - name: str, - icon: str, - coordinator: MusicCastDataUpdateCoordinator, - enabled_default: bool = True, - ) -> None: - """Initialize the MusicCast entity.""" - super().__init__(coordinator) - self._attr_entity_registry_enabled_default = enabled_default - self._attr_icon = icon - self._attr_name = name - - -class MusicCastDeviceEntity(MusicCastEntity): - """Defines a MusicCast device entity.""" - - _zone_id: str = DEFAULT_ZONE - - @property - def device_id(self): - """Return the ID of the current device.""" - if self._zone_id == DEFAULT_ZONE: - return self.coordinator.data.device_id - return f"{self.coordinator.data.device_id}_{self._zone_id}" - - @property - def device_name(self): - """Return the name of the current device.""" - return self.coordinator.data.zones[self._zone_id].name - - @property - def device_info(self) -> DeviceInfo: - """Return device information about this MusicCast device.""" - - device_info = DeviceInfo( - name=self.device_name, - identifiers={ - ( - DOMAIN, - self.device_id, - ) - }, - manufacturer=BRAND, - model=self.coordinator.data.model_name, - sw_version=self.coordinator.data.system_version, - ) - - if self._zone_id == DEFAULT_ZONE: - device_info[ATTR_CONNECTIONS] = { - (CONNECTION_NETWORK_MAC, format_mac(mac)) - for mac in self.coordinator.data.mac_addresses.values() - } - else: - device_info[ATTR_VIA_DEVICE] = (DOMAIN, self.coordinator.data.device_id) - - return device_info - - async def async_added_to_hass(self): - """Run when this Entity has been added to HA.""" - await super().async_added_to_hass() - # All entities should register callbacks to update HA when their state changes - self.coordinator.musiccast.register_callback(self.async_write_ha_state) - - async def async_will_remove_from_hass(self): - """Entity being removed from hass.""" - await super().async_will_remove_from_hass() - self.coordinator.musiccast.remove_callback(self.async_write_ha_state) - - -class MusicCastCapabilityEntity(MusicCastDeviceEntity): - """Base Entity type for all capabilities.""" - - def __init__( - self, - coordinator: MusicCastDataUpdateCoordinator, - capability: Capability, - zone_id: str | None = None, - ) -> None: - """Initialize a capability based entity.""" - if zone_id is not None: - self._zone_id = zone_id - self.capability = capability - super().__init__(name=capability.name, icon="", coordinator=coordinator) - self._attr_entity_category = ENTITY_CATEGORY_MAPPING.get(capability.entity_type) - - @property - def unique_id(self) -> str: - """Return the unique ID for this entity.""" - return f"{self.device_id}_{self.capability.id}" diff --git a/homeassistant/components/yamaha_musiccast/config_flow.py b/homeassistant/components/yamaha_musiccast/config_flow.py index a074f34c782..d6ad54c4a3d 100644 --- a/homeassistant/components/yamaha_musiccast/config_flow.py +++ b/homeassistant/components/yamaha_musiccast/config_flow.py @@ -10,9 +10,8 @@ from aiohttp import ClientConnectorError from aiomusiccast import MusicCastConnectionException, MusicCastDevice import voluptuous as vol -from homeassistant import data_entry_flow from homeassistant.components import ssdp -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries 
import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -33,7 +32,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_user( self, user_input: dict[str, Any] | None = None - ) -> data_entry_flow.ConfigFlowResult: + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" # Request user input, unless we are preparing discovery flow if user_input is None: @@ -73,9 +72,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN): return self._show_setup_form(errors) - def _show_setup_form( - self, errors: dict | None = None - ) -> data_entry_flow.ConfigFlowResult: + def _show_setup_form(self, errors: dict | None = None) -> ConfigFlowResult: """Show the setup form to the user.""" return self.async_show_form( step_id="user", @@ -85,7 +82,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_ssdp( self, discovery_info: ssdp.SsdpServiceInfo - ) -> data_entry_flow.ConfigFlowResult: + ) -> ConfigFlowResult: """Handle ssdp discoveries.""" if not await MusicCastDevice.check_yamaha_ssdp( discovery_info.ssdp_location, async_get_clientsession(self.hass) @@ -117,9 +114,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_confirm() - async def async_step_confirm( - self, user_input=None - ) -> data_entry_flow.ConfigFlowResult: + async def async_step_confirm(self, user_input=None) -> ConfigFlowResult: """Allow the user to confirm adding the device.""" if user_input is not None: return self.async_create_entry( diff --git a/homeassistant/components/yamaha_musiccast/coordinator.py b/homeassistant/components/yamaha_musiccast/coordinator.py new file mode 100644 index 00000000000..d5e0c67310a --- /dev/null +++ b/homeassistant/components/yamaha_musiccast/coordinator.py @@ -0,0 +1,41 @@ +"""The MusicCast integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import TYPE_CHECKING + +from aiomusiccast import MusicCastConnectionException +from aiomusiccast.musiccast_device import MusicCastData, MusicCastDevice + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +if TYPE_CHECKING: + from .entity import MusicCastDeviceEntity + +_LOGGER = logging.getLogger(__name__) + +SCAN_INTERVAL = timedelta(seconds=60) + + +class MusicCastDataUpdateCoordinator(DataUpdateCoordinator[MusicCastData]): + """Class to manage fetching data from the API.""" + + def __init__(self, hass: HomeAssistant, client: MusicCastDevice) -> None: + """Initialize.""" + self.musiccast = client + + super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) + self.entities: list[MusicCastDeviceEntity] = [] + + async def _async_update_data(self) -> MusicCastData: + """Update data via library.""" + try: + await self.musiccast.fetch() + except MusicCastConnectionException as exception: + raise UpdateFailed from exception + return self.musiccast.data diff --git a/homeassistant/components/yamaha_musiccast/entity.py b/homeassistant/components/yamaha_musiccast/entity.py new file mode 100644 index 00000000000..4f1add825e4 --- /dev/null +++ b/homeassistant/components/yamaha_musiccast/entity.py @@ -0,0 +1,112 @@ +"""The MusicCast integration.""" + +from __future__ import annotations + +from aiomusiccast.capabilities import Capability + +from homeassistant.const import 
ATTR_CONNECTIONS, ATTR_VIA_DEVICE +from homeassistant.helpers.device_registry import ( + CONNECTION_NETWORK_MAC, + DeviceInfo, + format_mac, +) +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import BRAND, DEFAULT_ZONE, DOMAIN, ENTITY_CATEGORY_MAPPING +from .coordinator import MusicCastDataUpdateCoordinator + + +class MusicCastEntity(CoordinatorEntity[MusicCastDataUpdateCoordinator]): + """Defines a base MusicCast entity.""" + + def __init__( + self, + *, + name: str, + icon: str, + coordinator: MusicCastDataUpdateCoordinator, + enabled_default: bool = True, + ) -> None: + """Initialize the MusicCast entity.""" + super().__init__(coordinator) + self._attr_entity_registry_enabled_default = enabled_default + self._attr_icon = icon + self._attr_name = name + + +class MusicCastDeviceEntity(MusicCastEntity): + """Defines a MusicCast device entity.""" + + _zone_id: str = DEFAULT_ZONE + + @property + def device_id(self): + """Return the ID of the current device.""" + if self._zone_id == DEFAULT_ZONE: + return self.coordinator.data.device_id + return f"{self.coordinator.data.device_id}_{self._zone_id}" + + @property + def device_name(self): + """Return the name of the current device.""" + return self.coordinator.data.zones[self._zone_id].name + + @property + def device_info(self) -> DeviceInfo: + """Return device information about this MusicCast device.""" + + device_info = DeviceInfo( + name=self.device_name, + identifiers={ + ( + DOMAIN, + self.device_id, + ) + }, + manufacturer=BRAND, + model=self.coordinator.data.model_name, + sw_version=self.coordinator.data.system_version, + ) + + if self._zone_id == DEFAULT_ZONE: + device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, format_mac(mac)) + for mac in self.coordinator.data.mac_addresses.values() + } + else: + device_info[ATTR_VIA_DEVICE] = (DOMAIN, self.coordinator.data.device_id) + + return device_info + + async def async_added_to_hass(self): + """Run when this Entity has been added to HA.""" + await super().async_added_to_hass() + # All entities should register callbacks to update HA when their state changes + self.coordinator.musiccast.register_callback(self.async_write_ha_state) + + async def async_will_remove_from_hass(self): + """Entity being removed from hass.""" + await super().async_will_remove_from_hass() + self.coordinator.musiccast.remove_callback(self.async_write_ha_state) + + +class MusicCastCapabilityEntity(MusicCastDeviceEntity): + """Base Entity type for all capabilities.""" + + def __init__( + self, + coordinator: MusicCastDataUpdateCoordinator, + capability: Capability, + zone_id: str | None = None, + ) -> None: + """Initialize a capability based entity.""" + if zone_id is not None: + self._zone_id = zone_id + self.capability = capability + super().__init__(name=capability.name, icon="", coordinator=coordinator) + self._attr_entity_category = ENTITY_CATEGORY_MAPPING.get(capability.entity_type) + + @property + def unique_id(self) -> str: + """Return the unique ID for this entity.""" + return f"{self.device_id}_{self.capability.id}" diff --git a/homeassistant/components/yamaha_musiccast/media_player.py b/homeassistant/components/yamaha_musiccast/media_player.py index a068ac6ddca..4384cc34836 100644 --- a/homeassistant/components/yamaha_musiccast/media_player.py +++ b/homeassistant/components/yamaha_musiccast/media_player.py @@ -27,7 +27,6 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util 
import uuid -from . import MusicCastDataUpdateCoordinator, MusicCastDeviceEntity from .const import ( ATTR_MAIN_SYNC, ATTR_MC_LINK, @@ -38,6 +37,8 @@ from .const import ( MEDIA_CLASS_MAPPING, NULL_GROUP, ) +from .coordinator import MusicCastDataUpdateCoordinator +from .entity import MusicCastDeviceEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/yamaha_musiccast/number.py b/homeassistant/components/yamaha_musiccast/number.py index a5a591379c6..02dd6720d91 100644 --- a/homeassistant/components/yamaha_musiccast/number.py +++ b/homeassistant/components/yamaha_musiccast/number.py @@ -9,7 +9,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN, MusicCastCapabilityEntity, MusicCastDataUpdateCoordinator +from .const import DOMAIN +from .coordinator import MusicCastDataUpdateCoordinator +from .entity import MusicCastCapabilityEntity async def async_setup_entry( diff --git a/homeassistant/components/yamaha_musiccast/select.py b/homeassistant/components/yamaha_musiccast/select.py index b068b956e1b..3a4649b9ae5 100644 --- a/homeassistant/components/yamaha_musiccast/select.py +++ b/homeassistant/components/yamaha_musiccast/select.py @@ -9,8 +9,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN, MusicCastCapabilityEntity, MusicCastDataUpdateCoordinator -from .const import TRANSLATION_KEY_MAPPING +from .const import DOMAIN, TRANSLATION_KEY_MAPPING +from .coordinator import MusicCastDataUpdateCoordinator +from .entity import MusicCastCapabilityEntity async def async_setup_entry( diff --git a/homeassistant/components/yamaha_musiccast/strings.json b/homeassistant/components/yamaha_musiccast/strings.json index d0ee6c030a6..eaa5ac50c80 100644 --- a/homeassistant/components/yamaha_musiccast/strings.json +++ b/homeassistant/components/yamaha_musiccast/strings.json @@ -20,7 +20,9 @@ "yxc_control_url_missing": "The control URL is not given in the ssdp description." }, "error": { - "no_musiccast_device": "This device seems to be no MusicCast Device." + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "no_musiccast_device": "This device seems to be no MusicCast Device.", + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "entity": { diff --git a/homeassistant/components/yamaha_musiccast/switch.py b/homeassistant/components/yamaha_musiccast/switch.py index 2ae8388027a..49d031a02b5 100644 --- a/homeassistant/components/yamaha_musiccast/switch.py +++ b/homeassistant/components/yamaha_musiccast/switch.py @@ -9,7 +9,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN, MusicCastCapabilityEntity, MusicCastDataUpdateCoordinator +from .const import DOMAIN +from .coordinator import MusicCastDataUpdateCoordinator +from .entity import MusicCastCapabilityEntity async def async_setup_entry( diff --git a/homeassistant/components/yandex_transport/manifest.json b/homeassistant/components/yandex_transport/manifest.json index 1d1219d5a95..ad31d495253 100644 --- a/homeassistant/components/yandex_transport/manifest.json +++ b/homeassistant/components/yandex_transport/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@rishatik92", "@devbis"], "documentation": "https://www.home-assistant.io/integrations/yandex_transport", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["aioymaps==1.2.5"] } diff --git a/homeassistant/components/yandextts/manifest.json b/homeassistant/components/yandextts/manifest.json index e1ab27272ef..418516a2d09 100644 --- a/homeassistant/components/yandextts/manifest.json +++ b/homeassistant/components/yandextts/manifest.json @@ -3,5 +3,6 @@ "name": "Yandex TTS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/yandextts", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/yardian/icons.json b/homeassistant/components/yardian/icons.json index 79bcc32adf2..4ca3d83bd15 100644 --- a/homeassistant/components/yardian/icons.json +++ b/homeassistant/components/yardian/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "start_irrigation": "mdi:water" + "start_irrigation": { + "service": "mdi:water" + } } } diff --git a/homeassistant/components/yeelight/config_flow.py b/homeassistant/components/yeelight/config_flow.py index b4bb7da9a22..7a3a0a2f100 100644 --- a/homeassistant/components/yeelight/config_flow.py +++ b/homeassistant/components/yeelight/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any +from typing import Any, Self from urllib.parse import urlparse import voluptuous as vol @@ -23,6 +23,7 @@ from homeassistant.const import CONF_DEVICE, CONF_HOST, CONF_ID, CONF_MODEL, CON from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.typing import VolDictType from .const import ( CONF_DETECTED_MODEL, @@ -52,17 +53,20 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + _discovered_ip: str = "" + _discovered_model: str + @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlowHandler: """Return the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() def __init__(self) -> None: """Initialize the config flow.""" self._discovered_devices: dict[str, Any] = {} - self._discovered_model = None - self._discovered_ip: str | None = None async def async_step_homekit( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -83,9 +87,7 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle discovery from zeroconf.""" self._discovered_ip = discovery_info.host - await self.async_set_unique_id( - "{0:#0{1}x}".format(int(discovery_info.name[-26:-18]), 18) - ) + await self.async_set_unique_id(f"{int(discovery_info.name[-26:-18]):#018x}") return await self._async_handle_discovery_with_unique_id() async def async_step_ssdp( @@ -96,7 +98,7 @@ class 
YeelightConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(discovery_info.ssdp_headers["id"]) return await self._async_handle_discovery_with_unique_id() - async def _async_handle_discovery_with_unique_id(self): + async def _async_handle_discovery_with_unique_id(self) -> ConfigFlowResult: """Handle any discovery with a unique id.""" for entry in self._async_current_entries(include_ignore=False): if entry.unique_id != self.unique_id and self.unique_id != entry.data.get( @@ -117,12 +119,10 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="already_configured") return await self._async_handle_discovery() - async def _async_handle_discovery(self): + async def _async_handle_discovery(self) -> ConfigFlowResult: """Handle any discovery.""" - self.context[CONF_HOST] = self._discovered_ip - for progress in self._async_in_progress(): - if progress.get("context", {}).get(CONF_HOST) == self._discovered_ip: - return self.async_abort(reason="already_in_progress") + if self.hass.config_entries.flow.async_has_matching_flow(self): + return self.async_abort(reason="already_in_progress") self._async_abort_entries_match({CONF_HOST: self._discovered_ip}) try: @@ -140,7 +140,13 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): ) return await self.async_step_discovery_confirm() - async def async_step_discovery_confirm(self, user_input=None): + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return other_flow._discovered_ip == self._discovered_ip # noqa: SLF001 + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm discovery.""" if user_input is not None or not onboarding.async_is_onboarded(self.hass): return self.async_create_entry( @@ -179,8 +185,6 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" else: self._abort_if_unique_id_configured() - if TYPE_CHECKING: - assert self.unique_id return self.async_create_entry( title=async_format_model_id(model, self.unique_id), data={ @@ -199,7 +203,9 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_pick_device(self, user_input=None): + async def async_step_pick_device( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle the step to pick discovered device.""" if user_input is not None: unique_id = user_input[CONF_DEVICE] @@ -244,23 +250,25 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_DEVICE): vol.In(devices_name)}), ) - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import step.""" - host = user_input[CONF_HOST] + host = import_data[CONF_HOST] try: await self._async_try_connect(host, raise_on_progress=False) except CannotConnect: _LOGGER.error("Failed to import %s: cannot connect", host) return self.async_abort(reason="cannot_connect") - if CONF_NIGHTLIGHT_SWITCH_TYPE in user_input: - user_input[CONF_NIGHTLIGHT_SWITCH] = ( - user_input.pop(CONF_NIGHTLIGHT_SWITCH_TYPE) + if CONF_NIGHTLIGHT_SWITCH_TYPE in import_data: + import_data[CONF_NIGHTLIGHT_SWITCH] = ( + import_data.pop(CONF_NIGHTLIGHT_SWITCH_TYPE) == NIGHTLIGHT_SWITCH_TYPE_LIGHT ) self._abort_if_unique_id_configured() - return self.async_create_entry(title=user_input[CONF_NAME], data=user_input) + return self.async_create_entry(title=import_data[CONF_NAME], 
data=import_data) - async def _async_try_connect(self, host, raise_on_progress=True): + async def _async_try_connect( + self, host: str, raise_on_progress: bool = True + ) -> str: """Set up with options.""" self._async_abort_entries_match({CONF_HOST: host}) @@ -290,14 +298,12 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Yeelight.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the option flow.""" - self._config_entry = config_entry - - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" - data = self._config_entry.data - options = self._config_entry.options + data = self.config_entry.data + options = self.config_entry.options detected_model = data.get(CONF_DETECTED_MODEL) model = options[CONF_MODEL] or detected_model @@ -306,7 +312,7 @@ class OptionsFlowHandler(OptionsFlow): title="", data={CONF_MODEL: model, **options, **user_input} ) - schema_dict = {} + schema_dict: VolDictType = {} known_models = get_known_models() if is_unknown_model := model not in known_models: known_models.insert(0, model) diff --git a/homeassistant/components/yeelight/device.py b/homeassistant/components/yeelight/device.py index c42fd072728..09086dc91d9 100644 --- a/homeassistant/components/yeelight/device.py +++ b/homeassistant/components/yeelight/device.py @@ -32,13 +32,13 @@ def async_format_model(model: str) -> str: @callback -def async_format_id(id_: str) -> str: +def async_format_id(id_: str | None) -> str: """Generate a more human readable id.""" return hex(int(id_, 16)) if id_ else "None" @callback -def async_format_model_id(model: str, id_: str) -> str: +def async_format_model_id(model: str, id_: str | None) -> str: """Generate a more human readable name.""" return f"{async_format_model(model)} {async_format_id(id_)}" diff --git a/homeassistant/components/yeelight/icons.json b/homeassistant/components/yeelight/icons.json index bf0d0c497f0..898637e752c 100644 --- a/homeassistant/components/yeelight/icons.json +++ b/homeassistant/components/yeelight/icons.json @@ -7,13 +7,29 @@ } }, "services": { - "set_mode": "mdi:cog", - "set_color_scene": "mdi:palette", - "set_hsv_scene": "mdi:palette", - "set_color_temp_scene": "mdi:palette", - "set_color_flow_scene": "mdi:palette", - "set_auto_delay_off_scene": "mdi:timer", - "start_flow": "mdi:play", - "set_music_mode": "mdi:music" + "set_mode": { + "service": "mdi:cog" + }, + "set_color_scene": { + "service": "mdi:palette" + }, + "set_hsv_scene": { + "service": "mdi:palette" + }, + "set_color_temp_scene": { + "service": "mdi:palette" + }, + "set_color_flow_scene": { + "service": "mdi:palette" + }, + "set_auto_delay_off_scene": { + "service": "mdi:timer" + }, + "start_flow": { + "service": "mdi:play" + }, + "set_music_mode": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/yeelight/light.py b/homeassistant/components/yeelight/light.py index d0d53510859..8cc3f2600e5 100644 --- a/homeassistant/components/yeelight/light.py +++ b/homeassistant/components/yeelight/light.py @@ -16,11 +16,10 @@ from yeelight.main import BulbException from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_RGB_COLOR, ATTR_TRANSITION, FLASH_LONG, @@ -40,10 +39,6 @@ from homeassistant.helpers.entity_platform import 
AddEntitiesCallback from homeassistant.helpers.event import async_call_later from homeassistant.helpers.typing import VolDictType import homeassistant.util.color as color_util -from homeassistant.util.color import ( - color_temperature_kelvin_to_mired as kelvin_to_mired, - color_temperature_mired_to_kelvin as mired_to_kelvin, -) from . import YEELIGHT_FLOW_TRANSITION_SCHEMA from .const import ( @@ -71,6 +66,7 @@ from .entity import YeelightEntity _LOGGER = logging.getLogger(__name__) ATTR_MINUTES = "minutes" +ATTR_KELVIN = "kelvin" SERVICE_SET_MODE = "set_mode" SERVICE_SET_MUSIC_MODE = "set_music_mode" @@ -440,8 +436,8 @@ class YeelightBaseLight(YeelightEntity, LightEntity): self._effect = None model_specs = self._bulb.get_model_specs() - self._attr_min_mireds = kelvin_to_mired(model_specs["color_temp"]["max"]) - self._attr_max_mireds = kelvin_to_mired(model_specs["color_temp"]["min"]) + self._attr_max_color_temp_kelvin = model_specs["color_temp"]["max"] + self._attr_min_color_temp_kelvin = model_specs["color_temp"]["min"] self._light_type = LightType.Main @@ -476,10 +472,10 @@ class YeelightBaseLight(YeelightEntity, LightEntity): return self._predefined_effects + self.custom_effects_names @property - def color_temp(self) -> int | None: - """Return the color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" if temp_in_k := self._get_property("ct"): - self._color_temp = kelvin_to_mired(int(temp_in_k)) + self._color_temp = int(temp_in_k) return self._color_temp @property @@ -678,20 +674,19 @@ class YeelightBaseLight(YeelightEntity, LightEntity): ) @_async_cmd - async def async_set_colortemp(self, colortemp, duration) -> None: + async def async_set_colortemp(self, temp_in_k, duration) -> None: """Set bulb's color temperature.""" if ( - not colortemp + not temp_in_k or not self.supported_color_modes or ColorMode.COLOR_TEMP not in self.supported_color_modes ): return - temp_in_k = mired_to_kelvin(colortemp) if ( not self.device.is_color_flow_enabled and self.color_mode == ColorMode.COLOR_TEMP - and self.color_temp == colortemp + and self.color_temp_kelvin == temp_in_k ): _LOGGER.debug("Color temp already set to: %s", temp_in_k) # Already set, and since we get pushed updates @@ -779,7 +774,7 @@ class YeelightBaseLight(YeelightEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the bulb on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) - colortemp = kwargs.get(ATTR_COLOR_TEMP) + colortemp = kwargs.get(ATTR_COLOR_TEMP_KELVIN) hs_color = kwargs.get(ATTR_HS_COLOR) rgb = kwargs.get(ATTR_RGB_COLOR) flash = kwargs.get(ATTR_FLASH) @@ -933,12 +928,12 @@ class YeelightWithoutNightlightSwitchMixIn(YeelightBaseLight): return super()._brightness_property @property - def color_temp(self) -> int | None: - """Return the color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" if self.device.is_nightlight_enabled: # Enabling the nightlight locks the colortemp to max - return self.max_mireds - return super().color_temp + return self.min_color_temp_kelvin + return super().color_temp_kelvin class YeelightColorLightWithoutNightlightSwitch( @@ -1081,8 +1076,8 @@ class YeelightAmbientLight(YeelightColorLightWithoutNightlightSwitch): def __init__(self, *args, **kwargs): """Initialize the Yeelight Ambient light.""" super().__init__(*args, **kwargs) - self._attr_min_mireds = kelvin_to_mired(6500) - self._attr_max_mireds = kelvin_to_mired(1700) + 
self._attr_max_color_temp_kelvin = 6500 + self._attr_min_color_temp_kelvin = 1700 self._light_type = LightType.Ambient diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index efb08e26b5a..4da2e0cfc3e 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -16,8 +16,7 @@ }, "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], - "quality_scale": "platinum", - "requirements": ["yeelight==0.7.14", "async-upnp-client==0.40.0"], + "requirements": ["yeelight==0.7.14", "async-upnp-client==0.41.0"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/components/yeelightsunflower/manifest.json b/homeassistant/components/yeelightsunflower/manifest.json index 67746e122cb..bfd185cfa72 100644 --- a/homeassistant/components/yeelightsunflower/manifest.json +++ b/homeassistant/components/yeelightsunflower/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/yeelightsunflower", "iot_class": "local_polling", "loggers": ["yeelightsunflower"], + "quality_scale": "legacy", "requirements": ["yeelightsunflower==0.0.10"] } diff --git a/homeassistant/components/yi/manifest.json b/homeassistant/components/yi/manifest.json index d8514b251cc..24b5aaad758 100644 --- a/homeassistant/components/yi/manifest.json +++ b/homeassistant/components/yi/manifest.json @@ -7,5 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["aioftp"], + "quality_scale": "legacy", "requirements": ["aioftp==0.21.3"] } diff --git a/homeassistant/components/yolink/climate.py b/homeassistant/components/yolink/climate.py index 98f1b764498..ff3bbf0d93b 100644 --- a/homeassistant/components/yolink/climate.py +++ b/homeassistant/components/yolink/climate.py @@ -63,7 +63,6 @@ class YoLinkClimateEntity(YoLinkEntity, ClimateEntity): """YoLink Climate Entity.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/yolink/config_flow.py b/homeassistant/components/yolink/config_flow.py index abdac696248..2e96dcf9f8c 100644 --- a/homeassistant/components/yolink/config_flow.py +++ b/homeassistant/components/yolink/config_flow.py @@ -6,7 +6,7 @@ from collections.abc import Mapping import logging from typing import Any -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN @@ -18,7 +18,6 @@ class OAuth2FlowHandler( """Config flow to handle yolink OAuth2 authentication.""" DOMAIN = DOMAIN - _reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -35,9 +34,6 @@ class OAuth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm(self, user_input=None) -> ConfigFlowResult: @@ -48,12 +44,10 @@ class OAuth2FlowHandler( async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: """Create an oauth config entry or update existing entry for reauth.""" - if existing_entry := self._reauth_entry: - self.hass.config_entries.async_update_entry( - existing_entry, data=existing_entry.data | data + if self.source == 
SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=data ) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_create_entry(title="YoLink", data=data) async def async_step_user( @@ -61,6 +55,6 @@ class OAuth2FlowHandler( ) -> ConfigFlowResult: """Handle a flow start.""" existing_entry = await self.async_set_unique_id(DOMAIN) - if existing_entry and not self._reauth_entry: + if existing_entry and self.source != SOURCE_REAUTH: return self.async_abort(reason="already_configured") return await super().async_step_user(user_input) diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index 686160d9248..eb6169eccad 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -19,7 +19,17 @@ DEV_MODEL_WATER_METER_YS5007 = "YS5007" DEV_MODEL_MULTI_OUTLET_YS6801 = "YS6801" DEV_MODEL_TH_SENSOR_YS8004_UC = "YS8004-UC" DEV_MODEL_TH_SENSOR_YS8004_EC = "YS8004-EC" +DEV_MODEL_TH_SENSOR_YS8008_UC = "YS8008-UC" +DEV_MODEL_TH_SENSOR_YS8008_EC = "YS8008-EC" DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" DEV_MODEL_TH_SENSOR_YS8017_EC = "YS8017-EC" +DEV_MODEL_FLEX_FOB_YS3604_UC = "YS3604-UC" +DEV_MODEL_FLEX_FOB_YS3604_EC = "YS3604-EC" +DEV_MODEL_FLEX_FOB_YS3614_UC = "YS3614-UC" +DEV_MODEL_FLEX_FOB_YS3614_EC = "YS3614-EC" +DEV_MODEL_PLUG_YS6602_UC = "YS6602-UC" +DEV_MODEL_PLUG_YS6602_EC = "YS6602-EC" +DEV_MODEL_PLUG_YS6803_UC = "YS6803-UC" +DEV_MODEL_PLUG_YS6803_EC = "YS6803-EC" diff --git a/homeassistant/components/yolink/device_trigger.py b/homeassistant/components/yolink/device_trigger.py index b7f83623be5..6e247bf858e 100644 --- a/homeassistant/components/yolink/device_trigger.py +++ b/homeassistant/components/yolink/device_trigger.py @@ -16,6 +16,12 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType from . 
import DOMAIN, YOLINK_EVENT +from .const import ( + DEV_MODEL_FLEX_FOB_YS3604_EC, + DEV_MODEL_FLEX_FOB_YS3604_UC, + DEV_MODEL_FLEX_FOB_YS3614_EC, + DEV_MODEL_FLEX_FOB_YS3614_UC, +) CONF_BUTTON_1 = "button_1" CONF_BUTTON_2 = "button_2" @@ -24,7 +30,7 @@ CONF_BUTTON_4 = "button_4" CONF_SHORT_PRESS = "short_press" CONF_LONG_PRESS = "long_press" -REMOTE_TRIGGER_TYPES = { +FLEX_FOB_4_BUTTONS = { f"{CONF_BUTTON_1}_{CONF_SHORT_PRESS}", f"{CONF_BUTTON_1}_{CONF_LONG_PRESS}", f"{CONF_BUTTON_2}_{CONF_SHORT_PRESS}", @@ -35,14 +41,24 @@ REMOTE_TRIGGER_TYPES = { f"{CONF_BUTTON_4}_{CONF_LONG_PRESS}", } +FLEX_FOB_2_BUTTONS = { + f"{CONF_BUTTON_1}_{CONF_SHORT_PRESS}", + f"{CONF_BUTTON_1}_{CONF_LONG_PRESS}", + f"{CONF_BUTTON_2}_{CONF_SHORT_PRESS}", + f"{CONF_BUTTON_2}_{CONF_LONG_PRESS}", +} + TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend( - {vol.Required(CONF_TYPE): vol.In(REMOTE_TRIGGER_TYPES)} + {vol.Required(CONF_TYPE): vol.In(FLEX_FOB_4_BUTTONS)} ) -# YoLink Remotes YS3604/YS3605/YS3606/YS3607 -DEVICE_TRIGGER_TYPES: dict[str, set[str]] = { - ATTR_DEVICE_SMART_REMOTER: REMOTE_TRIGGER_TYPES, +# YoLink Remotes YS3604/YS3614 +FLEX_FOB_TRIGGER_TYPES: dict[str, set[str]] = { + DEV_MODEL_FLEX_FOB_YS3604_EC: FLEX_FOB_4_BUTTONS, + DEV_MODEL_FLEX_FOB_YS3604_UC: FLEX_FOB_4_BUTTONS, + DEV_MODEL_FLEX_FOB_YS3614_UC: FLEX_FOB_2_BUTTONS, + DEV_MODEL_FLEX_FOB_YS3614_EC: FLEX_FOB_2_BUTTONS, } @@ -54,7 +70,8 @@ async def async_get_triggers( registry_device = device_registry.async_get(device_id) if not registry_device or registry_device.model != ATTR_DEVICE_SMART_REMOTER: return [] - + if registry_device.model_id not in list(FLEX_FOB_TRIGGER_TYPES.keys()): + return [] return [ { CONF_DEVICE_ID: device_id, @@ -62,7 +79,7 @@ async def async_get_triggers( CONF_PLATFORM: "device", CONF_TYPE: trigger, } - for trigger in DEVICE_TRIGGER_TYPES[ATTR_DEVICE_SMART_REMOTER] + for trigger in FLEX_FOB_TRIGGER_TYPES[registry_device.model_id] ] diff --git a/homeassistant/components/yolink/entity.py b/homeassistant/components/yolink/entity.py index d9ca2968493..0f500b72404 100644 --- a/homeassistant/components/yolink/entity.py +++ b/homeassistant/components/yolink/entity.py @@ -55,6 +55,7 @@ class YoLinkEntity(CoordinatorEntity[YoLinkCoordinator]): identifiers={(DOMAIN, self.coordinator.device.device_id)}, manufacturer=MANUFACTURER, model=self.coordinator.device.device_type, + model_id=self.coordinator.device.device_model_name, name=self.coordinator.device.device_name, ) diff --git a/homeassistant/components/yolink/icons.json b/homeassistant/components/yolink/icons.json index ee9037c864a..c58d219a2e0 100644 --- a/homeassistant/components/yolink/icons.json +++ b/homeassistant/components/yolink/icons.json @@ -17,6 +17,9 @@ }, "power_failure_alarm_beep": { "default": "mdi:bullhorn" + }, + "water_meter_reading": { + "default": "mdi:gauge" } }, "switch": { @@ -26,6 +29,8 @@ } }, "services": { - "play_on_speaker_hub": "mdi:speaker" + "play_on_speaker_hub": { + "service": "mdi:speaker" + } } } diff --git a/homeassistant/components/yolink/lock.py b/homeassistant/components/yolink/lock.py index 177a8808de1..d675fd8cf06 100644 --- a/homeassistant/components/yolink/lock.py +++ b/homeassistant/components/yolink/lock.py @@ -1,11 +1,11 @@ -"""YoLink Lock.""" +"""YoLink Lock V1/V2.""" from __future__ import annotations from typing import Any from yolink.client_request import ClientRequest -from yolink.const import ATTR_DEVICE_LOCK +from yolink.const import ATTR_DEVICE_LOCK, ATTR_DEVICE_LOCK_V2 from homeassistant.components.lock import LockEntity 
from homeassistant.config_entries import ConfigEntry @@ -27,7 +27,8 @@ async def async_setup_entry( entities = [ YoLinkLockEntity(config_entry, device_coordinator) for device_coordinator in device_coordinators.values() - if device_coordinator.device.device_type == ATTR_DEVICE_LOCK + if device_coordinator.device.device_type + in [ATTR_DEVICE_LOCK, ATTR_DEVICE_LOCK_V2] ] async_add_entities(entities) @@ -50,21 +51,41 @@ class YoLinkLockEntity(YoLinkEntity, LockEntity): def update_entity_state(self, state: dict[str, Any]) -> None: """Update HA Entity State.""" state_value = state.get("state") - self._attr_is_locked = ( - state_value == "locked" if state_value is not None else None - ) + if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2: + self._attr_is_locked = ( + state_value["lock"] == "locked" if state_value is not None else None + ) + else: + self._attr_is_locked = ( + state_value == "locked" if state_value is not None else None + ) self.async_write_ha_state() async def call_lock_state_change(self, state: str) -> None: """Call setState api to change lock state.""" - await self.call_device(ClientRequest("setState", {"state": state})) + if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2: + await self.call_device( + ClientRequest("setState", {"state": {"lock": state}}) + ) + else: + await self.call_device(ClientRequest("setState", {"state": state})) self._attr_is_locked = state == "lock" self.async_write_ha_state() async def async_lock(self, **kwargs: Any) -> None: """Lock device.""" - await self.call_lock_state_change("lock") + state_param = ( + "locked" + if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2 + else "lock" + ) + await self.call_lock_state_change(state_param) async def async_unlock(self, **kwargs: Any) -> None: """Unlock device.""" - await self.call_lock_state_change("unlock") + state_param = ( + "unlocked" + if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2 + else "unlock" + ) + await self.call_lock_state_change(state_param) diff --git a/homeassistant/components/yolink/sensor.py b/homeassistant/components/yolink/sensor.py index 77bbccb2f6a..8f263cdae07 100644 --- a/homeassistant/components/yolink/sensor.py +++ b/homeassistant/components/yolink/sensor.py @@ -40,7 +40,9 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfEnergy, UnitOfLength, + UnitOfPower, UnitOfTemperature, UnitOfVolume, ) @@ -49,8 +51,14 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import percentage from .const import ( + DEV_MODEL_PLUG_YS6602_EC, + DEV_MODEL_PLUG_YS6602_UC, + DEV_MODEL_PLUG_YS6803_EC, + DEV_MODEL_PLUG_YS6803_UC, DEV_MODEL_TH_SENSOR_YS8004_EC, DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8008_EC, + DEV_MODEL_TH_SENSOR_YS8008_UC, DEV_MODEL_TH_SENSOR_YS8014_EC, DEV_MODEL_TH_SENSOR_YS8014_UC, DEV_MODEL_TH_SENSOR_YS8017_EC, @@ -119,12 +127,21 @@ MCU_DEV_TEMPERATURE_SENSOR = [ NONE_HUMIDITY_SENSOR_MODELS = [ DEV_MODEL_TH_SENSOR_YS8004_EC, DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8008_EC, + DEV_MODEL_TH_SENSOR_YS8008_UC, DEV_MODEL_TH_SENSOR_YS8014_EC, DEV_MODEL_TH_SENSOR_YS8014_UC, DEV_MODEL_TH_SENSOR_YS8017_UC, DEV_MODEL_TH_SENSOR_YS8017_EC, ] +POWER_SUPPORT_MODELS = [ + DEV_MODEL_PLUG_YS6602_UC, + DEV_MODEL_PLUG_YS6602_EC, + DEV_MODEL_PLUG_YS6803_UC, + DEV_MODEL_PLUG_YS6803_EC, +] + def cvt_battery(val: int | None) -> int | None: """Convert battery to percentage.""" @@ -158,8 +175,10 @@ SENSOR_TYPES: 
tuple[YoLinkSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.HUMIDITY, native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, - exists_fn=lambda device: device.device_type in [ATTR_DEVICE_TH_SENSOR] - and device.device_model_name not in NONE_HUMIDITY_SENSOR_MODELS, + exists_fn=lambda device: ( + device.device_type in [ATTR_DEVICE_TH_SENSOR] + and device.device_model_name not in NONE_HUMIDITY_SENSOR_MODELS + ), ), YoLinkSensorEntityDescription( key="temperature", @@ -228,12 +247,32 @@ SENSOR_TYPES: tuple[YoLinkSensorEntityDescription, ...] = ( key="meter_reading", translation_key="water_meter_reading", device_class=SensorDeviceClass.WATER, - icon="mdi:gauge", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, state_class=SensorStateClass.TOTAL_INCREASING, should_update_entity=lambda value: value is not None, - exists_fn=lambda device: device.device_type - in ATTR_DEVICE_WATER_METER_CONTROLLER, + exists_fn=lambda device: ( + device.device_type in ATTR_DEVICE_WATER_METER_CONTROLLER + ), + ), + YoLinkSensorEntityDescription( + key="power", + translation_key="current_power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + should_update_entity=lambda value: value is not None, + exists_fn=lambda device: device.device_model_name in POWER_SUPPORT_MODELS, + value=lambda value: value / 10 if value is not None else None, + ), + YoLinkSensorEntityDescription( + key="watt", + translation_key="power_consumption", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + state_class=SensorStateClass.TOTAL, + should_update_entity=lambda value: value is not None, + exists_fn=lambda device: device.device_model_name in POWER_SUPPORT_MODELS, + value=lambda value: value / 100 if value is not None else None, ), ) diff --git a/homeassistant/components/yolink/strings.json b/homeassistant/components/yolink/strings.json index bc8fb435e76..2f9a9454502 100644 --- a/homeassistant/components/yolink/strings.json +++ b/homeassistant/components/yolink/strings.json @@ -19,7 +19,8 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" @@ -51,6 +52,12 @@ "plug_4": { "name": "Plug 4" } }, "sensor": { + "current_power": { + "name": "Current power" + }, + "power_consumption": { + "name": "Power consumption" + }, "power_failure_alarm": { "name": "Power failure alarm", "state": { diff --git a/homeassistant/components/youless/__init__.py b/homeassistant/components/youless/__init__.py index a968d052922..d475034cc9d 100644 --- a/homeassistant/components/youless/__init__.py +++ b/homeassistant/components/youless/__init__.py @@ -36,6 +36,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="youless_gateway", update_method=async_update_data, update_interval=timedelta(seconds=10), diff --git a/homeassistant/components/youtube/config_flow.py 
b/homeassistant/components/youtube/config_flow.py index 32b37b93eb2..48336422585 100644 --- a/homeassistant/components/youtube/config_flow.py +++ b/homeassistant/components/youtube/config_flow.py @@ -12,9 +12,10 @@ from youtubeaio.types import AuthScope, ForbiddenError from youtubeaio.youtube import YouTube from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.core import callback @@ -45,7 +46,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None _youtube: YouTube | None = None @staticmethod @@ -54,7 +54,7 @@ class OAuth2FlowHandler( config_entry: ConfigEntry, ) -> YouTubeOptionsFlowHandler: """Get the options flow for this handler.""" - return YouTubeOptionsFlowHandler(config_entry) + return YouTubeOptionsFlowHandler() @property def logger(self) -> logging.Logger: @@ -75,9 +75,6 @@ class OAuth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -117,22 +114,19 @@ class OAuth2FlowHandler( self._title = own_channel.snippet.title self._data = data - if not self.reauth_entry: - await self.async_set_unique_id(own_channel.channel_id) + await self.async_set_unique_id(own_channel.channel_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return await self.async_step_channels() - if self.reauth_entry.unique_id == own_channel.channel_id: - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - return self.async_abort( + self._abort_if_unique_id_mismatch( reason="wrong_account", description_placeholders={"title": self._title}, ) + return self.async_update_reload_and_abort(self._get_reauth_entry(), data=data) + async def async_step_channels( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -165,7 +159,7 @@ class OAuth2FlowHandler( ) -class YouTubeOptionsFlowHandler(OptionsFlowWithConfigEntry): +class YouTubeOptionsFlowHandler(OptionsFlow): """YouTube Options flow handler.""" async def async_step_init( @@ -200,6 +194,6 @@ class YouTubeOptionsFlowHandler(OptionsFlowWithConfigEntry): ), } ), - self.options, + self.config_entry.options, ), ) diff --git a/homeassistant/components/youtube/const.py b/homeassistant/components/youtube/const.py index a663c487d0a..da5a554f364 100644 --- a/homeassistant/components/youtube/const.py +++ b/homeassistant/components/youtube/const.py @@ -15,6 +15,7 @@ AUTH = "auth" LOGGER = logging.getLogger(__package__) ATTR_TITLE = "title" +ATTR_TOTAL_VIEWS = "total_views" ATTR_LATEST_VIDEO = "latest_video" ATTR_SUBSCRIBER_COUNT = "subscriber_count" ATTR_DESCRIPTION = "description" diff --git a/homeassistant/components/youtube/coordinator.py b/homeassistant/components/youtube/coordinator.py index 4599342c84d..0da480f1169 100644 --- a/homeassistant/components/youtube/coordinator.py +++ b/homeassistant/components/youtube/coordinator.py @@ -22,6 +22,7 @@ from .const import ( ATTR_SUBSCRIBER_COUNT, ATTR_THUMBNAIL, ATTR_TITLE, + ATTR_TOTAL_VIEWS, ATTR_VIDEO_ID, CONF_CHANNELS, DOMAIN, @@ -68,6 +69,7 @@ class 
YouTubeDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): ATTR_ICON: channel.snippet.thumbnails.get_highest_quality().url, ATTR_LATEST_VIDEO: latest_video, ATTR_SUBSCRIBER_COUNT: channel.statistics.subscriber_count, + ATTR_TOTAL_VIEWS: channel.statistics.view_count, } except UnauthorizedError as err: raise ConfigEntryAuthFailed from err diff --git a/homeassistant/components/youtube/sensor.py b/homeassistant/components/youtube/sensor.py index bc69f92e8fd..8832382508c 100644 --- a/homeassistant/components/youtube/sensor.py +++ b/homeassistant/components/youtube/sensor.py @@ -20,6 +20,7 @@ from .const import ( ATTR_SUBSCRIBER_COUNT, ATTR_THUMBNAIL, ATTR_TITLE, + ATTR_TOTAL_VIEWS, ATTR_VIDEO_ID, COORDINATOR, DOMAIN, @@ -58,6 +59,15 @@ SENSOR_TYPES = [ entity_picture_fn=lambda channel: channel[ATTR_ICON], attributes_fn=None, ), + YouTubeSensorEntityDescription( + key="views", + translation_key="views", + native_unit_of_measurement="views", + available_fn=lambda _: True, + value_fn=lambda channel: channel[ATTR_TOTAL_VIEWS], + entity_picture_fn=lambda channel: channel[ATTR_ICON], + attributes_fn=None, + ), ] diff --git a/homeassistant/components/youtube/strings.json b/homeassistant/components/youtube/strings.json index d664e2f15e7..78ca0532459 100644 --- a/homeassistant/components/youtube/strings.json +++ b/homeassistant/components/youtube/strings.json @@ -10,7 +10,8 @@ "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "wrong_account": "Wrong account: please authenticate with the right account." 
}, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", @@ -46,7 +47,8 @@ "published_at": { "name": "Published at" } } }, - "subscribers": { "name": "Subscribers" } + "subscribers": { "name": "Subscribers" }, + "views": { "name": "Views" } } } } diff --git a/homeassistant/components/zabbix/__init__.py b/homeassistant/components/zabbix/__init__.py index 851af54da32..d9bab3e6fe4 100644 --- a/homeassistant/components/zabbix/__init__.py +++ b/homeassistant/components/zabbix/__init__.py @@ -34,13 +34,14 @@ from homeassistant.helpers.entityfilter import ( ) from homeassistant.helpers.typing import ConfigType +from .const import DOMAIN + _LOGGER = logging.getLogger(__name__) CONF_PUBLISH_STATES_HOST = "publish_states_host" DEFAULT_SSL = False DEFAULT_PATH = "zabbix" -DOMAIN = "zabbix" TIMEOUT = 5 RETRY_DELAY = 20 @@ -84,7 +85,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: try: zapi = ZabbixAPI(url=url, user=username, password=password) - _LOGGER.info("Connected to Zabbix API Version %s", zapi.api_version()) + _LOGGER.debug("Connected to Zabbix API Version %s", zapi.api_version()) except ZabbixAPIException as login_exception: _LOGGER.error("Unable to login to the Zabbix API: %s", login_exception) return False diff --git a/homeassistant/components/zabbix/const.py b/homeassistant/components/zabbix/const.py new file mode 100644 index 00000000000..5f710381f38 --- /dev/null +++ b/homeassistant/components/zabbix/const.py @@ -0,0 +1,3 @@ +"""Constants for Zabbix.""" + +DOMAIN = "zabbix" diff --git a/homeassistant/components/zabbix/manifest.json b/homeassistant/components/zabbix/manifest.json index d1823051636..9c7171bea46 100644 --- a/homeassistant/components/zabbix/manifest.json +++ b/homeassistant/components/zabbix/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/zabbix", "iot_class": "local_polling", "loggers": ["pyzabbix"], + "quality_scale": "legacy", "requirements": ["py-zabbix==1.1.7"] } diff --git a/homeassistant/components/zabbix/sensor.py b/homeassistant/components/zabbix/sensor.py index 2187deb22e8..f5d96f106cb 100644 --- a/homeassistant/components/zabbix/sensor.py +++ b/homeassistant/components/zabbix/sensor.py @@ -19,7 +19,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType -from .. import zabbix +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -52,11 +52,11 @@ def setup_platform( """Set up the Zabbix sensor platform.""" sensors: list[ZabbixTriggerCountSensor] = [] - if not (zapi := hass.data[zabbix.DOMAIN]): + if not (zapi := hass.data[DOMAIN]): _LOGGER.error("Zabbix integration hasn't been loaded? zapi is None") return - _LOGGER.info("Connected to Zabbix API Version %s", zapi.api_version()) + _LOGGER.debug("Connected to Zabbix API Version %s", zapi.api_version()) # The following code seems overly complex. Need to think about this... 
if trigger_conf := config.get(_CONF_TRIGGERS): diff --git a/homeassistant/components/zengge/manifest.json b/homeassistant/components/zengge/manifest.json index 5a4525079da..03d989c5f3b 100644 --- a/homeassistant/components/zengge/manifest.json +++ b/homeassistant/components/zengge/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/zengge", "iot_class": "local_polling", "loggers": ["zengge"], + "quality_scale": "legacy", "requirements": ["bluepy==1.3.0", "zengge==0.2"] } diff --git a/homeassistant/components/zeroconf/__init__.py b/homeassistant/components/zeroconf/__init__.py index bbc89e77a76..449c2ccef91 100644 --- a/homeassistant/components/zeroconf/__init__.py +++ b/homeassistant/components/zeroconf/__init__.py @@ -33,6 +33,8 @@ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.data_entry_flow import BaseServiceInfo from homeassistant.helpers import discovery_flow, instance_id import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.discovery_flow import DiscoveryKey +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.helpers.typing import ConfigType from homeassistant.loader import ( @@ -379,11 +381,31 @@ class ZeroconfDiscovery: self.zeroconf, types, handlers=[self.async_service_update] ) + async_dispatcher_connect( + self.hass, + config_entries.signal_discovered_config_entry_removed(DOMAIN), + self._handle_config_entry_removed, + ) + async def async_stop(self) -> None: """Cancel the service browser and stop processing the queue.""" if self.async_service_browser: await self.async_service_browser.async_cancel() + @callback + def _handle_config_entry_removed( + self, + entry: config_entries.ConfigEntry, + ) -> None: + """Handle config entry changes.""" + for discovery_key in entry.discovery_keys[DOMAIN]: + if discovery_key.version != 1: + continue + _type = discovery_key.key[0] + name = discovery_key.key[1] + _LOGGER.debug("Rediscover service %s.%s", _type, name) + self._async_service_update(self.zeroconf, _type, name) + def _async_dismiss_discoveries(self, name: str) -> None: """Dismiss all discoveries for the given name.""" for flow in self.hass.config_entries.flow.async_progress_by_init_data_type( @@ -408,10 +430,20 @@ class ZeroconfDiscovery: state_change, ) - if state_change == ServiceStateChange.Removed: + if state_change is ServiceStateChange.Removed: self._async_dismiss_discoveries(name) return + self._async_service_update(zeroconf, service_type, name) + + @callback + def _async_service_update( + self, + zeroconf: HaZeroconf, + service_type: str, + name: str, + ) -> None: + """Service state added or changed.""" try: async_service_info = AsyncServiceInfo(service_type, name) except BadTypeInNameException as ex: @@ -453,6 +485,11 @@ class ZeroconfDiscovery: return _LOGGER.debug("Discovered new device %s %s", name, info) props: dict[str, str | None] = info.properties + discovery_key = DiscoveryKey( + domain=DOMAIN, + key=(info.type, info.name), + version=1, + ) domain = None # If we can handle it as a HomeKit discovery, we do that here. 
@@ -467,6 +504,7 @@ class ZeroconfDiscovery: homekit_discovery.domain, {"source": config_entries.SOURCE_HOMEKIT}, info, + discovery_key=discovery_key, ) # Continue on here as homekit_controller # still needs to get updates on devices @@ -502,7 +540,9 @@ class ZeroconfDiscovery: continue matcher_domain = matcher[ATTR_DOMAIN] - context = { + # Create a type annotated regular dict since this is a hot path and creating + # a regular dict is slightly cheaper than calling ConfigFlowContext + context: config_entries.ConfigFlowContext = { "source": config_entries.SOURCE_ZEROCONF, } if domain: @@ -515,6 +555,7 @@ class ZeroconfDiscovery: matcher_domain, context, info, + discovery_key=discovery_key, ) diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 0a76af3b9c2..9ad92bb4bc7 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.132.2"] + "requirements": ["zeroconf==0.136.2"] } diff --git a/homeassistant/components/zeroconf/usage.py b/homeassistant/components/zeroconf/usage.py index b9d51cd3c36..8ddfdbd592d 100644 --- a/homeassistant/components/zeroconf/usage.py +++ b/homeassistant/components/zeroconf/usage.py @@ -4,7 +4,7 @@ from typing import Any import zeroconf -from homeassistant.helpers.frame import report +from homeassistant.helpers.frame import ReportBehavior, report_usage from .models import HaZeroconf @@ -16,14 +16,14 @@ def install_multiple_zeroconf_catcher(hass_zc: HaZeroconf) -> None: """ def new_zeroconf_new(self: zeroconf.Zeroconf, *k: Any, **kw: Any) -> HaZeroconf: - report( + report_usage( ( "attempted to create another Zeroconf instance. 
Please use the shared" " Zeroconf via await" " homeassistant.components.zeroconf.async_get_instance(hass)" ), exclude_integrations={"zeroconf"}, - error_if_core=False, + core_behavior=ReportBehavior.LOG, ) return hass_zc diff --git a/homeassistant/components/zerproc/light.py b/homeassistant/components/zerproc/light.py index 71bb38dd80f..ed6ed03ad27 100644 --- a/homeassistant/components/zerproc/light.py +++ b/homeassistant/components/zerproc/light.py @@ -147,7 +147,7 @@ class ZerprocLight(LightEntity): self._attr_available = False return if not self.available: - _LOGGER.info("Reconnected to %s", self._light.address) + _LOGGER.warning("Reconnected to %s", self._light.address) self._attr_available = True self._attr_is_on = state.is_on hsv = color_util.color_RGB_to_hsv(*state.color) diff --git a/homeassistant/components/zestimate/manifest.json b/homeassistant/components/zestimate/manifest.json index a881adf503d..a787a9b1099 100644 --- a/homeassistant/components/zestimate/manifest.json +++ b/homeassistant/components/zestimate/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/zestimate", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["xmltodict==0.13.0"] } diff --git a/homeassistant/components/zeversolar/manifest.json b/homeassistant/components/zeversolar/manifest.json index af197b3aa7c..18bab34c04e 100644 --- a/homeassistant/components/zeversolar/manifest.json +++ b/homeassistant/components/zeversolar/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/zeversolar", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["zeversolar==0.3.1"] + "requirements": ["zeversolar==0.3.2"] } diff --git a/homeassistant/components/zha/alarm_control_panel.py b/homeassistant/components/zha/alarm_control_panel.py index c54d7c7ab2d..734683e5497 100644 --- a/homeassistant/components/zha/alarm_control_panel.py +++ b/homeassistant/components/zha/alarm_control_panel.py @@ -4,9 +4,14 @@ from __future__ import annotations import functools +from zha.application.platforms.alarm_control_panel.const import ( + AlarmState as ZHAAlarmState, +) + from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry @@ -23,6 +28,20 @@ from .helpers import ( get_zha_data, ) +ZHA_STATE_TO_ALARM_STATE_MAP = { + ZHAAlarmState.DISARMED.value: AlarmControlPanelState.DISARMED, + ZHAAlarmState.ARMED_HOME.value: AlarmControlPanelState.ARMED_HOME, + ZHAAlarmState.ARMED_AWAY.value: AlarmControlPanelState.ARMED_AWAY, + ZHAAlarmState.ARMED_NIGHT.value: AlarmControlPanelState.ARMED_NIGHT, + ZHAAlarmState.ARMED_VACATION.value: AlarmControlPanelState.ARMED_VACATION, + ZHAAlarmState.ARMED_CUSTOM_BYPASS.value: AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ZHAAlarmState.PENDING.value: AlarmControlPanelState.PENDING, + ZHAAlarmState.ARMING.value: AlarmControlPanelState.ARMING, + ZHAAlarmState.DISARMING.value: AlarmControlPanelState.DISARMING, + ZHAAlarmState.TRIGGERED.value: AlarmControlPanelState.TRIGGERED, + ZHAAlarmState.UNKNOWN.value: None, +} + async def async_setup_entry( hass: HomeAssistant, @@ -94,6 +113,6 @@ class ZHAAlarmControlPanel(ZHAEntity, AlarmControlPanelEntity): self.async_write_ha_state() @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the entity.""" - return 
self.entity_data.entity.state["state"] + return ZHA_STATE_TO_ALARM_STATE_MAP.get(self.entity_data.entity.state["state"]) diff --git a/homeassistant/components/zha/climate.py b/homeassistant/components/zha/climate.py index f4fb58c254a..af9f56cd7dc 100644 --- a/homeassistant/components/zha/climate.py +++ b/homeassistant/components/zha/climate.py @@ -88,7 +88,6 @@ class Thermostat(ZHAEntity, ClimateEntity): _attr_precision = PRECISION_TENTHS _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key: str = "thermostat" - _enable_turn_on_off_backwards_compatibility = False def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: """Initialize the ZHA thermostat entity.""" @@ -120,8 +119,6 @@ class Thermostat(ZHAEntity, ClimateEntity): features |= ClimateEntityFeature.FAN_MODE if ZHAClimateEntityFeature.SWING_MODE in zha_features: features |= ClimateEntityFeature.SWING_MODE - if ZHAClimateEntityFeature.AUX_HEAT in zha_features: - features |= ClimateEntityFeature.AUX_HEAT if ZHAClimateEntityFeature.TURN_OFF in zha_features: features |= ClimateEntityFeature.TURN_OFF if ZHAClimateEntityFeature.TURN_ON in zha_features: diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index 3a7b54652d9..9c515c315b7 100644 --- a/homeassistant/components/zha/config_flow.py +++ b/homeassistant/components/zha/config_flow.py @@ -33,6 +33,7 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.selector import FileSelector, FileSelectorConfig from homeassistant.util import dt as dt_util @@ -69,8 +70,17 @@ UPLOADED_BACKUP_FILE = "uploaded_backup_file" REPAIR_MY_URL = "https://my.home-assistant.io/redirect/repairs/" -DEFAULT_ZHA_ZEROCONF_PORT = 6638 -ESPHOME_API_PORT = 6053 +LEGACY_ZEROCONF_PORT = 6638 +LEGACY_ZEROCONF_ESPHOME_API_PORT = 6053 + +ZEROCONF_SERVICE_TYPE = "_zigbee-coordinator._tcp.local." 
+ZEROCONF_PROPERTIES_SCHEMA = vol.Schema( + { + vol.Required("radio_type"): vol.All(str, vol.In([t.name for t in RadioType])), + vol.Required("serial_number"): str, + }, + extra=vol.ALLOW_EXTRA, +) def _format_backup_choice( @@ -104,25 +114,26 @@ async def list_serial_ports(hass: HomeAssistant) -> list[ListPortInfo]: yellow_radio.description = "Yellow Zigbee module" yellow_radio.manufacturer = "Nabu Casa" - # Present the multi-PAN addon as a setup option, if it's available - multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( - hass - ) - - try: - addon_info = await multipan_manager.async_get_addon_info() - except (AddonError, KeyError): - addon_info = None - - if addon_info is not None and addon_info.state != AddonState.NOT_INSTALLED: - addon_port = ListPortInfo( - device=silabs_multiprotocol_addon.get_zigbee_socket(), - skip_link_detection=True, + if is_hassio(hass): + # Present the multi-PAN addon as a setup option, if it's available + multipan_manager = ( + await silabs_multiprotocol_addon.get_multiprotocol_addon_manager(hass) ) - addon_port.description = "Multiprotocol add-on" - addon_port.manufacturer = "Nabu Casa" - ports.append(addon_port) + try: + addon_info = await multipan_manager.async_get_addon_info() + except (AddonError, KeyError): + addon_info = None + + if addon_info is not None and addon_info.state != AddonState.NOT_INSTALLED: + addon_port = ListPortInfo( + device=silabs_multiprotocol_addon.get_zigbee_socket(), + skip_link_detection=True, + ) + + addon_port.description = "Multiprotocol add-on" + addon_port.manufacturer = "Nabu Casa" + ports.append(addon_port) return ports @@ -131,6 +142,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow): """Mixin for common ZHA flow steps and forms.""" _hass: HomeAssistant + _title: str def __init__(self) -> None: """Initialize flow instance.""" @@ -138,7 +150,6 @@ class BaseZhaFlow(ConfigEntryBaseFlow): self._hass = None # type: ignore[assignment] self._radio_mgr = ZhaRadioManager() - self._title: str | None = None @property def hass(self) -> HomeAssistant: @@ -153,7 +164,6 @@ class BaseZhaFlow(ConfigEntryBaseFlow): async def _async_create_radio_entry(self) -> ConfigFlowResult: """Create a config entry with the current flow state.""" - assert self._title is not None assert self._radio_mgr.radio_type is not None assert self._radio_mgr.device_path is not None assert self._radio_mgr.device_settings is not None @@ -616,34 +626,65 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle zeroconf discovery.""" - # Hostname is format: livingroom.local. 
- local_name = discovery_info.hostname[:-1] - port = discovery_info.port or DEFAULT_ZHA_ZEROCONF_PORT + # Transform legacy zeroconf discovery into the new format + if discovery_info.type != ZEROCONF_SERVICE_TYPE: + port = discovery_info.port or LEGACY_ZEROCONF_PORT + name = discovery_info.name - # Fix incorrect port for older TubesZB devices - if "tube" in local_name and port == ESPHOME_API_PORT: - port = DEFAULT_ZHA_ZEROCONF_PORT + # Fix incorrect port for older TubesZB devices + if "tube" in name and port == LEGACY_ZEROCONF_ESPHOME_API_PORT: + port = LEGACY_ZEROCONF_PORT - if "radio_type" in discovery_info.properties: - self._radio_mgr.radio_type = self._radio_mgr.parse_radio_type( - discovery_info.properties["radio_type"] + # Determine the radio type + if "radio_type" in discovery_info.properties: + radio_type = discovery_info.properties["radio_type"] + elif "efr32" in name: + radio_type = RadioType.ezsp.name + elif "zigate" in name: + radio_type = RadioType.zigate.name + else: + radio_type = RadioType.znp.name + + fallback_title = name.split("._", 1)[0] + title = discovery_info.properties.get("name", fallback_title) + + discovery_info = zeroconf.ZeroconfServiceInfo( + ip_address=discovery_info.ip_address, + ip_addresses=discovery_info.ip_addresses, + port=port, + hostname=discovery_info.hostname, + type=ZEROCONF_SERVICE_TYPE, + name=f"{title}.{ZEROCONF_SERVICE_TYPE}", + properties={ + "radio_type": radio_type, + # To maintain backwards compatibility + "serial_number": discovery_info.hostname.removesuffix(".local."), + }, ) - elif "efr32" in local_name: - self._radio_mgr.radio_type = RadioType.ezsp - else: - self._radio_mgr.radio_type = RadioType.znp - node_name = local_name.removesuffix(".local") - device_path = f"socket://{discovery_info.host}:{port}" + try: + discovery_props = ZEROCONF_PROPERTIES_SCHEMA(discovery_info.properties) + except vol.Invalid: + return self.async_abort(reason="invalid_zeroconf_data") + + radio_type = self._radio_mgr.parse_radio_type(discovery_props["radio_type"]) + device_path = f"socket://{discovery_info.host}:{discovery_info.port}" + title = discovery_info.name.removesuffix(f".{ZEROCONF_SERVICE_TYPE}") await self._set_unique_id_and_update_ignored_flow( - unique_id=node_name, + unique_id=discovery_props["serial_number"], device_path=device_path, ) - self.context["title_placeholders"] = {CONF_NAME: node_name} - self._title = device_path + self.context["title_placeholders"] = {CONF_NAME: title} + self._title = title self._radio_mgr.device_path = device_path + self._radio_mgr.radio_type = radio_type + self._radio_mgr.device_settings = { + CONF_DEVICE_PATH: device_path, + CONF_BAUDRATE: 115200, + CONF_FLOW_CONTROL: None, + } return await self.async_step_confirm() @@ -681,8 +722,6 @@ class ZhaOptionsFlowHandler(BaseZhaFlow, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" super().__init__() - self.config_entry = config_entry - self._radio_mgr.device_path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] self._radio_mgr.device_settings = config_entry.data[CONF_DEVICE] self._radio_mgr.radio_type = RadioType[config_entry.data[CONF_RADIO_TYPE]] diff --git a/homeassistant/components/zha/const.py b/homeassistant/components/zha/const.py index 3986a99cf3f..270a3d3fb66 100644 --- a/homeassistant/components/zha/const.py +++ b/homeassistant/components/zha/const.py @@ -43,12 +43,12 @@ CONF_CUSTOM_QUIRKS_PATH = "custom_quirks_path" CONF_DEFAULT_LIGHT_TRANSITION = "default_light_transition" 
CONF_ENABLE_ENHANCED_LIGHT_TRANSITION = "enhanced_light_transition" CONF_ENABLE_LIGHT_TRANSITIONING_FLAG = "light_transitioning_flag" -CONF_ALWAYS_PREFER_XY_COLOR_MODE = "always_prefer_xy_color_mode" CONF_GROUP_MEMBERS_ASSUME_STATE = "group_members_assume_state" CONF_ENABLE_IDENTIFY_ON_JOIN = "enable_identify_on_join" CONF_CONSIDER_UNAVAILABLE_MAINS = "consider_unavailable_mains" CONF_CONSIDER_UNAVAILABLE_BATTERY = "consider_unavailable_battery" +CONF_ENABLE_MAINS_STARTUP_POLLING = "enable_mains_startup_polling" CONF_ZIGPY = "zigpy_config" CONF_DEVICE_CONFIG = "device_config" diff --git a/homeassistant/components/zha/device_tracker.py b/homeassistant/components/zha/device_tracker.py index 247219777f4..fc374f6c44d 100644 --- a/homeassistant/components/zha/device_tracker.py +++ b/homeassistant/components/zha/device_tracker.py @@ -4,7 +4,7 @@ from __future__ import annotations import functools -from homeassistant.components.device_tracker import ScannerEntity, SourceType +from homeassistant.components.device_tracker import ScannerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -53,11 +53,6 @@ class ZHADeviceScannerEntity(ScannerEntity, ZHAEntity): """Return true if the device is connected to the network.""" return self.entity_data.entity.is_connected - @property - def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" - return SourceType.ROUTER - @property def battery_level(self) -> int | None: """Return the battery level of the device. diff --git a/homeassistant/components/zha/device_trigger.py b/homeassistant/components/zha/device_trigger.py index a134d2aa59b..8e8509e62a5 100644 --- a/homeassistant/components/zha/device_trigger.py +++ b/homeassistant/components/zha/device_trigger.py @@ -3,8 +3,8 @@ import voluptuous as vol from zha.application.const import ZHA_EVENT -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event as event_trigger diff --git a/homeassistant/components/zha/diagnostics.py b/homeassistant/components/zha/diagnostics.py index ad73978d24d..234f10d59ae 100644 --- a/homeassistant/components/zha/diagnostics.py +++ b/homeassistant/components/zha/diagnostics.py @@ -23,7 +23,7 @@ from zigpy.profiles import PROFILES from zigpy.types import Channels from zigpy.zcl import Cluster -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID, CONF_NAME, CONF_UNIQUE_ID from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/zha/entity.py b/homeassistant/components/zha/entity.py index 348e545f1c4..3e3d0642ca2 100644 --- a/homeassistant/components/zha/entity.py +++ b/homeassistant/components/zha/entity.py @@ -4,10 +4,11 @@ from __future__ import annotations import asyncio from collections.abc import Callable -import functools +from functools import partial import logging from typing import Any +from propcache import cached_property from zha.mixins import LogMixin from homeassistant.const import ATTR_MANUFACTURER, ATTR_MODEL, ATTR_NAME, EntityCategory @@ -16,6 +17,7 @@ 
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE, DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.typing import UNDEFINED, UndefinedType from .const import DOMAIN from .helpers import SIGNAL_REMOVE_ENTITIES, EntityData, convert_zha_error_to_ha_error @@ -43,15 +45,6 @@ class ZHAEntity(LogMixin, RestoreEntity, Entity): meta = self.entity_data.entity.info_object self._attr_unique_id = meta.unique_id - if meta.translation_key is not None: - self._attr_translation_key = meta.translation_key - elif meta.fallback_name is not None: - # Only custom quirks will create entities with just a fallback name! - # - # This is to allow local development and to register niche devices, since - # their translation_key will probably never be added to `zha/strings.json`. - self._attr_name = meta.fallback_name - if meta.entity_category is not None: self._attr_entity_category = EntityCategory(meta.entity_category) @@ -59,6 +52,23 @@ class ZHAEntity(LogMixin, RestoreEntity, Entity): meta.entity_registry_enabled_default ) + if meta.translation_key is not None: + self._attr_translation_key = meta.translation_key + + @cached_property + def name(self) -> str | UndefinedType | None: + """Return the name of the entity.""" + meta = self.entity_data.entity.info_object + original_name = super().name + + if original_name not in (UNDEFINED, None) or meta.fallback_name is None: + return original_name + + # This is to allow local development and to register niche devices, since + # their translation_key will probably never be added to `zha/strings.json`. + self._attr_name = meta.fallback_name + return super().name + @property def available(self) -> bool: """Return entity availability.""" @@ -102,7 +112,7 @@ class ZHAEntity(LogMixin, RestoreEntity, Entity): async_dispatcher_connect( self.hass, remove_signal, - functools.partial(self.async_remove, force_remove=True), + partial(self.async_remove, force_remove=True), ) ) self.entity_data.device_proxy.gateway_proxy.register_entity_reference( diff --git a/homeassistant/components/zha/fan.py b/homeassistant/components/zha/fan.py index 767c0d4cfb7..73b23e97387 100644 --- a/homeassistant/components/zha/fan.py +++ b/homeassistant/components/zha/fan.py @@ -47,7 +47,6 @@ class ZhaFan(FanEntity, ZHAEntity): """Representation of a ZHA fan.""" _attr_translation_key: str = "fan" - _enable_turn_on_off_backwards_compatibility = False def __init__(self, entity_data: EntityData) -> None: """Initialize the ZHA fan.""" diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py index a5446af7e76..2440e18cf53 100644 --- a/homeassistant/components/zha/helpers.py +++ b/homeassistant/components/zha/helpers.py @@ -14,7 +14,7 @@ import logging import re import time from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, ParamSpec, TypeVar, cast +from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, cast from zoneinfo import ZoneInfo import voluptuous as vol @@ -104,7 +104,7 @@ from homeassistant.const import ( ATTR_NAME, Platform, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import ( config_validation as cv, @@ -140,7 +140,6 @@ from .const import ( 
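# --- Editor's note (illustrative sketch, not part of the patch) ---------------
# The ZHAEntity.name override earlier in this hunk only falls back to the
# quirk-supplied fallback_name when neither a translation key nor an explicit
# name produced a value. Simplified stand-in below (UNDEFINED handling and the
# real Entity base class are omitted; class names here are invented):
from functools import cached_property


class EntityBase:
    _attr_name: str | None = None

    @property
    def name(self) -> str | None:
        # stands in for Entity.name, which resolves _attr_name / translations
        return self._attr_name


class SketchZhaEntity(EntityBase):
    def __init__(self, translated_name: str | None, fallback_name: str | None) -> None:
        self._fallback_name = fallback_name
        if translated_name is not None:
            self._attr_name = translated_name

    @cached_property
    def name(self) -> str | None:
        original_name = super().name
        if original_name is not None or self._fallback_name is None:
            return original_name
        # Only quirk-built entities with just a fallback name end up here.
        self._attr_name = self._fallback_name
        return super().name


print(SketchZhaEntity("Translated name", "Quirk fallback").name)  # Translated name
print(SketchZhaEntity(None, "Quirk fallback").name)               # Quirk fallback
# ------------------------------------------------------------------------------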
CONF_ALARM_ARM_REQUIRES_CODE, CONF_ALARM_FAILED_TRIES, CONF_ALARM_MASTER_CODE, - CONF_ALWAYS_PREFER_XY_COLOR_MODE, CONF_BAUDRATE, CONF_CONSIDER_UNAVAILABLE_BATTERY, CONF_CONSIDER_UNAVAILABLE_MAINS, @@ -150,6 +149,7 @@ from .const import ( CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, CONF_ENABLE_IDENTIFY_ON_JOIN, CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, + CONF_ENABLE_MAINS_STARTUP_POLLING, CONF_ENABLE_QUIRKS, CONF_FLOW_CONTROL, CONF_GROUP_MEMBERS_ASSUME_STATE, @@ -170,10 +170,7 @@ if TYPE_CHECKING: from .entity import ZHAEntity from .update import ZHAFirmwareUpdateCoordinator - _LogFilterType = Filter | Callable[[LogRecord], bool] - -_P = ParamSpec("_P") -_EntityT = TypeVar("_EntityT", bound="ZHAEntity") + type _LogFilterType = Filter | Callable[[LogRecord], bool] _LOGGER = logging.getLogger(__name__) @@ -498,7 +495,7 @@ class ZHAGatewayProxy(EventBase): self.hass = hass self.config_entry = config_entry self.gateway = gateway - self.device_proxies: dict[str, ZHADeviceProxy] = {} + self.device_proxies: dict[EUI64, ZHADeviceProxy] = {} self.group_proxies: dict[int, ZHAGroupProxy] = {} self._ha_entity_refs: collections.defaultdict[EUI64, list[EntityReference]] = ( collections.defaultdict(list) @@ -512,6 +509,12 @@ class ZHAGatewayProxy(EventBase): self._unsubs: list[Callable[[], None]] = [] self._unsubs.append(self.gateway.on_all_events(self._handle_event_protocol)) self._reload_task: asyncio.Task | None = None + config_entry.async_on_unload( + self.hass.bus.async_listen( + er.EVENT_ENTITY_REGISTRY_UPDATED, + self._handle_entity_registry_updated, + ) + ) @property def ha_entity_refs(self) -> collections.defaultdict[EUI64, list[EntityReference]]: @@ -535,6 +538,46 @@ class ZHAGatewayProxy(EventBase): ) ) + async def _handle_entity_registry_updated( + self, event: Event[er.EventEntityRegistryUpdatedData] + ) -> None: + """Handle when entity registry updated.""" + entity_id = event.data["entity_id"] + entity_entry: er.RegistryEntry | None = er.async_get(self.hass).async_get( + entity_id + ) + if ( + entity_entry is None + or entity_entry.config_entry_id != self.config_entry.entry_id + or entity_entry.device_id is None + ): + return + device_entry: dr.DeviceEntry | None = dr.async_get(self.hass).async_get( + entity_entry.device_id + ) + assert device_entry + + ieee_address = next( + identifier + for domain, identifier in device_entry.identifiers + if domain == DOMAIN + ) + assert ieee_address + + ieee = EUI64.convert(ieee_address) + + assert ieee in self.device_proxies + + zha_device_proxy = self.device_proxies[ieee] + entity_key = (entity_entry.domain, entity_entry.unique_id) + if entity_key not in zha_device_proxy.device.platform_entities: + return + platform_entity = zha_device_proxy.device.platform_entities[entity_key] + if entity_entry.disabled: + platform_entity.disable() + else: + platform_entity.enable() + async def async_initialize_devices_and_entities(self) -> None: """Initialize devices and entities.""" for device in self.gateway.devices.values(): @@ -620,9 +663,11 @@ class ZHAGatewayProxy(EventBase): ATTR_NWK: str(event.device_info.nwk), ATTR_IEEE: str(event.device_info.ieee), DEVICE_PAIRING_STATUS: event.device_info.pairing_status.name, - ATTR_MODEL: event.device_info.model - if event.device_info.model - else UNKNOWN_MODEL, + ATTR_MODEL: ( + event.device_info.model + if event.device_info.model + else UNKNOWN_MODEL + ), ATTR_MANUFACTURER: manuf if manuf else UNKNOWN_MANUFACTURER, ATTR_SIGNATURE: event.device_info.signature, }, @@ -802,21 +847,24 @@ class ZHAGatewayProxy(EventBase): ) def 
_cleanup_group_entity_registry_entries( - self, zigpy_group: zigpy.group.Group + self, zha_group_proxy: ZHAGroupProxy ) -> None: """Remove entity registry entries for group entities when the groups are removed from HA.""" # first we collect the potential unique ids for entities that could be created from this group possible_entity_unique_ids = [ - f"{domain}_zha_group_0x{zigpy_group.group_id:04x}" + f"{domain}_zha_group_0x{zha_group_proxy.group.group_id:04x}" for domain in GROUP_ENTITY_DOMAINS ] # then we get all group entity entries tied to the coordinator entity_registry = er.async_get(self.hass) - assert self.coordinator_zha_device + assert self.gateway.coordinator_zha_device + coordinator_proxy = self.device_proxies[ + self.gateway.coordinator_zha_device.ieee + ] all_group_entity_entries = er.async_entries_for_device( entity_registry, - self.coordinator_zha_device.device_id, + coordinator_proxy.device_id, include_disabled_entities=True, ) @@ -922,9 +970,7 @@ class LogRelayHandler(logging.Handler): hass_path: str = HOMEASSISTANT_PATH[0] config_dir = self.hass.config.config_dir self.paths_re = re.compile( - r"(?:{})/(.*)".format( - "|".join([re.escape(x) for x in (hass_path, config_dir)]) - ) + rf"(?:{re.escape(hass_path)}|{re.escape(config_dir)})/(.*)" ) def emit(self, record: LogRecord) -> None: @@ -1025,9 +1071,9 @@ def cluster_command_schema_to_vol_schema(schema: CommandSchema) -> vol.Schema: """Convert a cluster command schema to a voluptuous schema.""" return vol.Schema( { - vol.Optional(field.name) - if field.optional - else vol.Required(field.name): schema_type_to_vol(field.type) + ( + vol.Optional(field.name) if field.optional else vol.Required(field.name) + ): schema_type_to_vol(field.type) for field in schema.fields } ) @@ -1107,7 +1153,7 @@ def async_cluster_exists(hass: HomeAssistant, cluster_id, skip_coordinator=True) @callback -async def async_add_entities( +def async_add_entities( _async_add_entities: AddEntitiesCallback, entity_class: type[ZHAEntity], entities: list[EntityData], @@ -1117,7 +1163,7 @@ async def async_add_entities( if not entities: return - entities_to_add = [] + entities_to_add: list[ZHAEntity] = [] for entity_data in entities: try: entities_to_add.append(entity_class(entity_data)) @@ -1129,6 +1175,9 @@ async def async_add_entities( "Error while adding entity from entity data: %s", entity_data ) _async_add_entities(entities_to_add, update_before_add=False) + for entity in entities_to_add: + if not entity.enabled: + entity.entity_data.entity.disable() entities.clear() @@ -1152,7 +1201,6 @@ CONF_ZHA_OPTIONS_SCHEMA = vol.Schema( ), vol.Required(CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, default=False): cv.boolean, vol.Required(CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, default=True): cv.boolean, - vol.Required(CONF_ALWAYS_PREFER_XY_COLOR_MODE, default=True): cv.boolean, vol.Required(CONF_GROUP_MEMBERS_ASSUME_STATE, default=True): cv.boolean, vol.Required(CONF_ENABLE_IDENTIFY_ON_JOIN, default=True): cv.boolean, vol.Optional( @@ -1163,7 +1211,9 @@ CONF_ZHA_OPTIONS_SCHEMA = vol.Schema( CONF_CONSIDER_UNAVAILABLE_BATTERY, default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, ): cv.positive_int, - } + vol.Required(CONF_ENABLE_MAINS_STARTUP_POLLING, default=True): cv.boolean, + }, + extra=vol.REMOVE_EXTRA, ) CONF_ZHA_ALARM_SCHEMA = vol.Schema( @@ -1197,7 +1247,7 @@ def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData: # deep copy the yaml config to avoid modifying the original and to safely # pass it to the ZHA library app_config = 
copy.deepcopy(ha_zha_data.yaml_config.get(CONF_ZIGPY, {})) - database = app_config.get( + database = ha_zha_data.yaml_config.get( CONF_DATABASE, hass.config.path(DEFAULT_DATABASE_NAME), ) @@ -1227,13 +1277,13 @@ def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData: enable_light_transitioning_flag=zha_options.get( CONF_ENABLE_LIGHT_TRANSITIONING_FLAG ), - always_prefer_xy_color_mode=zha_options.get(CONF_ALWAYS_PREFER_XY_COLOR_MODE), group_members_assume_state=zha_options.get(CONF_GROUP_MEMBERS_ASSUME_STATE), ) device_options: DeviceOptions = DeviceOptions( enable_identify_on_join=zha_options.get(CONF_ENABLE_IDENTIFY_ON_JOIN), consider_unavailable_mains=zha_options.get(CONF_CONSIDER_UNAVAILABLE_MAINS), consider_unavailable_battery=zha_options.get(CONF_CONSIDER_UNAVAILABLE_BATTERY), + enable_mains_startup_polling=zha_options.get(CONF_ENABLE_MAINS_STARTUP_POLLING), ) acp_options: AlarmControlPanelOptions = AlarmControlPanelOptions( master_code=ha_acp_options.get(CONF_ALARM_MASTER_CODE), @@ -1274,7 +1324,7 @@ def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData: ) -def convert_zha_error_to_ha_error( +def convert_zha_error_to_ha_error[**_P, _EntityT: ZHAEntity]( func: Callable[Concatenate[_EntityT, _P], Awaitable[None]], ) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: """Decorate ZHA commands and re-raises ZHAException as HomeAssistantError.""" diff --git a/homeassistant/components/zha/icons.json b/homeassistant/components/zha/icons.json index 9b060e8105a..6ba4aab18ab 100644 --- a/homeassistant/components/zha/icons.json +++ b/homeassistant/components/zha/icons.json @@ -45,6 +45,15 @@ "maximum_level": { "default": "mdi:brightness-percent" }, + "default_level_local": { + "default": "mdi:brightness-percent" + }, + "default_level_remote": { + "default": "mdi:brightness-percent" + }, + "state_after_power_restored": { + "default": "mdi:brightness-percent" + }, "auto_off_timer": { "default": "mdi:timer" }, @@ -86,6 +95,18 @@ }, "presence_detection_timeout": { "default": "mdi:timer-edit" + }, + "exercise_trigger_time": { + "default": "mdi:clock" + }, + "external_temperature_sensor": { + "default": "mdi:thermometer" + }, + "load_room_mean": { + "default": "mdi:scale-balance" + }, + "regulation_setpoint_offset": { + "default": "mdi:thermostat" } }, "select": { @@ -94,6 +115,15 @@ }, "keypad_lockout": { "default": "mdi:lock" + }, + "exercise_day_of_week": { + "default": "mdi:wrench-clock" + }, + "off_led_color": { + "default": "mdi:palette-outline" + }, + "on_led_color": { + "default": "mdi:palette" } }, "sensor": { @@ -132,6 +162,15 @@ }, "hooks_state": { "default": "mdi:hook" + }, + "open_window_detected": { + "default": "mdi:window-open" + }, + "load_estimate": { + "default": "mdi:scale-balance" + }, + "preheat_time": { + "default": "mdi:radiator" } }, "switch": { @@ -158,21 +197,63 @@ }, "hooks_locked": { "default": "mdi:lock" + }, + "external_window_sensor": { + "default": "mdi:window-open" + }, + "use_internal_window_detection": { + "default": "mdi:window-open" + }, + "prioritize_external_temperature_sensor": { + "default": "mdi:thermometer" + }, + "heat_available": { + "default": "mdi:water-boiler" + }, + "use_load_balancing": { + "default": "mdi:scale-balance" + }, + "double_up_full": { + "default": "mdi:gesture-double-tap" } } }, "services": { - "permit": "mdi:cellphone-link", - "remove": "mdi:cellphone-remove", - "reconfigure_device": "mdi:cellphone-cog", - "set_zigbee_cluster_attribute": "mdi:cog", - 
"issue_zigbee_cluster_command": "mdi:console", - "issue_zigbee_group_command": "mdi:console", - "warning_device_squawk": "mdi:alert", - "warning_device_warn": "mdi:alert", - "clear_lock_user_code": "mdi:lock-remove", - "enable_lock_user_code": "mdi:lock", - "disable_lock_user_code": "mdi:lock-off", - "set_lock_user_code": "mdi:lock" + "permit": { + "service": "mdi:cellphone-link" + }, + "remove": { + "service": "mdi:cellphone-remove" + }, + "reconfigure_device": { + "service": "mdi:cellphone-cog" + }, + "set_zigbee_cluster_attribute": { + "service": "mdi:cog" + }, + "issue_zigbee_cluster_command": { + "service": "mdi:console" + }, + "issue_zigbee_group_command": { + "service": "mdi:console" + }, + "warning_device_squawk": { + "service": "mdi:alert" + }, + "warning_device_warn": { + "service": "mdi:alert" + }, + "clear_lock_user_code": { + "service": "mdi:lock-remove" + }, + "enable_lock_user_code": { + "service": "mdi:lock" + }, + "disable_lock_user_code": { + "service": "mdi:lock-off" + }, + "set_lock_user_code": { + "service": "mdi:lock" + } } } diff --git a/homeassistant/components/zha/light.py b/homeassistant/components/zha/light.py index 4a36030a0dd..2f5d9e9e4c9 100644 --- a/homeassistant/components/zha/light.py +++ b/homeassistant/components/zha/light.py @@ -15,10 +15,9 @@ from zha.application.platforms.light.const import ( from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, - ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, ColorMode, @@ -30,6 +29,7 @@ from homeassistant.const import STATE_ON, Platform from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from .entity import ZHAEntity from .helpers import ( @@ -45,12 +45,7 @@ ZHA_TO_HA_COLOR_MODE = { ZhaColorMode.ONOFF: ColorMode.ONOFF, ZhaColorMode.BRIGHTNESS: ColorMode.BRIGHTNESS, ZhaColorMode.COLOR_TEMP: ColorMode.COLOR_TEMP, - ZhaColorMode.HS: ColorMode.HS, ZhaColorMode.XY: ColorMode.XY, - ZhaColorMode.RGB: ColorMode.RGB, - ZhaColorMode.RGBW: ColorMode.RGBW, - ZhaColorMode.RGBWW: ColorMode.RGBWW, - ZhaColorMode.WHITE: ColorMode.WHITE, } HA_TO_ZHA_COLOR_MODE = {v: k for k, v in ZHA_TO_HA_COLOR_MODE.items()} @@ -134,19 +129,18 @@ class Light(LightEntity, ZHAEntity): return self.entity_data.entity.brightness @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self.entity_data.entity.min_mireds + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self.entity_data.entity.min_mireds + ) @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self.entity_data.entity.max_mireds - - @property - def hs_color(self) -> tuple[float, float] | None: - """Return the hs color value [int, int].""" - return self.entity_data.entity.hs_color + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self.entity_data.entity.max_mireds + ) @property def xy_color(self) -> tuple[float, float] | None: @@ -154,9 +148,13 @@ class Light(LightEntity, ZHAEntity): return self.entity_data.entity.xy_color @property - def 
color_temp(self) -> int | None: - """Return the CT color value in mireds.""" - return self.entity_data.entity.color_temp + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return ( + color_util.color_temperature_mired_to_kelvin(mireds) + if (mireds := self.entity_data.entity.color_temp) + else None + ) @property def color_mode(self) -> ColorMode | None: @@ -178,14 +176,18 @@ class Light(LightEntity, ZHAEntity): @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" + color_temp = ( + color_util.color_temperature_kelvin_to_mired(color_temp_k) + if (color_temp_k := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) + else None + ) await self.entity_data.entity.async_turn_on( transition=kwargs.get(ATTR_TRANSITION), brightness=kwargs.get(ATTR_BRIGHTNESS), effect=kwargs.get(ATTR_EFFECT), flash=kwargs.get(ATTR_FLASH), - color_temp=kwargs.get(ATTR_COLOR_TEMP), + color_temp=color_temp, xy_color=kwargs.get(ATTR_XY_COLOR), - hs_color=kwargs.get(ATTR_HS_COLOR), ) self.async_write_ha_state() @@ -200,14 +202,18 @@ class Light(LightEntity, ZHAEntity): @callback def restore_external_state_attributes(self, state: State) -> None: """Restore entity state.""" + color_temp = ( + color_util.color_temperature_kelvin_to_mired(color_temp_k) + if (color_temp_k := state.attributes.get(ATTR_COLOR_TEMP_KELVIN)) + else None + ) self.entity_data.entity.restore_external_state_attributes( state=(state.state == STATE_ON), off_with_transition=state.attributes.get(OFF_WITH_TRANSITION), off_brightness=state.attributes.get(OFF_BRIGHTNESS), brightness=state.attributes.get(ATTR_BRIGHTNESS), - color_temp=state.attributes.get(ATTR_COLOR_TEMP), + color_temp=color_temp, xy_color=state.attributes.get(ATTR_XY_COLOR), - hs_color=state.attributes.get(ATTR_HS_COLOR), color_mode=( HA_TO_ZHA_COLOR_MODE[ColorMode(state.attributes[ATTR_COLOR_MODE])] if state.attributes.get(ATTR_COLOR_MODE) is not None diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index a5e57fcb1ec..3a301be9b02 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -1,7 +1,7 @@ { "domain": "zha", "name": "Zigbee Home Automation", - "after_dependencies": ["onboarding", "usb"], + "after_dependencies": ["hassio", "onboarding", "usb"], "codeowners": ["@dmulcahey", "@adminiuga", "@puddly", "@TheJulianJES"], "config_flow": true, "dependencies": ["file_upload"], @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.31"], + "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.42"], "usb": [ { "vid": "10C4", @@ -130,6 +130,10 @@ { "type": "_czc._tcp.local.", "name": "czc*" + }, + { + "type": "_zigbee-coordinator._tcp.local.", + "name": "*" } ] } diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 5d81556564a..4706e204872 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -76,7 +76,8 @@ "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "not_zha_device": "This device is not a zha device", "usb_probe_failed": "Failed to probe the usb device", - "wrong_firmware_installed": "Your device is running the wrong firmware and cannot be used with ZHA until the correct firmware is installed. 
[A repair has been created]({repair_url}) with more information and instructions for how to fix this." + "wrong_firmware_installed": "Your device is running the wrong firmware and cannot be used with ZHA until the correct firmware is installed. [A repair has been created]({repair_url}) with more information and instructions for how to fix this.", + "invalid_zeroconf_data": "The coordinator has invalid zeroconf service info and cannot be identified by ZHA" } }, "options": { @@ -178,11 +179,11 @@ "title": "Global Options", "enhanced_light_transition": "Enable enhanced light color/temperature transition from an off-state", "light_transitioning_flag": "Enable enhanced brightness slider during light transition", - "always_prefer_xy_color_mode": "Always prefer XY color mode", "group_members_assume_state": "Group members assume state of group", "enable_identify_on_join": "Enable identify effect when devices join the network", "default_light_transition": "Default light transition time (seconds)", "consider_unavailable_mains": "Consider mains powered devices unavailable after (seconds)", + "enable_mains_startup_polling": "Refresh state for mains powered devices on startup", "consider_unavailable_battery": "Consider battery powered devices unavailable after (seconds)" }, "zha_alarm_options": { @@ -297,7 +298,7 @@ }, "reconfigure_device": { "name": "Reconfigure device", - "description": "Reconfigures a ZHA device (heal device). Use this if you are having issues with the device. If the device in question is a battery-powered device, ensure it is awake and accepting commands when you use this service.", + "description": "Reconfigures a ZHA device (heal device). Use this if you are having issues with the device. If the device in question is a battery-powered device, ensure it is awake and accepting commands when you use this action.", "fields": { "ieee": { "name": "[%key:component::zha::services::permit::fields::ieee::name%]", @@ -599,6 +600,12 @@ }, "self_test": { "name": "Self-test" + }, + "reset_summation_delivered": { + "name": "Reset summation delivered" + }, + "restart_device": { + "name": "Restart device" } }, "climate": { @@ -708,6 +715,15 @@ "maximum_level": { "name": "Maximum load dimming level" }, + "default_level_local": { + "name": "Local default dimming level" + }, + "default_level_remote": { + "name": "Remote default dimming level" + }, + "state_after_power_restored": { + "name": "Start-up default dimming level" + }, "auto_off_timer": { "name": "Automatic switch shutoff timer" }, @@ -767,6 +783,45 @@ }, "regulation_setpoint_offset": { "name": "Regulation setpoint offset" + }, + "irrigation_cycles": { + "name": "Irrigation cycles" + }, + "irrigation_target": { + "name": "Irrigation target" + }, + "irrigation_interval": { + "name": "Irrigation interval" + }, + "valve_countdown_1": { + "name": "Irrigation time 1" + }, + "valve_countdown_2": { + "name": "Irrigation time 2" + }, + "on_led_intensity": { + "name": "On LED intensity" + }, + "off_led_intensity": { + "name": "Off LED intensity" + }, + "frost_protection_temperature": { + "name": "Frost protection temperature" + }, + "valve_opening_degree": { + "name": "Valve opening degree" + }, + "valve_closing_degree": { + "name": "Valve closing degree" + }, + "siren_time": { + "name": "Siren time" + }, + "timer_time_left": { + "name": "Timer time left" + }, + "approach_distance": { + "name": "Approach distance" } }, "select": { @@ -818,6 +873,9 @@ "increased_non_neutral_output": { "name": "Non neutral output" }, + "leading_or_trailing_edge": { + 
"name": "Dimming mode" + }, "feeding_mode": { "name": "Mode" }, @@ -853,6 +911,21 @@ }, "setpoint_response_time": { "name": "Setpoint response time" + }, + "irrigation_mode": { + "name": "Irrigation mode" + }, + "weather_delay": { + "name": "Weather delay" + }, + "on_led_color": { + "name": "On LED color" + }, + "off_led_color": { + "name": "Off LED color" + }, + "external_trigger_mode": { + "name": "External trigger mode" } }, "sensor": { @@ -898,6 +971,12 @@ "device_temperature": { "name": "Device temperature" }, + "internal_temp_monitor": { + "name": "Internal temperature" + }, + "overheated": { + "name": "Overheat protection" + }, "formaldehyde": { "name": "Formaldehyde concentration" }, @@ -1023,6 +1102,36 @@ }, "motor_stepcount": { "name": "Motor stepcount" + }, + "irrigation_duration": { + "name": "Last irrigation duration" + }, + "irrigation_start_time": { + "name": "Irrigation start time" + }, + "irrigation_end_time": { + "name": "Irrigation end time" + }, + "irrigation_duration_1": { + "name": "Irrigation duration 1" + }, + "irriation_duration_2": { + "name": "Irrigation duration 2" + }, + "valve_status_1": { + "name": "Status 1" + }, + "valve_status_2": { + "name": "Status 2" + }, + "timer_state": { + "name": "Timer state" + }, + "last_valve_open_duration": { + "name": "Last valve open duration" + }, + "motion_distance": { + "name": "Motion distance" } }, "switch": { @@ -1127,6 +1236,27 @@ }, "adaptation_run_enabled": { "name": "Adaptation run enabled" + }, + "valve_on_off_1": { + "name": "Valve 1" + }, + "valve_on_off_2": { + "name": "Valve 2" + }, + "double_up_full": { + "name": "Double tap on - full" + }, + "open_window": { + "name": "Open window" + }, + "turbo_mode": { + "name": "Turbo mode" + }, + "detach_relay": { + "name": "Detach relay" + }, + "enable_siren": { + "name": "Enable siren" } } } diff --git a/homeassistant/components/zha/update.py b/homeassistant/components/zha/update.py index e12d048b190..cb5c160e7b3 100644 --- a/homeassistant/components/zha/update.py +++ b/homeassistant/components/zha/update.py @@ -4,7 +4,6 @@ from __future__ import annotations import functools import logging -import math from typing import Any from zha.exceptions import ZHAException @@ -37,6 +36,18 @@ from .helpers import ( _LOGGER = logging.getLogger(__name__) +OTA_MESSAGE_BATTERY_POWERED = ( + "Battery powered devices can sometimes take multiple hours to update and you may" + " need to wake the device for the update to begin." +) + +ZHA_DOCS_NETWORK_RELIABILITY = "https://www.home-assistant.io/integrations/zha/#zigbee-interference-avoidance-and-network-rangecoverage-optimization" +OTA_MESSAGE_RELIABILITY = ( + "If you are having issues updating a specific device, make sure that you've" + f" eliminated [common environmental issues]({ZHA_DOCS_NETWORK_RELIABILITY}) that" + " could be affecting network reliability. OTA updates require a reliable network." 
+) + async def async_setup_entry( hass: HomeAssistant, @@ -64,7 +75,7 @@ async def async_setup_entry( config_entry.async_on_unload(unsub) -class ZHAFirmwareUpdateCoordinator(DataUpdateCoordinator[None]): # pylint: disable=hass-enforce-coordinator-module +class ZHAFirmwareUpdateCoordinator(DataUpdateCoordinator[None]): # pylint: disable=hass-enforce-class-module """Firmware update coordinator that broadcasts updates network-wide.""" def __init__( @@ -95,7 +106,9 @@ class ZHAFirmwareUpdateEntity( UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS | UpdateEntityFeature.SPECIFIC_VERSION + | UpdateEntityFeature.RELEASE_NOTES ) + _attr_display_precision = 2 # 40 byte chunks with ~200KB files increments by 0.02% def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: """Initialize the ZHA siren.""" @@ -114,20 +127,19 @@ class ZHAFirmwareUpdateEntity( def in_progress(self) -> bool | int | None: """Update installation progress. + Should return a boolean (True if in progress, False if not). + """ + return self.entity_data.entity.in_progress + + @property + def update_percentage(self) -> int | float | None: + """Update installation progress. + Needs UpdateEntityFeature.PROGRESS flag to be set for it to be used. - Can either return a boolean (True if in progress, False if not) - or an integer to indicate the progress in from 0 to 100%. + Can either return a number to indicate the progress from 0 to 100% or None. """ - if not self.entity_data.entity.in_progress: - return self.entity_data.entity.in_progress - - # Stay in an indeterminate state until we actually send something - if self.entity_data.entity.progress == 0: - return True - - # Rescale 0-100% to 2-100% to avoid 0 and 1 colliding with None, False, and True - return int(math.ceil(2 + 98 * self.entity_data.entity.progress / 100)) + return self.entity_data.entity.update_percentage @property def latest_version(self) -> str | None: @@ -143,6 +155,28 @@ class ZHAFirmwareUpdateEntity( """ return self.entity_data.entity.release_summary + async def async_release_notes(self) -> str | None: + """Return full release notes. + + This is suitable for a long changelog that does not fit in the release_summary + property. The returned string can contain markdown. 
+ """ + + if self.entity_data.device_proxy.device.is_mains_powered: + header = ( + "" + f"{OTA_MESSAGE_RELIABILITY}" + "" + ) + else: + header = ( + "" + f"{OTA_MESSAGE_BATTERY_POWERED} {OTA_MESSAGE_RELIABILITY}" + "" + ) + + return f"{header}\n\n{self.entity_data.entity.release_notes or ''}" + @property def release_url(self) -> str | None: """URL to the full release notes of the latest version available.""" @@ -155,7 +189,7 @@ class ZHAFirmwareUpdateEntity( ) -> None: """Install an update.""" try: - await self.entity_data.entity.async_install(version=version, backup=backup) + await self.entity_data.entity.async_install(version=version) except ZHAException as exc: raise HomeAssistantError(exc) from exc finally: diff --git a/homeassistant/components/zha/websocket_api.py b/homeassistant/components/zha/websocket_api.py index 0d4296e4b22..5ffd7117d93 100644 --- a/homeassistant/components/zha/websocket_api.py +++ b/homeassistant/components/zha/websocket_api.py @@ -94,7 +94,7 @@ from .helpers import ( ) if TYPE_CHECKING: - from homeassistant.components.websocket_api.connection import ActiveConnection + from homeassistant.components.websocket_api import ActiveConnection _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zhong_hong/climate.py b/homeassistant/components/zhong_hong/climate.py index eaf00b5432f..b5acc230472 100644 --- a/homeassistant/components/zhong_hong/climate.py +++ b/homeassistant/components/zhong_hong/climate.py @@ -135,7 +135,6 @@ class ZhongHongClimate(ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _enable_turn_on_off_backwards_compatibility = False def __init__(self, hub, addr_out, addr_in): """Set up the ZhongHong climate devices.""" diff --git a/homeassistant/components/zhong_hong/manifest.json b/homeassistant/components/zhong_hong/manifest.json index 06cc06faf0b..3569466fb0a 100644 --- a/homeassistant/components/zhong_hong/manifest.json +++ b/homeassistant/components/zhong_hong/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/zhong_hong", "iot_class": "local_push", "loggers": ["zhong_hong_hvac"], - "requirements": ["zhong-hong-hvac==1.0.12"] + "quality_scale": "legacy", + "requirements": ["zhong-hong-hvac==1.0.13"] } diff --git a/homeassistant/components/ziggo_mediabox_xl/manifest.json b/homeassistant/components/ziggo_mediabox_xl/manifest.json index 81aac99e58d..1ae09c9927d 100644 --- a/homeassistant/components/ziggo_mediabox_xl/manifest.json +++ b/homeassistant/components/ziggo_mediabox_xl/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ziggo_mediabox_xl", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["ziggo-mediabox-xl==1.1.0"] } diff --git a/homeassistant/components/ziggo_mediabox_xl/media_player.py b/homeassistant/components/ziggo_mediabox_xl/media_player.py index a81a206b5b2..6e858b454e9 100644 --- a/homeassistant/components/ziggo_mediabox_xl/media_player.py +++ b/homeassistant/components/ziggo_mediabox_xl/media_player.py @@ -64,7 +64,7 @@ def setup_platform( if mediabox.test_connection(): connection_successful = True elif manual_config: - _LOGGER.info("Can't connect to %s", host) + _LOGGER.error("Can't connect to %s", host) else: _LOGGER.error("Can't connect to %s", host) # When the device is in eco mode it's not connected to the network @@ -77,7 +77,7 @@ def setup_platform( except OSError as error: _LOGGER.error("Can't connect to %s: %s", host, error) 
else: - _LOGGER.info("Ignoring duplicate Ziggo Mediabox XL %s", host) + _LOGGER.warning("Ignoring duplicate Ziggo Mediabox XL %s", host) add_entities(hosts, True) diff --git a/homeassistant/components/zodiac/manifest.json b/homeassistant/components/zodiac/manifest.json index 88f3d7fadef..f641826ca7b 100644 --- a/homeassistant/components/zodiac/manifest.json +++ b/homeassistant/components/zodiac/manifest.json @@ -4,6 +4,5 @@ "codeowners": ["@JulienTant"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/zodiac", - "iot_class": "calculated", - "quality_scale": "silver" + "iot_class": "calculated" } diff --git a/homeassistant/components/zone/icons.json b/homeassistant/components/zone/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/zone/icons.json +++ b/homeassistant/components/zone/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/zoneminder/camera.py b/homeassistant/components/zoneminder/camera.py index ab938472ed7..21513b4bed4 100644 --- a/homeassistant/components/zoneminder/camera.py +++ b/homeassistant/components/zoneminder/camera.py @@ -35,7 +35,7 @@ def setup_platform( ) for monitor in monitors: - _LOGGER.info("Initializing camera %s", monitor.id) + _LOGGER.debug("Initializing camera %s", monitor.id) cameras.append(ZoneMinderCamera(monitor, zm_client.verify_ssl)) add_entities(cameras) diff --git a/homeassistant/components/zoneminder/icons.json b/homeassistant/components/zoneminder/icons.json index 8ca180d7399..3f9f6410a22 100644 --- a/homeassistant/components/zoneminder/icons.json +++ b/homeassistant/components/zoneminder/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_run_state": "mdi:cog" + "set_run_state": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/zoneminder/manifest.json b/homeassistant/components/zoneminder/manifest.json index f441a800555..2501aba2cf1 100644 --- a/homeassistant/components/zoneminder/manifest.json +++ b/homeassistant/components/zoneminder/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/zoneminder", "iot_class": "local_polling", "loggers": ["zoneminder"], + "quality_scale": "legacy", "requirements": ["zm-py==0.5.4"] } diff --git a/homeassistant/components/zwave_js/__init__.py b/homeassistant/components/zwave_js/__init__.py index dedae10400f..c8503b1f4c6 100644 --- a/homeassistant/components/zwave_js/__init__.py +++ b/homeassistant/components/zwave_js/__init__.py @@ -9,6 +9,7 @@ import logging from typing import Any from awesomeversion import AwesomeVersion +import voluptuous as vol from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import CommandClass, RemoveNodeReason from zwave_js_server.exceptions import BaseZwaveJSServerError, InvalidServerVersion @@ -87,6 +88,7 @@ from .const import ( CONF_ADDON_S2_AUTHENTICATED_KEY, CONF_ADDON_S2_UNAUTHENTICATED_KEY, CONF_DATA_COLLECTION_OPTED_IN, + CONF_INSTALLER_MODE, CONF_INTEGRATION_CREATED_ADDON, CONF_LR_S2_ACCESS_CONTROL_KEY, CONF_LR_S2_AUTHENTICATED_KEY, @@ -100,6 +102,7 @@ from .const import ( DATA_CLIENT, DOMAIN, EVENT_DEVICE_ADDED_TO_REGISTRY, + EVENT_VALUE_UPDATED, LIB_LOGGER, LOGGER, LR_ADDON_VERSION, @@ -131,12 +134,21 @@ DATA_CLIENT_LISTEN_TASK = "client_listen_task" DATA_DRIVER_EVENTS = "driver_events" DATA_START_CLIENT_TASK = "start_client_task" -CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) +CONFIG_SCHEMA = vol.Schema( + { + 
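# --- Editor's note (illustrative sketch, not part of the patch) ---------------
# The zwave_js/__init__.py hunk in progress here replaces the config-entry-only
# schema with one that accepts an optional `installer_mode` flag from YAML, e.g.
#   zwave_js:
#     installer_mode: true
# Standalone reproduction for clarity (plain `bool` stands in for cv.boolean):
import voluptuous as vol

DOMAIN = "zwave_js"
CONF_INSTALLER_MODE = "installer_mode"

SKETCH_CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {vol.Optional(CONF_INSTALLER_MODE, default=False): bool}
        )
    },
    extra=vol.ALLOW_EXTRA,
)

print(SKETCH_CONFIG_SCHEMA({"zwave_js": {}}))
# -> {'zwave_js': {'installer_mode': False}}  (default applied)
print(SKETCH_CONFIG_SCHEMA({"zwave_js": {"installer_mode": True}}))
# -> {'zwave_js': {'installer_mode': True}}
# ------------------------------------------------------------------------------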
DOMAIN: vol.Schema( + { + vol.Optional(CONF_INSTALLER_MODE, default=False): cv.boolean, + } + ) + }, + extra=vol.ALLOW_EXTRA, +) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Z-Wave JS component.""" - hass.data[DOMAIN] = {} + hass.data[DOMAIN] = config.get(DOMAIN, {}) for entry in hass.config_entries.async_entries(DOMAIN): if not isinstance(entry.unique_id, str): hass.config_entries.async_update_entry( @@ -353,7 +365,7 @@ class ControllerEvents: self.discovered_value_ids: dict[str, set[str]] = defaultdict(set) self.driver_events = driver_events self.dev_reg = driver_events.dev_reg - self.registered_unique_ids: dict[str, dict[str, set[str]]] = defaultdict( + self.registered_unique_ids: dict[str, dict[Platform, set[str]]] = defaultdict( lambda: defaultdict(set) ) self.node_events = NodeEvents(hass, self) @@ -623,7 +635,7 @@ class NodeEvents: ) # add listeners to handle new values that get added later - for event in ("value added", "value updated", "metadata updated"): + for event in ("value added", EVENT_VALUE_UPDATED, "metadata updated"): self.config_entry.async_on_unload( node.on( event, @@ -722,7 +734,7 @@ class NodeEvents: # add listener for value updated events self.config_entry.async_on_unload( disc_info.node.on( - "value updated", + EVENT_VALUE_UPDATED, lambda event: self.async_on_value_updated_fire_event( value_updates_disc_info, event["value"] ), diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index 8f81790708f..1a1cd6ae9c1 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -13,8 +13,10 @@ from zwave_js_server.client import Client from zwave_js_server.const import ( CommandClass, ExclusionStrategy, + InclusionState, InclusionStrategy, LogLevel, + NodeStatus, Protocols, ProvisioningEntryStatus, QRCodeVersion, @@ -41,6 +43,7 @@ from zwave_js_server.model.controller.firmware import ( ControllerFirmwareUpdateResult, ) from zwave_js_server.model.driver import Driver +from zwave_js_server.model.endpoint import Endpoint from zwave_js_server.model.log_config import LogConfig from zwave_js_server.model.log_message import LogMessage from zwave_js_server.model.node import Node, NodeStatistics @@ -53,6 +56,7 @@ from zwave_js_server.model.utils import ( async_parse_qr_code_string, async_try_parse_dsk_from_qr_code_string, ) +from zwave_js_server.model.value import ConfigurationValueFormat from zwave_js_server.util.node import async_set_config_parameter from homeassistant.components import websocket_api @@ -73,8 +77,15 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from .config_validation import BITMASK_SCHEMA from .const import ( + ATTR_COMMAND_CLASS, + ATTR_ENDPOINT, + ATTR_METHOD_NAME, + ATTR_PARAMETERS, + ATTR_WAIT_FOR_RESULT, CONF_DATA_COLLECTION_OPTED_IN, + CONF_INSTALLER_MODE, DATA_CLIENT, + DOMAIN, EVENT_DEVICE_ADDED_TO_REGISTRY, USER_AGENT, ) @@ -98,6 +109,8 @@ PROPERTY = "property" PROPERTY_KEY = "property_key" ENDPOINT = "endpoint" VALUE = "value" +VALUE_SIZE = "value_size" +VALUE_FORMAT = "value_format" # constants for log config commands CONFIG = "config" @@ -382,6 +395,8 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_node_metadata) websocket_api.async_register_command(hass, websocket_node_alerts) websocket_api.async_register_command(hass, websocket_add_node) + websocket_api.async_register_command(hass, websocket_cancel_secure_bootstrap_s2) + 
websocket_api.async_register_command(hass, websocket_subscribe_s2_inclusion) websocket_api.async_register_command(hass, websocket_grant_security_classes) websocket_api.async_register_command(hass, websocket_validate_dsk_and_enter_pin) websocket_api.async_register_command(hass, websocket_provision_smart_start_node) @@ -408,6 +423,8 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_rebuild_node_routes) websocket_api.async_register_command(hass, websocket_set_config_parameter) websocket_api.async_register_command(hass, websocket_get_config_parameters) + websocket_api.async_register_command(hass, websocket_get_raw_config_parameter) + websocket_api.async_register_command(hass, websocket_set_raw_config_parameter) websocket_api.async_register_command(hass, websocket_subscribe_log_updates) websocket_api.async_register_command(hass, websocket_update_log_config) websocket_api.async_register_command(hass, websocket_get_log_config) @@ -435,6 +452,9 @@ def async_register_api(hass: HomeAssistant) -> None: ) websocket_api.async_register_command(hass, websocket_subscribe_node_statistics) websocket_api.async_register_command(hass, websocket_hard_reset_controller) + websocket_api.async_register_command(hass, websocket_node_capabilities) + websocket_api.async_register_command(hass, websocket_invoke_cc_api) + websocket_api.async_register_command(hass, websocket_get_integration_settings) hass.http.register_view(FirmwareUploadView(dr.async_get(hass))) @@ -693,6 +713,30 @@ async def websocket_add_node( ) ) + @callback + def forward_node_added( + node: Node, low_security: bool, low_security_reason: str | None + ) -> None: + interview_unsubs = [ + node.on("interview started", forward_event), + node.on("interview completed", forward_event), + node.on("interview stage completed", forward_stage), + node.on("interview failed", forward_event), + ] + unsubs.extend(interview_unsubs) + node_details = { + "node_id": node.node_id, + "status": node.status, + "ready": node.ready, + "low_security": low_security, + "low_security_reason": low_security_reason, + } + connection.send_message( + websocket_api.event_message( + msg[ID], {"event": "node added", "node": node_details} + ) + ) + @callback def forward_requested_grant(event: dict) -> None: connection.send_message( @@ -727,24 +771,10 @@ async def websocket_add_node( @callback def node_added(event: dict) -> None: - node = event["node"] - interview_unsubs = [ - node.on("interview started", forward_event), - node.on("interview completed", forward_event), - node.on("interview stage completed", forward_stage), - node.on("interview failed", forward_event), - ] - unsubs.extend(interview_unsubs) - node_details = { - "node_id": node.node_id, - "status": node.status, - "ready": node.ready, - "low_security": event["result"].get("lowSecurity", False), - } - connection.send_message( - websocket_api.event_message( - msg[ID], {"event": "node added", "node": node_details} - ) + forward_node_added( + event["node"], + event["result"].get("lowSecurity", False), + event["result"].get("lowSecurityReason"), ) @callback @@ -776,25 +806,96 @@ async def websocket_add_node( ] msg[DATA_UNSUBSCRIBE] = unsubs - try: - result = await controller.async_begin_inclusion( - INCLUSION_STRATEGY_NOT_SMART_START[inclusion_strategy.value], - force_security=force_security, - provisioning=provisioning, - dsk=dsk, - ) - except ValueError as err: - connection.send_error( + if controller.inclusion_state == InclusionState.INCLUDING: + connection.send_result( 
msg[ID], - ERR_INVALID_FORMAT, - err.args[0], + True, # Inclusion is already in progress ) - return + # Check for nodes that have been added but not fully included + for node in controller.nodes.values(): + if node.status != NodeStatus.DEAD and not node.ready: + forward_node_added( + node, + not node.is_secure, + None, + ) + else: + try: + result = await controller.async_begin_inclusion( + INCLUSION_STRATEGY_NOT_SMART_START[inclusion_strategy.value], + force_security=force_security, + provisioning=provisioning, + dsk=dsk, + ) + except ValueError as err: + connection.send_error( + msg[ID], + ERR_INVALID_FORMAT, + err.args[0], + ) + return - connection.send_result( - msg[ID], - result, - ) + connection.send_result( + msg[ID], + result, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/cancel_secure_bootstrap_s2", + vol.Required(ENTRY_ID): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_cancel_secure_bootstrap_s2( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Cancel secure bootstrap S2.""" + await driver.controller.async_cancel_secure_bootstrap_s2() + connection.send_result(msg[ID]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/subscribe_s2_inclusion", + vol.Required(ENTRY_ID): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_subscribe_s2_inclusion( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Subscribe to S2 inclusion initiated by the controller.""" + + @callback + def forward_dsk(event: dict) -> None: + connection.send_message( + websocket_api.event_message( + msg[ID], {"event": event["event"], "dsk": event["dsk"]} + ) + ) + + unsub = driver.controller.on("validate dsk and enter pin", forward_dsk) + connection.subscriptions[msg["id"]] = unsub + msg[DATA_UNSUBSCRIBE] = [unsub] + connection.send_result(msg[ID]) @websocket_api.require_admin @@ -1713,6 +1814,7 @@ async def websocket_get_config_parameters( "unit": metadata.unit, "writeable": metadata.writeable, "readable": metadata.readable, + "default": metadata.default, }, "value": zwave_value.value, } @@ -1725,6 +1827,72 @@ async def websocket_get_config_parameters( ) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/set_raw_config_parameter", + vol.Required(DEVICE_ID): str, + vol.Required(PROPERTY): int, + vol.Required(VALUE): int, + vol.Required(VALUE_SIZE): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)), + vol.Required(VALUE_FORMAT): vol.Coerce(ConfigurationValueFormat), + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_set_raw_config_parameter( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Set a custom config parameter value for a Z-Wave node.""" + result = await node.async_set_raw_config_parameter_value( + msg[VALUE], + msg[PROPERTY], + value_size=msg[VALUE_SIZE], + value_format=msg[VALUE_FORMAT], + ) + + connection.send_result( + msg[ID], + { + STATUS: result.status, + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/get_raw_config_parameter", + 
vol.Required(DEVICE_ID): str, + vol.Required(PROPERTY): int, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_get_raw_config_parameter( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Get a custom config parameter value for a Z-Wave node.""" + value = await node.async_get_raw_config_parameter_value( + msg[PROPERTY], + ) + + connection.send_result( + msg[ID], + { + VALUE: value, + }, + ) + + def filename_is_present_if_logging_to_file(obj: dict) -> dict: """Validate that filename is provided if log_to_file is True.""" if obj.get(LOG_TO_FILE, False) and FILENAME not in obj: @@ -2498,3 +2666,103 @@ async def websocket_hard_reset_controller( ) ] await driver.async_hard_reset() + + +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/node_capabilities", + vol.Required(DEVICE_ID): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_node_capabilities( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Get node endpoints with their support command classes.""" + # consumers expect snake_case at the moment + # remove that addition when consumers are updated + connection.send_result( + msg[ID], + { + idx: [ + command_class.to_dict() | {"is_secure": command_class.is_secure} + for command_class in endpoint.command_classes + ] + for idx, endpoint in node.endpoints.items() + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/invoke_cc_api", + vol.Required(DEVICE_ID): str, + vol.Required(ATTR_COMMAND_CLASS): vol.All( + vol.Coerce(int), vol.Coerce(CommandClass) + ), + vol.Optional(ATTR_ENDPOINT): vol.Coerce(int), + vol.Required(ATTR_METHOD_NAME): cv.string, + vol.Required(ATTR_PARAMETERS): list, + vol.Optional(ATTR_WAIT_FOR_RESULT): cv.boolean, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_invoke_cc_api( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Call invokeCCAPI on the node or provided endpoint.""" + command_class: CommandClass = msg[ATTR_COMMAND_CLASS] + method_name: str = msg[ATTR_METHOD_NAME] + parameters: list[Any] = msg[ATTR_PARAMETERS] + + node_or_endpoint: Node | Endpoint = node + if (endpoint := msg.get(ATTR_ENDPOINT)) is not None: + node_or_endpoint = node.endpoints[endpoint] + + try: + result = await node_or_endpoint.async_invoke_cc_api( + command_class, + method_name, + *parameters, + wait_for_result=msg.get(ATTR_WAIT_FOR_RESULT, False), + ) + except BaseZwaveJSServerError as err: + connection.send_error(msg[ID], err.__class__.__name__, str(err)) + else: + connection.send_result( + msg[ID], + result, + ) + + +@callback +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/get_integration_settings", + } +) +def websocket_get_integration_settings( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get Z-Wave JS integration wide configuration.""" + connection.send_result( + msg[ID], + { + # list explicitly to avoid leaking other keys and to set default + CONF_INSTALLER_MODE: hass.data[DOMAIN].get(CONF_INSTALLER_MODE, False), + }, + ) diff --git a/homeassistant/components/zwave_js/binary_sensor.py b/homeassistant/components/zwave_js/binary_sensor.py index 
bd5ce2d810b..0f1495fc6e6 100644 --- a/homeassistant/components/zwave_js/binary_sensor.py +++ b/homeassistant/components/zwave_js/binary_sensor.py @@ -248,6 +248,16 @@ BOOLEAN_SENSOR_MAPPINGS: dict[int, BinarySensorEntityDescription] = { } +@callback +def is_valid_notification_binary_sensor( + info: ZwaveDiscoveryInfo, +) -> bool | NotificationZWaveJSEntityDescription: + """Return if the notification CC Value is valid as binary sensor.""" + if not info.primary_value.metadata.states: + return False + return len(info.primary_value.metadata.states) > 1 + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -264,16 +274,18 @@ async def async_setup_entry( entities: list[BinarySensorEntity] = [] if info.platform_hint == "notification": + # ensure the notification CC Value is valid as binary sensor + if not is_valid_notification_binary_sensor(info): + return # Get all sensors from Notification CC states for state_key in info.primary_value.metadata.states: # ignore idle key (0) if state_key == "0": continue - + # get (optional) description for this state notification_description: ( NotificationZWaveJSEntityDescription | None ) = None - for description in NOTIFICATION_SENSOR_MAPPINGS: if ( int(description.key) @@ -289,7 +301,6 @@ async def async_setup_entry( and notification_description.off_state == state_key ): continue - entities.append( ZWaveNotificationBinarySensor( config_entry, driver, info, state_key, notification_description diff --git a/homeassistant/components/zwave_js/climate.py b/homeassistant/components/zwave_js/climate.py index 14a3fe579c4..580694cae11 100644 --- a/homeassistant/components/zwave_js/climate.py +++ b/homeassistant/components/zwave_js/climate.py @@ -24,8 +24,6 @@ from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, - DEFAULT_MAX_TEMP, - DEFAULT_MIN_TEMP, DOMAIN as CLIMATE_DOMAIN, PRESET_NONE, ClimateEntity, @@ -130,7 +128,6 @@ class ZWaveClimate(ZWaveBaseEntity, ClimateEntity): """Representation of a Z-Wave climate.""" _attr_precision = PRECISION_TENTHS - _enable_turn_on_off_backwards_compatibility = False def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo @@ -421,7 +418,7 @@ class ZWaveClimate(ZWaveBaseEntity, ClimateEntity): @property def min_temp(self) -> float: """Return the minimum temperature.""" - min_temp = DEFAULT_MIN_TEMP + min_temp = 0.0 # Not using DEFAULT_MIN_TEMP to allow wider range base_unit: str = UnitOfTemperature.CELSIUS try: temp = self._setpoint_value_or_raise(self._current_mode_setpoint_enums[0]) @@ -437,7 +434,7 @@ class ZWaveClimate(ZWaveBaseEntity, ClimateEntity): @property def max_temp(self) -> float: """Return the maximum temperature.""" - max_temp = DEFAULT_MAX_TEMP + max_temp = 50.0 # Not using DEFAULT_MAX_TEMP to allow wider range base_unit: str = UnitOfTemperature.CELSIUS try: temp = self._setpoint_value_or_raise(self._current_mode_setpoint_enums[0]) diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index e73fa9fc3a7..711eb14070d 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -18,8 +18,6 @@ from homeassistant.components.hassio import ( AddonInfo, AddonManager, AddonState, - HassioServiceInfo, - is_hassio, ) from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import ( @@ -29,6 +27,7 @@ from homeassistant.config_entries import ( 
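# --- Editor's note (illustrative sketch, not part of the patch) ---------------
# The zwave_js/binary_sensor.py hunk above adds a guard so a Notification CC
# value only produces binary sensors when its metadata lists more than the idle
# state. Simplified stand-in (the real helper inspects
# info.primary_value.metadata.states):
def is_valid_notification_binary_sensor(states: dict[str, str] | None) -> bool:
    if not states:
        return False
    return len(states) > 1


print(is_valid_notification_binary_sensor(None))                           # False
print(is_valid_notification_binary_sensor({"0": "idle"}))                  # False
print(is_valid_notification_binary_sensor({"0": "idle", "22": "Motion"}))  # True
# ------------------------------------------------------------------------------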
ConfigEntryBaseFlow, ConfigEntryState, ConfigFlow, + ConfigFlowContext, ConfigFlowResult, OptionsFlow, OptionsFlowManager, @@ -38,6 +37,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import AbortFlow, FlowManager from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.hassio import is_hassio +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.typing import VolDictType from . import disconnect_client @@ -192,7 +193,7 @@ class BaseZwaveJSFlow(ConfigEntryBaseFlow, ABC): @property @abstractmethod - def flow_manager(self) -> FlowManager[ConfigFlowResult]: + def flow_manager(self) -> FlowManager[ConfigFlowContext, ConfigFlowResult]: """Return the flow manager of the flow.""" async def async_step_install_addon( @@ -346,11 +347,12 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): VERSION = 1 + _title: str + def __init__(self) -> None: """Set up flow instance.""" super().__init__() self.use_addon = False - self._title: str | None = None self._usb_discovery = False @property @@ -364,19 +366,7 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Return the options flow.""" - return OptionsFlowHandler(config_entry) - - async def async_step_import(self, data: dict[str, Any]) -> ConfigFlowResult: - """Handle imported data. - - This step will be used when importing data - during Z-Wave to Z-Wave JS migration. - """ - # Note that the data comes from the zwave integration. - # So we don't use our constants here. - self.s0_legacy_key = data.get("network_key") - self.usb_path = data.get("usb_path") - return await self.async_step_user() + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -406,6 +396,7 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): return await self.async_step_manual({CONF_URL: self.ws_address}) assert self.ws_address + assert self.unique_id return self.async_show_form( step_id="zeroconf_confirm", description_placeholders={ @@ -680,7 +671,7 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): discovery_info = await self._async_get_addon_discovery_info() self.ws_address = f"ws://{discovery_info['host']}:{discovery_info['port']}" - if not self.unique_id or self.context["source"] == SOURCE_USB: + if not self.unique_id or self.source == SOURCE_USB: if not self.version_info: try: self.version_info = await async_get_version_info( @@ -734,10 +725,9 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow): """Handle an options flow for Z-Wave JS.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Set up the options flow.""" super().__init__() - self.config_entry = config_entry self.original_addon_config: dict[str, Any] | None = None self.revert_reason: str | None = None diff --git a/homeassistant/components/zwave_js/config_validation.py b/homeassistant/components/zwave_js/config_validation.py index 6c060f90ce5..30bc2f16789 100644 --- a/homeassistant/components/zwave_js/config_validation.py +++ b/homeassistant/components/zwave_js/config_validation.py @@ -34,6 +34,8 @@ def boolean(value: Any) -> bool: VALUE_SCHEMA = vol.Any( boolean, + float, + int, vol.Coerce(int), vol.Coerce(float), BITMASK_SCHEMA, diff 
--git a/homeassistant/components/zwave_js/const.py b/homeassistant/components/zwave_js/const.py index a04f9247548..16cf6f748bb 100644 --- a/homeassistant/components/zwave_js/const.py +++ b/homeassistant/components/zwave_js/const.py @@ -25,6 +25,7 @@ CONF_ADDON_S2_AUTHENTICATED_KEY = "s2_authenticated_key" CONF_ADDON_S2_UNAUTHENTICATED_KEY = "s2_unauthenticated_key" CONF_ADDON_LR_S2_ACCESS_CONTROL_KEY = "lr_s2_access_control_key" CONF_ADDON_LR_S2_AUTHENTICATED_KEY = "lr_s2_authenticated_key" +CONF_INSTALLER_MODE = "installer_mode" CONF_INTEGRATION_CREATED_ADDON = "integration_created_addon" CONF_NETWORK_KEY = "network_key" CONF_S0_LEGACY_KEY = "s0_legacy_key" @@ -42,6 +43,7 @@ DATA_CLIENT = "client" DATA_OLD_SERVER_LOG_LEVEL = "old_server_log_level" EVENT_DEVICE_ADDED_TO_REGISTRY = f"{DOMAIN}_device_added_to_registry" +EVENT_VALUE_UPDATED = "value updated" LOGGER = logging.getLogger(__package__) LIB_LOGGER = logging.getLogger("zwave_js_server") diff --git a/homeassistant/components/zwave_js/cover.py b/homeassistant/components/zwave_js/cover.py index 363b32cedda..218c5cc82fe 100644 --- a/homeassistant/components/zwave_js/cover.py +++ b/homeassistant/components/zwave_js/cover.py @@ -19,6 +19,7 @@ from zwave_js_server.const.command_class.multilevel_switch import ( from zwave_js_server.const.command_class.window_covering import ( NO_POSITION_PROPERTY_KEYS, NO_POSITION_SUFFIX, + WINDOW_COVERING_LEVEL_CHANGE_DOWN_PROPERTY, WINDOW_COVERING_LEVEL_CHANGE_UP_PROPERTY, SlatStates, ) @@ -341,6 +342,20 @@ class ZWaveWindowCovering(CoverPositionMixin, CoverTiltMixin): super().__init__(config_entry, driver, info) pos_value: ZwaveValue | None = None tilt_value: ZwaveValue | None = None + self._up_value = cast( + ZwaveValue, + self.get_zwave_value( + WINDOW_COVERING_LEVEL_CHANGE_UP_PROPERTY, + value_property_key=info.primary_value.property_key, + ), + ) + self._down_value = cast( + ZwaveValue, + self.get_zwave_value( + WINDOW_COVERING_LEVEL_CHANGE_DOWN_PROPERTY, + value_property_key=info.primary_value.property_key, + ), + ) # If primary value is for position, we have to search for a tilt value if info.primary_value.property_key in COVER_POSITION_PROPERTY_KEYS: @@ -402,6 +417,18 @@ class ZWaveWindowCovering(CoverPositionMixin, CoverTiltMixin): """Return range of valid tilt positions.""" return abs(SlatStates.CLOSED_2 - SlatStates.CLOSED_1) + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + await self._async_set_value(self._up_value, True) + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the cover.""" + await self._async_set_value(self._down_value, True) + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await self._async_set_value(self._up_value, False) + class ZwaveMotorizedBarrier(ZWaveBaseEntity, CoverEntity): """Representation of a Z-Wave motorized barrier device.""" diff --git a/homeassistant/components/zwave_js/device_condition.py b/homeassistant/components/zwave_js/device_condition.py index dcd42d4d85d..8a50c838eec 100644 --- a/homeassistant/components/zwave_js/device_condition.py +++ b/homeassistant/components/zwave_js/device_condition.py @@ -8,9 +8,7 @@ import voluptuous as vol from zwave_js_server.const import CommandClass from zwave_js_server.model.value import ConfigurationValue -from homeassistant.components.device_automation.exceptions import ( - InvalidDeviceAutomationConfig, -) +from homeassistant.components.device_automation import InvalidDeviceAutomationConfig from homeassistant.const import 
CONF_CONDITION, CONF_DEVICE_ID, CONF_DOMAIN, CONF_TYPE from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError diff --git a/homeassistant/components/zwave_js/device_trigger.py b/homeassistant/components/zwave_js/device_trigger.py index 49027d4d43b..661d4557694 100644 --- a/homeassistant/components/zwave_js/device_trigger.py +++ b/homeassistant/components/zwave_js/device_trigger.py @@ -7,8 +7,8 @@ from typing import Any import voluptuous as vol from zwave_js_server.const import CommandClass -from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA -from homeassistant.components.device_automation.exceptions import ( +from homeassistant.components.device_automation import ( + DEVICE_TRIGGER_BASE_SCHEMA, InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event, state diff --git a/homeassistant/components/zwave_js/diagnostics.py b/homeassistant/components/zwave_js/diagnostics.py index dde455bd9b6..5515100b20b 100644 --- a/homeassistant/components/zwave_js/diagnostics.py +++ b/homeassistant/components/zwave_js/diagnostics.py @@ -12,8 +12,7 @@ from zwave_js_server.model.node import Node from zwave_js_server.model.value import ValueDataType from zwave_js_server.util.node import dump_node_state -from homeassistant.components.diagnostics import REDACTED -from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.components.diagnostics import REDACTED, async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant @@ -81,7 +80,7 @@ def get_device_entities( er.async_get(hass), device.id, include_disabled_entities=True ) entities = [] - for entry in entity_entries: + for entry in sorted(entity_entries): # Skip entities that are not part of this integration if entry.config_entry_id != config_entry.entry_id: continue diff --git a/homeassistant/components/zwave_js/discovery.py b/homeassistant/components/zwave_js/discovery.py index 6e750ee8b2d..5c79c668afc 100644 --- a/homeassistant/components/zwave_js/discovery.py +++ b/homeassistant/components/zwave_js/discovery.py @@ -238,6 +238,12 @@ SWITCH_BINARY_CURRENT_VALUE_SCHEMA = ZWaveValueDiscoverySchema( command_class={CommandClass.SWITCH_BINARY}, property={CURRENT_VALUE_PROPERTY} ) +COLOR_SWITCH_CURRENT_VALUE_SCHEMA = ZWaveValueDiscoverySchema( + command_class={CommandClass.SWITCH_COLOR}, + property={CURRENT_COLOR_PROPERTY}, + property_key={None}, +) + SIREN_TONE_SCHEMA = ZWaveValueDiscoverySchema( command_class={CommandClass.SOUND_SWITCH}, property={TONE_ID_PROPERTY}, @@ -762,33 +768,6 @@ DISCOVERY_SCHEMAS = [ }, ), ), - # HomeSeer HSM-200 v1 - ZWaveDiscoverySchema( - platform=Platform.LIGHT, - hint="black_is_off", - manufacturer_id={0x001E}, - product_id={0x0001}, - product_type={0x0004}, - primary_value=ZWaveValueDiscoverySchema( - command_class={CommandClass.SWITCH_COLOR}, - property={CURRENT_COLOR_PROPERTY}, - property_key={None}, - ), - absent_values=[SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA], - ), - # Logic Group ZDB5100 - ZWaveDiscoverySchema( - platform=Platform.LIGHT, - hint="black_is_off", - manufacturer_id={0x0234}, - product_id={0x0121}, - product_type={0x0003}, - primary_value=ZWaveValueDiscoverySchema( - command_class={CommandClass.SWITCH_COLOR}, - property={CURRENT_COLOR_PROPERTY}, - property_key={None}, - ), - ), # ====== START OF GENERIC MAPPING SCHEMAS ======= # locks # Door Lock CC @@ -906,17 
+885,6 @@ DISCOVERY_SCHEMAS = [ type={ValueType.BOOLEAN}, ), ), - ZWaveDiscoverySchema( - platform=Platform.BINARY_SENSOR, - hint="notification", - primary_value=ZWaveValueDiscoverySchema( - command_class={ - CommandClass.NOTIFICATION, - }, - type={ValueType.NUMBER}, - ), - allow_multi=True, - ), # binary sensor for Indicator CC ZWaveDiscoverySchema( platform=Platform.BINARY_SENSOR, @@ -978,19 +946,6 @@ DISCOVERY_SCHEMAS = [ ), data_template=NumericSensorDataTemplate(), ), - # special list sensors (Notification CC) - ZWaveDiscoverySchema( - platform=Platform.SENSOR, - hint="list_sensor", - primary_value=ZWaveValueDiscoverySchema( - command_class={ - CommandClass.NOTIFICATION, - }, - type={ValueType.NUMBER}, - ), - allow_multi=True, - entity_registry_enabled_default=False, - ), # number for Indicator CC (exclude property keys 3-5) ZWaveDiscoverySchema( platform=Platform.NUMBER, @@ -1014,11 +969,6 @@ DISCOVERY_SCHEMAS = [ ), entity_category=EntityCategory.CONFIG, ), - # binary switches - ZWaveDiscoverySchema( - platform=Platform.SWITCH, - primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, - ), # switch for Indicator CC ZWaveDiscoverySchema( platform=Platform.SWITCH, @@ -1106,15 +1056,51 @@ DISCOVERY_SCHEMAS = [ device_class_generic={"Thermostat"}, primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, ), - # lights - # primary value is the currentValue (brightness) - # catch any device with multilevel CC as light - # NOTE: keep this at the bottom of the discovery scheme, - # to handle all others that need the multilevel CC first + # Handle the different combinations of Binary Switch, Multilevel Switch and Color Switch + # to create switches and/or (colored) lights. The goal is to: + # - couple Color Switch CC with Multilevel Switch CC if possible + # - couple Color Switch CC with Binary Switch CC as the first fallback + # - use Color Switch CC standalone as the last fallback + # + # Multilevel Switch CC (+ Color Switch CC) -> Dimmable light with or without color support. ZWaveDiscoverySchema( platform=Platform.LIGHT, primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, ), + # Binary Switch CC when Multilevel Switch and Color Switch CC exist -> + # On/Off switch, assign color to light entity instead + ZWaveDiscoverySchema( + platform=Platform.SWITCH, + primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + required_values=[ + SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, + COLOR_SWITCH_CURRENT_VALUE_SCHEMA, + ], + ), + # Binary Switch CC and Color Switch CC -> + # Colored light that uses Binary Switch CC for turning on/off. + ZWaveDiscoverySchema( + platform=Platform.LIGHT, + hint="color_onoff", + primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + required_values=[COLOR_SWITCH_CURRENT_VALUE_SCHEMA], + ), + # Binary Switch CC without Color Switch CC -> On/Off switch + ZWaveDiscoverySchema( + platform=Platform.SWITCH, + primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + absent_values=[COLOR_SWITCH_CURRENT_VALUE_SCHEMA], + ), + # Colored light (legacy device) that can only be controlled through Color Switch CC. 
+ ZWaveDiscoverySchema( + platform=Platform.LIGHT, + hint="color_onoff", + primary_value=COLOR_SWITCH_CURRENT_VALUE_SCHEMA, + absent_values=[ + SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, + ], + ), # light for Basic CC with target ZWaveDiscoverySchema( platform=Platform.LIGHT, @@ -1197,6 +1183,7 @@ DISCOVERY_SCHEMAS = [ type={ValueType.NUMBER}, any_available_states={(0, "idle")}, ), + allow_multi=True, ), # event # stateful = False @@ -1219,6 +1206,43 @@ DISCOVERY_SCHEMAS = [ ), entity_category=EntityCategory.DIAGNOSTIC, ), + ZWaveDiscoverySchema( + platform=Platform.BINARY_SENSOR, + hint="notification", + primary_value=ZWaveValueDiscoverySchema( + command_class={ + CommandClass.NOTIFICATION, + }, + type={ValueType.NUMBER}, + ), + # set allow-multi to true because some of the notification sensors + # can not be mapped to a binary sensor and must be handled as a regular sensor + allow_multi=True, + ), + # alarmType, alarmLevel (Notification CC) + ZWaveDiscoverySchema( + platform=Platform.SENSOR, + hint="notification_alarm", + primary_value=ZWaveValueDiscoverySchema( + command_class={ + CommandClass.NOTIFICATION, + }, + property={"alarmType", "alarmLevel"}, + type={ValueType.NUMBER}, + ), + entity_registry_enabled_default=False, + ), + # fallback sensors within Notification CC + ZWaveDiscoverySchema( + platform=Platform.SENSOR, + hint="notification", + primary_value=ZWaveValueDiscoverySchema( + command_class={ + CommandClass.NOTIFICATION, + }, + type={ValueType.NUMBER}, + ), + ), ] @@ -1238,8 +1262,11 @@ def async_discover_single_value( value: ZwaveValue, device: DeviceEntry, discovered_value_ids: dict[str, set[str]] ) -> Generator[ZwaveDiscoveryInfo]: """Run discovery on a single ZWave value and return matching schema info.""" - discovered_value_ids[device.id].add(value.value_id) for schema in DISCOVERY_SCHEMAS: + # abort if attribute(s) already discovered + if value.value_id in discovered_value_ids[device.id]: + continue + # check manufacturer_id, product_id, product_type if ( ( @@ -1298,14 +1325,20 @@ def async_discover_single_value( # check additional required values if schema.required_values is not None and not all( - any(check_value(val, val_scheme) for val in value.node.values.values()) + any( + check_value(val, val_scheme, primary_value=value) + for val in value.node.values.values() + ) for val_scheme in schema.required_values ): continue # check for values that may not be present if schema.absent_values is not None and any( - any(check_value(val, val_scheme) for val in value.node.values.values()) + any( + check_value(val, val_scheme, primary_value=value) + for val in value.node.values.values() + ) for val_scheme in schema.absent_values ): continue @@ -1343,10 +1376,12 @@ def async_discover_single_value( entity_category=schema.entity_category, ) + # prevent re-discovery of the (primary) value if not allowed if not schema.allow_multi: - # return early since this value may not be discovered - # by other schemas/platforms - return + discovered_value_ids[device.id].add(value.value_id) + + # prevent re-discovery of the (primary) value after all schemas have been checked + discovered_value_ids[device.id].add(value.value_id) if value.command_class == CommandClass.CONFIGURATION: yield from async_discover_single_configuration_value( @@ -1422,7 +1457,11 @@ def async_discover_single_configuration_value( @callback -def check_value(value: ZwaveValue, schema: ZWaveValueDiscoverySchema) -> bool: +def check_value( + value: ZwaveValue, + schema: 
ZWaveValueDiscoverySchema, + primary_value: ZwaveValue | None = None, +) -> bool: """Check if value matches scheme.""" # check command_class if ( @@ -1433,6 +1472,14 @@ def check_value(value: ZwaveValue, schema: ZWaveValueDiscoverySchema) -> bool: # check endpoint if schema.endpoint is not None and value.endpoint not in schema.endpoint: return False + # If the schema does not require an endpoint, make sure the value is on the + # same endpoint as the primary value + if ( + schema.endpoint is None + and primary_value is not None + and value.endpoint != primary_value.endpoint + ): + return False # check property if schema.property is not None and value.property_ not in schema.property: return False diff --git a/homeassistant/components/zwave_js/entity.py b/homeassistant/components/zwave_js/entity.py index 4a6f87cc032..d1ab9009308 100644 --- a/homeassistant/components/zwave_js/entity.py +++ b/homeassistant/components/zwave_js/entity.py @@ -22,11 +22,10 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import UNDEFINED -from .const import DOMAIN, LOGGER +from .const import DOMAIN, EVENT_VALUE_UPDATED, LOGGER from .discovery import ZwaveDiscoveryInfo from .helpers import get_device_id, get_unique_id, get_valueless_base_unique_id -EVENT_VALUE_UPDATED = "value updated" EVENT_VALUE_REMOVED = "value removed" EVENT_DEAD = "dead" EVENT_ALIVE = "alive" @@ -335,5 +334,6 @@ class ZWaveBaseEntity(Entity): value, new_value, options=options, wait_for_result=wait_for_result ) except BaseZwaveJSServerError as err: - LOGGER.error("Unable to set value %s: %s", value.value_id, err) - raise HomeAssistantError from err + raise HomeAssistantError( + f"Unable to set value {value.value_id}: {err}" + ) from err diff --git a/homeassistant/components/zwave_js/fan.py b/homeassistant/components/zwave_js/fan.py index 37d3fc57886..d83132e4b95 100644 --- a/homeassistant/components/zwave_js/fan.py +++ b/homeassistant/components/zwave_js/fan.py @@ -83,7 +83,6 @@ class ZwaveFan(ZWaveBaseEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo diff --git a/homeassistant/components/zwave_js/icons.json b/homeassistant/components/zwave_js/icons.json index 2956cf2c6e0..b52255e09d1 100644 --- a/homeassistant/components/zwave_js/icons.json +++ b/homeassistant/components/zwave_js/icons.json @@ -57,17 +57,41 @@ } }, "services": { - "bulk_set_partial_config_parameters": "mdi:cogs", - "clear_lock_usercode": "mdi:eraser", - "invoke_cc_api": "mdi:api", - "multicast_set_value": "mdi:list-box", - "ping": "mdi:crosshairs-gps", - "refresh_notifications": "mdi:bell", - "refresh_value": "mdi:refresh", - "reset_meter": "mdi:meter-electric", - "set_config_parameter": "mdi:cog", - "set_lock_configuration": "mdi:shield-lock", - "set_lock_usercode": "mdi:lock-smart", - "set_value": "mdi:form-textbox" + "bulk_set_partial_config_parameters": { + "service": "mdi:cogs" + }, + "clear_lock_usercode": { + "service": "mdi:eraser" + }, + "invoke_cc_api": { + "service": "mdi:api" + }, + "multicast_set_value": { + "service": "mdi:list-box" + }, + "ping": { + "service": "mdi:crosshairs-gps" + }, + "refresh_notifications": { + "service": "mdi:bell" + }, + "refresh_value": { + "service": "mdi:refresh" + }, + "reset_meter": { + "service": "mdi:meter-electric" + }, + "set_config_parameter": { + 
"service": "mdi:cog" + }, + "set_lock_configuration": { + "service": "mdi:shield-lock" + }, + "set_lock_usercode": { + "service": "mdi:lock-smart" + }, + "set_value": { + "service": "mdi:form-textbox" + } } } diff --git a/homeassistant/components/zwave_js/light.py b/homeassistant/components/zwave_js/light.py index 020f1b66b3d..e6cfc6c8b29 100644 --- a/homeassistant/components/zwave_js/light.py +++ b/homeassistant/components/zwave_js/light.py @@ -29,7 +29,7 @@ from zwave_js_server.model.value import Value from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_RGBW_COLOR, ATTR_TRANSITION, @@ -60,6 +60,8 @@ MULTI_COLOR_MAP = { ColorComponent.CYAN: COLOR_SWITCH_COMBINED_CYAN, ColorComponent.PURPLE: COLOR_SWITCH_COMBINED_PURPLE, } +MIN_MIREDS = 153 # 6500K as a safe default +MAX_MIREDS = 370 # 2700K as a safe default async def async_setup_entry( @@ -76,8 +78,8 @@ async def async_setup_entry( driver = client.driver assert driver is not None # Driver is ready before platforms are loaded. - if info.platform_hint == "black_is_off": - async_add_entities([ZwaveBlackIsOffLight(config_entry, driver, info)]) + if info.platform_hint == "color_onoff": + async_add_entities([ZwaveColorOnOffLight(config_entry, driver, info)]) else: async_add_entities([ZwaveLight(config_entry, driver, info)]) @@ -103,6 +105,9 @@ def byte_to_zwave_brightness(value: int) -> int: class ZwaveLight(ZWaveBaseEntity, LightEntity): """Representation of a Z-Wave light.""" + _attr_min_color_temp_kelvin = 2700 # 370 mireds as a safe default + _attr_max_color_temp_kelvin = 6500 # 153 mireds as a safe default + def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo ) -> None: @@ -111,12 +116,11 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): self._supports_color = False self._supports_rgbw = False self._supports_color_temp = False + self._supports_dimming = False + self._color_mode: str | None = None self._hs_color: tuple[float, float] | None = None self._rgbw_color: tuple[int, int, int, int] | None = None - self._color_mode: str | None = None self._color_temp: int | None = None - self._min_mireds = 153 # 6500K as a safe default - self._max_mireds = 370 # 2700K as a safe default self._warm_white = self.get_zwave_value( TARGET_COLOR_PROPERTY, CommandClass.SWITCH_COLOR, @@ -129,15 +133,28 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): ) self._supported_color_modes: set[ColorMode] = set() + self._target_brightness: Value | None = None + # get additional (optional) values and set features - # If the command class is Basic, we must geenerate a name that includes - # the command class name to avoid ambiguity - self._target_brightness = self.get_zwave_value( - TARGET_VALUE_PROPERTY, - CommandClass.SWITCH_MULTILEVEL, - add_to_watched_value_ids=False, - ) - if self.info.primary_value.command_class == CommandClass.BASIC: + if self.info.primary_value.command_class == CommandClass.SWITCH_BINARY: + # This light can not be dimmed separately from the color channels + self._target_brightness = self.get_zwave_value( + TARGET_VALUE_PROPERTY, + CommandClass.SWITCH_BINARY, + add_to_watched_value_ids=False, + ) + self._supports_dimming = False + elif self.info.primary_value.command_class == CommandClass.SWITCH_MULTILEVEL: + # This light can be dimmed separately from the color channels + self._target_brightness = self.get_zwave_value( + TARGET_VALUE_PROPERTY, + CommandClass.SWITCH_MULTILEVEL, + add_to_watched_value_ids=False, + ) + 
self._supports_dimming = True + elif self.info.primary_value.command_class == CommandClass.BASIC: + # If the command class is Basic, we must generate a name that includes + # the command class name to avoid ambiguity self._attr_name = self.generate_name( include_value_name=True, alternate_value_name="Basic" ) @@ -146,6 +163,13 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): CommandClass.BASIC, add_to_watched_value_ids=False, ) + self._supports_dimming = True + + self._current_color = self.get_zwave_value( + CURRENT_COLOR_PROPERTY, + CommandClass.SWITCH_COLOR, + value_property_key=None, + ) self._target_color = self.get_zwave_value( TARGET_COLOR_PROPERTY, CommandClass.SWITCH_COLOR, @@ -216,24 +240,14 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): @property def rgbw_color(self) -> tuple[int, int, int, int] | None: - """Return the hs color.""" + """Return the RGBW color.""" return self._rgbw_color @property - def color_temp(self) -> int | None: - """Return the color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" return self._color_temp - @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self._min_mireds - - @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self._max_mireds - @property def supported_color_modes(self) -> set[ColorMode] | None: """Flag supported features.""" @@ -243,11 +257,39 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): """Turn the device on.""" transition = kwargs.get(ATTR_TRANSITION) + brightness = kwargs.get(ATTR_BRIGHTNESS) + + hs_color = kwargs.get(ATTR_HS_COLOR) + color_temp_k = kwargs.get(ATTR_COLOR_TEMP_KELVIN) + rgbw = kwargs.get(ATTR_RGBW_COLOR) + + new_colors = self._get_new_colors(hs_color, color_temp_k, rgbw) + if new_colors is not None: + await self._async_set_colors(new_colors, transition) + + # set brightness (or turn on if dimming is not supported) + await self._async_set_brightness(brightness, transition) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the light off.""" + await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + + def _get_new_colors( + self, + hs_color: tuple[float, float] | None, + color_temp_k: int | None, + rgbw: tuple[int, int, int, int] | None, + brightness_scale: float | None = None, + ) -> dict[ColorComponent, int] | None: + """Determine the new color dict to set.""" # RGB/HS color - hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None and self._supports_color: red, green, blue = color_util.color_hs_to_RGB(*hs_color) + if brightness_scale is not None: + red = round(red * brightness_scale) + green = round(green * brightness_scale) + blue = round(blue * brightness_scale) colors = { ColorComponent.RED: red, ColorComponent.GREEN: green, @@ -257,38 +299,32 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): # turn of white leds when setting rgb colors[ColorComponent.WARM_WHITE] = 0 colors[ColorComponent.COLD_WHITE] = 0 - await self._async_set_colors(colors, transition) + return colors # Color temperature - color_temp = kwargs.get(ATTR_COLOR_TEMP) - if color_temp is not None and self._supports_color_temp: + if color_temp_k is not None and self._supports_color_temp: # Limit color temp to min/max values + color_temp = color_util.color_temperature_kelvin_to_mired(color_temp_k) cold = max( 0, min( 255, - round( - (self._max_mireds - color_temp) - / (self._max_mireds - self._min_mireds) - * 255 - ), + 
round((MAX_MIREDS - color_temp) / (MAX_MIREDS - MIN_MIREDS) * 255), ), ) warm = 255 - cold - await self._async_set_colors( - { - # turn off color leds when setting color temperature - ColorComponent.RED: 0, - ColorComponent.GREEN: 0, - ColorComponent.BLUE: 0, - ColorComponent.WARM_WHITE: warm, - ColorComponent.COLD_WHITE: cold, - }, - transition, - ) + colors = { + ColorComponent.WARM_WHITE: warm, + ColorComponent.COLD_WHITE: cold, + } + if self._supports_color: + # turn off color leds when setting color temperature + colors[ColorComponent.RED] = 0 + colors[ColorComponent.GREEN] = 0 + colors[ColorComponent.BLUE] = 0 + return colors # RGBW - rgbw = kwargs.get(ATTR_RGBW_COLOR) if rgbw is not None and self._supports_rgbw: rgbw_channels = { ColorComponent.RED: rgbw[0], @@ -300,17 +336,15 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): if self._cold_white: rgbw_channels[ColorComponent.COLD_WHITE] = rgbw[3] - await self._async_set_colors(rgbw_channels, transition) - # set brightness - await self._async_set_brightness(kwargs.get(ATTR_BRIGHTNESS), transition) + return rgbw_channels - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the light off.""" - await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + return None async def _async_set_colors( - self, colors: dict[ColorComponent, int], transition: float | None = None + self, + colors: dict[ColorComponent, int], + transition: float | None = None, ) -> None: """Set (multiple) defined colors to given value(s).""" # prefer the (new) combined color property @@ -361,9 +395,14 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): zwave_transition = {TRANSITION_DURATION_OPTION: "default"} # setting a value requires setting targetValue - await self._async_set_value( - self._target_brightness, zwave_brightness, zwave_transition - ) + if self._supports_dimming: + await self._async_set_value( + self._target_brightness, zwave_brightness, zwave_transition + ) + else: + await self._async_set_value( + self._target_brightness, zwave_brightness > 0, zwave_transition + ) # We do an optimistic state update when setting to a previous value # to avoid waiting for the value to be updated from the device which is # typically delayed and causes a confusing UX. 
@@ -427,15 +466,8 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): """Calculate light colors.""" (red_val, green_val, blue_val, ww_val, cw_val) = self._get_color_values() - # prefer the (new) combined color property - # https://github.com/zwave-js/node-zwave-js/pull/1782 - combined_color_val = self.get_zwave_value( - CURRENT_COLOR_PROPERTY, - CommandClass.SWITCH_COLOR, - value_property_key=None, - ) - if combined_color_val and isinstance(combined_color_val.value, dict): - multi_color = combined_color_val.value + if self._current_color and isinstance(self._current_color.value, dict): + multi_color = self._current_color.value else: multi_color = {} @@ -463,9 +495,8 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): cold_white = multi_color.get(COLOR_SWITCH_COMBINED_COLD_WHITE, cw_val.value) # Calculate color temps based on whites if cold_white or warm_white: - self._color_temp = round( - self._max_mireds - - ((cold_white / 255) * (self._max_mireds - self._min_mireds)) + self._color_temp = color_util.color_temperature_mired_to_kelvin( + MAX_MIREDS - ((cold_white / 255) * (MAX_MIREDS - MIN_MIREDS)) ) # White channels turned on, set color mode to color_temp self._color_mode = ColorMode.COLOR_TEMP @@ -486,11 +517,10 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): self._color_mode = ColorMode.RGBW -class ZwaveBlackIsOffLight(ZwaveLight): - """Representation of a Z-Wave light where setting the color to black turns it off. +class ZwaveColorOnOffLight(ZwaveLight): + """Representation of a colored Z-Wave light with an optional binary switch to turn on/off. - Currently only supports lights with RGB, no color temperature, and no white - channels. + Dimming for RGB lights is realized by scaling the color channels. """ def __init__( @@ -499,61 +529,137 @@ class ZwaveBlackIsOffLight(ZwaveLight): """Initialize the light.""" super().__init__(config_entry, driver, info) - self._last_color: dict[str, int] | None = None - self._supported_color_modes.discard(ColorMode.BRIGHTNESS) + self._last_on_color: dict[ColorComponent, int] | None = None + self._last_brightness: int | None = None @property - def brightness(self) -> int: - """Return the brightness of this light between 0..255.""" - return 255 + def brightness(self) -> int | None: + """Return the brightness of this light between 0..255. - @property - def is_on(self) -> bool | None: - """Return true if device is on (brightness above 0).""" + Z-Wave multilevel switches use a range of [0, 99] to control brightness. 
+ """ if self.info.primary_value.value is None: return None - return any(value != 0 for value in self.info.primary_value.value.values()) + if self._target_brightness and self.info.primary_value.value is False: + # Binary switch exists and is turned off + return 0 + + # Brightness is encoded in the color channels by scaling them lower than 255 + color_values = [ + v.value + for v in self._get_color_values() + if v is not None and v.value is not None + ] + return max(color_values) if color_values else 0 async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" + if ( kwargs.get(ATTR_RGBW_COLOR) is not None - or kwargs.get(ATTR_COLOR_TEMP) is not None - or kwargs.get(ATTR_HS_COLOR) is not None + or kwargs.get(ATTR_COLOR_TEMP_KELVIN) is not None ): + # RGBW and color temp are not supported in this mode, + # delegate to the parent class await super().async_turn_on(**kwargs) return transition = kwargs.get(ATTR_TRANSITION) - # turn on light to last color if known, otherwise set to white - if self._last_color is not None: - await self._async_set_colors( - { - ColorComponent.RED: self._last_color["red"], - ColorComponent.GREEN: self._last_color["green"], - ColorComponent.BLUE: self._last_color["blue"], - }, - transition, - ) - else: - await self._async_set_colors( - { + brightness = kwargs.get(ATTR_BRIGHTNESS) + hs_color = kwargs.get(ATTR_HS_COLOR) + new_colors: dict[ColorComponent, int] | None = None + scale: float | None = None + + if brightness is None and hs_color is None: + # Turned on without specifying brightness or color + if self._last_on_color is not None: + if self._target_brightness: + # Color is already set, use the binary switch to turn on + await self._async_set_brightness(None, transition) + return + + # Preserve the previous color + new_colors = self._last_on_color + elif self._supports_color: + # Turned on for the first time. 
Make it white + new_colors = { ColorComponent.RED: 255, ColorComponent.GREEN: 255, ColorComponent.BLUE: 255, - }, - transition, + } + elif brightness is not None: + # If brightness gets set, preserve the color and mix it with the new brightness + if self.color_mode == ColorMode.HS: + scale = brightness / 255 + if ( + self._last_on_color is not None + and None not in self._last_on_color.values() + ): + # Changed brightness from 0 to >0 + old_brightness = max(self._last_on_color.values()) + new_scale = brightness / old_brightness + scale = new_scale + new_colors = {} + for color, value in self._last_on_color.items(): + new_colors[color] = round(value * new_scale) + elif hs_color is None and self._color_mode == ColorMode.HS: + hs_color = self._hs_color + elif hs_color is not None and brightness is None: + # Turned on by using the color controls + current_brightness = self.brightness + if current_brightness == 0 and self._last_brightness is not None: + # Use the last brightness value if the light is currently off + scale = self._last_brightness / 255 + elif current_brightness is not None: + scale = current_brightness / 255 + + # Reset last color until turning off again + self._last_on_color = None + + if new_colors is None: + new_colors = self._get_new_colors( + hs_color=hs_color, color_temp_k=None, rgbw=None, brightness_scale=scale ) + if new_colors is not None: + await self._async_set_colors(new_colors, transition) + + # Turn the binary switch on if there is one + await self._async_set_brightness(brightness, transition) + async def async_turn_off(self, **kwargs: Any) -> None: """Turn the light off.""" - self._last_color = self.info.primary_value.value - await self._async_set_colors( - { + + # Remember last color and brightness to restore it when turning on + self._last_brightness = self.brightness + if self._current_color and isinstance(self._current_color.value, dict): + red = self._current_color.value.get(COLOR_SWITCH_COMBINED_RED) + green = self._current_color.value.get(COLOR_SWITCH_COMBINED_GREEN) + blue = self._current_color.value.get(COLOR_SWITCH_COMBINED_BLUE) + + last_color: dict[ColorComponent, int] = {} + if red is not None: + last_color[ColorComponent.RED] = red + if green is not None: + last_color[ColorComponent.GREEN] = green + if blue is not None: + last_color[ColorComponent.BLUE] = blue + + if last_color: + self._last_on_color = last_color + + if self._target_brightness: + # Turn off the binary switch only + await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + else: + # turn off all color channels + colors = { ColorComponent.RED: 0, ColorComponent.GREEN: 0, ColorComponent.BLUE: 0, - }, - kwargs.get(ATTR_TRANSITION), - ) - await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + } + + await self._async_set_colors( + colors, + kwargs.get(ATTR_TRANSITION), + ) diff --git a/homeassistant/components/zwave_js/lock.py b/homeassistant/components/zwave_js/lock.py index b16c1090ef3..c14517f4b03 100644 --- a/homeassistant/components/zwave_js/lock.py +++ b/homeassistant/components/zwave_js/lock.py @@ -19,9 +19,8 @@ from zwave_js_server.const.command_class.lock import ( from zwave_js_server.exceptions import BaseZwaveJSServerError from zwave_js_server.util.lock import clear_usercode, set_configuration, set_usercode -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockEntity +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockEntity, LockState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import 
STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_platform @@ -49,12 +48,12 @@ PARALLEL_UPDATES = 0 STATE_TO_ZWAVE_MAP: dict[int, dict[str, int | bool]] = { CommandClass.DOOR_LOCK: { - STATE_UNLOCKED: DoorLockMode.UNSECURED, - STATE_LOCKED: DoorLockMode.SECURED, + LockState.UNLOCKED: DoorLockMode.UNSECURED, + LockState.LOCKED: DoorLockMode.SECURED, }, CommandClass.LOCK: { - STATE_UNLOCKED: False, - STATE_LOCKED: True, + LockState.UNLOCKED: False, + LockState.LOCKED: True, }, } UNIT16_SCHEMA = vol.All(vol.Coerce(int), vol.Range(min=0, max=65535)) @@ -140,7 +139,7 @@ class ZWaveLock(ZWaveBaseEntity, LockEntity): == self.info.primary_value.value ) - async def _set_lock_state(self, target_state: str, **kwargs: Any) -> None: + async def _set_lock_state(self, target_state: LockState, **kwargs: Any) -> None: """Set the lock state.""" target_value = self.get_zwave_value( LOCK_CMD_CLASS_TO_PROPERTY_MAP[ @@ -155,11 +154,11 @@ class ZWaveLock(ZWaveBaseEntity, LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Lock the lock.""" - await self._set_lock_state(STATE_LOCKED) + await self._set_lock_state(LockState.LOCKED) async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - await self._set_lock_state(STATE_UNLOCKED) + await self._set_lock_state(LockState.UNLOCKED) async def async_set_lock_usercode(self, code_slot: int, usercode: str) -> None: """Set the usercode to index X on the lock.""" diff --git a/homeassistant/components/zwave_js/manifest.json b/homeassistant/components/zwave_js/manifest.json index f394537803a..011776f4556 100644 --- a/homeassistant/components/zwave_js/manifest.json +++ b/homeassistant/components/zwave_js/manifest.json @@ -1,6 +1,7 @@ { "domain": "zwave_js", "name": "Z-Wave", + "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/z-wave"], "config_flow": true, "dependencies": ["http", "repairs", "usb", "websocket_api"], @@ -8,8 +9,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["zwave_js_server"], - "quality_scale": "platinum", - "requirements": ["pyserial==3.5", "zwave-js-server-python==0.57.0"], + "requirements": ["pyserial==3.5", "zwave-js-server-python==0.60.0"], "usb": [ { "vid": "0658", diff --git a/homeassistant/components/zwave_js/migrate.py b/homeassistant/components/zwave_js/migrate.py index bde53137dc1..ac749cb516b 100644 --- a/homeassistant/components/zwave_js/migrate.py +++ b/homeassistant/components/zwave_js/migrate.py @@ -6,20 +6,16 @@ from dataclasses import dataclass import logging from zwave_js_server.model.driver import Driver +from zwave_js_server.model.node import Node from zwave_js_server.model.value import Value as ZwaveValue -from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceEntry -from homeassistant.helpers.entity_registry import ( - EntityRegistry, - RegistryEntry, - async_entries_for_device, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from .const import DOMAIN from .discovery import ZwaveDiscoveryInfo -from .helpers import get_unique_id +from .helpers import get_unique_id, get_valueless_base_unique_id _LOGGER = logging.getLogger(__name__) @@ -62,10 +58,10 @@ class ValueID: @callback def async_migrate_old_entity( hass: 
HomeAssistant, - ent_reg: EntityRegistry, + ent_reg: er.EntityRegistry, registered_unique_ids: set[str], - platform: str, - device: DeviceEntry, + platform: Platform, + device: dr.DeviceEntry, unique_id: str, ) -> None: """Migrate existing entity if current one can't be found and an old one exists.""" @@ -77,8 +73,8 @@ def async_migrate_old_entity( # Look for existing entities in the registry that could be the same value but on # a different endpoint - existing_entity_entries: list[RegistryEntry] = [] - for entry in async_entries_for_device(ent_reg, device.id): + existing_entity_entries: list[er.RegistryEntry] = [] + for entry in er.async_entries_for_device(ent_reg, device.id): # If entity is not in the domain for this discovery info or entity has already # been processed, skip it if entry.domain != platform or entry.unique_id in registered_unique_ids: @@ -109,35 +105,40 @@ def async_migrate_old_entity( @callback def async_migrate_unique_id( - ent_reg: EntityRegistry, platform: str, old_unique_id: str, new_unique_id: str + ent_reg: er.EntityRegistry, + platform: Platform, + old_unique_id: str, + new_unique_id: str, ) -> None: """Check if entity with old unique ID exists, and if so migrate it to new ID.""" - if entity_id := ent_reg.async_get_entity_id(platform, DOMAIN, old_unique_id): + if not (entity_id := ent_reg.async_get_entity_id(platform, DOMAIN, old_unique_id)): + return + + _LOGGER.debug( + "Migrating entity %s from old unique ID '%s' to new unique ID '%s'", + entity_id, + old_unique_id, + new_unique_id, + ) + try: + ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) + except ValueError: _LOGGER.debug( - "Migrating entity %s from old unique ID '%s' to new unique ID '%s'", + ( + "Entity %s can't be migrated because the unique ID is taken; " + "Cleaning it up since it is likely no longer valid" + ), entity_id, - old_unique_id, - new_unique_id, ) - try: - ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) - except ValueError: - _LOGGER.debug( - ( - "Entity %s can't be migrated because the unique ID is taken; " - "Cleaning it up since it is likely no longer valid" - ), - entity_id, - ) - ent_reg.async_remove(entity_id) + ent_reg.async_remove(entity_id) @callback def async_migrate_discovered_value( hass: HomeAssistant, - ent_reg: EntityRegistry, + ent_reg: er.EntityRegistry, registered_unique_ids: set[str], - device: DeviceEntry, + device: dr.DeviceEntry, driver: Driver, disc_info: ZwaveDiscoveryInfo, ) -> None: @@ -160,7 +161,7 @@ def async_migrate_discovered_value( ] if ( - disc_info.platform == "binary_sensor" + disc_info.platform == Platform.BINARY_SENSOR and disc_info.platform_hint == "notification" ): for state_key in disc_info.primary_value.metadata.states: @@ -211,6 +212,24 @@ def async_migrate_discovered_value( registered_unique_ids.add(new_unique_id) +@callback +def async_migrate_statistics_sensors( + hass: HomeAssistant, driver: Driver, node: Node, key_map: dict[str, str] +) -> None: + """Migrate statistics sensors to new unique IDs. + + - Migrate camel case keys in unique IDs to snake keys. 
+ """ + ent_reg = er.async_get(hass) + base_unique_id = f"{get_valueless_base_unique_id(driver, node)}.statistics" + for new_key, old_key in key_map.items(): + if new_key == old_key: + continue + old_unique_id = f"{base_unique_id}_{old_key}" + new_unique_id = f"{base_unique_id}_{new_key}" + async_migrate_unique_id(ent_reg, Platform.SENSOR, old_unique_id, new_unique_id) + + @callback def get_old_value_ids(value: ZwaveValue) -> list[str]: """Get old value IDs so we can migrate entity unique ID.""" diff --git a/homeassistant/components/zwave_js/sensor.py b/homeassistant/components/zwave_js/sensor.py index e43c620ff54..b259711d21b 100644 --- a/homeassistant/components/zwave_js/sensor.py +++ b/homeassistant/components/zwave_js/sensor.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable, Mapping from dataclasses import dataclass -from datetime import datetime from typing import Any import voluptuous as vol @@ -16,10 +15,10 @@ from zwave_js_server.const.command_class.meter import ( ) from zwave_js_server.exceptions import BaseZwaveJSServerError from zwave_js_server.model.controller import Controller -from zwave_js_server.model.controller.statistics import ControllerStatisticsDataType +from zwave_js_server.model.controller.statistics import ControllerStatistics from zwave_js_server.model.driver import Driver from zwave_js_server.model.node import Node as ZwaveNode -from zwave_js_server.model.node.statistics import NodeStatisticsDataType +from zwave_js_server.model.node.statistics import NodeStatistics from zwave_js_server.util.command_class.meter import get_meter_type from homeassistant.components.sensor import ( @@ -52,6 +51,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UNDEFINED, StateType +from .binary_sensor import is_valid_notification_binary_sensor from .const import ( ATTR_METER_TYPE, ATTR_METER_TYPE_NAME, @@ -90,6 +90,7 @@ from .discovery_data_template import ( ) from .entity import ZWaveBaseEntity from .helpers import get_device_info, get_valueless_base_unique_id +from .migrate import async_migrate_statistics_sensors PARALLEL_UPDATES = 0 @@ -328,152 +329,172 @@ ENTITY_DESCRIPTION_KEY_MAP = { } -def convert_dict_of_dicts( - statistics: ControllerStatisticsDataType | NodeStatisticsDataType, key: str +def convert_nested_attr( + statistics: ControllerStatistics | NodeStatistics, key: str ) -> Any: - """Convert a dictionary of dictionaries to a value.""" - keys = key.split(".") - return statistics.get(keys[0], {}).get(keys[1], {}).get(keys[2]) # type: ignore[attr-defined] + """Convert a string that represents a nested attr to a value.""" + data = statistics + for _key in key.split("."): + if data is None: + return None # type: ignore[unreachable] + data = getattr(data, _key) + return data @dataclass(frozen=True, kw_only=True) class ZWaveJSStatisticsSensorEntityDescription(SensorEntityDescription): """Class to represent a Z-Wave JS statistics sensor entity description.""" - convert: Callable[ - [ControllerStatisticsDataType | NodeStatisticsDataType, str], Any - ] = lambda statistics, key: statistics.get(key) + convert: Callable[[ControllerStatistics | NodeStatistics, str], Any] = getattr entity_registry_enabled_default: bool = False # Controller statistics descriptions ENTITY_DESCRIPTION_CONTROLLER_STATISTICS_LIST = [ ZWaveJSStatisticsSensorEntityDescription( - key="messagesTX", + key="messages_tx", 
translation_key="successful_messages", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="messagesRX", + key="messages_rx", translation_key="successful_messages", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="messagesDroppedTX", + key="messages_dropped_tx", translation_key="messages_dropped", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="messagesDroppedRX", + key="messages_dropped_rx", translation_key="messages_dropped", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="NAK", translation_key="nak", state_class=SensorStateClass.TOTAL + key="nak", translation_key="nak", state_class=SensorStateClass.TOTAL ), ZWaveJSStatisticsSensorEntityDescription( - key="CAN", translation_key="can", state_class=SensorStateClass.TOTAL + key="can", translation_key="can", state_class=SensorStateClass.TOTAL ), ZWaveJSStatisticsSensorEntityDescription( - key="timeoutACK", + key="timeout_ack", translation_key="timeout_ack", state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="timeoutResponse", + key="timeout_response", translation_key="timeout_response", state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="timeoutCallback", + key="timeout_callback", translation_key="timeout_callback", state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel0.average", + key="background_rssi.channel_0.average", translation_key="average_background_rssi", translation_placeholders={"channel": "0"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel0.current", + key="background_rssi.channel_0.current", translation_key="current_background_rssi", translation_placeholders={"channel": "0"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, state_class=SensorStateClass.MEASUREMENT, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel1.average", + key="background_rssi.channel_1.average", translation_key="average_background_rssi", translation_placeholders={"channel": "1"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel1.current", + key="background_rssi.channel_1.current", translation_key="current_background_rssi", translation_placeholders={"channel": "1"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, state_class=SensorStateClass.MEASUREMENT, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel2.average", + key="background_rssi.channel_2.average", translation_key="average_background_rssi", translation_placeholders={"channel": "2"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, 
device_class=SensorDeviceClass.SIGNAL_STRENGTH, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel2.current", + key="background_rssi.channel_2.current", translation_key="current_background_rssi", translation_placeholders={"channel": "2"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, state_class=SensorStateClass.MEASUREMENT, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ] +CONTROLLER_STATISTICS_KEY_MAP: dict[str, str] = { + "messages_tx": "messagesTX", + "messages_rx": "messagesRX", + "messages_dropped_tx": "messagesDroppedTX", + "messages_dropped_rx": "messagesDroppedRX", + "nak": "NAK", + "can": "CAN", + "timeout_ack": "timeoutAck", + "timeout_response": "timeoutResponse", + "timeout_callback": "timeoutCallback", + "background_rssi.channel_0.average": "backgroundRSSI.channel0.average", + "background_rssi.channel_0.current": "backgroundRSSI.channel0.current", + "background_rssi.channel_1.average": "backgroundRSSI.channel1.average", + "background_rssi.channel_1.current": "backgroundRSSI.channel1.current", + "background_rssi.channel_2.average": "backgroundRSSI.channel2.average", + "background_rssi.channel_2.current": "backgroundRSSI.channel2.current", +} + # Node statistics descriptions ENTITY_DESCRIPTION_NODE_STATISTICS_LIST = [ ZWaveJSStatisticsSensorEntityDescription( - key="commandsRX", + key="commands_rx", translation_key="successful_commands", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="commandsTX", + key="commands_tx", translation_key="successful_commands", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="commandsDroppedRX", + key="commands_dropped_rx", translation_key="commands_dropped", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="commandsDroppedTX", + key="commands_dropped_tx", translation_key="commands_dropped", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="timeoutResponse", + key="timeout_response", translation_key="timeout_response", state_class=SensorStateClass.TOTAL, ), @@ -492,20 +513,24 @@ ENTITY_DESCRIPTION_NODE_STATISTICS_LIST = [ state_class=SensorStateClass.MEASUREMENT, ), ZWaveJSStatisticsSensorEntityDescription( - key="lastSeen", + key="last_seen", translation_key="last_seen", device_class=SensorDeviceClass.TIMESTAMP, - convert=( - lambda statistics, key: ( - datetime.fromisoformat(dt) # type: ignore[arg-type] - if (dt := statistics.get(key)) - else None - ) - ), entity_registry_enabled_default=True, ), ] +NODE_STATISTICS_KEY_MAP: dict[str, str] = { + "commands_rx": "commandsRX", + "commands_tx": "commandsTX", + "commands_dropped_rx": "commandsDroppedRX", + "commands_dropped_tx": "commandsDroppedTX", + "timeout_response": "timeoutResponse", + "rtt": "rtt", + "rssi": "rssi", + "last_seen": "lastSeen", +} + def get_entity_description( data: NumericSensorDataTemplateData, @@ -556,7 +581,10 @@ async def async_setup_entry( data.unit_of_measurement, ) ) - elif info.platform_hint == "list_sensor": + elif info.platform_hint == "notification": + # prevent duplicate entities for values that are already represented as binary sensors + if 
is_valid_notification_binary_sensor(info): + return entities.append( ZWaveListSensor(config_entry, driver, info, entity_description) ) @@ -588,6 +616,14 @@ async def async_setup_entry( @callback def async_add_statistics_sensors(node: ZwaveNode) -> None: """Add statistics sensors.""" + async_migrate_statistics_sensors( + hass, + driver, + node, + CONTROLLER_STATISTICS_KEY_MAP + if driver.controller.own_node == node + else NODE_STATISTICS_KEY_MAP, + ) async_add_entities( [ ZWaveStatisticsSensor( @@ -750,10 +786,9 @@ class ZWaveMeterSensor(ZWaveNumericSensor): CommandClass.METER, "reset", *args, wait_for_result=False ) except BaseZwaveJSServerError as err: - LOGGER.error( - "Failed to reset meters on node %s endpoint %s: %s", node, endpoint, err - ) - raise HomeAssistantError from err + raise HomeAssistantError( + f"Failed to reset meters on node {node} endpoint {endpoint}: {err}" + ) from err LOGGER.debug( "Meters on node %s endpoint %s reset with the following options: %s", node, @@ -1002,7 +1037,7 @@ class ZWaveStatisticsSensor(SensorEntity): def statistics_updated(self, event_data: dict) -> None: """Call when statistics updated event is received.""" self._attr_native_value = self.entity_description.convert( - event_data["statistics"], self.entity_description.key + event_data["statistics_updated"], self.entity_description.key ) self.async_write_ha_state() @@ -1028,5 +1063,5 @@ class ZWaveStatisticsSensor(SensorEntity): # Set initial state self._attr_native_value = self.entity_description.convert( - self.statistics_src.statistics.data, self.entity_description.key + self.statistics_src.statistics, self.entity_description.key ) diff --git a/homeassistant/components/zwave_js/services.py b/homeassistant/components/zwave_js/services.py index 969a235bb41..d1cb66ceafc 100644 --- a/homeassistant/components/zwave_js/services.py +++ b/homeassistant/components/zwave_js/services.py @@ -529,8 +529,15 @@ class ZWaveServices: for node_or_endpoint, result in get_valid_responses_from_results( nodes_or_endpoints_list, _results ): - zwave_value = result[0] - cmd_status = result[1] + if value_size is None: + # async_set_config_parameter still returns (Value, SetConfigParameterResult) + zwave_value = result[0] + cmd_status = result[1] + else: + # async_set_raw_config_parameter_value now returns just SetConfigParameterResult + cmd_status = result + zwave_value = f"parameter {property_or_property_name}" + if cmd_status.status == CommandStatus.ACCEPTED: msg = "Set configuration parameter %s on Node %s with value %s" else: diff --git a/homeassistant/components/zwave_js/services.yaml b/homeassistant/components/zwave_js/services.yaml index f5063fdfd93..acf6e9a0665 100644 --- a/homeassistant/components/zwave_js/services.yaml +++ b/homeassistant/components/zwave_js/services.yaml @@ -51,16 +51,6 @@ set_lock_configuration: min: 0 max: 65535 unit_of_measurement: sec - outside_handles_can_open_door_configuration: - required: false - example: [true, true, true, false] - selector: - object: - inside_handles_can_open_door_configuration: - required: false - example: [true, true, true, false] - selector: - object: auto_relock_time: required: false example: 1 diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index ca7d5153e6e..28789bbf9f4 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -523,10 +523,6 @@ "description": "Duration in seconds the latch stays retracted.", "name": "Hold and release time" }, - 
"inside_handles_can_open_door_configuration": { - "description": "A list of four booleans which indicate which inside handles can open the door.", - "name": "Inside handles can open door configuration" - }, "lock_timeout": { "description": "Seconds until lock mode times out. Should only be used if operation type is `timed`.", "name": "Lock timeout" @@ -535,10 +531,6 @@ "description": "The operation type of the lock.", "name": "Operation Type" }, - "outside_handles_can_open_door_configuration": { - "description": "A list of four booleans which indicate which outside handles can open the door.", - "name": "Outside handles can open door configuration" - }, "twist_assist": { "description": "Enable Twist Assist.", "name": "Twist assist" diff --git a/homeassistant/components/zwave_js/triggers/event.py b/homeassistant/components/zwave_js/triggers/event.py index 9938d08408c..db52683c173 100644 --- a/homeassistant/components/zwave_js/triggers/event.py +++ b/homeassistant/components/zwave_js/triggers/event.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable import functools -from pydantic import ValidationError +from pydantic.v1 import ValidationError import voluptuous as vol from zwave_js_server.client import Client from zwave_js_server.model.controller import CONTROLLER_EVENT_MODEL_MAP diff --git a/homeassistant/components/zwave_js/triggers/value_updated.py b/homeassistant/components/zwave_js/triggers/value_updated.py index d8c5702ce5d..d6378ea27d5 100644 --- a/homeassistant/components/zwave_js/triggers/value_updated.py +++ b/homeassistant/components/zwave_js/triggers/value_updated.py @@ -32,6 +32,7 @@ from ..const import ( ATTR_PROPERTY_KEY_NAME, ATTR_PROPERTY_NAME, DOMAIN, + EVENT_VALUE_UPDATED, ) from ..helpers import async_get_nodes_from_targets, get_device_id from .trigger_helpers import async_bypass_dynamic_config_validation @@ -184,7 +185,7 @@ async def async_attach_trigger( # We need to store the current value and device for the callback unsubs.append( node.on( - "value updated", + EVENT_VALUE_UPDATED, functools.partial(async_on_value_updated, value, device), ) ) diff --git a/homeassistant/components/zwave_js/update.py b/homeassistant/components/zwave_js/update.py index 02c59d220e1..d060abe007d 100644 --- a/homeassistant/components/zwave_js/update.py +++ b/homeassistant/components/zwave_js/update.py @@ -155,7 +155,8 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity): progress: NodeFirmwareUpdateProgress = event["firmware_update_progress"] if not self._latest_version_firmware: return - self._attr_in_progress = int(progress.progress) + self._attr_in_progress = True + self._attr_update_percentage = int(progress.progress) self.async_write_ha_state() @callback @@ -181,6 +182,7 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity): self._result = None self._finished_event.clear() self._attr_in_progress = False + self._attr_update_percentage = None if write_state: self.async_write_ha_state() @@ -267,6 +269,7 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity): assert firmware self._unsub_firmware_events_and_reset_progress(False) self._attr_in_progress = True + self._attr_update_percentage = None self.async_write_ha_state() self._progress_unsub = self.node.on( diff --git a/homeassistant/components/zwave_me/__init__.py b/homeassistant/components/zwave_me/__init__.py index 7e00924c221..36ee62eec53 100644 --- a/homeassistant/components/zwave_me/__init__.py +++ b/homeassistant/components/zwave_me/__init__.py @@ -1,21 +1,16 @@ """The Z-Wave-Me WS integration.""" -import logging - 
from zwave_me_ws import ZWaveMe, ZWaveMeData from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_TOKEN, CONF_URL -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.dispatcher import dispatcher_send from .const import DOMAIN, PLATFORMS, ZWaveMePlatform -_LOGGER = logging.getLogger(__name__) ZWAVE_ME_PLATFORMS = [platform.value for platform in ZWaveMePlatform] @@ -111,66 +106,3 @@ async def async_setup_platforms( controller.platforms_inited = True await hass.async_add_executor_job(controller.zwave_api.get_devices) - - -class ZWaveMeEntity(Entity): - """Representation of a ZWaveMe device.""" - - def __init__(self, controller, device): - """Initialize the device.""" - self.controller = controller - self.device = device - self._attr_name = device.title - self._attr_unique_id: str = ( - f"{self.controller.config.unique_id}-{self.device.id}" - ) - self._attr_should_poll = False - - @property - def device_info(self) -> DeviceInfo: - """Return device specific attributes.""" - return DeviceInfo( - identifiers={(DOMAIN, self.device.deviceIdentifier)}, - name=self._attr_name, - manufacturer=self.device.manufacturer, - sw_version=self.device.firmware, - suggested_area=self.device.locationName, - ) - - async def async_added_to_hass(self) -> None: - """Connect to an updater.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, f"ZWAVE_ME_INFO_{self.device.id}", self.get_new_data - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"ZWAVE_ME_UNAVAILABLE_{self.device.id}", - self.set_unavailable_status, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, f"ZWAVE_ME_DESTROY_{self.device.id}", self.delete_entity - ) - ) - - @callback - def get_new_data(self, new_data: ZWaveMeData) -> None: - """Update info in the HAss.""" - self.device = new_data - self._attr_available = not new_data.isFailed - self.async_write_ha_state() - - @callback - def set_unavailable_status(self): - """Update status in the HAss.""" - self._attr_available = False - self.async_write_ha_state() - - @callback - def delete_entity(self) -> None: - """Remove this entity.""" - self.hass.async_create_task(self.async_remove(force_remove=True)) diff --git a/homeassistant/components/zwave_me/binary_sensor.py b/homeassistant/components/zwave_me/binary_sensor.py index 3be8f912b6d..d121c17770b 100644 --- a/homeassistant/components/zwave_me/binary_sensor.py +++ b/homeassistant/components/zwave_me/binary_sensor.py @@ -14,8 +14,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeController, ZWaveMeEntity +from . 
import ZWaveMeController from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity BINARY_SENSORS_MAP: dict[str, BinarySensorEntityDescription] = { "generic": BinarySensorEntityDescription( diff --git a/homeassistant/components/zwave_me/button.py b/homeassistant/components/zwave_me/button.py index f7f1d5d7945..50ddf01aeab 100644 --- a/homeassistant/components/zwave_me/button.py +++ b/homeassistant/components/zwave_me/button.py @@ -6,8 +6,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.BUTTON diff --git a/homeassistant/components/zwave_me/climate.py b/homeassistant/components/zwave_me/climate.py index 02112e51617..b8eed88b505 100644 --- a/homeassistant/components/zwave_me/climate.py +++ b/homeassistant/components/zwave_me/climate.py @@ -17,8 +17,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity TEMPERATURE_DEFAULT_STEP = 0.5 @@ -57,7 +57,6 @@ class ZWaveMeClimate(ZWaveMeEntity, ClimateEntity): _attr_hvac_mode = HVACMode.HEAT _attr_hvac_modes = [HVACMode.HEAT] _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - _enable_turn_on_off_backwards_compatibility = False def set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" diff --git a/homeassistant/components/zwave_me/cover.py b/homeassistant/components/zwave_me/cover.py index c2eec09496d..c9359402c01 100644 --- a/homeassistant/components/zwave_me/cover.py +++ b/homeassistant/components/zwave_me/cover.py @@ -14,8 +14,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.COVER diff --git a/homeassistant/components/zwave_me/entity.py b/homeassistant/components/zwave_me/entity.py new file mode 100644 index 00000000000..a02c893d54a --- /dev/null +++ b/homeassistant/components/zwave_me/entity.py @@ -0,0 +1,73 @@ +"""The Z-Wave-Me WS integration.""" + +from zwave_me_ws import ZWaveMeData + +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + + +class ZWaveMeEntity(Entity): + """Representation of a ZWaveMe device.""" + + def __init__(self, controller, device): + """Initialize the device.""" + self.controller = controller + self.device = device + self._attr_name = device.title + self._attr_unique_id: str = ( + f"{self.controller.config.unique_id}-{self.device.id}" + ) + self._attr_should_poll = False + + @property + def device_info(self) -> DeviceInfo: + """Return device specific attributes.""" + return DeviceInfo( + identifiers={(DOMAIN, self.device.deviceIdentifier)}, + name=self._attr_name, + manufacturer=self.device.manufacturer, + sw_version=self.device.firmware, + suggested_area=self.device.locationName, + ) + + async def async_added_to_hass(self) -> None: + """Connect to an updater.""" + self.async_on_remove( + async_dispatcher_connect( + self.hass, f"ZWAVE_ME_INFO_{self.device.id}", self.get_new_data + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"ZWAVE_ME_UNAVAILABLE_{self.device.id}", + self.set_unavailable_status, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, f"ZWAVE_ME_DESTROY_{self.device.id}", self.delete_entity + ) + ) + + @callback + def get_new_data(self, new_data: ZWaveMeData) -> None: + """Update info in the HAss.""" + self.device = new_data + self._attr_available = not new_data.isFailed + self.async_write_ha_state() + + @callback + def set_unavailable_status(self): + """Update status in the HAss.""" + self._attr_available = False + self.async_write_ha_state() + + @callback + def delete_entity(self) -> None: + """Remove this entity.""" + self.hass.async_create_task(self.async_remove(force_remove=True)) diff --git a/homeassistant/components/zwave_me/fan.py b/homeassistant/components/zwave_me/fan.py index b8a4b5e4ad2..bd0feba0dfb 100644 --- a/homeassistant/components/zwave_me/fan.py +++ b/homeassistant/components/zwave_me/fan.py @@ -10,8 +10,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.FAN @@ -49,7 +49,6 @@ class ZWaveMeFan(ZWaveMeEntity, FanEntity): | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) - _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int: diff --git a/homeassistant/components/zwave_me/light.py b/homeassistant/components/zwave_me/light.py index 2289fe7b115..ef3eca5d389 100644 --- a/homeassistant/components/zwave_me/light.py +++ b/homeassistant/components/zwave_me/light.py @@ -17,8 +17,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeController, ZWaveMeEntity +from . import ZWaveMeController from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity async def async_setup_entry( @@ -84,8 +85,8 @@ class ZWaveMeRGB(ZWaveMeEntity, LightEntity): self.device.id, f"exact?level={round(brightness / 2.55)}" ) return - cmd = "exact?red={}&green={}&blue={}" - cmd = cmd.format(*color) if any(color) else cmd.format(*(255, 255, 255)) + red, green, blue = color if any(color) else (255, 255, 255) + cmd = f"exact?red={red}&green={green}&blue={blue}" self.controller.zwave_api.send_command(self.device.id, cmd) @property diff --git a/homeassistant/components/zwave_me/lock.py b/homeassistant/components/zwave_me/lock.py index 6218dac1627..0bcc8f092ae 100644 --- a/homeassistant/components/zwave_me/lock.py +++ b/homeassistant/components/zwave_me/lock.py @@ -12,8 +12,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.LOCK diff --git a/homeassistant/components/zwave_me/number.py b/homeassistant/components/zwave_me/number.py index 272e833d678..9a98a4f8d00 100644 --- a/homeassistant/components/zwave_me/number.py +++ b/homeassistant/components/zwave_me/number.py @@ -6,8 +6,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.NUMBER diff --git a/homeassistant/components/zwave_me/sensor.py b/homeassistant/components/zwave_me/sensor.py index 20470e6e62b..be0b0bae284 100644 --- a/homeassistant/components/zwave_me/sensor.py +++ b/homeassistant/components/zwave_me/sensor.py @@ -28,8 +28,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeController, ZWaveMeEntity +from . 
import ZWaveMeController from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity @dataclass(frozen=True) diff --git a/homeassistant/components/zwave_me/siren.py b/homeassistant/components/zwave_me/siren.py index a1bf8081616..443b2cc7b37 100644 --- a/homeassistant/components/zwave_me/siren.py +++ b/homeassistant/components/zwave_me/siren.py @@ -8,8 +8,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.SIREN diff --git a/homeassistant/components/zwave_me/switch.py b/homeassistant/components/zwave_me/switch.py index 4c11f079b12..05cf06484e9 100644 --- a/homeassistant/components/zwave_me/switch.py +++ b/homeassistant/components/zwave_me/switch.py @@ -13,8 +13,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform +from .entity import ZWaveMeEntity _LOGGER = logging.getLogger(__name__) DEVICE_NAME = ZWaveMePlatform.SWITCH diff --git a/homeassistant/config.py b/homeassistant/config.py index 9063429ca91..e9089f27662 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -17,62 +17,23 @@ import re import shutil from types import ModuleType from typing import TYPE_CHECKING, Any -from urllib.parse import urlparse from awesomeversion import AwesomeVersion import voluptuous as vol from voluptuous.humanize import MAX_VALIDATION_ERROR_ITEM_LENGTH from yaml.error import MarkedYAMLError -from . 
import auth -from .auth import mfa_modules as auth_mfa_modules, providers as auth_providers -from .const import ( - ATTR_ASSUMED_STATE, - ATTR_FRIENDLY_NAME, - ATTR_HIDDEN, - CONF_ALLOWLIST_EXTERNAL_DIRS, - CONF_ALLOWLIST_EXTERNAL_URLS, - CONF_AUTH_MFA_MODULES, - CONF_AUTH_PROVIDERS, - CONF_COUNTRY, - CONF_CURRENCY, - CONF_CUSTOMIZE, - CONF_CUSTOMIZE_DOMAIN, - CONF_CUSTOMIZE_GLOB, - CONF_DEBUG, - CONF_ELEVATION, - CONF_EXTERNAL_URL, - CONF_ID, - CONF_INTERNAL_URL, - CONF_LANGUAGE, - CONF_LATITUDE, - CONF_LEGACY_TEMPLATES, - CONF_LONGITUDE, - CONF_MEDIA_DIRS, - CONF_NAME, - CONF_PACKAGES, - CONF_PLATFORM, - CONF_RADIUS, - CONF_TEMPERATURE_UNIT, - CONF_TIME_ZONE, - CONF_TYPE, - CONF_UNIT_SYSTEM, - LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, - __version__, -) -from .core import DOMAIN as HOMEASSISTANT_DOMAIN, ConfigSource, HomeAssistant, callback +from .const import CONF_PACKAGES, CONF_PLATFORM, __version__ +from .core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from .core_config import _PACKAGE_DEFINITION_SCHEMA, _PACKAGES_CONFIG_SCHEMA from .exceptions import ConfigValidationError, HomeAssistantError -from .generated.currencies import HISTORIC_CURRENCIES -from .helpers import config_validation as cv, issue_registry as ir -from .helpers.entity_values import EntityValues +from .helpers import config_validation as cv from .helpers.translation import async_get_exception_message from .helpers.typing import ConfigType from .loader import ComponentProtocol, Integration, IntegrationNotFound from .requirements import RequirementsNotFound, async_get_integration_with_requirements from .util.async_ import create_eager_task -from .util.hass_dict import HassKey from .util.package import is_docker_env -from .util.unit_system import get_unit_system, validate_unit_system from .util.yaml import SECRET_YAML, Secrets, YamlTypeError, load_yaml_dict from .util.yaml.objects import NodeStrClass @@ -83,7 +44,6 @@ RE_ASCII = re.compile(r"\033\[[^m]*m") YAML_CONFIG_FILE = "configuration.yaml" VERSION_FILE = ".HA_VERSION" CONFIG_DIR_NAME = ".homeassistant" -DATA_CUSTOMIZE: HassKey[EntityValues] = HassKey("hass_customize") AUTOMATION_CONFIG_PATH = "automations.yaml" SCRIPT_CONFIG_PATH = "scripts.yaml" @@ -172,201 +132,6 @@ class IntegrationConfigInfo: exception_info_list: list[ConfigExceptionInfo] -def _no_duplicate_auth_provider( - configs: Sequence[dict[str, Any]], -) -> Sequence[dict[str, Any]]: - """No duplicate auth provider config allowed in a list. - - Each type of auth provider can only have one config without optional id. - Unique id is required if same type of auth provider used multiple times. - """ - config_keys: set[tuple[str, str | None]] = set() - for config in configs: - key = (config[CONF_TYPE], config.get(CONF_ID)) - if key in config_keys: - raise vol.Invalid( - f"Duplicate auth provider {config[CONF_TYPE]} found. " - "Please add unique IDs " - "if you want to have the same auth provider twice" - ) - config_keys.add(key) - return configs - - -def _no_duplicate_auth_mfa_module( - configs: Sequence[dict[str, Any]], -) -> Sequence[dict[str, Any]]: - """No duplicate auth mfa module item allowed in a list. - - Each type of mfa module can only have one config without optional id. - A global unique id is required if same type of mfa module used multiple - times. 
- Note: this is different than auth provider - """ - config_keys: set[str] = set() - for config in configs: - key = config.get(CONF_ID, config[CONF_TYPE]) - if key in config_keys: - raise vol.Invalid( - f"Duplicate mfa module {config[CONF_TYPE]} found. " - "Please add unique IDs " - "if you want to have the same mfa module twice" - ) - config_keys.add(key) - return configs - - -def _filter_bad_internal_external_urls(conf: dict) -> dict: - """Filter internal/external URL with a path.""" - for key in CONF_INTERNAL_URL, CONF_EXTERNAL_URL: - if key in conf and urlparse(conf[key]).path not in ("", "/"): - # We warn but do not fix, because if this was incorrectly configured, - # adjusting this value might impact security. - _LOGGER.warning( - "Invalid %s set. It's not allowed to have a path (/bla)", key - ) - - return conf - - -# Schema for all packages element -PACKAGES_CONFIG_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list)}) - -# Schema for individual package definition -PACKAGE_DEFINITION_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list, None)}) - -CUSTOMIZE_DICT_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_FRIENDLY_NAME): cv.string, - vol.Optional(ATTR_HIDDEN): cv.boolean, - vol.Optional(ATTR_ASSUMED_STATE): cv.boolean, - }, - extra=vol.ALLOW_EXTRA, -) - -CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( - { - vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema( - {cv.entity_id: CUSTOMIZE_DICT_SCHEMA} - ), - vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}): vol.Schema( - {cv.string: CUSTOMIZE_DICT_SCHEMA} - ), - vol.Optional(CONF_CUSTOMIZE_GLOB, default={}): vol.Schema( - {cv.string: CUSTOMIZE_DICT_SCHEMA} - ), - } -) - - -def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: - if currency not in HISTORIC_CURRENCIES: - ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "historic_currency") - return - - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - "historic_currency", - is_fixable=False, - learn_more_url="homeassistant://config/general", - severity=ir.IssueSeverity.WARNING, - translation_key="historic_currency", - translation_placeholders={"currency": currency}, - ) - - -def _raise_issue_if_no_country(hass: HomeAssistant, country: str | None) -> None: - if country is not None: - ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "country_not_configured") - return - - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - "country_not_configured", - is_fixable=False, - learn_more_url="homeassistant://config/general", - severity=ir.IssueSeverity.WARNING, - translation_key="country_not_configured", - ) - - -def _validate_currency(data: Any) -> Any: - try: - return cv.currency(data) - except vol.InInvalid: - with suppress(vol.InInvalid): - return cv.historic_currency(data) - raise - - -CORE_CONFIG_SCHEMA = vol.All( - CUSTOMIZE_CONFIG_SCHEMA.extend( - { - CONF_NAME: vol.Coerce(str), - CONF_LATITUDE: cv.latitude, - CONF_LONGITUDE: cv.longitude, - CONF_ELEVATION: vol.Coerce(int), - CONF_RADIUS: cv.positive_int, - vol.Remove(CONF_TEMPERATURE_UNIT): cv.temperature_unit, - CONF_UNIT_SYSTEM: validate_unit_system, - CONF_TIME_ZONE: cv.time_zone, - vol.Optional(CONF_INTERNAL_URL): cv.url, - vol.Optional(CONF_EXTERNAL_URL): cv.url, - vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All( - cv.ensure_list, [vol.IsDir()] - ), - vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All( - cv.ensure_list, [vol.IsDir()] - ), - vol.Optional(CONF_ALLOWLIST_EXTERNAL_URLS): vol.All( - cv.ensure_list, [cv.url] - ), - vol.Optional(CONF_PACKAGES, default={}): PACKAGES_CONFIG_SCHEMA, - 
vol.Optional(CONF_AUTH_PROVIDERS): vol.All( - cv.ensure_list, - [ - auth_providers.AUTH_PROVIDER_SCHEMA.extend( - { - CONF_TYPE: vol.NotIn( - ["insecure_example"], - ( - "The insecure_example auth provider" - " is for testing only." - ), - ) - } - ) - ], - _no_duplicate_auth_provider, - ), - vol.Optional(CONF_AUTH_MFA_MODULES): vol.All( - cv.ensure_list, - [ - auth_mfa_modules.MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend( - { - CONF_TYPE: vol.NotIn( - ["insecure_example"], - "The insecure_example mfa module is for testing only.", - ) - } - ) - ], - _no_duplicate_auth_mfa_module, - ), - vol.Optional(CONF_MEDIA_DIRS): cv.schema_with_slug_keys(vol.IsDir()), - vol.Remove(CONF_LEGACY_TEMPLATES): cv.boolean, - vol.Optional(CONF_CURRENCY): _validate_currency, - vol.Optional(CONF_COUNTRY): cv.country, - vol.Optional(CONF_LANGUAGE): cv.language, - vol.Optional(CONF_DEBUG): cv.boolean, - } - ), - _filter_bad_internal_external_urls, -) - - def get_default_config_dir() -> str: """Put together the default configuration directory based on the OS.""" data_dir = os.path.expanduser("~") @@ -812,131 +577,6 @@ def format_schema_error( return humanize_error(hass, exc, domain, config, link) -async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> None: - """Process the [homeassistant] section from the configuration. - - This method is a coroutine. - """ - # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir - # so we need to run it in an executor job. - config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) - - # Only load auth during startup. - if not hasattr(hass, "auth"): - if (auth_conf := config.get(CONF_AUTH_PROVIDERS)) is None: - auth_conf = [{"type": "homeassistant"}] - - mfa_conf = config.get( - CONF_AUTH_MFA_MODULES, - [{"type": "totp", "id": "totp", "name": "Authenticator app"}], - ) - - setattr( - hass, "auth", await auth.auth_manager_from_config(hass, auth_conf, mfa_conf) - ) - - await hass.config.async_load() - - hac = hass.config - - if any( - k in config - for k in ( - CONF_LATITUDE, - CONF_LONGITUDE, - CONF_NAME, - CONF_ELEVATION, - CONF_TIME_ZONE, - CONF_UNIT_SYSTEM, - CONF_EXTERNAL_URL, - CONF_INTERNAL_URL, - CONF_CURRENCY, - CONF_COUNTRY, - CONF_LANGUAGE, - CONF_RADIUS, - ) - ): - hac.config_source = ConfigSource.YAML - - for key, attr in ( - (CONF_LATITUDE, "latitude"), - (CONF_LONGITUDE, "longitude"), - (CONF_NAME, "location_name"), - (CONF_ELEVATION, "elevation"), - (CONF_INTERNAL_URL, "internal_url"), - (CONF_EXTERNAL_URL, "external_url"), - (CONF_MEDIA_DIRS, "media_dirs"), - (CONF_CURRENCY, "currency"), - (CONF_COUNTRY, "country"), - (CONF_LANGUAGE, "language"), - (CONF_RADIUS, "radius"), - ): - if key in config: - setattr(hac, attr, config[key]) - - if config.get(CONF_DEBUG): - hac.debug = True - - _raise_issue_if_historic_currency(hass, hass.config.currency) - _raise_issue_if_no_country(hass, hass.config.country) - - if CONF_TIME_ZONE in config: - await hac.async_set_time_zone(config[CONF_TIME_ZONE]) - - if CONF_MEDIA_DIRS not in config: - if is_docker_env(): - hac.media_dirs = {"local": "/media"} - else: - hac.media_dirs = {"local": hass.config.path("media")} - - # Init whitelist external dir - hac.allowlist_external_dirs = {hass.config.path("www"), *hac.media_dirs.values()} - if CONF_ALLOWLIST_EXTERNAL_DIRS in config: - hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS])) - - elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config: - _LOGGER.warning( - "Key %s has been replaced with %s. 
Please update your config", - LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, - CONF_ALLOWLIST_EXTERNAL_DIRS, - ) - hac.allowlist_external_dirs.update( - set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS]) - ) - - # Init whitelist external URL list – make sure to add / to every URL that doesn't - # already have it so that we can properly test "path ownership" - if CONF_ALLOWLIST_EXTERNAL_URLS in config: - hac.allowlist_external_urls.update( - url if url.endswith("/") else f"{url}/" - for url in config[CONF_ALLOWLIST_EXTERNAL_URLS] - ) - - # Customize - cust_exact = dict(config[CONF_CUSTOMIZE]) - cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN]) - cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB]) - - for name, pkg in config[CONF_PACKAGES].items(): - if (pkg_cust := pkg.get(HOMEASSISTANT_DOMAIN)) is None: - continue - - try: - pkg_cust = CUSTOMIZE_CONFIG_SCHEMA(pkg_cust) - except vol.Invalid: - _LOGGER.warning("Package %s contains invalid customize", name) - continue - - cust_exact.update(pkg_cust[CONF_CUSTOMIZE]) - cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN]) - cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB]) - - hass.data[DATA_CUSTOMIZE] = EntityValues(cust_exact, cust_domain, cust_glob) - - if CONF_UNIT_SYSTEM in config: - hac.units = get_unit_system(config[CONF_UNIT_SYSTEM]) - - def _log_pkg_error( hass: HomeAssistant, package: str, component: str | None, config: dict, message: str ) -> None: @@ -1001,7 +641,7 @@ def _identify_config_schema(module: ComponentProtocol) -> str | None: def _validate_package_definition(name: str, conf: Any) -> None: """Validate basic package definition properties.""" cv.slug(name) - PACKAGE_DEFINITION_SCHEMA(conf) + _PACKAGE_DEFINITION_SCHEMA(conf) def _recursive_merge(conf: dict[str, Any], package: dict[str, Any]) -> str | None: @@ -1040,7 +680,7 @@ async def merge_packages_config( vol.Invalid if whole package config is invalid. """ - PACKAGES_CONFIG_SCHEMA(packages) + _PACKAGES_CONFIG_SCHEMA(packages) invalid_packages = [] for pack_name, pack_conf in packages.items(): @@ -1174,6 +814,8 @@ def _get_log_message_and_stack_print_pref( "domain": domain, "error": str(exception), "p_name": platform_path, + "config_file": "?", + "line": "?", } show_stack_trace: bool | None = _CONFIG_LOG_SHOW_STACK_TRACE.get( diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index e48313cab33..d34828f5e46 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -15,21 +15,29 @@ from collections.abc import ( ) from contextvars import ContextVar from copy import deepcopy +from dataclasses import dataclass, field from datetime import datetime from enum import Enum, StrEnum import functools -from functools import cached_property +from functools import cache import logging from random import randint from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Generic, Self, cast +from typing import TYPE_CHECKING, Any, Generic, Self, TypedDict, cast from async_interrupt import interrupt +from propcache import cached_property from typing_extensions import TypeVar +import voluptuous as vol from . 
import data_entry_flow, loader from .components import persistent_notification -from .const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, Platform +from .const import ( + CONF_NAME, + EVENT_HOMEASSISTANT_STARTED, + EVENT_HOMEASSISTANT_STOP, + Platform, +) from .core import ( CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN, @@ -40,23 +48,29 @@ from .core import ( HomeAssistant, callback, ) -from .data_entry_flow import FLOW_NOT_COMPLETE_STEPS, FlowResult +from .data_entry_flow import FLOW_NOT_COMPLETE_STEPS, FlowContext, FlowResult from .exceptions import ( ConfigEntryAuthFailed, ConfigEntryError, ConfigEntryNotReady, HomeAssistantError, ) -from .helpers import device_registry, entity_registry, issue_registry as ir, storage +from .helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, + storage, +) from .helpers.debounce import Debouncer +from .helpers.discovery_flow import DiscoveryKey from .helpers.dispatcher import SignalType, async_dispatcher_send_internal from .helpers.event import ( RANDOM_MICROSECOND_MAX, RANDOM_MICROSECOND_MIN, async_call_later, ) -from .helpers.frame import report -from .helpers.json import json_bytes, json_fragment +from .helpers.frame import ReportBehavior, report_usage +from .helpers.json import json_bytes, json_bytes_sorted, json_fragment from .helpers.typing import UNDEFINED, ConfigType, DiscoveryInfoType, UndefinedType from .loader import async_suggest_report_issue from .setup import ( @@ -76,10 +90,10 @@ from .util.enum import try_parse_enum if TYPE_CHECKING: from .components.bluetooth import BluetoothServiceInfoBleak from .components.dhcp import DhcpServiceInfo - from .components.hassio import HassioServiceInfo from .components.ssdp import SsdpServiceInfo from .components.usb import UsbServiceInfo from .components.zeroconf import ZeroconfServiceInfo + from .helpers.service_info.hassio import HassioServiceInfo from .helpers.service_info.mqtt import MqttServiceInfo @@ -105,11 +119,6 @@ SOURCE_ZEROCONF = "zeroconf" # source and while it exists normal discoveries with the same unique id are ignored. SOURCE_IGNORE = "ignore" -# This is used when a user uses the "Stop Ignoring" button in the UI (the -# config_entries/ignore_flow websocket command). It's triggered after the -# "ignore" config entry has been removed and unloaded. -SOURCE_UNIGNORE = "unignore" - # This is used to signal that re-authentication is required by the user. 
SOURCE_REAUTH = "reauth" @@ -120,12 +129,15 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry() STORAGE_KEY = "core.config_entries" STORAGE_VERSION = 1 -STORAGE_VERSION_MINOR = 3 +STORAGE_VERSION_MINOR = 5 SAVE_DELAY = 1 DISCOVERY_COOLDOWN = 1 +ISSUE_UNIQUE_ID_COLLISION = "config_entry_unique_id_collision" +UNIQUE_ID_COLLISION_TITLE_LIMIT = 5 + _DataT = TypeVar("_DataT", default=Any) @@ -173,12 +185,13 @@ DISCOVERY_SOURCES = { SOURCE_DHCP, SOURCE_DISCOVERY, SOURCE_HARDWARE, + SOURCE_HASSIO, SOURCE_HOMEKIT, SOURCE_IMPORT, SOURCE_INTEGRATION_DISCOVERY, SOURCE_MQTT, SOURCE_SSDP, - SOURCE_UNIGNORE, + SOURCE_SYSTEM, SOURCE_USB, SOURCE_ZEROCONF, } @@ -191,6 +204,15 @@ SIGNAL_CONFIG_ENTRY_CHANGED = SignalType["ConfigEntryChange", "ConfigEntry"]( "config_entry_changed" ) + +@cache +def signal_discovered_config_entry_removed( + discovery_domain: str, +) -> SignalType[ConfigEntry]: + """Format signal.""" + return SignalType(f"{discovery_domain}_discovered_config_entry_removed") + + NO_RESET_TRIES_STATES = { ConfigEntryState.SETUP_RETRY, ConfigEntryState.SETUP_IN_PROGRESS, @@ -235,6 +257,10 @@ class UnknownEntry(ConfigError): """Unknown entry specified.""" +class UnknownSubEntry(ConfigError): + """Unknown subentry specified.""" + + class OperationNotAllowed(ConfigError): """Raised when a config entry operation is not allowed.""" @@ -243,14 +269,13 @@ type UpdateListenerType = Callable[ [HomeAssistant, ConfigEntry], Coroutine[Any, Any, None] ] -FROZEN_CONFIG_ENTRY_ATTRS = { - "entry_id", - "domain", +STATE_KEYS = { "state", "reason", "error_reason_translation_key", "error_reason_translation_placeholders", } +FROZEN_CONFIG_ENTRY_ATTRS = {"entry_id", "domain", *STATE_KEYS} UPDATE_ENTRY_CONFIG_ENTRY_ATTRS = { "unique_id", "title", @@ -263,14 +288,82 @@ UPDATE_ENTRY_CONFIG_ENTRY_ATTRS = { } -class ConfigFlowResult(FlowResult, total=False): +class ConfigFlowContext(FlowContext, total=False): + """Typed context dict for config flow.""" + + alternative_domain: str + configuration_url: str + confirm_only: bool + discovery_key: DiscoveryKey + entry_id: str + title_placeholders: Mapping[str, str] + unique_id: str | None + + +class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False): """Typed result dict for config flow.""" minor_version: int options: Mapping[str, Any] + subentries: Iterable[ConfigSubentryData] version: int +def _validate_item(*, disabled_by: ConfigEntryDisabler | Any | None = None) -> None: + """Validate config entry item.""" + + # Deprecated in 2022.1, stopped working in 2024.10 + if disabled_by is not None and not isinstance(disabled_by, ConfigEntryDisabler): + raise TypeError( + f"disabled_by must be a ConfigEntryDisabler value, got {disabled_by}" + ) + + +class ConfigSubentryData(TypedDict): + """Container for configuration subentry data. + + Returned by integrations, a subentry_id will be assigned automatically. + """ + + data: Mapping[str, Any] + title: str + unique_id: str | None + + +class ConfigSubentryDataWithId(ConfigSubentryData): + """Container for configuration subentry data. + + This type is used when loading existing subentries from storage. 
+ """ + + subentry_id: str + + +class SubentryFlowResult(FlowResult[FlowContext, tuple[str, str]], total=False): + """Typed result dict for subentry flow.""" + + unique_id: str | None + + +@dataclass(frozen=True, kw_only=True) +class ConfigSubentry: + """Container for a configuration subentry.""" + + data: MappingProxyType[str, Any] + subentry_id: str = field(default_factory=ulid_util.ulid_now) + title: str + unique_id: str | None + + def as_dict(self) -> ConfigSubentryDataWithId: + """Return dictionary version of this subentry.""" + return { + "data": dict(self.data), + "subentry_id": self.subentry_id, + "title": self.title, + "unique_id": self.unique_id, + } + + class ConfigEntry(Generic[_DataT]): """Hold a configuration entry.""" @@ -280,6 +373,7 @@ class ConfigEntry(Generic[_DataT]): data: MappingProxyType[str, Any] runtime_data: _DataT options: MappingProxyType[str, Any] + subentries: MappingProxyType[str, ConfigSubentry] unique_id: str | None state: ConfigEntryState reason: str | None @@ -295,18 +389,19 @@ class ConfigEntry(Generic[_DataT]): supports_remove_device: bool | None _supports_options: bool | None _supports_reconfigure: bool | None + _supported_subentries: tuple[str, ...] | None update_listeners: list[UpdateListenerType] _async_cancel_retry_setup: Callable[[], Any] | None _on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None setup_lock: asyncio.Lock _reauth_lock: asyncio.Lock - _reconfigure_lock: asyncio.Lock _tasks: set[asyncio.Future[Any]] _background_tasks: set[asyncio.Future[Any]] _integration_for_domain: loader.Integration | None _tries: int created_at: datetime modified_at: datetime + discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]] def __init__( self, @@ -314,6 +409,7 @@ class ConfigEntry(Generic[_DataT]): created_at: datetime | None = None, data: Mapping[str, Any], disabled_by: ConfigEntryDisabler | None = None, + discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]], domain: str, entry_id: str | None = None, minor_version: int, @@ -323,6 +419,7 @@ class ConfigEntry(Generic[_DataT]): pref_disable_polling: bool | None = None, source: str, state: ConfigEntryState = ConfigEntryState.NOT_LOADED, + subentries_data: Iterable[ConfigSubentryData | ConfigSubentryDataWithId] | None, title: str, unique_id: str | None, version: int, @@ -348,6 +445,24 @@ class ConfigEntry(Generic[_DataT]): # Entry options _setter(self, "options", MappingProxyType(options or {})) + # Subentries + subentries_data = subentries_data or () + subentries = {} + for subentry_data in subentries_data: + subentry_kwargs = {} + if "subentry_id" in subentry_data: + # If subentry_data has key "subentry_id", we're loading from storage + subentry_kwargs["subentry_id"] = subentry_data["subentry_id"] # type: ignore[typeddict-item] + subentry = ConfigSubentry( + data=MappingProxyType(subentry_data["data"]), + title=subentry_data["title"], + unique_id=subentry_data.get("unique_id"), + **subentry_kwargs, + ) + subentries[subentry.subentry_id] = subentry + + _setter(self, "subentries", MappingProxyType(subentries)) + # Entry system options if pref_disable_new_entities is None: pref_disable_new_entities = False @@ -369,18 +484,7 @@ class ConfigEntry(Generic[_DataT]): _setter(self, "unique_id", unique_id) # Config entry is disabled - if isinstance(disabled_by, str) and not isinstance( - disabled_by, ConfigEntryDisabler - ): - report( # type: ignore[unreachable] - ( - "uses str for config entry disabled_by. 
This is deprecated and will" - " stop working in Home Assistant 2022.3, it should be updated to" - " use ConfigEntryDisabler instead" - ), - error_if_core=False, - ) - disabled_by = ConfigEntryDisabler(disabled_by) + _validate_item(disabled_by=disabled_by) _setter(self, "disabled_by", disabled_by) # Supports unload @@ -395,6 +499,9 @@ class ConfigEntry(Generic[_DataT]): # Supports reconfigure _setter(self, "_supports_reconfigure", None) + # Supports subentries + _setter(self, "_supported_subentries", None) + # Listeners to call on update _setter(self, "update_listeners", []) @@ -413,8 +520,6 @@ class ConfigEntry(Generic[_DataT]): _setter(self, "setup_lock", asyncio.Lock()) # Reauth lock to prevent concurrent reauth flows _setter(self, "_reauth_lock", asyncio.Lock()) - # Reconfigure lock to prevent concurrent reconfigure flows - _setter(self, "_reconfigure_lock", asyncio.Lock()) _setter(self, "_tasks", set()) _setter(self, "_background_tasks", set()) @@ -423,6 +528,7 @@ class ConfigEntry(Generic[_DataT]): _setter(self, "_tries", 0) _setter(self, "created_at", created_at or utcnow()) _setter(self, "modified_at", modified_at or utcnow()) + _setter(self, "discovery_keys", discovery_keys) def __repr__(self) -> str: """Representation of ConfigEntry.""" @@ -434,30 +540,15 @@ class ConfigEntry(Generic[_DataT]): def __setattr__(self, key: str, value: Any) -> None: """Set an attribute.""" if key in UPDATE_ENTRY_CONFIG_ENTRY_ATTRS: - if key == "unique_id": - # Setting unique_id directly will corrupt internal state - # There is no deprecation period for this key - # as changing them will corrupt internal state - # so we raise an error here - raise AttributeError( - "unique_id cannot be changed directly, use async_update_entry instead" - ) - report( - f'sets "{key}" directly to update a config entry. 
This is deprecated and will' - " stop working in Home Assistant 2024.9, it should be updated to use" - " async_update_entry instead", - error_if_core=False, + raise AttributeError( + f"{key} cannot be changed directly, use async_update_entry instead" ) - - elif key in FROZEN_CONFIG_ENTRY_ATTRS: - # These attributes are frozen and cannot be changed - # There is no deprecation period for these - # as changing them will corrupt internal state - # so we raise an error here + if key in FROZEN_CONFIG_ENTRY_ATTRS: raise AttributeError(f"{key} cannot be changed") super().__setattr__(key, value) - self.clear_cache() + self.clear_state_cache() + self.clear_storage_cache() @property def supports_options(self) -> bool: @@ -483,13 +574,25 @@ class ConfigEntry(Generic[_DataT]): ) return self._supports_reconfigure or False - def clear_cache(self) -> None: - """Clear cached properties.""" + @property + def supported_subentries(self) -> tuple[str, ...]: + """Return supported subentries.""" + if self._supported_subentries is None and ( + handler := HANDLERS.get(self.domain) + ): + # work out sub entries supported by the handler + object.__setattr__( + self, "_supported_subentries", handler.async_supported_subentries(self) + ) + return self._supported_subentries or () + + def clear_state_cache(self) -> None: + """Clear cached properties that are included in as_json_fragment.""" self.__dict__.pop("as_json_fragment", None) @cached_property def as_json_fragment(self) -> json_fragment: - """Return JSON fragment of a config entry.""" + """Return JSON fragment of a config entry that is used for the API.""" json_repr = { "created_at": self.created_at.timestamp(), "entry_id": self.entry_id, @@ -502,15 +605,26 @@ class ConfigEntry(Generic[_DataT]): "supports_remove_device": self.supports_remove_device or False, "supports_unload": self.supports_unload or False, "supports_reconfigure": self.supports_reconfigure, + "supported_subentries": self.supported_subentries, "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "disabled_by": self.disabled_by, "reason": self.reason, "error_reason_translation_key": self.error_reason_translation_key, "error_reason_translation_placeholders": self.error_reason_translation_placeholders, + "num_subentries": len(self.subentries), } return json_fragment(json_bytes(json_repr)) + def clear_storage_cache(self) -> None: + """Clear cached properties that are included in as_storage_fragment.""" + self.__dict__.pop("as_storage_fragment", None) + + @cached_property + def as_storage_fragment(self) -> json_fragment: + """Return a storage fragment for this entry.""" + return json_fragment(json_bytes_sorted(self.as_dict())) + async def async_setup( self, hass: HomeAssistant, @@ -518,10 +632,21 @@ class ConfigEntry(Generic[_DataT]): integration: loader.Integration | None = None, ) -> None: """Set up an entry.""" - current_entry.set(self) if self.source == SOURCE_IGNORE or self.disabled_by: return + current_entry.set(self) + try: + await self.__async_setup_with_context(hass, integration) + finally: + current_entry.set(None) + + async def __async_setup_with_context( + self, + hass: HomeAssistant, + integration: loader.Integration | None, + ) -> None: + """Set up an entry, with current_entry set.""" if integration is None and not (integration := self._integration_for_domain): integration = await loader.async_get_integration(hass, self.domain) self._integration_for_domain = integration @@ -843,7 +968,8 @@ class ConfigEntry(Generic[_DataT]): 
"""Invoke remove callback on component.""" old_modified_at = self.modified_at object.__setattr__(self, "modified_at", utcnow()) - self.clear_cache() + self.clear_state_cache() + self.clear_storage_cache() if self.source == SOURCE_IGNORE: return @@ -900,7 +1026,10 @@ class ConfigEntry(Generic[_DataT]): "error_reason_translation_placeholders", error_reason_translation_placeholders, ) - self.clear_cache() + self.clear_state_cache() + # Storage cache is not cleared here because the state is not stored + # in storage and we do not want to clear the cache on every state change + # since state changes are frequent. async_dispatcher_send_internal( hass, SIGNAL_CONFIG_ENTRY_CHANGED, ConfigEntryChange.UPDATED, self ) @@ -968,6 +1097,7 @@ class ConfigEntry(Generic[_DataT]): return { "created_at": self.created_at.isoformat(), "data": dict(self.data), + "discovery_keys": dict(self.discovery_keys), "disabled_by": self.disabled_by, "domain": self.domain, "entry_id": self.entry_id, @@ -977,6 +1107,7 @@ class ConfigEntry(Generic[_DataT]): "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "source": self.source, + "subentries": [subentry.as_dict() for subentry in self.subentries.values()], "title": self.title, "unique_id": self.unique_id, "version": self.version, @@ -1021,7 +1152,7 @@ class ConfigEntry(Generic[_DataT]): def async_start_reauth( self, hass: HomeAssistant, - context: dict[str, Any] | None = None, + context: ConfigFlowContext | None = None, data: dict[str, Any] | None = None, ) -> None: """Start a reauth flow.""" @@ -1039,7 +1170,7 @@ class ConfigEntry(Generic[_DataT]): async def _async_init_reauth( self, hass: HomeAssistant, - context: dict[str, Any] | None = None, + context: ConfigFlowContext | None = None, data: dict[str, Any] | None = None, ) -> None: """Start a reauth flow.""" @@ -1051,12 +1182,12 @@ class ConfigEntry(Generic[_DataT]): return result = await hass.config_entries.flow.async_init( self.domain, - context={ - "source": SOURCE_REAUTH, - "entry_id": self.entry_id, - "title_placeholders": {"name": self.title}, - "unique_id": self.unique_id, - } + context=ConfigFlowContext( + source=SOURCE_REAUTH, + entry_id=self.entry_id, + title_placeholders={"name": self.title}, + unique_id=self.unique_id, + ) | (context or {}), data=self.data | (data or {}), ) @@ -1077,49 +1208,6 @@ class ConfigEntry(Generic[_DataT]): translation_placeholders={"name": self.title}, ) - @callback - def async_start_reconfigure( - self, - hass: HomeAssistant, - context: dict[str, Any] | None = None, - data: dict[str, Any] | None = None, - ) -> None: - """Start a reconfigure flow.""" - # We will check this again in the task when we hold the lock, - # but we also check it now to try to avoid creating the task. 
- if any(self.async_get_active_flows(hass, {SOURCE_RECONFIGURE, SOURCE_REAUTH})): - # Reconfigure or reauth flow already in progress for this entry - return - hass.async_create_task( - self._async_init_reconfigure(hass, context, data), - f"config entry reconfigure {self.title} {self.domain} {self.entry_id}", - ) - - async def _async_init_reconfigure( - self, - hass: HomeAssistant, - context: dict[str, Any] | None = None, - data: dict[str, Any] | None = None, - ) -> None: - """Start a reconfigure flow.""" - async with self._reconfigure_lock: - if any( - self.async_get_active_flows(hass, {SOURCE_RECONFIGURE, SOURCE_REAUTH}) - ): - # Reconfigure or reauth flow already in progress for this entry - return - await hass.config_entries.flow.async_init( - self.domain, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": self.entry_id, - "title_placeholders": {"name": self.title}, - "unique_id": self.unique_id, - } - | (context or {}), - data=self.data | (data or {}), - ) - @callback def async_get_active_flows( self, hass: HomeAssistant, sources: set[str] @@ -1198,18 +1286,19 @@ class FlowCancelledError(Exception): def _report_non_awaited_platform_forwards(entry: ConfigEntry, what: str) -> None: """Report non awaited platform forwards.""" - report( + report_usage( f"calls {what} for integration {entry.domain} with " f"title: {entry.title} and entry_id: {entry.entry_id}, " f"during setup without awaiting {what}, which can cause " - "the setup lock to be released before the setup is done. " - "This will stop working in Home Assistant 2025.1", - error_if_integration=False, - error_if_core=False, + "the setup lock to be released before the setup is done", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.1", ) -class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): +class ConfigEntriesFlowManager( + data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult] +): """Manage all the config entry flows that are in progress.""" _flow_result = ConfigFlowResult @@ -1255,20 +1344,40 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): return False async def async_init( - self, handler: str, *, context: dict[str, Any] | None = None, data: Any = None + self, + handler: str, + *, + context: ConfigFlowContext | None = None, + data: Any = None, ) -> ConfigFlowResult: """Start a configuration flow.""" if not context or "source" not in context: raise KeyError("Context not set or doesn't have a source set") + # reauth/reconfigure flows should be linked to a config entry + if (source := context["source"]) in { + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + } and "entry_id" not in context: + # Deprecated in 2024.12, should fail in 2025.12 + report_usage( + f"initialises a {source} flow without a link to the config entry", + breaks_in_ha_version="2025.12", + ) + flow_id = ulid_util.ulid_now() # Avoid starting a config flow on an integration that only supports # a single config entry, but which already has an entry if ( - context.get("source") - not in {SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_UNIGNORE, SOURCE_RECONFIGURE} - and self.config_entries.async_has_entries(handler, include_ignore=False) + source not in {SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_RECONFIGURE} + and ( + self.config_entries.async_has_entries(handler, include_ignore=False) + or ( + self.config_entries.async_has_entries(handler, include_ignore=True) + and source != SOURCE_USER + ) + ) and await _support_single_config_entry_only(self.hass, handler) ): return ConfigFlowResult( @@ -1281,7 
+1390,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): loop = self.hass.loop - if context["source"] == SOURCE_IMPORT: + if source == SOURCE_IMPORT: self._pending_import_flows[handler][flow_id] = loop.create_future() cancel_init_future = loop.create_future() @@ -1314,7 +1423,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): self, flow_id: str, handler: str, - context: dict, + context: ConfigFlowContext, data: Any, ) -> tuple[ConfigFlow, ConfigFlowResult]: """Run the init in a task to allow it to be canceled at shutdown.""" @@ -1352,10 +1461,14 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): async def async_finish_flow( self, - flow: data_entry_flow.FlowHandler[ConfigFlowResult], + flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult], result: ConfigFlowResult, ) -> ConfigFlowResult: - """Finish a config flow and add an entry.""" + """Finish a config flow and add an entry. + + This method is called when a flow step returns FlowResultType.ABORT or + FlowResultType.CREATE_ENTRY. + """ flow = cast(ConfigFlow, flow) # Mark the step as done. @@ -1377,6 +1490,41 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: + # If there's an ignored config entry with a matching unique ID, + # update the discovery key. + if ( + (discovery_key := flow.context.get("discovery_key")) + and (unique_id := flow.unique_id) is not None + and ( + entry := self.config_entries.async_entry_for_domain_unique_id( + result["handler"], unique_id + ) + ) + and discovery_key + not in ( + known_discovery_keys := entry.discovery_keys.get( + discovery_key.domain, () + ) + ) + ): + new_discovery_keys = MappingProxyType( + entry.discovery_keys + | { + discovery_key.domain: tuple( + [*known_discovery_keys, discovery_key][-10:] + ) + } + ) + _LOGGER.debug( + "Updating discovery keys for %s entry %s %s -> %s", + entry.domain, + unique_id, + entry.discovery_keys, + new_discovery_keys, + ) + self.config_entries.async_update_entry( + entry, discovery_keys=new_discovery_keys + ) return result # Avoid adding a config entry for a integration @@ -1408,6 +1556,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): or progress_unique_id == DEFAULT_DISCOVERY_UNIQUE_ID ): self.async_abort(progress_flow_id) + continue # Abort any flows in progress for the same handler # when integration allows only one config entry @@ -1428,32 +1577,48 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): ) # Unload the entry before setting up the new one. - # We will remove it only after the other one is set up, - # so that device customizations are not getting lost. 
if existing_entry is not None and existing_entry.state.recoverable: await self.config_entries.async_unload(existing_entry.entry_id) + discovery_key = flow.context.get("discovery_key") + discovery_keys = ( + MappingProxyType({discovery_key.domain: (discovery_key,)}) + if discovery_key + else MappingProxyType({}) + ) entry = ConfigEntry( data=result["data"], + discovery_keys=discovery_keys, domain=result["handler"], minor_version=result["minor_version"], options=result["options"], source=flow.context["source"], + subentries_data=result["subentries"], title=result["title"], unique_id=flow.unique_id, version=result["version"], ) + if existing_entry is not None: + # Unload and remove the existing entry, but don't clean up devices and + # entities until the new entry is added + await self.config_entries._async_remove(existing_entry.entry_id) # noqa: SLF001 await self.config_entries.async_add(entry) if existing_entry is not None: - await self.config_entries.async_remove(existing_entry.entry_id) + # Clean up devices and entities belonging to the existing entry + # which are not present in the new entry + self.config_entries._async_clean_up(existing_entry) # noqa: SLF001 result["result"] = entry return result async def async_create_flow( - self, handler_key: str, *, context: dict | None = None, data: Any = None + self, + handler_key: str, + *, + context: ConfigFlowContext | None = None, + data: Any = None, ) -> ConfigFlow: """Create a flow for specified handler. @@ -1471,7 +1636,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): async def async_post_init( self, - flow: data_entry_flow.FlowHandler[ConfigFlowResult], + flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult], result: ConfigFlowResult, ) -> None: """After a flow is initialised trigger new flow notifications.""" @@ -1507,6 +1672,35 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): notification_id=DISCOVERY_NOTIFICATION_ID, ) + @callback + def async_has_matching_discovery_flow( + self, handler: str, match_context: ConfigFlowContext, data: Any + ) -> bool: + """Check if an existing matching discovery flow is in progress. + + A flow with the same handler, context, and data. + + If match_context is passed, only return flows with a context that is a + superset of match_context. + """ + if not (flows := self._handler_progress_index.get(handler)): + return False + match_items = match_context.items() + for progress in flows: + if match_items <= progress.context.items() and progress.init_data == data: + return True + return False + + @callback + def async_has_matching_flow(self, flow: ConfigFlow) -> bool: + """Check if an existing matching flow is in progress.""" + if not (flows := self._handler_progress_index.get(flow.handler)): + return False + for other_flow in set(flows): + if other_flow is not flow and flow.is_matching(other_flow): # type: ignore[arg-type] + return True + return False + class ConfigEntryItems(UserDict[str, ConfigEntry]): """Container for config items, maps config_entry_id -> entry. 
@@ -1521,7 +1715,7 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): super().__init__() self._hass = hass self._domain_index: dict[str, list[ConfigEntry]] = {} - self._domain_unique_id_index: dict[str, dict[str, ConfigEntry]] = {} + self._domain_unique_id_index: dict[str, dict[str, list[ConfigEntry]]] = {} def values(self) -> ValuesView[ConfigEntry]: """Return the underlying values to avoid __iter__ overhead.""" @@ -1530,6 +1724,7 @@ def __setitem__(self, entry_id: str, entry: ConfigEntry) -> None: """Add an item.""" data = self.data + self.check_unique_id(entry) if entry_id in data: # This is likely a bug in a test that is adding the same entry twice. # In the future, once we have fixed the tests, this will raise HomeAssistantError. @@ -1538,32 +1733,50 @@ data[entry_id] = entry self._index_entry(entry) + def check_unique_id(self, entry: ConfigEntry) -> None: + """Check config entry unique id. + + For a string unique id (this is the correct case): return + For a hashable non string unique id: log warning + For a non-hashable unique id: raise error + """ + if (unique_id := entry.unique_id) is None: + return + if isinstance(unique_id, str): + # Unique id should be a string + return + if isinstance(unique_id, Hashable): # type: ignore[unreachable] + # The check for other non-string unique ids was added in HA Core 2024.10 + # In HA Core 2025.10, we should remove the error and instead fail + report_issue = async_suggest_report_issue( + self._hass, integration_domain=entry.domain + ) + _LOGGER.error( + ( + "Config entry '%s' from integration %s has an invalid unique_id" + " '%s' of type %s when a string is expected, please %s" + ), + entry.title, + entry.domain, + entry.unique_id, + type(entry.unique_id).__name__, + report_issue, + ) + else: + # Guard against integrations using unhashable unique_id + # In HA Core 2024.11, the guard was changed from warning to failing + raise HomeAssistantError( + f"The entry unique id {unique_id} is not a string."
+ ) + def _index_entry(self, entry: ConfigEntry) -> None: """Index an entry.""" + self.check_unique_id(entry) self._domain_index.setdefault(entry.domain, []).append(entry) if entry.unique_id is not None: - unique_id_hash = entry.unique_id - # Guard against integrations using unhashable unique_id - # In HA Core 2024.9, we should remove the guard and instead fail - if not isinstance(entry.unique_id, Hashable): - unique_id_hash = str(entry.unique_id) # type: ignore[unreachable] - report_issue = async_suggest_report_issue( - self._hass, integration_domain=entry.domain - ) - _LOGGER.error( - ( - "Config entry '%s' from integration %s has an invalid unique_id" - " '%s', please %s" - ), - entry.title, - entry.domain, - entry.unique_id, - report_issue, - ) - - self._domain_unique_id_index.setdefault(entry.domain, {})[ - unique_id_hash - ] = entry + self._domain_unique_id_index.setdefault(entry.domain, {}).setdefault( + entry.unique_id, [] + ).append(entry) def _unindex_entry(self, entry_id: str) -> None: """Unindex an entry.""" @@ -1573,10 +1786,9 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): if not self._domain_index[domain]: del self._domain_index[domain] if (unique_id := entry.unique_id) is not None: - # Check type first to avoid expensive isinstance call - if type(unique_id) is not str and not isinstance(unique_id, Hashable): # noqa: E721 - unique_id = str(entry.unique_id) # type: ignore[unreachable] - del self._domain_unique_id_index[domain][unique_id] + self._domain_unique_id_index[domain][unique_id].remove(entry) + if not self._domain_unique_id_index[domain][unique_id]: + del self._domain_unique_id_index[domain][unique_id] if not self._domain_unique_id_index[domain]: del self._domain_unique_id_index[domain] @@ -1592,9 +1804,11 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): """ entry_id = entry.entry_id self._unindex_entry(entry_id) + self.check_unique_id(entry) object.__setattr__(entry, "unique_id", new_unique_id) self._index_entry(entry) - entry.clear_cache() + entry.clear_state_cache() + entry.clear_storage_cache() def get_entries_for_domain(self, domain: str) -> list[ConfigEntry]: """Get entries for a domain.""" @@ -1604,10 +1818,16 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): self, domain: str, unique_id: str ) -> ConfigEntry | None: """Get entry by domain and unique id.""" - # Check type first to avoid expensive isinstance call - if type(unique_id) is not str and not isinstance(unique_id, Hashable): # noqa: E721 - unique_id = str(unique_id) # type: ignore[unreachable] - return self._domain_unique_id_index.get(domain, {}).get(unique_id) + if unique_id is None: + return None # type: ignore[unreachable] + if not isinstance(unique_id, Hashable): + raise HomeAssistantError( + f"The entry unique id {unique_id} is not a string." 
+            )
+        entries = self._domain_unique_id_index.get(domain, {}).get(unique_id)
+        if not entries:
+            return None
+        return entries[0]


 class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]):
@@ -1659,6 +1879,16 @@ class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]):
             for entry in data["entries"]:
                 entry["created_at"] = entry["modified_at"] = created_at

+        if old_minor_version < 4:
+            # Version 1.4 adds discovery_keys
+            for entry in data["entries"]:
+                entry["discovery_keys"] = {}
+
+        if old_minor_version < 5:
+            # Version 1.5 adds config subentries
+            for entry in data["entries"]:
+                entry.setdefault("subentries", entry.get("subentries", {}))
+
         if old_major_version > 1:
             raise NotImplementedError
         return data
@@ -1675,6 +1905,7 @@ class ConfigEntries:
         self.hass = hass
         self.flow = ConfigEntriesFlowManager(hass, self, hass_config)
         self.options = OptionsFlowManager(hass)
+        self.subentries = ConfigSubentryFlowManager(hass)
         self._hass_config = hass_config
         self._entries = ConfigEntryItems(hass)
         self._store = ConfigEntryStore(hass)
@@ -1699,6 +1930,16 @@ class ConfigEntries:
         """Return entry with matching entry_id."""
         return self._entries.data.get(entry_id)

+    @callback
+    def async_get_known_entry(self, entry_id: str) -> ConfigEntry:
+        """Return entry with matching entry_id.
+
+        Raises UnknownEntry if entry is not found.
+        """
+        if (entry := self.async_get_entry(entry_id)) is None:
+            raise UnknownEntry
+        return entry
+
     @callback
     def async_entry_ids(self) -> list[str]:
         """Return entry ids."""
@@ -1742,6 +1983,16 @@ class ConfigEntries:
             and (include_disabled or not entry.disabled_by)
         ]

+    @callback
+    def async_loaded_entries(self, domain: str) -> list[ConfigEntry]:
+        """Return loaded entries for a specific domain.
+
+        This will exclude ignored or disabled config entries.
+ """ + entries = self._entries.get_entries_for_domain(domain) + + return [entry for entry in entries if entry.state == ConfigEntryState.LOADED] + @callback def async_entry_for_domain_unique_id( self, domain: str, unique_id: str @@ -1757,14 +2008,28 @@ class ConfigEntries: ) self._entries[entry.entry_id] = entry + self.async_update_issues() self._async_dispatch(ConfigEntryChange.ADDED, entry) await self.async_setup(entry.entry_id) self._async_schedule_save() async def async_remove(self, entry_id: str) -> dict[str, Any]: - """Remove an entry.""" - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + """Remove, unload and clean up after an entry.""" + unload_success, entry = await self._async_remove(entry_id) + self._async_clean_up(entry) + + for discovery_domain in entry.discovery_keys: + async_dispatcher_send_internal( + self.hass, + signal_discovered_config_entry_removed(discovery_domain), + entry, + ) + + return {"require_restart": not unload_success} + + async def _async_remove(self, entry_id: str) -> tuple[bool, ConfigEntry]: + """Remove and unload an entry.""" + entry = self.async_get_known_entry(entry_id) async with entry.setup_lock: if not entry.state.recoverable: @@ -1775,10 +2040,18 @@ class ConfigEntries: await entry.async_remove(self.hass) del self._entries[entry.entry_id] + self.async_update_issues() self._async_schedule_save() - dev_reg = device_registry.async_get(self.hass) - ent_reg = entity_registry.async_get(self.hass) + return (unload_success, entry) + + @callback + def _async_clean_up(self, entry: ConfigEntry) -> None: + """Clean up after an entry.""" + entry_id = entry.entry_id + + dev_reg = dr.async_get(self.hass) + ent_reg = er.async_get(self.hass) dev_reg.async_clear_config_entry(entry_id) ent_reg.async_clear_config_entry(entry_id) @@ -1794,22 +2067,7 @@ class ConfigEntries: issue_id = f"config_entry_reauth_{entry.domain}_{entry.entry_id}" ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) - # After we have fully removed an "ignore" config entry we can try and rediscover - # it so that a user is able to immediately start configuring it. We do this by - # starting a new flow with the 'unignore' step. If the integration doesn't - # implement async_step_unignore then this will be a no-op. 
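The block removed below used to rediscover ignored entries through the unignore step; with this change, async_remove itself broadcasts the removal to every domain recorded in entry.discovery_keys, as shown above. A minimal caller-side sketch, assuming it runs inside Home Assistant's event loop; the helper name and logger are illustrative, only async_remove and its require_restart flag come from this diff:

import logging

from homeassistant.core import HomeAssistant

_LOGGER = logging.getLogger(__name__)


async def async_remove_and_report(hass: HomeAssistant, entry_id: str) -> bool:
    """Remove a config entry and report whether a restart is required."""
    # async_remove() unloads the entry, clears the device and entity registries
    # and notifies discovery listeners for each domain in entry.discovery_keys.
    result = await hass.config_entries.async_remove(entry_id)
    if result["require_restart"]:
        _LOGGER.warning(
            "Config entry %s could not be unloaded cleanly; a restart is required",
            entry_id,
        )
    return not result["require_restart"]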
- if entry.source == SOURCE_IGNORE: - self.hass.async_create_task_internal( - self.hass.config_entries.flow.async_init( - entry.domain, - context={"source": SOURCE_UNIGNORE}, - data={"unique_id": entry.unique_id}, - ), - f"config entry unignore {entry.title} {entry.domain} {entry.unique_id}", - ) - self._async_dispatch(ConfigEntryChange.REMOVED, entry) - return {"require_restart": not unload_success} @callback def _async_shutdown(self, event: Event) -> None: @@ -1836,6 +2094,12 @@ class ConfigEntries: created_at=datetime.fromisoformat(entry["created_at"]), data=entry["data"], disabled_by=try_parse_enum(ConfigEntryDisabler, entry["disabled_by"]), + discovery_keys=MappingProxyType( + { + domain: tuple(DiscoveryKey.from_json_dict(key) for key in keys) + for domain, keys in entry["discovery_keys"].items() + } + ), domain=entry["domain"], entry_id=entry_id, minor_version=entry["minor_version"], @@ -1844,6 +2108,7 @@ class ConfigEntries: pref_disable_new_entities=entry["pref_disable_new_entities"], pref_disable_polling=entry["pref_disable_polling"], source=entry["source"], + subentries_data=entry["subentries"], title=entry["title"], unique_id=entry["unique_id"], version=entry["version"], @@ -1851,14 +2116,14 @@ class ConfigEntries: entries[entry_id] = config_entry self._entries = entries + self.async_update_issues() async def async_setup(self, entry_id: str, _lock: bool = True) -> bool: """Set up a config entry. Return True if entry has been successfully loaded. """ - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + entry = self.async_get_known_entry(entry_id) if entry.state is not ConfigEntryState.NOT_LOADED: raise OperationNotAllowed( @@ -1889,8 +2154,7 @@ class ConfigEntries: async def async_unload(self, entry_id: str, _lock: bool = True) -> bool: """Unload a config entry.""" - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + entry = self.async_get_known_entry(entry_id) if not entry.state.recoverable: raise OperationNotAllowed( @@ -1908,8 +2172,7 @@ class ConfigEntries: @callback def async_schedule_reload(self, entry_id: str) -> None: """Schedule a config entry to be reloaded.""" - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + entry = self.async_get_known_entry(entry_id) entry.async_cancel_retry_setup() self.hass.async_create_task( self.async_reload(entry_id), @@ -1927,8 +2190,7 @@ class ConfigEntries: If an entry was not loaded, will just load. """ - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + entry = self.async_get_known_entry(entry_id) # Cancel the setup retry task before waiting for the # reload lock to reduce the chance of concurrent reload @@ -1958,43 +2220,30 @@ class ConfigEntries: If disabled_by is changed, the config entry will be reloaded. """ - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry - - if isinstance(disabled_by, str) and not isinstance( - disabled_by, ConfigEntryDisabler - ): - report( # type: ignore[unreachable] - ( - "uses str for config entry disabled_by. 
This is deprecated and will" - " stop working in Home Assistant 2022.3, it should be updated to" - " use ConfigEntryDisabler instead" - ), - error_if_core=False, - ) - disabled_by = ConfigEntryDisabler(disabled_by) + entry = self.async_get_known_entry(entry_id) + _validate_item(disabled_by=disabled_by) if entry.disabled_by is disabled_by: return True entry.disabled_by = disabled_by self._async_schedule_save() - dev_reg = device_registry.async_get(self.hass) - ent_reg = entity_registry.async_get(self.hass) + dev_reg = dr.async_get(self.hass) + ent_reg = er.async_get(self.hass) if not entry.disabled_by: # The config entry will no longer be disabled, enable devices and entities - device_registry.async_config_entry_disabled_by_changed(dev_reg, entry) - entity_registry.async_config_entry_disabled_by_changed(ent_reg, entry) + dr.async_config_entry_disabled_by_changed(dev_reg, entry) + er.async_config_entry_disabled_by_changed(ent_reg, entry) # Load or unload the config entry reload_result = await self.async_reload(entry_id) if entry.disabled_by: # The config entry has been disabled, disable devices and entities - device_registry.async_config_entry_disabled_by_changed(dev_reg, entry) - entity_registry.async_config_entry_disabled_by_changed(ent_reg, entry) + dr.async_config_entry_disabled_by_changed(dev_reg, entry) + er.async_config_entry_disabled_by_changed(ent_reg, entry) return reload_result @@ -2004,6 +2253,8 @@ class ConfigEntries: entry: ConfigEntry, *, data: Mapping[str, Any] | UndefinedType = UNDEFINED, + discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]] + | UndefinedType = UNDEFINED, minor_version: int | UndefinedType = UNDEFINED, options: Mapping[str, Any] | UndefinedType = UNDEFINED, pref_disable_new_entities: bool | UndefinedType = UNDEFINED, @@ -2017,6 +2268,44 @@ class ConfigEntries: If the entry was changed, the update_listeners are fired and this function returns True + If the entry was not changed, the update_listeners are + not fired and this function returns False + """ + return self._async_update_entry( + entry, + data=data, + discovery_keys=discovery_keys, + minor_version=minor_version, + options=options, + pref_disable_new_entities=pref_disable_new_entities, + pref_disable_polling=pref_disable_polling, + title=title, + unique_id=unique_id, + version=version, + ) + + @callback + def _async_update_entry( + self, + entry: ConfigEntry, + *, + data: Mapping[str, Any] | UndefinedType = UNDEFINED, + discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]] + | UndefinedType = UNDEFINED, + minor_version: int | UndefinedType = UNDEFINED, + options: Mapping[str, Any] | UndefinedType = UNDEFINED, + pref_disable_new_entities: bool | UndefinedType = UNDEFINED, + pref_disable_polling: bool | UndefinedType = UNDEFINED, + subentries: dict[str, ConfigSubentry] | UndefinedType = UNDEFINED, + title: str | UndefinedType = UNDEFINED, + unique_id: str | None | UndefinedType = UNDEFINED, + version: int | UndefinedType = UNDEFINED, + ) -> bool: + """Update a config entry. + + If the entry was changed, the update_listeners are + fired and this function returns True + If the entry was not changed, the update_listeners are not fired and this function returns False """ @@ -2028,11 +2317,37 @@ class ConfigEntries: _setter = object.__setattr__ if unique_id is not UNDEFINED and entry.unique_id != unique_id: + # Deprecated in 2024.11, should fail in 2025.11 + if ( + # flipr creates duplicates during migration, and asks users to + # remove the duplicate. 
We don't need warn about it here too. + # We should remove the special case for "flipr" in HA Core 2025.4, + # when the flipr migration period ends + entry.domain != "flipr" + and unique_id is not None + and self.async_entry_for_domain_unique_id(entry.domain, unique_id) + is not None + ): + report_issue = async_suggest_report_issue( + self.hass, integration_domain=entry.domain + ) + _LOGGER.error( + ( + "Unique id of config entry '%s' from integration %s changed to" + " '%s' which is already in use, please %s" + ), + entry.title, + entry.domain, + unique_id, + report_issue, + ) # Reindex the entry if the unique_id has changed self._entries.update_unique_id(entry, unique_id) + self.async_update_issues() changed = True for attr, value in ( + ("discovery_keys", discovery_keys), ("minor_version", minor_version), ("pref_disable_new_entities", pref_disable_new_entities), ("pref_disable_polling", pref_disable_polling), @@ -2053,6 +2368,11 @@ class ConfigEntries: changed = True _setter(entry, "options", MappingProxyType(options)) + if subentries is not UNDEFINED: + if entry.subentries != subentries: + changed = True + _setter(entry, "subentries", MappingProxyType(subentries)) + if not changed: return False @@ -2065,10 +2385,42 @@ class ConfigEntries: ) self._async_schedule_save() - entry.clear_cache() + entry.clear_state_cache() + entry.clear_storage_cache() self._async_dispatch(ConfigEntryChange.UPDATED, entry) return True + @callback + def async_add_subentry(self, entry: ConfigEntry, subentry: ConfigSubentry) -> bool: + """Add a subentry to a config entry.""" + self._raise_if_subentry_unique_id_exists(entry, subentry.unique_id) + + return self._async_update_entry( + entry, + subentries=entry.subentries | {subentry.subentry_id: subentry}, + ) + + @callback + def async_remove_subentry(self, entry: ConfigEntry, subentry_id: str) -> bool: + """Remove a subentry from a config entry.""" + subentries = dict(entry.subentries) + try: + subentries.pop(subentry_id) + except KeyError as err: + raise UnknownSubEntry from err + + return self._async_update_entry(entry, subentries=subentries) + + def _raise_if_subentry_unique_id_exists( + self, entry: ConfigEntry, unique_id: str | None + ) -> None: + """Raise if a subentry with the same unique_id exists.""" + if unique_id is None: + return + for existing_subentry in entry.subentries.values(): + if existing_subentry.unique_id == unique_id: + raise data_entry_flow.AbortFlow("already_configured") + @callback def _async_dispatch( self, change_type: ConfigEntryChange, entry: ConfigEntry @@ -2148,14 +2500,13 @@ class ConfigEntries: multiple platforms at once and is more efficient since it does not require a separate import executor job for each platform. 
""" - report( + report_usage( "calls async_forward_entry_setup for " f"integration, {entry.domain} with title: {entry.title} " - f"and entry_id: {entry.entry_id}, which is deprecated and " - "will stop working in Home Assistant 2025.6, " + f"and entry_id: {entry.entry_id}, which is deprecated, " "await async_forward_entry_setups instead", - error_if_core=False, - error_if_integration=False, + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.6", ) if not entry.setup_lock.locked(): async with entry.setup_lock: @@ -2248,7 +2599,10 @@ class ConfigEntries: @callback def _data_to_save(self) -> dict[str, list[dict[str, Any]]]: """Return data to save.""" - return {"entries": [entry.as_dict() for entry in self._entries.values()]} + # typing does not know that the storage fragment will serialize to a dict + return { + "entries": [entry.as_storage_fragment for entry in self._entries.values()] # type: ignore[misc] + } async def async_wait_component(self, entry: ConfigEntry) -> bool: """Wait for an entry's component to load and return if the entry is loaded. @@ -2267,6 +2621,84 @@ class ConfigEntries: return False return entry.state is ConfigEntryState.LOADED + @callback + def async_update_issues(self) -> None: + """Update unique id collision issues.""" + issue_registry = ir.async_get(self.hass) + issues: set[str] = set() + + for issue in issue_registry.issues.values(): + if ( + issue.domain != HOMEASSISTANT_DOMAIN + or not (issue_data := issue.data) + or issue_data.get("issue_type") != ISSUE_UNIQUE_ID_COLLISION + ): + continue + issues.add(issue.issue_id) + + for domain, unique_ids in self._entries._domain_unique_id_index.items(): # noqa: SLF001 + # flipr creates duplicates during migration, and asks users to + # remove the duplicate. We don't need warn about it here too. + # We should remove the special case for "flipr" in HA Core 2025.4, + # when the flipr migration period ends + if domain == "flipr": + continue + for unique_id, entries in unique_ids.items(): + # We might mutate the list of entries, so we need a copy to not mess up + # the index + entries = list(entries) + + # There's no need to raise an issue for ignored entries, we can + # safely remove them once we no longer allow unique id collisions. 
+ # Iterate over a copy of the copy to allow mutating while iterating + for entry in list(entries): + if entry.source == SOURCE_IGNORE: + entries.remove(entry) + + if len(entries) < 2: + continue + issue_id = f"{ISSUE_UNIQUE_ID_COLLISION}_{domain}_{unique_id}" + issues.discard(issue_id) + titles = [f"'{entry.title}'" for entry in entries] + translation_placeholders = { + "domain": domain, + "configure_url": f"/config/integrations/integration/{domain}", + "unique_id": str(unique_id), + } + if len(titles) <= UNIQUE_ID_COLLISION_TITLE_LIMIT: + translation_key = "config_entry_unique_id_collision" + translation_placeholders["titles"] = ", ".join(titles) + else: + translation_key = "config_entry_unique_id_collision_many" + translation_placeholders["number_of_entries"] = str(len(titles)) + translation_placeholders["titles"] = ", ".join( + titles[:UNIQUE_ID_COLLISION_TITLE_LIMIT] + ) + translation_placeholders["title_limit"] = str( + UNIQUE_ID_COLLISION_TITLE_LIMIT + ) + + ir.async_create_issue( + self.hass, + HOMEASSISTANT_DOMAIN, + issue_id, + breaks_in_ha_version="2025.11.0", + data={ + "issue_type": ISSUE_UNIQUE_ID_COLLISION, + "unique_id": unique_id, + }, + is_fixable=False, + issue_domain=domain, + severity=ir.IssueSeverity.ERROR, + translation_key=translation_key, + translation_placeholders=translation_placeholders, + ) + + break # Only create one issue per domain + + for issue_id in issues: + ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) + @callback def _async_abort_entries_match( @@ -2288,7 +2720,9 @@ def _async_abort_entries_match( raise data_entry_flow.AbortFlow("already_configured") -class ConfigEntryBaseFlow(data_entry_flow.FlowHandler[ConfigFlowResult]): +class ConfigEntryBaseFlow( + data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult] +): """Base class for config and option flows.""" _flow_result = ConfigFlowResult @@ -2309,7 +2743,7 @@ class ConfigFlow(ConfigEntryBaseFlow): if not self.context: return None - return cast(str | None, self.context.get("unique_id")) + return self.context.get("unique_id") @staticmethod @callback @@ -2323,6 +2757,20 @@ class ConfigFlow(ConfigEntryBaseFlow): """Return options flow support for this handler.""" return cls.async_get_options_flow is not ConfigFlow.async_get_options_flow + @staticmethod + @callback + def async_get_subentry_flow( + config_entry: ConfigEntry, subentry_type: str + ) -> ConfigSubentryFlow: + """Get the subentry flow for this handler.""" + raise NotImplementedError + + @classmethod + @callback + def async_supported_subentries(cls, config_entry: ConfigEntry) -> tuple[str, ...]: + """Return subentries supported by this handler.""" + return () + @callback def _async_abort_entries_match( self, match_dict: dict[str, Any] | None = None @@ -2335,6 +2783,27 @@ class ConfigFlow(ConfigEntryBaseFlow): self._async_current_entries(include_ignore=False), match_dict ) + @callback + def _abort_if_unique_id_mismatch( + self, + *, + reason: str = "unique_id_mismatch", + description_placeholders: Mapping[str, str] | None = None, + ) -> None: + """Abort if the unique ID does not match the reauth/reconfigure context. + + Requires strings.json entry corresponding to the `reason` parameter + in user visible flows. 
+ """ + if ( + self.source == SOURCE_REAUTH + and self._get_reauth_entry().unique_id != self.unique_id + ) or ( + self.source == SOURCE_RECONFIGURE + and self._get_reconfigure_entry().unique_id != self.unique_id + ): + raise data_entry_flow.AbortFlow(reason, description_placeholders) + @callback def _abort_if_unique_id_configured( self, @@ -2463,14 +2932,23 @@ class ConfigFlow(ConfigEntryBaseFlow): ] async def async_step_ignore(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Ignore this config flow.""" + """Ignore this config flow. + + Ignoring a config flow works by creating a config entry with source set to + SOURCE_IGNORE. + + There will only be a single active discovery flow per device, also when the + integration has multiple discovery sources for the same device. This method + is called when the user ignores a discovered device or service, we then store + the key for the flow being ignored. + + Once the ignore config entry is created, ConfigEntriesFlowManager.async_finish_flow + will make sure the discovery key is kept up to date since it may not be stable + unlike the unique id. + """ await self.async_set_unique_id(user_input["unique_id"], raise_on_progress=False) return self.async_create_entry(title=user_input["title"], data={}) - async def async_step_unignore(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Rediscover a config entry by it's unique_id.""" - return self.async_abort(reason="not_implemented") - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -2601,8 +3079,17 @@ class ConfigFlow(ConfigEntryBaseFlow): description: str | None = None, description_placeholders: Mapping[str, str] | None = None, options: Mapping[str, Any] | None = None, + subentries: Iterable[ConfigSubentryData] | None = None, ) -> ConfigFlowResult: """Finish config flow and create a config entry.""" + if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: + report_usage( + f"creates a new entry in a '{self.source}' flow, " + "when it is expected to update an existing entry and abort", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.11", + integration_domain=self.handler, + ) result = super().async_create_entry( title=title, data=data, @@ -2612,6 +3099,7 @@ class ConfigFlow(ConfigEntryBaseFlow): result["minor_version"] = self.MINOR_VERSION result["options"] = options or {} + result["subentries"] = subentries or () result["version"] = self.VERSION return result @@ -2624,11 +3112,30 @@ class ConfigFlow(ConfigEntryBaseFlow): unique_id: str | None | UndefinedType = UNDEFINED, title: str | UndefinedType = UNDEFINED, data: Mapping[str, Any] | UndefinedType = UNDEFINED, + data_updates: Mapping[str, Any] | UndefinedType = UNDEFINED, options: Mapping[str, Any] | UndefinedType = UNDEFINED, - reason: str = "reauth_successful", + reason: str | UndefinedType = UNDEFINED, reload_even_if_entry_is_unchanged: bool = True, ) -> ConfigFlowResult: - """Update config entry, reload config entry and finish config flow.""" + """Update config entry, reload config entry and finish config flow. 
+ + :param data: replace the entry data with new data + :param data_updates: add items from data_updates to entry data - existing keys + are overridden + :param options: replace the entry options with new options + :param title: replace the title of the entry + :param unique_id: replace the unique_id of the entry + + :param reason: set the reason for the abort, defaults to + `reauth_successful` or `reconfigure_successful` based on flow source + + :param reload_even_if_entry_is_unchanged: set this to `False` if the entry + should not be reloaded if it is unchanged + """ + if data_updates is not UNDEFINED: + if data is not UNDEFINED: + raise ValueError("Cannot set both data and data_updates") + data = entry.data | data_updates result = self.hass.config_entries.async_update_entry( entry=entry, unique_id=unique_id, @@ -2638,32 +3145,205 @@ class ConfigFlow(ConfigEntryBaseFlow): ) if reload_even_if_entry_is_unchanged or result: self.hass.config_entries.async_schedule_reload(entry.entry_id) + if reason is UNDEFINED: + reason = "reauth_successful" + if self.source == SOURCE_RECONFIGURE: + reason = "reconfigure_successful" return self.async_abort(reason=reason) + @callback + def async_show_form( + self, + *, + step_id: str | None = None, + data_schema: vol.Schema | None = None, + errors: dict[str, str] | None = None, + description_placeholders: Mapping[str, str] | None = None, + last_step: bool | None = None, + preview: str | None = None, + ) -> ConfigFlowResult: + """Return the definition of a form to gather user input. -class OptionsFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): - """Flow to set options for a configuration entry.""" + The step_id parameter is deprecated and will be removed in a future release. + """ + if self.source == SOURCE_REAUTH and "entry_id" in self.context: + # If the integration does not provide a name for the reauth title, + # we append it to the description placeholders. + # We also need to check entry_id as some integrations bypass the + # reauth helpers and create a flow without it. 
+ description_placeholders = dict(description_placeholders or {}) + if description_placeholders.get(CONF_NAME) is None: + description_placeholders[CONF_NAME] = self._get_reauth_entry().title + return super().async_show_form( + step_id=step_id, + data_schema=data_schema, + errors=errors, + description_placeholders=description_placeholders, + last_step=last_step, + preview=preview, + ) - _flow_result = ConfigFlowResult + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + raise NotImplementedError + + @property + def _reauth_entry_id(self) -> str: + """Return reauth entry id.""" + if self.source != SOURCE_REAUTH: + raise ValueError(f"Source is {self.source}, expected {SOURCE_REAUTH}") + return self.context["entry_id"] + + @callback + def _get_reauth_entry(self) -> ConfigEntry: + """Return the reauth config entry linked to the current context.""" + return self.hass.config_entries.async_get_known_entry(self._reauth_entry_id) + + @property + def _reconfigure_entry_id(self) -> str: + """Return reconfigure entry id.""" + if self.source != SOURCE_RECONFIGURE: + raise ValueError(f"Source is {self.source}, expected {SOURCE_RECONFIGURE}") + return self.context["entry_id"] + + @callback + def _get_reconfigure_entry(self) -> ConfigEntry: + """Return the reconfigure config entry linked to the current context.""" + return self.hass.config_entries.async_get_known_entry( + self._reconfigure_entry_id + ) + + +class _ConfigSubFlowManager: + """Mixin class for flow managers which manage flows tied to a config entry.""" + + hass: HomeAssistant def _async_get_config_entry(self, config_entry_id: str) -> ConfigEntry: """Return config entry or raise if not found.""" - entry = self.hass.config_entries.async_get_entry(config_entry_id) - if entry is None: - raise UnknownEntry(config_entry_id) + return self.hass.config_entries.async_get_known_entry(config_entry_id) - return entry + +class ConfigSubentryFlowManager( + data_entry_flow.FlowManager[FlowContext, SubentryFlowResult, tuple[str, str]], + _ConfigSubFlowManager, +): + """Manage all the config subentry flows that are in progress.""" + + _flow_result = SubentryFlowResult + + async def async_create_flow( + self, + handler_key: tuple[str, str], + *, + context: FlowContext | None = None, + data: dict[str, Any] | None = None, + ) -> ConfigSubentryFlow: + """Create a subentry flow for a config entry. + + The entry_id and flow.handler[0] is the same thing to map entry with flow. + """ + if not context or "source" not in context: + raise KeyError("Context not set or doesn't have a source set") + + entry_id, subentry_type = handler_key + entry = self._async_get_config_entry(entry_id) + handler = await _async_get_flow_handler(self.hass, entry.domain, {}) + if subentry_type not in handler.async_supported_subentries(entry): + raise data_entry_flow.UnknownHandler( + f"Config entry '{entry.domain}' does not support subentry '{subentry_type}'" + ) + subentry_flow = handler.async_get_subentry_flow(entry, subentry_type) + subentry_flow.init_step = context["source"] + return subentry_flow + + async def async_finish_flow( + self, + flow: data_entry_flow.FlowHandler[ + FlowContext, SubentryFlowResult, tuple[str, str] + ], + result: SubentryFlowResult, + ) -> SubentryFlowResult: + """Finish a subentry flow and add a new subentry to the configuration entry. + + The flow.handler[0] and entry_id is the same thing to map flow with entry. 
+ """ + flow = cast(ConfigSubentryFlow, flow) + + if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: + return result + + entry_id = flow.handler[0] + entry = self.hass.config_entries.async_get_entry(entry_id) + if entry is None: + raise UnknownEntry(entry_id) + + unique_id = result.get("unique_id") + if unique_id is not None and not isinstance(unique_id, str): + raise HomeAssistantError("unique_id must be a string") + + self.hass.config_entries.async_add_subentry( + entry, + ConfigSubentry( + data=MappingProxyType(result["data"]), + title=result["title"], + unique_id=unique_id, + ), + ) + + result["result"] = True + return result + + +class ConfigSubentryFlow( + data_entry_flow.FlowHandler[FlowContext, SubentryFlowResult, tuple[str, str]] +): + """Base class for config subentry flows.""" + + _flow_result = SubentryFlowResult + handler: tuple[str, str] + + @callback + def async_create_entry( + self, + *, + title: str | None = None, + data: Mapping[str, Any], + description: str | None = None, + description_placeholders: Mapping[str, str] | None = None, + unique_id: str | None = None, + ) -> SubentryFlowResult: + """Finish config flow and create a config entry.""" + result = super().async_create_entry( + title=title, + data=data, + description=description, + description_placeholders=description_placeholders, + ) + + result["unique_id"] = unique_id + + return result + + +class OptionsFlowManager( + data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult], + _ConfigSubFlowManager, +): + """Manage all the config entry option flows that are in progress.""" + + _flow_result = ConfigFlowResult async def async_create_flow( self, handler_key: str, *, - context: dict[str, Any] | None = None, + context: ConfigFlowContext | None = None, data: dict[str, Any] | None = None, ) -> OptionsFlow: """Create an options flow for a config entry. - Entry_id and flow.handler is the same thing to map entry with flow. + The entry_id and the flow.handler is the same thing to map entry with flow. """ entry = self._async_get_config_entry(handler_key) handler = await _async_get_flow_handler(self.hass, entry.domain, {}) @@ -2671,21 +3351,23 @@ class OptionsFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): async def async_finish_flow( self, - flow: data_entry_flow.FlowHandler[ConfigFlowResult], + flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult], result: ConfigFlowResult, ) -> ConfigFlowResult: """Finish an options flow and update options for configuration entry. - Flow.handler and entry_id is the same thing to map flow with entry. + This method is called when a flow step returns FlowResultType.ABORT or + FlowResultType.CREATE_ENTRY. + + The flow.handler and the entry_id is the same thing to map flow with entry. 
""" flow = cast(OptionsFlow, flow) if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: return result - entry = self.hass.config_entries.async_get_entry(flow.handler) - if entry is None: - raise UnknownEntry(flow.handler) + entry = self.hass.config_entries.async_get_known_entry(flow.handler) + if result["data"] is not None: self.hass.config_entries.async_update_entry(entry, options=result["data"]) @@ -2693,7 +3375,7 @@ class OptionsFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): return result async def _async_setup_preview( - self, flow: data_entry_flow.FlowHandler[ConfigFlowResult] + self, flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult] ) -> None: """Set up preview for an option flow handler.""" entry = self._async_get_config_entry(flow.handler) @@ -2708,6 +3390,9 @@ class OptionsFlow(ConfigEntryBaseFlow): handler: str + _config_entry: ConfigEntry + """For compatibility only - to be removed in 2025.12""" + @callback def _async_abort_entries_match( self, match_dict: dict[str, Any] | None = None @@ -2716,32 +3401,76 @@ class OptionsFlow(ConfigEntryBaseFlow): Requires `already_configured` in strings.json in user visible flows. """ - - config_entry = cast( - ConfigEntry, self.hass.config_entries.async_get_entry(self.handler) - ) _async_abort_entries_match( [ entry - for entry in self.hass.config_entries.async_entries(config_entry.domain) - if entry is not config_entry and entry.source != SOURCE_IGNORE + for entry in self.hass.config_entries.async_entries( + self.config_entry.domain + ) + if entry is not self.config_entry and entry.source != SOURCE_IGNORE ], match_dict, ) + @property + def _config_entry_id(self) -> str: + """Return config entry id. + + Please note that this is not available inside `__init__` method, and + can only be referenced after initialisation. + """ + # This is the same as handler, but that's an implementation detail + if self.handler is None: + raise ValueError( + "The config entry id is not available during initialisation" + ) + return self.handler + + @property + def config_entry(self) -> ConfigEntry: + """Return the config entry linked to the current options flow. + + Please note that this is not available inside `__init__` method, and + can only be referenced after initialisation. + """ + # For compatibility only - to be removed in 2025.12 + if hasattr(self, "_config_entry"): + return self._config_entry + + if self.hass is None: + raise ValueError("The config entry is not available during initialisation") + return self.hass.config_entries.async_get_known_entry(self._config_entry_id) + + @config_entry.setter + def config_entry(self, value: ConfigEntry) -> None: + """Set the config entry value.""" + report_usage( + "sets option flow config_entry explicitly, which is deprecated", + core_behavior=ReportBehavior.ERROR, + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.12", + ) + self._config_entry = value + class OptionsFlowWithConfigEntry(OptionsFlow): - """Base class for options flows with config entry and options.""" + """Base class for options flows with config entry and options. + + This class is being phased out, and should not be referenced in new code. + It is kept only for backward compatibility, and only for custom integrations. 
+ """ def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" self._config_entry = config_entry self._options = deepcopy(dict(config_entry.options)) - - @property - def config_entry(self) -> ConfigEntry: - """Return the config entry.""" - return self._config_entry + report_usage( + "inherits from OptionsFlowWithConfigEntry", + core_behavior=ReportBehavior.ERROR, + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.IGNORE, + ) @property def options(self) -> dict[str, Any]: @@ -2755,7 +3484,7 @@ class EntityRegistryDisabledHandler: def __init__(self, hass: HomeAssistant) -> None: """Initialize the handler.""" self.hass = hass - self.registry: entity_registry.EntityRegistry | None = None + self.registry: er.EntityRegistry | None = None self.changed: set[str] = set() self._remove_call_later: Callable[[], None] | None = None @@ -2763,18 +3492,18 @@ class EntityRegistryDisabledHandler: def async_setup(self) -> None: """Set up the disable handler.""" self.hass.bus.async_listen( - entity_registry.EVENT_ENTITY_REGISTRY_UPDATED, + er.EVENT_ENTITY_REGISTRY_UPDATED, self._handle_entry_updated, event_filter=_handle_entry_updated_filter, ) @callback def _handle_entry_updated( - self, event: Event[entity_registry.EventEntityRegistryUpdatedData] + self, event: Event[er.EventEntityRegistryUpdatedData] ) -> None: """Handle entity registry entry update.""" if self.registry is None: - self.registry = entity_registry.async_get(self.hass) + self.registry = er.async_get(self.hass) entity_entry = self.registry.async_get(event.data["entity_id"]) @@ -2789,10 +3518,9 @@ class EntityRegistryDisabledHandler: ): return - config_entry = self.hass.config_entries.async_get_entry( + config_entry = self.hass.config_entries.async_get_known_entry( entity_entry.config_entry_id ) - assert config_entry is not None if config_entry.entry_id not in self.changed and config_entry.supports_unload: self.changed.add(config_entry.entry_id) @@ -2832,7 +3560,7 @@ class EntityRegistryDisabledHandler: @callback def _handle_entry_updated_filter( - event_data: entity_registry.EventEntityRegistryUpdatedData, + event_data: er.EventEntityRegistryUpdatedData, ) -> bool: """Handle entity registry entry update filter. 
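The options-flow changes above deprecate handing the config entry to the flow (and inheriting from OptionsFlowWithConfigEntry) in favour of the lazy config_entry property. A minimal sketch of the recommended shape; the option name and schema are illustrative, and note that config_entry is not usable inside __init__:

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlowResult, OptionsFlow


class ExampleOptionsFlow(OptionsFlow):
    """Options flow that reads self.config_entry lazily instead of storing it."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Manage the options."""
        if user_input is not None:
            return self.async_create_entry(data=user_input)
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        "scan_interval",
                        # Resolved on first access via async_get_known_entry,
                        # so nothing needs to be passed to __init__.
                        default=self.config_entry.options.get("scan_interval", 30),
                    ): int
                }
            ),
        )

With this shape, a config flow's async_get_options_flow can simply return ExampleOptionsFlow(); core assigns hass and the handler, and config_entry resolves from them on first use.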
@@ -2842,8 +3570,7 @@ def _handle_entry_updated_filter( return not ( event_data["action"] != "update" or "disabled_by" not in event_data["changes"] - or event_data["changes"]["disabled_by"] - is entity_registry.RegistryEntryDisabler.CONFIG_ENTRY + or event_data["changes"]["disabled_by"] is er.RegistryEntryDisabler.CONFIG_ENTRY ) diff --git a/homeassistant/const.py b/homeassistant/const.py index 953f65efce2..c026a8e5427 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -9,6 +9,7 @@ from typing import TYPE_CHECKING, Final from .helpers.deprecation import ( DeprecatedConstant, DeprecatedConstantEnum, + EnumWithDeprecatedMembers, all_with_deprecated_constants, check_if_deprecated_constant, dir_with_deprecated_constants, @@ -22,15 +23,15 @@ if TYPE_CHECKING: from .helpers.typing import NoEventData APPLICATION_NAME: Final = "HomeAssistant" -MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 9 +MAJOR_VERSION: Final = 2025 +MINOR_VERSION: Final = 1 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) -REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) +REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0) # Truthy date string triggers showing related deprecation warning messages. -REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "" +REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "2025.2" # Format for platform files PLATFORM_FORMAT: Final = "{platform}.{domain}" @@ -41,6 +42,7 @@ class Platform(StrEnum): AIR_QUALITY = "air_quality" ALARM_CONTROL_PANEL = "alarm_control_panel" + ASSIST_SATELLITE = "assist_satellite" BINARY_SENSOR = "binary_sensor" BUTTON = "button" CALENDAR = "calendar" @@ -60,7 +62,6 @@ class Platform(StrEnum): LAWN_MOWER = "lawn_mower" LIGHT = "light" LOCK = "lock" - MAILBOX = "mailbox" MEDIA_PLAYER = "media_player" NOTIFY = "notify" NUMBER = "number" @@ -75,9 +76,9 @@ class Platform(StrEnum): TIME = "time" TODO = "todo" TTS = "tts" + UPDATE = "update" VACUUM = "vacuum" VALVE = "valve" - UPDATE = "update" WAKE_WORD = "wake_word" WATER_HEATER = "water_heater" WEATHER = "weather" @@ -282,6 +283,8 @@ CONF_THEN: Final = "then" CONF_TIMEOUT: Final = "timeout" CONF_TIME_ZONE: Final = "time_zone" CONF_TOKEN: Final = "token" +CONF_TRIGGER: Final = "trigger" +CONF_TRIGGERS: Final = "triggers" CONF_TRIGGER_TIME: Final = "trigger_time" CONF_TTL: Final = "ttl" CONF_TYPE: Final = "type" @@ -333,133 +336,6 @@ EVENT_RECORDER_HOURLY_STATISTICS_GENERATED: Final = ( ) EVENT_SHOPPING_LIST_UPDATED: Final = "shopping_list_updated" -# #### DEVICE CLASSES #### -# DEVICE_CLASS_* below are deprecated as of 2021.12 -# use the SensorDeviceClass enum instead. 
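As the removed comment says, the replacement for the DEVICE_CLASS_* constants deleted below is the SensorDeviceClass enum from the sensor integration. A minimal migration sketch; the entity description key and unit are illustrative:

from homeassistant.components.sensor import SensorDeviceClass, SensorEntityDescription
from homeassistant.const import UnitOfTemperature

TEMPERATURE_DESCRIPTION = SensorEntityDescription(
    key="temperature",
    device_class=SensorDeviceClass.TEMPERATURE,  # was const.DEVICE_CLASS_TEMPERATURE
    native_unit_of_measurement=UnitOfTemperature.CELSIUS,  # was const.TEMP_CELSIUS
)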
-_DEPRECATED_DEVICE_CLASS_AQI: Final = DeprecatedConstant( - "aqi", "SensorDeviceClass.AQI", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_BATTERY: Final = DeprecatedConstant( - "battery", - "SensorDeviceClass.BATTERY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_CO: Final = DeprecatedConstant( - "carbon_monoxide", - "SensorDeviceClass.CO", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_CO2: Final = DeprecatedConstant( - "carbon_dioxide", - "SensorDeviceClass.CO2", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_CURRENT: Final = DeprecatedConstant( - "current", - "SensorDeviceClass.CURRENT", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_DATE: Final = DeprecatedConstant( - "date", "SensorDeviceClass.DATE", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_ENERGY: Final = DeprecatedConstant( - "energy", - "SensorDeviceClass.ENERGY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_FREQUENCY: Final = DeprecatedConstant( - "frequency", - "SensorDeviceClass.FREQUENCY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_GAS: Final = DeprecatedConstant( - "gas", "SensorDeviceClass.GAS", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_HUMIDITY: Final = DeprecatedConstant( - "humidity", - "SensorDeviceClass.HUMIDITY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_ILLUMINANCE: Final = DeprecatedConstant( - "illuminance", - "SensorDeviceClass.ILLUMINANCE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_MONETARY: Final = DeprecatedConstant( - "monetary", - "SensorDeviceClass.MONETARY", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_NITROGEN_DIOXIDE: Final = DeprecatedConstant( - "nitrogen_dioxide", - "SensorDeviceClass.NITROGEN_DIOXIDE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_NITROGEN_MONOXIDE: Final = DeprecatedConstant( - "nitrogen_monoxide", - "SensorDeviceClass.NITROGEN_MONOXIDE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_NITROUS_OXIDE: Final = DeprecatedConstant( - "nitrous_oxide", - "SensorDeviceClass.NITROUS_OXIDE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_OZONE: Final = DeprecatedConstant( - "ozone", "SensorDeviceClass.OZONE", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PM1: Final = DeprecatedConstant( - "pm1", "SensorDeviceClass.PM1", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PM10: Final = DeprecatedConstant( - "pm10", "SensorDeviceClass.PM10", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PM25: Final = DeprecatedConstant( - "pm25", "SensorDeviceClass.PM25", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_POWER_FACTOR: Final = DeprecatedConstant( - "power_factor", - "SensorDeviceClass.POWER_FACTOR", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_POWER: Final = DeprecatedConstant( - "power", "SensorDeviceClass.POWER", "2025.1" -) -_DEPRECATED_DEVICE_CLASS_PRESSURE: Final = DeprecatedConstant( - "pressure", - "SensorDeviceClass.PRESSURE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_SIGNAL_STRENGTH: Final = DeprecatedConstant( - "signal_strength", - "SensorDeviceClass.SIGNAL_STRENGTH", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_SULPHUR_DIOXIDE: Final = DeprecatedConstant( - "sulphur_dioxide", - "SensorDeviceClass.SULPHUR_DIOXIDE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_TEMPERATURE: Final = DeprecatedConstant( - "temperature", - "SensorDeviceClass.TEMPERATURE", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_TIMESTAMP: Final = DeprecatedConstant( - "timestamp", - "SensorDeviceClass.TIMESTAMP", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS: Final = DeprecatedConstant( - "volatile_organic_compounds", - "SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS", - "2025.1", -) -_DEPRECATED_DEVICE_CLASS_VOLTAGE: Final = DeprecatedConstant( - "voltage", - "SensorDeviceClass.VOLTAGE", - "2025.1", -) # #### STATES #### 
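For constants that are deprecated rather than deleted outright (for example the lock and alarm-control-panel states further down), const.py keeps serving the old names through the deprecation helpers imported at the top of the file. Roughly, module attribute access is routed as below; this is a sketch from memory and the exact wiring at the bottom of const.py may differ slightly:

from functools import partial

from homeassistant.helpers.deprecation import check_if_deprecated_constant

# Looking up e.g. STATE_LOCKED finds _DEPRECATED_STATE_LOCKED, logs a warning
# pointing at LockState.LOCKED, and returns the legacy "locked" value.
# __dir__ and __all__ are wired up with the matching dir_with_deprecated_constants
# and all_with_deprecated_constants helpers.
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())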
STATE_ON: Final = "on" @@ -476,25 +352,93 @@ STATE_PLAYING: Final = "playing" STATE_PAUSED: Final = "paused" STATE_IDLE: Final = "idle" STATE_STANDBY: Final = "standby" -STATE_ALARM_DISARMED: Final = "disarmed" -STATE_ALARM_ARMED_HOME: Final = "armed_home" -STATE_ALARM_ARMED_AWAY: Final = "armed_away" -STATE_ALARM_ARMED_NIGHT: Final = "armed_night" -STATE_ALARM_ARMED_VACATION: Final = "armed_vacation" -STATE_ALARM_ARMED_CUSTOM_BYPASS: Final = "armed_custom_bypass" -STATE_ALARM_PENDING: Final = "pending" -STATE_ALARM_ARMING: Final = "arming" -STATE_ALARM_DISARMING: Final = "disarming" -STATE_ALARM_TRIGGERED: Final = "triggered" -STATE_LOCKED: Final = "locked" -STATE_UNLOCKED: Final = "unlocked" -STATE_LOCKING: Final = "locking" -STATE_UNLOCKING: Final = "unlocking" -STATE_JAMMED: Final = "jammed" STATE_UNAVAILABLE: Final = "unavailable" STATE_OK: Final = "ok" STATE_PROBLEM: Final = "problem" +# #### LOCK STATES #### +# STATE_* below are deprecated as of 2024.10 +# use the LockState enum instead. +_DEPRECATED_STATE_LOCKED: Final = DeprecatedConstant( + "locked", + "LockState.LOCKED", + "2025.10", +) +_DEPRECATED_STATE_UNLOCKED: Final = DeprecatedConstant( + "unlocked", + "LockState.UNLOCKED", + "2025.10", +) +_DEPRECATED_STATE_LOCKING: Final = DeprecatedConstant( + "locking", + "LockState.LOCKING", + "2025.10", +) +_DEPRECATED_STATE_UNLOCKING: Final = DeprecatedConstant( + "unlocking", + "LockState.UNLOCKING", + "2025.10", +) +_DEPRECATED_STATE_JAMMED: Final = DeprecatedConstant( + "jammed", + "LockState.JAMMED", + "2025.10", +) + +# #### ALARM CONTROL PANEL STATES #### +# STATE_ALARM_* below are deprecated as of 2024.11 +# use the AlarmControlPanelState enum instead. +_DEPRECATED_STATE_ALARM_DISARMED: Final = DeprecatedConstant( + "disarmed", + "AlarmControlPanelState.DISARMED", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_HOME: Final = DeprecatedConstant( + "armed_home", + "AlarmControlPanelState.ARMED_HOME", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_AWAY: Final = DeprecatedConstant( + "armed_away", + "AlarmControlPanelState.ARMED_AWAY", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_NIGHT: Final = DeprecatedConstant( + "armed_night", + "AlarmControlPanelState.ARMED_NIGHT", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_VACATION: Final = DeprecatedConstant( + "armed_vacation", + "AlarmControlPanelState.ARMED_VACATION", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_CUSTOM_BYPASS: Final = DeprecatedConstant( + "armed_custom_bypass", + "AlarmControlPanelState.ARMED_CUSTOM_BYPASS", + "2025.11", +) +_DEPRECATED_STATE_ALARM_PENDING: Final = DeprecatedConstant( + "pending", + "AlarmControlPanelState.PENDING", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMING: Final = DeprecatedConstant( + "arming", + "AlarmControlPanelState.ARMING", + "2025.11", +) +_DEPRECATED_STATE_ALARM_DISARMING: Final = DeprecatedConstant( + "disarming", + "AlarmControlPanelState.DISARMING", + "2025.11", +) +_DEPRECATED_STATE_ALARM_TRIGGERED: Final = DeprecatedConstant( + "triggered", + "AlarmControlPanelState.TRIGGERED", + "2025.11", +) + # #### STATE AND EVENT ATTRIBUTES #### # Attribution ATTR_ATTRIBUTION: Final = "attribution" @@ -641,39 +585,18 @@ class UnitOfApparentPower(StrEnum): VOLT_AMPERE = "VA" -_DEPRECATED_POWER_VOLT_AMPERE: Final = DeprecatedConstantEnum( - UnitOfApparentPower.VOLT_AMPERE, - "2025.1", -) -"""Deprecated: please use UnitOfApparentPower.VOLT_AMPERE.""" - - # Power units class UnitOfPower(StrEnum): """Power units.""" WATT = "W" KILO_WATT = "kW" + MEGA_WATT = "MW" + GIGA_WATT = "GW" + 
TERA_WATT = "TW" BTU_PER_HOUR = "BTU/h" -_DEPRECATED_POWER_WATT: Final = DeprecatedConstantEnum( - UnitOfPower.WATT, - "2025.1", -) -"""Deprecated: please use UnitOfPower.WATT.""" -_DEPRECATED_POWER_KILO_WATT: Final = DeprecatedConstantEnum( - UnitOfPower.KILO_WATT, - "2025.1", -) -"""Deprecated: please use UnitOfPower.KILO_WATT.""" -_DEPRECATED_POWER_BTU_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfPower.BTU_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfPower.BTU_PER_HOUR.""" - - # Reactive power units class UnitOfReactivePower(StrEnum): """Reactive power units.""" @@ -692,28 +615,20 @@ _DEPRECATED_POWER_VOLT_AMPERE_REACTIVE: Final = DeprecatedConstantEnum( class UnitOfEnergy(StrEnum): """Energy units.""" - GIGA_JOULE = "GJ" - KILO_WATT_HOUR = "kWh" + JOULE = "J" + KILO_JOULE = "kJ" MEGA_JOULE = "MJ" - MEGA_WATT_HOUR = "MWh" + GIGA_JOULE = "GJ" + MILLIWATT_HOUR = "mWh" WATT_HOUR = "Wh" - - -_DEPRECATED_ENERGY_KILO_WATT_HOUR: Final = DeprecatedConstantEnum( - UnitOfEnergy.KILO_WATT_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfEnergy.KILO_WATT_HOUR.""" -_DEPRECATED_ENERGY_MEGA_WATT_HOUR: Final = DeprecatedConstantEnum( - UnitOfEnergy.MEGA_WATT_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfEnergy.MEGA_WATT_HOUR.""" -_DEPRECATED_ENERGY_WATT_HOUR: Final = DeprecatedConstantEnum( - UnitOfEnergy.WATT_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfEnergy.WATT_HOUR.""" + KILO_WATT_HOUR = "kWh" + MEGA_WATT_HOUR = "MWh" + GIGA_WATT_HOUR = "GWh" + TERA_WATT_HOUR = "TWh" + CALORIE = "cal" + KILO_CALORIE = "kcal" + MEGA_CALORIE = "Mcal" + GIGA_CALORIE = "Gcal" # Electric_current units @@ -724,37 +639,15 @@ class UnitOfElectricCurrent(StrEnum): AMPERE = "A" -_DEPRECATED_ELECTRIC_CURRENT_MILLIAMPERE: Final = DeprecatedConstantEnum( - UnitOfElectricCurrent.MILLIAMPERE, - "2025.1", -) -"""Deprecated: please use UnitOfElectricCurrent.MILLIAMPERE.""" -_DEPRECATED_ELECTRIC_CURRENT_AMPERE: Final = DeprecatedConstantEnum( - UnitOfElectricCurrent.AMPERE, - "2025.1", -) -"""Deprecated: please use UnitOfElectricCurrent.AMPERE.""" - - # Electric_potential units class UnitOfElectricPotential(StrEnum): """Electric potential units.""" + MICROVOLT = "µV" MILLIVOLT = "mV" VOLT = "V" -_DEPRECATED_ELECTRIC_POTENTIAL_MILLIVOLT: Final = DeprecatedConstantEnum( - UnitOfElectricPotential.MILLIVOLT, - "2025.1", -) -"""Deprecated: please use UnitOfElectricPotential.MILLIVOLT.""" -_DEPRECATED_ELECTRIC_POTENTIAL_VOLT: Final = DeprecatedConstantEnum( - UnitOfElectricPotential.VOLT, - "2025.1", -) -"""Deprecated: please use UnitOfElectricPotential.VOLT.""" - # Degree units DEGREE: Final = "°" @@ -773,23 +666,6 @@ class UnitOfTemperature(StrEnum): KELVIN = "K" -_DEPRECATED_TEMP_CELSIUS: Final = DeprecatedConstantEnum( - UnitOfTemperature.CELSIUS, - "2025.1", -) -"""Deprecated: please use UnitOfTemperature.CELSIUS""" -_DEPRECATED_TEMP_FAHRENHEIT: Final = DeprecatedConstantEnum( - UnitOfTemperature.FAHRENHEIT, - "2025.1", -) -"""Deprecated: please use UnitOfTemperature.FAHRENHEIT""" -_DEPRECATED_TEMP_KELVIN: Final = DeprecatedConstantEnum( - UnitOfTemperature.KELVIN, - "2025.1", -) -"""Deprecated: please use UnitOfTemperature.KELVIN""" - - # Time units class UnitOfTime(StrEnum): """Time units.""" @@ -805,53 +681,6 @@ class UnitOfTime(StrEnum): YEARS = "y" -_DEPRECATED_TIME_MICROSECONDS: Final = DeprecatedConstantEnum( - UnitOfTime.MICROSECONDS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.MICROSECONDS.""" -_DEPRECATED_TIME_MILLISECONDS: Final = DeprecatedConstantEnum( - 
UnitOfTime.MILLISECONDS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.MILLISECONDS.""" -_DEPRECATED_TIME_SECONDS: Final = DeprecatedConstantEnum( - UnitOfTime.SECONDS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.SECONDS.""" -_DEPRECATED_TIME_MINUTES: Final = DeprecatedConstantEnum( - UnitOfTime.MINUTES, - "2025.1", -) -"""Deprecated: please use UnitOfTime.MINUTES.""" -_DEPRECATED_TIME_HOURS: Final = DeprecatedConstantEnum( - UnitOfTime.HOURS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.HOURS.""" -_DEPRECATED_TIME_DAYS: Final = DeprecatedConstantEnum( - UnitOfTime.DAYS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.DAYS.""" -_DEPRECATED_TIME_WEEKS: Final = DeprecatedConstantEnum( - UnitOfTime.WEEKS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.WEEKS.""" -_DEPRECATED_TIME_MONTHS: Final = DeprecatedConstantEnum( - UnitOfTime.MONTHS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.MONTHS.""" -_DEPRECATED_TIME_YEARS: Final = DeprecatedConstantEnum( - UnitOfTime.YEARS, - "2025.1", -) -"""Deprecated: please use UnitOfTime.YEARS.""" - - # Length units class UnitOfLength(StrEnum): """Length units.""" @@ -864,48 +693,7 @@ class UnitOfLength(StrEnum): FEET = "ft" YARDS = "yd" MILES = "mi" - - -_DEPRECATED_LENGTH_MILLIMETERS: Final = DeprecatedConstantEnum( - UnitOfLength.MILLIMETERS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.MILLIMETERS.""" -_DEPRECATED_LENGTH_CENTIMETERS: Final = DeprecatedConstantEnum( - UnitOfLength.CENTIMETERS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.CENTIMETERS.""" -_DEPRECATED_LENGTH_METERS: Final = DeprecatedConstantEnum( - UnitOfLength.METERS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.METERS.""" -_DEPRECATED_LENGTH_KILOMETERS: Final = DeprecatedConstantEnum( - UnitOfLength.KILOMETERS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.KILOMETERS.""" -_DEPRECATED_LENGTH_INCHES: Final = DeprecatedConstantEnum( - UnitOfLength.INCHES, - "2025.1", -) -"""Deprecated: please use UnitOfLength.INCHES.""" -_DEPRECATED_LENGTH_FEET: Final = DeprecatedConstantEnum( - UnitOfLength.FEET, - "2025.1", -) -"""Deprecated: please use UnitOfLength.FEET.""" -_DEPRECATED_LENGTH_YARD: Final = DeprecatedConstantEnum( - UnitOfLength.YARDS, - "2025.1", -) -"""Deprecated: please use UnitOfLength.YARDS.""" -_DEPRECATED_LENGTH_MILES: Final = DeprecatedConstantEnum( - UnitOfLength.MILES, - "2025.1", -) -"""Deprecated: please use UnitOfLength.MILES.""" + NAUTICAL_MILES = "nmi" # Frequency units @@ -918,28 +706,6 @@ class UnitOfFrequency(StrEnum): GIGAHERTZ = "GHz" -_DEPRECATED_FREQUENCY_HERTZ: Final = DeprecatedConstantEnum( - UnitOfFrequency.HERTZ, - "2025.1", -) -"""Deprecated: please use UnitOfFrequency.HERTZ""" -_DEPRECATED_FREQUENCY_KILOHERTZ: Final = DeprecatedConstantEnum( - UnitOfFrequency.KILOHERTZ, - "2025.1", -) -"""Deprecated: please use UnitOfFrequency.KILOHERTZ""" -_DEPRECATED_FREQUENCY_MEGAHERTZ: Final = DeprecatedConstantEnum( - UnitOfFrequency.MEGAHERTZ, - "2025.1", -) -"""Deprecated: please use UnitOfFrequency.MEGAHERTZ""" -_DEPRECATED_FREQUENCY_GIGAHERTZ: Final = DeprecatedConstantEnum( - UnitOfFrequency.GIGAHERTZ, - "2025.1", -) -"""Deprecated: please use UnitOfFrequency.GIGAHERTZ""" - - # Pressure units class UnitOfPressure(StrEnum): """Pressure units.""" @@ -955,53 +721,6 @@ class UnitOfPressure(StrEnum): PSI = "psi" -_DEPRECATED_PRESSURE_PA: Final = DeprecatedConstantEnum( - UnitOfPressure.PA, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.PA""" -_DEPRECATED_PRESSURE_HPA: Final = 
DeprecatedConstantEnum( - UnitOfPressure.HPA, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.HPA""" -_DEPRECATED_PRESSURE_KPA: Final = DeprecatedConstantEnum( - UnitOfPressure.KPA, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.KPA""" -_DEPRECATED_PRESSURE_BAR: Final = DeprecatedConstantEnum( - UnitOfPressure.BAR, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.BAR""" -_DEPRECATED_PRESSURE_CBAR: Final = DeprecatedConstantEnum( - UnitOfPressure.CBAR, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.CBAR""" -_DEPRECATED_PRESSURE_MBAR: Final = DeprecatedConstantEnum( - UnitOfPressure.MBAR, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.MBAR""" -_DEPRECATED_PRESSURE_MMHG: Final = DeprecatedConstantEnum( - UnitOfPressure.MMHG, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.MMHG""" -_DEPRECATED_PRESSURE_INHG: Final = DeprecatedConstantEnum( - UnitOfPressure.INHG, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.INHG""" -_DEPRECATED_PRESSURE_PSI: Final = DeprecatedConstantEnum( - UnitOfPressure.PSI, - "2025.1", -) -"""Deprecated: please use UnitOfPressure.PSI""" - - # Sound pressure units class UnitOfSoundPressure(StrEnum): """Sound pressure units.""" @@ -1010,18 +729,6 @@ class UnitOfSoundPressure(StrEnum): WEIGHTED_DECIBEL_A = "dBA" -_DEPRECATED_SOUND_PRESSURE_DB: Final = DeprecatedConstantEnum( - UnitOfSoundPressure.DECIBEL, - "2025.1", -) -"""Deprecated: please use UnitOfSoundPressure.DECIBEL""" -_DEPRECATED_SOUND_PRESSURE_WEIGHTED_DBA: Final = DeprecatedConstantEnum( - UnitOfSoundPressure.WEIGHTED_DECIBEL_A, - "2025.1", -) -"""Deprecated: please use UnitOfSoundPressure.WEIGHTED_DECIBEL_A""" - - # Volume units class UnitOfVolume(StrEnum): """Volume units.""" @@ -1041,39 +748,6 @@ class UnitOfVolume(StrEnum): British/Imperial fluid ounces are not yet supported""" -_DEPRECATED_VOLUME_LITERS: Final = DeprecatedConstantEnum( - UnitOfVolume.LITERS, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.LITERS""" -_DEPRECATED_VOLUME_MILLILITERS: Final = DeprecatedConstantEnum( - UnitOfVolume.MILLILITERS, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.MILLILITERS""" -_DEPRECATED_VOLUME_CUBIC_METERS: Final = DeprecatedConstantEnum( - UnitOfVolume.CUBIC_METERS, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.CUBIC_METERS""" -_DEPRECATED_VOLUME_CUBIC_FEET: Final = DeprecatedConstantEnum( - UnitOfVolume.CUBIC_FEET, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.CUBIC_FEET""" - -_DEPRECATED_VOLUME_GALLONS: Final = DeprecatedConstantEnum( - UnitOfVolume.GALLONS, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.GALLONS""" -_DEPRECATED_VOLUME_FLUID_OUNCE: Final = DeprecatedConstantEnum( - UnitOfVolume.FLUID_OUNCES, - "2025.1", -) -"""Deprecated: please use UnitOfVolume.FLUID_OUNCES""" - - # Volume Flow Rate units class UnitOfVolumeFlowRate(StrEnum): """Volume flow rate units.""" @@ -1082,21 +756,29 @@ class UnitOfVolumeFlowRate(StrEnum): CUBIC_FEET_PER_MINUTE = "ft³/min" LITERS_PER_MINUTE = "L/min" GALLONS_PER_MINUTE = "gal/min" + MILLILITERS_PER_SECOND = "mL/s" -_DEPRECATED_VOLUME_FLOW_RATE_CUBIC_METERS_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, - "2025.1", +class UnitOfArea(StrEnum): + """Area units.""" + + SQUARE_METERS = "m²" + SQUARE_CENTIMETERS = "cm²" + SQUARE_KILOMETERS = "km²" + SQUARE_MILLIMETERS = "mm²" + SQUARE_INCHES = "in²" + SQUARE_FEET = "ft²" + SQUARE_YARDS = "yd²" + SQUARE_MILES = "mi²" + ACRES = "ac" + HECTARES = "ha" + + 
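The UnitOfArea enum added above replaces the module-level AREA_SQUARE_METERS string (removed just below), with the old name kept importable through the _DEPRECATED_AREA_SQUARE_METERS shim until 2025.12. A small sketch showing that existing string comparisons keep working; the set of units is illustrative:

from homeassistant.const import UnitOfArea

# UnitOfArea is a StrEnum, so string comparisons against stored values still match.
assert UnitOfArea.SQUARE_METERS == "m²"

# Imperial and additional metric variants are available alongside it.
SUPPORTED_AREA_UNITS = {
    UnitOfArea.SQUARE_METERS,
    UnitOfArea.SQUARE_FEET,
    UnitOfArea.ACRES,
    UnitOfArea.HECTARES,
}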
+_DEPRECATED_AREA_SQUARE_METERS: Final = DeprecatedConstantEnum( + UnitOfArea.SQUARE_METERS, + "2025.12", ) -"""Deprecated: please use UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR""" -_DEPRECATED_VOLUME_FLOW_RATE_CUBIC_FEET_PER_MINUTE: Final = DeprecatedConstantEnum( - UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, - "2025.1", -) -"""Deprecated: please use UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE""" - -# Area units -AREA_SQUARE_METERS: Final = "m²" +"""Deprecated: please use UnitOfArea.SQUARE_METERS""" # Mass units @@ -1112,52 +794,35 @@ class UnitOfMass(StrEnum): STONES = "st" -_DEPRECATED_MASS_GRAMS: Final = DeprecatedConstantEnum( - UnitOfMass.GRAMS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.GRAMS""" -_DEPRECATED_MASS_KILOGRAMS: Final = DeprecatedConstantEnum( - UnitOfMass.KILOGRAMS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.KILOGRAMS""" -_DEPRECATED_MASS_MILLIGRAMS: Final = DeprecatedConstantEnum( - UnitOfMass.MILLIGRAMS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.MILLIGRAMS""" -_DEPRECATED_MASS_MICROGRAMS: Final = DeprecatedConstantEnum( - UnitOfMass.MICROGRAMS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.MICROGRAMS""" -_DEPRECATED_MASS_OUNCES: Final = DeprecatedConstantEnum( - UnitOfMass.OUNCES, - "2025.1", -) -"""Deprecated: please use UnitOfMass.OUNCES""" -_DEPRECATED_MASS_POUNDS: Final = DeprecatedConstantEnum( - UnitOfMass.POUNDS, - "2025.1", -) -"""Deprecated: please use UnitOfMass.POUNDS""" - - -# Conductivity units -class UnitOfConductivity(StrEnum): +class UnitOfConductivity( + StrEnum, + metaclass=EnumWithDeprecatedMembers, + deprecated={ + "SIEMENS": ("UnitOfConductivity.SIEMENS_PER_CM", "2025.11.0"), + "MICROSIEMENS": ("UnitOfConductivity.MICROSIEMENS_PER_CM", "2025.11.0"), + "MILLISIEMENS": ("UnitOfConductivity.MILLISIEMENS_PER_CM", "2025.11.0"), + }, +): """Conductivity units.""" + SIEMENS_PER_CM = "S/cm" + MICROSIEMENS_PER_CM = "µS/cm" + MILLISIEMENS_PER_CM = "mS/cm" + + # Deprecated aliases SIEMENS = "S/cm" + """Deprecated: Please use UnitOfConductivity.SIEMENS_PER_CM""" MICROSIEMENS = "µS/cm" + """Deprecated: Please use UnitOfConductivity.MICROSIEMENS_PER_CM""" MILLISIEMENS = "mS/cm" + """Deprecated: Please use UnitOfConductivity.MILLISIEMENS_PER_CM""" _DEPRECATED_CONDUCTIVITY: Final = DeprecatedConstantEnum( - UnitOfConductivity.MICROSIEMENS, - "2025.6", + UnitOfConductivity.MICROSIEMENS_PER_CM, + "2025.11", ) -"""Deprecated: please use UnitOfConductivity.MICROSIEMENS""" +"""Deprecated: please use UnitOfConductivity.MICROSIEMENS_PER_CM""" # Light units LIGHT_LUX: Final = "lx" @@ -1180,19 +845,6 @@ class UnitOfIrradiance(StrEnum): BTUS_PER_HOUR_SQUARE_FOOT = "BTU/(h⋅ft²)" -# Irradiation units -_DEPRECATED_IRRADIATION_WATTS_PER_SQUARE_METER: Final = DeprecatedConstantEnum( - UnitOfIrradiance.WATTS_PER_SQUARE_METER, - "2025.1", -) -"""Deprecated: please use UnitOfIrradiance.WATTS_PER_SQUARE_METER""" -_DEPRECATED_IRRADIATION_BTUS_PER_HOUR_SQUARE_FOOT: Final = DeprecatedConstantEnum( - UnitOfIrradiance.BTUS_PER_HOUR_SQUARE_FOOT, - "2025.1", -) -"""Deprecated: please use UnitOfIrradiance.BTUS_PER_HOUR_SQUARE_FOOT""" - - class UnitOfVolumetricFlux(StrEnum): """Volumetric flux, commonly used for precipitation intensity. 
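The conductivity rename above keeps the old member names working as deprecated aliases via the EnumWithDeprecatedMembers metaclass, so integrations have until 2025.11 to migrate. A minimal before/after sketch:

from homeassistant.const import UnitOfConductivity

# Old spelling: still resolves, but is flagged as deprecated by the
# EnumWithDeprecatedMembers metaclass configured on the enum above.
legacy_unit = UnitOfConductivity.MICROSIEMENS

# New spelling, explicit about the per-centimetre reference.
unit = UnitOfConductivity.MICROSIEMENS_PER_CM

# Both share the same string value, so stored data stays compatible.
assert legacy_unit == unit == "µS/cm"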
@@ -1230,27 +882,6 @@ class UnitOfPrecipitationDepth(StrEnum): """Derived from cm³/cm²""" -# Precipitation units -_DEPRECATED_PRECIPITATION_INCHES: Final = DeprecatedConstantEnum( - UnitOfPrecipitationDepth.INCHES, "2025.1" -) -"""Deprecated: please use UnitOfPrecipitationDepth.INCHES""" -_DEPRECATED_PRECIPITATION_MILLIMETERS: Final = DeprecatedConstantEnum( - UnitOfPrecipitationDepth.MILLIMETERS, - "2025.1", -) -"""Deprecated: please use UnitOfPrecipitationDepth.MILLIMETERS""" -_DEPRECATED_PRECIPITATION_MILLIMETERS_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR""" -_DEPRECATED_PRECIPITATION_INCHES_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.INCHES_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.INCHES_PER_HOUR""" - # Concentration units CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: Final = "µg/m³" CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: Final = "mg/m³" @@ -1260,55 +891,25 @@ CONCENTRATION_PARTS_PER_MILLION: Final = "ppm" CONCENTRATION_PARTS_PER_BILLION: Final = "ppb" +class UnitOfBloodGlucoseConcentration(StrEnum): + """Blood glucose concentration units.""" + + MILLIGRAMS_PER_DECILITER = "mg/dL" + MILLIMOLE_PER_LITER = "mmol/L" + + # Speed units class UnitOfSpeed(StrEnum): """Speed units.""" BEAUFORT = "Beaufort" FEET_PER_SECOND = "ft/s" + INCHES_PER_SECOND = "in/s" METERS_PER_SECOND = "m/s" KILOMETERS_PER_HOUR = "km/h" KNOTS = "kn" MILES_PER_HOUR = "mph" - - -_DEPRECATED_SPEED_FEET_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfSpeed.FEET_PER_SECOND, "2025.1" -) -"""Deprecated: please use UnitOfSpeed.FEET_PER_SECOND""" -_DEPRECATED_SPEED_METERS_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfSpeed.METERS_PER_SECOND, "2025.1" -) -"""Deprecated: please use UnitOfSpeed.METERS_PER_SECOND""" -_DEPRECATED_SPEED_KILOMETERS_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfSpeed.KILOMETERS_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfSpeed.KILOMETERS_PER_HOUR""" -_DEPRECATED_SPEED_KNOTS: Final = DeprecatedConstantEnum(UnitOfSpeed.KNOTS, "2025.1") -"""Deprecated: please use UnitOfSpeed.KNOTS""" -_DEPRECATED_SPEED_MILES_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfSpeed.MILES_PER_HOUR, "2025.1" -) -"""Deprecated: please use UnitOfSpeed.MILES_PER_HOUR""" - -_DEPRECATED_SPEED_MILLIMETERS_PER_DAY: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.MILLIMETERS_PER_DAY, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.MILLIMETERS_PER_DAY""" - -_DEPRECATED_SPEED_INCHES_PER_DAY: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.INCHES_PER_DAY, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.INCHES_PER_DAY""" - -_DEPRECATED_SPEED_INCHES_PER_HOUR: Final = DeprecatedConstantEnum( - UnitOfVolumetricFlux.INCHES_PER_HOUR, - "2025.1", -) -"""Deprecated: please use UnitOfVolumetricFlux.INCHES_PER_HOUR""" + MILLIMETERS_PER_SECOND = "mm/s" # Signal_strength units @@ -1343,90 +944,6 @@ class UnitOfInformation(StrEnum): YOBIBYTES = "YiB" -_DEPRECATED_DATA_BITS: Final = DeprecatedConstantEnum(UnitOfInformation.BITS, "2025.1") -"""Deprecated: please use UnitOfInformation.BITS""" -_DEPRECATED_DATA_KILOBITS: Final = DeprecatedConstantEnum( - UnitOfInformation.KILOBITS, "2025.1" -) -"""Deprecated: please use UnitOfInformation.KILOBITS""" -_DEPRECATED_DATA_MEGABITS: Final = DeprecatedConstantEnum( - UnitOfInformation.MEGABITS, "2025.1" -) -"""Deprecated: please use 
UnitOfInformation.MEGABITS""" -_DEPRECATED_DATA_GIGABITS: Final = DeprecatedConstantEnum( - UnitOfInformation.GIGABITS, "2025.1" -) -"""Deprecated: please use UnitOfInformation.GIGABITS""" -_DEPRECATED_DATA_BYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.BYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.BYTES""" -_DEPRECATED_DATA_KILOBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.KILOBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.KILOBYTES""" -_DEPRECATED_DATA_MEGABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.MEGABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.MEGABYTES""" -_DEPRECATED_DATA_GIGABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.GIGABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.GIGABYTES""" -_DEPRECATED_DATA_TERABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.TERABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.TERABYTES""" -_DEPRECATED_DATA_PETABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.PETABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.PETABYTES""" -_DEPRECATED_DATA_EXABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.EXABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.EXABYTES""" -_DEPRECATED_DATA_ZETTABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.ZETTABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.ZETTABYTES""" -_DEPRECATED_DATA_YOTTABYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.YOTTABYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.YOTTABYTES""" -_DEPRECATED_DATA_KIBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.KIBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.KIBIBYTES""" -_DEPRECATED_DATA_MEBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.MEBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.MEBIBYTES""" -_DEPRECATED_DATA_GIBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.GIBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.GIBIBYTES""" -_DEPRECATED_DATA_TEBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.TEBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.TEBIBYTES""" -_DEPRECATED_DATA_PEBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.PEBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.PEBIBYTES""" -_DEPRECATED_DATA_EXBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.EXBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.EXBIBYTES""" -_DEPRECATED_DATA_ZEBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.ZEBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.ZEBIBYTES""" -_DEPRECATED_DATA_YOBIBYTES: Final = DeprecatedConstantEnum( - UnitOfInformation.YOBIBYTES, "2025.1" -) -"""Deprecated: please use UnitOfInformation.YOBIBYTES""" - - # Data_rate units class UnitOfDataRate(StrEnum): """Data rate units.""" @@ -1444,63 +961,6 @@ class UnitOfDataRate(StrEnum): GIBIBYTES_PER_SECOND = "GiB/s" -_DEPRECATED_DATA_RATE_BITS_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.BITS_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.BITS_PER_SECOND""" -_DEPRECATED_DATA_RATE_KILOBITS_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.KILOBITS_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.KILOBITS_PER_SECOND""" -_DEPRECATED_DATA_RATE_MEGABITS_PER_SECOND: Final = DeprecatedConstantEnum( - 
UnitOfDataRate.MEGABITS_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.MEGABITS_PER_SECOND""" -_DEPRECATED_DATA_RATE_GIGABITS_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.GIGABITS_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.GIGABITS_PER_SECOND""" -_DEPRECATED_DATA_RATE_BYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.BYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.BYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_KILOBYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.KILOBYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.KILOBYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_MEGABYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.MEGABYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.MEGABYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_GIGABYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.GIGABYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.GIGABYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_KIBIBYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.KIBIBYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.KIBIBYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_MEBIBYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.MEBIBYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.MEBIBYTES_PER_SECOND""" -_DEPRECATED_DATA_RATE_GIBIBYTES_PER_SECOND: Final = DeprecatedConstantEnum( - UnitOfDataRate.GIBIBYTES_PER_SECOND, - "2025.1", -) -"""Deprecated: please use UnitOfDataRate.GIBIBYTES_PER_SECOND""" - - # States COMPRESSED_STATE_STATE: Final = "s" COMPRESSED_STATE_ATTRIBUTES: Final = "a" @@ -1596,6 +1056,7 @@ RESTART_EXIT_CODE: Final = 100 UNIT_NOT_RECOGNIZED_TEMPLATE: Final = "{} is not a recognized {} unit." LENGTH: Final = "length" +AREA: Final = "area" MASS: Final = "mass" PRESSURE: Final = "pressure" VOLUME: Final = "volume" @@ -1633,14 +1094,6 @@ class EntityCategory(StrEnum): DIAGNOSTIC = "diagnostic" -# ENTITY_CATEGOR* below are deprecated as of 2021.12 -# use the EntityCategory enum instead. -_DEPRECATED_ENTITY_CATEGORY_CONFIG: Final = DeprecatedConstantEnum( - EntityCategory.CONFIG, "2025.1" -) -_DEPRECATED_ENTITY_CATEGORY_DIAGNOSTIC: Final = DeprecatedConstantEnum( - EntityCategory.DIAGNOSTIC, "2025.1" -) ENTITY_CATEGORIES: Final[list[str]] = [cls.value for cls in EntityCategory] # The ID of the Home Assistant Media Player Cast App diff --git a/homeassistant/core.py b/homeassistant/core.py index b797798134e..da7a206b14e 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -18,16 +18,12 @@ from collections.abc import ( ValuesView, ) import concurrent.futures -from contextlib import suppress from dataclasses import dataclass import datetime import enum import functools -from functools import cached_property import inspect import logging -import os -import pathlib import re import threading import time @@ -43,11 +39,10 @@ from typing import ( cast, overload, ) -from urllib.parse import urlparse +from propcache import cached_property, under_cached_property from typing_extensions import TypeVar import voluptuous as vol -import yarl from . 
import util from .const import ( @@ -55,7 +50,6 @@ from .const import ( ATTR_FRIENDLY_NAME, ATTR_SERVICE, ATTR_SERVICE_DATA, - BASE_PLATFORMS, COMPRESSED_STATE_ATTRIBUTES, COMPRESSED_STATE_CONTEXT, COMPRESSED_STATE_LAST_CHANGED, @@ -77,7 +71,6 @@ from .const import ( MAX_EXPECTED_ENTITY_IDS, MAX_LENGTH_EVENT_EVENT_TYPE, MAX_LENGTH_STATE_STATE, - UnitOfLength, __version__, ) from .exceptions import ( @@ -90,14 +83,15 @@ from .exceptions import ( Unauthorized, ) from .helpers.deprecation import ( - DeprecatedConstantEnum, + DeferredDeprecatedAlias, + EnumWithDeprecatedMembers, all_with_deprecated_constants, check_if_deprecated_constant, dir_with_deprecated_constants, ) from .helpers.json import json_bytes, json_fragment -from .helpers.typing import UNDEFINED, UndefinedType, VolSchemaType -from .util import dt as dt_util, location +from .helpers.typing import VolSchemaType +from .util import dt as dt_util from .util.async_ import ( cancelling, create_eager_task, @@ -112,18 +106,11 @@ from .util.json import JsonObjectType from .util.read_only_dict import ReadOnlyDict from .util.timeout import TimeoutManager from .util.ulid import ulid_at_time, ulid_now -from .util.unit_system import ( - _CONF_UNIT_SYSTEM_IMPERIAL, - _CONF_UNIT_SYSTEM_US_CUSTOMARY, - METRIC_SYSTEM, - UnitSystem, - get_unit_system, -) # Typing imports that create a circular dependency if TYPE_CHECKING: from .auth import AuthManager - from .components.http import ApiConfig, HomeAssistantHTTP + from .components.http import HomeAssistantHTTP from .config_entries import ConfigEntries from .helpers.entity import StateInfo @@ -137,10 +124,6 @@ _SENTINEL = object() _DataT = TypeVar("_DataT", bound=Mapping[str, Any], default=Mapping[str, Any]) type CALLBACK_TYPE = Callable[[], None] -CORE_STORAGE_KEY = "core.config" -CORE_STORAGE_VERSION = 1 -CORE_STORAGE_MINOR_VERSION = 4 - DOMAIN = "homeassistant" # How long to wait to log tasks that are blocking @@ -150,7 +133,16 @@ type ServiceResponse = JsonObjectType | None type EntityServiceResponse = dict[str, ServiceResponse] -class ConfigSource(enum.StrEnum): +class ConfigSource( + enum.StrEnum, + metaclass=EnumWithDeprecatedMembers, + deprecated={ + "DEFAULT": ("core_config.ConfigSource.DEFAULT", "2025.11.0"), + "DISCOVERED": ("core_config.ConfigSource.DISCOVERED", "2025.11.0"), + "STORAGE": ("core_config.ConfigSource.STORAGE", "2025.11.0"), + "YAML": ("core_config.ConfigSource.YAML", "2025.11.0"), + }, +): """Source of core configuration.""" DEFAULT = "default" @@ -184,12 +176,17 @@ class EventStateReportedData(EventStateEventData): old_last_reported: datetime.datetime -# SOURCE_* are deprecated as of Home Assistant 2022.2, use ConfigSource instead -_DEPRECATED_SOURCE_DISCOVERED = DeprecatedConstantEnum( - ConfigSource.DISCOVERED, "2025.1" +def _deprecated_core_config() -> Any: + # pylint: disable-next=import-outside-toplevel + from . import core_config + + return core_config.Config + + +# The Config class was moved to core_config in Home Assistant 2024.11 +_DEPRECATED_Config = DeferredDeprecatedAlias( + _deprecated_core_config, "homeassistant.core_config.Config", "2025.11" ) -_DEPRECATED_SOURCE_STORAGE = DeprecatedConstantEnum(ConfigSource.STORAGE, "2025.1") -_DEPRECATED_SOURCE_YAML = DeprecatedConstantEnum(ConfigSource.YAML, "2025.1") # How long to wait until things that run on startup have to finish. @@ -335,6 +332,8 @@ class HassJob[**_P, _R_co]: we run the job. 
""" + __slots__ = ("target", "name", "_cancel_on_shutdown", "_cache") + def __init__( self, target: Callable[_P, _R_co], @@ -347,12 +346,13 @@ class HassJob[**_P, _R_co]: self.target: Final = target self.name = name self._cancel_on_shutdown = cancel_on_shutdown + self._cache: dict[str, Any] = {} if job_type: # Pre-set the cached_property so we # avoid the function call - self.__dict__["job_type"] = job_type + self._cache["job_type"] = job_type - @cached_property + @under_cached_property def job_type(self) -> HassJobType: """Return the job type.""" return get_hassjob_callable_job_type(self.target) @@ -428,6 +428,9 @@ class HomeAssistant: # pylint: disable-next=import-outside-toplevel from . import loader + # pylint: disable-next=import-outside-toplevel + from .core_config import Config + # This is a dictionary that any component can store any data on. self.data = HassDict() self.loop = asyncio.get_running_loop() @@ -644,12 +647,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_add_job`, which is deprecated and will be removed in Home " - "Assistant 2025.4; Please review " + frame.report_usage( + "calls `async_add_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.4", ) if target is None: @@ -700,12 +703,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_add_hass_job`, which is deprecated and will be removed in Home " - "Assistant 2025.5; Please review " + frame.report_usage( + "calls `async_add_hass_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/04/07/deprecate_add_hass_job" " for replacement options", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.5", ) return self._async_add_hass_job(hassjob, *args, background=background) @@ -974,12 +977,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_run_job`, which is deprecated and will be removed in Home " - "Assistant 2025.4; Please review " + frame.report_usage( + "calls `async_run_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.4", ) if asyncio.iscoroutine(target): @@ -1244,6 +1247,8 @@ class HomeAssistant: class Context: """The context that triggered something.""" + __slots__ = ("id", "user_id", "parent_id", "origin_event", "_cache") + def __init__( self, user_id: str | None = None, @@ -1255,6 +1260,7 @@ class Context: self.user_id = user_id self.parent_id = parent_id self.origin_event: Event[Any] | None = None + self._cache: dict[str, Any] = {} def __eq__(self, other: object) -> bool: """Compare contexts.""" @@ -1268,7 +1274,7 @@ class Context: """Create a deep copy of this context.""" return Context(user_id=self.user_id, parent_id=self.parent_id, id=self.id) - @cached_property + @under_cached_property def _as_dict(self) -> dict[str, str | None]: """Return a dictionary representation of the context. 
@@ -1285,12 +1291,12 @@ class Context: """Return a ReadOnlyDict representation of the context.""" return self._as_read_only_dict - @cached_property + @under_cached_property def _as_read_only_dict(self) -> ReadOnlyDict[str, str | None]: """Return a ReadOnlyDict representation of the context.""" return ReadOnlyDict(self._as_dict) - @cached_property + @under_cached_property def json_fragment(self) -> json_fragment: """Return a JSON fragment of the context.""" return json_fragment(json_bytes(self._as_dict)) @@ -1315,6 +1321,15 @@ class EventOrigin(enum.Enum): class Event(Generic[_DataT]): """Representation of an event within the bus.""" + __slots__ = ( + "event_type", + "data", + "origin", + "time_fired_timestamp", + "context", + "_cache", + ) + def __init__( self, event_type: EventType[_DataT] | str, @@ -1333,13 +1348,14 @@ class Event(Generic[_DataT]): self.context = context if not context.origin_event: context.origin_event = self + self._cache: dict[str, Any] = {} - @cached_property + @under_cached_property def time_fired(self) -> datetime.datetime: """Return time fired as a timestamp.""" return dt_util.utc_from_timestamp(self.time_fired_timestamp) - @cached_property + @under_cached_property def _as_dict(self) -> dict[str, Any]: """Create a dict representation of this Event. @@ -1364,7 +1380,7 @@ class Event(Generic[_DataT]): """ return self._as_read_only_dict - @cached_property + @under_cached_property def _as_read_only_dict(self) -> ReadOnlyDict[str, Any]: """Create a ReadOnlyDict representation of this Event.""" as_dict = self._as_dict @@ -1380,7 +1396,7 @@ class Event(Generic[_DataT]): as_dict["context"] = ReadOnlyDict(context) return ReadOnlyDict(as_dict) - @cached_property + @under_cached_property def json_fragment(self) -> json_fragment: """Return an event as a JSON fragment.""" return json_fragment(json_bytes(self._as_dict)) @@ -1610,10 +1626,10 @@ class EventBus: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_listen` with run_immediately, which is" - " deprecated and will be removed in Home Assistant 2025.5", - error_if_core=False, + frame.report_usage( + "calls `async_listen` with run_immediately", + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.5", ) if event_filter is not None and not is_callback_check_partial(event_filter): @@ -1680,10 +1696,10 @@ class EventBus: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_listen_once` with run_immediately, which is " - "deprecated and will be removed in Home Assistant 2025.5", - error_if_core=False, + frame.report_usage( + "calls `async_listen_once` with run_immediately", + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.5", ) one_time_listener: _OneTimeListener[_DataT] = _OneTimeListener( @@ -1751,6 +1767,21 @@ class State: object_id: Object id of this state. 
""" + __slots__ = ( + "entity_id", + "state", + "attributes", + "last_changed", + "last_reported", + "last_updated", + "context", + "state_info", + "domain", + "object_id", + "last_updated_timestamp", + "_cache", + ) + def __init__( self, entity_id: str, @@ -1765,6 +1796,7 @@ class State: last_updated_timestamp: float | None = None, ) -> None: """Initialize a new state.""" + self._cache: dict[str, Any] = {} state = str(state) if validate_entity_id and not valid_entity_id(entity_id): @@ -1798,31 +1830,31 @@ class State: last_updated_timestamp = last_updated.timestamp() self.last_updated_timestamp = last_updated_timestamp if self.last_changed == last_updated: - self.__dict__["last_changed_timestamp"] = last_updated_timestamp + self._cache["last_changed_timestamp"] = last_updated_timestamp # If last_reported is the same as last_updated async_set will pass # the same datetime object for both values so we can use an identity # check here. if self.last_reported is last_updated: - self.__dict__["last_reported_timestamp"] = last_updated_timestamp + self._cache["last_reported_timestamp"] = last_updated_timestamp - @cached_property + @under_cached_property def name(self) -> str: """Name of this state.""" return self.attributes.get(ATTR_FRIENDLY_NAME) or self.object_id.replace( "_", " " ) - @cached_property + @under_cached_property def last_changed_timestamp(self) -> float: """Timestamp of last change.""" return self.last_changed.timestamp() - @cached_property + @under_cached_property def last_reported_timestamp(self) -> float: """Timestamp of last report.""" return self.last_reported.timestamp() - @cached_property + @under_cached_property def _as_dict(self) -> dict[str, Any]: """Return a dict representation of the State. @@ -1863,7 +1895,7 @@ class State: """ return self._as_read_only_dict - @cached_property + @under_cached_property def _as_read_only_dict( self, ) -> ReadOnlyDict[str, datetime.datetime | Collection[Any]]: @@ -1878,17 +1910,17 @@ class State: as_dict["context"] = ReadOnlyDict(context) return ReadOnlyDict(as_dict) - @cached_property + @under_cached_property def as_dict_json(self) -> bytes: """Return a JSON string of the State.""" return json_bytes(self._as_dict) - @cached_property + @under_cached_property def json_fragment(self) -> json_fragment: """Return a JSON fragment of the State.""" return json_fragment(self.as_dict_json) - @cached_property + @under_cached_property def as_compressed_state(self) -> CompressedState: """Build a compressed dict of a state for adds. @@ -1916,7 +1948,7 @@ class State: ) return compressed_state - @cached_property + @under_cached_property def as_compressed_state_json(self) -> bytes: """Build a compressed JSON key value pair of a state for adds. 
@@ -2308,7 +2340,7 @@ class StateMachine: # mypy does not understand this is only possible if old_state is not None old_last_reported = old_state.last_reported # type: ignore[union-attr] old_state.last_reported = now # type: ignore[union-attr] - old_state.last_reported_timestamp = timestamp # type: ignore[union-attr] + old_state._cache["last_reported_timestamp"] = timestamp # type: ignore[union-attr] # noqa: SLF001 # Avoid creating an EventStateReportedData self._bus.async_fire_internal( # type: ignore[misc] EVENT_STATE_REPORTED, @@ -2400,10 +2432,11 @@ class Service: class ServiceCall: """Representation of a call to a service.""" - __slots__ = ("domain", "service", "data", "context", "return_response") + __slots__ = ("hass", "domain", "service", "data", "context", "return_response") def __init__( self, + hass: HomeAssistant, domain: str, service: str, data: dict[str, Any] | None = None, @@ -2411,6 +2444,7 @@ class ServiceCall: return_response: bool = False, ) -> None: """Initialize a service call.""" + self.hass = hass self.domain = domain self.service = service self.data = ReadOnlyDict(data or {}) @@ -2736,7 +2770,7 @@ class ServiceRegistry: processed_data = service_data service_call = ServiceCall( - domain, service, processed_data, context, return_response + self._hass, domain, service, processed_data, context, return_response ) self._hass.bus.async_fire_internal( @@ -2811,452 +2845,6 @@ class ServiceRegistry: return await self._hass.async_add_executor_job(target, service_call) -class _ComponentSet(set[str]): - """Set of loaded components. - - This set contains both top level components and platforms. - - Examples: - `light`, `switch`, `hue`, `mjpeg.camera`, `universal.media_player`, - `homeassistant.scene` - - The top level components set only contains the top level components. - - The all components set contains all components, including platform - based components. - - """ - - def __init__( - self, top_level_components: set[str], all_components: set[str] - ) -> None: - """Initialize the component set.""" - self._top_level_components = top_level_components - self._all_components = all_components - - def add(self, component: str) -> None: - """Add a component to the store.""" - if "." not in component: - self._top_level_components.add(component) - self._all_components.add(component) - else: - platform, _, domain = component.partition(".") - if domain in BASE_PLATFORMS: - self._all_components.add(platform) - return super().add(component) - - def remove(self, component: str) -> None: - """Remove a component from the store.""" - if "." 
in component: - raise ValueError("_ComponentSet does not support removing sub-components") - self._top_level_components.remove(component) - return super().remove(component) - - def discard(self, component: str) -> None: - """Remove a component from the store.""" - raise NotImplementedError("_ComponentSet does not support discard, use remove") - - -class Config: - """Configuration settings for Home Assistant.""" - - _store: Config._ConfigStore - - def __init__(self, hass: HomeAssistant, config_dir: str) -> None: - """Initialize a new config object.""" - # pylint: disable-next=import-outside-toplevel - from .components.zone import DEFAULT_RADIUS - - self.hass = hass - - self.latitude: float = 0 - self.longitude: float = 0 - - self.elevation: int = 0 - """Elevation (always in meters regardless of the unit system).""" - - self.radius: int = DEFAULT_RADIUS - """Radius of the Home Zone (always in meters regardless of the unit system).""" - - self.debug: bool = False - self.location_name: str = "Home" - self.time_zone: str = "UTC" - self.units: UnitSystem = METRIC_SYSTEM - self.internal_url: str | None = None - self.external_url: str | None = None - self.currency: str = "EUR" - self.country: str | None = None - self.language: str = "en" - - self.config_source: ConfigSource = ConfigSource.DEFAULT - - # If True, pip install is skipped for requirements on startup - self.skip_pip: bool = False - - # List of packages to skip when installing requirements on startup - self.skip_pip_packages: list[str] = [] - - # Set of loaded top level components - # This set is updated by _ComponentSet - # and should not be modified directly - self.top_level_components: set[str] = set() - - # Set of all loaded components including platform - # based components - self.all_components: set[str] = set() - - # Set of loaded components - self.components: _ComponentSet = _ComponentSet( - self.top_level_components, self.all_components - ) - - # API (HTTP) server configuration - self.api: ApiConfig | None = None - - # Directory that holds the configuration - self.config_dir: str = config_dir - - # List of allowed external dirs to access - self.allowlist_external_dirs: set[str] = set() - - # List of allowed external URLs that integrations may use - self.allowlist_external_urls: set[str] = set() - - # Dictionary of Media folders that integrations may use - self.media_dirs: dict[str, str] = {} - - # If Home Assistant is running in recovery mode - self.recovery_mode: bool = False - - # Use legacy template behavior - self.legacy_templates: bool = False - - # If Home Assistant is running in safe mode - self.safe_mode: bool = False - - def async_initialize(self) -> None: - """Finish initializing a config object. - - This must be called before the config object is used. - """ - self._store = self._ConfigStore(self.hass) - - def distance(self, lat: float, lon: float) -> float | None: - """Calculate distance from Home Assistant. - - Async friendly. - """ - return self.units.length( - location.distance(self.latitude, self.longitude, lat, lon), - UnitOfLength.METERS, - ) - - def path(self, *path: str) -> str: - """Generate path to the file within the configuration directory. - - Async friendly. 
- """ - return os.path.join(self.config_dir, *path) - - def is_allowed_external_url(self, url: str) -> bool: - """Check if an external URL is allowed.""" - parsed_url = f"{yarl.URL(url)!s}/" - - return any( - allowed - for allowed in self.allowlist_external_urls - if parsed_url.startswith(allowed) - ) - - def is_allowed_path(self, path: str) -> bool: - """Check if the path is valid for access from outside. - - This function does blocking I/O and should not be called from the event loop. - Use hass.async_add_executor_job to schedule it on the executor. - """ - assert path is not None - - thepath = pathlib.Path(path) - try: - # The file path does not have to exist (it's parent should) - if thepath.exists(): - thepath = thepath.resolve() - else: - thepath = thepath.parent.resolve() - except (FileNotFoundError, RuntimeError, PermissionError): - return False - - for allowed_path in self.allowlist_external_dirs: - try: - thepath.relative_to(allowed_path) - except ValueError: - pass - else: - return True - - return False - - def as_dict(self) -> dict[str, Any]: - """Create a dictionary representation of the configuration. - - Async friendly. - """ - allowlist_external_dirs = list(self.allowlist_external_dirs) - return { - "latitude": self.latitude, - "longitude": self.longitude, - "elevation": self.elevation, - "unit_system": self.units.as_dict(), - "location_name": self.location_name, - "time_zone": self.time_zone, - "components": list(self.components), - "config_dir": self.config_dir, - # legacy, backwards compat - "whitelist_external_dirs": allowlist_external_dirs, - "allowlist_external_dirs": allowlist_external_dirs, - "allowlist_external_urls": list(self.allowlist_external_urls), - "version": __version__, - "config_source": self.config_source, - "recovery_mode": self.recovery_mode, - "state": self.hass.state.value, - "external_url": self.external_url, - "internal_url": self.internal_url, - "currency": self.currency, - "country": self.country, - "language": self.language, - "safe_mode": self.safe_mode, - "debug": self.debug, - "radius": self.radius, - } - - async def async_set_time_zone(self, time_zone_str: str) -> None: - """Help to set the time zone.""" - if time_zone := await dt_util.async_get_time_zone(time_zone_str): - self.time_zone = time_zone_str - dt_util.set_default_time_zone(time_zone) - else: - raise ValueError(f"Received invalid time zone {time_zone_str}") - - def set_time_zone(self, time_zone_str: str) -> None: - """Set the time zone. - - This is a legacy method that should not be used in new code. - Use async_set_time_zone instead. - - It will be removed in Home Assistant 2025.6. 
- """ - # report is imported here to avoid a circular import - from .helpers.frame import report # pylint: disable=import-outside-toplevel - - report( - "set the time zone using set_time_zone instead of async_set_time_zone" - " which will stop working in Home Assistant 2025.6", - error_if_core=True, - error_if_integration=True, - ) - if time_zone := dt_util.get_time_zone(time_zone_str): - self.time_zone = time_zone_str - dt_util.set_default_time_zone(time_zone) - else: - raise ValueError(f"Received invalid time zone {time_zone_str}") - - async def _async_update( - self, - *, - source: ConfigSource, - latitude: float | None = None, - longitude: float | None = None, - elevation: int | None = None, - unit_system: str | None = None, - location_name: str | None = None, - time_zone: str | None = None, - external_url: str | UndefinedType | None = UNDEFINED, - internal_url: str | UndefinedType | None = UNDEFINED, - currency: str | None = None, - country: str | UndefinedType | None = UNDEFINED, - language: str | None = None, - radius: int | None = None, - ) -> None: - """Update the configuration from a dictionary.""" - self.config_source = source - if latitude is not None: - self.latitude = latitude - if longitude is not None: - self.longitude = longitude - if elevation is not None: - self.elevation = elevation - if unit_system is not None: - try: - self.units = get_unit_system(unit_system) - except ValueError: - self.units = METRIC_SYSTEM - if location_name is not None: - self.location_name = location_name - if time_zone is not None: - await self.async_set_time_zone(time_zone) - if external_url is not UNDEFINED: - self.external_url = external_url - if internal_url is not UNDEFINED: - self.internal_url = internal_url - if currency is not None: - self.currency = currency - if country is not UNDEFINED: - self.country = country - if language is not None: - self.language = language - if radius is not None: - self.radius = radius - - async def async_update(self, **kwargs: Any) -> None: - """Update the configuration from a dictionary.""" - # pylint: disable-next=import-outside-toplevel - from .config import ( - _raise_issue_if_historic_currency, - _raise_issue_if_no_country, - ) - - await self._async_update(source=ConfigSource.STORAGE, **kwargs) - await self._async_store() - self.hass.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE, kwargs) - - _raise_issue_if_historic_currency(self.hass, self.currency) - _raise_issue_if_no_country(self.hass, self.country) - - async def async_load(self) -> None: - """Load [homeassistant] core config.""" - if not (data := await self._store.async_load()): - return - - # In 2021.9 we fixed validation to disallow a path (because that's never - # correct) but this data still lives in storage, so we print a warning. - if data.get("external_url") and urlparse(data["external_url"]).path not in ( - "", - "/", - ): - _LOGGER.warning("Invalid external_url set. It's not allowed to have a path") - - if data.get("internal_url") and urlparse(data["internal_url"]).path not in ( - "", - "/", - ): - _LOGGER.warning("Invalid internal_url set. 
It's not allowed to have a path") - - await self._async_update( - source=ConfigSource.STORAGE, - latitude=data.get("latitude"), - longitude=data.get("longitude"), - elevation=data.get("elevation"), - unit_system=data.get("unit_system_v2"), - location_name=data.get("location_name"), - time_zone=data.get("time_zone"), - external_url=data.get("external_url", UNDEFINED), - internal_url=data.get("internal_url", UNDEFINED), - currency=data.get("currency"), - country=data.get("country"), - language=data.get("language"), - radius=data["radius"], - ) - - async def _async_store(self) -> None: - """Store [homeassistant] core config.""" - data = { - "latitude": self.latitude, - "longitude": self.longitude, - "elevation": self.elevation, - # We don't want any integrations to use the name of the unit system - # so we are using the private attribute here - "unit_system_v2": self.units._name, # noqa: SLF001 - "location_name": self.location_name, - "time_zone": self.time_zone, - "external_url": self.external_url, - "internal_url": self.internal_url, - "currency": self.currency, - "country": self.country, - "language": self.language, - "radius": self.radius, - } - await self._store.async_save(data) - - # Circular dependency prevents us from generating the class at top level - # pylint: disable-next=import-outside-toplevel - from .helpers.storage import Store - - class _ConfigStore(Store[dict[str, Any]]): - """Class to help storing Config data.""" - - def __init__(self, hass: HomeAssistant) -> None: - """Initialize storage class.""" - super().__init__( - hass, - CORE_STORAGE_VERSION, - CORE_STORAGE_KEY, - private=True, - atomic_writes=True, - minor_version=CORE_STORAGE_MINOR_VERSION, - ) - self._original_unit_system: str | None = None # from old store 1.1 - - async def _async_migrate_func( - self, - old_major_version: int, - old_minor_version: int, - old_data: dict[str, Any], - ) -> dict[str, Any]: - """Migrate to the new version.""" - - # pylint: disable-next=import-outside-toplevel - from .components.zone import DEFAULT_RADIUS - - data = old_data - if old_major_version == 1 and old_minor_version < 2: - # In 1.2, we remove support for "imperial", replaced by "us_customary" - # Using a new key to allow rollback - self._original_unit_system = data.get("unit_system") - data["unit_system_v2"] = self._original_unit_system - if data["unit_system_v2"] == _CONF_UNIT_SYSTEM_IMPERIAL: - data["unit_system_v2"] = _CONF_UNIT_SYSTEM_US_CUSTOMARY - if old_major_version == 1 and old_minor_version < 3: - # In 1.3, we add the key "language", initialize it from the - # owner account. - data["language"] = "en" - try: - owner = await self.hass.auth.async_get_owner() - if owner is not None: - # pylint: disable-next=import-outside-toplevel - from .components.frontend import storage as frontend_store - - # pylint: disable-next=import-outside-toplevel - from .helpers import config_validation as cv - - _, owner_data = await frontend_store.async_user_store( - self.hass, owner.id - ) - - if ( - "language" in owner_data - and "language" in owner_data["language"] - ): - with suppress(vol.InInvalid): - data["language"] = cv.language( - owner_data["language"]["language"] - ) - # pylint: disable-next=broad-except - except Exception: - _LOGGER.exception("Unexpected error during core config migration") - if old_major_version == 1 and old_minor_version < 4: - # In 1.4, we add the key "radius", initialize it with the default. 
- data.setdefault("radius", DEFAULT_RADIUS) - - if old_major_version > 1: - raise NotImplementedError - return data - - async def async_save(self, data: dict[str, Any]) -> None: - if self._original_unit_system: - data["unit_system"] = self._original_unit_system - return await super().async_save(data) - - # These can be removed if no deprecated constant are in this module anymore __getattr__ = functools.partial(check_if_deprecated_constant, module_globals=globals()) __dir__ = functools.partial( diff --git a/homeassistant/core_config.py b/homeassistant/core_config.py new file mode 100644 index 00000000000..38ca07e8f31 --- /dev/null +++ b/homeassistant/core_config.py @@ -0,0 +1,918 @@ +"""Module to help with parsing and generating configuration files.""" + +from __future__ import annotations + +from collections import OrderedDict +from collections.abc import Sequence +from contextlib import suppress +import enum +import logging +import os +import pathlib +from typing import TYPE_CHECKING, Any, Final +from urllib.parse import urlparse + +import voluptuous as vol +from webrtc_models import RTCConfiguration, RTCIceServer +import yarl + +from . import auth +from .auth import mfa_modules as auth_mfa_modules, providers as auth_providers +from .const import ( + ATTR_ASSUMED_STATE, + ATTR_FRIENDLY_NAME, + ATTR_HIDDEN, + BASE_PLATFORMS, + CONF_ALLOWLIST_EXTERNAL_DIRS, + CONF_ALLOWLIST_EXTERNAL_URLS, + CONF_AUTH_MFA_MODULES, + CONF_AUTH_PROVIDERS, + CONF_COUNTRY, + CONF_CURRENCY, + CONF_CUSTOMIZE, + CONF_CUSTOMIZE_DOMAIN, + CONF_CUSTOMIZE_GLOB, + CONF_DEBUG, + CONF_ELEVATION, + CONF_EXTERNAL_URL, + CONF_ID, + CONF_INTERNAL_URL, + CONF_LANGUAGE, + CONF_LATITUDE, + CONF_LEGACY_TEMPLATES, + CONF_LONGITUDE, + CONF_MEDIA_DIRS, + CONF_NAME, + CONF_PACKAGES, + CONF_RADIUS, + CONF_TEMPERATURE_UNIT, + CONF_TIME_ZONE, + CONF_TYPE, + CONF_UNIT_SYSTEM, + CONF_URL, + CONF_USERNAME, + EVENT_CORE_CONFIG_UPDATE, + LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, + UnitOfLength, + __version__, +) +from .core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from .generated.currencies import HISTORIC_CURRENCIES +from .helpers import config_validation as cv, issue_registry as ir +from .helpers.entity_values import EntityValues +from .helpers.frame import ReportBehavior, report_usage +from .helpers.storage import Store +from .helpers.typing import UNDEFINED, UndefinedType +from .util import dt as dt_util, location +from .util.hass_dict import HassKey +from .util.package import is_docker_env +from .util.unit_system import ( + _CONF_UNIT_SYSTEM_IMPERIAL, + _CONF_UNIT_SYSTEM_METRIC, + _CONF_UNIT_SYSTEM_US_CUSTOMARY, + METRIC_SYSTEM, + UnitSystem, + get_unit_system, +) + +# Typing imports that create a circular dependency +if TYPE_CHECKING: + from .components.http import ApiConfig + +_LOGGER = logging.getLogger(__name__) + +DATA_CUSTOMIZE: HassKey[EntityValues] = HassKey("hass_customize") + +CONF_CREDENTIAL: Final = "credential" +CONF_ICE_SERVERS: Final = "ice_servers" +CONF_WEBRTC: Final = "webrtc" + +CORE_STORAGE_KEY = "core.config" +CORE_STORAGE_VERSION = 1 +CORE_STORAGE_MINOR_VERSION = 4 + + +class ConfigSource(enum.StrEnum): + """Source of core configuration.""" + + DEFAULT = "default" + DISCOVERED = "discovered" + STORAGE = "storage" + YAML = "yaml" + + +def _no_duplicate_auth_provider( + configs: Sequence[dict[str, Any]], +) -> Sequence[dict[str, Any]]: + """No duplicate auth provider config allowed in a list. + + Each type of auth provider can only have one config without optional id. 
+ Unique id is required if same type of auth provider used multiple times. + """ + config_keys: set[tuple[str, str | None]] = set() + for config in configs: + key = (config[CONF_TYPE], config.get(CONF_ID)) + if key in config_keys: + raise vol.Invalid( + f"Duplicate auth provider {config[CONF_TYPE]} found. " + "Please add unique IDs " + "if you want to have the same auth provider twice" + ) + config_keys.add(key) + return configs + + +def _no_duplicate_auth_mfa_module( + configs: Sequence[dict[str, Any]], +) -> Sequence[dict[str, Any]]: + """No duplicate auth mfa module item allowed in a list. + + Each type of mfa module can only have one config without optional id. + A global unique id is required if same type of mfa module used multiple + times. + Note: this is different than auth provider + """ + config_keys: set[str] = set() + for config in configs: + key = config.get(CONF_ID, config[CONF_TYPE]) + if key in config_keys: + raise vol.Invalid( + f"Duplicate mfa module {config[CONF_TYPE]} found. " + "Please add unique IDs " + "if you want to have the same mfa module twice" + ) + config_keys.add(key) + return configs + + +def _filter_bad_internal_external_urls(conf: dict) -> dict: + """Filter internal/external URL with a path.""" + for key in CONF_INTERNAL_URL, CONF_EXTERNAL_URL: + if key in conf and urlparse(conf[key]).path not in ("", "/"): + # We warn but do not fix, because if this was incorrectly configured, + # adjusting this value might impact security. + _LOGGER.warning( + "Invalid %s set. It's not allowed to have a path (/bla)", key + ) + + return conf + + +# Schema for all packages element +_PACKAGES_CONFIG_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list)}) + +# Schema for individual package definition +_PACKAGE_DEFINITION_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list, None)}) + +_CUSTOMIZE_DICT_SCHEMA = vol.Schema( + { + vol.Optional(ATTR_FRIENDLY_NAME): cv.string, + vol.Optional(ATTR_HIDDEN): cv.boolean, + vol.Optional(ATTR_ASSUMED_STATE): cv.boolean, + }, + extra=vol.ALLOW_EXTRA, +) + +_CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( + { + vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema( + {cv.entity_id: _CUSTOMIZE_DICT_SCHEMA} + ), + vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}): vol.Schema( + {cv.string: _CUSTOMIZE_DICT_SCHEMA} + ), + vol.Optional(CONF_CUSTOMIZE_GLOB, default={}): vol.Schema( + {cv.string: _CUSTOMIZE_DICT_SCHEMA} + ), + } +) + + +def _raise_issue_if_imperial_unit_system( + hass: HomeAssistant, config: dict[str, Any] +) -> dict[str, Any]: + if config.get(CONF_UNIT_SYSTEM) == _CONF_UNIT_SYSTEM_IMPERIAL: + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + "imperial_unit_system", + is_fixable=False, + learn_more_url="homeassistant://config/general", + severity=ir.IssueSeverity.WARNING, + translation_key="imperial_unit_system", + ) + config[CONF_UNIT_SYSTEM] = _CONF_UNIT_SYSTEM_US_CUSTOMARY + else: + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "imperial_unit_system") + + return config + + +def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: + if currency not in HISTORIC_CURRENCIES: + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "historic_currency") + return + + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + "historic_currency", + is_fixable=False, + learn_more_url="homeassistant://config/general", + severity=ir.IssueSeverity.WARNING, + translation_key="historic_currency", + translation_placeholders={"currency": currency}, + ) + + +def _raise_issue_if_no_country(hass: HomeAssistant, country: str | None) 
-> None: + if country is not None: + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "country_not_configured") + return + + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + "country_not_configured", + is_fixable=False, + learn_more_url="homeassistant://config/general", + severity=ir.IssueSeverity.WARNING, + translation_key="country_not_configured", + ) + + +def _validate_currency(data: Any) -> Any: + try: + return cv.currency(data) + except vol.InInvalid: + with suppress(vol.InInvalid): + return cv.historic_currency(data) + raise + + +def _validate_stun_or_turn_url(value: Any) -> str: + """Validate an URL.""" + url_in = str(value) + url = urlparse(url_in) + + if url.scheme not in ("stun", "stuns", "turn", "turns"): + raise vol.Invalid("invalid url") + return url_in + + +CORE_CONFIG_SCHEMA = vol.All( + _CUSTOMIZE_CONFIG_SCHEMA.extend( + { + CONF_NAME: vol.Coerce(str), + CONF_LATITUDE: cv.latitude, + CONF_LONGITUDE: cv.longitude, + CONF_ELEVATION: vol.Coerce(int), + CONF_RADIUS: cv.positive_int, + vol.Remove(CONF_TEMPERATURE_UNIT): cv.temperature_unit, + CONF_UNIT_SYSTEM: vol.Any( + _CONF_UNIT_SYSTEM_METRIC, + _CONF_UNIT_SYSTEM_US_CUSTOMARY, + _CONF_UNIT_SYSTEM_IMPERIAL, + ), + CONF_TIME_ZONE: cv.time_zone, + vol.Optional(CONF_INTERNAL_URL): cv.url, + vol.Optional(CONF_EXTERNAL_URL): cv.url, + vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All( + cv.ensure_list, [vol.IsDir()] + ), + vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All( + cv.ensure_list, [vol.IsDir()] + ), + vol.Optional(CONF_ALLOWLIST_EXTERNAL_URLS): vol.All( + cv.ensure_list, [cv.url] + ), + vol.Optional(CONF_PACKAGES, default={}): _PACKAGES_CONFIG_SCHEMA, + vol.Optional(CONF_AUTH_PROVIDERS): vol.All( + cv.ensure_list, + [ + auth_providers.AUTH_PROVIDER_SCHEMA.extend( + { + CONF_TYPE: vol.NotIn( + ["insecure_example"], + ( + "The insecure_example auth provider" + " is for testing only." + ), + ) + } + ) + ], + _no_duplicate_auth_provider, + ), + vol.Optional(CONF_AUTH_MFA_MODULES): vol.All( + cv.ensure_list, + [ + auth_mfa_modules.MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend( + { + CONF_TYPE: vol.NotIn( + ["insecure_example"], + "The insecure_example mfa module is for testing only.", + ) + } + ) + ], + _no_duplicate_auth_mfa_module, + ), + vol.Optional(CONF_MEDIA_DIRS): cv.schema_with_slug_keys(vol.IsDir()), + vol.Remove(CONF_LEGACY_TEMPLATES): cv.boolean, + vol.Optional(CONF_CURRENCY): _validate_currency, + vol.Optional(CONF_COUNTRY): cv.country, + vol.Optional(CONF_LANGUAGE): cv.language, + vol.Optional(CONF_DEBUG): cv.boolean, + vol.Optional(CONF_WEBRTC): vol.Schema( + { + vol.Required(CONF_ICE_SERVERS): vol.All( + cv.ensure_list, + [ + vol.Schema( + { + vol.Required(CONF_URL): vol.All( + cv.ensure_list, [_validate_stun_or_turn_url] + ), + vol.Optional(CONF_USERNAME): cv.string, + vol.Optional(CONF_CREDENTIAL): cv.string, + } + ) + ], + ) + } + ), + } + ), + _filter_bad_internal_external_urls, +) + + +async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> None: + """Process the [homeassistant] section from the configuration. + + This method is a coroutine. + """ + # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir + # so we need to run it in an executor job. + config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) + + # Check if we need to raise an issue for imperial unit system + config = _raise_issue_if_imperial_unit_system(hass, config) + + # Only load auth during startup. 
+ if not hasattr(hass, "auth"): + if (auth_conf := config.get(CONF_AUTH_PROVIDERS)) is None: + auth_conf = [{"type": "homeassistant"}] + + mfa_conf = config.get( + CONF_AUTH_MFA_MODULES, + [{"type": "totp", "id": "totp", "name": "Authenticator app"}], + ) + + setattr( + hass, "auth", await auth.auth_manager_from_config(hass, auth_conf, mfa_conf) + ) + + await hass.config.async_load() + + hac = hass.config + + if any( + k in config + for k in ( + CONF_COUNTRY, + CONF_CURRENCY, + CONF_ELEVATION, + CONF_EXTERNAL_URL, + CONF_INTERNAL_URL, + CONF_LANGUAGE, + CONF_LATITUDE, + CONF_LONGITUDE, + CONF_NAME, + CONF_RADIUS, + CONF_TIME_ZONE, + CONF_UNIT_SYSTEM, + ) + ): + hac.config_source = ConfigSource.YAML + + for key, attr in ( + (CONF_COUNTRY, "country"), + (CONF_CURRENCY, "currency"), + (CONF_ELEVATION, "elevation"), + (CONF_EXTERNAL_URL, "external_url"), + (CONF_INTERNAL_URL, "internal_url"), + (CONF_LANGUAGE, "language"), + (CONF_LATITUDE, "latitude"), + (CONF_LONGITUDE, "longitude"), + (CONF_MEDIA_DIRS, "media_dirs"), + (CONF_NAME, "location_name"), + (CONF_RADIUS, "radius"), + ): + if key in config: + setattr(hac, attr, config[key]) + + if config.get(CONF_DEBUG): + hac.debug = True + + if CONF_WEBRTC in config: + hac.webrtc.ice_servers = [ + RTCIceServer( + server[CONF_URL], + server.get(CONF_USERNAME), + server.get(CONF_CREDENTIAL), + ) + for server in config[CONF_WEBRTC][CONF_ICE_SERVERS] + ] + + _raise_issue_if_historic_currency(hass, hass.config.currency) + _raise_issue_if_no_country(hass, hass.config.country) + + if CONF_TIME_ZONE in config: + await hac.async_set_time_zone(config[CONF_TIME_ZONE]) + + if CONF_MEDIA_DIRS not in config: + if is_docker_env(): + hac.media_dirs = {"local": "/media"} + else: + hac.media_dirs = {"local": hass.config.path("media")} + + # Init whitelist external dir + hac.allowlist_external_dirs = {hass.config.path("www"), *hac.media_dirs.values()} + if CONF_ALLOWLIST_EXTERNAL_DIRS in config: + hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS])) + + elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config: + _LOGGER.warning( + "Key %s has been replaced with %s. Please update your config", + LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, + CONF_ALLOWLIST_EXTERNAL_DIRS, + ) + hac.allowlist_external_dirs.update( + set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS]) + ) + + # Init whitelist external URL list – make sure to add / to every URL that doesn't + # already have it so that we can properly test "path ownership" + if CONF_ALLOWLIST_EXTERNAL_URLS in config: + hac.allowlist_external_urls.update( + url if url.endswith("/") else f"{url}/" + for url in config[CONF_ALLOWLIST_EXTERNAL_URLS] + ) + + # Customize + cust_exact = dict(config[CONF_CUSTOMIZE]) + cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN]) + cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB]) + + for name, pkg in config[CONF_PACKAGES].items(): + if (pkg_cust := pkg.get(HOMEASSISTANT_DOMAIN)) is None: + continue + + try: + pkg_cust = _CUSTOMIZE_CONFIG_SCHEMA(pkg_cust) + except vol.Invalid: + _LOGGER.warning("Package %s contains invalid customize", name) + continue + + cust_exact.update(pkg_cust[CONF_CUSTOMIZE]) + cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN]) + cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB]) + + hass.data[DATA_CUSTOMIZE] = EntityValues(cust_exact, cust_domain, cust_glob) + + if CONF_UNIT_SYSTEM in config: + hac.units = get_unit_system(config[CONF_UNIT_SYSTEM]) + + +class _ComponentSet(set[str]): + """Set of loaded components. 
+ + This set contains both top level components and platforms. + + Examples: + `light`, `switch`, `hue`, `mjpeg.camera`, `universal.media_player`, + `homeassistant.scene` + + The top level components set only contains the top level components. + + The all components set contains all components, including platform + based components. + + """ + + def __init__( + self, top_level_components: set[str], all_components: set[str] + ) -> None: + """Initialize the component set.""" + self._top_level_components = top_level_components + self._all_components = all_components + + def add(self, component: str) -> None: + """Add a component to the store.""" + if "." not in component: + self._top_level_components.add(component) + self._all_components.add(component) + else: + platform, _, domain = component.partition(".") + if domain in BASE_PLATFORMS: + self._all_components.add(platform) + return super().add(component) + + def remove(self, component: str) -> None: + """Remove a component from the store.""" + if "." in component: + raise ValueError("_ComponentSet does not support removing sub-components") + self._top_level_components.remove(component) + return super().remove(component) + + def discard(self, component: str) -> None: + """Remove a component from the store.""" + raise NotImplementedError("_ComponentSet does not support discard, use remove") + + +class Config: + """Configuration settings for Home Assistant.""" + + _store: Config._ConfigStore + + def __init__(self, hass: HomeAssistant, config_dir: str) -> None: + """Initialize a new config object.""" + # pylint: disable-next=import-outside-toplevel + from .components.zone import DEFAULT_RADIUS + + self.hass = hass + + self.latitude: float = 0 + self.longitude: float = 0 + + self.elevation: int = 0 + """Elevation (always in meters regardless of the unit system).""" + + self.radius: int = DEFAULT_RADIUS + """Radius of the Home Zone (always in meters regardless of the unit system).""" + + self.debug: bool = False + self.location_name: str = "Home" + self.time_zone: str = "UTC" + self.units: UnitSystem = METRIC_SYSTEM + self.internal_url: str | None = None + self.external_url: str | None = None + self.currency: str = "EUR" + self.country: str | None = None + self.language: str = "en" + + self.config_source: ConfigSource = ConfigSource.DEFAULT + + # If True, pip install is skipped for requirements on startup + self.skip_pip: bool = False + + # List of packages to skip when installing requirements on startup + self.skip_pip_packages: list[str] = [] + + # Set of loaded top level components + # This set is updated by _ComponentSet + # and should not be modified directly + self.top_level_components: set[str] = set() + + # Set of all loaded components including platform + # based components + self.all_components: set[str] = set() + + # Set of loaded components + self.components: _ComponentSet = _ComponentSet( + self.top_level_components, self.all_components + ) + + # API (HTTP) server configuration + self.api: ApiConfig | None = None + + # Directory that holds the configuration + self.config_dir: str = config_dir + + # List of allowed external dirs to access + self.allowlist_external_dirs: set[str] = set() + + # List of allowed external URLs that integrations may use + self.allowlist_external_urls: set[str] = set() + + # Dictionary of Media folders that integrations may use + self.media_dirs: dict[str, str] = {} + + # If Home Assistant is running in recovery mode + self.recovery_mode: bool = False + + # Use legacy template behavior + 
self.legacy_templates: bool = False + + # If Home Assistant is running in safe mode + self.safe_mode: bool = False + + self.webrtc = RTCConfiguration() + + def async_initialize(self) -> None: + """Finish initializing a config object. + + This must be called before the config object is used. + """ + self._store = self._ConfigStore(self.hass) + + def distance(self, lat: float, lon: float) -> float | None: + """Calculate distance from Home Assistant. + + Async friendly. + """ + return self.units.length( + location.distance(self.latitude, self.longitude, lat, lon), + UnitOfLength.METERS, + ) + + def path(self, *path: str) -> str: + """Generate path to the file within the configuration directory. + + Async friendly. + """ + return os.path.join(self.config_dir, *path) + + def is_allowed_external_url(self, url: str) -> bool: + """Check if an external URL is allowed.""" + parsed_url = f"{yarl.URL(url)!s}/" + + return any( + allowed + for allowed in self.allowlist_external_urls + if parsed_url.startswith(allowed) + ) + + def is_allowed_path(self, path: str) -> bool: + """Check if the path is valid for access from outside. + + This function does blocking I/O and should not be called from the event loop. + Use hass.async_add_executor_job to schedule it on the executor. + """ + assert path is not None + + thepath = pathlib.Path(path) + try: + # The file path does not have to exist (it's parent should) + if thepath.exists(): + thepath = thepath.resolve() + else: + thepath = thepath.parent.resolve() + except (FileNotFoundError, RuntimeError, PermissionError): + return False + + for allowed_path in self.allowlist_external_dirs: + try: + thepath.relative_to(allowed_path) + except ValueError: + pass + else: + return True + + return False + + def as_dict(self) -> dict[str, Any]: + """Return a dictionary representation of the configuration. + + Async friendly. + """ + allowlist_external_dirs = list(self.allowlist_external_dirs) + return { + "allowlist_external_dirs": allowlist_external_dirs, + "allowlist_external_urls": list(self.allowlist_external_urls), + "components": list(self.components), + "config_dir": self.config_dir, + "config_source": self.config_source, + "country": self.country, + "currency": self.currency, + "debug": self.debug, + "elevation": self.elevation, + "external_url": self.external_url, + "internal_url": self.internal_url, + "language": self.language, + "latitude": self.latitude, + "location_name": self.location_name, + "longitude": self.longitude, + "radius": self.radius, + "recovery_mode": self.recovery_mode, + "safe_mode": self.safe_mode, + "state": self.hass.state.value, + "time_zone": self.time_zone, + "unit_system": self.units.as_dict(), + "version": __version__, + # legacy, backwards compat + "whitelist_external_dirs": allowlist_external_dirs, + } + + async def async_set_time_zone(self, time_zone_str: str) -> None: + """Help to set the time zone.""" + if time_zone := await dt_util.async_get_time_zone(time_zone_str): + self.time_zone = time_zone_str + dt_util.set_default_time_zone(time_zone) + else: + raise ValueError(f"Received invalid time zone {time_zone_str}") + + def set_time_zone(self, time_zone_str: str) -> None: + """Set the time zone. + + This is a legacy method that should not be used in new code. + Use async_set_time_zone instead. + + It will be removed in Home Assistant 2025.6. 
+ """ + report_usage( + "sets the time zone using set_time_zone instead of async_set_time_zone", + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.ERROR, + breaks_in_ha_version="2025.6", + ) + if time_zone := dt_util.get_time_zone(time_zone_str): + self.time_zone = time_zone_str + dt_util.set_default_time_zone(time_zone) + else: + raise ValueError(f"Received invalid time zone {time_zone_str}") + + async def _async_update( + self, + *, + country: str | UndefinedType | None = UNDEFINED, + currency: str | None = None, + elevation: int | None = None, + external_url: str | UndefinedType | None = UNDEFINED, + internal_url: str | UndefinedType | None = UNDEFINED, + language: str | None = None, + latitude: float | None = None, + location_name: str | None = None, + longitude: float | None = None, + radius: int | None = None, + source: ConfigSource, + time_zone: str | None = None, + unit_system: str | None = None, + ) -> None: + """Update the configuration from a dictionary.""" + self.config_source = source + if country is not UNDEFINED: + self.country = country + if currency is not None: + self.currency = currency + if elevation is not None: + self.elevation = elevation + if external_url is not UNDEFINED: + self.external_url = external_url + if internal_url is not UNDEFINED: + self.internal_url = internal_url + if language is not None: + self.language = language + if latitude is not None: + self.latitude = latitude + if location_name is not None: + self.location_name = location_name + if longitude is not None: + self.longitude = longitude + if radius is not None: + self.radius = radius + if time_zone is not None: + await self.async_set_time_zone(time_zone) + if unit_system is not None: + try: + self.units = get_unit_system(unit_system) + except ValueError: + self.units = METRIC_SYSTEM + + async def async_update(self, **kwargs: Any) -> None: + """Update the configuration from a dictionary.""" + await self._async_update(source=ConfigSource.STORAGE, **kwargs) + await self._async_store() + self.hass.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE, kwargs) + + _raise_issue_if_historic_currency(self.hass, self.currency) + _raise_issue_if_no_country(self.hass, self.country) + + async def async_load(self) -> None: + """Load [homeassistant] core config.""" + if not (data := await self._store.async_load()): + return + + # In 2021.9 we fixed validation to disallow a path (because that's never + # correct) but this data still lives in storage, so we print a warning. + if data.get("external_url") and urlparse(data["external_url"]).path not in ( + "", + "/", + ): + _LOGGER.warning("Invalid external_url set. It's not allowed to have a path") + + if data.get("internal_url") and urlparse(data["internal_url"]).path not in ( + "", + "/", + ): + _LOGGER.warning("Invalid internal_url set. 
It's not allowed to have a path") + + await self._async_update( + source=ConfigSource.STORAGE, + latitude=data.get("latitude"), + longitude=data.get("longitude"), + elevation=data.get("elevation"), + unit_system=data.get("unit_system_v2"), + location_name=data.get("location_name"), + time_zone=data.get("time_zone"), + external_url=data.get("external_url", UNDEFINED), + internal_url=data.get("internal_url", UNDEFINED), + currency=data.get("currency"), + country=data.get("country"), + language=data.get("language"), + radius=data["radius"], + ) + + async def _async_store(self) -> None: + """Store [homeassistant] core config.""" + data = { + "latitude": self.latitude, + "longitude": self.longitude, + "elevation": self.elevation, + # We don't want any integrations to use the name of the unit system + # so we are using the private attribute here + "unit_system_v2": self.units._name, # noqa: SLF001 + "location_name": self.location_name, + "time_zone": self.time_zone, + "external_url": self.external_url, + "internal_url": self.internal_url, + "currency": self.currency, + "country": self.country, + "language": self.language, + "radius": self.radius, + } + await self._store.async_save(data) + + class _ConfigStore(Store[dict[str, Any]]): + """Class to help storing Config data.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize storage class.""" + super().__init__( + hass, + CORE_STORAGE_VERSION, + CORE_STORAGE_KEY, + private=True, + atomic_writes=True, + minor_version=CORE_STORAGE_MINOR_VERSION, + ) + self._original_unit_system: str | None = None # from old store 1.1 + + async def _async_migrate_func( + self, + old_major_version: int, + old_minor_version: int, + old_data: dict[str, Any], + ) -> dict[str, Any]: + """Migrate to the new version.""" + + # pylint: disable-next=import-outside-toplevel + from .components.zone import DEFAULT_RADIUS + + data = old_data + if old_major_version == 1 and old_minor_version < 2: + # In 1.2, we remove support for "imperial", replaced by "us_customary" + # Using a new key to allow rollback + self._original_unit_system = data.get("unit_system") + data["unit_system_v2"] = self._original_unit_system + if data["unit_system_v2"] == _CONF_UNIT_SYSTEM_IMPERIAL: + data["unit_system_v2"] = _CONF_UNIT_SYSTEM_US_CUSTOMARY + if old_major_version == 1 and old_minor_version < 3: + # In 1.3, we add the key "language", initialize it from the + # owner account. + data["language"] = "en" + try: + owner = await self.hass.auth.async_get_owner() + if owner is not None: + # pylint: disable-next=import-outside-toplevel + from .components.frontend import storage as frontend_store + + _, owner_data = await frontend_store.async_user_store( + self.hass, owner.id + ) + + if ( + "language" in owner_data + and "language" in owner_data["language"] + ): + with suppress(vol.Invalid): + data["language"] = cv.language( + owner_data["language"]["language"] + ) + # pylint: disable-next=broad-except + except Exception: + _LOGGER.exception("Unexpected error during core config migration") + if old_major_version == 1 and old_minor_version < 4: + # In 1.4, we add the key "radius", initialize it with the default.
+ data.setdefault("radius", DEFAULT_RADIUS) + + if old_major_version > 1: + raise NotImplementedError + return data + + async def async_save(self, data: dict[str, Any]) -> None: + if self._original_unit_system: + data["unit_system"] = self._original_unit_system + return await super().async_save(data) diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index b8e8f269b82..6df77443e7e 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -10,7 +10,6 @@ from contextlib import suppress import copy from dataclasses import dataclass from enum import StrEnum -from functools import partial import logging from types import MappingProxyType from typing import Any, Generic, Required, TypedDict, cast @@ -20,13 +19,7 @@ import voluptuous as vol from .core import HomeAssistant, callback from .exceptions import HomeAssistantError -from .helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) -from .helpers.frame import report +from .helpers.frame import ReportBehavior, report_usage from .loader import async_suggest_report_issue from .util import uuid as uuid_util @@ -46,26 +39,6 @@ class FlowResultType(StrEnum): MENU = "menu" -# RESULT_TYPE_* is deprecated, to be removed in 2025.1 -_DEPRECATED_RESULT_TYPE_FORM = DeprecatedConstantEnum(FlowResultType.FORM, "2025.1") -_DEPRECATED_RESULT_TYPE_CREATE_ENTRY = DeprecatedConstantEnum( - FlowResultType.CREATE_ENTRY, "2025.1" -) -_DEPRECATED_RESULT_TYPE_ABORT = DeprecatedConstantEnum(FlowResultType.ABORT, "2025.1") -_DEPRECATED_RESULT_TYPE_EXTERNAL_STEP = DeprecatedConstantEnum( - FlowResultType.EXTERNAL_STEP, "2025.1" -) -_DEPRECATED_RESULT_TYPE_EXTERNAL_STEP_DONE = DeprecatedConstantEnum( - FlowResultType.EXTERNAL_STEP_DONE, "2025.1" -) -_DEPRECATED_RESULT_TYPE_SHOW_PROGRESS = DeprecatedConstantEnum( - FlowResultType.SHOW_PROGRESS, "2025.1" -) -_DEPRECATED_RESULT_TYPE_SHOW_PROGRESS_DONE = DeprecatedConstantEnum( - FlowResultType.SHOW_PROGRESS_DONE, "2025.1" -) -_DEPRECATED_RESULT_TYPE_MENU = DeprecatedConstantEnum(FlowResultType.MENU, "2025.1") - # Event that is fired when a flow is progressed via external or progress source. 
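The `_ConfigStore._async_migrate_func` above follows the usual `Store` migration pattern: each `old_minor_version` check upgrades data written under an older schema, and an unknown major version raises `NotImplementedError`. Below is a condensed, synchronous sketch of that pattern; the owner-language lookup is omitted, and the `DEFAULT_RADIUS` value of 100 is an assumption standing in for the zone component's default.

```python
# Condensed sketch of the minor-version migration steps shown above.
# This is a stand-in, not Home Assistant code; DEFAULT_RADIUS is assumed.
from typing import Any

_CONF_UNIT_SYSTEM_IMPERIAL = "imperial"
_CONF_UNIT_SYSTEM_US_CUSTOMARY = "us_customary"
DEFAULT_RADIUS = 100  # assumed zone default, in meters


def migrate(old_major: int, old_minor: int, data: dict[str, Any]) -> dict[str, Any]:
    """Apply every migration step the stored data has not seen yet."""
    if old_major == 1 and old_minor < 2:
        # 1.2: "imperial" became "us_customary"; a new key keeps the old
        # value available for rollback.
        data["unit_system_v2"] = data.get("unit_system")
        if data["unit_system_v2"] == _CONF_UNIT_SYSTEM_IMPERIAL:
            data["unit_system_v2"] = _CONF_UNIT_SYSTEM_US_CUSTOMARY
    if old_major == 1 and old_minor < 3:
        # 1.3: introduce the "language" key.
        data.setdefault("language", "en")
    if old_major == 1 and old_minor < 4:
        # 1.4: introduce the "radius" key.
        data.setdefault("radius", DEFAULT_RADIUS)
    if old_major > 1:
        raise NotImplementedError
    return data


print(migrate(1, 1, {"unit_system": "imperial"}))
# {'unit_system': 'imperial', 'unit_system_v2': 'us_customary', 'language': 'en', 'radius': 100}
```

Each step is guarded independently, so data stored at any older minor version picks up exactly the steps it is missing.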
EVENT_DATA_ENTRY_FLOW_PROGRESSED = "data_entry_flow_progressed" @@ -87,7 +60,10 @@ STEP_ID_OPTIONAL_STEPS = { } -_FlowResultT = TypeVar("_FlowResultT", bound="FlowResult[Any]", default="FlowResult") +_FlowContextT = TypeVar("_FlowContextT", bound="FlowContext", default="FlowContext") +_FlowResultT = TypeVar( + "_FlowResultT", bound="FlowResult[Any, Any]", default="FlowResult" +) _HandlerT = TypeVar("_HandlerT", default=str) @@ -123,6 +99,7 @@ class InvalidData(vol.Invalid): schema_errors: dict[str, Any], **kwargs: Any, ) -> None: + """Initialize an invalid data exception.""" super().__init__(message, path, error_message, **kwargs) self.schema_errors = schema_errors @@ -139,13 +116,20 @@ class AbortFlow(FlowError): self.description_placeholders = description_placeholders -class FlowResult(TypedDict, Generic[_HandlerT], total=False): +class FlowContext(TypedDict, total=False): + """Typed context dict.""" + + show_advanced_options: bool + source: str + + +class FlowResult(TypedDict, Generic[_FlowContextT, _HandlerT], total=False): """Typed result dict.""" - context: dict[str, Any] + context: _FlowContextT data_schema: vol.Schema | None data: Mapping[str, Any] - description_placeholders: Mapping[str, str | None] | None + description_placeholders: Mapping[str, str] | None description: str | None errors: dict[str, str] | None extra: str @@ -189,7 +173,7 @@ def _map_error_to_schema_errors( schema_errors[path_part_str] = error.error_message -class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): +class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): """Manage all the flows that are in progress.""" _flow_result: type[_FlowResultT] = FlowResult # type: ignore[assignment] @@ -201,12 +185,14 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): """Initialize the flow manager.""" self.hass = hass self._preview: set[_HandlerT] = set() - self._progress: dict[str, FlowHandler[_FlowResultT, _HandlerT]] = {} + self._progress: dict[ + str, FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] + ] = {} self._handler_progress_index: defaultdict[ - _HandlerT, set[FlowHandler[_FlowResultT, _HandlerT]] + _HandlerT, set[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]] ] = defaultdict(set) self._init_data_process_index: defaultdict[ - type, set[FlowHandler[_FlowResultT, _HandlerT]] + type, set[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]] ] = defaultdict(set) @abc.abstractmethod @@ -214,9 +200,9 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): self, handler_key: _HandlerT, *, - context: dict[str, Any] | None = None, + context: _FlowContextT | None = None, data: dict[str, Any] | None = None, - ) -> FlowHandler[_FlowResultT, _HandlerT]: + ) -> FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]: """Create a flow for specified handler. Handler key is the domain of the component that we want to set up. @@ -224,34 +210,23 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): @abc.abstractmethod async def async_finish_flow( - self, flow: FlowHandler[_FlowResultT, _HandlerT], result: _FlowResultT + self, + flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], + result: _FlowResultT, ) -> _FlowResultT: - """Finish a data entry flow.""" + """Finish a data entry flow. + + This method is called when a flow step returns FlowResultType.ABORT or + FlowResultType.CREATE_ENTRY. 
+ """ async def async_post_init( - self, flow: FlowHandler[_FlowResultT, _HandlerT], result: _FlowResultT + self, + flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], + result: _FlowResultT, ) -> None: """Entry has finished executing its first step asynchronously.""" - @callback - def async_has_matching_flow( - self, handler: _HandlerT, match_context: dict[str, Any], data: Any - ) -> bool: - """Check if an existing matching flow is in progress. - - A flow with the same handler, context, and data. - - If match_context is passed, only return flows with a context that is a - superset of match_context. - """ - if not (flows := self._handler_progress_index.get(handler)): - return False - match_items = match_context.items() - for progress in flows: - if match_items <= progress.context.items() and progress.init_data == data: - return True - return False - @callback def async_get(self, flow_id: str) -> _FlowResultT: """Return a flow in progress as a partial FlowResult.""" @@ -292,18 +267,18 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): ) -> list[_FlowResultT]: """Return flows in progress init matching by data type as a partial FlowResult.""" return self._async_flow_handler_to_flow_result( - ( + [ progress for progress in self._init_data_process_index.get(init_data_type, ()) if matcher(progress.init_data) - ), + ], include_uninitialized, ) @callback def _async_progress_by_handler( self, handler: _HandlerT, match_context: dict[str, Any] | None - ) -> list[FlowHandler[_FlowResultT, _HandlerT]]: + ) -> list[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]]: """Return the flows in progress by handler. If match_context is specified, only return flows with a context that @@ -322,12 +297,12 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): self, handler: _HandlerT, *, - context: dict[str, Any] | None = None, + context: _FlowContextT | None = None, data: Any = None, ) -> _FlowResultT: """Start a data entry flow.""" if context is None: - context = {} + context = cast(_FlowContextT, {}) flow = await self.async_create_flow(handler, context=context, data=data) if not flow: raise UnknownFlow("Flow was not created") @@ -467,7 +442,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): @callback def _async_add_flow_progress( - self, flow: FlowHandler[_FlowResultT, _HandlerT] + self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] ) -> None: """Add a flow to in progress.""" if flow.init_data is not None: @@ -477,7 +452,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): @callback def _async_remove_flow_from_index( - self, flow: FlowHandler[_FlowResultT, _HandlerT] + self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] ) -> None: """Remove a flow from in progress.""" if flow.init_data is not None: @@ -504,7 +479,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): async def _async_handle_step( self, - flow: FlowHandler[_FlowResultT, _HandlerT], + flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], step_id: str, user_input: dict | BaseServiceInfo | None, ) -> _FlowResultT: @@ -529,12 +504,10 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): if not isinstance(result["type"], FlowResultType): result["type"] = FlowResultType(result["type"]) # type: ignore[unreachable] - report( - ( - "does not use FlowResultType enum for data entry flow result type. 
" - "This is deprecated and will stop working in Home Assistant 2025.1" - ), - error_if_core=False, + report_usage( + "does not use FlowResultType enum for data entry flow result type", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.1", ) if ( @@ -581,7 +554,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): return result def _raise_if_step_does_not_exist( - self, flow: FlowHandler[_FlowResultT, _HandlerT], step_id: str + self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], step_id: str ) -> None: """Raise if the step does not exist.""" method = f"async_step_{step_id}" @@ -593,7 +566,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): ) async def _async_setup_preview( - self, flow: FlowHandler[_FlowResultT, _HandlerT] + self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] ) -> None: """Set up preview for a flow handler.""" if flow.handler not in self._preview: @@ -603,7 +576,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): @callback def _async_flow_handler_to_flow_result( self, - flows: Iterable[FlowHandler[_FlowResultT, _HandlerT]], + flows: Iterable[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]], include_uninitialized: bool, ) -> list[_FlowResultT]: """Convert a list of FlowHandler to a partial FlowResult that can be serialized.""" @@ -625,7 +598,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): ] -class FlowHandler(Generic[_FlowResultT, _HandlerT]): +class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]): """Handle a data entry flow.""" _flow_result: type[_FlowResultT] = FlowResult # type: ignore[assignment] @@ -639,7 +612,7 @@ class FlowHandler(Generic[_FlowResultT, _HandlerT]): hass: HomeAssistant = None # type: ignore[assignment] handler: _HandlerT = None # type: ignore[assignment] # Ensure the attribute has a subscriptable, but immutable, default value. 
- context: dict[str, Any] = MappingProxyType({}) # type: ignore[assignment] + context: _FlowContextT = MappingProxyType({}) # type: ignore[assignment] # Set by _async_create_flow callback init_step = "init" @@ -658,12 +631,12 @@ class FlowHandler(Generic[_FlowResultT, _HandlerT]): @property def source(self) -> str | None: """Source that initialized the flow.""" - return self.context.get("source", None) # type: ignore[no-any-return] + return self.context.get("source", None) # type: ignore[return-value] @property def show_advanced_options(self) -> bool: """If we should show advanced options.""" - return self.context.get("show_advanced_options", False) # type: ignore[no-any-return] + return self.context.get("show_advanced_options", False) # type: ignore[return-value] def add_suggested_values_to_schema( self, data_schema: vol.Schema, suggested_values: Mapping[str, Any] | None @@ -704,7 +677,7 @@ class FlowHandler(Generic[_FlowResultT, _HandlerT]): step_id: str | None = None, data_schema: vol.Schema | None = None, errors: dict[str, str] | None = None, - description_placeholders: Mapping[str, str | None] | None = None, + description_placeholders: Mapping[str, str] | None = None, last_step: bool | None = None, preview: str | None = None, ) -> _FlowResultT: @@ -930,11 +903,3 @@ class section: def __call__(self, value: Any) -> Any: """Validate input.""" return self.schema(value) - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/exceptions.py b/homeassistant/exceptions.py index f308cbc5cd8..85fe55277fa 100644 --- a/homeassistant/exceptions.py +++ b/homeassistant/exceptions.py @@ -270,6 +270,25 @@ class ServiceNotFound(ServiceValidationError): self.generate_message = True +class ServiceNotSupported(ServiceValidationError): + """Raised when an entity action is not supported.""" + + def __init__(self, domain: str, service: str, entity_id: str) -> None: + """Initialize ServiceNotSupported exception.""" + super().__init__( + translation_domain="homeassistant", + translation_key="service_not_supported", + translation_placeholders={ + "domain": domain, + "service": service, + "entity_id": entity_id, + }, + ) + self.domain = domain + self.service = service + self.generate_message = True + + class MaxLengthExceeded(HomeAssistantError): """Raised when a property value has exceeded the max character length.""" diff --git a/homeassistant/generated/application_credentials.py b/homeassistant/generated/application_credentials.py index dc30f9d76f0..6b3028826dc 100644 --- a/homeassistant/generated/application_credentials.py +++ b/homeassistant/generated/application_credentials.py @@ -10,6 +10,7 @@ APPLICATION_CREDENTIALS = [ "google", "google_assistant_sdk", "google_mail", + "google_photos", "google_sheets", "google_tasks", "home_connect", @@ -23,12 +24,15 @@ APPLICATION_CREDENTIALS = [ "neato", "nest", "netatmo", + "point", "senz", "spotify", "tesla_fleet", "twitch", + "weheat", "withings", "xbox", + "yale", "yolink", "youtube", ] diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index 2ea604a91a2..a105efc2685 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -8,6 +8,26 @@ from __future__ import annotations from typing import Final BLUETOOTH: 
Final[list[dict[str, bool | str | int | list[int]]]] = [ + { + "domain": "acaia", + "manufacturer_id": 16962, + }, + { + "domain": "acaia", + "local_name": "ACAIA*", + }, + { + "domain": "acaia", + "local_name": "PYXIS-*", + }, + { + "domain": "acaia", + "local_name": "LUNAR-*", + }, + { + "domain": "acaia", + "local_name": "PROCHBT001", + }, { "domain": "airthings_ble", "manufacturer_id": 820, @@ -279,6 +299,11 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ ], "manufacturer_id": 76, }, + { + "connectable": True, + "domain": "husqvarna_automower_ble", + "service_uuid": "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", + }, { "domain": "ibeacon", "manufacturer_data_start": [ diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 5e6d29f29f9..8e88e8a2ae8 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -12,6 +12,7 @@ FLOWS = { "history_stats", "integration", "min_max", + "mold_indicator", "random", "statistics", "switch_as_x", @@ -23,6 +24,7 @@ FLOWS = { ], "integration": [ "abode", + "acaia", "accuweather", "acmeda", "adax", @@ -99,6 +101,7 @@ FLOWS = { "bthome", "buienradar", "caldav", + "cambridge_audio", "canary", "cast", "ccm15", @@ -110,10 +113,12 @@ FLOWS = { "color_extractor", "comelit", "control4", + "cookidoo", "coolmaster", "cpuspeed", "crownstone", "daikin", + "deako", "deconz", "deluge", "denonavr", @@ -136,6 +141,7 @@ FLOWS = { "drop_connect", "dsmr", "dsmr_reader", + "duke_energy", "dunehd", "duotecno", "dwd_weather_warnings", @@ -149,6 +155,7 @@ FLOWS = { "ecowitt", "edl21", "efergy", + "eheimdigital", "electrasmart", "electric_kiwi", "elevenlabs", @@ -156,6 +163,7 @@ FLOWS = { "elkm1", "elmax", "elvia", + "emoncms", "emonitor", "emulated_roku", "energenie_power_sockets", @@ -221,8 +229,10 @@ FLOWS = { "goodwe", "google", "google_assistant_sdk", + "google_cloud", "google_generative_ai_conversation", "google_mail", + "google_photos", "google_sheets", "google_tasks", "google_translate", @@ -251,11 +261,13 @@ FLOWS = { "homewizard", "homeworks", "honeywell", + "html5", "huawei_lte", "hue", "huisbaasje", "hunterdouglas_powerview", "husqvarna_automower", + "husqvarna_automower_ble", "huum", "hvv_departures", "hydrawise", @@ -280,11 +292,13 @@ FLOWS = { "ipp", "iqvia", "iron_os", + "iskra", "islamic_prayer_times", "israel_rail", "iss", "ista_ecotrend", "isy994", + "ituran", "izone", "jellyfin", "jewish_calendar", @@ -314,8 +328,10 @@ FLOWS = { "ld2410_ble", "leaone", "led_ble", + "lektrico", "lg_netcast", "lg_soundbar", + "lg_thinq", "lidarr", "lifx", "linear_garage_door", @@ -324,6 +340,7 @@ FLOWS = { "litterrobot", "livisi", "local_calendar", + "local_file", "local_ip", "local_todo", "locative", @@ -359,6 +376,7 @@ FLOWS = { "modem_callerid", "modern_forms", "moehlenhoff_alpha2", + "monarch_money", "monoprice", "monzo", "moon", @@ -370,12 +388,14 @@ FLOWS = { "mpd", "mqtt", "mullvad", + "music_assistant", "mutesync", "mysensors", "mystrom", "myuplink", "nam", "nanoleaf", + "nasweb", "neato", "nest", "netatmo", @@ -389,22 +409,27 @@ FLOWS = { "nibe_heatpump", "nice_go", "nightscout", + "niko_home_control", "nina", "nmap_tracker", "nobo_hub", + "nordpool", "notion", "nuheat", "nuki", "nut", "nws", + "nyt_games", "nzbget", "obihai", "octoprint", + "ohme", "ollama", "omnilogic", "oncue", "ondilo_ico", "onewire", + "onkyo", "onvif", "open_meteo", "openai_conversation", @@ -425,6 +450,7 @@ FLOWS = { "ovo_energy", "owntracks", "p1_monitor", + "palazzetti", "panasonic_viera", 
"peco", "pegel_online", @@ -439,6 +465,7 @@ FLOWS = { "plum_lightpad", "point", "poolsense", + "powerfox", "powerwall", "private_ble_device", "profiler", @@ -505,6 +532,7 @@ FLOWS = { "sensirion_ble", "sensorpro", "sensorpush", + "sensoterra", "sentry", "senz", "seventeentrack", @@ -516,15 +544,18 @@ FLOWS = { "simplefin", "simplepush", "simplisafe", + "sky_remote", "skybell", "slack", "sleepiq", + "slide_local", "slimproto", "sma", "smappee", "smart_meter_texas", "smartthings", "smarttub", + "smarty", "smhi", "smlight", "sms", @@ -540,7 +571,6 @@ FLOWS = { "sonos", "soundtouch", "speedtestdotnet", - "spider", "spotify", "sql", "squeezebox", @@ -549,7 +579,6 @@ FLOWS = { "starlink", "steam_online", "steamist", - "stookalert", "stookwijzer", "streamlabswater", "subaru", @@ -594,6 +623,7 @@ FLOWS = { "tomorrowio", "toon", "totalconnect", + "touchline_sl", "tplink", "tplink_omada", "traccar", @@ -605,6 +635,7 @@ FLOWS = { "trafikverket_train", "trafikverket_weatherstation", "transmission", + "triggercmd", "tuya", "twentemilieu", "twilio", @@ -639,6 +670,7 @@ FLOWS = { "wake_on_lan", "wallbox", "waqi", + "watergate", "watttime", "waze_travel_time", "weatherflow", @@ -646,6 +678,7 @@ FLOWS = { "weatherkit", "webmin", "webostv", + "weheat", "wemo", "whirlpool", "whois", @@ -654,6 +687,7 @@ FLOWS = { "withings", "wiz", "wled", + "wmspro", "wolflink", "workday", "worldclock", @@ -663,6 +697,7 @@ FLOWS = { "xiaomi_aqara", "xiaomi_ble", "xiaomi_miio", + "yale", "yale_smart_alarm", "yalexs_ble", "yamaha_musiccast", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index f6df799d01e..22a09945a80 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -12,11 +12,6 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "airzone", "macaddress": "E84F25*", }, - { - "domain": "august", - "hostname": "yale-connect-plus", - "macaddress": "00177A*", - }, { "domain": "august", "hostname": "connect", @@ -34,12 +29,13 @@ DHCP: Final[list[dict[str, str | bool]]] = [ }, { "domain": "august", - "hostname": "august*", - "macaddress": "E076D0*", + "hostname": "connect", + "macaddress": "789C85*", }, { - "domain": "awair", - "macaddress": "70886B1*", + "domain": "august", + "hostname": "august*", + "macaddress": "E076D0*", }, { "domain": "axis", @@ -213,6 +209,10 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "fully_kiosk", "registered_devices": True, }, + { + "domain": "fyta", + "hostname": "fyta*", + }, { "domain": "goalzero", "registered_devices": True, @@ -240,6 +240,10 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "guardian*", "macaddress": "30AEA4*", }, + { + "domain": "homewizard", + "registered_devices": True, + }, { "domain": "hunterdouglas_powerview", "registered_devices": True, @@ -280,6 +284,22 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "polisy*", "macaddress": "000DB9*", }, + { + "domain": "lamarzocco", + "registered_devices": True, + }, + { + "domain": "lamarzocco", + "hostname": "gs[0-9][0-9][0-9][0-9][0-9][0-9]", + }, + { + "domain": "lamarzocco", + "hostname": "lm[0-9][0-9][0-9][0-9][0-9][0-9]", + }, + { + "domain": "lamarzocco", + "hostname": "mr[0-9][0-9][0-9][0-9][0-9][0-9]", + }, { "domain": "lametric", "registered_devices": True, @@ -371,6 +391,15 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "gateway*", "macaddress": "F8811A*", }, + { + "domain": "palazzetti", + "hostname": "connbox*", + "macaddress": "40F3857*", + }, + { + "domain": "palazzetti", + "registered_devices": True, + 
}, { "domain": "powerwall", "hostname": "1118431-*", @@ -432,6 +461,26 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "ring*", "macaddress": "0CAE7D*", }, + { + "domain": "ring", + "hostname": "ring*", + "macaddress": "2CAB33*", + }, + { + "domain": "ring", + "hostname": "ring*", + "macaddress": "94E36D*", + }, + { + "domain": "ring", + "hostname": "ring*", + "macaddress": "9C7613*", + }, + { + "domain": "ring", + "hostname": "ring*", + "macaddress": "341513*", + }, { "domain": "roomba", "hostname": "irobot-*", @@ -1094,6 +1143,19 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "wiz", "hostname": "wiz_*", }, + { + "domain": "wmspro", + "macaddress": "0023D5*", + }, + { + "domain": "wmspro", + "registered_devices": True, + }, + { + "domain": "yale", + "hostname": "yale-connect-plus", + "macaddress": "00177A*", + }, { "domain": "yeelight", "hostname": "yeelink-*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 52215d232ad..a94962b458b 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -9,7 +9,14 @@ "name": "Abode", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "single_config_entry": true + }, + "acaia": { + "name": "Acaia", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_push" }, "accuweather": { "name": "AccuWeather", @@ -407,12 +414,24 @@ "config_flow": false, "iot_class": "cloud_push" }, + "aps": { + "name": "Arizona Public Service (APS)", + "integration_type": "virtual", + "supported_by": "opower" + }, "apsystems": { "name": "APsystems", "integration_type": "device", "config_flow": true, "iot_class": "local_polling" }, + "aqara": { + "name": "Aqara", + "iot_standards": [ + "matter", + "zigbee" + ] + }, "aquacell": { "name": "AquaCell", "integration_type": "device", @@ -686,12 +705,6 @@ "config_flow": false, "iot_class": "cloud_polling" }, - "bloomsky": { - "name": "BloomSky", - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_polling" - }, "blue_current": { "name": "Blue Current", "integration_type": "hub", @@ -849,11 +862,18 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "cambridge_audio": { + "name": "Cambridge Audio", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_push" + }, "canary": { "name": "Canary", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "ccm15": { "name": "Midea ccm15 AC Controller", @@ -940,7 +960,8 @@ "name": "Cloudflare", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "single_config_entry": true }, "cmus": { "name": "cmus", @@ -1023,6 +1044,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "cookidoo": { + "name": "Cookidoo", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "coolmaster": { "name": "CoolMasterNet", "integration_type": "hub", @@ -1037,7 +1064,8 @@ "cpuspeed": { "integration_type": "device", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "cribl": { "name": "Cribl", @@ -1091,6 +1119,13 @@ "config_flow": false, "iot_class": "local_polling" }, + "deako": { + "name": "Deako", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling", + "single_config_entry": true + }, "debugpy": 
{ "name": "Remote Python Debugger", "integration_type": "service", @@ -1135,7 +1170,8 @@ "demo": { "integration_type": "hub", "config_flow": false, - "iot_class": "calculated" + "iot_class": "calculated", + "single_config_entry": true }, "denon": { "name": "Denon", @@ -1344,12 +1380,6 @@ "config_flow": true, "iot_class": "local_push" }, - "dte_energy_bridge": { - "name": "DTE Energy Bridge", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "dublin_bus_transport": { "name": "Dublin Bus", "integration_type": "hub", @@ -1362,6 +1392,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "duke_energy": { + "name": "Duke Energy", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "dunehd": { "name": "Dune HD", "integration_type": "hub", @@ -1372,7 +1408,8 @@ "name": "Duotecno", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "duquesne_light": { "name": "Duquesne Light", @@ -1430,7 +1467,8 @@ "name": "ecobee", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "ecoforest": { "name": "Ecoforest", @@ -1486,6 +1524,12 @@ "config_flow": false, "iot_class": "local_polling" }, + "eheimdigital": { + "name": "EHEIM Digital", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "electrasmart": { "name": "Electra Smart", "integration_type": "hub", @@ -1562,7 +1606,7 @@ "integrations": { "emoncms": { "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling", "name": "Emoncms" }, @@ -1628,7 +1672,8 @@ "name": "EnOcean", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "enphase_envoy": { "name": "Enphase Envoy", @@ -2244,16 +2289,10 @@ "name": "Google Assistant SDK" }, "google_cloud": { - "integration_type": "hub", - "config_flow": false, + "integration_type": "service", + "config_flow": true, "iot_class": "cloud_push", - "name": "Google Cloud Platform" - }, - "google_domains": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_polling", - "name": "Google Domains" + "name": "Google Cloud" }, "google_generative_ai_conversation": { "integration_type": "service", @@ -2273,6 +2312,12 @@ "iot_class": "cloud_polling", "name": "Google Maps" }, + "google_photos": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "name": "Google Photos" + }, "google_pubsub": { "integration_type": "hub", "config_flow": false, @@ -2432,7 +2477,8 @@ "name": "Home Assistant Supervisor", "integration_type": "hub", "config_flow": false, - "iot_class": "local_polling" + "iot_class": "local_polling", + "single_config_entry": true }, "havana_shade": { "name": "Havana Shade", @@ -2620,8 +2666,9 @@ "html5": { "name": "HTML5 Push Notifications", "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push" + "config_flow": true, + "iot_class": "cloud_push", + "single_config_entry": true }, "huawei_lte": { "name": "Huawei LTE", @@ -2646,11 +2693,22 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, - "husqvarna_automower": { - "name": "Husqvarna Automower", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push" + "husqvarna": { + "name": "Husqvarna", + "integrations": { + "husqvarna_automower": { 
+ "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "Husqvarna Automower" + }, + "husqvarna_automower_ble": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling", + "name": "Husqvarna Automower BLE" + } + } }, "huum": { "name": "Huum", @@ -2692,7 +2750,8 @@ "name": "Jandy iAqualink", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "ibm": { "name": "IBM", @@ -2821,7 +2880,8 @@ "name": "Insteon", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "intellifire": { "name": "IntelliFire", @@ -2894,6 +2954,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "iskra": { + "name": "iskra", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "islamic_prayer_times": { "integration_type": "hub", "config_flow": true, @@ -2914,7 +2980,8 @@ "name": "International Space Station (ISS)", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "ista_ecotrend": { "name": "ista EcoTrend", @@ -2928,6 +2995,12 @@ "config_flow": true, "iot_class": "local_push" }, + "ituran": { + "name": "Ituran", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "izone": { "name": "iZone", "integration_type": "hub", @@ -3053,7 +3126,8 @@ "name": "Everything but the Kitchen Sink", "integration_type": "hub", "config_flow": false, - "iot_class": "calculated" + "iot_class": "calculated", + "single_config_entry": true }, "kiwi": { "name": "KIWI", @@ -3069,7 +3143,7 @@ }, "knocki": { "name": "Knocki", - "integration_type": "device", + "integration_type": "hub", "config_flow": true, "iot_class": "cloud_push" }, @@ -3167,7 +3241,8 @@ "name": "Launch Library", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "laundrify": { "name": "laundrify", @@ -3204,6 +3279,12 @@ "integration_type": "virtual", "supported_by": "netatmo" }, + "lektrico": { + "name": "Lektrico Charging Station", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "leviton": { "name": "Leviton", "iot_standards": [ @@ -3225,6 +3306,12 @@ "iot_class": "local_polling", "name": "LG Soundbars" }, + "lg_thinq": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "LG ThinQ" + }, "webostv": { "integration_type": "hub", "config_flow": true, @@ -3303,7 +3390,8 @@ "name": "LiteJet", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "litterrobot": { "name": "Litter-Robot", @@ -3331,13 +3419,14 @@ "local_file": { "name": "Local File", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "local_ip": { "integration_type": "hub", "config_flow": true, - "iot_class": "local_polling" + "iot_class": "local_polling", + "single_config_entry": true }, "local_todo": { "integration_type": "hub", @@ -3480,7 +3569,7 @@ "name": "Mastodon", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_push" + "iot_class": "cloud_polling" }, "matrix": { "name": "Matrix", @@ -3758,11 +3847,11 @@ "config_flow": true, "iot_class": "local_push" }, - 
"mold_indicator": { - "name": "Mold Indicator", + "monarch_money": { + "name": "Monarch Money", "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" + "config_flow": true, + "iot_class": "cloud_polling" }, "monessen": { "name": "Monessen", @@ -3876,6 +3965,12 @@ "iot_class": "cloud_polling", "single_config_entry": true }, + "music_assistant": { + "name": "Music Assistant", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push" + }, "mutesync": { "name": "mutesync", "integration_type": "hub", @@ -3942,6 +4037,12 @@ "config_flow": true, "iot_class": "local_push" }, + "nasweb": { + "name": "NASweb", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push" + }, "neato": { "name": "Neato Botvac", "integration_type": "hub", @@ -4056,7 +4157,7 @@ "niko_home_control": { "name": "Niko Home Control", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "nilu": { @@ -4107,6 +4208,13 @@ "config_flow": true, "iot_class": "local_push" }, + "nordpool": { + "name": "Nord Pool", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "single_config_entry": true + }, "norway_air": { "name": "Om Luftkvalitet i Norge (Norway Air)", "integration_type": "hub", @@ -4178,11 +4286,18 @@ "config_flow": false, "iot_class": "local_push" }, + "nyt_games": { + "name": "NYT Games", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "nzbget": { "name": "NZBGet", "integration_type": "hub", "config_flow": true, - "iot_class": "local_polling" + "iot_class": "local_polling", + "single_config_entry": true }, "oasa_telematics": { "name": "OASA Telematics", @@ -4214,6 +4329,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "ohme": { + "name": "Ohme", + "integration_type": "device", + "config_flow": true, + "iot_class": "cloud_polling" + }, "ollama": { "name": "Ollama", "integration_type": "service", @@ -4230,7 +4351,8 @@ "name": "Hayward Omnilogic", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "oncue": { "name": "Oncue by Kohler", @@ -4242,7 +4364,8 @@ "name": "Ondilo ICO", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "onewire": { "name": "1-Wire", @@ -4252,8 +4375,8 @@ }, "onkyo": { "name": "Onkyo", - "integration_type": "hub", - "config_flow": false, + "integration_type": "device", + "config_flow": true, "iot_class": "local_push" }, "onvif": { @@ -4450,7 +4573,8 @@ "name": "OwnTracks", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "p1_monitor": { "name": "P1 Monitor", @@ -4458,6 +4582,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "palazzetti": { + "name": "Palazzetti", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "panasonic": { "name": "Panasonic", "integrations": { @@ -4481,16 +4611,6 @@ "config_flow": false, "iot_class": "local_polling" }, - "panel_custom": { - "name": "Custom Panel", - "integration_type": "hub", - "config_flow": false - }, - "panel_iframe": { - "name": "iframe Panel", - "integration_type": "hub", - "config_flow": false - }, "pcs_lighting": { "name": "PCS Lighting", "integration_type": "virtual", @@ -4661,6 +4781,12 @@ "integration_type": "virtual", 
"supported_by": "opower" }, + "powerfox": { + "name": "Powerfox", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "private_ble_device": { "name": "Private BLE Device", "integration_type": "hub", @@ -4670,7 +4796,8 @@ "profiler": { "name": "Profiler", "integration_type": "hub", - "config_flow": true + "config_flow": true, + "single_config_entry": true }, "progettihwsw": { "name": "ProgettiHWSW Automation", @@ -4885,7 +5012,8 @@ "name": "Radio Browser", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "radiotherm": { "name": "Radio Thermostat", @@ -5016,7 +5144,7 @@ "iot_class": "local_polling" }, "reolink": { - "name": "Reolink IP NVR/camera", + "name": "Reolink", "integration_type": "hub", "config_flow": true, "iot_class": "local_push" @@ -5060,7 +5188,8 @@ "name": "Rhasspy", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "ridwell": { "name": "Ridwell", @@ -5134,6 +5263,23 @@ "config_flow": true, "iot_class": "local_push" }, + "roth": { + "name": "Roth", + "integrations": { + "touchline": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling", + "name": "Roth Touchline" + }, + "touchline_sl": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "name": "Roth Touchline SL" + } + } + }, "rova": { "name": "ROVA", "integration_type": "hub", @@ -5159,7 +5305,7 @@ "iot_class": "local_push" }, "ruckus_unleashed": { - "name": "Ruckus Unleashed", + "name": "Ruckus", "integration_type": "hub", "config_flow": true, "iot_class": "local_polling" @@ -5342,6 +5488,12 @@ "config_flow": true, "iot_class": "local_push" }, + "sensoterra": { + "name": "Sensoterra", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "sentry": { "name": "Sentry", "integration_type": "service", @@ -5471,12 +5623,6 @@ "integration_type": "virtual", "supported_by": "overkiz" }, - "simulated": { - "name": "Simulated", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "sinch": { "name": "Sinch SMS", "integration_type": "hub", @@ -5489,11 +5635,22 @@ "config_flow": false, "iot_class": "local_push" }, - "sky_hub": { - "name": "Sky Hub", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" + "sky": { + "name": "Sky", + "integrations": { + "sky_hub": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling", + "name": "Sky Hub" + }, + "sky_remote": { + "integration_type": "device", + "config_flow": true, + "iot_class": "assumed_state", + "name": "Sky Remote Control" + } + } }, "skybeacon": { "name": "Skybeacon", @@ -5521,9 +5678,20 @@ }, "slide": { "name": "Slide", - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_polling" + "integrations": { + "slide": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "cloud_polling", + "name": "Slide" + }, + "slide_local": { + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling", + "name": "Slide Local" + } + } }, "slimproto": { "name": "SlimProto (Squeezebox players)", @@ -5579,7 +5747,7 @@ "smarty": { "name": "Salda Smarty", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "smhi": { @@ -5592,7 +5760,7 @@ "name": "SMLIGHT SLZB", 
"integration_type": "device", "config_flow": true, - "iot_class": "local_polling" + "iot_class": "local_push" }, "sms": { "name": "SMS notifications via GSM-modem", @@ -5746,12 +5914,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "spider": { - "name": "Itho Daalderop Spider", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "splunk": { "name": "Splunk", "integration_type": "hub", @@ -5824,12 +5986,6 @@ "config_flow": false, "iot_class": "local_polling" }, - "stookalert": { - "name": "RIVM Stookalert", - "integration_type": "service", - "config_flow": true, - "iot_class": "cloud_polling" - }, "stookwijzer": { "name": "Stookwijzer", "integration_type": "service", @@ -6297,12 +6453,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "touchline": { - "name": "Roth Touchline", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "tplink": { "name": "TP-Link", "integrations": { @@ -6405,6 +6555,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "triggercmd": { + "name": "TRIGGERcmd", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "tuya": { "name": "Tuya", "integration_type": "hub", @@ -6771,6 +6927,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "watergate": { + "name": "Watergate", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push" + }, "watttime": { "name": "WattTime", "integration_type": "service", @@ -6805,6 +6967,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "weheat": { + "name": "Weheat", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "wemo": { "name": "Belkin WeMo", "integration_type": "hub", @@ -6864,6 +7032,12 @@ "config_flow": true, "iot_class": "local_push" }, + "wmspro": { + "name": "WMS WebControl pro", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "wolflink": { "name": "Wolf SmartSet Service", "integration_type": "hub", @@ -6994,8 +7168,14 @@ "yale_home": { "integration_type": "virtual", "config_flow": false, - "supported_by": "august", + "supported_by": "yale", "name": "Yale Home" + }, + "yale": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "Yale" } } }, @@ -7201,7 +7381,6 @@ "iot_class": "calculated" }, "history_stats": { - "name": "History Stats", "integration_type": "helper", "config_flow": true, "iot_class": "local_polling" @@ -7246,8 +7425,12 @@ "config_flow": true, "iot_class": "calculated" }, + "mold_indicator": { + "integration_type": "helper", + "config_flow": true, + "iot_class": "calculated" + }, "random": { - "name": "Random", "integration_type": "helper", "config_flow": true, "iot_class": "calculated" @@ -7257,7 +7440,6 @@ "config_flow": false }, "statistics": { - "name": "Statistics", "integration_type": "helper", "config_flow": true, "iot_class": "local_polling" @@ -7279,7 +7461,6 @@ "iot_class": "local_polling" }, "timer": { - "name": "Timer", "integration_type": "helper", "config_flow": false }, @@ -7289,7 +7470,6 @@ "iot_class": "calculated" }, "trend": { - "name": "Trend", "integration_type": "helper", "config_flow": true, "iot_class": "calculated" @@ -7318,6 +7498,7 @@ "google_travel_time", "group", "growatt_server", + "history_stats", "holiday", "homekit_controller", "input_boolean", @@ -7334,20 +7515,25 @@ "min_max", "mobile_app", "moehlenhoff_alpha2", + "mold_indicator", "moon", "nextbus", "nmap_tracker", "plant", "proximity", + 
"random", "rpi_power", "schedule", "season", "shopping_list", + "statistics", "sun", "switch_as_x", "threshold", "time_date", + "timer", "tod", + "trend", "uptime", "utility_meter", "version", diff --git a/homeassistant/generated/languages.py b/homeassistant/generated/languages.py index 78105c76f4c..7e56952f7a5 100644 --- a/homeassistant/generated/languages.py +++ b/homeassistant/generated/languages.py @@ -28,6 +28,7 @@ LANGUAGES = { "fi", "fr", "fy", + "ga", "gl", "gsw", "he", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 389a4435910..2c914c2d240 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -68,10 +68,6 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, - "LIFX B10": { - "always_discover": True, - "domain": "lifx", - }, "LIFX BR30": { "always_discover": True, "domain": "lifx", @@ -96,6 +92,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Colour": { + "always_discover": True, + "domain": "lifx", + }, "LIFX DLCOL": { "always_discover": True, "domain": "lifx", @@ -144,6 +144,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Permanent Outdoor": { + "always_discover": True, + "domain": "lifx", + }, "LIFX Pls": { "always_discover": True, "domain": "lifx", @@ -164,11 +168,11 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, - "LIFX T10": { + "LIFX Tile": { "always_discover": True, "domain": "lifx", }, - "LIFX Tile": { + "LIFX Tube": { "always_discover": True, "domain": "lifx", }, @@ -275,6 +279,11 @@ HOMEKIT = { } ZEROCONF = { + "_PowerView-G3._tcp.local.": [ + { + "domain": "hunterdouglas_powerview", + }, + ], "_Volumio._tcp.local.": [ { "domain": "volumio", @@ -423,6 +432,11 @@ ZEROCONF = { "domain": "forked_daapd", }, ], + "_deako._tcp.local.": [ + { + "domain": "deako", + }, + ], "_devialet-http._tcp.local.": [ { "domain": "devialet", @@ -522,6 +536,14 @@ ZEROCONF = { "domain": "bosch_shc", "name": "bosch shc*", }, + { + "domain": "eheimdigital", + "name": "eheimdigital._http._tcp.local.", + }, + { + "domain": "lektrico", + "name": "lektrico*", + }, { "domain": "loqed", "name": "loqed*", @@ -536,6 +558,10 @@ ZEROCONF = { "manufacturer": "nettigo", }, }, + { + "domain": "powerfox", + "name": "powerfox*", + }, { "domain": "pure_energie", "name": "smartbridge*", @@ -552,6 +578,10 @@ ZEROCONF = { "domain": "shelly", "name": "shelly*", }, + { + "domain": "slide_local", + "name": "slide*", + }, { "domain": "synology_dsm", "properties": { @@ -608,6 +638,12 @@ ZEROCONF = { }, ], "_lutron._tcp.local.": [ + { + "domain": "lutron_caseta", + "properties": { + "SYSTYPE": "hwqs*", + }, + }, { "domain": "lutron_caseta", "properties": { @@ -627,6 +663,11 @@ ZEROCONF = { }, }, ], + "_mass._tcp.local.": [ + { + "domain": "music_assistant", + }, + ], "_matter._tcp.local.": [ { "domain": "matter", @@ -694,11 +735,6 @@ ZEROCONF = { "domain": "plugwise", }, ], - "_powerview-g3._tcp.local.": [ - { - "domain": "hunterdouglas_powerview", - }, - ], "_powerview._tcp.local.": [ { "domain": "hunterdouglas_powerview", @@ -763,6 +799,11 @@ ZEROCONF = { "name": "slzb-06*", }, ], + "_smoip._tcp.local.": [ + { + "domain": "cambridge_audio", + }, + ], "_sonos._tcp.local.": [ { "domain": "sonos", @@ -792,6 +833,11 @@ ZEROCONF = { "name": "smappee50*", }, ], + "_stream-magic._tcp.local.": [ + { + "domain": "cambridge_audio", + }, + ], "_system-bridge._tcp.local.": [ { "domain": "system_bridge", @@ -850,6 +896,12 @@ ZEROCONF = { "name": "*zigate*", }, ], + 
"_zigbee-coordinator._tcp.local.": [ + { + "domain": "zha", + "name": "*", + }, + ], "_zigstar_gw._tcp.local.": [ { "domain": "zha", diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py index d61f889d4b5..f01ae325875 100644 --- a/homeassistant/helpers/aiohttp_client.py +++ b/homeassistant/helpers/aiohttp_client.py @@ -32,11 +32,11 @@ if TYPE_CHECKING: from aiohttp.typedefs import JSONDecoder -DATA_CONNECTOR: HassKey[dict[tuple[bool, int], aiohttp.BaseConnector]] = HassKey( +DATA_CONNECTOR: HassKey[dict[tuple[bool, int, str], aiohttp.BaseConnector]] = HassKey( "aiohttp_connector" ) -DATA_CLIENTSESSION: HassKey[dict[tuple[bool, int], aiohttp.ClientSession]] = HassKey( - "aiohttp_clientsession" +DATA_CLIENTSESSION: HassKey[dict[tuple[bool, int, str], aiohttp.ClientSession]] = ( + HassKey("aiohttp_clientsession") ) SERVER_SOFTWARE = ( @@ -44,11 +44,13 @@ SERVER_SOFTWARE = ( f"aiohttp/{aiohttp.__version__} Python/{sys.version_info[0]}.{sys.version_info[1]}" ) -ENABLE_CLEANUP_CLOSED = not (3, 11, 1) <= sys.version_info < (3, 11, 4) -# Enabling cleanup closed on python 3.11.1+ leaks memory relatively quickly -# see https://github.com/aio-libs/aiohttp/issues/7252 -# aiohttp interacts poorly with https://github.com/python/cpython/pull/98540 -# The issue was fixed in 3.11.4 via https://github.com/python/cpython/pull/104485 +ENABLE_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < ( + 3, + 13, + 1, +) or sys.version_info < (3, 12, 7) +# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960 +# which first appeared in Python 3.12.7 and 3.13.1 WARN_CLOSE_MSG = "closes the Home Assistant aiohttp session" @@ -86,12 +88,13 @@ def async_get_clientsession( hass: HomeAssistant, verify_ssl: bool = True, family: socket.AddressFamily = socket.AF_UNSPEC, + ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, ) -> aiohttp.ClientSession: """Return default aiohttp ClientSession. This method must be run in the event loop. """ - session_key = _make_key(verify_ssl, family) + session_key = _make_key(verify_ssl, family, ssl_cipher) sessions = hass.data.setdefault(DATA_CLIENTSESSION, {}) if session_key not in sessions: @@ -100,6 +103,7 @@ def async_get_clientsession( verify_ssl, auto_cleanup_method=_async_register_default_clientsession_shutdown, family=family, + ssl_cipher=ssl_cipher, ) sessions[session_key] = session else: @@ -115,6 +119,7 @@ def async_create_clientsession( verify_ssl: bool = True, auto_cleanup: bool = True, family: socket.AddressFamily = socket.AF_UNSPEC, + ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, **kwargs: Any, ) -> aiohttp.ClientSession: """Create a new ClientSession with kwargs, i.e. for cookies. @@ -135,6 +140,7 @@ def async_create_clientsession( verify_ssl, auto_cleanup_method=auto_cleanup_method, family=family, + ssl_cipher=ssl_cipher, **kwargs, ) @@ -146,11 +152,12 @@ def _async_create_clientsession( auto_cleanup_method: Callable[[HomeAssistant, aiohttp.ClientSession], None] | None = None, family: socket.AddressFamily = socket.AF_UNSPEC, + ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, **kwargs: Any, ) -> aiohttp.ClientSession: """Create a new ClientSession with kwargs, i.e. 
for cookies.""" clientsession = aiohttp.ClientSession( - connector=_async_get_connector(hass, verify_ssl, family), + connector=_async_get_connector(hass, verify_ssl, family, ssl_cipher), json_serialize=json_dumps, response_class=HassClientResponse, **kwargs, @@ -279,10 +286,12 @@ def _async_register_default_clientsession_shutdown( @callback def _make_key( - verify_ssl: bool = True, family: socket.AddressFamily = socket.AF_UNSPEC -) -> tuple[bool, socket.AddressFamily]: + verify_ssl: bool = True, + family: socket.AddressFamily = socket.AF_UNSPEC, + ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, +) -> tuple[bool, socket.AddressFamily, ssl_util.SSLCipherList]: """Make a key for connector or session pool.""" - return (verify_ssl, family) + return (verify_ssl, family, ssl_cipher) class HomeAssistantTCPConnector(aiohttp.TCPConnector): @@ -305,21 +314,22 @@ def _async_get_connector( hass: HomeAssistant, verify_ssl: bool = True, family: socket.AddressFamily = socket.AF_UNSPEC, + ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, ) -> aiohttp.BaseConnector: """Return the connector pool for aiohttp. This method must be run in the event loop. """ - connector_key = _make_key(verify_ssl, family) + connector_key = _make_key(verify_ssl, family, ssl_cipher) connectors = hass.data.setdefault(DATA_CONNECTOR, {}) if connector_key in connectors: return connectors[connector_key] if verify_ssl: - ssl_context: SSLContext = ssl_util.get_default_context() + ssl_context: SSLContext = ssl_util.client_context(ssl_cipher) else: - ssl_context = ssl_util.get_default_no_verify_context() + ssl_context = ssl_util.client_context_no_verify(ssl_cipher) connector = HomeAssistantTCPConnector( family=family, diff --git a/homeassistant/helpers/area_registry.py b/homeassistant/helpers/area_registry.py index 3e101f185ed..f74296a9fb1 100644 --- a/homeassistant/helpers/area_registry.py +++ b/homeassistant/helpers/area_registry.py @@ -5,12 +5,11 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Iterable import dataclasses +from dataclasses import dataclass, field from datetime import datetime -from functools import cached_property -from typing import Any, Literal, TypedDict +from typing import TYPE_CHECKING, Any, Literal, TypedDict from homeassistant.core import HomeAssistant, callback -from homeassistant.util import slugify from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -20,13 +19,19 @@ from .json import json_bytes, json_fragment from .normalized_name_base_registry import ( NormalizedNameBaseRegistryEntry, NormalizedNameBaseRegistryItems, - normalize_name, ) from .registry import BaseRegistry, RegistryIndexType from .singleton import singleton from .storage import Store from .typing import UNDEFINED, UndefinedType +if TYPE_CHECKING: + # mypy cannot workout _cache Protocol with dataclasses + from propcache import cached_property as under_cached_property +else: + from propcache import under_cached_property + + DATA_REGISTRY: HassKey[AreaRegistry] = HassKey("area_registry") EVENT_AREA_REGISTRY_UPDATED: EventType[EventAreaRegistryUpdatedData] = EventType( "area_registry_updated" @@ -63,7 +68,7 @@ class EventAreaRegistryUpdatedData(TypedDict): area_id: str -@dataclasses.dataclass(frozen=True, kw_only=True) +@dataclass(frozen=True, kw_only=True, slots=True) class AreaEntry(NormalizedNameBaseRegistryEntry): """Area 
Registry Entry.""" @@ -71,10 +76,11 @@ class AreaEntry(NormalizedNameBaseRegistryEntry): floor_id: str | None icon: str | None id: str - labels: set[str] = dataclasses.field(default_factory=set) + labels: set[str] = field(default_factory=set) picture: str | None + _cache: dict[str, Any] = field(default_factory=dict, compare=False, init=False) - @cached_property + @under_cached_property def json_fragment(self) -> json_fragment: """Return a JSON representation of this AreaEntry.""" return json_fragment( @@ -153,22 +159,23 @@ class AreaRegistryItems(NormalizedNameBaseRegistryItems[AreaEntry]): def _index_entry(self, key: str, entry: AreaEntry) -> None: """Index an entry.""" + super()._index_entry(key, entry) if entry.floor_id is not None: self._floors_index[entry.floor_id][key] = True for label in entry.labels: self._labels_index[label][key] = True - super()._index_entry(key, entry) def _unindex_entry( self, key: str, replacement_entry: AreaEntry | None = None ) -> None: + # always call base class before other indices + super()._unindex_entry(key, replacement_entry) entry = self.data[key] if labels := entry.labels: for label in labels: self._unindex_entry_value(key, label, self._labels_index) if floor_id := entry.floor_id: self._unindex_entry_value(key, floor_id, self._floors_index) - return super()._unindex_entry(key, replacement_entry) def get_areas_for_label(self, label: str) -> list[AreaEntry]: """Get areas for label.""" @@ -224,6 +231,10 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): return area return self.async_create(name) + def _generate_id(self, name: str) -> str: + """Generate area ID.""" + return self.areas.generate_id_from_name(name) + @callback def async_create( self, @@ -237,28 +248,28 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): ) -> AreaEntry: """Create a new area.""" self.hass.verify_event_loop_thread("area_registry.async_create") - normalized_name = normalize_name(name) - if self.async_get_area_by_name(name): - raise ValueError(f"The name {name} ({normalized_name}) is already in use") + if area := self.async_get_area_by_name(name): + raise ValueError( + f"The name {name} ({area.normalized_name}) is already in use" + ) - area_id = self._generate_area_id(name) area = AreaEntry( aliases=aliases or set(), floor_id=floor_id, icon=icon, - id=area_id, + id=self._generate_id(name), labels=labels or set(), name=name, - normalized_name=normalized_name, picture=picture, ) - assert area.id is not None - self.areas[area.id] = area + area_id = area.id + self.areas[area_id] = area self.async_schedule_save() + self.hass.bus.async_fire_internal( EVENT_AREA_REGISTRY_UPDATED, - EventAreaRegistryUpdatedData(action="create", area_id=area.id), + EventAreaRegistryUpdatedData(action="create", area_id=area_id), ) return area @@ -341,7 +352,6 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): if name is not UNDEFINED and name != old.name: new_values["name"] = name - new_values["normalized_name"] = normalize_name(name) if not new_values: return old @@ -365,7 +375,6 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): if data is not None: for area in data["areas"]: assert area["name"] is not None and area["id"] is not None - normalized_name = normalize_name(area["name"]) areas[area["id"]] = AreaEntry( aliases=set(area["aliases"]), floor_id=area["floor_id"], @@ -373,7 +382,6 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): id=area["id"], labels=set(area["labels"]), name=area["name"], - normalized_name=normalized_name, picture=area["picture"], 
created_at=datetime.fromisoformat(area["created_at"]), modified_at=datetime.fromisoformat(area["modified_at"]), @@ -402,15 +410,6 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): ] } - def _generate_area_id(self, name: str) -> str: - """Generate area ID.""" - suggestion = suggestion_base = slugify(name) - tries = 1 - while suggestion in self.areas: - tries += 1 - suggestion = f"{suggestion_base}_{tries}" - return suggestion - @callback def _async_setup_cleanup(self) -> None: """Set up the area registry cleanup.""" diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 43021fffac5..4b5e2f277a0 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -13,7 +13,6 @@ import voluptuous as vol from homeassistant import loader from homeassistant.config import ( # type: ignore[attr-defined] CONF_PACKAGES, - CORE_CONFIG_SCHEMA, YAML_CONFIG_FILE, config_per_platform, extract_domain_configs, @@ -23,6 +22,7 @@ from homeassistant.config import ( # type: ignore[attr-defined] merge_packages_config, ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core_config import CORE_CONFIG_SCHEMA from homeassistant.exceptions import HomeAssistantError from homeassistant.requirements import ( RequirementsNotFound, diff --git a/homeassistant/helpers/collection.py b/homeassistant/helpers/collection.py index 9151a9dfc6b..86d3450c3a0 100644 --- a/homeassistant/helpers/collection.py +++ b/homeassistant/helpers/collection.py @@ -7,6 +7,7 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine, Iterable from dataclasses import dataclass from functools import partial +from hashlib import md5 from itertools import groupby import logging from operator import attrgetter @@ -25,6 +26,7 @@ from homeassistant.util import slugify from . 
import entity_registry from .entity import Entity from .entity_component import EntityComponent +from .json import json_bytes from .storage import Store from .typing import ConfigType, VolDictType @@ -50,6 +52,7 @@ class CollectionChange: change_type: str item_id: str item: Any + item_hash: str | None = None type ChangeListener = Callable[ @@ -273,7 +276,9 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( await self.notify_changes( [ - CollectionChange(CHANGE_ADDED, item[CONF_ID], item) + CollectionChange( + CHANGE_ADDED, item[CONF_ID], item, self._hash_item(item) + ) for item in raw_storage["items"] ] ) @@ -313,7 +318,16 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( item = self._create_item(item_id, validated_data) self.data[item_id] = item self._async_schedule_save() - await self.notify_changes([CollectionChange(CHANGE_ADDED, item_id, item)]) + await self.notify_changes( + [ + CollectionChange( + CHANGE_ADDED, + item_id, + item, + self._hash_item(self._serialize_item(item_id, item)), + ) + ] + ) return item async def async_update_item(self, item_id: str, updates: dict) -> _ItemT: @@ -331,7 +345,16 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( self.data[item_id] = updated self._async_schedule_save() - await self.notify_changes([CollectionChange(CHANGE_UPDATED, item_id, updated)]) + await self.notify_changes( + [ + CollectionChange( + CHANGE_UPDATED, + item_id, + updated, + self._hash_item(self._serialize_item(item_id, updated)), + ) + ] + ) return self.data[item_id] @@ -365,6 +388,10 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( def _data_to_save(self) -> _StoreT: """Return JSON-compatible date for storing to file.""" + def _hash_item(self, item: dict) -> str: + """Return a hash of the item.""" + return md5(json_bytes(item)).hexdigest() + class DictStorageCollection(StorageCollection[dict, SerializedStorageCollection]): """A specialized StorageCollection where the items are untyped dicts.""" @@ -464,6 +491,10 @@ class _CollectionLifeCycle(Generic[_EntityT]): async def _update_entity(self, change_set: CollectionChange) -> None: if entity := self.entities.get(change_set.item_id): + if change_set.item_hash: + self.ent_reg.async_update_entity_options( + entity.entity_id, "collection", {"hash": change_set.item_hash} + ) await entity.async_update_config(change_set.item) async def _collection_changed(self, change_set: Iterable[CollectionChange]) -> None: diff --git a/homeassistant/helpers/condition.py b/homeassistant/helpers/condition.py index 629cdeef942..5952e28a1eb 100644 --- a/homeassistant/helpers/condition.py +++ b/homeassistant/helpers/condition.py @@ -8,6 +8,7 @@ from collections.abc import Callable, Container, Generator from contextlib import contextmanager from datetime import datetime, time as dt_time, timedelta import functools as ft +import logging import re import sys from typing import Any, Protocol, cast @@ -820,9 +821,15 @@ def time( after_entity.attributes.get("minute", 59), after_entity.attributes.get("second", 59), ) - elif after_entity.attributes.get( - ATTR_DEVICE_CLASS - ) == SensorDeviceClass.TIMESTAMP and after_entity.state not in ( + elif after_entity.domain == "time" and after_entity.state not in ( + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): + after = datetime.strptime(after_entity.state, "%H:%M:%S").time() + elif ( + after_entity.attributes.get(ATTR_DEVICE_CLASS) + == SensorDeviceClass.TIMESTAMP + ) and after_entity.state not in ( STATE_UNAVAILABLE, 
STATE_UNKNOWN, ): @@ -844,9 +851,15 @@ def time( before_entity.attributes.get("minute", 59), before_entity.attributes.get("second", 59), ) - elif before_entity.attributes.get( - ATTR_DEVICE_CLASS - ) == SensorDeviceClass.TIMESTAMP and before_entity.state not in ( + elif before_entity.domain == "time": + try: + before = datetime.strptime(before_entity.state, "%H:%M:%S").time() + except ValueError: + return False + elif ( + before_entity.attributes.get(ATTR_DEVICE_CLASS) + == SensorDeviceClass.TIMESTAMP + ) and before_entity.state not in ( STATE_UNAVAILABLE, STATE_UNKNOWN, ): @@ -1064,6 +1077,46 @@ async def async_validate_conditions_config( return [await async_validate_condition_config(hass, cond) for cond in conditions] +async def async_conditions_from_config( + hass: HomeAssistant, + condition_configs: list[ConfigType], + logger: logging.Logger, + name: str, +) -> Callable[[TemplateVarsType], bool]: + """AND all conditions.""" + checks: list[ConditionCheckerType] = [ + await async_from_config(hass, condition_config) + for condition_config in condition_configs + ] + + def check_conditions(variables: TemplateVarsType = None) -> bool: + """AND all conditions.""" + errors: list[ConditionErrorIndex] = [] + for index, check in enumerate(checks): + try: + with trace_path(["condition", str(index)]): + if check(hass, variables) is False: + return False + except ConditionError as ex: + errors.append( + ConditionErrorIndex( + "condition", index=index, total=len(checks), error=ex + ) + ) + + if errors: + logger.warning( + "Error evaluating condition in '%s':\n%s", + name, + ConditionErrorContainer("condition", errors=errors), + ) + return False + + return True + + return check_conditions + + @callback def async_extract_entities(config: ConfigType | Template) -> set[str]: """Extract entities from a condition.""" diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index d7a5d5ae8a1..3681e941eee 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -4,7 +4,7 @@ # with PEP 695 syntax. Fixed in Python 3.13. 
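
The new `async_conditions_from_config` helper above wraps each configured condition into a checker and ANDs the results, collecting `ConditionError`s and logging them instead of raising. A dependency-free sketch of that AND-combination pattern follows; the names are illustrative only and the error collection of the real helper is omitted:

```python
# Minimal sketch of the "AND all conditions" pattern; plain Python, no
# Home Assistant imports. The real helper also traces each condition and
# logs ConditionError details instead of letting them propagate.
from collections.abc import Callable, Mapping
from typing import Any

ConditionChecker = Callable[[Mapping[str, Any] | None], bool]


def combine_conditions(checks: list[ConditionChecker]) -> ConditionChecker:
    """Return a single checker that passes only if every check passes."""

    def check_all(variables: Mapping[str, Any] | None = None) -> bool:
        # all() short-circuits on the first failing condition.
        return all(check(variables) for check in checks)

    return check_all


# Usage: two toy conditions combined into one callable.
is_home: ConditionChecker = lambda v: bool(v and v.get("person") == "home")
is_dark: ConditionChecker = lambda v: bool(v and v.get("sun") == "below_horizon")
combined = combine_conditions([is_home, is_dark])
print(combined({"person": "home", "sun": "below_horizon"}))  # True
print(combined({"person": "home", "sun": "above_horizon"}))  # False
```
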
# from __future__ import annotations -from collections.abc import Callable, Hashable +from collections.abc import Callable, Hashable, Mapping import contextlib from contextvars import ContextVar from datetime import ( @@ -81,6 +81,8 @@ from homeassistant.const import ( CONF_TARGET, CONF_THEN, CONF_TIMEOUT, + CONF_TRIGGER, + CONF_TRIGGERS, CONF_UNTIL, CONF_VALUE_TEMPLATE, CONF_VARIABLES, @@ -715,8 +717,19 @@ def template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value is None") if isinstance(value, (list, dict, template_helper.Template)): raise vol.Invalid("template value should be a string") + if not (hass := _async_get_hass_or_none()): + # pylint: disable-next=import-outside-toplevel + from .frame import ReportBehavior, report_usage - template_value = template_helper.Template(str(value), _async_get_hass_or_none()) + report_usage( + ( + "validates schema outside the event loop, " + "which will stop working in HA Core 2025.10" + ), + core_behavior=ReportBehavior.LOG, + ) + + template_value = template_helper.Template(str(value), hass) try: template_value.ensure_valid() @@ -733,8 +746,19 @@ def dynamic_template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value should be a string") if not template_helper.is_template_string(str(value)): raise vol.Invalid("template value does not contain a dynamic template") + if not (hass := _async_get_hass_or_none()): + # pylint: disable-next=import-outside-toplevel + from .frame import ReportBehavior, report_usage - template_value = template_helper.Template(str(value), _async_get_hass_or_none()) + report_usage( + ( + "validates schema outside the event loop, " + "which will stop working in HA Core 2025.10" + ), + core_behavior=ReportBehavior.LOG, + ) + + template_value = template_helper.Template(str(value), hass) try: template_value.ensure_valid() @@ -850,7 +874,7 @@ def url_no_path(value: Any) -> str: url_in = url(value) if urlparse(url_in).path not in ("", "/"): - raise vol.Invalid("url it not allowed to have a path component") + raise vol.Invalid("url is not allowed to have a path component") return url_in @@ -1091,7 +1115,13 @@ def key_dependency[_KT: Hashable, _VT]( def custom_serializer(schema: Any) -> Any: """Serialize additional types for voluptuous_serialize.""" - from .. import data_entry_flow # pylint: disable=import-outside-toplevel + return _custom_serializer(schema, allow_section=True) + + +def _custom_serializer(schema: Any, *, allow_section: bool) -> Any: + """Serialize additional types for voluptuous_serialize.""" + from homeassistant import data_entry_flow # pylint: disable=import-outside-toplevel + from . import selector # pylint: disable=import-outside-toplevel if schema is positive_time_period_dict: @@ -1104,10 +1134,15 @@ def custom_serializer(schema: Any) -> Any: return {"type": "boolean"} if isinstance(schema, data_entry_flow.section): + if not allow_section: + raise ValueError("Nesting expandable sections is not supported") return { "type": "expandable", "schema": voluptuous_serialize.convert( - schema.schema, custom_serializer=custom_serializer + schema.schema, + custom_serializer=functools.partial( + _custom_serializer, allow_section=False + ), ), "expanded": not schema.options["collapsed"], } @@ -1305,9 +1340,28 @@ TARGET_SERVICE_FIELDS = { _HAS_ENTITY_SERVICE_FIELD = has_at_least_one_key(*ENTITY_SERVICE_FIELDS) +def is_entity_service_schema(validator: VolSchemaType) -> bool: + """Check if the passed validator is an entity schema validator. 
+ + The validator must be either of: + - A validator returned by cv._make_entity_service_schema + - A validator returned by cv._make_entity_service_schema, wrapped in a vol.Schema + - A validator returned by cv._make_entity_service_schema, wrapped in a vol.All + Nesting is allowed. + """ + if hasattr(validator, "_entity_service_schema"): + return True + if isinstance(validator, (vol.All)): + return any(is_entity_service_schema(val) for val in validator.validators) + if isinstance(validator, (vol.Schema)): + return is_entity_service_schema(validator.schema) + + return False + + def _make_entity_service_schema(schema: dict, extra: int) -> VolSchemaType: """Create an entity service schema.""" - return vol.All( + validator = vol.All( vol.Schema( { # The frontend stores data here. Don't use in core. @@ -1319,6 +1373,8 @@ def _make_entity_service_schema(schema: dict, extra: int) -> VolSchemaType: ), _HAS_ENTITY_SERVICE_FIELD, ) + setattr(validator, "_entity_service_schema", True) + return validator BASE_ENTITY_SCHEMA = _make_entity_service_schema({}, vol.PREVENT_EXTRA) @@ -1518,10 +1574,10 @@ TIME_CONDITION_SCHEMA = vol.All( **CONDITION_BASE_SCHEMA, vol.Required(CONF_CONDITION): "time", vol.Optional("before"): vol.Any( - time, vol.All(str, entity_domain(["input_datetime", "sensor"])) + time, vol.All(str, entity_domain(["input_datetime", "time", "sensor"])) ), vol.Optional("after"): vol.Any( - time, vol.All(str, entity_domain(["input_datetime", "sensor"])) + time, vol.All(str, entity_domain(["input_datetime", "time", "sensor"])) ), vol.Optional("weekday"): weekdays, } @@ -1714,6 +1770,32 @@ CONDITION_ACTION_SCHEMA: vol.Schema = vol.Schema( ) ) + +def _trigger_pre_validator(value: Any | None) -> Any: + """Rewrite trigger `trigger` to `platform`. + + `platform` has been renamed to `trigger` in user documentation and in the automation + editor. The Python trigger implementation still uses `platform`, so we need to + rename `trigger` to `platform. + """ + + if not isinstance(value, Mapping): + # If the value is not a mapping, we let that be handled by the TRIGGER_SCHEMA + return value + + if CONF_TRIGGER in value: + if CONF_PLATFORM in value: + raise vol.Invalid( + "Cannot specify both 'platform' and 'trigger'. Please use 'trigger' only." + ) + value = dict(value) + value[CONF_PLATFORM] = value.pop(CONF_TRIGGER) + elif CONF_PLATFORM not in value: + raise vol.Invalid("required key not provided", [CONF_TRIGGER]) + + return value + + TRIGGER_BASE_SCHEMA = vol.Schema( { vol.Optional(CONF_ALIAS): str, @@ -1728,6 +1810,19 @@ TRIGGER_BASE_SCHEMA = vol.Schema( _base_trigger_validator_schema = TRIGGER_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA) +def _base_trigger_list_flatten(triggers: list[Any]) -> list[Any]: + """Flatten trigger arrays containing 'triggers:' sublists into a single list of triggers.""" + flatlist = [] + for t in triggers: + if CONF_TRIGGERS in t and len(t) == 1: + triggerlist = ensure_list(t[CONF_TRIGGERS]) + flatlist.extend(triggerlist) + else: + flatlist.append(t) + + return flatlist + + # This is first round of validation, we don't want to process the config here already, # just ensure basics as platform and ID are there. 
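
The two helpers above normalize user trigger configuration before schema validation: nested `triggers:` wrappers are flattened into one list, and the user-facing `trigger:` key is rewritten to the internal `platform:` key. A standalone sketch of that normalization with plain dicts (the real validators additionally run voluptuous schemas and `ensure_list`):

```python
# Simplified sketch of the trigger-config normalization; illustrative only.
from typing import Any


def normalize_triggers(triggers: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Flatten bare {'triggers': [...]} wrappers and rename 'trigger' -> 'platform'."""
    flat: list[dict[str, Any]] = []
    for item in triggers:
        if set(item) == {"triggers"}:  # wrapper containing only a sublist
            flat.extend(item["triggers"])
        else:
            flat.append(item)

    normalized: list[dict[str, Any]] = []
    for item in flat:
        if "trigger" in item and "platform" in item:
            raise ValueError("Cannot specify both 'platform' and 'trigger'")
        if "trigger" in item:
            item = dict(item)
            item["platform"] = item.pop("trigger")
        # (the real validator also rejects items that have neither key)
        normalized.append(item)
    return normalized


print(normalize_triggers(
    [{"triggers": [{"trigger": "state", "entity_id": "light.kitchen"}]}]
))
# -> [{'entity_id': 'light.kitchen', 'platform': 'state'}]
```
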
def _base_trigger_validator(value: Any) -> Any: @@ -1735,7 +1830,11 @@ def _base_trigger_validator(value: Any) -> Any: return value -TRIGGER_SCHEMA = vol.All(ensure_list, [_base_trigger_validator]) +TRIGGER_SCHEMA = vol.All( + ensure_list, + _base_trigger_list_flatten, + [vol.All(_trigger_pre_validator, _base_trigger_validator)], +) _SCRIPT_DELAY_SCHEMA = vol.Schema( { diff --git a/homeassistant/helpers/data_entry_flow.py b/homeassistant/helpers/data_entry_flow.py index b2cad292e3d..e98061d50b7 100644 --- a/homeassistant/helpers/data_entry_flow.py +++ b/homeassistant/helpers/data_entry_flow.py @@ -18,7 +18,7 @@ from . import config_validation as cv _FlowManagerT = TypeVar( "_FlowManagerT", - bound=data_entry_flow.FlowManager[Any], + bound=data_entry_flow.FlowManager[Any, Any, Any], default=data_entry_flow.FlowManager, ) @@ -71,7 +71,7 @@ class FlowManagerIndexView(_BaseFlowManagerView[_FlowManagerT]): async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Initialize a POST request. - Override `_post_impl` in subclasses which need + Override `post` and call `_post_impl` in subclasses which need to implement their own `RequestDataValidator` """ return await self._post_impl(request, data) diff --git a/homeassistant/helpers/deprecation.py b/homeassistant/helpers/deprecation.py index 65e8f4ef97e..81f7821ec79 100644 --- a/homeassistant/helpers/deprecation.py +++ b/homeassistant/helpers/deprecation.py @@ -3,7 +3,8 @@ from __future__ import annotations from collections.abc import Callable -from enum import Enum +from contextlib import suppress +from enum import Enum, EnumType, _EnumDict import functools import inspect import logging @@ -164,6 +165,30 @@ def _print_deprecation_warning_internal( breaks_in_ha_version: str | None, *, log_when_no_integration_is_found: bool, +) -> None: + # Suppress ImportError due to use of deprecated enum in core.py + # Can be removed in HA Core 2025.1 + with suppress(ImportError): + _print_deprecation_warning_internal_impl( + obj_name, + module_name, + replacement, + description, + verb, + breaks_in_ha_version, + log_when_no_integration_is_found=log_when_no_integration_is_found, + ) + + +def _print_deprecation_warning_internal_impl( + obj_name: str, + module_name: str, + replacement: str, + description: str, + verb: str, + breaks_in_ha_version: str | None, + *, + log_when_no_integration_is_found: bool, ) -> None: # pylint: disable=import-outside-toplevel from homeassistant.core import async_get_hass_or_none @@ -338,3 +363,35 @@ def all_with_deprecated_constants(module_globals: dict[str, Any]) -> list[str]: for name in module_globals_keys if name.startswith(_PREFIX_DEPRECATED) ] + + +class EnumWithDeprecatedMembers(EnumType): + """Enum with deprecated members.""" + + def __new__( + mcs, # noqa: N804 ruff bug, ruff does not understand this is a metaclass + cls: str, + bases: tuple[type, ...], + classdict: _EnumDict, + *, + deprecated: dict[str, tuple[str, str]], + **kwds: Any, + ) -> Any: + """Create a new class.""" + classdict["__deprecated__"] = deprecated + return super().__new__(mcs, cls, bases, classdict, **kwds) + + def __getattribute__(cls, name: str) -> Any: + """Warn if accessing a deprecated member.""" + deprecated = super().__getattribute__("__deprecated__") + if name in deprecated: + _print_deprecation_warning_internal( + f"{cls.__name__}.{name}", + cls.__module__, + f"{deprecated[name][0]}", + "enum member", + "used", + deprecated[name][1], + log_when_no_integration_is_found=False, + ) + return super().__getattribute__(name) 
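
The `EnumWithDeprecatedMembers` metaclass added to `deprecation.py` above intercepts attribute access on the enum class and reports deprecated member names. Below is a simplified, self-contained sketch of the same idea; it uses `warnings` instead of the Home Assistant deprecation helpers, and the enum and version strings are illustrative:

```python
# Standalone sketch of an enum metaclass that warns on deprecated members.
from enum import Enum, EnumType
from typing import Any
import warnings


class EnumWithDeprecated(EnumType):
    """Warn when a deprecated enum member is accessed (simplified sketch)."""

    def __new__(mcs, cls, bases, classdict, *, deprecated=None, **kwds):
        # Store the deprecation map on the class being created.
        classdict["__deprecated__"] = deprecated or {}
        return super().__new__(mcs, cls, bases, classdict, **kwds)

    def __getattribute__(cls, name: str) -> Any:
        deprecated = super().__getattribute__("__deprecated__")
        if name in deprecated:
            replacement, breaks_in = deprecated[name]
            warnings.warn(
                f"{cls.__name__}.{name} is deprecated, use {replacement} "
                f"(scheduled removal: {breaks_in})",
                DeprecationWarning,
                stacklevel=2,
            )
        return super().__getattribute__(name)


class Mode(
    Enum,
    metaclass=EnumWithDeprecated,
    deprecated={"LEGACY": ("Mode.MODERN", "2.0")},
):
    MODERN = "modern"
    LEGACY = "legacy"


Mode.LEGACY  # emits a DeprecationWarning pointing at Mode.MODERN
```
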
diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index 30001a64474..981430f192d 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -6,7 +6,7 @@ from collections import defaultdict from collections.abc import Mapping from datetime import datetime from enum import StrEnum -from functools import cached_property, lru_cache, partial +from functools import lru_cache import logging import time from typing import TYPE_CHECKING, Any, Literal, TypedDict @@ -32,22 +32,21 @@ import homeassistant.util.uuid as uuid_util from . import storage, translation from .debounce import Debouncer -from .deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) -from .frame import report +from .frame import ReportBehavior, report_usage from .json import JSON_DUMP, find_paths_unserializable_data, json_bytes, json_fragment from .registry import BaseRegistry, BaseRegistryItems, RegistryIndexType from .singleton import singleton from .typing import UNDEFINED, UndefinedType if TYPE_CHECKING: + # mypy cannot workout _cache Protocol with attrs + from propcache import cached_property as under_cached_property + from homeassistant.config_entries import ConfigEntry from . import entity_registry +else: + from propcache import under_cached_property _LOGGER = logging.getLogger(__name__) @@ -81,16 +80,6 @@ class DeviceEntryDisabler(StrEnum): USER = "user" -# DISABLED_* are deprecated, to be removed in 2022.3 -_DEPRECATED_DISABLED_CONFIG_ENTRY = DeprecatedConstantEnum( - DeviceEntryDisabler.CONFIG_ENTRY, "2025.1" -) -_DEPRECATED_DISABLED_INTEGRATION = DeprecatedConstantEnum( - DeviceEntryDisabler.INTEGRATION, "2025.1" -) -_DEPRECATED_DISABLED_USER = DeprecatedConstantEnum(DeviceEntryDisabler.USER, "2025.1") - - class DeviceInfo(TypedDict, total=False): """Entity device information for device registry.""" @@ -277,7 +266,7 @@ def _validate_configuration_url(value: Any) -> str | None: return url_as_str -@attr.s(frozen=True) +@attr.s(frozen=True, slots=True) class DeviceEntry: """Device Registry Entry.""" @@ -305,6 +294,7 @@ class DeviceEntry: via_device_id: str | None = attr.ib(default=None) # This value is not stored, just used to keep track of events to fire. 
is_new: bool = attr.ib(default=False) + _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False) @property def disabled(self) -> bool: @@ -341,7 +331,7 @@ class DeviceEntry: "via_device_id": self.via_device_id, } - @cached_property + @under_cached_property def json_repr(self) -> bytes | None: """Return a cached JSON representation of the entry.""" try: @@ -357,7 +347,7 @@ class DeviceEntry: ) return None - @cached_property + @under_cached_property def as_storage_fragment(self) -> json_fragment: """Return a json fragment for storage.""" return json_fragment( @@ -367,7 +357,7 @@ class DeviceEntry: "config_entries": list(self.config_entries), "configuration_url": self.configuration_url, "connections": list(self.connections), - "created_at": self.created_at.isoformat(), + "created_at": self.created_at, "disabled_by": self.disabled_by, "entry_type": self.entry_type, "hw_version": self.hw_version, @@ -377,7 +367,7 @@ class DeviceEntry: "manufacturer": self.manufacturer, "model": self.model, "model_id": self.model_id, - "modified_at": self.modified_at.isoformat(), + "modified_at": self.modified_at, "name_by_user": self.name_by_user, "name": self.name, "primary_config_entry": self.primary_config_entry, @@ -389,7 +379,7 @@ class DeviceEntry: ) -@attr.s(frozen=True) +@attr.s(frozen=True, slots=True) class DeletedDeviceEntry: """Deleted Device Registry Entry.""" @@ -400,6 +390,7 @@ class DeletedDeviceEntry: orphaned_timestamp: float | None = attr.ib() created_at: datetime = attr.ib(factory=utcnow) modified_at: datetime = attr.ib(factory=utcnow) + _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False) def to_device_entry( self, @@ -418,7 +409,7 @@ class DeletedDeviceEntry: is_new=True, ) - @cached_property + @under_cached_property def as_storage_fragment(self) -> json_fragment: """Return a json fragment for storage.""" return json_fragment( @@ -426,11 +417,11 @@ class DeletedDeviceEntry: { "config_entries": list(self.config_entries), "connections": list(self.connections), - "created_at": self.created_at.isoformat(), + "created_at": self.created_at, "identifiers": list(self.identifiers), "id": self.id, "orphaned_timestamp": self.orphaned_timestamp, - "modified_at": self.modified_at.isoformat(), + "modified_at": self.modified_at, } ) ) @@ -815,27 +806,23 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): name = default_name if via_device is not None and via_device is not UNDEFINED: - via = self.async_get_device(identifiers={via_device}) + if (via := self.async_get_device(identifiers={via_device})) is None: + report_usage( + "calls `device_registry.async_get_or_create` referencing a " + f"non existing `via_device` {via_device}, " + f"with device info: {device_info}", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.12.0", + ) + via_device_id: str | UndefinedType = via.id if via else UNDEFINED else: via_device_id = UNDEFINED - if isinstance(entry_type, str) and not isinstance(entry_type, DeviceEntryType): - report( # type: ignore[unreachable] - ( - "uses str for device registry entry_type. 
This is deprecated and" - " will stop working in Home Assistant 2022.3, it should be updated" - " to use DeviceEntryType instead" - ), - error_if_core=False, - ) - entry_type = DeviceEntryType(entry_type) - device = self.async_update_device( device.id, allow_collisions=True, add_config_entry_id=config_entry_id, - add_config_entry=config_entry, configuration_url=configuration_url, device_info_type=device_info_type, disabled_by=disabled_by, @@ -863,7 +850,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): self, device_id: str, *, - add_config_entry: ConfigEntry | UndefinedType = UNDEFINED, add_config_entry_id: str | UndefinedType = UNDEFINED, # Temporary flag so we don't blow up when collisions are implicitly introduced # by calls to async_get_or_create. Must not be set by integrations. @@ -898,13 +884,11 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): config_entries = old.config_entries - if add_config_entry_id is not UNDEFINED and add_config_entry is UNDEFINED: - config_entry = self.hass.config_entries.async_get_entry(add_config_entry_id) - if config_entry is None: + if add_config_entry_id is not UNDEFINED: + if self.hass.config_entries.async_get_entry(add_config_entry_id) is None: raise HomeAssistantError( f"Can't link device to unknown config entry {add_config_entry_id}" ) - add_config_entry = config_entry if not new_connections and not new_identifiers: raise HomeAssistantError( @@ -921,19 +905,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): "Cannot define both merge_identifiers and new_identifiers" ) - if isinstance(disabled_by, str) and not isinstance( - disabled_by, DeviceEntryDisabler - ): - report( # type: ignore[unreachable] - ( - "uses str for device registry disabled_by. 
This is deprecated and" - " will stop working in Home Assistant 2022.3, it should be updated" - " to use DeviceEntryDisabler instead" - ), - error_if_core=False, - ) - disabled_by = DeviceEntryDisabler(disabled_by) - if ( suggested_area is not None and suggested_area is not UNDEFINED @@ -948,11 +919,11 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): area = ar.async_get(self.hass).async_get_or_create(suggested_area) area_id = area.id - if add_config_entry is not UNDEFINED: + if add_config_entry_id is not UNDEFINED: primary_entry_id = old.primary_config_entry if ( device_info_type == "primary" - and add_config_entry.entry_id != primary_entry_id + and add_config_entry_id != primary_entry_id ): if ( primary_entry_id is None @@ -963,11 +934,11 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): ) or primary_entry.domain in LOW_PRIO_CONFIG_ENTRY_DOMAINS ): - new_values["primary_config_entry"] = add_config_entry.entry_id - old_values["primary_config_entry"] = old.primary_config_entry + new_values["primary_config_entry"] = add_config_entry_id + old_values["primary_config_entry"] = primary_entry_id - if add_config_entry.entry_id not in old.config_entries: - config_entries = old.config_entries | {add_config_entry.entry_id} + if add_config_entry_id not in old.config_entries: + config_entries = old.config_entries | {add_config_entry_id} if ( remove_config_entry_id is not UNDEFINED @@ -1493,11 +1464,3 @@ def _normalize_connections(connections: set[tuple[str, str]]) -> set[tuple[str, (key, format_mac(value)) if key == CONNECTION_NETWORK_MAC else (key, value) for key, value in connections } - - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/helpers/discovery.py b/homeassistant/helpers/discovery.py index 9f656dad56c..7c1b5ac4a64 100644 --- a/homeassistant/helpers/discovery.py +++ b/homeassistant/helpers/discovery.py @@ -14,8 +14,8 @@ from typing import Any, TypedDict from homeassistant import core, setup from homeassistant.const import Platform from homeassistant.loader import bind_hass +from homeassistant.util.signal_type import SignalTypeFormat -from ..util.signal_type import SignalTypeFormat from .dispatcher import async_dispatcher_connect, async_dispatcher_send_internal from .typing import ConfigType, DiscoveryInfoType diff --git a/homeassistant/helpers/discovery_flow.py b/homeassistant/helpers/discovery_flow.py index 9ec0b01dc56..fd41c7ffb44 100644 --- a/homeassistant/helpers/discovery_flow.py +++ b/homeassistant/helpers/discovery_flow.py @@ -3,25 +3,49 @@ from __future__ import annotations from collections.abc import Coroutine -from typing import Any, NamedTuple +import dataclasses +from typing import TYPE_CHECKING, Any, NamedTuple, Self -from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import EVENT_HOMEASSISTANT_STARTED from homeassistant.core import CoreState, Event, HomeAssistant, callback from homeassistant.loader import bind_hass from homeassistant.util.async_ import gather_with_limited_concurrency from homeassistant.util.hass_dict import HassKey +if TYPE_CHECKING: + from homeassistant.config_entries import ConfigFlowContext, ConfigFlowResult + FLOW_INIT_LIMIT = 20 DISCOVERY_FLOW_DISPATCHER: HassKey[FlowDispatcher] = HassKey( 
"discovery_flow_dispatcher" ) +@dataclasses.dataclass(kw_only=True, slots=True) +class DiscoveryKey: + """Serializable discovery key.""" + + domain: str + key: str | tuple[str, ...] + version: int + + @classmethod + def from_json_dict(cls, json_dict: dict[str, Any]) -> Self: + """Construct from JSON dict.""" + if type(key := json_dict["key"]) is list: + key = tuple(key) + return cls(domain=json_dict["domain"], key=key, version=json_dict["version"]) + + @bind_hass @callback def async_create_flow( - hass: HomeAssistant, domain: str, context: dict[str, Any], data: Any + hass: HomeAssistant, + domain: str, + context: ConfigFlowContext, + data: Any, + *, + discovery_key: DiscoveryKey | None = None, ) -> None: """Create a discovery flow.""" dispatcher: FlowDispatcher | None = None @@ -31,6 +55,9 @@ def async_create_flow( dispatcher = hass.data[DISCOVERY_FLOW_DISPATCHER] = FlowDispatcher(hass) dispatcher.async_setup() + if discovery_key: + context = context | {"discovery_key": discovery_key} + if not dispatcher or dispatcher.started: if init_coro := _async_init_flow(hass, domain, context, data): hass.async_create_background_task( @@ -43,7 +70,7 @@ def async_create_flow( @callback def _async_init_flow( - hass: HomeAssistant, domain: str, context: dict[str, Any], data: Any + hass: HomeAssistant, domain: str, context: ConfigFlowContext, data: Any ) -> Coroutine[None, None, ConfigFlowResult] | None: """Create a discovery flow.""" # Avoid spawning flows that have the same initial discovery data @@ -51,7 +78,9 @@ def _async_init_flow( # which can overload devices since zeroconf/ssdp updates can happen # multiple times in the same minute if ( - hass.config_entries.flow.async_has_matching_flow(domain, context, data) + hass.config_entries.flow.async_has_matching_discovery_flow( + domain, context, data + ) or hass.is_stopping ): return None @@ -69,7 +98,7 @@ class PendingFlowKey(NamedTuple): class PendingFlowValue(NamedTuple): """Value for pending flows.""" - context: dict[str, Any] + context: ConfigFlowContext data: Any @@ -108,7 +137,7 @@ class FlowDispatcher: await gather_with_limited_concurrency(FLOW_INIT_LIMIT, *init_coros) @callback - def async_create(self, domain: str, context: dict[str, Any], data: Any) -> None: + def async_create(self, domain: str, context: ConfigFlowContext, data: Any) -> None: """Create and add or queue a flow.""" key = PendingFlowKey(domain, context["source"]) values = PendingFlowValue(context, data) diff --git a/homeassistant/helpers/dispatcher.py b/homeassistant/helpers/dispatcher.py index 173e441781c..a5a790b7ce5 100644 --- a/homeassistant/helpers/dispatcher.py +++ b/homeassistant/helpers/dispatcher.py @@ -151,11 +151,11 @@ def _format_err[*_Ts]( *args: Any, ) -> str: """Format error message.""" - return "Exception in {} when dispatching '{}': {}".format( + + return ( # Functions wrapped in partial do not have a __name__ - getattr(target, "__name__", None) or str(target), - signal, - args, + f"Exception in {getattr(target, "__name__", None) or target} " + f"when dispatching '{signal}': {args}" ) diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index dbc1a036ef6..91845cdf521 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -7,9 +7,8 @@ import asyncio from collections import deque from collections.abc import Callable, Coroutine, Iterable, Mapping import dataclasses -from enum import Enum, IntFlag, auto +from enum import Enum, auto import functools as ft -from functools import cached_property import logging 
import math from operator import attrgetter @@ -19,9 +18,9 @@ import time from types import FunctionType from typing import TYPE_CHECKING, Any, Final, Literal, NotRequired, TypedDict, final +from propcache import cached_property import voluptuous as vol -from homeassistant.config import DATA_CUSTOMIZE from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_ATTRIBUTION, @@ -49,6 +48,7 @@ from homeassistant.core import ( get_hassjob_callable_job_type, get_release_channel, ) +from homeassistant.core_config import DATA_CUSTOMIZE from homeassistant.exceptions import ( HomeAssistantError, InvalidStateError, @@ -337,7 +337,9 @@ class CachedProperties(type): Also invalidates the corresponding cached_property by calling delattr on it. """ - if getattr(o, private_attr_name, _SENTINEL) == val: + if ( + old_val := getattr(o, private_attr_name, _SENTINEL) + ) == val and type(old_val) is type(val): return setattr(o, private_attr_name, val) # Invalidate the cache of the cached property @@ -645,6 +647,22 @@ class Entity( f".{self.translation_key}.name" ) + @cached_property + def _unit_of_measurement_translation_key(self) -> str | None: + """Return translation key for unit of measurement.""" + if self.translation_key is None: + return None + if self.platform is None: + raise ValueError( + f"Entity {type(self)} cannot have a translation key for " + "unit of measurement before being added to the entity platform" + ) + platform = self.platform + return ( + f"component.{platform.platform_name}.entity.{platform.domain}" + f".{self.translation_key}.unit_of_measurement" + ) + def _substitute_name_placeholders(self, name: str) -> str: """Substitute placeholders in entity name.""" try: @@ -1621,31 +1639,6 @@ class Entity( self.hass, integration_domain=platform_name, module=type(self).__module__ ) - @callback - def _report_deprecated_supported_features_values( - self, replacement: IntFlag - ) -> None: - """Report deprecated supported features values.""" - if self._deprecated_supported_features_reported is True: - return - self._deprecated_supported_features_reported = True - report_issue = self._suggest_report_issue() - report_issue += ( - " and reference " - "https://developers.home-assistant.io/blog/2023/12/28/support-feature-magic-numbers-deprecation" - ) - _LOGGER.warning( - ( - "Entity %s (%s) is using deprecated supported features" - " values which will be removed in HA Core 2025.1. Instead it should use" - " %s, please %s" - ), - self.entity_id, - type(self), - repr(replacement), - report_issue, - ) - class ToggleEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes toggle entities.""" diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index 76abb3020d1..1be7289401c 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -65,10 +65,13 @@ async def async_update_entity(hass: HomeAssistant, entity_id: str) -> None: class EntityComponent(Generic[_EntityT]): - """The EntityComponent manages platforms that manages entities. + """The EntityComponent manages platforms that manage entities. + + An example of an entity component is 'light', which manages platforms such + as 'hue.light'. This class has the following responsibilities: - - Process the configuration and set up a platform based component. + - Process the configuration and set up a platform based component, for example light. - Manage the platforms and their entities. - Help extract the entities from a service call. 
- Listen for discovery events for platforms related to the domain. diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index ce107d63b73..0d7614c569c 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -111,7 +111,11 @@ class EntityPlatformModule(Protocol): class EntityPlatform: - """Manage the entities for a single platform.""" + """Manage the entities for a single platform. + + An example of an entity platform is 'hue.light', which is managed by + the entity component 'light'. + """ def __init__( self, @@ -141,6 +145,7 @@ class EntityPlatform: self.platform_translations: dict[str, str] = {} self.object_id_component_translations: dict[str, str] = {} self.object_id_platform_translations: dict[str, str] = {} + self.default_language_platform_translations: dict[str, str] = {} self._tasks: list[asyncio.Task[None]] = [] # Stop tracking tasks after setup is completed self._setup_complete = False @@ -476,6 +481,14 @@ class EntityPlatform: self.object_id_platform_translations = await self._async_get_translations( object_id_language, "entity", self.platform_name ) + if config_language == languages.DEFAULT_LANGUAGE: + self.default_language_platform_translations = self.platform_translations + else: + self.default_language_platform_translations = ( + await self._async_get_translations( + languages.DEFAULT_LANGUAGE, "entity", self.platform_name + ) + ) def _schedule_add_entities( self, new_entities: Iterable[Entity], update_before_add: bool = False @@ -584,7 +597,7 @@ class EntityPlatform: """Add entities for a single platform without updating. In this case we are not updating the entities before adding them - which means its unlikely that we will not have to yield control + which means it is likely that we will not have to yield control to the event loop so we can await the coros directly without scheduling them as tasks. 
""" @@ -728,7 +741,6 @@ class EntityPlatform: return suggested_object_id: str | None = None - generate_new_entity_id = False entity_name = entity.name if entity_name is UNDEFINED: @@ -838,33 +850,39 @@ class EntityPlatform: entity.device_entry = device entity.entity_id = entry.entity_id - # We won't generate an entity ID if the platform has already set one - # We will however make sure that platform cannot pick a registered ID - elif entity.entity_id is not None and entity_registry.async_is_registered( - entity.entity_id - ): - # If entity already registered, convert entity id to suggestion - suggested_object_id = split_entity_id(entity.entity_id)[1] - generate_new_entity_id = True + else: # entity.unique_id is None + generate_new_entity_id = False + # We won't generate an entity ID if the platform has already set one + # We will however make sure that platform cannot pick a registered ID + if entity.entity_id is not None and entity_registry.async_is_registered( + entity.entity_id + ): + # If entity already registered, convert entity id to suggestion + suggested_object_id = split_entity_id(entity.entity_id)[1] + generate_new_entity_id = True - # Generate entity ID - if entity.entity_id is None or generate_new_entity_id: - suggested_object_id = ( - suggested_object_id or entity.suggested_object_id or DEVICE_DEFAULT_NAME - ) + # Generate entity ID + if entity.entity_id is None or generate_new_entity_id: + suggested_object_id = ( + suggested_object_id + or entity.suggested_object_id + or DEVICE_DEFAULT_NAME + ) - if self.entity_namespace is not None: - suggested_object_id = f"{self.entity_namespace} {suggested_object_id}" - entity.entity_id = entity_registry.async_generate_entity_id( - self.domain, suggested_object_id, self.entities - ) + if self.entity_namespace is not None: + suggested_object_id = ( + f"{self.entity_namespace} {suggested_object_id}" + ) + entity.entity_id = entity_registry.async_generate_entity_id( + self.domain, suggested_object_id, self.entities + ) - # Make sure it is valid in case an entity set the value themselves - # Avoid calling valid_entity_id if we already know it is valid - # since it already made it in the registry - if not entity.registry_entry and not valid_entity_id(entity.entity_id): - entity.add_to_platform_abort() - raise HomeAssistantError(f"Invalid entity ID: {entity.entity_id}") + # Make sure it is valid in case an entity set the value themselves + # Avoid calling valid_entity_id if we already know it is valid + # since it already made it in the registry + if not valid_entity_id(entity.entity_id): + entity.add_to_platform_abort() + raise HomeAssistantError(f"Invalid entity ID: {entity.entity_id}") already_exists, restored = self._entity_id_already_exists(entity.entity_id) diff --git a/homeassistant/helpers/entity_registry.py b/homeassistant/helpers/entity_registry.py index 5d17c0c46b1..9d50b7ae83b 100644 --- a/homeassistant/helpers/entity_registry.py +++ b/homeassistant/helpers/entity_registry.py @@ -14,7 +14,6 @@ from collections import defaultdict from collections.abc import Callable, Container, Hashable, KeysView, Mapping from datetime import datetime, timedelta from enum import StrEnum -from functools import cached_property import logging import time from typing import TYPE_CHECKING, Any, Literal, NotRequired, TypedDict @@ -65,7 +64,12 @@ from .singleton import singleton from .typing import UNDEFINED, UndefinedType if TYPE_CHECKING: + # mypy cannot workout _cache Protocol with attrs + from propcache import cached_property as under_cached_property + 
from homeassistant.config_entries import ConfigEntry +else: + from propcache import under_cached_property DATA_REGISTRY: HassKey[EntityRegistry] = HassKey("entity_registry") EVENT_ENTITY_REGISTRY_UPDATED: EventType[EventEntityRegistryUpdatedData] = EventType( @@ -162,7 +166,7 @@ def _protect_entity_options( return ReadOnlyDict({key: ReadOnlyDict(val) for key, val in data.items()}) -@attr.s(frozen=True) +@attr.s(frozen=True, slots=True) class RegistryEntry: """Entity Registry Entry.""" @@ -201,6 +205,7 @@ class RegistryEntry: supported_features: int = attr.ib(default=0) translation_key: str | None = attr.ib(default=None) unit_of_measurement: str | None = attr.ib(default=None) + _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False) @domain.default def _domain_default(self) -> str: @@ -235,8 +240,11 @@ class RegistryEntry: display_dict["ec"] = ENTITY_CATEGORY_VALUE_TO_INDEX[category] if self.hidden_by is not None: display_dict["hb"] = True - if not self.name and self.has_entity_name: - display_dict["en"] = self.original_name + if self.has_entity_name: + display_dict["hn"] = True + name = self.name or self.original_name + if name is not None: + display_dict["en"] = name if self.domain == "sensor" and (sensor_options := self.options.get("sensor")): if (precision := sensor_options.get("display_precision")) is not None or ( precision := sensor_options.get("suggested_display_precision") @@ -244,7 +252,7 @@ class RegistryEntry: display_dict["dp"] = precision return display_dict - @cached_property + @under_cached_property def display_json_repr(self) -> bytes | None: """Return a cached partial JSON representation of the entry. @@ -264,7 +272,7 @@ class RegistryEntry: return None return json_repr - @cached_property + @under_cached_property def as_partial_dict(self) -> dict[str, Any]: """Return a partial dict representation of the entry.""" # Convert sets and tuples to lists @@ -293,7 +301,7 @@ class RegistryEntry: "unique_id": self.unique_id, } - @cached_property + @under_cached_property def extended_dict(self) -> dict[str, Any]: """Return a extended dict representation of the entry.""" # Convert sets and tuples to lists @@ -308,7 +316,7 @@ class RegistryEntry: "original_icon": self.original_icon, } - @cached_property + @under_cached_property def partial_json_repr(self) -> bytes | None: """Return a cached partial JSON representation of the entry.""" try: @@ -324,7 +332,7 @@ class RegistryEntry: ) return None - @cached_property + @under_cached_property def as_storage_fragment(self) -> json_fragment: """Return a json fragment for storage.""" return json_fragment( @@ -335,7 +343,7 @@ class RegistryEntry: "categories": self.categories, "capabilities": self.capabilities, "config_entry_id": self.config_entry_id, - "created_at": self.created_at.isoformat(), + "created_at": self.created_at, "device_class": self.device_class, "device_id": self.device_id, "disabled_by": self.disabled_by, @@ -346,7 +354,7 @@ class RegistryEntry: "id": self.id, "has_entity_name": self.has_entity_name, "labels": list(self.labels), - "modified_at": self.modified_at.isoformat(), + "modified_at": self.modified_at, "name": self.name, "options": self.options, "original_device_class": self.original_device_class, @@ -391,7 +399,7 @@ class RegistryEntry: hass.states.async_set(self.entity_id, STATE_UNAVAILABLE, attrs) -@attr.s(frozen=True) +@attr.s(frozen=True, slots=True) class DeletedRegistryEntry: """Deleted Entity Registry Entry.""" @@ -404,23 +412,24 @@ class DeletedRegistryEntry: orphaned_timestamp: float | None = 
attr.ib() created_at: datetime = attr.ib(factory=utcnow) modified_at: datetime = attr.ib(factory=utcnow) + _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False) @domain.default def _domain_default(self) -> str: """Compute domain value.""" return split_entity_id(self.entity_id)[0] - @cached_property + @under_cached_property def as_storage_fragment(self) -> json_fragment: """Return a json fragment for storage.""" return json_fragment( json_bytes( { "config_entry_id": self.config_entry_id, - "created_at": self.created_at.isoformat(), + "created_at": self.created_at, "entity_id": self.entity_id, "id": self.id, - "modified_at": self.modified_at.isoformat(), + "modified_at": self.modified_at, "orphaned_timestamp": self.orphaned_timestamp, "platform": self.platform, "unique_id": self.unique_id, diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 38f461d8d7a..72a4ef3c050 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -90,7 +90,6 @@ RANDOM_MICROSECOND_MIN = 50000 RANDOM_MICROSECOND_MAX = 500000 _TypedDictT = TypeVar("_TypedDictT", bound=Mapping[str, Any]) -_StateEventDataT = TypeVar("_StateEventDataT", bound=EventStateEventData) @dataclass(slots=True, frozen=True) @@ -224,10 +223,10 @@ def async_track_state_change( Must be run within the event loop. """ - frame.report( + frame.report_usage( "calls `async_track_state_change` instead of `async_track_state_change_event`" " which is deprecated and will be removed in Home Assistant 2025.5", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, ) if from_state is not None: @@ -322,6 +321,10 @@ def async_track_state_change_event( for each one, we keep a dict of entity ids that care about the state change events so we can do a fast dict lookup to route events. + The passed in entity_ids will be automatically lower cased. + + EVENT_STATE_CHANGED is fired on each occasion the state is updated + and changed, opposite of EVENT_STATE_REPORTED. """ if not (entity_ids := _async_string_to_lower_list(entity_ids)): return _remove_empty_listener @@ -329,7 +332,7 @@ def async_track_state_change_event( @callback -def _async_dispatch_entity_id_event_soon( +def _async_dispatch_entity_id_event_soon[_StateEventDataT: EventStateEventData]( hass: HomeAssistant, callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], event: Event[_StateEventDataT], @@ -339,7 +342,7 @@ def _async_dispatch_entity_id_event_soon( @callback -def _async_dispatch_entity_id_event( +def _async_dispatch_entity_id_event[_StateEventDataT: EventStateEventData]( hass: HomeAssistant, callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], event: Event[_StateEventDataT], @@ -359,7 +362,7 @@ def _async_dispatch_entity_id_event( @callback -def _async_state_filter( +def _async_state_filter[_StateEventDataT: EventStateEventData]( hass: HomeAssistant, callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], event_data: _StateEventDataT, @@ -383,7 +386,10 @@ def _async_track_state_change_event( action: Callable[[Event[EventStateChangedData]], Any], job_type: HassJobType | None, ) -> CALLBACK_TYPE: - """async_track_state_change_event without lowercasing.""" + """Faster version of async_track_state_change_event. + + The passed in entity_ids will not be automatically lower cased. 
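
For integrations still using the deprecated `async_track_state_change` wrapper flagged above, the replacement is `async_track_state_change_event`. A minimal usage sketch intended to run inside a loaded Home Assistant instance (for example from an integration); the entity id is illustrative:

```python
# Usage sketch of async_track_state_change_event.
from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
from homeassistant.helpers.event import async_track_state_change_event


@callback
def _handle_state_change(event: Event[EventStateChangedData]) -> None:
    # EVENT_STATE_CHANGED fires only when the state actually changed;
    # unchanged writes are delivered via EVENT_STATE_REPORTED instead.
    data = event.data
    print(data["entity_id"], data["old_state"], data["new_state"])


@callback
def setup_listener(hass: HomeAssistant):
    # Entity ids are lower-cased automatically; the return value unsubscribes.
    return async_track_state_change_event(
        hass, ["sensor.outdoor_temperature"], _handle_state_change
    )
```
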
+ """ return _async_track_event( _KEYED_TRACK_STATE_CHANGE, hass, entity_ids, action, job_type ) @@ -403,7 +409,11 @@ def async_track_state_report_event( action: Callable[[Event[EventStateReportedData]], Any], job_type: HassJobType | None = None, ) -> CALLBACK_TYPE: - """Track EVENT_STATE_REPORTED by entity_id without lowercasing.""" + """Track EVENT_STATE_REPORTED by entity_ids. + + EVENT_STATE_REPORTED is fired on each occasion the state is updated + but not changed, opposite of EVENT_STATE_CHANGED. + """ return _async_track_event( _KEYED_TRACK_STATE_REPORT, hass, entity_ids, action, job_type ) @@ -981,6 +991,17 @@ class TrackTemplateResultInfo: self._last_result: dict[Template, bool | str | TemplateError] = {} + for track_template_ in track_templates: + if track_template_.template.hass: + continue + + frame.report_usage( + "calls async_track_template_result with template without hass", + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.10", + ) + track_template_.template.hass = hass + self._rate_limit = KeyedRateLimit(hass) self._info: dict[Template, RenderInfo] = {} self._track_state_changes: _TrackStateChangeFiltered | None = None diff --git a/homeassistant/helpers/floor_registry.py b/homeassistant/helpers/floor_registry.py index f14edef293a..fcfca8e3212 100644 --- a/homeassistant/helpers/floor_registry.py +++ b/homeassistant/helpers/floor_registry.py @@ -9,7 +9,6 @@ from datetime import datetime from typing import Any, Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.util import slugify from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -17,7 +16,6 @@ from homeassistant.util.hass_dict import HassKey from .normalized_name_base_registry import ( NormalizedNameBaseRegistryEntry, NormalizedNameBaseRegistryItems, - normalize_name, ) from .registry import BaseRegistry from .singleton import singleton @@ -130,15 +128,9 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): """Get all floors.""" return self.floors.values() - @callback def _generate_id(self, name: str) -> str: """Generate floor ID.""" - suggestion = suggestion_base = slugify(name) - tries = 1 - while suggestion in self.floors: - tries += 1 - suggestion = f"{suggestion_base}_{tries}" - return suggestion + return self.floors.generate_id_from_name(name) @callback def async_create( @@ -151,30 +143,26 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): ) -> FloorEntry: """Create a new floor.""" self.hass.verify_event_loop_thread("floor_registry.async_create") + if floor := self.async_get_floor_by_name(name): raise ValueError( f"The name {name} ({floor.normalized_name}) is already in use" ) - normalized_name = normalize_name(name) - floor = FloorEntry( aliases=aliases or set(), icon=icon, floor_id=self._generate_id(name), name=name, - normalized_name=normalized_name, level=level, ) floor_id = floor.floor_id self.floors[floor_id] = floor self.async_schedule_save() + self.hass.bus.async_fire_internal( EVENT_FLOOR_REGISTRY_UPDATED, - EventFloorRegistryUpdatedData( - action="create", - floor_id=floor_id, - ), + EventFloorRegistryUpdatedData(action="create", floor_id=floor_id), ) return floor @@ -215,7 +203,6 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): } if name is not UNDEFINED and name != old.name: changes["name"] = name - changes["normalized_name"] = normalize_name(name) if not changes: return old @@ -243,14 +230,12 
@@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): if data is not None: for floor in data["floors"]: - normalized_name = normalize_name(floor["name"]) floors[floor["floor_id"]] = FloorEntry( aliases=set(floor["aliases"]), icon=floor["icon"], floor_id=floor["floor_id"], name=floor["name"], level=floor["level"], - normalized_name=normalized_name, created_at=datetime.fromisoformat(floor["created_at"]), modified_at=datetime.fromisoformat(floor["modified_at"]), ) diff --git a/homeassistant/helpers/frame.py b/homeassistant/helpers/frame.py index 8a30c26886e..6d03ae4ffd2 100644 --- a/homeassistant/helpers/frame.py +++ b/homeassistant/helpers/frame.py @@ -5,17 +5,23 @@ from __future__ import annotations import asyncio from collections.abc import Callable from dataclasses import dataclass +import enum import functools -from functools import cached_property import linecache import logging import sys from types import FrameType from typing import Any, cast -from homeassistant.core import async_get_hass_or_none +from propcache import cached_property + +from homeassistant.core import HomeAssistant, async_get_hass_or_none from homeassistant.exceptions import HomeAssistantError -from homeassistant.loader import async_suggest_report_issue +from homeassistant.loader import ( + Integration, + async_get_issue_integration, + async_suggest_report_issue, +) _LOGGER = logging.getLogger(__name__) @@ -129,43 +135,177 @@ class MissingIntegrationFrame(HomeAssistantError): def report( what: str, - exclude_integrations: set | None = None, + *, + exclude_integrations: set[str] | None = None, error_if_core: bool = True, + error_if_integration: bool = False, level: int = logging.WARNING, log_custom_component_only: bool = False, - error_if_integration: bool = False, ) -> None: """Report incorrect usage. - Async friendly. + If error_if_core is True, raise instead of log if an integration is not found + when unwinding the stack frame. + If error_if_integration is True, raise instead of log if an integration is found + when unwinding the stack frame. + """ + core_behavior = ReportBehavior.ERROR if error_if_core else ReportBehavior.LOG + core_integration_behavior = ( + ReportBehavior.ERROR if error_if_integration else ReportBehavior.LOG + ) + custom_integration_behavior = core_integration_behavior + + if log_custom_component_only: + if core_behavior is ReportBehavior.LOG: + core_behavior = ReportBehavior.IGNORE + if core_integration_behavior is ReportBehavior.LOG: + core_integration_behavior = ReportBehavior.IGNORE + + report_usage( + what, + core_behavior=core_behavior, + core_integration_behavior=core_integration_behavior, + custom_integration_behavior=custom_integration_behavior, + exclude_integrations=exclude_integrations, + level=level, + ) + + +class ReportBehavior(enum.Enum): + """Enum for behavior on code usage.""" + + IGNORE = enum.auto() + """Ignore the code usage.""" + LOG = enum.auto() + """Log the code usage.""" + ERROR = enum.auto() + """Raise an error on code usage.""" + + +def report_usage( + what: str, + *, + breaks_in_ha_version: str | None = None, + core_behavior: ReportBehavior = ReportBehavior.ERROR, + core_integration_behavior: ReportBehavior = ReportBehavior.LOG, + custom_integration_behavior: ReportBehavior = ReportBehavior.LOG, + exclude_integrations: set[str] | None = None, + integration_domain: str | None = None, + level: int = logging.WARNING, +) -> None: + """Report incorrect code usage. + + :param what: will be wrapped with "Detected that integration 'integration' {what}. 
+ Please create a bug report at https://..." + :param breaks_in_ha_version: if set, the report will be adjusted to specify the + breaking version + :param exclude_integrations: skip specified integration when reviewing the stack. + If no integration is found, the core behavior will be applied + :param integration_domain: fallback for identifying the integration if the + frame is not found """ try: integration_frame = get_integration_frame( exclude_integrations=exclude_integrations ) except MissingIntegrationFrame as err: - msg = f"Detected code that {what}. Please report this issue." - if error_if_core: + if integration := async_get_issue_integration( + hass := async_get_hass_or_none(), integration_domain + ): + _report_integration_domain( + hass, + what, + breaks_in_ha_version, + integration, + core_integration_behavior, + custom_integration_behavior, + level, + ) + return + msg = f"Detected code that {what}. Please report this issue" + if core_behavior is ReportBehavior.ERROR: raise RuntimeError(msg) from err - if not log_custom_component_only: + if core_behavior is ReportBehavior.LOG: + if breaks_in_ha_version: + msg = ( + f"Detected code that {what}. This will stop working in Home " + f"Assistant {breaks_in_ha_version}, please report this issue" + ) _LOGGER.warning(msg, stack_info=True) return - if ( - error_if_integration - or not log_custom_component_only - or integration_frame.custom_integration - ): - _report_integration(what, integration_frame, level, error_if_integration) + integration_behavior = core_integration_behavior + if integration_frame.custom_integration: + integration_behavior = custom_integration_behavior + + if integration_behavior is not ReportBehavior.IGNORE: + _report_integration_frame( + what, + breaks_in_ha_version, + integration_frame, + level, + integration_behavior is ReportBehavior.ERROR, + ) -def _report_integration( +def _report_integration_domain( + hass: HomeAssistant | None, what: str, + breaks_in_ha_version: str | None, + integration: Integration, + core_integration_behavior: ReportBehavior, + custom_integration_behavior: ReportBehavior, + level: int, +) -> None: + """Report incorrect usage in an integration (identified via domain). + + Async friendly. + """ + integration_behavior = core_integration_behavior + if not integration.is_built_in: + integration_behavior = custom_integration_behavior + + if integration_behavior is ReportBehavior.IGNORE: + return + + # Keep track of integrations already reported to prevent flooding + key = f"{integration.domain}:{what}" + if ( + integration_behavior is not ReportBehavior.ERROR + and key in _REPORTED_INTEGRATIONS + ): + return + _REPORTED_INTEGRATIONS.add(key) + + report_issue = async_suggest_report_issue(hass, integration=integration) + integration_type = "" if integration.is_built_in else "custom " + _LOGGER.log( + level, + "Detected that %sintegration '%s' %s. %s %s", + integration_type, + integration.domain, + what, + f"This will stop working in Home Assistant {breaks_in_ha_version}, please" + if breaks_in_ha_version + else "Please", + report_issue, + ) + + if integration_behavior is ReportBehavior.ERROR: + raise RuntimeError( + f"Detected that {integration_type}integration " + f"'{integration.domain}' {what}. Please {report_issue}" + ) + + +def _report_integration_frame( + what: str, + breaks_in_ha_version: str | None, integration_frame: IntegrationFrame, level: int = logging.WARNING, error: bool = False, ) -> None: - """Report incorrect usage in an integration. 
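report_usage replaces the boolean flags of report with an explicit ReportBehavior per caller type plus an optional breaking version. A hedged example of a call site (the message and version are illustrative, not taken from this diff):

import logging

from homeassistant.helpers import frame

frame.report_usage(
    "calls async_do_something without passing hass",  # hypothetical message
    core_behavior=frame.ReportBehavior.ERROR,
    core_integration_behavior=frame.ReportBehavior.LOG,
    custom_integration_behavior=frame.ReportBehavior.LOG,
    breaks_in_ha_version="2025.10",
    level=logging.WARNING,
)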
+ """Report incorrect usage in an integration (identified via frame). Async friendly. """ @@ -183,13 +323,16 @@ def _report_integration( integration_type = "custom " if integration_frame.custom_integration else "" _LOGGER.log( level, - "Detected that %sintegration '%s' %s at %s, line %s: %s, please %s", + "Detected that %sintegration '%s' %s at %s, line %s: %s. %s %s", integration_type, integration_frame.integration, what, integration_frame.relative_filename, integration_frame.line_number, integration_frame.line, + f"This will stop working in Home Assistant {breaks_in_ha_version}, please" + if breaks_in_ha_version + else "Please", report_issue, ) if not error: @@ -199,7 +342,7 @@ def _report_integration( f"'{integration_frame.integration}' {what} at " f"{integration_frame.relative_filename}, line " f"{integration_frame.line_number}: {integration_frame.line}. " - f"Please {report_issue}." + f"Please {report_issue}" ) diff --git a/homeassistant/helpers/hassio.py b/homeassistant/helpers/hassio.py new file mode 100644 index 00000000000..51503f709d6 --- /dev/null +++ b/homeassistant/helpers/hassio.py @@ -0,0 +1,22 @@ +"""Hass.io helper.""" + +import os + +from homeassistant.core import HomeAssistant, callback + + +@callback +def is_hassio(hass: HomeAssistant) -> bool: + """Return true if Hass.io is loaded. + + Async friendly. + """ + return "hassio" in hass.config.components + + +@callback +def get_supervisor_ip() -> str | None: + """Return the supervisor ip address.""" + if "SUPERVISOR" not in os.environ: + return None + return os.environ["SUPERVISOR"].partition(":")[0] diff --git a/homeassistant/helpers/icon.py b/homeassistant/helpers/icon.py index e759719f667..ce8205eb915 100644 --- a/homeassistant/helpers/icon.py +++ b/homeassistant/helpers/icon.py @@ -7,7 +7,7 @@ from collections.abc import Iterable from functools import lru_cache import logging import pathlib -from typing import Any +from typing import Any, cast from homeassistant.core import HomeAssistant, callback from homeassistant.loader import Integration, async_get_integrations @@ -21,12 +21,34 @@ ICON_CACHE: HassKey[_IconsCache] = HassKey("icon_cache") _LOGGER = logging.getLogger(__name__) +def convert_shorthand_service_icon( + value: str | dict[str, str | dict[str, str]], +) -> dict[str, str | dict[str, str]]: + """Convert shorthand service icon to dict.""" + if isinstance(value, str): + return {"service": value} + return value + + +def _load_icons_file( + icons_file: pathlib.Path, +) -> dict[str, Any]: + """Load and parse an icons.json file.""" + icons = load_json_object(icons_file) + if "services" not in icons: + return icons + services = cast(dict[str, str | dict[str, str | dict[str, str]]], icons["services"]) + for service, service_icons in services.items(): + services[service] = convert_shorthand_service_icon(service_icons) + return icons + + def _load_icons_files( icons_files: dict[str, pathlib.Path], ) -> dict[str, dict[str, Any]]: """Load and parse icons.json files.""" return { - component: load_json_object(icons_file) + component: _load_icons_file(icons_file) for component, icons_file in icons_files.items() } diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index be9b57bf814..468539f5a9d 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -8,11 +8,11 @@ from collections.abc import Callable, Collection, Coroutine, Iterable import dataclasses from dataclasses import dataclass, field from enum import Enum, StrEnum, auto -from functools import cached_property 
from itertools import groupby import logging from typing import Any +from propcache import cached_property import voluptuous as vol from homeassistant.components.homeassistant.exposed_entities import async_should_expose @@ -49,6 +49,7 @@ INTENT_NEVERMIND = "HassNevermind" INTENT_SET_POSITION = "HassSetPosition" INTENT_START_TIMER = "HassStartTimer" INTENT_CANCEL_TIMER = "HassCancelTimer" +INTENT_CANCEL_ALL_TIMERS = "HassCancelAllTimers" INTENT_INCREASE_TIMER = "HassIncreaseTimer" INTENT_DECREASE_TIMER = "HassDecreaseTimer" INTENT_PAUSE_TIMER = "HassPauseTimer" @@ -56,6 +57,7 @@ INTENT_UNPAUSE_TIMER = "HassUnpauseTimer" INTENT_TIMER_STATUS = "HassTimerStatus" INTENT_GET_CURRENT_DATE = "HassGetCurrentDate" INTENT_GET_CURRENT_TIME = "HassGetCurrentTime" +INTENT_RESPOND = "HassRespond" SLOT_SCHEMA = vol.Schema({}, extra=vol.ALLOW_EXTRA) @@ -351,6 +353,7 @@ class MatchTargetsCandidate: """Candidate for async_match_targets.""" state: State + is_exposed: bool entity: entity_registry.RegistryEntry | None = None area: area_registry.AreaEntry | None = None floor: floor_registry.FloorEntry | None = None @@ -514,29 +517,31 @@ def async_match_targets( # noqa: C901 if not states: return MatchTargetsResult(False, MatchFailedReason.DOMAIN) - if constraints.assistant: - # Filter by exposure - states = [ - s - for s in states - if async_should_expose(hass, constraints.assistant, s.entity_id) - ] - if not states: - return MatchTargetsResult(False, MatchFailedReason.ASSISTANT) + candidates = [ + MatchTargetsCandidate( + state=state, + is_exposed=( + async_should_expose(hass, constraints.assistant, state.entity_id) + if constraints.assistant + else True + ), + ) + for state in states + ] if constraints.domains and (not filtered_by_domain): # Filter by domain (if we didn't already do it) - states = [s for s in states if s.domain in constraints.domains] - if not states: + candidates = [c for c in candidates if c.state.domain in constraints.domains] + if not candidates: return MatchTargetsResult(False, MatchFailedReason.DOMAIN) if constraints.states: # Filter by state - states = [s for s in states if s.state in constraints.states] - if not states: + candidates = [c for c in candidates if c.state.state in constraints.states] + if not candidates: return MatchTargetsResult(False, MatchFailedReason.STATE) - # Exit early so we can avoid registry lookups + # Try to exit early so we can avoid registry lookups if not ( constraints.name or constraints.features @@ -544,11 +549,18 @@ def async_match_targets( # noqa: C901 or constraints.area_name or constraints.floor_name ): - return MatchTargetsResult(True, states=states) + if constraints.assistant: + # Check exposure + candidates = [c for c in candidates if c.is_exposed] + if not candidates: + return MatchTargetsResult(False, MatchFailedReason.ASSISTANT) + + return MatchTargetsResult(True, states=[c.state for c in candidates]) # We need entity registry entries now er = entity_registry.async_get(hass) - candidates = [MatchTargetsCandidate(s, er.async_get(s.entity_id)) for s in states] + for candidate in candidates: + candidate.entity = er.async_get(candidate.state.entity_id) if constraints.name: # Filter by entity name or alias @@ -637,6 +649,12 @@ def async_match_targets( # noqa: C901 False, MatchFailedReason.AREA, areas=targeted_areas ) + if constraints.assistant: + # Check exposure + candidates = [c for c in candidates if c.is_exposed] + if not candidates: + return MatchTargetsResult(False, MatchFailedReason.ASSISTANT) + if constraints.name and (not 
constraints.allow_duplicate_names): # Check for duplicates if not areas_added: diff --git a/homeassistant/helpers/json.py b/homeassistant/helpers/json.py index 1145d785ed3..ebb74856429 100644 --- a/homeassistant/helpers/json.py +++ b/homeassistant/helpers/json.py @@ -162,13 +162,17 @@ def json_dumps(data: Any) -> str: return json_bytes(data).decode("utf-8") +json_bytes_sorted = partial( + orjson.dumps, + option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SORT_KEYS, + default=json_encoder_default, +) +"""Dump json bytes with keys sorted.""" + + def json_dumps_sorted(data: Any) -> str: """Dump json string with keys sorted.""" - return orjson.dumps( - data, - option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SORT_KEYS, - default=json_encoder_default, - ).decode("utf-8") + return json_bytes_sorted(data).decode("utf-8") JSON_DUMP: Final = json_dumps diff --git a/homeassistant/helpers/label_registry.py b/homeassistant/helpers/label_registry.py index 1007b17bc5d..33a05156328 100644 --- a/homeassistant/helpers/label_registry.py +++ b/homeassistant/helpers/label_registry.py @@ -9,7 +9,6 @@ from datetime import datetime from typing import Any, Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.util import slugify from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -17,7 +16,6 @@ from homeassistant.util.hass_dict import HassKey from .normalized_name_base_registry import ( NormalizedNameBaseRegistryEntry, NormalizedNameBaseRegistryItems, - normalize_name, ) from .registry import BaseRegistry from .singleton import singleton @@ -130,15 +128,9 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): """Get all labels.""" return self.labels.values() - @callback def _generate_id(self, name: str) -> str: - """Initialize ID.""" - suggestion = suggestion_base = slugify(name) - tries = 1 - while suggestion in self.labels: - tries += 1 - suggestion = f"{suggestion_base}_{tries}" - return suggestion + """Generate label ID.""" + return self.labels.generate_id_from_name(name) @callback def async_create( @@ -151,30 +143,26 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): ) -> LabelEntry: """Create a new label.""" self.hass.verify_event_loop_thread("label_registry.async_create") + if label := self.async_get_label_by_name(name): raise ValueError( f"The name {name} ({label.normalized_name}) is already in use" ) - normalized_name = normalize_name(name) - label = LabelEntry( color=color, description=description, icon=icon, label_id=self._generate_id(name), name=name, - normalized_name=normalized_name, ) label_id = label.label_id self.labels[label_id] = label self.async_schedule_save() + self.hass.bus.async_fire_internal( EVENT_LABEL_REGISTRY_UPDATED, - EventLabelRegistryUpdatedData( - action="create", - label_id=label_id, - ), + EventLabelRegistryUpdatedData(action="create", label_id=label_id), ) return label @@ -216,7 +204,6 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): if name is not UNDEFINED and name != old.name: changes["name"] = name - changes["normalized_name"] = normalize_name(name) if not changes: return old @@ -244,14 +231,12 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): if data is not None: for label in data["labels"]: - normalized_name = normalize_name(label["name"]) labels[label["label_id"]] = LabelEntry( color=label["color"], description=label["description"], icon=label["icon"], label_id=label["label_id"], 
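json_dumps_sorted is now a thin wrapper around the new json_bytes_sorted partial; both sort keys and produce orjson's compact output. For example:

from homeassistant.helpers.json import json_bytes_sorted, json_dumps_sorted

assert json_bytes_sorted({"b": 1, "a": 2}) == b'{"a":2,"b":1}'
assert json_dumps_sorted({"b": 1, "a": 2}) == '{"a":2,"b":1}'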
name=label["name"], - normalized_name=normalized_name, created_at=datetime.fromisoformat(label["created_at"]), modified_at=datetime.fromisoformat(label["modified_at"]), ) diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index e37aa0c532d..38d80d5649d 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -14,23 +14,21 @@ import slugify as unicode_slug import voluptuous as vol from voluptuous_openapi import UNSUPPORTED, convert -from homeassistant.components.climate.intent import INTENT_GET_TEMPERATURE -from homeassistant.components.conversation.trace import ( +from homeassistant.components.climate import INTENT_GET_TEMPERATURE +from homeassistant.components.conversation import ( ConversationTraceEventType, async_conversation_trace_append, ) -from homeassistant.components.cover.intent import INTENT_CLOSE_COVER, INTENT_OPEN_COVER -from homeassistant.components.homeassistant.exposed_entities import async_should_expose +from homeassistant.components.cover import INTENT_CLOSE_COVER, INTENT_OPEN_COVER +from homeassistant.components.homeassistant import async_should_expose from homeassistant.components.intent import async_device_supports_timers -from homeassistant.components.script import ATTR_VARIABLES, DOMAIN as SCRIPT_DOMAIN -from homeassistant.components.weather.intent import INTENT_GET_WEATHER +from homeassistant.components.script import DOMAIN as SCRIPT_DOMAIN +from homeassistant.components.weather import INTENT_GET_WEATHER from homeassistant.const import ( ATTR_DOMAIN, - ATTR_ENTITY_ID, ATTR_SERVICE, EVENT_HOMEASSISTANT_CLOSE, EVENT_SERVICE_REMOVED, - SERVICE_TURN_ON, ) from homeassistant.core import Context, Event, HomeAssistant, callback, split_entity_id from homeassistant.exceptions import HomeAssistantError @@ -279,6 +277,7 @@ class AssistAPI(API): intent.INTENT_TOGGLE, intent.INTENT_GET_CURRENT_DATE, intent.INTENT_GET_CURRENT_TIME, + intent.INTENT_RESPOND, } def __init__(self, hass: HomeAssistant) -> None: @@ -446,12 +445,13 @@ def _get_exposed_entities( entities = {} for state in hass.states.async_all(): - if state.domain == SCRIPT_DOMAIN: - continue - - if not async_should_expose(hass, assistant, state.entity_id): + if ( + not async_should_expose(hass, assistant, state.entity_id) + or state.domain == SCRIPT_DOMAIN + ): continue + description: str | None = None entity_entry = entity_registry.async_get(state.entity_id) names = [state.name] area_names = [] @@ -480,6 +480,9 @@ def _get_exposed_entities( "state": state.state, } + if description: + info["description"] = description + if area_names: info["areas"] = ", ".join(area_names) @@ -597,7 +600,7 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return {"type": "string", "format": "time"} if isinstance(schema, selector.TriggerSelector): - return convert(cv.TRIGGER_SCHEMA) + return {"type": "array", "items": {"type": "string"}} if schema.config.get("multiple"): return {"type": "array", "items": {"type": "string"}} @@ -605,6 +608,83 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return {"type": "string"} +def _get_cached_script_parameters( + hass: HomeAssistant, entity_id: str +) -> tuple[str | None, vol.Schema]: + """Get script description and schema.""" + entity_registry = er.async_get(hass) + + description = None + parameters = vol.Schema({}) + entity_entry = entity_registry.async_get(entity_id) + if entity_entry and entity_entry.unique_id: + parameters_cache = hass.data.get(SCRIPT_PARAMETERS_CACHE) + + if parameters_cache is None: + parameters_cache = 
hass.data[SCRIPT_PARAMETERS_CACHE] = {} + + @callback + def clear_cache(event: Event) -> None: + """Clear script parameter cache on script reload or delete.""" + if ( + event.data[ATTR_DOMAIN] == SCRIPT_DOMAIN + and event.data[ATTR_SERVICE] in parameters_cache + ): + parameters_cache.pop(event.data[ATTR_SERVICE]) + + cancel = hass.bus.async_listen(EVENT_SERVICE_REMOVED, clear_cache) + + @callback + def on_homeassistant_close(event: Event) -> None: + """Cleanup.""" + cancel() + + hass.bus.async_listen_once( + EVENT_HOMEASSISTANT_CLOSE, on_homeassistant_close + ) + + if entity_entry.unique_id in parameters_cache: + return parameters_cache[entity_entry.unique_id] + + if service_desc := service.async_get_cached_service_description( + hass, SCRIPT_DOMAIN, entity_entry.unique_id + ): + description = service_desc.get("description") + schema: dict[vol.Marker, Any] = {} + fields = service_desc.get("fields", {}) + + for field, config in fields.items(): + field_description = config.get("description") + if not field_description: + field_description = config.get("name") + key: vol.Marker + if config.get("required"): + key = vol.Required(field, description=field_description) + else: + key = vol.Optional(field, description=field_description) + if "selector" in config: + schema[key] = selector.selector(config["selector"]) + else: + schema[key] = cv.string + + parameters = vol.Schema(schema) + + aliases: list[str] = [] + if entity_entry.name: + aliases.append(entity_entry.name) + if entity_entry.aliases: + aliases.extend(entity_entry.aliases) + if aliases: + if description: + description = description + ". Aliases: " + str(list(aliases)) + else: + description = "Aliases: " + str(list(aliases)) + + parameters_cache[entity_entry.unique_id] = (description, parameters) + + return description, parameters + + class ScriptTool(Tool): """LLM Tool representing a Script.""" @@ -614,86 +694,13 @@ class ScriptTool(Tool): script_entity_id: str, ) -> None: """Init the class.""" - entity_registry = er.async_get(hass) - - self.name = split_entity_id(script_entity_id)[1] + self._object_id = self.name = split_entity_id(script_entity_id)[1] if self.name[0].isdigit(): self.name = "_" + self.name - self._entity_id = script_entity_id - self.parameters = vol.Schema({}) - entity_entry = entity_registry.async_get(script_entity_id) - if entity_entry and entity_entry.unique_id: - parameters_cache = hass.data.get(SCRIPT_PARAMETERS_CACHE) - if parameters_cache is None: - parameters_cache = hass.data[SCRIPT_PARAMETERS_CACHE] = {} - - @callback - def clear_cache(event: Event) -> None: - """Clear script parameter cache on script reload or delete.""" - if ( - event.data[ATTR_DOMAIN] == SCRIPT_DOMAIN - and event.data[ATTR_SERVICE] in parameters_cache - ): - parameters_cache.pop(event.data[ATTR_SERVICE]) - - cancel = hass.bus.async_listen(EVENT_SERVICE_REMOVED, clear_cache) - - @callback - def on_homeassistant_close(event: Event) -> None: - """Cleanup.""" - cancel() - - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_CLOSE, on_homeassistant_close - ) - - if entity_entry.unique_id in parameters_cache: - self.description, self.parameters = parameters_cache[ - entity_entry.unique_id - ] - return - - if service_desc := service.async_get_cached_service_description( - hass, SCRIPT_DOMAIN, entity_entry.unique_id - ): - self.description = service_desc.get("description") - schema: dict[vol.Marker, Any] = {} - fields = service_desc.get("fields", {}) - - for field, config in fields.items(): - description = config.get("description") - if not 
description: - description = config.get("name") - key: vol.Marker - if config.get("required"): - key = vol.Required(field, description=description) - else: - key = vol.Optional(field, description=description) - if "selector" in config: - schema[key] = selector.selector(config["selector"]) - else: - schema[key] = cv.string - - self.parameters = vol.Schema(schema) - - aliases: list[str] = [] - if entity_entry.name: - aliases.append(entity_entry.name) - if entity_entry.aliases: - aliases.extend(entity_entry.aliases) - if aliases: - if self.description: - self.description = ( - self.description + ". Aliases: " + str(list(aliases)) - ) - else: - self.description = "Aliases: " + str(list(aliases)) - - parameters_cache[entity_entry.unique_id] = ( - self.description, - self.parameters, - ) + self.description, self.parameters = _get_cached_script_parameters( + hass, script_entity_id + ) async def async_call( self, hass: HomeAssistant, tool_input: ToolInput, llm_context: LLMContext @@ -729,14 +736,13 @@ class ScriptTool(Tool): floor = list(intent.find_floors(floor, floor_reg))[0].floor_id tool_input.tool_args[field] = floor - await hass.services.async_call( + result = await hass.services.async_call( SCRIPT_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: self._entity_id, - ATTR_VARIABLES: tool_input.tool_args, - }, + self._object_id, + tool_input.tool_args, context=llm_context.context, + blocking=True, + return_response=True, ) - return {"success": True} + return {"success": True, "result": result} diff --git a/homeassistant/helpers/network.py b/homeassistant/helpers/network.py index d5891973e40..e39cc2de547 100644 --- a/homeassistant/helpers/network.py +++ b/homeassistant/helpers/network.py @@ -6,6 +6,7 @@ from collections.abc import Callable from contextlib import suppress from ipaddress import ip_address +from aiohttp import hdrs from hass_nabucasa import remote import yarl @@ -15,6 +16,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import bind_hass from homeassistant.util.network import is_ip_address, is_loopback, normalize_url +from .hassio import is_hassio + TYPE_URL_INTERNAL = "internal_url" TYPE_URL_EXTERNAL = "external_url" SUPERVISOR_NETWORK_HOST = "homeassistant" @@ -41,10 +44,6 @@ def get_supervisor_network_url( hass: HomeAssistant, *, allow_ssl: bool = False ) -> str | None: """Get URL for home assistant within supervisor network.""" - # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from homeassistant.components.hassio import is_hassio - if hass.config.api is None or not is_hassio(hass): return None @@ -179,20 +178,21 @@ def get_url( and request_host is not None and hass.config.api is not None ): - # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from homeassistant.components.hassio import get_host_info, is_hassio - scheme = "https" if hass.config.api.use_ssl else "http" current_url = yarl.URL.build( scheme=scheme, host=request_host, port=hass.config.api.port ) known_hostnames = ["localhost"] - if is_hassio(hass) and (host_info := get_host_info(hass)): - known_hostnames.extend( - [host_info["hostname"], f"{host_info['hostname']}.local"] - ) + if is_hassio(hass): + # Local import to avoid circular dependencies + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.hassio import get_host_info + + if host_info := get_host_info(hass): + known_hostnames.extend( + [host_info["hostname"], f"{host_info['hostname']}.local"] + ) if ( ( 
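ScriptTool.async_call now invokes the script as its own service (by object id) with blocking=True and return_response=True instead of script.turn_on with variables, so the script's response data is returned to the LLM. A sketch of the equivalent call, with a hypothetical script and arguments:

from homeassistant.core import HomeAssistant

async def call_script_like_scripttool(hass: HomeAssistant) -> dict:
    """Call a script the way ScriptTool.async_call now does."""
    result = await hass.services.async_call(
        "script",
        "set_vacation_mode",  # hypothetical script object id, not script.turn_on
        {"days": 7},          # tool arguments become the script's fields
        blocking=True,
        return_response=True,
    )
    return {"success": True, "result": result}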
@@ -216,7 +216,18 @@ def _get_request_host() -> str | None: """Get the host address of the current request.""" if (request := http.current_request.get()) is None: raise NoURLAvailableError - return yarl.URL(request.url).host + # partition the host to remove the port + # because the raw host header can contain the port + host = request.headers.get(hdrs.HOST) + if host is None: + return None + # IPv6 addresses are enclosed in brackets + # use same logic as yarl and urllib to extract the host + if "[" in host: + return (host.partition("[")[2]).partition("]")[0] + if ":" in host: + host = host.partition(":")[0] + return host @bind_hass diff --git a/homeassistant/helpers/normalized_name_base_registry.py b/homeassistant/helpers/normalized_name_base_registry.py index 7e7ca9ed884..983d9e55340 100644 --- a/homeassistant/helpers/normalized_name_base_registry.py +++ b/homeassistant/helpers/normalized_name_base_registry.py @@ -4,7 +4,7 @@ from dataclasses import dataclass, field from datetime import datetime from functools import lru_cache -from homeassistant.util import dt as dt_util +from homeassistant.util import dt as dt_util, slugify from .registry import BaseRegistryItems @@ -14,10 +14,14 @@ class NormalizedNameBaseRegistryEntry: """Normalized Name Base Registry Entry.""" name: str - normalized_name: str + normalized_name: str = field(init=False) created_at: datetime = field(default_factory=dt_util.utcnow) modified_at: datetime = field(default_factory=dt_util.utcnow) + def __post_init__(self) -> None: + """Post init.""" + object.__setattr__(self, "normalized_name", normalize_name(self.name)) + @lru_cache(maxsize=1024) def normalize_name(name: str) -> str: @@ -43,7 +47,7 @@ class NormalizedNameBaseRegistryItems[_VT: NormalizedNameBaseRegistryEntry]( old_entry = self.data[key] if ( replacement_entry is not None - and (normalized_name := normalize_name(replacement_entry.name)) + and (normalized_name := replacement_entry.normalized_name) != old_entry.normalized_name and normalized_name in self._normalized_names ): @@ -53,8 +57,17 @@ class NormalizedNameBaseRegistryItems[_VT: NormalizedNameBaseRegistryEntry]( del self._normalized_names[old_entry.normalized_name] def _index_entry(self, key: str, entry: _VT) -> None: - self._normalized_names[normalize_name(entry.name)] = entry + self._normalized_names[entry.normalized_name] = entry def get_by_name(self, name: str) -> _VT | None: """Get entry by name.""" return self._normalized_names.get(normalize_name(name)) + + def generate_id_from_name(self, name: str) -> str: + """Generate ID from name.""" + suggestion = suggestion_base = slugify(name) + tries = 1 + while suggestion in self: + tries += 1 + suggestion = f"{suggestion_base}_{tries}" + return suggestion diff --git a/homeassistant/helpers/restore_state.py b/homeassistant/helpers/restore_state.py index a2b4b3a9b9a..fd1f84a85ff 100644 --- a/homeassistant/helpers/restore_state.py +++ b/homeassistant/helpers/restore_state.py @@ -17,7 +17,6 @@ from homeassistant.util.json import json_loads from . 
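_get_request_host now takes the host from the raw Host header, unwrapping IPv6 brackets and stripping an optional port the same way yarl and urllib do. A standalone sketch of just the parsing step:

def parse_host_header(host: str) -> str:
    """Return the host portion of a raw Host header value."""
    if "[" in host:  # IPv6 literals are enclosed in brackets
        return host.partition("[")[2].partition("]")[0]
    if ":" in host:  # strip an optional port
        host = host.partition(":")[0]
    return host

assert parse_host_header("example.local:8123") == "example.local"
assert parse_host_header("[::1]:8123") == "::1"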
import start from .entity import Entity from .event import async_track_time_interval -from .frame import report from .json import JSONEncoder from .singleton import singleton from .storage import Store @@ -116,21 +115,6 @@ class RestoreStateData: """Dump states now.""" await async_get(hass).async_dump_states() - @classmethod - async def async_get_instance(cls, hass: HomeAssistant) -> RestoreStateData: - """Return the instance of this class.""" - # Nothing should actually be calling this anymore, but we'll keep it - # around for a while to avoid breaking custom components. - # - # In fact they should not be accessing this at all. - report( - "restore_state.RestoreStateData.async_get_instance is deprecated, " - "and not intended to be called by custom components; Please" - "refactor your code to use RestoreEntity instead;" - " restore_state.async_get(hass) can be used in the meantime", - ) - return async_get(hass) - def __init__(self, hass: HomeAssistant) -> None: """Initialize the restore state data class.""" self.hass: HomeAssistant = hass diff --git a/homeassistant/helpers/schema_config_entry_flow.py b/homeassistant/helpers/schema_config_entry_flow.py index 7463c9945b2..af8c4c6402d 100644 --- a/homeassistant/helpers/schema_config_entry_flow.py +++ b/homeassistant/helpers/schema_config_entry_flow.py @@ -16,7 +16,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import HomeAssistant, callback, split_entity_id from homeassistant.data_entry_flow import UnknownHandler @@ -403,7 +402,7 @@ class SchemaConfigFlowHandler(ConfigFlow, ABC): ) -class SchemaOptionsFlowHandler(OptionsFlowWithConfigEntry): +class SchemaOptionsFlowHandler(OptionsFlow): """Handle a schema based options flow.""" def __init__( @@ -422,10 +421,8 @@ class SchemaOptionsFlowHandler(OptionsFlowWithConfigEntry): options, which is the union of stored options and user input from the options flow steps. 
""" - super().__init__(config_entry) - self._common_handler = SchemaCommonFlowHandler( - self, options_flow, self._options - ) + self._options = copy.deepcopy(dict(config_entry.options)) + self._common_handler = SchemaCommonFlowHandler(self, options_flow, self.options) self._async_options_flow_finished = async_options_flow_finished for step in options_flow: @@ -438,6 +435,11 @@ class SchemaOptionsFlowHandler(OptionsFlowWithConfigEntry): if async_setup_preview: setattr(self, "async_setup_preview", async_setup_preview) + @property + def options(self) -> dict[str, Any]: + """Return a mutable copy of the config entry options.""" + return self._options + @staticmethod def _async_step( step_id: str, diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index 26a9b6e069e..a67ef60c799 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -9,13 +9,14 @@ from contextvars import ContextVar from copy import copy from dataclasses import dataclass from datetime import datetime, timedelta -from functools import cached_property, partial +from functools import partial import itertools import logging from types import MappingProxyType from typing import Any, Literal, TypedDict, cast, overload import async_interrupt +from propcache import cached_property import voluptuous as vol from homeassistant import exceptions @@ -472,13 +473,13 @@ class _ScriptRun: script_execution_set("aborted") except _StopScript as err: script_execution_set("finished", err.response) - response = err.response # Let the _StopScript bubble up if this is a sub-script if not self._script.top_level: - # We already consumed the response, do not pass it on - err.response = None raise + + response = err.response + except Exception: script_execution_set("error") raise @@ -1132,7 +1133,11 @@ class _ScriptRun: self._step_log("wait for trigger", timeout) variables = {**self._variables} - self._variables["wait"] = {"remaining": timeout, "trigger": None} + self._variables["wait"] = { + "remaining": timeout, + "completed": False, + "trigger": None, + } trace_set_result(wait=self._variables["wait"]) if timeout == 0: @@ -1150,6 +1155,7 @@ class _ScriptRun: variables: dict[str, Any], context: Context | None = None ) -> None: self._async_set_remaining_time_var(timeout_handle) + self._variables["wait"]["completed"] = True self._variables["wait"]["trigger"] = variables["trigger"] _set_result_unless_done(done) @@ -1349,7 +1355,7 @@ async def _async_stop_scripts_at_shutdown(hass: HomeAssistant, event: Event) -> ) -type _VarsType = dict[str, Any] | MappingProxyType[str, Any] +type _VarsType = dict[str, Any] | Mapping[str, Any] | MappingProxyType[str, Any] def _referenced_extract_ids(data: Any, key: str, found: set[str]) -> None: diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index 0551b5289c5..35135010452 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -42,6 +42,7 @@ from homeassistant.core import ( ) from homeassistant.exceptions import ( HomeAssistantError, + ServiceNotSupported, TemplateError, Unauthorized, UnknownUser, @@ -571,20 +572,32 @@ def async_extract_referenced_entity_ids( # noqa: C901 for area_entry in area_reg.areas.get_areas_for_floor(floor_id) ) - # Find devices for targeted areas - selected.referenced_devices.update(selector.device_ids) - selected.referenced_areas.update(selector.area_ids) - if selected.referenced_areas: - for area_id in selected.referenced_areas: - selected.referenced_devices.update( - 
device_entry.id - for device_entry in dev_reg.devices.get_devices_for_area_id(area_id) - ) + selected.referenced_devices.update(selector.device_ids) if not selected.referenced_areas and not selected.referenced_devices: return selected + # Add indirectly referenced by device + selected.indirectly_referenced.update( + entry.entity_id + for device_id in selected.referenced_devices + for entry in entities.get_entries_for_device_id(device_id) + # Do not add entities which are hidden or which are config + # or diagnostic entities. + if (entry.entity_category is None and entry.hidden_by is None) + ) + + # Find devices for targeted areas + referenced_devices_by_area: set[str] = set() + if selected.referenced_areas: + for area_id in selected.referenced_areas: + referenced_devices_by_area.update( + device_entry.id + for device_entry in dev_reg.devices.get_devices_for_area_id(area_id) + ) + selected.referenced_devices.update(referenced_devices_by_area) + # Add indirectly referenced by area selected.indirectly_referenced.update( entry.entity_id @@ -595,10 +608,10 @@ def async_extract_referenced_entity_ids( # noqa: C901 # or diagnostic entities. if entry.entity_category is None and entry.hidden_by is None ) - # Add indirectly referenced by device + # Add indirectly referenced by area through device selected.indirectly_referenced.update( entry.entity_id - for device_id in selected.referenced_devices + for device_id in referenced_devices_by_area for entry in entities.get_entries_for_device_id(device_id) # Do not add entities which are hidden or which are config # or diagnostic entities. @@ -610,11 +623,10 @@ def async_extract_referenced_entity_ids( # noqa: C901 # by an area and the entity # has no explicitly set area not entry.area_id - # The entity's device matches a targeted device - or device_id in selector.device_ids ) ) ) + return selected @@ -975,9 +987,7 @@ async def entity_service_call( ): # If entity explicitly referenced, raise an error if referenced is not None and entity.entity_id in referenced.referenced: - raise HomeAssistantError( - f"Entity {entity.entity_id} does not support this service." 
- ) + raise ServiceNotSupported(call.domain, call.service, entity.entity_id) continue @@ -1264,19 +1274,14 @@ def async_register_entity_service( """ if schema is None or isinstance(schema, dict): schema = cv.make_entity_service_schema(schema) - # Do a sanity check to check this is a valid entity service schema, - # the check could be extended to require All/Any to have sub schema(s) - # with all entity service fields - elif ( - # Don't check All/Any - not isinstance(schema, (vol.All, vol.Any)) - # Don't check All/Any wrapped in schema - and not isinstance(schema.schema, (vol.All, vol.Any)) - and any(key not in schema.schema for key in cv.ENTITY_SERVICE_FIELDS) - ): - raise HomeAssistantError( - "The schema does not include all required keys: " - f"{", ".join(str(key) for key in cv.ENTITY_SERVICE_FIELDS)}" + elif not cv.is_entity_service_schema(schema): + # pylint: disable-next=import-outside-toplevel + from .frame import ReportBehavior, report_usage + + report_usage( + "registers an entity service with a non entity service schema", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.9", ) service_func: str | HassJob[..., Any] diff --git a/homeassistant/helpers/service_info/hassio.py b/homeassistant/helpers/service_info/hassio.py new file mode 100644 index 00000000000..0125fef3017 --- /dev/null +++ b/homeassistant/helpers/service_info/hassio.py @@ -0,0 +1,16 @@ +"""Hassio Discovery data.""" + +from dataclasses import dataclass +from typing import Any + +from homeassistant.data_entry_flow import BaseServiceInfo + + +@dataclass(slots=True) +class HassioServiceInfo(BaseServiceInfo): + """Prepared info from hassio entries.""" + + config: dict[str, Any] + name: str + slug: str + uuid: str diff --git a/homeassistant/helpers/service_info/mqtt.py b/homeassistant/helpers/service_info/mqtt.py index 6ffc981ced1..a5284807617 100644 --- a/homeassistant/helpers/service_info/mqtt.py +++ b/homeassistant/helpers/service_info/mqtt.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from homeassistant.data_entry_flow import BaseServiceInfo -type ReceivePayloadType = str | bytes +type ReceivePayloadType = str | bytes | bytearray @dataclass(slots=True) diff --git a/homeassistant/helpers/state.py b/homeassistant/helpers/state.py index 71b1b2658e2..70f64d5296a 100644 --- a/homeassistant/helpers/state.py +++ b/homeassistant/helpers/state.py @@ -9,17 +9,16 @@ import logging from types import ModuleType from typing import Any +from homeassistant.components.lock import LockState from homeassistant.components.sun import STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON from homeassistant.const import ( STATE_CLOSED, STATE_HOME, - STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, STATE_UNKNOWN, - STATE_UNLOCKED, ) from homeassistant.core import Context, HomeAssistant, State from homeassistant.loader import IntegrationNotFound, async_get_integration, bind_hass @@ -79,7 +78,7 @@ def state_as_number(state: State) -> float: """ if state.state in ( STATE_ON, - STATE_LOCKED, + LockState.LOCKED, STATE_ABOVE_HORIZON, STATE_OPEN, STATE_HOME, @@ -87,7 +86,7 @@ def state_as_number(state: State) -> float: return 1 if state.state in ( STATE_OFF, - STATE_UNLOCKED, + LockState.UNLOCKED, STATE_UNKNOWN, STATE_BELOW_HORIZON, STATE_CLOSED, diff --git a/homeassistant/helpers/storage.py b/homeassistant/helpers/storage.py index 7e3c12cfc01..080599f54d8 100644 --- a/homeassistant/helpers/storage.py +++ b/homeassistant/helpers/storage.py @@ -6,7 +6,6 @@ import asyncio from collections.abc import Callable, Iterable, Mapping, 
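async_register_entity_service now checks custom schemas with cv.is_entity_service_schema and reports offenders through report_usage (breaking in 2025.9) instead of raising immediately. Passing a plain dict keeps working because it is wrapped into an entity service schema; a hedged sketch with a made-up field:

import voluptuous as vol
from homeassistant.helpers import config_validation as cv

# make_entity_service_schema adds the standard entity-targeting fields around
# the custom ones; "brightness" is only an example field.
SET_SCHEMA = cv.make_entity_service_schema({vol.Optional("brightness"): cv.positive_int})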
Sequence from contextlib import suppress from copy import deepcopy -from functools import cached_property import inspect from json import JSONDecodeError, JSONEncoder import logging @@ -14,6 +13,8 @@ import os from pathlib import Path from typing import Any +from propcache import cached_property + from homeassistant.const import ( EVENT_HOMEASSISTANT_FINAL_WRITE, EVENT_HOMEASSISTANT_STARTED, diff --git a/homeassistant/helpers/system_info.py b/homeassistant/helpers/system_info.py index 69e03904caa..53866428332 100644 --- a/homeassistant/helpers/system_info.py +++ b/homeassistant/helpers/system_info.py @@ -14,6 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass from homeassistant.util.package import is_docker_env, is_virtual_env +from .hassio import is_hassio from .importlib import async_import_module from .singleton import singleton @@ -52,13 +53,13 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]: else: hassio = await async_import_module(hass, "homeassistant.components.hassio") - is_hassio = hassio.is_hassio(hass) + is_hassio_ = is_hassio(hass) info_object = { "installation_type": "Unknown", "version": current_version, "dev": "dev" in current_version, - "hassio": is_hassio, + "hassio": is_hassio_, "virtualenv": is_virtual_env(), "python_version": platform.python_version(), "docker": False, @@ -70,7 +71,10 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]: try: info_object["user"] = cached_get_user() - except KeyError: + except (KeyError, OSError): + # OSError on python >= 3.13, KeyError on python < 3.13 + # KeyError can be removed when 3.12 support is dropped + # see https://docs.python.org/3/whatsnew/3.13.html info_object["user"] = None if platform.system() == "Darwin": @@ -89,7 +93,7 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]: info_object["installation_type"] = "Home Assistant Core" # Enrich with Supervisor information - if is_hassio: + if is_hassio_: if not (info := hassio.get_info(hass)): _LOGGER.warning("No Home Assistant Supervisor info available") info = {} diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index e090e0de2d1..5b4a48bb07c 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -9,8 +9,9 @@ import collections.abc from collections.abc import Callable, Generator, Iterable from contextlib import AbstractContextManager from contextvars import ContextVar +from copy import deepcopy from datetime import date, datetime, time, timedelta -from functools import cache, cached_property, lru_cache, partial, wraps +from functools import cache, lru_cache, partial, wraps import json import logging import math @@ -22,7 +23,16 @@ import statistics from struct import error as StructError, pack, unpack_from import sys from types import CodeType, TracebackType -from typing import Any, Concatenate, Literal, NoReturn, Self, cast, overload +from typing import ( + TYPE_CHECKING, + Any, + Concatenate, + Literal, + NoReturn, + Self, + cast, + overload, +) from urllib.parse import urlencode as urllib_urlencode import weakref @@ -34,6 +44,7 @@ from jinja2.sandbox import ImmutableSandboxedEnvironment from jinja2.utils import Namespace from lru import LRU import orjson +from propcache import under_cached_property import voluptuous as vol from homeassistant.const import ( @@ -51,6 +62,7 @@ from homeassistant.const import ( from homeassistant.core import ( Context, HomeAssistant, + ServiceResponse, State, 
callback, split_entity_id, @@ -80,10 +92,14 @@ from . import ( label_registry, location as loc_helper, ) +from .deprecation import deprecated_function from .singleton import singleton from .translation import async_translate_state from .typing import TemplateVarsType +if TYPE_CHECKING: + from _typeshed import OptExcInfo + # mypy: allow-untyped-defs, no-check-untyped-defs _LOGGER = logging.getLogger(__name__) @@ -149,6 +165,7 @@ CACHED_TEMPLATE_STATES = 512 EVAL_CACHE_SIZE = 512 MAX_CUSTOM_TEMPLATE_SIZE = 5 * 1024 * 1024 +MAX_TEMPLATE_OUTPUT = 256 * 1024 # 256KiB CACHED_TEMPLATE_LRU: LRU[State, TemplateState] = LRU(CACHED_TEMPLATE_STATES) CACHED_TEMPLATE_NO_COLLECT_LRU: LRU[State, TemplateState] = LRU(CACHED_TEMPLATE_STATES) @@ -205,15 +222,24 @@ def async_setup(hass: HomeAssistant) -> bool: @bind_hass +@deprecated_function( + "automatic setting of Template.hass introduced by HA Core PR #89242", + breaks_in_ha_version="2025.10", +) def attach(hass: HomeAssistant, obj: Any) -> None: + """Recursively attach hass to all template instances in list and dict.""" + return _attach(hass, obj) + + +def _attach(hass: HomeAssistant, obj: Any) -> None: """Recursively attach hass to all template instances in list and dict.""" if isinstance(obj, list): for child in obj: - attach(hass, child) + _attach(hass, child) elif isinstance(obj, collections.abc.Mapping): for child_key, child_value in obj.items(): - attach(hass, child_key) - attach(hass, child_value) + _attach(hass, child_key) + _attach(hass, child_value) elif isinstance(obj, Template): obj.hass = hass @@ -495,16 +521,30 @@ class Template: ) def __init__(self, template: str, hass: HomeAssistant | None = None) -> None: - """Instantiate a template.""" + """Instantiate a template. + + Note: A valid hass instance should always be passed in. The hass parameter + will be non optional in Home Assistant Core 2025.10. 
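Template objects are now expected to be created with the hass instance; constructing one without it is reported as deprecated (targeted for 2025.10), and rendered output is capped at MAX_TEMPLATE_OUTPUT (256 KiB). A minimal sketch of the preferred construction:

from homeassistant.core import HomeAssistant
from homeassistant.helpers.template import Template

def make_uptime_template(hass: HomeAssistant) -> Template:
    """Create a template with hass attached, the now-expected pattern."""
    return Template("{{ now().isoformat() }}", hass)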
+ """ + # pylint: disable-next=import-outside-toplevel + from .frame import ReportBehavior, report_usage + if not isinstance(template, str): raise TypeError("Expected template to be a string") + if not hass: + report_usage( + "creates a template object without passing hass", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.10", + ) + self.template: str = template.strip() self._compiled_code: CodeType | None = None self._compiled: jinja2.Template | None = None self.hass = hass self.is_static = not is_template_string(template) - self._exc_info: sys._OptExcInfo | None = None + self._exc_info: OptExcInfo | None = None self._limited: bool | None = None self._strict: bool | None = None self._log_fn: Callable[[int, str], None] | None = None @@ -604,6 +644,11 @@ class Template: except Exception as err: raise TemplateError(err) from err + if len(render_result) > MAX_TEMPLATE_OUTPUT: + raise TemplateError( + f"Template output exceeded maximum size of {MAX_TEMPLATE_OUTPUT} characters" + ) + render_result = render_result.strip() if not parse_result or self.hass and self.hass.config.legacy_templates: @@ -990,6 +1035,8 @@ class DomainStates: class TemplateStateBase(State): """Class to represent a state object in a template.""" + __slots__ = ("_hass", "_collect", "_entity_id", "_state") + _state: State __setitem__ = _readonly @@ -1002,6 +1049,7 @@ class TemplateStateBase(State): self._hass = hass self._collect = collect self._entity_id = entity_id + self._cache: dict[str, Any] = {} def _collect_state(self) -> None: if self._collect and (render_info := _render_info.get()): @@ -1022,7 +1070,7 @@ class TemplateStateBase(State): return self.state_with_unit raise KeyError - @cached_property + @under_cached_property def entity_id(self) -> str: # type: ignore[override] """Wrap State.entity_id. @@ -1079,7 +1127,7 @@ class TemplateStateBase(State): return self._state.object_id @property - def name(self) -> str: + def name(self) -> str: # type: ignore[override] """Wrap State.name.""" self._collect_state() return self._state.name @@ -1116,7 +1164,7 @@ class TemplateStateBase(State): class TemplateState(TemplateStateBase): """Class to represent a state object in a template.""" - __slots__ = ("_state",) + __slots__ = () # Inheritance is done so functions that check against State keep working def __init__(self, hass: HomeAssistant, state: State, collect: bool = True) -> None: @@ -1132,6 +1180,8 @@ class TemplateState(TemplateStateBase): class TemplateStateFromEntityId(TemplateStateBase): """Class to represent a state object in a template.""" + __slots__ = () + def __init__( self, hass: HomeAssistant, entity_id: str, collect: bool = True ) -> None: @@ -1241,7 +1291,7 @@ def result_as_boolean(template_result: Any | None) -> bool: True/not 0/'1'/'true'/'yes'/'on'/'enable' are considered truthy False/0/None/'0'/'false'/'no'/'off'/'disable' are considered falsy - + All other values are falsy """ if template_result is None: return False @@ -2112,6 +2162,63 @@ def as_timedelta(value: str) -> timedelta | None: return dt_util.parse_duration(value) +def merge_response(value: ServiceResponse) -> list[Any]: + """Merge action responses into single list. + + Checks that the input is a correct service response: + { + "entity_id": {str: dict[str, Any]}, + } + If response is a single list, it will extend the list with the items + and add the entity_id and value_key to each dictionary for reference. 
+ If response is a dictionary or multiple lists, + it will append the dictionary/lists to the list + and add the entity_id to each dictionary for reference. + """ + if not isinstance(value, dict): + raise TypeError("Response is not a dictionary") + if not value: + # Bail out early if response is an empty dictionary + return [] + + is_single_list = False + response_items: list = [] + input_service_response = deepcopy(value) + for entity_id, entity_response in input_service_response.items(): # pylint: disable=too-many-nested-blocks + if not isinstance(entity_response, dict): + raise TypeError("Response is not a dictionary") + for value_key, type_response in entity_response.items(): + if len(entity_response) == 1 and isinstance(type_response, list): + # Provides special handling for responses such as calendar events + # and weather forecasts where the response contains a single list with multiple + # dictionaries inside. + is_single_list = True + for dict_in_list in type_response: + if isinstance(dict_in_list, dict): + if ATTR_ENTITY_ID in dict_in_list: + raise ValueError( + f"Response dictionary already contains key '{ATTR_ENTITY_ID}'" + ) + dict_in_list[ATTR_ENTITY_ID] = entity_id + dict_in_list["value_key"] = value_key + response_items.extend(type_response) + else: + # Break the loop if not a single list as the logic is then managed in the outer loop + # which handles both dictionaries and in the case of multiple lists. + break + + if not is_single_list: + _response = entity_response.copy() + if ATTR_ENTITY_ID in _response: + raise ValueError( + f"Response dictionary already contains key '{ATTR_ENTITY_ID}'" + ) + _response[ATTR_ENTITY_ID] = entity_id + response_items.append(_response) + + return response_items + + def strptime(string, fmt, default=_SENTINEL): """Parse a time string to datetime.""" try: @@ -2827,6 +2934,7 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["as_timedelta"] = as_timedelta self.globals["as_timestamp"] = forgiving_as_timestamp self.globals["timedelta"] = timedelta + self.globals["merge_response"] = merge_response self.globals["strptime"] = strptime self.globals["urlencode"] = urlencode self.globals["average"] = average diff --git a/homeassistant/helpers/trigger.py b/homeassistant/helpers/trigger.py index a0abbaa390c..67e9010df79 100644 --- a/homeassistant/helpers/trigger.py +++ b/homeassistant/helpers/trigger.py @@ -225,7 +225,7 @@ async def _async_get_trigger_platform( try: integration = await async_get_integration(hass, platform) except IntegrationNotFound: - raise vol.Invalid(f"Invalid platform '{platform}' specified") from None + raise vol.Invalid(f"Invalid trigger '{platform}' specified") from None try: return await integration.async_get_platform("trigger") except ImportError: diff --git a/homeassistant/helpers/trigger_template_entity.py b/homeassistant/helpers/trigger_template_entity.py index 7f8ad41d7bb..1486e33d6fa 100644 --- a/homeassistant/helpers/trigger_template_entity.py +++ b/homeassistant/helpers/trigger_template_entity.py @@ -30,7 +30,7 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads from . 
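The new merge_response helper flattens an action response keyed by entity_id into a single list, annotating each item with its entity_id (and value_key for single-list responses such as calendar events or weather forecasts); in Jinja it is exposed as the merge_response global. Called directly in Python with made-up calendar data:

from homeassistant.helpers.template import merge_response

response = {
    "calendar.school": {"events": [{"summary": "Exam"}]},
    "calendar.work": {"events": [{"summary": "Standup"}]},
}
assert merge_response(response) == [
    {"summary": "Exam", "entity_id": "calendar.school", "value_key": "events"},
    {"summary": "Standup", "entity_id": "calendar.work", "value_key": "events"},
]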
import config_validation as cv from .entity import Entity -from .template import render_complex +from .template import TemplateStateFromEntityId, render_complex from .typing import ConfigType CONF_AVAILABILITY = "availability" @@ -231,16 +231,14 @@ class ManualTriggerEntity(TriggerBaseEntity): Ex: self._process_manual_data(payload) """ - self.async_write_ha_state() - this = None - if state := self.hass.states.get(self.entity_id): - this = state.as_dict() - run_variables: dict[str, Any] = {"value": value} # Silently try if variable is a json and store result in `value_json` if it is. with contextlib.suppress(*JSON_DECODE_EXCEPTIONS): run_variables["value_json"] = json_loads(run_variables["value"]) - variables = {"this": this, **(run_variables or {})} + variables = { + "this": TemplateStateFromEntityId(self.hass, self.entity_id), + **(run_variables or {}), + } self._render_templates(variables) diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index 4fe4953d752..6cc4584935e 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -6,7 +6,6 @@ from abc import abstractmethod import asyncio from collections.abc import Awaitable, Callable, Coroutine, Generator from datetime import datetime, timedelta -from functools import cached_property import logging from random import randint from time import monotonic @@ -14,6 +13,7 @@ from typing import Any, Generic, Protocol import urllib.error import aiohttp +from propcache import cached_property import requests from typing_extensions import TypeVar @@ -24,11 +24,14 @@ from homeassistant.exceptions import ( ConfigEntryAuthFailed, ConfigEntryError, ConfigEntryNotReady, + HomeAssistantError, ) from homeassistant.util.dt import utcnow from . import entity, event from .debounce import Debouncer +from .frame import report_usage +from .typing import UNDEFINED, UndefinedType REQUEST_REFRESH_DEFAULT_COOLDOWN = 10 REQUEST_REFRESH_DEFAULT_IMMEDIATE = True @@ -41,7 +44,7 @@ _DataUpdateCoordinatorT = TypeVar( ) -class UpdateFailed(Exception): +class UpdateFailed(HomeAssistantError): """Raised when an update has failed.""" @@ -68,6 +71,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): hass: HomeAssistant, logger: logging.Logger, *, + config_entry: config_entries.ConfigEntry | None | UndefinedType = UNDEFINED, name: str, update_interval: timedelta | None = None, update_method: Callable[[], Awaitable[_DataT]] | None = None, @@ -84,7 +88,12 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): self._update_interval_seconds: float | None = None self.update_interval = update_interval self._shutdown_requested = False - self.config_entry = config_entries.current_entry.get() + if config_entry is UNDEFINED: + self.config_entry = config_entries.current_entry.get() + # This should be deprecated once all core integrations are updated + # to pass in the config entry explicitly. + else: + self.config_entry = config_entry self.always_update = always_update # It's None before the first successful update. @@ -277,6 +286,22 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): fails. Additionally logging is handled by config entry setup to ensure that multiple retries do not cause log spam. 
""" + if self.config_entry is None: + report_usage( + "uses `async_config_entry_first_refresh`, which is only supported " + "for coordinators with a config entry", + breaks_in_ha_version="2025.11", + ) + elif ( + self.config_entry.state + is not config_entries.ConfigEntryState.SETUP_IN_PROGRESS + ): + report_usage( + "uses `async_config_entry_first_refresh`, which is only supported " + f"when entry state is {config_entries.ConfigEntryState.SETUP_IN_PROGRESS}, " + f"but it is in state {self.config_entry.state}", + breaks_in_ha_version="2025.11", + ) if await self.__wrap_async_setup(): await self._async_refresh( log_failures=False, raise_on_auth_failed=True, raise_on_entry_error=True diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 90b88ba2109..1fa9d0cd49d 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -11,7 +11,6 @@ from collections.abc import Callable, Iterable from contextlib import suppress from dataclasses import dataclass import functools as ft -from functools import cached_property import importlib import logging import os @@ -26,6 +25,7 @@ from awesomeversion import ( AwesomeVersionException, AwesomeVersionStrategy, ) +from propcache import cached_property import voluptuous as vol from . import generated @@ -206,7 +206,7 @@ class USBMatcherOptional(TypedDict, total=False): class USBMatcher(USBMatcherRequired, USBMatcherOptional): - """Matcher for the bluetooth integration.""" + """Matcher for the USB integration.""" @dataclass(slots=True) @@ -255,6 +255,7 @@ class Manifest(TypedDict, total=False): usb: list[dict[str, str]] homekit: dict[str, list[str]] is_built_in: bool + overwrites_built_in: bool version: str codeowners: list[str] loggers: list[str] @@ -282,9 +283,7 @@ def manifest_from_legacy_module(domain: str, module: ModuleType) -> Manifest: } -async def _async_get_custom_components( - hass: HomeAssistant, -) -> dict[str, Integration]: +def _get_custom_components(hass: HomeAssistant) -> dict[str, Integration]: """Return list of custom integrations.""" if hass.config.recovery_mode or hass.config.safe_mode: return {} @@ -294,21 +293,14 @@ async def _async_get_custom_components( except ImportError: return {} - def get_sub_directories(paths: list[str]) -> list[pathlib.Path]: - """Return all sub directories in a set of paths.""" - return [ - entry - for path in paths - for entry in pathlib.Path(path).iterdir() - if entry.is_dir() - ] + dirs = [ + entry + for path in custom_components.__path__ + for entry in pathlib.Path(path).iterdir() + if entry.is_dir() + ] - dirs = await hass.async_add_executor_job( - get_sub_directories, custom_components.__path__ - ) - - integrations = await hass.async_add_executor_job( - _resolve_integrations_from_root, + integrations = _resolve_integrations_from_root( hass, custom_components, [comp.name for comp in dirs], @@ -329,7 +321,7 @@ async def async_get_custom_components( if comps_or_future is None: future = hass.data[DATA_CUSTOM_COMPONENTS] = hass.loop.create_future() - comps = await _async_get_custom_components(hass) + comps = await hass.async_add_executor_job(_get_custom_components, hass) hass.data[DATA_CUSTOM_COMPONENTS] = comps future.set_result(comps) @@ -451,6 +443,7 @@ async def async_get_integration_descriptions( "single_config_entry": integration.manifest.get( "single_config_entry", False ), + "overwrites_built_in": integration.overwrites_built_in, } custom_flows[integration_key][integration.domain] = metadata @@ -762,6 +755,7 @@ class Integration: self.file_path = file_path self.manifest = manifest 
manifest["is_built_in"] = self.is_built_in + manifest["overwrites_built_in"] = self.overwrites_built_in if self.dependencies: self._all_dependencies_resolved: bool | None = None @@ -836,6 +830,9 @@ class Integration: @cached_property def quality_scale(self) -> str | None: """Return Integration Quality Scale.""" + # Custom integrations default to "custom" quality scale. + if not self.is_built_in or self.overwrites_built_in: + return "custom" return self.manifest.get("quality_scale") @cached_property @@ -909,6 +906,16 @@ class Integration: """Test if package is a built-in integration.""" return self.pkg_path.startswith(PACKAGE_BUILTIN) + @property + def overwrites_built_in(self) -> bool: + """Return if package overwrites a built-in integration.""" + if self.is_built_in: + return False + core_comp_path = ( + pathlib.Path(__file__).parent / "components" / self.domain / "manifest.json" + ) + return core_comp_path.is_file() + @property def version(self) -> AwesomeVersion | None: """Return the version of the integration.""" @@ -1552,16 +1559,16 @@ class Components: raise ImportError(f"Unable to load {comp_name}") # Local import to avoid circular dependencies - from .helpers.frame import report # pylint: disable=import-outside-toplevel + # pylint: disable-next=import-outside-toplevel + from .helpers.frame import ReportBehavior, report_usage - report( - ( - f"accesses hass.components.{comp_name}." - " This is deprecated and will stop working in Home Assistant 2024.9, it" - f" should be updated to import functions used from {comp_name} directly" - ), - error_if_core=False, - log_custom_component_only=True, + report_usage( + f"accesses hass.components.{comp_name}, which" + f" should be updated to import functions used from {comp_name} directly", + core_behavior=ReportBehavior.IGNORE, + core_integration_behavior=ReportBehavior.IGNORE, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.3", ) wrapped = ModuleWrapper(self._hass, component) @@ -1581,16 +1588,18 @@ class Helpers: helper = importlib.import_module(f"homeassistant.helpers.{helper_name}") # Local import to avoid circular dependencies - from .helpers.frame import report # pylint: disable=import-outside-toplevel + # pylint: disable-next=import-outside-toplevel + from .helpers.frame import ReportBehavior, report_usage - report( + report_usage( ( - f"accesses hass.helpers.{helper_name}." 
- " This is deprecated and will stop working in Home Assistant 2024.11, it" + f"accesses hass.helpers.{helper_name}, which" f" should be updated to import functions used from {helper_name} directly" ), - error_if_core=False, - log_custom_component_only=True, + core_behavior=ReportBehavior.IGNORE, + core_integration_behavior=ReportBehavior.IGNORE, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.5", ) wrapped = ModuleWrapper(self._hass, helper) @@ -1677,6 +1686,29 @@ def is_component_module_loaded(hass: HomeAssistant, module: str) -> bool: return module in hass.data[DATA_COMPONENTS] +@callback +def async_get_issue_integration( + hass: HomeAssistant | None, + integration_domain: str | None, +) -> Integration | None: + """Return details of an integration for issue reporting.""" + integration: Integration | None = None + if not hass or not integration_domain: + # We are unable to get the integration + return None + + if (comps_or_future := hass.data.get(DATA_CUSTOM_COMPONENTS)) and not isinstance( + comps_or_future, asyncio.Future + ): + integration = comps_or_future.get(integration_domain) + + if not integration: + with suppress(IntegrationNotLoaded): + integration = async_get_loaded_integration(hass, integration_domain) + + return integration + + @callback def async_get_issue_tracker( hass: HomeAssistant | None, @@ -1690,20 +1722,11 @@ def async_get_issue_tracker( "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue" ) if not integration and not integration_domain and not module: - # If we know nothing about the entity, suggest opening an issue on HA core + # If we know nothing about the integration, suggest opening an issue on HA core return issue_tracker - if ( - not integration - and (hass and integration_domain) - and (comps_or_future := hass.data.get(DATA_CUSTOM_COMPONENTS)) - and not isinstance(comps_or_future, asyncio.Future) - ): - integration = comps_or_future.get(integration_domain) - - if not integration and (hass and integration_domain): - with suppress(IntegrationNotLoaded): - integration = async_get_loaded_integration(hass, integration_domain) + if not integration: + integration = async_get_issue_integration(hass, integration_domain) if integration and not integration.is_built_in: return integration.issue_tracker diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 767804e5136..add20ef0870 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,67 +3,75 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.5 +aiohasupervisor==0.2.2b2 +aiohttp-fast-zlib==0.2.0 +aiohttp==3.11.10 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 async-interrupt==1.2.0 -async-upnp-client==0.40.0 +async-upnp-client==0.41.0 atomicwrites-homeassistant==1.4.1 -attrs==23.2.0 +attrs==24.2.0 +audioop-lts==0.2.1;python_version>='3.13' +av==13.1.0 awesomeversion==24.6.0 -bcrypt==4.1.3 -bleak-retry-connector==3.5.0 -bleak==0.22.2 -bluetooth-adapters==0.19.4 +bcrypt==4.2.0 +bleak-retry-connector==3.6.0 +bleak==0.22.3 +bluetooth-adapters==0.20.2 bluetooth-auto-recovery==1.4.2 bluetooth-data-tools==1.20.0 -cached-ipaddress==0.5.0 +cached-ipaddress==0.8.0 certifi>=2021.5.30 -ciso8601==2.3.1 -cryptography==43.0.0 -dbus-fast==2.23.0 +ciso8601==2.3.2 +cryptography==44.0.0 +dbus-fast==2.24.3 fnv-hash-fast==1.0.2 -ha-av==10.1.1 -ha-ffmpeg==3.2.0 -habluetooth==3.3.2 -hass-nabucasa==0.81.1 -hassil==1.7.4 -home-assistant-bluetooth==1.12.2 
-home-assistant-frontend==20240809.0 -home-assistant-intents==2024.8.7 -httpx==0.27.0 +go2rtc-client==0.1.2 +ha-ffmpeg==3.2.2 +habluetooth==3.6.0 +hass-nabucasa==0.87.0 +hassil==2.0.5 +home-assistant-bluetooth==1.13.0 +home-assistant-frontend==20241127.8 +home-assistant-intents==2024.12.9 +httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.7 +orjson==3.10.12 packaging>=23.1 paho-mqtt==1.6.1 -Pillow==10.4.0 -pip>=21.3.1 +Pillow==11.0.0 +propcache==0.2.1 psutil-home-assistant==0.0.1 -PyJWT==2.9.0 +PyJWT==2.10.1 pymicro-vad==1.0.1 PyNaCl==1.5.0 -pyOpenSSL==24.2.1 +pyOpenSSL==24.3.0 pyserial==3.5 -pyspeex-noise==1.0.0 +pyspeex-noise==1.0.2 python-slugify==8.0.4 -PyTurboJPEG==1.7.1 +PyTurboJPEG==1.7.5 pyudev==0.24.1 PyYAML==6.0.2 requests==2.32.3 -SQLAlchemy==2.0.31 +securetar==2024.11.0 +SQLAlchemy==2.0.36 +standard-aifc==3.13.0;python_version>='3.13' +standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 +uv==0.5.8 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.9.4 -zeroconf==0.132.2 +webrtc-models==0.3.0 +yarl==1.18.3 +zeroconf==0.136.2 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 @@ -76,9 +84,9 @@ httplib2>=0.19.0 # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. -grpcio==1.59.0 -grpcio-status==1.59.0 -grpcio-reflection==1.59.0 +grpcio==1.67.1 +grpcio-status==1.67.1 +grpcio-reflection==1.67.1 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -98,7 +106,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. -anyio==4.4.0 +anyio==4.6.2.post1 h11==0.14.0 httpcore==1.0.5 @@ -107,13 +115,8 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==1.26.0 - -# Prevent dependency conflicts between sisyphus-control and aioambient -# until upper bounds for sisyphus-control have been updated -# https://github.com/jkeljo/sisyphus-control/issues/6 -python-engineio>=3.13.1,<4.0 -python-socketio>=4.6.0,<5.0 +numpy==2.2.0 +pandas~=2.2.3 # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 @@ -122,9 +125,8 @@ multidict>=6.0.2 # Version 2.0 added typing, prevent accidental fallbacks backoff>=2.0 -# Required to avoid breaking (#101042). -# v2 has breaking changes (#99218). -pydantic==1.10.17 +# ensure pydantic version does not float since it might have breaking changes +pydantic==2.10.3 # Required for Python 3.12.4 compatibility (#119223). 
mashumaro>=3.13.1 @@ -143,16 +145,18 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.4 +protobuf==5.28.3 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder faust-cchardet>=2.1.18 -# websockets 11.0 is missing files in the source distribution -# which break wheel builds so we need at least 11.0.1 -# https://github.com/aaugustin/websockets/issues/1329 -websockets>=11.0.1 +# websockets 13.1 is the first version to fully support the new +# asyncio implementation. The legacy implementation is now +# deprecated as of websockets 14.0. +# https://websockets.readthedocs.io/en/13.0.1/howto/upgrade.html#missing-features +# https://websockets.readthedocs.io/en/stable/howto/upgrade.html +websockets>=13.1 # pysnmplib is no longer maintained and does not work with newer # python @@ -165,15 +169,12 @@ get-mac==1000000000.0.0 # We want to skip the binary wheels for the 'charset-normalizer' packages. # They are build with mypyc, but causes issues with our wheel builder. # In order to do so, we need to constrain the version. -charset-normalizer==3.2.0 +charset-normalizer==3.4.0 # dacite: Ensure we have a version that is able to handle type unions for -# Roborock, NAM, Brother, and GIOS. +# NAM, Brother, and GIOS. dacite>=1.7.0 -# Musle wheels for pandas 2.2.0 cannot be build for any architecture. -pandas==2.1.4 - # chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x chacha20poly1305-reuseable>=0.13.0 @@ -181,8 +182,8 @@ chacha20poly1305-reuseable>=0.13.0 # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 pycountry>=23.12.11 -# scapy<2.5.0 will not work with python3.12 -scapy>=2.5.0 +# scapy==2.6.0 causes CI failures due to a race condition +scapy>=2.6.1 # tuf isn't updated to deal with breaking changes in securesystemslib==1.0. # Only tuf>=4 includes a constraint to <1.0. @@ -191,3 +192,15 @@ tuf>=4.0.0 # https://github.com/jd/tenacity/issues/471 tenacity!=8.4.0 + +# 5.0.0 breaks Timeout as a context manager +# TypeError: 'Timeout' object does not support the context manager protocol +async-timeout==4.0.3 + +# aiofiles keeps getting downgraded by custom components +# causing newer methods to not be available and breaking +# some integrations at startup +# https://github.com/home-assistant/core/issues/127529 +# https://github.com/home-assistant/core/issues/122508 +# https://github.com/home-assistant/core/issues/118004 +aiofiles>=24.1.0 diff --git a/homeassistant/runner.py b/homeassistant/runner.py index 4bac12ec399..59775655854 100644 --- a/homeassistant/runner.py +++ b/homeassistant/runner.py @@ -3,10 +3,8 @@ from __future__ import annotations import asyncio -from asyncio import events import dataclasses import logging -import os import subprocess import threading from time import monotonic @@ -58,22 +56,6 @@ class RuntimeConfig: safe_mode: bool = False -def can_use_pidfd() -> bool: - """Check if pidfd_open is available. 
- - Back ported from cpython 3.12 - """ - if not hasattr(os, "pidfd_open"): - return False - try: - pid = os.getpid() - os.close(os.pidfd_open(pid, 0)) - except OSError: - # blocked by security policy like SECCOMP - return False - return True - - class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): """Event loop policy for Home Assistant.""" @@ -81,23 +63,6 @@ class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): """Init the event loop policy.""" super().__init__() self.debug = debug - self._watcher: asyncio.AbstractChildWatcher | None = None - - def _init_watcher(self) -> None: - """Initialize the watcher for child processes. - - Back ported from cpython 3.12 - """ - with events._lock: # type: ignore[attr-defined] # noqa: SLF001 - if self._watcher is None: # pragma: no branch - if can_use_pidfd(): - self._watcher = asyncio.PidfdChildWatcher() - else: - self._watcher = asyncio.ThreadedChildWatcher() - if threading.current_thread() is threading.main_thread(): - self._watcher.attach_loop( - self._local._loop # type: ignore[attr-defined] # noqa: SLF001 - ) @property def loop_name(self) -> str: @@ -175,7 +140,7 @@ def _enable_posix_spawn() -> None: # less efficient. This is a workaround to force posix_spawn() # when using musl since cpython is not aware its supported. tag = next(packaging.tags.sys_tags()) - subprocess._USE_POSIX_SPAWN = "musllinux" in tag.platform # noqa: SLF001 + subprocess._USE_POSIX_SPAWN = "musllinux" in tag.platform # type: ignore[misc] # noqa: SLF001 def run(runtime_config: RuntimeConfig) -> int: diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 102c48e1d07..331389da7c6 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -281,7 +281,7 @@ async def _async_setup_component( integration = await loader.async_get_integration(hass, domain) except loader.IntegrationNotFound: _log_error_setup_error(hass, domain, None, "Integration not found.") - if not hass.config.safe_mode: + if not hass.config.safe_mode and hass.config_entries.async_entries(domain): ir.async_create_issue( hass, HOMEASSISTANT_DOMAIN, diff --git a/homeassistant/util/aiohttp.py b/homeassistant/util/aiohttp.py index 2a4616ee634..5571861f417 100644 --- a/homeassistant/util/aiohttp.py +++ b/homeassistant/util/aiohttp.py @@ -28,6 +28,19 @@ class MockStreamReader: return self._content.read(byte_count) +class MockPayloadWriter: + """Small mock to imitate payload writer.""" + + def enable_chunking(self) -> None: + """Enable chunking.""" + + async def write_headers(self, *args: Any, **kwargs: Any) -> None: + """Write headers.""" + + +_MOCK_PAYLOAD_WRITER = MockPayloadWriter() + + class MockRequest: """Mock an aiohttp request.""" @@ -49,8 +62,14 @@ class MockRequest: self.status = status self.headers: CIMultiDict[str] = CIMultiDict(headers or {}) self.query_string = query_string or "" + self.keep_alive = False + self.version = (1, 1) self._content = content self.mock_source = mock_source + self._payload_writer = _MOCK_PAYLOAD_WRITER + + async def _prepare_hook(self, response: Any) -> None: + """Prepare hook.""" @property def query(self) -> MultiDict[str]: @@ -90,7 +109,7 @@ def serialize_response(response: web.Response) -> dict[str, Any]: if (body := response.body) is None: body_decoded = None elif isinstance(body, payload.StringPayload): - body_decoded = body._value.decode(body.encoding) # noqa: SLF001 + body_decoded = body._value.decode(body.encoding or "utf-8") # noqa: SLF001 elif isinstance(body, bytes): body_decoded = body.decode(response.charset or "utf-8") else: diff --git 
a/homeassistant/util/async_.py b/homeassistant/util/async_.py index d010d8cb341..f8901d11114 100644 --- a/homeassistant/util/async_.py +++ b/homeassistant/util/async_.py @@ -39,7 +39,7 @@ def create_eager_task[_T]( # pylint: disable-next=import-outside-toplevel from homeassistant.helpers import frame - frame.report("attempted to create an asyncio task from a thread") + frame.report_usage("attempted to create an asyncio task from a thread") raise return Task(coro, loop=loop, name=name, eager_start=True) diff --git a/homeassistant/util/color.py b/homeassistant/util/color.py index 0745bc96dfb..18f8182650b 100644 --- a/homeassistant/util/color.py +++ b/homeassistant/util/color.py @@ -377,7 +377,7 @@ def color_hsv_to_RGB(iH: float, iS: float, iV: float) -> tuple[int, int, int]: Val is scaled 0-100 """ fRGB = colorsys.hsv_to_rgb(iH / 360, iS / 100, iV / 100) - return (int(fRGB[0] * 255), int(fRGB[1] * 255), int(fRGB[2] * 255)) + return (round(fRGB[0] * 255), round(fRGB[1] * 255), round(fRGB[2] * 255)) def color_hs_to_RGB(iH: float, iS: float) -> tuple[int, int, int]: diff --git a/homeassistant/util/dt.py b/homeassistant/util/dt.py index 30cf7222f3a..eb898e4b544 100644 --- a/homeassistant/util/dt.py +++ b/homeassistant/util/dt.py @@ -13,6 +13,8 @@ import zoneinfo from aiozoneinfo import async_get_time_zone as _async_get_time_zone import ciso8601 +from homeassistant.helpers.deprecation import deprecated_function + DATE_STR_FORMAT = "%Y-%m-%d" UTC = dt.UTC DEFAULT_TIME_ZONE: dt.tzinfo = dt.UTC @@ -95,7 +97,7 @@ def set_default_time_zone(time_zone: dt.tzinfo) -> None: get_default_time_zone.cache_clear() -def get_time_zone(time_zone_str: str) -> dt.tzinfo | None: +def get_time_zone(time_zone_str: str) -> zoneinfo.ZoneInfo | None: """Get time zone from string. Return None if unable to determine. Must be run in the executor if the ZoneInfo is not already @@ -107,7 +109,7 @@ def get_time_zone(time_zone_str: str) -> dt.tzinfo | None: return None -async def async_get_time_zone(time_zone_str: str) -> dt.tzinfo | None: +async def async_get_time_zone(time_zone_str: str) -> zoneinfo.ZoneInfo | None: """Get time zone from string. Return None if unable to determine. Async friendly. 
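Aside (illustrative, not part of the patch): narrowing the return annotation of get_time_zone/async_get_time_zone from dt.tzinfo | None to zoneinfo.ZoneInfo | None lets type-checked callers reach ZoneInfo-specific attributes such as .key without a cast. A minimal usage sketch, assuming it runs inside Home Assistant's event loop and that homeassistant.util.dt is importable as shown in this diff; the timezone name is only an example:

from homeassistant.util import dt as dt_util

async def log_timezone_key() -> None:
    # async_get_time_zone now resolves to zoneinfo.ZoneInfo | None,
    # so .key (a ZoneInfo-only attribute) type-checks without a cast.
    tz = await dt_util.async_get_time_zone("Europe/Amsterdam")
    if tz is not None:
        print(tz.key)  # "Europe/Amsterdam"
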
@@ -170,6 +172,7 @@ utc_from_timestamp = partial(dt.datetime.fromtimestamp, tz=UTC) """Return a UTC time from a timestamp.""" +@deprecated_function("datetime.timestamp", breaks_in_ha_version="2026.1") def utc_to_timestamp(utc_dt: dt.datetime) -> float: """Fast conversion of a datetime in UTC to a timestamp.""" # Taken from diff --git a/homeassistant/util/frozen_dataclass_compat.py b/homeassistant/util/frozen_dataclass_compat.py index 6184e4564eb..81ce9961a0b 100644 --- a/homeassistant/util/frozen_dataclass_compat.py +++ b/homeassistant/util/frozen_dataclass_compat.py @@ -8,7 +8,10 @@ from __future__ import annotations import dataclasses import sys -from typing import Any, dataclass_transform +from typing import TYPE_CHECKING, Any, cast, dataclass_transform + +if TYPE_CHECKING: + from _typeshed import DataclassInstance def _class_fields(cls: type, kw_only: bool) -> list[tuple[str, Any, Any]]: @@ -111,6 +114,8 @@ class FrozenOrThawed(type): """ cls, *_args = args if dataclasses.is_dataclass(cls): + if TYPE_CHECKING: + cls = cast(type[DataclassInstance], cls) return object.__new__(cls) return cls._dataclass(*_args, **kwargs) diff --git a/homeassistant/util/json.py b/homeassistant/util/json.py index fa67f6b1dcc..968567ae0c9 100644 --- a/homeassistant/util/json.py +++ b/homeassistant/util/json.py @@ -30,32 +30,30 @@ class SerializationError(HomeAssistantError): """Error serializing the data to JSON.""" -def json_loads(__obj: bytes | bytearray | memoryview | str) -> JsonValueType: +def json_loads(obj: bytes | bytearray | memoryview | str, /) -> JsonValueType: """Parse JSON data. This adds a workaround for orjson not handling subclasses of str, https://github.com/ijl/orjson/issues/445. """ # Avoid isinstance overhead for the common case - if type(__obj) not in (bytes, bytearray, memoryview, str) and isinstance( - __obj, str - ): - return orjson.loads(str(__obj)) # type:ignore[no-any-return] - return orjson.loads(__obj) # type:ignore[no-any-return] + if type(obj) not in (bytes, bytearray, memoryview, str) and isinstance(obj, str): + return orjson.loads(str(obj)) # type:ignore[no-any-return] + return orjson.loads(obj) # type:ignore[no-any-return] -def json_loads_array(__obj: bytes | bytearray | memoryview | str) -> JsonArrayType: +def json_loads_array(obj: bytes | bytearray | memoryview | str, /) -> JsonArrayType: """Parse JSON data and ensure result is a list.""" - value: JsonValueType = json_loads(__obj) + value: JsonValueType = json_loads(obj) # Avoid isinstance overhead as we are not interested in list subclasses if type(value) is list: # noqa: E721 return value raise ValueError(f"Expected JSON to be parsed as a list got {type(value)}") -def json_loads_object(__obj: bytes | bytearray | memoryview | str) -> JsonObjectType: +def json_loads_object(obj: bytes | bytearray | memoryview | str, /) -> JsonObjectType: """Parse JSON data and ensure result is a dictionary.""" - value: JsonValueType = json_loads(__obj) + value: JsonValueType = json_loads(obj) # Avoid isinstance overhead as we are not interested in dict subclasses if type(value) is dict: # noqa: E721 return value diff --git a/homeassistant/util/logging.py b/homeassistant/util/logging.py index d2554ef543c..2c4eb744614 100644 --- a/homeassistant/util/logging.py +++ b/homeassistant/util/logging.py @@ -196,8 +196,8 @@ def async_create_catching_coro[_T]( trace = traceback.extract_stack() return catch_log_coro_exception( target, - lambda: "Exception in {} called from\n {}".format( - target.__name__, - "".join(traceback.format_list(trace[:-1])), + 
lambda: ( + f"Exception in {target.__name__} called from\n" + + "".join(traceback.format_list(trace[:-1])) ), ) diff --git a/homeassistant/util/package.py b/homeassistant/util/package.py index 067bf5ff36d..da0666290a1 100644 --- a/homeassistant/util/package.py +++ b/homeassistant/util/package.py @@ -8,6 +8,7 @@ from importlib.metadata import PackageNotFoundError, version import logging import os from pathlib import Path +import site from subprocess import PIPE, Popen import sys from urllib.parse import urlparse @@ -83,6 +84,12 @@ def is_installed(requirement_str: str) -> bool: return False +_UV_ENV_PYTHON_VARS = ( + "UV_SYSTEM_PYTHON", + "UV_PYTHON", +) + + def install_package( package: str, upgrade: bool = True, @@ -94,22 +101,44 @@ def install_package( Return boolean if install successful. """ - # Not using 'import pip; pip.main([])' because it breaks the logger _LOGGER.info("Attempting install of %s", package) env = os.environ.copy() - args = [sys.executable, "-m", "pip", "install", "--quiet", package] + args = [ + sys.executable, + "-m", + "uv", + "pip", + "install", + "--quiet", + package, + # We need to use unsafe-first-match for custom components + # which can use a different version of a package than the one + # we have built the wheel for. + "--index-strategy", + "unsafe-first-match", + ] if timeout: - args += ["--timeout", str(timeout)] + env["HTTP_TIMEOUT"] = str(timeout) if upgrade: args.append("--upgrade") if constraints is not None: args += ["--constraint", constraints] if target: - assert not is_virtual_env() - # This only works if not running in venv - args += ["--user"] - env["PYTHONUSERBASE"] = os.path.abspath(target) - _LOGGER.debug("Running pip command: args=%s", args) + abs_target = os.path.abspath(target) + args += ["--target", abs_target] + elif ( + not is_virtual_env() + and not (any(var in env for var in _UV_ENV_PYTHON_VARS)) + and (abs_target := site.getusersitepackages()) + ): + # Pip compatibility + # Uv has currently no support for --user + # See https://github.com/astral-sh/uv/issues/2077 + # Using workaround to install to site-packages + # https://github.com/astral-sh/uv/issues/2077#issuecomment-2150406001 + args += ["--python", sys.executable, "--target", abs_target] + + _LOGGER.debug("Running uv pip command: args=%s", args) with Popen( args, stdin=PIPE, diff --git a/homeassistant/util/pil.py b/homeassistant/util/pil.py index 733f640ce48..6925cd03a4c 100644 --- a/homeassistant/util/pil.py +++ b/homeassistant/util/pil.py @@ -28,7 +28,7 @@ def draw_box( """ line_width = 3 - font_height = 8 + font_height = 20 y_min, x_min, y_max, x_max = box (left, right, top, bottom) = ( x_min * img_width, @@ -43,5 +43,8 @@ def draw_box( ) if text: draw.text( - (left + line_width, abs(top - line_width - font_height)), text, fill=color + (left + line_width, abs(top - line_width - font_height)), + text, + fill=color, + font_size=font_height, ) diff --git a/homeassistant/util/ssl.py b/homeassistant/util/ssl.py index 7c1e653ce75..a22fd0c8fb4 100644 --- a/homeassistant/util/ssl.py +++ b/homeassistant/util/ssl.py @@ -15,6 +15,7 @@ class SSLCipherList(StrEnum): PYTHON_DEFAULT = "python_default" INTERMEDIATE = "intermediate" MODERN = "modern" + INSECURE = "insecure" SSL_CIPHER_LISTS = { @@ -58,11 +59,12 @@ SSL_CIPHER_LISTS = { "ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:" "ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256" ), + SSLCipherList.INSECURE: "DEFAULT:@SECLEVEL=0", } @cache -def _create_no_verify_ssl_context(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext: +def 
_client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext: # This is a copy of aiohttp's create_default_context() function, with the # ssl verify turned off. # https://github.com/aio-libs/aiohttp/blob/33953f110e97eecc707e1402daa8d543f38a189b/aiohttp/connector.py#L911 @@ -80,16 +82,10 @@ def _create_no_verify_ssl_context(ssl_cipher_list: SSLCipherList) -> ssl.SSLCont return sslcontext -def create_no_verify_ssl_context( +@cache +def _client_context( ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, ) -> ssl.SSLContext: - """Return an SSL context that does not verify the server certificate.""" - - return _create_no_verify_ssl_context(ssl_cipher_list=ssl_cipher_list) - - -@cache -def _client_context(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext: # Reuse environment variable definition from requests, since it's already a # requirement. If the environment variable has no value, fall back to using # certs from certifi package. @@ -104,17 +100,19 @@ def _client_context(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext: return sslcontext -def client_context( - ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, -) -> ssl.SSLContext: - """Return an SSL context for making requests.""" - - return _client_context(ssl_cipher_list=ssl_cipher_list) - - # Create this only once and reuse it -_DEFAULT_SSL_CONTEXT = client_context() -_DEFAULT_NO_VERIFY_SSL_CONTEXT = create_no_verify_ssl_context() +_DEFAULT_SSL_CONTEXT = _client_context(SSLCipherList.PYTHON_DEFAULT) +_DEFAULT_NO_VERIFY_SSL_CONTEXT = _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT) +_NO_VERIFY_SSL_CONTEXTS = { + SSLCipherList.INTERMEDIATE: _client_context_no_verify(SSLCipherList.INTERMEDIATE), + SSLCipherList.MODERN: _client_context_no_verify(SSLCipherList.MODERN), + SSLCipherList.INSECURE: _client_context_no_verify(SSLCipherList.INSECURE), +} +_SSL_CONTEXTS = { + SSLCipherList.INTERMEDIATE: _client_context(SSLCipherList.INTERMEDIATE), + SSLCipherList.MODERN: _client_context(SSLCipherList.MODERN), + SSLCipherList.INSECURE: _client_context(SSLCipherList.INSECURE), +} def get_default_context() -> ssl.SSLContext: @@ -127,6 +125,27 @@ def get_default_no_verify_context() -> ssl.SSLContext: return _DEFAULT_NO_VERIFY_SSL_CONTEXT +def client_context_no_verify( + ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, +) -> ssl.SSLContext: + """Return a SSL context with no verification with a specific ssl cipher.""" + return _NO_VERIFY_SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_NO_VERIFY_SSL_CONTEXT) + + +def client_context( + ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, +) -> ssl.SSLContext: + """Return an SSL context for making requests.""" + return _SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_SSL_CONTEXT) + + +def create_no_verify_ssl_context( + ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, +) -> ssl.SSLContext: + """Return an SSL context that does not verify the server certificate.""" + return _client_context_no_verify(ssl_cipher_list) + + def server_context_modern() -> ssl.SSLContext: """Return an SSL context following the Mozilla recommendations. 
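Aside (illustrative, not part of the patch): after this rework the SSL contexts are built once at import time and served from the _SSL_CONTEXTS / _NO_VERIFY_SSL_CONTEXTS lookup tables, so callers select a cipher profile without constructing a new ssl.SSLContext per request. A minimal usage sketch using only the public helpers shown in the hunk above:

from homeassistant.util.ssl import (
    SSLCipherList,
    client_context,
    client_context_no_verify,
)

# Verifying context with the default (python_default) cipher list.
default_ctx = client_context()

# Non-verifying context with the new INSECURE profile (SECLEVEL=0),
# e.g. for devices that only speak legacy TLS.
legacy_ctx = client_context_no_verify(SSLCipherList.INSECURE)

# Contexts are cached module-level singletons, not rebuilt per call.
assert client_context() is default_ctx
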
diff --git a/homeassistant/util/timeout.py b/homeassistant/util/timeout.py index bf7bd90657e..7ae74624a0b 100644 --- a/homeassistant/util/timeout.py +++ b/homeassistant/util/timeout.py @@ -16,7 +16,7 @@ from .async_ import run_callback_threadsafe ZONE_GLOBAL = "global" -class _State(str, enum.Enum): +class _State(enum.Enum): """States of a task.""" INIT = "INIT" @@ -161,11 +161,16 @@ class _GlobalTaskContext: self._wait_zone: asyncio.Event = asyncio.Event() self._state: _State = _State.INIT self._cool_down: float = cool_down + self._cancelling = 0 async def __aenter__(self) -> Self: self._manager.global_tasks.append(self) self._start_timer() self._state = _State.ACTIVE + # Remember if the task was already cancelling + # so when we __aexit__ we can decide if we should + # raise asyncio.TimeoutError or let the cancellation propagate + self._cancelling = self._task.cancelling() return self async def __aexit__( @@ -178,7 +183,15 @@ class _GlobalTaskContext: self._manager.global_tasks.remove(self) # Timeout on exit - if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT: + if exc_type is asyncio.CancelledError and self.state is _State.TIMEOUT: + # The timeout was hit, and the task was cancelled + # so we need to uncancel the task since the cancellation + # should not leak out of the context manager + if self._task.uncancel() > self._cancelling: + # If the task was already cancelling don't raise + # asyncio.TimeoutError and instead return None + # to allow the cancellation to propagate + return None raise TimeoutError self._state = _State.EXIT @@ -267,6 +280,7 @@ class _ZoneTaskContext: self._time_left: float = timeout self._expiration_time: float | None = None self._timeout_handler: asyncio.Handle | None = None + self._cancelling = 0 @property def state(self) -> _State: @@ -281,6 +295,11 @@ class _ZoneTaskContext: if self._zone.freezes_done: self._start_timer() + # Remember if the task was already cancelling + # so when we __aexit__ we can decide if we should + # raise asyncio.TimeoutError or let the cancellation propagate + self._cancelling = self._task.cancelling() + return self async def __aexit__( @@ -293,7 +312,15 @@ class _ZoneTaskContext: self._stop_timer() # Timeout on exit - if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT: + if exc_type is asyncio.CancelledError and self.state is _State.TIMEOUT: + # The timeout was hit, and the task was cancelled + # so we need to uncancel the task since the cancellation + # should not leak out of the context manager + if self._task.uncancel() > self._cancelling: + # If the task was already cancelling don't raise + # asyncio.TimeoutError and instead return None + # to allow the cancellation to propagate + return None raise TimeoutError self._state = _State.EXIT diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 2b9f73afab7..8bf6d4b9fc9 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -10,6 +10,8 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, UNIT_NOT_RECOGNIZED_TEMPLATE, + UnitOfArea, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -41,12 +43,29 @@ _MILE_TO_M = _YARD_TO_M * 1760 # 1760 yard = 1 mile (1609.344 m) _NAUTICAL_MILE_TO_M = 1852 # 1 nautical mile = 1852 m +# Area constants to square meters +_CM2_TO_M2 = _CM_TO_M**2 # 1 cm² = 0.0001 m² +_MM2_TO_M2 = _MM_TO_M**2 # 1 mm² = 0.000001 m² +_KM2_TO_M2 = _KM_TO_M**2 # 1 km² = 1,000,000 
m² + +_IN2_TO_M2 = _IN_TO_M**2 # 1 in² = 0.00064516 m² +_FT2_TO_M2 = _FOOT_TO_M**2 # 1 ft² = 0.092903 m² +_YD2_TO_M2 = _YARD_TO_M**2 # 1 yd² = 0.836127 m² +_MI2_TO_M2 = _MILE_TO_M**2 # 1 mi² = 2,590,000 m² + +_ACRE_TO_M2 = 66 * 660 * _FT2_TO_M2 # 1 acre = 4,046.86 m² +_HECTARE_TO_M2 = 100 * 100 # 1 hectare = 10,000 m² + # Duration conversion constants _MIN_TO_SEC = 60 # 1 min = 60 seconds _HRS_TO_MINUTES = 60 # 1 hr = 60 minutes _HRS_TO_SECS = _HRS_TO_MINUTES * _MIN_TO_SEC # 1 hr = 60 minutes = 3600 seconds _DAYS_TO_SECS = 24 * _HRS_TO_SECS # 1 day = 24 hours = 86400 seconds +# Energy conversion constants +_WH_TO_J = 3600 # 1 Wh = 3600 J +_WH_TO_CAL = _WH_TO_J / 4.184 # 1 Wh = 860.42065 cal + # Mass conversion constants _POUND_TO_G = 453.59237 _OUNCE_TO_G = _POUND_TO_G / 16 # 16 ounces to a pound @@ -68,7 +87,6 @@ class BaseUnitConverter: """Define the format of a conversion utility.""" UNIT_CLASS: str - NORMALIZED_UNIT: str | None VALID_UNITS: set[str | None] _UNIT_CONVERSION: dict[str | None, float] @@ -125,7 +143,6 @@ class DataRateConverter(BaseUnitConverter): """Utility to convert data rate values.""" UNIT_CLASS = "data_rate" - NORMALIZED_UNIT = UnitOfDataRate.BITS_PER_SECOND # Units in terms of bits _UNIT_CONVERSION: dict[str | None, float] = { UnitOfDataRate.BITS_PER_SECOND: 1, @@ -143,11 +160,29 @@ class DataRateConverter(BaseUnitConverter): VALID_UNITS = set(UnitOfDataRate) +class AreaConverter(BaseUnitConverter): + """Utility to convert area values.""" + + UNIT_CLASS = "area" + _UNIT_CONVERSION: dict[str | None, float] = { + UnitOfArea.SQUARE_METERS: 1, + UnitOfArea.SQUARE_CENTIMETERS: 1 / _CM2_TO_M2, + UnitOfArea.SQUARE_MILLIMETERS: 1 / _MM2_TO_M2, + UnitOfArea.SQUARE_KILOMETERS: 1 / _KM2_TO_M2, + UnitOfArea.SQUARE_INCHES: 1 / _IN2_TO_M2, + UnitOfArea.SQUARE_FEET: 1 / _FT2_TO_M2, + UnitOfArea.SQUARE_YARDS: 1 / _YD2_TO_M2, + UnitOfArea.SQUARE_MILES: 1 / _MI2_TO_M2, + UnitOfArea.ACRES: 1 / _ACRE_TO_M2, + UnitOfArea.HECTARES: 1 / _HECTARE_TO_M2, + } + VALID_UNITS = set(UnitOfArea) + + class DistanceConverter(BaseUnitConverter): """Utility to convert distance values.""" UNIT_CLASS = "distance" - NORMALIZED_UNIT = UnitOfLength.METERS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfLength.METERS: 1, UnitOfLength.MILLIMETERS: 1 / _MM_TO_M, @@ -157,10 +192,12 @@ class DistanceConverter(BaseUnitConverter): UnitOfLength.FEET: 1 / _FOOT_TO_M, UnitOfLength.YARDS: 1 / _YARD_TO_M, UnitOfLength.MILES: 1 / _MILE_TO_M, + UnitOfLength.NAUTICAL_MILES: 1 / _NAUTICAL_MILE_TO_M, } VALID_UNITS = { UnitOfLength.KILOMETERS, UnitOfLength.MILES, + UnitOfLength.NAUTICAL_MILES, UnitOfLength.FEET, UnitOfLength.METERS, UnitOfLength.CENTIMETERS, @@ -170,15 +207,25 @@ class DistanceConverter(BaseUnitConverter): } +class BloodGlucoseConcentrationConverter(BaseUnitConverter): + """Utility to convert blood glucose concentration values.""" + + UNIT_CLASS = "blood_glucose_concentration" + _UNIT_CONVERSION: dict[str | None, float] = { + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER: 18, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER: 1, + } + VALID_UNITS = set(UnitOfBloodGlucoseConcentration) + + class ConductivityConverter(BaseUnitConverter): """Utility to convert electric current values.""" UNIT_CLASS = "conductivity" - NORMALIZED_UNIT = UnitOfConductivity.MICROSIEMENS _UNIT_CONVERSION: dict[str | None, float] = { - UnitOfConductivity.MICROSIEMENS: 1, - UnitOfConductivity.MILLISIEMENS: 1e-3, - UnitOfConductivity.SIEMENS: 1e-6, + UnitOfConductivity.MICROSIEMENS_PER_CM: 1, + 
UnitOfConductivity.MILLISIEMENS_PER_CM: 1e-3, + UnitOfConductivity.SIEMENS_PER_CM: 1e-6, } VALID_UNITS = set(UnitOfConductivity) @@ -187,7 +234,6 @@ class ElectricCurrentConverter(BaseUnitConverter): """Utility to convert electric current values.""" UNIT_CLASS = "electric_current" - NORMALIZED_UNIT = UnitOfElectricCurrent.AMPERE _UNIT_CONVERSION: dict[str | None, float] = { UnitOfElectricCurrent.AMPERE: 1, UnitOfElectricCurrent.MILLIAMPERE: 1e3, @@ -199,14 +245,15 @@ class ElectricPotentialConverter(BaseUnitConverter): """Utility to convert electric potential values.""" UNIT_CLASS = "voltage" - NORMALIZED_UNIT = UnitOfElectricPotential.VOLT _UNIT_CONVERSION: dict[str | None, float] = { UnitOfElectricPotential.VOLT: 1, UnitOfElectricPotential.MILLIVOLT: 1e3, + UnitOfElectricPotential.MICROVOLT: 1e6, } VALID_UNITS = { UnitOfElectricPotential.VOLT, UnitOfElectricPotential.MILLIVOLT, + UnitOfElectricPotential.MICROVOLT, } @@ -214,28 +261,29 @@ class EnergyConverter(BaseUnitConverter): """Utility to convert energy values.""" UNIT_CLASS = "energy" - NORMALIZED_UNIT = UnitOfEnergy.KILO_WATT_HOUR _UNIT_CONVERSION: dict[str | None, float] = { - UnitOfEnergy.WATT_HOUR: 1 * 1000, + UnitOfEnergy.JOULE: _WH_TO_J * 1e3, + UnitOfEnergy.KILO_JOULE: _WH_TO_J, + UnitOfEnergy.MEGA_JOULE: _WH_TO_J / 1e3, + UnitOfEnergy.GIGA_JOULE: _WH_TO_J / 1e6, + UnitOfEnergy.MILLIWATT_HOUR: 1e6, + UnitOfEnergy.WATT_HOUR: 1e3, UnitOfEnergy.KILO_WATT_HOUR: 1, - UnitOfEnergy.MEGA_WATT_HOUR: 1 / 1000, - UnitOfEnergy.MEGA_JOULE: 3.6, - UnitOfEnergy.GIGA_JOULE: 3.6 / 1000, - } - VALID_UNITS = { - UnitOfEnergy.WATT_HOUR, - UnitOfEnergy.KILO_WATT_HOUR, - UnitOfEnergy.MEGA_WATT_HOUR, - UnitOfEnergy.MEGA_JOULE, - UnitOfEnergy.GIGA_JOULE, + UnitOfEnergy.MEGA_WATT_HOUR: 1 / 1e3, + UnitOfEnergy.GIGA_WATT_HOUR: 1 / 1e6, + UnitOfEnergy.TERA_WATT_HOUR: 1 / 1e9, + UnitOfEnergy.CALORIE: _WH_TO_CAL * 1e3, + UnitOfEnergy.KILO_CALORIE: _WH_TO_CAL, + UnitOfEnergy.MEGA_CALORIE: _WH_TO_CAL / 1e3, + UnitOfEnergy.GIGA_CALORIE: _WH_TO_CAL / 1e6, } + VALID_UNITS = set(UnitOfEnergy) class InformationConverter(BaseUnitConverter): """Utility to convert information values.""" UNIT_CLASS = "information" - NORMALIZED_UNIT = UnitOfInformation.BITS # Units in terms of bits _UNIT_CONVERSION: dict[str | None, float] = { UnitOfInformation.BITS: 1, @@ -267,7 +315,6 @@ class MassConverter(BaseUnitConverter): """Utility to convert mass values.""" UNIT_CLASS = "mass" - NORMALIZED_UNIT = UnitOfMass.GRAMS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfMass.MICROGRAMS: 1 * 1000 * 1000, UnitOfMass.MILLIGRAMS: 1 * 1000, @@ -292,14 +339,19 @@ class PowerConverter(BaseUnitConverter): """Utility to convert power values.""" UNIT_CLASS = "power" - NORMALIZED_UNIT = UnitOfPower.WATT _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPower.WATT: 1, UnitOfPower.KILO_WATT: 1 / 1000, + UnitOfPower.MEGA_WATT: 1 / 1e6, + UnitOfPower.GIGA_WATT: 1 / 1e9, + UnitOfPower.TERA_WATT: 1 / 1e12, } VALID_UNITS = { UnitOfPower.WATT, UnitOfPower.KILO_WATT, + UnitOfPower.MEGA_WATT, + UnitOfPower.GIGA_WATT, + UnitOfPower.TERA_WATT, } @@ -307,7 +359,6 @@ class PressureConverter(BaseUnitConverter): """Utility to convert pressure values.""" UNIT_CLASS = "pressure" - NORMALIZED_UNIT = UnitOfPressure.PA _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPressure.PA: 1, UnitOfPressure.HPA: 1 / 100, @@ -338,16 +389,17 @@ class SpeedConverter(BaseUnitConverter): """Utility to convert speed values.""" UNIT_CLASS = "speed" - NORMALIZED_UNIT = UnitOfSpeed.METERS_PER_SECOND _UNIT_CONVERSION: dict[str 
| None, float] = { UnitOfVolumetricFlux.INCHES_PER_DAY: _DAYS_TO_SECS / _IN_TO_M, UnitOfVolumetricFlux.INCHES_PER_HOUR: _HRS_TO_SECS / _IN_TO_M, UnitOfVolumetricFlux.MILLIMETERS_PER_DAY: _DAYS_TO_SECS / _MM_TO_M, UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR: _HRS_TO_SECS / _MM_TO_M, UnitOfSpeed.FEET_PER_SECOND: 1 / _FOOT_TO_M, + UnitOfSpeed.INCHES_PER_SECOND: 1 / _IN_TO_M, UnitOfSpeed.KILOMETERS_PER_HOUR: _HRS_TO_SECS / _KM_TO_M, UnitOfSpeed.KNOTS: _HRS_TO_SECS / _NAUTICAL_MILE_TO_M, UnitOfSpeed.METERS_PER_SECOND: 1, + UnitOfSpeed.MILLIMETERS_PER_SECOND: 1 / _MM_TO_M, UnitOfSpeed.MILES_PER_HOUR: _HRS_TO_SECS / _MILE_TO_M, UnitOfSpeed.BEAUFORT: 1, } @@ -356,11 +408,13 @@ class SpeedConverter(BaseUnitConverter): UnitOfVolumetricFlux.INCHES_PER_HOUR, UnitOfVolumetricFlux.MILLIMETERS_PER_DAY, UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, + UnitOfSpeed.INCHES_PER_SECOND, UnitOfSpeed.FEET_PER_SECOND, UnitOfSpeed.KILOMETERS_PER_HOUR, UnitOfSpeed.KNOTS, UnitOfSpeed.METERS_PER_SECOND, UnitOfSpeed.MILES_PER_HOUR, + UnitOfSpeed.MILLIMETERS_PER_SECOND, UnitOfSpeed.BEAUFORT, } @@ -433,7 +487,6 @@ class TemperatureConverter(BaseUnitConverter): """Utility to convert temperature values.""" UNIT_CLASS = "temperature" - NORMALIZED_UNIT = UnitOfTemperature.CELSIUS VALID_UNITS = { UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT, @@ -564,7 +617,6 @@ class UnitlessRatioConverter(BaseUnitConverter): """Utility to convert unitless ratios.""" UNIT_CLASS = "unitless" - NORMALIZED_UNIT = None _UNIT_CONVERSION: dict[str | None, float] = { None: 1, CONCENTRATION_PARTS_PER_BILLION: 1000000000, @@ -581,7 +633,6 @@ class VolumeConverter(BaseUnitConverter): """Utility to convert volume values.""" UNIT_CLASS = "volume" - NORMALIZED_UNIT = UnitOfVolume.CUBIC_METERS # Units in terms of m³ _UNIT_CONVERSION: dict[str | None, float] = { UnitOfVolume.LITERS: 1 / _L_TO_CUBIC_METER, @@ -607,7 +658,6 @@ class VolumeFlowRateConverter(BaseUnitConverter): """Utility to convert volume values.""" UNIT_CLASS = "volume_flow_rate" - NORMALIZED_UNIT = UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR # Units in terms of m³/h _UNIT_CONVERSION: dict[str | None, float] = { UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR: 1, @@ -617,12 +667,15 @@ class VolumeFlowRateConverter(BaseUnitConverter): / (_HRS_TO_MINUTES * _L_TO_CUBIC_METER), UnitOfVolumeFlowRate.GALLONS_PER_MINUTE: 1 / (_HRS_TO_MINUTES * _GALLON_TO_CUBIC_METER), + UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND: 1 + / (_HRS_TO_SECS * _ML_TO_CUBIC_METER), } VALID_UNITS = { UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, UnitOfVolumeFlowRate.LITERS_PER_MINUTE, UnitOfVolumeFlowRate.GALLONS_PER_MINUTE, + UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND, } @@ -630,7 +683,6 @@ class DurationConverter(BaseUnitConverter): """Utility to convert duration values.""" UNIT_CLASS = "duration" - NORMALIZED_UNIT = UnitOfTime.SECONDS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfTime.MICROSECONDS: 1000000, UnitOfTime.MILLISECONDS: 1000, diff --git a/homeassistant/util/unit_system.py b/homeassistant/util/unit_system.py index bd31b4286ab..15993cbae47 100644 --- a/homeassistant/util/unit_system.py +++ b/homeassistant/util/unit_system.py @@ -9,6 +9,7 @@ import voluptuous as vol from homeassistant.const import ( ACCUMULATED_PRECIPITATION, + AREA, LENGTH, MASS, PRESSURE, @@ -16,6 +17,7 @@ from homeassistant.const import ( UNIT_NOT_RECOGNIZED_TEMPLATE, VOLUME, WIND_SPEED, + UnitOfArea, UnitOfLength, UnitOfMass, UnitOfPrecipitationDepth, @@ -27,6 +29,7 @@ from homeassistant.const 
import ( ) from .unit_conversion import ( + AreaConverter, DistanceConverter, PressureConverter, SpeedConverter, @@ -41,6 +44,8 @@ _CONF_UNIT_SYSTEM_IMPERIAL: Final = "imperial" _CONF_UNIT_SYSTEM_METRIC: Final = "metric" _CONF_UNIT_SYSTEM_US_CUSTOMARY: Final = "us_customary" +AREA_UNITS = AreaConverter.VALID_UNITS + LENGTH_UNITS = DistanceConverter.VALID_UNITS MASS_UNITS: set[str] = { @@ -58,23 +63,22 @@ WIND_SPEED_UNITS = SpeedConverter.VALID_UNITS TEMPERATURE_UNITS: set[str] = {UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS} +_VALID_BY_TYPE: dict[str, set[str] | set[str | None]] = { + LENGTH: LENGTH_UNITS, + ACCUMULATED_PRECIPITATION: LENGTH_UNITS, + WIND_SPEED: WIND_SPEED_UNITS, + TEMPERATURE: TEMPERATURE_UNITS, + MASS: MASS_UNITS, + VOLUME: VOLUME_UNITS, + PRESSURE: PRESSURE_UNITS, + AREA: AREA_UNITS, +} + def _is_valid_unit(unit: str, unit_type: str) -> bool: """Check if the unit is valid for it's type.""" - if unit_type == LENGTH: - return unit in LENGTH_UNITS - if unit_type == ACCUMULATED_PRECIPITATION: - return unit in LENGTH_UNITS - if unit_type == WIND_SPEED: - return unit in WIND_SPEED_UNITS - if unit_type == TEMPERATURE: - return unit in TEMPERATURE_UNITS - if unit_type == MASS: - return unit in MASS_UNITS - if unit_type == VOLUME: - return unit in VOLUME_UNITS - if unit_type == PRESSURE: - return unit in PRESSURE_UNITS + if units := _VALID_BY_TYPE.get(unit_type): + return unit in units return False @@ -86,6 +90,7 @@ class UnitSystem: name: str, *, accumulated_precipitation: UnitOfPrecipitationDepth, + area: UnitOfArea, conversions: dict[tuple[SensorDeviceClass | str | None, str | None], str], length: UnitOfLength, mass: UnitOfMass, @@ -99,6 +104,7 @@ class UnitSystem: UNIT_NOT_RECOGNIZED_TEMPLATE.format(unit, unit_type) for unit, unit_type in ( (accumulated_precipitation, ACCUMULATED_PRECIPITATION), + (area, AREA), (temperature, TEMPERATURE), (length, LENGTH), (wind_speed, WIND_SPEED), @@ -114,10 +120,11 @@ class UnitSystem: self._name = name self.accumulated_precipitation_unit = accumulated_precipitation - self.temperature_unit = temperature + self.area_unit = area self.length_unit = length self.mass_unit = mass self.pressure_unit = pressure + self.temperature_unit = temperature self.volume_unit = volume self.wind_speed_unit = wind_speed self._conversions = conversions @@ -151,6 +158,16 @@ class UnitSystem: precip, from_unit, self.accumulated_precipitation_unit ) + def area(self, area: float | None, from_unit: str) -> float: + """Convert the given area to this unit system.""" + if not isinstance(area, Number): + raise TypeError(f"{area!s} is not a numeric value.") + + # type ignore: https://github.com/python/mypy/issues/7207 + return AreaConverter.convert( # type: ignore[unreachable] + area, from_unit, self.area_unit + ) + def pressure(self, pressure: float | None, from_unit: str) -> float: """Convert the given pressure to this unit system.""" if not isinstance(pressure, Number): @@ -186,6 +203,7 @@ class UnitSystem: return { LENGTH: self.length_unit, ACCUMULATED_PRECIPITATION: self.accumulated_precipitation_unit, + AREA: self.area_unit, MASS: self.mass_unit, PRESSURE: self.pressure_unit, TEMPERATURE: self.temperature_unit, @@ -215,7 +233,6 @@ def _deprecated_unit_system(value: str) -> str: """Convert deprecated unit system.""" if value == _CONF_UNIT_SYSTEM_IMPERIAL: - # need to add warning in 2023.1 return _CONF_UNIT_SYSTEM_US_CUSTOMARY return value @@ -236,10 +253,17 @@ METRIC_SYSTEM = UnitSystem( for unit in UnitOfPressure if unit != UnitOfPressure.HPA }, + # 
Convert non-metric area + ("area", UnitOfArea.SQUARE_INCHES): UnitOfArea.SQUARE_CENTIMETERS, + ("area", UnitOfArea.SQUARE_FEET): UnitOfArea.SQUARE_METERS, + ("area", UnitOfArea.SQUARE_MILES): UnitOfArea.SQUARE_KILOMETERS, + ("area", UnitOfArea.SQUARE_YARDS): UnitOfArea.SQUARE_METERS, + ("area", UnitOfArea.ACRES): UnitOfArea.HECTARES, # Convert non-metric distances ("distance", UnitOfLength.FEET): UnitOfLength.METERS, ("distance", UnitOfLength.INCHES): UnitOfLength.MILLIMETERS, ("distance", UnitOfLength.MILES): UnitOfLength.KILOMETERS, + ("distance", UnitOfLength.NAUTICAL_MILES): UnitOfLength.KILOMETERS, ("distance", UnitOfLength.YARDS): UnitOfLength.METERS, # Convert non-metric volumes of gas meters ("gas", UnitOfVolume.CENTUM_CUBIC_FEET): UnitOfVolume.CUBIC_METERS, @@ -260,6 +284,7 @@ METRIC_SYSTEM = UnitSystem( ("pressure", UnitOfPressure.INHG): UnitOfPressure.HPA, # Convert non-metric speeds except knots to km/h ("speed", UnitOfSpeed.FEET_PER_SECOND): UnitOfSpeed.KILOMETERS_PER_HOUR, + ("speed", UnitOfSpeed.INCHES_PER_SECOND): UnitOfSpeed.MILLIMETERS_PER_SECOND, ("speed", UnitOfSpeed.MILES_PER_HOUR): UnitOfSpeed.KILOMETERS_PER_HOUR, ( "speed", @@ -285,6 +310,7 @@ METRIC_SYSTEM = UnitSystem( if unit not in (UnitOfSpeed.KILOMETERS_PER_HOUR, UnitOfSpeed.KNOTS) }, }, + area=UnitOfArea.SQUARE_METERS, length=UnitOfLength.KILOMETERS, mass=UnitOfMass.GRAMS, pressure=UnitOfPressure.PA, @@ -303,6 +329,12 @@ US_CUSTOMARY_SYSTEM = UnitSystem( for unit in UnitOfPressure if unit != UnitOfPressure.INHG }, + # Convert non-USCS areas + ("area", UnitOfArea.SQUARE_METERS): UnitOfArea.SQUARE_FEET, + ("area", UnitOfArea.SQUARE_CENTIMETERS): UnitOfArea.SQUARE_INCHES, + ("area", UnitOfArea.SQUARE_MILLIMETERS): UnitOfArea.SQUARE_INCHES, + ("area", UnitOfArea.SQUARE_KILOMETERS): UnitOfArea.SQUARE_MILES, + ("area", UnitOfArea.HECTARES): UnitOfArea.ACRES, # Convert non-USCS distances ("distance", UnitOfLength.CENTIMETERS): UnitOfLength.INCHES, ("distance", UnitOfLength.KILOMETERS): UnitOfLength.MILES, @@ -332,6 +364,7 @@ US_CUSTOMARY_SYSTEM = UnitSystem( ("pressure", UnitOfPressure.MMHG): UnitOfPressure.INHG, # Convert non-USCS speeds, except knots, to mph ("speed", UnitOfSpeed.METERS_PER_SECOND): UnitOfSpeed.MILES_PER_HOUR, + ("speed", UnitOfSpeed.MILLIMETERS_PER_SECOND): UnitOfSpeed.INCHES_PER_SECOND, ("speed", UnitOfSpeed.KILOMETERS_PER_HOUR): UnitOfSpeed.MILES_PER_HOUR, ( "speed", @@ -355,6 +388,7 @@ US_CUSTOMARY_SYSTEM = UnitSystem( if unit not in (UnitOfSpeed.KNOTS, UnitOfSpeed.MILES_PER_HOUR) }, }, + area=UnitOfArea.SQUARE_FEET, length=UnitOfLength.MILES, mass=UnitOfMass.POUNDS, pressure=UnitOfPressure.PSI, diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index a56cf126f79..39d38a8f47d 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -22,10 +22,9 @@ except ImportError: SafeLoader as FastestAvailableSafeLoader, ) -from functools import cached_property +from propcache import cached_property from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.frame import report from .const import SECRET_YAML from .objects import Input, NodeDictClass, NodeListClass, NodeStrClass @@ -144,37 +143,6 @@ class FastSafeLoader(FastestAvailableSafeLoader, _LoaderMixin): self.secrets = secrets -class SafeLoader(FastSafeLoader): - """Provided for backwards compatibility. 
Logs when instantiated.""" - - def __init__(*args: Any, **kwargs: Any) -> None: - """Log a warning and call super.""" - SafeLoader.__report_deprecated() - FastSafeLoader.__init__(*args, **kwargs) - - @classmethod - def add_constructor(cls, tag: str, constructor: Callable) -> None: - """Log a warning and call super.""" - SafeLoader.__report_deprecated() - FastSafeLoader.add_constructor(tag, constructor) - - @classmethod - def add_multi_constructor( - cls, tag_prefix: str, multi_constructor: Callable - ) -> None: - """Log a warning and call super.""" - SafeLoader.__report_deprecated() - FastSafeLoader.add_multi_constructor(tag_prefix, multi_constructor) - - @staticmethod - def __report_deprecated() -> None: - """Log deprecation warning.""" - report( - "uses deprecated 'SafeLoader' instead of 'FastSafeLoader', " - "which will stop working in HA Core 2024.6," - ) - - class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): """Python safe loader.""" @@ -184,37 +152,6 @@ class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): self.secrets = secrets -class SafeLineLoader(PythonSafeLoader): - """Provided for backwards compatibility. Logs when instantiated.""" - - def __init__(*args: Any, **kwargs: Any) -> None: - """Log a warning and call super.""" - SafeLineLoader.__report_deprecated() - PythonSafeLoader.__init__(*args, **kwargs) - - @classmethod - def add_constructor(cls, tag: str, constructor: Callable) -> None: - """Log a warning and call super.""" - SafeLineLoader.__report_deprecated() - PythonSafeLoader.add_constructor(tag, constructor) - - @classmethod - def add_multi_constructor( - cls, tag_prefix: str, multi_constructor: Callable - ) -> None: - """Log a warning and call super.""" - SafeLineLoader.__report_deprecated() - PythonSafeLoader.add_multi_constructor(tag_prefix, multi_constructor) - - @staticmethod - def __report_deprecated() -> None: - """Log deprecation warning.""" - report( - "uses deprecated 'SafeLineLoader' instead of 'PythonSafeLoader', " - "which will stop working in HA Core 2024.6," - ) - - type LoaderType = FastSafeLoader | PythonSafeLoader @@ -385,7 +322,7 @@ def _include_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: return _add_reference(loaded_yaml, loader, node) except FileNotFoundError as exc: raise HomeAssistantError( - f"{node.start_mark}: Unable to read file {fname}." 
+ f"{node.start_mark}: Unable to read file {fname}" ) from exc diff --git a/mypy.ini b/mypy.ini index 2a361f56397..15b96e0a802 100644 --- a/mypy.ini +++ b/mypy.ini @@ -5,18 +5,18 @@ [mypy] python_version = 3.12 platform = linux -plugins = pydantic.mypy +plugins = pydantic.mypy, pydantic.v1.mypy show_error_codes = true follow_imports = normal -enable_incomplete_feature = NewGenericSyntax local_partial_types = true strict_equality = true +strict_bytes = true no_implicit_optional = true warn_incomplete_stub = true warn_redundant_casts = true warn_unused_configs = true warn_unused_ignores = true -enable_error_code = ignore-without-code, redundant-self, truthy-iterable +enable_error_code = deprecated, ignore-without-code, redundant-self, truthy-iterable disable_error_code = annotation-unchecked, import-not-found, import-untyped extra_checks = false check_untyped_defs = true @@ -165,6 +165,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.acaia.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.accuweather.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -705,6 +715,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.assist_satellite.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.asuswrt.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -855,6 +875,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.bluesound.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.bluetooth.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -975,6 +1005,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.cambridge_audio.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.camera.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1085,6 +1125,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.cookidoo.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.counter.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1145,6 +1195,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.deako.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true 
+disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.deconz.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1396,6 +1456,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.eheimdigital.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.electrasmart.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1806,6 +1876,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.go2rtc.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.goalzero.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1836,6 +1916,26 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.google_cloud.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + +[mypy-homeassistant.components.google_photos.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.google_sheets.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1846,6 +1946,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.govee_ble.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.gpsd.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2356,6 +2466,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.iron_os.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.islamic_prayer_times.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2536,6 +2656,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.lektrico.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.lidarr.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2696,16 +2826,6 @@ disallow_untyped_defs = true warn_return_any = true 
warn_unreachable = true -[mypy-homeassistant.components.mailbox.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.manual.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2716,16 +2836,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.map.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.mastodon.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2866,6 +2976,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.mold_indicator.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.monzo.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2916,6 +3036,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.music_assistant.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.my.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2966,6 +3096,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.nasweb.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.neato.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3056,6 +3196,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.nordpool.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.notify.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3126,6 +3276,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.onkyo.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.open_meteo.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3136,6 +3296,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.openai_conversation.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true 
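Each of these per-integration overrides enables the same strict flag set, which in practice means every function in the component must be fully annotated and may not leak Any back to callers. A minimal sketch of what `disallow_untyped_defs` and `warn_return_any` expect; the helper name is illustrative, not taken from the diff:

    from __future__ import annotations


    def format_power(value: float, unit: str = "W") -> str:
        """Return a human-readable power reading (illustrative helper)."""
        return f"{value:.1f} {unit}"


    # Rejected under these settings: no parameter or return annotations,
    # so mypy reports "Function is missing a type annotation".
    # def format_power(value, unit="W"):
    #     return f"{value} {unit}"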
+disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.openexchangerates.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3206,6 +3376,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.panel_custom.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.peco.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3256,6 +3436,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.powerfox.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.powerwall.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3376,6 +3566,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.radio_browser.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.rainforest_raven.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3456,6 +3656,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.reolink.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.repairs.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3596,6 +3806,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.russound_rio.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.ruuvi_gateway.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3646,6 +3866,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.schlage.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.scrape.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3716,6 +3946,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.sensoterra.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true 
+warn_unreachable = true + [mypy-homeassistant.components.senz.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3736,6 +3976,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.shell_command.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.shelly.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3826,6 +4076,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.smlight.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.snooz.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3836,6 +4096,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.solarlog.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.sonarr.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3856,6 +4126,17 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.spotify.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true +no_implicit_reexport = true + [mypy-homeassistant.components.sql.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3866,6 +4147,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.squeezebox.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.ssdp.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3906,7 +4197,7 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.stookalert.*] +[mypy-homeassistant.components.stookwijzer.*] check_untyped_defs = true disallow_incomplete_defs = true disallow_subclassing_any = true @@ -3987,6 +4278,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.switch_as_x.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.switchbee.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4458,6 +4759,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.uvc.*] +check_untyped_defs = true +disallow_incomplete_defs = true 
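The spotify override above additionally sets `no_implicit_reexport = true`, folding in the standalone spotify block that is removed just below. Under that flag, a name imported into a module is not part of its public API unless it is re-exported explicitly; a short sketch of the two accepted forms, using a stdlib name purely for illustration:

    from datetime import datetime as datetime  # "import X as X" marks an explicit re-export

    __all__ = ["datetime"]  # alternatively, list the public names in __all__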
+disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.vacuum.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4658,6 +4969,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.workday.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.worldclock.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4751,9 +5072,6 @@ warn_unreachable = true [mypy-homeassistant.components.application_credentials.*] no_implicit_reexport = true -[mypy-homeassistant.components.spotify.*] -no_implicit_reexport = true - [mypy-tests.*] check_untyped_defs = false disallow_incomplete_defs = false diff --git a/pylint/plugins/hass_decorator.py b/pylint/plugins/hass_decorator.py new file mode 100644 index 00000000000..7e509776a86 --- /dev/null +++ b/pylint/plugins/hass_decorator.py @@ -0,0 +1,119 @@ +"""Plugin to check decorators.""" + +from __future__ import annotations + +from astroid import nodes +from pylint.checkers import BaseChecker +from pylint.lint import PyLinter + + +class HassDecoratorChecker(BaseChecker): + """Checker for decorators.""" + + name = "hass_decorator" + priority = -1 + msgs = { + "W7471": ( + "A coroutine function should not be decorated with @callback", + "hass-async-callback-decorator", + "Used when a coroutine function has an invalid @callback decorator", + ), + "W7472": ( + "Fixture %s is invalid here, please %s", + "hass-pytest-fixture-decorator", + "Used when a pytest fixture is invalid", + ), + } + + def _get_pytest_fixture_node(self, node: nodes.FunctionDef) -> nodes.Call | None: + for decorator in node.decorators.nodes: + if ( + isinstance(decorator, nodes.Call) + and decorator.func.as_string() == "pytest.fixture" + ): + return decorator + + return None + + def _get_pytest_fixture_node_keyword( + self, decorator: nodes.Call, search_arg: str + ) -> nodes.Keyword | None: + for keyword in decorator.keywords: + if keyword.arg == search_arg: + return keyword + + return None + + def _check_pytest_fixture( + self, node: nodes.FunctionDef, decoratornames: set[str] + ) -> None: + if ( + "_pytest.fixtures.FixtureFunctionMarker" not in decoratornames + or not (root_name := node.root().name).startswith("tests.") + or (decorator := self._get_pytest_fixture_node(node)) is None + or not ( + scope_keyword := self._get_pytest_fixture_node_keyword( + decorator, "scope" + ) + ) + or not isinstance(scope_keyword.value, nodes.Const) + or not (scope := scope_keyword.value.value) + ): + return + + parts = root_name.split(".") + test_component: str | None = None + if root_name.startswith("tests.components.") and parts[2] != "conftest": + test_component = parts[2] + + if scope == "session": + if test_component: + self.add_message( + "hass-pytest-fixture-decorator", + node=decorator, + args=("scope `session`", "use `package` or lower"), + ) + return + if not ( + autouse_keyword := self._get_pytest_fixture_node_keyword( + decorator, "autouse" + ) + ) or ( + isinstance(autouse_keyword.value, nodes.Const) + and not autouse_keyword.value.value + ): + self.add_message( + "hass-pytest-fixture-decorator", + node=decorator, + args=( + "scope/autouse 
combination", + "set `autouse=True` or reduce scope", + ), + ) + return + + test_module = parts[3] if len(parts) > 3 else "" + + if test_component and scope == "package" and test_module != "conftest": + self.add_message( + "hass-pytest-fixture-decorator", + node=decorator, + args=("scope `package`", "use `module` or lower"), + ) + + def visit_asyncfunctiondef(self, node: nodes.AsyncFunctionDef) -> None: + """Apply checks on an AsyncFunctionDef node.""" + if decoratornames := node.decoratornames(): + if "homeassistant.core.callback" in decoratornames: + self.add_message("hass-async-callback-decorator", node=node) + self._check_pytest_fixture(node, decoratornames) + + def visit_functiondef(self, node: nodes.FunctionDef) -> None: + """Apply checks on an AsyncFunctionDef node.""" + if decoratornames := node.decoratornames(): + self._check_pytest_fixture(node, decoratornames) + + +def register(linter: PyLinter) -> None: + """Register the checker.""" + linter.register_checker(HassDecoratorChecker(linter)) diff --git a/pylint/plugins/hass_enforce_class_module.py b/pylint/plugins/hass_enforce_class_module.py new file mode 100644 index 00000000000..09fe61b68c6 --- /dev/null +++ b/pylint/plugins/hass_enforce_class_module.py @@ -0,0 +1,168 @@ +"""Plugin for checking if class is in correct module.""" + +from __future__ import annotations + +from astroid import nodes +from pylint.checkers import BaseChecker +from pylint.lint import PyLinter + +from homeassistant.const import Platform + +_BASE_ENTITY_MODULES: set[str] = { + "BaseCoordinatorEntity", + "CoordinatorEntity", + "Entity", + "EntityDescription", + "ManualTriggerEntity", + "RestoreEntity", + "ToggleEntity", + "ToggleEntityDescription", + "TriggerBaseEntity", +} +_MODULES: dict[str, set[str]] = { + "air_quality": {"AirQualityEntity"}, + "alarm_control_panel": { + "AlarmControlPanelEntity", + "AlarmControlPanelEntityDescription", + }, + "assist_satellite": {"AssistSatelliteEntity", "AssistSatelliteEntityDescription"}, + "binary_sensor": {"BinarySensorEntity", "BinarySensorEntityDescription"}, + "button": {"ButtonEntity", "ButtonEntityDescription"}, + "calendar": {"CalendarEntity", "CalendarEntityDescription"}, + "camera": {"Camera", "CameraEntityDescription"}, + "climate": {"ClimateEntity", "ClimateEntityDescription"}, + "coordinator": {"DataUpdateCoordinator"}, + "conversation": {"ConversationEntity"}, + "cover": {"CoverEntity", "CoverEntityDescription"}, + "date": {"DateEntity", "DateEntityDescription"}, + "datetime": {"DateTimeEntity", "DateTimeEntityDescription"}, + "device_tracker": { + "DeviceTrackerEntity", + "ScannerEntity", + "ScannerEntityDescription", + "TrackerEntity", + "TrackerEntityDescription", + }, + "event": {"EventEntity", "EventEntityDescription"}, + "fan": {"FanEntity", "FanEntityDescription"}, + "geo_location": {"GeolocationEvent"}, + "humidifier": {"HumidifierEntity", "HumidifierEntityDescription"}, + "image": {"ImageEntity", "ImageEntityDescription"}, + "image_processing": { + "ImageProcessingEntity", + "ImageProcessingFaceEntity", + "ImageProcessingEntityDescription", + }, + "lawn_mower": {"LawnMowerEntity", "LawnMowerEntityDescription"}, + "light": {"LightEntity", "LightEntityDescription"}, + "lock": {"LockEntity", "LockEntityDescription"}, + "media_player": {"MediaPlayerEntity", "MediaPlayerEntityDescription"}, + "notify": {"NotifyEntity", "NotifyEntityDescription"}, + "number": {"NumberEntity", "NumberEntityDescription", "RestoreNumber"}, + "remote": {"RemoteEntity", "RemoteEntityDescription"}, + "select": 
{"SelectEntity", "SelectEntityDescription"}, + "sensor": {"RestoreSensor", "SensorEntity", "SensorEntityDescription"}, + "siren": {"SirenEntity", "SirenEntityDescription"}, + "stt": {"SpeechToTextEntity"}, + "switch": {"SwitchEntity", "SwitchEntityDescription"}, + "text": {"TextEntity", "TextEntityDescription"}, + "time": {"TimeEntity", "TimeEntityDescription"}, + "todo": {"TodoListEntity"}, + "tts": {"TextToSpeechEntity"}, + "update": {"UpdateEntity", "UpdateEntityDescription"}, + "vacuum": {"StateVacuumEntity", "VacuumEntity", "VacuumEntityDescription"}, + "wake_word": {"WakeWordDetectionEntity"}, + "water_heater": {"WaterHeaterEntity"}, + "weather": { + "CoordinatorWeatherEntity", + "SingleCoordinatorWeatherEntity", + "WeatherEntity", + "WeatherEntityDescription", + }, +} +_ENTITY_COMPONENTS: set[str] = {platform.value for platform in Platform}.union( + { + "alert", + "automation", + "counter", + "dominos", + "input_boolean", + "input_button", + "input_datetime", + "input_number", + "input_select", + "input_text", + "microsoft_face", + "person", + "plant", + "remember_the_milk", + "schedule", + "script", + "tag", + "timer", + } +) + + +_MODULE_CLASSES = { + class_name for classes in _MODULES.values() for class_name in classes +} + + +class HassEnforceClassModule(BaseChecker): + """Checker for class in correct module.""" + + name = "hass_enforce_class_module" + priority = -1 + msgs = { + "C7461": ( + "Derived %s is recommended to be placed in the '%s' module", + "hass-enforce-class-module", + "Used when derived class should be placed in its own module.", + ), + } + + def visit_classdef(self, node: nodes.ClassDef) -> None: + """Check if derived class is placed in its own module.""" + root_name = node.root().name + + # we only want to check components + if not root_name.startswith("homeassistant.components."): + return + parts = root_name.split(".") + current_integration = parts[2] + current_module = parts[3] if len(parts) > 3 else "" + + ancestors = list(node.ancestors()) + + if current_module != "entity" and current_integration not in _ENTITY_COMPONENTS: + top_level_ancestors = list(node.ancestors(recurs=False)) + + for ancestor in top_level_ancestors: + if ancestor.name in _BASE_ENTITY_MODULES and not any( + anc.name in _MODULE_CLASSES for anc in ancestors + ): + self.add_message( + "hass-enforce-class-module", + node=node, + args=(ancestor.name, "entity"), + ) + return + + for expected_module, classes in _MODULES.items(): + if expected_module in (current_module, current_integration): + continue + + for ancestor in ancestors: + if ancestor.name in classes: + self.add_message( + "hass-enforce-class-module", + node=node, + args=(ancestor.name, expected_module), + ) + return + + +def register(linter: PyLinter) -> None: + """Register the checker.""" + linter.register_checker(HassEnforceClassModule(linter)) diff --git a/pylint/plugins/hass_enforce_coordinator_module.py b/pylint/plugins/hass_enforce_coordinator_module.py deleted file mode 100644 index 7160a25085d..00000000000 --- a/pylint/plugins/hass_enforce_coordinator_module.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Plugin for checking if coordinator is in its own module.""" - -from __future__ import annotations - -from astroid import nodes -from pylint.checkers import BaseChecker -from pylint.lint import PyLinter - - -class HassEnforceCoordinatorModule(BaseChecker): - """Checker for coordinators own module.""" - - name = "hass_enforce_coordinator_module" - priority = -1 - msgs = { - "C7461": ( - "Derived data update coordinator is recommended 
to be placed in the 'coordinator' module", - "hass-enforce-coordinator-module", - "Used when derived data update coordinator should be placed in its own module.", - ), - } - - def visit_classdef(self, node: nodes.ClassDef) -> None: - """Check if derived data update coordinator is placed in its own module.""" - root_name = node.root().name - - # we only want to check component update coordinators - if not root_name.startswith("homeassistant.components"): - return - - is_coordinator_module = root_name.endswith(".coordinator") - for ancestor in node.ancestors(): - if ancestor.name == "DataUpdateCoordinator" and not is_coordinator_module: - self.add_message("hass-enforce-coordinator-module", node=node) - return - - -def register(linter: PyLinter) -> None: - """Register the checker.""" - linter.register_checker(HassEnforceCoordinatorModule(linter)) diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index e1812de44d3..a837650f3b5 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -28,6 +28,8 @@ _KNOWN_GENERIC_TYPES: set[str] = { } _KNOWN_GENERIC_TYPES_TUPLE = tuple(_KNOWN_GENERIC_TYPES) +_FORCE_ANNOTATION_PLATFORMS = ["config_flow"] + class _Special(Enum): """Sentinel values.""" @@ -1316,7 +1318,7 @@ _INHERITANCE_MATCH: dict[str, list[ClassTypeHintMatch]] = { ), TypeHintMatch( function_name="source_type", - return_type=["SourceType", "str"], + return_type="SourceType", ), ], ), @@ -1761,39 +1763,6 @@ _INHERITANCE_MATCH: dict[str, list[ClassTypeHintMatch]] = { ], ), ], - "mailbox": [ - ClassTypeHintMatch( - base_class="Mailbox", - matches=[ - TypeHintMatch( - function_name="media_type", - return_type="str", - ), - TypeHintMatch( - function_name="can_delete", - return_type="bool", - ), - TypeHintMatch( - function_name="has_media", - return_type="bool", - ), - TypeHintMatch( - function_name="async_get_media", - arg_types={1: "str"}, - return_type="bytes", - ), - TypeHintMatch( - function_name="async_get_messages", - return_type="list[dict[str, Any]]", - ), - TypeHintMatch( - function_name="async_delete", - arg_types={1: "str"}, - return_type="bool", - ), - ], - ), - ], "media_player": [ ClassTypeHintMatch( base_class="Entity", @@ -3141,6 +3110,7 @@ class HassTypeHintChecker(BaseChecker): _class_matchers: list[ClassTypeHintMatch] _function_matchers: list[TypeHintMatch] _module_node: nodes.Module + _module_platform: str | None _in_test_module: bool def visit_module(self, node: nodes.Module) -> None: @@ -3148,24 +3118,22 @@ class HassTypeHintChecker(BaseChecker): self._class_matchers = [] self._function_matchers = [] self._module_node = node + self._module_platform = _get_module_platform(node.name) self._in_test_module = node.name.startswith("tests.") - if ( - self._in_test_module - or (module_platform := _get_module_platform(node.name)) is None - ): + if self._in_test_module or self._module_platform is None: return - if module_platform in _PLATFORMS: + if self._module_platform in _PLATFORMS: self._function_matchers.extend(_FUNCTION_MATCH["__any_platform__"]) - if function_matches := _FUNCTION_MATCH.get(module_platform): + if function_matches := _FUNCTION_MATCH.get(self._module_platform): self._function_matchers.extend(function_matches) - if class_matches := _CLASS_MATCH.get(module_platform): + if class_matches := _CLASS_MATCH.get(self._module_platform): self._class_matchers.extend(class_matches) - if property_matches := _INHERITANCE_MATCH.get(module_platform): + if property_matches := 
_INHERITANCE_MATCH.get(self._module_platform): self._class_matchers.extend(property_matches) self._class_matchers.reverse() @@ -3175,7 +3143,12 @@ class HassTypeHintChecker(BaseChecker): ) -> bool: """Check if we can skip the function validation.""" return ( - self.linter.config.ignore_missing_annotations + # test modules are excluded from ignore_missing_annotations + not self._in_test_module + # some modules have checks forced + and self._module_platform not in _FORCE_ANNOTATION_PLATFORMS + # other modules are only checked ignore_missing_annotations + and self.linter.config.ignore_missing_annotations and node.returns is None and not _has_valid_annotations(annotations) ) diff --git a/pylint/plugins/hass_imports.py b/pylint/plugins/hass_imports.py index 57b71560b53..194f99ae700 100644 --- a/pylint/plugins/hass_imports.py +++ b/pylint/plugins/hass_imports.py @@ -19,6 +19,12 @@ class ObsoleteImportMatch: _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { + "functools": [ + ObsoleteImportMatch( + reason="replaced by propcache.cached_property", + constant=re.compile(r"^cached_property$"), + ), + ], "homeassistant.backports.enum": [ ObsoleteImportMatch( reason="We can now use the Python 3.11 provided enum.StrEnum instead", @@ -27,147 +33,10 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { ], "homeassistant.backports.functools": [ ObsoleteImportMatch( - reason=( - "We can now use the Python 3.12 provided " - "functools.cached_property instead" - ), + reason="replaced by propcache.cached_property", constant=re.compile(r"^cached_property$"), ), ], - "homeassistant.components.alarm_control_panel": [ - ObsoleteImportMatch( - reason="replaced by AlarmControlPanelEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ObsoleteImportMatch( - reason="replaced by CodeFormat enum", - constant=re.compile(r"^FORMAT_(\w*)$"), - ), - ], - "homeassistant.components.alarm_control_panel.const": [ - ObsoleteImportMatch( - reason="replaced by AlarmControlPanelEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ObsoleteImportMatch( - reason="replaced by CodeFormat enum", - constant=re.compile(r"^FORMAT_(\w*)$"), - ), - ], - "homeassistant.components.automation": [ - ObsoleteImportMatch( - reason="replaced by TriggerActionType from helpers.trigger", - constant=re.compile(r"^AutomationActionType$"), - ), - ObsoleteImportMatch( - reason="replaced by TriggerData from helpers.trigger", - constant=re.compile(r"^AutomationTriggerData$"), - ), - ObsoleteImportMatch( - reason="replaced by TriggerInfo from helpers.trigger", - constant=re.compile(r"^AutomationTriggerInfo$"), - ), - ], - "homeassistant.components.binary_sensor": [ - ObsoleteImportMatch( - reason="replaced by BinarySensorDeviceClass enum", - constant=re.compile(r"^DEVICE_CLASS_(\w*)$"), - ), - ], - "homeassistant.components.camera": [ - ObsoleteImportMatch( - reason="replaced by CameraEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ObsoleteImportMatch( - reason="replaced by StreamType enum", - constant=re.compile(r"^STREAM_TYPE_(\w*)$"), - ), - ], - "homeassistant.components.camera.const": [ - ObsoleteImportMatch( - reason="replaced by StreamType enum", - constant=re.compile(r"^STREAM_TYPE_(\w*)$"), - ), - ], - "homeassistant.components.climate": [ - ObsoleteImportMatch( - reason="replaced by HVACMode enum", - constant=re.compile(r"^HVAC_MODE_(\w*)$"), - ), - ObsoleteImportMatch( - reason="replaced by ClimateEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], - 
"homeassistant.components.climate.const": [ - ObsoleteImportMatch( - reason="replaced by HVACAction enum", - constant=re.compile(r"^CURRENT_HVAC_(\w*)$"), - ), - ObsoleteImportMatch( - reason="replaced by HVACMode enum", - constant=re.compile(r"^HVAC_MODE_(\w*)$"), - ), - ObsoleteImportMatch( - reason="replaced by ClimateEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], - "homeassistant.components.cover": [ - ObsoleteImportMatch( - reason="replaced by CoverDeviceClass enum", - constant=re.compile(r"^DEVICE_CLASS_(\w*)$"), - ), - ObsoleteImportMatch( - reason="replaced by CoverEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], - "homeassistant.components.device_tracker": [ - ObsoleteImportMatch( - reason="replaced by SourceType enum", - constant=re.compile(r"^SOURCE_TYPE_\w+$"), - ), - ], - "homeassistant.components.device_tracker.const": [ - ObsoleteImportMatch( - reason="replaced by SourceType enum", - constant=re.compile(r"^SOURCE_TYPE_\w+$"), - ), - ], - "homeassistant.components.fan": [ - ObsoleteImportMatch( - reason="replaced by FanEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], - "homeassistant.components.humidifier": [ - ObsoleteImportMatch( - reason="replaced by HumidifierDeviceClass enum", - constant=re.compile(r"^DEVICE_CLASS_(\w*)$"), - ), - ObsoleteImportMatch( - reason="replaced by HumidifierEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], - "homeassistant.components.humidifier.const": [ - ObsoleteImportMatch( - reason="replaced by HumidifierDeviceClass enum", - constant=re.compile(r"^DEVICE_CLASS_(\w*)$"), - ), - ObsoleteImportMatch( - reason="replaced by HumidifierEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], - "homeassistant.components.lock": [ - ObsoleteImportMatch( - reason="replaced by LockEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], "homeassistant.components.light": [ ObsoleteImportMatch( reason="replaced by ColorMode enum", @@ -222,52 +91,12 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { constant=re.compile(r"^REPEAT_MODE(\w*)$"), ), ], - "homeassistant.components.remote": [ - ObsoleteImportMatch( - reason="replaced by RemoteEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], - "homeassistant.components.sensor": [ - ObsoleteImportMatch( - reason="replaced by SensorDeviceClass enum", - constant=re.compile(r"^DEVICE_CLASS_(?!STATE_CLASSES)$"), - ), - ObsoleteImportMatch( - reason="replaced by SensorStateClass enum", - constant=re.compile(r"^STATE_CLASS_(\w*)$"), - ), - ], - "homeassistant.components.siren": [ - ObsoleteImportMatch( - reason="replaced by SirenEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], - "homeassistant.components.siren.const": [ - ObsoleteImportMatch( - reason="replaced by SirenEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], - "homeassistant.components.switch": [ - ObsoleteImportMatch( - reason="replaced by SwitchDeviceClass enum", - constant=re.compile(r"^DEVICE_CLASS_(\w*)$"), - ), - ], "homeassistant.components.vacuum": [ ObsoleteImportMatch( reason="replaced by VacuumEntityFeature enum", constant=re.compile(r"^SUPPORT_(\w*)$"), ), ], - "homeassistant.components.water_heater": [ - ObsoleteImportMatch( - reason="replaced by WaterHeaterEntityFeature enum", - constant=re.compile(r"^SUPPORT_(\w*)$"), - ), - ], "homeassistant.config_entries": [ ObsoleteImportMatch( reason="replaced by ConfigEntryDisabler enum", 
@@ -279,86 +108,6 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { reason="replaced by local constants", constant=re.compile(r"^CONF_UNIT_SYSTEM_(\w+)$"), ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^DATA_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by ***DeviceClass enum", - constant=re.compile(r"^DEVICE_CLASS_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^ELECTRIC_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^ENERGY_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by EntityCategory enum", - constant=re.compile(r"^(ENTITY_CATEGORY_(\w+))|(ENTITY_CATEGORIES)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^FREQUENCY_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^IRRADIATION_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^LENGTH_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^MASS_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^POWER_(?!VOLT_AMPERE_REACTIVE)(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^PRECIPITATION_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^PRESSURE_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^SOUND_PRESSURE_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^SPEED_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^TEMP_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^TIME_(\w+)$"), - ), - ObsoleteImportMatch( - reason="replaced by unit enums", - constant=re.compile(r"^VOLUME_(\w+)$"), - ), - ], - "homeassistant.core": [ - ObsoleteImportMatch( - reason="replaced by ConfigSource enum", - constant=re.compile(r"^SOURCE_(\w*)$"), - ), - ], - "homeassistant.data_entry_flow": [ - ObsoleteImportMatch( - reason="replaced by FlowResultType enum", - constant=re.compile(r"^RESULT_TYPE_(\w*)$"), - ), ], "homeassistant.helpers.config_validation": [ ObsoleteImportMatch( @@ -366,12 +115,6 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { constant=re.compile(r"^PLATFORM_SCHEMA(_BASE)?$"), ), ], - "homeassistant.helpers.device_registry": [ - ObsoleteImportMatch( - reason="replaced by DeviceEntryDisabler enum", - constant=re.compile(r"^DISABLED_(\w*)$"), - ), - ], "homeassistant.helpers.json": [ ObsoleteImportMatch( reason="moved to homeassistant.util.json", @@ -380,12 +123,6 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { ), ), ], - "homeassistant.util": [ - ObsoleteImportMatch( - reason="replaced by unit_conversion.***Converter", - constant=re.compile(r"^(distance|pressure|speed|temperature|volume)$"), - ), - ], "homeassistant.util.unit_system": [ ObsoleteImportMatch( reason="replaced by US_CUSTOMARY_SYSTEM", @@ -394,6 +131,31 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { ], } +_IGNORE_ROOT_IMPORT = ( + "assist_pipeline", + "automation", + "bluetooth", + "camera", + "cast", + "device_automation", + "device_tracker", + "ffmpeg", + "ffmpeg_motion", + "google_assistant", + "hardware", + "homeassistant", + "homeassistant_hardware", + "http", 
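The `_check_for_component_root_import` and `_check_for_constant_alias` helpers added just below flag cross-component import patterns. A sketch of what they accept and reject; `media_player` stands in for any other integration, and running the live import assumes a Home Assistant checkout on the path:

    from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN  # accepted

    # Flagged by hass-import-constant-alias (W7426): another component's DOMAIN
    # imported without an alias.
    # from homeassistant.components.media_player import DOMAIN

    # Flagged by hass-component-root-import: importing from a submodule of another
    # component; only components listed in _IGNORE_ROOT_IMPORT are exempt.
    # from homeassistant.components.media_player.const import DOMAIN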
+ "manual", + "plex", + "recorder", + "rest", + "script", + "sensor", + "stream", + "zha", +) + # Blacklist of imports that should be using the namespace @dataclass @@ -460,6 +222,11 @@ class HassImportsFormatChecker(BaseChecker): "hass-helper-namespace-import", "Used when a helper should be used via the namespace", ), + "W7426": ( + "`%s` should be imported using an alias, such as `%s as %s`", + "hass-import-constant-alias", + "Used when a constant should be imported as an alias", + ), } options = () @@ -484,8 +251,9 @@ class HassImportsFormatChecker(BaseChecker): if module.startswith(f"{self.current_package}."): self.add_message("hass-relative-import", node=node) continue - if module.startswith("homeassistant.components.") and module.endswith( - "const" + if ( + module.startswith("homeassistant.components.") + and len(module.split(".")) > 3 ): if ( self.current_package.startswith("tests.components.") @@ -517,6 +285,85 @@ class HassImportsFormatChecker(BaseChecker): if len(split_package) < node.level + 2: self.add_message("hass-absolute-import", node=node) + def _check_for_constant_alias( + self, + node: nodes.ImportFrom, + current_component: str | None, + imported_component: str, + ) -> bool: + """Check for hass-import-constant-alias.""" + if current_component == imported_component: + return True + + # Check for `from homeassistant.components.other import DOMAIN` + for name, alias in node.names: + if name == "DOMAIN" and (alias is None or alias == "DOMAIN"): + self.add_message( + "hass-import-constant-alias", + node=node, + args=( + "DOMAIN", + "DOMAIN", + f"{imported_component.upper()}_DOMAIN", + ), + ) + return False + + return True + + def _check_for_component_root_import( + self, + node: nodes.ImportFrom, + current_component: str | None, + imported_parts: list[str], + imported_component: str, + ) -> bool: + """Check for hass-component-root-import.""" + if ( + current_component == imported_component + or imported_component in _IGNORE_ROOT_IMPORT + ): + return True + + # Check for `from homeassistant.components.other.module import something` + if len(imported_parts) > 3: + self.add_message("hass-component-root-import", node=node) + return False + + # Check for `from homeassistant.components.other import const` + for name, _ in node.names: + if name == "const": + self.add_message("hass-component-root-import", node=node) + return False + + return True + + def _check_for_relative_import( + self, + current_package: str, + node: nodes.ImportFrom, + current_component: str | None, + ) -> bool: + """Check for hass-relative-import.""" + if node.modname == current_package or node.modname.startswith( + f"{current_package}." 
+ ): + self.add_message("hass-relative-import", node=node) + return False + + for root in ("homeassistant", "tests"): + if current_package.startswith(f"{root}.components."): + if node.modname == f"{root}.components": + for name in node.names: + if name[0] == current_component: + self.add_message("hass-relative-import", node=node) + return False + elif node.modname.startswith(f"{root}.components.{current_component}."): + self.add_message("hass-relative-import", node=node) + return False + + return True + def visit_importfrom(self, node: nodes.ImportFrom) -> None: """Check for improper 'from _ import _' invocations.""" if not self.current_package: @@ -524,35 +371,36 @@ class HassImportsFormatChecker(BaseChecker): if node.level is not None: self._visit_importfrom_relative(self.current_package, node) return - if node.modname == self.current_package or node.modname.startswith( - f"{self.current_package}." - ): - self.add_message("hass-relative-import", node=node) - return + + # Cache current component + current_component: str | None = None for root in ("homeassistant", "tests"): if self.current_package.startswith(f"{root}.components."): current_component = self.current_package.split(".")[2] - if node.modname == f"{root}.components": - for name in node.names: - if name[0] == current_component: - self.add_message("hass-relative-import", node=node) - return - if node.modname.startswith(f"{root}.components.{current_component}."): - self.add_message("hass-relative-import", node=node) - return - if node.modname.startswith("homeassistant.components.") and ( - node.modname.endswith(".const") - or "const" in {names[0] for names in node.names} + + # Checks for hass-relative-import + if not self._check_for_relative_import( + self.current_package, node, current_component ): - if ( - self.current_package.startswith("tests.components.") - and self.current_package.split(".")[2] == node.modname.split(".")[2] - ): - # Ignore check if the component being tested matches - # the component being imported from - return - self.add_message("hass-component-root-import", node=node) return + + if node.modname.startswith("homeassistant.components."): + imported_parts = node.modname.split(".") + imported_component = imported_parts[2] + + # Checks for hass-component-root-import + if not self._check_for_component_root_import( + node, current_component, imported_parts, imported_component + ): + return + + # Checks for hass-import-constant-alias + if not self._check_for_constant_alias( + node, current_component, imported_component + ): + return + + # Checks for hass-deprecated-import if obsolete_imports := _OBSOLETE_IMPORT.get(node.modname): for name_tuple in node.names: for obsolete_import in obsolete_imports: @@ -562,6 +410,8 @@ class HassImportsFormatChecker(BaseChecker): node=node, args=(import_match.string, obsolete_import.reason), ) + + # Checks for hass-helper-namespace-import if namespace_alias := _FORCE_NAMESPACE_IMPORT.get(node.modname): for name in node.names: if name[0] in namespace_alias.names: diff --git a/pyproject.toml b/pyproject.toml index 10bc26f1a0a..91acea30b52 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,10 @@ [build-system] -requires = ["setuptools==69.2.0", "wheel~=0.43.0"] +requires = ["setuptools==75.1.0"] build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.9.0.dev0" +version = "2025.1.0.dev0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
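The `hass_decorator` plugin added above is registered under `load-plugins` a little further down. A sketch of the pytest fixture scopes it enforces; the fixture names and paths are hypothetical and only illustrate the rule:

    import pytest


    # Accepted in tests/components/<integration>/conftest.py: package scope is the
    # widest scope the checker allows for a component's shared fixtures.
    @pytest.fixture(scope="package")
    def mock_config_entry() -> object:
        """Hypothetical fixture, shared across the component's test package."""
        return object()


    # In a regular test module (e.g. test_init.py) the same decorator is flagged by
    # hass-pytest-fixture-decorator (W7472) with "use `module` or lower", and
    # scope="session" is rejected inside component test packages altogether.
    # @pytest.fixture(scope="package")
    # def mock_client() -> object: ...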
readme = "README.rst" @@ -19,57 +19,68 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Topic :: Home Automation", ] requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.10.5", + # Integrations may depend on hassio integration without listing it to + # change behavior based on presence of supervisor. Deprecated with #127228 + # Lib can be removed with 2025.11 + "aiohasupervisor==0.2.2b2", + "aiohttp==3.11.10", "aiohttp_cors==0.7.0", - "aiohttp-fast-zlib==0.1.1", + "aiohttp-fast-zlib==0.2.0", "aiozoneinfo==0.2.1", "astral==2.2", "async-interrupt==1.2.0", - "attrs==23.2.0", + "attrs==24.2.0", "atomicwrites-homeassistant==1.4.1", + "audioop-lts==0.2.1;python_version>='3.13'", "awesomeversion==24.6.0", - "bcrypt==4.1.3", + "bcrypt==4.2.0", "certifi>=2021.5.30", - "ciso8601==2.3.1", + "ciso8601==2.3.2", "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.81.1", + "hass-nabucasa==0.87.0", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all - "httpx==0.27.0", - "home-assistant-bluetooth==1.12.2", + "httpx==0.27.2", + "home-assistant-bluetooth==1.13.0", "ifaddr==0.2.0", "Jinja2==3.1.4", "lru-dict==1.3.0", - "PyJWT==2.9.0", + "PyJWT==2.10.1", # PyJWT has loose dependency. We want the latest one. - "cryptography==43.0.0", - "Pillow==10.4.0", - "pyOpenSSL==24.2.1", - "orjson==3.10.7", + "cryptography==44.0.0", + "Pillow==11.0.0", + "propcache==0.2.1", + "pyOpenSSL==24.3.0", + "orjson==3.10.12", "packaging>=23.1", - "pip>=21.3.1", "psutil-home-assistant==0.0.1", "python-slugify==8.0.4", "PyYAML==6.0.2", "requests==2.32.3", - "SQLAlchemy==2.0.31", + "securetar==2024.11.0", + "SQLAlchemy==2.0.36", + "standard-aifc==3.13.0;python_version>='3.13'", + "standard-telnetlib==3.13.0;python_version>='3.13'", "typing-extensions>=4.12.2,<5.0", "ulid-transform==1.0.2", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", + "uv==0.5.8", "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.9.4", + "yarl==1.18.3", + "webrtc-models==0.3.0", ] [project.urls] @@ -84,8 +95,6 @@ dependencies = [ hass = "homeassistant.__main__:main" [tool.setuptools] -platforms = ["any"] -zip-safe = false include-package-data = true [tool.setuptools.packages.find] @@ -109,7 +118,8 @@ init-hook = """\ load-plugins = [ "pylint.extensions.code_style", "pylint.extensions.typing", - "hass_enforce_coordinator_module", + "hass_decorator", + "hass_enforce_class_module", "hass_enforce_sorted_platforms", "hass_enforce_super_call", "hass_enforce_type_hints", @@ -148,7 +158,6 @@ class-const-naming-style = "any" # inconsistent-return-statements - doesn't handle raise # too-many-ancestors - it's too strict. 
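Just below, `consider-using-f-string` moves from the "not necessary" rationale to the ruff-handled disable list (PLC0209), and `UP031`/`UP032` are newly selected in the ruff configuration. A small sketch of the rewrites those rules push toward; the variable values are arbitrary:

    name, count = "kitchen", 3

    msg = "%s has %d lights" % (name, count)      # UP031: percent formatting
    msg = "{} has {} lights".format(name, count)  # UP032: call to str.format
    msg = f"{name} has {count} lights"            # the form both rules rewrite to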
# wrong-import-order - isort guards this -# consider-using-f-string - str.format sometimes more readable # possibly-used-before-assignment - too many errors / not necessarily issues # --- # Pylint CodeStyle plugin @@ -170,8 +179,8 @@ disable = [ "too-many-locals", "too-many-public-methods", "too-many-boolean-expressions", + "too-many-positional-arguments", "wrong-import-order", - "consider-using-f-string", "consider-using-namedtuple-or-dataclass", "consider-using-assignment-expr", "possibly-used-before-assignment", @@ -313,6 +322,7 @@ disable = [ "broad-except", # BLE001 "protected-access", # SLF001 "broad-exception-raised", # TRY002 + "consider-using-f-string", # PLC0209 # "no-self-use", # PLR6301 # Optional plugin, not enabled # Handled by mypy @@ -443,6 +453,7 @@ norecursedirs = [ log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s" log_date_format = "%Y-%m-%d %H:%M:%S" asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" filterwarnings = [ "error::sqlalchemy.exc.SAWarning", @@ -458,14 +469,14 @@ filterwarnings = [ # Ignore custom pytest marks "ignore:Unknown pytest.mark.disable_autouse_fixture:pytest.PytestUnknownMarkWarning:tests.components.met", "ignore:Unknown pytest.mark.dataset:pytest.PytestUnknownMarkWarning:tests.components.screenlogic", - # https://github.com/rokam/sunweg/blob/3.0.2/sunweg/plant.py#L96 - v3.0.2 - 2024-07-10 + # https://github.com/rokam/sunweg/blob/3.1.0/sunweg/plant.py#L96 - v3.1.0 - 2024-10-02 "ignore:The '(kwh_per_kwp|performance_rate)' property is deprecated and will return 0:DeprecationWarning:tests.components.sunweg.test_init", # -- design choice 3rd party - # https://github.com/gwww/elkm1/blob/2.2.7/elkm1_lib/util.py#L8-L19 + # https://github.com/gwww/elkm1/blob/2.2.10/elkm1_lib/util.py#L8-L19 "ignore:ssl.TLSVersion.TLSv1 is deprecated:DeprecationWarning:elkm1_lib.util", # https://github.com/allenporter/ical/pull/215 - # https://github.com/allenporter/ical/blob/8.1.1/ical/util.py#L21-L23 + # https://github.com/allenporter/ical/blob/8.2.0/ical/util.py#L21-L23 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:ical.util", # https://github.com/bachya/regenmaschine/blob/2024.03.0/regenmaschine/client.py#L52 "ignore:ssl.TLSVersion.SSLv3 is deprecated:DeprecationWarning:regenmaschine.client", @@ -477,13 +488,13 @@ filterwarnings = [ "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", # -- tracked upstream / open PRs - # https://github.com/ronf/asyncssh/issues/674 - v2.15.0 - "ignore:ARC4 has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and will be removed from this module in 48.0.0:UserWarning:asyncssh.crypto.cipher", - "ignore:TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from this module in 48.0.0:UserWarning:asyncssh.crypto.cipher", - # https://github.com/certbot/certbot/issues/9828 - v2.10.0 + # - pyOpenSSL v24.2.1 + # https://github.com/certbot/certbot/issues/9828 - v2.11.0 + # https://github.com/certbot/certbot/issues/9992 "ignore:X509Extension support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - # https://github.com/beetbox/mediafile/issues/67 - v0.12.0 - "ignore:'imghdr' is deprecated and slated for removal in Python 3.13:DeprecationWarning:mediafile", + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:josepy.util", + # - other # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 # https://github.com/foxel/python_ndms2_client/pull/8 "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:ndms2_client.connection", @@ -491,6 +502,8 @@ filterwarnings = [ # -- fixed, waiting for release / update # https://github.com/bachya/aiopurpleair/pull/200 - >=2023.10.0 "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aiopurpleair.helpers.validators", + # https://bugs.launchpad.net/beautifulsoup/+bug/2076897 - >4.12.3 + "ignore:The 'strip_cdata' option of HTMLParser\\(\\) has never done anything and will eventually be removed:DeprecationWarning:bs4.builder._lxml", # https://github.com/DataDog/datadogpy/pull/290 - >=0.23.0 "ignore:invalid escape sequence:SyntaxWarning:.*datadog.dogstatsd.base", # https://github.com/DataDog/datadogpy/pull/566/files - >=0.37.0 @@ -499,7 +512,7 @@ filterwarnings = [ "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:devialet.devialet_api", # https://github.com/httplib2/httplib2/pull/226 - >=0.21.0 "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2", - # https://github.com/influxdata/influxdb-client-python/issues/603 >1.45.0 + # https://github.com/influxdata/influxdb-client-python/issues/603 >=1.45.0 # https://github.com/influxdata/influxdb-client-python/pull/652 "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", # https://github.com/majuss/lupupy/pull/15 - >0.3.2 @@ -514,14 +527,8 @@ filterwarnings = [ # https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol", "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol", - # https://github.com/hunterjm/python-onvif-zeep-async/pull/51 - >3.1.12 - "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:onvif.client", - # https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8 - >=2.13.12 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:google.pubsub_v1.services.publisher.client", # https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0 "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", - # https://github.com/mvantellingen/python-zeep/pull/1364 - >4.2.1 - "ignore:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning:zeep.utils", # -- fixed for Python 3.13 # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 @@ -529,10 +536,9 @@ filterwarnings = [ # -- other # Locale changes might take some time to resolve upstream + # https://github.com/Squachen/micloud/blob/v_0.6/micloud/micloud.py#L35 - v0.6 - 2022-12-08 "ignore:'locale.getdefaultlocale' is 
deprecated and slated for removal in Python 3.15:DeprecationWarning:micloud.micloud", - # https://github.com/protocolbuffers/protobuf - v4.25.1 - "ignore:Type google._upb._message.(Message|Scalar)MapContainer uses PyType_Spec with a metaclass that has custom tp_new. .* Python 3.14:DeprecationWarning", - # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 + # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 - 2023-10-09 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pyatag.gateway", # https://github.com/lidatong/dataclasses-json/issues/328 # https://github.com/lidatong/dataclasses-json/pull/351 @@ -540,14 +546,19 @@ filterwarnings = [ # https://pypi.org/project/emulated-roku/ - v0.3.0 - 2023-12-19 # https://github.com/martonperei/emulated_roku "ignore:loop argument is deprecated:DeprecationWarning:emulated_roku", - # https://github.com/thecynic/pylutron - v0.2.15 + # https://github.com/w1ll1am23/pyeconet/blob/v0.1.23/src/pyeconet/api.py#L38 - v0.1.23 - 2024-10-08 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:pyeconet.api", + # https://github.com/thecynic/pylutron - v0.2.16 - 2024-10-22 "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", - # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 + # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 - 2024-02-24 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", + # https://github.com/lextudio/pysnmp/blob/v7.1.10/pysnmp/smi/compiler.py#L23-L31 - v7.1.10 - 2024-11-04 + "ignore:smiV1Relaxed is deprecated. Please use smi_v1_relaxed instead:DeprecationWarning:pysnmp.smi.compiler", + "ignore:getReadersFromUrls is deprecated. 
Please use get_readers_from_urls instead:DeprecationWarning:pysmi.reader.url", # wrong stacklevel # https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10 "ignore:This function will be removed in future versions of pint:DeprecationWarning:pyweatherflowudp.const", # Wrong stacklevel - # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 + # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 fixed in >4.12.3 "ignore:It looks like you're parsing an XML document using an HTML parser:UserWarning:html.parser", # New in aiohttp - v3.9.0 "ignore:It is recommended to use web.AppKey instances for keys:UserWarning:(homeassistant|tests|aiohttp_cors)", @@ -568,13 +579,10 @@ filterwarnings = [ "ignore:invalid escape sequence:SyntaxWarning:.*sanix", # https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18 "ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil", # codespell:ignore thirdparty - # https://pypi.org/project/vobject/ - v0.9.7 - 2024-03-25 - # https://github.com/py-vobject/vobject - "ignore:invalid escape sequence:SyntaxWarning:.*vobject.base", # - pkg_resources # https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast", - # https://pypi.org/project/habitipy/ - v0.3.1 - 2019-01-14 / 2024-04-28 + # https://pypi.org/project/habitipy/ - v0.3.3 - 2024-10-28 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api", # https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data", @@ -582,14 +590,6 @@ filterwarnings = [ "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version", # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", - # https://pypi.org/project/velbus-aio/ - v2024.7.5 - 2024-07-05 - # https://github.com/Cereal2nd/velbus-aio/blob/2024.7.5/velbusaio/handler.py#L22 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", - # - pyOpenSSL v24.2.1 - # https://pypi.org/project/acme/ - v2.11.0 - 2024-06-06 - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - # https://pypi.org/project/josepy/ - v1.14.0 - 2023-11-01 - "ignore:CSR support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:josepy.util",
    # -- Python 3.13
    # HomeAssistant
@@ -599,11 +599,11 @@ filterwarnings = [
    # https://github.com/nextcord/nextcord/issues/1174
    # https://github.com/nextcord/nextcord/blob/v2.6.1/nextcord/player.py#L5
    "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:nextcord.player",
-    # https://pypi.org/project/SpeechRecognition/ - v3.10.4 - 2024-05-05
-    # https://github.com/Uberi/speech_recognition/blob/3.10.4/speech_recognition/__init__.py#L7
+    # https://pypi.org/project/SpeechRecognition/ - v3.11.0 - 2024-05-05
+    # https://github.com/Uberi/speech_recognition/blob/3.11.0/speech_recognition/__init__.py#L7
    "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition",
-    # https://pypi.org/project/voip-utils/ - v0.1.0 - 2023-06-28
-    # https://github.com/home-assistant-libs/voip-utils/blob/v0.1.0/voip_utils/rtp_audio.py#L2
+    # https://pypi.org/project/voip-utils/ - v0.2.0 - 2024-09-06
+    # https://github.com/home-assistant-libs/voip-utils/blob/0.2.0/voip_utils/rtp_audio.py#L3
    "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio",
    # -- Python 3.13 - unmaintained projects, last release about 2+ years
@@ -615,6 +615,17 @@ filterwarnings = [
    # https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2
    "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i",
+    # -- New in Python 3.13
+    # https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11
+    # https://github.com/kurtmckee/feedparser/issues/481
+    "ignore:'count' is passed as positional argument:DeprecationWarning:feedparser.html",
+    # https://github.com/youknowone/python-deadlib - Backports for aifc, telnetlib
+    "ignore:aifc was removed in Python 3.13.*'standard-aifc':DeprecationWarning:speech_recognition",
+    "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:homeassistant.components.hddtemp.sensor",
+    "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:ndms2_client.connection",
+    "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:plumlightpad.lightpad",
+    "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:pyws66i",
+
    # -- unmaintained projects, last release about 2+ years
    # https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04
    "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:agent.a",
@@ -625,7 +636,7 @@ filterwarnings = [
    # https://pypi.org/project/directv/ - v0.4.0 - 2020-09-12
    "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:directv.directv",
    "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models",
-    # https://pypi.org/project/foobot_async/ - v1.0.0 - 2020-11-24
+    # https://pypi.org/project/foobot_async/ - v1.0.1 - 2024-08-16
    "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:foobot_async",
    # https://pypi.org/project/httpsig/ - v1.3.0 - 2018-11-28
    "ignore:pkg_resources is deprecated as an API:DeprecationWarning:httpsig",
@@ -687,7 +698,7 @@ exclude_lines = [
]

[tool.ruff]
-required-version = ">=0.5.3"
+required-version = ">=0.8.0"

[tool.ruff.lint]
select = [
@@ -718,6 +729,7 @@ select = [
    "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
    "E", # pycodestyle
    "F", # pyflakes/autoflake
+    "F541", # f-string without any placeholders
    "FLY", # flynt
    "FURB", # refurb
    "G", # flake8-logging-format
@@ -734,6 +746,7 @@ select = [
    "PIE", # flake8-pie
    "PL", # pylint
    "PT", # flake8-pytest-style
+    "PTH", # flake8-pathlib
    "PYI", # flake8-pyi
    "RET", # flake8-return
    "RSE", # flake8-raise
@@ -768,9 +781,12 @@ select = [
    "SLOT", # flake8-slots
    "T100", # Trace found: {name} used
    "T20", # flake8-print
-    "TID251", # Banned imports
+    "TC", # flake8-type-checking
+    "TID", # Tidy imports
    "TRY", # tryceratops
    "UP", # pyupgrade
+    "UP031", # Use format specifiers instead of percent format
+    "UP032", # Use f-string instead of `format` call
    "W", # pycodestyle
]
@@ -789,7 +805,6 @@ ignore = [
    "PLR0915", # Too many statements ({statements} > {max_statements})
    "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
    "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
-    "PT004", # Fixture {fixture} does not return anything, add leading underscore
    "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception
    "PT018", # Assertion should be broken down into multiple parts
    "RUF001", # String contains ambiguous unicode character.
@@ -800,6 +815,12 @@ ignore = [
    "SIM103", # Return the condition {condition} directly
    "SIM108", # Use ternary operator {contents} instead of if-else-block
    "SIM115", # Use context handler for opening files
+
+    # Moving imports into type-checking blocks can mess with pytest.patch()
+    "TC001", # Move application import {} into a type-checking block
+    "TC002", # Move third-party import {} into a type-checking block
+    "TC003", # Move standard library import {} into a type-checking block
+
    "TRY003", # Avoid specifying long messages outside the exception class
    "TRY400", # Use `logging.exception` instead of `logging.error`
    # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
@@ -844,7 +865,6 @@ voluptuous = "vol"
"homeassistant.components.lawn_mower.PLATFORM_SCHEMA" = "LAWN_MOWER_PLATFORM_SCHEMA"
"homeassistant.components.light.PLATFORM_SCHEMA" = "LIGHT_PLATFORM_SCHEMA"
"homeassistant.components.lock.PLATFORM_SCHEMA" = "LOCK_PLATFORM_SCHEMA"
-"homeassistant.components.mailbox.PLATFORM_SCHEMA" = "MAILBOX_PLATFORM_SCHEMA"
"homeassistant.components.media_player.PLATFORM_SCHEMA" = "MEDIA_PLAYER_PLATFORM_SCHEMA"
"homeassistant.components.notify.PLATFORM_SCHEMA" = "NOTIFY_PLATFORM_SCHEMA"
"homeassistant.components.number.PLATFORM_SCHEMA" = "NUMBER_PLATFORM_SCHEMA"
@@ -899,5 +919,17 @@ split-on-trailing-comma = false
"homeassistant/scripts/*" = ["T201"]
"script/*" = ["T20"]
+
+# Allow relative imports within auth and within components
+"homeassistant/auth/*/*" = ["TID252"]
+"homeassistant/components/*/*/*" = ["TID252"]
+"tests/components/*/*/*" = ["TID252"]
+
+# Temporary
+"homeassistant/**" = ["PTH"]
+"tests/**" = ["PTH"]
+
[tool.ruff.lint.mccabe]
max-complexity = 25
+
+[tool.ruff.lint.pydocstyle]
+property-decorators = ["propcache.cached_property"]
diff --git a/requirements.txt b/requirements.txt
index ad6a39ddb54..e4346c3e517 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,41 +4,48 @@
# Home Assistant Core
aiodns==3.2.0
-aiohttp==3.10.5
+aiohasupervisor==0.2.2b2
+aiohttp==3.11.10
aiohttp_cors==0.7.0
-aiohttp-fast-zlib==0.1.1
+aiohttp-fast-zlib==0.2.0
aiozoneinfo==0.2.1
astral==2.2
async-interrupt==1.2.0
-attrs==23.2.0
+attrs==24.2.0
atomicwrites-homeassistant==1.4.1
+audioop-lts==0.2.1;python_version>='3.13'
awesomeversion==24.6.0
-bcrypt==4.1.3
+bcrypt==4.2.0
certifi>=2021.5.30
-ciso8601==2.3.1 +ciso8601==2.3.2 fnv-hash-fast==1.0.2 -hass-nabucasa==0.81.1 -httpx==0.27.0 -home-assistant-bluetooth==1.12.2 +hass-nabucasa==0.87.0 +httpx==0.27.2 +home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 -PyJWT==2.9.0 -cryptography==43.0.0 -Pillow==10.4.0 -pyOpenSSL==24.2.1 -orjson==3.10.7 +PyJWT==2.10.1 +cryptography==44.0.0 +Pillow==11.0.0 +propcache==0.2.1 +pyOpenSSL==24.3.0 +orjson==3.10.12 packaging>=23.1 -pip>=21.3.1 psutil-home-assistant==0.0.1 python-slugify==8.0.4 PyYAML==6.0.2 requests==2.32.3 -SQLAlchemy==2.0.31 +securetar==2024.11.0 +SQLAlchemy==2.0.36 +standard-aifc==3.13.0;python_version>='3.13' +standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 +uv==0.5.8 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.9.4 +yarl==1.18.3 +webrtc-models==0.3.0 diff --git a/requirements_all.txt b/requirements_all.txt index 6320f05fffb..2540a297334 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -4,19 +4,19 @@ -r requirements.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.4 +AEMET-OpenData==0.6.3 # homeassistant.components.honeywell -AIOSomecomfort==0.0.25 +AIOSomecomfort==0.0.28 # homeassistant.components.adax Adax-local==0.1.5 # homeassistant.components.doorbird -DoorBirdPy==3.0.2 +DoorBirdPy==3.0.8 # homeassistant.components.homekit -HAP-python==4.9.1 +HAP-python==4.9.2 # homeassistant.components.tasmota HATasmota==0.9.2 @@ -33,7 +33,7 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.4.0 +Pillow==11.0.0 # homeassistant.components.plex PlexAPI==4.15.16 @@ -45,7 +45,7 @@ ProgettiHWSW==0.1.3 # PyBluez==0.22 # homeassistant.components.cast -PyChromecast==14.0.1 +PyChromecast==14.0.5 # homeassistant.components.flick_electric PyFlick==0.0.2 @@ -60,17 +60,17 @@ PyFronius==0.7.3 PyLoadAPI==1.3.2 # homeassistant.components.met_eireann -PyMetEireann==2021.8.0 +PyMetEireann==2024.11.0 # homeassistant.components.met # homeassistant.components.norway_air -PyMetno==0.12.0 +PyMetno==0.13.0 # homeassistant.components.keymitt_ble PyMicroBot==0.0.17 # homeassistant.components.nina -PyNINA==0.3.3 +PyNINA==0.3.4 # homeassistant.components.mobile_app # homeassistant.components.owntracks @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.1 +PySwitchbot==0.54.0 # homeassistant.components.switchmate PySwitchmate==0.5.1 @@ -97,10 +97,10 @@ PyTransportNSW==0.1.1 # homeassistant.components.camera # homeassistant.components.stream -PyTurboJPEG==1.7.1 +PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare-neo==0.2.1 +PyViCare==2.38.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -109,14 +109,14 @@ PyXiaomiGateway==0.14.3 RachioPy==1.1.0 # homeassistant.components.python_script -RestrictedPython==7.0 +RestrictedPython==7.4 # homeassistant.components.remember_the_milk RtmAPI==0.7.2 # homeassistant.components.recorder # homeassistant.components.sql -SQLAlchemy==2.0.31 +SQLAlchemy==2.0.36 # homeassistant.components.tami4 Tami4EdgeAPI==3.0 @@ -131,7 +131,7 @@ TwitterAPI==2.7.12 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==3.0.0 +accuweather==4.0.0 # homeassistant.components.adax adax==0.4.0 @@ -152,10 +152,10 @@ advantage-air==0.4.4 afsapi==0.2.7 # homeassistant.components.agent_dvr -agent-py==0.0.23 +agent-py==0.0.24 # 
homeassistant.components.geo_json_events -aio-geojson-generic-client==0.4 +aio-geojson-generic-client==0.5 # homeassistant.components.geonetnz_quakes aio-geojson-geonetnz-quakes==0.16 @@ -170,20 +170,23 @@ aio-geojson-nsw-rfs-incidents==0.7 aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs -aio-georss-gdacs==0.9 +aio-georss-gdacs==0.10 + +# homeassistant.components.acaia +aioacaia==0.1.11 # homeassistant.components.airq -aioairq==0.3.2 +aioairq==0.4.3 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.2 +aioairzone-cloud==0.6.10 # homeassistant.components.airzone -aioairzone==0.8.2 +aioairzone==0.9.7 # homeassistant.components.ambient_network # homeassistant.components.ambient_station -aioambient==2024.01.0 +aioambient==2024.08.0 # homeassistant.components.apcupsd aioapcaccess==0.4.2 @@ -192,16 +195,16 @@ aioapcaccess==0.4.2 aioaquacell==0.2.0 # homeassistant.components.aseko_pool_live -aioaseko==0.2.0 +aioaseko==1.0.0 # homeassistant.components.asuswrt aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.8.0 +aioautomower==2024.12.0 # homeassistant.components.azure_devops -aioazuredevops==2.1.1 +aioazuredevops==2.2.1 # homeassistant.components.baf aiobafi6==0.9.0 @@ -210,7 +213,7 @@ aiobafi6==0.9.0 aiobotocore==2.13.1 # homeassistant.components.comelit -aiocomelit==0.9.0 +aiocomelit==0.9.1 # homeassistant.components.dhcp aiodhcpwatcher==1.0.2 @@ -221,6 +224,9 @@ aiodiscover==2.1.0 # homeassistant.components.dnsip aiodns==3.2.0 +# homeassistant.components.duke_energy +aiodukeenergy==0.2.2 + # homeassistant.components.eafm aioeafm==0.1.2 @@ -237,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.2.1 +aioesphomeapi==28.0.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -246,7 +252,7 @@ aioflo==2021.11.0 aioftp==0.21.3 # homeassistant.components.github -aiogithubapi==23.11.0 +aiogithubapi==24.6.0 # homeassistant.components.guardian aioguardian==2022.07.0 @@ -254,8 +260,11 @@ aioguardian==2022.07.0 # homeassistant.components.harmony aioharmony==0.2.10 +# homeassistant.components.hassio +aiohasupervisor==0.2.2b2 + # homeassistant.components.homekit_controller -aiohomekit==3.2.3 +aiohomekit==3.2.7 # homeassistant.components.hue aiohue==4.7.3 @@ -273,13 +282,10 @@ aiokef==0.2.16 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.5.0 +aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.0.8 - -# homeassistant.components.livisi -aiolivisi==0.0.19 +aiolifx==1.1.2 # homeassistant.components.lookin aiolookin==1.0.0 @@ -288,7 +294,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.8.1 +aiomealie==0.9.4 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -309,19 +315,19 @@ aionut==4.3.3 aiooncue==0.3.7 # homeassistant.components.openexchangerates -aioopenexchangerates==0.4.0 +aioopenexchangerates==0.6.8 # homeassistant.components.nmap_tracker -aiooui==0.1.6 +aiooui==0.1.7 # homeassistant.components.pegel_online -aiopegelonline==0.0.10 +aiopegelonline==0.1.0 # homeassistant.components.acmeda -aiopulse==0.4.4 +aiopulse==0.4.6 # homeassistant.components.purpleair -aiopurpleair==2022.12.1 +aiopurpleair==2023.12.0 # homeassistant.components.hunterdouglas_powerview aiopvapi==3.1.1 @@ -347,10 +353,10 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.34 +aioruckus==0.42 # homeassistant.components.russound_rio -aiorussound==2.3.2 +aiorussound==4.1.0 
# homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -359,7 +365,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.2.4 +aioshelly==12.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -373,26 +379,32 @@ aiosolaredge==0.2.0 # homeassistant.components.steamist aiosteamist==1.0.0 +# homeassistant.components.cambridge_audio +aiostreammagic==2.10.0 + # homeassistant.components.switcher_kis -aioswitcher==4.0.2 +aioswitcher==5.1.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig -aiotankerkoenig==0.4.1 +aiotankerkoenig==0.4.2 + +# homeassistant.components.tedee +aiotedee==0.2.20 # homeassistant.components.tractive aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==80 +aiounifi==81 # homeassistant.components.vlc_telnet -aiovlc==0.3.2 +aiovlc==0.5.1 # homeassistant.components.vodafone_station -aiovodafone==0.6.0 +aiovodafone==0.6.1 # homeassistant.components.waqi aiowaqi==3.1.0 @@ -404,19 +416,19 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.0.3 +aiowithings==3.1.4 # homeassistant.components.yandex_transport aioymaps==1.2.5 # homeassistant.components.airgradient -airgradient==0.8.0 +airgradient==0.9.1 # homeassistant.components.airly airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.0 +airthings-ble==0.9.2 # homeassistant.components.airthings airthings-cloud==0.2.0 @@ -425,22 +437,22 @@ airthings-cloud==0.2.0 airtouch4pyapi==1.0.5 # homeassistant.components.airtouch5 -airtouch5py==0.2.10 +airtouch5py==0.2.11 # homeassistant.components.alpha_vantage alpha-vantage==2.3.1 # homeassistant.components.amberelectric -amberelectric==1.1.1 +amberelectric==2.0.12 # homeassistant.components.amcrest amcrest==1.9.8 # homeassistant.components.androidtv -androidtv[async]==0.0.73 +androidtv[async]==0.0.75 # homeassistant.components.androidtv_remote -androidtvremote2==0.1.1 +androidtvremote2==0.1.2 # homeassistant.components.anel_pwrctrl anel-pwrctrl-homeassistant==0.0.1.dev2 @@ -455,16 +467,16 @@ anthemav==1.4.1 anthropic==0.31.2 # homeassistant.components.weatherkit -apple_weatherkit==1.1.2 +apple_weatherkit==1.1.3 # homeassistant.components.apprise -apprise==1.8.0 +apprise==1.9.0 # homeassistant.components.aprs aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==2.2.1 +apsystems-ez1==2.4.0 # homeassistant.components.aqualogic aqualogic==2.6 @@ -487,7 +499,7 @@ asmog==0.0.6 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.40.0 +async-upnp-client==0.41.0 # homeassistant.components.arve asyncarve==0.1.1 @@ -505,13 +517,20 @@ asyncsleepiq==1.5.2 # atenpdu==0.3.2 # homeassistant.components.aurora -auroranoaa==0.0.3 +auroranoaa==0.0.5 # homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 # homeassistant.components.autarco -autarco==2.0.0 +autarco==3.1.0 + +# homeassistant.components.husqvarna_automower_ble +automower-ble==0.2.0 + +# homeassistant.components.generic +# homeassistant.components.stream +av==13.1.0 # homeassistant.components.avea # avea==1.5.1 @@ -520,10 +539,10 @@ autarco==2.0.0 # avion==0.10 # homeassistant.components.axis -axis==62 +axis==64 # homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.3.1 +ayla-iot-unofficial==1.4.4 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -559,23 +578,23 @@ beautifulsoup4==4.12.3 # beewi-smartclim==0.0.10 # 
homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.1 +bimmer-connected[china]==0.17.2 # homeassistant.components.bizkaibus bizkaibus==0.1.1 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==1.0.0 +bleak-esphome==1.1.0 # homeassistant.components.bluetooth -bleak-retry-connector==3.5.0 +bleak-retry-connector==3.6.0 # homeassistant.components.bluetooth -bleak==0.22.2 +bleak==0.22.3 # homeassistant.components.blebox -blebox-uniapi==2.4.2 +blebox-uniapi==2.5.0 # homeassistant.components.blink blinkpy==0.23.0 @@ -594,7 +613,7 @@ bluemaestro-ble==0.2.3 # bluepy==1.3.0 # homeassistant.components.bluetooth -bluetooth-adapters==0.19.4 +bluetooth-adapters==0.20.2 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 @@ -619,13 +638,13 @@ boto3==1.34.131 botocore==1.34.131 # homeassistant.components.bring -bring-api==0.8.1 +bring-api==0.9.1 # homeassistant.components.broadlink broadlink==0.19.0 # homeassistant.components.brother -brother==4.3.0 +brother==4.3.1 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -649,7 +668,7 @@ btsmarthub-devicelist==0.2.3 buienradar==1.0.6 # homeassistant.components.dhcp -cached-ipaddress==0.5.0 +cached-ipaddress==0.8.0 # homeassistant.components.caldav caldav==1.3.9 @@ -684,8 +703,12 @@ connect-box==0.3.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.cookidoo +cookidoo-api==0.10.0 + +# homeassistant.components.backup # homeassistant.components.utility_meter -croniter==2.0.2 +cronsim==2.6 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -703,10 +726,10 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.23.0 +dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.1 +debugpy==1.8.11 # homeassistant.components.decora_wifi # decora-wifi==1.4 @@ -715,7 +738,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.3.0 +deebot-client==9.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -726,10 +749,10 @@ defusedxml==0.7.1 deluge-client==1.10.2 # homeassistant.components.lametric -demetriek==0.4.0 +demetriek==1.1.0 # homeassistant.components.denonavr -denonavr==0.11.6 +denonavr==1.0.1 # homeassistant.components.devialet devialet==1.4.5 @@ -785,11 +808,14 @@ ebusdpy==0.0.17 # homeassistant.components.ecoal_boiler ecoaliface==0.4.0 +# homeassistant.components.eheimdigital +eheimdigital==1.0.3 + # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 # homeassistant.components.elevenlabs -elevenlabs==1.6.1 +elevenlabs==1.9.0 # homeassistant.components.elgato elgato==5.1.2 @@ -798,10 +824,10 @@ elgato==5.1.2 eliqonline==1.2.2 # homeassistant.components.elkm1 -elkm1-lib==2.2.7 +elkm1-lib==2.2.10 # homeassistant.components.elmax -elmax-api==0.0.5 +elmax-api==0.0.6.3 # homeassistant.components.elvia elvia==0.1.0 @@ -828,7 +854,7 @@ enturclient==0.2.4 env-canada==0.7.2 # homeassistant.components.season -ephem==4.1.5 +ephem==4.1.6 # homeassistant.components.epic_games_store epicstore-api==0.1.7 @@ -840,7 +866,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.1.9 +eq3btsmart==1.4.1 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -855,7 +881,7 @@ eufylife-ble-client==0.1.8 # evdev==1.6.1 # homeassistant.components.evohome -evohome-async==0.4.20 +evohome-async==0.4.21 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 @@ -895,7 +921,7 @@ 
fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.6.0 +flipr-api==1.6.1 # homeassistant.components.flux_led flux-led==1.0.4 @@ -908,7 +934,7 @@ fnv-hash-fast==1.0.2 foobot_async==1.0.0 # homeassistant.components.forecast_solar -forecast-solar==3.1.0 +forecast-solar==4.0.0 # homeassistant.components.fortios fortiosapi==1.0.5 @@ -921,22 +947,22 @@ freesms==0.2.0 # homeassistant.components.fritz # homeassistant.components.fritzbox_callmonitor -fritzconnection[qr]==1.13.2 +fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.6.3 +fyta_cli==0.7.0 # homeassistant.components.google_translate gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.2 +gardena-bluetooth==1.4.4 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 # homeassistant.components.google -gcal-sync==6.1.4 +gcal-sync==6.2.0 # homeassistant.components.geniushub geniushub-client==0.7.1 @@ -964,7 +990,7 @@ georss-qld-bushfire-alert-client==0.8 getmac==0.9.4 # homeassistant.components.gios -gios==4.0.0 +gios==5.0.0 # homeassistant.components.gitter gitterpy==0.1.7 @@ -972,6 +998,9 @@ gitterpy==0.1.7 # homeassistant.components.glances glances-api==0.8.0 +# homeassistant.components.go2rtc +go2rtc-client==0.1.2 + # homeassistant.components.goalzero goalzero==0.2.2 @@ -983,31 +1012,38 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # homeassistant.components.google_pubsub -google-cloud-pubsub==2.13.11 +google-cloud-pubsub==2.23.0 # homeassistant.components.google_cloud -google-cloud-texttospeech==2.16.3 +google-cloud-speech==2.27.0 + +# homeassistant.components.google_cloud +google-cloud-texttospeech==2.17.2 # homeassistant.components.google_generative_ai_conversation -google-generativeai==0.6.0 +google-generativeai==0.8.2 # homeassistant.components.nest -google-nest-sdm==5.0.0 +google-nest-sdm==6.1.5 + +# homeassistant.components.google_photos +google-photos-library-api==0.12.1 # homeassistant.components.google_travel_time googlemaps==2.5.1 # homeassistant.components.slide -goslide-api==0.5.1 +# homeassistant.components.slide_local +goslide-api==0.7.0 # homeassistant.components.tailwind -gotailwind==0.2.3 +gotailwind==0.3.0 # homeassistant.components.govee_ble govee-ble==0.40.0 # homeassistant.components.govee_light_local -govee-local-api==1.5.1 +govee-local-api==1.5.3 # homeassistant.components.remote_rpi_gpio gpiozero==1.6.2 @@ -1037,17 +1073,13 @@ gspread==5.5.0 gstreamer-player==1.1.2 # homeassistant.components.profiler -guppy3==3.1.4.post1 +guppy3==3.1.4.post1;python_version<'3.13' # homeassistant.components.iaqualink h2==4.1.0 -# homeassistant.components.generic -# homeassistant.components.stream -ha-av==10.1.1 - # homeassistant.components.ffmpeg -ha-ffmpeg==3.2.0 +ha-ffmpeg==3.2.2 # homeassistant.components.iotawatt ha-iotawattpy==0.1.2 @@ -1056,25 +1088,25 @@ ha-iotawattpy==0.1.2 ha-philipsjs==3.2.2 # homeassistant.components.habitica -habitipy==0.3.1 +habitipy==0.3.3 # homeassistant.components.bluetooth -habluetooth==3.3.2 +habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.81.1 +hass-nabucasa==0.87.0 # homeassistant.components.splunk hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==1.7.4 +hassil==2.0.5 # homeassistant.components.jewish_calendar -hdate==0.10.9 +hdate==0.11.1 # homeassistant.components.heatmiser -heatmiserV3==1.1.18 +heatmiserV3==2.0.3 # homeassistant.components.here_travel_time here-routing==1.0.1 @@ -1099,19 +1131,19 @@ hole==0.8.0 # homeassistant.components.holiday # 
homeassistant.components.workday -holidays==0.55 +holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20240809.0 +home-assistant-frontend==20241127.8 # homeassistant.components.conversation -home-assistant-intents==2024.8.7 +home-assistant-intents==2024.12.9 # homeassistant.components.home_connect homeconnect==0.8.0 # homeassistant.components.homematicip_cloud -homematicip==1.1.2 +homematicip==1.1.5 # homeassistant.components.horizon horimote==0.4.1 @@ -1120,10 +1152,10 @@ horimote==0.4.1 httplib2==0.20.4 # homeassistant.components.huawei_lte -huawei-lte-api==1.7.3 +huawei-lte-api==1.10.0 # homeassistant.components.huum -huum==0.7.10 +huum==0.7.12 # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -1143,7 +1175,7 @@ ibmiotf==0.3.4 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.1.1 +ical==8.2.0 # homeassistant.components.ping icmplib==3.0 @@ -1161,10 +1193,10 @@ iglo==1.2.7 ihcsdk==2.8.5 # homeassistant.components.imgw_pib -imgw_pib==1.0.5 +imgw_pib==1.0.7 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.4 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -1179,16 +1211,16 @@ inkbird-ble==0.5.8 insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire -intellifire4py==2.2.2 +intellifire4py==4.1.9 # homeassistant.components.iotty -iottycloud==0.1.3 +iottycloud==0.3.0 # homeassistant.components.iperf3 iperf3==0.1.11 # homeassistant.components.isal -isal==1.6.1 +isal==1.7.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 @@ -1197,7 +1229,7 @@ ismartgate==5.0.1 israel-rail-api==0.1.2 # homeassistant.components.abode -jaraco.abode==5.2.1 +jaraco.abode==6.2.1 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 @@ -1222,19 +1254,19 @@ kegtron-ble==0.4.0 kiwiki-client==0.1.1 # homeassistant.components.knocki -knocki==0.3.1 +knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2024.8.9.225351 +knx-frontend==2024.11.16.205004 # homeassistant.components.konnected konnected==1.2.0 # homeassistant.components.kraken -krakenex==2.1.0 +krakenex==2.2.2 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.2 +lacrosse-view==1.0.3 # homeassistant.components.eufy lakeside==0.13 @@ -1243,7 +1275,7 @@ lakeside==0.13 laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.1.6 +lcn-frontend==0.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1252,7 +1284,10 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.2 +led-ble==1.1.1 + +# homeassistant.components.lektrico +lektricowifi==0.0.43 # homeassistant.components.foscam libpyfoscam==1.2.2 @@ -1281,8 +1316,8 @@ linear-garage-door==0.2.9 # homeassistant.components.linode linode-api==4.1.9b1 -# homeassistant.components.lamarzocco -lmcloud==1.1.13 +# homeassistant.components.livisi +livisi==0.0.24 # homeassistant.components.google_maps locationsharinglib==5.0.1 @@ -1303,10 +1338,10 @@ lupupy==0.3.2 lw12==0.9.2 # homeassistant.components.scrape -lxml==5.1.0 +lxml==5.3.0 # homeassistant.components.matrix -matrix-nio==0.25.0 +matrix-nio==0.25.2 # homeassistant.components.maxcube maxcube-api==0.4.3 @@ -1342,13 +1377,13 @@ mficlient==0.5.0 micloud==0.5 # homeassistant.components.microbees -microBeesPy==0.3.2 +microBeesPy==0.3.5 # homeassistant.components.mill mill-local==0.3.0 # homeassistant.components.mill -millheater==0.11.8 +millheater==0.12.2 # homeassistant.components.minio minio==7.1.12 @@ 
-1360,26 +1395,29 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.3.2 +monzopy==1.4.2 # homeassistant.components.mopeka mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.24 +motionblinds==0.6.25 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.1 +motionblindsble==0.1.3 # homeassistant.components.motioneye motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==3.4.1.8.6 +mozart-api==4.1.1.116.4 # homeassistant.components.mullvad mullvad-api==1.0.0 +# homeassistant.components.music_assistant +music-assistant-client==1.0.8 + # homeassistant.components.tts mutagen==1.47.0 @@ -1399,16 +1437,16 @@ nad-receiver==0.3.0 ndms2-client==0.1.2 # homeassistant.components.ness_alarm -nessclient==1.0.0 +nessclient==1.1.2 # homeassistant.components.netdata -netdata==1.1.0 +netdata==1.3.0 # homeassistant.components.nmap_tracker netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.3.0 +nettigo-air-monitor==4.0.0 # homeassistant.components.neurio_energy neurio==0.3.1 @@ -1423,13 +1461,13 @@ nextcloudmonitor==1.5.1 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.2.0 +nextdns==4.0.0 # homeassistant.components.nibe_heatpump -nibe==2.11.0 +nibe==2.14.0 # homeassistant.components.nice_go -nice-go==0.3.0 +nice-go==1.0.0 # homeassistant.components.niko_home_control niko-home-control==0.2.1 @@ -1447,7 +1485,7 @@ notifications-android-tv==0.1.5 notify-events==1.0.4 # homeassistant.components.nederlandse_spoorwegen -nsapi==3.0.5 +nsapi==3.1.2 # homeassistant.components.nsw_fuel_station nsw-fuel-api-client==1.1.0 @@ -1463,7 +1501,10 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==1.26.0 +numpy==2.2.0 + +# homeassistant.components.nyt_games +nyt_games==0.4.4 # homeassistant.components.oasa_telematics oasatelematics==0.3 @@ -1480,8 +1521,11 @@ odp-amsterdam==6.0.2 # homeassistant.components.oem oemthermostat==1.1.1 +# homeassistant.components.ohme +ohme==1.1.1 + # homeassistant.components.ollama -ollama==0.3.1 +ollama==0.3.3 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1490,13 +1534,13 @@ omnilogic==0.4.5 ondilo==0.5.0 # homeassistant.components.onvif -onvif-zeep-async==3.1.12 +onvif-zeep-async==3.1.13 # homeassistant.components.opengarage open-garage==0.2.0 # homeassistant.components.open_meteo -open-meteo==0.3.1 +open-meteo==0.3.2 # homeassistant.components.openai_conversation openai==1.35.7 @@ -1514,7 +1558,7 @@ openhomedevice==2.2.0 opensensemap-api==0.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.7 +openwebifpy==4.3.0 # homeassistant.components.luci openwrt-luci-rpc==1.1.17 @@ -1523,7 +1567,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.7.0 +opower==0.8.6 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1541,7 +1585,7 @@ ourgroceries==1.5.4 ovoenergy==2.0.0 # homeassistant.components.p1_monitor -p1monitor==3.0.1 +p1monitor==3.1.0 # homeassistant.components.mqtt paho-mqtt==1.6.1 @@ -1588,7 +1632,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.38.3 +plugwise==1.6.4 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1599,6 +1643,9 @@ pmsensor==0.4 # homeassistant.components.poolsense poolsense==0.0.8 +# homeassistant.components.powerfox +powerfox==1.0.0 + # homeassistant.components.reddit praw==7.5.0 @@ -1609,7 +1656,7 @@ 
prayer-times-calculator-offline==1.0.3 proliphix==0.4.1 # homeassistant.components.prometheus -prometheus-client==0.17.1 +prometheus-client==0.21.0 # homeassistant.components.proxmoxve proxmoxer==2.0.1 @@ -1620,14 +1667,11 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.0.0 +psutil==6.1.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 -# homeassistant.components.androidtv -pure-python-adb[async]==0.3.0.dev0 - # homeassistant.components.pushbullet pushbullet.py==0.11.0 @@ -1635,10 +1679,10 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.1.1 +pvo==2.2.0 # homeassistant.components.aosmith -py-aosmith==1.0.8 +py-aosmith==1.0.12 # homeassistant.components.canary py-canary==0.5.4 @@ -1656,13 +1700,13 @@ py-dormakaba-dkey==1.0.5 py-improv-ble-client==1.0.3 # homeassistant.components.madvr -py-madvr2==1.6.29 +py-madvr2==1.6.32 # homeassistant.components.melissa py-melissa-climate==2.1.4 # homeassistant.components.nextbus -py-nextbusnext==2.0.4 +py-nextbusnext==2.0.5 # homeassistant.components.nightscout py-nightscout==1.2.2 @@ -1674,7 +1718,7 @@ py-schluter==0.1.7 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.5.2 +py-synologydsm-api==2.5.3 # homeassistant.components.zabbix py-zabbix==1.1.7 @@ -1686,10 +1730,10 @@ pyAtome==0.1.1 pyCEC==0.5.2 # homeassistant.components.control4 -pyControl4==1.1.0 +pyControl4==1.2.0 # homeassistant.components.duotecno -pyDuotecno==2024.5.1 +pyDuotecno==2024.10.1 # homeassistant.components.electrasmart pyElectra==1.2.4 @@ -1707,7 +1751,7 @@ pyRFXtrx==0.31.1 pySDCP==1 # homeassistant.components.tibber -pyTibber==0.28.2 +pyTibber==0.30.8 # homeassistant.components.dlink pyW215==0.7.0 @@ -1741,13 +1785,13 @@ pyasuswrt==0.1.21 pyatag==0.3.5.3 # homeassistant.components.netatmo -pyatmo==8.0.3 +pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.15.0 +pyatv==0.16.0 # homeassistant.components.aussie_broadband -pyaussiebb==0.0.15 +pyaussiebb==0.1.4 # homeassistant.components.balboa pybalboa==1.0.2 @@ -1759,7 +1803,7 @@ pybbox==0.0.5-alpha pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==0.4.0 +pyblu==1.0.4 # homeassistant.components.neato pybotvac==0.0.25 @@ -1791,20 +1835,26 @@ pycomfoconnect==0.5.1 # homeassistant.components.coolmaster pycoolmasternet-async==0.2.2 +# homeassistant.components.radio_browser +pycountry==24.6.1 + # homeassistant.components.microsoft pycsspeechtts==1.0.8 # homeassistant.components.cups -# pycups==1.9.73 +# pycups==2.0.4 # homeassistant.components.daikin -pydaikin==2.13.4 +pydaikin==2.13.8 # homeassistant.components.danfoss_air pydanfossair==0.1.0 +# homeassistant.components.deako +pydeako==0.6.0 + # homeassistant.components.deconz -pydeconz==116 +pydeconz==118 # homeassistant.components.delijn pydelijn==1.1.0 @@ -1813,13 +1863,13 @@ pydelijn==1.1.0 pydexcom==0.2.3 # homeassistant.components.discovergy -pydiscovergy==3.0.1 +pydiscovergy==3.0.2 # homeassistant.components.doods pydoods==1.0.2 # homeassistant.components.hydrawise -pydrawise==2024.8.0 +pydrawise==2024.12.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1831,7 +1881,7 @@ pyebox==1.1.4 pyecoforest==0.4.0 # homeassistant.components.econet -pyeconet==0.1.22 +pyeconet==0.1.23 # homeassistant.components.ista_ecotrend pyecotrend-ista==3.3.1 @@ -1849,10 +1899,10 @@ pyegps==0.2.5 pyeiscp==0.0.7 # homeassistant.components.emoncms -pyemoncms==0.0.7 +pyemoncms==0.1.1 # 
homeassistant.components.enphase_envoy -pyenphase==1.22.0 +pyenphase==1.23.0 # homeassistant.components.envisalink pyenvisalink==4.7 @@ -1870,7 +1920,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.7.8 +pyfibaro==0.8.0 # homeassistant.components.fido pyfido==2.1.2 @@ -1936,7 +1986,7 @@ pyintesishome==1.8.0 pyipma==3.0.7 # homeassistant.components.ipp -pyipp==0.16.0 +pyipp==0.17.0 # homeassistant.components.iqvia pyiqvia==2022.04.0 @@ -1944,6 +1994,9 @@ pyiqvia==2022.04.0 # homeassistant.components.irish_rail_transport pyirishrail==0.0.2 +# homeassistant.components.iskra +pyiskra==0.1.14 + # homeassistant.components.iss pyiss==1.0.1 @@ -1953,8 +2006,11 @@ pyisy==3.1.14 # homeassistant.components.itach pyitachip2ir==0.0.7 +# homeassistant.components.ituran +pyituran==0.1.4 + # homeassistant.components.jvc_projector -pyjvcprojector==1.0.12 +pyjvcprojector==1.1.2 # homeassistant.components.kaleidescape pykaleidescape==1.0.1 @@ -1969,7 +2025,7 @@ pykmtronic==0.3.0 pykodi==0.2.7 # homeassistant.components.kostal_plenticore -pykoplenti==1.2.2 +pykoplenti==1.3.0 # homeassistant.components.kraken pykrakenapi==0.1.8 @@ -1983,6 +2039,9 @@ pykwb==0.0.8 # homeassistant.components.lacrosse pylacrosse==0.4 +# homeassistant.components.lamarzocco +pylamarzocco==1.4.0 + # homeassistant.components.lastfm pylast==5.1.0 @@ -1996,16 +2055,16 @@ pylgnetcast==0.3.9 pylibrespot-java==0.1.1 # homeassistant.components.litejet -pylitejet==0.6.2 +pylitejet==0.6.3 # homeassistant.components.litterrobot pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.21.1 +pylutron-caseta==0.22.0 # homeassistant.components.lutron -pylutron==0.2.15 +pylutron==0.2.16 # homeassistant.components.mailgun pymailgunner==1.4 @@ -2032,7 +2091,7 @@ pymitv==1.4.3 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.9 +pymodbus==3.7.4 # homeassistant.components.monoprice pymonoprice==0.4 @@ -2044,7 +2103,7 @@ pymsteams==0.1.12 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==0.2.0 +pynecil==2.1.0 # homeassistant.components.netgear pynetgear==0.10.10 @@ -2055,6 +2114,9 @@ pynetio==0.1.9.1 # homeassistant.components.nobo_hub pynobo==1.8.1 +# homeassistant.components.nordpool +pynordpool==0.2.3 + # homeassistant.components.nuki pynuki==1.6.3 @@ -2080,7 +2142,7 @@ pyombi==0.1.10 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.1.1 +pyopenweathermap==0.2.1 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -2092,7 +2154,7 @@ pyoppleio-legacy==1.0.8 pyosoenergyapi==1.1.4 # homeassistant.components.opentherm_gw -pyotgw==2.2.0 +pyotgw==2.2.2 # homeassistant.auth.mfa_modules.notify # homeassistant.auth.mfa_modules.totp @@ -2100,25 +2162,28 @@ pyotgw==2.2.0 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.13.14 +pyoverkiz==1.15.0 # homeassistant.components.onewire pyownet==0.10.0.post1 +# homeassistant.components.palazzetti +pypalazzetti==0.1.14 + # homeassistant.components.elv pypca==0.0.7 # homeassistant.components.lcn -pypck==0.7.21 +pypck==0.7.24 # homeassistant.components.pjlink pypjlink2==1.2.1 # homeassistant.components.plaato -pyplaato==0.0.18 +pyplaato==0.0.19 # homeassistant.components.point -pypoint==2.3.2 +pypoint==3.0.0 # homeassistant.components.profiler pyprof2calltree==1.4.5 @@ -2151,7 +2216,7 @@ pyrecswitch==1.0.2 pyrepetierng==0.1.0 # homeassistant.components.risco -pyrisco==0.6.4 +pyrisco==0.6.5 # homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 @@ -2169,7 +2234,7 @@ 
pysabnzbd==1.1.1 pysaj==0.0.16 # homeassistant.components.schlage -pyschlage==2024.8.0 +pyschlage==2024.11.0 # homeassistant.components.sensibo pysensibo==1.1.0 @@ -2210,14 +2275,17 @@ pysmartapp==0.3.5 # homeassistant.components.smartthings pysmartthings==0.7.8 +# homeassistant.components.smarty +pysmarty2==0.10.1 + # homeassistant.components.edl21 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.0.13 +pysmlight==0.1.4 # homeassistant.components.snmp -pysnmp==6.2.5 +pysnmp==6.2.6 # homeassistant.components.snooz pysnooz==0.8.6 @@ -2229,16 +2297,16 @@ pysoma==0.0.12 pyspcwebgw==0.7.0 # homeassistant.components.assist_pipeline -pyspeex-noise==1.0.0 +pyspeex-noise==1.0.2 # homeassistant.components.squeezebox -pysqueezebox==0.7.1 +pysqueezebox==0.10.0 # homeassistant.components.stiebel_eltron pystiebeleltron==0.0.1.dev2 # homeassistant.components.suez_water -pysuez==0.2.0 +pysuezV2==1.3.5 # homeassistant.components.switchbee pyswitchbee==1.8.3 @@ -2246,17 +2314,11 @@ pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 -# homeassistant.components.tedee -pytedee-async==0.2.20 - -# homeassistant.components.tfiac -pytfiac==0.4 - # homeassistant.components.thinkingcleaner pythinkingcleaner==0.0.3 # homeassistant.components.motionmount -python-MotionMount==2.0.0 +python-MotionMount==2.2.0 # homeassistant.components.awair python-awair==0.2.4 @@ -2265,7 +2327,7 @@ python-awair==0.2.4 python-blockchain-api==0.0.2 # homeassistant.components.bsblan -python-bsblan==0.6.2 +python-bsblan==1.2.1 # homeassistant.components.clementine python-clementine-remote==1.0.1 @@ -2274,7 +2336,7 @@ python-clementine-remote==1.0.1 python-digitalocean==1.13.2 # homeassistant.components.ecobee -python-ecobee-api==0.2.18 +python-ecobee-api==0.2.20 # homeassistant.components.etherscan python-etherscan-api==0.0.3 @@ -2295,10 +2357,10 @@ python-gc100==1.0.3a0 python-gitlab==1.6.0 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.7.0 +python-homeassistant-analytics==0.8.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.3.0 +python-homewizard-energy==v7.0.0 # homeassistant.components.hp_ilo python-hpilo==4.4.3 @@ -2313,16 +2375,16 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.1 +python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay -python-linkplay==0.0.8 +python-linkplay==0.1.1 # homeassistant.components.lirc # python-lirc==1.2.3 # homeassistant.components.matter -python-matter-server==6.3.0 +python-matter-server==6.6.0 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -2334,7 +2396,7 @@ python-mpd2==3.1.1 python-mystrom==2.2.0 # homeassistant.components.swiss_public_transport -python-opendata-transport==0.4.0 +python-opendata-transport==0.5.0 # homeassistant.components.opensky python-opensky==1.0.1 @@ -2353,10 +2415,10 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.6.0 +python-roborock==2.7.2 # homeassistant.components.smarttub -python-smarttub==0.0.36 +python-smarttub==0.0.38 # homeassistant.components.songpal python-songpal==0.16.2 @@ -2368,7 +2430,7 @@ python-tado==0.17.6 python-technove==1.3.1 # homeassistant.components.telegram_bot -python-telegram-bot[socks]==21.0.1 +python-telegram-bot[socks]==21.5 # homeassistant.components.vlc python-vlc==3.0.18122 @@ -2385,6 +2447,9 @@ pytomorrowio==0.3.6 # homeassistant.components.touchline pytouchline==0.7 +# homeassistant.components.touchline_sl 
+pytouchlinesl==0.3.0 + # homeassistant.components.traccar # homeassistant.components.traccar_server pytraccar==2.1.1 @@ -2396,7 +2461,7 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==1.0.0 +pytrafikverket==1.1.1 # homeassistant.components.v2c pytrydan==0.8.0 @@ -2411,7 +2476,7 @@ pyuptimerobot==22.2.0 # pyuserinput==0.1.11 # homeassistant.components.vera -pyvera==0.3.13 +pyvera==0.3.15 # homeassistant.components.versasense pyversasense==0.0.6 @@ -2446,11 +2511,14 @@ pywilight==0.0.74 # homeassistant.components.wiz pywizlight==0.5.14 +# homeassistant.components.wmspro +pywmspro==0.2.1 + # homeassistant.components.ws66i pyws66i==1.1 # homeassistant.components.xeoma -pyxeoma==1.4.1 +pyxeoma==1.4.2 # homeassistant.components.yardian pyyardian==1.1.1 @@ -2474,7 +2542,7 @@ qnapstats==0.4.0 quantum-gateway==0.0.8 # homeassistant.components.radio_browser -radios==0.3.1 +radios==0.3.2 # homeassistant.components.radiotherm radiotherm==2.1.0 @@ -2489,19 +2557,19 @@ rapt-ble==0.1.2 raspyrfm-client==1.2.8 # homeassistant.components.refoss -refoss-ha==1.2.4 +refoss-ha==1.2.5 # homeassistant.components.rainmachine regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.5 +renault-api==0.2.8 # homeassistant.components.renson renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.7 +reolink-aio==0.11.5 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2510,7 +2578,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.9.0 +ring-doorbell==0.9.13 # homeassistant.components.fleetgo ritassist==0.9.2 @@ -2543,7 +2611,7 @@ rpi-bad-power==0.1.0 rtsp-to-webrtc==0.5.1 # homeassistant.components.russound_rnet -russound==0.1.9 +russound==0.2.0 # homeassistant.components.ruuvitag_ble ruuvitag-ble==0.1.2 @@ -2555,7 +2623,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.6.0 +samsungtvws[async,encrypted]==2.7.2 # homeassistant.components.sanix sanix==1.0.6 @@ -2570,14 +2638,14 @@ screenlogicpy==0.10.0 scsgate==0.1.0 # homeassistant.components.backup -securetar==2024.2.1 +securetar==2024.11.0 # homeassistant.components.sendgrid sendgrid==6.8.2 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.12.4 +sense-energy==0.13.4 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2586,13 +2654,16 @@ sensirion-ble==0.1.1 sensorpro-ble==0.5.3 # homeassistant.components.sensorpush -sensorpush-ble==1.6.2 +sensorpush-ble==1.7.1 + +# homeassistant.components.sensoterra +sensoterra==2.0.1 # homeassistant.components.sentry sentry-sdk==1.40.3 # homeassistant.components.sfr_box -sfrbox-api==0.0.8 +sfrbox-api==0.0.11 # homeassistant.components.sharkiq sharkiq==1.0.2 @@ -2616,7 +2687,10 @@ simplepush==2.2.3 simplisafe-python==2024.01.0 # homeassistant.components.sisyphus -sisyphus-control==3.1.3 +sisyphus-control==3.1.4 + +# homeassistant.components.sky_remote +skyboxremote==0.0.6 # homeassistant.components.slack slackclient==2.5.0 @@ -2628,22 +2702,22 @@ slixmpp==1.8.5 smart-meter-texas==0.5.5 # homeassistant.components.smhi -smhi-pkg==1.0.16 +smhi-pkg==1.0.18 # homeassistant.components.snapcast snapcast==2.3.6 # homeassistant.components.sonos -soco==0.30.4 +soco==0.30.6 # homeassistant.components.solaredge_local solaredge-local==0.2.3 # homeassistant.components.solarlog 
-solarlog_cli==0.1.6 +solarlog_cli==0.4.0 # homeassistant.components.solax -solax==3.1.1 +solax==3.2.1 # homeassistant.components.somfy_mylink somfy-mylink-synergy==1.0.6 @@ -2657,11 +2731,8 @@ speak2mary==1.4.0 # homeassistant.components.speedtestdotnet speedtest-cli==2.1.3 -# homeassistant.components.spider -spiderpy==1.6.1 - # homeassistant.components.spotify -spotipy==2.23.0 +spotifyaio==0.8.11 # homeassistant.components.sql sqlparse==0.5.0 @@ -2676,7 +2747,7 @@ starline==0.1.5 starlingbank==3.2 # homeassistant.components.starlink -starlink-grpc-core==1.1.3 +starlink-grpc-core==1.2.2 # homeassistant.components.statsd statsd==3.2.1 @@ -2684,18 +2755,14 @@ statsd==3.2.1 # homeassistant.components.steam_online steamodd==4.21 -# homeassistant.components.stookalert -stookalert==0.1.4 - # homeassistant.components.stookwijzer -stookwijzer==1.3.0 +stookwijzer==1.5.1 # homeassistant.components.streamlabswater streamlabswater==1.0.1 # homeassistant.components.huawei_lte # homeassistant.components.solaredge -# homeassistant.components.thermoworks_smoke # homeassistant.components.traccar stringcase==1.2.0 @@ -2739,7 +2806,7 @@ tellcore-net==0.4 tellcore-py==1.1.2 # homeassistant.components.tellduslive -tellduslive==0.10.11 +tellduslive==0.10.12 # homeassistant.components.lg_soundbar temescal==0.5 @@ -2753,7 +2820,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.7.3 +tesla-fleet-api==0.8.5 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2761,6 +2828,9 @@ tesla-powerwall==0.5.2 # homeassistant.components.tesla_wall_connector tesla-wall-connector==1.0.2 +# homeassistant.components.teslemetry +teslemetry-stream==0.4.2 + # homeassistant.components.tessie tessie-api==0.1.1 @@ -2773,12 +2843,12 @@ thermobeacon-ble==0.7.0 # homeassistant.components.thermopro thermopro-ble==0.10.0 -# homeassistant.components.thermoworks_smoke -thermoworks-smoke==0.1.8 - # homeassistant.components.thingspeak thingspeak==1.0.0 +# homeassistant.components.lg_thinq +thinqconnect==1.0.2 + # homeassistant.components.tikteck tikteck==0.4 @@ -2798,17 +2868,20 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2024.5 +total-connect-client==2024.12 # homeassistant.components.tplink_lte tp-connected==0.0.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.2 +tplink-omada-client==1.4.3 # homeassistant.components.transmission transmission-rpc==7.0.3 +# homeassistant.components.triggercmd +triggercmd==0.0.27 + # homeassistant.components.twinkly ttls==1.8.3 @@ -2816,10 +2889,10 @@ ttls==1.8.3 ttn_client==1.2.0 # homeassistant.components.tuya -tuya-device-sharing-sdk==0.1.9 +tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu -twentemilieu==2.0.1 +twentemilieu==2.2.0 # homeassistant.components.twilio twilio==6.32.0 @@ -2827,11 +2900,14 @@ twilio==6.32.0 # homeassistant.components.twitch twitchAPI==4.2.1 +# homeassistant.components.monarch_money +typedmonarchmoney==0.3.1 + # homeassistant.components.ukraine_alarm uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.0.2 +uiprotect==7.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2840,19 +2916,19 @@ ultraheat-api==0.5.7 unifi-discovery==1.2.0 # homeassistant.components.unifi_direct -unifi_ap==0.0.1 +unifi_ap==0.0.2 # homeassistant.components.unifiled unifiled==0.11 # homeassistant.components.zha -universal-silabs-flasher==0.0.22 
+universal-silabs-flasher==0.0.25 # homeassistant.components.upb -upb-lib==0.5.8 +upb-lib==0.5.9 # homeassistant.components.upcloud -upcloud-api==2.5.1 +upcloud-api==2.6.0 # homeassistant.components.huawei_lte # homeassistant.components.syncthru @@ -2872,7 +2948,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.7.6 +velbus-aio==2024.12.2 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2881,7 +2957,7 @@ venstarcolortouch==0.19 vilfo-api-client==0.5.0 # homeassistant.components.voip -voip-utils==0.1.0 +voip-utils==0.2.1 # homeassistant.components.volkszaehler volkszaehler==0.4.0 @@ -2890,7 +2966,7 @@ volkszaehler==0.4.0 volvooncall==0.10.3 # homeassistant.components.verisure -vsure==2.6.6 +vsure==2.6.7 # homeassistant.components.vasttrafik vtjp==0.2.1 @@ -2909,17 +2985,29 @@ wakeonlan==2.1.0 wallbox==0.7.0 # homeassistant.components.folder_watcher -watchdog==2.3.1 +watchdog==6.0.0 # homeassistant.components.waterfurnace waterfurnace==1.1.0 +# homeassistant.components.watergate +watergate-local-api==2024.4.1 + # homeassistant.components.weatherflow_cloud -weatherflow4py==0.2.21 +weatherflow4py==1.0.6 + +# homeassistant.components.cisco_webex_teams +webexpythonsdk==2.0.1 + +# homeassistant.components.nasweb +webio-api==0.1.11 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 +# homeassistant.components.weheat +weheat==2024.11.26 + # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2933,25 +3021,25 @@ wiffi==1.1.2 wirelesstagpy==0.8.1 # homeassistant.components.wled -wled==0.20.2 +wled==0.21.0 # homeassistant.components.wolflink -wolf-comm==0.0.9 +wolf-comm==0.0.15 # homeassistant.components.wyoming wyoming==1.5.4 # homeassistant.components.xbox -xbox-webapi==2.0.11 +xbox-webapi==2.1.0 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.2 +xiaomi-ble==0.33.0 # homeassistant.components.knx -xknx==3.1.1 +xknx==3.4.0 # homeassistant.components.knx -xknxproject==3.7.1 +xknxproject==3.8.1 # homeassistant.components.fritz # homeassistant.components.rest @@ -2964,14 +3052,16 @@ xmltodict==0.13.0 xs1-api-client==3.0.0 # homeassistant.components.yale_smart_alarm -yalesmartalarmclient==0.4.0 +yalesmartalarmclient==0.4.3 # homeassistant.components.august +# homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.4.3 +yalexs-ble==2.5.5 # homeassistant.components.august -yalexs==8.4.2 +# homeassistant.components.yale +yalexs==8.10.0 # homeassistant.components.yeelight yeelight==0.7.14 @@ -2989,7 +3079,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.08.06 +yt-dlp[default]==2024.12.13 # homeassistant.components.zamg zamg==0.3.6 @@ -2998,16 +3088,16 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.132.2 +zeroconf==0.136.2 # homeassistant.components.zeversolar -zeversolar==0.3.1 +zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.31 +zha==0.0.42 # homeassistant.components.zhong_hong -zhong-hong-hvac==1.0.12 +zhong-hong-hvac==1.0.13 # homeassistant.components.ziggo_mediabox_xl ziggo-mediabox-xl==1.1.0 @@ -3016,7 +3106,7 @@ ziggo-mediabox-xl==1.1.0 zm-py==0.5.4 # homeassistant.components.zwave_js -zwave-js-server-python==0.57.0 +zwave-js-server-python==0.60.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test.txt b/requirements_test.txt index 19a60b6aa28..98a948cd56e 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -7,20 +7,20 @@ -c 
homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt -astroid==3.2.4 -coverage==7.6.0 +astroid==3.3.6 +coverage==7.6.8 freezegun==1.5.1 +license-expression==30.4.0 mock-open==1.4.0 -mypy-dev==1.12.0a2 -pre-commit==3.7.1 -pydantic==1.10.17 -pylint==3.2.6 +mypy-dev==1.14.0a7 +pre-commit==4.0.0 +pydantic==2.10.3 +pylint==3.3.2 pylint-per-file-ignores==1.3.2 -pipdeptree==2.23.1 -pip-licenses==4.5.1 -pytest-asyncio==0.23.8 +pipdeptree==2.23.4 +pytest-asyncio==0.24.0 pytest-aiohttp==1.0.5 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-freezer==0.4.8 pytest-github-actions-annotate-failures==0.2.0 pytest-socket==0.7.0 @@ -29,26 +29,25 @@ pytest-timeout==2.3.1 pytest-unordered==0.6.1 pytest-picked==0.5.0 pytest-xdist==3.6.1 -pytest==8.3.1 +pytest==8.3.4 requests-mock==1.12.1 respx==0.21.1 -syrupy==4.6.1 -tqdm==4.66.4 -types-aiofiles==23.2.0.20240623 +syrupy==4.8.0 +tqdm==4.66.5 +types-aiofiles==24.1.0.20240626 types-atomicwrites==1.4.5.1 -types-croniter==2.0.0.20240423 -types-beautifulsoup4==4.12.0.20240511 -types-caldav==1.3.0.20240331 +types-croniter==4.0.0.20241030 +types-beautifulsoup4==4.12.0.20241020 +types-caldav==1.3.0.20241107 types-chardet==0.1.5 types-decorator==5.1.8.20240310 types-paho-mqtt==1.6.0.20240321 -types-pillow==10.2.0.20240520 -types-protobuf==4.24.0.20240106 -types-psutil==6.0.0.20240621 -types-python-dateutil==2.9.0.20240316 +types-pillow==10.2.0.20240822 +types-protobuf==5.28.3.20241030 +types-psutil==6.1.0.20241102 +types-python-dateutil==2.9.0.20241003 types-python-slugify==8.0.2.20240310 -types-pytz==2024.1.0.20240417 -types-PyYAML==6.0.12.20240311 +types-pytz==2024.2.0.20241003 +types-PyYAML==6.0.12.20240917 types-requests==2.31.0.3 types-xmltodict==0.13.0.3 -uv==0.2.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 34b482aa39f..fe528899ad3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -4,19 +4,19 @@ -r requirements_test.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.4 +AEMET-OpenData==0.6.3 # homeassistant.components.honeywell -AIOSomecomfort==0.0.25 +AIOSomecomfort==0.0.28 # homeassistant.components.adax Adax-local==0.1.5 # homeassistant.components.doorbird -DoorBirdPy==3.0.2 +DoorBirdPy==3.0.8 # homeassistant.components.homekit -HAP-python==4.9.1 +HAP-python==4.9.2 # homeassistant.components.tasmota HATasmota==0.9.2 @@ -33,7 +33,7 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.4.0 +Pillow==11.0.0 # homeassistant.components.plex PlexAPI==4.15.16 @@ -42,7 +42,7 @@ PlexAPI==4.15.16 ProgettiHWSW==0.1.3 # homeassistant.components.cast -PyChromecast==14.0.1 +PyChromecast==14.0.5 # homeassistant.components.flick_electric PyFlick==0.0.2 @@ -57,17 +57,17 @@ PyFronius==0.7.3 PyLoadAPI==1.3.2 # homeassistant.components.met_eireann -PyMetEireann==2021.8.0 +PyMetEireann==2024.11.0 # homeassistant.components.met # homeassistant.components.norway_air -PyMetno==0.12.0 +PyMetno==0.13.0 # homeassistant.components.keymitt_ble PyMicroBot==0.0.17 # homeassistant.components.nina -PyNINA==0.3.3 +PyNINA==0.3.4 # homeassistant.components.mobile_app # homeassistant.components.owntracks @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.1 +PySwitchbot==0.54.0 # homeassistant.components.syncthru PySyncThru==0.7.10 @@ -91,10 +91,10 @@ PyTransportNSW==0.1.1 # homeassistant.components.camera # homeassistant.components.stream -PyTurboJPEG==1.7.1 
+PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare-neo==0.2.1 +PyViCare==2.38.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -103,14 +103,14 @@ PyXiaomiGateway==0.14.3 RachioPy==1.1.0 # homeassistant.components.python_script -RestrictedPython==7.0 +RestrictedPython==7.4 # homeassistant.components.remember_the_milk RtmAPI==0.7.2 # homeassistant.components.recorder # homeassistant.components.sql -SQLAlchemy==2.0.31 +SQLAlchemy==2.0.36 # homeassistant.components.tami4 Tami4EdgeAPI==3.0 @@ -119,7 +119,7 @@ Tami4EdgeAPI==3.0 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==3.0.0 +accuweather==4.0.0 # homeassistant.components.adax adax==0.4.0 @@ -140,10 +140,10 @@ advantage-air==0.4.4 afsapi==0.2.7 # homeassistant.components.agent_dvr -agent-py==0.0.23 +agent-py==0.0.24 # homeassistant.components.geo_json_events -aio-geojson-generic-client==0.4 +aio-geojson-generic-client==0.5 # homeassistant.components.geonetnz_quakes aio-geojson-geonetnz-quakes==0.16 @@ -158,20 +158,23 @@ aio-geojson-nsw-rfs-incidents==0.7 aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs -aio-georss-gdacs==0.9 +aio-georss-gdacs==0.10 + +# homeassistant.components.acaia +aioacaia==0.1.11 # homeassistant.components.airq -aioairq==0.3.2 +aioairq==0.4.3 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.2 +aioairzone-cloud==0.6.10 # homeassistant.components.airzone -aioairzone==0.8.2 +aioairzone==0.9.7 # homeassistant.components.ambient_network # homeassistant.components.ambient_station -aioambient==2024.01.0 +aioambient==2024.08.0 # homeassistant.components.apcupsd aioapcaccess==0.4.2 @@ -180,16 +183,16 @@ aioapcaccess==0.4.2 aioaquacell==0.2.0 # homeassistant.components.aseko_pool_live -aioaseko==0.2.0 +aioaseko==1.0.0 # homeassistant.components.asuswrt aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.8.0 +aioautomower==2024.12.0 # homeassistant.components.azure_devops -aioazuredevops==2.1.1 +aioazuredevops==2.2.1 # homeassistant.components.baf aiobafi6==0.9.0 @@ -198,7 +201,7 @@ aiobafi6==0.9.0 aiobotocore==2.13.1 # homeassistant.components.comelit -aiocomelit==0.9.0 +aiocomelit==0.9.1 # homeassistant.components.dhcp aiodhcpwatcher==1.0.2 @@ -209,6 +212,9 @@ aiodiscover==2.1.0 # homeassistant.components.dnsip aiodns==3.2.0 +# homeassistant.components.duke_energy +aiodukeenergy==0.2.2 + # homeassistant.components.eafm aioeafm==0.1.2 @@ -225,13 +231,13 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.2.1 +aioesphomeapi==28.0.0 # homeassistant.components.flo aioflo==2021.11.0 # homeassistant.components.github -aiogithubapi==23.11.0 +aiogithubapi==24.6.0 # homeassistant.components.guardian aioguardian==2022.07.0 @@ -239,8 +245,11 @@ aioguardian==2022.07.0 # homeassistant.components.harmony aioharmony==0.2.10 +# homeassistant.components.hassio +aiohasupervisor==0.2.2b2 + # homeassistant.components.homekit_controller -aiohomekit==3.2.3 +aiohomekit==3.2.7 # homeassistant.components.hue aiohue==4.7.3 @@ -255,13 +264,10 @@ aiokafka==0.10.0 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.5.0 +aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.0.8 - -# homeassistant.components.livisi -aiolivisi==0.0.19 +aiolifx==1.1.2 # homeassistant.components.lookin aiolookin==1.0.0 @@ -270,7 +276,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.8.1 +aiomealie==0.9.4 # 
homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -291,19 +297,19 @@ aionut==4.3.3 aiooncue==0.3.7 # homeassistant.components.openexchangerates -aioopenexchangerates==0.4.0 +aioopenexchangerates==0.6.8 # homeassistant.components.nmap_tracker -aiooui==0.1.6 +aiooui==0.1.7 # homeassistant.components.pegel_online -aiopegelonline==0.0.10 +aiopegelonline==0.1.0 # homeassistant.components.acmeda -aiopulse==0.4.4 +aiopulse==0.4.6 # homeassistant.components.purpleair -aiopurpleair==2022.12.1 +aiopurpleair==2023.12.0 # homeassistant.components.hunterdouglas_powerview aiopvapi==3.1.1 @@ -329,10 +335,10 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.34 +aioruckus==0.42 # homeassistant.components.russound_rio -aiorussound==2.3.2 +aiorussound==4.1.0 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -341,7 +347,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.2.4 +aioshelly==12.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -355,26 +361,32 @@ aiosolaredge==0.2.0 # homeassistant.components.steamist aiosteamist==1.0.0 +# homeassistant.components.cambridge_audio +aiostreammagic==2.10.0 + # homeassistant.components.switcher_kis -aioswitcher==4.0.2 +aioswitcher==5.1.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig -aiotankerkoenig==0.4.1 +aiotankerkoenig==0.4.2 + +# homeassistant.components.tedee +aiotedee==0.2.20 # homeassistant.components.tractive aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==80 +aiounifi==81 # homeassistant.components.vlc_telnet -aiovlc==0.3.2 +aiovlc==0.5.1 # homeassistant.components.vodafone_station -aiovodafone==0.6.0 +aiovodafone==0.6.1 # homeassistant.components.waqi aiowaqi==3.1.0 @@ -386,19 +398,19 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.0.3 +aiowithings==3.1.4 # homeassistant.components.yandex_transport aioymaps==1.2.5 # homeassistant.components.airgradient -airgradient==0.8.0 +airgradient==0.9.1 # homeassistant.components.airly airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.0 +airthings-ble==0.9.2 # homeassistant.components.airthings airthings-cloud==0.2.0 @@ -407,16 +419,16 @@ airthings-cloud==0.2.0 airtouch4pyapi==1.0.5 # homeassistant.components.airtouch5 -airtouch5py==0.2.10 +airtouch5py==0.2.11 # homeassistant.components.amberelectric -amberelectric==1.1.1 +amberelectric==2.0.12 # homeassistant.components.androidtv -androidtv[async]==0.0.73 +androidtv[async]==0.0.75 # homeassistant.components.androidtv_remote -androidtvremote2==0.1.1 +androidtvremote2==0.1.2 # homeassistant.components.anova anova-wifi==0.17.0 @@ -428,16 +440,16 @@ anthemav==1.4.1 anthropic==0.31.2 # homeassistant.components.weatherkit -apple_weatherkit==1.1.2 +apple_weatherkit==1.1.3 # homeassistant.components.apprise -apprise==1.8.0 +apprise==1.9.0 # homeassistant.components.aprs aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==2.2.1 +apsystems-ez1==2.4.0 # homeassistant.components.aranet aranet4==2.4.0 @@ -451,7 +463,7 @@ arcam-fmj==1.5.2 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.40.0 +async-upnp-client==0.41.0 # homeassistant.components.arve asyncarve==0.1.1 @@ -460,19 +472,26 @@ asyncarve==0.1.1 asyncsleepiq==1.5.2 # homeassistant.components.aurora -auroranoaa==0.0.3 +auroranoaa==0.0.5 # 
homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 # homeassistant.components.autarco -autarco==2.0.0 +autarco==3.1.0 + +# homeassistant.components.husqvarna_automower_ble +automower-ble==0.2.0 + +# homeassistant.components.generic +# homeassistant.components.stream +av==13.1.0 # homeassistant.components.axis -axis==62 +axis==64 # homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.3.1 +ayla-iot-unofficial==1.4.4 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -493,20 +512,20 @@ base36==0.1.1 beautifulsoup4==4.12.3 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.1 +bimmer-connected[china]==0.17.2 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==1.0.0 +bleak-esphome==1.1.0 # homeassistant.components.bluetooth -bleak-retry-connector==3.5.0 +bleak-retry-connector==3.6.0 # homeassistant.components.bluetooth -bleak==0.22.2 +bleak==0.22.3 # homeassistant.components.blebox -blebox-uniapi==2.4.2 +blebox-uniapi==2.5.0 # homeassistant.components.blink blinkpy==0.23.0 @@ -518,7 +537,7 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.bluetooth -bluetooth-adapters==0.19.4 +bluetooth-adapters==0.20.2 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 @@ -539,13 +558,13 @@ boschshcpy==0.2.91 botocore==1.34.131 # homeassistant.components.bring -bring-api==0.8.1 +bring-api==0.9.1 # homeassistant.components.broadlink broadlink==0.19.0 # homeassistant.components.brother -brother==4.3.0 +brother==4.3.1 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -560,7 +579,7 @@ bthome-ble==3.9.1 buienradar==1.0.6 # homeassistant.components.dhcp -cached-ipaddress==0.5.0 +cached-ipaddress==0.8.0 # homeassistant.components.caldav caldav==1.3.9 @@ -580,8 +599,12 @@ colorthief==0.2.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.cookidoo +cookidoo-api==0.10.0 + +# homeassistant.components.backup # homeassistant.components.utility_meter -croniter==2.0.2 +cronsim==2.6 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -599,13 +622,13 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.23.0 +dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.1 +debugpy==1.8.11 # homeassistant.components.ecovacs -deebot-client==8.3.0 +deebot-client==9.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -616,10 +639,10 @@ defusedxml==0.7.1 deluge-client==1.10.2 # homeassistant.components.lametric -demetriek==0.4.0 +demetriek==1.1.0 # homeassistant.components.denonavr -denonavr==0.11.6 +denonavr==1.0.1 # homeassistant.components.devialet devialet==1.4.5 @@ -663,20 +686,23 @@ eagle100==0.1.1 # homeassistant.components.easyenergy easyenergy==2.1.2 +# homeassistant.components.eheimdigital +eheimdigital==1.0.3 + # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 # homeassistant.components.elevenlabs -elevenlabs==1.6.1 +elevenlabs==1.9.0 # homeassistant.components.elgato elgato==5.1.2 # homeassistant.components.elkm1 -elkm1-lib==2.2.7 +elkm1-lib==2.2.10 # homeassistant.components.elmax -elmax-api==0.0.5 +elmax-api==0.0.6.3 # homeassistant.components.elvia elvia==0.1.0 @@ -697,7 +723,7 @@ enocean==0.50 env-canada==0.7.2 # homeassistant.components.season -ephem==4.1.5 +ephem==4.1.6 # homeassistant.components.epic_games_store epicstore-api==0.1.7 @@ -709,7 +735,7 @@ epion==0.0.3 epson-projector==0.5.1 # 
homeassistant.components.eq3btsmart -eq3btsmart==1.1.9 +eq3btsmart==1.4.1 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -721,7 +747,7 @@ eternalegypt==0.0.16 eufylife-ble-client==0.1.8 # homeassistant.components.evohome -evohome-async==0.4.20 +evohome-async==0.4.21 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 @@ -754,7 +780,7 @@ fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.6.0 +flipr-api==1.6.1 # homeassistant.components.flux_led flux-led==1.0.4 @@ -767,29 +793,29 @@ fnv-hash-fast==1.0.2 foobot_async==1.0.0 # homeassistant.components.forecast_solar -forecast-solar==3.1.0 +forecast-solar==4.0.0 # homeassistant.components.freebox freebox-api==1.1.0 # homeassistant.components.fritz # homeassistant.components.fritzbox_callmonitor -fritzconnection[qr]==1.13.2 +fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.6.3 +fyta_cli==0.7.0 # homeassistant.components.google_translate gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.2 +gardena-bluetooth==1.4.4 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 # homeassistant.components.google -gcal-sync==6.1.4 +gcal-sync==6.2.0 # homeassistant.components.geniushub geniushub-client==0.7.1 @@ -817,11 +843,14 @@ georss-qld-bushfire-alert-client==0.8 getmac==0.9.4 # homeassistant.components.gios -gios==4.0.0 +gios==5.0.0 # homeassistant.components.glances glances-api==0.8.0 +# homeassistant.components.go2rtc +go2rtc-client==0.1.2 + # homeassistant.components.goalzero goalzero==0.2.2 @@ -833,25 +862,38 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # homeassistant.components.google_pubsub -google-cloud-pubsub==2.13.11 +google-cloud-pubsub==2.23.0 + +# homeassistant.components.google_cloud +google-cloud-speech==2.27.0 + +# homeassistant.components.google_cloud +google-cloud-texttospeech==2.17.2 # homeassistant.components.google_generative_ai_conversation -google-generativeai==0.6.0 +google-generativeai==0.8.2 # homeassistant.components.nest -google-nest-sdm==5.0.0 +google-nest-sdm==6.1.5 + +# homeassistant.components.google_photos +google-photos-library-api==0.12.1 # homeassistant.components.google_travel_time googlemaps==2.5.1 +# homeassistant.components.slide +# homeassistant.components.slide_local +goslide-api==0.7.0 + # homeassistant.components.tailwind -gotailwind==0.2.3 +gotailwind==0.3.0 # homeassistant.components.govee_ble govee-ble==0.40.0 # homeassistant.components.govee_light_local -govee-local-api==1.5.1 +govee-local-api==1.5.3 # homeassistant.components.gpsd gps3==0.33.3 @@ -872,17 +914,13 @@ growattServer==1.5.0 gspread==5.5.0 # homeassistant.components.profiler -guppy3==3.1.4.post1 +guppy3==3.1.4.post1;python_version<'3.13' # homeassistant.components.iaqualink h2==4.1.0 -# homeassistant.components.generic -# homeassistant.components.stream -ha-av==10.1.1 - # homeassistant.components.ffmpeg -ha-ffmpeg==3.2.0 +ha-ffmpeg==3.2.2 # homeassistant.components.iotawatt ha-iotawattpy==0.1.2 @@ -891,19 +929,19 @@ ha-iotawattpy==0.1.2 ha-philipsjs==3.2.2 # homeassistant.components.habitica -habitipy==0.3.1 +habitipy==0.3.3 # homeassistant.components.bluetooth -habluetooth==3.3.2 +habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.81.1 +hass-nabucasa==0.87.0 # homeassistant.components.conversation -hassil==1.7.4 +hassil==2.0.5 # homeassistant.components.jewish_calendar -hdate==0.10.9 +hdate==0.11.1 # homeassistant.components.here_travel_time here-routing==1.0.1 @@ -922,28 +960,28 @@ 
hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.55 +holidays==0.63 # homeassistant.components.frontend -home-assistant-frontend==20240809.0 +home-assistant-frontend==20241127.8 # homeassistant.components.conversation -home-assistant-intents==2024.8.7 +home-assistant-intents==2024.12.9 # homeassistant.components.home_connect homeconnect==0.8.0 # homeassistant.components.homematicip_cloud -homematicip==1.1.2 +homematicip==1.1.5 # homeassistant.components.remember_the_milk httplib2==0.20.4 # homeassistant.components.huawei_lte -huawei-lte-api==1.7.3 +huawei-lte-api==1.10.0 # homeassistant.components.huum -huum==0.7.10 +huum==0.7.12 # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -957,7 +995,7 @@ ibeacon-ble==1.2.0 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.1.1 +ical==8.2.0 # homeassistant.components.ping icmplib==3.0 @@ -969,10 +1007,10 @@ idasen-ha==2.6.2 ifaddr==0.2.0 # homeassistant.components.imgw_pib -imgw_pib==1.0.5 +imgw_pib==1.0.7 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.4 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -987,13 +1025,13 @@ inkbird-ble==0.5.8 insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire -intellifire4py==2.2.2 +intellifire4py==4.1.9 # homeassistant.components.iotty -iottycloud==0.1.3 +iottycloud==0.3.0 # homeassistant.components.isal -isal==1.6.1 +isal==1.7.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 @@ -1002,7 +1040,7 @@ ismartgate==5.0.1 israel-rail-api==0.1.2 # homeassistant.components.abode -jaraco.abode==5.2.1 +jaraco.abode==6.2.1 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 @@ -1018,25 +1056,25 @@ justnimbus==0.7.4 kegtron-ble==0.4.0 # homeassistant.components.knocki -knocki==0.3.1 +knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2024.8.9.225351 +knx-frontend==2024.11.16.205004 # homeassistant.components.konnected konnected==1.2.0 # homeassistant.components.kraken -krakenex==2.1.0 +krakenex==2.2.2 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.2 +lacrosse-view==1.0.3 # homeassistant.components.laundrify laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.1.6 +lcn-frontend==0.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1045,7 +1083,10 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.2 +led-ble==1.1.1 + +# homeassistant.components.lektrico +lektricowifi==0.0.43 # homeassistant.components.foscam libpyfoscam==1.2.2 @@ -1059,8 +1100,8 @@ libsoundtouch==0.8 # homeassistant.components.linear_garage_door linear-garage-door==0.2.9 -# homeassistant.components.lamarzocco -lmcloud==1.1.13 +# homeassistant.components.livisi +livisi==0.0.24 # homeassistant.components.london_underground london-tube-status==0.5 @@ -1075,10 +1116,10 @@ luftdaten==0.7.4 lupupy==0.3.2 # homeassistant.components.scrape -lxml==5.1.0 +lxml==5.3.0 # homeassistant.components.matrix -matrix-nio==0.25.0 +matrix-nio==0.25.2 # homeassistant.components.maxcube maxcube-api==0.4.3 @@ -1108,13 +1149,13 @@ mficlient==0.5.0 micloud==0.5 # homeassistant.components.microbees -microBeesPy==0.3.2 +microBeesPy==0.3.5 # homeassistant.components.mill mill-local==0.3.0 # homeassistant.components.mill -millheater==0.11.8 +millheater==0.12.2 # homeassistant.components.minio minio==7.1.12 @@ -1126,26 +1167,29 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # 
homeassistant.components.monzo -monzopy==1.3.2 +monzopy==1.4.2 # homeassistant.components.mopeka mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.24 +motionblinds==0.6.25 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.1 +motionblindsble==0.1.3 # homeassistant.components.motioneye motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==3.4.1.8.6 +mozart-api==4.1.1.116.4 # homeassistant.components.mullvad mullvad-api==1.0.0 +# homeassistant.components.music_assistant +music-assistant-client==1.0.8 + # homeassistant.components.tts mutagen==1.47.0 @@ -1162,13 +1206,13 @@ myuplink==0.6.0 ndms2-client==0.1.2 # homeassistant.components.ness_alarm -nessclient==1.0.0 +nessclient==1.1.2 # homeassistant.components.nmap_tracker netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.3.0 +nettigo-air-monitor==4.0.0 # homeassistant.components.nexia nexia==2.0.8 @@ -1180,13 +1224,16 @@ nextcloudmonitor==1.5.1 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.2.0 +nextdns==4.0.0 # homeassistant.components.nibe_heatpump -nibe==2.11.0 +nibe==2.14.0 # homeassistant.components.nice_go -nice-go==0.3.0 +nice-go==1.0.0 + +# homeassistant.components.niko_home_control +niko-home-control==0.2.1 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 @@ -1208,7 +1255,10 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==1.26.0 +numpy==2.2.0 + +# homeassistant.components.nyt_games +nyt_games==0.4.4 # homeassistant.components.google oauth2client==4.1.3 @@ -1219,8 +1269,11 @@ objgraph==3.5.0 # homeassistant.components.garages_amsterdam odp-amsterdam==6.0.2 +# homeassistant.components.ohme +ohme==1.1.1 + # homeassistant.components.ollama -ollama==0.3.1 +ollama==0.3.3 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1229,13 +1282,13 @@ omnilogic==0.4.5 ondilo==0.5.0 # homeassistant.components.onvif -onvif-zeep-async==3.1.12 +onvif-zeep-async==3.1.13 # homeassistant.components.opengarage open-garage==0.2.0 # homeassistant.components.open_meteo -open-meteo==0.3.1 +open-meteo==0.3.2 # homeassistant.components.openai_conversation openai==1.35.7 @@ -1247,10 +1300,10 @@ openerz-api==0.3.0 openhomedevice==2.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.7 +openwebifpy==4.3.0 # homeassistant.components.opower -opower==0.7.0 +opower==0.8.6 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1262,7 +1315,7 @@ ourgroceries==1.5.4 ovoenergy==2.0.0 # homeassistant.components.p1_monitor -p1monitor==3.0.1 +p1monitor==3.1.0 # homeassistant.components.mqtt paho-mqtt==1.6.1 @@ -1292,7 +1345,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.38.3 +plugwise==1.6.4 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1300,6 +1353,9 @@ plumlightpad==0.0.11 # homeassistant.components.poolsense poolsense==0.0.8 +# homeassistant.components.powerfox +powerfox==1.0.0 + # homeassistant.components.reddit praw==7.5.0 @@ -1307,7 +1363,7 @@ praw==7.5.0 prayer-times-calculator-offline==1.0.3 # homeassistant.components.prometheus -prometheus-client==0.17.1 +prometheus-client==0.21.0 # homeassistant.components.hardware # homeassistant.components.recorder @@ -1315,10 +1371,7 @@ prometheus-client==0.17.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.0.0 - -# homeassistant.components.androidtv -pure-python-adb[async]==0.3.0.dev0 +psutil==6.1.0 # 
homeassistant.components.pushbullet pushbullet.py==0.11.0 @@ -1327,10 +1380,10 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.1.1 +pvo==2.2.0 # homeassistant.components.aosmith -py-aosmith==1.0.8 +py-aosmith==1.0.12 # homeassistant.components.canary py-canary==0.5.4 @@ -1348,13 +1401,13 @@ py-dormakaba-dkey==1.0.5 py-improv-ble-client==1.0.3 # homeassistant.components.madvr -py-madvr2==1.6.29 +py-madvr2==1.6.32 # homeassistant.components.melissa py-melissa-climate==2.1.4 # homeassistant.components.nextbus -py-nextbusnext==2.0.4 +py-nextbusnext==2.0.5 # homeassistant.components.nightscout py-nightscout==1.2.2 @@ -1363,16 +1416,16 @@ py-nightscout==1.2.2 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.5.2 +py-synologydsm-api==2.5.3 # homeassistant.components.hdmi_cec pyCEC==0.5.2 # homeassistant.components.control4 -pyControl4==1.1.0 +pyControl4==1.2.0 # homeassistant.components.duotecno -pyDuotecno==2024.5.1 +pyDuotecno==2024.10.1 # homeassistant.components.electrasmart pyElectra==1.2.4 @@ -1381,7 +1434,7 @@ pyElectra==1.2.4 pyRFXtrx==0.31.1 # homeassistant.components.tibber -pyTibber==0.28.2 +pyTibber==0.30.8 # homeassistant.components.dlink pyW215==0.7.0 @@ -1409,13 +1462,13 @@ pyasuswrt==0.1.21 pyatag==0.3.5.3 # homeassistant.components.netatmo -pyatmo==8.0.3 +pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.15.0 +pyatv==0.16.0 # homeassistant.components.aussie_broadband -pyaussiebb==0.0.15 +pyaussiebb==0.1.4 # homeassistant.components.balboa pybalboa==1.0.2 @@ -1424,7 +1477,7 @@ pybalboa==1.0.2 pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==0.4.0 +pyblu==1.0.4 # homeassistant.components.neato pybotvac==0.0.25 @@ -1441,23 +1494,29 @@ pycomfoconnect==0.5.1 # homeassistant.components.coolmaster pycoolmasternet-async==0.2.2 +# homeassistant.components.radio_browser +pycountry==24.6.1 + # homeassistant.components.microsoft pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.4 +pydaikin==2.13.8 + +# homeassistant.components.deako +pydeako==0.6.0 # homeassistant.components.deconz -pydeconz==116 +pydeconz==118 # homeassistant.components.dexcom pydexcom==0.2.3 # homeassistant.components.discovergy -pydiscovergy==3.0.1 +pydiscovergy==3.0.2 # homeassistant.components.hydrawise -pydrawise==2024.8.0 +pydrawise==2024.12.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1466,7 +1525,7 @@ pydroid-ipcam==2.0.0 pyecoforest==0.4.0 # homeassistant.components.econet -pyeconet==0.1.22 +pyeconet==0.1.23 # homeassistant.components.ista_ecotrend pyecotrend-ista==3.3.1 @@ -1477,11 +1536,14 @@ pyefergy==22.5.0 # homeassistant.components.energenie_power_sockets pyegps==0.2.5 +# homeassistant.components.onkyo +pyeiscp==0.0.7 + # homeassistant.components.emoncms -pyemoncms==0.0.7 +pyemoncms==0.1.1 # homeassistant.components.enphase_envoy -pyenphase==1.22.0 +pyenphase==1.23.0 # homeassistant.components.everlights pyeverlights==0.1.0 @@ -1493,7 +1555,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.7.8 +pyfibaro==0.8.0 # homeassistant.components.fido pyfido==2.1.2 @@ -1547,19 +1609,25 @@ pyinsteon==1.6.3 pyipma==3.0.7 # homeassistant.components.ipp -pyipp==0.16.0 +pyipp==0.17.0 # homeassistant.components.iqvia pyiqvia==2022.04.0 +# homeassistant.components.iskra +pyiskra==0.1.14 + # homeassistant.components.iss pyiss==1.0.1 # homeassistant.components.isy994 pyisy==3.1.14 +# homeassistant.components.ituran +pyituran==0.1.4 + # 
homeassistant.components.jvc_projector -pyjvcprojector==1.0.12 +pyjvcprojector==1.1.2 # homeassistant.components.kaleidescape pykaleidescape==1.0.1 @@ -1574,7 +1642,7 @@ pykmtronic==0.3.0 pykodi==0.2.7 # homeassistant.components.kostal_plenticore -pykoplenti==1.2.2 +pykoplenti==1.3.0 # homeassistant.components.kraken pykrakenapi==0.1.8 @@ -1582,6 +1650,9 @@ pykrakenapi==0.1.8 # homeassistant.components.kulersky pykulersky==0.5.2 +# homeassistant.components.lamarzocco +pylamarzocco==1.4.0 + # homeassistant.components.lastfm pylast==5.1.0 @@ -1595,16 +1666,16 @@ pylgnetcast==0.3.9 pylibrespot-java==0.1.1 # homeassistant.components.litejet -pylitejet==0.6.2 +pylitejet==0.6.3 # homeassistant.components.litterrobot pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.21.1 +pylutron-caseta==0.22.0 # homeassistant.components.lutron -pylutron==0.2.15 +pylutron==0.2.16 # homeassistant.components.mailgun pymailgunner==1.4 @@ -1625,7 +1696,7 @@ pymicro-vad==1.0.1 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.9 +pymodbus==3.7.4 # homeassistant.components.monoprice pymonoprice==0.4 @@ -1634,7 +1705,7 @@ pymonoprice==0.4 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==0.2.0 +pynecil==2.1.0 # homeassistant.components.netgear pynetgear==0.10.10 @@ -1642,6 +1713,9 @@ pynetgear==0.10.10 # homeassistant.components.nobo_hub pynobo==1.8.1 +# homeassistant.components.nordpool +pynordpool==0.2.3 + # homeassistant.components.nuki pynuki==1.6.3 @@ -1664,7 +1738,7 @@ pyoctoprintapi==0.1.12 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.1.1 +pyopenweathermap==0.2.1 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -1673,7 +1747,7 @@ pyopnsense==0.4.0 pyosoenergyapi==1.1.4 # homeassistant.components.opentherm_gw -pyotgw==2.2.0 +pyotgw==2.2.2 # homeassistant.auth.mfa_modules.notify # homeassistant.auth.mfa_modules.totp @@ -1681,22 +1755,25 @@ pyotgw==2.2.0 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.13.14 +pyoverkiz==1.15.0 # homeassistant.components.onewire pyownet==0.10.0.post1 +# homeassistant.components.palazzetti +pypalazzetti==0.1.14 + # homeassistant.components.lcn -pypck==0.7.21 +pypck==0.7.24 # homeassistant.components.pjlink pypjlink2==1.2.1 # homeassistant.components.plaato -pyplaato==0.0.18 +pyplaato==0.0.19 # homeassistant.components.point -pypoint==2.3.2 +pypoint==3.0.0 # homeassistant.components.profiler pyprof2calltree==1.4.5 @@ -1717,7 +1794,7 @@ pyqwikswitch==0.93 pyrainbird==6.0.1 # homeassistant.components.risco -pyrisco==0.6.4 +pyrisco==0.6.5 # homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 @@ -1732,7 +1809,7 @@ pyrympro==0.0.8 pysabnzbd==1.1.1 # homeassistant.components.schlage -pyschlage==2024.8.0 +pyschlage==2024.11.0 # homeassistant.components.sensibo pysensibo==1.1.0 @@ -1764,14 +1841,17 @@ pysmartapp==0.3.5 # homeassistant.components.smartthings pysmartthings==0.7.8 +# homeassistant.components.smarty +pysmarty2==0.10.1 + # homeassistant.components.edl21 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.0.13 +pysmlight==0.1.4 # homeassistant.components.snmp -pysnmp==6.2.5 +pysnmp==6.2.6 # homeassistant.components.snooz pysnooz==0.8.6 @@ -1783,13 +1863,13 @@ pysoma==0.0.12 pyspcwebgw==0.7.0 # homeassistant.components.assist_pipeline -pyspeex-noise==1.0.0 +pyspeex-noise==1.0.2 # homeassistant.components.squeezebox -pysqueezebox==0.7.1 +pysqueezebox==0.10.0 # homeassistant.components.suez_water -pysuez==0.2.0 +pysuezV2==1.3.5 # 
homeassistant.components.switchbee pyswitchbee==1.8.3 @@ -1797,20 +1877,17 @@ pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 -# homeassistant.components.tedee -pytedee-async==0.2.20 - # homeassistant.components.motionmount -python-MotionMount==2.0.0 +python-MotionMount==2.2.0 # homeassistant.components.awair python-awair==0.2.4 # homeassistant.components.bsblan -python-bsblan==0.6.2 +python-bsblan==1.2.1 # homeassistant.components.ecobee -python-ecobee-api==0.2.18 +python-ecobee-api==0.2.20 # homeassistant.components.fully_kiosk python-fullykiosk==0.0.14 @@ -1819,10 +1896,10 @@ python-fullykiosk==0.0.14 # python-gammu==3.2.4 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.7.0 +python-homeassistant-analytics==0.8.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.3.0 +python-homewizard-energy==v7.0.0 # homeassistant.components.izone python-izone==1.2.9 @@ -1831,13 +1908,13 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.1 +python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay -python-linkplay==0.0.8 +python-linkplay==0.1.1 # homeassistant.components.matter -python-matter-server==6.3.0 +python-matter-server==6.6.0 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -1849,7 +1926,7 @@ python-mpd2==3.1.1 python-mystrom==2.2.0 # homeassistant.components.swiss_public_transport -python-opendata-transport==0.4.0 +python-opendata-transport==0.5.0 # homeassistant.components.opensky python-opensky==1.0.1 @@ -1865,10 +1942,10 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.6.0 +python-roborock==2.7.2 # homeassistant.components.smarttub -python-smarttub==0.0.36 +python-smarttub==0.0.38 # homeassistant.components.songpal python-songpal==0.16.2 @@ -1880,7 +1957,7 @@ python-tado==0.17.6 python-technove==1.3.1 # homeassistant.components.telegram_bot -python-telegram-bot[socks]==21.0.1 +python-telegram-bot[socks]==21.5 # homeassistant.components.tile pytile==2023.12.0 @@ -1888,6 +1965,9 @@ pytile==2023.12.0 # homeassistant.components.tomorrowio pytomorrowio==0.3.6 +# homeassistant.components.touchline_sl +pytouchlinesl==0.3.0 + # homeassistant.components.traccar # homeassistant.components.traccar_server pytraccar==2.1.1 @@ -1899,7 +1979,7 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==1.0.0 +pytrafikverket==1.1.1 # homeassistant.components.v2c pytrydan==0.8.0 @@ -1911,7 +1991,7 @@ pyudev==0.24.1 pyuptimerobot==22.2.0 # homeassistant.components.vera -pyvera==0.3.13 +pyvera==0.3.15 # homeassistant.components.vesync pyvesync==2.1.12 @@ -1943,6 +2023,9 @@ pywilight==0.0.74 # homeassistant.components.wiz pywizlight==0.5.14 +# homeassistant.components.wmspro +pywmspro==0.2.1 + # homeassistant.components.ws66i pyws66i==1.1 @@ -1962,7 +2045,7 @@ qingping-ble==0.10.0 qnapstats==0.4.0 # homeassistant.components.radio_browser -radios==0.3.1 +radios==0.3.2 # homeassistant.components.radiotherm radiotherm==2.1.0 @@ -1971,25 +2054,25 @@ radiotherm==2.1.0 rapt-ble==0.1.2 # homeassistant.components.refoss -refoss-ha==1.2.4 +refoss-ha==1.2.5 # homeassistant.components.rainmachine regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.5 +renault-api==0.2.8 # homeassistant.components.renson renson-endura-delta==1.7.1 # 
homeassistant.components.reolink -reolink-aio==0.9.7 +reolink-aio==0.11.5 # homeassistant.components.rflink rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.9.0 +ring-doorbell==0.9.13 # homeassistant.components.roku rokuecp==0.19.3 @@ -2022,7 +2105,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.6.0 +samsungtvws[async,encrypted]==2.7.2 # homeassistant.components.sanix sanix==1.0.6 @@ -2031,11 +2114,11 @@ sanix==1.0.6 screenlogicpy==0.10.0 # homeassistant.components.backup -securetar==2024.2.1 +securetar==2024.11.0 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.12.4 +sense-energy==0.13.4 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2044,13 +2127,16 @@ sensirion-ble==0.1.1 sensorpro-ble==0.5.3 # homeassistant.components.sensorpush -sensorpush-ble==1.6.2 +sensorpush-ble==1.7.1 + +# homeassistant.components.sensoterra +sensoterra==2.0.1 # homeassistant.components.sentry sentry-sdk==1.40.3 # homeassistant.components.sfr_box -sfrbox-api==0.0.8 +sfrbox-api==0.0.11 # homeassistant.components.sharkiq sharkiq==1.0.2 @@ -2067,6 +2153,9 @@ simplepush==2.2.3 # homeassistant.components.simplisafe simplisafe-python==2024.01.0 +# homeassistant.components.sky_remote +skyboxremote==0.0.6 + # homeassistant.components.slack slackclient==2.5.0 @@ -2074,19 +2163,19 @@ slackclient==2.5.0 smart-meter-texas==0.5.5 # homeassistant.components.smhi -smhi-pkg==1.0.16 +smhi-pkg==1.0.18 # homeassistant.components.snapcast snapcast==2.3.6 # homeassistant.components.sonos -soco==0.30.4 +soco==0.30.6 # homeassistant.components.solarlog -solarlog_cli==0.1.6 +solarlog_cli==0.4.0 # homeassistant.components.solax -solax==3.1.1 +solax==3.2.1 # homeassistant.components.somfy_mylink somfy-mylink-synergy==1.0.6 @@ -2100,11 +2189,8 @@ speak2mary==1.4.0 # homeassistant.components.speedtestdotnet speedtest-cli==2.1.3 -# homeassistant.components.spider -spiderpy==1.6.1 - # homeassistant.components.spotify -spotipy==2.23.0 +spotifyaio==0.8.11 # homeassistant.components.sql sqlparse==0.5.0 @@ -2116,7 +2202,7 @@ srpenergy==1.3.6 starline==0.1.5 # homeassistant.components.starlink -starlink-grpc-core==1.1.3 +starlink-grpc-core==1.2.2 # homeassistant.components.statsd statsd==3.2.1 @@ -2124,18 +2210,14 @@ statsd==3.2.1 # homeassistant.components.steam_online steamodd==4.21 -# homeassistant.components.stookalert -stookalert==0.1.4 - # homeassistant.components.stookwijzer -stookwijzer==1.3.0 +stookwijzer==1.5.1 # homeassistant.components.streamlabswater streamlabswater==1.0.1 # homeassistant.components.huawei_lte # homeassistant.components.solaredge -# homeassistant.components.thermoworks_smoke # homeassistant.components.traccar stringcase==1.2.0 @@ -2161,7 +2243,7 @@ systembridgemodels==4.2.4 tailscale==0.6.1 # homeassistant.components.tellduslive -tellduslive==0.10.11 +tellduslive==0.10.12 # homeassistant.components.lg_soundbar temescal==0.5 @@ -2172,7 +2254,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.7.3 +tesla-fleet-api==0.8.5 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2180,6 +2262,9 @@ tesla-powerwall==0.5.2 # homeassistant.components.tesla_wall_connector tesla-wall-connector==1.0.2 +# homeassistant.components.teslemetry +teslemetry-stream==0.4.2 + # homeassistant.components.tessie tessie-api==0.1.1 @@ -2189,6 +2274,9 @@ thermobeacon-ble==0.7.0 # 
homeassistant.components.thermopro thermopro-ble==0.10.0 +# homeassistant.components.lg_thinq +thinqconnect==1.0.2 + # homeassistant.components.tilt_ble tilt-ble==0.2.3 @@ -2202,14 +2290,17 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2024.5 +total-connect-client==2024.12 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.2 +tplink-omada-client==1.4.3 # homeassistant.components.transmission transmission-rpc==7.0.3 +# homeassistant.components.triggercmd +triggercmd==0.0.27 + # homeassistant.components.twinkly ttls==1.8.3 @@ -2217,10 +2308,10 @@ ttls==1.8.3 ttn_client==1.2.0 # homeassistant.components.tuya -tuya-device-sharing-sdk==0.1.9 +tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu -twentemilieu==2.0.1 +twentemilieu==2.2.0 # homeassistant.components.twilio twilio==6.32.0 @@ -2228,11 +2319,14 @@ twilio==6.32.0 # homeassistant.components.twitch twitchAPI==4.2.1 +# homeassistant.components.monarch_money +typedmonarchmoney==0.3.1 + # homeassistant.components.ukraine_alarm uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.0.2 +uiprotect==7.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2241,13 +2335,13 @@ ultraheat-api==0.5.7 unifi-discovery==1.2.0 # homeassistant.components.zha -universal-silabs-flasher==0.0.22 +universal-silabs-flasher==0.0.25 # homeassistant.components.upb -upb-lib==0.5.8 +upb-lib==0.5.9 # homeassistant.components.upcloud -upcloud-api==2.5.1 +upcloud-api==2.6.0 # homeassistant.components.huawei_lte # homeassistant.components.syncthru @@ -2267,7 +2361,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.7.6 +velbus-aio==2024.12.2 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2276,13 +2370,13 @@ venstarcolortouch==0.19 vilfo-api-client==0.5.0 # homeassistant.components.voip -voip-utils==0.1.0 +voip-utils==0.2.1 # homeassistant.components.volvooncall volvooncall==0.10.3 # homeassistant.components.verisure -vsure==2.6.6 +vsure==2.6.7 # homeassistant.components.vulcan vulcan-api==2.3.2 @@ -2298,14 +2392,23 @@ wakeonlan==2.1.0 wallbox==0.7.0 # homeassistant.components.folder_watcher -watchdog==2.3.1 +watchdog==6.0.0 + +# homeassistant.components.watergate +watergate-local-api==2024.4.1 # homeassistant.components.weatherflow_cloud -weatherflow4py==0.2.21 +weatherflow4py==1.0.6 + +# homeassistant.components.nasweb +webio-api==0.1.11 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 +# homeassistant.components.weheat +weheat==2024.11.26 + # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2316,25 +2419,25 @@ whois==0.9.27 wiffi==1.1.2 # homeassistant.components.wled -wled==0.20.2 +wled==0.21.0 # homeassistant.components.wolflink -wolf-comm==0.0.9 +wolf-comm==0.0.15 # homeassistant.components.wyoming wyoming==1.5.4 # homeassistant.components.xbox -xbox-webapi==2.0.11 +xbox-webapi==2.1.0 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.2 +xiaomi-ble==0.33.0 # homeassistant.components.knx -xknx==3.1.1 +xknx==3.4.0 # homeassistant.components.knx -xknxproject==3.7.1 +xknxproject==3.8.1 # homeassistant.components.fritz # homeassistant.components.rest @@ -2344,14 +2447,16 @@ xknxproject==3.7.1 xmltodict==0.13.0 # homeassistant.components.yale_smart_alarm -yalesmartalarmclient==0.4.0 +yalesmartalarmclient==0.4.3 # homeassistant.components.august +# homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.4.3 +yalexs-ble==2.5.5 # 
homeassistant.components.august -yalexs==8.4.2 +# homeassistant.components.yale +yalexs==8.10.0 # homeassistant.components.yeelight yeelight==0.7.14 @@ -2366,22 +2471,22 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.08.06 +yt-dlp[default]==2024.12.13 # homeassistant.components.zamg zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.132.2 +zeroconf==0.136.2 # homeassistant.components.zeversolar -zeversolar==0.3.1 +zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.31 +zha==0.0.42 # homeassistant.components.zwave_js -zwave-js-server-python==0.57.0 +zwave-js-server-python==0.60.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 0c8d2b3796b..dcddf267eb4 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.6.2 +ruff==0.8.3 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 6ce97468699..5cc609eec2a 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -6,7 +6,6 @@ from __future__ import annotations import difflib import importlib from operator import itemgetter -import os from pathlib import Path import pkgutil import re @@ -15,7 +14,7 @@ import tomllib from typing import Any from homeassistant.util.yaml.loader import load_yaml -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration # Requirements which can't be installed on all systems because they rely on additional # system packages. Requirements listed in EXCLUDED_REQUIREMENTS_ALL will be commented-out @@ -59,8 +58,16 @@ INCLUDED_REQUIREMENTS_WHEELS = { # will be included in requirements_all_{action}.txt OVERRIDDEN_REQUIREMENTS_ACTIONS = { - "pytest": {"exclude": set(), "include": {"python-gammu"}}, - "wheels_aarch64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "pytest": { + "exclude": set(), + "include": {"python-gammu"}, + "markers": {}, + }, + "wheels_aarch64": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, # Pandas has issues building on armhf, it is expected they # will drop the platform in the near future (they consider it # "flimsy" on 386). 
The following packages depend on pandas, @@ -68,10 +75,23 @@ OVERRIDDEN_REQUIREMENTS_ACTIONS = { "wheels_armhf": { "exclude": {"env-canada", "noaa-coops", "pyezviz", "pykrakenapi"}, "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, + "wheels_armv7": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, + "wheels_amd64": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, + "wheels_i386": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, }, - "wheels_armv7": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, - "wheels_amd64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, - "wheels_i386": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, } IGNORE_PIN = ("colorlog>2.1,<3", "urllib3") @@ -82,8 +102,8 @@ URL_PIN = ( ) -CONSTRAINT_PATH = os.path.join( - os.path.dirname(__file__), "../homeassistant/package_constraints.txt" +CONSTRAINT_PATH = ( + Path(__file__).parent.parent / "homeassistant" / "package_constraints.txt" ) CONSTRAINT_BASE = """ # Constrain pycryptodome to avoid vulnerability @@ -97,9 +117,9 @@ httplib2>=0.19.0 # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. -grpcio==1.59.0 -grpcio-status==1.59.0 -grpcio-reflection==1.59.0 +grpcio==1.67.1 +grpcio-status==1.67.1 +grpcio-reflection==1.67.1 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -119,7 +139,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. -anyio==4.4.0 +anyio==4.6.2.post1 h11==0.14.0 httpcore==1.0.5 @@ -128,13 +148,8 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==1.26.0 - -# Prevent dependency conflicts between sisyphus-control and aioambient -# until upper bounds for sisyphus-control have been updated -# https://github.com/jkeljo/sisyphus-control/issues/6 -python-engineio>=3.13.1,<4.0 -python-socketio>=4.6.0,<5.0 +numpy==2.2.0 +pandas~=2.2.3 # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 @@ -143,9 +158,8 @@ multidict>=6.0.2 # Version 2.0 added typing, prevent accidental fallbacks backoff>=2.0 -# Required to avoid breaking (#101042). -# v2 has breaking changes (#99218). -pydantic==1.10.17 +# ensure pydantic version does not float since it might have breaking changes +pydantic==2.10.3 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 @@ -164,16 +178,18 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.4 +protobuf==5.28.3 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder faust-cchardet>=2.1.18 -# websockets 11.0 is missing files in the source distribution -# which break wheel builds so we need at least 11.0.1 -# https://github.com/aaugustin/websockets/issues/1329 -websockets>=11.0.1 +# websockets 13.1 is the first version to fully support the new +# asyncio implementation. The legacy implementation is now +# deprecated as of websockets 14.0. 
+# https://websockets.readthedocs.io/en/13.0.1/howto/upgrade.html#missing-features +# https://websockets.readthedocs.io/en/stable/howto/upgrade.html +websockets>=13.1 # pysnmplib is no longer maintained and does not work with newer # python @@ -186,15 +202,12 @@ get-mac==1000000000.0.0 # We want to skip the binary wheels for the 'charset-normalizer' packages. # They are build with mypyc, but causes issues with our wheel builder. # In order to do so, we need to constrain the version. -charset-normalizer==3.2.0 +charset-normalizer==3.4.0 # dacite: Ensure we have a version that is able to handle type unions for -# Roborock, NAM, Brother, and GIOS. +# NAM, Brother, and GIOS. dacite>=1.7.0 -# Musle wheels for pandas 2.2.0 cannot be build for any architecture. -pandas==2.1.4 - # chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x chacha20poly1305-reuseable>=0.13.0 @@ -202,8 +215,8 @@ chacha20poly1305-reuseable>=0.13.0 # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 pycountry>=23.12.11 -# scapy<2.5.0 will not work with python3.12 -scapy>=2.5.0 +# scapy==2.6.0 causes CI failures due to a race condition +scapy>=2.6.1 # tuf isn't updated to deal with breaking changes in securesystemslib==1.0. # Only tuf>=4 includes a constraint to <1.0. @@ -212,6 +225,18 @@ tuf>=4.0.0 # https://github.com/jd/tenacity/issues/471 tenacity!=8.4.0 + +# 5.0.0 breaks Timeout as a context manager +# TypeError: 'Timeout' object does not support the context manager protocol +async-timeout==4.0.3 + +# aiofiles keeps getting downgraded by custom components +# causing newer methods to not be available and breaking +# some integrations at startup +# https://github.com/home-assistant/core/issues/127529 +# https://github.com/home-assistant/core/issues/122508 +# https://github.com/home-assistant/core/issues/118004 +aiofiles>=24.1.0 """ GENERATED_MESSAGE = ( @@ -262,8 +287,7 @@ def explore_module(package: str, explore_children: bool) -> list[str]: def core_requirements() -> list[str]: """Gather core requirements out of pyproject.toml.""" - with open("pyproject.toml", "rb") as fp: - data = tomllib.load(fp) + data = tomllib.loads(Path("pyproject.toml").read_text()) dependencies: list[str] = data["project"]["dependencies"] return dependencies @@ -276,7 +300,9 @@ def gather_recursive_requirements( seen = set() seen.add(domain) - integration = Integration(Path(f"homeassistant/components/{domain}")) + integration = Integration( + Path(f"homeassistant/components/{domain}"), _get_hassfest_config() + ) integration.load_manifest() reqs = {x for x in integration.requirements if x not in CONSTRAINT_BASE} for dep_domain in integration.dependencies: @@ -315,6 +341,10 @@ def process_action_requirement(req: str, action: str) -> str: return req if normalized_package_name in EXCLUDED_REQUIREMENTS_ALL: return f"# {req}" + if markers := OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["markers"].get( + normalized_package_name, None + ): + return f"{req};{markers}" return req @@ -327,8 +357,8 @@ def gather_modules() -> dict[str, list[str]] | None: gather_requirements_from_manifests(errors, reqs) gather_requirements_from_modules(errors, reqs) - for key in reqs: - reqs[key] = sorted(reqs[key], key=lambda name: (len(name.split(".")), name)) + for value in reqs.values(): + value = sorted(value, key=lambda name: (len(name.split(".")), name)) if errors: print("******* ERROR") @@ -342,7 +372,8 @@ def gather_requirements_from_manifests( errors: list[str], reqs: dict[str, list[str]] ) -> 
None: """Gather all of the requirements from manifests.""" - integrations = Integration.load_dir(Path("homeassistant/components")) + config = _get_hassfest_config() + integrations = Integration.load_dir(config.core_integrations_path, config) for domain in sorted(integrations): integration = integrations[domain] @@ -531,7 +562,7 @@ def diff_file(filename: str, content: str) -> list[str]: def main(validate: bool, ci: bool) -> int: """Run the script.""" - if not os.path.isfile("requirements_all.txt"): + if not Path("requirements_all.txt").is_file(): print("Run this from HA root dir") return 1 @@ -590,6 +621,16 @@ def main(validate: bool, ci: bool) -> int: return 0 +def _get_hassfest_config() -> Config: + """Get hassfest config.""" + return Config( + root=Path().absolute(), + specific_integrations=None, + action="validate", + requirements=True, + ) + + if __name__ == "__main__": _VAL = sys.argv[-1] == "validate" _CI = sys.argv[-1] == "ci" diff --git a/script/hassfest/__init__.py b/script/hassfest/__init__.py index 2fa7997162f..c8c9aa9ef39 100644 --- a/script/hassfest/__init__.py +++ b/script/hassfest/__init__.py @@ -1 +1,14 @@ """Manifest validator.""" + +import ast +from functools import lru_cache +from pathlib import Path + + +@lru_cache +def ast_parse_module(file_path: Path) -> ast.Module: + """Parse a module. + + Cached to avoid parsing the same file for each plugin. + """ + return ast.parse(file_path.read_text()) diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index ea3c56200a2..c93d8fd4499 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -4,7 +4,7 @@ from __future__ import annotations import argparse from operator import attrgetter -import pathlib +from pathlib import Path import sys from time import monotonic @@ -23,6 +23,7 @@ from . import ( metadata, mqtt, mypy_config, + quality_scale, requirements, services, ssdp, @@ -43,6 +44,7 @@ INTEGRATION_PLUGINS = [ json, manifest, mqtt, + quality_scale, requirements, services, ssdp, @@ -63,9 +65,9 @@ ALL_PLUGIN_NAMES = [ ] -def valid_integration_path(integration_path: pathlib.Path | str) -> pathlib.Path: +def valid_integration_path(integration_path: Path | str) -> Path: """Test if it's a valid integration.""" - path = pathlib.Path(integration_path) + path = Path(integration_path) if not path.is_dir(): raise argparse.ArgumentTypeError(f"{integration_path} is not a directory.") @@ -107,6 +109,12 @@ def get_config() -> Config: default=ALL_PLUGIN_NAMES, help="Comma-separate list of plugins to run. 
Valid plugin names: %(default)s", ) + parser.add_argument( + "--core-path", + type=Path, + default=Path(), + help="Path to core", + ) parsed = parser.parse_args() if parsed.action is None: @@ -119,12 +127,12 @@ def get_config() -> Config: if ( not parsed.integration_path - and not pathlib.Path("requirements_all.txt").is_file() + and not (parsed.core_path / "requirements_all.txt").is_file() ): raise RuntimeError("Run from Home Assistant root") return Config( - root=pathlib.Path(".").absolute(), + root=parsed.core_path.absolute(), specific_integrations=parsed.integration_path, action=parsed.action, requirements=parsed.requirements, @@ -146,12 +154,12 @@ def main() -> int: integrations = {} for int_path in config.specific_integrations: - integration = Integration(int_path) + integration = Integration(int_path, config) integration.load_manifest() integrations[integration.domain] = integration else: - integrations = Integration.load_dir(pathlib.Path("homeassistant/components")) + integrations = Integration.load_dir(config.core_integrations_path, config) plugins += HASS_PLUGINS for plugin in plugins: diff --git a/script/hassfest/bluetooth.py b/script/hassfest/bluetooth.py index 49480d1ed02..94f25588632 100644 --- a/script/hassfest/bluetooth.py +++ b/script/hassfest/bluetooth.py @@ -34,19 +34,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(bluetooth_path)) as fp: - current = fp.read() - if current != content: - config.add_error( - "bluetooth", - "File bluetooth.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) - return + if bluetooth_path.read_text() != content: + config.add_error( + "bluetooth", + "File bluetooth.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate bluetooth file.""" bluetooth_path = config.root / "homeassistant/generated/bluetooth.py" - with open(str(bluetooth_path), "w") as fp: - fp.write(f"{config.cache['bluetooth']}") + bluetooth_path.write_text(f"{config.cache['bluetooth']}") diff --git a/script/hassfest/codeowners.py b/script/hassfest/codeowners.py index 04150836dd5..73ea8d02520 100644 --- a/script/hassfest/codeowners.py +++ b/script/hassfest/codeowners.py @@ -98,18 +98,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(codeowners_path)) as fp: - if fp.read().strip() != content: - config.add_error( - "codeowners", - "File CODEOWNERS is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) - return + if codeowners_path.read_text() != content + "\n": + config.add_error( + "codeowners", + "File CODEOWNERS is not up to date. 
Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate CODEOWNERS.""" codeowners_path = config.root / "CODEOWNERS" - with open(str(codeowners_path), "w") as fp: - fp.write(f"{config.cache['codeowners']}\n") + codeowners_path.write_text(f"{config.cache['codeowners']}\n") diff --git a/script/hassfest/config_flow.py b/script/hassfest/config_flow.py index 382e77bde74..83d406a0036 100644 --- a/script/hassfest/config_flow.py +++ b/script/hassfest/config_flow.py @@ -3,7 +3,6 @@ from __future__ import annotations import json -import pathlib from typing import Any from .brand import validate as validate_brands @@ -216,36 +215,31 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - brands = Brand.load_dir(pathlib.Path(config.root / "homeassistant/brands"), config) + brands = Brand.load_dir(config.root / "homeassistant/brands", config) validate_brands(brands, integrations, config) - with open(str(config_flow_path)) as fp: - if fp.read() != content: - config.add_error( - "config_flow", - "File config_flows.py is not up to date. " - "Run python3 -m script.hassfest", - fixable=True, - ) + if config_flow_path.read_text() != content: + config.add_error( + "config_flow", + "File config_flows.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) config.cache["integrations"] = content = _generate_integrations( brands, integrations, config ) - with open(str(integrations_path)) as fp: - if fp.read() != content + "\n": - config.add_error( - "config_flow", - "File integrations.json is not up to date. " - "Run python3 -m script.hassfest", - fixable=True, - ) + if integrations_path.read_text() != content + "\n": + config.add_error( + "config_flow", + "File integrations.json is not up to date. " + "Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate config flow file.""" config_flow_path = config.root / "homeassistant/generated/config_flows.py" integrations_path = config.root / "homeassistant/generated/integrations.json" - with open(str(config_flow_path), "w") as fp: - fp.write(f"{config.cache['config_flow']}") - with open(str(integrations_path), "w") as fp: - fp.write(f"{config.cache['integrations']}\n") + config_flow_path.write_text(f"{config.cache['config_flow']}") + integrations_path.write_text(f"{config.cache['integrations']}\n") diff --git a/script/hassfest/config_schema.py b/script/hassfest/config_schema.py index 06ef2065127..70dff1194bc 100644 --- a/script/hassfest/config_schema.py +++ b/script/hassfest/config_schema.py @@ -6,11 +6,12 @@ import ast from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN +from . import ast_parse_module from .model import Config, Integration CONFIG_SCHEMA_IGNORE = { # Configuration under the homeassistant key is a special case, it's handled by - # conf_util.async_process_ha_core_config already during bootstrapping, not by + # core_config.async_process_ha_core_config already during bootstrapping, not by # a schema in the homeassistant integration. 
HOMEASSISTANT_DOMAIN, } @@ -60,7 +61,7 @@ def _validate_integration(config: Config, integration: Integration) -> None: # Virtual integrations don't have any implementation return - init = ast.parse(init_file.read_text()) + init = ast_parse_module(init_file) # No YAML Support if not _has_function( @@ -81,7 +82,7 @@ def _validate_integration(config: Config, integration: Integration) -> None: config_file = integration.path / "config.py" if config_file.is_file(): - config_module = ast.parse(config_file.read_text()) + config_module = ast_parse_module(config_file) if _has_function(config_module, ast.AsyncFunctionDef, "async_validate_config"): return diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index 66796d4dd0d..62644e19c5e 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -10,6 +10,7 @@ from pathlib import Path from homeassistant.const import Platform from homeassistant.requirements import DISCOVERY_INTEGRATIONS +from . import ast_parse_module from .model import Config, Integration @@ -33,7 +34,7 @@ class ImportCollector(ast.NodeVisitor): self._cur_fil_dir = fil.relative_to(self.integration.path) self.referenced[self._cur_fil_dir] = set() try: - self.visit(ast.parse(fil.read_text())) + self.visit(ast_parse_module(fil)) except SyntaxError as e: e.add_note(f"File: {fil}") raise @@ -44,6 +45,15 @@ class ImportCollector(ast.NodeVisitor): assert self._cur_fil_dir self.referenced[self._cur_fil_dir].add(reference_domain) + def visit_If(self, node: ast.If) -> None: + """Visit If node.""" + if isinstance(node.test, ast.Name) and node.test.id == "TYPE_CHECKING": + # Ignore TYPE_CHECKING block + return + + # Have it visit other kids + self.generic_visit(node) + def visit_ImportFrom(self, node: ast.ImportFrom) -> None: """Visit ImportFrom node.""" if node.module is None: @@ -112,10 +122,10 @@ ALLOWED_USED_COMPONENTS = { "alert", "automation", "conversation", + "default_config", "device_automation", "frontend", "group", - "hassio", "homeassistant", "input_boolean", "input_button", diff --git a/script/hassfest/dhcp.py b/script/hassfest/dhcp.py index d1fd0474430..8a8f344f6cb 100644 --- a/script/hassfest/dhcp.py +++ b/script/hassfest/dhcp.py @@ -32,19 +32,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(dhcp_path)) as fp: - current = fp.read() - if current != content: - config.add_error( - "dhcp", - "File dhcp.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) - return + if dhcp_path.read_text() != content: + config.add_error( + "dhcp", + "File dhcp.py is not up to date. 
Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate dhcp file.""" dhcp_path = config.root / "homeassistant/generated/dhcp.py" - with open(str(dhcp_path), "w") as fp: - fp.write(f"{config.cache['dhcp']}") + dhcp_path.write_text(f"{config.cache['dhcp']}") diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index e38a238be7d..022caee30cd 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -1,7 +1,13 @@ """Generate and validate the dockerfile.""" +from dataclasses import dataclass +from pathlib import Path + from homeassistant import core +from homeassistant.components.go2rtc.const import RECOMMENDED_VERSION as GO2RTC_VERSION +from homeassistant.const import Platform from homeassistant.util import executor, thread +from script.gen_requirements_all import gather_recursive_requirements from .model import Config, Integration from .requirements import PACKAGE_REGEX, PIP_VERSION_RANGE_SEPARATOR @@ -15,12 +21,13 @@ FROM ${{BUILD_FROM}} # Synchronize with homeassistant/core.py:async_stop ENV \ S6_SERVICES_GRACETIME={timeout} \ - UV_SYSTEM_PYTHON=true + UV_SYSTEM_PYTHON=true \ + UV_NO_CACHE=true ARG QEMU_CPU # Install uv -RUN pip3 install uv=={uv_version} +RUN pip3 install uv=={uv} WORKDIR /usr/src @@ -37,15 +44,9 @@ RUN \ if ls homeassistant/home_assistant_*.whl 1> /dev/null 2>&1; then \ uv pip install homeassistant/home_assistant_*.whl; \ fi \ - && if [ "${{BUILD_ARCH}}" = "i386" ]; then \ - linux32 uv pip install \ - --no-build \ - -r homeassistant/requirements_all.txt; \ - else \ - uv pip install \ - --no-build \ - -r homeassistant/requirements_all.txt; \ - fi + && uv pip install \ + --no-build \ + -r homeassistant/requirements_all.txt ## Setup Home Assistant Core COPY . homeassistant/ @@ -58,33 +59,122 @@ RUN \ # Home Assistant S6-Overlay COPY rootfs / +# Needs to be redefined inside the FROM statement to be set for RUN commands +ARG BUILD_ARCH +# Get go2rtc binary +RUN \ + case "${{BUILD_ARCH}}" in \ + "aarch64") go2rtc_suffix='arm64' ;; \ + "armhf") go2rtc_suffix='armv6' ;; \ + "armv7") go2rtc_suffix='arm' ;; \ + *) go2rtc_suffix=${{BUILD_ARCH}} ;; \ + esac \ + && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v{go2rtc}/go2rtc_linux_${{go2rtc_suffix}} --output /bin/go2rtc \ + && chmod +x /bin/go2rtc \ + # Verify go2rtc can be executed + && go2rtc --version + WORKDIR /config """ +_HASSFEST_TEMPLATE = r"""# Automatically generated by hassfest. +# +# To update, run python3 -m script.hassfest -p docker +FROM python:3.13-alpine -def _get_uv_version() -> str: - with open("requirements_test.txt") as fp: +ENV \ + UV_SYSTEM_PYTHON=true \ + UV_EXTRA_INDEX_URL="https://wheels.home-assistant.io/musllinux-index/" + +SHELL ["/bin/sh", "-o", "pipefail", "-c"] +ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] +WORKDIR "/github/workspace" + +COPY . 
/usr/src/homeassistant + +# Uv is only needed during build +RUN --mount=from=ghcr.io/astral-sh/uv:{uv},source=/uv,target=/bin/uv \ + # Required for PyTurboJPEG + apk add --no-cache libturbojpeg \ + && uv pip install \ + --no-build \ + --no-cache \ + -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ + -r /usr/src/homeassistant/requirements.txt \ + stdlib-list==0.10.0 pipdeptree=={pipdeptree} tqdm=={tqdm} ruff=={ruff} \ + {required_components_packages} + +LABEL "name"="hassfest" +LABEL "maintainer"="Home Assistant " + +LABEL "com.github.actions.name"="hassfest" +LABEL "com.github.actions.description"="Run hassfest to validate standalone integration repositories" +LABEL "com.github.actions.icon"="terminal" +LABEL "com.github.actions.color"="gray-dark" +""" + + +def _get_package_versions(file: Path, packages: set[str]) -> dict[str, str]: + package_versions: dict[str, str] = {} + with file.open(encoding="UTF-8") as fp: for _, line in enumerate(fp): + if package_versions.keys() == packages: + return package_versions + if match := PACKAGE_REGEX.match(line): pkg, sep, version = match.groups() - if pkg != "uv": + if pkg not in packages: continue if sep != "==" or not version: raise RuntimeError( - 'Requirement uv need to be pinned "uv==".' + f'Requirement {pkg} need to be pinned "{pkg}==".' ) for part in version.split(";", 1)[0].split(","): version_part = PIP_VERSION_RANGE_SEPARATOR.match(part) if version_part: - return version_part.group(2) + package_versions[pkg] = version_part.group(2) + break - raise RuntimeError("Invalid uv requirement in requirements_test.txt") + if package_versions.keys() == packages: + return package_versions + + raise RuntimeError("At least one package was not found in the requirements file.") -def _generate_dockerfile() -> str: +@dataclass +class File: + """File.""" + + content: str + path: Path + + +def _generate_hassfest_dockerimage( + config: Config, timeout: int, package_versions: dict[str, str] +) -> File: + packages = set() + already_checked_domains = set() + for platform in Platform: + packages.update( + gather_recursive_requirements(platform.value, already_checked_domains) + ) + # Add go2rtc requirements as this file needs the go2rtc integration + packages.update(gather_recursive_requirements("go2rtc", already_checked_domains)) + + return File( + _HASSFEST_TEMPLATE.format( + timeout=timeout, + required_components_packages=" ".join(sorted(packages)), + **package_versions, + ), + config.root / "script/hassfest/docker/Dockerfile", + ) + + +def _generate_files(config: Config) -> list[File]: timeout = ( core.STOPPING_STAGE_SHUTDOWN_TIMEOUT + core.STOP_STAGE_SHUTDOWN_TIMEOUT @@ -93,27 +183,44 @@ def _generate_dockerfile() -> str: + executor.EXECUTOR_SHUTDOWN_TIMEOUT + thread.THREADING_SHUTDOWN_TIMEOUT + 10 + ) * 1000 + + package_versions = _get_package_versions(config.root / "requirements.txt", {"uv"}) + package_versions |= _get_package_versions( + config.root / "requirements_test.txt", {"pipdeptree", "tqdm"} ) - return DOCKERFILE_TEMPLATE.format( - timeout=timeout * 1000, uv_version=_get_uv_version() + package_versions |= _get_package_versions( + config.root / "requirements_test_pre_commit.txt", {"ruff"} ) + return [ + File( + DOCKERFILE_TEMPLATE.format( + timeout=timeout, + **package_versions, + go2rtc=GO2RTC_VERSION, + ), + config.root / "Dockerfile", + ), + _generate_hassfest_dockerimage(config, timeout, package_versions), + ] + def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate dockerfile.""" - dockerfile_content 
= _generate_dockerfile() - config.cache["dockerfile"] = dockerfile_content + docker_files = _generate_files(config) + config.cache["docker"] = docker_files - dockerfile_path = config.root / "Dockerfile" - if dockerfile_path.read_text() != dockerfile_content: - config.add_error( - "docker", - "File Dockerfile is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + for file in docker_files: + if file.content != file.path.read_text(): + config.add_error( + "docker", + f"File {file.path} is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate dockerfile.""" - dockerfile_path = config.root / "Dockerfile" - dockerfile_path.write_text(config.cache["dockerfile"]) + for file in _generate_files(config): + file.path.write_text(file.content) diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile new file mode 100644 index 00000000000..369beb538ed --- /dev/null +++ b/script/hassfest/docker/Dockerfile @@ -0,0 +1,34 @@ +# Automatically generated by hassfest. +# +# To update, run python3 -m script.hassfest -p docker +FROM python:3.13-alpine + +ENV \ + UV_SYSTEM_PYTHON=true \ + UV_EXTRA_INDEX_URL="https://wheels.home-assistant.io/musllinux-index/" + +SHELL ["/bin/sh", "-o", "pipefail", "-c"] +ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] +WORKDIR "/github/workspace" + +COPY . /usr/src/homeassistant + +# Uv is only needed during build +RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \ + # Required for PyTurboJPEG + apk add --no-cache libturbojpeg \ + && uv pip install \ + --no-build \ + --no-cache \ + -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ + -r /usr/src/homeassistant/requirements.txt \ + stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.3 \ + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.9 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + +LABEL "name"="hassfest" +LABEL "maintainer"="Home Assistant " + +LABEL "com.github.actions.name"="hassfest" +LABEL "com.github.actions.description"="Run hassfest to validate standalone integration repositories" +LABEL "com.github.actions.icon"="terminal" +LABEL "com.github.actions.color"="gray-dark" diff --git a/script/hassfest/docker/Dockerfile.dockerignore b/script/hassfest/docker/Dockerfile.dockerignore new file mode 100644 index 00000000000..c109421fce1 --- /dev/null +++ b/script/hassfest/docker/Dockerfile.dockerignore @@ -0,0 +1,11 @@ +# Ignore everything except the specified files +* + +!homeassistant/ +!requirements.txt +!script/ +script/hassfest/docker/ +!script/hassfest/docker/entrypoint.sh + +# Temporary files +**/__pycache__ \ No newline at end of file diff --git a/script/hassfest/docker/entrypoint.sh b/script/hassfest/docker/entrypoint.sh new file mode 100755 index 00000000000..eabc08a9499 --- /dev/null +++ b/script/hassfest/docker/entrypoint.sh @@ -0,0 +1,30 @@ +#!/bin/sh + +integrations="" +integration_path="" +core_path_provided=false + +for arg in "$@"; do + case "$arg" in + --core-path=*) + core_path_provided=true + break + ;; + esac +done + +if [ "$core_path_provided" = false ]; then + # Enable recursive globbing using find + for manifest in $(find . 
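
# Minimal sketch of the validate/generate pattern used above: hassfest first
# renders every target file in memory (the File dataclass), then either
# compares it against what is on disk (validate) or writes it out (generate).
# Paths and contents below are made up for illustration.
from dataclasses import dataclass
from pathlib import Path
import tempfile

@dataclass
class File:
    """Rendered file content plus its target path."""
    content: str
    path: Path

def is_up_to_date(file: File) -> bool:
    # validate step: the on-disk copy must match the rendered content exactly
    return file.path.exists() and file.path.read_text() == file.content

with tempfile.TemporaryDirectory() as tmp:
    file = File("FROM python:3.13-alpine\n", Path(tmp) / "Dockerfile")
    assert not is_up_to_date(file)       # would be reported as "not up to date"
    file.path.write_text(file.content)   # generate step
    assert is_up_to_date(file)
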
-name "manifest.json"); do + manifest_path=$(realpath "${manifest}") + integrations="$integrations --integration-path ${manifest_path%/*}" + done + + if [ -z "$integrations" ]; then + echo "Error: No integrations found!" + exit 1 + fi +fi + +cd /usr/src/homeassistant || exit 1 +exec python3 -m script.hassfest --action validate $integrations "$@" diff --git a/script/hassfest/icons.py b/script/hassfest/icons.py index 10f666b9013..f6bcd865c23 100644 --- a/script/hassfest/icons.py +++ b/script/hassfest/icons.py @@ -9,6 +9,7 @@ import voluptuous as vol from voluptuous.humanize import humanize_error import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.icon import convert_shorthand_service_icon from .model import Config, Integration from .translations import translation_key_validator @@ -60,7 +61,38 @@ DATA_ENTRY_ICONS_SCHEMA = vol.Schema( ) -def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema: +CORE_SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys( + vol.Schema( + { + vol.Optional("service"): icon_value_validator, + vol.Optional("sections"): cv.schema_with_slug_keys( + icon_value_validator, slug_validator=translation_key_validator + ), + } + ), + slug_validator=translation_key_validator, +) + + +CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys( + vol.All( + convert_shorthand_service_icon, + vol.Schema( + { + vol.Optional("service"): icon_value_validator, + vol.Optional("sections"): cv.schema_with_slug_keys( + icon_value_validator, slug_validator=translation_key_validator + ), + } + ), + ), + slug_validator=translation_key_validator, +) + + +def icon_schema( + core_integration: bool, integration_type: str, no_entity_platform: bool +) -> vol.Schema: """Create an icon schema.""" state_validator = cv.schema_with_slug_keys( @@ -91,7 +123,9 @@ def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema: {str: {"fix_flow": DATA_ENTRY_ICONS_SCHEMA}} ), vol.Optional("options"): DATA_ENTRY_ICONS_SCHEMA, - vol.Optional("services"): state_validator, + vol.Optional("services"): CORE_SERVICE_ICONS_SCHEMA + if core_integration + else CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA, } ) @@ -146,7 +180,9 @@ def validate_icon_file(config: Config, integration: Integration) -> None: no_entity_platform = integration.domain in ("notify", "image_processing") - schema = icon_schema(integration.integration_type, no_entity_platform) + schema = icon_schema( + integration.core, integration.integration_type, no_entity_platform + ) try: schema(icons) diff --git a/script/hassfest/manifest.py b/script/hassfest/manifest.py index 1c01ee7cf58..fdbcf5bcb78 100644 --- a/script/hassfest/manifest.py +++ b/script/hassfest/manifest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from enum import IntEnum +from enum import StrEnum, auto import json from pathlib import Path import subprocess @@ -20,7 +20,7 @@ from voluptuous.humanize import humanize_error from homeassistant.const import Platform from homeassistant.helpers import config_validation as cv -from .model import Config, Integration +from .model import Config, Integration, ScaledQualityScaleTiers DOCUMENTATION_URL_SCHEMA = "https" DOCUMENTATION_URL_HOST = "www.home-assistant.io" @@ -28,16 +28,20 @@ DOCUMENTATION_URL_PATH_PREFIX = "/integrations/" DOCUMENTATION_URL_EXCEPTIONS = {"https://www.home-assistant.io/hassio"} -class QualityScale(IntEnum): +class NonScaledQualityScaleTiers(StrEnum): """Supported manifest quality scales.""" - INTERNAL = -1 - SILVER = 1 - GOLD = 2 - PLATINUM = 3 + 
CUSTOM = auto() + NO_SCORE = auto() + INTERNAL = auto() + LEGACY = auto() -SUPPORTED_QUALITY_SCALES = [enum.name.lower() for enum in QualityScale] +SUPPORTED_QUALITY_SCALES = [ + value.name.lower() + for enum in [ScaledQualityScaleTiers, NonScaledQualityScaleTiers] + for value in enum +] SUPPORTED_IOT_CLASSES = [ "assumed_state", "calculated", @@ -88,12 +92,10 @@ NO_IOT_CLASS = [ "logbook", "logger", "lovelace", - "map", "media_source", "my", "onboarding", "panel_custom", - "panel_iframe", "plant", "profiler", "proxy", @@ -113,21 +115,6 @@ NO_IOT_CLASS = [ "websocket_api", "zone", ] -# Grandfather rule for older integrations -# https://github.com/home-assistant/developers.home-assistant/pull/1512 -NO_DIAGNOSTICS = [ - "dlna_dms", - "gdacs", - "geonetnz_quakes", - "hyperion", - "nightscout", - "pvpc_hourly_pricing", - "risco", - "smarttub", - "songpal", - "vizio", - "yeelight", -] def documentation_url(value: str) -> str: @@ -272,7 +259,6 @@ INTEGRATION_MANIFEST_SCHEMA = vol.Schema( ) ], vol.Required("documentation"): vol.All(vol.Url(), documentation_url), - vol.Optional("issue_tracker"): vol.Url(), vol.Optional("quality_scale"): vol.In(SUPPORTED_QUALITY_SCALES), vol.Optional("requirements"): [str], vol.Optional("dependencies"): [str], @@ -308,6 +294,7 @@ def manifest_schema(value: dict[str, Any]) -> vol.Schema: CUSTOM_INTEGRATION_MANIFEST_SCHEMA = INTEGRATION_MANIFEST_SCHEMA.extend( { vol.Optional("version"): vol.All(str, verify_version), + vol.Optional("issue_tracker"): vol.Url(), vol.Optional("import_executor"): bool, } ) @@ -363,36 +350,17 @@ def validate_manifest(integration: Integration, core_components_dir: Path) -> No "Virtual integration points to non-existing supported_by integration", ) - if (quality_scale := integration.manifest.get("quality_scale")) and QualityScale[ - quality_scale.upper() - ] > QualityScale.SILVER: + if ( + (quality_scale := integration.manifest.get("quality_scale")) + and quality_scale.upper() in ScaledQualityScaleTiers + and ScaledQualityScaleTiers[quality_scale.upper()] + >= ScaledQualityScaleTiers.SILVER + ): if not integration.manifest.get("codeowners"): integration.add_error( "manifest", f"{quality_scale} integration does not have a code owner", ) - if ( - domain not in NO_DIAGNOSTICS - and not (integration.path / "diagnostics.py").exists() - ): - integration.add_error( - "manifest", - f"{quality_scale} integration does not implement diagnostics", - ) - - if domain in NO_DIAGNOSTICS: - if quality_scale and QualityScale[quality_scale.upper()] < QualityScale.GOLD: - integration.add_error( - "manifest", - "{quality_scale} integration should be " - "removed from NO_DIAGNOSTICS in script/hassfest/manifest.py", - ) - elif (integration.path / "diagnostics.py").exists(): - integration.add_error( - "manifest", - "Implements diagnostics and can be " - "removed from NO_DIAGNOSTICS in script/hassfest/manifest.py", - ) if not integration.core: validate_version(integration) diff --git a/script/hassfest/metadata.py b/script/hassfest/metadata.py index bd3ac4514e7..0768e875016 100644 --- a/script/hassfest/metadata.py +++ b/script/hassfest/metadata.py @@ -10,8 +10,7 @@ from .model import Config, Integration def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate project metadata keys.""" metadata_path = config.root / "pyproject.toml" - with open(metadata_path, "rb") as fp: - data = tomllib.load(fp) + data = tomllib.loads(metadata_path.read_text()) try: if data["project"]["version"] != __version__: diff --git a/script/hassfest/model.py 
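
# Small self-contained illustration of how the supported quality_scale values
# become the union of the scaled tiers and the non-scaled markers; the enum
# definitions mirror ScaledQualityScaleTiers (model.py) and
# NonScaledQualityScaleTiers above.
from enum import IntEnum, StrEnum, auto

class ScaledQualityScaleTiers(IntEnum):
    BRONZE = 1
    SILVER = 2
    GOLD = 3
    PLATINUM = 4

class NonScaledQualityScaleTiers(StrEnum):
    CUSTOM = auto()
    NO_SCORE = auto()
    INTERNAL = auto()
    LEGACY = auto()

SUPPORTED_QUALITY_SCALES = [
    value.name.lower()
    for enum in (ScaledQualityScaleTiers, NonScaledQualityScaleTiers)
    for value in enum
]
assert "bronze" in SUPPORTED_QUALITY_SCALES and "legacy" in SUPPORTED_QUALITY_SCALES
# IntEnum ordering is what lets the manifest check compare tiers directly.
assert ScaledQualityScaleTiers.GOLD >= ScaledQualityScaleTiers.SILVER
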
b/script/hassfest/model.py index 736fb6874be..08ded687096 100644 --- a/script/hassfest/model.py +++ b/script/hassfest/model.py @@ -3,6 +3,7 @@ from __future__ import annotations from dataclasses import dataclass, field +from enum import IntEnum import json import pathlib from typing import Any, Literal @@ -29,10 +30,15 @@ class Config: root: pathlib.Path action: Literal["validate", "generate"] requirements: bool + core_integrations_path: pathlib.Path = field(init=False) errors: list[Error] = field(default_factory=list) cache: dict[str, Any] = field(default_factory=dict) plugins: set[str] = field(default_factory=set) + def __post_init__(self) -> None: + """Post init.""" + self.core_integrations_path = self.root / "homeassistant/components" + def add_error(self, *args: Any, **kwargs: Any) -> None: """Add an error.""" self.errors.append(Error(*args, **kwargs)) @@ -105,7 +111,7 @@ class Integration: """Represent an integration in our validator.""" @classmethod - def load_dir(cls, path: pathlib.Path) -> dict[str, Integration]: + def load_dir(cls, path: pathlib.Path, config: Config) -> dict[str, Integration]: """Load all integrations in a directory.""" assert path.is_dir() integrations: dict[str, Integration] = {} @@ -123,13 +129,14 @@ class Integration: ) continue - integration = cls(fil) + integration = cls(fil, config) integration.load_manifest() integrations[integration.domain] = integration return integrations path: pathlib.Path + _config: Config _manifest: dict[str, Any] | None = None manifest_path: pathlib.Path | None = None errors: list[Error] = field(default_factory=list) @@ -150,7 +157,9 @@ class Integration: @property def core(self) -> bool: """Core integration.""" - return self.path.as_posix().startswith("homeassistant/components") + return self.path.as_posix().startswith( + self._config.core_integrations_path.as_posix() + ) @property def disabled(self) -> str | None: @@ -226,3 +235,12 @@ class Integration: self._manifest = manifest self.manifest_path = manifest_path + + +class ScaledQualityScaleTiers(IntEnum): + """Supported manifest quality scales.""" + + BRONZE = 1 + SILVER = 2 + GOLD = 3 + PLATINUM = 4 diff --git a/script/hassfest/mqtt.py b/script/hassfest/mqtt.py index b2112d9bb6a..54ee65aaa35 100644 --- a/script/hassfest/mqtt.py +++ b/script/hassfest/mqtt.py @@ -33,17 +33,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(mqtt_path)) as fp: - if fp.read() != content: - config.add_error( - "mqtt", - "File mqtt.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + if mqtt_path.read_text() != content: + config.add_error( + "mqtt", + "File mqtt.py is not up to date. 
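
# Sketch of the path-prefix check behind Integration.core: an integration is
# "core" when it lives under <config.root>/homeassistant/components, which
# Config.__post_init__ stores as core_integrations_path. Paths below are made
# up for illustration.
from pathlib import Path

def is_core(integration_path: Path, core_integrations_path: Path) -> bool:
    return integration_path.as_posix().startswith(core_integrations_path.as_posix())

root = Path("/src/ha-core")
core_components = root / "homeassistant/components"
assert is_core(root / "homeassistant/components/hue", core_components)
assert not is_core(Path("/src/my-config/custom_components/hue"), core_components)
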
Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate MQTT file.""" mqtt_path = config.root / "homeassistant/generated/mqtt.py" - with open(str(mqtt_path), "w") as fp: - fp.write(f"{config.cache['mqtt']}") + mqtt_path.write_text(f"{config.cache['mqtt']}") diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index d2aff81aa05..1d7f2b5ed88 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -33,17 +33,21 @@ HEADER: Final = """ GENERAL_SETTINGS: Final[dict[str, str]] = { "python_version": ".".join(str(x) for x in REQUIRED_PYTHON_VER[:2]), "platform": "linux", - "plugins": "pydantic.mypy", - "show_error_codes": "true", - "follow_imports": "normal", - "enable_incomplete_feature": ", ".join( # noqa: FLY002 + "plugins": ", ".join( # noqa: FLY002 [ - "NewGenericSyntax", + "pydantic.mypy", + "pydantic.v1.mypy", ] ), + "show_error_codes": "true", + "follow_imports": "normal", + # "enable_incomplete_feature": ", ".join( # noqa: FLY002 + # [] + # ), # Enable some checks globally. "local_partial_types": "true", "strict_equality": "true", + "strict_bytes": "true", "no_implicit_optional": "true", "warn_incomplete_stub": "true", "warn_redundant_casts": "true", @@ -51,6 +55,7 @@ GENERAL_SETTINGS: Final[dict[str, str]] = { "warn_unused_ignores": "true", "enable_error_code": ", ".join( # noqa: FLY002 [ + "deprecated", "ignore-without-code", "redundant-self", "truthy-iterable", diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py new file mode 100644 index 00000000000..4e5cee2d16d --- /dev/null +++ b/script/hassfest/quality_scale.py @@ -0,0 +1,1377 @@ +"""Validate integration quality scale files.""" + +from __future__ import annotations + +from dataclasses import dataclass + +import voluptuous as vol +from voluptuous.humanize import humanize_error + +from homeassistant.const import Platform +from homeassistant.exceptions import HomeAssistantError +from homeassistant.util.yaml import load_yaml_dict + +from .model import Config, Integration, ScaledQualityScaleTiers +from .quality_scale_validation import ( + RuleValidationProtocol, + config_entry_unloading, + config_flow, + diagnostics, + discovery, + parallel_updates, + reauthentication_flow, + reconfiguration_flow, + runtime_data, + strict_typing, + test_before_setup, + unique_config_entry, +) + +QUALITY_SCALE_TIERS = {value.name.lower(): value for value in ScaledQualityScaleTiers} + + +@dataclass +class Rule: + """Quality scale rules.""" + + name: str + tier: ScaledQualityScaleTiers + validator: RuleValidationProtocol | None = None + + +ALL_RULES = [ + # BRONZE + Rule("action-setup", ScaledQualityScaleTiers.BRONZE), + Rule("appropriate-polling", ScaledQualityScaleTiers.BRONZE), + Rule("brands", ScaledQualityScaleTiers.BRONZE), + Rule("common-modules", ScaledQualityScaleTiers.BRONZE), + Rule("config-flow", ScaledQualityScaleTiers.BRONZE, config_flow), + Rule("config-flow-test-coverage", ScaledQualityScaleTiers.BRONZE), + Rule("dependency-transparency", ScaledQualityScaleTiers.BRONZE), + Rule("docs-actions", ScaledQualityScaleTiers.BRONZE), + Rule("docs-high-level-description", ScaledQualityScaleTiers.BRONZE), + Rule("docs-installation-instructions", ScaledQualityScaleTiers.BRONZE), + Rule("docs-removal-instructions", ScaledQualityScaleTiers.BRONZE), + Rule("entity-event-setup", ScaledQualityScaleTiers.BRONZE), + Rule("entity-unique-id", ScaledQualityScaleTiers.BRONZE), + 
Rule("has-entity-name", ScaledQualityScaleTiers.BRONZE), + Rule("runtime-data", ScaledQualityScaleTiers.BRONZE, runtime_data), + Rule("test-before-configure", ScaledQualityScaleTiers.BRONZE), + Rule("test-before-setup", ScaledQualityScaleTiers.BRONZE, test_before_setup), + Rule("unique-config-entry", ScaledQualityScaleTiers.BRONZE, unique_config_entry), + # SILVER + Rule("action-exceptions", ScaledQualityScaleTiers.SILVER), + Rule( + "config-entry-unloading", ScaledQualityScaleTiers.SILVER, config_entry_unloading + ), + Rule("docs-configuration-parameters", ScaledQualityScaleTiers.SILVER), + Rule("docs-installation-parameters", ScaledQualityScaleTiers.SILVER), + Rule("entity-unavailable", ScaledQualityScaleTiers.SILVER), + Rule("integration-owner", ScaledQualityScaleTiers.SILVER), + Rule("log-when-unavailable", ScaledQualityScaleTiers.SILVER), + Rule("parallel-updates", ScaledQualityScaleTiers.SILVER, parallel_updates), + Rule( + "reauthentication-flow", ScaledQualityScaleTiers.SILVER, reauthentication_flow + ), + Rule("test-coverage", ScaledQualityScaleTiers.SILVER), + # GOLD: [ + Rule("devices", ScaledQualityScaleTiers.GOLD), + Rule("diagnostics", ScaledQualityScaleTiers.GOLD, diagnostics), + Rule("discovery", ScaledQualityScaleTiers.GOLD, discovery), + Rule("discovery-update-info", ScaledQualityScaleTiers.GOLD), + Rule("docs-data-update", ScaledQualityScaleTiers.GOLD), + Rule("docs-examples", ScaledQualityScaleTiers.GOLD), + Rule("docs-known-limitations", ScaledQualityScaleTiers.GOLD), + Rule("docs-supported-devices", ScaledQualityScaleTiers.GOLD), + Rule("docs-supported-functions", ScaledQualityScaleTiers.GOLD), + Rule("docs-troubleshooting", ScaledQualityScaleTiers.GOLD), + Rule("docs-use-cases", ScaledQualityScaleTiers.GOLD), + Rule("dynamic-devices", ScaledQualityScaleTiers.GOLD), + Rule("entity-category", ScaledQualityScaleTiers.GOLD), + Rule("entity-device-class", ScaledQualityScaleTiers.GOLD), + Rule("entity-disabled-by-default", ScaledQualityScaleTiers.GOLD), + Rule("entity-translations", ScaledQualityScaleTiers.GOLD), + Rule("exception-translations", ScaledQualityScaleTiers.GOLD), + Rule("icon-translations", ScaledQualityScaleTiers.GOLD), + Rule("reconfiguration-flow", ScaledQualityScaleTiers.GOLD, reconfiguration_flow), + Rule("repair-issues", ScaledQualityScaleTiers.GOLD), + Rule("stale-devices", ScaledQualityScaleTiers.GOLD), + # PLATINUM + Rule("async-dependency", ScaledQualityScaleTiers.PLATINUM), + Rule("inject-websession", ScaledQualityScaleTiers.PLATINUM), + Rule("strict-typing", ScaledQualityScaleTiers.PLATINUM, strict_typing), +] + +SCALE_RULES = { + tier: [rule.name for rule in ALL_RULES if rule.tier == tier] + for tier in ScaledQualityScaleTiers +} + +VALIDATORS = {rule.name: rule.validator for rule in ALL_RULES if rule.validator} + +RULE_URL = ( + "Please check the documentation at " + "https://developers.home-assistant.io/docs/core/" + "integration-quality-scale/rules/{rule_name}/" +) + +INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ + "abode", + "accuweather", + "acer_projector", + "acmeda", + "actiontec", + "adax", + "adguard", + "ads", + "advantage_air", + "aemet", + "aftership", + "agent_dvr", + "airly", + "airnow", + "airq", + "airthings", + "airthings_ble", + "airtouch4", + "airtouch5", + "airvisual", + "airvisual_pro", + "airzone", + "airzone_cloud", + "aladdin_connect", + "alarmdecoder", + "alert", + "alexa", + "alpha_vantage", + "amazon_polly", + "amberelectric", + "ambient_network", + "ambient_station", + "amcrest", + "ampio", + "analytics", + 
"analytics_insights", + "android_ip_webcam", + "androidtv", + "androidtv_remote", + "anel_pwrctrl", + "anova", + "anthemav", + "anthropic", + "aosmith", + "apache_kafka", + "apcupsd", + "apple_tv", + "apprise", + "aprilaire", + "aprs", + "apsystems", + "aquacell", + "aqualogic", + "aquostv", + "aranet", + "arcam_fmj", + "arest", + "arris_tg2492lg", + "aruba", + "arve", + "arwn", + "aseko_pool_live", + "assist_pipeline", + "asterisk_mbox", + "asuswrt", + "atag", + "aten_pe", + "atome", + "august", + "aurora", + "aurora_abb_powerone", + "aussie_broadband", + "avea", + "avion", + "awair", + "aws", + "axis", + "azure_data_explorer", + "azure_devops", + "azure_event_hub", + "azure_service_bus", + "backup", + "baf", + "baidu", + "balboa", + "bang_olufsen", + "bayesian", + "bbox", + "beewi_smartclim", + "bitcoin", + "bizkaibus", + "blackbird", + "blebox", + "blink", + "blinksticklight", + "blockchain", + "blue_current", + "bluemaestro", + "bluesound", + "bluetooth", + "bluetooth_adapters", + "bluetooth_le_tracker", + "bluetooth_tracker", + "bmw_connected_drive", + "bond", + "bosch_shc", + "braviatv", + "broadlink", + "brother", + "brottsplatskartan", + "browser", + "brunt", + "bryant_evolution", + "bsblan", + "bt_home_hub_5", + "bt_smarthub", + "bthome", + "buienradar", + "caldav", + "canary", + "cast", + "ccm15", + "cert_expiry", + "chacon_dio", + "channels", + "circuit", + "cisco_ios", + "cisco_mobility_express", + "cisco_webex_teams", + "citybikes", + "clementine", + "clickatell", + "clicksend", + "clicksend_tts", + "climacell", + "cloud", + "cloudflare", + "cmus", + "co2signal", + "coinbase", + "color_extractor", + "comed_hourly_pricing", + "comelit", + "comfoconnect", + "command_line", + "compensation", + "concord232", + "control4", + "coolmaster", + "cppm_tracker", + "cpuspeed", + "crownstone", + "cups", + "currencylayer", + "daikin", + "danfoss_air", + "datadog", + "ddwrt", + "deako", + "debugpy", + "deconz", + "decora", + "decora_wifi", + "delijn", + "deluge", + "demo", + "denon", + "denonavr", + "derivative", + "devialet", + "device_sun_light_trigger", + "devolo_home_control", + "devolo_home_network", + "dexcom", + "dhcp", + "dialogflow", + "digital_ocean", + "directv", + "discogs", + "discord", + "dlib_face_detect", + "dlib_face_identify", + "dlink", + "dlna_dmr", + "dlna_dms", + "dnsip", + "dominos", + "doods", + "doorbird", + "dormakaba_dkey", + "dovado", + "downloader", + "dremel_3d_printer", + "drop_connect", + "dsmr", + "dsmr_reader", + "dublin_bus_transport", + "duckdns", + "duke_energy", + "dunehd", + "duotecno", + "dwd_weather_warnings", + "dweet", + "dynalite", + "eafm", + "easyenergy", + "ebox", + "ebusd", + "ecoal_boiler", + "ecobee", + "ecoforest", + "econet", + "ecovacs", + "ecowitt", + "eddystone_temperature", + "edimax", + "edl21", + "efergy", + "egardia", + "eight_sleep", + "electrasmart", + "electric_kiwi", + "eliqonline", + "elkm1", + "elmax", + "elv", + "elvia", + "emby", + "emoncms", + "emoncms_history", + "emonitor", + "emulated_hue", + "emulated_kasa", + "emulated_roku", + "energenie_power_sockets", + "energy", + "energyzero", + "enigma2", + "enocean", + "entur_public_transport", + "environment_canada", + "envisalink", + "ephember", + "epic_games_store", + "epion", + "epson", + "eq3btsmart", + "escea", + "esphome", + "etherscan", + "eufy", + "eufylife_ble", + "everlights", + "evil_genius_labs", + "evohome", + "ezviz", + "faa_delays", + "facebook", + "fail2ban", + "familyhub", + "fastdotcom", + "feedreader", + "ffmpeg_motion", + "ffmpeg_noise", + "fibaro", + "fido", 
+ "file", + "filesize", + "filter", + "fints", + "fireservicerota", + "firmata", + "fivem", + "fixer", + "fjaraskupan", + "fleetgo", + "flexit", + "flexit_bacnet", + "flic", + "flick_electric", + "flipr", + "flo", + "flock", + "flume", + "flux", + "flux_led", + "folder", + "folder_watcher", + "foobot", + "forecast_solar", + "forked_daapd", + "fortios", + "foscam", + "foursquare", + "free_mobile", + "freebox", + "freedns", + "freedompro", + "fritzbox", + "fritzbox_callmonitor", + "frontier_silicon", + "fujitsu_fglair", + "fujitsu_hvac", + "futurenow", + "garadget", + "garages_amsterdam", + "gardena_bluetooth", + "gc100", + "gdacs", + "generic", + "generic_hygrostat", + "generic_thermostat", + "geniushub", + "geo_json_events", + "geo_rss_events", + "geocaching", + "geofency", + "geonetnz_quakes", + "geonetnz_volcano", + "gios", + "github", + "gitlab_ci", + "gitter", + "glances", + "go2rtc", + "goalzero", + "gogogate2", + "goodwe", + "google", + "google_assistant", + "google_assistant_sdk", + "google_cloud", + "google_domains", + "google_generative_ai_conversation", + "google_mail", + "google_maps", + "google_pubsub", + "google_sheets", + "google_translate", + "google_travel_time", + "google_wifi", + "govee_ble", + "govee_light_local", + "gpsd", + "gpslogger", + "graphite", + "gree", + "greeneye_monitor", + "greenwave", + "group", + "growatt_server", + "gstreamer", + "gtfs", + "guardian", + "harman_kardon_avr", + "harmony", + "hassio", + "haveibeenpwned", + "hddtemp", + "hdmi_cec", + "heatmiser", + "here_travel_time", + "hikvision", + "hikvisioncam", + "hisense_aehw4a1", + "history_stats", + "hitron_coda", + "hive", + "hko", + "hlk_sw16", + "holiday", + "home_connect", + "homekit", + "homekit_controller", + "homematic", + "homematicip_cloud", + "homeworks", + "honeywell", + "horizon", + "hp_ilo", + "html5", + "http", + "huawei_lte", + "hue", + "huisbaasje", + "hunterdouglas_powerview", + "husqvarna_automower_ble", + "huum", + "hvv_departures", + "hydrawise", + "hyperion", + "ialarm", + "iammeter", + "iaqualink", + "ibeacon", + "icloud", + "idteck_prox", + "ifttt", + "iglo", + "ign_sismologia", + "ihc", + "imgw_pib", + "improv_ble", + "incomfort", + "influxdb", + "inkbird", + "insteon", + "integration", + "intellifire", + "intesishome", + "ios", + "iotawatt", + "iotty", + "iperf3", + "ipma", + "ipp", + "iqvia", + "irish_rail_transport", + "isal", + "iskra", + "islamic_prayer_times", + "israel_rail", + "iss", + "isy994", + "itach", + "itunes", + "izone", + "jellyfin", + "jewish_calendar", + "joaoapps_join", + "juicenet", + "justnimbus", + "jvc_projector", + "kaiterra", + "kaleidescape", + "kankun", + "keba", + "keenetic_ndms2", + "kef", + "kegtron", + "keyboard", + "keyboard_remote", + "keymitt_ble", + "kira", + "kitchen_sink", + "kiwi", + "kmtronic", + "knocki", + "knx", + "kodi", + "konnected", + "kostal_plenticore", + "kraken", + "kulersky", + "kwb", + "lacrosse", + "lacrosse_view", + "landisgyr_heat_meter", + "lannouncer", + "lastfm", + "launch_library", + "laundrify", + "lcn", + "ld2410_ble", + "leaone", + "led_ble", + "lektrico", + "lg_netcast", + "lg_soundbar", + "lg_thinq", + "lidarr", + "life360", + "lifx", + "lifx_cloud", + "lightwave", + "limitlessled", + "linear_garage_door", + "linkplay", + "linksys_smart", + "linode", + "linux_battery", + "lirc", + "litejet", + "litterrobot", + "livisi", + "llamalab_automate", + "local_calendar", + "local_file", + "local_ip", + "local_todo", + "location", + "locative", + "logentries", + "logi_circle", + "london_air", + "london_underground", + 
"lookin", + "loqed", + "luci", + "luftdaten", + "lupusec", + "lutron", + "lutron_caseta", + "lw12wifi", + "lyric", + "madvr", + "mailbox", + "mailgun", + "manual", + "manual_mqtt", + "map", + "marytts", + "matrix", + "matter", + "maxcube", + "mazda", + "mealie", + "meater", + "medcom_ble", + "media_extractor", + "mediaroom", + "melcloud", + "melissa", + "melnor", + "meraki", + "message_bird", + "met", + "met_eireann", + "meteo_france", + "meteoalarm", + "meteoclimatic", + "metoffice", + "mfi", + "microbees", + "microsoft", + "microsoft_face", + "microsoft_face_detect", + "microsoft_face_identify", + "mikrotik", + "mill", + "min_max", + "minecraft_server", + "minio", + "mjpeg", + "moat", + "mobile_app", + "mochad", + "modbus", + "modem_callerid", + "modern_forms", + "moehlenhoff_alpha2", + "mold_indicator", + "monarch_money", + "monoprice", + "monzo", + "moon", + "mopeka", + "motion_blinds", + "motionblinds_ble", + "motioneye", + "motionmount", + "mpd", + "mqtt_eventstream", + "mqtt_json", + "mqtt_room", + "mqtt_statestream", + "msteams", + "mullvad", + "music_assistant", + "mutesync", + "mvglive", + "mycroft", + "myq", + "mysensors", + "mystrom", + "mythicbeastsdns", + "nad", + "nam", + "namecheapdns", + "nanoleaf", + "nasweb", + "neato", + "nederlandse_spoorwegen", + "ness_alarm", + "netatmo", + "netdata", + "netgear", + "netgear_lte", + "netio", + "network", + "neurio_energy", + "nexia", + "nextbus", + "nextcloud", + "nextdns", + "nfandroidtv", + "nibe_heatpump", + "nice_go", + "nightscout", + "niko_home_control", + "nilu", + "nina", + "nissan_leaf", + "nmap_tracker", + "nmbs", + "no_ip", + "noaa_tides", + "nobo_hub", + "norway_air", + "notify_events", + "notion", + "nsw_fuel_station", + "nsw_rural_fire_service_feed", + "nuheat", + "nuki", + "numato", + "nut", + "nws", + "nx584", + "nyt_games", + "nzbget", + "oasa_telematics", + "obihai", + "octoprint", + "oem", + "ohmconnect", + "ollama", + "ombi", + "omnilogic", + "oncue", + "ondilo_ico", + "onewire", + "onvif", + "open_meteo", + "openai_conversation", + "openalpr_cloud", + "openerz", + "openevse", + "openexchangerates", + "opengarage", + "openhardwaremonitor", + "openhome", + "opensensemap", + "opensky", + "opentherm_gw", + "openuv", + "openweathermap", + "opnsense", + "opower", + "opple", + "oralb", + "oru", + "orvibo", + "osoenergy", + "osramlightify", + "otbr", + "otp", + "ourgroceries", + "overkiz", + "ovo_energy", + "owntracks", + "p1_monitor", + "panasonic_bluray", + "panasonic_viera", + "pandora", + "panel_iframe", + "peco", + "pegel_online", + "pencom", + "permobil", + "persistent_notification", + "person", + "philips_js", + "pi_hole", + "picnic", + "picotts", + "pilight", + "ping", + "pioneer", + "pjlink", + "plaato", + "plant", + "plex", + "plum_lightpad", + "pocketcasts", + "point", + "poolsense", + "powerwall", + "private_ble_device", + "profiler", + "progettihwsw", + "proliphix", + "prometheus", + "prosegur", + "prowl", + "proximity", + "proxmoxve", + "prusalink", + "ps4", + "pulseaudio_loopback", + "pure_energie", + "purpleair", + "push", + "pushbullet", + "pushover", + "pushsafer", + "pvoutput", + "pvpc_hourly_pricing", + "pyload", + "qbittorrent", + "qingping", + "qld_bushfire", + "qnap", + "qnap_qsw", + "qrcode", + "quantum_gateway", + "qvr_pro", + "qwikswitch", + "rabbitair", + "rachio", + "radarr", + "radio_browser", + "radiotherm", + "raincloud", + "rainforest_eagle", + "rainforest_raven", + "rainmachine", + "random", + "rapt_ble", + "raspyrfm", + "rdw", + "recollect_waste", + "recorder", + "recswitch", + "reddit", 
+ "refoss", + "rejseplanen", + "remember_the_milk", + "remote_rpi_gpio", + "renson", + "repetier", + "rest", + "rest_command", + "rflink", + "rfxtrx", + "rhasspy", + "ridwell", + "ring", + "ripple", + "risco", + "rituals_perfume_genie", + "rmvtransport", + "roborock", + "rocketchat", + "roku", + "romy", + "roomba", + "roon", + "route53", + "rova", + "rpi_camera", + "rpi_power", + "rss_feed_template", + "rtorrent", + "rtsp_to_webrtc", + "ruckus_unleashed", + "russound_rnet", + "ruuvi_gateway", + "ruuvitag_ble", + "rympro", + "saj", + "samsungtv", + "sanix", + "satel_integra", + "schlage", + "schluter", + "scrape", + "screenlogic", + "scsgate", + "season", + "sendgrid", + "sense", + "sensibo", + "sensirion_ble", + "sensorpro", + "sensorpush", + "sensoterra", + "sentry", + "senz", + "serial", + "serial_pm", + "sesame", + "seven_segments", + "seventeentrack", + "sfr_box", + "sharkiq", + "shell_command", + "shelly", + "shodan", + "shopping_list", + "sia", + "sigfox", + "sighthound", + "signal_messenger", + "simplefin", + "simplepush", + "simplisafe", + "simulated", + "sinch", + "sisyphus", + "sky_hub", + "sky_remote", + "skybeacon", + "skybell", + "slack", + "sleepiq", + "slide", + "slimproto", + "sma", + "smappee", + "smart_meter_texas", + "smartthings", + "smarttub", + "smarty", + "smhi", + "smlight", + "sms", + "smtp", + "snapcast", + "snips", + "snmp", + "snooz", + "solaredge", + "solaredge_local", + "solax", + "soma", + "somfy_mylink", + "sonarr", + "songpal", + "sonos", + "sony_projector", + "soundtouch", + "spaceapi", + "spc", + "speedtestdotnet", + "spider", + "splunk", + "spotify", + "sql", + "squeezebox", + "srp_energy", + "ssdp", + "starline", + "starlingbank", + "starlink", + "startca", + "statistics", + "statsd", + "steam_online", + "steamist", + "stiebel_eltron", + "stream", + "streamlabswater", + "subaru", + "sun", + "sunweg", + "supervisord", + "supla", + "surepetcare", + "swiss_hydrological_data", + "swiss_public_transport", + "swisscom", + "switch_as_x", + "switchbee", + "switchbot", + "switchbot_cloud", + "switcher_kis", + "switchmate", + "syncthing", + "syncthru", + "synology_chat", + "synology_dsm", + "synology_srm", + "syslog", + "system_bridge", + "systemmonitor", + "tado", + "tailscale", + "tami4", + "tank_utility", + "tankerkoenig", + "tapsaff", + "tasmota", + "tautulli", + "tcp", + "technove", + "ted5000", + "telegram", + "telegram_bot", + "tellduslive", + "tellstick", + "telnet", + "temper", + "template", + "tensorflow", + "tesla_fleet", + "tesla_wall_connector", + "teslemetry", + "tessie", + "tfiac", + "thermobeacon", + "thermopro", + "thermoworks_smoke", + "thethingsnetwork", + "thingspeak", + "thinkingcleaner", + "thomson", + "thread", + "threshold", + "tibber", + "tikteck", + "tile", + "tilt_ble", + "time_date", + "tmb", + "tod", + "todoist", + "tolo", + "tomato", + "tomorrowio", + "toon", + "torque", + "touchline", + "touchline_sl", + "tplink", + "tplink_lte", + "tplink_omada", + "traccar", + "traccar_server", + "tractive", + "tradfri", + "trafikverket_camera", + "trafikverket_ferry", + "trafikverket_train", + "trafikverket_weatherstation", + "transmission", + "transport_nsw", + "travisci", + "trend", + "triggercmd", + "tuya", + "twilio", + "twilio_call", + "twilio_sms", + "twinkly", + "twitch", + "twitter", + "ubus", + "uk_transport", + "ukraine_alarm", + "unifi", + "unifi_direct", + "unifiled", + "unifiprotect", + "universal", + "upb", + "upc_connect", + "upcloud", + "upnp", + "uptime", + "uptimerobot", + "usb", + "usgs_earthquakes_feed", + "utility_meter", + 
"uvc", + "v2c", + "vallox", + "vasttrafik", + "velux", + "venstar", + "vera", + "verisure", + "versasense", + "version", + "vesync", + "viaggiatreno", + "vilfo", + "vivotek", + "vizio", + "vlc", + "vlc_telnet", + "vodafone_station", + "voicerss", + "voip", + "volkszaehler", + "volumio", + "volvooncall", + "vulcan", + "vultr", + "w800rf32", + "wake_on_lan", + "wallbox", + "waqi", + "waterfurnace", + "watson_iot", + "watson_tts", + "watttime", + "waze_travel_time", + "weatherflow", + "weatherflow_cloud", + "weatherkit", + "webmin", + "webostv", + "weheat", + "wemo", + "whirlpool", + "whois", + "wiffi", + "wilight", + "wirelesstag", + "withings", + "wiz", + "wled", + "wmspro", + "wolflink", + "workday", + "worldclock", + "worldtidesinfo", + "worxlandroid", + "ws66i", + "wsdot", + "wyoming", + "x10", + "xbox", + "xeoma", + "xiaomi", + "xiaomi_aqara", + "xiaomi_ble", + "xiaomi_miio", + "xiaomi_tv", + "xmpp", + "xs1", + "yale", + "yale_smart_alarm", + "yalexs_ble", + "yamaha", + "yamaha_musiccast", + "yandex_transport", + "yandextts", + "yardian", + "yeelight", + "yeelightsunflower", + "yi", + "yolink", + "youless", + "youtube", + "zabbix", + "zamg", + "zengge", + "zeroconf", + "zerproc", + "zestimate", + "zeversolar", + "zha", + "zhong_hong", + "ziggo_mediabox_xl", + "zodiac", + "zoneminder", + "zwave_js", + "zwave_me", +] + +NO_QUALITY_SCALE = [ + *{platform.value for platform in Platform}, + "api", + "application_credentials", + "auth", + "automation", + "blueprint", + "config", + "configurator", + "counter", + "default_config", + "device_automation", + "device_tracker", + "diagnostics", + "ffmpeg", + "file_upload", + "frontend", + "hardkernel", + "hardware", + "history", + "homeassistant", + "homeassistant_alerts", + "homeassistant_green", + "homeassistant_hardware", + "homeassistant_sky_connect", + "homeassistant_yellow", + "image_upload", + "input_boolean", + "input_button", + "input_datetime", + "input_number", + "input_select", + "input_text", + "intent_script", + "intent", + "logbook", + "logger", + "lovelace", + "media_source", + "my", + "onboarding", + "panel_custom", + "proxy", + "python_script", + "raspberry_pi", + "recovery_mode", + "repairs", + "schedule", + "script", + "search", + "system_health", + "system_log", + "tag", + "timer", + "trace", + "webhook", + "websocket_api", + "zone", +] + +SCHEMA = vol.Schema( + { + vol.Required("rules"): vol.Schema( + { + vol.Optional(rule.name): vol.Any( + vol.In(["todo", "done"]), + vol.Schema( + { + vol.Required("status"): vol.In(["todo", "done"]), + vol.Optional("comment"): str, + } + ), + vol.Schema( + { + vol.Required("status"): "exempt", + vol.Required("comment"): str, + } + ), + ) + for rule in ALL_RULES + } + ) + } +) + + +def validate_iqs_file(config: Config, integration: Integration) -> None: + """Validate quality scale file for integration.""" + if not integration.core: + return + + declared_quality_scale = QUALITY_SCALE_TIERS.get(integration.quality_scale) + + iqs_file = integration.path / "quality_scale.yaml" + has_file = iqs_file.is_file() + if not has_file: + if ( + integration.domain not in INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE + and integration.domain not in NO_QUALITY_SCALE + and integration.integration_type != "virtual" + ): + integration.add_error( + "quality_scale", + "Quality scale definition not found. New integrations are required to at least reach the Bronze tier.", + ) + return + if declared_quality_scale is not None: + integration.add_error( + "quality_scale", + "Quality scale definition not found. 
Integrations that set a manifest quality scale must have a quality scale definition.", + ) + return + return + if integration.integration_type == "virtual": + integration.add_error( + "quality_scale", + "Virtual integrations are not allowed to have a quality scale file.", + ) + return + if integration.domain in NO_QUALITY_SCALE: + integration.add_error( + "quality_scale", + "This integration is not supposed to have a quality scale file.", + ) + return + if integration.domain in INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE: + integration.add_error( + "quality_scale", + "Quality scale file found! Please remove from script/hassfest/quality_scale.py", + ) + return + name = str(iqs_file) + + try: + data = load_yaml_dict(name) + except HomeAssistantError: + integration.add_error("quality_scale", "Invalid quality_scale.yaml") + return + + try: + SCHEMA(data) + except vol.Invalid as err: + integration.add_error( + "quality_scale", f"Invalid {name}: {humanize_error(data, err)}" + ) + + rules_done = set[str]() + rules_met = set[str]() + for rule_name, rule_value in data.get("rules", {}).items(): + status = rule_value["status"] if isinstance(rule_value, dict) else rule_value + if status not in {"done", "exempt"}: + continue + rules_met.add(rule_name) + if status == "done": + rules_done.add(rule_name) + + for rule_name in rules_done: + if (validator := VALIDATORS.get(rule_name)) and ( + errors := validator.validate(config, integration, rules_done=rules_done) + ): + for error in errors: + integration.add_error("quality_scale", f"[{rule_name}] {error}") + integration.add_error("quality_scale", RULE_URL.format(rule_name=rule_name)) + + # An integration must have all the necessary rules for the declared + # quality scale, and all the rules below. + if declared_quality_scale is None: + return + + for scale in ScaledQualityScaleTiers: + if scale > declared_quality_scale: + break + required_rules = set(SCALE_RULES[scale]) + if missing_rules := (required_rules - rules_met): + friendly_rule_str = "\n".join( + f" {rule}: todo" for rule in sorted(missing_rules) + ) + integration.add_error( + "quality_scale", + f"Quality scale tier {scale.name.lower()} requires quality scale rules to be met:\n{friendly_rule_str}", + ) + + +def validate(integrations: dict[str, Integration], config: Config) -> None: + """Handle YAML files inside integrations.""" + for integration in integrations.values(): + validate_iqs_file(config, integration) diff --git a/script/hassfest/quality_scale_validation/__init__.py b/script/hassfest/quality_scale_validation/__init__.py new file mode 100644 index 00000000000..7c41a58b601 --- /dev/null +++ b/script/hassfest/quality_scale_validation/__init__.py @@ -0,0 +1,17 @@ +"""Integration quality scale rules.""" + +from typing import Protocol + +from script.hassfest.model import Config, Integration + + +class RuleValidationProtocol(Protocol): + """Protocol for rule validation.""" + + def validate( + self, config: Config, integration: Integration, *, rules_done: set[str] + ) -> list[str] | None: + """Validate a quality scale rule. + + Returns error (if any). + """ diff --git a/script/hassfest/quality_scale_validation/config_entry_unloading.py b/script/hassfest/quality_scale_validation/config_entry_unloading.py new file mode 100644 index 00000000000..4874ddc4625 --- /dev/null +++ b/script/hassfest/quality_scale_validation/config_entry_unloading.py @@ -0,0 +1,33 @@ +"""Enforce that the integration implements entry unloading. 
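
# Each rule validator registered in VALIDATORS is simply a module exposing a
# `validate` function; RuleValidationProtocol only requires that callable
# shape. A minimal stand-in validator (everything here is illustrative, not a
# real hassfest rule):
def validate(config, integration, *, rules_done: set[str]) -> list[str] | None:
    """Return a list of error strings, or None when the rule is satisfied."""
    if "some-prerequisite" not in rules_done:
        return ["Rule depends on some-prerequisite being done first"]
    return None

assert validate(None, None, rules_done={"some-prerequisite"}) is None
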
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/config-entry-unloading/ +""" + +import ast + +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + + +def _has_unload_entry_function(module: ast.Module) -> bool: + """Test if the module defines `async_unload_entry` function.""" + return any( + type(item) is ast.AsyncFunctionDef and item.name == "async_unload_entry" + for item in module.body + ) + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate that the integration has a config flow.""" + + init_file = integration.path / "__init__.py" + init = ast_parse_module(init_file) + + if not _has_unload_entry_function(init): + return [ + "Integration does not support config entry unloading " + "(is missing `async_unload_entry` in __init__.py)" + ] + return None diff --git a/script/hassfest/quality_scale_validation/config_flow.py b/script/hassfest/quality_scale_validation/config_flow.py new file mode 100644 index 00000000000..d1ac70ab469 --- /dev/null +++ b/script/hassfest/quality_scale_validation/config_flow.py @@ -0,0 +1,26 @@ +"""Enforce that the integration implements config flow. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/config-flow/ +""" + +from script.hassfest.model import Config, Integration + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate that the integration implements config flow.""" + + if not integration.config_flow: + return [ + "Integration does not set config_flow in its manifest " + f"homeassistant/components/{integration.domain}/manifest.json", + ] + + config_flow_file = integration.path / "config_flow.py" + if not config_flow_file.exists(): + return [ + "Integration does not implement config flow (is missing config_flow.py)", + ] + + return None diff --git a/script/hassfest/quality_scale_validation/diagnostics.py b/script/hassfest/quality_scale_validation/diagnostics.py new file mode 100644 index 00000000000..ea143002b09 --- /dev/null +++ b/script/hassfest/quality_scale_validation/diagnostics.py @@ -0,0 +1,45 @@ +"""Enforce that the integration implements diagnostics. 
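
# Standalone illustration of the AST check used by the config-entry-unloading
# rule: parse the integration's __init__.py and look for a top-level
# `async_unload_entry` coroutine. The source text here is a made-up example.
import ast

source = """
async def async_setup_entry(hass, entry):
    return True

async def async_unload_entry(hass, entry):
    return True
"""

module = ast.parse(source)
has_unload = any(
    isinstance(item, ast.AsyncFunctionDef) and item.name == "async_unload_entry"
    for item in module.body
)
assert has_unload
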
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/diagnostics/ +""" + +import ast + +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + +DIAGNOSTICS_FUNCTIONS = { + "async_get_config_entry_diagnostics", + "async_get_device_diagnostics", +} + + +def _has_diagnostics_function(module: ast.Module) -> bool: + """Test if the module defines at least one of diagnostic functions.""" + return any( + type(item) is ast.AsyncFunctionDef and item.name in DIAGNOSTICS_FUNCTIONS + for item in ast.walk(module) + ) + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate that the integration implements diagnostics.""" + + diagnostics_file = integration.path / "diagnostics.py" + if not diagnostics_file.exists(): + return [ + "Integration does implement diagnostics platform " + "(is missing diagnostics.py)", + ] + + diagnostics = ast_parse_module(diagnostics_file) + + if not _has_diagnostics_function(diagnostics): + return [ + f"Integration is missing one of {DIAGNOSTICS_FUNCTIONS} " + f"in {diagnostics_file}" + ] + + return None diff --git a/script/hassfest/quality_scale_validation/discovery.py b/script/hassfest/quality_scale_validation/discovery.py new file mode 100644 index 00000000000..d11bcaf2cec --- /dev/null +++ b/script/hassfest/quality_scale_validation/discovery.py @@ -0,0 +1,62 @@ +"""Enforce that the integration supports discovery. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/discovery/ +""" + +import ast + +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + +MANIFEST_KEYS = [ + "bluetooth", + "dhcp", + "homekit", + "mqtt", + "ssdp", + "usb", + "zeroconf", +] +CONFIG_FLOW_STEPS = { + "async_step_bluetooth", + "async_step_discovery", + "async_step_dhcp", + "async_step_hassio", + "async_step_homekit", + "async_step_mqtt", + "async_step_ssdp", + "async_step_usb", + "async_step_zeroconf", +} + + +def _has_discovery_function(module: ast.Module) -> bool: + """Test if the module defines at least one of the discovery functions.""" + return any( + type(item) is ast.AsyncFunctionDef and item.name in CONFIG_FLOW_STEPS + for item in ast.walk(module) + ) + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate that the integration implements diagnostics.""" + + config_flow_file = integration.path / "config_flow.py" + if not config_flow_file.exists(): + return ["Integration is missing config_flow.py"] + + # Check manifest + if any(key in integration.manifest for key in MANIFEST_KEYS): + return None + + # Fallback => check config_flow step + config_flow = ast_parse_module(config_flow_file) + if not (_has_discovery_function(config_flow)): + return [ + f"Integration is missing one of {CONFIG_FLOW_STEPS} " + f"in {config_flow_file}" + ] + + return None diff --git a/script/hassfest/quality_scale_validation/parallel_updates.py b/script/hassfest/quality_scale_validation/parallel_updates.py new file mode 100644 index 00000000000..00ad891774d --- /dev/null +++ b/script/hassfest/quality_scale_validation/parallel_updates.py @@ -0,0 +1,38 @@ +"""Enforce that the integration sets PARALLEL_UPDATES constant. 
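
# The discovery rule above passes in either of two ways: the manifest declares
# a discovery source (bluetooth/dhcp/homekit/mqtt/ssdp/usb/zeroconf), or
# config_flow.py defines one of the discovery step handlers. A compact sketch
# with made-up inputs:
import ast

MANIFEST_KEYS = ["bluetooth", "dhcp", "homekit", "mqtt", "ssdp", "usb", "zeroconf"]
CONFIG_FLOW_STEPS = {"async_step_dhcp", "async_step_zeroconf"}  # abbreviated subset

def supports_discovery(manifest: dict, config_flow_source: str) -> bool:
    if any(key in manifest for key in MANIFEST_KEYS):
        return True
    module = ast.parse(config_flow_source)
    return any(
        isinstance(node, ast.AsyncFunctionDef) and node.name in CONFIG_FLOW_STEPS
        for node in ast.walk(module)
    )

assert supports_discovery({"zeroconf": ["_hue._tcp.local."]}, "")
assert supports_discovery({}, "async def async_step_dhcp(self, info): ...")
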
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/parallel-updates +""" + +import ast + +from homeassistant.const import Platform +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + + +def _has_parallel_updates_defined(module: ast.Module) -> bool: + """Test if the module defines `PARALLEL_UPDATES` constant.""" + return any( + type(item) is ast.Assign and item.targets[0].id == "PARALLEL_UPDATES" + for item in module.body + ) + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate that the integration sets PARALLEL_UPDATES constant.""" + + errors = [] + for platform in Platform: + module_file = integration.path / f"{platform}.py" + if not module_file.exists(): + continue + module = ast_parse_module(module_file) + + if not _has_parallel_updates_defined(module): + errors.append( + f"Integration does not set `PARALLEL_UPDATES` in {module_file}" + ) + + return errors diff --git a/script/hassfest/quality_scale_validation/reauthentication_flow.py b/script/hassfest/quality_scale_validation/reauthentication_flow.py new file mode 100644 index 00000000000..3db9700af98 --- /dev/null +++ b/script/hassfest/quality_scale_validation/reauthentication_flow.py @@ -0,0 +1,33 @@ +"""Enforce that the integration implements reauthentication flow. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/reauthentication-flow/ +""" + +import ast + +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + + +def _has_step_reauth_function(module: ast.Module) -> bool: + """Test if the module defines `async_step_reauth` function.""" + return any( + type(item) is ast.AsyncFunctionDef and item.name == "async_step_reauth" + for item in ast.walk(module) + ) + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate that the integration has a reauthentication flow.""" + + config_flow_file = integration.path / "config_flow.py" + config_flow = ast_parse_module(config_flow_file) + + if not _has_step_reauth_function(config_flow): + return [ + "Integration does not support a reauthentication flow " + f"(is missing `async_step_reauth` in {config_flow_file})" + ] + return None diff --git a/script/hassfest/quality_scale_validation/reconfiguration_flow.py b/script/hassfest/quality_scale_validation/reconfiguration_flow.py new file mode 100644 index 00000000000..28cc0ef6d43 --- /dev/null +++ b/script/hassfest/quality_scale_validation/reconfiguration_flow.py @@ -0,0 +1,33 @@ +"""Enforce that the integration implements reconfiguration flow. 
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/reconfiguration-flow/ +""" + +import ast + +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + + +def _has_step_reconfigure_function(module: ast.Module) -> bool: + """Test if the module defines a function.""" + return any( + type(item) is ast.AsyncFunctionDef and item.name == "async_step_reconfigure" + for item in ast.walk(module) + ) + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate that the integration has a reconfiguration flow.""" + + config_flow_file = integration.path / "config_flow.py" + config_flow = ast_parse_module(config_flow_file) + + if not _has_step_reconfigure_function(config_flow): + return [ + "Integration does not support a reconfiguration flow " + f"(is missing `async_step_reconfigure` in {config_flow_file})" + ] + return None diff --git a/script/hassfest/quality_scale_validation/runtime_data.py b/script/hassfest/quality_scale_validation/runtime_data.py new file mode 100644 index 00000000000..cfc4c5224de --- /dev/null +++ b/script/hassfest/quality_scale_validation/runtime_data.py @@ -0,0 +1,130 @@ +"""Enforce that the integration uses ConfigEntry.runtime_data to store runtime data. + +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/runtime-data +""" + +import ast +import re + +from homeassistant.const import Platform +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + +_ANNOTATION_MATCH = re.compile(r"^[A-Za-z]+ConfigEntry$") +_FUNCTIONS: dict[str, dict[str, int]] = { + "__init__": { # based on ComponentProtocol + "async_migrate_entry": 2, + "async_remove_config_entry_device": 2, + "async_remove_entry": 2, + "async_setup_entry": 2, + "async_unload_entry": 2, + }, + "diagnostics": { # based on DiagnosticsProtocol + "async_get_config_entry_diagnostics": 2, + "async_get_device_diagnostics": 2, + }, +} +for platform in Platform: # based on EntityPlatformModule + _FUNCTIONS[platform.value] = { + "async_setup_entry": 2, + } + + +def _sets_runtime_data( + async_setup_entry_function: ast.AsyncFunctionDef, config_entry_argument: ast.arg +) -> bool: + """Check that `entry.runtime` gets set within `async_setup_entry`.""" + for node in ast.walk(async_setup_entry_function): + if ( + isinstance(node, ast.Attribute) + and isinstance(node.value, ast.Name) + and node.value.id == config_entry_argument.arg + and node.attr == "runtime_data" + and isinstance(node.ctx, ast.Store) + ): + return True + return False + + +def _get_async_function(module: ast.Module, name: str) -> ast.AsyncFunctionDef | None: + """Get async function.""" + for item in module.body: + if isinstance(item, ast.AsyncFunctionDef) and item.name == name: + return item + return None + + +def _check_function_annotation( + function: ast.AsyncFunctionDef, position: int +) -> str | None: + """Ensure function uses CustomConfigEntry type annotation.""" + if len(function.args.args) < position: + return f"{function.name} has incorrect signature" + argument = function.args.args[position - 1] + if not ( + (annotation := argument.annotation) + and isinstance(annotation, ast.Name) + and _ANNOTATION_MATCH.match(annotation.id) + ): + return f"([+ strict-typing]) {function.name} does not use typed ConfigEntry" + return None + + +def _check_typed_config_entry(integration: Integration) -> list[str]: + """Ensure integration uses CustomConfigEntry type 
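
# Standalone sketch of the runtime-data check: walk async_setup_entry and look
# for an attribute store on <entry argument>.runtime_data. The example source
# is made up.
import ast

source = """
async def async_setup_entry(hass, entry):
    entry.runtime_data = object()
    return True
"""

setup_entry = ast.parse(source).body[0]
assert isinstance(setup_entry, ast.AsyncFunctionDef)
entry_arg = setup_entry.args.args[1]

sets_runtime_data = any(
    isinstance(node, ast.Attribute)
    and isinstance(node.value, ast.Name)
    and node.value.id == entry_arg.arg
    and node.attr == "runtime_data"
    and isinstance(node.ctx, ast.Store)
    for node in ast.walk(setup_entry)
)
assert sets_runtime_data
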
annotation.""" + errors: list[str] = [] + # Check body level function annotations + for file, functions in _FUNCTIONS.items(): + module_file = integration.path / f"{file}.py" + if not module_file.exists(): + continue + module = ast_parse_module(module_file) + for function, position in functions.items(): + if not (async_function := _get_async_function(module, function)): + continue + if error := _check_function_annotation(async_function, position): + errors.append(f"{error} in {module_file}") + + # Check config_flow annotations + config_flow_file = integration.path / "config_flow.py" + config_flow = ast_parse_module(config_flow_file) + for node in config_flow.body: + if not isinstance(node, ast.ClassDef): + continue + if any( + isinstance(async_function, ast.FunctionDef) + and async_function.name == "async_get_options_flow" + and (error := _check_function_annotation(async_function, 1)) + for async_function in node.body + ): + errors.append(f"{error} in {config_flow_file}") + + return errors + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate correct use of ConfigEntry.runtime_data.""" + init_file = integration.path / "__init__.py" + init = ast_parse_module(init_file) + + # Should not happen, but better to be safe + if not (async_setup_entry := _get_async_function(init, "async_setup_entry")): + return [f"Could not find `async_setup_entry` in {init_file}"] + if len(async_setup_entry.args.args) != 2: + return [f"async_setup_entry has incorrect signature in {init_file}"] + config_entry_argument = async_setup_entry.args.args[1] + + errors: list[str] = [] + if not _sets_runtime_data(async_setup_entry, config_entry_argument): + errors.append( + "Integration does not set entry.runtime_data in async_setup_entry" + f"({init_file})" + ) + + # Extra checks, if strict-typing is marked as done + if "strict-typing" in rules_done: + errors.extend(_check_typed_config_entry(integration)) + + return errors diff --git a/script/hassfest/quality_scale_validation/strict_typing.py b/script/hassfest/quality_scale_validation/strict_typing.py new file mode 100644 index 00000000000..c1373032ff8 --- /dev/null +++ b/script/hassfest/quality_scale_validation/strict_typing.py @@ -0,0 +1,67 @@ +"""Enforce that the integration has strict typing enabled. 
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/strict-typing/ +""" + +from functools import lru_cache +from importlib import metadata +from pathlib import Path +import re + +from script.hassfest.model import Config, Integration + +_STRICT_TYPING_FILE = Path(".strict-typing") +_COMPONENT_REGEX = r"homeassistant.components.([^.]+).*" + + +@lru_cache +def _strict_typing_components(strict_typing_file: Path) -> set[str]: + return set( + { + match.group(1) + for line in strict_typing_file.read_text(encoding="utf-8").splitlines() + if (match := re.match(_COMPONENT_REGEX, line)) is not None + } + ) + + +def _check_requirements_are_typed(integration: Integration) -> list[str]: + """Check if all requirements are typed.""" + invalid_requirements = [] + for requirement in integration.requirements: + requirement_name, requirement_version = requirement.split("==") + # Remove any extras + requirement_name = requirement_name.split("[")[0] + try: + distribution = metadata.distribution(requirement_name) + except metadata.PackageNotFoundError: + # Package not installed locally + continue + if distribution.version != requirement_version: + # Version out of date locally + continue + + if not any(file for file in distribution.files if file.name == "py.typed"): + # no py.typed file + invalid_requirements.append(requirement) + + return invalid_requirements + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate that the integration has strict typing enabled.""" + strict_typing_file = config.root / _STRICT_TYPING_FILE + + if integration.domain not in _strict_typing_components(strict_typing_file): + return [ + "Integration does not have strict typing enabled " + "(is missing from .strict-typing)" + ] + if untyped_requirements := _check_requirements_are_typed(integration): + return [ + f"Requirements {untyped_requirements} do not conform PEP 561 (https://peps.python.org/pep-0561/)", + "They should be typed and have a 'py.typed' file", + ] + return None diff --git a/script/hassfest/quality_scale_validation/test_before_setup.py b/script/hassfest/quality_scale_validation/test_before_setup.py new file mode 100644 index 00000000000..5f21a9d2458 --- /dev/null +++ b/script/hassfest/quality_scale_validation/test_before_setup.py @@ -0,0 +1,82 @@ +"""Enforce that the integration raises correctly during initialisation. 
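
# Sketch of the PEP 561 check behind the strict-typing rule: an installed
# requirement counts as typed when its distribution ships a py.typed marker
# file. "voluptuous" is just a stand-in package name for illustration.
from importlib import metadata

def ships_py_typed(distribution_name: str) -> bool:
    try:
        dist = metadata.distribution(distribution_name)
    except metadata.PackageNotFoundError:
        return False  # not installed locally; the real rule skips these
    return any(file.name == "py.typed" for file in dist.files or [])

print(ships_py_typed("voluptuous"))  # True or False depending on the local env
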
+
+https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/test-before-setup/
+"""
+
+import ast
+
+from script.hassfest import ast_parse_module
+from script.hassfest.model import Config, Integration
+
+_VALID_EXCEPTIONS = {
+    "ConfigEntryNotReady",
+    "ConfigEntryAuthFailed",
+    "ConfigEntryError",
+}
+
+
+def _get_exception_name(expression: ast.expr) -> str:
+    """Get the name of the exception being raised."""
+    if isinstance(expression, ast.Name):
+        return expression.id
+
+    if isinstance(expression, ast.Call):
+        return _get_exception_name(expression.func)
+
+    if isinstance(expression, ast.Attribute):
+        return _get_exception_name(expression.value)
+
+    raise AssertionError(
+        f"Raise is neither Attribute nor Call nor Name: {type(expression)}"
+    )
+
+
+def _raises_exception(integration: Integration) -> bool:
+    """Check that a valid exception is raised."""
+    for module_file in integration.path.rglob("*.py"):
+        module = ast_parse_module(module_file)
+        for node in ast.walk(module):
+            if (
+                isinstance(node, ast.Raise)
+                and _get_exception_name(node.exc) in _VALID_EXCEPTIONS
+            ):
+                return True
+
+    return False
+
+
+def _calls_first_refresh(async_setup_entry_function: ast.AsyncFunctionDef) -> bool:
+    """Check that `async_setup_entry` calls async_config_entry_first_refresh."""
+    for node in ast.walk(async_setup_entry_function):
+        if (
+            isinstance(node, ast.Call)
+            and isinstance(node.func, ast.Attribute)
+            and node.func.attr == "async_config_entry_first_refresh"
+        ):
+            return True
+
+    return False
+
+
+def _get_setup_entry_function(module: ast.Module) -> ast.AsyncFunctionDef | None:
+    """Get async_setup_entry function."""
+    for item in module.body:
+        if isinstance(item, ast.AsyncFunctionDef) and item.name == "async_setup_entry":
+            return item
+    return None
+
+
+def validate(
+    config: Config, integration: Integration, *, rules_done: set[str]
+) -> list[str] | None:
+    """Validate that the integration raises correctly during initialisation."""
+    init_file = integration.path / "__init__.py"
+    init = ast_parse_module(init_file)
+
+    # Should not happen, but better to be safe
+    if not (async_setup_entry := _get_setup_entry_function(init)):
+        return [f"Could not find `async_setup_entry` in {init_file}"]
+
+    if not (_calls_first_refresh(async_setup_entry) or _raises_exception(integration)):
+        return [f"Integration does not raise one of {_VALID_EXCEPTIONS}"]
+    return None
diff --git a/script/hassfest/quality_scale_validation/unique_config_entry.py b/script/hassfest/quality_scale_validation/unique_config_entry.py
new file mode 100644
index 00000000000..83b3d20bd80
--- /dev/null
+++ b/script/hassfest/quality_scale_validation/unique_config_entry.py
@@ -0,0 +1,52 @@
+"""Enforce that the integration prevents duplicates from being configured.
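# Editor's note: a minimal sketch (not part of this patch) of integration code
# that the test-before-setup check above accepts: raising one of the
# _VALID_EXCEPTIONS during setup. MyClient and MyApiError are hypothetical
# stand-ins for an integration's library.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady


class MyApiError(Exception):
    """Stand-in for a library error."""


class MyClient:
    """Stand-in for a library client."""

    def __init__(self, host: str) -> None:
        self.host = host

    async def async_connect(self) -> None:
        """Pretend the device is unreachable."""
        raise MyApiError(f"cannot reach {self.host}")


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Test the connection before finishing setup."""
    client = MyClient(entry.data["host"])
    try:
        await client.async_connect()
    except MyApiError as err:
        # _raises_exception() finds this ast.Raise node anywhere in the integration.
        raise ConfigEntryNotReady("Device is not reachable") from err
    return True
# (end of editor's example)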
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/unique-config-entry/ +""" + +import ast + +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + + +def _has_method_call(module: ast.Module, name: str) -> bool: + """Test if the module calls a specific method.""" + return any( + type(item.func) is ast.Attribute and item.func.attr == name + for item in ast.walk(module) + if isinstance(item, ast.Call) + ) + + +def _has_abort_entries_match(module: ast.Module) -> bool: + """Test if the module calls `_async_abort_entries_match`.""" + return _has_method_call(module, "_async_abort_entries_match") + + +def _has_abort_unique_id_configured(module: ast.Module) -> bool: + """Test if the module calls defines (and checks for) a unique_id.""" + return _has_method_call(module, "async_set_unique_id") and _has_method_call( + module, "_abort_if_unique_id_configured" + ) + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate that the integration prevents duplicate devices.""" + + if integration.manifest.get("single_config_entry"): + return None + + config_flow_file = integration.path / "config_flow.py" + config_flow = ast_parse_module(config_flow_file) + + if not ( + _has_abort_entries_match(config_flow) + or _has_abort_unique_id_configured(config_flow) + ): + return [ + "Integration doesn't prevent the same device or service from being " + f"set up twice in {config_flow_file}" + ] + return None diff --git a/script/hassfest/requirements.py b/script/hassfest/requirements.py index d35d96121c5..998593d20ec 100644 --- a/script/hassfest/requirements.py +++ b/script/hassfest/requirements.py @@ -28,12 +28,6 @@ PACKAGE_REGEX = re.compile( PIP_REGEX = re.compile(r"^(--.+\s)?([-_\.\w\d]+.*(?:==|>=|<=|~=|!=|<|>|===)?.*$)") PIP_VERSION_RANGE_SEPARATOR = re.compile(r"^(==|>=|<=|~=|!=|<|>|===)?(.*)$") -IGNORE_STANDARD_LIBRARY_VIOLATIONS = { - # Integrations which have standard library requirements. 
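# Editor's note: a minimal sketch (not part of this patch) of a config flow
# step that the unique-config-entry check above accepts. The "my_integration"
# domain and the serial-number field are hypothetical; calling
# _async_abort_entries_match() instead would also satisfy the check, as would
# declaring single_config_entry in the manifest.
from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

DOMAIN = "my_integration"


class MyConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow that refuses duplicate devices."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        if user_input is not None:
            # These two calls are what _has_abort_unique_id_configured() detects.
            await self.async_set_unique_id(user_input["serial_number"])
            self._abort_if_unique_id_configured()
            return self.async_create_entry(title="My device", data=user_input)
        return self.async_show_form(step_id="user")
# (end of editor's example)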
- "slide", - "suez_water", -} - def validate(integrations: dict[str, Integration], config: Config) -> None: """Handle requirements for integrations.""" @@ -84,18 +78,19 @@ def validate_requirements_format(integration: Integration) -> bool: if not version: continue - for part in version.split(";", 1)[0].split(","): - version_part = PIP_VERSION_RANGE_SEPARATOR.match(part) - if ( - version_part - and AwesomeVersion(version_part.group(2)).strategy - == AwesomeVersionStrategy.UNKNOWN - ): - integration.add_error( - "requirements", - f"Unable to parse package version ({version}) for {pkg}.", - ) - continue + if integration.core: + for part in version.split(";", 1)[0].split(","): + version_part = PIP_VERSION_RANGE_SEPARATOR.match(part) + if ( + version_part + and AwesomeVersion(version_part.group(2)).strategy + == AwesomeVersionStrategy.UNKNOWN + ): + integration.add_error( + "requirements", + f"Unable to parse package version ({version}) for {pkg}.", + ) + continue return len(integration.errors) == start_errors @@ -143,10 +138,7 @@ def validate_requirements(integration: Integration) -> None: if req in sys.stdlib_module_names: standard_library_violations.add(req) - if ( - standard_library_violations - and integration.domain not in IGNORE_STANDARD_LIBRARY_VIOLATIONS - ): + if standard_library_violations: integration.add_error( "requirements", ( @@ -154,18 +146,6 @@ def validate_requirements(integration: Integration) -> None: "are not compatible with the Python standard library" ), ) - elif ( - not standard_library_violations - and integration.domain in IGNORE_STANDARD_LIBRARY_VIOLATIONS - ): - integration.add_error( - "requirements", - ( - f"Integration {integration.domain} no longer has requirements which are" - " incompatible with the Python standard library, remove it from " - "IGNORE_STANDARD_LIBRARY_VIOLATIONS" - ), - ) @cache diff --git a/script/hassfest/services.py b/script/hassfest/services.py index 92fca14d373..8c9ab5c0c0b 100644 --- a/script/hassfest/services.py +++ b/script/hassfest/services.py @@ -75,6 +75,14 @@ CUSTOM_INTEGRATION_FIELD_SCHEMA = CORE_INTEGRATION_FIELD_SCHEMA.extend( } ) +CUSTOM_INTEGRATION_SECTION_SCHEMA = vol.Schema( + { + vol.Optional("collapsed"): bool, + vol.Required("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), + } +) + + CORE_INTEGRATION_SERVICE_SCHEMA = vol.Any( vol.Schema( { @@ -105,7 +113,17 @@ CUSTOM_INTEGRATION_SERVICE_SCHEMA = vol.Any( vol.Optional("target"): vol.Any( selector.TargetSelector.CONFIG_SCHEMA, None ), - vol.Optional("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), + vol.Optional("fields"): vol.All( + vol.Schema( + { + str: vol.Any( + CUSTOM_INTEGRATION_FIELD_SCHEMA, + CUSTOM_INTEGRATION_SECTION_SCHEMA, + ) + } + ), + unique_field_validator, + ), } ), None, diff --git a/script/hassfest/ssdp.py b/script/hassfest/ssdp.py index 0a61284eb46..989b614e43d 100644 --- a/script/hassfest/ssdp.py +++ b/script/hassfest/ssdp.py @@ -33,17 +33,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(ssdp_path)) as fp: - if fp.read() != content: - config.add_error( - "ssdp", - "File ssdp.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + if ssdp_path.read_text() != content: + config.add_error( + "ssdp", + "File ssdp.py is not up to date. 
Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate ssdp file.""" ssdp_path = config.root / "homeassistant/generated/ssdp.py" - with open(str(ssdp_path), "w") as fp: - fp.write(f"{config.cache['ssdp']}") + ssdp_path.write_text(f"{config.cache['ssdp']}") diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index fa12ce626ad..078c649666d 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -131,11 +131,13 @@ def translation_value_validator(value: Any) -> str: - prevents strings with single quoted placeholders - prevents combined translations """ - value = cv.string_with_no_html(value) - value = string_no_single_quoted_placeholders(value) - if RE_COMBINED_REFERENCE.search(value): + string_value = cv.string_with_no_html(value) + string_value = string_no_single_quoted_placeholders(string_value) + if RE_COMBINED_REFERENCE.search(string_value): raise vol.Invalid("the string should not contain combined translations") - return str(value) + if string_value != string_value.strip(): + raise vol.Invalid("the string should not contain leading or trailing spaces") + return string_value def string_no_single_quoted_placeholders(value: str) -> str: @@ -170,6 +172,9 @@ def gen_data_entry_schema( vol.Optional("sections"): { str: { vol.Optional("data"): {str: translation_value_validator}, + vol.Optional("data_description"): { + str: translation_value_validator + }, vol.Optional("description"): translation_value_validator, vol.Optional("name"): translation_value_validator, }, @@ -280,6 +285,15 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: "user" if integration.integration_type == "helper" else None ), ), + vol.Optional("config_subentries"): cv.schema_with_slug_keys( + gen_data_entry_schema( + config=config, + integration=integration, + flow_title=REQUIRED, + require_step_title=False, + ), + slug_validator=vol.Any("_", cv.slug), + ), vol.Optional("options"): gen_data_entry_schema( config=config, integration=integration, @@ -366,6 +380,9 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: }, slug_validator=translation_key_validator, ), + vol.Optional( + "unit_of_measurement" + ): translation_value_validator, }, slug_validator=translation_key_validator, ), diff --git a/script/hassfest/usb.py b/script/hassfest/usb.py index 84cafc973ad..c34f4fd1b62 100644 --- a/script/hassfest/usb.py +++ b/script/hassfest/usb.py @@ -35,19 +35,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(usb_path)) as fp: - current = fp.read() - if current != content: - config.add_error( - "usb", - "File usb.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) - return + if usb_path.read_text() != content: + config.add_error( + "usb", + "File usb.py is not up to date. 
Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate usb file.""" usb_path = config.root / "homeassistant/generated/usb.py" - with open(str(usb_path), "w") as fp: - fp.write(f"{config.cache['usb']}") + usb_path.write_text(f"{config.cache['usb']}") diff --git a/script/hassfest/zeroconf.py b/script/hassfest/zeroconf.py index 63f10fcf294..fe3e5bb3875 100644 --- a/script/hassfest/zeroconf.py +++ b/script/hassfest/zeroconf.py @@ -55,19 +55,19 @@ def generate_and_validate(integrations: dict[str, Integration]) -> str: # HomeKit models are matched on starting string, make sure none overlap. warned = set() - for key in homekit_dict: + for key, value in homekit_dict.items(): if key in warned: continue # n^2 yoooo - for key_2 in homekit_dict: + for key_2, value_2 in homekit_dict.items(): if key == key_2 or key_2 in warned: continue if key.startswith(key_2) or key_2.startswith(key): integration.add_error( "zeroconf", - f"Integrations {homekit_dict[key]} and {homekit_dict[key_2]} " + f"Integrations {value} and {value_2} " "have overlapping HomeKit models", ) warned.add(key) @@ -90,19 +90,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(zeroconf_path)) as fp: - current = fp.read() - if current != content: - config.add_error( - "zeroconf", - "File zeroconf.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) - return + if zeroconf_path.read_text() != content: + config.add_error( + "zeroconf", + "File zeroconf.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate zeroconf file.""" zeroconf_path = config.root / "homeassistant/generated/zeroconf.py" - with open(str(zeroconf_path), "w") as fp: - fp.write(f"{config.cache['zeroconf']}") + zeroconf_path.write_text(f"{config.cache['zeroconf']}") diff --git a/script/inspect_schemas.py b/script/inspect_schemas.py index a8ffe0afb60..0f888d14af2 100755 --- a/script/inspect_schemas.py +++ b/script/inspect_schemas.py @@ -2,7 +2,7 @@ """Inspect all component SCHEMAS.""" import importlib -import os +from pathlib import Path import pkgutil from homeassistant.config import _identify_config_schema @@ -20,7 +20,7 @@ def explore_module(package): def main(): """Run the script.""" - if not os.path.isfile("requirements_all.txt"): + if not Path("requirements_all.txt").is_file(): print("Run this from HA root dir") return @@ -57,7 +57,9 @@ def main(): ) for key in sorted(msg): - print("\n{}\n - {}".format(key, "\n - ".join(msg[key]))) + print(f"\n{key}") + for val in msg[key]: + print(f" - {val}") if __name__ == "__main__": diff --git a/script/json_schemas/manifest_schema.json b/script/json_schemas/manifest_schema.json new file mode 100644 index 00000000000..7349f12b55a --- /dev/null +++ b/script/json_schemas/manifest_schema.json @@ -0,0 +1,391 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Home Assistant integration manifest", + "description": "The manifest for a Home Assistant integration", + "type": "object", + "if": { + "properties": { "integration_type": { "const": "virtual" } }, + "required": ["integration_type"] + }, + "then": { + "oneOf": [ + { + "properties": { + "domain": { + "description": "The domain identifier of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#domain", + "examples": 
["mobile_app"], + "type": "string", + "pattern": "[0-9a-z_]+" + }, + "name": { + "description": "The friendly name of the integration.", + "type": "string" + }, + "integration_type": { + "description": "The integration type.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-type", + "const": "virtual" + }, + "iot_standards": { + "description": "The IoT standards which supports devices or services of this virtual integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#iot-standards", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "enum": ["homekit", "zigbee", "zwave"] + } + } + }, + "additionalProperties": false, + "required": ["domain", "name", "integration_type", "iot_standards"] + }, + { + "properties": { + "domain": { + "description": "The domain identifier of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#domain", + "examples": ["mobile_app"], + "type": "string", + "pattern": "[0-9a-z_]+" + }, + "name": { + "description": "The friendly name of the integration.", + "type": "string" + }, + "integration_type": { + "description": "The integration type.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-type", + "const": "virtual" + }, + "supported_by": { + "description": "The integration which supports devices or services of this virtual integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#supported-by", + "type": "string" + } + }, + "additionalProperties": false, + "required": ["domain", "name", "integration_type", "supported_by"] + } + ] + }, + "else": { + "properties": { + "domain": { + "description": "The domain identifier of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#domain", + "examples": ["mobile_app"], + "type": "string", + "pattern": "[0-9a-z_]+" + }, + "name": { + "description": "The friendly name of the integration.", + "type": "string" + }, + "integration_type": { + "description": "The integration type.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-type", + "type": "string", + "default": "hub", + "enum": [ + "device", + "entity", + "hardware", + "helper", + "hub", + "service", + "system" + ] + }, + "config_flow": { + "description": "Whether the integration is configurable from the UI.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#config-flow", + "type": "boolean" + }, + "mqtt": { + "description": "A list of topics to subscribe for the discovery of devices via MQTT.\nThis requires to specify \"mqtt\" in either the \"dependencies\" or \"after_dependencies\".\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#mqtt", + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "zeroconf": { + "description": "A list containing service domains to search for devices to discover via Zeroconf. Items can either be strings, which discovers all devices in the specific service domain, and/or objects which include filters. 
(useful for generic service domains like _http._tcp.local.)\nA device is discovered if it matches one of the items, but inside the individual item all properties have to be matched.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#zeroconf", + "type": "array", + "minItems": 1, + "items": { + "anyOf": [ + { + "type": "string", + "pattern": "^.*\\.local\\.$", + "description": "Service domain to search for devices." + }, + { + "type": "object", + "properties": { + "type": { + "description": "The service domain to search for devices.", + "examples": ["_http._tcp.local."], + "type": "string", + "pattern": "^.*\\.local\\.$" + }, + "name": { + "description": "The name or name pattern of the devices to filter.", + "type": "string" + }, + "properties": { + "description": "The properties of the Zeroconf advertisement to filter.", + "type": "object", + "additionalProperties": { "type": "string" } + } + }, + "required": ["type"], + "additionalProperties": false + } + ] + }, + "uniqueItems": true + }, + "ssdp": { + "description": "A list of matchers to find devices discoverable via SSDP/UPnP. In order to be discovered, the device has to match all properties of any of the matchers.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#ssdp", + "type": "array", + "minItems": 1, + "items": { + "description": "A matcher for the SSDP discovery.", + "type": "object", + "properties": { + "st": { + "type": "string" + }, + "deviceType": { + "type": "string" + }, + "manufacturer": { + "type": "string" + }, + "modelDescription": { + "type": "string" + } + }, + "additionalProperties": { "type": "string" } + } + }, + "bluetooth": { + "description": "A list of matchers to find devices discoverable via Bluetooth. In order to be discovered, the device has to match all properties of any of the matchers.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#bluetooth", + "type": "array", + "minItems": 1, + "items": { + "description": "A matcher for the bluetooth discovery", + "type": "object", + "properties": { + "connectable": { + "description": "Whether the device needs to be connected to or it works with just advertisement data.", + "type": "boolean" + }, + "local_name": { + "description": "The name or a name pattern of the device to match.", + "type": "string", + "pattern": "^([^*]+|[^*]{3,}[*].*)$" + }, + "service_uuid": { + "description": "The 128-bit service data UUID to match.", + "type": "string", + "pattern": "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" + }, + "service_data_uuid": { + "description": "The 16-bit service data UUID to match, converted into the corresponding 128-bit UUID by replacing the 3rd and 4th byte of `00000000-0000-1000-8000-00805f9b34fb` with the 16-bit UUID.", + "examples": ["0000fd3d-0000-1000-8000-00805f9b34fb"], + "type": "string", + "pattern": "0000[0-9a-f]{4}-0000-1000-8000-00805f9b34fb" + }, + "manufacturer_id": { + "description": "The Manufacturer ID to match.", + "type": "integer" + }, + "manufacturer_data_start": { + "description": "The start bytes of the manufacturer data to match.", + "type": "array", + "minItems": 1, + "items": { + "type": "integer", + "minimum": 0, + "maximum": 255 + } + } + }, + "additionalProperties": false + }, + "uniqueItems": true + }, + "homekit": { + "description": "A list of model names to find devices which are discoverable via HomeKit. 
A device is discovered if the model name of the device starts with any of the specified model names.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#homekit", + "type": "object", + "properties": { + "models": { + "description": "The model names to search for.", + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "required": ["models"], + "additionalProperties": false + }, + "dhcp": { + "description": "A list of matchers to find devices discoverable via DHCP. In order to be discovered, the device has to match all properties of any of the matchers.\nYou can specify an item with \"registered_devices\" set to true to check for devices with MAC addresses specified in the device registry.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#dhcp", + "type": "array", + "items": { + "anyOf": [ + { + "type": "object", + "properties": { + "registered_devices": { + "description": "Whether the MAC addresses of devices in the device registry should be used for discovery, useful if the discovery is used to update the IP address of already registered devices.", + "const": true + } + }, + "additionalProperties": false + }, + { + "type": "object", + "properties": { + "hostname": { + "description": "The hostname or hostname pattern to match.", + "type": "string" + }, + "macaddress": { + "description": "The MAC address or MAC address pattern to match.", + "type": "string", + "maxLength": 12 + } + }, + "additionalProperties": false + } + ] + }, + "uniqueItems": true + }, + "usb": { + "description": "A list of matchers to find devices discoverable via USB. In order to be discovered, the device has to match all properties of any of the matchers.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#usb", + "type": "array", + "uniqueItems": true, + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "vid": { + "description": "The vendor ID to match.", + "type": "string", + "pattern": "[0-9A-F]{4}" + }, + "pid": { + "description": "The product ID to match.", + "type": "string", + "pattern": "[0-9A-F]{4}" + }, + "description": { + "description": "The USB device description to match.", + "type": "string" + }, + "manufacturer": { + "description": "The manufacturer to match.", + "type": "string" + }, + "serial_number": { + "description": "The serial number to match.", + "type": "string" + }, + "known_devices": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "documentation": { + "description": "The website containing the documentation for the integration. It has to be in the format \"https://www.home-assistant.io/integrations/[domain]\"\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#documentation", + "type": "string", + "pattern": "^https://www.home-assistant.io/integrations/[0-9a-z_]+$", + "format": "uri" + }, + "quality_scale": { + "description": "The quality scale of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-quality-scale", + "type": "string", + "enum": ["bronze", "silver", "gold", "platinum", "internal", "legacy"] + }, + "requirements": { + "description": "The PyPI package requirements for the integration. 
The package has to be pinned to a specific version.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#requirements", + "type": "array", + "items": { + "type": "string", + "pattern": ".+==.+" + }, + "uniqueItems": true + }, + "dependencies": { + "description": "A list of integrations which need to be loaded before this integration can be set up.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#dependencies", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "uniqueItems": true + }, + "after_dependencies": { + "description": "A list of integrations which need to be loaded before this integration is set up when it is configured. The integration will still be set up when the \"after_dependencies\" are not configured.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#after-dependencies", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "uniqueItems": true + }, + "codeowners": { + "description": "A list of GitHub usernames or GitHub team names of the integration owners.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#code-owners", + "type": "array", + "minItems": 0, + "items": { + "type": "string", + "pattern": "^@.+$" + }, + "uniqueItems": true + }, + "loggers": { + "description": "A list of logger names used by the requirements.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#loggers", + "type": "array", + "minItems": 1, + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "disabled": { + "description": "The reason for the integration being disabled.", + "type": "string" + }, + "iot_class": { + "description": "The IoT class of the integration, describing how the integration connects to the device or service.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#iot-class", + "type": "string", + "enum": [ + "assumed_state", + "cloud_polling", + "cloud_push", + "local_polling", + "local_push", + "calculated" + ] + }, + "single_config_entry": { + "description": "Whether the integration only supports a single config entry.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#single-config-entry-only", + "const": true + } + }, + "additionalProperties": false, + "required": ["domain", "name", "codeowners", "documentation"], + "dependencies": { + "mqtt": { + "anyOf": [ + { "required": ["dependencies"] }, + { "required": ["after_dependencies"] } + ] + } + } + } +} diff --git a/script/licenses.py b/script/licenses.py index 8a10b9a7530..464a2fc456b 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -2,12 +2,36 @@ from __future__ import annotations +from argparse import ArgumentParser, Namespace +from collections.abc import Sequence from dataclasses import dataclass +from importlib import metadata import json from pathlib import Path import sys +from typing import TypedDict, cast from awesomeversion import AwesomeVersion +from license_expression import ( + AND, + OR, + ExpressionError, + LicenseExpression, + LicenseSymbol, + get_spdx_licensing, +) + +licensing = get_spdx_licensing() + + +class PackageMetadata(TypedDict): + """Package metadata.""" + + name: str + version: str + license_expression: str | None + license_metadata: str | None + license_classifier: list[str] @dataclass @@ -15,19 +39,60 @@ class PackageDefinition: """Package definition.""" license: str + license_expression: str | None + license_metadata: str | None + license_classifier: list[str] name: str 
version: AwesomeVersion @classmethod - def from_dict(cls, data: dict[str, str]) -> PackageDefinition: - """Create a package definition from a dictionary.""" + def from_dict(cls, data: PackageMetadata) -> PackageDefinition: + """Create a package definition from PackageMetadata.""" + if not (license_str := "; ".join(data["license_classifier"])): + license_str = data["license_metadata"] or "UNKNOWN" return cls( - license=data["License"], - name=data["Name"], - version=AwesomeVersion(data["Version"]), + license=license_str, + license_expression=data["license_expression"], + license_metadata=data["license_metadata"], + license_classifier=data["license_classifier"], + name=data["name"], + version=AwesomeVersion(data["version"]), ) +# Incomplete list of OSI approved SPDX identifiers +# Add more as needed, see https://spdx.org/licenses/ +OSI_APPROVED_LICENSES_SPDX = { + "0BSD", + "AFL-2.1", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "Apache-2.0", + "BSD-1-Clause", + "BSD-2-Clause", + "BSD-3-Clause", + "EPL-1.0", + "EPL-2.0", + "GPL-2.0-only", + "GPL-2.0-or-later", + "GPL-3.0-only", + "GPL-3.0-or-later", + "HPND", + "ISC", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-3.0-only", + "LGPL-3.0-or-later", + "MIT", + "MIT-CMU", + "MPL-1.1", + "MPL-2.0", + "PSF-2.0", + "Unlicense", + "Zlib", + "ZPL-2.1", +} + OSI_APPROVED_LICENSES = { "Academic Free License (AFL)", "Apache Software License", @@ -96,13 +161,10 @@ OSI_APPROVED_LICENSES = { "Zero-Clause BSD (0BSD)", "Zope Public License", "zlib/libpng License", + # End license classifier "Apache License", "MIT", - "apache-2.0", - "GPL-3.0", - "GPLv3+", "MPL2", - "MPL-2.0", "Apache 2", "LGPL v3", "BSD", @@ -110,30 +172,16 @@ OSI_APPROVED_LICENSES = { "GPLv3", "Eclipse Public License v2.0", "ISC", - "GPL-2.0-only", - "mit", "GNU General Public License v3", - "Unlicense", - "Apache-2", "GPLv2", - "Python-2.0.1", } EXCEPTIONS = { "PyMicroBot", # https://github.com/spycle/pyMicroBot/pull/3 "PySwitchmate", # https://github.com/Danielhiversen/pySwitchmate/pull/16 "PyXiaomiGateway", # https://github.com/Danielhiversen/PyXiaomiGateway/pull/201 - "aiocomelit", # https://github.com/chemelli74/aiocomelit/pull/138 "aioecowitt", # https://github.com/home-assistant-libs/aioecowitt/pull/180 - "aioopenexchangerates", # https://github.com/MartinHjelmare/aioopenexchangerates/pull/94 - "aiooui", # https://github.com/Bluetooth-Devices/aiooui/pull/8 - "aioruuvigateway", # https://github.com/akx/aioruuvigateway/pull/6 - "aiovodafone", # https://github.com/chemelli74/aiovodafone/pull/131 - "airthings-ble", # https://github.com/Airthings/airthings-ble/pull/42 - "apple_weatherkit", # https://github.com/tjhorner/python-weatherkit/pull/3 - "asyncio", # PSF License "chacha20poly1305", # LGPL - "chacha20poly1305-reuseable", # Apache 2.0 or BSD 3-Clause "commentjson", # https://github.com/vaidik/commentjson/pull/55 "crownstone-cloud", # https://github.com/crownstone/crownstone-lib-python-cloud/pull/5 "crownstone-core", # https://github.com/crownstone/crownstone-lib-python-core/pull/6 @@ -141,102 +189,225 @@ EXCEPTIONS = { "crownstone-uart", # https://github.com/crownstone/crownstone-lib-python-uart/pull/12 "eliqonline", # https://github.com/molobrakos/eliqonline/pull/17 "enocean", # https://github.com/kipe/enocean/pull/142 - "gardena-bluetooth", # https://github.com/elupus/gardena-bluetooth/pull/11 - "heatmiserV3", # https://github.com/andylockran/heatmiserV3/pull/94 - "huum", # https://github.com/frwickst/pyhuum/pull/8 "imutils", # https://github.com/PyImageSearch/imutils/pull/292 
"iso4217", # Public domain "kiwiki_client", # https://github.com/c7h/kiwiki_client/pull/6 - "krakenex", # https://github.com/veox/python3-krakenex/pull/145 "ld2410-ble", # https://github.com/930913/ld2410-ble/pull/7 "maxcube-api", # https://github.com/uebelack/python-maxcube-api/pull/48 - "nessclient", # https://github.com/nickw444/nessclient/pull/65 "neurio", # https://github.com/jordanh/neurio-python/pull/13 "nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14 "pigpio", # https://github.com/joan2937/pigpio/pull/608 "pymitv", # MIT - "pyTibber", # https://github.com/Danielhiversen/pyTibber/pull/294 "pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5 "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41 "pysabnzbd", # https://github.com/jeradM/pysabnzbd/pull/6 "pyvera", # https://github.com/maximvelichko/pyvera/pull/164 - "pyxeoma", # https://github.com/jeradM/pyxeoma/pull/11 "repoze.lru", - "russound", # https://github.com/laf/russound/pull/14 # codespell:ignore laf - "ruuvitag-ble", # https://github.com/Bluetooth-Devices/ruuvitag-ble/pull/10 - "sensirion-ble", # https://github.com/akx/sensirion-ble/pull/9 "sharp_aquos_rc", # https://github.com/jmoore987/sharp_aquos_rc/pull/14 "tapsaff", # https://github.com/bazwilliams/python-taps-aff/pull/5 - "tellduslive", # https://github.com/molobrakos/tellduslive/pull/24 - "tellsticknet", # https://github.com/molobrakos/tellsticknet/pull/33 - "vincenty", # Public domain - "zeversolar", # https://github.com/kvanzuijlen/zeversolar/pull/46 } TODO = { "aiocache": AwesomeVersion( - "0.12.2" + "0.12.3" ), # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved? } +EXCEPTIONS_AND_TODOS = EXCEPTIONS.union(TODO) -def main() -> int: - """Run the main script.""" - raw_licenses = json.loads(Path("licenses.json").read_text()) - package_definitions = [PackageDefinition.from_dict(data) for data in raw_licenses] + +def check_licenses(args: CheckArgs) -> int: + """Check licenses are OSI approved.""" exit_code = 0 - for package in package_definitions: - previous_unapproved_version = TODO.get(package.name) - approved = False - for approved_license in OSI_APPROVED_LICENSES: - if approved_license in package.license: - approved = True - break - if previous_unapproved_version is not None: - if previous_unapproved_version < package.version: - if approved: - print( - "Approved license detected for " - f"{package.name}@{package.version}: {package.license}" - ) - print("Please remove the package from the TODO list.") - print() - else: - print( - "We could not detect an OSI-approved license for " - f"{package.name}@{package.version}: {package.license}" - ) - print() - exit_code = 1 - elif not approved and package.name not in EXCEPTIONS: - print( - "We could not detect an OSI-approved license for " - f"{package.name}@{package.version}: {package.license}" - ) - print() - exit_code = 1 - elif approved and package.name in EXCEPTIONS: + raw_licenses = json.loads(Path(args.path).read_text()) + license_status = { + pkg.name: (pkg, check_license_status(pkg)) + for data in raw_licenses + if (pkg := PackageDefinition.from_dict(data)) + } + + for name, version in TODO.items(): + pkg, status = license_status.get(name, (None, None)) + if pkg is None or not (version < pkg.version): + continue + assert status is not None + + if status is True: print( "Approved license detected for " - f"{package.name}@{package.version}: {package.license}" + f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" + "Please remove the package from 
the TODO list.\n" ) - print(f"Please remove the package from the EXCEPTIONS list: {package.name}") - print() - exit_code = 1 - current_packages = {package.name for package in package_definitions} - for package in [*TODO.keys(), *EXCEPTIONS]: - if package not in current_packages: + else: print( - f"Package {package} is tracked, but not used. Please remove from the licenses.py" - "file." + "We could not detect an OSI-approved license for " + f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" + "Please update the package version on the TODO list.\n" + ) + exit_code = 1 + + for pkg, status in license_status.values(): + if status is False and pkg.name not in EXCEPTIONS_AND_TODOS: + print( + "We could not detect an OSI-approved license for " + f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" ) - print() exit_code = 1 + if status is True and pkg.name in EXCEPTIONS: + print( + "Approved license detected for " + f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" + "Please remove the package from the EXCEPTIONS list.\n" + ) + exit_code = 1 + + for name in EXCEPTIONS_AND_TODOS.difference(license_status): + print( + f"Package {name} is tracked, but not used. " + "Please remove it from the licenses.py file.\n" + ) + exit_code = 1 + return exit_code +def check_license_status(package: PackageDefinition) -> bool: + """Check if package licenses is OSI approved.""" + if package.license_expression: + # Prefer 'License-Expression' if it exists + return check_license_expression(package.license_expression) or False + + if ( + package.license_metadata + and (check := check_license_expression(package.license_metadata)) is not None + ): + # Check license metadata if it's a valid SPDX license expression + return check + + for approved_license in OSI_APPROVED_LICENSES: + if approved_license in package.license: + return True + return False + + +def check_license_expression(license_str: str) -> bool | None: + """Check if license expression is a valid and approved SPDX license string.""" + if license_str == "UNKNOWN" or "\n" in license_str: + # Ignore common errors for license metadata values + return None + + try: + expr = licensing.parse(license_str, validate=True) + except ExpressionError: + return None + return check_spdx_license(expr) + + +def check_spdx_license(expr: LicenseExpression) -> bool: + """Check a SPDX license expression.""" + if isinstance(expr, LicenseSymbol): + return expr.key in OSI_APPROVED_LICENSES_SPDX + if isinstance(expr, OR): + return any(check_spdx_license(arg) for arg in expr.args) + if isinstance(expr, AND): + return all(check_spdx_license(arg) for arg in expr.args) + return False + + +def get_license_str(package: PackageDefinition) -> str: + """Return license string.""" + return ( + f"{package.license_expression} -- {package.license_metadata} " + f"-- {package.license_classifier}" + ) + + +def extract_licenses(args: ExtractArgs) -> int: + """Extract license data for installed packages.""" + licenses = sorted( + [get_package_metadata(dist) for dist in list(metadata.distributions())], + key=lambda dist: dist["name"], + ) + Path(args.output_file).write_text(json.dumps(licenses, indent=2)) + return 0 + + +def get_package_metadata(dist: metadata.Distribution) -> PackageMetadata: + """Get package metadata for distribution.""" + return { + "name": dist.name, + "version": dist.version, + "license_expression": dist.metadata.get("License-Expression"), + "license_metadata": dist.metadata.get("License"), + "license_classifier": extract_license_classifier( + 
dist.metadata.get_all("Classifier") + ), + } + + +def extract_license_classifier(classifiers: list[str] | None) -> list[str]: + """Extract license from list of classifiers. + + E.g. 'License :: OSI Approved :: MIT License' -> 'MIT License'. + Filter out bare 'License :: OSI Approved'. + """ + return [ + license_classifier + for classifier in classifiers or () + if classifier.startswith("License") + and (license_classifier := classifier.rpartition(" :: ")[2]) + and license_classifier != "OSI Approved" + ] + + +class ExtractArgs(Namespace): + """Extract arguments.""" + + output_file: str + + +class CheckArgs(Namespace): + """Check arguments.""" + + path: str + + +def main(argv: Sequence[str] | None = None) -> int: + """Run the main script.""" + parser = ArgumentParser() + subparsers = parser.add_subparsers(title="Subcommands", required=True) + + parser_extract = subparsers.add_parser("extract") + parser_extract.set_defaults(action="extract") + parser_extract.add_argument( + "--output-file", + default="licenses.json", + help="Path to store the licenses file", + ) + + parser_check = subparsers.add_parser("check") + parser_check.set_defaults(action="check") + parser_check.add_argument( + "path", + nargs="?", + metavar="PATH", + default="licenses.json", + help="Path to json licenses file", + ) + + argv = argv or sys.argv[1:] + args = parser.parse_args(argv) + + if args.action == "extract": + args = cast(ExtractArgs, args) + return extract_licenses(args) + if args.action == "check": + args = cast(CheckArgs, args) + if (exit_code := check_licenses(args)) == 0: + print("All licenses are approved!") + return exit_code + return 0 + + if __name__ == "__main__": - exit_code = main() - if exit_code == 0: - print("All licenses are approved!") - sys.exit(exit_code) + sys.exit(main()) diff --git a/script/lint_and_test.py b/script/lint_and_test.py index ff3db8aa1ed..fb350c113b9 100755 --- a/script/lint_and_test.py +++ b/script/lint_and_test.py @@ -9,6 +9,7 @@ from collections import namedtuple from contextlib import suppress import itertools import os +from pathlib import Path import re import shlex import sys @@ -63,7 +64,7 @@ async def async_exec(*args, display=False): """Execute, return code & log.""" argsp = [] for arg in args: - if os.path.isfile(arg): + if Path(arg).is_file(): argsp.append(f"\\\n {shlex.quote(arg)}") else: argsp.append(shlex.quote(arg)) @@ -132,7 +133,7 @@ async def ruff(files): async def lint(files): """Perform lint.""" - files = [file for file in files if os.path.isfile(file)] + files = [file for file in files if Path(file).is_file()] res = sorted( itertools.chain( *await asyncio.gather( @@ -164,7 +165,7 @@ async def lint(files): async def main(): """Run the main loop.""" # Ensure we are in the homeassistant root - os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) + os.chdir(Path(__file__).parent.parent) files = await git() if not files: @@ -194,7 +195,7 @@ async def main(): gen_req = True # requirements script for components # Find test files... 
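# Editor's note: a small illustration (not part of this patch) of the SPDX
# handling added to script/licenses.py earlier in this diff. A compound
# expression parses into an OR/AND node whose args are license symbols;
# check_spdx_license() approves an OR if any operand is OSI approved and an
# AND only if all operands are.
from license_expression import get_spdx_licensing

licensing = get_spdx_licensing()

expr = licensing.parse("MIT OR Apache-2.0", validate=True)
print(type(expr).__name__, [arg.key for arg in expr.args])
# (end of editor's example)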
if fname.startswith("tests/"): - if "/test_" in fname and os.path.isfile(fname): + if "/test_" in fname and Path(fname).is_file(): # All test helpers should be excluded test_files.add(fname) else: @@ -207,7 +208,7 @@ async def main(): else: parts[-1] = f"test_{parts[-1]}" fname = "/".join(parts) - if os.path.isfile(fname): + if Path(fname).is_file(): test_files.add(fname) if gen_req: diff --git a/script/scaffold/__main__.py b/script/scaffold/__main__.py index 45dbed790e6..93c787df50f 100644 --- a/script/scaffold/__main__.py +++ b/script/scaffold/__main__.py @@ -28,7 +28,7 @@ def get_arguments() -> argparse.Namespace: return parser.parse_args() -def main(): +def main() -> int: """Scaffold an integration.""" if not Path("requirements_all.txt").is_file(): print("Run from project root") diff --git a/script/scaffold/generate.py b/script/scaffold/generate.py index 0bee69b93f8..9ca5ead5719 100644 --- a/script/scaffold/generate.py +++ b/script/scaffold/generate.py @@ -19,7 +19,7 @@ def generate(template: str, info: Info) -> None: print() -def _generate(src_dir, target_dir, info: Info) -> None: +def _generate(src_dir: Path, target_dir: Path, info: Info) -> None: """Generate an integration.""" replaces = {"NEW_DOMAIN": info.domain, "NEW_NAME": info.name} diff --git a/script/scaffold/templates/config_flow/integration/config_flow.py b/script/scaffold/templates/config_flow/integration/config_flow.py index 0bff976f288..06db7592840 100644 --- a/script/scaffold/templates/config_flow/integration/config_flow.py +++ b/script/scaffold/templates/config_flow/integration/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for NEW_NAME integration.""" +"""Config flow for the NEW_NAME integration.""" from __future__ import annotations diff --git a/script/scaffold/templates/config_flow_discovery/integration/config_flow.py b/script/scaffold/templates/config_flow_discovery/integration/config_flow.py index e2cfed40e1d..570b70b85aa 100644 --- a/script/scaffold/templates/config_flow_discovery/integration/config_flow.py +++ b/script/scaffold/templates/config_flow_discovery/integration/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for NEW_NAME.""" +"""Config flow for the NEW_NAME integration.""" import my_pypi_dependency diff --git a/script/scaffold/templates/config_flow_helper/integration/config_flow.py b/script/scaffold/templates/config_flow_helper/integration/config_flow.py index 5d89fec2da2..c2ab7a205da 100644 --- a/script/scaffold/templates/config_flow_helper/integration/config_flow.py +++ b/script/scaffold/templates/config_flow_helper/integration/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for NEW_NAME integration.""" +"""Config flow for the NEW_NAME integration.""" from __future__ import annotations diff --git a/script/scaffold/templates/config_flow_oauth2/integration/api.py b/script/scaffold/templates/config_flow_oauth2/integration/api.py index 3f4aa3cfb82..9516dd99122 100644 --- a/script/scaffold/templates/config_flow_oauth2/integration/api.py +++ b/script/scaffold/templates/config_flow_oauth2/integration/api.py @@ -49,7 +49,6 @@ class AsyncConfigEntryAuth(my_pypi_package.AbstractAuth): async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token["access_token"] diff --git a/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py 
b/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py index 51ef70b1885..0f01c8402df 100644 --- a/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py +++ b/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py @@ -1,11 +1,9 @@ -"""application_credentials platform the NEW_NAME integration.""" +"""Application credentials platform for the NEW_NAME integration.""" from homeassistant.components.application_credentials import AuthorizationServer from homeassistant.core import HomeAssistant -# TODO Update with your own urls -OAUTH2_AUTHORIZE = "https://www.example.com/auth/authorize" -OAUTH2_TOKEN = "https://www.example.com/auth/token" +from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: diff --git a/script/scaffold/templates/device_trigger/tests/test_device_trigger.py b/script/scaffold/templates/device_trigger/tests/test_device_trigger.py index 7e4f88261bc..1693049ae4c 100644 --- a/script/scaffold/templates/device_trigger/tests/test_device_trigger.py +++ b/script/scaffold/templates/device_trigger/tests/test_device_trigger.py @@ -109,14 +109,16 @@ async def test_if_fires_on_state_change( hass.states.async_set("NEW_DOMAIN.entity", STATE_ON) await hass.async_block_till_done() assert len(service_calls) == 1 - assert service_calls[0].data[ - "some" - ] == "turn_on - device - {} - off - on - None - 0".format("NEW_DOMAIN.entity") + assert ( + service_calls[0].data["some"] + == "turn_on - device - NEW_DOMAIN.entity - off - on - None - 0" + ) # Fake that the entity is turning off. hass.states.async_set("NEW_DOMAIN.entity", STATE_OFF) await hass.async_block_till_done() assert len(service_calls) == 2 - assert service_calls[1].data[ - "some" - ] == "turn_off - device - {} - on - off - None - 0".format("NEW_DOMAIN.entity") + assert ( + service_calls[1].data["some"] + == "turn_off - device - NEW_DOMAIN.entity - on - off - None - 0" + ) diff --git a/script/scaffold/templates/integration/integration/quality_scale.yaml b/script/scaffold/templates/integration/integration/quality_scale.yaml new file mode 100644 index 00000000000..201a91652e5 --- /dev/null +++ b/script/scaffold/templates/integration/integration/quality_scale.yaml @@ -0,0 +1,60 @@ +rules: + # Bronze + action-setup: todo + appropriate-polling: todo + brands: todo + common-modules: todo + config-flow-test-coverage: todo + config-flow: todo + dependency-transparency: todo + docs-actions: todo + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: todo + entity-unique-id: todo + has-entity-name: todo + runtime-data: todo + test-before-configure: todo + test-before-setup: todo + unique-config-entry: todo + + # Silver + action-exceptions: todo + config-entry-unloading: todo + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: todo + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: todo + diagnostics: todo + discovery-update-info: todo + discovery: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + 
entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/script/split_tests.py b/script/split_tests.py index 8da03bd749b..c64de46a068 100755 --- a/script/split_tests.py +++ b/script/split_tests.py @@ -49,16 +49,27 @@ class BucketHolder: test_folder.get_all_flatten(), reverse=True, key=lambda x: x.total_tests ) for tests in sorted_tests: - print(f"{tests.total_tests:>{digits}} tests in {tests.path}") if tests.added_to_bucket: # Already added to bucket continue + print(f"{tests.total_tests:>{digits}} tests in {tests.path}") smallest_bucket = min(self._buckets, key=lambda x: x.total_tests) + is_file = isinstance(tests, TestFile) if ( smallest_bucket.total_tests + tests.total_tests < self._tests_per_bucket - ) or isinstance(tests, TestFile): + ) or is_file: smallest_bucket.add(tests) + # Ensure all files from the same folder are in the same bucket + # to ensure that syrupy correctly identifies unused snapshots + if is_file: + for other_test in tests.parent.children.values(): + if other_test is tests or isinstance(other_test, TestFolder): + continue + print( + f"{other_test.total_tests:>{digits}} tests in {other_test.path} (same bucket)" + ) + smallest_bucket.add(other_test) # verify that all tests are added to a bucket if not test_folder.added_to_bucket: @@ -66,7 +77,7 @@ class BucketHolder: def create_ouput_file(self) -> None: """Create output file.""" - with open("pytest_buckets.txt", "w") as file: + with Path("pytest_buckets.txt").open("w") as file: for idx, bucket in enumerate(self._buckets): print(f"Bucket {idx+1} has {bucket.total_tests} tests") file.write(bucket.get_paths_line()) @@ -79,6 +90,7 @@ class TestFile: total_tests: int path: Path added_to_bucket: bool = field(default=False, init=False) + parent: TestFolder | None = field(default=None, init=False) def add_to_bucket(self) -> None: """Add test file to bucket.""" @@ -125,6 +137,7 @@ class TestFolder: def add_test_file(self, file: TestFile) -> None: """Add test file to folder.""" path = file.path + file.parent = self relative_path = path.relative_to(self.path) if not relative_path.parts: raise ValueError("Path is not a child of this folder") diff --git a/script/translations/deduplicate.py b/script/translations/deduplicate.py index 8cc4cee3b10..f92f90115ce 100644 --- a/script/translations/deduplicate.py +++ b/script/translations/deduplicate.py @@ -7,8 +7,7 @@ from pathlib import Path from homeassistant.const import Platform from . import upload -from .develop import flatten_translations -from .util import get_base_arg_parser, load_json_from_path +from .util import flatten_translations, get_base_arg_parser, load_json_from_path def get_arguments() -> argparse.Namespace: diff --git a/script/translations/develop.py b/script/translations/develop.py index 00465e1bc24..9e3a2ded046 100644 --- a/script/translations/develop.py +++ b/script/translations/develop.py @@ -9,7 +9,7 @@ import sys from . 
import download, upload from .const import INTEGRATIONS_DIR -from .util import get_base_arg_parser +from .util import flatten_translations, get_base_arg_parser def valid_integration(integration): @@ -32,29 +32,6 @@ def get_arguments() -> argparse.Namespace: return parser.parse_args() -def flatten_translations(translations): - """Flatten all translations.""" - stack = [iter(translations.items())] - key_stack = [] - flattened_translations = {} - while stack: - for k, v in stack[-1]: - key_stack.append(k) - if isinstance(v, dict): - stack.append(iter(v.items())) - break - if isinstance(v, str): - common_key = "::".join(key_stack) - flattened_translations[common_key] = v - key_stack.pop() - else: - stack.pop() - if key_stack: - key_stack.pop() - - return flattened_translations - - def substitute_translation_references(integration_strings, flattened_translations): """Recursively processes all translation strings for the integration.""" result = {} diff --git a/script/translations/download.py b/script/translations/download.py index 8f7327c07ec..3fa7065d058 100755 --- a/script/translations/download.py +++ b/script/translations/download.py @@ -4,17 +4,17 @@ from __future__ import annotations import json -import os -import pathlib +from pathlib import Path import re import subprocess +from typing import Any from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR from .error import ExitApp -from .util import get_lokalise_token, load_json_from_path +from .util import flatten_translations, get_lokalise_token, load_json_from_path FILENAME_FORMAT = re.compile(r"strings\.(?P\w+)\.json") -DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute() +DOWNLOAD_DIR = Path("build/translations-download").absolute() def run_download_docker(): @@ -56,35 +56,32 @@ def run_download_docker(): raise ExitApp("Failed to download translations") -def save_json(filename: str, data: list | dict): - """Save JSON data to a file. - - Returns True on success. - """ - data = json.dumps(data, sort_keys=True, indent=4) - with open(filename, "w", encoding="utf-8") as fdesc: - fdesc.write(data) - return True - return False +def save_json(filename: Path, data: list | dict) -> None: + """Save JSON data to a file.""" + filename.write_text(json.dumps(data, sort_keys=True, indent=4), encoding="utf-8") -def get_component_path(lang, component): +def get_component_path(lang, component) -> Path | None: """Get the component translation path.""" - if os.path.isdir(os.path.join("homeassistant", "components", component)): - return os.path.join( - "homeassistant", "components", component, "translations", f"{lang}.json" + if (Path("homeassistant") / "components" / component).is_dir(): + return ( + Path("homeassistant") + / "components" + / component + / "translations" + / f"{lang}.json" ) return None -def get_platform_path(lang, component, platform): +def get_platform_path(lang, component, platform) -> Path: """Get the platform translation path.""" - return os.path.join( - "homeassistant", - "components", - component, - "translations", - f"{platform}.{lang}.json", + return ( + Path("homeassistant") + / "components" + / component + / "translations" + / f"{platform}.{lang}.json" ) @@ -107,7 +104,15 @@ def save_language_translations(lang, translations): f"Skipping {lang} for {component}, as the integration doesn't seem to exist." 
) continue - os.makedirs(os.path.dirname(path), exist_ok=True) + if not ( + Path("homeassistant") / "components" / component / "strings.json" + ).exists(): + print( + f"Skipping {lang} for {component}, as the integration doesn't have a strings.json file." + ) + continue + path.parent.mkdir(parents=True, exist_ok=True) + base_translations = pick_keys(component, base_translations) save_json(path, base_translations) if "platform" not in component_translations: @@ -117,7 +122,7 @@ def save_language_translations(lang, translations): "platform" ].items(): path = get_platform_path(lang, component, platform) - os.makedirs(os.path.dirname(path), exist_ok=True) + path.parent.mkdir(parents=True, exist_ok=True) save_json(path, platform_translations) @@ -135,6 +140,32 @@ def delete_old_translations(): fil.unlink() +def get_current_keys(component: str) -> dict[str, Any]: + """Get the current keys for a component.""" + strings_path = Path("homeassistant") / "components" / component / "strings.json" + return load_json_from_path(strings_path) + + +def pick_keys(component: str, translations: dict[str, Any]) -> dict[str, Any]: + """Pick the keys that are in the current strings.""" + flat_translations = flatten_translations(translations) + flat_current_keys = flatten_translations(get_current_keys(component)) + flatten_result = {} + for key in flat_current_keys: + if key in flat_translations: + flatten_result[key] = flat_translations[key] + result = {} + for key, value in flatten_result.items(): + parts = key.split("::") + d = result + for part in parts[:-1]: + if part not in d: + d[part] = {} + d = d[part] + d[parts[-1]] = value + return result + + def run(): """Run the script.""" DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True) diff --git a/script/translations/util.py b/script/translations/util.py index 8892bb46b7a..d78b2c4faff 100644 --- a/script/translations/util.py +++ b/script/translations/util.py @@ -66,3 +66,26 @@ def load_json_from_path(path: pathlib.Path) -> Any: return json.loads(path.read_text()) except json.JSONDecodeError as err: raise JSONDecodeErrorWithPath(err.msg, err.doc, err.pos, path) from err + + +def flatten_translations(translations): + """Flatten all translations.""" + stack = [iter(translations.items())] + key_stack = [] + flattened_translations = {} + while stack: + for k, v in stack[-1]: + key_stack.append(k) + if isinstance(v, dict): + stack.append(iter(v.items())) + break + if isinstance(v, str): + common_key = "::".join(key_stack) + flattened_translations[common_key] = v + key_stack.pop() + else: + stack.pop() + if key_stack: + key_stack.pop() + + return flattened_translations diff --git a/script/version_bump.py b/script/version_bump.py index fb4fe2f7868..ff94c01a5a2 100755 --- a/script/version_bump.py +++ b/script/version_bump.py @@ -2,6 +2,7 @@ """Helper script to bump the current version.""" import argparse +from pathlib import Path import re import subprocess @@ -110,8 +111,7 @@ def bump_version( def write_version(version): """Update Home Assistant constant file with new version.""" - with open("homeassistant/const.py") as fil: - content = fil.read() + content = Path("homeassistant/const.py").read_text() major, minor, patch = str(version).split(".", 2) @@ -125,25 +125,21 @@ def write_version(version): "PATCH_VERSION: Final = .*\n", f'PATCH_VERSION: Final = "{patch}"\n', content ) - with open("homeassistant/const.py", "w") as fil: - fil.write(content) + Path("homeassistant/const.py").write_text(content) def write_version_metadata(version: Version) -> None: """Update 
pyproject.toml file with new version.""" - with open("pyproject.toml", encoding="utf8") as fp: - content = fp.read() + content = Path("pyproject.toml").read_text(encoding="utf8") content = re.sub(r"(version\W+=\W).+\n", f'\\g<1>"{version}"\n', content, count=1) - with open("pyproject.toml", "w", encoding="utf8") as fp: - fp.write(content) + Path("pyproject.toml").write_text(content, encoding="utf8") def write_ci_workflow(version: Version) -> None: """Update ci workflow with new version.""" - with open(".github/workflows/ci.yaml") as fp: - content = fp.read() + content = Path(".github/workflows/ci.yaml").read_text() short_version = ".".join(str(version).split(".", maxsplit=2)[:2]) content = re.sub( @@ -153,8 +149,7 @@ def write_ci_workflow(version: Version) -> None: count=1, ) - with open(".github/workflows/ci.yaml", "w") as fp: - fp.write(content) + Path(".github/workflows/ci.yaml").write_text(content) def main() -> None: diff --git a/tests/auth/mfa_modules/test_notify.py b/tests/auth/mfa_modules/test_notify.py index d6f4d80f99e..8047ba2fef3 100644 --- a/tests/auth/mfa_modules/test_notify.py +++ b/tests/auth/mfa_modules/test_notify.py @@ -165,8 +165,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test-notify" message = notify_call.data["message"] - message.hass = hass - assert MOCK_CODE in message.async_render() + assert MOCK_CODE in message with patch("pyotp.HOTP.verify", return_value=False): result = await hass.auth.login_flow.async_configure( @@ -224,8 +223,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test-notify" message = notify_call.data["message"] - message.hass = hass - assert MOCK_CODE in message.async_render() + assert MOCK_CODE in message with patch("pyotp.HOTP.verify", return_value=True): result = await hass.auth.login_flow.async_configure( @@ -264,8 +262,7 @@ async def test_setup_user_notify_service(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test1" message = notify_call.data["message"] - message.hass = hass - assert MOCK_CODE in message.async_render() + assert MOCK_CODE in message with patch("pyotp.HOTP.at", return_value=MOCK_CODE_2): step = await flow.async_step_setup({"code": "invalid"}) @@ -281,8 +278,7 @@ async def test_setup_user_notify_service(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test1" message = notify_call.data["message"] - message.hass = hass - assert MOCK_CODE_2 in message.async_render() + assert MOCK_CODE_2 in message with patch("pyotp.HOTP.verify", return_value=True): step = await flow.async_step_setup({"code": MOCK_CODE_2}) diff --git a/tests/auth/test_jwt_wrapper.py b/tests/auth/test_jwt_wrapper.py index 297d4dd5d7f..f9295a7791c 100644 --- a/tests/auth/test_jwt_wrapper.py +++ b/tests/auth/test_jwt_wrapper.py @@ -6,6 +6,12 @@ import pytest from homeassistant.auth import jwt_wrapper +async def test_all_default_options_are_in_verify_options() -> None: + """Test that all default options in _VERIFY_OPTIONS.""" + for option in jwt_wrapper._PyJWTWithVerify._get_default_options(): + assert option in jwt_wrapper._VERIFY_OPTIONS + + async def test_reject_access_token_with_impossible_large_size() -> None: """Test rejecting access tokens with impossible sizes.""" with pytest.raises(jwt.DecodeError): diff --git a/tests/common.py b/tests/common.py index 
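Editor's note: `write_version_metadata` keeps the same single regex substitution and only swaps `open()` for `Path.read_text`/`write_text`. A quick standalone check of what that substitution does (the version numbers below are placeholders, not values from the diff):

```python
# The first `version = "..."` assignment is rewritten; count=1 means any later
# occurrence of the word "version" in the file is left untouched.
import re

content = 'name = "homeassistant"\nversion = "2024.11.0"\n'
bumped = re.sub(r"(version\W+=\W).+\n", '\\g<1>"2024.12.0"\n', content, count=1)
assert bumped == 'name = "homeassistant"\nversion = "2024.12.0"\n'
```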
893c9ffcd67..d2b0dff8faa 100644 --- a/tests/common.py +++ b/tests/common.py @@ -47,7 +47,7 @@ from homeassistant.components.device_automation import ( # noqa: F401 _async_get_device_automation_capabilities as async_get_device_automation_capabilities, ) from homeassistant.config import IntegrationConfigInfo, async_process_component_config -from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import ( DEVICE_DEFAULT_NAME, EVENT_HOMEASSISTANT_CLOSE, @@ -419,7 +419,7 @@ def async_fire_mqtt_message( from paho.mqtt.client import MQTTMessage # pylint: disable-next=import-outside-toplevel - from homeassistant.components.mqtt.models import MqttData + from homeassistant.components.mqtt import MqttData if isinstance(payload, str): payload = payload.encode("utf-8") @@ -491,7 +491,7 @@ _MONOTONIC_RESOLUTION = time.get_clock_info("monotonic").resolution def _async_fire_time_changed( hass: HomeAssistant, utc_datetime: datetime | None, fire_all: bool ) -> None: - timestamp = dt_util.utc_to_timestamp(utc_datetime) + timestamp = utc_datetime.timestamp() for task in list(get_scheduled_timer_handles(hass.loop)): if not isinstance(task, asyncio.TimerHandle): continue @@ -990,6 +990,7 @@ class MockConfigEntry(config_entries.ConfigEntry): *, data=None, disabled_by=None, + discovery_keys=None, domain="test", entry_id=None, minor_version=1, @@ -999,20 +1000,24 @@ class MockConfigEntry(config_entries.ConfigEntry): reason=None, source=config_entries.SOURCE_USER, state=None, + subentries_data=None, title="Mock Title", unique_id=None, version=1, ) -> None: """Initialize a mock config entry.""" + discovery_keys = discovery_keys or {} kwargs = { "data": data or {}, "disabled_by": disabled_by, + "discovery_keys": discovery_keys, "domain": domain, "entry_id": entry_id or ulid_util.ulid_now(), "minor_version": minor_version, "options": options or {}, "pref_disable_new_entities": pref_disable_new_entities, "pref_disable_polling": pref_disable_polling, + "subentries_data": subentries_data or (), "title": title, "unique_id": unique_id, "version": version, @@ -1054,6 +1059,60 @@ class MockConfigEntry(config_entries.ConfigEntry): """ self._async_set_state(hass, state, reason) + async def start_reauth_flow( + self, + hass: HomeAssistant, + context: dict[str, Any] | None = None, + data: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Start a reauthentication flow.""" + if self.entry_id not in hass.config_entries._entries: + raise ValueError("Config entry must be added to hass to start reauth flow") + return await start_reauth_flow(hass, self, context, data) + + async def start_reconfigure_flow( + self, + hass: HomeAssistant, + *, + show_advanced_options: bool = False, + ) -> ConfigFlowResult: + """Start a reconfiguration flow.""" + if self.entry_id not in hass.config_entries._entries: + raise ValueError( + "Config entry must be added to hass to start reconfiguration flow" + ) + return await hass.config_entries.flow.async_init( + self.domain, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": self.entry_id, + "show_advanced_options": show_advanced_options, + }, + ) + + +async def start_reauth_flow( + hass: HomeAssistant, + entry: ConfigEntry, + context: dict[str, Any] | None = None, + data: dict[str, Any] | None = None, +) -> ConfigFlowResult: + """Start a reauthentication flow for a config entry. + + This helper method should be aligned with `ConfigEntry._async_init_reauth`. 
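Editor's note: the new `MockConfigEntry.start_reauth_flow` / `start_reconfigure_flow` helpers (and the module-level `start_reauth_flow`) replace hand-rolled `flow.async_init(..., context={"source": SOURCE_REAUTH, ...})` boilerplate in integration tests; the abode config-flow test later in this diff already uses them. A hedged usage sketch, running inside an async Home Assistant test (the domain, data and expected result are placeholders that depend on the integration under test):

```python
# Assumes: from tests.common import MockConfigEntry
#          from homeassistant.data_entry_flow import FlowResultType
entry = MockConfigEntry(domain="abode", data={"username": "user@email.com"})
entry.add_to_hass(hass)  # required: the helper raises ValueError otherwise

result = await entry.start_reauth_flow(hass)
# The first step is whatever the integration's async_step_reauth presents,
# e.g. a "reauth_confirm" form in the abode tests below.
assert result["type"] is FlowResultType.FORM
```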
+ """ + return await hass.config_entries.flow.async_init( + entry.domain, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "title_placeholders": {"name": entry.title}, + "unique_id": entry.unique_id, + } + | (context or {}), + data=entry.data | (data or {}), + ) + def patch_yaml_files(files_dict, endswith=True): """Patch load_yaml with a dictionary of yaml files.""" @@ -1758,3 +1817,20 @@ async def snapshot_platform( state = hass.states.get(entity_entry.entity_id) assert state, f"State not found for {entity_entry.entity_id}" assert state == snapshot(name=f"{entity_entry.entity_id}-state") + + +def reset_translation_cache(hass: HomeAssistant, components: list[str]) -> None: + """Reset translation cache for specified components. + + Use this if you are mocking a core component (for example via + mock_integration), to ensure that the mocked translations are not + persisted in the shared session cache. + """ + translations_cache = translation._async_get_translations_cache(hass) + for loaded_components in translations_cache.cache_data.loaded.values(): + for component_to_unload in components: + loaded_components.discard(component_to_unload) + for loaded_categories in translations_cache.cache_data.cache.values(): + for loaded_components in loaded_categories.values(): + for component_to_unload in components: + loaded_components.pop(component_to_unload, None) diff --git a/tests/components/abode/test_alarm_control_panel.py b/tests/components/abode/test_alarm_control_panel.py index 51e0ee46838..025afa74b80 100644 --- a/tests/components/abode/test_alarm_control_panel.py +++ b/tests/components/abode/test_alarm_control_panel.py @@ -3,7 +3,10 @@ from unittest.mock import PropertyMock, patch from homeassistant.components.abode import ATTR_DEVICE_ID -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -11,9 +14,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -39,7 +39,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, ALARM_DOMAIN) state = hass.states.get(DEVICE_ID) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED assert state.attributes.get(ATTR_DEVICE_ID) == "area_1" assert not state.attributes.get("battery_backup") assert not state.attributes.get("cellular_backup") @@ -75,7 +75,7 @@ async def test_set_alarm_away(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(DEVICE_ID) - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY async def test_set_alarm_home(hass: HomeAssistant) -> None: @@ -105,7 +105,7 @@ async def test_set_alarm_home(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(DEVICE_ID) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME async def test_set_alarm_standby(hass: HomeAssistant) -> None: @@ -134,7 +134,7 @@ async def test_set_alarm_standby(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = 
hass.states.get(DEVICE_ID) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED async def test_state_unknown(hass: HomeAssistant) -> None: diff --git a/tests/components/abode/test_camera.py b/tests/components/abode/test_camera.py index 5cf3263876b..1fcf250935e 100644 --- a/tests/components/abode/test_camera.py +++ b/tests/components/abode/test_camera.py @@ -3,8 +3,8 @@ from unittest.mock import patch from homeassistant.components.abode.const import DOMAIN as ABODE_DOMAIN -from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, STATE_IDLE +from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN, CameraState +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -26,7 +26,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, CAMERA_DOMAIN) state = hass.states.get("camera.test_cam") - assert state.state == STATE_IDLE + assert state.state == CameraState.IDLE async def test_capture_image(hass: HomeAssistant) -> None: diff --git a/tests/components/abode/test_config_flow.py b/tests/components/abode/test_config_flow.py index 265a77560f7..2abed387566 100644 --- a/tests/components/abode/test_config_flow.py +++ b/tests/components/abode/test_config_flow.py @@ -10,9 +10,8 @@ from jaraco.abode.helpers.errors import MFA_CODE_REQUIRED import pytest from requests.exceptions import ConnectTimeout -from homeassistant.components.abode import config_flow from homeassistant.components.abode.const import CONF_POLLING, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -22,114 +21,110 @@ from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -async def test_show_form(hass: HomeAssistant) -> None: - """Test that the form is served with no input.""" - flow = config_flow.AbodeFlowHandler() - flow.hass = hass - - result = await flow.async_step_user(user_input=None) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - async def test_one_config_allowed(hass: HomeAssistant) -> None: """Test that only one Abode configuration is allowed.""" - flow = config_flow.AbodeFlowHandler() - flow.hass = hass - MockConfigEntry( domain=DOMAIN, data={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, ).add_to_hass(hass) - step_user_result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) - assert step_user_result["type"] is FlowResultType.ABORT - assert step_user_result["reason"] == "single_instance_allowed" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" -async def test_invalid_credentials(hass: HomeAssistant) -> None: - """Test that invalid credentials throws an error.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - - flow = config_flow.AbodeFlowHandler() - flow.hass = hass +async def test_user_flow(hass: HomeAssistant) -> None: + """Test user flow, with various errors.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is 
FlowResultType.FORM + assert result["step_id"] == "user" + # Test that invalid credentials throws an error. with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException( (HTTPStatus.BAD_REQUEST, "auth error") ), ): - result = await flow.async_step_user(user_input=conf) - assert result["errors"] == {"base": "invalid_auth"} - - -async def test_connection_auth_error(hass: HomeAssistant) -> None: - """Test other than invalid credentials throws an error.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - - flow = config_flow.AbodeFlowHandler() - flow.hass = hass + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "invalid_auth"} + # Test other than invalid credentials throws an error. with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException( (HTTPStatus.INTERNAL_SERVER_ERROR, "connection error") ), ): - result = await flow.async_step_user(user_input=conf) - assert result["errors"] == {"base": "cannot_connect"} - - -async def test_connection_error(hass: HomeAssistant) -> None: - """Test login throws an error if connection times out.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - - flow = config_flow.AbodeFlowHandler() - flow.hass = hass + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + # Test login throws an error if connection times out. 
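Editor's note: the rewritten abode config-flow tests drive a single flow through the real flow manager instead of instantiating `AbodeFlowHandler` directly; after a recoverable error the same `flow_id` stays open and can be re-submitted once the patched client stops raising. The general shape of that pattern (domain, credentials and the error key are placeholders; this runs inside an async Home Assistant test with the client patched to fail):

```python
result = await hass.config_entries.flow.async_init(
    DOMAIN, context={"source": SOURCE_USER}
)
# First submission fails, so the form re-renders with an error on the same flow.
result = await hass.config_entries.flow.async_configure(
    result["flow_id"],
    user_input={"username": "user@email.com", "password": "pw"},
)
assert result["errors"] == {"base": "cannot_connect"}
# The same flow_id can then be submitted again once the patch is lifted.
```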
with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=ConnectTimeout, ): - result = await flow.async_step_user(user_input=conf) - assert result["errors"] == {"base": "cannot_connect"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} - -async def test_step_user(hass: HomeAssistant) -> None: - """Test that the user step works.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} + # Test success + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) with patch("homeassistant.components.abode.config_flow.Abode"): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=conf + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "user@email.com" - assert result["data"] == { - CONF_USERNAME: "user@email.com", - CONF_PASSWORD: "password", - CONF_POLLING: False, - } + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "user@email.com" + assert result["data"] == { + CONF_USERNAME: "user@email.com", + CONF_PASSWORD: "password", + CONF_POLLING: False, + } async def test_step_mfa(hass: HomeAssistant) -> None: """Test that the MFA step works.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException(MFA_CODE_REQUIRED), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=conf + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "mfa" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mfa" with patch( "homeassistant.components.abode.config_flow.Abode", @@ -141,49 +136,51 @@ async def test_step_mfa(hass: HomeAssistant) -> None: result["flow_id"], user_input={"mfa_code": "123456"} ) - assert result["errors"] == {"base": "invalid_mfa_code"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mfa" + assert result["errors"] == {"base": "invalid_mfa_code"} with patch("homeassistant.components.abode.config_flow.Abode"): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"mfa_code": "123456"} ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "user@email.com" - assert result["data"] == { - CONF_USERNAME: "user@email.com", - CONF_PASSWORD: "password", - CONF_POLLING: False, - } + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "user@email.com" + assert result["data"] == { + CONF_USERNAME: "user@email.com", + CONF_PASSWORD: "password", + CONF_POLLING: False, + } async def test_step_reauth(hass: HomeAssistant) -> None: """Test the reauth flow.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - - 
MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id="user@email.com", - data=conf, - ).add_to_hass(hass) + data={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, + ) + entry.add_to_hass(hass) - with patch("homeassistant.components.abode.config_flow.Abode"): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=conf, + result = await entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + with ( + patch("homeassistant.components.abode.config_flow.Abode"), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "user@email.com", + CONF_PASSWORD: "new_password", + }, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" - with patch("homeassistant.config_entries.ConfigEntries.async_reload"): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=conf, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - - assert len(hass.config_entries.async_entries()) == 1 + assert len(hass.config_entries.async_entries()) == 1 + assert entry.data[CONF_PASSWORD] == "new_password" diff --git a/tests/components/abode/test_cover.py b/tests/components/abode/test_cover.py index cdbec0ddf68..4a49648516d 100644 --- a/tests/components/abode/test_cover.py +++ b/tests/components/abode/test_cover.py @@ -3,13 +3,12 @@ from unittest.mock import patch from homeassistant.components.abode import ATTR_DEVICE_ID -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverState from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, - STATE_CLOSED, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -34,7 +33,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, COVER_DOMAIN) state = hass.states.get(DEVICE_ID) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000007" assert not state.attributes.get("battery_low") assert not state.attributes.get("no_response") diff --git a/tests/components/abode/test_init.py b/tests/components/abode/test_init.py index 9fca6dcbdd3..ed71cb550a7 100644 --- a/tests/components/abode/test_init.py +++ b/tests/components/abode/test_init.py @@ -13,7 +13,6 @@ from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType from .common import setup_platform @@ -63,25 +62,23 @@ async def test_unload_entry(hass: HomeAssistant) -> None: async def test_invalid_credentials(hass: HomeAssistant) -> None: """Test Abode credentials changing.""" - with ( - patch( - "homeassistant.components.abode.Abode", - side_effect=AbodeAuthenticationException( - (HTTPStatus.BAD_REQUEST, "auth error") - ), + with patch( + "homeassistant.components.abode.Abode", + side_effect=AbodeAuthenticationException( + (HTTPStatus.BAD_REQUEST, "auth error") ), - patch( - 
"homeassistant.components.abode.config_flow.AbodeFlowHandler.async_step_reauth", - return_value={ - "type": FlowResultType.FORM, - "flow_id": "mock_flow", - "step_id": "reauth_confirm", - }, - ) as mock_async_step_reauth, ): - await setup_platform(hass, ALARM_DOMAIN) + config_entry = await setup_platform(hass, ALARM_DOMAIN) + await hass.async_block_till_done() - mock_async_step_reauth.assert_called_once() + assert config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" + + hass.config_entries.flow.async_abort(flows[0]["flow_id"]) + assert not hass.config_entries.flow.async_progress() async def test_raise_config_entry_not_ready_when_offline(hass: HomeAssistant) -> None: diff --git a/tests/components/abode/test_light.py b/tests/components/abode/test_light.py index fc9000a39f8..4be94a09ee8 100644 --- a/tests/components/abode/test_light.py +++ b/tests/components/abode/test_light.py @@ -6,7 +6,7 @@ from homeassistant.components.abode import ATTR_DEVICE_ID from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, @@ -45,8 +45,8 @@ async def test_attributes(hass: HomeAssistant) -> None: state = hass.states.get(DEVICE_ID) assert state.state == STATE_ON assert state.attributes.get(ATTR_BRIGHTNESS) == 204 - assert state.attributes.get(ATTR_RGB_COLOR) == (0, 63, 255) - assert state.attributes.get(ATTR_COLOR_TEMP) is None + assert state.attributes.get(ATTR_RGB_COLOR) == (0, 64, 255) + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None assert state.attributes.get(ATTR_DEVICE_ID) == "ZB:db5b1a" assert not state.attributes.get("battery_low") assert not state.attributes.get("no_response") diff --git a/tests/components/abode/test_lock.py b/tests/components/abode/test_lock.py index 6be1aef22ca..fe203d0b0f4 100644 --- a/tests/components/abode/test_lock.py +++ b/tests/components/abode/test_lock.py @@ -3,13 +3,12 @@ from unittest.mock import patch from homeassistant.components.abode import ATTR_DEVICE_ID -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, SERVICE_LOCK, SERVICE_UNLOCK, - STATE_LOCKED, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -34,7 +33,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, LOCK_DOMAIN) state = hass.states.get(DEVICE_ID) - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000004" assert not state.attributes.get("battery_low") assert not state.attributes.get("no_response") diff --git a/tests/components/acaia/__init__.py b/tests/components/acaia/__init__.py new file mode 100644 index 00000000000..f4eaa39e615 --- /dev/null +++ b/tests/components/acaia/__init__.py @@ -0,0 +1,14 @@ +"""Common test tools for the acaia integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the acaia integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await 
hass.async_block_till_done() diff --git a/tests/components/acaia/conftest.py b/tests/components/acaia/conftest.py new file mode 100644 index 00000000000..ff151f3b096 --- /dev/null +++ b/tests/components/acaia/conftest.py @@ -0,0 +1,84 @@ +"""Common fixtures for the acaia tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from aioacaia.acaiascale import AcaiaDeviceState +from aioacaia.const import UnitMass as AcaiaUnitOfMass +import pytest + +from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN +from homeassistant.const import CONF_ADDRESS +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.acaia.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_verify() -> Generator[AsyncMock]: + """Override is_new_scale check.""" + with patch( + "homeassistant.components.acaia.config_flow.is_new_scale", return_value=True + ) as mock_verify: + yield mock_verify + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="LUNAR-DDEEFF", + domain=DOMAIN, + version=1, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_IS_NEW_STYLE_SCALE: True, + }, + unique_id="aa:bb:cc:dd:ee:ff", + ) + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_scale: MagicMock +) -> MockConfigEntry: + """Set up the acaia integration for testing.""" + await setup_integration(hass, mock_config_entry) + return mock_config_entry + + +@pytest.fixture +def mock_scale() -> Generator[MagicMock]: + """Return a mocked acaia scale client.""" + with ( + patch( + "homeassistant.components.acaia.coordinator.AcaiaScale", + autospec=True, + ) as scale_mock, + ): + scale = scale_mock.return_value + scale.connected = True + scale.mac = "aa:bb:cc:dd:ee:ff" + scale.model = "Lunar" + scale.last_disconnect_time = "1732181388.1895587" + scale.timer_running = True + scale.heartbeat_task = None + scale.process_queue_task = None + scale.device_state = AcaiaDeviceState( + battery_level=42, units=AcaiaUnitOfMass.OUNCES + ) + scale.weight = 123.45 + scale.timer = 23 + scale.flow_rate = 1.23 + yield scale diff --git a/tests/components/acaia/snapshots/test_binary_sensor.ambr b/tests/components/acaia/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..113b5f1501e --- /dev/null +++ b/tests/components/acaia/snapshots/test_binary_sensor.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.lunar_ddeeff_timer_running-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.lunar_ddeeff_timer_running', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timer running', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_running', + 'unique_id': 
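Editor's note: with the fixtures above, an acaia test only has to request `init_integration` to get the integration loaded against the mocked scale; the `mock_scale` values (weight 123.45, battery 42 %) are exactly what the snapshots below assert. A hedged sketch of a test built on those fixtures (the test name is mine, not part of the diff):

```python
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def test_weight_is_exposed(
    hass: HomeAssistant, init_integration: MockConfigEntry
) -> None:
    """The mocked scale's weight shows up on the weight sensor."""
    state = hass.states.get("sensor.lunar_ddeeff_weight")
    assert state is not None
    assert state.state == "123.45"
```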
'aa:bb:cc:dd:ee:ff_timer_running', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lunar_ddeeff_timer_running-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'LUNAR-DDEEFF Timer running', + }), + 'context': , + 'entity_id': 'binary_sensor.lunar_ddeeff_timer_running', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/acaia/snapshots/test_button.ambr b/tests/components/acaia/snapshots/test_button.ambr new file mode 100644 index 00000000000..cd91ca1a17a --- /dev/null +++ b/tests/components/acaia/snapshots/test_button.ambr @@ -0,0 +1,139 @@ +# serializer version: 1 +# name: test_buttons[button.lunar_ddeeff_reset_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.lunar_ddeeff_reset_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset timer', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_timer', + 'unique_id': 'aa:bb:cc:dd:ee:ff_reset_timer', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.lunar_ddeeff_reset_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LUNAR-DDEEFF Reset timer', + }), + 'context': , + 'entity_id': 'button.lunar_ddeeff_reset_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[button.lunar_ddeeff_start_stop_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.lunar_ddeeff_start_stop_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start/stop timer', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'start_stop', + 'unique_id': 'aa:bb:cc:dd:ee:ff_start_stop', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.lunar_ddeeff_start_stop_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LUNAR-DDEEFF Start/stop timer', + }), + 'context': , + 'entity_id': 'button.lunar_ddeeff_start_stop_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[button.lunar_ddeeff_tare-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.lunar_ddeeff_tare', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tare', + 'platform': 
'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tare', + 'unique_id': 'aa:bb:cc:dd:ee:ff_tare', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.lunar_ddeeff_tare-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LUNAR-DDEEFF Tare', + }), + 'context': , + 'entity_id': 'button.lunar_ddeeff_tare', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/acaia/snapshots/test_diagnostics.ambr b/tests/components/acaia/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..df5e4d36555 --- /dev/null +++ b/tests/components/acaia/snapshots/test_diagnostics.ambr @@ -0,0 +1,16 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'device_state': dict({ + 'auto_off_time': 0, + 'battery_level': 42, + 'beeps': True, + 'units': 'ounces', + }), + 'last_disconnect_time': '1732181388.1895587', + 'mac': 'aa:bb:cc:dd:ee:ff', + 'model': 'Lunar', + 'timer': 23, + 'weight': 123.45, + }) +# --- diff --git a/tests/components/acaia/snapshots/test_init.ambr b/tests/components/acaia/snapshots/test_init.ambr new file mode 100644 index 00000000000..7011b20f68c --- /dev/null +++ b/tests/components/acaia/snapshots/test_init.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': 'kitchen', + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'bluetooth', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'acaia', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Acaia', + 'model': 'Lunar', + 'model_id': None, + 'name': 'LUNAR-DDEEFF', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'Kitchen', + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/acaia/snapshots/test_sensor.ambr b/tests/components/acaia/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c3c8ce966ee --- /dev/null +++ b/tests/components/acaia/snapshots/test_sensor.ambr @@ -0,0 +1,157 @@ +# serializer version: 1 +# name: test_sensors[sensor.lunar_ddeeff_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lunar_ddeeff_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff_battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.lunar_ddeeff_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'LUNAR-DDEEFF Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lunar_ddeeff_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '42', + }) +# --- +# name: test_sensors[sensor.lunar_ddeeff_volume_flow_rate-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lunar_ddeeff_volume_flow_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Volume flow rate', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff_flow_rate', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.lunar_ddeeff_volume_flow_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'LUNAR-DDEEFF Volume flow rate', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.lunar_ddeeff_volume_flow_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.23', + }) +# --- +# name: test_sensors[sensor.lunar_ddeeff_weight-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lunar_ddeeff_weight', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weight', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff_weight', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.lunar_ddeeff_weight-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'weight', + 'friendly_name': 'LUNAR-DDEEFF Weight', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.lunar_ddeeff_weight', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '123.45', + }) +# --- diff --git a/tests/components/acaia/test_binary_sensor.py b/tests/components/acaia/test_binary_sensor.py new file mode 100644 index 00000000000..a7aa7034d8d --- /dev/null +++ b/tests/components/acaia/test_binary_sensor.py @@ -0,0 +1,28 @@ +"""Test binary sensors for acaia integration.""" + +from unittest.mock import MagicMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_binary_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the acaia binary sensors.""" + + with patch("homeassistant.components.acaia.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/acaia/test_button.py b/tests/components/acaia/test_button.py new file mode 100644 index 00000000000..f68f85e253d --- /dev/null +++ b/tests/components/acaia/test_button.py @@ -0,0 +1,90 @@ +"""Tests for the acaia buttons.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +BUTTONS = ( + "tare", + "reset_timer", + "start_stop_timer", +) + + +async def test_buttons( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the acaia buttons.""" + + with patch("homeassistant.components.acaia.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_button_presses( + hass: HomeAssistant, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the acaia button presses.""" + + await setup_integration(hass, mock_config_entry) + + for button in BUTTONS: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.lunar_ddeeff_{button}", + }, + blocking=True, + ) + + function = getattr(mock_scale, button) + function.assert_called_once() + + +async def test_buttons_unavailable_on_disconnected_scale( + hass: HomeAssistant, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the acaia buttons are unavailable when the scale is disconnected.""" + + await setup_integration(hass, mock_config_entry) + + for button in BUTTONS: + state = hass.states.get(f"button.lunar_ddeeff_{button}") + assert state + assert state.state == STATE_UNKNOWN + + mock_scale.connected = False + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + for button in BUTTONS: + state = hass.states.get(f"button.lunar_ddeeff_{button}") + assert state + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/acaia/test_config_flow.py b/tests/components/acaia/test_config_flow.py new file mode 100644 index 00000000000..2bf4b1dbe8a --- /dev/null +++ b/tests/components/acaia/test_config_flow.py @@ -0,0 +1,242 @@ +"""Test the acaia config flow.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, 
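Editor's note: `test_buttons_unavailable_on_disconnected_scale` above follows the usual coordinator-test recipe: mark the mocked scale as disconnected, advance the frozen clock past the update interval, and fire the scheduled time-changed callbacks so the coordinator refreshes and the entities go unavailable. The core of that recipe, annotated (the 10-minute tick is taken from the test itself and not verified against the integration's real refresh interval):

```python
mock_scale.connected = False          # simulate the scale dropping its connection
freezer.tick(timedelta(minutes=10))   # jump the frozen clock past the refresh interval
async_fire_time_changed(hass)         # run the timer callbacks the coordinator scheduled
await hass.async_block_till_done()    # wait for the refresh to update entity availability
```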
AcaiaUnknownDevice +import pytest + +from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN +from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER +from homeassistant.const import CONF_ADDRESS +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo + +from tests.common import MockConfigEntry + +service_info = BluetoothServiceInfo( + name="LUNAR-DDEEFF", + address="aa:bb:cc:dd:ee:ff", + rssi=-63, + manufacturer_data={}, + service_data={}, + service_uuids=[], + source="local", +) + + +@pytest.fixture +def mock_discovered_service_info() -> Generator[AsyncMock]: + """Override getting Bluetooth service info.""" + with patch( + "homeassistant.components.acaia.config_flow.async_discovered_service_info", + return_value=[service_info], + ) as mock_discovered_service_info: + yield mock_discovered_service_info + + +async def test_form( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_verify: AsyncMock, + mock_discovered_service_info: AsyncMock, +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + user_input = { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + } + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "LUNAR-DDEEFF" + assert result2["data"] == { + **user_input, + CONF_IS_NEW_STYLE_SCALE: True, + } + + +async def test_bluetooth_discovery( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_verify: AsyncMock, +) -> None: + """Test we can discover a device.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "bluetooth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == service_info.name + assert result2["data"] == { + CONF_ADDRESS: service_info.address, + CONF_IS_NEW_STYLE_SCALE: True, + } + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (AcaiaDeviceNotFound("Error"), "device_not_found"), + (AcaiaError, "unknown"), + (AcaiaUnknownDevice, "unsupported_device"), + ], +) +async def test_bluetooth_discovery_errors( + hass: HomeAssistant, + mock_verify: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test abortions of Bluetooth discovery.""" + mock_verify.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error + + +async def test_already_configured( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_verify: AsyncMock, + mock_discovered_service_info: AsyncMock, +) -> None: + """Ensure we can't add the same device twice.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + +async def test_already_configured_bluetooth_discovery( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure configure device is not discovered again.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (AcaiaDeviceNotFound("Error"), "device_not_found"), + (AcaiaError, "unknown"), + ], +) +async def test_recoverable_config_flow_errors( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_verify: AsyncMock, + mock_discovered_service_info: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test recoverable errors.""" + mock_verify.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": error} + + # recover + mock_verify.side_effect = None + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + + +async def test_unsupported_device( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_verify: AsyncMock, + mock_discovered_service_info: AsyncMock, +) -> None: + """Test flow aborts on unsupported device.""" + mock_verify.side_effect = AcaiaUnknownDevice + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "unsupported_device" + + +async def test_no_bluetooth_devices( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_discovered_service_info: AsyncMock, +) -> None: + """Test flow aborts on unsupported device.""" + mock_discovered_service_info.return_value = [] + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices_found" diff --git a/tests/components/acaia/test_diagnostics.py b/tests/components/acaia/test_diagnostics.py new file mode 100644 index 00000000000..77f6306b068 --- /dev/null +++ b/tests/components/acaia/test_diagnostics.py @@ -0,0 +1,22 @@ +"""Tests for the diagnostics data provided by the Acaia integration.""" + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + 
"""Test diagnostics.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot + ) diff --git a/tests/components/acaia/test_init.py b/tests/components/acaia/test_init.py new file mode 100644 index 00000000000..8ad988d3b9b --- /dev/null +++ b/tests/components/acaia/test_init.py @@ -0,0 +1,65 @@ +"""Test init of acaia integration.""" + +from datetime import timedelta +from unittest.mock import MagicMock + +from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.acaia.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from tests.common import MockConfigEntry, async_fire_time_changed + +pytestmark = pytest.mark.usefixtures("init_integration") + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test loading and unloading the integration.""" + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + "exception", [AcaiaError, AcaiaDeviceNotFound("Boom"), TimeoutError] +) +async def test_update_exception_leads_to_active_disconnect( + hass: HomeAssistant, + mock_scale: MagicMock, + freezer: FrozenDateTimeFactory, + exception: Exception, +) -> None: + """Test scale gets disconnected on exception.""" + + mock_scale.connect.side_effect = exception + mock_scale.connected = False + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_scale.device_disconnected_handler.assert_called_once() + + +async def test_device( + mock_scale: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Snapshot the device from registry.""" + + device = device_registry.async_get_device({(DOMAIN, mock_scale.mac)}) + assert device + assert device == snapshot diff --git a/tests/components/acaia/test_sensor.py b/tests/components/acaia/test_sensor.py new file mode 100644 index 00000000000..2f5a851121c --- /dev/null +++ b/tests/components/acaia/test_sensor.py @@ -0,0 +1,63 @@ +"""Test sensors for acaia integration.""" + +from unittest.mock import MagicMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import PERCENTAGE, Platform +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import ( + MockConfigEntry, + mock_restore_cache_with_extra_data, + snapshot_platform, +) + + +async def test_sensors( + hass: HomeAssistant, + mock_scale: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Acaia sensors.""" + with patch("homeassistant.components.acaia.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_restore_state( + hass: HomeAssistant, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test battery sensor restore state.""" + mock_scale.device_state = None + entity_id = "sensor.lunar_ddeeff_battery" + + mock_restore_cache_with_extra_data( + hass, + ( + ( + State( + entity_id, + "1", + ), + { + "native_value": 65, + "native_unit_of_measurement": PERCENTAGE, + }, + ), + ), + ) + + await setup_integration(hass, mock_config_entry) + + state = hass.states.get(entity_id) + assert state + assert state.state == "65" diff --git a/tests/components/accuweather/snapshots/test_sensor.ambr b/tests/components/accuweather/snapshots/test_sensor.ambr index 5e28be5a72b..3468d638bc0 100644 --- a/tests/components/accuweather/snapshots/test_sensor.ambr +++ b/tests/components/accuweather/snapshots/test_sensor.ambr @@ -1969,6 +1969,58 @@ 'state': '9.2', }) # --- +# name: test_sensor[sensor.home_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'accuweather', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'humidity', + 'unique_id': '0123456-relativehumidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.home_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by AccuWeather', + 'device_class': 'humidity', + 'friendly_name': 'Home Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.home_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '67', + }) +# --- # name: test_sensor[sensor.home_mold_pollen_day_0-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2267,6 +2319,61 @@ 'state': '0.0', }) # --- +# name: test_sensor[sensor.home_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure', + 'platform': 'accuweather', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'pressure', + 'unique_id': '0123456-pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.home_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by AccuWeather', + 'device_class': 'pressure', + 'friendly_name': 'Home Pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1012.0', + }) +# --- # name: test_sensor[sensor.home_pressure_tendency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4145,6 +4252,58 @@ 'state': '276.1', }) # --- +# name: test_sensor[sensor.home_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'accuweather', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature', + 'unique_id': '0123456-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.home_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by AccuWeather', + 'device_class': 'temperature', + 'friendly_name': 'Home Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.6', + }) +# --- # name: test_sensor[sensor.home_thunderstorm_probability_day_0-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/accuweather/test_init.py b/tests/components/accuweather/test_init.py index 340676905d6..f88cde88e7e 100644 --- a/tests/components/accuweather/test_init.py +++ b/tests/components/accuweather/test_init.py @@ -10,7 +10,7 @@ from homeassistant.components.accuweather.const import ( UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION, ) -from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -107,7 +107,7 @@ async def test_remove_ozone_sensors( ) -> None: """Test remove ozone sensors from registry.""" entity_registry.async_get_or_create( - SENSOR_PLATFORM, + SENSOR_DOMAIN, DOMAIN, "0123456-ozone-0", suggested_object_id="home_ozone_0d", diff --git a/tests/components/accuweather/test_sensor.py b/tests/components/accuweather/test_sensor.py index 41c1c0d930a..37ebe260f39 100644 --- a/tests/components/accuweather/test_sensor.py +++ b/tests/components/accuweather/test_sensor.py @@ -148,6 +148,7 @@ async def test_manual_update_entity( assert mock_accuweather_client.async_get_current_conditions.call_count == 2 +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_imperial_units( hass: HomeAssistant, mock_accuweather_client: AsyncMock ) -> None: diff --git a/tests/components/acmeda/conftest.py b/tests/components/acmeda/conftest.py new file mode 
100644
index 00000000000..2c980351c09
--- /dev/null
+++ b/tests/components/acmeda/conftest.py
@@ -0,0 +1,20 @@
+"""Define fixtures available for all Acmeda tests."""
+
+import pytest
+
+from homeassistant.components.acmeda.const import DOMAIN
+from homeassistant.const import CONF_HOST
+from homeassistant.core import HomeAssistant
+
+from tests.common import MockConfigEntry
+
+
+@pytest.fixture
+def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
+    """Return the default mocked config entry."""
+    mock_config_entry = MockConfigEntry(
+        domain=DOMAIN,
+        data={CONF_HOST: "127.0.0.1"},
+    )
+    mock_config_entry.add_to_hass(hass)
+    return mock_config_entry
diff --git a/tests/components/acmeda/test_cover.py b/tests/components/acmeda/test_cover.py
new file mode 100644
index 00000000000..0d908ecc915
--- /dev/null
+++ b/tests/components/acmeda/test_cover.py
@@ -0,0 +1,28 @@
+"""Define tests for the Acmeda cover platform."""
+
+from homeassistant.components.acmeda.const import DOMAIN
+from homeassistant.components.cover import DOMAIN as COVER_DOMAIN
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from tests.common import MockConfigEntry
+
+
+async def test_cover_id_migration(
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Test migrating unique id."""
+    mock_config_entry.add_to_hass(hass)
+    entity_registry.async_get_or_create(
+        COVER_DOMAIN, DOMAIN, 1234567890123, config_entry=mock_config_entry
+    )
+    assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
+
+    await hass.async_block_till_done()
+    entities = er.async_entries_for_config_entry(
+        entity_registry, mock_config_entry.entry_id
+    )
+    assert len(entities) == 1
+    assert entities[0].unique_id == "1234567890123"
diff --git a/tests/components/acmeda/test_sensor.py b/tests/components/acmeda/test_sensor.py
new file mode 100644
index 00000000000..3d7090ce7dd
--- /dev/null
+++ b/tests/components/acmeda/test_sensor.py
@@ -0,0 +1,27 @@
+"""Define tests for the Acmeda sensor platform."""
+
+from homeassistant.components.acmeda.const import DOMAIN
+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from tests.common import MockConfigEntry
+
+
+async def test_sensor_id_migration(
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Test migrating unique id."""
+    mock_config_entry.add_to_hass(hass)
+    entity_registry.async_get_or_create(
+        SENSOR_DOMAIN, DOMAIN, 1234567890123, config_entry=mock_config_entry
+    )
+    assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+    entities = er.async_entries_for_config_entry(
+        entity_registry, mock_config_entry.entry_id
+    )
+    assert len(entities) == 1
+    assert entities[0].unique_id == "1234567890123"
diff --git a/tests/components/adguard/__init__.py b/tests/components/adguard/__init__.py
index 318e881ef2f..4d8ae091dc5 100644
--- a/tests/components/adguard/__init__.py
+++ b/tests/components/adguard/__init__.py
@@ -1 +1 @@
-"""Tests for the AdGuard Home component."""
+"""Tests for the AdGuard Home integration."""
diff --git a/tests/components/adguard/test_config_flow.py b/tests/components/adguard/test_config_flow.py
index d493962611f..bd0f1b0a08f 100644
--- a/tests/components/adguard/test_config_flow.py
+++
b/tests/components/adguard/test_config_flow.py @@ -4,7 +4,6 @@ import aiohttp from homeassistant import config_entries from homeassistant.components.adguard.const import DOMAIN -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_HOST, @@ -17,6 +16,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -59,9 +59,9 @@ async def test_connection_error( ) assert result - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert result.get("errors") == {"base": "cannot_connect"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} async def test_full_flow_implementation( @@ -83,25 +83,27 @@ async def test_full_flow_implementation( ) assert result - assert result.get("flow_id") - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["flow_id"] + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=FIXTURE_USER_INPUT ) - assert result2 - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == FIXTURE_USER_INPUT[CONF_HOST] + assert result + assert result["type"] is FlowResultType.CREATE_ENTRY - data = result2.get("data") - assert data - assert data[CONF_HOST] == FIXTURE_USER_INPUT[CONF_HOST] - assert data[CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] - assert data[CONF_PORT] == FIXTURE_USER_INPUT[CONF_PORT] - assert data[CONF_SSL] == FIXTURE_USER_INPUT[CONF_SSL] - assert data[CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] - assert data[CONF_VERIFY_SSL] == FIXTURE_USER_INPUT[CONF_VERIFY_SSL] + config_entry = result["result"] + assert config_entry.title == FIXTURE_USER_INPUT[CONF_HOST] + assert config_entry.data == { + CONF_HOST: FIXTURE_USER_INPUT[CONF_HOST], + CONF_PASSWORD: FIXTURE_USER_INPUT[CONF_PASSWORD], + CONF_PORT: FIXTURE_USER_INPUT[CONF_PORT], + CONF_SSL: FIXTURE_USER_INPUT[CONF_SSL], + CONF_USERNAME: FIXTURE_USER_INPUT[CONF_USERNAME], + CONF_VERIFY_SSL: FIXTURE_USER_INPUT[CONF_VERIFY_SSL], + } + assert not config_entry.options async def test_integration_already_exists(hass: HomeAssistant) -> None: @@ -116,8 +118,8 @@ async def test_integration_already_exists(hass: HomeAssistant) -> None: context={"source": config_entries.SOURCE_USER}, ) assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_hassio_already_configured(hass: HomeAssistant) -> None: @@ -141,8 +143,8 @@ async def test_hassio_already_configured(hass: HomeAssistant) -> None: context={"source": config_entries.SOURCE_HASSIO}, ) assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_hassio_ignored(hass: HomeAssistant) -> None: @@ 
-166,8 +168,8 @@ async def test_hassio_ignored(hass: HomeAssistant) -> None: context={"source": config_entries.SOURCE_HASSIO}, ) assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_hassio_confirm( @@ -195,24 +197,25 @@ async def test_hassio_confirm( context={"source": config_entries.SOURCE_HASSIO}, ) assert result - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "hassio_confirm" - assert result.get("description_placeholders") == {"addon": "AdGuard Home Addon"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "hassio_confirm" + assert result["description_placeholders"] == {"addon": "AdGuard Home Addon"} - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - assert result2 - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "AdGuard Home Addon" + assert result + assert result["type"] is FlowResultType.CREATE_ENTRY - data = result2.get("data") - assert data - assert data[CONF_HOST] == "mock-adguard" - assert data[CONF_PASSWORD] is None - assert data[CONF_PORT] == 3000 - assert data[CONF_SSL] is False - assert data[CONF_USERNAME] is None - assert data[CONF_VERIFY_SSL] + config_entry = result["result"] + assert config_entry.title == "AdGuard Home Addon" + assert config_entry.data == { + CONF_HOST: "mock-adguard", + CONF_PASSWORD: None, + CONF_PORT: 3000, + CONF_SSL: False, + CONF_USERNAME: None, + CONF_VERIFY_SSL: True, + } async def test_hassio_connection_error( @@ -241,6 +244,6 @@ async def test_hassio_connection_error( result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) assert result - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "hassio_confirm" - assert result.get("errors") == {"base": "cannot_connect"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "hassio_confirm" + assert result["errors"] == {"base": "cannot_connect"} diff --git a/tests/components/advantage_air/test_binary_sensor.py b/tests/components/advantage_air/test_binary_sensor.py index 13bbadb38f9..d0088d96ba5 100644 --- a/tests/components/advantage_air/test_binary_sensor.py +++ b/tests/components/advantage_air/test_binary_sensor.py @@ -1,10 +1,8 @@ """Test the Advantage Air Binary Sensor Platform.""" from datetime import timedelta -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch -from homeassistant.components.advantage_air import ADVANTAGE_AIR_SYNC_INTERVAL -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -70,22 +68,14 @@ async def test_binary_sensor_async_setup_entry( assert not hass.states.get(entity_id) mock_get.reset_mock() - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done() - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 1 + with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1): + 
entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done() - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 2 + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 1 state = hass.states.get(entity_id) assert state @@ -101,22 +91,14 @@ async def test_binary_sensor_async_setup_entry( assert not hass.states.get(entity_id) mock_get.reset_mock() - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done() - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 1 + with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1): + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done() - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 2 + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 1 state = hass.states.get(entity_id) assert state diff --git a/tests/components/advantage_air/test_cover.py b/tests/components/advantage_air/test_cover.py index 4752601d9ad..a9a3cc70c18 100644 --- a/tests/components/advantage_air/test_cover.py +++ b/tests/components/advantage_air/test_cover.py @@ -9,8 +9,9 @@ from homeassistant.components.cover import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, CoverDeviceClass, + CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OPEN +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -31,7 +32,7 @@ async def test_ac_cover( entity_id = "cover.myauto_zone_y" state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get("device_class") == CoverDeviceClass.DAMPER assert state.attributes.get("current_position") == 100 @@ -120,7 +121,7 @@ async def test_things_cover( thing_id = "200" state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get("device_class") == CoverDeviceClass.BLIND entry = entity_registry.async_get(entity_id) diff --git a/tests/components/advantage_air/test_sensor.py b/tests/components/advantage_air/test_sensor.py index 06243921a64..3ea368a59fb 100644 --- a/tests/components/advantage_air/test_sensor.py +++ b/tests/components/advantage_air/test_sensor.py @@ -1,15 +1,13 @@ """Test the Advantage Air Sensor Platform.""" from datetime import timedelta -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch -from homeassistant.components.advantage_air import ADVANTAGE_AIR_SYNC_INTERVAL from homeassistant.components.advantage_air.const import DOMAIN as ADVANTAGE_AIR_DOMAIN from homeassistant.components.advantage_air.sensor import ( ADVANTAGE_AIR_SERVICE_SET_TIME_TO, 
ADVANTAGE_AIR_SET_COUNTDOWN_VALUE, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -124,23 +122,15 @@ async def test_sensor_platform_disabled_entity( assert not hass.states.get(entity_id) - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done(wait_background_tasks=True) mock_get.reset_mock() - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 1 + with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1): + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done(wait_background_tasks=True) - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 2 + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 1 state = hass.states.get(entity_id) assert state diff --git a/tests/components/aemet/snapshots/test_diagnostics.ambr b/tests/components/aemet/snapshots/test_diagnostics.ambr index 8d4132cad84..1e09a372352 100644 --- a/tests/components/aemet/snapshots/test_diagnostics.ambr +++ b/tests/components/aemet/snapshots/test_diagnostics.ambr @@ -11,6 +11,8 @@ 'name': 'AEMET', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'aemet', 'entry_id': '7442b231f139e813fc1939281123f220', 'minor_version': 1, @@ -19,6 +21,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airgradient/conftest.py b/tests/components/airgradient/conftest.py index 1899e12c8ae..395c5cd96a4 100644 --- a/tests/components/airgradient/conftest.py +++ b/tests/components/airgradient/conftest.py @@ -1,7 +1,7 @@ """AirGradient tests configuration.""" from collections.abc import Generator -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from airgradient import Config, Measures import pytest @@ -10,7 +10,6 @@ from homeassistant.components.airgradient.const import DOMAIN from homeassistant.const import CONF_HOST from tests.common import MockConfigEntry, load_fixture -from tests.components.smhi.common import AsyncMock @pytest.fixture diff --git a/tests/components/airgradient/snapshots/test_diagnostics.ambr b/tests/components/airgradient/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..a96dfb95382 --- /dev/null +++ b/tests/components/airgradient/snapshots/test_diagnostics.ambr @@ -0,0 +1,42 @@ +# serializer version: 1 +# name: test_diagnostics_polling_instance + dict({ + 'config': dict({ + 'co2_automatic_baseline_calibration_days': 8, + 'configuration_control': 'local', + 'country': 'DE', + 'display_brightness': 0, + 'led_bar_brightness': 100, + 'led_bar_mode': 'co2', + 'nox_learning_offset': 12, + 'pm_standard': 'ugm3', + 'post_data_to_airgradient': True, + 'temperature_unit': 'c', + 'tvoc_learning_offset': 12, + }), + 'measures': dict({ + 'ambient_temperature': 22.17, + 'boot_time': 28, + 
'compensated_ambient_temperature': 22.17, + 'compensated_pm02': None, + 'compensated_relative_humidity': 47.0, + 'firmware_version': '3.1.1', + 'model': 'I-9PSL', + 'nitrogen_index': 1, + 'pm003_count': 270, + 'pm01': 22, + 'pm02': 34, + 'pm10': 41, + 'raw_ambient_temperature': 27.96, + 'raw_nitrogen': 16931, + 'raw_pm02': 34, + 'raw_relative_humidity': 48.0, + 'raw_total_volatile_organic_component': 31792, + 'rco2': 778, + 'relative_humidity': 47.0, + 'serial_number': '84fce612f5b8', + 'signal_strength': -52, + 'total_volatile_organic_component_index': 99, + }), + }) +# --- diff --git a/tests/components/airgradient/snapshots/test_init.ambr b/tests/components/airgradient/snapshots/test_init.ambr index e47c5b38bbc..72cb12535f1 100644 --- a/tests/components/airgradient/snapshots/test_init.ambr +++ b/tests/components/airgradient/snapshots/test_init.ambr @@ -57,7 +57,7 @@ 'name': 'Airgradient', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': '84fce60bec38', + 'serial_number': '84fce612f5b8', 'suggested_area': None, 'sw_version': '3.1.1', 'via_device_id': None, diff --git a/tests/components/airgradient/snapshots/test_sensor.ambr b/tests/components/airgradient/snapshots/test_sensor.ambr index ff83fdcc111..941369ff266 100644 --- a/tests/components/airgradient/snapshots/test_sensor.ambr +++ b/tests/components/airgradient/snapshots/test_sensor.ambr @@ -305,7 +305,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '48.0', + 'state': '47.0', }) # --- # name: test_all_entities[indoor][sensor.airgradient_led_bar_brightness-entry] @@ -912,7 +912,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '27.96', + 'state': '22.17', }) # --- # name: test_all_entities[indoor][sensor.airgradient_voc_index-entry] diff --git a/tests/components/airgradient/snapshots/test_update.ambr b/tests/components/airgradient/snapshots/test_update.ambr index c639a97d5dd..1f944bb528b 100644 --- a/tests/components/airgradient/snapshots/test_update.ambr +++ b/tests/components/airgradient/snapshots/test_update.ambr @@ -37,6 +37,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/airgradient/icon.png', 'friendly_name': 'Airgradient Firmware', 'in_progress': False, @@ -47,6 +48,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.airgradient_firmware', diff --git a/tests/components/airgradient/test_button.py b/tests/components/airgradient/test_button.py index 7901c3a067b..83de2c2f048 100644 --- a/tests/components/airgradient/test_button.py +++ b/tests/components/airgradient/test_button.py @@ -7,7 +7,7 @@ from airgradient import Config from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant diff --git a/tests/components/airgradient/test_config_flow.py b/tests/components/airgradient/test_config_flow.py index 8730b18676f..73dbd17a213 100644 --- a/tests/components/airgradient/test_config_flow.py +++ b/tests/components/airgradient/test_config_flow.py @@ -9,7 +9,7 @@ from airgradient import ( ConfigurationControl, ) -from homeassistant.components.airgradient import 
DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST diff --git a/tests/components/airgradient/test_diagnostics.py b/tests/components/airgradient/test_diagnostics.py new file mode 100644 index 00000000000..34a9bb7aab2 --- /dev/null +++ b/tests/components/airgradient/test_diagnostics.py @@ -0,0 +1,29 @@ +"""Tests for the diagnostics data provided by the AirGradient integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics_polling_instance( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + await setup_integration(hass, mock_config_entry) + + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/airgradient/test_init.py b/tests/components/airgradient/test_init.py index 408e6f5f3ba..a121940f2bc 100644 --- a/tests/components/airgradient/test_init.py +++ b/tests/components/airgradient/test_init.py @@ -1,16 +1,18 @@ """Tests for the AirGradient integration.""" +from datetime import timedelta from unittest.mock import AsyncMock +from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from . 
import setup_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed async def test_device_info( @@ -27,3 +29,28 @@ async def test_device_info( ) assert device_entry is not None assert device_entry == snapshot + + +async def test_new_firmware_version( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry.sw_version == "3.1.1" + mock_airgradient_client.get_current_measures.return_value.firmware_version = "3.1.2" + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry.sw_version == "3.1.2" diff --git a/tests/components/airgradient/test_number.py b/tests/components/airgradient/test_number.py index 0803c0d437f..7aabda8f81c 100644 --- a/tests/components/airgradient/test_number.py +++ b/tests/components/airgradient/test_number.py @@ -7,7 +7,7 @@ from airgradient import Config from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, diff --git a/tests/components/airgradient/test_select.py b/tests/components/airgradient/test_select.py index 61679a15c07..de4a7beaaa7 100644 --- a/tests/components/airgradient/test_select.py +++ b/tests/components/airgradient/test_select.py @@ -8,7 +8,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, diff --git a/tests/components/airgradient/test_sensor.py b/tests/components/airgradient/test_sensor.py index c2e53ef4de2..e3fed70839a 100644 --- a/tests/components/airgradient/test_sensor.py +++ b/tests/components/airgradient/test_sensor.py @@ -8,7 +8,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er diff --git a/tests/components/airgradient/test_switch.py b/tests/components/airgradient/test_switch.py index 20a1cb7470b..a0cbdd17d75 100644 --- a/tests/components/airgradient/test_switch.py +++ b/tests/components/airgradient/test_switch.py @@ -7,7 +7,7 @@ from airgradient import Config from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, 
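
The acaia and airgradient init tests above share the same time-advance pattern: freeze the clock, tick it past the coordinator's polling interval with freezegun's FrozenDateTimeFactory, fire the scheduled refresh via async_fire_time_changed, and only then assert on the refreshed data. A minimal sketch of that pattern, assuming a hypothetical integration whose coordinator polls a mocked client once a minute (the mock_client fixture, its get_data method, and the one-minute interval are illustrative assumptions, not part of this diff):

from datetime import timedelta

from freezegun.api import FrozenDateTimeFactory

from homeassistant.core import HomeAssistant

from tests.common import async_fire_time_changed


async def test_coordinator_polls_after_interval(
    hass: HomeAssistant,
    mock_client,  # hypothetical fixture: AsyncMock client, integration already set up
    freezer: FrozenDateTimeFactory,
) -> None:
    """Advance frozen time past the update interval and let the coordinator refresh."""
    calls_before = mock_client.get_data.call_count  # assumed client method name

    freezer.tick(timedelta(minutes=1))  # step past the assumed one-minute interval
    async_fire_time_changed(hass)  # run the refresh the coordinator scheduled
    await hass.async_block_till_done()  # wait for the async update to finish

    # Exactly one additional poll should have happened for the single elapsed interval.
    assert mock_client.get_data.call_count == calls_before + 1
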
diff --git a/tests/components/airly/snapshots/test_diagnostics.ambr b/tests/components/airly/snapshots/test_diagnostics.ambr index c22e96a2082..1c760eaec52 100644 --- a/tests/components/airly/snapshots/test_diagnostics.ambr +++ b/tests/components/airly/snapshots/test_diagnostics.ambr @@ -9,6 +9,8 @@ 'name': 'Home', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'airly', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, @@ -17,6 +19,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Home', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airnow/conftest.py b/tests/components/airnow/conftest.py index c5d23fa7289..84adf12806d 100644 --- a/tests/components/airnow/conftest.py +++ b/tests/components/airnow/conftest.py @@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, patch import pytest -from homeassistant.components.airnow import DOMAIN +from homeassistant.components.airnow.const import DOMAIN from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS from homeassistant.core import HomeAssistant from homeassistant.util.json import JsonArrayType diff --git a/tests/components/airnow/snapshots/test_diagnostics.ambr b/tests/components/airnow/snapshots/test_diagnostics.ambr index c2004d759a9..73ba6a7123f 100644 --- a/tests/components/airnow/snapshots/test_diagnostics.ambr +++ b/tests/components/airnow/snapshots/test_diagnostics.ambr @@ -8,7 +8,7 @@ 'DateObserved': '2020-12-20', 'HourObserved': 15, 'Latitude': '**REDACTED**', - 'LocalTimeZoneInfo': 'PST', + 'LocalTimeZone': 'PST', 'Longitude': '**REDACTED**', 'O3': 0.048, 'PM10': 12, @@ -24,6 +24,8 @@ 'longitude': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'airnow', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, @@ -33,6 +35,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/airthings_ble/__init__.py b/tests/components/airthings_ble/__init__.py index a736fa979e9..add21b1067f 100644 --- a/tests/components/airthings_ble/__init__.py +++ b/tests/components/airthings_ble/__init__.py @@ -49,7 +49,7 @@ def patch_airthings_ble(return_value=AirthingsDevice, side_effect=None): def patch_airthings_device_update(): """Patch airthings-ble device.""" return patch( - "homeassistant.components.airthings_ble.AirthingsBluetoothDeviceData.update_device", + "homeassistant.components.airthings_ble.coordinator.AirthingsBluetoothDeviceData.update_device", return_value=WAVE_DEVICE_INFO, ) diff --git a/tests/components/airtouch5/test_cover.py b/tests/components/airtouch5/test_cover.py index 295535cd95d..57a344e8018 100644 --- a/tests/components/airtouch5/test_cover.py +++ b/tests/components/airtouch5/test_cover.py @@ -17,9 +17,9 @@ from homeassistant.components.cover import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - STATE_OPEN, + CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_CLOSED, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -118,26 +118,26 @@ async def test_cover_callbacks( await _call_zone_status_callback(0.7) state = hass.states.get(COVER_ENTITY_ID) assert state - assert state.state == STATE_OPEN + 
assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 70 # Fully open await _call_zone_status_callback(1) state = hass.states.get(COVER_ENTITY_ID) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 # Fully closed await _call_zone_status_callback(0.0) state = hass.states.get(COVER_ENTITY_ID) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 # Partly reopened await _call_zone_status_callback(0.3) state = hass.states.get(COVER_ENTITY_ID) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 30 diff --git a/tests/components/airvisual/snapshots/test_diagnostics.ambr b/tests/components/airvisual/snapshots/test_diagnostics.ambr index cb9d25b8790..0dbdef1d508 100644 --- a/tests/components/airvisual/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual/snapshots/test_diagnostics.ambr @@ -36,6 +36,8 @@ 'longitude': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'airvisual', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, @@ -45,6 +47,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 3, diff --git a/tests/components/airvisual/test_config_flow.py b/tests/components/airvisual/test_config_flow.py index b9643b17c07..632bdb72eb4 100644 --- a/tests/components/airvisual/test_config_flow.py +++ b/tests/components/airvisual/test_config_flow.py @@ -18,7 +18,7 @@ from homeassistant.components.airvisual import ( INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SHOW_ON_MAP from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -33,6 +33,8 @@ from .conftest import ( TEST_STATE, ) +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -146,16 +148,10 @@ async def test_options_flow( async def test_step_reauth( - hass: HomeAssistant, config_entry, setup_config_entry + hass: HomeAssistant, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=config_entry.data - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/airvisual/test_init.py b/tests/components/airvisual/test_init.py index 7fa9f4ca779..19dab3de210 100644 --- a/tests/components/airvisual/test_init.py +++ b/tests/components/airvisual/test_init.py @@ -11,7 +11,9 @@ from homeassistant.components.airvisual import ( INTEGRATION_TYPE_GEOGRAPHY_NAME, INTEGRATION_TYPE_NODE_PRO, ) -from homeassistant.components.airvisual_pro import DOMAIN as AIRVISUAL_PRO_DOMAIN + +# pylint: disable-next=hass-component-root-import +from 
homeassistant.components.airvisual_pro.const import DOMAIN as AIRVISUAL_PRO_DOMAIN from homeassistant.const import ( CONF_API_KEY, CONF_COUNTRY, diff --git a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr index be709621e31..113db6e3b96 100644 --- a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr @@ -91,6 +91,8 @@ 'password': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'airvisual_pro', 'entry_id': '6a2b3770e53c28dc1eeb2515e906b0ce', 'minor_version': 1, @@ -99,6 +101,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'XXXXXXX', 'version': 1, diff --git a/tests/components/airvisual_pro/test_config_flow.py b/tests/components/airvisual_pro/test_config_flow.py index 803a335f52c..9298b8cf528 100644 --- a/tests/components/airvisual_pro/test_config_flow.py +++ b/tests/components/airvisual_pro/test_config_flow.py @@ -10,11 +10,13 @@ from pyairvisual.node import ( import pytest from homeassistant.components.airvisual_pro.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -98,22 +100,14 @@ async def test_step_import(hass: HomeAssistant, config, setup_airvisual_pro) -> async def test_reauth( hass: HomeAssistant, config, - config_entry, + config_entry: MockConfigEntry, connect_errors, connect_mock, pro, setup_airvisual_pro, ) -> None: """Test re-auth (including errors).""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data=config, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index 2adf50558e0..39668e3d19f 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -220,6 +220,45 @@ }), ]), }), + dict({ + 'data': list([ + dict({ + 'air_demand': 0, + 'coldStage': 0, + 'coldStages': 0, + 'coolmaxtemp': 30, + 'coolmintemp': 15, + 'coolsetpoint': 20, + 'errors': list([ + ]), + 'floor_demand': 0, + 'heatStage': 0, + 'heatStages': 0, + 'heatmaxtemp': 30, + 'heatmintemp': 15, + 'heatsetpoint': 20, + 'humidity': 0, + 'maxTemp': 30, + 'minTemp': 15, + 'mode': 6, + 'modes': list([ + 1, + 2, + 3, + 4, + 5, + 6, + ]), + 'name': 'Aux Heat', + 'on': 1, + 'roomTemp': 22, + 'setpoint': 20, + 'systemID': 4, + 'units': 0, + 'zoneID': 1, + }), + ]), + }), ]), }), 'version': dict({ @@ -238,6 +277,8 @@ 'port': 3000, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'airzone', 'entry_id': '6e7a0798c1734ba81d26ced0e690eaec', 'minor_version': 1, @@ -246,6 +287,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 
'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, @@ -267,8 +310,8 @@ 'temp-set': 45, 'temp-unit': 0, }), - 'num-systems': 3, - 'num-zones': 7, + 'num-systems': 4, + 'num-zones': 8, 'systems': dict({ '1': dict({ 'available': True, @@ -318,6 +361,23 @@ ]), 'problems': False, }), + '4': dict({ + 'available': True, + 'full-name': 'Airzone [4] System', + 'id': 4, + 'master-system-zone': '4:1', + 'master-zone': 1, + 'mode': 6, + 'modes': list([ + 1, + 2, + 3, + 4, + 5, + 6, + ]), + 'problems': False, + }), }), 'version': '1.62', 'webserver': dict({ @@ -681,6 +741,46 @@ 'temp-step': 1.0, 'temp-unit': 1, }), + '4:1': dict({ + 'absolute-temp-max': 30.0, + 'absolute-temp-min': 15.0, + 'action': 5, + 'air-demand': False, + 'available': True, + 'cold-stage': 0, + 'cool-temp-max': 30.0, + 'cool-temp-min': 15.0, + 'cool-temp-set': 20.0, + 'demand': False, + 'double-set-point': False, + 'floor-demand': False, + 'full-name': 'Airzone [4:1] Aux Heat', + 'heat-stage': 0, + 'heat-temp-max': 30.0, + 'heat-temp-min': 15.0, + 'heat-temp-set': 20.0, + 'id': 1, + 'master': True, + 'mode': 6, + 'modes': list([ + 1, + 2, + 3, + 4, + 5, + 6, + ]), + 'name': 'Aux Heat', + 'on': True, + 'problems': False, + 'system': 4, + 'temp': 22.0, + 'temp-max': 30.0, + 'temp-min': 15.0, + 'temp-set': 20.0, + 'temp-step': 0.5, + 'temp-unit': 0, + }), }), }), }) diff --git a/tests/components/airzone/test_climate.py b/tests/components/airzone/test_climate.py index 0f23c151e0e..12a73a6a268 100644 --- a/tests/components/airzone/test_climate.py +++ b/tests/components/airzone/test_climate.py @@ -225,6 +225,23 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25.0 assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 22.8 + state = hass.states.get("climate.aux_heat") + assert state.state == HVACMode.HEAT + assert state.attributes.get(ATTR_CURRENT_HUMIDITY) is None + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 22 + assert state.attributes.get(ATTR_HVAC_ACTION) == HVACAction.IDLE + assert state.attributes.get(ATTR_HVAC_MODES) == [ + HVACMode.OFF, + HVACMode.COOL, + HVACMode.HEAT, + HVACMode.FAN_ONLY, + HVACMode.DRY, + ] + assert state.attributes.get(ATTR_MAX_TEMP) == 30 + assert state.attributes.get(ATTR_MIN_TEMP) == 15 + assert state.attributes.get(ATTR_TARGET_TEMP_STEP) == API_TEMPERATURE_STEP + assert state.attributes.get(ATTR_TEMPERATURE) == 20.0 + HVAC_MOCK_CHANGED = copy.deepcopy(HVAC_MOCK) HVAC_MOCK_CHANGED[API_SYSTEMS][0][API_DATA][0][API_MAX_TEMP] = 25 HVAC_MOCK_CHANGED[API_SYSTEMS][0][API_DATA][0][API_MIN_TEMP] = 10 diff --git a/tests/components/airzone/test_switch.py b/tests/components/airzone/test_switch.py new file mode 100644 index 00000000000..f761b53ed4c --- /dev/null +++ b/tests/components/airzone/test_switch.py @@ -0,0 +1,102 @@ +"""The switch tests for the Airzone platform.""" + +from unittest.mock import patch + +from aioairzone.const import API_DATA, API_ON, API_SYSTEM_ID, API_ZONE_ID + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, +) +from homeassistant.core import HomeAssistant + +from .util import async_init_integration + + +async def test_airzone_create_switches(hass: HomeAssistant) -> None: + """Test creation of switches.""" + + await async_init_integration(hass) + + state = hass.states.get("switch.despacho") + assert state.state == STATE_OFF + + state = 
hass.states.get("switch.dorm_1") + assert state.state == STATE_ON + + state = hass.states.get("switch.dorm_2") + assert state.state == STATE_OFF + + state = hass.states.get("switch.dorm_ppal") + assert state.state == STATE_ON + + state = hass.states.get("switch.salon") + assert state.state == STATE_OFF + + +async def test_airzone_switch_off(hass: HomeAssistant) -> None: + """Test switch off.""" + + await async_init_integration(hass) + + put_hvac_off = { + API_DATA: [ + { + API_SYSTEM_ID: 1, + API_ZONE_ID: 3, + API_ON: False, + } + ] + } + + with patch( + "homeassistant.components.airzone.AirzoneLocalApi.put_hvac", + return_value=put_hvac_off, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: "switch.dorm_1", + }, + blocking=True, + ) + + state = hass.states.get("switch.dorm_1") + assert state.state == STATE_OFF + + +async def test_airzone_switch_on(hass: HomeAssistant) -> None: + """Test switch on.""" + + await async_init_integration(hass) + + put_hvac_on = { + API_DATA: [ + { + API_SYSTEM_ID: 1, + API_ZONE_ID: 5, + API_ON: True, + } + ] + } + + with patch( + "homeassistant.components.airzone.AirzoneLocalApi.put_hvac", + return_value=put_hvac_on, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.dorm_2", + }, + blocking=True, + ) + + state = hass.states.get("switch.dorm_2") + assert state.state == STATE_ON diff --git a/tests/components/airzone/util.py b/tests/components/airzone/util.py index 2cdb7a9c6f9..278663b7a97 100644 --- a/tests/components/airzone/util.py +++ b/tests/components/airzone/util.py @@ -272,6 +272,37 @@ HVAC_MOCK = { }, ] }, + { + API_DATA: [ + { + API_SYSTEM_ID: 4, + API_ZONE_ID: 1, + API_NAME: "Aux Heat", + API_ON: 1, + API_COOL_SET_POINT: 20, + API_COOL_MAX_TEMP: 30, + API_COOL_MIN_TEMP: 15, + API_HEAT_SET_POINT: 20, + API_HEAT_MAX_TEMP: 30, + API_HEAT_MIN_TEMP: 15, + API_MAX_TEMP: 30, + API_MIN_TEMP: 15, + API_SET_POINT: 20, + API_ROOM_TEMP: 22, + API_MODES: [1, 2, 3, 4, 5, 6], + API_MODE: 6, + API_COLD_STAGES: 0, + API_COLD_STAGE: 0, + API_HEAT_STAGES: 0, + API_HEAT_STAGE: 0, + API_HUMIDITY: 0, + API_UNITS: 0, + API_ERRORS: [], + API_AIR_DEMAND: 0, + API_FLOOR_DEMAND: 0, + }, + ] + }, ] } diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index 26a606bde42..4bd7bfaccdd 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -91,6 +91,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'airzone_cloud', 'entry_id': 'd186e31edb46d64d14b9b2f11f1ebd9f', 'minor_version': 1, @@ -99,6 +101,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'installation1', 'version': 1, @@ -134,6 +138,7 @@ }), 'temperature': 21.0, 'temperature-setpoint': 22.0, + 'temperature-setpoint-auto-air': 22.0, 'temperature-setpoint-cool-air': 22.0, 'temperature-setpoint-hot-air': 22.0, 'temperature-setpoint-max': 30.0, @@ -154,6 +159,9 @@ 'available': True, 'double-set-point': True, 'id': 'aidoo_pro', + 'indoor-exchanger-temperature': 26.0, + 'indoor-return-temperature': 26.0, + 'indoor-work-temperature': 25.0, 'installation': 'installation1', 'is-connected': True, 'mode': 2, @@ -166,6 +174,12 @@ 5, ]), 'name': 'Bron Pro', + 'outdoor-condenser-pressure': 150.0, + 
'outdoor-discharge-temperature': 121.0, + 'outdoor-electric-current': 3.0, + 'outdoor-evaporator-pressure': 20.0, + 'outdoor-exchanger-temperature': -25.0, + 'outdoor-temperature': 29.0, 'power': True, 'problems': False, 'speed': 3, @@ -180,6 +194,7 @@ }), 'temperature': 20.0, 'temperature-setpoint': 22.0, + 'temperature-setpoint-auto-air': 22.0, 'temperature-setpoint-cool-air': 22.0, 'temperature-setpoint-hot-air': 18.0, 'temperature-setpoint-max': 30.0, @@ -286,6 +301,7 @@ 'dhw1': dict({ 'active': False, 'available': True, + 'double-set-point': False, 'id': 'dhw1', 'installation': 'installation1', 'is-connected': True, @@ -368,6 +384,7 @@ 'aq-present': True, 'aq-status': 'good', 'available': True, + 'double-set-point': False, 'errors': list([ dict({ '_id': 'error-id', diff --git a/tests/components/airzone_cloud/test_climate.py b/tests/components/airzone_cloud/test_climate.py index 37c5ff8e1af..2b587680a57 100644 --- a/tests/components/airzone_cloud/test_climate.py +++ b/tests/components/airzone_cloud/test_climate.py @@ -97,8 +97,7 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None: assert state.attributes[ATTR_MAX_TEMP] == 30 assert state.attributes[ATTR_MIN_TEMP] == 15 assert state.attributes[ATTR_TARGET_TEMP_STEP] == API_DEFAULT_TEMP_STEP - assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 22.0 - assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 18.0 + assert state.attributes.get(ATTR_TEMPERATURE) == 22.0 # Groups state = hass.states.get("climate.group") @@ -589,6 +588,7 @@ async def test_airzone_climate_set_temp(hass: HomeAssistant) -> None: SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: "climate.bron_pro", + ATTR_HVAC_MODE: HVACMode.HEAT_COOL, ATTR_TARGET_TEMP_HIGH: 25.0, ATTR_TARGET_TEMP_LOW: 20.0, }, @@ -596,7 +596,7 @@ async def test_airzone_climate_set_temp(hass: HomeAssistant) -> None: ) state = hass.states.get("climate.bron_pro") - assert state.state == HVACMode.HEAT + assert state.state == HVACMode.HEAT_COOL assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25.0 assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 20.0 diff --git a/tests/components/airzone_cloud/test_init.py b/tests/components/airzone_cloud/test_init.py index b5b4bcebaa8..6cab0be6e7c 100644 --- a/tests/components/airzone_cloud/test_init.py +++ b/tests/components/airzone_cloud/test_init.py @@ -2,6 +2,8 @@ from unittest.mock import patch +from aioairzone_cloud.exceptions import AirzoneTimeout + from homeassistant.components.airzone_cloud.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -50,3 +52,20 @@ async def test_unload_entry(hass: HomeAssistant) -> None: await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_init_api_timeout(hass: HomeAssistant) -> None: + """Test API timeouts when loading the Airzone Cloud integration.""" + + with patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.login", + side_effect=AirzoneTimeout, + ): + config_entry = MockConfigEntry( + data=CONFIG, + domain=DOMAIN, + unique_id="airzone_cloud_unique_id", + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) is False diff --git a/tests/components/airzone_cloud/test_select.py b/tests/components/airzone_cloud/test_select.py index 5a6b6104468..d0993365083 100644 --- a/tests/components/airzone_cloud/test_select.py +++ 
b/tests/components/airzone_cloud/test_select.py @@ -4,7 +4,7 @@ from unittest.mock import patch import pytest -from homeassistant.components.select import DOMAIN as SELECT_DOMAIN +from homeassistant.components.select import ATTR_OPTIONS, DOMAIN as SELECT_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_SELECT_OPTION from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -22,9 +22,21 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dormitorio_air_quality_mode") assert state.state == "auto" + state = hass.states.get("select.dormitorio_mode") + assert state is None + state = hass.states.get("select.salon_air_quality_mode") assert state.state == "auto" + state = hass.states.get("select.salon_mode") + assert state.state == "cool" + assert state.attributes.get(ATTR_OPTIONS) == [ + "cool", + "dry", + "fan", + "heat", + ] + async def test_airzone_select_air_quality_mode(hass: HomeAssistant) -> None: """Test select Air Quality mode.""" @@ -58,3 +70,37 @@ async def test_airzone_select_air_quality_mode(hass: HomeAssistant) -> None: state = hass.states.get("select.dormitorio_air_quality_mode") assert state.state == "off" + + +async def test_airzone_select_mode(hass: HomeAssistant) -> None: + """Test select HVAC mode.""" + + await async_init_integration(hass) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.salon_mode", + ATTR_OPTION: "Invalid", + }, + blocking=True, + ) + + with patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_patch_device", + return_value=None, + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.salon_mode", + ATTR_OPTION: "heat", + }, + blocking=True, + ) + + state = hass.states.get("select.salon_mode") + assert state.state == "heat" diff --git a/tests/components/airzone_cloud/test_sensor.py b/tests/components/airzone_cloud/test_sensor.py index cf291ec23a6..672e10adedb 100644 --- a/tests/components/airzone_cloud/test_sensor.py +++ b/tests/components/airzone_cloud/test_sensor.py @@ -20,6 +20,33 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: state = hass.states.get("sensor.bron_pro_temperature") assert state.state == "20.0" + state = hass.states.get("sensor.bron_pro_indoor_exchanger_temperature") + assert state.state == "26.0" + + state = hass.states.get("sensor.bron_pro_indoor_return_temperature") + assert state.state == "26.0" + + state = hass.states.get("sensor.bron_pro_indoor_working_temperature") + assert state.state == "25.0" + + state = hass.states.get("sensor.bron_pro_outdoor_condenser_pressure") + assert state.state == "150.0" + + state = hass.states.get("sensor.bron_pro_outdoor_discharge_temperature") + assert state.state == "121.0" + + state = hass.states.get("sensor.bron_pro_outdoor_electric_current") + assert state.state == "3.0" + + state = hass.states.get("sensor.bron_pro_outdoor_evaporator_pressure") + assert state.state == "20.0" + + state = hass.states.get("sensor.bron_pro_outdoor_exchanger_temperature") + assert state.state == "-25.0" + + state = hass.states.get("sensor.bron_pro_outdoor_temperature") + assert state.state == "29.0" + # WebServers state = hass.states.get("sensor.webserver_11_22_33_44_55_66_cpu_usage") assert state.state == "32" diff --git a/tests/components/airzone_cloud/test_switch.py 
b/tests/components/airzone_cloud/test_switch.py new file mode 100644 index 00000000000..5ee65f11fa8 --- /dev/null +++ b/tests/components/airzone_cloud/test_switch.py @@ -0,0 +1,71 @@ +"""The switch tests for the Airzone Cloud platform.""" + +from unittest.mock import patch + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, +) +from homeassistant.core import HomeAssistant + +from .util import async_init_integration + + +async def test_airzone_create_switches(hass: HomeAssistant) -> None: + """Test creation of switches.""" + + await async_init_integration(hass) + + state = hass.states.get("switch.dormitorio") + assert state.state == STATE_OFF + + state = hass.states.get("switch.salon") + assert state.state == STATE_ON + + +async def test_airzone_switch_off(hass: HomeAssistant) -> None: + """Test switch off.""" + + await async_init_integration(hass) + + with patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_patch_device", + return_value=None, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: "switch.salon", + }, + blocking=True, + ) + + state = hass.states.get("switch.salon") + assert state.state == STATE_OFF + + +async def test_airzone_switch_on(hass: HomeAssistant) -> None: + """Test switch on.""" + + await async_init_integration(hass) + + with patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_patch_device", + return_value=None, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.dormitorio", + }, + blocking=True, + ) + + state = hass.states.get("switch.dormitorio") + assert state.state == STATE_ON diff --git a/tests/components/airzone_cloud/util.py b/tests/components/airzone_cloud/util.py index fb538ea7c8e..52b0ae0bec3 100644 --- a/tests/components/airzone_cloud/util.py +++ b/tests/components/airzone_cloud/util.py @@ -24,12 +24,17 @@ from aioairzone_cloud.const import ( API_CELSIUS, API_CONFIG, API_CONNECTION_DATE, + API_CONSUMPTION_UE, API_CPU_WS, API_DEVICE_ID, API_DEVICES, + API_DISCH_COMP_TEMP_UE, API_DISCONNECTION_DATE, API_DOUBLE_SET_POINT, API_ERRORS, + API_EXCH_HEAT_TEMP_IU, + API_EXCH_HEAT_TEMP_UE, + API_EXT_TEMP, API_FAH, API_FREE, API_FREE_MEM, @@ -46,6 +51,8 @@ from aioairzone_cloud.const import ( API_MODE_AVAIL, API_NAME, API_OLD_ID, + API_PC_UE, + API_PE_UE, API_POWER, API_POWERFUL_MODE, API_RAD_ACTIVE, @@ -69,6 +76,7 @@ from aioairzone_cloud.const import ( API_RANGE_SP_MIN_HOT_AIR, API_RANGE_SP_MIN_STOP_AIR, API_RANGE_SP_MIN_VENT_AIR, + API_RETURN_TEMP, API_SETPOINT, API_SP_AIR_AUTO, API_SP_AIR_COOL, @@ -94,6 +102,7 @@ from aioairzone_cloud.const import ( API_THERMOSTAT_TYPE, API_TYPE, API_WARNINGS, + API_WORK_TEMP, API_WS_CONNECTED, API_WS_FW, API_WS_ID, @@ -266,6 +275,18 @@ GET_WEBSERVER_MOCK_AIDOO_PRO = { def mock_get_device_config(device: Device) -> dict[str, Any]: """Mock API device config.""" + if device.get_id() == "aidoo_pro": + return { + API_CONSUMPTION_UE: 3, + API_DISCH_COMP_TEMP_UE: {API_CELSIUS: 121, API_FAH: -250}, + API_EXCH_HEAT_TEMP_IU: {API_CELSIUS: 26, API_FAH: 79}, + API_EXCH_HEAT_TEMP_UE: {API_CELSIUS: -25, API_FAH: -13}, + API_EXT_TEMP: {API_CELSIUS: 29, API_FAH: 84}, + API_PC_UE: 0.15, + API_PE_UE: 0.02, + API_RETURN_TEMP: {API_CELSIUS: 26, API_FAH: 79}, + API_WORK_TEMP: {API_CELSIUS: 25, API_FAH: 77}, + } if device.get_id() == "system1": return { API_SYSTEM_FW: "3.35", diff --git 
a/tests/components/alarm_control_panel/__init__.py b/tests/components/alarm_control_panel/__init__.py index 1ef1161edd0..1f43c567844 100644 --- a/tests/components/alarm_control_panel/__init__.py +++ b/tests/components/alarm_control_panel/__init__.py @@ -1 +1,27 @@ """The tests for Alarm control panel platforms.""" + +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + + +async def help_async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [ALARM_CONTROL_PANEL_DOMAIN] + ) + return True + + +async def help_async_unload_entry( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Unload test config entry.""" + return await hass.config_entries.async_unload_platforms( + config_entry, [Platform.ALARM_CONTROL_PANEL] + ) diff --git a/tests/components/alarm_control_panel/common.py b/tests/components/alarm_control_panel/common.py index 36e9918f54c..8a631eeff36 100644 --- a/tests/components/alarm_control_panel/common.py +++ b/tests/components/alarm_control_panel/common.py @@ -8,6 +8,7 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.const import ( ATTR_CODE, @@ -20,12 +21,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant @@ -145,31 +140,31 @@ class MockAlarm(MockEntity, AlarmControlPanelEntity): def alarm_arm_away(self, code=None): """Send arm away command.""" - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY self.schedule_update_ha_state() def alarm_arm_home(self, code=None): """Send arm home command.""" - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME self.schedule_update_ha_state() def alarm_arm_night(self, code=None): """Send arm night command.""" - self._attr_state = STATE_ALARM_ARMED_NIGHT + self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT self.schedule_update_ha_state() def alarm_arm_vacation(self, code=None): """Send arm night command.""" - self._attr_state = STATE_ALARM_ARMED_VACATION + self._attr_alarm_state = AlarmControlPanelState.ARMED_VACATION self.schedule_update_ha_state() def alarm_disarm(self, code=None): """Send disarm command.""" if code == "1234": - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED self.schedule_update_ha_state() def alarm_trigger(self, code=None): """Send alarm trigger command.""" - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED self.schedule_update_ha_state() diff --git a/tests/components/alarm_control_panel/conftest.py b/tests/components/alarm_control_panel/conftest.py index 3e82b935493..ddf67b27860 100644 --- a/tests/components/alarm_control_panel/conftest.py +++ b/tests/components/alarm_control_panel/conftest.py @@ -1,7 +1,7 @@ """Fixturs for Alarm Control Panel tests.""" -from collections.abc import Generator -from
unittest.mock import MagicMock +from collections.abc import AsyncGenerator, Generator +from unittest.mock import MagicMock, patch import pytest @@ -13,7 +13,7 @@ from homeassistant.components.alarm_control_panel import ( from homeassistant.components.alarm_control_panel.const import CodeFormat from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, frame from homeassistant.helpers.entity_platform import AddEntitiesCallback from .common import MockAlarm @@ -107,6 +107,22 @@ class MockFlow(ConfigFlow): """Test flow.""" +@pytest.fixture(name="mock_as_custom_component") +async def mock_frame(hass: HomeAssistant) -> AsyncGenerator[None]: + """Mock frame.""" + with patch( + "homeassistant.helpers.frame.get_integration_frame", + return_value=frame.IntegrationFrame( + custom_integration=True, + integration="alarm_control_panel", + module="test_init.py", + relative_filename="test_init.py", + frame=frame.get_current_frame(), + ), + ): + yield + + @pytest.fixture(autouse=True) def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: """Mock config flow.""" diff --git a/tests/components/alarm_control_panel/test_device_action.py b/tests/components/alarm_control_panel/test_device_action.py index 9c5aaffd733..a7335017691 100644 --- a/tests/components/alarm_control_panel/test_device_action.py +++ b/tests/components/alarm_control_panel/test_device_action.py @@ -7,19 +7,10 @@ from homeassistant.components import automation from homeassistant.components.alarm_control_panel import ( DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - CONF_PLATFORM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - STATE_UNKNOWN, - EntityCategory, -) +from homeassistant.const import CONF_PLATFORM, STATE_UNKNOWN, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -541,27 +532,44 @@ async def test_action( hass.bus.async_fire("test_event_arm_away") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_AWAY + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_AWAY + ) hass.bus.async_fire("test_event_arm_home") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_HOME + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_HOME + ) hass.bus.async_fire("test_event_arm_vacation") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_VACATION + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_VACATION + ) hass.bus.async_fire("test_event_arm_night") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_NIGHT + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_NIGHT + ) hass.bus.async_fire("test_event_disarm") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == 
STATE_ALARM_DISARMED + assert ( + hass.states.get(entity_entry.entity_id).state == AlarmControlPanelState.DISARMED + ) hass.bus.async_fire("test_event_trigger") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_TRIGGERED + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.TRIGGERED + ) async def test_action_legacy( @@ -615,4 +623,7 @@ async def test_action_legacy( hass.bus.async_fire("test_event_arm_away") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_AWAY + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_AWAY + ) diff --git a/tests/components/alarm_control_panel/test_device_condition.py b/tests/components/alarm_control_panel/test_device_condition.py index da1d77f50a3..37cbc466e6d 100644 --- a/tests/components/alarm_control_panel/test_device_condition.py +++ b/tests/components/alarm_control_panel/test_device_condition.py @@ -7,18 +7,10 @@ from homeassistant.components import automation from homeassistant.components.alarm_control_panel import ( DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - EntityCategory, -) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -354,7 +346,7 @@ async def test_if_state( ] }, ) - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -366,7 +358,7 @@ async def test_if_state( assert len(service_calls) == 1 assert service_calls[0].data["some"] == "is_triggered - event - test_event1" - hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -378,7 +370,7 @@ async def test_if_state( assert len(service_calls) == 2 assert service_calls[1].data["some"] == "is_disarmed - event - test_event2" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_HOME) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_HOME) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -390,7 +382,7 @@ async def test_if_state( assert len(service_calls) == 3 assert service_calls[2].data["some"] == "is_armed_home - event - test_event3" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_AWAY) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -402,7 +394,7 @@ async def test_if_state( assert len(service_calls) == 4 assert service_calls[3].data["some"] == "is_armed_away - event - test_event4" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_NIGHT) + hass.states.async_set(entry.entity_id, 
AlarmControlPanelState.ARMED_NIGHT) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -414,7 +406,7 @@ async def test_if_state( assert len(service_calls) == 5 assert service_calls[4].data["some"] == "is_armed_night - event - test_event5" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_VACATION) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_VACATION) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -426,7 +418,7 @@ async def test_if_state( assert len(service_calls) == 6 assert service_calls[5].data["some"] == "is_armed_vacation - event - test_event6" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_CUSTOM_BYPASS) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_CUSTOM_BYPASS) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -488,7 +480,7 @@ async def test_if_state_legacy( ] }, ) - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) hass.bus.async_fire("test_event1") await hass.async_block_till_done() assert len(service_calls) == 1 diff --git a/tests/components/alarm_control_panel/test_device_trigger.py b/tests/components/alarm_control_panel/test_device_trigger.py index 46eba314dc1..17a301ccdf1 100644 --- a/tests/components/alarm_control_panel/test_device_trigger.py +++ b/tests/components/alarm_control_panel/test_device_trigger.py @@ -9,18 +9,10 @@ from homeassistant.components import automation from homeassistant.components.alarm_control_panel import ( DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, - EntityCategory, -) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -256,7 +248,7 @@ async def test_if_fires_on_state_change( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_ALARM_PENDING) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.PENDING) assert await async_setup_component( hass, @@ -400,7 +392,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is triggered. - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -409,7 +401,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is disarmed. - hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED) await hass.async_block_till_done() assert len(service_calls) == 2 assert ( @@ -418,7 +410,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is armed home. 
- hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_HOME) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_HOME) await hass.async_block_till_done() assert len(service_calls) == 3 assert ( @@ -427,7 +419,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is armed away. - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_AWAY) await hass.async_block_till_done() assert len(service_calls) == 4 assert ( @@ -436,7 +428,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is armed night. - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_NIGHT) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_NIGHT) await hass.async_block_till_done() assert len(service_calls) == 5 assert ( @@ -445,7 +437,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is armed vacation. - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_VACATION) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_VACATION) await hass.async_block_till_done() assert len(service_calls) == 6 assert ( @@ -471,7 +463,7 @@ async def test_if_fires_on_state_change_with_for( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED) assert await async_setup_component( hass, @@ -506,7 +498,7 @@ async def test_if_fires_on_state_change_with_for( await hass.async_block_till_done() assert len(service_calls) == 0 - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) @@ -536,7 +528,7 @@ async def test_if_fires_on_state_change_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED) assert await async_setup_component( hass, @@ -570,7 +562,7 @@ async def test_if_fires_on_state_change_legacy( await hass.async_block_till_done() assert len(service_calls) == 0 - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( diff --git a/tests/components/alarm_control_panel/test_init.py b/tests/components/alarm_control_panel/test_init.py index 06724978ce3..168d7ecc269 100644 --- a/tests/components/alarm_control_panel/test_init.py +++ b/tests/components/alarm_control_panel/test_init.py @@ -1,12 +1,13 @@ """Test for the alarm control panel const module.""" -from types import ModuleType from typing import Any +from unittest.mock import patch import pytest from homeassistant.components import alarm_control_panel -from homeassistant.components.alarm_control_panel.const import ( +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, CodeFormat, ) @@ -22,12 +23,18 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, frame from 
homeassistant.helpers.typing import UNDEFINED, UndefinedType +from . import help_async_setup_entry_init, help_async_unload_entry from .conftest import MockAlarmControlPanel -from tests.common import help_test_all, import_and_test_deprecated_constant_enum +from tests.common import ( + MockConfigEntry, + MockModule, + mock_integration, + setup_test_component_platform, +) async def help_test_async_alarm_control_panel_service( @@ -47,76 +54,6 @@ async def help_test_async_alarm_control_panel_service( await hass.async_block_till_done() -@pytest.mark.parametrize( - "module", - [alarm_control_panel, alarm_control_panel.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize( - "code_format", - list(alarm_control_panel.CodeFormat), -) -@pytest.mark.parametrize( - "module", - [alarm_control_panel, alarm_control_panel.const], -) -def test_deprecated_constant_code_format( - caplog: pytest.LogCaptureFixture, - code_format: alarm_control_panel.CodeFormat, - module: ModuleType, -) -> None: - """Test deprecated format constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, code_format, "FORMAT_", "2025.1" - ) - - -@pytest.mark.parametrize( - "entity_feature", - list(alarm_control_panel.AlarmControlPanelEntityFeature), -) -@pytest.mark.parametrize( - "module", - [alarm_control_panel, alarm_control_panel.const], -) -def test_deprecated_support_alarm_constants( - caplog: pytest.LogCaptureFixture, - entity_feature: alarm_control_panel.AlarmControlPanelEntityFeature, - module: ModuleType, -) -> None: - """Test deprecated support alarm constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, entity_feature, "SUPPORT_ALARM_", "2025.1" - ) - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockAlarmControlPanelEntity(alarm_control_panel.AlarmControlPanelEntity): - _attr_supported_features = 1 - - entity = MockAlarmControlPanelEntity() - assert ( - entity.supported_features - is alarm_control_panel.AlarmControlPanelEntityFeature(1) - ) - assert "MockAlarmControlPanelEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "AlarmControlPanelEntityFeature.ARM_HOME" in caplog.text - caplog.clear() - assert ( - entity.supported_features - is alarm_control_panel.AlarmControlPanelEntityFeature(1) - ) - assert "is using deprecated supported features values" not in caplog.text - - async def test_set_mock_alarm_control_panel_options( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -283,3 +220,228 @@ async def test_alarm_control_panel_with_default_code( hass, mock_alarm_control_panel_entity.entity_id, SERVICE_ALARM_DISARM ) mock_alarm_control_panel_entity.calls_disarm.assert_called_with("1234") + + +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_alarm_control_panel_not_log_deprecated_state_warning( + hass: HomeAssistant, + mock_alarm_control_panel_entity: MockAlarmControlPanel, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test correctly using alarm_state doesn't log issue or raise repair.""" + state = hass.states.get(mock_alarm_control_panel_entity.entity_id) + assert state is not None + assert ( + "the 'alarm_state' property and return its state using the AlarmControlPanelState enum" + not in caplog.text + ) + + 
+@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_alarm_control_panel_log_deprecated_state_warning_using_state_prop( + hass: HomeAssistant, + code_format: CodeFormat | None, + supported_features: AlarmControlPanelEntityFeature, + code_arm_required: bool, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test incorrectly using state property does log issue and raise repair.""" + + class MockLegacyAlarmControlPanel(MockAlarmControlPanel): + """Mocked alarm control entity.""" + + def __init__( + self, + supported_features: AlarmControlPanelEntityFeature = AlarmControlPanelEntityFeature( + 0 + ), + code_format: CodeFormat | None = None, + code_arm_required: bool = True, + ) -> None: + """Initialize the alarm control.""" + super().__init__(supported_features, code_format, code_arm_required) + + @property + def state(self) -> str: + """Return the state of the entity.""" + return "disarmed" + + entity = MockLegacyAlarmControlPanel( + supported_features=supported_features, + code_format=code_format, + code_arm_required=code_arm_required, + ) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + built_in=False, + ) + setup_test_component_platform( + hass, ALARM_CONTROL_PANEL_DOMAIN, [entity], from_config_entry=True + ) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + state = hass.states.get(entity.entity_id) + assert state is not None + + assert ( + "Detected that custom integration 'alarm_control_panel' is setting state" + " directly. Entity None (.MockLegacyAlarmControlPanel'>) should implement" + " the 'alarm_state' property and return its state using the AlarmControlPanelState" + " enum at test_init.py, line 123: yield. 
This will stop working in Home Assistant" + " 2025.11, please create a bug report at" in caplog.text + ) + + +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state_attr( + hass: HomeAssistant, + code_format: CodeFormat | None, + supported_features: AlarmControlPanelEntityFeature, + code_arm_required: bool, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test incorrectly using _attr_state attribute does log issue and raise repair.""" + + class MockLegacyAlarmControlPanel(MockAlarmControlPanel): + """Mocked alarm control entity.""" + + def __init__( + self, + supported_features: AlarmControlPanelEntityFeature = AlarmControlPanelEntityFeature( + 0 + ), + code_format: CodeFormat | None = None, + code_arm_required: bool = True, + ) -> None: + """Initialize the alarm control.""" + super().__init__(supported_features, code_format, code_arm_required) + + def alarm_disarm(self, code: str | None = None) -> None: + """Mock alarm disarm calls.""" + self._attr_state = "disarmed" + + entity = MockLegacyAlarmControlPanel( + supported_features=supported_features, + code_format=code_format, + code_arm_required=code_arm_required, + ) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform( + hass, ALARM_CONTROL_PANEL_DOMAIN, [entity], from_config_entry=True + ) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + state = hass.states.get(entity.entity_id) + assert state is not None + + assert ( + "Detected that custom integration 'alarm_control_panel' is setting state directly." + not in caplog.text + ) + + await help_test_async_alarm_control_panel_service( + hass, entity.entity_id, SERVICE_ALARM_DISARM + ) + + assert ( + "Detected that custom integration 'alarm_control_panel' is setting state directly." + " Entity alarm_control_panel.test_alarm_control_panel" + " (.MockLegacyAlarmControlPanel'>) should implement the 'alarm_state' property" + " and return its state using the AlarmControlPanelState enum at test_init.py, line 123:" + " yield. This will stop working in Home Assistant 2025.11," + " please create a bug report at" in caplog.text + ) + caplog.clear() + await help_test_async_alarm_control_panel_service( + hass, entity.entity_id, SERVICE_ALARM_DISARM + ) + # Test we only log once + assert ( + "Detected that custom integration 'alarm_control_panel' is setting state directly." 
+ not in caplog.text + ) + + +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_alarm_control_panel_deprecated_state_does_not_break_state( + hass: HomeAssistant, + code_format: CodeFormat | None, + supported_features: AlarmControlPanelEntityFeature, + code_arm_required: bool, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test using _attr_state attribute does not break state.""" + + class MockLegacyAlarmControlPanel(MockAlarmControlPanel): + """Mocked alarm control entity.""" + + def __init__( + self, + supported_features: AlarmControlPanelEntityFeature = AlarmControlPanelEntityFeature( + 0 + ), + code_format: CodeFormat | None = None, + code_arm_required: bool = True, + ) -> None: + """Initialize the alarm control.""" + self._attr_state = "armed_away" + super().__init__(supported_features, code_format, code_arm_required) + + def alarm_disarm(self, code: str | None = None) -> None: + """Mock alarm disarm calls.""" + self._attr_state = "disarmed" + + entity = MockLegacyAlarmControlPanel( + supported_features=supported_features, + code_format=code_format, + code_arm_required=code_arm_required, + ) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform( + hass, ALARM_CONTROL_PANEL_DOMAIN, [entity], from_config_entry=True + ) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + state = hass.states.get(entity.entity_id) + assert state is not None + assert state.state == "armed_away" + + await help_test_async_alarm_control_panel_service( + hass, entity.entity_id, SERVICE_ALARM_DISARM + ) + + state = hass.states.get(entity.entity_id) + assert state is not None + assert state.state == "disarmed" diff --git a/tests/components/alarm_control_panel/test_reproduce_state.py b/tests/components/alarm_control_panel/test_reproduce_state.py index c7984b0793e..fcb4fdee36e 100644 --- a/tests/components/alarm_control_panel/test_reproduce_state.py +++ b/tests/components/alarm_control_panel/test_reproduce_state.py @@ -2,6 +2,7 @@ import pytest +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.const import ( SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_CUSTOM_BYPASS, @@ -10,13 +11,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.state import async_reproduce_state @@ -29,27 +23,37 @@ async def test_reproducing_states( ) -> None: """Test reproducing Alarm control panel states.""" hass.states.async_set( - "alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY, {} - ) - hass.states.async_set( - "alarm_control_panel.entity_armed_custom_bypass", - STATE_ALARM_ARMED_CUSTOM_BYPASS, + "alarm_control_panel.entity_armed_away", + AlarmControlPanelState.ARMED_AWAY, {}, ) hass.states.async_set( - "alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME, {} + "alarm_control_panel.entity_armed_custom_bypass", + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + {}, ) hass.states.async_set( - "alarm_control_panel.entity_armed_night", 
STATE_ALARM_ARMED_NIGHT, {} + "alarm_control_panel.entity_armed_home", + AlarmControlPanelState.ARMED_HOME, + {}, ) hass.states.async_set( - "alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_VACATION, {} + "alarm_control_panel.entity_armed_night", + AlarmControlPanelState.ARMED_NIGHT, + {}, ) hass.states.async_set( - "alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED, {} + "alarm_control_panel.entity_armed_vacation", + AlarmControlPanelState.ARMED_VACATION, + {}, ) hass.states.async_set( - "alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED, {} + "alarm_control_panel.entity_disarmed", AlarmControlPanelState.DISARMED, {} + ) + hass.states.async_set( + "alarm_control_panel.entity_triggered", + AlarmControlPanelState.TRIGGERED, + {}, ) arm_away_calls = async_mock_service( @@ -76,18 +80,34 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY), + State( + "alarm_control_panel.entity_armed_away", + AlarmControlPanelState.ARMED_AWAY, + ), State( "alarm_control_panel.entity_armed_custom_bypass", - STATE_ALARM_ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, ), - State("alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME), - State("alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_NIGHT), State( - "alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_VACATION + "alarm_control_panel.entity_armed_home", + AlarmControlPanelState.ARMED_HOME, + ), + State( + "alarm_control_panel.entity_armed_night", + AlarmControlPanelState.ARMED_NIGHT, + ), + State( + "alarm_control_panel.entity_armed_vacation", + AlarmControlPanelState.ARMED_VACATION, + ), + State( + "alarm_control_panel.entity_disarmed", + AlarmControlPanelState.DISARMED, + ), + State( + "alarm_control_panel.entity_triggered", + AlarmControlPanelState.TRIGGERED, ), - State("alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED), - State("alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED), ], ) @@ -117,17 +137,34 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("alarm_control_panel.entity_armed_away", STATE_ALARM_TRIGGERED), State( - "alarm_control_panel.entity_armed_custom_bypass", STATE_ALARM_ARMED_AWAY + "alarm_control_panel.entity_armed_away", + AlarmControlPanelState.TRIGGERED, ), State( - "alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_CUSTOM_BYPASS + "alarm_control_panel.entity_armed_custom_bypass", + AlarmControlPanelState.ARMED_AWAY, + ), + State( + "alarm_control_panel.entity_armed_home", + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + State( + "alarm_control_panel.entity_armed_night", + AlarmControlPanelState.ARMED_HOME, + ), + State( + "alarm_control_panel.entity_armed_vacation", + AlarmControlPanelState.ARMED_NIGHT, + ), + State( + "alarm_control_panel.entity_disarmed", + AlarmControlPanelState.ARMED_VACATION, + ), + State( + "alarm_control_panel.entity_triggered", + AlarmControlPanelState.DISARMED, ), - State("alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_HOME), - State("alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_NIGHT), - State("alarm_control_panel.entity_disarmed", STATE_ALARM_ARMED_VACATION), - State("alarm_control_panel.entity_triggered", STATE_ALARM_DISARMED), # Should not raise State("alarm_control_panel.non_existing", "on"), ], diff --git a/tests/components/alert/test_init.py b/tests/components/alert/test_init.py index 31236c84f34..263fb69c883 100644 --- 
a/tests/components/alert/test_init.py +++ b/tests/components/alert/test_init.py @@ -337,7 +337,7 @@ async def test_skipfirst(hass: HomeAssistant, mock_notifier: list[ServiceCall]) async def test_done_message_state_tracker_reset_on_cancel(hass: HomeAssistant) -> None: """Test that the done message is reset when canceled.""" - entity = alert.Alert(hass, *TEST_NOACK) + entity = alert.AlertEntity(hass, *TEST_NOACK) entity._cancel = lambda *args: None assert entity._send_done_message is False entity._send_done_message = True diff --git a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py index b56d8054d7b..b10a93df0c9 100644 --- a/tests/components/alexa/test_capabilities.py +++ b/tests/components/alexa/test_capabilities.py @@ -5,13 +5,14 @@ from unittest.mock import patch import pytest +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.alexa import smart_home from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, ClimateEntityFeature, HVACMode, ) -from homeassistant.components.lock import STATE_JAMMED, STATE_LOCKING, STATE_UNLOCKING +from homeassistant.components.lock import LockState from homeassistant.components.media_player import MediaPlayerEntityFeature from homeassistant.components.valve import ValveEntityFeature from homeassistant.components.water_heater import ( @@ -23,16 +24,9 @@ from homeassistant.components.water_heater import ( ) from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_LOCKED, STATE_OFF, STATE_UNAVAILABLE, STATE_UNKNOWN, - STATE_UNLOCKED, UnitOfTemperature, ) from homeassistant.core import HomeAssistant @@ -165,11 +159,11 @@ async def test_api_set_color_temperature(hass: HomeAssistant) -> None: assert len(call_light) == 1 assert call_light[0].data["entity_id"] == "light.test" - assert call_light[0].data["kelvin"] == 7500 + assert call_light[0].data["color_temp_kelvin"] == 7500 assert msg["header"]["name"] == "Response" -@pytest.mark.parametrize(("result", "initial"), [(383, "333"), (500, "500")]) +@pytest.mark.parametrize(("result", "initial"), [(2500, "3000"), (2000, "2000")]) async def test_api_decrease_color_temp( hass: HomeAssistant, result: int, initial: str ) -> None: @@ -182,7 +176,11 @@ async def test_api_decrease_color_temp( hass.states.async_set( "light.test", "off", - {"friendly_name": "Test light", "color_temp": initial, "max_mireds": 500}, + { + "friendly_name": "Test light", + "color_temp_kelvin": initial, + "min_color_temp_kelvin": 2000, + }, ) call_light = async_mock_service(hass, "light", "turn_on") @@ -195,11 +193,11 @@ async def test_api_decrease_color_temp( assert len(call_light) == 1 assert call_light[0].data["entity_id"] == "light.test" - assert call_light[0].data["color_temp"] == result + assert call_light[0].data["color_temp_kelvin"] == result assert msg["header"]["name"] == "Response" -@pytest.mark.parametrize(("result", "initial"), [(283, "333"), (142, "142")]) +@pytest.mark.parametrize(("result", "initial"), [(3500, "3000"), (7000, "7000")]) async def test_api_increase_color_temp( hass: HomeAssistant, result: int, initial: str ) -> None: @@ -212,7 +210,11 @@ async def test_api_increase_color_temp( hass.states.async_set( "light.test", "off", - {"friendly_name": "Test light", "color_temp": initial, "min_mireds": 142}, + { + "friendly_name": "Test light", + "color_temp_kelvin": 
initial, + "max_color_temp_kelvin": 7000, + }, ) call_light = async_mock_service(hass, "light", "turn_on") @@ -225,7 +227,7 @@ async def test_api_increase_color_temp( assert len(call_light) == 1 assert call_light[0].data["entity_id"] == "light.test" - assert call_light[0].data["color_temp"] == result + assert call_light[0].data["color_temp_kelvin"] == result assert msg["header"]["name"] == "Response" @@ -392,11 +394,11 @@ async def test_api_remote_set_power_state( async def test_report_lock_state(hass: HomeAssistant) -> None: """Test LockController implements lockState property.""" - hass.states.async_set("lock.locked", STATE_LOCKED, {}) - hass.states.async_set("lock.unlocked", STATE_UNLOCKED, {}) - hass.states.async_set("lock.unlocking", STATE_UNLOCKING, {}) - hass.states.async_set("lock.locking", STATE_LOCKING, {}) - hass.states.async_set("lock.jammed", STATE_JAMMED, {}) + hass.states.async_set("lock.locked", LockState.LOCKED, {}) + hass.states.async_set("lock.unlocked", LockState.UNLOCKED, {}) + hass.states.async_set("lock.unlocking", LockState.UNLOCKING, {}) + hass.states.async_set("lock.locking", LockState.LOCKING, {}) + hass.states.async_set("lock.jammed", LockState.JAMMED, {}) hass.states.async_set("lock.unknown", STATE_UNKNOWN, {}) properties = await reported_properties(hass, "lock.locked") @@ -1353,15 +1355,23 @@ async def test_temperature_sensor_water_heater(hass: HomeAssistant) -> None: async def test_report_alarm_control_panel_state(hass: HomeAssistant) -> None: """Test SecurityPanelController implements armState property.""" - hass.states.async_set("alarm_control_panel.armed_away", STATE_ALARM_ARMED_AWAY, {}) hass.states.async_set( - "alarm_control_panel.armed_custom_bypass", STATE_ALARM_ARMED_CUSTOM_BYPASS, {} + "alarm_control_panel.armed_away", AlarmControlPanelState.ARMED_AWAY, {} ) - hass.states.async_set("alarm_control_panel.armed_home", STATE_ALARM_ARMED_HOME, {}) hass.states.async_set( - "alarm_control_panel.armed_night", STATE_ALARM_ARMED_NIGHT, {} + "alarm_control_panel.armed_custom_bypass", + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + {}, + ) + hass.states.async_set( + "alarm_control_panel.armed_home", AlarmControlPanelState.ARMED_HOME, {} + ) + hass.states.async_set( + "alarm_control_panel.armed_night", AlarmControlPanelState.ARMED_NIGHT, {} + ) + hass.states.async_set( + "alarm_control_panel.disarmed", AlarmControlPanelState.DISARMED, {} ) - hass.states.async_set("alarm_control_panel.disarmed", STATE_ALARM_DISARMED, {}) properties = await reported_properties(hass, "alarm_control_panel.armed_away") properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY") diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py index 6ccf265dcdc..e4a46db7d34 100644 --- a/tests/components/alexa/test_smart_home.py +++ b/tests/components/alexa/test_smart_home.py @@ -12,7 +12,6 @@ from homeassistant.components.cover import CoverDeviceClass, CoverEntityFeature from homeassistant.components.media_player import MediaPlayerEntityFeature from homeassistant.components.vacuum import VacuumEntityFeature from homeassistant.components.valve import SERVICE_STOP_VALVE, ValveEntityFeature -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE, @@ -20,6 +19,7 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import Context, Event, HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from 
homeassistant.helpers import entityfilter from homeassistant.setup import async_setup_component from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM @@ -3999,6 +3999,108 @@ async def test_alarm_control_panel_code_arm_required(hass: HomeAssistant) -> Non await discovery_test(device, hass, expected_endpoints=0) +async def test_alarm_control_panel_disarm_required(hass: HomeAssistant) -> None: + """Test alarm_control_panel disarm required.""" + device = ( + "alarm_control_panel.test_4", + "armed_away", + { + "friendly_name": "Test Alarm Control Panel 4", + "code_arm_required": False, + "code_format": "FORMAT_NUMBER", + "code": "1234", + "supported_features": 3, + }, + ) + appliance = await discovery_test(device, hass) + + assert appliance["endpointId"] == "alarm_control_panel#test_4" + assert appliance["displayCategories"][0] == "SECURITY_PANEL" + assert appliance["friendlyName"] == "Test Alarm Control Panel 4" + assert_endpoint_capabilities( + appliance, "Alexa.SecurityPanelController", "Alexa.EndpointHealth", "Alexa" + ) + + properties = await reported_properties(hass, "alarm_control_panel#test_4") + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY") + + msg = await assert_request_fails( + "Alexa.SecurityPanelController", + "Arm", + "alarm_control_panel#test_4", + "alarm_control_panel.alarm_arm_home", + hass, + payload={"armState": "ARMED_STAY"}, + ) + assert msg["event"]["payload"]["type"] == "AUTHORIZATION_REQUIRED" + assert ( + msg["event"]["payload"]["message"] + == "You must disarm the system before you can set the requested arm state." + ) + + _, msg = await assert_request_calls_service( + "Alexa.SecurityPanelController", + "Arm", + "alarm_control_panel#test_4", + "alarm_control_panel.alarm_arm_away", + hass, + response_type="Arm.Response", + payload={"armState": "ARMED_AWAY"}, + ) + properties = ReportedProperties(msg["context"]["properties"]) + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY") + + +async def test_alarm_control_panel_change_arm_type(hass: HomeAssistant) -> None: + """Test alarm_control_panel change arm type.""" + device = ( + "alarm_control_panel.test_5", + "armed_home", + { + "friendly_name": "Test Alarm Control Panel 5", + "code_arm_required": False, + "code_format": "FORMAT_NUMBER", + "code": "1234", + "supported_features": 3, + }, + ) + appliance = await discovery_test(device, hass) + + assert appliance["endpointId"] == "alarm_control_panel#test_5" + assert appliance["displayCategories"][0] == "SECURITY_PANEL" + assert appliance["friendlyName"] == "Test Alarm Control Panel 5" + assert_endpoint_capabilities( + appliance, "Alexa.SecurityPanelController", "Alexa.EndpointHealth", "Alexa" + ) + + properties = await reported_properties(hass, "alarm_control_panel#test_5") + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_STAY") + + _, msg = await assert_request_calls_service( + "Alexa.SecurityPanelController", + "Arm", + "alarm_control_panel#test_5", + "alarm_control_panel.alarm_arm_home", + hass, + response_type="Arm.Response", + payload={"armState": "ARMED_STAY"}, + ) + properties = ReportedProperties(msg["context"]["properties"]) + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_STAY") + + _, msg = await assert_request_calls_service( + "Alexa.SecurityPanelController", + "Arm", + "alarm_control_panel#test_5", + "alarm_control_panel.alarm_arm_away", + hass, + response_type="Arm.Response", + payload={"armState": "ARMED_AWAY"}, 
+ ) + properties = ReportedProperties(msg["context"]["properties"]) + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY") + + async def test_range_unsupported_domain(hass: HomeAssistant) -> None: """Test rangeController with unsupported domain.""" device = ("switch.test", "on", {"friendly_name": "Test switch"}) @@ -4444,6 +4546,7 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: "tilt_position_attr_in_service_call", "supported_features", "service_call", + "stop_feature_enabled", ), [ ( @@ -4454,6 +4557,7 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: | CoverEntityFeature.CLOSE_TILT | CoverEntityFeature.STOP_TILT, "cover.set_cover_tilt_position", + True, ), ( 0, @@ -4463,6 +4567,7 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: | CoverEntityFeature.CLOSE_TILT | CoverEntityFeature.STOP_TILT, "cover.close_cover_tilt", + True, ), ( 99, @@ -4472,6 +4577,7 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: | CoverEntityFeature.CLOSE_TILT | CoverEntityFeature.STOP_TILT, "cover.set_cover_tilt_position", + True, ), ( 100, @@ -4481,36 +4587,42 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: | CoverEntityFeature.CLOSE_TILT | CoverEntityFeature.STOP_TILT, "cover.open_cover_tilt", + True, ), ( 0, 0, CoverEntityFeature.SET_TILT_POSITION, "cover.set_cover_tilt_position", + False, ), ( 60, 60, CoverEntityFeature.SET_TILT_POSITION, "cover.set_cover_tilt_position", + False, ), ( 100, 100, CoverEntityFeature.SET_TILT_POSITION, "cover.set_cover_tilt_position", + False, ), ( 0, 0, CoverEntityFeature.SET_TILT_POSITION | CoverEntityFeature.OPEN_TILT, "cover.set_cover_tilt_position", + False, ), ( 100, 100, CoverEntityFeature.SET_TILT_POSITION | CoverEntityFeature.CLOSE_TILT, "cover.set_cover_tilt_position", + False, ), ], ids=[ @@ -4531,6 +4643,7 @@ async def test_cover_tilt_position( tilt_position_attr_in_service_call: int | None, supported_features: CoverEntityFeature, service_call: str, + stop_feature_enabled: bool, ) -> None: """Test cover discovery and tilt position using rangeController.""" device = ( @@ -4549,12 +4662,24 @@ async def test_cover_tilt_position( assert appliance["displayCategories"][0] == "INTERIOR_BLIND" assert appliance["friendlyName"] == "Test cover tilt range" + expected_interfaces: dict[bool, list[str]] = { + False: [ + "Alexa.PowerController", + "Alexa.RangeController", + "Alexa.EndpointHealth", + "Alexa", + ], + True: [ + "Alexa.PowerController", + "Alexa.RangeController", + "Alexa.PlaybackController", + "Alexa.EndpointHealth", + "Alexa", + ], + } + capabilities = assert_endpoint_capabilities( - appliance, - "Alexa.PowerController", - "Alexa.RangeController", - "Alexa.EndpointHealth", - "Alexa", + appliance, *expected_interfaces[stop_feature_enabled] ) range_capability = get_capability(capabilities, "Alexa.RangeController") @@ -4611,6 +4736,7 @@ async def test_cover_tilt_position_range(hass: HomeAssistant) -> None: appliance, "Alexa.PowerController", "Alexa.RangeController", + "Alexa.PlaybackController", "Alexa.EndpointHealth", "Alexa", ) @@ -4665,6 +4791,66 @@ async def test_cover_tilt_position_range(hass: HomeAssistant) -> None: ) +@pytest.mark.parametrize( + ("supported_stop_features", "cover_stop_calls", "cover_stop_tilt_calls"), + [ + (CoverEntityFeature(0), 0, 0), + (CoverEntityFeature.STOP, 1, 0), + (CoverEntityFeature.STOP_TILT, 0, 1), + (CoverEntityFeature.STOP | CoverEntityFeature.STOP_TILT, 1, 1), + ], + ids=["no_stop", "stop_cover", "stop_cover_tilt", 
"stop_cover_and_stop_cover_tilt"], +) +async def test_cover_stop( + hass: HomeAssistant, + supported_stop_features: CoverEntityFeature, + cover_stop_calls: int, + cover_stop_tilt_calls: int, +) -> None: + """Test cover and cover tilt can be stopped.""" + + base_features = ( + CoverEntityFeature.OPEN + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + ) + + device = ( + "cover.test_semantics", + "open", + { + "friendly_name": "Test cover semantics", + "device_class": "blind", + "supported_features": int(base_features | supported_stop_features), + "current_position": 30, + "tilt_position": 30, + }, + ) + appliance = await discovery_test(device, hass) + + assert appliance["endpointId"] == "cover#test_semantics" + assert appliance["displayCategories"][0] == "INTERIOR_BLIND" + assert appliance["friendlyName"] == "Test cover semantics" + + calls_stop = async_mock_service(hass, "cover", "stop_cover") + calls_stop_tilt = async_mock_service(hass, "cover", "stop_cover_tilt") + + context = Context() + request = get_new_request( + "Alexa.PlaybackController", "Stop", "cover#test_semantics" + ) + await smart_home.async_handle_message( + hass, get_default_config(hass), request, context + ) + await hass.async_block_till_done() + + assert len(calls_stop) == cover_stop_calls + assert len(calls_stop_tilt) == cover_stop_tilt_calls + + async def test_cover_semantics_position_and_tilt(hass: HomeAssistant) -> None: """Test cover discovery and semantics with position and tilt support.""" device = ( @@ -4688,10 +4874,30 @@ async def test_cover_semantics_position_and_tilt(hass: HomeAssistant) -> None: appliance, "Alexa.PowerController", "Alexa.RangeController", + "Alexa.PlaybackController", "Alexa.EndpointHealth", "Alexa", ) + playback_controller_capability = get_capability( + capabilities, "Alexa.PlaybackController" + ) + assert playback_controller_capability is not None + assert playback_controller_capability["supportedOperations"] == ["Stop"] + + # Assert both the cover and tilt stop calls are invoked + stop_cover_tilt_calls = async_mock_service(hass, "cover", "stop_cover_tilt") + await assert_request_calls_service( + "Alexa.PlaybackController", + "Stop", + "cover#test_semantics", + "cover.stop_cover", + hass, + ) + assert len(stop_cover_tilt_calls) == 1 + call = stop_cover_tilt_calls[0] + assert call.data == {"entity_id": "cover.test_semantics"} + # Assert for Position Semantics position_capability = get_capability( capabilities, "Alexa.RangeController", "cover.position" diff --git a/tests/components/amberelectric/helpers.py b/tests/components/amberelectric/helpers.py index 2bc65fdd558..971f3690a0d 100644 --- a/tests/components/amberelectric/helpers.py +++ b/tests/components/amberelectric/helpers.py @@ -2,73 +2,82 @@ from datetime import datetime, timedelta -from amberelectric.model.actual_interval import ActualInterval -from amberelectric.model.channel import ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.forecast_interval import ForecastInterval -from amberelectric.model.interval import Descriptor, SpikeStatus +from amberelectric.models.actual_interval import ActualInterval +from amberelectric.models.channel import ChannelType +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.forecast_interval import ForecastInterval +from amberelectric.models.interval import Interval +from 
amberelectric.models.price_descriptor import PriceDescriptor +from amberelectric.models.spike_status import SpikeStatus from dateutil import parser -def generate_actual_interval( - channel_type: ChannelType, end_time: datetime -) -> ActualInterval: +def generate_actual_interval(channel_type: ChannelType, end_time: datetime) -> Interval: """Generate a mock actual interval.""" start_time = end_time - timedelta(minutes=30) - return ActualInterval( - duration=30, - spot_per_kwh=1.0, - per_kwh=8.0, - date=start_time.date(), - nem_time=end_time, - start_time=start_time, - end_time=end_time, - renewables=50, - channel_type=channel_type.value, - spike_status=SpikeStatus.NO_SPIKE.value, - descriptor=Descriptor.LOW.value, + return Interval( + ActualInterval( + type="ActualInterval", + duration=30, + spot_per_kwh=1.0, + per_kwh=8.0, + date=start_time.date(), + nem_time=end_time, + start_time=start_time, + end_time=end_time, + renewables=50, + channel_type=channel_type, + spike_status=SpikeStatus.NONE, + descriptor=PriceDescriptor.LOW, + ) ) def generate_current_interval( channel_type: ChannelType, end_time: datetime -) -> CurrentInterval: +) -> Interval: """Generate a mock current price.""" start_time = end_time - timedelta(minutes=30) - return CurrentInterval( - duration=30, - spot_per_kwh=1.0, - per_kwh=8.0, - date=start_time.date(), - nem_time=end_time, - start_time=start_time, - end_time=end_time, - renewables=50.6, - channel_type=channel_type.value, - spike_status=SpikeStatus.NO_SPIKE.value, - descriptor=Descriptor.EXTREMELY_LOW.value, - estimate=True, + return Interval( + CurrentInterval( + type="CurrentInterval", + duration=30, + spot_per_kwh=1.0, + per_kwh=8.0, + date=start_time.date(), + nem_time=end_time, + start_time=start_time, + end_time=end_time, + renewables=50.6, + channel_type=channel_type, + spike_status=SpikeStatus.NONE, + descriptor=PriceDescriptor.EXTREMELYLOW, + estimate=True, + ) ) def generate_forecast_interval( channel_type: ChannelType, end_time: datetime -) -> ForecastInterval: +) -> Interval: """Generate a mock forecast interval.""" start_time = end_time - timedelta(minutes=30) - return ForecastInterval( - duration=30, - spot_per_kwh=1.1, - per_kwh=8.8, - date=start_time.date(), - nem_time=end_time, - start_time=start_time, - end_time=end_time, - renewables=50, - channel_type=channel_type.value, - spike_status=SpikeStatus.NO_SPIKE.value, - descriptor=Descriptor.VERY_LOW.value, - estimate=True, + return Interval( + ForecastInterval( + type="ForecastInterval", + duration=30, + spot_per_kwh=1.1, + per_kwh=8.8, + date=start_time.date(), + nem_time=end_time, + start_time=start_time, + end_time=end_time, + renewables=50, + channel_type=channel_type, + spike_status=SpikeStatus.NONE, + descriptor=PriceDescriptor.VERYLOW, + estimate=True, + ) ) @@ -94,31 +103,31 @@ GENERAL_CHANNEL = [ CONTROLLED_LOAD_CHANNEL = [ generate_current_interval( - ChannelType.CONTROLLED_LOAD, parser.parse("2021-09-21T08:30:00+10:00") + ChannelType.CONTROLLEDLOAD, parser.parse("2021-09-21T08:30:00+10:00") ), generate_forecast_interval( - ChannelType.CONTROLLED_LOAD, parser.parse("2021-09-21T09:00:00+10:00") + ChannelType.CONTROLLEDLOAD, parser.parse("2021-09-21T09:00:00+10:00") ), generate_forecast_interval( - ChannelType.CONTROLLED_LOAD, parser.parse("2021-09-21T09:30:00+10:00") + ChannelType.CONTROLLEDLOAD, parser.parse("2021-09-21T09:30:00+10:00") ), generate_forecast_interval( - ChannelType.CONTROLLED_LOAD, parser.parse("2021-09-21T10:00:00+10:00") + ChannelType.CONTROLLEDLOAD, 
parser.parse("2021-09-21T10:00:00+10:00") ), ] FEED_IN_CHANNEL = [ generate_current_interval( - ChannelType.FEED_IN, parser.parse("2021-09-21T08:30:00+10:00") + ChannelType.FEEDIN, parser.parse("2021-09-21T08:30:00+10:00") ), generate_forecast_interval( - ChannelType.FEED_IN, parser.parse("2021-09-21T09:00:00+10:00") + ChannelType.FEEDIN, parser.parse("2021-09-21T09:00:00+10:00") ), generate_forecast_interval( - ChannelType.FEED_IN, parser.parse("2021-09-21T09:30:00+10:00") + ChannelType.FEEDIN, parser.parse("2021-09-21T09:30:00+10:00") ), generate_forecast_interval( - ChannelType.FEED_IN, parser.parse("2021-09-21T10:00:00+10:00") + ChannelType.FEEDIN, parser.parse("2021-09-21T10:00:00+10:00") ), ] diff --git a/tests/components/amberelectric/test_binary_sensor.py b/tests/components/amberelectric/test_binary_sensor.py index 2c1ee22b644..6a6ca372bc2 100644 --- a/tests/components/amberelectric/test_binary_sensor.py +++ b/tests/components/amberelectric/test_binary_sensor.py @@ -5,10 +5,10 @@ from __future__ import annotations from collections.abc import AsyncGenerator from unittest.mock import Mock, patch -from amberelectric.model.channel import ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.interval import SpikeStatus -from amberelectric.model.tariff_information import TariffInformation +from amberelectric.models.channel import ChannelType +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.spike_status import SpikeStatus +from amberelectric.models.tariff_information import TariffInformation from dateutil import parser import pytest @@ -42,10 +42,10 @@ async def setup_no_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: - instance.get_current_price = Mock(return_value=GENERAL_CHANNEL) + instance.get_current_prices = Mock(return_value=GENERAL_CHANNEL) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value @@ -65,7 +65,7 @@ async def setup_potential_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: general_channel: list[CurrentInterval] = [ @@ -73,8 +73,8 @@ async def setup_potential_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") ), ] - general_channel[0].spike_status = SpikeStatus.POTENTIAL - instance.get_current_price = Mock(return_value=general_channel) + general_channel[0].actual_instance.spike_status = SpikeStatus.POTENTIAL + instance.get_current_prices = Mock(return_value=general_channel) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value @@ -94,7 +94,7 @@ async def setup_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: general_channel: list[CurrentInterval] = [ @@ -102,8 +102,8 @@ async def setup_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") ), ] - general_channel[0].spike_status = SpikeStatus.SPIKE - instance.get_current_price = Mock(return_value=general_channel) + general_channel[0].actual_instance.spike_status 
= SpikeStatus.SPIKE + instance.get_current_prices = Mock(return_value=general_channel) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value @@ -156,7 +156,7 @@ async def setup_inactive_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mo instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: general_channel: list[CurrentInterval] = [ @@ -164,8 +164,10 @@ async def setup_inactive_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mo ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") ), ] - general_channel[0].tariff_information = TariffInformation(demandWindow=False) - instance.get_current_price = Mock(return_value=general_channel) + general_channel[0].actual_instance.tariff_information = TariffInformation( + demandWindow=False + ) + instance.get_current_prices = Mock(return_value=general_channel) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value @@ -185,7 +187,7 @@ async def setup_active_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: general_channel: list[CurrentInterval] = [ @@ -193,8 +195,10 @@ async def setup_active_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") ), ] - general_channel[0].tariff_information = TariffInformation(demandWindow=True) - instance.get_current_price = Mock(return_value=general_channel) + general_channel[0].actual_instance.tariff_information = TariffInformation( + demandWindow=True + ) + instance.get_current_prices = Mock(return_value=general_channel) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value diff --git a/tests/components/amberelectric/test_config_flow.py b/tests/components/amberelectric/test_config_flow.py index 030b82d3596..b394977b0e8 100644 --- a/tests/components/amberelectric/test_config_flow.py +++ b/tests/components/amberelectric/test_config_flow.py @@ -5,7 +5,8 @@ from datetime import date from unittest.mock import Mock, patch from amberelectric import ApiException -from amberelectric.model.site import Site, SiteStatus +from amberelectric.models.site import Site +from amberelectric.models.site_status import SiteStatus import pytest from homeassistant.components.amberelectric.config_flow import filter_sites @@ -28,7 +29,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") def mock_invalid_key_api() -> Generator: """Return an authentication error.""" - with patch("amberelectric.api.AmberApi.create") as mock: + with patch("amberelectric.AmberApi") as mock: mock.return_value.get_sites.side_effect = ApiException(status=403) yield mock @@ -36,7 +37,7 @@ def mock_invalid_key_api() -> Generator: @pytest.fixture(name="api_error") def mock_api_error() -> Generator: """Return an authentication error.""" - with patch("amberelectric.api.AmberApi.create") as mock: + with patch("amberelectric.AmberApi") as mock: mock.return_value.get_sites.side_effect = ApiException(status=500) yield mock @@ -45,16 +46,36 @@ def mock_api_error() -> Generator: def mock_single_site_api() -> Generator: """Return a single site.""" site = Site( - "01FG0AGP818PXK0DWHXJRRT2DH", - "11111111111", - [], - "Jemena", - SiteStatus.ACTIVE, - date(2002, 1, 1), - None, + 
id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.ACTIVE, + active_from=date(2002, 1, 1), + closed_on=None, + interval_length=30, ) - with patch("amberelectric.api.AmberApi.create") as mock: + with patch("amberelectric.AmberApi") as mock: + mock.return_value.get_sites.return_value = [site] + yield mock + + +@pytest.fixture(name="single_site_closed_no_close_date_api") +def single_site_closed_no_close_date_api() -> Generator: + """Return a single closed site with no closed date.""" + site = Site( + id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.CLOSED, + active_from=None, + closed_on=None, + interval_length=30, + ) + + with patch("amberelectric.AmberApi") as mock: mock.return_value.get_sites.return_value = [site] yield mock @@ -63,16 +84,17 @@ def mock_single_site_api() -> Generator: def mock_single_site_pending_api() -> Generator: """Return a single site.""" site = Site( - "01FG0AGP818PXK0DWHXJRRT2DH", - "11111111111", - [], - "Jemena", - SiteStatus.PENDING, - None, - None, + id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.PENDING, + active_from=None, + closed_on=None, + interval_length=30, ) - with patch("amberelectric.api.AmberApi.create") as mock: + with patch("amberelectric.AmberApi") as mock: mock.return_value.get_sites.return_value = [site] yield mock @@ -82,35 +104,38 @@ def mock_single_site_rejoin_api() -> Generator: """Return a single site.""" instance = Mock() site_1 = Site( - "01HGD9QB72HB3DWQNJ6SSCGXGV", - "11111111111", - [], - "Jemena", - SiteStatus.CLOSED, - date(2002, 1, 1), - date(2002, 6, 1), + id="01HGD9QB72HB3DWQNJ6SSCGXGV", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.CLOSED, + active_from=date(2002, 1, 1), + closed_on=date(2002, 6, 1), + interval_length=30, ) site_2 = Site( - "01FG0AGP818PXK0DWHXJRRT2DH", - "11111111111", - [], - "Jemena", - SiteStatus.ACTIVE, - date(2003, 1, 1), - None, + id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.ACTIVE, + active_from=date(2003, 1, 1), + closed_on=None, + interval_length=30, ) site_3 = Site( - "01FG0AGP818PXK0DWHXJRRT2DH", - "11111111112", - [], - "Jemena", - SiteStatus.CLOSED, - date(2003, 1, 1), - date(2003, 6, 1), + id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111112", + channels=[], + network="Jemena", + status=SiteStatus.CLOSED, + active_from=date(2003, 1, 1), + closed_on=date(2003, 6, 1), + interval_length=30, ) instance.get_sites.return_value = [site_1, site_2, site_3] - with patch("amberelectric.api.AmberApi.create", return_value=instance): + with patch("amberelectric.AmberApi", return_value=instance): yield instance @@ -120,7 +145,7 @@ def mock_no_site_api() -> Generator: instance = Mock() instance.get_sites.return_value = [] - with patch("amberelectric.api.AmberApi.create", return_value=instance): + with patch("amberelectric.AmberApi", return_value=instance): yield instance @@ -188,6 +213,39 @@ async def test_single_site(hass: HomeAssistant, single_site_api: Mock) -> None: assert data[CONF_SITE_ID] == "01FG0AGP818PXK0DWHXJRRT2DH" +async def test_single_closed_site_no_closed_date( + hass: HomeAssistant, single_site_closed_no_close_date_api: Mock +) -> None: + """Test single closed site with no closed date.""" + initial_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert initial_result.get("type") 
is FlowResultType.FORM + assert initial_result.get("step_id") == "user" + + # Test filling in API key + enter_api_key_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_API_TOKEN: API_KEY}, + ) + assert enter_api_key_result.get("type") is FlowResultType.FORM + assert enter_api_key_result.get("step_id") == "site" + + select_site_result = await hass.config_entries.flow.async_configure( + enter_api_key_result["flow_id"], + {CONF_SITE_ID: "01FG0AGP818PXK0DWHXJRRT2DH", CONF_SITE_NAME: "Home"}, + ) + + # Show available sites + assert select_site_result.get("type") is FlowResultType.CREATE_ENTRY + assert select_site_result.get("title") == "Home" + data = select_site_result.get("data") + assert data + assert data[CONF_API_TOKEN] == API_KEY + assert data[CONF_SITE_ID] == "01FG0AGP818PXK0DWHXJRRT2DH" + + async def test_single_site_rejoin( hass: HomeAssistant, single_site_rejoin_api: Mock ) -> None: diff --git a/tests/components/amberelectric/test_coordinator.py b/tests/components/amberelectric/test_coordinator.py index cb3912cb5ac..0a8f5b874fa 100644 --- a/tests/components/amberelectric/test_coordinator.py +++ b/tests/components/amberelectric/test_coordinator.py @@ -7,10 +7,12 @@ from datetime import date from unittest.mock import Mock, patch from amberelectric import ApiException -from amberelectric.model.channel import Channel, ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.interval import Descriptor, SpikeStatus -from amberelectric.model.site import Site, SiteStatus +from amberelectric.models.channel import Channel, ChannelType +from amberelectric.models.interval import Interval +from amberelectric.models.price_descriptor import PriceDescriptor +from amberelectric.models.site import Site +from amberelectric.models.site_status import SiteStatus +from amberelectric.models.spike_status import SpikeStatus from dateutil import parser import pytest @@ -38,37 +40,40 @@ def mock_api_current_price() -> Generator: instance = Mock() general_site = Site( - GENERAL_ONLY_SITE_ID, - "11111111111", - [Channel(identifier="E1", type=ChannelType.GENERAL, tariff="A100")], - "Jemena", - SiteStatus.ACTIVE, - date(2021, 1, 1), - None, + id=GENERAL_ONLY_SITE_ID, + nmi="11111111111", + channels=[Channel(identifier="E1", type=ChannelType.GENERAL, tariff="A100")], + network="Jemena", + status=SiteStatus("active"), + activeFrom=date(2021, 1, 1), + closedOn=None, + interval_length=30, ) general_and_controlled_load = Site( - GENERAL_AND_CONTROLLED_SITE_ID, - "11111111112", - [ + id=GENERAL_AND_CONTROLLED_SITE_ID, + nmi="11111111112", + channels=[ Channel(identifier="E1", type=ChannelType.GENERAL, tariff="A100"), - Channel(identifier="E2", type=ChannelType.CONTROLLED_LOAD, tariff="A180"), + Channel(identifier="E2", type=ChannelType.CONTROLLEDLOAD, tariff="A180"), ], - "Jemena", - SiteStatus.ACTIVE, - date(2021, 1, 1), - None, + network="Jemena", + status=SiteStatus("active"), + activeFrom=date(2021, 1, 1), + closedOn=None, + interval_length=30, ) general_and_feed_in = Site( - GENERAL_AND_FEED_IN_SITE_ID, - "11111111113", - [ + id=GENERAL_AND_FEED_IN_SITE_ID, + nmi="11111111113", + channels=[ Channel(identifier="E1", type=ChannelType.GENERAL, tariff="A100"), - Channel(identifier="E2", type=ChannelType.FEED_IN, tariff="A100"), + Channel(identifier="E2", type=ChannelType.FEEDIN, tariff="A100"), ], - "Jemena", - SiteStatus.ACTIVE, - date(2021, 1, 1), - None, + network="Jemena", + status=SiteStatus("active"), + 
activeFrom=date(2021, 1, 1), + closedOn=None, + interval_length=30, ) instance.get_sites.return_value = [ general_site, @@ -76,44 +81,46 @@ def mock_api_current_price() -> Generator: general_and_feed_in, ] - with patch("amberelectric.api.AmberApi.create", return_value=instance): + with patch("amberelectric.AmberApi", return_value=instance): yield instance def test_normalize_descriptor() -> None: """Test normalizing descriptors works correctly.""" assert normalize_descriptor(None) is None - assert normalize_descriptor(Descriptor.NEGATIVE) == "negative" - assert normalize_descriptor(Descriptor.EXTREMELY_LOW) == "extremely_low" - assert normalize_descriptor(Descriptor.VERY_LOW) == "very_low" - assert normalize_descriptor(Descriptor.LOW) == "low" - assert normalize_descriptor(Descriptor.NEUTRAL) == "neutral" - assert normalize_descriptor(Descriptor.HIGH) == "high" - assert normalize_descriptor(Descriptor.SPIKE) == "spike" + assert normalize_descriptor(PriceDescriptor.NEGATIVE) == "negative" + assert normalize_descriptor(PriceDescriptor.EXTREMELYLOW) == "extremely_low" + assert normalize_descriptor(PriceDescriptor.VERYLOW) == "very_low" + assert normalize_descriptor(PriceDescriptor.LOW) == "low" + assert normalize_descriptor(PriceDescriptor.NEUTRAL) == "neutral" + assert normalize_descriptor(PriceDescriptor.HIGH) == "high" + assert normalize_descriptor(PriceDescriptor.SPIKE) == "spike" async def test_fetch_general_site(hass: HomeAssistant, current_price_api: Mock) -> None: """Test fetching a site with only a general channel.""" - current_price_api.get_current_price.return_value = GENERAL_CHANNEL + current_price_api.get_current_prices.return_value = GENERAL_CHANNEL data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) result = await data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_ONLY_SITE_ID, next=48 ) - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] assert result["current"].get("controlled_load") is None assert result["forecasts"].get("controlled_load") is None assert result["current"].get("feed_in") is None assert result["forecasts"].get("feed_in") is None - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) assert result["grid"]["price_spike"] == "none" @@ -122,12 +129,12 @@ async def test_fetch_no_general_site( ) -> None: """Test fetching a site with no general channel.""" - current_price_api.get_current_price.return_value = CONTROLLED_LOAD_CHANNEL + current_price_api.get_current_prices.return_value = CONTROLLED_LOAD_CHANNEL data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) with pytest.raises(UpdateFailed): await data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_ONLY_SITE_ID, next=48 ) @@ -135,41 +142,45 @@ async def test_fetch_no_general_site( async def test_fetch_api_error(hass: HomeAssistant, current_price_api: Mock) -> None: """Test that the old values are 
maintained if a second call fails.""" - current_price_api.get_current_price.return_value = GENERAL_CHANNEL + current_price_api.get_current_prices.return_value = GENERAL_CHANNEL data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) result = await data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_ONLY_SITE_ID, next=48 ) - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] assert result["current"].get("controlled_load") is None assert result["forecasts"].get("controlled_load") is None assert result["current"].get("feed_in") is None assert result["forecasts"].get("feed_in") is None - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) - current_price_api.get_current_price.side_effect = ApiException(status=403) + current_price_api.get_current_prices.side_effect = ApiException(status=403) with pytest.raises(UpdateFailed): await data_service._async_update_data() - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] assert result["current"].get("controlled_load") is None assert result["forecasts"].get("controlled_load") is None assert result["current"].get("feed_in") is None assert result["forecasts"].get("feed_in") is None - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) assert result["grid"]["price_spike"] == "none" @@ -178,7 +189,7 @@ async def test_fetch_general_and_controlled_load_site( ) -> None: """Test fetching a site with a general and controlled load channel.""" - current_price_api.get_current_price.return_value = ( + current_price_api.get_current_prices.return_value = ( GENERAL_CHANNEL + CONTROLLED_LOAD_CHANNEL ) data_service = AmberUpdateCoordinator( @@ -186,25 +197,30 @@ async def test_fetch_general_and_controlled_load_site( ) result = await data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_AND_CONTROLLED_SITE_ID, next=48 ) - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] - assert result["current"].get("controlled_load") is CONTROLLED_LOAD_CHANNEL[0] + assert ( + result["current"].get("controlled_load") + is CONTROLLED_LOAD_CHANNEL[0].actual_instance + ) assert result["forecasts"].get("controlled_load") == [ - CONTROLLED_LOAD_CHANNEL[1], - 
CONTROLLED_LOAD_CHANNEL[2], - CONTROLLED_LOAD_CHANNEL[3], + CONTROLLED_LOAD_CHANNEL[1].actual_instance, + CONTROLLED_LOAD_CHANNEL[2].actual_instance, + CONTROLLED_LOAD_CHANNEL[3].actual_instance, ] assert result["current"].get("feed_in") is None assert result["forecasts"].get("feed_in") is None - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) assert result["grid"]["price_spike"] == "none" @@ -213,31 +229,35 @@ async def test_fetch_general_and_feed_in_site( ) -> None: """Test fetching a site with a general and feed_in channel.""" - current_price_api.get_current_price.return_value = GENERAL_CHANNEL + FEED_IN_CHANNEL + current_price_api.get_current_prices.return_value = ( + GENERAL_CHANNEL + FEED_IN_CHANNEL + ) data_service = AmberUpdateCoordinator( hass, current_price_api, GENERAL_AND_FEED_IN_SITE_ID ) result = await data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_AND_FEED_IN_SITE_ID, next=48 ) - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] assert result["current"].get("controlled_load") is None assert result["forecasts"].get("controlled_load") is None - assert result["current"].get("feed_in") is FEED_IN_CHANNEL[0] + assert result["current"].get("feed_in") is FEED_IN_CHANNEL[0].actual_instance assert result["forecasts"].get("feed_in") == [ - FEED_IN_CHANNEL[1], - FEED_IN_CHANNEL[2], - FEED_IN_CHANNEL[3], + FEED_IN_CHANNEL[1].actual_instance, + FEED_IN_CHANNEL[2].actual_instance, + FEED_IN_CHANNEL[3].actual_instance, ] - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) assert result["grid"]["price_spike"] == "none" @@ -246,13 +266,13 @@ async def test_fetch_potential_spike( ) -> None: """Test fetching a site with only a general channel.""" - general_channel: list[CurrentInterval] = [ + general_channel: list[Interval] = [ generate_current_interval( ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") - ), + ) ] - general_channel[0].spike_status = SpikeStatus.POTENTIAL - current_price_api.get_current_price.return_value = general_channel + general_channel[0].actual_instance.spike_status = SpikeStatus.POTENTIAL + current_price_api.get_current_prices.return_value = general_channel data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) result = await data_service._async_update_data() assert result["grid"]["price_spike"] == "potential" @@ -261,13 +281,13 @@ async def test_fetch_potential_spike( async def test_fetch_spike(hass: HomeAssistant, current_price_api: Mock) -> None: """Test fetching a site with only a general channel.""" - general_channel: list[CurrentInterval] = [ + general_channel: list[Interval] = [ generate_current_interval( ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") - ), + ) ] - general_channel[0].spike_status = SpikeStatus.SPIKE - current_price_api.get_current_price.return_value = general_channel + general_channel[0].actual_instance.spike_status 
= SpikeStatus.SPIKE + current_price_api.get_current_prices.return_value = general_channel data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) result = await data_service._async_update_data() assert result["grid"]["price_spike"] == "spike" diff --git a/tests/components/amberelectric/test_sensor.py b/tests/components/amberelectric/test_sensor.py index 3a5626d14d5..203b65d6df6 100644 --- a/tests/components/amberelectric/test_sensor.py +++ b/tests/components/amberelectric/test_sensor.py @@ -3,8 +3,9 @@ from collections.abc import AsyncGenerator from unittest.mock import Mock, patch -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.range import Range +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.interval import Interval +from amberelectric.models.range import Range import pytest from homeassistant.components.amberelectric.const import ( @@ -44,10 +45,10 @@ async def setup_general(hass: HomeAssistant) -> AsyncGenerator[Mock]: instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: - instance.get_current_price = Mock(return_value=GENERAL_CHANNEL) + instance.get_current_prices = Mock(return_value=GENERAL_CHANNEL) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value @@ -68,10 +69,10 @@ async def setup_general_and_controlled_load( instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: - instance.get_current_price = Mock( + instance.get_current_prices = Mock( return_value=GENERAL_CHANNEL + CONTROLLED_LOAD_CHANNEL ) assert await async_setup_component(hass, DOMAIN, {}) @@ -92,10 +93,10 @@ async def setup_general_and_feed_in(hass: HomeAssistant) -> AsyncGenerator[Mock] instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: - instance.get_current_price = Mock( + instance.get_current_prices = Mock( return_value=GENERAL_CHANNEL + FEED_IN_CHANNEL ) assert await async_setup_component(hass, DOMAIN, {}) @@ -126,7 +127,7 @@ async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) -> assert attributes.get("range_max") is None with_range: list[CurrentInterval] = GENERAL_CHANNEL - with_range[0].range = Range(7.8, 12.4) + with_range[0].actual_instance.range = Range(min=7.8, max=12.4) setup_general.get_current_price.return_value = with_range config_entry = hass.config_entries.async_entries(DOMAIN)[0] @@ -211,8 +212,8 @@ async def test_general_forecast_sensor( assert first_forecast.get("range_min") is None assert first_forecast.get("range_max") is None - with_range: list[CurrentInterval] = GENERAL_CHANNEL - with_range[1].range = Range(7.8, 12.4) + with_range: list[Interval] = GENERAL_CHANNEL + with_range[1].actual_instance.range = Range(min=7.8, max=12.4) setup_general.get_current_price.return_value = with_range config_entry = hass.config_entries.async_entries(DOMAIN)[0] diff --git a/tests/components/ambient_network/conftest.py b/tests/components/ambient_network/conftest.py index 9fc001252a0..e728d46aaf6 100644 --- a/tests/components/ambient_network/conftest.py +++ b/tests/components/ambient_network/conftest.py @@ -7,7 +7,7 @@ from unittest.mock import AsyncMock, Mock, patch from aioambient import OpenAPI import pytest -from homeassistant.components import 
ambient_network +from homeassistant.components.ambient_network.const import DOMAIN from homeassistant.core import HomeAssistant from tests.common import ( @@ -69,7 +69,7 @@ async def mock_aioambient(open_api: OpenAPI): def config_entry_fixture(request: pytest.FixtureRequest) -> MockConfigEntry: """Mock config entry.""" return MockConfigEntry( - domain=ambient_network.DOMAIN, + domain=DOMAIN, title=f"Station {request.param[0]}", data={"mac": request.param}, ) diff --git a/tests/components/ambient_station/snapshots/test_diagnostics.ambr b/tests/components/ambient_station/snapshots/test_diagnostics.ambr index b4aede7948c..07db19101ab 100644 --- a/tests/components/ambient_station/snapshots/test_diagnostics.ambr +++ b/tests/components/ambient_station/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'app_key': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'ambient_station', 'entry_id': '382cf7643f016fd48b3fe52163fe8877', 'minor_version': 1, @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/analytics/test_analytics.py b/tests/components/analytics/test_analytics.py index 28272cd8866..ba7e46bdde7 100644 --- a/tests/components/analytics/test_analytics.py +++ b/tests/components/analytics/test_analytics.py @@ -19,7 +19,7 @@ from homeassistant.components.analytics.const import ( ATTR_STATISTICS, ATTR_USAGE, ) -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import IntegrationNotFound @@ -67,6 +67,7 @@ def _last_call_payload(aioclient: AiohttpClientMocker) -> dict[str, Any]: return aioclient.mock_calls[-1][2] +@pytest.mark.usefixtures("supervisor_client") async def test_no_send( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -75,7 +76,7 @@ async def test_no_send( """Test send when no preferences are defined.""" analytics = Analytics(hass) with patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=False), ): assert not analytics.preferences[ATTR_BASE] @@ -96,7 +97,7 @@ async def test_load_with_supervisor_diagnostics(hass: HomeAssistant) -> None: side_effect=Mock(return_value={"diagnostics": True}), ), patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), ), ): @@ -117,7 +118,7 @@ async def test_load_with_supervisor_without_diagnostics(hass: HomeAssistant) -> side_effect=Mock(return_value={"diagnostics": False}), ), patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), ), ): @@ -126,6 +127,7 @@ async def test_load_with_supervisor_without_diagnostics(hass: HomeAssistant) -> assert not analytics.preferences[ATTR_DIAGNOSTICS] +@pytest.mark.usefixtures("supervisor_client") async def test_failed_to_send( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -144,6 +146,7 @@ async def test_failed_to_send( ) +@pytest.mark.usefixtures("supervisor_client") async def test_failed_to_send_raises( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -159,7 +162,7 @@ async def 
test_failed_to_send_raises( assert "Error sending analytics" in caplog.text -@pytest.mark.usefixtures("installation_type_mock") +@pytest.mark.usefixtures("installation_type_mock", "supervisor_client") async def test_send_base( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -182,6 +185,7 @@ async def test_send_base( assert snapshot == submitted_data +@pytest.mark.usefixtures("supervisor_client") async def test_send_base_with_supervisor( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -215,8 +219,12 @@ async def test_send_base_with_supervisor( side_effect=Mock(return_value={}), ), patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), + ) as is_hassio_mock, + patch( + "homeassistant.helpers.system_info.is_hassio", + new=is_hassio_mock, ), ): await analytics.load() @@ -230,7 +238,7 @@ async def test_send_base_with_supervisor( assert snapshot == submitted_data -@pytest.mark.usefixtures("installation_type_mock") +@pytest.mark.usefixtures("installation_type_mock", "supervisor_client") async def test_send_usage( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -271,6 +279,7 @@ async def test_send_usage_with_supervisor( caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, snapshot: SnapshotAssertion, + supervisor_client: AsyncMock, ) -> None: """Test send usage with supervisor preferences are defined.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200) @@ -281,6 +290,9 @@ async def test_send_usage_with_supervisor( assert analytics.preferences[ATTR_USAGE] hass.config.components.add("default_config") + supervisor_client.addons.addon_info.return_value = Mock( + slug="test_addon", protected=True, version="1", auto_update=False + ) with ( patch( "homeassistant.components.hassio.get_supervisor_info", @@ -306,19 +318,12 @@ async def test_send_usage_with_supervisor( side_effect=Mock(return_value={}), ), patch( - "homeassistant.components.hassio.async_get_addon_info", - side_effect=AsyncMock( - return_value={ - "slug": "test_addon", - "protected": True, - "version": "1", - "auto_update": False, - } - ), - ), - patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), + ) as is_hassio_mock, + patch( + "homeassistant.helpers.system_info.is_hassio", + new=is_hassio_mock, ), ): await analytics.send_analytics() @@ -330,7 +335,7 @@ async def test_send_usage_with_supervisor( assert snapshot == submitted_data -@pytest.mark.usefixtures("installation_type_mock") +@pytest.mark.usefixtures("installation_type_mock", "supervisor_client") async def test_send_statistics( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -358,9 +363,10 @@ async def test_send_statistics( assert snapshot == submitted_data -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("mock_hass_config", "supervisor_client") async def test_send_statistics_one_integration_fails( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test send statistics preferences are defined.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200) @@ -381,7 +387,9 @@ async def test_send_statistics_one_integration_fails( assert post_call[2]["integration_count"] == 0 -@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") +@pytest.mark.usefixtures( + "installation_type_mock", "mock_hass_config", 
"supervisor_client" +) async def test_send_statistics_disabled_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -418,7 +426,9 @@ async def test_send_statistics_disabled_integration( assert snapshot == submitted_data -@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") +@pytest.mark.usefixtures( + "installation_type_mock", "mock_hass_config", "supervisor_client" +) async def test_send_statistics_ignored_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -461,9 +471,10 @@ async def test_send_statistics_ignored_integration( assert snapshot == submitted_data -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("mock_hass_config", "supervisor_client") async def test_send_statistics_async_get_integration_unknown_exception( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test send statistics preferences are defined.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200) @@ -489,6 +500,7 @@ async def test_send_statistics_with_supervisor( caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, snapshot: SnapshotAssertion, + supervisor_client: AsyncMock, ) -> None: """Test send statistics preferences are defined.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200) @@ -497,6 +509,9 @@ async def test_send_statistics_with_supervisor( assert analytics.preferences[ATTR_BASE] assert analytics.preferences[ATTR_STATISTICS] + supervisor_client.addons.addon_info.return_value = Mock( + slug="test_addon", protected=True, version="1", auto_update=False + ) with ( patch( "homeassistant.components.hassio.get_supervisor_info", @@ -522,19 +537,12 @@ async def test_send_statistics_with_supervisor( side_effect=Mock(return_value={}), ), patch( - "homeassistant.components.hassio.async_get_addon_info", - side_effect=AsyncMock( - return_value={ - "slug": "test_addon", - "protected": True, - "version": "1", - "auto_update": False, - } - ), - ), - patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), + ) as is_hassio_mock, + patch( + "homeassistant.helpers.system_info.is_hassio", + new=is_hassio_mock, ), ): await analytics.send_analytics() @@ -546,6 +554,7 @@ async def test_send_statistics_with_supervisor( assert snapshot == submitted_data +@pytest.mark.usefixtures("supervisor_client") async def test_reusing_uuid( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -563,7 +572,9 @@ async def test_reusing_uuid( assert analytics.uuid == "NOT_MOCK_UUID" -@pytest.mark.usefixtures("enable_custom_integrations", "installation_type_mock") +@pytest.mark.usefixtures( + "enable_custom_integrations", "installation_type_mock", "supervisor_client" +) async def test_custom_integrations( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -590,8 +601,10 @@ async def test_custom_integrations( assert snapshot == submitted_data +@pytest.mark.usefixtures("supervisor_client") async def test_dev_url( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test sending payload to dev url.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, status=200) @@ -607,6 +620,7 @@ async def test_dev_url( assert str(payload[1]) == ANALYTICS_ENDPOINT_URL_DEV +@pytest.mark.usefixtures("supervisor_client") async def test_dev_url_error( hass: HomeAssistant, aioclient_mock: 
AiohttpClientMocker, @@ -630,8 +644,10 @@ async def test_dev_url_error( ) in caplog.text +@pytest.mark.usefixtures("supervisor_client") async def test_nightly_endpoint( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test sending payload to production url when running nightly.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200) @@ -647,7 +663,9 @@ async def test_nightly_endpoint( assert str(payload[1]) == ANALYTICS_ENDPOINT_URL -@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") +@pytest.mark.usefixtures( + "installation_type_mock", "mock_hass_config", "supervisor_client" +) async def test_send_with_no_energy( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -683,7 +701,9 @@ async def test_send_with_no_energy( assert snapshot == submitted_data -@pytest.mark.usefixtures("recorder_mock", "installation_type_mock", "mock_hass_config") +@pytest.mark.usefixtures( + "recorder_mock", "installation_type_mock", "mock_hass_config", "supervisor_client" +) async def test_send_with_no_energy_config( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -714,7 +734,9 @@ async def test_send_with_no_energy_config( ) -@pytest.mark.usefixtures("recorder_mock", "installation_type_mock", "mock_hass_config") +@pytest.mark.usefixtures( + "recorder_mock", "installation_type_mock", "mock_hass_config", "supervisor_client" +) async def test_send_with_energy_config( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -745,7 +767,9 @@ async def test_send_with_energy_config( ) -@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") +@pytest.mark.usefixtures( + "installation_type_mock", "mock_hass_config", "supervisor_client" +) async def test_send_usage_with_certificate( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -771,7 +795,7 @@ async def test_send_usage_with_certificate( assert snapshot == submitted_data -@pytest.mark.usefixtures("recorder_mock", "installation_type_mock") +@pytest.mark.usefixtures("recorder_mock", "installation_type_mock", "supervisor_client") async def test_send_with_recorder( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -802,6 +826,7 @@ async def test_send_with_recorder( ) +@pytest.mark.usefixtures("supervisor_client") async def test_send_with_problems_loading_yaml( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -821,7 +846,7 @@ async def test_send_with_problems_loading_yaml( assert len(aioclient_mock.mock_calls) == 0 -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("mock_hass_config", "supervisor_client") async def test_timeout_while_sending( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -840,7 +865,7 @@ async def test_timeout_while_sending( assert "Timeout sending analytics" in caplog.text -@pytest.mark.usefixtures("installation_type_mock") +@pytest.mark.usefixtures("installation_type_mock", "supervisor_client") async def test_not_check_config_entries_if_yaml( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -863,7 +888,7 @@ async def test_not_check_config_entries_if_yaml( domain="ignored_integration", state=ConfigEntryState.LOADED, source="ignore", - disabled_by="user", + disabled_by=ConfigEntryDisabler.USER, ) mock_config_entry.add_to_hass(hass) diff --git a/tests/components/analytics/test_init.py b/tests/components/analytics/test_init.py index cf8d4838415..66000fc5936 100644 --- a/tests/components/analytics/test_init.py +++ 
b/tests/components/analytics/test_init.py @@ -2,6 +2,8 @@ from unittest.mock import patch +import pytest + from homeassistant.components.analytics.const import ANALYTICS_ENDPOINT_URL, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -20,6 +22,7 @@ async def test_setup(hass: HomeAssistant) -> None: assert DOMAIN in hass.data +@pytest.mark.usefixtures("supervisor_client") async def test_websocket( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, diff --git a/tests/components/analytics_insights/conftest.py b/tests/components/analytics_insights/conftest.py index fcdda95e9bd..a9c152b8ab9 100644 --- a/tests/components/analytics_insights/conftest.py +++ b/tests/components/analytics_insights/conftest.py @@ -5,9 +5,10 @@ from unittest.mock import AsyncMock, patch import pytest from python_homeassistant_analytics import CurrentAnalytics -from python_homeassistant_analytics.models import CustomIntegration, Integration +from python_homeassistant_analytics.models import Addon, CustomIntegration, Integration from homeassistant.components.analytics_insights.const import ( + CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -43,6 +44,10 @@ def mock_analytics_client() -> Generator[AsyncMock]: client.get_current_analytics.return_value = CurrentAnalytics.from_json( load_fixture("analytics_insights/current_data.json") ) + addons = load_json_object_fixture("analytics_insights/addons.json") + client.get_addons.return_value = { + key: Addon.from_dict(value) for key, value in addons.items() + } integrations = load_json_object_fixture("analytics_insights/integrations.json") client.get_integrations.return_value = { key: Integration.from_dict(value) for key, value in integrations.items() @@ -65,6 +70,7 @@ def mock_config_entry() -> MockConfigEntry: title="Homeassistant Analytics", data={}, options={ + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube", "spotify", "myq"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, diff --git a/tests/components/analytics_insights/fixtures/addons.json b/tests/components/analytics_insights/fixtures/addons.json new file mode 100644 index 00000000000..cb7ae42c86b --- /dev/null +++ b/tests/components/analytics_insights/fixtures/addons.json @@ -0,0 +1,31 @@ +{ + "core_samba": { + "total": 76357, + "versions": { + "12.3.2": 65875, + "12.2.0": 1313, + "12.3.1": 5018, + "12.1.0": 211, + "10.0.0": 1139, + "9.4.0": 4, + "12.3.0": 704, + "9.3.1": 36, + "10.0.2": 1290, + "9.5.1": 379, + "9.6.1": 66, + "10.0.1": 200, + "9.3.0": 20, + "9.2.0": 9, + "9.5.0": 13, + "12.0.0": 39, + "9.7.0": 20, + "11.0.0": 13, + "3.0": 1, + "9.6.0": 2, + "8.1": 2, + "9.0": 3 + }, + "protected": 76345, + "auto_update": 32732 + } +} diff --git a/tests/components/analytics_insights/snapshots/test_sensor.ambr b/tests/components/analytics_insights/snapshots/test_sensor.ambr index d7eeed7955c..6e11b344b0b 100644 --- a/tests/components/analytics_insights/snapshots/test_sensor.ambr +++ b/tests/components/analytics_insights/snapshots/test_sensor.ambr @@ -1,4 +1,54 @@ # serializer version: 1 +# name: test_all_entities[sensor.homeassistant_analytics_core_samba-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.homeassistant_analytics_core_samba', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'core_samba', + 'platform': 'analytics_insights', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'addons', + 'unique_id': 'addon_core_samba_active_installations', + 'unit_of_measurement': 'active installations', + }) +# --- +# name: test_all_entities[sensor.homeassistant_analytics_core_samba-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Homeassistant Analytics core_samba', + 'state_class': , + 'unit_of_measurement': 'active installations', + }), + 'context': , + 'entity_id': 'sensor.homeassistant_analytics_core_samba', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '76357', + }) +# --- # name: test_all_entities[sensor.homeassistant_analytics_hacs_custom-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -149,6 +199,106 @@ 'state': '24388', }) # --- +# name: test_all_entities[sensor.homeassistant_analytics_total_active_installations-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.homeassistant_analytics_total_active_installations', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total active installations', + 'platform': 'analytics_insights', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_active_installations', + 'unique_id': 'total_active_installations', + 'unit_of_measurement': 'active installations', + }) +# --- +# name: test_all_entities[sensor.homeassistant_analytics_total_active_installations-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Homeassistant Analytics Total active installations', + 'state_class': , + 'unit_of_measurement': 'active installations', + }), + 'context': , + 'entity_id': 'sensor.homeassistant_analytics_total_active_installations', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '310400', + }) +# --- +# name: test_all_entities[sensor.homeassistant_analytics_total_reported_integrations-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.homeassistant_analytics_total_reported_integrations', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total reported integrations', + 'platform': 'analytics_insights', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_reports_integrations', + 'unique_id': 'total_reports_integrations', + 'unit_of_measurement': 'active installations', + }) +# --- +# name: test_all_entities[sensor.homeassistant_analytics_total_reported_integrations-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'Homeassistant Analytics Total reported integrations', + 'state_class': , + 'unit_of_measurement': 'active installations', + }), + 'context': , + 'entity_id': 'sensor.homeassistant_analytics_total_reported_integrations', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '249256', + }) +# --- # name: test_all_entities[sensor.homeassistant_analytics_youtube-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/analytics_insights/test_config_flow.py b/tests/components/analytics_insights/test_config_flow.py index 0c9d4c074f8..747f24930a4 100644 --- a/tests/components/analytics_insights/test_config_flow.py +++ b/tests/components/analytics_insights/test_config_flow.py @@ -7,6 +7,7 @@ import pytest from python_homeassistant_analytics import HomeassistantAnalyticsConnectionError from homeassistant.components.analytics_insights.const import ( + CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -25,10 +26,12 @@ from tests.common import MockConfigEntry [ ( { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -38,6 +41,7 @@ from tests.common import MockConfigEntry CONF_TRACKED_INTEGRATIONS: ["youtube"], }, { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -47,6 +51,7 @@ from tests.common import MockConfigEntry CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -83,6 +88,7 @@ async def test_form( "user_input", [ { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -113,6 +119,7 @@ async def test_submitting_empty_form( result = await hass.config_entries.flow.async_configure( result["flow_id"], { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -123,6 +130,7 @@ async def test_submitting_empty_form( assert result["title"] == "Home Assistant Analytics Insights" assert result["data"] == {} assert result["options"] == { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], } @@ -161,6 +169,7 @@ async def test_form_already_configured( domain=DOMAIN, data={}, options={ + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: ["youtube", "spotify"], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -179,19 +188,32 @@ async def test_form_already_configured( [ ( { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, ), + ( + { + CONF_TRACKED_ADDONS: ["core_samba"], + }, + { + CONF_TRACKED_ADDONS: ["core_samba"], + CONF_TRACKED_INTEGRATIONS: [], + CONF_TRACKED_CUSTOM_INTEGRATIONS: [], + }, + ), ( { CONF_TRACKED_INTEGRATIONS: ["youtube"], }, { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -201,6 +223,7 @@ async def test_form_already_configured( CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -237,6 
+260,7 @@ async def test_options_flow( "user_input", [ { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -267,6 +291,7 @@ async def test_submitting_empty_options_flow( result = await hass.config_entries.options.async_configure( result["flow_id"], { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube", "hue"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -275,6 +300,7 @@ async def test_submitting_empty_options_flow( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube", "hue"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], } diff --git a/tests/components/analytics_insights/test_sensor.py b/tests/components/analytics_insights/test_sensor.py index 3ede971c8f8..bf82e0c2d65 100644 --- a/tests/components/analytics_insights/test_sensor.py +++ b/tests/components/analytics_insights/test_sensor.py @@ -4,6 +4,7 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory +import pytest from python_homeassistant_analytics import ( HomeassistantAnalyticsConnectionError, HomeassistantAnalyticsNotModifiedError, @@ -19,6 +20,7 @@ from . import setup_integration from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_all_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, diff --git a/tests/components/android_ip_webcam/test_init.py b/tests/components/android_ip_webcam/test_init.py index 70ecdc9271e..58108cef53b 100644 --- a/tests/components/android_ip_webcam/test_init.py +++ b/tests/components/android_ip_webcam/test_init.py @@ -79,4 +79,3 @@ async def test_unload_entry(hass: HomeAssistant, aioclient_mock_fixture) -> None await hass.async_block_till_done() assert entry.state is ConfigEntryState.NOT_LOADED - assert entry.entry_id not in hass.data[DOMAIN] diff --git a/tests/components/androidtv/common.py b/tests/components/androidtv/common.py index 23e048e4d52..133f6b1470b 100644 --- a/tests/components/androidtv/common.py +++ b/tests/components/androidtv/common.py @@ -100,7 +100,12 @@ CONFIG_FIRETV_DEFAULT = CONFIG_FIRETV_PYTHON_ADB def setup_mock_entry( - config: dict[str, Any], entity_domain: str + config: dict[str, Any], + entity_domain: str, + *, + options=None, + version=1, + minor_version=2, ) -> tuple[str, str, MockConfigEntry]: """Prepare mock entry for entities tests.""" patch_key = config[ADB_PATCH_KEY] @@ -109,6 +114,9 @@ def setup_mock_entry( domain=DOMAIN, data=config[DOMAIN], unique_id="a1:b1:c1:d1:e1:f1", + options=options, + version=version, + minor_version=minor_version, ) return patch_key, entity_id, config_entry diff --git a/tests/components/androidtv/test_config_flow.py b/tests/components/androidtv/test_config_flow.py index b73fee9fb10..cb1015e4198 100644 --- a/tests/components/androidtv/test_config_flow.py +++ b/tests/components/androidtv/test_config_flow.py @@ -22,7 +22,7 @@ from homeassistant.components.androidtv.const import ( CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, CONF_GET_SOURCES, - CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, CONF_STATE_DETECTION_RULES, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, @@ -501,7 +501,7 @@ async def test_options_flow(hass: HomeAssistant) -> None: user_input={ CONF_GET_SOURCES: True, CONF_EXCLUDE_UNNAMED_APPS: True, - CONF_SCREENCAP: True, + CONF_SCREENCAP_INTERVAL: 1, CONF_TURN_OFF_COMMAND: "off", 
CONF_TURN_ON_COMMAND: "on", }, @@ -515,6 +515,6 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert config_entry.options[CONF_GET_SOURCES] is True assert config_entry.options[CONF_EXCLUDE_UNNAMED_APPS] is True - assert config_entry.options[CONF_SCREENCAP] is True + assert config_entry.options[CONF_SCREENCAP_INTERVAL] == 1 assert config_entry.options[CONF_TURN_OFF_COMMAND] == "off" assert config_entry.options[CONF_TURN_ON_COMMAND] == "on" diff --git a/tests/components/androidtv/test_diagnostics.py b/tests/components/androidtv/test_diagnostics.py index 7d1801514af..40dba53bd9b 100644 --- a/tests/components/androidtv/test_diagnostics.py +++ b/tests/components/androidtv/test_diagnostics.py @@ -1,6 +1,6 @@ """Tests for the diagnostics data provided by the AndroidTV integration.""" -from homeassistant.components.asuswrt.diagnostics import TO_REDACT +from homeassistant.components.androidtv.diagnostics import TO_REDACT from homeassistant.components.diagnostics import async_redact_data from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -36,4 +36,4 @@ async def test_diagnostics( hass, hass_client, mock_config_entry ) - assert result["entry"] == entry_dict + assert result["entry"] == entry_dict | {"discovery_keys": {}} diff --git a/tests/components/androidtv/test_init.py b/tests/components/androidtv/test_init.py new file mode 100644 index 00000000000..8ff7df1668b --- /dev/null +++ b/tests/components/androidtv/test_init.py @@ -0,0 +1,34 @@ +"""Tests for AndroidTV integration initialization.""" + +from homeassistant.components.androidtv.const import ( + CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, +) +from homeassistant.components.media_player import DOMAIN as MP_DOMAIN +from homeassistant.core import HomeAssistant + +from . 
import patchers +from .common import CONFIG_ANDROID_DEFAULT, SHELL_RESPONSE_OFF, setup_mock_entry + + +async def test_migrate_version( + hass: HomeAssistant, +) -> None: + """Test migration to new version.""" + patch_key, _, mock_config_entry = setup_mock_entry( + CONFIG_ANDROID_DEFAULT, + MP_DOMAIN, + options={CONF_SCREENCAP: False}, + minor_version=1, + ) + mock_config_entry.add_to_hass(hass) + + with ( + patchers.patch_connect(True)[patch_key], + patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key], + ): + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.options[CONF_SCREENCAP_INTERVAL] == 0 + assert mock_config_entry.minor_version == 2 diff --git a/tests/components/androidtv/test_media_player.py b/tests/components/androidtv/test_media_player.py index ef0d0c63b06..5a8d88dd9f6 100644 --- a/tests/components/androidtv/test_media_player.py +++ b/tests/components/androidtv/test_media_player.py @@ -13,7 +13,7 @@ import pytest from homeassistant.components.androidtv.const import ( CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, - CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, CONF_STATE_DETECTION_RULES, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, @@ -801,6 +801,9 @@ async def test_get_image_http( """ patch_key, entity_id, config_entry = _setup(CONFIG_ANDROID_DEFAULT) config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry( + config_entry, options={CONF_SCREENCAP_INTERVAL: 2} + ) with ( patchers.patch_connect(True)[patch_key], @@ -828,21 +831,27 @@ async def test_get_image_http( content = await resp.read() assert content == b"image" - next_update = utcnow() + timedelta(seconds=30) + next_update = utcnow() + timedelta(minutes=1) with ( patchers.patch_shell("11")[patch_key], patchers.PATCH_SCREENCAP as patch_screen_cap, - patch("homeassistant.util.utcnow", return_value=next_update), + patch( + "homeassistant.components.androidtv.media_player.utcnow", + return_value=next_update, + ), ): async_fire_time_changed(hass, next_update, True) await hass.async_block_till_done() patch_screen_cap.assert_not_called() - next_update = utcnow() + timedelta(seconds=60) + next_update = utcnow() + timedelta(minutes=2) with ( patchers.patch_shell("11")[patch_key], patchers.PATCH_SCREENCAP as patch_screen_cap, - patch("homeassistant.util.utcnow", return_value=next_update), + patch( + "homeassistant.components.androidtv.media_player.utcnow", + return_value=next_update, + ), ): async_fire_time_changed(hass, next_update, True) await hass.async_block_till_done() @@ -854,6 +863,9 @@ async def test_get_image_http_fail(hass: HomeAssistant) -> None: patch_key, entity_id, config_entry = _setup(CONFIG_ANDROID_DEFAULT) config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry( + config_entry, options={CONF_SCREENCAP_INTERVAL: 2} + ) with ( patchers.patch_connect(True)[patch_key], @@ -885,7 +897,7 @@ async def test_get_image_disabled(hass: HomeAssistant) -> None: patch_key, entity_id, config_entry = _setup(CONFIG_ANDROID_DEFAULT) config_entry.add_to_hass(hass) hass.config_entries.async_update_entry( - config_entry, options={CONF_SCREENCAP: False} + config_entry, options={CONF_SCREENCAP_INTERVAL: 0} ) with ( @@ -1133,7 +1145,7 @@ async def test_options_reload(hass: HomeAssistant) -> None: with patchers.PATCH_SETUP_ENTRY as setup_entry_call: # change an option that not require integration reload hass.config_entries.async_update_entry( - config_entry, options={CONF_SCREENCAP: False} + config_entry, 
options={CONF_EXCLUDE_UNNAMED_APPS: True} ) await hass.async_block_till_done() diff --git a/tests/components/androidtv_remote/test_config_flow.py b/tests/components/androidtv_remote/test_config_flow.py index 93c9067d1c8..02e15bca415 100644 --- a/tests/components/androidtv_remote/test_config_flow.py +++ b/tests/components/androidtv_remote/test_config_flow.py @@ -757,6 +757,59 @@ async def test_zeroconf_flow_abort_if_mac_is_missing( assert result["reason"] == "cannot_connect" +async def test_zeroconf_flow_already_configured_zeroconf_has_multiple_invalid_ip_addresses( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_unload_entry: AsyncMock, + mock_api: MagicMock, +) -> None: + """Test we abort the zeroconf flow if already configured and zeroconf has invalid ip addresses.""" + host = "1.2.3.4" + name = "My Android TV" + mac = "1A:2B:3C:4D:5E:6F" + unique_id = "1a:2b:3c:4d:5e:6f" + name_existing = name + host_existing = host + + mock_config_entry = MockConfigEntry( + title=name, + domain=DOMAIN, + data={ + "host": host_existing, + "name": name_existing, + "mac": mac, + }, + unique_id=unique_id, + state=ConfigEntryState.LOADED, + ) + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.2.3.5"), + ip_addresses=[ip_address("1.2.3.5"), ip_address(host)], + port=6466, + hostname=host, + type="mock_type", + name=name + "._androidtvremote2._tcp.local.", + properties={"bt": mac}, + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + await hass.async_block_till_done() + assert hass.config_entries.async_entries(DOMAIN)[0].data == { + "host": host, + "name": name, + "mac": mac, + } + assert len(mock_unload_entry.mock_calls) == 0 + assert len(mock_setup_entry.mock_calls) == 0 + + async def test_reauth_flow_success( hass: HomeAssistant, mock_setup_entry: AsyncMock, diff --git a/tests/components/androidtv_remote/test_media_player.py b/tests/components/androidtv_remote/test_media_player.py index 46678f18fd3..e292a5b273f 100644 --- a/tests/components/androidtv_remote/test_media_player.py +++ b/tests/components/androidtv_remote/test_media_player.py @@ -20,10 +20,11 @@ async def test_media_player_receives_push_updates( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_api: MagicMock ) -> None: """Test the Android TV Remote media player receives push updates and state is updated.""" - mock_config_entry.options = { - "apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}} - } mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry( + mock_config_entry, + options={"apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}}}, + ) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -322,7 +323,7 @@ async def test_browse_media( mock_api: MagicMock, ) -> None: """Test the Android TV Remote media player browse media.""" - mock_config_entry.options = { + new_options = { "apps": { "com.google.android.youtube.tv": { "app_name": "YouTube", @@ -332,6 +333,7 @@ async def test_browse_media( } } mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry(mock_config_entry, options=new_options) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED diff --git 
a/tests/components/androidtv_remote/test_remote.py b/tests/components/androidtv_remote/test_remote.py index 7ca63685747..b3c3ce1c283 100644 --- a/tests/components/androidtv_remote/test_remote.py +++ b/tests/components/androidtv_remote/test_remote.py @@ -19,10 +19,9 @@ async def test_remote_receives_push_updates( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_api: MagicMock ) -> None: """Test the Android TV Remote receives push updates and state is updated.""" - mock_config_entry.options = { - "apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}} - } + new_options = {"apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}}} mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry(mock_config_entry, options=new_options) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -53,10 +52,9 @@ async def test_remote_toggles( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_api: MagicMock ) -> None: """Test the Android TV Remote toggles.""" - mock_config_entry.options = { - "apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}} - } + new_options = {"apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}}} mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry(mock_config_entry, options=new_options) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/anova/__init__.py b/tests/components/anova/__init__.py index 887f5b3b05b..903a1180980 100644 --- a/tests/components/anova/__init__.py +++ b/tests/components/anova/__init__.py @@ -36,6 +36,7 @@ def create_entry(hass: HomeAssistant, device_id: str = DEVICE_UNIQUE_ID) -> Conf }, unique_id="sample@gmail.com", version=1, + minor_version=2, ) entry.add_to_hass(hass) return entry diff --git a/tests/components/anova/test_config_flow.py b/tests/components/anova/test_config_flow.py index 0f93b869296..3b2afaa49c0 100644 --- a/tests/components/anova/test_config_flow.py +++ b/tests/components/anova/test_config_flow.py @@ -6,7 +6,7 @@ from anova_wifi import AnovaApi, InvalidLogin from homeassistant import config_entries from homeassistant.components.anova.const import DOMAIN -from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -27,7 +27,6 @@ async def test_flow_user(hass: HomeAssistant, anova_api: AnovaApi) -> None: assert result["data"] == { CONF_USERNAME: "sample@gmail.com", CONF_PASSWORD: "sample", - CONF_DEVICES: [], } diff --git a/tests/components/anova/test_init.py b/tests/components/anova/test_init.py index 5fc63fcaf93..2e3e2920abc 100644 --- a/tests/components/anova/test_init.py +++ b/tests/components/anova/test_init.py @@ -1,13 +1,18 @@ """Test init for Anova.""" +from unittest.mock import patch + from anova_wifi import AnovaApi -from homeassistant.components.anova import DOMAIN +from homeassistant.components.anova.const import DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from . 
import async_init_integration, create_entry
 
+from tests.common import MockConfigEntry
+
 
 async def test_async_setup_entry(hass: HomeAssistant, anova_api: AnovaApi) -> None:
     """Test a successful setup entry."""
@@ -55,3 +60,34 @@ async def test_websocket_failure(
     """Test that we successfully handle a websocket failure on setup."""
     entry = await async_init_integration(hass)
     assert entry.state is ConfigEntryState.SETUP_RETRY
+
+
+async def test_migration_removing_devices_in_config_entry(
+    hass: HomeAssistant, anova_api: AnovaApi
+) -> None:
+    """Test migration removing CONF_DEVICES from the config entry data."""
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        title="Anova",
+        data={
+            CONF_USERNAME: "sample@gmail.com",
+            CONF_PASSWORD: "sample",
+            CONF_DEVICES: [],
+        },
+        unique_id="sample@gmail.com",
+        version=1,
+        minor_version=1,
+    )
+    entry.add_to_hass(hass)
+
+    with patch("homeassistant.components.anova.AnovaApi.authenticate"):
+        await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+
+    state = hass.states.get("sensor.anova_precision_cooker_mode")
+    assert state is not None
+    assert state.state == "idle"
+
+    assert entry.version == 1
+    assert entry.minor_version == 2
+    assert CONF_DEVICES not in entry.data
diff --git a/tests/components/anthropic/test_config_flow.py b/tests/components/anthropic/test_config_flow.py
index df27352b7b2..a5a025b00d0 100644
--- a/tests/components/anthropic/test_config_flow.py
+++ b/tests/components/anthropic/test_config_flow.py
@@ -108,7 +108,7 @@ async def test_options(
                 ),
                 body={"type": "error", "error": {"type": "invalid_request_error"}},
             ),
-            "invalid_request_error",
+            "unknown",
         ),
         (
             AuthenticationError(
diff --git a/tests/components/aosmith/conftest.py b/tests/components/aosmith/conftest.py
index 7efbe0c58b2..31e36332a89 100644
--- a/tests/components/aosmith/conftest.py
+++ b/tests/components/aosmith/conftest.py
@@ -10,7 +10,6 @@ from py_aosmith.models import (
     DeviceType,
     EnergyUseData,
     EnergyUseHistoryEntry,
-    HotWaterStatus,
     OperationMode,
     SupportedOperationModeInfo,
 )
@@ -93,7 +92,7 @@ def build_device_fixture(
             temperature_setpoint_pending=setpoint_pending,
             temperature_setpoint_previous=130,
             temperature_setpoint_maximum=130,
-            hot_water_status=HotWaterStatus.LOW,
+            hot_water_status=90,
         ),
     )
diff --git a/tests/components/aosmith/fixtures/get_all_device_info.json b/tests/components/aosmith/fixtures/get_all_device_info.json
index 4d19a80a3ad..27bd5b24a16 100644
--- a/tests/components/aosmith/fixtures/get_all_device_info.json
+++ b/tests/components/aosmith/fixtures/get_all_device_info.json
@@ -103,7 +103,7 @@
         }
       ],
       "firmwareVersion": "2.14",
-      "hotWaterStatus": "HIGH",
+      "hotWaterStatus": 10,
       "isAdvancedLoadUpMore": false,
       "isCtaUcmPresent": false,
      "isDemandResponsePaused": false,
diff --git a/tests/components/aosmith/snapshots/test_diagnostics.ambr b/tests/components/aosmith/snapshots/test_diagnostics.ambr
index 8704cdaa214..e2cf6c6b24b 100644
--- a/tests/components/aosmith/snapshots/test_diagnostics.ambr
+++ b/tests/components/aosmith/snapshots/test_diagnostics.ambr
@@ -43,7 +43,7 @@
       'error': '',
       'firmwareVersion': '2.14',
       'heaterSsid': '**REDACTED**',
-      'hotWaterStatus': 'HIGH',
+      'hotWaterStatus': 10,
       'isAdvancedLoadUpMore': False,
       'isCtaUcmPresent': False,
       'isDemandResponsePaused': False,
diff --git a/tests/components/aosmith/snapshots/test_sensor.ambr b/tests/components/aosmith/snapshots/test_sensor.ambr
index 7aae9713037..563b52f6df7 100644
--- a/tests/components/aosmith/snapshots/test_sensor.ambr
+++ 
b/tests/components/aosmith/snapshots/test_sensor.ambr @@ -58,13 +58,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'low', - 'medium', - 'high', - ]), - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -81,7 +75,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, 'original_name': 'Hot water availability', 'platform': 'aosmith', @@ -89,25 +83,20 @@ 'supported_features': 0, 'translation_key': 'hot_water_availability', 'unique_id': 'hot_water_availability_junctionId', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_state[sensor.my_water_heater_hot_water_availability-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'enum', 'friendly_name': 'My water heater Hot water availability', - 'options': list([ - 'low', - 'medium', - 'high', - ]), + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.my_water_heater_hot_water_availability', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'low', + 'state': '90', }) # --- diff --git a/tests/components/apcupsd/__init__.py b/tests/components/apcupsd/__init__.py index b75f3eab3af..eb8cd594ad7 100644 --- a/tests/components/apcupsd/__init__.py +++ b/tests/components/apcupsd/__init__.py @@ -4,7 +4,7 @@ from collections import OrderedDict from typing import Final from unittest.mock import patch -from homeassistant.components.apcupsd import DOMAIN +from homeassistant.components.apcupsd.const import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant diff --git a/tests/components/apcupsd/test_config_flow.py b/tests/components/apcupsd/test_config_flow.py index 2888771eb01..88594260579 100644 --- a/tests/components/apcupsd/test_config_flow.py +++ b/tests/components/apcupsd/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest -from homeassistant.components.apcupsd import DOMAIN +from homeassistant.components.apcupsd.const import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE from homeassistant.core import HomeAssistant diff --git a/tests/components/apple_tv/test_config_flow.py b/tests/components/apple_tv/test_config_flow.py index f37042a6f50..4567bd32582 100644 --- a/tests/components/apple_tv/test_config_flow.py +++ b/tests/components/apple_tv/test_config_flow.py @@ -16,6 +16,7 @@ from homeassistant.components.apple_tv.const import ( CONF_START_OFF, DOMAIN, ) +from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -1189,18 +1190,17 @@ async def test_reconfigure_update_credentials(hass: HomeAssistant) -> None: ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": "reauth"}, - data={"identifier": "mrpid", "name": "apple tv"}, - ) + result = await config_entry.start_reauth_flow(hass, data={"name": "apple tv"}) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {}, ) assert result2["type"] is FlowResultType.FORM - assert result2["description_placeholders"] == {"protocol": "MRP"} + assert result2["description_placeholders"] == { + CONF_NAME: "Mock Title", + "protocol": "MRP", + } result3 = await hass.config_entries.flow.async_configure( 
result["flow_id"], {"pin": 1111} diff --git a/tests/components/application_credentials/test_init.py b/tests/components/application_credentials/test_init.py index d90084fa7c9..b72d9653c2d 100644 --- a/tests/components/application_credentials/test_init.py +++ b/tests/components/application_credentials/test_init.py @@ -423,6 +423,10 @@ async def test_import_named_credential( ] +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_credentials"], +) async def test_config_flow_no_credentials(hass: HomeAssistant) -> None: """Test config flow base case with no credentials registered.""" result = await hass.config_entries.flow.async_init( @@ -432,6 +436,10 @@ async def test_config_flow_no_credentials(hass: HomeAssistant) -> None: assert result.get("reason") == "missing_credentials" +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_credentials"], +) async def test_config_flow_other_domain( hass: HomeAssistant, ws_client: ClientFixture, @@ -559,6 +567,10 @@ async def test_config_flow_multiple_entries( ) +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_credentials"], +) async def test_config_flow_create_delete_credential( hass: HomeAssistant, ws_client: ClientFixture, @@ -604,6 +616,10 @@ async def test_config_flow_with_config_credential( assert result["data"].get("auth_implementation") == TEST_DOMAIN +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_configuration"], +) @pytest.mark.parametrize("mock_application_credentials_integration", [None]) async def test_import_without_setup(hass: HomeAssistant, config_credential) -> None: """Test import of credentials without setting up the integration.""" @@ -619,6 +635,10 @@ async def test_import_without_setup(hass: HomeAssistant, config_credential) -> N assert result.get("reason") == "missing_configuration" +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_configuration"], +) @pytest.mark.parametrize("mock_application_credentials_integration", [None]) async def test_websocket_without_platform( hass: HomeAssistant, ws_client: ClientFixture diff --git a/tests/components/apsystems/test_init.py b/tests/components/apsystems/test_init.py new file mode 100644 index 00000000000..f127744dbf4 --- /dev/null +++ b/tests/components/apsystems/test_init.py @@ -0,0 +1,69 @@ +"""Test the APSystem setup.""" + +import datetime +from unittest.mock import AsyncMock + +from APsystemsEZ1 import InverterReturnedError +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.apsystems.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration
+
+from tests.common import MockConfigEntry, async_fire_time_changed
+
+SCAN_INTERVAL = datetime.timedelta(seconds=12)
+
+
+@pytest.mark.usefixtures("mock_apsystems")
+async def test_load_unload_entry(
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test load and unload entry."""
+    await setup_integration(hass, mock_config_entry)
+    assert mock_config_entry.state is ConfigEntryState.LOADED
+
+    await hass.config_entries.async_remove(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+    assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
+
+
+async def test_setup_failed(
+    hass: HomeAssistant,
+    mock_apsystems: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test setup failure when the device info cannot be fetched."""
+    mock_apsystems.get_device_info.side_effect = TimeoutError
+    await setup_integration(hass, mock_config_entry)
+    entry = hass.config_entries.async_entries(DOMAIN)[0]
+    assert entry.state is ConfigEntryState.SETUP_RETRY
+
+
+async def test_update(
+    hass: HomeAssistant,
+    mock_apsystems: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    caplog: pytest.LogCaptureFixture,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test update data with an inverter error and recovery."""
+    await setup_integration(hass, mock_config_entry)
+
+    assert mock_config_entry.state is ConfigEntryState.LOADED
+    assert "Inverter returned an error" not in caplog.text
+    mock_apsystems.get_output_data.side_effect = InverterReturnedError
+    freezer.tick(SCAN_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+    assert "Error fetching APSystems Data data:" in caplog.text
+    caplog.clear()
+    mock_apsystems.get_output_data.side_effect = None
+    freezer.tick(SCAN_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+    assert "Fetching APSystems Data data recovered" in caplog.text
diff --git a/tests/components/aquacell/test_config_flow.py b/tests/components/aquacell/test_config_flow.py
index b73852d513f..f677b3f8348 100644
--- a/tests/components/aquacell/test_config_flow.py
+++ b/tests/components/aquacell/test_config_flow.py
@@ -79,6 +79,7 @@ async def test_full_flow(
     ("exception", "error"),
     [
         (ApiException, "cannot_connect"),
+        (TimeoutError, "cannot_connect"),
         (AuthenticationFailed, "invalid_auth"),
         (Exception, "unknown"),
     ],
diff --git a/tests/components/aseko_pool_live/conftest.py b/tests/components/aseko_pool_live/conftest.py
new file mode 100644
index 00000000000..f3bbddb2cab
--- /dev/null
+++ b/tests/components/aseko_pool_live/conftest.py
@@ -0,0 +1,20 @@
+"""Aseko Pool Live conftest."""
+
+from datetime import datetime
+
+from aioaseko import User
+import pytest
+
+
+@pytest.fixture
+def user() -> User:
+    """Aseko User fixture."""
+    return User(
+        user_id="a_user_id",
+        created_at=datetime.now(),
+        updated_at=datetime.now(),
+        name="John",
+        surname="Doe",
+        language="any_language",
+        is_active=True,
+    )
diff --git a/tests/components/aseko_pool_live/test_config_flow.py b/tests/components/aseko_pool_live/test_config_flow.py
index 4307e527cee..b307f00abbe 100644
--- a/tests/components/aseko_pool_live/test_config_flow.py
+++ b/tests/components/aseko_pool_live/test_config_flow.py
@@ -2,7 +2,7 @@
 
 from unittest.mock import patch
 
-from aioaseko import AccountInfo, APIUnavailable, InvalidAuthCredentials
+from aioaseko import AsekoAPIError, AsekoInvalidCredentials, User
 import pytest
 
 from homeassistant import config_entries
@@ -23,7 +23,7 @@ async def test_async_step_user_form(hass: HomeAssistant) -> None:
assert result["errors"] == {} -async def test_async_step_user_success(hass: HomeAssistant) -> None: +async def test_async_step_user_success(hass: HomeAssistant, user: User) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -31,8 +31,8 @@ async def test_async_step_user_success(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", - return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), + "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", + return_value=user, ), patch( "homeassistant.components.aseko_pool_live.async_setup_entry", @@ -60,13 +60,13 @@ async def test_async_step_user_success(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("error_web", "reason"), [ - (APIUnavailable, "cannot_connect"), - (InvalidAuthCredentials, "invalid_auth"), + (AsekoAPIError, "cannot_connect"), + (AsekoInvalidCredentials, "invalid_auth"), (Exception, "unknown"), ], ) async def test_async_step_user_exception( - hass: HomeAssistant, error_web: Exception, reason: str + hass: HomeAssistant, user: User, error_web: Exception, reason: str ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -74,8 +74,8 @@ async def test_async_step_user_exception( ) with patch( - "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", - return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), + "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", + return_value=user, side_effect=error_web, ): result2 = await hass.config_entries.flow.async_configure( @@ -93,13 +93,13 @@ async def test_async_step_user_exception( @pytest.mark.parametrize( ("error_web", "reason"), [ - (APIUnavailable, "cannot_connect"), - (InvalidAuthCredentials, "invalid_auth"), + (AsekoAPIError, "cannot_connect"), + (AsekoInvalidCredentials, "invalid_auth"), (Exception, "unknown"), ], ) async def test_get_account_info_exceptions( - hass: HomeAssistant, error_web: Exception, reason: str + hass: HomeAssistant, user: User, error_web: Exception, reason: str ) -> None: """Test we handle config flow exceptions.""" result = await hass.config_entries.flow.async_init( @@ -107,8 +107,8 @@ async def test_get_account_info_exceptions( ) with patch( - "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", - return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), + "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", + return_value=user, side_effect=error_web, ): result2 = await hass.config_entries.flow.async_configure( @@ -123,52 +123,102 @@ async def test_get_account_info_exceptions( assert result2["errors"] == {"base": reason} -async def test_async_step_reauth_success(hass: HomeAssistant) -> None: +async def test_async_step_reauth_success(hass: HomeAssistant, user: User) -> None: """Test successful reauthentication.""" mock_entry = MockConfigEntry( domain=DOMAIN, - unique_id="UID", - data={CONF_EMAIL: "aseko@example.com"}, + unique_id="a_user_id", + data={CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "passw0rd"}, + version=2, ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert 
result["step_id"] == "reauth_confirm" assert result["errors"] == {} - with patch( - "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", - return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), - ) as mock_setup_entry: + with ( + patch( + "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", + return_value=user, + ), + patch( + "homeassistant.components.aseko_pool_live.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "passw0rd"}, + {CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "new_password"}, ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" assert len(mock_setup_entry.mock_calls) == 1 + assert mock_entry.unique_id == "a_user_id" + assert dict(mock_entry.data) == { + CONF_EMAIL: "aseko@example.com", + CONF_PASSWORD: "new_password", + } + + +async def test_async_step_reauth_mismatch(hass: HomeAssistant, user: User) -> None: + """Test mismatch reauthentication.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="UID", + data={CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "passw0rd"}, + version=2, + ) + mock_entry.add_to_hass(hass) + + result = await mock_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {} + + with ( + patch( + "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", + return_value=user, + ), + patch( + "homeassistant.components.aseko_pool_live.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "new_password"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" + assert len(mock_setup_entry.mock_calls) == 0 + assert mock_entry.unique_id == "UID" + assert dict(mock_entry.data) == { + CONF_EMAIL: "aseko@example.com", + CONF_PASSWORD: "passw0rd", + } @pytest.mark.parametrize( ("error_web", "reason"), [ - (APIUnavailable, "cannot_connect"), - (InvalidAuthCredentials, "invalid_auth"), + (AsekoAPIError, "cannot_connect"), + (AsekoInvalidCredentials, "invalid_auth"), (Exception, "unknown"), ], ) async def test_async_step_reauth_exception( - hass: HomeAssistant, error_web: Exception, reason: str + hass: HomeAssistant, user: User, error_web: Exception, reason: str ) -> None: """Test we get the form.""" @@ -179,17 +229,11 @@ async def test_async_step_reauth_exception( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - ) + result = await mock_entry.start_reauth_flow(hass) with patch( - "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", - return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), + "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", + return_value=user, side_effect=error_web, ): result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index b7bf83a7ed0..0f6872edbfe 100644 --- a/tests/components/assist_pipeline/conftest.py 
+++ b/tests/components/assist_pipeline/conftest.py @@ -23,7 +23,7 @@ from homeassistant.components.assist_pipeline.pipeline import ( ) from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component @@ -36,6 +36,8 @@ from tests.common import ( mock_integration, mock_platform, ) +from tests.components.stt.common import MockSTTProvider, MockSTTProviderEntity +from tests.components.tts.common import MockTTSProvider _TRANSCRIPT = "test transcript" @@ -47,107 +49,6 @@ def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" -class BaseProvider: - """Mock STT provider.""" - - _supported_languages = ["en-US"] - - def __init__(self, text: str) -> None: - """Init test provider.""" - self.text = text - self.received: list[bytes] = [] - - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return self._supported_languages - - @property - def supported_formats(self) -> list[stt.AudioFormats]: - """Return a list of supported formats.""" - return [stt.AudioFormats.WAV] - - @property - def supported_codecs(self) -> list[stt.AudioCodecs]: - """Return a list of supported codecs.""" - return [stt.AudioCodecs.PCM] - - @property - def supported_bit_rates(self) -> list[stt.AudioBitRates]: - """Return a list of supported bitrates.""" - return [stt.AudioBitRates.BITRATE_16] - - @property - def supported_sample_rates(self) -> list[stt.AudioSampleRates]: - """Return a list of supported samplerates.""" - return [stt.AudioSampleRates.SAMPLERATE_16000] - - @property - def supported_channels(self) -> list[stt.AudioChannels]: - """Return a list of supported channels.""" - return [stt.AudioChannels.CHANNEL_MONO] - - async def async_process_audio_stream( - self, metadata: stt.SpeechMetadata, stream: AsyncIterable[bytes] - ) -> stt.SpeechResult: - """Process an audio stream.""" - async for data in stream: - if not data: - break - self.received.append(data) - return stt.SpeechResult(self.text, stt.SpeechResultState.SUCCESS) - - -class MockSttProvider(BaseProvider, stt.Provider): - """Mock provider.""" - - -class MockSttProviderEntity(BaseProvider, stt.SpeechToTextEntity): - """Mock provider entity.""" - - _attr_name = "Mock STT" - - -class MockTTSProvider(tts.Provider): - """Mock TTS provider.""" - - name = "Test" - _supported_languages = ["en-US"] - _supported_voices = { - "en-US": [ - tts.Voice("james_earl_jones", "James Earl Jones"), - tts.Voice("fran_drescher", "Fran Drescher"), - ] - } - _supported_options = ["voice", "age", tts.ATTR_AUDIO_OUTPUT] - - @property - def default_language(self) -> str: - """Return the default language.""" - return "en" - - @property - def supported_languages(self) -> list[str]: - """Return list of supported languages.""" - return self._supported_languages - - @callback - def async_get_supported_voices(self, language: str) -> list[tts.Voice] | None: - """Return a list of supported voices for a language.""" - return self._supported_voices.get(language) - - @property - def supported_options(self) -> list[str]: - """Return list of supported options like voice, emotions.""" - return self._supported_options - - def get_tts_audio( - self, message: str, language: str, 
options: dict[str, Any] - ) -> tts.TtsAudioType: - """Load TTS data.""" - return ("mp3", b"") - - class MockTTSPlatform(MockPlatform): """A mock TTS platform.""" @@ -162,19 +63,23 @@ class MockTTSPlatform(MockPlatform): @pytest.fixture async def mock_tts_provider() -> MockTTSProvider: """Mock TTS provider.""" - return MockTTSProvider() + provider = MockTTSProvider("en") + provider._supported_languages = ["en-US"] + return provider @pytest.fixture -async def mock_stt_provider() -> MockSttProvider: +async def mock_stt_provider() -> MockSTTProvider: """Mock STT provider.""" - return MockSttProvider(_TRANSCRIPT) + return MockSTTProvider(supported_languages=["en-US"], text=_TRANSCRIPT) @pytest.fixture -def mock_stt_provider_entity() -> MockSttProviderEntity: +def mock_stt_provider_entity() -> MockSTTProviderEntity: """Test provider entity fixture.""" - return MockSttProviderEntity(_TRANSCRIPT) + entity = MockSTTProviderEntity(supported_languages=["en-US"], text=_TRANSCRIPT) + entity._attr_name = "Mock STT" + return entity class MockSttPlatform(MockPlatform): @@ -290,8 +195,8 @@ def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: @pytest.fixture async def init_supporting_components( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider: MockSTTProvider, + mock_stt_provider_entity: MockSTTProviderEntity, mock_tts_provider: MockTTSProvider, mock_wake_word_provider_entity: MockWakeWordEntity, mock_wake_word_provider_entity2: MockWakeWordEntity2, diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index 7f29534e473..f63a28efbb7 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -37,6 +37,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }), 'type': , }), @@ -60,6 +61,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -75,9 +77,9 @@ dict({ 'data': dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }), 'type': , @@ -126,6 +128,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en-US', + 'prefer_local_intents': False, }), 'type': , }), @@ -149,6 +152,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -164,9 +168,9 @@ dict({ 'data': dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=Arnold+Schwarzenegger", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_2657c1a8ee_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }), 'type': , @@ -215,6 +219,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en-US', + 'prefer_local_intents': False, }), 'type': , }), @@ -238,6 
+243,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -253,9 +259,9 @@ dict({ 'data': dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=Arnold+Schwarzenegger", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_2657c1a8ee_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }), 'type': , @@ -328,6 +334,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }), 'type': , }), @@ -351,6 +358,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -366,9 +374,9 @@ dict({ 'data': dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }), 'type': , @@ -379,6 +387,93 @@ }), ]) # --- +# name: test_pipeline_from_audio_stream_with_cloud_auth_fail + list([ + dict({ + 'data': dict({ + 'language': 'en', + 'pipeline': , + }), + 'type': , + }), + dict({ + 'data': dict({ + 'engine': 'stt.mock_stt', + 'metadata': dict({ + 'bit_rate': , + 'channel': , + 'codec': , + 'format': , + 'language': 'en-US', + 'sample_rate': , + }), + }), + 'type': , + }), + dict({ + 'data': dict({ + 'code': 'cloud-auth-failed', + 'message': 'Home Assistant Cloud authentication failed', + }), + 'type': , + }), + dict({ + 'data': None, + 'type': , + }), + ]) +# --- +# name: test_pipeline_language_used_instead_of_conversation_language + list([ + dict({ + 'data': dict({ + 'language': 'en', + 'pipeline': , + }), + 'type': , + }), + dict({ + 'data': dict({ + 'conversation_id': None, + 'device_id': None, + 'engine': 'conversation.home_assistant', + 'intent_input': 'test input', + 'language': 'en', + 'prefer_local_intents': False, + }), + 'type': , + }), + dict({ + 'data': dict({ + 'intent_output': dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + }), + }), + }), + 'processed_locally': True, + }), + 'type': , + }), + dict({ + 'data': None, + 'type': , + }), + ]) +# --- # name: test_wake_word_detection_aborted list([ dict({ diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index 7ea6af7e0bd..41747a50eb6 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -36,6 +36,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline.4 @@ -58,6 +59,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline.5 @@ -71,9 +73,9 @@ # name: test_audio_pipeline.6 dict({ 'tts_output': dict({ - 'media_id': 
"media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }) # --- @@ -117,6 +119,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline_debug.4 @@ -139,6 +142,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline_debug.5 @@ -152,9 +156,9 @@ # name: test_audio_pipeline_debug.6 dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }) # --- @@ -210,6 +214,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline_with_enhancements.4 @@ -232,6 +237,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline_with_enhancements.5 @@ -245,9 +251,9 @@ # name: test_audio_pipeline_with_enhancements.6 dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }) # --- @@ -313,6 +319,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline_with_wake_word_no_timeout.6 @@ -335,6 +342,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline_with_wake_word_no_timeout.7 @@ -348,9 +356,9 @@ # name: test_audio_pipeline_with_wake_word_no_timeout.8 dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }) # --- @@ -519,6 +527,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_intent_failed.2 @@ -541,6 +550,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_intent_timeout.2 @@ -569,6 +579,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'never mind', 'language': 'en', + 'prefer_local_intents': False, }) # --- # 
name: test_pipeline_empty_tts_output.2 @@ -592,6 +603,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_pipeline_empty_tts_output.3 @@ -680,6 +692,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_text_only_pipeline[extra_msg0].2 @@ -697,11 +710,12 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called are', + 'speech': 'Sorry, I am not aware of any area called Are', }), }), }), }), + 'processed_locally': True, }) # --- # name: test_text_only_pipeline[extra_msg0].3 @@ -724,6 +738,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_text_only_pipeline[extra_msg1].2 @@ -741,11 +756,12 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called are', + 'speech': 'Sorry, I am not aware of any area called Are', }), }), }), }), + 'processed_locally': True, }) # --- # name: test_text_only_pipeline[extra_msg1].3 diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index 04edab7131f..d4cce4e2e98 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -8,22 +8,31 @@ import tempfile from unittest.mock import ANY, patch import wave +import hass_nabucasa import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components import assist_pipeline, media_source, stt, tts +from homeassistant.components import ( + assist_pipeline, + conversation, + media_source, + stt, + tts, +) from homeassistant.components.assist_pipeline.const import ( BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, DOMAIN, ) +from homeassistant.const import MATCH_ALL from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers import intent from homeassistant.setup import async_setup_component from .conftest import ( BYTES_ONE_SECOND, - MockSttProvider, - MockSttProviderEntity, + MockSTTProvider, + MockSTTProviderEntity, MockTTSProvider, MockWakeWordEntity, make_10ms_chunk, @@ -47,7 +56,7 @@ def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: async def test_pipeline_from_audio_stream_auto( hass: HomeAssistant, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider_entity: MockSTTProviderEntity, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -63,21 +72,24 @@ async def test_pipeline_from_audio_stream_auto( yield make_10ms_chunk(b"part2") yield b"" - await assist_pipeline.async_pipeline_from_audio_stream( - hass, - context=Context(), - event_callback=events.append, - stt_metadata=stt.SpeechMetadata( - language="", - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=audio_data(), - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + 
sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) assert process_events(events) == snapshot assert len(mock_stt_provider_entity.received) == 2 @@ -88,7 +100,7 @@ async def test_pipeline_from_audio_stream_auto( async def test_pipeline_from_audio_stream_legacy( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -126,23 +138,26 @@ async def test_pipeline_from_audio_stream_legacy( assert msg["success"] pipeline_id = msg["result"]["id"] - # Use the created pipeline - await assist_pipeline.async_pipeline_from_audio_stream( - hass, - context=Context(), - event_callback=events.append, - stt_metadata=stt.SpeechMetadata( - language="en-UK", - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=audio_data(), - pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + # Use the created pipeline + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="en-UK", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + pipeline_id=pipeline_id, + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) assert process_events(events) == snapshot assert len(mock_stt_provider.received) == 2 @@ -153,7 +168,7 @@ async def test_pipeline_from_audio_stream_legacy( async def test_pipeline_from_audio_stream_entity( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider_entity: MockSTTProviderEntity, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -191,23 +206,26 @@ async def test_pipeline_from_audio_stream_entity( assert msg["success"] pipeline_id = msg["result"]["id"] - # Use the created pipeline - await assist_pipeline.async_pipeline_from_audio_stream( - hass, - context=Context(), - event_callback=events.append, - stt_metadata=stt.SpeechMetadata( - language="en-UK", - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=audio_data(), - pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + # Use the created pipeline + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="en-UK", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + pipeline_id=pipeline_id, + 
audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) assert process_events(events) == snapshot assert len(mock_stt_provider_entity.received) == 2 @@ -218,7 +236,7 @@ async def test_pipeline_from_audio_stream_entity( async def test_pipeline_from_audio_stream_no_stt( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -281,7 +299,7 @@ async def test_pipeline_from_audio_stream_no_stt( async def test_pipeline_from_audio_stream_unknown_pipeline( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -319,7 +337,7 @@ async def test_pipeline_from_audio_stream_unknown_pipeline( async def test_pipeline_from_audio_stream_wake_word( hass: HomeAssistant, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider_entity: MockSTTProviderEntity, mock_wake_word_provider_entity: MockWakeWordEntity, init_components, snapshot: SnapshotAssertion, @@ -355,25 +373,28 @@ async def test_pipeline_from_audio_stream_wake_word( yield b"" - await assist_pipeline.async_pipeline_from_audio_stream( - hass, - context=Context(), - event_callback=events.append, - stt_metadata=stt.SpeechMetadata( - language="", - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=audio_data(), - start_stage=assist_pipeline.PipelineStage.WAKE_WORD, - wake_word_settings=assist_pipeline.WakeWordSettings( - audio_seconds_to_buffer=1.5 - ), - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + start_stage=assist_pipeline.PipelineStage.WAKE_WORD, + wake_word_settings=assist_pipeline.WakeWordSettings( + audio_seconds_to_buffer=1.5 + ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) assert process_events(events) == snapshot @@ -395,7 +416,7 @@ async def test_pipeline_from_audio_stream_wake_word( async def test_pipeline_save_audio( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -474,7 +495,7 @@ async def test_pipeline_save_audio( async def test_pipeline_saved_audio_with_device_id( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -529,7 +550,7 @@ async def test_pipeline_saved_audio_with_device_id( async def test_pipeline_saved_audio_write_error( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: 
SnapshotAssertion, @@ -578,7 +599,7 @@ async def test_pipeline_saved_audio_write_error( async def test_pipeline_saved_audio_empty_queue( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -641,7 +662,7 @@ async def test_pipeline_saved_audio_empty_queue( async def test_wake_word_detection_aborted( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, @@ -788,13 +809,12 @@ async def test_tts_audio_output( assert len(extra_options) == 0, extra_options -async def test_tts_supports_preferred_format( +async def test_tts_wav_preferred_format( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts_provider: MockTTSProvider, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, - snapshot: SnapshotAssertion, ) -> None: """Test that preferred format options are given to the TTS system if supported.""" client = await hass_client() @@ -829,6 +849,7 @@ async def test_tts_supports_preferred_format( tts.ATTR_PREFERRED_FORMAT, tts.ATTR_PREFERRED_SAMPLE_RATE, tts.ATTR_PREFERRED_SAMPLE_CHANNELS, + tts.ATTR_PREFERRED_SAMPLE_BYTES, ] ) @@ -850,6 +871,346 @@ async def test_tts_supports_preferred_format( options = mock_get_tts_audio.call_args_list[0].kwargs["options"] # We should have received preferred format options in get_tts_audio - assert tts.ATTR_PREFERRED_FORMAT in options - assert tts.ATTR_PREFERRED_SAMPLE_RATE in options - assert tts.ATTR_PREFERRED_SAMPLE_CHANNELS in options + assert options.get(tts.ATTR_PREFERRED_FORMAT) == "wav" + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_RATE)) == 16000 + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS)) == 1 + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_BYTES)) == 2 + + +async def test_tts_dict_preferred_format( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_tts_provider: MockTTSProvider, + init_components, + pipeline_data: assist_pipeline.pipeline.PipelineData, +) -> None: + """Test that preferred format options are given to the TTS system if supported.""" + client = await hass_client() + assert await async_setup_component(hass, media_source.DOMAIN, {}) + + events: list[assist_pipeline.PipelineEvent] = [] + + pipeline_store = pipeline_data.pipeline_store + pipeline_id = pipeline_store.async_get_preferred_item() + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + tts_input="This is a test.", + conversation_id=None, + device_id=None, + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.TTS, + end_stage=assist_pipeline.PipelineStage.TTS, + event_callback=events.append, + tts_audio_output={ + tts.ATTR_PREFERRED_FORMAT: "flac", + tts.ATTR_PREFERRED_SAMPLE_RATE: 48000, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 2, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + }, + ), + ) + await pipeline_input.validate() + + # Make the TTS provider support preferred format options + supported_options = list(mock_tts_provider.supported_options or []) + supported_options.extend( + [ + tts.ATTR_PREFERRED_FORMAT, + tts.ATTR_PREFERRED_SAMPLE_RATE, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS, + tts.ATTR_PREFERRED_SAMPLE_BYTES, + ] + ) + + with ( + 
patch.object(mock_tts_provider, "_supported_options", supported_options), + patch.object(mock_tts_provider, "get_tts_audio") as mock_get_tts_audio, + ): + await pipeline_input.execute() + + for event in events: + if event.type == assist_pipeline.PipelineEventType.TTS_END: + # We must fetch the media URL to trigger the TTS + assert event.data + media_id = event.data["tts_output"]["media_id"] + resolved = await media_source.async_resolve_media(hass, media_id, None) + await client.get(resolved.url) + + assert mock_get_tts_audio.called + options = mock_get_tts_audio.call_args_list[0].kwargs["options"] + + # We should have received preferred format options in get_tts_audio + assert options.get(tts.ATTR_PREFERRED_FORMAT) == "flac" + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_RATE)) == 48000 + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS)) == 2 + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_BYTES)) == 2 + + +async def test_sentence_trigger_overrides_conversation_agent( + hass: HomeAssistant, + init_components, + pipeline_data: assist_pipeline.pipeline.PipelineData, +) -> None: + """Test that sentence triggers are checked before a non-default conversation agent.""" + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": { + "platform": "conversation", + "command": [ + "test trigger sentence", + ], + }, + "action": { + "set_conversation_response": "test trigger response", + }, + } + }, + ) + + events: list[assist_pipeline.PipelineEvent] = [] + + pipeline_store = pipeline_data.pipeline_store + pipeline_id = pipeline_store.async_get_preferred_item() + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="test trigger sentence", + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + intent_agent="test-agent", # not the default agent + ), + ) + + # Ensure prepare succeeds + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_get_agent_info", + return_value=conversation.AgentInfo(id="test-agent", name="Test Agent"), + ): + await pipeline_input.validate() + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse" + ) as mock_async_converse: + await pipeline_input.execute() + + # Sentence trigger should have been handled + mock_async_converse.assert_not_called() + + # Verify sentence trigger response + intent_end_event = next( + ( + e + for e in events + if e.type == assist_pipeline.PipelineEventType.INTENT_END + ), + None, + ) + assert (intent_end_event is not None) and intent_end_event.data + assert ( + intent_end_event.data["intent_output"]["response"]["speech"]["plain"][ + "speech" + ] + == "test trigger response" + ) + + +async def test_prefer_local_intents( + hass: HomeAssistant, + init_components, + pipeline_data: assist_pipeline.pipeline.PipelineData, +) -> None: + """Test that the default agent is checked first when local intents are preferred.""" + events: list[assist_pipeline.PipelineEvent] = [] + + # Reuse custom sentences in test config + class OrderBeerIntentHandler(intent.IntentHandler): + intent_type = "OrderBeer" + + async def async_handle( + self, intent_obj: intent.Intent + ) -> intent.IntentResponse: + response = intent_obj.create_response() + response.async_set_speech("Order 
confirmed") + return response + + handler = OrderBeerIntentHandler() + intent.async_register(hass, handler) + + # Fake a test agent and prefer local intents + pipeline_store = pipeline_data.pipeline_store + pipeline_id = pipeline_store.async_get_preferred_item() + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + await assist_pipeline.pipeline.async_update_pipeline( + hass, pipeline, conversation_engine="test-agent", prefer_local_intents=True + ) + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="I'd like to order a stout please", + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + ), + ) + + # Ensure prepare succeeds + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_get_agent_info", + return_value=conversation.AgentInfo(id="test-agent", name="Test Agent"), + ): + await pipeline_input.validate() + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse" + ) as mock_async_converse: + await pipeline_input.execute() + + # Test agent should not have been called + mock_async_converse.assert_not_called() + + # Verify local intent response + intent_end_event = next( + ( + e + for e in events + if e.type == assist_pipeline.PipelineEventType.INTENT_END + ), + None, + ) + assert (intent_end_event is not None) and intent_end_event.data + assert ( + intent_end_event.data["intent_output"]["response"]["speech"]["plain"][ + "speech" + ] + == "Order confirmed" + ) + + +async def test_pipeline_language_used_instead_of_conversation_language( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test that the pipeline language is used when the conversation language is '*' (all languages).""" + client = await hass_ws_client(hass) + + events: list[assist_pipeline.PipelineEvent] = [] + + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline/create", + "conversation_engine": "homeassistant", + "conversation_language": MATCH_ALL, + "language": "en", + "name": "test_name", + "stt_engine": "test", + "stt_language": "en-US", + "tts_engine": "test", + "tts_language": "en-US", + "tts_voice": "Arnold Schwarzenegger", + "wake_word_entity": None, + "wake_word_id": None, + } + ) + msg = await client.receive_json() + assert msg["success"] + pipeline_id = msg["result"]["id"] + pipeline = assist_pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="test input", + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + ), + ) + await pipeline_input.validate() + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse", + return_value=conversation.ConversationResult( + intent.IntentResponse(pipeline.language) + ), + ) as mock_async_converse: + await pipeline_input.execute() + + # Check intent start event + assert process_events(events) == snapshot + intent_start: assist_pipeline.PipelineEvent | None = None + for event in events: + if event.type == 
assist_pipeline.PipelineEventType.INTENT_START: + intent_start = event + break + + assert intent_start is not None + + # Pipeline language (en) should be used instead of '*' + assert intent_start.data.get("language") == pipeline.language + + # Check input to async_converse + mock_async_converse.assert_called_once() + assert ( + mock_async_converse.call_args_list[0].kwargs.get("language") + == pipeline.language + ) + + +async def test_pipeline_from_audio_stream_with_cloud_auth_fail( + hass: HomeAssistant, + mock_stt_provider_entity: MockSTTProviderEntity, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test creating a pipeline from an audio stream but the cloud authentication fails.""" + + events: list[assist_pipeline.PipelineEvent] = [] + + async def audio_data(): + yield b"audio" + + with patch.object( + mock_stt_provider_entity, + "async_process_audio_stream", + side_effect=hass_nabucasa.auth.Unauthenticated, + ): + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) + + assert process_events(events) == snapshot + assert len(events) == 4 # run start, stt start, error, run end + assert events[2].type == assist_pipeline.PipelineEventType.ERROR + assert events[2].data["code"] == "cloud-auth-failed" diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index ef5d5edff9e..d52e2a762ee 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -26,7 +26,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . 
import MANY_LANGUAGES -from .conftest import MockSttProviderEntity, MockTTSProvider +from .conftest import MockSTTProviderEntity, MockTTSProvider from tests.common import flush_store @@ -398,7 +398,7 @@ async def test_default_pipeline_no_stt_tts( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline( hass: HomeAssistant, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider_entity: MockSTTProviderEntity, mock_tts_provider: MockTTSProvider, ha_language: str, ha_country: str | None, @@ -441,7 +441,7 @@ async def test_default_pipeline( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline_unsupported_stt_language( - hass: HomeAssistant, mock_stt_provider_entity: MockSttProviderEntity + hass: HomeAssistant, mock_stt_provider_entity: MockSTTProviderEntity ) -> None: """Test async_get_pipeline.""" with patch.object(mock_stt_provider_entity, "_supported_languages", ["smurfish"]): @@ -574,6 +574,7 @@ async def test_update_pipeline( "tts_voice": "test_voice", "wake_word_entity": "wake_work.test_1", "wake_word_id": "wake_word_id_1", + "prefer_local_intents": False, } await async_update_pipeline( @@ -617,6 +618,7 @@ async def test_update_pipeline( "tts_voice": "test_voice", "wake_word_entity": "wake_work.test_1", "wake_word_id": "wake_word_id_1", + "prefer_local_intents": False, } diff --git a/tests/components/assist_pipeline/test_repair_flows.py b/tests/components/assist_pipeline/test_repair_flows.py new file mode 100644 index 00000000000..4c8a242b20c --- /dev/null +++ b/tests/components/assist_pipeline/test_repair_flows.py @@ -0,0 +1,17 @@ +"""Test repair flows.""" + +import pytest + +from homeassistant.components.assist_pipeline.repair_flows import ( + AssistInProgressDeprecatedRepairFlow, +) + + +@pytest.mark.parametrize( + "data", [None, {}, {"entity_id": "blah", "entity_uuid": "12345"}] +) +def test_assist_in_progress_deprecated_flow_requires_data(data: dict | None) -> None: + """Test AssistInProgressDeprecatedRepairFlow requires data.""" + + with pytest.raises(ValueError): + AssistInProgressDeprecatedRepairFlow(data) diff --git a/tests/components/assist_pipeline/test_select.py b/tests/components/assist_pipeline/test_select.py index 9fb02e228d8..5ce3b1020d0 100644 --- a/tests/components/assist_pipeline/test_select.py +++ b/tests/components/assist_pipeline/test_select.py @@ -184,7 +184,7 @@ async def test_select_entity_changing_vad_sensitivity( hass: HomeAssistant, init_select: MockConfigEntry, ) -> None: - """Test entity tracking pipeline changes.""" + """Test entity tracking vad sensitivity changes.""" config_entry = init_select # nicer naming config_entry.mock_state(hass, ConfigEntryState.LOADED) @@ -192,7 +192,7 @@ async def test_select_entity_changing_vad_sensitivity( assert state is not None assert state.state == VadSensitivity.DEFAULT.value - # Change select to new pipeline + # Change select to new sensitivity await hass.services.async_call( "select", "select_option", diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index fda26d2fb94..bd07601cd5d 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -16,7 +16,7 @@ def test_silence() -> None: segmenter = VoiceCommandSegmenter() # True return value indicates voice command has not finished - assert segmenter.process(_ONE_SECOND * 3, False) + assert segmenter.process(_ONE_SECOND * 3, 0.0) assert not segmenter.in_command @@ -26,15 +26,15 @@ def 
test_speech() -> None: segmenter = VoiceCommandSegmenter() # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) # "speech" - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) assert segmenter.in_command # silence # False return value indicates voice command is finished - assert not segmenter.process(_ONE_SECOND, False) + assert not segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command @@ -112,19 +112,19 @@ def test_silence_seconds() -> None: segmenter = VoiceCommandSegmenter(silence_seconds=1.0) # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # "speech" - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) assert segmenter.in_command # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.in_command # exactly enough silence now - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) assert not segmenter.in_command @@ -134,27 +134,27 @@ def test_silence_reset() -> None: segmenter = VoiceCommandSegmenter(silence_seconds=1.0, reset_seconds=0.5) # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # "speech" - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) assert segmenter.in_command # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.in_command # speech should reset silence detection - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert segmenter.in_command # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.in_command # exactly enough silence now - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) assert not segmenter.in_command @@ -166,23 +166,23 @@ def test_speech_reset() -> None: ) # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # not enough speech to start voice command - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert not segmenter.in_command # silence should reset speech detection - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # not enough speech to start voice command - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert not segmenter.in_command # exactly enough speech now - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert segmenter.in_command @@ -193,18 +193,18 @@ def test_timeout() -> None: # not enough to time out assert not segmenter.timed_out - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert not segmenter.timed_out # enough to time out - assert not segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.process(_ONE_SECOND * 0.5, 1.0) assert segmenter.timed_out # flag resets with more 
audio - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert not segmenter.timed_out - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.timed_out @@ -215,14 +215,38 @@ def test_command_seconds() -> None: command_seconds=3, speech_seconds=1, silence_seconds=1, reset_seconds=1 ) - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) # Silence counts towards total command length - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) # Enough to finish command now - assert segmenter.process(_ONE_SECOND, True) - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND, 1.0) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) # Silence to finish - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) + + +def test_speech_thresholds() -> None: + """Test before/in command speech thresholds.""" + + segmenter = VoiceCommandSegmenter( + before_command_speech_threshold=0.2, + in_command_speech_threshold=0.5, + command_seconds=2, + speech_seconds=1, + silence_seconds=1, + ) + + # Not high enough probability to trigger command + assert segmenter.process(_ONE_SECOND, 0.1) + assert not segmenter.in_command + + # Triggers command + assert segmenter.process(_ONE_SECOND, 0.3) + assert segmenter.in_command + + # Now that same probability is considered silence. + # Finishes command. + assert not segmenter.process(_ONE_SECOND, 0.3) diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index f1f68d4a423..c1caf6f86a4 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -119,85 +119,88 @@ async def test_audio_pipeline( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "stt", - "end_stage": "tts", - "input": { - "sample_rate": 44100, - }, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "stt", + "end_stage": "tts", + "input": { + "sample_rate": 44100, + }, + } + ) - # result - msg = await client.receive_json() - assert msg["success"] + # result + msg = await client.receive_json() + assert msg["success"] - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] - events.append(msg["event"]) + # run start + msg = await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + events.append(msg["event"]) - # stt - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # stt + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # End of audio stream (handler id + empty payload) - await 
client.send_bytes(bytes([handler_id])) + # End of audio stream (handler id + empty payload) + await client.send_bytes(bytes([handler_id])) - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # intent - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # intent + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # text-to-speech - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # text-to-speech + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - pipeline_data: PipelineData = hass.data[DOMAIN] - pipeline_id = list(pipeline_data.pipeline_debug)[0] - pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] + pipeline_data: PipelineData = hass.data[DOMAIN] + pipeline_id = list(pipeline_data.pipeline_debug)[0] + pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline_debug/get", - "pipeline_id": pipeline_id, - "pipeline_run_id": pipeline_run_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"events": events} + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline_debug/get", + "pipeline_id": pipeline_id, + "pipeline_run_id": pipeline_run_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"events": events} async def test_audio_pipeline_with_wake_word_timeout( @@ -210,49 +213,52 @@ async def test_audio_pipeline_with_wake_word_timeout( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "wake_word", - "end_stage": "tts", - "input": { - "sample_rate": SAMPLE_RATE, - "timeout": 1, - }, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "wake_word", + "end_stage": "tts", + "input": { + 
"sample_rate": SAMPLE_RATE, + "timeout": 1, + }, + } + ) - # result - msg = await client.receive_json() - assert msg["success"], msg + # result + msg = await client.receive_json() + assert msg["success"], msg - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run start + msg = await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # wake_word - msg = await client.receive_json() - assert msg["event"]["type"] == "wake_word-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # wake_word + msg = await client.receive_json() + assert msg["event"]["type"] == "wake_word-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # 2 seconds of silence - await client.send_bytes(bytes([1]) + bytes(2 * BYTES_ONE_SECOND)) + # 2 seconds of silence + await client.send_bytes(bytes([1]) + bytes(2 * BYTES_ONE_SECOND)) - # Time out error - msg = await client.receive_json() - assert msg["event"]["type"] == "error" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # Time out error + msg = await client.receive_json() + assert msg["event"]["type"] == "error" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) async def test_audio_pipeline_with_wake_word_no_timeout( @@ -265,98 +271,101 @@ async def test_audio_pipeline_with_wake_word_no_timeout( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "wake_word", - "end_stage": "tts", - "input": {"sample_rate": SAMPLE_RATE, "timeout": 0, "no_vad": True}, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "wake_word", + "end_stage": "tts", + "input": {"sample_rate": SAMPLE_RATE, "timeout": 0, "no_vad": True}, + } + ) - # result - msg = await client.receive_json() - assert msg["success"], msg - - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] - events.append(msg["event"]) - - # wake_word - msg = await client.receive_json() - assert msg["event"]["type"] == "wake_word-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) - - # "audio" - await client.send_bytes(bytes([handler_id]) + make_10ms_chunk(b"wake word")) - - async with asyncio.timeout(1): + # result msg = await client.receive_json() - assert msg["event"]["type"] == "wake_word-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + assert msg["success"], msg - # stt - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run start + msg = 
await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + events.append(msg["event"]) - # End of audio stream (handler id + empty payload) - await client.send_bytes(bytes([handler_id])) + # wake_word + msg = await client.receive_json() + assert msg["event"]["type"] == "wake_word-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # "audio" + await client.send_bytes(bytes([handler_id]) + make_10ms_chunk(b"wake word")) - # intent - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + async with asyncio.timeout(1): + msg = await client.receive_json() + assert msg["event"]["type"] == "wake_word-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # stt + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # text-to-speech - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # End of audio stream (handler id + empty payload) + await client.send_bytes(bytes([handler_id])) - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # intent + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - pipeline_data: PipelineData = hass.data[DOMAIN] - pipeline_id = list(pipeline_data.pipeline_debug)[0] - pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline_debug/get", - "pipeline_id": pipeline_id, - "pipeline_run_id": pipeline_run_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"events": events} + # text-to-speech + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) + + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) + + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) + + pipeline_data: PipelineData = hass.data[DOMAIN] + pipeline_id = list(pipeline_data.pipeline_debug)[0] + 
pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] + + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline_debug/get", + "pipeline_id": pipeline_id, + "pipeline_run_id": pipeline_run_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"events": events} async def test_audio_pipeline_no_wake_word_engine( @@ -743,7 +752,7 @@ async def test_stt_stream_failed( client = await hass_ws_client(hass) with patch( - "tests.components.assist_pipeline.conftest.MockSttProviderEntity.async_process_audio_stream", + "tests.components.assist_pipeline.conftest.MockSTTProviderEntity.async_process_audio_stream", side_effect=RuntimeError, ): await client.send_json_auto_id( @@ -974,6 +983,7 @@ async def test_add_pipeline( "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": "wakeword_entity_1", "wake_word_id": "wakeword_id_1", + "prefer_local_intents": True, } ) msg = await client.receive_json() @@ -991,6 +1001,7 @@ async def test_add_pipeline( "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": "wakeword_entity_1", "wake_word_id": "wakeword_id_1", + "prefer_local_intents": True, } assert len(pipeline_store.data) == 2 @@ -1008,6 +1019,7 @@ async def test_add_pipeline( tts_voice="Arnold Schwarzenegger", wake_word_entity="wakeword_entity_1", wake_word_id="wakeword_id_1", + prefer_local_intents=True, ) await client.send_json_auto_id( @@ -1195,6 +1207,7 @@ async def test_get_pipeline( "tts_voice": "james_earl_jones", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, } # Get conversation agent as pipeline @@ -1220,6 +1233,7 @@ async def test_get_pipeline( "tts_voice": "james_earl_jones", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, } await client.send_json_auto_id( @@ -1249,6 +1263,7 @@ async def test_get_pipeline( "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": "wakeword_entity_1", "wake_word_id": "wakeword_id_1", + "prefer_local_intents": False, } ) msg = await client.receive_json() @@ -1277,6 +1292,7 @@ async def test_get_pipeline( "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": "wakeword_entity_1", "wake_word_id": "wakeword_id_1", + "prefer_local_intents": False, } @@ -1304,6 +1320,7 @@ async def test_list_pipelines( "tts_voice": "james_earl_jones", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, } ], "preferred_pipeline": ANY, @@ -1395,6 +1412,7 @@ async def test_update_pipeline( "tts_voice": "new_tts_voice", "wake_word_entity": "new_wakeword_entity", "wake_word_id": "new_wakeword_id", + "prefer_local_intents": False, } assert len(pipeline_store.data) == 2 @@ -1446,6 +1464,7 @@ async def test_update_pipeline( "tts_voice": None, "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, } pipeline = pipeline_store.data[pipeline_id] @@ -1530,99 +1549,102 @@ async def test_audio_pipeline_debug( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "stt", - "end_stage": "tts", - "input": { - "sample_rate": 44100, - }, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "stt", + "end_stage": "tts", + "input": { + "sample_rate": 44100, + }, + } + ) - # result - msg = await client.receive_json() - assert msg["success"] + # result + msg = await 
client.receive_json() + assert msg["success"] - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] - events.append(msg["event"]) + # run start + msg = await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + events.append(msg["event"]) - # stt - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # stt + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # End of audio stream (handler id + empty payload) - await client.send_bytes(bytes([handler_id])) + # End of audio stream (handler id + empty payload) + await client.send_bytes(bytes([handler_id])) - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # intent - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # intent + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # text-to-speech - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # text-to-speech + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # Get the id of the pipeline - await client.send_json_auto_id({"type": "assist_pipeline/pipeline/list"}) - msg = await client.receive_json() - assert msg["success"] - assert len(msg["result"]["pipelines"]) == 1 + # Get the id of the pipeline + await client.send_json_auto_id({"type": "assist_pipeline/pipeline/list"}) + msg = await client.receive_json() + assert msg["success"] + assert len(msg["result"]["pipelines"]) == 1 - pipeline_id = msg["result"]["pipelines"][0]["id"] + pipeline_id = 
msg["result"]["pipelines"][0]["id"] - # Get the id for the run - await client.send_json_auto_id( - {"type": "assist_pipeline/pipeline_debug/list", "pipeline_id": pipeline_id} - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"pipeline_runs": [ANY]} + # Get the id for the run + await client.send_json_auto_id( + {"type": "assist_pipeline/pipeline_debug/list", "pipeline_id": pipeline_id} + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"pipeline_runs": [ANY]} - pipeline_run_id = msg["result"]["pipeline_runs"][0]["pipeline_run_id"] + pipeline_run_id = msg["result"]["pipeline_runs"][0]["pipeline_run_id"] - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline_debug/get", - "pipeline_id": pipeline_id, - "pipeline_run_id": pipeline_run_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"events": events} + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline_debug/get", + "pipeline_id": pipeline_id, + "pipeline_run_id": pipeline_run_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"events": events} async def test_pipeline_debug_list_runs_wrong_pipeline( @@ -1777,94 +1799,97 @@ async def test_audio_pipeline_with_enhancements( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "stt", - "end_stage": "tts", - "input": { - "sample_rate": SAMPLE_RATE, - # Enhancements - "noise_suppression_level": 2, - "auto_gain_dbfs": 15, - "volume_multiplier": 2.0, - }, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "stt", + "end_stage": "tts", + "input": { + "sample_rate": SAMPLE_RATE, + # Enhancements + "noise_suppression_level": 2, + "auto_gain_dbfs": 15, + "volume_multiplier": 2.0, + }, + } + ) - # result - msg = await client.receive_json() - assert msg["success"] + # result + msg = await client.receive_json() + assert msg["success"] - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] - events.append(msg["event"]) + # run start + msg = await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + events.append(msg["event"]) - # stt - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # stt + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # One second of silence. - # This will pass through the audio enhancement pipeline, but we don't test - # the actual output. - await client.send_bytes(bytes([handler_id]) + bytes(BYTES_ONE_SECOND)) + # One second of silence. + # This will pass through the audio enhancement pipeline, but we don't test + # the actual output. 
+ await client.send_bytes(bytes([handler_id]) + bytes(BYTES_ONE_SECOND)) - # End of audio stream (handler id + empty payload) - await client.send_bytes(bytes([handler_id])) + # End of audio stream (handler id + empty payload) + await client.send_bytes(bytes([handler_id])) - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # intent - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # intent + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # text-to-speech - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # text-to-speech + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - pipeline_data: PipelineData = hass.data[DOMAIN] - pipeline_id = list(pipeline_data.pipeline_debug)[0] - pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] + pipeline_data: PipelineData = hass.data[DOMAIN] + pipeline_id = list(pipeline_data.pipeline_debug)[0] + pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline_debug/get", - "pipeline_id": pipeline_id, - "pipeline_run_id": pipeline_run_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"events": events} + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline_debug/get", + "pipeline_id": pipeline_id, + "pipeline_run_id": pipeline_run_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"events": events} async def test_wake_word_cooldown_same_id( diff --git a/tests/components/assist_satellite/__init__.py b/tests/components/assist_satellite/__init__.py new file mode 100644 index 00000000000..7e06ea3a4b9 --- /dev/null +++ b/tests/components/assist_satellite/__init__.py @@ -0,0 +1,3 @@ +"""Tests for Assist Satellite.""" + +ENTITY_ID = "assist_satellite.test_entity" diff --git a/tests/components/assist_satellite/conftest.py b/tests/components/assist_satellite/conftest.py new file 
mode 100644 index 00000000000..9e9bfd959e6 --- /dev/null +++ b/tests/components/assist_satellite/conftest.py @@ -0,0 +1,135 @@ +"""Test helpers for Assist Satellite.""" + +import pathlib +from unittest.mock import Mock + +import pytest + +from homeassistant.components.assist_pipeline import PipelineEvent +from homeassistant.components.assist_satellite import ( + DOMAIN as AS_DOMAIN, + AssistSatelliteAnnouncement, + AssistSatelliteConfiguration, + AssistSatelliteEntity, + AssistSatelliteEntityFeature, + AssistSatelliteWakeWord, +) +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.core import HomeAssistant, callback +from homeassistant.setup import async_setup_component + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, + setup_test_component_platform, +) + +TEST_DOMAIN = "test" + + +@pytest.fixture(autouse=True) +def mock_tts(mock_tts_cache_dir: pathlib.Path) -> None: + """Mock TTS cache dir fixture.""" + + +class MockAssistSatellite(AssistSatelliteEntity): + """Mock Assist Satellite Entity.""" + + _attr_name = "Test Entity" + _attr_supported_features = AssistSatelliteEntityFeature.ANNOUNCE + + def __init__(self) -> None: + """Initialize the mock entity.""" + self.events = [] + self.announcements: list[AssistSatelliteAnnouncement] = [] + self.config = AssistSatelliteConfiguration( + available_wake_words=[ + AssistSatelliteWakeWord( + id="1234", wake_word="okay nabu", trained_languages=["en"] + ), + AssistSatelliteWakeWord( + id="5678", + wake_word="hey jarvis", + trained_languages=["en"], + ), + ], + active_wake_words=["1234"], + max_active_wake_words=1, + ) + + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Handle pipeline events.""" + self.events.append(event) + + async def async_announce(self, announcement: AssistSatelliteAnnouncement) -> None: + """Announce media on a device.""" + self.announcements.append(announcement) + + @callback + def async_get_configuration(self) -> AssistSatelliteConfiguration: + """Get the current satellite configuration.""" + return self.config + + async def async_set_configuration( + self, config: AssistSatelliteConfiguration + ) -> None: + """Set the current satellite configuration.""" + self.config = config + + +@pytest.fixture +def entity() -> MockAssistSatellite: + """Mock Assist Satellite Entity.""" + return MockAssistSatellite() + + +@pytest.fixture +def config_entry(hass: HomeAssistant) -> ConfigEntry: + """Mock config entry.""" + entry = MockConfigEntry(domain=TEST_DOMAIN) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +async def init_components( + hass: HomeAssistant, + config_entry: ConfigEntry, + entity: MockAssistSatellite, +) -> None: + """Initialize components.""" + assert await async_setup_component(hass, "homeassistant", {}) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [AS_DOMAIN]) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Unload test config entry.""" + await hass.config_entries.async_forward_entry_unload(config_entry, AS_DOMAIN) + return True + + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + setup_test_component_platform(hass, AS_DOMAIN, [entity], 
from_config_entry=True) + mock_platform(hass, f"{TEST_DOMAIN}.config_flow", Mock()) + + with mock_config_flow(TEST_DOMAIN, ConfigFlow): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/assist_satellite/test_entity.py b/tests/components/assist_satellite/test_entity.py new file mode 100644 index 00000000000..884ba36782c --- /dev/null +++ b/tests/components/assist_satellite/test_entity.py @@ -0,0 +1,466 @@ +"""Test the Assist Satellite entity.""" + +import asyncio +from unittest.mock import patch + +import pytest + +from homeassistant.components import stt +from homeassistant.components.assist_pipeline import ( + OPTION_PREFERRED, + AudioSettings, + Pipeline, + PipelineEvent, + PipelineEventType, + PipelineStage, + async_get_pipeline, + async_update_pipeline, + vad, +) +from homeassistant.components.assist_satellite import ( + AssistSatelliteAnnouncement, + SatelliteBusyError, +) +from homeassistant.components.assist_satellite.entity import AssistSatelliteState +from homeassistant.components.media_source import PlayMedia +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import Context, HomeAssistant + +from . import ENTITY_ID +from .conftest import MockAssistSatellite + + +async def test_entity_state( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test entity state represent events.""" + + state = hass.states.get(ENTITY_ID) + assert state is not None + assert state.state == AssistSatelliteState.IDLE + + context = Context() + audio_stream = object() + + entity.async_set_context(context) + + with patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream" + ) as mock_start_pipeline: + await entity.async_accept_pipeline_from_satellite(audio_stream) + + assert mock_start_pipeline.called + kwargs = mock_start_pipeline.call_args[1] + assert kwargs["context"] is context + assert kwargs["event_callback"] == entity._internal_on_pipeline_event + assert kwargs["stt_metadata"] == stt.SpeechMetadata( + language="", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ) + assert kwargs["stt_stream"] is audio_stream + assert kwargs["pipeline_id"] is None + assert kwargs["device_id"] is None + assert kwargs["tts_audio_output"] is None + assert kwargs["wake_word_phrase"] is None + assert kwargs["audio_settings"] == AudioSettings( + silence_seconds=vad.VadSensitivity.to_seconds(vad.VadSensitivity.DEFAULT) + ) + assert kwargs["start_stage"] == PipelineStage.STT + assert kwargs["end_stage"] == PipelineStage.TTS + + for event_type, event_data, expected_state in ( + (PipelineEventType.RUN_START, {}, AssistSatelliteState.IDLE), + (PipelineEventType.RUN_END, {}, AssistSatelliteState.IDLE), + ( + PipelineEventType.WAKE_WORD_START, + {}, + AssistSatelliteState.IDLE, + ), + (PipelineEventType.WAKE_WORD_END, {}, AssistSatelliteState.IDLE), + (PipelineEventType.STT_START, {}, AssistSatelliteState.LISTENING), + (PipelineEventType.STT_VAD_START, {}, AssistSatelliteState.LISTENING), + (PipelineEventType.STT_VAD_END, {}, AssistSatelliteState.LISTENING), + (PipelineEventType.STT_END, {}, AssistSatelliteState.LISTENING), + (PipelineEventType.INTENT_START, {}, AssistSatelliteState.PROCESSING), + ( + PipelineEventType.INTENT_END, + { + "intent_output": { + 
"conversation_id": "mock-conversation-id", + } + }, + AssistSatelliteState.PROCESSING, + ), + (PipelineEventType.TTS_START, {}, AssistSatelliteState.RESPONDING), + (PipelineEventType.TTS_END, {}, AssistSatelliteState.RESPONDING), + (PipelineEventType.ERROR, {}, AssistSatelliteState.RESPONDING), + ): + kwargs["event_callback"](PipelineEvent(event_type, event_data)) + state = hass.states.get(ENTITY_ID) + assert state.state == expected_state, event_type + + entity.tts_response_finished() + state = hass.states.get(ENTITY_ID) + assert state.state == AssistSatelliteState.IDLE + + +async def test_new_pipeline_cancels_pipeline( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, +) -> None: + """Test that a new pipeline run cancels any running pipeline.""" + pipeline1_started = asyncio.Event() + pipeline1_finished = asyncio.Event() + pipeline1_cancelled = asyncio.Event() + pipeline2_finished = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, **kwargs): + if not pipeline1_started.is_set(): + # First pipeline run + pipeline1_started.set() + + # Wait for pipeline to be cancelled + try: + await pipeline1_finished.wait() + except asyncio.CancelledError: + pipeline1_cancelled.set() + raise + else: + # Second pipeline run + pipeline2_finished.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + ): + hass.async_create_task( + entity.async_accept_pipeline_from_satellite( + object(), # type: ignore[arg-type] + ) + ) + + async with asyncio.timeout(1): + await pipeline1_started.wait() + + # Start a second pipeline + await entity.async_accept_pipeline_from_satellite( + object(), # type: ignore[arg-type] + ) + await pipeline1_cancelled.wait() + await pipeline2_finished.wait() + + +@pytest.mark.parametrize( + ("service_data", "expected_params"), + [ + ( + {"message": "Hello"}, + AssistSatelliteAnnouncement( + "Hello", "https://www.home-assistant.io/resolved.mp3", "tts" + ), + ), + ( + { + "message": "Hello", + "media_id": "media-source://bla", + }, + AssistSatelliteAnnouncement( + "Hello", "https://www.home-assistant.io/resolved.mp3", "media_id" + ), + ), + ( + {"media_id": "http://example.com/bla.mp3"}, + AssistSatelliteAnnouncement("", "http://example.com/bla.mp3", "url"), + ), + ], +) +async def test_announce( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + service_data: dict, + expected_params: tuple[str, str], +) -> None: + """Test announcing on a device.""" + await async_update_pipeline( + hass, + async_get_pipeline(hass), + tts_engine="tts.mock_entity", + tts_language="en", + tts_voice="test-voice", + ) + + entity._attr_tts_options = {"test-option": "test-value"} + + original_announce = entity.async_announce + announce_started = asyncio.Event() + + async def async_announce(announcement): + # Verify state change + assert entity.state == AssistSatelliteState.RESPONDING + await original_announce(announcement) + announce_started.set() + + def tts_generate_media_source_id( + hass: HomeAssistant, + message: str, + engine: str | None = None, + language: str | None = None, + options: dict | None = None, + cache: bool | None = None, + ): + # Check that TTS options are passed here + assert options == {"test-option": "test-value", "voice": "test-voice"} + return "media-source://bla" + + with ( + patch( + "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + 
new=tts_generate_media_source_id, + ), + patch( + "homeassistant.components.media_source.async_resolve_media", + return_value=PlayMedia( + url="https://www.home-assistant.io/resolved.mp3", + mime_type="audio/mp3", + ), + ), + patch.object(entity, "async_announce", new=async_announce), + ): + await hass.services.async_call( + "assist_satellite", + "announce", + service_data, + target={"entity_id": "assist_satellite.test_entity"}, + blocking=True, + ) + assert entity.state == AssistSatelliteState.IDLE + + assert entity.announcements[0] == expected_params + + +async def test_announce_busy( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, +) -> None: + """Test that announcing while an announcement is in progress raises an error.""" + media_id = "https://www.home-assistant.io/resolved.mp3" + announce_started = asyncio.Event() + got_error = asyncio.Event() + + async def async_announce(announcement): + announce_started.set() + + # Block so we can do another announcement + await got_error.wait() + + with patch.object(entity, "async_announce", new=async_announce): + announce_task = asyncio.create_task( + entity.async_internal_announce(media_id=media_id) + ) + async with asyncio.timeout(1): + await announce_started.wait() + + # Try to do a second announcement + with pytest.raises(SatelliteBusyError): + await entity.async_internal_announce(media_id=media_id) + + # Avoid lingering task + got_error.set() + await announce_task + + +async def test_announce_cancels_pipeline( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, +) -> None: + """Test that announcements cancel any running pipeline.""" + media_id = "https://www.home-assistant.io/resolved.mp3" + pipeline_started = asyncio.Event() + pipeline_finished = asyncio.Event() + pipeline_cancelled = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, **kwargs): + pipeline_started.set() + + # Wait for pipeline to be cancelled + try: + await pipeline_finished.wait() + except asyncio.CancelledError: + pipeline_cancelled.set() + raise + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch.object(entity, "async_announce") as mock_async_announce, + ): + hass.async_create_task( + entity.async_accept_pipeline_from_satellite( + object(), # type: ignore[arg-type] + ) + ) + + async with asyncio.timeout(1): + await pipeline_started.wait() + await entity.async_internal_announce(None, media_id) + await pipeline_cancelled.wait() + + mock_async_announce.assert_called_once() + + +async def test_context_refresh( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test that the context will be automatically refreshed.""" + audio_stream = object() + + # Remove context + entity._context = None + + with patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream" + ): + await entity.async_accept_pipeline_from_satellite(audio_stream) + + # Context should have been refreshed + assert entity._context is not None + + +async def test_pipeline_entity( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test getting pipeline from an entity.""" + audio_stream = object() + pipeline = Pipeline( + conversation_engine="test", + conversation_language="en", + language="en", + name="test-pipeline", + stt_engine=None, + stt_language=None, + tts_engine=None, + tts_language=None, + 
tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + pipeline_entity_id = "select.pipeline" + hass.states.async_set(pipeline_entity_id, pipeline.name) + entity._attr_pipeline_entity_id = pipeline_entity_id + + done = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, pipeline_id: str, **kwargs): + assert pipeline_id == pipeline.id + done.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipelines", + return_value=[pipeline], + ), + ): + async with asyncio.timeout(1): + await entity.async_accept_pipeline_from_satellite(audio_stream) + await done.wait() + + +async def test_pipeline_entity_preferred( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test getting pipeline from an entity with a preferred state.""" + audio_stream = object() + + pipeline_entity_id = "select.pipeline" + hass.states.async_set(pipeline_entity_id, OPTION_PREFERRED) + entity._attr_pipeline_entity_id = pipeline_entity_id + + done = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, pipeline_id: str, **kwargs): + # Preferred pipeline + assert pipeline_id is None + done.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + ): + async with asyncio.timeout(1): + await entity.async_accept_pipeline_from_satellite(audio_stream) + await done.wait() + + +async def test_vad_sensitivity_entity( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test getting vad sensitivity from an entity.""" + audio_stream = object() + + vad_sensitivity_entity_id = "select.vad_sensitivity" + hass.states.async_set(vad_sensitivity_entity_id, vad.VadSensitivity.AGGRESSIVE) + entity._attr_vad_sensitivity_entity_id = vad_sensitivity_entity_id + + done = asyncio.Event() + + async def async_pipeline_from_audio_stream( + *args, audio_settings: AudioSettings, **kwargs + ): + # Verify vad sensitivity + assert audio_settings.silence_seconds == vad.VadSensitivity.to_seconds( + vad.VadSensitivity.AGGRESSIVE + ) + done.set() + + with patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ): + async with asyncio.timeout(1): + await entity.async_accept_pipeline_from_satellite(audio_stream) + await done.wait() + + +async def test_pipeline_entity_not_found( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test that setting the pipeline entity id to a non-existent entity raises an error.""" + audio_stream = object() + + # Set to an entity that doesn't exist + entity._attr_pipeline_entity_id = "select.pipeline" + + with pytest.raises(RuntimeError): + await entity.async_accept_pipeline_from_satellite(audio_stream) + + +async def test_vad_sensitivity_entity_not_found( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test that setting the vad sensitivity entity id to a non-existent entity raises an error.""" + audio_stream = object() + + # Set to an entity that doesn't exist + entity._attr_vad_sensitivity_entity_id = "select.vad_sensitivity" + + with pytest.raises(RuntimeError): + await entity.async_accept_pipeline_from_satellite(audio_stream) diff --git 
a/tests/components/assist_satellite/test_websocket_api.py b/tests/components/assist_satellite/test_websocket_api.py new file mode 100644 index 00000000000..257961a5b32 --- /dev/null +++ b/tests/components/assist_satellite/test_websocket_api.py @@ -0,0 +1,518 @@ +"""Test WebSocket API.""" + +import asyncio +from http import HTTPStatus +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.assist_pipeline import PipelineStage +from homeassistant.components.assist_satellite.websocket_api import ( + CONNECTION_TEST_TIMEOUT, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from . import ENTITY_ID +from .conftest import MockAssistSatellite + +from tests.common import MockUser +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +async def test_intercept_wake_word( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test intercepting a wake word.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] is None + subscription_id = msg["id"] + + await entity.async_accept_pipeline_from_satellite( + object(), # type: ignore[arg-type] + start_stage=PipelineStage.STT, + wake_word_phrase="ok, nabu", + ) + + async with asyncio.timeout(1): + msg = await ws_client.receive_json() + + assert msg["id"] == subscription_id + assert msg["type"] == "event" + assert msg["event"] == {"wake_word_phrase": "ok, nabu"} + + +async def test_intercept_wake_word_requires_on_device_wake_word( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test intercepting a wake word fails if detection happens in HA.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + async with asyncio.timeout(1): + msg = await ws_client.receive_json() + + assert msg["success"] + assert msg["result"] is None + + await entity.async_accept_pipeline_from_satellite( + object(), # type: ignore[arg-type] + # Emulate wake word processing in Home Assistant + start_stage=PipelineStage.WAKE_WORD, + ) + + async with asyncio.timeout(1): + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"] == { + "code": "home_assistant_error", + "message": "Only on-device wake words currently supported", + } + + +async def test_intercept_wake_word_requires_wake_word_phrase( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test intercepting a wake word fails if no wake word phrase is provided.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + async with asyncio.timeout(1): + msg = await ws_client.receive_json() + + assert msg["success"] + assert msg["result"] is None + + await entity.async_accept_pipeline_from_satellite( + object(), # type: ignore[arg-type] + start_stage=PipelineStage.STT, + # We are not passing wake word phrase + ) + + async with asyncio.timeout(1): + msg = await
ws_client.receive_json() + + assert not msg["success"] + assert msg["error"] == { + "code": "home_assistant_error", + "message": "No wake word phrase provided", + } + + +async def test_intercept_wake_word_require_admin( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, + hass_admin_user: MockUser, +) -> None: + """Test intercepting a wake word requires admin access.""" + # Remove admin permission and verify we're not allowed + hass_admin_user.groups = [] + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + async with asyncio.timeout(1): + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"] == { + "code": "unauthorized", + "message": "Unauthorized", + } + + +async def test_intercept_wake_word_invalid_satellite( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test intercepting a wake word with an unknown satellite entity id.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": "assist_satellite.invalid", + } + ) + async with asyncio.timeout(1): + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"] == { + "code": "not_found", + "message": "Entity not found", + } + + +async def test_intercept_wake_word_twice( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test intercepting a wake word twice cancels the previous request.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + async with asyncio.timeout(1): + msg = await ws_client.receive_json() + + assert msg["success"] + assert msg["result"] is None + + task = hass.async_create_task(ws_client.receive_json()) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + # Should get an error from previous subscription + async with asyncio.timeout(1): + msg = await task + + assert not msg["success"] + assert msg["error"] == { + "code": "home_assistant_error", + "message": "Wake word interception already in progress", + } + + # Response to second subscription + async with asyncio.timeout(1): + msg = await ws_client.receive_json() + + assert msg["success"] + assert msg["result"] is None + + +async def test_intercept_wake_word_unsubscribe( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test that closing the websocket connection stops interception.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + # Wait for interception to start + for _ in range(3): + await asyncio.sleep(0) + + async def receive_json(): + with pytest.raises(TypeError): + # Raises TypeError when connection is closed + await ws_client.receive_json() + + task = hass.async_create_task(receive_json()) + + # Close connection + await ws_client.close() + await task + + with ( + patch( +
"homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + ) as mock_pipeline_from_audio_stream, + ): + # Start a pipeline with a wake word + await entity.async_accept_pipeline_from_satellite( + object(), + wake_word_phrase="ok, nabu", # type: ignore[arg-type] + ) + + # Wake word should not be intercepted + mock_pipeline_from_audio_stream.assert_called_once() + + +async def test_get_configuration( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test getting satellite configuration.""" + ws_client = await hass_ws_client(hass) + + with ( + patch.object(entity, "_attr_pipeline_entity_id", "select.test_pipeline"), + patch.object(entity, "_attr_vad_sensitivity_entity_id", "select.test_vad"), + ): + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/get_configuration", + "entity_id": ENTITY_ID, + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] == { + "active_wake_words": ["1234"], + "available_wake_words": [ + {"id": "1234", "trained_languages": ["en"], "wake_word": "okay nabu"}, + {"id": "5678", "trained_languages": ["en"], "wake_word": "hey jarvis"}, + ], + "max_active_wake_words": 1, + "pipeline_entity_id": "select.test_pipeline", + "vad_entity_id": "select.test_vad", + } + + +async def test_set_wake_words( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test setting active wake words.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/set_wake_words", + "entity_id": ENTITY_ID, + "wake_word_ids": ["5678"], + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + + # Verify change + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/get_configuration", + "entity_id": ENTITY_ID, + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"].get("active_wake_words") == ["5678"] + + +async def test_set_wake_words_exceed_maximum( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test setting too many active wake words.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/set_wake_words", + "entity_id": ENTITY_ID, + "wake_word_ids": ["1234", "5678"], # max of 1 + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"] == { + "code": "not_supported", + "message": "Maximum number of active wake words is 1", + } + + +async def test_set_wake_words_bad_id( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test setting active wake words with a bad id.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/set_wake_words", + "entity_id": ENTITY_ID, + "wake_word_ids": ["abcd"], # not an available id + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"] == { + "code": "not_supported", + "message": "Wake word id is not supported: abcd", + } + + +async def test_connection_test( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, + hass_client: ClientSessionGenerator, 
+) -> None: + """Test connection test.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/test_connection", + "entity_id": ENTITY_ID, + } + ) + + for _ in range(3): + await asyncio.sleep(0) + + assert len(entity.announcements) == 1 + assert entity.announcements[0].message == "" + announcement_media_id = entity.announcements[0].media_id + hass_url = "http://10.10.10.10:8123" + assert announcement_media_id.startswith( + f"{hass_url}/api/assist_satellite/connection_test/" + ) + + # Fake satellite fetches the URL + client = await hass_client() + resp = await client.get(announcement_media_id[len(hass_url) :]) + assert resp.status == HTTPStatus.OK + + response = await ws_client.receive_json() + assert response["success"] + assert response["result"] == {"status": "success"} + + +async def test_connection_test_timeout( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, + hass_client: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection test timeout.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/test_connection", + "entity_id": ENTITY_ID, + } + ) + + for _ in range(3): + await asyncio.sleep(0) + + assert len(entity.announcements) == 1 + assert entity.announcements[0].message == "" + announcement_media_id = entity.announcements[0].media_id + hass_url = "http://10.10.10.10:8123" + assert announcement_media_id.startswith( + f"{hass_url}/api/assist_satellite/connection_test/" + ) + + freezer.tick(CONNECTION_TEST_TIMEOUT + 1) + + # Timeout + response = await ws_client.receive_json() + assert response["success"] + assert response["result"] == {"status": "timeout"} + + +async def test_connection_test_invalid_satellite( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test connection test with unknown entity id.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/test_connection", + "entity_id": "assist_satellite.invalid", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Entity not found", + } + + +async def test_connection_test_timeout_announcement_unsupported( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test connection test entity which does not support announce.""" + ws_client = await hass_ws_client(hass) + + # Disable announce support + entity.supported_features = 0 + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/test_connection", + "entity_id": ENTITY_ID, + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_supported", + "message": "Entity does not support announce", + } diff --git a/tests/components/asuswrt/conftest.py b/tests/components/asuswrt/conftest.py index 7710e26707c..f850a26b997 100644 --- a/tests/components/asuswrt/conftest.py +++ b/tests/components/asuswrt/conftest.py @@ -16,12 +16,30 @@ ASUSWRT_LEGACY_LIB = f"{ASUSWRT_BASE}.bridge.AsusWrtLegacy" MOCK_BYTES_TOTAL = 60000000000, 50000000000 MOCK_BYTES_TOTAL_HTTP = dict(enumerate(MOCK_BYTES_TOTAL)) +MOCK_CPU_USAGE = { + "cpu1_usage": 0.1, + 
"cpu2_usage": 0.2, + "cpu3_usage": 0.3, + "cpu4_usage": 0.4, + "cpu5_usage": 0.5, + "cpu6_usage": 0.6, + "cpu7_usage": 0.7, + "cpu8_usage": 0.8, + "cpu_total_usage": 0.9, +} MOCK_CURRENT_TRANSFER_RATES = 20000000, 10000000 MOCK_CURRENT_TRANSFER_RATES_HTTP = dict(enumerate(MOCK_CURRENT_TRANSFER_RATES)) MOCK_LOAD_AVG_HTTP = {"load_avg_1": 1.1, "load_avg_5": 1.2, "load_avg_15": 1.3} MOCK_LOAD_AVG = list(MOCK_LOAD_AVG_HTTP.values()) +MOCK_MEMORY_USAGE = { + "mem_usage_perc": 52.4, + "mem_total": 1048576, + "mem_free": 393216, + "mem_used": 655360, +} MOCK_TEMPERATURES = {"2.4GHz": 40.2, "5.0GHz": 0, "CPU": 71.2} MOCK_TEMPERATURES_HTTP = {**MOCK_TEMPERATURES, "5.0GHz_2": 40.3, "6.0GHz": 40.4} +MOCK_UPTIME = {"last_boot": "2024-08-02T00:47:00+00:00", "uptime": 1625927} @pytest.fixture(name="patch_setup_entry") @@ -121,6 +139,11 @@ def mock_controller_connect_http(mock_devices_http): service_mock.return_value.async_get_temperatures.return_value = { k: v for k, v in MOCK_TEMPERATURES_HTTP.items() if k != "5.0GHz" } + service_mock.return_value.async_get_cpu_usage.return_value = MOCK_CPU_USAGE + service_mock.return_value.async_get_memory_usage.return_value = ( + MOCK_MEMORY_USAGE + ) + service_mock.return_value.async_get_uptime.return_value = MOCK_UPTIME yield service_mock @@ -133,13 +156,22 @@ def mock_controller_connect_http_sens_fail(connect_http): connect_http.return_value.async_get_traffic_rates.side_effect = AsusWrtError connect_http.return_value.async_get_loadavg.side_effect = AsusWrtError connect_http.return_value.async_get_temperatures.side_effect = AsusWrtError + connect_http.return_value.async_get_cpu_usage.side_effect = AsusWrtError + connect_http.return_value.async_get_memory_usage.side_effect = AsusWrtError + connect_http.return_value.async_get_uptime.side_effect = AsusWrtError @pytest.fixture(name="connect_http_sens_detect") def mock_controller_connect_http_sens_detect(): """Mock a successful sensor detection using http library.""" - with patch( - f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_temperature_sensors", - return_value=[*MOCK_TEMPERATURES_HTTP], - ) as mock_sens_detect: - yield mock_sens_detect + with ( + patch( + f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_temperature_sensors", + return_value=[*MOCK_TEMPERATURES_HTTP], + ) as mock_sens_temp_detect, + patch( + f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_cpu_sensors", + return_value=[*MOCK_CPU_USAGE], + ) as mock_sens_cpu_detect, + ): + yield mock_sens_temp_detect, mock_sens_cpu_detect diff --git a/tests/components/asuswrt/test_diagnostics.py b/tests/components/asuswrt/test_diagnostics.py index 207f3ba25f0..1acaf686567 100644 --- a/tests/components/asuswrt/test_diagnostics.py +++ b/tests/components/asuswrt/test_diagnostics.py @@ -38,4 +38,4 @@ async def test_diagnostics( hass, hass_client, mock_config_entry ) - assert result["entry"] == entry_dict + assert result["entry"] == entry_dict | {"discovery_keys": {}} diff --git a/tests/components/asuswrt/test_sensor.py b/tests/components/asuswrt/test_sensor.py index 3de830f3f34..0036c40a6f2 100644 --- a/tests/components/asuswrt/test_sensor.py +++ b/tests/components/asuswrt/test_sensor.py @@ -2,6 +2,7 @@ from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory from pyasuswrt.exceptions import AsusWrtError, AsusWrtNotAvailableInfoError import pytest @@ -10,10 +11,13 @@ from homeassistant.components.asuswrt.const import ( CONF_INTERFACE, DOMAIN, SENSORS_BYTES, + SENSORS_CPU, SENSORS_LOAD_AVG, + SENSORS_MEMORY, SENSORS_RATES, 
SENSORS_TEMPERATURES, SENSORS_TEMPERATURES_LEGACY, + SENSORS_UPTIME, ) from homeassistant.components.device_tracker import CONF_CONSIDER_HOME from homeassistant.config_entries import ConfigEntryState @@ -26,7 +30,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import slugify -from homeassistant.util.dt import utcnow from .common import ( CONFIG_DATA_HTTP, @@ -42,7 +45,14 @@ from tests.common import MockConfigEntry, async_fire_time_changed SENSORS_DEFAULT = [*SENSORS_BYTES, *SENSORS_RATES] SENSORS_ALL_LEGACY = [*SENSORS_DEFAULT, *SENSORS_LOAD_AVG, *SENSORS_TEMPERATURES_LEGACY] -SENSORS_ALL_HTTP = [*SENSORS_DEFAULT, *SENSORS_LOAD_AVG, *SENSORS_TEMPERATURES] +SENSORS_ALL_HTTP = [ + *SENSORS_DEFAULT, + *SENSORS_CPU, + *SENSORS_LOAD_AVG, + *SENSORS_MEMORY, + *SENSORS_TEMPERATURES, + *SENSORS_UPTIME, +] @pytest.fixture(name="create_device_registry_devices") @@ -95,6 +105,7 @@ def _setup_entry(hass: HomeAssistant, config, sensors, unique_id=None): async def _test_sensors( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, mock_devices, config, entry_unique_id, @@ -125,7 +136,8 @@ async def _test_sensors( # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert hass.states.get(f"{device_tracker.DOMAIN}.test").state == STATE_HOME @@ -139,7 +151,8 @@ async def _test_sensors( # remove first tracked device mock_devices.pop(MOCK_MACS[0]) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # consider home option set, all devices still home but only 1 device connected @@ -160,7 +173,8 @@ async def _test_sensors( config_entry, options={CONF_CONSIDER_HOME: 0} ) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # consider home option set to 0, device "test" not home @@ -176,13 +190,16 @@ async def _test_sensors( ) async def test_sensors_legacy( hass: HomeAssistant, - connect_legacy, + freezer: FrozenDateTimeFactory, mock_devices_legacy, - create_device_registry_devices, entry_unique_id, + connect_legacy, + create_device_registry_devices, ) -> None: """Test creating AsusWRT default sensors and tracker with legacy protocol.""" - await _test_sensors(hass, mock_devices_legacy, CONFIG_DATA_TELNET, entry_unique_id) + await _test_sensors( + hass, freezer, mock_devices_legacy, CONFIG_DATA_TELNET, entry_unique_id + ) @pytest.mark.parametrize( @@ -191,16 +208,21 @@ async def test_sensors_legacy( ) async def test_sensors_http( hass: HomeAssistant, - connect_http, + freezer: FrozenDateTimeFactory, mock_devices_http, - create_device_registry_devices, entry_unique_id, + connect_http, + create_device_registry_devices, ) -> None: """Test creating AsusWRT default sensors and tracker with http protocol.""" - await _test_sensors(hass, mock_devices_http, CONFIG_DATA_HTTP, entry_unique_id) + await _test_sensors( + hass, freezer, mock_devices_http, CONFIG_DATA_HTTP, entry_unique_id + ) -async def _test_loadavg_sensors(hass: HomeAssistant, config) -> None: +async def 
_test_loadavg_sensors( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, config +) -> None: """Test creating an AsusWRT load average sensors.""" config_entry, sensor_prefix = _setup_entry(hass, config, SENSORS_LOAD_AVG) config_entry.add_to_hass(hass) @@ -208,7 +230,8 @@ async def _test_loadavg_sensors(hass: HomeAssistant, config) -> None: # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # assert temperature sensor available @@ -217,18 +240,22 @@ async def _test_loadavg_sensors(hass: HomeAssistant, config) -> None: assert hass.states.get(f"{sensor_prefix}_sensor_load_avg15").state == "1.3" -async def test_loadavg_sensors_legacy(hass: HomeAssistant, connect_legacy) -> None: +async def test_loadavg_sensors_legacy( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_legacy +) -> None: """Test creating an AsusWRT load average sensors.""" - await _test_loadavg_sensors(hass, CONFIG_DATA_TELNET) + await _test_loadavg_sensors(hass, freezer, CONFIG_DATA_TELNET) -async def test_loadavg_sensors_http(hass: HomeAssistant, connect_http) -> None: +async def test_loadavg_sensors_http( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http +) -> None: """Test creating an AsusWRT load average sensors.""" - await _test_loadavg_sensors(hass, CONFIG_DATA_HTTP) + await _test_loadavg_sensors(hass, freezer, CONFIG_DATA_HTTP) async def test_loadavg_sensors_unaivalable_http( - hass: HomeAssistant, connect_http + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http ) -> None: """Test load average sensors no available using http.""" config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_LOAD_AVG) @@ -241,7 +268,8 @@ async def test_loadavg_sensors_unaivalable_http( # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # assert load average sensors not available @@ -271,7 +299,9 @@ async def test_temperature_sensors_http_fail( assert not hass.states.get(f"{sensor_prefix}_6_0ghz") -async def _test_temperature_sensors(hass: HomeAssistant, config, sensors) -> str: +async def _test_temperature_sensors( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, config, sensors +) -> str: """Test creating a AsusWRT temperature sensors.""" config_entry, sensor_prefix = _setup_entry(hass, config, sensors) config_entry.add_to_hass(hass) @@ -279,16 +309,19 @@ async def _test_temperature_sensors(hass: HomeAssistant, config, sensors) -> str # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() return sensor_prefix -async def test_temperature_sensors_legacy(hass: HomeAssistant, connect_legacy) -> None: +async def test_temperature_sensors_legacy( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_legacy +) -> None: """Test creating a AsusWRT temperature sensors.""" sensor_prefix = await _test_temperature_sensors( - hass, CONFIG_DATA_TELNET, 
SENSORS_TEMPERATURES_LEGACY + hass, freezer, CONFIG_DATA_TELNET, SENSORS_TEMPERATURES_LEGACY ) # assert temperature sensor available assert hass.states.get(f"{sensor_prefix}_2_4ghz").state == "40.2" @@ -296,10 +329,12 @@ async def test_temperature_sensors_legacy(hass: HomeAssistant, connect_legacy) - assert not hass.states.get(f"{sensor_prefix}_5_0ghz") -async def test_temperature_sensors_http(hass: HomeAssistant, connect_http) -> None: +async def test_temperature_sensors_http( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http +) -> None: """Test creating a AsusWRT temperature sensors.""" sensor_prefix = await _test_temperature_sensors( - hass, CONFIG_DATA_HTTP, SENSORS_TEMPERATURES + hass, freezer, CONFIG_DATA_HTTP, SENSORS_TEMPERATURES ) # assert temperature sensor available assert hass.states.get(f"{sensor_prefix}_2_4ghz").state == "40.2" @@ -309,6 +344,97 @@ async def test_temperature_sensors_http(hass: HomeAssistant, connect_http) -> No assert not hass.states.get(f"{sensor_prefix}_5_0ghz") +async def test_cpu_sensors_http_fail( + hass: HomeAssistant, connect_http_sens_fail +) -> None: + """Test fail creating AsusWRT cpu sensors.""" + config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_CPU) + config_entry.add_to_hass(hass) + + # initial devices setup + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + # assert cpu availability exception is handled correctly + assert not hass.states.get(f"{sensor_prefix}_cpu1_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu2_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu3_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu4_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu5_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu6_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu7_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu8_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu_total_usage") + + +async def test_cpu_sensors_http( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http +) -> None: + """Test creating AsusWRT cpu sensors.""" + config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_CPU) + config_entry.add_to_hass(hass) + + # initial devices setup + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # assert cpu sensors available + assert hass.states.get(f"{sensor_prefix}_cpu1_usage").state == "0.1" + assert hass.states.get(f"{sensor_prefix}_cpu2_usage").state == "0.2" + assert hass.states.get(f"{sensor_prefix}_cpu3_usage").state == "0.3" + assert hass.states.get(f"{sensor_prefix}_cpu4_usage").state == "0.4" + assert hass.states.get(f"{sensor_prefix}_cpu5_usage").state == "0.5" + assert hass.states.get(f"{sensor_prefix}_cpu6_usage").state == "0.6" + assert hass.states.get(f"{sensor_prefix}_cpu7_usage").state == "0.7" + assert hass.states.get(f"{sensor_prefix}_cpu8_usage").state == "0.8" + assert hass.states.get(f"{sensor_prefix}_cpu_total_usage").state == "0.9" + + +async def test_memory_sensors_http( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http +) -> None: + """Test creating AsusWRT memory sensors.""" + config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_MEMORY) + config_entry.add_to_hass(hass) + + # initial devices setup + assert await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # assert memory sensors available + assert hass.states.get(f"{sensor_prefix}_mem_usage_perc").state == "52.4" + assert hass.states.get(f"{sensor_prefix}_mem_free").state == "384.0" + assert hass.states.get(f"{sensor_prefix}_mem_used").state == "640.0" + + +async def test_uptime_sensors_http( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http +) -> None: + """Test creating AsusWRT uptime sensors.""" + config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_UPTIME) + config_entry.add_to_hass(hass) + + # initial devices setup + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # assert uptime sensors available + assert ( + hass.states.get(f"{sensor_prefix}_sensor_last_boot").state + == "2024-08-02T00:47:00+00:00" + ) + assert hass.states.get(f"{sensor_prefix}_sensor_uptime").state == "1625927" + + @pytest.mark.parametrize( "side_effect", [OSError, None], @@ -359,7 +485,9 @@ async def test_connect_fail_http( assert config_entry.state is ConfigEntryState.SETUP_RETRY -async def _test_sensors_polling_fails(hass: HomeAssistant, config, sensors) -> None: +async def _test_sensors_polling_fails( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, config, sensors +) -> None: """Test AsusWRT sensors are unavailable when polling fails.""" config_entry, sensor_prefix = _setup_entry(hass, config, sensors) config_entry.add_to_hass(hass) @@ -367,7 +495,8 @@ async def _test_sensors_polling_fails(hass: HomeAssistant, config, sensors) -> N # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() for sensor_name in sensors: @@ -380,22 +509,28 @@ async def _test_sensors_polling_fails(hass: HomeAssistant, config, sensors) -> N async def test_sensors_polling_fails_legacy( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, connect_legacy_sens_fail, ) -> None: """Test AsusWRT sensors are unavailable when polling fails.""" - await _test_sensors_polling_fails(hass, CONFIG_DATA_TELNET, SENSORS_ALL_LEGACY) + await _test_sensors_polling_fails( + hass, freezer, CONFIG_DATA_TELNET, SENSORS_ALL_LEGACY + ) async def test_sensors_polling_fails_http( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, connect_http_sens_fail, connect_http_sens_detect, ) -> None: """Test AsusWRT sensors are unavailable when polling fails.""" - await _test_sensors_polling_fails(hass, CONFIG_DATA_HTTP, SENSORS_ALL_HTTP) + await _test_sensors_polling_fails(hass, freezer, CONFIG_DATA_HTTP, SENSORS_ALL_HTTP) -async def test_options_reload(hass: HomeAssistant, connect_legacy) -> None: +async def test_options_reload( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_legacy +) -> None: """Test AsusWRT integration is reload changing an options that require this.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -408,7 +543,8 @@ async def test_options_reload(hass: HomeAssistant, connect_legacy) -> None: await hass.async_block_till_done() assert 
connect_legacy.return_value.connection.async_connect.call_count == 1 - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # change an option that requires integration reload @@ -451,7 +587,10 @@ async def test_unique_id_migration( async def test_decorator_errors( - hass: HomeAssistant, connect_legacy, mock_available_temps + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + connect_legacy, + mock_available_temps, ) -> None: """Test AsusWRT sensors are unavailable on decorator type check error.""" sensors = [*SENSORS_BYTES, *SENSORS_TEMPERATURES_LEGACY] @@ -465,7 +604,8 @@ async def test_decorator_errors( # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() for sensor_name in sensors: diff --git a/tests/components/atag/__init__.py b/tests/components/atag/__init__.py index adea1e07be7..a240cc47c7f 100644 --- a/tests/components/atag/__init__.py +++ b/tests/components/atag/__init__.py @@ -1,6 +1,8 @@ """Tests for the Atag integration.""" -from homeassistant.components.atag import DOMAIN, AtagException +from pyatag import AtagException + +from homeassistant.components.atag import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant diff --git a/tests/components/atag/test_climate.py b/tests/components/atag/test_climate.py index bc78ee58216..b4f2a0f3f0f 100644 --- a/tests/components/atag/test_climate.py +++ b/tests/components/atag/test_climate.py @@ -2,7 +2,8 @@ from unittest.mock import PropertyMock, patch -from homeassistant.components.atag.climate import DOMAIN, PRESET_MAP +from homeassistant.components.atag import DOMAIN +from homeassistant.components.atag.climate import PRESET_MAP from homeassistant.components.climate import ( ATTR_HVAC_ACTION, ATTR_HVAC_MODE, @@ -104,10 +105,10 @@ async def test_update_failed( entry = await init_integration(hass, aioclient_mock) await async_setup_component(hass, HA_DOMAIN, {}) assert hass.states.get(CLIMATE_ID).state == HVACMode.HEAT - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data with patch("pyatag.AtagOne.update", side_effect=TimeoutError) as updater: await coordinator.async_refresh() await hass.async_block_till_done() updater.assert_called_once() assert not coordinator.last_update_success - assert coordinator.data.id == UID + assert coordinator.atag.id == UID diff --git a/tests/components/atag/test_init.py b/tests/components/atag/test_init.py index 59f38ae7bfe..7c65150fbf6 100644 --- a/tests/components/atag/test_init.py +++ b/tests/components/atag/test_init.py @@ -1,6 +1,5 @@ """Tests for the ATAG integration.""" -from homeassistant.components.atag import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -23,7 +22,7 @@ async def test_unload_config_entry( ) -> None: """Test the ATAG configuration entry unloading.""" entry = await init_integration(hass, aioclient_mock) - assert hass.data[DOMAIN] + assert entry.runtime_data await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) + assert not hasattr(entry, "runtime_data") diff --git a/tests/components/august/mocks.py 
b/tests/components/august/mocks.py index c2ab8ce743c..43cc4957445 100644 --- a/tests/components/august/mocks.py +++ b/tests/components/august/mocks.py @@ -82,10 +82,7 @@ async def _mock_setup_august( ) entry.add_to_hass(hass) with ( - patch( - "yalexs.manager.data.async_create_pubnub", - return_value=AsyncMock(), - ), + patch.object(pubnub_mock, "run"), patch("yalexs.manager.data.AugustPubNub", return_value=pubnub_mock), ): assert await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/august/snapshots/test_binary_sensor.ambr b/tests/components/august/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..6e95b0ce552 --- /dev/null +++ b/tests/components/august/snapshots/test_binary_sensor.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_doorbell_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'tmt100_name', + 'config_entries': , + 'configuration_url': 'https://account.august.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'august', + 'tmt100', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'August Home Inc.', + 'model': 'hydra1', + 'model_id': None, + 'name': 'tmt100 Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'tmt100 Name', + 'sw_version': '3.1.0-HYDRC75+201909251139', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/august/snapshots/test_lock.ambr b/tests/components/august/snapshots/test_lock.ambr new file mode 100644 index 00000000000..6aad3a140ca --- /dev/null +++ b/tests/components/august/snapshots/test_lock.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_lock_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'online_with_doorsense_name', + 'config_entries': , + 'configuration_url': 'https://account.august.com', + 'connections': set({ + tuple( + 'bluetooth', + '12:22', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'august', + 'online_with_doorsense', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'August Home Inc.', + 'model': 'AUG-MD01', + 'model_id': None, + 'name': 'online_with_doorsense Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'online_with_doorsense Name', + 'sw_version': 'undefined-4.3.0-1.8.14', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/august/test_binary_sensor.py b/tests/components/august/test_binary_sensor.py index 33d582de8d8..4ae300ae56b 100644 --- a/tests/components/august/test_binary_sensor.py +++ b/tests/components/august/test_binary_sensor.py @@ -1,8 +1,10 @@ """The binary_sensor tests for the august platform.""" import datetime -from unittest.mock import Mock, patch +from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion from yalexs.pubnub_async import AugustPubNub from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN @@ -36,28 +38,20 @@ async def test_doorsense(hass: HomeAssistant) -> None: hass, "get_lock.online_with_doorsense.json" ) await _create_august_with_devices(hass, [lock_one]) + states = hass.states - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert 
states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF ) - assert binary_sensor_online_with_doorsense_name.state == STATE_OFF async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -69,113 +63,82 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: hass, "get_activity.bridge_offline.json" ) await _create_august_with_devices(hass, [lock_one], activities=activities) - - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + states = hass.states + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state + == STATE_UNAVAILABLE ) - assert binary_sensor_online_with_doorsense_name.state == STATE_UNAVAILABLE async def test_create_doorbell(hass: HomeAssistant) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_august_with_devices(hass, [doorbell_one]) + states = hass.states - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF - binary_sensor_k98gidt45gul_name_online = hass.states.get( - "binary_sensor.k98gidt45gul_name_connectivity" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" - ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF async def test_create_doorbell_offline(hass: HomeAssistant) -> None: """Test creation of a doorbell that is offline.""" doorbell_one = await _mock_doorbell_from_fixture(hass, 
"get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) + states = hass.states - binary_sensor_tmt100_name_motion = hass.states.get( - "binary_sensor.tmt100_name_motion" + assert states.get("binary_sensor.tmt100_name_motion").state == STATE_UNAVAILABLE + assert states.get("binary_sensor.tmt100_name_connectivity").state == STATE_OFF + assert ( + states.get("binary_sensor.tmt100_name_doorbell_ding").state == STATE_UNAVAILABLE ) - assert binary_sensor_tmt100_name_motion.state == STATE_UNAVAILABLE - binary_sensor_tmt100_name_online = hass.states.get( - "binary_sensor.tmt100_name_connectivity" - ) - assert binary_sensor_tmt100_name_online.state == STATE_OFF - binary_sensor_tmt100_name_ding = hass.states.get( - "binary_sensor.tmt100_name_doorbell_ding" - ) - assert binary_sensor_tmt100_name_ding.state == STATE_UNAVAILABLE -async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: +async def test_create_doorbell_with_motion( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") activities = await _mock_activities_from_fixture( hass, "get_activity.doorbell_motion.json" ) await _create_august_with_devices(hass, [doorbell_one], activities=activities) + states = hass.states - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON - binary_sensor_k98gidt45gul_name_online = hass.states.get( - "binary_sensor.k98gidt45gul_name_connectivity" - ) - assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF -async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: +async def test_doorbell_update_via_pubnub( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell that can be updated via pubnub.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") pubnub = AugustPubNub() await _create_august_with_devices(hass, [doorbell_one], pubnub=pubnub) assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" - - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + 
states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF pubnub.message( pubnub, @@ -198,10 +161,7 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" - ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_ON pubnub.message( pubnub, @@ -235,29 +195,19 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF pubnub.message( pubnub, @@ -271,37 +221,25 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_ON - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + assert states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_ON + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF async def test_doorbell_device_registry( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion ) -> None: """Test creation of a lock with doorsense and bridge ands up in the registry.""" doorbell_one = await _mock_doorbell_from_fixture(hass, 
"get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) reg_device = device_registry.async_get_device(identifiers={("august", "tmt100")}) - assert reg_device.model == "hydra1" - assert reg_device.name == "tmt100 Name" - assert reg_device.manufacturer == "August Home Inc." - assert reg_device.sw_version == "3.1.0-HYDRC75+201909251139" + assert reg_device == snapshot async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: @@ -314,11 +252,9 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: config_entry = await _create_august_with_devices( hass, [lock_one], activities=activities, pubnub=pubnub ) + states = hass.states - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON pubnub.message( pubnub, @@ -330,10 +266,9 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF ) - assert binary_sensor_online_with_doorsense_name.state == STATE_OFF pubnub.message( pubnub, @@ -344,33 +279,22 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: ), ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON pubnub.connected = True async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON # Ensure pubnub status is always preserved async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON pubnub.message( pubnub, @@ -381,17 +305,11 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: ), ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + 
datetime.timedelta(hours=4))
     await hass.async_block_till_done()
-    binary_sensor_online_with_doorsense_name = hass.states.get(
-        "binary_sensor.online_with_doorsense_name_door"
-    )
-    assert binary_sensor_online_with_doorsense_name.state == STATE_ON
+    assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON
     await hass.config_entries.async_unload(config_entry.entry_id)
     await hass.async_block_till_done()
@@ -402,7 +320,10 @@ async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None:
     lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json")
     await _create_august_with_devices(hass, [lock_one])
-    ding_sensor = hass.states.get(
-        "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding"
+    states = hass.states
+    assert (
+        states.get(
+            "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding"
+        ).state
+        == STATE_OFF
     )
-    assert ding_sensor.state == STATE_OFF
diff --git a/tests/components/august/test_button.py b/tests/components/august/test_button.py
index 8ae2bc8a70d..948b59b2286 100644
--- a/tests/components/august/test_button.py
+++ b/tests/components/august/test_button.py
@@ -20,5 +20,4 @@ async def test_wake_lock(hass: HomeAssistant) -> None:
     await hass.services.async_call(
         BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True
     )
-    await hass.async_block_till_done()
     api_instance.async_status_async.assert_called_once()
diff --git a/tests/components/august/test_camera.py b/tests/components/august/test_camera.py
index 539a26cc30f..287620cc872 100644
--- a/tests/components/august/test_camera.py
+++ b/tests/components/august/test_camera.py
@@ -6,7 +6,7 @@ from unittest.mock import patch
 from yalexs.const import Brand
 from yalexs.doorbell import ContentTokenExpired
-from homeassistant.const import STATE_IDLE
+from homeassistant.components.camera import CameraState
 from homeassistant.core import HomeAssistant
 from .mocks import _create_august_with_devices, _mock_doorbell_from_fixture
@@ -25,14 +25,10 @@ async def test_create_doorbell(
     ):
         await _create_august_with_devices(hass, [doorbell_one], brand=Brand.AUGUST)
-    camera_k98gidt45gul_name_camera = hass.states.get(
-        "camera.k98gidt45gul_name_camera"
-    )
-    assert camera_k98gidt45gul_name_camera.state == STATE_IDLE
+    camera_state = hass.states.get("camera.k98gidt45gul_name_camera")
+    assert camera_state.state == CameraState.IDLE
-    url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[
-        "entity_picture"
-    ]
+    url = camera_state.attributes["entity_picture"]
     client = await hass_client_no_auth()
     resp = await client.get(url)
diff --git a/tests/components/august/test_config_flow.py b/tests/components/august/test_config_flow.py
index fdebb8d5c46..b3138342b8c 100644
--- a/tests/components/august/test_config_flow.py
+++ b/tests/components/august/test_config_flow.py
@@ -5,7 +5,6 @@ from unittest.mock import patch
 from yalexs.authenticator_common import ValidationResult
 from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation
-from homeassistant import config_entries
 from homeassistant.components.august.const import (
     CONF_ACCESS_TOKEN_CACHE_FILE,
     CONF_BRAND,
@@ -14,6 +13,7 @@ from homeassistant.components.august.const import (
     DOMAIN,
     VERIFICATION_CODE_KEY,
 )
+from homeassistant.config_entries import SOURCE_USER
 from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
@@ -25,7 +25,7 @@ async def test_form(hass: HomeAssistant) -> None:
     """Test we get the form."""
     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": config_entries.SOURCE_USER}
+        DOMAIN, context={"source": SOURCE_USER}
     )
     assert result["type"] is FlowResultType.FORM
     assert result["errors"] == {}
@@ -66,7 +66,7 @@ async def test_form(hass: HomeAssistant) -> None:
 async def test_form_invalid_auth(hass: HomeAssistant) -> None:
     """Test we handle invalid auth."""
     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": config_entries.SOURCE_USER}
+        DOMAIN, context={"source": SOURCE_USER}
     )
     with patch(
@@ -90,7 +90,7 @@ async def test_form_invalid_auth(hass: HomeAssistant) -> None:
 async def test_user_unexpected_exception(hass: HomeAssistant) -> None:
     """Test we handle an unexpected exception."""
     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": config_entries.SOURCE_USER}
+        DOMAIN, context={"source": SOURCE_USER}
     )
     with patch(
@@ -115,7 +115,7 @@ async def test_user_unexpected_exception(hass: HomeAssistant) -> None:
 async def test_form_cannot_connect(hass: HomeAssistant) -> None:
     """Test we handle cannot connect error."""
     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": config_entries.SOURCE_USER}
+        DOMAIN, context={"source": SOURCE_USER}
     )
     with patch(
@@ -138,7 +138,7 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None:
 async def test_form_needs_validate(hass: HomeAssistant) -> None:
     """Test we present validation when we need to validate."""
     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": config_entries.SOURCE_USER}
+        DOMAIN, context={"source": SOURCE_USER}
     )
     with (
@@ -248,9 +248,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None:
     )
     entry.add_to_hass(hass)
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data
-    )
+    result = await entry.start_reauth_flow(hass)
     assert result["type"] is FlowResultType.FORM
     assert result["errors"] == {}
@@ -294,9 +292,7 @@ async def test_form_reauth_with_2fa(hass: HomeAssistant) -> None:
     )
     entry.add_to_hass(hass)
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data
-    )
+    result = await entry.start_reauth_flow(hass)
     assert result["type"] is FlowResultType.FORM
     assert result["errors"] == {}
@@ -371,7 +367,7 @@ async def test_switching_brands(hass: HomeAssistant) -> None:
     )
     entry.add_to_hass(hass)
     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": config_entries.SOURCE_USER}
+        DOMAIN, context={"source": SOURCE_USER}
     )
     assert result["type"] is FlowResultType.FORM
     assert result["errors"] == {}
@@ -389,7 +385,7 @@ async def test_switching_brands(hass: HomeAssistant) -> None:
     result2 = await hass.config_entries.flow.async_configure(
         result["flow_id"],
         {
-            CONF_BRAND: "yale_home",
+            CONF_BRAND: "yale_access",
             CONF_LOGIN_METHOD: "email",
             CONF_USERNAME: "my@email.tld",
             CONF_PASSWORD: "test-password",
@@ -400,4 +396,4 @@ async def test_switching_brands(hass: HomeAssistant) -> None:
     assert result2["type"] is FlowResultType.ABORT
     assert result2["reason"] == "reauth_successful"
     assert len(mock_setup_entry.mock_calls) == 1
-    assert entry.data[CONF_BRAND] == "yale_home"
+    assert entry.data[CONF_BRAND] == "yale_access"
diff --git a/tests/components/august/test_event.py b/tests/components/august/test_event.py
index 61b7560f462..0bb482c5b89 100644
--- a/tests/components/august/test_event.py
+++ b/tests/components/august/test_event.py
@@ -1,13 +1,12 @@
 """The event tests for the august."""
-import datetime
-from unittest.mock import Mock, patch
+from unittest.mock import Mock
+from freezegun.api import FrozenDateTimeFactory
 from yalexs.pubnub_async import AugustPubNub
 from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
 from homeassistant.core import HomeAssistant
-import homeassistant.util.dt as dt_util
 from .mocks import (
     _create_august_with_devices,
@@ -45,7 +44,9 @@ async def test_create_doorbell_offline(hass: HomeAssistant) -> None:
     assert doorbell_state.state == STATE_UNAVAILABLE
-async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None:
+async def test_create_doorbell_with_motion(
+    hass: HomeAssistant, freezer: FrozenDateTimeFactory
+) -> None:
     """Test creation of a doorbell."""
     doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json")
     activities = await _mock_activities_from_fixture(
@@ -61,19 +62,16 @@ async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None:
     assert doorbell_state is not None
     assert doorbell_state.state == STATE_UNKNOWN
-    new_time = dt_util.utcnow() + datetime.timedelta(seconds=40)
-    native_time = datetime.datetime.now() + datetime.timedelta(seconds=40)
-    with patch(
-        "homeassistant.components.august.util._native_datetime",
-        return_value=native_time,
-    ):
-        async_fire_time_changed(hass, new_time)
-        await hass.async_block_till_done()
+    freezer.tick(40)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
     motion_state = hass.states.get("event.k98gidt45gul_name_motion")
     assert motion_state.state == isotime
-async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None:
+async def test_doorbell_update_via_pubnub(
+    hass: HomeAssistant, freezer: FrozenDateTimeFactory
+) -> None:
     """Test creation of a doorbell that can be updated via pubnub."""
     doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json")
     pubnub = AugustPubNub()
@@ -125,14 +123,9 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None:
     assert motion_state.state != STATE_UNKNOWN
     isotime = motion_state.state
-    new_time = dt_util.utcnow() + datetime.timedelta(seconds=40)
-    native_time = datetime.datetime.now() + datetime.timedelta(seconds=40)
-    with patch(
-        "homeassistant.components.august.util._native_datetime",
-        return_value=native_time,
-    ):
-        async_fire_time_changed(hass, new_time)
-        await hass.async_block_till_done()
+    freezer.tick(40)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
     motion_state = hass.states.get("event.k98gidt45gul_name_motion")
     assert motion_state is not None
@@ -155,14 +148,9 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None:
     assert doorbell_state.state != STATE_UNKNOWN
     isotime = motion_state.state
-    new_time = dt_util.utcnow() + datetime.timedelta(seconds=40)
-    native_time = datetime.datetime.now() + datetime.timedelta(seconds=40)
-    with patch(
-        "homeassistant.components.august.util._native_datetime",
-        return_value=native_time,
-    ):
-        async_fire_time_changed(hass, new_time)
-        await hass.async_block_till_done()
+    freezer.tick(40)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
     doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell")
     assert doorbell_state is not None
diff --git a/tests/components/august/test_init.py b/tests/components/august/test_init.py
index 8261e32d668..3343e85d60a 100644
--- a/tests/components/august/test_init.py
+++ b/tests/components/august/test_init.py
@@ -5,22 +5,26 @@ from unittest.mock import Mock, patch
 from aiohttp import ClientResponseError
 import pytest
 from yalexs.authenticator_common import AuthenticationState
+from yalexs.const import Brand
 from yalexs.exceptions import AugustApiAIOHTTPError
 from homeassistant.components.august.const import DOMAIN
-from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
+from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState
 from homeassistant.config_entries import ConfigEntryState
 from homeassistant.const import (
     ATTR_ENTITY_ID,
     SERVICE_LOCK,
     SERVICE_OPEN,
     SERVICE_UNLOCK,
-    STATE_LOCKED,
     STATE_ON,
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import device_registry as dr, entity_registry as er
+from homeassistant.helpers import (
+    device_registry as dr,
+    entity_registry as er,
+    issue_registry as ir,
+)
 from homeassistant.setup import async_setup_component
 from .mocks import (
@@ -122,16 +126,16 @@ async def test_unlock_throws_august_api_http_error(hass: HomeAssistant) -> None:
             "unlock_return_activities": _unlock_return_activities_side_effect
         },
     )
-    last_err = None
     data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"}
-    try:
+
+    with pytest.raises(
+        HomeAssistantError,
+        match=(
+            "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user"
+            " consumable"
+        ),
+    ):
         await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True)
-    except HomeAssistantError as err:
-        last_err = err
-    assert str(last_err) == (
-        "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user"
-        " consumable"
-    )
 async def test_lock_throws_august_api_http_error(hass: HomeAssistant) -> None:
@@ -152,16 +156,15 @@ async def test_lock_throws_august_api_http_error(hass: HomeAssistant) -> None:
             "lock_return_activities": _lock_return_activities_side_effect
         },
     )
-    last_err = None
     data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"}
-    try:
+    with pytest.raises(
+        HomeAssistantError,
+        match=(
+            "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user"
+            " consumable"
+        ),
+    ):
         await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True)
-    except HomeAssistantError as err:
-        last_err = err
-    assert str(last_err) == (
-        "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user"
-        " consumable"
-    )
 async def test_open_throws_hass_service_not_supported_error(
@@ -188,7 +191,7 @@ async def test_inoperative_locks_are_filtered_out(hass: HomeAssistant) -> None:
     lock_a6697750d607098bae8d6baa11ef8063_name = hass.states.get(
         "lock.a6697750d607098bae8d6baa11ef8063_name"
     )
-    assert lock_a6697750d607098bae8d6baa11ef8063_name.state == STATE_LOCKED
+    assert lock_a6697750d607098bae8d6baa11ef8063_name.state == LockState.LOCKED
 async def test_lock_has_doorsense(hass: HomeAssistant) -> None:
@@ -371,6 +374,7 @@ async def test_load_unload(hass: HomeAssistant) -> None:
     await hass.config_entries.async_unload(config_entry.entry_id)
     await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.NOT_LOADED
 async def test_load_triggers_ble_discovery(
@@ -420,3 +424,24 @@ async def test_device_remove_devices(
     )
     response = await client.remove_device(dead_device_entry.id, config_entry.entry_id)
     assert response["success"]
+
+
+async def test_brand_migration_issue(hass: HomeAssistant) -> None:
+    """Test creating 
and removing the brand migration issue.""" + august_operative_lock = await _mock_operative_august_lock_detail(hass) + config_entry = await _create_august_with_devices( + hass, [august_operative_lock], brand=Brand.YALE_HOME + ) + + assert config_entry.state is ConfigEntryState.LOADED + + issue_reg = ir.async_get(hass) + issue_entry = issue_reg.async_get_issue(DOMAIN, "yale_brand_migration") + assert issue_entry + assert issue_entry.severity == ir.IssueSeverity.CRITICAL + assert issue_entry.translation_placeholders == { + "migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale" + } + + await hass.config_entries.async_remove(config_entry.entry_id) + assert not issue_reg.async_get_issue(DOMAIN, "yale_brand_migration") diff --git a/tests/components/august/test_lock.py b/tests/components/august/test_lock.py index 8bb71826d24..eb177a35cfb 100644 --- a/tests/components/august/test_lock.py +++ b/tests/components/august/test_lock.py @@ -6,28 +6,23 @@ from unittest.mock import Mock from aiohttp import ClientResponseError from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy import SnapshotAssertion from yalexs.manager.activity import INITIAL_LOCK_RESYNC_TIME from yalexs.pubnub_async import AugustPubNub -from homeassistant.components.lock import ( - DOMAIN as LOCK_DOMAIN, - STATE_JAMMED, - STATE_LOCKING, - STATE_UNLOCKING, -) +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_LOCKED, STATE_UNAVAILABLE, STATE_UNKNOWN, - STATE_UNLOCKED, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceNotSupported from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .mocks import ( @@ -43,7 +38,7 @@ from tests.common import async_fire_time_changed async def test_lock_device_registry( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion ) -> None: """Test creation of a lock with doorsense and bridge ands up in the registry.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) @@ -52,10 +47,7 @@ async def test_lock_device_registry( reg_device = device_registry.async_get_device( identifiers={("august", "online_with_doorsense")} ) - assert reg_device.model == "AUG-MD01" - assert reg_device.sw_version == "undefined-4.3.0-1.8.14" - assert reg_device.name == "online_with_doorsense Name" - assert reg_device.manufacturer == "August Home Inc." 
+ assert reg_device == snapshot async def test_lock_changed_by(hass: HomeAssistant) -> None: @@ -65,14 +57,10 @@ async def test_lock_changed_by(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED - - assert ( - lock_online_with_doorsense_name.attributes.get("changed_by") - == "Your favorite elven princess" - ) + assert lock_state.state == LockState.LOCKED + assert lock_state.attributes["changed_by"] == "Your favorite elven princess" async def test_state_locking(hass: HomeAssistant) -> None: @@ -82,9 +70,7 @@ async def test_state_locking(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.locking.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert hass.states.get("lock.online_with_doorsense_name").state == LockState.LOCKING async def test_state_unlocking(hass: HomeAssistant) -> None: @@ -96,9 +82,9 @@ async def test_state_unlocking(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert ( + hass.states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING + ) async def test_state_jammed(hass: HomeAssistant) -> None: @@ -108,9 +94,7 @@ async def test_state_jammed(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.jammed.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_JAMMED + assert hass.states.get("lock.online_with_doorsense_name").state == LockState.JAMMED async def test_one_lock_operation( @@ -119,35 +103,27 @@ async def test_one_lock_operation( """Test creation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) await _create_august_with_devices(hass, [lock_one]) + states = hass.states - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == LockState.LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = 
states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.UNLOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED # No activity means it will be unavailable until the activity feed has data lock_operator_sensor = entity_registry.async_get( @@ -155,8 +131,7 @@ async def test_one_lock_operation( ) assert lock_operator_sensor assert ( - hass.states.get("sensor.online_with_doorsense_name_operator").state - == STATE_UNKNOWN + states.get("sensor.online_with_doorsense_name_operator").state == STATE_UNKNOWN ) @@ -166,14 +141,13 @@ async def test_open_lock_operation(hass: HomeAssistant) -> None: await _create_august_with_devices(hass, [lock_with_unlatch]) lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_LOCKED + assert lock_online_with_unlatch_name.state == LockState.LOCKED data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - await hass.async_block_till_done() lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_UNLOCKED + assert lock_online_with_unlatch_name.state == LockState.UNLOCKED async def test_open_lock_operation_pubnub_connected( @@ -189,12 +163,10 @@ async def test_open_lock_operation_pubnub_connected( await _create_august_with_devices(hass, [lock_with_unlatch], pubnub=pubnub) pubnub.connected = True - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_LOCKED + assert hass.states.get("lock.online_with_unlatch_name").state == LockState.LOCKED data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - await hass.async_block_till_done() pubnub.message( pubnub, @@ -209,8 +181,7 @@ async def test_open_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_UNLOCKED + assert hass.states.get("lock.online_with_unlatch_name").state == LockState.UNLOCKED await hass.async_block_till_done() @@ -227,19 +198,15 @@ async def test_one_lock_operation_pubnub_connected( await _create_august_with_devices(hass, [lock_one], pubnub=pubnub) pubnub.connected = True - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == LockState.LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == 
"online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() pubnub.message( pubnub, @@ -254,17 +221,13 @@ async def test_one_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.UNLOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() pubnub.message( pubnub, @@ -279,8 +242,8 @@ async def test_one_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.LOCKED # No activity means it will be unavailable until the activity feed has data lock_operator_sensor = entity_registry.async_get( @@ -306,8 +269,8 @@ async def test_one_lock_operation_pubnub_connected( ) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.UNLOCKED async def test_lock_jammed(hass: HomeAssistant) -> None: @@ -325,22 +288,18 @@ async def test_lock_jammed(hass: HomeAssistant) -> None: }, ) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == LockState.LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_JAMMED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.JAMMED async def test_lock_throws_exception_on_unknown_status_code( @@ -360,15 +319,12 @@ async def test_lock_throws_exception_on_unknown_status_code( }, ) - lock_online_with_doorsense_name = 
hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == LockState.LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} with pytest.raises(ClientResponseError): @@ -383,9 +339,7 @@ async def test_one_lock_unknown_state(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one]) - lock_brokenid_name = hass.states.get("lock.brokenid_name") - - assert lock_brokenid_name.state == STATE_UNKNOWN + assert hass.states.get("lock.brokenid_name").state == STATE_UNKNOWN async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -397,9 +351,7 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_UNAVAILABLE + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_UNAVAILABLE async def test_lock_bridge_online(hass: HomeAssistant) -> None: @@ -411,14 +363,13 @@ async def test_lock_bridge_online(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert hass.states.get("lock.online_with_doorsense_name").state == LockState.LOCKED async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: """Test creation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) + states = hass.states assert lock_one.pubsub_channel == "pubsub" pubnub = AugustPubNub() @@ -428,9 +379,7 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: ) pubnub.connected = True - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED pubnub.message( pubnub, @@ -446,8 +395,7 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING pubnub.message( pubnub, @@ -463,25 +411,21 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - lock_online_with_doorsense_name = 
hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert hass.states.get("lock.online_with_doorsense_name").state == LockState.LOCKING pubnub.connected = True async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING # Ensure pubnub status is always preserved async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING pubnub.message( pubnub, @@ -496,13 +440,11 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() @@ -512,8 +454,9 @@ async def test_open_throws_hass_service_not_supported_error( hass: HomeAssistant, ) -> None: """Test open throws correct error on entity does not support this service error.""" + await async_setup_component(hass, "homeassistant", {}) mocked_lock_detail = await _mock_operative_august_lock_detail(hass) await _create_august_with_devices(hass, [mocked_lock_detail]) data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - with pytest.raises(HomeAssistantError, match="does not support this service"): + with pytest.raises(ServiceNotSupported, match="does not support action"): await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) diff --git a/tests/components/august/test_sensor.py b/tests/components/august/test_sensor.py index 67223e9dff0..2d72d287ce3 100644 --- a/tests/components/august/test_sensor.py +++ b/tests/components/august/test_sensor.py @@ -28,13 +28,9 @@ async def test_create_doorbell(hass: HomeAssistant) -> None: doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_august_with_devices(hass, [doorbell_one]) - sensor_k98gidt45gul_name_battery = hass.states.get( - "sensor.k98gidt45gul_name_battery" - ) - assert sensor_k98gidt45gul_name_battery.state == "96" - assert ( - sensor_k98gidt45gul_name_battery.attributes["unit_of_measurement"] == PERCENTAGE - ) + battery_state = hass.states.get("sensor.k98gidt45gul_name_battery") + assert battery_state.state == "96" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE async def test_create_doorbell_offline( @@ -44,9 +40,9 @@ async def test_create_doorbell_offline( doorbell_one = await _mock_doorbell_from_fixture(hass, 
"get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) - sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") - assert sensor_tmt100_name_battery.state == "81" - assert sensor_tmt100_name_battery.attributes["unit_of_measurement"] == PERCENTAGE + battery_state = hass.states.get("sensor.tmt100_name_battery") + assert battery_state.state == "81" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE entry = entity_registry.async_get("sensor.tmt100_name_battery") assert entry @@ -60,8 +56,7 @@ async def test_create_doorbell_hardwired(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [doorbell_one]) - sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") - assert sensor_tmt100_name_battery is None + assert hass.states.get("sensor.tmt100_name_battery") is None async def test_create_lock_with_linked_keypad( @@ -71,25 +66,21 @@ async def test_create_lock_with_linked_keypad( lock_one = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json") await _create_august_with_devices(hass, [lock_one]) - sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( + battery_state = hass.states.get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) - assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" - assert ( - sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ - "unit_of_measurement" - ] - == PERCENTAGE - ) + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) assert entry assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "62" - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + keypad_battery_state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert keypad_battery_state.state == "62" + assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" @@ -101,42 +92,32 @@ async def test_create_lock_with_low_battery_linked_keypad( """Test creation of a lock with a linked keypad that both have a battery.""" lock_one = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json") await _create_august_with_devices(hass, [lock_one]) + states = hass.states - sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( - "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" - ) - assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" - assert ( - sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ - "unit_of_measurement" - ] - == PERCENTAGE - ) + battery_state = states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_battery") + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) assert entry assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "10" - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + keypad_battery_state = 
states.get("sensor.front_door_lock_keypad_battery") + assert keypad_battery_state.state == "10" + assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" # No activity means it will be unavailable until someone unlocks/locks it - lock_operator_sensor = entity_registry.async_get( + operator_entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_operator" ) - assert ( - lock_operator_sensor.unique_id - == "A6697750D607098BAE8D6BAA11EF8063_lock_operator" - ) - assert ( - hass.states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator").state - == STATE_UNKNOWN - ) + assert operator_entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_lock_operator" + + operator_state = states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator") + assert operator_state.state == STATE_UNKNOWN async def test_lock_operator_bluetooth( diff --git a/tests/components/aussie_broadband/test_config_flow.py b/tests/components/aussie_broadband/test_config_flow.py index 6ee674ab0f4..76e96c5cc02 100644 --- a/tests/components/aussie_broadband/test_config_flow.py +++ b/tests/components/aussie_broadband/test_config_flow.py @@ -13,6 +13,8 @@ from homeassistant.data_entry_flow import FlowResultType from .common import FAKE_DATA, FAKE_SERVICES +from tests.common import MockConfigEntry + TEST_USERNAME = FAKE_DATA[CONF_USERNAME] TEST_PASSWORD = FAKE_DATA[CONF_PASSWORD] @@ -163,41 +165,15 @@ async def test_form_network_issue(hass: HomeAssistant) -> None: async def test_reauth(hass: HomeAssistant) -> None: """Test reauth flow.""" - - # Test reauth but the entry doesn't exist - result1 = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=FAKE_DATA + mock_entry = MockConfigEntry( + domain=DOMAIN, + data=FAKE_DATA, + unique_id=FAKE_DATA[CONF_USERNAME], ) - - with ( - patch("aussiebb.asyncio.AussieBB.__init__", return_value=None), - patch("aussiebb.asyncio.AussieBB.login", return_value=True), - patch( - "aussiebb.asyncio.AussieBB.get_services", return_value=[FAKE_SERVICES[0]] - ), - patch( - "homeassistant.components.aussie_broadband.async_setup_entry", - return_value=True, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result1["flow_id"], - { - CONF_PASSWORD: TEST_PASSWORD, - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == TEST_USERNAME - assert result2["data"] == FAKE_DATA + mock_entry.add_to_hass(hass) # Test failed reauth - result5 = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FAKE_DATA, - ) + result5 = await mock_entry.start_reauth_flow(hass) assert result5["step_id"] == "reauth_confirm" with ( diff --git a/tests/components/autarco/conftest.py b/tests/components/autarco/conftest.py index c7a95d7aa23..b35ea993600 100644 --- a/tests/components/autarco/conftest.py +++ b/tests/components/autarco/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch -from autarco import AccountSite, Inverter, Solar +from autarco import AccountSite, Battery, Inverter, Solar import pytest from homeassistant.components.autarco.const import DOMAIN @@ -66,6 +66,17 @@ def mock_autarco_client() -> Generator[AsyncMock]: health="OK", ), } + 
client.get_battery.return_value = Battery( + flow_now=777, + net_charged_now=777, + state_of_charge=56, + discharged_today=2, + discharged_month=25, + discharged_total=696, + charged_today=1, + charged_month=26, + charged_total=748, + ) yield client diff --git a/tests/components/autarco/snapshots/test_diagnostics.ambr b/tests/components/autarco/snapshots/test_diagnostics.ambr index 53d9f96fb86..876e6d6b727 100644 --- a/tests/components/autarco/snapshots/test_diagnostics.ambr +++ b/tests/components/autarco/snapshots/test_diagnostics.ambr @@ -3,6 +3,17 @@ dict({ 'sites_data': list([ dict({ + 'battery': dict({ + 'charged_month': 26, + 'charged_today': 1, + 'charged_total': 748, + 'discharged_month': 25, + 'discharged_today': 2, + 'discharged_total': 696, + 'flow_now': 777, + 'net_charged_now': 777, + 'state_of_charge': 56, + }), 'health': 'OK', 'id': 1, 'inverters': list([ diff --git a/tests/components/autarco/snapshots/test_sensor.ambr b/tests/components/autarco/snapshots/test_sensor.ambr index 0aa093d6a6d..dbbd8e9b47d 100644 --- a/tests/components/autarco/snapshots/test_sensor.ambr +++ b/tests/components/autarco/snapshots/test_sensor.ambr @@ -1,4 +1,412 @@ # serializer version: 1 +# name: test_all_sensors[sensor.battery_charged_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.battery_charged_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charged month', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charged_month', + 'unique_id': '1_battery_charged_month', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.battery_charged_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Battery Charged month', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.battery_charged_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26', + }) +# --- +# name: test_all_sensors[sensor.battery_charged_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.battery_charged_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charged today', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charged_today', + 'unique_id': '1_battery_charged_today', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.battery_charged_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Battery Charged today', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.battery_charged_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '1', + }) +# --- +# name: test_all_sensors[sensor.battery_charged_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.battery_charged_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charged total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charged_total', + 'unique_id': '1_battery_charged_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.battery_charged_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Battery Charged total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.battery_charged_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '748', + }) +# --- +# name: test_all_sensors[sensor.battery_discharged_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.battery_discharged_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Discharged month', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'discharged_month', + 'unique_id': '1_battery_discharged_month', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.battery_discharged_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Battery Discharged month', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.battery_discharged_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25', + }) +# --- +# name: test_all_sensors[sensor.battery_discharged_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.battery_discharged_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Discharged today', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'discharged_today', + 'unique_id': '1_battery_discharged_today', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.battery_discharged_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Battery Discharged today', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.battery_discharged_today', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_sensors[sensor.battery_discharged_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.battery_discharged_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Discharged total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'discharged_total', + 'unique_id': '1_battery_discharged_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.battery_discharged_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Battery Discharged total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.battery_discharged_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '696', + }) +# --- +# name: test_all_sensors[sensor.battery_flow_now-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.battery_flow_now', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flow now', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flow_now', + 'unique_id': '1_battery_flow_now', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.battery_flow_now-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Battery Flow now', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.battery_flow_now', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '777', + }) +# --- +# name: test_all_sensors[sensor.battery_state_of_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.battery_state_of_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State of charge', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_of_charge', + 'unique_id': '1_battery_state_of_charge', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_sensors[sensor.battery_state_of_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Battery State of charge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 
'sensor.battery_state_of_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56', + }) +# --- # name: test_all_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -208,7 +616,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -241,6 +651,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Solar Energy production month', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -256,7 +667,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -289,6 +702,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Solar Energy production today', + 'state_class': , 'unit_of_measurement': , }), 'context': , diff --git a/tests/components/autarco/test_config_flow.py b/tests/components/autarco/test_config_flow.py index 621ad7f55c8..47c6a2fb084 100644 --- a/tests/components/autarco/test_config_flow.py +++ b/tests/components/autarco/test_config_flow.py @@ -1,6 +1,6 @@ """Test the Autarco config flow.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from autarco import AutarcoAuthenticationError, AutarcoConnectionError import pytest @@ -92,6 +92,7 @@ async def test_exceptions( assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {"base": error} + # Recover from error mock_autarco_client.get_account.side_effect = None result = await hass.config_entries.flow.async_configure( @@ -99,3 +100,72 @@ async def test_exceptions( user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, ) assert result.get("type") is FlowResultType.CREATE_ENTRY + + +async def test_step_reauth( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test reauth flow.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + + with patch("homeassistant.components.autarco.config_flow.Autarco", autospec=True): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (AutarcoConnectionError, "cannot_connect"), + (AutarcoAuthenticationError, "invalid_auth"), + ], +) +async def test_step_reauth_exceptions( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test exceptions in reauth flow.""" + mock_autarco_client.get_account.side_effect = exception + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + assert 
result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + # Recover from error + mock_autarco_client.get_account.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" diff --git a/tests/components/autarco/test_init.py b/tests/components/autarco/test_init.py index 81c5f947251..6c71eca5ef1 100644 --- a/tests/components/autarco/test_init.py +++ b/tests/components/autarco/test_init.py @@ -4,6 +4,8 @@ from __future__ import annotations from unittest.mock import AsyncMock +from autarco import AutarcoAuthenticationError, AutarcoConnectionError + from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -26,3 +28,35 @@ async def test_load_unload_entry( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Autarco configuration entry not ready.""" + mock_autarco_client.get_account.side_effect = AutarcoConnectionError + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_entry_exception( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test ConfigEntryNotReady when API raises an exception during entry setup.""" + mock_config_entry.add_to_hass(hass) + mock_autarco_client.get_site.side_effect = AutarcoAuthenticationError + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" diff --git a/tests/components/autarco/test_sensor.py b/tests/components/autarco/test_sensor.py index e5e823501b9..c7e65baba70 100644 --- a/tests/components/autarco/test_sensor.py +++ b/tests/components/autarco/test_sensor.py @@ -1,16 +1,20 @@ """Test the sensor provided by the Autarco integration.""" -from unittest.mock import MagicMock, patch +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock, patch +from autarco import AutarcoConnectionError +from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.const import Platform +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform async def test_all_sensors( @@ -25,3 +29,29 @@ async def test_all_sensors( await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_update_failed( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test entities become unavailable after failed update.""" + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + assert ( + hass.states.get("sensor.inverter_test_serial_1_energy_ac_output_total").state + is not None + ) + + mock_autarco_client.get_solar.side_effect = AutarcoConnectionError + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get("sensor.inverter_test_serial_1_energy_ac_output_total").state + == STATE_UNAVAILABLE + ) diff --git a/tests/components/automation/test_blueprint.py b/tests/components/automation/test_blueprint.py index 2c92d7a5242..1095c625fb2 100644 --- a/tests/components/automation/test_blueprint.py +++ b/tests/components/automation/test_blueprint.py @@ -38,7 +38,10 @@ def patch_blueprint( return orig_load(self, path) return models.Blueprint( - yaml.load_yaml(data_path), expected_domain=self.domain, path=path + yaml.load_yaml(data_path), + expected_domain=self.domain, + path=path, + schema=automation.config.AUTOMATION_BLUEPRINT_SCHEMA, ) with patch( diff --git a/tests/components/automation/test_init.py b/tests/components/automation/test_init.py index d8f04f10458..98d8bf0396e 100644 --- a/tests/components/automation/test_init.py +++ b/tests/components/automation/test_init.py @@ -50,7 +50,6 @@ from homeassistant.helpers.script import ( SCRIPT_MODE_SINGLE, _async_stop_scripts_at_shutdown, ) -from homeassistant.helpers.trigger import TriggerActionType, TriggerData, TriggerInfo from homeassistant.setup import async_setup_component from homeassistant.util import yaml import homeassistant.util.dt as dt_util @@ -62,8 +61,6 @@ from tests.common import ( async_capture_events, async_fire_time_changed, async_mock_service, - help_test_all, - import_and_test_deprecated_constant, mock_restore_cache, ) from tests.components.logbook.common import MockRow, mock_humanify @@ -240,7 +237,7 @@ async def test_trigger_service_ignoring_condition( automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], - "condition": { + "conditions": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", @@ -292,8 +289,8 @@ async def test_two_conditions_with_and( automation.DOMAIN, { automation.DOMAIN: { - "trigger": [{"platform": "event", "event_type": "test_event"}], - "condition": [ + "triggers": [{"platform": "event", "event_type": "test_event"}], + "conditions": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", @@ -301,7 +298,7 @@ async def test_two_conditions_with_and( "below": 150, }, ], - "action": {"action": "test.automation"}, + "actions": {"action": "test.automation"}, } }, ) @@ -331,9 +328,9 @@ async def test_shorthand_conditions_template( automation.DOMAIN, { automation.DOMAIN: { - "trigger": [{"platform": "event", "event_type": "test_event"}], - "condition": "{{ is_state('test.entity', 
'hello') }}", - "action": {"action": "test.automation"}, + "triggers": [{"platform": "event", "event_type": "test_event"}], + "conditions": "{{ is_state('test.entity', 'hello') }}", + "actions": {"action": "test.automation"}, } }, ) @@ -807,8 +804,8 @@ async def test_reload_unchanged_does_not_stop( config = { automation.DOMAIN: { "alias": "hello", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [ + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"action": "test.automation"}, @@ -854,8 +851,8 @@ async def test_reload_single_unchanged_does_not_stop( automation.DOMAIN: { "id": "sun", "alias": "hello", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [ + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, {"action": "test.automation"}, @@ -1092,13 +1089,13 @@ async def test_reload_moved_automation_without_alias( config = { automation.DOMAIN: [ { - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "test.automation"}], + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [{"action": "test.automation"}], }, { "alias": "automation_with_alias", - "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": [{"action": "test.automation"}], + "triggers": {"platform": "event", "event_type": "test_event2"}, + "actions": [{"action": "test.automation"}], }, ] } @@ -1148,18 +1145,18 @@ async def test_reload_identical_automations_without_id( automation.DOMAIN: [ { "alias": "dolly", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "test.automation"}], + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [{"action": "test.automation"}], }, { "alias": "dolly", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "test.automation"}], + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [{"action": "test.automation"}], }, { "alias": "dolly", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "test.automation"}], + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [{"action": "test.automation"}], }, ] } @@ -1245,13 +1242,13 @@ async def test_reload_identical_automations_without_id( "automation_config", [ { - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "test.automation"}], + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [{"action": "test.automation"}], }, # An automation using templates { - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "{{ 'test.automation' }}"}], + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [{"action": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1277,14 +1274,14 @@ async def test_reload_identical_automations_without_id( }, { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "test.automation"}], + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [{"action": "test.automation"}], }, # An automation using templates { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "{{ 
'test.automation' }}"}], + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [{"action": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1380,8 +1377,8 @@ async def test_reload_automation_when_blueprint_changes( # Reload the automations without any change, but with updated blueprint blueprint_path = automation.async_get_blueprints(hass).blueprint_folder blueprint_config = yaml.load_yaml(blueprint_path / "test_event_service.yaml") - blueprint_config["action"] = [blueprint_config["action"]] - blueprint_config["action"].append(blueprint_config["action"][-1]) + blueprint_config["actions"] = [blueprint_config["actions"]] + blueprint_config["actions"].append(blueprint_config["actions"][-1]) with ( patch( @@ -1650,13 +1647,13 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: ( {}, "could not be validated", - "required key not provided @ data['action']", + "required key not provided @ data['actions']", "validation_failed_schema", ), ( { - "trigger": {"platform": "automation"}, - "action": [], + "triggers": {"platform": "automation"}, + "actions": [], }, "failed to setup triggers", "Integration 'automation' does not provide trigger support.", @@ -1664,14 +1661,14 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: ), ( { - "trigger": {"platform": "event", "event_type": "test_event"}, - "condition": { + "triggers": {"platform": "event", "event_type": "test_event"}, + "conditions": { "condition": "state", # The UUID will fail being resolved to en entity_id "entity_id": "abcdabcdabcdabcdabcdabcdabcdabcd", "state": "blah", }, - "action": [], + "actions": [], }, "failed to setup conditions", "Unknown entity registry entry abcdabcdabcdabcdabcdabcdabcdabcd.", @@ -1679,8 +1676,8 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: ), ( { - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": { + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": { "condition": "state", # The UUID will fail being resolved to en entity_id "entity_id": "abcdabcdabcdabcdabcdabcdabcdabcd", @@ -1712,8 +1709,8 @@ async def test_automation_bad_config_validation( {"alias": "bad_automation", **broken_config}, { "alias": "good_automation", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": { + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": { "action": "test.automation", "entity_id": "hello.world", }, @@ -1970,48 +1967,48 @@ async def test_extraction_functions( DOMAIN: [ { "alias": "test1", - "trigger": [ - {"platform": "state", "entity_id": "sensor.trigger_state"}, + "triggers": [ + {"trigger": "state", "entity_id": "sensor.trigger_state"}, { - "platform": "numeric_state", + "trigger": "numeric_state", "entity_id": "sensor.trigger_numeric_state", "above": 10, }, { - "platform": "calendar", + "trigger": "calendar", "entity_id": "calendar.trigger_calendar", "event": "start", }, { - "platform": "event", + "trigger": "event", "event_type": "state_changed", "event_data": {"entity_id": "sensor.trigger_event"}, }, # entity_id is a list of strings (not supported) { - "platform": "event", + "trigger": "event", "event_type": "state_changed", "event_data": {"entity_id": ["sensor.trigger_event2"]}, }, # entity_id is not a valid entity ID { - "platform": "event", + "trigger": "event", "event_type": "state_changed", "event_data": {"entity_id": "abc"}, }, # entity_id is not a string { - "platform": "event", 
+ "trigger": "event", "event_type": "state_changed", "event_data": {"entity_id": 123}, }, ], - "condition": { + "conditions": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, - "action": [ + "actions": [ { "action": "test.script", "data": {"entity_id": "light.in_both"}, @@ -2042,50 +2039,50 @@ async def test_extraction_functions( }, { "alias": "test2", - "trigger": [ + "triggers": [ { - "platform": "device", + "trigger": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": trigger_device_2.id, }, { - "platform": "tag", + "trigger": "tag", "tag_id": "1234", "device_id": "device-trigger-tag1", }, { - "platform": "tag", + "trigger": "tag", "tag_id": "1234", "device_id": ["device-trigger-tag2", "device-trigger-tag3"], }, { - "platform": "event", + "trigger": "event", "event_type": "esphome.button_pressed", "event_data": {"device_id": "device-trigger-event"}, }, # device_id is a list of strings (not supported) { - "platform": "event", + "trigger": "event", "event_type": "esphome.button_pressed", "event_data": {"device_id": ["device-trigger-event2"]}, }, # device_id is not a string { - "platform": "event", + "trigger": "event", "event_type": "esphome.button_pressed", "event_data": {"device_id": 123}, }, ], - "condition": { + "conditions": { "condition": "device", "device_id": condition_device.id, "domain": "light", "type": "is_on", "entity_id": "light.bla", }, - "action": [ + "actions": [ { "action": "test.script", "data": {"entity_id": "light.in_both"}, @@ -2112,33 +2109,33 @@ async def test_extraction_functions( }, { "alias": "test3", - "trigger": [ + "triggers": [ { - "platform": "event", + "trigger": "event", "event_type": "esphome.button_pressed", "event_data": {"area_id": "area-trigger-event"}, }, # area_id is a list of strings (not supported) { - "platform": "event", + "trigger": "event", "event_type": "esphome.button_pressed", "event_data": {"area_id": ["area-trigger-event2"]}, }, # area_id is not a string { - "platform": "event", + "trigger": "event", "event_type": "esphome.button_pressed", "event_data": {"area_id": 123}, }, ], - "condition": { + "conditions": { "condition": "device", "device_id": condition_device.id, "domain": "light", "type": "is_on", "entity_id": "light.bla", }, - "action": [ + "actions": [ { "action": "test.script", "data": {"entity_id": "light.in_both"}, @@ -2287,8 +2284,8 @@ async def test_automation_variables( "event_type": "{{ trigger.event.event_type }}", "this_variables": "{{this.entity_id}}", }, - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": { + "triggers": {"trigger": "event", "event_type": "test_event"}, + "actions": { "action": "test.automation", "data": { "value": "{{ test_var }}", @@ -2302,12 +2299,12 @@ async def test_automation_variables( "variables": { "test_var": "defined_in_config", }, - "trigger": {"platform": "event", "event_type": "test_event_2"}, - "condition": { + "trigger": {"trigger": "event", "event_type": "test_event_2"}, + "conditions": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, - "action": { + "actions": { "action": "test.automation", }, }, @@ -2315,8 +2312,8 @@ async def test_automation_variables( "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, - "trigger": {"platform": "event", "event_type": "test_event_3"}, - "action": { + "triggers": {"trigger": "event", "event_type": "test_event_3"}, + "actions": { "action": "test.automation", }, }, @@ -2371,7 +2368,7 @@ async def 
test_automation_trigger_variables( "trigger_variables": { "test_var": "defined_in_config", }, - "trigger": {"platform": "event", "event_type": "test_event"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, "action": { "action": "test.automation", "data": { @@ -2389,7 +2386,7 @@ async def test_automation_trigger_variables( "test_var": "defined_in_config", "this_trigger_variables": "{{this.entity_id}}", }, - "trigger": {"platform": "event", "event_type": "test_event_2"}, + "trigger": {"trigger": "event", "event_type": "test_event_2"}, "action": { "action": "test.automation", "data": { @@ -2436,7 +2433,7 @@ async def test_automation_bad_trigger_variables( "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, - "trigger": {"platform": "event", "event_type": "test_event"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, "action": { "action": "test.automation", }, @@ -2463,7 +2460,7 @@ async def test_automation_this_var_always( { automation.DOMAIN: [ { - "trigger": {"platform": "event", "event_type": "test_event"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, "action": { "action": "test.automation", "data": { @@ -2517,6 +2514,107 @@ async def test_blueprint_automation( ] +async def test_blueprint_automation_legacy_schema( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: + """Test blueprint automation where the blueprint is using legacy schema.""" + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "use_blueprint": { + "path": "test_event_service_legacy_schema.yaml", + "input": { + "trigger_event": "blueprint_event", + "service_to_call": "test.automation", + "a_number": 5, + }, + } + } + }, + ) + hass.bus.async_fire("blueprint_event") + await hass.async_block_till_done() + assert len(calls) == 1 + assert automation.entities_in_automation(hass, "automation.automation_0") == [ + "light.kitchen" + ] + assert ( + automation.blueprint_in_automation(hass, "automation.automation_0") + == "test_event_service_legacy_schema.yaml" + ) + assert automation.automations_with_blueprint( + hass, "test_event_service_legacy_schema.yaml" + ) == ["automation.automation_0"] + + +@pytest.mark.parametrize( + ("blueprint", "override"), + [ + # Override a blueprint with modern schema with legacy schema + ( + "test_event_service.yaml", + {"trigger": {"platform": "event", "event_type": "override"}}, + ), + # Override a blueprint with modern schema with modern schema + ( + "test_event_service.yaml", + {"triggers": {"platform": "event", "event_type": "override"}}, + ), + # Override a blueprint with legacy schema with legacy schema + ( + "test_event_service_legacy_schema.yaml", + {"trigger": {"platform": "event", "event_type": "override"}}, + ), + # Override a blueprint with legacy schema with modern schema + ( + "test_event_service_legacy_schema.yaml", + {"triggers": {"platform": "event", "event_type": "override"}}, + ), + ], +) +async def test_blueprint_automation_override( + hass: HomeAssistant, calls: list[ServiceCall], blueprint: str, override: dict +) -> None: + """Test blueprint automation where the automation config overrides the blueprint.""" + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "use_blueprint": { + "path": blueprint, + "input": { + "trigger_event": "blueprint_event", + "service_to_call": "test.automation", + "a_number": 5, + }, + }, + } + | override + }, + ) + + hass.bus.async_fire("blueprint_event") + await hass.async_block_till_done() + assert len(calls) == 0 + + 
hass.bus.async_fire("override") + await hass.async_block_till_done() + assert len(calls) == 1 + + assert automation.entities_in_automation(hass, "automation.automation_0") == [ + "light.kitchen" + ] + assert ( + automation.blueprint_in_automation(hass, "automation.automation_0") == blueprint + ) + assert automation.automations_with_blueprint(hass, blueprint) == [ + "automation.automation_0" + ] + + @pytest.mark.parametrize( ("blueprint_inputs", "problem", "details"), [ @@ -2542,7 +2640,7 @@ async def test_blueprint_automation( "Blueprint 'Call service based on event' generated invalid automation", ( "value should be a string for dictionary value @" - " data['action'][0]['action']" + " data['actions'][0]['action']" ), ), ], @@ -2638,7 +2736,7 @@ async def test_trigger_service(hass: HomeAssistant, calls: list[ServiceCall]) -> { automation.DOMAIN: { "alias": "hello", - "trigger": {"platform": "event", "event_type": "test_event"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, "action": { "action": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, @@ -2670,9 +2768,9 @@ async def test_trigger_condition_implicit_id( { automation.DOMAIN: { "trigger": [ - {"platform": "event", "event_type": "test_event1"}, - {"platform": "event", "event_type": "test_event2"}, - {"platform": "event", "event_type": "test_event3"}, + {"trigger": "event", "event_type": "test_event1"}, + {"trigger": "event", "event_type": "test_event2"}, + {"trigger": "event", "event_type": "test_event3"}, ], "action": { "choose": [ @@ -2722,8 +2820,8 @@ async def test_trigger_condition_explicit_id( { automation.DOMAIN: { "trigger": [ - {"platform": "event", "event_type": "test_event1", "id": "one"}, - {"platform": "event", "event_type": "test_event2", "id": "two"}, + {"trigger": "event", "event_type": "test_event1", "id": "one"}, + {"trigger": "event", "event_type": "test_event2", "id": "two"}, ], "action": { "choose": [ @@ -2837,7 +2935,7 @@ async def test_recursive_automation_starting_script( automation.DOMAIN: { "mode": automation_mode, "trigger": [ - {"platform": "event", "event_type": "trigger_automation"}, + {"trigger": "event", "event_type": "trigger_automation"}, ], "action": [ {"action": "test.automation_started"}, @@ -2919,7 +3017,7 @@ async def test_recursive_automation( automation.DOMAIN: { "mode": automation_mode, "trigger": [ - {"platform": "event", "event_type": "trigger_automation"}, + {"trigger": "event", "event_type": "trigger_automation"}, ], "action": [ {"event": "trigger_automation"}, @@ -2981,7 +3079,7 @@ async def test_recursive_automation_restart_mode( automation.DOMAIN: { "mode": SCRIPT_MODE_RESTART, "trigger": [ - {"platform": "event", "event_type": "trigger_automation"}, + {"trigger": "event", "event_type": "trigger_automation"}, ], "action": [ {"event": "trigger_automation"}, @@ -3020,8 +3118,8 @@ async def test_websocket_config( """Test config command.""" config = { "alias": "hello", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "data": 100}, + "triggers": {"trigger": "event", "event_type": "test_event"}, + "actions": {"action": "test.automation", "data": 100}, } assert await async_setup_component( hass, automation.DOMAIN, {automation.DOMAIN: config} @@ -3052,30 +3150,6 @@ async def test_websocket_config( assert msg["error"]["code"] == "not_found" -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(automation) - - -@pytest.mark.parametrize( - ("constant_name", "replacement"), - [ - 
("AutomationActionType", TriggerActionType), - ("AutomationTriggerData", TriggerData), - ("AutomationTriggerInfo", TriggerInfo), - ], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - constant_name: str, - replacement: Any, -) -> None: - """Test deprecated automation constants.""" - import_and_test_deprecated_constant( - caplog, automation, constant_name, replacement.__name__, replacement, "2025.1" - ) - - async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> None: """Test an automation that turns off another automation.""" hass.set_state(CoreState.not_running) @@ -3090,7 +3164,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non automation.DOMAIN: [ { "trigger": { - "platform": "state", + "trigger": "state", "entity_id": "binary_sensor.presence", "from": "on", }, @@ -3108,7 +3182,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non }, { "trigger": { - "platform": "state", + "trigger": "state", "entity_id": "binary_sensor.presence", "from": "on", "for": { @@ -3201,7 +3275,7 @@ async def test_two_automations_call_restart_script_same_time( automation.DOMAIN: [ { "trigger": { - "platform": "state", + "trigger": "state", "entity_id": "binary_sensor.presence", "to": "on", }, @@ -3213,7 +3287,7 @@ async def test_two_automations_call_restart_script_same_time( }, { "trigger": { - "platform": "state", + "trigger": "state", "entity_id": "binary_sensor.presence", "to": "on", }, @@ -3259,7 +3333,7 @@ async def test_two_automation_call_restart_script_right_after_each_other( automation.DOMAIN: [ { "trigger": { - "platform": "state", + "trigger": "state", "entity_id": ["input_boolean.test_1", "input_boolean.test_1"], "from": "off", "to": "on", @@ -3303,16 +3377,26 @@ async def test_two_automation_call_restart_script_right_after_each_other( assert len(events) == 1 -async def test_action_service_backward_compatibility( +async def test_action_backward_compatibility( hass: HomeAssistant, calls: list[ServiceCall] ) -> None: - """Test we can still use the service call method.""" + """Test we can still use old-style automations. + + - Services action using the `service` key instead of `action` + - Singular `trigger` instead of `triggers` + - Singular `condition` instead of `conditions` + - Singular `action` instead of `actions` + """ assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { - "trigger": {"platform": "event", "event_type": "test_event"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, + "condition": { + "condition": "template", + "value_template": "{{ True }}", + }, "action": { "service": "test.automation", "entity_id": "hello.world", @@ -3327,3 +3411,84 @@ async def test_action_service_backward_compatibility( assert len(calls) == 1 assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] assert calls[0].data.get("event") == "test_event" + + +@pytest.mark.parametrize( + ("config", "message"), + [ + ( + { + "trigger": {"platform": "event", "event_type": "test_event"}, + "triggers": {"platform": "event", "event_type": "test_event2"}, + "actions": [], + }, + "Cannot specify both 'trigger' and 'triggers'. Please use 'triggers' only.", + ), + ( + { + "trigger": {"platform": "event", "event_type": "test_event"}, + "condition": {"condition": "template", "value_template": "{{ True }}"}, + "conditions": {"condition": "template", "value_template": "{{ True }}"}, + }, + "Cannot specify both 'condition' and 'conditions'. 
Please use 'conditions' only.", + ), + ( + { + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, + "actions": {"service": "test.automation", "entity_id": "hello.world"}, + }, + "Cannot specify both 'action' and 'actions'. Please use 'actions' only.", + ), + ( + { + "trigger": { + "platform": "event", + "trigger": "event", + "event_type": "test_event2", + }, + "action": [], + }, + "Cannot specify both 'platform' and 'trigger'. Please use 'trigger' only.", + ), + ], +) +async def test_invalid_configuration( + hass: HomeAssistant, + config: dict[str, Any], + message: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test for invalid automation configurations.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + {automation.DOMAIN: config}, + ) + await hass.async_block_till_done() + assert message in caplog.text + + +@pytest.mark.parametrize( + ("trigger_key"), + ["trigger", "platform"], +) +async def test_valid_configuration( + hass: HomeAssistant, + trigger_key: str, +) -> None: + """Test for valid automation configurations.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "triggers": { + trigger_key: "event", + "event_type": "test_event2", + }, + "action": [], + } + }, + ) + await hass.async_block_till_done() diff --git a/tests/components/automation/test_recorder.py b/tests/components/automation/test_recorder.py index be354abe9d2..c1defdd0339 100644 --- a/tests/components/automation/test_recorder.py +++ b/tests/components/automation/test_recorder.py @@ -39,8 +39,8 @@ async def test_exclude_attributes( automation.DOMAIN, { automation.DOMAIN: { - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "entity_id": "hello.world"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, + "actions": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) diff --git a/tests/components/awair/__init__.py b/tests/components/awair/__init__.py index f93866263a2..0c0fd0eb522 100644 --- a/tests/components/awair/__init__.py +++ b/tests/components/awair/__init__.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from homeassistant.components.awair import DOMAIN +from homeassistant.components.awair.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/tests/components/awair/test_config_flow.py b/tests/components/awair/test_config_flow.py index ab9f5faa425..b27f20e83f3 100644 --- a/tests/components/awair/test_config_flow.py +++ b/tests/components/awair/test_config_flow.py @@ -7,7 +7,7 @@ from aiohttp.client_exceptions import ClientConnectorError from python_awair.exceptions import AuthError, AwairError from homeassistant.components.awair.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -136,11 +136,7 @@ async def test_reauth(hass: HomeAssistant, user, cloud_devices) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": CLOUD_UNIQUE_ID}, - data={**CLOUD_CONFIG, CONF_ACCESS_TOKEN: "blah"}, - ) + result = await 
mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -148,27 +144,32 @@ async def test_reauth(hass: HomeAssistant, user, cloud_devices) -> None: with patch("python_awair.AwairClient.query", side_effect=AuthError()): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=CLOUD_CONFIG, + user_input={CONF_ACCESS_TOKEN: "bad"}, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {CONF_ACCESS_TOKEN: "invalid_access_token"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {CONF_ACCESS_TOKEN: "invalid_access_token"} with ( patch( "python_awair.AwairClient.query", side_effect=[user, cloud_devices], ), - patch("homeassistant.components.awair.async_setup_entry", return_value=True), + patch( + "homeassistant.components.awair.async_setup_entry", return_value=True + ) as mock_setup_entry, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=CLOUD_CONFIG, + user_input={CONF_ACCESS_TOKEN: "good"}, ) + await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + mock_setup_entry.assert_called_once() + assert dict(mock_config.data) == {CONF_ACCESS_TOKEN: "good"} async def test_reauth_error(hass: HomeAssistant) -> None: @@ -180,11 +181,7 @@ async def test_reauth_error(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": CLOUD_UNIQUE_ID}, - data={**CLOUD_CONFIG, CONF_ACCESS_TOKEN: "blah"}, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -403,10 +400,6 @@ async def test_zeroconf_discovery_update_configuration( return_value=True, ) as mock_setup_entry, patch("python_awair.AwairClient.query", side_effect=[local_devices]), - patch( - "homeassistant.components.awair.async_setup_entry", - return_value=True, - ), ): result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/awair/test_sensor.py b/tests/components/awair/test_sensor.py index 8af1fdd9c7c..8c9cd6e3a24 100644 --- a/tests/components/awair/test_sensor.py +++ b/tests/components/awair/test_sensor.py @@ -29,7 +29,7 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from . 
import setup_awair @@ -48,16 +48,24 @@ SENSOR_TYPES_MAP = { def assert_expected_properties( hass: HomeAssistant, - registry: er.RegistryEntry, - name, - unique_id, - state_value, + entity_registry: er.RegistryEntry, + name: str, + unique_id: str, + state_value: str, attributes: dict, + model="Awair", + model_id="awair", ): """Assert expected properties from a dict.""" + entity_entry = entity_registry.async_get(name) + assert entity_entry.unique_id == unique_id + + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get(entity_entry.device_id) + assert device_entry is not None + assert device_entry.model == model + assert device_entry.model_id == model_id - entry = registry.async_get(name) - assert entry.unique_id == unique_id state = hass.states.get(name) assert state assert state.state == state_value @@ -201,7 +209,10 @@ async def test_awair_gen2_sensors( async def test_local_awair_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry, local_devices, local_data + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + local_devices, + local_data, ) -> None: """Test expected sensors on a local Awair.""" @@ -215,6 +226,8 @@ async def test_local_awair_sensors( f"{local_devices['device_uuid']}_{SENSOR_TYPES_MAP[API_SCORE].unique_id_tag}", "94", {}, + model="Awair Element", + model_id="awair-element", ) diff --git a/tests/components/axis/snapshots/test_camera.ambr b/tests/components/axis/snapshots/test_camera.ambr new file mode 100644 index 00000000000..564ff96b3d8 --- /dev/null +++ b/tests/components/axis/snapshots/test_camera.ambr @@ -0,0 +1,101 @@ +# serializer version: 1 +# name: test_camera[config_entry_options0-][camera.home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-camera', + 'unit_of_measurement': None, + }) +# --- +# name: test_camera[config_entry_options0-][camera.home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1', + 'entity_picture': '/api/camera_proxy/camera.home?token=1', + 'friendly_name': 'home', + 'frontend_stream_type': , + 'supported_features': , + }), + 'context': , + 'entity_id': 'camera.home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_camera[config_entry_options1-streamprofile=profile_1][camera.home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-camera', + 'unit_of_measurement': 
None, + }) +# --- +# name: test_camera[config_entry_options1-streamprofile=profile_1][camera.home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1', + 'entity_picture': '/api/camera_proxy/camera.home?token=1', + 'friendly_name': 'home', + 'frontend_stream_type': , + 'supported_features': , + }), + 'context': , + 'entity_id': 'camera.home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- diff --git a/tests/components/axis/snapshots/test_diagnostics.ambr b/tests/components/axis/snapshots/test_diagnostics.ambr index 3a643f55d3e..b475c796d2b 100644 --- a/tests/components/axis/snapshots/test_diagnostics.ambr +++ b/tests/components/axis/snapshots/test_diagnostics.ambr @@ -37,6 +37,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'axis', 'entry_id': '676abe5b73621446e6550a2e86ffe3dd', 'minor_version': 1, @@ -45,6 +47,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 3, diff --git a/tests/components/axis/test_binary_sensor.py b/tests/components/axis/test_binary_sensor.py index a1cf1e129d5..766a51463a4 100644 --- a/tests/components/axis/test_binary_sensor.py +++ b/tests/components/axis/test_binary_sensor.py @@ -119,7 +119,6 @@ async def test_binary_sensors( with patch("homeassistant.components.axis.PLATFORMS", [Platform.BINARY_SENSOR]): config_entry = await config_entry_factory() mock_rtsp_event(**event) - assert len(hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN)) == 1 await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) diff --git a/tests/components/axis/test_camera.py b/tests/components/axis/test_camera.py index 00fe4391b0c..6cc4bbd7c2f 100644 --- a/tests/components/axis/test_camera.py +++ b/tests/components/axis/test_camera.py @@ -1,58 +1,31 @@ """Axis camera platform tests.""" +from unittest.mock import patch + import pytest +from syrupy import SnapshotAssertion from homeassistant.components import camera from homeassistant.components.axis.const import CONF_STREAM_PROFILE from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN -from homeassistant.const import STATE_IDLE +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from .conftest import ConfigEntryFactoryType from .const import MAC, NAME - -@pytest.mark.usefixtures("config_entry_setup") -async def test_camera(hass: HomeAssistant) -> None: - """Test that Axis camera platform is loaded properly.""" - assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 - - entity_id = f"{CAMERA_DOMAIN}.{NAME}" - - cam = hass.states.get(entity_id) - assert cam.state == STATE_IDLE - assert cam.name == NAME - - camera_entity = camera._get_camera_from_entity_id(hass, entity_id) - assert camera_entity.image_source == "http://1.2.3.4:80/axis-cgi/jpg/image.cgi" - assert camera_entity.mjpeg_source == "http://1.2.3.4:80/axis-cgi/mjpg/video.cgi" - assert ( - await camera_entity.stream_source() - == "rtsp://root:pass@1.2.3.4/axis-media/media.amp?videocodec=h264" - ) +from tests.common import snapshot_platform -@pytest.mark.parametrize("config_entry_options", [{CONF_STREAM_PROFILE: "profile_1"}]) -@pytest.mark.usefixtures("config_entry_setup") -async def test_camera_with_stream_profile(hass: HomeAssistant) -> None: - """Test that Axis camera entity is using the correct path 
with stream profike.""" - assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 - - entity_id = f"{CAMERA_DOMAIN}.{NAME}" - - cam = hass.states.get(entity_id) - assert cam.state == STATE_IDLE - assert cam.name == NAME - - camera_entity = camera._get_camera_from_entity_id(hass, entity_id) - assert camera_entity.image_source == "http://1.2.3.4:80/axis-cgi/jpg/image.cgi" - assert ( - camera_entity.mjpeg_source - == "http://1.2.3.4:80/axis-cgi/mjpg/video.cgi?streamprofile=profile_1" - ) - assert ( - await camera_entity.stream_source() - == "rtsp://root:pass@1.2.3.4/axis-media/media.amp?videocodec=h264&streamprofile=profile_1" - ) +@pytest.fixture(autouse=True) +def mock_getrandbits(): + """Mock camera access token which normally is randomized.""" + with patch( + "homeassistant.components.camera.SystemRandom.getrandbits", + return_value=1, + ): + yield PROPERTY_DATA = f"""root.Properties.API.HTTP.Version=3 @@ -66,6 +39,39 @@ root.Properties.System.SerialNumber={MAC} """ # No image format data to signal camera support +@pytest.mark.parametrize( + ("config_entry_options", "stream_profile"), + [ + ({}, ""), + ({CONF_STREAM_PROFILE: "profile_1"}, "streamprofile=profile_1"), + ], +) +async def test_camera( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, + stream_profile: str, +) -> None: + """Test that Axis camera platform is loaded properly.""" + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CAMERA]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + entity_id = f"{CAMERA_DOMAIN}.{NAME}" + camera_entity = camera.helper.get_camera_from_entity_id(hass, entity_id) + assert camera_entity.image_source == "http://1.2.3.4:80/axis-cgi/jpg/image.cgi" + assert ( + camera_entity.mjpeg_source == "http://1.2.3.4:80/axis-cgi/mjpg/video.cgi" + f"{"" if not stream_profile else f"?{stream_profile}"}" + ) + assert ( + await camera_entity.stream_source() + == "rtsp://root:pass@1.2.3.4/axis-media/media.amp?videocodec=h264" + f"{"" if not stream_profile else f"&{stream_profile}"}" + ) + + @pytest.mark.parametrize("param_properties_payload", [PROPERTY_DATA]) @pytest.mark.usefixtures("config_entry_setup") async def test_camera_disabled(hass: HomeAssistant) -> None: diff --git a/tests/components/axis/test_config_flow.py b/tests/components/axis/test_config_flow.py index 5ceb6588fbd..52dd9c2f8ad 100644 --- a/tests/components/axis/test_config_flow.py +++ b/tests/components/axis/test_config_flow.py @@ -17,8 +17,6 @@ from homeassistant.components.axis.const import ( ) from homeassistant.config_entries import ( SOURCE_DHCP, - SOURCE_REAUTH, - SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER, SOURCE_ZEROCONF, @@ -77,7 +75,7 @@ async def test_flow_manual_configuration(hass: HomeAssistant) -> None: } -async def test_manual_configuration_update_configuration( +async def test_manual_configuration_duplicate_fails( hass: HomeAssistant, config_entry_setup: MockConfigEntry, mock_requests: Callable[[str], None], @@ -107,7 +105,7 @@ async def test_manual_configuration_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" @pytest.mark.parametrize( @@ -205,12 +203,7 @@ async def test_reauth_flow_update_configuration( assert config_entry_setup.data[CONF_USERNAME] == "root" 
assert config_entry_setup.data[CONF_PASSWORD] == "pass" - result = await hass.config_entries.flow.async_init( - AXIS_DOMAIN, - context={"source": SOURCE_REAUTH}, - data=config_entry_setup.data, - ) - + result = await config_entry_setup.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -228,7 +221,7 @@ async def test_reauth_flow_update_configuration( await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "reauth_successful" assert config_entry_setup.data[CONF_PROTOCOL] == "https" assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" assert config_entry_setup.data[CONF_PORT] == 443 @@ -246,13 +239,7 @@ async def test_reconfiguration_flow_update_configuration( assert config_entry_setup.data[CONF_USERNAME] == "root" assert config_entry_setup.data[CONF_PASSWORD] == "pass" - result = await hass.config_entries.flow.async_init( - AXIS_DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": config_entry_setup.entry_id, - }, - ) + result = await config_entry_setup.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -268,7 +255,7 @@ async def test_reconfiguration_flow_update_configuration( await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "reconfigure_successful" assert config_entry_setup.data[CONF_PROTOCOL] == "http" assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" assert config_entry_setup.data[CONF_PORT] == 80 diff --git a/tests/components/azure_data_explorer/test_config_flow.py b/tests/components/azure_data_explorer/test_config_flow.py index a700299be33..13ff6a8bb13 100644 --- a/tests/components/azure_data_explorer/test_config_flow.py +++ b/tests/components/azure_data_explorer/test_config_flow.py @@ -25,7 +25,7 @@ async def test_config_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> BASE_CONFIG.copy(), ) - assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result2["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY assert result2["title"] == "cluster.region.kusto.windows.net" mock_setup_entry.assert_called_once() @@ -59,12 +59,12 @@ async def test_config_flow_errors( result["flow_id"], BASE_CONFIG.copy(), ) - assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result2["type"] == data_entry_flow.FlowResultType.FORM assert result2["errors"] == {"base": expected} await hass.async_block_till_done() - assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result2["type"] == data_entry_flow.FlowResultType.FORM # Retest error handling if error is corrected and connection is successful @@ -77,4 +77,4 @@ async def test_config_flow_errors( await hass.async_block_till_done() - assert result3["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result3["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY diff --git a/tests/components/azure_data_explorer/test_init.py b/tests/components/azure_data_explorer/test_init.py index 4d339728d09..10633154efd 100644 --- a/tests/components/azure_data_explorer/test_init.py +++ b/tests/components/azure_data_explorer/test_init.py @@ -9,14 +9,10 @@ from azure.kusto.ingest import StreamDescriptor import pytest from homeassistant.components import azure_data_explorer -from homeassistant.components.azure_data_explorer.const import ( - CONF_SEND_INTERVAL, - 
DOMAIN, -) +from homeassistant.components.azure_data_explorer.const import CONF_SEND_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow from . import FilterTest @@ -99,27 +95,6 @@ async def test_put_event_on_queue_with_queueing_client( assert type(mock_queued_ingest.call_args.args[0]) is StreamDescriptor -async def test_import(hass: HomeAssistant) -> None: - """Test the popping of the filter and further import of the config.""" - config = { - DOMAIN: { - "filter": { - "include_domains": ["light"], - "include_entity_globs": ["sensor.included_*"], - "include_entities": ["binary_sensor.included"], - "exclude_domains": ["light"], - "exclude_entity_globs": ["sensor.excluded_*"], - "exclude_entities": ["binary_sensor.excluded"], - }, - } - } - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - assert "filter" in hass.data[DOMAIN] - - async def test_unload_entry( hass: HomeAssistant, entry_managed: MockConfigEntry, @@ -239,7 +214,6 @@ async def test_filter( ) await hass.async_block_till_done() assert mock_managed_streaming.called == test.expect_called - assert "filter" in hass.data[DOMAIN] @pytest.mark.parametrize( diff --git a/tests/components/azure_devops/__init__.py b/tests/components/azure_devops/__init__.py index d636a6fda6d..6414fe0257c 100644 --- a/tests/components/azure_devops/__init__.py +++ b/tests/components/azure_devops/__init__.py @@ -1,9 +1,12 @@ """Tests for the Azure DevOps integration.""" +from datetime import datetime from typing import Final -from aioazuredevops.models.builds import Build, BuildDefinition +from aioazuredevops.models.build import Build, BuildDefinition from aioazuredevops.models.core import Project +from aioazuredevops.models.work_item import WorkItem, WorkItemFields +from aioazuredevops.models.work_item_type import Category, Icon, State, WorkItemType from homeassistant.components.azure_devops.const import CONF_ORG, CONF_PAT, CONF_PROJECT from homeassistant.core import HomeAssistant @@ -77,6 +80,55 @@ DEVOPS_BUILD_MISSING_PROJECT_DEFINITION = Build( build_id=9876, ) +DEVOPS_WORK_ITEM_TYPES = [ + WorkItemType( + name="Bug", + reference_name="System.Bug", + description="Bug", + color="ff0000", + icon=Icon(id="1234", url="https://example.com/icon.png"), + is_disabled=False, + xml_form="", + fields=[], + field_instances=[], + transitions={}, + states=[ + State(name="New", color="ff0000", category=Category.PROPOSED), + State(name="Active", color="ff0000", category=Category.IN_PROGRESS), + State(name="Resolved", color="ff0000", category=Category.RESOLVED), + State(name="Closed", color="ff0000", category=Category.COMPLETED), + ], + url="", + ) +] + +DEVOPS_WORK_ITEM_IDS = [1] + +DEVOPS_WORK_ITEMS = [ + WorkItem( + id=1, + rev=1, + fields=WorkItemFields( + area_path="", + team_project="", + iteration_path="", + work_item_type="Bug", + state="New", + reason="New", + assigned_to=None, + created_date=datetime(2021, 1, 1), + created_by=None, + changed_date=datetime(2021, 1, 1), + changed_by=None, + comment_count=0, + title="Test", + microsoft_vsts_common_state_change_date=datetime(2021, 1, 1), + microsoft_vsts_common_priority=1, + ), + url="https://example.com", + ) +] + async def setup_integration( hass: HomeAssistant, diff --git a/tests/components/azure_devops/conftest.py b/tests/components/azure_devops/conftest.py index 
c65adaa4da5..54c730f9523 100644 --- a/tests/components/azure_devops/conftest.py +++ b/tests/components/azure_devops/conftest.py @@ -7,7 +7,16 @@ import pytest from homeassistant.components.azure_devops.const import DOMAIN -from . import DEVOPS_BUILD, DEVOPS_PROJECT, FIXTURE_USER_INPUT, PAT, UNIQUE_ID +from . import ( + DEVOPS_BUILD, + DEVOPS_PROJECT, + DEVOPS_WORK_ITEM_IDS, + DEVOPS_WORK_ITEM_TYPES, + DEVOPS_WORK_ITEMS, + FIXTURE_USER_INPUT, + PAT, + UNIQUE_ID, +) from tests.common import MockConfigEntry @@ -33,8 +42,9 @@ async def mock_devops_client() -> AsyncGenerator[MagicMock]: devops_client.get_project.return_value = DEVOPS_PROJECT devops_client.get_builds.return_value = [DEVOPS_BUILD] devops_client.get_build.return_value = DEVOPS_BUILD - devops_client.get_work_item_ids.return_value = None - devops_client.get_work_items.return_value = None + devops_client.get_work_item_types.return_value = DEVOPS_WORK_ITEM_TYPES + devops_client.get_work_item_ids.return_value = DEVOPS_WORK_ITEM_IDS + devops_client.get_work_items.return_value = DEVOPS_WORK_ITEMS yield devops_client diff --git a/tests/components/azure_devops/test_config_flow.py b/tests/components/azure_devops/test_config_flow.py index 45dc10802b9..64c771a7adc 100644 --- a/tests/components/azure_devops/test_config_flow.py +++ b/tests/components/azure_devops/test_config_flow.py @@ -53,20 +53,17 @@ async def test_authorization_error( async def test_reauth_authorization_error( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps authorization error.""" + mock_config_entry.add_to_hass(hass) mock_devops_client.authorize.return_value = False mock_devops_client.authorized = False - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" + assert result["step_id"] == "reauth_confirm" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -75,7 +72,7 @@ async def test_reauth_authorization_error( await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "invalid_auth"} @@ -108,20 +105,18 @@ async def test_connection_error( async def test_reauth_connection_error( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps connection error.""" + mock_config_entry.add_to_hass(hass) mock_devops_client.authorize.side_effect = aiohttp.ClientError mock_devops_client.authorized = False - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" + assert result["step_id"] == "reauth_confirm" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -130,7 +125,7 @@ async def test_reauth_connection_error( await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "cannot_connect"} @@ -174,14 
+169,10 @@ async def test_reauth_project_error( mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" + assert result["step_id"] == "reauth_confirm" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -190,7 +181,7 @@ async def test_reauth_project_error( await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "project_error"} @@ -205,15 +196,10 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" - assert result["errors"] == {"base": "invalid_auth"} + assert result["step_id"] == "reauth_confirm" mock_devops_client.authorize.return_value = True mock_devops_client.authorized = True diff --git a/tests/components/azure_devops/test_init.py b/tests/components/azure_devops/test_init.py index a7655042f25..dd512cb12e0 100644 --- a/tests/components/azure_devops/test_init.py +++ b/tests/components/azure_devops/test_init.py @@ -91,3 +91,48 @@ async def test_no_builds( assert mock_devops_client.get_builds.call_count == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_no_work_item_types( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_devops_client: MagicMock, +) -> None: + """Test a failed update entry.""" + mock_devops_client.get_work_item_types.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert mock_devops_client.get_work_item_types.call_count == 1 + + assert mock_config_entry.state is ConfigEntryState.LOADED + + +async def test_no_work_item_ids( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_devops_client: MagicMock, +) -> None: + """Test a failed update entry.""" + mock_devops_client.get_work_item_ids.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert mock_devops_client.get_work_item_ids.call_count == 1 + + assert mock_config_entry.state is ConfigEntryState.LOADED + + +async def test_no_work_items( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_devops_client: MagicMock, +) -> None: + """Test a failed update entry.""" + mock_devops_client.get_work_items.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert mock_devops_client.get_work_items.call_count == 1 + + assert mock_config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/azure_event_hub/test_init.py b/tests/components/azure_event_hub/test_init.py index 1b0550b147b..5ffc6106c11 100644 --- a/tests/components/azure_event_hub/test_init.py +++ b/tests/components/azure_event_hub/test_init.py @@ -112,6 +112,7 @@ async def test_send_batch_error( ) await hass.async_block_till_done() mock_send_batch.assert_called_once() + mock_send_batch.side_effect = None # Reset to avoid error in teardown async def test_late_event( diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index 
70b33d2de3f..b06b8a5ef5d 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -2,29 +2,164 @@ from __future__ import annotations +from collections.abc import AsyncIterator, Callable, Coroutine from pathlib import Path -from unittest.mock import patch +from typing import Any +from unittest.mock import AsyncMock, Mock, patch -from homeassistant.components.backup import DOMAIN -from homeassistant.components.backup.manager import Backup +from homeassistant.components.backup import ( + DOMAIN, + AddonInfo, + AgentBackup, + BackupAgent, + BackupAgentPlatformProtocol, + Folder, +) +from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component -TEST_BACKUP = Backup( - slug="abc123", - name="Test", +from tests.common import MockPlatform, mock_platform + +LOCAL_AGENT_ID = f"{DOMAIN}.local" + +TEST_BACKUP_ABC123 = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id="abc123", + database_included=True, date="1970-01-01T00:00:00.000Z", - path=Path("abc123.tar"), - size=0.0, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0, ) +TEST_BACKUP_PATH_ABC123 = Path("abc123.tar") + +TEST_BACKUP_DEF456 = AgentBackup( + addons=[], + backup_id="def456", + database_included=False, + date="1980-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test 2", + protected=False, + size=1, +) + +TEST_DOMAIN = "test" + + +class BackupAgentTest(BackupAgent): + """Test backup agent.""" + + domain = "test" + + def __init__(self, name: str, backups: list[AgentBackup] | None = None) -> None: + """Initialize the backup agent.""" + self.name = name + if backups is None: + backups = [ + AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id="abc123", + database_included=True, + date="1970-01-01T00:00:00Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=13, + ) + ] + + self._backup_data: bytearray | None = None + self._backups = {backup.backup_id: backup for backup in backups} + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + return AsyncMock(spec_set=["__aiter__"]) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + self._backups[backup.backup_id] = backup + backup_stream = await open_stream() + self._backup_data = bytearray() + async for chunk in backup_stream: + self._backup_data += chunk + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + return list(self._backups.values()) + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + return self._backups.get(backup_id) + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file.""" async def setup_backup_integration( hass: HomeAssistant, with_hassio: bool = False, configuration: ConfigType | None = None, + *, + backups: dict[str, 
list[AgentBackup]] | None = None, + remote_agents: list[str] | None = None, ) -> bool: """Set up the Backup integration.""" - with patch("homeassistant.components.backup.is_hassio", return_value=with_hassio): - return await async_setup_component(hass, DOMAIN, configuration or {}) + with ( + patch("homeassistant.components.backup.is_hassio", return_value=with_hassio), + patch( + "homeassistant.components.backup.backup.is_hassio", return_value=with_hassio + ), + ): + remote_agents = remote_agents or [] + platform = Mock( + async_get_backup_agents=AsyncMock( + return_value=[BackupAgentTest(agent, []) for agent in remote_agents] + ), + spec_set=BackupAgentPlatformProtocol, + ) + + mock_platform(hass, f"{TEST_DOMAIN}.backup", platform or MockPlatform()) + assert await async_setup_component(hass, TEST_DOMAIN, {}) + + result = await async_setup_component(hass, DOMAIN, configuration or {}) + await hass.async_block_till_done() + if not backups: + return result + + for agent_id, agent_backups in backups.items(): + if with_hassio and agent_id == LOCAL_AGENT_ID: + continue + agent = hass.data[DATA_MANAGER].backup_agents[agent_id] + agent._backups = {backups.backup_id: backups for backups in agent_backups} + if agent_id == LOCAL_AGENT_ID: + agent._loaded_backups = True + + return result diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py new file mode 100644 index 00000000000..13f2537db47 --- /dev/null +++ b/tests/components/backup/conftest.py @@ -0,0 +1,115 @@ +"""Test fixtures for the Backup integration.""" + +from __future__ import annotations + +from asyncio import Future +from collections.abc import Generator +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +import pytest + +from homeassistant.components.backup.manager import WrittenBackup +from homeassistant.core import HomeAssistant + +from .common import TEST_BACKUP_PATH_ABC123 + + +@pytest.fixture(name="mocked_json_bytes") +def mocked_json_bytes_fixture() -> Generator[Mock]: + """Mock json_bytes.""" + with patch( + "homeassistant.components.backup.manager.json_bytes", + return_value=b"{}", # Empty JSON + ) as mocked_json_bytes: + yield mocked_json_bytes + + +@pytest.fixture(name="mocked_tarfile") +def mocked_tarfile_fixture() -> Generator[Mock]: + """Mock tarfile.""" + with patch( + "homeassistant.components.backup.manager.SecureTarFile" + ) as mocked_tarfile: + yield mocked_tarfile + + +@pytest.fixture(name="path_glob") +def path_glob_fixture() -> Generator[MagicMock]: + """Mock path glob.""" + with patch( + "pathlib.Path.glob", return_value=[TEST_BACKUP_PATH_ABC123] + ) as path_glob: + yield path_glob + + +CONFIG_DIR = { + "testing_config": [ + Path("test.txt"), + Path(".DS_Store"), + Path(".storage"), + Path("backups"), + Path("tmp_backups"), + Path("home-assistant_v2.db"), + ], + "backups": [ + Path("backups/backup.tar"), + Path("backups/not_backup"), + ], + "tmp_backups": [ + Path("tmp_backups/forgotten_backup.tar"), + Path("tmp_backups/not_backup"), + ], +} +CONFIG_DIR_DIRS = {Path(".storage"), Path("backups"), Path("tmp_backups")} + + +@pytest.fixture(name="create_backup") +def mock_create_backup() -> Generator[AsyncMock]: + """Mock manager create backup.""" + mock_written_backup = MagicMock(spec_set=WrittenBackup) + mock_written_backup.backup.backup_id = "abc123" + mock_written_backup.open_stream = AsyncMock() + mock_written_backup.release_stream = AsyncMock() + fut = Future() + fut.set_result(mock_written_backup) + with patch( + 
"homeassistant.components.backup.CoreBackupReaderWriter.async_create_backup" + ) as mock_create_backup: + mock_create_backup.return_value = (MagicMock(), fut) + yield mock_create_backup + + +@pytest.fixture(name="mock_backup_generation") +def mock_backup_generation_fixture( + hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock +) -> Generator[None]: + """Mock backup generator.""" + + with ( + patch("pathlib.Path.iterdir", lambda x: CONFIG_DIR.get(x.name, [])), + patch("pathlib.Path.stat", return_value=MagicMock(st_size=123)), + patch("pathlib.Path.is_file", lambda x: x not in CONFIG_DIR_DIRS), + patch("pathlib.Path.is_dir", lambda x: x in CONFIG_DIR_DIRS), + patch( + "pathlib.Path.exists", + lambda x: x + not in ( + Path(hass.config.path("backups")), + Path(hass.config.path("tmp_backups")), + ), + ), + patch( + "pathlib.Path.is_symlink", + lambda _: False, + ), + patch( + "pathlib.Path.mkdir", + MagicMock(), + ), + patch( + "homeassistant.components.backup.manager.HAVERSION", + "2025.1.0", + ), + ): + yield diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr new file mode 100644 index 00000000000..b350ff680ee --- /dev/null +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -0,0 +1,206 @@ +# serializer version: 1 +# name: test_delete_backup[found_backups0-True-1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_backup[found_backups1-False-0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_backup[found_backups2-True-0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[None] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[None].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect1] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect2] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: 
test_load_backups[side_effect2].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect3] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect3].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect4] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect4].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index a1d83f5cd75..8bd4e2817b2 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -1,4 +1,32 @@ # serializer version: 1 +# name: test_agent_delete_backup + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_agents_info + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + dict({ + 'agent_id': 'domain.test', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- # name: test_backup_end[with_hassio-hass_access_token] dict({ 'error': dict({ @@ -40,7 +68,7 @@ 'type': 'result', }) # --- -# name: test_backup_end_excepion[exception0] +# name: test_backup_end_exception[exception0] dict({ 'error': dict({ 'code': 'post_backup_actions_failed', @@ -51,7 +79,7 @@ 'type': 'result', }) # --- -# name: test_backup_end_excepion[exception1] +# name: test_backup_end_exception[exception1] dict({ 'error': dict({ 'code': 'post_backup_actions_failed', @@ -62,7 +90,7 @@ 'type': 'result', }) # --- -# name: test_backup_end_excepion[exception2] +# name: test_backup_end_exception[exception2] dict({ 'error': dict({ 'code': 'post_backup_actions_failed', @@ -114,7 +142,7 @@ 'type': 'result', }) # --- -# name: test_backup_start_excepion[exception0] +# name: test_backup_start_exception[exception0] dict({ 'error': dict({ 'code': 'pre_backup_actions_failed', @@ -125,7 +153,7 @@ 'type': 'result', }) # --- -# name: test_backup_start_excepion[exception1] +# name: test_backup_start_exception[exception1] dict({ 'error': dict({ 'code': 'pre_backup_actions_failed', @@ -136,7 +164,7 @@ 'type': 'result', }) # --- -# name: test_backup_start_excepion[exception2] +# name: test_backup_start_exception[exception2] dict({ 'error': dict({ 'code': 'pre_backup_actions_failed', @@ -147,73 +175,2203 @@ 'type': 'result', }) # --- -# name: test_generate[with_hassio] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_generate[without_hassio] +# name: 
test_config_info[None] dict({ 'id': 1, 'result': dict({ - 'date': '1970-01-01T00:00:00.000Z', - 'name': 'Test', - 'path': 'abc123.tar', - 'size': 0.0, - 'slug': 'abc123', + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), }), 'success': True, 'type': 'result', }) # --- -# name: test_info[with_hassio] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_info[without_hassio] +# name: test_config_info[storage_data1] dict({ 'id': 1, 'result': dict({ - 'backing_up': False, + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': list([ + 'test-addon', + ]), + 'include_all_addons': True, + 'include_database': True, + 'include_folders': list([ + 'media', + ]), + 'name': 'test-name', + 'password': 'test-password', + }), + 'last_attempted_strategy_backup': '2024-10-26T04:45:00+01:00', + 'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'retention': dict({ + 'copies': 3, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data2] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data3] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': '2024-10-27T04:45:00+01:00', + 'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data4] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'mon', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data5] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 
'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'sat', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command0] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command0].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command0].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command10] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command10].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command10].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 
'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command1] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command1].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command1].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command2] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command2].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'mon', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command2].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 
'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'mon', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command3] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command3].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command3].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command4] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command4].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': list([ + 'test-addon', + ]), + 'include_all_addons': False, + 'include_database': True, + 'include_folders': list([ + 'media', + ]), + 'name': 'test-name', + 'password': 'test-password', + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command4].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': list([ + 'test-addon', + ]), + 'include_all_addons': False, + 'include_database': True, + 'include_folders': list([ + 'media', + ]), + 
'name': 'test-name', + 'password': 'test-password', + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command5] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command5].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command5].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command6] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command6].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command6].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 
'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command7] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command7].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command7].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command8] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command8].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command8].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 
'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command9] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command9].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command9].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update_errors[command0] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command0].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents0-backups0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents0-backups0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) 
+# --- +# name: test_delete[remote_agents0-backups0].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents1-backups1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), 'backups': list([ dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', 'name': 'Test', - 'path': 'abc123.tar', - 'size': 0.0, - 'slug': 'abc123', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, }), ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, }), 'success': True, 'type': 'result', }) # --- -# name: test_remove[with_hassio] +# name: test_delete[remote_agents1-backups1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents1-backups1].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents2-backups2] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents2-backups2].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents2-backups2].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents3-backups3] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 
'backup_id': 'def456', + 'database_included': False, + 'date': '1980-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test 2', + 'protected': False, + 'size': 1, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents3-backups3].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents3-backups3].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'def456', + 'database_included': False, + 'date': '1980-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test 2', + 'protected': False, + 'size': 1, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents4-backups4] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents4-backups4].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents4-backups4].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + 
dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + 'test.remote', + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 
'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data0] dict({ 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', + 'code': 'home_assistant_error', + 'message': 'Boom!', }), 'id': 1, 'success': False, 'type': 'result', }) # --- -# name: test_remove[without_hassio] +# name: test_delete_with_errors[side_effect1-storage_data0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data1] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Boom!', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + 'test.remote', + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents0-backups0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents1-backups1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents2-backups2] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 
'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents3-backups3] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents4-backups4] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details_with_errors[BackupAgentUnreachableError] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details_with_errors[side_effect0] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Boom!', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_generate[None] + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].1 dict({ 'id': 1, 'result': None, @@ -221,3 +2379,522 @@ 'type': 'result', }) # --- +# name: test_generate[None].2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].3 + dict({ + 'id': 2, + 'result': dict({ + 'backup_job_id': '27f5c632', + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[None].4 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'home_assistant', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].5 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'upload_to_agents', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].6 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'completed', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1] + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data1].2 + dict({ + 'event': dict({ + 
'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].3 + dict({ + 'id': 2, + 'result': dict({ + 'backup_job_id': '27f5c632', + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data1].4 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'home_assistant', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].5 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'upload_to_agents', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].6 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'completed', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2] + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data2].2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].3 + dict({ + 'id': 2, + 'result': dict({ + 'backup_job_id': '27f5c632', + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data2].4 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'home_assistant', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].5 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'upload_to_agents', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].6 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'completed', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_info[remote_agents0-remote_backups0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info[remote_agents1-remote_backups1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': 
None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info[remote_agents2-remote_backups2] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info[remote_agents3-remote_backups3] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'def456', + 'database_included': False, + 'date': '1980-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test 2', + 'protected': False, + 'size': 1, + 'with_strategy_settings': False, + }), + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info_with_errors[BackupAgentUnreachableError] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info_with_errors[side_effect0] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Boom!', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_restore_local_agent[backups0] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Backup abc123 not found in agent backup.local', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_restore_local_agent[backups0].1 + 0 +# --- +# name: test_restore_local_agent[backups1] + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: 
test_restore_local_agent[backups1].1 + 1 +# --- +# name: test_restore_remote_agent[remote_agents0-backups0] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Backup abc123 not found in agent test.remote', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_restore_remote_agent[remote_agents0-backups0].1 + 0 +# --- +# name: test_restore_remote_agent[remote_agents1-backups1] + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_restore_remote_agent[remote_agents1-backups1].1 + 1 +# --- +# name: test_subscribe_event + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_subscribe_event.1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_subscribe_event.2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- diff --git a/tests/components/backup/test_backup.py b/tests/components/backup/test_backup.py new file mode 100644 index 00000000000..02252ef6fa5 --- /dev/null +++ b/tests/components/backup/test_backup.py @@ -0,0 +1,129 @@ +"""Test the builtin backup platform.""" + +from __future__ import annotations + +from collections.abc import Generator +from io import StringIO +import json +from pathlib import Path +from tarfile import TarError +from unittest.mock import MagicMock, mock_open, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.backup import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .common import TEST_BACKUP_ABC123, TEST_BACKUP_PATH_ABC123 + +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +@pytest.fixture(name="read_backup") +def read_backup_fixture(path_glob: MagicMock) -> Generator[MagicMock]: + """Mock read backup.""" + with patch( + "homeassistant.components.backup.backup.read_backup", + return_value=TEST_BACKUP_ABC123, + ) as read_backup: + yield read_backup + + +@pytest.mark.parametrize( + "side_effect", + [ + None, + OSError("Boom"), + TarError("Boom"), + json.JSONDecodeError("Boom", "test", 1), + KeyError("Boom"), + ], +) +async def test_load_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + read_backup: MagicMock, + side_effect: Exception | None, +) -> None: + """Test load backups.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_ws_client(hass) + read_backup.side_effect = side_effect + + # list agents + await client.send_json_auto_id({"type": "backup/agents/info"}) + assert await client.receive_json() == snapshot + + # load and list backups + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +async def test_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test upload backup.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_client() + open_mock = mock_open() + + with ( + patch("pathlib.Path.open", open_mock), + patch("shutil.move") as move_mock, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=TEST_BACKUP_ABC123, + ), + ): + resp = await client.post( + "/api/backup/upload?agent_id=backup.local", + data={"file": 
StringIO("test")}, + ) + + assert resp.status == 201 + assert open_mock.call_count == 1 + assert move_mock.call_count == 1 + assert move_mock.mock_calls[0].args[1].name == "abc123.tar" + + +@pytest.mark.usefixtures("read_backup") +@pytest.mark.parametrize( + ("found_backups", "backup_exists", "unlink_calls"), + [ + ([TEST_BACKUP_PATH_ABC123], True, 1), + ([TEST_BACKUP_PATH_ABC123], False, 0), + (([], True, 0)), + ], +) +async def test_delete_backup( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + path_glob: MagicMock, + found_backups: list[Path], + backup_exists: bool, + unlink_calls: int, +) -> None: + """Test delete backup.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_ws_client(hass) + path_glob.return_value = found_backups + + with ( + patch("pathlib.Path.exists", return_value=backup_exists), + patch("pathlib.Path.unlink") as unlink, + ): + await client.send_json_auto_id( + {"type": "backup/delete", "backup_id": TEST_BACKUP_ABC123.backup_id} + ) + assert await client.receive_json() == snapshot + + assert unlink.call_count == unlink_calls diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py index baf1798534a..c071a0d8386 100644 --- a/tests/components/backup/test_http.py +++ b/tests/components/backup/test_http.py @@ -1,30 +1,34 @@ """Tests for the Backup integration.""" +import asyncio +from io import StringIO from unittest.mock import patch from aiohttp import web +import pytest +from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.core import HomeAssistant -from .common import TEST_BACKUP, setup_backup_integration +from .common import TEST_BACKUP_ABC123, BackupAgentTest, setup_backup_integration from tests.common import MockUser from tests.typing import ClientSessionGenerator -async def test_downloading_backup( +async def test_downloading_local_backup( hass: HomeAssistant, hass_client: ClientSessionGenerator, ) -> None: - """Test downloading a backup file.""" + """Test downloading a local backup file.""" await setup_backup_integration(hass) client = await hass_client() with ( patch( - "homeassistant.components.backup.http.BackupManager.get_backup", - return_value=TEST_BACKUP, + "homeassistant.components.backup.backup.CoreLocalBackupAgent.async_get_backup", + return_value=TEST_BACKUP_ABC123, ), patch("pathlib.Path.exists", return_value=True), patch( @@ -32,10 +36,29 @@ async def test_downloading_backup( return_value=web.Response(text=""), ), ): - resp = await client.get("/api/backup/download/abc123") + resp = await client.get("/api/backup/download/abc123?agent_id=backup.local") assert resp.status == 200 +async def test_downloading_remote_backup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test downloading a remote backup.""" + await setup_backup_integration(hass) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_client() + + with ( + patch.object(BackupAgentTest, "async_download_backup") as download_mock, + ): + download_mock.return_value.__aiter__.return_value = iter((b"backup data",)) + resp = await client.get("/api/backup/download/abc123?agent_id=domain.test") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + async def test_downloading_backup_not_found( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -45,20 +68,70 @@ async def 
test_downloading_backup_not_found( client = await hass_client() - resp = await client.get("/api/backup/download/abc123") + resp = await client.get("/api/backup/download/abc123?agent_id=backup.local") assert resp.status == 404 -async def test_non_admin( +async def test_downloading_as_non_admin( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_admin_user: MockUser, ) -> None: - """Test downloading a backup file that does not exist.""" + """Test downloading a backup file when you are not an admin.""" hass_admin_user.groups = [] await setup_backup_integration(hass) client = await hass_client() - resp = await client.get("/api/backup/download/abc123") + resp = await client.get("/api/backup/download/abc123?agent_id=backup.local") assert resp.status == 401 + + +async def test_uploading_a_backup_file( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test uploading a backup file.""" + await setup_backup_integration(hass) + + client = await hass_client() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_receive_backup", + ) as async_receive_backup_mock: + resp = await client.post( + "/api/backup/upload?agent_id=backup.local", + data={"file": StringIO("test")}, + ) + assert resp.status == 201 + assert async_receive_backup_mock.called + + +@pytest.mark.parametrize( + ("error", "message"), + [ + (OSError("Boom!"), "Can't write backup file: Boom!"), + (asyncio.CancelledError("Boom!"), ""), + ], +) +async def test_error_handling_uploading_a_backup_file( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + error: Exception, + message: str, +) -> None: + """Test error handling when uploading a backup file.""" + await setup_backup_integration(hass) + + client = await hass_client() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_receive_backup", + side_effect=error, + ): + resp = await client.post( + "/api/backup/upload?agent_id=backup.local", + data={"file": StringIO("test")}, + ) + assert resp.status == 500 + assert await resp.text() == message diff --git a/tests/components/backup/test_init.py b/tests/components/backup/test_init.py index 9fdfa978f94..16a49af9647 100644 --- a/tests/components/backup/test_init.py +++ b/tests/components/backup/test_init.py @@ -1,15 +1,18 @@ """Tests for the Backup integration.""" +from typing import Any from unittest.mock import patch import pytest -from homeassistant.components.backup.const import DOMAIN +from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceNotFound from .common import setup_backup_integration +@pytest.mark.usefixtures("supervisor_client") async def test_setup_with_hassio( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -20,25 +23,34 @@ async def test_setup_with_hassio( with_hassio=True, configuration={DOMAIN: {}}, ) - assert ( - "The backup integration is not supported on this installation method, please" - " remove it from your configuration" - ) in caplog.text + manager = hass.data[DATA_MANAGER] + assert not manager.backup_agents +@pytest.mark.parametrize("service_data", [None, {}]) async def test_create_service( hass: HomeAssistant, + service_data: dict[str, Any] | None, ) -> None: """Test generate backup.""" await setup_backup_integration(hass) with patch( - "homeassistant.components.backup.websocket.BackupManager.generate_backup", + "homeassistant.components.backup.manager.BackupManager.async_create_backup", ) as 
generate_backup: await hass.services.async_call( DOMAIN, "create", blocking=True, + service_data=service_data, ) assert generate_backup.called + + +async def test_create_service_with_hassio(hass: HomeAssistant) -> None: + """Test action backup.create does not exist with hassio.""" + await setup_backup_integration(hass, with_hassio=True) + + with pytest.raises(ServiceNotFound): + await hass.services.async_call(DOMAIN, "create", blocking=True) diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 41749298819..302f4e07011 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -2,197 +2,529 @@ from __future__ import annotations +import asyncio +from collections.abc import Generator +from io import StringIO +import json from pathlib import Path -from unittest.mock import AsyncMock, MagicMock, Mock, patch +from typing import Any +from unittest.mock import ANY, AsyncMock, MagicMock, Mock, call, mock_open, patch import pytest -from homeassistant.components.backup import BackupManager -from homeassistant.components.backup.manager import BackupPlatformProtocol +from homeassistant.components.backup import ( + DOMAIN, + AgentBackup, + BackupAgentPlatformProtocol, + BackupManager, + BackupPlatformProtocol, + Folder, + LocalBackupAgent, + backup as local_backup_platform, +) +from homeassistant.components.backup.const import DATA_MANAGER +from homeassistant.components.backup.manager import ( + BackupManagerState, + CoreBackupReaderWriter, + CreateBackupEvent, + CreateBackupStage, + CreateBackupState, + NewBackup, + WrittenBackup, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from .common import TEST_BACKUP +from .common import ( + LOCAL_AGENT_ID, + TEST_BACKUP_ABC123, + TEST_BACKUP_DEF456, + BackupAgentTest, +) from tests.common import MockPlatform, mock_platform +from tests.typing import ClientSessionGenerator, WebSocketGenerator + +_EXPECTED_FILES = [ + "test.txt", + ".storage", + "backups", + "backups/not_backup", + "tmp_backups", + "tmp_backups/not_backup", +] +_EXPECTED_FILES_WITH_DATABASE = { + True: [*_EXPECTED_FILES, "home-assistant_v2.db"], + False: _EXPECTED_FILES, +} -async def _mock_backup_generation(manager: BackupManager): - """Mock backup generator.""" +async def _setup_backup_platform( + hass: HomeAssistant, + *, + domain: str = "some_domain", + platform: BackupPlatformProtocol | BackupAgentPlatformProtocol | None = None, +) -> None: + """Set up a mock domain.""" + mock_platform(hass, f"{domain}.backup", platform or MockPlatform()) + assert await async_setup_component(hass, domain, {}) + await hass.async_block_till_done() - def _mock_iterdir(path: Path) -> list[Path]: - if not path.name.endswith("testing_config"): - return [] - return [ - Path("test.txt"), - Path(".DS_Store"), - Path(".storage"), - ] - with ( - patch( - "homeassistant.components.backup.manager.SecureTarFile" - ) as mocked_tarfile, - patch("pathlib.Path.iterdir", _mock_iterdir), - patch("pathlib.Path.stat", MagicMock(st_size=123)), - patch("pathlib.Path.is_file", lambda x: x.name != ".storage"), - patch( - "pathlib.Path.is_dir", - lambda x: x.name == ".storage", - ), - patch( - "pathlib.Path.exists", - lambda x: x != manager.backup_dir, - ), - patch( - "pathlib.Path.is_symlink", - lambda _: False, - ), - patch( - "pathlib.Path.mkdir", - MagicMock(), - ), - patch( - "homeassistant.components.backup.manager.json_bytes", - 
return_value=b"{}", # Empty JSON - ) as mocked_json_bytes, - patch( - "homeassistant.components.backup.manager.HAVERSION", - "2025.1.0", - ), - ): - await manager.generate_backup() +@pytest.fixture(autouse=True) +def mock_delay_save() -> Generator[None]: + """Mock the delay save constant.""" + with patch("homeassistant.components.backup.store.STORE_DELAY_SAVE", 0): + yield - assert mocked_json_bytes.call_count == 1 - backup_json_dict = mocked_json_bytes.call_args[0][0] - assert isinstance(backup_json_dict, dict) - assert backup_json_dict["homeassistant"] == {"version": "2025.1.0"} - assert manager.backup_dir.as_posix() in str( - mocked_tarfile.call_args_list[0][0][0] + +@pytest.fixture(name="generate_backup_id") +def generate_backup_id_fixture() -> Generator[MagicMock]: + """Mock generate backup id.""" + with patch("homeassistant.components.backup.manager._generate_backup_id") as mock: + mock.return_value = "abc123" + yield mock + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_async_create_backup( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mocked_json_bytes: Mock, + mocked_tarfile: Mock, +) -> None: + """Test create backup.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + new_backup = NewBackup(backup_job_id="time-123") + backup_task = AsyncMock( + return_value=WrittenBackup( + backup=TEST_BACKUP_ABC123, + open_stream=AsyncMock(), + release_stream=AsyncMock(), + ), + )() # call it so that it can be awaited + + with patch( + "homeassistant.components.backup.manager.CoreBackupReaderWriter.async_create_backup", + return_value=(new_backup, backup_task), + ) as create_backup: + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) + + assert create_backup.called + assert create_backup.call_args == call( + agent_ids=["backup.local"], + backup_name="Core 2025.1.0", + include_addons=None, + include_all_addons=False, + include_database=True, + include_folders=None, + include_homeassistant=True, + on_progress=ANY, + password=None, + ) + + +async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None: + """Test generate backup.""" + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) + manager.last_event = CreateBackupEvent( + stage=None, state=CreateBackupState.IN_PROGRESS + ) + with pytest.raises(HomeAssistantError, match="Backup manager busy"): + await manager.async_create_backup( + agent_ids=[LOCAL_AGENT_ID], + include_addons=[], + include_all_addons=False, + include_database=True, + include_folders=[], + include_homeassistant=True, + name=None, + password=None, ) -async def _setup_mock_domain( - hass: HomeAssistant, - platform: BackupPlatformProtocol | None = None, -) -> None: - """Set up a mock domain.""" - mock_platform(hass, "some_domain.backup", platform or MockPlatform()) - assert await async_setup_component(hass, "some_domain", {}) - - -async def test_constructor(hass: HomeAssistant) -> None: - """Test BackupManager constructor.""" - manager = BackupManager(hass) - assert manager.backup_dir.as_posix() == hass.config.path("backups") - - -async def test_load_backups(hass: HomeAssistant) -> None: - """Test loading backups.""" - manager = BackupManager(hass) - with ( - patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]), - patch("tarfile.open", return_value=MagicMock()), - patch( - "homeassistant.components.backup.manager.json_loads_object", - return_value={ - "slug": TEST_BACKUP.slug, - "name": TEST_BACKUP.name, - "date": TEST_BACKUP.date, - }, 
+@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ({"agent_ids": []}, "At least one agent must be selected"), + ({"agent_ids": ["non_existing"]}, "Invalid agent selected"), + ( + {"include_addons": ["ssl"], "include_all_addons": True}, + "Cannot include all addons and specify specific addons", ), - patch( - "pathlib.Path.stat", - return_value=MagicMock(st_size=TEST_BACKUP.size), - ), - ): - await manager.load_backups() - backups = await manager.get_backups() - assert backups == {TEST_BACKUP.slug: TEST_BACKUP} - - -async def test_load_backups_with_exception( + ({"include_homeassistant": False}, "Home Assistant must be included in backup"), + ], +) +async def test_create_backup_wrong_parameters( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + hass_ws_client: WebSocketGenerator, + parameters: dict[str, Any], + expected_error: str, ) -> None: - """Test loading backups with exception.""" - manager = BackupManager(hass) - with ( - patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]), - patch("tarfile.open", side_effect=OSError("Test exception")), - ): - await manager.load_backups() - backups = await manager.get_backups() - assert f"Unable to read backup {TEST_BACKUP.path}: Test exception" in caplog.text - assert backups == {} + """Test create backup with wrong parameters.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + ws_client = await hass_ws_client(hass) + + default_parameters = { + "agent_ids": [LOCAL_AGENT_ID], + "include_addons": [], + "include_all_addons": False, + "include_database": True, + "include_folders": [], + "include_homeassistant": True, + } + + await ws_client.send_json_auto_id( + {"type": "backup/generate"} | default_parameters | parameters + ) + result = await ws_client.receive_json() + + assert result["success"] is False + assert result["error"]["code"] == "home_assistant_error" + assert result["error"]["message"] == expected_error -async def test_removing_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test removing backup.""" - manager = BackupManager(hass) - manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} - manager.loaded_backups = True - - with patch("pathlib.Path.exists", return_value=True): - await manager.remove_backup(TEST_BACKUP.slug) - assert "Removed backup located at" in caplog.text - - -async def test_removing_non_existing_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test removing not existing backup.""" - manager = BackupManager(hass) - - await manager.remove_backup("non_existing") - assert "Removed backup located at" not in caplog.text - - -async def test_getting_backup_that_does_not_exist( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test getting backup that does not exist.""" - manager = BackupManager(hass) - manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} - manager.loaded_backups = True - - with patch("pathlib.Path.exists", return_value=False): - backup = await manager.get_backup(TEST_BACKUP.slug) - assert backup is None - - assert ( - f"Removing tracked backup ({TEST_BACKUP.slug}) that " - f"does not exists on the expected path {TEST_BACKUP.path}" - ) in caplog.text - - -async def test_generate_backup_when_backing_up(hass: HomeAssistant) -> None: - """Test generate backup.""" - manager = BackupManager(hass) - manager.backing_up = True - with pytest.raises(HomeAssistantError, match="Backup already in progress"): - await manager.generate_backup() - - -async def 
test_generate_backup( +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("agent_ids", "backup_directory", "temp_file_unlink_call_count"), + [ + ([LOCAL_AGENT_ID], "backups", 0), + (["test.remote"], "tmp_backups", 1), + ([LOCAL_AGENT_ID, "test.remote"], "backups", 0), + ], +) +@pytest.mark.parametrize( + "params", + [ + {}, + {"include_database": True, "name": "abc123"}, + {"include_database": False}, + {"password": "pass123"}, + ], +) +async def test_async_initiate_backup( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, + mocked_json_bytes: Mock, + mocked_tarfile: Mock, + generate_backup_id: MagicMock, + path_glob: MagicMock, + params: dict[str, Any], + agent_ids: list[str], + backup_directory: str, + temp_file_unlink_call_count: int, ) -> None: """Test generate backup.""" - manager = BackupManager(hass) - manager.loaded_backups = True + local_agent = local_backup_platform.CoreLocalBackupAgent(hass) + remote_agent = BackupAgentTest("remote", backups=[]) + agents = { + f"backup.{local_agent.name}": local_agent, + f"test.{remote_agent.name}": remote_agent, + } + with patch( + "homeassistant.components.backup.backup.async_get_backup_agents" + ) as core_get_backup_agents: + core_get_backup_agents.return_value = [local_agent] + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) - await _mock_backup_generation(manager) + ws_client = await hass_ws_client(hass) - assert "Generated new backup with slug " in caplog.text - assert "Creating backup directory" in caplog.text - assert "Loaded 0 platforms" in caplog.text + include_database = params.get("include_database", True) + name = params.get("name", "Core 2025.1.0") + password = params.get("password") + path_glob.return_value = [] + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + + assert result["success"] is True + assert result["result"] == { + "backups": [], + "agent_errors": {}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + with ( + patch("pathlib.Path.open", mock_open(read_data=b"test")), + patch("pathlib.Path.unlink") as unlink_mock, + ): + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": agent_ids} | params + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.IN_PROGRESS, + } + result = await ws_client.receive_json() + assert result["success"] is True + + backup_id = result["result"]["backup_job_id"] + assert backup_id == generate_backup_id.return_value + + await hass.async_block_till_done() + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.HOME_ASSISTANT, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + 
"stage": CreateBackupStage.UPLOAD_TO_AGENTS, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.COMPLETED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + assert unlink_mock.call_count == temp_file_unlink_call_count + + assert mocked_json_bytes.call_count == 1 + backup_json_dict = mocked_json_bytes.call_args[0][0] + assert isinstance(backup_json_dict, dict) + assert backup_json_dict == { + "compressed": True, + "date": ANY, + "homeassistant": { + "exclude_database": not include_database, + "version": "2025.1.0", + }, + "name": name, + "protected": bool(password), + "slug": ANY, + "type": "partial", + "version": 2, + } + + await ws_client.send_json_auto_id( + {"type": "backup/details", "backup_id": backup_id} + ) + result = await ws_client.receive_json() + + backup_data = result["result"]["backup"] + backup_agent_ids = backup_data.pop("agent_ids") + + assert backup_agent_ids == agent_ids + + backup = AgentBackup.from_dict(backup_data) + + assert backup == AgentBackup( + addons=[], + backup_id=ANY, + database_included=include_database, + date=ANY, + folders=[], + homeassistant_included=True, + homeassistant_version="2025.1.0", + name=name, + protected=bool(password), + size=ANY, + ) + for agent_id in agent_ids: + agent = agents[agent_id] + assert len(agent._backups) == 1 + agent_backup = agent._backups[backup.backup_id] + assert agent_backup.backup_id == backup.backup_id + assert agent_backup.date == backup.date + assert agent_backup.name == backup.name + assert agent_backup.protected == backup.protected + assert agent_backup.size == backup.size + + outer_tar = mocked_tarfile.return_value + core_tar = outer_tar.create_inner_tar.return_value.__enter__.return_value + expected_files = [call(hass.config.path(), arcname="data", recursive=False)] + [ + call(file, arcname=f"data/{file}", recursive=False) + for file in _EXPECTED_FILES_WITH_DATABASE[include_database] + ] + assert core_tar.add.call_args_list == expected_files + + tar_file_path = str(mocked_tarfile.call_args_list[0][0][0]) + backup_directory = hass.config.path(backup_directory) + assert tar_file_path == f"{backup_directory}/{backup.backup_id}.tar" + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_async_initiate_backup_with_agent_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mocked_json_bytes: Mock, + mocked_tarfile: Mock, + generate_backup_id: MagicMock, + path_glob: MagicMock, + hass_storage: dict[str, Any], +) -> None: + """Test generate backup.""" + agent_ids = [LOCAL_AGENT_ID, "test.remote"] + local_agent = local_backup_platform.CoreLocalBackupAgent(hass) + remote_agent = BackupAgentTest("remote", backups=[]) + + with patch( + "homeassistant.components.backup.backup.async_get_backup_agents" + ) as core_get_backup_agents: + core_get_backup_agents.return_value = [local_agent] + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + ws_client = await hass_ws_client(hass) + + path_glob.return_value = [] + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + + assert result["success"] 
is True + assert result["result"] == { + "backups": [], + "agent_errors": {}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + with ( + patch("pathlib.Path.open", mock_open(read_data=b"test")), + patch.object( + remote_agent, "async_upload_backup", side_effect=Exception("Test exception") + ), + ): + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": agent_ids} + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.IN_PROGRESS, + } + result = await ws_client.receive_json() + assert result["success"] is True + + backup_id = result["result"]["backup_job_id"] + assert backup_id == generate_backup_id.return_value + + await hass.async_block_till_done() + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.HOME_ASSISTANT, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.UPLOAD_TO_AGENTS, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.COMPLETED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + expected_backup_data = { + "addons": [], + "agent_ids": ["backup.local"], + "backup_id": "abc123", + "database_included": True, + "date": ANY, + "failed_agent_ids": ["test.remote"], + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2025.1.0", + "name": "Core 2025.1.0", + "protected": False, + "size": 123, + "with_strategy_settings": False, + } + + await ws_client.send_json_auto_id( + {"type": "backup/details", "backup_id": backup_id} + ) + result = await ws_client.receive_json() + assert result["result"] == { + "agent_errors": {}, + "backup": expected_backup_data, + } + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + assert result["result"] == { + "agent_errors": {}, + "backups": [expected_backup_data], + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + await hass.async_block_till_done() + assert hass_storage[DOMAIN]["data"]["backups"] == [ + { + "backup_id": "abc123", + "failed_agent_ids": ["test.remote"], + "with_strategy_settings": False, + } + ] async def test_loading_platforms( @@ -200,136 +532,449 @@ async def test_loading_platforms( caplog: pytest.LogCaptureFixture, ) -> None: """Test loading backup platforms.""" - manager = BackupManager(hass) + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) - assert not manager.loaded_platforms assert not manager.platforms - await _setup_mock_domain( + get_agents_mock = AsyncMock(return_value=[]) + + await _setup_backup_platform( hass, - Mock( + platform=Mock( async_pre_backup=AsyncMock(), async_post_backup=AsyncMock(), + async_get_backup_agents=get_agents_mock, ), ) await 
manager.load_platforms() await hass.async_block_till_done() - assert manager.loaded_platforms assert len(manager.platforms) == 1 - assert "Loaded 1 platforms" in caplog.text + get_agents_mock.assert_called_once_with(hass) + +class LocalBackupAgentTest(BackupAgentTest, LocalBackupAgent): + """Local backup agent.""" + + def get_backup_path(self, backup_id: str) -> Path: + """Return the local path to a backup.""" + return "test.tar" + + +@pytest.mark.parametrize( + ("agent_class", "num_local_agents"), + [(LocalBackupAgentTest, 2), (BackupAgentTest, 1)], +) +async def test_loading_platform_with_listener( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + agent_class: type[BackupAgentTest], + num_local_agents: int, +) -> None: + """Test loading a backup agent platform which can be listened to.""" + ws_client = await hass_ws_client(hass) + assert await async_setup_component(hass, DOMAIN, {}) + manager = hass.data[DATA_MANAGER] + + get_agents_mock = AsyncMock(return_value=[agent_class("remote1", backups=[])]) + register_listener_mock = Mock() + + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=get_agents_mock, + async_register_backup_agents_listener=register_listener_mock, + ), + ) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id({"type": "backup/agents/info"}) + resp = await ws_client.receive_json() + assert resp["result"]["agents"] == [ + {"agent_id": "backup.local"}, + {"agent_id": "test.remote1"}, + ] + assert len(manager.local_backup_agents) == num_local_agents + + get_agents_mock.assert_called_once_with(hass) + register_listener_mock.assert_called_once_with(hass, listener=ANY) + + get_agents_mock.reset_mock() + get_agents_mock.return_value = [agent_class("remote2", backups=[])] + listener = register_listener_mock.call_args[1]["listener"] + listener() + + get_agents_mock.assert_called_once_with(hass) + await ws_client.send_json_auto_id({"type": "backup/agents/info"}) + resp = await ws_client.receive_json() + assert resp["result"]["agents"] == [ + {"agent_id": "backup.local"}, + {"agent_id": "test.remote2"}, + ] + assert len(manager.local_backup_agents) == num_local_agents + + +@pytest.mark.parametrize( + "platform_mock", + [ + Mock(async_pre_backup=AsyncMock(), spec=["async_pre_backup"]), + Mock(async_post_backup=AsyncMock(), spec=["async_post_backup"]), + Mock(spec=[]), + ], +) async def test_not_loading_bad_platforms( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + platform_mock: Mock, ) -> None: - """Test loading backup platforms.""" - manager = BackupManager(hass) - - assert not manager.loaded_platforms - assert not manager.platforms - - await _setup_mock_domain(hass) - await manager.load_platforms() + """Test not loading bad backup platforms.""" + await _setup_backup_platform( + hass, + domain="test", + platform=platform_mock, + ) + assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - assert manager.loaded_platforms - assert len(manager.platforms) == 0 - - assert "Loaded 0 platforms" in caplog.text - assert ( - "some_domain does not implement required functions for the backup platform" - in caplog.text - ) + assert platform_mock.mock_calls == [] -async def test_exception_plaform_pre(hass: HomeAssistant) -> None: +async def test_exception_platform_pre( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Test exception in pre step.""" - manager = BackupManager(hass) - manager.loaded_backups = True async def _mock_step(hass: 
HomeAssistant) -> None: raise HomeAssistantError("Test exception") - await _setup_mock_domain( + remote_agent = BackupAgentTest("remote", backups=[]) + await _setup_backup_platform( hass, - Mock( + domain="test", + platform=Mock( async_pre_backup=_mock_step, async_post_backup=AsyncMock(), + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), ), ) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() - with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager) + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) + + assert "Generating backup failed" in caplog.text + assert "Test exception" in caplog.text -async def test_exception_plaform_post(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_backup_generation") +async def test_exception_platform_post( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: """Test exception in post step.""" - manager = BackupManager(hass) - manager.loaded_backups = True async def _mock_step(hass: HomeAssistant) -> None: raise HomeAssistantError("Test exception") - await _setup_mock_domain( + remote_agent = BackupAgentTest("remote", backups=[]) + await _setup_backup_platform( hass, - Mock( + domain="test", + platform=Mock( async_pre_backup=AsyncMock(), async_post_backup=_mock_step, + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), ), ) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() - with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager) + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) + + assert "Generating backup failed" in caplog.text + assert "Test exception" in caplog.text -async def test_loading_platforms_when_running_pre_backup_actions( +@pytest.mark.parametrize( + ( + "agent_id_params", + "open_call_count", + "move_call_count", + "move_path_names", + "remote_agent_backups", + "remote_agent_backup_data", + "temp_file_unlink_call_count", + ), + [ + ( + "agent_id=backup.local&agent_id=test.remote", + 2, + 1, + ["abc123.tar"], + {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123}, + b"test", + 0, + ), + ( + "agent_id=backup.local", + 1, + 1, + ["abc123.tar"], + {}, + None, + 0, + ), + ( + "agent_id=test.remote", + 2, + 0, + [], + {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123}, + b"test", + 1, + ), + ], +) +async def test_receive_backup( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + hass_client: ClientSessionGenerator, + agent_id_params: str, + open_call_count: int, + move_call_count: int, + move_path_names: list[str], + remote_agent_backups: dict[str, AgentBackup], + remote_agent_backup_data: bytes | None, + temp_file_unlink_call_count: int, ) -> None: - """Test loading backup platforms when running post backup actions.""" - manager = BackupManager(hass) - - assert not manager.loaded_platforms - assert not manager.platforms - - await _setup_mock_domain( + """Test receive backup and upload to the local and a remote agent.""" + remote_agent = BackupAgentTest("remote", backups=[]) + await _setup_backup_platform( hass, - Mock( - async_pre_backup=AsyncMock(), - async_post_backup=AsyncMock(), + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, ), ) - await manager.pre_backup_actions() + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_client() - assert 
manager.loaded_platforms - assert len(manager.platforms) == 1 + upload_data = "test" + open_mock = mock_open(read_data=upload_data.encode(encoding="utf-8")) - assert "Loaded 1 platforms" in caplog.text + with ( + patch("pathlib.Path.open", open_mock), + patch("shutil.move") as move_mock, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=TEST_BACKUP_ABC123, + ), + patch("pathlib.Path.unlink") as unlink_mock, + ): + resp = await client.post( + f"/api/backup/upload?{agent_id_params}", + data={"file": StringIO(upload_data)}, + ) + await hass.async_block_till_done() + + assert resp.status == 201 + assert open_mock.call_count == open_call_count + assert move_mock.call_count == move_call_count + for index, name in enumerate(move_path_names): + assert move_mock.call_args_list[index].args[1].name == name + assert remote_agent._backups == remote_agent_backups + assert remote_agent._backup_data == remote_agent_backup_data + assert unlink_mock.call_count == temp_file_unlink_call_count -async def test_loading_platforms_when_running_post_backup_actions( +@pytest.mark.usefixtures("mock_backup_generation") +async def test_receive_backup_busy_manager( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, ) -> None: - """Test loading backup platforms when running post backup actions.""" - manager = BackupManager(hass) + """Test receive backup with a busy manager.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_client() + ws_client = await hass_ws_client(hass) - assert not manager.loaded_platforms - assert not manager.platforms + upload_data = "test" - await _setup_mock_domain( + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": "idle"} + + result = await ws_client.receive_json() + assert result["success"] is True + + new_backup = NewBackup(backup_job_id="time-123") + backup_task: asyncio.Future[WrittenBackup] = asyncio.Future() + with patch( + "homeassistant.components.backup.manager.CoreBackupReaderWriter.async_create_backup", + return_value=(new_backup, backup_task), + ) as create_backup: + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["backup.local"]} + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + result = await ws_client.receive_json() + assert result["success"] is True + assert result["result"] == {"backup_job_id": "time-123"} + + assert create_backup.call_count == 1 + + resp = await client.post( + "/api/backup/upload?agent_id=backup.local", + data={"file": StringIO(upload_data)}, + ) + + assert resp.status == 500 + assert ( + await resp.text() + == "Can't upload backup file: Backup manager busy: create_backup" + ) + + # finish the backup + backup_task.set_result( + WrittenBackup( + backup=TEST_BACKUP_ABC123, + open_stream=AsyncMock(), + release_stream=AsyncMock(), + ) + ) + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ("agent_id", "password", "restore_database", "restore_homeassistant", "dir"), + [ + (LOCAL_AGENT_ID, None, True, False, "backups"), + (LOCAL_AGENT_ID, "abc123", False, True, "backups"), + ("test.remote", None, True, True, "tmp_backups"), + ], +) +async def test_async_trigger_restore( + hass: HomeAssistant, + agent_id: str, + password: 
str | None, + restore_database: bool, + restore_homeassistant: bool, + dir: str, +) -> None: + """Test trigger restore.""" + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) + hass.data[DATA_MANAGER] = manager + + await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await _setup_backup_platform( hass, - Mock( - async_pre_backup=AsyncMock(), - async_post_backup=AsyncMock(), + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock( + return_value=[BackupAgentTest("remote", backups=[TEST_BACKUP_ABC123])] + ), + spec_set=BackupAgentPlatformProtocol, ), ) - await manager.post_backup_actions() + await manager.load_platforms() - assert manager.loaded_platforms - assert len(manager.platforms) == 1 + local_agent = manager.backup_agents[LOCAL_AGENT_ID] + local_agent._backups = {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123} + local_agent._loaded_backups = True - assert "Loaded 1 platforms" in caplog.text + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.open"), + patch("pathlib.Path.write_text") as mocked_write_text, + patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + patch.object(BackupAgentTest, "async_download_backup") as download_mock, + ): + download_mock.return_value.__aiter__.return_value = iter((b"backup data",)) + await manager.async_restore_backup( + TEST_BACKUP_ABC123.backup_id, + agent_id=agent_id, + password=password, + restore_addons=None, + restore_database=restore_database, + restore_folders=None, + restore_homeassistant=restore_homeassistant, + ) + expected_restore_file = json.dumps( + { + "path": f"{hass.config.path()}/{dir}/abc123.tar", + "password": password, + "remove_after_restore": agent_id != LOCAL_AGENT_ID, + "restore_database": restore_database, + "restore_homeassistant": restore_homeassistant, + } + ) + assert mocked_write_text.call_args[0][0] == expected_restore_file + assert mocked_service_call.called + + +@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ( + {"backup_id": TEST_BACKUP_DEF456.backup_id}, + "Backup def456 not found", + ), + ( + {"restore_addons": ["blah"]}, + "Addons and folders are not supported in core restore", + ), + ( + {"restore_folders": [Folder.ADDONS]}, + "Addons and folders are not supported in core restore", + ), + ( + {"restore_database": False, "restore_homeassistant": False}, + "Home Assistant or database must be included in restore", + ), + ], +) +async def test_async_trigger_restore_wrong_parameters( + hass: HomeAssistant, parameters: dict[str, Any], expected_error: str +) -> None: + """Test trigger restore.""" + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) + + await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await manager.load_platforms() + + local_agent = manager.backup_agents[LOCAL_AGENT_ID] + local_agent._backups = {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123} + local_agent._loaded_backups = True + + default_parameters = { + "agent_id": LOCAL_AGENT_ID, + "backup_id": TEST_BACKUP_ABC123.backup_id, + "password": None, + "restore_addons": None, + "restore_database": True, + "restore_folders": None, + "restore_homeassistant": True, + } + + with ( + patch("pathlib.Path.exists", return_value=True), + pytest.raises(HomeAssistantError, match=expected_error), + ): + await manager.async_restore_backup(**(default_parameters | parameters)) diff --git a/tests/components/backup/test_models.py b/tests/components/backup/test_models.py new file mode 100644 index 
00000000000..6a547f40dc3 --- /dev/null +++ b/tests/components/backup/test_models.py @@ -0,0 +1,11 @@ +"""Tests for the Backup integration.""" + +from homeassistant.components.backup import AgentBackup + +from .common import TEST_BACKUP_ABC123 + + +async def test_agent_backup_serialization() -> None: + """Test AgentBackup serialization.""" + + assert AgentBackup.from_dict(TEST_BACKUP_ABC123.as_dict()) == TEST_BACKUP_ABC123 diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index e11278202e0..4a94689c19e 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1,17 +1,72 @@ """Tests for the Backup integration.""" -from unittest.mock import patch +from collections.abc import Generator +from typing import Any +from unittest.mock import ANY, AsyncMock, MagicMock, call, patch +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from homeassistant.components.backup import AgentBackup, BackupAgentError +from homeassistant.components.backup.agent import BackupAgentUnreachableError +from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN +from homeassistant.components.backup.manager import ( + CreateBackupEvent, + CreateBackupState, + ManagerBackup, + NewBackup, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .common import TEST_BACKUP, setup_backup_integration +from .common import ( + LOCAL_AGENT_ID, + TEST_BACKUP_ABC123, + TEST_BACKUP_DEF456, + BackupAgentTest, + setup_backup_integration, +) +from tests.common import async_fire_time_changed, async_mock_service from tests.typing import WebSocketGenerator +BACKUP_CALL = call( + agent_ids=["test.test-agent"], + backup_name="test-name", + include_addons=["test-addon"], + include_all_addons=False, + include_database=True, + include_folders=["media"], + include_homeassistant=True, + password="test-password", + on_progress=ANY, +) + +DEFAULT_STORAGE_DATA: dict[str, Any] = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": [], + "include_addons": None, + "include_all_addons": False, + "include_database": True, + "include_folders": None, + "name": None, + "password": None, + }, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "retention": { + "copies": None, + "days": None, + }, + "schedule": { + "state": "never", + }, + }, +} + @pytest.fixture def sync_access_token_proxy( @@ -25,84 +80,542 @@ def sync_access_token_proxy( return request.getfixturevalue(access_token_fixture_name) +@pytest.fixture(autouse=True) +def mock_delay_save() -> Generator[None]: + """Mock the delay save constant.""" + with patch("homeassistant.components.backup.store.STORE_DELAY_SAVE", 0): + yield + + +@pytest.fixture(name="delete_backup") +def mock_delete_backup() -> Generator[AsyncMock]: + """Mock manager delete backup.""" + with patch( + "homeassistant.components.backup.BackupManager.async_delete_backup" + ) as mock_delete_backup: + yield mock_delete_backup + + +@pytest.fixture(name="get_backups") +def mock_get_backups() -> Generator[AsyncMock]: + """Mock manager get backups.""" + with patch( + "homeassistant.components.backup.BackupManager.async_get_backups" + ) as mock_get_backups: + yield mock_get_backups + + @pytest.mark.parametrize( - "with_hassio", + ("remote_agents", "remote_backups"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ([], {}), + (["remote"], {}), 
+ (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_DEF456]}), ], ) async def test_info( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + remote_backups: dict[str, list[AgentBackup]], snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: """Test getting backup info.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration( + hass, + with_hassio=False, + backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} | remote_backups, + remote_agents=remote_agents, + ) client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.websocket.BackupManager.get_backups", - return_value={TEST_BACKUP.slug: TEST_BACKUP}, - ): - await client.send_json_auto_id({"type": "backup/info"}) - assert snapshot == await client.receive_json() + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot @pytest.mark.parametrize( - "with_hassio", + "side_effect", [HomeAssistantError("Boom!"), BackupAgentUnreachableError] +) +async def test_info_with_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + side_effect: Exception, + snapshot: SnapshotAssertion, +) -> None: + """Test getting backup info with one unavailable agent.""" + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch.object(BackupAgentTest, "async_list_backups", side_effect=side_effect): + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.parametrize( + ("remote_agents", "backups"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ([], {}), + (["remote"], {LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_DEF456]}), + ( + ["remote"], + { + LOCAL_AGENT_ID: [TEST_BACKUP_ABC123], + "test.remote": [TEST_BACKUP_ABC123], + }, + ), ], ) -async def test_remove( +async def test_details( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + backups: dict[str, list[AgentBackup]], + snapshot: SnapshotAssertion, +) -> None: + """Test getting backup info.""" + await setup_backup_integration( + hass, with_hassio=False, backups=backups, remote_agents=remote_agents + ) + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch("pathlib.Path.exists", return_value=True): + await client.send_json_auto_id( + {"type": "backup/details", "backup_id": "abc123"} + ) + assert await client.receive_json() == snapshot + + +@pytest.mark.parametrize( + "side_effect", [HomeAssistantError("Boom!"), BackupAgentUnreachableError] +) +async def test_details_with_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + side_effect: Exception, + snapshot: SnapshotAssertion, +) -> None: + """Test getting backup info with one unavailable agent.""" + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with ( + 
patch("pathlib.Path.exists", return_value=True), + patch.object(BackupAgentTest, "async_get_backup", side_effect=side_effect), + ): + await client.send_json_auto_id( + {"type": "backup/details", "backup_id": "abc123"} + ) + assert await client.receive_json() == snapshot + + +@pytest.mark.parametrize( + ("remote_agents", "backups"), + [ + ([], {}), + (["remote"], {LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_DEF456]}), + ( + ["remote"], + { + LOCAL_AGENT_ID: [TEST_BACKUP_ABC123], + "test.remote": [TEST_BACKUP_ABC123], + }, + ), + ], +) +async def test_delete( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + backups: dict[str, list[AgentBackup]], + snapshot: SnapshotAssertion, +) -> None: + """Test deleting a backup file.""" + await setup_backup_integration( + hass, with_hassio=False, backups=backups, remote_agents=remote_agents + ) + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id({"type": "backup/delete", "backup_id": "abc123"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.parametrize( + "storage_data", + [ + DEFAULT_STORAGE_DATA, + DEFAULT_STORAGE_DATA + | { + "backups": [ + { + "backup_id": "abc123", + "failed_agent_ids": ["test.remote"], + "with_strategy_settings": False, + } + ] + }, + ], +) +@pytest.mark.parametrize( + "side_effect", [None, HomeAssistantError("Boom!"), BackupAgentUnreachableError] +) +async def test_delete_with_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + side_effect: Exception, + storage_data: dict[str, Any] | None, + snapshot: SnapshotAssertion, +) -> None: + """Test deleting a backup with one unavailable agent.""" + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch.object(BackupAgentTest, "async_delete_backup", side_effect=side_effect): + await client.send_json_auto_id({"type": "backup/delete", "backup_id": "abc123"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +async def test_agent_delete_backup( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: - """Test removing a backup file.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + """Test deleting a backup file with a mock agent.""" + await setup_backup_integration(hass) + hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")} client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.websocket.BackupManager.remove_backup", - ): - await client.send_json_auto_id({"type": "backup/remove", "slug": "abc123"}) - assert snapshot == await client.receive_json() + with patch.object(BackupAgentTest, "async_delete_backup") as delete_mock: + 
await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": "abc123", + } + ) + assert await client.receive_json() == snapshot + + assert delete_mock.call_args == call("abc123") @pytest.mark.parametrize( - "with_hassio", + "data", [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + None, + {}, + {"password": "abc123"}, ], ) +@pytest.mark.usefixtures("mock_backup_generation") async def test_generate( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + data: dict[str, Any] | None, + freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: """Test generating a backup.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + freezer.move_to("2024-11-13 12:01:00+01:00") + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + assert await client.receive_json() == snapshot + await client.send_json_auto_id( + {"type": "backup/generate", **{"agent_ids": ["backup.local"]} | (data or {})} + ) + for _ in range(6): + assert await client.receive_json() == snapshot + + +@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ( + {"include_homeassistant": False}, + "Home Assistant must be included in backup", + ), + ( + {"include_addons": ["blah"]}, + "Addons and folders are not supported by core backup", + ), + ( + {"include_all_addons": True}, + "Addons and folders are not supported by core backup", + ), + ( + {"include_folders": ["ssl"]}, + "Addons and folders are not supported by core backup", + ), + ], +) +async def test_generate_wrong_parameters( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + parameters: dict[str, Any], + expected_error: str, +) -> None: + """Test generating a backup.""" + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + + default_parameters = {"type": "backup/generate", "agent_ids": ["backup.local"]} + + await client.send_json_auto_id(default_parameters | parameters) + response = await client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": expected_error, + } + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("params", "expected_extra_call_params"), + [ + ({"agent_ids": ["backup.local"]}, {"agent_ids": ["backup.local"]}), + ( + { + "agent_ids": ["backup.local"], + "include_database": False, + "name": "abc123", + }, + { + "agent_ids": ["backup.local"], + "include_addons": None, + "include_database": False, + "include_folders": None, + "name": "abc123", + }, + ), + ], +) +async def test_generate_calls_create( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, + params: dict[str, Any], + expected_extra_call_params: dict[str, Any], +) -> None: + """Test translation of WS parameter to backup/generate to async_initiate_backup.""" + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + freezer.move_to("2024-11-13 12:01:00+01:00") + await hass.async_block_till_done() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_initiate_backup", + return_value=NewBackup(backup_job_id="abc123"), + ) as generate_backup: + await client.send_json_auto_id({"type": "backup/generate"} | params) + result = await 
client.receive_json() + assert result["success"] + assert result["result"] == {"backup_job_id": "abc123"} + generate_backup.assert_called_once_with( + **{ + "include_all_addons": False, + "include_homeassistant": True, + "include_addons": None, + "include_database": True, + "include_folders": None, + "name": None, + "password": None, + } + | expected_extra_call_params + ) + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("create_backup_settings", "expected_call_params"), + [ + ( + {}, + { + "agent_ids": [], + "include_addons": None, + "include_all_addons": False, + "include_database": True, + "include_folders": None, + "include_homeassistant": True, + "name": None, + "password": None, + "with_strategy_settings": True, + }, + ), + ( + { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "include_homeassistant": True, + "name": "test-name", + "password": "test-password", + "with_strategy_settings": True, + }, + ), + ], +) +async def test_generate_with_default_settings_calls_create( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, + create_backup_settings: dict[str, Any], + expected_call_params: dict[str, Any], +) -> None: + """Test backup/generate_with_strategy_settings calls async_initiate_backup.""" + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + freezer.move_to("2024-11-13 12:01:00+01:00") + await hass.async_block_till_done() + + await client.send_json_auto_id( + {"type": "backup/config/update", "create_backup": create_backup_settings} + ) + result = await client.receive_json() + assert result["success"] + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_initiate_backup", + return_value=NewBackup(backup_job_id="abc123"), + ) as generate_backup: + await client.send_json_auto_id( + {"type": "backup/generate_with_strategy_settings"} + ) + result = await client.receive_json() + assert result["success"] + assert result["result"] == {"backup_job_id": "abc123"} + generate_backup.assert_called_once_with(**expected_call_params) + + +@pytest.mark.parametrize( + "backups", + [ + {}, + {LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]}, + ], +) +async def test_restore_local_agent( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + backups: dict[str, list[AgentBackup]], + snapshot: SnapshotAssertion, +) -> None: + """Test calling the restore command.""" + await setup_backup_integration(hass, with_hassio=False, backups=backups) + restart_calls = async_mock_service(hass, "homeassistant", "restart") client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.websocket.BackupManager.generate_backup", - return_value=TEST_BACKUP, + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text"), ): - await client.send_json_auto_id({"type": "backup/generate"}) - assert snapshot == await client.receive_json() + await client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": "abc123", + "agent_id": "backup.local", + } + ) + assert await client.receive_json() == snapshot + assert 
len(restart_calls) == snapshot + + +@pytest.mark.parametrize( + ("remote_agents", "backups"), + [ + (["remote"], {}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + ], +) +async def test_restore_remote_agent( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + backups: dict[str, list[AgentBackup]], + snapshot: SnapshotAssertion, +) -> None: + """Test calling the restore command.""" + await setup_backup_integration( + hass, with_hassio=False, backups=backups, remote_agents=remote_agents + ) + restart_calls = async_mock_service(hass, "homeassistant", "restart") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch("pathlib.Path.write_text"), patch("pathlib.Path.open"): + await client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": "abc123", + "agent_id": "test.remote", + } + ) + assert await client.receive_json() == snapshot + assert len(restart_calls) == snapshot @pytest.mark.parametrize( @@ -116,6 +629,7 @@ async def test_generate( pytest.param(False, id="without_hassio"), ], ) +@pytest.mark.usefixtures("supervisor_client") async def test_backup_end( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -132,10 +646,10 @@ async def test_backup_end( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.websocket.BackupManager.post_backup_actions", + "homeassistant.components.backup.manager.BackupManager.async_post_backup_actions", ): await client.send_json_auto_id({"type": "backup/end"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot @pytest.mark.parametrize( @@ -149,6 +663,7 @@ async def test_backup_end( pytest.param(False, id="without_hassio"), ], ) +@pytest.mark.usefixtures("supervisor_client") async def test_backup_start( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -165,10 +680,10 @@ async def test_backup_start( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.websocket.BackupManager.pre_backup_actions", + "homeassistant.components.backup.manager.BackupManager.async_pre_backup_actions", ): await client.send_json_auto_id({"type": "backup/start"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot @pytest.mark.parametrize( @@ -179,7 +694,8 @@ async def test_backup_start( Exception("Boom"), ], ) -async def test_backup_end_excepion( +@pytest.mark.usefixtures("supervisor_client") +async def test_backup_end_exception( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, snapshot: SnapshotAssertion, @@ -193,11 +709,11 @@ async def test_backup_end_excepion( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.websocket.BackupManager.post_backup_actions", + "homeassistant.components.backup.manager.BackupManager.async_post_backup_actions", side_effect=exception, ): await client.send_json_auto_id({"type": "backup/end"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot @pytest.mark.parametrize( @@ -208,7 +724,8 @@ async def test_backup_end_excepion( Exception("Boom"), ], ) -async def test_backup_start_excepion( +@pytest.mark.usefixtures("supervisor_client") +async def test_backup_start_exception( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, snapshot: SnapshotAssertion, @@ -222,8 +739,1211 @@ async def test_backup_start_excepion( await hass.async_block_till_done() with patch( - 
"homeassistant.components.backup.websocket.BackupManager.pre_backup_actions", + "homeassistant.components.backup.manager.BackupManager.async_pre_backup_actions", side_effect=exception, ): await client.send_json_auto_id({"type": "backup/start"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot + + +async def test_agents_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test getting backup agents info.""" + await setup_backup_integration(hass, with_hassio=False) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/agents/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups") +@pytest.mark.parametrize( + "storage_data", + [ + None, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": True, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": 3, "days": 7}, + "last_attempted_strategy_backup": "2024-10-26T04:45:00+01:00", + "last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", + "schedule": {"state": "daily"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": 3, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "never"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": None, "days": 7}, + "last_attempted_strategy_backup": "2024-10-27T04:45:00+01:00", + "last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", + "schedule": {"state": "never"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "mon"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "sat"}, + }, + }, + ], +) +async def test_config_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + hass_storage: dict[str, Any], + storage_data: dict[str, Any] | None, +) -> None: + """Test getting backup config info.""" + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + + await 
setup_backup_integration(hass) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups") +@pytest.mark.parametrize( + "command", + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 7}, + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "mon", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "never", + }, + { + "type": "backup/config/update", + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 3, "days": 7}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": None}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 7}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 3}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"days": 7}, + "schedule": "daily", + }, + ], +) +async def test_config_update( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + command: dict[str, Any], + hass_storage: dict[str, Any], +) -> None: + """Test updating the backup config.""" + await setup_backup_integration(hass) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + await hass.async_block_till_done() + + assert hass_storage[DOMAIN] == snapshot + + +@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups") +@pytest.mark.parametrize( + "command", + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "someday", + }, + ], +) +async def test_config_update_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + command: dict[str, Any], +) -> None: + """Test errors when updating the backup config.""" + await setup_backup_integration(hass) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id(command) + result = await 
client.receive_json() + + assert not result["success"] + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ( + "command", + "last_completed_strategy_backup", + "time_1", + "time_2", + "attempted_backup_time", + "completed_backup_time", + "backup_calls_1", + "backup_calls_2", + "call_args", + "create_backup_side_effect", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "mon", + }, + "2024-11-11T04:45:00+01:00", + "2024-11-18T04:45:00+01:00", + "2024-11-25T04:45:00+01:00", + "2024-11-18T04:45:00+01:00", + "2024-11-18T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "never", + }, + "2024-11-11T04:45:00+01:00", + "2034-11-11T12:00:00+01:00", # ten years later and still no backups + "2034-11-11T13:00:00+01:00", + "2024-11-11T04:45:00+01:00", + "2024-11-11T04:45:00+01:00", + 0, + 0, + None, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + "2024-10-26T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "mon", + }, + "2024-10-26T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", # missed event uses daily schedule once + "2024-11-12T04:45:00+01:00", # missed event uses daily schedule once + 1, + 1, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "never", + }, + "2024-10-26T04:45:00+01:00", + "2034-11-11T12:00:00+01:00", # ten years later and still no backups + "2034-11-12T12:00:00+01:00", + "2024-10-26T04:45:00+01:00", + "2024-10-26T04:45:00+01:00", + 0, + 0, + None, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", # attempted to create backup but failed + "2024-11-11T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + [Exception("Boom"), None], + ), + ], +) +async def test_config_schedule_logic( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, + command: dict[str, Any], + last_completed_strategy_backup: str, + time_1: str, + time_2: str, + attempted_backup_time: str, + completed_backup_time: str, + backup_calls_1: int, + backup_calls_2: int, + call_args: Any, + create_backup_side_effect: list[Exception | None] | None, +) -> None: + """Test config schedule logic.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": 
["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": last_completed_strategy_backup, + "last_completed_strategy_backup": last_completed_strategy_backup, + "schedule": {"state": "daily"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + create_backup.side_effect = create_backup_side_effect + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-11 12:00:00+01:00") + + await setup_backup_integration(hass, remote_agents=["test-agent"]) + await hass.async_block_till_done() + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + freezer.move_to(time_1) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert create_backup.call_count == backup_calls_1 + assert create_backup.call_args == call_args + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + == attempted_backup_time + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + == completed_backup_time + ) + + freezer.move_to(time_2) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert create_backup.call_count == backup_calls_2 + assert create_backup.call_args == call_args + + +@pytest.mark.parametrize( + ( + "command", + "backups", + "get_backups_agent_errors", + "delete_backup_agent_errors", + "last_backup_time", + "next_time", + "backup_time", + "backup_calls", + "get_backups_calls", + "delete_calls", + "delete_args_list", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": None, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, # we get backups even if backup retention copies is None + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 0, + [], + ), + ( + { + "type": 
"backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {"test-agent": BackupAgentError("Boom!")}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {"test-agent": BackupAgentError("Boom!")}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": 
"backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 0, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 3, + [call("backup-1"), call("backup-2"), call("backup-3")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 0, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 0, + [], + ), + ], +) +async def test_config_retention_copies_logic( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, + delete_backup: AsyncMock, + get_backups: AsyncMock, + command: dict[str, Any], + backups: dict[str, Any], + get_backups_agent_errors: dict[str, Exception], + delete_backup_agent_errors: dict[str, Exception], + last_backup_time: str, + next_time: str, + backup_time: str, + backup_calls: int, + get_backups_calls: int, + delete_calls: int, + delete_args_list: Any, +) -> None: + """Test config backup retention copies logic.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": last_backup_time, + "schedule": {"state": "daily"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + get_backups.return_value = (backups, get_backups_agent_errors) + delete_backup.return_value = delete_backup_agent_errors + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-11 12:00:00+01:00") + + await setup_backup_integration(hass, remote_agents=["test-agent"]) + await hass.async_block_till_done() + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + freezer.move_to(next_time) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert create_backup.call_count == backup_calls + assert get_backups.call_count == get_backups_calls + assert delete_backup.call_count == delete_calls + assert delete_backup.call_args_list == delete_args_list + async_fire_time_changed(hass, fire_all=True) # flush out storage 
save + await hass.async_block_till_done() + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + == backup_time + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + == backup_time + ) + + +@pytest.mark.parametrize( + ( + "command", + "backups", + "get_backups_agent_errors", + "delete_backup_agent_errors", + "last_backup_time", + "start_time", + "next_time", + "get_backups_calls", + "delete_calls", + "delete_args_list", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 3}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {"test-agent": BackupAgentError("Boom!")}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + 
date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {"test-agent": BackupAgentError("Boom!")}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 0}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ], +) +async def test_config_retention_days_logic( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + delete_backup: AsyncMock, + get_backups: AsyncMock, + command: dict[str, Any], + backups: dict[str, Any], + get_backups_agent_errors: dict[str, Exception], + delete_backup_agent_errors: dict[str, Exception], + last_backup_time: str, + start_time: str, + next_time: str, + get_backups_calls: int, + delete_calls: int, + delete_args_list: list[Any], +) -> None: + """Test config backup retention logic.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": last_backup_time, + "schedule": {"state": "never"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + get_backups.return_value = (backups, get_backups_agent_errors) + delete_backup.return_value = delete_backup_agent_errors + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to(start_time) + + await setup_backup_integration(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + freezer.move_to(next_time) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert get_backups.call_count == get_backups_calls + assert delete_backup.call_count == delete_calls + assert delete_backup.call_args_list == delete_args_list + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + + +async def test_subscribe_event( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test generating a backup.""" + await setup_backup_integration(hass, with_hassio=False) + + manager = 
hass.data[DATA_MANAGER] + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + assert await client.receive_json() == snapshot + assert await client.receive_json() == snapshot + + manager.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.IN_PROGRESS) + ) + assert await client.receive_json() == snapshot diff --git a/tests/components/balboa/__init__.py b/tests/components/balboa/__init__.py index a27293e955f..2cb100e3642 100644 --- a/tests/components/balboa/__init__.py +++ b/tests/components/balboa/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations from unittest.mock import MagicMock -from homeassistant.components.balboa import CONF_SYNC_TIME, DOMAIN +from homeassistant.components.balboa.const import CONF_SYNC_TIME, DOMAIN from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant, State diff --git a/tests/components/bang_olufsen/conftest.py b/tests/components/bang_olufsen/conftest.py index dd6c4a73469..cbde856ff89 100644 --- a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -6,13 +6,20 @@ from unittest.mock import AsyncMock, Mock, patch from mozart_api.models import ( Action, BeolinkPeer, + BeolinkSelf, ContentItem, + ListeningMode, + ListeningModeFeatures, + ListeningModeRef, + ListeningModeTrigger, PlaybackContentMetadata, PlaybackProgress, PlaybackState, + PlayQueueSettings, ProductState, RemoteMenuItem, RenderingState, + SoftwareUpdateState, SoftwareUpdateStatus, Source, SourceArray, @@ -26,10 +33,22 @@ from homeassistant.core import HomeAssistant from .const import ( TEST_DATA_CREATE_ENTRY, + TEST_DATA_CREATE_ENTRY_2, TEST_FRIENDLY_NAME, + TEST_FRIENDLY_NAME_3, + TEST_FRIENDLY_NAME_4, + TEST_HOST_3, + TEST_HOST_4, TEST_JID_1, + TEST_JID_3, + TEST_JID_4, TEST_NAME, + TEST_NAME_2, TEST_SERIAL_NUMBER, + TEST_SERIAL_NUMBER_2, + TEST_SOUND_MODE, + TEST_SOUND_MODE_2, + TEST_SOUND_MODE_NAME, ) from tests.common import MockConfigEntry @@ -46,6 +65,17 @@ def mock_config_entry() -> MockConfigEntry: ) +@pytest.fixture +def mock_config_entry_2() -> MockConfigEntry: + """Mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_SERIAL_NUMBER_2, + data=TEST_DATA_CREATE_ENTRY_2, + title=TEST_NAME_2, + ) + + @pytest.fixture async def mock_media_player( hass: HomeAssistant, @@ -74,12 +104,12 @@ def mock_mozart_client() -> Generator[AsyncMock]: # REST API client methods client.get_beolink_self = AsyncMock() - client.get_beolink_self.return_value = BeolinkPeer( + client.get_beolink_self.return_value = BeolinkSelf( friendly_name=TEST_FRIENDLY_NAME, jid=TEST_JID_1 ) client.get_softwareupdate_status = AsyncMock() client.get_softwareupdate_status.return_value = SoftwareUpdateStatus( - software_version="1.0.0", state="" + software_version="1.0.0", state=SoftwareUpdateState() ) client.get_product_state = AsyncMock() client.get_product_state.return_value = ProductState( @@ -94,21 +124,29 @@ def mock_mozart_client() -> Generator[AsyncMock]: client.get_available_sources = AsyncMock() client.get_available_sources.return_value = SourceArray( items=[ - # Is in the HIDDEN_SOURCE_IDS constant, so should not be user selectable + # Is not playable, so should not be user selectable Source( name="AirPlay", id="airPlay", is_enabled=True, is_multiroom_available=False, ), - # The only available source + # The only available beolink source Source( name="Tidal", id="tidal", is_enabled=True, is_multiroom_available=True, + 
is_playable=True, ), - # Is disabled, so should not be user selectable + Source( + name="Line-In", + id="lineIn", + is_enabled=True, + is_multiroom_available=False, + is_playable=True, + ), + # Is disabled and not playable, so should not be user selectable Source( name="Powerlink", id="pl", @@ -227,6 +265,65 @@ def mock_mozart_client() -> Generator[AsyncMock]: id="64c9da45-3682-44a4-8030-09ed3ef44160", ), } + client.get_beolink_peers = AsyncMock() + client.get_beolink_peers.return_value = [ + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_3, + jid=TEST_JID_3, + ip_address=TEST_HOST_3, + ), + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_4, + jid=TEST_JID_4, + ip_address=TEST_HOST_4, + ), + ] + client.get_beolink_listeners = AsyncMock() + client.get_beolink_listeners.return_value = [ + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_3, + jid=TEST_JID_3, + ip_address=TEST_HOST_3, + ), + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_4, + jid=TEST_JID_4, + ip_address=TEST_HOST_4, + ), + ] + + client.get_listening_mode_set = AsyncMock() + client.get_listening_mode_set.return_value = [ + ListeningMode( + id=TEST_SOUND_MODE, + name=TEST_SOUND_MODE_NAME, + features=ListeningModeFeatures(), + triggers=[ListeningModeTrigger()], + ), + ListeningMode( + id=TEST_SOUND_MODE_2, + name=TEST_SOUND_MODE_NAME, + features=ListeningModeFeatures(), + triggers=[ListeningModeTrigger()], + ), + ListeningMode( + id=345, + name=f"{TEST_SOUND_MODE_NAME} 2", + features=ListeningModeFeatures(), + triggers=[ListeningModeTrigger()], + ), + ] + client.get_active_listening_mode = AsyncMock() + client.get_active_listening_mode.return_value = ListeningModeRef( + href="", + id=123, + ) + client.get_settings_queue = AsyncMock() + client.get_settings_queue.return_value = PlayQueueSettings( + repeat="none", + shuffle=False, + ) + client.post_standby = AsyncMock() client.set_current_volume_level = AsyncMock() client.set_volume_mute = AsyncMock() @@ -241,6 +338,14 @@ def mock_mozart_client() -> Generator[AsyncMock]: client.add_to_queue = AsyncMock() client.post_remote_trigger = AsyncMock() client.set_active_source = AsyncMock() + client.post_beolink_expand = AsyncMock() + client.join_beolink_peer = AsyncMock() + client.post_beolink_unexpand = AsyncMock() + client.post_beolink_leave = AsyncMock() + client.post_beolink_allstandby = AsyncMock() + client.join_latest_beolink_experience = AsyncMock() + client.activate_listening_mode = AsyncMock() + client.set_settings_queue = AsyncMock() # Non-REST API client methods client.check_device_connection = AsyncMock() diff --git a/tests/components/bang_olufsen/const.py b/tests/components/bang_olufsen/const.py index d5e2221675a..6602a898eb6 100644 --- a/tests/components/bang_olufsen/const.py +++ b/tests/components/bang_olufsen/const.py @@ -6,6 +6,7 @@ from unittest.mock import Mock from mozart_api.exceptions import ApiException from mozart_api.models import ( Action, + ListeningModeRef, OverlayPlayRequest, OverlayPlayRequestTextToSpeechTextToSpeech, PlaybackContentMetadata, @@ -15,6 +16,7 @@ from mozart_api.models import ( PlayQueueItemType, RenderingState, SceneProperties, + Source, UserFlow, VolumeLevel, VolumeMute, @@ -39,13 +41,30 @@ TEST_MODEL_BALANCE = "Beosound Balance" TEST_MODEL_THEATRE = "Beosound Theatre" TEST_MODEL_LEVEL = "Beosound Level" TEST_SERIAL_NUMBER = "11111111" +TEST_SERIAL_NUMBER_2 = "22222222" TEST_NAME = f"{TEST_MODEL_BALANCE}-{TEST_SERIAL_NUMBER}" +TEST_NAME_2 = f"{TEST_MODEL_BALANCE}-{TEST_SERIAL_NUMBER_2}" TEST_FRIENDLY_NAME = "Living room Balance" 
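The entity-id constants added in this file are derived from the `<model>-<serial>` device names defined above (for example `Beosound Balance-11111111` corresponds to `media_player.beosound_balance_11111111`). A minimal self-contained sketch of that mapping, using a hypothetical `expected_entity_id` helper with plain string handling instead of a slugify utility:

```python
def expected_entity_id(model: str, serial: str) -> str:
    """Lower-case the "<model>-<serial>" device name and turn separators into underscores."""
    name = f"{model}-{serial}"  # e.g. "Beosound Balance-11111111", like TEST_NAME
    slug = name.lower().replace(" ", "_").replace("-", "_")
    return f"media_player.{slug}"


# Matches TEST_MEDIA_PLAYER_ENTITY_ID / TEST_MEDIA_PLAYER_ENTITY_ID_2 below.
assert expected_entity_id("Beosound Balance", "11111111") == "media_player.beosound_balance_11111111"
assert expected_entity_id("Beosound Balance", "22222222") == "media_player.beosound_balance_22222222"
```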
TEST_TYPE_NUMBER = "1111" TEST_ITEM_NUMBER = "1111111" TEST_JID_1 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.{TEST_SERIAL_NUMBER}@products.bang-olufsen.com" TEST_MEDIA_PLAYER_ENTITY_ID = "media_player.beosound_balance_11111111" +TEST_FRIENDLY_NAME_2 = "Laundry room Balance" +TEST_JID_2 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.22222222@products.bang-olufsen.com" +TEST_MEDIA_PLAYER_ENTITY_ID_2 = "media_player.beosound_balance_22222222" +TEST_HOST_2 = "192.168.0.2" + +TEST_FRIENDLY_NAME_3 = "Lego room Balance" +TEST_JID_3 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.33333333@products.bang-olufsen.com" +TEST_MEDIA_PLAYER_ENTITY_ID_3 = "media_player.beosound_balance_33333333" +TEST_HOST_3 = "192.168.0.3" + +TEST_FRIENDLY_NAME_4 = "Lounge room Balance" +TEST_JID_4 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.44444444@products.bang-olufsen.com" +TEST_MEDIA_PLAYER_ENTITY_ID_4 = "media_player.beosound_balance_44444444" +TEST_HOST_4 = "192.168.0.4" + TEST_HOSTNAME_ZEROCONF = TEST_NAME.replace(" ", "-") + ".local." TEST_TYPE_ZEROCONF = "_bangolufsen._tcp.local." TEST_NAME_ZEROCONF = TEST_NAME.replace(" ", "-") + "." + TEST_TYPE_ZEROCONF @@ -60,6 +79,12 @@ TEST_DATA_CREATE_ENTRY = { CONF_BEOLINK_JID: TEST_JID_1, CONF_NAME: TEST_NAME, } +TEST_DATA_CREATE_ENTRY_2 = { + CONF_HOST: TEST_HOST, + CONF_MODEL: TEST_MODEL_BALANCE, + CONF_BEOLINK_JID: TEST_JID_2, + CONF_NAME: TEST_NAME_2, +} TEST_DATA_ZEROCONF = ZeroconfServiceInfo( ip_address=IPv4Address(TEST_HOST), @@ -101,11 +126,15 @@ TEST_DATA_ZEROCONF_IPV6 = ZeroconfServiceInfo( }, ) -TEST_AUDIO_SOURCES = [BangOlufsenSource.TIDAL.name] +TEST_SOURCE = Source( + name="Tidal", id="tidal", is_seekable=True, is_enabled=True, is_playable=True +) +TEST_AUDIO_SOURCES = [TEST_SOURCE.name, BangOlufsenSource.LINE_IN.name] TEST_VIDEO_SOURCES = ["HDMI A"] TEST_SOURCES = TEST_AUDIO_SOURCES + TEST_VIDEO_SOURCES TEST_FALLBACK_SOURCES = [ "Audio Streamer", + "Bluetooth", "Spotify Connect", "Line-In", "Optical", @@ -177,3 +206,14 @@ TEST_DEEZER_INVALID_FLOW = ApiException( data='{"message": "Couldn\'t start user flow for me"}', # codespell:ignore ), ) +TEST_SOUND_MODE = 123 +TEST_SOUND_MODE_2 = 234 +TEST_SOUND_MODE_NAME = "Test Listening Mode" +TEST_ACTIVE_SOUND_MODE_NAME = f"{TEST_SOUND_MODE_NAME} ({TEST_SOUND_MODE})" +TEST_ACTIVE_SOUND_MODE_NAME_2 = f"{TEST_SOUND_MODE_NAME} ({TEST_SOUND_MODE_2})" +TEST_LISTENING_MODE_REF = ListeningModeRef(href="", id=TEST_SOUND_MODE_2) +TEST_SOUND_MODES = [ + TEST_ACTIVE_SOUND_MODE_NAME, + TEST_ACTIVE_SOUND_MODE_NAME_2, + f"{TEST_SOUND_MODE_NAME} 2 (345)", +] diff --git a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..d7f9a045921 --- /dev/null +++ b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr @@ -0,0 +1,69 @@ +# serializer version: 1 +# name: test_async_get_config_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'host': '192.168.0.1', + 'jid': '1111.1111111.11111111@products.bang-olufsen.com', + 'model': 'Beosound Balance', + 'name': 'Beosound Balance-11111111', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'bang_olufsen', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Beosound Balance-11111111', + 'unique_id': '11111111', + 'version': 1, + }), + 'media_player': dict({ + 'attributes': dict({ + 'beolink': dict({ + 'listeners': 
dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': 'music', + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': 2095933, + }), + 'entity_id': 'media_player.beosound_balance_11111111', + 'state': 'playing', + }), + 'websocket_connected': False, + }) +# --- diff --git a/tests/components/bang_olufsen/snapshots/test_media_player.ambr b/tests/components/bang_olufsen/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..327b7ecfacf --- /dev/null +++ b/tests/components/bang_olufsen/snapshots/test_media_player.ambr @@ -0,0 +1,1090 @@ +# serializer version: 1 +# name: test_async_beolink_allstandby + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[all_discovered-True-None-log_messages0-2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 
'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[all_discovered-True-expand_side_effect1-log_messages1-2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[beolink_jids-parameter_value2-None-log_messages2-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 
'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[beolink_jids-parameter_value3-expand_side_effect3-log_messages3-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join[service_parameters0-method_parameters0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join[service_parameters1-method_parameters1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': 
'1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join[service_parameters2-method_parameters2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join_invalid[service_parameters0-expected_result0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 
'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join_invalid[service_parameters1-expected_result1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join_invalid[service_parameters2-expected_result2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_unexpand + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room 
Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members0-1-0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members0-1-0].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 
'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members1-0-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members1-0-1].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': 
'1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'media_position': 0, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Line-In', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI 
A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_unjoin_player + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_update_beolink_listener + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'leader': dict({ + 'Laundry room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 
'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'media_player.beosound_balance_11111111', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_update_beolink_listener.1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- diff --git a/tests/components/bang_olufsen/test_diagnostics.py b/tests/components/bang_olufsen/test_diagnostics.py new file mode 100644 index 00000000000..7c99648ace4 --- /dev/null +++ b/tests/components/bang_olufsen/test_diagnostics.py @@ -0,0 +1,41 @@ +"""Test bang_olufsen config entry diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_async_get_config_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot( + exclude=props( + "created_at", + "entry_id", + "id", + "last_changed", + "last_reported", + "last_updated", + "media_position_updated_at", + "modified_at", + ) + ) diff --git a/tests/components/bang_olufsen/test_init.py b/tests/components/bang_olufsen/test_init.py index 3eb98e956be..c8e4c05f9ab 100644 --- a/tests/components/bang_olufsen/test_init.py +++ 
b/tests/components/bang_olufsen/test_init.py @@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceRegistry -from .const import TEST_MODEL_BALANCE, TEST_NAME, TEST_SERIAL_NUMBER +from .const import TEST_FRIENDLY_NAME, TEST_MODEL_BALANCE, TEST_SERIAL_NUMBER from tests.common import MockConfigEntry @@ -35,7 +35,8 @@ async def test_setup_entry( identifiers={(DOMAIN, TEST_SERIAL_NUMBER)} ) assert device is not None - assert device.name == TEST_NAME + # Is usually TEST_NAME, but is updated to the device's friendly name by _update_name_and_beolink + assert device.name == TEST_FRIENDLY_NAME assert device.model == TEST_MODEL_BALANCE # Ensure that the connection has been checked WebSocket connection has been initialized @@ -85,6 +86,7 @@ async def test_unload_entry( await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state == ConfigEntryState.LOADED + assert hasattr(mock_config_entry, "runtime_data") # Unload entry await hass.config_entries.async_unload(mock_config_entry.entry_id) @@ -94,5 +96,5 @@ async def test_unload_entry( assert mock_mozart_client.close_api_client.call_count == 1 # Ensure that the entry is not loaded and has been removed from hass - assert mock_config_entry.entry_id not in hass.data[DOMAIN] + assert not hasattr(mock_config_entry, "runtime_data") assert mock_config_entry.state == ConfigEntryState.NOT_LOADED diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index 9928a626a4f..695b086b0a7 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -1,24 +1,33 @@ """Test the Bang & Olufsen media_player entity.""" -from collections.abc import Callable -from contextlib import nullcontext as does_not_raise +from contextlib import AbstractContextManager, nullcontext as does_not_raise import logging from unittest.mock import AsyncMock, patch +from mozart_api.exceptions import NotFoundException from mozart_api.models import ( + BeolinkLeader, + BeolinkSelf, PlaybackContentMetadata, + PlayQueueSettings, RenderingState, Source, + SourceArray, WebsocketNotificationTag, ) import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props +from voluptuous import Invalid, MultipleInvalid from homeassistant.components.bang_olufsen.const import ( + BANG_OLUFSEN_REPEAT_FROM_HA, BANG_OLUFSEN_STATES, DOMAIN, BangOlufsenSource, ) from homeassistant.components.media_player import ( + ATTR_GROUP_MEMBERS, ATTR_INPUT_SOURCE, ATTR_INPUT_SOURCE_LIST, ATTR_MEDIA_ALBUM_ARTIST, @@ -31,27 +40,60 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_EXTRA, ATTR_MEDIA_POSITION, ATTR_MEDIA_POSITION_UPDATED_AT, + ATTR_MEDIA_REPEAT, ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_TITLE, ATTR_MEDIA_TRACK, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, + ATTR_SOUND_MODE, + ATTR_SOUND_MODE_LIST, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, + SERVICE_JOIN, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PLAY_PAUSE, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_SEEK, + SERVICE_MEDIA_STOP, + SERVICE_PLAY_MEDIA, + SERVICE_REPEAT_SET, + SERVICE_SELECT_SOUND_MODE, + SERVICE_SELECT_SOURCE, + SERVICE_SHUFFLE_SET, + SERVICE_TURN_OFF, + SERVICE_UNJOIN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, MediaPlayerState, MediaType, + RepeatMode, ) from homeassistant.const import 
ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.setup import async_setup_component from .const import ( + TEST_ACTIVE_SOUND_MODE_NAME, + TEST_ACTIVE_SOUND_MODE_NAME_2, TEST_AUDIO_SOURCES, TEST_DEEZER_FLOW, TEST_DEEZER_INVALID_FLOW, TEST_DEEZER_PLAYLIST, TEST_DEEZER_TRACK, TEST_FALLBACK_SOURCES, + TEST_FRIENDLY_NAME_2, + TEST_JID_1, + TEST_JID_2, + TEST_JID_3, + TEST_JID_4, + TEST_LISTENING_MODE_REF, TEST_MEDIA_PLAYER_ENTITY_ID, + TEST_MEDIA_PLAYER_ENTITY_ID_2, + TEST_MEDIA_PLAYER_ENTITY_ID_3, TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS, TEST_OVERLAY_OFFSET_VOLUME_TTS, TEST_PLAYBACK_ERROR, @@ -62,6 +104,9 @@ from .const import ( TEST_PLAYBACK_STATE_TURN_OFF, TEST_RADIO_STATION, TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, + TEST_SOUND_MODE_2, + TEST_SOUND_MODES, + TEST_SOURCE, TEST_SOURCES, TEST_VIDEO_SOURCES, TEST_VOLUME, @@ -93,15 +138,21 @@ async def test_initialization( # Check state (The initial state in this test does not contain all that much. # States are tested using simulated WebSocket events.) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_SOURCES assert states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] + assert states.attributes[ATTR_SOUND_MODE_LIST] == TEST_SOUND_MODES # Check API calls mock_mozart_client.get_softwareupdate_status.assert_called_once() mock_mozart_client.get_product_state.assert_called_once() mock_mozart_client.get_available_sources.assert_called_once() mock_mozart_client.get_remote_menu.assert_called_once() + mock_mozart_client.get_listening_mode_set.assert_called_once() + mock_mozart_client.get_active_listening_mode.assert_called_once() + mock_mozart_client.get_beolink_self.assert_called_once() + mock_mozart_client.get_beolink_peers.assert_called_once() + mock_mozart_client.get_beolink_listeners.assert_called_once() async def test_async_update_sources_audio_only( @@ -115,7 +166,7 @@ async def test_async_update_sources_audio_only( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_AUDIO_SOURCES @@ -130,7 +181,7 @@ async def test_async_update_sources_outdated_api( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ( states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_FALLBACK_SOURCES + TEST_VIDEO_SOURCES @@ -138,7 +189,9 @@ async def test_async_update_sources_outdated_api( async def test_async_update_sources_remote( - hass: HomeAssistant, mock_mozart_client, mock_config_entry: MockConfigEntry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_sources is called when there are new video sources.""" @@ -160,6 +213,37 @@ async def test_async_update_sources_remote( assert mock_mozart_client.get_remote_menu.call_count == 2 +async def test_async_update_sources_availability( + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the 
playback_source WebSocket event updates available playback sources.""" + # Remove video sources to simplify test + mock_mozart_client.get_remote_menu.return_value = {} + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + playback_source_callback = ( + mock_mozart_client.get_playback_source_notifications.call_args[0][0] + ) + + assert mock_mozart_client.get_available_sources.call_count == 1 + + # Add a source that is available and playable + mock_mozart_client.get_available_sources.return_value = SourceArray( + items=[TEST_SOURCE] + ) + + # Send playback_source. The source is not actually used, so its attributes don't matter + playback_source_callback(Source()) + + assert mock_mozart_client.get_available_sources.call_count == 2 + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes[ATTR_INPUT_SOURCE_LIST] == [TEST_SOURCE.name] + + async def test_async_update_playback_metadata( hass: HomeAssistant, mock_mozart_client: AsyncMock, @@ -174,7 +258,7 @@ async def test_async_update_playback_metadata( mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] ) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_MEDIA_DURATION not in states.attributes assert ATTR_MEDIA_TITLE not in states.attributes assert ATTR_MEDIA_ALBUM_NAME not in states.attributes @@ -185,7 +269,7 @@ async def test_async_update_playback_metadata( # Send the WebSocket event dispatch playback_metadata_callback(TEST_PLAYBACK_METADATA) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ( states.attributes[ATTR_MEDIA_DURATION] == TEST_PLAYBACK_METADATA.total_duration_seconds @@ -237,14 +321,14 @@ async def test_async_update_playback_progress( mock_mozart_client.get_playback_progress_notifications.call_args[0][0] ) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_MEDIA_POSITION not in states.attributes old_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] assert old_updated_at playback_progress_callback(TEST_PLAYBACK_PROGRESS) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes[ATTR_MEDIA_POSITION] == TEST_PLAYBACK_PROGRESS.progress new_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] assert new_updated_at @@ -265,29 +349,27 @@ async def test_async_update_playback_state( mock_mozart_client.get_playback_state_notifications.call_args[0][0] ) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.state == MediaPlayerState.PLAYING playback_state_callback(TEST_PLAYBACK_STATE_PAUSED) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.state == TEST_PLAYBACK_STATE_PAUSED.value @pytest.mark.parametrize( - ("reported_source", "real_source", "content_type", "progress", "metadata"), + ("source", "content_type", "progress", "metadata"), [ - # Normal source, music mediatype expected, no progress expected + # Normal source, music mediatype expected ( - BangOlufsenSource.TIDAL, - BangOlufsenSource.TIDAL, + TEST_SOURCE, MediaType.MUSIC, TEST_PLAYBACK_PROGRESS.progress, PlaybackContentMetadata(), ), - # URI source, url 
media type expected, no progress expected + # URI source, url media type expected ( - BangOlufsenSource.URI_STREAMER, BangOlufsenSource.URI_STREAMER, MediaType.URL, TEST_PLAYBACK_PROGRESS.progress, @@ -296,44 +378,17 @@ async def test_async_update_playback_state( # Line-In source,media type expected, progress 0 expected ( BangOlufsenSource.LINE_IN, - BangOlufsenSource.CHROMECAST, MediaType.MUSIC, 0, PlaybackContentMetadata(), ), - # Chromecast as source, but metadata says Line-In. - # Progress is not set to 0 as the source is Chromecast first - ( - BangOlufsenSource.CHROMECAST, - BangOlufsenSource.LINE_IN, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(title=BangOlufsenSource.LINE_IN.name), - ), - # Chromecast as source, but metadata says Bluetooth - ( - BangOlufsenSource.CHROMECAST, - BangOlufsenSource.BLUETOOTH, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(title=BangOlufsenSource.BLUETOOTH.name), - ), - # Chromecast as source, but metadata says Bluetooth in another way - ( - BangOlufsenSource.CHROMECAST, - BangOlufsenSource.BLUETOOTH, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(art=[]), - ), ], ) async def test_async_update_source_change( hass: HomeAssistant, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, - reported_source: Source, - real_source: Source, + source: Source, content_type: MediaType, progress: int, metadata: PlaybackContentMetadata, @@ -353,7 +408,7 @@ async def test_async_update_source_change( mock_mozart_client.get_source_change_notifications.call_args[0][0] ) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_INPUT_SOURCE not in states.attributes assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC @@ -362,10 +417,10 @@ async def test_async_update_source_change( # Simulate metadata playback_metadata_callback(metadata) - source_change_callback(reported_source) + source_change_callback(source) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) - assert states.attributes[ATTR_INPUT_SOURCE] == real_source.name + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes[ATTR_INPUT_SOURCE] == source.name assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == content_type assert states.attributes[ATTR_MEDIA_POSITION] == progress @@ -385,15 +440,16 @@ async def test_async_turn_off( ) await hass.services.async_call( - "media_player", - "turn_off", + MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_OFF, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) playback_state_callback(TEST_PLAYBACK_STATE_TURN_OFF) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert TEST_PLAYBACK_STATE_TURN_OFF.value assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_TURN_OFF.value] # Check API call @@ -412,12 +468,12 @@ async def test_async_set_volume_level( volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_MEDIA_VOLUME_LEVEL not in states.attributes await hass.services.async_call( - "media_player", - "volume_set", + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_VOLUME_LEVEL: TEST_VOLUME_HOME_ASSISTANT_FORMAT, @@ -428,7 +484,7 @@ async def 
test_async_set_volume_level( # The service call will trigger a WebSocket notification volume_callback(TEST_VOLUME) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ( states.attributes[ATTR_MEDIA_VOLUME_LEVEL] == TEST_VOLUME_HOME_ASSISTANT_FORMAT ) @@ -438,6 +494,122 @@ async def test_async_set_volume_level( ) +async def test_async_update_beolink_line_in( + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test _async_update_beolink with line-in and no active Beolink session.""" + # Ensure no listeners + mock_mozart_client.get_beolink_listeners.return_value = [] + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] + ) + beolink_callback = mock_mozart_client.get_notification_notifications.call_args[0][0] + + # Set source + source_change_callback(BangOlufsenSource.LINE_IN) + beolink_callback(WebsocketNotificationTag(value="beolinkListeners")) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes["group_members"] == [] + + # Called once during _initialize and once during _async_update_beolink + assert mock_mozart_client.get_beolink_listeners.call_count == 2 + assert mock_mozart_client.get_beolink_peers.call_count == 2 + + +async def test_async_update_beolink_listener( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_config_entry_2: MockConfigEntry, +) -> None: + """Test _async_update_beolink as a listener.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + playback_metadata_callback = ( + mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] + ) + + # Add another entity + mock_config_entry_2.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_2.entry_id) + + # Runs _async_update_beolink + playback_metadata_callback( + PlaybackContentMetadata( + remote_leader=BeolinkLeader( + friendly_name=TEST_FRIENDLY_NAME_2, jid=TEST_JID_2 + ) + ) + ) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes["group_members"] == [ + TEST_MEDIA_PLAYER_ENTITY_ID_2, + TEST_MEDIA_PLAYER_ENTITY_ID, + ] + + # Called once for each entity during _initialize + assert mock_mozart_client.get_beolink_listeners.call_count == 2 + # Called once for each entity during _initialize and + # once more during _async_update_beolink for the entity that has the callback associated with it. 
+ assert mock_mozart_client.get_beolink_peers.call_count == 3 + + # Main entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + # Secondary entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_update_name_and_beolink( + hass: HomeAssistant, + device_registry: DeviceRegistry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test _async_update_name_and_beolink.""" + # Change response to ensure device name is changed + mock_mozart_client.get_beolink_self.return_value = BeolinkSelf( + friendly_name=TEST_FRIENDLY_NAME_2, jid=TEST_JID_1 + ) + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + configuration_callback = ( + mock_mozart_client.get_notification_notifications.call_args[0][0] + ) + # Trigger callback + configuration_callback(WebsocketNotificationTag(value="configuration")) + + await hass.async_block_till_done() + + assert mock_mozart_client.get_beolink_self.call_count == 2 + assert mock_mozart_client.get_beolink_peers.call_count == 2 + assert mock_mozart_client.get_beolink_listeners.call_count == 2 + + # Check that device name has been changed + assert mock_config_entry.unique_id + assert ( + device := device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + ) + assert device.name == TEST_FRIENDLY_NAME_2 + + async def test_async_mute_volume( hass: HomeAssistant, mock_mozart_client: AsyncMock, @@ -450,12 +622,12 @@ async def test_async_mute_volume( volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_MEDIA_VOLUME_MUTED not in states.attributes await hass.services.async_call( - "media_player", - "volume_mute", + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_VOLUME_MUTED: TEST_VOLUME_HOME_ASSISTANT_FORMAT, @@ -466,7 +638,7 @@ async def test_async_mute_volume( # The service call will trigger a WebSocket notification volume_callback(TEST_VOLUME_MUTED) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ( states.attributes[ATTR_MEDIA_VOLUME_MUTED] == TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT @@ -505,12 +677,13 @@ async def test_async_media_play_pause( # Set the initial state playback_state_callback(initial_state) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert initial_state.value assert states.state == BANG_OLUFSEN_STATES[initial_state.value] await hass.services.async_call( - "media_player", - "media_play_pause", + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PLAY_PAUSE, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -535,12 +708,13 @@ async def test_async_media_stop( # Set the state to playing playback_state_callback(TEST_PLAYBACK_STATE_PLAYING) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert TEST_PLAYBACK_STATE_PLAYING.value assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_PLAYING.value] await hass.services.async_call( - "media_player", - "media_stop", + MEDIA_PLAYER_DOMAIN, 
+ SERVICE_MEDIA_STOP, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -560,8 +734,8 @@ async def test_async_media_next_track( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "media_next_track", + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -572,10 +746,12 @@ async def test_async_media_next_track( @pytest.mark.parametrize( ("source", "expected_result", "seek_called_times"), [ - # Deezer source, seek expected - (BangOlufsenSource.DEEZER, does_not_raise(), 1), - # Non deezer source, seek shouldn't work - (BangOlufsenSource.TIDAL, pytest.raises(HomeAssistantError), 0), + # Seekable source, seek expected + (TEST_SOURCE, does_not_raise(), 1), + # Non seekable source, seek shouldn't work + (BangOlufsenSource.LINE_IN, pytest.raises(HomeAssistantError), 0), + # Malformed source, seek shouldn't work + (Source(), pytest.raises(HomeAssistantError), 0), ], ) async def test_async_media_seek( @@ -583,7 +759,7 @@ async def test_async_media_seek( mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, source: Source, - expected_result: Callable, + expected_result: AbstractContextManager, seek_called_times: int, ) -> None: """Test async_media_seek.""" @@ -601,8 +777,8 @@ async def test_async_media_seek( # Check results with expected_result: await hass.services.async_call( - "media_player", - "media_seek", + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_SEEK, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_SEEK_POSITION: TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, @@ -624,8 +800,8 @@ async def test_async_media_previous_track( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "media_previous_track", + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PREVIOUS_TRACK, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -644,8 +820,8 @@ async def test_async_clear_playlist( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "clear_playlist", + MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -659,7 +835,7 @@ async def test_async_clear_playlist( # Invalid source ("Test source", pytest.raises(ServiceValidationError), 0, 0), # Valid audio source - (BangOlufsenSource.TIDAL.name, does_not_raise(), 1, 0), + (TEST_SOURCE.name, does_not_raise(), 1, 0), # Valid video source (TEST_VIDEO_SOURCES[0], does_not_raise(), 0, 1), ], @@ -669,7 +845,7 @@ async def test_async_select_source( mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, source: str, - expected_result: Callable, + expected_result: AbstractContextManager, audio_source_call: int, video_source_call: int, ) -> None: @@ -680,8 +856,8 @@ async def test_async_select_source( with expected_result: await hass.services.async_call( - "media_player", - "select_source", + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_INPUT_SOURCE: source, @@ -693,6 +869,69 @@ async def test_async_select_source( assert mock_mozart_client.post_remote_trigger.call_count == video_source_call +async def test_async_select_sound_mode( + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_select_sound_mode.""" + + mock_config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes[ATTR_SOUND_MODE] == TEST_ACTIVE_SOUND_MODE_NAME + + active_listening_mode_callback = ( + mock_mozart_client.get_active_listening_mode_notifications.call_args[0][0] + ) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOUND_MODE, + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_SOUND_MODE: TEST_ACTIVE_SOUND_MODE_NAME_2, + }, + blocking=True, + ) + + active_listening_mode_callback(TEST_LISTENING_MODE_REF) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes[ATTR_SOUND_MODE] == TEST_ACTIVE_SOUND_MODE_NAME_2 + + mock_mozart_client.activate_listening_mode.assert_called_once_with( + id=TEST_SOUND_MODE_2 + ) + + +async def test_async_select_sound_mode_invalid( + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_select_sound_mode with an invalid sound_mode.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with pytest.raises(ServiceValidationError) as exc_info: + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOUND_MODE, + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_SOUND_MODE: "invalid_sound_mode", + }, + blocking=True, + ) + + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == "invalid_sound_mode" + assert exc_info.errisinstance(ServiceValidationError) + + async def test_async_play_media_invalid_type( hass: HomeAssistant, mock_mozart_client: AsyncMock, @@ -705,8 +944,8 @@ async def test_async_play_media_invalid_type( with pytest.raises(ServiceValidationError) as exc_info: await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "test", @@ -734,8 +973,8 @@ async def test_async_play_media_url( await async_setup_component(hass, "media_source", {"media_source": {}}) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", @@ -760,8 +999,8 @@ async def test_async_play_media_overlay_absolute_volume_uri( await async_setup_component(hass, "media_source", {"media_source": {}}) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", @@ -792,8 +1031,8 @@ async def test_async_play_media_overlay_invalid_offset_volume_tts( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "Dette er en test", @@ -829,8 +1068,8 @@ async def test_async_play_media_overlay_offset_volume_tts( volume_callback(TEST_VOLUME) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "This is a test", @@ -859,8 +1098,8 @@ async def test_async_play_media_tts( await async_setup_component(hass, "media_source", 
{"media_source": {}}) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", @@ -883,8 +1122,8 @@ async def test_async_play_media_radio( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "1234567890123456", @@ -909,8 +1148,8 @@ async def test_async_play_media_favourite( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "1", @@ -934,8 +1173,8 @@ async def test_async_play_media_deezer_flow( # Send a service call await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "flow", @@ -961,8 +1200,8 @@ async def test_async_play_media_deezer_playlist( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "playlist:1234567890", @@ -988,8 +1227,8 @@ async def test_async_play_media_deezer_track( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "1234567890", @@ -1017,8 +1256,8 @@ async def test_async_play_media_invalid_deezer( with pytest.raises(HomeAssistantError) as exc_info: await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "flow", @@ -1054,8 +1293,8 @@ async def test_async_play_media_url_m3u( ), ): await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", @@ -1131,3 +1370,484 @@ async def test_async_browse_media( assert response["success"] assert (child in response["result"]["children"]) is present + + +@pytest.mark.parametrize( + ("group_members", "expand_count", "join_count"), + [ + # Valid member + ([TEST_MEDIA_PLAYER_ENTITY_ID_2], 1, 0), + # Touch to join + ([], 0, 1), + ], +) +async def test_async_join_players( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_config_entry_2: MockConfigEntry, + group_members: list[str], + expand_count: int, + join_count: int, +) -> None: + """Test async_join_players.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] + ) + + # Add another entity + mock_config_entry_2.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_2.entry_id) + + # Set the source to a beolink expandable source + source_change_callback(TEST_SOURCE) + + await 
hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_GROUP_MEMBERS: group_members, + }, + blocking=True, + ) + + assert mock_mozart_client.post_beolink_expand.call_count == expand_count + assert mock_mozart_client.join_latest_beolink_experience.call_count == join_count + + # Main entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + # Secondary entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +@pytest.mark.parametrize( + ("source", "group_members", "expected_result", "error_type"), + [ + # Invalid source + ( + BangOlufsenSource.LINE_IN, + [TEST_MEDIA_PLAYER_ENTITY_ID_2], + pytest.raises(ServiceValidationError), + "invalid_source", + ), + # Invalid media_player entity + ( + TEST_SOURCE, + [TEST_MEDIA_PLAYER_ENTITY_ID_3], + pytest.raises(ServiceValidationError), + "invalid_grouping_entity", + ), + ], +) +async def test_async_join_players_invalid( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_config_entry_2: MockConfigEntry, + source: Source, + group_members: list[str], + expected_result: AbstractContextManager, + error_type: str, +) -> None: + """Test async_join_players with an invalid media_player entity.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] + ) + + mock_config_entry_2.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_2.entry_id) + + source_change_callback(source) + + with expected_result as exc_info: + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_GROUP_MEMBERS: group_members, + }, + blocking=True, + ) + + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == error_type + assert exc_info.errisinstance(HomeAssistantError) + + assert mock_mozart_client.post_beolink_expand.call_count == 0 + assert mock_mozart_client.join_latest_beolink_experience.call_count == 0 + + # Main entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + # Secondary entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_unjoin_player( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_unjoin_player.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_UNJOIN, + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_beolink_leave.assert_called_once() + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +@pytest.mark.parametrize( + ( + "service_parameters", + "method_parameters", + ), + [ + # Defined JID + ( + {"beolink_jid": TEST_JID_2}, + {"jid": TEST_JID_2}, + ), + # Defined JID and source + ( 
+ {"beolink_jid": TEST_JID_2, "source_id": TEST_SOURCE.id}, + {"jid": TEST_JID_2, "source": TEST_SOURCE.id}, + ), + # Defined JID and Beolink Converter NL/ML source + ( + {"beolink_jid": TEST_JID_2, "source_id": "cd"}, + {"jid": TEST_JID_2, "source": "CD"}, + ), + ], +) +async def test_async_beolink_join( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + service_parameters: dict[str, str], + method_parameters: dict[str, str], +) -> None: + """Test async_beolink_join with defined JID and JID and source.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + DOMAIN, + "beolink_join", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, **service_parameters}, + blocking=True, + ) + + mock_mozart_client.join_beolink_peer.assert_called_once_with(**method_parameters) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +@pytest.mark.parametrize( + ( + "service_parameters", + "expected_result", + ), + [ + # Defined invalid JID + ( + {"beolink_jid": "not_a_jid"}, + pytest.raises(Invalid), + ), + # Defined invalid source + ( + {"source_id": "invalid_source"}, + pytest.raises(MultipleInvalid), + ), + # Defined invalid JID and invalid source + ( + {"beolink_jid": "not_a_jid", "source_id": "invalid_source"}, + pytest.raises(MultipleInvalid), + ), + ], +) +async def test_async_beolink_join_invalid( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + service_parameters: dict[str, str], + expected_result: AbstractContextManager, +) -> None: + """Test invalid async_beolink_join calls with defined JID or source ID.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with expected_result: + await hass.services.async_call( + DOMAIN, + "beolink_join", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, **service_parameters}, + blocking=True, + ) + + mock_mozart_client.join_beolink_peer.assert_not_called() + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +@pytest.mark.parametrize( + ( + "parameter", + "parameter_value", + "expand_side_effect", + "log_messages", + "peers_call_count", + ), + [ + # All discovered + # Valid peers + ("all_discovered", True, None, [], 2), + # Invalid peers + ( + "all_discovered", + True, + NotFoundException(), + [f"Unable to expand to {TEST_JID_3}", f"Unable to expand to {TEST_JID_4}"], + 2, + ), + # Beolink JIDs + # Valid peer + ("beolink_jids", [TEST_JID_3, TEST_JID_4], None, [], 1), + # Invalid peer + ( + "beolink_jids", + [TEST_JID_3, TEST_JID_4], + NotFoundException(), + [ + f"Unable to expand to {TEST_JID_3}. Is the device available on the network?", + f"Unable to expand to {TEST_JID_4}. 
Is the device available on the network?", + ], + 1, + ), + ], +) +async def test_async_beolink_expand( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + parameter: str, + parameter_value: bool | list[str], + expand_side_effect: NotFoundException | None, + log_messages: list[str], + peers_call_count: int, +) -> None: + """Test async_beolink_expand.""" + mock_mozart_client.post_beolink_expand.side_effect = expand_side_effect + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] + ) + + # Set the source to a beolink expandable source + source_change_callback(TEST_SOURCE) + + await hass.services.async_call( + DOMAIN, + "beolink_expand", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + parameter: parameter_value, + }, + blocking=True, + ) + + # Check log messages + for log_message in log_messages: + assert log_message in caplog.text + + # Called once during _initialize and once during async_beolink_expand for all_discovered + assert mock_mozart_client.get_beolink_peers.call_count == peers_call_count + + assert mock_mozart_client.post_beolink_expand.call_count == len( + await mock_mozart_client.get_beolink_peers() + ) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_beolink_unexpand( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_beolink_unexpand.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + DOMAIN, + "beolink_unexpand", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + "beolink_jids": [TEST_JID_3, TEST_JID_4], + }, + blocking=True, + ) + + assert mock_mozart_client.post_beolink_unexpand.call_count == 2 + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_beolink_allstandby( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_beolink_allstandby.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + DOMAIN, + "beolink_allstandby", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_beolink_allstandby.assert_called_once() + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +@pytest.mark.parametrize( + ("repeat"), + [ + # Repeat all + (RepeatMode.ALL), + # Repeat track + (RepeatMode.ONE), + # Repeat none + (RepeatMode.OFF), + ], +) +async def test_async_set_repeat( + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + repeat: RepeatMode, +) -> None: + """Test async_set_repeat.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert ATTR_MEDIA_REPEAT not in states.attributes + + # Set the return 
value of the repeat endpoint to match service call + mock_mozart_client.get_settings_queue.return_value = PlayQueueSettings( + repeat=BANG_OLUFSEN_REPEAT_FROM_HA[repeat] + ) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_REPEAT: repeat, + }, + blocking=True, + ) + mock_mozart_client.set_settings_queue.assert_called_once_with( + play_queue_settings=PlayQueueSettings( + repeat=BANG_OLUFSEN_REPEAT_FROM_HA[repeat] + ) + ) + + # Test the BANG_OLUFSEN_REPEAT_TO_HA dict by checking property value + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes[ATTR_MEDIA_REPEAT] == repeat + + +@pytest.mark.parametrize( + ("shuffle"), + [ + # Shuffle on + (True), + # Shuffle off + (False), + ], +) +async def test_async_set_shuffle( + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + shuffle: bool, +) -> None: + """Test async_set_shuffle.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert ATTR_MEDIA_SHUFFLE not in states.attributes + + # Set the return value of the shuffle endpoint to match service call + mock_mozart_client.get_settings_queue.return_value = PlayQueueSettings( + shuffle=shuffle + ) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_SHUFFLE: shuffle, + }, + blocking=True, + ) + mock_mozart_client.set_settings_queue.assert_called_once_with( + play_queue_settings=PlayQueueSettings(shuffle=shuffle) + ) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes[ATTR_MEDIA_SHUFFLE] == shuffle diff --git a/tests/components/bang_olufsen/test_websocket.py b/tests/components/bang_olufsen/test_websocket.py index 209550faee5..ecf5b2d011e 100644 --- a/tests/components/bang_olufsen/test_websocket.py +++ b/tests/components/bang_olufsen/test_websocket.py @@ -101,8 +101,11 @@ async def test_on_software_update_state( await hass.async_block_till_done() - device = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} + assert mock_config_entry.unique_id + assert ( + device := device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) ) assert device.sw_version == "1.0.0" @@ -132,18 +135,19 @@ async def test_on_all_notifications_raw( }, "eventType": "WebSocketEventVolume", } - raw_notification_full = raw_notification # Get device ID for the modified notification that is sent as an event and in the log - device = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - raw_notification_full.update( - { - "device_id": device.id, - "serial_number": mock_config_entry.unique_id, - } + assert mock_config_entry.unique_id + assert ( + device := device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) ) + raw_notification_full = { + "device_id": device.id, + "serial_number": int(mock_config_entry.unique_id), + **raw_notification, + } caplog.set_level(logging.DEBUG) diff --git a/tests/components/bayesian/test_binary_sensor.py b/tests/components/bayesian/test_binary_sensor.py index 818e9bed909..a8723ae5d30 100644 --- a/tests/components/bayesian/test_binary_sensor.py +++ b/tests/components/bayesian/test_binary_sensor.py @@ -1,6 +1,7 @@ """The test for 
the bayesian sensor platform.""" import json +from logging import WARNING from unittest.mock import patch import pytest @@ -20,16 +21,14 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import Context, HomeAssistant, callback -from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.event import async_track_state_change_event from homeassistant.setup import async_setup_component from tests.common import get_fixture_path -async def test_load_values_when_added_to_hass( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: +async def test_load_values_when_added_to_hass(hass: HomeAssistant) -> None: """Test that sensor initializes with observations of relevant entities.""" config = { @@ -58,11 +57,6 @@ async def test_load_values_when_added_to_hass( assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() - assert ( - entity_registry.entities["binary_sensor.test_binary"].unique_id - == "bayesian-3b4c9563-5e84-4167-8fe7-8f507e796d72" - ) - state = hass.states.get("binary_sensor.test_binary") assert state.attributes.get("device_class") == "connectivity" assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8 @@ -331,6 +325,75 @@ async def test_sensor_value_template(hass: HomeAssistant) -> None: assert state.state == "off" +async def test_mixed_states(hass: HomeAssistant) -> None: + """Test sensor on probability threshold limits.""" + config = { + "binary_sensor": { + "name": "should_HVAC", + "platform": "bayesian", + "observations": [ + { + "platform": "template", + "value_template": "{{states('sensor.guest_sensor') != 'off'}}", + "prob_given_true": 0.3, + "prob_given_false": 0.15, + }, + { + "platform": "state", + "entity_id": "sensor.anyone_home", + "to_state": "on", + "prob_given_true": 0.6, + "prob_given_false": 0.05, + }, + { + "platform": "numeric_state", + "entity_id": "sensor.temperature", + "below": 24, + "above": 19, + "prob_given_true": 0.1, + "prob_given_false": 0.6, + }, + ], + "prior": 0.3, + "probability_threshold": 0.5, + } + } + assert await async_setup_component(hass, "binary_sensor", config) + await hass.async_block_till_done() + + hass.states.async_set("sensor.guest_sensor", "UNKNOWN") + hass.states.async_set("sensor.anyone_home", "on") + hass.states.async_set("sensor.temperature", 15) + + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.should_HVAC") + + assert set(state.attributes.get("occurred_observation_entities")) == { + "sensor.anyone_home", + "sensor.temperature", + } + template_obs = { + "platform": "template", + "value_template": "{{states('sensor.guest_sensor') != 'off'}}", + "prob_given_true": 0.3, + "prob_given_false": 0.15, + "observed": True, + } + assert template_obs in state.attributes.get("observations") + + assert abs(0.95857988 - state.attributes.get("probability")) < 0.01 + # A = binary_sensor.should_HVAC being TRUE, P(A) being the prior + # B = value_template evaluating to TRUE + # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). 
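+    # Where P(B|A) is prob_given_true, P(B|~A) is prob_given_false and P(~A) = 1 - P(A)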
+ # Calculated where P(A) = 0.3, P(B|A) = 0.3 , P(B|notA) = 0.15 = 0.46153846 + # Step 2, prior is now 0.46153846, B now refers to sensor.anyone_home=='on' + # P(A) = 0.46153846, P(B|A) = 0.6 , P(B|notA) = 0.05, result = 0.91139240 + # Step 3, prior is now 0.91139240, B now refers to sensor.temperature in range [19,24] + # However since the temp is 15 we take the inverse probability for this negative observation + # P(A) = 0.91139240, P(B|A) = (1-0.1) , P(B|notA) = (1-0.6), result = 0.95857988 + + async def test_threshold(hass: HomeAssistant, issue_registry: ir.IssueRegistry) -> None: """Test sensor on probability threshold limits.""" config = { @@ -367,7 +430,7 @@ async def test_threshold(hass: HomeAssistant, issue_registry: ir.IssueRegistry) async def test_multiple_observations(hass: HomeAssistant) -> None: """Test sensor with multiple observations of same entity. - these entries should be labelled as 'multi_state' and negative observations ignored - as the outcome is not known to be binary. + these entries should be labelled as 'state' and negative observations ignored - as the outcome is not known to be binary. Before the merge of #67631 this practice was a common work-around for bayesian's ignoring of negative observations, this also preserves that function """ @@ -436,83 +499,203 @@ async def test_multiple_observations(hass: HomeAssistant) -> None: # Calculated using bayes theorum where P(A) = 0.2, P(B|A) = 0.2, P(B|notA) = 0.6 assert state.state == "off" - assert state.attributes.get("observations")[0]["platform"] == "multi_state" - assert state.attributes.get("observations")[1]["platform"] == "multi_state" + assert state.attributes.get("observations")[0]["platform"] == "state" + assert state.attributes.get("observations")[1]["platform"] == "state" -async def test_multiple_numeric_observations(hass: HomeAssistant) -> None: - """Test sensor with multiple numeric observations of same entity.""" +async def test_multiple_numeric_observations( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test sensor on numeric state platform observations with more than one range. + + This tests an example where the probability of it being a 'nice day' varies over + a series of temperatures. Since this is a multi-state, all the non-observed ranges + should be ignored and only the range including the observed value should update + the prior. When a value lands on above or below (15 is tested) it is included if it + equals `below`, and ignored if it equals `above`. 
+ """ config = { "binary_sensor": { "platform": "bayesian", - "name": "Test_Binary", + "name": "nice_day", "observations": [ { "platform": "numeric_state", - "entity_id": "sensor.test_monitored", - "below": 10, - "above": 0, - "prob_given_true": 0.4, - "prob_given_false": 0.0001, + "entity_id": "sensor.test_temp", + "below": 0, + "prob_given_true": 0.05, + "prob_given_false": 0.2, }, { "platform": "numeric_state", - "entity_id": "sensor.test_monitored", - "below": 100, - "above": 30, - "prob_given_true": 0.6, - "prob_given_false": 0.0001, + "entity_id": "sensor.test_temp", + "below": 10, + "above": 0, + "prob_given_true": 0.1, + "prob_given_false": 0.25, + }, + { + "platform": "numeric_state", + "entity_id": "sensor.test_temp", + "below": 15, + "above": 10, + "prob_given_true": 0.2, + "prob_given_false": 0.35, + }, + { + "platform": "numeric_state", + "entity_id": "sensor.test_temp", + "below": 25, + "above": 15, + "prob_given_true": 0.5, + "prob_given_false": 0.15, + }, + { + "platform": "numeric_state", + "entity_id": "sensor.test_temp", + "above": 25, + "prob_given_true": 0.15, + "prob_given_false": 0.05, }, ], - "prior": 0.1, + "prior": 0.3, } } - assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() - hass.states.async_set("sensor.test_monitored", STATE_UNKNOWN) + hass.states.async_set("sensor.test_temp", -5) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_binary") + state = hass.states.get("binary_sensor.nice_day") for attrs in state.attributes.values(): json.dumps(attrs) - assert state.attributes.get("occurred_observation_entities") == [] + assert state.attributes.get("occurred_observation_entities") == ["sensor.test_temp"] assert state.attributes.get("probability") == 0.1 + # No observations made so probability should be the prior + assert state.attributes.get("occurred_observation_entities") == ["sensor.test_temp"] + assert abs(state.attributes.get("probability") - 0.09677) < 0.01 + # A = binary_sensor.nice_day being TRUE + # B = sensor.test_temp in the range (, 0] + # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). + # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false + # Calculated using P(A) = 0.3, P(B|A) = 0.05, P(B|~A) = 0.2 -> 0.09677 + # Because >1 range is defined for sensor.test_temp we should not infer anything from the + # ranges not observed + assert state.state == "off" + + hass.states.async_set("sensor.test_temp", 5) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.nice_day") + + assert state.attributes.get("occurred_observation_entities") == ["sensor.test_temp"] + assert abs(state.attributes.get("probability") - 0.14634146) < 0.01 + # A = binary_sensor.nice_day being TRUE + # B = sensor.test_temp in the range (0, 10] + # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). 
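+    # Substituting the configured values: 0.1 * 0.3 / (0.1 * 0.3 + 0.25 * 0.7) = 0.03 / 0.205 -> 0.14634146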
+ # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false + # Calculated using P(A) = 0.3, P(B|A) = 0.1, P(B|~A) = 0.25 -> 0.14634146 + # Because >1 range is defined for sensor.test_temp we should not infer anything from the + # ranges not observed assert state.state == "off" - hass.states.async_set("sensor.test_monitored", 20) + hass.states.async_set("sensor.test_temp", 12) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_binary") - - assert state.attributes.get("occurred_observation_entities") == [ - "sensor.test_monitored" - ] - assert round(abs(0.026 - state.attributes.get("probability")), 7) < 0.01 - # Step 1 Calculated where P(A) = 0.1, P(~B|A) = 0.6 (negative obs), P(~B|notA) = 0.9999 -> 0.0625 - # Step 2 P(A) = 0.0625, P(B|A) = 0.4 (negative obs), P(B|notA) = 0.9999 -> 0.26 + state = hass.states.get("binary_sensor.nice_day") + assert abs(state.attributes.get("probability") - 0.19672131) < 0.01 + # A = binary_sensor.nice_day being TRUE + # B = sensor.test_temp in the range (10, 15] + # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). + # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false + # Calculated using P(A) = 0.3, P(B|A) = 0.2, P(B|~A) = 0.35 -> 0.19672131 + # Because >1 range is defined for sensor.test_temp we should not infer anything from the + # ranges not observed assert state.state == "off" - hass.states.async_set("sensor.test_monitored", 35) + hass.states.async_set("sensor.test_temp", 22) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_binary") - assert state.attributes.get("occurred_observation_entities") == [ - "sensor.test_monitored" - ] - assert abs(1 - state.attributes.get("probability")) < 0.01 - # Step 1 Calculated where P(A) = 0.1, P(~B|A) = 0.6 (negative obs), P(~B|notA) = 0.9999 -> 0.0625 - # Step 2 P(A) = 0.0625, P(B|A) = 0.6, P(B|notA) = 0.0001 -> 0.9975 + state = hass.states.get("binary_sensor.nice_day") + assert abs(state.attributes.get("probability") - 0.58823529) < 0.01 + # A = binary_sensor.nice_day being TRUE + # B = sensor.test_temp in the range (15, 25] + # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). + # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false + # Calculated using P(A) = 0.3, P(B|A) = 0.5, P(B|~A) = 0.15 -> 0.58823529 + # Because >1 range is defined for sensor.test_temp we should not infer anything from the + # ranges not observed assert state.state == "on" + + hass.states.async_set("sensor.test_temp", 30) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.nice_day") + assert abs(state.attributes.get("probability") - 0.562500) < 0.01 + # A = binary_sensor.nice_day being TRUE + # B = sensor.test_temp in the range (25, ] + # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). 
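+    # Substituting the configured values: 0.15 * 0.3 / (0.15 * 0.3 + 0.05 * 0.7) = 0.045 / 0.08 -> 0.562500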
+ # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false + # Calculated using P(A) = 0.3, P(B|A) = 0.15, P(B|~A) = 0.05 -> 0.562500 + # Because >1 range is defined for sensor.test_temp we should not infer anything from the + # ranges not observed + + assert state.state == "on" + + # Edge cases + # if on a threshold only one observation should be included and not both + hass.states.async_set("sensor.test_temp", 15) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.nice_day") + + assert state.attributes.get("occurred_observation_entities") == ["sensor.test_temp"] + + assert abs(state.attributes.get("probability") - 0.19672131) < 0.01 + # Where there are multi numeric ranges when on the threshold, use below + # A = binary_sensor.nice_day being TRUE + # B = sensor.test_temp in the range (10, 15] + # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). + # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false + # Calculated using P(A) = 0.3, P(B|A) = 0.2, P(B|~A) = 0.35 -> 0.19672131 + # Because >1 range is defined for sensor.test_temp we should not infer anything from the + # ranges not observed + + assert state.state == "off" + + assert len(issue_registry.issues) == 0 assert state.attributes.get("observations")[0]["platform"] == "numeric_state" - assert state.attributes.get("observations")[1]["platform"] == "numeric_state" + + hass.states.async_set("sensor.test_temp", "badstate") + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.nice_day") + + assert state.attributes.get("occurred_observation_entities") == [] + assert state.state == "off" + + hass.states.async_set("sensor.test_temp", STATE_UNAVAILABLE) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.nice_day") + + assert state.attributes.get("occurred_observation_entities") == [] + assert state.state == "off" + + hass.states.async_set("sensor.test_temp", STATE_UNKNOWN) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.nice_day") + + assert state.attributes.get("occurred_observation_entities") == [] + assert state.state == "off" async def test_mirrored_observations( @@ -651,6 +834,127 @@ async def test_missing_prob_given_false( ) +async def test_bad_multi_numeric( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test whether missing prob_given_false are detected and appropriate issues are created.""" + + config = { + "binary_sensor": { + "platform": "bayesian", + "name": "bins_out", + "observations": [ + { + "platform": "numeric_state", + "entity_id": "sensor.signal_strength", + "above": 10, + "prob_given_true": 0.01, + "prob_given_false": 0.3, + }, + { + "platform": "numeric_state", + "entity_id": "sensor.signal_strength", + "above": 5, + "below": 10, + "prob_given_true": 0.02, + "prob_given_false": 0.5, + }, + { + "platform": "numeric_state", + "entity_id": "sensor.signal_strength", + "above": 0, + "below": 6, # overlaps + "prob_given_true": 0.07, + "prob_given_false": 0.1, + }, + { + "platform": "numeric_state", + "entity_id": "sensor.signal_strength", + "above": -10, + "below": 0, + "prob_given_true": 0.3, + "prob_given_false": 0.07, + }, + { + "platform": "numeric_state", + "entity_id": "sensor.signal_strength", + "below": -10, + "prob_given_true": 0.6, + "prob_given_false": 0.03, + }, + ], + "prior": 0.2, + } + } + caplog.clear() + caplog.set_level(WARNING) + + assert await async_setup_component(hass, 
"binary_sensor", config) + + assert "entities must not overlap" in caplog.text + + +async def test_inverted_numeric( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test whether missing prob_given_false are detected and appropriate logs are created.""" + + config = { + "binary_sensor": { + "platform": "bayesian", + "name": "goldilocks_zone", + "observations": [ + { + "platform": "numeric_state", + "entity_id": "sensor.temp", + "above": 23, + "below": 20, + "prob_given_true": 0.9, + "prob_given_false": 0.2, + }, + ], + "prior": 0.4, + } + } + + assert await async_setup_component(hass, "binary_sensor", config) + assert ( + "bayesian numeric state 'above' (23.0) must be less than 'below' (20.0)" + in caplog.text + ) + + +async def test_no_value_numeric( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test whether missing prob_given_false are detected and appropriate logs are created.""" + + config = { + "binary_sensor": { + "platform": "bayesian", + "name": "goldilocks_zone", + "observations": [ + { + "platform": "numeric_state", + "entity_id": "sensor.temp", + "prob_given_true": 0.9, + "prob_given_false": 0.2, + }, + ], + "prior": 0.4, + } + } + + assert await async_setup_component(hass, "binary_sensor", config) + assert "at least one of 'above' or 'below' must be specified" in caplog.text + + async def test_probability_updates(hass: HomeAssistant) -> None: """Test probability update function.""" prob_given_true = [0.3, 0.6, 0.8] diff --git a/tests/components/binary_sensor/test_init.py b/tests/components/binary_sensor/test_init.py index ea0ad05a0db..26b8d919d72 100644 --- a/tests/components/binary_sensor/test_init.py +++ b/tests/components/binary_sensor/test_init.py @@ -17,8 +17,6 @@ from tests.common import ( MockConfigEntry, MockModule, MockPlatform, - help_test_all, - import_and_test_deprecated_constant_enum, mock_config_flow, mock_integration, mock_platform, @@ -198,22 +196,3 @@ async def test_entity_category_config_raises_error( "Entity binary_sensor.test2 cannot be added as the entity category is set to config" in caplog.text ) - - -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(binary_sensor) - - -@pytest.mark.parametrize( - "device_class", - list(binary_sensor.BinarySensorDeviceClass), -) -def test_deprecated_constant_device_class( - caplog: pytest.LogCaptureFixture, - device_class: binary_sensor.BinarySensorDeviceClass, -) -> None: - """Test deprecated binary sensor device classes.""" - import_and_test_deprecated_constant_enum( - caplog, binary_sensor, device_class, "DEVICE_CLASS_", "2025.1" - ) diff --git a/tests/components/blebox/test_climate.py b/tests/components/blebox/test_climate.py index 8ba0c3f630e..e402a3d5fbd 100644 --- a/tests/components/blebox/test_climate.py +++ b/tests/components/blebox/test_climate.py @@ -21,6 +21,7 @@ from homeassistant.components.climate import ( ) from homeassistant.const import ( ATTR_DEVICE_CLASS, + ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, STATE_UNKNOWN, @@ -152,6 +153,7 @@ async def test_on_when_below_desired(saunabox, hass: HomeAssistant) -> None: feature_mock.desired = 64.8 feature_mock.current = 25.7 + feature_mock.mode = 1 feature_mock.async_on = AsyncMock(side_effect=turn_on) await hass.services.async_call( "climate", @@ -186,12 +188,13 @@ async def test_on_when_above_desired(saunabox, hass: HomeAssistant) -> None: feature_mock.desired = 23.4 
feature_mock.current = 28.7 + feature_mock.mode = 1 feature_mock.async_on = AsyncMock(side_effect=turn_on) await hass.services.async_call( "climate", SERVICE_SET_HVAC_MODE, - {"entity_id": entity_id, ATTR_HVAC_MODE: HVACMode.HEAT}, + {ATTR_ENTITY_ID: entity_id, ATTR_HVAC_MODE: HVACMode.HEAT}, blocking=True, ) feature_mock.async_off.assert_not_called() diff --git a/tests/components/blebox/test_cover.py b/tests/components/blebox/test_cover.py index 1900a6d6834..2d9125b2206 100644 --- a/tests/components/blebox/test_cover.py +++ b/tests/components/blebox/test_cover.py @@ -11,12 +11,9 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, CoverDeviceClass, CoverEntityFeature, + CoverState, ) from homeassistant.const import ( ATTR_DEVICE_CLASS, @@ -212,7 +209,7 @@ async def test_open(feature, hass: HomeAssistant) -> None: feature_mock.async_open = AsyncMock(side_effect=open_gate) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED feature_mock.async_update = AsyncMock() await hass.services.async_call( @@ -221,7 +218,7 @@ async def test_open(feature, hass: HomeAssistant) -> None: {"entity_id": entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -240,13 +237,13 @@ async def test_close(feature, hass: HomeAssistant) -> None: feature_mock.async_close = AsyncMock(side_effect=close) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN feature_mock.async_update = AsyncMock() await hass.services.async_call( "cover", SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True ) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING def opening_to_stop_feature_mock(feature_mock): @@ -270,13 +267,13 @@ async def test_stop(feature, hass: HomeAssistant) -> None: opening_to_stop_feature_mock(feature_mock) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING feature_mock.async_update = AsyncMock() await hass.services.async_call( "cover", SERVICE_STOP_COVER, {"entity_id": entity_id}, blocking=True ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -295,7 +292,7 @@ async def test_update(feature, hass: HomeAssistant) -> None: state = hass.states.get(entity_id) assert state.attributes[ATTR_CURRENT_POSITION] == 71 # 100 - 29 - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN @pytest.mark.parametrize( @@ -318,7 +315,7 @@ async def test_set_position(feature, hass: HomeAssistant) -> None: feature_mock.async_set_position = AsyncMock(side_effect=set_position) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED feature_mock.async_update = AsyncMock() await hass.services.async_call( @@ -327,7 +324,7 @@ async def test_set_position(feature, hass: HomeAssistant) 
-> None: {"entity_id": entity_id, ATTR_POSITION: 1}, blocking=True, ) # almost closed - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING async def test_unknown_position(shutterbox, hass: HomeAssistant) -> None: @@ -344,7 +341,7 @@ async def test_unknown_position(shutterbox, hass: HomeAssistant) -> None: await async_setup_entity(hass, entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_CURRENT_POSITION not in state.attributes @@ -402,7 +399,7 @@ async def test_opening_state(feature, hass: HomeAssistant) -> None: feature_mock.async_update = AsyncMock(side_effect=initial_update) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -416,7 +413,7 @@ async def test_closing_state(feature, hass: HomeAssistant) -> None: feature_mock.async_update = AsyncMock(side_effect=initial_update) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -430,7 +427,7 @@ async def test_closed_state(feature, hass: HomeAssistant) -> None: feature_mock.async_update = AsyncMock(side_effect=initial_update) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED async def test_tilt_position(shutterbox, hass: HomeAssistant) -> None: @@ -465,7 +462,7 @@ async def test_set_tilt_position(shutterbox, hass: HomeAssistant) -> None: feature_mock.async_set_tilt_position = AsyncMock(side_effect=set_tilt) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED feature_mock.async_update = AsyncMock() await hass.services.async_call( @@ -474,7 +471,7 @@ async def test_set_tilt_position(shutterbox, hass: HomeAssistant) -> None: {"entity_id": entity_id, ATTR_TILT_POSITION: 80}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING async def test_open_tilt(shutterbox, hass: HomeAssistant) -> None: diff --git a/tests/components/blebox/test_init.py b/tests/components/blebox/test_init.py index f406df51bd4..0cb5139336c 100644 --- a/tests/components/blebox/test_init.py +++ b/tests/components/blebox/test_init.py @@ -5,7 +5,6 @@ import logging import blebox_uniapi import pytest -from homeassistant.components.blebox.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -57,10 +56,10 @@ async def test_unload_config_entry(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.data[DOMAIN] + assert hasattr(entry, "runtime_data") await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) + assert not hasattr(entry, "runtime_data") assert entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/blink/snapshots/test_diagnostics.ambr b/tests/components/blink/snapshots/test_diagnostics.ambr index 
44554dad1e3..54df2b48cdb 100644 --- a/tests/components/blink/snapshots/test_diagnostics.ambr +++ b/tests/components/blink/snapshots/test_diagnostics.ambr @@ -38,6 +38,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'blink', 'minor_version': 1, 'options': dict({ @@ -46,6 +48,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 3, diff --git a/tests/components/blink/test_config_flow.py b/tests/components/blink/test_config_flow.py index 9c3193ec7d6..ec1a8b95e0d 100644 --- a/tests/components/blink/test_config_flow.py +++ b/tests/components/blink/test_config_flow.py @@ -10,6 +10,8 @@ from homeassistant.components.blink import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -53,6 +55,35 @@ async def test_form(hass: HomeAssistant) -> None: } assert len(mock_setup_entry.mock_calls) == 1 + # Now check for duplicates + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch("homeassistant.components.blink.config_flow.Auth.startup"), + patch( + "homeassistant.components.blink.config_flow.Auth.check_key_required", + return_value=False, + ), + patch( + "homeassistant.components.blink.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": "blink@example.com", "password": "example"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + assert len(mock_setup_entry.mock_calls) == 0 + async def test_form_2fa(hass: HomeAssistant) -> None: """Test we get the 2fa form.""" @@ -292,10 +323,11 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: async def test_reauth_shows_user_step(hass: HomeAssistant) -> None: """Test reauth shows the user form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, + mock_entry = MockConfigEntry( + domain=DOMAIN, data={"username": "blink@example.com", "password": "invalid_password"}, ) + mock_entry.add_to_hass(hass) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/blink/test_init.py b/tests/components/blink/test_init.py index 3cd2cd51ebd..6d4a93e58ab 100644 --- a/tests/components/blink/test_init.py +++ b/tests/components/blink/test_init.py @@ -66,18 +66,17 @@ async def test_setup_not_ready_authkey_required( assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR -async def test_unload_entry_multiple( +async def test_unload_entry( hass: HomeAssistant, mock_blink_api: MagicMock, mock_blink_auth_api: MagicMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test being able to unload one of 2 entries.""" + """Test unload doesn't un-register services.""" mock_config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - hass.data[DOMAIN]["dummy"] = {1: 2} assert mock_config_entry.state is 
ConfigEntryState.LOADED assert await hass.config_entries.async_unload(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/blue_current/test_config_flow.py b/tests/components/blue_current/test_config_flow.py index 33346990425..a9dea70431f 100644 --- a/tests/components/blue_current/test_config_flow.py +++ b/tests/components/blue_current/test_config_flow.py @@ -129,6 +129,11 @@ async def test_reauth( expected_api_token: str, ) -> None: """Test reauth flow.""" + config_entry.add_to_hass(hass) + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + with ( patch( "homeassistant.components.blue_current.config_flow.Client.validate_api_token", @@ -146,20 +151,6 @@ async def test_reauth( lambda self, on_data, on_open: hass.loop.create_future(), ), ): - config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data={"api_token": "123"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"api_token": "1234567890"}, diff --git a/tests/components/blueprint/test_default_blueprints.py b/tests/components/blueprint/test_default_blueprints.py index 9bd60a7cb6b..f69126a7f25 100644 --- a/tests/components/blueprint/test_default_blueprints.py +++ b/tests/components/blueprint/test_default_blueprints.py @@ -6,7 +6,7 @@ import pathlib import pytest -from homeassistant.components.blueprint import models +from homeassistant.components.blueprint import BLUEPRINT_SCHEMA, models from homeassistant.components.blueprint.const import BLUEPRINT_FOLDER from homeassistant.util import yaml @@ -26,4 +26,4 @@ def test_default_blueprints(domain: str) -> None: LOGGER.info("Processing %s", fil) assert fil.name.endswith(".yaml") data = yaml.load_yaml(fil) - models.Blueprint(data, expected_domain=domain) + models.Blueprint(data, expected_domain=domain, schema=BLUEPRINT_SCHEMA) diff --git a/tests/components/blueprint/test_models.py b/tests/components/blueprint/test_models.py index 45e35474e4c..0ce8c1f397a 100644 --- a/tests/components/blueprint/test_models.py +++ b/tests/components/blueprint/test_models.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch import pytest -from homeassistant.components.blueprint import errors, models +from homeassistant.components.blueprint import BLUEPRINT_SCHEMA, errors, models from homeassistant.core import HomeAssistant from homeassistant.util.yaml import Input @@ -22,7 +22,8 @@ def blueprint_1() -> models.Blueprint: "input": {"test-input": {"name": "Name", "description": "Description"}}, }, "example": Input("test-input"), - } + }, + schema=BLUEPRINT_SCHEMA, ) @@ -57,26 +58,32 @@ def blueprint_2(request: pytest.FixtureRequest) -> models.Blueprint: } }, } - return models.Blueprint(blueprint) + return models.Blueprint(blueprint, schema=BLUEPRINT_SCHEMA) @pytest.fixture def domain_bps(hass: HomeAssistant) -> models.DomainBlueprints: """Domain blueprints fixture.""" return models.DomainBlueprints( - hass, "automation", logging.getLogger(__name__), None, AsyncMock() + hass, + "automation", + logging.getLogger(__name__), + None, + AsyncMock(), + BLUEPRINT_SCHEMA, ) def test_blueprint_model_init() -> None: """Test constructor validation.""" with 
pytest.raises(errors.InvalidBlueprint): - models.Blueprint({}) + models.Blueprint({}, schema=BLUEPRINT_SCHEMA) with pytest.raises(errors.InvalidBlueprint): models.Blueprint( {"blueprint": {"name": "Hello", "domain": "automation"}}, expected_domain="not-automation", + schema=BLUEPRINT_SCHEMA, ) with pytest.raises(errors.InvalidBlueprint): @@ -88,7 +95,8 @@ def test_blueprint_model_init() -> None: "input": {"something": None}, }, "trigger": {"platform": Input("non-existing")}, - } + }, + schema=BLUEPRINT_SCHEMA, ) @@ -115,7 +123,8 @@ def test_blueprint_update_metadata() -> None: "name": "Hello", "domain": "automation", }, - } + }, + schema=BLUEPRINT_SCHEMA, ) bp.update_metadata(source_url="http://bla.com") @@ -131,7 +140,8 @@ def test_blueprint_validate() -> None: "name": "Hello", "domain": "automation", }, - } + }, + schema=BLUEPRINT_SCHEMA, ).validate() is None ) @@ -143,7 +153,8 @@ def test_blueprint_validate() -> None: "domain": "automation", "homeassistant": {"min_version": "100000.0.0"}, }, - } + }, + schema=BLUEPRINT_SCHEMA, ).validate() == ["Requires at least Home Assistant 100000.0.0"] diff --git a/tests/components/blueprint/test_websocket_api.py b/tests/components/blueprint/test_websocket_api.py index 13615803569..921088d8ac6 100644 --- a/tests/components/blueprint/test_websocket_api.py +++ b/tests/components/blueprint/test_websocket_api.py @@ -64,6 +64,17 @@ async def test_list_blueprints( "name": "Call service based on event", }, }, + "test_event_service_legacy_schema.yaml": { + "metadata": { + "domain": "automation", + "input": { + "service_to_call": None, + "trigger_event": {"selector": {"text": {}}}, + "a_number": {"selector": {"number": {"mode": "box", "step": 1.0}}}, + }, + "name": "Call service based on event", + }, + }, "in_folder/in_folder_blueprint.yaml": { "metadata": { "domain": "automation", @@ -212,16 +223,16 @@ async def test_save_blueprint( " input:\n trigger_event:\n selector:\n text: {}\n " " service_to_call:\n a_number:\n selector:\n number:\n " " mode: box\n step: 1.0\n source_url:" - " https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntrigger:\n" - " platform: event\n event_type: !input 'trigger_event'\naction:\n " + " https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntriggers:\n" + " trigger: event\n event_type: !input 'trigger_event'\nactions:\n " " service: !input 'service_to_call'\n entity_id: light.kitchen\n" # c dumper will not quote the value after !input "blueprint:\n name: Call service based on event\n domain: automation\n " " input:\n trigger_event:\n selector:\n text: {}\n " " service_to_call:\n a_number:\n selector:\n number:\n " " mode: box\n step: 1.0\n source_url:" - " https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntrigger:\n" - " platform: event\n event_type: !input trigger_event\naction:\n service:" + " https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntriggers:\n" + " trigger: event\n event_type: !input trigger_event\nactions:\n service:" " !input service_to_call\n entity_id: light.kitchen\n" ) # Make sure ita parsable and does not raise @@ -483,13 +494,13 @@ async def test_substituting_blueprint_inputs( assert msg["success"] assert msg["result"]["substituted_config"] == { - "action": { + "actions": { "entity_id": "light.kitchen", "service": "test.automation", }, - "trigger": { + "triggers": { "event_type": "test_event", - "platform": "event", 
+ "trigger": "event", }, } diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py index 155d6b66e4e..b4ee61dee57 100644 --- a/tests/components/bluesound/conftest.py +++ b/tests/components/bluesound/conftest.py @@ -1,71 +1,124 @@ """Common fixtures for the Bluesound tests.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator +from dataclasses import dataclass +import ipaddress +from typing import Any from unittest.mock import AsyncMock, patch -from pyblu import Status, SyncStatus +from pyblu import Input, Player, Preset, Status, SyncStatus import pytest from homeassistant.components.bluesound.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant +from .utils import LongPollingMock + from tests.common import MockConfigEntry -@pytest.fixture -def sync_status() -> SyncStatus: - """Return a sync status object.""" - return SyncStatus( - etag="etag", - id="1.1.1.1:11000", - mac="00:11:22:33:44:55", - name="player-name", - image="invalid_url", - initialized=True, - brand="brand", - model="model", - model_name="model-name", - volume_db=0.5, - volume=50, - group=None, - master=None, - slaves=None, - zone=None, - zone_master=None, - zone_slave=None, - mute_volume_db=None, - mute_volume=None, - ) +@dataclass +class PlayerMockData: + """Container for player mock data.""" + + host: str + player: AsyncMock + status_long_polling_mock: LongPollingMock[Status] + sync_status_long_polling_mock: LongPollingMock[SyncStatus] + + @staticmethod + async def generate(host: str) -> "PlayerMockData": + """Generate player mock data.""" + host_ip = ipaddress.ip_address(host) + assert host_ip.version == 4 + mac_parts = [0xFF, 0xFF, *host_ip.packed] + mac = ":".join(f"{x:02X}" for x in mac_parts) + + player_name = f"player-name{host.replace('.', '')}" + + player = await AsyncMock(spec=Player)() + player.__aenter__.return_value = player + + status_long_polling_mock = LongPollingMock( + Status( + etag="etag", + input_id=None, + service=None, + state="play", + shuffle=False, + album="album", + artist="artist", + name="song", + image=None, + volume=10, + volume_db=22.3, + mute=False, + mute_volume=None, + mute_volume_db=None, + seconds=2, + total_seconds=123.1, + can_seek=False, + sleep=0, + group_name=None, + group_volume=None, + indexing=False, + stream_url=None, + ) + ) + + sync_status_long_polling_mock = LongPollingMock( + SyncStatus( + etag="etag", + id=f"{host}:11000", + mac=mac, + name=player_name, + image="invalid_url", + initialized=True, + brand="brand", + model="model", + model_name="model-name", + volume_db=0.5, + volume=50, + group=None, + master=None, + slaves=None, + zone=None, + zone_master=None, + zone_slave=None, + mute_volume_db=None, + mute_volume=None, + ) + ) + + player.status.side_effect = status_long_polling_mock.side_effect() + player.sync_status.side_effect = sync_status_long_polling_mock.side_effect() + + player.inputs = AsyncMock( + return_value=[ + Input("1", "input1", "image1", "url1"), + Input("2", "input2", "image2", "url2"), + ] + ) + player.presets = AsyncMock( + return_value=[ + Preset("preset1", "1", "url1", "image1", None), + Preset("preset2", "2", "url2", "image2", None), + ] + ) + + return PlayerMockData( + host, player, status_long_polling_mock, sync_status_long_polling_mock + ) -@pytest.fixture -def status() -> Status: - """Return a status object.""" - return Status( - etag="etag", - input_id=None, - service=None, - state="playing", - 
shuffle=False, - album=None, - artist=None, - name=None, - image=None, - volume=10, - volume_db=22.3, - mute=False, - mute_volume=None, - mute_volume_db=None, - seconds=2, - total_seconds=123.1, - can_seek=False, - sleep=0, - group_name=None, - group_volume=None, - indexing=False, - stream_url=None, - ) +@dataclass +class PlayerMocks: + """Container for mocks.""" + + player_data: PlayerMockData + player_data_secondary: PlayerMockData + player_data_for_already_configured: PlayerMockData @pytest.fixture @@ -78,24 +131,76 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def config_entry() -> MockConfigEntry: """Return a mocked config entry.""" - mock_entry = MockConfigEntry( + return MockConfigEntry( domain=DOMAIN, data={ - CONF_HOST: "1.1.1.2", + CONF_HOST: "1.1.1.1", CONF_PORT: 11000, }, - unique_id="00:11:22:33:44:55-11000", + unique_id="ff:ff:01:01:01:01-11000", ) - mock_entry.add_to_hass(hass) - - return mock_entry @pytest.fixture -def mock_player(status: Status) -> Generator[AsyncMock]: +def config_entry_secondary() -> MockConfigEntry: + """Return a mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "2.2.2.2", + CONF_PORT: 11000, + }, + unique_id="ff:ff:02:02:02:02-11000", + ) + + +@pytest.fixture +async def setup_config_entry( + hass: HomeAssistant, config_entry: MockConfigEntry, player_mocks: PlayerMocks +) -> None: + """Set up the platform.""" + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +@pytest.fixture +async def setup_config_entry_secondary( + hass: HomeAssistant, + config_entry_secondary: MockConfigEntry, + player_mocks: PlayerMocks, +) -> None: + """Set up the platform.""" + config_entry_secondary.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry_secondary.entry_id) + await hass.async_block_till_done() + + +@pytest.fixture +async def player_mocks() -> AsyncGenerator[PlayerMocks]: """Mock the player.""" + player_mocks = PlayerMocks( + player_data=await PlayerMockData.generate("1.1.1.1"), + player_data_secondary=await PlayerMockData.generate("2.2.2.2"), + player_data_for_already_configured=await PlayerMockData.generate("1.1.1.2"), + ) + + # to simulate a player that is already configured + player_mocks.player_data_for_already_configured.sync_status_long_polling_mock.get().mac = player_mocks.player_data.sync_status_long_polling_mock.get().mac + + def select_player(*args: Any, **kwargs: Any) -> AsyncMock: + match args[0]: + case "1.1.1.1": + return player_mocks.player_data.player + case "2.2.2.2": + return player_mocks.player_data_secondary.player + case "1.1.1.2": + return player_mocks.player_data_for_already_configured.player + case _: + raise ValueError("Invalid player") + with ( patch( "homeassistant.components.bluesound.Player", autospec=True @@ -105,28 +210,6 @@ def mock_player(status: Status) -> Generator[AsyncMock]: new=mock_player, ), ): - player = mock_player.return_value - player.__aenter__.return_value = player - player.status.return_value = status - player.sync_status.return_value = SyncStatus( - etag="etag", - id="1.1.1.1:11000", - mac="00:11:22:33:44:55", - name="player-name", - image="invalid_url", - initialized=True, - brand="brand", - model="model", - model_name="model-name", - volume_db=0.5, - volume=50, - group=None, - master=None, - slaves=None, - zone=None, - zone_master=None, - zone_slave=None, - mute_volume_db=None, 
- mute_volume=None, - ) - yield player + mock_player.side_effect = select_player + + yield player_mocks diff --git a/tests/components/bluesound/snapshots/test_media_player.ambr b/tests/components/bluesound/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..3e644d3038a --- /dev/null +++ b/tests/components/bluesound/snapshots/test_media_player.ambr @@ -0,0 +1,31 @@ +# serializer version: 1 +# name: test_attributes_set + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'player-name1111', + 'is_volume_muted': False, + 'master': False, + 'media_album_name': 'album', + 'media_artist': 'artist', + 'media_content_type': , + 'media_duration': 123, + 'media_position': 2, + 'media_title': 'song', + 'shuffle': False, + 'source_list': list([ + 'input1', + 'input2', + 'preset1', + 'preset2', + ]), + 'supported_features': , + 'volume_level': 0.1, + }), + 'context': , + 'entity_id': 'media_player.player_name1111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- diff --git a/tests/components/bluesound/test_config_flow.py b/tests/components/bluesound/test_config_flow.py index 8fecba7017d..63744cdf0ff 100644 --- a/tests/components/bluesound/test_config_flow.py +++ b/tests/components/bluesound/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiohttp import ClientConnectionError +from pyblu.errors import PlayerUnreachableError from homeassistant.components.bluesound.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo @@ -11,11 +11,13 @@ from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import PlayerMocks + from tests.common import MockConfigEntry async def test_user_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, player_mocks: PlayerMocks ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -33,15 +35,17 @@ async def test_user_flow_success( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name" + assert result["title"] == "player-name1111" assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} - assert result["result"].unique_id == "00:11:22:33:44:55-11000" + assert result["result"].unique_id == "ff:ff:01:01:01:01-11000" mock_setup_entry.assert_called_once() async def test_user_flow_cannot_connect( - hass: HomeAssistant, mock_player: AsyncMock, mock_setup_entry: AsyncMock + hass: HomeAssistant, + player_mocks: PlayerMocks, + mock_setup_entry: AsyncMock, ) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( @@ -49,7 +53,9 @@ async def test_user_flow_cannot_connect( context={"source": SOURCE_USER}, ) - mock_player.sync_status.side_effect = ClientConnectionError + player_mocks.player_data.sync_status_long_polling_mock.set_error( + PlayerUnreachableError("Player not reachable") + ) result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -61,7 +67,7 @@ async def test_user_flow_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} assert result["step_id"] == "user" - mock_player.sync_status.side_effect = None + player_mocks.player_data.sync_status_long_polling_mock.set_error(None) result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -70,7 +76,7 @@ 
async def test_user_flow_cannot_connect( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name" + assert result["title"] == "player-name1111" assert result["data"] == { CONF_HOST: "1.1.1.1", CONF_PORT: 11000, @@ -81,10 +87,11 @@ async def test_user_flow_cannot_connect( async def test_user_flow_aleady_configured( hass: HomeAssistant, - mock_player: AsyncMock, - mock_config_entry: MockConfigEntry, + player_mocks: PlayerMocks, + config_entry: MockConfigEntry, ) -> None: """Test we handle already configured.""" + config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, @@ -93,7 +100,7 @@ async def test_user_flow_aleady_configured( result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", + CONF_HOST: "1.1.1.2", CONF_PORT: 11000, }, ) @@ -101,13 +108,13 @@ async def test_user_flow_aleady_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.2" - mock_player.sync_status.assert_called_once() + player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() async def test_import_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, player_mocks: PlayerMocks ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -117,19 +124,21 @@ async def test_import_flow_success( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name" + assert result["title"] == "player-name1111" assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} - assert result["result"].unique_id == "00:11:22:33:44:55-11000" + assert result["result"].unique_id == "ff:ff:01:01:01:01-11000" mock_setup_entry.assert_called_once() - mock_player.sync_status.assert_called_once() + player_mocks.player_data.player.sync_status.assert_called_once() async def test_import_flow_cannot_connect( - hass: HomeAssistant, mock_player: AsyncMock + hass: HomeAssistant, player_mocks: PlayerMocks ) -> None: """Test we handle cannot connect error.""" - mock_player.sync_status.side_effect = ClientConnectionError + player_mocks.player_data.player.sync_status.side_effect = PlayerUnreachableError( + "Player not reachable" + ) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, @@ -139,29 +148,30 @@ async def test_import_flow_cannot_connect( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" - mock_player.sync_status.assert_called_once() + player_mocks.player_data.player.sync_status.assert_called_once() async def test_import_flow_already_configured( hass: HomeAssistant, - mock_player: AsyncMock, - mock_config_entry: MockConfigEntry, + player_mocks: PlayerMocks, + config_entry: MockConfigEntry, ) -> None: """Test we handle already configured.""" + config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, + data={CONF_HOST: "1.1.1.2", CONF_PORT: 11000}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - mock_player.sync_status.assert_called_once() + 
player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() async def test_zeroconf_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, player_mocks: PlayerMocks ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -171,7 +181,7 @@ async def test_zeroconf_flow_success( ip_address="1.1.1.1", ip_addresses=["1.1.1.1"], port=11000, - hostname="player-name", + hostname="player-name1111", type="_musc._tcp.local.", name="player-name._musc._tcp.local.", properties={}, @@ -182,25 +192,27 @@ async def test_zeroconf_flow_success( assert result["step_id"] == "confirm" mock_setup_entry.assert_not_called() - mock_player.sync_status.assert_called_once() + player_mocks.player_data.player.sync_status.assert_called_once() result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name" + assert result["title"] == "player-name1111" assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} - assert result["result"].unique_id == "00:11:22:33:44:55-11000" + assert result["result"].unique_id == "ff:ff:01:01:01:01-11000" mock_setup_entry.assert_called_once() async def test_zeroconf_flow_cannot_connect( - hass: HomeAssistant, mock_player: AsyncMock + hass: HomeAssistant, player_mocks: PlayerMocks ) -> None: """Test we handle cannot connect error.""" - mock_player.sync_status.side_effect = ClientConnectionError + player_mocks.player_data.player.sync_status.side_effect = PlayerUnreachableError( + "Player not reachable" + ) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, @@ -208,7 +220,7 @@ async def test_zeroconf_flow_cannot_connect( ip_address="1.1.1.1", ip_addresses=["1.1.1.1"], port=11000, - hostname="player-name", + hostname="player-name1111", type="_musc._tcp.local.", name="player-name._musc._tcp.local.", properties={}, @@ -218,23 +230,24 @@ async def test_zeroconf_flow_cannot_connect( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" - mock_player.sync_status.assert_called_once() + player_mocks.player_data.player.sync_status.assert_called_once() async def test_zeroconf_flow_already_configured( hass: HomeAssistant, - mock_player: AsyncMock, - mock_config_entry: MockConfigEntry, + player_mocks: PlayerMocks, + config_entry: MockConfigEntry, ) -> None: """Test we handle already configured and update the host.""" + config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=ZeroconfServiceInfo( - ip_address="1.1.1.1", - ip_addresses=["1.1.1.1"], + ip_address="1.1.1.2", + ip_addresses=["1.1.1.2"], port=11000, - hostname="player-name", + hostname="player-name1112", type="_musc._tcp.local.", name="player-name._musc._tcp.local.", properties={}, @@ -244,6 +257,6 @@ async def test_zeroconf_flow_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.2" - mock_player.sync_status.assert_called_once() + player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() diff --git a/tests/components/bluesound/test_init.py b/tests/components/bluesound/test_init.py new file mode 100644 index 
00000000000..4178c27acad --- /dev/null +++ b/tests/components/bluesound/test_init.py @@ -0,0 +1,46 @@ +"""Test bluesound integration.""" + +from pyblu.errors import PlayerUnreachableError + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from .conftest import PlayerMocks + +from tests.common import MockConfigEntry + + +async def test_setup_entry( + hass: HomeAssistant, setup_config_entry: None, config_entry: MockConfigEntry +) -> None: + """Test a successful setup entry.""" + assert hass.states.get("media_player.player_name1111").state == "playing" + assert config_entry.state is ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("media_player.player_name1111").state == "unavailable" + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_unload_entry_while_player_is_offline( + hass: HomeAssistant, + setup_config_entry: None, + config_entry: MockConfigEntry, + player_mocks: PlayerMocks, +) -> None: + """Test entries can be unloaded correctly while the player is offline.""" + player_mocks.player_data.player.status.side_effect = PlayerUnreachableError( + "Player not reachable" + ) + player_mocks.player_data.status_long_polling_mock.trigger() + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("media_player.player_name1111").state == "unavailable" + assert config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/bluesound/test_media_player.py b/tests/components/bluesound/test_media_player.py new file mode 100644 index 00000000000..217225628f2 --- /dev/null +++ b/tests/components/bluesound/test_media_player.py @@ -0,0 +1,414 @@ +"""Tests for the Bluesound Media Player platform.""" + +import dataclasses +from unittest.mock import call + +from pyblu import PairedPlayer +from pyblu.errors import PlayerUnreachableError +import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.components.bluesound import DOMAIN as BLUESOUND_DOMAIN +from homeassistant.components.bluesound.const import ATTR_MASTER +from homeassistant.components.bluesound.services import ( + SERVICE_CLEAR_TIMER, + SERVICE_JOIN, + SERVICE_SET_TIMER, +) +from homeassistant.components.media_player import ( + ATTR_MEDIA_VOLUME_LEVEL, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + MediaPlayerState, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from .conftest import PlayerMocks + + +@pytest.mark.parametrize( + ("service", "method"), + [ + (SERVICE_MEDIA_PAUSE, "pause"), + (SERVICE_MEDIA_PLAY, "play"), + (SERVICE_MEDIA_NEXT_TRACK, "skip"), + (SERVICE_MEDIA_PREVIOUS_TRACK, "back"), + ], +) +async def test_simple_actions( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, + service: str, + method: str, +) -> None: + """Test the media player simple actions.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + 
service, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + getattr(player_mocks.player_data.player, method).assert_called_once_with() + + +async def test_volume_set( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player volume set.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: "media_player.player_name1111", ATTR_MEDIA_VOLUME_LEVEL: 0.5}, + blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(level=50) + + +async def test_volume_mute( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player volume mute.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, + {ATTR_ENTITY_ID: "media_player.player_name1111", "is_volume_muted": True}, + blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(mute=True) + + +async def test_volume_up( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player volume up.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(level=11) + + +async def test_volume_down( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player volume down.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(level=9) + + +async def test_attributes_set( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, + snapshot: SnapshotAssertion, +) -> None: + """Test the media player attributes set.""" + state = hass.states.get("media_player.player_name1111") + assert state == snapshot(exclude=props("media_position_updated_at")) + + +async def test_stop_maps_to_idle( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player stop maps to idle.""" + player_mocks.player_data.status_long_polling_mock.set( + dataclasses.replace( + player_mocks.player_data.status_long_polling_mock.get(), state="stop" + ) + ) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + assert ( + hass.states.get("media_player.player_name1111").state == MediaPlayerState.IDLE + ) + + +async def test_status_updated( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player status updated.""" + pre_state = hass.states.get("media_player.player_name1111") + assert pre_state.state == "playing" + assert pre_state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.1 + + status = player_mocks.player_data.status_long_polling_mock.get() + status = dataclasses.replace(status, state="pause", volume=50, etag="changed") + player_mocks.player_data.status_long_polling_mock.set(status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + post_state = hass.states.get("media_player.player_name1111") + + assert post_state.state == MediaPlayerState.PAUSED + assert post_state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.5 + + +async 
def test_unavailable_when_offline( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test that the media player goes unavailable when the player is unreachable.""" + pre_state = hass.states.get("media_player.player_name1111") + assert pre_state.state == "playing" + + player_mocks.player_data.status_long_polling_mock.set_error( + PlayerUnreachableError("Player not reachable") + ) + player_mocks.player_data.status_long_polling_mock.trigger() + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + post_state = hass.states.get("media_player.player_name1111") + + assert post_state.state == STATE_UNAVAILABLE + + +async def test_set_sleep_timer( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the set sleep timer action.""" + await hass.services.async_call( + BLUESOUND_DOMAIN, + SERVICE_SET_TIMER, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data.player.sleep_timer.assert_called_once() + + +async def test_clear_sleep_timer( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the clear sleep timer action.""" + + player_mocks.player_data.player.sleep_timer.side_effect = [15, 30, 45, 60, 90, 0] + + await hass.services.async_call( + BLUESOUND_DOMAIN, + SERVICE_CLEAR_TIMER, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data.player.sleep_timer.assert_has_calls([call()] * 6) + + +async def test_join_cannot_join_to_self( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test that joining to self is not allowed.""" + with pytest.raises(ServiceValidationError, match="Cannot join player to itself"): + await hass.services.async_call( + BLUESOUND_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.player_name1111", + ATTR_MASTER: "media_player.player_name1111", + }, + blocking=True, + ) + + +async def test_join( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the join action.""" + await hass.services.async_call( + BLUESOUND_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.player_name1111", + ATTR_MASTER: "media_player.player_name2222", + }, + blocking=True, + ) + + player_mocks.player_data_secondary.player.add_slave.assert_called_once_with( + "1.1.1.1", 11000 + ) + + +async def test_unjoin( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the unjoin action.""" + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + master=PairedPlayer("2.2.2.2", 11000), + ) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + await hass.services.async_call( + BLUESOUND_DOMAIN, + "unjoin", + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data_secondary.player.remove_slave.assert_called_once_with( + "1.1.1.1", 11000 + ) + + +async def test_attr_master( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player master.""" + attr_master = 
hass.states.get("media_player.player_name1111").attributes[ + ATTR_MASTER + ] + assert attr_master is False + + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + slaves=[PairedPlayer("2.2.2.2", 11000)], + ) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + attr_master = hass.states.get("media_player.player_name1111").attributes[ + ATTR_MASTER + ] + + assert attr_master is True + + +async def test_attr_bluesound_group( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player grouping for leader.""" + attr_bluesound_group = hass.states.get( + "media_player.player_name1111" + ).attributes.get("bluesound_group") + assert attr_bluesound_group is None + + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + slaves=[PairedPlayer("2.2.2.2", 11000)], + ) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + attr_bluesound_group = hass.states.get( + "media_player.player_name1111" + ).attributes.get("bluesound_group") + + assert attr_bluesound_group == ["player-name1111", "player-name2222"] + + +async def test_attr_bluesound_group_for_follower( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player grouping for follower.""" + attr_bluesound_group = hass.states.get( + "media_player.player_name2222" + ).attributes.get("bluesound_group") + assert attr_bluesound_group is None + + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + slaves=[PairedPlayer("2.2.2.2", 11000)], + ) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + updated_sync_status = dataclasses.replace( + player_mocks.player_data_secondary.sync_status_long_polling_mock.get(), + master=PairedPlayer("1.1.1.1", 11000), + ) + player_mocks.player_data_secondary.sync_status_long_polling_mock.set( + updated_sync_status + ) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + attr_bluesound_group = hass.states.get( + "media_player.player_name2222" + ).attributes.get("bluesound_group") + + assert attr_bluesound_group == ["player-name1111", "player-name2222"] + + +async def test_volume_up_from_6_to_7( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player volume up from 6 to 7. + + This fails if rounding is not done correctly. See https://github.com/home-assistant/core/issues/129956 for more details. 
+ """ + player_mocks.player_data.status_long_polling_mock.set( + dataclasses.replace( + player_mocks.player_data.status_long_polling_mock.get(), volume=6 + ) + ) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(level=7) diff --git a/tests/components/bluesound/utils.py b/tests/components/bluesound/utils.py new file mode 100644 index 00000000000..112d077d7f5 --- /dev/null +++ b/tests/components/bluesound/utils.py @@ -0,0 +1,70 @@ +"""Utils for bluesound tests.""" + +import asyncio +from typing import Protocol + + +class Etag(Protocol): + """Etag protocol.""" + + etag: str + + +class LongPollingMock[T: Etag]: + """Mock long polling methods(status, sync_status).""" + + def __init__(self, value: T) -> None: + """Store value and allows to wait for changes.""" + self._value = value + self._error: Exception | None = None + self._event = asyncio.Event() + self._event.set() + + def trigger(self): + """Trigger the event without changing the value.""" + self._event.set() + + def set(self, value: T): + """Set the value and notify all waiting.""" + self._value = value + self._event.set() + + def set_error(self, error: Exception | None): + """Set the error and notify all waiting.""" + self._error = error + self._event.set() + + def get(self) -> T: + """Get the value without waiting.""" + return self._value + + async def wait(self) -> T: + """Wait for the value or error to change.""" + await self._event.wait() + self._event.clear() + + return self._value + + def side_effect(self): + """Return the side_effect for mocking.""" + last_etag = None + + async def mock(*args, **kwargs) -> T: + nonlocal last_etag + if self._error is not None: + raise self._error + + etag = kwargs.get("etag") + if etag is None or etag != last_etag: + last_etag = self.get().etag + return self.get() + + value = await self.wait() + last_etag = value.etag + + if self._error is not None: + raise self._error + + return value + + return mock diff --git a/tests/components/bluetooth/test_init.py b/tests/components/bluetooth/test_init.py index 8e7d604f794..ba8792a79a3 100644 --- a/tests/components/bluetooth/test_init.py +++ b/tests/components/bluetooth/test_init.py @@ -2872,7 +2872,7 @@ async def test_default_address_config_entries_removed_linux( assert not hass.config_entries.async_entries(bluetooth.DOMAIN) -@pytest.mark.usefixtures("enable_bluetooth", "one_adapter") +@pytest.mark.usefixtures("one_adapter") async def test_can_unsetup_bluetooth_single_adapter_linux( hass: HomeAssistant, mock_bleak_scanner_start: MagicMock ) -> None: @@ -2890,12 +2890,17 @@ async def test_can_unsetup_bluetooth_single_adapter_linux( await hass.async_block_till_done() -@pytest.mark.usefixtures("enable_bluetooth", "two_adapters") +@pytest.mark.usefixtures("two_adapters") async def test_can_unsetup_bluetooth_multiple_adapters( hass: HomeAssistant, mock_bleak_scanner_start: MagicMock, ) -> None: """Test we can setup and unsetup bluetooth with multiple adapters.""" + # Setup bluetooth first since otherwise loading the first + # config entry will load the second one as well + await async_setup_component(hass, bluetooth.DOMAIN, {}) + await hass.async_block_till_done() + entry1 = MockConfigEntry( domain=bluetooth.DOMAIN, data={}, unique_id="00:00:00:00:00:01" ) diff --git 
a/tests/components/bluetooth/test_manager.py b/tests/components/bluetooth/test_manager.py index 0ac49aa72cd..0454df9a4a7 100644 --- a/tests/components/bluetooth/test_manager.py +++ b/tests/components/bluetooth/test_manager.py @@ -13,6 +13,7 @@ from bluetooth_adapters import AdvertisementHistory from habluetooth.advertisement_tracker import TRACKER_BUFFERING_WOBBLE_SECONDS import pytest +from homeassistant import config_entries from homeassistant.components import bluetooth from homeassistant.components.bluetooth import ( FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS, @@ -36,6 +37,7 @@ from homeassistant.components.bluetooth.const import ( UNAVAILABLE_TRACK_SECONDS, ) from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.json import json_loads @@ -52,7 +54,13 @@ from . import ( patch_bluetooth_time, ) -from tests.common import async_fire_time_changed, load_fixture +from tests.common import ( + MockConfigEntry, + MockModule, + async_fire_time_changed, + load_fixture, + mock_integration, +) @pytest.fixture @@ -1002,6 +1010,12 @@ async def test_goes_unavailable_dismisses_discovery_and_makes_discoverable( assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "switchbot" + assert mock_config_flow.mock_calls[0][2]["context"] == { + "discovery_key": DiscoveryKey( + domain="bluetooth", key="44:44:33:11:23:45", version=1 + ), + "source": "bluetooth", + } assert async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None assert async_scanner_count(hass, connectable=False) == 1 @@ -1075,6 +1089,12 @@ async def test_goes_unavailable_dismisses_discovery_and_makes_discoverable( ) assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "switchbot" + assert mock_config_flow.mock_calls[0][2]["context"] == { + "discovery_key": DiscoveryKey( + domain="bluetooth", key="44:44:33:11:23:45", version=1 + ), + "source": "bluetooth", + } cancel_unavailable() @@ -1268,3 +1288,375 @@ async def test_set_fallback_interval_big(hass: HomeAssistant) -> None: # We should forget fallback interval after it expires assert async_get_fallback_availability_interval(hass, "44:44:33:11:23:12") is None + + +@pytest.mark.usefixtures("mock_bluetooth_adapters") +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + ), + [ + # Matching discovery key + ( + "switchbot", + { + "bluetooth": ( + DiscoveryKey( + domain="bluetooth", key="44:44:33:11:23:45", version=1 + ), + ) + }, + ), + # Matching discovery key + ( + "switchbot", + { + "bluetooth": ( + DiscoveryKey( + domain="bluetooth", key="44:44:33:11:23:45", version=1 + ), + ), + "other": (DiscoveryKey(domain="other", key="blah", version=1),), + }, + ), + # Matching discovery key, other domain + # Note: Rediscovery is not currently restricted to the domain of the removed + # entry. Such a check can be added if needed. 
+ ( + "comp", + { + "bluetooth": ( + DiscoveryKey( + domain="bluetooth", key="44:44:33:11:23:45", version=1 + ), + ) + }, + ), + ], +) +@pytest.mark.parametrize( + "entry_source", + [ + config_entries.SOURCE_BLUETOOTH, + config_entries.SOURCE_IGNORE, + config_entries.SOURCE_USER, + ], +) +async def test_bluetooth_rediscover( + hass: HomeAssistant, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, +) -> None: + """Test we reinitiate flows when an ignored config entry is removed.""" + mock_bt = [ + { + "domain": "switchbot", + "service_data_uuid": "050a021a-0000-1000-8000-00805f9b34fb", + "connectable": False, + }, + ] + with patch( + "homeassistant.components.bluetooth.async_get_bluetooth", return_value=mock_bt + ): + assert await async_setup_component(hass, bluetooth.DOMAIN, {}) + await hass.async_block_till_done() + + assert async_scanner_count(hass, connectable=False) == 0 + switchbot_device_non_connectable = generate_ble_device( + "44:44:33:11:23:45", + "wohand", + {}, + rssi=-100, + ) + switchbot_device_adv = generate_advertisement_data( + local_name="wohand", + service_uuids=["050a021a-0000-1000-8000-00805f9b34fb"], + service_data={"050a021a-0000-1000-8000-00805f9b34fb": b"\n\xff"}, + manufacturer_data={1: b"\x01"}, + rssi=-100, + ) + callbacks = [] + + def _fake_subscriber( + service_info: BluetoothServiceInfo, + change: BluetoothChange, + ) -> None: + """Fake subscriber for the BleakScanner.""" + callbacks.append((service_info, change)) + + cancel = bluetooth.async_register_callback( + hass, + _fake_subscriber, + {"address": "44:44:33:11:23:45", "connectable": False}, + BluetoothScanningMode.ACTIVE, + ) + + class FakeScanner(BaseHaRemoteScanner): + def inject_advertisement( + self, device: BLEDevice, advertisement_data: AdvertisementData + ) -> None: + """Inject an advertisement.""" + self._async_on_advertisement( + device.address, + advertisement_data.rssi, + device.name, + advertisement_data.service_uuids, + advertisement_data.service_data, + advertisement_data.manufacturer_data, + advertisement_data.tx_power, + {"scanner_specific_data": "test"}, + MONOTONIC_TIME(), + ) + + def clear_all_devices(self) -> None: + """Clear all devices.""" + self._discovered_device_advertisement_datas.clear() + self._discovered_device_timestamps.clear() + self._previous_service_info.clear() + + connector = ( + HaBluetoothConnector(MockBleakClient, "mock_bleak_client", lambda: False), + ) + non_connectable_scanner = FakeScanner( + "connectable", + "connectable", + connector, + False, + ) + unsetup_connectable_scanner = non_connectable_scanner.async_setup() + cancel_connectable_scanner = _get_manager().async_register_scanner( + non_connectable_scanner + ) + with patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: + non_connectable_scanner.inject_advertisement( + switchbot_device_non_connectable, switchbot_device_adv + ) + await hass.async_block_till_done() + + expected_context = { + "discovery_key": DiscoveryKey( + domain="bluetooth", key="44:44:33:11:23:45", version=1 + ), + "source": "bluetooth", + } + assert len(mock_config_flow.mock_calls) == 1 + assert mock_config_flow.mock_calls[0][1][0] == "switchbot" + assert mock_config_flow.mock_calls[0][2]["context"] == expected_context + + hass.config.components.add(entry_domain) + mock_integration(hass, MockModule(entry_domain)) + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id="mock-unique-id", + 
state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + assert ( + async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None + ) + assert async_scanner_count(hass, connectable=False) == 1 + assert len(callbacks) == 1 + + assert ( + "44:44:33:11:23:45" + in non_connectable_scanner.discovered_devices_and_advertisement_data + ) + + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert ( + async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None + ) + assert async_scanner_count(hass, connectable=False) == 1 + assert len(callbacks) == 1 + + assert len(mock_config_flow.mock_calls) == 2 + assert mock_config_flow.mock_calls[1][1][0] == "switchbot" + assert mock_config_flow.mock_calls[1][2]["context"] == expected_context + + cancel() + unsetup_connectable_scanner() + cancel_connectable_scanner() + + +@pytest.mark.usefixtures("mock_bluetooth_adapters") +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + "entry_source", + "entry_unique_id", + ), + [ + # Discovery key from other domain + ( + "switchbot", + { + "zeroconf": ( + DiscoveryKey(domain="zeroconf", key="44:44:33:11:23:45", version=1), + ) + }, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + # Discovery key from the future + ( + "switchbot", + { + "bluetooth": ( + DiscoveryKey( + domain="bluetooth", key="44:44:33:11:23:45", version=2 + ), + ) + }, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + ], +) +async def test_bluetooth_rediscover_no_match( + hass: HomeAssistant, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, + entry_unique_id: str, +) -> None: + """Test we don't reinitiate flows when a non matching config entry is removed.""" + mock_bt = [ + { + "domain": "switchbot", + "service_data_uuid": "050a021a-0000-1000-8000-00805f9b34fb", + "connectable": False, + }, + ] + with patch( + "homeassistant.components.bluetooth.async_get_bluetooth", return_value=mock_bt + ): + assert await async_setup_component(hass, bluetooth.DOMAIN, {}) + await hass.async_block_till_done() + + assert async_scanner_count(hass, connectable=False) == 0 + switchbot_device_non_connectable = generate_ble_device( + "44:44:33:11:23:45", + "wohand", + {}, + rssi=-100, + ) + switchbot_device_adv = generate_advertisement_data( + local_name="wohand", + service_uuids=["050a021a-0000-1000-8000-00805f9b34fb"], + service_data={"050a021a-0000-1000-8000-00805f9b34fb": b"\n\xff"}, + manufacturer_data={1: b"\x01"}, + rssi=-100, + ) + callbacks = [] + + def _fake_subscriber( + service_info: BluetoothServiceInfo, + change: BluetoothChange, + ) -> None: + """Fake subscriber for the BleakScanner.""" + callbacks.append((service_info, change)) + + cancel = bluetooth.async_register_callback( + hass, + _fake_subscriber, + {"address": "44:44:33:11:23:45", "connectable": False}, + BluetoothScanningMode.ACTIVE, + ) + + class FakeScanner(BaseHaRemoteScanner): + def inject_advertisement( + self, device: BLEDevice, advertisement_data: AdvertisementData + ) -> None: + """Inject an advertisement.""" + self._async_on_advertisement( + device.address, + advertisement_data.rssi, + device.name, + advertisement_data.service_uuids, + advertisement_data.service_data, + advertisement_data.manufacturer_data, + advertisement_data.tx_power, + {"scanner_specific_data": "test"}, + MONOTONIC_TIME(), + ) + + def clear_all_devices(self) -> None: + """Clear all devices.""" + 
self._discovered_device_advertisement_datas.clear() + self._discovered_device_timestamps.clear() + self._previous_service_info.clear() + + connector = ( + HaBluetoothConnector(MockBleakClient, "mock_bleak_client", lambda: False), + ) + non_connectable_scanner = FakeScanner( + "connectable", + "connectable", + connector, + False, + ) + unsetup_connectable_scanner = non_connectable_scanner.async_setup() + cancel_connectable_scanner = _get_manager().async_register_scanner( + non_connectable_scanner + ) + with patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: + non_connectable_scanner.inject_advertisement( + switchbot_device_non_connectable, switchbot_device_adv + ) + await hass.async_block_till_done() + + expected_context = { + "discovery_key": DiscoveryKey( + domain="bluetooth", key="44:44:33:11:23:45", version=1 + ), + "source": "bluetooth", + } + assert len(mock_config_flow.mock_calls) == 1 + assert mock_config_flow.mock_calls[0][1][0] == "switchbot" + assert mock_config_flow.mock_calls[0][2]["context"] == expected_context + + hass.config.components.add(entry_domain) + mock_integration(hass, MockModule(entry_domain)) + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id=entry_unique_id, + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + assert ( + async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None + ) + assert async_scanner_count(hass, connectable=False) == 1 + assert len(callbacks) == 1 + + assert ( + "44:44:33:11:23:45" + in non_connectable_scanner.discovered_devices_and_advertisement_data + ) + + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert ( + async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None + ) + assert async_scanner_count(hass, connectable=False) == 1 + assert len(callbacks) == 1 + assert len(mock_config_flow.mock_calls) == 1 + + cancel() + unsetup_connectable_scanner() + cancel_connectable_scanner() diff --git a/tests/components/bluetooth_le_tracker/test_device_tracker.py b/tests/components/bluetooth_le_tracker/test_device_tracker.py index 452297e38c2..da90980640b 100644 --- a/tests/components/bluetooth_le_tracker/test_device_tracker.py +++ b/tests/components/bluetooth_le_tracker/test_device_tracker.py @@ -18,7 +18,7 @@ from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, CONF_SCAN_INTERVAL, CONF_TRACK_NEW, - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, ) from homeassistant.const import CONF_PLATFORM from homeassistant.core import HomeAssistant @@ -73,7 +73,7 @@ async def test_do_not_see_device_if_time_not_updated(hass: HomeAssistant) -> Non address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -101,7 +101,9 @@ async def test_do_not_see_device_if_time_not_updated(hass: HomeAssistant) -> Non CONF_TRACK_NEW: True, CONF_CONSIDER_HOME: timedelta(minutes=10), } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) await hass.async_block_till_done() assert result @@ -136,7 +138,7 @@ async def test_see_device_if_time_updated(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = 
f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -164,7 +166,9 @@ async def test_see_device_if_time_updated(hass: HomeAssistant) -> None: CONF_TRACK_NEW: True, CONF_CONSIDER_HOME: timedelta(minutes=10), } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) assert result # Tick until device seen enough times for to be registered for tracking @@ -215,7 +219,7 @@ async def test_preserve_new_tracked_device_name(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -242,7 +246,9 @@ async def test_preserve_new_tracked_device_name(hass: HomeAssistant) -> None: CONF_SCAN_INTERVAL: timedelta(minutes=1), CONF_TRACK_NEW: True, } - assert await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + assert await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) await hass.async_block_till_done() # Seen once here; return without name when seen subsequent times @@ -282,7 +288,7 @@ async def test_tracking_battery_times_out(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -311,7 +317,9 @@ async def test_tracking_battery_times_out(hass: HomeAssistant) -> None: CONF_TRACK_BATTERY_INTERVAL: timedelta(minutes=2), CONF_TRACK_NEW: True, } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) await hass.async_block_till_done() assert result @@ -348,7 +356,7 @@ async def test_tracking_battery_fails(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -377,7 +385,9 @@ async def test_tracking_battery_fails(hass: HomeAssistant) -> None: CONF_TRACK_BATTERY_INTERVAL: timedelta(minutes=2), CONF_TRACK_NEW: True, } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) assert result # Tick until device seen enough times for to be registered for tracking @@ -413,7 +423,7 @@ async def test_tracking_battery_successful(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -442,7 +452,9 @@ async def test_tracking_battery_successful(hass: HomeAssistant) -> None: CONF_TRACK_BATTERY_INTERVAL: timedelta(minutes=2), CONF_TRACK_NEW: True, } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) await hass.async_block_till_done() assert result diff 
--git a/tests/components/bmw_connected_drive/__init__.py b/tests/components/bmw_connected_drive/__init__.py index 655955ff9aa..c437e1d3669 100644 --- a/tests/components/bmw_connected_drive/__init__.py +++ b/tests/components/bmw_connected_drive/__init__.py @@ -9,6 +9,7 @@ import respx from homeassistant import config_entries from homeassistant.components.bmw_connected_drive.const import ( + CONF_CAPTCHA_TOKEN, CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, @@ -24,8 +25,12 @@ FIXTURE_USER_INPUT = { CONF_PASSWORD: "p4ssw0rd", CONF_REGION: "rest_of_world", } -FIXTURE_REFRESH_TOKEN = "SOME_REFRESH_TOKEN" -FIXTURE_GCID = "SOME_GCID" +FIXTURE_CAPTCHA_INPUT = { + CONF_CAPTCHA_TOKEN: "captcha_token", +} +FIXTURE_USER_INPUT_W_CAPTCHA = FIXTURE_USER_INPUT | FIXTURE_CAPTCHA_INPUT +FIXTURE_REFRESH_TOKEN = "another_token_string" +FIXTURE_GCID = "DUMMY" FIXTURE_CONFIG_ENTRY = { "entry_id": "1", @@ -40,9 +45,14 @@ FIXTURE_CONFIG_ENTRY = { }, "options": {CONF_READ_ONLY: False}, "source": config_entries.SOURCE_USER, - "unique_id": f"{FIXTURE_USER_INPUT[CONF_REGION]}-{FIXTURE_USER_INPUT[CONF_REGION]}", + "unique_id": f"{FIXTURE_USER_INPUT[CONF_REGION]}-{FIXTURE_USER_INPUT[CONF_USERNAME]}", } +REMOTE_SERVICE_EXC_REASON = "HTTPStatusError: 502 Bad Gateway" +REMOTE_SERVICE_EXC_TRANSLATION = ( + "Error executing remote service on vehicle. HTTPStatusError: 502 Bad Gateway" +) + async def setup_mocked_integration(hass: HomeAssistant) -> MockConfigEntry: """Mock a fully setup config entry and all components based on fixtures.""" diff --git a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr index 81ef1220069..b87da22a332 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr @@ -4833,7 +4833,7 @@ }), ]), 'info': dict({ - 'gcid': 'SOME_GCID', + 'gcid': 'DUMMY', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', 'region': 'rest_of_world', @@ -7202,7 +7202,7 @@ }), ]), 'info': dict({ - 'gcid': 'SOME_GCID', + 'gcid': 'DUMMY', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', 'region': 'rest_of_world', @@ -8925,7 +8925,7 @@ }), ]), 'info': dict({ - 'gcid': 'SOME_GCID', + 'gcid': 'DUMMY', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', 'region': 'rest_of_world', diff --git a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr index 8a26acd1040..624b2c6007f 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr @@ -245,7 +245,7 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, 'original_name': 'Charging target', 'platform': 'bmw_connected_drive', @@ -259,7 +259,6 @@ # name: test_entity_state_attrs[sensor.i3_rex_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'battery', 'friendly_name': 'i3 (+ REX) Charging target', 'unit_of_measurement': '%', }), @@ -894,7 +893,7 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, 'original_name': 'Charging target', 'platform': 'bmw_connected_drive', @@ -908,7 +907,6 @@ # name: test_entity_state_attrs[sensor.i4_edrive40_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 
'battery', 'friendly_name': 'i4 eDrive40 Charging target', 'unit_of_measurement': '%', }), @@ -929,6 +927,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -968,6 +967,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -1898,7 +1898,7 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, 'original_name': 'Charging target', 'platform': 'bmw_connected_drive', @@ -1912,7 +1912,6 @@ # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Charging target', 'unit_of_measurement': '%', }), @@ -1933,6 +1932,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -1972,6 +1972,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -2665,6 +2666,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -2704,6 +2706,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), diff --git a/tests/components/bmw_connected_drive/test_button.py b/tests/components/bmw_connected_drive/test_button.py index 99cabc900fa..356cfcb439e 100644 --- a/tests/components/bmw_connected_drive/test_button.py +++ b/tests/components/bmw_connected_drive/test_button.py @@ -13,7 +13,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -81,11 +85,13 @@ async def test_service_call_fail( monkeypatch.setattr( RemoteServices, "trigger_remote_service", - AsyncMock(side_effect=MyBMWRemoteServiceError), + AsyncMock( + side_effect=MyBMWRemoteServiceError("HTTPStatusError: 502 Bad Gateway") + ), ) # Test - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError, match=REMOTE_SERVICE_EXC_TRANSLATION): await hass.services.async_call( "button", "press", @@ -165,7 +171,7 @@ async def test_service_call_success_state_change( ( "button.i4_edrive40_find_vehicle", "device_tracker.i4_edrive40", - {"latitude": 123.456, "longitude": 34.5678, "direction": 121}, + {"latitude": 12.345, "longitude": 34.5678, "direction": 121}, {"latitude": 48.177334, "longitude": 11.556274, "direction": 180}, ), ], diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index f346cd70b26..9c124261392 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -6,10 +6,12 @@ from unittest.mock import patch from bimmer_connected.api.authentication import MyBMWAuthentication from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError from httpx import RequestError +import pytest from homeassistant import config_entries from homeassistant.components.bmw_connected_drive.config_flow import DOMAIN from homeassistant.components.bmw_connected_drive.const import ( + CONF_CAPTCHA_TOKEN, CONF_READ_ONLY, CONF_REFRESH_TOKEN, ) @@ -18,10 +20,12 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . 
import ( + FIXTURE_CAPTCHA_INPUT, FIXTURE_CONFIG_ENTRY, FIXTURE_GCID, FIXTURE_REFRESH_TOKEN, FIXTURE_USER_INPUT, + FIXTURE_USER_INPUT_W_CAPTCHA, ) from tests.common import MockConfigEntry @@ -56,7 +60,7 @@ async def test_authentication_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=FIXTURE_USER_INPUT, + data=deepcopy(FIXTURE_USER_INPUT_W_CAPTCHA), ) assert result["type"] is FlowResultType.FORM @@ -74,7 +78,7 @@ async def test_connection_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=FIXTURE_USER_INPUT, + data=deepcopy(FIXTURE_USER_INPUT_W_CAPTCHA), ) assert result["type"] is FlowResultType.FORM @@ -92,7 +96,7 @@ async def test_api_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=deepcopy(FIXTURE_USER_INPUT), + data=deepcopy(FIXTURE_USER_INPUT_W_CAPTCHA), ) assert result["type"] is FlowResultType.FORM @@ -100,6 +104,28 @@ async def test_api_error(hass: HomeAssistant) -> None: assert result["errors"] == {"base": "cannot_connect"} + +@pytest.mark.usefixtures("bmw_fixture") +async def test_captcha_flow_missing_error(hass: HomeAssistant) -> None: + """Test the user flow returns to the user step with an error when the captcha token is missing.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data=deepcopy(FIXTURE_USER_INPUT), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_CAPTCHA_TOKEN: " "} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "missing_captcha"} + + async def test_full_user_flow_implementation(hass: HomeAssistant) -> None: """Test registering an integration and finishing flow works.""" with ( @@ -113,14 +139,22 @@ async def test_full_user_flow_implementation(hass: HomeAssistant) -> None: return_value=True, ) as mock_setup_entry, ): - result2 = await hass.config_entries.flow.async_init( + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data=deepcopy(FIXTURE_USER_INPUT), ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == FIXTURE_COMPLETE_ENTRY[CONF_USERNAME] - assert result2["data"] == FIXTURE_COMPLETE_ENTRY + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_CAPTCHA_INPUT + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == FIXTURE_COMPLETE_ENTRY[CONF_USERNAME] + assert result["data"] == FIXTURE_COMPLETE_ENTRY assert len(mock_setup_entry.mock_calls) == 1 @@ -188,26 +222,60 @@ async def test_reauth(hass: HomeAssistant) -> None: assert config_entry.data == config_entry_with_wrong_password["data"] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - ) - + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} 
+ assert result["step_id"] == "change_password" + assert set(result["data_schema"].schema) == {CONF_PASSWORD} - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], FIXTURE_USER_INPUT + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_PASSWORD: FIXTURE_USER_INPUT[CONF_PASSWORD]} ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_CAPTCHA_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert config_entry.data == FIXTURE_COMPLETE_ENTRY assert len(mock_setup_entry.mock_calls) == 2 + + +async def test_reconfigure(hass: HomeAssistant) -> None: + """Test the reconfiguration form.""" + with patch( + "bimmer_connected.api.authentication.MyBMWAuthentication.login", + side_effect=login_sideeffect, + autospec=True, + ): + config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "change_password" + assert set(result["data_schema"].schema) == {CONF_PASSWORD} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_PASSWORD: FIXTURE_USER_INPUT[CONF_PASSWORD]} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_CAPTCHA_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert config_entry.data == FIXTURE_COMPLETE_ENTRY diff --git a/tests/components/bmw_connected_drive/test_coordinator.py b/tests/components/bmw_connected_drive/test_coordinator.py index b0f507bbfc2..beb3d74d572 100644 --- a/tests/components/bmw_connected_drive/test_coordinator.py +++ b/tests/components/bmw_connected_drive/test_coordinator.py @@ -1,13 +1,19 @@ """Test BMW coordinator.""" +from copy import deepcopy from datetime import timedelta from unittest.mock import patch -from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError +from bimmer_connected.models import ( + MyBMWAPIError, + MyBMWAuthError, + MyBMWCaptchaMissingError, +) from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.bmw_connected_drive import DOMAIN as BMW_DOMAIN +from homeassistant.const import CONF_REGION from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import issue_registry as ir @@ -27,7 +33,7 @@ async def test_update_success(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert config_entry.runtime_data.coordinator.last_update_success is True + assert config_entry.runtime_data.last_update_success is True @pytest.mark.usefixtures("bmw_fixture") @@ -42,7 +48,7 @@ async def test_update_failed( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = 
config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data assert coordinator.last_update_success is True @@ -71,7 +77,7 @@ async def test_update_reauth( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = config_entry.runtime_data.coordinator + coordinator = config_entry.runtime_data assert coordinator.last_update_success is True @@ -122,3 +128,38 @@ async def test_init_reauth( f"config_entry_reauth_{BMW_DOMAIN}_{config_entry.entry_id}", ) assert reauth_issue.active is True + + +@pytest.mark.usefixtures("bmw_fixture") +async def test_captcha_reauth( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the reauth form.""" + TEST_REGION = "north_america" + + config_entry_fixure = deepcopy(FIXTURE_CONFIG_ENTRY) + config_entry_fixure["data"][CONF_REGION] = TEST_REGION + config_entry = MockConfigEntry(**config_entry_fixure) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + coordinator = config_entry.runtime_data + + assert coordinator.last_update_success is True + + freezer.tick(timedelta(minutes=10, seconds=1)) + with patch( + "bimmer_connected.account.MyBMWAccount.get_vehicles", + side_effect=MyBMWCaptchaMissingError( + "Missing hCaptcha token for North America login" + ), + ): + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert coordinator.last_update_success is False + assert isinstance(coordinator.last_exception, ConfigEntryAuthFailed) is True + assert coordinator.last_exception.translation_key == "missing_captcha" diff --git a/tests/components/bmw_connected_drive/test_lock.py b/tests/components/bmw_connected_drive/test_lock.py index 2fa694d426b..088534c79f5 100644 --- a/tests/components/bmw_connected_drive/test_lock.py +++ b/tests/components/bmw_connected_drive/test_lock.py @@ -16,7 +16,12 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from . import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform from tests.components.recorder.common import async_wait_recording_done @@ -118,11 +123,11 @@ async def test_service_call_fail( monkeypatch.setattr( RemoteServices, "trigger_remote_service", - AsyncMock(side_effect=MyBMWRemoteServiceError), + AsyncMock(side_effect=MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON)), ) # Test - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError, match=REMOTE_SERVICE_EXC_TRANSLATION): await hass.services.async_call( "lock", service, diff --git a/tests/components/bmw_connected_drive/test_notify.py b/tests/components/bmw_connected_drive/test_notify.py index 4113f618be0..1bade3be011 100644 --- a/tests/components/bmw_connected_drive/test_notify.py +++ b/tests/components/bmw_connected_drive/test_notify.py @@ -11,7 +11,11 @@ import respx from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from . import check_remote_service_call, setup_mocked_integration +from . 
import ( + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) async def test_legacy_notify_service_simple( @@ -68,21 +72,21 @@ async def test_legacy_notify_service_simple( { "latitude": POI_DATA.get("lat"), }, - "Invalid data for point of interest: required key not provided @ data['longitude']", + r"Invalid data for point of interest: required key not provided @ data\['longitude'\]", ), ( { "latitude": POI_DATA.get("lat"), "longitude": "text", }, - "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + r"Invalid data for point of interest: invalid longitude for dictionary value @ data\['longitude'\]", ), ( { "latitude": POI_DATA.get("lat"), "longitude": 9999, }, - "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + r"Invalid data for point of interest: invalid longitude for dictionary value @ data\['longitude'\]", ), ], ) @@ -96,7 +100,7 @@ async def test_service_call_invalid_input( # Setup component assert await setup_mocked_integration(hass) - with pytest.raises(ServiceValidationError) as exc: + with pytest.raises(ServiceValidationError, match=exc_translation): await hass.services.async_call( "notify", "bmw_connected_drive_ix_xdrive50", @@ -106,7 +110,6 @@ async def test_service_call_invalid_input( }, blocking=True, ) - assert str(exc.value) == exc_translation @pytest.mark.usefixtures("bmw_fixture") @@ -132,11 +135,11 @@ async def test_service_call_fail( monkeypatch.setattr( RemoteServices, "trigger_remote_service", - AsyncMock(side_effect=raised), + AsyncMock(side_effect=raised("HTTPStatusError: 502 Bad Gateway")), ) # Test - with pytest.raises(expected): + with pytest.raises(expected, match=REMOTE_SERVICE_EXC_TRANSLATION): await hass.services.async_call( "notify", "bmw_connected_drive_ix_xdrive50", diff --git a/tests/components/bmw_connected_drive/test_number.py b/tests/components/bmw_connected_drive/test_number.py index f2a50ce4df6..733f4fe3113 100644 --- a/tests/components/bmw_connected_drive/test_number.py +++ b/tests/components/bmw_connected_drive/test_number.py @@ -13,7 +13,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -89,7 +94,10 @@ async def test_service_call_invalid_input( old_value = hass.states.get(entity_id).state # Test - with pytest.raises(ValueError): + with pytest.raises( + ValueError, + match="Target SoC must be an integer between 20 and 100 that is a multiple of 5.", + ): await hass.services.async_call( "number", "set_value", @@ -102,17 +110,32 @@ async def test_service_call_invalid_input( @pytest.mark.usefixtures("bmw_fixture") @pytest.mark.parametrize( - ("raised", "expected"), + ("raised", "expected", "exc_translation"), [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - (ValueError, ValueError), + ( + MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + MyBMWAPIError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + ValueError( + "Target SoC must be an integer between 20 and 100 that is a multiple of 5." 
+ ), + ValueError, + "Target SoC must be an integer between 20 and 100 that is a multiple of 5.", + ), ], ) async def test_service_call_fail( hass: HomeAssistant, raised: Exception, expected: Exception, + exc_translation: str, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test exception handling.""" @@ -130,7 +153,7 @@ async def test_service_call_fail( ) # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "number", "set_value", diff --git a/tests/components/bmw_connected_drive/test_select.py b/tests/components/bmw_connected_drive/test_select.py index a270f38ee01..53c39f572f2 100644 --- a/tests/components/bmw_connected_drive/test_select.py +++ b/tests/components/bmw_connected_drive/test_select.py @@ -16,7 +16,12 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.translation import async_get_translations -from . import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -105,7 +110,10 @@ async def test_service_call_invalid_input( old_value = hass.states.get(entity_id).state # Test - with pytest.raises(ServiceValidationError): + with pytest.raises( + ServiceValidationError, + match=f"Option {value} is not valid for entity {entity_id}", + ): await hass.services.async_call( "select", "select_option", @@ -118,17 +126,32 @@ async def test_service_call_invalid_input( @pytest.mark.usefixtures("bmw_fixture") @pytest.mark.parametrize( - ("raised", "expected"), + ("raised", "expected", "exc_translation"), [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - (ServiceValidationError, ServiceValidationError), + ( + MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + MyBMWAPIError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + ServiceValidationError( + "Option 17 is not valid for entity select.i4_edrive40_ac_charging_limit" + ), + ServiceValidationError, + "Option 17 is not valid for entity select.i4_edrive40_ac_charging_limit", + ), ], ) async def test_service_call_fail( hass: HomeAssistant, raised: Exception, expected: Exception, + exc_translation: str, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test exception handling.""" @@ -146,7 +169,7 @@ async def test_service_call_fail( ) # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "select", "select_option", diff --git a/tests/components/bmw_connected_drive/test_switch.py b/tests/components/bmw_connected_drive/test_switch.py index 58bddbfc937..c28b651abaf 100644 --- a/tests/components/bmw_connected_drive/test_switch.py +++ b/tests/components/bmw_connected_drive/test_switch.py @@ -13,7 +13,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import check_remote_service_call, setup_mocked_integration +from . 
import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -75,17 +80,25 @@ async def test_service_call_success( @pytest.mark.usefixtures("bmw_fixture") @pytest.mark.parametrize( - ("raised", "expected"), + ("raised", "expected", "exc_translation"), [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - (ValueError, ValueError), + ( + MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + MyBMWAPIError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), ], ) async def test_service_call_fail( hass: HomeAssistant, raised: Exception, expected: Exception, + exc_translation: str, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test exception handling.""" @@ -107,7 +120,7 @@ async def test_service_call_fail( assert hass.states.get(entity_id).state == old_value # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "switch", "turn_on", @@ -122,7 +135,7 @@ async def test_service_call_fail( assert hass.states.get(entity_id).state == old_value # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "switch", "turn_off", diff --git a/tests/components/bond/test_button.py b/tests/components/bond/test_button.py index 8c8f38db72b..c14bba0d01f 100644 --- a/tests/components/bond/test_button.py +++ b/tests/components/bond/test_button.py @@ -57,6 +57,15 @@ def light(name: str): } +def motorized_shade(name: str): + """Create a motorized shade with a given name.""" + return { + "name": name, + "type": DeviceType.MOTORIZED_SHADES, + "actions": [Action.OPEN, Action.OPEN_NEXT, Action.CLOSE, Action.CLOSE_NEXT], + } + + async def test_entity_registry( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -180,3 +189,38 @@ async def test_press_button(hass: HomeAssistant) -> None: mock_action.assert_called_once_with( "test-device-id", Action(Action.START_DECREASING_BRIGHTNESS) ) + + +async def test_motorized_shade_actions(hass: HomeAssistant) -> None: + """Tests motorized shade open next and close next actions.""" + await setup_platform( + hass, + BUTTON_DOMAIN, + motorized_shade("name-1"), + bond_device_id="test-device-id", + ) + + assert hass.states.get("button.name_1_open_next") + assert hass.states.get("button.name_1_close_next") + + with patch_bond_action() as mock_action, patch_bond_device_state(): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.name_1_open_next"}, + blocking=True, + ) + await hass.async_block_till_done() + + mock_action.assert_called_once_with("test-device-id", Action(Action.OPEN_NEXT)) + + with patch_bond_action() as mock_action, patch_bond_device_state(): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.name_1_close_next"}, + blocking=True, + ) + await hass.async_block_till_done() + + mock_action.assert_called_once_with("test-device-id", Action(Action.CLOSE_NEXT)) diff --git a/tests/components/bond/test_cover.py b/tests/components/bond/test_cover.py index e438a830eb5..4dc8256be48 100644 --- a/tests/components/bond/test_cover.py +++ b/tests/components/bond/test_cover.py @@ -8,7 +8,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, - STATE_CLOSED, 
+ CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -19,7 +19,6 @@ from homeassistant.const import ( SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - STATE_OPEN, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -224,7 +223,7 @@ async def test_tilt_and_open(hass: HomeAssistant) -> None: await hass.async_block_till_done() mock_open.assert_called_once_with("test-device-id", Action.tilt_open()) - assert hass.states.get("cover.name_1").state == STATE_CLOSED + assert hass.states.get("cover.name_1").state == CoverState.CLOSED async def test_update_reports_open_cover(hass: HomeAssistant) -> None: @@ -280,7 +279,7 @@ async def test_set_position_cover(hass: HomeAssistant) -> None: mock_hold.assert_called_once_with("test-device-id", Action.set_position(0)) entity_state = hass.states.get("cover.name_1") - assert entity_state.state == STATE_OPEN + assert entity_state.state == CoverState.OPEN assert entity_state.attributes[ATTR_CURRENT_POSITION] == 100 with ( @@ -298,7 +297,7 @@ async def test_set_position_cover(hass: HomeAssistant) -> None: mock_hold.assert_called_once_with("test-device-id", Action.set_position(100)) entity_state = hass.states.get("cover.name_1") - assert entity_state.state == STATE_CLOSED + assert entity_state.state == CoverState.CLOSED assert entity_state.attributes[ATTR_CURRENT_POSITION] == 0 with ( @@ -316,5 +315,5 @@ async def test_set_position_cover(hass: HomeAssistant) -> None: mock_hold.assert_called_once_with("test-device-id", Action.set_position(40)) entity_state = hass.states.get("cover.name_1") - assert entity_state.state == STATE_OPEN + assert entity_state.state == CoverState.OPEN assert entity_state.attributes[ATTR_CURRENT_POSITION] == 60 diff --git a/tests/components/bosch_shc/test_config_flow.py b/tests/components/bosch_shc/test_config_flow.py index 2c43ec0a370..63f7169b026 100644 --- a/tests/components/bosch_shc/test_config_flow.py +++ b/tests/components/bosch_shc/test_config_flow.py @@ -99,8 +99,8 @@ async def test_form_user(hass: HomeAssistant) -> None: assert result3["title"] == "shc012345" assert result3["data"] == { "host": "1.1.1.1", - "ssl_certificate": hass.config.path(DOMAIN, CONF_SHC_CERT), - "ssl_key": hass.config.path(DOMAIN, CONF_SHC_KEY), + "ssl_certificate": hass.config.path(DOMAIN, "test-mac", CONF_SHC_CERT), + "ssl_key": hass.config.path(DOMAIN, "test-mac", CONF_SHC_KEY), "token": "abc:123", "hostname": "123", } @@ -549,8 +549,8 @@ async def test_zeroconf(hass: HomeAssistant) -> None: assert result3["title"] == "shc012345" assert result3["data"] == { "host": "1.1.1.1", - "ssl_certificate": hass.config.path(DOMAIN, CONF_SHC_CERT), - "ssl_key": hass.config.path(DOMAIN, CONF_SHC_KEY), + "ssl_certificate": hass.config.path(DOMAIN, "test-mac", CONF_SHC_CERT), + "ssl_key": hass.config.path(DOMAIN, "test-mac", CONF_SHC_KEY), "token": "abc:123", "hostname": "123", } @@ -646,11 +646,7 @@ async def test_reauth(hass: HomeAssistant) -> None: title="shc012345", ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -712,6 +708,7 @@ async def test_reauth(hass: HomeAssistant) -> None: async def test_tls_assets_writer(hass: HomeAssistant) -> None: """Test we write tls assets to correct location.""" + unique_id = "test-mac" assets = { "token": 
"abc:123", "cert": b"content_cert", @@ -723,14 +720,163 @@ async def test_tls_assets_writer(hass: HomeAssistant) -> None: "homeassistant.components.bosch_shc.config_flow.open", mock_open() ) as mocked_file, ): - write_tls_asset(hass, CONF_SHC_CERT, assets["cert"]) + write_tls_asset(hass, unique_id, CONF_SHC_CERT, assets["cert"]) mocked_file.assert_called_with( - hass.config.path(DOMAIN, CONF_SHC_CERT), "w", encoding="utf8" + hass.config.path(DOMAIN, unique_id, CONF_SHC_CERT), "w", encoding="utf8" ) mocked_file().write.assert_called_with("content_cert") - write_tls_asset(hass, CONF_SHC_KEY, assets["key"]) + write_tls_asset(hass, unique_id, CONF_SHC_KEY, assets["key"]) mocked_file.assert_called_with( - hass.config.path(DOMAIN, CONF_SHC_KEY), "w", encoding="utf8" + hass.config.path(DOMAIN, unique_id, CONF_SHC_KEY), "w", encoding="utf8" ) mocked_file().write.assert_called_with("content_key") + + +@pytest.mark.usefixtures("mock_zeroconf") +async def test_register_multiple_controllers(hass: HomeAssistant) -> None: + """Test register multiple controllers. + + Each registered controller must get its own key/certificate pair, + which must not get overwritten when a new controller is added. + """ + + controller_1 = { + "hostname": "shc111111", + "mac": "test-mac1", + "host": "1.1.1.1", + "register": { + "token": "abc:shc111111", + "cert": b"content_cert1", + "key": b"content_key1", + }, + } + controller_2 = { + "hostname": "shc222222", + "mac": "test-mac2", + "host": "2.2.2.2", + "register": { + "token": "abc:shc222222", + "cert": b"content_cert2", + "key": b"content_key2", + }, + } + + # Set up controller 1 + ctrl_1_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with ( + patch( + "boschshcpy.session.SHCSession.mdns_info", + return_value=SHCInformation, + ), + patch( + "boschshcpy.information.SHCInformation.name", + new_callable=PropertyMock, + return_value=controller_1["hostname"], + ), + patch( + "boschshcpy.information.SHCInformation.unique_id", + new_callable=PropertyMock, + return_value=controller_1["mac"], + ), + ): + ctrl_1_result2 = await hass.config_entries.flow.async_configure( + ctrl_1_result["flow_id"], + {"host": controller_1["host"]}, + ) + + with ( + patch( + "boschshcpy.register_client.SHCRegisterClient.register", + return_value=controller_1["register"], + ), + patch("os.mkdir"), + patch("homeassistant.components.bosch_shc.config_flow.open"), + patch("boschshcpy.session.SHCSession.authenticate"), + patch( + "homeassistant.components.bosch_shc.async_setup_entry", + return_value=True, + ), + ): + ctrl_1_result3 = await hass.config_entries.flow.async_configure( + ctrl_1_result2["flow_id"], + {"password": "test"}, + ) + await hass.async_block_till_done() + + assert ctrl_1_result3["type"] is FlowResultType.CREATE_ENTRY + assert ctrl_1_result3["title"] == "shc111111" + assert ctrl_1_result3["context"]["unique_id"] == controller_1["mac"] + assert ctrl_1_result3["data"] == { + "host": "1.1.1.1", + "ssl_certificate": hass.config.path(DOMAIN, controller_1["mac"], CONF_SHC_CERT), + "ssl_key": hass.config.path(DOMAIN, controller_1["mac"], CONF_SHC_KEY), + "token": "abc:shc111111", + "hostname": "shc111111", + } + + # Set up controller 2 + ctrl_2_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with ( + patch( + "boschshcpy.session.SHCSession.mdns_info", + return_value=SHCInformation, + ), + patch( + "boschshcpy.information.SHCInformation.name", + 
new_callable=PropertyMock, + return_value=controller_2["hostname"], + ), + patch( + "boschshcpy.information.SHCInformation.unique_id", + new_callable=PropertyMock, + return_value=controller_2["mac"], + ), + ): + ctrl_2_result2 = await hass.config_entries.flow.async_configure( + ctrl_2_result["flow_id"], + {"host": controller_2["host"]}, + ) + + with ( + patch( + "boschshcpy.register_client.SHCRegisterClient.register", + return_value=controller_2["register"], + ), + patch("os.mkdir"), + patch("homeassistant.components.bosch_shc.config_flow.open"), + patch("boschshcpy.session.SHCSession.authenticate"), + patch( + "homeassistant.components.bosch_shc.async_setup_entry", + return_value=True, + ), + ): + ctrl_2_result3 = await hass.config_entries.flow.async_configure( + ctrl_2_result2["flow_id"], + {"password": "test"}, + ) + await hass.async_block_till_done() + + assert ctrl_2_result3["type"] is FlowResultType.CREATE_ENTRY + assert ctrl_2_result3["title"] == "shc222222" + assert ctrl_2_result3["context"]["unique_id"] == controller_2["mac"] + assert ctrl_2_result3["data"] == { + "host": "2.2.2.2", + "ssl_certificate": hass.config.path(DOMAIN, controller_2["mac"], CONF_SHC_CERT), + "ssl_key": hass.config.path(DOMAIN, controller_2["mac"], CONF_SHC_KEY), + "token": "abc:shc222222", + "hostname": "shc222222", + } + + # Check that each controller has its own key/certificate pair + assert ( + ctrl_1_result3["data"]["ssl_certificate"] + != ctrl_2_result3["data"]["ssl_certificate"] + ) + assert ctrl_1_result3["data"]["ssl_key"] != ctrl_2_result3["data"]["ssl_key"] diff --git a/tests/components/braviatv/snapshots/test_diagnostics.ambr b/tests/components/braviatv/snapshots/test_diagnostics.ambr index 2fd515b24e5..de76c00cd23 100644 --- a/tests/components/braviatv/snapshots/test_diagnostics.ambr +++ b/tests/components/braviatv/snapshots/test_diagnostics.ambr @@ -9,6 +9,8 @@ 'use_psk': True, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'braviatv', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, @@ -17,6 +19,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/braviatv/test_config_flow.py b/tests/components/braviatv/test_config_flow.py index 6fc02dbd36f..7a4f93f7f16 100644 --- a/tests/components/braviatv/test_config_flow.py +++ b/tests/components/braviatv/test_config_flow.py @@ -17,7 +17,7 @@ from homeassistant.components.braviatv.const import ( DOMAIN, NICKNAME_PREFIX, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_HOST, CONF_MAC, CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -405,6 +405,9 @@ async def test_reauth_successful(hass: HomeAssistant, use_psk, new_pin) -> None: title="TV-Model", ) config_entry.add_to_hass(hass) + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "authorize" with ( patch("pybravia.BraviaClient.connect"), @@ -421,15 +424,6 @@ async def test_reauth_successful(hass: HomeAssistant, use_psk, new_pin) -> None: return_value={}, ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, - 
data=config_entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "authorize" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_USE_PSK: use_psk} ) diff --git a/tests/components/bring/conftest.py b/tests/components/bring/conftest.py index 60c13a1c208..62aa38d4e92 100644 --- a/tests/components/bring/conftest.py +++ b/tests/components/bring/conftest.py @@ -46,6 +46,9 @@ def mock_bring_client() -> Generator[AsyncMock]: client.login.return_value = cast(BringAuthResponse, {"name": "Bring"}) client.load_lists.return_value = load_json_object_fixture("lists.json", DOMAIN) client.get_list.return_value = load_json_object_fixture("items.json", DOMAIN) + client.get_all_user_settings.return_value = load_json_object_fixture( + "usersettings.json", DOMAIN + ) yield client diff --git a/tests/components/bring/fixtures/items.json b/tests/components/bring/fixtures/items.json index 43e05a39fbb..e0b9006167b 100644 --- a/tests/components/bring/fixtures/items.json +++ b/tests/components/bring/fixtures/items.json @@ -6,13 +6,31 @@ "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", "itemId": "Paprika", "specification": "Rot", - "attributes": [] + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] }, { "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", "itemId": "Pouletbrüstli", "specification": "Bio", - "attributes": [] + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] } ], "recently": [ diff --git a/tests/components/bring/fixtures/items_invitation.json b/tests/components/bring/fixtures/items_invitation.json new file mode 100644 index 00000000000..82ef623e439 --- /dev/null +++ b/tests/components/bring/fixtures/items_invitation.json @@ -0,0 +1,44 @@ +{ + "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", + "status": "INVITATION", + "purchase": [ + { + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "itemId": "Paprika", + "specification": "Rot", + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] + }, + { + "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", + "itemId": "Pouletbrüstli", + "specification": "Bio", + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] + } + ], + "recently": [ + { + "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", + "itemId": "Ananas", + "specification": "", + "attributes": [] + } + ] +} diff --git a/tests/components/bring/fixtures/items_shared.json b/tests/components/bring/fixtures/items_shared.json new file mode 100644 index 00000000000..9ac999729d3 --- /dev/null +++ b/tests/components/bring/fixtures/items_shared.json @@ -0,0 +1,44 @@ +{ + "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", + "status": "SHARED", + "purchase": [ + { + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "itemId": "Paprika", + "specification": "Rot", + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] + }, + { + "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", + "itemId": "Pouletbrüstli", + "specification": "Bio", + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] + } + ], + "recently": [ + { + "uuid": 
"fc8db30a-647e-4e6c-9d71-3b85d6a2d954", + "itemId": "Ananas", + "specification": "", + "attributes": [] + } + ] +} diff --git a/tests/components/bring/fixtures/usersettings.json b/tests/components/bring/fixtures/usersettings.json new file mode 100644 index 00000000000..6c93cdc7d83 --- /dev/null +++ b/tests/components/bring/fixtures/usersettings.json @@ -0,0 +1,60 @@ +{ + "userlistsettings": [ + { + "listUuid": "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + "usersettings": [ + { + "key": "listSectionOrder", + "value": "[\"Früchte & Gemüse\",\"Brot & Gebäck\",\"Milch & Käse\",\"Fleisch & Fisch\",\"Zutaten & Gewürze\",\"Fertig- & Tiefkühlprodukte\",\"Getreideprodukte\",\"Snacks & Süsswaren\",\"Getränke & Tabak\",\"Haushalt & Gesundheit\",\"Pflege & Gesundheit\",\"Tierbedarf\",\"Baumarkt & Garten\",\"Eigene Artikel\"]" + }, + { + "key": "listArticleLanguage", + "value": "de-DE" + } + ] + }, + { + "listUuid": "b4776778-7f6c-496e-951b-92a35d3db0dd", + "usersettings": [ + { + "key": "listSectionOrder", + "value": "[\"Früchte & Gemüse\",\"Brot & Gebäck\",\"Milch & Käse\",\"Fleisch & Fisch\",\"Zutaten & Gewürze\",\"Fertig- & Tiefkühlprodukte\",\"Getreideprodukte\",\"Snacks & Süsswaren\",\"Getränke & Tabak\",\"Haushalt & Gesundheit\",\"Pflege & Gesundheit\",\"Tierbedarf\",\"Baumarkt & Garten\",\"Eigene Artikel\"]" + }, + { + "key": "listArticleLanguage", + "value": "en-US" + } + ] + } + ], + "usersettings": [ + { + "key": "autoPush", + "value": "ON" + }, + { + "key": "premiumHideOffersBadge", + "value": "ON" + }, + { + "key": "premiumHideSponsoredCategories", + "value": "ON" + }, + { + "key": "premiumHideInspirationsBadge", + "value": "ON" + }, + { + "key": "onboardClient", + "value": "android" + }, + { + "key": "premiumHideOffersOnMain", + "value": "ON" + }, + { + "key": "defaultListUUID", + "value": "e542eef6-dba7-4c31-a52c-29e6ab9d83a5" + } + ] +} diff --git a/tests/components/bring/snapshots/test_diagnostics.ambr b/tests/components/bring/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..6d830a12133 --- /dev/null +++ b/tests/components/bring/snapshots/test_diagnostics.ambr @@ -0,0 +1,101 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'b4776778-7f6c-496e-951b-92a35d3db0dd': dict({ + 'listUuid': 'b4776778-7f6c-496e-951b-92a35d3db0dd', + 'name': 'Baumarkt', + 'purchase': list([ + dict({ + 'attributes': list([ + dict({ + 'content': dict({ + 'convenient': True, + 'discounted': True, + 'urgent': True, + }), + 'type': 'PURCHASE_CONDITIONS', + }), + ]), + 'itemId': 'Paprika', + 'specification': 'Rot', + 'uuid': 'b5d0790b-5f32-4d5c-91da-e29066f167de', + }), + dict({ + 'attributes': list([ + dict({ + 'content': dict({ + 'convenient': True, + 'discounted': True, + 'urgent': True, + }), + 'type': 'PURCHASE_CONDITIONS', + }), + ]), + 'itemId': 'Pouletbrüstli', + 'specification': 'Bio', + 'uuid': '72d370ab-d8ca-4e41-b956-91df94795b4e', + }), + ]), + 'recently': list([ + dict({ + 'attributes': list([ + ]), + 'itemId': 'Ananas', + 'specification': '', + 'uuid': 'fc8db30a-647e-4e6c-9d71-3b85d6a2d954', + }), + ]), + 'status': 'REGISTERED', + 'theme': 'ch.publisheria.bring.theme.home', + 'uuid': '77a151f8-77c4-47a3-8295-c750a0e69d4f', + }), + 'e542eef6-dba7-4c31-a52c-29e6ab9d83a5': dict({ + 'listUuid': 'e542eef6-dba7-4c31-a52c-29e6ab9d83a5', + 'name': 'Einkauf', + 'purchase': list([ + dict({ + 'attributes': list([ + dict({ + 'content': dict({ + 'convenient': True, + 'discounted': True, + 'urgent': True, + }), + 'type': 'PURCHASE_CONDITIONS', + }), + ]), + 'itemId': 'Paprika', + 
'specification': 'Rot', + 'uuid': 'b5d0790b-5f32-4d5c-91da-e29066f167de', + }), + dict({ + 'attributes': list([ + dict({ + 'content': dict({ + 'convenient': True, + 'discounted': True, + 'urgent': True, + }), + 'type': 'PURCHASE_CONDITIONS', + }), + ]), + 'itemId': 'Pouletbrüstli', + 'specification': 'Bio', + 'uuid': '72d370ab-d8ca-4e41-b956-91df94795b4e', + }), + ]), + 'recently': list([ + dict({ + 'attributes': list([ + ]), + 'itemId': 'Ananas', + 'specification': '', + 'uuid': 'fc8db30a-647e-4e6c-9d71-3b85d6a2d954', + }), + ]), + 'status': 'REGISTERED', + 'theme': 'ch.publisheria.bring.theme.home', + 'uuid': '77a151f8-77c4-47a3-8295-c750a0e69d4f', + }), + }) +# --- diff --git a/tests/components/bring/snapshots/test_sensor.ambr b/tests/components/bring/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..97e1d1b4bd9 --- /dev/null +++ b/tests/components/bring/snapshots/test_sensor.ambr @@ -0,0 +1,583 @@ +# serializer version: 1 +# name: test_setup[sensor.baumarkt_discount_only-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.baumarkt_discount_only', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Discount only', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_discounted', + 'unit_of_measurement': 'items', + }) +# --- +# name: test_setup[sensor.baumarkt_discount_only-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Baumarkt Discount only', + 'unit_of_measurement': 'items', + }), + 'context': , + 'entity_id': 'sensor.baumarkt_discount_only', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_setup[sensor.baumarkt_list_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'registered', + 'shared', + 'invitation', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.baumarkt_list_access', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'List access', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_list_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup[sensor.baumarkt_list_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Baumarkt List access', + 'options': list([ + 'registered', + 'shared', + 'invitation', + ]), + }), + 'context': , + 'entity_id': 'sensor.baumarkt_list_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'registered', + }) +# --- +# name: test_setup[sensor.baumarkt_on_occasion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.baumarkt_on_occasion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On occasion', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_convenient', + 'unit_of_measurement': 'items', + }) +# --- +# name: test_setup[sensor.baumarkt_on_occasion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Baumarkt On occasion', + 'unit_of_measurement': 'items', + }), + 'context': , + 'entity_id': 'sensor.baumarkt_on_occasion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_setup[sensor.baumarkt_region_language-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'de-at', + 'de-ch', + 'de-de', + 'en-au', + 'en-ca', + 'en-gb', + 'en-us', + 'es-es', + 'fr-ch', + 'fr-fr', + 'hu-hu', + 'it-ch', + 'it-it', + 'nb-no', + 'nl-nl', + 'pl-pl', + 'pt-br', + 'ru-ru', + 'sv-se', + 'tr-tr', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.baumarkt_region_language', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Region & language', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_list_language', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup[sensor.baumarkt_region_language-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Baumarkt Region & language', + 'options': list([ + 'de-at', + 'de-ch', + 'de-de', + 'en-au', + 'en-ca', + 'en-gb', + 'en-us', + 'es-es', + 'fr-ch', + 'fr-fr', + 'hu-hu', + 'it-ch', + 'it-it', + 'nb-no', + 'nl-nl', + 'pl-pl', + 'pt-br', + 'ru-ru', + 'sv-se', + 'tr-tr', + ]), + }), + 'context': , + 'entity_id': 'sensor.baumarkt_region_language', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'en-us', + }) +# --- +# name: test_setup[sensor.baumarkt_urgent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.baumarkt_urgent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Urgent', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_urgent', + 'unit_of_measurement': 'items', + }) +# --- +# name: test_setup[sensor.baumarkt_urgent-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Baumarkt Urgent', + 'unit_of_measurement': 'items', + }), + 'context': , + 'entity_id': 'sensor.baumarkt_urgent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_setup[sensor.einkauf_discount_only-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.einkauf_discount_only', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Discount only', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_discounted', + 'unit_of_measurement': 'items', + }) +# --- +# name: test_setup[sensor.einkauf_discount_only-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Einkauf Discount only', + 'unit_of_measurement': 'items', + }), + 'context': , + 'entity_id': 'sensor.einkauf_discount_only', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_setup[sensor.einkauf_list_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'registered', + 'shared', + 'invitation', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.einkauf_list_access', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'List access', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_list_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup[sensor.einkauf_list_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Einkauf List access', + 'options': list([ + 'registered', + 'shared', + 'invitation', + ]), + }), + 'context': , + 'entity_id': 'sensor.einkauf_list_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'registered', + }) +# --- +# name: test_setup[sensor.einkauf_on_occasion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.einkauf_on_occasion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On occasion', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_convenient', + 'unit_of_measurement': 'items', + }) +# --- +# name: 
test_setup[sensor.einkauf_on_occasion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Einkauf On occasion', + 'unit_of_measurement': 'items', + }), + 'context': , + 'entity_id': 'sensor.einkauf_on_occasion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_setup[sensor.einkauf_region_language-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'de-at', + 'de-ch', + 'de-de', + 'en-au', + 'en-ca', + 'en-gb', + 'en-us', + 'es-es', + 'fr-ch', + 'fr-fr', + 'hu-hu', + 'it-ch', + 'it-it', + 'nb-no', + 'nl-nl', + 'pl-pl', + 'pt-br', + 'ru-ru', + 'sv-se', + 'tr-tr', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.einkauf_region_language', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Region & language', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_list_language', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup[sensor.einkauf_region_language-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Einkauf Region & language', + 'options': list([ + 'de-at', + 'de-ch', + 'de-de', + 'en-au', + 'en-ca', + 'en-gb', + 'en-us', + 'es-es', + 'fr-ch', + 'fr-fr', + 'hu-hu', + 'it-ch', + 'it-it', + 'nb-no', + 'nl-nl', + 'pl-pl', + 'pt-br', + 'ru-ru', + 'sv-se', + 'tr-tr', + ]), + }), + 'context': , + 'entity_id': 'sensor.einkauf_region_language', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'de-de', + }) +# --- +# name: test_setup[sensor.einkauf_urgent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.einkauf_urgent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Urgent', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_urgent', + 'unit_of_measurement': 'items', + }) +# --- +# name: test_setup[sensor.einkauf_urgent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Einkauf Urgent', + 'unit_of_measurement': 'items', + }), + 'context': , + 'entity_id': 'sensor.einkauf_urgent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- diff --git a/tests/components/bring/snapshots/test_todo.ambr b/tests/components/bring/snapshots/test_todo.ambr index 6a24b4148b7..6a7104727a1 100644 --- a/tests/components/bring/snapshots/test_todo.ambr +++ b/tests/components/bring/snapshots/test_todo.ambr @@ -23,7 +23,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Baumarkt', + 'original_name': None, 'platform': 'bring', 'previous_unique_id': None, 
'supported_features': , @@ -70,7 +70,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Einkauf', + 'original_name': None, 'platform': 'bring', 'previous_unique_id': None, 'supported_features': , diff --git a/tests/components/bring/test_config_flow.py b/tests/components/bring/test_config_flow.py index d307e0ccbbe..93e86051a75 100644 --- a/tests/components/bring/test_config_flow.py +++ b/tests/components/bring/test_config_flow.py @@ -10,7 +10,7 @@ from bring_api.exceptions import ( import pytest from homeassistant.components.bring.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,15 +123,7 @@ async def test_flow_reauth( bring_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": bring_config_entry.entry_id, - "unique_id": bring_config_entry.unique_id, - }, - ) - + result = await bring_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -171,15 +163,7 @@ async def test_flow_reauth_error_and_recover( bring_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": bring_config_entry.entry_id, - "unique_id": bring_config_entry.unique_id, - }, - ) - + result = await bring_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -204,3 +188,29 @@ async def test_flow_reauth_error_and_recover( assert result["reason"] == "reauth_successful" assert len(hass.config_entries.async_entries()) == 1 + + +async def test_flow_reauth_unique_id_mismatch( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test that we abort reauth if the unique id does not match.""" + + mock_bring_client.uuid = "11111111-11111111-11111111-11111111" + + bring_config_entry.add_to_hass(hass) + + result = await bring_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" diff --git a/tests/components/bring/test_diagnostics.py b/tests/components/bring/test_diagnostics.py new file mode 100644 index 00000000000..a86de5a0d2d --- /dev/null +++ b/tests/components/bring/test_diagnostics.py @@ -0,0 +1,27 @@ +"""Test for diagnostics platform of the Bring!
integration.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("mock_bring_client") +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + bring_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, bring_config_entry) + == snapshot + ) diff --git a/tests/components/bring/test_init.py b/tests/components/bring/test_init.py index 613b65e38b6..5ee66999ea4 100644 --- a/tests/components/bring/test_init.py +++ b/tests/components/bring/test_init.py @@ -90,7 +90,14 @@ async def test_init_exceptions( @pytest.mark.parametrize("exception", [BringRequestException, BringParseException]) -@pytest.mark.parametrize("bring_method", ["load_lists", "get_list"]) +@pytest.mark.parametrize( + "bring_method", + [ + "load_lists", + "get_list", + "get_all_user_settings", + ], +) async def test_config_entry_not_ready( hass: HomeAssistant, bring_config_entry: MockConfigEntry, diff --git a/tests/components/bring/test_sensor.py b/tests/components/bring/test_sensor.py new file mode 100644 index 00000000000..974818ccedf --- /dev/null +++ b/tests/components/bring/test_sensor.py @@ -0,0 +1,76 @@ +"""Test for sensor platform of the Bring! integration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.bring.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform + + +@pytest.fixture(autouse=True) +def sensor_only() -> Generator[None]: + """Enable only the sensor platform.""" + with patch( + "homeassistant.components.bring.PLATFORMS", + [Platform.SENSOR], + ): + yield + + +@pytest.mark.usefixtures("mock_bring_client") +async def test_setup( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Snapshot test states of sensor platform.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform( + hass, entity_registry, snapshot, bring_config_entry.entry_id + ) + + +@pytest.mark.parametrize( + ("fixture", "entity_state"), + [ + ("items_invitation", "invitation"), + ("items_shared", "shared"), + ("items", "registered"), + ], +) +async def test_list_access_states( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, + fixture: str, + entity_state: str, +) -> None: + """Snapshot test states of list access sensor.""" + + mock_bring_client.get_list.return_value = load_json_object_fixture( + f"{fixture}.json", DOMAIN + ) + + bring_config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + assert (state := hass.states.get("sensor.einkauf_list_access")) + assert state.state == entity_state diff --git a/tests/components/bring/test_todo.py b/tests/components/bring/test_todo.py index d67429e8f49..9cc4ae3d888 100644 --- a/tests/components/bring/test_todo.py +++ b/tests/components/bring/test_todo.py @@ -1,7 +1,8 @@ """Test for todo platform of the Bring! integration.""" +from collections.abc import Generator import re -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from bring_api import BringItemOperation, BringRequestException import pytest @@ -15,7 +16,7 @@ from homeassistant.components.todo import ( TodoServices, ) from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er @@ -23,6 +24,16 @@ from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +@pytest.fixture(autouse=True) +def todo_only() -> Generator[None]: + """Enable only the todo platform.""" + with patch( + "homeassistant.components.bring.PLATFORMS", + [Platform.TODO], + ): + yield + + @pytest.mark.usefixtures("mock_bring_client") async def test_todo( hass: HomeAssistant, diff --git a/tests/components/bring/test_util.py b/tests/components/bring/test_util.py new file mode 100644 index 00000000000..0d9ed0c5345 --- /dev/null +++ b/tests/components/bring/test_util.py @@ -0,0 +1,56 @@ +"""Test for utility functions of the Bring! 
integration.""" + +from typing import cast + +from bring_api import BringUserSettingsResponse +import pytest + +from homeassistant.components.bring import DOMAIN +from homeassistant.components.bring.coordinator import BringData +from homeassistant.components.bring.util import list_language, sum_attributes + +from tests.common import load_json_object_fixture + + +@pytest.mark.parametrize( + ("list_uuid", "expected"), + [ + ("e542eef6-dba7-4c31-a52c-29e6ab9d83a5", "de-DE"), + ("b4776778-7f6c-496e-951b-92a35d3db0dd", "en-US"), + ("00000000-0000-0000-0000-00000000", None), + ], +) +def test_list_language(list_uuid: str, expected: str | None) -> None: + """Test function list_language.""" + + result = list_language( + list_uuid, + cast( + BringUserSettingsResponse, + load_json_object_fixture("usersettings.json", DOMAIN), + ), + ) + + assert result == expected + + +@pytest.mark.parametrize( + ("attribute", "expected"), + [ + ("urgent", 2), + ("convenient", 2), + ("discounted", 2), + ], +) +def test_sum_attributes(attribute: str, expected: int) -> None: + """Test function sum_attributes.""" + + result = sum_attributes( + cast( + BringData, + load_json_object_fixture("items.json", DOMAIN), + ), + attribute, + ) + + assert result == expected diff --git a/tests/components/broadlink/test_config_flow.py b/tests/components/broadlink/test_config_flow.py index 2def8c0b3b9..f31cb380631 100644 --- a/tests/components/broadlink/test_config_flow.py +++ b/tests/components/broadlink/test_config_flow.py @@ -734,13 +734,9 @@ async def test_flow_reauth_works(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() - data = {"name": device.name, **device.get_entry_data()} with patch(DEVICE_FACTORY, return_value=mock_api): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data - ) - + result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reset" @@ -770,12 +766,8 @@ async def test_flow_reauth_invalid_host(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() - data = {"name": device.name, **device.get_entry_data()} - with patch(DEVICE_FACTORY, return_value=mock_api): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data - ) + result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) device.mac = get_device("Office").mac mock_api = device.get_mock_api() @@ -804,12 +796,9 @@ async def test_flow_reauth_valid_host(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() - data = {"name": device.name, **device.get_entry_data()} with patch(DEVICE_FACTORY, return_value=mock_api): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data - ) + result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) device.host = "192.168.1.128" mock_api = device.get_mock_api() diff --git a/tests/components/brother/snapshots/test_sensor.ambr b/tests/components/brother/snapshots/test_sensor.ambr index a27c5addd61..4de85859461 100644 --- a/tests/components/brother/snapshots/test_sensor.ambr +++ 
b/tests/components/brother/snapshots/test_sensor.ambr @@ -31,7 +31,7 @@ 'supported_features': 0, 'translation_key': 'bw_pages', 'unique_id': '0123456789_bw_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_b_w_pages-state] @@ -39,7 +39,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW B/W pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_b_w_pages', @@ -131,7 +131,7 @@ 'supported_features': 0, 'translation_key': 'black_drum_page_counter', 'unique_id': '0123456789_black_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_black_drum_page_counter-state] @@ -139,7 +139,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Black drum page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_black_drum_page_counter', @@ -231,7 +231,7 @@ 'supported_features': 0, 'translation_key': 'black_drum_remaining_pages', 'unique_id': '0123456789_black_drum_remaining_pages', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_black_drum_remaining_pages-state] @@ -239,7 +239,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Black drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_pages', @@ -331,7 +331,7 @@ 'supported_features': 0, 'translation_key': 'color_pages', 'unique_id': '0123456789_color_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_color_pages-state] @@ -339,7 +339,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Color pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_color_pages', @@ -381,7 +381,7 @@ 'supported_features': 0, 'translation_key': 'cyan_drum_page_counter', 'unique_id': '0123456789_cyan_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_cyan_drum_page_counter-state] @@ -389,7 +389,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Cyan drum page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_cyan_drum_page_counter', @@ -481,7 +481,7 @@ 'supported_features': 0, 'translation_key': 'cyan_drum_remaining_pages', 'unique_id': '0123456789_cyan_drum_remaining_pages', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_pages-state] @@ -489,7 +489,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Cyan drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_pages', @@ -581,7 +581,7 @@ 'supported_features': 0, 'translation_key': 'drum_page_counter', 'unique_id': '0123456789_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_drum_page_counter-state] @@ -589,7 +589,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Drum page 
counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_drum_page_counter', @@ -681,7 +681,7 @@ 'supported_features': 0, 'translation_key': 'drum_remaining_pages', 'unique_id': '0123456789_drum_remaining_pages', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_drum_remaining_pages-state] @@ -689,7 +689,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_drum_remaining_pages', @@ -731,7 +731,7 @@ 'supported_features': 0, 'translation_key': 'duplex_unit_page_counter', 'unique_id': '0123456789_duplex_unit_pages_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_duplex_unit_page_counter-state] @@ -739,7 +739,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Duplex unit page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_duplex_unit_page_counter', @@ -878,7 +878,7 @@ 'supported_features': 0, 'translation_key': 'magenta_drum_page_counter', 'unique_id': '0123456789_magenta_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_magenta_drum_page_counter-state] @@ -886,7 +886,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Magenta drum page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_magenta_drum_page_counter', @@ -978,7 +978,7 @@ 'supported_features': 0, 'translation_key': 'magenta_drum_remaining_pages', 'unique_id': '0123456789_magenta_drum_remaining_pages', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_pages-state] @@ -986,7 +986,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Magenta drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_pages', @@ -1078,7 +1078,7 @@ 'supported_features': 0, 'translation_key': 'page_counter', 'unique_id': '0123456789_page_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_page_counter-state] @@ -1086,7 +1086,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_page_counter', @@ -1224,7 +1224,7 @@ 'supported_features': 0, 'translation_key': 'yellow_drum_page_counter', 'unique_id': '0123456789_yellow_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_yellow_drum_page_counter-state] @@ -1232,7 +1232,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Yellow drum page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_yellow_drum_page_counter', @@ -1324,7 +1324,7 @@ 'supported_features': 0, 'translation_key': 'yellow_drum_remaining_pages', 'unique_id': 
'0123456789_yellow_drum_remaining_pages', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_pages-state] @@ -1332,7 +1332,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Yellow drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_pages', diff --git a/tests/components/brother/test_config_flow.py b/tests/components/brother/test_config_flow.py index ac7af4cc912..929e2f083e9 100644 --- a/tests/components/brother/test_config_flow.py +++ b/tests/components/brother/test_config_flow.py @@ -8,11 +8,7 @@ import pytest from homeassistant.components import zeroconf from homeassistant.components.brother.const import DOMAIN -from homeassistant.config_entries import ( - SOURCE_RECONFIGURE, - SOURCE_USER, - SOURCE_ZEROCONF, -) +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST, CONF_TYPE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -262,17 +258,10 @@ async def test_reconfigure_successful( """Test starting a reconfigure flow.""" await init_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -305,17 +294,10 @@ async def test_reconfigure_not_successful( """Test starting a reconfigure flow but no connection found.""" await init_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" mock_brother_client.async_update.side_effect = exc @@ -325,7 +307,7 @@ async def test_reconfigure_not_successful( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": base_error} mock_brother_client.async_update.side_effect = None @@ -351,17 +333,10 @@ async def test_reconfigure_invalid_hostname( """Test starting a reconfigure flow but no connection found.""" await init_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -369,7 +344,7 @@ async def test_reconfigure_invalid_hostname( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert 
result["step_id"] == "reconfigure" assert result["errors"] == {CONF_HOST: "wrong_host"} @@ -381,17 +356,10 @@ async def test_reconfigure_not_the_same_device( """Test starting the reconfiguration process, but with a different printer.""" await init_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" mock_brother_client.serial = "9876543210" @@ -401,5 +369,5 @@ async def test_reconfigure_not_the_same_device( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": "another_device"} diff --git a/tests/components/brunt/test_config_flow.py b/tests/components/brunt/test_config_flow.py index 2796882a3c1..7a805a9ee52 100644 --- a/tests/components/brunt/test_config_flow.py +++ b/tests/components/brunt/test_config_flow.py @@ -110,15 +110,7 @@ async def test_reauth( unique_id="test-username", ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=None, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" with patch( diff --git a/tests/components/bryant_evolution/test_config_flow.py b/tests/components/bryant_evolution/test_config_flow.py index 39d203201eb..54fc7bfbfcc 100644 --- a/tests/components/bryant_evolution/test_config_flow.py +++ b/tests/components/bryant_evolution/test_config_flow.py @@ -134,13 +134,7 @@ async def test_reconfigure( """Test that reconfigure discovers additional systems and zones.""" # Reconfigure with additional systems and zones. 
- result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": mock_evolution_entry.entry_id, - }, - ) + result = await mock_evolution_entry.start_reconfigure_flow(hass) with ( patch.object( BryantEvolutionLocalClient, @@ -160,7 +154,7 @@ async def test_reconfigure( ) await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT, result - assert result["reason"] == "reconfigured" + assert result["reason"] == "reconfigure_successful" config_entry = hass.config_entries.async_entries()[0] assert config_entry.data[CONF_SYSTEM_ZONE] == [ (1, 1), diff --git a/tests/components/bsblan/__init__.py b/tests/components/bsblan/__init__.py index d233fa068ea..3892fcaaaca 100644 --- a/tests/components/bsblan/__init__.py +++ b/tests/components/bsblan/__init__.py @@ -1 +1,18 @@ """Tests for the bsblan integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_with_selected_platforms( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> None: + """Set up the BSBLAN integration with the selected platforms.""" + config_entry.add_to_hass(hass) + with patch("homeassistant.components.bsblan.PLATFORMS", platforms): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index 13d4017d7c8..7d2db2f8b46 100644 --- a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from bsblan import Device, Info, State, StaticState +from bsblan import Device, HotWaterState, Info, Sensor, State, StaticState import pytest from homeassistant.components.bsblan.const import CONF_PASSKEY, DOMAIN @@ -52,10 +52,17 @@ def mock_bsblan() -> Generator[MagicMock]: load_fixture("device.json", DOMAIN) ) bsblan.state.return_value = State.from_json(load_fixture("state.json", DOMAIN)) - bsblan.static_values.return_value = StaticState.from_json( load_fixture("static.json", DOMAIN) ) + bsblan.sensor.return_value = Sensor.from_json( + load_fixture("sensor.json", DOMAIN) + ) + bsblan.hot_water_state.return_value = HotWaterState.from_json( + load_fixture("dhw_state.json", DOMAIN) + ) + # mock get_temperature_unit property + bsblan.get_temperature_unit = "°C" yield bsblan diff --git a/tests/components/bsblan/fixtures/dhw_state.json b/tests/components/bsblan/fixtures/dhw_state.json new file mode 100644 index 00000000000..41b8c7beda5 --- /dev/null +++ b/tests/components/bsblan/fixtures/dhw_state.json @@ -0,0 +1,110 @@ +{ + "operating_mode": { + "name": "DHW operating mode", + "error": 0, + "value": "On", + "desc": "On", + "dataType": 1, + "readonly": 0, + "unit": "" + }, + "nominal_setpoint": { + "name": "DHW nominal setpoint", + "error": 0, + "value": "50.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "nominal_setpoint_max": { + "name": "DHW nominal setpoint maximum", + "error": 0, + "value": "65.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "reduced_setpoint": { + "name": "DHW reduced setpoint", + "error": 0, + "value": "40.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "release": { + "name": "DHW release programme", + "error": 0, + 
"value": "1", + "desc": "Released", + "dataType": 1, + "readonly": 0, + "unit": "" + }, + "legionella_function": { + "name": "Legionella function fixed weekday", + "error": 0, + "value": "0", + "desc": "Off", + "dataType": 1, + "readonly": 0, + "unit": "" + }, + "legionella_setpoint": { + "name": "Legionella function setpoint", + "error": 0, + "value": "60.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "legionella_periodicity": { + "name": "Legionella function periodicity", + "error": 0, + "value": "7", + "desc": "Weekly", + "dataType": 0, + "readonly": 0, + "unit": "days" + }, + "legionella_function_day": { + "name": "Legionella function day", + "error": 0, + "value": "6", + "desc": "Saturday", + "dataType": 1, + "readonly": 0, + "unit": "" + }, + "legionella_function_time": { + "name": "Legionella function time", + "error": 0, + "value": "12:00", + "desc": "", + "dataType": 2, + "readonly": 0, + "unit": "" + }, + "dhw_actual_value_top_temperature": { + "name": "DHW temperature actual value", + "error": 0, + "value": "48.5", + "desc": "", + "dataType": 0, + "readonly": 1, + "unit": "°C" + }, + "state_dhw_pump": { + "name": "State DHW circulation pump", + "error": 0, + "value": "0", + "desc": "Off", + "dataType": 1, + "readonly": 1, + "unit": "" + } +} diff --git a/tests/components/bsblan/fixtures/sensor.json b/tests/components/bsblan/fixtures/sensor.json new file mode 100644 index 00000000000..3448e7e98d8 --- /dev/null +++ b/tests/components/bsblan/fixtures/sensor.json @@ -0,0 +1,20 @@ +{ + "outside_temperature": { + "name": "Outside temp sensor local", + "error": 0, + "value": "6.1", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "current_temperature": { + "name": "Room temp 1 actual value", + "error": 0, + "value": "18.6", + "desc": "", + "dataType": 0, + "readonly": 1, + "unit": "°C" + } +} diff --git a/tests/components/bsblan/fixtures/state.json b/tests/components/bsblan/fixtures/state.json index 51d4cf2e136..8c458e173d4 100644 --- a/tests/components/bsblan/fixtures/state.json +++ b/tests/components/bsblan/fixtures/state.json @@ -97,5 +97,14 @@ "dataType": 1, "readonly": 1, "unit": "" + }, + "room1_temp_setpoint_boost": { + "name": "Room 1 Temp Setpoint Boost", + "error": 0, + "value": "22.5", + "desc": "Boost", + "dataType": 1, + "readonly": 1, + "unit": "°C" } } diff --git a/tests/components/bsblan/fixtures/static_F.json b/tests/components/bsblan/fixtures/static_F.json new file mode 100644 index 00000000000..a61e870f6e5 --- /dev/null +++ b/tests/components/bsblan/fixtures/static_F.json @@ -0,0 +1,20 @@ +{ + "min_temp": { + "name": "Room temp frost protection setpoint", + "error": 0, + "value": "8.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°F" + }, + "max_temp": { + "name": "Summer/winter changeover temp heat circuit 1", + "error": 0, + "value": "20.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°F" + } +} diff --git a/tests/components/bsblan/snapshots/test_climate.ambr b/tests/components/bsblan/snapshots/test_climate.ambr new file mode 100644 index 00000000000..16828fea752 --- /dev/null +++ b/tests/components/bsblan/snapshots/test_climate.ambr @@ -0,0 +1,147 @@ +# serializer version: 1 +# name: test_celsius_fahrenheit[climate.bsb_lan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 20.0, + 'min_temp': 8.0, + 'preset_modes': list([ + 'eco', + 'none', + ]), + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.bsb_lan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bsblan', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:80:41:19:69:90-climate', + 'unit_of_measurement': None, + }) +# --- +# name: test_celsius_fahrenheit[climate.bsb_lan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 18.6, + 'friendly_name': 'BSB-LAN', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 20.0, + 'min_temp': 8.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'eco', + 'none', + ]), + 'supported_features': , + 'temperature': 18.5, + }), + 'context': , + 'entity_id': 'climate.bsb_lan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate_entity_properties[climate.bsb_lan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 20.0, + 'min_temp': 8.0, + 'preset_modes': list([ + 'eco', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.bsb_lan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bsblan', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:80:41:19:69:90-climate', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_entity_properties[climate.bsb_lan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 18.6, + 'friendly_name': 'BSB-LAN', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 20.0, + 'min_temp': 8.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'eco', + 'none', + ]), + 'supported_features': , + 'temperature': 18.5, + }), + 'context': , + 'entity_id': 'climate.bsb_lan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/bsblan/snapshots/test_diagnostics.ambr b/tests/components/bsblan/snapshots/test_diagnostics.ambr index b172d26c249..9fabd373205 100644 --- a/tests/components/bsblan/snapshots/test_diagnostics.ambr +++ b/tests/components/bsblan/snapshots/test_diagnostics.ambr @@ -1,6 +1,111 @@ # serializer version: 1 # name: test_diagnostics dict({ + 'coordinator_data': dict({ + 'sensor': dict({ + 'current_temperature': dict({ + 'data_type': 0, + 'desc': '', + 'error': 0, + 'name': 'Room temp 1 actual value', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, + 'unit': '°C', + 'value': 18.6, + }), + 'outside_temperature': dict({ + 'data_type': 0, + 'desc': '', + 'error': 0, + 'name': 'Outside temp sensor local', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, + 'unit': '°C', + 'value': 6.1, + }), + }), + 'state': dict({ + 'current_temperature': dict({ + 'data_type': 0, + 'desc': '', + 'error': 0, + 'name': 'Room temp 1 actual value', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, + 'unit': '°C', + 'value': 18.6, 
+ }), + 'hvac_action': dict({ + 'data_type': 1, + 'desc': 'Raumtemp’begrenzung', + 'error': 0, + 'name': 'Status heating circuit 1', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, + 'unit': '', + 'value': 122, + }), + 'hvac_mode': dict({ + 'data_type': 1, + 'desc': 'Komfort', + 'error': 0, + 'name': 'Operating mode', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, + 'unit': '', + 'value': 'heat', + }), + 'hvac_mode2': dict({ + 'data_type': 1, + 'desc': 'Reduziert', + 'error': 0, + 'name': 'Operating mode', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, + 'unit': '', + 'value': 2, + }), + 'room1_temp_setpoint_boost': dict({ + 'data_type': 1, + 'desc': 'Boost', + 'error': 0, + 'name': 'Room 1 Temp Setpoint Boost', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, + 'unit': '°C', + 'value': '22.5', + }), + 'room1_thermostat_mode': dict({ + 'data_type': 1, + 'desc': 'Kein Bedarf', + 'error': 0, + 'name': 'Raumthermostat 1', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, + 'unit': '', + 'value': 0, + }), + 'target_temperature': dict({ + 'data_type': 0, + 'desc': '', + 'error': 0, + 'name': 'Room temperature Comfort setpoint', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, + 'unit': '°C', + 'value': 18.5, + }), + }), + }), 'device': dict({ 'MAC': '00:80:41:19:69:90', 'name': 'BSB-LAN', @@ -11,67 +116,59 @@ 'controller_family': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Device family', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', - 'value': '211', + 'value': 211, }), 'controller_variant': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Device variant', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', - 'value': '127', + 'value': 127, }), 'device_identification': dict({ 'data_type': 7, 'desc': '', + 'error': 0, 'name': 'Gerte-Identifikation', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', 'value': 'RVS21.831F/127', }), }), - 'state': dict({ - 'current_temperature': dict({ + 'static': dict({ + 'max_temp': dict({ 'data_type': 0, 'desc': '', - 'name': 'Room temp 1 actual value', + 'error': 0, + 'name': 'Summer/winter changeover temp heat circuit 1', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '18.6', + 'value': 20.0, }), - 'hvac_action': dict({ - 'data_type': 1, - 'desc': 'Raumtemp’begrenzung', - 'name': 'Status heating circuit 1', - 'unit': '', - 'value': '122', - }), - 'hvac_mode': dict({ - 'data_type': 1, - 'desc': 'Komfort', - 'name': 'Operating mode', - 'unit': '', - 'value': 'heat', - }), - 'hvac_mode2': dict({ - 'data_type': 1, - 'desc': 'Reduziert', - 'name': 'Operating mode', - 'unit': '', - 'value': '2', - }), - 'room1_thermostat_mode': dict({ - 'data_type': 1, - 'desc': 'Kein Bedarf', - 'name': 'Raumthermostat 1', - 'unit': '', - 'value': '0', - }), - 'target_temperature': dict({ + 'min_temp': dict({ 'data_type': 0, 'desc': '', - 'name': 'Room temperature Comfort setpoint', + 'error': 0, + 'name': 'Room temp frost protection setpoint', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '18.5', + 'value': 8.0, }), }), }) diff --git a/tests/components/bsblan/snapshots/test_sensor.ambr b/tests/components/bsblan/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..0146dd23b3d --- /dev/null +++ b/tests/components/bsblan/snapshots/test_sensor.ambr @@ -0,0 +1,103 @@ +# serializer version: 1 +# name: test_sensor_entity_properties[sensor.bsb_lan_current_temperature-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bsb_lan_current_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current Temperature', + 'platform': 'bsblan', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_temperature', + 'unique_id': '00:80:41:19:69:90-current_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_entity_properties[sensor.bsb_lan_current_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'BSB-LAN Current Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.bsb_lan_current_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '18.6', + }) +# --- +# name: test_sensor_entity_properties[sensor.bsb_lan_outside_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bsb_lan_outside_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outside Temperature', + 'platform': 'bsblan', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outside_temperature', + 'unique_id': '00:80:41:19:69:90-outside_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_entity_properties[sensor.bsb_lan_outside_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'BSB-LAN Outside Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.bsb_lan_outside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.1', + }) +# --- diff --git a/tests/components/bsblan/snapshots/test_water_heater.ambr b/tests/components/bsblan/snapshots/test_water_heater.ambr new file mode 100644 index 00000000000..c1a13b764c0 --- /dev/null +++ b/tests/components/bsblan/snapshots/test_water_heater.ambr @@ -0,0 +1,68 @@ +# serializer version: 1 +# name: test_water_heater_states[dhw_state.json][water_heater.bsb_lan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 65.0, + 'min_temp': 40.0, + 'operation_list': list([ + 'eco', + 'off', + 'on', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.bsb_lan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bsblan', + 'previous_unique_id': None, + 'supported_features': , + 
'translation_key': None, + 'unique_id': '00:80:41:19:69:90', + 'unit_of_measurement': None, + }) +# --- +# name: test_water_heater_states[dhw_state.json][water_heater.bsb_lan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 48.5, + 'friendly_name': 'BSB-LAN', + 'max_temp': 65.0, + 'min_temp': 40.0, + 'operation_list': list([ + 'eco', + 'off', + 'on', + ]), + 'operation_mode': 'on', + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': 50.0, + }), + 'context': , + 'entity_id': 'water_heater.bsb_lan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/bsblan/test_climate.py b/tests/components/bsblan/test_climate.py new file mode 100644 index 00000000000..7ee12c5fa1a --- /dev/null +++ b/tests/components/bsblan/test_climate.py @@ -0,0 +1,276 @@ +"""Tests for the BSB-Lan climate platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock + +from bsblan import BSBLANError +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + ATTR_PRESET_MODE, + DOMAIN as CLIMATE_DOMAIN, + PRESET_ECO, + PRESET_NONE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +import homeassistant.helpers.entity_registry as er + +from . import setup_with_selected_platforms + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +ENTITY_ID = "climate.bsb_lan" + + +async def test_celsius_fahrenheit( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test Celsius and Fahrenheit temperature units.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_climate_entity_properties( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the climate entity properties.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Test target_temperature + mock_target_temp = MagicMock() + mock_target_temp.value = 23.5 + mock_bsblan.state.return_value.target_temperature = mock_target_temp + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes["temperature"] == 23.5 + + # Test hvac_mode + mock_hvac_mode = MagicMock() + mock_hvac_mode.value = HVACMode.AUTO + mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.state == HVACMode.AUTO + + # Test preset_mode + mock_hvac_mode.value = PRESET_ECO + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await 
hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes["preset_mode"] == PRESET_ECO + + +@pytest.mark.parametrize( + "mode", + [HVACMode.HEAT, HVACMode.AUTO, HVACMode.OFF], +) +async def test_async_set_hvac_mode( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + mode: HVACMode, +) -> None: + """Test setting HVAC mode via service call.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + # Call the service to set HVAC mode + await hass.services.async_call( + domain=CLIMATE_DOMAIN, + service=SERVICE_SET_HVAC_MODE, + service_data={ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: mode}, + blocking=True, + ) + + # Assert that the thermostat method was called + mock_bsblan.thermostat.assert_called_once_with(hvac_mode=mode) + mock_bsblan.thermostat.reset_mock() + + +@pytest.mark.parametrize( + ("hvac_mode", "preset_mode"), + [ + (HVACMode.AUTO, PRESET_ECO), + (HVACMode.AUTO, PRESET_NONE), + ], +) +async def test_async_set_preset_mode_succes( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + hvac_mode: HVACMode, + preset_mode: str, +) -> None: + """Test setting preset mode via service call.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + # patch hvac_mode + mock_hvac_mode = MagicMock() + mock_hvac_mode.value = hvac_mode + mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode + + # Attempt to set the preset mode + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset_mode}, + blocking=True, + ) + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ("hvac_mode", "preset_mode"), + [ + ( + HVACMode.HEAT, + PRESET_ECO, + ) + ], +) +async def test_async_set_preset_mode_error( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + hvac_mode: HVACMode, + preset_mode: str, +) -> None: + """Test setting preset mode via service call.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + # patch hvac_mode + mock_hvac_mode = MagicMock() + mock_hvac_mode.value = hvac_mode + mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode + + # Attempt to set the preset mode + error_message = "Preset mode can only be set when HVAC mode is set to 'auto'" + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset_mode}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("target_temp"), + [ + (8.0), # Min temperature + (15.0), # Mid-range temperature + (20.0), # Max temperature + ], +) +async def test_async_set_temperature( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + target_temp: float, +) -> None: + """Test setting temperature via service call.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + await hass.services.async_call( + domain=CLIMATE_DOMAIN, + service=SERVICE_SET_TEMPERATURE, + service_data={ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: target_temp}, + blocking=True, + ) + # Assert that the thermostat method was called with the correct temperature + mock_bsblan.thermostat.assert_called_once_with(target_temperature=target_temp) + + +async def test_async_set_data( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: 
MockConfigEntry, +) -> None: + """Test setting data via service calls.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + # Test setting temperature + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 19}, + blocking=True, + ) + mock_bsblan.thermostat.assert_called_once_with(target_temperature=19) + mock_bsblan.thermostat.reset_mock() + + # Test setting HVAC mode + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + mock_bsblan.thermostat.assert_called_once_with(hvac_mode=HVACMode.HEAT) + mock_bsblan.thermostat.reset_mock() + + # Patch HVAC mode to AUTO + mock_hvac_mode = MagicMock() + mock_hvac_mode.value = HVACMode.AUTO + mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode + + # Test setting preset mode to ECO + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_ECO}, + blocking=True, + ) + mock_bsblan.thermostat.assert_called_once_with(hvac_mode=PRESET_ECO) + mock_bsblan.thermostat.reset_mock() + + # Test setting preset mode to NONE + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_NONE}, + blocking=True, + ) + mock_bsblan.thermostat.assert_called_once() + mock_bsblan.thermostat.reset_mock() + + # Test error handling + mock_bsblan.thermostat.side_effect = BSBLANError("Test error") + error_message = "An error occurred while updating the BSBLAN device" + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 20}, + blocking=True, + ) diff --git a/tests/components/bsblan/test_diagnostics.py b/tests/components/bsblan/test_diagnostics.py index 8939456c2ac..aea53f8a1a2 100644 --- a/tests/components/bsblan/test_diagnostics.py +++ b/tests/components/bsblan/test_diagnostics.py @@ -1,5 +1,7 @@ """Tests for the diagnostics data provided by the BSBLan integration.""" +from unittest.mock import AsyncMock + from syrupy import SnapshotAssertion from homeassistant.core import HomeAssistant @@ -11,11 +13,13 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics( hass: HomeAssistant, + mock_bsblan: AsyncMock, hass_client: ClientSessionGenerator, init_integration: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" + diagnostics_data = await get_diagnostics_for_config_entry( hass, hass_client, init_integration ) diff --git a/tests/components/bsblan/test_sensor.py b/tests/components/bsblan/test_sensor.py new file mode 100644 index 00000000000..c95671a1a6b --- /dev/null +++ b/tests/components/bsblan/test_sensor.py @@ -0,0 +1,30 @@ +"""Tests for the BSB-Lan sensor platform.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er + +from . 
import setup_with_selected_platforms + +from tests.common import MockConfigEntry, snapshot_platform + +ENTITY_CURRENT_TEMP = "sensor.bsb_lan_current_temperature" +ENTITY_OUTSIDE_TEMP = "sensor.bsb_lan_outside_temperature" + + +async def test_sensor_entity_properties( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the sensor entity properties.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.SENSOR]) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/bsblan/test_water_heater.py b/tests/components/bsblan/test_water_heater.py new file mode 100644 index 00000000000..ed920774aa5 --- /dev/null +++ b/tests/components/bsblan/test_water_heater.py @@ -0,0 +1,210 @@ +"""Tests for the BSB-Lan water heater platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock + +from bsblan import BSBLANError +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.water_heater import ( + ATTR_OPERATION_MODE, + DOMAIN as WATER_HEATER_DOMAIN, + SERVICE_SET_OPERATION_MODE, + SERVICE_SET_TEMPERATURE, + STATE_ECO, + STATE_OFF, + STATE_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +import homeassistant.helpers.entity_registry as er + +from . import setup_with_selected_platforms + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +ENTITY_ID = "water_heater.bsb_lan" + + +@pytest.mark.parametrize( + ("dhw_file"), + [ + ("dhw_state.json"), + ], +) +async def test_water_heater_states( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + dhw_file: str, +) -> None: + """Test water heater states with different configurations.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_water_heater_entity_properties( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the water heater entity properties.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + state = hass.states.get(ENTITY_ID) + assert state is not None + + # Test when nominal setpoint is "10" + mock_setpoint = MagicMock() + mock_setpoint.value = 10 + mock_bsblan.hot_water_state.return_value.nominal_setpoint = mock_setpoint + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes.get("temperature") == 10 + + +@pytest.mark.parametrize( + ("mode", "bsblan_mode"), + [ + (STATE_ECO, "Eco"), + (STATE_OFF, "Off"), + (STATE_ON, "On"), + ], +) +async def test_set_operation_mode( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + mode: str, + bsblan_mode: str, +) -> None: + """Test setting operation mode.""" + await setup_with_selected_platforms( + hass, mock_config_entry, 
[Platform.WATER_HEATER] + ) + + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_OPERATION_MODE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_OPERATION_MODE: mode, + }, + blocking=True, + ) + + mock_bsblan.set_hot_water.assert_called_once_with(operating_mode=bsblan_mode) + + +async def test_set_invalid_operation_mode( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting invalid operation mode.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + with pytest.raises( + HomeAssistantError, + match=r"Operation mode invalid_mode is not valid for water_heater\.bsb_lan\. Valid operation modes are: eco, off, on", + ): + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_OPERATION_MODE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_OPERATION_MODE: "invalid_mode", + }, + blocking=True, + ) + + +async def test_set_temperature( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting temperature.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_TEMPERATURE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_TEMPERATURE: 50, + }, + blocking=True, + ) + + mock_bsblan.set_hot_water.assert_called_once_with(nominal_setpoint=50) + + +async def test_set_temperature_failure( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting temperature with API failure.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + mock_bsblan.set_hot_water.side_effect = BSBLANError("Test error") + + with pytest.raises( + HomeAssistantError, match="An error occurred while setting the temperature" + ): + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_TEMPERATURE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_TEMPERATURE: 50, + }, + blocking=True, + ) + + +async def test_operation_mode_error( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test operation mode setting with API failure.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + mock_bsblan.set_hot_water.side_effect = BSBLANError("Test error") + + with pytest.raises( + HomeAssistantError, match="An error occurred while setting the operation mode" + ): + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_OPERATION_MODE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_OPERATION_MODE: STATE_ECO, + }, + blocking=True, + ) diff --git a/tests/components/bthome/test_config_flow.py b/tests/components/bthome/test_config_flow.py index acf490d341e..faf2f1c9ef5 100644 --- a/tests/components/bthome/test_config_flow.py +++ b/tests/components/bthome/test_config_flow.py @@ -563,16 +563,7 @@ async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: device = DeviceData() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": {"name": entry.title}, - "unique_id": entry.unique_id, - }, - data=entry.data | {"device": device}, - ) + result = await entry.start_reauth_flow(hass, 
data={"device": device}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" diff --git a/tests/components/bthome/test_device_trigger.py b/tests/components/bthome/test_device_trigger.py index 459654826f9..c4c900ef6e1 100644 --- a/tests/components/bthome/test_device_trigger.py +++ b/tests/components/bthome/test_device_trigger.py @@ -1,10 +1,19 @@ """Test BTHome BLE events.""" +import pytest + from homeassistant.components import automation from homeassistant.components.bluetooth import DOMAIN as BLUETOOTH_DOMAIN from homeassistant.components.bthome.const import CONF_SUBTYPE, DOMAIN from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_DOMAIN, + CONF_PLATFORM, + CONF_TYPE, + STATE_ON, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -121,6 +130,117 @@ async def test_get_triggers_button( await hass.async_block_till_done() +async def test_get_triggers_multiple_buttons( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test that we get the expected triggers for multiple buttons device.""" + mac = "A4:C1:38:8D:18:B2" + entry = await _async_setup_bthome_device(hass, mac) + events = async_capture_events(hass, "bthome_ble_event") + + # Emit button_1 long press and button_2 press events + # so it creates the device in the registry + inject_bluetooth_service_info_bleak( + hass, + make_bthome_v2_adv(mac, b"\x40\x3a\x04\x3a\x01"), + ) + + # wait for the event + await hass.async_block_till_done() + assert len(events) == 2 + + device = device_registry.async_get_device(identifiers={get_device_id(mac)}) + assert device + expected_trigger1 = { + CONF_PLATFORM: "device", + CONF_DOMAIN: DOMAIN, + CONF_DEVICE_ID: device.id, + CONF_TYPE: "button_1", + CONF_SUBTYPE: "long_press", + "metadata": {}, + } + expected_trigger2 = { + CONF_PLATFORM: "device", + CONF_DOMAIN: DOMAIN, + CONF_DEVICE_ID: device.id, + CONF_TYPE: "button_2", + CONF_SUBTYPE: "press", + "metadata": {}, + } + triggers = await async_get_device_automations( + hass, DeviceAutomationType.TRIGGER, device.id + ) + assert expected_trigger1 in triggers + assert expected_trigger2 in triggers + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ("event_class", "event_type", "expected"), + [ + ("button_1", "long_press", STATE_ON), + ("button_2", "press", STATE_ON), + ("button_3", "long_press", STATE_UNAVAILABLE), + ("button", "long_press", STATE_UNAVAILABLE), + ("button_1", "invalid_press", STATE_UNAVAILABLE), + ], +) +async def test_validate_trigger_config( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + event_class: str, + event_type: str, + expected: str, +) -> None: + """Test unsupported trigger does not return a trigger config.""" + mac = "A4:C1:38:8D:18:B2" + entry = await _async_setup_bthome_device(hass, mac) + + # Emit button_1 long press and button_2 press events + # so it creates the device in the registry + inject_bluetooth_service_info_bleak( + hass, + make_bthome_v2_adv(mac, b"\x40\x3a\x04\x3a\x01"), + ) + + # wait for the event + await hass.async_block_till_done() + + device = device_registry.async_get_device(identifiers={get_device_id(mac)}) + + assert await 
async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: [ + { + "trigger": { + CONF_PLATFORM: "device", + CONF_DOMAIN: DOMAIN, + CONF_DEVICE_ID: device.id, + CONF_TYPE: event_class, + CONF_SUBTYPE: event_type, + }, + "action": { + "service": "test.automation", + "data_template": {"some": "test_trigger_button_long_press"}, + }, + }, + ] + }, + ) + await hass.async_block_till_done() + + automations = hass.states.async_entity_ids(automation.DOMAIN) + assert len(automations) == 1 + assert hass.states.get(automations[0]).state == expected + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + async def test_get_triggers_dimmer( hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: @@ -235,7 +355,7 @@ async def test_if_fires_on_motion_detected( make_bthome_v2_adv(mac, b"\x40\x3a\x03"), ) - # # wait for the event + # wait for the event await hass.async_block_till_done() device = device_registry.async_get_device(identifiers={get_device_id(mac)}) diff --git a/tests/components/caldav/test_config_flow.py b/tests/components/caldav/test_config_flow.py index 0079e59a931..bf22fb0bd9c 100644 --- a/tests/components/caldav/test_config_flow.py +++ b/tests/components/caldav/test_config_flow.py @@ -106,13 +106,7 @@ async def test_reauth_success( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -147,13 +141,7 @@ async def test_reauth_failure( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/calendar/test_init.py b/tests/components/calendar/test_init.py index 4ad5e11b8e4..36b102b933a 100644 --- a/tests/components/calendar/test_init.py +++ b/tests/components/calendar/test_init.py @@ -14,7 +14,8 @@ import voluptuous as vol from homeassistant.components.calendar import DOMAIN, SERVICE_GET_EVENTS from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceNotSupported +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .conftest import MockCalendarEntity, MockConfigEntry @@ -214,8 +215,12 @@ async def test_unsupported_websocket( async def test_unsupported_create_event_service(hass: HomeAssistant) -> None: """Test unsupported service call.""" - - with pytest.raises(HomeAssistantError, match="does not support this service"): + await async_setup_component(hass, "homeassistant", {}) + with pytest.raises( + ServiceNotSupported, + match="Entity calendar.calendar_1 does not " + "support action calendar.create_event", + ): await hass.services.async_call( DOMAIN, "create_event", diff --git a/tests/components/cambridge_audio/__init__.py b/tests/components/cambridge_audio/__init__.py new file mode 100644 index 00000000000..4e11a728f41 --- /dev/null +++ b/tests/components/cambridge_audio/__init__.py @@ -0,0 +1,25 @@ +"""Tests for the Cambridge Audio integration.""" + +from unittest.mock 
import AsyncMock + +from aiostreammagic.models import CallbackType + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +async def mock_state_update( + client: AsyncMock, callback_type: CallbackType = CallbackType.STATE +) -> None: + """Trigger a callback in the media player.""" + for callback in client.register_state_update_callbacks.call_args_list: + await callback[0][0](client, callback_type) diff --git a/tests/components/cambridge_audio/conftest.py b/tests/components/cambridge_audio/conftest.py new file mode 100644 index 00000000000..33a9ded70e3 --- /dev/null +++ b/tests/components/cambridge_audio/conftest.py @@ -0,0 +1,85 @@ +"""Cambridge Audio tests configuration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, Mock, patch + +from aiostreammagic.models import ( + AudioOutput, + Display, + Info, + NowPlaying, + PlayState, + PresetList, + Source, + State, + Update, +) +import pytest + +from homeassistant.components.cambridge_audio.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry, load_fixture, load_json_array_fixture + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.cambridge_audio.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_stream_magic_client() -> Generator[AsyncMock]: + """Mock an Cambridge Audio client.""" + with ( + patch( + "homeassistant.components.cambridge_audio.StreamMagicClient", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.cambridge_audio.config_flow.StreamMagicClient", + new=mock_client, + ), + ): + client = mock_client.return_value + client.host = "192.168.20.218" + client.info = Info.from_json(load_fixture("get_info.json", DOMAIN)) + client.sources = [ + Source.from_dict(x) + for x in load_json_array_fixture("get_sources.json", DOMAIN) + ] + client.state = State.from_json(load_fixture("get_state.json", DOMAIN)) + client.play_state = PlayState.from_json( + load_fixture("get_play_state.json", DOMAIN) + ) + client.now_playing = NowPlaying.from_json( + load_fixture("get_now_playing.json", DOMAIN) + ) + client.display = Display.from_json(load_fixture("get_display.json", DOMAIN)) + client.update = Update.from_json(load_fixture("get_update.json", DOMAIN)) + client.preset_list = PresetList.from_json( + load_fixture("get_presets_list.json", DOMAIN) + ) + client.audio_output = AudioOutput.from_json( + load_fixture("get_audio_output.json", DOMAIN) + ) + client.is_connected = Mock(return_value=True) + client.position_last_updated = client.play_state.position + client.unregister_state_update_callbacks.return_value = True + + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Cambridge Audio CXNv2", + data={CONF_HOST: "192.168.20.218"}, + unique_id="0020c2d8", + ) diff --git a/tests/components/cambridge_audio/const.py b/tests/components/cambridge_audio/const.py new file mode 100644 index 00000000000..36057c79bb3 --- /dev/null +++ b/tests/components/cambridge_audio/const.py 
@@ -0,0 +1,6 @@ +"""Constants for Cambridge Audio integration tests.""" + +from homeassistant.components.media_player import DOMAIN as MP_DOMAIN + +DEVICE_NAME = "cambridge_audio_cxnv2" +ENTITY_ID = f"{MP_DOMAIN}.{DEVICE_NAME}" diff --git a/tests/components/cambridge_audio/fixtures/get_audio_output.json b/tests/components/cambridge_audio/fixtures/get_audio_output.json new file mode 100644 index 00000000000..e38ae037307 --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_audio_output.json @@ -0,0 +1,16 @@ +{ + "outputs": [ + { + "id": "speaker_a", + "name": "Speaker A" + }, + { + "id": "speaker_b", + "name": "Speaker B" + }, + { + "id": "headphones", + "name": "Headphones" + } + ] +} diff --git a/tests/components/cambridge_audio/fixtures/get_display.json b/tests/components/cambridge_audio/fixtures/get_display.json new file mode 100644 index 00000000000..73cbf5a60b3 --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_display.json @@ -0,0 +1,3 @@ +{ + "brightness": "bright" +} diff --git a/tests/components/cambridge_audio/fixtures/get_info.json b/tests/components/cambridge_audio/fixtures/get_info.json new file mode 100644 index 00000000000..ee88995412e --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_info.json @@ -0,0 +1,32 @@ +{ + "name": "Cambridge Audio CXNv2", + "timezone": "America/Chicago", + "locale": "en_GB", + "usage_reports": true, + "setup": true, + "sources_setup": true, + "versions": [ + { + "component": "cast", + "version": "1.52.272222" + }, + { + "component": "MCU", + "version": "3.1+0.5+36" + }, + { + "component": "service-pack", + "version": "v022-a-151+a" + }, + { + "component": "application", + "version": "1.0+gitAUTOINC+a94a3e2ad8" + } + ], + "udn": "02680b5c-1320-4d54-9f7c-3cfe915ad4c3", + "hcv": 3764, + "model": "CXNv2", + "unit_id": "0020c2d8", + "max_http_body_size": 65536, + "api": "1.8" +} diff --git a/tests/components/cambridge_audio/fixtures/get_now_playing.json b/tests/components/cambridge_audio/fixtures/get_now_playing.json new file mode 100644 index 00000000000..8dcc781be9b --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_now_playing.json @@ -0,0 +1,25 @@ +{ + "state": "PLAYING", + "source": { + "id": "AIRPLAY", + "name": "AirPlay" + }, + "allow_apd": false, + "listening_on": "Listening on Cambridge Audio CXNv2 - AirPlay", + "display": { + "line1": "Holiday", + "line2": "Green Day", + "line3": "Greatest Hits: God's Favorite Band", + "format": "44.1kHz/16bit ALAC", + "mqa": "none", + "playback_source": "iPhone", + "class": "stream.service.airplay", + "art_file": "/tmp/current/AlbumArtFile-811-363", + "art_url": "http://192.168.20.218:80/album-art-2d89?id=1:246", + "progress": { + "position": 216, + "duration": 232 + } + }, + "controls": ["play_pause", "track_next", "track_previous"] +} diff --git a/tests/components/cambridge_audio/fixtures/get_play_state.json b/tests/components/cambridge_audio/fixtures/get_play_state.json new file mode 100644 index 00000000000..cd727ee58a7 --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_play_state.json @@ -0,0 +1,22 @@ +{ + "state": "play", + "position": 179, + "presettable": false, + "mode_repeat": "off", + "mode_shuffle": "off", + "metadata": { + "class": "md.track", + "source": "AIRPLAY", + "name": "AirPlay", + "duration": 232, + "album": "Greatest Hits: God's Favorite Band", + "artist": "Green Day", + "title": "Holiday", + "art_url": "http://192.168.20.218:80/album-art-2d89?id=1:246", + "mqa": "none", + "codec": "ALAC", + "lossless": true, + 
"sample_rate": 44100, + "bit_depth": 16 + } +} diff --git a/tests/components/cambridge_audio/fixtures/get_presets_list.json b/tests/components/cambridge_audio/fixtures/get_presets_list.json new file mode 100644 index 00000000000..87d49e9fd30 --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_presets_list.json @@ -0,0 +1,34 @@ +{ + "start": 1, + "end": 99, + "max_presets": 99, + "presettable": true, + "presets": [ + { + "id": 1, + "name": "Chicago House Radio", + "type": "Radio", + "class": "stream.radio", + "state": "OK", + "is_playing": false, + "art_url": "https://static.airable.io/43/68/432868.png", + "airable_radio_id": 5317566146608442 + }, + { + "id": 2, + "name": "Spotify: Good & Evil", + "type": "Spotify", + "class": "stream.service.spotify", + "state": "OK", + "is_playing": true, + "art_url": "https://i.scdn.co/image/ab67616d0000b27325a5a1ed28871e8e53e62d59" + }, + { + "id": 3, + "name": "Unknown Preset Type", + "type": "Unknown", + "class": "stream.unknown", + "state": "OK" + } + ] +} diff --git a/tests/components/cambridge_audio/fixtures/get_sources.json b/tests/components/cambridge_audio/fixtures/get_sources.json new file mode 100644 index 00000000000..185f65e5ff6 --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_sources.json @@ -0,0 +1,113 @@ +[ + { + "id": "IR", + "name": "Internet Radio", + "default_name": "Internet Radio", + "class": "stream.radio", + "nameable": false, + "ui_selectable": false, + "description": "Internet Radio", + "description_locale": "Internet Radio", + "preferred_order": 9 + }, + { + "id": "USB_AUDIO", + "name": "USB Audio", + "default_name": "USB Audio", + "class": "digital.usb", + "nameable": true, + "ui_selectable": true, + "description": "USB Audio", + "description_locale": "USB Audio", + "preferred_order": 1 + }, + { + "id": "SPDIF_COAX", + "name": "D2", + "default_name": "D2", + "class": "digital.coax", + "nameable": true, + "ui_selectable": false, + "description": "Digital Co-axial", + "description_locale": "Digital Co-axial", + "preferred_order": 3 + }, + { + "id": "SPDIF_TOSLINK", + "name": "D1", + "default_name": "D1", + "class": "digital.toslink", + "nameable": true, + "ui_selectable": false, + "description": "Digital Optical", + "description_locale": "Digital Optical", + "preferred_order": 2 + }, + { + "id": "MEDIA_PLAYER", + "name": "Media Library", + "default_name": "Media Library", + "class": "stream.media", + "nameable": false, + "ui_selectable": true, + "description": "Media Player", + "description_locale": "Media Player", + "preferred_order": 10 + }, + { + "id": "AIRPLAY", + "name": "AirPlay", + "default_name": "AirPlay", + "class": "stream.service.airplay", + "nameable": false, + "ui_selectable": true, + "description": "AirPlay", + "description_locale": "AirPlay", + "preferred_order": 11 + }, + { + "id": "SPOTIFY", + "name": "Spotify", + "default_name": "Spotify", + "class": "stream.service.spotify", + "nameable": false, + "ui_selectable": true, + "description": "Spotify", + "description_locale": "Spotify", + "preferred_order": 6, + "normalisation": "off" + }, + { + "id": "CAST", + "name": "Chromecast built-in", + "default_name": "Chromecast built-in", + "class": "stream.service.cast", + "nameable": false, + "ui_selectable": true, + "description": "Chromecast built-in", + "description_locale": "Chromecast built-in", + "preferred_order": 8 + }, + { + "id": "ROON", + "name": "Roon Ready", + "default_name": "Roon Ready", + "class": "stream.service.roon", + "nameable": false, + "ui_selectable": false, + 
"description": "Roon Ready", + "description_locale": "Roon Ready", + "preferred_order": 5 + }, + { + "id": "TIDAL", + "name": "TIDAL Connect", + "default_name": "TIDAL Connect", + "class": "stream.service.tidal", + "nameable": false, + "ui_selectable": false, + "description": "TIDAL", + "description_locale": "TIDAL", + "preferred_order": 7 + } +] diff --git a/tests/components/cambridge_audio/fixtures/get_state.json b/tests/components/cambridge_audio/fixtures/get_state.json new file mode 100644 index 00000000000..1acf0df4f6a --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_state.json @@ -0,0 +1,7 @@ +{ + "source": "AIRPLAY", + "power": true, + "pre_amp_mode": false, + "pre_amp_state": "disabled_user", + "cbus": "off" +} diff --git a/tests/components/cambridge_audio/fixtures/get_update.json b/tests/components/cambridge_audio/fixtures/get_update.json new file mode 100644 index 00000000000..a6fec6265c0 --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_update.json @@ -0,0 +1,5 @@ +{ + "early_update": false, + "update_available": false, + "updating": false +} diff --git a/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr b/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..1ba9c4093f6 --- /dev/null +++ b/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr @@ -0,0 +1,196 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'display': dict({ + 'brightness': 'bright', + }), + 'info': dict({ + 'api_version': '1.8', + 'locale': 'en_GB', + 'model': 'CXNv2', + 'name': 'Cambridge Audio CXNv2', + 'timezone': 'America/Chicago', + 'udn': '02680b5c-1320-4d54-9f7c-3cfe915ad4c3', + 'unit_id': '0020c2d8', + }), + 'now_playing': dict({ + 'controls': list([ + 'play_pause', + 'track_next', + 'track_previous', + ]), + }), + 'play_state': dict({ + 'metadata': dict({ + 'album': "Greatest Hits: God's Favorite Band", + 'art_url': 'http://192.168.20.218:80/album-art-2d89?id=1:246', + 'artist': 'Green Day', + 'bitrate': None, + 'class_name': 'md.track', + 'codec': 'ALAC', + 'duration': 232, + 'encoding': None, + 'lossless': True, + 'mqa': 'none', + 'name': 'AirPlay', + 'radio_id': None, + 'sample_format': None, + 'sample_rate': 44100, + 'signal': None, + 'source': 'AIRPLAY', + 'station': None, + 'title': 'Holiday', + }), + 'mode_repeat': 'off', + 'mode_shuffle': 'off', + 'position': 179, + 'presettable': False, + 'state': 'play', + }), + 'presets_list': dict({ + 'end': 99, + 'max_presets': 99, + 'presets': list([ + dict({ + 'airable_radio_id': 5317566146608442, + 'art_url': 'https://static.airable.io/43/68/432868.png', + 'is_playing': False, + 'name': 'Chicago House Radio', + 'preset_class': 'stream.radio', + 'preset_id': 1, + 'state': 'OK', + 'type': 'Radio', + }), + dict({ + 'airable_radio_id': None, + 'art_url': 'https://i.scdn.co/image/ab67616d0000b27325a5a1ed28871e8e53e62d59', + 'is_playing': True, + 'name': 'Spotify: Good & Evil', + 'preset_class': 'stream.service.spotify', + 'preset_id': 2, + 'state': 'OK', + 'type': 'Spotify', + }), + dict({ + 'airable_radio_id': None, + 'art_url': None, + 'is_playing': False, + 'name': 'Unknown Preset Type', + 'preset_class': 'stream.unknown', + 'preset_id': 3, + 'state': 'OK', + 'type': 'Unknown', + }), + ]), + 'presettable': True, + 'start': 1, + }), + 'sources': list([ + dict({ + 'default_name': 'Internet Radio', + 'description': 'Internet Radio', + 'description_locale': 'Internet Radio', + 'id': 'IR', + 'name': 'Internet Radio', + 'nameable': 
False, + 'preferred_order': 9, + 'ui_selectable': False, + }), + dict({ + 'default_name': 'USB Audio', + 'description': 'USB Audio', + 'description_locale': 'USB Audio', + 'id': 'USB_AUDIO', + 'name': 'USB Audio', + 'nameable': True, + 'preferred_order': 1, + 'ui_selectable': True, + }), + dict({ + 'default_name': 'D2', + 'description': 'Digital Co-axial', + 'description_locale': 'Digital Co-axial', + 'id': 'SPDIF_COAX', + 'name': 'D2', + 'nameable': True, + 'preferred_order': 3, + 'ui_selectable': False, + }), + dict({ + 'default_name': 'D1', + 'description': 'Digital Optical', + 'description_locale': 'Digital Optical', + 'id': 'SPDIF_TOSLINK', + 'name': 'D1', + 'nameable': True, + 'preferred_order': 2, + 'ui_selectable': False, + }), + dict({ + 'default_name': 'Media Library', + 'description': 'Media Player', + 'description_locale': 'Media Player', + 'id': 'MEDIA_PLAYER', + 'name': 'Media Library', + 'nameable': False, + 'preferred_order': 10, + 'ui_selectable': True, + }), + dict({ + 'default_name': 'AirPlay', + 'description': 'AirPlay', + 'description_locale': 'AirPlay', + 'id': 'AIRPLAY', + 'name': 'AirPlay', + 'nameable': False, + 'preferred_order': 11, + 'ui_selectable': True, + }), + dict({ + 'default_name': 'Spotify', + 'description': 'Spotify', + 'description_locale': 'Spotify', + 'id': 'SPOTIFY', + 'name': 'Spotify', + 'nameable': False, + 'preferred_order': 6, + 'ui_selectable': True, + }), + dict({ + 'default_name': 'Chromecast built-in', + 'description': 'Chromecast built-in', + 'description_locale': 'Chromecast built-in', + 'id': 'CAST', + 'name': 'Chromecast built-in', + 'nameable': False, + 'preferred_order': 8, + 'ui_selectable': True, + }), + dict({ + 'default_name': 'Roon Ready', + 'description': 'Roon Ready', + 'description_locale': 'Roon Ready', + 'id': 'ROON', + 'name': 'Roon Ready', + 'nameable': False, + 'preferred_order': 5, + 'ui_selectable': False, + }), + dict({ + 'default_name': 'TIDAL Connect', + 'description': 'TIDAL', + 'description_locale': 'TIDAL', + 'id': 'TIDAL', + 'name': 'TIDAL Connect', + 'nameable': False, + 'preferred_order': 7, + 'ui_selectable': False, + }), + ]), + 'update': dict({ + 'early_update': False, + 'update_available': False, + 'updating': False, + }), + }) +# --- diff --git a/tests/components/cambridge_audio/snapshots/test_init.ambr b/tests/components/cambridge_audio/snapshots/test_init.ambr new file mode 100644 index 00000000000..64182ee2188 --- /dev/null +++ b/tests/components/cambridge_audio/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://192.168.20.218', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'cambridge_audio', + '0020c2d8', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Cambridge Audio', + 'model': 'CXNv2', + 'model_id': None, + 'name': 'Cambridge Audio CXNv2', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '0020c2d8', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- \ No newline at end of file diff --git a/tests/components/cambridge_audio/snapshots/test_select.ambr b/tests/components/cambridge_audio/snapshots/test_select.ambr new file mode 100644 index 00000000000..b40c8a8d5c4 --- /dev/null +++ b/tests/components/cambridge_audio/snapshots/test_select.ambr @@ -0,0 +1,115 @@ +# serializer version: 1 +# 
name: test_all_entities[select.cambridge_audio_cxnv2_audio_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Speaker A', + 'Speaker B', + 'Headphones', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.cambridge_audio_cxnv2_audio_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Audio output', + 'platform': 'cambridge_audio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'audio_output', + 'unique_id': '0020c2d8-audio_output', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[select.cambridge_audio_cxnv2_audio_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cambridge Audio CXNv2 Audio output', + 'options': list([ + 'Speaker A', + 'Speaker B', + 'Headphones', + ]), + }), + 'context': , + 'entity_id': 'select.cambridge_audio_cxnv2_audio_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[select.cambridge_audio_cxnv2_display_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'bright', + 'dim', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.cambridge_audio_cxnv2_display_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display brightness', + 'platform': 'cambridge_audio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'display_brightness', + 'unique_id': '0020c2d8-display_brightness', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[select.cambridge_audio_cxnv2_display_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cambridge Audio CXNv2 Display brightness', + 'options': list([ + 'bright', + 'dim', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.cambridge_audio_cxnv2_display_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'bright', + }) +# --- diff --git a/tests/components/cambridge_audio/snapshots/test_switch.ambr b/tests/components/cambridge_audio/snapshots/test_switch.ambr new file mode 100644 index 00000000000..9bfcd7c6da7 --- /dev/null +++ b/tests/components/cambridge_audio/snapshots/test_switch.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_all_entities[switch.cambridge_audio_cxnv2_early_update-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.cambridge_audio_cxnv2_early_update', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Early 
update', + 'platform': 'cambridge_audio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'early_update', + 'unique_id': '0020c2d8-early_update', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.cambridge_audio_cxnv2_early_update-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cambridge Audio CXNv2 Early update', + }), + 'context': , + 'entity_id': 'switch.cambridge_audio_cxnv2_early_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[switch.cambridge_audio_cxnv2_pre_amp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.cambridge_audio_cxnv2_pre_amp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pre-Amp', + 'platform': 'cambridge_audio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pre_amp', + 'unique_id': '0020c2d8-pre_amp', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.cambridge_audio_cxnv2_pre_amp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cambridge Audio CXNv2 Pre-Amp', + }), + 'context': , + 'entity_id': 'switch.cambridge_audio_cxnv2_pre_amp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/cambridge_audio/test_config_flow.py b/tests/components/cambridge_audio/test_config_flow.py new file mode 100644 index 00000000000..8d01db6e015 --- /dev/null +++ b/tests/components/cambridge_audio/test_config_flow.py @@ -0,0 +1,246 @@ +"""Tests for the Cambridge Audio config flow.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock + +from aiostreammagic import StreamMagicError + +from homeassistant.components.cambridge_audio.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF, ConfigFlowResult +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +ZEROCONF_DISCOVERY = ZeroconfServiceInfo( + ip_address=ip_address("192.168.20.218"), + ip_addresses=[ip_address("192.168.20.218")], + hostname="cambridge_CXNv2.local.", + name="cambridge_CXNv2._stream-magic._tcp.local.", + port=80, + type="_stream-magic._tcp.local.", + properties={ + "serial": "0020c2d8", + "hcv": "3764", + "software": "v022-a-151+a", + "model": "CXNv2", + "udn": "02680b5c-1320-4d54-9f7c-3cfe915ad4c3", + }, +) + + +async def test_full_flow( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.20.218"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Cambridge Audio CXNv2" 
+ assert result["data"] == { + CONF_HOST: "192.168.20.218", + } + assert result["result"].unique_id == "0020c2d8" + + +async def test_flow_errors( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test flow errors.""" + mock_stream_magic_client.connect.side_effect = StreamMagicError + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.20.218"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_stream_magic_client.connect.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.20.218"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.20.218"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_zeroconf_flow( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test zeroconf flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Cambridge Audio CXNv2" + assert result["data"] == { + CONF_HOST: "192.168.20.218", + } + assert result["result"].unique_id == "0020c2d8" + + +async def test_zeroconf_flow_errors( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test zeroconf flow.""" + mock_stream_magic_client.connect.side_effect = StreamMagicError + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + mock_stream_magic_client.connect.side_effect = None + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Cambridge Audio CXNv2" + assert result["data"] == { + CONF_HOST: "192.168.20.218", + } + assert result["result"].unique_id == "0020c2d8" + + +async def test_zeroconf_duplicate( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_setup_entry: AsyncMock, + 
mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def _start_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure" + + return await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + {CONF_HOST: "192.168.20.219"}, + ) + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + + result = await _start_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.data == { + CONF_HOST: "192.168.20.219", + } + + +async def test_reconfigure_unique_id_mismatch( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure reconfigure flow aborts when the device changes.""" + mock_stream_magic_client.info.unit_id = "different_udn" + + result = await _start_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_device" diff --git a/tests/components/cambridge_audio/test_diagnostics.py b/tests/components/cambridge_audio/test_diagnostics.py new file mode 100644 index 00000000000..9c1a09c6318 --- /dev/null +++ b/tests/components/cambridge_audio/test_diagnostics.py @@ -0,0 +1,29 @@ +"""Tests for the diagnostics data provided by the Cambridge Audio integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + assert result == snapshot diff --git a/tests/components/cambridge_audio/test_init.py b/tests/components/cambridge_audio/test_init.py new file mode 100644 index 00000000000..a058f7c8b6c --- /dev/null +++ b/tests/components/cambridge_audio/test_init.py @@ -0,0 +1,67 @@ +"""Tests for the Cambridge Audio integration.""" + +from unittest.mock import AsyncMock, Mock + +from aiostreammagic import StreamMagicError +from aiostreammagic.models import CallbackType +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.cambridge_audio.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import mock_state_update, setup_integration + +from tests.common import MockConfigEntry + + +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test the Cambridge Audio configuration entry not ready.""" + mock_stream_magic_client.connect = AsyncMock(side_effect=StreamMagicError()) + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + mock_stream_magic_client.connect = AsyncMock(return_value=True) + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot + + +async def test_disconnect_reconnect_log( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test logging on disconnect and reconnect.""" + await setup_integration(hass, mock_config_entry) + + mock_stream_magic_client.is_connected = Mock(return_value=False) + await mock_state_update(mock_stream_magic_client, CallbackType.CONNECTION) + assert "Disconnected from device at 192.168.20.218" in caplog.text + + mock_stream_magic_client.is_connected = Mock(return_value=True) + await mock_state_update(mock_stream_magic_client, CallbackType.CONNECTION) + assert "Reconnected to device at 192.168.20.218" in caplog.text diff --git a/tests/components/cambridge_audio/test_media_player.py b/tests/components/cambridge_audio/test_media_player.py new file mode 100644 index 00000000000..bb2ccd1aec4 --- /dev/null +++ b/tests/components/cambridge_audio/test_media_player.py @@ -0,0 +1,491 @@ +"""Tests for the Cambridge Audio integration.""" + +from unittest.mock import AsyncMock + 
+from aiostreammagic import ( + RepeatMode as CambridgeRepeatMode, + ShuffleMode, + TransportControl, +) +import pytest + +from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, + DOMAIN as MP_DOMAIN, + SERVICE_PLAY_MEDIA, + MediaPlayerEntityFeature, + RepeatMode, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_SUPPORTED_FEATURES, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_SEEK, + SERVICE_MEDIA_STOP, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + STATE_BUFFERING, + STATE_IDLE, + STATE_OFF, + STATE_ON, + STATE_PAUSED, + STATE_PLAYING, + STATE_STANDBY, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from . import mock_state_update, setup_integration +from .const import ENTITY_ID + +from tests.common import MockConfigEntry + + +async def test_entity_supported_features( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test entity attributes.""" + await setup_integration(hass, mock_config_entry) + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + attrs = state.attributes + + # Ensure volume isn't available when pre-amp is disabled + assert not mock_stream_magic_client.state.pre_amp_mode + assert ( + MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_STEP + | MediaPlayerEntityFeature.VOLUME_MUTE + not in attrs[ATTR_SUPPORTED_FEATURES] + ) + + # Check for basic media controls + assert { + TransportControl.PLAY_PAUSE, + TransportControl.TRACK_NEXT, + TransportControl.TRACK_PREVIOUS, + }.issubset(mock_stream_magic_client.now_playing.controls) + assert ( + MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.PREVIOUS_TRACK + in attrs[ATTR_SUPPORTED_FEATURES] + ) + assert ( + MediaPlayerEntityFeature.SHUFFLE_SET + | MediaPlayerEntityFeature.REPEAT_SET + | MediaPlayerEntityFeature.SEEK + not in attrs[ATTR_SUPPORTED_FEATURES] + ) + + mock_stream_magic_client.now_playing.controls = [ + TransportControl.TOGGLE_REPEAT, + TransportControl.TOGGLE_SHUFFLE, + TransportControl.SEEK, + ] + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + attrs = state.attributes + + assert ( + MediaPlayerEntityFeature.SHUFFLE_SET + | MediaPlayerEntityFeature.REPEAT_SET + | MediaPlayerEntityFeature.SEEK + in attrs[ATTR_SUPPORTED_FEATURES] + ) + + mock_stream_magic_client.state.pre_amp_mode = True + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + attrs = state.attributes + assert ( + MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_STEP + | MediaPlayerEntityFeature.VOLUME_MUTE + in attrs[ATTR_SUPPORTED_FEATURES] + ) + + +@pytest.mark.parametrize( + ("power_state", "play_state", "media_player_state"), + [ + (True, "NETWORK", STATE_STANDBY), + (False, "NETWORK", STATE_STANDBY), + (False, "play", STATE_OFF), + (True, "play", STATE_PLAYING), + (True, "pause", STATE_PAUSED), + (True, 
"connecting", STATE_BUFFERING), + (True, "stop", STATE_IDLE), + (True, "ready", STATE_IDLE), + (True, "other", STATE_ON), + ], +) +async def test_entity_state( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + power_state: bool, + play_state: str, + media_player_state: str, +) -> None: + """Test media player state.""" + await setup_integration(hass, mock_config_entry) + mock_stream_magic_client.state.power = power_state + mock_stream_magic_client.play_state.state = play_state + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.state == media_player_state + + +async def test_media_play_pause_stop( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test media next previous track service.""" + await setup_integration(hass, mock_config_entry) + + data = {ATTR_ENTITY_ID: ENTITY_ID} + + # Test for play/pause command when separate play and pause controls are unavailable + await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PAUSE, data, True) + mock_stream_magic_client.play_pause.assert_called_once() + + await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PLAY, data, True) + assert mock_stream_magic_client.play_pause.call_count == 2 + + # Test for separate play and pause controls + mock_stream_magic_client.now_playing.controls = [ + TransportControl.PLAY, + TransportControl.PAUSE, + TransportControl.STOP, + ] + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PAUSE, data, True) + mock_stream_magic_client.pause.assert_called_once() + + await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PLAY, data, True) + mock_stream_magic_client.play.assert_called_once() + + await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_STOP, data, True) + mock_stream_magic_client.stop.assert_called_once() + + +async def test_media_next_previous_track( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test media next previous track service.""" + await setup_integration(hass, mock_config_entry) + + data = {ATTR_ENTITY_ID: ENTITY_ID} + + await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_NEXT_TRACK, data, True) + + mock_stream_magic_client.next_track.assert_called_once() + + await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, data, True) + + mock_stream_magic_client.previous_track.assert_called_once() + + +async def test_shuffle_repeat_set( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test shuffle and repeat set service.""" + await setup_integration(hass, mock_config_entry) + + mock_stream_magic_client.now_playing.controls = [ + TransportControl.TOGGLE_SHUFFLE, + TransportControl.TOGGLE_REPEAT, + ] + + # Test shuffle + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SHUFFLE_SET, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_SHUFFLE: False}, + ) + + mock_stream_magic_client.set_shuffle.assert_called_with(ShuffleMode.OFF) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SHUFFLE_SET, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_SHUFFLE: True}, + ) + + mock_stream_magic_client.set_shuffle.assert_called_with(ShuffleMode.ALL) + + # Test repeat + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + {ATTR_ENTITY_ID: ENTITY_ID, 
ATTR_MEDIA_REPEAT: RepeatMode.OFF}, + ) + + mock_stream_magic_client.set_repeat.assert_called_with(CambridgeRepeatMode.OFF) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_REPEAT: RepeatMode.ALL}, + ) + + mock_stream_magic_client.set_repeat.assert_called_with(CambridgeRepeatMode.ALL) + + +async def test_shuffle_repeat_get( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test shuffle and repeat get service.""" + await setup_integration(hass, mock_config_entry) + + mock_stream_magic_client.play_state.mode_shuffle = None + + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_MEDIA_SHUFFLE] is False + + mock_stream_magic_client.play_state.mode_shuffle = ShuffleMode.ALL + + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_MEDIA_SHUFFLE] is True + + mock_stream_magic_client.play_state.mode_repeat = CambridgeRepeatMode.ALL + + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ALL + + +async def test_power_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test power service.""" + await setup_integration(hass, mock_config_entry) + + data = {ATTR_ENTITY_ID: ENTITY_ID} + + await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_ON, data, True) + + mock_stream_magic_client.power_on.assert_called_once() + + await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_OFF, data, True) + + mock_stream_magic_client.power_off.assert_called_once() + + +async def test_media_seek( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test media seek service.""" + await setup_integration(hass, mock_config_entry) + + mock_stream_magic_client.now_playing.controls = [ + TransportControl.SEEK, + ] + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_MEDIA_SEEK, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_SEEK_POSITION: 100}, + ) + + mock_stream_magic_client.media_seek.assert_called_once_with(100) + + +async def test_media_volume( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test volume service.""" + await setup_integration(hass, mock_config_entry) + + mock_stream_magic_client.state.pre_amp_mode = True + + # Test volume up + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: ENTITY_ID}, + ) + + mock_stream_magic_client.volume_up.assert_called_once() + + # Test volume down + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: ENTITY_ID}, + ) + + mock_stream_magic_client.volume_down.assert_called_once() + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_VOLUME_LEVEL: 0.30}, + ) + + mock_stream_magic_client.set_volume.assert_called_once_with(30) + + +async def test_play_media_preset_item_id( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test playing media with a preset item id.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + 
ATTR_MEDIA_CONTENT_TYPE: "preset", + ATTR_MEDIA_CONTENT_ID: "1", + }, + blocking=True, + ) + assert mock_stream_magic_client.recall_preset.call_count == 1 + assert mock_stream_magic_client.recall_preset.call_args_list[0].args[0] == 1 + + with pytest.raises(ServiceValidationError, match="Missing preset for media_id: 10"): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "preset", + ATTR_MEDIA_CONTENT_ID: "10", + }, + blocking=True, + ) + + with pytest.raises( + ServiceValidationError, match="Preset must be an integer, got: UNKNOWN_PRESET" + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "preset", + ATTR_MEDIA_CONTENT_ID: "UNKNOWN_PRESET", + }, + blocking=True, + ) + + +async def test_play_media_airable_radio_id( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test playing media with an airable radio id.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "airable", + ATTR_MEDIA_CONTENT_ID: "12345678", + }, + blocking=True, + ) + assert mock_stream_magic_client.play_radio_airable.call_count == 1 + call_args = mock_stream_magic_client.play_radio_airable.call_args_list[0].args + assert call_args[0] == "Radio" + assert call_args[1] == 12345678 + + +async def test_play_media_internet_radio( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test playing media with a url.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "internet_radio", + ATTR_MEDIA_CONTENT_ID: "https://example.com", + }, + blocking=True, + ) + assert mock_stream_magic_client.play_radio_url.call_count == 1 + call_args = mock_stream_magic_client.play_radio_url.call_args_list[0].args + assert call_args[0] == "Radio" + assert call_args[1] == "https://example.com" + + +async def test_play_media_unknown_type( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test playing media with an unsupported content type.""" + await setup_integration(hass, mock_config_entry) + + with pytest.raises( + HomeAssistantError, + match="Unsupported media type for Cambridge Audio device: unsupported_content_type", + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "unsupported_content_type", + ATTR_MEDIA_CONTENT_ID: "1", + }, + blocking=True, + ) diff --git a/tests/components/cambridge_audio/test_select.py b/tests/components/cambridge_audio/test_select.py new file mode 100644 index 00000000000..473c4027163 --- /dev/null +++ b/tests/components/cambridge_audio/test_select.py @@ -0,0 +1,64 @@ +"""Tests for the Cambridge Audio select platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.cambridge_audio.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.cambridge_audio_cxnv2_display_brightness", + ATTR_OPTION: "dim", + }, + blocking=True, + ) + mock_stream_magic_client.set_display_brightness.assert_called_once_with("dim") + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.cambridge_audio_cxnv2_audio_output", + ATTR_OPTION: "Speaker A", + }, + blocking=True, + ) + mock_stream_magic_client.set_audio_output.assert_called_once_with("speaker_a") diff --git a/tests/components/cambridge_audio/test_switch.py b/tests/components/cambridge_audio/test_switch.py new file mode 100644 index 00000000000..3192f198d1f --- /dev/null +++ b/tests/components/cambridge_audio/test_switch.py @@ -0,0 +1,60 @@ +"""Tests for the Cambridge Audio switch platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_ON +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.cambridge_audio.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.cambridge_audio_cxnv2_early_update", + }, + blocking=True, + ) + mock_stream_magic_client.set_early_update.assert_called_once_with(True) + mock_stream_magic_client.set_early_update.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: "switch.cambridge_audio_cxnv2_early_update", + }, + blocking=True, + ) + mock_stream_magic_client.set_early_update.assert_called_once_with(False) diff --git a/tests/components/camera/common.py b/tests/components/camera/common.py index 9cacf85d907..19ac2cc168b 100644 --- a/tests/components/camera/common.py +++ b/tests/components/camera/common.py @@ -6,8 +6,19 @@ components. Instead call the service directly. from unittest.mock import Mock +from webrtc_models import RTCIceCandidateInit + +from homeassistant.components.camera import ( + Camera, + CameraWebRTCProvider, + WebRTCAnswer, + WebRTCSendMessage, +) +from homeassistant.core import callback + EMPTY_8_6_JPEG = b"empty_8_6" WEBRTC_ANSWER = "a=sendonly" +STREAM_SOURCE = "rtsp://127.0.0.1/stream" def mock_turbo_jpeg( @@ -22,3 +33,43 @@ def mock_turbo_jpeg( mocked_turbo_jpeg.scale_with_quality.return_value = EMPTY_8_6_JPEG mocked_turbo_jpeg.encode.return_value = EMPTY_8_6_JPEG return mocked_turbo_jpeg + + +class SomeTestProvider(CameraWebRTCProvider): + """Test provider.""" + + def __init__(self) -> None: + """Initialize the provider.""" + self._is_supported = True + + @property + def domain(self) -> str: + """Return the integration domain of the provider.""" + return "some_test" + + @callback + def async_is_supported(self, stream_source: str) -> bool: + """Determine if the provider supports the stream source.""" + return self._is_supported + + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback. + + Return value determines if the offer was handled successfully. 
+ """ + send_message(WebRTCAnswer(answer="answer")) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle the WebRTC candidate.""" + + @callback + def async_close_session(self, session_id: str) -> None: + """Close the session.""" diff --git a/tests/components/camera/conftest.py b/tests/components/camera/conftest.py index ea3d65f4864..b529ee3e9b9 100644 --- a/tests/components/camera/conftest.py +++ b/tests/components/camera/conftest.py @@ -1,18 +1,30 @@ """Test helpers for camera.""" from collections.abc import AsyncGenerator, Generator -from unittest.mock import PropertyMock, patch +from unittest.mock import AsyncMock, Mock, PropertyMock, patch import pytest +from webrtc_models import RTCIceCandidateInit from homeassistant.components import camera from homeassistant.components.camera.const import StreamType +from homeassistant.components.camera.webrtc import WebRTCAnswer, WebRTCSendMessage +from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.setup import async_setup_component -from .common import WEBRTC_ANSWER +from .common import STREAM_SOURCE, WEBRTC_ANSWER, SomeTestProvider + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, + setup_test_component_platform, +) @pytest.fixture(autouse=True) @@ -50,28 +62,35 @@ async def mock_camera_fixture(hass: HomeAssistant) -> AsyncGenerator[None]: def mock_camera_hls_fixture(mock_camera: None) -> Generator[None]: """Initialize a demo camera platform with HLS.""" with patch( - "homeassistant.components.camera.Camera.frontend_stream_type", - new_callable=PropertyMock(return_value=StreamType.HLS), + "homeassistant.components.camera.Camera.camera_capabilities", + new_callable=PropertyMock( + return_value=camera.CameraCapabilities({StreamType.HLS}) + ), ): yield -@pytest.fixture(name="mock_camera_web_rtc") -async def mock_camera_web_rtc_fixture(hass: HomeAssistant) -> AsyncGenerator[None]: +@pytest.fixture +async def mock_camera_webrtc( + mock_camera: None, +) -> AsyncGenerator[None]: """Initialize a demo camera platform with WebRTC.""" - assert await async_setup_component( - hass, "camera", {camera.DOMAIN: {"platform": "demo"}} - ) - await hass.async_block_till_done() + + async def async_handle_async_webrtc_offer( + offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: + send_message(WebRTCAnswer(WEBRTC_ANSWER)) with ( patch( - "homeassistant.components.camera.Camera.frontend_stream_type", - new_callable=PropertyMock(return_value=StreamType.WEB_RTC), + "homeassistant.components.camera.Camera.async_handle_async_webrtc_offer", + side_effect=async_handle_async_webrtc_offer, ), patch( - "homeassistant.components.camera.Camera.async_handle_web_rtc_offer", - return_value=WEBRTC_ANSWER, + "homeassistant.components.camera.Camera.camera_capabilities", + new_callable=PropertyMock( + return_value=camera.CameraCapabilities({StreamType.WEB_RTC}) + ), ), ): yield @@ -111,3 +130,115 @@ def mock_camera_with_no_name_fixture(mock_camera_with_device: None) -> Generator new_callable=PropertyMock(return_value=None), ): yield + + +@pytest.fixture(name="mock_stream") +async def mock_stream_fixture(hass: HomeAssistant) -> None: + """Initialize a demo camera platform with streaming.""" + assert await async_setup_component(hass, "stream", 
{"stream": {}}) + + +@pytest.fixture(name="mock_stream_source") +def mock_stream_source_fixture() -> Generator[AsyncMock]: + """Fixture to create an RTSP stream source.""" + with patch( + "homeassistant.components.camera.Camera.stream_source", + return_value=STREAM_SOURCE, + ) as mock_stream_source: + yield mock_stream_source + + +@pytest.fixture +async def mock_test_webrtc_cameras(hass: HomeAssistant) -> None: + """Initialize test WebRTC cameras with native RTC support.""" + + # Cannot use the fixture mock_camera_web_rtc as it's mocking Camera.async_handle_web_rtc_offer + # and native support is checked by verify the function "async_handle_web_rtc_offer" was + # overwritten(implemented) or not + class BaseCamera(camera.Camera): + """Base Camera.""" + + _attr_supported_features: camera.CameraEntityFeature = ( + camera.CameraEntityFeature.STREAM + ) + + async def stream_source(self) -> str | None: + return STREAM_SOURCE + + class SyncCamera(BaseCamera): + """Mock Camera with native sync WebRTC support.""" + + _attr_name = "Sync" + + async def async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None: + return WEBRTC_ANSWER + + class AsyncCamera(BaseCamera): + """Mock Camera with native async WebRTC support.""" + + _attr_name = "Async" + + async def async_handle_async_webrtc_offer( + self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: + send_message(WebRTCAnswer(WEBRTC_ANSWER)) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle a WebRTC candidate.""" + # Do nothing + + domain = "test" + + entry = MockConfigEntry(domain=domain) + entry.add_to_hass(hass) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [camera.DOMAIN] + ) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Unload test config entry.""" + await hass.config_entries.async_forward_entry_unload( + config_entry, camera.DOMAIN + ) + return True + + mock_integration( + hass, + MockModule( + domain, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + setup_test_component_platform( + hass, camera.DOMAIN, [SyncCamera(), AsyncCamera()], from_config_entry=True + ) + mock_platform(hass, f"{domain}.config_flow", Mock()) + + with mock_config_flow(domain, ConfigFlow): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + +@pytest.fixture +async def register_test_provider( + hass: HomeAssistant, +) -> AsyncGenerator[SomeTestProvider]: + """Add WebRTC test provider.""" + await async_setup_component(hass, "camera", {}) + + provider = SomeTestProvider() + unsub = camera.async_register_webrtc_provider(hass, provider) + await hass.async_block_till_done() + yield provider + unsub() diff --git a/tests/components/camera/snapshots/test_init.ambr b/tests/components/camera/snapshots/test_init.ambr new file mode 100644 index 00000000000..eae1c481cc0 --- /dev/null +++ b/tests/components/camera/snapshots/test_init.ambr @@ -0,0 +1,127 @@ +# serializer version: 1 +# name: test_record_service[/test/recording_{{ entity_id }}.mpg-/test/recording_.mpg-expected_issues1] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 
'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_record', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.record', + }), + }) +# --- +# name: test_record_service[/test/recording_{{ entity_id.entity_id }}.mpg-/test/recording_camera.demo_camera.mpg-expected_issues3] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_record', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.record', + }), + }) +# --- +# name: test_record_service[/test/recording_{{ entity_id.name }}.mpg-/test/recording_Demo camera.mpg-expected_issues2] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_record', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.record', + }), + }) +# --- +# name: test_snapshot_service[/test/snapshot_{{ entity_id }}.jpg-/test/snapshot_.jpg-expected_issues1] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_snapshot', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.snapshot', + }), + }) +# --- +# name: test_snapshot_service[/test/snapshot_{{ entity_id.entity_id }}.jpg-/test/snapshot_camera.demo_camera.jpg-expected_issues3] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_snapshot', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.snapshot', + }), + }) +# --- +# name: test_snapshot_service[/test/snapshot_{{ entity_id.name }}.jpg-/test/snapshot_Demo camera.jpg-expected_issues2] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_snapshot', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 
'camera.demo_camera', + 'service': 'camera.snapshot', + }), + }) +# --- diff --git a/tests/components/camera/test_init.py b/tests/components/camera/test_init.py index 098c321e63b..a3045e27cf1 100644 --- a/tests/components/camera/test_init.py +++ b/tests/components/camera/test_init.py @@ -1,33 +1,44 @@ """The tests for the camera component.""" -from collections.abc import Generator from http import HTTPStatus import io from types import ModuleType -from unittest.mock import AsyncMock, Mock, PropertyMock, mock_open, patch +from unittest.mock import ANY, AsyncMock, Mock, PropertyMock, mock_open, patch import pytest +from syrupy.assertion import SnapshotAssertion +from webrtc_models import RTCIceCandidateInit from homeassistant.components import camera +from homeassistant.components.camera import ( + Camera, + CameraWebRTCProvider, + WebRTCAnswer, + WebRTCSendMessage, + async_register_webrtc_provider, +) from homeassistant.components.camera.const import ( DOMAIN, PREF_ORIENTATION, PREF_PRELOAD_STREAM, + StreamType, ) +from homeassistant.components.camera.helper import get_camera_from_entity_id from homeassistant.components.websocket_api import TYPE_RESULT -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ENTITY_ID, + CONF_PLATFORM, EVENT_HOMEASSISTANT_STARTED, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from .common import EMPTY_8_6_JPEG, WEBRTC_ANSWER, mock_turbo_jpeg +from .common import EMPTY_8_6_JPEG, STREAM_SOURCE, mock_turbo_jpeg from tests.common import ( async_fire_time_changed, @@ -36,18 +47,6 @@ from tests.common import ( ) from tests.typing import ClientSessionGenerator, WebSocketGenerator -STREAM_SOURCE = "rtsp://127.0.0.1/stream" -HLS_STREAM_SOURCE = "http://127.0.0.1/example.m3u" -WEBRTC_OFFER = "v=0\r\n" - - -@pytest.fixture(name="mock_stream") -def mock_stream_fixture(hass: HomeAssistant) -> None: - """Initialize a demo camera platform with streaming.""" - assert hass.loop.run_until_complete( - async_setup_component(hass, "stream", {"stream": {}}) - ) - @pytest.fixture(name="image_mock_url") async def image_mock_url_fixture(hass: HomeAssistant) -> None: @@ -58,44 +57,6 @@ async def image_mock_url_fixture(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.fixture(name="mock_stream_source") -def mock_stream_source_fixture() -> Generator[AsyncMock]: - """Fixture to create an RTSP stream source.""" - with patch( - "homeassistant.components.camera.Camera.stream_source", - return_value=STREAM_SOURCE, - ) as mock_stream_source: - yield mock_stream_source - - -@pytest.fixture(name="mock_hls_stream_source") -async def mock_hls_stream_source_fixture() -> Generator[AsyncMock]: - """Fixture to create an HLS stream source.""" - with patch( - "homeassistant.components.camera.Camera.stream_source", - return_value=HLS_STREAM_SOURCE, - ) as mock_hls_stream_source: - yield mock_hls_stream_source - - -async def provide_web_rtc_answer(stream_source: str, offer: str, stream_id: str) -> str: - """Simulate an rtsp to webrtc provider.""" - assert stream_source == STREAM_SOURCE - assert offer == 
WEBRTC_OFFER - return WEBRTC_ANSWER - - -@pytest.fixture(name="mock_rtsp_to_web_rtc") -def mock_rtsp_to_web_rtc_fixture(hass: HomeAssistant) -> Generator[Mock]: - """Fixture that registers a mock rtsp to web_rtc provider.""" - mock_provider = Mock(side_effect=provide_web_rtc_answer) - unsub = camera.async_register_rtsp_to_web_rtc_provider( - hass, "mock_domain", mock_provider - ) - yield mock_provider - unsub() - - @pytest.mark.usefixtures("image_mock_url") async def test_get_image_from_camera(hass: HomeAssistant) -> None: """Grab an image from camera entity.""" @@ -245,7 +206,38 @@ async def test_get_image_fails(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("mock_camera") -async def test_snapshot_service(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("filename_template", "expected_filename", "expected_issues"), + [ + ( + "/test/snapshot.jpg", + "/test/snapshot.jpg", + [], + ), + ( + "/test/snapshot_{{ entity_id }}.jpg", + "/test/snapshot_.jpg", + ["deprecated_filename_template_camera.demo_camera_snapshot"], + ), + ( + "/test/snapshot_{{ entity_id.name }}.jpg", + "/test/snapshot_Demo camera.jpg", + ["deprecated_filename_template_camera.demo_camera_snapshot"], + ), + ( + "/test/snapshot_{{ entity_id.entity_id }}.jpg", + "/test/snapshot_camera.demo_camera.jpg", + ["deprecated_filename_template_camera.demo_camera_snapshot"], + ), + ], +) +async def test_snapshot_service( + hass: HomeAssistant, + filename_template: str, + expected_filename: str, + expected_issues: list, + snapshot: SnapshotAssertion, +) -> None: """Test snapshot service.""" mopen = mock_open() @@ -261,16 +253,25 @@ async def test_snapshot_service(hass: HomeAssistant) -> None: camera.SERVICE_SNAPSHOT, { ATTR_ENTITY_ID: "camera.demo_camera", - camera.ATTR_FILENAME: "/test/snapshot.jpg", + camera.ATTR_FILENAME: filename_template, }, blocking=True, ) + mopen.assert_called_once_with(expected_filename, "wb") + mock_write = mopen().write assert len(mock_write.mock_calls) == 1 assert mock_write.mock_calls[0][1][0] == b"Test" + issue_registry = ir.async_get(hass) + assert len(issue_registry.issues) == 1 + len(expected_issues) + for expected_issue in expected_issues: + issue = issue_registry.async_get_issue(DOMAIN, expected_issue) + assert issue is not None + assert issue == snapshot + @pytest.mark.usefixtures("mock_camera") async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: @@ -282,7 +283,10 @@ async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: patch( "homeassistant.components.camera.os.makedirs", ), - pytest.raises(HomeAssistantError, match="/test/snapshot.jpg"), + pytest.raises( + HomeAssistantError, + match="Cannot write `/test/snapshot.jpg`, no access to path", + ), ): await hass.services.async_call( camera.DOMAIN, @@ -295,6 +299,28 @@ async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: ) +@pytest.mark.usefixtures("mock_camera") +async def test_snapshot_service_os_error( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test snapshot service with os error.""" + with ( + patch.object(hass.config, "is_allowed_path", return_value=True), + patch("homeassistant.components.camera.os.makedirs", side_effect=OSError), + ): + await hass.services.async_call( + camera.DOMAIN, + camera.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "camera.demo_camera", + camera.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) + + assert "Can't write image to file:" in caplog.text + + @pytest.mark.usefixtures("mock_camera", 
"mock_stream") async def test_websocket_stream_no_source( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -576,7 +602,34 @@ async def test_record_service_invalid_path(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("mock_camera", "mock_stream") -async def test_record_service(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("filename_template", "expected_filename", "expected_issues"), + [ + ("/test/recording.mpg", "/test/recording.mpg", []), + ( + "/test/recording_{{ entity_id }}.mpg", + "/test/recording_.mpg", + ["deprecated_filename_template_camera.demo_camera_record"], + ), + ( + "/test/recording_{{ entity_id.name }}.mpg", + "/test/recording_Demo camera.mpg", + ["deprecated_filename_template_camera.demo_camera_record"], + ), + ( + "/test/recording_{{ entity_id.entity_id }}.mpg", + "/test/recording_camera.demo_camera.mpg", + ["deprecated_filename_template_camera.demo_camera_record"], + ), + ], +) +async def test_record_service( + hass: HomeAssistant, + filename_template: str, + expected_filename: str, + expected_issues: list, + snapshot: SnapshotAssertion, +) -> None: """Test record service.""" with ( patch( @@ -592,12 +645,24 @@ async def test_record_service(hass: HomeAssistant) -> None: await hass.services.async_call( camera.DOMAIN, camera.SERVICE_RECORD, - {ATTR_ENTITY_ID: "camera.demo_camera", camera.CONF_FILENAME: "/my/path"}, + { + ATTR_ENTITY_ID: "camera.demo_camera", + camera.ATTR_FILENAME: filename_template, + }, blocking=True, ) # So long as we call stream.record, the rest should be covered # by those tests. - assert mock_record.called + mock_record.assert_called_once_with( + ANY, expected_filename, duration=30, lookback=0 + ) + + issue_registry = ir.async_get(hass) + assert len(issue_registry.issues) == 1 + len(expected_issues) + for expected_issue in expected_issues: + issue = issue_registry.async_get_issue(DOMAIN, expected_issue) + assert issue is not None + assert issue == snapshot @pytest.mark.usefixtures("mock_camera") @@ -619,154 +684,12 @@ async def test_camera_proxy_stream(hass_client: ClientSessionGenerator) -> None: assert response.status == HTTPStatus.BAD_GATEWAY -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test initiating a WebRTC stream with offer and answer.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert response["success"] - assert response["result"]["answer"] == WEBRTC_ANSWER - - -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer_invalid_entity( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC with a camera entity that does not exist.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.does_not_exist", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - - -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer_missing_offer( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC stream with missing required 
fields.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"]["code"] == "invalid_format" - - -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer_failure( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC stream that fails handling the offer.""" - client = await hass_ws_client(hass) - - with patch( - "homeassistant.components.camera.Camera.async_handle_web_rtc_offer", - side_effect=HomeAssistantError("offer failed"), - ): - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"]["code"] == "web_rtc_offer_failed" - assert response["error"]["message"] == "offer failed" - - -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer_timeout( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC stream with timeout handling the offer.""" - client = await hass_ws_client(hass) - - with patch( - "homeassistant.components.camera.Camera.async_handle_web_rtc_offer", - side_effect=TimeoutError(), - ): - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"]["code"] == "web_rtc_offer_failed" - assert response["error"]["message"] == "Timeout handling WebRTC offer" - - -@pytest.mark.usefixtures("mock_camera") -async def test_websocket_web_rtc_offer_invalid_stream_type( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC initiating for a camera with a different stream_type.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"]["code"] == "web_rtc_offer_failed" - - @pytest.mark.usefixtures("mock_camera") async def test_state_streaming(hass: HomeAssistant) -> None: """Camera state.""" demo_camera = hass.states.get("camera.demo_camera") assert demo_camera is not None - assert demo_camera.state == camera.STATE_STREAMING + assert demo_camera.state == camera.CameraState.STREAMING @pytest.mark.usefixtures("mock_camera", "mock_stream") @@ -819,145 +742,7 @@ async def test_stream_unavailable( demo_camera = hass.states.get("camera.demo_camera") assert demo_camera is not None - assert demo_camera.state == camera.STATE_STREAMING - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_rtsp_to_web_rtc_offer( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - mock_rtsp_to_web_rtc: Mock, -) -> None: - """Test creating a web_rtc offer from an rstp provider.""" - client = await hass_ws_client(hass) - await client.send_json( - { - 
"id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response.get("id") == 9 - assert response.get("type") == TYPE_RESULT - assert response.get("success") - assert "result" in response - assert response["result"] == {"answer": WEBRTC_ANSWER} - - assert mock_rtsp_to_web_rtc.called - - -@pytest.mark.usefixtures( - "mock_camera", - "mock_hls_stream_source", # Not an RTSP stream source - "mock_rtsp_to_web_rtc", -) -async def test_unsupported_rtsp_to_web_rtc_stream_type( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test rtsp-to-webrtc is not registered for non-RTSP streams.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 10, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response.get("id") == 10 - assert response.get("type") == TYPE_RESULT - assert "success" in response - assert not response["success"] - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_rtsp_to_web_rtc_provider_unregistered( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test creating a web_rtc offer from an rstp provider.""" - mock_provider = Mock(side_effect=provide_web_rtc_answer) - unsub = camera.async_register_rtsp_to_web_rtc_provider( - hass, "mock_domain", mock_provider - ) - - client = await hass_ws_client(hass) - - # Registered provider can handle the WebRTC offer - await client.send_json( - { - "id": 11, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["id"] == 11 - assert response["type"] == TYPE_RESULT - assert response["success"] - assert response["result"]["answer"] == WEBRTC_ANSWER - - assert mock_provider.called - mock_provider.reset_mock() - - # Unregister provider, then verify the WebRTC offer cannot be handled - unsub() - await client.send_json( - { - "id": 12, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response.get("id") == 12 - assert response.get("type") == TYPE_RESULT - assert "success" in response - assert not response["success"] - - assert not mock_provider.called - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_rtsp_to_web_rtc_offer_not_accepted( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test a provider that can't satisfy the rtsp to webrtc offer.""" - - async def provide_none(stream_source: str, offer: str) -> str: - """Simulate a provider that can't accept the offer.""" - return None - - mock_provider = Mock(side_effect=provide_none) - unsub = camera.async_register_rtsp_to_web_rtc_provider( - hass, "mock_domain", mock_provider - ) - client = await hass_ws_client(hass) - - # Registered provider can handle the WebRTC offer - await client.send_json( - { - "id": 11, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["id"] == 11 - assert response.get("type") == TYPE_RESULT - assert "success" in response - assert not response["success"] - - assert mock_provider.called - - unsub() + assert demo_camera.state == camera.CameraState.STREAMING 
@pytest.mark.usefixtures("mock_camera") @@ -1017,7 +802,7 @@ async def test_use_stream_for_stills( @pytest.mark.parametrize( "module", - [camera, camera.const], + [camera], ) def test_all(module: ModuleType) -> None: """Test module.__all__ is correctly set.""" @@ -1026,55 +811,19 @@ def test_all(module: ModuleType) -> None: @pytest.mark.parametrize( "enum", - list(camera.const.StreamType), + list(camera.const.CameraState), ) @pytest.mark.parametrize( "module", - [camera, camera.const], + [camera], ) -def test_deprecated_stream_type_constants( +def test_deprecated_state_constants( caplog: pytest.LogCaptureFixture, enum: camera.const.StreamType, module: ModuleType, ) -> None: """Test deprecated stream type constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, "STREAM_TYPE_", "2025.1" - ) - - -@pytest.mark.parametrize( - "entity_feature", - list(camera.CameraEntityFeature), -) -def test_deprecated_support_constants( - caplog: pytest.LogCaptureFixture, - entity_feature: camera.CameraEntityFeature, -) -> None: - """Test deprecated support constants.""" - import_and_test_deprecated_constant_enum( - caplog, camera, entity_feature, "SUPPORT_", "2025.1" - ) - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockCamera(camera.Camera): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockCamera() - assert entity.supported_features_compat is camera.CameraEntityFeature(1) - assert "MockCamera" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "CameraEntityFeature.ON_OFF" in caplog.text - caplog.clear() - assert entity.supported_features_compat is camera.CameraEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text + import_and_test_deprecated_constant_enum(caplog, module, enum, "STATE_", "2025.10") @pytest.mark.usefixtures("mock_camera") @@ -1094,3 +843,186 @@ async def test_entity_picture_url_changes_on_token_update(hass: HomeAssistant) - new_entity_picture = camera_state.attributes["entity_picture"] assert new_entity_picture != original_picture assert "token=" in new_entity_picture + + +async def _test_capabilities( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entity_id: str, + expected_stream_types: set[StreamType], + expected_stream_types_with_webrtc_provider: set[StreamType], +) -> None: + """Test camera capabilities.""" + await async_setup_component(hass, "camera", {}) + await hass.async_block_till_done() + + async def test(expected_types: set[StreamType]) -> None: + camera_obj = get_camera_from_entity_id(hass, entity_id) + capabilities = camera_obj.camera_capabilities + assert capabilities == camera.CameraCapabilities(expected_types) + + # Request capabilities through WebSocket + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "camera/capabilities", "entity_id": entity_id} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == {"frontend_stream_types": ANY} + assert sorted(msg["result"]["frontend_stream_types"]) == sorted(expected_types) + + await test(expected_stream_types) + + # Test with WebRTC provider + + class SomeTestProvider(CameraWebRTCProvider): + """Test provider.""" + + @property + def domain(self) -> str: + """Return 
domain.""" + return "test" + + @callback + def async_is_supported(self, stream_source: str) -> bool: + """Determine if the provider supports the stream source.""" + return True + + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback.""" + send_message(WebRTCAnswer("answer")) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle the WebRTC candidate.""" + + provider = SomeTestProvider() + async_register_webrtc_provider(hass, provider) + await hass.async_block_till_done() + await test(expected_stream_types_with_webrtc_provider) + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_camera_capabilities_hls( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test HLS camera capabilities.""" + await _test_capabilities( + hass, + hass_ws_client, + "camera.demo_camera", + {StreamType.HLS}, + {StreamType.HLS, StreamType.WEB_RTC}, + ) + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_camera_capabilities_webrtc( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test WebRTC camera capabilities.""" + + await _test_capabilities( + hass, hass_ws_client, "camera.sync", {StreamType.WEB_RTC}, {StreamType.WEB_RTC} + ) + + +@pytest.mark.parametrize( + ("entity_id", "expect_native_async_webrtc"), + [("camera.sync", False), ("camera.async", True)], +) +@pytest.mark.usefixtures("mock_test_webrtc_cameras", "register_test_provider") +async def test_webrtc_provider_not_added_for_native_webrtc( + hass: HomeAssistant, entity_id: str, expect_native_async_webrtc: bool +) -> None: + """Test that a WebRTC provider is not added to a camera when the camera has native WebRTC support.""" + camera_obj = get_camera_from_entity_id(hass, entity_id) + assert camera_obj + assert camera_obj._webrtc_provider is None + assert camera_obj._supports_native_sync_webrtc is not expect_native_async_webrtc + assert camera_obj._supports_native_async_webrtc is expect_native_async_webrtc + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_camera_capabilities_changing_non_native_support( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test WebRTC camera capabilities.""" + cam = get_camera_from_entity_id(hass, "camera.demo_camera") + assert ( + cam.supported_features + == camera.CameraEntityFeature.ON_OFF | camera.CameraEntityFeature.STREAM + ) + + await _test_capabilities( + hass, + hass_ws_client, + cam.entity_id, + {StreamType.HLS}, + {StreamType.HLS, StreamType.WEB_RTC}, + ) + + cam._attr_supported_features = camera.CameraEntityFeature(0) + cam.async_write_ha_state() + await hass.async_block_till_done() + + await _test_capabilities(hass, hass_ws_client, cam.entity_id, set(), set()) + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +@pytest.mark.parametrize(("entity_id"), ["camera.sync", "camera.async"]) +async def test_camera_capabilities_changing_native_support( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entity_id: str, +) -> None: + """Test WebRTC camera capabilities.""" + cam = get_camera_from_entity_id(hass, entity_id) + assert cam.supported_features == camera.CameraEntityFeature.STREAM + + await _test_capabilities( + hass, hass_ws_client, cam.entity_id, {StreamType.WEB_RTC}, {StreamType.WEB_RTC} + ) + 
+ cam._attr_supported_features = camera.CameraEntityFeature(0) + cam.async_write_ha_state() + await hass.async_block_till_done() + + await _test_capabilities(hass, hass_ws_client, cam.entity_id, set(), set()) + + +@pytest.mark.usefixtures("enable_custom_integrations") +async def test_deprecated_frontend_stream_type_logs( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test using (_attr_)frontend_stream_type will log.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) + await hass.async_block_till_done() + + for entity_id in ( + "camera.property_frontend_stream_type", + "camera.attr_frontend_stream_type", + ): + camera_obj = get_camera_from_entity_id(hass, entity_id) + assert camera_obj.frontend_stream_type == StreamType.WEB_RTC + + assert ( + "Detected that custom integration 'test' is overwriting the 'frontend_stream_type' property in the PropertyFrontendStreamTypeCamera class, which is deprecated and will be removed in Home Assistant 2025.6," + ) in caplog.text + assert ( + "Detected that custom integration 'test' is setting the '_attr_frontend_stream_type' attribute in the AttrFrontendStreamTypeCamera class, which is deprecated and will be removed in Home Assistant 2025.6," + ) in caplog.text diff --git a/tests/components/camera/test_media_source.py b/tests/components/camera/test_media_source.py index 0780ecc2a9c..bd92010d242 100644 --- a/tests/components/camera/test_media_source.py +++ b/tests/components/camera/test_media_source.py @@ -5,6 +5,7 @@ from unittest.mock import PropertyMock, patch import pytest from homeassistant.components import media_source +from homeassistant.components.camera import CameraCapabilities from homeassistant.components.camera.const import StreamType from homeassistant.components.stream import FORMAT_CONTENT_TYPE from homeassistant.core import HomeAssistant @@ -65,8 +66,8 @@ async def test_browsing_mjpeg(hass: HomeAssistant) -> None: assert item.children[0].title == "Demo camera without stream" -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_browsing_web_rtc(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_browsing_webrtc(hass: HomeAssistant) -> None: """Test browsing WebRTC camera media source.""" # 3 cameras: # one only supports WebRTC (no stream source) @@ -91,7 +92,7 @@ async def test_browsing_web_rtc(hass: HomeAssistant) -> None: assert item.children[0].media_content_type == FORMAT_CONTENT_TYPE["hls"] -@pytest.mark.usefixtures("mock_camera_hls") +@pytest.mark.usefixtures("mock_camera") async def test_resolving(hass: HomeAssistant) -> None: """Test resolving.""" # Adding stream enables HLS camera @@ -109,7 +110,7 @@ async def test_resolving(hass: HomeAssistant) -> None: assert item.mime_type == FORMAT_CONTENT_TYPE["hls"] -@pytest.mark.usefixtures("mock_camera_hls") +@pytest.mark.usefixtures("mock_camera") async def test_resolving_errors(hass: HomeAssistant) -> None: """Test resolving.""" @@ -130,8 +131,10 @@ async def test_resolving_errors(hass: HomeAssistant) -> None: with ( pytest.raises(media_source.Unresolvable) as exc_info, patch( - "homeassistant.components.camera.Camera.frontend_stream_type", - new_callable=PropertyMock(return_value=StreamType.WEB_RTC), + "homeassistant.components.camera.Camera.camera_capabilities", + new_callable=PropertyMock( + return_value=CameraCapabilities({StreamType.WEB_RTC}) + ), ), ): await media_source.async_resolve_media( diff --git a/tests/components/camera/test_significant_change.py 
b/tests/components/camera/test_significant_change.py index a2a7ef20e71..b89b1c26747 100644 --- a/tests/components/camera/test_significant_change.py +++ b/tests/components/camera/test_significant_change.py @@ -1,6 +1,6 @@ """Test the Camera significant change platform.""" -from homeassistant.components.camera import STATE_IDLE, STATE_RECORDING +from homeassistant.components.camera import CameraState from homeassistant.components.camera.significant_change import ( async_check_significant_change, ) @@ -10,11 +10,11 @@ async def test_significant_change() -> None: """Detect Camera significant changes.""" attrs = {} assert not async_check_significant_change( - None, STATE_IDLE, attrs, STATE_IDLE, attrs + None, CameraState.IDLE, attrs, CameraState.IDLE, attrs ) assert not async_check_significant_change( - None, STATE_IDLE, attrs, STATE_IDLE, {"dummy": "dummy"} + None, CameraState.IDLE, attrs, CameraState.IDLE, {"dummy": "dummy"} ) assert async_check_significant_change( - None, STATE_IDLE, attrs, STATE_RECORDING, attrs + None, CameraState.IDLE, attrs, CameraState.RECORDING, attrs ) diff --git a/tests/components/camera/test_webrtc.py b/tests/components/camera/test_webrtc.py new file mode 100644 index 00000000000..a7c6d889409 --- /dev/null +++ b/tests/components/camera/test_webrtc.py @@ -0,0 +1,1302 @@ +"""Test camera WebRTC.""" + +from collections.abc import AsyncGenerator, Generator +import logging +from typing import Any +from unittest.mock import AsyncMock, Mock, patch + +import pytest +from webrtc_models import RTCIceCandidate, RTCIceCandidateInit, RTCIceServer + +from homeassistant.components.camera import ( + DATA_ICE_SERVERS, + DOMAIN as CAMERA_DOMAIN, + Camera, + CameraEntityFeature, + CameraWebRTCProvider, + StreamType, + WebRTCAnswer, + WebRTCCandidate, + WebRTCError, + WebRTCMessage, + WebRTCSendMessage, + async_get_supported_legacy_provider, + async_register_ice_servers, + async_register_rtsp_to_web_rtc_provider, + async_register_webrtc_provider, + get_camera_from_entity_id, +) +from homeassistant.components.websocket_api import TYPE_RESULT +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.core import HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from .common import STREAM_SOURCE, WEBRTC_ANSWER, SomeTestProvider + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, + setup_test_component_platform, +) +from tests.typing import WebSocketGenerator + +WEBRTC_OFFER = "v=0\r\n" +HLS_STREAM_SOURCE = "http://127.0.0.1/example.m3u" +TEST_INTEGRATION_DOMAIN = "test" + + +class Go2RTCProvider(SomeTestProvider): + """go2rtc provider.""" + + @property + def domain(self) -> str: + """Return the integration domain of the provider.""" + return "go2rtc" + + +class MockCamera(Camera): + """Mock Camera Entity.""" + + _attr_name = "Test" + _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM + + def __init__(self) -> None: + """Initialize the mock entity.""" + super().__init__() + self._sync_answer: str | None | Exception = WEBRTC_ANSWER + + def set_sync_answer(self, value: str | None | Exception) -> None: + """Set sync offer answer.""" + self._sync_answer = value + + async def async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None: + """Handle the WebRTC offer 
and return the answer.""" + if isinstance(self._sync_answer, Exception): + raise self._sync_answer + return self._sync_answer + + async def stream_source(self) -> str | None: + """Return the source of the stream. + + This is used by cameras with CameraEntityFeature.STREAM + and StreamType.HLS. + """ + return "rtsp://stream" + + +@pytest.fixture +async def init_test_integration( + hass: HomeAssistant, +) -> MockCamera: + """Initialize components.""" + + entry = MockConfigEntry(domain=TEST_INTEGRATION_DOMAIN) + entry.add_to_hass(hass) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [CAMERA_DOMAIN] + ) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Unload test config entry.""" + await hass.config_entries.async_forward_entry_unload( + config_entry, CAMERA_DOMAIN + ) + return True + + mock_integration( + hass, + MockModule( + TEST_INTEGRATION_DOMAIN, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + test_camera = MockCamera() + setup_test_component_platform( + hass, CAMERA_DOMAIN, [test_camera], from_config_entry=True + ) + mock_platform(hass, f"{TEST_INTEGRATION_DOMAIN}.config_flow", Mock()) + + with mock_config_flow(TEST_INTEGRATION_DOMAIN, ConfigFlow): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + return test_camera + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_async_register_webrtc_provider( + hass: HomeAssistant, +) -> None: + """Test registering a WebRTC provider.""" + camera = get_camera_from_entity_id(hass, "camera.demo_camera") + assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} + + provider = SomeTestProvider() + unregister = async_register_webrtc_provider(hass, provider) + await hass.async_block_till_done() + + assert camera.camera_capabilities.frontend_stream_types == { + StreamType.HLS, + StreamType.WEB_RTC, + } + + # Mark stream as unsupported + provider._is_supported = False + # Manually refresh the provider + await camera.async_refresh_providers() + + assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} + + # Mark stream as supported + provider._is_supported = True + # Manually refresh the provider + await camera.async_refresh_providers() + assert camera.camera_capabilities.frontend_stream_types == { + StreamType.HLS, + StreamType.WEB_RTC, + } + + unregister() + await hass.async_block_till_done() + + assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_async_register_webrtc_provider_twice( + hass: HomeAssistant, + register_test_provider: SomeTestProvider, +) -> None: + """Test registering a WebRTC provider twice should raise.""" + with pytest.raises(ValueError, match="Provider already registered"): + async_register_webrtc_provider(hass, register_test_provider) + + +async def test_async_register_webrtc_provider_camera_not_loaded( + hass: HomeAssistant, +) -> None: + """Test registering a WebRTC provider when camera is not loaded.""" + with pytest.raises(ValueError, match="Unexpected state, camera not loaded"): + async_register_webrtc_provider(hass, SomeTestProvider()) + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def 
test_async_register_ice_server( + hass: HomeAssistant, +) -> None: + """Test registering an ICE server.""" + # Clear any existing ICE servers + hass.data[DATA_ICE_SERVERS].clear() + + called = 0 + + @callback + def get_ice_servers() -> list[RTCIceServer]: + nonlocal called + called += 1 + return [ + RTCIceServer(urls="stun:example.com"), + RTCIceServer(urls="turn:example.com"), + ] + + unregister = async_register_ice_servers(hass, get_ice_servers) + assert not called + + camera = get_camera_from_entity_id(hass, "camera.async") + config = camera.async_get_webrtc_client_configuration() + + assert config.configuration.ice_servers == [ + RTCIceServer(urls="stun:example.com"), + RTCIceServer(urls="turn:example.com"), + ] + assert called == 1 + + # register another ICE server + called_2 = 0 + + @callback + def get_ice_servers_2() -> list[RTCIceServer]: + nonlocal called_2 + called_2 += 1 + return [ + RTCIceServer( + urls=["stun:example2.com", "turn:example2.com"], + username="user", + credential="pass", + ) + ] + + unregister_2 = async_register_ice_servers(hass, get_ice_servers_2) + + config = camera.async_get_webrtc_client_configuration() + assert config.configuration.ice_servers == [ + RTCIceServer(urls="stun:example.com"), + RTCIceServer(urls="turn:example.com"), + RTCIceServer( + urls=["stun:example2.com", "turn:example2.com"], + username="user", + credential="pass", + ), + ] + assert called == 2 + assert called_2 == 1 + + # unregister the first ICE server + + unregister() + + config = camera.async_get_webrtc_client_configuration() + assert config.configuration.ice_servers == [ + RTCIceServer( + urls=["stun:example2.com", "turn:example2.com"], + username="user", + credential="pass", + ), + ] + assert called == 2 + assert called_2 == 2 + + # unregister the second ICE server + unregister_2() + + config = camera.async_get_webrtc_client_configuration() + assert config.configuration.ice_servers == [] + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_ws_get_client_config( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test get WebRTC client config.""" + await async_setup_component(hass, "camera", {}) + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == { + "configuration": { + "iceServers": [ + { + "urls": [ + "stun:stun.home-assistant.io:80", + "stun:stun.home-assistant.io:3478", + ] + }, + ], + }, + "getCandidatesUpfront": False, + } + + @callback + def get_ice_server() -> list[RTCIceServer]: + return [ + RTCIceServer( + urls=["stun:example2.com", "turn:example2.com"], + username="user", + credential="pass", + ) + ] + + async_register_ice_servers(hass, get_ice_server) + + await client.send_json_auto_id( + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == { + "configuration": { + "iceServers": [ + { + "urls": [ + "stun:stun.home-assistant.io:80", + "stun:stun.home-assistant.io:3478", + ] + }, + { + "urls": ["stun:example2.com", "turn:example2.com"], + "username": "user", + "credential": "pass", + }, + ], + }, + "getCandidatesUpfront": False, + } + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def 
test_ws_get_client_config_sync_offer( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test get WebRTC client config, when camera is supporting sync offer.""" + await async_setup_component(hass, "camera", {}) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.sync"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == { + "configuration": {}, + "getCandidatesUpfront": True, + } + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_ws_get_client_config_custom_config( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test get WebRTC client config.""" + await async_process_ha_core_config( + hass, + {"webrtc": {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}}, + ) + + await async_setup_component(hass, "camera", {}) + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == { + "configuration": {"iceServers": [{"urls": ["stun:custom_stun_server:3478"]}]}, + "getCandidatesUpfront": False, + } + + +@pytest.mark.usefixtures("mock_camera") +async def test_ws_get_client_config_no_rtc_camera( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test get WebRTC client config.""" + await async_setup_component(hass, "camera", {}) + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert not msg["success"] + assert msg["error"] == { + "code": "webrtc_get_client_config_failed", + "message": "Camera does not support WebRTC, frontend_stream_types={}", + } + + +async def provide_webrtc_answer(stream_source: str, offer: str, stream_id: str) -> str: + """Simulate an rtsp to webrtc provider.""" + assert stream_source == STREAM_SOURCE + assert offer == WEBRTC_OFFER + return WEBRTC_ANSWER + + +@pytest.fixture(name="mock_rtsp_to_webrtc") +def mock_rtsp_to_webrtc_fixture( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> Generator[Mock]: + """Fixture that registers a mock rtsp to webrtc provider.""" + mock_provider = Mock(side_effect=provide_webrtc_answer) + unsub = async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", mock_provider) + assert ( + "async_register_rtsp_to_web_rtc_provider is a deprecated function which will" + " be removed in HA Core 2025.6. 
Use async_register_webrtc_provider instead" + ) in caplog.text + yield mock_provider + unsub() + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_websocket_webrtc_offer( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test initiating a WebRTC stream with offer and answer.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.async", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": WEBRTC_ANSWER, + } + + # Unsubscribe/Close session + await client.send_json_auto_id( + { + "type": "unsubscribe_events", + "subscription": subscription_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + + +@pytest.mark.filterwarnings( + "ignore:Using RTCIceCandidate is deprecated. Use RTCIceCandidateInit instead" +) +@pytest.mark.usefixtures("mock_stream_source", "mock_camera") +async def test_websocket_webrtc_offer_webrtc_provider_deprecated( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + register_test_provider: SomeTestProvider, +) -> None: + """Test initiating a WebRTC stream with a webrtc provider with the deprecated class.""" + await _test_websocket_webrtc_offer_webrtc_provider( + hass, + hass_ws_client, + register_test_provider, + WebRTCCandidate(RTCIceCandidate("candidate")), + {"type": "candidate", "candidate": {"candidate": "candidate"}}, + ) + + +@pytest.mark.parametrize( + ("message", "expected_frontend_message"), + [ + ( + WebRTCCandidate(RTCIceCandidateInit("candidate")), + { + "type": "candidate", + "candidate": {"candidate": "candidate", "sdpMLineIndex": 0}, + }, + ), + ( + WebRTCError("webrtc_offer_failed", "error"), + {"type": "error", "code": "webrtc_offer_failed", "message": "error"}, + ), + (WebRTCAnswer("answer"), {"type": "answer", "answer": "answer"}), + ], + ids=["candidate", "error", "answer"], +) +@pytest.mark.usefixtures("mock_stream_source", "mock_camera") +async def test_websocket_webrtc_offer_webrtc_provider( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + register_test_provider: SomeTestProvider, + message: WebRTCMessage, + expected_frontend_message: dict[str, Any], +) -> None: + """Test initiating a WebRTC stream with a webrtc provider.""" + await _test_websocket_webrtc_offer_webrtc_provider( + hass, + hass_ws_client, + register_test_provider, + message, + expected_frontend_message, + ) + + +async def _test_websocket_webrtc_offer_webrtc_provider( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + register_test_provider: SomeTestProvider, + message: WebRTCMessage, + expected_frontend_message: dict[str, Any], +) -> None: + """Test initiating a WebRTC stream with a webrtc provider.""" + client = await hass_ws_client(hass) + with ( + patch.object( + register_test_provider, "async_handle_async_webrtc_offer", autospec=True + ) as mock_async_handle_async_webrtc_offer, + patch.object( + register_test_provider, "async_close_session", autospec=True + ) as mock_async_close_session, + ): + await 
client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + mock_async_handle_async_webrtc_offer.assert_called_once() + assert mock_async_handle_async_webrtc_offer.call_args[0][1] == WEBRTC_OFFER + send_message: WebRTCSendMessage = ( + mock_async_handle_async_webrtc_offer.call_args[0][3] + ) + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + session_id = response["event"]["session_id"] + + send_message(message) + + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == expected_frontend_message + + # Unsubscribe/Close session + await client.send_json_auto_id( + { + "type": "unsubscribe_events", + "subscription": subscription_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + mock_async_close_session.assert_called_once_with(session_id) + + +async def test_websocket_webrtc_offer_invalid_entity( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC with a camera entity that does not exist.""" + await async_setup_component(hass, "camera", {}) + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.does_not_exist", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": "Camera not found", + } + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_websocket_webrtc_offer_missing_offer( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC stream with missing required fields.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"]["code"] == "invalid_format" + + +@pytest.mark.parametrize( + ("error", "expected_message"), + [ + (ValueError("value error"), "value error"), + (HomeAssistantError("offer failed"), "offer failed"), + (TimeoutError(), "Timeout handling WebRTC offer"), + ], +) +async def test_websocket_webrtc_offer_failure( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_test_integration: MockCamera, + error: Exception, + expected_message: str, +) -> None: + """Test WebRTC stream that fails handling the offer.""" + client = await hass_ws_client(hass) + init_test_integration.set_sync_answer(error) + + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.test", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Error + response = await client.receive_json() + assert response["id"] == 
subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": expected_message, + } + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_websocket_webrtc_offer_sync( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test sync WebRTC stream offer.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.sync", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert ( + "tests.components.camera.conftest", + logging.WARNING, + ( + "async_handle_web_rtc_offer was called from camera, this is a deprecated " + "function which will be removed in HA Core 2025.6. Use " + "async_handle_async_webrtc_offer instead" + ), + ) in caplog.record_tuples + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == {"type": "answer", "answer": WEBRTC_ANSWER} + + +async def test_websocket_webrtc_offer_sync_no_answer( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + init_test_integration: MockCamera, +) -> None: + """Test sync WebRTC stream offer with no answer.""" + client = await hass_ws_client(hass) + init_test_integration.set_sync_answer(None) + + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.test", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": "No answer on WebRTC offer", + } + assert ( + "homeassistant.components.camera", + logging.ERROR, + "Error handling WebRTC offer: No answer", + ) in caplog.record_tuples + + +@pytest.mark.usefixtures("mock_camera") +async def test_websocket_webrtc_offer_invalid_stream_type( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC initiating for a camera with a different stream_type.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "webrtc_offer_failed", + "message": "Camera does not support WebRTC, frontend_stream_types={}", + } + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_rtsp_to_webrtc_offer( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_rtsp_to_webrtc: Mock, +) -> None: + """Test creating a 
webrtc offer from an rstp provider.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": WEBRTC_ANSWER, + } + + assert mock_rtsp_to_webrtc.called + + +@pytest.fixture(name="mock_hls_stream_source") +async def mock_hls_stream_source_fixture() -> AsyncGenerator[AsyncMock]: + """Fixture to create an HLS stream source.""" + with patch( + "homeassistant.components.camera.Camera.stream_source", + return_value=HLS_STREAM_SOURCE, + ) as mock_hls_stream_source: + yield mock_hls_stream_source + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_rtsp_to_webrtc_provider_unregistered( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test creating a webrtc offer from an rstp provider.""" + mock_provider = Mock(side_effect=provide_webrtc_answer) + unsub = async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", mock_provider) + + client = await hass_ws_client(hass) + + # Registered provider can handle the WebRTC offer + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": WEBRTC_ANSWER, + } + + assert mock_provider.called + mock_provider.reset_mock() + + # Unregister provider, then verify the WebRTC offer cannot be handled + unsub() + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response.get("type") == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "webrtc_offer_failed", + "message": "Camera does not support WebRTC, frontend_stream_types={}", + } + + assert not mock_provider.called + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_rtsp_to_webrtc_offer_not_accepted( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test a provider that can't satisfy the rtsp to webrtc offer.""" + + async def provide_none( + stream_source: str, offer: str, stream_id: str + ) -> str | None: + """Simulate a provider that can't accept the offer.""" + return None + + mock_provider = Mock(side_effect=provide_none) + unsub = async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", mock_provider) + client = await hass_ws_client(hass) + + # Registered provider can handle the 
WebRTC offer + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": "Camera does not support WebRTC", + } + + assert mock_provider.called + + unsub() + + +@pytest.mark.parametrize( + ("frontend_candidate", "expected_candidate"), + [ + ( + {"candidate": "candidate", "sdpMLineIndex": 0}, + RTCIceCandidateInit("candidate"), + ), + ( + {"candidate": "candidate", "sdpMLineIndex": 1}, + RTCIceCandidateInit("candidate", sdp_m_line_index=1), + ), + ( + {"candidate": "candidate", "sdpMid": "1"}, + RTCIceCandidateInit("candidate", sdp_mid="1"), + ), + ], + ids=["candidate", "candidate-mline-index", "candidate-mid"], +) +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_ws_webrtc_candidate( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + frontend_candidate: dict[str, Any], + expected_candidate: RTCIceCandidateInit, +) -> None: + """Test ws webrtc candidate command.""" + client = await hass_ws_client(hass) + session_id = "session_id" + with patch.object( + get_camera_from_entity_id(hass, "camera.async"), "async_on_webrtc_candidate" + ) as mock_on_webrtc_candidate: + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.async", + "session_id": session_id, + "candidate": frontend_candidate, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + mock_on_webrtc_candidate.assert_called_once_with(session_id, expected_candidate) + + +@pytest.mark.parametrize( + ("message", "expected_error_msg"), + [ + ( + {"sdpMLineIndex": 0}, + ( + 'Field "candidate" of type str is missing in RTCIceCandidateInit instance' + " for dictionary value @ data['candidate']. Got {'sdpMLineIndex': 0}" + ), + ), + ( + {"candidate": "candidate", "sdpMLineIndex": -1}, + ( + "sdpMLineIndex must be greater than or equal to 0 for dictionary value @ " + "data['candidate']. 
Got {'candidate': 'candidate', 'sdpMLineIndex': -1}" + ), + ), + ], + ids=[ + "candidate missing", + "spd_mline_index smaller than 0", + ], +) +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_ws_webrtc_candidate_invalid_candidate_message( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + message: dict, + expected_error_msg: str, +) -> None: + """Test ws WebRTC candidate command for a camera with a different stream_type.""" + client = await hass_ws_client(hass) + with patch("homeassistant.components.camera.Camera.async_on_webrtc_candidate"): + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.async", + "session_id": "session_id", + "candidate": message, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "invalid_format", + "message": expected_error_msg, + } + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_ws_webrtc_candidate_not_supported( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test ws webrtc candidate command is raising if not supported.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.sync", + "session_id": "session_id", + "candidate": {"candidate": "candidate"}, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": "Cannot handle WebRTC candidate", + } + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_ws_webrtc_candidate_webrtc_provider( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + register_test_provider: SomeTestProvider, +) -> None: + """Test ws webrtc candidate command with WebRTC provider.""" + with patch.object( + register_test_provider, "async_on_webrtc_candidate" + ) as mock_on_webrtc_candidate: + client = await hass_ws_client(hass) + session_id = "session_id" + candidate = "candidate" + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.demo_camera", + "session_id": session_id, + "candidate": {"candidate": candidate, "sdpMLineIndex": 1}, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + mock_on_webrtc_candidate.assert_called_once_with( + session_id, RTCIceCandidateInit(candidate, sdp_m_line_index=1) + ) + + +async def test_ws_webrtc_candidate_invalid_entity( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test ws WebRTC candidate command with a camera entity that does not exist.""" + await async_setup_component(hass, "camera", {}) + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.does_not_exist", + "session_id": "session_id", + "candidate": {"candidate": "candidate"}, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": "Camera not found", + } + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_ws_webrtc_canidate_missing_candidate( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test ws WebRTC candidate command with missing 
required fields.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.async", + "session_id": "session_id", + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"]["code"] == "invalid_format" + + +@pytest.mark.usefixtures("mock_camera") +async def test_ws_webrtc_candidate_invalid_stream_type( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test ws WebRTC candidate command for a camera with a different stream_type.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.demo_camera", + "session_id": "session_id", + "candidate": {"candidate": "candidate"}, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "webrtc_candidate_failed", + "message": "Camera does not support WebRTC, frontend_stream_types={}", + } + + +async def test_webrtc_provider_optional_interface(hass: HomeAssistant) -> None: + """Test optional interface for WebRTC provider.""" + + class OnlyRequiredInterfaceProvider(CameraWebRTCProvider): + """Test provider.""" + + @property + def domain(self) -> str: + """Return the domain of the provider.""" + return "test" + + @callback + def async_is_supported(self, stream_source: str) -> bool: + """Determine if the provider supports the stream source.""" + return True + + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback. + + Return value determines if the offer was handled successfully. 
+ """ + send_message(WebRTCAnswer(answer="answer")) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle the WebRTC candidate.""" + + provider = OnlyRequiredInterfaceProvider() + # Call all interface methods + assert provider.async_is_supported("stream_source") is True + await provider.async_handle_async_webrtc_offer( + Mock(), "offer_sdp", "session_id", Mock() + ) + await provider.async_on_webrtc_candidate( + "session_id", RTCIceCandidateInit("candidate") + ) + provider.async_close_session("session_id") + + +@pytest.mark.usefixtures("mock_camera") +async def test_repair_issue_legacy_provider( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test repair issue created for legacy provider.""" + # Ensure no issue if no provider is registered + assert not issue_registry.async_get_issue( + "camera", "legacy_webrtc_provider_mock_domain" + ) + + # Register a legacy provider + legacy_provider = Mock(side_effect=provide_webrtc_answer) + unsub_legacy_provider = async_register_rtsp_to_web_rtc_provider( + hass, "mock_domain", legacy_provider + ) + await hass.async_block_till_done() + + # Ensure no issue if only legacy provider is registered + assert not issue_registry.async_get_issue( + "camera", "legacy_webrtc_provider_mock_domain" + ) + + provider = Go2RTCProvider() + unsub_go2rtc_provider = async_register_webrtc_provider(hass, provider) + await hass.async_block_till_done() + + # Ensure issue when legacy and builtin provider are registered + issue = issue_registry.async_get_issue( + "camera", "legacy_webrtc_provider_mock_domain" + ) + assert issue + assert issue.is_fixable is False + assert issue.is_persistent is False + assert issue.issue_domain == "mock_domain" + assert issue.learn_more_url == "https://www.home-assistant.io/integrations/go2rtc/" + assert issue.severity == ir.IssueSeverity.WARNING + assert issue.issue_id == "legacy_webrtc_provider_mock_domain" + assert issue.translation_key == "legacy_webrtc_provider" + assert issue.translation_placeholders == { + "legacy_integration": "mock_domain", + "builtin_integration": "go2rtc", + } + + unsub_legacy_provider() + unsub_go2rtc_provider() + + +@pytest.mark.usefixtures("mock_camera", "register_test_provider", "mock_rtsp_to_webrtc") +async def test_no_repair_issue_without_new_provider( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test repair issue not created if no go2rtc provider exists.""" + assert not issue_registry.async_get_issue( + "camera", "legacy_webrtc_provider_mock_domain" + ) + + +@pytest.mark.usefixtures("mock_camera", "mock_rtsp_to_webrtc") +async def test_registering_same_legacy_provider( + hass: HomeAssistant, +) -> None: + """Test registering the same legacy provider twice.""" + legacy_provider = Mock(side_effect=provide_webrtc_answer) + with pytest.raises(ValueError, match="Provider already registered"): + async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", legacy_provider) + + +@pytest.mark.usefixtures("mock_hls_stream_source", "mock_camera", "mock_rtsp_to_webrtc") +async def test_get_not_supported_legacy_provider(hass: HomeAssistant) -> None: + """Test getting a not supported legacy provider.""" + camera = get_camera_from_entity_id(hass, "camera.demo_camera") + assert await async_get_supported_legacy_provider(hass, camera) is None diff --git a/tests/components/canary/test_alarm_control_panel.py b/tests/components/canary/test_alarm_control_panel.py index 83e801d67c4..a194621b0d9 100644 --- 
a/tests/components/canary/test_alarm_control_panel.py +++ b/tests/components/canary/test_alarm_control_panel.py @@ -4,17 +4,16 @@ from unittest.mock import PropertyMock, patch from canary.const import LOCATION_MODE_AWAY, LOCATION_MODE_HOME, LOCATION_MODE_NIGHT -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.components.canary import DOMAIN from homeassistant.const import ( SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -67,7 +66,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED assert state.attributes["private"] type(mocked_location).is_private = PropertyMock(return_value=False) @@ -82,7 +81,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME # test armed away type(mocked_location).mode = PropertyMock( @@ -94,7 +93,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY # test armed night type(mocked_location).mode = PropertyMock( @@ -106,7 +105,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == STATE_ALARM_ARMED_NIGHT + assert state.state == AlarmControlPanelState.ARMED_NIGHT async def test_alarm_control_panel_services(hass: HomeAssistant, canary) -> None: diff --git a/tests/components/cast/test_config_flow.py b/tests/components/cast/test_config_flow.py index 7dce3f768e2..2dcf007c6d4 100644 --- a/tests/components/cast/test_config_flow.py +++ b/tests/components/cast/test_config_flow.py @@ -250,7 +250,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: user_input=user_input_dict, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] is None + assert result["data"] == {} for other_param in advanced_parameters: if other_param == parameter: continue @@ -264,7 +264,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: user_input={"known_hosts": ""}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] is None + assert result["data"] == {} expected_data = {**orig_data, "known_hosts": []} if parameter in advanced_parameters: expected_data[parameter] = updated diff --git a/tests/components/cast/test_home_assistant_cast.py b/tests/components/cast/test_home_assistant_cast.py index c9e311bb024..2fc348fd008 100644 --- a/tests/components/cast/test_home_assistant_cast.py +++ b/tests/components/cast/test_home_assistant_cast.py @@ -5,8 +5,8 @@ from unittest.mock import patch import pytest from homeassistant.components.cast import DOMAIN, home_assistant_cast -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry, async_mock_signal diff --git 
a/tests/components/cast/test_media_player.py b/tests/components/cast/test_media_player.py index 513f32b1ad6..b2ce60e9393 100644 --- a/tests/components/cast/test_media_player.py +++ b/tests/components/cast/test_media_player.py @@ -27,13 +27,13 @@ from homeassistant.components.media_player import ( MediaClass, MediaPlayerEntityFeature, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ENTITY_ID, CAST_APP_ID_HOMEASSISTANT_LOVELACE, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er, network from homeassistant.helpers.dispatcher import ( diff --git a/tests/components/cert_expiry/test_config_flow.py b/tests/components/cert_expiry/test_config_flow.py index 3fd696f5953..907071d8b1f 100644 --- a/tests/components/cert_expiry/test_config_flow.py +++ b/tests/components/cert_expiry/test_config_flow.py @@ -7,13 +7,12 @@ from unittest.mock import patch import pytest from homeassistant import config_entries -from homeassistant.components.cert_expiry.const import DEFAULT_PORT, DOMAIN -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.components.cert_expiry.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .const import HOST, PORT -from .helpers import future_timestamp from tests.common import MockConfigEntry @@ -64,122 +63,6 @@ async def test_user_with_bad_cert(hass: HomeAssistant) -> None: assert result["result"].unique_id == f"{HOST}:{PORT}" -async def test_import_host_only(hass: HomeAssistant) -> None: - """Test import with host only.""" - with ( - patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == DEFAULT_PORT - assert result["result"].unique_id == f"{HOST}:{DEFAULT_PORT}" - - -async def test_import_host_and_port(hass: HomeAssistant) -> None: - """Test import with host and port.""" - with ( - patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST, CONF_PORT: PORT}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == PORT - assert result["result"].unique_id == f"{HOST}:{PORT}" - - -async def test_import_non_default_port(hass: HomeAssistant) -> None: - """Test import with host and non-default port.""" - with ( - patch( - 
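# The deleted blocks in this cert_expiry hunk drop the YAML import flow, so the
# remaining tests set the integration up purely through config entries.  A small
# sketch of that pattern, assuming the MockConfigEntry helper and the HOST/PORT
# constants already used in this file; the helper function name is ours.
from homeassistant.const import CONF_HOST, CONF_PORT

from tests.common import MockConfigEntry


def _add_cert_expiry_entry(
    hass, domain: str, host: str, port: int
) -> MockConfigEntry:
    """Register a cert_expiry config entry directly, with no import step."""
    entry = MockConfigEntry(
        domain=domain,
        data={CONF_HOST: host, CONF_PORT: port},
        unique_id=f"{host}:{port}",
    )
    entry.add_to_hass(hass)
    return entry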
"homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST, CONF_PORT: 888}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"{HOST}:888" - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == 888 - assert result["result"].unique_id == f"{HOST}:888" - - -async def test_import_with_name(hass: HomeAssistant) -> None: - """Test import with name (deprecated).""" - with ( - patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_NAME: "legacy", CONF_HOST: HOST, CONF_PORT: PORT}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == PORT - assert result["result"].unique_id == f"{HOST}:{PORT}" - - -async def test_bad_import(hass: HomeAssistant) -> None: - """Test import step.""" - with patch( - "homeassistant.components.cert_expiry.helper.async_get_cert", - side_effect=ConnectionRefusedError(), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "import_failed" - - async def test_abort_if_already_setup(hass: HomeAssistant) -> None: """Test we abort if the cert is already setup.""" MockConfigEntry( @@ -188,14 +71,6 @@ async def test_abort_if_already_setup(hass: HomeAssistant) -> None: unique_id=f"{HOST}:{PORT}", ).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST, CONF_PORT: PORT}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, diff --git a/tests/components/cert_expiry/test_init.py b/tests/components/cert_expiry/test_init.py index e2c333cc6f3..5ba63ad1af1 100644 --- a/tests/components/cert_expiry/test_init.py +++ b/tests/components/cert_expiry/test_init.py @@ -1,59 +1,24 @@ """Tests for Cert Expiry setup.""" -from datetime import timedelta from unittest.mock import patch from freezegun import freeze_time from homeassistant.components.cert_expiry.const import DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( CONF_HOST, CONF_PORT, - EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STARTED, STATE_UNAVAILABLE, ) from homeassistant.core import CoreState, HomeAssistant from homeassistant.setup import async_setup_component -import homeassistant.util.dt as dt_util from .const import HOST, PORT from .helpers import future_timestamp, static_datetime -from tests.common import MockConfigEntry, 
async_fire_time_changed - - -async def test_setup_with_config(hass: HomeAssistant) -> None: - """Test setup component with config.""" - assert hass.state is CoreState.running - - config = { - SENSOR_DOMAIN: [ - {"platform": DOMAIN, CONF_HOST: HOST, CONF_PORT: PORT}, - {"platform": DOMAIN, CONF_HOST: HOST, CONF_PORT: 888}, - ], - } - - with ( - patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" - ), - patch( - "homeassistant.components.cert_expiry.coordinator.get_cert_expiry_timestamp", - return_value=future_timestamp(1), - ), - ): - assert await async_setup_component(hass, SENSOR_DOMAIN, config) is True - await hass.async_block_till_done() - hass.bus.async_fire(EVENT_HOMEASSISTANT_START) - await hass.async_block_till_done() - next_update = dt_util.utcnow() + timedelta(seconds=20) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done(wait_background_tasks=True) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 2 +from tests.common import MockConfigEntry async def test_update_unique_id(hass: HomeAssistant) -> None: diff --git a/tests/components/chacon_dio/test_cover.py b/tests/components/chacon_dio/test_cover.py index 24e6e8581d8..9e9f403ed0b 100644 --- a/tests/components/chacon_dio/test_cover.py +++ b/tests/components/chacon_dio/test_cover.py @@ -13,9 +13,7 @@ from homeassistant.components.cover import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.const import ATTR_ENTITY_ID @@ -73,7 +71,7 @@ async def test_update( state = hass.states.get(COVER_ENTITY_ID) assert state assert state.attributes.get(ATTR_CURRENT_POSITION) == 51 - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async def test_cover_actions( @@ -95,7 +93,7 @@ async def test_cover_actions( ) await hass.async_block_till_done() state = hass.states.get(COVER_ENTITY_ID) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING await hass.services.async_call( COVER_DOMAIN, @@ -105,7 +103,7 @@ async def test_cover_actions( ) await hass.async_block_till_done() state = hass.states.get(COVER_ENTITY_ID) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -115,7 +113,7 @@ async def test_cover_actions( ) await hass.async_block_till_done() state = hass.states.get(COVER_ENTITY_ID) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING await hass.services.async_call( COVER_DOMAIN, @@ -125,7 +123,7 @@ async def test_cover_actions( ) await hass.async_block_till_done() state = hass.states.get(COVER_ENTITY_ID) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async def test_cover_callbacks( @@ -161,19 +159,19 @@ async def test_cover_callbacks( state = hass.states.get(COVER_ENTITY_ID) assert state assert state.attributes.get(ATTR_CURRENT_POSITION) == 79 - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await _callback_device_state_function(90, "up") state = hass.states.get(COVER_ENTITY_ID) assert state assert state.attributes.get(ATTR_CURRENT_POSITION) == 90 - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING await _callback_device_state_function(60, "down") state = hass.states.get(COVER_ENTITY_ID) assert state assert state.attributes.get(ATTR_CURRENT_POSITION) == 60 - assert 
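# The chacon_dio hunk replaces the deprecated STATE_OPEN / STATE_OPENING /
# STATE_CLOSING constants with the CoverState enum.  Assuming CoverState is a
# string enum whose values mirror the old constants (which is what the updated
# assertions rely on), comparisons against the raw state string keep working:
from homeassistant.components.cover import CoverState

# Illustrative equivalences between the old constants and the enum members.
assert CoverState.OPEN == "open"
assert CoverState.OPENING == "opening"
assert CoverState.CLOSING == "closing"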
state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_no_cover_found( diff --git a/tests/components/climate/test_device_trigger.py b/tests/components/climate/test_device_trigger.py index a492d9805b5..4b5a578ecc4 100644 --- a/tests/components/climate/test_device_trigger.py +++ b/tests/components/climate/test_device_trigger.py @@ -48,7 +48,7 @@ async def test_get_triggers( ) hass.states.async_set( entity_entry.entity_id, - const.HVAC_MODE_COOL, + HVACMode.COOL, { const.ATTR_HVAC_ACTION: HVACAction.IDLE, const.ATTR_CURRENT_HUMIDITY: 23, diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index 256ecf92b1d..45570c63008 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -3,14 +3,12 @@ from __future__ import annotations from enum import Enum -from types import ModuleType from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import MagicMock, Mock import pytest import voluptuous as vol -from homeassistant.components import climate from homeassistant.components.climate import ( DOMAIN, SET_TEMPERATURE_SCHEMA, @@ -20,26 +18,27 @@ from homeassistant.components.climate import ( from homeassistant.components.climate.const import ( ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, + ATTR_HUMIDITY, ATTR_MAX_TEMP, ATTR_MIN_TEMP, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, SERVICE_SET_FAN_MODE, + SERVICE_SET_HUMIDITY, + SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, + SERVICE_SET_SWING_HORIZONTAL_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, + SWING_HORIZONTAL_OFF, + SWING_HORIZONTAL_ON, ClimateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_TEMPERATURE, - PRECISION_WHOLE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - UnitOfTemperature, -) +from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import issue_registry as ir @@ -51,9 +50,6 @@ from tests.common import ( MockModule, MockPlatform, async_mock_service, - help_test_all, - import_and_test_deprecated_constant, - import_and_test_deprecated_constant_enum, mock_integration, mock_platform, setup_test_component_platform, @@ -101,6 +97,7 @@ class MockClimateEntity(MockEntity, ClimateEntity): ClimateEntityFeature.FAN_MODE | ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.SWING_MODE + | ClimateEntityFeature.SWING_HORIZONTAL_MODE ) _attr_preset_mode = "home" _attr_preset_modes = ["home", "away"] @@ -108,7 +105,12 @@ class MockClimateEntity(MockEntity, ClimateEntity): _attr_fan_modes = ["auto", "off"] _attr_swing_mode = "auto" _attr_swing_modes = ["auto", "off"] + _attr_swing_horizontal_mode = "on" + _attr_swing_horizontal_modes = [SWING_HORIZONTAL_ON, SWING_HORIZONTAL_OFF] _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_target_temperature = 20 + _attr_target_temperature_high = 25 + _attr_target_temperature_low = 15 @property def hvac_mode(self) -> HVACMode: @@ -138,6 +140,22 @@ class MockClimateEntity(MockEntity, ClimateEntity): """Set swing mode.""" self._attr_swing_mode = swing_mode + def set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None: + """Set horizontal swing mode.""" + self._attr_swing_horizontal_mode = swing_horizontal_mode + + def set_hvac_mode(self, hvac_mode: HVACMode) -> 
None: + """Set new target hvac mode.""" + self._attr_hvac_mode = hvac_mode + + def set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if ATTR_TEMPERATURE in kwargs: + self._attr_target_temperature = kwargs[ATTR_TEMPERATURE] + if ATTR_TARGET_TEMP_HIGH in kwargs: + self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] + self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] + class MockClimateEntityTestMethods(MockClimateEntity): """Mock Climate device.""" @@ -176,71 +194,85 @@ def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, s (enum_field, constant_prefix) for enum_field in enum if enum_field - not in [ClimateEntityFeature.TURN_ON, ClimateEntityFeature.TURN_OFF] + not in [ + ClimateEntityFeature.TURN_ON, + ClimateEntityFeature.TURN_OFF, + ClimateEntityFeature.SWING_HORIZONTAL_MODE, + ] ] -@pytest.mark.parametrize( - "module", - [climate, climate.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(climate.ClimateEntityFeature, "SUPPORT_") - + _create_tuples(climate.HVACMode, "HVAC_MODE_"), -) -@pytest.mark.parametrize( - "module", - [climate, climate.const], -) -def test_deprecated_constants( +async def test_temperature_features_is_valid( + hass: HomeAssistant, + register_test_integration: MockConfigEntry, caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - module: ModuleType, ) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, constant_prefix, "2025.1" + """Test correct features for setting temperature.""" + + class MockClimateTempEntity(MockClimateEntity): + @property + def supported_features(self) -> int: + """Return supported features.""" + return ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + + class MockClimateTempRangeEntity(MockClimateEntity): + @property + def supported_features(self) -> int: + """Return supported features.""" + return ClimateEntityFeature.TARGET_TEMPERATURE + + climate_temp_entity = MockClimateTempEntity( + name="test", entity_id="climate.test_temp" + ) + climate_temp_range_entity = MockClimateTempRangeEntity( + name="test", entity_id="climate.test_range" ) - -@pytest.mark.parametrize( - ("enum", "constant_postfix"), - [ - (climate.HVACAction.OFF, "OFF"), - (climate.HVACAction.HEATING, "HEAT"), - (climate.HVACAction.COOLING, "COOL"), - (climate.HVACAction.DRYING, "DRY"), - (climate.HVACAction.IDLE, "IDLE"), - (climate.HVACAction.FAN, "FAN"), - ], -) -def test_deprecated_current_constants( - caplog: pytest.LogCaptureFixture, - enum: climate.HVACAction, - constant_postfix: str, -) -> None: - """Test deprecated current constants.""" - import_and_test_deprecated_constant( - caplog, - climate.const, - "CURRENT_HVAC_" + constant_postfix, - f"{enum.__class__.__name__}.{enum.name}", - enum, - "2025.1", + setup_test_component_platform( + hass, + DOMAIN, + entities=[climate_temp_entity, climate_temp_range_entity], + from_config_entry=True, ) + await hass.config_entries.async_setup(register_test_integration.entry_id) + await hass.async_block_till_done() + + with pytest.raises( + ServiceValidationError, + match="Set temperature action was used with the target temperature parameter but the entity does not support it", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test_temp", + "temperature": 20, + }, + 
blocking=True, + ) + + with pytest.raises( + ServiceValidationError, + match="Set temperature action was used with the target temperature low/high parameter but the entity does not support it", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test_range", + "target_temp_low": 20, + "target_temp_high": 25, + }, + blocking=True, + ) -async def test_preset_mode_validation( - hass: HomeAssistant, register_test_integration: MockConfigEntry +async def test_mode_validation( + hass: HomeAssistant, + register_test_integration: MockConfigEntry, + caplog: pytest.LogCaptureFixture, ) -> None: - """Test mode validation for fan, swing and preset.""" + """Test mode validation for hvac_mode, fan, swing and preset.""" climate_entity = MockClimateEntity(name="test", entity_id="climate.test") setup_test_component_platform( @@ -250,9 +282,11 @@ async def test_preset_mode_validation( await hass.async_block_till_done() state = hass.states.get("climate.test") + assert state.state == "heat" assert state.attributes.get(ATTR_PRESET_MODE) == "home" assert state.attributes.get(ATTR_FAN_MODE) == "auto" assert state.attributes.get(ATTR_SWING_MODE) == "auto" + assert state.attributes.get(ATTR_SWING_HORIZONTAL_MODE) == "on" await hass.services.async_call( DOMAIN, @@ -272,6 +306,15 @@ async def test_preset_mode_validation( }, blocking=True, ) + await hass.services.async_call( + DOMAIN, + SERVICE_SET_SWING_HORIZONTAL_MODE, + { + "entity_id": "climate.test", + "swing_horizontal_mode": "off", + }, + blocking=True, + ) await hass.services.async_call( DOMAIN, SERVICE_SET_FAN_MODE, @@ -285,6 +328,24 @@ async def test_preset_mode_validation( assert state.attributes.get(ATTR_PRESET_MODE) == "away" assert state.attributes.get(ATTR_FAN_MODE) == "off" assert state.attributes.get(ATTR_SWING_MODE) == "off" + assert state.attributes.get(ATTR_SWING_HORIZONTAL_MODE) == "off" + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HVAC_MODE, + { + "entity_id": "climate.test", + "hvac_mode": "auto", + }, + blocking=True, + ) + + assert ( + "MockClimateEntity sets the hvac_mode auto which is not valid " + "for this entity with modes: off, heat. This will stop working " + "in 2025.4 and raise an error instead. " + "Please" in caplog.text + ) with pytest.raises( ServiceValidationError, @@ -324,6 +385,25 @@ async def test_preset_mode_validation( ) assert exc.value.translation_key == "not_valid_swing_mode" + with pytest.raises( + ServiceValidationError, + match="Horizontal swing mode invalid is not valid. Valid horizontal swing modes are: on, off", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_SWING_HORIZONTAL_MODE, + { + "entity_id": "climate.test", + "swing_horizontal_mode": "invalid", + }, + blocking=True, + ) + assert ( + str(exc.value) + == "Horizontal swing mode invalid is not valid. Valid horizontal swing modes are: on, off" + ) + assert exc.value.translation_key == "not_valid_horizontal_swing_mode" + with pytest.raises( ServiceValidationError, match="Fan mode invalid is not valid. 
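# The horizontal-swing assertions in this hunk fix both the error text and the
# translation key ("not_valid_horizontal_swing_mode"), which points at a check
# in the climate component roughly like the sketch below.  Hedged reconstruction
# only: translation_domain and the placeholder names are assumptions, while the
# message format and translation key come from the test.
from homeassistant.exceptions import ServiceValidationError


def _validate_swing_horizontal_mode(entity, swing_horizontal_mode: str) -> None:
    """Reject horizontal swing modes the entity does not advertise."""
    modes = entity.swing_horizontal_modes or []
    if swing_horizontal_mode not in modes:
        raise ServiceValidationError(
            f"Horizontal swing mode {swing_horizontal_mode} is not valid. "
            f"Valid horizontal swing modes are: {', '.join(modes)}",
            translation_domain="climate",
            translation_key="not_valid_horizontal_swing_mode",
            translation_placeholders={
                "mode": swing_horizontal_mode,
                "modes": ", ".join(modes),
            },
        )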
Valid fan modes are: auto, off", @@ -344,289 +424,6 @@ async def test_preset_mode_validation( assert exc.value.translation_key == "not_valid_fan_mode" -@pytest.mark.parametrize( - "supported_features_at_int", - [ - ClimateEntityFeature.TARGET_TEMPERATURE.value, - ClimateEntityFeature.TARGET_TEMPERATURE.value - | ClimateEntityFeature.TURN_ON.value - | ClimateEntityFeature.TURN_OFF.value, - ], -) -def test_deprecated_supported_features_ints( - caplog: pytest.LogCaptureFixture, supported_features_at_int: int -) -> None: - """Test deprecated supported features ints.""" - - class MockClimateEntity(ClimateEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return supported_features_at_int - - entity = MockClimateEntity() - assert entity.supported_features is ClimateEntityFeature(supported_features_at_int) - assert "MockClimateEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "ClimateEntityFeature.TARGET_TEMPERATURE" in caplog.text - caplog.clear() - assert entity.supported_features is ClimateEntityFeature(supported_features_at_int) - assert "is using deprecated supported features values" not in caplog.text - - -async def test_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test adding feature flag and warn if missing when methods are set.""" - - called = [] - - class MockClimateEntityTest(MockClimateEntity): - """Mock Climate device.""" - - def turn_on(self) -> None: - """Turn on.""" - called.append("turn_on") - - def turn_off(self) -> None: - """Turn off.""" - called.append("turn_off") - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert ( - "Entity climate.test (.MockClimateEntityTest'>)" - " does not set ClimateEntityFeature.TURN_OFF but implements the turn_off method." - " Please report it to the author of the 'test' custom integration" - in caplog.text - ) - assert ( - "Entity climate.test (.MockClimateEntityTest'>)" - " does not set ClimateEntityFeature.TURN_ON but implements the turn_on method." - " Please report it to the author of the 'test' custom integration" - in caplog.text - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_ON, - { - "entity_id": "climate.test", - }, - blocking=True, - ) - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_OFF, - { - "entity_id": "climate.test", - }, - blocking=True, - ) - - assert len(called) == 2 - assert "turn_on" in called - assert "turn_off" in called - - -async def test_implicit_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test adding feature flag and warn if missing when methods are not set. 
- - (implicit by hvac mode) - """ - - class MockClimateEntityTest(MockEntity, ClimateEntity): - """Mock Climate device.""" - - _attr_temperature_unit = UnitOfTemperature.CELSIUS - - @property - def hvac_mode(self) -> HVACMode: - """Return hvac operation ie. heat, cool mode. - - Need to be one of HVACMode.*. - """ - return HVACMode.HEAT - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return the list of available hvac operation modes. - - Need to be a subset of HVAC_MODES. - """ - return [HVACMode.OFF, HVACMode.HEAT] - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert ( - "Entity climate.test (.MockClimateEntityTest'>)" - " implements HVACMode(s): off, heat and therefore implicitly supports the turn_on/turn_off" - " methods without setting the proper ClimateEntityFeature. Please report it to the author" - " of the 'test' custom integration" in caplog.text - ) - - -async def test_no_warning_implemented_turn_on_off_feature( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test no warning when feature flags are set.""" - - class MockClimateEntityTest(MockClimateEntity): - """Mock Climate device.""" - - _attr_supported_features = ( - ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.SWING_MODE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON - ) - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert ( - "does not set ClimateEntityFeature.TURN_OFF but implements the turn_off method." - not in caplog.text - ) - assert ( - "does not set ClimateEntityFeature.TURN_ON but implements the turn_on method." 
- not in caplog.text - ) - assert ( - " implements HVACMode(s): off, heat and therefore implicitly supports the off, heat methods" - not in caplog.text - ) - - -async def test_no_warning_integration_has_migrated( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" - - class MockClimateEntityTest(MockClimateEntity): - """Mock Climate device.""" - - _enable_turn_on_off_backwards_compatibility = False - _attr_supported_features = ( - ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.SWING_MODE - ) - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert ( - "does not set ClimateEntityFeature.TURN_OFF but implements the turn_off method." - not in caplog.text - ) - assert ( - "does not set ClimateEntityFeature.TURN_ON but implements the turn_on method." - not in caplog.text - ) - assert ( - " implements HVACMode(s): off, heat and therefore implicitly supports the off, heat methods" - not in caplog.text - ) - - -async def test_no_warning_integration_implement_feature_flags( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test no warning when integration uses the correct feature flags.""" - - class MockClimateEntityTest(MockClimateEntity): - """Mock Climate device.""" - - _attr_supported_features = ( - ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.SWING_MODE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON - ) - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert "does not set ClimateEntityFeature" not in caplog.text - assert "implements HVACMode(s):" not in caplog.text - - async def test_turn_on_off_toggle(hass: HomeAssistant) -> None: """Test turn_on/turn_off/toggle methods.""" @@ -665,7 +462,6 @@ async def test_sync_toggle(hass: HomeAssistant) -> None: class MockClimateEntityTest(MockClimateEntity): """Mock Climate device.""" - _enable_turn_on_off_backwards_compatibility = False _attr_supported_features = ( ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) @@ -826,7 +622,7 @@ async def test_issue_aux_property_deprecated( assert ( "test::MockClimateEntityWithAux implements the `is_aux_heat` property or uses " "the auxiliary heater methods in a subclass of ClimateEntity which is deprecated " - f"and will be unsupported from Home Assistant 2024.10. Please {report}" + f"and will be unsupported from Home Assistant 2025.4. 
Please {report}" ) in caplog.text # Assert we only log warning once @@ -953,6 +749,71 @@ async def test_no_issue_no_aux_property( ) not in caplog.text +async def test_humidity_validation( + hass: HomeAssistant, + register_test_integration: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test validation for humidity.""" + + class MockClimateEntityHumidity(MockClimateEntity): + """Mock climate class with mocked aux heater.""" + + _attr_supported_features = ClimateEntityFeature.TARGET_HUMIDITY + _attr_target_humidity = 50 + _attr_min_humidity = 50 + _attr_max_humidity = 60 + + def set_humidity(self, humidity: int) -> None: + """Set new target humidity.""" + self._attr_target_humidity = humidity + + test_climate = MockClimateEntityHumidity( + name="Test", + unique_id="unique_climate_test", + ) + + setup_test_component_platform( + hass, DOMAIN, entities=[test_climate], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("climate.test") + assert state.attributes.get(ATTR_HUMIDITY) == 50 + + with pytest.raises( + ServiceValidationError, + match="Provided humidity 1 is not valid. Accepted range is 50 to 60", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HUMIDITY, + { + "entity_id": "climate.test", + ATTR_HUMIDITY: "1", + }, + blocking=True, + ) + + assert exc.value.translation_key == "humidity_out_of_range" + assert "Check valid humidity 1 in range 50 - 60" in caplog.text + + with pytest.raises( + ServiceValidationError, + match="Provided humidity 70 is not valid. Accepted range is 50 to 60", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HUMIDITY, + { + "entity_id": "climate.test", + ATTR_HUMIDITY: "70", + }, + blocking=True, + ) + + async def test_temperature_validation( hass: HomeAssistant, register_test_integration: MockConfigEntry ) -> None: @@ -1050,3 +911,66 @@ async def test_temperature_validation( state = hass.states.get("climate.test") assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 10 assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25 + + +async def test_target_temp_high_higher_than_low( + hass: HomeAssistant, register_test_integration: MockConfigEntry +) -> None: + """Test that target high is higher than target low.""" + + class MockClimateEntityTemp(MockClimateEntity): + """Mock climate class with mocked aux heater.""" + + _attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + ) + _attr_current_temperature = 15 + _attr_target_temperature = 15 + _attr_target_temperature_high = 18 + _attr_target_temperature_low = 10 + _attr_target_temperature_step = PRECISION_WHOLE + + def set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if ATTR_TEMPERATURE in kwargs: + self._attr_target_temperature = kwargs[ATTR_TEMPERATURE] + if ATTR_TARGET_TEMP_HIGH in kwargs: + self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] + self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] + + test_climate = MockClimateEntityTemp( + name="Test", + unique_id="unique_climate_test", + ) + + setup_test_component_platform( + hass, DOMAIN, entities=[test_climate], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("climate.test") + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 15 + 
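# test_humidity_validation above pins the error message, the translation key
# ("humidity_out_of_range") and a debug log line, so the set_humidity service is
# presumably guarded by something close to this sketch.  Hedged reconstruction:
# translation_domain, the placeholder names and the logger are assumptions; the
# message and key come from the assertions in the test.
import logging

from homeassistant.exceptions import ServiceValidationError

_LOGGER = logging.getLogger(__name__)


def _validate_humidity(entity, humidity: float) -> None:
    """Reject target humidity values outside the entity's advertised range."""
    _LOGGER.debug(
        "Check valid humidity %s in range %s - %s",
        humidity,
        entity.min_humidity,
        entity.max_humidity,
    )
    if humidity < entity.min_humidity or humidity > entity.max_humidity:
        raise ServiceValidationError(
            f"Provided humidity {humidity} is not valid. "
            f"Accepted range is {entity.min_humidity} to {entity.max_humidity}",
            translation_domain="climate",
            translation_key="humidity_out_of_range",
            translation_placeholders={
                "humidity": str(humidity),
                "min_humidity": str(entity.min_humidity),
                "max_humidity": str(entity.max_humidity),
            },
        )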
assert state.attributes.get(ATTR_MIN_TEMP) == 7 + assert state.attributes.get(ATTR_MAX_TEMP) == 35 + + with pytest.raises( + ServiceValidationError, + match="Target temperature low can not be higher than Target temperature high", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TARGET_TEMP_HIGH: "15", + ATTR_TARGET_TEMP_LOW: "20", + }, + blocking=True, + ) + assert ( + str(exc.value) + == "Target temperature low can not be higher than Target temperature high" + ) + assert exc.value.translation_key == "low_temp_higher_than_high_temp" diff --git a/tests/components/climate/test_intent.py b/tests/components/climate/test_intent.py index 54e2e4ff1a6..d17f3a1747d 100644 --- a/tests/components/climate/test_intent.py +++ b/tests/components/climate/test_intent.py @@ -371,7 +371,7 @@ async def test_not_exposed( {"name": {"value": climate_1.name}}, assistant=conversation.DOMAIN, ) - assert err.value.result.no_match_reason == intent.MatchFailedReason.NAME + assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT # Expose first, hide second async_expose_entity(hass, conversation.DOMAIN, climate_1.entity_id, True) diff --git a/tests/components/climate/test_reproduce_state.py b/tests/components/climate/test_reproduce_state.py index 0632ebcc9e4..3bc91467f14 100644 --- a/tests/components/climate/test_reproduce_state.py +++ b/tests/components/climate/test_reproduce_state.py @@ -6,6 +6,7 @@ from homeassistant.components.climate import ( ATTR_FAN_MODE, ATTR_HUMIDITY, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, @@ -14,6 +15,7 @@ from homeassistant.components.climate import ( SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, + SERVICE_SET_SWING_HORIZONTAL_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, HVACMode, @@ -96,6 +98,7 @@ async def test_state_with_context(hass: HomeAssistant) -> None: [ (SERVICE_SET_PRESET_MODE, ATTR_PRESET_MODE), (SERVICE_SET_SWING_MODE, ATTR_SWING_MODE), + (SERVICE_SET_SWING_HORIZONTAL_MODE, ATTR_SWING_HORIZONTAL_MODE), (SERVICE_SET_FAN_MODE, ATTR_FAN_MODE), (SERVICE_SET_HUMIDITY, ATTR_HUMIDITY), (SERVICE_SET_TEMPERATURE, ATTR_TEMPERATURE), @@ -122,6 +125,7 @@ async def test_attribute(hass: HomeAssistant, service, attribute) -> None: [ (SERVICE_SET_PRESET_MODE, ATTR_PRESET_MODE), (SERVICE_SET_SWING_MODE, ATTR_SWING_MODE), + (SERVICE_SET_SWING_HORIZONTAL_MODE, ATTR_SWING_HORIZONTAL_MODE), (SERVICE_SET_FAN_MODE, ATTR_FAN_MODE), ], ) diff --git a/tests/components/climate/test_significant_change.py b/tests/components/climate/test_significant_change.py index f060344722a..7d709090357 100644 --- a/tests/components/climate/test_significant_change.py +++ b/tests/components/climate/test_significant_change.py @@ -10,6 +10,7 @@ from homeassistant.components.climate import ( ATTR_HUMIDITY, ATTR_HVAC_ACTION, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, @@ -66,6 +67,18 @@ async def test_significant_state_change(hass: HomeAssistant) -> None: ), (METRIC, {ATTR_SWING_MODE: "old_value"}, {ATTR_SWING_MODE: "old_value"}, False), (METRIC, {ATTR_SWING_MODE: "old_value"}, {ATTR_SWING_MODE: "new_value"}, True), + ( + METRIC, + {ATTR_SWING_HORIZONTAL_MODE: "old_value"}, + {ATTR_SWING_HORIZONTAL_MODE: "old_value"}, + False, + ), + ( + METRIC, + {ATTR_SWING_HORIZONTAL_MODE: "old_value"}, + {ATTR_SWING_HORIZONTAL_MODE: "new_value"}, + True, + ), # multiple 
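# test_target_temp_high_higher_than_low above fixes the error message and the
# translation key ("low_temp_higher_than_high_temp"), implying an ordering check
# on the set_temperature service data along these lines.  Hedged sketch only;
# translation_domain is an assumption.
from homeassistant.exceptions import ServiceValidationError


def _validate_temperature_range(
    target_temp_low: float, target_temp_high: float
) -> None:
    """Ensure the requested low target does not exceed the high target."""
    if target_temp_low > target_temp_high:
        raise ServiceValidationError(
            "Target temperature low can not be higher than Target temperature high",
            translation_domain="climate",
            translation_key="low_temp_higher_than_high_temp",
        )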
attributes ( METRIC, diff --git a/tests/components/cloud/__init__.py b/tests/components/cloud/__init__.py index 18f8cd4d311..1fb9f2b0d40 100644 --- a/tests/components/cloud/__init__.py +++ b/tests/components/cloud/__init__.py @@ -35,6 +35,7 @@ PIPELINE_DATA = { "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, }, { "conversation_engine": "conversation_engine_2", @@ -49,6 +50,7 @@ PIPELINE_DATA = { "tts_voice": "The Voice", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, }, { "conversation_engine": "conversation_engine_3", @@ -63,6 +65,7 @@ PIPELINE_DATA = { "tts_voice": None, "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, }, ], "preferred_item": "01GX8ZWBAQYWNB1XV3EXEZ75DY", diff --git a/tests/components/cloud/conftest.py b/tests/components/cloud/conftest.py index 2edd9571bdd..7002f7c39ec 100644 --- a/tests/components/cloud/conftest.py +++ b/tests/components/cloud/conftest.py @@ -3,13 +3,14 @@ from collections.abc import AsyncGenerator, Callable, Coroutine, Generator from pathlib import Path from typing import Any -from unittest.mock import DEFAULT, MagicMock, PropertyMock, patch +from unittest.mock import DEFAULT, AsyncMock, MagicMock, PropertyMock, patch from hass_nabucasa import Cloud from hass_nabucasa.auth import CognitoAuth from hass_nabucasa.cloudhooks import Cloudhooks from hass_nabucasa.const import DEFAULT_SERVERS, DEFAULT_VALUES, STATE_CONNECTED from hass_nabucasa.google_report_state import GoogleReportState +from hass_nabucasa.ice_servers import IceServers from hass_nabucasa.iot import CloudIoT from hass_nabucasa.remote import RemoteUI from hass_nabucasa.voice import Voice @@ -68,6 +69,12 @@ async def cloud_fixture() -> AsyncGenerator[MagicMock]: ) mock_cloud.voice = MagicMock(spec=Voice) mock_cloud.started = None + mock_cloud.ice_servers = MagicMock( + spec=IceServers, + async_register_ice_servers_listener=AsyncMock( + return_value=lambda: "mock-unregister" + ), + ) def set_up_mock_cloud( cloud_client: CloudClient, mode: str, **kwargs: Any diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py new file mode 100644 index 00000000000..5e607bbc70b --- /dev/null +++ b/tests/components/cloud/test_backup.py @@ -0,0 +1,573 @@ +"""Test the cloud backup platform.""" + +from collections.abc import AsyncGenerator, AsyncIterator, Generator +from io import StringIO +from typing import Any +from unittest.mock import Mock, PropertyMock, patch + +from aiohttp import ClientError +from hass_nabucasa import CloudError +import pytest +from yarl import URL + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + Folder, +) +from homeassistant.components.cloud import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator, MagicMock, WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def setup_integration( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + cloud: MagicMock, + cloud_logged_in: None, +) -> AsyncGenerator[None]: + """Set up cloud integration.""" + with patch("homeassistant.components.backup.is_hassio", return_value=False): + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await 
hass.async_block_till_done() + yield + + +@pytest.fixture +def mock_delete_file() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_delete_file", + spec_set=True, + ) as delete_file: + yield delete_file + + +@pytest.fixture +def mock_get_download_details() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_download_details", + spec_set=True, + ) as download_details: + download_details.return_value = { + "url": ( + "https://blabla.cloudflarestorage.com/blabla/backup/" + "462e16810d6841228828d9dd2f9e341e.tar?X-Amz-Algorithm=blah" + ), + } + yield download_details + + +@pytest.fixture +def mock_get_upload_details() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_upload_details", + spec_set=True, + ) as download_details: + download_details.return_value = { + "url": ( + "https://blabla.cloudflarestorage.com/blabla/backup/" + "ea5c969e492c49df89d432a1483b8dc3.tar?X-Amz-Algorithm=blah" + ), + "headers": { + "content-md5": "HOhSM3WZkpHRYGiz4YRGIQ==", + "x-amz-meta-storage-type": "backup", + "x-amz-meta-b64json": ( + "eyJhZGRvbnMiOltdLCJiYWNrdXBfaWQiOiJjNDNiNWU2MCIsImRhdGUiOiIyMDI0LT" + "EyLTAzVDA0OjI1OjUwLjMyMDcwMy0wNTowMCIsImRhdGFiYXNlX2luY2x1ZGVkIjpm" + "YWxzZSwiZm9sZGVycyI6W10sImhvbWVhc3Npc3RhbnRfaW5jbHVkZWQiOnRydWUsIm" + "hvbWVhc3Npc3RhbnRfdmVyc2lvbiI6IjIwMjQuMTIuMC5kZXYwIiwibmFtZSI6ImVy" + "aWsiLCJwcm90ZWN0ZWQiOnRydWUsInNpemUiOjM1NjI0OTYwfQ==" + ), + }, + } + yield download_details + + +@pytest.fixture +def mock_list_files() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_list", spec_set=True + ) as list_files: + list_files.return_value = [ + { + "Key": "462e16810d6841228828d9dd2f9e341e.tar", + "LastModified": "2024-11-22T10:49:01.182Z", + "Size": 34519040, + "Metadata": { + "addons": [], + "backup_id": "23e64aec", + "date": "2024-11-22T11:48:48.727189+01:00", + "database_included": True, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0.dev0", + "name": "Core 2024.12.0.dev0", + "protected": False, + "size": 34519040, + "storage-type": "backup", + }, + } + ] + yield list_files + + +@pytest.fixture +def cloud_logged_in(cloud: MagicMock): + """Mock cloud logged in.""" + type(cloud).is_logged_in = PropertyMock(return_value=True) + + +async def test_agents_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": "backup.local"}, {"agent_id": "cloud.cloud"}], + } + + +async def test_agents_list_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + cloud: MagicMock, + mock_list_files: Mock, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + mock_list_files.assert_called_once_with(cloud, storage_type="backup") + + assert response["success"] + assert response["result"]["agent_errors"] == {} + assert response["result"]["backups"] == [ + { + "addons": [], + "backup_id": "23e64aec", + "date": "2024-11-22T11:48:48.727189+01:00", + "database_included": True, + 
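# The "x-amz-meta-b64json" header in the upload-details fixture above carries
# the backup metadata as base64-encoded JSON (hence the header name).  A small,
# self-contained helper for inspecting such a header while debugging these
# tests; the function name is ours, not part of the cloud backup code.
import base64
import json
from typing import Any


def _decode_backup_metadata(b64json_header: str) -> dict[str, Any]:
    """Decode the base64-encoded JSON metadata attached to a cloud backup upload."""
    return json.loads(base64.b64decode(b64json_header))


# For the fixture above this returns a dict with keys such as "backup_id",
# "name", "protected" and "size".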
"folders": [], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0.dev0", + "name": "Core 2024.12.0.dev0", + "protected": False, + "size": 34519040, + "agent_ids": ["cloud.cloud"], + "failed_agent_ids": [], + "with_strategy_settings": False, + } + ] + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +async def test_agents_list_backups_fail_cloud( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + cloud: MagicMock, + mock_list_files: Mock, + side_effect: Exception, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + mock_list_files.side_effect = side_effect + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"cloud.cloud": "Failed to list backups"}, + "backups": [], + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + +@pytest.mark.parametrize( + ("backup_id", "expected_result"), + [ + ( + "23e64aec", + { + "addons": [], + "backup_id": "23e64aec", + "date": "2024-11-22T11:48:48.727189+01:00", + "database_included": True, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0.dev0", + "name": "Core 2024.12.0.dev0", + "protected": False, + "size": 34519040, + "agent_ids": ["cloud.cloud"], + "failed_agent_ids": [], + "with_strategy_settings": False, + }, + ), + ( + "12345", + None, + ), + ], + ids=["found", "not_found"], +) +async def test_agents_get_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + cloud: MagicMock, + backup_id: str, + expected_result: dict[str, Any] | None, + mock_list_files: Mock, +) -> None: + """Test agent get backup.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) + response = await client.receive_json() + mock_list_files.assert_called_once_with(cloud, storage_type="backup") + + assert response["success"] + assert response["result"]["agent_errors"] == {} + assert response["result"]["backup"] == expected_result + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_get_download_details: Mock, +) -> None: + """Test agent download backup.""" + client = await hass_client() + backup_id = "23e64aec" + + aioclient_mock.get( + mock_get_download_details.return_value["url"], content=b"backup data" + ) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download_fail_cloud( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_get_download_details: Mock, + side_effect: Exception, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "23e64aec" + mock_get_download_details.side_effect = side_effect + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 500 + content = await resp.content.read() + assert "Failed to get download details" in content.decode() + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def 
test_agents_download_fail_get( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_get_download_details: Mock, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "23e64aec" + + aioclient_mock.get(mock_get_download_details.return_value["url"], status=500) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 500 + content = await resp.content.read() + assert "Failed to download backup" in content.decode() + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download_not_found( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test agent download backup raises error if not found.""" + client = await hass_client() + backup_id = "1234" + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 404 + assert await resp.content.read() == b"" + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + aioclient_mock: AiohttpClientMocker, + mock_get_upload_details: Mock, +) -> None: + """Test agent upload backup.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0.0, + ) + aioclient_mock.put(mock_get_upload_details.return_value["url"]) + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert len(aioclient_mock.mock_calls) == 1 + assert aioclient_mock.mock_calls[-1][0] == "PUT" + assert aioclient_mock.mock_calls[-1][1] == URL( + mock_get_upload_details.return_value["url"] + ) + assert isinstance(aioclient_mock.mock_calls[-1][2], AsyncIterator) + + assert resp.status == 201 + assert f"Uploading backup {backup_id}" in caplog.text + + +@pytest.mark.parametrize("put_mock_kwargs", [{"status": 500}, {"exc": TimeoutError}]) +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload_fail_put( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + aioclient_mock: AiohttpClientMocker, + mock_get_upload_details: Mock, + put_mock_kwargs: dict[str, Any], +) -> None: + """Test agent upload backup fails.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0.0, + ) + 
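# The upload assertions above (exactly one PUT to the signed URL returned by the
# upload-details fixture, with an async iterator as body) outline the agent's
# happy path: fetch upload details from the cloud files API, then stream the
# protected backup to that URL with the returned headers.  A hedged sketch of
# that flow using only names visible in this diff, not the agent's actual code.
from collections.abc import AsyncIterator
from typing import Any

from aiohttp import ClientSession


async def _upload_backup_stream(
    session: ClientSession,
    upload_details: dict[str, Any],
    stream: AsyncIterator[bytes],
) -> None:
    """PUT the backup stream to the pre-signed cloud storage URL."""
    resp = await session.put(
        upload_details["url"],
        data=stream,
        headers=upload_details["headers"],
    )
    resp.raise_for_status()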
aioclient_mock.put(mock_get_upload_details.return_value["url"], **put_mock_kwargs) + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert "Error during backup upload - Failed to upload backup" in caplog.text + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +@pytest.mark.usefixtures("cloud_logged_in") +async def test_agents_upload_fail_cloud( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_get_upload_details: Mock, + side_effect: Exception, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test agent upload backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "test-backup" + mock_get_upload_details.side_effect = side_effect + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0.0, + ) + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert "Error during backup upload - Failed to get upload details" in caplog.text + + +async def test_agents_upload_not_protected( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test agent upload backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + with ( + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + ): + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert "Error during backup upload - Cloud backups must be protected" in caplog.text + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_delete( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_delete_file: Mock, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "23e64aec" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert 
response["success"] + assert response["result"] == {"agent_errors": {}} + mock_delete_file.assert_called_once() + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_delete_fail_cloud( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_delete_file: Mock, + side_effect: Exception, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "23e64aec" + mock_delete_file.side_effect = side_effect + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"cloud.cloud": "Failed to delete backup"} + } + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_delete_not_found( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test agent download backup raises error if not found.""" + client = await hass_ws_client(hass) + backup_id = "1234" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}} diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 7af163cc49d..43eccc5ef9c 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -1,5 +1,6 @@ """Test the cloud.iot module.""" +from collections.abc import Callable, Coroutine from datetime import timedelta from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch @@ -183,6 +184,59 @@ async def test_handler_google_actions_disabled( assert resp["payload"] == response_payload +async def test_handler_ice_servers( + hass: HomeAssistant, + cloud: MagicMock, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], +) -> None: + """Test handler ICE servers.""" + assert await async_setup_component(hass, "cloud", {"cloud": {}}) + await hass.async_block_till_done() + # make sure that preferences will not be reset + await cloud.client.prefs.async_set_username(cloud.username) + await set_cloud_prefs( + { + "alexa_enabled": False, + "google_enabled": False, + } + ) + + await cloud.login("test-user", "test-pass") + await cloud.client.cloud_connected() + + assert cloud.client._cloud_ice_servers_listener is not None + assert cloud.client._cloud_ice_servers_listener() == "mock-unregister" + + +async def test_handler_ice_servers_disabled( + hass: HomeAssistant, + cloud: MagicMock, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], +) -> None: + """Test handler ICE servers when user has disabled it.""" + assert await async_setup_component(hass, "cloud", {"cloud": {}}) + await hass.async_block_till_done() + # make sure that preferences will not be reset + await cloud.client.prefs.async_set_username(cloud.username) + await set_cloud_prefs( + { + "alexa_enabled": False, + "google_enabled": False, + } + ) + + await cloud.login("test-user", "test-pass") + await cloud.client.cloud_connected() + + await set_cloud_prefs( + { + "cloud_ice_servers_enabled": False, + } + ) + + assert cloud.client._cloud_ice_servers_listener is None + + async def test_webhook_msg( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -475,13 +529,16 @@ async def test_logged_out( 
await cloud.client.cloud_connected() await hass.async_block_till_done() + assert cloud.client._cloud_ice_servers_listener is not None + # Simulate logged out await cloud.logout() await hass.async_block_till_done() - # Check we clean up Alexa and Google + # Check we clean up Alexa, Google and ICE servers assert cloud.client._alexa_config is None assert cloud.client._google_config is None + assert cloud.client._cloud_ice_servers_listener is None google_config_mock.async_deinitialize.assert_called_once_with() alexa_config_mock.async_deinitialize.assert_called_once_with() diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index 5ee9af88681..b35cc03ac73 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -8,12 +8,19 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch import aiohttp from hass_nabucasa import thingtalk -from hass_nabucasa.auth import Unauthenticated, UnknownError +from hass_nabucasa.auth import ( + InvalidTotpCode, + MFARequired, + Unauthenticated, + UnknownError, +) from hass_nabucasa.const import STATE_CONNECTED from hass_nabucasa.voice import TTS_VOICES import pytest from homeassistant.components.alexa import errors as alexa_errors + +# pylint: disable-next=hass-component-root-import from homeassistant.components.alexa.entities import LightCapabilities from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY from homeassistant.components.cloud.const import DEFAULT_EXPOSED_DOMAINS, DOMAIN @@ -376,6 +383,128 @@ async def test_login_view_invalid_credentials( assert req.status == HTTPStatus.UNAUTHORIZED +async def test_login_view_mfa_required( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when MFA is required.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={"session": "tokens"}) + + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + +async def test_login_view_mfa_required_tokens_missing( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when MFA is required, code is provided, but session tokens are missing.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={}) + + # Login with password and get MFA required error + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + # Login with TOTP code and get MFA expired error + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "code": "123346"}, + ) + + assert req.status == HTTPStatus.BAD_REQUEST + res = await req.json() + assert res["code"] == "mfaexpiredornotstarted" + + +async def test_login_view_mfa_password_and_totp_provided( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when password and TOTP code provided at once.""" + cloud_client = await hass_client() + + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "password": "my_password", "code": "123346"}, + ) + + assert req.status == HTTPStatus.BAD_REQUEST + + +async def 
test_login_view_invalid_totp_code( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when MFA is required and invalid code is provided.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={"session": "tokens"}) + cloud.login_verify_totp.side_effect = InvalidTotpCode + + # Login with password and get MFA required error + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + # Login with TOTP code and get invalid TOTP code error + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "code": "123346"}, + ) + + assert req.status == HTTPStatus.BAD_REQUEST + res = await req.json() + assert res["code"] == "invalidtotpcode" + + +async def test_login_view_valid_totp_provided( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in with valid TOTP code.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={"session": "tokens"}) + + # Login with password and get MFA required error + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + # Login with TOTP code and get success response + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "code": "123346"}, + ) + + assert req.status == HTTPStatus.OK + result = await req.json() + assert result == {"success": True, "cloud_pipeline": None} + + async def test_login_view_unknown_error( cloud: MagicMock, setup_cloud: None, @@ -782,6 +911,7 @@ async def test_websocket_status( "google_report_state": True, "remote_allow_remote_enable": True, "remote_enabled": False, + "cloud_ice_servers_enabled": True, "tts_default_voice": ["en-US", "JennyNeural"], }, "alexa_entities": { @@ -901,6 +1031,7 @@ async def test_websocket_update_preferences( assert cloud.client.prefs.alexa_enabled assert cloud.client.prefs.google_secure_devices_pin is None assert cloud.client.prefs.remote_allow_remote_enable is True + assert cloud.client.prefs.cloud_ice_servers_enabled is True client = await hass_ws_client(hass) @@ -912,6 +1043,7 @@ async def test_websocket_update_preferences( "google_secure_devices_pin": "1234", "tts_default_voice": ["en-GB", "RyanNeural"], "remote_allow_remote_enable": False, + "cloud_ice_servers_enabled": False, } ) response = await client.receive_json() @@ -921,6 +1053,7 @@ async def test_websocket_update_preferences( assert not cloud.client.prefs.alexa_enabled assert cloud.client.prefs.google_secure_devices_pin == "1234" assert cloud.client.prefs.remote_allow_remote_enable is False + assert cloud.client.prefs.cloud_ice_servers_enabled is False assert cloud.client.prefs.tts_default_voice == ("en-GB", "RyanNeural") diff --git a/tests/components/cloud/test_system_health.py b/tests/components/cloud/test_system_health.py index 60b23e47fec..6293f44067d 100644 --- a/tests/components/cloud/test_system_health.py +++ b/tests/components/cloud/test_system_health.py @@ -50,7 +50,12 @@ async def test_cloud_system_health( await cloud.client.async_system_message({"region": "xx-earth-616"}) await set_cloud_prefs( - {"alexa_enabled": True, "google_enabled": False, 
"remote_enabled": True} + { + "alexa_enabled": True, + "google_enabled": False, + "remote_enabled": True, + "cloud_ice_servers_enabled": True, + } ) info = await get_system_health_info(hass, "cloud") @@ -70,6 +75,7 @@ async def test_cloud_system_health( "remote_server": "us-west-1", "alexa_enabled": True, "google_enabled": False, + "cloud_ice_servers_enabled": True, "can_reach_cert_server": "ok", "can_reach_cloud_auth": {"type": "failed", "error": "unreachable"}, "can_reach_cloud": "ok", diff --git a/tests/components/cloud/test_tts.py b/tests/components/cloud/test_tts.py index 52a9bc19ea2..bf9fd7302ae 100644 --- a/tests/components/cloud/test_tts.py +++ b/tests/components/cloud/test_tts.py @@ -23,11 +23,11 @@ from homeassistant.components.tts import ( ATTR_MEDIA_PLAYER_ENTITY_ID, ATTR_MESSAGE, DOMAIN as TTS_DOMAIN, + get_engine_instance, ) -from homeassistant.components.tts.helper import get_engine_instance -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component @@ -227,25 +227,21 @@ async def test_get_tts_audio( await on_start_callback() client = await hass_client() - url = "/api/tts_get_url" - data |= {"message": "There is someone at the door."} + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= {"message": "There is someone at the door."} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -280,25 +276,21 @@ async def test_get_tts_audio_logged_out( await hass.async_block_till_done() client = await hass_client() - url = "/api/tts_get_url" - data |= {"message": "There is someone at the door."} + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= {"message": "There is someone at the door."} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": 
("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -342,28 +334,24 @@ async def test_tts_entity( assert state assert state.state == STATE_UNKNOWN - url = "/api/tts_get_url" - data = { - "engine_id": entity_id, - "message": "There is someone at the door.", - } + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data = { + "engine_id": entity_id, + "message": "There is someone at the door.", + } - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{entity_id}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{entity_id}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -482,29 +470,25 @@ async def test_deprecated_voice( client = await hass_client() # Test with non deprecated voice. - url = "/api/tts_get_url" - data |= { - "message": "There is someone at the door.", - "language": language, - "options": {"voice": replacement_voice}, - } + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= { + "message": "There is someone at the door.", + "language": language, + "options": {"voice": replacement_voice}, + } - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_87567e3e29_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_87567e3e29_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -522,22 +506,18 @@ async def test_deprecated_voice( # Test with deprecated voice. 
data["options"] = {"voice": deprecated_voice} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_13646b7d32_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_13646b7d32_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() issue_id = f"deprecated_voice_{deprecated_voice}" @@ -631,28 +611,24 @@ async def test_deprecated_gender( client = await hass_client() # Test without deprecated gender option. - url = "/api/tts_get_url" - data |= { - "message": "There is someone at the door.", - "language": language, - } + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= { + "message": "There is someone at the door.", + "language": language, + } - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_6e8b81ac47_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_6e8b81ac47_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -667,22 +643,18 @@ async def test_deprecated_gender( # Test with deprecated gender option. 
data["options"] = {"gender": gender_option} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_dd0e95eb04_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_dd0e95eb04_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() issue_id = "deprecated_gender" diff --git a/tests/components/cloudflare/test_config_flow.py b/tests/components/cloudflare/test_config_flow.py index 1278113c0c7..f34a423833c 100644 --- a/tests/components/cloudflare/test_config_flow.py +++ b/tests/components/cloudflare/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock import pycfdns from homeassistant.components.cloudflare.const import CONF_RECORDS, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_TOKEN, CONF_SOURCE, CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -151,15 +151,7 @@ async def test_reauth_flow(hass: HomeAssistant, cfupdate_flow: MagicMock) -> Non entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_CONFIG) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/co2signal/conftest.py b/tests/components/co2signal/conftest.py index d5cca448569..680465c2537 100644 --- a/tests/components/co2signal/conftest.py +++ b/tests/components/co2signal/conftest.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest -from homeassistant.components.co2signal import DOMAIN +from homeassistant.components.co2signal.const import DOMAIN from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/co2signal/snapshots/test_diagnostics.ambr b/tests/components/co2signal/snapshots/test_diagnostics.ambr index 645e0bd87e9..4159c8ec1a1 100644 --- a/tests/components/co2signal/snapshots/test_diagnostics.ambr +++ b/tests/components/co2signal/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'location': '', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'co2signal', 'entry_id': '904a74160aa6f335526706bee85dfb83', 'minor_version': 1, @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/co2signal/test_config_flow.py b/tests/components/co2signal/test_config_flow.py index 7397b6e2355..f8f94d44126 100644 --- 
a/tests/components/co2signal/test_config_flow.py +++ b/tests/components/co2signal/test_config_flow.py @@ -11,7 +11,8 @@ from aioelectricitymaps import ( import pytest from homeassistant import config_entries -from homeassistant.components.co2signal import DOMAIN, config_flow +from homeassistant.components.co2signal import config_flow +from homeassistant.components.co2signal.const import DOMAIN from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -43,7 +44,7 @@ async def test_form_home(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "CO2 Signal" + assert result2["title"] == "Electricity Maps" assert result2["data"] == { "api_key": "api_key", } @@ -184,7 +185,7 @@ async def test_form_error_handling( await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "CO2 Signal" + assert result["title"] == "Electricity Maps" assert result["data"] == { "api_key": "api_key", } @@ -198,17 +199,10 @@ async def test_reauth( """Test reauth flow.""" config_entry.add_to_hass(hass) - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=None, - ) + init_result = await config_entry.start_reauth_flow(hass) assert init_result["type"] is FlowResultType.FORM - assert init_result["step_id"] == "reauth" + assert init_result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.co2signal.async_setup_entry", diff --git a/tests/components/co2signal/test_sensor.py b/tests/components/co2signal/test_sensor.py index e9f46e483d1..fddda17f3ed 100644 --- a/tests/components/co2signal/test_sensor.py +++ b/tests/components/co2signal/test_sensor.py @@ -109,4 +109,4 @@ async def test_sensor_reauth_triggered( assert (flows := hass.config_entries.flow.async_progress()) assert len(flows) == 1 - assert flows[0]["step_id"] == "reauth" + assert flows[0]["step_id"] == "reauth_confirm" diff --git a/tests/components/coinbase/snapshots/test_diagnostics.ambr b/tests/components/coinbase/snapshots/test_diagnostics.ambr index 4f9e75dc38b..3eab18fb9f3 100644 --- a/tests/components/coinbase/snapshots/test_diagnostics.ambr +++ b/tests/components/coinbase/snapshots/test_diagnostics.ambr @@ -30,6 +30,8 @@ 'api_token': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'coinbase', 'entry_id': '080272b77a4f80c41b94d7cdc86fd826', 'minor_version': 1, @@ -42,6 +44,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/color_extractor/test_service.py b/tests/components/color_extractor/test_service.py index 7b603420bdf..23ba5e7808c 100644 --- a/tests/components/color_extractor/test_service.py +++ b/tests/components/color_extractor/test_service.py @@ -78,7 +78,7 @@ async def setup_light(hass: HomeAssistant): # Validate starting values assert state.state == STATE_ON assert state.attributes.get(ATTR_BRIGHTNESS) == 180 - assert state.attributes.get(ATTR_RGB_COLOR) == (255, 63, 111) + assert state.attributes.get(ATTR_RGB_COLOR) == (255, 64, 112) await hass.services.async_call( LIGHT_DOMAIN, diff --git a/tests/components/comelit/const.py b/tests/components/comelit/const.py index 998c12c09b7..92fdfebfa1d 
100644 --- a/tests/components/comelit/const.py +++ b/tests/components/comelit/const.py @@ -1,6 +1,19 @@ """Common stuff for Comelit SimpleHome tests.""" -from aiocomelit.const import VEDO +from aiocomelit import ComelitVedoAreaObject, ComelitVedoZoneObject +from aiocomelit.api import ComelitSerialBridgeObject +from aiocomelit.const import ( + CLIMATE, + COVER, + IRRIGATION, + LIGHT, + OTHER, + SCENARIO, + VEDO, + WATT, + AlarmAreaState, + AlarmZoneState, +) from homeassistant.components.comelit.const import DOMAIN from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE @@ -27,3 +40,67 @@ MOCK_USER_BRIDGE_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] MOCK_USER_VEDO_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][1] FAKE_PIN = 5678 + +BRIDGE_DEVICE_QUERY = { + CLIMATE: {}, + COVER: { + 0: ComelitSerialBridgeObject( + index=0, + name="Cover0", + status=0, + human_status="closed", + type="cover", + val=0, + protected=0, + zone="Open space", + power=0.0, + power_unit=WATT, + ) + }, + LIGHT: { + 0: ComelitSerialBridgeObject( + index=0, + name="Light0", + status=0, + human_status="off", + type="light", + val=0, + protected=0, + zone="Bathroom", + power=0.0, + power_unit=WATT, + ) + }, + OTHER: {}, + IRRIGATION: {}, + SCENARIO: {}, +} + +VEDO_DEVICE_QUERY = { + "aree": { + 0: ComelitVedoAreaObject( + index=0, + name="Area0", + p1=True, + p2=False, + ready=False, + armed=False, + alarm=False, + alarm_memory=False, + sabotage=False, + anomaly=False, + in_time=False, + out_time=False, + human_status=AlarmAreaState.UNKNOWN, + ) + }, + "zone": { + 0: ComelitVedoZoneObject( + index=0, + name="Zone0", + status_api="0x000", + status=0, + human_status=AlarmZoneState.REST, + ) + }, +} diff --git a/tests/components/comelit/snapshots/test_diagnostics.ambr b/tests/components/comelit/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..877f48a4611 --- /dev/null +++ b/tests/components/comelit/snapshots/test_diagnostics.ambr @@ -0,0 +1,148 @@ +# serializer version: 1 +# name: test_entry_diagnostics_bridge + dict({ + 'device_info': dict({ + 'devices': list([ + dict({ + 'clima': list([ + ]), + }), + dict({ + 'shutter': list([ + dict({ + '0': dict({ + 'human_status': 'closed', + 'name': 'Cover0', + 'power': 0.0, + 'power_unit': 'W', + 'protected': 0, + 'status': 0, + 'val': 0, + 'zone': 'Open space', + }), + }), + ]), + }), + dict({ + 'light': list([ + dict({ + '0': dict({ + 'human_status': 'off', + 'name': 'Light0', + 'power': 0.0, + 'power_unit': 'W', + 'protected': 0, + 'status': 0, + 'val': 0, + 'zone': 'Bathroom', + }), + }), + ]), + }), + dict({ + 'other': list([ + ]), + }), + dict({ + 'irrigation': list([ + ]), + }), + dict({ + 'scenario': list([ + ]), + }), + ]), + 'last_exception': 'None', + 'last_update success': True, + }), + 'entry': dict({ + 'data': dict({ + 'host': 'fake_host', + 'pin': '**REDACTED**', + 'port': 80, + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'comelit', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'type': 'Serial bridge', + }) +# --- +# name: test_entry_diagnostics_vedo + dict({ + 'device_info': dict({ + 'devices': list([ + dict({ + 'aree': list([ + dict({ + '0': dict({ + 'alarm': False, + 'alarm_memory': False, + 'anomaly': False, + 'armed': False, + 'human_status': 'unknown', + 'in_time': False, + 'name': 'Area0', + 
'out_time': False, + 'p1': True, + 'p2': False, + 'ready': False, + 'sabotage': False, + }), + }), + ]), + }), + dict({ + 'zone': list([ + dict({ + '0': dict({ + 'human_status': 'rest', + 'name': 'Zone0', + 'status': 0, + 'status_api': '0x000', + }), + }), + ]), + }), + ]), + 'last_exception': 'None', + 'last_update success': True, + }), + 'entry': dict({ + 'data': dict({ + 'host': 'fake_vedo_host', + 'pin': '**REDACTED**', + 'port': 8080, + 'type': 'Vedo system', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'comelit', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'type': 'Vedo system', + }) +# --- diff --git a/tests/components/comelit/test_config_flow.py b/tests/components/comelit/test_config_flow.py index 333bf09bd20..eeaea0e41e9 100644 --- a/tests/components/comelit/test_config_flow.py +++ b/tests/components/comelit/test_config_flow.py @@ -7,7 +7,7 @@ from aiocomelit import CannotAuthenticate, CannotConnect import pytest from homeassistant.components.comelit.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -100,6 +100,9 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -113,15 +116,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: ): mock_request_get.return_value.status_code = 200 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -147,6 +141,9 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch("aiocomelit.api.ComeliteSerialBridgeApi.login", side_effect=side_effect), @@ -155,15 +152,6 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> ), patch("homeassistant.components.comelit.async_setup_entry"), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ diff --git a/tests/components/comelit/test_diagnostics.py b/tests/components/comelit/test_diagnostics.py new file mode 100644 index 00000000000..39d75af1152 --- /dev/null +++ b/tests/components/comelit/test_diagnostics.py @@ -0,0 +1,81 @@ +"""Tests for 
Comelit Simplehome diagnostics platform.""" + +from __future__ import annotations + +from unittest.mock import patch + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.components.comelit.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from .const import ( + BRIDGE_DEVICE_QUERY, + MOCK_USER_BRIDGE_DATA, + MOCK_USER_VEDO_DATA, + VEDO_DEVICE_QUERY, +) + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics_bridge( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test Bridge config entry diagnostics.""" + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) + entry.add_to_hass(hass) + + with ( + patch("aiocomelit.api.ComeliteSerialBridgeApi.login"), + patch( + "aiocomelit.api.ComeliteSerialBridgeApi.get_all_devices", + return_value=BRIDGE_DEVICE_QUERY, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == ConfigEntryState.LOADED + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props( + "entry_id", + "created_at", + "modified_at", + ) + ) + + +async def test_entry_diagnostics_vedo( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test Vedo System config entry diagnostics.""" + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_VEDO_DATA) + entry.add_to_hass(hass) + + with ( + patch("aiocomelit.api.ComelitVedoApi.login"), + patch( + "aiocomelit.api.ComelitVedoApi.get_all_areas_and_zones", + return_value=VEDO_DEVICE_QUERY, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == ConfigEntryState.LOADED + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props( + "entry_id", + "created_at", + "modified_at", + ) + ) diff --git a/tests/components/comfoconnect/test_sensor.py b/tests/components/comfoconnect/test_sensor.py index fdecfa5b1c7..5cae566379a 100644 --- a/tests/components/comfoconnect/test_sensor.py +++ b/tests/components/comfoconnect/test_sensor.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch import pytest -from homeassistant.components.sensor import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -14,7 +14,7 @@ from tests.common import assert_setup_component COMPONENT = "comfoconnect" VALID_CONFIG = { COMPONENT: {"host": "1.2.3.4"}, - DOMAIN: { + SENSOR_DOMAIN: { "platform": COMPONENT, "resources": [ "current_humidity", @@ -51,8 +51,8 @@ async def setup_sensor( mock_comfoconnect_command: MagicMock, ) -> None: """Set up demo sensor component.""" - with assert_setup_component(1, DOMAIN): - await async_setup_component(hass, DOMAIN, VALID_CONFIG) + with assert_setup_component(1, SENSOR_DOMAIN): + await async_setup_component(hass, SENSOR_DOMAIN, VALID_CONFIG) await hass.async_block_till_done() diff --git a/tests/components/command_line/test_binary_sensor.py b/tests/components/command_line/test_binary_sensor.py index 5d1cd845e27..aa49410aacb 100644 --- a/tests/components/command_line/test_binary_sensor.py +++ 
b/tests/components/command_line/test_binary_sensor.py @@ -87,7 +87,7 @@ async def test_setup_platform_yaml(hass: HomeAssistant) -> None: "payload_off": "0", "value_template": "{{ value | multiply(0.1) }}", "icon": ( - '{% if this.state=="on" %} mdi:on {% else %} mdi:off {% endif %}' + '{% if this.attributes.icon=="mdi:icon2" %} mdi:icon1 {% else %} mdi:icon2 {% endif %}' ), } } @@ -101,7 +101,15 @@ async def test_template(hass: HomeAssistant, load_yaml_integration: None) -> Non entity_state = hass.states.get("binary_sensor.test") assert entity_state assert entity_state.state == STATE_ON - assert entity_state.attributes.get("icon") == "mdi:on" + assert entity_state.attributes.get("icon") == "mdi:icon2" + + async_fire_time_changed(hass, dt_util.now() + timedelta(seconds=30)) + await hass.async_block_till_done(wait_background_tasks=True) + + entity_state = hass.states.get("binary_sensor.test") + assert entity_state + assert entity_state.state == STATE_ON + assert entity_state.attributes.get("icon") == "mdi:icon1" @pytest.mark.parametrize( diff --git a/tests/components/command_line/test_cover.py b/tests/components/command_line/test_cover.py index b81d915c6d5..426968eccc5 100644 --- a/tests/components/command_line/test_cover.py +++ b/tests/components/command_line/test_cover.py @@ -14,7 +14,11 @@ import pytest from homeassistant import setup from homeassistant.components.command_line import DOMAIN from homeassistant.components.command_line.cover import CommandCover -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, SCAN_INTERVAL +from homeassistant.components.cover import ( + DOMAIN as COVER_DOMAIN, + SCAN_INTERVAL, + CoverState, +) from homeassistant.components.homeassistant import ( DOMAIN as HA_DOMAIN, SERVICE_UPDATE_ENTITY, @@ -24,7 +28,6 @@ from homeassistant.const import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, - STATE_OPEN, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -389,7 +392,7 @@ async def test_availability( entity_state = hass.states.get("cover.test") assert entity_state - assert entity_state.state == STATE_OPEN + assert entity_state.state == CoverState.OPEN hass.states.async_set("sensor.input1", "off") await hass.async_block_till_done() @@ -419,13 +422,19 @@ async def test_icon_template(hass: HomeAssistant) -> None: "command_close": f"echo 0 > {path}", "command_stop": f"echo 0 > {path}", "name": "Test", - "icon": "{% if this.state=='open' %} mdi:open {% else %} mdi:closed {% endif %}", + "icon": '{% if this.attributes.icon=="mdi:icon2" %} mdi:icon1 {% else %} mdi:icon2 {% endif %}', } } ] }, ) await hass.async_block_till_done() + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: "cover.test"}, + blocking=True, + ) await hass.services.async_call( COVER_DOMAIN, @@ -435,7 +444,7 @@ async def test_icon_template(hass: HomeAssistant) -> None: ) entity_state = hass.states.get("cover.test") assert entity_state - assert entity_state.attributes.get("icon") == "mdi:closed" + assert entity_state.attributes.get("icon") == "mdi:icon1" await hass.services.async_call( COVER_DOMAIN, @@ -445,4 +454,4 @@ async def test_icon_template(hass: HomeAssistant) -> None: ) entity_state = hass.states.get("cover.test") assert entity_state - assert entity_state.attributes.get("icon") == "mdi:open" + assert entity_state.attributes.get("icon") == "mdi:icon2" diff --git a/tests/components/command_line/test_switch.py b/tests/components/command_line/test_switch.py index 549e729892c..d62410fa792 100644 --- 
a/tests/components/command_line/test_switch.py +++ b/tests/components/command_line/test_switch.py @@ -552,7 +552,7 @@ async def test_templating(hass: HomeAssistant) -> None: "command_off": f"echo 0 > {path}", "value_template": '{{ value=="1" }}', "icon": ( - '{% if this.state=="on" %} mdi:on {% else %} mdi:off {% endif %}' + '{% if this.attributes.icon=="mdi:icon2" %} mdi:icon1 {% else %} mdi:icon2 {% endif %}' ), "name": "Test", } @@ -564,7 +564,7 @@ async def test_templating(hass: HomeAssistant) -> None: "command_off": f"echo 0 > {path}", "value_template": '{{ value=="1" }}', "icon": ( - '{% if states("switch.test2")=="on" %} mdi:on {% else %} mdi:off {% endif %}' + '{% if states("switch.test")=="off" %} mdi:off {% else %} mdi:on {% endif %}' ), "name": "Test2", }, @@ -595,7 +595,7 @@ async def test_templating(hass: HomeAssistant) -> None: entity_state = hass.states.get("switch.test") entity_state2 = hass.states.get("switch.test2") assert entity_state.state == STATE_ON - assert entity_state.attributes.get("icon") == "mdi:on" + assert entity_state.attributes.get("icon") == "mdi:icon2" assert entity_state2.state == STATE_ON assert entity_state2.attributes.get("icon") == "mdi:on" diff --git a/tests/components/config/test_automation.py b/tests/components/config/test_automation.py index 89113070367..40a9c85a8d3 100644 --- a/tests/components/config/test_automation.py +++ b/tests/components/config/test_automation.py @@ -78,7 +78,7 @@ async def test_update_automation_config( resp = await client.post( "/api/config/automation/config/moon", - data=json.dumps({"trigger": [], "action": [], "condition": []}), + data=json.dumps({"triggers": [], "actions": [], "conditions": []}), ) await hass.async_block_till_done() assert sorted(hass.states.async_entity_ids("automation")) == [ @@ -91,8 +91,13 @@ async def test_update_automation_config( assert result == {"result": "ok"} new_data = hass_config_store["automations.yaml"] - assert list(new_data[1]) == ["id", "trigger", "condition", "action"] - assert new_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []} + assert list(new_data[1]) == ["id", "triggers", "conditions", "actions"] + assert new_data[1] == { + "id": "moon", + "triggers": [], + "conditions": [], + "actions": [], + } @pytest.mark.parametrize("automation_config", [{}]) @@ -101,18 +106,18 @@ async def test_update_automation_config( [ ( {"action": []}, - "required key not provided @ data['trigger']", + "required key not provided @ data['triggers']", ), ( { - "trigger": {"platform": "automation"}, + "trigger": {"trigger": "automation"}, "action": [], }, "Integration 'automation' does not provide trigger support", ), ( { - "trigger": {"platform": "event", "event_type": "test_event"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, "condition": { "condition": "state", # The UUID will fail being resolved to en entity_id @@ -125,7 +130,7 @@ async def test_update_automation_config( ), ( { - "trigger": {"platform": "event", "event_type": "test_event"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, "action": { "condition": "state", # The UUID will fail being resolved to en entity_id @@ -254,7 +259,7 @@ async def test_update_remove_key_automation_config( resp = await client.post( "/api/config/automation/config/moon", - data=json.dumps({"trigger": [], "action": [], "condition": []}), + data=json.dumps({"triggers": [], "actions": [], "conditions": []}), ) await hass.async_block_till_done() assert sorted(hass.states.async_entity_ids("automation")) == [ @@ 
-267,8 +272,13 @@ async def test_update_remove_key_automation_config( assert result == {"result": "ok"} new_data = hass_config_store["automations.yaml"] - assert list(new_data[1]) == ["id", "trigger", "condition", "action"] - assert new_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []} + assert list(new_data[1]) == ["id", "triggers", "conditions", "actions"] + assert new_data[1] == { + "id": "moon", + "triggers": [], + "conditions": [], + "actions": [], + } @pytest.mark.parametrize("automation_config", [{}]) @@ -297,7 +307,7 @@ async def test_bad_formatted_automations( resp = await client.post( "/api/config/automation/config/moon", - data=json.dumps({"trigger": [], "action": [], "condition": []}), + data=json.dumps({"triggers": [], "actions": [], "conditions": []}), ) await hass.async_block_till_done() assert sorted(hass.states.async_entity_ids("automation")) == [ @@ -312,7 +322,12 @@ async def test_bad_formatted_automations( # Verify ID added new_data = hass_config_store["automations.yaml"] assert "id" in new_data[0] - assert new_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []} + assert new_data[1] == { + "id": "moon", + "triggers": [], + "conditions": [], + "actions": [], + } @pytest.mark.parametrize( @@ -321,12 +336,12 @@ async def test_bad_formatted_automations( [ { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "id": "moon", - "trigger": {"platform": "event", "event_type": "test_event"}, + "trigger": {"trigger": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ], diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index a4dc91d5355..0a1ffbe87b3 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -17,6 +17,7 @@ from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_flow, config_validation as cv +from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.loader import IntegrationNotFound from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -136,11 +137,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": True, "supports_reconfigure": False, "supports_remove_device": False, @@ -154,11 +157,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": core_ce.ConfigEntryState.SETUP_ERROR.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -172,11 +177,13 @@ async def test_get_entries(hass: HomeAssistant, 
client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -190,11 +197,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -208,11 +217,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -254,9 +265,7 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: async def test_remove_entry(hass: HomeAssistant, client: TestClient) -> None: """Test removing an entry via the API.""" - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) resp = await client.delete(f"/api/config/config_entries/entry/{entry.entry_id}") assert resp.status == HTTPStatus.OK @@ -267,11 +276,9 @@ async def test_remove_entry(hass: HomeAssistant, client: TestClient) -> None: async def test_reload_entry(hass: HomeAssistant, client: TestClient) -> None: """Test reloading an entry via the API.""" - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) - hass.config.components.add("kitchen_sink") + hass.config.components.add("test") resp = await client.post( f"/api/config/config_entries/entry/{entry.entry_id}/reload" ) @@ -392,6 +399,10 @@ async def test_available_flows( ############################ +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.error.Should be unique."], +) async def test_initialize_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can initialize a flow.""" mock_platform(hass, "test.config_flow", None) @@ -404,7 +415,7 @@ async def test_initialize_flow(hass: HomeAssistant, client: TestClient) -> None: return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), description_placeholders={ "url": "https://example.com", "show_advanced_options": self.show_advanced_options, @@ -499,6 +510,10 @@ async def test_initialize_flow_unauth( assert resp.status == HTTPStatus.UNAUTHORIZED +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.abort.bla"], +) async def test_abort(hass: HomeAssistant, client: TestClient) -> None: """Test a flow that aborts.""" 
mock_platform(hass, "test.config_flow", None) @@ -566,11 +581,13 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -581,6 +598,7 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, + "subentries": [], } @@ -649,11 +667,13 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -664,6 +684,7 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, + "subentries": [], } @@ -767,6 +788,10 @@ async def test_get_progress_index_unauth( assert response["error"]["code"] == "unauthorized" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.error.Should be unique."], +) async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can query the API for same result as we get from init a flow.""" mock_platform(hass, "test.config_flow", None) @@ -779,7 +804,7 @@ async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> Non return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), errors={"username": "Should be unique."}, ) @@ -791,9 +816,7 @@ async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> Non assert resp.status == HTTPStatus.OK data = await resp.json() - resp2 = await client.get( - "/api/config/config_entries/flow/{}".format(data["flow_id"]) - ) + resp2 = await client.get(f"/api/config/config_entries/flow/{data['flow_id']}") assert resp2.status == HTTPStatus.OK data2 = await resp2.json() @@ -801,6 +824,10 @@ async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> Non assert data == data2 +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.error.Should be unique."], +) async def test_get_progress_flow_unauth( hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: @@ -815,7 +842,7 @@ async def test_get_progress_flow_unauth( return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), errors={"username": "Should be unique."}, ) @@ -829,9 +856,7 @@ async def test_get_progress_flow_unauth( hass_admin_user.groups = [] - resp2 = await client.get( - "/api/config/config_entries/flow/{}".format(data["flow_id"]) - ) + resp2 = await client.get(f"/api/config/config_entries/flow/{data['flow_id']}") assert resp2.status == HTTPStatus.UNAUTHORIZED @@ -849,7 +874,7 @@ async def test_options_flow(hass: HomeAssistant, client: TestClient) -> None: schema[vol.Required("enabled")] = bool return 
self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), description_placeholders={"enabled": "Set to true to be true"}, ) @@ -1079,6 +1104,273 @@ async def test_options_flow_with_invalid_data( assert data == {"errors": {"choices": "invalid is not a valid option"}} +async def test_subentry_flow(hass: HomeAssistant, client) -> None: + """Test we can start a subentry flow.""" + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_init(self, user_input=None): + raise NotImplementedError + + async def async_step_user(self, user_input=None): + schema = OrderedDict() + schema[vol.Required("enabled")] = bool + return self.async_show_form( + step_id="user", + data_schema=schema, + description_placeholders={"enabled": "Set to true to be true"}, + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + with patch.dict(HANDLERS, {"test": TestFlow}): + url = "/api/config/config_entries/subentries/flow" + resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + data.pop("flow_id") + assert data == { + "type": "form", + "handler": ["test1", "test"], + "step_id": "user", + "data_schema": [{"name": "enabled", "required": True, "type": "boolean"}], + "description_placeholders": {"enabled": "Set to true to be true"}, + "errors": None, + "last_step": None, + "preview": None, + } + + +@pytest.mark.parametrize( + ("endpoint", "method"), + [ + ("/api/config/config_entries/subentries/flow", "post"), + ("/api/config/config_entries/subentries/flow/1", "get"), + ("/api/config/config_entries/subentries/flow/1", "post"), + ], +) +async def test_subentry_flow_unauth( + hass: HomeAssistant, client, hass_admin_user: MockUser, endpoint: str, method: str +) -> None: + """Test unauthorized on subentry flow.""" + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_init(self, user_input=None): + schema = OrderedDict() + schema[vol.Required("enabled")] = bool + return self.async_show_form( + step_id="user", + data_schema=schema, + description_placeholders={"enabled": "Set to true to be true"}, + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + hass_admin_user.groups = [] + + with patch.dict(HANDLERS, {"test": TestFlow}): + resp = await getattr(client, method)(endpoint, json={"handler": entry.entry_id}) + + assert resp.status == HTTPStatus.UNAUTHORIZED + + +async def test_two_step_subentry_flow(hass: HomeAssistant, client) -> None: + """Test we can finish a two step subentry flow.""" + mock_integration( + hass, MockModule("test", 
async_setup_entry=AsyncMock(return_value=True)) + ) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_user(self, user_input=None): + return await self.async_step_finish() + + async def async_step_finish(self, user_input=None): + if user_input: + return self.async_create_entry( + title="Mock title", data=user_input, unique_id="test" + ) + + return self.async_show_form( + step_id="finish", data_schema=vol.Schema({"enabled": bool}) + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + with patch.dict(HANDLERS, {"test": TestFlow}): + url = "/api/config/config_entries/subentries/flow" + resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + flow_id = data["flow_id"] + expected_data = { + "data_schema": [{"name": "enabled", "type": "boolean"}], + "description_placeholders": None, + "errors": None, + "flow_id": flow_id, + "handler": ["test1", "test"], + "last_step": None, + "preview": None, + "step_id": "finish", + "type": "form", + } + assert data == expected_data + + resp = await client.get(f"/api/config/config_entries/subentries/flow/{flow_id}") + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == expected_data + + resp = await client.post( + f"/api/config/config_entries/subentries/flow/{flow_id}", + json={"enabled": True}, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == { + "description_placeholders": None, + "description": None, + "flow_id": flow_id, + "handler": ["test1", "test"], + "title": "Mock title", + "type": "create_entry", + "unique_id": "test", + } + + +async def test_subentry_flow_with_invalid_data(hass: HomeAssistant, client) -> None: + """Test a subentry flow with invalid_data.""" + mock_integration( + hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) + ) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_user(self, user_input=None): + return self.async_show_form( + step_id="finish", + data_schema=vol.Schema( + { + vol.Required( + "choices", default=["invalid", "valid"] + ): cv.multi_select({"valid": "Valid"}) + } + ), + ) + + async def async_step_finish(self, user_input=None): + return self.async_create_entry( + title="Enable disable", data=user_input + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + with patch.dict(HANDLERS, {"test": TestFlow}): + url = "/api/config/config_entries/subentries/flow" + resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + flow_id = data.pop("flow_id") + assert data == { + "type": "form", + "handler": ["test1", 
"test"], + "step_id": "finish", + "data_schema": [ + { + "default": ["invalid", "valid"], + "name": "choices", + "options": {"valid": "Valid"}, + "required": True, + "type": "multi_select", + } + ], + "description_placeholders": None, + "errors": None, + "last_step": None, + "preview": None, + } + + with patch.dict(HANDLERS, {"test": TestFlow}): + resp = await client.post( + f"/api/config/config_entries/subentries/flow/{flow_id}", + json={"choices": ["valid", "invalid"]}, + ) + assert resp.status == HTTPStatus.BAD_REQUEST + data = await resp.json() + assert data == {"errors": {"choices": "invalid is not a valid option"}} + + @pytest.mark.usefixtures("freezer") async def test_get_single( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -1111,11 +1403,13 @@ async def test_get_single( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "user", "state": "loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1144,11 +1438,9 @@ async def test_update_prefrences( assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) - hass.config.components.add("kitchen_sink") + hass.config.components.add("test") assert entry.pref_disable_new_entities is False assert entry.pref_disable_polling is False @@ -1244,12 +1536,10 @@ async def test_disable_entry( assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) assert entry.disabled_by is None - hass.config.components.add("kitchen_sink") + hass.config.components.add("test") # Disable await ws_client.send_json( @@ -1321,8 +1611,27 @@ async def test_disable_entry_nonexisting( assert response["error"]["code"] == "not_found" +@pytest.mark.parametrize( + ( + "flow_context", + "entry_discovery_keys", + ), + [ + ( + {}, + {}, + ), + ( + {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, + {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, + ), + ], +) async def test_ignore_flow( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + flow_context: dict, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], ) -> None: """Test we can ignore a flow.""" assert await async_setup_component(hass, "config", {}) @@ -1345,7 +1654,7 @@ async def test_ignore_flow( with patch.dict(HANDLERS, {"test": TestFlow}): result = await hass.config_entries.flow.async_init( - "test", context={"source": core_ce.SOURCE_USER} + "test", context={"source": core_ce.SOURCE_USER} | flow_context ) assert result["type"] is FlowResultType.FORM @@ -1367,6 +1676,8 @@ async def test_ignore_flow( assert entry.source == "ignore" assert entry.unique_id == "mock-unique-id" assert entry.title == "Test Integration" + assert entry.data == {} + assert entry.discovery_keys == entry_discovery_keys async def test_ignore_flow_nonexisting( @@ -1454,11 +1765,13 @@ async def 
test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1473,11 +1786,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1492,11 +1807,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1511,11 +1828,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1530,11 +1849,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1560,11 +1881,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1589,11 +1912,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1608,11 +1933,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1637,11 +1964,13 @@ async 
def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1656,11 +1985,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1691,11 +2022,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1710,11 +2043,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1729,11 +2064,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1748,11 +2085,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1767,11 +2106,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1874,11 +2215,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1896,11 +2239,13 @@ async def 
test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1918,11 +2263,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1946,11 +2293,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1975,11 +2324,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2003,11 +2354,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2093,11 +2446,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2115,11 +2470,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2145,11 +2502,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2171,11 +2530,13 @@ 
async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2201,11 +2562,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2229,11 +2592,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2333,6 +2698,10 @@ async def test_flow_with_multiple_schema_errors_base( } +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.abort.reconfigure_successful"], +) @pytest.mark.usefixtures("enable_custom_integrations", "freezer") async def test_supports_reconfigure( hass: HomeAssistant, @@ -2345,6 +2714,9 @@ async def test_supports_reconfigure( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) ) + entry = MockConfigEntry(domain="test", title="Test", entry_id="1") + entry.add_to_hass(hass) + class TestFlow(core_ce.ConfigFlow): VERSION = 1 @@ -2358,8 +2730,10 @@ async def test_supports_reconfigure( return self.async_show_form( step_id="reconfigure", data_schema=vol.Schema({}) ) - return self.async_create_entry( - title="Test Entry", data={"secret": "account_token"} + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + title="Test Entry", + data={"secret": "account_token"}, ) with patch.dict(HANDLERS, {"test": TestFlow}): @@ -2395,36 +2769,12 @@ async def test_supports_reconfigure( assert len(entries) == 1 data = await resp.json() - timestamp = utcnow().timestamp() data.pop("flow_id") assert data == { "handler": "test", - "title": "Test Entry", - "type": "create_entry", - "version": 1, - "result": { - "created_at": timestamp, - "disabled_by": None, - "domain": "test", - "entry_id": entries[0].entry_id, - "error_reason_translation_key": None, - "error_reason_translation_placeholders": None, - "modified_at": timestamp, - "pref_disable_new_entities": False, - "pref_disable_polling": False, - "reason": None, - "source": core_ce.SOURCE_RECONFIGURE, - "state": core_ce.ConfigEntryState.LOADED.value, - "supports_options": False, - "supports_reconfigure": True, - "supports_remove_device": False, - "supports_unload": False, - "title": "Test Entry", - }, - "description": None, + "reason": "reconfigure_successful", + "type": "abort", "description_placeholders": None, - "options": {}, - "minor_version": 1, } @@ -2459,3 +2809,133 @@ async def test_does_not_support_reconfigure( response == '{"message":"Handler ConfigEntriesFlowManager doesn\'t support step reconfigure"}' ) + + +async def test_list_subentries( + hass: 
HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test that we can list subentries.""" + assert await async_setup_component(hass, "config", {}) + ws_client = await hass_ws_client(hass) + + entry = MockConfigEntry( + domain="test", + state=core_ce.ConfigEntryState.LOADED, + subentries_data=[ + core_ce.ConfigSubentryData( + data={"test": "test"}, + subentry_id="mock_id", + title="Mock title", + unique_id="test", + ) + ], + ) + entry.add_to_hass(hass) + + assert entry.pref_disable_new_entities is False + assert entry.pref_disable_polling is False + + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/list", + "entry_id": entry.entry_id, + } + ) + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] == [ + {"subentry_id": "mock_id", "title": "Mock title", "unique_id": "test"}, + ] + + # Try listing subentries for an unknown entry + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/list", + "entry_id": "no_such_entry", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Config entry not found", + } + + +async def test_delete_subentry( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test that we can delete a subentry.""" + assert await async_setup_component(hass, "config", {}) + ws_client = await hass_ws_client(hass) + + entry = MockConfigEntry( + domain="test", + state=core_ce.ConfigEntryState.LOADED, + subentries_data=[ + core_ce.ConfigSubentryData( + data={"test": "test"}, subentry_id="mock_id", title="Mock title" + ) + ], + ) + entry.add_to_hass(hass) + + assert entry.pref_disable_new_entities is False + assert entry.pref_disable_polling is False + + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/delete", + "entry_id": entry.entry_id, + "subentry_id": "mock_id", + } + ) + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] is None + + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/list", + "entry_id": entry.entry_id, + } + ) + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] == [] + + # Try deleting the subentry again + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/delete", + "entry_id": entry.entry_id, + "subentry_id": "mock_id", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Config subentry not found", + } + + # Try deleting subentry from an unknown entry + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/delete", + "entry_id": "no_such_entry", + "subentry_id": "mock_id", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Config entry not found", + } diff --git a/tests/components/config/test_entity_registry.py b/tests/components/config/test_entity_registry.py index 60657d4a77b..bfbd69ec9bd 100644 --- a/tests/components/config/test_entity_registry.py +++ b/tests/components/config/test_entity_registry.py @@ -245,6 +245,7 @@ async def test_list_entities_for_display( "ec": 1, "ei": "test_domain.test", "en": "Hello World", + "hn": True, "ic": "mdi:icon", "lb": [], "pl": "test_platform", @@ -254,7 +255,7 @@ async def 
test_list_entities_for_display( "ai": "area52", "di": "device123", "ei": "test_domain.nameless", - "en": None, + "hn": True, "lb": [], "pl": "test_platform", }, @@ -262,6 +263,8 @@ async def test_list_entities_for_display( "ai": "area52", "di": "device123", "ei": "test_domain.renamed", + "en": "User name", + "hn": True, "lb": [], "pl": "test_platform", }, @@ -326,6 +329,7 @@ async def test_list_entities_for_display( "ai": "area52", "di": "device123", "ei": "test_domain.test", + "hn": True, "lb": [], "en": "Hello World", "pl": "test_platform", diff --git a/tests/components/config/test_view.py b/tests/components/config/test_view.py new file mode 100644 index 00000000000..0bea9240a89 --- /dev/null +++ b/tests/components/config/test_view.py @@ -0,0 +1,41 @@ +"""Test config HTTP views.""" + +from collections.abc import Callable +from contextlib import AbstractContextManager, nullcontext as does_not_raise + +import pytest + +from homeassistant.components.config import view +from homeassistant.core import HomeAssistant + + +async def _mock_validator(hass: HomeAssistant, key: str, data: dict) -> dict: + """Mock data validator.""" + return data + + +@pytest.mark.parametrize( + ("data_schema", "data_validator", "expected_result"), + [ + (None, None, pytest.raises(ValueError)), + (None, _mock_validator, does_not_raise()), + (lambda x: x, None, does_not_raise()), + (lambda x: x, _mock_validator, pytest.raises(ValueError)), + ], +) +async def test_view_requires_data_schema_or_validator( + hass: HomeAssistant, + data_schema: Callable | None, + data_validator: Callable | None, + expected_result: AbstractContextManager, +) -> None: + """Test the view base class requires a schema or validator.""" + with expected_result: + view.BaseEditConfigView( + "test", + "test", + "test", + lambda x: "", + data_schema=data_schema, + data_validator=data_validator, + ) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 39ff7071dc4..e95147b8664 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -2,19 +2,48 @@ from __future__ import annotations -from collections.abc import Callable, Generator +import asyncio +from collections.abc import AsyncGenerator, Callable, Generator +from functools import lru_cache from importlib.util import find_spec from pathlib import Path +import string from typing import TYPE_CHECKING, Any from unittest.mock import AsyncMock, MagicMock, patch +from aiohasupervisor.models import ( + Discovery, + Repository, + ResolutionInfo, + StoreAddon, + StoreInfo, +) import pytest +import voluptuous as vol +from homeassistant.components import repairs +from homeassistant.config_entries import ( + DISCOVERY_SOURCES, + ConfigEntriesFlowManager, + FlowResult, + OptionsFlowManager, +) from homeassistant.const import STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant +from homeassistant.core import Context, HomeAssistant, ServiceRegistry, ServiceResponse +from homeassistant.data_entry_flow import ( + FlowContext, + FlowHandler, + FlowManager, + FlowResultType, + section, +) +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers.translation import async_get_translations +from homeassistant.util import yaml if TYPE_CHECKING: - from homeassistant.components.hassio.addon_manager import AddonManager + from homeassistant.components.hassio import AddonManager from .conversation import MockAgent from .device_tracker.common import MockScanner @@ -185,7 +214,9 @@ def 
mock_legacy_device_tracker_setup() -> Callable[[HomeAssistant, MockScanner], @pytest.fixture(name="addon_manager") -def addon_manager_fixture(hass: HomeAssistant) -> AddonManager: +def addon_manager_fixture( + hass: HomeAssistant, supervisor_client: AsyncMock +) -> AddonManager: """Return an AddonManager instance.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_addon_manager @@ -194,12 +225,9 @@ def addon_manager_fixture(hass: HomeAssistant) -> AddonManager: @pytest.fixture(name="discovery_info") -def discovery_info_fixture() -> Any: +def discovery_info_fixture() -> list[Discovery]: """Return the discovery info from the supervisor.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_discovery_info - - return mock_discovery_info() + return [] @pytest.fixture(name="discovery_info_side_effect") @@ -210,13 +238,29 @@ def discovery_info_side_effect_fixture() -> Any | None: @pytest.fixture(name="get_addon_discovery_info") def get_addon_discovery_info_fixture( - discovery_info: dict[str, Any], discovery_info_side_effect: Any | None -) -> Generator[AsyncMock]: + supervisor_client: AsyncMock, + discovery_info: list[Discovery], + discovery_info_side_effect: Any | None, +) -> AsyncMock: """Mock get add-on discovery info.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_get_addon_discovery_info + supervisor_client.discovery.list.return_value = discovery_info + supervisor_client.discovery.list.side_effect = discovery_info_side_effect + return supervisor_client.discovery.list - yield from mock_get_addon_discovery_info(discovery_info, discovery_info_side_effect) + +@pytest.fixture(name="get_discovery_message_side_effect") +def get_discovery_message_side_effect_fixture() -> Any | None: + """Side effect for getting a discovery message by uuid.""" + return None + + +@pytest.fixture(name="get_discovery_message") +def get_discovery_message_fixture( + supervisor_client: AsyncMock, get_discovery_message_side_effect: Any | None +) -> AsyncMock: + """Mock getting a discovery message by uuid.""" + supervisor_client.discovery.get.side_effect = get_discovery_message_side_effect + return supervisor_client.discovery.get @pytest.fixture(name="addon_store_info_side_effect") @@ -227,13 +271,14 @@ def addon_store_info_side_effect_fixture() -> Any | None: @pytest.fixture(name="addon_store_info") def addon_store_info_fixture( + supervisor_client: AsyncMock, addon_store_info_side_effect: Any | None, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock Supervisor add-on store info.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_addon_store_info - yield from mock_addon_store_info(addon_store_info_side_effect) + return mock_addon_store_info(supervisor_client, addon_store_info_side_effect) @pytest.fixture(name="addon_info_side_effect") @@ -243,12 +288,14 @@ def addon_info_side_effect_fixture() -> Any | None: @pytest.fixture(name="addon_info") -def addon_info_fixture(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: +def addon_info_fixture( + supervisor_client: AsyncMock, addon_info_side_effect: Any | None +) -> AsyncMock: """Mock Supervisor add-on info.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_addon_info - yield from mock_addon_info(addon_info_side_effect) + return mock_addon_info(supervisor_client, addon_info_side_effect) @pytest.fixture(name="addon_not_installed") @@ -298,13 +345,12 @@ def install_addon_side_effect_fixture( 
@pytest.fixture(name="install_addon") def install_addon_fixture( + supervisor_client: AsyncMock, install_addon_side_effect: Any | None, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock install add-on.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_install_addon - - yield from mock_install_addon(install_addon_side_effect) + supervisor_client.store.install_addon.side_effect = install_addon_side_effect + return supervisor_client.store.install_addon @pytest.fixture(name="start_addon_side_effect") @@ -319,12 +365,12 @@ def start_addon_side_effect_fixture( @pytest.fixture(name="start_addon") -def start_addon_fixture(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: +def start_addon_fixture( + supervisor_client: AsyncMock, start_addon_side_effect: Any | None +) -> AsyncMock: """Mock start add-on.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_start_addon - - yield from mock_start_addon(start_addon_side_effect) + supervisor_client.addons.start_addon.side_effect = start_addon_side_effect + return supervisor_client.addons.start_addon @pytest.fixture(name="restart_addon_side_effect") @@ -335,31 +381,24 @@ def restart_addon_side_effect_fixture() -> Any | None: @pytest.fixture(name="restart_addon") def restart_addon_fixture( + supervisor_client: AsyncMock, restart_addon_side_effect: Any | None, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock restart add-on.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_restart_addon - - yield from mock_restart_addon(restart_addon_side_effect) + supervisor_client.addons.restart_addon.side_effect = restart_addon_side_effect + return supervisor_client.addons.restart_addon @pytest.fixture(name="stop_addon") -def stop_addon_fixture() -> Generator[AsyncMock]: +def stop_addon_fixture(supervisor_client: AsyncMock) -> AsyncMock: """Mock stop add-on.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_stop_addon - - yield from mock_stop_addon() + return supervisor_client.addons.stop_addon @pytest.fixture(name="addon_options") def addon_options_fixture(addon_info: AsyncMock) -> dict[str, Any]: """Mock add-on options.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_addon_options - - return mock_addon_options(addon_info) + return addon_info.return_value.options @pytest.fixture(name="set_addon_options_side_effect") @@ -375,22 +414,20 @@ def set_addon_options_side_effect_fixture( @pytest.fixture(name="set_addon_options") def set_addon_options_fixture( + supervisor_client: AsyncMock, set_addon_options_side_effect: Any | None, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock set add-on options.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_set_addon_options - - yield from mock_set_addon_options(set_addon_options_side_effect) + supervisor_client.addons.set_addon_options.side_effect = ( + set_addon_options_side_effect + ) + return supervisor_client.addons.set_addon_options @pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture() -> Generator[AsyncMock]: +def uninstall_addon_fixture(supervisor_client: AsyncMock) -> AsyncMock: """Mock uninstall add-on.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_uninstall_addon - - yield from mock_uninstall_addon() + return supervisor_client.addons.uninstall_addon @pytest.fixture(name="create_backup") @@ -403,9 +440,477 @@ def create_backup_fixture() 
-> Generator[AsyncMock]: @pytest.fixture(name="update_addon") -def update_addon_fixture() -> Generator[AsyncMock]: +def update_addon_fixture(supervisor_client: AsyncMock) -> AsyncMock: """Mock update add-on.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_update_addon + return supervisor_client.store.update_addon - yield from mock_update_addon() + +@pytest.fixture(name="store_addons") +def store_addons_fixture() -> list[StoreAddon]: + """Mock store addons list.""" + return [] + + +@pytest.fixture(name="store_repositories") +def store_repositories_fixture() -> list[Repository]: + """Mock store repositories list.""" + return [] + + +@pytest.fixture(name="store_info") +def store_info_fixture( + supervisor_client: AsyncMock, + store_addons: list[StoreAddon], + store_repositories: list[Repository], +) -> AsyncMock: + """Mock store info.""" + supervisor_client.store.info.return_value = StoreInfo( + addons=store_addons, repositories=store_repositories + ) + return supervisor_client.store.info + + +@pytest.fixture(name="addon_stats") +def addon_stats_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock addon stats info.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_stats + + return mock_addon_stats(supervisor_client) + + +@pytest.fixture(name="addon_changelog") +def addon_changelog_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock addon changelog.""" + supervisor_client.store.addon_changelog.return_value = "" + return supervisor_client.store.addon_changelog + + +@pytest.fixture(name="supervisor_is_connected") +def supervisor_is_connected_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock supervisor is connected.""" + supervisor_client.supervisor.ping.return_value = None + return supervisor_client.supervisor.ping + + +@pytest.fixture(name="resolution_info") +def resolution_info_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock resolution info from supervisor.""" + supervisor_client.resolution.info.return_value = ResolutionInfo( + suggestions=[], + unsupported=[], + unhealthy=[], + issues=[], + checks=[], + ) + return supervisor_client.resolution.info + + +@pytest.fixture(name="resolution_suggestions_for_issue") +def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock suggestions by issue from supervisor resolution.""" + supervisor_client.resolution.suggestions_for_issue.return_value = [] + return supervisor_client.resolution.suggestions_for_issue + + +@pytest.fixture(name="supervisor_client") +def supervisor_client() -> Generator[AsyncMock]: + """Mock the supervisor client.""" + mounts_info_mock = AsyncMock(spec_set=["mounts"]) + mounts_info_mock.mounts = [] + supervisor_client = AsyncMock() + supervisor_client.addons = AsyncMock() + supervisor_client.discovery = AsyncMock() + supervisor_client.homeassistant = AsyncMock() + supervisor_client.host = AsyncMock() + supervisor_client.mounts.info.return_value = mounts_info_mock + supervisor_client.os = AsyncMock() + supervisor_client.resolution = AsyncMock() + supervisor_client.supervisor = AsyncMock() + with ( + patch( + "homeassistant.components.hassio.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.handler.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.addon_manager.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + 
"homeassistant.components.hassio.backup.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.discovery.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.coordinator.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.issues.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.repairs.get_supervisor_client", + return_value=supervisor_client, + ), + ): + yield supervisor_client + + +def _validate_translation_placeholders( + full_key: str, + translation: str, + description_placeholders: dict[str, str] | None, + translation_errors: dict[str, str], +) -> str | None: + """Raise if translation exists with missing placeholders.""" + tuples = list(string.Formatter().parse(translation)) + for _, placeholder, _, _ in tuples: + if placeholder is None: + continue + if ( + description_placeholders is None + or placeholder not in description_placeholders + ): + translation_errors[full_key] = ( + f"Description not found for placeholder `{placeholder}` in {full_key}" + ) + + +async def _validate_translation( + hass: HomeAssistant, + translation_errors: dict[str, str], + category: str, + component: str, + key: str, + description_placeholders: dict[str, str] | None, + *, + translation_required: bool = True, +) -> None: + """Raise if translation doesn't exist.""" + full_key = f"component.{component}.{category}.{key}" + translations = await async_get_translations(hass, "en", category, [component]) + if (translation := translations.get(full_key)) is not None: + _validate_translation_placeholders( + full_key, translation, description_placeholders, translation_errors + ) + return + + if not translation_required: + return + + if full_key in translation_errors: + translation_errors[full_key] = "used" + return + + translation_errors[full_key] = ( + f"Translation not found for {component}: `{category}.{key}`. " + f"Please add to homeassistant/components/{component}/strings.json" + ) + + +@pytest.fixture +def ignore_translations() -> str | list[str]: + """Ignore specific translations. + + Override or parametrize this fixture with a fixture that returns, + a list of translation that should be ignored. 
+ """ + return [] + + +@lru_cache +def _get_integration_quality_scale(integration: str) -> dict[str, Any]: + """Get the quality scale for an integration.""" + try: + return yaml.load_yaml_dict( + f"homeassistant/components/{integration}/quality_scale.yaml" + ).get("rules", {}) + except FileNotFoundError: + return {} + + +def _get_integration_quality_scale_rule(integration: str, rule: str) -> str: + """Get the quality scale for an integration.""" + quality_scale = _get_integration_quality_scale(integration) + if not quality_scale or rule not in quality_scale: + return "todo" + status = quality_scale[rule] + return status if isinstance(status, str) else status["status"] + + +async def _check_step_or_section_translations( + hass: HomeAssistant, + translation_errors: dict[str, str], + category: str, + integration: str, + translation_prefix: str, + description_placeholders: dict[str, str], + data_schema: vol.Schema | None, +) -> None: + # neither title nor description are required + # - title defaults to integration name + # - description is optional + for header in ("title", "description"): + await _validate_translation( + hass, + translation_errors, + category, + integration, + f"{translation_prefix}.{header}", + description_placeholders, + translation_required=False, + ) + + if not data_schema: + return + + for data_key, data_value in data_schema.schema.items(): + if isinstance(data_value, section): + # check the nested section + await _check_step_or_section_translations( + hass, + translation_errors, + category, + integration, + f"{translation_prefix}.sections.{data_key}", + description_placeholders, + data_value.schema, + ) + return + iqs_config_flow = _get_integration_quality_scale_rule( + integration, "config-flow" + ) + # data and data_description are compulsory + for header in ("data", "data_description"): + await _validate_translation( + hass, + translation_errors, + category, + integration, + f"{translation_prefix}.{header}.{data_key}", + description_placeholders, + translation_required=(iqs_config_flow == "done"), + ) + + +async def _check_config_flow_result_translations( + manager: FlowManager, + flow: FlowHandler, + result: FlowResult[FlowContext, str], + translation_errors: dict[str, str], +) -> None: + if result["type"] is FlowResultType.CREATE_ENTRY: + # No need to check translations for a completed flow + return + + key_prefix = "" + if isinstance(manager, ConfigEntriesFlowManager): + category = "config" + integration = flow.handler + elif isinstance(manager, OptionsFlowManager): + category = "options" + integration = flow.hass.config_entries.async_get_entry(flow.handler).domain + elif isinstance(manager, repairs.RepairsFlowManager): + category = "issues" + integration = flow.handler + issue_id = flow.issue_id + issue = ir.async_get(flow.hass).async_get_issue(integration, issue_id) + key_prefix = f"{issue.translation_key}.fix_flow." 
+ else: + return + + # Check if this flow has been seen before + # Gets set to False on first run, and to True on subsequent runs + setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) + + if result["type"] is FlowResultType.FORM: + if step_id := result.get("step_id"): + await _check_step_or_section_translations( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}step.{step_id}", + result["description_placeholders"], + result["data_schema"], + ) + + if errors := result.get("errors"): + for error in errors.values(): + await _validate_translation( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}error.{error}", + result["description_placeholders"], + ) + return + + if result["type"] is FlowResultType.ABORT: + # We don't need translations for a discovery flow which immediately + # aborts, since such flows won't be seen by users + if not flow.__flow_seen_before and flow.source in DISCOVERY_SOURCES: + return + await _validate_translation( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}abort.{result["reason"]}", + result["description_placeholders"], + ) + + +async def _check_create_issue_translations( + issue_registry: ir.IssueRegistry, + issue: ir.IssueEntry, + translation_errors: dict[str, str], +) -> None: + if issue.translation_key is None: + # `translation_key` is only None on dismissed issues + return + await _validate_translation( + issue_registry.hass, + translation_errors, + "issues", + issue.domain, + f"{issue.translation_key}.title", + issue.translation_placeholders, + ) + if not issue.is_fixable: + # Description is required for non-fixable issues + await _validate_translation( + issue_registry.hass, + translation_errors, + "issues", + issue.domain, + f"{issue.translation_key}.description", + issue.translation_placeholders, + ) + + +async def _check_exception_translation( + hass: HomeAssistant, + exception: HomeAssistantError, + translation_errors: dict[str, str], +) -> None: + if exception.translation_key is None: + return + await _validate_translation( + hass, + translation_errors, + "exceptions", + exception.translation_domain, + f"{exception.translation_key}.message", + exception.translation_placeholders, + ) + + +@pytest.fixture(autouse=True) +async def check_translations( + ignore_translations: str | list[str], +) -> AsyncGenerator[None]: + """Check that translation requirements are met. 
+ + Current checks: + - data entry flow results (ConfigFlow/OptionsFlow/RepairFlow) + - issue registry entries + """ + if not isinstance(ignore_translations, list): + ignore_translations = [ignore_translations] + + translation_errors = {k: "unused" for k in ignore_translations} + + translation_coros = set() + + # Keep reference to original functions + _original_flow_manager_async_handle_step = FlowManager._async_handle_step + _original_issue_registry_async_create_issue = ir.IssueRegistry.async_get_or_create + _original_service_registry_async_call = ServiceRegistry.async_call + + # Prepare override functions + async def _flow_manager_async_handle_step( + self: FlowManager, flow: FlowHandler, *args + ) -> FlowResult: + result = await _original_flow_manager_async_handle_step(self, flow, *args) + await _check_config_flow_result_translations( + self, flow, result, translation_errors + ) + return result + + def _issue_registry_async_create_issue( + self: ir.IssueRegistry, domain: str, issue_id: str, *args, **kwargs + ) -> None: + result = _original_issue_registry_async_create_issue( + self, domain, issue_id, *args, **kwargs + ) + translation_coros.add( + _check_create_issue_translations(self, result, translation_errors) + ) + return result + + async def _service_registry_async_call( + self: ServiceRegistry, + domain: str, + service: str, + service_data: dict[str, Any] | None = None, + blocking: bool = False, + context: Context | None = None, + target: dict[str, Any] | None = None, + return_response: bool = False, + ) -> ServiceResponse: + try: + return await _original_service_registry_async_call( + self, + domain, + service, + service_data, + blocking, + context, + target, + return_response, + ) + except HomeAssistantError as err: + translation_coros.add( + _check_exception_translation(self._hass, err, translation_errors) + ) + raise + + # Use override functions + with ( + patch( + "homeassistant.data_entry_flow.FlowManager._async_handle_step", + _flow_manager_async_handle_step, + ), + patch( + "homeassistant.helpers.issue_registry.IssueRegistry.async_get_or_create", + _issue_registry_async_create_issue, + ), + patch( + "homeassistant.core.ServiceRegistry.async_call", + _service_registry_async_call, + ), + ): + yield + + await asyncio.gather(*translation_coros) + + # Run final checks + unused_ignore = [k for k, v in translation_errors.items() if v == "unused"] + if unused_ignore: + pytest.fail( + f"Unused ignore translations: {', '.join(unused_ignore)}. " + "Please remove them from the ignore_translations fixture." 
+ ) + for description in translation_errors.values(): + if description not in {"used", "unused"}: + pytest.fail(description) diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr index 051613f0300..f1e220b10b2 100644 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -168,7 +168,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', + 'speech': 'Sorry, kitchen light is not exposed', }), }), }), @@ -308,7 +308,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added light', + 'speech': 'Sorry, I am not aware of any area called late added', }), }), }), @@ -358,7 +358,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', + 'speech': 'Sorry, kitchen light is not exposed', }), }), }), @@ -378,7 +378,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', + 'speech': 'Sorry, I am not aware of any area called kitchen', }), }), }), @@ -428,7 +428,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called renamed light', + 'speech': 'Sorry, I am not aware of any area called renamed', }), }), }), diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index fd02646df48..9cebfd9abd1 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -6,7 +6,6 @@ 'id': 'conversation.home_assistant', 'name': 'Home Assistant', 'supported_languages': list([ - 'af', 'ar', 'bg', 'bn', @@ -23,24 +22,19 @@ 'fa', 'fi', 'fr', - 'fr-CA', 'gl', - 'gu', 'he', - 'hi', 'hr', 'hu', 'id', 'is', 'it', 'ka', - 'kn', 'ko', 'lb', 'lt', 'lv', 'ml', - 'mn', 'ms', 'nb', 'nl', @@ -53,8 +47,8 @@ 'sl', 'sr', 'sv', - 'sw', 'te', + 'th', 'tr', 'uk', 'ur', @@ -541,7 +535,7 @@ 'name': 'HassTurnOn', }), 'match': True, - 'sentence_template': ' on [all] in ', + 'sentence_template': ' on [] ', 'slots': dict({ 'area': 'kitchen', 'domain': 'light', @@ -577,7 +571,7 @@ 'name': 'HassGetState', }), 'match': True, - 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} [in ]', + 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} []', 'slots': dict({ 'area': 'kitchen', 'domain': 'lights', @@ -612,7 +606,7 @@ 'name': 'OrderBeer', }), 'match': True, - 'sentence_template': "I'd like to order a {beer_style} [please]", + 'sentence_template': "[I'd like to ]order a {beer_style} [please]", 'slots': dict({ 'beer_style': 'lager', }), @@ -639,7 +633,7 @@ 'details': dict({ 'brightness': dict({ 'name': 'brightness', - 'text': '100%', + 'text': '100', 'value': 100, }), 'name': dict({ @@ -654,7 +648,7 @@ 'match': True, 'sentence_template': '[] brightness [to] ', 'slots': dict({ - 'brightness': '100%', + 'brightness': '100', 'name': 'test light', }), 'source': 'builtin', @@ -699,6 +693,14 @@ }) # --- # name: test_ws_hass_agent_debug_sentence_trigger + dict({ + 'trigger_sentences': list([ + 'hello', + 'hello[ world]', + ]), + }) +# --- +# name: test_ws_hass_agent_debug_sentence_trigger.1 dict({ 'results': list([ dict({ 
diff --git a/tests/components/conversation/test_agent_manager.py b/tests/components/conversation/test_agent_manager.py new file mode 100644 index 00000000000..47b58a522a8 --- /dev/null +++ b/tests/components/conversation/test_agent_manager.py @@ -0,0 +1,34 @@ +"""Test agent manager.""" + +from unittest.mock import patch + +from homeassistant.components.conversation import ConversationResult, async_converse +from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers.intent import IntentResponse + + +async def test_async_converse(hass: HomeAssistant, init_components) -> None: + """Test the async_converse method.""" + context = Context() + with patch( + "homeassistant.components.conversation.default_agent.DefaultAgent.async_process", + return_value=ConversationResult(response=IntentResponse(language="test lang")), + ) as mock_process: + await async_converse( + hass, + text="test command", + conversation_id="test id", + context=context, + language="test lang", + agent_id="conversation.home_assistant", + device_id="test device id", + ) + + assert mock_process.called + conversation_input = mock_process.call_args[0][0] + assert conversation_input.text == "test command" + assert conversation_input.conversation_id == "test id" + assert conversation_input.context is context + assert conversation_input.language == "test lang" + assert conversation_input.agent_id == "conversation.home_assistant" + assert conversation_input.device_id == "test device id" diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 935ef205d4f..8df1647d18c 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -13,6 +13,8 @@ import yaml from homeassistant.components import conversation, cover, media_player from homeassistant.components.conversation import default_agent +from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY +from homeassistant.components.conversation.default_agent import METADATA_CUSTOM_SENTENCE from homeassistant.components.conversation.models import ConversationInput from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.components.homeassistant.exposed_entities import ( @@ -28,6 +30,7 @@ from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, STATE_CLOSED, + STATE_OFF, STATE_ON, STATE_UNKNOWN, EntityCategory, @@ -203,7 +206,7 @@ async def test_exposed_areas( @pytest.mark.usefixtures("init_components") async def test_conversation_agent(hass: HomeAssistant) -> None: """Test DefaultAgent.""" - agent = default_agent.async_get_default_agent(hass) + agent = hass.data[DATA_DEFAULT_ENTITY] with patch( "homeassistant.components.conversation.default_agent.get_languages", return_value=["dwarvish", "elvish", "entish"], @@ -380,7 +383,7 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None: trigger_sentences = ["It's party time", "It is time to party"] trigger_response = "Cowabunga!" 
- agent = default_agent.async_get_default_agent(hass) + agent = hass.data[DATA_DEFAULT_ENTITY] assert isinstance(agent, default_agent.DefaultAgent) callback = AsyncMock(return_value=trigger_response) @@ -395,7 +398,7 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None: callback.reset_mock() result = await conversation.async_converse(hass, sentence, None, Context()) assert callback.call_count == 1 - assert callback.call_args[0][0] == sentence + assert callback.call_args[0][0].text == sentence assert ( result.response.response_type == intent.IntentResponseType.ACTION_DONE ), sentence @@ -416,6 +419,44 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None: assert len(callback.mock_calls) == 0 +@pytest.mark.parametrize( + ("language", "expected"), + [("en", "English done"), ("de", "German done"), ("not_translated", "Done")], +) +@pytest.mark.usefixtures("init_components") +async def test_trigger_sentence_response_translation( + hass: HomeAssistant, language: str, expected: str +) -> None: + """Test translation of default response 'done'.""" + hass.config.language = language + + agent = hass.data[DATA_DEFAULT_ENTITY] + assert isinstance(agent, default_agent.DefaultAgent) + + translations = { + "en": {"component.conversation.conversation.agent.done": "English done"}, + "de": {"component.conversation.conversation.agent.done": "German done"}, + "not_translated": {}, + } + + with patch( + "homeassistant.components.conversation.default_agent.translation.async_get_translations", + return_value=translations.get(language), + ): + unregister = agent.register_trigger( + ["test sentence"], AsyncMock(return_value=None) + ) + result = await conversation.async_converse( + hass, "test sentence", None, Context() + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech == { + "plain": {"speech": expected, "extra_data": None} + } + + unregister() + + @pytest.mark.usefixtures("init_components", "sl_setup") async def test_shopping_list_add_item(hass: HomeAssistant) -> None: """Test adding an item to the shopping list through the default agent.""" @@ -429,7 +470,7 @@ async def test_shopping_list_add_item(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("init_components") -async def test_nevermind_item(hass: HomeAssistant) -> None: +async def test_nevermind_intent(hass: HomeAssistant) -> None: """Test HassNevermind intent through the default agent.""" result = await conversation.async_converse(hass, "nevermind", None, Context()) assert result.response.intent is not None @@ -439,6 +480,17 @@ async def test_nevermind_item(hass: HomeAssistant) -> None: assert not result.response.speech +@pytest.mark.usefixtures("init_components") +async def test_respond_intent(hass: HomeAssistant) -> None: + """Test HassRespond intent through the default agent.""" + result = await conversation.async_converse(hass, "hello", None, Context()) + assert result.response.intent is not None + assert result.response.intent.intent_type == intent.INTENT_RESPOND + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech["plain"]["speech"] == "Hello from Home Assistant." 
+ + @pytest.mark.usefixtures("init_components") async def test_device_area_context( hass: HomeAssistant, @@ -580,7 +632,7 @@ async def test_device_area_context( @pytest.mark.usefixtures("init_components") async def test_error_no_device(hass: HomeAssistant) -> None: - """Test error message when device/entity is missing.""" + """Test error message when device/entity doesn't exist.""" result = await conversation.async_converse( hass, "turn on missing entity", None, Context(), None ) @@ -593,9 +645,27 @@ async def test_error_no_device(hass: HomeAssistant) -> None: ) +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_exposed(hass: HomeAssistant) -> None: + """Test error message when device/entity exists but is not exposed.""" + hass.states.async_set("light.kitchen_light", "off") + expose_entity(hass, "light.kitchen_light", False) + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, kitchen light is not exposed" + ) + + @pytest.mark.usefixtures("init_components") async def test_error_no_area(hass: HomeAssistant) -> None: - """Test error message when area is missing.""" + """Test error message when area doesn't exist.""" result = await conversation.async_converse( hass, "turn on the lights in missing area", None, Context(), None ) @@ -610,7 +680,7 @@ async def test_error_no_area(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("init_components") async def test_error_no_floor(hass: HomeAssistant) -> None: - """Test error message when floor is missing.""" + """Test error message when floor doesn't exist.""" result = await conversation.async_converse( hass, "turn on all the lights on missing floor", None, Context(), None ) @@ -627,7 +697,7 @@ async def test_error_no_floor(hass: HomeAssistant) -> None: async def test_error_no_device_in_area( hass: HomeAssistant, area_registry: ar.AreaRegistry ) -> None: - """Test error message when area is missing a device/entity.""" + """Test error message when area exists but is does not contain a device/entity.""" area_kitchen = area_registry.async_get_or_create("kitchen_id") area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") result = await conversation.async_converse( @@ -642,6 +712,119 @@ async def test_error_no_device_in_area( ) +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_on_floor( + hass: HomeAssistant, + floor_registry: fr.FloorRegistry, +) -> None: + """Test error message when floor exists but is does not contain a device/entity.""" + floor_registry.async_create("ground") + result = await conversation.async_converse( + hass, "turn on missing entity on ground floor", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, I am not aware of any device called missing entity on ground floor" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_on_floor_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test error message when a device/entity exists on a floor but 
isn't exposed.""" + floor_ground = floor_registry.async_create("ground") + + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update( + area_kitchen.id, name="kitchen", floor_id=floor_ground.floor_id + ) + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, + name="test light", + area_id=area_kitchen.id, + ) + hass.states.async_set( + kitchen_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, + ) + expose_entity(hass, kitchen_light.entity_id, False) + await hass.async_block_till_done() + + # We don't have a sentence for turning on devices by floor + name = MatchEntity(name="name", value=kitchen_light.name, text=kitchen_light.name) + floor = MatchEntity(name="floor", value=floor_ground.name, text=floor_ground.name) + recognize_result = RecognizeResult( + intent=Intent("HassTurnOn"), + intent_data=IntentData([]), + entities={"name": name, "floor": floor}, + entities_list=[name, floor], + ) + + with patch( + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=recognize_result, + ): + result = await conversation.async_converse( + hass, "turn on test light on the ground floor", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert ( + result.response.error_code + == intent.IntentResponseErrorCode.NO_VALID_TARGETS + ) + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, test light in the ground floor is not exposed" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_in_area_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, +) -> None: + """Test error message when a device/entity exists in an area but isn't exposed.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, + name="test light", + area_id=area_kitchen.id, + ) + hass.states.async_set( + kitchen_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, + ) + expose_entity(hass, kitchen_light.entity_id, False) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on test light in the kitchen", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, test light in the kitchen area is not exposed" + ) + + @pytest.mark.usefixtures("init_components") async def test_error_no_domain(hass: HomeAssistant) -> None: """Test error message when no devices/entities exist for a domain.""" @@ -656,8 +839,8 @@ async def test_error_no_domain(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_all", - return_value=[recognize_result], + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=recognize_result, ): result = await conversation.async_converse( hass, "turn on the fans", None, Context(), None @@ -674,6 +857,38 @@ async def test_error_no_domain(hass: HomeAssistant) -> None: ) 
+@pytest.mark.usefixtures("init_components") +async def test_error_no_domain_exposed(hass: HomeAssistant) -> None: + """Test error message when devices/entities exist for a domain but are not exposed.""" + hass.states.async_set("fan.test_fan", "off") + expose_entity(hass, "fan.test_fan", False) + await hass.async_block_till_done() + + # We don't have a sentence for turning on all fans + fan_domain = MatchEntity(name="domain", value="fan", text="fans") + recognize_result = RecognizeResult( + intent=Intent("HassTurnOn"), + intent_data=IntentData([]), + entities={"domain": fan_domain}, + entities_list=[fan_domain], + ) + + with patch( + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=recognize_result, + ): + result = await conversation.async_converse( + hass, "turn on the fans", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert ( + result.response.error_code + == intent.IntentResponseErrorCode.NO_VALID_TARGETS + ) + assert result.response.speech["plain"]["speech"] == "Sorry, no fan is exposed" + + @pytest.mark.usefixtures("init_components") async def test_error_no_domain_in_area( hass: HomeAssistant, area_registry: ar.AreaRegistry @@ -694,7 +909,43 @@ async def test_error_no_domain_in_area( @pytest.mark.usefixtures("init_components") -async def test_error_no_domain_in_floor( +async def test_error_no_domain_in_area_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, +) -> None: + """Test error message when devices/entities for a domain exist in an area but are not exposed.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, + name="test light", + area_id=area_kitchen.id, + ) + hass.states.async_set( + kitchen_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, + ) + expose_entity(hass, kitchen_light.entity_id, False) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on the lights in the kitchen", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, no light in the kitchen area is exposed" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_domain_on_floor( hass: HomeAssistant, area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, @@ -735,6 +986,45 @@ async def test_error_no_domain_in_floor( ) +@pytest.mark.usefixtures("init_components") +async def test_error_no_domain_on_floor_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test error message when devices/entities for a domain exist on a floor but are not exposed.""" + floor_ground = floor_registry.async_create("ground") + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update( + area_kitchen.id, name="kitchen", floor_id=floor_ground.floor_id + ) + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = 
entity_registry.async_update_entity(
+        kitchen_light.entity_id,
+        name="test light",
+        area_id=area_kitchen.id,
+    )
+    hass.states.async_set(
+        kitchen_light.entity_id,
+        "off",
+        attributes={ATTR_FRIENDLY_NAME: kitchen_light.name},
+    )
+    expose_entity(hass, kitchen_light.entity_id, False)
+    await hass.async_block_till_done()
+
+    result = await conversation.async_converse(
+        hass, "turn on all lights on the ground floor", None, Context(), None
+    )
+
+    assert result.response.response_type == intent.IntentResponseType.ERROR
+    assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS
+    assert (
+        result.response.speech["plain"]["speech"]
+        == "Sorry, no light in the ground floor is exposed"
+    )
+
+
 @pytest.mark.usefixtures("init_components")
 async def test_error_no_device_class(hass: HomeAssistant) -> None:
     """Test error message when no entities of a device class exist."""
@@ -758,8 +1048,8 @@ async def test_error_no_device_class(hass: HomeAssistant) -> None:
     )
 
     with patch(
-        "homeassistant.components.conversation.default_agent.recognize_all",
-        return_value=[recognize_result],
+        "homeassistant.components.conversation.default_agent.recognize_best",
+        return_value=recognize_result,
    ):
         result = await conversation.async_converse(
             hass, "open the windows", None, Context(), None
@@ -776,6 +1066,54 @@ async def test_error_no_device_class(hass: HomeAssistant) -> None:
     )
 
 
+@pytest.mark.usefixtures("init_components")
+async def test_error_no_device_class_exposed(hass: HomeAssistant) -> None:
+    """Test error message when entities of a device class exist but aren't exposed."""
+    # Create a cover entity that is not a window.
+    # This ensures that the filtering below won't exit early because there are
+    # no entities in the cover domain.
+    hass.states.async_set(
+        "cover.garage_door",
+        STATE_CLOSED,
+        attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.GARAGE},
+    )
+
+    # Create a window and ensure it's not exposed
+    hass.states.async_set(
+        "cover.test_window",
+        STATE_CLOSED,
+        attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.WINDOW},
+    )
+    expose_entity(hass, "cover.test_window", False)
+
+    # We don't have a sentence for opening all windows
+    cover_domain = MatchEntity(name="domain", value="cover", text="cover")
+    window_class = MatchEntity(name="device_class", value="window", text="windows")
+    recognize_result = RecognizeResult(
+        intent=Intent("HassTurnOn"),
+        intent_data=IntentData([]),
+        entities={"domain": cover_domain, "device_class": window_class},
+        entities_list=[cover_domain, window_class],
+    )
+
+    with patch(
+        "homeassistant.components.conversation.default_agent.recognize_best",
+        return_value=recognize_result,
+    ):
+        result = await conversation.async_converse(
+            hass, "open all the windows", None, Context(), None
+        )
+
+    assert result.response.response_type == intent.IntentResponseType.ERROR
+    assert (
+        result.response.error_code
+        == intent.IntentResponseErrorCode.NO_VALID_TARGETS
+    )
+    assert (
+        result.response.speech["plain"]["speech"] == "Sorry, no window is exposed"
+    )
+
+
 @pytest.mark.usefixtures("init_components")
 async def test_error_no_device_class_in_area(
     hass: HomeAssistant, area_registry: ar.AreaRegistry
@@ -795,12 +1133,105 @@
     )
 
 
+@pytest.mark.usefixtures("init_components")
+async def test_error_no_device_class_in_area_exposed(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    area_registry: ar.AreaRegistry,
+) -> None:
+    """Test error message when entities of a device class exist in an
area but are not exposed."""
+    area_bedroom = area_registry.async_get_or_create("bedroom_id")
+    area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom")
+    bedroom_window = entity_registry.async_get_or_create("cover", "demo", "1234")
+    bedroom_window = entity_registry.async_update_entity(
+        bedroom_window.entity_id,
+        name="test cover",
+        area_id=area_bedroom.id,
+    )
+    hass.states.async_set(
+        bedroom_window.entity_id,
+        "off",
+        attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.WINDOW},
+    )
+    expose_entity(hass, bedroom_window.entity_id, False)
+    await hass.async_block_till_done()
+
+    result = await conversation.async_converse(
+        hass, "open bedroom windows", None, Context(), None
+    )
+
+    assert result.response.response_type == intent.IntentResponseType.ERROR
+    assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS
+    assert (
+        result.response.speech["plain"]["speech"]
+        == "Sorry, no window in the bedroom area is exposed"
+    )
+
+
+@pytest.mark.usefixtures("init_components")
+async def test_error_no_device_class_on_floor_exposed(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    area_registry: ar.AreaRegistry,
+    floor_registry: fr.FloorRegistry,
+) -> None:
+    """Test error message when entities of a device class exist on a floor but are not exposed."""
+    floor_ground = floor_registry.async_create("ground")
+
+    area_bedroom = area_registry.async_get_or_create("bedroom_id")
+    area_bedroom = area_registry.async_update(
+        area_bedroom.id, name="bedroom", floor_id=floor_ground.floor_id
+    )
+    bedroom_window = entity_registry.async_get_or_create("cover", "demo", "1234")
+    bedroom_window = entity_registry.async_update_entity(
+        bedroom_window.entity_id,
+        name="test cover",
+        area_id=area_bedroom.id,
+    )
+    hass.states.async_set(
+        bedroom_window.entity_id,
+        "off",
+        attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.WINDOW},
+    )
+    expose_entity(hass, bedroom_window.entity_id, False)
+    await hass.async_block_till_done()
+
+    # We don't have a sentence for opening all windows on a floor
+    cover_domain = MatchEntity(name="domain", value="cover", text="cover")
+    window_class = MatchEntity(name="device_class", value="window", text="windows")
+    floor = MatchEntity(name="floor", value=floor_ground.name, text=floor_ground.name)
+    recognize_result = RecognizeResult(
+        intent=Intent("HassTurnOn"),
+        intent_data=IntentData([]),
+        entities={"domain": cover_domain, "device_class": window_class, "floor": floor},
+        entities_list=[cover_domain, window_class, floor],
+    )
+
+    with patch(
+        "homeassistant.components.conversation.default_agent.recognize_best",
+        return_value=recognize_result,
+    ):
+        result = await conversation.async_converse(
+            hass, "open ground floor windows", None, Context(), None
+        )
+
+    assert result.response.response_type == intent.IntentResponseType.ERROR
+    assert (
+        result.response.error_code
+        == intent.IntentResponseErrorCode.NO_VALID_TARGETS
+    )
+    assert (
+        result.response.speech["plain"]["speech"]
+        == "Sorry, no window in the ground floor is exposed"
+    )
+
+
 @pytest.mark.usefixtures("init_components")
 async def test_error_no_intent(hass: HomeAssistant) -> None:
     """Test response with an intent match failure."""
     with patch(
-        "homeassistant.components.conversation.default_agent.recognize_all",
-        return_value=[],
+        "homeassistant.components.conversation.default_agent.recognize_best",
+        return_value=None,
     ):
         result = await conversation.async_converse(
             hass, "do something", None, Context(), None
@@ -869,12 +1300,48 @@
async def test_error_duplicate_names( @pytest.mark.usefixtures("init_components") -async def test_error_duplicate_names_in_area( +async def test_duplicate_names_but_one_is_exposed( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test when multiple devices have the same name (or alias), but only one of them is exposed.""" + kitchen_light_1 = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light_2 = entity_registry.async_get_or_create("light", "demo", "5678") + + # Same name and alias + for light in (kitchen_light_1, kitchen_light_2): + light = entity_registry.async_update_entity( + light.entity_id, + name="kitchen light", + aliases={"overhead light"}, + ) + hass.states.async_set( + light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: light.name}, + ) + + # Only expose one + expose_entity(hass, kitchen_light_1.entity_id, True) + expose_entity(hass, kitchen_light_2.entity_id, False) + + # Check name and alias + async_mock_service(hass, "light", "turn_on") + for name in ("kitchen light", "overhead light"): + # command + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.matched_states[0].entity_id == kitchen_light_1.entity_id + + +@pytest.mark.usefixtures("init_components") +async def test_error_duplicate_names_same_area( hass: HomeAssistant, area_registry: ar.AreaRegistry, entity_registry: er.EntityRegistry, ) -> None: - """Test error message when multiple devices have the same name (or alias).""" + """Test error message when multiple devices have the same name (or alias) in the same area.""" area_kitchen = area_registry.async_get_or_create("kitchen_id") area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") @@ -926,6 +1393,127 @@ async def test_error_duplicate_names_in_area( ) +@pytest.mark.usefixtures("init_components") +async def test_duplicate_names_same_area_but_one_is_exposed( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test when multiple devices have the same name (or alias) in the same area but only one is exposed.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") + + kitchen_light_1 = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light_2 = entity_registry.async_get_or_create("light", "demo", "5678") + + # Same name and alias + for light in (kitchen_light_1, kitchen_light_2): + light = entity_registry.async_update_entity( + light.entity_id, + name="kitchen light", + area_id=area_kitchen.id, + aliases={"overhead light"}, + ) + hass.states.async_set( + light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: light.name}, + ) + + # Only expose one + expose_entity(hass, kitchen_light_1.entity_id, True) + expose_entity(hass, kitchen_light_2.entity_id, False) + + # Check name and alias + async_mock_service(hass, "light", "turn_on") + for name in ("kitchen light", "overhead light"): + # command + result = await conversation.async_converse( + hass, f"turn on {name} in {area_kitchen.name}", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.matched_states[0].entity_id == kitchen_light_1.entity_id + + +@pytest.mark.usefixtures("init_components") +async def test_duplicate_names_different_areas( + hass: 
HomeAssistant, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test preferred area when multiple devices have the same name (or alias) in different areas.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") + + area_bedroom = area_registry.async_get_or_create("bedroom_id") + area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, area_id=area_kitchen.id + ) + bedroom_light = entity_registry.async_get_or_create("light", "demo", "5678") + bedroom_light = entity_registry.async_update_entity( + bedroom_light.entity_id, area_id=area_bedroom.id + ) + + # Same name and alias + for light in (kitchen_light, bedroom_light): + light = entity_registry.async_update_entity( + light.entity_id, + name="test light", + aliases={"overhead light"}, + ) + hass.states.async_set( + light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: light.name}, + ) + + # Add a satellite in the kitchen and bedroom + kitchen_entry = MockConfigEntry() + kitchen_entry.add_to_hass(hass) + device_kitchen = device_registry.async_get_or_create( + config_entry_id=kitchen_entry.entry_id, + connections=set(), + identifiers={("demo", "device-kitchen")}, + ) + device_registry.async_update_device(device_kitchen.id, area_id=area_kitchen.id) + + bedroom_entry = MockConfigEntry() + bedroom_entry.add_to_hass(hass) + device_bedroom = device_registry.async_get_or_create( + config_entry_id=bedroom_entry.entry_id, + connections=set(), + identifiers={("demo", "device-bedroom")}, + ) + device_registry.async_update_device(device_bedroom.id, area_id=area_bedroom.id) + + # Check name and alias + async_mock_service(hass, "light", "turn_on") + for name in ("test light", "overhead light"): + # Should fail without a preferred area + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.ERROR + + # Target kitchen light by using kitchen device + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), None, device_id=device_kitchen.id + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.matched_states[0].entity_id == kitchen_light.entity_id + + # Target bedroom light by using bedroom device + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), None, device_id=device_bedroom.id + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.matched_states[0].entity_id == bedroom_light.entity_id + + @pytest.mark.usefixtures("init_components") async def test_error_wrong_state(hass: HomeAssistant) -> None: """Test error message when no entities are in the correct state.""" @@ -1148,7 +1736,7 @@ async def test_empty_aliases( return_value=None, ) as mock_recognize_all: await conversation.async_converse( - hass, "turn on lights in the kitchen", None, Context(), None + hass, "turn on kitchen light", None, Context(), None ) assert mock_recognize_all.call_count > 0 @@ -1905,7 +2493,7 @@ async def test_non_default_response(hass: HomeAssistant, init_components) -> Non hass.states.async_set("cover.front_door", "closed") calls = 
async_mock_service(hass, "cover", SERVICE_OPEN_COVER) - agent = default_agent.async_get_default_agent(hass) + agent = hass.data[DATA_DEFAULT_ENTITY] assert isinstance(agent, default_agent.DefaultAgent) result = await agent.async_process( @@ -2014,13 +2602,15 @@ async def test_light_area_same_name( device_registry.async_update_device(device.id, area_id=kitchen_area.id) kitchen_light = entity_registry.async_get_or_create( - "light", "demo", "1234", original_name="kitchen light" + "light", "demo", "1234", original_name="light in the kitchen" ) entity_registry.async_update_entity( kitchen_light.entity_id, area_id=kitchen_area.id ) hass.states.async_set( - kitchen_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} + kitchen_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: "light in the kitchen"}, ) ceiling_light = entity_registry.async_get_or_create( @@ -2033,12 +2623,19 @@ async def test_light_area_same_name( ceiling_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "ceiling light"} ) + bathroom_light = entity_registry.async_get_or_create( + "light", "demo", "9012", original_name="light" + ) + hass.states.async_set( + bathroom_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "light"} + ) + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") await hass.services.async_call( "conversation", "process", - {conversation.ATTR_TEXT: "turn on kitchen light"}, + {conversation.ATTR_TEXT: "turn on light in the kitchen"}, ) await hass.async_block_till_done() @@ -2112,7 +2709,10 @@ async def test_config_sentences_priority( hass_admin_user: MockUser, snapshot: SnapshotAssertion, ) -> None: - """Test that user intents from configuration.yaml have priority over builtin intents/sentences.""" + """Test that user intents from configuration.yaml have priority over builtin intents/sentences. + + Also test that they follow proper selection logic. + """ # Add a custom sentence that would match a builtin sentence. # Custom sentences have priority. 
assert await async_setup_component(hass, "homeassistant", {}) @@ -2120,13 +2720,36 @@ async def test_config_sentences_priority( assert await async_setup_component( hass, "conversation", - {"conversation": {"intents": {"CustomIntent": ["turn on the lamp"]}}}, + { + "conversation": { + "intents": { + "CustomIntent": ["turn on "], + "WorseCustomIntent": ["turn on the lamp"], + "FakeCustomIntent": ["turn on "], + } + } + }, ) + + # Fake intent not being custom + intents = ( + await conversation.async_get_agent(hass).async_get_or_load_intents( + hass.config.language + ) + ).intents.intents + intents["FakeCustomIntent"].data[0].metadata[METADATA_CUSTOM_SENTENCE] = False + assert await async_setup_component(hass, "light", {}) assert await async_setup_component( hass, "intent_script", - {"intent_script": {"CustomIntent": {"speech": {"text": "custom response"}}}}, + { + "intent_script": { + "CustomIntent": {"speech": {"text": "custom response"}}, + "WorseCustomIntent": {"speech": {"text": "worse custom response"}}, + "FakeCustomIntent": {"speech": {"text": "fake custom response"}}, + } + }, ) # Ensure that a "lamp" exists so that we can verify the custom intent @@ -2143,3 +2766,333 @@ async def test_config_sentences_priority( data = result.as_dict() assert data["response"]["response_type"] == "action_done" assert data["response"]["speech"]["plain"]["speech"] == "custom response" + + +async def test_query_same_name_different_areas( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test asking a question about entities with the same name in different areas.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + + kitchen_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + kitchen_area = area_registry.async_create("kitchen") + device_registry.async_update_device(kitchen_device.id, area_id=kitchen_area.id) + + kitchen_light = entity_registry.async_get_or_create( + "light", + "demo", + "1234", + ) + entity_registry.async_update_entity( + kitchen_light.entity_id, area_id=kitchen_area.id + ) + hass.states.async_set( + kitchen_light.entity_id, + "on", + attributes={ATTR_FRIENDLY_NAME: "overhead light"}, + ) + + bedroom_area = area_registry.async_create("bedroom") + bedroom_light = entity_registry.async_get_or_create( + "light", + "demo", + "5678", + ) + entity_registry.async_update_entity( + bedroom_light.entity_id, area_id=bedroom_area.id + ) + hass.states.async_set( + bedroom_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: "overhead light"}, + ) + + # Should fail without a preferred area (duplicate name) + result = await conversation.async_converse( + hass, "is the overhead light on?", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.ERROR + + # Succeeds using area from device (kitchen) + result = await conversation.async_converse( + hass, + "is the overhead light on?", + None, + Context(), + None, + device_id=kitchen_device.id, + ) + assert result.response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(result.response.matched_states) == 1 + assert result.response.matched_states[0].entity_id == kitchen_light.entity_id + + +@pytest.mark.usefixtures("init_components") +async def test_intent_cache_exposed(hass: HomeAssistant) -> None: + """Test that intent recognition results are cached for 
exposed entities.""" + agent = hass.data[DATA_DEFAULT_ENTITY] + assert isinstance(agent, default_agent.DefaultAgent) + + entity_id = "light.test_light" + hass.states.async_set(entity_id, "off") + expose_entity(hass, entity_id, True) + await hass.async_block_till_done() + + user_input = ConversationInput( + text="turn on test light", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert result.entities["name"].text == "test light" + + # Mark this result so we know it is from cache next time + mark = "_from_cache" + setattr(result, mark, True) + + # Should be from cache this time + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is True + + # Unexposing clears the cache + expose_entity(hass, entity_id, False) + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is None + + +@pytest.mark.usefixtures("init_components") +async def test_intent_cache_all_entities(hass: HomeAssistant) -> None: + """Test that intent recognition results are cached for all entities.""" + agent = hass.data[DATA_DEFAULT_ENTITY] + assert isinstance(agent, default_agent.DefaultAgent) + + entity_id = "light.test_light" + hass.states.async_set(entity_id, "off") + expose_entity(hass, entity_id, False) # not exposed + await hass.async_block_till_done() + + user_input = ConversationInput( + text="turn on test light", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert result.entities["name"].text == "test light" + + # Mark this result so we know it is from cache next time + mark = "_from_cache" + setattr(result, mark, True) + + # Should be from cache this time + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is True + + # Adding a new entity clears the cache + hass.states.async_set("light.new_light", "off") + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is None + + +@pytest.mark.usefixtures("init_components") +async def test_intent_cache_fuzzy(hass: HomeAssistant) -> None: + """Test that intent recognition results are cached for fuzzy matches.""" + agent = hass.data[DATA_DEFAULT_ENTITY] + assert isinstance(agent, default_agent.DefaultAgent) + + # There is no entity named test light + user_input = ConversationInput( + text="turn on test light", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert result.unmatched_entities["area"].text == "test " + + # Mark this result so we know it is from cache next time + mark = "_from_cache" + setattr(result, mark, True) + + # Should be from cache this time + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is True + + +@pytest.mark.usefixtures("init_components") +async def test_entities_filtered_by_input(hass: HomeAssistant) -> None: + """Test that entities are filtered by the input text before intent matching.""" + agent = hass.data[DATA_DEFAULT_ENTITY] + assert 
isinstance(agent, default_agent.DefaultAgent)
+
+    # Only the switch is exposed
+    hass.states.async_set("light.test_light", "off")
+    hass.states.async_set(
+        "light.test_light_2", "off", attributes={ATTR_FRIENDLY_NAME: "test light"}
+    )
+    hass.states.async_set("cover.garage_door", "closed")
+    hass.states.async_set("switch.test_switch", "off")
+    expose_entity(hass, "light.test_light", False)
+    expose_entity(hass, "light.test_light_2", False)
+    expose_entity(hass, "cover.garage_door", False)
+    expose_entity(hass, "switch.test_switch", True)
+    await hass.async_block_till_done()
+
+    # test switch is exposed
+    user_input = ConversationInput(
+        text="turn on test switch",
+        context=Context(),
+        conversation_id=None,
+        device_id=None,
+        language=hass.config.language,
+        agent_id=None,
+    )
+
+    with patch(
+        "homeassistant.components.conversation.default_agent.recognize_best",
+        return_value=None,
+    ) as recognize_best:
+        await agent.async_recognize_intent(user_input)
+
+    # (1) exposed, (2) all entities
+    assert len(recognize_best.call_args_list) == 2
+
+    # Only the test switch should have been considered because its name shows
+    # up in the input text.
+    slot_lists = recognize_best.call_args_list[0].kwargs["slot_lists"]
+    name_list = slot_lists["name"]
+    assert len(name_list.values) == 1
+    assert name_list.values[0].text_in.text == "test switch"
+
+    # test light is not exposed
+    user_input = ConversationInput(
+        text="turn on Test Light",  # different casing for name
+        context=Context(),
+        conversation_id=None,
+        device_id=None,
+        language=hass.config.language,
+        agent_id=None,
+    )
+
+    with patch(
+        "homeassistant.components.conversation.default_agent.recognize_best",
+        return_value=None,
+    ) as recognize_best:
+        await agent.async_recognize_intent(user_input)
+
+    # (1) exposed, (2) all entities
+    assert len(recognize_best.call_args_list) == 2
+
+    # Both test lights should have been considered because their name shows
+    # up in the input text.
+ slot_lists = recognize_best.call_args_list[1].kwargs["slot_lists"] + name_list = slot_lists["name"] + assert len(name_list.values) == 2 + assert name_list.values[0].text_in.text == "test light" + assert name_list.values[1].text_in.text == "test light" + + +@pytest.mark.usefixtures("init_components") +async def test_entities_names_are_not_templates(hass: HomeAssistant) -> None: + """Test that entities names are not treated as hassil templates.""" + # Contains hassil template characters + hass.states.async_set( + "light.test_light", "off", attributes={ATTR_FRIENDLY_NAME: " None: + """Test turn on/off in multiple languages.""" + entity_id = "light.light1234" + hass.states.async_set( + entity_id, STATE_OFF, attributes={ATTR_FRIENDLY_NAME: light_name} + ) + + on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + await conversation.async_converse( + hass, + on_sentence, + None, + Context(), + language=language, + ) + assert len(on_calls) == 1 + assert on_calls[0].data.get("entity_id") == [entity_id] + + off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") + await conversation.async_converse( + hass, + off_sentence, + None, + Context(), + language=language, + ) + assert len(off_calls) == 1 + assert off_calls[0].data.get("entity_id") == [entity_id] diff --git a/tests/components/conversation/test_default_agent_intents.py b/tests/components/conversation/test_default_agent_intents.py index 7bae9c43f70..244fa6bda7b 100644 --- a/tests/components/conversation/test_default_agent_intents.py +++ b/tests/components/conversation/test_default_agent_intents.py @@ -36,6 +36,7 @@ from homeassistant.helpers import ( intent, ) from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util from tests.common import async_mock_service @@ -445,12 +446,22 @@ async def test_todo_add_item_fr( assert intent_obj.slots.get("item", {}).get("value", "").strip() == "farine" -@freeze_time(datetime(year=2013, month=9, day=17, hour=1, minute=2)) +@freeze_time( + datetime( + year=2013, + month=9, + day=17, + hour=1, + minute=2, + tzinfo=dt_util.UTC, + ) +) async def test_date_time( hass: HomeAssistant, init_components, ) -> None: """Test the date and time intents.""" + await hass.config.async_set_time_zone("UTC") result = await conversation.async_converse( hass, "what is the date", None, Context(), None ) diff --git a/tests/components/conversation/test_http.py b/tests/components/conversation/test_http.py index 1431fd6c17b..6d69ec3c739 100644 --- a/tests/components/conversation/test_http.py +++ b/tests/components/conversation/test_http.py @@ -8,6 +8,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.conversation import default_agent +from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.const import ATTR_FRIENDLY_NAME from homeassistant.core import HomeAssistant @@ -214,7 +215,7 @@ async def test_ws_prepare( hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator, agent_id ) -> None: """Test the Websocket prepare conversation API.""" - agent = default_agent.async_get_default_agent(hass) + agent = hass.data[DATA_DEFAULT_ENTITY] assert isinstance(agent, default_agent.DefaultAgent) # No intents should be loaded yet @@ -354,15 +355,15 @@ async def test_ws_hass_agent_debug_null_result( """Test homeassistant agent debug websocket command with a null result.""" client = await hass_ws_client(hass) - async def 
async_recognize(self, user_input, *args, **kwargs): + async def async_recognize_intent(self, user_input, *args, **kwargs): if user_input.text == "bad sentence": return None return await self.async_recognize(user_input, *args, **kwargs) with patch( - "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize", - async_recognize, + "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize_intent", + async_recognize_intent, ): await client.send_json_auto_id( { @@ -500,6 +501,19 @@ async def test_ws_hass_agent_debug_sentence_trigger( client = await hass_ws_client(hass) + # List sentence + await client.send_json_auto_id( + { + "type": "conversation/sentences/list", + } + ) + await hass.async_block_till_done() + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + # Use trigger sentence await client.send_json_auto_id( { diff --git a/tests/components/conversation/test_init.py b/tests/components/conversation/test_init.py index 34a8fce636d..6900ba2d419 100644 --- a/tests/components/conversation/test_init.py +++ b/tests/components/conversation/test_init.py @@ -8,9 +8,15 @@ from syrupy.assertion import SnapshotAssertion import voluptuous as vol from homeassistant.components import conversation -from homeassistant.components.conversation import default_agent +from homeassistant.components.conversation import ( + ConversationInput, + async_handle_intents, + async_handle_sentence_triggers, + default_agent, +) +from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.core import HomeAssistant +from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -143,7 +149,7 @@ async def test_prepare_reload(hass: HomeAssistant, init_components) -> None: language = hass.config.language # Load intents - agent = default_agent.async_get_default_agent(hass) + agent = hass.data[DATA_DEFAULT_ENTITY] assert isinstance(agent, default_agent.DefaultAgent) await agent.async_prepare(language) @@ -171,7 +177,7 @@ async def test_prepare_fail(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "conversation", {}) # Load intents - agent = default_agent.async_get_default_agent(hass) + agent = hass.data[DATA_DEFAULT_ENTITY] assert isinstance(agent, default_agent.DefaultAgent) await agent.async_prepare("not-a-language") @@ -228,3 +234,97 @@ async def test_prepare_agent( await conversation.async_prepare_agent(hass, agent_id, "en") assert len(mock_prepare.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("response_template", "expected_response"), + [("response {{ trigger.device_id }}", "response 1234"), ("", "")], +) +async def test_async_handle_sentence_triggers( + hass: HomeAssistant, response_template: str, expected_response: str +) -> None: + """Test handling sentence triggers with async_handle_sentence_triggers.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": { + "platform": "conversation", + "command": ["my trigger"], + }, + "action": { + "set_conversation_response": response_template, + }, + } + }, + ) + + # Device id will be available in response template + device_id = "1234" + 
actual_response = await async_handle_sentence_triggers( + hass, + ConversationInput( + text="my trigger", + context=Context(), + conversation_id=None, + device_id=device_id, + language=hass.config.language, + ), + ) + assert actual_response == expected_response + + +async def test_async_handle_intents(hass: HomeAssistant) -> None: + """Test handling registered intents with async_handle_intents.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + + # Reuse custom sentences in test config to trigger default agent. + class OrderBeerIntentHandler(intent.IntentHandler): + intent_type = "OrderBeer" + + def __init__(self) -> None: + super().__init__() + self.was_handled = False + + async def async_handle( + self, intent_obj: intent.Intent + ) -> intent.IntentResponse: + self.was_handled = True + return intent_obj.create_response() + + handler = OrderBeerIntentHandler() + intent.async_register(hass, handler) + + # Registered intent will be handled + result = await async_handle_intents( + hass, + ConversationInput( + text="I'd like to order a stout", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + ), + ) + assert result is not None + assert result.intent is not None + assert result.intent.intent_type == handler.intent_type + assert handler.was_handled + + # No error messages, just None as a result + result = await async_handle_intents( + hass, + ConversationInput( + text="this sentence does not exist", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + ), + ) + assert result is None diff --git a/tests/components/conversation/test_trace.py b/tests/components/conversation/test_trace.py index 59cd10d2510..7c00b9a80b2 100644 --- a/tests/components/conversation/test_trace.py +++ b/tests/components/conversation/test_trace.py @@ -56,7 +56,7 @@ async def test_converation_trace( "intent_name": "HassListAddItem", "slots": { "name": "Shopping List", - "item": "apples ", + "item": "apples", }, } diff --git a/tests/components/conversation/test_trigger.py b/tests/components/conversation/test_trigger.py index 3c3e58e7136..50fac51c87a 100644 --- a/tests/components/conversation/test_trigger.py +++ b/tests/components/conversation/test_trigger.py @@ -6,6 +6,7 @@ import pytest import voluptuous as vol from homeassistant.components.conversation import default_agent +from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY from homeassistant.components.conversation.models import ConversationInput from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.helpers import trigger @@ -39,18 +40,31 @@ async def test_if_fires_on_event( }, "action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, } }, ) - + context = Context() service_response = await hass.services.async_call( "conversation", "process", {"text": "Ha ha ha"}, blocking=True, return_response=True, + context=context, ) assert service_response["response"]["speech"]["plain"]["speech"] == "Done" @@ -60,13 +74,21 @@ async def test_if_fires_on_event( assert 
service_calls[1].service == "automation" assert service_calls[1].data["data"] == { "alias": None, - "id": "0", - "idx": "0", + "id": 0, + "idx": 0, "platform": "conversation", "sentence": "Ha ha ha", "slots": {}, "details": {}, "device_id": None, + "user_input": { + "agent_id": None, + "context": context.as_dict(), + "conversation_id": None, + "device_id": None, + "language": "en", + "text": "Ha ha ha", + }, } @@ -151,7 +173,19 @@ async def test_response_same_sentence( {"delay": "0:0:0.100"}, { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, {"set_conversation_response": "response 2"}, ], @@ -167,13 +201,14 @@ async def test_response_same_sentence( ] }, ) - + context = Context() service_response = await hass.services.async_call( "conversation", "process", {"text": "test sentence"}, blocking=True, return_response=True, + context=context, ) await hass.async_block_till_done() @@ -187,12 +222,20 @@ async def test_response_same_sentence( assert service_calls[1].data["data"] == { "alias": None, "id": "trigger1", - "idx": "0", + "idx": 0, "platform": "conversation", "sentence": "test sentence", "slots": {}, "details": {}, "device_id": None, + "user_input": { + "agent_id": None, + "context": context.as_dict(), + "conversation_id": None, + "device_id": None, + "language": "en", + "text": "test sentence", + }, } @@ -230,13 +273,14 @@ async def test_response_same_sentence_with_error( ] }, ) - + context = Context() service_response = await hass.services.async_call( "conversation", "process", {"text": "test sentence"}, blocking=True, return_response=True, + context=context, ) await hass.async_block_till_done() @@ -319,12 +363,24 @@ async def test_same_trigger_multiple_sentences( }, "action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, } }, ) - + context = Context() await hass.services.async_call( "conversation", "process", @@ -332,6 +388,7 @@ async def test_same_trigger_multiple_sentences( "text": "hello", }, blocking=True, + context=context, ) # Only triggers once @@ -341,13 +398,21 @@ async def test_same_trigger_multiple_sentences( assert service_calls[1].service == "automation" assert service_calls[1].data["data"] == { "alias": None, - "id": "0", - "idx": "0", + "id": 0, + "idx": 0, "platform": "conversation", "sentence": "hello", "slots": {}, "details": {}, "device_id": None, + "user_input": { + "agent_id": None, + "context": context.as_dict(), + "conversation_id": None, + "device_id": None, + "language": "en", + "text": "hello", + }, } @@ -370,7 +435,19 @@ async def test_same_sentence_multiple_triggers( }, "action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ 
trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, }, { @@ -383,7 +460,19 @@ async def test_same_sentence_multiple_triggers( }, "action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, }, ], @@ -487,12 +576,25 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) }, "action": { "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, + "data_template": { + "data": { + "alias": "{{ trigger.alias }}", + "id": "{{ trigger.id }}", + "idx": "{{ trigger.idx }}", + "platform": "{{ trigger.platform }}", + "sentence": "{{ trigger.sentence }}", + "slots": "{{ trigger.slots }}", + "details": "{{ trigger.details }}", + "device_id": "{{ trigger.device_id }}", + "user_input": "{{ trigger.user_input }}", + } + }, }, } }, ) + context = Context() await hass.services.async_call( "conversation", "process", @@ -500,6 +602,7 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) "text": "play the white album by the beatles", }, blocking=True, + context=context, ) await hass.async_block_till_done() @@ -508,8 +611,8 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) assert service_calls[1].service == "automation" assert service_calls[1].data["data"] == { "alias": None, - "id": "0", - "idx": "0", + "id": 0, + "idx": 0, "platform": "conversation", "sentence": "play the white album by the beatles", "slots": { @@ -529,6 +632,14 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) }, }, "device_id": None, + "user_input": { + "agent_id": None, + "context": context.as_dict(), + "conversation_id": None, + "device_id": None, + "language": "en", + "text": "play the white album by the beatles", + }, } @@ -550,7 +661,7 @@ async def test_trigger_with_device_id(hass: HomeAssistant) -> None: }, ) - agent = default_agent.async_get_default_agent(hass) + agent = hass.data[DATA_DEFAULT_ENTITY] assert isinstance(agent, default_agent.DefaultAgent) result = await agent.async_process( diff --git a/tests/components/cookidoo/__init__.py b/tests/components/cookidoo/__init__.py new file mode 100644 index 00000000000..043f627ecc6 --- /dev/null +++ b/tests/components/cookidoo/__init__.py @@ -0,0 +1,15 @@ +"""Tests for the Cookidoo integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Mock setup of the cookidoo integration.""" + cookidoo_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(cookidoo_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/cookidoo/conftest.py b/tests/components/cookidoo/conftest.py new file mode 100644 index 00000000000..68700967d35 --- /dev/null +++ b/tests/components/cookidoo/conftest.py @@ -0,0 +1,76 @@ +"""Common fixtures for the Cookidoo 
tests.""" + +from collections.abc import Generator +from typing import cast +from unittest.mock import AsyncMock, patch + +from cookidoo_api import ( + CookidooAdditionalItem, + CookidooAuthResponse, + CookidooIngredientItem, +) +import pytest + +from homeassistant.components.cookidoo.const import DOMAIN +from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD + +from tests.common import MockConfigEntry, load_json_object_fixture + +EMAIL = "test-email" +PASSWORD = "test-password" +COUNTRY = "CH" +LANGUAGE = "de-CH" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.cookidoo.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_cookidoo_client() -> Generator[AsyncMock]: + """Mock a Cookidoo client.""" + with ( + patch( + "homeassistant.components.cookidoo.Cookidoo", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.cookidoo.config_flow.Cookidoo", + new=mock_client, + ), + ): + client = mock_client.return_value + client.login.return_value = cast(CookidooAuthResponse, {"name": "Cookidoo"}) + client.get_ingredient_items.return_value = [ + CookidooIngredientItem(**item) + for item in load_json_object_fixture("ingredient_items.json", DOMAIN)[ + "data" + ] + ] + client.get_additional_items.return_value = [ + CookidooAdditionalItem(**item) + for item in load_json_object_fixture("additional_items.json", DOMAIN)[ + "data" + ] + ] + yield client + + +@pytest.fixture(name="cookidoo_config_entry") +def mock_cookidoo_config_entry() -> MockConfigEntry: + """Mock cookidoo configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: EMAIL, + CONF_PASSWORD: PASSWORD, + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + }, + entry_id="01JBVVVJ87F6G5V0QJX6HBC94T", + ) diff --git a/tests/components/cookidoo/fixtures/additional_items.json b/tests/components/cookidoo/fixtures/additional_items.json new file mode 100644 index 00000000000..97cd206f6ad --- /dev/null +++ b/tests/components/cookidoo/fixtures/additional_items.json @@ -0,0 +1,9 @@ +{ + "data": [ + { + "id": "unique_id_tomaten", + "name": "Tomaten", + "is_owned": false + } + ] +} diff --git a/tests/components/cookidoo/fixtures/ingredient_items.json b/tests/components/cookidoo/fixtures/ingredient_items.json new file mode 100644 index 00000000000..7fbeb90e91a --- /dev/null +++ b/tests/components/cookidoo/fixtures/ingredient_items.json @@ -0,0 +1,10 @@ +{ + "data": [ + { + "id": "unique_id_mehl", + "name": "Mehl", + "description": "200 g", + "is_owned": false + } + ] +} diff --git a/tests/components/cookidoo/snapshots/test_todo.ambr b/tests/components/cookidoo/snapshots/test_todo.ambr new file mode 100644 index 00000000000..965cbb0adde --- /dev/null +++ b/tests/components/cookidoo/snapshots/test_todo.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_todo[todo.cookidoo_additional_purchases-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.cookidoo_additional_purchases', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Additional 
purchases', + 'platform': 'cookidoo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'additional_item_list', + 'unique_id': '01JBVVVJ87F6G5V0QJX6HBC94T_additional_items', + 'unit_of_measurement': None, + }) +# --- +# name: test_todo[todo.cookidoo_additional_purchases-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cookidoo Additional purchases', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.cookidoo_additional_purchases', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_todo[todo.cookidoo_shopping_list-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.cookidoo_shopping_list', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Shopping list', + 'platform': 'cookidoo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'ingredient_list', + 'unique_id': '01JBVVVJ87F6G5V0QJX6HBC94T_ingredients', + 'unit_of_measurement': None, + }) +# --- +# name: test_todo[todo.cookidoo_shopping_list-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cookidoo Shopping list', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.cookidoo_shopping_list', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- diff --git a/tests/components/cookidoo/test_config_flow.py b/tests/components/cookidoo/test_config_flow.py new file mode 100644 index 00000000000..0057bb3767e --- /dev/null +++ b/tests/components/cookidoo/test_config_flow.py @@ -0,0 +1,464 @@ +"""Test the Cookidoo config flow.""" + +from unittest.mock import AsyncMock + +from cookidoo_api.exceptions import ( + CookidooAuthException, + CookidooException, + CookidooRequestException, +) +import pytest + +from homeassistant.components.cookidoo.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import COUNTRY, EMAIL, LANGUAGE, PASSWORD +from .test_init import setup_integration + +from tests.common import MockConfigEntry + +MOCK_DATA_USER_STEP = { + CONF_EMAIL: EMAIL, + CONF_PASSWORD: PASSWORD, + CONF_COUNTRY: COUNTRY, +} + +MOCK_DATA_LANGUAGE_STEP = { + CONF_LANGUAGE: LANGUAGE, +} + + +async def test_flow_user_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_cookidoo_client: AsyncMock +) -> None: + """Test we get the user flow and create entry with success.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + 
assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Cookidoo" + assert result["data"] == {**MOCK_DATA_USER_STEP, **MOCK_DATA_LANGUAGE_STEP} + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooAuthException(), "invalid_auth"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_user_init_data_unknown_error_and_recover_on_step_1( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.login.side_effect = raise_error + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].title == "Cookidoo" + + assert result["data"] == {**MOCK_DATA_USER_STEP, **MOCK_DATA_LANGUAGE_STEP} + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooAuthException(), "invalid_auth"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_user_init_data_unknown_error_and_recover_on_step_2( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.get_additional_items.side_effect = raise_error + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.get_additional_items.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].title == "Cookidoo" + + assert result["data"] == {**MOCK_DATA_USER_STEP, **MOCK_DATA_LANGUAGE_STEP} + + +async def test_flow_user_init_data_already_configured( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test we abort user data set when entry is already configured.""" + + cookidoo_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "user"} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, 
+ ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_flow_reconfigure_success( + hass: HomeAssistant, + cookidoo_config_entry: AsyncMock, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test we get the reconfigure flow and create entry with success.""" + cookidoo_config_entry.add_to_hass(hass) + await setup_integration(hass, cookidoo_config_entry) + + result = await cookidoo_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert cookidoo_config_entry.data == { + **MOCK_DATA_USER_STEP, + CONF_COUNTRY: "DE", + CONF_LANGUAGE: "de-DE", + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_reconfigure_init_data_unknown_error_and_recover_on_step_1( + hass: HomeAssistant, + cookidoo_config_entry: AsyncMock, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.login.side_effect = raise_error + + cookidoo_config_entry.add_to_hass(hass) + await setup_integration(hass, cookidoo_config_entry) + + result = await cookidoo_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert cookidoo_config_entry.data == { + **MOCK_DATA_USER_STEP, + CONF_COUNTRY: "DE", + CONF_LANGUAGE: "de-DE", + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_reconfigure_init_data_unknown_error_and_recover_on_step_2( + hass: HomeAssistant, + cookidoo_config_entry: AsyncMock, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.get_additional_items.side_effect = raise_error + + 
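+    # The language step validates by fetching items, so the injected failure only surfaces on step 2.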
cookidoo_config_entry.add_to_hass(hass) + await setup_integration(hass, cookidoo_config_entry) + + result = await cookidoo_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.get_additional_items.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert cookidoo_config_entry.data == { + **MOCK_DATA_USER_STEP, + CONF_COUNTRY: "DE", + CONF_LANGUAGE: "de-DE", + } + assert len(hass.config_entries.async_entries()) == 1 + + +async def test_flow_reauth( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow.""" + + cookidoo_config_entry.add_to_hass(hass) + + result = await cookidoo_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert cookidoo_config_entry.data == { + CONF_EMAIL: "new-email", + CONF_PASSWORD: "new-password", + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooAuthException(), "invalid_auth"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_reauth_error_and_recover( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, + raise_error, + text_error, +) -> None: + """Test reauth flow.""" + + cookidoo_config_entry.add_to_hass(hass) + + result = await cookidoo_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_cookidoo_client.login.side_effect = raise_error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": text_error} + + mock_cookidoo_client.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert cookidoo_config_entry.data == { + CONF_EMAIL: "new-email", + CONF_PASSWORD: "new-password", + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + 
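+    # Reauth with the original email should succeed; a different email should abort as already_configured.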
("new_email", "saved_email", "result_reason"), + [ + (EMAIL, EMAIL, "reauth_successful"), + ("another-email", EMAIL, "already_configured"), + ], +) +async def test_flow_reauth_init_data_already_configured( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, + new_email: str, + saved_email: str, + result_reason: str, +) -> None: + """Test we abort user data set when entry is already configured.""" + + cookidoo_config_entry.add_to_hass(hass) + + another_cookidoo_config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "another-email", + CONF_PASSWORD: PASSWORD, + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + }, + ) + + another_cookidoo_config_entry.add_to_hass(hass) + + result = await cookidoo_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: new_email, CONF_PASSWORD: PASSWORD}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == result_reason + assert cookidoo_config_entry.data[CONF_EMAIL] == saved_email diff --git a/tests/components/cookidoo/test_init.py b/tests/components/cookidoo/test_init.py new file mode 100644 index 00000000000..b1b9b880526 --- /dev/null +++ b/tests/components/cookidoo/test_init.py @@ -0,0 +1,102 @@ +"""Unit tests for the cookidoo integration.""" + +from unittest.mock import AsyncMock + +from cookidoo_api import CookidooAuthException, CookidooRequestException +import pytest + +from homeassistant.components.cookidoo.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("mock_cookidoo_client") +async def test_load_unload( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test loading and unloading of the config entry.""" + await setup_integration(hass, cookidoo_config_entry) + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(cookidoo_config_entry.entry_id) + assert cookidoo_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("exception", "status"), + [ + (CookidooRequestException, ConfigEntryState.SETUP_RETRY), + (CookidooAuthException, ConfigEntryState.SETUP_ERROR), + ], +) +async def test_init_failure( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + status: ConfigEntryState, + exception: Exception, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test an initialization error on integration load.""" + mock_cookidoo_client.login.side_effect = exception + await setup_integration(hass, cookidoo_config_entry) + assert cookidoo_config_entry.state == status + + +@pytest.mark.parametrize( + "cookidoo_method", + [ + "get_ingredient_items", + "get_additional_items", + ], +) +async def test_config_entry_not_ready( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, + cookidoo_method: str, +) -> None: + """Test config entry not ready.""" + getattr( + mock_cookidoo_client, cookidoo_method + ).side_effect = CookidooRequestException() + cookidoo_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(cookidoo_config_entry.entry_id) + await 
hass.async_block_till_done() + + assert cookidoo_config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize( + ("exception", "status"), + [ + (None, ConfigEntryState.LOADED), + (CookidooRequestException, ConfigEntryState.SETUP_RETRY), + (CookidooAuthException, ConfigEntryState.SETUP_ERROR), + ], +) +async def test_config_entry_not_ready_auth_error( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, + exception: Exception | None, + status: ConfigEntryState, +) -> None: + """Test config entry not ready from authentication error.""" + + mock_cookidoo_client.get_ingredient_items.side_effect = CookidooAuthException + mock_cookidoo_client.refresh_token.side_effect = exception + + cookidoo_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(cookidoo_config_entry.entry_id) + await hass.async_block_till_done() + + assert cookidoo_config_entry.state is status diff --git a/tests/components/cookidoo/test_todo.py b/tests/components/cookidoo/test_todo.py new file mode 100644 index 00000000000..0e60a86d225 --- /dev/null +++ b/tests/components/cookidoo/test_todo.py @@ -0,0 +1,292 @@ +"""Test for todo platform of the Cookidoo integration.""" + +from collections.abc import Generator +import re +from unittest.mock import AsyncMock, patch + +from cookidoo_api import ( + CookidooAdditionalItem, + CookidooIngredientItem, + CookidooRequestException, +) +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoItemStatus, + TodoServices, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +def todo_only() -> Generator[None]: + """Enable only the todo platform.""" + with patch( + "homeassistant.components.cookidoo.PLATFORMS", + [Platform.TODO], + ): + yield + + +@pytest.mark.usefixtures("mock_cookidoo_client") +async def test_todo( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Snapshot test states of todo platform.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform( + hass, entity_registry, snapshot, cookidoo_config_entry.entry_id + ) + + +async def test_update_ingredient( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update ingredient item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_mehl", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_shopping_list"}, + blocking=True, + ) + + mock_cookidoo_client.edit_ingredient_items_ownership.assert_called_once_with( + [ + CookidooIngredientItem( + id="unique_id_mehl", + name="", + description="", + is_owned=True, + ) + ], + ) + + +async def test_update_ingredient_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update ingredient with exception.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + mock_cookidoo_client.edit_ingredient_items_ownership.side_effect = ( + CookidooRequestException + ) + with pytest.raises( + HomeAssistantError, match="Failed to update Mehl in Cookidoo shopping list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_mehl", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_shopping_list"}, + blocking=True, + ) + + +async def test_add_additional_item( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test add additional item to list.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + service_data={ATTR_ITEM: "Äpfel"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + mock_cookidoo_client.add_additional_items.assert_called_once_with( + ["Äpfel"], + ) + + +async def test_add_additional_item_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test add additional item to list with exception.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + mock_cookidoo_client.add_additional_items.side_effect = CookidooRequestException + with pytest.raises( + HomeAssistantError, match="Failed to save Äpfel to Cookidoo shopping list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + 
service_data={ATTR_ITEM: "Äpfel"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + +async def test_update_additional_item( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update additional item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_tomaten", + ATTR_RENAME: "Peperoni", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + mock_cookidoo_client.edit_additional_items_ownership.assert_called_once_with( + [ + CookidooAdditionalItem( + id="unique_id_tomaten", + name="Peperoni", + is_owned=True, + ) + ], + ) + mock_cookidoo_client.edit_additional_items.assert_called_once_with( + [ + CookidooAdditionalItem( + id="unique_id_tomaten", + name="Peperoni", + is_owned=True, + ) + ], + ) + + +async def test_update_additional_item_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update additional item with exception.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + mock_cookidoo_client.edit_additional_items_ownership.side_effect = ( + CookidooRequestException + ) + mock_cookidoo_client.edit_additional_items.side_effect = CookidooRequestException + with pytest.raises( + HomeAssistantError, match="Failed to update Peperoni in Cookidoo shopping list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_tomaten", + ATTR_RENAME: "Peperoni", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + +async def test_delete_additional_items( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test delete additional item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: "unique_id_tomaten"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + mock_cookidoo_client.remove_additional_items.assert_called_once_with( + ["unique_id_tomaten"] + ) + + +async def test_delete_additional_items_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test delete additional item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + mock_cookidoo_client.remove_additional_items.side_effect = CookidooRequestException + with pytest.raises( + HomeAssistantError, + match=re.escape("Failed to delete 1 item(s) from Cookidoo shopping list"), + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: "unique_id_tomaten"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) diff --git a/tests/components/cover/common.py b/tests/components/cover/common.py index d9f67e73f17..b4a0cdb06d4 100644 --- a/tests/components/cover/common.py 
+++ b/tests/components/cover/common.py @@ -2,8 +2,7 @@ from typing import Any -from homeassistant.components.cover import CoverEntity, CoverEntityFeature -from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING +from homeassistant.components.cover import CoverEntity, CoverEntityFeature, CoverState from tests.common import MockEntity @@ -26,7 +25,7 @@ class MockCover(MockEntity, CoverEntity): @property def is_closed(self): """Return if the cover is closed or not.""" - if "state" in self._values and self._values["state"] == STATE_CLOSED: + if "state" in self._values and self._values["state"] == CoverState.CLOSED: return True return self.current_cover_position == 0 @@ -35,7 +34,7 @@ class MockCover(MockEntity, CoverEntity): def is_opening(self): """Return if the cover is opening or not.""" if "state" in self._values: - return self._values["state"] == STATE_OPENING + return self._values["state"] == CoverState.OPENING return False @@ -43,28 +42,28 @@ class MockCover(MockEntity, CoverEntity): def is_closing(self): """Return if the cover is closing or not.""" if "state" in self._values: - return self._values["state"] == STATE_CLOSING + return self._values["state"] == CoverState.CLOSING return False def open_cover(self, **kwargs) -> None: """Open cover.""" if self._reports_opening_closing: - self._values["state"] = STATE_OPENING + self._values["state"] = CoverState.OPENING else: - self._values["state"] = STATE_OPEN + self._values["state"] = CoverState.OPEN def close_cover(self, **kwargs) -> None: """Close cover.""" if self._reports_opening_closing: - self._values["state"] = STATE_CLOSING + self._values["state"] = CoverState.CLOSING else: - self._values["state"] = STATE_CLOSED + self._values["state"] = CoverState.CLOSED def stop_cover(self, **kwargs) -> None: """Stop cover.""" assert CoverEntityFeature.STOP in self.supported_features - self._values["state"] = STATE_CLOSED if self.is_closed else STATE_OPEN + self._values["state"] = CoverState.CLOSED if self.is_closed else CoverState.OPEN @property def current_cover_position(self): diff --git a/tests/components/cover/test_device_condition.py b/tests/components/cover/test_device_condition.py index 8c1d2d1c9a7..aa5f150172c 100644 --- a/tests/components/cover/test_device_condition.py +++ b/tests/components/cover/test_device_condition.py @@ -4,17 +4,9 @@ import pytest from pytest_unordered import unordered from homeassistant.components import automation -from homeassistant.components.cover import DOMAIN, CoverEntityFeature +from homeassistant.components.cover import DOMAIN, CoverEntityFeature, CoverState from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - CONF_PLATFORM, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - STATE_UNAVAILABLE, - EntityCategory, -) +from homeassistant.const import CONF_PLATFORM, STATE_UNAVAILABLE, EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider @@ -365,7 +357,7 @@ async def test_if_state( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) assert await async_setup_component( hass, @@ -469,21 +461,21 @@ async def test_if_state( assert len(service_calls) == 1 assert service_calls[0].data["some"] == "is_open - event - test_event1" - 
hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, CoverState.CLOSED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(service_calls) == 2 assert service_calls[1].data["some"] == "is_closed - event - test_event2" - hass.states.async_set(entry.entity_id, STATE_OPENING) + hass.states.async_set(entry.entity_id, CoverState.OPENING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event3") await hass.async_block_till_done() assert len(service_calls) == 3 assert service_calls[2].data["some"] == "is_opening - event - test_event3" - hass.states.async_set(entry.entity_id, STATE_CLOSING) + hass.states.async_set(entry.entity_id, CoverState.CLOSING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event4") await hass.async_block_till_done() @@ -508,7 +500,7 @@ async def test_if_state_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) assert await async_setup_component( hass, @@ -675,7 +667,7 @@ async def test_if_position( assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 45} ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() @@ -688,7 +680,7 @@ async def test_if_position( assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 90} ) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") @@ -835,7 +827,7 @@ async def test_if_tilt_position( assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 45} ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() @@ -848,7 +840,7 @@ async def test_if_tilt_position( assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 90} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 90} ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() diff --git a/tests/components/cover/test_device_trigger.py b/tests/components/cover/test_device_trigger.py index 5eb8cd484b2..e6021d22326 100644 --- a/tests/components/cover/test_device_trigger.py +++ b/tests/components/cover/test_device_trigger.py @@ -6,16 +6,9 @@ import pytest from pytest_unordered import unordered from homeassistant.components import automation -from homeassistant.components.cover import DOMAIN, CoverEntityFeature +from homeassistant.components.cover import DOMAIN, CoverEntityFeature, CoverState from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - CONF_PLATFORM, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - EntityCategory, -) +from homeassistant.const import CONF_PLATFORM, EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from 
homeassistant.helpers.entity_registry import RegistryEntryHider @@ -387,7 +380,7 @@ async def test_if_fires_on_state_change( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, CoverState.CLOSED) assert await async_setup_component( hass, @@ -487,7 +480,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is opened. - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -496,7 +489,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is closed. - hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, CoverState.CLOSED) await hass.async_block_till_done() assert len(service_calls) == 2 assert ( @@ -505,7 +498,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is opening. - hass.states.async_set(entry.entity_id, STATE_OPENING) + hass.states.async_set(entry.entity_id, CoverState.OPENING) await hass.async_block_till_done() assert len(service_calls) == 3 assert ( @@ -514,7 +507,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is closing. - hass.states.async_set(entry.entity_id, STATE_CLOSING) + hass.states.async_set(entry.entity_id, CoverState.CLOSING) await hass.async_block_till_done() assert len(service_calls) == 4 assert ( @@ -540,7 +533,7 @@ async def test_if_fires_on_state_change_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, CoverState.CLOSED) assert await async_setup_component( hass, @@ -574,7 +567,7 @@ async def test_if_fires_on_state_change_legacy( ) # Fake that the entity is opened. 
- hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -600,7 +593,7 @@ async def test_if_fires_on_state_change_with_for( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, CoverState.CLOSED) assert await async_setup_component( hass, @@ -635,7 +628,7 @@ async def test_if_fires_on_state_change_with_for( await hass.async_block_till_done() assert len(service_calls) == 0 - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) await hass.async_block_till_done() assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) @@ -754,12 +747,14 @@ async def test_if_fires_on_position( ] }, ) - hass.states.async_set(ent.entity_id, STATE_OPEN, attributes={"current_position": 1}) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 95} + ent.entity_id, CoverState.OPEN, attributes={"current_position": 1} ) hass.states.async_set( - ent.entity_id, STATE_OPEN, attributes={"current_position": 50} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 95} + ) + hass.states.async_set( + ent.entity_id, CoverState.OPEN, attributes={"current_position": 50} ) await hass.async_block_till_done() assert len(service_calls) == 3 @@ -781,11 +776,11 @@ async def test_if_fires_on_position( ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 95} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 95} ) await hass.async_block_till_done() hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 45} ) await hass.async_block_till_done() assert len(service_calls) == 4 @@ -795,7 +790,7 @@ async def test_if_fires_on_position( ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 90} ) await hass.async_block_till_done() assert len(service_calls) == 5 @@ -912,13 +907,13 @@ async def test_if_fires_on_tilt_position( }, ) hass.states.async_set( - ent.entity_id, STATE_OPEN, attributes={"current_tilt_position": 1} + ent.entity_id, CoverState.OPEN, attributes={"current_tilt_position": 1} ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 95} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 95} ) hass.states.async_set( - ent.entity_id, STATE_OPEN, attributes={"current_tilt_position": 50} + ent.entity_id, CoverState.OPEN, attributes={"current_tilt_position": 50} ) await hass.async_block_till_done() assert len(service_calls) == 3 @@ -940,11 +935,11 @@ async def test_if_fires_on_tilt_position( ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 95} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 95} ) await hass.async_block_till_done() hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 45} ) await hass.async_block_till_done() assert len(service_calls) == 4 @@ -954,7 +949,7 @@ async def test_if_fires_on_tilt_position( ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, 
attributes={"current_tilt_position": 90} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 90} ) await hass.async_block_till_done() assert len(service_calls) == 5 diff --git a/tests/components/cover/test_init.py b/tests/components/cover/test_init.py index d1d84ffad6c..e43b64b16a7 100644 --- a/tests/components/cover/test_init.py +++ b/tests/components/cover/test_init.py @@ -2,29 +2,16 @@ from enum import Enum -import pytest - from homeassistant.components import cover -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_PLATFORM, - SERVICE_TOGGLE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, -) +from homeassistant.components.cover import CoverState +from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM, SERVICE_TOGGLE from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers.entity import Entity from homeassistant.setup import async_setup_component from .common import MockCover -from tests.common import ( - help_test_all, - import_and_test_deprecated_constant_enum, - setup_test_component_platform, -) +from tests.common import help_test_all, setup_test_component_platform async def test_services( @@ -106,15 +93,17 @@ async def test_services( assert is_closing(hass, ent6) # Without STOP but still reports opening/closing has a 4th possible toggle state - set_state(ent6, STATE_CLOSED) + set_state(ent6, CoverState.CLOSED) await call_service(hass, SERVICE_TOGGLE, ent6) assert is_opening(hass, ent6) # After the unusual state transition: closing -> fully open, toggle should close - set_state(ent5, STATE_OPEN) + set_state(ent5, CoverState.OPEN) await call_service(hass, SERVICE_TOGGLE, ent5) # Start closing assert is_closing(hass, ent5) - set_state(ent5, STATE_OPEN) # Unusual state transition from closing -> fully open + set_state( + ent5, CoverState.OPEN + ) # Unusual state transition from closing -> fully open set_cover_position(ent5, 100) await call_service(hass, SERVICE_TOGGLE, ent5) # Should close, not open assert is_closing(hass, ent5) @@ -139,22 +128,22 @@ def set_state(ent, state) -> None: def is_open(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_OPEN) + return hass.states.is_state(ent.entity_id, CoverState.OPEN) def is_opening(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_OPENING) + return hass.states.is_state(ent.entity_id, CoverState.OPENING) def is_closed(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_CLOSED) + return hass.states.is_state(ent.entity_id, CoverState.CLOSED) def is_closing(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_CLOSING) + return hass.states.is_state(ent.entity_id, CoverState.CLOSING) def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: @@ -164,36 +153,3 @@ def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, s def test_all() -> None: """Test module.__all__ is correctly set.""" help_test_all(cover) - - -@pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(cover.CoverEntityFeature, "SUPPORT_") - + _create_tuples(cover.CoverDeviceClass, "DEVICE_CLASS_"), -) -def 
test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, cover, enum, constant_prefix, "2025.1" - ) - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockCoverEntity(cover.CoverEntity): - _attr_supported_features = 1 - - entity = MockCoverEntity() - assert entity.supported_features is cover.CoverEntityFeature(1) - assert "MockCoverEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "CoverEntityFeature.OPEN" in caplog.text - caplog.clear() - assert entity.supported_features is cover.CoverEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text diff --git a/tests/components/cover/test_intent.py b/tests/components/cover/test_intent.py index 1cf23c4c3df..383a55e2a72 100644 --- a/tests/components/cover/test_intent.py +++ b/tests/components/cover/test_intent.py @@ -10,9 +10,9 @@ from homeassistant.components.cover import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, + CoverState, intent as cover_intent, ) -from homeassistant.const import STATE_CLOSED, STATE_OPEN from homeassistant.core import HomeAssistant from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -32,7 +32,9 @@ async def test_open_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> await cover_intent.async_setup_intents(hass) hass.states.async_set( - f"{DOMAIN}.garage_door", STATE_CLOSED, attributes={"device_class": "garage"} + f"{DOMAIN}.garage_door", + CoverState.CLOSED, + attributes={"device_class": "garage"}, ) calls = async_mock_service(hass, DOMAIN, SERVICE_OPEN_COVER) @@ -61,7 +63,7 @@ async def test_close_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> await cover_intent.async_setup_intents(hass) hass.states.async_set( - f"{DOMAIN}.garage_door", STATE_OPEN, attributes={"device_class": "garage"} + f"{DOMAIN}.garage_door", CoverState.OPEN, attributes={"device_class": "garage"} ) calls = async_mock_service(hass, DOMAIN, SERVICE_CLOSE_COVER) @@ -95,7 +97,7 @@ async def test_set_cover_position(hass: HomeAssistant, slots: dict[str, Any]) -> entity_id = f"{DOMAIN}.test_cover" hass.states.async_set( entity_id, - STATE_CLOSED, + CoverState.CLOSED, attributes={ATTR_CURRENT_POSITION: 0, "device_class": "shade"}, ) calls = async_mock_service(hass, DOMAIN, SERVICE_SET_COVER_POSITION) diff --git a/tests/components/cover/test_reproduce_state.py b/tests/components/cover/test_reproduce_state.py index f5dd01745d3..4aad27011fa 100644 --- a/tests/components/cover/test_reproduce_state.py +++ b/tests/components/cover/test_reproduce_state.py @@ -7,6 +7,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, + CoverState, ) from homeassistant.const import ( SERVICE_CLOSE_COVER, @@ -15,8 +16,6 @@ from homeassistant.const import ( SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, - STATE_CLOSED, - STATE_OPEN, ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.state import async_reproduce_state @@ -28,32 +27,32 @@ async def test_reproducing_states( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test reproducing Cover states.""" - 
hass.states.async_set("cover.entity_close", STATE_CLOSED, {}) + hass.states.async_set("cover.entity_close", CoverState.CLOSED, {}) hass.states.async_set( "cover.entity_close_attr", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, ) hass.states.async_set( - "cover.entity_close_tilt", STATE_CLOSED, {ATTR_CURRENT_TILT_POSITION: 50} + "cover.entity_close_tilt", CoverState.CLOSED, {ATTR_CURRENT_TILT_POSITION: 50} ) - hass.states.async_set("cover.entity_open", STATE_OPEN, {}) + hass.states.async_set("cover.entity_open", CoverState.OPEN, {}) hass.states.async_set( - "cover.entity_slightly_open", STATE_OPEN, {ATTR_CURRENT_POSITION: 50} + "cover.entity_slightly_open", CoverState.OPEN, {ATTR_CURRENT_POSITION: 50} ) hass.states.async_set( "cover.entity_open_attr", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0}, ) hass.states.async_set( "cover.entity_open_tilt", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, ) hass.states.async_set( "cover.entity_entirely_open", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, ) @@ -70,34 +69,36 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("cover.entity_close", STATE_CLOSED), + State("cover.entity_close", CoverState.CLOSED), State( "cover.entity_close_attr", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, ), State( "cover.entity_close_tilt", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_TILT_POSITION: 50}, ), - State("cover.entity_open", STATE_OPEN), + State("cover.entity_open", CoverState.OPEN), State( - "cover.entity_slightly_open", STATE_OPEN, {ATTR_CURRENT_POSITION: 50} + "cover.entity_slightly_open", + CoverState.OPEN, + {ATTR_CURRENT_POSITION: 50}, ), State( "cover.entity_open_attr", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0}, ), State( "cover.entity_open_tilt", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, ), State( "cover.entity_entirely_open", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, ), ], @@ -125,26 +126,28 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("cover.entity_close", STATE_OPEN), + State("cover.entity_close", CoverState.OPEN), State( "cover.entity_close_attr", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, ), State( "cover.entity_close_tilt", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_TILT_POSITION: 100}, ), - State("cover.entity_open", STATE_CLOSED), - State("cover.entity_slightly_open", STATE_OPEN, {}), - State("cover.entity_open_attr", STATE_CLOSED, {}), + State("cover.entity_open", CoverState.CLOSED), + State("cover.entity_slightly_open", CoverState.OPEN, {}), + State("cover.entity_open_attr", CoverState.CLOSED, {}), State( - "cover.entity_open_tilt", STATE_OPEN, {ATTR_CURRENT_TILT_POSITION: 0} + "cover.entity_open_tilt", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 0}, ), State( "cover.entity_entirely_open", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, ), # Should not raise diff --git a/tests/components/cpuspeed/test_config_flow.py b/tests/components/cpuspeed/test_config_flow.py index 0ebb8aede49..1a68d6f9396 100644 --- a/tests/components/cpuspeed/test_config_flow.py +++ 
b/tests/components/cpuspeed/test_config_flow.py @@ -50,7 +50,7 @@ async def test_already_configured( ) assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result.get("reason") == "single_instance_allowed" assert len(mock_setup_entry.mock_calls) == 0 assert len(mock_cpuinfo_config_flow.mock_calls) == 0 diff --git a/tests/components/crownstone/test_config_flow.py b/tests/components/crownstone/test_config_flow.py index 5dd00e7baff..a38a04cb2ad 100644 --- a/tests/components/crownstone/test_config_flow.py +++ b/tests/components/crownstone/test_config_flow.py @@ -258,7 +258,7 @@ async def test_unknown_error( result = await start_config_flow(hass, cloud) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown_error"} + assert result["errors"] == {"base": "unknown"} assert crownstone_setup.call_count == 0 diff --git a/tests/components/daikin/test_init.py b/tests/components/daikin/test_init.py index b3d18467d33..2380d5ad798 100644 --- a/tests/components/daikin/test_init.py +++ b/tests/components/daikin/test_init.py @@ -7,10 +7,10 @@ from aiohttp import ClientConnectionError from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.daikin import DaikinApi, update_unique_id +from homeassistant.components.daikin import update_unique_id from homeassistant.components.daikin.const import DOMAIN, KEY_MAC from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -183,18 +183,15 @@ async def test_client_update_connection_error( await hass.config_entries.async_setup(config_entry.entry_id) - api: DaikinApi = hass.data[DOMAIN][config_entry.entry_id] - - assert api.available is True + assert hass.states.get("climate.daikinap00000").state != STATE_UNAVAILABLE type(mock_daikin).update_status.side_effect = ClientConnectionError - freezer.tick(timedelta(seconds=90)) + freezer.tick(timedelta(seconds=60)) async_fire_time_changed(hass) - await hass.async_block_till_done() - assert api.available is False + assert hass.states.get("climate.daikinap00000").state == STATE_UNAVAILABLE assert mock_daikin.update_status.call_count == 2 diff --git a/tests/components/datadog/test_init.py b/tests/components/datadog/test_init.py index 36c1d951078..3b7bea3c926 100644 --- a/tests/components/datadog/test_init.py +++ b/tests/components/datadog/test_init.py @@ -79,7 +79,7 @@ async def test_logbook_entry(hass: HomeAssistant) -> None: assert mock_statsd.event.call_count == 1 assert mock_statsd.event.call_args == mock.call( title="Home Assistant", - text="%%% \n **{}** {} \n %%%".format(event["name"], event["message"]), + text=f"%%% \n **{event['name']}** {event['message']} \n %%%", tags=["entity:sensor.foo.bar", "domain:automation"], ) diff --git a/tests/components/deako/__init__.py b/tests/components/deako/__init__.py new file mode 100644 index 00000000000..248a389f2e6 --- /dev/null +++ b/tests/components/deako/__init__.py @@ -0,0 +1 @@ +"""Tests for the Deako integration.""" diff --git a/tests/components/deako/conftest.py b/tests/components/deako/conftest.py new file mode 100644 index 00000000000..659634b8784 --- /dev/null +++ b/tests/components/deako/conftest.py @@ -0,0 +1,45 @@ +"""deako session fixtures.""" + +from collections.abc import Generator +from unittest.mock import 
MagicMock, patch + +import pytest + +from homeassistant.components.deako.const import DOMAIN + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + ) + + +@pytest.fixture(autouse=True) +def pydeako_deako_mock() -> Generator[MagicMock]: + """Mock pydeako deako client.""" + with patch("homeassistant.components.deako.Deako", autospec=True) as mock: + yield mock + + +@pytest.fixture(autouse=True) +def pydeako_discoverer_mock(mock_async_zeroconf: MagicMock) -> Generator[MagicMock]: + """Mock pydeako discovery client.""" + with ( + patch("homeassistant.components.deako.DeakoDiscoverer", autospec=True) as mock, + patch("homeassistant.components.deako.config_flow.DeakoDiscoverer", new=mock), + ): + yield mock + + +@pytest.fixture +def mock_deako_setup() -> Generator[MagicMock]: + """Mock async_setup_entry for config flow tests.""" + with patch( + "homeassistant.components.deako.async_setup_entry", + return_value=True, + ) as mock_setup: + yield mock_setup diff --git a/tests/components/deako/snapshots/test_light.ambr b/tests/components/deako/snapshots/test_light.ambr new file mode 100644 index 00000000000..7bc170654e1 --- /dev/null +++ b/tests/components/deako/snapshots/test_light.ambr @@ -0,0 +1,168 @@ +# serializer version: 1 +# name: test_dimmable_light_props[light.kitchen-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.kitchen', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deako', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uuid', + 'unit_of_measurement': None, + }) +# --- +# name: test_dimmable_light_props[light.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 127, + 'color_mode': , + 'friendly_name': 'kitchen', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_light_initial_props[light.kitchen-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.kitchen', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deako', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uuid', + 'unit_of_measurement': None, + }) +# --- +# name: test_light_initial_props[light.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'kitchen', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 
'context': , + 'entity_id': 'light.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_light_setup_with_device[light.some_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.some_device', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deako', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'some_device', + 'unit_of_measurement': None, + }) +# --- +# name: test_light_setup_with_device[light.some_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 1, + 'color_mode': , + 'friendly_name': 'some device', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.some_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/deako/test_config_flow.py b/tests/components/deako/test_config_flow.py new file mode 100644 index 00000000000..21b10eaaa36 --- /dev/null +++ b/tests/components/deako/test_config_flow.py @@ -0,0 +1,80 @@ +"""Tests for the deako component config flow.""" + +from unittest.mock import MagicMock + +from pydeako.discover import DevicesNotFoundException + +from homeassistant.components.deako.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_found( + hass: HomeAssistant, + pydeako_discoverer_mock: MagicMock, + mock_deako_setup: MagicMock, +) -> None: + """Test finding a Deako device.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + # Confirmation form + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + pydeako_discoverer_mock.return_value.get_address.assert_called_once() + + mock_deako_setup.assert_called_once() + + +async def test_not_found( + hass: HomeAssistant, + pydeako_discoverer_mock: MagicMock, + mock_deako_setup: MagicMock, +) -> None: + """Test not finding any Deako devices.""" + pydeako_discoverer_mock.return_value.get_address.side_effect = ( + DevicesNotFoundException() + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + # Confirmation form + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices_found" + pydeako_discoverer_mock.return_value.get_address.assert_called_once() + + mock_deako_setup.assert_not_called() + + +async def test_already_configured( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_deako_setup: MagicMock, +) -> None: + """Test flow aborts when 
already configured.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" + + mock_deako_setup.assert_not_called() diff --git a/tests/components/deako/test_init.py b/tests/components/deako/test_init.py new file mode 100644 index 00000000000..c2291330feb --- /dev/null +++ b/tests/components/deako/test_init.py @@ -0,0 +1,62 @@ +"""Tests for the deako component init.""" + +from unittest.mock import MagicMock + +from pydeako import FindDevicesError + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_deako_async_setup_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + pydeako_discoverer_mock: MagicMock, +) -> None: + """Test successful setup entry.""" + pydeako_deako_mock.return_value.get_devices.return_value = { + "id1": {}, + "id2": {}, + } + + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + pydeako_deako_mock.assert_called_once_with( + pydeako_discoverer_mock.return_value.get_address + ) + pydeako_deako_mock.return_value.connect.assert_called_once() + pydeako_deako_mock.return_value.find_devices.assert_called_once() + pydeako_deako_mock.return_value.get_devices.assert_called() + + assert mock_config_entry.runtime_data == pydeako_deako_mock.return_value + + +async def test_deako_async_setup_entry_devices_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + pydeako_discoverer_mock: MagicMock, +) -> None: + """Test async_setup_entry raises ConfigEntryNotReady when pydeako raises FindDevicesError.""" + + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.find_devices.side_effect = FindDevicesError() + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + pydeako_deako_mock.assert_called_once_with( + pydeako_discoverer_mock.return_value.get_address + ) + pydeako_deako_mock.return_value.connect.assert_called_once() + pydeako_deako_mock.return_value.find_devices.assert_called_once() + pydeako_deako_mock.return_value.disconnect.assert_called_once() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/deako/test_light.py b/tests/components/deako/test_light.py new file mode 100644 index 00000000000..b969c7f71cb --- /dev/null +++ b/tests/components/deako/test_light.py @@ -0,0 +1,192 @@ +"""Tests for the light module.""" + +from unittest.mock import MagicMock + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_light_setup_with_device( + hass: HomeAssistant, + pydeako_deako_mock: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test light platform setup with device returned.""" + mock_config_entry.add_to_hass(hass) + + 
pydeako_deako_mock.return_value.get_devices.return_value = { + "some_device": {}, + } + pydeako_deako_mock.return_value.get_name.return_value = "some device" + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_light_initial_props( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test on/off light is set up with accurate initial properties.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + pydeako_deako_mock.return_value.get_state.return_value = { + "power": False, + } + pydeako_deako_mock.return_value.is_dimmable.return_value = False + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_dimmable_light_props( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test dimmable on/off light is set up with accurate initial properties.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + pydeako_deako_mock.return_value.get_state.return_value = { + "power": True, + "dim": 50, + } + pydeako_deako_mock.return_value.is_dimmable.return_value = True + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_light_power_change_on( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, +) -> None: + """Test turning on a deako device.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "light.kitchen"}, + blocking=True, + ) + + pydeako_deako_mock.return_value.control_device.assert_called_once_with( + "uuid", True, None + ) + + +async def test_light_power_change_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, +) -> None: + """Test turning off a deako device.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "light.kitchen"}, + blocking=True, + ) + + pydeako_deako_mock.return_value.control_device.assert_called_once_with( + "uuid", False, None + ) + + 
+@pytest.mark.parametrize( + ("dim_input", "expected_dim_value"), + [ + (3, 1), + (255, 100), + (127, 50), + ], +) +async def test_light_brightness_change( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + dim_input: int, + expected_dim_value: int, +) -> None: + """Test turing on a deako device.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.kitchen", + ATTR_BRIGHTNESS: dim_input, + }, + blocking=True, + ) + + pydeako_deako_mock.return_value.control_device.assert_called_once_with( + "uuid", True, expected_dim_value + ) diff --git a/tests/components/deconz/snapshots/test_diagnostics.ambr b/tests/components/deconz/snapshots/test_diagnostics.ambr index 911f2e134f2..20558b4bbbd 100644 --- a/tests/components/deconz/snapshots/test_diagnostics.ambr +++ b/tests/components/deconz/snapshots/test_diagnostics.ambr @@ -10,6 +10,8 @@ 'port': 80, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'deconz', 'entry_id': '1', 'minor_version': 1, @@ -19,6 +21,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/deconz/snapshots/test_light.ambr b/tests/components/deconz/snapshots/test_light.ambr index b5a9f7b5543..b73bbcca216 100644 --- a/tests/components/deconz/snapshots/test_light.ambr +++ b/tests/components/deconz/snapshots/test_light.ambr @@ -125,7 +125,7 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 'supported_color_modes': list([ @@ -134,7 +134,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -283,7 +283,7 @@ 'min_mireds': 155, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 'supported_color_modes': list([ @@ -291,7 +291,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -429,7 +429,7 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 'supported_color_modes': list([ @@ -438,7 +438,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -587,7 +587,7 @@ 'min_mireds': 155, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 'supported_color_modes': list([ @@ -595,7 +595,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -891,7 +891,7 @@ 'min_mireds': 155, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 'supported_color_modes': list([ @@ -899,7 +899,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -981,7 +981,7 @@ 'rgb_color': tuple( 255, 165, - 84, + 85, ), 'supported_color_modes': list([ , @@ -990,8 +990,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.53, - 0.388, + 0.529, + 0.387, ), }), 'context': , @@ -1180,7 +1180,7 @@ 'is_deconz_group': False, 'rgb_color': tuple( 243, - 113, + 114, 255, ), 'supported_color_modes': list([ @@ -1189,7 +1189,7 @@ 'supported_features': , 'xy_color': tuple( 0.357, - 0.188, + 0.189, ), }), 'context': , @@ -1400,7 +1400,12 @@ 'area_id': None, 'capabilities': dict({ 'effect_list': list([ - 'colorloop', + , + , + , + , + , + , ]), 'max_color_temp_kelvin': 6535, 'max_mireds': 500, @@ -1448,7 
+1453,12 @@ 'color_temp_kelvin': None, 'effect': None, 'effect_list': list([ - 'colorloop', + , + , + , + , + , + , ]), 'friendly_name': 'Gradient light', 'hs_color': tuple( diff --git a/tests/components/deconz/snapshots/test_sensor.ambr b/tests/components/deconz/snapshots/test_sensor.ambr index dd097ea1c9a..0b76366b5d1 100644 --- a/tests/components/deconz/snapshots/test_sensor.ambr +++ b/tests/components/deconz/snapshots/test_sensor.ambr @@ -1537,6 +1537,60 @@ 'state': '90', }) # --- +# name: test_sensors[config_entry_options0-sensor_payload21-expected21][sensor.ikea_starkvind_filter_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ikea_starkvind_filter_time', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'IKEA Starkvind Filter time', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '0c:43:14:ff:fe:6c:20:12-01-fc7d-air_purifier_filter_run_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload21-expected21][sensor.ikea_starkvind_filter_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'IKEA Starkvind Filter time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ikea_starkvind_filter_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.849594907407407', + }) +# --- # name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ch2o-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/deconz/test_alarm_control_panel.py b/tests/components/deconz/test_alarm_control_panel.py index 6c47146f9b0..dbe75584df7 100644 --- a/tests/components/deconz/test_alarm_control_panel.py +++ b/tests/components/deconz/test_alarm_control_panel.py @@ -9,6 +9,7 @@ from syrupy import SnapshotAssertion from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + AlarmControlPanelState, ) from homeassistant.const import ( ATTR_CODE, @@ -17,13 +18,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, Platform, ) from homeassistant.core import HomeAssistant @@ -117,21 +111,21 @@ async def test_alarm_control_panel( for action, state in ( # Event signals alarm control panel armed state - (AncillaryControlPanel.ARMED_AWAY, STATE_ALARM_ARMED_AWAY), - (AncillaryControlPanel.ARMED_NIGHT, STATE_ALARM_ARMED_NIGHT), - (AncillaryControlPanel.ARMED_STAY, STATE_ALARM_ARMED_HOME), - (AncillaryControlPanel.DISARMED, STATE_ALARM_DISARMED), + (AncillaryControlPanel.ARMED_AWAY, AlarmControlPanelState.ARMED_AWAY), + (AncillaryControlPanel.ARMED_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (AncillaryControlPanel.ARMED_STAY, AlarmControlPanelState.ARMED_HOME), + 
(AncillaryControlPanel.DISARMED, AlarmControlPanelState.DISARMED), # Event signals alarm control panel arming state - (AncillaryControlPanel.ARMING_AWAY, STATE_ALARM_ARMING), - (AncillaryControlPanel.ARMING_NIGHT, STATE_ALARM_ARMING), - (AncillaryControlPanel.ARMING_STAY, STATE_ALARM_ARMING), + (AncillaryControlPanel.ARMING_AWAY, AlarmControlPanelState.ARMING), + (AncillaryControlPanel.ARMING_NIGHT, AlarmControlPanelState.ARMING), + (AncillaryControlPanel.ARMING_STAY, AlarmControlPanelState.ARMING), # Event signals alarm control panel pending state - (AncillaryControlPanel.ENTRY_DELAY, STATE_ALARM_PENDING), - (AncillaryControlPanel.EXIT_DELAY, STATE_ALARM_PENDING), + (AncillaryControlPanel.ENTRY_DELAY, AlarmControlPanelState.PENDING), + (AncillaryControlPanel.EXIT_DELAY, AlarmControlPanelState.PENDING), # Event signals alarm control panel triggered state - (AncillaryControlPanel.IN_ALARM, STATE_ALARM_TRIGGERED), + (AncillaryControlPanel.IN_ALARM, AlarmControlPanelState.TRIGGERED), # Event signals alarm control panel unknown state keeps previous state - (AncillaryControlPanel.NOT_READY, STATE_ALARM_TRIGGERED), + (AncillaryControlPanel.NOT_READY, AlarmControlPanelState.TRIGGERED), ): await sensor_ws_data({"state": {"panel": action}}) assert hass.states.get("alarm_control_panel.keypad").state == state diff --git a/tests/components/deconz/test_climate.py b/tests/components/deconz/test_climate.py index 7f456e81976..e1000f0b4d6 100644 --- a/tests/components/deconz/test_climate.py +++ b/tests/components/deconz/test_climate.py @@ -259,7 +259,7 @@ async def test_climate_device_without_cooling_support( # Service set temperature without providing temperature attribute - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, diff --git a/tests/components/deconz/test_config_flow.py b/tests/components/deconz/test_config_flow.py index 49711962407..ce13bbfa5d4 100644 --- a/tests/components/deconz/test_config_flow.py +++ b/tests/components/deconz/test_config_flow.py @@ -20,17 +20,12 @@ from homeassistant.components.deconz.const import ( DOMAIN as DECONZ_DOMAIN, HASSIO_CONFIGURATION_URL, ) -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL -from homeassistant.config_entries import ( - SOURCE_HASSIO, - SOURCE_REAUTH, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .conftest import API_KEY, BRIDGE_ID @@ -407,12 +402,7 @@ async def test_reauth_flow_update_configuration( config_entry_setup: MockConfigEntry, ) -> None: """Verify reauth flow can update gateway API key.""" - result = await hass.config_entries.flow.async_init( - DECONZ_DOMAIN, - data=config_entry_setup.data, - context={"source": SOURCE_REAUTH}, - ) - + result = await config_entry_setup.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "link" diff --git a/tests/components/deconz/test_cover.py b/tests/components/deconz/test_cover.py index f1573394fae..47f8083798e 100644 --- a/tests/components/deconz/test_cover.py +++ b/tests/components/deconz/test_cover.py @@ -19,8 +19,9 @@ from 
homeassistant.components.cover import ( SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, + CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OPEN, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -67,7 +68,7 @@ async def test_cover( await light_ws_data({"state": {"lift": 0, "open": True}}) cover = hass.states.get("cover.window_covering_device") - assert cover.state == STATE_OPEN + assert cover.state == CoverState.OPEN assert cover.attributes[ATTR_CURRENT_POSITION] == 100 # Verify service calls for cover diff --git a/tests/components/deconz/test_device_trigger.py b/tests/components/deconz/test_device_trigger.py index 6f74db0b82c..1502cc4081d 100644 --- a/tests/components/deconz/test_device_trigger.py +++ b/tests/components/deconz/test_device_trigger.py @@ -7,6 +7,8 @@ from pytest_unordered import unordered from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN + +# pylint: disable-next=hass-component-root-import from homeassistant.components.binary_sensor.device_trigger import ( CONF_BAT_LOW, CONF_NOT_BAT_LOW, diff --git a/tests/components/deconz/test_light.py b/tests/components/deconz/test_light.py index 441cb01be63..9ac15d4867b 100644 --- a/tests/components/deconz/test_light.py +++ b/tests/components/deconz/test_light.py @@ -11,7 +11,7 @@ from homeassistant.components.deconz.const import CONF_ALLOW_DECONZ_GROUPS from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -391,7 +391,7 @@ async def test_light_state_change( "call": { ATTR_ENTITY_ID: "light.hue_go", ATTR_BRIGHTNESS: 200, - ATTR_COLOR_TEMP: 200, + ATTR_COLOR_TEMP_KELVIN: 5000, ATTR_TRANSITION: 5, ATTR_FLASH: FLASH_SHORT, ATTR_EFFECT: EFFECT_COLORLOOP, @@ -413,7 +413,7 @@ async def test_light_state_change( ATTR_ENTITY_ID: "light.hue_go", ATTR_XY_COLOR: (0.411, 0.351), ATTR_FLASH: FLASH_LONG, - ATTR_EFFECT: "None", + ATTR_EFFECT: "none", }, }, { @@ -804,7 +804,7 @@ async def test_groups( "call": { ATTR_ENTITY_ID: "light.group", ATTR_BRIGHTNESS: 200, - ATTR_COLOR_TEMP: 200, + ATTR_COLOR_TEMP_KELVIN: 5000, ATTR_TRANSITION: 5, ATTR_FLASH: FLASH_SHORT, ATTR_EFFECT: EFFECT_COLORLOOP, @@ -830,7 +830,7 @@ async def test_groups( }, { "on": True, - "xy": (0.235, 0.164), + "xy": (0.236, 0.166), }, ), ( # Turn on group with short color loop @@ -845,7 +845,7 @@ async def test_groups( }, { "on": True, - "xy": (0.235, 0.164), + "xy": (0.236, 0.166), }, ), ], @@ -1079,7 +1079,7 @@ async def test_non_color_light_reports_color( hass.states.get("light.group").attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP ) - assert hass.states.get("light.group").attributes[ATTR_COLOR_TEMP] == 250 + assert hass.states.get("light.group").attributes[ATTR_COLOR_TEMP_KELVIN] == 4000 # Updating a scene will return a faulty color value # for a non-color light causing an exception in hs_color @@ -1099,7 +1099,7 @@ async def test_non_color_light_reports_color( group = hass.states.get("light.group") assert group.attributes[ATTR_COLOR_MODE] == ColorMode.XY assert group.attributes[ATTR_HS_COLOR] == (40.571, 41.176) - assert group.attributes.get(ATTR_COLOR_TEMP) is None + assert group.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None @pytest.mark.parametrize( diff --git 
a/tests/components/deconz/test_lock.py b/tests/components/deconz/test_lock.py index 28d60e403ef..70a7bd732bb 100644 --- a/tests/components/deconz/test_lock.py +++ b/tests/components/deconz/test_lock.py @@ -8,8 +8,9 @@ from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, + LockState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from .conftest import WebsocketDataType @@ -43,10 +44,10 @@ async def test_lock_from_light( ) -> None: """Test that all supported lock entities based on lights are created.""" assert len(hass.states.async_all()) == 1 - assert hass.states.get("lock.door_lock").state == STATE_UNLOCKED + assert hass.states.get("lock.door_lock").state == LockState.UNLOCKED await light_ws_data({"state": {"on": True}}) - assert hass.states.get("lock.door_lock").state == STATE_LOCKED + assert hass.states.get("lock.door_lock").state == LockState.LOCKED # Verify service calls @@ -107,10 +108,10 @@ async def test_lock_from_sensor( ) -> None: """Test that all supported lock entities based on sensors are created.""" assert len(hass.states.async_all()) == 2 - assert hass.states.get("lock.door_lock").state == STATE_UNLOCKED + assert hass.states.get("lock.door_lock").state == LockState.UNLOCKED await sensor_ws_data({"state": {"lockstate": "locked"}}) - assert hass.states.get("lock.door_lock").state == STATE_LOCKED + assert hass.states.get("lock.door_lock").state == LockState.LOCKED # Verify service calls diff --git a/tests/components/deconz/test_logbook.py b/tests/components/deconz/test_logbook.py index d23680225f1..57cf8748762 100644 --- a/tests/components/deconz/test_logbook.py +++ b/tests/components/deconz/test_logbook.py @@ -16,7 +16,6 @@ from homeassistant.const import ( CONF_EVENT, CONF_ID, CONF_UNIQUE_ID, - STATE_ALARM_ARMED_AWAY, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -83,7 +82,7 @@ async def test_humanifying_deconz_alarm_event( { CONF_CODE: 1234, CONF_DEVICE_ID: keypad_entry.id, - CONF_EVENT: STATE_ALARM_ARMED_AWAY, + CONF_EVENT: "armed_away", CONF_ID: keypad_event_id, CONF_UNIQUE_ID: keypad_serial, }, @@ -94,7 +93,7 @@ async def test_humanifying_deconz_alarm_event( { CONF_CODE: 1234, CONF_DEVICE_ID: "ff99ff99ff99ff99ff99ff99ff99ff99", - CONF_EVENT: STATE_ALARM_ARMED_AWAY, + CONF_EVENT: "armed_away", CONF_ID: removed_device_event_id, CONF_UNIQUE_ID: removed_device_serial, }, diff --git a/tests/components/deconz/test_sensor.py b/tests/components/deconz/test_sensor.py index e6ae85df615..958cb3b793a 100644 --- a/tests/components/deconz/test_sensor.py +++ b/tests/components/deconz/test_sensor.py @@ -602,6 +602,41 @@ TEST_DATA = [ "next_state": "80", }, ), + ( # Air purifier filter time sensor + { + "config": { + "filterlifetime": 259200, + "ledindication": True, + "locked": False, + "mode": "speed_1", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "de26d19d9e91b2db3ded6ee7ab6b6a4b", + "lastannounced": None, + "lastseen": "2024-08-07T18:27Z", + "manufacturername": "IKEA of Sweden", + "modelid": "STARKVIND Air purifier", + "name": "IKEA Starkvind", + "productid": "E2007", + "state": { + "deviceruntime": 73405, + "filterruntime": 73405, + "lastupdated": "2024-08-07T18:27:52.543", + "replacefilter": False, + "speed": 20, + }, + "swversion": "1.1.001", + "type": "ZHAAirPurifier", + "uniqueid": "0c:43:14:ff:fe:6c:20:12-01-fc7d", + }, + { + 
"entity_id": "sensor.ikea_starkvind_filter_time", + "websocket_event": {"state": {"filterruntime": 100000}}, + "next_state": "1.15740740740741", + }, + ), ] diff --git a/tests/components/deluge/__init__.py b/tests/components/deluge/__init__.py index 4efbe04cf52..c9027f0c11f 100644 --- a/tests/components/deluge/__init__.py +++ b/tests/components/deluge/__init__.py @@ -14,3 +14,10 @@ CONF_DATA = { CONF_PORT: DEFAULT_RPC_PORT, CONF_WEB_PORT: DEFAULT_WEB_PORT, } + +GET_TORRENT_STATUS_RESPONSE = { + "upload_rate": 3462.0, + "download_rate": 98.5, + "dht_upload_rate": 7818.0, + "dht_download_rate": 2658.0, +} diff --git a/tests/components/deluge/test_config_flow.py b/tests/components/deluge/test_config_flow.py index 37229d4a72e..c336fc81cc6 100644 --- a/tests/components/deluge/test_config_flow.py +++ b/tests/components/deluge/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from homeassistant.components.deluge.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -113,16 +113,7 @@ async def test_flow_reauth(hass: HomeAssistant, api) -> None: entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/deluge/test_sensor.py b/tests/components/deluge/test_sensor.py new file mode 100644 index 00000000000..7ff6dda0b94 --- /dev/null +++ b/tests/components/deluge/test_sensor.py @@ -0,0 +1,32 @@ +"""Test Deluge sensor.py methods.""" + +from homeassistant.components.deluge.const import DelugeSensorType +from homeassistant.components.deluge.sensor import get_state + +from . 
import GET_TORRENT_STATUS_RESPONSE + + +def test_get_state() -> None: + """Tests get_state() with different keys.""" + + download_result = get_state( + GET_TORRENT_STATUS_RESPONSE, DelugeSensorType.DOWNLOAD_SPEED_SENSOR + ) + assert download_result == 0.1 # round(98.5 / 1024, 2) + + upload_result = get_state( + GET_TORRENT_STATUS_RESPONSE, DelugeSensorType.UPLOAD_SPEED_SENSOR + ) + assert upload_result == 3.4 # round(3462.0 / 1024, 1) + + protocol_upload_result = get_state( + GET_TORRENT_STATUS_RESPONSE, + DelugeSensorType.PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR, + ) + assert protocol_upload_result == 7.6 # round(7818.0 / 1024, 1) + + protocol_download_result = get_state( + GET_TORRENT_STATUS_RESPONSE, + DelugeSensorType.PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR, + ) + assert protocol_download_result == 2.6 # round(2658.0/1024, 1) diff --git a/tests/components/demo/test_button.py b/tests/components/demo/test_button.py index 6049de12570..702ee3aa3e0 100644 --- a/tests/components/demo/test_button.py +++ b/tests/components/demo/test_button.py @@ -5,7 +5,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -27,7 +27,9 @@ async def button_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_button(hass: HomeAssistant, button_only) -> None: """Initialize setup demo button entity.""" - assert await async_setup_component(hass, DOMAIN, {"button": {"platform": "demo"}}) + assert await async_setup_component( + hass, BUTTON_DOMAIN, {"button": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -47,7 +49,7 @@ async def test_press(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> Non now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") freezer.move_to(now) await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_PUSH}, blocking=True, diff --git a/tests/components/demo/test_camera.py b/tests/components/demo/test_camera.py index 89dd8e0cdf7..c8d8e1ef2e4 100644 --- a/tests/components/demo/test_camera.py +++ b/tests/components/demo/test_camera.py @@ -11,8 +11,7 @@ from homeassistant.components.camera import ( SERVICE_ENABLE_MOTION, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_IDLE, - STATE_STREAMING, + CameraState, async_get_image, ) from homeassistant.components.demo import DOMAIN @@ -46,7 +45,7 @@ async def demo_camera(hass: HomeAssistant, camera_only: None) -> None: async def test_init_state_is_streaming(hass: HomeAssistant) -> None: """Demo camera initialize as streaming.""" state = hass.states.get(ENTITY_CAMERA) - assert state.state == STATE_STREAMING + assert state.state == CameraState.STREAMING with patch( "homeassistant.components.demo.camera.Path.read_bytes", return_value=b"ON" @@ -59,21 +58,21 @@ async def test_init_state_is_streaming(hass: HomeAssistant) -> None: async def test_turn_on_state_back_to_streaming(hass: HomeAssistant) -> None: """After turn on state back to streaming.""" state = hass.states.get(ENTITY_CAMERA) - assert state.state == STATE_STREAMING + assert state.state == CameraState.STREAMING await hass.services.async_call( CAMERA_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_CAMERA}, blocking=True ) state = hass.states.get(ENTITY_CAMERA) - assert 
state.state == STATE_IDLE + assert state.state == CameraState.IDLE await hass.services.async_call( CAMERA_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_CAMERA}, blocking=True ) state = hass.states.get(ENTITY_CAMERA) - assert state.state == STATE_STREAMING + assert state.state == CameraState.STREAMING async def test_turn_off_image(hass: HomeAssistant) -> None: @@ -90,7 +89,7 @@ async def test_turn_off_image(hass: HomeAssistant) -> None: async def test_turn_off_invalid_camera(hass: HomeAssistant) -> None: """Turn off non-exist camera should quietly fail.""" state = hass.states.get(ENTITY_CAMERA) - assert state.state == STATE_STREAMING + assert state.state == CameraState.STREAMING await hass.services.async_call( CAMERA_DOMAIN, @@ -100,7 +99,7 @@ async def test_turn_off_invalid_camera(hass: HomeAssistant) -> None: ) state = hass.states.get(ENTITY_CAMERA) - assert state.state == STATE_STREAMING + assert state.state == CameraState.STREAMING async def test_motion_detection(hass: HomeAssistant) -> None: diff --git a/tests/components/demo/test_climate.py b/tests/components/demo/test_climate.py index 383e00834b8..42152645ecb 100644 --- a/tests/components/demo/test_climate.py +++ b/tests/components/demo/test_climate.py @@ -22,7 +22,7 @@ from homeassistant.components.climate import ( ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, PRESET_AWAY, PRESET_ECO, SERVICE_SET_FAN_MODE, @@ -64,7 +64,9 @@ def climate_only() -> Generator[None]: async def setup_demo_climate(hass: HomeAssistant, climate_only: None) -> None: """Initialize setup demo climate.""" hass.config.units = METRIC_SYSTEM - assert await async_setup_component(hass, DOMAIN, {"climate": {"platform": "demo"}}) + assert await async_setup_component( + hass, CLIMATE_DOMAIN, {"climate": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -104,7 +106,7 @@ async def test_set_only_target_temp_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_TEMPERATURE: None}, blocking=True, @@ -120,7 +122,7 @@ async def test_set_only_target_temp(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TEMPERATURE) == 21 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_TEMPERATURE: 30}, blocking=True, @@ -136,7 +138,7 @@ async def test_set_only_target_temp_with_convert(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TEMPERATURE) == 20 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_HEATPUMP, ATTR_TEMPERATURE: 21}, blocking=True, @@ -154,7 +156,7 @@ async def test_set_target_temp_range(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 24.0 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ECOBEE, @@ -179,7 +181,7 @@ async def test_set_target_temp_range_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ECOBEE, @@ -202,7 +204,7 @@ async def test_set_temp_with_hvac_mode(hass: HomeAssistant) -> None: assert state.state == HVACMode.COOL await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_CLIMATE, @@ -224,7 +226,7 @@ async def 
test_set_target_humidity_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HUMIDITY: None}, blocking=True, @@ -240,7 +242,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_HUMIDITY) == 67.4 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HUMIDITY: 64}, blocking=True, @@ -257,7 +259,7 @@ async def test_set_fan_mode_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_FAN_MODE: None}, blocking=True, @@ -273,7 +275,7 @@ async def test_set_fan_mode(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_FAN_MODE) == "on_high" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_FAN_MODE: "on_low"}, blocking=True, @@ -290,7 +292,7 @@ async def test_set_swing_mode_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_SWING_MODE: None}, blocking=True, @@ -306,7 +308,7 @@ async def test_set_swing(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_SWING_MODE) == "off" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_SWING_MODE: "auto"}, blocking=True, @@ -327,7 +329,7 @@ async def test_set_hvac_bad_attr_and_state(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: None}, blocking=True, @@ -344,7 +346,7 @@ async def test_set_hvac(hass: HomeAssistant) -> None: assert state.state == HVACMode.COOL await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVACMode.HEAT}, blocking=True, @@ -357,7 +359,7 @@ async def test_set_hvac(hass: HomeAssistant) -> None: async def test_set_hold_mode_away(hass: HomeAssistant) -> None: """Test setting the hold mode away.""" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ECOBEE, ATTR_PRESET_MODE: PRESET_AWAY}, blocking=True, @@ -370,7 +372,7 @@ async def test_set_hold_mode_away(hass: HomeAssistant) -> None: async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: """Test setting the hold mode eco.""" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ECOBEE, ATTR_PRESET_MODE: PRESET_ECO}, blocking=True, @@ -383,7 +385,7 @@ async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: async def test_turn_on(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVACMode.OFF}, blocking=True, @@ -393,7 +395,7 @@ async def test_turn_on(hass: HomeAssistant) -> None: assert state.state == HVACMode.OFF await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True + CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True ) state = 
hass.states.get(ENTITY_CLIMATE) assert state.state == HVACMode.HEAT @@ -402,7 +404,7 @@ async def test_turn_on(hass: HomeAssistant) -> None: async def test_turn_off(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVACMode.HEAT}, blocking=True, @@ -412,7 +414,10 @@ async def test_turn_off(hass: HomeAssistant) -> None: assert state.state == HVACMode.HEAT await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True + CLIMATE_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ENTITY_CLIMATE}, + blocking=True, ) state = hass.states.get(ENTITY_CLIMATE) assert state.state == HVACMode.OFF diff --git a/tests/components/demo/test_cover.py b/tests/components/demo/test_cover.py index 009d2ca2f49..97cad5bbe14 100644 --- a/tests/components/demo/test_cover.py +++ b/tests/components/demo/test_cover.py @@ -11,7 +11,8 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -26,10 +27,6 @@ from homeassistant.const import ( SERVICE_STOP_COVER_TILT, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, Platform, ) from homeassistant.core import HomeAssistant @@ -55,8 +52,8 @@ def cover_only() -> Generator[None]: @pytest.fixture(autouse=True) async def setup_comp(hass: HomeAssistant, cover_only: None) -> None: """Set up demo cover component.""" - with assert_setup_component(1, DOMAIN): - await async_setup_component(hass, DOMAIN, CONFIG) + with assert_setup_component(1, COVER_DOMAIN): + await async_setup_component(hass, COVER_DOMAIN, CONFIG) await hass.async_block_till_done() @@ -75,41 +72,41 @@ async def test_supported_features(hass: HomeAssistant) -> None: async def test_close_cover(hass: HomeAssistant) -> None: """Test closing the cover.""" state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 async def test_open_cover(hass: HomeAssistant) -> None: """Test opening the cover.""" state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = 
hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 @@ -117,7 +114,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: """Test toggling the cover.""" # Start open await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -125,11 +122,11 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes["current_position"] == 100 # Toggle closed await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -137,11 +134,11 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 # Toggle open await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -149,7 +146,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 @@ -158,7 +155,7 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 10}, blocking=True, @@ -177,13 +174,13 @@ async def test_stop_cover(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) async_fire_time_changed(hass, future) await hass.async_block_till_done() @@ -196,7 +193,10 @@ async def test_close_cover_tilt(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -212,7 +212,10 @@ async def test_open_cover_tilt(hass: HomeAssistant) -> None: state = 
hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -227,7 +230,10 @@ async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: """Test toggling the cover tilt.""" # Start open await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -238,7 +244,10 @@ async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 # Toggle closed await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_TOGGLE_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -249,7 +258,10 @@ async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Toggle Open await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_TOGGLE_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -265,7 +277,7 @@ async def test_set_cover_tilt_position(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_TILT_POSITION: 90}, blocking=True, @@ -284,13 +296,19 @@ async def test_stop_cover_tilt(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_STOP_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) async_fire_time_changed(hass, future) await hass.async_block_till_done() diff --git a/tests/components/demo/test_date.py b/tests/components/demo/test_date.py index 5e0fc2c29cd..228be936599 100644 --- a/tests/components/demo/test_date.py +++ b/tests/components/demo/test_date.py @@ -4,7 +4,11 @@ from unittest.mock import patch import pytest -from homeassistant.components.date import ATTR_DATE, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.date import ( + ATTR_DATE, + DOMAIN as DATE_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -25,7 +29,9 @@ async def date_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_date(hass: HomeAssistant, date_only) -> None: 
"""Initialize setup demo date.""" - assert await async_setup_component(hass, DOMAIN, {"date": {"platform": "demo"}}) + assert await async_setup_component( + hass, DATE_DOMAIN, {"date": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -38,7 +44,7 @@ def test_setup_params(hass: HomeAssistant) -> None: async def test_set_datetime(hass: HomeAssistant) -> None: """Test set datetime service.""" await hass.services.async_call( - DOMAIN, + DATE_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_DATE, ATTR_DATE: "2021-02-03"}, blocking=True, diff --git a/tests/components/demo/test_datetime.py b/tests/components/demo/test_datetime.py index bd4adafd695..82cd5044068 100644 --- a/tests/components/demo/test_datetime.py +++ b/tests/components/demo/test_datetime.py @@ -4,7 +4,11 @@ from unittest.mock import patch import pytest -from homeassistant.components.datetime import ATTR_DATETIME, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.datetime import ( + ATTR_DATETIME, + DOMAIN as DATETIME_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -25,7 +29,9 @@ async def datetime_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_datetime(hass: HomeAssistant, datetime_only) -> None: """Initialize setup demo datetime.""" - assert await async_setup_component(hass, DOMAIN, {"datetime": {"platform": "demo"}}) + assert await async_setup_component( + hass, DATETIME_DOMAIN, {"datetime": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -39,7 +45,7 @@ async def test_set_datetime(hass: HomeAssistant) -> None: """Test set datetime service.""" await hass.config.async_set_time_zone("UTC") await hass.services.async_call( - DOMAIN, + DATETIME_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_DATETIME, ATTR_DATETIME: "2021-02-03 01:02:03"}, blocking=True, diff --git a/tests/components/demo/test_humidifier.py b/tests/components/demo/test_humidifier.py index 0f0fcaf43fd..93bd2b13743 100644 --- a/tests/components/demo/test_humidifier.py +++ b/tests/components/demo/test_humidifier.py @@ -11,7 +11,7 @@ from homeassistant.components.humidifier import ( ATTR_HUMIDITY, ATTR_MAX_HUMIDITY, ATTR_MIN_HUMIDITY, - DOMAIN, + DOMAIN as HUMIDITY_DOMAIN, MODE_AWAY, SERVICE_SET_HUMIDITY, SERVICE_SET_MODE, @@ -48,7 +48,7 @@ async def humidifier_only() -> None: async def setup_demo_humidifier(hass: HomeAssistant, humidifier_only: None): """Initialize setup demo humidifier.""" assert await async_setup_component( - hass, DOMAIN, {"humidifier": {"platform": "demo"}} + hass, HUMIDITY_DOMAIN, {"humidifier": {"platform": "demo"}} ) await hass.async_block_till_done() @@ -76,7 +76,7 @@ async def test_set_target_humidity_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + HUMIDITY_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_HUMIDITY: None, ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True, @@ -93,7 +93,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_HUMIDITY) == 54.2 await hass.services.async_call( - DOMAIN, + HUMIDITY_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_HUMIDITY: 64, ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True, @@ -107,7 +107,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: async def test_set_hold_mode_away(hass: HomeAssistant) -> None: """Test setting the hold mode away.""" await hass.services.async_call( - DOMAIN, + 
HUMIDITY_DOMAIN, SERVICE_SET_MODE, {ATTR_MODE: MODE_AWAY, ATTR_ENTITY_ID: ENTITY_HYGROSTAT}, blocking=True, @@ -121,7 +121,7 @@ async def test_set_hold_mode_away(hass: HomeAssistant) -> None: async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: """Test setting the hold mode eco.""" await hass.services.async_call( - DOMAIN, + HUMIDITY_DOMAIN, SERVICE_SET_MODE, {ATTR_MODE: "eco", ATTR_ENTITY_ID: ENTITY_HYGROSTAT}, blocking=True, @@ -135,14 +135,20 @@ async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: async def test_turn_on(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_OFF assert state.attributes.get(ATTR_ACTION) == "off" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON @@ -152,14 +158,20 @@ async def test_turn_on(hass: HomeAssistant) -> None: async def test_turn_off(hass: HomeAssistant) -> None: """Test turn off device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON assert state.attributes.get(ATTR_ACTION) == "drying" await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_OFF @@ -169,19 +181,28 @@ async def test_turn_off(hass: HomeAssistant) -> None: async def test_toggle(hass: HomeAssistant) -> None: """Test toggle device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_OFF await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON diff --git a/tests/components/demo/test_light.py b/tests/components/demo/test_light.py index e3b1efc7eec..b39b09d9307 100644 --- a/tests/components/demo/test_light.py +++ b/tests/components/demo/test_light.py @@ -9,11 +9,10 @@ from homeassistant.components.demo import DOMAIN from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, - ATTR_KELVIN, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, 
ATTR_RGB_COLOR, ATTR_XY_COLOR, DOMAIN as LIGHT_DOMAIN, @@ -73,31 +72,39 @@ async def test_state_attributes(hass: HomeAssistant) -> None: ) state = hass.states.get(ENTITY_LIGHT) - assert state.attributes.get(ATTR_RGB_COLOR) == (250, 252, 255) - assert state.attributes.get(ATTR_XY_COLOR) == (0.319, 0.326) + assert state.attributes.get(ATTR_RGB_COLOR) == (251, 253, 255) + assert state.attributes.get(ATTR_XY_COLOR) == (0.319, 0.327) await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_EFFECT: "none", ATTR_COLOR_TEMP: 400}, + { + ATTR_ENTITY_ID: ENTITY_LIGHT, + ATTR_EFFECT: "none", + ATTR_COLOR_TEMP_KELVIN: 2500, + }, blocking=True, ) state = hass.states.get(ENTITY_LIGHT) - assert state.attributes.get(ATTR_COLOR_TEMP) == 400 - assert state.attributes.get(ATTR_MIN_MIREDS) == 153 - assert state.attributes.get(ATTR_MAX_MIREDS) == 500 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 2500 + assert state.attributes.get(ATTR_MAX_COLOR_TEMP_KELVIN) == 6535 + assert state.attributes.get(ATTR_MIN_COLOR_TEMP_KELVIN) == 2000 assert state.attributes.get(ATTR_EFFECT) == "none" await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS_PCT: 50, ATTR_KELVIN: 3000}, + { + ATTR_ENTITY_ID: ENTITY_LIGHT, + ATTR_BRIGHTNESS_PCT: 50, + ATTR_COLOR_TEMP_KELVIN: 3000, + }, blocking=True, ) state = hass.states.get(ENTITY_LIGHT) - assert state.attributes.get(ATTR_COLOR_TEMP) == 333 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 3000 assert state.attributes.get(ATTR_BRIGHTNESS) == 128 diff --git a/tests/components/demo/test_lock.py b/tests/components/demo/test_lock.py index 853b9197ab7..1fc4209d300 100644 --- a/tests/components/demo/test_lock.py +++ b/tests/components/demo/test_lock.py @@ -10,19 +10,9 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_UNLOCKED, - STATE_UNLOCKING, -) -from homeassistant.const import ( - ATTR_ENTITY_ID, - EVENT_STATE_CHANGED, - STATE_OPEN, - STATE_OPENING, - Platform, + LockState, ) +from homeassistant.const import ATTR_ENTITY_ID, EVENT_STATE_CHANGED, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -57,7 +47,7 @@ async def setup_comp(hass: HomeAssistant, lock_only: None): async def test_locking(hass: HomeAssistant) -> None: """Test the locking of a lock.""" state = hass.states.get(KITCHEN) - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -67,17 +57,17 @@ async def test_locking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == KITCHEN - assert state_changes[0].data["new_state"].state == STATE_LOCKING + assert state_changes[0].data["new_state"].state == LockState.LOCKING assert state_changes[1].data["entity_id"] == KITCHEN - assert state_changes[1].data["new_state"].state == STATE_LOCKED + assert state_changes[1].data["new_state"].state == LockState.LOCKED @patch.object(demo_lock, "LOCK_UNLOCK_DELAY", 0) async def test_unlocking(hass: HomeAssistant) -> None: """Test the unlocking of a lock.""" state = hass.states.get(FRONT) - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -87,17 +77,17 @@ async def 
test_unlocking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == FRONT - assert state_changes[0].data["new_state"].state == STATE_UNLOCKING + assert state_changes[0].data["new_state"].state == LockState.UNLOCKING assert state_changes[1].data["entity_id"] == FRONT - assert state_changes[1].data["new_state"].state == STATE_UNLOCKED + assert state_changes[1].data["new_state"].state == LockState.UNLOCKED @patch.object(demo_lock, "LOCK_UNLOCK_DELAY", 0) async def test_opening(hass: HomeAssistant) -> None: """Test the opening of a lock.""" state = hass.states.get(OPENABLE_LOCK) - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -107,17 +97,17 @@ async def test_opening(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == OPENABLE_LOCK - assert state_changes[0].data["new_state"].state == STATE_OPENING + assert state_changes[0].data["new_state"].state == LockState.OPENING assert state_changes[1].data["entity_id"] == OPENABLE_LOCK - assert state_changes[1].data["new_state"].state == STATE_OPEN + assert state_changes[1].data["new_state"].state == LockState.OPEN @patch.object(demo_lock, "LOCK_UNLOCK_DELAY", 0) async def test_jammed_when_locking(hass: HomeAssistant) -> None: """Test the locking of a lock jams.""" state = hass.states.get(POORLY_INSTALLED) - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -127,10 +117,10 @@ async def test_jammed_when_locking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == POORLY_INSTALLED - assert state_changes[0].data["new_state"].state == STATE_LOCKING + assert state_changes[0].data["new_state"].state == LockState.LOCKING assert state_changes[1].data["entity_id"] == POORLY_INSTALLED - assert state_changes[1].data["new_state"].state == STATE_JAMMED + assert state_changes[1].data["new_state"].state == LockState.JAMMED async def test_opening_mocked(hass: HomeAssistant) -> None: diff --git a/tests/components/demo/test_number.py b/tests/components/demo/test_number.py index 79885fa8581..4b7cbe4864f 100644 --- a/tests/components/demo/test_number.py +++ b/tests/components/demo/test_number.py @@ -11,7 +11,7 @@ from homeassistant.components.number import ( ATTR_MIN, ATTR_STEP, ATTR_VALUE, - DOMAIN, + DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, NumberMode, ) @@ -39,7 +39,9 @@ def number_only() -> Generator[None]: @pytest.fixture(autouse=True) async def setup_demo_number(hass: HomeAssistant, number_only: None) -> None: """Initialize setup demo Number entity.""" - assert await async_setup_component(hass, DOMAIN, {"number": {"platform": "demo"}}) + assert await async_setup_component( + hass, NUMBER_DOMAIN, {"number": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -83,7 +85,7 @@ async def test_set_value_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: None, ATTR_ENTITY_ID: ENTITY_VOLUME}, blocking=True, @@ -101,7 +103,7 @@ async def test_set_value_bad_range(hass: HomeAssistant) -> None: with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: 
1024, ATTR_ENTITY_ID: ENTITY_VOLUME}, blocking=True, @@ -118,7 +120,7 @@ async def test_set_set_value(hass: HomeAssistant) -> None: assert state.state == "42.0" await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: 23, ATTR_ENTITY_ID: ENTITY_VOLUME}, blocking=True, diff --git a/tests/components/demo/test_select.py b/tests/components/demo/test_select.py index f9805f44866..a78f8552ec7 100644 --- a/tests/components/demo/test_select.py +++ b/tests/components/demo/test_select.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components.select import ( ATTR_OPTION, ATTR_OPTIONS, - DOMAIN, + DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) from homeassistant.const import ATTR_ENTITY_ID, Platform @@ -31,7 +31,9 @@ async def select_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_select(hass: HomeAssistant, select_only) -> None: """Initialize setup demo select entity.""" - assert await async_setup_component(hass, DOMAIN, {"select": {"platform": "demo"}}) + assert await async_setup_component( + hass, SELECT_DOMAIN, {"select": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -55,7 +57,7 @@ async def test_select_option_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, {ATTR_OPTION: "slow_speed", ATTR_ENTITY_ID: ENTITY_SPEED}, blocking=True, @@ -74,7 +76,7 @@ async def test_select_option(hass: HomeAssistant) -> None: assert state.state == "ridiculous_speed" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, {ATTR_OPTION: "light_speed", ATTR_ENTITY_ID: ENTITY_SPEED}, blocking=True, diff --git a/tests/components/demo/test_siren.py b/tests/components/demo/test_siren.py index e21cd96efc9..c537e73508d 100644 --- a/tests/components/demo/test_siren.py +++ b/tests/components/demo/test_siren.py @@ -8,7 +8,7 @@ from homeassistant.components.siren import ( ATTR_AVAILABLE_TONES, ATTR_TONE, ATTR_VOLUME_LEVEL, - DOMAIN, + DOMAIN as SIREN_DOMAIN, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -39,7 +39,9 @@ async def siren_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_siren(hass: HomeAssistant, siren_only: None): """Initialize setup demo siren.""" - assert await async_setup_component(hass, DOMAIN, {"siren": {"platform": "demo"}}) + assert await async_setup_component( + hass, SIREN_DOMAIN, {"siren": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -59,13 +61,13 @@ def test_all_setup_params(hass: HomeAssistant) -> None: async def test_turn_on(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_OFF await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON @@ -73,7 +75,7 @@ async def test_turn_on(hass: HomeAssistant) -> None: # Test that an invalid tone will raise a ValueError with pytest.raises(ValueError): await hass.services.async_call( - DOMAIN, + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN_WITH_ALL_FEATURES, ATTR_TONE: "invalid_tone"}, blocking=True, @@ -83,13 +85,13 @@ async def 
test_turn_on(hass: HomeAssistant) -> None: async def test_turn_off(hass: HomeAssistant) -> None: """Test turn off device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_OFF @@ -98,19 +100,19 @@ async def test_turn_off(hass: HomeAssistant) -> None: async def test_toggle(hass: HomeAssistant) -> None: """Test toggle device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_OFF await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON @@ -122,7 +124,7 @@ async def test_turn_on_strip_attributes(hass: HomeAssistant) -> None: "homeassistant.components.demo.siren.DemoSiren.async_turn_on" ) as svc_call: await hass.services.async_call( - DOMAIN, + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN, ATTR_VOLUME_LEVEL: 1}, blocking=True, diff --git a/tests/components/demo/test_text.py b/tests/components/demo/test_text.py index 4ca172e5143..b3291012167 100644 --- a/tests/components/demo/test_text.py +++ b/tests/components/demo/test_text.py @@ -10,7 +10,7 @@ from homeassistant.components.text import ( ATTR_MIN, ATTR_PATTERN, ATTR_VALUE, - DOMAIN, + DOMAIN as TEXT_DOMAIN, SERVICE_SET_VALUE, ) from homeassistant.const import ( @@ -38,7 +38,9 @@ def text_only() -> Generator[None]: @pytest.fixture(autouse=True) async def setup_demo_text(hass: HomeAssistant, text_only: None) -> None: """Initialize setup demo text.""" - assert await async_setup_component(hass, DOMAIN, {"text": {"platform": "demo"}}) + assert await async_setup_component( + hass, TEXT_DOMAIN, {"text": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -55,7 +57,7 @@ def test_setup_params(hass: HomeAssistant) -> None: async def test_set_value(hass: HomeAssistant) -> None: """Test set value service.""" await hass.services.async_call( - DOMAIN, + TEXT_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_TEXT, ATTR_VALUE: "new"}, blocking=True, diff --git a/tests/components/demo/test_time.py b/tests/components/demo/test_time.py index 8ef093a38f3..6997e8392ed 100644 --- a/tests/components/demo/test_time.py +++ b/tests/components/demo/test_time.py @@ -4,7 +4,11 @@ from unittest.mock import patch import pytest -from homeassistant.components.time import ATTR_TIME, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.time import ( + ATTR_TIME, + DOMAIN as TIME_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import 
async_setup_component @@ -25,7 +29,9 @@ async def time_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_datetime(hass: HomeAssistant, time_only) -> None: """Initialize setup demo time.""" - assert await async_setup_component(hass, DOMAIN, {"time": {"platform": "demo"}}) + assert await async_setup_component( + hass, TIME_DOMAIN, {"time": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -38,7 +44,7 @@ def test_setup_params(hass: HomeAssistant) -> None: async def test_set_value(hass: HomeAssistant) -> None: """Test set value service.""" await hass.services.async_call( - DOMAIN, + TIME_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_TIME, ATTR_TIME: "01:02:03"}, blocking=True, diff --git a/tests/components/demo/test_update.py b/tests/components/demo/test_update.py index 0a8886a085d..93a9f272aeb 100644 --- a/tests/components/demo/test_update.py +++ b/tests/components/demo/test_update.py @@ -11,7 +11,8 @@ from homeassistant.components.update import ( ATTR_RELEASE_SUMMARY, ATTR_RELEASE_URL, ATTR_TITLE, - DOMAIN, + ATTR_UPDATE_PERCENTAGE, + DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, UpdateDeviceClass, ) @@ -41,7 +42,9 @@ async def update_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_update(hass: HomeAssistant, update_only) -> None: """Initialize setup demo update entity.""" - assert await async_setup_component(hass, DOMAIN, {"update": {"platform": "demo"}}) + assert await async_setup_component( + hass, UPDATE_DOMAIN, {"update": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -123,55 +126,73 @@ def test_setup_params(hass: HomeAssistant) -> None: ) -async def test_update_with_progress(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("entity_id", "steps"), + [ + ("update.demo_update_with_progress", 10), + ("update.demo_update_with_decimal_progress", 1000), + ], +) +async def test_update_with_progress( + hass: HomeAssistant, entity_id: str, steps: int +) -> None: """Test update with progress.""" - state = hass.states.get("update.demo_update_with_progress") + state = hass.states.get(entity_id) assert state assert state.state == STATE_ON assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None events = [] async_track_state_change_event( hass, - "update.demo_update_with_progress", + entity_id, # pylint: disable-next=unnecessary-lambda callback(lambda event: events.append(event)), ) with patch("homeassistant.components.demo.update.FAKE_INSTALL_SLEEP_TIME", new=0): await hass.services.async_call( - DOMAIN, + UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.demo_update_with_progress"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert len(events) == 10 - assert events[0].data["new_state"].state == STATE_ON - assert events[0].data["new_state"].attributes[ATTR_IN_PROGRESS] == 10 - assert events[1].data["new_state"].attributes[ATTR_IN_PROGRESS] == 20 - assert events[2].data["new_state"].attributes[ATTR_IN_PROGRESS] == 30 - assert events[3].data["new_state"].attributes[ATTR_IN_PROGRESS] == 40 - assert events[4].data["new_state"].attributes[ATTR_IN_PROGRESS] == 50 - assert events[5].data["new_state"].attributes[ATTR_IN_PROGRESS] == 60 - assert events[6].data["new_state"].attributes[ATTR_IN_PROGRESS] == 70 - assert events[7].data["new_state"].attributes[ATTR_IN_PROGRESS] == 80 - assert events[8].data["new_state"].attributes[ATTR_IN_PROGRESS] == 90 - assert events[9].data["new_state"].attributes[ATTR_IN_PROGRESS] is False - assert events[9].data["new_state"].state 
== STATE_OFF + assert len(events) == steps + 1 + for i, event in enumerate(events[:steps]): + new_state = event.data["new_state"] + assert new_state.state == STATE_ON + assert new_state.attributes[ATTR_UPDATE_PERCENTAGE] == pytest.approx( + 100 / steps * i + ) + new_state = events[steps].data["new_state"] + assert new_state.attributes[ATTR_IN_PROGRESS] is False + assert new_state.attributes[ATTR_UPDATE_PERCENTAGE] is None + assert new_state.state == STATE_OFF -async def test_update_with_progress_raising(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("entity_id", "steps"), + [ + ("update.demo_update_with_progress", 10), + ("update.demo_update_with_decimal_progress", 1000), + ], +) +async def test_update_with_progress_raising( + hass: HomeAssistant, entity_id: str, steps: int +) -> None: """Test update with progress failing to install.""" - state = hass.states.get("update.demo_update_with_progress") + state = hass.states.get(entity_id) assert state assert state.state == STATE_ON assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None events = [] async_track_state_change_event( hass, - "update.demo_update_with_progress", + entity_id, # pylint: disable-next=unnecessary-lambda callback(lambda event: events.append(event)), ) @@ -184,19 +205,21 @@ async def test_update_with_progress_raising(hass: HomeAssistant) -> None: pytest.raises(RuntimeError), ): await hass.services.async_call( - DOMAIN, + UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.demo_update_with_progress"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) await hass.async_block_till_done() assert fake_sleep.call_count == 5 - assert len(events) == 5 - assert events[0].data["new_state"].state == STATE_ON - assert events[0].data["new_state"].attributes[ATTR_IN_PROGRESS] == 10 - assert events[1].data["new_state"].attributes[ATTR_IN_PROGRESS] == 20 - assert events[2].data["new_state"].attributes[ATTR_IN_PROGRESS] == 30 - assert events[3].data["new_state"].attributes[ATTR_IN_PROGRESS] == 40 - assert events[4].data["new_state"].attributes[ATTR_IN_PROGRESS] is False - assert events[4].data["new_state"].state == STATE_ON + assert len(events) == 6 + for i, event in enumerate(events[:5]): + new_state = event.data["new_state"] + assert new_state.state == STATE_ON + assert new_state.attributes[ATTR_UPDATE_PERCENTAGE] == pytest.approx( + 100 / steps * i + ) + assert events[5].data["new_state"].attributes[ATTR_IN_PROGRESS] is False + assert events[5].data["new_state"].attributes[ATTR_UPDATE_PERCENTAGE] is None + assert events[5].data["new_state"].state == STATE_ON diff --git a/tests/components/demo/test_vacuum.py b/tests/components/demo/test_vacuum.py index a3b982ab70e..f910e6e53ac 100644 --- a/tests/components/demo/test_vacuum.py +++ b/tests/components/demo/test_vacuum.py @@ -19,14 +19,10 @@ from homeassistant.components.vacuum import ( ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, - DOMAIN, + DOMAIN as VACUUM_DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, + VacuumActivity, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -42,11 +38,11 @@ from homeassistant.util import dt as dt_util from tests.common import async_fire_time_changed, async_mock_service from tests.components.vacuum import common -ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() -ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() -ENTITY_VACUUM_MINIMAL = 
f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() -ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() -ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() +ENTITY_VACUUM_BASIC = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_BASIC}".lower() +ENTITY_VACUUM_COMPLETE = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() +ENTITY_VACUUM_MINIMAL = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() +ENTITY_VACUUM_MOST = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_MOST}".lower() +ENTITY_VACUUM_NONE = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_NONE}".lower() @pytest.fixture @@ -62,7 +58,9 @@ async def vacuum_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass: HomeAssistant, vacuum_only: None): """Initialize setup demo vacuum.""" - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) + assert await async_setup_component( + hass, VACUUM_DOMAIN, {VACUUM_DOMAIN: {CONF_PLATFORM: "demo"}} + ) await hass.async_block_till_done() @@ -73,35 +71,35 @@ async def test_supported_features(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED state = hass.states.get(ENTITY_VACUUM_MOST) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 12412 assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED state = hass.states.get(ENTITY_VACUUM_BASIC) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 12360 assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED state = hass.states.get(ENTITY_VACUUM_MINIMAL) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3 assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED state = hass.states.get(ENTITY_VACUUM_NONE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0 assert state.attributes.get(ATTR_BATTERY_LEVEL) is None assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED async def test_methods(hass: HomeAssistant) -> None: @@ -109,29 +107,29 @@ async def test_methods(hass: HomeAssistant) -> None: await common.async_start(hass, ENTITY_VACUUM_BASIC) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_BASIC) - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING await common.async_stop(hass, ENTITY_VACUUM_BASIC) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_BASIC) - assert state.state == STATE_IDLE + assert state.state == VacuumActivity.IDLE state = hass.states.get(ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100 - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED await async_setup_component(hass, "notify", {}) await 
hass.async_block_till_done() await common.async_locate(hass, ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_IDLE + assert state.state == VacuumActivity.IDLE await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_RETURNING + assert state.state == VacuumActivity.RETURNING await common.async_set_fan_speed( hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE @@ -143,21 +141,21 @@ async def test_methods(hass: HomeAssistant) -> None: await common.async_clean_spot(hass, ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING await common.async_pause(hass, ENTITY_VACUUM_COMPLETE) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_PAUSED + assert state.state == VacuumActivity.PAUSED await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE) state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_RETURNING + assert state.state == VacuumActivity.RETURNING async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=31)) await hass.async_block_till_done() state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED async def test_unsupported_methods(hass: HomeAssistant) -> None: @@ -189,7 +187,7 @@ async def test_unsupported_methods(hass: HomeAssistant) -> None: async def test_services(hass: HomeAssistant) -> None: """Test vacuum services.""" # Test send_command - send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) + send_command_calls = async_mock_service(hass, VACUUM_DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( @@ -198,20 +196,20 @@ async def test_services(hass: HomeAssistant) -> None: assert len(send_command_calls) == 1 call = send_command_calls[-1] - assert call.domain == DOMAIN + assert call.domain == VACUUM_DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed - set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) + set_fan_speed_calls = async_mock_service(hass, VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], ENTITY_VACUUM_COMPLETE) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] - assert call.domain == DOMAIN + assert call.domain == VACUUM_DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] @@ -249,4 +247,4 @@ async def test_send_command(hass: HomeAssistant) -> None: new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE) assert old_state_complete != new_state_complete - assert new_state_complete.state == STATE_IDLE + assert new_state_complete.state == VacuumActivity.IDLE diff --git a/tests/components/derivative/test_init.py b/tests/components/derivative/test_init.py index 0081ab97580..32802080e39 100644 --- a/tests/components/derivative/test_init.py +++ b/tests/components/derivative/test_init.py @@ -42,7 +42,7 @@ async def 
test_setup_and_remove_config_entry( # Check the platform is setup correctly state = hass.states.get(derivative_entity_id) - assert state.state == "0" + assert state.state == "0.0" assert "unit_of_measurement" not in state.attributes assert state.attributes["source"] == "sensor.input" diff --git a/tests/components/derivative/test_sensor.py b/tests/components/derivative/test_sensor.py index 3646340cac3..4a4d8519b25 100644 --- a/tests/components/derivative/test_sensor.py +++ b/tests/components/derivative/test_sensor.py @@ -8,6 +8,7 @@ from typing import Any from freezegun import freeze_time from homeassistant.components.derivative.const import DOMAIN +from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass from homeassistant.const import UnitOfPower, UnitOfTime from homeassistant.core import HomeAssistant, State from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -354,6 +355,41 @@ async def test_suffix(hass: HomeAssistant) -> None: assert round(float(state.state), config["sensor"]["round"]) == 0.0 +async def test_total_increasing_reset(hass: HomeAssistant) -> None: + """Test derivative sensor state with total_increasing sensor input where it should ignore the reset value.""" + times = [0, 20, 30, 35, 40, 50, 60] + values = [0, 10, 30, 40, 0, 10, 40] + expected_times = [0, 20, 30, 35, 50, 60] + expected_values = ["0.00", "0.50", "2.00", "2.00", "1.00", "3.00"] + + config, entity_id = await _setup_sensor(hass, {"unit_time": UnitOfTime.SECONDS}) + + base_time = dt_util.utcnow() + actual_times = [] + actual_values = [] + with freeze_time(base_time) as freezer: + for time, value in zip(times, values, strict=False): + current_time = base_time + timedelta(seconds=time) + freezer.move_to(current_time) + hass.states.async_set( + entity_id, + value, + {ATTR_STATE_CLASS: SensorStateClass.TOTAL_INCREASING}, + force_update=True, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.power") + assert state is not None + + if state.last_reported == current_time: + actual_times.append(time) + actual_values.append(state.state) + + assert actual_times == expected_times + assert actual_values == expected_values + + async def test_device_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/device_automation/test_init.py b/tests/components/device_automation/test_init.py index 750817f3c41..94625746b05 100644 --- a/tests/components/device_automation/test_init.py +++ b/tests/components/device_automation/test_init.py @@ -27,7 +27,7 @@ from tests.common import MockConfigEntry, MockModule, mock_integration, mock_pla from tests.typing import WebSocketGenerator -@attr.s(frozen=True) +@attr.s(frozen=True, slots=True) class MockDeviceEntry(dr.DeviceEntry): """Device Registry Entry with fixed UUID.""" @@ -720,12 +720,17 @@ async def test_async_get_device_automations_all_devices_action_exception_throw( assert "KeyError" in caplog.text +@pytest.mark.parametrize( + "trigger_key", + ["trigger", "platform"], +) async def test_websocket_get_trigger_capabilities( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, fake_integration, + trigger_key: str, ) -> None: """Test we get the expected trigger capabilities through websocket.""" await async_setup_component(hass, "device_automation", {}) @@ -767,11 +772,12 @@ async def test_websocket_get_trigger_capabilities( assert msg["id"] == 1 assert msg["type"] == TYPE_RESULT assert msg["success"] - 
triggers = msg["result"] + triggers: dict = msg["result"] msg_id = 2 assert len(triggers) == 3 # toggled, turned_on, turned_off for trigger in triggers: + trigger[trigger_key] = trigger.pop("platform") await client.send_json( { "id": msg_id, @@ -1307,7 +1313,7 @@ async def test_automation_with_bad_action( }, ) - assert expected_error.format(path="['action'][0]") in caplog.text + assert expected_error.format(path="['actions'][0]") in caplog.text @patch("homeassistant.helpers.device_registry.DeviceEntry", MockDeviceEntry) @@ -1341,7 +1347,7 @@ async def test_automation_with_bad_condition_action( }, ) - assert expected_error.format(path="['action'][0]") in caplog.text + assert expected_error.format(path="['actions'][0]") in caplog.text @patch("homeassistant.helpers.device_registry.DeviceEntry", MockDeviceEntry) @@ -1375,7 +1381,7 @@ async def test_automation_with_bad_condition( }, ) - assert expected_error.format(path="['condition'][0]") in caplog.text + assert expected_error.format(path="['conditions'][0]") in caplog.text async def test_automation_with_sub_condition( @@ -1541,7 +1547,7 @@ async def test_automation_with_bad_sub_condition( }, ) - path = "['condition'][0]['conditions'][0]" + path = "['conditions'][0]['conditions'][0]" assert expected_error.format(path=path) in caplog.text diff --git a/tests/components/device_sun_light_trigger/test_init.py b/tests/components/device_sun_light_trigger/test_init.py index f3821eb5af9..24996482916 100644 --- a/tests/components/device_sun_light_trigger/test_init.py +++ b/tests/components/device_sun_light_trigger/test_init.py @@ -13,7 +13,7 @@ from homeassistant.components import ( group, light, ) -from homeassistant.components.device_tracker import DOMAIN +from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, CONF_PLATFORM, @@ -150,21 +150,21 @@ async def test_lights_turn_on_when_coming_home_after_sun_set( hass, device_sun_light_trigger.DOMAIN, {device_sun_light_trigger.DOMAIN: {}} ) - hass.states.async_set(f"{DOMAIN}.device_2", STATE_UNKNOWN) + hass.states.async_set(f"{DEVICE_TRACKER_DOMAIN}.device_2", STATE_UNKNOWN) await hass.async_block_till_done() assert all( hass.states.get(ent_id).state == STATE_OFF for ent_id in hass.states.async_entity_ids("light") ) - hass.states.async_set(f"{DOMAIN}.device_2", STATE_NOT_HOME) + hass.states.async_set(f"{DEVICE_TRACKER_DOMAIN}.device_2", STATE_NOT_HOME) await hass.async_block_till_done() assert all( hass.states.get(ent_id).state == STATE_OFF for ent_id in hass.states.async_entity_ids("light") ) - hass.states.async_set(f"{DOMAIN}.device_2", STATE_HOME) + hass.states.async_set(f"{DEVICE_TRACKER_DOMAIN}.device_2", STATE_HOME) await hass.async_block_till_done() assert all( hass.states.get(ent_id).state == light.STATE_ON @@ -177,8 +177,11 @@ async def test_lights_turn_on_when_coming_home_after_sun_set_person( hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights turn on when coming home after sun set.""" - device_1 = f"{DOMAIN}.device_1" - device_2 = f"{DOMAIN}.device_2" + # Ensure all setup tasks are done (avoid flaky tests) + await hass.async_block_till_done(wait_background_tasks=True) + + device_1 = f"{DEVICE_TRACKER_DOMAIN}.device_1" + device_2 = f"{DEVICE_TRACKER_DOMAIN}.device_2" test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC) freezer.move_to(test_time) diff --git a/tests/components/device_tracker/common.py b/tests/components/device_tracker/common.py index b6341443d36..4842a91ce42 100644 --- 
a/tests/components/device_tracker/common.py +++ b/tests/components/device_tracker/common.py @@ -69,7 +69,7 @@ class MockScannerEntity(ScannerEntity): self._mac_address = "ad:de:ef:be:ed:fe" @property - def source_type(self): + def source_type(self) -> SourceType: """Return the source type, eg gps or router, of the device.""" return SourceType.ROUTER diff --git a/tests/components/device_tracker/test_config_entry.py b/tests/components/device_tracker/test_config_entry.py index 5b9ce78e4f5..bc721803450 100644 --- a/tests/components/device_tracker/test_config_entry.py +++ b/tests/components/device_tracker/test_config_entry.py @@ -162,7 +162,7 @@ class MockTrackerEntity(TrackerEntity): return self._battery_level @property - def source_type(self) -> SourceType | str: + def source_type(self) -> SourceType: """Return the source type, eg gps or router, of the device.""" return SourceType.GPS @@ -249,7 +249,7 @@ class MockScannerEntity(ScannerEntity): return False @property - def source_type(self) -> SourceType | str: + def source_type(self) -> SourceType: """Return the source type, eg gps or router, of the device.""" return SourceType.ROUTER @@ -505,8 +505,7 @@ async def test_scanner_entity_state( def test_tracker_entity() -> None: """Test coverage for base TrackerEntity class.""" entity = TrackerEntity() - with pytest.raises(NotImplementedError): - assert entity.source_type is None + assert entity.source_type is SourceType.GPS assert entity.latitude is None assert entity.longitude is None assert entity.location_name is None @@ -539,8 +538,7 @@ def test_tracker_entity() -> None: def test_scanner_entity() -> None: """Test coverage for base ScannerEntity entity class.""" entity = ScannerEntity() - with pytest.raises(NotImplementedError): - assert entity.source_type is None + assert entity.source_type is SourceType.ROUTER with pytest.raises(NotImplementedError): assert entity.is_connected is None with pytest.raises(NotImplementedError): diff --git a/tests/components/device_tracker/test_init.py b/tests/components/device_tracker/test_init.py index 362258b035a..e73c18919c5 100644 --- a/tests/components/device_tracker/test_init.py +++ b/tests/components/device_tracker/test_init.py @@ -5,7 +5,6 @@ from datetime import datetime, timedelta import json import logging import os -from types import ModuleType from unittest.mock import call, patch import pytest @@ -37,8 +36,6 @@ from .common import MockScanner, mock_legacy_device_tracker_setup from tests.common import ( assert_setup_component, async_fire_time_changed, - help_test_all, - import_and_test_deprecated_constant_enum, mock_registry, mock_restore_cache, patch_yaml_files, @@ -739,28 +736,3 @@ def test_see_schema_allowing_ios_calls() -> None: "hostname": "beer", } ) - - -@pytest.mark.parametrize( - "module", - [device_tracker, device_tracker.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize(("enum"), list(SourceType)) -@pytest.mark.parametrize( - "module", - [device_tracker, device_tracker.const], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: SourceType, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, "SOURCE_TYPE_", "2025.1" - ) diff --git a/tests/components/devolo_home_control/__init__.py b/tests/components/devolo_home_control/__init__.py index f0e18eaf1a2..a1bf9d56aac 100644 --- a/tests/components/devolo_home_control/__init__.py +++ 
b/tests/components/devolo_home_control/__init__.py @@ -11,7 +11,6 @@ def configure_integration(hass: HomeAssistant) -> MockConfigEntry: config = { "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", } entry = MockConfigEntry( domain=DOMAIN, data=config, entry_id="123456", unique_id="123456" diff --git a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr index 8c069de8f62..0e507ca0b28 100644 --- a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr @@ -33,11 +33,12 @@ ]), 'entry': dict({ 'data': dict({ - 'mydevolo_url': 'https://test_mydevolo_url.test', 'password': '**REDACTED**', 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'devolo_home_control', 'entry_id': '123456', 'minor_version': 1, @@ -46,6 +47,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '123456', 'version': 1, diff --git a/tests/components/devolo_home_control/test_binary_sensor.py b/tests/components/devolo_home_control/test_binary_sensor.py index e809c94c129..fd28ce2fdf6 100644 --- a/tests/components/devolo_home_control/test_binary_sensor.py +++ b/tests/components/devolo_home_control/test_binary_sensor.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.binary_sensor import DOMAIN +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -34,24 +34,28 @@ async def test_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_door") + state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_door") == snapshot + assert entity_registry.async_get(f"{BINARY_SENSOR_DOMAIN}.test_door") == snapshot - state = hass.states.get(f"{DOMAIN}.test_overload") + state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_overload") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_overload") == snapshot + assert ( + entity_registry.async_get(f"{BINARY_SENSOR_DOMAIN}.test_overload") == snapshot + ) # Emulate websocket message: sensor turned on test_gateway.publisher.dispatch("Test", ("Test", True)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_door").state == STATE_ON + assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door").state == STATE_ON # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_door").state == STATE_UNAVAILABLE + assert ( + hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door").state == STATE_UNAVAILABLE + ) @pytest.mark.usefixtures("mock_zeroconf") @@ -69,25 +73,30 @@ async def test_remote_control( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_button_1") + state = 
hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_button_1") == snapshot + assert ( + entity_registry.async_get(f"{BINARY_SENSOR_DOMAIN}.test_button_1") == snapshot + ) # Emulate websocket message: button pressed test_gateway.publisher.dispatch("Test", ("Test", 1)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_button_1").state == STATE_ON + assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1").state == STATE_ON # Emulate websocket message: button released test_gateway.publisher.dispatch("Test", ("Test", 0)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_button_1").state == STATE_OFF + assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1").state == STATE_OFF # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_button_1").state == STATE_UNAVAILABLE + assert ( + hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1").state + == STATE_UNAVAILABLE + ) @pytest.mark.usefixtures("mock_zeroconf") @@ -101,7 +110,7 @@ async def test_disabled(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_door") is None + assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door") is None @pytest.mark.usefixtures("mock_zeroconf") @@ -116,7 +125,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_door") + state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_climate.py b/tests/components/devolo_home_control/test_climate.py index 953ff835b89..3aedda90e02 100644 --- a/tests/components/devolo_home_control/test_climate.py +++ b/tests/components/devolo_home_control/test_climate.py @@ -6,7 +6,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.climate import ( ATTR_HVAC_MODE, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, HVACMode, ) @@ -32,14 +32,14 @@ async def test_climate( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{CLIMATE_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{CLIMATE_DOMAIN}.test") == snapshot # Emulate websocket message: temperature changed test_gateway.publisher.dispatch("Test", ("Test", 21.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{CLIMATE_DOMAIN}.test") assert state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 21.0 @@ -48,10 +48,10 @@ async def test_climate( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { - ATTR_ENTITY_ID: f"{DOMAIN}.test", + ATTR_ENTITY_ID: f"{CLIMATE_DOMAIN}.test", ATTR_HVAC_MODE: HVACMode.HEAT, ATTR_TEMPERATURE: 20.0, }, @@ -63,7 
+63,7 @@ async def test_climate( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{CLIMATE_DOMAIN}.test").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -77,7 +77,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{CLIMATE_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_config_flow.py b/tests/components/devolo_home_control/test_config_flow.py index 48f9bf31f4f..aab3e69b38f 100644 --- a/tests/components/devolo_home_control/test_config_flow.py +++ b/tests/components/devolo_home_control/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from homeassistant import config_entries -from homeassistant.components.devolo_home_control.const import DEFAULT_MYDEVOLO, DOMAIN +from homeassistant.components.devolo_home_control.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType @@ -90,7 +90,6 @@ async def test_form_advanced_options(hass: HomeAssistant) -> None: { "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", }, ) await hass.async_block_till_done() @@ -100,7 +99,6 @@ async def test_form_advanced_options(hass: HomeAssistant) -> None: assert result2["data"] == { "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", } assert len(mock_setup_entry.mock_calls) == 1 @@ -164,21 +162,16 @@ async def test_zeroconf_wrong_device(hass: HomeAssistant) -> None: async def test_form_reauth(hass: HomeAssistant) -> None: """Test that the reauth confirmation form is served.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) - mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, + mock_config = MockConfigEntry( + domain=DOMAIN, + unique_id="123456", data={ "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", }, ) - + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -205,20 +198,16 @@ async def test_form_reauth(hass: HomeAssistant) -> None: @pytest.mark.parametrize("credentials_valid", [False]) async def test_form_invalid_credentials_reauth(hass: HomeAssistant) -> None: """Test if we get the error message on invalid credentials.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) - mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, + mock_config = MockConfigEntry( + domain=DOMAIN, + unique_id="123456", data={ "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", }, ) + mock_config.add_to_hass(hass) + 
result = await mock_config.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -230,20 +219,16 @@ async def test_form_invalid_credentials_reauth(hass: HomeAssistant) -> None: async def test_form_uuid_change_reauth(hass: HomeAssistant) -> None: """Test that the reauth confirmation form is served.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) - mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, + mock_config = MockConfigEntry( + domain=DOMAIN, + unique_id="123456", data={ "username": "test-username", "password": "test-password", - "mydevolo_url": "https://test_mydevolo_url.test", }, ) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -291,7 +276,6 @@ async def _setup(hass: HomeAssistant, result: FlowResult) -> None: assert result2["data"] == { "username": "test-username", "password": "test-password", - "mydevolo_url": DEFAULT_MYDEVOLO, } assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/devolo_home_control/test_cover.py b/tests/components/devolo_home_control/test_cover.py index c21dabadb1a..7d4b081c87e 100644 --- a/tests/components/devolo_home_control/test_cover.py +++ b/tests/components/devolo_home_control/test_cover.py @@ -4,13 +4,17 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.cover import ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + CoverState, +) from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - STATE_CLOSED, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -34,15 +38,15 @@ async def test_cover( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{COVER_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{COVER_DOMAIN}.test") == snapshot # Emulate websocket message: position changed test_gateway.publisher.dispatch("Test", ("devolo.Blinds", 0.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") - assert state.state == STATE_CLOSED + state = hass.states.get(f"{COVER_DOMAIN}.test") + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0.0 # Test setting position @@ -50,27 +54,27 @@ async def test_cover( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{COVER_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(100) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{COVER_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above 
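The cover test in this hunk compares raw state strings against CoverState enum members instead of the removed STATE_* constants (the lock and vacuum tests earlier do the same with LockState and VacuumActivity). That only works if these state enums are string-based, which is the assumption behind the following sketch; the stand-in enum below is hypothetical and not the real homeassistant.components.cover.CoverState:

from enum import StrEnum

class DemoCoverState(StrEnum):
    # Hypothetical stand-in for illustration; the real enum lives in homeassistant.components.cover.
    OPEN = "open"
    OPENING = "opening"
    CLOSED = "closed"
    CLOSING = "closing"

raw_state = "closed"                           # what hass.states.get(...).state returns
assert raw_state == DemoCoverState.CLOSED      # str-based enum members compare equal to their value
assert f"{DemoCoverState.CLOSED}" == "closed"  # and render as the plain string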
set_value.assert_called_once_with(0) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: f"{DOMAIN}.test", ATTR_POSITION: 50}, + {ATTR_ENTITY_ID: f"{COVER_DOMAIN}.test", ATTR_POSITION: 50}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(50) @@ -79,7 +83,7 @@ async def test_cover( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{COVER_DOMAIN}.test").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -93,7 +97,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{COVER_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_light.py b/tests/components/devolo_home_control/test_light.py index f72136ee287..46c3fbc98f3 100644 --- a/tests/components/devolo_home_control/test_light.py +++ b/tests/components/devolo_home_control/test_light.py @@ -4,7 +4,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -33,18 +33,18 @@ async def test_light_without_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{LIGHT_DOMAIN}.test") == snapshot # Emulate websocket message: brightness changed test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 0.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state.state == STATE_OFF test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 100.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 255 @@ -53,27 +53,27 @@ async def test_light_without_binary_sensor( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(100) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(0) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + 
LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{DOMAIN}.test", ATTR_BRIGHTNESS: 50}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test", ATTR_BRIGHTNESS: 50}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(round(50 / 255 * 100)) @@ -82,7 +82,7 @@ async def test_light_without_binary_sensor( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{LIGHT_DOMAIN}.test").state == STATE_UNAVAILABLE async def test_light_with_binary_sensor( @@ -101,18 +101,18 @@ async def test_light_with_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{LIGHT_DOMAIN}.test") == snapshot # Emulate websocket message: brightness changed test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 0.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state.state == STATE_OFF test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 100.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 255 @@ -121,18 +121,18 @@ async def test_light_with_binary_sensor( "devolo_home_control_api.properties.binary_switch_property.BinarySwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(True) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(False) @@ -149,7 +149,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_sensor.py b/tests/components/devolo_home_control/test_sensor.py index 62023982e81..08b53dae865 100644 --- a/tests/components/devolo_home_control/test_sensor.py +++ b/tests/components/devolo_home_control/test_sensor.py @@ -4,7 +4,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.sensor import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -26,9 +26,9 @@ async def test_temperature_sensor( await hass.config_entries.async_setup(entry.entry_id) 
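The devolo light tests above assert the translation between Home Assistant's 0-255 brightness attribute and the device's 0-100 percentage scale, e.g. set_value.assert_called_once_with(round(50 / 255 * 100)) and ATTR_BRIGHTNESS == 255 after a 100.0 websocket update. A minimal sketch of that arithmetic (helper names are illustrative, not Home Assistant or devolo APIs):

def ha_brightness_to_percent(brightness: int) -> int:
    # Map a 0-255 Home Assistant brightness onto the device's 0-100 percentage scale.
    return round(brightness / 255 * 100)

def percent_to_ha_brightness(percent: float) -> int:
    # Map a 0-100 device percentage back onto the 0-255 brightness scale.
    return round(percent / 100 * 255)

assert ha_brightness_to_percent(50) == 20      # matches round(50 / 255 * 100) in the service-call assertion
assert percent_to_ha_brightness(100.0) == 255  # matches ATTR_BRIGHTNESS == 255 after the 100.0 dispatch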
await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_temperature") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_temperature") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_temperature") == snapshot + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_temperature") == snapshot async def test_battery_sensor( @@ -45,14 +45,14 @@ async def test_battery_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_battery_level") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_battery_level") == snapshot + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_battery_level") == snapshot # Emulate websocket message: value changed test_gateway.publisher.dispatch("Test", ("Test", 10, "battery_level")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_battery_level").state == "10" + assert hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level").state == "10" async def test_consumption_sensor( @@ -68,29 +68,36 @@ async def test_consumption_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_current_consumption") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_current_consumption") == snapshot + assert ( + entity_registry.async_get(f"{SENSOR_DOMAIN}.test_current_consumption") + == snapshot + ) - state = hass.states.get(f"{DOMAIN}.test_total_consumption") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_total_consumption") == snapshot + assert ( + entity_registry.async_get(f"{SENSOR_DOMAIN}.test_total_consumption") == snapshot + ) # Emulate websocket message: value changed test_gateway.devices["Test"].consumption_property["devolo.Meter:Test"].total = 50.0 test_gateway.publisher.dispatch("Test", ("devolo.Meter:Test", 50.0)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_total_consumption").state == "50.0" + assert hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state == "50.0" # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() assert ( - hass.states.get(f"{DOMAIN}.test_current_consumption").state == STATE_UNAVAILABLE + hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption").state + == STATE_UNAVAILABLE ) assert ( - hass.states.get(f"{DOMAIN}.test_total_consumption").state == STATE_UNAVAILABLE + hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state + == STATE_UNAVAILABLE ) @@ -105,7 +112,7 @@ async def test_voltage_sensor(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_voltage") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_voltage") assert state is None @@ -123,14 +130,16 @@ async def test_sensor_change(hass: HomeAssistant) -> None: # Emulate websocket message: value changed test_gateway.publisher.dispatch("Test", ("devolo.MultiLevelSensor:Test", 50.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_temperature") + state = 
hass.states.get(f"{SENSOR_DOMAIN}.test_temperature") assert state.state == "50.0" # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_temperature").state == STATE_UNAVAILABLE + assert ( + hass.states.get(f"{SENSOR_DOMAIN}.test_temperature").state == STATE_UNAVAILABLE + ) async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -144,7 +153,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_temperature") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_temperature") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_siren.py b/tests/components/devolo_home_control/test_siren.py index be662418967..71f4dfdd34d 100644 --- a/tests/components/devolo_home_control/test_siren.py +++ b/tests/components/devolo_home_control/test_siren.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.siren import DOMAIN +from homeassistant.components.siren import DOMAIN as SIREN_DOMAIN from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -29,20 +29,20 @@ async def test_siren( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SIREN_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{SIREN_DOMAIN}.test") == snapshot # Emulate websocket message: sensor turned on test_gateway.publisher.dispatch("Test", ("devolo.SirenMultiLevelSwitch:Test", 1)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_ON + assert hass.states.get(f"{SIREN_DOMAIN}.test").state == STATE_ON # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{SIREN_DOMAIN}.test").state == STATE_UNAVAILABLE @pytest.mark.usefixtures("mock_zeroconf") @@ -60,9 +60,9 @@ async def test_siren_switching( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SIREN_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{SIREN_DOMAIN}.test") == snapshot with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" @@ -70,7 +70,7 @@ async def test_siren_switching( await hass.services.async_call( "siren", "turn_on", - {"entity_id": f"{DOMAIN}.test"}, + {"entity_id": f"{SIREN_DOMAIN}.test"}, blocking=True, ) # The real device state is changed by a websocket message @@ -86,7 +86,7 @@ async def test_siren_switching( await hass.services.async_call( "siren", "turn_off", - {"entity_id": f"{DOMAIN}.test"}, + {"entity_id": 
f"{SIREN_DOMAIN}.test"}, blocking=True, ) # The real device state is changed by a websocket message @@ -94,7 +94,7 @@ async def test_siren_switching( "Test", ("devolo.SirenMultiLevelSwitch:Test", 0) ) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OFF + assert hass.states.get(f"{SIREN_DOMAIN}.test").state == STATE_OFF property_set.assert_called_once_with(0) @@ -113,9 +113,9 @@ async def test_siren_change_default_tone( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SIREN_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{SIREN_DOMAIN}.test") == snapshot with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" @@ -124,7 +124,7 @@ async def test_siren_change_default_tone( await hass.services.async_call( "siren", "turn_on", - {"entity_id": f"{DOMAIN}.test"}, + {"entity_id": f"{SIREN_DOMAIN}.test"}, blocking=True, ) property_set.assert_called_once_with(2) @@ -142,7 +142,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SIREN_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_switch.py b/tests/components/devolo_home_control/test_switch.py index 86f93bfddf6..46adaf8c8b0 100644 --- a/tests/components/devolo_home_control/test_switch.py +++ b/tests/components/devolo_home_control/test_switch.py @@ -4,7 +4,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -32,9 +32,9 @@ async def test_switch( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SWITCH_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{SWITCH_DOMAIN}.test") == snapshot # Emulate websocket message: switched on test_gateway.devices["Test"].binary_switch_property[ @@ -42,24 +42,24 @@ async def test_switch( ].state = True test_gateway.publisher.dispatch("Test", ("devolo.BinarySwitch:Test", True)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_ON + assert hass.states.get(f"{SWITCH_DOMAIN}.test").state == STATE_ON with patch( "devolo_home_control_api.properties.binary_switch_property.BinarySwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{SWITCH_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(state=True) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{SWITCH_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already 
tested above set_value.assert_called_once_with(state=False) @@ -68,7 +68,7 @@ async def test_switch( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{SWITCH_DOMAIN}.test").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -82,7 +82,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SWITCH_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/__init__.py b/tests/components/devolo_home_network/__init__.py index 05ccbca0c56..f6d1c13299a 100644 --- a/tests/components/devolo_home_network/__init__.py +++ b/tests/components/devolo_home_network/__init__.py @@ -4,7 +4,7 @@ from homeassistant.components.devolo_home_network.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD from homeassistant.core import HomeAssistant -from .const import IP +from .const import DISCOVERY_INFO, IP from tests.common import MockConfigEntry @@ -15,7 +15,12 @@ def configure_integration(hass: HomeAssistant) -> MockConfigEntry: CONF_IP_ADDRESS: IP, CONF_PASSWORD: "test", } - entry = MockConfigEntry(domain=DOMAIN, data=config, entry_id="123456") + entry = MockConfigEntry( + domain=DOMAIN, + data=config, + entry_id="123456", + unique_id=DISCOVERY_INFO.properties["SN"], + ) entry.add_to_hass(hass) return entry diff --git a/tests/components/devolo_home_network/const.py b/tests/components/devolo_home_network/const.py index 9d8faab9b13..7b0551b1daf 100644 --- a/tests/components/devolo_home_network/const.py +++ b/tests/components/devolo_home_network/const.py @@ -171,3 +171,5 @@ PLCNET_ATTACHED = LogicalNetwork( }, ], ) + +UPTIME = 100 diff --git a/tests/components/devolo_home_network/mock.py b/tests/components/devolo_home_network/mock.py index 4b999667e53..82bf3e5ad76 100644 --- a/tests/components/devolo_home_network/mock.py +++ b/tests/components/devolo_home_network/mock.py @@ -19,6 +19,7 @@ from .const import ( IP, NEIGHBOR_ACCESS_POINTS, PLCNET, + UPTIME, ) @@ -49,7 +50,7 @@ class MockDevice(Device): self, session_instance: httpx.AsyncClient | None = None ) -> None: """Give a mocked device the needed properties.""" - self.mac = DISCOVERY_INFO.properties["PlcMacAddress"] + self.mac = DISCOVERY_INFO.properties["PlcMacAddress"] if self.plcnet else None self.mt_number = DISCOVERY_INFO.properties["MT"] self.product = DISCOVERY_INFO.properties["Product"] self.serial_number = DISCOVERY_INFO.properties["SN"] @@ -64,6 +65,7 @@ class MockDevice(Device): ) self.device.async_get_led_setting = AsyncMock(return_value=False) self.device.async_restart = AsyncMock(return_value=True) + self.device.async_uptime = AsyncMock(return_value=UPTIME) self.device.async_start_wps = AsyncMock(return_value=True) self.device.async_get_wifi_connected_station = AsyncMock( return_value=CONNECTED_STATIONS diff --git a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr index 317aaac0116..1288b7f3ef6 100644 --- a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr +++ 
b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr @@ -22,6 +22,8 @@ 'password': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'devolo_home_network', 'entry_id': '123456', 'minor_version': 1, @@ -30,8 +32,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', - 'unique_id': None, + 'unique_id': '1234567890', 'version': 1, }), }) diff --git a/tests/components/devolo_home_network/snapshots/test_init.ambr b/tests/components/devolo_home_network/snapshots/test_init.ambr index 619a8ce1121..297c9a25183 100644 --- a/tests/components/devolo_home_network/snapshots/test_init.ambr +++ b/tests/components/devolo_home_network/snapshots/test_init.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_setup_entry +# name: test_setup_entry[mock_device] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -35,3 +35,35 @@ 'via_device_id': None, }) # --- +# name: test_setup_entry[mock_repeater_device] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://192.0.2.1', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'devolo_home_network', + '1234567890', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'devolo', + 'model': 'dLAN pro 1200+ WiFi ac', + 'model_id': '2730', + 'name': 'Mock Title', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '1234567890', + 'suggested_area': None, + 'sw_version': '5.6.1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/devolo_home_network/snapshots/test_sensor.ambr b/tests/components/devolo_home_network/snapshots/test_sensor.ambr index d985ac35495..2e6730cdb21 100644 --- a/tests/components/devolo_home_network/snapshots/test_sensor.ambr +++ b/tests/components/devolo_home_network/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2] +# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2-1] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mock Title Connected PLC devices', @@ -12,7 +12,7 @@ 'state': '1', }) # --- -# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2].1 +# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2-1].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -45,7 +45,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0] +# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0-1] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mock Title Connected Wi-Fi clients', @@ -59,7 +59,7 @@ 'state': '1', }) # --- -# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0].1 +# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0-1].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -94,7 +94,54 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1] +# name: test_sensor[last_restart_of_the_device-async_uptime-interval3-2023-01-13T11:58:50+00:00] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title 
Last restart of the device', + }), + 'context': , + 'entity_id': 'sensor.mock_title_last_restart_of_the_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-01-13T11:58:20+00:00', + }) +# --- +# name: test_sensor[last_restart_of_the_device-async_uptime-interval3-2023-01-13T11:58:50+00:00].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_last_restart_of_the_device', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last restart of the device', + 'platform': 'devolo_home_network', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_restart', + 'unique_id': '1234567890_last_restart', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1-1] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mock Title Neighboring Wi-Fi networks', @@ -107,7 +154,7 @@ 'state': '1', }) # --- -# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1].1 +# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1-1].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), diff --git a/tests/components/devolo_home_network/snapshots/test_update.ambr b/tests/components/devolo_home_network/snapshots/test_update.ambr index 83ca84c82e8..8a1065f9a60 100644 --- a/tests/components/devolo_home_network/snapshots/test_update.ambr +++ b/tests/components/devolo_home_network/snapshots/test_update.ambr @@ -4,6 +4,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/devolo_home_network/icon.png', 'friendly_name': 'Mock Title Firmware', 'in_progress': False, @@ -14,6 +15,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.mock_title_firmware', diff --git a/tests/components/devolo_home_network/test_binary_sensor.py b/tests/components/devolo_home_network/test_binary_sensor.py index 3e4bf8471c1..8197ec1a1e5 100644 --- a/tests/components/devolo_home_network/test_binary_sensor.py +++ b/tests/components/devolo_home_network/test_binary_sensor.py @@ -7,7 +7,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.binary_sensor import DOMAIN +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.devolo_home_network.const import ( CONNECTED_TO_ROUTER, LONG_UPDATE_INTERVAL, @@ -31,7 +31,10 @@ async def test_binary_sensor_setup(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}") is None + assert ( + hass.states.get(f"{BINARY_SENSOR_DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}") + is None + ) await hass.config_entries.async_unload(entry.entry_id) @@ -47,7 +50,7 @@ async def test_update_attached_to_router( """Test state change of a attached_to_router binary sensor 
device.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - state_key = f"{DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}" + state_key = f"{BINARY_SENSOR_DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/test_config_flow.py b/tests/components/devolo_home_network/test_config_flow.py index 5aa2bfa274e..92163b5cb95 100644 --- a/tests/components/devolo_home_network/test_config_flow.py +++ b/tests/components/devolo_home_network/test_config_flow.py @@ -29,8 +29,6 @@ from .const import ( ) from .mock import MockDevice -from tests.common import MockConfigEntry - async def test_form(hass: HomeAssistant, info: dict[str, Any]) -> None: """Test we get the form.""" @@ -125,6 +123,8 @@ async def test_zeroconf(hass: HomeAssistant) -> None: CONF_IP_ADDRESS: IP, CONF_PASSWORD: "", } + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == "1234567890" async def test_abort_zeroconf_wrong_device(hass: HomeAssistant) -> None: @@ -139,13 +139,9 @@ async def test_abort_zeroconf_wrong_device(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("info") -async def test_abort_if_configued(hass: HomeAssistant) -> None: +async def test_abort_if_configured(hass: HomeAssistant) -> None: """Test we abort config flow if already configured.""" - serial_number = DISCOVERY_INFO.properties["SN"] - entry = MockConfigEntry( - domain=DOMAIN, unique_id=serial_number, data={CONF_IP_ADDRESS: IP} - ) - entry.add_to_hass(hass) + entry = configure_integration(hass) # Abort on concurrent user flow result = await hass.config_entries.flow.async_init( @@ -179,18 +175,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: entry = configure_integration(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": { - CONF_NAME: DISCOVERY_INFO.hostname.split(".")[0], - }, - }, - data=entry.data, - ) - + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/devolo_home_network/test_image.py b/tests/components/devolo_home_network/test_image.py index 80efc4fcc09..f13db4fce9d 100644 --- a/tests/components/devolo_home_network/test_image.py +++ b/tests/components/devolo_home_network/test_image.py @@ -9,7 +9,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.devolo_home_network.const import SHORT_UPDATE_INTERVAL -from homeassistant.components.image import DOMAIN +from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -32,7 +32,9 @@ async def test_image_setup(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert ( - hass.states.get(f"{DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code") + hass.states.get( + f"{IMAGE_DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code" + ) is not None ) @@ -51,7 +53,7 @@ async def test_guest_wifi_qr( """Test showing a QR code of the guest wifi credentials.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - 
state_key = f"{DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code" + state_key = f"{IMAGE_DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/test_init.py b/tests/components/devolo_home_network/test_init.py index 1b8903c568e..71823eabe82 100644 --- a/tests/components/devolo_home_network/test_init.py +++ b/tests/components/devolo_home_network/test_init.py @@ -27,13 +27,16 @@ from .mock import MockDevice from tests.common import MockConfigEntry +@pytest.mark.parametrize("device", ["mock_device", "mock_repeater_device"]) async def test_setup_entry( hass: HomeAssistant, - mock_device: MockDevice, + device: str, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion, + request: pytest.FixtureRequest, ) -> None: """Test setup entry.""" + mock_device: MockDevice = request.getfixturevalue(device) entry = configure_integration(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/test_sensor.py b/tests/components/devolo_home_network/test_sensor.py index efcbaa803df..cf0207a2800 100644 --- a/tests/components/devolo_home_network/test_sensor.py +++ b/tests/components/devolo_home_network/test_sensor.py @@ -3,16 +3,18 @@ from datetime import timedelta from unittest.mock import AsyncMock -from devolo_plc_api.exceptions.device import DeviceUnavailable +from devolo_plc_api.exceptions.device import DevicePasswordProtected, DeviceUnavailable from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.devolo_home_network.const import ( + DOMAIN, LONG_UPDATE_INTERVAL, SHORT_UPDATE_INTERVAL, ) -from homeassistant.components.sensor import DOMAIN +from homeassistant.components.sensor import DOMAIN as PLATFORM +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -33,59 +35,74 @@ async def test_sensor_setup(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert ( - hass.states.get(f"{DOMAIN}.{device_name}_connected_wi_fi_clients") is not None + hass.states.get(f"{PLATFORM}.{device_name}_connected_wi_fi_clients") is not None + ) + assert hass.states.get(f"{PLATFORM}.{device_name}_connected_plc_devices") is None + assert ( + hass.states.get(f"{PLATFORM}.{device_name}_neighboring_wi_fi_networks") is None ) - assert hass.states.get(f"{DOMAIN}.{device_name}_connected_plc_devices") is None - assert hass.states.get(f"{DOMAIN}.{device_name}_neighboring_wi_fi_networks") is None assert ( hass.states.get( - f"{DOMAIN}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" + f"{PLATFORM}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" ) is not None ) assert ( hass.states.get( - f"{DOMAIN}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" + f"{PLATFORM}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" ) is not None ) assert ( hass.states.get( - f"{DOMAIN}.{device_name}_plc_downlink_phyrate_{PLCNET.devices[2].user_device_name}" + f"{PLATFORM}.{device_name}_plc_downlink_phyrate_{PLCNET.devices[2].user_device_name}" ) is None ) assert ( hass.states.get( - 
f"{DOMAIN}.{device_name}_plc_uplink_phyrate_{PLCNET.devices[2].user_device_name}" + f"{PLATFORM}.{device_name}_plc_uplink_phyrate_{PLCNET.devices[2].user_device_name}" ) is None ) + assert ( + hass.states.get(f"{PLATFORM}.{device_name}_last_restart_of_the_device") is None + ) await hass.config_entries.async_unload(entry.entry_id) @pytest.mark.parametrize( - ("name", "get_method", "interval"), + ("name", "get_method", "interval", "expected_state"), [ ( "connected_wi_fi_clients", "async_get_wifi_connected_station", SHORT_UPDATE_INTERVAL, + "1", ), ( "neighboring_wi_fi_networks", "async_get_wifi_neighbor_access_points", LONG_UPDATE_INTERVAL, + "1", ), ( "connected_plc_devices", "async_get_network_overview", LONG_UPDATE_INTERVAL, + "1", + ), + ( + "last_restart_of_the_device", + "async_uptime", + SHORT_UPDATE_INTERVAL, + "2023-01-13T11:58:50+00:00", ), ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.freeze_time("2023-01-13 12:00:00+00:00") async def test_sensor( hass: HomeAssistant, mock_device: MockDevice, @@ -95,11 +112,12 @@ async def test_sensor( name: str, get_method: str, interval: timedelta, + expected_state: str, ) -> None: """Test state change of a sensor device.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - state_key = f"{DOMAIN}.{device_name}_{name}" + state_key = f"{PLATFORM}.{device_name}_{name}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -125,7 +143,7 @@ async def test_sensor( state = hass.states.get(state_key) assert state is not None - assert state.state == "1" + assert state.state == expected_state await hass.config_entries.async_unload(entry.entry_id) @@ -140,8 +158,8 @@ async def test_update_plc_phyrates( """Test state change of plc_downlink_phyrate and plc_uplink_phyrate sensor devices.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - state_key_downlink = f"{DOMAIN}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" - state_key_uplink = f"{DOMAIN}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" + state_key_downlink = f"{PLATFORM}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" + state_key_uplink = f"{PLATFORM}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -181,3 +199,28 @@ async def test_update_plc_phyrates( assert state.state == str(PLCNET.data_rates[0].tx_rate) await hass.config_entries.async_unload(entry.entry_id) + + +async def test_update_last_update_auth_failed( + hass: HomeAssistant, mock_device: MockDevice +) -> None: + """Test getting the last update state with wrong password triggers the reauth flow.""" + entry = configure_integration(hass) + mock_device.device.async_uptime.side_effect = DevicePasswordProtected + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"]["source"] == SOURCE_REAUTH + assert flow["context"]["entry_id"] == entry.entry_id + + await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/devolo_home_network/test_update.py 
b/tests/components/devolo_home_network/test_update.py index 7f70524fa5b..4fe7a173309 100644 --- a/tests/components/devolo_home_network/test_update.py +++ b/tests/components/devolo_home_network/test_update.py @@ -141,7 +141,7 @@ async def test_device_failure_update( async def test_auth_failed(hass: HomeAssistant, mock_device: MockDevice) -> None: - """Test updating unautherized triggers the reauth flow.""" + """Test updating unauthorized triggers the reauth flow.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() state_key = f"{PLATFORM}.{device_name}_firmware" diff --git a/tests/components/dexcom/__init__.py b/tests/components/dexcom/__init__.py index adc9c56049a..10a742070d6 100644 --- a/tests/components/dexcom/__init__.py +++ b/tests/components/dexcom/__init__.py @@ -1,6 +1,7 @@ """Tests for the Dexcom integration.""" import json +from typing import Any from unittest.mock import patch from pydexcom import GlucoseReading @@ -20,14 +21,16 @@ CONFIG = { GLUCOSE_READING = GlucoseReading(json.loads(load_fixture("data.json", "dexcom"))) -async def init_integration(hass: HomeAssistant) -> MockConfigEntry: +async def init_integration( + hass: HomeAssistant, options: dict[str, Any] | None = None +) -> MockConfigEntry: """Set up the Dexcom integration in Home Assistant.""" entry = MockConfigEntry( domain=DOMAIN, title="test_username", unique_id="test_username", data=CONFIG, - options=None, + options=options, ) with ( patch( diff --git a/tests/components/dexcom/test_config_flow.py b/tests/components/dexcom/test_config_flow.py index e8893e21d0e..0a7338c13da 100644 --- a/tests/components/dexcom/test_config_flow.py +++ b/tests/components/dexcom/test_config_flow.py @@ -5,15 +5,13 @@ from unittest.mock import patch from pydexcom import AccountError, SessionError from homeassistant import config_entries -from homeassistant.components.dexcom.const import DOMAIN, MG_DL, MMOL_L -from homeassistant.const import CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME +from homeassistant.components.dexcom.const import DOMAIN +from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . 
import CONFIG -from tests.common import MockConfigEntry - async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -101,51 +99,3 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} - - -async def test_option_flow_default(hass: HomeAssistant) -> None: - """Test config flow options.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=CONFIG, - options=None, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result2 = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={}, - ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["data"] == { - CONF_UNIT_OF_MEASUREMENT: MG_DL, - } - - -async def test_option_flow(hass: HomeAssistant) -> None: - """Test config flow options.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=CONFIG, - options={CONF_UNIT_OF_MEASUREMENT: MG_DL}, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={CONF_UNIT_OF_MEASUREMENT: MMOL_L}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_UNIT_OF_MEASUREMENT: MMOL_L, - } diff --git a/tests/components/dexcom/test_sensor.py b/tests/components/dexcom/test_sensor.py index 1b7f0b026ab..5c0a5280ad6 100644 --- a/tests/components/dexcom/test_sensor.py +++ b/tests/components/dexcom/test_sensor.py @@ -4,12 +4,7 @@ from unittest.mock import patch from pydexcom import SessionError -from homeassistant.components.dexcom.const import MMOL_L -from homeassistant.const import ( - CONF_UNIT_OF_MEASUREMENT, - STATE_UNAVAILABLE, - STATE_UNKNOWN, -) +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -58,36 +53,3 @@ async def test_sensors_update_failed(hass: HomeAssistant) -> None: assert test_username_glucose_value.state == STATE_UNAVAILABLE test_username_glucose_trend = hass.states.get("sensor.test_username_glucose_trend") assert test_username_glucose_trend.state == STATE_UNAVAILABLE - - -async def test_sensors_options_changed(hass: HomeAssistant) -> None: - """Test we handle sensor unavailable.""" - entry = await init_integration(hass) - - test_username_glucose_value = hass.states.get("sensor.test_username_glucose_value") - assert test_username_glucose_value.state == str(GLUCOSE_READING.value) - test_username_glucose_trend = hass.states.get("sensor.test_username_glucose_trend") - assert test_username_glucose_trend.state == GLUCOSE_READING.trend_description - - with ( - patch( - "homeassistant.components.dexcom.Dexcom.get_current_glucose_reading", - return_value=GLUCOSE_READING, - ), - patch( - "homeassistant.components.dexcom.Dexcom.create_session", - return_value="test_session_id", - ), - ): - hass.config_entries.async_update_entry( - entry=entry, - options={CONF_UNIT_OF_MEASUREMENT: MMOL_L}, - ) - await hass.async_block_till_done() - - assert entry.options == {CONF_UNIT_OF_MEASUREMENT: MMOL_L} - - test_username_glucose_value = hass.states.get("sensor.test_username_glucose_value") - assert 
test_username_glucose_value.state == str(GLUCOSE_READING.mmol_l) - test_username_glucose_trend = hass.states.get("sensor.test_username_glucose_trend") - assert test_username_glucose_trend.state == GLUCOSE_READING.trend_description diff --git a/tests/components/dhcp/test_init.py b/tests/components/dhcp/test_init.py index 7c652c8ea3e..6852f4369cc 100644 --- a/tests/components/dhcp/test_init.py +++ b/tests/components/dhcp/test_init.py @@ -8,10 +8,7 @@ from unittest.mock import patch import aiodhcpwatcher import pytest -from scapy import ( - arch, # noqa: F401 - interfaces, -) +from scapy import interfaces from scapy.error import Scapy_Exception from scapy.layers.dhcp import DHCP from scapy.layers.l2 import Ether @@ -35,11 +32,17 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant import homeassistant.helpers.device_registry as dr +from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import ( + MockConfigEntry, + MockModule, + async_fire_time_changed, + mock_integration, +) # connect b8:b7:f1:6d:b5:33 192.168.210.56 RAW_DHCP_REQUEST = ( @@ -138,11 +141,15 @@ RAW_DHCP_REQUEST_WITHOUT_HOSTNAME = ( async def _async_get_handle_dhcp_packet( - hass: HomeAssistant, integration_matchers: dhcp.DhcpMatchers + hass: HomeAssistant, + integration_matchers: dhcp.DhcpMatchers, + address_data: dict | None = None, ) -> Callable[[Any], Awaitable[None]]: + if address_data is None: + address_data = {} dhcp_watcher = dhcp.DHCPWatcher( hass, - {}, + address_data, integration_matchers, ) with patch("aiodhcpwatcher.async_start"): @@ -177,7 +184,8 @@ async def test_dhcp_match_hostname_and_macaddress(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -205,7 +213,8 @@ async def test_dhcp_renewal_match_hostname_and_macaddress(hass: HomeAssistant) - assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="50147903852c", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.1.120", @@ -254,7 +263,8 @@ async def test_registered_devices( assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="50147903852c", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.1.120", @@ -280,7 +290,8 @@ async def test_dhcp_match_hostname(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", 
key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -306,7 +317,8 @@ async def test_dhcp_match_macaddress(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -335,7 +347,8 @@ async def test_dhcp_multiple_match_only_one_flow(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -361,7 +374,8 @@ async def test_dhcp_match_macaddress_without_hostname(hass: HomeAssistant) -> No assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="606bbd59e4b4", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.107.151", @@ -687,7 +701,8 @@ async def test_device_tracker_hostname_and_macaddress_exists_before_start( assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -724,7 +739,8 @@ async def test_device_tracker_registered(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -803,7 +819,8 @@ async def test_device_tracker_hostname_and_macaddress_after_start( assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -1012,7 +1029,8 @@ async def test_aiodiscover_finds_new_hosts(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ 
-1074,7 +1092,8 @@ async def test_aiodiscover_does_not_call_again_on_shorter_hostname( assert len(mock_init.mock_calls) == 2 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -1083,7 +1102,8 @@ async def test_aiodiscover_does_not_call_again_on_shorter_hostname( ) assert mock_init.mock_calls[1][1][0] == "mock-domain" assert mock_init.mock_calls[1][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[1][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -1140,10 +1160,196 @@ async def test_aiodiscover_finds_new_hosts_after_interval(hass: HomeAssistant) - assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_DHCP + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", hostname="connect", macaddress="b8b7f16db533", ) + + +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + ), + [ + # Matching discovery key + ( + "mock-domain", + {"dhcp": (DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1),)}, + ), + # Matching discovery key + ( + "mock-domain", + { + "dhcp": (DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1),), + "other": (DiscoveryKey(domain="other", key="blah", version=1),), + }, + ), + # Matching discovery key, other domain + # Note: Rediscovery is not currently restricted to the domain of the removed + # entry. Such a check can be added if needed. 
+ ( + "comp", + {"dhcp": (DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1),)}, + ), + ], +) +@pytest.mark.parametrize( + "entry_source", + [ + config_entries.SOURCE_DHCP, + config_entries.SOURCE_IGNORE, + config_entries.SOURCE_USER, + ], +) +async def test_dhcp_rediscover( + hass: HomeAssistant, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, +) -> None: + """Test we reinitiate flows when an ignored config entry is removed.""" + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id="mock-unique-id", + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + address_data = {} + integration_matchers = dhcp.async_index_integration_matchers( + [{"domain": "mock-domain", "hostname": "connect", "macaddress": "B8B7F1*"}] + ) + packet = Ether(RAW_DHCP_REQUEST) + + async_handle_dhcp_packet = await _async_get_handle_dhcp_packet( + hass, integration_matchers, address_data + ) + rediscovery_watcher = dhcp.RediscoveryWatcher( + hass, address_data, integration_matchers + ) + rediscovery_watcher.async_start() + with patch.object(hass.config_entries.flow, "async_init") as mock_init: + await async_handle_dhcp_packet(packet) + # Ensure no change is ignored + await async_handle_dhcp_packet(packet) + + # Assert the cached MAC address is hexstring without : + assert address_data == { + "b8b7f16db533": {"hostname": "connect", "ip": "192.168.210.56"} + } + + expected_context = { + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, + } + assert len(mock_init.mock_calls) == 1 + assert mock_init.mock_calls[0][1][0] == "mock-domain" + assert mock_init.mock_calls[0][2]["context"] == expected_context + assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( + ip="192.168.210.56", + hostname="connect", + macaddress="b8b7f16db533", + ) + + with patch.object(hass.config_entries.flow, "async_init") as mock_init: + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_init.mock_calls) == 1 + assert mock_init.mock_calls[0][1][0] == "mock-domain" + assert mock_init.mock_calls[0][2]["context"] == expected_context + + +@pytest.mark.usefixtures("mock_async_zeroconf") +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + "entry_source", + "entry_unique_id", + ), + [ + # Discovery key from other domain + ( + "mock-domain", + { + "bluetooth": ( + DiscoveryKey(domain="bluetooth", key="b8b7f16db533", version=1), + ) + }, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + # Discovery key from the future + ( + "mock-domain", + {"dhcp": (DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=2),)}, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + ], +) +async def test_dhcp_rediscover_no_match( + hass: HomeAssistant, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, + entry_unique_id: str, +) -> None: + """Test we don't reinitiate flows when a non matching config entry is removed.""" + + mock_integration(hass, MockModule(entry_domain)) + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id=entry_unique_id, + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + address_data = {} + integration_matchers = dhcp.async_index_integration_matchers( + [{"domain": 
"mock-domain", "hostname": "connect", "macaddress": "B8B7F1*"}] + ) + packet = Ether(RAW_DHCP_REQUEST) + + async_handle_dhcp_packet = await _async_get_handle_dhcp_packet( + hass, integration_matchers, address_data + ) + rediscovery_watcher = dhcp.RediscoveryWatcher( + hass, address_data, integration_matchers + ) + rediscovery_watcher.async_start() + with patch.object(hass.config_entries.flow, "async_init") as mock_init: + await async_handle_dhcp_packet(packet) + # Ensure no change is ignored + await async_handle_dhcp_packet(packet) + + expected_context = { + "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), + "source": config_entries.SOURCE_DHCP, + } + assert len(mock_init.mock_calls) == 1 + assert mock_init.mock_calls[0][1][0] == "mock-domain" + assert mock_init.mock_calls[0][2]["context"] == expected_context + assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( + ip="192.168.210.56", + hostname="connect", + macaddress="b8b7f16db533", + ) + + with patch.object(hass.config_entries.flow, "async_init") as mock_init: + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_init.mock_calls) == 0 diff --git a/tests/components/diagnostics/test_init.py b/tests/components/diagnostics/test_init.py index 7f583395387..ffed7e21f60 100644 --- a/tests/components/diagnostics/test_init.py +++ b/tests/components/diagnostics/test_init.py @@ -174,6 +174,7 @@ async def test_download_diagnostics( "dependencies": [], "domain": "fake_integration", "is_built_in": True, + "overwrites_built_in": False, "name": "fake_integration", "requirements": [], }, @@ -260,6 +261,7 @@ async def test_download_diagnostics( "dependencies": [], "domain": "fake_integration", "is_built_in": True, + "overwrites_built_in": False, "name": "fake_integration", "requirements": [], }, diff --git a/tests/components/dialogflow/test_init.py b/tests/components/dialogflow/test_init.py index 4c36a6887aa..8144bef7c1c 100644 --- a/tests/components/dialogflow/test_init.py +++ b/tests/components/dialogflow/test_init.py @@ -8,8 +8,8 @@ import pytest from homeassistant import config_entries from homeassistant.components import dialogflow, intent_script -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component diff --git a/tests/components/directv/test_media_player.py b/tests/components/directv/test_media_player.py index 33eb35ed268..37762a22fe2 100644 --- a/tests/components/directv/test_media_player.py +++ b/tests/components/directv/test_media_player.py @@ -215,7 +215,7 @@ async def test_check_attributes( assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT) assert state.attributes.get(ATTR_MEDIA_TITLE) == "Snow Bride" assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) is None - assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format("HALLHD", "312") + assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "HALLHD (312)" assert state.attributes.get(ATTR_INPUT_SOURCE) == "312" assert not state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING) assert state.attributes.get(ATTR_MEDIA_RATING) == "TV-G" @@ -234,7 +234,7 @@ async def test_check_attributes( assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT) assert state.attributes.get(ATTR_MEDIA_TITLE) == "Tyler's Ultimate" assert 
state.attributes.get(ATTR_MEDIA_SERIES_TITLE) == "Spaghetti and Clam Sauce" - assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format("FOODHD", "231") + assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "FOODHD (231)" assert state.attributes.get(ATTR_INPUT_SOURCE) == "231" assert not state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING) assert state.attributes.get(ATTR_MEDIA_RATING) == "No Rating" @@ -255,7 +255,7 @@ async def test_check_attributes( assert state.attributes.get(ATTR_MEDIA_ARTIST) == "Gerald Albright" assert state.attributes.get(ATTR_MEDIA_ALBUM_NAME) == "Slam Dunk (2014)" assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) is None - assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format("MCSJ", "851") + assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "MCSJ (851)" assert state.attributes.get(ATTR_INPUT_SOURCE) == "851" assert not state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING) assert state.attributes.get(ATTR_MEDIA_RATING) == "TV-PG" diff --git a/tests/components/discord/__init__.py b/tests/components/discord/__init__.py index bf7c188b7b5..1d81388d1e3 100644 --- a/tests/components/discord/__init__.py +++ b/tests/components/discord/__init__.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock, Mock, patch import nextcord from homeassistant.components.discord.const import DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_TOKEN, CONF_NAME from homeassistant.core import HomeAssistant @@ -22,7 +21,7 @@ CONF_DATA = { } -def create_entry(hass: HomeAssistant) -> ConfigEntry: +def create_entry(hass: HomeAssistant) -> MockConfigEntry: """Add config entry in Home Assistant.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/discord/test_config_flow.py b/tests/components/discord/test_config_flow.py index 9b37179e86d..e9a1344c555 100644 --- a/tests/components/discord/test_config_flow.py +++ b/tests/components/discord/test_config_flow.py @@ -4,7 +4,7 @@ import nextcord from homeassistant import config_entries from homeassistant.components.discord.const import DOMAIN -from homeassistant.const import CONF_API_TOKEN, CONF_SOURCE +from homeassistant.const import CONF_API_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,16 +123,7 @@ async def test_flow_user_unknown_error(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test a reauth flow.""" entry = create_entry(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/discovergy/test_config_flow.py b/tests/components/discovergy/test_config_flow.py index 2464ba3846f..23c4a0f7cee 100644 --- a/tests/components/discovergy/test_config_flow.py +++ b/tests/components/discovergy/test_config_flow.py @@ -6,7 +6,7 @@ from pydiscovergy.error import DiscovergyClientError, HTTPError, InvalidLogin import pytest from homeassistant.components.discovergy.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from 
homeassistant.data_entry_flow import FlowResultType @@ -20,7 +20,7 @@ async def test_form(hass: HomeAssistant, discovergy: AsyncMock) -> None: DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["errors"] is None + assert result["errors"] == {} with patch( "homeassistant.components.discovergy.async_setup_entry", @@ -49,15 +49,9 @@ async def test_reauth( ) -> None: """Test reauth flow.""" config_entry.add_to_hass(hass) - - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, - data=None, - ) - + init_result = await config_entry.start_reauth_flow(hass) assert init_result["type"] is FlowResultType.FORM - assert init_result["step_id"] == "reauth" + assert init_result["step_id"] == "user" with patch( "homeassistant.components.discovergy.async_setup_entry", @@ -66,7 +60,7 @@ async def test_reauth( configure_result = await hass.config_entries.flow.async_configure( init_result["flow_id"], { - CONF_EMAIL: "test@example.com", + CONF_EMAIL: "user@example.org", CONF_PASSWORD: "test-password", }, ) @@ -117,3 +111,30 @@ async def test_form_fail( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test@example.com" assert "errors" not in result + + +async def test_reauth_unique_id_mismatch( + hass: HomeAssistant, config_entry: MockConfigEntry, discovergy: AsyncMock +) -> None: + """Test reauth flow with unique id mismatch.""" + config_entry.add_to_hass(hass) + + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + with patch( + "homeassistant.components.discovergy.async_setup_entry", + return_value=True, + ): + configure_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "user2@example.org", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert configure_result["type"] is FlowResultType.ABORT + assert configure_result["reason"] == "account_mismatch" diff --git a/tests/components/dlna_dmr/test_config_flow.py b/tests/components/dlna_dmr/test_config_flow.py index d60a8f17b83..cb32001e1e5 100644 --- a/tests/components/dlna_dmr/test_config_flow.py +++ b/tests/components/dlna_dmr/test_config_flow.py @@ -671,83 +671,6 @@ async def test_ignore_flow_no_ssdp( } -async def test_unignore_flow(hass: HomeAssistant, ssdp_scanner_mock: Mock) -> None: - """Test a config flow started by unignoring a device.""" - # Create ignored entry (with no extra info from SSDP) - ssdp_scanner_mock.async_get_discovery_info_by_udn_st.return_value = None - result = await hass.config_entries.flow.async_init( - DLNA_DOMAIN, - context={"source": config_entries.SOURCE_IGNORE}, - data={"unique_id": MOCK_DEVICE_UDN, "title": MOCK_DEVICE_NAME}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == MOCK_DEVICE_NAME - - # Device was found via SSDP, matching the 2nd device type tried - ssdp_scanner_mock.async_get_discovery_info_by_udn_st.side_effect = [ - None, - MOCK_DISCOVERY, - None, - None, - None, - ] - - # Unignore it and expect config flow to start - result = await hass.config_entries.flow.async_init( - DLNA_DOMAIN, - context={"source": config_entries.SOURCE_UNIGNORE}, - data={"unique_id": MOCK_DEVICE_UDN}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - - result = await 
hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == MOCK_DEVICE_NAME - assert result["data"] == { - CONF_URL: MOCK_DEVICE_LOCATION, - CONF_DEVICE_ID: MOCK_DEVICE_UDN, - CONF_TYPE: MOCK_DEVICE_TYPE, - CONF_MAC: MOCK_MAC_ADDRESS, - } - assert result["options"] == {} - - -async def test_unignore_flow_offline( - hass: HomeAssistant, ssdp_scanner_mock: Mock -) -> None: - """Test a config flow started by unignoring a device, but the device is offline.""" - # Create ignored entry (with no extra info from SSDP) - ssdp_scanner_mock.async_get_discovery_info_by_udn_st.return_value = None - result = await hass.config_entries.flow.async_init( - DLNA_DOMAIN, - context={"source": config_entries.SOURCE_IGNORE}, - data={"unique_id": MOCK_DEVICE_UDN, "title": MOCK_DEVICE_NAME}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == MOCK_DEVICE_NAME - - # Device is not in the SSDP discoveries (perhaps HA restarted between ignore and unignore) - ssdp_scanner_mock.async_get_discovery_info_by_udn_st.return_value = None - - # Unignore it and expect config flow to start then abort - result = await hass.config_entries.flow.async_init( - DLNA_DOMAIN, - context={"source": config_entries.SOURCE_UNIGNORE}, - data={"unique_id": MOCK_DEVICE_UDN}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "discovery_error" - - async def test_get_mac_address_ipv4( hass: HomeAssistant, mock_get_mac_address: Mock ) -> None: diff --git a/tests/components/dlna_dms/test_device_availability.py b/tests/components/dlna_dms/test_device_availability.py index c1ad3c91a7b..1be68f91733 100644 --- a/tests/components/dlna_dms/test_device_availability.py +++ b/tests/components/dlna_dms/test_device_availability.py @@ -15,8 +15,8 @@ import pytest from homeassistant.components import media_source, ssdp from homeassistant.components.dlna_dms.const import DOMAIN from homeassistant.components.dlna_dms.dms import get_domain_data -from homeassistant.components.media_player.errors import BrowseError -from homeassistant.components.media_source.error import Unresolvable +from homeassistant.components.media_player import BrowseError +from homeassistant.components.media_source import Unresolvable from homeassistant.core import HomeAssistant from .conftest import ( diff --git a/tests/components/dlna_dms/test_dms_device_source.py b/tests/components/dlna_dms/test_dms_device_source.py index 23d9e6927ae..7907d40c415 100644 --- a/tests/components/dlna_dms/test_dms_device_source.py +++ b/tests/components/dlna_dms/test_dms_device_source.py @@ -13,9 +13,8 @@ import pytest from homeassistant.components import media_source, ssdp from homeassistant.components.dlna_dms.const import DLNA_SORT_CRITERIA, DOMAIN from homeassistant.components.dlna_dms.dms import DidlPlayMedia -from homeassistant.components.media_player.errors import BrowseError -from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.media_source.models import BrowseMediaSource +from homeassistant.components.media_player import BrowseError +from homeassistant.components.media_source import BrowseMediaSource, Unresolvable from homeassistant.core import HomeAssistant from .conftest import ( diff --git a/tests/components/dlna_dms/test_media_source.py b/tests/components/dlna_dms/test_media_source.py index 
641232e356a..ad290826075 100644 --- a/tests/components/dlna_dms/test_media_source.py +++ b/tests/components/dlna_dms/test_media_source.py @@ -13,11 +13,11 @@ from homeassistant.components.dlna_dms.media_source import ( DmsMediaSource, async_get_media_source, ) -from homeassistant.components.media_player.errors import BrowseError -from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.media_source.models import ( +from homeassistant.components.media_player import BrowseError +from homeassistant.components.media_source import ( BrowseMediaSource, MediaSourceItem, + Unresolvable, ) from homeassistant.const import CONF_DEVICE_ID, CONF_URL from homeassistant.core import HomeAssistant diff --git a/tests/components/dnsip/test_config_flow.py b/tests/components/dnsip/test_config_flow.py index 99dc5781d16..9d92cb3554c 100644 --- a/tests/components/dnsip/test_config_flow.py +++ b/tests/components/dnsip/test_config_flow.py @@ -278,11 +278,15 @@ async def test_options_flow_empty_return(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={}, - ) - await hass.async_block_till_done() + with patch( + "homeassistant.components.dnsip.config_flow.aiodns.DNSResolver", + return_value=RetrieveDNS(), + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={}, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { diff --git a/tests/components/doorbird/conftest.py b/tests/components/doorbird/conftest.py index 2e367e4e1d8..0da69a98303 100644 --- a/tests/components/doorbird/conftest.py +++ b/tests/components/doorbird/conftest.py @@ -32,13 +32,13 @@ class MockDoorbirdEntry: api: MagicMock -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def doorbird_info() -> dict[str, Any]: """Return a loaded DoorBird info fixture.""" return load_json_value_fixture("info.json", "doorbird")["BHA"]["VERSION"][0] -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def doorbird_schedule() -> list[DoorBirdScheduleEntry]: """Return a loaded DoorBird schedule fixture.""" return DoorBirdScheduleEntry.parse_all( @@ -46,7 +46,7 @@ def doorbird_schedule() -> list[DoorBirdScheduleEntry]: ) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def doorbird_schedule_wrong_param() -> list[DoorBirdScheduleEntry]: """Return a loaded DoorBird schedule fixture with an incorrect param.""" return DoorBirdScheduleEntry.parse_all( @@ -54,7 +54,7 @@ def doorbird_schedule_wrong_param() -> list[DoorBirdScheduleEntry]: ) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def doorbird_favorites() -> dict[str, dict[str, Any]]: """Return a loaded DoorBird favorites fixture.""" return load_json_value_fixture("favorites.json", "doorbird") diff --git a/tests/components/doorbird/test_button.py b/tests/components/doorbird/test_button.py index cb4bab656ee..abb490e9180 100644 --- a/tests/components/doorbird/test_button.py +++ b/tests/components/doorbird/test_button.py @@ -1,6 +1,6 @@ """Test DoorBird buttons.""" -from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant @@ -16,7 +16,7 @@ async def 
test_relay_button( relay_1_entity_id = "button.mydoorbird_relay_1" assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN await hass.services.async_call( - DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: relay_1_entity_id}, blocking=True + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: relay_1_entity_id}, blocking=True ) assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN assert doorbird_entry.api.energize_relay.call_count == 1 @@ -31,7 +31,7 @@ async def test_ir_button( ir_entity_id = "button.mydoorbird_ir" assert hass.states.get(ir_entity_id).state == STATE_UNKNOWN await hass.services.async_call( - DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ir_entity_id}, blocking=True + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ir_entity_id}, blocking=True ) assert hass.states.get(ir_entity_id).state != STATE_UNKNOWN assert doorbird_entry.api.turn_light_on.call_count == 1 @@ -46,7 +46,7 @@ async def test_reset_favorites_button( reset_entity_id = "button.mydoorbird_reset_favorites" assert hass.states.get(reset_entity_id).state == STATE_UNKNOWN await hass.services.async_call( - DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: reset_entity_id}, blocking=True + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: reset_entity_id}, blocking=True ) assert hass.states.get(reset_entity_id).state != STATE_UNKNOWN assert doorbird_entry.api.delete_favorite.call_count == 3 diff --git a/tests/components/doorbird/test_camera.py b/tests/components/doorbird/test_camera.py index 228a6c81daa..a310bcb88cc 100644 --- a/tests/components/doorbird/test_camera.py +++ b/tests/components/doorbird/test_camera.py @@ -4,7 +4,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.camera import ( - STATE_IDLE, + CameraState, async_get_image, async_get_stream_source, ) @@ -23,11 +23,11 @@ async def test_doorbird_cameras( """Test the doorbird cameras.""" doorbird_entry = await doorbird_mocker() live_camera_entity_id = "camera.mydoorbird_live" - assert hass.states.get(live_camera_entity_id).state == STATE_IDLE + assert hass.states.get(live_camera_entity_id).state == CameraState.IDLE last_motion_camera_entity_id = "camera.mydoorbird_last_motion" - assert hass.states.get(last_motion_camera_entity_id).state == STATE_IDLE + assert hass.states.get(last_motion_camera_entity_id).state == CameraState.IDLE last_ring_camera_entity_id = "camera.mydoorbird_last_ring" - assert hass.states.get(last_ring_camera_entity_id).state == STATE_IDLE + assert hass.states.get(last_ring_camera_entity_id).state == CameraState.IDLE assert await async_get_stream_source(hass, live_camera_entity_id) is not None api = doorbird_entry.api api.get_image.side_effect = mock_not_found_exception() diff --git a/tests/components/doorbird/test_repairs.py b/tests/components/doorbird/test_repairs.py index 7449250b718..34e6de7516e 100644 --- a/tests/components/doorbird/test_repairs.py +++ b/tests/components/doorbird/test_repairs.py @@ -2,16 +2,7 @@ from __future__ import annotations -from http import HTTPStatus - from homeassistant.components.doorbird.const import DOMAIN -from homeassistant.components.repairs.issue_handler import ( - async_process_repairs_platforms, -) -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir @@ -20,6 +11,11 @@ from homeassistant.setup import async_setup_component from . 
import mock_not_found_exception from .conftest import DoorbirdMockerType +from tests.components.repairs import ( + async_process_repairs_platforms, + process_repair_fix_flow, + start_repair_fix_flow, +) from tests.typing import ClientSessionGenerator @@ -43,19 +39,13 @@ async def test_change_schedule_fails( await async_process_repairs_platforms(hass) client = await hass_client() - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, issue_id) flow_id = data["flow_id"] placeholders = data["description_placeholders"] assert "404" in placeholders["error"] assert data["step_id"] == "confirm" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) assert data["type"] == "create_entry" diff --git a/tests/components/dormakaba_dkey/test_config_flow.py b/tests/components/dormakaba_dkey/test_config_flow.py index 499e5844949..8d8140d609a 100644 --- a/tests/components/dormakaba_dkey/test_config_flow.py +++ b/tests/components/dormakaba_dkey/test_config_flow.py @@ -310,11 +310,7 @@ async def test_reauth(hass: HomeAssistant) -> None: data={"address": DKEY_DISCOVERY_INFO.address}, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/downloader/test_config_flow.py b/tests/components/downloader/test_config_flow.py index 132b83dffdf..6bd740afab8 100644 --- a/tests/components/downloader/test_config_flow.py +++ b/tests/components/downloader/test_config_flow.py @@ -4,9 +4,8 @@ from unittest.mock import patch import pytest -from homeassistant import config_entries from homeassistant.components.downloader.const import CONF_DOWNLOAD_DIR, DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -54,7 +53,7 @@ async def test_user_form(hass: HomeAssistant) -> None: assert result["data"] == {"download_dir": "download_dir"} -@pytest.mark.parametrize("source", [SOURCE_USER, SOURCE_IMPORT]) +@pytest.mark.parametrize("source", [SOURCE_USER]) async def test_single_instance_allowed( hass: HomeAssistant, source: str, @@ -69,40 +68,3 @@ async def test_single_instance_allowed( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" - - -async def test_import_flow_success(hass: HomeAssistant) -> None: - """Test import flow.""" - with ( - patch( - "homeassistant.components.downloader.async_setup_entry", return_value=True - ), - patch( - "os.path.isdir", - return_value=True, - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=CONFIG, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Downloader" - assert result["data"] == CONFIG - - -async def test_import_flow_directory_not_found(hass: HomeAssistant) -> None: - """Test import 
flow.""" - with patch("os.path.isdir", return_value=False): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_DOWNLOAD_DIR: "download_dir", - }, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "directory_does_not_exist" diff --git a/tests/components/downloader/test_init.py b/tests/components/downloader/test_init.py index 5832c0402b4..70dfd227019 100644 --- a/tests/components/downloader/test_init.py +++ b/tests/components/downloader/test_init.py @@ -8,9 +8,7 @@ from homeassistant.components.downloader import ( SERVICE_DOWNLOAD_FILE, ) from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -29,83 +27,3 @@ async def test_initialization(hass: HomeAssistant) -> None: assert hass.services.has_service(DOMAIN, SERVICE_DOWNLOAD_FILE) assert config_entry.state is ConfigEntryState.LOADED - - -async def test_import(hass: HomeAssistant, issue_registry: ir.IssueRegistry) -> None: - """Test the import of the downloader component.""" - with patch("os.path.isdir", return_value=True): - assert await async_setup_component( - hass, - DOMAIN, - { - DOMAIN: { - CONF_DOWNLOAD_DIR: "/test_dir", - }, - }, - ) - await hass.async_block_till_done() - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - config_entry = hass.config_entries.async_entries(DOMAIN)[0] - assert config_entry.data == {CONF_DOWNLOAD_DIR: "/test_dir"} - assert config_entry.state is ConfigEntryState.LOADED - assert hass.services.has_service(DOMAIN, SERVICE_DOWNLOAD_FILE) - assert len(issue_registry.issues) == 1 - issue = issue_registry.async_get_issue( - issue_id="deprecated_yaml_downloader", domain=HOMEASSISTANT_DOMAIN - ) - assert issue - - -async def test_import_directory_missing( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test the import of the downloader component.""" - with patch("os.path.isdir", return_value=False): - assert await async_setup_component( - hass, - DOMAIN, - { - DOMAIN: { - CONF_DOWNLOAD_DIR: "/test_dir", - }, - }, - ) - await hass.async_block_till_done() - - assert len(hass.config_entries.async_entries(DOMAIN)) == 0 - assert len(issue_registry.issues) == 1 - issue = issue_registry.async_get_issue( - issue_id="deprecated_yaml_downloader", domain=DOMAIN - ) - assert issue - - -async def test_import_already_exists( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test the import of the downloader component.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_DOWNLOAD_DIR: "/test_dir", - }, - ) - config_entry.add_to_hass(hass) - with patch("os.path.isdir", return_value=True): - assert await async_setup_component( - hass, - DOMAIN, - { - DOMAIN: { - CONF_DOWNLOAD_DIR: "/test_dir", - }, - }, - ) - await hass.async_block_till_done() - - assert len(issue_registry.issues) == 1 - issue = issue_registry.async_get_issue( - issue_id="deprecated_yaml_downloader", domain=HOMEASSISTANT_DOMAIN - ) - assert issue diff --git a/tests/components/dremel_3d_printer/conftest.py b/tests/components/dremel_3d_printer/conftest.py index 6490b844dc0..cc70537db3d 100644 --- a/tests/components/dremel_3d_printer/conftest.py +++ 
b/tests/components/dremel_3d_printer/conftest.py @@ -34,7 +34,7 @@ def connection() -> None: """Mock Dremel 3D Printer connection.""" with requests_mock.Mocker() as mock: mock.post( - f"http://{HOST}:80/command", + f"http://{HOST}/command", response_list=[ {"text": load_fixture("dremel_3d_printer/command_1.json")}, {"text": load_fixture("dremel_3d_printer/command_2.json")}, diff --git a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index c2c6d48b007..4a2951f4ed8 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -1521,6 +1521,74 @@ async def test_gas_meter_providing_energy_reading( ) +async def test_heat_meter_mbus( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: + """Test if heat meter reading is correctly parsed.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + entry_data = { + "port": "/dev/ttyUSB0", + "dsmr_version": "5", + "serial_id": "1234", + "serial_id_gas": None, + } + entry_options = { + "time_between_update": 0, + } + + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "004", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), + [ + {"value": datetime.datetime.fromtimestamp(1551642213)}, + {"value": Decimal(745.695), "unit": "GJ"}, + ], + ), + "MBUS_METER_READING", + ) + + mock_entry = MockConfigEntry( + domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options + ) + + hass.loop.set_debug(True) + mock_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving the telegram, entities need to have the chance to be created + await hass.async_block_till_done() + + # check if heat consumption is parsed correctly + heat_consumption = hass.states.get("sensor.heat_meter_energy") + assert heat_consumption.state == "745.695" + assert ( + heat_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY + ) + assert ( + heat_consumption.attributes.get("unit_of_measurement") + == UnitOfEnergy.GIGA_JOULE + ) + assert ( + heat_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + + def test_all_obis_references_exists() -> None: """Verify that all attributes exist by name in database.""" for sensor in SENSORS: diff --git a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr index c6bc616ffd3..0a46dd7f476 100644 --- a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr +++ b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'data': dict({ }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'dsmr_reader', 'entry_id': 'TEST_ENTRY_ID', 'minor_version': 1, @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'dsmr_reader', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/dte_energy_bridge/__init__.py b/tests/components/dte_energy_bridge/__init__.py deleted file mode 100644 index 615944bda88..00000000000 --- a/tests/components/dte_energy_bridge/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the 
dte_energy_bridge component.""" diff --git a/tests/components/dte_energy_bridge/test_sensor.py b/tests/components/dte_energy_bridge/test_sensor.py deleted file mode 100644 index 244bec4e270..00000000000 --- a/tests/components/dte_energy_bridge/test_sensor.py +++ /dev/null @@ -1,58 +0,0 @@ -"""The tests for the DTE Energy Bridge.""" - -import requests_mock - -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -DTE_ENERGY_BRIDGE_CONFIG = {"platform": "dte_energy_bridge", "ip": "192.168.1.1"} - - -async def test_setup_with_config(hass: HomeAssistant) -> None: - """Test the platform setup with configuration.""" - assert await async_setup_component( - hass, "sensor", {"dte_energy_bridge": DTE_ENERGY_BRIDGE_CONFIG} - ) - await hass.async_block_till_done() - - -async def test_setup_correct_reading(hass: HomeAssistant) -> None: - """Test DTE Energy bridge returns a correct value.""" - with requests_mock.Mocker() as mock_req: - mock_req.get( - "http://{}/instantaneousdemand".format(DTE_ENERGY_BRIDGE_CONFIG["ip"]), - text=".411 kW", - ) - assert await async_setup_component( - hass, "sensor", {"sensor": DTE_ENERGY_BRIDGE_CONFIG} - ) - await hass.async_block_till_done() - assert hass.states.get("sensor.current_energy_usage").state == "0.411" - - -async def test_setup_incorrect_units_reading(hass: HomeAssistant) -> None: - """Test DTE Energy bridge handles a value with incorrect units.""" - with requests_mock.Mocker() as mock_req: - mock_req.get( - "http://{}/instantaneousdemand".format(DTE_ENERGY_BRIDGE_CONFIG["ip"]), - text="411 kW", - ) - assert await async_setup_component( - hass, "sensor", {"sensor": DTE_ENERGY_BRIDGE_CONFIG} - ) - await hass.async_block_till_done() - assert hass.states.get("sensor.current_energy_usage").state == "0.411" - - -async def test_setup_bad_format_reading(hass: HomeAssistant) -> None: - """Test DTE Energy bridge handles an invalid value.""" - with requests_mock.Mocker() as mock_req: - mock_req.get( - "http://{}/instantaneousdemand".format(DTE_ENERGY_BRIDGE_CONFIG["ip"]), - text="411", - ) - assert await async_setup_component( - hass, "sensor", {"sensor": DTE_ENERGY_BRIDGE_CONFIG} - ) - await hass.async_block_till_done() - assert hass.states.get("sensor.current_energy_usage").state == "unknown" diff --git a/tests/components/duke_energy/__init__.py b/tests/components/duke_energy/__init__.py new file mode 100644 index 00000000000..2750d9d806e --- /dev/null +++ b/tests/components/duke_energy/__init__.py @@ -0,0 +1 @@ +"""Tests for the Duke Energy integration.""" diff --git a/tests/components/duke_energy/conftest.py b/tests/components/duke_energy/conftest.py new file mode 100644 index 00000000000..ed4182f450f --- /dev/null +++ b/tests/components/duke_energy/conftest.py @@ -0,0 +1,90 @@ +"""Common fixtures for the Duke Energy tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.duke_energy.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from tests.common import MockConfigEntry +from tests.typing import RecorderInstanceGenerator + + +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + 
"homeassistant.components.duke_energy.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> Generator[AsyncMock]: + """Return the default mocked config entry.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + config_entry.add_to_hass(hass) + return config_entry + + +@pytest.fixture +def mock_api() -> Generator[AsyncMock]: + """Mock a successful Duke Energy API.""" + with ( + patch( + "homeassistant.components.duke_energy.config_flow.DukeEnergy", + autospec=True, + ) as mock_api, + patch( + "homeassistant.components.duke_energy.coordinator.DukeEnergy", + new=mock_api, + ), + ): + api = mock_api.return_value + api.authenticate.return_value = { + "email": "TEST@EXAMPLE.COM", + "cdp_internal_user_id": "test-username", + } + api.get_meters.return_value = {} + yield api + + +@pytest.fixture +def mock_api_with_meters(mock_api: AsyncMock) -> AsyncMock: + """Mock a successful Duke Energy API with meters.""" + mock_api.get_meters.return_value = { + "123": { + "serialNum": "123", + "serviceType": "ELECTRIC", + "agreementActiveDate": "2000-01-01", + }, + } + mock_api.get_energy_usage.return_value = { + "data": { + dt_util.now(): { + "energy": 1.3, + "temperature": 70, + } + }, + "missing": [], + } + return mock_api diff --git a/tests/components/duke_energy/test_config_flow.py b/tests/components/duke_energy/test_config_flow.py new file mode 100644 index 00000000000..652267c9aac --- /dev/null +++ b/tests/components/duke_energy/test_config_flow.py @@ -0,0 +1,118 @@ +"""Test the Duke Energy config flow.""" + +from unittest.mock import AsyncMock, Mock + +from aiohttp import ClientError, ClientResponseError +import pytest + +from homeassistant import config_entries +from homeassistant.components.duke_energy.const import DOMAIN +from homeassistant.components.recorder import Recorder +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + + +async def test_user( + hass: HomeAssistant, + recorder_mock: Recorder, + mock_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user config.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + + # test with all provided + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "test@example.com" + + data = result.get("data") + assert data + assert data[CONF_USERNAME] == "test-username" + assert data[CONF_PASSWORD] == "test-password" + assert data[CONF_EMAIL] == "test@example.com" + + +async def test_abort_if_already_setup( + hass: HomeAssistant, + recorder_mock: Recorder, + mock_api: AsyncMock, + mock_config_entry: AsyncMock, +) -> None: + """Test we abort if the email is already setup.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + assert result + assert result.get("type") is FlowResultType.ABORT + assert 
result.get("reason") == "already_configured" + + +async def test_abort_if_already_setup_alternate_username( + hass: HomeAssistant, + recorder_mock: Recorder, + mock_api: AsyncMock, + mock_config_entry: AsyncMock, +) -> None: + """Test we abort if the email is already setup.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={ + CONF_USERNAME: "test@example.com", + CONF_PASSWORD: "test-password", + }, + ) + assert result + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (ClientResponseError(None, None, status=404), "invalid_auth"), + (ClientResponseError(None, None, status=500), "cannot_connect"), + (TimeoutError(), "cannot_connect"), + (ClientError(), "cannot_connect"), + (Exception(), "unknown"), + ], +) +async def test_api_errors( + hass: HomeAssistant, + recorder_mock: Recorder, + mock_api: Mock, + side_effect, + expected_error, +) -> None: + """Test the failure scenarios.""" + mock_api.authenticate.side_effect = side_effect + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": expected_error} + + mock_api.authenticate.side_effect = None + + # test with all provided + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.CREATE_ENTRY diff --git a/tests/components/duke_energy/test_coordinator.py b/tests/components/duke_energy/test_coordinator.py new file mode 100644 index 00000000000..77ac9e8c2bf --- /dev/null +++ b/tests/components/duke_energy/test_coordinator.py @@ -0,0 +1,44 @@ +"""Tests for the Duke Energy coordinator services.""" + +from datetime import timedelta +from unittest.mock import Mock, patch + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.recorder import Recorder +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_update( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_api_with_meters: Mock, + freezer: FrozenDateTimeFactory, + recorder_mock: Recorder, +) -> None: + """Test Coordinator.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert mock_api_with_meters.get_meters.call_count == 1 + # 3 years of data + assert mock_api_with_meters.get_energy_usage.call_count == 37 + + with patch( + "homeassistant.components.duke_energy.coordinator.get_last_statistics", + return_value={ + "duke_energy:electric_123_energy_consumption": [ + {"start": dt_util.now().timestamp()} + ] + }, + ): + freezer.tick(timedelta(hours=12)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert mock_api_with_meters.get_meters.call_count == 2 + # Now have stats, so only one call + assert mock_api_with_meters.get_energy_usage.call_count == 38 diff --git a/tests/components/dynalite/common.py b/tests/components/dynalite/common.py index 640b6b3e24f..2d48d7e7b4f 100644 --- a/tests/components/dynalite/common.py +++ b/tests/components/dynalite/common.py 
@@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, Mock, call, patch from dynalite_devices_lib.dynalitebase import DynaliteBaseDevice from homeassistant.components import dynalite -from homeassistant.const import ATTR_SERVICE +from homeassistant.const import ATTR_SERVICE, CONF_HOST from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -34,7 +34,7 @@ async def get_entry_id_from_hass(hass: HomeAssistant) -> str: async def create_entity_from_device(hass: HomeAssistant, device: DynaliteBaseDevice): """Set up the component and platform and create a light based on the device provided.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" diff --git a/tests/components/dynalite/test_bridge.py b/tests/components/dynalite/test_bridge.py index b0517b89031..ed9296ae685 100644 --- a/tests/components/dynalite/test_bridge.py +++ b/tests/components/dynalite/test_bridge.py @@ -17,6 +17,7 @@ from homeassistant.components.dynalite.const import ( ATTR_PACKET, ATTR_PRESET, ) +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -26,7 +27,7 @@ from tests.common import MockConfigEntry async def test_update_device(hass: HomeAssistant) -> None: """Test that update works.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" @@ -56,7 +57,7 @@ async def test_update_device(hass: HomeAssistant) -> None: async def test_add_devices_then_register(hass: HomeAssistant) -> None: """Test that add_devices work.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" @@ -91,7 +92,7 @@ async def test_add_devices_then_register(hass: HomeAssistant) -> None: async def test_register_then_add_devices(hass: HomeAssistant) -> None: """Test that add_devices work after register_add_entities.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" @@ -120,7 +121,7 @@ async def test_register_then_add_devices(hass: HomeAssistant) -> None: async def test_notifications(hass: HomeAssistant) -> None: """Test that update works.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" diff --git a/tests/components/dynalite/test_config_flow.py b/tests/components/dynalite/test_config_flow.py index 8bb47fd67e3..20ee42d33b5 100644 --- a/tests/components/dynalite/test_config_flow.py +++ b/tests/components/dynalite/test_config_flow.py @@ -7,11 +7,9 @@ import pytest from homeassistant import config_entries from homeassistant.components import 
dynalite from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_PORT -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -31,11 +29,8 @@ async def test_flow( exp_type, exp_result, exp_reason, - issue_registry: ir.IssueRegistry, ) -> None: """Run a flow with or without errors and return result.""" - issue = issue_registry.async_get_issue(dynalite.DOMAIN, "deprecated_yaml") - assert issue is None host = "1.2.3.4" with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", @@ -43,8 +38,8 @@ async def test_flow( ): result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={dynalite.CONF_HOST: host}, + context={"source": config_entries.SOURCE_USER}, + data={CONF_HOST: host}, ) await hass.async_block_till_done() assert result["type"] == exp_type @@ -52,51 +47,33 @@ async def test_flow( assert result["result"].state == exp_result if exp_reason: assert result["reason"] == exp_reason - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{dynalite.DOMAIN}" - ) - assert issue is not None - assert issue.issue_domain == dynalite.DOMAIN - assert issue.severity == ir.IssueSeverity.WARNING - - -async def test_deprecated( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Check that deprecation warning appears in caplog.""" - await async_setup_component( - hass, dynalite.DOMAIN, {dynalite.DOMAIN: {dynalite.CONF_HOST: "aaa"}} - ) - assert "The 'dynalite' option is deprecated" in caplog.text async def test_existing(hass: HomeAssistant) -> None: """Test when the entry exists with the same config.""" host = "1.2.3.4" - MockConfigEntry( - domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host} - ).add_to_hass(hass) + MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}).add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", return_value=True, ): result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={dynalite.CONF_HOST: host}, + context={"source": config_entries.SOURCE_USER}, + data={CONF_HOST: host}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" -async def test_existing_update(hass: HomeAssistant) -> None: +async def test_existing_abort_update(hass: HomeAssistant) -> None: """Test when the entry exists with a different config.""" host = "1.2.3.4" port1 = 7777 port2 = 8888 entry = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host, CONF_PORT: port1}, + data={CONF_HOST: host, CONF_PORT: port1}, ) entry.add_to_hass(hass) with patch( @@ -109,12 +86,12 @@ async def test_existing_update(hass: HomeAssistant) -> None: assert mock_dyn_dev().configure.mock_calls[0][1][0]["port"] == port1 result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={dynalite.CONF_HOST: host, CONF_PORT: port2}, + context={"source": config_entries.SOURCE_USER}, + data={CONF_HOST: host, CONF_PORT: port2}, ) await hass.async_block_till_done() - assert 
mock_dyn_dev().configure.call_count == 2 - assert mock_dyn_dev().configure.mock_calls[1][1][0]["port"] == port2 + assert mock_dyn_dev().configure.call_count == 1 + assert mock_dyn_dev().configure.mock_calls[0][1][0]["port"] == port1 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" @@ -123,17 +100,15 @@ async def test_two_entries(hass: HomeAssistant) -> None: """Test when two different entries exist with different hosts.""" host1 = "1.2.3.4" host2 = "5.6.7.8" - MockConfigEntry( - domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host1} - ).add_to_hass(hass) + MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host1}).add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", return_value=True, ): result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={dynalite.CONF_HOST: host2}, + context={"source": config_entries.SOURCE_USER}, + data={CONF_HOST: host2}, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].state is ConfigEntryState.LOADED @@ -172,9 +147,7 @@ async def test_setup_user(hass: HomeAssistant) -> None: async def test_setup_user_existing_host(hass: HomeAssistant) -> None: """Test that when we setup a host that is defined, we get an error.""" host = "3.4.5.6" - MockConfigEntry( - domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host} - ).add_to_hass(hass) + MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}).add_to_hass(hass) result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, context={"source": config_entries.SOURCE_USER} ) diff --git a/tests/components/dynalite/test_cover.py b/tests/components/dynalite/test_cover.py index 930318978fc..ac8dd7b676d 100644 --- a/tests/components/dynalite/test_cover.py +++ b/tests/components/dynalite/test_cover.py @@ -13,15 +13,9 @@ from homeassistant.components.cover import ( ATTR_POSITION, ATTR_TILT_POSITION, CoverDeviceClass, + CoverState, ) -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_FRIENDLY_NAME, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, -) +from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError @@ -130,16 +124,16 @@ async def test_cover_positions(hass: HomeAssistant, mock_device: Mock) -> None: """Test that the state updates in the various positions.""" update_func = await create_entity_from_device(hass, mock_device) await check_cover_position( - hass, update_func, mock_device, True, False, False, STATE_CLOSING + hass, update_func, mock_device, True, False, False, CoverState.CLOSING ) await check_cover_position( - hass, update_func, mock_device, False, True, False, STATE_OPENING + hass, update_func, mock_device, False, True, False, CoverState.OPENING ) await check_cover_position( - hass, update_func, mock_device, False, False, True, STATE_CLOSED + hass, update_func, mock_device, False, False, True, CoverState.CLOSED ) await check_cover_position( - hass, update_func, mock_device, False, False, False, STATE_OPEN + hass, update_func, mock_device, False, False, False, CoverState.OPEN ) @@ -147,12 +141,12 @@ async def test_cover_restore_state(hass: HomeAssistant, mock_device: Mock) -> No """Test restore from cache.""" mock_restore_cache( hass, - [State("cover.name", STATE_OPEN, attributes={ATTR_CURRENT_POSITION: 77})], + [State("cover.name", CoverState.OPEN, 
attributes={ATTR_CURRENT_POSITION: 77})], ) await create_entity_from_device(hass, mock_device) mock_device.init_level.assert_called_once_with(77) entity_state = hass.states.get("cover.name") - assert entity_state.state == STATE_OPEN + assert entity_state.state == CoverState.OPEN async def test_cover_restore_state_bad_cache( @@ -161,9 +155,9 @@ async def test_cover_restore_state_bad_cache( """Test restore from a cache without the attribute.""" mock_restore_cache( hass, - [State("cover.name", STATE_OPEN, attributes={"bla bla": 77})], + [State("cover.name", CoverState.OPEN, attributes={"bla bla": 77})], ) await create_entity_from_device(hass, mock_device) mock_device.init_level.assert_not_called() entity_state = hass.states.get("cover.name") - assert entity_state.state == STATE_CLOSED + assert entity_state.state == CoverState.CLOSED diff --git a/tests/components/dynalite/test_init.py b/tests/components/dynalite/test_init.py index 2c15c41e40b..4bf4eb53ad6 100644 --- a/tests/components/dynalite/test_init.py +++ b/tests/components/dynalite/test_init.py @@ -6,7 +6,7 @@ import pytest from voluptuous import MultipleInvalid import homeassistant.components.dynalite.const as dynalite -from homeassistant.const import CONF_DEFAULT, CONF_HOST, CONF_NAME, CONF_PORT, CONF_ROOM +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -20,71 +20,18 @@ async def test_empty_config(hass: HomeAssistant) -> None: assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 0 -async def test_async_setup(hass: HomeAssistant) -> None: - """Test a successful setup with all of the different options.""" - with patch( - "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", - return_value=True, - ): - assert await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - { - CONF_HOST: "1.2.3.4", - CONF_PORT: 1234, - dynalite.CONF_AUTO_DISCOVER: True, - dynalite.CONF_POLL_TIMER: 5.5, - dynalite.CONF_AREA: { - "1": { - CONF_NAME: "Name1", - dynalite.CONF_CHANNEL: {"4": {}}, - dynalite.CONF_PRESET: {"7": {}}, - dynalite.CONF_NO_DEFAULT: True, - }, - "2": {CONF_NAME: "Name2"}, - "3": { - CONF_NAME: "Name3", - dynalite.CONF_TEMPLATE: CONF_ROOM, - }, - "4": { - CONF_NAME: "Name4", - dynalite.CONF_TEMPLATE: dynalite.CONF_TIME_COVER, - }, - }, - CONF_DEFAULT: {dynalite.CONF_FADE: 2.3}, - dynalite.CONF_ACTIVE: dynalite.ACTIVE_INIT, - dynalite.CONF_PRESET: { - "5": {CONF_NAME: "pres5", dynalite.CONF_FADE: 4.5} - }, - dynalite.CONF_TEMPLATE: { - CONF_ROOM: { - dynalite.CONF_ROOM_ON: 6, - dynalite.CONF_ROOM_OFF: 7, - }, - dynalite.CONF_TIME_COVER: { - dynalite.CONF_OPEN_PRESET: 8, - dynalite.CONF_CLOSE_PRESET: 9, - dynalite.CONF_STOP_PRESET: 10, - dynalite.CONF_CHANNEL_COVER: 3, - dynalite.CONF_DURATION: 2.2, - dynalite.CONF_TILT_TIME: 3.3, - dynalite.CONF_DEVICE_CLASS: "awning", - }, - }, - } - ] - } - }, - ) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 1 - - async def test_service_request_area_preset(hass: HomeAssistant) -> None: """Test requesting and area preset via service call.""" + entry = MockConfigEntry( + domain=dynalite.DOMAIN, + data={CONF_HOST: "1.2.3.4"}, + ) + entry2 = MockConfigEntry( + domain=dynalite.DOMAIN, + data={CONF_HOST: "5.6.7.8"}, + ) + entry.add_to_hass(hass) + entry2.add_to_hass(hass) with ( patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", @@ -95,20 +42,8 @@ 
async def test_service_request_area_preset(hass: HomeAssistant) -> None: return_value=True, ) as mock_req_area_pres, ): - assert await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - {CONF_HOST: "1.2.3.4"}, - {CONF_HOST: "5.6.7.8"}, - ] - } - }, - ) + assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 2 await hass.services.async_call( dynalite.DOMAIN, "request_area_preset", @@ -160,6 +95,16 @@ async def test_service_request_area_preset(hass: HomeAssistant) -> None: async def test_service_request_channel_level(hass: HomeAssistant) -> None: """Test requesting the level of a channel via service call.""" + entry = MockConfigEntry( + domain=dynalite.DOMAIN, + data={CONF_HOST: "1.2.3.4"}, + ) + entry2 = MockConfigEntry( + domain=dynalite.DOMAIN, + data={CONF_HOST: "5.6.7.8"}, + ) + entry.add_to_hass(hass) + entry2.add_to_hass(hass) with ( patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", @@ -170,21 +115,7 @@ async def test_service_request_channel_level(hass: HomeAssistant) -> None: return_value=True, ) as mock_req_chan_lvl, ): - assert await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - { - CONF_HOST: "1.2.3.4", - dynalite.CONF_AREA: {"7": {CONF_NAME: "test"}}, - }, - {CONF_HOST: "5.6.7.8"}, - ] - } - }, - ) + assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 2 await hass.services.async_call( @@ -212,60 +143,6 @@ async def test_service_request_channel_level(hass: HomeAssistant) -> None: assert mock_req_chan_lvl.mock_calls == [call(4, 5), call(4, 5)] -async def test_async_setup_bad_config1(hass: HomeAssistant) -> None: - """Test a successful with bad config on templates.""" - with patch( - "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", - return_value=True, - ): - assert not await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - { - CONF_HOST: "1.2.3.4", - dynalite.CONF_AREA: { - "1": { - dynalite.CONF_TEMPLATE: dynalite.CONF_TIME_COVER, - CONF_NAME: "Name", - dynalite.CONF_ROOM_ON: 7, - } - }, - } - ] - } - }, - ) - await hass.async_block_till_done() - - -async def test_async_setup_bad_config2(hass: HomeAssistant) -> None: - """Test a successful with bad config on numbers.""" - host = "1.2.3.4" - with patch( - "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", - return_value=True, - ): - assert not await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - { - CONF_HOST: host, - dynalite.CONF_AREA: {"WRONG": {CONF_NAME: "Name"}}, - } - ] - } - }, - ) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 0 - - async def test_unload_entry(hass: HomeAssistant) -> None: """Test being able to unload an entry.""" host = "1.2.3.4" diff --git a/tests/components/dynalite/test_panel.py b/tests/components/dynalite/test_panel.py index 97752142f0c..a13b27e7567 100644 --- a/tests/components/dynalite/test_panel.py +++ b/tests/components/dynalite/test_panel.py @@ -4,7 +4,7 @@ from unittest.mock import patch from homeassistant.components import dynalite from homeassistant.components.cover import DEVICE_CLASSES -from homeassistant.const import CONF_PORT 
+from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -20,7 +20,7 @@ async def test_get_config( entry = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host, CONF_PORT: port}, + data={CONF_HOST: host, CONF_PORT: port}, ) entry.add_to_hass(hass) with patch( @@ -44,7 +44,7 @@ async def test_get_config( result = msg["result"] entry_id = entry.entry_id assert result == { - "config": {entry_id: {dynalite.CONF_HOST: host, CONF_PORT: port}}, + "config": {entry_id: {CONF_HOST: host, CONF_PORT: port}}, "default": { "DEFAULT_NAME": dynalite.const.DEFAULT_NAME, "DEFAULT_PORT": dynalite.const.DEFAULT_PORT, @@ -66,7 +66,7 @@ async def test_save_config( entry1 = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host1, CONF_PORT: port1}, + data={CONF_HOST: host1, CONF_PORT: port1}, ) entry1.add_to_hass(hass) with patch( @@ -77,7 +77,7 @@ async def test_save_config( await hass.async_block_till_done() entry2 = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host2, CONF_PORT: port2}, + data={CONF_HOST: host2, CONF_PORT: port2}, ) entry2.add_to_hass(hass) with patch( @@ -94,7 +94,7 @@ async def test_save_config( "id": 24, "type": "dynalite/save-config", "entry_id": entry2.entry_id, - "config": {dynalite.CONF_HOST: host3, CONF_PORT: port3}, + "config": {CONF_HOST: host3, CONF_PORT: port3}, } ) @@ -103,9 +103,9 @@ async def test_save_config( assert msg["result"] == {} existing_entry = hass.config_entries.async_get_entry(entry1.entry_id) - assert existing_entry.data == {dynalite.CONF_HOST: host1, CONF_PORT: port1} + assert existing_entry.data == {CONF_HOST: host1, CONF_PORT: port1} modified_entry = hass.config_entries.async_get_entry(entry2.entry_id) - assert modified_entry.data[dynalite.CONF_HOST] == host3 + assert modified_entry.data[CONF_HOST] == host3 assert modified_entry.data[CONF_PORT] == port3 @@ -120,7 +120,7 @@ async def test_save_config_invalid_entry( entry = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host1, CONF_PORT: port1}, + data={CONF_HOST: host1, CONF_PORT: port1}, ) entry.add_to_hass(hass) with patch( @@ -136,7 +136,7 @@ async def test_save_config_invalid_entry( "id": 24, "type": "dynalite/save-config", "entry_id": "junk", - "config": {dynalite.CONF_HOST: host2, CONF_PORT: port2}, + "config": {CONF_HOST: host2, CONF_PORT: port2}, } ) @@ -145,4 +145,4 @@ async def test_save_config_invalid_entry( assert msg["result"] == {"error": True} existing_entry = hass.config_entries.async_get_entry(entry.entry_id) - assert existing_entry.data == {dynalite.CONF_HOST: host1, CONF_PORT: port1} + assert existing_entry.data == {CONF_HOST: host1, CONF_PORT: port1} diff --git a/tests/components/easyenergy/test_init.py b/tests/components/easyenergy/test_init.py index 74293049fd1..c3c917bc9ed 100644 --- a/tests/components/easyenergy/test_init.py +++ b/tests/components/easyenergy/test_init.py @@ -4,7 +4,6 @@ from unittest.mock import MagicMock, patch from easyenergy import EasyEnergyConnectionError -from homeassistant.components.easyenergy.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -24,7 +23,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git 
a/tests/components/ecobee/common.py b/tests/components/ecobee/common.py index 423b0eee320..69d576ce2b5 100644 --- a/tests/components/ecobee/common.py +++ b/tests/components/ecobee/common.py @@ -5,14 +5,13 @@ from unittest.mock import patch from homeassistant.components.ecobee.const import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry async def setup_platform( hass: HomeAssistant, - platform: str, + platforms: str | list[str], ) -> MockConfigEntry: """Set up the ecobee platform.""" mock_entry = MockConfigEntry( @@ -25,8 +24,9 @@ async def setup_platform( ) mock_entry.add_to_hass(hass) - with patch("homeassistant.components.ecobee.const.PLATFORMS", [platform]): - assert await async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done() + platforms = [platforms] if isinstance(platforms, str) else platforms + with patch("homeassistant.components.ecobee.PLATFORMS", platforms): + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() return mock_entry diff --git a/tests/components/ecobee/fixtures/ecobee-data.json b/tests/components/ecobee/fixtures/ecobee-data.json index b2f336e064d..e0e82d68863 100644 --- a/tests/components/ecobee/fixtures/ecobee-data.json +++ b/tests/components/ecobee/fixtures/ecobee-data.json @@ -1,7 +1,7 @@ { "thermostatList": [ { - "identifier": 8675309, + "identifier": "8675309", "name": "ecobee", "modelNumber": "athenaSmart", "utcTime": "2022-01-01 10:00:00", @@ -11,13 +11,32 @@ }, "program": { "climates": [ + { + "name": "Home", + "climateRef": "home", + "sensors": [ + { + "name": "ecobee" + } + ] + }, { "name": "Climate1", - "climateRef": "c1" + "climateRef": "c1", + "sensors": [ + { + "name": "ecobee" + } + ] }, { "name": "Climate2", - "climateRef": "c2" + "climateRef": "c2", + "sensors": [ + { + "name": "ecobee" + } + ] } ], "currentClimateRef": "c1" @@ -62,6 +81,24 @@ } ], "remoteSensors": [ + { + "id": "ei:0", + "name": "ecobee", + "type": "thermostat", + "inUse": true, + "capability": [ + { + "id": "1", + "type": "temperature", + "value": "782" + }, + { + "id": "2", + "type": "humidity", + "value": "54" + } + ] + }, { "id": "rs:100", "name": "Remote Sensor 1", @@ -123,6 +160,7 @@ "hasHumidifier": true, "humidifierMode": "manual", "hasHeatPump": true, + "compressorProtectionMinTemp": 100, "humidity": "30" }, "equipmentStatus": "fan", @@ -157,6 +195,25 @@ "value": "false" } ] + }, + { + "id": "rs:101", + "name": "Remote Sensor 2", + "type": "ecobee3_remote_sensor", + "code": "VTRK", + "inUse": false, + "capability": [ + { + "id": "1", + "type": "temperature", + "value": "782" + }, + { + "id": "2", + "type": "occupancy", + "value": "false" + } + ] } ] }, diff --git a/tests/components/ecobee/test_climate.py b/tests/components/ecobee/test_climate.py index 1c9dcec0ad2..403ac4a01ad 100644 --- a/tests/components/ecobee/test_climate.py +++ b/tests/components/ecobee/test_climate.py @@ -1,26 +1,29 @@ """The test for the Ecobee thermostat module.""" -import copy from http import HTTPStatus from unittest import mock -from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant import const -from homeassistant.components import climate from homeassistant.components.climate import ClimateEntityFeature from homeassistant.components.ecobee.climate import ( - ECOBEE_AUX_HEAT_ONLY, + ATTR_PRESET_MODE, + 
ATTR_SENSOR_LIST, PRESET_AWAY_INDEFINITELY, Thermostat, ) +from homeassistant.components.ecobee.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, STATE_OFF from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr -from . import GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP from .common import setup_platform +from tests.common import MockConfigEntry, async_fire_time_changed + ENTITY_ID = "climate.ecobee" @@ -33,9 +36,18 @@ def ecobee_fixture(): "identifier": "abc", "program": { "climates": [ - {"name": "Climate1", "climateRef": "c1"}, - {"name": "Climate2", "climateRef": "c2"}, - {"name": "Away", "climateRef": "away"}, + { + "name": "Climate1", + "climateRef": "c1", + "sensors": [{"name": "Ecobee"}], + }, + { + "name": "Climate2", + "climateRef": "c2", + "sensors": [{"name": "Ecobee"}], + }, + {"name": "Away", "climateRef": "away", "sensors": [{"name": "Ecobee"}]}, + {"name": "Home", "climateRef": "home", "sensors": [{"name": "Ecobee"}]}, ], "currentClimateRef": "c1", }, @@ -68,8 +80,19 @@ def ecobee_fixture(): "endTime": "10:00:00", } ], + "remoteSensors": [ + { + "id": "ei:0", + "name": "Ecobee", + }, + { + "id": "rs2:100", + "name": "Remote Sensor 1", + }, + ], } mock_ecobee = mock.Mock() + mock_ecobee.get = mock.Mock(side_effect=vals.get) mock_ecobee.__getitem__ = mock.Mock(side_effect=vals.__getitem__) mock_ecobee.__setitem__ = mock.Mock(side_effect=vals.__setitem__) return mock_ecobee @@ -84,10 +107,10 @@ def data_fixture(ecobee_fixture): @pytest.fixture(name="thermostat") -def thermostat_fixture(data): +def thermostat_fixture(data, hass: HomeAssistant): """Set up ecobee thermostat object.""" thermostat = data.ecobee.get_thermostat(1) - return Thermostat(data, 1, thermostat) + return Thermostat(data, 1, thermostat, hass) async def test_name(thermostat) -> None: @@ -111,25 +134,6 @@ async def test_aux_heat_not_supported_by_default(hass: HomeAssistant) -> None: ) -async def test_aux_heat_supported_with_heat_pump(hass: HomeAssistant) -> None: - """Aux Heat should be supported if thermostat has heatpump.""" - mock_get_thermostat = mock.Mock() - mock_get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - with mock.patch("pyecobee.Ecobee.get_thermostat", mock_get_thermostat): - await setup_platform(hass, const.Platform.CLIMATE) - state = hass.states.get(ENTITY_ID) - assert ( - state.attributes.get(ATTR_SUPPORTED_FEATURES) - == ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - | ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.AUX_HEAT - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON - ) - - async def test_current_temperature(ecobee_fixture, thermostat) -> None: """Test current temperature.""" assert thermostat.current_temperature == 30 @@ -213,6 +217,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "heatPump2", + "available_sensors": [], + "active_sensors": [], } ecobee_fixture["equipmentStatus"] = "auxHeat2" @@ -221,6 +227,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "auxHeat2", + "available_sensors": [], + "active_sensors": [], } ecobee_fixture["equipmentStatus"] = "compCool1" @@ -229,6 +237,8 @@ async def 
test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "compCool1", + "available_sensors": [], + "active_sensors": [], } ecobee_fixture["equipmentStatus"] = "" assert thermostat.extra_state_attributes == { @@ -236,6 +246,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "", + "available_sensors": [], + "active_sensors": [], } ecobee_fixture["equipmentStatus"] = "Unknown" @@ -244,6 +256,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "Unknown", + "available_sensors": [], + "active_sensors": [], } ecobee_fixture["program"]["currentClimateRef"] = "c2" @@ -252,32 +266,11 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate2", "fan_min_on_time": 10, "equipment_running": "Unknown", + "available_sensors": [], + "active_sensors": [], } -async def test_is_aux_heat_on(hass: HomeAssistant) -> None: - """Test aux heat property is only enabled for auxHeatOnly.""" - mock_get_thermostat = mock.Mock() - mock_get_thermostat.return_value = copy.deepcopy( - GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - ) - mock_get_thermostat.return_value["settings"]["hvacMode"] = "auxHeatOnly" - with mock.patch("pyecobee.Ecobee.get_thermostat", mock_get_thermostat): - await setup_platform(hass, const.Platform.CLIMATE) - state = hass.states.get(ENTITY_ID) - assert state.attributes[climate.ATTR_AUX_HEAT] == "on" - - -async def test_is_aux_heat_off(hass: HomeAssistant) -> None: - """Test aux heat property is only enabled for auxHeatOnly.""" - mock_get_thermostat = mock.Mock() - mock_get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - with mock.patch("pyecobee.Ecobee.get_thermostat", mock_get_thermostat): - await setup_platform(hass, const.Platform.CLIMATE) - state = hass.states.get(ENTITY_ID) - assert state.attributes[climate.ATTR_AUX_HEAT] == "off" - - async def test_set_temperature(ecobee_fixture, thermostat, data) -> None: """Test set temperature.""" # Auto -> Auto @@ -400,36 +393,6 @@ async def test_set_fan_mode_auto(thermostat, data) -> None: ) -async def test_turn_aux_heat_on(hass: HomeAssistant, mock_ecobee: MagicMock) -> None: - """Test when aux heat is set on. This must change the HVAC mode.""" - mock_ecobee.get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - mock_ecobee.thermostats = [GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP] - await setup_platform(hass, const.Platform.CLIMATE) - await hass.services.async_call( - climate.DOMAIN, - climate.SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: ENTITY_ID, climate.ATTR_AUX_HEAT: True}, - blocking=True, - ) - assert mock_ecobee.set_hvac_mode.call_count == 1 - assert mock_ecobee.set_hvac_mode.call_args == mock.call(0, ECOBEE_AUX_HEAT_ONLY) - - -async def test_turn_aux_heat_off(hass: HomeAssistant, mock_ecobee: MagicMock) -> None: - """Test when aux heat is tuned off. 
Must change HVAC mode back to last used.""" - mock_ecobee.get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - mock_ecobee.thermostats = [GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP] - await setup_platform(hass, const.Platform.CLIMATE) - await hass.services.async_call( - climate.DOMAIN, - climate.SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: ENTITY_ID, climate.ATTR_AUX_HEAT: False}, - blocking=True, - ) - assert mock_ecobee.set_hvac_mode.call_count == 1 - assert mock_ecobee.set_hvac_mode.call_args == mock.call(0, "auto") - - async def test_preset_indefinite_away(ecobee_fixture, thermostat) -> None: """Test indefinite away showing correctly, and not as temporary away.""" ecobee_fixture["program"]["currentClimateRef"] = "away" @@ -455,3 +418,203 @@ async def test_set_preset_mode(ecobee_fixture, thermostat, data) -> None: data.ecobee.set_climate_hold.assert_has_calls( [mock.call(1, "away", "indefinite", thermostat.hold_hours())] ) + + +async def test_remote_sensors(hass: HomeAssistant) -> None: + """Test remote sensors.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + platform = hass.data[const.Platform.CLIMATE].entities + for entity in platform: + if entity.entity_id == "climate.ecobee": + thermostat = entity + break + + assert thermostat is not None + remote_sensors = thermostat.remote_sensors + + assert sorted(remote_sensors) == sorted(["ecobee", "Remote Sensor 1"]) + + +async def test_remote_sensor_devices( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test remote sensor devices.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + freezer.tick(100) + async_fire_time_changed(hass) + state = hass.states.get(ENTITY_ID) + device_registry = dr.async_get(hass) + for device in device_registry.devices.values(): + if device.name == "Remote Sensor 1": + remote_sensor_1_id = device.id + if device.name == "ecobee": + ecobee_id = device.id + assert sorted(state.attributes.get("available_sensors")) == sorted( + [f"Remote Sensor 1 ({remote_sensor_1_id})", f"ecobee ({ecobee_id})"] + ) + + +async def test_active_sensors_in_preset_mode(hass: HomeAssistant) -> None: + """Test active sensors in preset mode property.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + platform = hass.data[const.Platform.CLIMATE].entities + for entity in platform: + if entity.entity_id == "climate.ecobee": + thermostat = entity + break + + assert thermostat is not None + remote_sensors = thermostat.active_sensors_in_preset_mode + + assert sorted(remote_sensors) == sorted(["ecobee"]) + + +async def test_active_sensor_devices_in_preset_mode(hass: HomeAssistant) -> None: + """Test active sensor devices in preset mode.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + state = hass.states.get(ENTITY_ID) + + assert state.attributes.get("active_sensors") == ["ecobee"] + + +async def test_remote_sensor_ids_names(hass: HomeAssistant) -> None: + """Test getting ids and names_by_user for thermostat.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + platform = hass.data[const.Platform.CLIMATE].entities + for entity in platform: + if entity.entity_id == "climate.ecobee": + thermostat = entity + break + + assert thermostat is not None + + remote_sensor_ids_names = thermostat.remote_sensor_ids_names + for id_name in remote_sensor_ids_names: + assert id_name.get("id") is not None + + name_by_user_list = [item["name_by_user"] for item in 
remote_sensor_ids_names] + assert sorted(name_by_user_list) == sorted(["Remote Sensor 1", "ecobee"]) + + +async def test_set_sensors_used_in_climate(hass: HomeAssistant) -> None: + """Test set sensors used in climate.""" + # Get device_id of remote sensor from the device registry. + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + device_registry = dr.async_get(hass) + for device in device_registry.devices.values(): + if device.name == "Remote Sensor 1": + remote_sensor_1_id = device.id + if device.name == "ecobee": + ecobee_id = device.id + if device.name == "Remote Sensor 2": + remote_sensor_2_id = device.id + + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + device_from_other_integration = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, identifiers={("test", "unique")} + ) + + # Test that the function call works in its entirety. + with mock.patch("pyecobee.Ecobee.update_climate_sensors") as mock_sensors: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: [remote_sensor_1_id], + }, + blocking=True, + ) + await hass.async_block_till_done() + mock_sensors.assert_called_once_with(0, "Climate1", sensor_ids=["rs:100"]) + + # Update sensors without preset mode. + with mock.patch("pyecobee.Ecobee.update_climate_sensors") as mock_sensors: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_SENSOR_LIST: [remote_sensor_1_id], + }, + blocking=True, + ) + await hass.async_block_till_done() + # `temp` is the preset running because of a hold. + mock_sensors.assert_called_once_with(0, "temp", sensor_ids=["rs:100"]) + + # Check that sensors are not updated when the sent sensors are the currently set sensors. + with mock.patch("pyecobee.Ecobee.update_climate_sensors") as mock_sensors: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: [ecobee_id], + }, + blocking=True, + ) + mock_sensors.assert_not_called() + + # Error raised because invalid climate name. + with pytest.raises(ServiceValidationError) as execinfo: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "InvalidClimate", + ATTR_SENSOR_LIST: [remote_sensor_1_id], + }, + blocking=True, + ) + assert execinfo.value.translation_domain == "ecobee" + assert execinfo.value.translation_key == "invalid_preset" + + ## Error raised because invalid sensor. + with pytest.raises(ServiceValidationError) as execinfo: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: ["abcd"], + }, + blocking=True, + ) + assert execinfo.value.translation_domain == "ecobee" + assert execinfo.value.translation_key == "invalid_sensor" + + ## Error raised because sensor not available on device. 
+ with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: [remote_sensor_2_id], + }, + blocking=True, + ) + + with pytest.raises(ServiceValidationError) as execinfo: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: [ + remote_sensor_1_id, + device_from_other_integration.id, + ], + }, + blocking=True, + ) + assert execinfo.value.translation_domain == "ecobee" + assert execinfo.value.translation_key == "sensor_lookup_failed" diff --git a/tests/components/ecobee/test_config_flow.py b/tests/components/ecobee/test_config_flow.py index 20d3dabb1ea..5c919ffab5c 100644 --- a/tests/components/ecobee/test_config_flow.py +++ b/tests/components/ecobee/test_config_flow.py @@ -11,6 +11,7 @@ from homeassistant.components.ecobee.const import ( DATA_ECOBEE_CONFIG, DOMAIN, ) +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -20,12 +21,11 @@ from tests.common import MockConfigEntry async def test_abort_if_already_setup(hass: HomeAssistant) -> None: """Test we abort if ecobee is already setup.""" - flow = config_flow.EcobeeFlowHandler() - flow.hass = hass - MockConfigEntry(domain=DOMAIN).add_to_hass(hass) - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/ecobee/test_notify.py b/tests/components/ecobee/test_notify.py index c66f04c752a..ca5e40dbdb1 100644 --- a/tests/components/ecobee/test_notify.py +++ b/tests/components/ecobee/test_notify.py @@ -2,13 +2,11 @@ from unittest.mock import MagicMock -from homeassistant.components.ecobee import DOMAIN from homeassistant.components.notify import ( DOMAIN as NOTIFY_DOMAIN, SERVICE_SEND_MESSAGE, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir from .common import setup_platform @@ -34,24 +32,3 @@ async def test_notify_entity_service( ) await hass.async_block_till_done() mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") - - -async def test_legacy_notify_service( - hass: HomeAssistant, - mock_ecobee: MagicMock, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the legacy notify service.""" - await setup_platform(hass, NOTIFY_DOMAIN) - - assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) - await hass.services.async_call( - NOTIFY_DOMAIN, - DOMAIN, - service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, - blocking=True, - ) - await hass.async_block_till_done() - mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") - mock_ecobee.send_message.reset_mock() - assert len(issue_registry.issues) == 1 diff --git a/tests/components/ecobee/test_number.py b/tests/components/ecobee/test_number.py index da5c8135a05..be65b6dbb30 100644 --- a/tests/components/ecobee/test_number.py +++ b/tests/components/ecobee/test_number.py @@ -2,40 +2,48 @@ from unittest.mock import patch -from homeassistant.components.number import ATTR_VALUE, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as 
NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, UnitOfTime from homeassistant.core import HomeAssistant from .common import setup_platform -VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_min_time_home" -VENTILATOR_MIN_AWAY_ID = "number.ecobee_ventilator_min_time_away" +VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_minimum_time_home" +VENTILATOR_MIN_AWAY_ID = "number.ecobee_ventilator_minimum_time_away" THERMOSTAT_ID = 0 async def test_ventilator_min_on_home_attributes(hass: HomeAssistant) -> None: """Test the ventilator number on home attributes are correct.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get(VENTILATOR_MIN_HOME_ID) assert state.state == "20" assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert state.attributes.get("friendly_name") == "ecobee Ventilator min time home" + assert ( + state.attributes.get("friendly_name") == "ecobee Ventilator minimum time home" + ) assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES async def test_ventilator_min_on_away_attributes(hass: HomeAssistant) -> None: """Test the ventilator number on away attributes are correct.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get(VENTILATOR_MIN_AWAY_ID) assert state.state == "10" assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert state.attributes.get("friendly_name") == "ecobee Ventilator min time away" + assert ( + state.attributes.get("friendly_name") == "ecobee Ventilator minimum time away" + ) assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES @@ -45,10 +53,10 @@ async def test_set_min_time_home(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_min_on_time_home" ) as mock_set_min_home_time: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, NUMBER_DOMAIN) await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: VENTILATOR_MIN_HOME_ID, ATTR_VALUE: target_value}, blocking=True, @@ -63,13 +71,52 @@ async def test_set_min_time_away(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_min_on_time_away" ) as mock_set_min_away_time: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, NUMBER_DOMAIN) await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: VENTILATOR_MIN_AWAY_ID, ATTR_VALUE: target_value}, blocking=True, ) await hass.async_block_till_done() mock_set_min_away_time.assert_called_once_with(THERMOSTAT_ID, target_value) + + +COMPRESSOR_MIN_TEMP_ID = "number.ecobee2_compressor_minimum_temperature" + + +async def test_compressor_protection_min_temp_attributes(hass: HomeAssistant) -> None: + """Test the compressor min temp value is correct. + + Ecobee runs in Fahrenheit; the test rig runs in Celsius. Conversions are necessary. + """ + await setup_platform(hass, NUMBER_DOMAIN) + + state = hass.states.get(COMPRESSOR_MIN_TEMP_ID) + assert state.state == "-12.2" + assert ( + state.attributes.get("friendly_name") + == "ecobee2 Compressor minimum temperature" + ) + + +async def test_set_compressor_protection_min_temp(hass: HomeAssistant) -> None: + """Test the number can set minimum compressor operating temp. 
+ + Ecobee runs in Fahrenheit; the test rig runs in Celsius. Conversions are necessary + """ + target_value = 0 + with patch( + "homeassistant.components.ecobee.Ecobee.set_aux_cutover_threshold" + ) as mock_set_compressor_min_temp: + await setup_platform(hass, NUMBER_DOMAIN) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: COMPRESSOR_MIN_TEMP_ID, ATTR_VALUE: target_value}, + blocking=True, + ) + await hass.async_block_till_done() + mock_set_compressor_min_temp.assert_called_once_with(1, 32) diff --git a/tests/components/ecobee/test_repairs.py b/tests/components/ecobee/test_repairs.py deleted file mode 100644 index 1473f8eb3a1..00000000000 --- a/tests/components/ecobee/test_repairs.py +++ /dev/null @@ -1,114 +0,0 @@ -"""Test repairs for Ecobee integration.""" - -from http import HTTPStatus -from unittest.mock import MagicMock - -from homeassistant.components.climate import ( - ATTR_AUX_HEAT, - DOMAIN as CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, -) -from homeassistant.components.ecobee import DOMAIN -from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN -from homeassistant.components.repairs.issue_handler import ( - async_process_repairs_platforms, -) -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from .common import setup_platform - -from tests.typing import ClientSessionGenerator - -THERMOSTAT_ID = 0 - - -async def test_ecobee_notify_repair_flow( - hass: HomeAssistant, - mock_ecobee: MagicMock, - hass_client: ClientSessionGenerator, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the ecobee notify service repair flow is triggered.""" - await setup_platform(hass, NOTIFY_DOMAIN) - await async_process_repairs_platforms(hass) - - http_client = await hass_client() - - # Simulate legacy service being used - assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) - await hass.services.async_call( - NOTIFY_DOMAIN, - DOMAIN, - service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, - blocking=True, - ) - await hass.async_block_till_done() - mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") - mock_ecobee.send_message.reset_mock() - - # Assert the issue is present - assert issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_{DOMAIN}_{DOMAIN}", - ) - assert len(issue_registry.issues) == 1 - - url = RepairsFlowIndexView.url - resp = await http_client.post( - url, json={"handler": "notify", "issue_id": f"migrate_notify_{DOMAIN}_{DOMAIN}"} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await http_client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data["type"] == "create_entry" - # Test confirm step in repair flow - await hass.async_block_till_done() - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_{DOMAIN}_{DOMAIN}", - ) - assert len(issue_registry.issues) == 0 - - -async def test_ecobee_aux_heat_repair_flow( - hass: HomeAssistant, - mock_ecobee: MagicMock, - hass_client: ClientSessionGenerator, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the ecobee aux_heat 
service repair flow is triggered.""" - await setup_platform(hass, CLIMATE_DOMAIN) - await async_process_repairs_platforms(hass) - - ENTITY_ID = "climate.ecobee2" - - # Simulate legacy service being used - assert hass.services.has_service(CLIMATE_DOMAIN, SERVICE_SET_AUX_HEAT) - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_AUX_HEAT: True}, - blocking=True, - ) - - # Assert the issue is present - assert issue_registry.async_get_issue( - domain="ecobee", - issue_id="migrate_aux_heat", - ) - assert len(issue_registry.issues) == 1 diff --git a/tests/components/ecobee/test_switch.py b/tests/components/ecobee/test_switch.py index 05cea5a5e9d..b3c4c4f8296 100644 --- a/tests/components/ecobee/test_switch.py +++ b/tests/components/ecobee/test_switch.py @@ -8,7 +8,11 @@ from unittest.mock import patch import pytest from homeassistant.components.ecobee.switch import DATE_FORMAT -from homeassistant.components.switch import DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -29,7 +33,7 @@ def data_fixture(): async def test_ventilator_20min_attributes(hass: HomeAssistant) -> None: """Test the ventilator switch on home attributes are correct.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "off" @@ -42,7 +46,7 @@ async def test_ventilator_20min_when_on(hass: HomeAssistant, data) -> None: datetime.now() + timedelta(days=1) ).strftime(DATE_FORMAT) with mock.patch("pyecobee.Ecobee.get_thermostat", data): - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "on" @@ -57,7 +61,7 @@ async def test_ventilator_20min_when_off(hass: HomeAssistant, data) -> None: datetime.now() - timedelta(days=1) ).strftime(DATE_FORMAT) with mock.patch("pyecobee.Ecobee.get_thermostat", data): - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "off" @@ -70,7 +74,7 @@ async def test_ventilator_20min_when_empty(hass: HomeAssistant, data) -> None: data.return_value["settings"]["ventilatorOffDateTime"] = "" with mock.patch("pyecobee.Ecobee.get_thermostat", data): - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "off" @@ -84,10 +88,10 @@ async def test_turn_on_20min_ventilator(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_timer" ) as mock_set_20min_ventilator: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: VENTILATOR_20MIN_ID}, blocking=True, @@ -102,10 +106,10 @@ async def test_turn_off_20min_ventilator(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_timer" ) as mock_set_20min_ventilator: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: VENTILATOR_20MIN_ID}, blocking=True, @@ -114,16 +118,16 @@ async def test_turn_off_20min_ventilator(hass: 
HomeAssistant) -> None: mock_set_20min_ventilator.assert_called_once_with(THERMOSTAT_ID, False) -DEVICE_ID = "switch.ecobee2_aux_heat_only" +DEVICE_ID = "switch.ecobee2_auxiliary_heat_only" async def test_aux_heat_only_turn_on(hass: HomeAssistant) -> None: """Test the switch can be turned on.""" with patch("pyecobee.Ecobee.set_hvac_mode") as mock_turn_on: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True, @@ -134,10 +138,10 @@ async def test_aux_heat_only_turn_on(hass: HomeAssistant) -> None: async def test_aux_heat_only_turn_off(hass: HomeAssistant) -> None: """Test the switch can be turned off.""" with patch("pyecobee.Ecobee.set_hvac_mode") as mock_turn_off: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True, diff --git a/tests/components/ecovacs/snapshots/test_diagnostics.ambr b/tests/components/ecovacs/snapshots/test_diagnostics.ambr index a4291f9fe25..f9540e06038 100644 --- a/tests/components/ecovacs/snapshots/test_diagnostics.ambr +++ b/tests/components/ecovacs/snapshots/test_diagnostics.ambr @@ -8,6 +8,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'ecovacs', 'minor_version': 1, 'options': dict({ @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, @@ -59,6 +63,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'ecovacs', 'minor_version': 1, 'options': dict({ @@ -66,6 +72,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/ecovacs/snapshots/test_sensor.ambr b/tests/components/ecovacs/snapshots/test_sensor.ambr index 659edfde2cf..9c76c00b5b7 100644 --- a/tests/components/ecovacs/snapshots/test_sensor.ambr +++ b/tests/components/ecovacs/snapshots/test_sensor.ambr @@ -177,14 +177,14 @@ 'supported_features': 0, 'translation_key': 'stats_area', 'unique_id': '8516fbb1-17f1-4194-0000000_stats_area', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }) # --- # name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Goat G1 Area cleaned', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.goat_g1_area_cleaned', @@ -512,7 +512,7 @@ 'supported_features': 0, 'translation_key': 'total_stats_area', 'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_area', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }) # --- # name: test_sensors[5xu9h3][sensor.goat_g1_total_area_cleaned:state] @@ -520,7 +520,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Goat G1 Total area cleaned', 'state_class': , - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.goat_g1_total_area_cleaned', @@ -755,14 +755,14 @@ 'supported_features': 0, 'translation_key': 'stats_area', 'unique_id': 'E1234567890000000001_stats_area', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }) # --- # name: test_sensors[yna5x1][sensor.ozmo_950_area_cleaned:state] 
StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Area cleaned', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.ozmo_950_area_cleaned', @@ -1137,7 +1137,7 @@ 'supported_features': 0, 'translation_key': 'total_stats_area', 'unique_id': 'E1234567890000000001_total_stats_area', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }) # --- # name: test_sensors[yna5x1][sensor.ozmo_950_total_area_cleaned:state] @@ -1145,7 +1145,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Total area cleaned', 'state_class': , - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.ozmo_950_total_area_cleaned', diff --git a/tests/components/efergy/__init__.py b/tests/components/efergy/__init__.py index d763aaa2fb6..36efa77cf45 100644 --- a/tests/components/efergy/__init__.py +++ b/tests/components/efergy/__init__.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock, patch from pyefergy import exceptions -from homeassistant.components.efergy import DOMAIN +from homeassistant.components.efergy.const import DOMAIN from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/efergy/test_config_flow.py b/tests/components/efergy/test_config_flow.py index 9a66c42bc9a..8b77bbdc7ab 100644 --- a/tests/components/efergy/test_config_flow.py +++ b/tests/components/efergy/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch from pyefergy import exceptions from homeassistant.components.efergy.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -76,20 +76,11 @@ async def test_flow_user_unknown(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test reauth step.""" entry = create_entry(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + with _patch_efergy(), _patch_setup(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - new_conf = {CONF_API_KEY: "1234567890"} result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/eheimdigital/__init__.py b/tests/components/eheimdigital/__init__.py new file mode 100644 index 00000000000..1f608f868de --- /dev/null +++ b/tests/components/eheimdigital/__init__.py @@ -0,0 +1 @@ +"""Tests for the EHEIM Digital integration.""" diff --git a/tests/components/eheimdigital/conftest.py b/tests/components/eheimdigital/conftest.py new file mode 100644 index 00000000000..cdad628de6b --- /dev/null +++ b/tests/components/eheimdigital/conftest.py @@ -0,0 +1,58 @@ +"""Configurations for the EHEIM Digital tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from eheimdigital.classic_led_ctrl import EheimDigitalClassicLEDControl +from eheimdigital.hub import EheimDigitalHub +from eheimdigital.types import EheimDeviceType, LightMode 
+import pytest + +from homeassistant.components.eheimdigital.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "eheimdigital"}, unique_id="00:00:00:00:00:01" + ) + + +@pytest.fixture +def classic_led_ctrl_mock(): + """Mock a classicLEDcontrol device.""" + classic_led_ctrl_mock = MagicMock(spec=EheimDigitalClassicLEDControl) + classic_led_ctrl_mock.tankconfig = [["CLASSIC_DAYLIGHT"], []] + classic_led_ctrl_mock.mac_address = "00:00:00:00:00:01" + classic_led_ctrl_mock.device_type = ( + EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + classic_led_ctrl_mock.name = "Mock classicLEDcontrol+e" + classic_led_ctrl_mock.aquarium_name = "Mock Aquarium" + classic_led_ctrl_mock.light_mode = LightMode.DAYCL_MODE + classic_led_ctrl_mock.light_level = (10, 39) + return classic_led_ctrl_mock + + +@pytest.fixture +def eheimdigital_hub_mock(classic_led_ctrl_mock: MagicMock) -> Generator[AsyncMock]: + """Mock eheimdigital hub.""" + with ( + patch( + "homeassistant.components.eheimdigital.coordinator.EheimDigitalHub", + spec=EheimDigitalHub, + ) as eheimdigital_hub_mock, + patch( + "homeassistant.components.eheimdigital.config_flow.EheimDigitalHub", + new=eheimdigital_hub_mock, + ), + ): + eheimdigital_hub_mock.return_value.devices = { + "00:00:00:00:00:01": classic_led_ctrl_mock + } + eheimdigital_hub_mock.return_value.main = classic_led_ctrl_mock + yield eheimdigital_hub_mock diff --git a/tests/components/eheimdigital/snapshots/test_light.ambr b/tests/components/eheimdigital/snapshots/test_light.ambr new file mode 100644 index 00000000000..8df4745997e --- /dev/null +++ b/tests/components/eheimdigital/snapshots/test_light.ambr @@ -0,0 +1,316 @@ +# serializer version: 1 +# name: test_dynamic_new_devices[light.mock_classicledcontrol_e_channel_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 0', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_dynamic_new_devices[light.mock_classicledcontrol_e_channel_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig0][light.mock_classicledcontrol_e_channel_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 0', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig0][light.mock_classicledcontrol_e_channel_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig1][light.mock_classicledcontrol_e_channel_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 1', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig1][light.mock_classicledcontrol_e_channel_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 99, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 0', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': 
, + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 1', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 99, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/eheimdigital/test_config_flow.py b/tests/components/eheimdigital/test_config_flow.py new file mode 100644 index 00000000000..e75cf31eb98 --- /dev/null +++ b/tests/components/eheimdigital/test_config_flow.py @@ -0,0 +1,212 @@ +"""Tests the config flow of EHEIM Digital.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock, MagicMock, patch + +from aiohttp import ClientConnectionError +import pytest + +from homeassistant.components.eheimdigital.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +ZEROCONF_DISCOVERY = ZeroconfServiceInfo( + ip_address=ip_address("192.0.2.1"), + ip_addresses=[ip_address("192.0.2.1")], + hostname="eheimdigital.local.", + name="eheimdigital._http._tcp.local.", + port=80, + type="_http._tcp.local.", + properties={}, +) + +USER_INPUT = {CONF_HOST: "eheimdigital"} + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_full_flow(hass: HomeAssistant, eheimdigital_hub_mock: AsyncMock) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, 
+ ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_INPUT[CONF_HOST] + assert result["data"] == USER_INPUT + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +@pytest.mark.parametrize( + ("side_effect", "error_value"), + [(ClientConnectionError(), "cannot_connect"), (Exception(), "unknown")], +) +async def test_flow_errors( + hass: HomeAssistant, + eheimdigital_hub_mock: AsyncMock, + side_effect: BaseException, + error_value: str, +) -> None: + """Test flow errors.""" + eheimdigital_hub_mock.return_value.connect.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_value} + + eheimdigital_hub_mock.return_value.connect.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_INPUT[CONF_HOST] + assert result["data"] == USER_INPUT + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_zeroconf_flow( + hass: HomeAssistant, eheimdigital_hub_mock: AsyncMock +) -> None: + """Test zeroconf flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == ZEROCONF_DISCOVERY.host + assert result["data"] == { + CONF_HOST: ZEROCONF_DISCOVERY.host, + } + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + +@pytest.mark.parametrize( + ("side_effect", "error_value"), + [(ClientConnectionError(), "cannot_connect"), (Exception(), "unknown")], +) +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_zeroconf_flow_errors( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + side_effect: BaseException, + error_value: str, +) -> None: + """Test zeroconf flow errors.""" + eheimdigital_hub_mock.return_value.connect.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error_value 
+ + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_abort(hass: HomeAssistant, eheimdigital_hub_mock: AsyncMock) -> None: + """Test flow abort on matching data or unique_id.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_INPUT[CONF_HOST] + assert result["data"] == USER_INPUT + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + result2 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "user" + + result2 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + result3 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "user" + + result2 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + {CONF_HOST: "eheimdigital2"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" diff --git a/tests/components/eheimdigital/test_init.py b/tests/components/eheimdigital/test_init.py new file mode 100644 index 00000000000..211a8b3b6fd --- /dev/null +++ b/tests/components/eheimdigital/test_init.py @@ -0,0 +1,55 @@ +"""Tests for the init module.""" + +from unittest.mock import MagicMock + +from eheimdigital.types import EheimDeviceType + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator + + +async def test_remove_device( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test removing a device.""" + assert await async_setup_component(hass, "config", {}) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + mac_address: str = eheimdigital_hub_mock.return_value.main.mac_address + + device_entry = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, mac_address)}, + ) + assert device_entry is not None + + hass_client = await hass_ws_client(hass) + + # Do not allow to delete a connected device + response = await hass_client.remove_device( + device_entry.id, mock_config_entry.entry_id + ) + assert not response["success"] + + eheimdigital_hub_mock.return_value.devices 
= {} + + # Allow to delete a not connected device + response = await hass_client.remove_device( + device_entry.id, mock_config_entry.entry_id + ) + assert response["success"] diff --git a/tests/components/eheimdigital/test_light.py b/tests/components/eheimdigital/test_light.py new file mode 100644 index 00000000000..da224979c43 --- /dev/null +++ b/tests/components/eheimdigital/test_light.py @@ -0,0 +1,249 @@ +"""Tests for the light module.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from aiohttp import ClientError +from eheimdigital.types import EheimDeviceType, LightMode +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.eheimdigital.const import EFFECT_DAYCL_MODE +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_EFFECT, + DOMAIN as LIGHT_DOMAIN, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util.color import value_to_brightness + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.parametrize( + "tankconfig", + [ + [["CLASSIC_DAYLIGHT"], []], + [[], ["CLASSIC_DAYLIGHT"]], + [["CLASSIC_DAYLIGHT"], ["CLASSIC_DAYLIGHT"]], + ], +) +async def test_setup_classic_led_ctrl( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + tankconfig: list[list[str]], + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test light platform setup with different channels.""" + mock_config_entry.add_to_hass(hass) + + classic_led_ctrl_mock.tankconfig = tankconfig + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_dynamic_new_devices( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + classic_led_ctrl_mock: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test light platform setup with at first no devices and dynamically adding a device.""" + mock_config_entry.add_to_hass(hass) + + eheimdigital_hub_mock.return_value.devices = {} + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert ( + len( + entity_registry.entities.get_entries_for_config_entry_id( + mock_config_entry.entry_id + ) + ) + == 0 + ) + + eheimdigital_hub_mock.return_value.devices = { + "00:00:00:00:00:01": classic_led_ctrl_mock + } + + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("eheimdigital_hub_mock") +async def test_turn_off( + hass: HomeAssistant, + mock_config_entry: 
MockConfigEntry, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test turning off the light.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await mock_config_entry.runtime_data._async_device_found( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "light.mock_classicledcontrol_e_channel_0"}, + blocking=True, + ) + + classic_led_ctrl_mock.set_light_mode.assert_awaited_once_with(LightMode.MAN_MODE) + classic_led_ctrl_mock.turn_off.assert_awaited_once_with(0) + + +@pytest.mark.parametrize( + ("dim_input", "expected_dim_value"), + [ + (3, 1), + (255, 100), + (128, 50), + ], +) +async def test_turn_on_brightness( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, + dim_input: int, + expected_dim_value: int, +) -> None: + """Test turning on the light with different brightness values.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.mock_classicledcontrol_e_channel_0", + ATTR_BRIGHTNESS: dim_input, + }, + blocking=True, + ) + + classic_led_ctrl_mock.set_light_mode.assert_awaited_once_with(LightMode.MAN_MODE) + classic_led_ctrl_mock.turn_on.assert_awaited_once_with(expected_dim_value, 0) + + +async def test_turn_on_effect( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test turning on the light with an effect value.""" + mock_config_entry.add_to_hass(hass) + + classic_led_ctrl_mock.light_mode = LightMode.MAN_MODE + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.mock_classicledcontrol_e_channel_0", + ATTR_EFFECT: EFFECT_DAYCL_MODE, + }, + blocking=True, + ) + + classic_led_ctrl_mock.set_light_mode.assert_awaited_once_with(LightMode.DAYCL_MODE) + + +async def test_state_update( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test the light state update.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + classic_led_ctrl_mock.light_level = (20, 30) + + await 
eheimdigital_hub_mock.call_args.kwargs["receive_callback"]() + + assert (state := hass.states.get("light.mock_classicledcontrol_e_channel_0")) + assert state.attributes["brightness"] == value_to_brightness((1, 100), 20) + + +async def test_update_failed( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test a failed update.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + eheimdigital_hub_mock.return_value.update.side_effect = ClientError + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get("light.mock_classicledcontrol_e_channel_0").state + == STATE_UNAVAILABLE + ) diff --git a/tests/components/electric_kiwi/test_config_flow.py b/tests/components/electric_kiwi/test_config_flow.py index bf248aafb13..681320972b5 100644 --- a/tests/components/electric_kiwi/test_config_flow.py +++ b/tests/components/electric_kiwi/test_config_flow.py @@ -18,7 +18,6 @@ from homeassistant.components.electric_kiwi.const import ( OAUTH2_TOKEN, SCOPE_VALUES, ) -from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -160,16 +159,12 @@ async def test_reauthentication( setup_credentials: None, ) -> None: """Test Electric Kiwi reauthentication.""" + config_entry.add_to_hass(hass) + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": DOMAIN} - ) - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - assert "flow_id" in flows[0] - - result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) state = config_entry_oauth2_flow._encode_jwt( hass, @@ -195,6 +190,7 @@ async def test_reauthentication( ) await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py index c4d9a87b5ad..c9ed49ba13c 100644 --- a/tests/components/elevenlabs/conftest.py +++ b/tests/components/elevenlabs/conftest.py @@ -31,7 +31,7 @@ def mock_async_client() -> Generator[AsyncMock]: client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) client_mock.models.get_all.return_value = MOCK_MODELS with patch( - "elevenlabs.client.AsyncElevenLabs", return_value=client_mock + "elevenlabs.AsyncElevenLabs", return_value=client_mock ) as mock_async_client: yield mock_async_client diff --git a/tests/components/elevenlabs/test_config_flow.py b/tests/components/elevenlabs/test_config_flow.py index 853c49d48ff..971fa75939a 100644 --- a/tests/components/elevenlabs/test_config_flow.py +++ 
b/tests/components/elevenlabs/test_config_flow.py @@ -3,9 +3,20 @@ from unittest.mock import AsyncMock from homeassistant.components.elevenlabs.const import ( + CONF_CONFIGURE_VOICE, CONF_MODEL, + CONF_OPTIMIZE_LATENCY, + CONF_SIMILARITY, + CONF_STABILITY, + CONF_STYLE, + CONF_USE_SPEAKER_BOOST, CONF_VOICE, DEFAULT_MODEL, + DEFAULT_OPTIMIZE_LATENCY, + DEFAULT_SIMILARITY, + DEFAULT_STABILITY, + DEFAULT_STYLE, + DEFAULT_USE_SPEAKER_BOOST, DOMAIN, ) from homeassistant.config_entries import SOURCE_USER @@ -89,6 +100,52 @@ async def test_options_flow_init( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert mock_entry.options == {CONF_MODEL: "model1", CONF_VOICE: "voice1"} + assert mock_entry.options == { + CONF_MODEL: "model1", + CONF_VOICE: "voice1", + } mock_setup_entry.assert_called_once() + + +async def test_options_flow_voice_settings_default( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_async_client: AsyncMock, + mock_entry: MockConfigEntry, +) -> None: + """Test options flow voice settings.""" + mock_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(mock_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_MODEL: "model1", + CONF_VOICE: "voice1", + CONF_CONFIGURE_VOICE: True, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "voice_settings" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_entry.options == { + CONF_MODEL: "model1", + CONF_VOICE: "voice1", + CONF_OPTIMIZE_LATENCY: DEFAULT_OPTIMIZE_LATENCY, + CONF_SIMILARITY: DEFAULT_SIMILARITY, + CONF_STABILITY: DEFAULT_STABILITY, + CONF_STYLE: DEFAULT_STYLE, + CONF_USE_SPEAKER_BOOST: DEFAULT_USE_SPEAKER_BOOST, + } diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py index 8b14ab26487..7151aab10f2 100644 --- a/tests/components/elevenlabs/test_tts.py +++ b/tests/components/elevenlabs/test_tts.py @@ -8,19 +8,33 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from elevenlabs.core import ApiError -from elevenlabs.types import GetVoicesResponse +from elevenlabs.types import GetVoicesResponse, VoiceSettings import pytest from homeassistant.components import tts -from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE, DOMAIN +from homeassistant.components.elevenlabs.const import ( + CONF_MODEL, + CONF_OPTIMIZE_LATENCY, + CONF_SIMILARITY, + CONF_STABILITY, + CONF_STYLE, + CONF_USE_SPEAKER_BOOST, + CONF_VOICE, + DEFAULT_OPTIMIZE_LATENCY, + DEFAULT_SIMILARITY, + DEFAULT_STABILITY, + DEFAULT_STYLE, + DEFAULT_USE_SPEAKER_BOOST, + DOMAIN, +) from homeassistant.components.media_player import ( ATTR_MEDIA_CONTENT_ID, DOMAIN as DOMAIN_MP, SERVICE_PLAY_MEDIA, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, CONF_API_KEY from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core_config import async_process_ha_core_config from .const import MOCK_MODELS, MOCK_VOICES @@ -53,17 +67,32 @@ async def setup_internal_url(hass: HomeAssistant) -> None: ) +@pytest.fixture +def mock_similarity(): + """Mock 
similarity.""" + return DEFAULT_SIMILARITY / 2 + + +@pytest.fixture +def mock_latency(): + """Mock latency.""" + return (DEFAULT_OPTIMIZE_LATENCY + 1) % 5 # 0, 1, 2, 3, 4 + + @pytest.fixture(name="setup") async def setup_fixture( hass: HomeAssistant, config_data: dict[str, Any], config_options: dict[str, Any], + config_options_voice: dict[str, Any], request: pytest.FixtureRequest, mock_async_client: AsyncMock, ) -> AsyncMock: """Set up the test environment.""" if request.param == "mock_config_entry_setup": await mock_config_entry_setup(hass, config_data, config_options) + elif request.param == "mock_config_entry_setup_voice": + await mock_config_entry_setup(hass, config_data, config_options_voice) else: raise RuntimeError("Invalid setup fixture") @@ -83,6 +112,18 @@ def config_options_fixture() -> dict[str, Any]: return {} +@pytest.fixture(name="config_options_voice") +def config_options_voice_fixture(mock_similarity, mock_latency) -> dict[str, Any]: + """Return config options.""" + return { + CONF_OPTIMIZE_LATENCY: mock_latency, + CONF_SIMILARITY: mock_similarity, + CONF_STABILITY: DEFAULT_STABILITY, + CONF_STYLE: DEFAULT_STYLE, + CONF_USE_SPEAKER_BOOST: DEFAULT_USE_SPEAKER_BOOST, + } + + async def mock_config_entry_setup( hass: HomeAssistant, config_data: dict[str, Any], config_options: dict[str, Any] ) -> None: @@ -146,6 +187,12 @@ async def test_tts_service_speak( """Test tts service.""" tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) tts_entity._client.generate.reset_mock() + assert tts_entity._voice_settings == VoiceSettings( + stability=DEFAULT_STABILITY, + similarity_boost=DEFAULT_SIMILARITY, + style=DEFAULT_STYLE, + use_speaker_boost=DEFAULT_USE_SPEAKER_BOOST, + ) await hass.services.async_call( tts.DOMAIN, @@ -161,7 +208,11 @@ async def test_tts_service_speak( ) tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", voice="voice2", model="model1" + text="There is a person at the front door.", + voice="voice2", + model="model1", + voice_settings=tts_entity._voice_settings, + optimize_streaming_latency=tts_entity._latency, ) @@ -219,7 +270,11 @@ async def test_tts_service_speak_lang_config( ) tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", voice="voice1", model="model1" + text="There is a person at the front door.", + voice="voice1", + model="model1", + voice_settings=tts_entity._voice_settings, + optimize_streaming_latency=tts_entity._latency, ) @@ -266,5 +321,130 @@ async def test_tts_service_speak_error( ) tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", voice="voice1", model="model1" + text="There is a person at the front door.", + voice="voice1", + model="model1", + voice_settings=tts_entity._voice_settings, + optimize_streaming_latency=tts_entity._latency, + ) + + +@pytest.mark.parametrize( + "config_data", + [ + {}, + {tts.CONF_LANG: "de"}, + {tts.CONF_LANG: "en"}, + {tts.CONF_LANG: "ja"}, + {tts.CONF_LANG: "es"}, + ], +) +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup_voice", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice2"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak_voice_settings( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: 
ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], + mock_similarity: float, + mock_latency: int, +) -> None: + """Test tts service.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + assert tts_entity._voice_settings == VoiceSettings( + stability=DEFAULT_STABILITY, + similarity_boost=mock_similarity, + style=DEFAULT_STYLE, + use_speaker_boost=DEFAULT_USE_SPEAKER_BOOST, + ) + assert tts_entity._latency == mock_latency + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.OK + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", + voice="voice2", + model="model1", + voice_settings=tts_entity._voice_settings, + optimize_streaming_latency=tts_entity._latency, + ) + + +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_OPTIONS: {}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak_without_options( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test service call say with http response 200.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.OK + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", + voice="voice1", + optimize_streaming_latency=0, + voice_settings=VoiceSettings( + stability=0.5, similarity_boost=0.75, style=0.0, use_speaker_boost=True + ), + model="model1", ) diff --git a/tests/components/elgato/conftest.py b/tests/components/elgato/conftest.py index 73b09421576..afa89f8eb27 100644 --- a/tests/components/elgato/conftest.py +++ b/tests/components/elgato/conftest.py @@ -7,7 +7,7 @@ from elgato import BatteryInfo, ElgatoNoBatteryError, Info, Settings, State import pytest from homeassistant.components.elgato.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, get_fixture_path, load_fixture @@ -35,7 +35,6 @@ def mock_config_entry() -> MockConfigEntry: data={ CONF_HOST: "127.0.0.1", CONF_MAC: "AA:BB:CC:DD:EE:FF", - CONF_PORT: 9123, }, unique_id="CN11A1A00001", ) diff --git a/tests/components/elgato/snapshots/test_config_flow.ambr b/tests/components/elgato/snapshots/test_config_flow.ambr deleted file mode 100644 index 39202d383fa..00000000000 --- a/tests/components/elgato/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,128 +0,0 @@ -# serializer version: 1 -# name: test_full_user_flow_implementation - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': 'CN11A1A00001', - }), - 'data': dict({ - 
'host': '127.0.0.1', - 'mac': None, - 'port': 9123, - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'elgato', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'mac': None, - 'port': 9123, - }), - 'disabled_by': None, - 'domain': 'elgato', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'CN11A1A00001', - 'unique_id': 'CN11A1A00001', - 'version': 1, - }), - 'title': 'CN11A1A00001', - 'type': , - 'version': 1, - }) -# --- -# name: test_full_zeroconf_flow_implementation - FlowResultSnapshot({ - 'context': dict({ - 'confirm_only': True, - 'source': 'zeroconf', - 'unique_id': 'CN11A1A00001', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'elgato', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, - }), - 'disabled_by': None, - 'domain': 'elgato', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'zeroconf', - 'title': 'CN11A1A00001', - 'unique_id': 'CN11A1A00001', - 'version': 1, - }), - 'title': 'CN11A1A00001', - 'type': , - 'version': 1, - }) -# --- -# name: test_zeroconf_during_onboarding - FlowResultSnapshot({ - 'context': dict({ - 'source': 'zeroconf', - 'unique_id': 'CN11A1A00001', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'elgato', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, - }), - 'disabled_by': None, - 'domain': 'elgato', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'zeroconf', - 'title': 'CN11A1A00001', - 'unique_id': 'CN11A1A00001', - 'version': 1, - }), - 'title': 'CN11A1A00001', - 'type': , - 'version': 1, - }) -# --- diff --git a/tests/components/elgato/snapshots/test_light.ambr b/tests/components/elgato/snapshots/test_light.ambr index c3ab076ded2..4bb4644ab86 100644 --- a/tests/components/elgato/snapshots/test_light.ambr +++ b/tests/components/elgato/snapshots/test_light.ambr @@ -11,13 +11,13 @@ 27.316, 47.743, ), - 'max_color_temp_kelvin': 6993, + 'max_color_temp_kelvin': 7000, 'max_mireds': 344, - 'min_color_temp_kelvin': 2906, - 'min_mireds': 143, + 'min_color_temp_kelvin': 2900, + 'min_mireds': 142, 'rgb_color': tuple( 255, - 188, + 189, 133, ), 'supported_color_modes': list([ @@ -25,8 +25,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.465, - 0.376, + 0.464, + 0.377, ), }), 'context': , @@ -43,10 +43,10 @@ }), 'area_id': None, 'capabilities': dict({ - 'max_color_temp_kelvin': 6993, + 'max_color_temp_kelvin': 7000, 'max_mireds': 344, - 'min_color_temp_kelvin': 2906, - 'min_mireds': 143, + 'min_color_temp_kelvin': 2900, + 'min_mireds': 142, 'supported_color_modes': list([ , ]), @@ -126,13 +126,13 @@ 27.316, 47.743, ), - 'max_color_temp_kelvin': 6535, + 'max_color_temp_kelvin': 6500, 'max_mireds': 285, - 'min_color_temp_kelvin': 
3508, + 'min_color_temp_kelvin': 3500, 'min_mireds': 153, 'rgb_color': tuple( 255, - 188, + 189, 133, ), 'supported_color_modes': list([ @@ -141,8 +141,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.465, - 0.376, + 0.464, + 0.377, ), }), 'context': , @@ -159,9 +159,9 @@ }), 'area_id': None, 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, + 'max_color_temp_kelvin': 6500, 'max_mireds': 285, - 'min_color_temp_kelvin': 3508, + 'min_color_temp_kelvin': 3500, 'min_mireds': 153, 'supported_color_modes': list([ , @@ -243,13 +243,13 @@ 358.0, 6.0, ), - 'max_color_temp_kelvin': 6535, + 'max_color_temp_kelvin': 6500, 'max_mireds': 285, - 'min_color_temp_kelvin': 3508, + 'min_color_temp_kelvin': 3500, 'min_mireds': 153, 'rgb_color': tuple( 255, - 239, + 240, 240, ), 'supported_color_modes': list([ @@ -258,8 +258,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.34, - 0.327, + 0.339, + 0.328, ), }), 'context': , @@ -276,9 +276,9 @@ }), 'area_id': None, 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, + 'max_color_temp_kelvin': 6500, 'max_mireds': 285, - 'min_color_temp_kelvin': 3508, + 'min_color_temp_kelvin': 3500, 'min_mireds': 153, 'supported_color_modes': list([ , diff --git a/tests/components/elgato/test_config_flow.py b/tests/components/elgato/test_config_flow.py index 6da99241b64..00763f60458 100644 --- a/tests/components/elgato/test_config_flow.py +++ b/tests/components/elgato/test_config_flow.py @@ -5,12 +5,11 @@ from unittest.mock import AsyncMock, MagicMock from elgato import ElgatoConnectionError import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant.components import zeroconf from homeassistant.components.elgato.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE +from homeassistant.const import CONF_HOST, CONF_MAC, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,7 +20,6 @@ async def test_full_user_flow_implementation( hass: HomeAssistant, mock_elgato: MagicMock, mock_setup_entry: AsyncMock, - snapshot: SnapshotAssertion, ) -> None: """Test the full manual user flow from start to finish.""" result = await hass.config_entries.flow.async_init( @@ -29,15 +27,22 @@ async def test_full_user_flow_implementation( context={"source": SOURCE_USER}, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: "127.0.0.1", CONF_PORT: 9123} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_HOST: "127.0.0.1"} ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_MAC: None, + } + assert not config_entry.options assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_elgato.info.mock_calls) == 1 @@ -47,7 +52,6 @@ async def test_full_zeroconf_flow_implementation( hass: HomeAssistant, mock_elgato: MagicMock, mock_setup_entry: AsyncMock, - snapshot: SnapshotAssertion, ) -> None: """Test the zeroconf flow from start to finish.""" result = await 
hass.config_entries.flow.async_init( @@ -64,9 +68,9 @@ async def test_full_zeroconf_flow_implementation( ), ) - assert result.get("description_placeholders") == {"serial_number": "CN11A1A00001"} - assert result.get("step_id") == "zeroconf_confirm" - assert result.get("type") is FlowResultType.FORM + assert result["description_placeholders"] == {"serial_number": "CN11A1A00001"} + assert result["step_id"] == "zeroconf_confirm" + assert result["type"] is FlowResultType.FORM progress = hass.config_entries.flow.async_progress() assert len(progress) == 1 @@ -74,12 +78,19 @@ async def test_full_zeroconf_flow_implementation( assert "context" in progress[0] assert progress[0]["context"].get("confirm_only") is True - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_MAC: "AA:BB:CC:DD:EE:FF", + } + assert not config_entry.options assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_elgato.info.mock_calls) == 1 @@ -94,12 +105,31 @@ async def test_connection_error( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "127.0.0.1", CONF_PORT: 9123}, + data={CONF_HOST: "127.0.0.1"}, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("errors") == {"base": "cannot_connect"} - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + assert result["step_id"] == "user" + + # Recover from error + mock_elgato.info.side_effect = None + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: "127.0.0.2"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_MAC: None, + } + assert not config_entry.options async def test_zeroconf_connection_error( @@ -122,8 +152,8 @@ async def test_zeroconf_connection_error( ), ) - assert result.get("reason") == "cannot_connect" - assert result.get("type") is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + assert result["type"] is FlowResultType.ABORT @pytest.mark.usefixtures("mock_elgato") @@ -135,11 +165,11 @@ async def test_user_device_exists_abort( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "127.0.0.1", CONF_PORT: 9123}, + data={CONF_HOST: "127.0.0.1"}, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" @pytest.mark.usefixtures("mock_elgato") @@ -162,8 +192,8 @@ async def test_zeroconf_device_exists_abort( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" entries = hass.config_entries.async_entries(DOMAIN) assert entries[0].data[CONF_HOST] == "127.0.0.1" @@ -183,8 +213,8 @@ async def 
test_zeroconf_device_exists_abort( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" entries = hass.config_entries.async_entries(DOMAIN) assert entries[0].data[CONF_HOST] == "127.0.0.2" @@ -195,7 +225,6 @@ async def test_zeroconf_during_onboarding( mock_elgato: MagicMock, mock_setup_entry: AsyncMock, mock_onboarding: MagicMock, - snapshot: SnapshotAssertion, ) -> None: """Test the zeroconf creates an entry during onboarding.""" result = await hass.config_entries.flow.async_init( @@ -212,8 +241,15 @@ async def test_zeroconf_during_onboarding( ), ) - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_MAC: "AA:BB:CC:DD:EE:FF", + } + assert not config_entry.options assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_elgato.info.mock_calls) == 1 diff --git a/tests/components/elgato/test_light.py b/tests/components/elgato/test_light.py index 40c0232c2b3..43fad1faa77 100644 --- a/tests/components/elgato/test_light.py +++ b/tests/components/elgato/test_light.py @@ -9,7 +9,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.elgato.const import DOMAIN, SERVICE_IDENTIFY from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, DOMAIN as LIGHT_DOMAIN, ) @@ -74,7 +74,7 @@ async def test_light_change_state_temperature( { ATTR_ENTITY_ID: "light.frenck", ATTR_BRIGHTNESS: 255, - ATTR_COLOR_TEMP: 100, + ATTR_COLOR_TEMP_KELVIN: 10000, }, blocking=True, ) diff --git a/tests/components/elmax/conftest.py b/tests/components/elmax/conftest.py index f92fc2f1827..f8cf33ffe1a 100644 --- a/tests/components/elmax/conftest.py +++ b/tests/components/elmax/conftest.py @@ -1,6 +1,7 @@ """Configuration for Elmax tests.""" from collections.abc import Generator +from datetime import datetime, timedelta import json from unittest.mock import AsyncMock, patch @@ -11,6 +12,7 @@ from elmax_api.constants import ( ENDPOINT_LOGIN, ) from httpx import Response +import jwt import pytest import respx @@ -64,9 +66,20 @@ def httpx_mock_direct_fixture() -> Generator[respx.MockRouter]: ) as respx_mock: # Mock Login POST. login_route = respx_mock.post(f"/api/v2/{ENDPOINT_LOGIN}", name="login") - login_route.return_value = Response( - 200, json=json.loads(load_fixture("direct/login.json", "elmax")) + + login_json = json.loads(load_fixture("direct/login.json", "elmax")) + decoded_jwt = jwt.decode_complete( + login_json["token"].split(" ")[1], + algorithms="HS256", + options={"verify_signature": False}, ) + expiration = datetime.now() + timedelta(hours=1) + decoded_jwt["payload"]["exp"] = int(expiration.timestamp()) + jws_string = jwt.encode( + payload=decoded_jwt["payload"], algorithm="HS256", key="" + ) + login_json["token"] = f"JWT {jws_string}" + login_route.return_value = Response(200, json=login_json) # Mock Device list GET. 
list_devices_route = respx_mock.get( diff --git a/tests/components/elmax/test_config_flow.py b/tests/components/elmax/test_config_flow.py index 85e14dd0a3f..7a4d9755fa5 100644 --- a/tests/components/elmax/test_config_flow.py +++ b/tests/components/elmax/test_config_flow.py @@ -21,7 +21,6 @@ from homeassistant.components.elmax.const import ( CONF_ELMAX_USERNAME, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -544,20 +543,7 @@ async def test_show_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -577,24 +563,11 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.elmax.async_setup_entry", return_value=True, ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -624,24 +597,11 @@ async def test_reauth_panel_disappeared(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.list_control_panels", return_value=[], ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -670,24 +630,11 @@ async def test_reauth_invalid_pin(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.get_panel_status", side_effect=ElmaxBadPinError(), ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -716,24 +663,11 @@ async def test_reauth_bad_login(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.login", side_effect=ElmaxBadLoginError(), ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - 
"source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { diff --git a/tests/components/emoncms/__init__.py b/tests/components/emoncms/__init__.py index ecf3c54e9ed..59dc4fa08e1 100644 --- a/tests/components/emoncms/__init__.py +++ b/tests/components/emoncms/__init__.py @@ -1 +1,12 @@ """Tests for the emoncms component.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Set up the integration.""" + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/emoncms/conftest.py b/tests/components/emoncms/conftest.py index 500fff228e9..4bd1d68217a 100644 --- a/tests/components/emoncms/conftest.py +++ b/tests/components/emoncms/conftest.py @@ -1,10 +1,23 @@ """Fixtures for emoncms integration tests.""" -from collections.abc import AsyncGenerator +from collections.abc import AsyncGenerator, Generator +import copy from unittest.mock import AsyncMock, patch import pytest +from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN +from homeassistant.const import ( + CONF_API_KEY, + CONF_ID, + CONF_PLATFORM, + CONF_URL, + CONF_VALUE_TEMPLATE, +) +from homeassistant.helpers.typing import ConfigType + +from tests.common import MockConfigEntry + UNITS = ["kWh", "Wh", "W", "V", "A", "VA", "°C", "°F", "K", "Hz", "hPa", ""] @@ -29,19 +42,121 @@ FEEDS = [get_feed(i + 1, unit=unit) for i, unit in enumerate(UNITS)] EMONCMS_FAILURE = {"success": False, "message": "failure"} +FLOW_RESULT = { + CONF_API_KEY: "my_api_key", + CONF_ONLY_INCLUDE_FEEDID: [str(i + 1) for i in range(len(UNITS))], + CONF_URL: "http://1.1.1.1", +} + +SENSOR_NAME = "emoncms@1.1.1.1" + +YAML_BASE = { + CONF_PLATFORM: "emoncms", + CONF_API_KEY: "my_api_key", + CONF_ID: 1, + CONF_URL: "http://1.1.1.1", +} + +YAML = { + **YAML_BASE, + CONF_ONLY_INCLUDE_FEEDID: [1], +} + + +@pytest.fixture +def emoncms_yaml_config() -> ConfigType: + """Mock emoncms yaml configuration.""" + return {"sensor": YAML} + + +@pytest.fixture +def emoncms_yaml_config_with_template() -> ConfigType: + """Mock emoncms yaml conf with template parameter.""" + return {"sensor": {**YAML, CONF_VALUE_TEMPLATE: "{{ value | float + 1500 }}"}} + + +@pytest.fixture +def emoncms_yaml_config_no_include_only_feed_id() -> ConfigType: + """Mock emoncms yaml configuration without include_only_feed_id parameter.""" + return {"sensor": YAML_BASE} + + +@pytest.fixture +def config_entry() -> MockConfigEntry: + """Mock emoncms config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=SENSOR_NAME, + data=FLOW_RESULT, + ) + + +FLOW_RESULT_SECOND_URL = copy.deepcopy(FLOW_RESULT) +FLOW_RESULT_SECOND_URL[CONF_URL] = "http://1.1.1.2" + + +@pytest.fixture +def config_entry_unique_id() -> MockConfigEntry: + """Mock emoncms config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=SENSOR_NAME, + data=FLOW_RESULT_SECOND_URL, + unique_id="123-53535292", + ) + + +FLOW_RESULT_NO_FEED = copy.deepcopy(FLOW_RESULT) +FLOW_RESULT_NO_FEED[CONF_ONLY_INCLUDE_FEEDID] = None + + +@pytest.fixture +def config_no_feed() -> MockConfigEntry: + """Mock emoncms 
config entry with no feed selected.""" + return MockConfigEntry( + domain=DOMAIN, + title=SENSOR_NAME, + data=FLOW_RESULT_NO_FEED, + ) + + +FLOW_RESULT_SINGLE_FEED = copy.deepcopy(FLOW_RESULT) +FLOW_RESULT_SINGLE_FEED[CONF_ONLY_INCLUDE_FEEDID] = ["1"] + + +@pytest.fixture +def config_single_feed() -> MockConfigEntry: + """Mock emoncms config entry with a single feed exposed.""" + return MockConfigEntry( + domain=DOMAIN, + title=SENSOR_NAME, + data=FLOW_RESULT_SINGLE_FEED, + entry_id="XXXXXXXX", + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.emoncms.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + @pytest.fixture async def emoncms_client() -> AsyncGenerator[AsyncMock]: """Mock pyemoncms success response.""" with ( patch( - "homeassistant.components.emoncms.sensor.EmoncmsClient", autospec=True + "homeassistant.components.emoncms.EmoncmsClient", autospec=True ) as mock_client, patch( - "homeassistant.components.emoncms.coordinator.EmoncmsClient", + "homeassistant.components.emoncms.config_flow.EmoncmsClient", new=mock_client, ), ): client = mock_client.return_value client.async_request.return_value = {"success": True, "message": FEEDS} + client.async_get_uuid.return_value = "123-53535292" yield client diff --git a/tests/components/emoncms/snapshots/test_sensor.ambr b/tests/components/emoncms/snapshots/test_sensor.ambr index 62c85aaba01..210196ce414 100644 --- a/tests/components/emoncms/snapshots/test_sensor.ambr +++ b/tests/components/emoncms/snapshots/test_sensor.ambr @@ -1,5 +1,40 @@ # serializer version: 1 -# name: test_coordinator_update[sensor.emoncms_parameter_1] +# name: test_coordinator_update[sensor.temperature_tag_parameter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.temperature_tag_parameter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature tag parameter 1', + 'platform': 'emoncms', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature', + 'unique_id': '123-53535292-1', + 'unit_of_measurement': , + }) +# --- +# name: test_coordinator_update[sensor.temperature_tag_parameter_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'FeedId': '1', @@ -10,12 +45,12 @@ 'Tag': 'tag', 'UserId': '1', 'device_class': 'temperature', - 'friendly_name': 'EmonCMS parameter 1', + 'friendly_name': 'Temperature tag parameter 1', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.emoncms_parameter_1', + 'entity_id': 'sensor.temperature_tag_parameter_1', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/emoncms/test_config_flow.py b/tests/components/emoncms/test_config_flow.py new file mode 100644 index 00000000000..1914f23fb0b --- /dev/null +++ b/tests/components/emoncms/test_config_flow.py @@ -0,0 +1,161 @@ +"""Test emoncms config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER 
+from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . import setup_integration +from .conftest import EMONCMS_FAILURE, FLOW_RESULT_SINGLE_FEED, SENSOR_NAME, YAML + +from tests.common import MockConfigEntry + + +async def test_flow_import_include_feeds( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, +) -> None: + """YAML import with included feed - success test.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=YAML, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == SENSOR_NAME + assert result["data"] == FLOW_RESULT_SINGLE_FEED + + +async def test_flow_import_failure( + hass: HomeAssistant, + emoncms_client: AsyncMock, +) -> None: + """YAML import - failure test.""" + emoncms_client.async_request.return_value = EMONCMS_FAILURE + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=YAML, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "api_error" + + +async def test_flow_import_already_configured( + hass: HomeAssistant, + config_entry: MockConfigEntry, + emoncms_client: AsyncMock, +) -> None: + """Test we abort import data set when entry is already configured.""" + config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=YAML, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +USER_INPUT = { + CONF_URL: "http://1.1.1.1", + CONF_API_KEY: "my_api_key", +} + + +async def test_user_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, +) -> None: + """Test we get the user form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ONLY_INCLUDE_FEEDID: ["1"]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == SENSOR_NAME + assert result["data"] == {**USER_INPUT, CONF_ONLY_INCLUDE_FEEDID: ["1"]} + assert len(mock_setup_entry.mock_calls) == 1 + + +CONFIG_ENTRY = { + CONF_API_KEY: "my_api_key", + CONF_ONLY_INCLUDE_FEEDID: ["1"], + CONF_URL: "http://1.1.1.1", +} + + +async def test_options_flow( + hass: HomeAssistant, + emoncms_client: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Options flow - success test.""" + await setup_integration(hass, config_entry) + assert config_entry.options == {} + result = await hass.config_entries.options.async_init(config_entry.entry_id) + await hass.async_block_till_done() + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_ONLY_INCLUDE_FEEDID: ["1"], + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert config_entry.options == { + CONF_ONLY_INCLUDE_FEEDID: ["1"], + } + + +async def test_options_flow_failure( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Options flow - test failure.""" + 
emoncms_client.async_request.return_value = EMONCMS_FAILURE + await setup_integration(hass, config_entry) + result = await hass.config_entries.options.async_init(config_entry.entry_id) + await hass.async_block_till_done() + assert result["errors"]["base"] == "api_error" + assert result["description_placeholders"]["details"] == "failure" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + +async def test_unique_id_exists( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, + config_entry_unique_id: MockConfigEntry, +) -> None: + """Test when entry with same unique id already exists.""" + config_entry_unique_id.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], USER_INPUT + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/emoncms/test_init.py b/tests/components/emoncms/test_init.py new file mode 100644 index 00000000000..abe1a020034 --- /dev/null +++ b/tests/components/emoncms/test_init.py @@ -0,0 +1,89 @@ +"""Test Emoncms component setup process.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant.components.emoncms.const import DOMAIN, FEED_ID, FEED_NAME +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir + +from . import setup_integration +from .conftest import EMONCMS_FAILURE, FEEDS + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, + emoncms_client: AsyncMock, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, config_entry) + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + emoncms_client: AsyncMock, +) -> None: + """Test load failure.""" + emoncms_client.async_request.return_value = EMONCMS_FAILURE + config_entry.add_to_hass(hass) + assert not await hass.config_entries.async_setup(config_entry.entry_id) + + +async def test_migrate_uuid( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test migration from home assistant uuid to emoncms uuid.""" + config_entry.add_to_hass(hass) + assert config_entry.unique_id is None + for _, feed in enumerate(FEEDS): + entity_registry.async_get_or_create( + Platform.SENSOR, + DOMAIN, + f"{config_entry.entry_id}-{feed[FEED_ID]}", + config_entry=config_entry, + suggested_object_id=f"{DOMAIN}_{feed[FEED_NAME]}", + ) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + emoncms_uuid = emoncms_client.async_get_uuid.return_value + assert config_entry.unique_id == emoncms_uuid + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + + for nb, feed in enumerate(FEEDS): + assert entity_entries[nb].unique_id == f"{emoncms_uuid}-{feed[FEED_ID]}" + assert ( + entity_entries[nb].previous_unique_id + == 
f"{config_entry.entry_id}-{feed[FEED_ID]}" + ) + + +async def test_no_uuid( + hass: HomeAssistant, + config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test an issue is created when the emoncms server does not ship an uuid.""" + emoncms_client.async_get_uuid.return_value = None + await setup_integration(hass, config_entry) + + assert issue_registry.async_get_issue(domain=DOMAIN, issue_id="migrate database") diff --git a/tests/components/emoncms/test_sensor.py b/tests/components/emoncms/test_sensor.py index a039239077e..a7bc8059287 100644 --- a/tests/components/emoncms/test_sensor.py +++ b/tests/components/emoncms/test_sensor.py @@ -1,54 +1,112 @@ """Test emoncms sensor.""" -from typing import Any from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN +from homeassistant.components.emoncms.const import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_PLATFORM, CONF_URL -from homeassistant.core import HomeAssistant +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component -from .conftest import EMONCMS_FAILURE, FEEDS, get_feed +from . import setup_integration +from .conftest import EMONCMS_FAILURE, get_feed -from tests.common import async_fire_time_changed - -YAML = { - CONF_PLATFORM: "emoncms", - CONF_API_KEY: "my_api_key", - CONF_ID: 1, - CONF_URL: "http://1.1.1.1", - CONF_ONLY_INCLUDE_FEEDID: [1, 2], - "scan_interval": 30, -} +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -@pytest.fixture -def emoncms_yaml_config() -> ConfigType: - """Mock emoncms configuration from yaml.""" - return {"sensor": YAML} +async def test_deprecated_yaml( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + emoncms_yaml_config: ConfigType, + emoncms_client: AsyncMock, +) -> None: + """Test an issue is created when we import from yaml config.""" + + await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config) + await hass.async_block_till_done() + + assert issue_registry.async_get_issue( + domain=HOMEASSISTANT_DOMAIN, issue_id=f"deprecated_yaml_{DOMAIN}" + ) -def get_entity_ids(feeds: list[dict[str, Any]]) -> list[str]: - """Get emoncms entity ids.""" - return [ - f"{SENSOR_DOMAIN}.{DOMAIN}_{feed["name"].replace(' ', '_')}" for feed in feeds - ] +async def test_yaml_with_template( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + emoncms_yaml_config_with_template: ConfigType, + emoncms_client: AsyncMock, +) -> None: + """Test an issue is created when we import a yaml config with a value_template parameter.""" + + await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config_with_template) + await hass.async_block_till_done() + + assert issue_registry.async_get_issue( + domain=DOMAIN, issue_id=f"remove_value_template_{DOMAIN}" + ) -def get_feeds(nbs: list[int]) -> list[dict[str, Any]]: - """Get feeds.""" - return [feed for feed in FEEDS if feed["id"] in str(nbs)] +async def test_yaml_no_include_only_feed_id( + hass: HomeAssistant, + issue_registry: 
ir.IssueRegistry, + emoncms_yaml_config_no_include_only_feed_id: ConfigType, + emoncms_client: AsyncMock, +) -> None: + """Test an issue is created when we import a yaml config without a include_only_feed_id parameter.""" + + await async_setup_component( + hass, SENSOR_DOMAIN, emoncms_yaml_config_no_include_only_feed_id + ) + await hass.async_block_till_done() + + assert issue_registry.async_get_issue( + domain=DOMAIN, issue_id=f"missing_include_only_feed_id_{DOMAIN}" + ) + + +async def test_no_feed_selected( + hass: HomeAssistant, + config_no_feed: MockConfigEntry, + entity_registry: er.EntityRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test with no feed selected.""" + await setup_integration(hass, config_no_feed) + + assert config_no_feed.state is ConfigEntryState.LOADED + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_no_feed.entry_id + ) + assert entity_entries == [] + + +async def test_no_feed_broadcast( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test with no feed broadcasted.""" + emoncms_client.async_request.return_value = {"success": True, "message": []} + await setup_integration(hass, config_entry) + + assert config_entry.state is ConfigEntryState.LOADED + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + assert entity_entries == [] async def test_coordinator_update( hass: HomeAssistant, - emoncms_yaml_config: ConfigType, + config_single_feed: MockConfigEntry, + entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, emoncms_client: AsyncMock, caplog: pytest.LogCaptureFixture, @@ -59,12 +117,11 @@ async def test_coordinator_update( "success": True, "message": [get_feed(1, unit="°C")], } - await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config) - await hass.async_block_till_done() - feeds = get_feeds([1]) - for entity_id in get_entity_ids(feeds): - state = hass.states.get(entity_id) - assert state == snapshot(name=entity_id) + await setup_integration(hass, config_single_feed) + + await snapshot_platform( + hass, entity_registry, snapshot, config_single_feed.entry_id + ) async def skip_time() -> None: freezer.tick(60) @@ -78,8 +135,12 @@ async def test_coordinator_update( await skip_time() - for entity_id in get_entity_ids(feeds): - state = hass.states.get(entity_id) + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_single_feed.entry_id + ) + + for entity_entry in entity_entries: + state = hass.states.get(entity_entry.entity_id) assert state.attributes["LastUpdated"] == 1665509670 assert state.state == "24.04" diff --git a/tests/components/emulated_hue/test_hue_api.py b/tests/components/emulated_hue/test_hue_api.py index 28e269fdaeb..8a340d5e2dd 100644 --- a/tests/components/emulated_hue/test_hue_api.py +++ b/tests/components/emulated_hue/test_hue_api.py @@ -793,7 +793,10 @@ async def test_put_light_state( await hass_hue.services.async_call( light.DOMAIN, const.SERVICE_TURN_ON, - {const.ATTR_ENTITY_ID: "light.ceiling_lights", light.ATTR_COLOR_TEMP: 20}, + { + const.ATTR_ENTITY_ID: "light.ceiling_lights", + light.ATTR_COLOR_TEMP_KELVIN: 50000, + }, blocking=True, ) @@ -802,8 +805,10 @@ async def test_put_light_state( ) assert ( - hass_hue.states.get("light.ceiling_lights").attributes[light.ATTR_COLOR_TEMP] - == 50 + hass_hue.states.get("light.ceiling_lights").attributes[ + light.ATTR_COLOR_TEMP_KELVIN + ] + == 20000 ) # mock light.turn_on call @@ 
-1248,9 +1253,7 @@ async def test_proper_put_state_request(hue_client: TestClient) -> None: """Test the request to set the state.""" # Test proper on value parsing result = await hue_client.put( - "/api/username/lights/{}/state".format( - ENTITY_NUMBERS_BY_ID["light.ceiling_lights"] - ), + f"/api/username/lights/{ENTITY_NUMBERS_BY_ID['light.ceiling_lights']}/state", data=json.dumps({HUE_API_STATE_ON: 1234}), ) @@ -1258,9 +1261,7 @@ async def test_proper_put_state_request(hue_client: TestClient) -> None: # Test proper brightness value parsing result = await hue_client.put( - "/api/username/lights/{}/state".format( - ENTITY_NUMBERS_BY_ID["light.ceiling_lights"] - ), + f"/api/username/lights/{ENTITY_NUMBERS_BY_ID['light.ceiling_lights']}/state", data=json.dumps({HUE_API_STATE_ON: True, HUE_API_STATE_BRI: "Hello world!"}), ) @@ -1789,7 +1790,7 @@ async def test_get_light_state_when_none( light.ATTR_BRIGHTNESS: None, light.ATTR_RGB_COLOR: None, light.ATTR_HS_COLOR: None, - light.ATTR_COLOR_TEMP: None, + light.ATTR_COLOR_TEMP_KELVIN: None, light.ATTR_XY_COLOR: None, light.ATTR_SUPPORTED_COLOR_MODES: [ light.COLOR_MODE_COLOR_TEMP, @@ -1817,7 +1818,7 @@ async def test_get_light_state_when_none( light.ATTR_BRIGHTNESS: None, light.ATTR_RGB_COLOR: None, light.ATTR_HS_COLOR: None, - light.ATTR_COLOR_TEMP: None, + light.ATTR_COLOR_TEMP_KELVIN: None, light.ATTR_XY_COLOR: None, light.ATTR_SUPPORTED_COLOR_MODES: [ light.COLOR_MODE_COLOR_TEMP, diff --git a/tests/components/energyzero/snapshots/test_config_flow.ambr b/tests/components/energyzero/snapshots/test_config_flow.ambr index 9b4b3bfc635..88b0af6dc7b 100644 --- a/tests/components/energyzero/snapshots/test_config_flow.ambr +++ b/tests/components/energyzero/snapshots/test_config_flow.ambr @@ -18,6 +18,8 @@ 'data': dict({ }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'energyzero', 'entry_id': , 'minor_version': 1, @@ -26,10 +28,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'EnergyZero', 'unique_id': 'energyzero', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'EnergyZero', 'type': , 'version': 1, diff --git a/tests/components/energyzero/test_init.py b/tests/components/energyzero/test_init.py index 287157026f4..f8e7e75e902 100644 --- a/tests/components/energyzero/test_init.py +++ b/tests/components/energyzero/test_init.py @@ -5,7 +5,6 @@ from unittest.mock import MagicMock, patch from energyzero import EnergyZeroConnectionError import pytest -from homeassistant.components.energyzero.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -26,7 +25,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/enigma2/conftest.py b/tests/components/enigma2/conftest.py index 6c024ebf66a..a53d1494e9a 100644 --- a/tests/components/enigma2/conftest.py +++ b/tests/components/enigma2/conftest.py @@ -4,7 +4,6 @@ from openwebif.api import OpenWebIfServiceEvent, OpenWebIfStatus from homeassistant.components.enigma2.const import ( CONF_DEEP_STANDBY, - CONF_MAC_ADDRESS, CONF_SOURCE_BOUQUET, CONF_USE_CHANNEL_ICON, DEFAULT_DEEP_STANDBY, @@ -14,7 +13,6 @@ from homeassistant.components.enigma2.const import ( ) from homeassistant.const import ( CONF_HOST, - CONF_NAME, CONF_PASSWORD, 
CONF_PORT, CONF_SSL, @@ -40,21 +38,6 @@ TEST_FULL = { CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL, } -TEST_IMPORT_FULL = { - CONF_HOST: "1.1.1.1", - CONF_PORT: DEFAULT_PORT, - CONF_SSL: DEFAULT_SSL, - CONF_USERNAME: "root", - CONF_PASSWORD: "password", - CONF_NAME: "My Player", - CONF_DEEP_STANDBY: DEFAULT_DEEP_STANDBY, - CONF_SOURCE_BOUQUET: "Favourites", - CONF_MAC_ADDRESS: MAC_ADDRESS, - CONF_USE_CHANNEL_ICON: False, -} - -TEST_IMPORT_REQUIRED = {CONF_HOST: "1.1.1.1"} - EXPECTED_OPTIONS = { CONF_DEEP_STANDBY: DEFAULT_DEEP_STANDBY, CONF_SOURCE_BOUQUET: "Favourites", diff --git a/tests/components/enigma2/test_config_flow.py b/tests/components/enigma2/test_config_flow.py index 74721ce0993..8d32da42baf 100644 --- a/tests/components/enigma2/test_config_flow.py +++ b/tests/components/enigma2/test_config_flow.py @@ -10,18 +10,10 @@ import pytest from homeassistant import config_entries from homeassistant.components.enigma2.const import DOMAIN from homeassistant.const import CONF_HOST -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir -from .conftest import ( - EXPECTED_OPTIONS, - TEST_FULL, - TEST_IMPORT_FULL, - TEST_IMPORT_REQUIRED, - TEST_REQUIRED, - MockDevice, -) +from .conftest import TEST_FULL, TEST_REQUIRED, MockDevice from tests.common import MockConfigEntry @@ -87,87 +79,6 @@ async def test_form_user_errors( assert result["errors"] == {"base": error_type} -@pytest.mark.parametrize( - ("test_config", "expected_data", "expected_options"), - [ - (TEST_IMPORT_FULL, TEST_FULL, EXPECTED_OPTIONS), - (TEST_IMPORT_REQUIRED, TEST_REQUIRED, {}), - ], -) -async def test_form_import( - hass: HomeAssistant, - test_config: dict[str, Any], - expected_data: dict[str, Any], - expected_options: dict[str, Any], - issue_registry: ir.IssueRegistry, -) -> None: - """Test we get the form with import source.""" - with ( - patch( - "homeassistant.components.enigma2.config_flow.OpenWebIfDevice.__new__", - return_value=MockDevice(), - ), - patch( - "homeassistant.components.enigma2.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=test_config, - ) - await hass.async_block_till_done() - - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - - assert issue - assert issue.issue_domain == DOMAIN - assert result["type"] == FlowResultType.CREATE_ENTRY - assert result["title"] == test_config[CONF_HOST] - assert result["data"] == expected_data - assert result["options"] == expected_options - - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("exception", "error_type"), - [ - (InvalidAuthError, "invalid_auth"), - (ClientError, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_form_import_errors( - hass: HomeAssistant, - exception: Exception, - error_type: str, - issue_registry: ir.IssueRegistry, -) -> None: - """Test we handle errors on import.""" - with patch( - "homeassistant.components.enigma2.config_flow.OpenWebIfDevice.__new__", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=TEST_IMPORT_FULL, - ) - - issue = issue_registry.async_get_issue( - DOMAIN, f"deprecated_yaml_{DOMAIN}_import_issue_{error_type}" - 
) - - assert issue - assert issue.issue_domain == DOMAIN - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == error_type - - async def test_options_flow(hass: HomeAssistant, user_flow: str) -> None: """Test the form options.""" diff --git a/tests/components/enigma2/test_init.py b/tests/components/enigma2/test_init.py index ab19c2ce51a..d12f96d4b0f 100644 --- a/tests/components/enigma2/test_init.py +++ b/tests/components/enigma2/test_init.py @@ -5,23 +5,37 @@ from unittest.mock import patch from homeassistant.components.enigma2.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from .conftest import TEST_REQUIRED, MockDevice from tests.common import MockConfigEntry +async def test_device_without_mac_address( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test that a device gets successfully registered when the device doesn't report a MAC address.""" + mock_device = MockDevice() + mock_device.mac_address = None + with patch( + "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", + return_value=mock_device, + ): + entry = MockConfigEntry( + domain=DOMAIN, data=TEST_REQUIRED, title="name", unique_id="123456" + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert device_registry.async_get_device({(DOMAIN, entry.unique_id)}) is not None + + async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" - with ( - patch( - "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", - return_value=MockDevice(), - ), - patch( - "homeassistant.components.enigma2.media_player.async_setup_entry", - return_value=True, - ), + with patch( + "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", + return_value=MockDevice(), ): entry = MockConfigEntry(domain=DOMAIN, data=TEST_REQUIRED, title="name") entry.add_to_hass(hass) diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index ab6e0e4f097..541b6f96e19 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -69,6 +69,11 @@ async def mock_envoy( request: pytest.FixtureRequest, ) -> AsyncGenerator[AsyncMock]: """Define a mocked Envoy fixture.""" + new_token = jwt.encode( + payload={"name": "envoy", "exp": 2007837780}, + key="secret", + algorithm="HS256", + ) with ( patch( "homeassistant.components.enphase_envoy.config_flow.Envoy", @@ -78,6 +83,10 @@ async def mock_envoy( "homeassistant.components.enphase_envoy.Envoy", new=mock_client, ), + patch( + "pyenphase.auth.EnvoyTokenAuth._obtain_token", + return_value=new_token, + ), ): mock_envoy = mock_client.return_value # Add the fixtures specified @@ -141,6 +150,8 @@ def _load_json_2_production_data( """Fill envoy production data from fixture.""" if item := json_fixture["data"].get("system_consumption"): mocked_data.system_consumption = EnvoySystemConsumption(**item) + if item := json_fixture["data"].get("system_net_consumption"): + mocked_data.system_net_consumption = EnvoySystemConsumption(**item) if item := json_fixture["data"].get("system_production"): mocked_data.system_production = EnvoySystemProduction(**item) if item := json_fixture["data"].get("system_consumption_phases"): @@ -149,6 +160,12 @@ def _load_json_2_production_data( 
mocked_data.system_consumption_phases[sub_item] = EnvoySystemConsumption( **item_data ) + if item := json_fixture["data"].get("system_net_consumption_phases"): + mocked_data.system_net_consumption_phases = {} + for sub_item, item_data in item.items(): + mocked_data.system_net_consumption_phases[sub_item] = ( + EnvoySystemConsumption(**item_data) + ) if item := json_fixture["data"].get("system_production_phases"): mocked_data.system_production_phases = {} for sub_item, item_data in item.items(): diff --git a/tests/components/enphase_envoy/fixtures/envoy.json b/tests/components/enphase_envoy/fixtures/envoy.json index 8c9be429931..3431dba6766 100644 --- a/tests/components/enphase_envoy/fixtures/envoy.json +++ b/tests/components/enphase_envoy/fixtures/envoy.json @@ -17,6 +17,7 @@ "encharge_aggregate": null, "enpower": null, "system_consumption": null, + "system_net_consumption": null, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -24,6 +25,7 @@ "watts_now": 1234 }, "system_consumption_phases": null, + "system_net_consumption_phases": null, "system_production_phases": null, "ctmeter_production": null, "ctmeter_consumption": null, diff --git a/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json b/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json index e72829280da..05a6f265dfb 100644 --- a/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json +++ b/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json @@ -22,6 +22,12 @@ "watt_hours_today": 1234, "watts_now": 1234 }, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -29,6 +35,7 @@ "watts_now": 1234 }, "system_consumption_phases": null, + "system_net_consumption_phases": null, "system_production_phases": null, "ctmeter_production": { "eid": "100000010", diff --git a/tests/components/enphase_envoy/fixtures/envoy_eu_batt.json b/tests/components/enphase_envoy/fixtures/envoy_eu_batt.json new file mode 100644 index 00000000000..8118630200f --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_eu_batt.json @@ -0,0 +1,262 @@ +{ + "serial_number": "1234", + "firmware": "7.6.358", + "part_number": "800-00654-r08", + "envoy_model": "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT", + "supported_features": 1759, + "phase_mode": "three", + "phase_count": 3, + "active_phase_count": 0, + "ct_meter_count": 2, + "consumption_meter_type": "net-consumption", + "production_meter_type": "production", + "storage_meter_type": null, + "data": { + "encharge_inventory": { + "123456": { + "admin_state": 6, + "admin_state_str": "ENCHG_STATE_READY", + "bmu_firmware_version": "2.1.16", + "comm_level_2_4_ghz": 4, + "comm_level_sub_ghz": 4, + "communicating": true, + "dc_switch_off": false, + "encharge_capacity": 3500, + "encharge_revision": 2, + "firmware_loaded_date": 1714736645, + "firmware_version": "2.6.6618_rel/22.11", + "installed_date": 1714736645, + "last_report_date": 1714804173, + "led_status": 17, + "max_cell_temp": 16, + "operating": true, + "part_number": "830-01760-r46", + "percent_full": 4, + "serial_number": "122327081322", + "temperature": 16, + "temperature_unit": "C", + "zigbee_dongle_fw_version": "100F" + } + }, + "encharge_power": { + "123456": { + "apparent_power_mva": 0, + "real_power_mw": 0, + "soc": 4 + } + }, + "encharge_aggregate": { + "available_energy": 140, + 
"backup_reserve": 0, + "state_of_charge": 4, + "reserve_state_of_charge": 0, + "configured_reserve_state_of_charge": 0, + "max_available_capacity": 3500 + }, + "enpower": null, + "system_consumption": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": null, + "system_production_phases": null, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, + "system_net_consumption_phases": null, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": null, + "ctmeter_production_phases": { + "L1": { + "eid": "100000011", + "timestamp": 1708006111, + "energy_delivered": 112341, + "energy_received": 123451, + "active_power": 20, + "power_factor": 0.12, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance"] + }, + "L2": { + "eid": "100000012", + "timestamp": 1708006112, + "energy_delivered": 112342, + "energy_received": 123452, + "active_power": 30, + "power_factor": 0.13, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["power-on-unused-phase"] + }, + "L3": { + "eid": "100000013", + "timestamp": 1708006113, + "energy_delivered": 112343, + "energy_received": 123453, + "active_power": 50, + "power_factor": 0.14, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_consumption_phases": { + "L1": { + "eid": "100000021", + "timestamp": 1708006121, + "energy_delivered": 212341, + "energy_received": 223451, + "active_power": 21, + "power_factor": 0.22, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L2": { + "eid": "100000022", + "timestamp": 1708006122, + "energy_delivered": 212342, + "energy_received": 223452, + "active_power": 31, + "power_factor": 0.23, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L3": { + "eid": "100000023", + "timestamp": 1708006123, + "energy_delivered": 212343, + "energy_received": 223453, + "active_power": 51, + "power_factor": 0.24, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + 
"measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1714749724", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 0.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1714749724" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "all_year_long", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 0, + "must_charge_duration": 0, + "must_charge_mode": "CP", + "enable_discharge_to_grid": false, + "periods": [ + { + "id": "period_1", + "start": 0, + "rate": 0.0 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json b/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json index 72b510e2328..7affc1bea0d 100644 --- a/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json +++ b/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json @@ -79,6 +79,12 @@ "watt_hours_today": 1234, "watts_now": 1234 }, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -105,6 +111,26 @@ "watts_now": 3324 } }, + "system_net_consumption_phases": { + "L1": { + "watt_hours_lifetime": 1321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 12341 + }, + "L2": { + "watt_hours_lifetime": 2321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 22341 + }, + "L3": { + "watt_hours_lifetime": 3321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 32341 + } + }, "system_production_phases": { "L1": { "watt_hours_lifetime": 1232, diff --git a/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json b/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json index f9b6ae31196..ff975b690ed 100644 --- a/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json +++ b/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json @@ -22,6 +22,12 @@ "watt_hours_today": 1234, "watts_now": 1234 }, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -48,6 +54,26 @@ "watts_now": 3324 } }, + "system_net_consumption_phases": { + "L1": { + "watt_hours_lifetime": 1321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 12341 + }, + "L2": { + "watt_hours_lifetime": 2321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 22341 + }, + "L3": { + "watt_hours_lifetime": 3321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 32341 + } + }, "system_production_phases": { "L1": { "watt_hours_lifetime": 1232, diff --git a/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json 
b/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json index ca2a976b6d1..62df69c6d88 100644 --- a/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json +++ b/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json @@ -17,6 +17,12 @@ "encharge_aggregate": null, "enpower": null, "system_consumption": null, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -24,6 +30,7 @@ "watts_now": 1234 }, "system_consumption_phases": null, + "system_net_consumption_phases": null, "system_production_phases": null, "ctmeter_production": { "eid": "100000010", diff --git a/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr index 84401c7566b..f936a9db76e 100644 --- a/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr @@ -1,4 +1,97 @@ # serializer version: 1 +# name: test_binary_sensor[envoy_eu_batt][binary_sensor.encharge_123456_communicating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.encharge_123456_communicating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Communicating', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'communicating', + 'unique_id': '123456_communicating', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[envoy_eu_batt][binary_sensor.encharge_123456_communicating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Encharge 123456 Communicating', + }), + 'context': , + 'entity_id': 'binary_sensor.encharge_123456_communicating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[envoy_eu_batt][binary_sensor.encharge_123456_dc_switch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.encharge_123456_dc_switch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DC switch', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_switch', + 'unique_id': '123456_dc_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[envoy_eu_batt][binary_sensor.encharge_123456_dc_switch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Encharge 123456 DC switch', + }), + 'context': , + 'entity_id': 'binary_sensor.encharge_123456_dc_switch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: 
test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_communicating-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr index e849ab6ee43..3cacd3a8518 100644 --- a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr +++ b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr @@ -10,6 +10,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'enphase_envoy', 'entry_id': '45a36e55aaddb2007c5f6602e0c38e72', 'minor_version': 1, @@ -18,6 +20,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -441,6 +445,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'enphase_envoy', 'entry_id': '45a36e55aaddb2007c5f6602e0c38e72', 'minor_version': 1, @@ -450,6 +456,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -913,6 +921,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'enphase_envoy', 'entry_id': '45a36e55aaddb2007c5f6602e0c38e72', 'minor_version': 1, @@ -922,6 +932,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/enphase_envoy/snapshots/test_number.ambr b/tests/components/enphase_envoy/snapshots/test_number.ambr index 6310911c27e..b7e799c9ac8 100644 --- a/tests/components/enphase_envoy/snapshots/test_number.ambr +++ b/tests/components/enphase_envoy/snapshots/test_number.ambr @@ -1,4 +1,61 @@ # serializer version: 1 +# name: test_number[envoy_eu_batt][number.envoy_1234_reserve_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.envoy_1234_reserve_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reserve battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_soc', + 'unique_id': '1234_reserve_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[envoy_eu_batt][number.envoy_1234_reserve_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Reserve battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.envoy_1234_reserve_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_number[envoy_metered_batt_relay][number.enpower_654321_reserve_battery_level-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git 
a/tests/components/enphase_envoy/snapshots/test_select.ambr b/tests/components/enphase_envoy/snapshots/test_select.ambr index 10f15820ac4..f091879d9fc 100644 --- a/tests/components/enphase_envoy/snapshots/test_select.ambr +++ b/tests/components/enphase_envoy/snapshots/test_select.ambr @@ -1,4 +1,61 @@ # serializer version: 1 +# name: test_select[envoy_eu_batt][select.envoy_1234_storage_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'backup', + 'self_consumption', + 'savings', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.envoy_1234_storage_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Storage mode', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_mode', + 'unique_id': '1234_storage_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_eu_batt][select.envoy_1234_storage_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Storage mode', + 'options': list([ + 'backup', + 'self_consumption', + 'savings', + ]), + }), + 'context': , + 'entity_id': 'select.envoy_1234_storage_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'self_consumption', + }) +# --- # name: test_select[envoy_metered_batt_relay][select.enpower_654321_storage_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index dde6a6add41..c43325a639d 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -328,6 +328,64 @@ 'state': '1970-01-01T00:00:01+00:00', }) # --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- # name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_net_power_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -783,6 +841,119 @@ 'state': '50.2', }) # --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- # name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1227,6 +1398,230 @@ 'state': 'normal', }) # --- +# name: 
test_sensor[envoy_1p_metered][sensor.envoy_1234_net_consumption_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current', + 'unique_id': '1234_net_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_net_consumption_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor', + 'unique_id': '1234_net_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.21', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 
'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- # name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1285,6 +1680,64 @@ 'state': '112', }) # --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 
'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- # name: test_sensor[envoy_1p_metered][sensor.inverter_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1385,6 +1838,4508 @@ 'state': '1970-01-01T00:00:01+00:00', }) # --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_apparent_power_mva', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Encharge 123456 Apparent power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Encharge 123456 Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'last_reported', + 'unique_id': '123456_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Encharge 123456 Last reported', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-05-04T06:29:33+00:00', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_real_power_mw', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Encharge 123456 Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Encharge 123456 Temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_available_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Available battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'available_energy', + 'unique_id': '1234_available_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_available_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Available battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '140', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Battery', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_battery_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery capacity', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_capacity', + 'unique_id': '1234_max_capacity', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_battery_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Battery capacity', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3500', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '1234_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.101', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.031', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.051', + }) +# --- +# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_current_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '1234_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_consumption_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '1234_seven_days_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '1234_daily_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production 
last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '1234_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_frequency_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': 
, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '1234_lifetime_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '1234_lifetime_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021234', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212341', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, 
+ 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212342', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212343', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 
'unique_id': '1234_lifetime_net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022345', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223451', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223452', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223453', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production 
CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 
'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), 
+ 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current', + 'unique_id': '1234_net_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 
'friendly_name': 'Envoy 1234 Net consumption CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor', + 'unique_id': '1234_net_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.21', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.22', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.24', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.12', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.13', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.14', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l3-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_reserve_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_energy', + 'unique_id': '1234_reserve_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_reserve_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Reserve battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_reserve_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_soc', + 'unique_id': '1234_reserve_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_reserve_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Reserve battery level', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 
'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_voltage_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_eu_batt][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) 
+# --- +# name: test_sensor[envoy_eu_batt][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_apparent_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1768,6 +6723,238 @@ 'state': '525', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_balanced_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.341', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3918,6 +9105,678 @@ 'state': '50.2', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l2', + 'platform': 'enphase_envoy', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_frequency', + 'unique_id': '1234_storage_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency storage CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.3', + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_frequency_phase', + 'unique_id': '1234_storage_ct_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency storage CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_frequency_phase', + 'unique_id': '1234_storage_ct_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency storage CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency 
storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_frequency_phase', + 'unique_id': '1234_storage_ct_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency storage CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.321', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6582,6 +12441,1118 @@ 'state': 'normal', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current', + 'unique_id': '1234_net_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 
'original_name': 'Powerfactor net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor', + 'unique_id': '1234_net_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.21', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.22', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.24', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), 
+ 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.12', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.13', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 
'Envoy 1234 Powerfactor production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.14', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_powerfactor', + 'unique_id': '1234_storage_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor storage CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_powerfactor_phase', + 'unique_id': '1234_storage_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor storage CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.32', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_powerfactor_phase', + 'unique_id': '1234_storage_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor storage CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_powerfactor_phase', + 'unique_id': '1234_storage_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor storage CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.24', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 
'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6680,6 +13651,238 @@ 'state': '15', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_storage_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Storage CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_current', + 'unique_id': '1234_storage_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Storage CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_storage_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.4', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Storage CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_current_phase', + 'unique_id': '1234_storage_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Storage CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.4', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Storage CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_current_phase', + 'unique_id': '1234_storage_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Storage CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Storage CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_current_phase', + 'unique_id': '1234_storage_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Storage CT current l3', 
+ 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6912,6 +14115,238 @@ 'state': '112', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -7244,6 +14679,238 @@ 'state': '1970-01-01T00:00:01+00:00', }) # --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.341', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.341', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.341', + }) +# --- # name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -9064,6 +16731,458 @@ 'state': '50.2', }) # --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency 
production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_frequency_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l1', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.321', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.321', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.321', + }) +# --- # name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -10840,6 +18959,902 @@ 'state': 'normal', }) # --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current', + 'unique_id': '1234_net_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor', + 'unique_id': '1234_net_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.21', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.22', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.24', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.12', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.13', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.14', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', 
+ }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- # name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -11072,6 +20087,238 @@ 'state': '112', }) # --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': 
None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- # name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -11172,6 +20419,64 @@ 'state': '1970-01-01T00:00:01+00:00', }) # --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power 
consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- # name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_current_power_production-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -11343,6 +20648,119 @@ 'state': '1.234', }) # --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- # name: 
test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_energy_production-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -11507,6 +20925,176 @@ 'state': 'normal', }) # --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- # name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/enphase_envoy/snapshots/test_switch.ambr b/tests/components/enphase_envoy/snapshots/test_switch.ambr index a5dafd735b5..46123c03cec 100644 --- a/tests/components/enphase_envoy/snapshots/test_switch.ambr +++ b/tests/components/enphase_envoy/snapshots/test_switch.ambr @@ -1,4 +1,50 @@ # serializer version: 1 +# name: test_switch[envoy_eu_batt][switch.envoy_1234_charge_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.envoy_1234_charge_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charge from grid', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_from_grid', + 'unique_id': '1234_charge_from_grid', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_eu_batt][switch.envoy_1234_charge_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Charge from grid', + }), + 'context': , + 'entity_id': 'switch.envoy_1234_charge_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_charge_from_grid-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/enphase_envoy/test_binary_sensor.py b/tests/components/enphase_envoy/test_binary_sensor.py index 883df4be6fc..bb4a5c5a191 100644 --- a/tests/components/enphase_envoy/test_binary_sensor.py +++ b/tests/components/enphase_envoy/test_binary_sensor.py @@ -16,7 +16,9 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] + ("mock_envoy"), + ["envoy_eu_batt", "envoy_metered_batt_relay"], + indirect=["mock_envoy"], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_binary_sensor( diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index c2cc02fcc7c..44e2e680d5f 100644 --- 
a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -12,13 +12,10 @@ from homeassistant.components.enphase_envoy.const import ( DOMAIN, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, + OPTION_DISABLE_KEEP_ALIVE, + OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE, ) -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_USER, - SOURCE_ZEROCONF, -) +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -636,14 +633,7 @@ async def test_reauth( ) -> None: """Test we reauth auth.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -668,14 +658,12 @@ async def test_options_default( assert result["step_id"] == "init" result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE - }, + result["flow_id"], user_input={} ) assert result["type"] is FlowResultType.CREATE_ENTRY assert config_entry.options == { - OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, + OPTION_DISABLE_KEEP_ALIVE: OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE, } @@ -692,10 +680,17 @@ async def test_options_set( assert result["step_id"] == "init" result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True} + result["flow_id"], + user_input={ + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True, + OPTION_DISABLE_KEEP_ALIVE: True, + }, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert config_entry.options == {OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True} + assert config_entry.options == { + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True, + OPTION_DISABLE_KEEP_ALIVE: True, + } async def test_reconfigure( @@ -706,13 +701,7 @@ async def test_reconfigure( ) -> None: """Test we can reconfiger the entry.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" assert result["errors"] == {} @@ -748,13 +737,7 @@ async def test_reconfigure_nochange( ) -> None: """Test we get the reconfigure form and apply nochange.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" assert result["errors"] == {} @@ -790,13 +773,7 @@ async def test_reconfigure_otherenvoy( ) -> None: """Test entering ip of other envoy and prevent 
changing it based on serial.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" assert result["errors"] == {} @@ -813,34 +790,14 @@ async def test_reconfigure_otherenvoy( }, ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unexpected_envoy"} + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" # entry should still be original entry assert config_entry.data[CONF_HOST] == "1.1.1.1" assert config_entry.data[CONF_USERNAME] == "test-username" assert config_entry.data[CONF_PASSWORD] == "test-password" - # set serial back to original to finsich flow - mock_envoy.serial_number = "1234" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "new-password", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - - # updated original entry - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "new-password" - @pytest.mark.parametrize( ("exception", "error"), @@ -861,13 +818,7 @@ async def test_reconfigure_auth_failure( """Test changing credentials for existing host with auth failure.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -936,13 +887,7 @@ async def test_reconfigure_change_ip_to_existing( assert other_entry.data[CONF_USERNAME] == "other-username" assert other_entry.data[CONF_PASSWORD] == "other-password" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" assert result["errors"] == {} diff --git a/tests/components/enphase_envoy/test_init.py b/tests/components/enphase_envoy/test_init.py new file mode 100644 index 00000000000..2b35aaff5e9 --- /dev/null +++ b/tests/components/enphase_envoy/test_init.py @@ -0,0 +1,362 @@ +"""Test Enphase Envoy runtime.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from jwt import encode +from pyenphase import EnvoyAuthenticationError, EnvoyError, EnvoyTokenAuth +from pyenphase.auth import EnvoyLegacyAuth +import pytest +import respx + +from homeassistant.components.enphase_envoy import DOMAIN +from homeassistant.components.enphase_envoy.const import ( + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, + OPTION_DISABLE_KEEP_ALIVE, + Platform, +) +from homeassistant.components.enphase_envoy.coordinator import SCAN_INTERVAL +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, + STATE_UNAVAILABLE, +) +from homeassistant.core import HomeAssistant 
+from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import WebSocketGenerator + + +async def test_with_pre_v7_firmware( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy coordinator with pre V7 firmware.""" + mock_envoy.firmware = "5.1.1" + mock_envoy.auth = EnvoyLegacyAuth( + "127.0.0.1", username="test-username", password="test-password" + ) + await setup_integration(hass, config_entry) + + assert config_entry.state is ConfigEntryState.LOADED + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == "1" + + +@pytest.mark.freeze_time("2024-07-23 00:00:00+00:00") +async def test_token_in_config_file( + hass: HomeAssistant, + mock_envoy: AsyncMock, +) -> None: + """Test coordinator with token provided from config.""" + token = encode( + payload={"name": "envoy", "exp": 1907837780}, + key="secret", + algorithm="HS256", + ) + entry = MockConfigEntry( + domain=DOMAIN, + entry_id="45a36e55aaddb2007c5f6602e0c38e72", + title="Envoy 1234", + unique_id="1234", + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_TOKEN: token, + }, + ) + mock_envoy.auth = EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial="1234") + await setup_integration(hass, entry) + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.state is ConfigEntryState.LOADED + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == "1" + + +@respx.mock +@pytest.mark.freeze_time("2024-07-23 00:00:00+00:00") +async def test_expired_token_in_config( + hass: HomeAssistant, + mock_envoy: AsyncMock, +) -> None: + """Test coordinator with expired token provided from config.""" + current_token = encode( + # some time in 2021 + payload={"name": "envoy", "exp": 1627314600}, + key="secret", + algorithm="HS256", + ) + + # mock envoy with expired token in config + entry = MockConfigEntry( + domain=DOMAIN, + entry_id="45a36e55aaddb2007c5f6602e0c38e72", + title="Envoy 1234", + unique_id="1234", + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_TOKEN: current_token, + }, + ) + # Make sure to mock pyenphase.auth.EnvoyTokenAuth._obtain_token + # when specifying username and password in EnvoyTokenauth + mock_envoy.auth = EnvoyTokenAuth( + "127.0.0.1", + token=current_token, + envoy_serial="1234", + cloud_username="test_username", + cloud_password="test_password", + ) + await setup_integration(hass, entry) + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.state is ConfigEntryState.LOADED + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == "1" + + +async def test_coordinator_update_error( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator update error handling.""" + await setup_integration(hass, config_entry) + + assert (entity_state := hass.states.get("sensor.inverter_1")) + original_state = entity_state + + # force HA to detect changed data by changing raw + mock_envoy.data.raw = {"I": "am changed 1"} + mock_envoy.update.side_effect = EnvoyError + + # 
Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == STATE_UNAVAILABLE + + mock_envoy.reset_mock(return_value=True, side_effect=True) + + mock_envoy.data.raw = {"I": "am changed 2"} + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == original_state.state + + +async def test_coordinator_update_authentication_error( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test enphase_envoy coordinator update authentication error handling.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + # force HA to detect changed data by changing raw + mock_envoy.data.raw = {"I": "am changed 1"} + mock_envoy.update.side_effect = EnvoyAuthenticationError("This must fail") + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == STATE_UNAVAILABLE + + +@pytest.mark.freeze_time("2024-07-23 00:00:00+00:00") +async def test_coordinator_token_refresh_error( + hass: HomeAssistant, + mock_envoy: AsyncMock, +) -> None: + """Test coordinator with expired token and failure to refresh.""" + token = encode( + # some time in 2021 + payload={"name": "envoy", "exp": 1627314600}, + key="secret", + algorithm="HS256", + ) + entry = MockConfigEntry( + domain=DOMAIN, + entry_id="45a36e55aaddb2007c5f6602e0c38e72", + title="Envoy 1234", + unique_id="1234", + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_TOKEN: token, + }, + ) + # override fresh token in conftest mock_envoy.auth + mock_envoy.auth = EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial="1234") + # force token refresh to fail. 
+ with patch( + "pyenphase.auth.EnvoyTokenAuth._obtain_token", + side_effect=EnvoyError, + ): + await setup_integration(hass, entry) + + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.state is ConfigEntryState.LOADED + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == "1" + + +async def test_config_no_unique_id( + hass: HomeAssistant, + mock_envoy: AsyncMock, +) -> None: + """Test enphase_envoy init if config entry has no unique id.""" + entry = MockConfigEntry( + domain=DOMAIN, + entry_id="45a36e55aaddb2007c5f6602e0c38e72", + title="Envoy 1234", + unique_id=None, + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await setup_integration(hass, entry) + assert entry.state is ConfigEntryState.LOADED + assert entry.unique_id == mock_envoy.serial_number + + +async def test_config_different_unique_id( + hass: HomeAssistant, + mock_envoy: AsyncMock, +) -> None: + """Test enphase_envoy init if config entry has different unique id.""" + entry = MockConfigEntry( + domain=DOMAIN, + entry_id="45a36e55aaddb2007c5f6602e0c38e72", + title="Envoy 1234", + unique_id=4321, + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await setup_integration(hass, entry) + assert entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +async def test_remove_config_entry_device( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test removing enphase_envoy config entry device.""" + assert await async_setup_component(hass, "config", {}) + await setup_integration(hass, config_entry) + assert config_entry.state is ConfigEntryState.LOADED + + # use client to send remove_device command + hass_client = await hass_ws_client(hass) + + # add device that will pass remove test + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, "delete_this_device")}, + ) + response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) + assert response["success"] + + # inverters are not allowed to be removed + entity = entity_registry.entities["sensor.inverter_1"] + device_entry = device_registry.async_get(entity.device_id) + response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) + assert not response["success"] + + # envoy itself is not allowed to be removed + entity = entity_registry.entities["sensor.envoy_1234_current_power_production"] + device_entry = device_registry.async_get(entity.device_id) + response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) + assert not response["success"] + + # encharge can not be removed + entity = entity_registry.entities["sensor.encharge_123456_power"] + device_entry = device_registry.async_get(entity.device_id) + response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) + assert not response["success"] + + # enpower can not be removed + entity = entity_registry.entities["sensor.enpower_654321_temperature"] + device_entry = device_registry.async_get(entity.device_id) + response = await hass_client.remove_device(device_entry.id, 
config_entry.entry_id) + assert not response["success"] + + # relays can be removed + entity = entity_registry.entities["switch.nc1_fixture"] + device_entry = device_registry.async_get(entity.device_id) + response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) + assert response["success"] + + +async def test_option_change_reload( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, +) -> None: + """Test options change will reload entity.""" + await setup_integration(hass, config_entry) + await hass.async_block_till_done(wait_background_tasks=True) + assert config_entry.state is ConfigEntryState.LOADED + + # option change will take care of COV of init::async_reload_entry + hass.config_entries.async_update_entry( + config_entry, + options={ + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: False, + OPTION_DISABLE_KEEP_ALIVE: True, + }, + ) + await hass.async_block_till_done() + assert config_entry.options == { + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: False, + OPTION_DISABLE_KEEP_ALIVE: True, + } diff --git a/tests/components/enphase_envoy/test_number.py b/tests/components/enphase_envoy/test_number.py index dac51ed5e26..dbf711cacaa 100644 --- a/tests/components/enphase_envoy/test_number.py +++ b/tests/components/enphase_envoy/test_number.py @@ -21,7 +21,9 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] + ("mock_envoy"), + ["envoy_metered_batt_relay", "envoy_eu_batt"], + indirect=["mock_envoy"], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_number( @@ -60,19 +62,24 @@ async def test_no_number( @pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] + ("mock_envoy", "use_serial"), + [ + ("envoy_metered_batt_relay", "enpower_654321"), + ("envoy_eu_batt", "envoy_1234"), + ], + indirect=["mock_envoy"], ) async def test_number_operation_storage( hass: HomeAssistant, mock_envoy: AsyncMock, config_entry: MockConfigEntry, + use_serial: bool, ) -> None: """Test enphase_envoy number storage entities operation.""" with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): await setup_integration(hass, config_entry) - sn = mock_envoy.data.enpower.serial_number - test_entity = f"{Platform.NUMBER}.enpower_{sn}_reserve_battery_level" + test_entity = f"{Platform.NUMBER}.{use_serial}_reserve_battery_level" assert (entity_state := hass.states.get(test_entity)) assert mock_envoy.data.tariff.storage_settings.reserved_soc == float( diff --git a/tests/components/enphase_envoy/test_select.py b/tests/components/enphase_envoy/test_select.py index 38640f53dea..071dbcb2fe2 100644 --- a/tests/components/enphase_envoy/test_select.py +++ b/tests/components/enphase_envoy/test_select.py @@ -28,7 +28,9 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] + ("mock_envoy"), + ["envoy_metered_batt_relay", "envoy_eu_batt"], + indirect=["mock_envoy"], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_select( @@ -172,19 +174,24 @@ async def test_select_relay_modes( @pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] + ("mock_envoy", "use_serial"), + [ + ("envoy_metered_batt_relay", "enpower_654321"), + ("envoy_eu_batt", "envoy_1234"), + ], + indirect=["mock_envoy"], ) async def 
test_select_storage_modes( hass: HomeAssistant, mock_envoy: AsyncMock, config_entry: MockConfigEntry, + use_serial: str, ) -> None: """Test select platform entities storage mode changes.""" with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): await setup_integration(hass, config_entry) - sn = mock_envoy.data.enpower.serial_number - test_entity = f"{Platform.SELECT}.enpower_{sn}_storage_mode" + test_entity = f"{Platform.SELECT}.{use_serial}_storage_mode" assert (entity_state := hass.states.get(test_entity)) assert STORAGE_MODE_MAP[mock_envoy.data.tariff.storage_settings.mode] == ( diff --git a/tests/components/enphase_envoy/test_sensor.py b/tests/components/enphase_envoy/test_sensor.py index 273f81173ff..784dfe54073 100644 --- a/tests/components/enphase_envoy/test_sensor.py +++ b/tests/components/enphase_envoy/test_sensor.py @@ -1,6 +1,7 @@ """Test Enphase Envoy sensors.""" from itertools import chain +import logging from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory @@ -26,6 +27,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_plat [ "envoy", "envoy_1p_metered", + "envoy_eu_batt", "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", @@ -59,6 +61,7 @@ PRODUCTION_NAMES: tuple[str, ...] = ( [ "envoy", "envoy_1p_metered", + "envoy_eu_batt", "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", @@ -148,6 +151,7 @@ CONSUMPTION_NAMES: tuple[str, ...] = ( ("mock_envoy"), [ "envoy_1p_metered", + "envoy_eu_batt", "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", ], @@ -179,6 +183,48 @@ async def test_sensor_consumption_data( assert float(entity_state.state) == target +NET_CONSUMPTION_NAMES: tuple[str, ...] 
= ( + "balanced_net_power_consumption", + "lifetime_balanced_net_energy_consumption", +) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_1p_metered", + "envoy_eu_batt", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_net_consumption_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test net consumption entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.system_net_consumption + NET_CONSUMPTION_TARGETS = ( + data.watts_now / 1000.0, + data.watt_hours_lifetime / 1000.0, + ) + for name, target in list( + zip(NET_CONSUMPTION_NAMES, NET_CONSUMPTION_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CONSUMPTION_PHASE_NAMES: list[str] = [ f"{name}_{phase.lower()}" for phase in PHASENAMES for name in CONSUMPTION_NAMES ] @@ -224,6 +270,48 @@ async def test_sensor_consumption_phase_data( assert float(entity_state.state) == target +NET_CONSUMPTION_PHASE_NAMES: list[str] = [ + f"{name}_{phase.lower()}" for phase in PHASENAMES for name in NET_CONSUMPTION_NAMES +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_net_consumption_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test net consumption phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + NET_CONSUMPTION_PHASE_TARGET = chain( + *[ + ( + phase_data.watts_now / 1000.0, + phase_data.watt_hours_lifetime / 1000.0, + ) + for phase_data in mock_envoy.data.system_net_consumption_phases.values() + ] + ) + for name, target in list( + zip(NET_CONSUMPTION_PHASE_NAMES, NET_CONSUMPTION_PHASE_TARGET, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_PRODUCTION_NAMES_INT = ("meter_status_flags_active_production_ct",) CT_PRODUCTION_NAMES_STR = ("metering_status_production_ct",) @@ -652,6 +740,7 @@ async def test_sensor_storage_phase_disabled_by_integration( [ "envoy", "envoy_1p_metered", + "envoy_eu_batt", "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", @@ -684,6 +773,7 @@ async def test_sensor_inverter_data( [ "envoy", "envoy_1p_metered", + "envoy_eu_batt", "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", @@ -877,6 +967,7 @@ async def test_sensor_missing_data( # force missing data to test 'if == none' code sections mock_envoy.data.system_production_phases["L2"] = None mock_envoy.data.system_consumption_phases["L2"] = None + mock_envoy.data.system_net_consumption_phases["L2"] = None mock_envoy.data.ctmeter_production = None mock_envoy.data.ctmeter_consumption = None mock_envoy.data.ctmeter_storage = None @@ -912,3 +1003,36 @@ async def
test_sensor_missing_data( # test the original inverter is now unknown assert (entity_state := hass.states.get("sensor.inverter_1")) assert entity_state.state == STATE_UNKNOWN + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_fw_update( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test enphase_envoy sensor update across a firmware update.""" + logging.getLogger("homeassistant.components.enphase_envoy").setLevel(logging.DEBUG) + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + # force a reload by changing the reported firmware version + mock_envoy.firmware = "0.0.0" + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert "firmware changed from: " in caplog.text + assert "to: 0.0.0, reloading enphase envoy integration" in caplog.text diff --git a/tests/components/enphase_envoy/test_switch.py b/tests/components/enphase_envoy/test_switch.py index 15f59cc3ea6..f30cba4d201 100644 --- a/tests/components/enphase_envoy/test_switch.py +++ b/tests/components/enphase_envoy/test_switch.py @@ -24,7 +24,9 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] + ("mock_envoy"), + ["envoy_metered_batt_relay", "envoy_eu_batt"], + indirect=["mock_envoy"], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_switch( @@ -109,7 +111,26 @@ async def test_switch_grid_operation( mock_envoy.go_off_grid.assert_awaited_once_with() mock_envoy.go_off_grid.reset_mock() - test_entity = f"{Platform.SWITCH}.enpower_{sn}_charge_from_grid" + +@pytest.mark.parametrize( + ("mock_envoy", "use_serial"), + [ + ("envoy_metered_batt_relay", "enpower_654321"), + ("envoy_eu_batt", "envoy_1234"), + ], + indirect=["mock_envoy"], +) +async def test_switch_charge_from_grid_operation( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + use_serial: str, +) -> None: + """Test switch platform operation for charge from grid switches.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, config_entry) + + test_entity = f"{Platform.SWITCH}.{use_serial}_charge_from_grid" # validate envoy value is reflected in entity assert (entity_state := hass.states.get(test_entity)) diff --git a/tests/components/epson/test_config_flow.py b/tests/components/epson/test_config_flow.py index d485a4bfdef..f727185362c 100644 --- a/tests/components/epson/test_config_flow.py +++ b/tests/components/epson/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch from epson_projector.const import PWR_OFF_STATE from homeassistant import config_entries -from homeassistant.components.epson.const import DOMAIN +from homeassistant.components.epson.const import CONF_CONNECTION_TYPE, DOMAIN, HTTP from homeassistant.const import CONF_HOST, CONF_NAME, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -33,6 +33,10 @@ async def test_form(hass: HomeAssistant) -> None: patch(
"homeassistant.components.epson.async_setup_entry", return_value=True, + ), + patch( + "homeassistant.components.epson.Projector.close", + return_value=True, ) as mock_setup_entry, ): result2 = await hass.config_entries.flow.async_configure( @@ -43,7 +47,7 @@ async def test_form(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "test-epson" - assert result2["data"] == {CONF_HOST: "1.1.1.1"} + assert result2["data"] == {CONF_CONNECTION_TYPE: HTTP, CONF_HOST: "1.1.1.1"} assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/epson/test_init.py b/tests/components/epson/test_init.py new file mode 100644 index 00000000000..964f9e915ab --- /dev/null +++ b/tests/components/epson/test_init.py @@ -0,0 +1,37 @@ +"""Test the epson init.""" + +from unittest.mock import patch + +from homeassistant.components.epson.const import CONF_CONNECTION_TYPE, DOMAIN +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_migrate_entry(hass: HomeAssistant) -> None: + """Test successful migration of entry data from version 1 to 1.2.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + title="Epson", + version=1, + minor_version=1, + data={CONF_HOST: "1.1.1.1"}, + entry_id="1cb78c095906279574a0442a1f0003ef", + ) + assert mock_entry.version == 1 + + mock_entry.add_to_hass(hass) + + # Create entity entry to migrate to new unique ID + with patch("homeassistant.components.epson.Projector.get_power"): + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + # Check that is now has connection_type + assert mock_entry + assert mock_entry.version == 1 + assert mock_entry.minor_version == 2 + assert mock_entry.data.get(CONF_CONNECTION_TYPE) == "http" + assert mock_entry.data.get(CONF_HOST) == "1.1.1.1" diff --git a/tests/components/epson/test_media_player.py b/tests/components/epson/test_media_player.py index e529746dcd0..188fdd5b700 100644 --- a/tests/components/epson/test_media_player.py +++ b/tests/components/epson/test_media_player.py @@ -5,7 +5,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory -from homeassistant.components.epson.const import DOMAIN +from homeassistant.components.epson.const import CONF_CONNECTION_TYPE, DOMAIN, HTTP from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -22,7 +22,7 @@ async def test_set_unique_id( entry = MockConfigEntry( domain=DOMAIN, title="Epson", - data={CONF_HOST: "1.1.1.1"}, + data={CONF_CONNECTION_TYPE: HTTP, CONF_HOST: "1.1.1.1"}, entry_id="1cb78c095906279574a0442a1f0003ef", ) entry.add_to_hass(hass) diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index ea4099560cd..2b7c127efd3 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -19,8 +19,8 @@ from aioesphomeapi import ( HomeassistantServiceCall, ReconnectLogic, UserService, + VoiceAssistantAnnounceFinished, VoiceAssistantAudioSettings, - VoiceAssistantEventType, VoiceAssistantFeature, ) import pytest @@ -34,11 +34,6 @@ from homeassistant.components.esphome.const import ( DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS, DOMAIN, ) -from homeassistant.components.esphome.entry_data import RuntimeEntryData -from homeassistant.components.esphome.voice_assistant import ( - VoiceAssistantAPIPipeline, - 
VoiceAssistantUDPPipeline, -) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -205,12 +200,13 @@ class MockESPHomeDevice: self.home_assistant_state_subscription_callback: Callable[ [str, str | None], None ] + self.home_assistant_state_request_callback: Callable[[str, str | None], None] self.voice_assistant_handle_start_callback: Callable[ [str, int, VoiceAssistantAudioSettings, str | None], Coroutine[Any, Any, int | None], ] self.voice_assistant_handle_stop_callback: Callable[ - [], Coroutine[Any, Any, None] + [bool], Coroutine[Any, Any, None] ] self.voice_assistant_handle_audio_callback: ( Callable[ @@ -219,6 +215,13 @@ class MockESPHomeDevice: ] | None ) + self.voice_assistant_handle_announcement_finished_callback: ( + Callable[ + [VoiceAssistantAnnounceFinished], + Coroutine[Any, Any, None], + ] + | None + ) self.device_info = device_info def set_state_callback(self, state_callback: Callable[[EntityState], None]) -> None: @@ -268,9 +271,11 @@ class MockESPHomeDevice: def set_home_assistant_state_subscription_callback( self, on_state_sub: Callable[[str, str | None], None], + on_state_request: Callable[[str, str | None], None], ) -> None: """Set the state call callback.""" self.home_assistant_state_subscription_callback = on_state_sub + self.home_assistant_state_request_callback = on_state_request def mock_home_assistant_state_subscription( self, entity_id: str, attribute: str | None @@ -278,13 +283,19 @@ class MockESPHomeDevice: """Mock a state subscription.""" self.home_assistant_state_subscription_callback(entity_id, attribute) + def mock_home_assistant_state_request( + self, entity_id: str, attribute: str | None + ) -> None: + """Mock a state request.""" + self.home_assistant_state_request_callback(entity_id, attribute) + def set_subscribe_voice_assistant_callbacks( self, handle_start: Callable[ [str, int, VoiceAssistantAudioSettings, str | None], Coroutine[Any, Any, int | None], ], - handle_stop: Callable[[], Coroutine[Any, Any, None]], + handle_stop: Callable[[bool], Coroutine[Any, Any, None]], handle_audio: ( Callable[ [bytes], @@ -292,11 +303,21 @@ class MockESPHomeDevice: ] | None ) = None, + handle_announcement_finished: ( + Callable[ + [VoiceAssistantAnnounceFinished], + Coroutine[Any, Any, None], + ] + | None + ) = None, ) -> None: """Set the voice assistant subscription callbacks.""" self.voice_assistant_handle_start_callback = handle_start self.voice_assistant_handle_stop_callback = handle_stop self.voice_assistant_handle_audio_callback = handle_audio + self.voice_assistant_handle_announcement_finished_callback = ( + handle_announcement_finished + ) async def mock_voice_assistant_handle_start( self, @@ -310,15 +331,22 @@ class MockESPHomeDevice: conversation_id, flags, settings, wake_word_phrase ) - async def mock_voice_assistant_handle_stop(self) -> None: + async def mock_voice_assistant_handle_stop(self, abort: bool) -> None: """Mock voice assistant handle stop.""" - await self.voice_assistant_handle_stop_callback() + await self.voice_assistant_handle_stop_callback(abort) async def mock_voice_assistant_handle_audio(self, audio: bytes) -> None: """Mock voice assistant handle audio.""" assert self.voice_assistant_handle_audio_callback is not None await self.voice_assistant_handle_audio_callback(audio) + async def mock_voice_assistant_handle_announcement_finished( + self, finished: VoiceAssistantAnnounceFinished + ) -> None: + """Mock voice assistant 
handle announcement finished.""" + assert self.voice_assistant_handle_announcement_finished_callback is not None + await self.voice_assistant_handle_announcement_finished_callback(finished) + async def _mock_generic_device_entry( hass: HomeAssistant, @@ -378,9 +406,12 @@ async def _mock_generic_device_entry( def _subscribe_home_assistant_states( on_state_sub: Callable[[str, str | None], None], + on_state_request: Callable[[str, str | None], None], ) -> None: """Subscribe to home assistant states.""" - mock_device.set_home_assistant_state_subscription_callback(on_state_sub) + mock_device.set_home_assistant_state_subscription_callback( + on_state_sub, on_state_request + ) def _subscribe_voice_assistant( *, @@ -388,7 +419,7 @@ async def _mock_generic_device_entry( [str, int, VoiceAssistantAudioSettings, str | None], Coroutine[Any, Any, int | None], ], - handle_stop: Callable[[], Coroutine[Any, Any, None]], + handle_stop: Callable[[bool], Coroutine[Any, Any, None]], handle_audio: ( Callable[ [bytes], @@ -396,10 +427,17 @@ async def _mock_generic_device_entry( ] | None ) = None, + handle_announcement_finished: ( + Callable[ + [VoiceAssistantAnnounceFinished], + Coroutine[Any, Any, None], + ] + | None + ) = None, ) -> Callable[[], None]: """Subscribe to voice assistant.""" mock_device.set_subscribe_voice_assistant_callbacks( - handle_start, handle_stop, handle_audio + handle_start, handle_stop, handle_audio, handle_announcement_finished ) def unsub(): @@ -613,57 +651,3 @@ async def mock_esphome_device( ) return _mock_device - - -@pytest.fixture -def mock_voice_assistant_api_pipeline() -> VoiceAssistantAPIPipeline: - """Return the API Pipeline factory.""" - mock_pipeline = Mock(spec=VoiceAssistantAPIPipeline) - - def mock_constructor( - hass: HomeAssistant, - entry_data: RuntimeEntryData, - handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], - handle_finished: Callable[[], None], - api_client: APIClient, - ): - """Fake the constructor.""" - mock_pipeline.hass = hass - mock_pipeline.entry_data = entry_data - mock_pipeline.handle_event = handle_event - mock_pipeline.handle_finished = handle_finished - mock_pipeline.api_client = api_client - return mock_pipeline - - mock_pipeline.side_effect = mock_constructor - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantAPIPipeline", - new=mock_pipeline, - ): - yield mock_pipeline - - -@pytest.fixture -def mock_voice_assistant_udp_pipeline() -> VoiceAssistantUDPPipeline: - """Return the API Pipeline factory.""" - mock_pipeline = Mock(spec=VoiceAssistantUDPPipeline) - - def mock_constructor( - hass: HomeAssistant, - entry_data: RuntimeEntryData, - handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], - handle_finished: Callable[[], None], - ): - """Fake the constructor.""" - mock_pipeline.hass = hass - mock_pipeline.entry_data = entry_data - mock_pipeline.handle_event = handle_event - mock_pipeline.handle_finished = handle_finished - return mock_pipeline - - mock_pipeline.side_effect = mock_constructor - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantUDPPipeline", - new=mock_pipeline, - ): - yield mock_pipeline diff --git a/tests/components/esphome/snapshots/test_diagnostics.ambr b/tests/components/esphome/snapshots/test_diagnostics.ambr index 0d2f0e60b82..8f1711e829e 100644 --- a/tests/components/esphome/snapshots/test_diagnostics.ambr +++ b/tests/components/esphome/snapshots/test_diagnostics.ambr @@ -10,6 +10,8 @@ 'port': 6053, }), 'disabled_by': 
None, + 'discovery_keys': dict({ + }), 'domain': 'esphome', 'entry_id': '08d821dc059cf4f645cb024d32c8e708', 'minor_version': 1, @@ -18,6 +20,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'ESPHome Device', 'unique_id': '11:22:33:44:55:aa', 'version': 1, diff --git a/tests/components/esphome/test_alarm_control_panel.py b/tests/components/esphome/test_alarm_control_panel.py index af717ac1b49..a3bfc72f3e2 100644 --- a/tests/components/esphome/test_alarm_control_panel.py +++ b/tests/components/esphome/test_alarm_control_panel.py @@ -4,9 +4,9 @@ from unittest.mock import call from aioesphomeapi import ( AlarmControlPanelCommand, - AlarmControlPanelEntityState, + AlarmControlPanelEntityState as ESPHomeAlarmEntityState, AlarmControlPanelInfo, - AlarmControlPanelState, + AlarmControlPanelState as ESPHomeAlarmState, APIClient, ) @@ -20,9 +20,10 @@ from homeassistant.components.alarm_control_panel import ( SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, + AlarmControlPanelState, ) from homeassistant.components.esphome.alarm_control_panel import EspHomeACPFeatures -from homeassistant.const import ATTR_ENTITY_ID, STATE_ALARM_ARMED_AWAY, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant @@ -48,9 +49,7 @@ async def test_generic_alarm_control_panel_requires_code( requires_code_to_arm=True, ) ] - states = [ - AlarmControlPanelEntityState(key=1, state=AlarmControlPanelState.ARMED_AWAY) - ] + states = [ESPHomeAlarmEntityState(key=1, state=ESPHomeAlarmState.ARMED_AWAY)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -60,7 +59,7 @@ async def test_generic_alarm_control_panel_requires_code( ) state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") assert state is not None - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY await hass.services.async_call( ALARM_CONTROL_PANEL_DOMAIN, @@ -183,9 +182,7 @@ async def test_generic_alarm_control_panel_no_code( requires_code_to_arm=False, ) ] - states = [ - AlarmControlPanelEntityState(key=1, state=AlarmControlPanelState.ARMED_AWAY) - ] + states = [ESPHomeAlarmEntityState(key=1, state=ESPHomeAlarmState.ARMED_AWAY)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -195,7 +192,7 @@ async def test_generic_alarm_control_panel_no_code( ) state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") assert state is not None - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY await hass.services.async_call( ALARM_CONTROL_PANEL_DOMAIN, diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py new file mode 100644 index 00000000000..5ca333df1e2 --- /dev/null +++ b/tests/components/esphome/test_assist_satellite.py @@ -0,0 +1,1670 @@ +"""Test ESPHome voice assistant server.""" + +import asyncio +from collections.abc import Awaitable, Callable +from dataclasses import replace +import io +import socket +from unittest.mock import ANY, AsyncMock, Mock, patch +import wave + +from aioesphomeapi import ( + APIClient, + EntityInfo, + EntityState, + MediaPlayerFormatPurpose, + MediaPlayerInfo, + MediaPlayerSupportedFormat, + UserService, + VoiceAssistantAnnounceFinished, + VoiceAssistantAudioSettings, + VoiceAssistantCommandFlag, + VoiceAssistantEventType, + 
VoiceAssistantFeature, + VoiceAssistantTimerEventType, +) +import pytest + +from homeassistant.components import assist_satellite, tts +from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType +from homeassistant.components.assist_satellite import ( + AssistSatelliteConfiguration, + AssistSatelliteEntity, + AssistSatelliteEntityFeature, + AssistSatelliteWakeWord, +) + +# pylint: disable-next=hass-component-root-import +from homeassistant.components.assist_satellite.entity import AssistSatelliteState +from homeassistant.components.esphome import DOMAIN +from homeassistant.components.esphome.assist_satellite import ( + EsphomeAssistSatellite, + VoiceAssistantUDPServer, +) +from homeassistant.components.media_source import PlayMedia +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, intent as intent_helper +import homeassistant.helpers.device_registry as dr +from homeassistant.helpers.entity_component import EntityComponent + +from .conftest import MockESPHomeDevice + + +def get_satellite_entity( + hass: HomeAssistant, mac_address: str +) -> EsphomeAssistSatellite | None: + """Get the satellite entity for a device.""" + ent_reg = er.async_get(hass) + satellite_entity_id = ent_reg.async_get_entity_id( + Platform.ASSIST_SATELLITE, DOMAIN, f"{mac_address}-assist_satellite" + ) + if satellite_entity_id is None: + return None + assert satellite_entity_id.endswith("_assist_satellite") + + component: EntityComponent[AssistSatelliteEntity] = hass.data[ + assist_satellite.DOMAIN + ] + if (entity := component.get_entity(satellite_entity_id)) is not None: + assert isinstance(entity, EsphomeAssistSatellite) + return entity + + return None + + +@pytest.fixture +def mock_wav() -> bytes: + """Return test WAV audio.""" + with io.BytesIO() as wav_io: + with wave.open(wav_io, "wb") as wav_file: + wav_file.setframerate(16000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(b"test-wav") + + return wav_io.getvalue() + + +async def test_no_satellite_without_voice_assistant( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that an assist satellite entity is not created if a voice assistant is not present.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={}, + ) + await hass.async_block_till_done() + + # No satellite entity should be created + assert get_satellite_entity(hass, mock_device.device_info.mac_address) is None + + +async def test_pipeline_api_audio( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_wav: bytes, +) -> None: + """Test a complete pipeline run with API audio (over the TCP connection).""" + conversation_id = "test-conversation-id" + media_url = "http://test.url" + media_id = "test-media-id" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": 
VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + }, + ) + await hass.async_block_till_done() + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + # Block TTS streaming until we're ready. + # This makes it easier to verify the order of pipeline events. + stream_tts_audio_ready = asyncio.Event() + original_stream_tts_audio = satellite._stream_tts_audio + + async def _stream_tts_audio(*args, **kwargs): + await stream_tts_audio_ready.wait() + await original_stream_tts_audio(*args, **kwargs) + + async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): + assert device_id == dev.id + + stt_stream = kwargs["stt_stream"] + + chunks = [chunk async for chunk in stt_stream] + + # Verify test API audio + assert chunks == [b"test-mic"] + + event_callback = kwargs["event_callback"] + + # Test unknown event type + event_callback( + PipelineEvent( + type="unknown-event", + data={}, + ) + ) + + mock_client.send_voice_assistant_event.assert_not_called() + + # Test error event + event_callback( + PipelineEvent( + type=PipelineEventType.ERROR, + data={"code": "test-error-code", "message": "test-error-message"}, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, + {"code": "test-error-code", "message": "test-error-message"}, + ) + + # Wake word + assert satellite.state == AssistSatelliteState.IDLE + + event_callback( + PipelineEvent( + type=PipelineEventType.WAKE_WORD_START, + data={ + "entity_id": "test-wake-word-entity-id", + "metadata": {}, + "timeout": 0, + }, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_START, + {}, + ) + + # Test no wake word detected + event_callback( + PipelineEvent( + type=PipelineEventType.WAKE_WORD_END, data={"wake_word_output": {}} + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, + {"code": "no_wake_word", "message": "No wake word detected"}, + ) + + # Correct wake word detection + event_callback( + PipelineEvent( + type=PipelineEventType.WAKE_WORD_END, + data={"wake_word_output": {"wake_word_phrase": "test-wake-word"}}, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END, + {}, + ) + + # STT + event_callback( + PipelineEvent( + type=PipelineEventType.STT_START, + data={"engine": "test-stt-engine", "metadata": {}}, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_STT_START, + {}, + ) + assert satellite.state == AssistSatelliteState.LISTENING + + event_callback( + PipelineEvent( + type=PipelineEventType.STT_END, + data={"stt_output": {"text": "test-stt-text"}}, + ) + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_STT_END, + {"text": "test-stt-text"}, + ) + + # Intent + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_START, + data={ + "engine": "test-intent-engine", + "language": hass.config.language, + "intent_input": "test-intent-text", + "conversation_id": conversation_id, + "device_id": device_id, + }, + ) + ) 
+ + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START, + {}, + ) + assert satellite.state == AssistSatelliteState.PROCESSING + + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_END, + data={"intent_output": {"conversation_id": conversation_id}}, + ) + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END, + {"conversation_id": conversation_id}, + ) + + # TTS + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_START, + data={ + "engine": "test-stt-engine", + "language": hass.config.language, + "voice": "test-voice", + "tts_input": "test-tts-text", + }, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START, + {"text": "test-tts-text"}, + ) + assert satellite.state == AssistSatelliteState.RESPONDING + + # Should return mock_wav audio + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={"tts_output": {"url": media_url, "media_id": media_id}}, + ) + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END, + {"url": media_url}, + ) + + event_callback(PipelineEvent(type=PipelineEventType.RUN_END)) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_RUN_END, + {}, + ) + + # Allow TTS streaming to proceed + stream_tts_audio_ready.set() + + pipeline_finished = asyncio.Event() + original_handle_pipeline_finished = satellite.handle_pipeline_finished + + def handle_pipeline_finished(): + original_handle_pipeline_finished() + pipeline_finished.set() + + async def async_get_media_source_audio( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + return ("wav", mock_wav) + + tts_finished = asyncio.Event() + original_tts_response_finished = satellite.tts_response_finished + + def tts_response_finished(): + original_tts_response_finished() + tts_finished.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch( + "homeassistant.components.tts.async_get_media_source_audio", + new=async_get_media_source_audio, + ), + patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), + patch.object(satellite, "_stream_tts_audio", _stream_tts_audio), + patch.object(satellite, "tts_response_finished", tts_response_finished), + ): + # Should be cleared at pipeline start + satellite._audio_queue.put_nowait(b"leftover-data") + + # Should be cancelled at pipeline start + mock_tts_streaming_task = Mock() + satellite._tts_streaming_task = mock_tts_streaming_task + + async with asyncio.timeout(1): + await satellite.handle_pipeline_start( + conversation_id=conversation_id, + flags=VoiceAssistantCommandFlag.USE_WAKE_WORD, + audio_settings=VoiceAssistantAudioSettings(), + wake_word_phrase="", + ) + mock_tts_streaming_task.cancel.assert_called_once() + await satellite.handle_audio(b"test-mic") + await satellite.handle_pipeline_stop(abort=False) + await pipeline_finished.wait() + + await tts_finished.wait() + + # Verify TTS streaming events. + # These are definitely the last two events because we blocked TTS streaming + # until after RUN_END above. 
+ assert mock_client.send_voice_assistant_event.call_args_list[-2].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, + {}, + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, + {}, + ) + + # Verify TTS WAV audio chunk came through + mock_client.send_voice_assistant_audio.assert_called_once_with(b"test-wav") + + +@pytest.mark.usefixtures("socket_enabled") +async def test_pipeline_udp_audio( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_wav: bytes, +) -> None: + """Test a complete pipeline run with legacy UDP audio. + + This test is not as comprehensive as test_pipeline_api_audio since we're + mainly focused on the UDP server. + """ + conversation_id = "test-conversation-id" + media_url = "http://test.url" + media_id = "test-media-id" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + mic_audio_event = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): + stt_stream = kwargs["stt_stream"] + + chunks = [] + async for chunk in stt_stream: + chunks.append(chunk) + mic_audio_event.set() + + # Verify test UDP audio + assert chunks == [b"test-mic"] + + event_callback = kwargs["event_callback"] + + # STT + event_callback( + PipelineEvent( + type=PipelineEventType.STT_START, + data={"engine": "test-stt-engine", "metadata": {}}, + ) + ) + + event_callback( + PipelineEvent( + type=PipelineEventType.STT_END, + data={"stt_output": {"text": "test-stt-text"}}, + ) + ) + + # Intent + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_START, + data={ + "engine": "test-intent-engine", + "language": hass.config.language, + "intent_input": "test-intent-text", + "conversation_id": conversation_id, + "device_id": device_id, + }, + ) + ) + + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_END, + data={"intent_output": {"conversation_id": conversation_id}}, + ) + ) + + # TTS + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_START, + data={ + "engine": "test-stt-engine", + "language": hass.config.language, + "voice": "test-voice", + "tts_input": "test-tts-text", + }, + ) + ) + + # Should return mock_wav audio + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={"tts_output": {"url": media_url, "media_id": media_id}}, + ) + ) + + event_callback(PipelineEvent(type=PipelineEventType.RUN_END)) + + pipeline_finished = asyncio.Event() + original_handle_pipeline_finished = satellite.handle_pipeline_finished + + def handle_pipeline_finished(): + original_handle_pipeline_finished() + pipeline_finished.set() + + async def async_get_media_source_audio( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + return ("wav", mock_wav) + + tts_finished = asyncio.Event() + original_tts_response_finished = satellite.tts_response_finished + + def tts_response_finished(): + original_tts_response_finished() + tts_finished.set() + + class TestProtocol(asyncio.DatagramProtocol): + def 
__init__(self) -> None: + self.transport = None + self.data_received: list[bytes] = [] + + def connection_made(self, transport): + self.transport = transport + + def datagram_received(self, data: bytes, addr): + self.data_received.append(data) + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch( + "homeassistant.components.tts.async_get_media_source_audio", + new=async_get_media_source_audio, + ), + patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), + patch.object(satellite, "tts_response_finished", tts_response_finished), + ): + async with asyncio.timeout(1): + port = await satellite.handle_pipeline_start( + conversation_id=conversation_id, + flags=VoiceAssistantCommandFlag(0), # stt + audio_settings=VoiceAssistantAudioSettings(), + wake_word_phrase="", + ) + assert (port is not None) and (port > 0) + + ( + transport, + protocol, + ) = await asyncio.get_running_loop().create_datagram_endpoint( + TestProtocol, remote_addr=("127.0.0.1", port) + ) + assert isinstance(protocol, TestProtocol) + + # Send audio over UDP + transport.sendto(b"test-mic") + + # Wait for audio chunk to be delivered + await mic_audio_event.wait() + + await satellite.handle_pipeline_stop(abort=False) + await pipeline_finished.wait() + + await tts_finished.wait() + + # Verify TTS audio (from UDP) + assert protocol.data_received == [b"test-wav"] + + # Check that UDP server was stopped + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(False) + sock.bind(("", port)) # will fail if UDP server is still running + sock.close() + + +async def test_udp_errors() -> None: + """Test UDP protocol error conditions.""" + audio_queue: asyncio.Queue[bytes | None] = asyncio.Queue() + protocol = VoiceAssistantUDPServer(audio_queue) + + protocol.datagram_received(b"test", ("", 0)) + assert audio_queue.qsize() == 1 + assert (await audio_queue.get()) == b"test" + + # None will stop the pipeline + protocol.error_received(RuntimeError()) + assert audio_queue.qsize() == 1 + assert (await audio_queue.get()) is None + + # No transport + assert protocol.transport is None + protocol.send_audio_bytes(b"test") + + # No remote address + protocol.transport = Mock() + protocol.remote_addr = None + protocol.send_audio_bytes(b"test") + protocol.transport.sendto.assert_not_called() + + +async def test_pipeline_media_player( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_wav: bytes, +) -> None: + """Test a complete pipeline run with the TTS response sent to a media player instead of a speaker. + + This test is not as comprehensive as test_pipeline_api_audio since we're + mainly focused on tts_response_finished getting automatically called. 
+ """ + conversation_id = "test-conversation-id" + media_url = "http://test.url" + media_id = "test-media-id" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.API_AUDIO + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): + stt_stream = kwargs["stt_stream"] + + async for _chunk in stt_stream: + break + + event_callback = kwargs["event_callback"] + + # STT + event_callback( + PipelineEvent( + type=PipelineEventType.STT_START, + data={"engine": "test-stt-engine", "metadata": {}}, + ) + ) + + event_callback( + PipelineEvent( + type=PipelineEventType.STT_END, + data={"stt_output": {"text": "test-stt-text"}}, + ) + ) + + # Intent + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_START, + data={ + "engine": "test-intent-engine", + "language": hass.config.language, + "intent_input": "test-intent-text", + "conversation_id": conversation_id, + "device_id": device_id, + }, + ) + ) + + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_END, + data={"intent_output": {"conversation_id": conversation_id}}, + ) + ) + + # TTS + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_START, + data={ + "engine": "test-stt-engine", + "language": hass.config.language, + "voice": "test-voice", + "tts_input": "test-tts-text", + }, + ) + ) + + # Should return mock_wav audio + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={"tts_output": {"url": media_url, "media_id": media_id}}, + ) + ) + + event_callback(PipelineEvent(type=PipelineEventType.RUN_END)) + + pipeline_finished = asyncio.Event() + original_handle_pipeline_finished = satellite.handle_pipeline_finished + + def handle_pipeline_finished(): + original_handle_pipeline_finished() + pipeline_finished.set() + + async def async_get_media_source_audio( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + return ("wav", mock_wav) + + tts_finished = asyncio.Event() + original_tts_response_finished = satellite.tts_response_finished + + def tts_response_finished(): + original_tts_response_finished() + tts_finished.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch( + "homeassistant.components.tts.async_get_media_source_audio", + new=async_get_media_source_audio, + ), + patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), + patch.object(satellite, "tts_response_finished", tts_response_finished), + ): + async with asyncio.timeout(1): + await satellite.handle_pipeline_start( + conversation_id=conversation_id, + flags=VoiceAssistantCommandFlag(0), # stt + audio_settings=VoiceAssistantAudioSettings(), + wake_word_phrase="", + ) + + await satellite.handle_pipeline_stop(abort=False) + await pipeline_finished.wait() + + assert satellite.state == AssistSatelliteState.RESPONDING + + # Will trigger tts_response_finished + await mock_device.mock_voice_assistant_handle_announcement_finished( + VoiceAssistantAnnounceFinished(success=True) + ) + await tts_finished.wait() + + assert satellite.state == AssistSatelliteState.IDLE + + +async def test_timer_events( + hass: 
HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that injecting timer events results in the correct api client calls.""" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.TIMERS + }, + ) + await hass.async_block_till_done() + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + total_seconds = (1 * 60 * 60) + (2 * 60) + 3 + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_START_TIMER, + { + "name": {"value": "test timer"}, + "hours": {"value": 1}, + "minutes": {"value": 2}, + "seconds": {"value": 3}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_called_with( + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED, + ANY, + "test timer", + total_seconds, + total_seconds, + True, + ) + + # Increase timer beyond original time and check total_seconds has increased + mock_client.send_voice_assistant_timer_event.reset_mock() + + total_seconds += 5 * 60 + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_INCREASE_TIMER, + { + "name": {"value": "test timer"}, + "minutes": {"value": 5}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_called_with( + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED, + ANY, + "test timer", + total_seconds, + ANY, + True, + ) + + +async def test_unknown_timer_event( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that unknown (new) timer event types do not result in api calls.""" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.TIMERS + }, + ) + await hass.async_block_till_done() + assert mock_device.entry.unique_id is not None + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + assert dev is not None + + with patch( + "homeassistant.components.esphome.assist_satellite._TIMER_EVENT_TYPES.from_hass", + side_effect=KeyError, + ): + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_START_TIMER, + { + "name": {"value": "test timer"}, + "hours": {"value": 1}, + "minutes": {"value": 2}, + "seconds": {"value": 3}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_not_called() + + +async def test_streaming_tts_errors( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_wav: bytes, +) -> None: + """Test error conditions for _stream_tts_audio function.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + 
"voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + # Should not stream if not running + satellite._is_running = False + await satellite._stream_tts_audio("test-media-id") + mock_client.send_voice_assistant_audio.assert_not_called() + satellite._is_running = True + + # Should only stream WAV + async def get_mp3( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + return ("mp3", b"") + + with patch( + "homeassistant.components.tts.async_get_media_source_audio", new=get_mp3 + ): + await satellite._stream_tts_audio("test-media-id") + mock_client.send_voice_assistant_audio.assert_not_called() + + # Needs to be the correct sample rate, etc. + async def get_bad_wav( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + with io.BytesIO() as wav_io: + with wave.open(wav_io, "wb") as wav_file: + wav_file.setframerate(48000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(b"test-wav") + + return ("wav", wav_io.getvalue()) + + with patch( + "homeassistant.components.tts.async_get_media_source_audio", new=get_bad_wav + ): + await satellite._stream_tts_audio("test-media-id") + mock_client.send_voice_assistant_audio.assert_not_called() + + # Check that TTS_STREAM_* events still get sent after cancel + media_fetched = asyncio.Event() + + async def get_slow_wav( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + media_fetched.set() + await asyncio.sleep(1) + return ("wav", mock_wav) + + mock_client.send_voice_assistant_event.reset_mock() + with patch( + "homeassistant.components.tts.async_get_media_source_audio", new=get_slow_wav + ): + task = asyncio.create_task(satellite._stream_tts_audio("test-media-id")) + async with asyncio.timeout(1): + # Wait for media to be fetched + await media_fetched.wait() + + # Cancel task + task.cancel() + await task + + # No audio should have gone out + mock_client.send_voice_assistant_audio.assert_not_called() + assert len(mock_client.send_voice_assistant_event.call_args_list) == 2 + + # The TTS_STREAM_* events should have gone out + assert mock_client.send_voice_assistant_event.call_args_list[-2].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, + {}, + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, + {}, + ) + + +async def test_tts_format_from_media_player( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that the text-to-speech format is pulled from the first media player.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[ + MediaPlayerInfo( + object_id="mymedia_player", + key=1, + name="my media_player", + unique_id="my_media_player", + supports_pause=True, + supported_formats=[ + MediaPlayerSupportedFormat( + format="flac", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.DEFAULT, + sample_bytes=2, + ), + # This is the format that should be used for tts + MediaPlayerSupportedFormat( + format="mp3", + sample_rate=22050, + num_channels=1, + purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, + sample_bytes=2, + ), + ], + ) + ], + user_service=[], + 
states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + with patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + ) as mock_pipeline_from_audio_stream: + await satellite.handle_pipeline_start( + conversation_id="", + flags=0, + audio_settings=VoiceAssistantAudioSettings(), + wake_word_phrase=None, + ) + + mock_pipeline_from_audio_stream.assert_called_once() + kwargs = mock_pipeline_from_audio_stream.call_args_list[0].kwargs + + # Should be ANNOUNCEMENT format from media player + assert kwargs.get("tts_audio_output") == { + tts.ATTR_PREFERRED_FORMAT: "mp3", + tts.ATTR_PREFERRED_SAMPLE_RATE: 22050, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + } + + +async def test_tts_minimal_format_from_media_player( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test text-to-speech format when media player only specifies the codec.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[ + MediaPlayerInfo( + object_id="mymedia_player", + key=1, + name="my media_player", + unique_id="my_media_player", + supports_pause=True, + supported_formats=[ + MediaPlayerSupportedFormat( + format="flac", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.DEFAULT, + sample_bytes=2, + ), + # This is the format that should be used for tts + MediaPlayerSupportedFormat( + format="mp3", + sample_rate=0, # source rate + num_channels=0, # source channels + purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, + sample_bytes=0, # source width + ), + ], + ) + ], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + with patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + ) as mock_pipeline_from_audio_stream: + await satellite.handle_pipeline_start( + conversation_id="", + flags=0, + audio_settings=VoiceAssistantAudioSettings(), + wake_word_phrase=None, + ) + + mock_pipeline_from_audio_stream.assert_called_once() + kwargs = mock_pipeline_from_audio_stream.call_args_list[0].kwargs + + # Should be ANNOUNCEMENT format from media player + assert kwargs.get("tts_audio_output") == { + tts.ATTR_PREFERRED_FORMAT: "mp3", + } + + +async def test_announce_supported_features( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that the announce supported feature is set by flags.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + assert not (satellite.supported_features & 
AssistSatelliteEntityFeature.ANNOUNCE) + + +async def test_announce_message( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test announcement with message.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, timeout: float, text: str + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "https://www.home-assistant.io/resolved.mp3" + assert text == "test-text" + + done.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.media_source.async_resolve_media", + return_value=PlayMedia( + url="https://www.home-assistant.io/resolved.mp3", + mime_type="audio/mp3", + ), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "announce", + {"entity_id": satellite.entity_id, "message": "test-text"}, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + +async def test_announce_media_id( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + device_registry: dr.DeviceRegistry, +) -> None: + """Test announcement with media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[ + MediaPlayerInfo( + object_id="mymedia_player", + key=1, + name="my media_player", + unique_id="my_media_player", + supports_pause=True, + supported_formats=[ + MediaPlayerSupportedFormat( + format="flac", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, + sample_bytes=2, + ), + ], + ) + ], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, timeout: float, text: str + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "https://www.home-assistant.io/proxied.flac" + + done.set() + + with ( + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + 
new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.esphome.assist_satellite.async_create_proxy_url", + return_value="https://www.home-assistant.io/proxied.flac", + ) as mock_async_create_proxy_url, + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "announce", + { + "entity_id": satellite.entity_id, + "media_id": "https://www.home-assistant.io/resolved.mp3", + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + mock_async_create_proxy_url.assert_called_once_with( + hass, + dev.id, + "https://www.home-assistant.io/resolved.mp3", + media_format="flac", + rate=48000, + channels=2, + width=2, + ) + + +async def test_satellite_unloaded_on_disconnect( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that the assist satellite platform is unloaded on disconnect.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + state = hass.states.get(satellite.entity_id) + assert state is not None + assert state.state != STATE_UNAVAILABLE + + # Device will be unavailable after disconnect + await mock_device.mock_disconnect(True) + + state = hass.states.get(satellite.entity_id) + assert state is not None + assert state.state == STATE_UNAVAILABLE + + +async def test_pipeline_abort( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test aborting a pipeline (no further processing).""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.API_AUDIO + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + chunks = [] + chunk_received = asyncio.Event() + pipeline_aborted = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, **kwargs): + stt_stream = kwargs["stt_stream"] + + try: + async for chunk in stt_stream: + chunks.append(chunk) + chunk_received.set() + except asyncio.CancelledError: + # Aborting cancels the pipeline task + pipeline_aborted.set() + raise + + pipeline_finished = asyncio.Event() + original_handle_pipeline_finished = satellite.handle_pipeline_finished + + def handle_pipeline_finished(): + original_handle_pipeline_finished() + pipeline_finished.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), + ): + async with asyncio.timeout(1): + await satellite.handle_pipeline_start( + conversation_id="", + flags=VoiceAssistantCommandFlag(0), # stt + audio_settings=VoiceAssistantAudioSettings(), + wake_word_phrase="", + ) 
+ + await satellite.handle_audio(b"before-abort") + await chunk_received.wait() + + # Abort the pipeline, no further processing + await satellite.handle_pipeline_stop(abort=True) + await pipeline_aborted.wait() + + # This chunk should not make it into the STT stream + await satellite.handle_audio(b"after-abort") + await pipeline_finished.wait() + + # Only first chunk + assert chunks == [b"before-abort"] + + +async def test_get_set_configuration( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test getting and setting the satellite configuration.""" + expected_config = AssistSatelliteConfiguration( + available_wake_words=[ + AssistSatelliteWakeWord("1234", "okay nabu", ["en"]), + AssistSatelliteWakeWord("5678", "hey jarvis", ["en"]), + ], + active_wake_words=["1234"], + max_active_wake_words=1, + ) + mock_client.get_voice_assistant_configuration.return_value = expected_config + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + # HA should have been updated + actual_config = satellite.async_get_configuration() + assert actual_config == expected_config + + updated_config = replace(actual_config, active_wake_words=["5678"]) + mock_client.get_voice_assistant_configuration.return_value = updated_config + + # Change active wake words + await satellite.async_set_configuration(updated_config) + + # Set config method should be called + mock_client.set_voice_assistant_configuration.assert_called_once_with( + active_wake_words=["5678"] + ) + + # Device should have been updated + assert satellite.async_get_configuration() == updated_config + + +async def test_wake_word_select( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test wake word select.""" + device_config = AssistSatelliteConfiguration( + available_wake_words=[ + AssistSatelliteWakeWord("okay_nabu", "Okay Nabu", ["en"]), + AssistSatelliteWakeWord("hey_jarvis", "Hey Jarvis", ["en"]), + AssistSatelliteWakeWord("hey_mycroft", "Hey Mycroft", ["en"]), + ], + active_wake_words=["hey_jarvis"], + max_active_wake_words=1, + ) + mock_client.get_voice_assistant_configuration.return_value = device_config + + # Wrap mock so we can tell when it's done + configuration_set = asyncio.Event() + + async def wrapper(*args, **kwargs): + # Update device config because entity will request it after update + device_config.active_wake_words = kwargs["active_wake_words"] + configuration_set.set() + + mock_client.set_voice_assistant_configuration = AsyncMock(side_effect=wrapper) + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not 
None
+    assert satellite.async_get_configuration().active_wake_words == ["hey_jarvis"]
+
+    # Active wake word should be selected
+    state = hass.states.get("select.test_wake_word")
+    assert state is not None
+    assert state.state == "Hey Jarvis"
+
+    # Changing the select should set the active wake word
+    await hass.services.async_call(
+        SELECT_DOMAIN,
+        SERVICE_SELECT_OPTION,
+        {"entity_id": "select.test_wake_word", "option": "Okay Nabu"},
+        blocking=True,
+    )
+    await hass.async_block_till_done()
+
+    state = hass.states.get("select.test_wake_word")
+    assert state is not None
+    assert state.state == "Okay Nabu"
+
+    # Wait for device config to be updated
+    async with asyncio.timeout(1):
+        await configuration_set.wait()
+
+    # Satellite config should have been updated
+    assert satellite.async_get_configuration().active_wake_words == ["okay_nabu"]
+
+
+async def test_wake_word_select_no_wake_words(
+    hass: HomeAssistant,
+    mock_client: APIClient,
+    mock_esphome_device: Callable[
+        [APIClient, list[EntityInfo], list[UserService], list[EntityState]],
+        Awaitable[MockESPHomeDevice],
+    ],
+) -> None:
+    """Test wake word select is unavailable when there are no available wake words."""
+    device_config = AssistSatelliteConfiguration(
+        available_wake_words=[],
+        active_wake_words=[],
+        max_active_wake_words=1,
+    )
+    mock_client.get_voice_assistant_configuration.return_value = device_config
+
+    mock_device: MockESPHomeDevice = await mock_esphome_device(
+        mock_client=mock_client,
+        entity_info=[],
+        user_service=[],
+        states=[],
+        device_info={
+            "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT
+            | VoiceAssistantFeature.ANNOUNCE
+        },
+    )
+    await hass.async_block_till_done()
+
+    satellite = get_satellite_entity(hass, mock_device.device_info.mac_address)
+    assert satellite is not None
+    assert not satellite.async_get_configuration().available_wake_words
+
+    # Select should be unavailable
+    state = hass.states.get("select.test_wake_word")
+    assert state is not None
+    assert state.state == STATE_UNAVAILABLE
+
+
+async def test_wake_word_select_zero_max_wake_words(
+    hass: HomeAssistant,
+    mock_client: APIClient,
+    mock_esphome_device: Callable[
+        [APIClient, list[EntityInfo], list[UserService], list[EntityState]],
+        Awaitable[MockESPHomeDevice],
+    ],
+) -> None:
+    """Test wake word select is unavailable when max wake words is zero."""
+    device_config = AssistSatelliteConfiguration(
+        available_wake_words=[
+            AssistSatelliteWakeWord("okay_nabu", "Okay Nabu", ["en"]),
+        ],
+        active_wake_words=[],
+        max_active_wake_words=0,
+    )
+    mock_client.get_voice_assistant_configuration.return_value = device_config
+
+    mock_device: MockESPHomeDevice = await mock_esphome_device(
+        mock_client=mock_client,
+        entity_info=[],
+        user_service=[],
+        states=[],
+        device_info={
+            "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT
+            | VoiceAssistantFeature.ANNOUNCE
+        },
+    )
+    await hass.async_block_till_done()
+
+    satellite = get_satellite_entity(hass, mock_device.device_info.mac_address)
+    assert satellite is not None
+    assert satellite.async_get_configuration().max_active_wake_words == 0
+
+    # Select should be unavailable
+    state = hass.states.get("select.test_wake_word")
+    assert state is not None
+    assert state.state == STATE_UNAVAILABLE
+
+
+async def test_wake_word_select_no_active_wake_words(
+    hass: HomeAssistant,
+    mock_client: APIClient,
+    mock_esphome_device: Callable[
+        [APIClient, list[EntityInfo], list[UserService], list[EntityState]],
+        Awaitable[MockESPHomeDevice],
+    ],
+) -> None:
+ """Test wake word select uses first available wake word if none are active.""" + device_config = AssistSatelliteConfiguration( + available_wake_words=[ + AssistSatelliteWakeWord("okay_nabu", "Okay Nabu", ["en"]), + AssistSatelliteWakeWord("hey_jarvis", "Hey Jarvis", ["en"]), + ], + active_wake_words=[], + max_active_wake_words=1, + ) + mock_client.get_voice_assistant_configuration.return_value = device_config + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + assert not satellite.async_get_configuration().active_wake_words + + # First available wake word should be selected + state = hass.states.get("select.test_wake_word") + assert state is not None + assert state.state == "Okay Nabu" diff --git a/tests/components/esphome/test_binary_sensor.py b/tests/components/esphome/test_binary_sensor.py index 3da8a54ff34..25d8b60f574 100644 --- a/tests/components/esphome/test_binary_sensor.py +++ b/tests/components/esphome/test_binary_sensor.py @@ -1,6 +1,7 @@ """Test ESPHome binary sensors.""" from collections.abc import Awaitable, Callable +from http import HTTPStatus from aioesphomeapi import ( APIClient, @@ -12,15 +13,20 @@ from aioesphomeapi import ( ) import pytest -from homeassistant.components.esphome import DomainData +from homeassistant.components.esphome import DOMAIN, DomainData +from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component from .conftest import MockESPHomeDevice from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_assist_in_progress( hass: HomeAssistant, mock_voice_assistant_v1_entry, @@ -44,6 +50,131 @@ async def test_assist_in_progress( assert state.state == "off" +async def test_assist_in_progress_disabled_by_default( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + mock_voice_assistant_v1_entry, +) -> None: + """Test assist in progress binary sensor is added disabled.""" + + assert not hass.states.get("binary_sensor.test_assist_in_progress") + entity_entry = entity_registry.async_get("binary_sensor.test_assist_in_progress") + assert entity_entry + assert entity_entry.disabled + assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + # Test no issue for disabled entity + assert len(issue_registry.issues) == 0 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_assist_in_progress_issue( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + mock_voice_assistant_v1_entry, +) -> None: + """Test assist in progress binary sensor.""" + + state = hass.states.get("binary_sensor.test_assist_in_progress") + assert state is not None + + entity_entry = entity_registry.async_get("binary_sensor.test_assist_in_progress") + issue = issue_registry.async_get_issue( + DOMAIN, 
f"assist_in_progress_deprecated_{entity_entry.id}" + ) + assert issue is not None + + # Test issue goes away after disabling the entity + entity_registry.async_update_entity( + "binary_sensor.test_assist_in_progress", + disabled_by=er.RegistryEntryDisabler.USER, + ) + await hass.async_block_till_done() + issue = issue_registry.async_get_issue( + DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" + ) + assert issue is None + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_assist_in_progress_repair_flow( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + mock_voice_assistant_v1_entry, +) -> None: + """Test assist in progress binary sensor deprecation issue flow.""" + + state = hass.states.get("binary_sensor.test_assist_in_progress") + assert state is not None + + entity_entry = entity_registry.async_get("binary_sensor.test_assist_in_progress") + assert entity_entry.disabled_by is None + issue = issue_registry.async_get_issue( + DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" + ) + assert issue is not None + assert issue.data == { + "entity_id": "binary_sensor.test_assist_in_progress", + "entity_uuid": entity_entry.id, + "integration_name": "ESPHome", + } + assert issue.translation_key == "assist_in_progress_deprecated" + assert issue.translation_placeholders == {"integration_name": "ESPHome"} + + assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) + await hass.async_block_till_done() + await hass.async_start() + + client = await hass_client() + + resp = await client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "data_schema": [], + "description_placeholders": { + "assist_satellite_domain": "assist_satellite", + "entity_id": "binary_sensor.test_assist_in_progress", + "integration_name": "ESPHome", + }, + "errors": None, + "flow_id": flow_id, + "handler": DOMAIN, + "last_step": None, + "preview": None, + "step_id": "confirm_disable_entity", + "type": "form", + } + + resp = await client.post(f"/api/repairs/issues/fix/{flow_id}") + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "description": None, + "description_placeholders": None, + "flow_id": flow_id, + "handler": DOMAIN, + "type": "create_entry", + } + + # Test the entity is disabled + entity_entry = entity_registry.async_get("binary_sensor.test_assist_in_progress") + assert entity_entry.disabled_by is er.RegistryEntryDisabler.USER + + @pytest.mark.parametrize( "binary_state", [(True, STATE_ON), (False, STATE_OFF), (None, STATE_UNKNOWN)] ) diff --git a/tests/components/esphome/test_camera.py b/tests/components/esphome/test_camera.py index c6a61cd18e8..87b86b039fd 100644 --- a/tests/components/esphome/test_camera.py +++ b/tests/components/esphome/test_camera.py @@ -5,13 +5,13 @@ from collections.abc import Awaitable, Callable from aioesphomeapi import ( APIClient, CameraInfo, - CameraState, + CameraState as ESPHomeCameraState, EntityInfo, EntityState, UserService, ) -from homeassistant.components.camera import STATE_IDLE +from homeassistant.components.camera import CameraState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -55,10 +55,10 @@ async def test_camera_single_image( ) state = 
hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == STATE_IDLE + assert state.state == CameraState.IDLE def _mock_camera_image(): - mock_device.set_state(CameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) + mock_device.set_state(ESPHomeCameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) mock_client.request_single_image = _mock_camera_image @@ -67,7 +67,7 @@ async def test_camera_single_image( await hass.async_block_till_done() state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == STATE_IDLE + assert state.state == CameraState.IDLE assert resp.status == 200 assert resp.content_type == "image/jpeg" @@ -103,7 +103,7 @@ async def test_camera_single_image_unavailable_before_requested( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == STATE_IDLE + assert state.state == CameraState.IDLE await mock_device.mock_disconnect(False) client = await hass_client() @@ -144,7 +144,7 @@ async def test_camera_single_image_unavailable_during_request( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == STATE_IDLE + assert state.state == CameraState.IDLE def _mock_camera_image(): hass.async_create_task(mock_device.mock_disconnect(False)) @@ -189,7 +189,7 @@ async def test_camera_stream( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == STATE_IDLE + assert state.state == CameraState.IDLE remaining_responses = 3 def _mock_camera_image(): @@ -197,7 +197,7 @@ async def test_camera_stream( if remaining_responses == 0: return remaining_responses -= 1 - mock_device.set_state(CameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) + mock_device.set_state(ESPHomeCameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) mock_client.request_image_stream = _mock_camera_image mock_client.request_single_image = _mock_camera_image @@ -207,7 +207,7 @@ async def test_camera_stream( await hass.async_block_till_done() state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == STATE_IDLE + assert state.state == CameraState.IDLE assert resp.status == 200 assert resp.content_type == "multipart/x-mixed-replace" @@ -249,7 +249,7 @@ async def test_camera_stream_unavailable( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == STATE_IDLE + assert state.state == CameraState.IDLE await mock_device.mock_disconnect(False) @@ -289,7 +289,7 @@ async def test_camera_stream_with_disconnection( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == STATE_IDLE + assert state.state == CameraState.IDLE remaining_responses = 3 def _mock_camera_image(): @@ -299,7 +299,7 @@ async def test_camera_stream_with_disconnection( if remaining_responses == 2: hass.async_create_task(mock_device.mock_disconnect(False)) remaining_responses -= 1 - mock_device.set_state(CameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) + mock_device.set_state(ESPHomeCameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) mock_client.request_image_stream = _mock_camera_image mock_client.request_single_image = _mock_camera_image diff --git a/tests/components/esphome/test_climate.py b/tests/components/esphome/test_climate.py index 4ec7fee6447..189b86fc5fd 100644 --- a/tests/components/esphome/test_climate.py +++ b/tests/components/esphome/test_climate.py @@ -13,6 +13,7 @@ from aioesphomeapi import ( ClimateState, ClimateSwingMode, ) +import pytest 
from syrupy import SnapshotAssertion from homeassistant.components.climate import ( @@ -41,6 +42,7 @@ from homeassistant.components.climate import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError async def test_climate_entity( @@ -54,7 +56,6 @@ async def test_climate_entity( name="my climate", unique_id="my_climate", supports_current_temperature=True, - supports_two_point_target_temperature=True, supports_action=True, visual_min_temperature=10.0, visual_max_temperature=30.0, @@ -134,14 +135,13 @@ async def test_climate_entity_with_step_and_two_point( assert state is not None assert state.state == HVACMode.COOL - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, - blocking=True, - ) - mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) - mock_client.climate_command.reset_mock() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, + blocking=True, + ) await hass.services.async_call( CLIMATE_DOMAIN, @@ -213,38 +213,34 @@ async def test_climate_entity_with_step_and_target_temp( assert state is not None assert state.state == HVACMode.COOL - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, - blocking=True, - ) - mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) - mock_client.climate_command.reset_mock() - await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_HVAC_MODE: HVACMode.AUTO, - ATTR_TARGET_TEMP_LOW: 20, - ATTR_TARGET_TEMP_HIGH: 30, + ATTR_TEMPERATURE: 25, }, blocking=True, ) mock_client.climate_command.assert_has_calls( - [ - call( - key=1, - mode=ClimateMode.AUTO, - target_temperature_low=20.0, - target_temperature_high=30.0, - ) - ] + [call(key=1, mode=ClimateMode.AUTO, target_temperature=25.0)] ) mock_client.climate_command.reset_mock() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: "climate.test_myclimate", + ATTR_HVAC_MODE: HVACMode.AUTO, + ATTR_TARGET_TEMP_LOW: 20, + ATTR_TARGET_TEMP_HIGH: 30, + }, + blocking=True, + ) + await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index 68af6665380..0a389969c78 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -27,10 +27,10 @@ from homeassistant.components.esphome.const import ( DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS, DOMAIN, ) -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.service_info.mqtt import MqttServiceInfo from . 
import VALID_NOISE_PSK @@ -798,14 +798,7 @@ async def test_reauth_initiation(hass: HomeAssistant, mock_client) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -821,14 +814,7 @@ async def test_reauth_confirm_valid( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") result = await hass.config_entries.flow.async_configure( @@ -875,14 +861,7 @@ async def test_reauth_fixed_via_dashboard( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -896,7 +875,7 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( hass: HomeAssistant, mock_client, mock_dashboard: dict[str, Any], - mock_config_entry, + mock_config_entry: MockConfigEntry, mock_setup_entry: None, ) -> None: """Test reauth fixed automatically via dashboard with password removed.""" @@ -918,14 +897,7 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "unique_id": mock_config_entry.unique_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -938,21 +910,14 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( async def test_reauth_fixed_via_remove_password( hass: HomeAssistant, mock_client, - mock_config_entry, + mock_config_entry: MockConfigEntry, mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test reauth fixed automatically by seeing password removed.""" mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "unique_id": mock_config_entry.unique_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -981,14 +946,7 @@ async def test_reauth_fixed_via_dashboard_at_confirm( mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - 
"entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM, result assert result["step_id"] == "reauth_confirm" @@ -1027,14 +985,7 @@ async def test_reauth_confirm_invalid( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) mock_client.device_info.side_effect = InvalidEncryptionKeyAPIError result = await hass.config_entries.flow.async_configure( @@ -1070,14 +1021,7 @@ async def test_reauth_confirm_invalid_with_unique_id( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) mock_client.device_info.side_effect = InvalidEncryptionKeyAPIError result = await hass.config_entries.flow.async_configure( @@ -1456,6 +1400,14 @@ async def test_discovery_mqtt_no_mac( await mqtt_discovery_test_abort(hass, "{}", "mqtt_missing_mac") +@pytest.mark.usefixtures("mock_zeroconf") +async def test_discovery_mqtt_empty_payload( + hass: HomeAssistant, mock_client, mock_setup_entry: None +) -> None: + """Test discovery aborted if MQTT payload is empty.""" + await mqtt_discovery_test_abort(hass, "", "mqtt_missing_payload") + + @pytest.mark.usefixtures("mock_zeroconf") async def test_discovery_mqtt_no_api( hass: HomeAssistant, mock_client, mock_setup_entry: None diff --git a/tests/components/esphome/test_cover.py b/tests/components/esphome/test_cover.py index b190d287198..4cfe91c6dea 100644 --- a/tests/components/esphome/test_cover.py +++ b/tests/components/esphome/test_cover.py @@ -7,7 +7,7 @@ from aioesphomeapi import ( APIClient, CoverInfo, CoverOperation, - CoverState, + CoverState as ESPHomeCoverState, EntityInfo, EntityState, UserService, @@ -26,10 +26,7 @@ from homeassistant.components.cover import ( SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -58,7 +55,7 @@ async def test_cover_entity( ) ] states = [ - CoverState( + ESPHomeCoverState( key=1, position=0.5, tilt=0.5, @@ -74,7 +71,7 @@ async def test_cover_entity( ) state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -142,28 +139,30 @@ async def test_cover_entity( mock_client.cover_command.reset_mock() mock_device.set_state( - CoverState(key=1, position=0.0, current_operation=CoverOperation.IDLE) + ESPHomeCoverState(key=1, position=0.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED mock_device.set_state( - CoverState(key=1, position=0.5, current_operation=CoverOperation.IS_CLOSING) + ESPHomeCoverState( + key=1, position=0.5, current_operation=CoverOperation.IS_CLOSING + ) ) await hass.async_block_till_done() state = 
hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING mock_device.set_state( - CoverState(key=1, position=1.0, current_operation=CoverOperation.IDLE) + ESPHomeCoverState(key=1, position=1.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async def test_cover_entity_without_position( @@ -187,7 +186,7 @@ async def test_cover_entity_without_position( ) ] states = [ - CoverState( + ESPHomeCoverState( key=1, position=0.5, tilt=0.5, @@ -203,6 +202,6 @@ async def test_cover_entity_without_position( ) state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert ATTR_CURRENT_TILT_POSITION not in state.attributes assert ATTR_CURRENT_POSITION not in state.attributes diff --git a/tests/components/esphome/test_dashboard.py b/tests/components/esphome/test_dashboard.py index da805eb2eee..1641804e458 100644 --- a/tests/components/esphome/test_dashboard.py +++ b/tests/components/esphome/test_dashboard.py @@ -6,7 +6,7 @@ from unittest.mock import patch from aioesphomeapi import DeviceInfo, InvalidAuthAPIError from homeassistant.components.esphome import CONF_NOISE_PSK, coordinator, dashboard -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -150,7 +150,7 @@ async def test_new_info_reload_config_entries( async def test_new_dashboard_fix_reauth( - hass: HomeAssistant, mock_client, mock_config_entry, mock_dashboard + hass: HomeAssistant, mock_client, mock_config_entry: MockConfigEntry, mock_dashboard ) -> None: """Test config entries waiting for reauth are triggered.""" mock_client.device_info.side_effect = ( @@ -162,14 +162,7 @@ async def test_new_dashboard_fix_reauth( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "unique_id": mock_config_entry.unique_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert len(mock_get_encryption_key.mock_calls) == 0 diff --git a/tests/components/esphome/test_diagnostics.py b/tests/components/esphome/test_diagnostics.py index b66b6d72fce..0beeae71df3 100644 --- a/tests/components/esphome/test_diagnostics.py +++ b/tests/components/esphome/test_diagnostics.py @@ -70,6 +70,7 @@ async def test_diagnostics_with_bluetooth( "port": 6053, }, "disabled_by": None, + "discovery_keys": {}, "domain": "esphome", "entry_id": ANY, "minor_version": 1, @@ -78,6 +79,7 @@ async def test_diagnostics_with_bluetooth( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "11:22:33:44:55:aa", "version": 1, diff --git a/tests/components/esphome/test_ffmpeg_proxy.py b/tests/components/esphome/test_ffmpeg_proxy.py new file mode 100644 index 00000000000..295d8d2fda9 --- /dev/null +++ 
b/tests/components/esphome/test_ffmpeg_proxy.py @@ -0,0 +1,334 @@ +"""Tests for ffmpeg proxy view.""" + +from collections.abc import Generator +from http import HTTPStatus +import io +import os +import tempfile +from unittest.mock import patch +from urllib.request import pathname2url +import wave + +from aiohttp import client_exceptions +import mutagen +import pytest + +from homeassistant.components import esphome +from homeassistant.components.esphome.ffmpeg_proxy import async_create_proxy_url +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.typing import ClientSessionGenerator + + +@pytest.fixture(name="wav_file_length") +def wav_file_length_fixture() -> int: + """Wanted length of temporary wave file.""" + return 1 + + +@pytest.fixture(name="wav_file") +def wav_file_fixture(wav_file_length: int) -> Generator[str]: + """Create a temporary file and fill it with 1s of silence.""" + with tempfile.NamedTemporaryFile(mode="wb+", suffix=".wav") as temp_file: + _write_silence(temp_file.name, wav_file_length) + yield temp_file.name + + +def _write_silence(filename: str, length: int) -> None: + """Write silence to a file.""" + with wave.open(filename, "wb") as wav_file: + wav_file.setframerate(16000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(bytes(16000 * 2 * length)) # length s + + +async def test_async_create_proxy_url(hass: HomeAssistant) -> None: + """Test that async_create_proxy_url returns the correct format.""" + assert await async_setup_component(hass, "esphome", {}) + + device_id = "test-device" + convert_id = "test-id" + media_format = "flac" + media_url = "http://127.0.0.1/test.mp3" + proxy_url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.{media_format}" + + with patch( + "homeassistant.components.esphome.ffmpeg_proxy.secrets.token_urlsafe", + return_value=convert_id, + ): + assert ( + async_create_proxy_url(hass, device_id, media_url, media_format) + == proxy_url + ) + + +async def test_proxy_view( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + wav_file: str, +) -> None: + """Test proxy HTTP view for converting audio.""" + device_id = "1234" + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + + wav_url = pathname2url(wav_file) + convert_id = "test-id" + url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.mp3" + + # Should fail because we haven't allowed the URL yet + req = await client.get(url) + assert req.status == HTTPStatus.NOT_FOUND + + # Allow the URL + with patch( + "homeassistant.components.esphome.ffmpeg_proxy.secrets.token_urlsafe", + return_value=convert_id, + ): + assert ( + async_create_proxy_url( + hass, device_id, wav_url, media_format="mp3", rate=22050, channels=2 + ) + == url + ) + + # Requesting the wrong media format should fail + wrong_url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.flac" + req = await client.get(wrong_url) + assert req.status == HTTPStatus.BAD_REQUEST + + # Correct URL + req = await client.get(url) + assert req.status == HTTPStatus.OK + + mp3_data = await req.content.read() + + # Verify conversion + with io.BytesIO(mp3_data) as mp3_io: + mp3_file = mutagen.File(mp3_io) + assert mp3_file.info.sample_rate == 22050 + assert mp3_file.info.channels == 2 + + # About a second, but not exact + assert round(mp3_file.info.length, 0) == 1 + + +async def test_ffmpeg_file_doesnt_exist( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + 
"""Test ffmpeg conversion with a file that doesn't exist.""" + device_id = "1234" + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + + # Try to convert a file that doesn't exist + url = async_create_proxy_url(hass, device_id, "missing-file", media_format="mp3") + req = await client.get(url) + + # The HTTP status is OK because the ffmpeg process started, but no data is + # returned. + assert req.status == HTTPStatus.OK + mp3_data = await req.content.read() + assert not mp3_data + + +async def test_lingering_process( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + wav_file: str, +) -> None: + """Test that a new request stops the old ffmpeg process.""" + device_id = "1234" + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + + wav_url = pathname2url(wav_file) + url1 = async_create_proxy_url( + hass, + device_id, + wav_url, + media_format="wav", + rate=22050, + channels=2, + width=2, + ) + + # First request will start ffmpeg + req1 = await client.get(url1) + assert req1.status == HTTPStatus.OK + + # Only read part of the data + await req1.content.readexactly(100) + + # Allow another URL + url2 = async_create_proxy_url( + hass, + device_id, + wav_url, + media_format="wav", + rate=22050, + channels=2, + width=2, + ) + + req2 = await client.get(url2) + assert req2.status == HTTPStatus.OK + + wav_data = await req2.content.read() + + # All of the data should be there because this is a new ffmpeg process + with io.BytesIO(wav_data) as wav_io, wave.open(wav_io, "rb") as received_wav_file: + # We can't use getnframes() here because the WAV header will be incorrect. + # WAV encoders usually go back and update the WAV header after all of + # the frames are written, but ffmpeg can't do that because we're + # streaming the data. + # So instead, we just read and count frames until we run out. 
+ num_frames = 0 + while chunk := received_wav_file.readframes(1024): + num_frames += len(chunk) // (2 * 2) # 2 channels, 16-bit samples + + assert num_frames == 22050 # 1s + + +@pytest.mark.parametrize("wav_file_length", [10]) +async def test_request_same_url_multiple_times( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + wav_file: str, +) -> None: + """Test that the ffmpeg process is restarted if the same URL is requested multiple times.""" + device_id = "1234" + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + + wav_url = pathname2url(wav_file) + url = async_create_proxy_url( + hass, + device_id, + wav_url, + media_format="wav", + rate=22050, + channels=2, + width=2, + ) + + # First request will start ffmpeg + req1 = await client.get(url) + assert req1.status == HTTPStatus.OK + + # Only read part of the data + await req1.content.readexactly(100) + + # Second request should restart ffmpeg + req2 = await client.get(url) + assert req2.status == HTTPStatus.OK + + wav_data = await req2.content.read() + + # All of the data should be there because this is a new ffmpeg process + with io.BytesIO(wav_data) as wav_io, wave.open(wav_io, "rb") as received_wav_file: + num_frames = 0 + while chunk := received_wav_file.readframes(1024): + num_frames += len(chunk) // (2 * 2) # 2 channels, 16-bit samples + + assert num_frames == 22050 * 10 # 10s + + +async def test_max_conversions_per_device( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test that each device has a maximum number of conversions (currently 2).""" + max_conversions = 2 + device_ids = ["1234", "5678"] + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + + with tempfile.TemporaryDirectory() as temp_dir: + wav_paths = [ + os.path.join(temp_dir, f"{i}.wav") for i in range(max_conversions + 1) + ] + for wav_path in wav_paths: + _write_silence(wav_path, 10) + + wav_urls = [pathname2url(p) for p in wav_paths] + + # Each device will have max + 1 conversions + device_urls = { + device_id: [ + async_create_proxy_url( + hass, + device_id, + wav_url, + media_format="wav", + rate=22050, + channels=2, + width=2, + ) + for wav_url in wav_urls + ] + for device_id in device_ids + } + + for urls in device_urls.values(): + # First URL should fail because it was overwritten by the others + req = await client.get(urls[0]) + assert req.status == HTTPStatus.BAD_REQUEST + + # All other URLs should succeed + for url in urls[1:]: + req = await client.get(url) + assert req.status == HTTPStatus.OK + + +async def test_abort_on_shutdown( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test we abort on Home Assistant shutdown.""" + device_id = "1234" + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + + with tempfile.NamedTemporaryFile(mode="wb+", suffix=".wav") as temp_file: + with wave.open(temp_file.name, "wb") as wav_file: + wav_file.setframerate(16000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(bytes(16000 * 2)) # 1s + + wav_url = pathname2url(temp_file.name) + convert_id = "test-id" + url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.mp3" + + wav_url = pathname2url(temp_file.name) + url = async_create_proxy_url( + hass, + device_id, + wav_url, + media_format="wav", + rate=22050, + channels=2, + width=2, + ) + + # Get URL and start reading + req = await client.get(url) + 
assert req.status == HTTPStatus.OK + initial_mp3_data = await req.content.read(4) + assert initial_mp3_data == b"RIFF" + + # Shut down Home Assistant + await hass.async_stop() + + with pytest.raises(client_exceptions.ClientPayloadError): + await req.content.read() diff --git a/tests/components/esphome/test_light.py b/tests/components/esphome/test_light.py index 2324c73b16f..8e4f37079d1 100644 --- a/tests/components/esphome/test_light.py +++ b/tests/components/esphome/test_light.py @@ -20,9 +20,7 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -676,7 +674,7 @@ async def test_light_rgb( color_mode=LightColorCapability.RGB | LightColorCapability.ON_OFF | LightColorCapability.BRIGHTNESS, - rgb=(pytest.approx(0.32941176470588235), 1.0, 0.0), + rgb=(pytest.approx(0.3333333333333333), 1.0, 0.0), brightness=pytest.approx(0.4980392156862745), ) ] @@ -814,7 +812,7 @@ async def test_light_rgbw( | LightColorCapability.ON_OFF | LightColorCapability.BRIGHTNESS, white=0, - rgb=(pytest.approx(0.32941176470588235), 1.0, 0.0), + rgb=(pytest.approx(0.3333333333333333), 1.0, 0.0), brightness=pytest.approx(0.4980392156862745), ) ] @@ -993,7 +991,7 @@ async def test_light_rgbww_with_cold_warm_white_support( | LightColorCapability.BRIGHTNESS, cold_white=0, warm_white=0, - rgb=(pytest.approx(0.32941176470588235), 1.0, 0.0), + rgb=(pytest.approx(0.3333333333333333), 1.0, 0.0), brightness=pytest.approx(0.4980392156862745), ) ] @@ -1226,7 +1224,7 @@ async def test_light_rgbww_without_cold_warm_white_support( | LightColorCapability.ON_OFF | LightColorCapability.BRIGHTNESS, white=0, - rgb=(pytest.approx(0.32941176470588235), 1.0, 0.0), + rgb=(pytest.approx(0.3333333333333333), 1.0, 0.0), brightness=pytest.approx(0.4980392156862745), ) ] @@ -1379,9 +1377,6 @@ async def test_light_color_temp( assert state.state == STATE_ON attributes = state.attributes - assert attributes[ATTR_MIN_MIREDS] == 153 - assert attributes[ATTR_MAX_MIREDS] == 370 - assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 await hass.services.async_call( @@ -1454,9 +1449,6 @@ async def test_light_color_temp_no_mireds_set( assert state.state == STATE_ON attributes = state.attributes - assert attributes[ATTR_MIN_MIREDS] is None - assert attributes[ATTR_MAX_MIREDS] is None - assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 0 assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 0 await hass.services.async_call( @@ -1558,8 +1550,6 @@ async def test_light_color_temp_legacy( assert attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.COLOR_TEMP] - assert attributes[ATTR_MIN_MIREDS] == 153 - assert attributes[ATTR_MAX_MIREDS] == 370 assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 diff --git a/tests/components/esphome/test_lock.py b/tests/components/esphome/test_lock.py index 82c24b59a2c..ae54b16d6e2 100644 --- a/tests/components/esphome/test_lock.py +++ b/tests/components/esphome/test_lock.py @@ -2,16 +2,20 @@ from unittest.mock import call -from aioesphomeapi import APIClient, LockCommand, LockEntityState, LockInfo, LockState +from aioesphomeapi import ( + APIClient, + LockCommand, + LockEntityState, + LockInfo, + LockState as ESPHomeLockState, +) from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, 
SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_LOCKED, - STATE_LOCKING, - STATE_UNLOCKING, + LockState, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -31,7 +35,7 @@ async def test_lock_entity_no_open( requires_code=False, ) ] - states = [LockEntityState(key=1, state=LockState.UNLOCKING)] + states = [LockEntityState(key=1, state=ESPHomeLockState.UNLOCKING)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -41,7 +45,7 @@ async def test_lock_entity_no_open( ) state = hass.states.get("lock.test_mylock") assert state is not None - assert state.state == STATE_UNLOCKING + assert state.state == LockState.UNLOCKING await hass.services.async_call( LOCK_DOMAIN, @@ -65,7 +69,7 @@ async def test_lock_entity_start_locked( unique_id="my_lock", ) ] - states = [LockEntityState(key=1, state=LockState.LOCKED)] + states = [LockEntityState(key=1, state=ESPHomeLockState.LOCKED)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -75,7 +79,7 @@ async def test_lock_entity_start_locked( ) state = hass.states.get("lock.test_mylock") assert state is not None - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED async def test_lock_entity_supports_open( @@ -92,7 +96,7 @@ async def test_lock_entity_supports_open( requires_code=True, ) ] - states = [LockEntityState(key=1, state=LockState.LOCKING)] + states = [LockEntityState(key=1, state=ESPHomeLockState.LOCKING)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -102,7 +106,7 @@ async def test_lock_entity_supports_open( ) state = hass.states.get("lock.test_mylock") assert state is not None - assert state.state == STATE_LOCKING + assert state.state == LockState.LOCKING await hass.services.async_call( LOCK_DOMAIN, diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 9d2a906466e..4b322c8744e 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -2,7 +2,7 @@ import asyncio from collections.abc import Awaitable, Callable -from unittest.mock import AsyncMock, call, patch +from unittest.mock import AsyncMock, call from aioesphomeapi import ( APIClient, @@ -17,7 +17,6 @@ from aioesphomeapi import ( UserService, UserServiceArg, UserServiceArgType, - VoiceAssistantFeature, ) import pytest @@ -29,10 +28,6 @@ from homeassistant.components.esphome.const import ( DOMAIN, STABLE_BLE_VERSION_STR, ) -from homeassistant.components.esphome.voice_assistant import ( - VoiceAssistantAPIPipeline, - VoiceAssistantUDPPipeline, -) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -44,7 +39,7 @@ from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, issue_registry as ir from homeassistant.setup import async_setup_component -from .conftest import _ONE_SECOND, MockESPHomeDevice +from .conftest import MockESPHomeDevice from tests.common import MockConfigEntry, async_capture_events, async_mock_service @@ -721,6 +716,34 @@ async def test_state_subscription( assert mock_client.send_home_assistant_state.mock_calls == [] +async def test_state_request( + mock_client: APIClient, + hass: HomeAssistant, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test ESPHome requests state change.""" + device: MockESPHomeDevice = await mock_esphome_device( + 
mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) + await hass.async_block_till_done() + hass.states.async_set("binary_sensor.test", "on", {"bool": True, "float": 3.0}) + device.mock_home_assistant_state_request("binary_sensor.test", None) + await hass.async_block_till_done() + assert mock_client.send_home_assistant_state.mock_calls == [ + call("binary_sensor.test", None, "on") + ] + mock_client.send_home_assistant_state.reset_mock() + hass.states.async_set("binary_sensor.test", "off", {"bool": False, "float": 5.0}) + await hass.async_block_till_done() + assert mock_client.send_home_assistant_state.mock_calls == [] + + async def test_debug_logging( mock_client: APIClient, hass: HomeAssistant, @@ -1186,102 +1209,3 @@ async def test_entry_missing_unique_id( await mock_esphome_device(mock_client=mock_client, mock_storage=True) await hass.async_block_till_done() assert entry.unique_id == "11:22:33:44:55:aa" - - -async def test_manager_voice_assistant_handlers_api( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], - caplog: pytest.LogCaptureFixture, - mock_voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the handlers are correctly executed in manager.py.""" - - device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.API_AUDIO - }, - ) - - await hass.async_block_till_done() - - with ( - patch( - "homeassistant.components.esphome.manager.VoiceAssistantAPIPipeline", - new=mock_voice_assistant_api_pipeline, - ), - ): - port: int | None = await device.mock_voice_assistant_handle_start( - "", 0, None, None - ) - - assert port == 0 - - port: int | None = await device.mock_voice_assistant_handle_start( - "", 0, None, None - ) - - assert "Previous Voice assistant pipeline was not stopped" in caplog.text - - await device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) - - mock_voice_assistant_api_pipeline.receive_audio_bytes.assert_called_with( - bytes(_ONE_SECOND) - ) - - mock_voice_assistant_api_pipeline.receive_audio_bytes.reset_mock() - - await device.mock_voice_assistant_handle_stop() - mock_voice_assistant_api_pipeline.handle_finished() - - await device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) - - mock_voice_assistant_api_pipeline.receive_audio_bytes.assert_not_called() - - -async def test_manager_voice_assistant_handlers_udp( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], - mock_voice_assistant_udp_pipeline: VoiceAssistantUDPPipeline, -) -> None: - """Test the handlers are correctly executed in manager.py.""" - - device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - }, - ) - - await hass.async_block_till_done() - - with ( - patch( - "homeassistant.components.esphome.manager.VoiceAssistantUDPPipeline", - new=mock_voice_assistant_udp_pipeline, - ), - ): - await device.mock_voice_assistant_handle_start("", 0, None, None) - - mock_voice_assistant_udp_pipeline.run_pipeline.assert_called() - - 
await device.mock_voice_assistant_handle_stop() - mock_voice_assistant_udp_pipeline.handle_finished() - - mock_voice_assistant_udp_pipeline.stop.assert_called() - mock_voice_assistant_udp_pipeline.close.assert_called() diff --git a/tests/components/esphome/test_media_player.py b/tests/components/esphome/test_media_player.py index 3879129ccb6..42b7e72a06e 100644 --- a/tests/components/esphome/test_media_player.py +++ b/tests/components/esphome/test_media_player.py @@ -1,13 +1,19 @@ """Test ESPHome media_players.""" +from collections.abc import Awaitable, Callable from unittest.mock import AsyncMock, Mock, call, patch from aioesphomeapi import ( APIClient, + EntityInfo, + EntityState, MediaPlayerCommand, MediaPlayerEntityState, + MediaPlayerFormatPurpose, MediaPlayerInfo, MediaPlayerState, + MediaPlayerSupportedFormat, + UserService, ) import pytest @@ -16,6 +22,7 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_ANNOUNCE, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_EXTRA, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, DOMAIN as MEDIA_PLAYER_DOMAIN, @@ -31,8 +38,11 @@ from homeassistant.components.media_player import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +import homeassistant.helpers.device_registry as dr from homeassistant.setup import async_setup_component +from .conftest import MockESPHomeDevice + from tests.common import mock_platform from tests.typing import WebSocketGenerator @@ -55,7 +65,7 @@ async def test_media_player_entity( key=1, volume=50, muted=True, state=MediaPlayerState.PAUSED ) ] - user_service = [] + user_service: list[UserService] = [] await mock_generic_device_entry( mock_client=mock_client, entity_info=entity_info, @@ -200,7 +210,7 @@ async def test_media_player_entity_with_source( key=1, volume=50, muted=True, state=MediaPlayerState.PLAYING ) ] - user_service = [] + user_service: list[UserService] = [] await mock_generic_device_entry( mock_client=mock_client, entity_info=entity_info, @@ -277,3 +287,150 @@ async def test_media_player_entity_with_source( mock_client.media_player_command.assert_has_calls( [call(1, media_url="media-source://tts?message=hello", announcement=True)] ) + + +async def test_media_player_proxy( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test a media_player entity with a proxy URL.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[ + MediaPlayerInfo( + object_id="mymedia_player", + key=1, + name="my media_player", + unique_id="my_media_player", + supports_pause=True, + supported_formats=[ + MediaPlayerSupportedFormat( + format="flac", + sample_rate=0, # source rate + num_channels=0, # source channels + purpose=MediaPlayerFormatPurpose.DEFAULT, + sample_bytes=0, # source width + ), + MediaPlayerSupportedFormat( + format="wav", + sample_rate=16000, + num_channels=1, + purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, + sample_bytes=2, + ), + MediaPlayerSupportedFormat( + format="mp3", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.DEFAULT, + ), + ], + ) + ], + user_service=[], + states=[ + MediaPlayerEntityState( + key=1, volume=50, muted=False, state=MediaPlayerState.PAUSED + ) + ], + ) + await hass.async_block_till_done() + dev = device_registry.async_get_device( + 
connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)}
+    )
+    assert dev is not None
+    state = hass.states.get("media_player.test_mymedia_player")
+    assert state is not None
+    assert state.state == "paused"
+
+    media_url = "http://127.0.0.1/test.mp3"
+    proxy_url = f"/api/esphome/ffmpeg_proxy/{dev.id}/test-id.flac"
+
+    with (
+        patch(
+            "homeassistant.components.esphome.media_player.async_create_proxy_url",
+            return_value=proxy_url,
+        ) as mock_async_create_proxy_url,
+    ):
+        await hass.services.async_call(
+            MEDIA_PLAYER_DOMAIN,
+            SERVICE_PLAY_MEDIA,
+            {
+                ATTR_ENTITY_ID: "media_player.test_mymedia_player",
+                ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC,
+                ATTR_MEDIA_CONTENT_ID: media_url,
+            },
+            blocking=True,
+        )
+
+        # Should be the default format
+        mock_async_create_proxy_url.assert_called_once()
+        device_id = mock_async_create_proxy_url.call_args[0][1]
+        mock_async_create_proxy_url.assert_called_once_with(
+            hass,
+            device_id,
+            media_url,
+            media_format="flac",
+            rate=None,
+            channels=None,
+            width=None,
+        )
+
+        media_args = mock_client.media_player_command.call_args.kwargs
+        assert not media_args["announcement"]
+
+        # Reset
+        mock_async_create_proxy_url.reset_mock()
+
+        # Set announcement flag
+        await hass.services.async_call(
+            MEDIA_PLAYER_DOMAIN,
+            SERVICE_PLAY_MEDIA,
+            {
+                ATTR_ENTITY_ID: "media_player.test_mymedia_player",
+                ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC,
+                ATTR_MEDIA_CONTENT_ID: media_url,
+                ATTR_MEDIA_ANNOUNCE: True,
+            },
+            blocking=True,
+        )
+
+        # Should be the announcement format
+        mock_async_create_proxy_url.assert_called_once()
+        device_id = mock_async_create_proxy_url.call_args[0][1]
+        mock_async_create_proxy_url.assert_called_once_with(
+            hass,
+            device_id,
+            media_url,
+            media_format="wav",
+            rate=16000,
+            channels=1,
+            width=2,
+        )
+
+        media_args = mock_client.media_player_command.call_args.kwargs
+        assert media_args["announcement"]
+
+        # test with bypass_proxy flag
+        mock_async_create_proxy_url.reset_mock()
+        await hass.services.async_call(
+            MEDIA_PLAYER_DOMAIN,
+            SERVICE_PLAY_MEDIA,
+            {
+                ATTR_ENTITY_ID: "media_player.test_mymedia_player",
+                ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC,
+                ATTR_MEDIA_CONTENT_ID: media_url,
+                ATTR_MEDIA_EXTRA: {
+                    "bypass_proxy": True,
+                },
+            },
+            blocking=True,
+        )
+        mock_async_create_proxy_url.assert_not_called()
+        media_args = mock_client.media_player_command.call_args.kwargs
+        assert media_args["media_url"] == media_url
diff --git a/tests/components/esphome/test_repairs.py b/tests/components/esphome/test_repairs.py
new file mode 100644
index 00000000000..c365e65cbe1
--- /dev/null
+++ b/tests/components/esphome/test_repairs.py
@@ -0,0 +1,13 @@
+"""Test ESPHome repairs."""
+
+import pytest
+
+from homeassistant.components.esphome import repairs
+from homeassistant.core import HomeAssistant
+
+
+async def test_create_fix_flow_raises_on_unknown_issue_id(hass: HomeAssistant) -> None:
+    """Test create_fix_flow raises on unknown issue_id."""
+
+    with pytest.raises(ValueError):
+        await repairs.async_create_fix_flow(hass, "no_such_issue", None)
diff --git a/tests/components/esphome/test_select.py b/tests/components/esphome/test_select.py
index a433b1b0ab0..6ae1260a89d 100644
--- a/tests/components/esphome/test_select.py
+++ b/tests/components/esphome/test_select.py
@@ -9,7 +9,7 @@ from homeassistant.components.select import (
     DOMAIN as SELECT_DOMAIN,
     SERVICE_SELECT_OPTION,
 )
-from homeassistant.const import ATTR_ENTITY_ID
+from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE
 from homeassistant.core import
HomeAssistant @@ -19,7 +19,7 @@ async def test_pipeline_selector( ) -> None: """Test assist pipeline selector.""" - state = hass.states.get("select.test_assist_pipeline") + state = hass.states.get("select.test_assistant") assert state is not None assert state.state == "preferred" @@ -38,6 +38,16 @@ async def test_vad_sensitivity_select( assert state.state == "default" +async def test_wake_word_select( + hass: HomeAssistant, + mock_voice_assistant_v1_entry, +) -> None: + """Test that wake word select is unavailable initially.""" + state = hass.states.get("select.test_wake_word") + assert state is not None + assert state.state == STATE_UNAVAILABLE + + async def test_select_generic_entity( hass: HomeAssistant, mock_client: APIClient, mock_generic_device_entry ) -> None: diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index 83e89b1de00..5060471f5d2 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -31,7 +31,6 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, STATE_UNAVAILABLE, - STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -83,11 +82,6 @@ def stub_reconnect(): "supported_features": 0, }, ), - ( - [], - STATE_UNKNOWN, # dashboard is available but device is unknown - {"supported_features": 0}, - ), ], ) async def test_update_entity( @@ -408,11 +402,7 @@ async def test_update_becomes_available_at_runtime( ) await hass.async_block_till_done() state = hass.states.get("update.test_firmware") - assert state is not None - features = state.attributes[ATTR_SUPPORTED_FEATURES] - # There are no devices on the dashboard so no - # way to tell the version so install is disabled - assert features is UpdateEntityFeature(0) + assert state is None # A device gets added to the dashboard mock_dashboard["configured"] = [ @@ -433,6 +423,41 @@ async def test_update_becomes_available_at_runtime( assert features is UpdateEntityFeature.INSTALL +async def test_update_entity_not_present_with_dashboard_but_unknown_device( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_dashboard: dict[str, Any], +) -> None: + """Test ESPHome update entity does not get created if the device is unknown to the dashboard.""" + await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) + + mock_dashboard["configured"] = [ + { + "name": "other-test", + "current_version": "2023.2.0-dev", + "configuration": "other-test.yaml", + } + ] + + state = hass.states.get("update.test_firmware") + assert state is None + + await async_get_dashboard(hass).async_refresh() + await hass.async_block_till_done() + + state = hass.states.get("update.none_firmware") + assert state is None + + async def test_generic_device_update_entity( hass: HomeAssistant, mock_client: APIClient, @@ -531,7 +556,8 @@ async def test_generic_device_update_entity_has_update( state = hass.states.get("update.test_myupdate") assert state is not None assert state.state == STATE_ON - assert state.attributes["in_progress"] == 50 + assert state.attributes["in_progress"] is True + assert state.attributes["update_percentage"] == 50 await hass.services.async_call( HOMEASSISTANT_DOMAIN, diff --git a/tests/components/esphome/test_valve.py b/tests/components/esphome/test_valve.py index 5ba7bcbe187..7a7e22b1713 100644 --- 
a/tests/components/esphome/test_valve.py +++ b/tests/components/esphome/test_valve.py @@ -10,7 +10,7 @@ from aioesphomeapi import ( UserService, ValveInfo, ValveOperation, - ValveState, + ValveState as ESPHomeValveState, ) from homeassistant.components.valve import ( @@ -21,10 +21,7 @@ from homeassistant.components.valve import ( SERVICE_OPEN_VALVE, SERVICE_SET_VALVE_POSITION, SERVICE_STOP_VALVE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + ValveState, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -52,7 +49,7 @@ async def test_valve_entity( ) ] states = [ - ValveState( + ESPHomeValveState( key=1, position=0.5, current_operation=ValveOperation.IS_OPENING, @@ -67,7 +64,7 @@ async def test_valve_entity( ) state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == STATE_OPENING + assert state.state == ValveState.OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -107,28 +104,30 @@ async def test_valve_entity( mock_client.valve_command.reset_mock() mock_device.set_state( - ValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) + ESPHomeValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == STATE_CLOSED + assert state.state == ValveState.CLOSED mock_device.set_state( - ValveState(key=1, position=0.5, current_operation=ValveOperation.IS_CLOSING) + ESPHomeValveState( + key=1, position=0.5, current_operation=ValveOperation.IS_CLOSING + ) ) await hass.async_block_till_done() state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == STATE_CLOSING + assert state.state == ValveState.CLOSING mock_device.set_state( - ValveState(key=1, position=1.0, current_operation=ValveOperation.IDLE) + ESPHomeValveState(key=1, position=1.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == STATE_OPEN + assert state.state == ValveState.OPEN async def test_valve_entity_without_position( @@ -151,7 +150,7 @@ async def test_valve_entity_without_position( ) ] states = [ - ValveState( + ESPHomeValveState( key=1, position=0.5, current_operation=ValveOperation.IS_OPENING, @@ -166,7 +165,7 @@ async def test_valve_entity_without_position( ) state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == STATE_OPENING + assert state.state == ValveState.OPENING assert ATTR_CURRENT_POSITION not in state.attributes await hass.services.async_call( @@ -188,9 +187,9 @@ async def test_valve_entity_without_position( mock_client.valve_command.reset_mock() mock_device.set_state( - ValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) + ESPHomeValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == STATE_CLOSED + assert state.state == ValveState.CLOSED diff --git a/tests/components/esphome/test_voice_assistant.py b/tests/components/esphome/test_voice_assistant.py deleted file mode 100644 index eafc0243dc6..00000000000 --- a/tests/components/esphome/test_voice_assistant.py +++ /dev/null @@ -1,964 +0,0 @@ -"""Test ESPHome voice assistant server.""" - -import asyncio -from 
collections.abc import Awaitable, Callable -import io -import socket -from unittest.mock import ANY, Mock, patch -import wave - -from aioesphomeapi import ( - APIClient, - EntityInfo, - EntityState, - UserService, - VoiceAssistantEventType, - VoiceAssistantFeature, - VoiceAssistantTimerEventType, -) -import pytest - -from homeassistant.components.assist_pipeline import ( - PipelineEvent, - PipelineEventType, - PipelineStage, -) -from homeassistant.components.assist_pipeline.error import ( - PipelineNotFound, - WakeWordDetectionAborted, - WakeWordDetectionError, -) -from homeassistant.components.esphome import DomainData -from homeassistant.components.esphome.voice_assistant import ( - VoiceAssistantAPIPipeline, - VoiceAssistantUDPPipeline, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import intent as intent_helper -import homeassistant.helpers.device_registry as dr - -from .conftest import _ONE_SECOND, MockESPHomeDevice - -_TEST_INPUT_TEXT = "This is an input test" -_TEST_OUTPUT_TEXT = "This is an output test" -_TEST_OUTPUT_URL = "output.mp3" -_TEST_MEDIA_ID = "12345" - - -@pytest.fixture -def voice_assistant_udp_pipeline( - hass: HomeAssistant, -) -> VoiceAssistantUDPPipeline: - """Return the UDP pipeline factory.""" - - def _voice_assistant_udp_server(entry): - entry_data = DomainData.get(hass).get_entry_data(entry) - - server: VoiceAssistantUDPPipeline = None - - def handle_finished(): - nonlocal server - assert server is not None - server.close() - - server = VoiceAssistantUDPPipeline(hass, entry_data, Mock(), handle_finished) - return server # noqa: RET504 - - return _voice_assistant_udp_server - - -@pytest.fixture -def voice_assistant_api_pipeline( - hass: HomeAssistant, - mock_client, - mock_voice_assistant_api_entry, -) -> VoiceAssistantAPIPipeline: - """Return the API Pipeline factory.""" - entry_data = DomainData.get(hass).get_entry_data(mock_voice_assistant_api_entry) - return VoiceAssistantAPIPipeline(hass, entry_data, Mock(), Mock(), mock_client) - - -@pytest.fixture -def voice_assistant_udp_pipeline_v1( - voice_assistant_udp_pipeline, - mock_voice_assistant_v1_entry, -) -> VoiceAssistantUDPPipeline: - """Return the UDP pipeline.""" - return voice_assistant_udp_pipeline(entry=mock_voice_assistant_v1_entry) - - -@pytest.fixture -def voice_assistant_udp_pipeline_v2( - voice_assistant_udp_pipeline, - mock_voice_assistant_v2_entry, -) -> VoiceAssistantUDPPipeline: - """Return the UDP pipeline.""" - return voice_assistant_udp_pipeline(entry=mock_voice_assistant_v2_entry) - - -@pytest.fixture -def mock_wav() -> bytes: - """Return one second of empty WAV audio.""" - with io.BytesIO() as wav_io: - with wave.open(wav_io, "wb") as wav_file: - wav_file.setframerate(16000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(bytes(_ONE_SECOND)) - - return wav_io.getvalue() - - -async def test_pipeline_events( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test that the pipeline function is called.""" - - async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): - assert device_id == "mock-device-id" - - event_callback = kwargs["event_callback"] - - event_callback( - PipelineEvent( - type=PipelineEventType.WAKE_WORD_END, - data={"wake_word_output": {}}, - ) - ) - - # Fake events - event_callback( - PipelineEvent( - type=PipelineEventType.STT_START, - data={}, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.STT_END, - data={"stt_output": {"text": 
_TEST_INPUT_TEXT}}, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_START, - data={"tts_input": _TEST_OUTPUT_TEXT}, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={"tts_output": {"url": _TEST_OUTPUT_URL}}, - ) - ) - - def handle_event( - event_type: VoiceAssistantEventType, data: dict[str, str] | None - ) -> None: - if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: - assert data is not None - assert data["text"] == _TEST_INPUT_TEXT - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: - assert data is not None - assert data["text"] == _TEST_OUTPUT_TEXT - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: - assert data is not None - assert data["url"] == _TEST_OUTPUT_URL - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: - assert data is None - - voice_assistant_udp_pipeline_v1.handle_event = handle_event - - with patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ): - voice_assistant_udp_pipeline_v1.transport = Mock() - - await voice_assistant_udp_pipeline_v1.run_pipeline( - device_id="mock-device-id", conversation_id=None - ) - - -@pytest.mark.usefixtures("socket_enabled") -async def test_udp_server( - unused_udp_port_factory: Callable[[], int], - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test the UDP server runs and queues incoming data.""" - port_to_use = unused_udp_port_factory() - - with patch( - "homeassistant.components.esphome.voice_assistant.UDP_PORT", new=port_to_use - ): - port = await voice_assistant_udp_pipeline_v1.start_server() - assert port == port_to_use - - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 0 - sock.sendto(b"test", ("127.0.0.1", port)) - - # Give the socket some time to send/receive the data - async with asyncio.timeout(1): - while voice_assistant_udp_pipeline_v1.queue.qsize() == 0: - await asyncio.sleep(0.1) - - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 1 - - voice_assistant_udp_pipeline_v1.stop() - voice_assistant_udp_pipeline_v1.close() - - assert voice_assistant_udp_pipeline_v1.transport.is_closing() - - -async def test_udp_server_queue( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test the UDP server queues incoming data.""" - - voice_assistant_udp_pipeline_v1.started = True - - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 0 - - voice_assistant_udp_pipeline_v1.datagram_received(bytes(1024), ("localhost", 0)) - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 1 - - voice_assistant_udp_pipeline_v1.datagram_received(bytes(1024), ("localhost", 0)) - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 2 - - async for data in voice_assistant_udp_pipeline_v1._iterate_packets(): - assert data == bytes(1024) - break - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 1 # One message removed - - voice_assistant_udp_pipeline_v1.stop() - assert ( - voice_assistant_udp_pipeline_v1.queue.qsize() == 2 - ) # An empty message added by stop - - voice_assistant_udp_pipeline_v1.datagram_received(bytes(1024), ("localhost", 0)) - assert ( - voice_assistant_udp_pipeline_v1.queue.qsize() == 2 - ) # No new messages added after stop - - voice_assistant_udp_pipeline_v1.close() - - # Stopping the UDP server should cause _iterate_packets to break out - # 
immediately without yielding any data. - has_data = False - async for _data in voice_assistant_udp_pipeline_v1._iterate_packets(): - has_data = True - - assert not has_data, "Server was stopped" - - -async def test_api_pipeline_queue( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the API pipeline queues incoming data.""" - - voice_assistant_api_pipeline.started = True - - assert voice_assistant_api_pipeline.queue.qsize() == 0 - - voice_assistant_api_pipeline.receive_audio_bytes(bytes(1024)) - assert voice_assistant_api_pipeline.queue.qsize() == 1 - - voice_assistant_api_pipeline.receive_audio_bytes(bytes(1024)) - assert voice_assistant_api_pipeline.queue.qsize() == 2 - - async for data in voice_assistant_api_pipeline._iterate_packets(): - assert data == bytes(1024) - break - assert voice_assistant_api_pipeline.queue.qsize() == 1 # One message removed - - voice_assistant_api_pipeline.stop() - assert ( - voice_assistant_api_pipeline.queue.qsize() == 2 - ) # An empty message added by stop - - voice_assistant_api_pipeline.receive_audio_bytes(bytes(1024)) - assert ( - voice_assistant_api_pipeline.queue.qsize() == 2 - ) # No new messages added after stop - - # Stopping the API Pipeline should cause _iterate_packets to break out - # immediately without yielding any data. - has_data = False - async for _data in voice_assistant_api_pipeline._iterate_packets(): - has_data = True - - assert not has_data, "Pipeline was stopped" - - -async def test_error_calls_handle_finished( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test that the handle_finished callback is called when an error occurs.""" - voice_assistant_udp_pipeline_v1.handle_finished = Mock() - - voice_assistant_udp_pipeline_v1.error_received(Exception()) - - voice_assistant_udp_pipeline_v1.handle_finished.assert_called() - - -@pytest.mark.usefixtures("socket_enabled") -async def test_udp_server_multiple( - unused_udp_port_factory: Callable[[], int], - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test that the UDP server raises an error if started twice.""" - with patch( - "homeassistant.components.esphome.voice_assistant.UDP_PORT", - new=unused_udp_port_factory(), - ): - await voice_assistant_udp_pipeline_v1.start_server() - - with ( - patch( - "homeassistant.components.esphome.voice_assistant.UDP_PORT", - new=unused_udp_port_factory(), - ), - pytest.raises(RuntimeError), - ): - await voice_assistant_udp_pipeline_v1.start_server() - - -@pytest.mark.usefixtures("socket_enabled") -async def test_udp_server_after_stopped( - unused_udp_port_factory: Callable[[], int], - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test that the UDP server raises an error if started after stopped.""" - voice_assistant_udp_pipeline_v1.close() - with ( - patch( - "homeassistant.components.esphome.voice_assistant.UDP_PORT", - new=unused_udp_port_factory(), - ), - pytest.raises(RuntimeError), - ): - await voice_assistant_udp_pipeline_v1.start_server() - - -async def test_events_converted_correctly( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the pipeline events produce the correct data to send to the device.""" - - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts", - ): - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.STT_START, - data={}, - ) - 
) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_STT_START, None - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.STT_END, - data={"stt_output": {"text": "text"}}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_STT_END, {"text": "text"} - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_START, - data={}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START, None - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_END, - data={ - "intent_output": { - "conversation_id": "conversation-id", - } - }, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END, - {"conversation_id": "conversation-id"}, - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_START, - data={"tts_input": "text"}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START, {"text": "text"} - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={"tts_output": {"url": "url", "media_id": "media-id"}}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END, {"url": "url"} - ) - - -async def test_unknown_event_type( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the API pipeline does not call handle_event for unknown events.""" - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type="unknown-event", - data={}, - ) - ) - - assert not voice_assistant_api_pipeline.handle_event.called - - -async def test_error_event_type( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the API pipeline calls event handler with error.""" - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.ERROR, - data={"code": "code", "message": "message"}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, - {"code": "code", "message": "message"}, - ) - - -async def test_send_tts_not_called( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test the UDP server with a v1 device does not call _send_tts.""" - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" - ) as mock_send_tts: - voice_assistant_udp_pipeline_v1._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - mock_send_tts.assert_not_called() - - -async def test_send_tts_called_udp( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, -) -> None: - """Test the UDP server with a v2 device calls _send_tts.""" - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" - ) as mock_send_tts: - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, 
"url": _TEST_OUTPUT_URL} - }, - ) - ) - - mock_send_tts.assert_called_with(_TEST_MEDIA_ID) - - -async def test_send_tts_called_api( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the API pipeline calls _send_tts.""" - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" - ) as mock_send_tts: - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - mock_send_tts.assert_called_with(_TEST_MEDIA_ID) - - -async def test_send_tts_not_called_when_empty( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the pipelines do not call _send_tts when the output is empty.""" - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" - ) as mock_send_tts: - voice_assistant_udp_pipeline_v1._event_callback( - PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) - ) - - mock_send_tts.assert_not_called() - - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) - ) - - mock_send_tts.assert_not_called() - - voice_assistant_api_pipeline._event_callback( - PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) - ) - - mock_send_tts.assert_not_called() - - -async def test_send_tts_udp( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, - mock_wav: bytes, -) -> None: - """Test the UDP server calls sendto to transmit audio data to device.""" - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", mock_wav), - ): - voice_assistant_udp_pipeline_v2.started = True - voice_assistant_udp_pipeline_v2.transport = Mock(spec=asyncio.DatagramTransport) - with patch.object( - voice_assistant_udp_pipeline_v2.transport, "is_closing", return_value=False - ): - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": { - "media_id": _TEST_MEDIA_ID, - "url": _TEST_OUTPUT_URL, - } - }, - ) - ) - - await voice_assistant_udp_pipeline_v2._tts_done.wait() - - voice_assistant_udp_pipeline_v2.transport.sendto.assert_called() - - -async def test_send_tts_api( - hass: HomeAssistant, - mock_client: APIClient, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, - mock_wav: bytes, -) -> None: - """Test the API pipeline calls cli.send_voice_assistant_audio to transmit audio data to device.""" - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", mock_wav), - ): - voice_assistant_api_pipeline.started = True - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": { - "media_id": _TEST_MEDIA_ID, - "url": _TEST_OUTPUT_URL, - } - }, - ) - ) - - await voice_assistant_api_pipeline._tts_done.wait() - - mock_client.send_voice_assistant_audio.assert_called() - - -async def test_send_tts_wrong_sample_rate( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that only 16000Hz audio will be streamed.""" - with io.BytesIO() as wav_io: - with 
wave.open(wav_io, "wb") as wav_file: - wav_file.setframerate(22050) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(bytes(_ONE_SECOND)) - - wav_bytes = wav_io.getvalue() - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", wav_bytes), - ): - voice_assistant_api_pipeline.started = True - voice_assistant_api_pipeline.transport = Mock(spec=asyncio.DatagramTransport) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - assert voice_assistant_api_pipeline._tts_task is not None - with pytest.raises(ValueError): - await voice_assistant_api_pipeline._tts_task - - -async def test_send_tts_wrong_format( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that only WAV audio will be streamed.""" - with ( - patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("raw", bytes(1024)), - ), - ): - voice_assistant_api_pipeline.started = True - voice_assistant_api_pipeline.transport = Mock(spec=asyncio.DatagramTransport) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - assert voice_assistant_api_pipeline._tts_task is not None - with pytest.raises(ValueError): - await voice_assistant_api_pipeline._tts_task - - -async def test_send_tts_not_started( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, - mock_wav: bytes, -) -> None: - """Test the UDP server does not call sendto when not started.""" - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", mock_wav), - ): - voice_assistant_udp_pipeline_v2.started = False - voice_assistant_udp_pipeline_v2.transport = Mock(spec=asyncio.DatagramTransport) - - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - await voice_assistant_udp_pipeline_v2._tts_done.wait() - - voice_assistant_udp_pipeline_v2.transport.sendto.assert_not_called() - - -async def test_send_tts_transport_none( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, - mock_wav: bytes, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the UDP server does not call sendto when transport is None.""" - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", mock_wav), - ): - voice_assistant_udp_pipeline_v2.started = True - voice_assistant_udp_pipeline_v2.transport = None - - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - await voice_assistant_udp_pipeline_v2._tts_done.wait() - - assert "No transport to send audio to" in caplog.text - - -async def test_wake_word( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that the pipeline is set to start with Wake word.""" - - async def async_pipeline_from_audio_stream(*args, start_stage, **kwargs): - assert start_stage == 
PipelineStage.WAKE_WORD - - with ( - patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch("asyncio.Event.wait"), # TTS wait event - ): - await voice_assistant_api_pipeline.run_pipeline( - device_id="mock-device-id", - conversation_id=None, - flags=2, - ) - - -async def test_wake_word_exception( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that the pipeline is set to start with Wake word.""" - - async def async_pipeline_from_audio_stream(*args, **kwargs): - raise WakeWordDetectionError("pipeline-not-found", "Pipeline not found") - - with patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ): - - def handle_event( - event_type: VoiceAssistantEventType, data: dict[str, str] | None - ) -> None: - if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: - assert data is not None - assert data["code"] == "pipeline-not-found" - assert data["message"] == "Pipeline not found" - - voice_assistant_api_pipeline.handle_event = handle_event - - await voice_assistant_api_pipeline.run_pipeline( - device_id="mock-device-id", - conversation_id=None, - flags=2, - ) - - -async def test_wake_word_abort_exception( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that the pipeline is set to start with Wake word.""" - - async def async_pipeline_from_audio_stream(*args, **kwargs): - raise WakeWordDetectionAborted - - with ( - patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch.object(voice_assistant_api_pipeline, "handle_event") as mock_handle_event, - ): - await voice_assistant_api_pipeline.run_pipeline( - device_id="mock-device-id", - conversation_id=None, - flags=2, - ) - - mock_handle_event.assert_not_called() - - -async def test_timer_events( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that injecting timer events results in the correct api client calls.""" - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.TIMERS - }, - ) - await hass.async_block_till_done() - dev = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} - ) - - total_seconds = (1 * 60 * 60) + (2 * 60) + 3 - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_START_TIMER, - { - "name": {"value": "test timer"}, - "hours": {"value": 1}, - "minutes": {"value": 2}, - "seconds": {"value": 3}, - }, - device_id=dev.id, - ) - - mock_client.send_voice_assistant_timer_event.assert_called_with( - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED, - ANY, - "test timer", - total_seconds, - total_seconds, - True, - ) - - # Increase timer beyond original time and check total_seconds has increased - mock_client.send_voice_assistant_timer_event.reset_mock() - - total_seconds += 5 * 60 - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_INCREASE_TIMER, - { - 
"name": {"value": "test timer"}, - "minutes": {"value": 5}, - }, - device_id=dev.id, - ) - - mock_client.send_voice_assistant_timer_event.assert_called_with( - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED, - ANY, - "test timer", - total_seconds, - ANY, - True, - ) - - -async def test_unknown_timer_event( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that unknown (new) timer event types do not result in api calls.""" - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.TIMERS - }, - ) - await hass.async_block_till_done() - dev = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} - ) - - with patch( - "homeassistant.components.esphome.voice_assistant._TIMER_EVENT_TYPES.from_hass", - side_effect=KeyError, - ): - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_START_TIMER, - { - "name": {"value": "test timer"}, - "hours": {"value": 1}, - "minutes": {"value": 2}, - "seconds": {"value": 3}, - }, - device_id=dev.id, - ) - - mock_client.send_voice_assistant_timer_event.assert_not_called() - - -async def test_invalid_pipeline_id( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that the pipeline is set to start with Wake word.""" - - invalid_pipeline_id = "invalid-pipeline-id" - - async def async_pipeline_from_audio_stream(*args, **kwargs): - raise PipelineNotFound( - "pipeline_not_found", f"Pipeline {invalid_pipeline_id} not found" - ) - - with patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ): - - def handle_event( - event_type: VoiceAssistantEventType, data: dict[str, str] | None - ) -> None: - if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: - assert data is not None - assert data["code"] == "pipeline_not_found" - assert data["message"] == f"Pipeline {invalid_pipeline_id} not found" - - voice_assistant_api_pipeline.handle_event = handle_event - - await voice_assistant_api_pipeline.run_pipeline( - device_id="mock-device-id", - conversation_id=None, - flags=2, - ) diff --git a/tests/components/evohome/conftest.py b/tests/components/evohome/conftest.py index 260330896b7..6daab3f32bb 100644 --- a/tests/components/evohome/conftest.py +++ b/tests/components/evohome/conftest.py @@ -2,110 +2,207 @@ from __future__ import annotations -from datetime import datetime, timedelta -from typing import Any, Final +from collections.abc import AsyncGenerator, Callable +from datetime import datetime, timedelta, timezone +from http import HTTPMethod +from typing import Any from unittest.mock import MagicMock, patch from aiohttp import ClientSession from evohomeasync2 import EvohomeClient from evohomeasync2.broker import Broker +from evohomeasync2.controlsystem import ControlSystem +from evohomeasync2.zone import Zone import pytest from homeassistant.components.evohome import CONF_PASSWORD, CONF_USERNAME, DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from 
homeassistant.util import dt as dt_util, slugify from homeassistant.util.json import JsonArrayType, JsonObjectType -from .const import ACCESS_TOKEN, REFRESH_TOKEN +from .const import ACCESS_TOKEN, REFRESH_TOKEN, USERNAME from tests.common import load_json_array_fixture, load_json_object_fixture -TEST_CONFIG: Final = { - CONF_USERNAME: "username", - CONF_PASSWORD: "password", -} - -def user_account_config_fixture() -> JsonObjectType: +def user_account_config_fixture(install: str) -> JsonObjectType: """Load JSON for the config of a user's account.""" - return load_json_object_fixture("user_account.json", DOMAIN) + try: + return load_json_object_fixture(f"{install}/user_account.json", DOMAIN) + except FileNotFoundError: + return load_json_object_fixture("default/user_account.json", DOMAIN) -def user_locations_config_fixture() -> JsonArrayType: +def user_locations_config_fixture(install: str) -> JsonArrayType: """Load JSON for the config of a user's installation (a list of locations).""" - return load_json_array_fixture("user_locations.json", DOMAIN) + return load_json_array_fixture(f"{install}/user_locations.json", DOMAIN) -def location_status_fixture(loc_id: str) -> JsonObjectType: +def location_status_fixture(install: str, loc_id: str | None = None) -> JsonObjectType: """Load JSON for the status of a specific location.""" - return load_json_object_fixture(f"status_{loc_id}.json", DOMAIN) + if loc_id is None: + _install = load_json_array_fixture(f"{install}/user_locations.json", DOMAIN) + loc_id = _install[0]["locationInfo"]["locationId"] # type: ignore[assignment, call-overload, index] + return load_json_object_fixture(f"{install}/status_{loc_id}.json", DOMAIN) -def dhw_schedule_fixture() -> JsonObjectType: +def dhw_schedule_fixture(install: str) -> JsonObjectType: """Load JSON for the schedule of a domesticHotWater zone.""" - return load_json_object_fixture("schedule_dhw.json", DOMAIN) + try: + return load_json_object_fixture(f"{install}/schedule_dhw.json", DOMAIN) + except FileNotFoundError: + return load_json_object_fixture("default/schedule_dhw.json", DOMAIN) -def zone_schedule_fixture() -> JsonObjectType: +def zone_schedule_fixture(install: str) -> JsonObjectType: """Load JSON for the schedule of a temperatureZone zone.""" - return load_json_object_fixture("schedule_zone.json", DOMAIN) + try: + return load_json_object_fixture(f"{install}/schedule_zone.json", DOMAIN) + except FileNotFoundError: + return load_json_object_fixture("default/schedule_zone.json", DOMAIN) -async def mock_get( - self: Broker, url: str, **kwargs: Any -) -> JsonArrayType | JsonObjectType: - """Return the JSON for a HTTP get of a given URL.""" +def mock_get_factory(install: str) -> Callable: + """Return a get method for a specified installation.""" - # a proxy for the behaviour of the real web API - if self.refresh_token is None: - self.refresh_token = f"new_{REFRESH_TOKEN}" + async def mock_get( + self: Broker, url: str, **kwargs: Any + ) -> JsonArrayType | JsonObjectType: + """Return the JSON for a HTTP get of a given URL.""" - if self.access_token_expires is None or self.access_token_expires < datetime.now(): - self.access_token = f"new_{ACCESS_TOKEN}" - self.access_token_expires = datetime.now() + timedelta(minutes=30) + # a proxy for the behaviour of the real web API + if self.refresh_token is None: + self.refresh_token = f"new_{REFRESH_TOKEN}" - # assume a valid GET, and return the JSON for that web API - if url == "userAccount": # userAccount - return user_account_config_fixture() + if ( + 
self.access_token_expires is None + or self.access_token_expires < datetime.now() + ): + self.access_token = f"new_{ACCESS_TOKEN}" + self.access_token_expires = datetime.now() + timedelta(minutes=30) - if url.startswith("location"): - if "installationInfo" in url: # location/installationInfo?userId={id} - return user_locations_config_fixture() - if "location" in url: # location/{id}/status - return location_status_fixture("2738909") + # assume a valid GET, and return the JSON for that web API + if url == "userAccount": # userAccount + return user_account_config_fixture(install) - elif "schedule" in url: - if url.startswith("domesticHotWater"): # domesticHotWater/{id}/schedule - return dhw_schedule_fixture() - if url.startswith("temperatureZone"): # temperatureZone/{id}/schedule - return zone_schedule_fixture() + if url.startswith("location"): + if "installationInfo" in url: # location/installationInfo?userId={id} + return user_locations_config_fixture(install) + if "location" in url: # location/{id}/status + return location_status_fixture(install) - pytest.xfail(f"Unexpected URL: {url}") + elif "schedule" in url: + if url.startswith("domesticHotWater"): # domesticHotWater/{id}/schedule + return dhw_schedule_fixture(install) + if url.startswith("temperatureZone"): # temperatureZone/{id}/schedule + return zone_schedule_fixture(install) + + pytest.fail(f"Unexpected request: {HTTPMethod.GET} {url}") + + return mock_get -@patch("evohomeasync2.broker.Broker.get", mock_get) -async def setup_evohome(hass: HomeAssistant, test_config: dict[str, str]) -> MagicMock: +@pytest.fixture +def config() -> dict[str, str]: + "Return a default/minimal configuration." + return { + CONF_USERNAME: USERNAME, + CONF_PASSWORD: "password", + } + + +async def setup_evohome( + hass: HomeAssistant, + config: dict[str, str], + install: str = "default", +) -> AsyncGenerator[MagicMock]: """Set up the evohome integration and return its client. The class is mocked here to check the client was instantiated with the correct args. 
""" + # set the time zone as for the active evohome location + loc_idx: int = config.get("location_idx", 0) # type: ignore[assignment] + + try: + locn = user_locations_config_fixture(install)[loc_idx] + except IndexError: + if loc_idx == 0: + raise + locn = user_locations_config_fixture(install)[0] + + utc_offset: int = locn["locationInfo"]["timeZone"]["currentOffsetMinutes"] # type: ignore[assignment, call-overload, index] + dt_util.set_default_time_zone(timezone(timedelta(minutes=utc_offset))) + with ( patch("homeassistant.components.evohome.evo.EvohomeClient") as mock_client, patch("homeassistant.components.evohome.ev1.EvohomeClient", return_value=None), + patch("evohomeasync2.broker.Broker.get", mock_get_factory(install)), ): - mock_client.side_effect = EvohomeClient + evo: EvohomeClient | None = None - assert await async_setup_component(hass, DOMAIN, {DOMAIN: test_config}) + def evohome_client(*args, **kwargs) -> EvohomeClient: + nonlocal evo + evo = EvohomeClient(*args, **kwargs) + return evo + + mock_client.side_effect = evohome_client + + assert await async_setup_component(hass, DOMAIN, {DOMAIN: config}) await hass.async_block_till_done() mock_client.assert_called_once() - assert mock_client.call_args.args[0] == test_config[CONF_USERNAME] - assert mock_client.call_args.args[1] == test_config[CONF_PASSWORD] + assert mock_client.call_args.args[0] == config[CONF_USERNAME] + assert mock_client.call_args.args[1] == config[CONF_PASSWORD] assert isinstance(mock_client.call_args.kwargs["session"], ClientSession) - assert mock_client.account_info is not None + assert evo and evo.account_info is not None - return mock_client + mock_client.return_value = evo + yield mock_client + + +@pytest.fixture +async def evohome( + hass: HomeAssistant, + config: dict[str, str], + install: str, +) -> AsyncGenerator[MagicMock]: + """Return the mocked evohome client for this install fixture.""" + + async for mock_client in setup_evohome(hass, config, install=install): + yield mock_client + + +@pytest.fixture +async def ctl_id( + hass: HomeAssistant, + config: dict[str, str], + install: MagicMock, +) -> AsyncGenerator[str]: + """Return the entity_id of the evohome integration's controller.""" + + async for mock_client in setup_evohome(hass, config, install=install): + evo: EvohomeClient = mock_client.return_value + ctl: ControlSystem = evo._get_single_tcs() + + yield f"{Platform.CLIMATE}.{slugify(ctl.location.name)}" + + +@pytest.fixture +async def zone_id( + hass: HomeAssistant, + config: dict[str, str], + install: MagicMock, +) -> AsyncGenerator[str]: + """Return the entity_id of the evohome integration's first zone.""" + + async for mock_client in setup_evohome(hass, config, install=install): + evo: EvohomeClient = mock_client.return_value + zone: Zone = list(evo._get_single_tcs().zones.values())[0] + + yield f"{Platform.CLIMATE}.{slugify(zone.name)}" diff --git a/tests/components/evohome/const.py b/tests/components/evohome/const.py index 0b298db533a..c3dc92c3fbc 100644 --- a/tests/components/evohome/const.py +++ b/tests/components/evohome/const.py @@ -8,3 +8,15 @@ ACCESS_TOKEN: Final = "at_1dc7z657UKzbhKA..." REFRESH_TOKEN: Final = "rf_jg68ZCKYdxEI3fF..." SESSION_ID: Final = "F7181186..." 
USERNAME: Final = "test_user@gmail.com" + +# The h-numbers refer to issues in HA's core repo +TEST_INSTALLS: Final = ( + "minimal", # evohome: single zone, no DHW + "default", # evohome: multi-zone, with DHW + "h032585", # VisionProWifi: no preset modes for TCS, zoneId=systemId + "h099625", # RoundThermostat + "sys_004", # RoundModulation +) +# "botched", # as default: but with activeFaults, ghost zones & unknown types + +TEST_INSTALLS_WITH_DHW: Final = ("default",) diff --git a/tests/components/evohome/fixtures/status_2738909.json b/tests/components/evohome/fixtures/botched/status_2738909.json similarity index 100% rename from tests/components/evohome/fixtures/status_2738909.json rename to tests/components/evohome/fixtures/botched/status_2738909.json diff --git a/tests/components/evohome/fixtures/user_locations.json b/tests/components/evohome/fixtures/botched/user_locations.json similarity index 99% rename from tests/components/evohome/fixtures/user_locations.json rename to tests/components/evohome/fixtures/botched/user_locations.json index cf59aa9ae8a..f2f4091a2dc 100644 --- a/tests/components/evohome/fixtures/user_locations.json +++ b/tests/components/evohome/fixtures/botched/user_locations.json @@ -246,7 +246,7 @@ }, { "zoneId": "3450733", - "modelType": "xx", + "modelType": "xxx", "setpointCapabilities": { "maxHeatSetpoint": 35.0, "minHeatSetpoint": 5.0, @@ -268,7 +268,7 @@ "setpointValueResolution": 0.5 }, "name": "Spare Room", - "zoneType": "xx" + "zoneType": "xxx" } ], "dhw": { diff --git a/tests/components/evohome/fixtures/schedule_dhw.json b/tests/components/evohome/fixtures/default/schedule_dhw.json similarity index 100% rename from tests/components/evohome/fixtures/schedule_dhw.json rename to tests/components/evohome/fixtures/default/schedule_dhw.json diff --git a/tests/components/evohome/fixtures/schedule_zone.json b/tests/components/evohome/fixtures/default/schedule_zone.json similarity index 100% rename from tests/components/evohome/fixtures/schedule_zone.json rename to tests/components/evohome/fixtures/default/schedule_zone.json diff --git a/tests/components/evohome/fixtures/default/status_2738909.json b/tests/components/evohome/fixtures/default/status_2738909.json new file mode 100644 index 00000000000..48754595d0f --- /dev/null +++ b/tests/components/evohome/fixtures/default/status_2738909.json @@ -0,0 +1,105 @@ +{ + "locationId": "2738909", + "gateways": [ + { + "gatewayId": "2499896", + "temperatureControlSystems": [ + { + "systemId": "3432522", + "zones": [ + { + "zoneId": "3432521", + "name": "Dead Zone", + "temperatureStatus": { "isAvailable": false }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "activeFaults": [] + }, + { + "zoneId": "3432576", + "name": "Main Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "PermanentOverride" + }, + "activeFaults": [] + }, + { + "zoneId": "3432577", + "name": "Front Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 21.0, + "setpointMode": "TemporaryOverride", + "until": "2022-03-07T19:00:00Z" + }, + "activeFaults": [] + }, + { + "zoneId": "3432578", + "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "name": "Kitchen" + }, + { + "zoneId": "3432579", + "temperatureStatus": { 
"temperature": 20.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.0, + "setpointMode": "FollowSchedule" + }, + "name": "Bathroom Dn" + }, + { + "zoneId": "3432580", + "temperatureStatus": { "temperature": 21.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.0, + "setpointMode": "FollowSchedule" + }, + "name": "Main Bedroom" + }, + { + "zoneId": "3449703", + "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "name": "Kids Room" + }, + { + "zoneId": "3450733", + "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 14.0, + "setpointMode": "PermanentOverride" + }, + "name": "Spare Room" + } + ], + "dhw": { + "dhwId": "3933910", + "temperatureStatus": { "temperature": 23.0, "isAvailable": true }, + "stateStatus": { "state": "Off", "mode": "PermanentOverride" }, + "activeFaults": [] + }, + "activeFaults": [], + "systemModeStatus": { "mode": "AutoWithEco", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/user_account.json b/tests/components/evohome/fixtures/default/user_account.json similarity index 100% rename from tests/components/evohome/fixtures/user_account.json rename to tests/components/evohome/fixtures/default/user_account.json diff --git a/tests/components/evohome/fixtures/default/user_locations.json b/tests/components/evohome/fixtures/default/user_locations.json new file mode 100644 index 00000000000..90cd4366b75 --- /dev/null +++ b/tests/components/evohome/fixtures/default/user_locations.json @@ -0,0 +1,320 @@ +[ + { + "locationInfo": { + "locationId": "2738909", + "name": "My Home", + "streetAddress": "1 Main Street", + "city": "London", + "country": "UnitedKingdom", + "postcode": "E1 1AA", + "locationType": "Residential", + "useDaylightSaveSwitching": true, + "timeZone": { + "timeZoneId": "GMTStandardTime", + "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", + "offsetMinutes": 0, + "currentOffsetMinutes": 60, + "supportsDaylightSaving": true + }, + "locationOwner": { + "userId": "2263181", + "username": "user_2263181@gmail.com", + "firstname": "John", + "lastname": "Smith" + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": "2499896", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "3432522", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": "3432521", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Dead Zone", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432576", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + 
"TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432577", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Front Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432578", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Kitchen", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432579", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Bathroom Dn", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432580", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Bedroom", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3449703", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Kids Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3450733", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + 
"allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Spare Room", + "zoneType": "RadiatorZone" + } + ], + "dhw": { + "dhwId": "3933910", + "dhwStateCapabilitiesResponse": { + "allowedStates": ["On", "Off"], + "allowedModes": [ + "FollowSchedule", + "PermanentOverride", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilitiesResponse": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00" + } + }, + "allowedSystemModes": [ + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithReset", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + "timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "DayOff", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "Custom", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/fixtures/h032585/status_111111.json b/tests/components/evohome/fixtures/h032585/status_111111.json new file mode 100644 index 00000000000..0ea535c2461 --- /dev/null +++ b/tests/components/evohome/fixtures/h032585/status_111111.json @@ -0,0 +1,31 @@ +{ + "locationId": "111111", + "gateways": [ + { + "gatewayId": "222222", + "temperatureControlSystems": [ + { + "systemId": "416856", + "zones": [ + { + "zoneId": "416856", + "temperatureStatus": { + "temperature": 21.5, + "isAvailable": true + }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 21.5, + "setpointMode": "FollowSchedule" + }, + "name": "THERMOSTAT" + } + ], + "activeFaults": [], + "systemModeStatus": { "mode": "Heat", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/h032585/temperatures.json b/tests/components/evohome/fixtures/h032585/temperatures.json new file mode 100644 index 00000000000..a2015c94f46 --- /dev/null +++ b/tests/components/evohome/fixtures/h032585/temperatures.json @@ -0,0 +1,3 @@ +{ + "416856": 21.5 +} diff --git a/tests/components/evohome/fixtures/h032585/user_locations.json b/tests/components/evohome/fixtures/h032585/user_locations.json new file mode 100644 index 00000000000..b4ea2e5c420 --- /dev/null +++ b/tests/components/evohome/fixtures/h032585/user_locations.json @@ -0,0 +1,79 @@ +[ + { + "locationInfo": { + "locationId": "111111", + "name": "My Home", + "timeZone": { + "timeZoneId": "GMTStandardTime", + "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", + "offsetMinutes": 0, + "currentOffsetMinutes": 60, + "supportsDaylightSaving": true + } + }, + "gateways": [ + { + 
"gatewayInfo": { + "gatewayId": "222222", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "416856", + "modelType": "VisionProWifiRetail", + "zones": [ + { + "zoneId": "416856", + "modelType": "VisionProWifiRetail", + "setpointCapabilities": { + "vacationHoldCapabilities": { + "isChangeable": true, + "isCancelable": true, + "minDuration": "1.00:00:00", + "maxDuration": "365.23:45:00", + "timingResolution": "00:15:00" + }, + "maxHeatSetpoint": 32.0, + "minHeatSetpoint": 4.5, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride", + "VacationHold" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:15:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 4, + "minSwitchpointsPerDay": 0, + "timingResolution": "00:15:00", + "setpointValueResolution": 0.5 + }, + "name": "THERMOSTAT", + "zoneType": "Thermostat" + } + ], + "allowedSystemModes": [ + { + "systemMode": "Off", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "Heat", + "canBePermanent": true, + "canBeTemporary": false + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/fixtures/h099625/status_111111.json b/tests/components/evohome/fixtures/h099625/status_111111.json new file mode 100644 index 00000000000..149d8aba783 --- /dev/null +++ b/tests/components/evohome/fixtures/h099625/status_111111.json @@ -0,0 +1,44 @@ +{ + "locationId": "111111", + "gateways": [ + { + "gatewayId": "222222", + "temperatureControlSystems": [ + { + "systemId": "8557535", + "zones": [ + { + "zoneId": "8557539", + "temperatureStatus": { + "temperature": 21.5, + "isAvailable": true + }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 21.5, + "setpointMode": "FollowSchedule" + }, + "name": "THERMOSTAT" + }, + { + "zoneId": "8557541", + "temperatureStatus": { + "temperature": 21.5, + "isAvailable": true + }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 21.5, + "setpointMode": "FollowSchedule" + }, + "name": "THERMOSTAT" + } + ], + "activeFaults": [], + "systemModeStatus": { "mode": "Auto", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/h099625/user_locations.json b/tests/components/evohome/fixtures/h099625/user_locations.json new file mode 100644 index 00000000000..cc32caccc73 --- /dev/null +++ b/tests/components/evohome/fixtures/h099625/user_locations.json @@ -0,0 +1,113 @@ +[ + { + "locationInfo": { + "locationId": "111111", + "name": "My Home", + "timeZone": { + "timeZoneId": "FLEStandardTime", + "displayName": "(UTC+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius", + "offsetMinutes": 120, + "currentOffsetMinutes": 180, + "supportsDaylightSaving": true + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": "222222", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "8557535", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": "8557539", + "modelType": "RoundWireless", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + 
"maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 0, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Thermostat", + "zoneType": "Thermostat" + }, + { + "zoneId": "8557541", + "modelType": "RoundWireless", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 0, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Thermostat 2", + "zoneType": "Thermostat" + } + ], + "allowedSystemModes": [ + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + "timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/fixtures/minimal/status_2738909.json b/tests/components/evohome/fixtures/minimal/status_2738909.json new file mode 100644 index 00000000000..4b344314a67 --- /dev/null +++ b/tests/components/evohome/fixtures/minimal/status_2738909.json @@ -0,0 +1,28 @@ +{ + "locationId": "2738909", + "gateways": [ + { + "gatewayId": "2499896", + "temperatureControlSystems": [ + { + "systemId": "3432522", + "zones": [ + { + "zoneId": "3432576", + "name": "Main Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "activeFaults": [] + } + ], + "activeFaults": [], + "systemModeStatus": { "mode": "AutoWithEco", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/minimal/user_locations.json b/tests/components/evohome/fixtures/minimal/user_locations.json new file mode 100644 index 00000000000..932686d8728 --- /dev/null +++ b/tests/components/evohome/fixtures/minimal/user_locations.json @@ -0,0 +1,120 @@ +[ + { + "locationInfo": { + "locationId": "2738909", + "name": "My Home", + "streetAddress": "1 Main Street", + "city": "London", + "country": "UnitedKingdom", + "postcode": "E1 1AA", + "locationType": "Residential", + "useDaylightSaveSwitching": true, + "timeZone": { + "timeZoneId": "GMTStandardTime", + "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", + "offsetMinutes": 0, + "currentOffsetMinutes": 60, + "supportsDaylightSaving": true + }, + "locationOwner": { + "userId": "2263181", + "username": "user_2263181@gmail.com", + "firstname": "John", + "lastname": "Smith" + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": "2499896", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "3432522", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": "3432576", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + 
"allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Room", + "zoneType": "RadiatorZone" + } + ], + "allowedSystemModes": [ + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithReset", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + "timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "DayOff", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "Custom", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/fixtures/sys_004/status_3164610.json b/tests/components/evohome/fixtures/sys_004/status_3164610.json new file mode 100644 index 00000000000..a9ef3f6ee28 --- /dev/null +++ b/tests/components/evohome/fixtures/sys_004/status_3164610.json @@ -0,0 +1,33 @@ +{ + "locationId": "3164610", + "gateways": [ + { + "gatewayId": "2938388", + "temperatureControlSystems": [ + { + "systemId": "4187769", + "zones": [ + { + "zoneId": "4187768", + "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 15.0, + "setpointMode": "PermanentOverride" + }, + "name": "Thermostat" + } + ], + "activeFaults": [], + "systemModeStatus": { "mode": "Auto", "isPermanent": true } + } + ], + "activeFaults": [ + { + "faultType": "GatewayCommunicationLost", + "since": "2023-05-04T18:47:36.7727046" + } + ] + } + ] +} diff --git a/tests/components/evohome/fixtures/sys_004/user_locations.json b/tests/components/evohome/fixtures/sys_004/user_locations.json new file mode 100644 index 00000000000..9defab8b6ee --- /dev/null +++ b/tests/components/evohome/fixtures/sys_004/user_locations.json @@ -0,0 +1,99 @@ +[ + { + "locationInfo": { + "locationId": "3164610", + "name": "Living room", + "streetAddress": "1 Main Road", + "city": "Boomtown", + "country": "Netherlands", + "postcode": "1234XX", + "locationType": "Residential", + "useDaylightSaveSwitching": true, + "timeZone": { + "timeZoneId": "WEuropeStandardTime", + "displayName": "(UTC+01:00) Amsterdam, Berlijn, Bern, Rome, Stockholm, Wenen", + "offsetMinutes": 60, + "currentOffsetMinutes": 120, + "supportsDaylightSaving": true + }, + "locationOwner": { + "userId": "2624305", + "username": "user_2624305@gmail.com", + "firstname": "Chris", + "lastname": "Jones" + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": "2938388", + "mac": "00D02D5A7000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "4187769", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": "4187768", + "modelType": "RoundModulation", + 
"setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 0, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Thermostat", + "zoneType": "Thermostat" + } + ], + "allowedSystemModes": [ + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + "timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/snapshots/test_climate.ambr b/tests/components/evohome/snapshots/test_climate.ambr new file mode 100644 index 00000000000..ce7fcf2744e --- /dev/null +++ b/tests/components/evohome/snapshots/test_climate.ambr @@ -0,0 +1,1459 @@ +# serializer version: 1 +# name: test_ctl_set_hvac_mode[default] + list([ + tuple( + 'HeatingOff', + ), + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_set_hvac_mode[h032585] + list([ + tuple( + 'Off', + ), + tuple( + 'Heat', + ), + ]) +# --- +# name: test_ctl_set_hvac_mode[h099625] + list([ + tuple( + 'HeatingOff', + ), + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_set_hvac_mode[minimal] + list([ + tuple( + 'HeatingOff', + ), + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_set_hvac_mode[sys_004] + list([ + tuple( + 'HeatingOff', + ), + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_turn_off[default] + list([ + tuple( + 'HeatingOff', + ), + ]) +# --- +# name: test_ctl_turn_off[h032585] + list([ + tuple( + 'Off', + ), + ]) +# --- +# name: test_ctl_turn_off[h099625] + list([ + tuple( + 'HeatingOff', + ), + ]) +# --- +# name: test_ctl_turn_off[minimal] + list([ + tuple( + 'HeatingOff', + ), + ]) +# --- +# name: test_ctl_turn_off[sys_004] + list([ + tuple( + 'HeatingOff', + ), + ]) +# --- +# name: test_ctl_turn_on[default] + list([ + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_turn_on[h032585] + list([ + tuple( + 'Heat', + ), + ]) +# --- +# name: test_ctl_turn_on[h099625] + list([ + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_turn_on[minimal] + list([ + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_turn_on[sys_004] + list([ + tuple( + 'Auto', + ), + ]) +# --- +# name: test_setup_platform[botched][climate.bathroom_dn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Bathroom Dn', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': 
True, + 'temperature': 20.0, + }), + 'zone_id': '3432579', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.bathroom_dn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.dead_zone-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Dead Zone', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': False, + }), + 'zone_id': '3432521', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.dead_zone', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.front_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Front Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'temporary', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + dict({ + 'faultType': 'TempZoneActuatorLowBattery', + 'since': '2022-03-02T04:50:20', + }), + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'TemporaryOverride', + 'target_heat_temperature': 21.0, + 'until': '2022-03-07T20:00:00+01:00', + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432577', + }), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.front_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.kids_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Kids Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '3449703', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kids_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Kitchen', + 'hvac_modes': list([ + , + , + ]), 
+ 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 20.0, + }), + 'zone_id': '3432578', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.main_bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.0, + 'friendly_name': 'Main Bedroom', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.0, + }), + 'zone_id': '3432580', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.main_bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.main_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Main Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + dict({ + 'faultType': 'TempZoneActuatorCommunicationLost', + 'since': '2022-03-02T15:56:01', + }), + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432576', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.main_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.my_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.7, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': 'eco', + 'preset_modes': list([ + 'Reset', + 'eco', + 'away', + 'home', + 'Custom', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '3432522', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'AutoWithEco', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.bathroom_dn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Bathroom Dn', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 20.0, + }), + 'zone_id': '3432579', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.bathroom_dn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.dead_zone-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Dead Zone', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': False, + }), + 'zone_id': '3432521', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.dead_zone', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.front_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Front Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'temporary', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'TemporaryOverride', + 'target_heat_temperature': 21.0, + 'until': '2022-03-07T20:00:00+01:00', + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432577', + }), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.front_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.kids_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Kids Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 
'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '3449703', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kids_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Kitchen', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 20.0, + }), + 'zone_id': '3432578', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.main_bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.0, + 'friendly_name': 'Main Bedroom', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.0, + }), + 'zone_id': '3432580', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.main_bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.main_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Main Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432576', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.main_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.my_home-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'current_temperature': 19.7, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': 'eco', + 'preset_modes': list([ + 'Reset', + 'eco', + 'away', + 'home', + 'Custom', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '3432522', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'AutoWithEco', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.spare_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Spare Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 14.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '3450733', + }), + 'supported_features': , + 'temperature': 14.0, + }), + 'context': , + 'entity_id': 'climate.spare_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h032585][climate.my_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '416856', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Heat', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h032585][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 32.0, + 'min_temp': 4.5, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '416856', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h099625][climate.my_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': None, + 'preset_modes': list([ + 
'eco', + 'away', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '8557535', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Auto', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h099625][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+03:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+03:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '8557539', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h099625][climate.thermostat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+03:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+03:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '8557541', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[minimal][climate.main_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Main Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432576', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.main_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[minimal][climate.my_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 
'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': 'eco', + 'preset_modes': list([ + 'Reset', + 'eco', + 'away', + 'home', + 'Custom', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '3432522', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'AutoWithEco', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[sys_004][climate.living_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Living room', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': None, + 'preset_modes': list([ + 'eco', + 'away', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '4187769', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Auto', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.living_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[sys_004][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Thermostat', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 15.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+02:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+02:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '4187768', + }), + 'supported_features': , + 'temperature': 15.0, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_zone_set_hvac_mode[default] + list([ + tuple( + 5.0, + ), + ]) +# --- +# name: test_zone_set_hvac_mode[h032585] + list([ + tuple( + 4.5, + ), + ]) +# --- +# name: test_zone_set_hvac_mode[h099625] + list([ + tuple( + 5.0, + ), + ]) +# --- +# name: test_zone_set_hvac_mode[minimal] + list([ + tuple( + 5.0, + ), + ]) +# --- +# name: test_zone_set_hvac_mode[sys_004] + list([ + tuple( + 5.0, + ), + ]) +# --- +# name: test_zone_set_preset_mode[default] + list([ + tuple( + 17.0, + ), + tuple( + 17.0, + ), + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_preset_mode[h032585] + list([ + tuple( + 21.5, + ), + tuple( + 21.5, + ), + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_preset_mode[h099625] + list([ + tuple( + 21.5, + ), + tuple( + 21.5, + ), + dict({ + 'until': datetime.datetime(2024, 7, 10, 19, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_preset_mode[minimal] + list([ + tuple( + 17.0, + ), + tuple( + 17.0, + ), + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_preset_mode[sys_004] + list([ + tuple( + 15.0, + ), + tuple( + 15.0, + ), + dict({ + 
'until': datetime.datetime(2024, 7, 10, 20, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_temperature[default] + list([ + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_temperature[h032585] + list([ + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_temperature[h099625] + list([ + dict({ + 'until': datetime.datetime(2024, 7, 10, 19, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_temperature[minimal] + list([ + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_temperature[sys_004] + list([ + dict({ + 'until': None, + }), + ]) +# --- +# name: test_zone_turn_off[default] + list([ + tuple( + 5.0, + ), + ]) +# --- +# name: test_zone_turn_off[h032585] + list([ + tuple( + 4.5, + ), + ]) +# --- +# name: test_zone_turn_off[h099625] + list([ + tuple( + 5.0, + ), + ]) +# --- +# name: test_zone_turn_off[minimal] + list([ + tuple( + 5.0, + ), + ]) +# --- +# name: test_zone_turn_off[sys_004] + list([ + tuple( + 5.0, + ), + ]) +# --- diff --git a/tests/components/evohome/snapshots/test_init.ambr b/tests/components/evohome/snapshots/test_init.ambr new file mode 100644 index 00000000000..d2e91e3c43d --- /dev/null +++ b/tests/components/evohome/snapshots/test_init.ambr @@ -0,0 +1,19 @@ +# serializer version: 1 +# name: test_setup[botched] + dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) +# --- +# name: test_setup[default] + dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) +# --- +# name: test_setup[h032585] + dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) +# --- +# name: test_setup[h099625] + dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) +# --- +# name: test_setup[minimal] + dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) +# --- +# name: test_setup[sys_004] + dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) +# --- diff --git a/tests/components/evohome/snapshots/test_water_heater.ambr b/tests/components/evohome/snapshots/test_water_heater.ambr new file mode 100644 index 00000000000..4cdeb28f445 --- /dev/null +++ b/tests/components/evohome/snapshots/test_water_heater.ambr @@ -0,0 +1,105 @@ +# serializer version: 1 +# name: test_set_operation_mode[default] + list([ + dict({ + 'until': datetime.datetime(2024, 7, 10, 12, 0, tzinfo=datetime.timezone.utc), + }), + dict({ + 'until': datetime.datetime(2024, 7, 10, 12, 0, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_setup_platform[botched][water_heater.domestic_hot_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'away_mode': 'on', + 'current_temperature': 23, + 'friendly_name': 'Domestic Hot Water', + 'icon': 'mdi:thermometer-lines', + 'max_temp': 60, + 'min_temp': 43, + 'operation_list': list([ + 'auto', + 'on', + 'off', + ]), + 'operation_mode': 'off', + 'status': dict({ + 'active_faults': list([ + ]), + 'dhw_id': '3933910', + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T13:00:00+01:00', + 'next_sp_state': 'Off', + 'this_sp_from': '2024-07-10T12:00:00+01:00', + 'this_sp_state': 'On', + }), + 'state_status': dict({ + 
'mode': 'PermanentOverride', + 'state': 'Off', + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 23.0, + }), + }), + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'water_heater.domestic_hot_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_platform[default][water_heater.domestic_hot_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'away_mode': 'on', + 'current_temperature': 23, + 'friendly_name': 'Domestic Hot Water', + 'icon': 'mdi:thermometer-lines', + 'max_temp': 60, + 'min_temp': 43, + 'operation_list': list([ + 'auto', + 'on', + 'off', + ]), + 'operation_mode': 'off', + 'status': dict({ + 'active_faults': list([ + ]), + 'dhw_id': '3933910', + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T13:00:00+01:00', + 'next_sp_state': 'Off', + 'this_sp_from': '2024-07-10T12:00:00+01:00', + 'this_sp_state': 'On', + }), + 'state_status': dict({ + 'mode': 'PermanentOverride', + 'state': 'Off', + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 23.0, + }), + }), + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'water_heater.domestic_hot_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/evohome/test_climate.py b/tests/components/evohome/test_climate.py new file mode 100644 index 00000000000..325dd914bc0 --- /dev/null +++ b/tests/components/evohome/test_climate.py @@ -0,0 +1,384 @@ +"""The tests for the climate platform of evohome. + +All evohome systems have controllers and at least one zone. 
+""" + +from __future__ import annotations + +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + ATTR_PRESET_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_TEMPERATURE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import setup_evohome +from .const import TEST_INSTALLS + + +@pytest.mark.parametrize("install", [*TEST_INSTALLS, "botched"]) +async def test_setup_platform( + hass: HomeAssistant, + config: dict[str, str], + install: str, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test entities and their states after setup of evohome.""" + + # Cannot use the evohome fixture, as need to set dtm first + # - some extended state attrs are relative the current time + freezer.move_to("2024-07-10T12:00:00Z") + + async for _ in setup_evohome(hass, config, install=install): + pass + + for x in hass.states.async_all(Platform.CLIMATE): + assert x == snapshot(name=f"{x.entity_id}-state") + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_ctl_set_hvac_mode( + hass: HomeAssistant, + ctl_id: str, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_SET_HVAC_MODE of an evohome controller.""" + + results = [] + + # SERVICE_SET_HVAC_MODE: HVACMode.OFF + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_HVAC_MODE, + { + ATTR_ENTITY_ID: ctl_id, + ATTR_HVAC_MODE: HVACMode.OFF, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # 'HeatingOff' or 'Off' + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + # SERVICE_SET_HVAC_MODE: HVACMode.HEAT + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_HVAC_MODE, + { + ATTR_ENTITY_ID: ctl_id, + ATTR_HVAC_MODE: HVACMode.HEAT, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # 'Auto' or 'Heat' + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_ctl_set_temperature( + hass: HomeAssistant, + ctl_id: str, +) -> None: + """Test SERVICE_SET_TEMPERATURE of an evohome controller.""" + + # Entity climate.xxx does not support this service + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: ctl_id, + ATTR_TEMPERATURE: 19.1, + }, + blocking=True, + ) + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_ctl_turn_off( + hass: HomeAssistant, + ctl_id: str, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_TURN_OFF of an evohome controller.""" + + results = [] + + # SERVICE_TURN_OFF + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: ctl_id, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + 
assert mock_fcn.await_args.args != () # 'HeatingOff' or 'Off' + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_ctl_turn_on( + hass: HomeAssistant, + ctl_id: str, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_TURN_ON of an evohome controller.""" + + results = [] + + # SERVICE_TURN_ON + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: ctl_id, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # 'Auto' or 'Heat' + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_zone_set_hvac_mode( + hass: HomeAssistant, + zone_id: str, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_SET_HVAC_MODE of an evohome heating zone.""" + + results = [] + + # SERVICE_SET_HVAC_MODE: HVACMode.HEAT + with patch("evohomeasync2.zone.Zone.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_HVAC_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_HVAC_MODE: HVACMode.HEAT, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + # SERVICE_SET_HVAC_MODE: HVACMode.OFF + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_HVAC_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_HVAC_MODE: HVACMode.OFF, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # minimum target temp + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_zone_set_preset_mode( + hass: HomeAssistant, + zone_id: str, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_SET_PRESET_MODE of an evohome heating zone.""" + + freezer.move_to("2024-07-10T12:00:00Z") + results = [] + + # SERVICE_SET_PRESET_MODE: none + with patch("evohomeasync2.zone.Zone.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_PRESET_MODE: "none", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + # SERVICE_SET_PRESET_MODE: permanent + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_PRESET_MODE: "permanent", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # current target temp + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + # SERVICE_SET_PRESET_MODE: temporary + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_PRESET_MODE: "temporary", + }, + blocking=True, + ) + + assert 
mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # current target temp + assert mock_fcn.await_args.kwargs != {} # next setpoint dtm + + results.append(mock_fcn.await_args.args) + results.append(mock_fcn.await_args.kwargs) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_zone_set_temperature( + hass: HomeAssistant, + zone_id: str, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_SET_TEMPERATURE of an evohome heating zone.""" + + freezer.move_to("2024-07-10T12:00:00Z") + results = [] + + # SERVICE_SET_TEMPERATURE: temperature + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_TEMPERATURE: 19.1, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == (19.1,) + assert mock_fcn.await_args.kwargs != {} # next setpoint dtm + + results.append(mock_fcn.await_args.kwargs) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_zone_turn_off( + hass: HomeAssistant, + zone_id: str, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_TURN_OFF of an evohome heating zone.""" + + results = [] + + # SERVICE_TURN_OFF + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: zone_id, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # minimum target temp + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_zone_turn_on( + hass: HomeAssistant, + zone_id: str, +) -> None: + """Test SERVICE_TURN_ON of an evohome heating zone.""" + + # SERVICE_TURN_ON + with patch("evohomeasync2.zone.Zone.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: zone_id, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} diff --git a/tests/components/evohome/test_init.py b/tests/components/evohome/test_init.py new file mode 100644 index 00000000000..49a854016ea --- /dev/null +++ b/tests/components/evohome/test_init.py @@ -0,0 +1,182 @@ +"""The tests for evohome.""" + +from __future__ import annotations + +from http import HTTPStatus +import logging +from unittest.mock import patch + +from evohomeasync2 import EvohomeClient, exceptions as exc +from evohomeasync2.broker import _ERR_MSG_LOOKUP_AUTH, _ERR_MSG_LOOKUP_BASE +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.evohome import DOMAIN, EvoService +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .const import TEST_INSTALLS + +SETUP_FAILED_ANTICIPATED = ( + "homeassistant.setup", + logging.ERROR, + "Setup failed for 'evohome': Integration failed to initialize.", +) +SETUP_FAILED_UNEXPECTED = ( + "homeassistant.setup", + logging.ERROR, + "Error during setup of component evohome", +) +AUTHENTICATION_FAILED = ( + "homeassistant.components.evohome.helpers", + logging.ERROR, + "Failed to authenticate with the vendor's server. Check your username" + " and password. 
NB: Some special password characters that work" + " correctly via the website will not work via the web API. Message" + " is: ", +) +REQUEST_FAILED_NONE = ( + "homeassistant.components.evohome.helpers", + logging.WARNING, + "Unable to connect with the vendor's server. " + "Check your network and the vendor's service status page. " + "Message is: ", +) +REQUEST_FAILED_503 = ( + "homeassistant.components.evohome.helpers", + logging.WARNING, + "The vendor says their server is currently unavailable. " + "Check the vendor's service status page", +) +REQUEST_FAILED_429 = ( + "homeassistant.components.evohome.helpers", + logging.WARNING, + "The vendor's API rate limit has been exceeded. " + "If this message persists, consider increasing the scan_interval", +) + +REQUEST_FAILED_LOOKUP = { + None: [ + REQUEST_FAILED_NONE, + SETUP_FAILED_ANTICIPATED, + ], + HTTPStatus.SERVICE_UNAVAILABLE: [ + REQUEST_FAILED_503, + SETUP_FAILED_ANTICIPATED, + ], + HTTPStatus.TOO_MANY_REQUESTS: [ + REQUEST_FAILED_429, + SETUP_FAILED_ANTICIPATED, + ], +} + + +@pytest.mark.parametrize( + "status", [*sorted([*_ERR_MSG_LOOKUP_AUTH, HTTPStatus.BAD_GATEWAY]), None] +) +async def test_authentication_failure_v2( + hass: HomeAssistant, + config: dict[str, str], + status: HTTPStatus, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test failure to setup an evohome-compatible system. + + In this instance, the failure occurs in the v2 API. + """ + + with patch("evohomeasync2.broker.Broker.get") as mock_fcn: + mock_fcn.side_effect = exc.AuthenticationFailed("", status=status) + + with caplog.at_level(logging.WARNING): + result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + + assert result is False + + assert caplog.record_tuples == [ + AUTHENTICATION_FAILED, + SETUP_FAILED_ANTICIPATED, + ] + + +@pytest.mark.parametrize( + "status", [*sorted([*_ERR_MSG_LOOKUP_BASE, HTTPStatus.BAD_GATEWAY]), None] +) +async def test_client_request_failure_v2( + hass: HomeAssistant, + config: dict[str, str], + status: HTTPStatus, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test failure to setup an evohome-compatible system. + + In this instance, the failure occurs in the v2 API. + """ + + with patch("evohomeasync2.broker.Broker.get") as mock_fcn: + mock_fcn.side_effect = exc.RequestFailed("", status=status) + + with caplog.at_level(logging.WARNING): + result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + + assert result is False + + assert caplog.record_tuples == REQUEST_FAILED_LOOKUP.get( + status, [SETUP_FAILED_UNEXPECTED] + ) + + +@pytest.mark.parametrize("install", [*TEST_INSTALLS, "botched"]) +async def test_setup( + hass: HomeAssistant, + evohome: EvohomeClient, + snapshot: SnapshotAssertion, +) -> None: + """Test services after setup of evohome. + + Registered services vary by the type of system. 
+ """ + + assert hass.services.async_services_for_domain(DOMAIN).keys() == snapshot + + +@pytest.mark.parametrize("install", ["default"]) +async def test_service_refresh_system( + hass: HomeAssistant, + evohome: EvohomeClient, +) -> None: + """Test EvoService.REFRESH_SYSTEM of an evohome system.""" + + # EvoService.REFRESH_SYSTEM + with patch("evohomeasync2.location.Location.refresh_status") as mock_fcn: + await hass.services.async_call( + DOMAIN, + EvoService.REFRESH_SYSTEM, + {}, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + +@pytest.mark.parametrize("install", ["default"]) +async def test_service_reset_system( + hass: HomeAssistant, + evohome: EvohomeClient, +) -> None: + """Test EvoService.RESET_SYSTEM of an evohome system.""" + + # EvoService.RESET_SYSTEM (if SZ_AUTO_WITH_RESET in modes) + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + DOMAIN, + EvoService.RESET_SYSTEM, + {}, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == ("AutoWithReset",) + assert mock_fcn.await_args.kwargs == {"until": None} diff --git a/tests/components/evohome/test_storage.py b/tests/components/evohome/test_storage.py index e87b847a9ff..4cc21078333 100644 --- a/tests/components/evohome/test_storage.py +++ b/tests/components/evohome/test_storage.py @@ -8,7 +8,6 @@ from typing import Any, Final, NotRequired, TypedDict import pytest from homeassistant.components.evohome import ( - CONF_PASSWORD, CONF_USERNAME, DOMAIN, STORAGE_KEY, @@ -56,28 +55,20 @@ ACCESS_TOKEN_EXP_DTM, ACCESS_TOKEN_EXP_STR = dt_pair(dt_util.now() + timedelta(h USERNAME_DIFF: Final = f"not_{USERNAME}" USERNAME_SAME: Final = USERNAME -TEST_CONFIG: Final = { - CONF_USERNAME: USERNAME_SAME, - CONF_PASSWORD: "password", +_TEST_STORAGE_BASE: Final[_TokenStoreT] = { + SZ_USERNAME: USERNAME_SAME, + SZ_REFRESH_TOKEN: REFRESH_TOKEN, + SZ_ACCESS_TOKEN: ACCESS_TOKEN, + SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, } -TEST_DATA: Final[dict[str, _TokenStoreT]] = { - "sans_session_id": { - SZ_USERNAME: USERNAME_SAME, - SZ_REFRESH_TOKEN: REFRESH_TOKEN, - SZ_ACCESS_TOKEN: ACCESS_TOKEN, - SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, - }, - "with_session_id": { - SZ_USERNAME: USERNAME_SAME, - SZ_REFRESH_TOKEN: REFRESH_TOKEN, - SZ_ACCESS_TOKEN: ACCESS_TOKEN, - SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, - SZ_USER_DATA: {"sessionId": SESSION_ID}, - }, +TEST_STORAGE_DATA: Final[dict[str, _TokenStoreT]] = { + "sans_session_id": _TEST_STORAGE_BASE, + "null_session_id": _TEST_STORAGE_BASE | {SZ_USER_DATA: None}, # type: ignore[dict-item] + "with_session_id": _TEST_STORAGE_BASE | {SZ_USER_DATA: {"sessionId": SESSION_ID}}, } -TEST_DATA_NULL: Final[dict[str, _EmptyStoreT | None]] = { +TEST_STORAGE_NULL: Final[dict[str, _EmptyStoreT | None]] = { "store_is_absent": None, "store_was_reset": {}, } @@ -89,22 +80,24 @@ DOMAIN_STORAGE_BASE: Final = { } -@pytest.mark.parametrize("idx", TEST_DATA_NULL) +@pytest.mark.parametrize("install", ["minimal"]) +@pytest.mark.parametrize("idx", TEST_STORAGE_NULL) async def test_auth_tokens_null( hass: HomeAssistant, hass_storage: dict[str, Any], + config: dict[str, str], idx: str, + install: str, ) -> None: """Test loading/saving authentication tokens when no cached tokens in the store.""" - hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA_NULL[idx]} + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": 
TEST_STORAGE_NULL[idx]} - mock_client = await setup_evohome(hass, TEST_CONFIG) - - # Confirm client was instantiated without tokens, as cache was empty... - assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs - assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs - assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwarg + async for mock_client in setup_evohome(hass, config, install=install): + # Confirm client was instantiated without tokens, as cache was empty... + assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwargs # Confirm the expected tokens were cached to storage... data: _TokenStoreT = hass_storage[DOMAIN]["data"] @@ -118,22 +111,26 @@ async def test_auth_tokens_null( ) -@pytest.mark.parametrize("idx", TEST_DATA) +@pytest.mark.parametrize("install", ["minimal"]) +@pytest.mark.parametrize("idx", TEST_STORAGE_DATA) async def test_auth_tokens_same( - hass: HomeAssistant, hass_storage: dict[str, Any], idx: str + hass: HomeAssistant, + hass_storage: dict[str, Any], + config: dict[str, str], + idx: str, + install: str, ) -> None: """Test loading/saving authentication tokens when matching username.""" - hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]} + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_STORAGE_DATA[idx]} - mock_client = await setup_evohome(hass, TEST_CONFIG) - - # Confirm client was instantiated with the cached tokens... - assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN - assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN - assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN_EXPIRES] == dt_aware_to_naive( - ACCESS_TOKEN_EXP_DTM - ) + async for mock_client in setup_evohome(hass, config, install=install): + # Confirm client was instantiated with the cached tokens... + assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN + assert mock_client.call_args.kwargs[ + SZ_ACCESS_TOKEN_EXPIRES + ] == dt_aware_to_naive(ACCESS_TOKEN_EXP_DTM) # Confirm the expected tokens were cached to storage... data: _TokenStoreT = hass_storage[DOMAIN]["data"] @@ -144,28 +141,32 @@ async def test_auth_tokens_same( assert dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES]) == ACCESS_TOKEN_EXP_DTM -@pytest.mark.parametrize("idx", TEST_DATA) +@pytest.mark.parametrize("install", ["minimal"]) +@pytest.mark.parametrize("idx", TEST_STORAGE_DATA) async def test_auth_tokens_past( - hass: HomeAssistant, hass_storage: dict[str, Any], idx: str + hass: HomeAssistant, + hass_storage: dict[str, Any], + config: dict[str, str], + idx: str, + install: str, ) -> None: """Test loading/saving authentication tokens with matching username, but expired.""" dt_dtm, dt_str = dt_pair(dt_util.now() - timedelta(hours=1)) # make this access token have expired in the past... - test_data = TEST_DATA[idx].copy() # shallow copy is OK here + test_data = TEST_STORAGE_DATA[idx].copy() # shallow copy is OK here test_data[SZ_ACCESS_TOKEN_EXPIRES] = dt_str hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": test_data} - mock_client = await setup_evohome(hass, TEST_CONFIG) - - # Confirm client was instantiated with the cached tokens...
- assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN - assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN - assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN_EXPIRES] == dt_aware_to_naive( - dt_dtm - ) + async for mock_client in setup_evohome(hass, config, install=install): + # Confirm client was instantiated with the cached tokens... + assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN + assert mock_client.call_args.kwargs[ + SZ_ACCESS_TOKEN_EXPIRES + ] == dt_aware_to_naive(dt_dtm) # Confirm the expected tokens were cached to storage... data: _TokenStoreT = hass_storage[DOMAIN]["data"] @@ -179,22 +180,26 @@ async def test_auth_tokens_past( ) -@pytest.mark.parametrize("idx", TEST_DATA) +@pytest.mark.parametrize("install", ["minimal"]) +@pytest.mark.parametrize("idx", TEST_STORAGE_DATA) async def test_auth_tokens_diff( - hass: HomeAssistant, hass_storage: dict[str, Any], idx: str + hass: HomeAssistant, + hass_storage: dict[str, Any], + config: dict[str, str], + idx: str, + install: str, ) -> None: """Test loading/saving authentication tokens when unmatched username.""" - hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]} + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_STORAGE_DATA[idx]} - mock_client = await setup_evohome( - hass, TEST_CONFIG | {CONF_USERNAME: USERNAME_DIFF} - ) - - # Confirm client was instantiated without tokens, as username was different... - assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs - assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs - assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwarg + async for mock_client in setup_evohome( + hass, config | {CONF_USERNAME: USERNAME_DIFF}, install=install + ): + # Confirm client was instantiated without tokens, as username was different... + assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwargs # Confirm the expected tokens were cached to storage... data: _TokenStoreT = hass_storage[DOMAIN]["data"] diff --git a/tests/components/evohome/test_water_heater.py b/tests/components/evohome/test_water_heater.py new file mode 100644 index 00000000000..8acfd469b59 --- /dev/null +++ b/tests/components/evohome/test_water_heater.py @@ -0,0 +1,190 @@ +"""The tests for the water_heater platform of evohome. + +Not all evohome systems will have a DHW zone.
+""" + +from __future__ import annotations + +from unittest.mock import patch + +from evohomeasync2 import EvohomeClient +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.water_heater import ( + ATTR_AWAY_MODE, + ATTR_OPERATION_MODE, + SERVICE_SET_AWAY_MODE, + SERVICE_SET_OPERATION_MODE, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import setup_evohome +from .const import TEST_INSTALLS_WITH_DHW + +DHW_ENTITY_ID = "water_heater.domestic_hot_water" + + +@pytest.mark.parametrize("install", [*TEST_INSTALLS_WITH_DHW, "botched"]) +async def test_setup_platform( + hass: HomeAssistant, + config: dict[str, str], + install: str, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test entities and their states after setup of evohome.""" + + # Cannot use the evohome fixture, as need to set dtm first + # - some extended state attrs are relative the current time + freezer.move_to("2024-07-10T12:00:00Z") + + async for _ in setup_evohome(hass, config, install=install): + pass + + for x in hass.states.async_all(Platform.WATER_HEATER): + assert x == snapshot(name=f"{x.entity_id}-state") + + +@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) +async def test_set_operation_mode( + hass: HomeAssistant, + evohome: EvohomeClient, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_SET_OPERATION_MODE of an evohome DHW zone.""" + + freezer.move_to("2024-07-10T11:55:00Z") + results = [] + + # SERVICE_SET_OPERATION_MODE: auto + with patch("evohomeasync2.hotwater.HotWater.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_OPERATION_MODE, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_OPERATION_MODE: "auto", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + # SERVICE_SET_OPERATION_MODE: off (until next scheduled setpoint) + with patch("evohomeasync2.hotwater.HotWater.set_off") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_OPERATION_MODE, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_OPERATION_MODE: "off", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs != {} + + results.append(mock_fcn.await_args.kwargs) + + # SERVICE_SET_OPERATION_MODE: on (until next scheduled setpoint) + with patch("evohomeasync2.hotwater.HotWater.set_on") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_OPERATION_MODE, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_OPERATION_MODE: "on", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs != {} + + results.append(mock_fcn.await_args.kwargs) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) +async def test_set_away_mode(hass: HomeAssistant, evohome: EvohomeClient) -> None: + """Test SERVICE_SET_AWAY_MODE of an evohome DHW zone.""" + + # set_away_mode: off + with patch("evohomeasync2.hotwater.HotWater.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_AWAY_MODE, + { + 
ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_AWAY_MODE: "off", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + # set_away_mode: on + with patch("evohomeasync2.hotwater.HotWater.set_off") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_AWAY_MODE, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_AWAY_MODE: "on", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + +@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) +async def test_turn_off(hass: HomeAssistant, evohome: EvohomeClient) -> None: + """Test SERVICE_TURN_OFF of an evohome DHW zone.""" + + # Entity water_heater.xxx does not support this service + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + }, + blocking=True, + ) + + +@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) +async def test_turn_on(hass: HomeAssistant, evohome: EvohomeClient) -> None: + """Test SERVICE_TURN_ON of an evohome DHW zone.""" + + # Entity water_heater.xxx does not support this service + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + }, + blocking=True, + ) diff --git a/tests/components/ezviz/test_config_flow.py b/tests/components/ezviz/test_config_flow.py index f9459635f2c..63499996c89 100644 --- a/tests/components/ezviz/test_config_flow.py +++ b/tests/components/ezviz/test_config_flow.py @@ -20,11 +20,7 @@ from homeassistant.components.ezviz.const import ( DEFAULT_TIMEOUT, DOMAIN, ) -from homeassistant.config_entries import ( - SOURCE_INTEGRATION_DISCOVERY, - SOURCE_REAUTH, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_INTEGRATION_DISCOVERY, SOURCE_USER from homeassistant.const import ( CONF_CUSTOMIZE, CONF_IP_ADDRESS, @@ -45,6 +41,8 @@ from . 
import ( patch_async_setup_entry, ) +from tests.common import MockConfigEntry, start_reauth_flow + @pytest.mark.usefixtures("ezviz_config_flow") async def test_user_form(hass: HomeAssistant) -> None: @@ -134,9 +132,8 @@ async def test_async_step_reauth(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=USER_INPUT_VALIDATE - ) + new_entry = hass.config_entries.async_entries(DOMAIN)[0] + result = await start_reauth_flow(hass, new_entry) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -182,9 +179,10 @@ async def test_step_discovery_abort_if_cloud_account_missing( async def test_step_reauth_abort_if_cloud_account_missing(hass: HomeAssistant) -> None: """Test reauth and confirm step, abort if cloud account was removed.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=USER_INPUT_VALIDATE - ) + entry = MockConfigEntry(domain=DOMAIN, data=USER_INPUT_VALIDATE) + entry.add_to_hass(hass) + + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "ezviz_cloud_account_missing" @@ -562,9 +560,8 @@ async def test_async_step_reauth_exception( assert len(mock_setup_entry.mock_calls) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=USER_INPUT_VALIDATE - ) + new_entry = hass.config_entries.async_entries(DOMAIN)[0] + result = await start_reauth_flow(hass, new_entry) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/fan/test_init.py b/tests/components/fan/test_init.py index a7dc544a97a..90061ec60a1 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -1,10 +1,7 @@ """Tests for fan platforms.""" -from unittest.mock import patch - import pytest -from homeassistant.components import fan from homeassistant.components.fan import ( ATTR_PRESET_MODE, ATTR_PRESET_MODES, @@ -14,25 +11,13 @@ from homeassistant.components.fan import ( FanEntityFeature, NotValidPresetModeError, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.helpers.entity_registry as er from homeassistant.setup import async_setup_component from .common import MockFan -from tests.common import ( - MockConfigEntry, - MockModule, - MockPlatform, - help_test_all, - import_and_test_deprecated_constant_enum, - mock_integration, - mock_platform, - setup_test_component_platform, -) +from tests.common import setup_test_component_platform class BaseFan(FanEntity): @@ -164,317 +149,3 @@ async def test_preset_mode_validation( with pytest.raises(NotValidPresetModeError) as exc: await test_fan._valid_preset_mode_or_raise("invalid") assert exc.value.translation_key == "not_valid_preset_mode" - - -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(fan) - - -@pytest.mark.parametrize(("enum"), list(fan.FanEntityFeature)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: fan.FanEntityFeature, -) -> None: - """Test deprecated constants.""" - if not FanEntityFeature.TURN_OFF and not 
FanEntityFeature.TURN_ON: - import_and_test_deprecated_constant_enum( - caplog, fan, enum, "SUPPORT_", "2025.1" - ) - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockFan(FanEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockFan() - assert entity.supported_features is FanEntityFeature(1) - assert "MockFan" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "FanEntityFeature.SET_SPEED" in caplog.text - caplog.clear() - assert entity.supported_features is FanEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text - - -async def test_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test adding feature flag and warn if missing when methods are set.""" - - called = [] - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - def turn_on( - self, - percentage: int | None = None, - preset_mode: str | None = None, - ) -> None: - """Turn on.""" - called.append("turn_on") - - def turn_off(self) -> None: - """Turn off.""" - called.append("turn_off") - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert ( - "Entity fan.test (.MockFanEntityTest'>) " - "does not set FanEntityFeature.TURN_OFF but implements the turn_off method. Please report it to the author of the 'test' custom integration" - in caplog.text - ) - assert ( - "Entity fan.test (.MockFanEntityTest'>) " - "does not set FanEntityFeature.TURN_ON but implements the turn_on method. 
Please report it to the author of the 'test' custom integration" - in caplog.text - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_ON, - { - "entity_id": "fan.test", - }, - blocking=True, - ) - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_OFF, - { - "entity_id": "fan.test", - }, - blocking=True, - ) - - assert len(called) == 2 - assert "turn_on" in called - assert "turn_off" in called - - -async def test_no_warning_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test no warning when feature flags are set.""" - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - _attr_supported_features = ( - FanEntityFeature.DIRECTION - | FanEntityFeature.OSCILLATE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text - assert "does not set FanEntityFeature.TURN_ON" not in caplog.text - - -async def test_no_warning_integration_has_migrated( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - _enable_turn_on_off_backwards_compatibility = False - _attr_supported_features = ( - FanEntityFeature.DIRECTION - | FanEntityFeature.OSCILLATE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - ) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = 
MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text - assert "does not set FanEntityFeature.TURN_ON" not in caplog.text - - -async def test_no_warning_integration_implement_feature_flags( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test no warning when integration uses the correct feature flags.""" - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - _attr_supported_features = ( - FanEntityFeature.DIRECTION - | FanEntityFeature.OSCILLATE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text - assert "does not set FanEntityFeature.TURN_ON" not in caplog.text diff --git a/tests/components/feedreader/conftest.py b/tests/components/feedreader/conftest.py index 8eeb89e00cd..1e7d50c3835 100644 --- a/tests/components/feedreader/conftest.py +++ b/tests/components/feedreader/conftest.py @@ -64,6 +64,18 @@ def fixture_feed_only_summary(hass: HomeAssistant) -> bytes: return load_fixture_bytes("feedreader8.xml") +@pytest.fixture(name="feed_htmlentities") +def fixture_feed_htmlentities(hass: HomeAssistant) -> bytes: + """Load test feed data with HTML Entities.""" + return load_fixture_bytes("feedreader9.xml") + + +@pytest.fixture(name="feed_atom_htmlentities") +def fixture_feed_atom_htmlentities(hass: HomeAssistant) -> bytes: + """Load test ATOM feed data with HTML Entities.""" + return load_fixture_bytes("feedreader10.xml") + + @pytest.fixture(name="events") async def fixture_events(hass: HomeAssistant) -> list[Event]: """Fixture that catches alexa events.""" diff --git a/tests/components/feedreader/fixtures/feedreader10.xml b/tests/components/feedreader/fixtures/feedreader10.xml new file mode 100644 index 00000000000..17ec8069ae1 --- /dev/null +++ b/tests/components/feedreader/fixtures/feedreader10.xml @@ -0,0 +1,19 @@ + + + <![CDATA[ATOM RSS en español]]> + + 2024-11-18T14:00:00Z + + + + urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6 + + <![CDATA[Título]]> + + urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a + 2024-11-18T14:00:00Z + + + + diff --git 
a/tests/components/feedreader/fixtures/feedreader9.xml b/tests/components/feedreader/fixtures/feedreader9.xml new file mode 100644 index 00000000000..580a42cbd3f --- /dev/null +++ b/tests/components/feedreader/fixtures/feedreader9.xml @@ -0,0 +1,21 @@ + + + + <![CDATA[RSS en español]]> + + http://www.example.com/main.html + Mon, 18 Nov 2024 15:00:00 +1000 + Mon, 18 Nov 2024 15:00:00 +1000 + 1800 + + + <![CDATA[Título 1]]> + + http://www.example.com/link/1 + GUID 1 + Mon, 18 Nov 2024 15:00:00 +1000 + + + + + diff --git a/tests/components/feedreader/snapshots/test_event.ambr b/tests/components/feedreader/snapshots/test_event.ambr new file mode 100644 index 00000000000..9cce035ea87 --- /dev/null +++ b/tests/components/feedreader/snapshots/test_event.ambr @@ -0,0 +1,27 @@ +# serializer version: 1 +# name: test_event_htmlentities[feed_atom_htmlentities] + ReadOnlyDict({ + 'content': 'Contenido en español', + 'description': 'Resumen en español', + 'event_type': 'feedreader', + 'event_types': list([ + 'feedreader', + ]), + 'friendly_name': 'Mock Title', + 'link': 'http://example.org/2003/12/13/atom03', + 'title': 'Título', + }) +# --- +# name: test_event_htmlentities[feed_htmlentities] + ReadOnlyDict({ + 'content': 'Contenido 1 en español', + 'description': 'Descripción 1', + 'event_type': 'feedreader', + 'event_types': list([ + 'feedreader', + ]), + 'friendly_name': 'Mock Title', + 'link': 'http://www.example.com/link/1', + 'title': 'Título 1', + }) +# --- diff --git a/tests/components/feedreader/test_config_flow.py b/tests/components/feedreader/test_config_flow.py index 47bccce902f..c9fc89179db 100644 --- a/tests/components/feedreader/test_config_flow.py +++ b/tests/components/feedreader/test_config_flow.py @@ -5,18 +5,15 @@ import urllib import pytest -from homeassistant.components.feedreader import CONF_URLS from homeassistant.components.feedreader.const import ( CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES, DOMAIN, ) -from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_URL -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component from . 
import create_mock_entry from .const import FEED_TITLE, URL, VALID_CONFIG_DEFAULT @@ -95,65 +92,6 @@ async def test_user_errors( assert result["options"][CONF_MAX_ENTRIES] == DEFAULT_MAX_ENTRIES -@pytest.mark.parametrize( - ("data", "expected_data", "expected_options"), - [ - ({CONF_URLS: [URL]}, {CONF_URL: URL}, {CONF_MAX_ENTRIES: DEFAULT_MAX_ENTRIES}), - ( - {CONF_URLS: [URL], CONF_MAX_ENTRIES: 5}, - {CONF_URL: URL}, - {CONF_MAX_ENTRIES: 5}, - ), - ], -) -async def test_import( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - data, - expected_data, - expected_options, - feedparser, - setup_entry, -) -> None: - """Test starting an import flow.""" - config_entries = hass.config_entries.async_entries(DOMAIN) - assert not config_entries - - assert await async_setup_component(hass, DOMAIN, {DOMAIN: data}) - - config_entries = hass.config_entries.async_entries(DOMAIN) - assert config_entries - assert len(config_entries) == 1 - assert config_entries[0].title == FEED_TITLE - assert config_entries[0].data == expected_data - assert config_entries[0].options == expected_options - - assert issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_feedreader" - ) - - -async def test_import_errors( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - feedparser, - setup_entry, - feed_one_event, -) -> None: - """Test starting an import flow which results in an URL error.""" - config_entries = hass.config_entries.async_entries(DOMAIN) - assert not config_entries - - # raise URLError - feedparser.side_effect = urllib.error.URLError("Test") - feedparser.return_value = None - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_URLS: [URL]}}) - assert issue_registry.async_get_issue( - DOMAIN, - "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", - ) - - async def test_reconfigure(hass: HomeAssistant, feedparser) -> None: """Test starting a reconfigure flow.""" entry = create_mock_entry(VALID_CONFIG_DEFAULT) @@ -162,16 +100,9 @@ async def test_reconfigure(hass: HomeAssistant, feedparser) -> None: await hass.async_block_till_done() # init user flow - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" # success with patch( @@ -201,16 +132,9 @@ async def test_reconfigure_errors( entry.add_to_hass(hass) # init user flow - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" # raise URLError feedparser.side_effect = urllib.error.URLError("Test") @@ -222,7 +146,7 @@ async def test_reconfigure_errors( }, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": "url_error"} # success @@ -260,3 +184,38 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["data"] == { CONF_MAX_ENTRIES: 10, } + + +@pytest.mark.parametrize( + ("fixture_name", "expected_title"), + [ + ("feed_htmlentities", "RSS en español"), + 
("feed_atom_htmlentities", "ATOM RSS en español"), + ], +) +async def test_feed_htmlentities( + hass: HomeAssistant, + feedparser, + setup_entry, + fixture_name, + expected_title, + request: pytest.FixtureRequest, +) -> None: + """Test starting a flow by user from a feed with HTML Entities in the title.""" + with patch( + "homeassistant.components.feedreader.config_flow.feedparser.http.get", + side_effect=[request.getfixturevalue(fixture_name)], + ): + # init user flow + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + # success + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_URL: URL} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == expected_title diff --git a/tests/components/feedreader/test_event.py b/tests/components/feedreader/test_event.py index 5d903383c05..32f8ecb8080 100644 --- a/tests/components/feedreader/test_event.py +++ b/tests/components/feedreader/test_event.py @@ -3,8 +3,12 @@ from datetime import timedelta from unittest.mock import patch +import pytest +from syrupy.assertion import SnapshotAssertion + from homeassistant.components.feedreader.event import ( ATTR_CONTENT, + ATTR_DESCRIPTION, ATTR_LINK, ATTR_TITLE, ) @@ -35,6 +39,7 @@ async def test_event_entity( assert state.attributes[ATTR_TITLE] == "Title 1" assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1" assert state.attributes[ATTR_CONTENT] == "Content 1" + assert state.attributes[ATTR_DESCRIPTION] == "Description 1" future = dt_util.utcnow() + timedelta(hours=1, seconds=1) async_fire_time_changed(hass, future) @@ -45,6 +50,7 @@ async def test_event_entity( assert state.attributes[ATTR_TITLE] == "Title 2" assert state.attributes[ATTR_LINK] == "http://www.example.com/link/2" assert state.attributes[ATTR_CONTENT] == "Content 2" + assert state.attributes[ATTR_DESCRIPTION] == "Description 2" future = dt_util.utcnow() + timedelta(hours=2, seconds=2) async_fire_time_changed(hass, future) @@ -55,3 +61,32 @@ async def test_event_entity( assert state.attributes[ATTR_TITLE] == "Title 1" assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1" assert state.attributes[ATTR_CONTENT] == "This is a summary" + assert state.attributes[ATTR_DESCRIPTION] == "Description 1" + + +@pytest.mark.parametrize( + ("fixture_name"), + [ + ("feed_htmlentities"), + ("feed_atom_htmlentities"), + ], +) +async def test_event_htmlentities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + fixture_name, + request: pytest.FixtureRequest, +) -> None: + """Test feed event entity with HTML Entities.""" + entry = create_mock_entry(VALID_CONFIG_DEFAULT) + entry.add_to_hass(hass) + with patch( + "homeassistant.components.feedreader.coordinator.feedparser.http.get", + side_effect=[request.getfixturevalue(fixture_name)], + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("event.mock_title") + assert state + assert state.attributes == snapshot diff --git a/tests/components/feedreader/test_init.py b/tests/components/feedreader/test_init.py index d7700d79e3b..bc7a66dc86e 100644 --- a/tests/components/feedreader/test_init.py +++ b/tests/components/feedreader/test_init.py @@ -12,6 +12,7 @@ import pytest from homeassistant.components.feedreader.const import DOMAIN from homeassistant.core import Event, HomeAssistant +from 
homeassistant.helpers import device_registry as dr import homeassistant.util.dt as dt_util from . import async_setup_config_entry, create_mock_entry @@ -357,3 +358,23 @@ async def test_feed_errors( freezer.tick(timedelta(hours=1, seconds=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) + + +async def test_feed_atom_htmlentities( + hass: HomeAssistant, feed_atom_htmlentities, device_registry: dr.DeviceRegistry +) -> None: + """Test ATOM feed author with HTML Entities.""" + + entry = create_mock_entry(VALID_CONFIG_DEFAULT) + entry.add_to_hass(hass) + with patch( + "homeassistant.components.feedreader.coordinator.feedparser.http.get", + side_effect=[feed_atom_htmlentities], + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, entry.entry_id)} + ) + assert device_entry.manufacturer == "Juan Pérez" diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index 4d99dea6682..583c44a41e6 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -49,6 +49,142 @@ def mock_room() -> Mock: return room + +@pytest.fixture +def mock_power_sensor() -> Mock: + """Fixture for an individual power sensor without value.""" + sensor = Mock() + sensor.fibaro_id = 1 + sensor.parent_fibaro_id = 0 + sensor.name = "Test sensor" + sensor.room_id = 1 + sensor.visible = True + sensor.enabled = True + sensor.type = "com.fibaro.powerMeter" + sensor.base_type = "com.fibaro.device" + sensor.properties = { + "zwaveCompany": "Goap", + "endPointId": "2", + "manufacturer": "", + "power": "6.60", + } + sensor.actions = {} + sensor.has_central_scene_event = False + value_mock = Mock() + value_mock.has_value = False + value_mock.is_bool_value = False + sensor.value = value_mock + return sensor + + +@pytest.fixture +def mock_cover() -> Mock: + """Fixture for a cover.""" + cover = Mock() + cover.fibaro_id = 3 + cover.parent_fibaro_id = 0 + cover.name = "Test cover" + cover.room_id = 1 + cover.dead = False + cover.visible = True + cover.enabled = True + cover.type = "com.fibaro.FGR" + cover.base_type = "com.fibaro.device" + cover.properties = {"manufacturer": ""} + cover.actions = {"open": 0, "close": 0} + cover.supported_features = {} + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + cover.value = value_mock + value2_mock = Mock() + value2_mock.has_value = False + cover.value_2 = value2_mock + state_mock = Mock() + state_mock.has_value = True + state_mock.str_value.return_value = "opening" + cover.state = state_mock + return cover + + +@pytest.fixture +def mock_light() -> Mock: + """Fixture for a dimmable light.""" + light = Mock() + light.fibaro_id = 3 + light.parent_fibaro_id = 0 + light.name = "Test light" + light.room_id = 1 + light.dead = False + light.visible = True + light.enabled = True + light.type = "com.fibaro.FGD212" + light.base_type = "com.fibaro.device" + light.properties = {"manufacturer": ""} + light.actions = {"setValue": 1, "on": 0, "off": 0} + light.supported_features = {} + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + light.value = value_mock + return light + + +@pytest.fixture +def mock_thermostat() -> Mock: + """Fixture for a thermostat.""" + climate = Mock() + climate.fibaro_id = 4 + climate.parent_fibaro_id = 0 + climate.name = "Test climate" + climate.room_id = 1 + climate.dead =
False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.thermostatDanfoss" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = {"setThermostatMode": 1} + climate.supported_features = {} + climate.has_supported_thermostat_modes = True + climate.supported_thermostat_modes = ["Off", "Heat", "CustomerSpecific"] + climate.has_operating_mode = False + climate.has_thermostat_mode = True + climate.thermostat_mode = "CustomerSpecific" + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + climate.value = value_mock + return climate + + +@pytest.fixture +def mock_thermostat_with_operating_mode() -> Mock: + """Fixture for a thermostat.""" + climate = Mock() + climate.fibaro_id = 4 + climate.parent_fibaro_id = 0 + climate.name = "Test climate" + climate.room_id = 1 + climate.dead = False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.thermostatDanfoss" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = {"setOperationMode": 1} + climate.supported_features = {} + climate.has_supported_operating_modes = True + climate.supported_operating_modes = [0, 1, 15] + climate.has_operating_mode = True + climate.operating_mode = 15 + climate.has_thermostat_mode = False + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + climate.value = value_mock + return climate + + @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return the default mocked config entry.""" diff --git a/tests/components/fibaro/test_climate.py b/tests/components/fibaro/test_climate.py new file mode 100644 index 00000000000..31022e19a08 --- /dev/null +++ b/tests/components/fibaro/test_climate.py @@ -0,0 +1,134 @@ +"""Test the Fibaro climate platform.""" + +from unittest.mock import Mock, patch + +from homeassistant.components.climate import ClimateEntityFeature, HVACMode +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import init_integration + +from tests.common import MockConfigEntry + + +async def test_climate_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the climate creates an entity.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + entry = entity_registry.async_get("climate.room_1_test_climate_4") + assert entry + assert entry.unique_id == "hc2_111111.4" + assert entry.original_name == "Room 1 Test climate" + assert entry.supported_features == ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.PRESET_MODE + ) + + +async def test_hvac_mode_preset( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the climate state is auto when a preset is selected.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with 
patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.AUTO + assert state.attributes["preset_mode"] == "CustomerSpecific" + + +async def test_hvac_mode_heat( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the preset mode is None if a hvac mode is active.""" + + # Arrange + mock_thermostat.thermostat_mode = "Heat" + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.HEAT + assert state.attributes["preset_mode"] is None + + +async def test_set_hvac_mode( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that set_hvac_mode() works.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + await hass.services.async_call( + "climate", + "set_hvac_mode", + {"entity_id": "climate.room_1_test_climate_4", "hvac_mode": HVACMode.HEAT}, + blocking=True, + ) + + # Assert + mock_thermostat.execute_action.assert_called_once() + + +async def test_hvac_mode_with_operation_mode_support( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat_with_operating_mode: Mock, + mock_room: Mock, +) -> None: + """Test that operating mode works.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat_with_operating_mode] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.AUTO diff --git a/tests/components/fibaro/test_config_flow.py b/tests/components/fibaro/test_config_flow.py index b6b4e3992cd..508bb81973d 100644 --- a/tests/components/fibaro/test_config_flow.py +++ b/tests/components/fibaro/test_config_flow.py @@ -183,15 +183,7 @@ async def test_reauth_success( hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Successful reauth flow initialized by the user.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -211,15 +203,7 @@ async def test_reauth_connect_failure( mock_fibaro_client: Mock, ) -> None: """Successful reauth flow initialized by the user.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) - + result = await 
mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -244,15 +228,7 @@ async def test_reauth_auth_failure( mock_fibaro_client: Mock, ) -> None: """Successful reauth flow initialized by the user.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/fibaro/test_cover.py b/tests/components/fibaro/test_cover.py new file mode 100644 index 00000000000..d5b08f7d1f8 --- /dev/null +++ b/tests/components/fibaro/test_cover.py @@ -0,0 +1,98 @@ +"""Test the Fibaro cover platform.""" + +from unittest.mock import Mock, patch + +from homeassistant.components.cover import CoverState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import init_integration + +from tests.common import MockConfigEntry + + +async def test_cover_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_cover: Mock, + mock_room: Mock, +) -> None: + """Test that the cover creates an entity.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_cover] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + entry = entity_registry.async_get("cover.room_1_test_cover_3") + assert entry + assert entry.unique_id == "hc2_111111.3" + assert entry.original_name == "Room 1 Test cover" + + +async def test_cover_opening( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_cover: Mock, + mock_room: Mock, +) -> None: + """Test that the cover opening state is reported.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_cover] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + assert hass.states.get("cover.room_1_test_cover_3").state == CoverState.OPENING + + +async def test_cover_opening_closing_none( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_cover: Mock, + mock_room: Mock, +) -> None: + """Test that the cover opening closing states return None if not available.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_cover.state.has_value = False + mock_fibaro_client.read_devices.return_value = [mock_cover] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + assert hass.states.get("cover.room_1_test_cover_3").state == CoverState.OPEN + + +async def test_cover_closing( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_cover: Mock, + mock_room: Mock, +) -> None: + """Test that the cover closing state is reported.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + 
mock_cover.state.str_value.return_value = "closing" + mock_fibaro_client.read_devices.return_value = [mock_cover] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + assert hass.states.get("cover.room_1_test_cover_3").state == CoverState.CLOSING diff --git a/tests/components/fibaro/test_light.py b/tests/components/fibaro/test_light.py new file mode 100644 index 00000000000..d0a24e009b7 --- /dev/null +++ b/tests/components/fibaro/test_light.py @@ -0,0 +1,57 @@ +"""Test the Fibaro light platform.""" + +from unittest.mock import Mock, patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import init_integration + +from tests.common import MockConfigEntry + + +async def test_light_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_light: Mock, + mock_room: Mock, +) -> None: + """Test that the light creates an entity.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_light] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.LIGHT]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + entry = entity_registry.async_get("light.room_1_test_light_3") + assert entry + assert entry.unique_id == "hc2_111111.3" + assert entry.original_name == "Room 1 Test light" + + +async def test_light_brightness( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_light: Mock, + mock_room: Mock, +) -> None: + """Test that the light brightness value is translated.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_light] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.LIGHT]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("light.room_1_test_light_3") + assert state.attributes["brightness"] == 51 + assert state.state == "on" diff --git a/tests/components/fibaro/test_sensor.py b/tests/components/fibaro/test_sensor.py new file mode 100644 index 00000000000..38cbd5d12a8 --- /dev/null +++ b/tests/components/fibaro/test_sensor.py @@ -0,0 +1,39 @@ +"""Test the Fibaro sensor platform.""" + +from unittest.mock import Mock, patch + +from homeassistant.components.sensor import SensorDeviceClass +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import init_integration + +from tests.common import MockConfigEntry + + +async def test_power_sensor_detected( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_power_sensor: Mock, + mock_room: Mock, +) -> None: + """Test that the strange power entity is detected. + + Similar to a Qubino 3-Phase power meter. 
+ """ + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_power_sensor] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.SENSOR]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + entry = entity_registry.async_get("sensor.room_1_test_sensor_1_power") + assert entry + assert entry.unique_id == "hc2_111111.1_power" + assert entry.original_name == "Room 1 Test sensor Power" + assert entry.original_device_class == SensorDeviceClass.POWER diff --git a/tests/components/file/test_notify.py b/tests/components/file/test_notify.py index 33e4739a488..e7cb85a9cfc 100644 --- a/tests/components/file/test_notify.py +++ b/tests/components/file/test_notify.py @@ -12,222 +12,46 @@ from homeassistant.components.file import DOMAIN from homeassistant.components.notify import ATTR_TITLE_DEFAULT from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, assert_setup_component - - -async def test_bad_config(hass: HomeAssistant) -> None: - """Test set up the platform with bad/missing config.""" - config = {notify.DOMAIN: {"name": "test", "platform": "file"}} - with assert_setup_component(0, domain="notify") as handle_config: - assert await async_setup_component(hass, notify.DOMAIN, config) - await hass.async_block_till_done() - assert not handle_config[notify.DOMAIN] +from tests.common import MockConfigEntry @pytest.mark.parametrize( ("domain", "service", "params"), [ - (notify.DOMAIN, "test", {"message": "one, two, testing, testing"}), ( notify.DOMAIN, "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, ), ], - ids=["legacy", "entity"], -) -@pytest.mark.parametrize( - ("timestamp", "config"), - [ - ( - False, - { - "notify": [ - { - "name": "test", - "platform": "file", - "filename": "mock_file", - "timestamp": False, - } - ] - }, - ), - ( - True, - { - "notify": [ - { - "name": "test", - "platform": "file", - "filename": "mock_file", - "timestamp": True, - } - ] - }, - ), - ], - ids=["no_timestamp", "timestamp"], ) +@pytest.mark.parametrize("timestamp", [False, True], ids=["no_timestamp", "timestamp"]) async def test_notify_file( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - timestamp: bool, mock_is_allowed_path: MagicMock, - config: ConfigType, + timestamp: bool, domain: str, service: str, params: dict[str, str], ) -> None: """Test the notify file output.""" filename = "mock_file" - message = params["message"] - assert await async_setup_component(hass, notify.DOMAIN, config) - await hass.async_block_till_done() - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) + full_filename = os.path.join(hass.config.path(), filename) - freezer.move_to(dt_util.utcnow()) - - m_open = mock_open() - with ( - patch("homeassistant.components.file.notify.open", m_open, create=True), - patch("homeassistant.components.file.notify.os.stat") as mock_st, - ): - mock_st.return_value.st_size = 0 - title = ( - f"{ATTR_TITLE_DEFAULT} notifications " - f"(Log started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" - ) - - await hass.services.async_call(domain, service, params, blocking=True) - - full_filename = os.path.join(hass.config.path(), filename) - assert m_open.call_count 
== 1 - assert m_open.call_args == call(full_filename, "a", encoding="utf8") - - assert m_open.return_value.write.call_count == 2 - if not timestamp: - assert m_open.return_value.write.call_args_list == [ - call(title), - call(f"{message}\n"), - ] - else: - assert m_open.return_value.write.call_args_list == [ - call(title), - call(f"{dt_util.utcnow().isoformat()} {message}\n"), - ] - - -@pytest.mark.parametrize( - ("domain", "service", "params"), - [(notify.DOMAIN, "test", {"message": "one, two, testing, testing"})], - ids=["legacy"], -) -@pytest.mark.parametrize( - ("is_allowed", "config"), - [ - ( - True, - { - "notify": [ - { - "name": "test", - "platform": "file", - "filename": "mock_file", - } - ] - }, - ), - ], - ids=["allowed_but_access_failed"], -) -async def test_legacy_notify_file_exception( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_is_allowed_path: MagicMock, - config: ConfigType, - domain: str, - service: str, - params: dict[str, str], -) -> None: - """Test legacy notify file output has exception.""" - assert await async_setup_component(hass, notify.DOMAIN, config) - await hass.async_block_till_done() - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - - freezer.move_to(dt_util.utcnow()) - - m_open = mock_open() - with ( - patch("homeassistant.components.file.notify.open", m_open, create=True), - patch("homeassistant.components.file.notify.os.stat") as mock_st, - ): - mock_st.side_effect = OSError("Access Failed") - with pytest.raises(ServiceValidationError) as exc: - await hass.services.async_call(domain, service, params, blocking=True) - assert f"{exc.value!r}" == "ServiceValidationError('write_access_failed')" - - -@pytest.mark.parametrize( - ("timestamp", "data", "options"), - [ - ( - False, - { - "name": "test", - "platform": "notify", - "file_path": "mock_file", - }, - { - "timestamp": False, - }, - ), - ( - True, - { - "name": "test", - "platform": "notify", - "file_path": "mock_file", - }, - { - "timestamp": True, - }, - ), - ], - ids=["no_timestamp", "timestamp"], -) -async def test_legacy_notify_file_entry_only_setup( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - timestamp: bool, - mock_is_allowed_path: MagicMock, - data: dict[str, Any], - options: dict[str, Any], -) -> None: - """Test the legacy notify file output in entry only setup.""" - filename = "mock_file" - - domain = notify.DOMAIN - service = "test" - params = {"message": "one, two, testing, testing"} message = params["message"] entry = MockConfigEntry( domain=DOMAIN, - data=data, + data={"name": "test", "platform": "notify", "file_path": full_filename}, + options={"timestamp": timestamp}, version=2, - options=options, - title=f"test [{data['file_path']}]", + title=f"test [{filename}]", ) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) + assert await hass.config_entries.async_setup(entry.entry_id) freezer.move_to(dt_util.utcnow()) @@ -245,7 +69,7 @@ async def test_legacy_notify_file_entry_only_setup( await hass.services.async_call(domain, service, params, blocking=True) assert m_open.call_count == 1 - assert m_open.call_args == call(filename, "a", encoding="utf8") + assert m_open.call_args == call(full_filename, "a", encoding="utf8") assert m_open.return_value.write.call_count == 2 if not timestamp: @@ -277,14 +101,14 @@ async def test_legacy_notify_file_entry_only_setup( ], 
ids=["not_allowed"], ) -async def test_legacy_notify_file_not_allowed( +async def test_notify_file_not_allowed( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_is_allowed_path: MagicMock, config: dict[str, Any], options: dict[str, Any], ) -> None: - """Test legacy notify file output not allowed.""" + """Test notify file output not allowed.""" entry = MockConfigEntry( domain=DOMAIN, data=config, @@ -301,11 +125,10 @@ async def test_legacy_notify_file_not_allowed( @pytest.mark.parametrize( ("service", "params"), [ - ("test", {"message": "one, two, testing, testing"}), ( "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, - ), + ) ], ) @pytest.mark.parametrize( diff --git a/tests/components/file/test_sensor.py b/tests/components/file/test_sensor.py index 634ae9d626c..9e6a16e3e27 100644 --- a/tests/components/file/test_sensor.py +++ b/tests/components/file/test_sensor.py @@ -7,33 +7,10 @@ import pytest from homeassistant.components.file import DOMAIN from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, get_fixture_path -@patch("os.path.isfile", Mock(return_value=True)) -@patch("os.access", Mock(return_value=True)) -async def test_file_value_yaml_setup( - hass: HomeAssistant, mock_is_allowed_path: MagicMock -) -> None: - """Test the File sensor from YAML setup.""" - config = { - "sensor": { - "platform": "file", - "scan_interval": 30, - "name": "file1", - "file_path": get_fixture_path("file_value.txt", "file"), - } - } - - assert await async_setup_component(hass, "sensor", config) - await hass.async_block_till_done() - - state = hass.states.get("sensor.file1") - assert state.state == "21" - - @patch("os.path.isfile", Mock(return_value=True)) @patch("os.access", Mock(return_value=True)) async def test_file_value_entry_setup( diff --git a/tests/components/filesize/conftest.py b/tests/components/filesize/conftest.py index ac66af0d22f..09acf7a58cc 100644 --- a/tests/components/filesize/conftest.py +++ b/tests/components/filesize/conftest.py @@ -8,21 +8,30 @@ from unittest.mock import patch import pytest -from homeassistant.components.filesize.const import DOMAIN -from homeassistant.const import CONF_FILE_PATH +from homeassistant.components.filesize.const import DOMAIN, PLATFORMS +from homeassistant.const import CONF_FILE_PATH, Platform from . 
import TEST_FILE_NAME from tests.common import MockConfigEntry +@pytest.fixture(name="load_platforms") +async def patch_platform_constant() -> list[Platform]: + """Return list of platforms to load.""" + return PLATFORMS + + @pytest.fixture -def mock_config_entry(tmp_path: Path) -> MockConfigEntry: +def mock_config_entry( + tmp_path: Path, load_platforms: list[Platform] +) -> MockConfigEntry: """Return the default mocked config entry.""" test_file = str(tmp_path.joinpath(TEST_FILE_NAME)) return MockConfigEntry( title=TEST_FILE_NAME, domain=DOMAIN, + entry_id="01JD5CTQMH9FKEFQKZJ8MMBQ3X", data={CONF_FILE_PATH: test_file}, unique_id=test_file, ) diff --git a/tests/components/filesize/snapshots/test_sensor.ambr b/tests/components/filesize/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..339d64acf91 --- /dev/null +++ b/tests/components/filesize/snapshots/test_sensor.ambr @@ -0,0 +1,197 @@ +# serializer version: 1 +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_created-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_created', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Created', + 'platform': 'filesize', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'created', + 'unique_id': '01JD5CTQMH9FKEFQKZJ8MMBQ3X-created', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_created-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'mock_file_test_filesize.txt Created', + }), + 'context': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_created', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-20T18:19:04+00:00', + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_last_updated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_last_updated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last updated', + 'platform': 'filesize', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_updated', + 'unique_id': '01JD5CTQMH9FKEFQKZJ8MMBQ3X-last_updated', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_last_updated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'mock_file_test_filesize.txt Last updated', + }), + 'context': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_last_updated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-20T18:19:24+00:00', + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_size-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_file_test_filesize_txt_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Size', + 'platform': 'filesize', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'size', + 'unique_id': '01JD5CTQMH9FKEFQKZJ8MMBQ3X', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'mock_file_test_filesize.txt Size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_size_in_bytes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_size_in_bytes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Size in bytes', + 'platform': 'filesize', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'size_bytes', + 'unique_id': '01JD5CTQMH9FKEFQKZJ8MMBQ3X-bytes', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_size_in_bytes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'mock_file_test_filesize.txt Size in bytes', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_size_in_bytes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- diff --git a/tests/components/filesize/test_config_flow.py b/tests/components/filesize/test_config_flow.py index 4b275e66d02..383b1f596f8 100644 --- a/tests/components/filesize/test_config_flow.py +++ b/tests/components/filesize/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.const import CONF_FILE_PATH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import TEST_FILE_NAME, async_create_file +from . 
import TEST_FILE_NAME, TEST_FILE_NAME2, async_create_file from tests.common import MockConfigEntry @@ -108,3 +108,119 @@ async def test_flow_fails_on_validation(hass: HomeAssistant, tmp_path: Path) -> assert result2["data"] == { CONF_FILE_PATH: test_file, } + + +async def test_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, tmp_path: Path +) -> None: + """Test a reconfigure flow.""" + test_file = str(tmp_path.joinpath(TEST_FILE_NAME2)) + await async_create_file(hass, test_file) + hass.config.allowlist_external_dirs = {tmp_path} + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FILE_PATH: test_file}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert mock_config_entry.data == {CONF_FILE_PATH: str(test_file)} + + +async def test_unique_id_already_exist_in_reconfigure_flow( + hass: HomeAssistant, tmp_path: Path +) -> None: + """Test a reconfigure flow fails when unique id already exist.""" + test_file = str(tmp_path.joinpath(TEST_FILE_NAME)) + test_file2 = str(tmp_path.joinpath(TEST_FILE_NAME2)) + await async_create_file(hass, test_file) + await async_create_file(hass, test_file2) + hass.config.allowlist_external_dirs = {tmp_path} + test_file = str(tmp_path.joinpath(TEST_FILE_NAME)) + mock_config_entry = MockConfigEntry( + title=TEST_FILE_NAME, + domain=DOMAIN, + data={CONF_FILE_PATH: test_file}, + unique_id=test_file, + ) + mock_config_entry2 = MockConfigEntry( + title=TEST_FILE_NAME2, + domain=DOMAIN, + data={CONF_FILE_PATH: test_file2}, + unique_id=test_file2, + ) + mock_config_entry.add_to_hass(hass) + mock_config_entry2.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FILE_PATH: test_file2}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + +async def test_reconfigure_flow_fails_on_validation( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, tmp_path: Path +) -> None: + """Test config flow errors in reconfigure.""" + test_file2 = str(tmp_path.joinpath(TEST_FILE_NAME2)) + hass.config.allowlist_external_dirs = {} + + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_FILE_PATH: test_file2, + }, + ) + + assert result["errors"] == {"base": "not_valid"} + + await async_create_file(hass, test_file2) + + with patch( + "homeassistant.components.filesize.config_flow.pathlib.Path", + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_FILE_PATH: test_file2, + }, + ) + + assert result2["errors"] == {"base": "not_allowed"} + + hass.config.allowlist_external_dirs = {tmp_path} + with patch( + "homeassistant.components.filesize.config_flow.pathlib.Path", + ): + 
result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
+            user_input={
+                CONF_FILE_PATH: test_file2,
+            },
+        )
+
+    assert result2["type"] is FlowResultType.ABORT
+    assert result2["reason"] == "reconfigure_successful"
diff --git a/tests/components/filesize/test_sensor.py b/tests/components/filesize/test_sensor.py
index 880563f0ad8..8292800a861 100644
--- a/tests/components/filesize/test_sensor.py
+++ b/tests/components/filesize/test_sensor.py
@@ -2,14 +2,56 @@
 import os
 from pathlib import Path
+from unittest.mock import patch
-from homeassistant.const import CONF_FILE_PATH, STATE_UNAVAILABLE
+import pytest
+from syrupy.assertion import SnapshotAssertion
+
+from homeassistant.components.filesize.const import DOMAIN
+from homeassistant.const import CONF_FILE_PATH, STATE_UNAVAILABLE, Platform
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.helpers.entity_component import async_update_entity
 from . import TEST_FILE_NAME, async_create_file
-from tests.common import MockConfigEntry
+from tests.common import MockConfigEntry, snapshot_platform
+
+
+@pytest.mark.parametrize(
+    "load_platforms",
+    [[Platform.SENSOR]],
+)
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_sensors(
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+    tmp_path: Path,
+    entity_registry: er.EntityRegistry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test the filesize sensors."""
+    testfile = str(tmp_path.joinpath("file.txt"))
+    await async_create_file(hass, testfile)
+    hass.config.allowlist_external_dirs = {tmp_path}
+    mock_config_entry.add_to_hass(hass)
+    hass.config_entries.async_update_entry(
+        mock_config_entry, data={CONF_FILE_PATH: testfile}
+    )
+    with (
+        patch(
+            "os.stat_result.st_mtime",
+            1732126764.780758,
+        ),
+        patch(
+            "os.stat_result.st_ctime",
+            1732126744.780758,
+        ),
+    ):
+        await hass.config_entries.async_setup(mock_config_entry.entry_id)
+        await hass.async_block_till_done()
+
+    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
 async def test_invalid_path(
@@ -27,7 +69,10 @@ async def test_valid_path(
-    hass: HomeAssistant, tmp_path: Path, mock_config_entry: MockConfigEntry
+    hass: HomeAssistant,
+    tmp_path: Path,
+    mock_config_entry: MockConfigEntry,
+    device_registry: dr.DeviceRegistry,
 ) -> None:
     """Test for a valid path."""
     testfile = str(tmp_path.joinpath("file.txt"))
@@ -41,10 +86,15 @@
     await hass.config_entries.async_setup(mock_config_entry.entry_id)
     await hass.async_block_till_done()
-    state = hass.states.get("sensor.file_txt_size")
+    state = hass.states.get("sensor.mock_file_test_filesize_txt_size")
     assert state
     assert state.state == "0.0"
+    device = device_registry.async_get_device(
+        identifiers={(DOMAIN, mock_config_entry.entry_id)}
+    )
+    assert device.name == mock_config_entry.title
+
     await hass.async_add_executor_job(os.remove, testfile)
@@ -63,12 +113,12 @@ async def test_state_unavailable(
     await hass.config_entries.async_setup(mock_config_entry.entry_id)
     await hass.async_block_till_done()
-    state = hass.states.get("sensor.file_txt_size")
+    state = hass.states.get("sensor.mock_file_test_filesize_txt_size")
     assert state
     assert state.state == "0.0"
     await hass.async_add_executor_job(os.remove, testfile)
-    await async_update_entity(hass, "sensor.file_txt_size")
+    await async_update_entity(hass, 
"sensor.mock_file_test_filesize_txt_size") - state = hass.states.get("sensor.file_txt_size") + state = hass.states.get("sensor.mock_file_test_filesize_txt_size") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/filter/test_sensor.py b/tests/components/filter/test_sensor.py index a9581b78f4e..a3e0e58908a 100644 --- a/tests/components/filter/test_sensor.py +++ b/tests/components/filter/test_sensor.py @@ -37,6 +37,11 @@ import homeassistant.util.dt as dt_util from tests.common import assert_setup_component, get_fixture_path +@pytest.fixture(autouse=True, name="stub_blueprint_populate") +def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: + """Stub copying the blueprints to the config folder.""" + + @pytest.fixture(name="values") def values_fixture() -> list[State]: """Fixture for a list of test States.""" diff --git a/tests/components/fireservicerota/test_config_flow.py b/tests/components/fireservicerota/test_config_flow.py index 539906d800b..5555a8d649c 100644 --- a/tests/components/fireservicerota/test_config_flow.py +++ b/tests/components/fireservicerota/test_config_flow.py @@ -120,23 +120,8 @@ async def test_reauth(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONF, unique_id=MOCK_CONF[CONF_USERNAME] ) entry.add_to_hass(hass) - with patch( - "homeassistant.components.fireservicerota.config_flow.FireServiceRota" - ) as mock_fsr: - mock_fireservicerota = mock_fsr.return_value - mock_fireservicerota.request_tokens.return_value = MOCK_TOKEN_INFO - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - }, - data=MOCK_CONF, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM with ( patch( diff --git a/tests/components/fitbit/conftest.py b/tests/components/fitbit/conftest.py index 57511739993..8a408748f16 100644 --- a/tests/components/fitbit/conftest.py +++ b/tests/components/fitbit/conftest.py @@ -1,6 +1,6 @@ """Test fixtures for fitbit.""" -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable import datetime from http import HTTPStatus import time @@ -14,12 +14,7 @@ from homeassistant.components.application_credentials import ( ClientCredential, async_import_client_credential, ) -from homeassistant.components.fitbit.const import ( - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - DOMAIN, - OAUTH_SCOPES, -) +from homeassistant.components.fitbit.const import DOMAIN, OAUTH_SCOPES from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -83,15 +78,19 @@ def mock_token_entry(token_expiration_time: float, scopes: list[str]) -> dict[st @pytest.fixture(name="config_entry") -def mock_config_entry(token_entry: dict[str, Any]) -> MockConfigEntry: +def mock_config_entry( + token_entry: dict[str, Any], imported_config_data: dict[str, Any] +) -> MockConfigEntry: """Fixture for a config entry.""" return MockConfigEntry( domain=DOMAIN, data={ "auth_implementation": FAKE_AUTH_IMPL, "token": token_entry, + **imported_config_data, }, unique_id=PROFILE_USER_ID, + title=DISPLAY_NAME, ) @@ -107,37 +106,6 @@ async def setup_credentials(hass: HomeAssistant) -> None: ) -@pytest.fixture(name="fitbit_config_yaml") -def mock_fitbit_config_yaml(token_expiration_time: float) -> dict[str, Any] | None: - """Fixture 
for the yaml fitbit.conf file contents.""" - return { - CONF_CLIENT_ID: CLIENT_ID, - CONF_CLIENT_SECRET: CLIENT_SECRET, - "access_token": FAKE_ACCESS_TOKEN, - "refresh_token": FAKE_REFRESH_TOKEN, - "last_saved_at": token_expiration_time, - } - - -@pytest.fixture(name="fitbit_config_setup") -def mock_fitbit_config_setup( - fitbit_config_yaml: dict[str, Any] | None, -) -> Generator[None]: - """Fixture to mock out fitbit.conf file data loading and persistence.""" - has_config = fitbit_config_yaml is not None - with ( - patch( - "homeassistant.components.fitbit.sensor.os.path.isfile", - return_value=has_config, - ), - patch( - "homeassistant.components.fitbit.sensor.load_json_object", - return_value=fitbit_config_yaml, - ), - ): - yield - - @pytest.fixture(name="monitored_resources") def mock_monitored_resources() -> list[str] | None: """Fixture for the fitbit yaml config monitored_resources field.""" @@ -150,8 +118,8 @@ def mock_configured_unit_syststem() -> str | None: return None -@pytest.fixture(name="sensor_platform_config") -def mock_sensor_platform_config( +@pytest.fixture(name="imported_config_data") +def mock_imported_config_data( monitored_resources: list[str] | None, configured_unit_system: str | None, ) -> dict[str, Any]: @@ -164,32 +132,6 @@ def mock_sensor_platform_config( return config -@pytest.fixture(name="sensor_platform_setup") -async def mock_sensor_platform_setup( - hass: HomeAssistant, - sensor_platform_config: dict[str, Any], -) -> Callable[[], Awaitable[bool]]: - """Fixture to set up the integration.""" - - async def run() -> bool: - result = await async_setup_component( - hass, - "sensor", - { - "sensor": [ - { - "platform": DOMAIN, - **sensor_platform_config, - } - ] - }, - ) - await hass.async_block_till_done() - return result - - return run - - @pytest.fixture def platforms() -> list[Platform]: """Fixture to specify platforms to test.""" diff --git a/tests/components/fitbit/snapshots/test_sensor.ambr b/tests/components/fitbit/snapshots/test_sensor.ambr index 55b2639a56d..068df25454d 100644 --- a/tests/components/fitbit/snapshots/test_sensor.ambr +++ b/tests/components/fitbit/snapshots/test_sensor.ambr @@ -4,7 +4,7 @@ '99', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Water', + 'friendly_name': 'First L. Water', 'icon': 'mdi:cup-water', 'state_class': , 'unit_of_measurement': , @@ -16,7 +16,7 @@ '1600', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Calories In', + 'friendly_name': 'First L. Calories in', 'icon': 'mdi:food-apple', 'state_class': , 'unit_of_measurement': 'cal', @@ -28,7 +28,7 @@ '99', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Water', + 'friendly_name': 'First L. Water', 'icon': 'mdi:cup-water', 'state_class': , 'unit_of_measurement': , @@ -40,19 +40,19 @@ '1600', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Calories In', + 'friendly_name': 'First L. Calories in', 'icon': 'mdi:food-apple', 'state_class': , 'unit_of_measurement': 'cal', }), ) # --- -# name: test_sensors[monitored_resources0-sensor.activity_calories-activities/activityCalories-135] +# name: test_sensors[monitored_resources0-sensor.first_l_activity_calories-activities/activityCalories-135] tuple( '135', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Activity Calories', + 'friendly_name': 'First L. 
Activity calories', 'icon': 'mdi:fire', 'state_class': , 'unit_of_measurement': 'cal', @@ -60,254 +60,26 @@ 'fitbit-api-user-id-1_activities/activityCalories', ) # --- -# name: test_sensors[monitored_resources1-sensor.calories-activities/calories-139] +# name: test_sensors[monitored_resources1-sensor.first_l_tracker_activity_calories-activities/tracker/activityCalories-135] tuple( - '139', + '135', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Calories', + 'friendly_name': 'First L. tracker Activity calories', 'icon': 'mdi:fire', 'state_class': , 'unit_of_measurement': 'cal', }), - 'fitbit-api-user-id-1_activities/calories', + 'fitbit-api-user-id-1_activities/tracker/activityCalories', ) # --- -# name: test_sensors[monitored_resources10-sensor.steps-activities/steps-5600] - tuple( - '5600', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Steps', - 'icon': 'mdi:walk', - 'state_class': , - 'unit_of_measurement': 'steps', - }), - 'fitbit-api-user-id-1_activities/steps', - ) -# --- -# name: test_sensors[monitored_resources11-sensor.weight-body/weight-175] - tuple( - '175.0', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'weight', - 'friendly_name': 'Weight', - 'icon': 'mdi:human', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_body/weight', - ) -# --- -# name: test_sensors[monitored_resources12-sensor.body_fat-body/fat-18] - tuple( - '18.0', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Body Fat', - 'icon': 'mdi:human', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'fitbit-api-user-id-1_body/fat', - ) -# --- -# name: test_sensors[monitored_resources13-sensor.bmi-body/bmi-23.7] - tuple( - '23.7', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'BMI', - 'icon': 'mdi:human', - 'state_class': , - 'unit_of_measurement': 'BMI', - }), - 'fitbit-api-user-id-1_body/bmi', - ) -# --- -# name: test_sensors[monitored_resources14-sensor.awakenings_count-sleep/awakeningsCount-7] - tuple( - '7', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Awakenings Count', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': 'times awaken', - }), - 'fitbit-api-user-id-1_sleep/awakeningsCount', - ) -# --- -# name: test_sensors[monitored_resources15-sensor.sleep_efficiency-sleep/efficiency-80] - tuple( - '80', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Sleep Efficiency', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'fitbit-api-user-id-1_sleep/efficiency', - ) -# --- -# name: test_sensors[monitored_resources16-sensor.minutes_after_wakeup-sleep/minutesAfterWakeup-17] - tuple( - '17', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Minutes After Wakeup', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/minutesAfterWakeup', - ) -# --- -# name: test_sensors[monitored_resources17-sensor.sleep_minutes_asleep-sleep/minutesAsleep-360] - tuple( - '360', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Sleep Minutes Asleep', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/minutesAsleep', - ) -# --- -# name: 
test_sensors[monitored_resources18-sensor.sleep_minutes_awake-sleep/minutesAwake-35] - tuple( - '35', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Sleep Minutes Awake', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/minutesAwake', - ) -# --- -# name: test_sensors[monitored_resources19-sensor.sleep_minutes_to_fall_asleep-sleep/minutesToFallAsleep-35] - tuple( - '35', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Sleep Minutes to Fall Asleep', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/minutesToFallAsleep', - ) -# --- -# name: test_sensors[monitored_resources2-sensor.distance-activities/distance-12.7] - tuple( - '12.70', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'distance', - 'friendly_name': 'Distance', - 'icon': 'mdi:map-marker', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_activities/distance', - ) -# --- -# name: test_sensors[monitored_resources20-sensor.sleep_start_time-sleep/startTime-2020-01-27T00:17:30.000] - tuple( - '2020-01-27T00:17:30.000', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Sleep Start Time', - 'icon': 'mdi:clock', - }), - 'fitbit-api-user-id-1_sleep/startTime', - ) -# --- -# name: test_sensors[monitored_resources21-sensor.sleep_time_in_bed-sleep/timeInBed-462] - tuple( - '462', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Sleep Time in Bed', - 'icon': 'mdi:hotel', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/timeInBed', - ) -# --- -# name: test_sensors[monitored_resources3-sensor.elevation-activities/elevation-7600.24] - tuple( - '7600.24', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'distance', - 'friendly_name': 'Elevation', - 'icon': 'mdi:walk', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_activities/elevation', - ) -# --- -# name: test_sensors[monitored_resources4-sensor.floors-activities/floors-8] - tuple( - '8', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Floors', - 'icon': 'mdi:walk', - 'state_class': , - 'unit_of_measurement': 'floors', - }), - 'fitbit-api-user-id-1_activities/floors', - ) -# --- -# name: test_sensors[monitored_resources5-sensor.resting_heart_rate-activities/heart-api_value5] - tuple( - '76', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Resting Heart Rate', - 'icon': 'mdi:heart-pulse', - 'state_class': , - 'unit_of_measurement': 'bpm', - }), - 'fitbit-api-user-id-1_activities/heart', - ) -# --- -# name: test_sensors[monitored_resources6-sensor.minutes_fairly_active-activities/minutesFairlyActive-35] - tuple( - '35', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Minutes Fairly Active', - 'icon': 'mdi:walk', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_activities/minutesFairlyActive', - ) -# --- -# name: test_sensors[monitored_resources7-sensor.minutes_lightly_active-activities/minutesLightlyActive-95] +# name: test_sensors[monitored_resources10-sensor.first_l_minutes_lightly_active-activities/minutesLightlyActive-95] tuple( '95', 
ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', 'device_class': 'duration', - 'friendly_name': 'Minutes Lightly Active', + 'friendly_name': 'First L. Minutes lightly active', 'icon': 'mdi:walk', 'state_class': , 'unit_of_measurement': , @@ -315,13 +87,13 @@ 'fitbit-api-user-id-1_activities/minutesLightlyActive', ) # --- -# name: test_sensors[monitored_resources8-sensor.minutes_sedentary-activities/minutesSedentary-18] +# name: test_sensors[monitored_resources11-sensor.first_l_minutes_sedentary-activities/minutesSedentary-18] tuple( '18', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', 'device_class': 'duration', - 'friendly_name': 'Minutes Sedentary', + 'friendly_name': 'First L. Minutes sedentary', 'icon': 'mdi:seat-recline-normal', 'state_class': , 'unit_of_measurement': , @@ -329,13 +101,13 @@ 'fitbit-api-user-id-1_activities/minutesSedentary', ) # --- -# name: test_sensors[monitored_resources9-sensor.minutes_very_active-activities/minutesVeryActive-20] +# name: test_sensors[monitored_resources12-sensor.first_l_minutes_very_active-activities/minutesVeryActive-20] tuple( '20', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', 'device_class': 'duration', - 'friendly_name': 'Minutes Very Active', + 'friendly_name': 'First L. Minutes very active', 'icon': 'mdi:run', 'state_class': , 'unit_of_measurement': , @@ -343,3 +115,271 @@ 'fitbit-api-user-id-1_activities/minutesVeryActive', ) # --- +# name: test_sensors[monitored_resources13-sensor.first_l_steps-activities/steps-5600] + tuple( + '5600', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Steps', + 'icon': 'mdi:walk', + 'state_class': , + 'unit_of_measurement': 'steps', + }), + 'fitbit-api-user-id-1_activities/steps', + ) +# --- +# name: test_sensors[monitored_resources14-sensor.first_l_weight-body/weight-175] + tuple( + '175.0', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'weight', + 'friendly_name': 'First L. Weight', + 'icon': 'mdi:human', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_body/weight', + ) +# --- +# name: test_sensors[monitored_resources15-sensor.first_l_body_fat-body/fat-18] + tuple( + '18.0', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Body fat', + 'icon': 'mdi:human', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'fitbit-api-user-id-1_body/fat', + ) +# --- +# name: test_sensors[monitored_resources16-sensor.first_l_bmi-body/bmi-23.7] + tuple( + '23.7', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. BMI', + 'icon': 'mdi:human', + 'state_class': , + 'unit_of_measurement': 'BMI', + }), + 'fitbit-api-user-id-1_body/bmi', + ) +# --- +# name: test_sensors[monitored_resources17-sensor.first_l_awakenings_count-sleep/awakeningsCount-7] + tuple( + '7', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Awakenings count', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': 'times awaken', + }), + 'fitbit-api-user-id-1_sleep/awakeningsCount', + ) +# --- +# name: test_sensors[monitored_resources18-sensor.first_l_sleep_efficiency-sleep/efficiency-80] + tuple( + '80', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. 
Sleep efficiency', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'fitbit-api-user-id-1_sleep/efficiency', + ) +# --- +# name: test_sensors[monitored_resources19-sensor.first_l_minutes_after_wakeup-sleep/minutesAfterWakeup-17] + tuple( + '17', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Minutes after wakeup', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/minutesAfterWakeup', + ) +# --- +# name: test_sensors[monitored_resources2-sensor.first_l_calories-activities/calories-139] + tuple( + '139', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Calories', + 'icon': 'mdi:fire', + 'state_class': , + 'unit_of_measurement': 'cal', + }), + 'fitbit-api-user-id-1_activities/calories', + ) +# --- +# name: test_sensors[monitored_resources20-sensor.first_l_sleep_minutes_asleep-sleep/minutesAsleep-360] + tuple( + '360', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Sleep minutes asleep', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/minutesAsleep', + ) +# --- +# name: test_sensors[monitored_resources21-sensor.first_l_sleep_minutes_awake-sleep/minutesAwake-35] + tuple( + '35', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Sleep minutes awake', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/minutesAwake', + ) +# --- +# name: test_sensors[monitored_resources22-sensor.first_l_sleep_minutes_to_fall_asleep-sleep/minutesToFallAsleep-35] + tuple( + '35', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Sleep minutes to fall asleep', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/minutesToFallAsleep', + ) +# --- +# name: test_sensors[monitored_resources23-sensor.first_l_sleep_start_time-sleep/startTime-2020-01-27T00:17:30.000] + tuple( + '2020-01-27T00:17:30.000', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Sleep start time', + 'icon': 'mdi:clock', + }), + 'fitbit-api-user-id-1_sleep/startTime', + ) +# --- +# name: test_sensors[monitored_resources24-sensor.first_l_sleep_time_in_bed-sleep/timeInBed-462] + tuple( + '462', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Sleep time in bed', + 'icon': 'mdi:hotel', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/timeInBed', + ) +# --- +# name: test_sensors[monitored_resources3-sensor.first_l_tracker_calories-activities/tracker/calories-139] + tuple( + '139', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. tracker Calories', + 'icon': 'mdi:fire', + 'state_class': , + 'unit_of_measurement': 'cal', + }), + 'fitbit-api-user-id-1_activities/tracker/calories', + ) +# --- +# name: test_sensors[monitored_resources4-sensor.first_l_distance-activities/distance-12.7] + tuple( + '12.70', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'distance', + 'friendly_name': 'First L. 
Distance', + 'icon': 'mdi:map-marker', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_activities/distance', + ) +# --- +# name: test_sensors[monitored_resources5-sensor.first_l_tracker_distance-activities/distance-12.7] + tuple( + 'unknown', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'distance', + 'friendly_name': 'First L. tracker Distance', + 'icon': 'mdi:map-marker', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_activities/tracker/distance', + ) +# --- +# name: test_sensors[monitored_resources6-sensor.first_l_elevation-activities/elevation-7600.24] + tuple( + '7600.24', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'distance', + 'friendly_name': 'First L. Elevation', + 'icon': 'mdi:walk', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_activities/elevation', + ) +# --- +# name: test_sensors[monitored_resources7-sensor.first_l_floors-activities/floors-8] + tuple( + '8', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Floors', + 'icon': 'mdi:walk', + 'state_class': , + 'unit_of_measurement': 'floors', + }), + 'fitbit-api-user-id-1_activities/floors', + ) +# --- +# name: test_sensors[monitored_resources8-sensor.first_l_resting_heart_rate-activities/heart-api_value8] + tuple( + '76', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Resting heart rate', + 'icon': 'mdi:heart-pulse', + 'state_class': , + 'unit_of_measurement': 'bpm', + }), + 'fitbit-api-user-id-1_activities/heart', + ) +# --- +# name: test_sensors[monitored_resources9-sensor.first_l_minutes_fairly_active-activities/minutesFairlyActive-35] + tuple( + '35', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. 
Minutes fairly active', + 'icon': 'mdi:walk', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_activities/minutesFairlyActive', + ) +# --- diff --git a/tests/components/fitbit/test_config_flow.py b/tests/components/fitbit/test_config_flow.py index d5f3d09abdd..70c54cd2657 100644 --- a/tests/components/fitbit/test_config_flow.py +++ b/tests/components/fitbit/test_config_flow.py @@ -2,7 +2,6 @@ from collections.abc import Awaitable, Callable from http import HTTPStatus -import time from typing import Any from unittest.mock import patch @@ -13,7 +12,7 @@ from homeassistant import config_entries from homeassistant.components.fitbit.const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_entry_oauth2_flow, issue_registry as ir +from homeassistant.helpers import config_entry_oauth2_flow from .conftest import ( CLIENT_ID, @@ -255,207 +254,6 @@ async def test_config_entry_already_exists( assert result.get("reason") == "already_configured" -@pytest.mark.parametrize( - "token_expiration_time", - [time.time() + 86400, time.time() - 86400], - ids=("token_active", "token_expired"), -) -async def test_import_fitbit_config( - hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, - requests_mock: Mocker, -) -> None: - """Test that platform configuration is imported successfully.""" - - requests_mock.register_uri( - "POST", - OAUTH2_TOKEN, - status_code=HTTPStatus.OK, - json=SERVER_ACCESS_TOKEN, - ) - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_setup: - await sensor_platform_setup() - - assert len(mock_setup.mock_calls) == 1 - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - - # Verify valid profile can be fetched from the API - config_entry = entries[0] - assert config_entry.title == DISPLAY_NAME - assert config_entry.unique_id == PROFILE_USER_ID - - data = dict(config_entry.data) - # Verify imported values from fitbit.conf and configuration.yaml and - # that the token is updated. 
- assert "token" in data - expires_at = data["token"]["expires_at"] - assert expires_at > time.time() - del data["token"]["expires_at"] - assert dict(config_entry.data) == { - "auth_implementation": DOMAIN, - "clock_format": "24H", - "monitored_resources": ["activities/steps"], - "token": { - "access_token": "server-access-token", - "refresh_token": "server-refresh-token", - "scope": "activity heartrate nutrition profile settings sleep weight", - }, - "unit_system": "default", - } - - # Verify an issue is raised for deprecated configuration.yaml - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_import" - - -async def test_import_fitbit_config_failure_cannot_connect( - hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, - requests_mock: Mocker, -) -> None: - """Test platform configuration fails to import successfully.""" - - requests_mock.register_uri( - "POST", - OAUTH2_TOKEN, - status_code=HTTPStatus.OK, - json=SERVER_ACCESS_TOKEN, - ) - requests_mock.register_uri( - "GET", PROFILE_API_URL, status_code=HTTPStatus.INTERNAL_SERVER_ERROR - ) - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_setup: - await sensor_platform_setup() - - assert len(mock_setup.mock_calls) == 0 - - # Verify an issue is raised that we were unable to import configuration - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_import_issue_cannot_connect" - - -@pytest.mark.parametrize( - "status_code", - [ - (HTTPStatus.UNAUTHORIZED), - (HTTPStatus.INTERNAL_SERVER_ERROR), - ], -) -async def test_import_fitbit_config_cannot_refresh( - hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, - requests_mock: Mocker, - status_code: HTTPStatus, -) -> None: - """Test platform configuration import fails when refreshing the token.""" - - requests_mock.register_uri( - "POST", - OAUTH2_TOKEN, - status_code=status_code, - json="", - ) - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_setup: - await sensor_platform_setup() - - assert len(mock_setup.mock_calls) == 0 - - # Verify an issue is raised that we were unable to import configuration - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_import_issue_cannot_connect" - - -async def test_import_fitbit_config_already_exists( - hass: HomeAssistant, - config_entry: MockConfigEntry, - setup_credentials: None, - integration_setup: Callable[[], Awaitable[bool]], - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, - requests_mock: Mocker, -) -> None: - """Test that platform configuration is not imported if it already exists.""" - - requests_mock.register_uri( - "POST", - OAUTH2_TOKEN, - status_code=HTTPStatus.OK, - json=SERVER_ACCESS_TOKEN, - ) - - # Verify existing config entry - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_config_entry_setup: - await integration_setup() - - assert len(mock_config_entry_setup.mock_calls) == 1 - - with patch( - 
"homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_import_setup: - await sensor_platform_setup() - - assert len(mock_import_setup.mock_calls) == 0 - - # Still one config entry - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - - # Verify an issue is raised for deprecated configuration.yaml - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_import" - - -async def test_platform_setup_without_import( - hass: HomeAssistant, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, -) -> None: - """Test platform configuration.yaml but no existing fitbit.conf credentials.""" - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_setup: - await sensor_platform_setup() - - # Verify no configuration entry is imported since the integration is not - # fully setup properly - assert len(mock_setup.mock_calls) == 0 - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 0 - - # Verify an issue is raised for deprecated configuration.yaml - assert len(issue_registry.issues) == 1 - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_no_import" - - @pytest.mark.usefixtures("current_request_with_host") async def test_reauth_flow( hass: HomeAssistant, @@ -472,13 +270,7 @@ async def test_reauth_flow( assert len(entries) == 1 # config_entry.req initiates reauth - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -546,13 +338,7 @@ async def test_reauth_wrong_user_id( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/fitbit/test_sensor.py b/tests/components/fitbit/test_sensor.py index 9443d0500eb..cee9835f89f 100644 --- a/tests/components/fitbit/test_sensor.py +++ b/tests/components/fitbit/test_sensor.py @@ -78,133 +78,151 @@ def mock_token_refresh(requests_mock: Mocker) -> None: [ ( ["activities/activityCalories"], - "sensor.activity_calories", + "sensor.first_l_activity_calories", "activities/activityCalories", "135", ), + ( + ["activities/tracker/activityCalories"], + "sensor.first_l_tracker_activity_calories", + "activities/tracker/activityCalories", + "135", + ), ( ["activities/calories"], - "sensor.calories", + "sensor.first_l_calories", "activities/calories", "139", ), + ( + ["activities/tracker/calories"], + "sensor.first_l_tracker_calories", + "activities/tracker/calories", + "139", + ), ( ["activities/distance"], - "sensor.distance", + "sensor.first_l_distance", + "activities/distance", + "12.7", + ), + ( + ["activities/tracker/distance"], + "sensor.first_l_tracker_distance", "activities/distance", "12.7", ), ( ["activities/elevation"], - "sensor.elevation", + "sensor.first_l_elevation", "activities/elevation", "7600.24", ), ( ["activities/floors"], - "sensor.floors", + 
"sensor.first_l_floors", "activities/floors", "8", ), ( ["activities/heart"], - "sensor.resting_heart_rate", + "sensor.first_l_resting_heart_rate", "activities/heart", {"restingHeartRate": 76}, ), ( ["activities/minutesFairlyActive"], - "sensor.minutes_fairly_active", + "sensor.first_l_minutes_fairly_active", "activities/minutesFairlyActive", 35, ), ( ["activities/minutesLightlyActive"], - "sensor.minutes_lightly_active", + "sensor.first_l_minutes_lightly_active", "activities/minutesLightlyActive", 95, ), ( ["activities/minutesSedentary"], - "sensor.minutes_sedentary", + "sensor.first_l_minutes_sedentary", "activities/minutesSedentary", 18, ), ( ["activities/minutesVeryActive"], - "sensor.minutes_very_active", + "sensor.first_l_minutes_very_active", "activities/minutesVeryActive", 20, ), ( ["activities/steps"], - "sensor.steps", + "sensor.first_l_steps", "activities/steps", "5600", ), ( ["body/weight"], - "sensor.weight", + "sensor.first_l_weight", "body/weight", "175", ), ( ["body/fat"], - "sensor.body_fat", + "sensor.first_l_body_fat", "body/fat", "18", ), ( ["body/bmi"], - "sensor.bmi", + "sensor.first_l_bmi", "body/bmi", "23.7", ), ( ["sleep/awakeningsCount"], - "sensor.awakenings_count", + "sensor.first_l_awakenings_count", "sleep/awakeningsCount", "7", ), ( ["sleep/efficiency"], - "sensor.sleep_efficiency", + "sensor.first_l_sleep_efficiency", "sleep/efficiency", "80", ), ( ["sleep/minutesAfterWakeup"], - "sensor.minutes_after_wakeup", + "sensor.first_l_minutes_after_wakeup", "sleep/minutesAfterWakeup", "17", ), ( ["sleep/minutesAsleep"], - "sensor.sleep_minutes_asleep", + "sensor.first_l_sleep_minutes_asleep", "sleep/minutesAsleep", "360", ), ( ["sleep/minutesAwake"], - "sensor.sleep_minutes_awake", + "sensor.first_l_sleep_minutes_awake", "sleep/minutesAwake", "35", ), ( ["sleep/minutesToFallAsleep"], - "sensor.sleep_minutes_to_fall_asleep", + "sensor.first_l_sleep_minutes_to_fall_asleep", "sleep/minutesToFallAsleep", "35", ), ( ["sleep/startTime"], - "sensor.sleep_start_time", + "sensor.first_l_sleep_start_time", "sleep/startTime", "2020-01-27T00:17:30.000", ), ( ["sleep/timeInBed"], - "sensor.sleep_time_in_bed", + "sensor.first_l_sleep_time_in_bed", "sleep/timeInBed", "462", ), @@ -212,8 +230,8 @@ def mock_token_refresh(requests_mock: Mocker) -> None: ) async def test_sensors( hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], register_timeseries: Callable[[str, dict[str, Any]], None], entity_registry: er.EntityRegistry, entity_id: str, @@ -226,7 +244,7 @@ async def test_sensors( register_timeseries( api_resource, timeseries_response(api_resource.replace("/", "-"), api_value) ) - await sensor_platform_setup() + await integration_setup() entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 @@ -243,13 +261,13 @@ async def test_sensors( ) async def test_device_battery( hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], entity_registry: er.EntityRegistry, ) -> None: """Test battery level sensor for devices.""" - assert await sensor_platform_setup() + assert await integration_setup() entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 @@ -290,13 +308,13 @@ async def test_device_battery( ) async def test_device_battery_level( hass: HomeAssistant, - fitbit_config_setup: None, 
- sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], entity_registry: er.EntityRegistry, ) -> None: """Test battery level sensor for devices.""" - assert await sensor_platform_setup() + assert await integration_setup() entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 @@ -347,25 +365,25 @@ async def test_device_battery_level( ) async def test_profile_local( hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], register_timeseries: Callable[[str, dict[str, Any]], None], expected_unit: str, ) -> None: """Test the fitbit profile locale impact on unit of measure.""" register_timeseries("body/weight", timeseries_response("body-weight", "175")) - await sensor_platform_setup() + await integration_setup() entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - state = hass.states.get("sensor.weight") + state = hass.states.get("sensor.first_l_weight") assert state assert state.attributes.get("unit_of_measurement") == expected_unit @pytest.mark.parametrize( - ("sensor_platform_config", "api_response", "expected_state"), + ("imported_config_data", "api_response", "expected_state"), [ ( {"clock_format": "12H", "monitored_resources": ["sleep/startTime"]}, @@ -396,8 +414,8 @@ async def test_profile_local( ) async def test_sleep_time_clock_format( hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], register_timeseries: Callable[[str, dict[str, Any]], None], api_response: str, expected_state: str, @@ -407,9 +425,9 @@ async def test_sleep_time_clock_format( register_timeseries( "sleep/startTime", timeseries_response("sleep-startTime", api_response) ) - await sensor_platform_setup() + assert await integration_setup() - state = hass.states.get("sensor.sleep_start_time") + state = hass.states.get("sensor.first_l_sleep_start_time") assert state assert state.state == expected_state @@ -445,16 +463,16 @@ async def test_activity_scope_config_entry( states = hass.states.async_all() assert {s.entity_id for s in states} == { - "sensor.activity_calories", - "sensor.calories", - "sensor.distance", - "sensor.elevation", - "sensor.floors", - "sensor.minutes_fairly_active", - "sensor.minutes_lightly_active", - "sensor.minutes_sedentary", - "sensor.minutes_very_active", - "sensor.steps", + "sensor.first_l_activity_calories", + "sensor.first_l_calories", + "sensor.first_l_distance", + "sensor.first_l_elevation", + "sensor.first_l_floors", + "sensor.first_l_minutes_fairly_active", + "sensor.first_l_minutes_lightly_active", + "sensor.first_l_minutes_sedentary", + "sensor.first_l_minutes_very_active", + "sensor.first_l_steps", } @@ -478,7 +496,7 @@ async def test_heartrate_scope_config_entry( states = hass.states.async_all() assert {s.entity_id for s in states} == { - "sensor.resting_heart_rate", + "sensor.first_l_resting_heart_rate", } @@ -506,11 +524,11 @@ async def test_nutrition_scope_config_entry( ) assert await integration_setup() - state = hass.states.get("sensor.water") + state = hass.states.get("sensor.first_l_water") assert state assert (state.state, state.attributes) == snapshot - state = hass.states.get("sensor.calories_in") + state = hass.states.get("sensor.first_l_calories_in") assert state assert (state.state, 
state.attributes) == snapshot @@ -545,14 +563,14 @@ async def test_sleep_scope_config_entry( states = hass.states.async_all() assert {s.entity_id for s in states} == { - "sensor.awakenings_count", - "sensor.sleep_efficiency", - "sensor.minutes_after_wakeup", - "sensor.sleep_minutes_asleep", - "sensor.sleep_minutes_awake", - "sensor.sleep_minutes_to_fall_asleep", - "sensor.sleep_time_in_bed", - "sensor.sleep_start_time", + "sensor.first_l_awakenings_count", + "sensor.first_l_sleep_efficiency", + "sensor.first_l_minutes_after_wakeup", + "sensor.first_l_sleep_minutes_asleep", + "sensor.first_l_sleep_minutes_awake", + "sensor.first_l_sleep_minutes_to_fall_asleep", + "sensor.first_l_sleep_time_in_bed", + "sensor.first_l_sleep_start_time", } @@ -573,7 +591,7 @@ async def test_weight_scope_config_entry( states = hass.states.async_all() assert [s.entity_id for s in states] == [ - "sensor.weight", + "sensor.first_l_weight", ] @@ -623,7 +641,7 @@ async def test_sensor_update_failed( assert await integration_setup() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == "unavailable" @@ -655,7 +673,7 @@ async def test_sensor_update_failed_requires_reauth( assert await integration_setup() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == "unavailable" @@ -698,14 +716,14 @@ async def test_sensor_update_success( assert await integration_setup() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == "60" - await async_update_entity(hass, "sensor.resting_heart_rate") + await async_update_entity(hass, "sensor.first_l_resting_heart_rate") await hass.async_block_till_done() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == "70" @@ -867,6 +885,6 @@ async def test_resting_heart_rate_responses( ) assert await integration_setup() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == expected_state diff --git a/tests/components/fjaraskupan/test_coordinator.py b/tests/components/fjaraskupan/test_coordinator.py new file mode 100644 index 00000000000..e63d52a7594 --- /dev/null +++ b/tests/components/fjaraskupan/test_coordinator.py @@ -0,0 +1,33 @@ +"""Test the Fjäråskupan coordinator module.""" + +from fjaraskupan import ( + FjaraskupanConnectionError, + FjaraskupanError, + FjaraskupanReadError, + FjaraskupanWriteError, +) +import pytest + +from homeassistant.components.fjaraskupan.const import DOMAIN +from homeassistant.components.fjaraskupan.coordinator import exception_converter +from homeassistant.exceptions import HomeAssistantError + + +@pytest.mark.parametrize( + ("exception", "translation_key", "translation_placeholder"), + [ + (FjaraskupanReadError(), "read_error", None), + (FjaraskupanWriteError(), "write_error", None), + (FjaraskupanConnectionError(), "connection_error", None), + (FjaraskupanError("Some error"), "unexpected_error", {"msg": "Some error"}), + ], +) +def test_exeception_wrapper( + exception: Exception, translation_key: str, translation_placeholder: dict[str, str] +) -> None: + """Test our exception conversion.""" + with pytest.raises(HomeAssistantError) as exc_info, exception_converter(): + raise 
exception + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == translation_key + assert exc_info.value.translation_placeholders == translation_placeholder diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py index cc7c9fa0570..6ce17261bfc 100644 --- a/tests/components/flexit_bacnet/conftest.py +++ b/tests/components/flexit_bacnet/conftest.py @@ -44,6 +44,7 @@ def mock_flexit_bacnet() -> Generator[AsyncMock]: ): flexit_bacnet.serial_number = "0000-0001" flexit_bacnet.device_name = "Device Name" + flexit_bacnet.model = "S4 RER" flexit_bacnet.room_temperature = 19.0 flexit_bacnet.air_temp_setpoint_away = 18.0 flexit_bacnet.air_temp_setpoint_home = 22.0 @@ -68,16 +69,16 @@ def mock_flexit_bacnet() -> Generator[AsyncMock]: flexit_bacnet.electric_heater = True # Mock fan setpoints - flexit_bacnet.fan_setpoint_extract_air_fire = 10 - flexit_bacnet.fan_setpoint_supply_air_fire = 20 - flexit_bacnet.fan_setpoint_extract_air_away = 30 - flexit_bacnet.fan_setpoint_supply_air_away = 40 - flexit_bacnet.fan_setpoint_extract_air_home = 50 - flexit_bacnet.fan_setpoint_supply_air_home = 60 - flexit_bacnet.fan_setpoint_extract_air_high = 70 - flexit_bacnet.fan_setpoint_supply_air_high = 80 - flexit_bacnet.fan_setpoint_extract_air_cooker = 90 - flexit_bacnet.fan_setpoint_supply_air_cooker = 100 + flexit_bacnet.fan_setpoint_extract_air_fire = 56 + flexit_bacnet.fan_setpoint_supply_air_fire = 77 + flexit_bacnet.fan_setpoint_extract_air_away = 40 + flexit_bacnet.fan_setpoint_supply_air_away = 42 + flexit_bacnet.fan_setpoint_extract_air_home = 70 + flexit_bacnet.fan_setpoint_supply_air_home = 74 + flexit_bacnet.fan_setpoint_extract_air_high = 100 + flexit_bacnet.fan_setpoint_supply_air_high = 100 + flexit_bacnet.fan_setpoint_extract_air_cooker = 50 + flexit_bacnet.fan_setpoint_supply_air_cooker = 70 yield flexit_bacnet diff --git a/tests/components/flexit_bacnet/snapshots/test_number.ambr b/tests/components/flexit_bacnet/snapshots/test_number.ambr index c4fb1e7c434..78eefd08345 100644 --- a/tests/components/flexit_bacnet/snapshots/test_number.ambr +++ b/tests/components/flexit_bacnet/snapshots/test_number.ambr @@ -5,8 +5,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 100, - 'min': 0, + 'max': 70, + 'min': 30, 'mode': , 'step': 1, }), @@ -42,8 +42,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Device Name Away extract fan setpoint', - 'max': 100, - 'min': 0, + 'max': 70, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -53,7 +53,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '30', + 'state': '40', }) # --- # name: test_numbers[number.device_name_away_supply_fan_setpoint-entry] @@ -62,8 +62,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 100, - 'min': 0, + 'max': 74, + 'min': 30, 'mode': , 'step': 1, }), @@ -99,8 +99,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Device Name Away supply fan setpoint', - 'max': 100, - 'min': 0, + 'max': 74, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -110,7 +110,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '40', + 'state': '42', }) # --- # name: test_numbers[number.device_name_cooker_hood_extract_fan_setpoint-entry] @@ -120,7 +120,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -157,7 +157,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device 
Name Cooker hood extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -167,7 +167,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '90', + 'state': '50', }) # --- # name: test_numbers[number.device_name_cooker_hood_supply_fan_setpoint-entry] @@ -177,7 +177,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -214,7 +214,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Cooker hood supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -224,7 +224,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '70', }) # --- # name: test_numbers[number.device_name_fireplace_extract_fan_setpoint-entry] @@ -234,7 +234,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -271,7 +271,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Fireplace extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -281,7 +281,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '10', + 'state': '56', }) # --- # name: test_numbers[number.device_name_fireplace_supply_fan_setpoint-entry] @@ -291,7 +291,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -328,7 +328,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Fireplace supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -338,7 +338,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '20', + 'state': '77', }) # --- # name: test_numbers[number.device_name_high_extract_fan_setpoint-entry] @@ -348,7 +348,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 70, 'mode': , 'step': 1, }), @@ -385,7 +385,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name High extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 70, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -395,7 +395,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '70', + 'state': '100', }) # --- # name: test_numbers[number.device_name_high_supply_fan_setpoint-entry] @@ -405,7 +405,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 74, 'mode': , 'step': 1, }), @@ -442,7 +442,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name High supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 74, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -452,7 +452,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '80', + 'state': '100', }) # --- # name: test_numbers[number.device_name_home_extract_fan_setpoint-entry] @@ -462,7 +462,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 40, 'mode': , 'step': 1, }), @@ -499,7 +499,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Home extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 40, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -509,7 +509,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '50', + 'state': '70', }) # --- # name: test_numbers[number.device_name_home_supply_fan_setpoint-entry] @@ -519,7 +519,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 42, 'mode': , 'step': 1, }), @@ -556,7 +556,7 @@ 
'device_class': 'power_factor', 'friendly_name': 'Device Name Home supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 42, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -566,6 +566,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '60', + 'state': '74', }) # --- diff --git a/tests/components/flexit_bacnet/test_number.py b/tests/components/flexit_bacnet/test_number.py index ad49908fa96..f566b623f12 100644 --- a/tests/components/flexit_bacnet/test_number.py +++ b/tests/components/flexit_bacnet/test_number.py @@ -64,21 +64,21 @@ async def test_numbers_implementation( assert len(mocked_method.mock_calls) == 1 assert hass.states.get(ENTITY_ID).state == "60" - mock_flexit_bacnet.fan_setpoint_supply_air_fire = 10 + mock_flexit_bacnet.fan_setpoint_supply_air_fire = 40 await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: ENTITY_ID, - ATTR_VALUE: 10, + ATTR_VALUE: 40, }, blocking=True, ) mocked_method = getattr(mock_flexit_bacnet, "set_fan_setpoint_supply_air_fire") assert len(mocked_method.mock_calls) == 2 - assert hass.states.get(ENTITY_ID).state == "10" + assert hass.states.get(ENTITY_ID).state == "40" # Error recovery, when setting the value mock_flexit_bacnet.set_fan_setpoint_supply_air_fire.side_effect = DecodingError @@ -89,7 +89,7 @@ async def test_numbers_implementation( SERVICE_SET_VALUE, { ATTR_ENTITY_ID: ENTITY_ID, - ATTR_VALUE: 10, + ATTR_VALUE: 40, }, blocking=True, ) diff --git a/tests/components/flipr/__init__.py b/tests/components/flipr/__init__.py index 26767261866..3c5bfc2a6c2 100644 --- a/tests/components/flipr/__init__.py +++ b/tests/components/flipr/__init__.py @@ -1 +1,15 @@ """Tests for the Flipr integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Fixture for setting up the component.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/flipr/conftest.py b/tests/components/flipr/conftest.py new file mode 100644 index 00000000000..18457000636 --- /dev/null +++ b/tests/components/flipr/conftest.py @@ -0,0 +1,97 @@ +"""Common fixtures for the flipr tests.""" + +from collections.abc import Generator +from datetime import datetime +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.flipr.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.util import dt as dt_util + +from tests.common import MockConfigEntry + +# Data for the mocked object returned via flipr_api client. 
+MOCK_DATE_TIME = datetime(2021, 2, 15, 9, 10, 32, tzinfo=dt_util.UTC) +MOCK_FLIPR_MEASURE = { + "temperature": 10.5, + "ph": 7.03, + "chlorine": 0.23654886, + "red_ox": 657.58, + "date_time": MOCK_DATE_TIME, + "ph_status": "TooLow", + "chlorine_status": "Medium", + "battery": 95.0, +} + +MOCK_HUB_STATE_ON = { + "state": True, + "mode": "planning", + "planning": "dummyplanningid", +} + +MOCK_HUB_STATE_OFF = { + "state": False, + "mode": "manual", + "planning": "dummyplanningid", +} + +MOCK_HUB_MODE_MANUAL = { + "state": False, + "mode": "manual", + "planning": "dummyplanningid", +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.flipr.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock the config entry.""" + return MockConfigEntry( + version=2, + domain=DOMAIN, + unique_id="toto@toto.com", + data={ + CONF_EMAIL: "toto@toto.com", + CONF_PASSWORD: "myPassword", + }, + ) + + +@pytest.fixture +def mock_flipr_client() -> Generator[AsyncMock]: + """Mock a Flipr client.""" + + with ( + patch( + "homeassistant.components.flipr.FliprAPIRestClient", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.flipr.config_flow.FliprAPIRestClient", + new=mock_client, + ), + ): + client = mock_client.return_value + + # Default values for the tests using this mock : + client.search_all_ids.return_value = {"flipr": ["myfliprid"], "hub": []} + + client.get_pool_measure_latest.return_value = MOCK_FLIPR_MEASURE + + client.get_hub_state.return_value = MOCK_HUB_STATE_ON + + client.set_hub_state.return_value = MOCK_HUB_STATE_ON + + client.set_hub_mode.return_value = MOCK_HUB_MODE_MANUAL + + yield client diff --git a/tests/components/flipr/test_binary_sensor.py b/tests/components/flipr/test_binary_sensor.py index 971b5b046b3..ed43dbb8a77 100644 --- a/tests/components/flipr/test_binary_sensor.py +++ b/tests/components/flipr/test_binary_sensor.py @@ -1,49 +1,24 @@ """Test the Flipr binary sensor.""" -from datetime import datetime -from unittest.mock import patch +from unittest.mock import AsyncMock -from homeassistant.components.flipr.const import CONF_FLIPR_ID, DOMAIN -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.util import dt as dt_util + +from . import setup_integration from tests.common import MockConfigEntry -# Data for the mocked object returned via flipr_api client. 
-MOCK_DATE_TIME = datetime(2021, 2, 15, 9, 10, 32, tzinfo=dt_util.UTC) -MOCK_FLIPR_MEASURE = { - "temperature": 10.5, - "ph": 7.03, - "chlorine": 0.23654886, - "red_ox": 657.58, - "date_time": MOCK_DATE_TIME, - "ph_status": "TooLow", - "chlorine_status": "Medium", -} - -async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_flipr_client: AsyncMock, +) -> None: """Test the creation and values of the Flipr binary sensors.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="test_entry_unique_id", - data={ - CONF_EMAIL: "toto@toto.com", - CONF_PASSWORD: "myPassword", - CONF_FLIPR_ID: "myfliprid", - }, - ) - entry.add_to_hass(hass) - - with patch( - "flipr_api.FliprAPIRestClient.get_pool_measure_latest", - return_value=MOCK_FLIPR_MEASURE, - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) # Check entity unique_id value that is generated in FliprEntity base class. entity = entity_registry.async_get("binary_sensor.flipr_myfliprid_ph_status") diff --git a/tests/components/flipr/test_config_flow.py b/tests/components/flipr/test_config_flow.py index b99e6af7383..9df77dc0b2a 100644 --- a/tests/components/flipr/test_config_flow.py +++ b/tests/components/flipr/test_config_flow.py @@ -1,169 +1,131 @@ """Test the Flipr config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock import pytest from requests.exceptions import HTTPError, Timeout -from homeassistant import config_entries -from homeassistant.components.flipr.const import CONF_FLIPR_ID, DOMAIN +from homeassistant.components.flipr.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -@pytest.fixture(name="mock_setup") -def mock_setups(): - """Prevent setup.""" - with patch( - "homeassistant.components.flipr.async_setup_entry", - return_value=True, - ): - yield - - -async def test_show_form(hass: HomeAssistant) -> None: - """Test we get the form.""" +async def test_full_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_flipr_client: AsyncMock +) -> None: + """Test the full flow.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == config_entries.SOURCE_USER + assert result["step_id"] == "user" + assert not result["errors"] - -async def test_invalid_credential(hass: HomeAssistant, mock_setup) -> None: - """Test invalid credential.""" - with patch( - "flipr_api.FliprAPIRestClient.search_flipr_ids", side_effect=HTTPError() - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data={ - CONF_EMAIL: "bad_login", - CONF_PASSWORD: "bad_pass", - CONF_FLIPR_ID: "", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_auth"} - - -async def test_nominal_case(hass: HomeAssistant, mock_setup) -> None: - """Test valid login form.""" - with patch( - "flipr_api.FliprAPIRestClient.search_flipr_ids", - return_value=["flipid"], - ) as mock_flipr_client: - result = await 
hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data={ - CONF_EMAIL: "dummylogin", - CONF_PASSWORD: "dummypass", - CONF_FLIPR_ID: "flipid", - }, - ) - await hass.async_block_till_done() - - assert len(mock_flipr_client.mock_calls) == 1 + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_EMAIL: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "flipid" + assert result["title"] == "Flipr dummylogin" + assert result["result"].unique_id == "dummylogin" assert result["data"] == { CONF_EMAIL: "dummylogin", CONF_PASSWORD: "dummypass", - CONF_FLIPR_ID: "flipid", } -async def test_multiple_flip_id(hass: HomeAssistant, mock_setup) -> None: - """Test multiple flipr id adding a config step.""" - with patch( - "flipr_api.FliprAPIRestClient.search_flipr_ids", - return_value=["FLIP1", "FLIP2"], - ) as mock_flipr_client: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data={ - CONF_EMAIL: "dummylogin", - CONF_PASSWORD: "dummypass", - }, - ) +@pytest.mark.parametrize( + ("exception", "expected"), + [ + (Exception("Bad request Boy :) --"), {"base": "unknown"}), + (HTTPError, {"base": "invalid_auth"}), + (Timeout, {"base": "cannot_connect"}), + (ConnectionError, {"base": "cannot_connect"}), + ], +) +async def test_errors( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_flipr_client: AsyncMock, + exception: Exception, + expected: dict[str, str], +) -> None: + """Test we handle any error.""" + mock_flipr_client.search_all_ids.side_effect = exception - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "flipr_id" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_EMAIL: "nada", + CONF_PASSWORD: "nadap", + }, + ) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_FLIPR_ID: "FLIP2"}, - ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == expected - assert len(mock_flipr_client.mock_calls) == 1 + # Test of recover in normal state after correction of the 1st error + mock_flipr_client.search_all_ids.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "FLIP2" + assert result["title"] == "Flipr dummylogin" assert result["data"] == { CONF_EMAIL: "dummylogin", CONF_PASSWORD: "dummypass", - CONF_FLIPR_ID: "FLIP2", } -async def test_no_flip_id(hass: HomeAssistant, mock_setup) -> None: - """Test no flipr id found.""" - with patch( - "flipr_api.FliprAPIRestClient.search_flipr_ids", - return_value=[], - ) as mock_flipr_client: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data={ - CONF_EMAIL: "dummylogin", - CONF_PASSWORD: "dummypass", - }, - ) +async def test_no_flipr_found( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_flipr_client: AsyncMock +) -> None: + """Test the case where there is no flipr found.""" - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "no_flipr_id_found"} - - assert 
len(mock_flipr_client.mock_calls) == 1 - - -async def test_http_errors(hass: HomeAssistant, mock_setup) -> None: - """Test HTTP Errors.""" - with patch("flipr_api.FliprAPIRestClient.search_flipr_ids", side_effect=Timeout()): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data={ - CONF_EMAIL: "nada", - CONF_PASSWORD: "nada", - CONF_FLIPR_ID: "", - }, - ) + mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": []} + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_EMAIL: "nada", + CONF_PASSWORD: "nadap", + }, + ) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} + assert result["step_id"] == "user" + assert result["errors"] == {"base": "no_flipr_id_found"} - with patch( - "flipr_api.FliprAPIRestClient.search_flipr_ids", - side_effect=Exception("Bad request Boy :) --"), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data={ - CONF_EMAIL: "nada", - CONF_PASSWORD: "nada", - CONF_FLIPR_ID: "", - }, - ) + # Test of recover in normal state after correction of the 1st error + mock_flipr_client.search_all_ids.return_value = {"flipr": ["myfliprid"], "hub": []} - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown"} + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_EMAIL: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Flipr dummylogin" + assert result["data"] == { + CONF_EMAIL: "dummylogin", + CONF_PASSWORD: "dummypass", + } diff --git a/tests/components/flipr/test_init.py b/tests/components/flipr/test_init.py index 6a49b5b7200..6e9341b1e06 100644 --- a/tests/components/flipr/test_init.py +++ b/tests/components/flipr/test_init.py @@ -1,29 +1,90 @@ """Tests for init methods.""" -from unittest.mock import patch +from unittest.mock import AsyncMock -from homeassistant.components.flipr.const import CONF_FLIPR_ID, DOMAIN +from homeassistant.components.flipr.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant +from . 
import setup_integration + from tests.common import MockConfigEntry -async def test_unload_entry(hass: HomeAssistant) -> None: +async def test_unload_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_flipr_client: AsyncMock, +) -> None: """Test unload entry.""" - entry = MockConfigEntry( + + mock_flipr_client.search_all_ids.return_value = { + "flipr": ["myfliprid"], + "hub": ["hubid"], + } + + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_duplicate_config_entries( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_flipr_client: AsyncMock, +) -> None: + """Test duplicate config entries.""" + + mock_config_entry_dup = MockConfigEntry( + version=2, domain=DOMAIN, + unique_id="toto@toto.com", data={ - CONF_EMAIL: "dummylogin", - CONF_PASSWORD: "dummypass", - CONF_FLIPR_ID: "FLIP1", + CONF_EMAIL: "toto@toto.com", + CONF_PASSWORD: "myPassword", + "flipr_id": "myflipr_id_dup", }, - unique_id="123456", ) - entry.add_to_hass(hass) - with patch("homeassistant.components.flipr.coordinator.FliprAPIRestClient"): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - await hass.config_entries.async_unload(entry.entry_id) - assert entry.state is ConfigEntryState.NOT_LOADED + + mock_config_entry.add_to_hass(hass) + # Initialize the first entry with default mock + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Initialize the second entry with another flipr id + mock_config_entry_dup.add_to_hass(hass) + assert not await hass.config_entries.async_setup(mock_config_entry_dup.entry_id) + await hass.async_block_till_done() + assert mock_config_entry_dup.state is ConfigEntryState.SETUP_ERROR + + +async def test_migrate_entry( + hass: HomeAssistant, + mock_flipr_client: AsyncMock, +) -> None: + """Test migrate config entry from v1 to v2.""" + + mock_config_entry_v1 = MockConfigEntry( + version=1, + domain=DOMAIN, + title="myfliprid", + unique_id="test_entry_unique_id", + data={ + CONF_EMAIL: "toto@toto.com", + CONF_PASSWORD: "myPassword", + "flipr_id": "myfliprid", + }, + ) + + await setup_integration(hass, mock_config_entry_v1) + assert mock_config_entry_v1.state is ConfigEntryState.LOADED + assert mock_config_entry_v1.version == 2 + assert mock_config_entry_v1.unique_id == "toto@toto.com" + assert mock_config_entry_v1.data == { + CONF_EMAIL: "toto@toto.com", + CONF_PASSWORD: "myPassword", + "flipr_id": "myfliprid", + } diff --git a/tests/components/flipr/test_select.py b/tests/components/flipr/test_select.py new file mode 100644 index 00000000000..d71297f4f1a --- /dev/null +++ b/tests/components/flipr/test_select.py @@ -0,0 +1,109 @@ +"""Test the Flipr select for Hub.""" + +import logging +from unittest.mock import AsyncMock + +from flipr_api.exceptions import FliprError + +from homeassistant.components.select import ( + ATTR_OPTION, + ATTR_OPTIONS, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry + +_LOGGER = logging.getLogger(__name__) + +SELECT_ENTITY_ID = "select.flipr_hub_myhubid_mode" + + +async def test_entities( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_flipr_client: AsyncMock, +) -> None: + """Test the creation and values of the Flipr select.""" + + mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} + + await setup_integration(hass, mock_config_entry) + + # Check entity unique_id value that is generated in FliprEntity base class. + entity = entity_registry.async_get(SELECT_ENTITY_ID) + _LOGGER.debug("Found entity = %s", entity) + assert entity.unique_id == "myhubid-hubMode" + + mode = hass.states.get(SELECT_ENTITY_ID) + _LOGGER.debug("Found mode = %s", mode) + assert mode + assert mode.state == "planning" + assert mode.attributes.get(ATTR_OPTIONS) == ["auto", "manual", "planning"] + + +async def test_select_actions( + hass: HomeAssistant, + mock_flipr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the actions on the Flipr Hub select.""" + + mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} + + await setup_integration(hass, mock_config_entry) + + state = hass.states.get(SELECT_ENTITY_ID) + assert state.state == "planning" + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: SELECT_ENTITY_ID, ATTR_OPTION: "manual"}, + blocking=True, + ) + state = hass.states.get(SELECT_ENTITY_ID) + assert state.state == "manual" + + +async def test_no_select_found( + hass: HomeAssistant, + mock_flipr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the select absence.""" + + mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": []} + + await setup_integration(hass, mock_config_entry) + + assert not hass.states.async_entity_ids(SELECT_ENTITY_ID) + + +async def test_error_flipr_api( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_flipr_client: AsyncMock, +) -> None: + """Test the Flipr sensors error.""" + + mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} + + mock_flipr_client.get_hub_state.side_effect = FliprError( + "Error during flipr data retrieval..." + ) + + await setup_integration(hass, mock_config_entry) + + # Check entity is not generated because of the FliprError raised. 
+ entity = entity_registry.async_get(SELECT_ENTITY_ID) + assert entity is None diff --git a/tests/components/flipr/test_sensor.py b/tests/components/flipr/test_sensor.py index 31eb075469d..77937e3af54 100644 --- a/tests/components/flipr/test_sensor.py +++ b/tests/components/flipr/test_sensor.py @@ -1,59 +1,28 @@ """Test the Flipr sensor.""" -from datetime import datetime -from unittest.mock import patch +from unittest.mock import AsyncMock from flipr_api.exceptions import FliprError -from homeassistant.components.flipr.const import CONF_FLIPR_ID, DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass -from homeassistant.const import ( - ATTR_UNIT_OF_MEASUREMENT, - CONF_EMAIL, - CONF_PASSWORD, - PERCENTAGE, - UnitOfTemperature, -) +from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.util import dt as dt_util + +from . import setup_integration from tests.common import MockConfigEntry -# Data for the mocked object returned via flipr_api client. -MOCK_DATE_TIME = datetime(2021, 2, 15, 9, 10, 32, tzinfo=dt_util.UTC) -MOCK_FLIPR_MEASURE = { - "temperature": 10.5, - "ph": 7.03, - "chlorine": 0.23654886, - "red_ox": 657.58, - "date_time": MOCK_DATE_TIME, - "ph_status": "TooLow", - "chlorine_status": "Medium", - "battery": 95.0, -} +async def test_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_flipr_client: AsyncMock, +) -> None: + """Test the creation and values of the Flipr binary sensors.""" -async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: - """Test the creation and values of the Flipr sensors.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="test_entry_unique_id", - data={ - CONF_EMAIL: "toto@toto.com", - CONF_PASSWORD: "myPassword", - CONF_FLIPR_ID: "myfliprid", - }, - ) - - entry.add_to_hass(hass) - - with patch( - "flipr_api.FliprAPIRestClient.get_pool_measure_latest", - return_value=MOCK_FLIPR_MEASURE, - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) # Check entity unique_id value that is generated in FliprEntity base class. entity = entity_registry.async_get("sensor.flipr_myfliprid_red_ox") @@ -97,27 +66,18 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) async def test_error_flipr_api_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_flipr_client: AsyncMock, ) -> None: """Test the Flipr sensors error.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="test_entry_unique_id", - data={ - CONF_EMAIL: "toto@toto.com", - CONF_PASSWORD: "myPassword", - CONF_FLIPR_ID: "myfliprid", - }, + + mock_flipr_client.get_pool_measure_latest.side_effect = FliprError( + "Error during flipr data retrieval..." ) - entry.add_to_hass(hass) - - with patch( - "flipr_api.FliprAPIRestClient.get_pool_measure_latest", - side_effect=FliprError("Error during flipr data retrieval..."), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) # Check entity is not generated because of the FliprError raised. 
entity = entity_registry.async_get("sensor.flipr_myfliprid_red_ox") diff --git a/tests/components/flipr/test_switch.py b/tests/components/flipr/test_switch.py new file mode 100644 index 00000000000..f994ac1bdd3 --- /dev/null +++ b/tests/components/flipr/test_switch.py @@ -0,0 +1,110 @@ +"""Test the Flipr switch for Hub.""" + +from unittest.mock import AsyncMock + +from flipr_api.exceptions import FliprError + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration +from .conftest import MOCK_HUB_STATE_OFF + +from tests.common import MockConfigEntry + +SWITCH_ENTITY_ID = "switch.flipr_hub_myhubid" + + +async def test_entities( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_flipr_client: AsyncMock, +) -> None: + """Test the creation and values of the Flipr switch.""" + + mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} + + await setup_integration(hass, mock_config_entry) + + # Check entity unique_id value that is generated in FliprEntity base class. + entity = entity_registry.async_get(SWITCH_ENTITY_ID) + assert entity.unique_id == "myhubid-hubState" + + state = hass.states.get(SWITCH_ENTITY_ID) + assert state + assert state.state == STATE_ON + + +async def test_switch_actions( + hass: HomeAssistant, + mock_flipr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the actions on the Flipr Hub switch.""" + + mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, + blocking=True, + ) + state = hass.states.get(SWITCH_ENTITY_ID) + assert state.state == STATE_ON + + mock_flipr_client.set_hub_state.return_value = MOCK_HUB_STATE_OFF + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, + blocking=True, + ) + state = hass.states.get(SWITCH_ENTITY_ID) + assert state.state == STATE_OFF + + +async def test_no_switch_found( + hass: HomeAssistant, + mock_flipr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the switch absence.""" + + mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": []} + + await setup_integration(hass, mock_config_entry) + + assert not hass.states.async_entity_ids(SWITCH_DOMAIN) + + +async def test_error_flipr_api( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_flipr_client: AsyncMock, +) -> None: + """Test the Flipr sensors error.""" + + mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} + + mock_flipr_client.get_hub_state.side_effect = FliprError( + "Error during flipr data retrieval..." + ) + + await setup_integration(hass, mock_config_entry) + + # Check entity is not generated because of the FliprError raised. 
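These flipr error tests only assert the observable outcome (no select/switch entity is created when the mocked client raises FliprError); the coordinator code that produces that outcome is not part of this patch. As a rough sketch of the usual Home Assistant pattern they rely on — the class name, logger wiring and the get_hub_state call signature below are illustrative assumptions, not the integration's actual code — a FliprError raised during the first refresh is converted into UpdateFailed, so the config entry's first refresh aborts setup before any entity is added:

import logging
from datetime import timedelta
from typing import Any

from flipr_api import FliprAPIRestClient
from flipr_api.exceptions import FliprError

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)


class HubDataCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Poll a single Flipr hub and cache its state for the entities (illustrative sketch)."""

    def __init__(
        self, hass: HomeAssistant, client: FliprAPIRestClient, hub_id: str
    ) -> None:
        super().__init__(
            hass,
            _LOGGER,
            name=f"Flipr hub {hub_id}",
            update_interval=timedelta(minutes=15),
        )
        self.client = client
        self.hub_id = hub_id

    async def _async_update_data(self) -> dict[str, Any]:
        try:
            # flipr_api is a blocking client, so the call runs in the executor.
            return await self.hass.async_add_executor_job(
                self.client.get_hub_state, self.hub_id
            )
        except FliprError as err:
            # Raising UpdateFailed makes the first refresh fail, so platform setup
            # never runs and entity_registry.async_get(...) returns None, which is
            # what the assertions just below check.
            raise UpdateFailed(f"Error communicating with the Flipr API: {err}") from err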
+ entity = entity_registry.async_get(SWITCH_ENTITY_ID) + assert entity is None diff --git a/tests/components/flo/test_switch.py b/tests/components/flo/test_switch.py index 02ab93f9e67..5c124d312a7 100644 --- a/tests/components/flo/test_switch.py +++ b/tests/components/flo/test_switch.py @@ -3,7 +3,7 @@ import pytest from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -30,11 +30,11 @@ async def test_valve_switches( assert hass.states.get(entity_id).state == STATE_ON await hass.services.async_call( - DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True ) assert hass.states.get(entity_id).state == STATE_OFF await hass.services.async_call( - DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True ) assert hass.states.get(entity_id).state == STATE_ON diff --git a/tests/components/flume/conftest.py b/tests/components/flume/conftest.py index fb0d0157bbc..6173db1e2b9 100644 --- a/tests/components/flume/conftest.py +++ b/tests/components/flume/conftest.py @@ -3,8 +3,7 @@ from collections.abc import Generator import datetime from http import HTTPStatus -import json -from unittest.mock import mock_open, patch +from unittest.mock import patch import jwt import pytest @@ -116,7 +115,7 @@ def access_token_fixture(requests_mock: Mocker) -> Generator[None]: status_code=HTTPStatus.OK, json={"data": [token_response]}, ) - with patch("builtins.open", mock_open(read_data=json.dumps(token_response))): + with patch("homeassistant.components.flume.coordinator.FlumeAuth.write_token_file"): yield diff --git a/tests/components/flume/test_config_flow.py b/tests/components/flume/test_config_flow.py index 915299223e9..87fe3a2bbf0 100644 --- a/tests/components/flume/test_config_flow.py +++ b/tests/components/flume/test_config_flow.py @@ -124,11 +124,7 @@ async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": "test@test.org"}, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/flux/test_switch.py b/tests/components/flux/test_switch.py index ab0e8a556c4..f7dc30db240 100644 --- a/tests/components/flux/test_switch.py +++ b/tests/components/flux/test_switch.py @@ -1164,7 +1164,7 @@ async def test_flux_with_multiple_lights( assert call.data[light.ATTR_XY_COLOR] == [0.46, 0.376] -async def test_flux_with_mired( +async def test_flux_with_temp( hass: HomeAssistant, mock_light_entities: list[MockLight], ) -> None: @@ -1224,7 +1224,7 @@ async def test_flux_with_mired( async_fire_time_changed(hass, test_time) await hass.async_block_till_done() call = turn_on_calls[-1] - assert call.data[light.ATTR_COLOR_TEMP] == 269 + assert call.data[light.ATTR_COLOR_TEMP_KELVIN] == 3708 async def test_flux_with_rgb( diff --git a/tests/components/flux_led/test_config_flow.py b/tests/components/flux_led/test_config_flow.py index d95bc99f097..4332cb69f02 100644 --- 
a/tests/components/flux_led/test_config_flow.py +++ b/tests/components/flux_led/test_config_flow.py @@ -8,6 +8,7 @@ import pytest from homeassistant import config_entries from homeassistant.components import dhcp +from homeassistant.components.flux_led.config_flow import FluxLedConfigFlow from homeassistant.components.flux_led.const import ( CONF_CUSTOM_EFFECT_COLORS, CONF_CUSTOM_EFFECT_SPEED_PCT, @@ -406,7 +407,20 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_in_progress" - with _patch_discovery(), _patch_wifibulb(): + real_is_matching = FluxLedConfigFlow.is_matching + return_values = [] + + def is_matching(self, other_flow) -> bool: + return_values.append(real_is_matching(self, other_flow)) + return return_values[-1] + + with ( + _patch_discovery(), + _patch_wifibulb(), + patch.object( + FluxLedConfigFlow, "is_matching", wraps=is_matching, autospec=True + ), + ): result3 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -417,6 +431,10 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: ), ) await hass.async_block_till_done() + + # Ensure the is_matching method returned True + assert return_values == [True] + assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "already_in_progress" diff --git a/tests/components/flux_led/test_light.py b/tests/components/flux_led/test_light.py index f5a7b310202..a881bc2ea27 100644 --- a/tests/components/flux_led/test_light.py +++ b/tests/components/flux_led/test_light.py @@ -41,7 +41,7 @@ from homeassistant.components.flux_led.light import ( from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, @@ -517,7 +517,7 @@ async def test_rgbw_light_auto_on(hass: HomeAssistant) -> None: # enough resolution to determine which color to display bulb.async_turn_on.assert_not_called() bulb.async_set_brightness.assert_not_called() - bulb.async_set_levels.assert_called_with(2, 0, 0, 0) + bulb.async_set_levels.assert_called_with(3, 0, 0, 0) bulb.async_set_levels.reset_mock() await hass.services.async_call( @@ -534,7 +534,7 @@ async def test_rgbw_light_auto_on(hass: HomeAssistant) -> None: # enough resolution to determine which color to display bulb.async_turn_on.assert_not_called() bulb.async_set_brightness.assert_not_called() - bulb.async_set_levels.assert_called_with(2, 0, 0, 56) + bulb.async_set_levels.assert_called_with(3, 0, 0, 56) bulb.async_set_levels.reset_mock() bulb.brightness = 128 @@ -652,7 +652,7 @@ async def test_rgbww_light_auto_on(hass: HomeAssistant) -> None: # which color to display bulb.async_turn_on.assert_not_called() bulb.async_set_brightness.assert_not_called() - bulb.async_set_levels.assert_called_with(2, 0, 0, 0, 0) + bulb.async_set_levels.assert_called_with(3, 0, 0, 0, 0) bulb.async_set_levels.reset_mock() bulb.brightness = 128 @@ -777,12 +777,12 @@ async def test_rgb_cct_light(hass: HomeAssistant) -> None: assert attributes[ATTR_BRIGHTNESS] == 128 assert attributes[ATTR_COLOR_MODE] == "color_temp" assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "rgb"] - assert attributes[ATTR_COLOR_TEMP] == 200 + assert attributes[ATTR_COLOR_TEMP_KELVIN] == 5000 await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 370}, + {ATTR_ENTITY_ID: entity_id, 
ATTR_COLOR_TEMP_KELVIN: 2702}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(2702, 128) @@ -1003,7 +1003,7 @@ async def test_rgbw_light_warm_white(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1012,7 +1012,7 @@ async def test_rgbw_light_warm_white(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154, ATTR_BRIGHTNESS: 255}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493, ATTR_BRIGHTNESS: 255}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1021,7 +1021,7 @@ async def test_rgbw_light_warm_white(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 290}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 3448}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(3448, 255) @@ -1241,7 +1241,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1250,7 +1250,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154, ATTR_BRIGHTNESS: 255}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493, ATTR_BRIGHTNESS: 255}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1259,7 +1259,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 290}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 3448}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(3448, 255) @@ -1316,7 +1316,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 170}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 5882}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(5882, MIN_CCT_BRIGHTNESS) diff --git a/tests/components/folder_watcher/test_config_flow.py b/tests/components/folder_watcher/test_config_flow.py index 745059717fb..3b41b5724fc 100644 --- a/tests/components/folder_watcher/test_config_flow.py +++ b/tests/components/folder_watcher/test_config_flow.py @@ -148,39 +148,3 @@ async def test_form_already_configured(hass: HomeAssistant, tmp_path: Path) -> N assert result["type"] == FlowResultType.ABORT assert result["reason"] == "already_configured" - - -async def test_import(hass: HomeAssistant, tmp_path: Path) -> None: - """Test import flow.""" - path = tmp_path.as_posix() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_FOLDER: path, CONF_PATTERNS: ["*"]}, - ) - await hass.async_block_till_done() - - assert result["type"] == FlowResultType.CREATE_ENTRY - assert result["title"] == f"Folder Watcher {path}" - assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} - - -async def test_import_already_configured(hass: HomeAssistant, 
tmp_path: Path) -> None: - """Test we abort import when entry is already configured.""" - path = tmp_path.as_posix() - - entry = MockConfigEntry( - domain=DOMAIN, - title=f"Folder Watcher {path}", - data={CONF_FOLDER: path}, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_FOLDER: path}, - ) - - assert result["type"] == FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/folder_watcher/test_init.py b/tests/components/folder_watcher/test_init.py index 965ae33c4f8..f4a3b7e3630 100644 --- a/tests/components/folder_watcher/test_init.py +++ b/tests/components/folder_watcher/test_init.py @@ -1,33 +1,68 @@ """The tests for the folder_watcher component.""" -import os +from pathlib import Path from types import SimpleNamespace from unittest.mock import Mock, patch +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components import folder_watcher +from homeassistant.components.folder_watcher.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component +from homeassistant.helpers import issue_registry as ir + +from tests.common import MockConfigEntry -async def test_invalid_path_setup(hass: HomeAssistant) -> None: +async def test_invalid_path_setup( + hass: HomeAssistant, + tmp_path: Path, + freezer: FrozenDateTimeFactory, + issue_registry: ir.IssueRegistry, +) -> None: """Test that an invalid path is not set up.""" - assert not await async_setup_component( - hass, - folder_watcher.DOMAIN, - {folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: "invalid_path"}}, + freezer.move_to("2022-04-19 10:31:02+00:00") + path = tmp_path.as_posix() + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + title=f"Folder Watcher {path!s}", + data={}, + options={"folder": str(path), "patterns": ["*"]}, + entry_id="1", ) + config_entry.add_to_hass(hass) -async def test_valid_path_setup(hass: HomeAssistant) -> None: + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.SETUP_ERROR + assert len(issue_registry.issues) == 1 + + +async def test_valid_path_setup( + hass: HomeAssistant, tmp_path: Path, freezer: FrozenDateTimeFactory +) -> None: """Test that a valid path is setup.""" - cwd = os.path.join(os.path.dirname(__file__)) - hass.config.allowlist_external_dirs = {cwd} - with patch.object(folder_watcher, "Watcher"): - assert await async_setup_component( - hass, - folder_watcher.DOMAIN, - {folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: cwd}}, - ) + freezer.move_to("2022-04-19 10:31:02+00:00") + path = tmp_path.as_posix() + hass.config.allowlist_external_dirs = {path} + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + title=f"Folder Watcher {path!s}", + data={}, + options={"folder": str(path), "patterns": ["*"]}, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED def test_event() -> None: diff --git a/tests/components/forecast_solar/snapshots/test_init.ambr b/tests/components/forecast_solar/snapshots/test_init.ambr index 43145bcef9e..c0db54c2d4e 100644 --- a/tests/components/forecast_solar/snapshots/test_init.ambr 
+++ b/tests/components/forecast_solar/snapshots/test_init.ambr @@ -6,6 +6,8 @@ 'longitude': 4.42, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'forecast_solar', 'entry_id': , 'minor_version': 1, @@ -21,6 +23,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Green House', 'unique_id': 'unique', 'version': 2, diff --git a/tests/components/forecast_solar/test_config_flow.py b/tests/components/forecast_solar/test_config_flow.py index abaad402e1b..8fffb5096bc 100644 --- a/tests/components/forecast_solar/test_config_flow.py +++ b/tests/components/forecast_solar/test_config_flow.py @@ -2,6 +2,8 @@ from unittest.mock import AsyncMock +import pytest + from homeassistant.components.forecast_solar.const import ( CONF_AZIMUTH, CONF_DAMPING_EVENING, @@ -25,10 +27,10 @@ async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_NAME: "Name", @@ -40,13 +42,16 @@ async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "Name" - assert result2.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Name" + assert config_entry.unique_id is None + assert config_entry.data == { CONF_LATITUDE: 52.42, CONF_LONGITUDE: 4.42, } - assert result2.get("options") == { + assert config_entry.options == { CONF_AZIMUTH: 142, CONF_DECLINATION: 42, CONF_MODULES_POWER: 4242, @@ -55,9 +60,9 @@ async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_options_flow_invalid_api( hass: HomeAssistant, - mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: """Test options config flow when API key is invalid.""" @@ -67,10 +72,10 @@ async def test_options_flow_invalid_api( result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "init" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" - result2 = await hass.config_entries.options.async_configure( + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ CONF_API_KEY: "solarPOWER!", @@ -84,27 +89,11 @@ async def test_options_flow_invalid_api( ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.FORM - assert result2["errors"] == {CONF_API_KEY: "invalid_api_key"} + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {CONF_API_KEY: "invalid_api_key"} - -async def test_options_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test config flow options.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - result = await 
hass.config_entries.options.async_init(mock_config_entry.entry_id) - - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "init" - - # With the API key - result2 = await hass.config_entries.options.async_configure( + # Ensure we can recover from this error + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ CONF_API_KEY: "SolarForecast150", @@ -118,8 +107,8 @@ async def test_options_flow( ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { CONF_API_KEY: "SolarForecast150", CONF_DECLINATION: 21, CONF_AZIMUTH: 22, @@ -130,9 +119,9 @@ async def test_options_flow( } -async def test_options_flow_without_key( +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_flow( hass: HomeAssistant, - mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: """Test config flow options.""" @@ -142,11 +131,53 @@ async def test_options_flow_without_key( result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "init" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # With the API key + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_API_KEY: "SolarForecast150", + CONF_DECLINATION: 21, + CONF_AZIMUTH: 22, + CONF_MODULES_POWER: 2122, + CONF_DAMPING_MORNING: 0.25, + CONF_DAMPING_EVENING: 0.25, + CONF_INVERTER_SIZE: 2000, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_API_KEY: "SolarForecast150", + CONF_DECLINATION: 21, + CONF_AZIMUTH: 22, + CONF_MODULES_POWER: 2122, + CONF_DAMPING_MORNING: 0.25, + CONF_DAMPING_EVENING: 0.25, + CONF_INVERTER_SIZE: 2000, + } + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_flow_without_key( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test config flow options.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" # Without the API key - result2 = await hass.config_entries.options.async_configure( + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ CONF_DECLINATION: 21, @@ -159,8 +190,8 @@ async def test_options_flow_without_key( ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { CONF_API_KEY: None, CONF_DECLINATION: 21, CONF_AZIMUTH: 22, diff --git a/tests/components/freebox/test_alarm_control_panel.py b/tests/components/freebox/test_alarm_control_panel.py index e4ee8f63b2c..b02e4c974ff 100644 --- a/tests/components/freebox/test_alarm_control_panel.py +++ b/tests/components/freebox/test_alarm_control_panel.py @@ -8,6 +8,7 @@ from freezegun.api import FrozenDateTimeFactory from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, + 
AlarmControlPanelState, ) from homeassistant.components.freebox import SCAN_INTERVAL from homeassistant.const import ( @@ -16,11 +17,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -59,7 +55,7 @@ async def test_alarm_changed_from_external( # Initial state assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMING + == AlarmControlPanelState.ARMING ) # Now simulate a changed status @@ -73,7 +69,7 @@ async def test_alarm_changed_from_external( assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMED_AWAY + == AlarmControlPanelState.ARMED_AWAY ) @@ -98,7 +94,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non # Initial state: arm_away assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMED_AWAY + == AlarmControlPanelState.ARMED_AWAY ) # Now call for a change -> disarmed @@ -113,7 +109,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_DISARMED + == AlarmControlPanelState.DISARMED ) # Now call for a change -> arm_away @@ -128,7 +124,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMING + == AlarmControlPanelState.ARMING ) # Now call for a change -> arm_home @@ -144,7 +140,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMED_HOME + == AlarmControlPanelState.ARMED_HOME ) # Now call for a change -> trigger @@ -159,7 +155,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_TRIGGERED + == AlarmControlPanelState.TRIGGERED ) diff --git a/tests/components/freedompro/test_cover.py b/tests/components/freedompro/test_cover.py index ba48da1d1d4..bcba1e0b917 100644 --- a/tests/components/freedompro/test_cover.py +++ b/tests/components/freedompro/test_cover.py @@ -5,14 +5,16 @@ from unittest.mock import ANY, patch import pytest -from homeassistant.components.cover import ATTR_POSITION, DOMAIN as COVER_DOMAIN +from homeassistant.components.cover import ( + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + CoverState, +) from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - STATE_CLOSED, - STATE_OPEN, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -56,7 +58,7 @@ async def test_cover_get_state( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -80,7 +82,7 @@ async def test_cover_get_state( assert entry assert entry.unique_id == uid - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN @pytest.mark.parametrize( @@ -107,7 +109,7 @@ async def test_cover_set_position( state = hass.states.get(entity_id) 
assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -133,7 +135,7 @@ async def test_cover_set_position( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes["current_position"] == 33 @@ -171,7 +173,7 @@ async def test_cover_close( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -196,7 +198,7 @@ async def test_cover_close( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -223,7 +225,7 @@ async def test_cover_open( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -249,4 +251,4 @@ async def test_cover_open( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN diff --git a/tests/components/freedompro/test_lock.py b/tests/components/freedompro/test_lock.py index 94f5609ee47..a17217c49e8 100644 --- a/tests/components/freedompro/test_lock.py +++ b/tests/components/freedompro/test_lock.py @@ -7,8 +7,9 @@ from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, + LockState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity @@ -39,7 +40,7 @@ async def test_lock_get_state( entity_id = "lock.lock" state = hass.states.get(entity_id) assert state - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get("friendly_name") == "lock" entry = entity_registry.async_get(entity_id) @@ -63,7 +64,7 @@ async def test_lock_get_state( assert entry assert entry.unique_id == uid - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED async def test_lock_set_unlock( @@ -87,7 +88,7 @@ async def test_lock_set_unlock( state = hass.states.get(entity_id) assert state - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED assert state.attributes.get("friendly_name") == "lock" entry = entity_registry.async_get(entity_id) @@ -113,7 +114,7 @@ async def test_lock_set_unlock( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED async def test_lock_set_lock( @@ -126,7 +127,7 @@ async def test_lock_set_lock( entity_id = "lock.lock" state = hass.states.get(entity_id) assert state - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get("friendly_name") == "lock" entry = entity_registry.async_get(entity_id) @@ -153,4 +154,4 @@ async def test_lock_set_lock( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_LOCKED + 
assert state.state == LockState.LOCKED diff --git a/tests/components/fritz/const.py b/tests/components/fritz/const.py index 0d1222dfcda..acd96879b1e 100644 --- a/tests/components/fritz/const.py +++ b/tests/components/fritz/const.py @@ -655,7 +655,23 @@ MOCK_MESH_DATA = { "cur_data_rate_tx": 0, "cur_availability_rx": 99, "cur_availability_tx": 99, - } + }, + { + "uid": "nl-79", + "type": "LAN", + "state": "DISCONNECTED", + "last_connected": 1642872667, + "node_1_uid": "n-167", + "node_2_uid": "n-76", + "node_interface_1_uid": "ni-140", + "node_interface_2_uid": "ni-77", + "max_data_rate_rx": 1000000, + "max_data_rate_tx": 1000000, + "cur_data_rate_rx": 0, + "cur_data_rate_tx": 0, + "cur_availability_rx": 99, + "cur_availability_tx": 99, + }, ], } ], @@ -904,6 +920,14 @@ MOCK_HOST_ATTRIBUTES_DATA = [ }, ] +MOCK_CALL_DEFLECTION_DATA = { + "X_AVM-DE_OnTel1": { + "GetDeflections": { + "NewDeflectionList": "00fromAll+1234657890eImmediately" + } + } +} + MOCK_USER_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] MOCK_USER_INPUT_ADVANCED = MOCK_USER_DATA MOCK_USER_INPUT_SIMPLE = { diff --git a/tests/components/fritz/snapshots/test_button.ambr b/tests/components/fritz/snapshots/test_button.ambr new file mode 100644 index 00000000000..ed0b0e72160 --- /dev/null +++ b/tests/components/fritz/snapshots/test_button.ambr @@ -0,0 +1,235 @@ +# serializer version: 1 +# name: test_button_setup[button.mock_title_cleanup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_title_cleanup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cleanup', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cleanup', + 'unique_id': '1C:ED:6F:12:34:11-cleanup', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.mock_title_cleanup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Cleanup', + }), + 'context': , + 'entity_id': 'button.mock_title_cleanup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.mock_title_firmware_update-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_title_firmware_update', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware update', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'firmware_update', + 'unique_id': '1C:ED:6F:12:34:11-firmware_update', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.mock_title_firmware_update-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'update', + 'friendly_name': 'Mock Title Firmware update', + }), + 'context': , + 'entity_id': 'button.mock_title_firmware_update', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.mock_title_reconnect-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_title_reconnect', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reconnect', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reconnect', + 'unique_id': '1C:ED:6F:12:34:11-reconnect', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.mock_title_reconnect-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Mock Title Reconnect', + }), + 'context': , + 'entity_id': 'button.mock_title_reconnect', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.mock_title_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_title_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-reboot', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.mock_title_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Mock Title Restart', + }), + 'context': , + 'entity_id': 'button.mock_title_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.printer_wake_on_lan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.printer_wake_on_lan', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:lan-pending', + 'original_name': 'printer Wake on LAN', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AA:BB:CC:00:11:22_wake_on_lan', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.printer_wake_on_lan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'printer Wake on LAN', + 'icon': 'mdi:lan-pending', + }), + 'context': , + 'entity_id': 'button.printer_wake_on_lan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/fritz/snapshots/test_diagnostics.ambr b/tests/components/fritz/snapshots/test_diagnostics.ambr new file mode 100644 index 
00000000000..9b5b8c9353a --- /dev/null +++ b/tests/components/fritz/snapshots/test_diagnostics.ambr @@ -0,0 +1,71 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'device_info': dict({ + 'client_devices': list([ + dict({ + 'connected_to': 'fritz.box', + 'connection_type': 'LAN', + 'hostname': 'printer', + 'is_connected': True, + 'wan_access': True, + }), + ]), + 'connection_type': 'WANPPPConnection', + 'current_firmware': '7.29', + 'discovered_services': list([ + 'DeviceInfo1', + 'Hosts1', + 'LANEthernetInterfaceConfig1', + 'Layer3Forwarding1', + 'UserInterface1', + 'WANCommonIFC1', + 'WANCommonInterfaceConfig1', + 'WANDSLInterfaceConfig1', + 'WANIPConn1', + 'WANPPPConnection1', + 'WLANConfiguration1', + 'X_AVM-DE_Homeauto1', + 'X_AVM-DE_HostFilter1', + ]), + 'is_router': True, + 'last_exception': None, + 'last_update success': True, + 'latest_firmware': None, + 'mesh_role': 'master', + 'model': 'FRITZ!Box 7530 AX', + 'unique_id': '1C:ED:XX:XX:34:11', + 'update_available': False, + 'wan_link_properties': dict({ + 'NewLayer1DownstreamMaxBitRate': 318557000, + 'NewLayer1UpstreamMaxBitRate': 51805000, + 'NewPhysicalLinkStatus': 'Up', + 'NewWANAccessType': 'DSL', + }), + }), + 'entry': dict({ + 'data': dict({ + 'host': 'fake_host', + 'password': '**REDACTED**', + 'port': '1234', + 'ssl': False, + 'username': '**REDACTED**', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'fritz', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/fritz/snapshots/test_image.ambr b/tests/components/fritz/snapshots/test_image.ambr index a51ab015a89..6ef7413998b 100644 --- a/tests/components/fritz/snapshots/test_image.ambr +++ b/tests/components/fritz/snapshots/test_image.ambr @@ -1,10 +1,10 @@ # serializer version: 1 # name: test_image_entity[fc_data0] - b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x94\x00\x00\x00\x94\x01\x00\x00\x00\x00]G=y\x00\x00\x00\xf5IDATx\xda\xedVQ\x0eC!\x0c"\xbb@\xef\x7fKn\xe0\x00\xfd\xdb\xcf6\xf9|\xc6\xc4\xc6\x0f\xd2\x02\xadb},\xe2\xb9\xfb\xe5\x0e\xc0(\x18\xf2\x84/|\xaeo\xef\x847\xda\x14\x1af\x1c\xde\xe3\x19(X\tKxN\xb2\x87\x17j9\x1d\xd7\xb7o\x8c44\x1a3\xbe\x16x\x03\xc1`\xe5k\x87Oh'\xf1\x07\xde\xd1\xcd\xa1\xc2\x877\x13]U\xfey\xe2Y\x95\xfe\xd2\x1a\xe0\xd0\x9bD\x91\x7f\xfcO\xfa\xca\xedg\xbc\xb1\xb4\xfb\x8a\x87\x16\xa2\x88\x1f\xf0\x11a\xc1_6/\xd1#\xc2\xb0\xf0/\xac}\xba\xfe\xd9\xe4\xaf\xd8n\xf1B\xbf\xcb_)<\xf3\xcfn\xf2\xc7\xba\x9f\xfam\xf4{\x1eQ\x82\xb3\xd1O;=\xae\x80\xc9\xaa\x7f2>\xf2\xd04\xf5k\xf0\xc4\xfe\xcc\x80f\xfeD\xfc}\x01\xe8\xfc\xdf\xc1u{*\xfd\xd3\xbe7@\xa7\xd4/5\x94\x06\xae\xfa\xff\xa6\xe7\xe6_\xe2\x97\xba\x99\x80\xe5\xfcO\xeby\x03l\xff?\xb8\xf8l\xe7\xaf\xa1j\xf4{\x03\x17\xfa\xb4\x19\xc7\xc5\xe1\xd3\x00\x00\x00\x00IEND\xaeB`\x82" # --- diff --git a/tests/components/fritz/snapshots/test_sensor.ambr b/tests/components/fritz/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..50744815aa5 --- /dev/null +++ b/tests/components/fritz/snapshots/test_sensor.ambr @@ -0,0 +1,771 @@ +# serializer version: 1 +# name: test_sensor_setup[sensor.mock_title_connection_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 
'entity_id': 'sensor.mock_title_connection_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connection uptime', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'connection_uptime', + 'unique_id': '1C:ED:6F:12:34:11-connection_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup[sensor.mock_title_connection_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Connection uptime', + }), + 'context': , + 'entity_id': 'sensor.mock_title_connection_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-09-01T10:11:33+00:00', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_download_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_download_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Download throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'kb_s_received', + 'unique_id': '1C:ED:6F:12:34:11-kb_s_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_download_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Download throughput', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_download_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '67.6', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_external_ip-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_external_ip', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'External IP', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'external_ip', + 'unique_id': '1C:ED:6F:12:34:11-external_ip', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup[sensor.mock_title_external_ip-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title External IP', + }), + 'context': , + 'entity_id': 'sensor.mock_title_external_ip', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2.3.4', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_external_ipv6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': 
None, + 'entity_id': 'sensor.mock_title_external_ipv6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'External IPv6', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'external_ipv6', + 'unique_id': '1C:ED:6F:12:34:11-external_ipv6', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup[sensor.mock_title_external_ipv6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title External IPv6', + }), + 'context': , + 'entity_id': 'sensor.mock_title_external_ipv6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'fec0::1', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_gb_received-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_gb_received', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'GB received', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gb_received', + 'unique_id': '1C:ED:6F:12:34:11-gb_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_gb_received-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Mock Title GB received', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_gb_received', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.2', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_gb_sent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_gb_sent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'GB sent', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gb_sent', + 'unique_id': '1C:ED:6F:12:34:11-gb_sent', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_gb_sent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Mock Title GB sent', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_gb_sent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.7', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_last_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_last_restart', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last restart', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_uptime', + 'unique_id': '1C:ED:6F:12:34:11-device_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup[sensor.mock_title_last_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Last restart', + }), + 'context': , + 'entity_id': 'sensor.mock_title_last_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-08-03T16:30:21+00:00', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_noise_margin-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_download_noise_margin', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Link download noise margin', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_noise_margin_received', + 'unique_id': '1C:ED:6F:12:34:11-link_noise_margin_received', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_noise_margin-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Link download noise margin', + 'unit_of_measurement': 'dB', + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_download_noise_margin', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_power_attenuation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_download_power_attenuation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Link download power attenuation', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_attenuation_received', + 'unique_id': '1C:ED:6F:12:34:11-link_attenuation_received', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_power_attenuation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Link download power attenuation', + 'unit_of_measurement': 'dB', + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_download_power_attenuation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_download_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Link download throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_kb_s_received', + 'unique_id': '1C:ED:6F:12:34:11-link_kb_s_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Link download throughput', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_download_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '318557.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_noise_margin-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_upload_noise_margin', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Link upload noise margin', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_noise_margin_sent', + 'unique_id': '1C:ED:6F:12:34:11-link_noise_margin_sent', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_noise_margin-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Link upload noise margin', + 'unit_of_measurement': 'dB', + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_upload_noise_margin', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_power_attenuation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_upload_power_attenuation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Link upload power attenuation', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_attenuation_sent', + 'unique_id': '1C:ED:6F:12:34:11-link_attenuation_sent', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_power_attenuation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Link upload power attenuation', + 'unit_of_measurement': 'dB', + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_upload_power_attenuation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.0', + }) +# --- +# 
name: test_sensor_setup[sensor.mock_title_link_upload_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_upload_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Link upload throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_kb_s_sent', + 'unique_id': '1C:ED:6F:12:34:11-link_kb_s_sent', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Link upload throughput', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_upload_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '51805.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_max_connection_download_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_max_connection_download_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max connection download throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_kb_s_received', + 'unique_id': '1C:ED:6F:12:34:11-max_kb_s_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_max_connection_download_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Max connection download throughput', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_max_connection_download_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10087.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_max_connection_upload_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_max_connection_upload_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max connection upload throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_kb_s_sent', + 'unique_id': '1C:ED:6F:12:34:11-max_kb_s_sent', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_max_connection_upload_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 
'friendly_name': 'Mock Title Max connection upload throughput', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_max_connection_upload_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2105.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_upload_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_upload_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Upload throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'kb_s_sent', + 'unique_id': '1C:ED:6F:12:34:11-kb_s_sent', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_upload_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Upload throughput', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_upload_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.4', + }) +# --- diff --git a/tests/components/fritz/snapshots/test_switch.ambr b/tests/components/fritz/snapshots/test_switch.ambr new file mode 100644 index 00000000000..b34a3626fe2 --- /dev/null +++ b/tests/components/fritz/snapshots/test_switch.ambr @@ -0,0 +1,571 @@ +# serializer version: 1 +# name: test_switch_setup[fc_data0][switch.mock_title_wi_fi_wifi_2_4ghz-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_2_4ghz', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data0][switch.mock_title_wi_fi_wifi_2_4ghz-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data0][switch.mock_title_wi_fi_wifi_5ghz-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 
'original_name': 'Mock Title Wi-Fi WiFi (5Ghz)', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_5ghz', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data0][switch.mock_title_wi_fi_wifi_5ghz-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi (5Ghz)', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data0][switch.printer_internet_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.printer_internet_access', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:router-wireless-settings', + 'original_name': 'printer Internet Access', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AA:BB:CC:00:11:22_internet_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data0][switch.printer_internet_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'printer Internet Access', + 'icon': 'mdi:router-wireless-settings', + }), + 'context': , + 'entity_id': 'switch.printer_internet_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data1][switch.mock_title_wi_fi_wifi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi WiFi', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data1][switch.mock_title_wi_fi_wifi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data1][switch.mock_title_wi_fi_wifi2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi 
WiFi2', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi2', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data1][switch.mock_title_wi_fi_wifi2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi2', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data1][switch.printer_internet_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.printer_internet_access', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:router-wireless-settings', + 'original_name': 'printer Internet Access', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AA:BB:CC:00:11:22_internet_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data1][switch.printer_internet_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'printer Internet Access', + 'icon': 'mdi:router-wireless-settings', + }), + 'context': , + 'entity_id': 'switch.printer_internet_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data2][switch.mock_title_wi_fi_wifi_2_4ghz-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_2_4ghz', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data2][switch.mock_title_wi_fi_wifi_2_4ghz-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data2][switch.mock_title_wi_fi_wifi_5ghz-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title 
Wi-Fi WiFi+ (5Ghz)', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_5ghz', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data2][switch.mock_title_wi_fi_wifi_5ghz-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi+ (5Ghz)', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data2][switch.printer_internet_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.printer_internet_access', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:router-wireless-settings', + 'original_name': 'printer Internet Access', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AA:BB:CC:00:11:22_internet_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data2][switch.printer_internet_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'printer Internet Access', + 'icon': 'mdi:router-wireless-settings', + }), + 'context': , + 'entity_id': 'switch.printer_internet_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data3][switch.mock_title_call_deflection_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_call_deflection_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:phone-forward', + 'original_name': 'Call deflection 0', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-call_deflection_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data3][switch.mock_title_call_deflection_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'deflection_to_number': '+1234657890', + 'friendly_name': 'Mock Title Call deflection 0', + 'icon': 'mdi:phone-forward', + 'mode': 'Immediately', + 'number': None, + 'outgoing': None, + 'phonebook_id': None, + 'type': 'fromAll', + }), + 'context': , + 'entity_id': 'switch.mock_title_call_deflection_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_setup[fc_data3][switch.mock_title_wi_fi_mywifi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_mywifi', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi MyWifi', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_mywifi', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data3][switch.mock_title_wi_fi_mywifi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi MyWifi', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_mywifi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data3][switch.printer_internet_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.printer_internet_access', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:router-wireless-settings', + 'original_name': 'printer Internet Access', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AA:BB:CC:00:11:22_internet_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data3][switch.printer_internet_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'printer Internet Access', + 'icon': 'mdi:router-wireless-settings', + }), + 'context': , + 'entity_id': 'switch.printer_internet_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/fritz/snapshots/test_update.ambr b/tests/components/fritz/snapshots/test_update.ambr new file mode 100644 index 00000000000..3c7880d01e7 --- /dev/null +++ b/tests/components/fritz/snapshots/test_update.ambr @@ -0,0 +1,175 @@ +# serializer version: 1 +# name: test_available_update_can_be_installed[update.mock_title_fritz_os-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.mock_title_fritz_os', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'FRITZ!OS', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-update', + 'unit_of_measurement': None, + }) +# --- +# name: test_available_update_can_be_installed[update.mock_title_fritz_os-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', + 'friendly_name': 'Mock Title FRITZ!OS', + 'in_progress': False, + 'installed_version': '7.29', + 'latest_version': '7.50', + 'release_summary': None, + 'release_url': 'http://download.avm.de/fritzbox/fritzbox-7530-ax/deutschland/fritz.os/info_de.txt', + 'skipped_version': None, + 
'supported_features': , + 'title': 'FRITZ!OS', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.mock_title_fritz_os', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_available[update.mock_title_fritz_os-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.mock_title_fritz_os', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'FRITZ!OS', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-update', + 'unit_of_measurement': None, + }) +# --- +# name: test_update_available[update.mock_title_fritz_os-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', + 'friendly_name': 'Mock Title FRITZ!OS', + 'in_progress': False, + 'installed_version': '7.29', + 'latest_version': '7.50', + 'release_summary': None, + 'release_url': 'http://download.avm.de/fritzbox/fritzbox-7530-ax/deutschland/fritz.os/info_de.txt', + 'skipped_version': None, + 'supported_features': , + 'title': 'FRITZ!OS', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.mock_title_fritz_os', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_entities_initialized[update.mock_title_fritz_os-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.mock_title_fritz_os', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'FRITZ!OS', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-update', + 'unit_of_measurement': None, + }) +# --- +# name: test_update_entities_initialized[update.mock_title_fritz_os-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', + 'friendly_name': 'Mock Title FRITZ!OS', + 'in_progress': False, + 'installed_version': '7.29', + 'latest_version': '7.29', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': 'FRITZ!OS', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.mock_title_fritz_os', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/fritz/test_button.py b/tests/components/fritz/test_button.py index 79639835003..068b07c4337 100644 --- a/tests/components/fritz/test_button.py +++ b/tests/components/fritz/test_button.py @@ -5,11 +5,12 @@ from datetime import timedelta from unittest.mock import patch import pytest +from syrupy.assertion import 
SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.fritz.const import DOMAIN, MeshRoles from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util.dt import utcnow @@ -21,24 +22,27 @@ from .const import ( MOCK_USER_DATA, ) -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -async def test_button_setup(hass: HomeAssistant, fc_class_mock, fh_class_mock) -> None: +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_button_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + fc_class_mock, + fh_class_mock, + snapshot: SnapshotAssertion, +) -> None: """Test setup of Fritz!Tools buttons.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.BUTTON]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - buttons = hass.states.async_all(BUTTON_DOMAIN) - assert len(buttons) == 4 - - for button in buttons: - assert button.state == STATE_UNKNOWN + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) @pytest.mark.parametrize( diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index a54acbb0ac0..84f1b240b88 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -10,6 +10,7 @@ from fritzconnection.core.exceptions import ( ) import pytest +from homeassistant.components import ssdp from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME, @@ -22,13 +23,7 @@ from homeassistant.components.fritz.const import ( ERROR_UNKNOWN, FRITZ_AUTH_EXCEPTIONS, ) -from homeassistant.components.ssdp import ATTR_UPNP_UDN -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -310,6 +305,9 @@ async def test_reauth_successful( mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -335,15 +333,6 @@ async def test_reauth_successful( mock_request_post.return_value.status_code = 200 mock_request_post.return_value.text = MOCK_REQUEST - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -376,20 +365,14 @@ async def test_reauth_not_successful( mock_config = MockConfigEntry(domain=DOMAIN, 
data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.fritz.config_flow.FritzConnection", side_effect=side_effect, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -469,18 +452,13 @@ async def test_reconfigure_successful( mock_request_post.return_value.status_code = 200 mock_request_post.return_value.text = MOCK_REQUEST - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": mock_config.entry_id, - "show_advanced_options": show_advanced_options, - }, - data=mock_config.data, + result = await mock_config.start_reconfigure_flow( + hass, + show_advanced_options=show_advanced_options, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -531,14 +509,10 @@ async def test_reconfigure_not_successful( mock_request_post.return_value.status_code = 200 mock_request_post.return_value.text = MOCK_REQUEST - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -549,7 +523,7 @@ async def test_reconfigure_not_successful( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"]["base"] == ERROR_CANNOT_CONNECT result = await hass.config_entries.flow.async_configure( @@ -670,7 +644,7 @@ async def test_ssdp_already_in_progress_host( MOCK_NO_UNIQUE_ID = dataclasses.replace(MOCK_SSDP_DATA) MOCK_NO_UNIQUE_ID.upnp = MOCK_NO_UNIQUE_ID.upnp.copy() - del MOCK_NO_UNIQUE_ID.upnp[ATTR_UPNP_UDN] + del MOCK_NO_UNIQUE_ID.upnp[ssdp.ATTR_UPNP_UDN] result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_NO_UNIQUE_ID ) @@ -763,3 +737,23 @@ async def test_options_flow(hass: HomeAssistant) -> None: CONF_OLD_DISCOVERY: False, CONF_CONSIDER_HOME: 37, } + + +async def test_ssdp_ipv6_link_local(hass: HomeAssistant) -> None: + """Test ignoring ipv6-link-local while ssdp discovery.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_SSDP}, + data=ssdp.SsdpServiceInfo( + ssdp_usn="mock_usn", + ssdp_st="mock_st", + ssdp_location="https://[fe80::1ff:fe23:4567:890a]:12345/test", + upnp={ + ssdp.ATTR_UPNP_FRIENDLY_NAME: "fake_name", + ssdp.ATTR_UPNP_UDN: "uuid:only-a-test", + }, + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "ignore_ip6_link_local" diff --git a/tests/components/fritz/test_diagnostics.py b/tests/components/fritz/test_diagnostics.py index 55196eb6988..cbcaa57dab4 100644 --- 
a/tests/components/fritz/test_diagnostics.py +++ b/tests/components/fritz/test_diagnostics.py @@ -2,14 +2,13 @@ from __future__ import annotations -from homeassistant.components.diagnostics import REDACTED +from syrupy import SnapshotAssertion +from syrupy.filters import props + from homeassistant.components.fritz.const import DOMAIN -from homeassistant.components.fritz.coordinator import AvmWrapper -from homeassistant.components.fritz.diagnostics import TO_REDACT -from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from .const import MOCK_MESH_MASTER_MAC, MOCK_USER_DATA +from .const import MOCK_USER_DATA from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -21,64 +20,16 @@ async def test_entry_diagnostics( hass_client: ClientSessionGenerator, fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) + assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED - entry_dict = entry.as_dict() - for key in TO_REDACT: - entry_dict["data"][key] = REDACTED result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - avm_wrapper: AvmWrapper = hass.data[DOMAIN][entry.entry_id] - assert result == { - "entry": entry_dict, - "device_info": { - "client_devices": [ - { - "connected_to": device.connected_to, - "connection_type": device.connection_type, - "hostname": device.hostname, - "is_connected": device.is_connected, - "last_activity": device.last_activity.isoformat(), - "wan_access": device.wan_access, - } - for _, device in avm_wrapper.devices.items() - ], - "connection_type": "WANPPPConnection", - "current_firmware": "7.29", - "discovered_services": [ - "DeviceInfo1", - "Hosts1", - "LANEthernetInterfaceConfig1", - "Layer3Forwarding1", - "UserInterface1", - "WANCommonIFC1", - "WANCommonInterfaceConfig1", - "WANDSLInterfaceConfig1", - "WANIPConn1", - "WANPPPConnection1", - "WLANConfiguration1", - "X_AVM-DE_Homeauto1", - "X_AVM-DE_HostFilter1", - ], - "is_router": True, - "last_exception": None, - "last_update success": True, - "latest_firmware": None, - "mesh_role": "master", - "model": "FRITZ!Box 7530 AX", - "unique_id": MOCK_MESH_MASTER_MAC.replace("6F:12", "XX:XX"), - "update_available": False, - "wan_link_properties": { - "NewLayer1DownstreamMaxBitRate": 318557000, - "NewLayer1UpstreamMaxBitRate": 51805000, - "NewPhysicalLinkStatus": "Up", - "NewWANAccessType": "DSL", - }, - }, - } + assert result == snapshot( + exclude=props("created_at", "modified_at", "entry_id", "last_activity") + ) diff --git a/tests/components/fritz/test_image.py b/tests/components/fritz/test_image.py index 9097aab1762..d8652bd6508 100644 --- a/tests/components/fritz/test_image.py +++ b/tests/components/fritz/test_image.py @@ -24,6 +24,7 @@ from tests.typing import ClientSessionGenerator GUEST_WIFI_ENABLED: dict[str, dict] = { "WLANConfiguration0": {}, "WLANConfiguration1": { + "GetBeaconAdvertisement": {"NewBeaconAdvertisementEnabled": 1}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -43,6 +44,7 @@ GUEST_WIFI_ENABLED: dict[str, dict] = { GUEST_WIFI_CHANGED: dict[str, dict] = { "WLANConfiguration0": {}, "WLANConfiguration1": { + "GetBeaconAdvertisement": {"NewBeaconAdvertisementEnabled": 1}, "GetInfo": { 
"NewEnable": True, "NewStatus": "Up", @@ -62,6 +64,7 @@ GUEST_WIFI_CHANGED: dict[str, dict] = { GUEST_WIFI_DISABLED: dict[str, dict] = { "WLANConfiguration0": {}, "WLANConfiguration1": { + "GetBeaconAdvertisement": {"NewBeaconAdvertisementEnabled": 1}, "GetInfo": { "NewEnable": False, "NewStatus": "Up", diff --git a/tests/components/fritz/test_sensor.py b/tests/components/fritz/test_sensor.py index f8114238376..7dec640b898 100644 --- a/tests/components/fritz/test_sensor.py +++ b/tests/components/fritz/test_sensor.py @@ -2,127 +2,48 @@ from __future__ import annotations -from datetime import timedelta -from typing import Any +from datetime import UTC, datetime, timedelta +from unittest.mock import patch from fritzconnection.core.exceptions import FritzConnectionException +import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.fritz.const import DOMAIN -from homeassistant.components.fritz.sensor import SENSOR_TYPES -from homeassistant.components.sensor import ( - ATTR_STATE_CLASS, - DOMAIN as SENSOR_DOMAIN, - SensorDeviceClass, - SensorStateClass, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_STATE, - ATTR_UNIT_OF_MEASUREMENT, - STATE_UNAVAILABLE, -) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util from .const import MOCK_USER_DATA -from tests.common import MockConfigEntry, async_fire_time_changed - -SENSOR_STATES: dict[str, dict[str, Any]] = { - "sensor.mock_title_external_ip": { - ATTR_STATE: "1.2.3.4", - }, - "sensor.mock_title_external_ipv6": { - ATTR_STATE: "fec0::1", - }, - "sensor.mock_title_last_restart": { - # ATTR_STATE: "2022-02-05T17:46:04+00:00", - ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP, - }, - "sensor.mock_title_connection_uptime": { - # ATTR_STATE: "2022-03-06T11:27:16+00:00", - ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP, - }, - "sensor.mock_title_upload_throughput": { - ATTR_STATE: "3.4", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - ATTR_UNIT_OF_MEASUREMENT: "kB/s", - }, - "sensor.mock_title_download_throughput": { - ATTR_STATE: "67.6", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - ATTR_UNIT_OF_MEASUREMENT: "kB/s", - }, - "sensor.mock_title_max_connection_upload_throughput": { - ATTR_STATE: "2105.0", - ATTR_UNIT_OF_MEASUREMENT: "kbit/s", - }, - "sensor.mock_title_max_connection_download_throughput": { - ATTR_STATE: "10087.0", - ATTR_UNIT_OF_MEASUREMENT: "kbit/s", - }, - "sensor.mock_title_gb_sent": { - ATTR_STATE: "1.7", - ATTR_STATE_CLASS: SensorStateClass.TOTAL_INCREASING, - ATTR_UNIT_OF_MEASUREMENT: "GB", - }, - "sensor.mock_title_gb_received": { - ATTR_STATE: "5.2", - ATTR_STATE_CLASS: SensorStateClass.TOTAL_INCREASING, - ATTR_UNIT_OF_MEASUREMENT: "GB", - }, - "sensor.mock_title_link_upload_throughput": { - ATTR_STATE: "51805.0", - ATTR_UNIT_OF_MEASUREMENT: "kbit/s", - }, - "sensor.mock_title_link_download_throughput": { - ATTR_STATE: "318557.0", - ATTR_UNIT_OF_MEASUREMENT: "kbit/s", - }, - "sensor.mock_title_link_upload_noise_margin": { - ATTR_STATE: "9.0", - ATTR_UNIT_OF_MEASUREMENT: "dB", - }, - "sensor.mock_title_link_download_noise_margin": { - ATTR_STATE: "8.0", - ATTR_UNIT_OF_MEASUREMENT: "dB", - }, - "sensor.mock_title_link_upload_power_attenuation": { - ATTR_STATE: "7.0", - ATTR_UNIT_OF_MEASUREMENT: "dB", 
- }, - "sensor.mock_title_link_download_power_attenuation": { - ATTR_STATE: "12.0", - ATTR_UNIT_OF_MEASUREMENT: "dB", - }, -} +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -async def test_sensor_setup(hass: HomeAssistant, fc_class_mock, fh_class_mock) -> None: +@pytest.mark.freeze_time(datetime(2024, 9, 1, 20, tzinfo=UTC)) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + fc_class_mock, + fh_class_mock, + snapshot: SnapshotAssertion, +) -> None: """Test setup of Fritz!Tools sensors.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.SENSOR]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - sensors = hass.states.async_all(SENSOR_DOMAIN) - assert len(sensors) == len(SENSOR_TYPES) - - for sensor in sensors: - assert SENSOR_STATES.get(sensor.entity_id) is not None - for key, val in SENSOR_STATES[sensor.entity_id].items(): - if key == ATTR_STATE: - assert sensor.state == val - else: - assert sensor.attributes.get(key) == val + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_sensor_update_fail( - hass: HomeAssistant, fc_class_mock, fh_class_mock + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, fc_class_mock, fh_class_mock ) -> None: """Test failed update of Fritz!Tools sensors.""" @@ -132,10 +53,12 @@ async def test_sensor_update_fail( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - fc_class_mock().call_action_side_effect(FritzConnectionException) + fc_class_mock().call_action_side_effect(FritzConnectionException("Boom")) async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=300)) await hass.async_block_till_done(wait_background_tasks=True) + assert "Error while uptaing the data: Boom" in caplog.text + sensors = hass.states.async_all(SENSOR_DOMAIN) for sensor in sensors: assert sensor.state == STATE_UNAVAILABLE diff --git a/tests/components/fritz/test_switch.py b/tests/components/fritz/test_switch.py index b82587d42bd..fdf76d54588 100644 --- a/tests/components/fritz/test_switch.py +++ b/tests/components/fritz/test_switch.py @@ -2,16 +2,19 @@ from __future__ import annotations +from unittest.mock import patch + import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.fritz.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .const import MOCK_FB_SERVICES, MOCK_USER_DATA +from .const import MOCK_CALL_DEFLECTION_DATA, MOCK_FB_SERVICES, MOCK_USER_DATA -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = { "WLANConfiguration1": { @@ -166,36 +169,28 @@ MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = { @pytest.mark.parametrize( - ("fc_data", "expected_wifi_names"), + ("fc_data"), [ - ( - {**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_SAME_SSID}, - ["WiFi (2.4Ghz)", "WiFi (5Ghz)"], - ), - ({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF_SSID}, ["WiFi", "WiFi2"]), - ( - 
{**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF2_SSID}, - ["WiFi (2.4Ghz)", "WiFi+ (5Ghz)"], - ), + ({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_SAME_SSID}), + ({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF_SSID}), + ({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF2_SSID}), + ({**MOCK_FB_SERVICES, **MOCK_CALL_DEFLECTION_DATA}), ], ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_switch_setup( hass: HomeAssistant, - expected_wifi_names: list[str], + entity_registry: er.EntityRegistry, fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> None: """Test setup of Fritz!Tools switches.""" - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - assert entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) - switches = hass.states.async_all(Platform.SWITCH) - assert len(switches) == 3 - assert switches[0].name == f"Mock Title Wi-Fi {expected_wifi_names[0]}" - assert switches[1].name == f"Mock Title Wi-Fi {expected_wifi_names[1]}" - assert switches[2].name == "printer Internet Access" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/fritz/test_update.py b/tests/components/fritz/test_update.py index 5d7ef852d4c..72997b1aa12 100644 --- a/tests/components/fritz/test_update.py +++ b/tests/components/fritz/test_update.py @@ -2,10 +2,13 @@ from unittest.mock import patch +import pytest +from syrupy.assertion import SnapshotAssertion + from homeassistant.components.fritz.const import DOMAIN -from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN -from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .const import ( MOCK_FB_SERVICES, @@ -14,8 +17,7 @@ from .const import ( MOCK_USER_DATA, ) -from tests.common import MockConfigEntry -from tests.typing import ClientSessionGenerator +from tests.common import MockConfigEntry, snapshot_platform AVAILABLE_UPDATE = { "UserInterface1": { @@ -27,30 +29,33 @@ AVAILABLE_UPDATE = { } +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_update_entities_initialized( hass: HomeAssistant, - hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> None: """Test update entities.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.UPDATE]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - updates = hass.states.async_all(UPDATE_DOMAIN) - assert len(updates) == 1 + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_update_available( hass: HomeAssistant, - hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> 
None: """Test update entities.""" @@ -59,64 +64,39 @@ async def test_update_available( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.UPDATE]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - update = hass.states.get("update.mock_title_fritz_os") - assert update is not None - assert update.state == "on" - assert update.attributes.get("installed_version") == "7.29" - assert update.attributes.get("latest_version") == MOCK_FIRMWARE_AVAILABLE - assert update.attributes.get("release_url") == MOCK_FIRMWARE_RELEASE_URL - - -async def test_no_update_available( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - fc_class_mock, - fh_class_mock, -) -> None: - """Test update entities.""" - - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED - - update = hass.states.get("update.mock_title_fritz_os") - assert update is not None - assert update.state == "off" - assert update.attributes.get("installed_version") == "7.29" - assert update.attributes.get("latest_version") == "7.29" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_available_update_can_be_installed( hass: HomeAssistant, - hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> None: """Test update entities.""" fc_class_mock().override_services({**MOCK_FB_SERVICES, **AVAILABLE_UPDATE}) - with patch( - "homeassistant.components.fritz.coordinator.FritzBoxTools.async_trigger_firmware_update", - return_value=True, - ) as mocked_update_call: + with ( + patch( + "homeassistant.components.fritz.coordinator.FritzBoxTools.async_trigger_firmware_update", + return_value=True, + ) as mocked_update_call, + patch("homeassistant.components.fritz.PLATFORMS", [Platform.UPDATE]), + ): entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) + assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED - update = hass.states.get("update.mock_title_fritz_os") - assert update is not None - assert update.state == "on" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) await hass.services.async_call( "update", diff --git a/tests/components/fritzbox/__init__.py b/tests/components/fritzbox/__init__.py index bd68615212d..034b86497db 100644 --- a/tests/components/fritzbox/__init__.py +++ b/tests/components/fritzbox/__init__.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any from unittest.mock import Mock -from homeassistant.components.climate import PRESET_COMFORT, PRESET_ECO from homeassistant.components.fritzbox.const import DOMAIN from homeassistant.core import HomeAssistant @@ -110,9 +109,7 @@ class FritzDeviceClimateMock(FritzEntityBaseMock): target_temperature = 19.5 window_open = "fake_window" nextchange_temperature = 22.0 - nextchange_endperiod = 0 - nextchange_preset = 
PRESET_COMFORT - scheduled_preset = PRESET_ECO + nextchange_endperiod = 1726855200 class FritzDeviceClimateWithoutTempSensorMock(FritzDeviceClimateMock): diff --git a/tests/components/fritzbox/test_binary_sensor.py b/tests/components/fritzbox/test_binary_sensor.py index 3e1a2691f67..f4cc1b2e2ca 100644 --- a/tests/components/fritzbox/test_binary_sensor.py +++ b/tests/components/fritzbox/test_binary_sensor.py @@ -6,7 +6,10 @@ from unittest.mock import Mock from requests.exceptions import HTTPError -from homeassistant.components.binary_sensor import DOMAIN, BinarySensorDeviceClass +from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, + BinarySensorDeviceClass, +) from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN from homeassistant.const import ( @@ -27,7 +30,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{BINARY_SENSOR_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -148,5 +151,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_device_alarm") + state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.new_device_alarm") assert state diff --git a/tests/components/fritzbox/test_button.py b/tests/components/fritzbox/test_button.py index 89e8d8357dd..913f828efbc 100644 --- a/tests/components/fritzbox/test_button.py +++ b/tests/components/fritzbox/test_button.py @@ -3,7 +3,7 @@ from datetime import timedelta from unittest.mock import Mock -from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -19,7 +19,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{BUTTON_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -43,7 +43,7 @@ async def test_apply_template(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_ID}, True + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert fritz().apply_template.call_count == 1 @@ -67,5 +67,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_template") + state = hass.states.get(f"{BUTTON_DOMAIN}.new_template") assert state diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index 358eeaa714e..29f5742216f 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -1,7 +1,7 @@ """Tests for AVM Fritz!Box climate component.""" from datetime import timedelta -from unittest.mock import Mock, call +from unittest.mock import Mock, _Call, call from freezegun.api import FrozenDateTimeFactory import pytest @@ -15,7 +15,7 @@ from homeassistant.components.climate import ( ATTR_MIN_TEMP, ATTR_PRESET_MODE, 
ATTR_PRESET_MODES, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, PRESET_COMFORT, PRESET_ECO, SERVICE_SET_HVAC_MODE, @@ -56,7 +56,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{CLIMATE_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -123,7 +123,7 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_next_scheduled_change_time" ) assert state - assert state.state == "1970-01-01T00:00:00+00:00" + assert state.state == "2024-09-20T18:00:00+00:00" assert ( state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Next scheduled change time" @@ -270,110 +270,101 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: assert fritz().login.call_count == 4 -async def test_set_temperature_temperature(hass: HomeAssistant, fritz: Mock) -> None: - """Test setting temperature by temperature.""" +@pytest.mark.parametrize( + ("service_data", "expected_call_args"), + [ + ({ATTR_TEMPERATURE: 23}, [call(23)]), + ( + { + ATTR_HVAC_MODE: HVACMode.OFF, + ATTR_TEMPERATURE: 23, + }, + [call(0)], + ), + ( + { + ATTR_HVAC_MODE: HVACMode.HEAT, + ATTR_TEMPERATURE: 23, + }, + [call(23)], + ), + ], +) +async def test_set_temperature( + hass: HomeAssistant, + fritz: Mock, + service_data: dict, + expected_call_args: list[_Call], +) -> None: + """Test setting temperature.""" device = FritzDeviceClimateMock() assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 23}, + {ATTR_ENTITY_ID: ENTITY_ID, **service_data}, True, ) - assert device.set_target_temperature.call_args_list == [call(23)] + assert device.set_target_temperature.call_count == len(expected_call_args) + assert device.set_target_temperature.call_args_list == expected_call_args -async def test_set_temperature_mode_off(hass: HomeAssistant, fritz: Mock) -> None: - """Test setting temperature by mode.""" - device = FritzDeviceClimateMock() - assert await setup_config_entry( - hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_HVAC_MODE: HVACMode.OFF, - ATTR_TEMPERATURE: 23, - }, - True, - ) - assert device.set_target_temperature.call_args_list == [call(0)] - - -async def test_set_temperature_mode_heat(hass: HomeAssistant, fritz: Mock) -> None: - """Test setting temperature by mode.""" - device = FritzDeviceClimateMock() - device.target_temperature = 0.0 - assert await setup_config_entry( - hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_HVAC_MODE: HVACMode.HEAT, - ATTR_TEMPERATURE: 23, - }, - True, - ) - assert device.set_target_temperature.call_args_list == [call(22)] - - -async def test_set_hvac_mode_off(hass: HomeAssistant, fritz: Mock) -> None: +@pytest.mark.parametrize( + ("service_data", "target_temperature", "current_preset", "expected_call_args"), + [ + # mode off always sets target temperature to 0 + ({ATTR_HVAC_MODE: HVACMode.OFF}, 22, PRESET_COMFORT, [call(0)]), + ({ATTR_HVAC_MODE: HVACMode.OFF}, 16, PRESET_ECO, [call(0)]), + ({ATTR_HVAC_MODE: HVACMode.OFF}, 16, None, [call(0)]), + # 
mode heat sets target temperature based on current scheduled preset, + # when not already in mode heat + ({ATTR_HVAC_MODE: HVACMode.HEAT}, 0.0, PRESET_COMFORT, [call(22)]), + ({ATTR_HVAC_MODE: HVACMode.HEAT}, 0.0, PRESET_ECO, [call(16)]), + ({ATTR_HVAC_MODE: HVACMode.HEAT}, 0.0, None, [call(22)]), + # mode heat does not set target temperature, when already in mode heat + ({ATTR_HVAC_MODE: HVACMode.HEAT}, 16, PRESET_COMFORT, []), + ({ATTR_HVAC_MODE: HVACMode.HEAT}, 16, PRESET_ECO, []), + ({ATTR_HVAC_MODE: HVACMode.HEAT}, 16, None, []), + ({ATTR_HVAC_MODE: HVACMode.HEAT}, 22, PRESET_COMFORT, []), + ({ATTR_HVAC_MODE: HVACMode.HEAT}, 22, PRESET_ECO, []), + ({ATTR_HVAC_MODE: HVACMode.HEAT}, 22, None, []), + ], +) +async def test_set_hvac_mode( + hass: HomeAssistant, + fritz: Mock, + service_data: dict, + target_temperature: float, + current_preset: str, + expected_call_args: list[_Call], +) -> None: """Test setting hvac mode.""" device = FritzDeviceClimateMock() + device.target_temperature = target_temperature + + if current_preset is PRESET_COMFORT: + device.nextchange_temperature = device.eco_temperature + elif current_preset is PRESET_ECO: + device.nextchange_temperature = device.comfort_temperature + else: + device.nextchange_endperiod = 0 + assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, + {ATTR_ENTITY_ID: ENTITY_ID, **service_data}, True, ) - assert device.set_target_temperature.call_args_list == [call(0)] - -async def test_no_reset_hvac_mode_heat(hass: HomeAssistant, fritz: Mock) -> None: - """Test setting hvac mode.""" - device = FritzDeviceClimateMock() - assert await setup_config_entry( - hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, - True, - ) - assert device.set_target_temperature.call_count == 0 - - -async def test_set_hvac_mode_heat(hass: HomeAssistant, fritz: Mock) -> None: - """Test setting hvac mode.""" - device = FritzDeviceClimateMock() - device.target_temperature = 0.0 - assert await setup_config_entry( - hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, - True, - ) - assert device.set_target_temperature.call_args_list == [call(22)] + assert device.set_target_temperature.call_count == len(expected_call_args) + assert device.set_target_temperature.call_args_list == expected_call_args async def test_set_preset_mode_comfort(hass: HomeAssistant, fritz: Mock) -> None: @@ -384,7 +375,7 @@ async def test_set_preset_mode_comfort(hass: HomeAssistant, fritz: Mock) -> None ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_COMFORT}, True, @@ -400,7 +391,7 @@ async def test_set_preset_mode_eco(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_ECO}, True, @@ -463,7 +454,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) 
- state = hass.states.get(f"{DOMAIN}.new_climate") + state = hass.states.get(f"{CLIMATE_DOMAIN}.new_climate") assert state diff --git a/tests/components/fritzbox/test_config_flow.py b/tests/components/fritzbox/test_config_flow.py index 72d36a8ab63..0df6d0b2ea9 100644 --- a/tests/components/fritzbox/test_config_flow.py +++ b/tests/components/fritzbox/test_config_flow.py @@ -12,12 +12,7 @@ from requests.exceptions import HTTPError from homeassistant.components import ssdp from homeassistant.components.fritzbox.const import DOMAIN from homeassistant.components.ssdp import ATTR_UPNP_FRIENDLY_NAME, ATTR_UPNP_UDN -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -129,12 +124,7 @@ async def test_reauth_success(hass: HomeAssistant, fritz: Mock) -> None: """Test starting a reauthentication flow.""" mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -158,12 +148,7 @@ async def test_reauth_auth_failed(hass: HomeAssistant, fritz: Mock) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -186,12 +171,7 @@ async def test_reauth_not_successful(hass: HomeAssistant, fritz: Mock) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -216,13 +196,9 @@ async def test_reconfigure_success(hass: HomeAssistant, fritz: Mock) -> None: assert mock_config.data[CONF_USERNAME] == "fake_user" assert mock_config.data[CONF_PASSWORD] == "fake_pass" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -249,13 +225,9 @@ async def test_reconfigure_failed(hass: HomeAssistant, fritz: Mock) -> None: assert mock_config.data[CONF_USERNAME] == "fake_user" assert mock_config.data[CONF_PASSWORD] == "fake_pass" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, - 
data=mock_config.data, - ) + result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -264,7 +236,7 @@ async def test_reconfigure_failed(hass: HomeAssistant, fritz: Mock) -> None: }, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"]["base"] == "no_devices_found" result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/fritzbox/test_cover.py b/tests/components/fritzbox/test_cover.py index 6626db2bccf..f26e65fc28a 100644 --- a/tests/components/fritzbox/test_cover.py +++ b/tests/components/fritzbox/test_cover.py @@ -6,8 +6,8 @@ from unittest.mock import Mock, call from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, ATTR_POSITION, - DOMAIN, - STATE_OPEN, + DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.const import ( @@ -32,7 +32,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{COVER_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -44,7 +44,7 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: state = hass.states.get(ENTITY_ID) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 @@ -68,7 +68,7 @@ async def test_open_cover(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_blind_open.call_count == 1 @@ -81,7 +81,7 @@ async def test_close_cover(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_blind_close.call_count == 1 @@ -94,7 +94,7 @@ async def test_set_position_cover(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_POSITION: 50}, True, @@ -110,7 +110,7 @@ async def test_stop_cover(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_blind_stop.call_count == 1 @@ -134,5 +134,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_climate") + state = hass.states.get(f"{COVER_DOMAIN}.new_climate") assert state diff --git a/tests/components/fritzbox/test_diagnostics.py b/tests/components/fritzbox/test_diagnostics.py index 38aaa623080..21d70b4b6d6 100644 --- a/tests/components/fritzbox/test_diagnostics.py +++ b/tests/components/fritzbox/test_diagnostics.py @@ -30,4 +30,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, 
hass_client, entries[0]) - assert result == {"entry": entry_dict, "data": {}} + assert result == {"entry": entry_dict | {"discovery_keys": {}}, "data": {}} diff --git a/tests/components/fritzbox/test_light.py b/tests/components/fritzbox/test_light.py index 3cafa933fa3..84fafe25521 100644 --- a/tests/components/fritzbox/test_light.py +++ b/tests/components/fritzbox/test_light.py @@ -19,7 +19,7 @@ from homeassistant.components.light import ( ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, - DOMAIN, + DOMAIN as LIGHT_DOMAIN, ColorMode, ) from homeassistant.const import ( @@ -38,7 +38,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{LIGHT_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -147,7 +147,7 @@ async def test_turn_on(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_COLOR_TEMP_KELVIN: 3000}, True, @@ -170,7 +170,7 @@ async def test_turn_on_color(hass: HomeAssistant, fritz: Mock) -> None: hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, True, @@ -204,7 +204,7 @@ async def test_turn_on_color_unsupported_api_method( device.set_unmapped_color.side_effect = error await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, True, @@ -219,7 +219,7 @@ async def test_turn_on_color_unsupported_api_method( error.response.status_code = 500 with pytest.raises(HTTPError, match="Bad Request"): await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, True, @@ -237,7 +237,7 @@ async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + LIGHT_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_state_off.call_count == 1 @@ -316,5 +316,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_light") + state = hass.states.get(f"{LIGHT_DOMAIN}.new_light") assert state diff --git a/tests/components/fritzbox/test_sensor.py b/tests/components/fritzbox/test_sensor.py index 63d0b67d7f4..0da040bbb5b 100644 --- a/tests/components/fritzbox/test_sensor.py +++ b/tests/components/fritzbox/test_sensor.py @@ -3,15 +3,22 @@ from datetime import timedelta from unittest.mock import Mock +import pytest from requests.exceptions import HTTPError +from homeassistant.components.climate import PRESET_COMFORT, PRESET_ECO from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN -from homeassistant.components.sensor import ATTR_STATE_CLASS, DOMAIN, SensorStateClass +from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, + DOMAIN as SENSOR_DOMAIN, + SensorStateClass, +) from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, CONF_DEVICES, 
PERCENTAGE, + STATE_UNKNOWN, EntityCategory, UnitOfTemperature, ) @@ -19,12 +26,17 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util -from . import FritzDeviceSensorMock, set_devices, setup_config_entry +from . import ( + FritzDeviceClimateMock, + FritzDeviceSensorMock, + set_devices, + setup_config_entry, +) from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup( @@ -130,5 +142,57 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_device_temperature") + state = hass.states.get(f"{SENSOR_DOMAIN}.new_device_temperature") assert state + + +@pytest.mark.parametrize( + ("next_changes", "expected_states"), + [ + ( + [0, 16], + [STATE_UNKNOWN, STATE_UNKNOWN, STATE_UNKNOWN, STATE_UNKNOWN], + ), + ( + [0, 22], + [STATE_UNKNOWN, STATE_UNKNOWN, STATE_UNKNOWN, STATE_UNKNOWN], + ), + ( + [1726855200, 16.0], + ["2024-09-20T18:00:00+00:00", "16.0", PRESET_ECO, PRESET_COMFORT], + ), + ( + [1726855200, 22.0], + ["2024-09-20T18:00:00+00:00", "22.0", PRESET_COMFORT, PRESET_ECO], + ), + ], +) +async def test_next_change_sensors( + hass: HomeAssistant, fritz: Mock, next_changes: list, expected_states: list +) -> None: + """Test next change sensors.""" + device = FritzDeviceClimateMock() + device.nextchange_endperiod = next_changes[0] + device.nextchange_temperature = next_changes[1] + + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + base_name = f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}" + + state = hass.states.get(f"{base_name}_next_scheduled_change_time") + assert state + assert state.state == expected_states[0] + + state = hass.states.get(f"{base_name}_next_scheduled_temperature") + assert state + assert state.state == expected_states[1] + + state = hass.states.get(f"{base_name}_next_scheduled_preset") + assert state + assert state.state == expected_states[2] + + state = hass.states.get(f"{base_name}_current_scheduled_preset") + assert state + assert state.state == expected_states[3] diff --git a/tests/components/fritzbox/test_switch.py b/tests/components/fritzbox/test_switch.py index ba3b1de9b2f..e394ccbc7f3 100644 --- a/tests/components/fritzbox/test_switch.py +++ b/tests/components/fritzbox/test_switch.py @@ -12,7 +12,7 @@ from homeassistant.components.sensor import ( DOMAIN as SENSOR_DOMAIN, SensorStateClass, ) -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -39,7 +39,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup( @@ -124,7 +124,7 @@ async def test_turn_on(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_switch_state_on.call_count == 1 @@ -138,7 +138,7 @@ async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: ) await 
hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_switch_state_off.call_count == 1 @@ -158,7 +158,7 @@ async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None: match="Can't toggle switch while manual switching is disabled for the device", ): await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) with pytest.raises( @@ -166,7 +166,7 @@ async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None: match="Can't toggle switch while manual switching is disabled for the device", ): await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) @@ -239,5 +239,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_switch") + state = hass.states.get(f"{SWITCH_DOMAIN}.new_switch") assert state diff --git a/tests/components/fritzbox_callmonitor/test_config_flow.py b/tests/components/fritzbox_callmonitor/test_config_flow.py index 14f18e84e0c..0eccb651611 100644 --- a/tests/components/fritzbox_callmonitor/test_config_flow.py +++ b/tests/components/fritzbox_callmonitor/test_config_flow.py @@ -264,6 +264,97 @@ async def test_setup_invalid_auth( assert result["errors"] == {"base": ConnectResult.INVALID_AUTH} +async def test_reauth_successful(hass: HomeAssistant) -> None: + """Test starting a reauthentication flow.""" + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_ENTRY) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + with ( + patch( + "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.__init__", + return_value=None, + ), + patch( + "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.phonebook_ids", + new_callable=PropertyMock, + return_value=[0], + ), + patch( + "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.phonebook_info", + return_value=MOCK_PHONEBOOK_INFO_1, + ), + patch( + "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.modelname", + return_value=MOCK_PHONEBOOK_NAME_1, + ), + patch( + "homeassistant.components.fritzbox_callmonitor.config_flow.FritzConnection.__init__", + return_value=None, + ), + patch( + "homeassistant.components.fritzbox_callmonitor.config_flow.FritzConnection.updatecheck", + new_callable=PropertyMock, + return_value=MOCK_DEVICE_INFO, + ), + patch( + "homeassistant.components.fritzbox_callmonitor.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "other_fake_user", + CONF_PASSWORD: "other_fake_password", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_config.data == { + **MOCK_CONFIG_ENTRY, + CONF_USERNAME: "other_fake_user", + CONF_PASSWORD: "other_fake_password", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (FritzConnectionException, 
ConnectResult.INVALID_AUTH), + (FritzSecurityError, ConnectResult.INSUFFICIENT_PERMISSIONS), + ], +) +async def test_reauth_not_successful( + hass: HomeAssistant, side_effect: Exception, error: str +) -> None: + """Test starting a reauthentication flow but no connection found.""" + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_ENTRY) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + with patch( + "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.__init__", + side_effect=side_effect, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "other_fake_user", + CONF_PASSWORD: "other_fake_password", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"]["base"] == error + + async def test_options_flow_correct_prefixes(hass: HomeAssistant) -> None: """Test config flow options.""" diff --git a/tests/components/fronius/__init__.py b/tests/components/fronius/__init__.py index 57b22490ed0..8445e6b6a79 100644 --- a/tests/components/fronius/__init__.py +++ b/tests/components/fronius/__init__.py @@ -3,20 +3,16 @@ from __future__ import annotations from collections.abc import Callable -from datetime import timedelta import json from typing import Any -from freezegun.api import FrozenDateTimeFactory - from homeassistant.components.fronius.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.typing import UNDEFINED, UndefinedType -from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture +from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker MOCK_HOST = "http://fronius" @@ -115,24 +111,3 @@ def mock_responses( f"{host}/solar_api/v1/GetOhmPilotRealtimeData.cgi?Scope=System", text=_load(f"{fixture_set}/GetOhmPilotRealtimeData.json", "fronius"), ) - - -async def enable_all_entities( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry_id: str, - time_till_next_update: timedelta, -) -> None: - """Enable all entities for a config entry and fast forward time to receive data.""" - registry = er.async_get(hass) - entities = er.async_entries_for_config_entry(registry, config_entry_id) - for entry in [ - entry - for entry in entities - if entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - ]: - registry.async_update_entity(entry.entity_id, disabled_by=None) - await hass.async_block_till_done() - freezer.tick(time_till_next_update) - async_fire_time_changed(hass) - await hass.async_block_till_done() diff --git a/tests/components/fronius/snapshots/test_diagnostics.ambr b/tests/components/fronius/snapshots/test_diagnostics.ambr index f23d63a58e3..b112839835a 100644 --- a/tests/components/fronius/snapshots/test_diagnostics.ambr +++ b/tests/components/fronius/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'is_logger': True, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'fronius', 'entry_id': 'f1e2b9837e8adaed6fa682acaa216fd8', 'minor_version': 1, @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 
'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/fronius/snapshots/test_sensor.ambr b/tests/components/fronius/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..8f8c9d919fc --- /dev/null +++ b/tests/components/fronius/snapshots/test_sensor.ambr @@ -0,0 +1,9008 @@ +# serializer version: 1 +# name: test_gen24[sensor.inverter_name_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '12345678-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Inverter name AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1589', + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '12345678-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter name AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.3204', + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '12345678-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_voltage-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Inverter name AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '234.9168', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '12345678-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Inverter name DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0783', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_current_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc_2', + 'unique_id': '12345678-current_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Inverter name DC current 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_current_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0754', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '12345678-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: 
test_gen24[sensor.inverter_name_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Inverter name DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '411.3811', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_voltage_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc_2', + 'unique_id': '12345678-voltage_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Inverter name DC voltage 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_voltage_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '403.4312', + }) +# --- +# name: test_gen24[sensor.inverter_name_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '12345678-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inverter name Error code', + }), + 'context': , + 'entity_id': 'sensor.inverter_name_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.inverter_name_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '12345678-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_frequency-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Inverter name Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9917', + }) +# --- +# name: test_gen24[sensor.inverter_name_inverter_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_inverter_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Inverter state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'inverter_state', + 'unique_id': '12345678-inverter_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_inverter_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inverter name Inverter state', + }), + 'context': , + 'entity_id': 'sensor.inverter_name_inverter_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Running', + }) +# --- +# name: test_gen24[sensor.inverter_name_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '12345678-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inverter name Status code', + }), + 'context': , + 'entity_id': 'sensor.inverter_name_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_gen24[sensor.inverter_name_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '12345678-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_status_message-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Inverter name Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + ]), + }), + 'context': , + 'entity_id': 'sensor.inverter_name_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_gen24[sensor.inverter_name_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '12345678-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter name Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1530193.42', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent', + 'unique_id': '1234567890-power_apparent', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '868.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 1', + 'platform': 'fronius', + 'previous_unique_id': 
None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_1', + 'unique_id': '1234567890-power_apparent_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '243.3', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_2', + 'unique_id': '1234567890-power_apparent_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '323.4', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_3', + 'unique_id': '1234567890-power_apparent_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '301.2', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_1', + 'unique_id': '1234567890-current_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.145', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_2', + 'unique_id': '1234567890-current_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.33', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_3', + 'unique_id': '1234567890-current_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.825', + }) +# --- +# name: 
test_gen24[sensor.smart_meter_ts_65a_3_frequency_phase_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency phase average', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_phase_average', + 'unique_id': '1234567890-frequency_phase_average', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_frequency_phase_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Smart Meter TS 65A-3 Frequency phase average', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter location', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location', + 'unique_id': '1234567890-meter_location', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Meter location', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location_description-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter location description', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location_description', + 'unique_id': '1234567890-meter_location_description', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location_description-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Smart Meter TS 65A-3 Meter location description', + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'feed_in', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor', + 'unique_id': '1234567890-power_factor', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.828', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_1', + 'unique_id': '1234567890-power_factor_phase_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 1', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.441', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 
None, + 'original_name': 'Power factor phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_2', + 'unique_id': '1234567890-power_factor_phase_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 2', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.934', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_3', + 'unique_id': '1234567890-power_factor_phase_3', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 3', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.832', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_consumed', + 'unique_id': '1234567890-energy_reactive_ac_consumed', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy consumed', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '88221.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_produced', + 'unique_id': '1234567890-energy_reactive_ac_produced', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy produced', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1989125.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive', + 'unique_id': '1234567890-power_reactive', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-517.4', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_1', + 'unique_id': '1234567890-power_reactive_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-218.6', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_2', + 'unique_id': '1234567890-power_reactive_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-132.8', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_3', + 'unique_id': '1234567890-power_reactive_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-166.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'energy_real_consumed', + 'unique_id': '1234567890-energy_real_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2013105.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_minus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy minus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_minus', + 'unique_id': '1234567890-energy_real_ac_minus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_minus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy minus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3863340.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy plus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_plus', + 'unique_id': '1234567890-energy_real_ac_plus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy plus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2013105.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'has_entity_name': 
True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_produced', + 'unique_id': '1234567890-energy_real_produced', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy produced', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3863340.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real', + 'unique_id': '1234567890-power_real', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '653.1', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_1', + 'unique_id': '1234567890-power_real_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '106.8', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_2', + 'unique_id': '1234567890-power_real_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '294.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_3', + 'unique_id': '1234567890-power_real_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '251.3', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_1', + 'unique_id': '1234567890-voltage_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '235.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1-2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_12', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_12', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1-2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '408.7', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_2', + 'unique_id': '1234567890-voltage_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '236.1', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2-3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_23', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_23', + 
'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2-3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '409.6', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_3', + 'unique_id': '1234567890-voltage_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '236.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3-1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_31', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_31', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3-1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '409.4', + }) +# --- +# name: test_gen24[sensor.solarnet_meter_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Meter mode', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_mode', + 'unique_id': 'solar_net_123.4567890-power_flow-meter_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.solarnet_meter_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Meter mode', + }), + 'context': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'meter', + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '658.4', + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid export', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_export', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_export', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_import-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Power grid import', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_import', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_import', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_import-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_import', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '658.4', + }) +# --- +# name: test_gen24[sensor.solarnet_power_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-695.6827', + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_consumed', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '695.6827', + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_generated', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load generated', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_generated', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_generated', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.solarnet_power_photovoltaics-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power photovoltaics', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_photovoltaics', + 'unique_id': 'solar_net_123.4567890-power_flow-power_photovoltaics', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_photovoltaics-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power photovoltaics', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '62.9481', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_autonomy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative autonomy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_autonomy', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_autonomy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_autonomy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative autonomy', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.3592', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_self_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative self consumption', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_self_consumption', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_self_consumption', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_self_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative self consumption', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100.0', + }) +# --- +# name: test_gen24[sensor.solarnet_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1530193.42', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': 'P030T020Z2001234567 -current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'BYD Battery-Box Premium HV DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# 
name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': 'P030T020Z2001234567 -voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'BYD Battery-Box Premium HV DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_designed_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_designed_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Designed capacity', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'capacity_designed', + 'unique_id': 'P030T020Z2001234567 -capacity_designed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_designed_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BYD Battery-Box Premium HV Designed capacity', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_designed_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16588', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_maximum_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_maximum_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Maximum capacity', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'capacity_maximum', + 'unique_id': 'P030T020Z2001234567 -capacity_maximum', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_maximum_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BYD Battery-Box 
Premium HV Maximum capacity', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_maximum_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16588', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_state_of_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_state_of_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State of charge', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_of_charge', + 'unique_id': 'P030T020Z2001234567 -state_of_charge', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_state_of_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'BYD Battery-Box Premium HV State of charge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_state_of_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.6', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_cell', + 'unique_id': 'P030T020Z2001234567 -temperature_cell', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'BYD Battery-Box Premium HV Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.5', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'current_ac', + 'unique_id': '12345678-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gen24 Storage AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.1087', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '12345678-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Gen24 Storage AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '250.9093', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '12345678-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Gen24 Storage AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '227.354', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '12345678-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gen24 Storage DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3952', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_current_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc_2', + 'unique_id': '12345678-current_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gen24 Storage DC current 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_current_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3564', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '12345678-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Gen24 Storage DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '419.1009', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_voltage_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'DC voltage 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc_2', + 'unique_id': '12345678-voltage_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Gen24 Storage DC voltage 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_voltage_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '318.8103', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '12345678-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gen24 Storage Error code', + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '12345678-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gen24 Storage Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9816', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_inverter_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_inverter_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 
'Inverter state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'inverter_state', + 'unique_id': '12345678-inverter_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_inverter_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gen24 Storage Inverter state', + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_inverter_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Running', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '12345678-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gen24 Storage Status code', + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '12345678-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gen24 Storage Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + ]), + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , 
+ 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '12345678-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Gen24 Storage Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7512794.0117', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohmpilot_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_consumed', + 'unique_id': '23456789-energy_real_ac_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Ohmpilot Energy consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1233295.0', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohmpilot_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_ac', + 'unique_id': '23456789-power_real_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Ohmpilot Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ohmpilot_state_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, 
+ 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'State code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_code', + 'unique_id': '23456789-state_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohmpilot State code', + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'up_and_running', + 'keep_minimum_temperature', + 'legionella_protection', + 'critical_fault', + 'fault', + 'boost_mode', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ohmpilot_state_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_message', + 'unique_id': '23456789-state_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Ohmpilot State message', + 'options': list([ + 'up_and_running', + 'keep_minimum_temperature', + 'legionella_protection', + 'critical_fault', + 'fault', + 'boost_mode', + ]), + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_state_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'up_and_running', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohmpilot_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_channel_1', + 'unique_id': '23456789-temperature_channel_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Ohmpilot Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent', + 'unique_id': '1234567890-power_apparent', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '821.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_1', + 'unique_id': '1234567890-power_apparent_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '319.5', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_2', + 'unique_id': '1234567890-power_apparent_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 2', + 'state_class': , + 'unit_of_measurement': , + 
}), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '383.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_3', + 'unique_id': '1234567890-power_apparent_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '118.4', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_1', + 'unique_id': '1234567890-current_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.701', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'current_ac_phase_2', + 'unique_id': '1234567890-current_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.832', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_3', + 'unique_id': '1234567890-current_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.645', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_frequency_phase_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency phase average', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_phase_average', + 'unique_id': '1234567890-frequency_phase_average', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_frequency_phase_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Smart Meter TS 65A-3 Frequency phase average', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter location', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location', + 'unique_id': '1234567890-meter_location', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Meter location', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location_description-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter location description', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location_description', + 'unique_id': '1234567890-meter_location_description', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location_description-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Smart Meter TS 65A-3 Meter location description', + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'feed_in', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor', + 'unique_id': '1234567890-power_factor', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'0.698', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_1', + 'unique_id': '1234567890-power_factor_phase_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 1', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.995', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_2', + 'unique_id': '1234567890-power_factor_phase_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 2', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.389', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_3', + 'unique_id': '1234567890-power_factor_phase_3', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 3', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.163', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_consumed', + 'unique_id': '1234567890-energy_reactive_ac_consumed', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy consumed', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5482.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_produced', + 'unique_id': '1234567890-energy_reactive_ac_produced', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy produced', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3266105.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive', + 'unique_id': '1234567890-power_reactive', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-501.5', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_1', + 'unique_id': '1234567890-power_reactive_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-31.3', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_2', + 'unique_id': '1234567890-power_reactive_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-353.4', + }) +# --- +# name: 
test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_3', + 'unique_id': '1234567890-power_reactive_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-116.7', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_consumed', + 'unique_id': '1234567890-energy_real_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1247204.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_minus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy minus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_minus', + 'unique_id': '1234567890-energy_real_ac_minus', + 'unit_of_measurement': , + }) +# --- +# name: 
test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_minus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy minus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1705128.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy plus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_plus', + 'unique_id': '1234567890-energy_real_ac_plus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy plus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1247204.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_produced', + 'unique_id': '1234567890-energy_real_produced', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy produced', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1705128.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real', + 'unique_id': '1234567890-power_real', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '487.7', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_1', + 'unique_id': '1234567890-power_real_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '317.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_2', + 'unique_id': '1234567890-power_real_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '150.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), 
+ 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_3', + 'unique_id': '1234567890-power_real_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19.6', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_1', + 'unique_id': '1234567890-voltage_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '229.4', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1-2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_12', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_12', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1-2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '396.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_2', + 'unique_id': '1234567890-voltage_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '225.6', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2-3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_23', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_23', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2-3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '393.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_3', + 'unique_id': '1234567890-voltage_ac_phase_3', + 
'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '228.3', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3-1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_31', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_31', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3-1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '394.3', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_meter_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter mode', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_mode', + 'unique_id': 'solar_net_12345678-power_flow-meter_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.solarnet_meter_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Meter mode', + }), + 'context': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'bidirectional', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power battery', + 'platform': 'fronius', + 'previous_unique_id': 
None, + 'supported_features': 0, + 'translation_key': 'power_battery', + 'unique_id': 'solar_net_12345678-power_flow-power_battery', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power battery', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1591', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_battery_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power battery charge', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_battery_charge', + 'unique_id': 'solar_net_12345678-power_flow-power_battery_charge', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power battery charge', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_battery_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_discharge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_battery_discharge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power battery discharge', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_battery_discharge', + 'unique_id': 'solar_net_12345678-power_flow-power_battery_discharge', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_discharge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power battery discharge', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_battery_discharge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1591', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid', + 'unique_id': 'solar_net_12345678-power_flow-power_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2274.9', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid export', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_export', + 'unique_id': 'solar_net_12345678-power_flow-power_grid_export', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_import-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid import', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_import', + 'unique_id': 'solar_net_12345678-power_flow-power_grid_import', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_import-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_import', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2274.9', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load', + 'unique_id': 'solar_net_12345678-power_flow-power_load', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2459.3092', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_consumed', + 'unique_id': 'solar_net_12345678-power_flow-power_load_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2459.3092', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_generated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load generated', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_generated', + 'unique_id': 'solar_net_12345678-power_flow-power_load_generated', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_gen24_storage[sensor.solarnet_power_photovoltaics-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power photovoltaics', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_photovoltaics', + 'unique_id': 'solar_net_12345678-power_flow-power_photovoltaics', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_photovoltaics-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power photovoltaics', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '216.4328', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_autonomy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative autonomy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_autonomy', + 'unique_id': 'solar_net_12345678-power_flow-relative_autonomy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_autonomy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative autonomy', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.4984', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_self_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative self consumption', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_self_consumption', + 'unique_id': 'solar_net_12345678-power_flow-relative_self_consumption', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_self_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative 
self consumption', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100.0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': 'solar_net_12345678-power_flow-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7512664.4042', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '234567-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 3.0-1 AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.32', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '234567-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', 
+ 'friendly_name': 'Primo 3.0-1 AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '296', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '234567-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 3.0-1 AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '223.6', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '234567-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 3.0-1 DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.97', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '234567-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 
'Primo 3.0-1 DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '329.5', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_energy_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy day', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_day', + 'unique_id': '234567-energy_day', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 3.0-1 Energy day', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_energy_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14237', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_energy_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy year', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_year', + 'unique_id': '234567-energy_year', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 3.0-1 Energy year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_energy_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3596193.25', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '234567-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 Error code', + }), + 'context': , + 
'entity_id': 'sensor.primo_3_0_1_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '234567-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Primo 3.0-1 Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60.01', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED color', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_color', + 'unique_id': '234567-led_color', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 LED color', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_led_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_state', + 'unique_id': '234567-led_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 LED state', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_led_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_code-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '234567-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 Status code', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '234567-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 3.0-1 Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '234567-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 3.0-1 Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': '5796010', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '123456-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 5.0-1 AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.85', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '123456-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Primo 5.0-1 AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '862', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '123456-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 5.0-1 AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '223.9', + }) +# --- 
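The `.ambr` blocks in this file are syrupy snapshots stored in pairs per entity: an `<entity_id>-entry` block capturing the entity registry entry and an `<entity_id>-state` block capturing the reported state (value, unit, device/state class). As a clarifying sketch only — not the integration's actual test code — the following shows how such paired snapshots are typically asserted in a Home Assistant test. The test name and the `setup_fronius_integration` helper are hypothetical placeholders; the fixture names (`hass`, `snapshot`, `entity_registry_enabled_by_default`) follow the usual Home Assistant/syrupy conventions.

    import pytest
    from syrupy.assertion import SnapshotAssertion

    from homeassistant.core import HomeAssistant
    from homeassistant.helpers import entity_registry as er


    async def setup_fronius_integration(hass: HomeAssistant) -> str:
        """Hypothetical placeholder: set up a Fronius config entry against a
        mocked Solar API and return its config entry id (the real tests do
        this via fixtures and mocked HTTP responses)."""
        raise NotImplementedError


    @pytest.mark.usefixtures("entity_registry_enabled_by_default")
    async def test_sensors_match_snapshots(
        hass: HomeAssistant,
        snapshot: SnapshotAssertion,
    ) -> None:
        """Compare every created sensor entity against the stored .ambr snapshots."""
        config_entry_id = await setup_fronius_integration(hass)

        registry = er.async_get(hass)
        for entry in er.async_entries_for_config_entry(registry, config_entry_id):
            # One "<entity_id>-entry" block per registry entry ...
            assert entry == snapshot(name=f"{entry.entity_id}-entry")
            # ... and one "<entity_id>-state" block per reported state.
            assert hass.states.get(entry.entity_id) == snapshot(
                name=f"{entry.entity_id}-state"
            )

Each `assert ... == snapshot(name=...)` call compares against (or, with pytest's `--snapshot-update` flag, regenerates) one named block like the ones in this file.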
+# name: test_primo_s0[sensor.primo_5_0_1_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '123456-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 5.0-1 DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.23', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '123456-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 5.0-1 DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '452.3', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_energy_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy day', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_day', + 'unique_id': '123456-energy_day', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 5.0-1 Energy day', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_energy_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22504', + }) +# --- +# name: 
test_primo_s0[sensor.primo_5_0_1_energy_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_energy_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy year', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_year', + 'unique_id': '123456-energy_year', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 5.0-1 Energy year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_energy_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7532755.5', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '123456-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 Error code', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '123456-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Primo 5.0-1 Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), 
+ 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED color', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_color', + 'unique_id': '123456-led_color', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 LED color', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_led_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_state', + 'unique_id': '123456-led_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 LED state', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_led_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '123456-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 Status code', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 
'entity_id': 'sensor.primo_5_0_1_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '123456-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 5.0-1 Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '123456-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 5.0-1 Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17114940', + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter location', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location', + 'unique_id': 'solar_net_123.4567890:S0 Meter at inverter 1-meter_location', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'S0 Meter at inverter 1 Meter location', + }), + 'context': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location_description-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location_description', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter location description', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location_description', + 'unique_id': 'solar_net_123.4567890:S0 Meter at inverter 1-meter_location_description', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location_description-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'S0 Meter at inverter 1 Meter location description', + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'context': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location_description', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'consumption_path', + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_real_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.s0_meter_at_inverter_1_real_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real', + 'unique_id': 'solar_net_123.4567890:S0 Meter at inverter 1-power_real', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_real_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'S0 Meter at inverter 1 Real power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_real_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2216.7487', + }) +# --- +# name: test_primo_s0[sensor.solarnet_co2_factor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_co2_factor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CO₂ factor', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co2_factor', + 'unique_id': '123.4567890-co2_factor', + 'unit_of_measurement': 'kg/kWh', + }) +# --- +# name: test_primo_s0[sensor.solarnet_co2_factor-state] + StateSnapshot({ 
+ 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet CO₂ factor', + 'state_class': , + 'unit_of_measurement': 'kg/kWh', + }), + 'context': , + 'entity_id': 'sensor.solarnet_co2_factor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.53', + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_energy_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy day', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_day', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_day', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Energy day', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_energy_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '36724', + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_energy_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy year', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_year', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_year', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Energy year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_energy_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11128933.25', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_export_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_grid_export_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid export tariff', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cash_factor', + 'unique_id': '123.4567890-cash_factor', + 'unit_of_measurement': 'BRL/kWh', + }) +# --- +# name: 
test_primo_s0[sensor.solarnet_grid_export_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Grid export tariff', + 'state_class': , + 'unit_of_measurement': 'BRL/kWh', + }), + 'context': , + 'entity_id': 'sensor.solarnet_grid_export_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_import_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_grid_import_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid import tariff', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'delivery_factor', + 'unique_id': '123.4567890-delivery_factor', + 'unit_of_measurement': 'BRL/kWh', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_import_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Grid import tariff', + 'state_class': , + 'unit_of_measurement': 'BRL/kWh', + }), + 'context': , + 'entity_id': 'sensor.solarnet_grid_import_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_primo_s0[sensor.solarnet_meter_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter mode', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_mode', + 'unique_id': 'solar_net_123.4567890-power_flow-meter_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.solarnet_meter_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Meter mode', + }), + 'context': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'vague-meter', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid', + 'unit_of_measurement': , + }) +# --- +# name: 
test_primo_s0[sensor.solarnet_power_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '384.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid export', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_export', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_export', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_import-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid import', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_import', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_import', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_import-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_import', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '384.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'power_load', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2218.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_consumed', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2218.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_generated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load generated', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_generated', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_generated', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_photovoltaics-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power photovoltaics', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_photovoltaics', + 'unique_id': 'solar_net_123.4567890-power_flow-power_photovoltaics', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_photovoltaics-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power photovoltaics', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1834', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_autonomy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative autonomy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_autonomy', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_autonomy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_autonomy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative autonomy', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '82.6523', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_self_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative self consumption', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_self_consumption', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_self_consumption', + 'unit_of_measurement': '%', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_self_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative self consumption', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_primo_s0[sensor.solarnet_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, 
+ 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22910919.5', + }) +# --- diff --git a/tests/components/fronius/test_config_flow.py b/tests/components/fronius/test_config_flow.py index 41593a0ad2e..933b8fad8ef 100644 --- a/tests/components/fronius/test_config_flow.py +++ b/tests/components/fronius/test_config_flow.py @@ -44,43 +44,62 @@ MOCK_DHCP_DATA = DhcpServiceInfo( ) -async def test_form_with_logger(hass: HomeAssistant) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert not result["errors"] - - with ( - patch( - "pyfronius.Fronius.current_logger_info", - return_value=LOGGER_INFO_RETURN_VALUE, - ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, +async def assert_finish_flow_with_logger(hass: HomeAssistant, flow_id: str) -> None: + """Assert finishing the flow with a logger device.""" + with patch( + "pyfronius.Fronius.current_logger_info", + return_value=LOGGER_INFO_RETURN_VALUE, ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result = await hass.config_entries.flow.async_configure( + flow_id, { "host": "10.9.8.1", }, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "SolarNet Datalogger at 10.9.8.1" - assert result2["data"] == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "SolarNet Datalogger at 10.9.8.1" + assert result["data"] == { "host": "10.9.8.1", "is_logger": True, } - assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "123.4567" + + +async def assert_abort_flow_with_logger( + hass: HomeAssistant, flow_id: str, reason: str +) -> config_entries.ConfigFlowResult: + """Assert the flow was aborted when a logger device responded.""" + with patch( + "pyfronius.Fronius.current_logger_info", + return_value=LOGGER_INFO_RETURN_VALUE, + ): + result = await hass.config_entries.flow.async_configure( + flow_id, + { + "host": "10.9.8.1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + return result + + +async def test_form_with_logger(hass: HomeAssistant) -> None: + """Test the basic flow with a logger device.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + await assert_finish_flow_with_logger(hass, 
result["flow_id"]) async def test_form_with_inverter(hass: HomeAssistant) -> None: - """Test we get the form.""" + """Test the basic flow with a Gen24 device.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -96,10 +115,6 @@ async def test_form_with_inverter(hass: HomeAssistant) -> None: "pyfronius.Fronius.inverter_info", return_value=INVERTER_INFO_RETURN_VALUE, ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -115,11 +130,21 @@ async def test_form_with_inverter(hass: HomeAssistant) -> None: "host": "10.9.1.1", "is_logger": False, } - assert len(mock_setup_entry.mock_calls) == 1 + assert result2["result"].unique_id == "1234567" -async def test_form_cannot_connect(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + "inverter_side_effect", + [ + FroniusError, + None, # raises StopIteration through INVERTER_INFO_NONE + ], +) +async def test_form_cannot_connect( + hass: HomeAssistant, inverter_side_effect: type[FroniusError] | None +) -> None: """Test we handle cannot connect error.""" + INVERTER_INFO_NONE: dict[str, list] = {"inverters": []} result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -131,34 +156,8 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None: ), patch( "pyfronius.Fronius.inverter_info", - side_effect=FroniusError, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.1", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - -async def test_form_no_device(hass: HomeAssistant) -> None: - """Test we handle no device found error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch( - "pyfronius.Fronius.current_logger_info", - side_effect=FroniusError, - ), - patch( - "pyfronius.Fronius.inverter_info", - return_value={"inverters": []}, + side_effect=inverter_side_effect, + return_value=INVERTER_INFO_NONE, ), ): result2 = await hass.config_entries.flow.async_configure( @@ -170,6 +169,7 @@ async def test_form_no_device(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} + await assert_finish_flow_with_logger(hass, result2["flow_id"]) async def test_form_unexpected(hass: HomeAssistant) -> None: @@ -191,13 +191,14 @@ async def test_form_unexpected(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} + await assert_finish_flow_with_logger(hass, result2["flow_id"]) async def test_form_already_existing(hass: HomeAssistant) -> None: """Test existing entry.""" MockConfigEntry( domain=DOMAIN, - unique_id="123.4567", + unique_id=LOGGER_INFO_RETURN_VALUE["unique_identifier"]["value"], data={CONF_HOST: "10.9.8.1", "is_logger": True}, ).add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 @@ -205,26 +206,15 @@ async def test_form_already_existing(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "pyfronius.Fronius.current_logger_info", - return_value=LOGGER_INFO_RETURN_VALUE, - ): - result2 = await 
hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "10.9.8.1", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" + await assert_abort_flow_with_logger( + hass, result["flow_id"], reason="already_configured" + ) -async def test_form_updates_host( +async def test_config_flow_already_configured( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: - """Test existing entry gets updated.""" + """Test existing entry doesn't get updated by config flow.""" old_host = "http://10.1.0.1" new_host = "http://10.1.0.2" entry = MockConfigEntry( @@ -247,26 +237,20 @@ async def test_form_updates_host( ) mock_responses(aioclient_mock, host=new_host) - with patch( - "homeassistant.components.fronius.async_unload_entry", - return_value=True, - ) as mock_unload_entry: - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": new_host, - }, - ) - await hass.async_block_till_done() - + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": new_host, + }, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" - mock_unload_entry.assert_called_with(hass, entry) entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 assert entries[0].data == { - "host": new_host, + "host": old_host, # not updated from config flow - only from reconfigure flow "is_logger": True, } @@ -295,6 +279,7 @@ async def test_dhcp(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> "host": MOCK_DHCP_DATA.ip, "is_logger": True, } + assert result["result"].unique_id == "123.4567" async def test_dhcp_already_configured( @@ -342,24 +327,19 @@ async def test_dhcp_invalid( async def test_reconfigure(hass: HomeAssistant) -> None: """Test reconfiguring an entry.""" + old_host = "http://10.1.0.1" + new_host = "http://10.1.0.2" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id="1234567", data={ - CONF_HOST: "10.1.2.3", + CONF_HOST: old_host, "is_logger": True, }, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -372,15 +352,11 @@ async def test_reconfigure(hass: HomeAssistant) -> None: "pyfronius.Fronius.inverter_info", return_value=INVERTER_INFO_RETURN_VALUE, ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ - "host": "10.9.1.1", + "host": new_host, }, ) await hass.async_block_till_done() @@ -388,17 +364,16 @@ async def test_reconfigure(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" assert entry.data == { - "host": "10.9.1.1", + "host": new_host, "is_logger": False, } - assert len(mock_setup_entry.mock_calls) == 1 async def test_reconfigure_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id=LOGGER_INFO_RETURN_VALUE["unique_identifier"]["value"], data={ CONF_HOST: 
"10.1.2.3", "is_logger": True, @@ -406,14 +381,7 @@ async def test_reconfigure_cannot_connect(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) with ( patch( @@ -435,12 +403,16 @@ async def test_reconfigure_cannot_connect(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} + await assert_abort_flow_with_logger( + hass, result2["flow_id"], reason="reconfigure_successful" + ) + async def test_reconfigure_unexpected(hass: HomeAssistant) -> None: """Test we handle unexpected error.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id=LOGGER_INFO_RETURN_VALUE["unique_identifier"]["value"], data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -448,14 +420,7 @@ async def test_reconfigure_unexpected(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) with patch( "pyfronius.Fronius.current_logger_info", @@ -471,12 +436,16 @@ async def test_reconfigure_unexpected(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} + await assert_abort_flow_with_logger( + hass, result2["flow_id"], reason="reconfigure_successful" + ) -async def test_reconfigure_already_configured(hass: HomeAssistant) -> None: - """Test reconfiguring an entry.""" + +async def test_reconfigure_to_different_device(hass: HomeAssistant) -> None: + """Test reconfiguring an entry to a different device.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id="999.9999999", data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -484,86 +453,10 @@ async def test_reconfigure_already_configured(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" - with ( - patch( - "pyfronius.Fronius.current_logger_info", - return_value=LOGGER_INFO_RETURN_VALUE, - ), - patch( - "pyfronius.Fronius.inverter_info", - return_value=INVERTER_INFO_RETURN_VALUE, - ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - "host": "10.1.2.3", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert len(mock_setup_entry.mock_calls) == 0 - - -async def test_reconfigure_already_existing(hass: HomeAssistant) -> None: - """Test reconfiguring entry to already existing device.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="123.4567890", - data={ - CONF_HOST: "10.1.2.3", - "is_logger": True, - }, + await assert_abort_flow_with_logger( + hass, result["flow_id"], reason="unique_id_mismatch" ) - entry.add_to_hass(hass) - - entry_2_uid = 
"222.2222222" - entry_2 = MockConfigEntry( - domain=DOMAIN, - unique_id=entry_2_uid, - data={ - CONF_HOST: "10.2.2.2", - "is_logger": True, - }, - ) - entry_2.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) - with patch( - "pyfronius.Fronius.current_logger_info", - return_value={"unique_identifier": {"value": entry_2_uid}}, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "10.1.1.1", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" diff --git a/tests/components/fronius/test_coordinator.py b/tests/components/fronius/test_coordinator.py index 13a08bbe70e..fab2d509767 100644 --- a/tests/components/fronius/test_coordinator.py +++ b/tests/components/fronius/test_coordinator.py @@ -29,7 +29,7 @@ async def test_adaptive_update_interval( mock_inverter_data.reset_mock() freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_inverter_data.assert_called_once() mock_inverter_data.reset_mock() @@ -38,13 +38,13 @@ async def test_adaptive_update_interval( # first 3 bad requests at default interval - 4th has different interval for _ in range(3): freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_inverter_data.call_count == 3 mock_inverter_data.reset_mock() freezer.tick(FroniusInverterUpdateCoordinator.error_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_inverter_data.call_count == 1 mock_inverter_data.reset_mock() @@ -52,13 +52,13 @@ async def test_adaptive_update_interval( mock_inverter_data.side_effect = None # next successful request resets to default interval freezer.tick(FroniusInverterUpdateCoordinator.error_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_inverter_data.assert_called_once() mock_inverter_data.reset_mock() freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_inverter_data.assert_called_once() mock_inverter_data.reset_mock() @@ -68,7 +68,7 @@ async def test_adaptive_update_interval( # first 3 requests at default interval - 4th has different interval for _ in range(3): freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() # BadStatusError does 3 silent retries for inverter endpoint * 3 request intervals = 9 assert mock_inverter_data.call_count == 9 diff --git a/tests/components/fronius/test_init.py b/tests/components/fronius/test_init.py index 9d570785073..a950ed4e296 100644 --- a/tests/components/fronius/test_init.py +++ b/tests/components/fronius/test_init.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from pyfronius import FroniusError from homeassistant.components.fronius.const import DOMAIN, SOLAR_NET_RESCAN_TIMER @@ -10,7 +11,6 @@ from homeassistant.config_entries import 
ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util from . import mock_responses, setup_fronius_integration @@ -66,6 +66,7 @@ async def test_inverter_night_rescan( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, + freezer: FrozenDateTimeFactory, ) -> None: """Test dynamic adding of an inverter discovered automatically after a Home Assistant reboot during the night.""" mock_responses(aioclient_mock, fixture_set="igplus_v2", night=True) @@ -78,9 +79,8 @@ async def test_inverter_night_rescan( # Switch to daytime mock_responses(aioclient_mock, fixture_set="igplus_v2", night=False) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() # We expect our inverter to be present now @@ -88,9 +88,8 @@ async def test_inverter_night_rescan( assert inverter_1.manufacturer == "Fronius" # After another re-scan we still only expect this inverter - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER * 2) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() inverter_1 = device_registry.async_get_device(identifiers={(DOMAIN, "203200")}) assert inverter_1.manufacturer == "Fronius" @@ -100,6 +99,7 @@ async def test_inverter_rescan_interruption( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, + freezer: FrozenDateTimeFactory, ) -> None: """Test interruption of re-scan during runtime to process further.""" mock_responses(aioclient_mock, fixture_set="igplus_v2", night=True) @@ -115,9 +115,8 @@ async def test_inverter_rescan_interruption( "pyfronius.Fronius.inverter_info", side_effect=FroniusError, ): - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() # No increase of devices expected because of a FroniusError @@ -132,9 +131,8 @@ async def test_inverter_rescan_interruption( # Next re-scan will pick up the new inverter. Expect 2 devices now. 
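A note on the timer hunks above (and the tick that follows this note): both test_coordinator.py and test_init.py drop the explicit `dt_util.utcnow() + timedelta(...)` argument and instead tick the frozen clock via the `freezer` fixture before firing a bare `async_fire_time_changed(hass)`, so consecutive rescans advance relative to the previous tick rather than recomputing an offset from "now". A small sketch of that pattern, assuming the same fixtures and imports used in these hunks; `advance_rescan_timer` is a hypothetical helper name, not something added by this diff.

    from datetime import timedelta

    from freezegun.api import FrozenDateTimeFactory

    from homeassistant.components.fronius.const import SOLAR_NET_RESCAN_TIMER
    from homeassistant.core import HomeAssistant

    from tests.common import async_fire_time_changed


    async def advance_rescan_timer(
        hass: HomeAssistant, freezer: FrozenDateTimeFactory
    ) -> None:
        """Advance the frozen clock past one rescan interval and flush listeners."""
        # Each call moves time forward relative to the previous tick, so calling
        # this helper twice covers two consecutive rescans without tracking utcnow().
        freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER))
        async_fire_time_changed(hass)
        await hass.async_block_till_done()
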
mock_responses(aioclient_mock, fixture_set="igplus_v2", night=False) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER * 2) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert ( diff --git a/tests/components/fronius/test_sensor.py b/tests/components/fronius/test_sensor.py index 04c25ce26f2..b5d051d56ca 100644 --- a/tests/components/fronius/test_sensor.py +++ b/tests/components/fronius/test_sensor.py @@ -2,27 +2,29 @@ from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy import SnapshotAssertion from homeassistant.components.fronius.const import DOMAIN from homeassistant.components.fronius.coordinator import ( FroniusInverterUpdateCoordinator, - FroniusMeterUpdateCoordinator, FroniusPowerFlowUpdateCoordinator, ) from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er -from . import enable_all_entities, mock_responses, setup_fronius_integration +from . import mock_responses, setup_fronius_integration -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_symo_inverter( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Symo inverter entities.""" @@ -32,15 +34,8 @@ async def test_symo_inverter( # Init at night mock_responses(aioclient_mock, night=True) - config_entry = await setup_fronius_integration(hass) + await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 22 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusInverterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 assert_state("sensor.symo_20_dc_current", 0) assert_state("sensor.symo_20_energy_day", 10828) @@ -54,13 +49,6 @@ async def test_symo_inverter( freezer.tick(FroniusInverterUpdateCoordinator.default_interval) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 62 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusInverterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 # 4 additional AC entities assert_state("sensor.symo_20_dc_current", 2.19) @@ -104,6 +92,7 @@ async def test_symo_logger( assert_state("sensor.solarnet_grid_import_tariff", 0.15) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_symo_meter( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -117,15 +106,8 @@ async def test_symo_meter( assert state.state == str(expected_state) mock_responses(aioclient_mock) - config_entry = await setup_fronius_integration(hass) + await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 26 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 # states are rounded to 4 
decimals assert_state("sensor.smart_meter_63a_current_phase_1", 7.755) @@ -206,6 +188,7 @@ async def test_symo_meter_forged( ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_symo_power_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -220,15 +203,8 @@ async def test_symo_power_flow( # First test at night mock_responses(aioclient_mock, night=True) - config_entry = await setup_fronius_integration(hass) + await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 22 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusInverterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 # states are rounded to 4 decimals assert_state("sensor.solarnet_energy_day", 10828) @@ -277,10 +253,13 @@ async def test_symo_power_flow( assert_state("sensor.solarnet_relative_self_consumption", 0) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_gen24( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Gen24 inverter entities.""" @@ -292,72 +271,10 @@ async def test_gen24( mock_responses(aioclient_mock, fixture_set="gen24") config_entry = await setup_fronius_integration(hass, is_logger=False) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 24 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 - # inverter 1 - assert_state("sensor.inverter_name_ac_current", 0.1589) - assert_state("sensor.inverter_name_dc_current_2", 0.0754) - assert_state("sensor.inverter_name_status_code", 7) - assert_state("sensor.inverter_name_status_message", "running") - assert_state("sensor.inverter_name_dc_current", 0.0783) - assert_state("sensor.inverter_name_dc_voltage_2", 403.4312) - assert_state("sensor.inverter_name_ac_power", 37.3204) - assert_state("sensor.inverter_name_error_code", 0) - assert_state("sensor.inverter_name_dc_voltage", 411.3811) - assert_state("sensor.inverter_name_total_energy", 1530193.42) - assert_state("sensor.inverter_name_inverter_state", "Running") - assert_state("sensor.inverter_name_ac_voltage", 234.9168) - assert_state("sensor.inverter_name_frequency", 49.9917) - # meter - assert_state("sensor.smart_meter_ts_65a_3_real_energy_produced", 3863340.0) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_consumed", 2013105.0) - assert_state("sensor.smart_meter_ts_65a_3_real_power", 653.1) - assert_state("sensor.smart_meter_ts_65a_3_frequency_phase_average", 49.9) - assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0) - assert_state("sensor.smart_meter_ts_65a_3_meter_location_description", "feed_in") - assert_state("sensor.smart_meter_ts_65a_3_power_factor", 0.828) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_consumed", 88221.0) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_minus", 3863340.0) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_2", 2.33) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1", 235.9) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1_2", 408.7) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_2", 294.9) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_plus", 2013105.0) - 
assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2", 236.1) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_produced", 1989125.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3", 236.9) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_1", 0.441) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2_3", 409.6) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_3", 1.825) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_3", 0.832) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_1", 243.3) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3_1", 409.4) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_2", 323.4) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_3", 301.2) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_1", 106.8) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_2", 0.934) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_3", 251.3) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_1", -218.6) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_2", -132.8) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_3", -166.0) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power", 868.0) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power", -517.4) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_1", 1.145) - # power_flow - assert_state("sensor.solarnet_power_grid", 658.4) - assert_state("sensor.solarnet_relative_self_consumption", 100.0) - assert_state("sensor.solarnet_power_photovoltaics", 62.9481) - assert_state("sensor.solarnet_power_load", -695.6827) - assert_state("sensor.solarnet_meter_mode", "meter") - assert_state("sensor.solarnet_relative_autonomy", 5.3592) - assert_state("sensor.solarnet_total_energy", 1530193.42) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + assert_state("sensor.inverter_name_total_energy", 1530193.42) # Gen24 devices may report 0 for total energy while doing firmware updates. # This should yield "unknown" state instead of 0. 
mock_responses( @@ -375,11 +292,14 @@ async def test_gen24( assert_state("sensor.inverter_name_total_energy", "unknown") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_gen24_storage( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Gen24 inverter with BYD battery and Ohmpilot entities.""" @@ -393,87 +313,8 @@ async def test_gen24_storage( hass, is_logger=False, unique_id="12345678" ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 37 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 72 - # inverter 1 - assert_state("sensor.gen24_storage_dc_current", 0.3952) - assert_state("sensor.gen24_storage_dc_voltage_2", 318.8103) - assert_state("sensor.gen24_storage_dc_current_2", 0.3564) - assert_state("sensor.gen24_storage_ac_current", 1.1087) - assert_state("sensor.gen24_storage_ac_power", 250.9093) - assert_state("sensor.gen24_storage_error_code", 0) - assert_state("sensor.gen24_storage_status_code", 7) - assert_state("sensor.gen24_storage_status_message", "running") - assert_state("sensor.gen24_storage_total_energy", 7512794.0117) - assert_state("sensor.gen24_storage_inverter_state", "Running") - assert_state("sensor.gen24_storage_dc_voltage", 419.1009) - assert_state("sensor.gen24_storage_ac_voltage", 227.354) - assert_state("sensor.gen24_storage_frequency", 49.9816) - # meter - assert_state("sensor.smart_meter_ts_65a_3_real_energy_produced", 1705128.0) - assert_state("sensor.smart_meter_ts_65a_3_real_power", 487.7) - assert_state("sensor.smart_meter_ts_65a_3_power_factor", 0.698) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_consumed", 1247204.0) - assert_state("sensor.smart_meter_ts_65a_3_frequency_phase_average", 49.9) - assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0) - assert_state("sensor.smart_meter_ts_65a_3_meter_location_description", "feed_in") - assert_state("sensor.smart_meter_ts_65a_3_reactive_power", -501.5) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_produced", 3266105.0) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_3", 19.6) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_3", 0.645) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_minus", 1705128.0) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_2", 383.9) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_1", 1.701) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_2", 1.832) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_1", 319.5) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1", 229.4) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_2", 150.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3_1", 394.3) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2", 225.6) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_consumed", 5482.0) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_plus", 1247204.0) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_1", 0.995) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_3", 0.163) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_2", 0.389) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_1", 
-31.3) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_3", -116.7) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1_2", 396.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2_3", 393.0) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_2", -353.4) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_1", 317.9) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3", 228.3) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power", 821.9) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_3", 118.4) - # ohmpilot - assert_state("sensor.ohmpilot_energy_consumed", 1233295.0) - assert_state("sensor.ohmpilot_power", 0.0) - assert_state("sensor.ohmpilot_temperature", 38.9) - assert_state("sensor.ohmpilot_state_code", 0.0) - assert_state("sensor.ohmpilot_state_message", "up_and_running") - # power_flow - assert_state("sensor.solarnet_power_grid", 2274.9) - assert_state("sensor.solarnet_power_battery", 0.1591) - assert_state("sensor.solarnet_power_battery_charge", 0) - assert_state("sensor.solarnet_power_battery_discharge", 0.1591) - assert_state("sensor.solarnet_power_load", -2459.3092) - assert_state("sensor.solarnet_relative_self_consumption", 100.0) - assert_state("sensor.solarnet_power_photovoltaics", 216.4328) - assert_state("sensor.solarnet_relative_autonomy", 7.4984) - assert_state("sensor.solarnet_meter_mode", "bidirectional") - assert_state("sensor.solarnet_total_energy", 7512664.4042) - # storage - assert_state("sensor.byd_battery_box_premium_hv_dc_current", 0.0) - assert_state("sensor.byd_battery_box_premium_hv_state_of_charge", 4.6) - assert_state("sensor.byd_battery_box_premium_hv_maximum_capacity", 16588) - assert_state("sensor.byd_battery_box_premium_hv_temperature", 21.5) - assert_state("sensor.byd_battery_box_premium_hv_designed_capacity", 16588) - assert_state("sensor.byd_battery_box_premium_hv_dc_voltage", 0.0) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Devices solar_net = device_registry.async_get_device( @@ -507,11 +348,14 @@ async def test_gen24_storage( assert storage.name == "BYD Battery-Box Premium HV" +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_primo_s0( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Primo dual inverter with S0 meter entities.""" @@ -523,64 +367,8 @@ async def test_primo_s0( mock_responses(aioclient_mock, fixture_set="primo_s0", inverter_ids=[1, 2]) config_entry = await setup_fronius_integration(hass, is_logger=True) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 31 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 47 - # logger - assert_state("sensor.solarnet_grid_export_tariff", 1) - assert_state("sensor.solarnet_co2_factor", 0.53) - assert_state("sensor.solarnet_grid_import_tariff", 1) - # inverter 1 - assert_state("sensor.primo_5_0_1_total_energy", 17114940) - assert_state("sensor.primo_5_0_1_energy_day", 22504) - assert_state("sensor.primo_5_0_1_dc_voltage", 452.3) - assert_state("sensor.primo_5_0_1_ac_power", 862) - assert_state("sensor.primo_5_0_1_error_code", 0) - assert_state("sensor.primo_5_0_1_dc_current", 4.23) - 
assert_state("sensor.primo_5_0_1_status_code", 7) - assert_state("sensor.primo_5_0_1_status_message", "running") - assert_state("sensor.primo_5_0_1_energy_year", 7532755.5) - assert_state("sensor.primo_5_0_1_ac_current", 3.85) - assert_state("sensor.primo_5_0_1_ac_voltage", 223.9) - assert_state("sensor.primo_5_0_1_frequency", 60) - assert_state("sensor.primo_5_0_1_led_color", 2) - assert_state("sensor.primo_5_0_1_led_state", 0) - # inverter 2 - assert_state("sensor.primo_3_0_1_total_energy", 5796010) - assert_state("sensor.primo_3_0_1_energy_day", 14237) - assert_state("sensor.primo_3_0_1_dc_voltage", 329.5) - assert_state("sensor.primo_3_0_1_ac_power", 296) - assert_state("sensor.primo_3_0_1_error_code", 0) - assert_state("sensor.primo_3_0_1_dc_current", 0.97) - assert_state("sensor.primo_3_0_1_status_code", 7) - assert_state("sensor.primo_3_0_1_status_message", "running") - assert_state("sensor.primo_3_0_1_energy_year", 3596193.25) - assert_state("sensor.primo_3_0_1_ac_current", 1.32) - assert_state("sensor.primo_3_0_1_ac_voltage", 223.6) - assert_state("sensor.primo_3_0_1_frequency", 60.01) - assert_state("sensor.primo_3_0_1_led_color", 2) - assert_state("sensor.primo_3_0_1_led_state", 0) - # meter - assert_state("sensor.s0_meter_at_inverter_1_meter_location", 1) - assert_state( - "sensor.s0_meter_at_inverter_1_meter_location_description", "consumption_path" - ) - assert_state("sensor.s0_meter_at_inverter_1_real_power", -2216.7487) - # power_flow - assert_state("sensor.solarnet_power_load", -2218.9349) - assert_state("sensor.solarnet_meter_mode", "vague-meter") - assert_state("sensor.solarnet_power_photovoltaics", 1834) - assert_state("sensor.solarnet_power_grid", 384.9349) - assert_state("sensor.solarnet_relative_self_consumption", 100) - assert_state("sensor.solarnet_relative_autonomy", 82.6523) - assert_state("sensor.solarnet_total_energy", 22910919.5) - assert_state("sensor.solarnet_energy_day", 36724) - assert_state("sensor.solarnet_energy_year", 11128933.25) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Devices solar_net = device_registry.async_get_device( diff --git a/tests/components/frontend/test_init.py b/tests/components/frontend/test_init.py index 5006adedd77..5a682277176 100644 --- a/tests/components/frontend/test_init.py +++ b/tests/components/frontend/test_init.py @@ -166,7 +166,7 @@ async def test_frontend_and_static(mock_http_client: TestClient) -> None: text = await resp.text() # Test we can retrieve frontend.js - frontendjs = re.search(r"(?P\/frontend_es5\/app.[A-Za-z0-9_-]{11}.js)", text) + frontendjs = re.search(r"(?P\/frontend_es5\/app.[A-Za-z0-9_-]{16}.js)", text) assert frontendjs is not None, text resp = await mock_http_client.get(frontendjs.groups(0)[0]) @@ -689,7 +689,7 @@ async def test_auth_authorize(mock_http_client: TestClient) -> None: # Test we can retrieve authorize.js authorizejs = re.search( - r"(?P\/frontend_latest\/authorize.[A-Za-z0-9_-]{11}.js)", text + r"(?P\/frontend_latest\/authorize.[A-Za-z0-9_-]{16}.js)", text ) assert authorizejs is not None, text diff --git a/tests/components/frontier_silicon/test_config_flow.py b/tests/components/frontier_silicon/test_config_flow.py index 04bd1febdf8..c92cf897fe6 100644 --- a/tests/components/frontier_silicon/test_config_flow.py +++ b/tests/components/frontier_silicon/test_config_flow.py @@ -26,6 +26,7 @@ MOCK_DISCOVERY = ssdp.SsdpServiceInfo( ssdp_udn="uuid:3dcc7100-f76c-11dd-87af-00226124ca30", ssdp_st="mock_st", ssdp_location="http://1.1.1.1/device", + 
ssdp_headers={"SPEAKER-NAME": "Speaker Name"}, upnp={"SPEAKER-NAME": "Speaker Name"}, ) @@ -34,6 +35,7 @@ INVALID_MOCK_DISCOVERY = ssdp.SsdpServiceInfo( ssdp_udn="uuid:3dcc7100-f76c-11dd-87af-00226124ca30", ssdp_st="mock_st", ssdp_location=None, + ssdp_headers={"SPEAKER-NAME": "Speaker Name"}, upnp={"SPEAKER-NAME": "Speaker Name"}, ) @@ -268,6 +270,11 @@ async def test_ssdp( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + flow = flows[0] + assert flow["context"]["title_placeholders"] == {"name": "Speaker Name"} + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {}, @@ -356,15 +363,7 @@ async def test_reauth_flow(hass: HomeAssistant, config_entry: MockConfigEntry) - config_entry.add_to_hass(hass) assert config_entry.data[CONF_PIN] == "1234" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "device_config" @@ -395,15 +394,7 @@ async def test_reauth_flow_friendly_name_error( config_entry.add_to_hass(hass) assert config_entry.data[CONF_PIN] == "1234" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "device_config" diff --git a/tests/components/fujitsu_fglair/conftest.py b/tests/components/fujitsu_fglair/conftest.py index b73007a566b..5974adbeb0d 100644 --- a/tests/components/fujitsu_fglair/conftest.py +++ b/tests/components/fujitsu_fglair/conftest.py @@ -7,7 +7,11 @@ from ayla_iot_unofficial import AylaApi from ayla_iot_unofficial.fujitsu_hvac import FanSpeed, FujitsuHVAC, OpMode, SwingMode import pytest -from homeassistant.components.fujitsu_fglair.const import CONF_EUROPE, DOMAIN +from homeassistant.components.fujitsu_fglair.const import ( + CONF_REGION, + DOMAIN, + REGION_DEFAULT, +) from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from tests.common import MockConfigEntry @@ -30,7 +34,7 @@ TEST_PROPERTY_VALUES = { @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.fujitsu_fglair.async_setup_entry", return_value=True @@ -57,15 +61,19 @@ def mock_ayla_api(mock_devices: list[AsyncMock]) -> Generator[AsyncMock]: @pytest.fixture -def mock_config_entry() -> MockConfigEntry: +def mock_config_entry(request: pytest.FixtureRequest) -> MockConfigEntry: """Return a regular config entry.""" + region = REGION_DEFAULT + if hasattr(request, "param"): + region = request.param + return MockConfigEntry( domain=DOMAIN, unique_id=TEST_USERNAME, data={ CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: False, + CONF_REGION: region, }, ) diff --git a/tests/components/fujitsu_fglair/test_climate.py b/tests/components/fujitsu_fglair/test_climate.py index fd016e4e226..daddc83a871 100644 --- a/tests/components/fujitsu_fglair/test_climate.py +++ b/tests/components/fujitsu_fglair/test_climate.py @@ 
-18,7 +18,7 @@ from homeassistant.components.climate import ( SWING_BOTH, HVACMode, ) -from homeassistant.components.fujitsu_fglair.const import ( +from homeassistant.components.fujitsu_fglair.climate import ( HA_TO_FUJI_FAN, HA_TO_FUJI_HVAC, HA_TO_FUJI_SWING, diff --git a/tests/components/fujitsu_fglair/test_config_flow.py b/tests/components/fujitsu_fglair/test_config_flow.py index fc6afd9b8f0..6c9ebd66e47 100644 --- a/tests/components/fujitsu_fglair/test_config_flow.py +++ b/tests/components/fujitsu_fglair/test_config_flow.py @@ -5,8 +5,12 @@ from unittest.mock import AsyncMock from ayla_iot_unofficial import AylaAuthError import pytest -from homeassistant.components.fujitsu_fglair.const import CONF_EUROPE, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.components.fujitsu_fglair.const import ( + CONF_REGION, + DOMAIN, + REGION_DEFAULT, +) +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType @@ -28,7 +32,7 @@ async def _initial_step(hass: HomeAssistant) -> FlowResult: { CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: False, + CONF_REGION: REGION_DEFAULT, }, ) @@ -45,7 +49,7 @@ async def test_full_flow( assert result["data"] == { CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: False, + CONF_REGION: REGION_DEFAULT, } @@ -94,7 +98,7 @@ async def test_form_exceptions( { CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: False, + CONF_REGION: REGION_DEFAULT, }, ) @@ -103,7 +107,7 @@ async def test_form_exceptions( assert result["data"] == { CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: False, + CONF_REGION: REGION_DEFAULT, } @@ -116,20 +120,7 @@ async def test_reauth_success( """Test reauth flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": "test"}, - }, - data={ - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: False, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -164,20 +155,7 @@ async def test_reauth_exceptions( """Test reauth flow when an exception occurs.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": "test"}, - }, - data={ - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: False, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/fujitsu_fglair/test_init.py b/tests/components/fujitsu_fglair/test_init.py index fa67ea08661..af51b222c19 100644 --- a/tests/components/fujitsu_fglair/test_init.py +++ b/tests/components/fujitsu_fglair/test_init.py @@ -1,17 +1,33 @@ """Test the initialization of fujitsu_fglair entities.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from ayla_iot_unofficial import AylaAuthError +from ayla_iot_unofficial.fujitsu_consts import FGLAIR_APP_CREDENTIALS from 
freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.fujitsu_fglair.const import API_REFRESH, DOMAIN -from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.components.fujitsu_fglair.const import ( + API_REFRESH, + API_TIMEOUT, + CONF_EUROPE, + CONF_REGION, + DOMAIN, + REGION_DEFAULT, + REGION_EU, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + CONF_PASSWORD, + CONF_USERNAME, + STATE_UNAVAILABLE, + Platform, +) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import aiohttp_client, entity_registry as er from . import entity_id, setup_integration +from .conftest import TEST_PASSWORD, TEST_USERNAME from tests.common import MockConfigEntry, async_fire_time_changed @@ -35,6 +51,63 @@ async def test_auth_failure( assert hass.states.get(entity_id(mock_devices[1])).state == STATE_UNAVAILABLE +@pytest.mark.parametrize( + "mock_config_entry", FGLAIR_APP_CREDENTIALS.keys(), indirect=True +) +async def test_auth_regions( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_devices: list[AsyncMock], +) -> None: + """Test that we use the correct credentials if europe is selected.""" + with patch( + "homeassistant.components.fujitsu_fglair.new_ayla_api", return_value=AsyncMock() + ) as new_ayla_api_patch: + await setup_integration(hass, mock_config_entry) + new_ayla_api_patch.assert_called_once_with( + TEST_USERNAME, + TEST_PASSWORD, + FGLAIR_APP_CREDENTIALS[mock_config_entry.data[CONF_REGION]][0], + FGLAIR_APP_CREDENTIALS[mock_config_entry.data[CONF_REGION]][1], + europe=mock_config_entry.data[CONF_REGION] == "EU", + websession=aiohttp_client.async_get_clientsession(hass), + timeout=API_TIMEOUT, + ) + + +@pytest.mark.parametrize("is_europe", [True, False]) +async def test_migrate_entry_v11_v12( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_ayla_api: AsyncMock, + is_europe: bool, + mock_devices: list[AsyncMock], +) -> None: + """Test migration from schema 1.1 to 1.2.""" + v11_config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_USERNAME, + data={ + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: is_europe, + }, + ) + + await setup_integration(hass, v11_config_entry) + updated_entry = hass.config_entries.async_get_entry(v11_config_entry.entry_id) + + assert updated_entry.state is ConfigEntryState.LOADED + assert updated_entry.version == 1 + assert updated_entry.minor_version == 2 + if is_europe: + assert updated_entry.data[CONF_REGION] is REGION_EU + else: + assert updated_entry.data[CONF_REGION] is REGION_DEFAULT + + async def test_device_auth_failure( hass: HomeAssistant, freezer: FrozenDateTimeFactory, diff --git a/tests/components/fyta/conftest.py b/tests/components/fyta/conftest.py index 2bcad9b3c80..299b96be959 100644 --- a/tests/components/fyta/conftest.py +++ b/tests/components/fyta/conftest.py @@ -2,7 +2,7 @@ from collections.abc import Generator from datetime import UTC, datetime -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from fyta_cli.fyta_models import Credentials, Plant import pytest @@ -46,6 +46,7 @@ def mock_fyta_connector(): tzinfo=UTC ) mock_fyta_connector.client = AsyncMock(autospec=True) + mock_fyta_connector.data = MagicMock() mock_fyta_connector.update_all_plants.return_value = plants 
mock_fyta_connector.plant_list = { 0: "Gummibaum", diff --git a/tests/components/fyta/fixtures/plant_status1.json b/tests/components/fyta/fixtures/plant_status1.json index f2e8dc9c970..600fc46608c 100644 --- a/tests/components/fyta/fixtures/plant_status1.json +++ b/tests/components/fyta/fixtures/plant_status1.json @@ -1,20 +1,24 @@ { "battery_level": 80, - "battery_status": true, + "low_battery": true, "last_updated": "2023-01-10 10:10:00", "light": 2, "light_status": 3, "nickname": "Gummibaum", + "nutrients_status": 3, "moisture": 61, "moisture_status": 3, "sensor_available": true, + "sensor_id": "FD:1D:B7:E3:D0:E2", + "sensor_update_available": false, "sw_version": "1.0", - "status": 3, + "status": 1, "online": true, "ph": null, "plant_id": 0, "plant_origin_path": "", "plant_thumb_path": "", + "is_productive_plant": false, "salinity": 1, "salinity_status": 4, "scientific_name": "Ficus elastica", diff --git a/tests/components/fyta/fixtures/plant_status2.json b/tests/components/fyta/fixtures/plant_status2.json index a5c2735ca7c..c39e2ac8685 100644 --- a/tests/components/fyta/fixtures/plant_status2.json +++ b/tests/components/fyta/fixtures/plant_status2.json @@ -1,20 +1,24 @@ { "battery_level": 80, - "battery_status": true, + "low_battery": true, "last_updated": "2023-01-02 10:10:00", "light": 2, "light_status": 3, "nickname": "Kakaobaum", + "nutrients_status": 3, "moisture": 61, "moisture_status": 3, "sensor_available": true, + "sensor_id": "FD:1D:B7:E3:D0:E3", + "sensor_update_available": false, "sw_version": "1.0", - "status": 3, + "status": 1, "online": true, "ph": 7, "plant_id": 0, "plant_origin_path": "", "plant_thumb_path": "", + "is_productive_plant": false, "salinity": 1, "salinity_status": 4, "scientific_name": "Theobroma cacao", diff --git a/tests/components/fyta/fixtures/plant_status3.json b/tests/components/fyta/fixtures/plant_status3.json new file mode 100644 index 00000000000..58e3e1b86a0 --- /dev/null +++ b/tests/components/fyta/fixtures/plant_status3.json @@ -0,0 +1,27 @@ +{ + "battery_level": 80, + "low_battery": true, + "last_updated": "2023-01-02 10:10:00", + "light": 2, + "light_status": 3, + "nickname": "Tomatenpflanze", + "nutrients_status": 0, + "moisture": 61, + "moisture_status": 3, + "sensor_available": true, + "sensor_id": "FD:1D:B7:E3:D0:E3", + "sensor_update_available": false, + "sw_version": "1.0", + "status": 1, + "online": true, + "ph": 7, + "plant_id": 0, + "plant_origin_path": "", + "plant_thumb_path": "", + "is_productive_plant": true, + "salinity": 1, + "salinity_status": 4, + "scientific_name": "Solanum lycopersicum", + "temperature": 25.2, + "temperature_status": 3 +} diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index cf6bcdb77ad..f1792cb7535 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -9,6 +9,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'fyta', 'entry_id': 'ce5f5431554d101905d31797e1232da8', 'minor_version': 2, @@ -17,6 +19,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'fyta_user', 'unique_id': None, 'version': 1, @@ -24,46 +28,70 @@ 'plant_data': dict({ '0': dict({ 'battery_level': 80.0, - 'battery_status': True, + 'fertilise_last': None, + 'fertilise_next': None, 'last_updated': '2023-01-10T10:10:00', 'light': 2.0, 'light_status': 3, + 'low_battery': 
True, 'moisture': 61.0, 'moisture_status': 3, 'name': 'Gummibaum', + 'notification_light': False, + 'notification_nutrition': False, + 'notification_temperature': False, + 'notification_water': False, + 'nutrients_status': 3, 'online': True, 'ph': None, 'plant_id': 0, 'plant_origin_path': '', 'plant_thumb_path': '', + 'productive_plant': False, + 'repotted': False, 'salinity': 1.0, 'salinity_status': 4, 'scientific_name': 'Ficus elastica', 'sensor_available': True, - 'status': 3, + 'sensor_id': 'FD:1D:B7:E3:D0:E2', + 'sensor_status': 0, + 'sensor_update_available': False, + 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, 'temperature_status': 3, }), '1': dict({ 'battery_level': 80.0, - 'battery_status': True, + 'fertilise_last': None, + 'fertilise_next': None, 'last_updated': '2023-01-02T10:10:00', 'light': 2.0, 'light_status': 3, + 'low_battery': True, 'moisture': 61.0, 'moisture_status': 3, 'name': 'Kakaobaum', + 'notification_light': False, + 'notification_nutrition': False, + 'notification_temperature': False, + 'notification_water': False, + 'nutrients_status': 3, 'online': True, 'ph': 7.0, 'plant_id': 0, 'plant_origin_path': '', 'plant_thumb_path': '', + 'productive_plant': False, + 'repotted': False, 'salinity': 1.0, 'salinity_status': 4, 'scientific_name': 'Theobroma cacao', 'sensor_available': True, - 'status': 3, + 'sensor_id': 'FD:1D:B7:E3:D0:E3', + 'sensor_status': 0, + 'sensor_update_available': False, + 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, 'temperature_status': 3, diff --git a/tests/components/fyta/snapshots/test_sensor.ambr b/tests/components/fyta/snapshots/test_sensor.ambr index 2e96de0a283..ef583dd28a6 100644 --- a/tests/components/fyta/snapshots/test_sensor.ambr +++ b/tests/components/fyta/snapshots/test_sensor.ambr @@ -386,7 +386,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'no_sensor', + 'state': 'doing_great', }) # --- # name: test_all_entities[sensor.gummibaum_salinity-entry] @@ -421,7 +421,7 @@ 'supported_features': 0, 'translation_key': 'salinity', 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-salinity', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.gummibaum_salinity-state] @@ -430,7 +430,7 @@ 'device_class': 'conductivity', 'friendly_name': 'Gummibaum Salinity', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.gummibaum_salinity', @@ -1052,7 +1052,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'no_sensor', + 'state': 'doing_great', }) # --- # name: test_all_entities[sensor.kakaobaum_salinity-entry] @@ -1087,7 +1087,7 @@ 'supported_features': 0, 'translation_key': 'salinity', 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.kakaobaum_salinity-state] @@ -1096,7 +1096,7 @@ 'device_class': 'conductivity', 'friendly_name': 'Kakaobaum Salinity', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.kakaobaum_salinity', diff --git a/tests/components/fyta/test_config_flow.py b/tests/components/fyta/test_config_flow.py index df0626d0af0..21101db8534 100644 --- a/tests/components/fyta/test_config_flow.py +++ b/tests/components/fyta/test_config_flow.py @@ -10,6 +10,7 @@ from fyta_cli.fyta_exceptions import ( import pytest from homeassistant import config_entries +from homeassistant.components.dhcp import DhcpServiceInfo from 
homeassistant.components.fyta.const import CONF_EXPIRATION, DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -20,6 +21,26 @@ from .const import ACCESS_TOKEN, EXPIRATION, PASSWORD, USERNAME from tests.common import MockConfigEntry +async def user_step( + hass: HomeAssistant, flow_id: str, mock_setup_entry: AsyncMock +) -> None: + """Test user step (helper function).""" + + result = await hass.config_entries.flow.async_configure( + flow_id, {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USERNAME + assert result["data"] == { + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + CONF_ACCESS_TOKEN: ACCESS_TOKEN, + CONF_EXPIRATION: EXPIRATION, + } + assert len(mock_setup_entry.mock_calls) == 1 + + async def test_user_flow( hass: HomeAssistant, mock_fyta_connector: AsyncMock, mock_setup_entry: AsyncMock ) -> None: @@ -31,20 +52,7 @@ async def test_user_flow( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD} - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == USERNAME - assert result2["data"] == { - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_EXPIRATION: EXPIRATION, - } - assert len(mock_setup_entry.mock_calls) == 1 + await user_step(hass, result["flow_id"], mock_setup_entry) @pytest.mark.parametrize( @@ -158,11 +166,7 @@ async def test_reauth( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -194,3 +198,27 @@ async def test_reauth( assert entry.data[CONF_PASSWORD] == "other_password" assert entry.data[CONF_ACCESS_TOKEN] == ACCESS_TOKEN assert entry.data[CONF_EXPIRATION] == EXPIRATION + + +async def test_dhcp_discovery( + hass: HomeAssistant, mock_fyta_connector: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test DHCP discovery flow.""" + + service_info = DhcpServiceInfo( + hostname="FYTA HUB", + ip="1.2.3.4", + macaddress="aabbccddeeff", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=service_info, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + await user_step(hass, result["flow_id"], mock_setup_entry) diff --git a/tests/components/fyta/test_sensor.py b/tests/components/fyta/test_sensor.py index e33c54695e5..07e3965e66f 100644 --- a/tests/components/fyta/test_sensor.py +++ b/tests/components/fyta/test_sensor.py @@ -5,16 +5,23 @@ from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory from fyta_cli.fyta_exceptions import FytaConnectionError, FytaPlantError +from fyta_cli.fyta_models import Plant import pytest from syrupy import SnapshotAssertion +from homeassistant.components.fyta.const import DOMAIN as FYTA_DOMAIN from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as 
er from . import setup_platform -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_object_fixture, + snapshot_platform, +) async def test_all_entities( @@ -54,3 +61,32 @@ async def test_connection_error( await hass.async_block_till_done() assert hass.states.get("sensor.gummibaum_plant_state").state == STATE_UNAVAILABLE + + +async def test_add_remove_entities( + hass: HomeAssistant, + mock_fyta_connector: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test if entities are added and old are removed.""" + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + + assert hass.states.get("sensor.gummibaum_plant_state").state == "doing_great" + + plants: dict[int, Plant] = { + 0: Plant.from_dict(load_json_object_fixture("plant_status1.json", FYTA_DOMAIN)), + 2: Plant.from_dict(load_json_object_fixture("plant_status3.json", FYTA_DOMAIN)), + } + mock_fyta_connector.update_all_plants.return_value = plants + mock_fyta_connector.plant_list = { + 0: "Kautschukbaum", + 2: "Tomatenpflanze", + } + + freezer.tick(delta=timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.kakaobaum_plant_state") is None + assert hass.states.get("sensor.tomatenpflanze_plant_state").state == "doing_great" diff --git a/tests/components/garages_amsterdam/__init__.py b/tests/components/garages_amsterdam/__init__.py index ff430c0e7b2..f721506b9b0 100644 --- a/tests/components/garages_amsterdam/__init__.py +++ b/tests/components/garages_amsterdam/__init__.py @@ -1 +1,12 @@ """Tests for the Garages Amsterdam integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the integration.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/garages_amsterdam/conftest.py b/tests/components/garages_amsterdam/conftest.py index fb59ba26569..93190d1d1ee 100644 --- a/tests/components/garages_amsterdam/conftest.py +++ b/tests/components/garages_amsterdam/conftest.py @@ -1,32 +1,85 @@ -"""Test helpers.""" +"""Fixtures for Garages Amsterdam integration tests.""" -from unittest.mock import Mock, patch +from collections.abc import Generator +from datetime import UTC, datetime +from unittest.mock import AsyncMock, patch +from odp_amsterdam import Garage, GarageCategory, VehicleType import pytest +from homeassistant.components.garages_amsterdam.const import DOMAIN -@pytest.fixture(autouse=True) -def mock_cases(): - """Mock garages_amsterdam garages.""" +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override setup entry.""" with patch( - "odp_amsterdam.ODPAmsterdam.all_garages", - return_value=[ - Mock( + "homeassistant.components.garages_amsterdam.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_garages_amsterdam() -> Generator[AsyncMock]: + """Mock garages_amsterdam garages.""" + with ( + patch( + "homeassistant.components.garages_amsterdam.ODPAmsterdam", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.garages_amsterdam.config_flow.ODPAmsterdam", + new=mock_client, + ), + ): + client = 
mock_client.return_value + client.all_garages.return_value = [ + Garage( + garage_id="test-id-1", garage_name="IJDok", + vehicle=VehicleType.CAR, + category=GarageCategory.GARAGE, + state="ok", free_space_short=100, free_space_long=10, short_capacity=120, long_capacity=60, - state="ok", + availability_pct=50.5, + longitude=1.111111, + latitude=2.222222, + updated_at=datetime(2023, 2, 23, 13, 44, 48, tzinfo=UTC), ), - Mock( + Garage( + garage_id="test-id-2", garage_name="Arena", - free_space_short=200, - free_space_long=20, - short_capacity=240, - long_capacity=80, + vehicle=VehicleType.CAR, + category=GarageCategory.GARAGE, state="error", + free_space_short=200, + free_space_long=None, + short_capacity=240, + long_capacity=None, + availability_pct=83.3, + longitude=3.333333, + latitude=4.444444, + updated_at=datetime(2023, 2, 23, 13, 44, 48, tzinfo=UTC), ), - ], - ) as mock_get_garages: - yield mock_get_garages + ] + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="monitor", + domain=DOMAIN, + data={ + "garage_name": "IJDok", + }, + unique_id="unique_thingy", + version=1, + ) diff --git a/tests/components/garages_amsterdam/snapshots/test_binary_sensor.ambr b/tests/components/garages_amsterdam/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..5f6511090ee --- /dev/null +++ b/tests/components/garages_amsterdam/snapshots/test_binary_sensor.ambr @@ -0,0 +1,49 @@ +# serializer version: 1 +# name: test_all_binary_sensors[binary_sensor.ijdok_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.ijdok_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state', + 'unique_id': 'IJDok-state', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensors[binary_sensor.ijdok_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'device_class': 'problem', + 'friendly_name': 'IJDok State', + }), + 'context': , + 'entity_id': 'binary_sensor.ijdok_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/garages_amsterdam/snapshots/test_sensor.ambr b/tests/components/garages_amsterdam/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..2c579631bae --- /dev/null +++ b/tests/components/garages_amsterdam/snapshots/test_sensor.ambr @@ -0,0 +1,199 @@ +# serializer version: 1 +# name: test_all_sensors[sensor.ijdok_long_parking_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ijdok_long_parking_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, 
+ 'original_icon': None, + 'original_name': 'Long parking capacity', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'long_capacity', + 'unique_id': 'IJDok-long_capacity', + 'unit_of_measurement': 'cars', + }) +# --- +# name: test_all_sensors[sensor.ijdok_long_parking_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'friendly_name': 'IJDok Long parking capacity', + 'unit_of_measurement': 'cars', + }), + 'context': , + 'entity_id': 'sensor.ijdok_long_parking_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_all_sensors[sensor.ijdok_long_parking_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ijdok_long_parking_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Long parking free space', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'free_space_long', + 'unique_id': 'IJDok-free_space_long', + 'unit_of_measurement': 'cars', + }) +# --- +# name: test_all_sensors[sensor.ijdok_long_parking_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'friendly_name': 'IJDok Long parking free space', + 'state_class': , + 'unit_of_measurement': 'cars', + }), + 'context': , + 'entity_id': 'sensor.ijdok_long_parking_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_all_sensors[sensor.ijdok_short_parking_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ijdok_short_parking_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Short parking capacity', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'short_capacity', + 'unique_id': 'IJDok-short_capacity', + 'unit_of_measurement': 'cars', + }) +# --- +# name: test_all_sensors[sensor.ijdok_short_parking_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'friendly_name': 'IJDok Short parking capacity', + 'unit_of_measurement': 'cars', + }), + 'context': , + 'entity_id': 'sensor.ijdok_short_parking_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '120', + }) +# --- +# name: test_all_sensors[sensor.ijdok_short_parking_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ijdok_short_parking_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Short parking free space', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'free_space_short', + 'unique_id': 'IJDok-free_space_short', + 'unit_of_measurement': 'cars', + }) +# --- +# name: test_all_sensors[sensor.ijdok_short_parking_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'friendly_name': 'IJDok Short parking free space', + 'state_class': , + 'unit_of_measurement': 'cars', + }), + 'context': , + 'entity_id': 'sensor.ijdok_short_parking_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- diff --git a/tests/components/garages_amsterdam/test_binary_sensor.py b/tests/components/garages_amsterdam/test_binary_sensor.py new file mode 100644 index 00000000000..b7d0333f7e3 --- /dev/null +++ b/tests/components/garages_amsterdam/test_binary_sensor.py @@ -0,0 +1,31 @@ +"""Tests the binary sensors provided by the Garages Amsterdam integration.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import snapshot_platform + + +async def test_all_binary_sensors( + hass: HomeAssistant, + mock_garages_amsterdam: AsyncMock, + mock_config_entry: AsyncMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test all binary sensors.""" + with patch( + "homeassistant.components.garages_amsterdam.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/garages_amsterdam/test_config_flow.py b/tests/components/garages_amsterdam/test_config_flow.py index 729d31e413c..68950c96cf0 100644 --- a/tests/components/garages_amsterdam/test_config_flow.py +++ b/tests/components/garages_amsterdam/test_config_flow.py @@ -1,39 +1,40 @@ """Test the Garages Amsterdam config flow.""" from http import HTTPStatus -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from aiohttp import ClientResponseError import pytest -from homeassistant import config_entries from homeassistant.components.garages_amsterdam.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -async def test_full_flow(hass: HomeAssistant) -> None: - """Test we get the form.""" +async def test_full_user_flow( + hass: HomeAssistant, + mock_garages_amsterdam: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full user configuration flow.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") - with patch( - 
"homeassistant.components.garages_amsterdam.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"garage_name": "IJDok"}, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"garage_name": "IJDok"}, + ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "IJDok" - assert "result" in result2 - assert result2["result"].unique_id == "IJDok" + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "IJDok" + assert result.get("data") == {"garage_name": "IJDok"} + assert len(mock_garages_amsterdam.all_garages.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -50,14 +51,14 @@ async def test_full_flow(hass: HomeAssistant) -> None: async def test_error_handling( side_effect: Exception, reason: str, hass: HomeAssistant ) -> None: - """Test we get the form.""" + """Test error handling in the config flow.""" with patch( "homeassistant.components.garages_amsterdam.config_flow.ODPAmsterdam.all_garages", side_effect=side_effect, ): result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result.get("type") is FlowResultType.ABORT assert result.get("reason") == reason diff --git a/tests/components/garages_amsterdam/test_init.py b/tests/components/garages_amsterdam/test_init.py new file mode 100644 index 00000000000..ed5469e5ff9 --- /dev/null +++ b/tests/components/garages_amsterdam/test_init.py @@ -0,0 +1,26 @@ +"""Tests for the Garages Amsterdam integration.""" + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_garages_amsterdam: AsyncMock, +) -> None: + """Test the Garages Amsterdam integration loads and unloads correctly.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/garages_amsterdam/test_sensor.py b/tests/components/garages_amsterdam/test_sensor.py new file mode 100644 index 00000000000..bc36401ea47 --- /dev/null +++ b/tests/components/garages_amsterdam/test_sensor.py @@ -0,0 +1,31 @@ +"""Tests the sensors provided by the Garages Amsterdam integration.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import snapshot_platform + + +async def test_all_sensors( + hass: HomeAssistant, + mock_garages_amsterdam: AsyncMock, + mock_config_entry: AsyncMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test all sensors.""" + with patch( + "homeassistant.components.garages_amsterdam.PLATFORMS", [Platform.SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/gardena_bluetooth/conftest.py b/tests/components/gardena_bluetooth/conftest.py index 882c9b1b090..d363e0e69f3 100644 --- a/tests/components/gardena_bluetooth/conftest.py +++ b/tests/components/gardena_bluetooth/conftest.py @@ -112,10 +112,5 @@ def mock_client( @pytest.fixture(autouse=True) -def enable_all_entities(): +def enable_all_entities(entity_registry_enabled_by_default: None) -> None: """Make sure all entities are enabled.""" - with patch( - "homeassistant.components.gardena_bluetooth.coordinator.GardenaBluetoothEntity.entity_registry_enabled_default", - new=Mock(return_value=True), - ): - yield diff --git a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr index 98cba151c52..10f23759fae 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr @@ -3,6 +3,11 @@ FlowResultSnapshot({ 'context': dict({ 'confirm_only': True, + 'discovery_key': dict({ + 'domain': 'bluetooth', + 'key': '00000000-0000-0000-0000-000000000001', + 'version': 1, + }), 'source': 'bluetooth', 'title_placeholders': dict({ 'name': 'Gardena Water Computer', @@ -18,6 +23,11 @@ FlowResultSnapshot({ 'context': dict({ 'confirm_only': True, + 'discovery_key': dict({ + 'domain': 'bluetooth', + 'key': '00000000-0000-0000-0000-000000000001', + 'version': 1, + }), 'source': 'bluetooth', 'title_placeholders': dict({ 'name': 'Gardena Water Computer', @@ -39,6 +49,15 @@ 'address': '00000000-0000-0000-0000-000000000001', }), 'disabled_by': None, + 'discovery_keys': dict({ + 'bluetooth': tuple( + dict({ + 'domain': 'bluetooth', + 'key': '00000000-0000-0000-0000-000000000001', + 'version': 1, + }), + ), + }), 'domain': 'gardena_bluetooth', 'entry_id': , 'minor_version': 1, @@ -47,10 +66,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'bluetooth', + 'subentries': list([ + ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, @@ -65,60 +88,6 @@ 'type': , }) # --- -# name: test_bluetooth_lost - FlowResultSnapshot({ - 'data_schema': None, - 'description_placeholders': dict({ - 'name': 'Timer', - }), - 'errors': None, - 'flow_id': , - 'handler': 'gardena_bluetooth', - 'last_step': None, - 'step_id': 'confirm', - 'type': , - }) -# --- -# name: test_bluetooth_lost.1 - FlowResultSnapshot({ - 'context': dict({ - 'confirm_only': True, - 'source': 'bluetooth', - 'title_placeholders': dict({ - 'name': 'Timer', - }), - 'unique_id': '00000000-0000-0000-0000-000000000001', - }), - 'data': dict({ - 'address': '00000000-0000-0000-0000-000000000001', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'gardena_bluetooth', - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ 
- 'address': '00000000-0000-0000-0000-000000000001', - }), - 'disabled_by': None, - 'domain': 'gardena_bluetooth', - 'entry_id': , - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'bluetooth', - 'title': 'Timer', - 'unique_id': '00000000-0000-0000-0000-000000000001', - 'version': 1, - }), - 'title': 'Timer', - 'type': , - 'version': 1, - }) -# --- # name: test_failed_connect FlowResultSnapshot({ 'data_schema': list([ @@ -248,6 +217,8 @@ 'address': '00000000-0000-0000-0000-000000000001', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'gardena_bluetooth', 'entry_id': , 'minor_version': 1, @@ -256,10 +227,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, diff --git a/tests/components/gardena_bluetooth/test_config_flow.py b/tests/components/gardena_bluetooth/test_config_flow.py index 3b4e9c242b3..b20395ec40f 100644 --- a/tests/components/gardena_bluetooth/test_config_flow.py +++ b/tests/components/gardena_bluetooth/test_config_flow.py @@ -31,6 +31,7 @@ async def test_user_selection( inject_bluetooth_service_info(hass, WATER_TIMER_SERVICE_INFO) inject_bluetooth_service_info(hass, WATER_TIMER_UNNAMED_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} diff --git a/tests/components/gdacs/snapshots/test_diagnostics.ambr b/tests/components/gdacs/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..5b6154307f7 --- /dev/null +++ b/tests/components/gdacs/snapshots/test_diagnostics.ambr @@ -0,0 +1,21 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'info': dict({ + 'categories': list([ + ]), + 'latitude': '**REDACTED**', + 'longitude': '**REDACTED**', + 'radius': 25, + 'scan_interval': 300.0, + 'unit_system': 'metric', + }), + 'service': dict({ + 'last_timestamp': None, + 'last_update': '2024-09-05T15:00:00', + 'last_update_successful': '2024-09-05T15:00:00', + 'status': 'OK', + 'total': 0, + }), + }) +# --- diff --git a/tests/components/gdacs/test_diagnostics.py b/tests/components/gdacs/test_diagnostics.py new file mode 100644 index 00000000000..3c6cf4080a6 --- /dev/null +++ b/tests/components/gdacs/test_diagnostics.py @@ -0,0 +1,33 @@ +"""Test GDACS diagnostics.""" + +from __future__ import annotations + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.freeze_time("2024-09-05 15:00:00") +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + config_entry: MockConfigEntry, +) -> None: + """Test config entry diagnostics.""" + with patch("aio_georss_client.feed.GeoRssFeed.update") as mock_feed_update: + mock_feed_update.return_value = "OK", [] + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await get_diagnostics_for_config_entry(hass, hass_client, 
config_entry) + assert result == snapshot diff --git a/tests/components/generic/test_camera.py b/tests/components/generic/test_camera.py index 59ff513ccc9..d3ef0a39241 100644 --- a/tests/components/generic/test_camera.py +++ b/tests/components/generic/test_camera.py @@ -275,7 +275,9 @@ async def test_limit_refetch( with ( pytest.raises(aiohttp.ServerTimeoutError), - patch("asyncio.timeout", side_effect=TimeoutError()), + patch.object( + client.session._connector, "connect", side_effect=asyncio.TimeoutError + ), ): resp = await client.get("/api/camera_proxy/camera.config_test") diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index e7af9383791..a882ca4cd8d 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -92,9 +92,9 @@ async def test_form( assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "user_confirm_still" client = await hass_client() - preview_id = result1["flow_id"] + preview_url = result1["description_placeholders"]["preview_url"] # Check the preview image works. - resp = await client.get(f"/api/generic/preview_flow_image/{preview_id}?t=1") + resp = await client.get(preview_url) assert resp.status == HTTPStatus.OK assert await resp.read() == fakeimgbytes_png result2 = await hass.config_entries.flow.async_configure( @@ -118,7 +118,7 @@ async def test_form( await hass.async_block_till_done() # Check that the preview image is disabled after. - resp = await client.get(f"/api/generic/preview_flow_image/{preview_id}") + resp = await client.get(preview_url) assert resp.status == HTTPStatus.NOT_FOUND assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -212,10 +212,10 @@ async def test_form_still_preview_cam_off( ) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "user_confirm_still" - preview_id = result1["flow_id"] + preview_url = result1["description_placeholders"]["preview_url"] # Try to view the image, should be unavailable. 
client = await hass_client() - resp = await client.get(f"/api/generic/preview_flow_image/{preview_id}?t=1") + resp = await client.get(preview_url) assert resp.status == HTTPStatus.SERVICE_UNAVAILABLE @@ -652,7 +652,8 @@ async def test_form_stream_worker_error( TESTDATA, ) assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"stream_source": "Some message"} + assert result2["errors"] == {"stream_source": "unknown_with_details"} + assert result2["description_placeholders"] == {"error": "Some message"} @respx.mock diff --git a/tests/components/generic_hygrostat/test_humidifier.py b/tests/components/generic_hygrostat/test_humidifier.py index 2beaf423201..33a8a0f37bd 100644 --- a/tests/components/generic_hygrostat/test_humidifier.py +++ b/tests/components/generic_hygrostat/test_humidifier.py @@ -3,6 +3,7 @@ import datetime from freezegun import freeze_time +from freezegun.api import FrozenDateTimeFactory import pytest import voluptuous as vol @@ -12,7 +13,7 @@ from homeassistant.components.generic_hygrostat import ( ) from homeassistant.components.humidifier import ( ATTR_HUMIDITY, - DOMAIN, + DOMAIN as HUMIDIFIER_DOMAIN, MODE_AWAY, MODE_NORMAL, SERVICE_SET_HUMIDITY, @@ -106,7 +107,7 @@ async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -124,7 +125,7 @@ async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: _setup_sensor(hass, 23) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 32}, blocking=True, @@ -150,7 +151,7 @@ async def test_humidifier_switch( assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -169,7 +170,7 @@ async def test_humidifier_switch( await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 32}, blocking=True, @@ -190,7 +191,7 @@ async def test_unique_id( await _setup_switch(hass, True) assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -221,7 +222,7 @@ async def setup_comp_0(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -247,7 +248,7 @@ async def setup_comp_2(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -268,7 +269,7 @@ async def test_unavailable_state(hass: HomeAssistant) -> None: """Test the setting of defaults to unknown.""" await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -295,7 +296,7 @@ async def test_setup_defaults_to_unknown(hass: HomeAssistant) -> None: """Test the setting of defaults to unknown.""" await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -344,7 +345,7 @@ async def test_get_modes(hass: HomeAssistant) -> None: async def test_set_target_humidity(hass: HomeAssistant) -> None: """Test the setting of the target humidity.""" await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, 
SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 40}, blocking=True, @@ -354,7 +355,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get("humidity") == 40 with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: None}, blocking=True, @@ -368,14 +369,14 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: async def test_set_away_mode(hass: HomeAssistant) -> None: """Test the setting away mode.""" await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -392,14 +393,14 @@ async def test_set_away_mode_and_restore_prev_humidity(hass: HomeAssistant) -> N Verify original humidity is restored. """ await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -408,7 +409,7 @@ async def test_set_away_mode_and_restore_prev_humidity(hass: HomeAssistant) -> N state = hass.states.get(ENTITY) assert state.attributes.get("humidity") == 35 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -427,21 +428,21 @@ async def test_set_away_mode_twice_and_restore_prev_humidity( Verify original humidity is restored. 
""" await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -450,7 +451,7 @@ async def test_set_away_mode_twice_and_restore_prev_humidity( state = hass.states.get(ENTITY) assert state.attributes.get("humidity") == 35 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -520,8 +521,9 @@ async def test_set_target_humidity_humidifier_on(hass: HomeAssistant) -> None: calls = await _setup_switch(hass, False) _setup_sensor(hass, 36) await hass.async_block_till_done() + calls.clear() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 45}, blocking=True, @@ -540,8 +542,9 @@ async def test_set_target_humidity_humidifier_off(hass: HomeAssistant) -> None: calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) await hass.async_block_till_done() + calls.clear() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 36}, blocking=True, @@ -561,7 +564,7 @@ async def test_humidity_change_humidifier_on_within_tolerance( """Test if humidity change doesn't turn on within tolerance.""" calls = await _setup_switch(hass, False) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, @@ -579,7 +582,7 @@ async def test_humidity_change_humidifier_on_outside_tolerance( """Test if humidity change turn humidifier on outside dry tolerance.""" calls = await _setup_switch(hass, False) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, @@ -601,7 +604,7 @@ async def test_humidity_change_humidifier_off_within_tolerance( """Test if humidity change doesn't turn off within tolerance.""" calls = await _setup_switch(hass, True) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 46}, blocking=True, @@ -619,7 +622,7 @@ async def test_humidity_change_humidifier_off_outside_tolerance( """Test if humidity change turn humidifier off outside wet tolerance.""" calls = await _setup_switch(hass, True) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 46}, blocking=True, @@ -641,14 +644,14 @@ async def test_operation_mode_humidify(hass: HomeAssistant) -> None: Switch turns on when humidity below setpoint and mode changes. 
""" await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 45}, blocking=True, @@ -658,7 +661,7 @@ async def test_operation_mode_humidify(hass: HomeAssistant) -> None: await hass.async_block_till_done() calls = await _setup_switch(hass, False) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -693,7 +696,7 @@ async def setup_comp_3(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -719,7 +722,7 @@ async def test_set_target_humidity_dry_off(hass: HomeAssistant) -> None: _setup_sensor(hass, 50) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 55}, blocking=True, @@ -740,14 +743,14 @@ async def test_turn_away_mode_on_drying(hass: HomeAssistant) -> None: _setup_sensor(hass, 50) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 34}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -768,7 +771,7 @@ async def test_operation_mode_dry(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -778,7 +781,7 @@ async def test_operation_mode_dry(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -872,7 +875,7 @@ async def test_running_when_operating_mode_is_off_2(hass: HomeAssistant) -> None _setup_sensor(hass, 45) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -893,7 +896,7 @@ async def test_no_state_change_when_operation_mode_off_2(hass: HomeAssistant) -> _setup_sensor(hass, 30) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -910,7 +913,7 @@ async def setup_comp_4(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -920,7 +923,7 @@ async def setup_comp_4(hass: HomeAssistant) -> None: "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, "device_class": "dehumidifier", - "min_cycle_duration": datetime.timedelta(minutes=10), + "min_cycle_duration": {"minutes": 10}, "initial_state": True, "target_humidity": 40, } @@ -1005,7 +1008,7 @@ async def test_mode_change_dry_trigger_off_not_long_enough(hass: HomeAssistant) await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1025,7 +1028,7 @@ async def 
test_mode_change_dry_trigger_on_not_long_enough(hass: HomeAssistant) - _setup_sensor(hass, 35) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1035,7 +1038,7 @@ async def test_mode_change_dry_trigger_on_not_long_enough(hass: HomeAssistant) - await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1053,7 +1056,7 @@ async def setup_comp_6(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1062,7 +1065,7 @@ async def setup_comp_6(hass: HomeAssistant) -> None: "wet_tolerance": 3, "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, - "min_cycle_duration": datetime.timedelta(minutes=10), + "min_cycle_duration": {"minutes": 10}, "initial_state": True, "target_humidity": 40, } @@ -1154,7 +1157,7 @@ async def test_mode_change_humidifier_trigger_off_not_long_enough( assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1178,7 +1181,7 @@ async def test_mode_change_humidifier_trigger_on_not_long_enough( assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1191,7 +1194,7 @@ async def test_mode_change_humidifier_trigger_on_not_long_enough( assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1209,7 +1212,7 @@ async def setup_comp_7(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1219,8 +1222,8 @@ async def setup_comp_7(hass: HomeAssistant) -> None: "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, "device_class": "dehumidifier", - "min_cycle_duration": datetime.timedelta(minutes=15), - "keep_alive": datetime.timedelta(minutes=10), + "min_cycle_duration": {"minutes": 15}, + "keep_alive": {"minutes": 10}, "initial_state": True, "target_humidity": 40, } @@ -1276,7 +1279,7 @@ async def setup_comp_8(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1285,8 +1288,8 @@ async def setup_comp_8(hass: HomeAssistant) -> None: "wet_tolerance": 3, "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, - "min_cycle_duration": datetime.timedelta(minutes=15), - "keep_alive": datetime.timedelta(minutes=10), + "min_cycle_duration": {"minutes": 15}, + "keep_alive": {"minutes": 10}, "initial_state": True, "target_humidity": 40, } @@ -1341,7 +1344,7 @@ async def test_float_tolerance_values(hass: HomeAssistant) -> None: """Test if dehumidifier does not turn on within floating point tolerance.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1367,7 +1370,7 @@ async def test_float_tolerance_values_2(hass: HomeAssistant) -> None: """Test if dehumidifier turns off when oudside of floating point tolerance values.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": 
"generic_hygrostat", @@ -1398,7 +1401,7 @@ async def test_custom_setup_params(hass: HomeAssistant) -> None: await hass.async_block_till_done() result = await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1438,7 +1441,7 @@ async def test_restore_state(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1476,7 +1479,7 @@ async def test_restore_state_target_humidity(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1519,7 +1522,7 @@ async def test_restore_state_and_return_to_normal(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1539,7 +1542,7 @@ async def test_restore_state_and_return_to_normal(hass: HomeAssistant) -> None: assert state.state == STATE_OFF await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -1574,7 +1577,7 @@ async def test_no_restore_state(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1620,7 +1623,7 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: async def _setup_humidifier(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1662,7 +1665,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: await hass.async_block_till_done() await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1684,7 +1687,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: # Switch to Away mode await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_MODE: MODE_AWAY}, blocking=True, @@ -1700,7 +1703,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: # Change target humidity await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_HUMIDITY: 42}, blocking=True, @@ -1716,7 +1719,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: # Return to Normal mode await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -1733,7 +1736,9 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_comp_1") async def test_sensor_stale_duration( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, ) -> None: """Test turn off on sensor stale.""" @@ -1745,7 +1750,7 @@ async def test_sensor_stale_duration( assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1765,7 +1770,7 @@ async def test_sensor_stale_duration( assert hass.states.get(humidifier_switch).state == STATE_OFF await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, 
ATTR_HUMIDITY: 32}, blocking=True, @@ -1775,14 +1780,31 @@ async def test_sensor_stale_duration( assert hass.states.get(humidifier_switch).state == STATE_ON # Wait 11 minutes - async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(minutes=11)) + freezer.tick(datetime.timedelta(minutes=11)) + async_fire_time_changed(hass) await hass.async_block_till_done() # 11 minutes later, no news from the sensor : emergency cut off assert hass.states.get(humidifier_switch).state == STATE_OFF assert "emergency" in caplog.text - # Updated value from sensor received + # Updated value from sensor received (same value) + _setup_sensor(hass, 23) + await hass.async_block_till_done() + + # A new value has arrived, the humidifier should go ON + assert hass.states.get(humidifier_switch).state == STATE_ON + + # Wait 11 minutes + freezer.tick(datetime.timedelta(minutes=11)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # 11 minutes later, no news from the sensor : emergency cut off + assert hass.states.get(humidifier_switch).state == STATE_OFF + assert "emergency" in caplog.text + + # Updated value from sensor received (new value) _setup_sensor(hass, 24) await hass.async_block_till_done() @@ -1791,7 +1813,7 @@ async def test_sensor_stale_duration( # Manual turn off await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, diff --git a/tests/components/generic_thermostat/snapshots/test_config_flow.ambr b/tests/components/generic_thermostat/snapshots/test_config_flow.ambr index d515d52a81b..ed757d1c2ae 100644 --- a/tests/components/generic_thermostat/snapshots/test_config_flow.ambr +++ b/tests/components/generic_thermostat/snapshots/test_config_flow.ambr @@ -18,6 +18,25 @@ 'type': , }) # --- +# name: test_config_flow_preset_accepts_float[create_entry] + FlowResultSnapshot({ + 'result': ConfigEntrySnapshot({ + 'title': 'My thermostat', + }), + 'title': 'My thermostat', + 'type': , + }) +# --- +# name: test_config_flow_preset_accepts_float[init] + FlowResultSnapshot({ + 'type': , + }) +# --- +# name: test_config_flow_preset_accepts_float[presets] + FlowResultSnapshot({ + 'type': , + }) +# --- # name: test_options[create_entry] FlowResultSnapshot({ 'result': True, diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index f1c41270a2f..39435f154c4 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -11,7 +11,7 @@ from homeassistant import config as hass_config from homeassistant.components import input_boolean, switch from homeassistant.components.climate import ( ATTR_PRESET_MODE, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, PRESET_ACTIVITY, PRESET_AWAY, PRESET_COMFORT, @@ -122,7 +122,7 @@ async def test_heater_input_boolean(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -160,7 +160,7 @@ async def test_heater_switch( assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -192,7 +192,7 @@ async def test_unique_id( _setup_switch(hass, True) assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -221,7 +221,7 @@ async def setup_comp_2(hass: HomeAssistant) -> None: hass.config.units = METRIC_SYSTEM assert await async_setup_component( hass, - DOMAIN, + 
CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -248,7 +248,7 @@ async def test_setup_defaults_to_unknown(hass: HomeAssistant) -> None: hass.config.units = METRIC_SYSTEM await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -272,7 +272,7 @@ async def test_setup_gets_current_temp_from_sensor(hass: HomeAssistant) -> None: await hass.async_block_till_done() await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -618,7 +618,7 @@ async def setup_comp_3(hass: HomeAssistant) -> None: hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -774,7 +774,7 @@ async def _setup_thermostat_with_min_cycle_duration( hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -927,7 +927,7 @@ async def setup_comp_7(hass: HomeAssistant) -> None: hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1002,7 +1002,7 @@ async def setup_comp_8(hass: HomeAssistant) -> None: hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1076,7 +1076,7 @@ async def setup_comp_9(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1110,7 +1110,7 @@ async def test_custom_setup_params(hass: HomeAssistant) -> None: """Test the setup with custom parameters.""" result = await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1151,7 +1151,7 @@ async def test_restore_state(hass: HomeAssistant, hvac_mode) -> None: await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1189,7 +1189,7 @@ async def test_no_restore_state(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1220,7 +1220,7 @@ async def test_initial_hvac_off_force_heater_off(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1274,7 +1274,7 @@ async def test_restore_will_turn_off_(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1319,7 +1319,7 @@ async def test_restore_will_turn_off_when_loaded_second(hass: HomeAssistant) -> await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1379,7 +1379,7 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: async def _setup_climate(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1415,7 +1415,7 @@ async def test_reload(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", diff --git 
a/tests/components/generic_thermostat/test_config_flow.py b/tests/components/generic_thermostat/test_config_flow.py index 7a7fdabc6e6..561870ad3d4 100644 --- a/tests/components/generic_thermostat/test_config_flow.py +++ b/tests/components/generic_thermostat/test_config_flow.py @@ -132,3 +132,51 @@ async def test_options(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None # Check config entry is reloaded with new options await hass.async_block_till_done() assert hass.states.get("climate.my_thermostat") == snapshot(name="without_away") + + +async def test_config_flow_preset_accepts_float( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test the config flow with preset is a float.""" + with patch( + "homeassistant.components.generic_thermostat.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result == snapshot(name="init", include=SNAPSHOT_FLOW_PROPS) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: "My thermostat", + CONF_HEATER: "switch.run", + CONF_SENSOR: "sensor.temperature", + CONF_AC_MODE: False, + CONF_COLD_TOLERANCE: 0.3, + CONF_HOT_TOLERANCE: 0.3, + }, + ) + assert result == snapshot(name="presets", include=SNAPSHOT_FLOW_PROPS) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PRESETS[PRESET_AWAY]: 10.4, + }, + ) + assert result == snapshot(name="create_entry", include=SNAPSHOT_FLOW_PROPS) + + await hass.async_block_till_done() + + assert len(mock_setup_entry.mock_calls) == 1 + assert result["options"] == { + "ac_mode": False, + "away_temp": 10.4, + "cold_tolerance": 0.3, + "heater": "switch.run", + "hot_tolerance": 0.3, + "name": "My thermostat", + "target_sensor": "sensor.temperature", + } diff --git a/tests/components/geniushub/__init__.py b/tests/components/geniushub/__init__.py index 15886486e38..ed06642d339 100644 --- a/tests/components/geniushub/__init__.py +++ b/tests/components/geniushub/__init__.py @@ -1 +1,13 @@ """Tests for the geniushub integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/geniushub/conftest.py b/tests/components/geniushub/conftest.py index 125f1cfa80c..304d7555a8c 100644 --- a/tests/components/geniushub/conftest.py +++ b/tests/components/geniushub/conftest.py @@ -1,15 +1,16 @@ """GeniusHub tests configuration.""" from collections.abc import Generator -from unittest.mock import patch +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch +from geniushubclient import GeniusDevice, GeniusZone import pytest from homeassistant.components.geniushub.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME -from tests.common import MockConfigEntry -from tests.components.smhi.common import AsyncMock +from tests.common import MockConfigEntry, load_json_array_fixture @pytest.fixture @@ -38,6 +39,38 @@ def mock_geniushub_client() -> Generator[AsyncMock]: yield client +@pytest.fixture(scope="package") +def zones() -> list[dict[str, Any]]: + """Return a list of zones.""" + return 
load_json_array_fixture("zones_cloud_test_data.json", DOMAIN) + + +@pytest.fixture(scope="package") +def devices() -> list[dict[str, Any]]: + """Return a list of devices.""" + return load_json_array_fixture("devices_cloud_test_data.json", DOMAIN) + + +@pytest.fixture +def mock_geniushub_cloud( + zones: list[dict[str, Any]], devices: list[dict[str, Any]] +) -> Generator[MagicMock]: + """Mock a GeniusHub.""" + with patch( + "homeassistant.components.geniushub.GeniusHub", + autospec=True, + ) as mock_client: + client = mock_client.return_value + genius_zones = [GeniusZone(z["id"], z, client) for z in zones] + client.zone_objs = genius_zones + client._zones = genius_zones + genius_devices = [GeniusDevice(d["id"], d, client) for d in devices] + client.device_objs = genius_devices + client._devices = genius_devices + client.api_version = 1 + yield client + + @pytest.fixture def mock_local_config_entry() -> MockConfigEntry: """Mock a local config entry.""" @@ -62,4 +95,5 @@ def mock_cloud_config_entry() -> MockConfigEntry: data={ CONF_TOKEN: "abcdef", }, + entry_id="01J71MQF0EC62D620DGYNG2R8H", ) diff --git a/tests/components/geniushub/fixtures/devices_cloud_test_data.json b/tests/components/geniushub/fixtures/devices_cloud_test_data.json new file mode 100644 index 00000000000..92fd2c33811 --- /dev/null +++ b/tests/components/geniushub/fixtures/devices_cloud_test_data.json @@ -0,0 +1,151 @@ +[ + { + "id": "4", + "type": "Smart Plug", + "assignedZones": [{ "name": "Bedroom Socket" }], + "state": { "outputOnOff": "True" } + }, + { + "id": "6", + "type": "Smart Plug", + "assignedZones": [{ "name": "Kitchen Socket" }], + "state": { "outputOnOff": "True" } + }, + { + "id": "11", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Lounge" }], + "state": { "batteryLevel": 43, "setTemperature": 4 } + }, + { + "id": "16", + "type": "Room Sensor", + "assignedZones": [{ "name": "Guest room" }], + "state": { + "batteryLevel": 100, + "measuredTemperature": 21, + "luminance": 29, + "occupancyTrigger": 255 + } + }, + { + "id": "17", + "type": "Room Sensor", + "assignedZones": [{ "name": "Ensuite" }], + "state": { + "batteryLevel": 100, + "measuredTemperature": 21, + "luminance": 32, + "occupancyTrigger": 0 + } + }, + { + "id": "18", + "type": "Room Sensor", + "assignedZones": [{ "name": "Bedroom" }], + "state": { + "batteryLevel": 36, + "measuredTemperature": 21.5, + "luminance": 1, + "occupancyTrigger": 0 + } + }, + { + "id": "20", + "type": "Room Sensor", + "assignedZones": [{ "name": "Kitchen" }], + "state": { + "batteryLevel": 100, + "measuredTemperature": 21.5, + "luminance": 1, + "occupancyTrigger": 0 + } + }, + { + "id": "21", + "type": "Room Sensor", + "assignedZones": [{ "name": "Hall" }], + "state": { + "batteryLevel": 100, + "measuredTemperature": 21, + "luminance": 33, + "occupancyTrigger": 0 + } + }, + { + "id": "22", + "type": "Single Channel Receiver", + "assignedZones": [{ "name": "East Berlin" }], + "state": { "outputOnOff": "False" } + }, + { + "id": "50", + "type": "Room Sensor", + "assignedZones": [{ "name": "Study" }], + "state": { + "batteryLevel": 100, + "measuredTemperature": 22, + "luminance": 34, + "occupancyTrigger": 0 + } + }, + { + "id": "53", + "type": "Room Sensor", + "assignedZones": [{ "name": "Lounge" }], + "state": { + "batteryLevel": 28, + "measuredTemperature": 0, + "luminance": 0, + "occupancyTrigger": 0 + } + }, + { + "id": "56", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Kitchen" }], + "state": { "batteryLevel": 55, "setTemperature": 4 } + }, + { + 
"id": "68", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Hall" }], + "state": { "batteryLevel": 92, "setTemperature": 4 } + }, + { + "id": "78", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Bedroom" }], + "state": { "batteryLevel": 42, "setTemperature": 4 } + }, + { + "id": "85", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Study" }], + "state": { "batteryLevel": 61, "setTemperature": 4 } + }, + { + "id": "86", + "type": "Smart Plug", + "assignedZones": [{ "name": "Study Socket" }], + "state": { "outputOnOff": "False" } + }, + { + "id": "88", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Ensuite" }], + "state": { "batteryLevel": 49, "setTemperature": 4 } + }, + { + "id": "89", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Kitchen" }], + "state": { "batteryLevel": 48, "setTemperature": 4 } + }, + { + "id": "90", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Guest room" }], + "state": { "batteryLevel": 92, "setTemperature": 4 } + } +] diff --git a/tests/components/geniushub/fixtures/zones_cloud_test_data.json b/tests/components/geniushub/fixtures/zones_cloud_test_data.json new file mode 100644 index 00000000000..00d3109cf6e --- /dev/null +++ b/tests/components/geniushub/fixtures/zones_cloud_test_data.json @@ -0,0 +1,1069 @@ +[ + { + "id": 0, + "name": "West Berlin", + "output": 0, + "type": "manager", + "mode": "off", + "schedule": { "timer": {}, "footprint": {} } + }, + { + "id": 1, + "name": "Lounge", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 20, + "setpoint": 4, + "override": { "duration": 0, "setpoint": 20 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "monday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { 
"end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "tuesday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "wednesday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "thursday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "friday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "saturday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + } + } + } + } + }, + { + "id": 2, + "name": "Hall", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21, + "setpoint": 4, + "occupied": "False", + "override": { "duration": 0, "setpoint": 20 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 73800, "start": 68400, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 37800, "start": 32400, "setpoint": 20 }, + { "end": 75600, "start": 56700, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 43500, "start": 31800, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 34200, "start": 27300, "setpoint": 20 }, + { "end": 75600, "start": 60900, "setpoint": 20 }, + 
{ "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 48300, "start": 28800, "setpoint": 20 }, + { "end": 75600, "start": 75300, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 42000, "start": 28500, "setpoint": 20 }, + { "end": 70800, "start": 53700, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 64500, "start": 28500, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 63900, "start": 53100, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 3, + "name": "Kitchen", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21.5, + "setpoint": 4, + "occupied": "False", + "override": { "duration": 0, "setpoint": 20 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 73800, "start": 68400, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 38100, "start": 29100, "setpoint": 20 }, + { "end": 75600, "start": 56700, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 51600, "start": 32400, "setpoint": 20 }, + { "end": 74400, "start": 60600, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + 
"heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 33300, "start": 27300, "setpoint": 20 }, + { "end": 75600, "start": 58800, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 48600, "start": 28800, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 71400, "start": 56400, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 74400, "start": 40800, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 63300, "start": 29700, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 5, + "name": "Ensuite", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21, + "setpoint": 4, + "occupied": "False", + "override": { "duration": 0, "setpoint": 28 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "wednesday": { + 
"defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 7, + "name": "Guest room", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21, + "setpoint": 4, + "occupied": "True", + "override": { "duration": 0, "setpoint": 20 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] 
+ }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 27, + "name": "Bedroom Socket", + "output": 1, + "type": "on / off", + "mode": "timer", + "setpoint": "True", + "override": { "duration": 0, "setpoint": "True" }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "monday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "tuesday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "wednesday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "thursday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "friday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "saturday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + } + } + }, + "footprint": {} + } + }, + { + "id": 28, + "name": "Kitchen Socket", + "output": 1, + "type": "on / off", + "mode": "timer", + "setpoint": "True", + "override": { "duration": 0, "setpoint": "True" }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "monday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "tuesday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "wednesday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "thursday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "friday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "saturday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + } + } + }, + "footprint": {} + } + }, + { + "id": 29, + "name": "Bedroom", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21.5, + "setpoint": 4, + "override": { "duration": 0, "setpoint": 23.5 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { 
"end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 19.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 30, + "name": "Study", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 22, + "setpoint": 4, + "occupied": "False", + "override": { "duration": 0, "setpoint": 28 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 
73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 32, + "name": "Study Socket", + "output": 0, + "type": "on / off", + "mode": "off", + "setpoint": "False", + "override": { "duration": 0, "setpoint": "True" }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "monday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "tuesday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "wednesday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "thursday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "friday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "saturday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + } + } + }, + "footprint": {} + } + } +] diff --git a/tests/components/geniushub/snapshots/test_binary_sensor.ambr b/tests/components/geniushub/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..fcc256b5232 --- /dev/null +++ b/tests/components/geniushub/snapshots/test_binary_sensor.ambr @@ -0,0 
+1,50 @@ +# serializer version: 1 +# name: test_cloud_all_sensors[binary_sensor.single_channel_receiver_22-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.single_channel_receiver_22', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Single Channel Receiver 22', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_22', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[binary_sensor.single_channel_receiver_22-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'East Berlin', + 'friendly_name': 'Single Channel Receiver 22', + 'state': dict({ + }), + }), + 'context': , + 'entity_id': 'binary_sensor.single_channel_receiver_22', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/geniushub/snapshots/test_climate.ambr b/tests/components/geniushub/snapshots/test_climate.ambr new file mode 100644 index 00000000000..eb372de784e --- /dev/null +++ b/tests/components/geniushub/snapshots/test_climate.ambr @@ -0,0 +1,569 @@ +# serializer version: 1 +# name: test_cloud_all_sensors[climate.bedroom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.bedroom', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Bedroom', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_29', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'Bedroom', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'override': dict({ + 'duration': 0, + 'setpoint': 23.5, + }), + 'temperature': 21.5, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.ensuite-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 
'entity_category': None, + 'entity_id': 'climate.ensuite', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Ensuite', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_5', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.ensuite-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21, + 'friendly_name': 'Ensuite', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'False', + 'override': dict({ + 'duration': 0, + 'setpoint': 28, + }), + 'temperature': 21, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.ensuite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.guest_room-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.guest_room', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Guest room', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_7', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.guest_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21, + 'friendly_name': 'Guest room', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'True', + 'override': dict({ + 'duration': 0, + 'setpoint': 20, + }), + 'temperature': 21, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.guest_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.hall-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.hall', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Hall', + 'platform': 
'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.hall-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21, + 'friendly_name': 'Hall', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'False', + 'override': dict({ + 'duration': 0, + 'setpoint': 20, + }), + 'temperature': 21, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.hall', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.kitchen-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.kitchen', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Kitchen', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_3', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'Kitchen', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'False', + 'override': dict({ + 'duration': 0, + 'setpoint': 20, + }), + 'temperature': 21.5, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.lounge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.lounge', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Lounge', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.lounge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20, + 'friendly_name': 
'Lounge', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'override': dict({ + 'duration': 0, + 'setpoint': 20, + }), + 'temperature': 20, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.lounge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.study-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.study', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Study', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_30', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.study-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22, + 'friendly_name': 'Study', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'False', + 'override': dict({ + 'duration': 0, + 'setpoint': 28, + }), + 'temperature': 22, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.study', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/geniushub/snapshots/test_sensor.ambr b/tests/components/geniushub/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..874f24cff95 --- /dev/null +++ b/tests/components/geniushub/snapshots/test_sensor.ambr @@ -0,0 +1,954 @@ +# serializer version: 1 +# name: test_cloud_all_sensors[sensor.geniushub_errors-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.geniushub_errors', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'GeniusHub Errors', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_Errors', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[sensor.geniushub_errors-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'error_list': list([ + ]), + 'friendly_name': 'GeniusHub Errors', + }), + 'context': , + 'entity_id': 'sensor.geniushub_errors', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_cloud_all_sensors[sensor.geniushub_information-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.geniushub_information', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'GeniusHub Information', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_Information', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[sensor.geniushub_information-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'GeniusHub Information', + 'information_list': list([ + ]), + }), + 'context': , + 'entity_id': 'sensor.geniushub_information', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_cloud_all_sensors[sensor.geniushub_warnings-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.geniushub_warnings', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'GeniusHub Warnings', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_Warnings', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[sensor.geniushub_warnings-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'GeniusHub Warnings', + 'warning_list': list([ + ]), + }), + 'context': , + 'entity_id': 'sensor.geniushub_warnings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_11-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_11', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-40', + 'original_name': 'Radiator Valve 11', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_11', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_11-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Lounge', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 11', + 'icon': 'mdi:battery-40', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_11', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '43', + }) +# --- +# 
name: test_cloud_all_sensors[sensor.radiator_valve_56-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_56', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-50', + 'original_name': 'Radiator Valve 56', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_56', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_56-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Kitchen', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 56', + 'icon': 'mdi:battery-50', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_56', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_68-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_68', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-90', + 'original_name': 'Radiator Valve 68', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_68', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_68-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Hall', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 68', + 'icon': 'mdi:battery-90', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_68', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_78-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_78', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-40', + 'original_name': 'Radiator Valve 78', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_78', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_78-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Bedroom', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 78', + 'icon': 
'mdi:battery-40', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_78', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '42', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_85-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_85', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-60', + 'original_name': 'Radiator Valve 85', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_85', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_85-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Study', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 85', + 'icon': 'mdi:battery-60', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_85', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '61', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_88-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_88', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-50', + 'original_name': 'Radiator Valve 88', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_88', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_88-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Ensuite', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 88', + 'icon': 'mdi:battery-50', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_88', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_89-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_89', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-50', + 'original_name': 'Radiator Valve 89', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_89', + 
'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_89-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Kitchen', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 89', + 'icon': 'mdi:battery-50', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_89', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '48', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_90-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_90', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-90', + 'original_name': 'Radiator Valve 90', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_90', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_90-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Guest room', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 90', + 'icon': 'mdi:battery-90', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_90', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_16-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_16', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 16', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_16', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_16-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Guest room', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 16', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 29, + 'measured_temperature': 21, + 'occupancy_trigger': 255, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_16', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_17-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_17', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 17', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_17', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_17-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Ensuite', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 17', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 32, + 'measured_temperature': 21, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_17', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_18-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_18', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-alert', + 'original_name': 'Room Sensor 18', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_18', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_18-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Bedroom', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 18', + 'icon': 'mdi:battery-alert', + 'state': dict({ + 'luminance': 1, + 'measured_temperature': 21.5, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_18', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '36', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_20-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_20', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 20', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_20', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_20-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Kitchen', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 20', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 1, + 'measured_temperature': 21.5, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_20', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_21-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ 
+ }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_21', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 21', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_21', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_21-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Hall', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 21', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 33, + 'measured_temperature': 21, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_21', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_50-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_50', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 50', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_50', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_50-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Study', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 50', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 34, + 'measured_temperature': 22, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_50', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_53-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_53', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-alert', + 'original_name': 'Room Sensor 53', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_53', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_53-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Lounge', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 53', + 'icon': 'mdi:battery-alert', + 'state': dict({ + 'luminance': 0, + 
'measured_temperature': 0, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_53', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28', + }) +# --- diff --git a/tests/components/geniushub/snapshots/test_switch.ambr b/tests/components/geniushub/snapshots/test_switch.ambr new file mode 100644 index 00000000000..6c3c95af477 --- /dev/null +++ b/tests/components/geniushub/snapshots/test_switch.ambr @@ -0,0 +1,166 @@ +# serializer version: 1 +# name: test_cloud_all_sensors[switch.bedroom_socket-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.bedroom_socket', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bedroom Socket', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_27', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[switch.bedroom_socket-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Bedroom Socket', + 'status': dict({ + 'mode': 'timer', + 'override': dict({ + 'duration': 0, + 'setpoint': 'True', + }), + 'type': 'on / off', + }), + }), + 'context': , + 'entity_id': 'switch.bedroom_socket', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_cloud_all_sensors[switch.kitchen_socket-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.kitchen_socket', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Kitchen Socket', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_28', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[switch.kitchen_socket-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Kitchen Socket', + 'status': dict({ + 'mode': 'timer', + 'override': dict({ + 'duration': 0, + 'setpoint': 'True', + }), + 'type': 'on / off', + }), + }), + 'context': , + 'entity_id': 'switch.kitchen_socket', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_cloud_all_sensors[switch.study_socket-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.study_socket', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Study 
Socket', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_32', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[switch.study_socket-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Study Socket', + 'status': dict({ + 'mode': 'off', + 'override': dict({ + 'duration': 0, + 'setpoint': 'True', + }), + 'type': 'on / off', + }), + }), + 'context': , + 'entity_id': 'switch.study_socket', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/geniushub/test_binary_sensor.py b/tests/components/geniushub/test_binary_sensor.py new file mode 100644 index 00000000000..682929eb696 --- /dev/null +++ b/tests/components/geniushub/test_binary_sensor.py @@ -0,0 +1,32 @@ +"""Tests for the Geniushub binary sensor platform.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_geniushub_cloud") +async def test_cloud_all_sensors( + hass: HomeAssistant, + mock_cloud_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the Genius Hub binary sensors.""" + with patch( + "homeassistant.components.geniushub.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, mock_cloud_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id + ) diff --git a/tests/components/geniushub/test_climate.py b/tests/components/geniushub/test_climate.py new file mode 100644 index 00000000000..d14e57b9552 --- /dev/null +++ b/tests/components/geniushub/test_climate.py @@ -0,0 +1,30 @@ +"""Tests for the Geniushub climate platform.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_geniushub_cloud") +async def test_cloud_all_sensors( + hass: HomeAssistant, + mock_cloud_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the Genius Hub climate entities.""" + with patch("homeassistant.components.geniushub.PLATFORMS", [Platform.CLIMATE]): + await setup_integration(hass, mock_cloud_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id + ) diff --git a/tests/components/geniushub/test_config_flow.py b/tests/components/geniushub/test_config_flow.py index 9234e03e35a..7d1d33a2245 100644 --- a/tests/components/geniushub/test_config_flow.py +++ b/tests/components/geniushub/test_config_flow.py @@ -2,21 +2,14 @@ from http import HTTPStatus import socket -from typing import Any from unittest.mock import AsyncMock from aiohttp import ClientConnectionError, ClientResponseError import pytest from homeassistant.components.geniushub import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import ( - CONF_HOST, - CONF_MAC, - CONF_PASSWORD, - CONF_TOKEN, - CONF_USERNAME, -) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -309,174 +302,3 @@ async def test_cloud_duplicate( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - ], -) -async def test_import_local_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, - data: dict[str, Any], -) -> None: - """Test full local import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "10.0.0.130" - assert result["data"] == data - assert result["result"].unique_id == "aa:bb:cc:dd:ee:ff" - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_TOKEN: "abcdef", - }, - { - CONF_TOKEN: "abcdef", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - ], -) -async def test_import_cloud_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, - data: dict[str, Any], -) -> None: - """Test full cloud import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Genius hub" - assert result["data"] == data - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - { - CONF_TOKEN: "abcdef", - }, - { - CONF_TOKEN: "abcdef", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - ], -) -@pytest.mark.parametrize( - ("exception", "reason"), - [ - 
(socket.gaierror, "invalid_host"), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), - "invalid_auth", - ), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), - "invalid_host", - ), - (TimeoutError, "cannot_connect"), - (ClientConnectionError, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_import_flow_exceptions( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - data: dict[str, Any], - exception: Exception, - reason: str, -) -> None: - """Test import flow exceptions.""" - mock_geniushub_client.request.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "10.0.0.131", - CONF_USERNAME: "test-username1", - CONF_PASSWORD: "test-password", - }, - ], -) -async def test_import_flow_local_duplicate( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - mock_local_config_entry: MockConfigEntry, - data: dict[str, Any], -) -> None: - """Test import flow aborts on local duplicate data.""" - mock_local_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_import_flow_cloud_duplicate( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - mock_cloud_config_entry: MockConfigEntry, -) -> None: - """Test import flow aborts on cloud duplicate data.""" - mock_cloud_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_TOKEN: "abcdef", - }, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/geniushub/test_init.py b/tests/components/geniushub/test_init.py new file mode 100644 index 00000000000..ebdc082c4b8 --- /dev/null +++ b/tests/components/geniushub/test_init.py @@ -0,0 +1,39 @@ +"""Tests for the Genius Hub component.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.geniushub import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import CONF_MAC, CONF_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_cloud_unique_id_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_geniushub_cloud: AsyncMock, +) -> None: + """Test that the cloud unique ID is migrated to the entry_id.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="Genius hub", + data={ + CONF_TOKEN: "abcdef", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + entry_id="1234", + ) + entry.add_to_hass(hass) + entity_registry.async_get_or_create( + SENSOR_DOMAIN, DOMAIN, "aa:bb:cc:dd:ee:ff_device_78", config_entry=entry + ) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert hass.states.get("sensor.geniushub_aa_bb_cc_dd_ee_ff_device_78") + entity_entry = entity_registry.async_get( + "sensor.geniushub_aa_bb_cc_dd_ee_ff_device_78" + ) + assert entity_entry.unique_id == 
"1234_device_78" diff --git a/tests/components/geniushub/test_sensor.py b/tests/components/geniushub/test_sensor.py new file mode 100644 index 00000000000..a75329ca7fc --- /dev/null +++ b/tests/components/geniushub/test_sensor.py @@ -0,0 +1,30 @@ +"""Tests for the Geniushub sensor platform.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_geniushub_cloud") +async def test_cloud_all_sensors( + hass: HomeAssistant, + mock_cloud_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the Genius Hub sensors.""" + with patch("homeassistant.components.geniushub.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_cloud_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id + ) diff --git a/tests/components/geniushub/test_switch.py b/tests/components/geniushub/test_switch.py new file mode 100644 index 00000000000..0e88562e381 --- /dev/null +++ b/tests/components/geniushub/test_switch.py @@ -0,0 +1,30 @@ +"""Tests for the Geniushub switch platform.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_geniushub_cloud") +async def test_cloud_all_sensors( + hass: HomeAssistant, + mock_cloud_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the Genius Hub switch entities.""" + with patch("homeassistant.components.geniushub.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_cloud_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id + ) diff --git a/tests/components/geocaching/test_config_flow.py b/tests/components/geocaching/test_config_flow.py index 0c2ce66b513..5db89de0868 100644 --- a/tests/components/geocaching/test_config_flow.py +++ b/tests/components/geocaching/test_config_flow.py @@ -14,7 +14,7 @@ from homeassistant.components.geocaching.const import ( ENVIRONMENT, ENVIRONMENT_URLS, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -195,9 +195,7 @@ async def test_reauthentication( """Test Geocaching reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH} - ) + result = await mock_config_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/geofency/test_init.py b/tests/components/geofency/test_init.py index 3a98c6480bd..33740397868 100644 --- a/tests/components/geofency/test_init.py +++ b/tests/components/geofency/test_init.py @@ -10,7 
+10,6 @@ from homeassistant import config_entries from homeassistant.components import zone from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.geofency import CONF_MOBILE_BEACONS, DOMAIN -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_LATITUDE, ATTR_LONGITUDE, @@ -18,6 +17,7 @@ from homeassistant.const import ( STATE_NOT_HOME, ) from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component diff --git a/tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr b/tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..481a662ccf9 --- /dev/null +++ b/tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr @@ -0,0 +1,21 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'info': dict({ + 'latitude': '**REDACTED**', + 'longitude': '**REDACTED**', + 'minimum_magnitude': 0.0, + 'mmi': 4, + 'radius': 25, + 'scan_interval': 300.0, + 'unit_system': 'metric', + }), + 'service': dict({ + 'last_timestamp': None, + 'last_update': '2024-09-05T15:00:00', + 'last_update_successful': '2024-09-05T15:00:00', + 'status': 'OK', + 'total': 0, + }), + }) +# --- diff --git a/tests/components/geonetnz_quakes/test_diagnostics.py b/tests/components/geonetnz_quakes/test_diagnostics.py new file mode 100644 index 00000000000..db5e1300768 --- /dev/null +++ b/tests/components/geonetnz_quakes/test_diagnostics.py @@ -0,0 +1,33 @@ +"""Test GeoNet NZ Quakes diagnostics.""" + +from __future__ import annotations + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.freeze_time("2024-09-05 15:00:00") +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + config_entry: MockConfigEntry, +) -> None: + """Test config entry diagnostics.""" + with patch("aio_geojson_client.feed.GeoJsonFeed.update") as mock_feed_update: + mock_feed_update.return_value = "OK", [] + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert result == snapshot diff --git a/tests/components/geonetnz_volcano/test_config_flow.py b/tests/components/geonetnz_volcano/test_config_flow.py index b074bdffa20..110fb3b0a9e 100644 --- a/tests/components/geonetnz_volcano/test_config_flow.py +++ b/tests/components/geonetnz_volcano/test_config_flow.py @@ -3,7 +3,8 @@ from datetime import timedelta from unittest.mock import patch -from homeassistant.components.geonetnz_volcano import config_flow +from homeassistant.components.geonetnz_volcano import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -20,19 +21,18 @@ async def test_duplicate_error(hass: HomeAssistant, config_entry) -> None: conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} 
config_entry.add_to_hass(hass) - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - result = await flow.async_step_user(user_input=conf) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf + ) assert result["errors"] == {"base": "already_configured"} async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - - result = await flow.async_step_user(user_input=None) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=None + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -48,9 +48,6 @@ async def test_step_import(hass: HomeAssistant) -> None: CONF_SCAN_INTERVAL: timedelta(minutes=4), } - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - with ( patch( "homeassistant.components.geonetnz_volcano.async_setup_entry", @@ -60,7 +57,9 @@ async def test_step_import(hass: HomeAssistant) -> None: "homeassistant.components.geonetnz_volcano.async_setup", return_value=True ), ): - result = await flow.async_step_import(import_config=conf) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=conf + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { @@ -78,9 +77,6 @@ async def test_step_user(hass: HomeAssistant) -> None: hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25} - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - with ( patch( "homeassistant.components.geonetnz_volcano.async_setup_entry", @@ -90,7 +86,9 @@ async def test_step_user(hass: HomeAssistant) -> None: "homeassistant.components.geonetnz_volcano.async_setup", return_value=True ), ): - result = await flow.async_step_user(user_input=conf) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { diff --git a/tests/components/gios/snapshots/test_diagnostics.ambr b/tests/components/gios/snapshots/test_diagnostics.ambr index 1401b1e22a0..890edc00482 100644 --- a/tests/components/gios/snapshots/test_diagnostics.ambr +++ b/tests/components/gios/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'station_id': 123, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'gios', 'entry_id': '86129426118ae32020417a53712d6eef', 'minor_version': 1, @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Home', 'unique_id': '123', 'version': 1, diff --git a/tests/components/glances/test_config_flow.py b/tests/components/glances/test_config_flow.py index a7d6934e32d..b8d376d652f 100644 --- a/tests/components/glances/test_config_flow.py +++ b/tests/components/glances/test_config_flow.py @@ -1,6 +1,6 @@ """Tests for Glances config flow.""" -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from glances_api.exceptions import ( GlancesApiAuthorizationError, @@ -10,13 +10,14 @@ from glances_api.exceptions import ( import pytest from homeassistant import config_entries -from homeassistant.components import glances +from homeassistant.components.glances.const import DOMAIN +from homeassistant.const import CONF_NAME, CONF_USERNAME from 
homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . import HA_SENSOR_DATA, MOCK_USER_INPUT -from tests.common import MockConfigEntry, patch +from tests.common import MockConfigEntry @pytest.fixture(autouse=True) @@ -30,7 +31,7 @@ async def test_form(hass: HomeAssistant) -> None: """Test config entry configured successfully.""" result = await hass.config_entries.flow.async_init( - glances.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -59,7 +60,7 @@ async def test_form_fails( mock_api.return_value.get_ha_sensor_data.side_effect = error result = await hass.config_entries.flow.async_init( - glances.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_USER_INPUT @@ -71,11 +72,11 @@ async def test_form_fails( async def test_form_already_configured(hass: HomeAssistant) -> None: """Test host is already configured.""" - entry = MockConfigEntry(domain=glances.DOMAIN, data=MOCK_USER_INPUT) + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - glances.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_USER_INPUT @@ -86,21 +87,16 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: async def test_reauth_success(hass: HomeAssistant) -> None: """Test we can reauth.""" - entry = MockConfigEntry(domain=glances.DOMAIN, data=MOCK_USER_INPUT) + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - glances.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_USER_INPUT, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "username"} + assert result["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "username", + } result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -124,22 +120,17 @@ async def test_reauth_fails( hass: HomeAssistant, error: Exception, message: str, mock_api: MagicMock ) -> None: """Test we can reauth.""" - entry = MockConfigEntry(domain=glances.DOMAIN, data=MOCK_USER_INPUT) + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT) entry.add_to_hass(hass) mock_api.return_value.get_ha_sensor_data.side_effect = [error, HA_SENSOR_DATA] - result = await hass.config_entries.flow.async_init( - glances.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_USER_INPUT, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "username"} + assert result["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "username", + } result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git 
a/tests/components/glances/test_init.py b/tests/components/glances/test_init.py index 553bd6f2089..16d4d9d371b 100644 --- a/tests/components/glances/test_init.py +++ b/tests/components/glances/test_init.py @@ -1,6 +1,6 @@ """Tests for Glances integration.""" -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import MagicMock from glances_api.exceptions import ( GlancesApiAuthorizationError, @@ -12,9 +12,8 @@ import pytest from homeassistant.components.glances.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from . import HA_SENSOR_DATA, MOCK_USER_INPUT +from . import MOCK_USER_INPUT from tests.common import MockConfigEntry @@ -30,29 +29,6 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.LOADED -async def test_entry_deprecated_version( - hass: HomeAssistant, issue_registry: ir.IssueRegistry, mock_api: AsyncMock -) -> None: - """Test creating an issue if glances server is version 2.""" - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT) - entry.add_to_hass(hass) - - mock_api.return_value.get_ha_sensor_data.side_effect = [ - GlancesApiNoDataAvailable("endpoint: 'all' is not valid"), # fail v4 - GlancesApiNoDataAvailable("endpoint: 'all' is not valid"), # fail v3 - HA_SENSOR_DATA, # success v2 - HA_SENSOR_DATA, - ] - - await hass.config_entries.async_setup(entry.entry_id) - - assert entry.state is ConfigEntryState.LOADED - - issue = issue_registry.async_get_issue(DOMAIN, "deprecated_version") - assert issue is not None - assert issue.severity == ir.IssueSeverity.WARNING - - @pytest.mark.parametrize( ("error", "entry_state"), [ diff --git a/tests/components/go2rtc/__init__.py b/tests/components/go2rtc/__init__.py new file mode 100644 index 00000000000..0971541efa5 --- /dev/null +++ b/tests/components/go2rtc/__init__.py @@ -0,0 +1 @@ +"""Go2rtc tests.""" diff --git a/tests/components/go2rtc/conftest.py b/tests/components/go2rtc/conftest.py new file mode 100644 index 00000000000..abb139b89bf --- /dev/null +++ b/tests/components/go2rtc/conftest.py @@ -0,0 +1,96 @@ +"""Go2rtc test configuration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, Mock, patch + +from awesomeversion import AwesomeVersion +from go2rtc_client.rest import _StreamClient, _WebRTCClient +import pytest + +from homeassistant.components.go2rtc.const import RECOMMENDED_VERSION +from homeassistant.components.go2rtc.server import Server + +GO2RTC_PATH = "homeassistant.components.go2rtc" + + +@pytest.fixture +def rest_client() -> Generator[AsyncMock]: + """Mock a go2rtc rest client.""" + with ( + patch( + "homeassistant.components.go2rtc.Go2RtcRestClient", + ) as mock_client, + patch("homeassistant.components.go2rtc.server.Go2RtcRestClient", mock_client), + ): + client = mock_client.return_value + client.streams = streams = Mock(spec_set=_StreamClient) + streams.list.return_value = {} + client.validate_server_version = AsyncMock( + return_value=AwesomeVersion(RECOMMENDED_VERSION) + ) + client.webrtc = Mock(spec_set=_WebRTCClient) + yield client + + +@pytest.fixture +def ws_client() -> Generator[Mock]: + """Mock a go2rtc websocket client.""" + with patch( + "homeassistant.components.go2rtc.Go2RtcWsClient", autospec=True + ) as ws_client_mock: + yield ws_client_mock.return_value + + +@pytest.fixture +def server_stdout() -> list[str]: + """Server stdout lines.""" + return [ + 
"09:00:03.466 INF go2rtc platform=linux/amd64 revision=780f378 version=1.9.5", + "09:00:03.466 INF config path=/tmp/go2rtc.yaml", + "09:00:03.467 INF [rtsp] listen addr=:8554", + "09:00:03.467 INF [api] listen addr=127.0.0.1:1984", + "09:00:03.467 INF [webrtc] listen addr=:8555/tcp", + ] + + +@pytest.fixture +def mock_create_subprocess(server_stdout: list[str]) -> Generator[AsyncMock]: + """Mock create_subprocess_exec.""" + with patch(f"{GO2RTC_PATH}.server.asyncio.create_subprocess_exec") as mock_subproc: + subproc = AsyncMock() + subproc.terminate = Mock() + subproc.kill = Mock() + subproc.returncode = None + # Simulate process output + subproc.stdout.__aiter__.return_value = iter( + [f"{entry}\n".encode() for entry in server_stdout] + ) + mock_subproc.return_value = subproc + yield mock_subproc + + +@pytest.fixture +def server_start(mock_create_subprocess: AsyncMock) -> Generator[AsyncMock]: + """Mock start of a go2rtc server.""" + with patch( + f"{GO2RTC_PATH}.server.Server.start", wraps=Server.start, autospec=True + ) as mock_server_start: + yield mock_server_start + + +@pytest.fixture +def server_stop() -> Generator[AsyncMock]: + """Mock stop of a go2rtc server.""" + with ( + patch( + f"{GO2RTC_PATH}.server.Server.stop", wraps=Server.stop, autospec=True + ) as mock_server_stop, + ): + yield mock_server_stop + + +@pytest.fixture +def server(server_start: AsyncMock, server_stop: AsyncMock) -> Generator[AsyncMock]: + """Mock a go2rtc server.""" + with patch(f"{GO2RTC_PATH}.Server", wraps=Server) as mock_server: + yield mock_server diff --git a/tests/components/go2rtc/test_config_flow.py b/tests/components/go2rtc/test_config_flow.py new file mode 100644 index 00000000000..c414af35b38 --- /dev/null +++ b/tests/components/go2rtc/test_config_flow.py @@ -0,0 +1,45 @@ +"""Test the Home Assistant Cloud config flow.""" + +from unittest.mock import patch + +from homeassistant.components.go2rtc.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_config_flow(hass: HomeAssistant) -> None: + """Test create cloud entry.""" + + with ( + patch( + "homeassistant.components.go2rtc.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.go2rtc.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "system"} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "go2rtc" + assert result["data"] == {} + await hass.async_block_till_done() + + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_multiple_entries(hass: HomeAssistant) -> None: + """Test creating multiple cloud entries.""" + config_entry = MockConfigEntry(domain=DOMAIN) + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "system"} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/go2rtc/test_init.py b/tests/components/go2rtc/test_init.py new file mode 100644 index 00000000000..38ff82fc9c8 --- /dev/null +++ b/tests/components/go2rtc/test_init.py @@ -0,0 +1,759 @@ +"""The tests for the go2rtc component.""" + +from collections.abc import Callable, Generator +import logging +from typing import NamedTuple +from unittest.mock import AsyncMock, Mock, patch 
+ +from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError +from awesomeversion import AwesomeVersion +from go2rtc_client import Stream +from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError +from go2rtc_client.models import Producer +from go2rtc_client.ws import ( + ReceiveMessages, + WebRTCAnswer, + WebRTCCandidate, + WebRTCOffer, + WsError, +) +import pytest +from webrtc_models import RTCIceCandidateInit + +from homeassistant.components.camera import ( + DOMAIN as CAMERA_DOMAIN, + Camera, + CameraEntityFeature, + StreamType, + WebRTCAnswer as HAWebRTCAnswer, + WebRTCCandidate as HAWebRTCCandidate, + WebRTCError, + WebRTCMessage, + WebRTCSendMessage, +) +from homeassistant.components.default_config import DOMAIN as DEFAULT_CONFIG_DOMAIN +from homeassistant.components.go2rtc import WebRTCProvider +from homeassistant.components.go2rtc.const import ( + CONF_DEBUG_UI, + DEBUG_UI_URL_MESSAGE, + DOMAIN, + RECOMMENDED_VERSION, +) +from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers.typing import ConfigType +from homeassistant.setup import async_setup_component + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, + setup_test_component_platform, +) + +TEST_DOMAIN = "test" + +# The go2rtc provider does not inspect the details of the offer and answer, +# and is only a pass through. +OFFER_SDP = "v=0\r\no=carol 28908764872 28908764872 IN IP4 100.3.6.6\r\n..." +ANSWER_SDP = "v=0\r\no=bob 2890844730 2890844730 IN IP4 host.example.com\r\n..." + + +class MockCamera(Camera): + """Mock Camera Entity.""" + + _attr_name = "Test" + _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM + + def __init__(self) -> None: + """Initialize the mock entity.""" + super().__init__() + self._stream_source: str | None = "rtsp://stream" + + def set_stream_source(self, stream_source: str | None) -> None: + """Set the stream source.""" + self._stream_source = stream_source + + async def stream_source(self) -> str | None: + """Return the source of the stream. + + This is used by cameras with CameraEntityFeature.STREAM + and StreamType.HLS. 
+ """ + return self._stream_source + + +@pytest.fixture +def integration_config_entry(hass: HomeAssistant) -> ConfigEntry: + """Test mock config entry.""" + entry = MockConfigEntry(domain=TEST_DOMAIN) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture(name="go2rtc_binary") +def go2rtc_binary_fixture() -> str: + """Fixture to provide go2rtc binary name.""" + return "/usr/bin/go2rtc" + + +@pytest.fixture +def mock_get_binary(go2rtc_binary) -> Generator[Mock]: + """Mock _get_binary.""" + with patch( + "homeassistant.components.go2rtc.shutil.which", + return_value=go2rtc_binary, + ) as mock_which: + yield mock_which + + +@pytest.fixture(name="has_go2rtc_entry") +def has_go2rtc_entry_fixture() -> bool: + """Fixture to control if a go2rtc config entry should be created.""" + return True + + +@pytest.fixture +def mock_go2rtc_entry(hass: HomeAssistant, has_go2rtc_entry: bool) -> None: + """Mock a go2rtc onfig entry.""" + if not has_go2rtc_entry: + return + config_entry = MockConfigEntry(domain=DOMAIN) + config_entry.add_to_hass(hass) + + +@pytest.fixture(name="is_docker_env") +def is_docker_env_fixture() -> bool: + """Fixture to provide is_docker_env return value.""" + return True + + +@pytest.fixture +def mock_is_docker_env(is_docker_env) -> Generator[Mock]: + """Mock is_docker_env.""" + with patch( + "homeassistant.components.go2rtc.is_docker_env", + return_value=is_docker_env, + ) as mock_is_docker_env: + yield mock_is_docker_env + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + rest_client: AsyncMock, + mock_is_docker_env, + mock_get_binary, + server: Mock, +) -> None: + """Initialize the go2rtc integration.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + + +@pytest.fixture +async def init_test_integration( + hass: HomeAssistant, + integration_config_entry: ConfigEntry, +) -> MockCamera: + """Initialize components.""" + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [CAMERA_DOMAIN] + ) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Unload test config entry.""" + await hass.config_entries.async_forward_entry_unload( + config_entry, CAMERA_DOMAIN + ) + return True + + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + test_camera = MockCamera() + setup_test_component_platform( + hass, CAMERA_DOMAIN, [test_camera], from_config_entry=True + ) + mock_platform(hass, f"{TEST_DOMAIN}.config_flow", Mock()) + + with mock_config_flow(TEST_DOMAIN, ConfigFlow): + assert await hass.config_entries.async_setup(integration_config_entry.entry_id) + await hass.async_block_till_done() + + return test_camera + + +async def _test_setup_and_signaling( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + rest_client: AsyncMock, + ws_client: Mock, + config: ConfigType, + after_setup_fn: Callable[[], None], + camera: MockCamera, +) -> None: + """Test the go2rtc config entry.""" + entity_id = camera.entity_id + assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} + + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + assert issue_registry.async_get_issue(DOMAIN, "recommended_version") is None + config_entries = 
hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == ConfigEntryState.LOADED + after_setup_fn() + + receive_message_callback = Mock(spec_set=WebRTCSendMessage) + + async def test() -> None: + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_message_callback + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) + ) + ws_client.subscribe.assert_called_once() + + # Simulate the answer from the go2rtc server + callback = ws_client.subscribe.call_args[0][0] + callback(WebRTCAnswer(ANSWER_SDP)) + receive_message_callback.assert_called_once_with(HAWebRTCAnswer(ANSWER_SDP)) + + await test() + + rest_client.streams.add.assert_called_once_with( + entity_id, + [ + "rtsp://stream", + f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug", + ], + ) + + # Stream exists but the source is different + rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { + entity_id: Stream([Producer("rtsp://different")]) + } + + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() + + rest_client.streams.add.assert_called_once_with( + entity_id, + [ + "rtsp://stream", + f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug", + ], + ) + + # If the stream is already added, the stream should not be added again. + rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { + entity_id: Stream([Producer("rtsp://stream")]) + } + + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() + + rest_client.streams.add.assert_not_called() + assert isinstance(camera._webrtc_provider, WebRTCProvider) + + # Set stream source to None and provider should be skipped + rest_client.streams.list.return_value = {} + receive_message_callback.reset_mock() + camera.set_stream_source(None) + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_message_callback + ) + receive_message_callback.assert_called_once_with( + WebRTCError("go2rtc_webrtc_offer_failed", "Camera has no stream source") + ) + + +@pytest.mark.usefixtures( + "init_test_integration", + "mock_get_binary", + "mock_is_docker_env", + "mock_go2rtc_entry", +) +@pytest.mark.parametrize( + ("config", "ui_enabled"), + [ + ({DOMAIN: {}}, False), + ({DOMAIN: {CONF_DEBUG_UI: True}}, True), + ({DEFAULT_CONFIG_DOMAIN: {}}, False), + ({DEFAULT_CONFIG_DOMAIN: {}, DOMAIN: {CONF_DEBUG_UI: True}}, True), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +async def test_setup_go_binary( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + rest_client: AsyncMock, + ws_client: Mock, + server: AsyncMock, + server_start: Mock, + server_stop: Mock, + init_test_integration: MockCamera, + has_go2rtc_entry: bool, + config: ConfigType, + ui_enabled: bool, +) -> None: + """Test the go2rtc config entry with binary.""" + assert (len(hass.config_entries.async_entries(DOMAIN)) == 1) == has_go2rtc_entry + + def after_setup() -> None: + server.assert_called_once_with(hass, "/usr/bin/go2rtc", enable_ui=ui_enabled) + server_start.assert_called_once() + + await _test_setup_and_signaling( + hass, + issue_registry, + rest_client, + ws_client, + config, + after_setup, + init_test_integration, + ) + + await hass.async_stop() + server_stop.assert_called_once() + + +@pytest.mark.usefixtures("mock_go2rtc_entry") +@pytest.mark.parametrize( + ("go2rtc_binary", "is_docker_env"), + 
[ + ("/usr/bin/go2rtc", True), + (None, False), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +async def test_setup( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + rest_client: AsyncMock, + ws_client: Mock, + server: Mock, + init_test_integration: MockCamera, + mock_get_binary: Mock, + mock_is_docker_env: Mock, + has_go2rtc_entry: bool, +) -> None: + """Test the go2rtc config entry without binary.""" + assert (len(hass.config_entries.async_entries(DOMAIN)) == 1) == has_go2rtc_entry + + config = {DOMAIN: {CONF_URL: "http://localhost:1984/"}} + + def after_setup() -> None: + server.assert_not_called() + + await _test_setup_and_signaling( + hass, + issue_registry, + rest_client, + ws_client, + config, + after_setup, + init_test_integration, + ) + + mock_get_binary.assert_not_called() + server.assert_not_called() + + +class Callbacks(NamedTuple): + """Callbacks for the test.""" + + on_message: Mock + send_message: Mock + + +@pytest.fixture +async def message_callbacks( + ws_client: Mock, + init_test_integration: MockCamera, +) -> Callbacks: + """Prepare and return receive message callback.""" + receive_callback = Mock(spec_set=WebRTCSendMessage) + camera = init_test_integration + + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_callback + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) + ) + ws_client.subscribe.assert_called_once() + + # Simulate messages from the go2rtc server + send_callback = ws_client.subscribe.call_args[0][0] + + return Callbacks(receive_callback, send_callback) + + +@pytest.mark.parametrize( + ("message", "expected_message"), + [ + ( + WebRTCCandidate("candidate"), + HAWebRTCCandidate(RTCIceCandidateInit("candidate")), + ), + ( + WebRTCAnswer(ANSWER_SDP), + HAWebRTCAnswer(ANSWER_SDP), + ), + ( + WsError("error"), + WebRTCError("go2rtc_webrtc_offer_failed", "error"), + ), + ], +) +@pytest.mark.usefixtures("init_integration") +async def test_receiving_messages_from_go2rtc_server( + message_callbacks: Callbacks, + message: ReceiveMessages, + expected_message: WebRTCMessage, +) -> None: + """Test receiving message from go2rtc server.""" + on_message, send_message = message_callbacks + + send_message(message) + on_message.assert_called_once_with(expected_message) + + +@pytest.mark.usefixtures("init_integration") +async def test_on_candidate( + ws_client: Mock, + init_test_integration: MockCamera, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test frontend sending candidate to go2rtc server.""" + camera = init_test_integration + session_id = "session_id" + + # Session doesn't exist + await camera.async_on_webrtc_candidate(session_id, RTCIceCandidateInit("candidate")) + assert ( + "homeassistant.components.go2rtc", + logging.DEBUG, + f"Unknown session {session_id}. 
Ignoring candidate", + ) in caplog.record_tuples + caplog.clear() + + # Store session + await init_test_integration.async_handle_async_webrtc_offer( + OFFER_SDP, session_id, Mock() + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) + ) + ws_client.reset_mock() + + await camera.async_on_webrtc_candidate(session_id, RTCIceCandidateInit("candidate")) + ws_client.send.assert_called_once_with(WebRTCCandidate("candidate")) + assert caplog.record_tuples == [] + + +@pytest.mark.usefixtures("init_integration") +async def test_close_session( + ws_client: Mock, + init_test_integration: MockCamera, +) -> None: + """Test closing session.""" + camera = init_test_integration + session_id = "session_id" + + # Session doesn't exist + with pytest.raises(KeyError): + camera.close_webrtc_session(session_id) + ws_client.close.assert_not_called() + + # Store session + await init_test_integration.async_handle_async_webrtc_offer( + OFFER_SDP, session_id, Mock() + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) + ) + + # Close session + camera.close_webrtc_session(session_id) + ws_client.close.assert_called_once() + + # Close again should raise an error + ws_client.reset_mock() + with pytest.raises(KeyError): + camera.close_webrtc_session(session_id) + ws_client.close.assert_not_called() + + +ERR_BINARY_NOT_FOUND = "Could not find go2rtc docker binary" +ERR_CONNECT = "Could not connect to go2rtc instance" +ERR_CONNECT_RETRY = ( + "Could not connect to go2rtc instance on http://localhost:1984/; Retrying" +) +ERR_START_SERVER = "Could not start go2rtc server" +ERR_UNSUPPORTED_VERSION = "The go2rtc server version is not supported" +_INVALID_CONFIG = "Invalid config for 'go2rtc': " +ERR_INVALID_URL = _INVALID_CONFIG + "invalid url" +ERR_EXCLUSIVE = _INVALID_CONFIG + DEBUG_UI_URL_MESSAGE +ERR_URL_REQUIRED = "Go2rtc URL required in non-docker installs" + + +@pytest.mark.parametrize( + ("config", "go2rtc_binary", "is_docker_env"), + [ + ({}, None, False), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_non_user_setup_with_error( + hass: HomeAssistant, + config: ConfigType, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test setup integration does not fail if not setup by user.""" + + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + assert not hass.config_entries.async_entries(DOMAIN) + + +@pytest.mark.parametrize( + ("config", "go2rtc_binary", "is_docker_env", "expected_log_message"), + [ + ({DEFAULT_CONFIG_DOMAIN: {}}, None, True, ERR_BINARY_NOT_FOUND), + ({DEFAULT_CONFIG_DOMAIN: {}}, "/usr/bin/go2rtc", True, ERR_START_SERVER), + ({DOMAIN: {}}, None, False, ERR_URL_REQUIRED), + ({DOMAIN: {}}, None, True, ERR_BINARY_NOT_FOUND), + ({DOMAIN: {}}, "/usr/bin/go2rtc", True, ERR_START_SERVER), + ({DOMAIN: {CONF_URL: "invalid"}}, None, True, ERR_INVALID_URL), + ( + {DOMAIN: {CONF_URL: "http://localhost:1984", CONF_DEBUG_UI: True}}, + None, + True, + ERR_EXCLUSIVE, + ), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_setup_with_setup_error( + hass: HomeAssistant, + config: 
ConfigType, + caplog: pytest.LogCaptureFixture, + has_go2rtc_entry: bool, + expected_log_message: str, +) -> None: + """Test setup integration fails.""" + + assert not await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + assert bool(hass.config_entries.async_entries(DOMAIN)) == has_go2rtc_entry + assert expected_log_message in caplog.text + + +@pytest.mark.parametrize( + ("config", "go2rtc_binary", "is_docker_env", "expected_log_message"), + [ + ({DOMAIN: {CONF_URL: "http://localhost:1984/"}}, None, True, ERR_CONNECT), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_setup_with_setup_entry_error( + hass: HomeAssistant, + config: ConfigType, + caplog: pytest.LogCaptureFixture, + expected_log_message: str, +) -> None: + """Test setup integration entry fails.""" + + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == ConfigEntryState.SETUP_ERROR + assert expected_log_message in caplog.text + + +@pytest.mark.parametrize("config", [{DOMAIN: {CONF_URL: "http://localhost:1984/"}}]) +@pytest.mark.parametrize( + ("cause", "expected_config_entry_state", "expected_log_message"), + [ + (ClientConnectionError(), ConfigEntryState.SETUP_RETRY, ERR_CONNECT_RETRY), + (ServerConnectionError(), ConfigEntryState.SETUP_RETRY, ERR_CONNECT_RETRY), + (None, ConfigEntryState.SETUP_ERROR, ERR_CONNECT), + (Exception(), ConfigEntryState.SETUP_ERROR, ERR_CONNECT), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_setup_with_retryable_setup_entry_error_custom_server( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + rest_client: AsyncMock, + config: ConfigType, + cause: Exception, + expected_config_entry_state: ConfigEntryState, + expected_log_message: str, +) -> None: + """Test setup integration entry fails.""" + go2rtc_error = Go2RtcClientError() + go2rtc_error.__cause__ = cause + rest_client.validate_server_version.side_effect = go2rtc_error + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == expected_config_entry_state + assert expected_log_message in caplog.text + + +@pytest.mark.parametrize("config", [{DOMAIN: {}}, {DEFAULT_CONFIG_DOMAIN: {}}]) +@pytest.mark.parametrize( + ("cause", "expected_config_entry_state", "expected_log_message"), + [ + (ClientConnectionError(), ConfigEntryState.NOT_LOADED, ERR_START_SERVER), + (ServerConnectionError(), ConfigEntryState.NOT_LOADED, ERR_START_SERVER), + (None, ConfigEntryState.NOT_LOADED, ERR_START_SERVER), + (Exception(), ConfigEntryState.NOT_LOADED, ERR_START_SERVER), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_setup_with_retryable_setup_entry_error_default_server( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + rest_client: AsyncMock, + has_go2rtc_entry: bool, + config: ConfigType, 
+ cause: Exception, + expected_config_entry_state: ConfigEntryState, + expected_log_message: str, +) -> None: + """Test setup integration entry fails.""" + go2rtc_error = Go2RtcClientError() + go2rtc_error.__cause__ = cause + rest_client.validate_server_version.side_effect = go2rtc_error + assert not await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == has_go2rtc_entry + for config_entry in config_entries: + assert config_entry.state == expected_config_entry_state + assert expected_log_message in caplog.text + + +@pytest.mark.parametrize("config", [{DOMAIN: {}}, {DEFAULT_CONFIG_DOMAIN: {}}]) +@pytest.mark.parametrize( + ("go2rtc_error", "expected_config_entry_state", "expected_log_message"), + [ + ( + Go2RtcVersionError("1.9.4", "1.9.5", "2.0.0"), + ConfigEntryState.SETUP_RETRY, + ERR_UNSUPPORTED_VERSION, + ), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_setup_with_version_error( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + rest_client: AsyncMock, + config: ConfigType, + go2rtc_error: Exception, + expected_config_entry_state: ConfigEntryState, + expected_log_message: str, +) -> None: + """Test setup integration entry fails.""" + rest_client.validate_server_version.side_effect = [None, go2rtc_error] + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == expected_config_entry_state + assert expected_log_message in caplog.text + + +async def test_config_entry_remove(hass: HomeAssistant) -> None: + """Test config entry removed when neither default_config nor go2rtc is in config.""" + config_entry = MockConfigEntry(domain=DOMAIN) + config_entry.add_to_hass(hass) + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert not await hass.config_entries.async_setup(config_entry.entry_id) + assert len(hass.config_entries.async_entries(DOMAIN)) == 0 + + +@pytest.mark.parametrize("config", [{DOMAIN: {CONF_URL: "http://localhost:1984"}}]) +@pytest.mark.usefixtures("server") +async def test_setup_with_recommended_version_repair( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + rest_client: AsyncMock, + config: ConfigType, +) -> None: + """Test a repair issue is created when the go2rtc version is not the recommended one.""" + rest_client.validate_server_version.return_value = AwesomeVersion("1.9.5") + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + + # Verify the issue is created + issue = issue_registry.async_get_issue(DOMAIN, "recommended_version") + assert issue + assert issue.is_fixable is False + assert issue.is_persistent is False + assert issue.severity == ir.IssueSeverity.WARNING + assert issue.issue_id == "recommended_version" + assert issue.translation_key == "recommended_version" + assert issue.translation_placeholders == { + "recommended_version": RECOMMENDED_VERSION, + "current_version": "1.9.5", + } diff --git a/tests/components/go2rtc/test_server.py b/tests/components/go2rtc/test_server.py new file mode 100644 index 00000000000..e4fe3993f3c --- /dev/null +++ b/tests/components/go2rtc/test_server.py @@ -0,0 +1,393 @@ +"""Tests for the go2rtc 
server.""" + +import asyncio +from collections.abc import Generator +import logging +import subprocess +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +import pytest + +from homeassistant.components.go2rtc.server import Server +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +TEST_BINARY = "/bin/go2rtc" + + +@pytest.fixture +def enable_ui() -> bool: + """Fixture to enable the UI.""" + return False + + +@pytest.fixture +def server(hass: HomeAssistant, enable_ui: bool) -> Server: + """Fixture to initialize the Server.""" + return Server(hass, binary=TEST_BINARY, enable_ui=enable_ui) + + +@pytest.fixture +def mock_tempfile() -> Generator[Mock]: + """Fixture to mock NamedTemporaryFile.""" + with patch( + "homeassistant.components.go2rtc.server.NamedTemporaryFile", autospec=True + ) as mock_tempfile: + file = mock_tempfile.return_value.__enter__.return_value + file.name = "test.yaml" + yield file + + +def _assert_server_output_logged( + server_stdout: list[str], + caplog: pytest.LogCaptureFixture, + loglevel: int, + expect_logged: bool, +) -> None: + """Check server stdout was logged.""" + for entry in server_stdout: + assert ( + ( + "homeassistant.components.go2rtc.server", + loglevel, + entry, + ) + in caplog.record_tuples + ) is expect_logged + + +def assert_server_output_logged( + server_stdout: list[str], + caplog: pytest.LogCaptureFixture, + loglevel: int, +) -> None: + """Check server stdout was logged.""" + _assert_server_output_logged(server_stdout, caplog, loglevel, True) + + +def assert_server_output_not_logged( + server_stdout: list[str], + caplog: pytest.LogCaptureFixture, + loglevel: int, +) -> None: + """Check server stdout was logged.""" + _assert_server_output_logged(server_stdout, caplog, loglevel, False) + + +@pytest.mark.parametrize( + ("enable_ui", "api_ip"), + [ + (True, ""), + (False, "127.0.0.1"), + ], +) +async def test_server_run_success( + mock_create_subprocess: AsyncMock, + rest_client: AsyncMock, + server_stdout: list[str], + server: Server, + caplog: pytest.LogCaptureFixture, + mock_tempfile: Mock, + api_ip: str, +) -> None: + """Test that the server runs successfully.""" + await server.start() + + # Check that Popen was called with the right arguments + mock_create_subprocess.assert_called_once_with( + TEST_BINARY, + "-c", + "test.yaml", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + close_fds=False, + ) + + # Verify that the config file was written + mock_tempfile.write.assert_called_once_with( + f"""# This file is managed by Home Assistant +# Do not edit it manually + +api: + listen: "{api_ip}:11984" + +rtsp: + listen: "127.0.0.1:18554" + +webrtc: + listen: ":18555/tcp" + ice_servers: [] +""".encode() + ) + + # Verify go2rtc binary stdout was logged with debug level + assert_server_output_logged(server_stdout, caplog, logging.DEBUG) + + await server.stop() + mock_create_subprocess.return_value.terminate.assert_called_once() + + # Verify go2rtc binary stdout was not logged with warning level + assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) + + +@pytest.mark.usefixtures("mock_tempfile") +async def test_server_timeout_on_stop( + mock_create_subprocess: MagicMock, rest_client: AsyncMock, server: Server +) -> None: + """Test server run where the process takes too long to terminate.""" + # Start server thread + await server.start() + + async def sleep() -> None: + await asyncio.sleep(1) + + # Simulate timeout + mock_create_subprocess.return_value.wait.side_effect 
= sleep + + with patch("homeassistant.components.go2rtc.server._TERMINATE_TIMEOUT", new=0.1): + await server.stop() + + # Ensure terminate and kill were called due to timeout + mock_create_subprocess.return_value.terminate.assert_called_once() + mock_create_subprocess.return_value.kill.assert_called_once() + + +@pytest.mark.parametrize( + "server_stdout", + [ + [ + "09:00:03.466 INF go2rtc platform=linux/amd64 revision=780f378 version=1.9.5", + "09:00:03.466 INF config path=/tmp/go2rtc.yaml", + ] + ], +) +@pytest.mark.usefixtures("mock_tempfile") +async def test_server_failed_to_start( + mock_create_subprocess: MagicMock, + server_stdout: list[str], + server: Server, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test server, where an exception is raised if the expected log entry was not received until the timeout.""" + with ( + patch("homeassistant.components.go2rtc.server._SETUP_TIMEOUT", new=0.1), + pytest.raises(HomeAssistantError, match="Go2rtc server didn't start correctly"), + ): + await server.start() + + # Verify go2rtc binary stdout was logged with debug and warning level + assert_server_output_logged(server_stdout, caplog, logging.DEBUG) + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + assert ( + "homeassistant.components.go2rtc.server", + logging.ERROR, + "Go2rtc server didn't start correctly", + ) in caplog.record_tuples + + # Check that Popen was called with the right arguments + mock_create_subprocess.assert_called_once_with( + TEST_BINARY, + "-c", + "test.yaml", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + close_fds=False, + ) + + +@pytest.mark.parametrize( + ("server_stdout", "expected_loglevel"), + [ + ( + [ + "09:00:03.466 TRC [api] register path path=/", + "09:00:03.466 DBG build vcs.time=2024-10-28T19:47:55Z version=go1.23.2", + "09:00:03.466 INF go2rtc platform=linux/amd64 revision=780f378 version=1.9.5", + "09:00:03.467 INF [api] listen addr=127.0.0.1:1984", + "09:00:03.466 WRN warning message", + '09:00:03.466 ERR [api] listen error="listen tcp 127.0.0.1:11984: bind: address already in use"', + "09:00:03.466 FTL fatal message", + "09:00:03.466 PNC panic message", + "exit with signal: interrupt", # Example of stderr write + ], + [ + logging.DEBUG, + logging.DEBUG, + logging.DEBUG, + logging.DEBUG, + logging.WARNING, + logging.WARNING, + logging.ERROR, + logging.ERROR, + logging.WARNING, + ], + ) + ], +) +@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) +async def test_log_level_mapping( + hass: HomeAssistant, + mock_create_subprocess: MagicMock, + server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, + expected_loglevel: list[int], +) -> None: + """Log level mapping.""" + evt = asyncio.Event() + + async def wait_event() -> None: + await evt.wait() + + mock_create_subprocess.return_value.wait.side_effect = wait_event + + await server.start() + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + + # Verify go2rtc binary stdout was logged with default level + for i, entry in enumerate(server_stdout): + assert ( + "homeassistant.components.go2rtc.server", + expected_loglevel[i], + entry, + ) in caplog.record_tuples + + evt.set() + await asyncio.sleep(0.1) + await hass.async_block_till_done() + + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + await server.stop() + + +@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) +async def test_server_restart_process_exit( + hass: HomeAssistant, + 
mock_create_subprocess: AsyncMock, + server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the server is restarted when it exits.""" + evt = asyncio.Event() + + async def wait_event() -> None: + await evt.wait() + + mock_create_subprocess.return_value.wait.side_effect = wait_event + + await server.start() + mock_create_subprocess.assert_awaited_once() + mock_create_subprocess.reset_mock() + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + mock_create_subprocess.assert_not_awaited() + + # Verify go2rtc binary stdout was not yet logged with warning level + assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) + + evt.set() + await asyncio.sleep(0.1) + mock_create_subprocess.assert_awaited_once() + + # Verify go2rtc binary stdout was logged with warning level + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + await server.stop() + + +@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) +async def test_server_restart_process_error( + hass: HomeAssistant, + mock_create_subprocess: AsyncMock, + server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the server is restarted on error.""" + mock_create_subprocess.return_value.wait.side_effect = [Exception, None, None, None] + + await server.start() + mock_create_subprocess.assert_awaited_once() + mock_create_subprocess.reset_mock() + + # Verify go2rtc binary stdout was not yet logged with warning level + assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + mock_create_subprocess.assert_awaited_once() + + # Verify go2rtc binary stdout was logged with warning level + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + await server.stop() + + +@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) +async def test_server_restart_api_error( + hass: HomeAssistant, + mock_create_subprocess: AsyncMock, + server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the server is restarted on error.""" + rest_client.streams.list.side_effect = Exception + + await server.start() + mock_create_subprocess.assert_awaited_once() + mock_create_subprocess.reset_mock() + + # Verify go2rtc binary stdout was not yet logged with warning level + assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + mock_create_subprocess.assert_awaited_once() + + # Verify go2rtc binary stdout was logged with warning level + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + await server.stop() + + +@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) +async def test_server_restart_error( + hass: HomeAssistant, + mock_create_subprocess: AsyncMock, + server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test error handling when exception is raised during restart.""" + rest_client.streams.list.side_effect = Exception + mock_create_subprocess.return_value.terminate.side_effect = [Exception, None] + + await server.start() + mock_create_subprocess.assert_awaited_once() + mock_create_subprocess.reset_mock() + + # Verify go2rtc binary stdout was not yet logged with warning level + 
assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + mock_create_subprocess.assert_awaited_once() + + # Verify go2rtc binary stdout was logged with warning level + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + assert "Unexpected error when restarting go2rtc server" in caplog.text + + await server.stop() diff --git a/tests/components/goalzero/test_switch.py b/tests/components/goalzero/test_switch.py index de2e6035a12..b784cff05aa 100644 --- a/tests/components/goalzero/test_switch.py +++ b/tests/components/goalzero/test_switch.py @@ -1,7 +1,7 @@ """Switch tests for the Goalzero integration.""" from homeassistant.components.goalzero.const import DEFAULT_NAME -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -32,7 +32,7 @@ async def test_switches_states( text=load_fixture("goalzero/state_change.json"), ) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: [entity_id]}, blocking=True, @@ -44,7 +44,7 @@ async def test_switches_states( text=load_fixture("goalzero/state_data.json"), ) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: [entity_id]}, blocking=True, diff --git a/tests/components/gogogate2/test_cover.py b/tests/components/gogogate2/test_cover.py index 001212fa17b..42ee1f6f731 100644 --- a/tests/components/gogogate2/test_cover.py +++ b/tests/components/gogogate2/test_cover.py @@ -20,6 +20,7 @@ from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, CoverDeviceClass, CoverEntityFeature, + CoverState, ) from homeassistant.components.gogogate2.const import ( DEVICE_TYPE_GOGOGATE2, @@ -34,10 +35,6 @@ from homeassistant.const import ( CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -144,7 +141,7 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None assert hass.states.get("cover.door1") is None assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPEN + assert hass.states.get("cover.door1").state == CoverState.OPEN assert dict(hass.states.get("cover.door1").attributes) == expected_attributes api.async_info.return_value = info_response(DoorStatus.CLOSED) @@ -163,12 +160,12 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_CLOSING + assert hass.states.get("cover.door1").state == CoverState.CLOSING api.async_close_door.assert_called_with(1) async_fire_time_changed(hass, utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_CLOSING + assert hass.states.get("cover.door1").state == CoverState.CLOSING api.async_info.return_value = info_response(DoorStatus.CLOSED) api.async_get_door_statuses_from_info.return_value = { @@ -177,7 +174,7 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == 
STATE_CLOSED + assert hass.states.get("cover.door1").state == CoverState.CLOSED api.async_info.return_value = info_response(DoorStatus.OPENED) api.async_get_door_statuses_from_info.return_value = { @@ -195,12 +192,12 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPENING + assert hass.states.get("cover.door1").state == CoverState.OPENING api.async_open_door.assert_called_with(1) async_fire_time_changed(hass, utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPENING + assert hass.states.get("cover.door1").state == CoverState.OPENING api.async_info.return_value = info_response(DoorStatus.OPENED) api.async_get_door_statuses_from_info.return_value = { @@ -209,7 +206,7 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPEN + assert hass.states.get("cover.door1").state == CoverState.OPEN api.async_info.return_value = info_response(DoorStatus.UNDEFINED) api.async_get_door_statuses_from_info.return_value = { @@ -241,7 +238,7 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPENING + assert hass.states.get("cover.door1").state == CoverState.OPENING api.async_open_door.assert_called_with(1) assert await hass.config_entries.async_unload(config_entry.entry_id) @@ -303,7 +300,7 @@ async def test_availability(ismartgateapi_mock, hass: HomeAssistant) -> None: } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_CLOSED + assert hass.states.get("cover.door1").state == CoverState.CLOSED assert dict(hass.states.get("cover.door1").attributes) == expected_attributes diff --git a/tests/components/gogogate2/test_init.py b/tests/components/gogogate2/test_init.py index f7e58296a43..90765c425b4 100644 --- a/tests/components/gogogate2/test_init.py +++ b/tests/components/gogogate2/test_init.py @@ -3,11 +3,10 @@ from unittest.mock import MagicMock, patch from ismartgate import GogoGate2Api -import pytest -from homeassistant.components.gogogate2 import DEVICE_TYPE_GOGOGATE2, async_setup_entry +from homeassistant.components.gogogate2 import DEVICE_TYPE_GOGOGATE2 from homeassistant.components.gogogate2.const import DEVICE_TYPE_ISMARTGATE, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import ( CONF_DEVICE, CONF_IP_ADDRESS, @@ -15,7 +14,6 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady from tests.common import MockConfigEntry @@ -97,6 +95,8 @@ async def test_api_failure_on_startup(hass: HomeAssistant) -> None: "homeassistant.components.gogogate2.common.ISmartGateApi.async_info", side_effect=TimeoutError, ), - pytest.raises(ConfigEntryNotReady), ): - await async_setup_entry(hass, config_entry) + await hass.config_entries.async_setup(config_entry.entry_id) + await 
hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/goodwe/snapshots/test_diagnostics.ambr b/tests/components/goodwe/snapshots/test_diagnostics.ambr index 4097848a34a..40ed22195d5 100644 --- a/tests/components/goodwe/snapshots/test_diagnostics.ambr +++ b/tests/components/goodwe/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'model_family': 'ET', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'goodwe', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/google/conftest.py b/tests/components/google/conftest.py index 791e5613b0b..23b6b884145 100644 --- a/tests/components/google/conftest.py +++ b/tests/components/google/conftest.py @@ -98,12 +98,21 @@ def calendar_access_role() -> str: return "owner" +@pytest.fixture +def calendar_is_primary() -> bool: + """Set if the calendar is the primary or not.""" + return False + + @pytest.fixture(name="test_api_calendar") -def api_calendar(calendar_access_role: str) -> dict[str, Any]: +def api_calendar( + calendar_access_role: str, calendar_is_primary: bool +) -> dict[str, Any]: """Return a test calendar object used in API responses.""" return { **TEST_API_CALENDAR, "accessRole": calendar_access_role, + "primary": calendar_is_primary, } diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 11d4ec46bd1..6ce95a2bc17 100644 --- a/tests/components/google/test_calendar.py +++ b/tests/components/google/test_calendar.py @@ -15,9 +15,11 @@ from gcal_sync.auth import API_BASE_URL import pytest from homeassistant.components.google.const import CONF_CALENDAR_ACCESS, DOMAIN +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.helpers.template import DATE_STR_FORMAT import homeassistant.util.dt as dt_util @@ -570,6 +572,62 @@ async def test_opaque_event( assert state.state == (STATE_ON if expect_visible_event else STATE_OFF) +async def test_declined_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_calendars_yaml, + mock_events_list_items, + component_setup, +) -> None: + """Test querying the API and fetching events from the server.""" + event = { + **TEST_EVENT, + **upcoming(), + "attendees": [ + { + "self": "True", + "responseStatus": "declined", + } + ], + } + mock_events_list_items([event]) + assert await component_setup() + + client = await hass_client() + response = await client.get(upcoming_event_url(TEST_YAML_ENTITY)) + assert response.status == HTTPStatus.OK + events = await response.json() + assert len(events) == 0 + + +async def test_attending_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_calendars_yaml, + mock_events_list_items, + component_setup, +) -> None: + """Test querying the API and fetching events from the server.""" + event = { + **TEST_EVENT, + **upcoming(), + "attendees": [ + { + "self": "True", + "responseStatus": "accepted", + } + ], + } + mock_events_list_items([event]) + assert await component_setup() + + client = await hass_client() + response = await 
client.get(upcoming_event_url(TEST_YAML_ENTITY)) + assert response.status == HTTPStatus.OK + events = await response.json() + assert len(events) == 1 + + @pytest.mark.parametrize("mock_test_setup", [None]) async def test_scan_calendar_error( hass: HomeAssistant, @@ -1359,3 +1417,90 @@ async def test_invalid_rrule_fix( assert event["uid"] == "cydrevtfuybguinhomj@google.com" assert event["recurrence_id"] == "_c8rinwq863h45qnucyoi43ny8_20230915" assert event["rrule"] is None + + +@pytest.mark.parametrize( + ("event_type", "expected_event_message"), + [ + ("default", "Test All Day Event"), + ("workingLocation", None), + ], +) +async def test_working_location_ignored( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_events_list_items: Callable[[list[dict[str, Any]]], None], + component_setup: ComponentSetup, + event_type: str, + expected_event_message: str | None, +) -> None: + """Test working location events are skipped.""" + event = { + **TEST_EVENT, + **upcoming(), + "eventType": event_type, + } + mock_events_list_items([event]) + assert await component_setup() + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.name == TEST_ENTITY_NAME + assert state.attributes.get("message") == expected_event_message + + +@pytest.mark.parametrize("calendar_is_primary", [True]) +async def test_working_location_entity( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, + mock_events_list_items: Callable[[list[dict[str, Any]]], None], + component_setup: ComponentSetup, +) -> None: + """Test that working location events are registered under a disabled by default entity.""" + event = { + **TEST_EVENT, + **upcoming(), + "eventType": "workingLocation", + } + mock_events_list_items([event]) + assert await component_setup() + + entity_entry = entity_registry.async_get("calendar.working_location") + assert entity_entry + assert entity_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + + entity_registry.async_update_entity( + entity_id="calendar.working_location", disabled_by=None + ) + async_fire_time_changed( + hass, + dt_util.utcnow() + datetime.timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + state = hass.states.get("calendar.working_location") + assert state + assert state.name == "Working location" + assert state.attributes.get("message") == "Test All Day Event" + + +@pytest.mark.parametrize("calendar_is_primary", [False]) +async def test_no_working_location_entity( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, + mock_events_list_items: Callable[[list[dict[str, Any]]], None], + component_setup: ComponentSetup, +) -> None: + """Test that working location events are not registered for a secondary calendar.""" + event = { + **TEST_EVENT, + **upcoming(), + "eventType": "workingLocation", + } + mock_events_list_items([event]) + assert await component_setup() + + entity_entry = entity_registry.async_get("calendar.working_location") + assert not entity_entry diff --git a/tests/components/google/test_config_flow.py b/tests/components/google/test_config_flow.py index f4a6c97f50d..de882a6f791 100644 --- a/tests/components/google/test_config_flow.py +++ b/tests/components/google/test_config_flow.py @@ -26,9 +26,11 @@ from homeassistant.components.application_credentials import ( async_import_client_credential, ) from homeassistant.components.google.const import ( + CONF_CALENDAR_ACCESS, CONF_CREDENTIAL_TYPE, DOMAIN, CredentialType, 
+ FeatureAccess, ) from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -474,10 +476,27 @@ async def test_wrong_configuration( assert result.get("reason") == "oauth_error" +@pytest.mark.parametrize( + ("options"), + [ + ({}), + ( + { + CONF_CALENDAR_ACCESS: FeatureAccess.read_write.name, + } + ), + ( + { + CONF_CALENDAR_ACCESS: FeatureAccess.read_only.name, + } + ), + ], +) async def test_reauth_flow( hass: HomeAssistant, mock_code_flow: Mock, mock_exchange: Mock, + options: dict[str, Any] | None, ) -> None: """Test reauth of an existing config entry.""" config_entry = MockConfigEntry( @@ -486,6 +505,7 @@ async def test_reauth_flow( "auth_implementation": DOMAIN, "token": {"access_token": "OLD_ACCESS_TOKEN"}, }, + options=options, ) config_entry.add_to_hass(hass) await async_import_client_credential( @@ -497,14 +517,7 @@ async def test_reauth_flow( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -547,6 +560,8 @@ async def test_reauth_flow( }, "credential_type": "device_auth", } + # Options are preserved during reauth + assert entries[0].options == options assert len(mock_setup.mock_calls) == 1 @@ -761,14 +776,7 @@ async def test_web_reauth_flow( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/google/test_init.py b/tests/components/google/test_init.py index cfcda18df3a..ad43e341968 100644 --- a/tests/components/google/test_init.py +++ b/tests/components/google/test_init.py @@ -20,7 +20,8 @@ from homeassistant.components.google.const import CONF_CALENDAR_ACCESS from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_OFF from homeassistant.core import HomeAssistant, State -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceNotSupported +from homeassistant.setup import async_setup_component from homeassistant.util.dt import UTC, utcnow from .conftest import ( @@ -248,35 +249,23 @@ async def test_init_calendar( async def test_multiple_config_entries( hass: HomeAssistant, component_setup: ComponentSetup, + config_entry: MockConfigEntry, mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, ) -> None: """Test finding a calendar from the API.""" + mock_calendars_list({"items": [test_api_calendar]}) + mock_events_list({}) + assert await component_setup() - config_entry1 = MockConfigEntry( - domain=DOMAIN, data=config_entry.data, unique_id=EMAIL_ADDRESS - ) - calendar1 = { - **test_api_calendar, - "id": "calendar-id1", - "summary": "Example Calendar 1", - } - - mock_calendars_list({"items": [calendar1]}) - mock_events_list({}, calendar_id="calendar-id1") - 
config_entry1.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry1.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("calendar.example_calendar_1") + state = hass.states.get(TEST_API_ENTITY) assert state assert state.state == STATE_OFF - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Example calendar 1" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == TEST_API_ENTITY_NAME config_entry2 = MockConfigEntry( domain=DOMAIN, data=config_entry.data, unique_id="other-address@example.com" @@ -605,7 +594,7 @@ async def test_unsupported_create_event( aioclient_mock: AiohttpClientMocker, ) -> None: """Test create event service call is unsupported for virtual calendars.""" - + await async_setup_component(hass, "homeassistant", {}) mock_calendars_list({"items": [test_api_calendar]}) mock_events_list({}) assert await component_setup() @@ -613,8 +602,12 @@ async def test_unsupported_create_event( start_datetime = datetime.datetime.now(tz=zoneinfo.ZoneInfo("America/Regina")) delta = datetime.timedelta(days=3, hours=3) end_datetime = start_datetime + delta + entity_id = "calendar.backyard_light" - with pytest.raises(HomeAssistantError, match="does not support this service"): + with pytest.raises( + ServiceNotSupported, + match=f"Entity {entity_id} does not support action google.create_event", + ): await hass.services.async_call( DOMAIN, "create_event", @@ -625,7 +618,7 @@ async def test_unsupported_create_event( "summary": TEST_EVENT_SUMMARY, "description": TEST_EVENT_DESCRIPTION, }, - target={"entity_id": "calendar.backyard_light"}, + target={"entity_id": entity_id}, blocking=True, ) diff --git a/tests/components/google_assistant/snapshots/test_diagnostics.ambr b/tests/components/google_assistant/snapshots/test_diagnostics.ambr index 9a4ad8b3da3..1ecedbd1173 100644 --- a/tests/components/google_assistant/snapshots/test_diagnostics.ambr +++ b/tests/components/google_assistant/snapshots/test_diagnostics.ambr @@ -6,6 +6,8 @@ 'project_id': '1234', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'google_assistant', 'minor_version': 1, 'options': dict({ @@ -13,6 +15,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'import', + 'subentries': list([ + ]), 'title': '1234', 'unique_id': '1234', 'version': 1, diff --git a/tests/components/google_assistant/test_google_assistant.py b/tests/components/google_assistant/test_google_assistant.py index ea30f89e0ef..2b0bfd82908 100644 --- a/tests/components/google_assistant/test_google_assistant.py +++ b/tests/components/google_assistant/test_google_assistant.py @@ -491,7 +491,7 @@ async def test_execute_request(hass_fixture, assistant_client, auth_header) -> N assert kitchen.attributes.get(light.ATTR_RGB_COLOR) == (255, 0, 0) bed = hass_fixture.states.get("light.bed_light") - assert bed.attributes.get(light.ATTR_COLOR_TEMP) == 212 + assert bed.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 4700 assert hass_fixture.states.get("switch.decorative_lights").state == "off" diff --git a/tests/components/google_assistant/test_helpers.py b/tests/components/google_assistant/test_helpers.py index 492f1be1829..0e6876cc901 100644 --- a/tests/components/google_assistant/test_helpers.py +++ b/tests/components/google_assistant/test_helpers.py @@ -14,9 +14,9 @@ from homeassistant.components.google_assistant.const import ( SOURCE_LOCAL, STORE_GOOGLE_LOCAL_WEBHOOK_ID, ) -from homeassistant.components.matter.models import MatterDeviceInfo -from homeassistant.config import 
async_process_ha_core_config +from homeassistant.components.matter import MatterDeviceInfo from homeassistant.core import HomeAssistant, State +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util diff --git a/tests/components/google_assistant/test_smart_home.py b/tests/components/google_assistant/test_smart_home.py index ea8f6957e38..a1c2ba1b3d4 100644 --- a/tests/components/google_assistant/test_smart_home.py +++ b/tests/components/google_assistant/test_smart_home.py @@ -9,10 +9,20 @@ from pytest_unordered import unordered from homeassistant.components.camera import CameraEntityFeature from homeassistant.components.climate import ATTR_MAX_TEMP, ATTR_MIN_TEMP, HVACMode + +# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.binary_sensor import DemoBinarySensor + +# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.cover import DemoCover + +# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.light import LIGHT_EFFECT_LIST, DemoLight + +# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.media_player import AbstractDemoPlayer + +# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.switch import DemoSwitch from homeassistant.components.google_assistant import ( EVENT_COMMAND_RECEIVED, @@ -22,7 +32,6 @@ from homeassistant.components.google_assistant import ( smart_home as sh, trait, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, EVENT_CALL_SERVICE, @@ -31,6 +40,7 @@ from homeassistant.const import ( __version__, ) from homeassistant.core import HomeAssistant, State +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import ( area_registry as ar, device_registry as dr, @@ -199,7 +209,7 @@ async def test_sync_message(hass: HomeAssistant, registries) -> None: }, "traits": [ trait.TRAIT_BRIGHTNESS, - trait.TRAIT_ONOFF, + trait.TRAIT_ON_OFF, trait.TRAIT_COLOR_SETTING, trait.TRAIT_MODES, ], @@ -319,7 +329,7 @@ async def test_sync_in_area(area_on_device, hass: HomeAssistant, registries) -> "name": {"name": "Demo Light"}, "traits": [ trait.TRAIT_BRIGHTNESS, - trait.TRAIT_ONOFF, + trait.TRAIT_ON_OFF, trait.TRAIT_COLOR_SETTING, trait.TRAIT_MODES, ], @@ -392,7 +402,7 @@ async def test_query_message(hass: HomeAssistant) -> None: light.async_write_ha_state() light2 = DemoLight( - None, "Another Light", state=True, hs_color=(180, 75), ct=400, brightness=78 + None, "Another Light", state=True, hs_color=(180, 75), ct=2500, brightness=78 ) light2.hass = hass light2.entity_id = "light.another_light" @@ -400,7 +410,7 @@ async def test_query_message(hass: HomeAssistant) -> None: light2._attr_name = "Another Light" light2.async_write_ha_state() - light3 = DemoLight(None, "Color temp Light", state=True, ct=400, brightness=200) + light3 = DemoLight(None, "Color temp Light", state=True, ct=2500, brightness=200) light3.hass = hass light3.entity_id = "light.color_temp_light" light3._attr_device_info = None @@ -916,7 +926,7 @@ async def test_unavailable_state_does_sync(hass: HomeAssistant) -> None: "name": {"name": "Demo Light"}, "traits": [ trait.TRAIT_BRIGHTNESS, - trait.TRAIT_ONOFF, + trait.TRAIT_ON_OFF, trait.TRAIT_COLOR_SETTING, 
trait.TRAIT_MODES, ], @@ -1440,7 +1450,7 @@ async def test_sync_message_recovery( "light.bad_light", "on", { - "min_mireds": "badvalue", + "max_color_temp_kelvin": "badvalue", "supported_color_modes": ["color_temp"], }, ) diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 54aa4035670..d269b5ff0d7 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -33,7 +33,10 @@ from homeassistant.components import ( valve, water_heater, ) -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) from homeassistant.components.camera import CameraEntityFeature from homeassistant.components.climate import ClimateEntityFeature from homeassistant.components.cover import CoverEntityFeature @@ -51,7 +54,6 @@ from homeassistant.components.media_player import ( from homeassistant.components.vacuum import VacuumEntityFeature from homeassistant.components.valve import ValveEntityFeature from homeassistant.components.water_heater import WaterHeaterEntityFeature -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_BATTERY_LEVEL, @@ -63,9 +65,6 @@ from homeassistant.const import ( EVENT_CALL_SERVICE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, @@ -77,7 +76,8 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State -from homeassistant.util import color, dt as dt_util +from homeassistant.core_config import async_process_ha_core_config +from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import TemperatureConverter from . 
import BASIC_CONFIG, MockConfig @@ -187,12 +187,12 @@ async def test_onoff_group(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} @@ -215,12 +215,12 @@ async def test_onoff_input_boolean(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} @@ -282,12 +282,12 @@ async def test_onoff_switch(hass: HomeAssistant) -> None: assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} @@ -307,12 +307,12 @@ async def test_onoff_fan(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} @@ -333,12 +333,12 @@ async def test_onoff_light(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) - await 
trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} @@ -359,13 +359,13 @@ async def test_onoff_media_player(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @@ -386,13 +386,13 @@ async def test_onoff_humidifier(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} @@ -415,13 +415,13 @@ async def test_onoff_water_heater(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, water_heater.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "water_heater.bla"} off_calls = async_mock_service(hass, water_heater.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "water_heater.bla"} @@ -431,7 +431,9 @@ async def test_dock_vacuum(hass: HomeAssistant) -> None: assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None, None) - trt = trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) + trt = trait.DockTrait( + hass, State("vacuum.bla", vacuum.VacuumActivity.IDLE), BASIC_CONFIG + ) assert trt.sync_attributes() == {} @@ -454,7 +456,7 @@ async def test_locate_vacuum(hass: HomeAssistant) -> None: hass, State( "vacuum.bla", - vacuum.STATE_IDLE, + vacuum.VacuumActivity.IDLE, {ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.LOCATE}, ), BASIC_CONFIG, @@ -485,7 +487,7 @@ async def test_energystorage_vacuum(hass: HomeAssistant) -> None: hass, State( "vacuum.bla", - vacuum.STATE_DOCKED, + vacuum.VacuumActivity.DOCKED, { ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.BATTERY, ATTR_BATTERY_LEVEL: 100, @@ -511,7 +513,7 @@ async def test_energystorage_vacuum(hass: HomeAssistant) -> None: hass, State( "vacuum.bla", - 
vacuum.STATE_CLEANING, + vacuum.VacuumActivity.CLEANING, { ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.BATTERY, ATTR_BATTERY_LEVEL: 20, @@ -551,7 +553,7 @@ async def test_startstop_vacuum(hass: HomeAssistant) -> None: hass, State( "vacuum.bla", - vacuum.STATE_PAUSED, + vacuum.VacuumActivity.PAUSED, {ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.PAUSE}, ), BASIC_CONFIG, @@ -562,22 +564,22 @@ async def test_startstop_vacuum(hass: HomeAssistant) -> None: assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) - await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) + await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) - await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) + await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} @@ -612,10 +614,10 @@ async def test_startstop_vacuum(hass: HomeAssistant) -> None: ), ( valve.DOMAIN, - valve.STATE_OPEN, - valve.STATE_CLOSED, - valve.STATE_OPENING, - valve.STATE_CLOSING, + valve.ValveState.OPEN, + valve.ValveState.CLOSED, + valve.ValveState.OPENING, + valve.ValveState.CLOSING, ValveEntityFeature.STOP | ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE, @@ -665,7 +667,7 @@ async def test_startstop_cover_valve( open_calls = async_mock_service(hass, domain, service_open) close_calls = async_mock_service(hass, domain, service_close) toggle_calls = async_mock_service(hass, domain, service_toggle) - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -681,18 +683,18 @@ async def test_startstop_cover_valve( with pytest.raises( SmartHomeError, match=f"{domain.capitalize()} is already stopped" ): - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) # Start triggers toggle open state.state = state_closed - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) assert len(open_calls) == 0 assert len(close_calls) == 0 assert len(toggle_calls) == 1 assert toggle_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} # Second start triggers toggle close state.state = state_open - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) + await 
trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) assert len(open_calls) == 0 assert len(close_calls) == 0 assert len(toggle_calls) == 2 @@ -703,7 +705,7 @@ async def test_startstop_cover_valve( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): - await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"start": True}, {}) @pytest.mark.parametrize( @@ -736,10 +738,10 @@ async def test_startstop_cover_valve( ), ( valve.DOMAIN, - valve.STATE_OPEN, - valve.STATE_CLOSED, - valve.STATE_OPENING, - valve.STATE_CLOSING, + valve.ValveState.OPEN, + valve.ValveState.CLOSED, + valve.ValveState.OPENING, + valve.ValveState.CLOSING, ValveEntityFeature.STOP | ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE, @@ -779,13 +781,13 @@ async def test_startstop_cover_valve_assumed( stop_calls = async_mock_service(hass, domain, service_stop) toggle_calls = async_mock_service(hass, domain, service_toggle) - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert len(toggle_calls) == 0 assert stop_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} stop_calls.clear() - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) assert len(stop_calls) == 0 assert len(toggle_calls) == 1 assert toggle_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -868,10 +870,10 @@ async def test_color_setting_temperature_light(hass: HomeAssistant) -> None: "light.bla", STATE_ON, { - light.ATTR_MIN_MIREDS: 200, + light.ATTR_MAX_COLOR_TEMP_KELVIN: 5000, light.ATTR_COLOR_MODE: "color_temp", - light.ATTR_COLOR_TEMP: 300, - light.ATTR_MAX_MIREDS: 500, + light.ATTR_COLOR_TEMP_KELVIN: 3333, + light.ATTR_MIN_COLOR_TEMP_KELVIN: 2000, "supported_color_modes": ["color_temp"], }, ), @@ -904,7 +906,7 @@ async def test_color_setting_temperature_light(hass: HomeAssistant) -> None: assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", - light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), + light.ATTR_COLOR_TEMP_KELVIN: 2857, } @@ -922,9 +924,9 @@ async def test_color_light_temperature_light_bad_temp(hass: HomeAssistant) -> No "light.bla", STATE_ON, { - light.ATTR_MIN_MIREDS: 200, - light.ATTR_COLOR_TEMP: 0, - light.ATTR_MAX_MIREDS: 500, + light.ATTR_MAX_COLOR_TEMP_KELVIN: 5000, + light.ATTR_COLOR_TEMP_KELVIN: 0, + light.ATTR_MIN_COLOR_TEMP_KELVIN: 2000, }, ), BASIC_CONFIG, @@ -984,13 +986,13 @@ async def test_light_modes(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, @@ -1422,7 +1424,7 @@ async def test_temperature_control(hass: HomeAssistant) -> None: "temperatureAmbientCelsius": 18, } with pytest.raises(helpers.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED @@ -1602,18 +1604,18 @@ async def test_lock_unlock_lock(hass: HomeAssistant) -> None: assert 
trait.LockUnlockTrait.might_2fa(lock.DOMAIN, LockEntityFeature.OPEN, None) trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG + hass, State("lock.front_door", lock.LockState.LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} - assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) + assert trt.can_execute(trait.COMMAND_LOCK_UNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) - await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} @@ -1628,7 +1630,7 @@ async def test_lock_unlock_unlocking(hass: HomeAssistant) -> None: assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, LockEntityFeature.OPEN, None) trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.STATE_UNLOCKING), PIN_CONFIG + hass, State("lock.front_door", lock.LockState.UNLOCKING), PIN_CONFIG ) assert trt.sync_attributes() == {} @@ -1645,18 +1647,18 @@ async def test_lock_unlock_lock_jammed(hass: HomeAssistant) -> None: assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, LockEntityFeature.OPEN, None) trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.STATE_JAMMED), PIN_CONFIG + hass, State("lock.front_door", lock.LockState.JAMMED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isJammed": True} - assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) + assert trt.can_execute(trait.COMMAND_LOCK_UNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) - await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} @@ -1670,20 +1672,20 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: ) trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG + hass, State("lock.front_door", lock.LockState.LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} - assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) + assert trt.can_execute(trait.COMMAND_LOCK_UNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: - await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED @@ -1691,14 +1693,14 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} + trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( - trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} + trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 @@ 
-1706,11 +1708,11 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: # Test without pin trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG + hass, State("lock.front_door", lock.LockState.LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: - await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP @@ -1720,7 +1722,7 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: "should_2fa", return_value=False, ): - await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 @@ -1734,7 +1736,7 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_HOME @@ -1765,11 +1767,12 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: assert trt.query_attributes() == { "isArmed": True, - "currentArmLevel": STATE_ALARM_ARMED_AWAY, + "currentArmLevel": AlarmControlPanelState.ARMED_AWAY, } assert trt.can_execute( - trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} + trait.COMMAND_ARM_DISARM, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, ) calls = async_mock_service( @@ -1782,16 +1785,16 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, BASIC_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {}, ) assert len(calls) == 0 @@ -1801,7 +1804,7 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, @@ -1809,9 +1812,9 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {}, ) assert len(calls) == 0 @@ -1821,9 +1824,9 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 @@ -1832,9 +1835,9 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: # correct pin await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {"pin": "1234"}, ) @@ -1845,16 
+1848,16 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {}, ) assert len(calls) == 1 @@ -1865,22 +1868,22 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": True}, {}, @@ -1897,7 +1900,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.TRIGGER @@ -1942,7 +1945,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: "isArmed": False, } - assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) + assert trt.can_execute(trait.COMMAND_ARM_DISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM @@ -1953,13 +1956,13 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) + await trt.execute(trait.COMMAND_ARM_DISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP @@ -1968,7 +1971,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, @@ -1976,7 +1979,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # No challenge data with pytest.raises(error.ChallengeNeeded) as err: - await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) + await trt.execute(trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED @@ -1984,7 +1987,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} + trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED @@ -1992,7 +1995,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # correct pin await 
trt.execute( - trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": "1234"} + trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {"pin": "1234"} ) assert len(calls) == 1 @@ -2002,13 +2005,13 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) + await trt.execute(trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED @@ -2016,7 +2019,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, @@ -2025,7 +2028,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} + trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED @@ -2036,13 +2039,13 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_PENDING, + AlarmControlPanelState.PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( - trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} + trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 @@ -2078,10 +2081,12 @@ async def test_fan_speed(hass: HomeAssistant) -> None: "currentFanSpeedSetting": ANY, } - assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) + assert trt.can_execute(trait.COMMAND_SET_FAN_SPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) - await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) + await trt.execute( + trait.COMMAND_SET_FAN_SPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {} + ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} @@ -2216,10 +2221,10 @@ async def test_fan_speed_ordered( "currentFanSpeedSetting": speed, } - assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": speed}) + assert trt.can_execute(trait.COMMAND_SET_FAN_SPEED, params={"fanSpeed": speed}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) - await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": speed}, {}) + await trt.execute(trait.COMMAND_SET_FAN_SPEED, BASIC_DATA, {"fanSpeed": speed}, {}) assert len(calls) == 1 assert calls[0].data == { @@ -2328,10 +2333,12 @@ async def test_climate_fan_speed(hass: HomeAssistant) -> None: "currentFanSpeedSetting": "low", } - assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) + assert trt.can_execute(trait.COMMAND_SET_FAN_SPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) - await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) + await trt.execute( + trait.COMMAND_SET_FAN_SPEED, BASIC_DATA, {"fanSpeed": "medium"}, {} + 
) assert len(calls) == 1 assert calls[0].data == { @@ -2387,7 +2394,7 @@ async def test_inputselector(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_INPUT, + trait.COMMAND_SET_INPUT, params={"newInput": "media"}, ) @@ -2395,7 +2402,7 @@ async def test_inputselector(hass: HomeAssistant) -> None: hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( - trait.COMMAND_INPUT, + trait.COMMAND_SET_INPUT, BASIC_DATA, {"newInput": "media"}, {}, @@ -2563,7 +2570,7 @@ async def test_modes_input_select(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) @@ -2571,7 +2578,7 @@ async def test_modes_input_select(hass: HomeAssistant) -> None: hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, @@ -2639,13 +2646,13 @@ async def test_modes_select(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service(hass, select.DOMAIN, select.SERVICE_SELECT_OPTION) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, @@ -2716,12 +2723,12 @@ async def test_modes_humidifier(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, @@ -2792,14 +2799,15 @@ async def test_modes_water_heater(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, params={"updateModeSettings": {"operation mode": "gas"}} + trait.COMMAND_SET_MODES, + params={"updateModeSettings": {"operation mode": "gas"}}, ) calls = async_mock_service( hass, water_heater.DOMAIN, water_heater.SERVICE_SET_OPERATION_MODE ) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"operation mode": "gas"}}, {}, @@ -2868,7 +2876,7 @@ async def test_sound_modes(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) @@ -2876,7 +2884,7 @@ async def test_sound_modes(hass: HomeAssistant) -> None: hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, @@ -2941,13 +2949,13 @@ async def test_preset_modes(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"preset mode": "auto"}}, ) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PRESET_MODE) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"preset mode": "auto"}}, {}, @@ -2975,7 +2983,7 @@ async def test_traits_unknown_domains( assert trt.supported("not_supported_domain", False, None, None) is False await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {}}, {}, @@ 
-3049,9 +3057,9 @@ async def test_openclose_cover_valve( calls_open = async_mock_service(hass, domain, open_service) calls_close = async_mock_service(hass, domain, close_service) - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( - trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} + trait.COMMAND_OPEN_CLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == { @@ -3066,9 +3074,9 @@ async def test_openclose_cover_valve( assert len(calls_close) == 0 - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 0}, {}) await trt.execute( - trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 0}, {} + trait.COMMAND_OPEN_CLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 0}, {} ) assert len(calls_set) == 1 assert len(calls_close) == 1 @@ -3123,7 +3131,7 @@ async def test_openclose_cover_valve_unknown_state( trt.query_attributes() calls = async_mock_service(hass, domain, open_service) - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -3144,7 +3152,7 @@ async def test_openclose_cover_valve_unknown_state( valve.DOMAIN, valve.SERVICE_SET_VALVE_POSITION, ValveEntityFeature.SET_POSITION, - valve.STATE_OPEN, + valve.ValveState.OPEN, ), ], ) @@ -3177,7 +3185,7 @@ async def test_openclose_cover_valve_assumed_state( assert trt.query_attributes() == {} calls = async_mock_service(hass, domain, set_position_service) - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla", cover.ATTR_POSITION: 40} @@ -3191,7 +3199,7 @@ async def test_openclose_cover_valve_assumed_state( ), ( valve.DOMAIN, - valve.STATE_OPEN, + valve.ValveState.OPEN, ), ], ) @@ -3242,8 +3250,8 @@ async def test_openclose_cover_valve_query_only( ), ( valve.DOMAIN, - valve.STATE_OPEN, - valve.STATE_CLOSED, + valve.ValveState.OPEN, + valve.ValveState.CLOSED, ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE, valve.SERVICE_OPEN_VALVE, valve.SERVICE_CLOSE_VALVE, @@ -3291,12 +3299,12 @@ async def test_openclose_cover_valve_no_position( assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, domain, close_service) - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} calls = async_mock_service(hass, domain, open_service) - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -3304,14 +3312,14 @@ async def test_openclose_cover_valve_no_position( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( - trait.COMMAND_OPENCLOSE_RELATIVE, + trait.COMMAND_OPEN_CLOSE_RELATIVE, BASIC_DATA, 
{"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( @@ -3354,7 +3362,7 @@ async def test_openclose_cover_secure(hass: HomeAssistant, device_class) -> None # No challenge data with pytest.raises(error.ChallengeNeeded) as err: - await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED @@ -3362,20 +3370,20 @@ async def test_openclose_cover_secure(hass: HomeAssistant, device_class) -> None # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} + trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( - trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} + trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close - await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @@ -3699,7 +3707,7 @@ async def test_humidity_setting_sensor_data( assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED @@ -4063,3 +4071,90 @@ async def test_sensorstate( ) is False ) + + +@pytest.mark.parametrize( + ("state", "identifier"), + [ + (STATE_ON, 0), + (STATE_OFF, 1), + (STATE_UNKNOWN, 2), + ], +) +@pytest.mark.parametrize( + ("device_class", "name", "states"), + [ + ( + binary_sensor.BinarySensorDeviceClass.CO, + "CarbonMonoxideLevel", + ["carbon monoxide detected", "no carbon monoxide detected", "unknown"], + ), + ( + binary_sensor.BinarySensorDeviceClass.SMOKE, + "SmokeLevel", + ["smoke detected", "no smoke detected", "unknown"], + ), + ( + binary_sensor.BinarySensorDeviceClass.MOISTURE, + "WaterLeak", + ["leak", "no leak", "unknown"], + ), + ], +) +async def test_binary_sensorstate( + hass: HomeAssistant, + state: str, + identifier: int, + device_class: binary_sensor.BinarySensorDeviceClass, + name: str, + states: list[str], +) -> None: + """Test SensorState trait support for binary sensor domain.""" + + assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None + assert trait.SensorStateTrait.supported( + binary_sensor.DOMAIN, None, device_class, None + ) + + trt = trait.SensorStateTrait( + hass, + State( + "binary_sensor.test", + state, + { + "device_class": device_class, + }, + ), + BASIC_CONFIG, + ) + + assert trt.sync_attributes() == { + "sensorStatesSupported": [ + { + "name": name, + "descriptiveCapabilities": { + "availableStates": states, + }, + } + ] + } + 
assert trt.query_attributes() == { + "currentSensorStateData": [ + { + "name": name, + "currentSensorState": states[identifier], + "rawValue": None, + }, + ] + } + + assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None + assert ( + trait.SensorStateTrait.supported( + binary_sensor.DOMAIN, + None, + binary_sensor.BinarySensorDeviceClass.TAMPER, + None, + ) + is False + ) diff --git a/tests/components/google_assistant_sdk/test_config_flow.py b/tests/components/google_assistant_sdk/test_config_flow.py index d66d12509e8..332610e74e8 100644 --- a/tests/components/google_assistant_sdk/test_config_flow.py +++ b/tests/components/google_assistant_sdk/test_config_flow.py @@ -178,37 +178,7 @@ async def test_single_instance_allowed( result = await hass.config_entries.flow.async_init( "google_assistant_sdk", context={"source": config_entries.SOURCE_USER} ) - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - assert result["url"] == ( - f"{GOOGLE_AUTH_URI}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}&scope=https://www.googleapis.com/auth/assistant-sdk-prototype" - "&access_type=offline&prompt=consent" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.post( - GOOGLE_TOKEN_URI, - json={ - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - "expires_in": 60, - }, - ) - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result.get("type") is FlowResultType.ABORT assert result.get("reason") == "single_instance_allowed" diff --git a/tests/components/google_cloud/__init__.py b/tests/components/google_cloud/__init__.py new file mode 100644 index 00000000000..67e83b58c71 --- /dev/null +++ b/tests/components/google_cloud/__init__.py @@ -0,0 +1 @@ +"""Tests for the Google Cloud integration.""" diff --git a/tests/components/google_cloud/conftest.py b/tests/components/google_cloud/conftest.py new file mode 100644 index 00000000000..897c352b402 --- /dev/null +++ b/tests/components/google_cloud/conftest.py @@ -0,0 +1,124 @@ +"""Tests helpers.""" + +from collections.abc import Generator +import json +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, patch + +from google.cloud.texttospeech_v1.types import cloud_tts +import pytest + +from homeassistant.components.google_cloud.const import ( + CONF_SERVICE_ACCOUNT_INFO, + DOMAIN, +) + +from tests.common import MockConfigEntry + +VALID_SERVICE_ACCOUNT_INFO = { + "type": "service_account", + "project_id": "my project id", + "private_key_id": "my private key if", + "private_key": "-----BEGIN PRIVATE 
KEY-----\nMIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAKYscIlwm7soDsHAz6L6YvUkCvkrX19rS6yeYOmovvhoK5WeYGWUsd8V72zmsyHB7XO94YgJVjvxfzn5K8bLePjFzwoSJjZvhBJ/ZQ05d8VmbvgyWUoPdG9oEa4fZ/lCYrXoaFdTot2xcJvrb/ZuiRl4s4eZpNeFYvVK/Am7UeFPAgMBAAECgYAUetOfzLYUudofvPCaKHu7tKZ5kQPfEa0w6BAPnBF1Mfl1JiDBRDMryFtKs6AOIAVwx00dY/Ex0BCbB3+Cr58H7t4NaPTJxCpmR09pK7o17B7xAdQv8+SynFNud9/5vQ5AEXMOLNwKiU7wpXT6Z7ZIibUBOR7ewsWgsHCDpN1iqQJBAOMODPTPSiQMwRAUHIc6GPleFSJnIz2PAoG3JOG9KFAL6RtIc19lob2ZXdbQdzKtjSkWo+O5W20WDNAl1k32h6MCQQC7W4ZCIY67mPbL6CxXfHjpSGF4Dr9VWJ7ZrKHr6XUoOIcEvsn/pHvWonjMdy93rQMSfOE8BKd/I1+GHRmNVgplAkAnSo4paxmsZVyfeKt7Jy2dMY+8tVZe17maUuQaAE7Sk00SgJYegwrbMYgQnWCTL39HBfj0dmYA2Zj8CCAuu6O7AkEAryFiYjaUAO9+4iNoL27+ZrFtypeeadyov7gKs0ZKaQpNyzW8A+Zwi7TbTeSqzic/E+z/bOa82q7p/6b7141xsQJBANCAcIwMcVb6KVCHlQbOtKspo5Eh4ZQi8bGl+IcwbQ6JSxeTx915IfAldgbuU047wOB04dYCFB2yLDiUGVXTifU=\n-----END PRIVATE KEY-----\n", + "client_email": "my client email", + "client_id": "my client id", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/service-account", + "universe_domain": "googleapis.com", +} + + +@pytest.fixture +def create_google_credentials_json(tmp_path: Path) -> str: + """Create googlecredentials.json.""" + file_path = tmp_path / "googlecredentials.json" + with open(file_path, "w", encoding="utf8") as f: + json.dump(VALID_SERVICE_ACCOUNT_INFO, f) + return str(file_path) + + +@pytest.fixture +def create_invalid_google_credentials_json(create_google_credentials_json: str) -> str: + """Create invalid googlecredentials.json.""" + invalid_service_account_info = VALID_SERVICE_ACCOUNT_INFO.copy() + invalid_service_account_info.pop("client_email") + with open(create_google_credentials_json, "w", encoding="utf8") as f: + json.dump(invalid_service_account_info, f) + return create_google_credentials_json + + +@pytest.fixture +def mock_process_uploaded_file( + create_google_credentials_json: str, +) -> Generator[MagicMock]: + """Mock upload certificate files.""" + ctx_mock = MagicMock() + ctx_mock.__enter__.return_value = Path(create_google_credentials_json) + with patch( + "homeassistant.components.google_cloud.config_flow.process_uploaded_file", + return_value=ctx_mock, + ) as mock_upload: + yield mock_upload + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="my Google Cloud title", + domain=DOMAIN, + data={CONF_SERVICE_ACCOUNT_INFO: VALID_SERVICE_ACCOUNT_INFO}, + ) + + +@pytest.fixture +def mock_api_tts() -> AsyncMock: + """Return a mocked TTS client.""" + mock_client = AsyncMock() + mock_client.list_voices.return_value = cloud_tts.ListVoicesResponse( + voices=[ + cloud_tts.Voice(language_codes=["en-US"], name="en-US-Standard-A"), + cloud_tts.Voice(language_codes=["en-US"], name="en-US-Standard-B"), + cloud_tts.Voice(language_codes=["el-GR"], name="el-GR-Standard-A"), + ] + ) + return mock_client + + +@pytest.fixture +def mock_api_tts_from_service_account_info( + mock_api_tts: AsyncMock, +) -> Generator[AsyncMock]: + """Return a mocked TTS client created with from_service_account_info.""" + with ( + patch( + "google.cloud.texttospeech.TextToSpeechAsyncClient.from_service_account_info", + return_value=mock_api_tts, + ), + ): + yield mock_api_tts + + +@pytest.fixture +def mock_api_tts_from_service_account_file( + 
mock_api_tts: AsyncMock, +) -> Generator[AsyncMock]: + """Return a mocked TTS client created with from_service_account_file.""" + with ( + patch( + "google.cloud.texttospeech.TextToSpeechAsyncClient.from_service_account_file", + return_value=mock_api_tts, + ), + ): + yield mock_api_tts + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.google_cloud.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry diff --git a/tests/components/google_cloud/test_config_flow.py b/tests/components/google_cloud/test_config_flow.py new file mode 100644 index 00000000000..e4b4631f223 --- /dev/null +++ b/tests/components/google_cloud/test_config_flow.py @@ -0,0 +1,185 @@ +"""Test the Google Cloud config flow.""" + +from unittest.mock import AsyncMock, MagicMock +from uuid import uuid4 + +from homeassistant import config_entries +from homeassistant.components import tts +from homeassistant.components.google_cloud.config_flow import UPLOADED_KEY_FILE +from homeassistant.components.google_cloud.const import ( + CONF_KEY_FILE, + CONF_SERVICE_ACCOUNT_INFO, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PLATFORM +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.setup import async_setup_component + +from .conftest import VALID_SERVICE_ACCOUNT_INFO + +from tests.common import MockConfigEntry + + +async def test_user_flow_success( + hass: HomeAssistant, + mock_process_uploaded_file: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user flow creates entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + uploaded_file = str(uuid4()) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {UPLOADED_KEY_FILE: uploaded_file}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Google Cloud" + assert result["data"] == {CONF_SERVICE_ACCOUNT_INFO: VALID_SERVICE_ACCOUNT_INFO} + mock_process_uploaded_file.assert_called_with(hass, uploaded_file) + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_flow_missing_file( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, +) -> None: + """Test user flow when uploaded file is missing.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {UPLOADED_KEY_FILE: str(uuid4())}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_file"} + assert len(mock_setup_entry.mock_calls) == 0 + + +async def test_user_flow_invalid_file( + hass: HomeAssistant, + create_invalid_google_credentials_json: str, + mock_process_uploaded_file: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user flow when uploaded file is invalid.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + uploaded_file = str(uuid4()) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {UPLOADED_KEY_FILE: 
uploaded_file}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_file"} + mock_process_uploaded_file.assert_called_with(hass, uploaded_file) + assert len(mock_setup_entry.mock_calls) == 0 + + +async def test_import_flow( + hass: HomeAssistant, + create_google_credentials_json: str, + mock_api_tts_from_service_account_file: AsyncMock, + mock_api_tts_from_service_account_info: AsyncMock, +) -> None: + """Test the import flow.""" + assert not hass.config_entries.async_entries(DOMAIN) + assert await async_setup_component( + hass, + tts.DOMAIN, + { + tts.DOMAIN: {CONF_PLATFORM: DOMAIN} + | {CONF_KEY_FILE: create_google_credentials_json} + }, + ) + await hass.async_block_till_done() + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + assert config_entry.state is config_entries.ConfigEntryState.LOADED + + +async def test_import_flow_invalid_file( + hass: HomeAssistant, + create_invalid_google_credentials_json: str, + mock_api_tts_from_service_account_file: AsyncMock, +) -> None: + """Test the import flow when the key file is invalid.""" + assert not hass.config_entries.async_entries(DOMAIN) + assert await async_setup_component( + hass, + tts.DOMAIN, + { + tts.DOMAIN: {CONF_PLATFORM: DOMAIN} + | {CONF_KEY_FILE: create_invalid_google_credentials_json} + }, + ) + await hass.async_block_till_done() + assert not hass.config_entries.async_entries(DOMAIN) + assert mock_api_tts_from_service_account_file.list_voices.call_count == 1 + + +async def test_options_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_api_tts_from_service_account_info: AsyncMock, +) -> None: + """Test options flow.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_api_tts_from_service_account_info.list_voices.call_count == 1 + + assert mock_config_entry.options == {} + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + data_schema = result["data_schema"].schema + assert set(data_schema) == { + "language", + "gender", + "voice", + "encoding", + "speed", + "pitch", + "gain", + "profiles", + "text_type", + "stt_model", + } + assert mock_api_tts_from_service_account_info.list_voices.call_count == 2 + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"language": "el-GR"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_config_entry.options == { + "language": "el-GR", + "gender": "NEUTRAL", + "voice": "", + "encoding": "MP3", + "speed": 1.0, + "pitch": 0.0, + "gain": 0.0, + "profiles": [], + "text_type": "text", + "stt_model": "latest_short", + } + assert mock_api_tts_from_service_account_info.list_voices.call_count == 3 diff --git a/tests/components/google_domains/__init__.py b/tests/components/google_domains/__init__.py deleted file mode 100644 index 3466a3be489..00000000000 --- a/tests/components/google_domains/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the google_domains component.""" diff --git a/tests/components/google_domains/test_init.py b/tests/components/google_domains/test_init.py deleted file mode 100644 index bb27cf7b483..00000000000 --- a/tests/components/google_domains/test_init.py +++ /dev/null @@ -1,85 +0,0 @@ -"""Test the Google Domains component.""" - -from datetime import timedelta - -import pytest - 
-from homeassistant.components import google_domains -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component -from homeassistant.util.dt import utcnow - -from tests.common import async_fire_time_changed -from tests.test_util.aiohttp import AiohttpClientMocker - -DOMAIN = "test.example.com" -USERNAME = "abc123" -PASSWORD = "xyz789" - -UPDATE_URL = f"https://{USERNAME}:{PASSWORD}@domains.google.com/nic/update" - - -@pytest.fixture -def setup_google_domains( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Fixture that sets up NamecheapDNS.""" - aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="ok 0.0.0.0") - - hass.loop.run_until_complete( - async_setup_component( - hass, - google_domains.DOMAIN, - { - "google_domains": { - "domain": DOMAIN, - "username": USERNAME, - "password": PASSWORD, - } - }, - ) - ) - - -async def test_setup(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> None: - """Test setup works if update passes.""" - aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="nochg 0.0.0.0") - - result = await async_setup_component( - hass, - google_domains.DOMAIN, - { - "google_domains": { - "domain": DOMAIN, - "username": USERNAME, - "password": PASSWORD, - } - }, - ) - assert result - assert aioclient_mock.call_count == 1 - - async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done() - assert aioclient_mock.call_count == 2 - - -async def test_setup_fails_if_update_fails( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup fails if first update fails.""" - aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="nohost") - - result = await async_setup_component( - hass, - google_domains.DOMAIN, - { - "google_domains": { - "domain": DOMAIN, - "username": USERNAME, - "password": PASSWORD, - } - }, - ) - assert not result - assert aioclient_mock.call_count == 1 diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 1ea5c2ad9b8..4192a60513e 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -212,7 +212,7 @@ async def test_function_call( name="test_tool", args={ "param1": ["test_value", "param1\\'s value"], - "param2": "param2\\'s value", + "param2": 2.7, }, ) @@ -258,7 +258,7 @@ async def test_function_call( tool_name="test_tool", tool_args={ "param1": ["test_value", "param1's value"], - "param2": "param2's value", + "param2": 2.7, }, ), llm.LLMContext( diff --git a/tests/components/google_photos/__init__.py b/tests/components/google_photos/__init__.py new file mode 100644 index 00000000000..fa345811216 --- /dev/null +++ b/tests/components/google_photos/__init__.py @@ -0,0 +1 @@ +"""Tests for the Google Photos integration.""" diff --git a/tests/components/google_photos/conftest.py b/tests/components/google_photos/conftest.py new file mode 100644 index 00000000000..c848122a9fd --- /dev/null +++ b/tests/components/google_photos/conftest.py @@ -0,0 +1,202 @@ +"""Test fixtures for Google Photos.""" + +from collections.abc import AsyncGenerator, Awaitable, Callable, Generator +import time +from typing import Any +from unittest.mock import AsyncMock, Mock, patch + +from google_photos_library_api.api import GooglePhotosLibraryApi +from google_photos_library_api.model import ( + 
Album, +    ListAlbumResult, +    ListMediaItemResult, +    MediaItem, +    UserInfoResult, +) +import pytest + +from homeassistant.components.application_credentials import ( +    ClientCredential, +    async_import_client_credential, +) +from homeassistant.components.google_photos.const import DOMAIN, OAUTH2_SCOPES +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import ( +    MockConfigEntry, +    load_json_array_fixture, +    load_json_object_fixture, +) + +USER_IDENTIFIER = "user-identifier-1" +CONFIG_ENTRY_ID = "user-identifier-1" +CLIENT_ID = "1234" +CLIENT_SECRET = "5678" +FAKE_ACCESS_TOKEN = "some-access-token" +FAKE_REFRESH_TOKEN = "some-refresh-token" +EXPIRES_IN = 3600 +USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo" +PHOTOS_BASE_URL = "https://photoslibrary.googleapis.com" +MEDIA_ITEMS_URL = f"{PHOTOS_BASE_URL}/v1/mediaItems" +ALBUMS_URL = f"{PHOTOS_BASE_URL}/v1/albums" +UPLOADS_URL = f"{PHOTOS_BASE_URL}/v1/uploads" +CREATE_MEDIA_ITEMS_URL = f"{PHOTOS_BASE_URL}/v1/mediaItems:batchCreate" + + +@pytest.fixture(name="expires_at") +def mock_expires_at() -> int: +    """Fixture to set the oauth token expiration time.""" +    return time.time() + EXPIRES_IN + + +@pytest.fixture(name="scopes") +def mock_scopes() -> list[str]: +    """Fixture to set scopes used during the config entry.""" +    return OAUTH2_SCOPES + + +@pytest.fixture(name="token_entry") +def mock_token_entry(expires_at: int, scopes: list[str]) -> dict[str, Any]: +    """Fixture for OAuth 'token' data for a ConfigEntry.""" +    return { +        "access_token": FAKE_ACCESS_TOKEN, +        "refresh_token": FAKE_REFRESH_TOKEN, +        "scope": " ".join(scopes), +        "type": "Bearer", +        "expires_at": expires_at, +        "expires_in": EXPIRES_IN, +    } + + +@pytest.fixture(name="config_entry_id") +def mock_config_entry_id() -> str | None: +    """Provide the unique id to use for the mocked config entry.""" +    return CONFIG_ENTRY_ID + + +@pytest.fixture(name="config_entry") +def mock_config_entry( +    config_entry_id: str, token_entry: dict[str, Any] +) -> MockConfigEntry: +    """Fixture for a config entry.""" +    return MockConfigEntry( +        domain=DOMAIN, +        unique_id=config_entry_id, +        data={ +            "auth_implementation": DOMAIN, +            "token": token_entry, +        }, +        title="Account Name", +    ) + + +@pytest.fixture(autouse=True) +async def setup_credentials(hass: HomeAssistant) -> None: +    """Fixture to setup credentials.""" +    assert await async_setup_component(hass, "application_credentials", {}) +    await async_import_client_credential( +        hass, +        DOMAIN, +        ClientCredential(CLIENT_ID, CLIENT_SECRET), +    ) + + +@pytest.fixture(name="fixture_name") +def mock_fixture_name() -> str | None: +    """Provide a json fixture file to load for list media item api responses.""" +    return None + + +@pytest.fixture(name="user_identifier") +def mock_user_identifier() -> str | None: +    """Provide the user identifier returned by the mocked user info API.""" +    return USER_IDENTIFIER + + +@pytest.fixture(name="api_error") +def mock_api_error() -> Exception | None: +    """Provide an optional exception raised by the mocked API calls.""" +    return None + + +@pytest.fixture(name="mock_api") +def mock_client_api( +    fixture_name: str, +    user_identifier: str, +    api_error: Exception, +) -> Generator[Mock]: +    """Set up fake Google Photos API responses from fixtures.""" +    mock_api = AsyncMock(GooglePhotosLibraryApi, autospec=True) +    mock_api.get_user_info.return_value = UserInfoResult( +        id=user_identifier, +        name="Test Name", +    ) + +    responses = 
load_json_array_fixture(fixture_name, DOMAIN) if fixture_name else [] + + async def list_media_items(*args: Any) -> AsyncGenerator[ListMediaItemResult]: + for response in responses: + mock_list_media_items = Mock(ListMediaItemResult) + mock_list_media_items.media_items = [ + MediaItem.from_dict(media_item) for media_item in response["mediaItems"] + ] + yield mock_list_media_items + + mock_api.list_media_items.return_value.__aiter__ = list_media_items + mock_api.list_media_items.return_value.__anext__ = list_media_items + mock_api.list_media_items.side_effect = api_error + + # Mock a point lookup by reading contents of the fixture above + async def get_media_item(media_item_id: str, **kwargs: Any) -> Mock: + for response in responses: + for media_item in response["mediaItems"]: + if media_item["id"] == media_item_id: + return MediaItem.from_dict(media_item) + return None + + mock_api.get_media_item = get_media_item + + # Emulate an async iterator for returning pages of response objects. We just + # return a single page. + + async def list_albums(*args: Any, **kwargs: Any) -> AsyncGenerator[ListAlbumResult]: + mock_list_album_result = Mock(ListAlbumResult) + mock_list_album_result.albums = [ + Album.from_dict(album) + for album in load_json_object_fixture("list_albums.json", DOMAIN)["albums"] + ] + yield mock_list_album_result + + mock_api.list_albums.return_value.__aiter__ = list_albums + mock_api.list_albums.return_value.__anext__ = list_albums + mock_api.list_albums.side_effect = api_error + + # Mock a point lookup by reading contents of the album fixture above + async def get_album(album_id: str, **kwargs: Any) -> Mock: + for album in load_json_object_fixture("list_albums.json", DOMAIN)["albums"]: + if album["id"] == album_id: + return Album.from_dict(album) + return None + + mock_api.get_album = get_album + mock_api.get_album.side_effect = api_error + + return mock_api + + +@pytest.fixture(name="setup_integration") +async def mock_setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_api: Mock, +) -> Callable[[], Awaitable[bool]]: + """Fixture to set up the integration.""" + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.google_photos.GooglePhotosLibraryApi", + return_value=mock_api, + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/google_photos/fixtures/list_albums.json b/tests/components/google_photos/fixtures/list_albums.json new file mode 100644 index 00000000000..7460e1d36f3 --- /dev/null +++ b/tests/components/google_photos/fixtures/list_albums.json @@ -0,0 +1,13 @@ +{ + "albums": [ + { + "id": "album-media-id-1", + "title": "Album title", + "productUrl": "http://photos.google.com/album-media-id-1", + "isWriteable": true, + "mediaItemsCount": 7, + "coverPhotoBaseUrl": "http://img.example.com/id3", + "coverPhotoMediaItemId": "cover-photo-media-id-3" + } + ] +} diff --git a/tests/components/google_photos/fixtures/list_mediaitems.json b/tests/components/google_photos/fixtures/list_mediaitems.json new file mode 100644 index 00000000000..8e470a2fc04 --- /dev/null +++ b/tests/components/google_photos/fixtures/list_mediaitems.json @@ -0,0 +1,35 @@ +[ + { + "mediaItems": [ + { + "id": "id1", + "description": "some-descripton", + "productUrl": "http://example.com/id1", + "baseUrl": "http://img.example.com/id1", + "mimeType": "image/jpeg", + "mediaMetadata": { + "creationTime": "2014-10-02T15:01:23Z", + "width": 1600, + "height": 768 + }, + 
"filename": "example1.jpg" + }, + { + "id": "id2", + "description": "some-descripton", + "productUrl": "http://example.com/id2", + "baseUrl": "http://img.example.com/id2", + "mimeType": "video/mp4", + "mediaMetadata": { + "creationTime": "2014-10-02T16:01:23Z", + "width": 1600, + "height": 768, + "video": { + "cameraMake": "Pixel" + } + }, + "filename": "example2.mp4" + } + ] + } +] diff --git a/tests/components/google_photos/fixtures/list_mediaitems_empty.json b/tests/components/google_photos/fixtures/list_mediaitems_empty.json new file mode 100644 index 00000000000..bf6a4da855f --- /dev/null +++ b/tests/components/google_photos/fixtures/list_mediaitems_empty.json @@ -0,0 +1,5 @@ +[ + { + "mediaItems": [] + } +] diff --git a/tests/components/google_photos/test_config_flow.py b/tests/components/google_photos/test_config_flow.py new file mode 100644 index 00000000000..4896f82effb --- /dev/null +++ b/tests/components/google_photos/test_config_flow.py @@ -0,0 +1,326 @@ +"""Test the Google Photos config flow.""" + +from collections.abc import Generator +from typing import Any +from unittest.mock import Mock, patch + +from google_photos_library_api.exceptions import GooglePhotosApiError +import pytest + +from homeassistant import config_entries +from homeassistant.components.google_photos.const import ( + DOMAIN, + OAUTH2_AUTHORIZE, + OAUTH2_TOKEN, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow + +from .conftest import EXPIRES_IN, FAKE_ACCESS_TOKEN, FAKE_REFRESH_TOKEN, USER_IDENTIFIER + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + +CLIENT_ID = "1234" +CLIENT_SECRET = "5678" + + +@pytest.fixture(name="mock_setup") +def mock_setup_entry() -> Generator[Mock]: + """Fixture to mock out integration setup.""" + with patch( + "homeassistant.components.google_photos.async_setup_entry", return_value=True + ) as mock_setup: + yield mock_setup + + +@pytest.fixture(autouse=True) +def mock_patch_api(mock_api: Mock) -> Generator[None]: + """Fixture to patch the config flow api.""" + with patch( + "homeassistant.components.google_photos.config_flow.GooglePhotosLibraryApi", + return_value=mock_api, + ): + yield + + +@pytest.fixture(name="updated_token_entry", autouse=True) +def mock_updated_token_entry() -> dict[str, Any]: + """Fixture to provide any test specific overrides to token data from the oauth token endpoint.""" + return {} + + +@pytest.fixture(name="mock_oauth_token_request", autouse=True) +def mock_token_request( + aioclient_mock: AiohttpClientMocker, + token_entry: dict[str, any], + updated_token_entry: dict[str, Any], +) -> None: + """Fixture to provide a fake response from the oauth token endpoint.""" + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + **token_entry, + **updated_token_entry, + }, + ) + + +@pytest.mark.usefixtures("current_request_with_host", "mock_api") +@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_setup: Mock, +) -> None: + """Check full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": 
"https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + "+https://www.googleapis.com/auth/photoslibrary.appendonly" + "+https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.CREATE_ENTRY + config_entry = result["result"] + assert config_entry.unique_id == USER_IDENTIFIER + assert config_entry.title == "Test Name" + config_entry_data = dict(config_entry.data) + assert "token" in config_entry_data + assert "expires_at" in config_entry_data["token"] + del config_entry_data["token"]["expires_at"] + assert config_entry_data == { + "auth_implementation": DOMAIN, + "token": { + "access_token": FAKE_ACCESS_TOKEN, + "expires_in": EXPIRES_IN, + "refresh_token": FAKE_REFRESH_TOKEN, + "type": "Bearer", + "scope": ( + "https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + " https://www.googleapis.com/auth/photoslibrary.appendonly" + " https://www.googleapis.com/auth/userinfo.profile" + ), + }, + } + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + +@pytest.mark.usefixtures( + "current_request_with_host", + "setup_credentials", + "mock_api", +) +@pytest.mark.parametrize( + "api_error", + [ + GooglePhotosApiError("some error"), + ], +) +async def test_api_not_enabled( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, +) -> None: + """Check flow aborts if api is not enabled.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + "+https://www.googleapis.com/auth/photoslibrary.appendonly" + "+https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "access_not_configured" + assert result["description_placeholders"]["message"].endswith("some error") + + +@pytest.mark.usefixtures("current_request_with_host", "setup_credentials") +async def test_general_exception( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_api: Mock, +) -> None: + """Check flow aborts if exception happens.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": 
config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + "+https://www.googleapis.com/auth/photoslibrary.appendonly" + "+https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + mock_api.list_media_items.side_effect = Exception + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" + + +@pytest.mark.usefixtures("current_request_with_host", "mock_api", "setup_integration") +@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) +@pytest.mark.parametrize( + "updated_token_entry", + [ + { + "access_token": "updated-access-token", + } + ], +) +@pytest.mark.parametrize( + ( + "user_identifier", + "abort_reason", + "resulting_access_token", + "expected_setup_calls", + ), + [ + ( + USER_IDENTIFIER, + "reauth_successful", + "updated-access-token", + 1, + ), + ( + "345", + "wrong_account", + FAKE_ACCESS_TOKEN, + 0, + ), + ], +) +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + config_entry: MockConfigEntry, + user_identifier: str, + abort_reason: str, + resulting_access_token: str, + mock_setup: Mock, + expected_setup_calls: int, +) -> None: + """Test the re-authentication case updates the correct config entry.""" + + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + "+https://www.googleapis.com/auth/photoslibrary.appendonly" + "+https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == abort_reason + + assert config_entry.unique_id == USER_IDENTIFIER + assert config_entry.title == "Account Name" + config_entry_data = dict(config_entry.data) + assert "token" in config_entry_data + assert "expires_at" in 
config_entry_data["token"] + del config_entry_data["token"]["expires_at"] + assert config_entry_data == { + "auth_implementation": DOMAIN, + "token": { + # Verify token is refreshed or not + "access_token": resulting_access_token, + "expires_in": EXPIRES_IN, + "refresh_token": FAKE_REFRESH_TOKEN, + "type": "Bearer", + "scope": ( + "https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + " https://www.googleapis.com/auth/photoslibrary.appendonly" + " https://www.googleapis.com/auth/userinfo.profile" + ), + }, + } + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == expected_setup_calls diff --git a/tests/components/google_photos/test_init.py b/tests/components/google_photos/test_init.py new file mode 100644 index 00000000000..80b051d092d --- /dev/null +++ b/tests/components/google_photos/test_init.py @@ -0,0 +1,120 @@ +"""Tests for Google Photos.""" + +import http +import time + +from aiohttp import ClientError +from google_photos_library_api.exceptions import GooglePhotosApiError +import pytest + +from homeassistant.components.google_photos.const import OAUTH2_TOKEN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.mark.usefixtures("setup_integration") +async def test_setup( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test successful setup and unload.""" + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.fixture(name="refresh_token_status") +def mock_refresh_token_status() -> http.HTTPStatus: + """Fixture to set a token refresh status.""" + return http.HTTPStatus.OK + + +@pytest.fixture(name="refresh_token_exception") +def mock_refresh_token_exception() -> Exception | None: + """Fixture to set a token refresh status.""" + return None + + +@pytest.fixture(name="refresh_token") +def mock_refresh_token( + aioclient_mock: AiohttpClientMocker, + refresh_token_status: http.HTTPStatus, + refresh_token_exception: Exception | None, +) -> MockConfigEntry: + """Fixture to simulate a token refresh response.""" + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + exc=refresh_token_exception, + status=refresh_token_status, + json={ + "access_token": "updated-access-token", + "refresh_token": "updated-refresh-token", + "expires_at": time.time() + 3600, + "expires_in": 3600, + }, + ) + + +@pytest.mark.usefixtures("refresh_token", "setup_integration") +@pytest.mark.parametrize("expires_at", [time.time() - 3600], ids=["expired"]) +async def test_expired_token_refresh_success( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test expired token is refreshed.""" + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry.data["token"]["access_token"] == "updated-access-token" + assert config_entry.data["token"]["expires_in"] == 3600 + + +@pytest.mark.usefixtures("refresh_token", "setup_integration") +@pytest.mark.parametrize( + ("expires_at", "refresh_token_status", "refresh_token_exception", "expected_state"), + [ + ( + time.time() - 3600, + http.HTTPStatus.UNAUTHORIZED, + None, + ConfigEntryState.SETUP_ERROR, # Reauth + ), + ( + time.time() - 3600, + 
http.HTTPStatus.INTERNAL_SERVER_ERROR, + None, + ConfigEntryState.SETUP_RETRY, + ), + ( + time.time() - 3600, + None, + ClientError("Client exception raised"), + ConfigEntryState.SETUP_RETRY, + ), + ], + ids=["unauthorized", "internal_server_error", "client_error"], +) +async def test_expired_token_refresh_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + expected_state: ConfigEntryState, +) -> None: + """Test failure while refreshing token with a transient error.""" + + assert config_entry.state is expected_state + + +@pytest.mark.usefixtures("setup_integration") +@pytest.mark.parametrize("api_error", [GooglePhotosApiError("some error")]) +async def test_coordinator_init_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test init failure to load albums.""" + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/google_photos/test_media_source.py b/tests/components/google_photos/test_media_source.py new file mode 100644 index 00000000000..ce059e4fce5 --- /dev/null +++ b/tests/components/google_photos/test_media_source.py @@ -0,0 +1,191 @@ +"""Test the Google Photos media source.""" + +from unittest.mock import Mock + +from google_photos_library_api.exceptions import GooglePhotosApiError +import pytest + +from homeassistant.components.google_photos.const import DOMAIN, UPLOAD_SCOPE +from homeassistant.components.media_source import ( + URI_SCHEME, + BrowseError, + async_browse_media, + async_resolve_media, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .conftest import CONFIG_ENTRY_ID + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +async def setup_components(hass: HomeAssistant) -> None: + """Fixture to initialize the integration.""" + await async_setup_component(hass, "media_source", {}) + + +@pytest.mark.usefixtures("setup_integration") +async def test_no_config_entries( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test a media source with no active config entry.""" + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + + assert browse.domain == DOMAIN + assert browse.identifier is None + assert browse.title == "Google Photos" + assert browse.can_expand + assert not browse.children + + +@pytest.mark.usefixtures("setup_integration", "mock_api") +@pytest.mark.parametrize( + ("scopes"), + [ + [UPLOAD_SCOPE], + ], +) +async def test_no_read_scopes( + hass: HomeAssistant, +) -> None: + """Test a media source with only write scopes configured so no media source exists.""" + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + assert browse.domain == DOMAIN + assert browse.identifier is None + assert browse.title == "Google Photos" + assert not browse.children + + +@pytest.mark.usefixtures("setup_integration", "mock_api") +@pytest.mark.parametrize( + ("album_path", "expected_album_title"), + [ + (f"{CONFIG_ENTRY_ID}/a/album-media-id-1", "Album title"), + ], +) +@pytest.mark.parametrize( + ("fixture_name", "expected_results", "expected_medias"), + [ + ("list_mediaitems_empty.json", [], []), + ( + "list_mediaitems.json", + [ + (f"{CONFIG_ENTRY_ID}/p/id1", "example1.jpg"), + (f"{CONFIG_ENTRY_ID}/p/id2", "example2.mp4"), + ], + [ + ("http://img.example.com/id1=h2160", "image/jpeg"), + ("http://img.example.com/id2=dv", "video/mp4"), + ], + ), + ], +) +async def 
test_browse_albums( +    hass: HomeAssistant, +    album_path: str, +    expected_album_title: str, +    expected_results: list[tuple[str, str]], +    expected_medias: list[tuple[str, str]], +) -> None: +    """Test browsing albums and resolving the media items in them.""" +    browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") +    assert browse.domain == DOMAIN +    assert browse.identifier is None +    assert browse.title == "Google Photos" +    assert [(child.identifier, child.title) for child in browse.children] == [ +        (CONFIG_ENTRY_ID, "Account Name") +    ] + +    browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}") +    assert browse.domain == DOMAIN +    assert browse.identifier == CONFIG_ENTRY_ID +    assert browse.title == "Account Name" +    assert [(child.identifier, child.title) for child in browse.children] == [ +        (f"{CONFIG_ENTRY_ID}/a/album-media-id-1", "Album title"), +    ] + +    browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{album_path}") +    assert browse.domain == DOMAIN +    assert browse.identifier == album_path +    assert browse.title == "Account Name" +    assert [ +        (child.identifier, child.title) for child in browse.children +    ] == expected_results + +    media = [ +        await async_resolve_media( +            hass, f"{URI_SCHEME}{DOMAIN}/{child.identifier}", None +        ) +        for child in browse.children +    ] +    assert [ +        (play_media.url, play_media.mime_type) for play_media in media +    ] == expected_medias + + +@pytest.mark.usefixtures("setup_integration", "mock_api") +async def test_invalid_config_entry(hass: HomeAssistant) -> None: +    """Test browsing to a config entry that does not exist.""" +    with pytest.raises(BrowseError, match="Could not find config entry"): +        await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/invalid-config-entry") + + +@pytest.mark.usefixtures("setup_integration", "mock_api") +@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) +async def test_browse_invalid_path(hass: HomeAssistant) -> None: +    """Test browsing to a photo is not possible.""" +    browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") +    assert browse.domain == DOMAIN +    assert browse.identifier is None +    assert browse.title == "Google Photos" +    assert [(child.identifier, child.title) for child in browse.children] == [ +        (CONFIG_ENTRY_ID, "Account Name") +    ] + +    with pytest.raises(BrowseError, match="Unsupported identifier"): +        await async_browse_media( +            hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/p/some-photo-id" +        ) + + +@pytest.mark.usefixtures("setup_integration") +@pytest.mark.parametrize( +    ("identifier", "expected_error"), +    [ +        (CONFIG_ENTRY_ID, "not a Photo"), +        ("invalid-config-entry/a/example", "not a Photo"), +        ("invalid-config-entry/q/example", "Could not parse"), +        ("too/many/slashes/in/path", "Invalid identifier"), +    ], +) +async def test_missing_photo_id( +    hass: HomeAssistant, identifier: str, expected_error: str +) -> None: +    """Test parsing an invalid media identifier.""" +    with pytest.raises(BrowseError, match=expected_error): +        await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/{identifier}", None) + + +@pytest.mark.usefixtures("setup_integration", "mock_api") +async def test_list_media_items_failure(hass: HomeAssistant, mock_api: Mock) -> None: +    """Test browsing an album when listing media items fails.""" +    browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") +    assert browse.domain == DOMAIN +    assert browse.identifier is None +    assert browse.title == "Google Photos" +    assert [(child.identifier, child.title) for child in browse.children] == [ + 
(CONFIG_ENTRY_ID, "Account Name") + ] + + mock_api.list_media_items.side_effect = GooglePhotosApiError("some error") + + with pytest.raises(BrowseError, match="Error listing media items"): + await async_browse_media( + hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/recent" + ) diff --git a/tests/components/google_photos/test_services.py b/tests/components/google_photos/test_services.py new file mode 100644 index 00000000000..381fb1c431f --- /dev/null +++ b/tests/components/google_photos/test_services.py @@ -0,0 +1,396 @@ +"""Tests for Google Photos.""" + +from collections.abc import Generator +from dataclasses import dataclass +import re +from unittest.mock import Mock, patch + +from google_photos_library_api.exceptions import GooglePhotosApiError +from google_photos_library_api.model import ( + Album, + CreateMediaItemsResult, + MediaItem, + NewMediaItemResult, + Status, +) +import pytest + +from homeassistant.components.google_photos.const import DOMAIN, READ_SCOPE +from homeassistant.components.google_photos.services import ( + CONF_ALBUM, + CONF_CONFIG_ENTRY_ID, + UPLOAD_SERVICE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_FILENAME +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from tests.common import MockConfigEntry + +TEST_FILENAME = "doorbell_snapshot.jpg" +ALBUM_TITLE = "Album title" + + +@dataclass +class MockUploadFile: + """Dataclass used to configure the test with a fake file behavior.""" + + content: bytes = b"image bytes" + exists: bool = True + is_allowed_path: bool = True + size: int | None = None + + +@pytest.fixture(name="upload_file") +def upload_file_fixture() -> None: + """Fixture to set up test configuration with a fake file.""" + return MockUploadFile() + + +@pytest.fixture(autouse=True) +def mock_upload_file( + hass: HomeAssistant, upload_file: MockUploadFile +) -> Generator[None]: + """Fixture that mocks out the file calls using the FakeFile fixture.""" + with ( + patch( + "homeassistant.components.google_photos.services.Path.read_bytes", + return_value=upload_file.content, + ), + patch( + "homeassistant.components.google_photos.services.Path.exists", + return_value=upload_file.exists, + ), + patch.object( + hass.config, "is_allowed_path", return_value=upload_file.is_allowed_path + ), + patch("pathlib.Path.stat") as mock_stat, + ): + mock_stat.return_value = Mock() + mock_stat.return_value.st_size = ( + upload_file.size if upload_file.size else len(upload_file.content) + ) + yield + + +@pytest.mark.usefixtures("setup_integration") +async def test_upload_service( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_api: Mock, +) -> None: + """Test service call to upload content.""" + assert hass.services.has_service(DOMAIN, "upload") + + mock_api.create_media_items.return_value = CreateMediaItemsResult( + new_media_item_results=[ + NewMediaItemResult( + upload_token="some-upload-token", + status=Status(code=200), + media_item=MediaItem(id="new-media-item-id-1"), + ) + ] + ) + + response = await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: ALBUM_TITLE, + }, + blocking=True, + return_response=True, + ) + + assert response == { + "media_items": [{"media_item_id": "new-media-item-id-1"}], + "album_id": "album-media-id-1", + } + + +@pytest.mark.usefixtures("setup_integration") +async def test_upload_service_config_entry_not_found( + hass: 
HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a config entry that does not exist.""" + with pytest.raises(HomeAssistantError, match="not found in registry"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: "invalid-config-entry-id", + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: ALBUM_TITLE, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +async def test_config_entry_not_loaded( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a config entry that is not loaded.""" + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + with pytest.raises(HomeAssistantError, match="not found in registry"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.unique_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: ALBUM_TITLE, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +@pytest.mark.parametrize("upload_file", [MockUploadFile(is_allowed_path=False)]) +async def test_path_is_not_allowed( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a filename path that is not allowed.""" + with ( + pytest.raises(HomeAssistantError, match="no access to path"), + ): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: ALBUM_TITLE, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +@pytest.mark.parametrize("upload_file", [MockUploadFile(exists=False)]) +async def test_filename_does_not_exist( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a filename path that does not exist.""" + with pytest.raises(HomeAssistantError, match="does not exist"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: ALBUM_TITLE, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +async def test_upload_service_upload_content_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_api: Mock, +) -> None: + """Test service call to upload content.""" + + mock_api.upload_content.side_effect = GooglePhotosApiError() + + with pytest.raises(HomeAssistantError, match="Failed to upload content"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: ALBUM_TITLE, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +async def test_upload_service_fails_create( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_api: Mock, +) -> None: + """Test service call to upload content.""" + + mock_api.create_media_items.side_effect = GooglePhotosApiError() + + with pytest.raises( + HomeAssistantError, match="Google Photos API responded with error" + ): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: ALBUM_TITLE, + }, + blocking=True, + return_response=True, + ) + 
+ +@pytest.mark.usefixtures("setup_integration") +@pytest.mark.parametrize( + ("scopes"), + [ + [READ_SCOPE], + ], +) +async def test_upload_service_no_scope( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test service call to upload content but the config entry is read-only.""" + + with pytest.raises(HomeAssistantError, match="not granted permission"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: ALBUM_TITLE, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +@pytest.mark.parametrize("upload_file", [MockUploadFile(size=26 * 1024 * 1024)]) +async def test_upload_size_limit( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a filename path that does not exist.""" + with pytest.raises( + HomeAssistantError, + match=re.escape(f"`{TEST_FILENAME}` is too large (27262976 > 20971520)"), + ): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: ALBUM_TITLE, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +async def test_upload_to_new_album( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_api: Mock, +) -> None: + """Test service call to upload content to a new album.""" + assert hass.services.has_service(DOMAIN, "upload") + + mock_api.create_media_items.return_value = CreateMediaItemsResult( + new_media_item_results=[ + NewMediaItemResult( + upload_token="some-upload-token", + status=Status(code=200), + media_item=MediaItem(id="new-media-item-id-1"), + ) + ] + ) + mock_api.create_album.return_value = Album(id="album-media-id-2", title="New Album") + response = await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: "New Album", + }, + blocking=True, + return_response=True, + ) + + # Verify media item was created with the new album id + mock_api.create_album.assert_awaited() + assert response == { + "media_items": [{"media_item_id": "new-media-item-id-1"}], + "album_id": "album-media-id-2", + } + + # Upload an additional item to the same album and assert that no new album is created + mock_api.create_album.reset_mock() + mock_api.create_media_items.reset_mock() + mock_api.create_media_items.return_value = CreateMediaItemsResult( + new_media_item_results=[ + NewMediaItemResult( + upload_token="some-upload-token", + status=Status(code=200), + media_item=MediaItem(id="new-media-item-id-3"), + ) + ] + ) + response = await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_ALBUM: "New Album", + }, + blocking=True, + return_response=True, + ) + + # Verify the album created last time is used + mock_api.create_album.assert_not_awaited() + assert response == { + "media_items": [{"media_item_id": "new-media-item-id-3"}], + "album_id": "album-media-id-2", + } + + +@pytest.mark.usefixtures("setup_integration") +async def test_create_album_failed( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_api: Mock, +) -> None: + """Test service call to upload content to a new album but creating the album fails.""" + assert hass.services.has_service(DOMAIN, "upload") + + mock_api.create_album.side_effect = 
GooglePhotosApiError() + +    with pytest.raises(HomeAssistantError, match="Failed to create album"): +        await hass.services.async_call( +            DOMAIN, +            UPLOAD_SERVICE, +            { +                CONF_CONFIG_ENTRY_ID: config_entry.entry_id, +                CONF_FILENAME: TEST_FILENAME, +                CONF_ALBUM: "New Album", +            }, +            blocking=True, +            return_response=True, +        ) diff --git a/tests/components/google_pubsub/test_init.py b/tests/components/google_pubsub/test_init.py index 97e499d5d6d..5f160054da7 100644 --- a/tests/components/google_pubsub/test_init.py +++ b/tests/components/google_pubsub/test_init.py @@ -148,7 +148,7 @@ async def test_allowlist(hass: HomeAssistant, mock_client) -> None: ] for test in tests: -        hass.states.async_set(test.id, "not blank") +        hass.states.async_set(test.id, "on") await hass.async_block_till_done() was_called = publish_client.publish.call_count == 1 @@ -178,7 +178,7 @@ async def test_denylist(hass: HomeAssistant, mock_client) -> None: ] for test in tests: -        hass.states.async_set(test.id, "not blank") +        hass.states.async_set(test.id, "on") await hass.async_block_till_done() was_called = publish_client.publish.call_count == 1 diff --git a/tests/components/google_sheets/test_config_flow.py b/tests/components/google_sheets/test_config_flow.py index a504d8c4280..756ff080212 100644 --- a/tests/components/google_sheets/test_config_flow.py +++ b/tests/components/google_sheets/test_config_flow.py @@ -235,6 +235,7 @@ async def test_reauth( "homeassistant.components.google_sheets.async_setup_entry", return_value=True ) as mock_setup: result = await hass.config_entries.flow.async_configure(result["flow_id"]) +        await hass.async_block_till_done() assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup.mock_calls) == 1 diff --git a/tests/components/google_tasks/conftest.py b/tests/components/google_tasks/conftest.py index 7db78af6232..e519cac9bdc 100644 --- a/tests/components/google_tasks/conftest.py +++ b/tests/components/google_tasks/conftest.py @@ -1,10 +1,12 @@ """Test fixtures for Google Tasks.""" from collections.abc import Awaitable, Callable +import json import time from typing import Any -from unittest.mock import patch +from unittest.mock import Mock, patch +from httplib2 import Response import pytest from homeassistant.components.application_credentials import ( @@ -24,6 +26,14 @@ FAKE_ACCESS_TOKEN = "some-access-token" FAKE_REFRESH_TOKEN = "some-refresh-token" FAKE_AUTH_IMPL = "conftest-imported-cred" +TASK_LIST = { +    "id": "task-list-id-1", +    "title": "My tasks", +} +LIST_TASK_LIST_RESPONSE = { +    "items": [TASK_LIST], +} + @pytest.fixture def platforms() -> list[Platform]: @@ -89,3 +99,31 @@ async def mock_integration_setup( return result return run + + +@pytest.fixture(name="api_responses") +def mock_api_responses() -> list[dict | list]: +    """Fixture for API responses to return during test.""" +    return [] + + +def create_response_object(api_response: dict | list) -> tuple[Response, bytes]: +    """Create an http response.""" +    return ( +        Response({"Content-Type": "application/json"}), +        json.dumps(api_response).encode(), +    ) + + +@pytest.fixture(name="response_handler") +def mock_response_handler(api_responses: list[dict | list]) -> list: +    """Create a mock httplib2 response handler.""" +    return [create_response_object(api_response) for api_response in api_responses] + + +@pytest.fixture +def mock_http_response(response_handler: list | Callable) -> Mock: +    """Fixture to fake out httplib2 responses.""" + +    with patch("httplib2.Http.request", 
side_effect=response_handler) as mock_response: + yield mock_response diff --git a/tests/components/google_tasks/snapshots/test_todo.ambr b/tests/components/google_tasks/snapshots/test_todo.ambr index 76611ba4a31..f32441354fc 100644 --- a/tests/components/google_tasks/snapshots/test_todo.ambr +++ b/tests/components/google_tasks/snapshots/test_todo.ambr @@ -15,7 +15,7 @@ ) # --- # name: test_create_todo_list_item[due].1 - '{"title": "Soda", "status": "needsAction", "due": "2023-11-18T00:00:00-08:00", "notes": null}' + '{"title": "Soda", "status": "needsAction", "due": "2023-11-18T00:00:00+00:00", "notes": null}' # --- # name: test_create_todo_list_item[summary] tuple( @@ -137,7 +137,7 @@ ) # --- # name: test_partial_update[due_date].1 - '{"title": "Water", "status": "needsAction", "due": "2023-11-18T00:00:00-08:00", "notes": null}' + '{"title": "Water", "status": "needsAction", "due": "2023-11-18T00:00:00+00:00", "notes": null}' # --- # name: test_partial_update[empty_description] tuple( @@ -166,6 +166,33 @@ # name: test_partial_update_status[api_responses0].1 '{"title": "Water", "status": "needsAction", "due": null, "notes": null}' # --- +# name: test_update_due_date[api_responses0-America/Regina] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_update_due_date[api_responses0-America/Regina].1 + '{"title": "Water", "status": "needsAction", "due": "2024-12-05T00:00:00+00:00", "notes": null}' +# --- +# name: test_update_due_date[api_responses0-Asia/Tokyo] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_update_due_date[api_responses0-Asia/Tokyo].1 + '{"title": "Water", "status": "needsAction", "due": "2024-12-05T00:00:00+00:00", "notes": null}' +# --- +# name: test_update_due_date[api_responses0-UTC] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_update_due_date[api_responses0-UTC].1 + '{"title": "Water", "status": "needsAction", "due": "2024-12-05T00:00:00+00:00", "notes": null}' +# --- # name: test_update_todo_list_item[api_responses0] tuple( 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', diff --git a/tests/components/google_tasks/test_init.py b/tests/components/google_tasks/test_init.py index 1fe0e4a0c36..9ad8c887a66 100644 --- a/tests/components/google_tasks/test_init.py +++ b/tests/components/google_tasks/test_init.py @@ -2,8 +2,12 @@ from collections.abc import Awaitable, Callable import http +from http import HTTPStatus import time +from unittest.mock import Mock +from aiohttp import ClientError +from httplib2 import Response import pytest from homeassistant.components.google_tasks import DOMAIN @@ -11,15 +15,19 @@ from homeassistant.components.google_tasks.const import OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from .conftest import LIST_TASK_LIST_RESPONSE + from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) async def test_setup( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], config_entry: MockConfigEntry, setup_credentials: None, + mock_http_response: Mock, ) -> None: """Test successful setup and unload.""" assert config_entry.state is ConfigEntryState.NOT_LOADED @@ 
-35,12 +43,14 @@ async def test_setup( @pytest.mark.parametrize("expires_at", [time.time() - 3600], ids=["expired"]) +@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) async def test_expired_token_refresh_success( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], aioclient_mock: AiohttpClientMocker, config_entry: MockConfigEntry, setup_credentials: None, + mock_http_response: Mock, ) -> None: """Test expired token is refreshed.""" @@ -63,20 +73,28 @@ async def test_expired_token_refresh_success( @pytest.mark.parametrize( - ("expires_at", "status", "expected_state"), + ("expires_at", "status", "exc", "expected_state"), [ ( time.time() - 3600, http.HTTPStatus.UNAUTHORIZED, + None, ConfigEntryState.SETUP_ERROR, ), ( time.time() - 3600, http.HTTPStatus.INTERNAL_SERVER_ERROR, + None, + ConfigEntryState.SETUP_RETRY, + ), + ( + time.time() - 3600, + None, + ClientError("error"), ConfigEntryState.SETUP_RETRY, ), ], - ids=["unauthorized", "internal_server_error"], + ids=["unauthorized", "internal_server_error", "client_error"], ) async def test_expired_token_refresh_failure( hass: HomeAssistant, @@ -84,7 +102,8 @@ async def test_expired_token_refresh_failure( aioclient_mock: AiohttpClientMocker, config_entry: MockConfigEntry, setup_credentials: None, - status: http.HTTPStatus, + status: http.HTTPStatus | None, + exc: Exception | None, expected_state: ConfigEntryState, ) -> None: """Test failure while refreshing token with a transient error.""" @@ -93,8 +112,28 @@ async def test_expired_token_refresh_failure( aioclient_mock.post( OAUTH2_TOKEN, status=status, + exc=exc, ) await integration_setup() assert config_entry.state is expected_state + + +@pytest.mark.parametrize( + "response_handler", + [ + ([(Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b"")]), + ], +) +async def test_setup_error( + hass: HomeAssistant, + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], + mock_http_response: Mock, + config_entry: MockConfigEntry, +) -> None: + """Test an error returned by the server when setting up the platform.""" + + assert not await integration_setup() + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index b0ee135d4a9..c713b9fd44f 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable from http import HTTPStatus import json from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import Mock from httplib2 import Response import pytest @@ -23,16 +23,11 @@ from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from .conftest import LIST_TASK_LIST_RESPONSE, create_response_object + from tests.typing import WebSocketGenerator ENTITY_ID = "todo.my_tasks" -ITEM = { - "id": "task-list-id-1", - "title": "My tasks", -} -LIST_TASK_LIST_RESPONSE = { - "items": [ITEM], -} EMPTY_RESPONSE = {} LIST_TASKS_RESPONSE = { "items": [], @@ -149,20 +144,6 @@ async def ws_get_items( return get -@pytest.fixture(name="api_responses") -def mock_api_responses() -> list[dict | list]: - """Fixture for API responses to return during test.""" - return [] - - -def create_response_object(api_response: dict | list) -> tuple[Response, bytes]: - """Create an http response.""" - return ( - 
Response({"Content-Type": "application/json"}), - json.dumps(api_response).encode(), - ) - - def create_batch_response_object( content_ids: list[str], api_responses: list[dict | list | Response | None] ) -> tuple[Response, bytes]: @@ -225,20 +206,13 @@ def create_batch_response_handler( return _handler -@pytest.fixture(name="response_handler") -def mock_response_handler(api_responses: list[dict | list]) -> list: - """Create a mock http2lib response handler.""" - return [create_response_object(api_response) for api_response in api_responses] - - @pytest.fixture(autouse=True) -def mock_http_response(response_handler: list | Callable) -> Mock: - """Fixture to fake out http2lib responses.""" - - with patch("httplib2.Http.request", side_effect=response_handler) as mock_response: - yield mock_response +def setup_http_response(mock_http_response: Mock) -> None: + """Fixture to load the http response mock.""" + return +@pytest.mark.parametrize("timezone", ["America/Regina", "UTC", "Asia/Tokyo"]) @pytest.mark.parametrize( "api_responses", [ @@ -251,7 +225,7 @@ def mock_http_response(response_handler: list | Callable) -> Mock: "title": "Task 1", "status": "needsAction", "position": "0000000000000001", - "due": "2023-11-18T00:00:00+00:00", + "due": "2023-11-18T00:00:00Z", }, { "id": "task-2", @@ -271,8 +245,10 @@ async def test_get_items( integration_setup: Callable[[], Awaitable[bool]], hass_ws_client: WebSocketGenerator, ws_get_items: Callable[[], Awaitable[dict[str, str]]], + timezone: str, ) -> None: """Test getting todo list items.""" + await hass.config.async_set_time_zone(timezone) assert await integration_setup() @@ -300,29 +276,6 @@ async def test_get_items( assert state.state == "1" -@pytest.mark.parametrize( - "response_handler", - [ - ([(Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b"")]), - ], -) -async def test_list_items_server_error( - hass: HomeAssistant, - setup_credentials: None, - integration_setup: Callable[[], Awaitable[bool]], - hass_ws_client: WebSocketGenerator, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], -) -> None: - """Test an error returned by the server when setting up the platform.""" - - assert await integration_setup() - - await hass_ws_client(hass) - - state = hass.states.get("todo.my_tasks") - assert state is None - - @pytest.mark.parametrize( "api_responses", [ @@ -484,6 +437,39 @@ async def test_update_todo_list_item( assert call.kwargs.get("body") == snapshot +@pytest.mark.parametrize("timezone", ["America/Regina", "UTC", "Asia/Tokyo"]) +@pytest.mark.parametrize("api_responses", [UPDATE_API_RESPONSES]) +async def test_update_due_date( + hass: HomeAssistant, + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], + mock_http_response: Any, + snapshot: SnapshotAssertion, + timezone: str, +) -> None: + """Test for updating the due date of a To-do item and timezone.""" + await hass.config.async_set_time_zone(timezone) + + assert await integration_setup() + + state = hass.states.get("todo.my_tasks") + assert state + assert state.state == "1" + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_DUE_DATE: "2024-12-5"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, + blocking=True, + ) + assert len(mock_http_response.call_args_list) == 4 + call = mock_http_response.call_args_list[2] + assert call + assert call.args == snapshot + assert call.kwargs.get("body") == snapshot + + @pytest.mark.parametrize( "api_responses", [ diff --git 
a/tests/components/google_translate/test_tts.py b/tests/components/google_translate/test_tts.py index 95313df6140..5b691da4bdc 100644 --- a/tests/components/google_translate/test_tts.py +++ b/tests/components/google_translate/test_tts.py @@ -14,9 +14,9 @@ import pytest from homeassistant.components import tts from homeassistant.components.google_translate.const import CONF_TLD, DOMAIN from homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -103,7 +103,7 @@ async def mock_config_entry_setup(hass: HomeAssistant, config: dict[str, Any]) - "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -160,7 +160,7 @@ async def test_tts_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_de_com", + ATTR_ENTITY_ID: "tts.google_translate_de_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -216,7 +216,7 @@ async def test_service_say_german_config( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_LANGUAGE: "de", @@ -273,7 +273,7 @@ async def test_service_say_german_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_co_uk", + ATTR_ENTITY_ID: "tts.google_translate_en_co_uk", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -329,7 +329,7 @@ async def test_service_say_en_uk_config( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_LANGUAGE: "en-uk", @@ -386,7 +386,7 @@ async def test_service_say_en_uk_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_OPTIONS: {"tld": "co.uk"}, @@ -443,7 +443,7 @@ async def test_service_say_en_couk( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, diff --git a/tests/components/google_travel_time/test_config_flow.py b/tests/components/google_travel_time/test_config_flow.py index d16d1c1ffc9..5f9d5d4549b 100644 --- a/tests/components/google_travel_time/test_config_flow.py +++ b/tests/components/google_travel_time/test_config_flow.py @@ -198,13 +198,7 @@ async def test_malformed_api_key(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("validate_config_entry", "bypass_setup") async def test_reconfigure(hass: HomeAssistant, 
mock_config: MockConfigEntry) -> None: """Test reconfigure flow.""" - reconfigure_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": mock_config.entry_id, - }, - ) + reconfigure_result = await mock_config.start_reconfigure_flow(hass) assert reconfigure_result["type"] is FlowResultType.FORM assert reconfigure_result["step_id"] == "reconfigure" @@ -228,13 +222,7 @@ async def test_reconfigure_invalid_config_entry( hass: HomeAssistant, mock_config: MockConfigEntry ) -> None: """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": mock_config.entry_id, - }, - ) + result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None result2 = await hass.config_entries.flow.async_configure( @@ -265,13 +253,7 @@ async def test_reconfigure_invalid_api_key( hass: HomeAssistant, mock_config: MockConfigEntry ) -> None: """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": mock_config.entry_id, - }, - ) + result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None result2 = await hass.config_entries.flow.async_configure( @@ -301,13 +283,7 @@ async def test_reconfigure_transport_error( hass: HomeAssistant, mock_config: MockConfigEntry ) -> None: """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": mock_config.entry_id, - }, - ) + result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None result2 = await hass.config_entries.flow.async_configure( @@ -337,13 +313,7 @@ async def test_reconfigure_timeout( hass: HomeAssistant, mock_config: MockConfigEntry ) -> None: """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": mock_config.entry_id, - }, - ) + result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/google_travel_time/test_sensor.py b/tests/components/google_travel_time/test_sensor.py index 5ac9ecad482..9ee6ebbbc7b 100644 --- a/tests/components/google_travel_time/test_sensor.py +++ b/tests/components/google_travel_time/test_sensor.py @@ -3,6 +3,7 @@ from collections.abc import Generator from unittest.mock import MagicMock, patch +from googlemaps.exceptions import ApiError, Timeout, TransportError import pytest from homeassistant.components.google_travel_time.config_flow import default_options @@ -13,7 +14,9 @@ from homeassistant.components.google_travel_time.const import ( UNITS_IMPERIAL, UNITS_METRIC, ) +from homeassistant.components.google_travel_time.sensor import SCAN_INTERVAL from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util from homeassistant.util.unit_system import ( METRIC_SYSTEM, US_CUSTOMARY_SYSTEM, @@ -22,7 +25,7 @@ from homeassistant.util.unit_system import ( from .const import MOCK_CONFIG -from tests.common import MockConfigEntry +from tests.common import 
MockConfigEntry, async_fire_time_changed @pytest.fixture(name="mock_update") @@ -240,3 +243,25 @@ async def test_sensor_unit_system( distance_matrix_mock.assert_called_once() assert distance_matrix_mock.call_args.kwargs["units"] == expected_unit_option + + +@pytest.mark.parametrize( + ("exception"), + [(ApiError), (TransportError), (Timeout)], +) +@pytest.mark.parametrize( + ("data", "options"), + [(MOCK_CONFIG, {})], +) +async def test_sensor_exception( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_update: MagicMock, + mock_config: MagicMock, + exception: Exception, +) -> None: + """Test that exception gets caught.""" + mock_update.side_effect = exception("Errormessage") + async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) + await hass.async_block_till_done() + assert "Error getting travel time" in caplog.text diff --git a/tests/components/gpslogger/test_init.py b/tests/components/gpslogger/test_init.py index fab6aaa4e84..aff8b20dc52 100644 --- a/tests/components/gpslogger/test_init.py +++ b/tests/components/gpslogger/test_init.py @@ -11,9 +11,9 @@ from homeassistant.components import gpslogger, zone from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.gpslogger import DOMAIN, TRACKER_UPDATE -from homeassistant.config import async_process_ha_core_config from homeassistant.const import STATE_HOME, STATE_NOT_HOME from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import DATA_DISPATCHER diff --git a/tests/components/gree/test_bridge.py b/tests/components/gree/test_bridge.py index 32372bebf37..ae2f0c74236 100644 --- a/tests/components/gree/test_bridge.py +++ b/tests/components/gree/test_bridge.py @@ -5,7 +5,7 @@ from datetime import timedelta from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.climate import DOMAIN, HVACMode +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN, HVACMode from homeassistant.components.gree.const import ( COORDINATORS, DOMAIN as GREE, @@ -18,8 +18,8 @@ from .common import async_setup_gree, build_device_mock from tests.common import async_fire_time_changed -ENTITY_ID_1 = f"{DOMAIN}.fake_device_1" -ENTITY_ID_2 = f"{DOMAIN}.fake_device_2" +ENTITY_ID_1 = f"{CLIMATE_DOMAIN}.fake_device_1" +ENTITY_ID_2 = f"{CLIMATE_DOMAIN}.fake_device_2" @pytest.fixture @@ -46,7 +46,7 @@ async def test_discovery_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 1 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.1" @@ -68,7 +68,7 @@ async def test_discovery_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.2" @@ -82,7 +82,7 @@ async def test_coordinator_updates( await async_setup_gree(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(DOMAIN)) == 1 
+ assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 callback = device().add_handler.call_args_list[0][0][1] diff --git a/tests/components/gree/test_climate.py b/tests/components/gree/test_climate.py index 1bf49bbca26..0cb187f5a60 100644 --- a/tests/components/gree/test_climate.py +++ b/tests/components/gree/test_climate.py @@ -21,7 +21,7 @@ from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_PRESET_MODE, ATTR_SWING_MODE, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, FAN_AUTO, FAN_HIGH, FAN_LOW, @@ -71,7 +71,7 @@ from .common import async_setup_gree, build_device_mock from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.fake_device_1" +ENTITY_ID = f"{CLIMATE_DOMAIN}.fake_device_1" async def test_discovery_called_once(hass: HomeAssistant, discovery, device) -> None: @@ -98,7 +98,7 @@ async def test_discovery_setup(hass: HomeAssistant, discovery, device) -> None: await async_setup_gree(hass) await hass.async_block_till_done() assert discovery.call_count == 1 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 async def test_discovery_setup_connection_error( @@ -117,7 +117,7 @@ async def test_discovery_setup_connection_error( await async_setup_gree(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(DOMAIN)) == 1 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE @@ -143,7 +143,7 @@ async def test_discovery_after_setup( await async_setup_gree(hass) # Update 1 assert discovery.return_value.scan_count == 1 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 # rediscover the same devices shouldn't change anything discovery.return_value.mock_devices = [MockDevice1, MockDevice2] @@ -154,7 +154,7 @@ async def test_discovery_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 async def test_discovery_add_device_after_setup( @@ -180,7 +180,7 @@ async def test_discovery_add_device_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 1 - assert len(hass.states.async_all(DOMAIN)) == 1 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 # rediscover the same devices shouldn't change anything discovery.return_value.mock_devices = [MockDevice2] @@ -191,7 +191,7 @@ async def test_discovery_add_device_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 async def test_discovery_device_bind_after_setup( @@ -209,7 +209,7 @@ async def test_discovery_device_bind_after_setup( await async_setup_gree(hass) # Update 1 - assert len(hass.states.async_all(DOMAIN)) == 1 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE @@ -328,7 +328,7 @@ async def test_send_command_device_timeout( # Send failure should not raise exceptions or change device state await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, @@ -377,7 +377,7 @@ async def test_send_power_on(hass: HomeAssistant, discovery, device) -> None: await async_setup_gree(hass) 
await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, @@ -397,7 +397,7 @@ async def test_send_power_off_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, @@ -439,7 +439,7 @@ async def test_send_target_temperature( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, blocking=True, @@ -473,7 +473,7 @@ async def test_send_target_temperature_with_hvac_mode( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ID, @@ -509,7 +509,7 @@ async def test_send_target_temperature_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, blocking=True, @@ -543,7 +543,7 @@ async def test_update_target_temperature( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, blocking=True, @@ -565,7 +565,7 @@ async def test_send_preset_mode(hass: HomeAssistant, discovery, device, preset) await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset}, blocking=True, @@ -582,7 +582,7 @@ async def test_send_invalid_preset_mode(hass: HomeAssistant, discovery, device) with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: "invalid"}, blocking=True, @@ -605,7 +605,7 @@ async def test_send_preset_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset}, blocking=True, @@ -653,7 +653,7 @@ async def test_send_hvac_mode( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode}, blocking=True, @@ -677,7 +677,7 @@ async def test_send_hvac_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode}, blocking=True, @@ -722,7 +722,7 @@ async def test_send_fan_mode(hass: HomeAssistant, discovery, device, fan_mode) - await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode}, blocking=True, @@ -739,7 +739,7 @@ async def test_send_invalid_fan_mode(hass: HomeAssistant, discovery, device) -> with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: "invalid"}, blocking=True, @@ -763,7 +763,7 @@ async def test_send_fan_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode}, blocking=True, @@ -801,7 +801,7 @@ async def test_send_swing_mode( await async_setup_gree(hass) await hass.services.async_call( - 
DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode}, blocking=True, @@ -818,7 +818,7 @@ async def test_send_invalid_swing_mode(hass: HomeAssistant, discovery, device) - with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: "invalid"}, blocking=True, @@ -841,7 +841,7 @@ async def test_send_swing_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode}, blocking=True, @@ -884,7 +884,7 @@ async def test_coordinator_update_handler( await async_setup_gree(hass) await hass.async_block_till_done() - entity: GreeClimateEntity = hass.data[DOMAIN].get_entity(ENTITY_ID) + entity: GreeClimateEntity = hass.data[CLIMATE_DOMAIN].get_entity(ENTITY_ID) assert entity is not None # Initial state @@ -911,7 +911,7 @@ async def test_coordinator_update_handler( assert entity.max_temp == TEMP_MAX -@patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) +@patch("homeassistant.components.gree.PLATFORMS", [CLIMATE_DOMAIN]) async def test_registry_settings( hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion ) -> None: @@ -922,7 +922,7 @@ async def test_registry_settings( assert entries == snapshot -@patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) +@patch("homeassistant.components.gree.PLATFORMS", [CLIMATE_DOMAIN]) async def test_entity_states(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: """Test for entity registry settings (unique_id).""" await async_setup_gree(hass) diff --git a/tests/components/gree/test_switch.py b/tests/components/gree/test_switch.py index c5684abbf6f..e9491796bdf 100644 --- a/tests/components/gree/test_switch.py +++ b/tests/components/gree/test_switch.py @@ -7,7 +7,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.gree.const import DOMAIN as GREE_DOMAIN -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TOGGLE, @@ -22,23 +22,23 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -ENTITY_ID_LIGHT_PANEL = f"{DOMAIN}.fake_device_1_panel_light" -ENTITY_ID_HEALTH_MODE = f"{DOMAIN}.fake_device_1_health_mode" -ENTITY_ID_QUIET = f"{DOMAIN}.fake_device_1_quiet" -ENTITY_ID_FRESH_AIR = f"{DOMAIN}.fake_device_1_fresh_air" -ENTITY_ID_XFAN = f"{DOMAIN}.fake_device_1_xfan" +ENTITY_ID_LIGHT_PANEL = f"{SWITCH_DOMAIN}.fake_device_1_panel_light" +ENTITY_ID_HEALTH_MODE = f"{SWITCH_DOMAIN}.fake_device_1_health_mode" +ENTITY_ID_QUIET = f"{SWITCH_DOMAIN}.fake_device_1_quiet" +ENTITY_ID_FRESH_AIR = f"{SWITCH_DOMAIN}.fake_device_1_fresh_air" +ENTITY_ID_XFAN = f"{SWITCH_DOMAIN}.fake_device_1_xfan" async def async_setup_gree(hass: HomeAssistant) -> MockConfigEntry: """Set up the gree switch platform.""" entry = MockConfigEntry(domain=GREE_DOMAIN) entry.add_to_hass(hass) - await async_setup_component(hass, GREE_DOMAIN, {GREE_DOMAIN: {DOMAIN: {}}}) + await async_setup_component(hass, GREE_DOMAIN, {GREE_DOMAIN: {SWITCH_DOMAIN: {}}}) await hass.async_block_till_done() return entry -@patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) +@patch("homeassistant.components.gree.PLATFORMS", [SWITCH_DOMAIN]) async def test_registry_settings( hass: 
HomeAssistant, entity_registry: er.EntityRegistry, @@ -67,7 +67,7 @@ async def test_send_switch_on(hass: HomeAssistant, entity: str) -> None: await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -98,7 +98,7 @@ async def test_send_switch_on_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -125,7 +125,7 @@ async def test_send_switch_off(hass: HomeAssistant, entity: str) -> None: await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -153,7 +153,7 @@ async def test_send_switch_toggle(hass: HomeAssistant, entity: str) -> None: # Turn the service on first await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -165,7 +165,7 @@ async def test_send_switch_toggle(hass: HomeAssistant, entity: str) -> None: # Toggle it off await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -177,7 +177,7 @@ async def test_send_switch_toggle(hass: HomeAssistant, entity: str) -> None: # Toggle is back on await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -197,5 +197,5 @@ async def test_entity_state( """Test for entity registry settings (disabled_by, unique_id).""" await async_setup_gree(hass) - state = hass.states.async_all(DOMAIN) + state = hass.states.async_all(SWITCH_DOMAIN) assert state == snapshot diff --git a/tests/components/group/test_cover.py b/tests/components/group/test_cover.py index c687ca21e2d..b1f622569bd 100644 --- a/tests/components/group/test_cover.py +++ b/tests/components/group/test_cover.py @@ -11,7 +11,8 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.components.group.cover import DEFAULT_NAME from homeassistant.const import ( @@ -31,10 +32,6 @@ from homeassistant.const import ( SERVICE_STOP_COVER_TILT, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -52,7 +49,7 @@ DEMO_COVER_TILT = "cover.living_room_window" DEMO_TILT = "cover.tilt_demo" CONFIG_ALL = { - DOMAIN: [ + COVER_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -62,7 +59,7 @@ CONFIG_ALL = { } CONFIG_POS = { - DOMAIN: [ + COVER_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -72,7 +69,7 @@ CONFIG_POS = { } CONFIG_TILT_ONLY = { - DOMAIN: [ + COVER_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -82,7 +79,7 @@ CONFIG_TILT_ONLY = { } CONFIG_ATTRIBUTES = { - DOMAIN: { + COVER_DOMAIN: { "platform": "group", CONF_ENTITIES: [DEMO_COVER, DEMO_COVER_POS, DEMO_COVER_TILT, DEMO_TILT], CONF_UNIQUE_ID: "unique_identifier", @@ -96,8 +93,8 @@ async def setup_comp( ) -> None: """Set up group cover component.""" config, count = config_count - with assert_setup_component(count, DOMAIN): - await async_setup_component(hass, DOMAIN, config) + with assert_setup_component(count, COVER_DOMAIN): + await async_setup_component(hass, COVER_DOMAIN, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() @@ -158,90 +155,105 @@ async def test_state(hass: HomeAssistant) 
-> None: # At least one member opening -> group opening for state_1 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, + CoverState.OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_2 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, + CoverState.OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_3 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, + CoverState.OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, STATE_OPENING, {}) + hass.states.async_set(DEMO_TILT, CoverState.OPENING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # At least one member closing -> group closing for state_1 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_2 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_3 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN, ): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, STATE_CLOSING, {}) + hass.states.async_set(DEMO_TILT, CoverState.CLOSING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # At least one member open -> group open - for state_1 in (STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_2 in (STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_3 in (STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_1 in ( + CoverState.CLOSED, + CoverState.OPEN, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): + for state_2 in ( + CoverState.CLOSED, + CoverState.OPEN, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): + for state_3 in ( + CoverState.CLOSED, + CoverState.OPEN, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, STATE_OPEN, {}) + hass.states.async_set(DEMO_TILT, CoverState.OPEN, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # At least one member closed -> group closed - for state_1 in (STATE_CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_2 in (STATE_CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_3 in (STATE_CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_1 in (CoverState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_2 in (CoverState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_3 in (CoverState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): hass.states.async_set(DEMO_COVER, state_1, {}) 
hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, STATE_CLOSED, {}) + hass.states.async_set(DEMO_TILT, CoverState.CLOSED, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED # All group members removed from the state machine -> unavailable hass.states.async_remove(DEMO_COVER) @@ -269,11 +281,11 @@ async def test_attributes( assert ATTR_CURRENT_TILT_POSITION not in state.attributes # Set entity as closed - hass.states.async_set(DEMO_COVER, STATE_CLOSED, {}) + hass.states.async_set(DEMO_COVER, CoverState.CLOSED, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_ENTITY_ID] == [ DEMO_COVER, DEMO_COVER_POS, @@ -282,18 +294,18 @@ async def test_attributes( ] # Set entity as opening - hass.states.async_set(DEMO_COVER, STATE_OPENING, {}) + hass.states.async_set(DEMO_COVER, CoverState.OPENING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # Set entity as closing - hass.states.async_set(DEMO_COVER, STATE_CLOSING, {}) + hass.states.async_set(DEMO_COVER, CoverState.CLOSING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # Set entity as unknown again hass.states.async_set(DEMO_COVER, STATE_UNKNOWN, {}) @@ -303,11 +315,11 @@ async def test_attributes( assert state.state == STATE_UNKNOWN # Add Entity that supports open / close / stop - hass.states.async_set(DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set(DEMO_COVER, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 11}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 11 assert ATTR_CURRENT_POSITION not in state.attributes @@ -316,24 +328,24 @@ async def test_attributes( # Add Entity that supports set_cover_position hass.states.async_set( DEMO_COVER_POS, - STATE_OPEN, + CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 70}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 15 assert state.attributes[ATTR_CURRENT_POSITION] == 70 assert ATTR_CURRENT_TILT_POSITION not in state.attributes # Add Entity that supports open tilt / close tilt / stop tilt - hass.states.async_set(DEMO_TILT, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 112}) + hass.states.async_set(DEMO_TILT, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 112}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 127 assert state.attributes[ATTR_CURRENT_POSITION] == 70 @@ -342,13 +354,13 @@ async def test_attributes( # Add Entity that supports set_tilt_position hass.states.async_set( DEMO_COVER_TILT, - STATE_OPEN, + CoverState.OPEN, 
{ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 255 assert state.attributes[ATTR_CURRENT_POSITION] == 70 @@ -359,12 +371,14 @@ async def test_attributes( # Covers hass.states.async_set( - DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 100} + DEMO_COVER, + CoverState.OPEN, + {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 100}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 244 assert state.attributes[ATTR_CURRENT_POSITION] == 85 # (70 + 100) / 2 @@ -375,7 +389,7 @@ async def test_attributes( await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 240 assert ATTR_CURRENT_POSITION not in state.attributes @@ -384,31 +398,31 @@ async def test_attributes( # Tilts hass.states.async_set( DEMO_TILT, - STATE_OPEN, + CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 100}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 128 assert ATTR_CURRENT_POSITION not in state.attributes assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 80 # (60 + 100) / 2 hass.states.async_remove(DEMO_COVER_TILT) - hass.states.async_set(DEMO_TILT, STATE_CLOSED) + hass.states.async_set(DEMO_TILT, CoverState.CLOSED) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 assert ATTR_CURRENT_POSITION not in state.attributes assert ATTR_CURRENT_TILT_POSITION not in state.attributes # Group member has set assumed_state - hass.states.async_set(DEMO_TILT, STATE_CLOSED, {ATTR_ASSUMED_STATE: True}) + hass.states.async_set(DEMO_TILT, CoverState.CLOSED, {ATTR_ASSUMED_STATE: True}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) @@ -426,16 +440,16 @@ async def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> Non """Test removing a cover that support tilt.""" hass.states.async_set( DEMO_COVER_TILT, - STATE_OPEN, + CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60}, ) hass.states.async_set( DEMO_TILT, - STATE_OPEN, + CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60}, ) state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME assert state.attributes[ATTR_ENTITY_ID] == [ DEMO_COVER_TILT, @@ -445,7 +459,7 @@ async def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> Non assert ATTR_CURRENT_TILT_POSITION in state.attributes hass.states.async_remove(DEMO_COVER_TILT) - hass.states.async_set(DEMO_TILT, 
STATE_CLOSED) + hass.states.async_set(DEMO_TILT, CoverState.CLOSED) await hass.async_block_till_done() @@ -454,7 +468,7 @@ async def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> Non async def test_open_covers(hass: HomeAssistant) -> None: """Test open cover function.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): @@ -463,10 +477,10 @@ async def test_open_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 - assert hass.states.get(DEMO_COVER).state == STATE_OPEN + assert hass.states.get(DEMO_COVER).state == CoverState.OPEN assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100 @@ -476,7 +490,7 @@ async def test_open_covers(hass: HomeAssistant) -> None: async def test_close_covers(hass: HomeAssistant) -> None: """Test close cover function.""" await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): @@ -485,10 +499,10 @@ async def test_close_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 - assert hass.states.get(DEMO_COVER).state == STATE_CLOSED + assert hass.states.get(DEMO_COVER).state == CoverState.CLOSED assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0 @@ -499,7 +513,7 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: """Test toggle cover function.""" # Start covers in open state await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -507,11 +521,11 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Toggle will close covers await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -519,16 +533,16 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 - assert hass.states.get(DEMO_COVER).state == STATE_CLOSED + assert hass.states.get(DEMO_COVER).state == CoverState.CLOSED assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0 # Toggle again will open covers await hass.services.async_call( - DOMAIN, 
SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -536,10 +550,10 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 - assert hass.states.get(DEMO_COVER).state == STATE_OPEN + assert hass.states.get(DEMO_COVER).state == CoverState.OPEN assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100 @@ -549,24 +563,24 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: async def test_stop_covers(hass: HomeAssistant) -> None: """Test stop cover function.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 # (20 + 80) / 2 - assert hass.states.get(DEMO_COVER).state == STATE_OPEN + assert hass.states.get(DEMO_COVER).state == CoverState.OPEN assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 20 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 80 @@ -576,7 +590,7 @@ async def test_stop_covers(hass: HomeAssistant) -> None: async def test_set_cover_position(hass: HomeAssistant) -> None: """Test set cover position function.""" await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: COVER_GROUP, ATTR_POSITION: 50}, blocking=True, @@ -587,10 +601,10 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 50 - assert hass.states.get(DEMO_COVER).state == STATE_CLOSED + assert hass.states.get(DEMO_COVER).state == CoverState.CLOSED assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 50 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 50 @@ -600,7 +614,10 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: async def test_open_tilts(hass: HomeAssistant) -> None: """Test open tilt function.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(5): future = dt_util.utcnow() + timedelta(seconds=1) @@ -608,7 +625,7 @@ async def test_open_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == 
STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 assert ( @@ -621,7 +638,10 @@ async def test_open_tilts(hass: HomeAssistant) -> None: async def test_close_tilts(hass: HomeAssistant) -> None: """Test close tilt function.""" await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(5): future = dt_util.utcnow() + timedelta(seconds=1) @@ -629,7 +649,7 @@ async def test_close_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -641,7 +661,10 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: """Test toggle tilt function.""" # Start tilted open await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -649,7 +672,7 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 assert ( @@ -658,7 +681,10 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: # Toggle will tilt closed await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_TOGGLE_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -666,14 +692,17 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Toggle again will tilt open await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_TOGGLE_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -681,7 +710,7 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 assert ( @@ -694,21 +723,27 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: async def test_stop_tilts(hass: HomeAssistant) -> None: """Test stop tilts function.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER_TILT, 
{ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_STOP_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 60 @@ -719,7 +754,7 @@ async def test_stop_tilts(hass: HomeAssistant) -> None: async def test_set_tilt_positions(hass: HomeAssistant) -> None: """Test set tilt position function.""" await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: COVER_GROUP, ATTR_TILT_POSITION: 80}, blocking=True, @@ -730,7 +765,7 @@ async def test_set_tilt_positions(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 80 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 80 @@ -741,14 +776,14 @@ async def test_set_tilt_positions(hass: HomeAssistant) -> None: async def test_is_opening_closing(hass: HomeAssistant) -> None: """Test is_opening property.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) await hass.async_block_till_done() # Both covers opening -> opening - assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING - assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING - assert hass.states.get(COVER_GROUP).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.OPENING + assert hass.states.get(COVER_GROUP).state == CoverState.OPENING for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -756,67 +791,81 @@ async def test_is_opening_closing(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) # Both covers closing -> closing - assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(COVER_GROUP).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING - hass.states.async_set(DEMO_COVER_POS, STATE_OPENING, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.OPENING, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() # Closing + Opening -> Opening - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING - assert hass.states.get(COVER_GROUP).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPENING + assert hass.states.get(COVER_GROUP).state == CoverState.OPENING - 
hass.states.async_set(DEMO_COVER_POS, STATE_CLOSING, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.CLOSING, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() # Both covers closing -> closing - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING - assert hass.states.get(COVER_GROUP).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSING + assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING # Closed + Closing -> Closing - hass.states.async_set(DEMO_COVER_POS, STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.CLOSED, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSED - assert hass.states.get(COVER_GROUP).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSED + assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING # Open + Closing -> Closing - hass.states.async_set(DEMO_COVER_POS, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_POS).state == STATE_OPEN - assert hass.states.get(COVER_GROUP).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPEN + assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING # Closed + Opening -> Closing - hass.states.async_set(DEMO_COVER_TILT, STATE_OPENING, {ATTR_SUPPORTED_FEATURES: 11}) - hass.states.async_set(DEMO_COVER_POS, STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_TILT, CoverState.OPENING, {ATTR_SUPPORTED_FEATURES: 11} + ) + hass.states.async_set( + DEMO_COVER_POS, CoverState.CLOSED, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING - assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSED - assert hass.states.get(COVER_GROUP).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSED + assert hass.states.get(COVER_GROUP).state == CoverState.OPENING # Open + Opening -> Closing - hass.states.async_set(DEMO_COVER_POS, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING - assert hass.states.get(DEMO_COVER_POS).state == STATE_OPEN - assert hass.states.get(COVER_GROUP).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPEN + assert hass.states.get(COVER_GROUP).state == CoverState.OPENING async def test_nested_group(hass: HomeAssistant) -> None: """Test nested cover group.""" await async_setup_component( hass, - DOMAIN, + COVER_DOMAIN, { - DOMAIN: [ + COVER_DOMAIN: [ 
{"platform": "demo"}, { "platform": "group", @@ -837,23 +886,23 @@ async def test_nested_group(hass: HomeAssistant) -> None: state = hass.states.get("cover.bedroom_group") assert state is not None - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_ENTITY_ID) == [DEMO_COVER_POS, DEMO_COVER_TILT] state = hass.states.get("cover.nested_group") assert state is not None - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_ENTITY_ID) == ["cover.bedroom_group"] # Test controlling the nested group async with asyncio.timeout(0.5): await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.nested_group"}, blocking=True, ) - assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get("cover.bedroom_group").state == STATE_CLOSING - assert hass.states.get("cover.nested_group").state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get("cover.bedroom_group").state == CoverState.CLOSING + assert hass.states.get("cover.nested_group").state == CoverState.CLOSING diff --git a/tests/components/group/test_fan.py b/tests/components/group/test_fan.py index 184693f7618..93509b5a651 100644 --- a/tests/components/group/test_fan.py +++ b/tests/components/group/test_fan.py @@ -14,7 +14,7 @@ from homeassistant.components.fan import ( ATTR_PERCENTAGE_STEP, DIRECTION_FORWARD, DIRECTION_REVERSE, - DOMAIN, + DOMAIN as FAN_DOMAIN, SERVICE_OSCILLATE, SERVICE_SET_DIRECTION, SERVICE_SET_PERCENTAGE, @@ -60,7 +60,7 @@ FULL_SUPPORT_FEATURES = ( CONFIG_MISSING_FAN = { - DOMAIN: [ + FAN_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -74,7 +74,7 @@ CONFIG_MISSING_FAN = { } CONFIG_FULL_SUPPORT = { - DOMAIN: [ + FAN_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -84,7 +84,7 @@ CONFIG_FULL_SUPPORT = { } CONFIG_LIMITED_SUPPORT = { - DOMAIN: [ + FAN_DOMAIN: [ { "platform": "group", CONF_ENTITIES: [*LIMITED_FAN_ENTITY_IDS], @@ -94,7 +94,7 @@ CONFIG_LIMITED_SUPPORT = { CONFIG_ATTRIBUTES = { - DOMAIN: { + FAN_DOMAIN: { "platform": "group", CONF_ENTITIES: [*FULL_FAN_ENTITY_IDS, *LIMITED_FAN_ENTITY_IDS], CONF_UNIQUE_ID: "unique_identifier", @@ -108,8 +108,8 @@ async def setup_comp( ) -> None: """Set up group fan component.""" config, count = config_count - with assert_setup_component(count, DOMAIN): - await async_setup_component(hass, DOMAIN, config) + with assert_setup_component(count, FAN_DOMAIN): + await async_setup_component(hass, FAN_DOMAIN, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() @@ -393,7 +393,7 @@ async def test_state_missing_entity_id(hass: HomeAssistant) -> None: async def test_setup_before_started(hass: HomeAssistant) -> None: """Test we can setup before starting.""" hass.set_state(CoreState.stopped) - assert await async_setup_component(hass, DOMAIN, CONFIG_MISSING_FAN) + assert await async_setup_component(hass, FAN_DOMAIN, CONFIG_MISSING_FAN) await hass.async_block_till_done() await hass.async_start() @@ -431,14 +431,14 @@ async def test_reload(hass: HomeAssistant) -> None: async def test_service_calls(hass: HomeAssistant) -> None: """Test calling services.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True + FAN_DOMAIN, 
SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True ) assert hass.states.get(LIVING_ROOM_FAN_ENTITY_ID).state == STATE_ON assert hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID).state == STATE_ON assert hass.states.get(FAN_GROUP).state == STATE_ON await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 66}, blocking=True, @@ -452,14 +452,14 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_PERCENTAGE_STEP] == 100 / 3 await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True + FAN_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True ) assert hass.states.get(LIVING_ROOM_FAN_ENTITY_ID).state == STATE_OFF assert hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID).state == STATE_OFF assert hass.states.get(FAN_GROUP).state == STATE_OFF await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_SET_PERCENTAGE, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 100}, blocking=True, @@ -472,7 +472,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_PERCENTAGE] == 100 await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 0}, blocking=True, @@ -482,7 +482,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert hass.states.get(FAN_GROUP).state == STATE_OFF await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_OSCILLATE, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_OSCILLATING: True}, blocking=True, @@ -495,7 +495,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_OSCILLATING] is True await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_OSCILLATE, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_OSCILLATING: False}, blocking=True, @@ -508,7 +508,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_OSCILLATING] is False await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_SET_DIRECTION, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_DIRECTION: DIRECTION_FORWARD}, blocking=True, @@ -521,7 +521,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_DIRECTION] == DIRECTION_FORWARD await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_SET_DIRECTION, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_DIRECTION: DIRECTION_REVERSE}, blocking=True, @@ -538,9 +538,9 @@ async def test_nested_group(hass: HomeAssistant) -> None: """Test nested fan group.""" await async_setup_component( hass, - DOMAIN, + FAN_DOMAIN, { - DOMAIN: [ + FAN_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -578,7 +578,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: # Test controlling the nested group async with asyncio.timeout(0.5): await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: "fan.nested_group"}, blocking=True, diff --git a/tests/components/group/test_init.py b/tests/components/group/test_init.py index bbbe22cba83..9e6e352e46c 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py @@ -11,6 +11,7 @@ import pytest from homeassistant.components import group from homeassistant.components.group.registry import GroupIntegrationRegistry +from homeassistant.components.lock import LockState from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_FRIENDLY_NAME, @@ -19,17 +20,10 @@ from homeassistant.const import ( 
SERVICE_RELOAD, STATE_CLOSED, STATE_HOME, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, STATE_NOT_HOME, STATE_OFF, STATE_ON, - STATE_OPEN, - STATE_OPENING, STATE_UNKNOWN, - STATE_UNLOCKED, - STATE_UNLOCKING, ) from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import entity_registry as er @@ -740,78 +734,78 @@ async def test_is_on(hass: HomeAssistant) -> None: ), ( ("cover", "cover"), - (STATE_OPEN, STATE_CLOSED), + (LockState.OPEN, STATE_CLOSED), (STATE_CLOSED, STATE_CLOSED), - (STATE_OPEN, True), + (LockState.OPEN, True), (STATE_CLOSED, False), ), ( ("lock", "lock"), - (STATE_UNLOCKED, STATE_LOCKED), - (STATE_LOCKED, STATE_LOCKED), - (STATE_UNLOCKED, True), - (STATE_LOCKED, False), + (LockState.UNLOCKED, LockState.LOCKED), + (LockState.LOCKED, LockState.LOCKED), + (LockState.UNLOCKED, True), + (LockState.LOCKED, False), ), ( ("cover", "lock"), - (STATE_OPEN, STATE_LOCKED), - (STATE_CLOSED, STATE_LOCKED), + (LockState.OPEN, LockState.LOCKED), + (STATE_CLOSED, LockState.LOCKED), (STATE_ON, True), (STATE_OFF, False), ), ( ("cover", "lock"), - (STATE_OPEN, STATE_UNLOCKED), - (STATE_CLOSED, STATE_LOCKED), + (LockState.OPEN, LockState.UNLOCKED), + (STATE_CLOSED, LockState.LOCKED), (STATE_ON, True), (STATE_OFF, False), ), ( ("cover", "lock", "light"), - (STATE_OPEN, STATE_LOCKED, STATE_ON), - (STATE_CLOSED, STATE_LOCKED, STATE_OFF), + (LockState.OPEN, LockState.LOCKED, STATE_ON), + (STATE_CLOSED, LockState.LOCKED, STATE_OFF), (STATE_ON, True), (STATE_OFF, False), ), ( ("lock", "lock"), - (STATE_OPEN, STATE_LOCKED), - (STATE_LOCKED, STATE_LOCKED), - (STATE_UNLOCKED, True), - (STATE_LOCKED, False), + (LockState.OPEN, LockState.LOCKED), + (LockState.LOCKED, LockState.LOCKED), + (LockState.UNLOCKED, True), + (LockState.LOCKED, False), ), ( ("lock", "lock"), - (STATE_OPENING, STATE_LOCKED), - (STATE_LOCKED, STATE_LOCKED), - (STATE_UNLOCKED, True), - (STATE_LOCKED, False), + (LockState.OPENING, LockState.LOCKED), + (LockState.LOCKED, LockState.LOCKED), + (LockState.UNLOCKED, True), + (LockState.LOCKED, False), ), ( ("lock", "lock"), - (STATE_UNLOCKING, STATE_LOCKED), - (STATE_LOCKED, STATE_LOCKED), - (STATE_UNLOCKED, True), - (STATE_LOCKED, False), + (LockState.UNLOCKING, LockState.LOCKED), + (LockState.LOCKED, LockState.LOCKED), + (LockState.UNLOCKED, True), + (LockState.LOCKED, False), ), ( ("lock", "lock"), - (STATE_LOCKING, STATE_LOCKED), - (STATE_LOCKED, STATE_LOCKED), - (STATE_UNLOCKED, True), - (STATE_LOCKED, False), + (LockState.LOCKING, LockState.LOCKED), + (LockState.LOCKED, LockState.LOCKED), + (LockState.UNLOCKED, True), + (LockState.LOCKED, False), ), ( ("lock", "lock"), - (STATE_JAMMED, STATE_LOCKED), - (STATE_LOCKED, STATE_LOCKED), - (STATE_LOCKED, False), - (STATE_LOCKED, False), + (LockState.JAMMED, LockState.LOCKED), + (LockState.LOCKED, LockState.LOCKED), + (LockState.LOCKED, False), + (LockState.LOCKED, False), ), ( ("cover", "lock"), - (STATE_OPEN, STATE_OPEN), - (STATE_CLOSED, STATE_LOCKED), + (LockState.OPEN, LockState.OPEN), + (STATE_CLOSED, LockState.LOCKED), (STATE_ON, True), (STATE_OFF, False), ), diff --git a/tests/components/group/test_light.py b/tests/components/group/test_light.py index af8556b5450..91604d663b3 100644 --- a/tests/components/group/test_light.py +++ b/tests/components/group/test_light.py @@ -12,7 +12,6 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, @@ -792,19 +791,19 @@ async def 
test_emulated_color_temp_group(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.light_group", ATTR_COLOR_TEMP: 200}, + {ATTR_ENTITY_ID: "light.light_group", ATTR_COLOR_TEMP_KELVIN: 5000}, blocking=True, ) await hass.async_block_till_done() state = hass.states.get("light.test1") assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 200 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 5000 assert ATTR_HS_COLOR in state.attributes state = hass.states.get("light.test2") assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 200 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 5000 assert ATTR_HS_COLOR in state.attributes state = hass.states.get("light.test3") diff --git a/tests/components/group/test_lock.py b/tests/components/group/test_lock.py index 0c62913ae3e..cc255264183 100644 --- a/tests/components/group/test_lock.py +++ b/tests/components/group/test_lock.py @@ -12,18 +12,9 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, + LockState, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - STATE_UNLOCKED, - STATE_UNLOCKING, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er @@ -55,7 +46,7 @@ async def test_default_state( state = hass.states.get("lock.door_group") assert state is not None - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED assert state.attributes.get(ATTR_ENTITY_ID) == ["lock.front", "lock.back"] entry = entity_registry.async_get("lock.door_group") @@ -109,63 +100,63 @@ async def test_state_reporting(hass: HomeAssistant) -> None: # At least one member jammed -> group jammed for state_1 in ( - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, + LockState.JAMMED, + LockState.LOCKED, + LockState.LOCKING, STATE_UNAVAILABLE, STATE_UNKNOWN, - STATE_UNLOCKED, - STATE_UNLOCKING, + LockState.UNLOCKED, + LockState.UNLOCKING, ): hass.states.async_set("lock.test1", state_1) - hass.states.async_set("lock.test2", STATE_JAMMED) + hass.states.async_set("lock.test2", LockState.JAMMED) await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == STATE_JAMMED + assert hass.states.get("lock.lock_group").state == LockState.JAMMED # At least one member locking -> group unlocking for state_1 in ( - STATE_LOCKED, - STATE_LOCKING, + LockState.LOCKED, + LockState.LOCKING, STATE_UNAVAILABLE, STATE_UNKNOWN, - STATE_UNLOCKED, - STATE_UNLOCKING, + LockState.UNLOCKED, + LockState.UNLOCKING, ): hass.states.async_set("lock.test1", state_1) - hass.states.async_set("lock.test2", STATE_LOCKING) + hass.states.async_set("lock.test2", LockState.LOCKING) await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == STATE_LOCKING + assert hass.states.get("lock.lock_group").state == LockState.LOCKING # At least one member unlocking -> group unlocking for state_1 in ( - STATE_LOCKED, + LockState.LOCKED, STATE_UNAVAILABLE, STATE_UNKNOWN, - STATE_UNLOCKED, - STATE_UNLOCKING, + LockState.UNLOCKED, + LockState.UNLOCKING, ): hass.states.async_set("lock.test1", state_1) - hass.states.async_set("lock.test2", STATE_UNLOCKING) + hass.states.async_set("lock.test2", LockState.UNLOCKING) await 
hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == STATE_UNLOCKING + assert hass.states.get("lock.lock_group").state == LockState.UNLOCKING # At least one member unlocked -> group unlocked for state_1 in ( - STATE_LOCKED, + LockState.LOCKED, STATE_UNAVAILABLE, STATE_UNKNOWN, - STATE_UNLOCKED, + LockState.UNLOCKED, ): hass.states.async_set("lock.test1", state_1) - hass.states.async_set("lock.test2", STATE_UNLOCKED) + hass.states.async_set("lock.test2", LockState.UNLOCKED) await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == STATE_UNLOCKED + assert hass.states.get("lock.lock_group").state == LockState.UNLOCKED # Otherwise -> locked - hass.states.async_set("lock.test1", STATE_LOCKED) - hass.states.async_set("lock.test2", STATE_LOCKED) + hass.states.async_set("lock.test1", LockState.LOCKED) + hass.states.async_set("lock.test2", LockState.LOCKED) await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == STATE_LOCKED + assert hass.states.get("lock.lock_group").state == LockState.LOCKED # All group members removed from the state machine -> unavailable hass.states.async_remove("lock.test1") @@ -195,9 +186,9 @@ async def test_service_calls_openable(hass: HomeAssistant) -> None: await hass.async_block_till_done() group_state = hass.states.get("lock.lock_group") - assert group_state.state == STATE_UNLOCKED - assert hass.states.get("lock.openable_lock").state == STATE_LOCKED - assert hass.states.get("lock.another_openable_lock").state == STATE_UNLOCKED + assert group_state.state == LockState.UNLOCKED + assert hass.states.get("lock.openable_lock").state == LockState.LOCKED + assert hass.states.get("lock.another_openable_lock").state == LockState.UNLOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -205,8 +196,8 @@ async def test_service_calls_openable(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.openable_lock").state == STATE_OPEN - assert hass.states.get("lock.another_openable_lock").state == STATE_OPEN + assert hass.states.get("lock.openable_lock").state == LockState.OPEN + assert hass.states.get("lock.another_openable_lock").state == LockState.OPEN await hass.services.async_call( LOCK_DOMAIN, @@ -214,8 +205,8 @@ async def test_service_calls_openable(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.openable_lock").state == STATE_LOCKED - assert hass.states.get("lock.another_openable_lock").state == STATE_LOCKED + assert hass.states.get("lock.openable_lock").state == LockState.LOCKED + assert hass.states.get("lock.another_openable_lock").state == LockState.LOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -223,8 +214,8 @@ async def test_service_calls_openable(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.openable_lock").state == STATE_UNLOCKED - assert hass.states.get("lock.another_openable_lock").state == STATE_UNLOCKED + assert hass.states.get("lock.openable_lock").state == LockState.UNLOCKED + assert hass.states.get("lock.another_openable_lock").state == LockState.UNLOCKED async def test_service_calls_basic(hass: HomeAssistant) -> None: @@ -248,9 +239,9 @@ async def test_service_calls_basic(hass: HomeAssistant) -> None: await hass.async_block_till_done() group_state = hass.states.get("lock.lock_group") - assert group_state.state == STATE_UNLOCKED - assert 
hass.states.get("lock.basic_lock").state == STATE_LOCKED - assert hass.states.get("lock.another_basic_lock").state == STATE_UNLOCKED + assert group_state.state == LockState.UNLOCKED + assert hass.states.get("lock.basic_lock").state == LockState.LOCKED + assert hass.states.get("lock.another_basic_lock").state == LockState.UNLOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -258,8 +249,8 @@ async def test_service_calls_basic(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.basic_lock").state == STATE_LOCKED - assert hass.states.get("lock.another_basic_lock").state == STATE_LOCKED + assert hass.states.get("lock.basic_lock").state == LockState.LOCKED + assert hass.states.get("lock.another_basic_lock").state == LockState.LOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -267,8 +258,8 @@ async def test_service_calls_basic(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.basic_lock").state == STATE_UNLOCKED - assert hass.states.get("lock.another_basic_lock").state == STATE_UNLOCKED + assert hass.states.get("lock.basic_lock").state == LockState.UNLOCKED + assert hass.states.get("lock.another_basic_lock").state == LockState.UNLOCKED with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -303,7 +294,7 @@ async def test_reload(hass: HomeAssistant) -> None: await hass.async_start() await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == STATE_UNLOCKED + assert hass.states.get("lock.lock_group").state == LockState.UNLOCKED yaml_path = get_fixture_path("configuration.yaml", "group") with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): @@ -322,7 +313,7 @@ async def test_reload(hass: HomeAssistant) -> None: async def test_reload_with_platform_not_setup(hass: HomeAssistant) -> None: """Test the ability to reload locks.""" - hass.states.async_set("lock.something", STATE_UNLOCKED) + hass.states.async_set("lock.something", LockState.UNLOCKED) await async_setup_component( hass, LOCK_DOMAIN, @@ -372,11 +363,11 @@ async def test_reload_with_base_integration_platform_not_setup( }, ) await hass.async_block_till_done() - hass.states.async_set("lock.front_lock", STATE_LOCKED) - hass.states.async_set("lock.back_lock", STATE_UNLOCKED) + hass.states.async_set("lock.front_lock", LockState.LOCKED) + hass.states.async_set("lock.back_lock", LockState.UNLOCKED) - hass.states.async_set("lock.outside_lock", STATE_LOCKED) - hass.states.async_set("lock.outside_lock_2", STATE_LOCKED) + hass.states.async_set("lock.outside_lock", LockState.LOCKED) + hass.states.async_set("lock.outside_lock_2", LockState.LOCKED) yaml_path = get_fixture_path("configuration.yaml", "group") with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): @@ -391,8 +382,8 @@ async def test_reload_with_base_integration_platform_not_setup( assert hass.states.get("lock.lock_group") is None assert hass.states.get("lock.inside_locks_g") is not None assert hass.states.get("lock.outside_locks_g") is not None - assert hass.states.get("lock.inside_locks_g").state == STATE_UNLOCKED - assert hass.states.get("lock.outside_locks_g").state == STATE_LOCKED + assert hass.states.get("lock.inside_locks_g").state == LockState.UNLOCKED + assert hass.states.get("lock.outside_locks_g").state == LockState.LOCKED @patch.object(demo_lock, "LOCK_UNLOCK_DELAY", 0) @@ -426,7 +417,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: state = 
hass.states.get("lock.some_group") assert state is not None - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ENTITY_ID) == [ "lock.front_door", "lock.kitchen_door", @@ -434,7 +425,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: state = hass.states.get("lock.nested_group") assert state is not None - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ENTITY_ID) == ["lock.some_group"] # Test controlling the nested group @@ -444,7 +435,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.nested_group"}, blocking=True, ) - assert hass.states.get("lock.front_door").state == STATE_LOCKED - assert hass.states.get("lock.kitchen_door").state == STATE_LOCKED - assert hass.states.get("lock.some_group").state == STATE_LOCKED - assert hass.states.get("lock.nested_group").state == STATE_LOCKED + assert hass.states.get("lock.front_door").state == LockState.LOCKED + assert hass.states.get("lock.kitchen_door").state == LockState.LOCKED + assert hass.states.get("lock.some_group").state == LockState.LOCKED + assert hass.states.get("lock.nested_group").state == LockState.LOCKED diff --git a/tests/components/group/test_notify.py b/tests/components/group/test_notify.py index bbf2d98b492..e3a01c05eca 100644 --- a/tests/components/group/test_notify.py +++ b/tests/components/group/test_notify.py @@ -161,7 +161,8 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No "data": {"hello": "world", "test": "message", "default": "default"}, }, ), - ] + ], + any_order=True, ) send_message_mock.reset_mock() diff --git a/tests/components/group/test_sensor.py b/tests/components/group/test_sensor.py index db642506361..de406cb251c 100644 --- a/tests/components/group/test_sensor.py +++ b/tests/components/group/test_sensor.py @@ -32,6 +32,7 @@ from homeassistant.const import ( SERVICE_RELOAD, STATE_UNAVAILABLE, STATE_UNKNOWN, + UnitOfTemperature, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir @@ -496,7 +497,7 @@ async def test_sensor_with_uoms_but_no_device_class( state = hass.states.get("sensor.test_sum") assert state.attributes.get("device_class") is None assert state.attributes.get("state_class") is None - assert state.attributes.get("unit_of_measurement") == "W" + assert state.attributes.get("unit_of_measurement") is None assert state.state == STATE_UNKNOWN assert ( @@ -650,10 +651,10 @@ async def test_sensor_calculated_result_fails_on_uom(hass: HomeAssistant) -> Non await hass.async_block_till_done() state = hass.states.get("sensor.test_sum") - assert state.state == STATE_UNKNOWN + assert state.state == STATE_UNAVAILABLE assert state.attributes.get("device_class") == "energy" assert state.attributes.get("state_class") == "total" - assert state.attributes.get("unit_of_measurement") == "kWh" + assert state.attributes.get("unit_of_measurement") is None async def test_sensor_calculated_properties_not_convertible_device_class( @@ -730,7 +731,7 @@ async def test_sensor_calculated_properties_not_convertible_device_class( assert state.state == STATE_UNKNOWN assert state.attributes.get("device_class") == "humidity" assert state.attributes.get("state_class") == "measurement" - assert state.attributes.get("unit_of_measurement") == "%" + assert state.attributes.get("unit_of_measurement") is None assert ( "Unable to use state. 
Only entities with correct unit of measurement is" @@ -812,3 +813,197 @@ async def test_sensors_attributes_added_when_entity_info_available( assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.TOTAL assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "L" + + +async def test_sensor_state_class_no_uom_not_available( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test when input sensors drops unit of measurement.""" + + # If we have a valid unit of measurement from all input sensors + # the group sensor will go unknown in the case any input sensor + # drops the unit of measurement and log a warning. + + config = { + SENSOR_DOMAIN: { + "platform": GROUP_DOMAIN, + "name": "test_sum", + "type": "sum", + "entities": ["sensor.test_1", "sensor.test_2", "sensor.test_3"], + "unique_id": "very_unique_id_sum_sensor", + } + } + + entity_ids = config["sensor"]["entities"] + + input_attributes = { + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": PERCENTAGE, + } + + hass.states.async_set(entity_ids[0], VALUES[0], input_attributes) + hass.states.async_set(entity_ids[1], VALUES[1], input_attributes) + hass.states.async_set(entity_ids[2], VALUES[2], input_attributes) + await hass.async_block_till_done() + + assert await async_setup_component(hass, "sensor", config) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test_sum") + assert state.state == str(sum(VALUES)) + assert state.attributes.get("state_class") == "measurement" + assert state.attributes.get("unit_of_measurement") == "%" + + assert ( + "Unable to use state. Only entities with correct unit of measurement is" + " supported" + ) not in caplog.text + + # sensor.test_3 drops the unit of measurement + hass.states.async_set( + entity_ids[2], + VALUES[2], + { + "state_class": SensorStateClass.MEASUREMENT, + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test_sum") + assert state.state == STATE_UNKNOWN + assert state.attributes.get("state_class") == "measurement" + assert state.attributes.get("unit_of_measurement") is None + + assert ( + "Unable to use state. 
Only entities with correct unit of measurement is" + " supported, entity sensor.test_3, value 15.3 with" + " device class None and unit of measurement None excluded from calculation" + " in sensor.test_sum" + ) in caplog.text + + +async def test_sensor_different_attributes_ignore_non_numeric( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the sensor handles calculating attributes when using ignore_non_numeric.""" + config = { + SENSOR_DOMAIN: { + "platform": GROUP_DOMAIN, + "name": "test_sum", + "type": "sum", + "ignore_non_numeric": True, + "entities": ["sensor.test_1", "sensor.test_2", "sensor.test_3"], + "unique_id": "very_unique_id_sum_sensor", + } + } + + entity_ids = config["sensor"]["entities"] + + assert await async_setup_component(hass, "sensor", config) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test_sum") + assert state.state == STATE_UNAVAILABLE + assert state.attributes.get("state_class") is None + assert state.attributes.get("device_class") is None + assert state.attributes.get("unit_of_measurement") is None + + test_cases = [ + { + "entity": entity_ids[0], + "value": VALUES[0], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": PERCENTAGE, + }, + "expected_state": str(float(VALUES[0])), + "expected_state_class": SensorStateClass.MEASUREMENT, + "expected_device_class": None, + "expected_unit_of_measurement": PERCENTAGE, + }, + { + "entity": entity_ids[1], + "value": VALUES[1], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "device_class": SensorDeviceClass.HUMIDITY, + "unit_of_measurement": PERCENTAGE, + }, + "expected_state": str(float(sum([VALUES[0], VALUES[1]]))), + "expected_state_class": SensorStateClass.MEASUREMENT, + "expected_device_class": None, + "expected_unit_of_measurement": PERCENTAGE, + }, + { + "entity": entity_ids[2], + "value": VALUES[2], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "device_class": SensorDeviceClass.TEMPERATURE, + "unit_of_measurement": UnitOfTemperature.CELSIUS, + }, + "expected_state": str(float(sum(VALUES))), + "expected_state_class": SensorStateClass.MEASUREMENT, + "expected_device_class": None, + "expected_unit_of_measurement": None, + }, + { + "entity": entity_ids[2], + "value": VALUES[2], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "device_class": SensorDeviceClass.HUMIDITY, + "unit_of_measurement": PERCENTAGE, + }, + "expected_state": str(float(sum(VALUES))), + "expected_state_class": SensorStateClass.MEASUREMENT, + # One sensor does not have a device class + "expected_device_class": None, + "expected_unit_of_measurement": PERCENTAGE, + }, + { + "entity": entity_ids[0], + "value": VALUES[0], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "device_class": SensorDeviceClass.HUMIDITY, + "unit_of_measurement": PERCENTAGE, + }, + "expected_state": str(float(sum(VALUES))), + "expected_state_class": SensorStateClass.MEASUREMENT, + # First sensor now has a device class + "expected_device_class": SensorDeviceClass.HUMIDITY, + "expected_unit_of_measurement": PERCENTAGE, + }, + { + "entity": entity_ids[0], + "value": VALUES[0], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + }, + "expected_state": str(float(sum(VALUES))), + "expected_state_class": SensorStateClass.MEASUREMENT, + "expected_device_class": None, + "expected_unit_of_measurement": None, + }, + ] + + for test_case in test_cases: + hass.states.async_set( + test_case["entity"], + 
test_case["value"], + test_case["attributes"], + ) + await hass.async_block_till_done() + state = hass.states.get("sensor.test_sum") + assert state.state == test_case["expected_state"] + assert state.attributes.get("state_class") == test_case["expected_state_class"] + assert ( + state.attributes.get("device_class") == test_case["expected_device_class"] + ) + assert ( + state.attributes.get("unit_of_measurement") + == test_case["expected_unit_of_measurement"] + ) diff --git a/tests/components/guardian/test_diagnostics.py b/tests/components/guardian/test_diagnostics.py index 3b3ed21bc65..4487d0b6ac6 100644 --- a/tests/components/guardian/test_diagnostics.py +++ b/tests/components/guardian/test_diagnostics.py @@ -41,6 +41,8 @@ async def test_entry_diagnostics( "disabled_by": None, "created_at": ANY, "modified_at": ANY, + "discovery_keys": {}, + "subentries": [], }, "data": { "valve_controller": { diff --git a/tests/components/habitica/conftest.py b/tests/components/habitica/conftest.py index 2401397be26..f76987c5ce6 100644 --- a/tests/components/habitica/conftest.py +++ b/tests/components/habitica/conftest.py @@ -3,6 +3,14 @@ from unittest.mock import patch import pytest +from yarl import URL + +from homeassistant.components.habitica.const import CONF_API_USER, DEFAULT_URL, DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_json_object_fixture +from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(autouse=True) @@ -13,3 +21,75 @@ def disable_plumbum(): """ with patch("plumbum.local"), patch("plumbum.colors"): yield + + +def mock_called_with( + mock_client: AiohttpClientMocker, + method: str, + url: str, +) -> tuple | None: + """Assert request mock was called with json data.""" + + return next( + ( + call + for call in mock_client.mock_calls + if call[0].upper() == method.upper() and call[1] == URL(url) + ), + None, + ) + + +@pytest.fixture +def mock_habitica(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: + """Mock aiohttp requests.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", json=load_json_object_fixture("user.json", DOMAIN) + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json=load_json_object_fixture("completed_todos.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user/anonymized", + json={ + "data": { + "user": load_json_object_fixture("user.json", DOMAIN)["data"], + "tasks": load_json_object_fixture("tasks.json", DOMAIN)["data"], + } + }, + ) + + return aioclient_mock + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Mock Habitica configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="test-user", + data={ + CONF_URL: DEFAULT_URL, + CONF_API_USER: "test-api-user", + CONF_API_KEY: "test-api-key", + }, + unique_id="00000000-0000-0000-0000-000000000000", + ) + + +@pytest.fixture +async def set_tz(hass: HomeAssistant) -> None: + """Fixture to set timezone.""" + await hass.config.async_set_time_zone("Europe/Berlin") diff --git a/tests/components/habitica/fixtures/common_buttons_unavailable.json 
b/tests/components/habitica/fixtures/common_buttons_unavailable.json new file mode 100644 index 00000000000..bcc65ee3f91 --- /dev/null +++ b/tests/components/habitica/fixtures/common_buttons_unavailable.json @@ -0,0 +1,55 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 0, + "int": 0, + "per": 0, + "con": 0, + "stealth": 0, + "streaks": true, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50, + "exp": 737, + "gp": 0, + "lvl": 5, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/completed_todos.json b/tests/components/habitica/fixtures/completed_todos.json new file mode 100644 index 00000000000..8185a0a4ff7 --- /dev/null +++ b/tests/components/habitica/fixtures/completed_todos.json @@ -0,0 +1,78 @@ +{ + "success": true, + "data": [ + { + "_id": "162f0bbe-a097-4a06-b4f4-8fbeed85d2ba", + "completed": true, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Wocheneinkauf erledigen", + "notes": "Lebensmittel und Haushaltsbedarf für die Woche einkaufen.", + "tags": ["64235347-55d0-4ba1-a86a-3428dcfdf319"], + "value": 1, + "priority": 1.5, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:19:10.919Z", + "updatedAt": "2024-09-21T22:19:15.484Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "dateCompleted": "2024-09-21T22:19:15.478Z", + "id": "162f0bbe-a097-4a06-b4f4-8fbeed85d2ba" + }, + { + "_id": "3fa06743-aa0f-472b-af1a-f27c755e329c", + "completed": true, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Wohnung aufräumen", + "notes": "Wohnzimmer und Küche gründlich aufräumen.", + "tags": ["64235347-55d0-4ba1-a86a-3428dcfdf319"], + "value": 1, + "priority": 2, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:18:30.646Z", + "updatedAt": "2024-09-21T22:18:34.663Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "dateCompleted": "2024-09-21T22:18:34.660Z", + "id": "3fa06743-aa0f-472b-af1a-f27c755e329c" + } + ], + "notifications": [ + { + "type": "ITEM_RECEIVED", + "data": { + "icon": "notif_orca_mount", + "title": "Orcas for Summer Splash!", + "text": "To celebrate Summer Splash, we've given you an Orca Mount!", + "destination": "stable" + }, + "seen": true, + "id": "b7a85df1-06ed-4ab1-b56d-43418fc6a5e5" + }, + { + "type": "UNALLOCATED_STATS_POINTS", + "data": { + "points": 2 + }, + "seen": true, + "id": "bc3f8a69-231f-4eb1-ba48-a00b6c0e0f37" + } + ], + "userV": 584, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/content.json 
b/tests/components/habitica/fixtures/content.json new file mode 100644 index 00000000000..e8e14dead73 --- /dev/null +++ b/tests/components/habitica/fixtures/content.json @@ -0,0 +1,287 @@ +{ + "success": true, + "data": { + "gear": { + "flat": { + "weapon_warrior_5": { + "text": "Ruby Sword", + "notes": "Weapon whose forge-glow never fades. Increases Strength by 15. ", + "str": 15, + "value": 90, + "type": "weapon", + "key": "weapon_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "armor_warrior_5": { + "text": "Golden Armor", + "notes": "Looks ceremonial, but no known blade can pierce it. Increases Constitution by 11.", + "con": 11, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "head_warrior_5": { + "text": "Golden Helm", + "notes": "Regal crown bound to shining armor. Increases Strength by 12.", + "str": 12, + "value": 80, + "last": true, + "type": "head", + "key": "head_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "shield_warrior_5": { + "text": "Golden Shield", + "notes": "Shining badge of the vanguard. Increases Constitution by 9.", + "con": 9, + "value": 90, + "last": true, + "type": "shield", + "key": "shield_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "weapon_wizard_5": { + "twoHanded": true, + "text": "Archmage Staff", + "notes": "Assists in weaving the most complex of spells. Increases Intelligence by 15 and Perception by 7. Two-handed item.", + "int": 15, + "per": 7, + "value": 160, + "type": "weapon", + "key": "weapon_wizard_5", + "set": "wizard-5", + "klass": "wizard", + "index": "5", + "str": 0, + "con": 0 + }, + "armor_wizard_5": { + "text": "Royal Magus Robe", + "notes": "Symbol of the power behind the throne. Increases Intelligence by 12.", + "int": 12, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_wizard_5", + "set": "wizard-5", + "klass": "wizard", + "index": "5", + "str": 0, + "per": 0, + "con": 0 + }, + "head_wizard_5": { + "text": "Royal Magus Hat", + "notes": "Shows authority over fortune, weather, and lesser mages. Increases Perception by 10.", + "per": 10, + "value": 80, + "last": true, + "type": "head", + "key": "head_wizard_5", + "set": "wizard-5", + "klass": "wizard", + "index": "5", + "str": 0, + "int": 0, + "con": 0 + }, + "weapon_healer_5": { + "text": "Royal Scepter", + "notes": "Fit to grace the hand of a monarch, or of one who stands at a monarch's right hand. Increases Intelligence by 9. ", + "int": 9, + "value": 90, + "type": "weapon", + "key": "weapon_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "per": 0, + "con": 0 + }, + "armor_healer_5": { + "text": "Royal Mantle", + "notes": "Attire of those who have saved the lives of kings. Increases Constitution by 18.", + "con": 18, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "head_healer_5": { + "text": "Royal Diadem", + "notes": "For king, queen, or miracle-worker. 
Increases Intelligence by 9.", + "int": 9, + "value": 80, + "last": true, + "type": "head", + "key": "head_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "per": 0, + "con": 0 + }, + "shield_healer_5": { + "text": "Royal Shield", + "notes": "Bestowed upon those most dedicated to the kingdom's defense. Increases Constitution by 12.", + "con": 12, + "value": 90, + "last": true, + "type": "shield", + "key": "shield_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "weapon_rogue_5": { + "text": "Ninja-to", + "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. ", + "str": 8, + "value": 90, + "type": "weapon", + "key": "weapon_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "armor_rogue_5": { + "text": "Umbral Armor", + "notes": "Allows stealth in the open in broad daylight. Increases Perception by 18.", + "per": 18, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "str": 0, + "int": 0, + "con": 0 + }, + "head_rogue_5": { + "text": "Umbral Hood", + "notes": "Conceals even thoughts from those who would probe them. Increases Perception by 12.", + "per": 12, + "value": 80, + "last": true, + "type": "head", + "key": "head_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "str": 0, + "int": 0, + "con": 0 + }, + "shield_rogue_5": { + "text": "Ninja-to", + "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. ", + "str": 8, + "value": 90, + "type": "shield", + "key": "shield_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "back_special_heroicAureole": { + "text": "Heroic Aureole", + "notes": "The gems on this aureole glimmer when you tell your tales of glory. Increases all stats by 7.", + "con": 7, + "str": 7, + "per": 7, + "int": 7, + "value": 175, + "type": "back", + "key": "back_special_heroicAureole", + "set": "special-heroicAureole", + "klass": "special", + "index": "heroicAureole" + }, + "headAccessory_armoire_gogglesOfBookbinding": { + "per": 8, + "set": "bookbinder", + "notes": "These goggles will help you zero in on any task, large or small! Increases Perception by 8. Enchanted Armoire: Bookbinder Set (Item 1 of 4).", + "text": "Goggles of Bookbinding", + "value": 100, + "type": "headAccessory", + "key": "headAccessory_armoire_gogglesOfBookbinding", + "klass": "armoire", + "index": "gogglesOfBookbinding", + "str": 0, + "int": 0, + "con": 0 + }, + "eyewear_armoire_plagueDoctorMask": { + "con": 5, + "int": 5, + "set": "plagueDoctor", + "notes": "An authentic mask worn by the doctors who battle the Plague of Procrastination. Increases Constitution and Intelligence by 5 each. Enchanted Armoire: Plague Doctor Set (Item 2 of 3).", + "text": "Plague Doctor Mask", + "value": 100, + "type": "eyewear", + "key": "eyewear_armoire_plagueDoctorMask", + "klass": "armoire", + "index": "plagueDoctorMask", + "str": 0, + "per": 0 + }, + "body_special_aetherAmulet": { + "text": "Aether Amulet", + "notes": "This amulet has a mysterious history. 
Increases Constitution and Strength by 10 each.", + "value": 175, + "str": 10, + "con": 10, + "type": "body", + "key": "body_special_aetherAmulet", + "set": "special-aetherAmulet", + "klass": "special", + "index": "aetherAmulet", + "int": 0, + "per": 0 + } + } + } + }, + "appVersion": "5.29.2" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_1.json b/tests/components/habitica/fixtures/duedate_fixture_1.json new file mode 100644 index 00000000000..d44d5f38498 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_1.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "daily", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-22T22:00:00.000Z", "2024-09-23T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-07-06T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": true, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_2.json b/tests/components/habitica/fixtures/duedate_fixture_2.json new file mode 100644 index 00000000000..99cf4e89454 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_2.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "daily", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-22T22:00:00.000Z", "2024-09-23T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-23T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_3.json b/tests/components/habitica/fixtures/duedate_fixture_3.json new file mode 100644 index 00000000000..78b66ad6643 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_3.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "monthly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": 
true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-10-22T22:00:00.000Z", "2024-11-22T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-10-22T22:00:00.000Z", + "daysOfMonth": [23], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_4.json b/tests/components/habitica/fixtures/duedate_fixture_4.json new file mode 100644 index 00000000000..7e14e3339e2 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_4.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "yearly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-10-22T22:00:00.000Z", "2025-10-22T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-10-22T22:00:00.000Z", + "daysOfMonth": [22], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_5.json b/tests/components/habitica/fixtures/duedate_fixture_5.json new file mode 100644 index 00000000000..d8d5f4cd773 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_5.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "weekly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-20T22:00:00.000Z", "2024-09-27T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-25T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": 
"564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_6.json b/tests/components/habitica/fixtures/duedate_fixture_6.json new file mode 100644 index 00000000000..dce177b1abc --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_6.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "monthly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-20T22:00:00.000Z", "2024-10-20T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-25T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_7.json b/tests/components/habitica/fixtures/duedate_fixture_7.json new file mode 100644 index 00000000000..723ee40062d --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_7.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "monthly", + "everyX": 0, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-22T22:00:00.000Z", "2024-09-23T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-23T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_8.json b/tests/components/habitica/fixtures/duedate_fixture_8.json new file mode 100644 index 00000000000..21a40a0a649 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_8.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "daily", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": [], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": 
"Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-23T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/healer_fixture.json b/tests/components/habitica/fixtures/healer_fixture.json new file mode 100644 index 00000000000..d76ae612662 --- /dev/null +++ b/tests/components/habitica/fixtures/healer_fixture.json @@ -0,0 +1,60 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 45, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "healer", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_healer_5", + "armor": "armor_healer_5", + "head": "head_healer_5", + "shield": "shield_healer_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/healer_skills_unavailable.json b/tests/components/habitica/fixtures/healer_skills_unavailable.json new file mode 100644 index 00000000000..e3cead40f7d --- /dev/null +++ b/tests/components/habitica/fixtures/healer_skills_unavailable.json @@ -0,0 +1,59 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 10, + "exp": 737, + "gp": 0, + "lvl": 34, + "class": "healer", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_healer_5", + "armor": "armor_healer_5", + "head": "head_healer_5", + "shield": "shield_healer_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/party_members.json 
b/tests/components/habitica/fixtures/party_members.json new file mode 100644 index 00000000000..e1bb31e6d81 --- /dev/null +++ b/tests/components/habitica/fixtures/party_members.json @@ -0,0 +1,442 @@ +{ + "success": true, + "data": [ + { + "_id": "a380546a-94be-4b8e-8a0b-23e0d5c03303", + "auth": { + "local": { + "username": "test-username" + }, + "timestamps": { + "created": "2024-10-19T18:43:39.782Z", + "loggedin": "2024-10-31T16:13:35.048Z", + "updated": "2024-10-31T16:15:56.552Z" + } + }, + "achievements": { + "ultimateGearSets": { + "healer": false, + "wizard": false, + "rogue": false, + "warrior": false + }, + "streak": 0, + "challenges": [], + "perfect": 1, + "quests": {}, + "purchasedEquipment": true, + "completedTask": true, + "partyUp": true + }, + "backer": {}, + "contributor": {}, + "flags": { + "verifiedUsername": true, + "classSelected": true + }, + "items": { + "gear": { + "owned": { + "headAccessory_special_blackHeadband": true, + "headAccessory_special_blueHeadband": true, + "headAccessory_special_greenHeadband": true, + "headAccessory_special_pinkHeadband": true, + "headAccessory_special_redHeadband": true, + "headAccessory_special_whiteHeadband": true, + "headAccessory_special_yellowHeadband": true, + "eyewear_special_blackTopFrame": true, + "eyewear_special_blueTopFrame": true, + "eyewear_special_greenTopFrame": true, + "eyewear_special_pinkTopFrame": true, + "eyewear_special_redTopFrame": true, + "eyewear_special_whiteTopFrame": true, + "eyewear_special_yellowTopFrame": true, + "eyewear_special_blackHalfMoon": true, + "eyewear_special_blueHalfMoon": true, + "eyewear_special_greenHalfMoon": true, + "eyewear_special_pinkHalfMoon": true, + "eyewear_special_redHalfMoon": true, + "eyewear_special_whiteHalfMoon": true, + "eyewear_special_yellowHalfMoon": true, + "armor_special_bardRobes": true, + "weapon_special_fall2024Warrior": true, + "shield_special_fall2024Warrior": true, + "head_special_fall2024Warrior": true, + "armor_special_fall2024Warrior": true, + "back_mystery_201402": true, + "body_mystery_202003": true, + "head_special_bardHat": true, + "weapon_wizard_0": true + }, + "equipped": { + "weapon": "weapon_special_fall2024Warrior", + "armor": "armor_special_fall2024Warrior", + "head": "head_special_fall2024Warrior", + "shield": "shield_special_fall2024Warrior", + "back": "back_mystery_201402", + "headAccessory": "headAccessory_special_pinkHeadband", + "eyewear": "eyewear_special_pinkHalfMoon", + "body": "body_mystery_202003" + }, + "costume": { + "armor": "armor_base_0", + "head": "head_base_0", + "shield": "shield_base_0" + } + }, + "special": { + "snowball": 99, + "spookySparkles": 99, + "shinySeed": 99, + "seafoam": 99, + "valentine": 0, + "valentineReceived": [], + "nye": 0, + "nyeReceived": [], + "greeting": 0, + "greetingReceived": [], + "thankyou": 0, + "thankyouReceived": [], + "birthday": 0, + "birthdayReceived": [], + "congrats": 0, + "congratsReceived": [], + "getwell": 0, + "getwellReceived": [], + "goodluck": 0, + "goodluckReceived": [] + }, + "pets": { + "Rat-Shade": 1, + "Gryphatrice-Jubilant": 1 + }, + "currentPet": "Gryphatrice-Jubilant", + "eggs": { + "Cactus": 1, + "Fox": 2, + "Wolf": 1 + }, + "hatchingPotions": { + "CottonCandyBlue": 1, + "RoyalPurple": 1 + }, + "food": { + "Meat": 2, + "Chocolate": 1, + "CottonCandyPink": 1, + "Candy_Zombie": 1 + }, + "mounts": { + "Velociraptor-Base": true, + "Gryphon-Gryphatrice": true + }, + "currentMount": "Gryphon-Gryphatrice", + "quests": { + "dustbunnies": 1, + "vice1": 1, + "atom1": 1, + "moonstone1": 1, + 
"goldenknight1": 1, + "basilist": 1 + }, + "lastDrop": { + "date": "2024-10-31T16:13:34.952Z", + "count": 0 + } + }, + "party": { + "quest": { + "progress": { + "up": 0, + "down": 0, + "collectedItems": 0, + "collect": {} + }, + "RSVPNeeded": false, + "key": "dustbunnies" + }, + "order": "level", + "orderAscending": "ascending", + "_id": "94cd398c-2240-4320-956e-6d345cf2c0de" + }, + "preferences": { + "size": "slim", + "hair": { + "color": "red", + "base": 3, + "bangs": 1, + "beard": 0, + "mustache": 0, + "flower": 1 + }, + "skin": "915533", + "shirt": "blue", + "chair": "handleless_pink", + "costume": false, + "sleep": false, + "disableClasses": false, + "tasks": { + "groupByChallenge": false, + "confirmScoreNotes": false, + "mirrorGroupTasks": [], + "activeFilter": { + "habit": "all", + "daily": "all", + "todo": "remaining", + "reward": "all" + } + }, + "background": "violet" + }, + "profile": { + "name": "test-user" + }, + "stats": { + "hp": 50, + "mp": 150.8, + "exp": 127, + "gp": 19.08650199252128, + "lvl": 99, + "class": "wizard", + "points": 0, + "str": 0, + "con": 0, + "int": 0, + "per": 0, + "buffs": { + "str": 50, + "int": 50, + "per": 50, + "con": 50, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "training": { + "int": 0, + "per": 0, + "str": 0, + "con": 0 + }, + "toNextLevel": 3580, + "maxHealth": 50, + "maxMP": 228 + }, + "inbox": { + "optOut": false + }, + "loginIncentives": 6, + "id": "a380546a-94be-4b8e-8a0b-23e0d5c03303" + }, + { + "_id": "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + "auth": { + "local": { + "username": "test-partymember-username" + }, + "timestamps": { + "created": "2024-10-10T15:57:01.106Z", + "loggedin": "2024-10-30T19:37:01.970Z", + "updated": "2024-10-30T19:38:25.968Z" + } + }, + "achievements": { + "ultimateGearSets": { + "healer": false, + "wizard": false, + "rogue": false, + "warrior": false + }, + "streak": 0, + "challenges": [], + "perfect": 1, + "quests": {}, + "completedTask": true, + "partyUp": true, + "snowball": 1, + "spookySparkles": 1, + "seafoam": 1, + "shinySeed": 1 + }, + "backer": {}, + "contributor": {}, + "flags": { + "verifiedUsername": true, + "classSelected": false + }, + "items": { + "gear": { + "equipped": { + "armor": "armor_base_0", + "head": "head_base_0", + "shield": "shield_base_0" + }, + "costume": { + "armor": "armor_base_0", + "head": "head_base_0", + "shield": "shield_base_0" + }, + "owned": { + "headAccessory_special_blackHeadband": true, + "headAccessory_special_blueHeadband": true, + "headAccessory_special_greenHeadband": true, + "headAccessory_special_pinkHeadband": true, + "headAccessory_special_redHeadband": true, + "headAccessory_special_whiteHeadband": true, + "headAccessory_special_yellowHeadband": true, + "eyewear_special_blackTopFrame": true, + "eyewear_special_blueTopFrame": true, + "eyewear_special_greenTopFrame": true, + "eyewear_special_pinkTopFrame": true, + "eyewear_special_redTopFrame": true, + "eyewear_special_whiteTopFrame": true, + "eyewear_special_yellowTopFrame": true, + "eyewear_special_blackHalfMoon": true, + "eyewear_special_blueHalfMoon": true, + "eyewear_special_greenHalfMoon": true, + "eyewear_special_pinkHalfMoon": true, + "eyewear_special_redHalfMoon": true, + "eyewear_special_whiteHalfMoon": true, + "eyewear_special_yellowHalfMoon": true, + "armor_special_bardRobes": true + } + }, + "special": { + "snowball": 0, + "spookySparkles": 0, + "shinySeed": 0, + "seafoam": 0, + "valentine": 0, + "valentineReceived": 
[], + "nye": 0, + "nyeReceived": [], + "greeting": 0, + "greetingReceived": [], + "thankyou": 0, + "thankyouReceived": [], + "birthday": 0, + "birthdayReceived": [], + "congrats": 0, + "congratsReceived": [], + "getwell": 0, + "getwellReceived": [], + "goodluck": 0, + "goodluckReceived": [] + }, + "lastDrop": { + "count": 0, + "date": "2024-10-30T19:37:01.838Z" + }, + "currentPet": "", + "currentMount": "", + "pets": {}, + "eggs": { + "BearCub": 1, + "Cactus": 1 + }, + "hatchingPotions": { + "Skeleton": 1 + }, + "food": { + "Candy_Red": 1 + }, + "mounts": {}, + "quests": { + "dustbunnies": 1 + } + }, + "party": { + "quest": { + "progress": { + "up": 0, + "down": 0, + "collectedItems": 0, + "collect": {} + }, + "RSVPNeeded": true, + "key": "dustbunnies" + }, + "order": "level", + "orderAscending": "ascending", + "_id": "94cd398c-2240-4320-956e-6d345cf2c0de" + }, + "preferences": { + "size": "slim", + "hair": { + "color": "red", + "base": 3, + "bangs": 1, + "beard": 0, + "mustache": 0, + "flower": 1 + }, + "skin": "915533", + "shirt": "blue", + "chair": "none", + "costume": false, + "sleep": false, + "disableClasses": false, + "tasks": { + "groupByChallenge": false, + "confirmScoreNotes": false, + "mirrorGroupTasks": [], + "activeFilter": { + "habit": "all", + "daily": "all", + "todo": "remaining", + "reward": "all" + } + }, + "background": "violet" + }, + "profile": { + "name": "test-partymember-displayname" + }, + "stats": { + "buffs": { + "str": 1, + "int": 1, + "per": 1, + "con": 1, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": true, + "snowball": false, + "spookySparkles": false + }, + "training": { + "int": 0, + "per": 0, + "str": 0, + "con": 0 + }, + "hp": 50, + "mp": 24, + "exp": 24, + "gp": 4, + "lvl": 1, + "class": "warrior", + "points": 0, + "str": 0, + "con": 0, + "int": 0, + "per": 0, + "toNextLevel": 25, + "maxHealth": 50, + "maxMP": 32 + }, + "inbox": { + "optOut": false + }, + "loginIncentives": 1, + "id": "ffce870c-3ff3-4fa4-bad1-87612e52b8e7" + } + ], + "notifications": [], + "userV": 96, + "appVersion": "5.29.0" +} diff --git a/tests/components/habitica/fixtures/quest_invitation_off.json b/tests/components/habitica/fixtures/quest_invitation_off.json new file mode 100644 index 00000000000..0f191696476 --- /dev/null +++ b/tests/components/habitica/fixtures/quest_invitation_off.json @@ -0,0 +1,66 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 0, + "int": 0, + "per": 0, + "con": 0, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 0, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "tasksOrder": { + "rewards": ["5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b"], + "todos": [ + "88de7cd9-af2b-49ce-9afd-bf941d87336b", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "1aa3137e-ef72-4d1f-91ee-41933602f438", + "86ea2475-d1b5-4020-bdcc-c188c7996afa" + ], + "dailys": [ + "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", + "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", + "e97659e0-2c42-4599-a7bb-00282adc410d", + "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "f2c85972-1a19-4426-bc6d-ce3337b9d99f", + 
"2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" + ], + "habits": ["1d147de6-5c02-4740-8e2f-71d3015a37f4"] + }, + "party": { + "quest": { + "RSVPNeeded": false, + "key": null + } + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z" + } +} diff --git a/tests/components/habitica/fixtures/rogue_fixture.json b/tests/components/habitica/fixtures/rogue_fixture.json new file mode 100644 index 00000000000..b6fcd9f1427 --- /dev/null +++ b/tests/components/habitica/fixtures/rogue_fixture.json @@ -0,0 +1,60 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 0, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "rogue", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_rogue_5", + "armor": "armor_rogue_5", + "head": "head_rogue_5", + "shield": "shield_rogue_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/rogue_skills_unavailable.json b/tests/components/habitica/fixtures/rogue_skills_unavailable.json new file mode 100644 index 00000000000..b3bada649fa --- /dev/null +++ b/tests/components/habitica/fixtures/rogue_skills_unavailable.json @@ -0,0 +1,59 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": true, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 20, + "exp": 737, + "gp": 0, + "lvl": 38, + "class": "rogue", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_rogue_5", + "armor": "armor_rogue_5", + "head": "head_rogue_5", + "shield": "shield_rogue_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/rogue_stealth_unavailable.json b/tests/components/habitica/fixtures/rogue_stealth_unavailable.json new file mode 100644 index 00000000000..9478feb91fa --- /dev/null +++ b/tests/components/habitica/fixtures/rogue_stealth_unavailable.json @@ -0,0 +1,59 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 4, + "streaks": false, + "seafoam": 
false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50, + "exp": 737, + "gp": 0, + "lvl": 38, + "class": "rogue", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_rogue_5", + "armor": "armor_rogue_5", + "head": "head_rogue_5", + "shield": "shield_rogue_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/score_with_drop.json b/tests/components/habitica/fixtures/score_with_drop.json new file mode 100644 index 00000000000..f25838d6c37 --- /dev/null +++ b/tests/components/habitica/fixtures/score_with_drop.json @@ -0,0 +1,69 @@ +{ + "success": true, + "data": { + "delta": 0.9999999781878414, + "_tmp": { + "quest": { + "progressDelta": 1.049999977097233 + }, + "drop": { + "value": 3, + "key": "Dragon", + "type": "Egg", + "dialog": "You've found a Dragon Egg!" + } + }, + "buffs": { + "str": 0, + "int": 0, + "per": 0, + "con": 0, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "training": { + "int": 0, + "per": 0, + "str": 0, + "con": 0 + }, + "hp": 25.100000000000016, + "mp": 24, + "exp": 196, + "gp": 30.453660284128997, + "lvl": 20, + "class": "warrior", + "points": 2, + "str": 0, + "con": 0, + "int": 0, + "per": 0 + }, + "notifications": [ + { + "type": "ITEM_RECEIVED", + "data": { + "icon": "notif_orca_mount", + "title": "Orcas for Summer Splash!", + "text": "To celebrate Summer Splash, we've given you an Orca Mount!", + "destination": "stable" + }, + "seen": true, + "id": "b7a85df1-06ed-4ab1-b56d-43418fc6a5e5" + }, + { + "type": "UNALLOCATED_STATS_POINTS", + "data": { + "points": 2 + }, + "seen": true, + "id": "bc3f8a69-231f-4eb1-ba48-a00b6c0e0f37" + } + ], + "userV": 623, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/tasks.json b/tests/components/habitica/fixtures/tasks.json new file mode 100644 index 00000000000..a4942063612 --- /dev/null +++ b/tests/components/habitica/fixtures/tasks.json @@ -0,0 +1,610 @@ +{ + "success": true, + "data": [ + { + "_id": "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", + "up": true, + "down": true, + "counterUp": 0, + "counterDown": 0, + "frequency": "daily", + "history": [], + "type": "habit", + "text": "Gesundes Essen/Junkfood", + "notes": "", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-07-07T17:51:53.268Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a" + }, + { + "_id": "1d147de6-5c02-4740-8e2f-71d3015a37f4", + "up": true, + "down": false, + "counterUp": 0, + "counterDown": 0, + "frequency": "daily", + "history": [ + { + "date": 1720376763324, + "value": 1, + "scoredUp": 1, + "scoredDown": 0 + } + ], + "type": "habit", + "text": "Eine kurze Pause machen", + "notes": "", + "tags": [], + "value": 0, + "priority": 1, + 
"attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.266Z", + "updatedAt": "2024-07-12T09:58:45.438Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "1d147de6-5c02-4740-8e2f-71d3015a37f4" + }, + { + "_id": "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", + "up": false, + "down": true, + "counterUp": 0, + "counterDown": 0, + "frequency": "daily", + "history": [], + "type": "habit", + "text": "Klicke hier um dies als schlechte Gewohnheit zu markieren, die Du gerne loswerden möchtest", + "notes": "Oder lösche es über die Bearbeitungs-Ansicht", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.265Z", + "updatedAt": "2024-07-07T17:51:53.265Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "bc1d1855-b2b8-4663-98ff-62e7b763dfc4" + }, + { + "_id": "e97659e0-2c42-4599-a7bb-00282adc410d", + "up": true, + "down": false, + "counterUp": 0, + "counterDown": 0, + "frequency": "daily", + "history": [ + { + "date": 1720376763140, + "value": 1, + "scoredUp": 1, + "scoredDown": 0 + } + ], + "type": "habit", + "text": "Füge eine Aufgabe zu Habitica hinzu", + "notes": "Eine Gewohnheit, eine Tagesaufgabe oder ein To-Do", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.264Z", + "updatedAt": "2024-07-12T09:58:45.438Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "e97659e0-2c42-4599-a7bb-00282adc410d", + "alias": "create_a_task" + }, + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "weekly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": [ + "Mon Sep 23 2024 00:00:00 GMT+0200", + "Tue Sep 24 2024 00:00:00 GMT+0200", + "Wed Sep 25 2024 00:00:00 GMT+0200", + "Thu Sep 26 2024 00:00:00 GMT+0200", + "Fri Sep 27 2024 00:00:00 GMT+0200", + "Sat Sep 28 2024 00:00:00 GMT+0200" + ], + "yesterDaily": true, + "history": [ + { + "date": 1720376766749, + "value": 1, + "isDue": true, + "completed": true + }, + { + "date": 1720545311292, + "value": 0.02529999999999999, + "isDue": true, + "completed": false + }, + { + "date": 1720564306719, + "value": -0.9740518837628547, + "isDue": true, + "completed": false + }, + { + "date": 1720691096907, + "value": 0.051222853419153, + "isDue": true, + "completed": true + }, + { + "date": 1720778325243, + "value": 1.0499115128458676, + "isDue": true, + "completed": true + }, + { + "date": 1724185196447, + "value": 0.07645736684721605, + "isDue": true, + "completed": false + }, + { + "date": 1724255707692, + "value": -0.921585289356988, + "isDue": true, + "completed": false + }, + { + "date": 1726846163640, + "value": -1.9454824860630637, + "isDue": true, + "completed": false + }, + { + "date": 1726953787542, + "value": -2.9966001649571803, + "isDue": true, + "completed": false + }, + { + "date": 1726956115608, + "value": -4.07641493832036, + "isDue": true, + "completed": false + }, + { + "date": 1726957460150, + "value": -2.9663035443712333, + "isDue": true, + "completed": true + } + ], + "completed": true, + "collapseChecklist": false, + "type": "daily", + "text": 
"Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-07-06T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": true, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + }, + { + "_id": "f2c85972-1a19-4426-bc6d-ce3337b9d99f", + "frequency": "weekly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 0, + "nextDue": [ + "2024-09-22T22:00:00.000Z", + "2024-09-23T22:00:00.000Z", + "2024-09-24T22:00:00.000Z", + "2024-09-25T22:00:00.000Z", + "2024-09-26T22:00:00.000Z", + "2024-09-27T22:00:00.000Z" + ], + "yesterDaily": true, + "history": [ + { + "date": 1720374903074, + "value": 1, + "isDue": true, + "completed": true + }, + { + "date": 1720545311291, + "value": 0.02529999999999999, + "isDue": true, + "completed": false + }, + { + "date": 1720564306717, + "value": -0.9740518837628547, + "isDue": true, + "completed": false + }, + { + "date": 1720682459722, + "value": 0.051222853419153, + "isDue": true, + "completed": true + }, + { + "date": 1720778325246, + "value": 1.0499115128458676, + "isDue": true, + "completed": true + }, + { + "date": 1720778492219, + "value": 2.023365658844519, + "isDue": true, + "completed": true + }, + { + "date": 1724255707691, + "value": 1.0738942424964806, + "isDue": true, + "completed": false + }, + { + "date": 1726846163638, + "value": 0.10103816898038132, + "isDue": true, + "completed": false + }, + { + "date": 1726953787540, + "value": -0.8963760215867302, + "isDue": true, + "completed": false + }, + { + "date": 1726956115607, + "value": -1.919611992979862, + "isDue": true, + "completed": false + } + ], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "5 Minuten ruhig durchatmen", + "notes": "Klicke um Deinen Terminplan festzulegen!", + "tags": [], + "value": -1.919611992979862, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-07-06T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [ + { + "id": "1491d640-6b21-4d0c-8940-0b7aa61c8836", + "time": "2024-09-22T20:00:00.0000Z" + } + ], + "createdAt": "2024-07-07T17:51:53.266Z", + "updatedAt": "2024-09-21T22:51:41.756Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": true, + "id": "f2c85972-1a19-4426-bc6d-ce3337b9d99f" + }, + { + "_id": "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1", + "frequency": "weekly", + "everyX": 1, + "startDate": "2024-09-21T22:00:00.000Z", + "repeat": { + "m": false, + "t": false, + "w": true, + "th": false, + "f": false, + "s": true, + "su": true + }, + "streak": 0, + "daysOfMonth": [], + "weeksOfMonth": [], + "nextDue": [ + "2024-09-24T22:00:00.000Z", + "2024-09-27T22:00:00.000Z", + "2024-09-28T22:00:00.000Z", + "2024-10-01T22:00:00.000Z", + "2024-10-04T22:00:00.000Z", + "2024-10-08T22:00:00.000Z" + ], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "daily", + "text": "Fitnessstudio besuchen", + "notes": "Ein einstündiges Workout im 
Fitnessstudio absolvieren.", + "tags": ["51076966-2970-4b40-b6ba-d58c6a756dd7"], + "value": 0, + "priority": 2, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-22T11:44:43.774Z", + "updatedAt": "2024-09-22T11:44:43.774Z", + "userId": "1343a9af-d891-4027-841a-956d105ca408", + "isDue": true, + "id": "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" + }, + { + "_id": "88de7cd9-af2b-49ce-9afd-bf941d87336b", + "date": "2024-09-27T22:17:00.000Z", + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Buch zu Ende lesen", + "notes": "Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:17:57.816Z", + "updatedAt": "2024-09-21T22:17:57.816Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "88de7cd9-af2b-49ce-9afd-bf941d87336b" + }, + { + "_id": "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "date": "2024-08-31T22:16:00.000Z", + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Rechnungen bezahlen", + "notes": "Strom- und Internetrechnungen rechtzeitig überweisen.", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [ + { + "id": "91c09432-10ac-4a49-bd20-823081ec29ed", + "time": "2024-09-22T02:00:00.0000Z" + } + ], + "byHabitica": false, + "createdAt": "2024-09-21T22:17:19.513Z", + "updatedAt": "2024-09-21T22:19:35.576Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "alias": "pay_bills" + }, + { + "_id": "1aa3137e-ef72-4d1f-91ee-41933602f438", + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Garten pflegen", + "notes": "Rasen mähen und die Pflanzen gießen.", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:16:38.153Z", + "updatedAt": "2024-09-21T22:16:38.153Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "1aa3137e-ef72-4d1f-91ee-41933602f438" + }, + { + "_id": "86ea2475-d1b5-4020-bdcc-c188c7996afa", + "date": "2024-09-21T22:00:00.000Z", + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Wochenendausflug planen", + "notes": "Den Ausflug für das kommende Wochenende organisieren.", + "tags": ["51076966-2970-4b40-b6ba-d58c6a756dd7"], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:16:16.756Z", + "updatedAt": "2024-09-21T22:16:16.756Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "86ea2475-d1b5-4020-bdcc-c188c7996afa" + }, + { + "_id": "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + "type": "reward", + "text": "Belohne Dich selbst", + "notes": "Schaue fern, spiele ein Spiel, gönne Dir einen Leckerbissen, es liegt ganz bei Dir!", + "tags": [], + "value": 10, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + 
"assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.266Z", + "updatedAt": "2024-07-07T17:51:53.266Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b" + }, + { + "repeat": { + "m": false, + "t": false, + "w": false, + "th": false, + "f": false, + "s": false, + "su": true + }, + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "_id": "6e53f1f5-a315-4edd-984d-8d762e4a08ef", + "frequency": "monthly", + "everyX": 1, + "streak": 1, + "nextDue": [ + "2024-12-14T23:00:00.000Z", + "2025-01-18T23:00:00.000Z", + "2025-02-15T23:00:00.000Z", + "2025-03-15T23:00:00.000Z", + "2025-04-19T23:00:00.000Z", + "2025-05-17T23:00:00.000Z" + ], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Arbeite an einem kreativen Projekt", + "notes": "Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!", + "tags": [], + "value": -0.9215181434950852, + "priority": 1, + "attribute": "str", + "byHabitica": false, + "startDate": "2024-09-20T23:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [3], + "checklist": [], + "reminders": [], + "createdAt": "2024-10-10T15:57:14.304Z", + "updatedAt": "2024-11-27T23:47:29.986Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "6e53f1f5-a315-4edd-984d-8d762e4a08ef" + } + ], + "notifications": [ + { + "type": "ITEM_RECEIVED", + "data": { + "icon": "notif_orca_mount", + "title": "Orcas for Summer Splash!", + "text": "To celebrate Summer Splash, we've given you an Orca Mount!", + "destination": "stable" + }, + "seen": true, + "id": "b7a85df1-06ed-4ab1-b56d-43418fc6a5e5" + }, + { + "type": "UNALLOCATED_STATS_POINTS", + "data": { + "points": 2 + }, + "seen": true, + "id": "bc3f8a69-231f-4eb1-ba48-a00b6c0e0f37" + } + ], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/user.json b/tests/components/habitica/fixtures/user.json new file mode 100644 index 00000000000..ed41a306a03 --- /dev/null +++ b/tests/components/habitica/fixtures/user.json @@ -0,0 +1,88 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "auth": { "local": { "username": "test-username" } }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 0, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "tasksOrder": { + "rewards": ["5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b"], + "todos": [ + "88de7cd9-af2b-49ce-9afd-bf941d87336b", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "1aa3137e-ef72-4d1f-91ee-41933602f438", + "86ea2475-d1b5-4020-bdcc-c188c7996afa" + ], + "dailys": [ + "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", + "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", + "e97659e0-2c42-4599-a7bb-00282adc410d", + "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "f2c85972-1a19-4426-bc6d-ce3337b9d99f", + "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1", + "6e53f1f5-a315-4edd-984d-8d762e4a08ef" + ], + "habits": 
["1d147de6-5c02-4740-8e2f-71d3015a37f4"] + }, + "party": { + "quest": { + "RSVPNeeded": true, + "key": "dustbunnies" + }, + "_id": "94cd398c-2240-4320-956e-6d345cf2c0de" + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "id": "a380546a-94be-4b8e-8a0b-23e0d5c03303", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/warrior_fixture.json b/tests/components/habitica/fixtures/warrior_fixture.json new file mode 100644 index 00000000000..97ad9e5b060 --- /dev/null +++ b/tests/components/habitica/fixtures/warrior_fixture.json @@ -0,0 +1,60 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "warrior", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/warrior_skills_unavailable.json b/tests/components/habitica/fixtures/warrior_skills_unavailable.json new file mode 100644 index 00000000000..f25ca484cba --- /dev/null +++ b/tests/components/habitica/fixtures/warrior_skills_unavailable.json @@ -0,0 +1,59 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 10, + "exp": 737, + "gp": 0, + "lvl": 34, + "class": "warrior", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/wizard_fixture.json 
b/tests/components/habitica/fixtures/wizard_fixture.json new file mode 100644 index 00000000000..655c0ad1f0d --- /dev/null +++ b/tests/components/habitica/fixtures/wizard_fixture.json @@ -0,0 +1,60 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_wizard_5", + "armor": "armor_wizard_5", + "head": "head_wizard_5", + "shield": "shield_base_0", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/wizard_frost_unavailable.json b/tests/components/habitica/fixtures/wizard_frost_unavailable.json new file mode 100644 index 00000000000..d5634633a0d --- /dev/null +++ b/tests/components/habitica/fixtures/wizard_frost_unavailable.json @@ -0,0 +1,59 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": true, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50, + "exp": 737, + "gp": 0, + "lvl": 34, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_wizard_5", + "armor": "armor_wizard_5", + "head": "head_wizard_5", + "shield": "shield_base_0", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/wizard_skills_unavailable.json b/tests/components/habitica/fixtures/wizard_skills_unavailable.json new file mode 100644 index 00000000000..eaf5f6f55b8 --- /dev/null +++ b/tests/components/habitica/fixtures/wizard_skills_unavailable.json @@ -0,0 +1,59 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 10, + "exp": 737, + "gp": 0, + "lvl": 34, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + 
"preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_wizard_5", + "armor": "armor_wizard_5", + "head": "head_wizard_5", + "shield": "shield_base_0", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/snapshots/test_binary_sensor.ambr b/tests/components/habitica/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..c18f8f551c9 --- /dev/null +++ b/tests/components/habitica/snapshots/test_binary_sensor.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.test_user_pending_quest_invitation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_user_pending_quest_invitation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pending quest invitation', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_pending_quest', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.test_user_pending_quest_invitation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/inventory_quest_scroll_dustbunnies.png', + 'friendly_name': 'test-user Pending quest invitation', + }), + 'context': , + 'entity_id': 'binary_sensor.test_user_pending_quest_invitation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_button.ambr b/tests/components/habitica/snapshots/test_button.ambr new file mode 100644 index 00000000000..c8f92650874 --- /dev/null +++ b/tests/components/habitica/snapshots/test_button.ambr @@ -0,0 +1,1305 @@ +# serializer version: 1 +# name: test_buttons[healer_fixture][button.test_user_allocate_all_stat_points-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allocate all stat points', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_allocate_all_stat_points-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Allocate all stat points', + }), + 
'context': , + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_blessing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_blessing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Blessing', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_heal_all', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_blessing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_healAll.png', + 'friendly_name': 'test-user Blessing', + }), + 'context': , + 'entity_id': 'button.test_user_blessing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_buy_a_health_potion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_buy_a_health_potion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Buy a health potion', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_buy_a_health_potion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', + 'friendly_name': 'test-user Buy a health potion', + }), + 'context': , + 'entity_id': 'button.test_user_buy_a_health_potion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_healing_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_healing_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Healing light', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_heal', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_healing_light-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_heal.png', + 'friendly_name': 'test-user Healing light', + }), + 'context': , + 'entity_id': 'button.test_user_healing_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_protective_aura-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_protective_aura', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Protective aura', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_protect_aura', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_protective_aura-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_protectAura.png', + 'friendly_name': 'test-user Protective aura', + }), + 'context': , + 'entity_id': 'button.test_user_protective_aura', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_revive_from_death-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_revive_from_death', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Revive from death', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_revive', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_revive_from_death-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Revive from death', + }), + 'context': , + 'entity_id': 'button.test_user_revive_from_death', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_searing_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_searing_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Searing brightness', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 
'00000000-0000-0000-0000-000000000000_brightness', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_searing_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_brightness.png', + 'friendly_name': 'test-user Searing brightness', + }), + 'context': , + 'entity_id': 'button.test_user_searing_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_start_my_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_start_my_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start my day', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_start_my_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Start my day', + }), + 'context': , + 'entity_id': 'button.test_user_start_my_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_allocate_all_stat_points-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allocate all stat points', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_allocate_all_stat_points-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Allocate all stat points', + }), + 'context': , + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_buy_a_health_potion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_buy_a_health_potion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Buy a health potion', + 'platform': 'habitica', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_buy_a_health_potion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', + 'friendly_name': 'test-user Buy a health potion', + }), + 'context': , + 'entity_id': 'button.test_user_buy_a_health_potion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_revive_from_death-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_revive_from_death', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Revive from death', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_revive', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_revive_from_death-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Revive from death', + }), + 'context': , + 'entity_id': 'button.test_user_revive_from_death', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_start_my_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_start_my_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start my day', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_start_my_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Start my day', + }), + 'context': , + 'entity_id': 'button.test_user_start_my_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_stealth-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_stealth', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stealth', + 'platform': 'habitica', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_stealth', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_stealth-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_stealth.png', + 'friendly_name': 'test-user Stealth', + }), + 'context': , + 'entity_id': 'button.test_user_stealth', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_tools_of_the_trade-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_tools_of_the_trade', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tools of the trade', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_tools_of_trade', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_tools_of_the_trade-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_toolsOfTrade.png', + 'friendly_name': 'test-user Tools of the trade', + }), + 'context': , + 'entity_id': 'button.test_user_tools_of_the_trade', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_allocate_all_stat_points-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allocate all stat points', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_allocate_all_stat_points-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Allocate all stat points', + }), + 'context': , + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_buy_a_health_potion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_buy_a_health_potion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': 
, + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Buy a health potion', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_buy_a_health_potion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', + 'friendly_name': 'test-user Buy a health potion', + }), + 'context': , + 'entity_id': 'button.test_user_buy_a_health_potion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_defensive_stance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_defensive_stance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Defensive stance', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_defensive_stance', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_defensive_stance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_defensiveStance.png', + 'friendly_name': 'test-user Defensive stance', + }), + 'context': , + 'entity_id': 'button.test_user_defensive_stance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_intimidating_gaze-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_intimidating_gaze', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Intimidating gaze', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_intimidate', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_intimidating_gaze-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_intimidate.png', + 'friendly_name': 'test-user Intimidating gaze', + }), + 'context': , + 'entity_id': 'button.test_user_intimidating_gaze', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_revive_from_death-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, 
+ 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_revive_from_death', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Revive from death', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_revive', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_revive_from_death-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Revive from death', + }), + 'context': , + 'entity_id': 'button.test_user_revive_from_death', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_start_my_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_start_my_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start my day', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_start_my_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Start my day', + }), + 'context': , + 'entity_id': 'button.test_user_start_my_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_valorous_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_valorous_presence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Valorous presence', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_valorous_presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_valorous_presence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_valorousPresence.png', + 'friendly_name': 'test-user Valorous presence', + }), + 'context': , + 'entity_id': 'button.test_user_valorous_presence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_allocate_all_stat_points-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allocate all stat points', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_allocate_all_stat_points-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Allocate all stat points', + }), + 'context': , + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_buy_a_health_potion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_buy_a_health_potion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Buy a health potion', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_buy_a_health_potion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', + 'friendly_name': 'test-user Buy a health potion', + }), + 'context': , + 'entity_id': 'button.test_user_buy_a_health_potion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_chilling_frost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_chilling_frost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Chilling frost', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_frost', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_chilling_frost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_frost.png', + 'friendly_name': 'test-user Chilling frost', + }), + 'context': , + 'entity_id': 
'button.test_user_chilling_frost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_earthquake-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_earthquake', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Earthquake', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_earth', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_earthquake-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_earth.png', + 'friendly_name': 'test-user Earthquake', + }), + 'context': , + 'entity_id': 'button.test_user_earthquake', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_ethereal_surge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_ethereal_surge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ethereal surge', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_mpheal', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_ethereal_surge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_mpheal.png', + 'friendly_name': 'test-user Ethereal surge', + }), + 'context': , + 'entity_id': 'button.test_user_ethereal_surge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_revive_from_death-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_revive_from_death', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Revive from death', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_revive', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_revive_from_death-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'test-user Revive from death', + }), + 'context': , + 'entity_id': 'button.test_user_revive_from_death', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_start_my_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_start_my_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start my day', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_start_my_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Start my day', + }), + 'context': , + 'entity_id': 'button.test_user_start_my_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_calendar.ambr b/tests/components/habitica/snapshots/test_calendar.ambr new file mode 100644 index 00000000000..5e010a33c84 --- /dev/null +++ b/tests/components/habitica/snapshots/test_calendar.ambr @@ -0,0 +1,1110 @@ +# serializer version: 1 +# name: test_api_events[date range in the past-calendar.test_user_dailies] + list([ + ]) +# --- +# name: test_api_events[date range in the past-calendar.test_user_daily_reminders] + list([ + ]) +# --- +# name: test_api_events[date range in the past-calendar.test_user_to_do_reminders] + list([ + ]) +# --- +# name: test_api_events[date range in the past-calendar.test_user_to_do_s] + list([ + ]) +# --- +# name: test_api_events[default date range-calendar.test_user_dailies] + list([ + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-22', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-21', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-22', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-21', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-23', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-22', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-23', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-22', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', 
+ }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-23', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-22', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-24', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-23', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-24', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-23', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-25', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-24', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-25', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-24', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-26', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-25', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-26', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-25', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-26', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-25', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-27', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-26', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-27', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-26', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 
'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-28', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-27', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-28', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-27', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-29', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-28', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-29', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-28', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-29', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-28', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-30', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-29', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-30', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-29', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-30', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-29', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-01', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-30', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-01', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-30', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um 
Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-02', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-01', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-02', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-01', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-03', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-02', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-03', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-02', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-10-03', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-10-02', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-04', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-03', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-04', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-03', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-05', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-04', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-05', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-04', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-06', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-05', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': 
dict({ + 'date': '2024-10-06', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-05', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-10-06', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-10-05', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-07', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-06', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-07', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-06', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-10-07', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-10-06', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-08', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-07', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-08', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-07', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + ]) +# --- +# name: test_api_events[default date range-calendar.test_user_daily_reminders] + list([ + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-21T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-21T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-22T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-22T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-23T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': 
'2024-09-23T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-24T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-24T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-25T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-25T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-26T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-26T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-27T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-27T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-28T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-28T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-29T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-29T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-30T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-30T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-01T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-01T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-02T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 
None, + 'start': dict({ + 'dateTime': '2024-10-02T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-03T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-03T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-04T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-04T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-05T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-05T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-06T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-06T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-07T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-07T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + ]) +# --- +# name: test_api_events[default date range-calendar.test_user_to_do_reminders] + list([ + dict({ + 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'end': dict({ + 'dateTime': '2024-09-22T03:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-22T02:00:00+02:00', + }), + 'summary': 'Rechnungen bezahlen', + 'uid': '2f6fcabc-f670-4ec3-ba65-817e8deea490_91c09432-10ac-4a49-bd20-823081ec29ed', + }), + ]) +# --- +# name: test_api_events[default date range-calendar.test_user_to_do_s] + list([ + dict({ + 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'end': dict({ + 'date': '2024-09-01', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'date': '2024-08-31', + }), + 'summary': 'Rechnungen bezahlen', + 'uid': '2f6fcabc-f670-4ec3-ba65-817e8deea490', + }), + dict({ + 'description': 'Den Ausflug für das kommende Wochenende organisieren.', + 'end': dict({ + 'date': '2024-09-22', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'date': '2024-09-21', + }), + 'summary': 'Wochenendausflug planen', + 'uid': '86ea2475-d1b5-4020-bdcc-c188c7996afa', + }), + dict({ + 'description': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig 
lesen.', + 'end': dict({ + 'date': '2024-09-28', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'date': '2024-09-27', + }), + 'summary': 'Buch zu Ende lesen', + 'uid': '88de7cd9-af2b-49ce-9afd-bf941d87336b', + }), + ]) +# --- +# name: test_calendar_platform[calendar.test_user_dailies-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.test_user_dailies', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dailies', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_dailys', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar_platform[calendar.test_user_dailies-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': True, + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end_time': '2024-09-22 00:00:00', + 'friendly_name': 'test-user Dailies', + 'location': '', + 'message': '5 Minuten ruhig durchatmen', + 'start_time': '2024-09-21 00:00:00', + 'yesterdaily': False, + }), + 'context': , + 'entity_id': 'calendar.test_user_dailies', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_calendar_platform[calendar.test_user_daily_reminders-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.test_user_daily_reminders', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily reminders', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_daily_reminders', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar_platform[calendar.test_user_daily_reminders-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': False, + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end_time': '2024-09-21 21:00:00', + 'friendly_name': 'test-user Daily reminders', + 'location': '', + 'message': '5 Minuten ruhig durchatmen', + 'start_time': '2024-09-21 20:00:00', + }), + 'context': , + 'entity_id': 'calendar.test_user_daily_reminders', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_calendar_platform[calendar.test_user_to_do_reminders-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.test_user_to_do_reminders', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'To-do reminders', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_todo_reminders', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar_platform[calendar.test_user_to_do_reminders-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': False, + 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'end_time': '2024-09-22 03:00:00', + 'friendly_name': 'test-user To-do reminders', + 'location': '', + 'message': 'Rechnungen bezahlen', + 'start_time': '2024-09-22 02:00:00', + }), + 'context': , + 'entity_id': 'calendar.test_user_to_do_reminders', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_calendar_platform[calendar.test_user_to_do_s-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.test_user_to_do_s', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': "To-Do's", + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_todos', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar_platform[calendar.test_user_to_do_s-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': True, + 'description': 'Den Ausflug für das kommende Wochenende organisieren.', + 'end_time': '2024-09-22 00:00:00', + 'friendly_name': "test-user To-Do's", + 'location': '', + 'message': 'Wochenendausflug planen', + 'start_time': '2024-09-21 00:00:00', + }), + 'context': , + 'entity_id': 'calendar.test_user_to_do_s', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_diagnostics.ambr b/tests/components/habitica/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..0d5f07d9a6c --- /dev/null +++ b/tests/components/habitica/snapshots/test_diagnostics.ambr @@ -0,0 +1,776 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'config_entry_data': dict({ + 'api_user': 'test-api-user', + 'url': 'https://habitica.com', + }), + 'habitica_data': dict({ + 'tasks': list([ + dict({ + '_id': 'f21fa608-cfc6-4413-9fc7-0eb1b48ca43a', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'counterDown': 0, + 'counterUp': 0, + 'createdAt': '2024-07-07T17:51:53.268Z', + 'down': True, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + ]), + 'id': 'f21fa608-cfc6-4413-9fc7-0eb1b48ca43a', + 'notes': '', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Gesundes Essen/Junkfood', + 'type': 'habit', + 'up': True, + 'updatedAt': '2024-07-07T17:51:53.268Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '1d147de6-5c02-4740-8e2f-71d3015a37f4', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'counterDown': 0, + 'counterUp': 0, + 'createdAt': '2024-07-07T17:51:53.266Z', + 'down': False, + 'frequency': 
'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + dict({ + 'date': 1720376763324, + 'scoredDown': 0, + 'scoredUp': 1, + 'value': 1, + }), + ]), + 'id': '1d147de6-5c02-4740-8e2f-71d3015a37f4', + 'notes': '', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Eine kurze Pause machen', + 'type': 'habit', + 'up': True, + 'updatedAt': '2024-07-12T09:58:45.438Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': 'bc1d1855-b2b8-4663-98ff-62e7b763dfc4', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'counterDown': 0, + 'counterUp': 0, + 'createdAt': '2024-07-07T17:51:53.265Z', + 'down': True, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + ]), + 'id': 'bc1d1855-b2b8-4663-98ff-62e7b763dfc4', + 'notes': 'Oder lösche es über die Bearbeitungs-Ansicht', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Klicke hier um dies als schlechte Gewohnheit zu markieren, die Du gerne loswerden möchtest', + 'type': 'habit', + 'up': False, + 'updatedAt': '2024-07-07T17:51:53.265Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': 'e97659e0-2c42-4599-a7bb-00282adc410d', + 'alias': 'create_a_task', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'counterDown': 0, + 'counterUp': 0, + 'createdAt': '2024-07-07T17:51:53.264Z', + 'down': False, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + dict({ + 'date': 1720376763140, + 'scoredDown': 0, + 'scoredUp': 1, + 'value': 1, + }), + ]), + 'id': 'e97659e0-2c42-4599-a7bb-00282adc410d', + 'notes': 'Eine Gewohnheit, eine Tagesaufgabe oder ein To-Do', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Füge eine Aufgabe zu Habitica hinzu', + 'type': 'habit', + 'up': True, + 'updatedAt': '2024-07-12T09:58:45.438Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': True, + 'createdAt': '2024-07-07T17:51:53.268Z', + 'daysOfMonth': list([ + ]), + 'everyX': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + dict({ + 'completed': True, + 'date': 1720376766749, + 'isDue': True, + 'value': 1, + }), + dict({ + 'completed': False, + 'date': 1720545311292, + 'isDue': True, + 'value': 0.02529999999999999, + }), + dict({ + 'completed': False, + 'date': 1720564306719, + 'isDue': True, + 'value': -0.9740518837628547, + }), + dict({ + 'completed': True, + 'date': 1720691096907, + 'isDue': True, + 'value': 0.051222853419153, + }), + dict({ + 'completed': True, + 'date': 1720778325243, + 'isDue': True, + 'value': 1.0499115128458676, + }), + dict({ + 'completed': False, + 'date': 1724185196447, + 'isDue': True, + 'value': 0.07645736684721605, + }), + dict({ + 'completed': False, + 'date': 1724255707692, + 'isDue': True, + 'value': -0.921585289356988, + }), + dict({ + 'completed': False, + 'date': 1726846163640, + 'isDue': True, + 'value': -1.9454824860630637, + }), + dict({ + 'completed': False, + 'date': 1726953787542, + 'isDue': True, + 'value': 
-2.9966001649571803, + }), + dict({ + 'completed': False, + 'date': 1726956115608, + 'isDue': True, + 'value': -4.07641493832036, + }), + dict({ + 'completed': True, + 'date': 1726957460150, + 'isDue': True, + 'value': -2.9663035443712333, + }), + ]), + 'id': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + 'isDue': True, + 'nextDue': list([ + 'Mon Sep 23 2024 00:00:00 GMT+0200', + 'Tue Sep 24 2024 00:00:00 GMT+0200', + 'Wed Sep 25 2024 00:00:00 GMT+0200', + 'Thu Sep 26 2024 00:00:00 GMT+0200', + 'Fri Sep 27 2024 00:00:00 GMT+0200', + 'Sat Sep 28 2024 00:00:00 GMT+0200', + ]), + 'notes': 'Klicke um Änderungen zu machen!', + 'priority': 1, + 'reminders': list([ + ]), + 'repeat': dict({ + 'f': True, + 'm': True, + 's': True, + 'su': True, + 't': True, + 'th': True, + 'w': True, + }), + 'startDate': '2024-07-06T22:00:00.000Z', + 'streak': 1, + 'tags': list([ + ]), + 'text': 'Zahnseide benutzen', + 'type': 'daily', + 'updatedAt': '2024-09-21T22:24:20.154Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': -2.9663035443712333, + 'weeksOfMonth': list([ + ]), + 'yesterDaily': True, + }), + dict({ + '_id': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-07-07T17:51:53.266Z', + 'daysOfMonth': list([ + ]), + 'everyX': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + dict({ + 'completed': True, + 'date': 1720374903074, + 'isDue': True, + 'value': 1, + }), + dict({ + 'completed': False, + 'date': 1720545311291, + 'isDue': True, + 'value': 0.02529999999999999, + }), + dict({ + 'completed': False, + 'date': 1720564306717, + 'isDue': True, + 'value': -0.9740518837628547, + }), + dict({ + 'completed': True, + 'date': 1720682459722, + 'isDue': True, + 'value': 0.051222853419153, + }), + dict({ + 'completed': True, + 'date': 1720778325246, + 'isDue': True, + 'value': 1.0499115128458676, + }), + dict({ + 'completed': True, + 'date': 1720778492219, + 'isDue': True, + 'value': 2.023365658844519, + }), + dict({ + 'completed': False, + 'date': 1724255707691, + 'isDue': True, + 'value': 1.0738942424964806, + }), + dict({ + 'completed': False, + 'date': 1726846163638, + 'isDue': True, + 'value': 0.10103816898038132, + }), + dict({ + 'completed': False, + 'date': 1726953787540, + 'isDue': True, + 'value': -0.8963760215867302, + }), + dict({ + 'completed': False, + 'date': 1726956115607, + 'isDue': True, + 'value': -1.919611992979862, + }), + ]), + 'id': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + 'isDue': True, + 'nextDue': list([ + '2024-09-22T22:00:00.000Z', + '2024-09-23T22:00:00.000Z', + '2024-09-24T22:00:00.000Z', + '2024-09-25T22:00:00.000Z', + '2024-09-26T22:00:00.000Z', + '2024-09-27T22:00:00.000Z', + ]), + 'notes': 'Klicke um Deinen Terminplan festzulegen!', + 'priority': 1, + 'reminders': list([ + dict({ + 'id': '1491d640-6b21-4d0c-8940-0b7aa61c8836', + 'time': '2024-09-22T20:00:00.0000Z', + }), + ]), + 'repeat': dict({ + 'f': True, + 'm': True, + 's': True, + 'su': True, + 't': True, + 'th': True, + 'w': True, + }), + 'startDate': '2024-07-06T22:00:00.000Z', + 'streak': 0, + 'tags': list([ + ]), + 'text': '5 Minuten ruhig durchatmen', + 'type': 'daily', + 'updatedAt': '2024-09-21T22:51:41.756Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': -1.919611992979862, + 'weeksOfMonth': list([ + ]), + 'yesterDaily': True, + }), + dict({ + 
'_id': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-22T11:44:43.774Z', + 'daysOfMonth': list([ + ]), + 'everyX': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + ]), + 'id': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + 'isDue': True, + 'nextDue': list([ + '2024-09-24T22:00:00.000Z', + '2024-09-27T22:00:00.000Z', + '2024-09-28T22:00:00.000Z', + '2024-10-01T22:00:00.000Z', + '2024-10-04T22:00:00.000Z', + '2024-10-08T22:00:00.000Z', + ]), + 'notes': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'priority': 2, + 'reminders': list([ + ]), + 'repeat': dict({ + 'f': False, + 'm': False, + 's': True, + 'su': True, + 't': False, + 'th': False, + 'w': True, + }), + 'startDate': '2024-09-21T22:00:00.000Z', + 'streak': 0, + 'tags': list([ + '51076966-2970-4b40-b6ba-d58c6a756dd7', + ]), + 'text': 'Fitnessstudio besuchen', + 'type': 'daily', + 'updatedAt': '2024-09-22T11:44:43.774Z', + 'userId': '1343a9af-d891-4027-841a-956d105ca408', + 'value': 0, + 'weeksOfMonth': list([ + ]), + 'yesterDaily': True, + }), + dict({ + '_id': '88de7cd9-af2b-49ce-9afd-bf941d87336b', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-21T22:17:57.816Z', + 'date': '2024-09-27T22:17:00.000Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '88de7cd9-af2b-49ce-9afd-bf941d87336b', + 'notes': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Buch zu Ende lesen', + 'type': 'todo', + 'updatedAt': '2024-09-21T22:17:57.816Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '2f6fcabc-f670-4ec3-ba65-817e8deea490', + 'alias': 'pay_bills', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-21T22:17:19.513Z', + 'date': '2024-08-31T22:16:00.000Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '2f6fcabc-f670-4ec3-ba65-817e8deea490', + 'notes': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'priority': 1, + 'reminders': list([ + dict({ + 'id': '91c09432-10ac-4a49-bd20-823081ec29ed', + 'time': '2024-09-22T02:00:00.0000Z', + }), + ]), + 'tags': list([ + ]), + 'text': 'Rechnungen bezahlen', + 'type': 'todo', + 'updatedAt': '2024-09-21T22:19:35.576Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '1aa3137e-ef72-4d1f-91ee-41933602f438', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-21T22:16:38.153Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '1aa3137e-ef72-4d1f-91ee-41933602f438', + 'notes': 'Rasen mähen und die Pflanzen gießen.', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Garten pflegen', + 'type': 'todo', + 'updatedAt': '2024-09-21T22:16:38.153Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + 
'_id': '86ea2475-d1b5-4020-bdcc-c188c7996afa', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-21T22:16:16.756Z', + 'date': '2024-09-21T22:00:00.000Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '86ea2475-d1b5-4020-bdcc-c188c7996afa', + 'notes': 'Den Ausflug für das kommende Wochenende organisieren.', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + '51076966-2970-4b40-b6ba-d58c6a756dd7', + ]), + 'text': 'Wochenendausflug planen', + 'type': 'todo', + 'updatedAt': '2024-09-21T22:16:16.756Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'createdAt': '2024-07-07T17:51:53.266Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b', + 'notes': 'Schaue fern, spiele ein Spiel, gönne Dir einen Leckerbissen, es liegt ganz bei Dir!', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Belohne Dich selbst', + 'type': 'reward', + 'updatedAt': '2024-07-07T17:51:53.266Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 10, + }), + dict({ + '_id': '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-10-10T15:57:14.304Z', + 'daysOfMonth': list([ + ]), + 'everyX': 1, + 'frequency': 'monthly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + ]), + 'id': '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + 'isDue': False, + 'nextDue': list([ + '2024-12-14T23:00:00.000Z', + '2025-01-18T23:00:00.000Z', + '2025-02-15T23:00:00.000Z', + '2025-03-15T23:00:00.000Z', + '2025-04-19T23:00:00.000Z', + '2025-05-17T23:00:00.000Z', + ]), + 'notes': 'Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!', + 'priority': 1, + 'reminders': list([ + ]), + 'repeat': dict({ + 'f': False, + 'm': False, + 's': False, + 'su': True, + 't': False, + 'th': False, + 'w': False, + }), + 'startDate': '2024-09-20T23:00:00.000Z', + 'streak': 1, + 'tags': list([ + ]), + 'text': 'Arbeite an einem kreativen Projekt', + 'type': 'daily', + 'updatedAt': '2024-11-27T23:47:29.986Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': -0.9215181434950852, + 'weeksOfMonth': list([ + 3, + ]), + 'yesterDaily': True, + }), + ]), + 'user': dict({ + 'api_user': 'test-api-user', + 'auth': dict({ + 'local': dict({ + 'username': 'test-username', + }), + }), + 'flags': dict({ + 'classSelected': True, + }), + 'id': 'a380546a-94be-4b8e-8a0b-23e0d5c03303', + 'items': dict({ + 'gear': dict({ + 'equipped': dict({ + 'armor': 'armor_warrior_5', + 'back': 'back_special_heroicAureole', + 'body': 'body_special_aetherAmulet', + 'eyewear': 'eyewear_armoire_plagueDoctorMask', + 'head': 'head_warrior_5', + 'headAccessory': 'headAccessory_armoire_gogglesOfBookbinding', + 'shield': 'shield_warrior_5', + 'weapon': 'weapon_warrior_5', + }), + }), + }), + 'lastCron': '2024-09-21T22:01:55.586Z', + 'needsCron': True, + 'party': dict({ + '_id': '94cd398c-2240-4320-956e-6d345cf2c0de', + 'quest': dict({ + 'RSVPNeeded': True, + 'key': 'dustbunnies', + }), + }), + 
'preferences': dict({ + 'automaticAllocation': True, + 'disableClasses': False, + 'language': 'en', + 'sleep': False, + }), + 'profile': dict({ + 'name': 'test-user', + }), + 'stats': dict({ + 'buffs': dict({ + 'con': 26, + 'int': 26, + 'per': 26, + 'seafoam': False, + 'shinySeed': False, + 'snowball': False, + 'spookySparkles': False, + 'stealth': 0, + 'str': 26, + 'streaks': False, + }), + 'class': 'wizard', + 'con': 15, + 'exp': 737, + 'gp': 137.62587214609795, + 'hp': 0, + 'int': 15, + 'lvl': 38, + 'maxHealth': 50, + 'maxMP': 166, + 'mp': 50.89999999999998, + 'per': 15, + 'points': 5, + 'str': 15, + 'toNextLevel': 880, + }), + 'tasksOrder': dict({ + 'dailys': list([ + 'f21fa608-cfc6-4413-9fc7-0eb1b48ca43a', + 'bc1d1855-b2b8-4663-98ff-62e7b763dfc4', + 'e97659e0-2c42-4599-a7bb-00282adc410d', + '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + ]), + 'habits': list([ + '1d147de6-5c02-4740-8e2f-71d3015a37f4', + ]), + 'rewards': list([ + '5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b', + ]), + 'todos': list([ + '88de7cd9-af2b-49ce-9afd-bf941d87336b', + '2f6fcabc-f670-4ec3-ba65-817e8deea490', + '1aa3137e-ef72-4d1f-91ee-41933602f438', + '86ea2475-d1b5-4020-bdcc-c188c7996afa', + ]), + }), + }), + }), + }) +# --- diff --git a/tests/components/habitica/snapshots/test_sensor.ambr b/tests/components/habitica/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..7e72d486276 --- /dev/null +++ b/tests/components/habitica/snapshots/test_sensor.ambr @@ -0,0 +1,1280 @@ +# serializer version: 1 +# name: test_sensors[sensor.test_user_class-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'warrior', + 'healer', + 'wizard', + 'rogue', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_class', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Class', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_class', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_user_class-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'test-user Class', + 'options': list([ + 'warrior', + 'healer', + 'wizard', + 'rogue', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_user_class', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'wizard', + }) +# --- +# name: test_sensors[sensor.test_user_constitution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_constitution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Constitution', + 'platform': 'habitica', + 'previous_unique_id': 
None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_constitution', + 'unit_of_measurement': 'CON', + }) +# --- +# name: test_sensors[sensor.test_user_constitution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 42, + 'friendly_name': 'test-user Constitution', + 'level': 19, + 'unit_of_measurement': 'CON', + }), + 'context': , + 'entity_id': 'sensor.test_user_constitution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '102', + }) +# --- +# name: test_sensors[sensor.test_user_dailies-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_dailies', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dailies', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_dailys', + 'unit_of_measurement': 'tasks', + }) +# --- +# name: test_sensors[sensor.test_user_dailies-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1': dict({ + 'created_at': '2024-09-22T11:44:43.774Z', + 'every_x': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'is_due': True, + 'next_due': list([ + '2024-09-24T22:00:00.000Z', + '2024-09-27T22:00:00.000Z', + '2024-09-28T22:00:00.000Z', + '2024-10-01T22:00:00.000Z', + '2024-10-04T22:00:00.000Z', + '2024-10-08T22:00:00.000Z', + ]), + 'notes': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'priority': 2, + 'repeat': dict({ + 'f': False, + 'm': False, + 's': True, + 'su': True, + 't': False, + 'th': False, + 'w': True, + }), + 'start_date': '2024-09-21T22:00:00.000Z', + 'tags': list([ + '51076966-2970-4b40-b6ba-d58c6a756dd7', + ]), + 'text': 'Fitnessstudio besuchen', + 'type': 'daily', + 'yester_daily': True, + }), + '564b9ac9-c53d-4638-9e7f-1cd96fe19baa': dict({ + 'completed': True, + 'created_at': '2024-07-07T17:51:53.268Z', + 'every_x': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'is_due': True, + 'next_due': list([ + 'Mon Sep 23 2024 00:00:00 GMT+0200', + 'Tue Sep 24 2024 00:00:00 GMT+0200', + 'Wed Sep 25 2024 00:00:00 GMT+0200', + 'Thu Sep 26 2024 00:00:00 GMT+0200', + 'Fri Sep 27 2024 00:00:00 GMT+0200', + 'Sat Sep 28 2024 00:00:00 GMT+0200', + ]), + 'notes': 'Klicke um Änderungen zu machen!', + 'priority': 1, + 'repeat': dict({ + 'f': True, + 'm': True, + 's': True, + 'su': True, + 't': True, + 'th': True, + 'w': True, + }), + 'start_date': '2024-07-06T22:00:00.000Z', + 'streak': 1, + 'text': 'Zahnseide benutzen', + 'type': 'daily', + 'value': -2.9663035443712333, + 'yester_daily': True, + }), + '6e53f1f5-a315-4edd-984d-8d762e4a08ef': dict({ + 'created_at': '2024-10-10T15:57:14.304Z', + 'every_x': 1, + 'frequency': 'monthly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'next_due': list([ + '2024-12-14T23:00:00.000Z', + '2025-01-18T23:00:00.000Z', + '2025-02-15T23:00:00.000Z', + '2025-03-15T23:00:00.000Z', 
+ '2025-04-19T23:00:00.000Z', + '2025-05-17T23:00:00.000Z', + ]), + 'notes': 'Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!', + 'priority': 1, + 'repeat': dict({ + 'f': False, + 'm': False, + 's': False, + 'su': True, + 't': False, + 'th': False, + 'w': False, + }), + 'start_date': '2024-09-20T23:00:00.000Z', + 'streak': 1, + 'text': 'Arbeite an einem kreativen Projekt', + 'type': 'daily', + 'value': -0.9215181434950852, + 'weeks_of_month': list([ + 3, + ]), + 'yester_daily': True, + }), + 'f2c85972-1a19-4426-bc6d-ce3337b9d99f': dict({ + 'created_at': '2024-07-07T17:51:53.266Z', + 'every_x': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'is_due': True, + 'next_due': list([ + '2024-09-22T22:00:00.000Z', + '2024-09-23T22:00:00.000Z', + '2024-09-24T22:00:00.000Z', + '2024-09-25T22:00:00.000Z', + '2024-09-26T22:00:00.000Z', + '2024-09-27T22:00:00.000Z', + ]), + 'notes': 'Klicke um Deinen Terminplan festzulegen!', + 'priority': 1, + 'repeat': dict({ + 'f': True, + 'm': True, + 's': True, + 'su': True, + 't': True, + 'th': True, + 'w': True, + }), + 'start_date': '2024-07-06T22:00:00.000Z', + 'text': '5 Minuten ruhig durchatmen', + 'type': 'daily', + 'value': -1.919611992979862, + 'yester_daily': True, + }), + 'friendly_name': 'test-user Dailies', + 'unit_of_measurement': 'tasks', + }), + 'context': , + 'entity_id': 'sensor.test_user_dailies', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensors[sensor.test_user_display_name-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_display_name', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display name', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_display_name', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_user_display_name-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Display name', + }), + 'context': , + 'entity_id': 'sensor.test_user_display_name', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'test-user', + }) +# --- +# name: test_sensors[sensor.test_user_experience-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_experience', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Experience', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_experience', + 'unit_of_measurement': 'XP', + }) +# --- +# name: test_sensors[sensor.test_user_experience-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'friendly_name': 'test-user Experience', + 'unit_of_measurement': 'XP', + }), + 'context': , + 'entity_id': 'sensor.test_user_experience', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '737', + }) +# --- +# name: test_sensors[sensor.test_user_gems-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_gems', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Gems', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_gems', + 'unit_of_measurement': 'gems', + }) +# --- +# name: test_sensors[sensor.test_user_gems-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_gem.png', + 'friendly_name': 'test-user Gems', + 'unit_of_measurement': 'gems', + }), + 'context': , + 'entity_id': 'sensor.test_user_gems', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_user_gold-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_gold', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Gold', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_gold', + 'unit_of_measurement': 'GP', + }) +# --- +# name: test_sensors[sensor.test_user_gold-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Gold', + 'unit_of_measurement': 'GP', + }), + 'context': , + 'entity_id': 'sensor.test_user_gold', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '137.625872146098', + }) +# --- +# name: test_sensors[sensor.test_user_habits-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_habits', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Habits', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_habits', + 'unit_of_measurement': 'tasks', + }) +# --- +# name: test_sensors[sensor.test_user_habits-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'1d147de6-5c02-4740-8e2f-71d3015a37f4': dict({ + 'created_at': '2024-07-07T17:51:53.266Z', + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'priority': 1, + 'text': 'Eine kurze Pause machen', + 'type': 'habit', + 'up': True, + }), + 'bc1d1855-b2b8-4663-98ff-62e7b763dfc4': dict({ + 'created_at': '2024-07-07T17:51:53.265Z', + 'down': True, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Oder lösche es über die Bearbeitungs-Ansicht', + 'priority': 1, + 'text': 'Klicke hier um dies als schlechte Gewohnheit zu markieren, die Du gerne loswerden möchtest', + 'type': 'habit', + }), + 'e97659e0-2c42-4599-a7bb-00282adc410d': dict({ + 'created_at': '2024-07-07T17:51:53.264Z', + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Eine Gewohnheit, eine Tagesaufgabe oder ein To-Do', + 'priority': 1, + 'text': 'Füge eine Aufgabe zu Habitica hinzu', + 'type': 'habit', + 'up': True, + }), + 'f21fa608-cfc6-4413-9fc7-0eb1b48ca43a': dict({ + 'created_at': '2024-07-07T17:51:53.268Z', + 'down': True, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'priority': 1, + 'text': 'Gesundes Essen/Junkfood', + 'type': 'habit', + 'up': True, + }), + 'friendly_name': 'test-user Habits', + 'unit_of_measurement': 'tasks', + }), + 'context': , + 'entity_id': 'sensor.test_user_habits', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensors[sensor.test_user_health-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_health', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Health', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_health', + 'unit_of_measurement': 'HP', + }) +# --- +# name: test_sensors[sensor.test_user_health-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Health', + 'unit_of_measurement': 'HP', + }), + 'context': , + 'entity_id': 'sensor.test_user_health', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_user_intelligence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_intelligence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Intelligence', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 
'00000000-0000-0000-0000-000000000000_intelligence', + 'unit_of_measurement': 'INT', + }) +# --- +# name: test_sensors[sensor.test_user_intelligence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 12, + 'friendly_name': 'test-user Intelligence', + 'level': 19, + 'unit_of_measurement': 'INT', + }), + 'context': , + 'entity_id': 'sensor.test_user_intelligence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '72', + }) +# --- +# name: test_sensors[sensor.test_user_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Level', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_user_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Level', + }), + 'context': , + 'entity_id': 'sensor.test_user_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38', + }) +# --- +# name: test_sensors[sensor.test_user_mana-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_mana', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mana', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_mana', + 'unit_of_measurement': 'MP', + }) +# --- +# name: test_sensors[sensor.test_user_mana-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Mana', + 'unit_of_measurement': 'MP', + }), + 'context': , + 'entity_id': 'sensor.test_user_mana', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.9', + }) +# --- +# name: test_sensors[sensor.test_user_max_health-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_max_health', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Max. 
health', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_health_max', + 'unit_of_measurement': 'HP', + }) +# --- +# name: test_sensors[sensor.test_user_max_health-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Max. health', + 'unit_of_measurement': 'HP', + }), + 'context': , + 'entity_id': 'sensor.test_user_max_health', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensors[sensor.test_user_max_mana-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_max_mana', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Max. mana', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_mana_max', + 'unit_of_measurement': 'MP', + }) +# --- +# name: test_sensors[sensor.test_user_max_mana-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Max. mana', + 'unit_of_measurement': 'MP', + }), + 'context': , + 'entity_id': 'sensor.test_user_max_mana', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '166', + }) +# --- +# name: test_sensors[sensor.test_user_mystic_hourglasses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_mystic_hourglasses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mystic hourglasses', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_trinkets', + 'unit_of_measurement': '⧖', + }) +# --- +# name: test_sensors[sensor.test_user_mystic_hourglasses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/notif_subscriber_reward.png', + 'friendly_name': 'test-user Mystic hourglasses', + 'unit_of_measurement': '⧖', + }), + 'context': , + 'entity_id': 'sensor.test_user_mystic_hourglasses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_user_next_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_next_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Next level', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_experience_max', + 'unit_of_measurement': 'XP', + }) +# --- +# name: test_sensors[sensor.test_user_next_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Next level', + 'unit_of_measurement': 'XP', + }), + 'context': , + 'entity_id': 'sensor.test_user_next_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '880', + }) +# --- +# name: test_sensors[sensor.test_user_perception-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_perception', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Perception', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_perception', + 'unit_of_measurement': 'PER', + }) +# --- +# name: test_sensors[sensor.test_user_perception-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 15, + 'friendly_name': 'test-user Perception', + 'level': 19, + 'unit_of_measurement': 'PER', + }), + 'context': , + 'entity_id': 'sensor.test_user_perception', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75', + }) +# --- +# name: test_sensors[sensor.test_user_rewards-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_rewards', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Rewards', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_rewards', + 'unit_of_measurement': 'tasks', + }) +# --- +# name: test_sensors[sensor.test_user_rewards-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + '5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b': dict({ + 'created_at': '2024-07-07T17:51:53.266Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Schaue fern, spiele ein Spiel, gönne Dir einen Leckerbissen, es liegt ganz bei Dir!', + 'priority': 1, + 'text': 'Belohne Dich selbst', + 'type': 'reward', + 'value': 10, + }), + 'friendly_name': 'test-user Rewards', + 'unit_of_measurement': 'tasks', + }), + 'context': , + 'entity_id': 'sensor.test_user_rewards', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensors[sensor.test_user_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Strength', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_strength', + 'unit_of_measurement': 'STR', + }) +# --- +# name: test_sensors[sensor.test_user_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 44, + 'friendly_name': 'test-user Strength', + 'level': 19, + 'unit_of_measurement': 'STR', + }), + 'context': , + 'entity_id': 'sensor.test_user_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '104', + }) +# --- +# name: test_sensors[sensor.test_user_to_do_s-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_to_do_s', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': "To-Do's", + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_todos', + 'unit_of_measurement': 'tasks', + }) +# --- +# name: test_sensors[sensor.test_user_to_do_s-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + '1aa3137e-ef72-4d1f-91ee-41933602f438': dict({ + 'created_at': '2024-09-21T22:16:38.153Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Rasen mähen und die Pflanzen gießen.', + 'priority': 1, + 'text': 'Garten pflegen', + 'type': 'todo', + }), + '2f6fcabc-f670-4ec3-ba65-817e8deea490': dict({ + 'created_at': '2024-09-21T22:17:19.513Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'priority': 1, + 'text': 'Rechnungen bezahlen', + 'type': 'todo', + }), + '86ea2475-d1b5-4020-bdcc-c188c7996afa': dict({ + 'created_at': '2024-09-21T22:16:16.756Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Den Ausflug für das kommende Wochenende organisieren.', + 'priority': 1, + 'tags': list([ + '51076966-2970-4b40-b6ba-d58c6a756dd7', + ]), + 'text': 'Wochenendausflug planen', + 'type': 'todo', + }), + '88de7cd9-af2b-49ce-9afd-bf941d87336b': dict({ + 'created_at': '2024-09-21T22:17:57.816Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', + 'priority': 1, + 'text': 'Buch zu Ende lesen', + 'type': 'todo', + }), + 'friendly_name': "test-user To-Do's", + 'unit_of_measurement': 'tasks', + }), + 'context': , + 'entity_id': 'sensor.test_user_to_do_s', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_switch.ambr 
b/tests/components/habitica/snapshots/test_switch.ambr new file mode 100644 index 00000000000..3affbd11e2a --- /dev/null +++ b/tests/components/habitica/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_switch[switch.test_user_rest_in_the_inn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_user_rest_in_the_inn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rest in the inn', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_sleep', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.test_user_rest_in_the_inn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'test-user Rest in the inn', + }), + 'context': , + 'entity_id': 'switch.test_user_rest_in_the_inn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_todo.ambr b/tests/components/habitica/snapshots/test_todo.ambr new file mode 100644 index 00000000000..8c49cad5436 --- /dev/null +++ b/tests/components/habitica/snapshots/test_todo.ambr @@ -0,0 +1,196 @@ +# serializer version: 1 +# name: test_complete_todo_item[daily] + tuple( + 'Habitica', + ''' + ![Dragon](https://habitica-assets.s3.amazonaws.com/mobileApp/images/Pet_Egg_Dragon.png) + You've found a Dragon Egg! + ''', + ) +# --- +# name: test_complete_todo_item[todo] + tuple( + 'Habitica', + ''' + ![Dragon](https://habitica-assets.s3.amazonaws.com/mobileApp/images/Pet_Egg_Dragon.png) + You've found a Dragon Egg! 
+ ''', + ) +# --- +# name: test_todo_items[todo.test_user_dailies] + dict({ + 'todo.test_user_dailies': dict({ + 'items': list([ + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'due': '2024-09-22', + 'status': 'completed', + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'due': '2024-09-21', + 'status': 'needs_action', + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'due': '2024-09-21', + 'status': 'needs_action', + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!', + 'due': '2024-12-14', + 'status': 'needs_action', + 'summary': 'Arbeite an einem kreativen Projekt', + 'uid': '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + }), + ]), + }), + }) +# --- +# name: test_todo_items[todo.test_user_to_do_s] + dict({ + 'todo.test_user_to_do_s': dict({ + 'items': list([ + dict({ + 'description': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', + 'due': '2024-09-27', + 'status': 'needs_action', + 'summary': 'Buch zu Ende lesen', + 'uid': '88de7cd9-af2b-49ce-9afd-bf941d87336b', + }), + dict({ + 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'due': '2024-08-31', + 'status': 'needs_action', + 'summary': 'Rechnungen bezahlen', + 'uid': '2f6fcabc-f670-4ec3-ba65-817e8deea490', + }), + dict({ + 'description': 'Rasen mähen und die Pflanzen gießen.', + 'status': 'needs_action', + 'summary': 'Garten pflegen', + 'uid': '1aa3137e-ef72-4d1f-91ee-41933602f438', + }), + dict({ + 'description': 'Den Ausflug für das kommende Wochenende organisieren.', + 'due': '2024-09-21', + 'status': 'needs_action', + 'summary': 'Wochenendausflug planen', + 'uid': '86ea2475-d1b5-4020-bdcc-c188c7996afa', + }), + dict({ + 'description': 'Lebensmittel und Haushaltsbedarf für die Woche einkaufen.', + 'status': 'completed', + 'summary': 'Wocheneinkauf erledigen', + 'uid': '162f0bbe-a097-4a06-b4f4-8fbeed85d2ba', + }), + dict({ + 'description': 'Wohnzimmer und Küche gründlich aufräumen.', + 'status': 'completed', + 'summary': 'Wohnung aufräumen', + 'uid': '3fa06743-aa0f-472b-af1a-f27c755e329c', + }), + ]), + }), + }) +# --- +# name: test_todos[todo.test_user_dailies-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.test_user_dailies', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dailies', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_dailys', + 'unit_of_measurement': None, + }) +# --- +# name: test_todos[todo.test_user_dailies-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Dailies', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.test_user_dailies', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: 
test_todos[todo.test_user_to_do_s-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.test_user_to_do_s', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': "To-Do's", + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_todos', + 'unit_of_measurement': None, + }) +# --- +# name: test_todos[todo.test_user_to_do_s-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': "test-user To-Do's", + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.test_user_to_do_s', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- diff --git a/tests/components/habitica/test_binary_sensor.py b/tests/components/habitica/test_binary_sensor.py new file mode 100644 index 00000000000..1710f8f217e --- /dev/null +++ b/tests/components/habitica/test_binary_sensor.py @@ -0,0 +1,84 @@ +"""Tests for the Habitica binary sensor platform.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.habitica.const import ASSETS_URL, DEFAULT_URL, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture(autouse=True) +def binary_sensor_only() -> Generator[None]: + """Enable only the binary sensor platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.BINARY_SENSOR], + ): + yield + + +@pytest.mark.usefixtures("mock_habitica") +async def test_binary_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the Habitica binary sensor platform.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("fixture", "entity_state", "entity_picture"), + [ + ("user", STATE_ON, f"{ASSETS_URL}inventory_quest_scroll_dustbunnies.png"), + ("quest_invitation_off", STATE_OFF, None), + ], +) +async def test_pending_quest_states( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + fixture: str, + entity_state: str, + entity_picture: str | None, +) -> None: + """Test states of pending quest sensor.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture(f"{fixture}.json", DOMAIN), + ) + aioclient_mock.get(f"{DEFAULT_URL}/api/v3/tasks/user", json={"data": []}) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", 
DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert ( + state := hass.states.get("binary_sensor.test_user_pending_quest_invitation") + ) + assert state.state == entity_state + assert state.attributes.get("entity_picture") == entity_picture diff --git a/tests/components/habitica/test_button.py b/tests/components/habitica/test_button.py new file mode 100644 index 00000000000..09cc1c9d373 --- /dev/null +++ b/tests/components/habitica/test_button.py @@ -0,0 +1,411 @@ +"""Tests for Habitica button platform.""" + +from collections.abc import Generator +from datetime import timedelta +from http import HTTPStatus +import re +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.habitica.const import DEFAULT_URL, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er +import homeassistant.util.dt as dt_util + +from .conftest import mock_called_with + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_object_fixture, + snapshot_platform, +) +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture(autouse=True) +def button_only() -> Generator[None]: + """Enable only the button platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.BUTTON], + ): + yield + + +@pytest.mark.parametrize( + "fixture", + [ + "wizard_fixture", + "rogue_fixture", + "warrior_fixture", + "healer_fixture", + ], +) +async def test_buttons( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + fixture: str, +) -> None: + """Test button entities.""" + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture(f"{fixture}.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json=load_json_object_fixture("completed_todos.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "api_url", "fixture"), + [ + ("button.test_user_allocate_all_stat_points", "user/allocate-now", "user"), + ("button.test_user_buy_a_health_potion", "user/buy-health-potion", "user"), + ("button.test_user_revive_from_death", "user/revive", "user"), + ("button.test_user_start_my_day", "cron", "user"), + ( + "button.test_user_chilling_frost", + "user/class/cast/frost", + "wizard_fixture", + ), + ( + "button.test_user_earthquake", + "user/class/cast/earth", 
+ "wizard_fixture", + ), + ( + "button.test_user_ethereal_surge", + "user/class/cast/mpheal", + "wizard_fixture", + ), + ( + "button.test_user_stealth", + "user/class/cast/stealth", + "rogue_fixture", + ), + ( + "button.test_user_tools_of_the_trade", + "user/class/cast/toolsOfTrade", + "rogue_fixture", + ), + ( + "button.test_user_defensive_stance", + "user/class/cast/defensiveStance", + "warrior_fixture", + ), + ( + "button.test_user_intimidating_gaze", + "user/class/cast/intimidate", + "warrior_fixture", + ), + ( + "button.test_user_valorous_presence", + "user/class/cast/valorousPresence", + "warrior_fixture", + ), + ( + "button.test_user_healing_light", + "user/class/cast/heal", + "healer_fixture", + ), + ( + "button.test_user_protective_aura", + "user/class/cast/protectAura", + "healer_fixture", + ), + ( + "button.test_user_searing_brightness", + "user/class/cast/brightness", + "healer_fixture", + ), + ( + "button.test_user_blessing", + "user/class/cast/healAll", + "healer_fixture", + ), + ], +) +async def test_button_press( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + entity_id: str, + api_url: str, + fixture: str, +) -> None: + """Test button press method.""" + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture(f"{fixture}.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json=load_json_object_fixture("completed_todos.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + aioclient_mock.post(f"{DEFAULT_URL}/api/v3/{api_url}", json={"data": None}) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_called_with(aioclient_mock, "post", f"{DEFAULT_URL}/api/v3/{api_url}") + + +@pytest.mark.parametrize( + ("entity_id", "api_url"), + [ + ("button.test_user_allocate_all_stat_points", "user/allocate-now"), + ("button.test_user_buy_a_health_potion", "user/buy-health-potion"), + ("button.test_user_revive_from_death", "user/revive"), + ("button.test_user_start_my_day", "cron"), + ("button.test_user_chilling_frost", "user/class/cast/frost"), + ("button.test_user_earthquake", "user/class/cast/earth"), + ("button.test_user_ethereal_surge", "user/class/cast/mpheal"), + ], + ids=[ + "allocate-points", + "health-potion", + "revive", + "run-cron", + "chilling frost", + "earthquake", + "ethereal surge", + ], +) +@pytest.mark.parametrize( + ("status_code", "msg", "exception"), + [ + ( + HTTPStatus.TOO_MANY_REQUESTS, + "Rate limit exceeded, try again later", + ServiceValidationError, + ), + ( + HTTPStatus.BAD_REQUEST, + "Unable to connect to Habitica, try again later", + HomeAssistantError, + ), + ( + HTTPStatus.UNAUTHORIZED, + "Unable to complete action, the required conditions are not met", + ServiceValidationError, + ), + ], +) +async def test_button_press_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + entity_id: str, + api_url: str, + status_code: HTTPStatus, + msg: str, + exception: 
Exception, +) -> None: + """Test button press exceptions.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/{api_url}", + status=status_code, + json={"data": None}, + ) + + with pytest.raises(exception, match=msg): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_called_with(mock_habitica, "post", f"{DEFAULT_URL}/api/v3/{api_url}") + + +@pytest.mark.parametrize( + ("fixture", "entity_ids"), + [ + ( + "common_buttons_unavailable", + [ + "button.test_user_allocate_all_stat_points", + "button.test_user_revive_from_death", + "button.test_user_buy_a_health_potion", + "button.test_user_start_my_day", + ], + ), + ( + "wizard_skills_unavailable", + [ + "button.test_user_chilling_frost", + "button.test_user_earthquake", + "button.test_user_ethereal_surge", + ], + ), + ("wizard_frost_unavailable", ["button.test_user_chilling_frost"]), + ( + "rogue_skills_unavailable", + ["button.test_user_tools_of_the_trade", "button.test_user_stealth"], + ), + ("rogue_stealth_unavailable", ["button.test_user_stealth"]), + ( + "warrior_skills_unavailable", + [ + "button.test_user_defensive_stance", + "button.test_user_intimidating_gaze", + "button.test_user_valorous_presence", + ], + ), + ( + "healer_skills_unavailable", + [ + "button.test_user_healing_light", + "button.test_user_protective_aura", + "button.test_user_searing_brightness", + "button.test_user_blessing", + ], + ), + ], +) +async def test_button_unavailable( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + fixture: str, + entity_ids: list[str], +) -> None: + """Test buttons are unavailable if conditions are not met.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture(f"{fixture}.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get(re.compile(r".*"), json={"data": []}) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + for entity_id in entity_ids: + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE + + +async def test_class_change( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test removing and adding skills after class change.""" + mage_skills = [ + "button.test_user_chilling_frost", + "button.test_user_earthquake", + "button.test_user_ethereal_surge", + ] + healer_skills = [ + "button.test_user_healing_light", + "button.test_user_protective_aura", + "button.test_user_searing_brightness", + "button.test_user_blessing", + ] + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture("wizard_fixture.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json=load_json_object_fixture("completed_todos.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get( + 
f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + for skill in mage_skills: + assert hass.states.get(skill) + + aioclient_mock._mocks.pop(0) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture("healer_fixture.json", DOMAIN), + ) + + async_fire_time_changed(hass, dt_util.now() + timedelta(seconds=60)) + await hass.async_block_till_done() + + for skill in mage_skills: + assert not hass.states.get(skill) + + for skill in healer_skills: + assert hass.states.get(skill) diff --git a/tests/components/habitica/test_calendar.py b/tests/components/habitica/test_calendar.py new file mode 100644 index 00000000000..ff3ffbeb80d --- /dev/null +++ b/tests/components/habitica/test_calendar.py @@ -0,0 +1,95 @@ +"""Tests for the Habitica calendar platform.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform +from tests.typing import ClientSessionGenerator + + +@pytest.fixture(autouse=True) +def calendar_only() -> Generator[None]: + """Enable only the calendar platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.CALENDAR], + ): + yield + + +@pytest.fixture(autouse=True) +async def set_tz(hass: HomeAssistant) -> None: + """Fixture to set timezone.""" + await hass.config.async_set_time_zone("Europe/Berlin") + + +@pytest.mark.usefixtures("mock_habitica") +@pytest.mark.freeze_time("2024-09-20T22:00:00.000Z") +async def test_calendar_platform( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the Habitica calendar platform.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity"), + [ + "calendar.test_user_to_do_s", + "calendar.test_user_dailies", + "calendar.test_user_daily_reminders", + "calendar.test_user_to_do_reminders", + ], +) +@pytest.mark.parametrize( + ("start_date", "end_date"), + [ + ("2024-08-29", "2024-10-08"), + ("2023-08-01", "2023-08-02"), + ], + ids=[ + "default date range", + "date range in the past", + ], +) +@pytest.mark.freeze_time("2024-09-20T22:00:00.000Z") +@pytest.mark.usefixtures("mock_habitica") +async def test_api_events( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + config_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, + entity: str, + start_date: str, + end_date: str, +) -> None: + """Test calendar event.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + client = await hass_client() + response = await client.get( + f"/api/calendars/{entity}?start={start_date}&end={end_date}" + ) + + assert await response.json() == 
snapshot diff --git a/tests/components/habitica/test_config_flow.py b/tests/components/habitica/test_config_flow.py index 4dfc696daf2..604877f0c47 100644 --- a/tests/components/habitica/test_config_flow.py +++ b/tests/components/habitica/test_config_flow.py @@ -3,26 +3,150 @@ from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientResponseError +import pytest from homeassistant import config_entries -from homeassistant.components.habitica.const import DEFAULT_URL, DOMAIN +from homeassistant.components.habitica.const import CONF_API_USER, DEFAULT_URL, DOMAIN +from homeassistant.const import ( + CONF_API_KEY, + CONF_PASSWORD, + CONF_URL, + CONF_USERNAME, + CONF_VERIFY_SSL, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry +MOCK_DATA_LOGIN_STEP = { + CONF_USERNAME: "test-email@example.com", + CONF_PASSWORD: "test-password", +} +MOCK_DATA_ADVANCED_STEP = { + CONF_API_USER: "test-api-user", + CONF_API_KEY: "test-api-key", + CONF_URL: DEFAULT_URL, + CONF_VERIFY_SSL: True, +} -async def test_form(hass: HomeAssistant) -> None: +async def test_form_login(hass: HomeAssistant) -> None: + """Test we get the login form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.MENU + assert "login" in result["menu_options"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "login"} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "login" + + mock_obj = MagicMock() + mock_obj.user.auth.local.login.post = AsyncMock() + mock_obj.user.auth.local.login.post.return_value = { + "id": "test-api-user", + "apiToken": "test-api-key", + "username": "test-username", + } + with ( + patch( + "homeassistant.components.habitica.config_flow.HabitipyAsync", + return_value=mock_obj, + ), + patch( + "homeassistant.components.habitica.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.habitica.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LOGIN_STEP, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == { + **MOCK_DATA_ADVANCED_STEP, + CONF_USERNAME: "test-username", + } + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (ClientResponseError(MagicMock(), (), status=400), "cannot_connect"), + (ClientResponseError(MagicMock(), (), status=401), "invalid_auth"), + (IndexError(), "unknown"), + ], +) +async def test_form_login_errors(hass: HomeAssistant, raise_error, text_error) -> None: + """Test we handle invalid credentials error.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "login"} + ) + + mock_obj = MagicMock() + mock_obj.user.auth.local.login.post = AsyncMock(side_effect=raise_error) + with patch( + 
"homeassistant.components.habitica.config_flow.HabitipyAsync", + return_value=mock_obj, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LOGIN_STEP, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": text_error} + + +async def test_form_advanced(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) + + assert result["type"] is FlowResultType.MENU + assert "advanced" in result["menu_options"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "advanced"} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "advanced" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "advanced"} + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} mock_obj = MagicMock() mock_obj.user.get = AsyncMock() + mock_obj.user.get.return_value = {"auth": {"local": {"username": "test-username"}}} with ( patch( @@ -39,103 +163,55 @@ async def test_form(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"api_user": "test-api-user", "api_key": "test-api-key"}, + user_input=MOCK_DATA_ADVANCED_STEP, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Default username" + assert result2["title"] == "test-username" assert result2["data"] == { - "url": DEFAULT_URL, - "api_user": "test-api-user", - "api_key": "test-api-key", + **MOCK_DATA_ADVANCED_STEP, + CONF_USERNAME: "test-username", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_invalid_credentials(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (ClientResponseError(MagicMock(), (), status=400), "cannot_connect"), + (ClientResponseError(MagicMock(), (), status=401), "invalid_auth"), + (IndexError(), "unknown"), + ], +) +async def test_form_advanced_errors( + hass: HomeAssistant, raise_error, text_error +) -> None: """Test we handle invalid credentials error.""" + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_obj = MagicMock() - mock_obj.user.get = AsyncMock(side_effect=ClientResponseError(MagicMock(), ())) - - with patch( - "homeassistant.components.habitica.config_flow.HabitipyAsync", - return_value=mock_obj, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "url": DEFAULT_URL, - "api_user": "test-api-user", - "api_key": "test-api-key", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_credentials"} - - -async def test_form_unexpected_exception(hass: HomeAssistant) -> None: - """Test we handle unexpected exception error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - mock_obj = MagicMock() - mock_obj.user.get = AsyncMock(side_effect=Exception) - - with patch( - "homeassistant.components.habitica.config_flow.HabitipyAsync", - return_value=mock_obj, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "url": DEFAULT_URL, - "api_user": "test-api-user", - "api_key": 
"test-api-key", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "unknown"} - - -async def test_manual_flow_config_exist(hass: HomeAssistant) -> None: - """Test config flow discovers only already configured config.""" - MockConfigEntry( - domain=DOMAIN, - unique_id="test-api-user", - data={"api_user": "test-api-user", "api_key": "test-api-key"}, - ).add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT} - ) - - assert result["type"] is FlowResultType.FORM + assert result["type"] is FlowResultType.MENU assert result["step_id"] == "user" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "advanced"} + ) + mock_obj = MagicMock() - mock_obj.user.get = AsyncMock(return_value={"api_user": "test-api-user"}) + mock_obj.user.get = AsyncMock(side_effect=raise_error) with patch( "homeassistant.components.habitica.config_flow.HabitipyAsync", return_value=mock_obj, ): - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - { - "url": DEFAULT_URL, - "api_user": "test-api-user", - "api_key": "test-api-key", - }, + user_input=MOCK_DATA_ADVANCED_STEP, ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": text_error} diff --git a/tests/components/habitica/test_diagnostics.py b/tests/components/habitica/test_diagnostics.py new file mode 100644 index 00000000000..68b40fe254a --- /dev/null +++ b/tests/components/habitica/test_diagnostics.py @@ -0,0 +1,27 @@ +"""Tests for Habitica diagnostics.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("mock_habitica") +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/habitica/test_init.py b/tests/components/habitica/test_init.py index 4c2b1e2aae6..fd8a18b2d44 100644 --- a/tests/components/habitica/test_init.py +++ b/tests/components/habitica/test_init.py @@ -1,7 +1,10 @@ """Test the habitica module.""" +import datetime from http import HTTPStatus +import logging +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.habitica.const import ( @@ -13,10 +16,16 @@ from homeassistant.components.habitica.const import ( EVENT_API_CALL_SUCCESS, SERVICE_API_CALL, ) +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_NAME from homeassistant.core import Event, HomeAssistant -from tests.common import MockConfigEntry, async_capture_events +from tests.common import ( + MockConfigEntry, + async_capture_events, + async_fire_time_changed, + load_json_object_fixture, +) from tests.test_util.aiohttp import AiohttpClientMocker TEST_API_CALL_ARGS = {"text": "Use API from Home 
Assistant", "type": "todo"} @@ -29,120 +38,47 @@ def capture_api_call_success(hass: HomeAssistant) -> list[Event]: return async_capture_events(hass, EVENT_API_CALL_SUCCESS) -@pytest.fixture -def habitica_entry(hass: HomeAssistant) -> MockConfigEntry: - """Test entry for the following tests.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="test-api-user", - data={ - "api_user": "test-api-user", - "api_key": "test-api-key", - "url": DEFAULT_URL, - }, - ) - entry.add_to_hass(hass) - return entry +@pytest.mark.usefixtures("mock_habitica") +async def test_entry_setup_unload( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test integration setup and unload.""" + + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(config_entry.entry_id) + + assert config_entry.state is ConfigEntryState.NOT_LOADED -@pytest.fixture -def common_requests(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: - """Register requests for the tests.""" - aioclient_mock.get( - "https://habitica.com/api/v3/user", - json={ - "data": { - "api_user": "test-api-user", - "profile": {"name": TEST_USER_NAME}, - "stats": { - "class": "warrior", - "con": 1, - "exp": 2, - "gp": 3, - "hp": 4, - "int": 5, - "lvl": 6, - "maxHealth": 7, - "maxMP": 8, - "mp": 9, - "per": 10, - "points": 11, - "str": 12, - "toNextLevel": 13, - }, - } - }, - ) - aioclient_mock.get( - "https://habitica.com/api/v3/tasks/user?type=completedTodos", - json={ - "data": [ - { - "text": "this is a mock todo #5", - "id": 5, - "_id": 5, - "type": "todo", - "completed": True, - } - ] - }, - ) - aioclient_mock.get( - "https://habitica.com/api/v3/tasks/user", - json={ - "data": [ - { - "text": f"this is a mock {task} #{i}", - "id": f"{i}", - "type": task, - "completed": False, - } - for i, task in enumerate(("habit", "daily", "todo", "reward"), start=1) - ] - }, - ) +@pytest.mark.usefixtures("mock_habitica") +async def test_service_call( + hass: HomeAssistant, + config_entry: MockConfigEntry, + capture_api_call_success: list[Event], + mock_habitica: AiohttpClientMocker, +) -> None: + """Test integration setup, service call and unload.""" + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() - aioclient_mock.post( + assert config_entry.state is ConfigEntryState.LOADED + + assert len(capture_api_call_success) == 0 + + mock_habitica.post( "https://habitica.com/api/v3/tasks/user", status=HTTPStatus.CREATED, json={"data": TEST_API_CALL_ARGS}, ) - return aioclient_mock - - -@pytest.mark.usefixtures("common_requests") -async def test_entry_setup_unload( - hass: HomeAssistant, habitica_entry: MockConfigEntry -) -> None: - """Test integration setup and unload.""" - assert await hass.config_entries.async_setup(habitica_entry.entry_id) - await hass.async_block_till_done() - - assert hass.services.has_service(DOMAIN, SERVICE_API_CALL) - - assert await hass.config_entries.async_unload(habitica_entry.entry_id) - - assert not hass.services.has_service(DOMAIN, SERVICE_API_CALL) - - -@pytest.mark.usefixtures("common_requests") -async def test_service_call( - hass: HomeAssistant, - habitica_entry: MockConfigEntry, - capture_api_call_success: list[Event], -) -> None: - """Test integration setup, service call and unload.""" - - assert await 
hass.config_entries.async_setup(habitica_entry.entry_id) - await hass.async_block_till_done() - - assert hass.services.has_service(DOMAIN, SERVICE_API_CALL) - - assert len(capture_api_call_success) == 0 - TEST_SERVICE_DATA = { - ATTR_NAME: "test_user", + ATTR_NAME: "test-user", ATTR_PATH: ["tasks", "user", "post"], ATTR_ARGS: TEST_API_CALL_ARGS, } @@ -156,6 +92,77 @@ async def test_service_call( del captured_data[ATTR_DATA] assert captured_data == TEST_SERVICE_DATA - assert await hass.config_entries.async_unload(habitica_entry.entry_id) - assert not hass.services.has_service(DOMAIN, SERVICE_API_CALL) +@pytest.mark.parametrize( + ("status"), [HTTPStatus.NOT_FOUND, HTTPStatus.TOO_MANY_REQUESTS] +) +async def test_config_entry_not_ready( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + status: HTTPStatus, +) -> None: + """Test config entry not ready.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + status=status, + ) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_coordinator_update_failed( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test coordinator update failed.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture("user.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + status=HTTPStatus.NOT_FOUND, + ) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_coordinator_rate_limited( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator when rate limited.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.clear_requests() + mock_habitica.get( + f"{DEFAULT_URL}/api/v3/user", + status=HTTPStatus.TOO_MANY_REQUESTS, + ) + + with caplog.at_level(logging.DEBUG): + freezer.tick(datetime.timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert "Rate limit exceeded, will try again later" in caplog.text diff --git a/tests/components/habitica/test_sensor.py b/tests/components/habitica/test_sensor.py new file mode 100644 index 00000000000..defe5a270ae --- /dev/null +++ b/tests/components/habitica/test_sensor.py @@ -0,0 +1,72 @@ +"""Test Habitica sensor platform.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.habitica.const import DOMAIN +from homeassistant.components.habitica.sensor import HabitipySensorEntity +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +def sensor_only() -> Generator[None]: + """Enable only the sensor platform.""" + with 
patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.SENSOR], + ): + yield + + +@pytest.mark.usefixtures("mock_habitica", "entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the Habitica sensor platform.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.usefixtures("mock_habitica", "entity_registry_enabled_by_default") +async def test_sensor_deprecation_issue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, +) -> None: + """Test task sensor deprecation issue.""" + + with patch( + "homeassistant.components.habitica.sensor.entity_used_in", return_value=True + ): + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert issue_registry.async_get_issue( + domain=DOMAIN, + issue_id=f"deprecated_task_entity_{HabitipySensorEntity.TODOS}", + ) + assert issue_registry.async_get_issue( + domain=DOMAIN, + issue_id=f"deprecated_task_entity_{HabitipySensorEntity.DAILIES}", + ) diff --git a/tests/components/habitica/test_services.py b/tests/components/habitica/test_services.py new file mode 100644 index 00000000000..cd363eba3b5 --- /dev/null +++ b/tests/components/habitica/test_services.py @@ -0,0 +1,791 @@ +"""Test Habitica actions.""" + +from collections.abc import Generator +from http import HTTPStatus +from typing import Any +from unittest.mock import patch + +import pytest + +from homeassistant.components.habitica.const import ( + ATTR_CONFIG_ENTRY, + ATTR_DIRECTION, + ATTR_ITEM, + ATTR_SKILL, + ATTR_TARGET, + ATTR_TASK, + DEFAULT_URL, + DOMAIN, + SERVICE_ABORT_QUEST, + SERVICE_ACCEPT_QUEST, + SERVICE_CANCEL_QUEST, + SERVICE_CAST_SKILL, + SERVICE_LEAVE_QUEST, + SERVICE_REJECT_QUEST, + SERVICE_SCORE_HABIT, + SERVICE_SCORE_REWARD, + SERVICE_START_QUEST, + SERVICE_TRANSFORMATION, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from .conftest import load_json_object_fixture, mock_called_with + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + +REQUEST_EXCEPTION_MSG = "Unable to connect to Habitica, try again later" +RATE_LIMIT_EXCEPTION_MSG = "Rate limit exceeded, try again later" + + +@pytest.fixture(autouse=True) +def services_only() -> Generator[None]: + """Enable only services.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [], + ): + yield + + +@pytest.fixture(autouse=True) +async def load_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + services_only: Generator, +) -> None: + """Load config entry.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.fixture(autouse=True) +def uuid_mock() -> Generator[None]: + """Mock the UUID.""" + with patch( + "uuid.uuid4", 
return_value="5d1935ff-80c8-443c-b2e9-733c66b44745" + ) as uuid_mock: + yield uuid_mock.return_value + + +@pytest.mark.parametrize( + ("service_data", "item", "target_id"), + [ + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "pickpocket", + }, + "pickPocket", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "backstab", + }, + "backStab", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "fireball", + }, + "fireball", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "smash", + }, + "smash", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + "smash", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "pay_bills", + ATTR_SKILL: "smash", + }, + "smash", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ], + ids=[ + "cast pickpocket", + "cast backstab", + "cast fireball", + "cast smash", + "select task by name", + "select task_by_alias", + ], +) +async def test_cast_skill( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + item: str, + target_id: str, +) -> None: + """Test Habitica cast skill action.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + ) + + +@pytest.mark.parametrize( + ( + "service_data", + "http_status", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + { + ATTR_TASK: "task-not-found", + ATTR_SKILL: "smash", + }, + HTTPStatus.OK, + ServiceValidationError, + "Unable to complete action, could not find the task 'task-not-found'", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.NOT_FOUND, + ServiceValidationError, + "Unable to cast skill, your character does not have the skill or spell smash", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.UNAUTHORIZED, + ServiceValidationError, + "Unable to cast skill, not enough mana. 
Your character has 50 MP, but the skill costs 10 MP", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + REQUEST_EXCEPTION_MSG, + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_cast_skill_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + http_status: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica cast skill action exceptions.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/smash?targetId=2f6fcabc-f670-4ec3-ba65-817e8deea490", + json={"success": True, "data": {}}, + status=http_status, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_habitica") +async def test_get_config_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test Habitica config entry exceptions.""" + + with pytest.raises( + ServiceValidationError, + match="The selected character is not configured in Home Assistant", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: "0000000000000000", + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "smash", + }, + return_response=True, + blocking=True, + ) + + assert await hass.config_entries.async_unload(config_entry.entry_id) + + with pytest.raises( + ServiceValidationError, + match="The selected character is currently not loaded or disabled in Home Assistant", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "smash", + }, + return_response=True, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "command"), + [ + (SERVICE_ABORT_QUEST, "abort"), + (SERVICE_ACCEPT_QUEST, "accept"), + (SERVICE_CANCEL_QUEST, "cancel"), + (SERVICE_LEAVE_QUEST, "leave"), + (SERVICE_REJECT_QUEST, "reject"), + (SERVICE_START_QUEST, "force-start"), + ], + ids=[], +) +async def test_handle_quests( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service: str, + command: str, +) -> None: + """Test Habitica actions for quest handling.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + service, + service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", + ) + + +@pytest.mark.parametrize( + ( + "http_status", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + HTTPStatus.NOT_FOUND, + ServiceValidationError, + "Unable to complete action, quest or group not found", + ), + ( + HTTPStatus.UNAUTHORIZED, + ServiceValidationError, + "Action not allowed, only quest leader or group leader can perform this action", + ), + ( + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + 
REQUEST_EXCEPTION_MSG, + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_handle_quests_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + http_status: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica handle quests action exceptions.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/groups/party/quests/accept", + json={"success": True, "data": {}}, + status=http_status, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_ACCEPT_QUEST, + service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, + return_response=True, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "service_data", "task_id"), + [ + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "up", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "down", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ( + SERVICE_SCORE_REWARD, + { + ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + }, + "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + ), + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "Füge eine Aufgabe zu Habitica hinzu", + ATTR_DIRECTION: "up", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "create_a_task", + ATTR_DIRECTION: "up", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ], + ids=[ + "habit score up", + "habit score down", + "buy reward", + "match task by name", + "match task by alias", + ], +) +async def test_score_task( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service: str, + service_data: dict[str, Any], + task_id: str, +) -> None: + """Test Habitica score task action.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + service, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", + ) + + +@pytest.mark.parametrize( + ( + "service_data", + "http_status", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + { + ATTR_TASK: "task does not exist", + ATTR_DIRECTION: "up", + }, + HTTPStatus.OK, + ServiceValidationError, + "Unable to complete action, could not find the task 'task does not exist'", + ), + ( + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "up", + }, + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "up", + }, + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + REQUEST_EXCEPTION_MSG, + ), + ( + { + ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + ATTR_DIRECTION: "up", + }, + HTTPStatus.UNAUTHORIZED, + HomeAssistantError, + "Unable to buy reward, not enough gold. 
Your character has 137.63 GP, but the reward costs 10 GP", + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_score_task_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + http_status: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica score task action exceptions.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/e97659e0-2c42-4599-a7bb-00282adc410d/score/up", + json={"success": True, "data": {}}, + status=http_status, + ) + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b/score/up", + json={"success": True, "data": {}}, + status=http_status, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_SCORE_HABIT, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service_data", "item", "target_id"), + [ + ( + { + ATTR_TARGET: "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ATTR_ITEM: "shiny_seed", + }, + "shinySeed", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ATTR_ITEM: "seafoam", + }, + "seafoam", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ATTR_ITEM: "snowball", + }, + "snowball", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "test-user", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "test-username", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + ), + ( + { + ATTR_TARGET: "test-partymember-displayname", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + ), + ], + ids=[], +) +async def test_transformation( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + item: str, + target_id: str, +) -> None: + """Test Habitica user transformation item action.""" + mock_habitica.get( + f"{DEFAULT_URL}/api/v3/groups/party/members", + json=load_json_object_fixture("party_members.json", DOMAIN), + ) + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_TRANSFORMATION, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + ) + + +@pytest.mark.parametrize( + ( + "service_data", + "http_status_members", + "http_status_cast", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + { 
+ ATTR_TARGET: "user-not-found", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.OK, + HTTPStatus.OK, + ServiceValidationError, + "Unable to find target 'user-not-found' in your party", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.TOO_MANY_REQUESTS, + HTTPStatus.OK, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.NOT_FOUND, + HTTPStatus.OK, + ServiceValidationError, + "Unable to find target, you are currently not in a party. You can only target yourself", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.BAD_REQUEST, + HTTPStatus.OK, + HomeAssistantError, + "Unable to connect to Habitica, try again later", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.OK, + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.OK, + HTTPStatus.UNAUTHORIZED, + ServiceValidationError, + "Unable to use spooky_sparkles, you don't own this item", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.OK, + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + "Unable to connect to Habitica, try again later", + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_transformation_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + http_status_members: HTTPStatus, + http_status_cast: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica transformation action exceptions.""" + mock_habitica.get( + f"{DEFAULT_URL}/api/v3/groups/party/members", + json=load_json_object_fixture("party_members.json", DOMAIN), + status=http_status_members, + ) + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/spookySparkles?targetId=ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + json={"success": True, "data": {}}, + status=http_status_cast, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_TRANSFORMATION, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) diff --git a/tests/components/habitica/test_switch.py b/tests/components/habitica/test_switch.py new file mode 100644 index 00000000000..55ba7b19b22 --- /dev/null +++ b/tests/components/habitica/test_switch.py @@ -0,0 +1,138 @@ +"""Tests for the Habitica switch platform.""" + +from collections.abc import Generator +from http import HTTPStatus +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.habitica.const import DEFAULT_URL +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from .conftest import mock_called_with + +from tests.common import 
MockConfigEntry, snapshot_platform +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture(autouse=True) +def switch_only() -> Generator[None]: + """Enable only the switch platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.SWITCH], + ): + yield + + +@pytest.mark.usefixtures("mock_habitica") +async def test_switch( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch entities.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("service_call"), + [ + SERVICE_TURN_ON, + SERVICE_TURN_OFF, + SERVICE_TOGGLE, + ], +) +async def test_turn_on_off_toggle( + hass: HomeAssistant, + config_entry: MockConfigEntry, + service_call: str, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test switch turn on/off, toggle method.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/sleep", + json={"success": True, "data": False}, + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + SWITCH_DOMAIN, + service_call, + {ATTR_ENTITY_ID: "switch.test_user_rest_in_the_inn"}, + blocking=True, + ) + + assert mock_called_with(mock_habitica, "post", f"{DEFAULT_URL}/api/v3/user/sleep") + + +@pytest.mark.parametrize( + ("service_call"), + [ + SERVICE_TURN_ON, + SERVICE_TURN_OFF, + SERVICE_TOGGLE, + ], +) +@pytest.mark.parametrize( + ("status_code", "exception"), + [ + (HTTPStatus.TOO_MANY_REQUESTS, ServiceValidationError), + (HTTPStatus.BAD_REQUEST, HomeAssistantError), + ], +) +async def test_turn_on_off_toggle_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + service_call: str, + mock_habitica: AiohttpClientMocker, + status_code: HTTPStatus, + exception: Exception, +) -> None: + """Test switch turn on/off, toggle method.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/sleep", + status=status_code, + json={"success": True, "data": False}, + ) + + with pytest.raises(expected_exception=exception): + await hass.services.async_call( + SWITCH_DOMAIN, + service_call, + {ATTR_ENTITY_ID: "switch.test_user_rest_in_the_inn"}, + blocking=True, + ) + + assert mock_called_with(mock_habitica, "post", f"{DEFAULT_URL}/api/v3/user/sleep") diff --git a/tests/components/habitica/test_todo.py b/tests/components/habitica/test_todo.py new file mode 100644 index 00000000000..66f741eb39a --- /dev/null +++ b/tests/components/habitica/test_todo.py @@ -0,0 +1,695 @@ +"""Tests for Habitica todo platform.""" + +from collections.abc import Generator +from http import HTTPStatus +import json +import re +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.habitica.const import DEFAULT_URL, DOMAIN +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) 
+from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from .conftest import mock_called_with + +from tests.common import ( + MockConfigEntry, + async_get_persistent_notifications, + load_json_object_fixture, + snapshot_platform, +) +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import WebSocketGenerator + + +@pytest.fixture(autouse=True) +def todo_only() -> Generator[None]: + """Enable only the todo platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.TODO], + ): + yield + + +@pytest.mark.usefixtures("mock_habitica") +async def test_todos( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test todo platform.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id"), + [ + "todo.test_user_to_do_s", + "todo.test_user_dailies", + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_todo_items( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_id: str, +) -> None: + """Test items on todo lists.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + result = await hass.services.async_call( + TODO_DOMAIN, + TodoServices.GET_ITEMS, + {}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + return_response=True, + ) + + assert result == snapshot + + +@pytest.mark.freeze_time("2024-09-21 00:00:00") +@pytest.mark.parametrize( + ("entity_id", "uid"), + [ + ("todo.test_user_to_do_s", "88de7cd9-af2b-49ce-9afd-bf941d87336b"), + ("todo.test_user_dailies", "f2c85972-1a19-4426-bc6d-ce3337b9d99f"), + ], + ids=["todo", "daily"], +) +async def test_complete_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + snapshot: SnapshotAssertion, + entity_id: str, + uid: str, +) -> None: + """Test completing an item on the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/up", + json=load_json_object_fixture("score_with_drop.json", DOMAIN), + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, "post", f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/up" + ) + + # Test notification for item drop + notifications = async_get_persistent_notifications(hass) + assert len(notifications) == 1 + _id, *_ = notifications + assert snapshot == (notifications[_id]["title"], notifications[_id]["message"]) + + +@pytest.mark.parametrize( + ("entity_id", "uid"), + [ + ("todo.test_user_to_do_s", "162f0bbe-a097-4a06-b4f4-8fbeed85d2ba"), + 
("todo.test_user_dailies", "564b9ac9-c53d-4638-9e7f-1cd96fe19baa"), + ], + ids=["todo", "daily"], +) +async def test_uncomplete_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + entity_id: str, + uid: str, +) -> None: + """Test uncompleting an item on the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/down", + json={"data": {}, "success": True}, + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, "post", f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/down" + ) + + +@pytest.mark.parametrize( + ("uid", "status"), + [ + ("88de7cd9-af2b-49ce-9afd-bf941d87336b", "completed"), + ("162f0bbe-a097-4a06-b4f4-8fbeed85d2ba", "needs_action"), + ], + ids=["completed", "needs_action"], +) +async def test_complete_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + uid: str, + status: str, +) -> None: + """Test exception when completing/uncompleting an item on the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + re.compile(f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/.+"), + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match=r"Unable to update the score for your Habitica to-do `.+`, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_STATUS: status}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("entity_id", "uid", "date"), + [ + ( + "todo.test_user_to_do_s", + "88de7cd9-af2b-49ce-9afd-bf941d87336b", + "2024-07-30", + ), + ( + "todo.test_user_dailies", + "f2c85972-1a19-4426-bc6d-ce3337b9d99f", + None, + ), + ], + ids=["todo", "daily"], +) +async def test_update_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + entity_id: str, + uid: str, + date: str, +) -> None: + """Test update details of a item on the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.put( + f"{DEFAULT_URL}/api/v3/tasks/{uid}", + json={"data": {}, "success": True}, + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + { + ATTR_ITEM: uid, + ATTR_RENAME: "test-summary", + ATTR_DESCRIPTION: "test-description", + ATTR_DUE_DATE: date, + }, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + mock_call = mock_called_with( + mock_habitica, "PUT", f"{DEFAULT_URL}/api/v3/tasks/{uid}" + ) + assert mock_call + assert json.loads(mock_call[2]) == { + "date": date, + "notes": "test-description", + "text": "test-summary", + } + + +async def test_update_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test exception when update 
item on the todo list.""" + uid = "88de7cd9-af2b-49ce-9afd-bf941d87336b" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.put( + f"{DEFAULT_URL}/api/v3/tasks/{uid}", + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match="Unable to update the Habitica to-do `test-summary`, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + { + ATTR_ITEM: uid, + ATTR_RENAME: "test-summary", + ATTR_DESCRIPTION: "test-description", + ATTR_DUE_DATE: "2024-07-30", + }, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +async def test_add_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test add a todo item to the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/user", + json={"data": {}, "success": True}, + status=HTTPStatus.CREATED, + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + { + ATTR_ITEM: "test-summary", + ATTR_DESCRIPTION: "test-description", + ATTR_DUE_DATE: "2024-07-30", + }, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + mock_call = mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/tasks/user", + ) + assert mock_call + assert json.loads(mock_call[2]) == { + "date": "2024-07-30", + "notes": "test-description", + "text": "test-summary", + "type": "todo", + } + + +async def test_add_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test exception when adding a todo item to the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/user", + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match="Unable to create new to-do `test-summary` for Habitica, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + { + ATTR_ITEM: "test-summary", + ATTR_DESCRIPTION: "test-description", + ATTR_DUE_DATE: "2024-07-30", + }, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +async def test_delete_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test deleting a todo item from the todo list.""" + + uid = "2f6fcabc-f670-4ec3-ba65-817e8deea490" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.delete( + f"{DEFAULT_URL}/api/v3/tasks/{uid}", + json={"data": {}, "success": True}, + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uid}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, "delete", f"{DEFAULT_URL}/api/v3/tasks/{uid}" + ) 
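# --- Editor's note (illustrative sketch, not part of this PR) ----------------
# The assertion above, like many others in these tests, relies on the
# `mock_called_with` helper imported from this integration's test conftest,
# which is not shown in this diff. The sketch below is an assumption of how
# such a helper could work, based on AiohttpClientMocker recording each
# request in `mock_calls` as a (method, url, data, headers) tuple (the tests
# read the request body via `mock_call[2]`); it is not the PR's actual code.

from yarl import URL

from tests.test_util.aiohttp import AiohttpClientMocker


def mock_called_with(
    mock_client: AiohttpClientMocker, method: str, url: str
) -> tuple | None:
    """Return the first recorded request matching method and URL, else None.

    Method comparison is case-insensitive because the tests pass "post",
    "PUT" and "delete" interchangeably.
    """
    return next(
        (
            call
            for call in mock_client.mock_calls
            if call[0].lower() == method.lower() and call[1] == URL(url)
        ),
        None,
    )
# ------------------------------------------------------------------------------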
+ + +async def test_delete_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test exception when deleting a todo item from the todo list.""" + + uid = "2f6fcabc-f670-4ec3-ba65-817e8deea490" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.delete( + f"{DEFAULT_URL}/api/v3/tasks/{uid}", + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match="Unable to delete item from Habitica to-do list, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uid}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +async def test_delete_completed_todo_items( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test deleting completed todo items from the todo list.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/clearCompletedTodos", + json={"data": {}, "success": True}, + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_COMPLETED_ITEMS, + {}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, "post", f"{DEFAULT_URL}/api/v3/tasks/clearCompletedTodos" + ) + + +async def test_delete_completed_todo_items_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test exception when deleting completed todo items from the todo list.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/clearCompletedTodos", + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match="Unable to delete completed to-do items from Habitica to-do list, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_COMPLETED_ITEMS, + {}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("entity_id", "uid", "previous_uid"), + [ + ( + "todo.test_user_to_do_s", + "1aa3137e-ef72-4d1f-91ee-41933602f438", + "88de7cd9-af2b-49ce-9afd-bf941d87336b", + ), + ( + "todo.test_user_dailies", + "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1", + "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + ), + ], + ids=["todo", "daily"], +) +async def test_move_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + hass_ws_client: WebSocketGenerator, + entity_id: str, + uid: str, + previous_uid: str, +) -> None: + """Test move todo items.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + for pos in (0, 1): + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{uid}/move/to/{pos}", + json={"data": {}, "success": True}, + ) + + client = await hass_ws_client() + # move to 
second position + data = { + "id": id, + "type": "todo/item/move", + "entity_id": entity_id, + "uid": uid, + "previous_uid": previous_uid, + } + await client.send_json_auto_id(data) + resp = await client.receive_json() + assert resp.get("success") + + # move to top position + data = { + "id": id, + "type": "todo/item/move", + "entity_id": entity_id, + "uid": uid, + } + await client.send_json_auto_id(data) + resp = await client.receive_json() + assert resp.get("success") + + for pos in (0, 1): + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/tasks/{uid}/move/to/{pos}", + ) + + +async def test_move_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test exception when moving todo item.""" + + uid = "1aa3137e-ef72-4d1f-91ee-41933602f438" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{uid}/move/to/0", + status=HTTPStatus.NOT_FOUND, + ) + + client = await hass_ws_client() + + data = { + "id": id, + "type": "todo/item/move", + "entity_id": "todo.test_user_to_do_s", + "uid": uid, + } + await client.send_json_auto_id(data) + resp = await client.receive_json() + assert resp.get("success") is False + + +@pytest.mark.parametrize( + ("fixture", "calculated_due_date"), + [ + ("duedate_fixture_1.json", "2024-09-22"), + ("duedate_fixture_2.json", "2024-09-24"), + ("duedate_fixture_3.json", "2024-10-23"), + ("duedate_fixture_4.json", "2024-10-23"), + ("duedate_fixture_5.json", "2024-09-28"), + ("duedate_fixture_6.json", "2024-10-21"), + ("duedate_fixture_7.json", None), + ("duedate_fixture_8.json", None), + ], + ids=[ + "default", + "daily starts on startdate", + "monthly starts on startdate", + "yearly starts on startdate", + "weekly", + "monthly starts on fixed day", + "grey daily", + "empty nextDue", + ], +) +@pytest.mark.usefixtures("set_tz") +async def test_next_due_date( + hass: HomeAssistant, + fixture: str, + calculated_due_date: tuple | None, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test next_due_date calculation.""" + + dailies_entity = "todo.test_user_dailies" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", json=load_json_object_fixture("user.json", DOMAIN) + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json={"data": []}, + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture(fixture, DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + result = await hass.services.async_call( + TODO_DOMAIN, + TodoServices.GET_ITEMS, + {}, + target={ATTR_ENTITY_ID: dailies_entity}, + blocking=True, + return_response=True, + ) + + assert result[dailies_entity]["items"][0].get("due") == calculated_due_date diff --git a/tests/components/hassio/common.py b/tests/components/hassio/common.py index 630368a0a7a..82d3564440b 100644 --- a/tests/components/hassio/common.py +++ b/tests/components/hassio/common.py @@ -3,14 +3,71 @@ 
from __future__ import annotations from collections.abc import Generator +from dataclasses import fields import logging +from types import MethodType from typing import Any -from unittest.mock import DEFAULT, AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch + +from aiohasupervisor.models import ( + AddonsOptions, + AddonsStats, + AddonStage, + InstalledAddonComplete, + Repository, + StoreAddon, + StoreAddonComplete, +) from homeassistant.components.hassio.addon_manager import AddonManager from homeassistant.core import HomeAssistant LOGGER = logging.getLogger(__name__) +INSTALLED_ADDON_FIELDS = [field.name for field in fields(InstalledAddonComplete)] +STORE_ADDON_FIELDS = [field.name for field in fields(StoreAddonComplete)] +ADDONS_STATS_FIELDS = [field.name for field in fields(AddonsStats)] + +MOCK_STORE_ADDONS = [ + StoreAddon( + name="test", + arch=[], + documentation=False, + advanced=False, + available=True, + build=False, + description="Test add-on service", + homeassistant=None, + icon=False, + logo=False, + repository="core", + slug="core_test", + stage=AddonStage.EXPERIMENTAL, + update_available=False, + url="https://example.com/addons/tree/master/test", + version_latest="1.0.0", + version="1.0.0", + installed=True, + ) +] + +MOCK_REPOSITORIES = [ + Repository( + slug="core", + name="Official add-ons", + source="core", + url="https://home-assistant.io/addons", + maintainer="Home Assistant", + ) +] + + +def mock_to_dict(obj: Mock, fields: list[str]) -> dict[str, Any]: + """Aiohasupervisor mocks to dictionary representation.""" + return { + field: getattr(obj, field) + for field in fields + if not isinstance(getattr(obj, field), Mock) + } def mock_addon_manager(hass: HomeAssistant) -> AddonManager: @@ -18,62 +75,64 @@ def mock_addon_manager(hass: HomeAssistant) -> AddonManager: return AddonManager(hass, LOGGER, "Test", "test_addon") -def mock_discovery_info() -> Any: - """Return the discovery info from the supervisor.""" - return DEFAULT - - -def mock_get_addon_discovery_info( - discovery_info: dict[str, Any], discovery_info_side_effect: Any | None -) -> Generator[AsyncMock]: - """Mock get add-on discovery info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", - side_effect=discovery_info_side_effect, - return_value=discovery_info, - ) as get_addon_discovery_info: - yield get_addon_discovery_info - - def mock_addon_store_info( + supervisor_client: AsyncMock, addon_store_info_side_effect: Any | None, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info", - side_effect=addon_store_info_side_effect, - ) as addon_store_info: - addon_store_info.return_value = { - "available": True, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info + supervisor_client.store.addon_info.side_effect = addon_store_info_side_effect + + supervisor_client.store.addon_info.return_value = addon_info = Mock( + spec=StoreAddonComplete, + slug="test", + repository="core", + available=True, + installed=False, + update_available=False, + version="1.0.0", + supervisor_api=False, + supervisor_role="default", + ) + addon_info.name = "test" + addon_info.to_dict = MethodType( + lambda self: mock_to_dict(self, STORE_ADDON_FIELDS), + addon_info, + ) + return supervisor_client.store.addon_info -def mock_addon_info(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: +def mock_addon_info( + 
supervisor_client: AsyncMock, addon_info_side_effect: Any | None +) -> AsyncMock: """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - side_effect=addon_info_side_effect, - ) as addon_info: - addon_info.return_value = { - "available": False, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info + supervisor_client.addons.addon_info.side_effect = addon_info_side_effect + + supervisor_client.addons.addon_info.return_value = addon_info = Mock( + spec=InstalledAddonComplete, + slug="test", + repository="core", + available=False, + hostname="", + options={}, + state="unknown", + update_available=False, + version=None, + supervisor_api=False, + supervisor_role="default", + ) + addon_info.name = "test" + addon_info.to_dict = MethodType( + lambda self: mock_to_dict(self, INSTALLED_ADDON_FIELDS), + addon_info, + ) + return supervisor_client.addons.addon_info def mock_addon_not_installed( addon_store_info: AsyncMock, addon_info: AsyncMock ) -> AsyncMock: """Mock add-on not installed.""" - addon_store_info.return_value["available"] = True + addon_store_info.return_value.available = True return addon_info @@ -81,31 +140,20 @@ def mock_addon_installed( addon_store_info: AsyncMock, addon_info: AsyncMock ) -> AsyncMock: """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-test-addon" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" + addon_store_info.return_value.available = True + addon_store_info.return_value.installed = True + addon_info.return_value.available = True + addon_info.return_value.hostname = "core-test-addon" + addon_info.return_value.state = "stopped" + addon_info.return_value.version = "1.0.0" return addon_info def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> AsyncMock: """Mock add-on already running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-test-addon" - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" + addon_store_info.return_value.available = True + addon_store_info.return_value.installed = True + addon_info.return_value.state = "started" return addon_info @@ -114,110 +162,42 @@ def mock_install_addon_side_effect( ) -> Any | None: """Return the install add-on side effect.""" - async def install_addon(hass: HomeAssistant, slug): + async def install_addon(addon: str): """Mock install add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" + addon_store_info.return_value.available = True + addon_store_info.return_value.installed = True + addon_info.return_value.available = True + addon_info.return_value.state = "stopped" + addon_info.return_value.version = "1.0.0" return install_addon -def mock_install_addon(install_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock install add-on.""" - - with patch( - 
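The rewritten helpers in this hunk replace the old dict payloads with Mock objects spec'd against the aiohasupervisor dataclasses, and bind a to_dict method so callers that serialise the response keep working. A self-contained sketch of that pattern; AddonExample and its fields are hypothetical stand-ins, not part of aiohasupervisor:

from dataclasses import dataclass, fields
from types import MethodType
from typing import Any
from unittest.mock import Mock


@dataclass
class AddonExample:
    """Hypothetical stand-in for an aiohasupervisor response model."""

    slug: str = ""
    version: str = ""
    available: bool = False


ADDON_FIELDS = [field.name for field in fields(AddonExample)]


def mock_to_dict(obj: Mock, field_names: list[str]) -> dict[str, Any]:
    """Return the mocked attributes as a dict, skipping any value that is still a bare Mock."""
    return {
        name: getattr(obj, name)
        for name in field_names
        if not isinstance(getattr(obj, name), Mock)
    }


# spec= restricts attribute access to the model's fields; to_dict is bound to this instance.
addon = Mock(spec=AddonExample, slug="core_test", version="1.0.0")
addon.to_dict = MethodType(lambda self: mock_to_dict(self, ADDON_FIELDS), addon)

# "available" was never given a value, so it is still a child Mock and gets skipped.
assert addon.to_dict() == {"slug": "core_test", "version": "1.0.0"}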
"homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - def mock_start_addon_side_effect( addon_store_info: AsyncMock, addon_info: AsyncMock ) -> Any | None: """Return the start add-on options side effect.""" - async def start_addon(hass: HomeAssistant, slug): + async def start_addon(addon: str) -> None: """Mock start add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "started" + addon_store_info.return_value.available = True + addon_store_info.return_value.installed = True + addon_info.return_value.available = True + addon_info.return_value.state = "started" return start_addon -def mock_start_addon(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon", - side_effect=start_addon_side_effect, - ) as start_addon: - yield start_addon - - -def mock_stop_addon() -> Generator[AsyncMock]: - """Mock stop add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_stop_addon" - ) as stop_addon: - yield stop_addon - - -def mock_restart_addon(restart_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock restart add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_restart_addon", - side_effect=restart_addon_side_effect, - ) as restart_addon: - yield restart_addon - - -def mock_uninstall_addon() -> Generator[AsyncMock]: - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon - - -def mock_addon_options(addon_info: AsyncMock) -> dict[str, Any]: - """Mock add-on options.""" - return addon_info.return_value["options"] - - def mock_set_addon_options_side_effect(addon_options: dict[str, Any]) -> Any | None: """Return the set add-on options side effect.""" - async def set_addon_options(hass: HomeAssistant, slug: str, options: dict) -> None: + async def set_addon_options(slug: str, options: AddonsOptions) -> None: """Mock set add-on options.""" - addon_options.update(options["options"]) + addon_options.update(options.config) return set_addon_options -def mock_set_addon_options( - set_addon_options_side_effect: Any | None, -) -> Generator[AsyncMock]: - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options", - side_effect=set_addon_options_side_effect, - ) as set_options: - yield set_options - - def mock_create_backup() -> Generator[AsyncMock]: """Mock create backup.""" with patch( @@ -226,9 +206,21 @@ def mock_create_backup() -> Generator[AsyncMock]: yield create_backup -def mock_update_addon() -> Generator[AsyncMock]: - """Mock update add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_update_addon" - ) as update_addon: - yield update_addon +def mock_addon_stats(supervisor_client: AsyncMock) -> AsyncMock: + """Mock addon stats.""" + supervisor_client.addons.addon_stats.return_value = addon_stats = Mock( + spec=AddonsStats, + cpu_percent=0.99, + memory_usage=182611968, + memory_limit=3977146368, + memory_percent=4.59, + network_rx=362570232, + network_tx=82374138, + blk_read=46010945536, + blk_write=15051526144, + ) + addon_stats.to_dict = MethodType( + lambda self: 
mock_to_dict(self, ADDONS_STATS_FIELDS), + addon_stats, + ) + return supervisor_client.addons.addon_stats diff --git a/tests/components/hassio/conftest.py b/tests/components/hassio/conftest.py index db1a07c4df3..7075b9d6982 100644 --- a/tests/components/hassio/conftest.py +++ b/tests/components/hassio/conftest.py @@ -3,8 +3,9 @@ from collections.abc import Generator import os import re -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch +from aiohasupervisor.models import AddonsStats, AddonState from aiohttp.test_utils import TestClient import pytest @@ -31,14 +32,10 @@ def disable_security_filter() -> Generator[None]: @pytest.fixture -def hassio_env() -> Generator[None]: +def hassio_env(supervisor_is_connected: AsyncMock) -> Generator[None]: """Fixture to inject hassio env.""" with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value={"result": "ok", "data": {}}, - ), patch.dict(os.environ, {"SUPERVISOR_TOKEN": SUPERVISOR_TOKEN}), patch( "homeassistant.components.hassio.HassIO.get_info", @@ -54,6 +51,7 @@ def hassio_stubs( hass: HomeAssistant, hass_client: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> RefreshToken: """Create mock hassio http client.""" with ( @@ -76,9 +74,6 @@ def hassio_stubs( patch( "homeassistant.components.hassio.issues.SupervisorIssues.setup", ), - patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ), ): hass.set_state(CoreState.starting) hass.loop.run_until_complete(async_setup_component(hass, "hassio", {})) @@ -129,7 +124,12 @@ def hassio_handler( @pytest.fixture def all_setup_requests( - aioclient_mock: AiohttpClientMocker, request: pytest.FixtureRequest + aioclient_mock: AiohttpClientMocker, + request: pytest.FixtureRequest, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_changelog: AsyncMock, + addon_stats: AsyncMock, ) -> None: """Mock all setup requests.""" include_addons = hasattr(request, "param") and request.param.get( @@ -137,7 +137,6 @@ def all_setup_requests( ) aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -150,13 +149,6 @@ def all_setup_requests( }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -225,46 +217,32 @@ def all_setup_requests( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={ - "result": "ok", - "data": { - "name": "test", - "slug": "test", - "update_available": False, - "version": "1.0.0", - "version_latest": "1.0.0", - "repository": "core", - "state": "started", - "icon": False, - "url": "https://github.com/home-assistant/addons/test", - "auto_update": True, - }, - }, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={ - "result": "ok", - "data": { - "name": "test2", - "slug": 
"test2", - "update_available": False, - "version": "1.0.0", - "version_latest": "1.0.0", - "repository": "core", - "state": "started", - "icon": False, - "url": "https://github.com", - "auto_update": False, - }, - }, - ) + addon_installed.return_value.update_available = False + addon_installed.return_value.version = "1.0.0" + addon_installed.return_value.version_latest = "1.0.0" + addon_installed.return_value.repository = "core" + addon_installed.return_value.state = AddonState.STARTED + addon_installed.return_value.icon = False + + def mock_addon_info(slug: str): + if slug == "test": + addon_installed.return_value.name = "test" + addon_installed.return_value.slug = "test" + addon_installed.return_value.url = ( + "https://github.com/home-assistant/addons/test" + ) + addon_installed.return_value.auto_update = True + else: + addon_installed.return_value.name = "test2" + addon_installed.return_value.slug = "test2" + addon_installed.return_value.url = "https://github.com" + addon_installed.return_value.auto_update = False + + return addon_installed.return_value + + addon_installed.side_effect = mock_addon_info + aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -297,38 +275,32 @@ def all_setup_requests( }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) - aioclient_mock.get( - "http://127.0.0.1/addons/test2/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.8, - "memory_usage": 51941376, - "memory_limit": 3977146368, - "memory_percent": 1.31, - "network_rx": 31338284, - "network_tx": 15692900, - "blk_read": 740077568, - "blk_write": 6004736, - }, - }, - ) + + async def mock_addon_stats(addon: str) -> AddonsStats: + """Mock addon stats for test and test2.""" + if addon == "test2": + return AddonsStats( + cpu_percent=0.8, + memory_usage=51941376, + memory_limit=3977146368, + memory_percent=1.31, + network_rx=31338284, + network_tx=15692900, + blk_read=740077568, + blk_write=6004736, + ) + return AddonsStats( + cpu_percent=0.99, + memory_usage=182611968, + memory_limit=3977146368, + memory_percent=4.59, + network_rx=362570232, + network_tx=82374138, + blk_read=46010945536, + blk_write=15051526144, + ) + + addon_stats.side_effect = mock_addon_stats aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_addon_manager.py b/tests/components/hassio/test_addon_manager.py index 4cb57e5b8d8..3d4644fbfd9 100644 --- a/tests/components/hassio/test_addon_manager.py +++ b/tests/components/hassio/test_addon_manager.py @@ -5,7 +5,10 @@ from __future__ import annotations import asyncio from typing import Any from unittest.mock import AsyncMock, call +from uuid import uuid4 +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsOptions, Discovery import pytest from homeassistant.components.hassio.addon_manager import ( @@ -42,8 +45,8 @@ async def test_not_available_raises_exception( addon_info: AsyncMock, ) -> None: """Test addon not available raises exception.""" - addon_store_info.return_value["available"] = False - addon_info.return_value["available"] = False + addon_store_info.return_value.available = False + addon_info.return_value.available = False with pytest.raises(AddonError) as err: await 
addon_manager.async_install_addon() @@ -60,7 +63,11 @@ async def test_get_addon_discovery_info( addon_manager: AddonManager, get_addon_discovery_info: AsyncMock ) -> None: """Test get addon discovery info.""" - get_addon_discovery_info.return_value = {"config": {"test_key": "test"}} + get_addon_discovery_info.return_value = [ + Discovery( + addon="test_addon", service="", uuid=uuid4(), config={"test_key": "test"} + ) + ] assert await addon_manager.async_get_addon_discovery_info() == {"test_key": "test"} @@ -71,8 +78,6 @@ async def test_missing_addon_discovery_info( addon_manager: AddonManager, get_addon_discovery_info: AsyncMock ) -> None: """Test missing addon discovery info.""" - get_addon_discovery_info.return_value = None - with pytest.raises(AddonError): await addon_manager.async_get_addon_discovery_info() @@ -83,7 +88,7 @@ async def test_get_addon_discovery_info_error( addon_manager: AddonManager, get_addon_discovery_info: AsyncMock ) -> None: """Test get addon discovery info raises error.""" - get_addon_discovery_info.side_effect = HassioAPIError("Boom") + get_addon_discovery_info.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: assert await addon_manager.async_get_addon_discovery_info() @@ -118,7 +123,7 @@ async def test_get_addon_info( addon_state: AddonState, ) -> None: """Test get addon info when addon is installed.""" - addon_installed.return_value["state"] = addon_info_state + addon_installed.return_value.state = addon_info_state assert await addon_manager.async_get_addon_info() == AddonInfo( available=True, hostname="core-test-addon", @@ -136,7 +141,7 @@ async def test_get_addon_info( "addon_store_info_error", "addon_store_info_calls", ), - [(HassioAPIError("Boom"), 1, None, 1), (None, 0, HassioAPIError("Boom"), 1)], + [(SupervisorError("Boom"), 1, None, 1), (None, 0, SupervisorError("Boom"), 1)], ) async def test_get_addon_info_error( addon_manager: AddonManager, @@ -169,7 +174,7 @@ async def test_set_addon_options( assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - hass, "test_addon", {"options": {"test_key": "test"}} + "test_addon", AddonsOptions(config={"test_key": "test"}) ) @@ -177,7 +182,7 @@ async def test_set_addon_options_error( hass: HomeAssistant, addon_manager: AddonManager, set_addon_options: AsyncMock ) -> None: """Test set addon options raises error.""" - set_addon_options.side_effect = HassioAPIError("Boom") + set_addon_options.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_set_addon_options({"test_key": "test"}) @@ -186,7 +191,7 @@ async def test_set_addon_options_error( assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - hass, "test_addon", {"options": {"test_key": "test"}} + "test_addon", AddonsOptions(config={"test_key": "test"}) ) @@ -197,8 +202,8 @@ async def test_install_addon( addon_info: AsyncMock, ) -> None: """Test install addon.""" - addon_store_info.return_value["available"] = True - addon_info.return_value["available"] = True + addon_store_info.return_value.available = True + addon_info.return_value.available = True await addon_manager.async_install_addon() @@ -212,9 +217,9 @@ async def test_install_addon_error( addon_info: AsyncMock, ) -> None: """Test install addon raises error.""" - addon_store_info.return_value["available"] = True - addon_info.return_value["available"] = True - install_addon.side_effect = HassioAPIError("Boom") + addon_store_info.return_value.available = True + 
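In the conftest hunk above, the per-URL /addons/<slug>/info and /addons/<slug>/stats routes are replaced by side_effect callables on the shared aiohasupervisor mocks, so a single mock answers for both test add-ons. A minimal stand-alone sketch of that dispatch idea using only unittest.mock; the payload dicts are illustrative rather than real aiohasupervisor models:

import asyncio
from unittest.mock import AsyncMock


def make_addon_stats_mock() -> AsyncMock:
    """Return an AsyncMock whose reply depends on the requested add-on slug."""

    async def addon_stats(addon: str) -> dict[str, float]:
        # Dispatch on the slug, mirroring how one mock can serve several add-ons.
        if addon == "test2":
            return {"cpu_percent": 0.8, "memory_percent": 1.31}
        return {"cpu_percent": 0.99, "memory_percent": 4.59}

    return AsyncMock(side_effect=addon_stats)


async def main() -> None:
    stats = make_addon_stats_mock()
    assert (await stats("test"))["cpu_percent"] == 0.99
    assert (await stats("test2"))["cpu_percent"] == 0.8
    stats.assert_awaited_with("test2")


asyncio.run(main())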
addon_info.return_value.available = True + install_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_install_addon() @@ -265,7 +270,7 @@ async def test_schedule_install_addon_error( install_addon: AsyncMock, ) -> None: """Test schedule install addon raises error.""" - install_addon.side_effect = HassioAPIError("Boom") + install_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_schedule_install_addon() @@ -282,7 +287,7 @@ async def test_schedule_install_addon_logs_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test schedule install addon logs error.""" - install_addon.side_effect = HassioAPIError("Boom") + install_addon.side_effect = SupervisorError("Boom") await addon_manager.async_schedule_install_addon(catch_error=True) @@ -303,7 +308,7 @@ async def test_uninstall_addon_error( addon_manager: AddonManager, uninstall_addon: AsyncMock ) -> None: """Test uninstall addon raises error.""" - uninstall_addon.side_effect = HassioAPIError("Boom") + uninstall_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_uninstall_addon() @@ -324,7 +329,7 @@ async def test_start_addon_error( addon_manager: AddonManager, start_addon: AsyncMock ) -> None: """Test start addon raises error.""" - start_addon.side_effect = HassioAPIError("Boom") + start_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_start_addon() @@ -366,7 +371,7 @@ async def test_schedule_start_addon_error( start_addon: AsyncMock, ) -> None: """Test schedule start addon raises error.""" - start_addon.side_effect = HassioAPIError("Boom") + start_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_schedule_start_addon() @@ -383,7 +388,7 @@ async def test_schedule_start_addon_logs_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test schedule start addon logs error.""" - start_addon.side_effect = HassioAPIError("Boom") + start_addon.side_effect = SupervisorError("Boom") await addon_manager.async_schedule_start_addon(catch_error=True) @@ -404,7 +409,7 @@ async def test_restart_addon_error( addon_manager: AddonManager, restart_addon: AsyncMock ) -> None: """Test restart addon raises error.""" - restart_addon.side_effect = HassioAPIError("Boom") + restart_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_restart_addon() @@ -446,7 +451,7 @@ async def test_schedule_restart_addon_error( restart_addon: AsyncMock, ) -> None: """Test schedule restart addon raises error.""" - restart_addon.side_effect = HassioAPIError("Boom") + restart_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_schedule_restart_addon() @@ -463,7 +468,7 @@ async def test_schedule_restart_addon_logs_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test schedule restart addon logs error.""" - restart_addon.side_effect = HassioAPIError("Boom") + restart_addon.side_effect = SupervisorError("Boom") await addon_manager.async_schedule_restart_addon(catch_error=True) @@ -482,7 +487,7 @@ async def test_stop_addon_error( addon_manager: AddonManager, stop_addon: AsyncMock ) -> None: """Test stop addon raises error.""" - stop_addon.side_effect = HassioAPIError("Boom") + stop_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await 
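The assertions above compare recorded call args against option models directly, e.g. call("test_addon", AddonsOptions(config={...})); that only works because those models compare by value. A tiny illustration with a plain Mock and a hypothetical FakeOptions dataclass standing in for the aiohasupervisor type:

from dataclasses import dataclass, field
from unittest.mock import Mock, call


@dataclass
class FakeOptions:
    """Hypothetical stand-in for an options model that compares by value."""

    config: dict[str, str] = field(default_factory=dict)


set_options = Mock()
set_options("test_addon", FakeOptions(config={"test_key": "test"}))

# Dataclass equality lets two separately built option objects match in call_args.
assert set_options.call_args == call("test_addon", FakeOptions(config={"test_key": "test"}))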
addon_manager.async_stop_addon() @@ -501,7 +506,7 @@ async def test_update_addon( update_addon: AsyncMock, ) -> None: """Test update addon.""" - addon_info.return_value["update_available"] = True + addon_info.return_value.update_available = True await addon_manager.async_update_addon() @@ -521,7 +526,7 @@ async def test_update_addon_no_update( update_addon: AsyncMock, ) -> None: """Test update addon without update available.""" - addon_info.return_value["update_available"] = False + addon_info.return_value.update_available = False await addon_manager.async_update_addon() @@ -539,8 +544,8 @@ async def test_update_addon_error( update_addon: AsyncMock, ) -> None: """Test update addon raises error.""" - addon_info.return_value["update_available"] = True - update_addon.side_effect = HassioAPIError("Boom") + addon_info.return_value.update_available = True + update_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_update_addon() @@ -564,7 +569,7 @@ async def test_schedule_update_addon( update_addon: AsyncMock, ) -> None: """Test schedule update addon.""" - addon_info.return_value["update_available"] = True + addon_info.return_value.update_available = True update_task = addon_manager.async_schedule_update_addon() @@ -619,7 +624,7 @@ async def test_schedule_update_addon( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, "Failed to update the Test add-on: Boom", ), @@ -637,7 +642,7 @@ async def test_schedule_update_addon_error( error_message: str, ) -> None: """Test schedule update addon raises error.""" - addon_installed.return_value["update_available"] = True + addon_installed.return_value.update_available = True create_backup.side_effect = create_backup_error update_addon.side_effect = update_addon_error @@ -669,7 +674,7 @@ async def test_schedule_update_addon_error( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, "Failed to update the Test add-on: Boom", ), @@ -688,7 +693,7 @@ async def test_schedule_update_addon_logs_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test schedule update addon logs error.""" - addon_installed.return_value["update_available"] = True + addon_installed.return_value.update_available = True create_backup.side_effect = create_backup_error update_addon.side_effect = update_addon_error @@ -789,7 +794,7 @@ async def test_schedule_install_setup_addon( ), [ ( - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -800,7 +805,7 @@ async def test_schedule_install_setup_addon( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -811,7 +816,7 @@ async def test_schedule_install_setup_addon( 1, None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, "Failed to start the Test add-on: Boom", ), @@ -858,7 +863,7 @@ async def test_schedule_install_setup_addon_error( ), [ ( - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -869,7 +874,7 @@ async def test_schedule_install_setup_addon_error( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -880,7 +885,7 @@ async def test_schedule_install_setup_addon_error( 1, None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, "Failed to start the Test add-on: Boom", ), @@ -955,7 +960,7 @@ async def test_schedule_setup_addon( ), [ ( - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -964,7 +969,7 @@ async def test_schedule_setup_addon( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, "Failed to start the Test 
add-on: Boom", ), @@ -1004,7 +1009,7 @@ async def test_schedule_setup_addon_error( ), [ ( - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -1013,7 +1018,7 @@ async def test_schedule_setup_addon_error( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, "Failed to start the Test add-on: Boom", ), diff --git a/tests/components/hassio/test_addon_panel.py b/tests/components/hassio/test_addon_panel.py index f7407152f7e..2c3552c8d08 100644 --- a/tests/components/hassio/test_addon_panel.py +++ b/tests/components/hassio/test_addon_panel.py @@ -1,7 +1,7 @@ """Test add-on panel.""" from http import HTTPStatus -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -13,10 +13,11 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, supervisor_is_connected: AsyncMock +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/homeassistant/info", diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py new file mode 100644 index 00000000000..75cc049f7b5 --- /dev/null +++ b/tests/components/hassio/test_backup.py @@ -0,0 +1,1023 @@ +"""Test supervisor backup functionality.""" + +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Callable, + Coroutine, + Generator, +) +from dataclasses import replace +from datetime import datetime +from io import StringIO +import os +from typing import Any +from unittest.mock import AsyncMock, Mock, patch + +from aiohasupervisor.exceptions import ( + SupervisorBadRequestError, + SupervisorNotFoundError, +) +from aiohasupervisor.models import ( + backups as supervisor_backups, + mounts as supervisor_mounts, +) +from aiohasupervisor.models.mounts import MountsInfo +import pytest + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + BackupAgent, + BackupAgentPlatformProtocol, + Folder, +) +from homeassistant.components.hassio.backup import LOCATION_CLOUD_BACKUP +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .test_init import MOCK_ENVIRON + +from tests.common import mock_platform +from tests.typing import ClientSessionGenerator, WebSocketGenerator + +TEST_BACKUP = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + folders=["share"], + homeassistant=True, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location=None, + locations={None}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP.compressed, + date=TEST_BACKUP.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=False, + homeassistant="2024.12.0", + location=TEST_BACKUP.location, + locations=TEST_BACKUP.locations, + name=TEST_BACKUP.name, + protected=TEST_BACKUP.protected, + repositories=[], + 
size=TEST_BACKUP.size, + size_bytes=TEST_BACKUP.size_bytes, + slug=TEST_BACKUP.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP.type, +) + +TEST_BACKUP_2 = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + folders=["share"], + homeassistant=False, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location=None, + locations={None}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS_2 = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP_2.compressed, + date=TEST_BACKUP_2.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=False, + homeassistant=None, + location=TEST_BACKUP_2.location, + locations=TEST_BACKUP_2.locations, + name=TEST_BACKUP_2.name, + protected=TEST_BACKUP_2.protected, + repositories=[], + size=TEST_BACKUP_2.size, + size_bytes=TEST_BACKUP_2.size_bytes, + slug=TEST_BACKUP_2.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP_2.type, +) + +TEST_BACKUP_3 = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + folders=["share"], + homeassistant=True, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location="share", + locations={"share"}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS_3 = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP_3.compressed, + date=TEST_BACKUP_3.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=False, + homeassistant=None, + location=TEST_BACKUP_3.location, + locations=TEST_BACKUP_3.locations, + name=TEST_BACKUP_3.name, + protected=TEST_BACKUP_3.protected, + repositories=[], + size=TEST_BACKUP_3.size, + size_bytes=TEST_BACKUP_3.size_bytes, + slug=TEST_BACKUP_3.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP_3.type, +) + + +@pytest.fixture(autouse=True) +def fixture_supervisor_environ() -> Generator[None]: + """Mock os environ for supervisor.""" + with patch.dict(os.environ, MOCK_ENVIRON): + yield + + +@pytest.fixture(autouse=True) +async def hassio_enabled( + hass: HomeAssistant, supervisor_client: AsyncMock +) -> AsyncGenerator[None]: + """Enable hassio.""" + with ( + patch("homeassistant.components.backup.is_hassio", return_value=True), + patch("homeassistant.components.backup.backup.is_hassio", return_value=True), + ): + yield + + +@pytest.fixture +async def setup_integration( + hass: HomeAssistant, hassio_enabled: None, supervisor_client: AsyncMock +) -> AsyncGenerator[None]: + """Set up Backup integration.""" + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + await hass.async_block_till_done() + + +class BackupAgentTest(BackupAgent): + """Test backup agent.""" + + domain = "test" + + def __init__(self, name: str) -> None: + """Initialize the backup agent.""" + self.name = name + + async def async_download_backup( + self, backup_id: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + return AsyncMock(spec_set=["__aiter__"]) + + async def async_upload_backup( + self, + *, + open_stream: 
Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + await open_stream() + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + return [] + + async def async_get_backup( + self, backup_id: str, **kwargs: Any + ) -> AgentBackup | None: + """Return a backup.""" + return None + + async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: + """Delete a backup file.""" + + +async def _setup_backup_platform( + hass: HomeAssistant, + *, + domain: str, + platform: BackupAgentPlatformProtocol, +) -> None: + """Set up a mock domain.""" + mock_platform(hass, f"{domain}.backup", platform) + assert await async_setup_component(hass, domain, {}) + await hass.async_block_till_done() + + +@pytest.mark.usefixtures("hassio_client") +@pytest.mark.parametrize( + ("mounts", "expected_agents"), + [ + (MountsInfo(default_backup_mount=None, mounts=[]), ["hassio.local"]), + ( + MountsInfo( + default_backup_mount=None, + mounts=[ + supervisor_mounts.CIFSMountResponse( + share="test", + name="test", + read_only=False, + state=supervisor_mounts.MountState.ACTIVE, + user_path="test", + usage=supervisor_mounts.MountUsage.BACKUP, + server="test", + type=supervisor_mounts.MountType.CIFS, + ) + ], + ), + ["hassio.local", "hassio.test"], + ), + ( + MountsInfo( + default_backup_mount=None, + mounts=[ + supervisor_mounts.CIFSMountResponse( + share="test", + name="test", + read_only=False, + state=supervisor_mounts.MountState.ACTIVE, + user_path="test", + usage=supervisor_mounts.MountUsage.MEDIA, + server="test", + type=supervisor_mounts.MountType.CIFS, + ) + ], + ), + ["hassio.local"], + ), + ], +) +async def test_agent_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + mounts: MountsInfo, + expected_agents: list[str], +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + supervisor_client.mounts.info.return_value = mounts + + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": agent_id} for agent_id in expected_agents], + } + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("backup", "backup_details", "expected_response"), + [ + ( + TEST_BACKUP, + TEST_BACKUP_DETAILS, + { + "addons": [ + {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} + ], + "agent_ids": ["hassio.local"], + "backup_id": "abc123", + "database_included": True, + "date": "1970-01-01T00:00:00+00:00", + "failed_agent_ids": [], + "folders": ["share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + "size": 1048576, + "with_strategy_settings": False, + }, + ), + ( + TEST_BACKUP_2, + TEST_BACKUP_DETAILS_2, + { + "addons": [ + {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} + ], + "agent_ids": ["hassio.local"], + "backup_id": "abc123", + "database_included": False, + "date": "1970-01-01T00:00:00+00:00", + "failed_agent_ids": [], + "folders": ["share"], + "homeassistant_included": False, + "homeassistant_version": None, + "name": "Test", + "protected": False, + "size": 1048576, + "with_strategy_settings": False, + }, + ), + ], +) +async def test_agent_list_backups( + hass: 
HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + backup: supervisor_backups.Backup, + backup_details: supervisor_backups.BackupComplete, + expected_response: dict[str, Any], +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + supervisor_client.backups.list.return_value = [backup, TEST_BACKUP_3] + supervisor_client.backups.backup_info.return_value = backup_details + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"]["backups"] == [expected_response] + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_agent_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "abc123" + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + supervisor_client.backups.download_backup.return_value.__aiter__.return_value = ( + iter((b"backup data",)) + ) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=hassio.local") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + supervisor_client.backups.download_backup.assert_called_once_with( + "abc123", options=supervisor_backups.DownloadBackupOptions(location=None) + ) + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_agent_download_unavailable_backup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "abc123" + supervisor_client.backups.list.return_value = [TEST_BACKUP_3] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_3 + supervisor_client.backups.download_backup.return_value.__aiter__.return_value = ( + iter((b"backup data",)) + ) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=hassio.local") + assert resp.status == 404 + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_agent_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent upload backup.""" + client = await hass_client() + backup_id = "test-backup" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + + supervisor_client.backups.reload.assert_not_called() + with ( + patch("pathlib.Path.mkdir"), + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("shutil.copy"), + ): + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=hassio.local", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + supervisor_client.backups.reload.assert_not_called() + 
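The download tests above make the mocked backups.download_backup stream async-iterable by assigning to __aiter__.return_value. A self-contained sketch of that MagicMock feature; the chunk values here are placeholders:

import asyncio
from unittest.mock import MagicMock


async def read_all(stream) -> bytes:
    """Collect every chunk yielded by an async-iterable stream."""
    return b"".join([chunk async for chunk in stream])


# MagicMock implements the async-iteration protocol; __aiter__.return_value
# controls which chunks iteration will yield.
download = MagicMock()
download.__aiter__.return_value = iter((b"backup ", b"data"))

assert asyncio.run(read_all(download)) == b"backup data"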
supervisor_client.backups.download_backup.assert_not_called() + supervisor_client.backups.remove_backup.assert_not_called() + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_agent_delete_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abc123" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}} + supervisor_client.backups.remove_backup.assert_called_once_with( + backup_id, options=supervisor_backups.RemoveBackupOptions(location={None}) + ) + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("remove_side_effect", "expected_response"), + [ + ( + SupervisorBadRequestError("blah"), + { + "success": False, + "error": {"code": "unknown_error", "message": "Unknown error"}, + }, + ), + ( + SupervisorBadRequestError("Backup does not exist"), + { + "success": True, + "result": {"agent_errors": {}}, + }, + ), + ( + SupervisorNotFoundError(), + { + "success": True, + "result": {"agent_errors": {}}, + }, + ), + ], +) +async def test_agent_delete_with_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + remove_side_effect: Exception, + expected_response: dict[str, Any], +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abc123" + + supervisor_client.backups.remove_backup.side_effect = remove_side_effect + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response == {"id": 1, "type": "result"} | expected_response + supervisor_client.backups.remove_backup.assert_called_once_with( + backup_id, options=supervisor_backups.RemoveBackupOptions(location={None}) + ) + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("event_data", "mount_info_calls"), + [ + ( + { + "event": "job", + "data": {"name": "mount_manager_create_mount", "done": True}, + }, + 1, + ), + ( + { + "event": "job", + "data": {"name": "mount_manager_create_mount", "done": False}, + }, + 0, + ), + ( + { + "event": "job", + "data": {"name": "mount_manager_remove_mount", "done": True}, + }, + 1, + ), + ( + { + "event": "job", + "data": {"name": "mount_manager_remove_mount", "done": False}, + }, + 0, + ), + ({"event": "job", "data": {"name": "other_job", "done": True}}, 0), + ( + { + "event": "other_event", + "data": {"name": "mount_manager_remove_mount", "done": True}, + }, + 0, + ), + ], +) +async def test_agents_notify_on_mount_added_removed( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + event_data: dict[str, Any], + mount_info_calls: int, +) -> None: + """Test the listener is called when mounts are added or removed.""" + client = await hass_ws_client(hass) + assert supervisor_client.mounts.info.call_count == 1 + assert supervisor_client.mounts.info.call_args[0] == () + supervisor_client.mounts.info.reset_mock() + + await client.send_json_auto_id({"type": "supervisor/event", "data": event_data}) + response = await client.receive_json() + assert response["success"] + await hass.async_block_till_done() + assert supervisor_client.mounts.info.call_count == 
mount_info_calls + + +DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( + addons=None, + background=True, + compressed=True, + folders=None, + homeassistant_exclude_database=False, + homeassistant=True, + location=[None], + name="Test", + password=None, +) + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("extra_generate_options", "expected_supervisor_options"), + [ + ( + {}, + DEFAULT_BACKUP_OPTIONS, + ), + ( + {"include_addons": ["addon_1", "addon_2"]}, + replace(DEFAULT_BACKUP_OPTIONS, addons={"addon_1", "addon_2"}), + ), + ( + {"include_all_addons": True}, + replace(DEFAULT_BACKUP_OPTIONS, addons="all"), + ), + ( + {"include_database": False}, + replace(DEFAULT_BACKUP_OPTIONS, homeassistant_exclude_database=True), + ), + ( + {"include_folders": ["media", "share"]}, + replace(DEFAULT_BACKUP_OPTIONS, folders={"media", "share"}), + ), + ( + {"include_folders": ["media"], "include_homeassistant": False}, + replace(DEFAULT_BACKUP_OPTIONS, folders={"media"}, homeassistant=False), + ), + ], +) +async def test_reader_writer_create( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + extra_generate_options: dict[str, Any], + expected_supervisor_options: supervisor_backups.PartialBackupOptions, +) -> None: + """Test generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + | extra_generate_options + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"backup_job_id": "abc123"} + + supervisor_client.backups.partial_backup.assert_called_once_with( + expected_supervisor_options + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123", "reference": "test_slug"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": "upload_to_agents", + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "completed", + } + + supervisor_client.backups.download_backup.assert_not_called() + supervisor_client.backups.remove_backup.assert_not_called() + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_reader_writer_create_remote_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test generating a backup which will be uploaded to a remote agent.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + remote_agent = 
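The parametrization above derives each expected PartialBackupOptions variant from DEFAULT_BACKUP_OPTIONS with dataclasses.replace, so only the overridden fields are spelled out per case. The same technique with a hypothetical stand-in model:

from dataclasses import dataclass, replace


@dataclass(frozen=True)
class BackupOptionsExample:
    """Hypothetical stand-in for a backup options model."""

    name: str
    compressed: bool = True
    addons: frozenset[str] | None = None


DEFAULTS = BackupOptionsExample(name="Test")

# Only the overridden field changes; every other field is copied from DEFAULTS.
with_addons = replace(DEFAULTS, addons=frozenset({"addon_1", "addon_2"}))

assert with_addons.name == "Test"
assert with_addons.compressed is True
assert with_addons.addons == {"addon_1", "addon_2"}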
BackupAgentTest("remote") + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["test.remote"], "name": "Test"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"backup_job_id": "abc123"} + + supervisor_client.backups.partial_backup.assert_called_once_with( + replace(DEFAULT_BACKUP_OPTIONS, location=LOCATION_CLOUD_BACKUP), + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123", "reference": "test_slug"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": "upload_to_agents", + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "completed", + } + + supervisor_client.backups.download_backup.assert_called_once_with("test_slug") + supervisor_client.backups.remove_backup.assert_called_once_with( + "test_slug", + options=supervisor_backups.RemoveBackupOptions({LOCATION_CLOUD_BACKUP}), + ) + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("extra_generate_options"), + [ + {"include_homeassistant": False}, + ], +) +async def test_reader_writer_create_wrong_parameters( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + extra_generate_options: dict[str, Any], +) -> None: + """Test generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + | extra_generate_options + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "failed", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "idle", + } + + response = await client.receive_json() + assert not response["success"] + assert response["error"] == {"code": "unknown_error", "message": "Unknown error"} + + supervisor_client.backups.partial_backup.assert_not_called() + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_agent_receive_remote_backup( 
+ hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test receiving a backup which will be uploaded to a remote agent.""" + client = await hass_client() + backup_id = "test-backup" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + supervisor_client.backups.upload_backup.return_value = "test_slug" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + + remote_agent = BackupAgentTest("remote") + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + supervisor_client.backups.reload.assert_not_called() + with ( + patch("pathlib.Path.mkdir"), + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("shutil.copy"), + ): + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=test.remote", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + + supervisor_client.backups.download_backup.assert_called_once_with("test_slug") + supervisor_client.backups.remove_backup.assert_called_once_with( + "test_slug", + options=supervisor_backups.RemoveBackupOptions({LOCATION_CLOUD_BACKUP}), + ) + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_reader_writer_restore( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test restoring a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_restore.return_value.job_id = "abc123" + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/restore", "agent_id": "hassio.local", "backup_id": "abc123"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "restore_backup", + "stage": None, + "state": "in_progress", + } + + supervisor_client.backups.partial_restore.assert_called_once_with( + "abc123", + supervisor_backups.PartialRestoreOptions( + addons=None, + background=True, + folders=None, + homeassistant=True, + location=None, + password=None, + ), + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + + response = await client.receive_json() + assert response["success"] + assert response["result"] is None + + +@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ( + {"restore_database": False}, + "Cannot restore 
Home Assistant without database", + ), + ( + {"restore_homeassistant": False}, + "Cannot restore database without Home Assistant", + ), + ], +) +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_reader_writer_restore_wrong_parameters( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + parameters: dict[str, Any], + expected_error: str, +) -> None: + """Test trigger restore.""" + client = await hass_ws_client(hass) + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + default_parameters = { + "type": "backup/restore", + "agent_id": "hassio.local", + "backup_id": "abc123", + } + + await client.send_json_auto_id(default_parameters | parameters) + response = await client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": expected_error, + } diff --git a/tests/components/hassio/test_binary_sensor.py b/tests/components/hassio/test_binary_sensor.py index af72ea9d702..9878dd67a21 100644 --- a/tests/components/hassio/test_binary_sensor.py +++ b/tests/components/hassio/test_binary_sensor.py @@ -1,7 +1,7 @@ """The tests for the hassio binary sensors.""" import os -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -10,6 +10,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component +from .common import MOCK_REPOSITORIES, MOCK_STORE_ADDONS + from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -17,10 +19,16 @@ MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_changelog: AsyncMock, + addon_stats: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -33,13 +41,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -105,22 +106,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -153,33 +138,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: }, }, ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - 
aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -193,20 +154,26 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: @pytest.mark.parametrize( - ("entity_id", "expected"), + ("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)] +) +@pytest.mark.parametrize( + ("entity_id", "expected", "addon_state"), [ - ("binary_sensor.test_running", "on"), - ("binary_sensor.test2_running", "off"), + ("binary_sensor.test_running", "on", "started"), + ("binary_sensor.test2_running", "off", "stopped"), ], ) async def test_binary_sensor( hass: HomeAssistant, - entity_id, - expected, + entity_id: str, + expected: str, + addon_state: str, aioclient_mock: AiohttpClientMocker, entity_registry: er.EntityRegistry, + addon_installed: AsyncMock, ) -> None: """Test hassio OS and addons binary sensor.""" + addon_installed.return_value.state = addon_state config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) diff --git a/tests/components/hassio/test_config_flow.py b/tests/components/hassio/test_config_flow.py index 1153203817d..48c1a06f81e 100644 --- a/tests/components/hassio/test_config_flow.py +++ b/tests/components/hassio/test_config_flow.py @@ -38,4 +38,4 @@ async def test_multiple_entries(hass: HomeAssistant) -> None: DOMAIN, context={"source": "system"} ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/hassio/test_diagnostics.py b/tests/components/hassio/test_diagnostics.py index 0d648ba9bdb..c95cde67b8a 100644 --- a/tests/components/hassio/test_diagnostics.py +++ b/tests/components/hassio/test_diagnostics.py @@ -1,7 +1,7 @@ """Test Supervisor diagnostics.""" import os -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -18,10 +18,16 @@ MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -34,13 +40,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -110,22 +109,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - 
json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -158,33 +141,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: }, }, ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_discovery.py b/tests/components/hassio/test_discovery.py index 305b863b3af..ba6338f84e2 100644 --- a/tests/components/hassio/test_discovery.py +++ b/tests/components/hassio/test_discovery.py @@ -3,19 +3,28 @@ from collections.abc import Generator from http import HTTPStatus from unittest.mock import AsyncMock, Mock, patch +from uuid import uuid4 +from aiohasupervisor.models import Discovery from aiohttp.test_utils import TestClient import pytest from homeassistant import config_entries -from homeassistant.components.hassio.discovery import HassioServiceInfo from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant +from homeassistant.helpers.discovery_flow import DiscoveryKey +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.setup import async_setup_component -from tests.common import MockModule, mock_config_flow, mock_integration, mock_platform +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, +) from tests.test_util.aiohttp import AiohttpClientMocker @@ -41,44 +50,34 @@ def mock_mqtt_fixture( @pytest.mark.usefixtures("hassio_client") async def test_hassio_discovery_startup( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, mock_mqtt: type[config_entries.ConfigFlow], + addon_installed: AsyncMock, + get_addon_discovery_info: AsyncMock, ) -> None: """Test startup and discovery after event.""" - aioclient_mock.get( - "http://127.0.0.1/discovery", - json={ - "result": "ok", - "data": { - "discovery": [ - { - "service": "mqtt", - "uuid": "test", - "addon": "mosquitto", - "config": { - "broker": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", - }, - } - ] + get_addon_discovery_info.return_value = [ + Discovery( + addon="mosquitto", + service="mqtt", + uuid=(uuid := uuid4()), + config={ + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", }, - }, - ) - aioclient_mock.get( - 
"http://127.0.0.1/addons/mosquitto/info", - json={"result": "ok", "data": {"name": "Mosquitto Test"}}, - ) + ) + ] + addon_installed.return_value.name = "Mosquitto Test" - assert aioclient_mock.call_count == 0 + assert get_addon_discovery_info.call_count == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_START) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() - assert aioclient_mock.call_count == 2 + assert get_addon_discovery_info.call_count == 1 assert mock_mqtt.async_step_hassio.called mock_mqtt.async_step_hassio.assert_called_with( HassioServiceInfo( @@ -92,7 +91,7 @@ async def test_hassio_discovery_startup( }, name="Mosquitto Test", slug="mosquitto", - uuid="test", + uuid=uuid.hex, ) ) @@ -102,38 +101,29 @@ async def test_hassio_discovery_startup_done( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_mqtt: type[config_entries.ConfigFlow], + addon_installed: AsyncMock, + get_addon_discovery_info: AsyncMock, ) -> None: """Test startup and discovery with hass discovery.""" aioclient_mock.post( "http://127.0.0.1/supervisor/options", json={"result": "ok", "data": {}}, ) - aioclient_mock.get( - "http://127.0.0.1/discovery", - json={ - "result": "ok", - "data": { - "discovery": [ - { - "service": "mqtt", - "uuid": "test", - "addon": "mosquitto", - "config": { - "broker": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", - }, - } - ] + get_addon_discovery_info.return_value = [ + Discovery( + addon="mosquitto", + service="mqtt", + uuid=(uuid := uuid4()), + config={ + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", }, - }, - ) - aioclient_mock.get( - "http://127.0.0.1/addons/mosquitto/info", - json={"result": "ok", "data": {"name": "Mosquitto Test"}}, - ) + ) + ] + addon_installed.return_value.name = "Mosquitto Test" with ( patch( @@ -149,7 +139,7 @@ async def test_hassio_discovery_startup_done( await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() - assert aioclient_mock.call_count == 2 + assert get_addon_discovery_info.call_count == 1 assert mock_mqtt.async_step_hassio.called mock_mqtt.async_step_hassio.assert_called_with( HassioServiceInfo( @@ -163,51 +153,43 @@ async def test_hassio_discovery_startup_done( }, name="Mosquitto Test", slug="mosquitto", - uuid="test", + uuid=uuid.hex, ) ) async def test_hassio_discovery_webhook( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, hassio_client: TestClient, mock_mqtt: type[config_entries.ConfigFlow], + addon_installed: AsyncMock, + get_discovery_message: AsyncMock, ) -> None: """Test discovery webhook.""" - aioclient_mock.get( - "http://127.0.0.1/discovery/testuuid", - json={ - "result": "ok", - "data": { - "service": "mqtt", - "uuid": "test", - "addon": "mosquitto", - "config": { - "broker": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", - }, - }, + get_discovery_message.return_value = Discovery( + addon="mosquitto", + service="mqtt", + uuid=(uuid := uuid4()), + config={ + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/mosquitto/info", - json={"result": "ok", "data": {"name": "Mosquitto Test"}}, - ) + addon_installed.return_value.name = "Mosquitto Test" resp = await hassio_client.post( - 
"/api/hassio_push/discovery/testuuid", - json={"addon": "mosquitto", "service": "mqtt", "uuid": "testuuid"}, + f"/api/hassio_push/discovery/{uuid!s}", + json={"addon": "mosquitto", "service": "mqtt", "uuid": str(uuid)}, ) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert resp.status == HTTPStatus.OK - assert aioclient_mock.call_count == 2 + assert get_discovery_message.call_count == 1 assert mock_mqtt.async_step_hassio.called mock_mqtt.async_step_hassio.assert_called_with( HassioServiceInfo( @@ -221,6 +203,153 @@ async def test_hassio_discovery_webhook( }, name="Mosquitto Test", slug="mosquitto", - uuid="test", + uuid=uuid.hex, ) ) + + +TEST_UUID = str(uuid4()) + + +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + ), + [ + # Matching discovery key + ( + "mock-domain", + {"hassio": (DiscoveryKey(domain="hassio", key=TEST_UUID, version=1),)}, + ), + # Matching discovery key + ( + "mock-domain", + { + "hassio": (DiscoveryKey(domain="hassio", key=TEST_UUID, version=1),), + "other": (DiscoveryKey(domain="other", key="blah", version=1),), + }, + ), + # Matching discovery key, other domain + # Note: Rediscovery is not currently restricted to the domain of the removed + # entry. Such a check can be added if needed. + ( + "comp", + {"hassio": (DiscoveryKey(domain="hassio", key=TEST_UUID, version=1),)}, + ), + ], +) +@pytest.mark.parametrize( + "entry_source", + [ + config_entries.SOURCE_HASSIO, + config_entries.SOURCE_IGNORE, + config_entries.SOURCE_USER, + ], +) +async def test_hassio_rediscover( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hassio_client: TestClient, + addon_installed: AsyncMock, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, + get_addon_discovery_info: AsyncMock, + get_discovery_message: AsyncMock, +) -> None: + """Test we reinitiate flows when an ignored config entry is removed.""" + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id="mock-unique-id", + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + get_discovery_message.return_value = Discovery( + addon="mosquitto", + service="mqtt", + uuid=(uuid := uuid4()), + config={ + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", + }, + ) + + expected_context = { + "discovery_key": DiscoveryKey(domain="hassio", key=uuid.hex, version=1), + "source": config_entries.SOURCE_HASSIO, + } + + with patch.object(hass.config_entries.flow, "async_init") as mock_init: + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_init.mock_calls) == 1 + assert mock_init.mock_calls[0][1][0] == "mqtt" + assert mock_init.mock_calls[0][2]["context"] == expected_context + + +@pytest.mark.usefixtures("mock_async_zeroconf") +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + "entry_source", + "entry_unique_id", + ), + [ + # Discovery key from other domain + ( + "mock-domain", + {"bluetooth": (DiscoveryKey(domain="bluetooth", key="test", version=1),)}, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + # Discovery key from the future + ( + "mock-domain", + {"hassio": (DiscoveryKey(domain="hassio", key="test", version=2),)}, + 
config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + ], +) +async def test_hassio_rediscover_no_match( + hass: HomeAssistant, + hassio_client: TestClient, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, + entry_unique_id: str, +) -> None: + """Test we don't reinitiate flows when a non matching config entry is removed.""" + + mock_integration(hass, MockModule(entry_domain)) + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id=entry_unique_id, + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + with patch.object(hass.config_entries.flow, "async_init") as mock_init: + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_init.mock_calls) == 0 diff --git a/tests/components/hassio/test_handler.py b/tests/components/hassio/test_handler.py index c5fa6ff8254..e6375171dab 100644 --- a/tests/components/hassio/test_handler.py +++ b/tests/components/hassio/test_handler.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any, Literal -import aiohttp from aiohttp import hdrs, web import pytest @@ -16,36 +15,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from tests.test_util.aiohttp import AiohttpClientMocker -async def test_api_ping( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API ping.""" - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) - - assert await hassio_handler.is_connected() - assert aioclient_mock.call_count == 1 - - -async def test_api_ping_error( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API ping error.""" - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "error"}) - - assert not (await hassio_handler.is_connected()) - assert aioclient_mock.call_count == 1 - - -async def test_api_ping_exeption( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API ping exception.""" - aioclient_mock.get("http://127.0.0.1/supervisor/ping", exc=aiohttp.ClientError()) - - assert not (await hassio_handler.is_connected()) - assert aioclient_mock.call_count == 1 - - async def test_api_info( hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker ) -> None: @@ -181,54 +150,6 @@ async def test_api_core_info_error( assert aioclient_mock.call_count == 1 -async def test_api_homeassistant_stop( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API Home Assistant stop.""" - aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) - - assert await hassio_handler.stop_homeassistant() - assert aioclient_mock.call_count == 1 - - -async def test_api_homeassistant_restart( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API Home Assistant restart.""" - aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": "ok"}) - - assert await hassio_handler.restart_homeassistant() - assert aioclient_mock.call_count == 1 - - -async def test_api_addon_info( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API Add-on info.""" - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - 
json={"result": "ok", "data": {"name": "bla"}}, - ) - - data = await hassio_handler.get_addon_info("test") - assert data["name"] == "bla" - assert aioclient_mock.call_count == 1 - - -async def test_api_addon_stats( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API Add-on stats.""" - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={"result": "ok", "data": {"memory_percent": 0.01}}, - ) - - data = await hassio_handler.get_addon_stats("test") - assert data["memory_percent"] == 0.01 - assert aioclient_mock.call_count == 1 - - async def test_api_core_stats( hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker ) -> None: @@ -257,34 +178,6 @@ async def test_api_supervisor_stats( assert aioclient_mock.call_count == 1 -async def test_api_discovery_message( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API discovery message.""" - aioclient_mock.get( - "http://127.0.0.1/discovery/test", - json={"result": "ok", "data": {"service": "mqtt"}}, - ) - - data = await hassio_handler.get_discovery_message("test") - assert data["service"] == "mqtt" - assert aioclient_mock.call_count == 1 - - -async def test_api_retrieve_discovery( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API discovery message.""" - aioclient_mock.get( - "http://127.0.0.1/discovery", - json={"result": "ok", "data": {"discovery": [{"service": "mqtt"}]}}, - ) - - data = await hassio_handler.retrieve_discovery_messages() - assert data["discovery"][-1]["service"] == "mqtt" - assert aioclient_mock.call_count == 1 - - async def test_api_ingress_panels( hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker ) -> None: @@ -315,8 +208,7 @@ async def test_api_ingress_panels( @pytest.mark.parametrize( ("api_call", "method", "payload"), [ - ("retrieve_discovery_messages", "GET", None), - ("refresh_updates", "POST", None), + ("get_network_info", "GET", None), ("update_diagnostics", "POST", True), ], ) @@ -449,23 +341,16 @@ async def test_api_set_yellow_settings( assert aioclient_mock.call_count == 1 -@pytest.mark.usefixtures("hassio_stubs") -async def test_api_reboot_host( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API ping.""" - aioclient_mock.post( - "http://127.0.0.1/host/reboot", - json={"result": "ok", "data": {}}, - ) - - assert await handler.async_reboot_host(hass) == {} - assert aioclient_mock.call_count == 1 - - @pytest.mark.usefixtures("hassio_stubs") async def test_send_command_invalid_command(hass: HomeAssistant) -> None: """Test send command fails when command is invalid.""" hassio: HassIO = hass.data["hassio"] with pytest.raises(HassioAPIError): + # absolute path await hassio.send_command("/test/../bad") + with pytest.raises(HassioAPIError): + # relative path + await hassio.send_command("test/../bad") + with pytest.raises(HassioAPIError): + # relative path with percent encoding + await hassio.send_command("test/%2E%2E/bad") diff --git a/tests/components/hassio/test_http.py b/tests/components/hassio/test_http.py index 404c047a56c..8ed59bc78d1 100644 --- a/tests/components/hassio/test_http.py +++ b/tests/components/hassio/test_http.py @@ -82,7 +82,9 @@ async def test_forward_request_onboarded_user_unallowed_methods( # Unauthenticated path ("supervisor/info", HTTPStatus.UNAUTHORIZED), ("supervisor/logs", HTTPStatus.UNAUTHORIZED), + ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED), ("addons/bl_b392/logs", 
HTTPStatus.UNAUTHORIZED), + ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED), ], ) async def test_forward_request_onboarded_user_unallowed_paths( @@ -152,7 +154,9 @@ async def test_forward_request_onboarded_noauth_unallowed_methods( # Unauthenticated path ("supervisor/info", HTTPStatus.UNAUTHORIZED), ("supervisor/logs", HTTPStatus.UNAUTHORIZED), + ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED), ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED), + ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED), ], ) async def test_forward_request_onboarded_noauth_unallowed_paths( @@ -265,7 +269,9 @@ async def test_forward_request_not_onboarded_unallowed_methods( # Unauthenticated path ("supervisor/info", HTTPStatus.UNAUTHORIZED), ("supervisor/logs", HTTPStatus.UNAUTHORIZED), + ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED), ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED), + ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED), ], ) async def test_forward_request_not_onboarded_unallowed_paths( @@ -292,7 +298,9 @@ async def test_forward_request_not_onboarded_unallowed_paths( ("addons/bl_b392/icon", False), ("backups/1234abcd/info", True), ("supervisor/logs", True), + ("supervisor/logs/follow", True), ("addons/bl_b392/logs", True), + ("addons/bl_b392/logs/follow", True), ("addons/bl_b392/changelog", True), ("addons/bl_b392/documentation", True), ], @@ -494,3 +502,70 @@ async def test_entrypoint_cache_control( assert resp1.headers["Cache-Control"] == "no-store, max-age=0" assert "Cache-Control" not in resp2.headers + + +async def test_no_follow_logs_compress( + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that we do not compress follow logs.""" + aioclient_mock.get("http://127.0.0.1/supervisor/logs/follow") + aioclient_mock.get("http://127.0.0.1/supervisor/logs") + + resp1 = await hassio_client.get("/api/hassio/supervisor/logs/follow") + resp2 = await hassio_client.get("/api/hassio/supervisor/logs") + + # Check we got right response + assert resp1.status == HTTPStatus.OK + assert resp1.headers.get("Content-Encoding") is None + + assert resp2.status == HTTPStatus.OK + assert resp2.headers.get("Content-Encoding") == "deflate" + + +async def test_forward_range_header_for_logs( + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that we forward the Range header for logs.""" + aioclient_mock.get("http://127.0.0.1/host/logs") + aioclient_mock.get("http://127.0.0.1/host/logs/boots/-1") + aioclient_mock.get("http://127.0.0.1/host/logs/boots/-2/follow?lines=100") + aioclient_mock.get("http://127.0.0.1/addons/123abc_esphome/logs") + aioclient_mock.get("http://127.0.0.1/addons/123abc_esphome/logs/follow") + aioclient_mock.get("http://127.0.0.1/backups/1234abcd/download") + + test_range = ":-100:50" + + host_resp = await hassio_client.get( + "/api/hassio/host/logs", headers={"Range": test_range} + ) + host_resp2 = await hassio_client.get( + "/api/hassio/host/logs/boots/-1", headers={"Range": test_range} + ) + host_resp3 = await hassio_client.get( + "/api/hassio/host/logs/boots/-2/follow?lines=100", headers={"Range": test_range} + ) + addon_resp = await hassio_client.get( + "/api/hassio/addons/123abc_esphome/logs", headers={"Range": test_range} + ) + addon_resp2 = await hassio_client.get( + "/api/hassio/addons/123abc_esphome/logs/follow", headers={"Range": test_range} + ) + backup_resp = await hassio_client.get( + "/api/hassio/backups/1234abcd/download", headers={"Range": test_range} + ) + + assert host_resp.status == 
HTTPStatus.OK + assert host_resp2.status == HTTPStatus.OK + assert host_resp3.status == HTTPStatus.OK + assert addon_resp.status == HTTPStatus.OK + assert addon_resp2.status == HTTPStatus.OK + assert backup_resp.status == HTTPStatus.OK + + assert len(aioclient_mock.mock_calls) == 6 + + assert aioclient_mock.mock_calls[0][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[1][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[2][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[3][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[4][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[5][-1].get("Range") is None diff --git a/tests/components/hassio/test_init.py b/tests/components/hassio/test_init.py index d71e8acfbe0..5c11370ae74 100644 --- a/tests/components/hassio/test_init.py +++ b/tests/components/hassio/test_init.py @@ -1,34 +1,42 @@ """The tests for the hassio component.""" from datetime import timedelta +import logging import os from typing import Any -from unittest.mock import patch +from unittest.mock import AsyncMock, patch +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsStats import pytest from voluptuous import Invalid from homeassistant.auth.const import GROUP_ID_ADMIN -from homeassistant.components import frontend +from homeassistant.components import frontend, hassio from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.hassio import ( ADDONS_COORDINATOR, DOMAIN, STORAGE_KEY, - async_get_addon_store_info, get_core_info, + get_supervisor_ip, hostname_from_addon_slug, - is_hassio, + is_hassio as deprecated_is_hassio, ) from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY -from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, issue_registry as ir +from homeassistant.helpers.hassio import is_hassio +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + import_and_test_deprecated_constant, +) from tests.test_util.aiohttp import AiohttpClientMocker MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @@ -52,10 +60,17 @@ def os_info(extra_os_info): @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker, os_info) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + os_info: AsyncMock, + store_info: AsyncMock, + addon_info: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -68,13 +83,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker, os_info) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], 
"repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -162,81 +170,41 @@ def mock_all(aioclient_mock: AiohttpClientMocker, os_info) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) - aioclient_mock.get( - "http://127.0.0.1/addons/test2/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.8, - "memory_usage": 51941376, - "memory_limit": 3977146368, - "memory_percent": 1.31, - "network_rx": 31338284, - "network_tx": 15692900, - "blk_read": 740077568, - "blk_write": 6004736, - }, - }, - ) - aioclient_mock.get( - "http://127.0.0.1/addons/test3/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.8, - "memory_usage": 51941376, - "memory_limit": 3977146368, - "memory_percent": 1.31, - "network_rx": 31338284, - "network_tx": 15692900, - "blk_read": 740077568, - "blk_write": 6004736, - }, - }, - ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) + + async def mock_addon_stats(addon: str) -> AddonsStats: + """Mock addon stats for test and test2.""" + if addon in {"test2", "test3"}: + return AddonsStats( + cpu_percent=0.8, + memory_usage=51941376, + memory_limit=3977146368, + memory_percent=1.31, + network_rx=31338284, + network_tx=15692900, + blk_read=740077568, + blk_write=6004736, + ) + return AddonsStats( + cpu_percent=0.99, + memory_usage=182611968, + memory_limit=3977146368, + memory_percent=4.59, + network_rx=362570232, + network_tx=82374138, + blk_read=46010945536, + blk_write=15051526144, + ) + + addon_stats.side_effect = mock_addon_stats + + def mock_addon_info(slug: str): + addon_info.return_value.auto_update = slug == "test" + return addon_info.return_value + + addon_info.side_effect = mock_addon_info aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -250,7 +218,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker, os_info) -> None: async def test_setup_api_ping( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -258,7 +228,7 @@ async def test_setup_api_ping( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 assert get_core_info(hass)["version_latest"] == "1.0.0" assert is_hassio(hass) @@ -293,7 +263,9 @@ async def test_setup_api_panel( async def test_setup_api_push_api_data( - hass: HomeAssistant, 
aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -303,14 +275,16 @@ async def test_setup_api_push_api_data( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 - assert not aioclient_mock.mock_calls[1][2]["ssl"] - assert aioclient_mock.mock_calls[1][2]["port"] == 9999 - assert "watchdog" not in aioclient_mock.mock_calls[1][2] + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert not aioclient_mock.mock_calls[0][2]["ssl"] + assert aioclient_mock.mock_calls[0][2]["port"] == 9999 + assert "watchdog" not in aioclient_mock.mock_calls[0][2] async def test_setup_api_push_api_data_server_host( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -322,16 +296,17 @@ async def test_setup_api_push_api_data_server_host( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 - assert not aioclient_mock.mock_calls[1][2]["ssl"] - assert aioclient_mock.mock_calls[1][2]["port"] == 9999 - assert not aioclient_mock.mock_calls[1][2]["watchdog"] + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert not aioclient_mock.mock_calls[0][2]["ssl"] + assert aioclient_mock.mock_calls[0][2]["port"] == 9999 + assert not aioclient_mock.mock_calls[0][2]["watchdog"] async def test_setup_api_push_api_data_default( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_storage: dict[str, Any], + supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -339,10 +314,10 @@ async def test_setup_api_push_api_data_default( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 - assert not aioclient_mock.mock_calls[1][2]["ssl"] - assert aioclient_mock.mock_calls[1][2]["port"] == 8123 - refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert not aioclient_mock.mock_calls[0][2]["ssl"] + assert aioclient_mock.mock_calls[0][2]["port"] == 8123 + refresh_token = aioclient_mock.mock_calls[0][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) @@ -409,6 +384,7 @@ async def test_setup_api_existing_hassio_user( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_storage: dict[str, Any], + supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") @@ -419,14 +395,16 @@ async def test_setup_api_existing_hassio_user( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 - assert not aioclient_mock.mock_calls[1][2]["ssl"] - assert aioclient_mock.mock_calls[1][2]["port"] == 8123 - assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert not aioclient_mock.mock_calls[0][2]["ssl"] + assert aioclient_mock.mock_calls[0][2]["port"] == 8123 + assert aioclient_mock.mock_calls[0][2]["refresh_token"] == token.token async def 
test_setup_core_push_timezone( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" hass.config.time_zone = "testzone" @@ -436,8 +414,8 @@ async def test_setup_core_push_timezone( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 - assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert aioclient_mock.mock_calls[1][2]["timezone"] == "testzone" with patch("homeassistant.util.dt.set_default_time_zone"): await hass.config.async_update(time_zone="America/New_York") @@ -446,7 +424,9 @@ async def test_setup_core_push_timezone( async def test_setup_hassio_no_additional_data( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" with ( @@ -457,7 +437,7 @@ async def test_setup_hassio_no_additional_data( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 assert aioclient_mock.mock_calls[-1][3]["Authorization"] == "Bearer 123456" @@ -469,16 +449,13 @@ async def test_fail_setup_without_environ_var(hass: HomeAssistant) -> None: async def test_warn_when_cannot_connect( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + supervisor_is_connected: AsyncMock, ) -> None: """Fail warn when we cannot connect.""" - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=None, - ), - ): + supervisor_is_connected.side_effect = SupervisorError + with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result @@ -509,15 +486,14 @@ async def test_service_calls( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, + supervisor_client: AsyncMock, + addon_installed: AsyncMock, + supervisor_is_connected: AsyncMock, + issue_registry: ir.IssueRegistry, ) -> None: """Call service and check the API calls behind that.""" - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=None, - ), - ): + supervisor_is_connected.side_effect = SupervisorError + with patch.dict(os.environ, MOCK_ENVIRON): assert await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -541,19 +517,20 @@ async def test_service_calls( await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call("hassio", "addon_update", {"addon": "test"}) + assert (DOMAIN, "update_service_deprecated") in issue_registry.issues await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 24 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 25 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() - assert 
aioclient_mock.call_count == 26 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 27 await hass.services.async_call("hassio", "backup_full", {}) await hass.services.async_call( @@ -568,7 +545,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 28 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29 assert aioclient_mock.mock_calls[-1][2] == { "name": "2021-11-13 03:48:00", "homeassistant": True, @@ -593,7 +570,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 30 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 31 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], @@ -612,7 +589,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 31 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 32 assert aioclient_mock.mock_calls[-1][2] == { "name": "backup_name", "location": "backup_share", @@ -628,7 +605,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 32 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 33 assert aioclient_mock.mock_calls[-1][2] == { "name": "2021-11-13 03:48:00", "location": None, @@ -647,7 +624,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 34 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 35 assert aioclient_mock.mock_calls[-1][2] == { "name": "2021-11-13 11:48:00", "location": None, @@ -657,15 +634,11 @@ async def test_service_calls( async def test_invalid_service_calls( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, + supervisor_is_connected: AsyncMock, ) -> None: """Call service with invalid input and check that it raises.""" - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=None, - ), - ): + supervisor_is_connected.side_effect = SupervisorError + with patch.dict(os.environ, MOCK_ENVIRON): assert await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -682,6 +655,7 @@ async def test_invalid_service_calls( async def test_addon_service_call_with_complex_slug( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, + supervisor_is_connected: AsyncMock, ) -> None: """Addon slugs can have ., - and _, confirm that passes validation.""" supervisor_mock_data = { @@ -701,12 +675,9 @@ async def test_addon_service_call_with_complex_slug( }, ], } + supervisor_is_connected.side_effect = SupervisorError with ( patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=None, - ), patch( "homeassistant.components.hassio.HassIO.get_supervisor_info", return_value=supervisor_mock_data, @@ -720,7 +691,9 @@ async def test_addon_service_call_with_complex_slug( @pytest.mark.usefixtures("hassio_env") async def test_service_calls_core( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -732,12 +705,12 @@ async def test_service_calls_core( await hass.services.async_call("homeassistant", "stop") await 
hass.async_block_till_done() - assert aioclient_mock.call_count == 5 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 6 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() - assert aioclient_mock.call_count == 5 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 6 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None @@ -746,9 +719,10 @@ async def test_service_calls_core( await hass.async_block_till_done() assert mock_check_config.called - assert aioclient_mock.call_count == 6 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 7 +@pytest.mark.usefixtures("addon_installed") async def test_entry_load_and_unload(hass: HomeAssistant) -> None: """Test loading and unloading config entry.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -775,6 +749,7 @@ async def test_migration_off_hassio(hass: HomeAssistant) -> None: assert hass.config_entries.async_entries(DOMAIN) == [] +@pytest.mark.usefixtures("addon_installed") async def test_device_registry_calls( hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: @@ -927,130 +902,110 @@ async def test_device_registry_calls( assert len(device_registry.devices) == 5 +@pytest.mark.usefixtures("addon_installed") async def test_coordinator_updates( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, supervisor_client: AsyncMock ) -> None: """Test coordinator updates.""" await async_setup_component(hass, "homeassistant", {}) - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.refresh_updates" - ) as refresh_updates_mock, - ): + with patch.dict(os.environ, MOCK_ENVIRON): config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # Initial refresh, no update refresh call - assert refresh_updates_mock.call_count == 0 + supervisor_client.refresh_updates.assert_not_called() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ) as refresh_updates_mock: - async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) - await hass.async_block_till_done() + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() - # Scheduled refresh, no update refresh call - assert refresh_updates_mock.call_count == 0 + # Scheduled refresh, no update refresh call + supervisor_client.refresh_updates.assert_not_called() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ) as refresh_updates_mock: - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) - # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer - assert refresh_updates_mock.call_count == 0 - async_fire_time_changed( - hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) - ) - await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 1 + # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer + 
supervisor_client.refresh_updates.assert_not_called() + async_fire_time_changed( + hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) + ) + await hass.async_block_till_done() + supervisor_client.refresh_updates.assert_called_once() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - side_effect=HassioAPIError("Unknown"), - ) as refresh_updates_mock: - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) - # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer - async_fire_time_changed( - hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) - ) - await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 1 - assert "Error on Supervisor API: Unknown" in caplog.text + supervisor_client.refresh_updates.reset_mock() + supervisor_client.refresh_updates.side_effect = SupervisorError("Unknown") + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) + # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer + async_fire_time_changed( + hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) + ) + await hass.async_block_till_done() + supervisor_client.refresh_updates.assert_called_once() + assert "Error on Supervisor API: Unknown" in caplog.text -@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "addon_installed") async def test_coordinator_updates_stats_entities_enabled( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, + supervisor_client: AsyncMock, ) -> None: """Test coordinator updates with stats entities enabled.""" await async_setup_component(hass, "homeassistant", {}) - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.refresh_updates" - ) as refresh_updates_mock, - ): + with patch.dict(os.environ, MOCK_ENVIRON): config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # Initial refresh without stats - assert refresh_updates_mock.call_count == 0 + supervisor_client.refresh_updates.assert_not_called() # Refresh with stats once we know which ones are needed async_fire_time_changed( hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) ) await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 1 - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ) as refresh_updates_mock: - async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) - await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 0 + supervisor_client.refresh_updates.assert_called_once() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ) as refresh_updates_mock: - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) - assert refresh_updates_mock.call_count == 0 + supervisor_client.refresh_updates.reset_mock() + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() + 
supervisor_client.refresh_updates.assert_not_called() + + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) + supervisor_client.refresh_updates.assert_not_called() # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer async_fire_time_changed( @@ -1058,28 +1013,26 @@ async def test_coordinator_updates_stats_entities_enabled( ) await hass.async_block_till_done() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - side_effect=HassioAPIError("Unknown"), - ) as refresh_updates_mock: - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) - # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer - async_fire_time_changed( - hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) - ) - await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 1 - assert "Error on Supervisor API: Unknown" in caplog.text + supervisor_client.refresh_updates.reset_mock() + supervisor_client.refresh_updates.side_effect = SupervisorError("Unknown") + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) + # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer + async_fire_time_changed( + hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) + ) + await hass.async_block_till_done() + supervisor_client.refresh_updates.assert_called_once() + assert "Error on Supervisor API: Unknown" in caplog.text @pytest.mark.parametrize( @@ -1099,7 +1052,10 @@ async def test_coordinator_updates_stats_entities_enabled( ], ) async def test_setup_hardware_integration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, integration + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, + integration, ) -> None: """Test setup initiates hardware integration.""" @@ -1114,26 +1070,10 @@ async def test_setup_hardware_integration( await hass.async_block_till_done(wait_background_tasks=True) assert result - assert aioclient_mock.call_count == 20 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("hassio_stubs") -async def test_get_store_addon_info( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test get store add-on info from Supervisor API.""" - aioclient_mock.clear_requests() - aioclient_mock.get( - "http://127.0.0.1/store/addons/test", - json={"result": "ok", "data": {"name": "bla"}}, - ) - - data = await async_get_addon_store_info(hass, "test") - assert data["name"] == "bla" - assert aioclient_mock.call_count == 1 - - def test_hostname_from_addon_slug() -> None: """Test hostname_from_addon_slug.""" assert hostname_from_addon_slug("mqtt") == "mqtt" @@ -1141,3 +1081,62 @@ def test_hostname_from_addon_slug() -> None: hostname_from_addon_slug("core_silabs_multiprotocol") == "core-silabs-multiprotocol" ) + + +def test_deprecated_function_is_hassio( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test calling deprecated_is_hassio function will create log entry.""" + + deprecated_is_hassio(hass) + assert caplog.record_tuples 
== [ + ( + "homeassistant.components.hassio", + logging.WARNING, + "is_hassio is a deprecated function which will be removed in HA Core 2025.11. Use homeassistant.helpers.hassio.is_hassio instead", + ) + ] + + +def test_deprecated_function_get_supervisor_ip( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test calling get_supervisor_ip function will create log entry.""" + + get_supervisor_ip() + assert caplog.record_tuples == [ + ( + "homeassistant.helpers.hassio", + logging.WARNING, + "get_supervisor_ip is a deprecated function which will be removed in HA Core 2025.11. Use homeassistant.helpers.hassio.get_supervisor_ip instead", + ) + ] + + +@pytest.mark.parametrize( + ("constant_name", "replacement_name", "replacement"), + [ + ( + "HassioServiceInfo", + "homeassistant.helpers.service_info.hassio.HassioServiceInfo", + HassioServiceInfo, + ), + ], +) +def test_deprecated_constants( + caplog: pytest.LogCaptureFixture, + constant_name: str, + replacement_name: str, + replacement: Any, +) -> None: + """Test deprecated automation constants.""" + import_and_test_deprecated_constant( + caplog, + hassio, + constant_name, + replacement_name, + replacement, + "2025.11", + ) diff --git a/tests/components/hassio/test_issues.py b/tests/components/hassio/test_issues.py index 1a3d3d83f95..b0d3920be09 100644 --- a/tests/components/hassio/test_issues.py +++ b/tests/components/hassio/test_issues.py @@ -4,11 +4,28 @@ from __future__ import annotations from collections.abc import Generator from datetime import timedelta -from http import HTTPStatus import os from typing import Any -from unittest.mock import ANY, patch +from unittest.mock import ANY, AsyncMock, patch +from uuid import UUID, uuid4 +from aiohasupervisor import ( + SupervisorBadRequestError, + SupervisorError, + SupervisorTimeoutError, +) +from aiohasupervisor.models import ( + Check, + CheckType, + ContextType, + Issue, + IssueType, + ResolutionInfo, + Suggestion, + SuggestionType, + UnhealthyReason, + UnsupportedReason, +) from freezegun.api import FrozenDateTimeFactory import pytest @@ -18,7 +35,6 @@ from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON -from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse from tests.typing import WebSocketGenerator @@ -36,49 +52,41 @@ def fixture_supervisor_environ() -> Generator[None]: def mock_resolution_info( - aioclient_mock: AiohttpClientMocker, - unsupported: list[str] | None = None, - unhealthy: list[str] | None = None, - issues: list[dict[str, str]] | None = None, - suggestion_result: str = "ok", + supervisor_client: AsyncMock, + unsupported: list[UnsupportedReason] | None = None, + unhealthy: list[UnhealthyReason] | None = None, + issues: list[Issue] | None = None, + suggestions_by_issue: dict[UUID, list[Suggestion]] | None = None, + suggestion_result: SupervisorError | None = None, ) -> None: """Mock resolution/info endpoint with unsupported/unhealthy reasons and/or issues.""" - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": unsupported or [], - "unhealthy": unhealthy or [], - "suggestions": [], - "issues": [ - {k: v for k, v in issue.items() if k != "suggestions"} - for issue in issues - ] - if issues - else [], - "checks": [ - {"enabled": True, "slug": "supervisor_trust"}, - {"enabled": True, "slug": "free_space"}, - ], - }, - }, + supervisor_client.resolution.info.return_value = ResolutionInfo( + unsupported=unsupported or [], + 
unhealthy=unhealthy or [], + issues=issues or [], + suggestions=[ + suggestion + for issue_list in suggestions_by_issue.values() + for suggestion in issue_list + ] + if suggestions_by_issue + else [], + checks=[ + Check(enabled=True, slug=CheckType.SUPERVISOR_TRUST), + Check(enabled=True, slug=CheckType.FREE_SPACE), + ], ) - if issues: - suggestions_by_issue = { - issue["uuid"]: issue.get("suggestions", []) for issue in issues - } - for issue_uuid, suggestions in suggestions_by_issue.items(): - aioclient_mock.get( - f"http://127.0.0.1/resolution/issue/{issue_uuid}/suggestions", - json={"result": "ok", "data": {"suggestions": suggestions}}, - ) - for suggestion in suggestions: - aioclient_mock.post( - f"http://127.0.0.1/resolution/suggestion/{suggestion['uuid']}", - json={"result": suggestion_result}, - ) + if suggestions_by_issue: + + async def mock_suggestions_for_issue(uuid: UUID) -> list[Suggestion]: + """Mock of suggestions for issue api.""" + return suggestions_by_issue.get(uuid, []) + + supervisor_client.resolution.suggestions_for_issue.side_effect = ( + mock_suggestions_for_issue + ) + supervisor_client.resolution.apply_suggestion.side_effect = suggestion_result def assert_repair_in_list( @@ -134,11 +142,13 @@ def assert_issue_repair_in_list( @pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test issues added for unhealthy systems.""" - mock_resolution_info(aioclient_mock, unhealthy=["docker", "setup"]) + mock_resolution_info( + supervisor_client, unhealthy=[UnhealthyReason.DOCKER, UnhealthyReason.SETUP] + ) result = await async_setup_component(hass, "hassio", {}) assert result @@ -156,11 +166,14 @@ async def test_unhealthy_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test issues added for unsupported systems.""" - mock_resolution_info(aioclient_mock, unsupported=["content_trust", "os"]) + mock_resolution_info( + supervisor_client, + unsupported=[UnsupportedReason.CONTENT_TRUST, UnsupportedReason.OS], + ) result = await async_setup_component(hass, "hassio", {}) assert result @@ -180,11 +193,11 @@ async def test_unsupported_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues_add_remove( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test unhealthy issues added and removed from dispatches.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -237,11 +250,11 @@ async def test_unhealthy_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues_add_remove( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test unsupported issues added and removed from dispatches.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -294,22 +307,33 @@ async def test_unsupported_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def 
test_reset_issues_supervisor_restart( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """All issues reset on supervisor restart.""" mock_resolution_info( - aioclient_mock, - unsupported=["os"], - unhealthy=["docker"], + supervisor_client, + unsupported=[UnsupportedReason.OS], + unhealthy=[UnhealthyReason.DOCKER], issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - } + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(uuid := uuid4()), + ) ], + suggestions_by_issue={ + uuid: [ + Suggestion( + SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + auto=False, + ) + ] + }, ) result = await async_setup_component(hass, "hassio", {}) @@ -325,15 +349,14 @@ async def test_reset_issues_supervisor_restart( assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=uuid.hex, context="system", type_="reboot_required", - fixable=False, + fixable=True, reference=None, ) - aioclient_mock.clear_requests() - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) await client.send_json( { "id": 2, @@ -358,11 +381,15 @@ async def test_reset_issues_supervisor_restart( @pytest.mark.usefixtures("all_setup_requests") async def test_reasons_added_and_removed( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test an unsupported/unhealthy reasons being added and removed at same time.""" - mock_resolution_info(aioclient_mock, unsupported=["os"], unhealthy=["docker"]) + mock_resolution_info( + supervisor_client, + unsupported=[UnsupportedReason.OS], + unhealthy=[UnhealthyReason.DOCKER], + ) result = await async_setup_component(hass, "hassio", {}) assert result @@ -376,9 +403,10 @@ async def test_reasons_added_and_removed( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="docker") assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") - aioclient_mock.clear_requests() mock_resolution_info( - aioclient_mock, unsupported=["content_trust"], unhealthy=["setup"] + supervisor_client, + unsupported=[UnsupportedReason.CONTENT_TRUST], + unhealthy=[UnhealthyReason.SETUP], ) await client.send_json( { @@ -408,12 +436,14 @@ async def test_reasons_added_and_removed( @pytest.mark.usefixtures("all_setup_requests") async def test_ignored_unsupported_skipped( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Unsupported reasons which have an identical unhealthy reason are ignored.""" mock_resolution_info( - aioclient_mock, unsupported=["privileged"], unhealthy=["privileged"] + supervisor_client, + unsupported=[UnsupportedReason.PRIVILEGED], + unhealthy=[UnhealthyReason.PRIVILEGED], ) result = await async_setup_component(hass, "hassio", {}) @@ -431,12 +461,14 @@ async def test_ignored_unsupported_skipped( @pytest.mark.usefixtures("all_setup_requests") async def test_new_unsupported_unhealthy_reason( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """New unsupported/unhealthy reasons result in a generic repair until next core update.""" mock_resolution_info( - aioclient_mock, 
unsupported=["fake_unsupported"], unhealthy=["fake_unhealthy"] + supervisor_client, + unsupported=["fake_unsupported"], + unhealthy=["fake_unhealthy"], ) result = await async_setup_component(hass, "hassio", {}) @@ -481,40 +513,43 @@ async def test_new_unsupported_unhealthy_reason( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test repairs added for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - }, - { - "uuid": "1235", - "type": "multiple_data_disks", - "context": "system", - "reference": "/dev/sda1", - "suggestions": [ - { - "uuid": "1236", - "type": "rename_data_disk", - "context": "system", - "reference": "/dev/sda1", - } - ], - }, - { - "uuid": "1237", - "type": "should_not_be_repair", - "context": "os", - "reference": None, - }, + Issue( + type=IssueType.DETACHED_ADDON_MISSING, + context=ContextType.ADDON, + reference="test", + uuid=(uuid_issue1 := uuid4()), + ), + Issue( + type=IssueType.MULTIPLE_DATA_DISKS, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(uuid_issue2 := uuid4()), + ), + Issue( + type="should_not_be_repair", + context=ContextType.OS, + reference=None, + uuid=uuid4(), + ), ], + suggestions_by_issue={ + uuid_issue2: [ + Suggestion( + type=SuggestionType.RENAME_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=uuid4(), + auto=False, + ) + ] + }, ) result = await async_setup_component(hass, "hassio", {}) @@ -528,15 +563,16 @@ async def test_supervisor_issues( assert len(msg["result"]["issues"]) == 2 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", - context="system", - type_="reboot_required", + uuid=uuid_issue1.hex, + context="addon", + type_="detached_addon_missing", fixable=False, - reference=None, + reference="test", + placeholders={"addon_url": "/hassio/addon/test", "addon": "test"}, ) assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1235", + uuid=uuid_issue2.hex, context="system", type_="multiple_data_disks", fixable=True, @@ -547,61 +583,41 @@ async def test_supervisor_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_initial_failure( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, + resolution_info: AsyncMock, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, ) -> None: """Test issues manager retries after initial update failure.""" - responses = [ - AiohttpClientMockResponse( - method="get", - url="http://127.0.0.1/resolution/info", - status=HTTPStatus.BAD_REQUEST, - json={ - "result": "error", - "message": "System is not ready with state: setup", - }, - ), - AiohttpClientMockResponse( - method="get", - url="http://127.0.0.1/resolution/info", - status=HTTPStatus.OK, - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - }, - ], - "checks": [ - {"enabled": True, "slug": "supervisor_trust"}, - {"enabled": True, "slug": "free_space"}, - ], - }, - }, - ), + mock_resolution_info( + supervisor_client, + unsupported=[], + unhealthy=[], + issues=[ + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + 
uuid=(uuid := uuid4()), + ) + ], + suggestions_by_issue={ + uuid: [ + Suggestion( + SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + auto=False, + ) + ] + }, + ) + resolution_info.side_effect = [ + SupervisorBadRequestError("System is not ready with state: setup"), + resolution_info.return_value, ] - async def mock_responses(*args): - nonlocal responses - return responses.pop(0) - - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - side_effect=mock_responses, - ) - aioclient_mock.get( - "http://127.0.0.1/resolution/issue/1234/suggestions", - json={"result": "ok", "data": {"suggestions": []}}, - ) - with patch("homeassistant.components.hassio.issues.REQUEST_REFRESH_DELAY", new=0.1): result = await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -625,11 +641,11 @@ async def test_supervisor_issues_initial_failure( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_add_remove( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test supervisor issues added and removed from dispatches.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -643,10 +659,18 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_changed", "data": { - "uuid": "1234", + "uuid": (issue_uuid := uuid4().hex), "type": "reboot_required", "context": "system", "reference": None, + "suggestions": [ + { + "uuid": uuid4().hex, + "type": "execute_reboot", + "context": "system", + "reference": None, + } + ], }, }, } @@ -661,10 +685,10 @@ async def test_supervisor_issues_add_remove( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=issue_uuid, context="system", type_="reboot_required", - fixable=False, + fixable=True, reference=None, ) @@ -673,20 +697,12 @@ async def test_supervisor_issues_add_remove( "id": 3, "type": "supervisor/event", "data": { - "event": "issue_changed", + "event": "issue_removed", "data": { - "uuid": "1234", + "uuid": issue_uuid, "type": "reboot_required", "context": "system", "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": None, - } - ], }, }, } @@ -698,75 +714,29 @@ async def test_supervisor_issues_add_remove( await client.send_json({"id": 4, "type": "repairs/list_issues"}) msg = await client.receive_json() assert msg["success"] - assert len(msg["result"]["issues"]) == 1 - assert_issue_repair_in_list( - msg["result"]["issues"], - uuid="1234", - context="system", - type_="reboot_required", - fixable=True, - reference=None, - ) - - await client.send_json( - { - "id": 5, - "type": "supervisor/event", - "data": { - "event": "issue_removed", - "data": { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - }, - }, - } - ) - msg = await client.receive_json() - assert msg["success"] - await hass.async_block_till_done() - - await client.send_json({"id": 6, "type": "repairs/list_issues"}) - msg = await client.receive_json() - assert msg["success"] assert msg["result"] == {"issues": []} @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_suggestions_fail( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, + resolution_suggestions_for_issue: 
AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test failing to get suggestions for issue skips it.""" - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - } - ], - "checks": [ - {"enabled": True, "slug": "supervisor_trust"}, - {"enabled": True, "slug": "free_space"}, - ], - }, - }, - ) - aioclient_mock.get( - "http://127.0.0.1/resolution/issue/1234/suggestions", - exc=TimeoutError(), + mock_resolution_info( + supervisor_client, + issues=[ + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + ) + ], ) + resolution_suggestions_for_issue.side_effect = SupervisorTimeoutError result = await async_setup_component(hass, "hassio", {}) assert result @@ -782,11 +752,11 @@ async def test_supervisor_issues_suggestions_fail( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_remove_missing_issue_without_error( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test HA skips message to remove issue that it didn't know about (sync issue).""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -816,16 +786,12 @@ async def test_supervisor_remove_missing_issue_without_error( @pytest.mark.usefixtures("all_setup_requests") async def test_system_is_not_ready( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + resolution_info: AsyncMock, caplog: pytest.LogCaptureFixture, ) -> None: """Ensure hassio starts despite error.""" - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "", - "message": "System is not ready with state: setup", - }, + resolution_info.side_effect = SupervisorBadRequestError( + "System is not ready with state: setup" ) assert await async_setup_component(hass, "hassio", {}) @@ -838,11 +804,11 @@ async def test_system_is_not_ready( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_detached_addon_missing( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test supervisor issue for detached addon due to missing repository.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -856,7 +822,7 @@ async def test_supervisor_issues_detached_addon_missing( "data": { "event": "issue_changed", "data": { - "uuid": "1234", + "uuid": (issue_uuid := uuid4().hex), "type": "detached_addon_missing", "context": "addon", "reference": "test", @@ -874,7 +840,7 @@ async def test_supervisor_issues_detached_addon_missing( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=issue_uuid, context="addon", type_="detached_addon_missing", fixable=False, diff --git a/tests/components/hassio/test_repairs.py b/tests/components/hassio/test_repairs.py index 907529ec9c4..f8cac4e1a97 100644 --- a/tests/components/hassio/test_repairs.py +++ b/tests/components/hassio/test_repairs.py @@ -3,8 +3,17 @@ from collections.abc import Generator from http import HTTPStatus import os -from unittest.mock import patch +from 
unittest.mock import AsyncMock, patch +from uuid import uuid4 +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ( + ContextType, + Issue, + IssueType, + Suggestion, + SuggestionType, +) import pytest from homeassistant.core import HomeAssistant @@ -14,7 +23,6 @@ from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON from .test_issues import mock_resolution_info -from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -28,34 +36,39 @@ def fixture_supervisor_environ() -> Generator[None]: @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "multiple_data_disks", - "context": "system", - "reference": "/dev/sda1", - "suggestions": [ - { - "uuid": "1235", - "type": "rename_data_disk", - "context": "system", - "reference": "/dev/sda1", - } - ], - }, + Issue( + type=IssueType.MULTIPLE_DATA_DISKS, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.RENAME_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(sugg_uuid := uuid4()), + auto=False, + ) + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -95,52 +108,53 @@ async def test_supervisor_issue_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue with multiple suggestions.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": "test", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": "test", - }, - { - "uuid": "1236", - "type": "test_type", - "context": "system", - "reference": "test", - }, - ], - }, + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference="test", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference="test", + uuid=uuid4(), + auto=False, + ), + Suggestion( + type="test_type", + context=ContextType.SYSTEM, + reference="test", + uuid=(sugg_uuid := uuid4()), + 
auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -189,52 +203,53 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1236" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confirmation( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue with multiple suggestions and choice requires confirmation.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": None, - }, - { - "uuid": "1236", - "type": "test_type", - "context": "system", - "reference": None, - }, - ], - }, + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + Suggestion( + type="test_type", + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -302,46 +317,46 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confir "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_skip_confirmation( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test confirmation skipped for fix flow for supervisor issue with one suggestion.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - 
"reference": None, - } - ], - }, + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -381,53 +396,54 @@ async def test_supervisor_issue_repair_flow_skip_confirmation( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow_error( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test repair flow fails when repair fails to apply.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "mount_failed", - "context": "mount", - "reference": "backup_share", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reload", - "context": "mount", - "reference": "backup_share", - }, - { - "uuid": "1236", - "type": "execute_remove", - "context": "mount", - "reference": "backup_share", - }, - ], - }, + Issue( + type=IssueType.MOUNT_FAILED, + context=ContextType.MOUNT, + reference="backup_share", + uuid=(issue_uuid := uuid4()), + ), ], - suggestion_result=False, + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_RELOAD, + context=ContextType.MOUNT, + reference="backup_share", + uuid=uuid4(), + auto=False, + ), + Suggestion( + type=SuggestionType.EXECUTE_REMOVE, + context=ContextType.MOUNT, + reference="backup_share", + uuid=uuid4(), + auto=False, + ), + ] + }, + suggestion_result=SupervisorError("boom"), ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -459,46 +475,52 @@ async def test_mount_failed_repair_flow_error( "description_placeholders": None, } - assert issue_registry.async_get_issue(domain="hassio", issue_id="1234") + assert issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) @pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test repair flow for mount_failed issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "mount_failed", - "context": "mount", - "reference": "backup_share", - "suggestions": [ - { - "uuid": "1235", - "type": 
"execute_reload", - "context": "mount", - "reference": "backup_share", - }, - { - "uuid": "1236", - "type": "execute_remove", - "context": "mount", - "reference": "backup_share", - }, - ], - }, + Issue( + type=IssueType.MOUNT_FAILED, + context=ContextType.MOUNT, + reference="backup_share", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_RELOAD, + context=ContextType.MOUNT, + reference="backup_share", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + Suggestion( + type=SuggestionType.EXECUTE_REMOVE, + context=ContextType.MOUNT, + reference="backup_share", + uuid=uuid4(), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -551,13 +573,8 @@ async def test_mount_failed_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.parametrize( @@ -566,62 +583,69 @@ async def test_mount_failed_repair_flow( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_docker_config_repair_flow( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "docker_config", - "context": "system", - "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_rebuild", - "context": "system", - "reference": None, - } - ], - }, - { - "uuid": "1236", - "type": "docker_config", - "context": "core", - "reference": None, - "suggestions": [ - { - "uuid": "1237", - "type": "execute_rebuild", - "context": "core", - "reference": None, - } - ], - }, - { - "uuid": "1238", - "type": "docker_config", - "context": "addon", - "reference": "test", - "suggestions": [ - { - "uuid": "1239", - "type": "execute_rebuild", - "context": "addon", - "reference": "test", - } - ], - }, + Issue( + type=IssueType.DOCKER_CONFIG, + context=ContextType.SYSTEM, + reference=None, + uuid=(issue1_uuid := uuid4()), + ), + Issue( + type=IssueType.DOCKER_CONFIG, + context=ContextType.CORE, + reference=None, + uuid=(issue2_uuid := uuid4()), + ), + Issue( + type=IssueType.DOCKER_CONFIG, + context=ContextType.ADDON, + reference="test", + uuid=(issue3_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue1_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBUILD, + context=ContextType.SYSTEM, + reference=None, + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ], + issue2_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBUILD, + context=ContextType.CORE, + reference=None, + uuid=uuid4(), + auto=False, + ), + ], + issue3_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBUILD, + context=ContextType.ADDON, + reference="test", + uuid=uuid4(), + auto=False, + ), + ], + }, ) assert await 
async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue1_uuid.hex + ) assert repair_issue client = await hass_client() @@ -661,52 +685,53 @@ async def test_supervisor_issue_docker_config_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue1_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_multiple_data_disks( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for multiple data disks supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "multiple_data_disks", - "context": "system", - "reference": "/dev/sda1", - "suggestions": [ - { - "uuid": "1235", - "type": "rename_data_disk", - "context": "system", - "reference": "/dev/sda1", - }, - { - "uuid": "1236", - "type": "adopt_data_disk", - "context": "system", - "reference": "/dev/sda1", - }, - ], - }, + Issue( + type=IssueType.MULTIPLE_DATA_DISKS, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.RENAME_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=uuid4(), + auto=False, + ), + Suggestion( + type=SuggestionType.ADOPT_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -774,13 +799,8 @@ async def test_supervisor_issue_repair_flow_multiple_data_disks( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1236" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.parametrize( @@ -789,34 +809,39 @@ async def test_supervisor_issue_repair_flow_multiple_data_disks( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_detached_addon_removed( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "detached_addon_removed", - "context": "addon", - "reference": "test", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_remove", - 
"context": "addon", - "reference": "test", - } - ], - }, + Issue( + type=IssueType.DETACHED_ADDON_REMOVED, + context=ContextType.ADDON, + reference="test", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REMOVE, + context=ContextType.ADDON, + reference="test", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -861,10 +886,107 @@ async def test_supervisor_issue_detached_addon_removed( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" + +@pytest.mark.parametrize( + "all_setup_requests", [{"include_addons": True}], indirect=True +) +@pytest.mark.usefixtures("all_setup_requests") +async def test_supervisor_issue_addon_boot_fail( + hass: HomeAssistant, + supervisor_client: AsyncMock, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, +) -> None: + """Test fix flow for supervisor issue.""" + mock_resolution_info( + supervisor_client, + issues=[ + Issue( + type="boot_fail", + context=ContextType.ADDON, + reference="test", + uuid=(issue_uuid := uuid4()), + ), + ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type="execute_start", + context=ContextType.ADDON, + reference="test", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + Suggestion( + type="disable_boot", + context=ContextType.ADDON, + reference="test", + uuid=uuid4(), + auto=False, + ), + ] + }, ) + + assert await async_setup_component(hass, "hassio", {}) + + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) + assert repair_issue + + client = await hass_client() + + resp = await client.post( + "/api/repairs/issues/fix", + json={"handler": "hassio", "issue_id": repair_issue.issue_id}, + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "menu", + "flow_id": flow_id, + "handler": "hassio", + "step_id": "fix_menu", + "data_schema": [ + { + "type": "select", + "options": [ + ["addon_execute_start", "addon_execute_start"], + ["addon_disable_boot", "addon_disable_boot"], + ], + "name": "next_step_id", + } + ], + "menu_options": ["addon_execute_start", "addon_disable_boot"], + "description_placeholders": { + "reference": "test", + "addon": "test", + }, + } + + resp = await client.post( + f"/api/repairs/issues/fix/{flow_id}", + json={"next_step_id": "addon_execute_start"}, + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "create_entry", + "flow_id": flow_id, + "handler": "hassio", + "description": None, + "description_placeholders": None, + } + + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) diff --git a/tests/components/hassio/test_sensor.py 
b/tests/components/hassio/test_sensor.py index 71b867d849d..7160a2cbf16 100644 --- a/tests/components/hassio/test_sensor.py +++ b/tests/components/hassio/test_sensor.py @@ -2,17 +2,14 @@ from datetime import timedelta import os -from unittest.mock import patch +from unittest.mock import AsyncMock, patch +from aiohasupervisor import SupervisorError from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant import config_entries -from homeassistant.components.hassio import ( - DOMAIN, - HASSIO_UPDATE_INTERVAL, - HassioAPIError, -) +from homeassistant.components.hassio import DOMAIN, HASSIO_UPDATE_INTERVAL from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE @@ -21,6 +18,8 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util +from .common import MOCK_REPOSITORIES, MOCK_STORE_ADDONS + from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker @@ -28,44 +27,21 @@ MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" _install_default_mocks(aioclient_mock) - _install_test_addon_stats_mock(aioclient_mock) - - -def _install_test_addon_stats_mock(aioclient_mock: AiohttpClientMocker): - """Install mock to provide valid stats for the test addon.""" - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) - - -def _install_test_addon_stats_failure_mock(aioclient_mock: AiohttpClientMocker): - """Install mocks to raise an exception when fetching stats for the test addon.""" - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - exc=HassioAPIError, - ) def _install_default_mocks(aioclient_mock: AiohttpClientMocker): """Install default mocks.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -78,13 +54,6 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -175,33 +144,9 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): }, }, ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) 
aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -214,6 +159,9 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): ) +@pytest.mark.parametrize( + ("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)] +) @pytest.mark.parametrize( ("entity_id", "expected"), [ @@ -272,6 +220,9 @@ async def test_sensor( assert state.state == expected +@pytest.mark.parametrize( + ("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)] +) @pytest.mark.parametrize( ("entity_id", "expected"), [ @@ -288,6 +239,7 @@ async def test_stats_addon_sensor( entity_registry: er.EntityRegistry, caplog: pytest.LogCaptureFixture, freezer: FrozenDateTimeFactory, + addon_stats: AsyncMock, ) -> None: """Test stats addons sensor.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -305,7 +257,7 @@ async def test_stats_addon_sensor( aioclient_mock.clear_requests() _install_default_mocks(aioclient_mock) - _install_test_addon_stats_failure_mock(aioclient_mock) + addon_stats.side_effect = SupervisorError freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) @@ -315,7 +267,7 @@ async def test_stats_addon_sensor( aioclient_mock.clear_requests() _install_default_mocks(aioclient_mock) - _install_test_addon_stats_mock(aioclient_mock) + addon_stats.side_effect = None freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) @@ -348,7 +300,7 @@ async def test_stats_addon_sensor( aioclient_mock.clear_requests() _install_default_mocks(aioclient_mock) - _install_test_addon_stats_failure_mock(aioclient_mock) + addon_stats.side_effect = SupervisorError freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) diff --git a/tests/components/hassio/test_update.py b/tests/components/hassio/test_update.py index 9a047010cc3..c1775d6e0b4 100644 --- a/tests/components/hassio/test_update.py +++ b/tests/components/hassio/test_update.py @@ -2,11 +2,13 @@ from datetime import timedelta import os -from unittest.mock import patch +from unittest.mock import AsyncMock, patch +from aiohasupervisor import SupervisorBadRequestError, SupervisorError +from aiohasupervisor.models import StoreAddonUpdate import pytest -from homeassistant.components.hassio import DOMAIN, HassioAPIError +from homeassistant.components.hassio import DOMAIN from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -21,10 +23,16 @@ MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) 
aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -37,13 +45,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -114,22 +115,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -162,33 +147,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: }, }, ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -217,8 +178,10 @@ async def test_update_entities( expected_state, auto_update, aioclient_mock: AiohttpClientMocker, + addon_installed: AsyncMock, ) -> None: """Test update entities.""" + addon_installed.return_value.auto_update = auto_update config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -239,9 +202,7 @@ async def test_update_entities( assert state.attributes["auto_update"] is auto_update -async def test_update_addon( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +async def test_update_addon(hass: HomeAssistant, update_addon: AsyncMock) -> None: """Test updating addon update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -255,22 +216,16 @@ async def test_update_addon( assert result await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/addons/test/update", - json={"result": "ok", "data": {}}, - ) - await hass.services.async_call( "update", "install", {"entity_id": "update.test_update"}, blocking=True, ) + update_addon.assert_called_once_with("test", StoreAddonUpdate(backup=False)) -async def test_update_os( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +async def test_update_os(hass: HomeAssistant, supervisor_client: AsyncMock) -> None: """Test updating OS update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -284,22 +239,17 @@ async def test_update_os( assert result await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/os/update", - json={"result": "ok", "data": {}}, - ) - + supervisor_client.os.update.return_value = None 
await hass.services.async_call( "update", "install", {"entity_id": "update.home_assistant_operating_system_update"}, blocking=True, ) + supervisor_client.os.update.assert_called_once() -async def test_update_core( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +async def test_update_core(hass: HomeAssistant, supervisor_client: AsyncMock) -> None: """Test updating core update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -313,21 +263,18 @@ async def test_update_core( assert result await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/core/update", - json={"result": "ok", "data": {}}, - ) - + supervisor_client.homeassistant.update.return_value = None await hass.services.async_call( "update", "install", - {"entity_id": "update.home_assistant_os_update"}, + {"entity_id": "update.home_assistant_core_update"}, blocking=True, ) + supervisor_client.homeassistant.update.assert_called_once() async def test_update_supervisor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, supervisor_client: AsyncMock ) -> None: """Test updating supervisor update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -342,21 +289,19 @@ async def test_update_supervisor( assert result await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/supervisor/update", - json={"result": "ok", "data": {}}, - ) - + supervisor_client.supervisor.update.return_value = None await hass.services.async_call( "update", "install", {"entity_id": "update.home_assistant_supervisor_update"}, blocking=True, ) + supervisor_client.supervisor.update.assert_called_once() async def test_update_addon_with_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + update_addon: AsyncMock, ) -> None: """Test updating addon update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -370,12 +315,8 @@ async def test_update_addon_with_error( ) await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/addons/test/update", - exc=HassioAPIError, - ) - - with pytest.raises(HomeAssistantError): + update_addon.side_effect = SupervisorError + with pytest.raises(HomeAssistantError, match=r"^Error updating test:"): assert not await hass.services.async_call( "update", "install", @@ -385,7 +326,7 @@ async def test_update_addon_with_error( async def test_update_os_with_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, supervisor_client: AsyncMock ) -> None: """Test updating OS update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -399,12 +340,10 @@ async def test_update_os_with_error( ) await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/os/update", - exc=HassioAPIError, - ) - - with pytest.raises(HomeAssistantError): + supervisor_client.os.update.side_effect = SupervisorError + with pytest.raises( + HomeAssistantError, match=r"^Error updating Home Assistant Operating System:" + ): assert not await hass.services.async_call( "update", "install", @@ -414,7 +353,7 @@ async def test_update_os_with_error( async def test_update_supervisor_with_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, supervisor_client: AsyncMock ) -> None: """Test updating supervisor update entity with error.""" config_entry = 
MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -428,12 +367,10 @@ async def test_update_supervisor_with_error( ) await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/supervisor/update", - exc=HassioAPIError, - ) - - with pytest.raises(HomeAssistantError): + supervisor_client.supervisor.update.side_effect = SupervisorError + with pytest.raises( + HomeAssistantError, match=r"^Error updating Home Assistant Supervisor:" + ): assert not await hass.services.async_call( "update", "install", @@ -443,7 +380,7 @@ async def test_update_supervisor_with_error( async def test_update_core_with_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, supervisor_client: AsyncMock ) -> None: """Test updating core update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -457,12 +394,10 @@ async def test_update_core_with_error( ) await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/core/update", - exc=HassioAPIError, - ) - - with pytest.raises(HomeAssistantError): + supervisor_client.homeassistant.update.side_effect = SupervisorError + with pytest.raises( + HomeAssistantError, match=r"^Error updating Home Assistant Core:" + ): assert not await hass.services.async_call( "update", "install", @@ -613,23 +548,18 @@ async def test_no_os_entity(hass: HomeAssistant) -> None: async def test_setting_up_core_update_when_addon_fails( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + addon_installed: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, ) -> None: """Test setting up core update when single addon fails.""" + addon_installed.side_effect = SupervisorBadRequestError("Addon Test does not exist") + addon_stats.side_effect = SupervisorBadRequestError("add-on is not running") + addon_changelog.side_effect = SupervisorBadRequestError("add-on is not running") with ( patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.get_addon_stats", - side_effect=HassioAPIError("add-on is not running"), - ), - patch( - "homeassistant.components.hassio.HassIO.get_addon_changelog", - side_effect=HassioAPIError("add-on is not running"), - ), - patch( - "homeassistant.components.hassio.HassIO.get_addon_info", - side_effect=HassioAPIError("add-on is not running"), - ), ): result = await async_setup_component( hass, diff --git a/tests/components/hassio/test_websocket_api.py b/tests/components/hassio/test_websocket_api.py index 7d8f07bfaec..21e6b03678b 100644 --- a/tests/components/hassio/test_websocket_api.py +++ b/tests/components/hassio/test_websocket_api.py @@ -1,5 +1,7 @@ """Test websocket API.""" +from unittest.mock import AsyncMock + import pytest from homeassistant.components.hassio.const import ( @@ -23,10 +25,13 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + supervisor_is_connected: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -64,19 +69,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> 
None: aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) @pytest.mark.usefixtures("hassio_env") diff --git a/tests/components/heos/conftest.py b/tests/components/heos/conftest.py index a12f4c610ad..9ea3341304a 100644 --- a/tests/components/heos/conftest.py +++ b/tests/components/heos/conftest.py @@ -27,7 +27,10 @@ from tests.common import MockConfigEntry def config_entry_fixture(): """Create a mock HEOS config entry.""" return MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, title="Controller (127.0.0.1)" + domain=DOMAIN, + data={CONF_HOST: "127.0.0.1"}, + title="HEOS System (via 127.0.0.1)", + unique_id=DOMAIN, ) @@ -164,6 +167,25 @@ def discovery_data_fixture() -> dict: ) +@pytest.fixture(name="discovery_data_bedroom") +def discovery_data_fixture_bedroom() -> dict: + """Return mock discovery data for testing.""" + return ssdp.SsdpServiceInfo( + ssdp_usn="mock_usn", + ssdp_st="mock_st", + ssdp_location="http://127.0.0.2:60006/upnp/desc/aios_device/aios_device.xml", + upnp={ + ssdp.ATTR_UPNP_DEVICE_TYPE: "urn:schemas-denon-com:device:AiosDevice:1", + ssdp.ATTR_UPNP_FRIENDLY_NAME: "Bedroom", + ssdp.ATTR_UPNP_MANUFACTURER: "Denon", + ssdp.ATTR_UPNP_MODEL_NAME: "HEOS Drive", + ssdp.ATTR_UPNP_MODEL_NUMBER: "DWSA-10 4.0", + ssdp.ATTR_UPNP_SERIAL: None, + ssdp.ATTR_UPNP_UDN: "uuid:e61de70c-2250-1c22-0080-0005cdf512be", + }, + ) + + @pytest.fixture(name="quick_selects") def quick_selects_fixture() -> dict[int, str]: """Create a dict of quick selects for testing.""" diff --git a/tests/components/heos/test_config_flow.py b/tests/components/heos/test_config_flow.py index 7b737d7bb4b..38382a81794 100644 --- a/tests/components/heos/test_config_flow.py +++ b/tests/components/heos/test_config_flow.py @@ -1,14 +1,10 @@ """Tests for the Heos config flow module.""" -from unittest.mock import patch -from urllib.parse import urlparse - from pyheos import HeosError from homeassistant.components import heos, ssdp -from homeassistant.components.heos.config_flow import HeosFlowHandler -from homeassistant.components.heos.const import DATA_DISCOVERED_HOSTS, DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP, SOURCE_USER +from homeassistant.components.heos.const import DOMAIN +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -17,18 +13,20 @@ from homeassistant.data_entry_flow import FlowResultType async def test_flow_aborts_already_setup(hass: HomeAssistant, config_entry) -> None: """Test flow aborts when entry already setup.""" config_entry.add_to_hass(hass) - flow = HeosFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" async def test_no_host_shows_form(hass: HomeAssistant) -> None: """Test form is shown when host not provided.""" - flow = HeosFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is 
FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} @@ -45,73 +43,69 @@ async def test_cannot_connect_shows_error_form(hass: HomeAssistant, controller) assert result["errors"][CONF_HOST] == "cannot_connect" assert controller.connect.call_count == 1 assert controller.disconnect.call_count == 1 - controller.connect.reset_mock() - controller.disconnect.reset_mock() async def test_create_entry_when_host_valid(hass: HomeAssistant, controller) -> None: """Test result type is create entry when host is valid.""" data = {CONF_HOST: "127.0.0.1"} - with patch("homeassistant.components.heos.async_setup_entry", return_value=True): - result = await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_USER}, data=data - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" - assert result["data"] == data - assert controller.connect.call_count == 1 - assert controller.disconnect.call_count == 1 + + result = await hass.config_entries.flow.async_init( + heos.DOMAIN, context={"source": SOURCE_USER}, data=data + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == DOMAIN + assert result["title"] == "HEOS System (via 127.0.0.1)" + assert result["data"] == data + assert controller.connect.call_count == 2 # Also called in async_setup_entry + assert controller.disconnect.call_count == 1 async def test_create_entry_when_friendly_name_valid( hass: HomeAssistant, controller ) -> None: """Test result type is create entry when friendly name is valid.""" - hass.data[DATA_DISCOVERED_HOSTS] = {"Office (127.0.0.1)": "127.0.0.1"} + hass.data[DOMAIN] = {"Office (127.0.0.1)": "127.0.0.1"} data = {CONF_HOST: "Office (127.0.0.1)"} - with patch("homeassistant.components.heos.async_setup_entry", return_value=True): - result = await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_USER}, data=data - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" - assert result["data"] == {CONF_HOST: "127.0.0.1"} - assert controller.connect.call_count == 1 - assert controller.disconnect.call_count == 1 - assert DATA_DISCOVERED_HOSTS not in hass.data + + result = await hass.config_entries.flow.async_init( + heos.DOMAIN, context={"source": SOURCE_USER}, data=data + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == DOMAIN + assert result["title"] == "HEOS System (via 127.0.0.1)" + assert result["data"] == {CONF_HOST: "127.0.0.1"} + assert controller.connect.call_count == 2 # Also called in async_setup_entry + assert controller.disconnect.call_count == 1 + assert DOMAIN not in hass.data async def test_discovery_shows_create_form( - hass: HomeAssistant, controller, discovery_data: ssdp.SsdpServiceInfo + hass: HomeAssistant, + controller, + discovery_data: ssdp.SsdpServiceInfo, + discovery_data_bedroom: ssdp.SsdpServiceInfo, ) -> None: - """Test discovery shows form to confirm setup and subsequent abort.""" + """Test discovery shows form to confirm setup.""" - await hass.config_entries.flow.async_init( + # Single discovered host shows form for user to finish setup. 
+ result = await hass.config_entries.flow.async_init( heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data ) - await hass.async_block_till_done() - flows_in_progress = hass.config_entries.flow.async_progress() - assert flows_in_progress[0]["context"]["unique_id"] == DOMAIN - assert len(flows_in_progress) == 1 - assert hass.data[DATA_DISCOVERED_HOSTS] == {"Office (127.0.0.1)": "127.0.0.1"} + assert hass.data[DOMAIN] == {"Office (127.0.0.1)": "127.0.0.1"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - port = urlparse(discovery_data.ssdp_location).port - discovery_data.ssdp_location = f"http://127.0.0.2:{port}/" - discovery_data.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME] = "Bedroom" - - await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data + # Subsequent discovered hosts append to discovered hosts and abort. + result = await hass.config_entries.flow.async_init( + heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data_bedroom ) - await hass.async_block_till_done() - flows_in_progress = hass.config_entries.flow.async_progress() - assert flows_in_progress[0]["context"]["unique_id"] == DOMAIN - assert len(flows_in_progress) == 1 - assert hass.data[DATA_DISCOVERED_HOSTS] == { + assert hass.data[DOMAIN] == { "Office (127.0.0.1)": "127.0.0.1", "Bedroom (127.0.0.2)": "127.0.0.2", } + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_in_progress" async def test_discovery_flow_aborts_already_setup( @@ -119,41 +113,80 @@ async def test_discovery_flow_aborts_already_setup( ) -> None: """Test discovery flow aborts when entry already setup.""" config_entry.add_to_hass(hass) - flow = HeosFlowHandler() - flow.hass = hass - result = await flow.async_step_ssdp(discovery_data) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data + ) + assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" -async def test_discovery_sets_the_unique_id( - hass: HomeAssistant, controller, discovery_data: ssdp.SsdpServiceInfo +async def test_reconfigure_validates_and_updates_config( + hass: HomeAssistant, config_entry, controller ) -> None: - """Test discovery sets the unique id.""" + """Test reconfigure validates host and successfully updates.""" + config_entry.add_to_hass(hass) + result = await config_entry.start_reconfigure_flow(hass) + assert config_entry.data[CONF_HOST] == "127.0.0.1" - port = urlparse(discovery_data.ssdp_location).port - discovery_data.ssdp_location = f"http://127.0.0.2:{port}/" - discovery_data.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME] = "Bedroom" - - await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data + # Test reconfigure initially shows form with current host value. + host = next( + key.default() for key in result["data_schema"].schema if key == CONF_HOST ) - await hass.async_block_till_done() - flows_in_progress = hass.config_entries.flow.async_progress() - assert flows_in_progress[0]["context"]["unique_id"] == DOMAIN - assert len(flows_in_progress) == 1 - assert hass.data[DATA_DISCOVERED_HOSTS] == {"Bedroom (127.0.0.2)": "127.0.0.2"} + assert host == "127.0.0.1" + assert result["errors"] == {} + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + + # Test reconfigure successfully updates. 
+ result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.2"}, + ) + assert controller.connect.call_count == 2 # Also called when entry reloaded + assert controller.disconnect.call_count == 1 + assert config_entry.data == {CONF_HOST: "127.0.0.2"} + assert config_entry.unique_id == DOMAIN + assert result["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT -async def test_import_sets_the_unique_id(hass: HomeAssistant, controller) -> None: - """Test import sets the unique id.""" +async def test_reconfigure_cannot_connect_recovers( + hass: HomeAssistant, config_entry, controller +) -> None: + """Test reconfigure cannot connect and recovers.""" + controller.connect.side_effect = HeosError() + config_entry.add_to_hass(hass) + result = await config_entry.start_reconfigure_flow(hass) + assert config_entry.data[CONF_HOST] == "127.0.0.1" - with patch("homeassistant.components.heos.async_setup_entry", return_value=True): - result = await hass.config_entries.flow.async_init( - heos.DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "127.0.0.2"}, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == DOMAIN + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.2"}, + ) + + assert controller.connect.call_count == 1 + assert controller.disconnect.call_count == 1 + host = next( + key.default() for key in result["data_schema"].schema if key == CONF_HOST + ) + assert host == "127.0.0.2" + assert result["errors"][CONF_HOST] == "cannot_connect" + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + + # Test reconfigure recovers and successfully updates. 
+ controller.connect.side_effect = None + controller.connect.reset_mock() + controller.disconnect.reset_mock() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.2"}, + ) + assert controller.connect.call_count == 2 # Also called when entry reloaded + assert controller.disconnect.call_count == 1 + assert config_entry.data == {CONF_HOST: "127.0.0.2"} + assert config_entry.unique_id == DOMAIN + assert result["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/heos/test_init.py b/tests/components/heos/test_init.py index 9341c8fbace..8d2e3b68a22 100644 --- a/tests/components/heos/test_init.py +++ b/tests/components/heos/test_init.py @@ -8,49 +8,16 @@ import pytest from homeassistant.components.heos import ( ControllerManager, + HeosRuntimeData, async_setup_entry, async_unload_entry, ) -from homeassistant.components.heos.const import ( - DATA_CONTROLLER_MANAGER, - DATA_SOURCE_MANAGER, - DOMAIN, -) -from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN -from homeassistant.const import CONF_HOST +from homeassistant.components.heos.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component -async def test_async_setup_creates_entry(hass: HomeAssistant, config) -> None: - """Test component setup creates entry from config.""" - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - entry = entries[0] - assert entry.title == "Controller (127.0.0.1)" - assert entry.data == {CONF_HOST: "127.0.0.1"} - assert entry.unique_id == DOMAIN - - -async def test_async_setup_updates_entry( - hass: HomeAssistant, config_entry, config, controller -) -> None: - """Test component setup updates entry from config.""" - config[DOMAIN][CONF_HOST] = "127.0.0.2" - config_entry.add_to_hass(hass) - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - entry = entries[0] - assert entry.title == "Controller (127.0.0.2)" - assert entry.data == {CONF_HOST: "127.0.0.2"} - assert entry.unique_id == DOMAIN - - async def test_async_setup_returns_true( hass: HomeAssistant, config_entry, config ) -> None: @@ -92,10 +59,6 @@ async def test_async_setup_entry_loads_platforms( assert controller.get_favorites.call_count == 1 assert controller.get_input_sources.call_count == 1 controller.disconnect.assert_not_called() - assert hass.data[DOMAIN][DATA_CONTROLLER_MANAGER].controller == controller - assert hass.data[DOMAIN][MEDIA_PLAYER_DOMAIN] == controller.players - assert hass.data[DOMAIN][DATA_SOURCE_MANAGER].favorites == favorites - assert hass.data[DOMAIN][DATA_SOURCE_MANAGER].inputs == input_sources async def test_async_setup_entry_not_signed_in_loads_platforms( @@ -121,10 +84,6 @@ async def test_async_setup_entry_not_signed_in_loads_platforms( assert controller.get_favorites.call_count == 0 assert controller.get_input_sources.call_count == 1 controller.disconnect.assert_not_called() - assert hass.data[DOMAIN][DATA_CONTROLLER_MANAGER].controller == controller - assert hass.data[DOMAIN][MEDIA_PLAYER_DOMAIN] == controller.players - assert hass.data[DOMAIN][DATA_SOURCE_MANAGER].favorites == {} - assert 
hass.data[DOMAIN][DATA_SOURCE_MANAGER].inputs == input_sources assert ( "127.0.0.1 is not logged in to a HEOS account and will be unable to retrieve " "HEOS favorites: Use the 'heos.sign_in' service to sign-in to a HEOS account" @@ -163,7 +122,8 @@ async def test_async_setup_entry_player_failure( async def test_unload_entry(hass: HomeAssistant, config_entry, controller) -> None: """Test entries are unloaded correctly.""" controller_manager = Mock(ControllerManager) - hass.data[DOMAIN] = {DATA_CONTROLLER_MANAGER: controller_manager} + config_entry.runtime_data = HeosRuntimeData(controller_manager, None, None, {}) + with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=True ) as unload: @@ -186,7 +146,7 @@ async def test_update_sources_retry( assert await async_setup_component(hass, DOMAIN, config) controller.get_favorites.reset_mock() controller.get_input_sources.reset_mock() - source_manager = hass.data[DOMAIN][DATA_SOURCE_MANAGER] + source_manager = config_entry.runtime_data.source_manager source_manager.retry_delay = 0 source_manager.max_retry_attempts = 1 controller.get_favorites.side_effect = CommandFailedError("Test", "test", 0) diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index 089fa1cceea..fa3f01107c1 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -8,11 +8,7 @@ from pyheos.error import HeosError import pytest from homeassistant.components.heos import media_player -from homeassistant.components.heos.const import ( - DATA_SOURCE_MANAGER, - DOMAIN, - SIGNAL_HEOS_UPDATED, -) +from homeassistant.components.heos.const import DOMAIN, SIGNAL_HEOS_UPDATED from homeassistant.components.media_player import ( ATTR_GROUP_MEMBERS, ATTR_INPUT_SOURCE, @@ -106,7 +102,7 @@ async def test_state_attributes( assert ATTR_INPUT_SOURCE not in state.attributes assert ( state.attributes[ATTR_INPUT_SOURCE_LIST] - == hass.data[DOMAIN][DATA_SOURCE_MANAGER].source_list + == config_entry.runtime_data.source_manager.source_list ) @@ -219,7 +215,7 @@ async def test_updates_from_sources_updated( const.SIGNAL_CONTROLLER_EVENT, const.EVENT_SOURCES_CHANGED, {} ) await event.wait() - source_list = hass.data[DOMAIN][DATA_SOURCE_MANAGER].source_list + source_list = config_entry.runtime_data.source_manager.source_list assert len(source_list) == 2 state = hass.states.get("media_player.test_player") assert state.attributes[ATTR_INPUT_SOURCE_LIST] == source_list @@ -318,7 +314,7 @@ async def test_updates_from_user_changed( const.SIGNAL_CONTROLLER_EVENT, const.EVENT_USER_CHANGED, None ) await event.wait() - source_list = hass.data[DOMAIN][DATA_SOURCE_MANAGER].source_list + source_list = config_entry.runtime_data.source_manager.source_list assert len(source_list) == 1 state = hass.states.get("media_player.test_player") assert state.attributes[ATTR_INPUT_SOURCE_LIST] == source_list diff --git a/tests/components/here_travel_time/test_config_flow.py b/tests/components/here_travel_time/test_config_flow.py index ea3de64ed0c..ce210813fb2 100644 --- a/tests/components/here_travel_time/test_config_flow.py +++ b/tests/components/here_travel_time/test_config_flow.py @@ -323,13 +323,7 @@ async def do_common_reconfiguration_steps(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - reconfigure_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": 
entry.entry_id, - }, - ) + reconfigure_result = await entry.start_reconfigure_flow(hass) assert reconfigure_result["type"] is FlowResultType.FORM assert reconfigure_result["step_id"] == "user" diff --git a/tests/components/history/test_init_db_schema_30.py b/tests/components/history/test_init_db_schema_30.py deleted file mode 100644 index 1520d5363d5..00000000000 --- a/tests/components/history/test_init_db_schema_30.py +++ /dev/null @@ -1,1007 +0,0 @@ -"""The tests the History component.""" - -from __future__ import annotations - -from datetime import datetime, timedelta -from http import HTTPStatus -import json -from unittest.mock import patch, sentinel - -from freezegun import freeze_time -import pytest - -from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder -from homeassistant.components.recorder.history import get_significant_states -from homeassistant.components.recorder.models import process_timestamp -from homeassistant.core import HomeAssistant, State -from homeassistant.helpers.json import JSONEncoder -from homeassistant.setup import async_setup_component -import homeassistant.util.dt as dt_util - -from tests.components.recorder.common import ( - assert_dict_of_states_equal_without_context_and_last_changed, - assert_multiple_states_equal_without_context, - assert_multiple_states_equal_without_context_and_last_changed, - assert_states_equal_without_context, - async_recorder_block_till_done, - async_wait_recording_done, - old_db_schema, -) -from tests.typing import ClientSessionGenerator, WebSocketGenerator - - -@pytest.fixture(autouse=True) -def db_schema_30(): - """Fixture to initialize the db with the old schema 30.""" - with old_db_schema("30"): - yield - - -@pytest.fixture -def legacy_hass_history(hass: HomeAssistant, hass_history): - """Home Assistant fixture to use legacy history recording.""" - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - yield - - -@pytest.mark.usefixtures("legacy_hass_history") -async def test_setup() -> None: - """Test setup method of history.""" - # Verification occurs in the fixture - - -async def test_get_significant_states(hass: HomeAssistant, legacy_hass_history) -> None: - """Test that only significant states are returned. - - We should get back every thermostat change that - includes an attribute change, but only the state updates for - media player (attribute changes are not significant and not returned). - """ - zero, four, states = await async_record_states(hass) - hist = get_significant_states(hass, zero, four, entity_ids=list(states)) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def test_get_significant_states_minimal_response( - hass: HomeAssistant, legacy_hass_history -) -> None: - """Test that only significant states are returned. - - When minimal responses is set only the first and - last states return a complete state. - - We should get back every thermostat change that - includes an attribute change, but only the state updates for - media player (attribute changes are not significant and not returned). 
- """ - zero, four, states = await async_record_states(hass) - hist = get_significant_states( - hass, zero, four, minimal_response=True, entity_ids=list(states) - ) - entites_with_reducable_states = [ - "media_player.test", - "media_player.test3", - ] - - # All states for media_player.test state are reduced - # down to last_changed and state when minimal_response - # is set except for the first state. - # is set. We use JSONEncoder to make sure that are - # pre-encoded last_changed is always the same as what - # will happen with encoding a native state - for entity_id in entites_with_reducable_states: - entity_states = states[entity_id] - for state_idx in range(1, len(entity_states)): - input_state = entity_states[state_idx] - orig_last_changed = json.dumps( - process_timestamp(input_state.last_changed), - cls=JSONEncoder, - ).replace('"', "") - orig_state = input_state.state - entity_states[state_idx] = { - "last_changed": orig_last_changed, - "state": orig_state, - } - - assert len(hist) == len(states) - assert_states_equal_without_context( - states["media_player.test"][0], hist["media_player.test"][0] - ) - assert states["media_player.test"][1] == hist["media_player.test"][1] - assert states["media_player.test"][2] == hist["media_player.test"][2] - - assert_multiple_states_equal_without_context( - states["media_player.test2"], hist["media_player.test2"] - ) - assert_states_equal_without_context( - states["media_player.test3"][0], hist["media_player.test3"][0] - ) - assert states["media_player.test3"][1] == hist["media_player.test3"][1] - - assert_multiple_states_equal_without_context( - states["script.can_cancel_this_one"], hist["script.can_cancel_this_one"] - ) - assert_multiple_states_equal_without_context_and_last_changed( - states["thermostat.test"], hist["thermostat.test"] - ) - assert_multiple_states_equal_without_context_and_last_changed( - states["thermostat.test2"], hist["thermostat.test2"] - ) - - -async def test_get_significant_states_with_initial( - hass: HomeAssistant, legacy_hass_history -) -> None: - """Test that only significant states are returned. - - We should get back every thermostat change that - includes an attribute change, but only the state updates for - media player (attribute changes are not significant and not returned). - """ - zero, four, states = await async_record_states(hass) - one = zero + timedelta(seconds=1) - one_with_microsecond = zero + timedelta(seconds=1, microseconds=1) - one_and_half = zero + timedelta(seconds=1.5) - for entity_id in states: - if entity_id == "media_player.test": - states[entity_id] = states[entity_id][1:] - for state in states[entity_id]: - if state.last_changed in (one, one_with_microsecond): - state.last_changed = one_and_half - state.last_updated = one_and_half - - hist = get_significant_states( - hass, - one_and_half, - four, - include_start_time_state=True, - entity_ids=list(states), - ) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def test_get_significant_states_without_initial( - hass: HomeAssistant, legacy_hass_history -) -> None: - """Test that only significant states are returned. - - We should get back every thermostat change that - includes an attribute change, but only the state updates for - media player (attribute changes are not significant and not returned). 
- """ - zero, four, states = await async_record_states(hass) - one = zero + timedelta(seconds=1) - one_with_microsecond = zero + timedelta(seconds=1, microseconds=1) - one_and_half = zero + timedelta(seconds=1.5) - for entity_id in states: - states[entity_id] = list( - filter( - lambda s: s.last_changed not in (one, one_with_microsecond), - states[entity_id], - ) - ) - del states["media_player.test2"] - - hist = get_significant_states( - hass, - one_and_half, - four, - include_start_time_state=False, - entity_ids=list(states), - ) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def test_get_significant_states_entity_id( - hass: HomeAssistant, hass_history -) -> None: - """Test that only significant states are returned for one entity.""" - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - zero, four, states = await async_record_states(hass) - del states["media_player.test2"] - del states["media_player.test3"] - del states["thermostat.test"] - del states["thermostat.test2"] - del states["script.can_cancel_this_one"] - - hist = get_significant_states(hass, zero, four, ["media_player.test"]) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def test_get_significant_states_multiple_entity_ids( - hass: HomeAssistant, legacy_hass_history -) -> None: - """Test that only significant states are returned for one entity.""" - zero, four, states = await async_record_states(hass) - del states["media_player.test2"] - del states["media_player.test3"] - del states["thermostat.test2"] - del states["script.can_cancel_this_one"] - - hist = get_significant_states( - hass, - zero, - four, - ["media_player.test", "thermostat.test"], - ) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def test_get_significant_states_are_ordered( - hass: HomeAssistant, legacy_hass_history -) -> None: - """Test order of results from get_significant_states. - - When entity ids are given, the results should be returned with the data - in the same order. 
- """ - zero, four, _states = await async_record_states(hass) - entity_ids = ["media_player.test", "media_player.test2"] - hist = get_significant_states(hass, zero, four, entity_ids) - assert list(hist.keys()) == entity_ids - entity_ids = ["media_player.test2", "media_player.test"] - hist = get_significant_states(hass, zero, four, entity_ids) - assert list(hist.keys()) == entity_ids - - -async def test_get_significant_states_only( - hass: HomeAssistant, legacy_hass_history -) -> None: - """Test significant states when significant_states_only is set.""" - entity_id = "sensor.test" - - async def set_state(state, **kwargs): - """Set the state.""" - hass.states.async_set(entity_id, state, **kwargs) - await async_wait_recording_done(hass) - return hass.states.get(entity_id) - - start = dt_util.utcnow() - timedelta(minutes=4) - points = [start + timedelta(minutes=i) for i in range(1, 4)] - - states = [] - with freeze_time(start) as freezer: - await set_state("123", attributes={"attribute": 10.64}) - - freezer.move_to(points[0]) - # Attributes are different, state not - states.append(await set_state("123", attributes={"attribute": 21.42})) - - freezer.move_to(points[1]) - # state is different, attributes not - states.append(await set_state("32", attributes={"attribute": 21.42})) - - freezer.move_to(points[2]) - # everything is different - states.append(await set_state("412", attributes={"attribute": 54.23})) - - hist = get_significant_states( - hass, - start, - significant_changes_only=True, - entity_ids=list({state.entity_id for state in states}), - ) - - assert len(hist[entity_id]) == 2 - assert not any( - state.last_updated == states[0].last_updated for state in hist[entity_id] - ) - assert any( - state.last_updated == states[1].last_updated for state in hist[entity_id] - ) - assert any( - state.last_updated == states[2].last_updated for state in hist[entity_id] - ) - - hist = get_significant_states( - hass, - start, - significant_changes_only=False, - entity_ids=list({state.entity_id for state in states}), - ) - - assert len(hist[entity_id]) == 3 - assert_multiple_states_equal_without_context_and_last_changed( - states, hist[entity_id] - ) - - -async def async_record_states( - hass: HomeAssistant, -) -> tuple[datetime, datetime, dict[str, list[State | None]]]: - """Record some test states. - - We inject a bunch of state updates from media player, zone and - thermostat. 
- """ - mp = "media_player.test" - mp2 = "media_player.test2" - mp3 = "media_player.test3" - therm = "thermostat.test" - therm2 = "thermostat.test2" - zone = "zone.home" - script_c = "script.can_cancel_this_one" - - async def async_set_state(entity_id, state, **kwargs): - """Set the state.""" - hass.states.async_set(entity_id, state, **kwargs) - await async_wait_recording_done(hass) - return hass.states.get(entity_id) - - zero = dt_util.utcnow() - one = zero + timedelta(seconds=1) - two = one + timedelta(seconds=1) - three = two + timedelta(seconds=1) - four = three + timedelta(seconds=1) - - states = {therm: [], therm2: [], mp: [], mp2: [], mp3: [], script_c: []} - with freeze_time(one) as freezer: - states[mp].append( - await async_set_state( - mp, "idle", attributes={"media_title": str(sentinel.mt1)} - ) - ) - states[mp2].append( - await async_set_state( - mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)} - ) - ) - states[mp3].append( - await async_set_state( - mp3, "idle", attributes={"media_title": str(sentinel.mt1)} - ) - ) - states[therm].append( - await async_set_state(therm, 20, attributes={"current_temperature": 19.5}) - ) - - freezer.move_to(one + timedelta(microseconds=1)) - states[mp].append( - await async_set_state( - mp, "YouTube", attributes={"media_title": str(sentinel.mt2)} - ) - ) - - freezer.move_to(two) - # This state will be skipped only different in time - await async_set_state( - mp, "YouTube", attributes={"media_title": str(sentinel.mt3)} - ) - # This state will be skipped because domain is excluded - await async_set_state(zone, "zoning") - states[script_c].append( - await async_set_state(script_c, "off", attributes={"can_cancel": True}) - ) - states[therm].append( - await async_set_state(therm, 21, attributes={"current_temperature": 19.8}) - ) - states[therm2].append( - await async_set_state(therm2, 20, attributes={"current_temperature": 19}) - ) - - freezer.move_to(three) - states[mp].append( - await async_set_state( - mp, "Netflix", attributes={"media_title": str(sentinel.mt4)} - ) - ) - states[mp3].append( - await async_set_state( - mp3, "Netflix", attributes={"media_title": str(sentinel.mt3)} - ) - ) - # Attributes changed even though state is the same - states[therm].append( - await async_set_state(therm, 21, attributes={"current_temperature": 20}) - ) - - return zero, four, states - - -async def test_fetch_period_api( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator -) -> None: - """Test the fetch period view for history.""" - await async_setup_component(hass, "history", {}) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - client = await hass_client() - response = await client.get( - f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=sensor.power" - ) - assert response.status == HTTPStatus.OK - - -async def test_fetch_period_api_with_minimal_response( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator -) -> None: - """Test the fetch period view for history with minimal_response.""" - now = dt_util.utcnow() - await async_setup_component(hass, "history", {}) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - hass.states.async_set("sensor.power", 0, {"attr": "any"}) - await async_wait_recording_done(hass) - hass.states.async_set("sensor.power", 50, {"attr": "any"}) - await async_wait_recording_done(hass) - hass.states.async_set("sensor.power", 
23, {"attr": "any"}) - last_changed = hass.states.get("sensor.power").last_changed - await async_wait_recording_done(hass) - hass.states.async_set("sensor.power", 23, {"attr": "any"}) - await async_wait_recording_done(hass) - client = await hass_client() - response = await client.get( - f"/api/history/period/{now.isoformat()}?filter_entity_id=sensor.power&minimal_response&no_attributes" - ) - assert response.status == HTTPStatus.OK - response_json = await response.json() - assert len(response_json[0]) == 3 - state_list = response_json[0] - - assert state_list[0]["entity_id"] == "sensor.power" - assert state_list[0]["attributes"] == {} - assert state_list[0]["state"] == "0" - - assert "attributes" not in state_list[1] - assert "entity_id" not in state_list[1] - assert state_list[1]["state"] == "50" - - assert "attributes" not in state_list[2] - assert "entity_id" not in state_list[2] - assert state_list[2]["state"] == "23" - assert state_list[2]["last_changed"] == json.dumps( - process_timestamp(last_changed), - cls=JSONEncoder, - ).replace('"', "") - - -async def test_fetch_period_api_with_no_timestamp( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator -) -> None: - """Test the fetch period view for history with no timestamp.""" - await async_setup_component(hass, "history", {}) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - client = await hass_client() - response = await client.get("/api/history/period?filter_entity_id=sensor.power") - assert response.status == HTTPStatus.OK - - -async def test_fetch_period_api_with_include_order( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator -) -> None: - """Test the fetch period view for history.""" - await async_setup_component( - hass, - "history", - { - "history": { - "use_include_order": True, - "include": {"entities": ["light.kitchen"]}, - } - }, - ) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - client = await hass_client() - response = await client.get( - f"/api/history/period/{dt_util.utcnow().isoformat()}", - params={"filter_entity_id": "non.existing,something.else"}, - ) - assert response.status == HTTPStatus.OK - - -async def test_entity_ids_limit_via_api( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator -) -> None: - """Test limiting history to entity_ids.""" - await async_setup_component( - hass, - "history", - {"history": {}}, - ) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - hass.states.async_set("light.kitchen", "on") - hass.states.async_set("light.cow", "on") - hass.states.async_set("light.nomatch", "on") - - await async_wait_recording_done(hass) - - client = await hass_client() - response = await client.get( - f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow", - ) - assert response.status == HTTPStatus.OK - response_json = await response.json() - assert len(response_json) == 2 - assert response_json[0][0]["entity_id"] == "light.kitchen" - assert response_json[1][0]["entity_id"] == "light.cow" - - -async def test_entity_ids_limit_via_api_with_skip_initial_state( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator -) -> None: - """Test limiting history to entity_ids with skip_initial_state.""" - await async_setup_component( - hass, - "history", - {"history": {}}, - ) 
- instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - hass.states.async_set("light.kitchen", "on") - hass.states.async_set("light.cow", "on") - hass.states.async_set("light.nomatch", "on") - - await async_wait_recording_done(hass) - - client = await hass_client() - response = await client.get( - f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state", - ) - assert response.status == HTTPStatus.OK - response_json = await response.json() - assert len(response_json) == 0 - - when = dt_util.utcnow() - timedelta(minutes=1) - response = await client.get( - f"/api/history/period/{when.isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state", - ) - assert response.status == HTTPStatus.OK - response_json = await response.json() - assert len(response_json) == 2 - assert response_json[0][0]["entity_id"] == "light.kitchen" - assert response_json[1][0]["entity_id"] == "light.cow" - - -async def test_history_during_period( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator -) -> None: - """Test history_during_period.""" - now = dt_util.utcnow() - - await async_setup_component(hass, "history", {}) - await async_setup_component(hass, "sensor", {}) - await async_recorder_block_till_done(hass) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", "off", attributes={"any": "attr"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", "off", attributes={"any": "changed"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", "off", attributes={"any": "again"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) - await async_wait_recording_done(hass) - - await async_wait_recording_done(hass) - - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": "history/history_during_period", - "start_time": now.isoformat(), - "end_time": now.isoformat(), - "entity_ids": ["sensor.test"], - "include_start_time_state": True, - "significant_changes_only": False, - "no_attributes": True, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == {} - - await client.send_json( - { - "id": 2, - "type": "history/history_during_period", - "start_time": now.isoformat(), - "entity_ids": ["sensor.test"], - "include_start_time_state": True, - "significant_changes_only": False, - "no_attributes": True, - "minimal_response": True, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 2 - - sensor_test_history = response["result"]["sensor.test"] - assert len(sensor_test_history) == 3 - - assert sensor_test_history[0]["s"] == "on" - assert sensor_test_history[0]["a"] == {} - assert isinstance(sensor_test_history[0]["lu"], float) - assert ( - "lc" not in sensor_test_history[0] - ) # skipped if the same a last_updated (lu) - - assert "a" not in sensor_test_history[1] - assert sensor_test_history[1]["s"] == "off" - assert isinstance(sensor_test_history[1]["lu"], float) - assert ( - "lc" not in sensor_test_history[1] - ) # skipped if the same a last_updated (lu) - - assert sensor_test_history[2]["s"] == "on" - assert 
"a" not in sensor_test_history[2] - - await client.send_json( - { - "id": 3, - "type": "history/history_during_period", - "start_time": now.isoformat(), - "entity_ids": ["sensor.test"], - "include_start_time_state": True, - "significant_changes_only": False, - "no_attributes": False, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 3 - sensor_test_history = response["result"]["sensor.test"] - - assert len(sensor_test_history) == 5 - - assert sensor_test_history[0]["s"] == "on" - assert sensor_test_history[0]["a"] == {"any": "attr"} - assert isinstance(sensor_test_history[0]["lu"], float) - assert ( - "lc" not in sensor_test_history[0] - ) # skipped if the same a last_updated (lu) - - assert sensor_test_history[1]["s"] == "off" - assert isinstance(sensor_test_history[1]["lu"], float) - assert ( - "lc" not in sensor_test_history[1] - ) # skipped if the same a last_updated (lu) - assert sensor_test_history[1]["a"] == {"any": "attr"} - - assert sensor_test_history[4]["s"] == "on" - assert sensor_test_history[4]["a"] == {"any": "attr"} - - await client.send_json( - { - "id": 4, - "type": "history/history_during_period", - "start_time": now.isoformat(), - "entity_ids": ["sensor.test"], - "include_start_time_state": True, - "significant_changes_only": True, - "no_attributes": False, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 4 - sensor_test_history = response["result"]["sensor.test"] - - assert len(sensor_test_history) == 3 - - assert sensor_test_history[0]["s"] == "on" - assert sensor_test_history[0]["a"] == {"any": "attr"} - assert isinstance(sensor_test_history[0]["lu"], float) - assert ( - "lc" not in sensor_test_history[0] - ) # skipped if the same a last_updated (lu) - - assert sensor_test_history[1]["s"] == "off" - assert isinstance(sensor_test_history[1]["lu"], float) - assert ( - "lc" not in sensor_test_history[1] - ) # skipped if the same a last_updated (lu) - assert sensor_test_history[1]["a"] == {"any": "attr"} - - assert sensor_test_history[2]["s"] == "on" - assert sensor_test_history[2]["a"] == {"any": "attr"} - - -async def test_history_during_period_impossible_conditions( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator -) -> None: - """Test history_during_period returns when condition cannot be true.""" - await async_setup_component(hass, "history", {}) - await async_setup_component(hass, "sensor", {}) - await async_recorder_block_till_done(hass) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", "off", attributes={"any": "attr"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", "off", attributes={"any": "changed"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", "off", attributes={"any": "again"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) - await async_wait_recording_done(hass) - - await async_wait_recording_done(hass) - - after = dt_util.utcnow() - - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": "history/history_during_period", - "start_time": after.isoformat(), - "end_time": after.isoformat(), - "entity_ids": ["sensor.test"], - 
"include_start_time_state": False, - "significant_changes_only": False, - "no_attributes": True, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 1 - assert response["result"] == {} - - future = dt_util.utcnow() + timedelta(hours=10) - - await client.send_json( - { - "id": 2, - "type": "history/history_during_period", - "start_time": future.isoformat(), - "entity_ids": ["sensor.test"], - "include_start_time_state": True, - "significant_changes_only": True, - "no_attributes": True, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 2 - assert response["result"] == {} - - -@pytest.mark.parametrize( - "time_zone", ["UTC", "Europe/Berlin", "America/Chicago", "US/Hawaii"] -) -async def test_history_during_period_significant_domain( - hass: HomeAssistant, - recorder_mock: Recorder, - hass_ws_client: WebSocketGenerator, - time_zone, -) -> None: - """Test history_during_period with climate domain.""" - await hass.config.async_set_time_zone(time_zone) - now = dt_util.utcnow() - - await async_setup_component(hass, "history", {}) - await async_setup_component(hass, "sensor", {}) - await async_recorder_block_till_done(hass) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - hass.states.async_set("climate.test", "on", attributes={"temperature": "1"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("climate.test", "off", attributes={"temperature": "2"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("climate.test", "off", attributes={"temperature": "3"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("climate.test", "off", attributes={"temperature": "4"}) - await async_recorder_block_till_done(hass) - hass.states.async_set("climate.test", "on", attributes={"temperature": "5"}) - await async_wait_recording_done(hass) - - await async_wait_recording_done(hass) - - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": "history/history_during_period", - "start_time": now.isoformat(), - "end_time": now.isoformat(), - "entity_ids": ["climate.test"], - "include_start_time_state": True, - "significant_changes_only": False, - "no_attributes": True, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == {} - - await client.send_json( - { - "id": 2, - "type": "history/history_during_period", - "start_time": now.isoformat(), - "entity_ids": ["climate.test"], - "include_start_time_state": True, - "significant_changes_only": False, - "no_attributes": True, - "minimal_response": True, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 2 - - sensor_test_history = response["result"]["climate.test"] - assert len(sensor_test_history) == 5 - - assert sensor_test_history[0]["s"] == "on" - assert sensor_test_history[0]["a"] == {} - assert isinstance(sensor_test_history[0]["lu"], float) - assert ( - "lc" not in sensor_test_history[0] - ) # skipped if the same a last_updated (lu) - - assert "a" in sensor_test_history[1] - assert sensor_test_history[1]["s"] == "off" - assert ( - "lc" not in sensor_test_history[1] - ) # skipped if the same a last_updated (lu) - - assert sensor_test_history[4]["s"] == "on" - assert sensor_test_history[4]["a"] == {} - - await client.send_json( - { - "id": 3, - "type": "history/history_during_period", - "start_time": now.isoformat(), 
- "entity_ids": ["climate.test"], - "include_start_time_state": True, - "significant_changes_only": False, - "no_attributes": False, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 3 - sensor_test_history = response["result"]["climate.test"] - - assert len(sensor_test_history) == 5 - - assert sensor_test_history[0]["s"] == "on" - assert sensor_test_history[0]["a"] == {"temperature": "1"} - assert isinstance(sensor_test_history[0]["lu"], float) - assert ( - "lc" not in sensor_test_history[0] - ) # skipped if the same a last_updated (lu) - - assert sensor_test_history[1]["s"] == "off" - assert isinstance(sensor_test_history[1]["lu"], float) - assert ( - "lc" not in sensor_test_history[1] - ) # skipped if the same a last_updated (lu) - assert sensor_test_history[1]["a"] == {"temperature": "2"} - - assert sensor_test_history[4]["s"] == "on" - assert sensor_test_history[4]["a"] == {"temperature": "5"} - - await client.send_json( - { - "id": 4, - "type": "history/history_during_period", - "start_time": now.isoformat(), - "entity_ids": ["climate.test"], - "include_start_time_state": True, - "significant_changes_only": True, - "no_attributes": False, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 4 - sensor_test_history = response["result"]["climate.test"] - - assert len(sensor_test_history) == 5 - - assert sensor_test_history[0]["s"] == "on" - assert sensor_test_history[0]["a"] == {"temperature": "1"} - assert isinstance(sensor_test_history[0]["lu"], float) - assert ( - "lc" not in sensor_test_history[0] - ) # skipped if the same a last_updated (lu) - - assert sensor_test_history[1]["s"] == "off" - assert isinstance(sensor_test_history[1]["lu"], float) - assert ( - "lc" not in sensor_test_history[1] - ) # skipped if the same a last_updated (lu) - assert sensor_test_history[1]["a"] == {"temperature": "2"} - - assert sensor_test_history[2]["s"] == "off" - assert sensor_test_history[2]["a"] == {"temperature": "3"} - - assert sensor_test_history[3]["s"] == "off" - assert sensor_test_history[3]["a"] == {"temperature": "4"} - - assert sensor_test_history[4]["s"] == "on" - assert sensor_test_history[4]["a"] == {"temperature": "5"} - - # Test we impute the state time state - later = dt_util.utcnow() - await client.send_json( - { - "id": 5, - "type": "history/history_during_period", - "start_time": later.isoformat(), - "entity_ids": ["climate.test"], - "include_start_time_state": True, - "significant_changes_only": True, - "no_attributes": False, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 5 - sensor_test_history = response["result"]["climate.test"] - - assert len(sensor_test_history) == 1 - - assert sensor_test_history[0]["s"] == "on" - assert sensor_test_history[0]["a"] == {"temperature": "5"} - assert sensor_test_history[0]["lu"] == later.timestamp() - assert ( - "lc" not in sensor_test_history[0] - ) # skipped if the same a last_updated (lu) - - -async def test_history_during_period_bad_start_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator -) -> None: - """Test history_during_period bad state time.""" - await async_setup_component( - hass, - "history", - {"history": {}}, - ) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": 
"history/history_during_period", - "entity_ids": ["sensor.pet"], - "start_time": "cats", - } - ) - response = await client.receive_json() - assert not response["success"] - assert response["error"]["code"] == "invalid_start_time" - - -async def test_history_during_period_bad_end_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator -) -> None: - """Test history_during_period bad end time.""" - now = dt_util.utcnow() - - await async_setup_component( - hass, - "history", - {"history": {}}, - ) - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": "history/history_during_period", - "entity_ids": ["sensor.pet"], - "start_time": now.isoformat(), - "end_time": "dogs", - } - ) - response = await client.receive_json() - assert not response["success"] - assert response["error"]["code"] == "invalid_end_time" diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index f86c04b3e5b..d60203676e6 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -437,10 +437,10 @@ async def test_measure(recorder_mock: Recorder, hass: HomeAssistant) -> None: await async_update_entity(hass, f"sensor.sensor{i}") await hass.async_block_till_done() - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" + assert hass.states.get("sensor.sensor1").state == "0.5" + assert 0.499 < float(hass.states.get("sensor.sensor2").state) < 0.501 assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "83.3" + assert hass.states.get("sensor.sensor4").state == "50.0" async def test_async_on_entire_period( @@ -459,7 +459,11 @@ async def test_async_on_entire_period( def _fake_states(*args, **kwargs): return { "binary_sensor.test_on_id": [ - ha.State("binary_sensor.test_on_id", "on", last_changed=start_time), + ha.State( + "binary_sensor.test_on_id", + "on", + last_changed=(start_time - timedelta(seconds=10)), + ), ha.State("binary_sensor.test_on_id", "on", last_changed=t0), ha.State("binary_sensor.test_on_id", "on", last_changed=t1), ha.State("binary_sensor.test_on_id", "on", last_changed=t2), @@ -1254,10 +1258,10 @@ async def test_measure_sliding_window( await async_update_entity(hass, f"sensor.sensor{i}") await hass.async_block_till_done() - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" - assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "41.7" + assert hass.states.get("sensor.sensor1").state == "0.0" + assert float(hass.states.get("sensor.sensor2").state) == 0 + assert hass.states.get("sensor.sensor3").state == "0" + assert hass.states.get("sensor.sensor4").state == "0.0" past_next_update = start_time + timedelta(minutes=30) with ( @@ -1268,12 +1272,12 @@ async def test_measure_sliding_window( freeze_time(past_next_update), ): async_fire_time_changed(hass, past_next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" - assert hass.states.get("sensor.sensor3").state == "2" - assert 
hass.states.get("sensor.sensor4").state == "41.7" + assert hass.states.get("sensor.sensor1").state == "0.17" + assert 0.166 < float(hass.states.get("sensor.sensor2").state) < 0.167 + assert hass.states.get("sensor.sensor3").state == "1" + assert hass.states.get("sensor.sensor4").state == "8.3" async def test_measure_from_end_going_backwards( @@ -1355,10 +1359,10 @@ async def test_measure_from_end_going_backwards( await async_update_entity(hass, f"sensor.sensor{i}") await hass.async_block_till_done() - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" - assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "83.3" + assert hass.states.get("sensor.sensor1").state == "0.0" + assert float(hass.states.get("sensor.sensor2").state) == 0 + assert hass.states.get("sensor.sensor3").state == "0" + assert hass.states.get("sensor.sensor4").state == "0.0" past_next_update = start_time + timedelta(minutes=30) with ( @@ -1369,12 +1373,12 @@ async def test_measure_from_end_going_backwards( freeze_time(past_next_update), ): async_fire_time_changed(hass, past_next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" - assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "83.3" + assert hass.states.get("sensor.sensor1").state == "0.17" + assert 0.166 < float(hass.states.get("sensor.sensor2").state) < 0.167 + assert hass.states.get("sensor.sensor3").state == "1" + assert 16.6 <= float(hass.states.get("sensor.sensor4").state) <= 16.7 async def test_measure_cet(recorder_mock: Recorder, hass: HomeAssistant) -> None: @@ -1403,7 +1407,7 @@ async def test_measure_cet(recorder_mock: Recorder, hass: HomeAssistant) -> None "homeassistant.components.recorder.history.state_changes_during_period", _fake_states, ), - freeze_time(start_time), + freeze_time(start_time + timedelta(minutes=60)), ): await async_setup_component( hass, @@ -1455,10 +1459,10 @@ async def test_measure_cet(recorder_mock: Recorder, hass: HomeAssistant) -> None await async_update_entity(hass, f"sensor.sensor{i}") await hass.async_block_till_done() - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" + assert hass.states.get("sensor.sensor1").state == "0.5" + assert 0.499 < float(hass.states.get("sensor.sensor2").state) < 0.501 assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "83.3" + assert hass.states.get("sensor.sensor4").state == "50.0" @pytest.mark.parametrize("time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii"]) @@ -1537,18 +1541,19 @@ async def test_end_time_with_microseconds_zeroed( await hass.async_block_till_done() await async_update_entity(hass, "sensor.heatpump_compressor_today") await hass.async_block_till_done() - assert hass.states.get("sensor.heatpump_compressor_today").state == "1.83" + assert hass.states.get("sensor.heatpump_compressor_today").state == "0.5" assert ( - hass.states.get("sensor.heatpump_compressor_today2").state - == "1.83333333333333" + 0.499 + < float(hass.states.get("sensor.heatpump_compressor_today2").state) + < 0.501 ) async_fire_time_changed(hass, time_200) - await 
hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.heatpump_compressor_today").state == "1.83" + assert hass.states.get("sensor.heatpump_compressor_today").state == "0.5" assert ( - hass.states.get("sensor.heatpump_compressor_today2").state - == "1.83333333333333" + 0.499 + < float(hass.states.get("sensor.heatpump_compressor_today2").state) + < 0.501 ) hass.states.async_set("binary_sensor.heatpump_compressor_state", "off") await hass.async_block_till_done() @@ -1557,10 +1562,11 @@ async def test_end_time_with_microseconds_zeroed( with freeze_time(time_400): async_fire_time_changed(hass, time_400) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.heatpump_compressor_today").state == "1.83" + assert hass.states.get("sensor.heatpump_compressor_today").state == "0.5" assert ( - hass.states.get("sensor.heatpump_compressor_today2").state - == "1.83333333333333" + 0.499 + < float(hass.states.get("sensor.heatpump_compressor_today2").state) + < 0.501 ) hass.states.async_set("binary_sensor.heatpump_compressor_state", "on") await async_wait_recording_done(hass) @@ -1568,10 +1574,11 @@ async def test_end_time_with_microseconds_zeroed( with freeze_time(time_600): async_fire_time_changed(hass, time_600) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.heatpump_compressor_today").state == "3.83" + assert hass.states.get("sensor.heatpump_compressor_today").state == "2.5" assert ( - hass.states.get("sensor.heatpump_compressor_today2").state - == "3.83333333333333" + 2.499 + < float(hass.states.get("sensor.heatpump_compressor_today2").state) + < 2.501 ) rolled_to_next_day = start_of_today + timedelta(days=1) diff --git a/tests/components/hive/test_config_flow.py b/tests/components/hive/test_config_flow.py index fd6eb564a39..8749954c364 100644 --- a/tests/components/hive/test_config_flow.py +++ b/tests/components/hive/test_config_flow.py @@ -25,52 +25,6 @@ MFA_RESEND_CODE = "0000" MFA_INVALID_CODE = "HIVE" -async def test_import_flow(hass: HomeAssistant) -> None: - """Check import flow.""" - - with ( - patch( - "homeassistant.components.hive.config_flow.Auth.login", - return_value={ - "ChallengeName": "SUCCESS", - "AuthenticationResult": { - "RefreshToken": "mock-refresh-token", - "AccessToken": "mock-access-token", - }, - }, - ), - patch( - "homeassistant.components.hive.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.hive.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == USERNAME - assert result["data"] == { - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, - "tokens": { - "AuthenticationResult": { - "AccessToken": "mock-access-token", - "RefreshToken": "mock-refresh-token", - }, - "ChallengeName": "SUCCESS", - }, - } - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - async def test_user_flow(hass: HomeAssistant) -> None: """Test the user flow.""" result = await hass.config_entries.flow.async_init( @@ -91,9 +45,6 @@ async def test_user_flow(hass: HomeAssistant) -> None: }, }, ), - patch( - "homeassistant.components.hive.async_setup", return_value=True - ) as 
mock_setup, patch( "homeassistant.components.hive.async_setup_entry", return_value=True, @@ -119,7 +70,6 @@ async def test_user_flow(hass: HomeAssistant) -> None: }, } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 assert len(hass.config_entries.async_entries(DOMAIN)) == 1 @@ -185,9 +135,6 @@ async def test_user_flow_2fa(hass: HomeAssistant) -> None: "mock-device-password", ], ), - patch( - "homeassistant.components.hive.async_setup", return_value=True - ) as mock_setup, patch( "homeassistant.components.hive.async_setup_entry", return_value=True, @@ -220,7 +167,6 @@ async def test_user_flow_2fa(hass: HomeAssistant) -> None: ], } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 assert len(hass.config_entries.async_entries(DOMAIN)) == 1 @@ -246,14 +192,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: "homeassistant.components.hive.config_flow.Auth.login", side_effect=hive_exceptions.HiveInvalidPassword(), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config.unique_id, - }, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_password"} @@ -305,14 +244,7 @@ async def test_reauth_2fa_flow(hass: HomeAssistant) -> None: "homeassistant.components.hive.config_flow.Auth.login", side_effect=hive_exceptions.HiveInvalidPassword(), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config.unique_id, - }, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_password"} @@ -476,9 +408,6 @@ async def test_user_flow_2fa_send_new_code(hass: HomeAssistant) -> None: "mock-device-password", ], ), - patch( - "homeassistant.components.hive.async_setup", return_value=True - ) as mock_setup, patch( "homeassistant.components.hive.async_setup_entry", return_value=True, @@ -507,7 +436,6 @@ async def test_user_flow_2fa_send_new_code(hass: HomeAssistant) -> None: "mock-device-password", ], } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 assert len(hass.config_entries.async_entries(DOMAIN)) == 1 diff --git a/tests/components/holiday/test_config_flow.py b/tests/components/holiday/test_config_flow.py index 14e2b68234c..466dbaffd8b 100644 --- a/tests/components/holiday/test_config_flow.py +++ b/tests/components/holiday/test_config_flow.py @@ -230,13 +230,7 @@ async def test_reconfigure(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( @@ -267,13 +261,7 @@ async def test_reconfigure_incorrect_language( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( @@ 
-308,13 +296,7 @@ async def test_reconfigure_entry_exists( ) entry2.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/home_connect/conftest.py b/tests/components/home_connect/conftest.py index c8137a044a1..2ac8c851e1b 100644 --- a/tests/components/home_connect/conftest.py +++ b/tests/components/home_connect/conftest.py @@ -67,6 +67,20 @@ def mock_config_entry(token_entry: dict[str, Any]) -> MockConfigEntry: "auth_implementation": FAKE_AUTH_IMPL, "token": token_entry, }, + minor_version=2, + ) + + +@pytest.fixture(name="config_entry_v1_1") +def mock_config_entry_v1_1(token_entry: dict[str, Any]) -> MockConfigEntry: + """Fixture for a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + "auth_implementation": FAKE_AUTH_IMPL, + "token": token_entry, + }, + minor_version=1, ) @@ -164,13 +178,20 @@ def mock_problematic_appliance(request: pytest.FixtureRequest) -> Mock: ) mock.name = app type(mock).status = PropertyMock(return_value={}) + mock.get.side_effect = HomeConnectError mock.get_programs_active.side_effect = HomeConnectError mock.get_programs_available.side_effect = HomeConnectError mock.start_program.side_effect = HomeConnectError + mock.select_program.side_effect = HomeConnectError + mock.pause_program.side_effect = HomeConnectError mock.stop_program.side_effect = HomeConnectError + mock.set_options_active_program.side_effect = HomeConnectError + mock.set_options_selected_program.side_effect = HomeConnectError mock.get_status.side_effect = HomeConnectError mock.get_settings.side_effect = HomeConnectError mock.set_setting.side_effect = HomeConnectError + mock.set_setting.side_effect = HomeConnectError + mock.execute_command.side_effect = HomeConnectError return mock diff --git a/tests/components/home_connect/fixtures/settings.json b/tests/components/home_connect/fixtures/settings.json index eb6a5f5ff98..1b9bec57276 100644 --- a/tests/components/home_connect/fixtures/settings.json +++ b/tests/components/home_connect/fixtures/settings.json @@ -111,5 +111,51 @@ } ] } + }, + "FridgeFreezer": { + "data": { + "settings": [ + { + "key": "Refrigeration.FridgeFreezer.Setting.SuperModeFreezer", + "value": false, + "type": "Boolean", + "constraints": { + "access": "readWrite" + } + }, + { + "key": "Refrigeration.FridgeFreezer.Setting.SuperModeRefrigerator", + "value": false, + "type": "Boolean", + "constraints": { + "access": "readWrite" + } + }, + { + "key": "Refrigeration.Common.Setting.Dispenser.Enabled", + "value": false, + "type": "Boolean", + "constraints": { + "access": "readWrite" + } + }, + { + "key": "Refrigeration.Common.Setting.Light.External.Power", + "value": true, + "type": "Boolean" + }, + { + "key": "Refrigeration.Common.Setting.Light.External.Brightness", + "value": 70, + "unit": "%", + "type": "Double", + "constraints": { + "min": 0, + "max": 100, + "access": "readWrite" + } + } + ] + } } } diff --git a/tests/components/home_connect/fixtures/status.json b/tests/components/home_connect/fixtures/status.json index 8eac586a308..efdbde6cd97 100644 --- a/tests/components/home_connect/fixtures/status.json +++ b/tests/components/home_connect/fixtures/status.json @@ -10,6 +10,10 @@ { "key": "BSH.Common.Status.DoorState", "value": 
"BSH.Common.EnumType.DoorState.Closed" + }, + { + "key": "Refrigeration.Common.Status.Door.Refrigerator", + "value": "BSH.Common.EnumType.DoorState.Open" } ] } diff --git a/tests/components/home_connect/snapshots/test_diagnostics.ambr b/tests/components/home_connect/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..99f10fe2847 --- /dev/null +++ b/tests/components/home_connect/snapshots/test_diagnostics.ambr @@ -0,0 +1,468 @@ +# serializer version: 1 +# name: test_async_get_config_entry_diagnostics + dict({ + 'BOSCH-000000000-000000000000': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000001': dict({ + 'programs': list([ + 'LaundryCare.WasherDryer.Program.Mix', + 'LaundryCare.Washer.Option.Temperature', + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000002': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000003': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000004': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Setting.AmbientLightBrightness': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'BSH.Common.Setting.AmbientLightColor': dict({ + 'type': 'BSH.Common.EnumType.AmbientLightColor', + 'value': 'BSH.Common.EnumType.AmbientLightColor.Color43', + }), + 'BSH.Common.Setting.AmbientLightCustomColor': dict({ + 'type': 'String', + 'value': '#4a88f8', + }), + 'BSH.Common.Setting.AmbientLightEnabled': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'BSH.Common.Setting.ColorTemperature': dict({ + 'type': 'BSH.Common.EnumType.ColorTemperature', + 
'value': 'Cooking.Hood.EnumType.ColorTemperature.warmToNeutral', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Cooking.Common.Setting.Lighting': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'Cooking.Common.Setting.LightingBrightness': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'Cooking.Hood.Setting.ColorTemperaturePercent': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000005': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000006': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS01OVN1-43E0065FE245': dict({ + 'programs': list([ + 'Cooking.Oven.Program.HeatingMode.HotAir', + 'Cooking.Oven.Program.HeatingMode.TopBottomHeating', + 'Cooking.Oven.Program.HeatingMode.PizzaSetting', + ]), + 'status': dict({ + 'BSH.Common.Root.ActiveProgram': dict({ + 'value': 'Cooking.Oven.Program.HeatingMode.HotAir', + }), + 'BSH.Common.Setting.PowerState': dict({ + 'type': 'BSH.Common.EnumType.PowerState', + 'value': 'BSH.Common.EnumType.PowerState.On', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS04DYR1-831694AE3C5A': dict({ + 'programs': list([ + 'LaundryCare.Dryer.Program.Cotton', + 'LaundryCare.Dryer.Program.Synthetic', + 'LaundryCare.Dryer.Program.Mix', + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 
'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS06COM1-D70390681C2C': dict({ + 'programs': list([ + 'ConsumerProducts.CoffeeMaker.Program.Beverage.Espresso', + 'ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoMacchiato', + 'ConsumerProducts.CoffeeMaker.Program.Beverage.Coffee', + 'ConsumerProducts.CoffeeMaker.Program.Beverage.Cappuccino', + 'ConsumerProducts.CoffeeMaker.Program.Beverage.LatteMacchiato', + 'ConsumerProducts.CoffeeMaker.Program.Beverage.CaffeLatte', + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'SIEMENS-HCS02DWH1-6BE58C26DCC1': dict({ + 'programs': list([ + 'Dishcare.Dishwasher.Program.Auto1', + 'Dishcare.Dishwasher.Program.Auto2', + 'Dishcare.Dishwasher.Program.Auto3', + 'Dishcare.Dishwasher.Program.Eco50', + 'Dishcare.Dishwasher.Program.Quick45', + ]), + 'status': dict({ + 'BSH.Common.Setting.AmbientLightBrightness': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'BSH.Common.Setting.AmbientLightColor': dict({ + 'type': 'BSH.Common.EnumType.AmbientLightColor', + 'value': 'BSH.Common.EnumType.AmbientLightColor.Color43', + }), + 'BSH.Common.Setting.AmbientLightCustomColor': dict({ + 'type': 'String', + 'value': '#4a88f8', + }), + 'BSH.Common.Setting.AmbientLightEnabled': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'BSH.Common.Setting.ChildLock': dict({ + 'type': 'Boolean', + 'value': False, + }), + 'BSH.Common.Setting.PowerState': dict({ + 'type': 'BSH.Common.EnumType.PowerState', + 'value': 'BSH.Common.EnumType.PowerState.On', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'SIEMENS-HCS03WCH1-7BC6383CF794': dict({ + 'programs': list([ + 'LaundryCare.Washer.Program.Cotton', + 'LaundryCare.Washer.Program.EasyCare', + 'LaundryCare.Washer.Program.Mix', + 'LaundryCare.Washer.Program.DelicatesSilk', + 'LaundryCare.Washer.Program.Wool', + ]), + 'status': dict({ + 'BSH.Common.Root.ActiveProgram': dict({ + 'value': 'BSH.Common.Root.ActiveProgram', + }), + 'BSH.Common.Setting.ChildLock': dict({ + 'type': 'Boolean', + 'value': False, + }), + 'BSH.Common.Setting.PowerState': dict({ + 'type': 'BSH.Common.EnumType.PowerState', + 'value': 'BSH.Common.EnumType.PowerState.On', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 
'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'SIEMENS-HCS05FRF1-304F4F9E541D': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Setting.Dispenser.Enabled': dict({ + 'constraints': dict({ + 'access': 'readWrite', + }), + 'type': 'Boolean', + 'value': False, + }), + 'Refrigeration.Common.Setting.Light.External.Brightness': dict({ + 'constraints': dict({ + 'access': 'readWrite', + 'max': 100, + 'min': 0, + }), + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'Refrigeration.Common.Setting.Light.External.Power': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + 'Refrigeration.FridgeFreezer.Setting.SuperModeFreezer': dict({ + 'constraints': dict({ + 'access': 'readWrite', + }), + 'type': 'Boolean', + 'value': False, + }), + 'Refrigeration.FridgeFreezer.Setting.SuperModeRefrigerator': dict({ + 'constraints': dict({ + 'access': 'readWrite', + }), + 'type': 'Boolean', + 'value': False, + }), + }), + }), + }) +# --- +# name: test_async_get_device_diagnostics + dict({ + 'programs': list([ + 'Dishcare.Dishwasher.Program.Auto1', + 'Dishcare.Dishwasher.Program.Auto2', + 'Dishcare.Dishwasher.Program.Auto3', + 'Dishcare.Dishwasher.Program.Eco50', + 'Dishcare.Dishwasher.Program.Quick45', + ]), + 'status': dict({ + 'BSH.Common.Setting.AmbientLightBrightness': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'BSH.Common.Setting.AmbientLightColor': dict({ + 'type': 'BSH.Common.EnumType.AmbientLightColor', + 'value': 'BSH.Common.EnumType.AmbientLightColor.Color43', + }), + 'BSH.Common.Setting.AmbientLightCustomColor': dict({ + 'type': 'String', + 'value': '#4a88f8', + }), + 'BSH.Common.Setting.AmbientLightEnabled': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'BSH.Common.Setting.ChildLock': dict({ + 'type': 'Boolean', + 'value': False, + }), + 'BSH.Common.Setting.PowerState': dict({ + 'type': 'BSH.Common.EnumType.PowerState', + 'value': 'BSH.Common.EnumType.PowerState.On', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }) +# --- diff --git a/tests/components/home_connect/test_binary_sensor.py b/tests/components/home_connect/test_binary_sensor.py index 39502507439..b564b003af6 100644 --- a/tests/components/home_connect/test_binary_sensor.py +++ b/tests/components/home_connect/test_binary_sensor.py @@ -3,20 +3,30 @@ from collections.abc import Awaitable, Callable from unittest.mock import MagicMock, Mock +from homeconnect.api import HomeConnectAPI import pytest +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity from homeassistant.components.home_connect.const 
import ( BSH_DOOR_STATE, BSH_DOOR_STATE_CLOSED, BSH_DOOR_STATE_LOCKED, BSH_DOOR_STATE_OPEN, + DOMAIN, + REFRIGERATION_STATUS_DOOR_CLOSED, + REFRIGERATION_STATUS_DOOR_OPEN, + REFRIGERATION_STATUS_DOOR_REFRIGERATOR, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity +import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -64,9 +74,134 @@ async def test_binary_sensors_door_states( entity_id = "binary_sensor.washer_door" get_appliances.return_value = [appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED + appliance.status.update({BSH_DOOR_STATE: {"value": state}}) assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED - appliance.status.update({BSH_DOOR_STATE: {"value": state}}) await async_update_entity(hass, entity_id) await hass.async_block_till_done() assert hass.states.is_state(entity_id, expected) + + +@pytest.mark.parametrize( + ("entity_id", "status_key", "event_value_update", "expected", "appliance"), + [ + ( + "binary_sensor.fridgefreezer_refrigerator_door", + REFRIGERATION_STATUS_DOOR_REFRIGERATOR, + REFRIGERATION_STATUS_DOOR_CLOSED, + STATE_OFF, + "FridgeFreezer", + ), + ( + "binary_sensor.fridgefreezer_refrigerator_door", + REFRIGERATION_STATUS_DOOR_REFRIGERATOR, + REFRIGERATION_STATUS_DOOR_OPEN, + STATE_ON, + "FridgeFreezer", + ), + ( + "binary_sensor.fridgefreezer_refrigerator_door", + REFRIGERATION_STATUS_DOOR_REFRIGERATOR, + "", + STATE_UNAVAILABLE, + "FridgeFreezer", + ), + ], + indirect=["appliance"], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_binary_sensors_fridge_door_states( + entity_id: str, + status_key: str, + event_value_update: str, + appliance: Mock, + expected: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Tests for Home Connect Fridge appliance door states.""" + appliance.status.update( + HomeConnectAPI.json2dict( + load_json_object_fixture("home_connect/status.json")["data"]["status"] + ) + ) + get_appliances.return_value = [appliance] + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + appliance.status.update({status_key: {"value": event_value_update}}) + await async_update_entity(hass, entity_id) + await hass.async_block_till_done() + assert hass.states.is_state(entity_id, expected) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.usefixtures("bypass_throttle") +async def test_create_issue( + hass: HomeAssistant, + appliance: Mock, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + entity_id = "binary_sensor.washer_door" + get_appliances.return_value = [appliance] + issue_id = 
f"deprecated_binary_common_door_sensor_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + appliance.status.update({BSH_DOOR_STATE: {"value": BSH_DOOR_STATE_OPEN}}) + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue(DOMAIN, issue_id) + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/home_connect/test_diagnostics.py b/tests/components/home_connect/test_diagnostics.py new file mode 100644 index 00000000000..d0bc5e77735 --- /dev/null +++ b/tests/components/home_connect/test_diagnostics.py @@ -0,0 +1,87 @@ +"""Test diagnostics for Home Connect.""" + +from collections.abc import Awaitable, Callable +from unittest.mock import MagicMock + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.home_connect.const import DOMAIN +from homeassistant.components.home_connect.diagnostics import ( + async_get_config_entry_diagnostics, + async_get_device_diagnostics, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("bypass_throttle") +async def test_async_get_config_entry_diagnostics( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + assert await async_get_config_entry_diagnostics(hass, config_entry) == snapshot + + +@pytest.mark.usefixtures("bypass_throttle") +async def test_async_get_device_diagnostics( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test device config entry diagnostics.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + device = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, "SIEMENS-HCS02DWH1-6BE58C26DCC1")}, + ) + + assert 
await async_get_device_diagnostics(hass, config_entry, device) == snapshot + + +@pytest.mark.usefixtures("bypass_throttle") +async def test_async_device_diagnostics_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test device config entry diagnostics.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + device = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, "Random-Device-ID")}, + ) + + with pytest.raises(ValueError): + await async_get_device_diagnostics(hass, config_entry, device) diff --git a/tests/components/home_connect/test_init.py b/tests/components/home_connect/test_init.py index 02d9bcaa208..69601efb42d 100644 --- a/tests/components/home_connect/test_init.py +++ b/tests/components/home_connect/test_init.py @@ -2,17 +2,36 @@ from collections.abc import Awaitable, Callable from typing import Any -from unittest.mock import MagicMock, Mock +from unittest.mock import MagicMock, Mock, patch from freezegun.api import FrozenDateTimeFactory import pytest from requests import HTTPError import requests_mock -from homeassistant.components.home_connect.const import DOMAIN, OAUTH2_TOKEN +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.home_connect import ( + SCAN_INTERVAL, + bsh_key_to_translation_key, +) +from homeassistant.components.home_connect.const import ( + BSH_CHILD_LOCK_STATE, + BSH_OPERATION_STATE, + BSH_POWER_STATE, + BSH_REMOTE_START_ALLOWANCE_STATE, + COOKING_LIGHTING, + DOMAIN, + OAUTH2_TOKEN, +) +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import device_registry as dr, entity_registry as er +from script.hassfest.translations import RE_TRANSLATION_KEY from .conftest import ( CLIENT_ID, @@ -155,14 +174,14 @@ async def test_update_throttle( # First re-load after 1 minute is not blocked. assert await hass.config_entries.async_unload(config_entry.entry_id) assert config_entry.state == ConfigEntryState.NOT_LOADED - freezer.tick(60) + freezer.tick(SCAN_INTERVAL.seconds + 0.1) assert await hass.config_entries.async_setup(config_entry.entry_id) assert get_appliances.call_count == get_appliances_call_count + 1 # Second re-load is blocked by Throttle. 
assert await hass.config_entries.async_unload(config_entry.entry_id) assert config_entry.state == ConfigEntryState.NOT_LOADED - freezer.tick(59) + freezer.tick(SCAN_INTERVAL.seconds - 0.1) assert await hass.config_entries.async_setup(config_entry.entry_id) assert get_appliances.call_count == get_appliances_call_count + 1 @@ -272,8 +291,40 @@ async def test_services( ) +@pytest.mark.parametrize( + "service_call", + SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, +) @pytest.mark.usefixtures("bypass_throttle") async def test_services_exception( + service_call: list[dict[str, Any]], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + problematic_appliance: Mock, + device_registry: dr.DeviceRegistry, +) -> None: + """Raise a HomeAssistantError when there is an API error.""" + get_appliances.return_value = [problematic_appliance] + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, problematic_appliance.haId)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + + with pytest.raises(HomeAssistantError): + await hass.services.async_call(**service_call) + + +@pytest.mark.usefixtures("bypass_throttle") +async def test_services_appliance_not_found( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -281,7 +332,7 @@ async def test_services_exception( get_appliances: MagicMock, appliance: Mock, ) -> None: - """Raise a ValueError when device id does not match.""" + """Raise a ServiceValidationError when device id does not match.""" get_appliances.return_value = [appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED assert await integration_setup() @@ -291,5 +342,77 @@ async def test_services_exception( service_call["service_data"]["device_id"] = "DOES_NOT_EXISTS" - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError, match=r"Appliance.*not found"): await hass.services.async_call(**service_call) + + +async def test_entity_migration( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + config_entry_v1_1: MockConfigEntry, + appliance: Mock, + platforms: list[Platform], +) -> None: + """Test entity migration.""" + + config_entry_v1_1.add_to_hass(hass) + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry_v1_1.entry_id, + identifiers={(DOMAIN, appliance.haId)}, + ) + + test_entities = [ + ( + SENSOR_DOMAIN, + "Operation State", + BSH_OPERATION_STATE, + ), + ( + SWITCH_DOMAIN, + "ChildLock", + BSH_CHILD_LOCK_STATE, + ), + ( + SWITCH_DOMAIN, + "Power", + BSH_POWER_STATE, + ), + ( + BINARY_SENSOR_DOMAIN, + "Remote Start", + BSH_REMOTE_START_ALLOWANCE_STATE, + ), + ( + LIGHT_DOMAIN, + "Light", + COOKING_LIGHTING, + ), + ] + + for domain, old_unique_id_suffix, _ in test_entities: + entity_registry.async_get_or_create( + domain, + DOMAIN, + f"{appliance.haId}-{old_unique_id_suffix}", + device_id=device_entry.id, + config_entry=config_entry_v1_1, + ) + + with patch("homeassistant.components.home_connect.PLATFORMS", platforms): + await hass.config_entries.async_setup(config_entry_v1_1.entry_id) + await hass.async_block_till_done() + + for domain, _, 
expected_unique_id_suffix in test_entities: + assert entity_registry.async_get_entity_id( + domain, DOMAIN, f"{appliance.haId}-{expected_unique_id_suffix}" + ) + assert config_entry_v1_1.minor_version == 2 + + +async def test_bsh_key_transformations() -> None: + """Test that the key transformations are compatible valid translations keys and can be reversed.""" + program = "Dishcare.Dishwasher.Program.Eco50" + translation_key = bsh_key_to_translation_key(program) + assert RE_TRANSLATION_KEY.match(translation_key) diff --git a/tests/components/home_connect/test_light.py b/tests/components/home_connect/test_light.py index 8d918dc5815..471ddf0ec54 100644 --- a/tests/components/home_connect/test_light.py +++ b/tests/components/home_connect/test_light.py @@ -3,17 +3,20 @@ from collections.abc import Awaitable, Callable, Generator from unittest.mock import MagicMock, Mock -from homeconnect.api import HomeConnectError +from homeconnect.api import HomeConnectAppliance, HomeConnectError import pytest from homeassistant.components.home_connect.const import ( BSH_AMBIENT_LIGHT_BRIGHTNESS, + BSH_AMBIENT_LIGHT_COLOR, BSH_AMBIENT_LIGHT_CUSTOM_COLOR, BSH_AMBIENT_LIGHT_ENABLED, COOKING_LIGHTING, COOKING_LIGHTING_BRIGHTNESS, + REFRIGERATION_EXTERNAL_LIGHT_BRIGHTNESS, + REFRIGERATION_EXTERNAL_LIGHT_POWER, ) -from homeassistant.components.light import DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( SERVICE_TURN_OFF, @@ -24,6 +27,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from .conftest import get_all_appliances @@ -65,7 +69,7 @@ async def test_light( ("entity_id", "status", "service", "service_data", "state", "appliance"), [ ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": True, @@ -77,7 +81,7 @@ async def test_light( "Hood", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": True, @@ -90,7 +94,7 @@ async def test_light( "Hood", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: {"value": False}, COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, @@ -101,7 +105,7 @@ async def test_light( "Hood", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": None, @@ -114,7 +118,7 @@ async def test_light( "Hood", ), ( - "light.hood_ambientlight", + "light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: { "value": True, @@ -127,7 +131,7 @@ async def test_light( "Hood", ), ( - "light.hood_ambientlight", + "light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: {"value": False}, BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, @@ -138,7 +142,7 @@ async def test_light( "Hood", ), ( - "light.hood_ambientlight", + "light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: {"value": True}, BSH_AMBIENT_LIGHT_CUSTOM_COLOR: {}, @@ -148,6 +152,35 @@ async def test_light( STATE_ON, "Hood", ), + ( + "light.hood_ambient_light", + { + BSH_AMBIENT_LIGHT_ENABLED: {"value": True}, + BSH_AMBIENT_LIGHT_COLOR: { + "value": "", + }, + BSH_AMBIENT_LIGHT_CUSTOM_COLOR: {}, + }, + SERVICE_TURN_ON, + { + "rgb_color": [255, 255, 0], + }, + STATE_ON, + "Hood", + ), + ( + "light.fridgefreezer_external_light", + { + REFRIGERATION_EXTERNAL_LIGHT_POWER: { + "value": True, + }, + REFRIGERATION_EXTERNAL_LIGHT_BRIGHTNESS: {"value": 75}, + }, + SERVICE_TURN_ON, + {}, + STATE_ON, + "FridgeFreezer", + 
), ], indirect=["appliance"], ) @@ -166,7 +199,14 @@ async def test_light_functionality( get_appliances: MagicMock, ) -> None: """Test light functionality.""" - appliance.status.update(SETTINGS_STATUS) + appliance.status.update( + HomeConnectAppliance.json2dict( + load_json_object_fixture("home_connect/settings.json") + .get(appliance.name) + .get("data") + .get("settings") + ) + ) get_appliances.return_value = [appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED @@ -176,7 +216,7 @@ async def test_light_functionality( appliance.status.update(status) service_data["entity_id"] = entity_id await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, service, service_data, blocking=True, @@ -193,10 +233,11 @@ async def test_light_functionality( "mock_attr", "attr_side_effect", "problematic_appliance", + "exception_match", ), [ ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": False, @@ -207,9 +248,10 @@ async def test_light_functionality( "set_setting", [HomeConnectError, HomeConnectError], "Hood", + r"Error.*turn.*on.*", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": True, @@ -221,9 +263,10 @@ async def test_light_functionality( "set_setting", [HomeConnectError, HomeConnectError], "Hood", + r"Error.*turn.*on.*", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: {"value": False}, }, @@ -232,9 +275,10 @@ async def test_light_functionality( "set_setting", [HomeConnectError, HomeConnectError], "Hood", + r"Error.*turn.*off.*", ), ( - "light.hood_ambientlight", + "light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: { "value": True, @@ -246,9 +290,10 @@ async def test_light_functionality( "set_setting", [HomeConnectError, HomeConnectError], "Hood", + r"Error.*turn.*on.*", ), ( - "light.hood_ambientlight", + "light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: { "value": True, @@ -258,8 +303,9 @@ async def test_light_functionality( SERVICE_TURN_ON, {"brightness": 200}, "set_setting", - [HomeConnectError, None, HomeConnectError, HomeConnectError], + [HomeConnectError, None, HomeConnectError], "Hood", + r"Error.*set.*color.*", ), ], indirect=["problematic_appliance"], @@ -272,6 +318,7 @@ async def test_switch_exception_handling( mock_attr: str, attr_side_effect: list, problematic_appliance: Mock, + exception_match: str, bypass_throttle: Generator[None], hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], @@ -294,5 +341,8 @@ async def test_switch_exception_handling( problematic_appliance.status.update(status) service_data["entity_id"] = entity_id - await hass.services.async_call(DOMAIN, service, service_data, blocking=True) + with pytest.raises(HomeAssistantError, match=exception_match): + await hass.services.async_call( + LIGHT_DOMAIN, service, service_data, blocking=True + ) assert getattr(problematic_appliance, mock_attr).call_count == len(attr_side_effect) diff --git a/tests/components/home_connect/test_number.py b/tests/components/home_connect/test_number.py new file mode 100644 index 00000000000..bce19161cf8 --- /dev/null +++ b/tests/components/home_connect/test_number.py @@ -0,0 +1,176 @@ +"""Tests for home_connect number entities.""" + +from collections.abc import Awaitable, Callable, Generator +import random +from unittest.mock import MagicMock, Mock + +from homeconnect.api import HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ( + ATTR_CONSTRAINTS, + ATTR_STEPSIZE, + ATTR_UNIT, + ATTR_VALUE, +) +from 
homeassistant.components.number import ( + ATTR_MAX, + ATTR_MIN, + ATTR_VALUE as SERVICE_ATTR_VALUE, + DEFAULT_MIN_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.NUMBER] + + +async def test_number( + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, +) -> None: + """Test number entity.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.mark.parametrize("appliance", ["Refrigerator"], indirect=True) +@pytest.mark.parametrize( + ( + "entity_id", + "setting_key", + "min_value", + "max_value", + "step_size", + "unit_of_measurement", + ), + [ + ( + f"{NUMBER_DOMAIN.lower()}.refrigerator_refrigerator_temperature", + "Refrigeration.FridgeFreezer.Setting.SetpointTemperatureRefrigerator", + 7, + 15, + 0.1, + "°C", + ), + ], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_number_entity_functionality( + appliance: Mock, + entity_id: str, + setting_key: str, + bypass_throttle: Generator[None], + min_value: int, + max_value: int, + step_size: float, + unit_of_measurement: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test number entity functionality.""" + appliance.get.side_effect = [ + { + ATTR_CONSTRAINTS: { + ATTR_MIN: min_value, + ATTR_MAX: max_value, + ATTR_STEPSIZE: step_size, + }, + ATTR_UNIT: unit_of_measurement, + } + ] + get_appliances.return_value = [appliance] + current_value = min_value + appliance.status.update({setting_key: {ATTR_VALUE: current_value}}) + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + assert hass.states.is_state(entity_id, str(current_value)) + state = hass.states.get(entity_id) + assert state.attributes["min"] == min_value + assert state.attributes["max"] == max_value + assert state.attributes["step"] == step_size + assert state.attributes["unit_of_measurement"] == unit_of_measurement + + new_value = random.randint(min_value + 1, max_value) + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + SERVICE_ATTR_VALUE: new_value, + }, + blocking=True, + ) + appliance.set_setting.assert_called_once_with(setting_key, new_value) + + +@pytest.mark.parametrize("problematic_appliance", ["Refrigerator"], indirect=True) +@pytest.mark.parametrize( + ("entity_id", "setting_key", "mock_attr"), + [ + ( + f"{NUMBER_DOMAIN.lower()}.refrigerator_refrigerator_temperature", + "Refrigeration.FridgeFreezer.Setting.SetpointTemperatureRefrigerator", + "set_setting", + ), + ], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_number_entity_error( + problematic_appliance: Mock, + entity_id: str, + setting_key: str, + mock_attr: str, + hass: 
HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test number entity error.""" + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state is ConfigEntryState.NOT_LOADED + problematic_appliance.status.update({setting_key: {}}) + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + with pytest.raises( + HomeAssistantError, match=r"Error.*assign.*value.*to.*setting.*" + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + SERVICE_ATTR_VALUE: DEFAULT_MIN_VALUE, + }, + blocking=True, + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/home_connect/test_select.py b/tests/components/home_connect/test_select.py new file mode 100644 index 00000000000..7d5843e9525 --- /dev/null +++ b/tests/components/home_connect/test_select.py @@ -0,0 +1,161 @@ +"""Tests for home_connect select entities.""" + +from collections.abc import Awaitable, Callable, Generator +from unittest.mock import MagicMock, Mock + +from homeconnect.api import HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ( + BSH_ACTIVE_PROGRAM, + BSH_SELECTED_PROGRAM, +) +from homeassistant.components.select import ATTR_OPTION, DOMAIN as SELECT_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_SELECT_OPTION, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry, load_json_object_fixture + +SETTINGS_STATUS = { + setting.pop("key"): setting + for setting in load_json_object_fixture("home_connect/settings.json") + .get("Washer") + .get("data") + .get("settings") +} + +PROGRAM = "Dishcare.Dishwasher.Program.Eco50" + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.SELECT] + + +async def test_select( + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, +) -> None: + """Test select entity.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.mark.parametrize( + ("entity_id", "status", "program_to_set"), + [ + ( + "select.washer_selected_program", + {BSH_SELECTED_PROGRAM: {"value": PROGRAM}}, + "dishcare_dishwasher_program_eco_50", + ), + ( + "select.washer_active_program", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + "dishcare_dishwasher_program_eco_50", + ), + ], +) +async def test_select_functionality( + entity_id: str, + status: dict, + program_to_set: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test select functionality.""" + appliance.status.update(SETTINGS_STATUS) + appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = 
[appliance] + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + appliance.status.update(status) + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: program_to_set}, + blocking=True, + ) + assert hass.states.is_state(entity_id, program_to_set) + + +@pytest.mark.parametrize( + ( + "entity_id", + "status", + "program_to_set", + "mock_attr", + "exception_match", + ), + [ + ( + "select.washer_selected_program", + {BSH_SELECTED_PROGRAM: {"value": PROGRAM}}, + "dishcare_dishwasher_program_eco_50", + "select_program", + r"Error.*select.*program.*", + ), + ( + "select.washer_active_program", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + "dishcare_dishwasher_program_eco_50", + "start_program", + r"Error.*start.*program.*", + ), + ], +) +async def test_select_exception_handling( + entity_id: str, + status: dict, + program_to_set: str, + mock_attr: str, + exception_match: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + integration_setup: Callable[[], Awaitable[bool]], + config_entry: MockConfigEntry, + setup_credentials: None, + problematic_appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test exception handling.""" + problematic_appliance.get_programs_available.side_effect = None + problematic_appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + # Assert that an exception is called. + with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + problematic_appliance.status.update(status) + with pytest.raises(HomeAssistantError, match=exception_match): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {"entity_id": entity_id, "option": program_to_set}, + blocking=True, + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/home_connect/test_sensor.py b/tests/components/home_connect/test_sensor.py index 661ac62403f..f2ee3b13922 100644 --- a/tests/components/home_connect/test_sensor.py +++ b/tests/components/home_connect/test_sensor.py @@ -4,28 +4,40 @@ from collections.abc import Awaitable, Callable from unittest.mock import MagicMock, Mock from freezegun.api import FrozenDateTimeFactory +from homeconnect.api import HomeConnectAPI import pytest +from homeassistant.components.home_connect.const import ( + BSH_DOOR_STATE, + BSH_DOOR_STATE_CLOSED, + BSH_DOOR_STATE_LOCKED, + BSH_DOOR_STATE_OPEN, + BSH_EVENT_PRESENT_STATE_CONFIRMED, + BSH_EVENT_PRESENT_STATE_OFF, + BSH_EVENT_PRESENT_STATE_PRESENT, + COFFEE_EVENT_BEAN_CONTAINER_EMPTY, + REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, +) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture TEST_HC_APP = "Dishwasher" EVENT_PROG_DELAYED_START = { "BSH.Common.Status.OperationState": { - "value": "BSH.Common.EnumType.OperationState.Delayed" + "value": "BSH.Common.EnumType.OperationState.DelayedStart" }, } EVENT_PROG_REMAIN_NO_VALUE = { "BSH.Common.Option.RemainingProgramTime": {}, 
"BSH.Common.Status.OperationState": { - "value": "BSH.Common.EnumType.OperationState.Delayed" + "value": "BSH.Common.EnumType.OperationState.DelayedStart" }, } @@ -95,13 +107,13 @@ PROGRAM_SEQUENCE_EVENTS = ( # Entity mapping to expected state at each program sequence. ENTITY_ID_STATES = { "sensor.dishwasher_operation_state": ( - "Delayed", - "Run", - "Run", - "Run", - "Ready", + "delayedstart", + "run", + "run", + "run", + "ready", ), - "sensor.dishwasher_remaining_program_time": ( + "sensor.dishwasher_program_finish_time": ( "unavailable", "2021-01-09T12:00:00+00:00", "2021-01-09T12:00:00+00:00", @@ -150,6 +162,8 @@ async def test_event_sensors( get_appliances.return_value = [appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED + appliance.get_programs_available = MagicMock(return_value=["dummy_program"]) + appliance.status.update(EVENT_PROG_DELAYED_START) assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED @@ -190,11 +204,13 @@ async def test_remaining_prog_time_edge_cases( ) -> None: """Run program sequence to test edge cases for the remaining_prog_time entity.""" get_appliances.return_value = [appliance] - entity_id = "sensor.dishwasher_remaining_program_time" + entity_id = "sensor.dishwasher_program_finish_time" time_to_freeze = "2021-01-09 12:00:00+00:00" freezer.move_to(time_to_freeze) assert config_entry.state == ConfigEntryState.NOT_LOADED + appliance.get_programs_available = MagicMock(return_value=["dummy_program"]) + appliance.status.update(EVENT_PROG_REMAIN_NO_VALUE) assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED @@ -207,3 +223,115 @@ async def test_remaining_prog_time_edge_cases( await hass.async_block_till_done() freezer.tick() assert hass.states.is_state(entity_id, expected_state) + + +@pytest.mark.parametrize( + ("entity_id", "status_key", "event_value_update", "expected", "appliance"), + [ + ( + "sensor.dishwasher_door", + BSH_DOOR_STATE, + BSH_DOOR_STATE_LOCKED, + "locked", + "Dishwasher", + ), + ( + "sensor.dishwasher_door", + BSH_DOOR_STATE, + BSH_DOOR_STATE_CLOSED, + "closed", + "Dishwasher", + ), + ( + "sensor.dishwasher_door", + BSH_DOOR_STATE, + BSH_DOOR_STATE_OPEN, + "open", + "Dishwasher", + ), + ( + "sensor.fridgefreezer_freezer_door_alarm", + "EVENT_NOT_IN_STATUS_YET_SO_SET_TO_OFF", + "", + "off", + "FridgeFreezer", + ), + ( + "sensor.fridgefreezer_freezer_door_alarm", + REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, + BSH_EVENT_PRESENT_STATE_OFF, + "off", + "FridgeFreezer", + ), + ( + "sensor.fridgefreezer_freezer_door_alarm", + REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, + BSH_EVENT_PRESENT_STATE_PRESENT, + "present", + "FridgeFreezer", + ), + ( + "sensor.fridgefreezer_freezer_door_alarm", + REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, + BSH_EVENT_PRESENT_STATE_CONFIRMED, + "confirmed", + "FridgeFreezer", + ), + ( + "sensor.coffeemaker_bean_container_empty", + "EVENT_NOT_IN_STATUS_YET_SO_SET_TO_OFF", + "", + "off", + "CoffeeMaker", + ), + ( + "sensor.coffeemaker_bean_container_empty", + COFFEE_EVENT_BEAN_CONTAINER_EMPTY, + BSH_EVENT_PRESENT_STATE_OFF, + "off", + "CoffeeMaker", + ), + ( + "sensor.coffeemaker_bean_container_empty", + COFFEE_EVENT_BEAN_CONTAINER_EMPTY, + BSH_EVENT_PRESENT_STATE_PRESENT, + "present", + "CoffeeMaker", + ), + ( + "sensor.coffeemaker_bean_container_empty", + COFFEE_EVENT_BEAN_CONTAINER_EMPTY, + BSH_EVENT_PRESENT_STATE_CONFIRMED, + "confirmed", + "CoffeeMaker", + ), + ], + indirect=["appliance"], +) +@pytest.mark.usefixtures("bypass_throttle") +async def 
test_sensors_states( + entity_id: str, + status_key: str, + event_value_update: str, + appliance: Mock, + expected: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Tests for Appliance alarm sensors.""" + appliance.status.update( + HomeConnectAPI.json2dict( + load_json_object_fixture("home_connect/status.json")["data"]["status"] + ) + ) + get_appliances.return_value = [appliance] + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + appliance.status.update({status_key: {"value": event_value_update}}) + await async_update_entity(hass, entity_id) + await hass.async_block_till_done() + assert hass.states.is_state(entity_id, expected) diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index c6a7b384036..3a89005dc59 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -3,20 +3,25 @@ from collections.abc import Awaitable, Callable, Generator from unittest.mock import MagicMock, Mock -from homeconnect.api import HomeConnectError +from homeconnect.api import HomeConnectAppliance, HomeConnectError import pytest from homeassistant.components.home_connect.const import ( + ATTR_ALLOWED_VALUES, + ATTR_CONSTRAINTS, BSH_ACTIVE_PROGRAM, BSH_CHILD_LOCK_STATE, BSH_OPERATION_STATE, BSH_POWER_OFF, BSH_POWER_ON, + BSH_POWER_STANDBY, BSH_POWER_STATE, + REFRIGERATION_SUPERMODEFREEZER, ) -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( + ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, @@ -24,6 +29,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from .conftest import get_all_appliances @@ -32,7 +38,7 @@ from tests.common import MockConfigEntry, load_json_object_fixture SETTINGS_STATUS = { setting.pop("key"): setting for setting in load_json_object_fixture("home_connect/settings.json") - .get("Washer") + .get("Dishwasher") .get("data") .get("settings") } @@ -62,56 +68,38 @@ async def test_switches( @pytest.mark.parametrize( - ("entity_id", "status", "service", "state"), + ("entity_id", "status", "service", "state", "appliance"), [ ( - "switch.washer_program_mix", + "switch.dishwasher_program_mix", {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, SERVICE_TURN_ON, STATE_ON, + "Dishwasher", ), ( - "switch.washer_program_mix", + "switch.dishwasher_program_mix", {BSH_ACTIVE_PROGRAM: {"value": ""}}, SERVICE_TURN_OFF, STATE_OFF, + "Dishwasher", ), ( - "switch.washer_power", - {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, - SERVICE_TURN_ON, - STATE_ON, - ), - ( - "switch.washer_power", - {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, - SERVICE_TURN_OFF, - STATE_OFF, - ), - ( - "switch.washer_power", - { - BSH_POWER_STATE: {"value": ""}, - BSH_OPERATION_STATE: { - "value": "BSH.Common.EnumType.OperationState.Inactive" - }, - }, - SERVICE_TURN_OFF, - STATE_OFF, - ), - ( - "switch.washer_childlock", + "switch.dishwasher_child_lock", {BSH_CHILD_LOCK_STATE: {"value": True}}, SERVICE_TURN_ON, STATE_ON, + "Dishwasher", ), ( - "switch.washer_childlock", + "switch.dishwasher_child_lock", {BSH_CHILD_LOCK_STATE: {"value": 
False}}, SERVICE_TURN_OFF, STATE_OFF, + "Dishwasher", ), ], + indirect=["appliance"], ) async def test_switch_functionality( entity_id: str, @@ -137,57 +125,78 @@ async def test_switch_functionality( appliance.status.update(status) await hass.services.async_call( - DOMAIN, service, {"entity_id": entity_id}, blocking=True + SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True ) assert hass.states.is_state(entity_id, state) @pytest.mark.parametrize( - ("entity_id", "status", "service", "mock_attr"), + ( + "entity_id", + "status", + "service", + "mock_attr", + "problematic_appliance", + "exception_match", + ), [ ( - "switch.washer_program_mix", + "switch.dishwasher_program_mix", {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, SERVICE_TURN_ON, "start_program", + "Dishwasher", + r"Error.*start.*program.*", ), ( - "switch.washer_program_mix", + "switch.dishwasher_program_mix", {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, SERVICE_TURN_OFF, "stop_program", + "Dishwasher", + r"Error.*stop.*program.*", ), ( - "switch.washer_power", + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, + SERVICE_TURN_OFF, + "set_setting", + "Dishwasher", + r"Error.*turn.*off.*", + ), + ( + "switch.dishwasher_power", {BSH_POWER_STATE: {"value": ""}}, SERVICE_TURN_ON, "set_setting", + "Dishwasher", + r"Error.*turn.*on.*", ), ( - "switch.washer_power", - {BSH_POWER_STATE: {"value": ""}}, - SERVICE_TURN_OFF, - "set_setting", - ), - ( - "switch.washer_childlock", + "switch.dishwasher_child_lock", {BSH_CHILD_LOCK_STATE: {"value": ""}}, SERVICE_TURN_ON, "set_setting", + "Dishwasher", + r"Error.*turn.*on.*", ), ( - "switch.washer_childlock", + "switch.dishwasher_child_lock", {BSH_CHILD_LOCK_STATE: {"value": ""}}, SERVICE_TURN_OFF, "set_setting", + "Dishwasher", + r"Error.*turn.*off.*", ), ], + indirect=["problematic_appliance"], ) async def test_switch_exception_handling( entity_id: str, status: dict, service: str, mock_attr: str, + exception_match: str, bypass_throttle: Generator[None], hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], @@ -201,6 +210,131 @@ async def test_switch_exception_handling( problematic_appliance.get_programs_available.return_value = [PROGRAM] get_appliances.return_value = [problematic_appliance] + assert config_entry.state == ConfigEntryState.NOT_LOADED + problematic_appliance.status.update(status) + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + # Assert that an exception is called. 
+ with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + with pytest.raises(HomeAssistantError, match=exception_match): + await hass.services.async_call( + SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 + + +@pytest.mark.parametrize( + ("entity_id", "status", "service", "state", "appliance"), + [ + ( + "switch.fridgefreezer_freezer_super_mode", + {REFRIGERATION_SUPERMODEFREEZER: {"value": True}}, + SERVICE_TURN_ON, + STATE_ON, + "FridgeFreezer", + ), + ( + "switch.fridgefreezer_freezer_super_mode", + {REFRIGERATION_SUPERMODEFREEZER: {"value": False}}, + SERVICE_TURN_OFF, + STATE_OFF, + "FridgeFreezer", + ), + ], + indirect=["appliance"], +) +async def test_ent_desc_switch_functionality( + entity_id: str, + status: dict, + service: str, + state: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test switch functionality - entity description setup.""" + appliance.status.update( + HomeConnectAppliance.json2dict( + load_json_object_fixture("home_connect/settings.json") + .get(appliance.name) + .get("data") + .get("settings") + ) + ) + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + appliance.status.update(status) + await hass.services.async_call( + SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert hass.states.is_state(entity_id, state) + + +@pytest.mark.parametrize( + ( + "entity_id", + "status", + "service", + "mock_attr", + "problematic_appliance", + "exception_match", + ), + [ + ( + "switch.fridgefreezer_freezer_super_mode", + {REFRIGERATION_SUPERMODEFREEZER: {"value": ""}}, + SERVICE_TURN_ON, + "set_setting", + "FridgeFreezer", + r"Error.*turn.*on.*", + ), + ( + "switch.fridgefreezer_freezer_super_mode", + {REFRIGERATION_SUPERMODEFREEZER: {"value": ""}}, + SERVICE_TURN_OFF, + "set_setting", + "FridgeFreezer", + r"Error.*turn.*off.*", + ), + ], + indirect=["problematic_appliance"], +) +async def test_ent_desc_switch_exception_handling( + entity_id: str, + status: dict, + service: str, + mock_attr: str, + exception_match: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + integration_setup: Callable[[], Awaitable[bool]], + config_entry: MockConfigEntry, + setup_credentials: None, + problematic_appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test switch exception handling - entity description setup.""" + problematic_appliance.status.update( + HomeConnectAppliance.json2dict( + load_json_object_fixture("home_connect/settings.json") + .get(problematic_appliance.name) + .get("data") + .get("settings") + ) + ) + get_appliances.return_value = [problematic_appliance] + assert config_entry.state == ConfigEntryState.NOT_LOADED assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED @@ -210,7 +344,165 @@ async def test_switch_exception_handling( getattr(problematic_appliance, mock_attr)() problematic_appliance.status.update(status) - await hass.services.async_call( - DOMAIN, service, {"entity_id": entity_id}, blocking=True - ) + with pytest.raises(HomeAssistantError, match=exception_match): + await hass.services.async_call( + SWITCH_DOMAIN, service, 
{ATTR_ENTITY_ID: entity_id}, blocking=True + ) assert getattr(problematic_appliance, mock_attr).call_count == 2 + + +@pytest.mark.parametrize( + ("entity_id", "status", "allowed_values", "service", "power_state", "appliance"), + [ + ( + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, + [BSH_POWER_ON, BSH_POWER_OFF], + SERVICE_TURN_ON, + STATE_ON, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, + [BSH_POWER_ON, BSH_POWER_OFF], + SERVICE_TURN_OFF, + STATE_OFF, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + { + BSH_POWER_STATE: {"value": ""}, + BSH_OPERATION_STATE: { + "value": "BSH.Common.EnumType.OperationState.Run" + }, + }, + [BSH_POWER_ON], + SERVICE_TURN_ON, + STATE_ON, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + { + BSH_POWER_STATE: {"value": ""}, + BSH_OPERATION_STATE: { + "value": "BSH.Common.EnumType.OperationState.Inactive" + }, + }, + [BSH_POWER_ON], + SERVICE_TURN_ON, + STATE_OFF, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, + [BSH_POWER_ON, BSH_POWER_STANDBY], + SERVICE_TURN_ON, + STATE_ON, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_STANDBY}}, + [BSH_POWER_ON, BSH_POWER_STANDBY], + SERVICE_TURN_OFF, + STATE_OFF, + "Dishwasher", + ), + ], + indirect=["appliance"], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_power_swtich( + entity_id: str, + status: dict, + allowed_values: list[str], + service: str, + power_state: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test power switch functionality.""" + appliance.get.side_effect = [ + { + ATTR_CONSTRAINTS: { + ATTR_ALLOWED_VALUES: allowed_values, + }, + } + ] + appliance.status.update(SETTINGS_STATUS) + appliance.status.update(status) + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + await hass.services.async_call( + SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert hass.states.is_state(entity_id, power_state) + + +@pytest.mark.parametrize( + ("entity_id", "allowed_values", "service", "appliance", "exception_match"), + [ + ( + "switch.dishwasher_power", + [BSH_POWER_ON], + SERVICE_TURN_OFF, + "Dishwasher", + r".*not support.*turn.*off.*", + ), + ( + "switch.dishwasher_power", + None, + SERVICE_TURN_OFF, + "Dishwasher", + r".*Unable.*turn.*off.*support.*not.*determined.*", + ), + ], + indirect=["appliance"], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_power_switch_service_validation_errors( + entity_id: str, + allowed_values: list[str], + service: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + exception_match: str, + get_appliances: MagicMock, +) -> None: + """Test power switch functionality validation errors.""" + if allowed_values: + appliance.get.side_effect = [ + { + ATTR_CONSTRAINTS: { + ATTR_ALLOWED_VALUES: allowed_values, + }, + } + ] + appliance.status.update(SETTINGS_STATUS) + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == 
ConfigEntryState.LOADED + + appliance.status.update({BSH_POWER_STATE: {"value": BSH_POWER_ON}}) + + with pytest.raises(HomeAssistantError, match=exception_match): + await hass.services.async_call( + SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True + ) diff --git a/tests/components/home_connect/test_time.py b/tests/components/home_connect/test_time.py new file mode 100644 index 00000000000..1401e07b05a --- /dev/null +++ b/tests/components/home_connect/test_time.py @@ -0,0 +1,150 @@ +"""Tests for home_connect time entities.""" + +from collections.abc import Awaitable, Callable, Generator +from datetime import time +from unittest.mock import MagicMock, Mock + +from homeconnect.api import HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ATTR_VALUE +from homeassistant.components.time import DOMAIN as TIME_DOMAIN, SERVICE_SET_VALUE +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TIME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.TIME] + + +async def test_time( + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, +) -> None: + """Test time entity.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +@pytest.mark.parametrize( + ("entity_id", "setting_key", "setting_value", "expected_state"), + [ + ( + f"{TIME_DOMAIN}.oven_alarm_clock", + "BSH.Common.Setting.AlarmClock", + {ATTR_VALUE: 59}, + str(time(second=59)), + ), + ( + f"{TIME_DOMAIN}.oven_alarm_clock", + "BSH.Common.Setting.AlarmClock", + {ATTR_VALUE: None}, + "unknown", + ), + ( + f"{TIME_DOMAIN}.oven_alarm_clock", + "BSH.Common.Setting.AlarmClock", + None, + "unknown", + ), + ], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_time_entity_functionality( + appliance: Mock, + entity_id: str, + setting_key: str, + setting_value: dict, + expected_state: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test time entity functionality.""" + get_appliances.return_value = [appliance] + appliance.status.update({setting_key: setting_value}) + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + assert hass.states.is_state(entity_id, expected_state) + + new_value = 30 + assert hass.states.get(entity_id).state != new_value + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(second=new_value), + }, + blocking=True, + ) + appliance.set_setting.assert_called_once_with(setting_key, new_value) + + +@pytest.mark.parametrize("problematic_appliance", ["Oven"], indirect=True) +@pytest.mark.parametrize( + ("entity_id", "setting_key", "mock_attr"), + [ + ( + 
f"{TIME_DOMAIN}.oven_alarm_clock", + "BSH.Common.Setting.AlarmClock", + "set_setting", + ), + ], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_time_entity_error( + problematic_appliance: Mock, + entity_id: str, + setting_key: str, + mock_attr: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test time entity error.""" + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state is ConfigEntryState.NOT_LOADED + problematic_appliance.status.update({setting_key: {}}) + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + with pytest.raises( + HomeAssistantError, match=r"Error.*assign.*value.*to.*setting.*" + ): + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(minute=1), + }, + blocking=True, + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/homeassistant/test_init.py b/tests/components/homeassistant/test_init.py index a0902fe62df..56eeb4177b1 100644 --- a/tests/components/homeassistant/test_init.py +++ b/tests/components/homeassistant/test_init.py @@ -7,7 +7,6 @@ import voluptuous as vol import yaml from homeassistant import config -import homeassistant.components as comps from homeassistant.components.homeassistant import ( ATTR_ENTRY_ID, ATTR_SAFE_MODE, @@ -46,15 +45,6 @@ from tests.common import ( ) -async def test_is_on(hass: HomeAssistant) -> None: - """Test is_on method.""" - with pytest.raises( - RuntimeError, - match="Detected code that uses homeassistant.components.is_on. 
This is deprecated and will stop working", - ): - assert comps.is_on(hass, "light.Bowl") - - async def test_turn_on_without_entities(hass: HomeAssistant) -> None: """Test turn_on method without entities.""" await async_setup_component(hass, ha.DOMAIN, {}) @@ -137,7 +127,7 @@ async def test_reload_core_conf(hass: HomeAssistant) -> None: @patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) @patch("homeassistant.components.homeassistant._LOGGER.error") -@patch("homeassistant.config.async_process_ha_core_config") +@patch("homeassistant.core_config.async_process_ha_core_config") async def test_reload_core_with_wrong_conf( mock_process, mock_error, hass: HomeAssistant ) -> None: @@ -194,6 +184,7 @@ async def test_turn_on_skips_domains_without_service( # because by mocking out the call service method, we mock out all # So we mimic how the service registry calls services service_call = ha.ServiceCall( + hass, "homeassistant", "turn_on", {"entity_id": ["light.test", "sensor.bla", "binary_sensor.blub", "light.bla"]}, @@ -252,7 +243,7 @@ async def test_setting_location(hass: HomeAssistant) -> None: assert elevation != 50 await hass.services.async_call( "homeassistant", - "set_location", + SERVICE_SET_LOCATION, {"latitude": 30, "longitude": 40}, blocking=True, ) @@ -263,12 +254,24 @@ async def test_setting_location(hass: HomeAssistant) -> None: await hass.services.async_call( "homeassistant", - "set_location", + SERVICE_SET_LOCATION, {"latitude": 30, "longitude": 40, "elevation": 50}, blocking=True, ) + assert hass.config.latitude == 30 + assert hass.config.longitude == 40 assert hass.config.elevation == 50 + await hass.services.async_call( + "homeassistant", + SERVICE_SET_LOCATION, + {"latitude": 30, "longitude": 40, "elevation": 0}, + blocking=True, + ) + assert hass.config.latitude == 30 + assert hass.config.longitude == 40 + assert hass.config.elevation == 0 + async def test_require_admin( hass: HomeAssistant, hass_read_only_user: MockUser diff --git a/tests/components/homeassistant/test_repairs.py b/tests/components/homeassistant/test_repairs.py index c7a1b3e762e..f84b29d8d2d 100644 --- a/tests/components/homeassistant/test_repairs.py +++ b/tests/components/homeassistant/test_repairs.py @@ -1,19 +1,15 @@ """Test the Homeassistant repairs module.""" -from http import HTTPStatus - from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN -from homeassistant.components.repairs.issue_handler import ( - async_process_repairs_platforms, -) -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.components.repairs import ( + async_process_repairs_platforms, + process_repair_fix_flow, + start_repair_fix_flow, +) from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -27,6 +23,7 @@ async def test_integration_not_found_confirm_step( await hass.async_block_till_done() assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) await hass.async_block_till_done() + MockConfigEntry(domain="test1").add_to_hass(hass) assert await async_setup_component(hass, "test1", {}) is False await hass.async_block_till_done() entry1 = MockConfigEntry(domain="test1") @@ -48,32 +45,20 @@ async def test_integration_not_found_confirm_step( assert issue["issue_id"] == issue_id assert issue["translation_placeholders"] == 
{"domain": "test1"} - url = RepairsFlowIndexView.url - resp = await http_client.post( - url, json={"handler": HOMEASSISTANT_DOMAIN, "issue_id": issue_id} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(http_client, HOMEASSISTANT_DOMAIN, issue_id) flow_id = data["flow_id"] assert data["step_id"] == "init" assert data["description_placeholders"] == {"domain": "test1"} - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - - # Show menu - resp = await http_client.post(url) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(http_client, flow_id) assert data["type"] == "menu" # Apply fix - resp = await http_client.post(url, json={"next_step_id": "confirm"}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow( + http_client, flow_id, json={"next_step_id": "confirm"} + ) assert data["type"] == "create_entry" @@ -99,6 +84,7 @@ async def test_integration_not_found_ignore_step( await hass.async_block_till_done() assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) await hass.async_block_till_done() + MockConfigEntry(domain="test1").add_to_hass(hass) assert await async_setup_component(hass, "test1", {}) is False await hass.async_block_till_done() entry1 = MockConfigEntry(domain="test1") @@ -118,32 +104,21 @@ async def test_integration_not_found_ignore_step( assert issue["issue_id"] == issue_id assert issue["translation_placeholders"] == {"domain": "test1"} - url = RepairsFlowIndexView.url - resp = await http_client.post( - url, json={"handler": HOMEASSISTANT_DOMAIN, "issue_id": issue_id} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(http_client, HOMEASSISTANT_DOMAIN, issue_id) flow_id = data["flow_id"] assert data["step_id"] == "init" assert data["description_placeholders"] == {"domain": "test1"} - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - # Show menu - resp = await http_client.post(url) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(http_client, flow_id) assert data["type"] == "menu" # Apply fix - resp = await http_client.post(url, json={"next_step_id": "ignore"}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow( + http_client, flow_id, json={"next_step_id": "ignore"} + ) assert data["type"] == "abort" assert data["reason"] == "issue_ignored" diff --git a/tests/components/homeassistant/triggers/test_time.py b/tests/components/homeassistant/triggers/test_time.py index 76d80120fdd..8900998a7b8 100644 --- a/tests/components/homeassistant/triggers/test_time.py +++ b/tests/components/homeassistant/triggers/test_time.py @@ -156,17 +156,43 @@ async def test_if_fires_using_at_input_datetime( ) +@pytest.mark.parametrize( + ("conf_at", "trigger_deltas"), + [ + ( + ["5:00:00", "6:00:00", "{{ '7:00:00' }}"], + [timedelta(0), timedelta(hours=1), timedelta(hours=2)], + ), + ( + [ + "5:00:05", + {"entity_id": "sensor.next_alarm", "offset": "00:00:10"}, + "sensor.next_alarm", + ], + [timedelta(seconds=5), timedelta(seconds=10), timedelta(0)], + ), + ], +) async def test_if_fires_using_multiple_at( hass: HomeAssistant, freezer: FrozenDateTimeFactory, service_calls: list[ServiceCall], + conf_at: list[str | dict[str, int | str]], + trigger_deltas: list[timedelta], ) -> None: - """Test for firing at.""" + """Test for firing at 
multiple trigger times.""" now = dt_util.now() - trigger_dt = now.replace(hour=5, minute=0, second=0, microsecond=0) + timedelta(2) - time_that_will_not_match_right_away = trigger_dt - timedelta(minutes=1) + start_dt = now.replace(hour=5, minute=0, second=0, microsecond=0) + timedelta(2) + + hass.states.async_set( + "sensor.next_alarm", + start_dt.isoformat(), + {ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP}, + ) + + time_that_will_not_match_right_away = start_dt - timedelta(minutes=1) freezer.move_to(dt_util.as_utc(time_that_will_not_match_right_away)) assert await async_setup_component( @@ -174,7 +200,7 @@ async def test_if_fires_using_multiple_at( automation.DOMAIN, { automation.DOMAIN: { - "trigger": {"platform": "time", "at": ["5:00:00", "6:00:00"]}, + "trigger": {"platform": "time", "at": conf_at}, "action": { "service": "test.automation", "data_template": { @@ -186,17 +212,14 @@ async def test_if_fires_using_multiple_at( ) await hass.async_block_till_done() - async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) - await hass.async_block_till_done() + for count, delta in enumerate(sorted(trigger_deltas)): + async_fire_time_changed(hass, start_dt + delta + timedelta(seconds=1)) + await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "time - 5" - - async_fire_time_changed(hass, trigger_dt + timedelta(hours=1, seconds=1)) - await hass.async_block_till_done() - - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "time - 6" + assert len(service_calls) == count + 1 + assert ( + service_calls[count].data["some"] == f"time - {5 + (delta.seconds // 3600)}" + ) async def test_if_not_fires_using_wrong_at( @@ -415,10 +438,14 @@ async def test_untrack_time_change(hass: HomeAssistant) -> None: assert len(mock_track_time_change.mock_calls) == 3 +@pytest.mark.parametrize( + ("at_sensor"), ["sensor.next_alarm", "{{ 'sensor.next_alarm' }}"] +) async def test_if_fires_using_at_sensor( hass: HomeAssistant, freezer: FrozenDateTimeFactory, service_calls: list[ServiceCall], + at_sensor: str, ) -> None: """Test for firing at sensor time.""" now = dt_util.now() @@ -441,7 +468,7 @@ async def test_if_fires_using_at_sensor( automation.DOMAIN, { automation.DOMAIN: { - "trigger": {"platform": "time", "at": "sensor.next_alarm"}, + "trigger": {"platform": "time", "at": at_sensor}, "action": { "service": "test.automation", "data_template": {"some": some_data}, @@ -518,12 +545,102 @@ async def test_if_fires_using_at_sensor( assert len(service_calls) == 2 +@pytest.mark.parametrize( + ("offset", "delta"), + [ + ("00:00:10", timedelta(seconds=10)), + ("-00:00:10", timedelta(seconds=-10)), + ({"minutes": 5}, timedelta(minutes=5)), + ], +) +async def test_if_fires_using_at_sensor_with_offset( + hass: HomeAssistant, + service_calls: list[ServiceCall], + freezer: FrozenDateTimeFactory, + offset: str | dict[str, int], + delta: timedelta, +) -> None: + """Test for firing at sensor time.""" + now = dt_util.now() + + start_dt = now.replace(hour=5, minute=0, second=0, microsecond=0) + timedelta(2) + trigger_dt = start_dt + delta + + hass.states.async_set( + "sensor.next_alarm", + start_dt.isoformat(), + {ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP}, + ) + + time_that_will_not_match_right_away = trigger_dt - timedelta(minutes=1) + + some_data = "{{ trigger.platform }}-{{ trigger.now.day }}-{{ trigger.now.hour }}-{{ trigger.now.minute }}-{{ trigger.now.second }}-{{trigger.entity_id}}" + + 
freezer.move_to(dt_util.as_utc(time_that_will_not_match_right_away)) + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "trigger": { + "platform": "time", + "at": { + "entity_id": "sensor.next_alarm", + "offset": offset, + }, + }, + "action": { + "service": "test.automation", + "data_template": {"some": some_data}, + }, + } + }, + ) + await hass.async_block_till_done() + + async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) + await hass.async_block_till_done() + + assert len(service_calls) == 1 + assert ( + service_calls[0].data["some"] + == f"time-{trigger_dt.day}-{trigger_dt.hour}-{trigger_dt.minute}-{trigger_dt.second}-sensor.next_alarm" + ) + + start_dt += timedelta(days=1, hours=1) + trigger_dt += timedelta(days=1, hours=1) + + hass.states.async_set( + "sensor.next_alarm", + start_dt.isoformat(), + {ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP}, + ) + await hass.async_block_till_done() + + async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) + await hass.async_block_till_done() + + assert len(service_calls) == 2 + assert ( + service_calls[1].data["some"] + == f"time-{trigger_dt.day}-{trigger_dt.hour}-{trigger_dt.minute}-{trigger_dt.second}-sensor.next_alarm" + ) + + @pytest.mark.parametrize( "conf", [ {"platform": "time", "at": "input_datetime.bla"}, {"platform": "time", "at": "sensor.bla"}, {"platform": "time", "at": "12:34"}, + {"platform": "time", "at": "{{ '12:34' }}"}, + {"platform": "time", "at": "{{ 'input_datetime.bla' }}"}, + {"platform": "time", "at": "{{ 'sensor.bla' }}"}, + {"platform": "time", "at": {"entity_id": "sensor.bla", "offset": "-00:01"}}, + { + "platform": "time", + "at": [{"entity_id": "sensor.bla", "offset": "-01:00:00"}], + }, ], ) def test_schema_valid(conf) -> None: @@ -537,6 +654,11 @@ def test_schema_valid(conf) -> None: {"platform": "time", "at": "binary_sensor.bla"}, {"platform": "time", "at": 745}, {"platform": "time", "at": "25:00"}, + { + "platform": "time", + "at": {"entity_id": "input_datetime.bla", "offset": "0:10"}, + }, + {"platform": "time", "at": {"entity_id": "13:00:00", "offset": "0:10"}}, ], ) def test_schema_invalid(conf) -> None: @@ -612,3 +734,70 @@ async def test_datetime_in_past_on_load( service_calls[2].data["some"] == f"time-{future.day}-{future.hour}-input_datetime.my_trigger" ) + + +@pytest.mark.parametrize( + "trigger", + [ + {"platform": "time", "at": "{{ 'hello world' }}"}, + {"platform": "time", "at": "{{ 74 }}"}, + {"platform": "time", "at": "{{ true }}"}, + {"platform": "time", "at": "{{ 7.5465 }}"}, + ], +) +async def test_if_at_template_renders_bad_value( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + trigger: dict[str, str], +) -> None: + """Test for invalid templates.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "trigger": trigger, + "action": { + "service": "test.automation", + }, + } + }, + ) + + await hass.async_block_till_done() + + assert ( + "expected HH:MM, HH:MM:SS or Entity ID with domain 'input_datetime' or 'sensor'" + in caplog.text + ) + + +@pytest.mark.parametrize( + "trigger", + [ + {"platform": "time", "at": "{{ now().strftime('%H:%M') }}"}, + {"platform": "time", "at": "{{ states('sensor.blah') | int(0) }}"}, + ], +) +async def test_if_at_template_limited_template( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + trigger: dict[str, str], +) -> None: + """Test for invalid templates.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + 
automation.DOMAIN: { + "trigger": trigger, + "action": { + "service": "test.automation", + }, + } + }, + ) + + await hass.async_block_till_done() + + assert "is not supported in limited templates" in caplog.text diff --git a/tests/components/homeassistant_hardware/conftest.py b/tests/components/homeassistant_hardware/conftest.py index c63dca74391..ddf18305b2a 100644 --- a/tests/components/homeassistant_hardware/conftest.py +++ b/tests/components/homeassistant_hardware/conftest.py @@ -47,12 +47,3 @@ def mock_zha_get_last_network_settings() -> Generator[None]: AsyncMock(return_value=None), ): yield - - -@pytest.fixture(name="stop_addon") -def stop_addon_fixture(): - """Mock stop add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_stop_addon" - ) as stop_addon: - yield stop_addon diff --git a/tests/components/homeassistant_hardware/test_config_flow.py b/tests/components/homeassistant_hardware/test_config_flow.py index a1842f4c4e6..8b0995a67f3 100644 --- a/tests/components/homeassistant_hardware/test_config_flow.py +++ b/tests/components/homeassistant_hardware/test_config_flow.py @@ -9,7 +9,7 @@ from unittest.mock import AsyncMock, Mock, call, patch import pytest from universal_silabs_flasher.const import ApplicationType -from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.hassio import AddonInfo, AddonState from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( STEP_PICK_FIRMWARE_THREAD, STEP_PICK_FIRMWARE_ZIGBEE, @@ -120,6 +120,11 @@ def mock_test_firmware_platform( yield +@pytest.fixture(autouse=True) +async def fixture_mock_supervisor_client(supervisor_client: AsyncMock): + """Mock supervisor client in tests.""" + + def delayed_side_effect() -> Callable[..., Awaitable[None]]: """Slows down eager tasks by delaying for an event loop tick.""" diff --git a/tests/components/homeassistant_hardware/test_config_flow_failures.py b/tests/components/homeassistant_hardware/test_config_flow_failures.py index 4c3ea7d28fa..5a6f765c44c 100644 --- a/tests/components/homeassistant_hardware/test_config_flow_failures.py +++ b/tests/components/homeassistant_hardware/test_config_flow_failures.py @@ -5,11 +5,7 @@ from unittest.mock import AsyncMock import pytest from universal_silabs_flasher.const import ApplicationType -from homeassistant.components.hassio.addon_manager import ( - AddonError, - AddonInfo, - AddonState, -) +from homeassistant.components.hassio import AddonError, AddonInfo, AddonState from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( STEP_PICK_FIRMWARE_THREAD, STEP_PICK_FIRMWARE_ZIGBEE, @@ -29,6 +25,15 @@ from .test_config_flow import ( from tests.common import MockConfigEntry +@pytest.fixture(autouse=True) +async def fixture_mock_supervisor_client(supervisor_client: AsyncMock): + """Mock supervisor client in tests.""" + + +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.unsupported_firmware"], +) @pytest.mark.parametrize( "next_step", [ @@ -59,6 +64,10 @@ async def test_config_flow_cannot_probe_firmware( assert result["reason"] == "unsupported_firmware" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.not_hassio"], +) async def test_config_flow_zigbee_not_hassio_wrong_firmware( hass: HomeAssistant, ) -> None: @@ -84,6 +93,10 @@ async def test_config_flow_zigbee_not_hassio_wrong_firmware( assert result["reason"] == "not_hassio" 
+@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_already_running"], +) async def test_config_flow_zigbee_flasher_addon_already_running( hass: HomeAssistant, ) -> None: @@ -118,6 +131,10 @@ async def test_config_flow_zigbee_flasher_addon_already_running( assert result["reason"] == "addon_already_running" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_info_failed"], +) async def test_config_flow_zigbee_flasher_addon_info_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( @@ -151,6 +168,10 @@ async def test_config_flow_zigbee_flasher_addon_info_fails(hass: HomeAssistant) assert result["reason"] == "addon_info_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_install_failed"], +) async def test_config_flow_zigbee_flasher_addon_install_fails( hass: HomeAssistant, ) -> None: @@ -181,6 +202,10 @@ async def test_config_flow_zigbee_flasher_addon_install_fails( assert result["reason"] == "addon_install_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_set_config_failed"], +) async def test_config_flow_zigbee_flasher_addon_set_config_fails( hass: HomeAssistant, ) -> None: @@ -215,6 +240,10 @@ async def test_config_flow_zigbee_flasher_addon_set_config_fails( assert result["reason"] == "addon_set_config_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_start_failed"], +) async def test_config_flow_zigbee_flasher_run_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon fails to run.""" result = await hass.config_entries.flow.async_init( @@ -276,6 +305,10 @@ async def test_config_flow_zigbee_flasher_uninstall_fails(hass: HomeAssistant) - assert result["step_id"] == "confirm_zigbee" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.not_hassio_thread"], +) async def test_config_flow_thread_not_hassio(hass: HomeAssistant) -> None: """Test when the stick is used with a non-hassio setup and Thread is selected.""" result = await hass.config_entries.flow.async_init( @@ -299,6 +332,10 @@ async def test_config_flow_thread_not_hassio(hass: HomeAssistant) -> None: assert result["reason"] == "not_hassio_thread" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_info_failed"], +) async def test_config_flow_thread_addon_info_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( @@ -323,6 +360,10 @@ async def test_config_flow_thread_addon_info_fails(hass: HomeAssistant) -> None: assert result["reason"] == "addon_info_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.otbr_addon_already_running"], +) async def test_config_flow_thread_addon_already_running(hass: HomeAssistant) -> None: """Test failure case when the Thread addon is already running.""" result = await hass.config_entries.flow.async_init( @@ -358,6 +399,10 @@ async def test_config_flow_thread_addon_already_running(hass: HomeAssistant) -> assert result["reason"] == "otbr_addon_already_running" +@pytest.mark.parametrize( + "ignore_translations", + 
["component.test_firmware_domain.config.abort.addon_install_failed"], +) async def test_config_flow_thread_addon_install_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( @@ -385,6 +430,10 @@ async def test_config_flow_thread_addon_install_fails(hass: HomeAssistant) -> No assert result["reason"] == "addon_install_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_set_config_failed"], +) async def test_config_flow_thread_addon_set_config_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be configured.""" result = await hass.config_entries.flow.async_init( @@ -412,6 +461,10 @@ async def test_config_flow_thread_addon_set_config_fails(hass: HomeAssistant) -> assert result["reason"] == "addon_set_config_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_start_failed"], +) async def test_config_flow_thread_flasher_run_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon fails to run.""" result = await hass.config_entries.flow.async_init( @@ -473,6 +526,10 @@ async def test_config_flow_thread_flasher_uninstall_fails(hass: HomeAssistant) - assert result["step_id"] == "confirm_otbr" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.options.abort.zha_still_using_stick"], +) async def test_options_flow_zigbee_to_thread_zha_configured( hass: HomeAssistant, ) -> None: @@ -510,6 +567,10 @@ async def test_options_flow_zigbee_to_thread_zha_configured( assert result["reason"] == "zha_still_using_stick" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.options.abort.otbr_still_using_stick"], +) async def test_options_flow_thread_to_zigbee_otbr_configured( hass: HomeAssistant, ) -> None: diff --git a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py index 5718133cd24..22e3e338986 100644 --- a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py +++ b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py @@ -6,10 +6,11 @@ from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsOptions import pytest from homeassistant.components.hassio import AddonError, AddonInfo, AddonState, HassIO -from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.homeassistant_hardware import silabs_multiprotocol_addon from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigFlow @@ -32,6 +33,11 @@ TEST_DOMAIN = "test" TEST_DOMAIN_2 = "test_2" +@pytest.fixture(autouse=True) +def mock_supervisor_client(supervisor_client: AsyncMock) -> None: + """Mock supervisor client.""" + + class FakeConfigFlow(ConfigFlow): """Handle a config flow for the silabs multiprotocol add-on.""" @@ -241,26 +247,25 @@ async def test_option_flow_install_multi_pan_addon( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await 
hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + start_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -316,7 +321,7 @@ async def test_option_flow_install_multi_pan_addon_zha( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( hass @@ -330,16 +335,15 @@ async def test_option_flow_install_multi_pan_addon_zha( assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) # Check the channel is initialized from ZHA assert multipan_manager._channel == 11 @@ -355,7 +359,7 @@ async def test_option_flow_install_multi_pan_addon_zha( assert zha_config_entry.title == "Test Multiprotocol" await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + start_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -411,27 +415,26 @@ async def test_option_flow_install_multi_pan_addon_zha_other_radio( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value.hostname = "core-silabs-multiprotocol" result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + start_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -447,6 +450,10 @@ async def test_option_flow_install_multi_pan_addon_zha_other_radio( } +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.not_hassio"], +) async def test_option_flow_non_hassio( hass: HomeAssistant, ) -> None: @@ -508,7 +515,7 @@ async def 
test_option_flow_addon_installed_same_device_reconfigure_unexpected_us ) -> None: """Test reconfiguring the multi pan addon.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( hass @@ -567,7 +574,7 @@ async def test_option_flow_addon_installed_same_device_reconfigure_expected_user ) -> None: """Test reconfiguring the multi pan addon.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( hass @@ -638,7 +645,7 @@ async def test_option_flow_addon_installed_same_device_uninstall( ) -> None: """Test uninstalling the multi pan addon.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -672,11 +679,8 @@ async def test_option_flow_addon_installed_same_device_uninstall( assert result["step_id"] == "uninstall_addon" # Make sure the flasher addon is installed - addon_store_info.return_value = { - "installed": None, - "available": True, - "state": "not_installed", - } + addon_store_info.return_value.installed = False + addon_store_info.return_Value.available = True result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -694,7 +698,7 @@ async def test_option_flow_addon_installed_same_device_uninstall( assert result["progress_action"] == "uninstall_multiprotocol_addon" await hass.async_block_till_done() - uninstall_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + uninstall_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -703,7 +707,7 @@ async def test_option_flow_addon_installed_same_device_uninstall( assert result["description_placeholders"] == {"addon_name": "Silicon Labs Flasher"} await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_flasher") + install_addon.assert_called_once_with("core_silabs_flasher") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -733,7 +737,7 @@ async def test_option_flow_addon_installed_same_device_do_not_uninstall_multi_pa ) -> None: """Test uninstalling the multi pan addon.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -762,6 +766,10 @@ async def test_option_flow_addon_installed_same_device_do_not_uninstall_multi_pa assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_already_running"], +) async def test_option_flow_flasher_already_running_failure( hass: HomeAssistant, addon_info, @@ -776,7 +784,7 @@ async def test_option_flow_flasher_already_running_failure( ) -> None: """Test uninstalling the multi pan addon but with the flasher addon running.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" # Setup the config entry 
config_entry = MockConfigEntry( @@ -799,8 +807,8 @@ async def test_option_flow_flasher_already_running_failure( assert result["step_id"] == "uninstall_addon" # The flasher addon is already installed and running, this is bad - addon_store_info.return_value["installed"] = True - addon_info.return_value["state"] = "started" + addon_store_info.return_value.installed = True + addon_info.return_value.state = "started" result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -823,7 +831,7 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed ) -> None: """Test uninstalling the multi pan addon.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -845,11 +853,8 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["type"] is FlowResultType.FORM assert result["step_id"] == "uninstall_addon" - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "not_running", - } + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -859,7 +864,7 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["progress_action"] == "uninstall_multiprotocol_addon" await hass.async_block_till_done() - uninstall_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + uninstall_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -867,11 +872,8 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["progress_action"] == "start_flasher_addon" assert result["description_placeholders"] == {"addon_name": "Silicon Labs Flasher"} - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "not_running", - } + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True await hass.async_block_till_done() install_addon.assert_not_called() @@ -879,6 +881,10 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_install_failed"], +) async def test_option_flow_flasher_install_failure( hass: HomeAssistant, addon_info, @@ -893,7 +899,7 @@ async def test_option_flow_flasher_install_failure( ) -> None: """Test uninstalling the multi pan addon, case where flasher addon fails.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -926,11 +932,8 @@ async def test_option_flow_flasher_install_failure( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "uninstall_addon" - addon_store_info.return_value = { - "installed": None, - "available": True, - "state": "not_installed", - } + addon_store_info.return_value.installed = False + addon_store_info.return_value.available = True install_addon.side_effect = [AddonError()] 
result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -941,13 +944,17 @@ async def test_option_flow_flasher_install_failure( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_flasher") + install_addon.assert_called_once_with("core_silabs_flasher") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_start_failed"], +) async def test_option_flow_flasher_addon_flash_failure( hass: HomeAssistant, addon_info, @@ -962,7 +969,7 @@ async def test_option_flow_flasher_addon_flash_failure( ) -> None: """Test where flasher addon fails to flash Zigbee firmware.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -991,10 +998,10 @@ async def test_option_flow_flasher_addon_flash_failure( assert result["step_id"] == "uninstall_multiprotocol_addon" assert result["progress_action"] == "uninstall_multiprotocol_addon" - start_addon.side_effect = HassioAPIError("Boom") + start_addon.side_effect = SupervisorError("Boom") await hass.async_block_till_done() - uninstall_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + uninstall_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1010,6 +1017,10 @@ async def test_option_flow_flasher_addon_flash_failure( assert result["description_placeholders"]["addon_name"] == "Silicon Labs Flasher" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.zha_migration_failed"], +) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_initiate_migration", side_effect=Exception("Boom!"), @@ -1029,7 +1040,7 @@ async def test_option_flow_uninstall_migration_initiate_failure( ) -> None: """Test uninstalling the multi pan addon, case where ZHA migration init fails.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -1071,6 +1082,10 @@ async def test_option_flow_uninstall_migration_initiate_failure( mock_initiate_migration.assert_called_once() +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.zha_migration_failed"], +) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_finish_migration", side_effect=Exception("Boom!"), @@ -1090,7 +1105,7 @@ async def test_option_flow_uninstall_migration_finish_failure( ) -> None: """Test uninstalling the multi pan addon, case where ZHA migration init fails.""" - addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" + addon_info.return_value.options["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -1128,7 +1143,7 @@ async def test_option_flow_uninstall_migration_finish_failure( ) await hass.async_block_till_done() - uninstall_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + 
uninstall_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1172,6 +1187,10 @@ async def test_option_flow_do_not_install_multi_pan_addon( assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_install_failed"], +) async def test_option_flow_install_multi_pan_addon_install_fails( hass: HomeAssistant, addon_store_info, @@ -1182,7 +1201,7 @@ async def test_option_flow_install_multi_pan_addon_install_fails( ) -> None: """Test installing the multi pan addon.""" - install_addon.side_effect = HassioAPIError("Boom") + install_addon.side_effect = SupervisorError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1208,13 +1227,17 @@ async def test_option_flow_install_multi_pan_addon_install_fails( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_start_failed"], +) async def test_option_flow_install_multi_pan_addon_start_fails( hass: HomeAssistant, addon_store_info, @@ -1225,7 +1248,7 @@ async def test_option_flow_install_multi_pan_addon_start_fails( ) -> None: """Test installing the multi pan addon.""" - start_addon.side_effect = HassioAPIError("Boom") + start_addon.side_effect = SupervisorError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1251,32 +1274,35 @@ async def test_option_flow_install_multi_pan_addon_start_fails( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + start_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_start_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_set_config_failed"], +) async def test_option_flow_install_multi_pan_addon_set_options_fails( hass: HomeAssistant, addon_store_info, @@ -1287,7 +1313,7 @@ async def test_option_flow_install_multi_pan_addon_set_options_fails( ) -> None: """Test installing the multi pan addon.""" - set_addon_options.side_effect = HassioAPIError("Boom") + set_addon_options.side_effect = SupervisorError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1313,13 +1339,17 @@ async def 
test_option_flow_install_multi_pan_addon_set_options_fails( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_set_config_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_info_failed"], +) async def test_option_flow_addon_info_fails( hass: HomeAssistant, addon_store_info, @@ -1327,7 +1357,7 @@ async def test_option_flow_addon_info_fails( ) -> None: """Test installing the multi pan addon.""" - addon_store_info.side_effect = HassioAPIError("Boom") + addon_store_info.side_effect = SupervisorError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1343,6 +1373,10 @@ async def test_option_flow_addon_info_fails( assert result["reason"] == "addon_info_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.zha_migration_failed"], +) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_initiate_migration", side_effect=Exception("Boom!"), @@ -1390,7 +1424,7 @@ async def test_option_flow_install_multi_pan_addon_zha_migration_fails_step_1( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT @@ -1398,6 +1432,10 @@ async def test_option_flow_install_multi_pan_addon_zha_migration_fails_step_1( set_addon_options.assert_not_called() +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.zha_migration_failed"], +) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_finish_migration", side_effect=Exception("Boom!"), @@ -1446,26 +1484,25 @@ async def test_option_flow_install_multi_pan_addon_zha_migration_fails_step_2( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + start_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT @@ -1626,7 +1663,7 @@ async def test_check_multi_pan_addon_info_error( ) -> None: """Test `check_multi_pan_addon` where the addon info cannot be read.""" - addon_store_info.side_effect = HassioAPIError("Boom") + addon_store_info.side_effect = SupervisorError("Boom") with pytest.raises(HomeAssistantError): await 
silabs_multiprotocol_addon.check_multi_pan_addon(hass) @@ -1662,18 +1699,15 @@ async def test_check_multi_pan_addon_auto_start( ) -> None: """Test `check_multi_pan_addon` auto starting the addon.""" - addon_info.return_value["state"] = "not_running" - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "not_running", - } + addon_info.return_value.state = "not_running" + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True # An error is raised even if we auto-start with pytest.raises(HomeAssistantError): await silabs_multiprotocol_addon.check_multi_pan_addon(hass) - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + start_addon.assert_called_once_with("core_silabs_multiprotocol") async def test_check_multi_pan_addon( @@ -1681,12 +1715,9 @@ async def test_check_multi_pan_addon( ) -> None: """Test `check_multi_pan_addon`.""" - addon_info.return_value["state"] = "started" - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "running", - } + addon_info.return_value.state = "started" + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True await silabs_multiprotocol_addon.check_multi_pan_addon(hass) start_addon.assert_not_called() @@ -1712,12 +1743,9 @@ async def test_multi_pan_addon_using_device_not_running( ) -> None: """Test `multi_pan_addon_using_device` when the addon isn't running.""" - addon_info.return_value["state"] = "not_running" - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "not_running", - } + addon_info.return_value.state = "not_running" + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True assert ( await silabs_multiprotocol_addon.multi_pan_addon_using_device( @@ -1740,18 +1768,15 @@ async def test_multi_pan_addon_using_device( ) -> None: """Test `multi_pan_addon_using_device` when the addon isn't running.""" - addon_info.return_value["state"] = "started" - addon_info.return_value["options"] = { + addon_info.return_value.state = "started" + addon_info.return_value.options = { "autoflash_firmware": True, "device": options_device, "baudrate": "115200", "flow_control": True, } - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "running", - } + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True assert ( await silabs_multiprotocol_addon.multi_pan_addon_using_device( diff --git a/tests/components/homeassistant_sky_connect/conftest.py b/tests/components/homeassistant_sky_connect/conftest.py index d71bf4305b3..c5bfa4bd609 100644 --- a/tests/components/homeassistant_sky_connect/conftest.py +++ b/tests/components/homeassistant_sky_connect/conftest.py @@ -47,12 +47,3 @@ def mock_zha_get_last_network_settings() -> Generator[None]: AsyncMock(return_value=None), ): yield - - -@pytest.fixture(name="stop_addon") -def stop_addon_fixture(): - """Mock stop add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_stop_addon" - ) as stop_addon: - yield stop_addon diff --git a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index 0d4c517b07f..055b6347267 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import Mock, patch import 
pytest from homeassistant.components import usb -from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.hassio import AddonInfo, AddonState from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( STEP_PICK_FIRMWARE_ZIGBEE, ) @@ -159,6 +159,7 @@ async def test_options_flow( } +@pytest.mark.usefixtures("supervisor_client") @pytest.mark.parametrize( ("usb_data", "model"), [ diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index 95d7df89c9d..1067be7b56e 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -1,13 +1,25 @@ """Test the Home Assistant Yellow config flow.""" from collections.abc import Generator -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest -from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN -from homeassistant.components.homeassistant_yellow.const import DOMAIN -from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN +from homeassistant.components.hassio import ( + DOMAIN as HASSIO_DOMAIN, + AddonInfo, + AddonState, +) +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + STEP_PICK_FIRMWARE_ZIGBEE, +) +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + CONF_DISABLE_MULTI_PAN, + get_flasher_addon_manager, + get_multiprotocol_addon_manager, +) +from homeassistant.components.homeassistant_hardware.util import ApplicationType +from homeassistant.components.homeassistant_yellow.const import DOMAIN, RADIO_DEVICE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -24,6 +36,16 @@ def config_flow_handler(hass: HomeAssistant) -> Generator[None]: yield +@pytest.fixture(autouse=True) +def mock_get_supervisor_client(supervisor_client: AsyncMock) -> Generator[None]: + """Mock get_supervisor_client method.""" + with patch( + "homeassistant.components.homeassistant_yellow.config_flow.get_supervisor_client", + return_value=supervisor_client, + ): + yield + + @pytest.fixture(name="get_yellow_settings") def mock_get_yellow_settings(): """Mock getting yellow settings.""" @@ -44,12 +66,9 @@ def mock_set_yellow_settings(): @pytest.fixture(name="reboot_host") -def mock_reboot_host(): +def mock_reboot_host(supervisor_client: AsyncMock) -> AsyncMock: """Mock rebooting host.""" - with patch( - "homeassistant.components.homeassistant_yellow.config_flow.async_reboot_host", - ) as reboot_host: - yield reboot_host + return supervisor_client.host.reboot async def test_config_flow(hass: HomeAssistant) -> None: @@ -57,22 +76,28 @@ async def test_config_flow(hass: HomeAssistant) -> None: mock_integration(hass, MockModule("hassio")) await async_setup_component(hass, HASSIO_DOMAIN, {}) - with patch( - "homeassistant.components.homeassistant_yellow.async_setup_entry", - return_value=True, - ) as mock_setup_entry: + with ( + patch( + "homeassistant.components.homeassistant_yellow.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ), + ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "system"} ) assert result["type"] is 
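# Illustrative sketch of the reworked reboot_host fixture above: instead of patching
# async_reboot_host, it simply exposes the host.reboot mock that already lives on
# the shared supervisor_client fixture. The supervisor_client fixture below is a
# local stand-in for the real shared one.
import asyncio
from unittest.mock import AsyncMock

import pytest


@pytest.fixture(name="supervisor_client")
def supervisor_client_fixture() -> AsyncMock:
    """Stand-in for the shared supervisor client mock."""
    return AsyncMock()


@pytest.fixture(name="reboot_host")
def mock_reboot_host(supervisor_client: AsyncMock) -> AsyncMock:
    """Mock rebooting host by handing out the client's own host.reboot mock."""
    return supervisor_client.host.reboot


def test_reboot_is_observable(
    reboot_host: AsyncMock, supervisor_client: AsyncMock
) -> None:
    # Anything awaited on supervisor_client.host.reboot is visible via reboot_host.
    asyncio.run(supervisor_client.host.reboot())
    assert reboot_host.call_count == 1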
FlowResultType.CREATE_ENTRY assert result["title"] == "Home Assistant Yellow" - assert result["data"] == {} + assert result["data"] == {"firmware": "ezsp"} assert result["options"] == {} assert len(mock_setup_entry.mock_calls) == 1 config_entry = hass.config_entries.async_entries(DOMAIN)[0] - assert config_entry.data == {} + assert config_entry.data == {"firmware": "ezsp"} assert config_entry.options == {} assert config_entry.title == "Home Assistant Yellow" @@ -84,10 +109,12 @@ async def test_config_flow_single_entry(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -104,176 +131,17 @@ async def test_config_flow_single_entry(hass: HomeAssistant) -> None: mock_setup_entry.assert_not_called() -async def test_option_flow_install_multi_pan_addon( - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, -) -> None: - """Test installing the multi pan addon.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - {"next_step_id": "multipan_settings"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "addon_not_installed" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "enable_multi_pan": True, - }, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - "autoflash_firmware": True, - "device": "/dev/ttyAMA1", - "baudrate": "115200", - "flow_control": True, - } - }, - ) - - await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_option_flow_install_multi_pan_addon_zha( - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, -) -> None: - """Test installing the multi pan addon when a zha config entry exists.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - - zha_config_entry = 
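# Quick check of the equality relied on above: the config entry is created with
# data={"firmware": ApplicationType.EZSP} but asserted against the plain string
# "ezsp", which suggests ApplicationType behaves like a string enum. A local
# stand-in enum (values copied from the tests above) demonstrates the comparison:
from enum import StrEnum


class FakeApplicationType(StrEnum):
    """Stand-in mirroring the firmware values used in these tests."""

    EZSP = "ezsp"
    SPINEL = "spinel"
    CPC = "cpc"


assert FakeApplicationType.EZSP == "ezsp"
assert {"firmware": FakeApplicationType.EZSP} == {"firmware": "ezsp"}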
MockConfigEntry( - data={"device": {"path": "/dev/ttyAMA1"}, "radio_type": "ezsp"}, - domain=ZHA_DOMAIN, - options={}, - title="Yellow", - ) - zha_config_entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - {"next_step_id": "multipan_settings"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "addon_not_installed" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "enable_multi_pan": True, - }, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - "autoflash_firmware": True, - "device": "/dev/ttyAMA1", - "baudrate": "115200", - "flow_control": True, - } - }, - ) - # Check the ZHA config entry data is updated - assert zha_config_entry.data == { - "device": { - "path": "socket://core-silabs-multiprotocol:9999", - "baudrate": 115200, - "flow_control": None, - }, - "radio_type": "ezsp", - } - - await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - - @pytest.mark.parametrize( ("reboot_menu_choice", "reboot_calls"), [("reboot_now", 1), ("reboot_later", 0)], ) async def test_option_flow_led_settings( hass: HomeAssistant, - get_yellow_settings, - set_yellow_settings, - reboot_host, - reboot_menu_choice, - reboot_calls, + get_yellow_settings: AsyncMock, + set_yellow_settings: AsyncMock, + reboot_host: AsyncMock, + reboot_menu_choice: str, + reboot_calls: int, ) -> None: """Test updating LED settings.""" mock_integration(hass, MockModule("hassio")) @@ -281,10 +149,12 @@ async def test_option_flow_led_settings( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -313,7 +183,7 @@ async def test_option_flow_led_settings( {"next_step_id": reboot_menu_choice}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert len(reboot_host.mock_calls) == reboot_calls + assert reboot_host.call_count == reboot_calls async def test_option_flow_led_settings_unchanged( @@ -327,10 +197,12 @@ async def test_option_flow_led_settings_unchanged( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -359,10 +231,12 @@ async def test_option_flow_led_settings_fail_1(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - 
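# Note on the assertion change above: reboot_host.call_count replaces
# len(reboot_host.mock_calls). The two are not equivalent in general, because
# mock_calls also records calls made on child mocks, while call_count only counts
# direct calls on the mock itself:
from unittest.mock import Mock

m = Mock()
m.host.reboot()  # a call on a child mock
assert m.call_count == 0  # the parent mock itself was never called
assert len(m.mock_calls) == 1  # but the child call is recorded here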
data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -391,10 +265,12 @@ async def test_option_flow_led_settings_fail_2( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -418,3 +294,140 @@ async def test_option_flow_led_settings_fail_2( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "write_hw_settings_error" + + +async def test_firmware_options_flow(hass: HomeAssistant) -> None: + """Test the firmware options flow for Yellow.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + config_entry = MockConfigEntry( + data={"firmware": ApplicationType.SPINEL}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "main_menu" + assert "firmware_settings" in result["menu_options"] + + # Pick firmware settings + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": "firmware_settings"}, + ) + + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert result["description_placeholders"]["model"] == "Home Assistant Yellow" + + async def mock_async_step_pick_firmware_zigbee(self, data): + return await self.async_step_confirm_zigbee(user_input={}) + + with patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareOptionsFlow.async_step_pick_firmware_zigbee", + autospec=True, + side_effect=mock_async_step_pick_firmware_zigbee, + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"] is True + + assert config_entry.data == { + "firmware": "ezsp", + } + + +@pytest.mark.usefixtures("supervisor_client") +async def test_options_flow_multipan_uninstall(hass: HomeAssistant) -> None: + """Test options flow for when multi-PAN firmware is installed.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + config_entry = MockConfigEntry( + data={"firmware": ApplicationType.CPC}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + # Multi-PAN addon is running + mock_multipan_manager = Mock(spec_set=await get_multiprotocol_addon_manager(hass)) + mock_multipan_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": RADIO_DEVICE}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + mock_flasher_manager = Mock(spec_set=get_flasher_addon_manager(hass)) + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.0.0", + ) + + with ( + patch( + 
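# Illustrative sketch of the patching technique used above: with autospec=True the
# replaced step receives the flow instance as its first argument, so an async
# side_effect can delegate to another step on the same flow. The Flow class below
# is a hypothetical stand-in, not the Home Assistant options flow.
import asyncio
from unittest.mock import patch


class Flow:
    async def async_step_pick(self, user_input):
        return "picked"

    async def async_step_confirm(self, user_input):
        return f"confirmed:{user_input}"


async def fake_step_pick(self, user_input):
    # Delegates to a different step of the same instance, as the test above does.
    return await self.async_step_confirm(user_input)


async def _demo_autospec_patch() -> None:
    with patch.object(
        Flow, "async_step_pick", autospec=True, side_effect=fake_step_pick
    ):
        assert await Flow().async_step_pick("zigbee") == "confirmed:zigbee"


asyncio.run(_demo_autospec_patch())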
"homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_multiprotocol_addon_manager", + return_value=mock_multipan_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "main_menu" + assert "multipan_settings" in result["menu_options"] + + # Pick multi-PAN settings + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": "multipan_settings"}, + ) + + # Pick the uninstall option + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": "uninstall_addon"}, + ) + + # Check the box + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={CONF_DISABLE_MULTI_PAN: True} + ) + + # Finish the flow + result = await hass.config_entries.options.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.options.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # We've reverted the firmware back to Zigbee + assert config_entry.data["firmware"] == "ezsp" diff --git a/tests/components/homeassistant_yellow/test_hardware.py b/tests/components/homeassistant_yellow/test_hardware.py index 9d43b341abf..4fd2eddb704 100644 --- a/tests/components/homeassistant_yellow/test_hardware.py +++ b/tests/components/homeassistant_yellow/test_hardware.py @@ -13,6 +13,7 @@ from tests.common import MockConfigEntry, MockModule, mock_integration from tests.typing import WebSocketGenerator +@pytest.mark.usefixtures("supervisor_client") async def test_hardware_info( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, addon_store_info ) -> None: @@ -65,6 +66,7 @@ async def test_hardware_info( @pytest.mark.parametrize("os_info", [None, {"board": None}, {"board": "other"}]) +@pytest.mark.usefixtures("supervisor_client") async def test_hardware_info_fail( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, os_info, addon_store_info ) -> None: diff --git a/tests/components/homeassistant_yellow/test_init.py b/tests/components/homeassistant_yellow/test_init.py index ec3ba4e7005..5d534dad1e7 100644 --- a/tests/components/homeassistant_yellow/test_init.py +++ b/tests/components/homeassistant_yellow/test_init.py @@ -6,10 +6,14 @@ import pytest from homeassistant.components import zha from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN -from homeassistant.components.hassio.handler import HassioAPIError +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareGuess, +) from homeassistant.components.homeassistant_yellow.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, MockModule, mock_integration @@ -27,10 +31,12 @@ async def 
test_setup_entry( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -42,6 +48,14 @@ async def test_setup_entry( "homeassistant.components.onboarding.async_is_onboarded", return_value=onboarded, ), + patch( + "homeassistant.components.homeassistant_yellow.guess_firmware_type", + return_value=FirmwareGuess( # Nothing is setup + is_running=False, + firmware_type=ApplicationType.EZSP, + source="unknown", + ), + ), ): assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) @@ -74,118 +88,12 @@ async def test_setup_zha(hass: HomeAssistant, addon_store_info) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_yellow.get_os_info", - return_value={"board": "yellow"}, - ) as mock_get_os_info, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get_os_info.mock_calls) == 1 - - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": "hardware", - "path": "/dev/ttyAMA1", - }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == "Yellow" - - -async def test_setup_zha_multipan( - hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - addon_info.return_value["options"]["device"] = "/dev/ttyAMA1" - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_yellow.get_os_info", - return_value={"board": "yellow"}, - ) as mock_get_os_info, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get_os_info.mock_calls) == 1 - - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": None, - 
"path": "socket://core-silabs-multiprotocol:9999", - }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == "Yellow Multiprotocol" - - -async def test_setup_zha_multipan_other_device( - hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - addon_info.return_value["options"]["device"] = "/dev/not_yellow_radio" - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -229,10 +137,12 @@ async def test_setup_entry_no_hassio(hass: HomeAssistant) -> None: """Test setup of a config entry without hassio.""" # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) assert len(hass.config_entries.async_entries()) == 1 @@ -254,10 +164,12 @@ async def test_setup_entry_wrong_board(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) assert len(hass.config_entries.async_entries()) == 1 @@ -280,10 +192,12 @@ async def test_setup_entry_wait_hassio(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with patch( @@ -303,14 +217,15 @@ async def test_setup_entry_addon_info_fails( """Test setup of a config entry when fetching addon info fails.""" mock_integration(hass, MockModule("hassio")) await async_setup_component(hass, HASSIO_DOMAIN, {}) - addon_store_info.side_effect = HassioAPIError("Boom") # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.CPC}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -319,41 +234,15 @@ async def test_setup_entry_addon_info_fails( return_value={"board": "yellow"}, ), patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False + "homeassistant.components.onboarding.async_is_onboarded", + return_value=False, + ), + patch( + "homeassistant.components.homeassistant_yellow.check_multi_pan_addon", + side_effect=HomeAssistantError("Boom"), ), ): assert not await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_setup_entry_addon_not_running( - hass: HomeAssistant, addon_installed, start_addon -) -> None: - """Test the addon is started if it is not running.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_yellow.get_os_info", - return_value={"board": 
"yellow"}, - ), - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.SETUP_RETRY - start_addon.assert_called_once() diff --git a/tests/components/homekit/test_accessories.py b/tests/components/homekit/test_accessories.py index c37cac84b8a..00cf42bb916 100644 --- a/tests/components/homekit/test_accessories.py +++ b/tests/components/homekit/test_accessories.py @@ -121,7 +121,7 @@ async def test_home_accessory(hass: HomeAssistant, hk_driver) -> None: serv = acc3.services[0] # SERV_ACCESSORY_INFO assert ( serv.get_characteristic(CHAR_NAME).value - == "Home Accessory that exceeds the maximum maximum maximum maximum " + == "Home Accessory that exceeds the maximum maximum maximum maximum" ) assert ( serv.get_characteristic(CHAR_MANUFACTURER).value @@ -154,7 +154,7 @@ async def test_home_accessory(hass: HomeAssistant, hk_driver) -> None: serv = acc4.services[0] # SERV_ACCESSORY_INFO assert ( serv.get_characteristic(CHAR_NAME).value - == "Home Accessory that exceeds the maximum maximum maximum maximum " + == "Home Accessory that exceeds the maximum maximum maximum maximum" ) assert ( serv.get_characteristic(CHAR_MANUFACTURER).value diff --git a/tests/components/homekit/test_homekit.py b/tests/components/homekit/test_homekit.py index ba8c1919e73..4000c61e422 100644 --- a/tests/components/homekit/test_homekit.py +++ b/tests/components/homekit/test_homekit.py @@ -2030,7 +2030,6 @@ async def test_homekit_finds_linked_motion_sensors( @pytest.mark.parametrize( ("domain", "device_class"), [ - ("binary_sensor", BinarySensorDeviceClass.OCCUPANCY), ("event", EventDeviceClass.DOORBELL), ], ) diff --git a/tests/components/homekit/test_type_covers.py b/tests/components/homekit/test_type_covers.py index b3125c6581c..049f6818784 100644 --- a/tests/components/homekit/test_type_covers.py +++ b/tests/components/homekit/test_type_covers.py @@ -5,8 +5,9 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, CoverEntityFeature, + CoverState, ) from homeassistant.components.homekit.const import ( ATTR_OBSTRUCTION_DETECTED, @@ -31,12 +32,8 @@ from homeassistant.const import ( ATTR_SUPPORTED_FEATURES, EVENT_HOMEASSISTANT_START, SERVICE_SET_COVER_TILT_POSITION, - STATE_CLOSED, - STATE_CLOSING, STATE_OFF, STATE_ON, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -64,13 +61,15 @@ async def test_garage_door_open_close( assert acc.char_current_state.value == HK_DOOR_OPEN assert acc.char_target_state.value == HK_DOOR_OPEN - hass.states.async_set(entity_id, STATE_CLOSED, {ATTR_OBSTRUCTION_DETECTED: False}) + hass.states.async_set( + entity_id, CoverState.CLOSED, {ATTR_OBSTRUCTION_DETECTED: False} + ) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_CLOSED assert acc.char_target_state.value == HK_DOOR_CLOSED assert acc.char_obstruction_detected.value is False - hass.states.async_set(entity_id, STATE_OPEN, {ATTR_OBSTRUCTION_DETECTED: True}) + hass.states.async_set(entity_id, CoverState.OPEN, {ATTR_OBSTRUCTION_DETECTED: True}) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_OPEN assert acc.char_target_state.value == HK_DOOR_OPEN @@ -92,8 +91,8 @@ async def test_garage_door_open_close( assert acc.available is True # Set from HomeKit - call_close_cover = 
async_mock_service(hass, DOMAIN, "close_cover") - call_open_cover = async_mock_service(hass, DOMAIN, "open_cover") + call_close_cover = async_mock_service(hass, COVER_DOMAIN, "close_cover") + call_open_cover = async_mock_service(hass, COVER_DOMAIN, "open_cover") acc.char_target_state.client_update_value(1) await hass.async_block_till_done() @@ -104,7 +103,7 @@ async def test_garage_door_open_close( assert len(events) == 1 assert events[-1].data[ATTR_VALUE] is None - hass.states.async_set(entity_id, STATE_CLOSED) + hass.states.async_set(entity_id, CoverState.CLOSED) await hass.async_block_till_done() acc.char_target_state.client_update_value(1) @@ -123,7 +122,7 @@ async def test_garage_door_open_close( assert len(events) == 3 assert events[-1].data[ATTR_VALUE] is None - hass.states.async_set(entity_id, STATE_OPEN) + hass.states.async_set(entity_id, CoverState.OPEN) await hass.async_block_till_done() acc.char_target_state.client_update_value(0) @@ -140,7 +139,7 @@ async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) -> hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 0, @@ -159,7 +158,7 @@ async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) -> hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 50, @@ -172,7 +171,7 @@ async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) -> hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: "GARBAGE", @@ -221,7 +220,7 @@ async def test_windowcovering_set_cover_position( hass.states.async_set( entity_id, - STATE_OPENING, + CoverState.OPENING, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 60, @@ -234,7 +233,7 @@ async def test_windowcovering_set_cover_position( hass.states.async_set( entity_id, - STATE_OPENING, + CoverState.OPENING, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 70.0, @@ -247,7 +246,7 @@ async def test_windowcovering_set_cover_position( hass.states.async_set( entity_id, - STATE_CLOSING, + CoverState.CLOSING, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 50, @@ -260,7 +259,7 @@ async def test_windowcovering_set_cover_position( hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 50, @@ -272,7 +271,9 @@ async def test_windowcovering_set_cover_position( assert acc.char_position_state.value == 2 # Set from HomeKit - call_set_cover_position = async_mock_service(hass, DOMAIN, "set_cover_position") + call_set_cover_position = async_mock_service( + hass, COVER_DOMAIN, "set_cover_position" + ) acc.char_target_position.client_update_value(25) await hass.async_block_till_done() @@ -301,7 +302,7 @@ async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) - hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 0, @@ -320,7 +321,7 @@ async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) - hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 50, @@ -333,7 
+334,7 @@ async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) - hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: "GARBAGE", @@ -367,29 +368,37 @@ async def test_windowcovering_cover_set_tilt( assert acc.char_current_tilt.value == 0 assert acc.char_target_tilt.value == 0 - hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: None}) + hass.states.async_set( + entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: None} + ) await hass.async_block_till_done() assert acc.char_current_tilt.value == 0 assert acc.char_target_tilt.value == 0 - hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: 100}) + hass.states.async_set( + entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: 100} + ) await hass.async_block_till_done() assert acc.char_current_tilt.value == 90 assert acc.char_target_tilt.value == 90 - hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: 50}) + hass.states.async_set( + entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: 50} + ) await hass.async_block_till_done() assert acc.char_current_tilt.value == 0 assert acc.char_target_tilt.value == 0 - hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: 0}) + hass.states.async_set( + entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: 0} + ) await hass.async_block_till_done() assert acc.char_current_tilt.value == -90 assert acc.char_target_tilt.value == -90 # set from HomeKit call_set_tilt_position = async_mock_service( - hass, DOMAIN, SERVICE_SET_COVER_TILT_POSITION + hass, COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION ) # HomeKit sets tilts between -90 and 90 (degrees), whereas @@ -463,33 +472,33 @@ async def test_windowcovering_open_close( assert acc.char_target_position.value == 0 assert acc.char_position_state.value == 2 - hass.states.async_set(entity_id, STATE_OPENING) + hass.states.async_set(entity_id, CoverState.OPENING) await hass.async_block_till_done() assert acc.char_current_position.value == 0 assert acc.char_target_position.value == 0 assert acc.char_position_state.value == 1 - hass.states.async_set(entity_id, STATE_OPEN) + hass.states.async_set(entity_id, CoverState.OPEN) await hass.async_block_till_done() assert acc.char_current_position.value == 100 assert acc.char_target_position.value == 100 assert acc.char_position_state.value == 2 - hass.states.async_set(entity_id, STATE_CLOSING) + hass.states.async_set(entity_id, CoverState.CLOSING) await hass.async_block_till_done() assert acc.char_current_position.value == 100 assert acc.char_target_position.value == 100 assert acc.char_position_state.value == 0 - hass.states.async_set(entity_id, STATE_CLOSED) + hass.states.async_set(entity_id, CoverState.CLOSED) await hass.async_block_till_done() assert acc.char_current_position.value == 0 assert acc.char_target_position.value == 0 assert acc.char_position_state.value == 2 # Set from HomeKit - call_close_cover = async_mock_service(hass, DOMAIN, "close_cover") - call_open_cover = async_mock_service(hass, DOMAIN, "open_cover") + call_close_cover = async_mock_service(hass, COVER_DOMAIN, "close_cover") + call_open_cover = async_mock_service(hass, COVER_DOMAIN, "open_cover") acc.char_target_position.client_update_value(25) await hass.async_block_till_done() @@ -536,9 +545,9 @@ async def test_windowcovering_open_close_stop( await hass.async_block_till_done() # Set from HomeKit - 
call_close_cover = async_mock_service(hass, DOMAIN, "close_cover") - call_open_cover = async_mock_service(hass, DOMAIN, "open_cover") - call_stop_cover = async_mock_service(hass, DOMAIN, "stop_cover") + call_close_cover = async_mock_service(hass, COVER_DOMAIN, "close_cover") + call_open_cover = async_mock_service(hass, COVER_DOMAIN, "open_cover") + call_stop_cover = async_mock_service(hass, COVER_DOMAIN, "stop_cover") acc.char_target_position.client_update_value(25) await hass.async_block_till_done() @@ -590,7 +599,7 @@ async def test_windowcovering_open_close_with_position_and_stop( await hass.async_block_till_done() # Set from HomeKit - call_stop_cover = async_mock_service(hass, DOMAIN, "stop_cover") + call_stop_cover = async_mock_service(hass, COVER_DOMAIN, "stop_cover") acc.char_hold_position.client_update_value(0) await hass.async_block_till_done() @@ -708,20 +717,20 @@ async def test_garage_door_with_linked_obstruction_sensor( assert acc.char_current_state.value == HK_DOOR_OPEN assert acc.char_target_state.value == HK_DOOR_OPEN - hass.states.async_set(entity_id, STATE_CLOSED) + hass.states.async_set(entity_id, CoverState.CLOSED) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_CLOSED assert acc.char_target_state.value == HK_DOOR_CLOSED assert acc.char_obstruction_detected.value is False - hass.states.async_set(entity_id, STATE_OPEN) + hass.states.async_set(entity_id, CoverState.OPEN) hass.states.async_set(linked_obstruction_sensor_entity_id, STATE_ON) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_OPEN assert acc.char_target_state.value == HK_DOOR_OPEN assert acc.char_obstruction_detected.value is True - hass.states.async_set(entity_id, STATE_CLOSED) + hass.states.async_set(entity_id, CoverState.CLOSED) hass.states.async_set(linked_obstruction_sensor_entity_id, STATE_OFF) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_CLOSED diff --git a/tests/components/homekit/test_type_fans.py b/tests/components/homekit/test_type_fans.py index 1808767c614..67392f11f14 100644 --- a/tests/components/homekit/test_type_fans.py +++ b/tests/components/homekit/test_type_fans.py @@ -11,7 +11,7 @@ from homeassistant.components.fan import ( ATTR_PRESET_MODES, DIRECTION_FORWARD, DIRECTION_REVERSE, - DOMAIN, + DOMAIN as FAN_DOMAIN, FanEntityFeature, ) from homeassistant.components.homekit.const import ATTR_VALUE, PROP_MIN_STEP @@ -63,8 +63,8 @@ async def test_fan_basic(hass: HomeAssistant, hk_driver, events: list[Event]) -> assert acc.char_active.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, FAN_DOMAIN, "turn_off") char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] @@ -144,7 +144,7 @@ async def test_fan_direction( assert acc.char_direction.value == 1 # Set from HomeKit - call_set_direction = async_mock_service(hass, DOMAIN, "set_direction") + call_set_direction = async_mock_service(hass, FAN_DOMAIN, "set_direction") char_direction_iid = acc.char_direction.to_HAP()[HAP_REPR_IID] @@ -218,7 +218,7 @@ async def test_fan_oscillate( assert acc.char_swing.value == 1 # Set from HomeKit - call_oscillate = async_mock_service(hass, DOMAIN, "oscillate") + call_oscillate = async_mock_service(hass, FAN_DOMAIN, "oscillate") char_swing_iid = acc.char_swing.to_HAP()[HAP_REPR_IID] @@ -301,7 +301,7 @@ async 
def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) -> assert acc.char_speed.value == 100 # Set from HomeKit - call_set_percentage = async_mock_service(hass, DOMAIN, "set_percentage") + call_set_percentage = async_mock_service(hass, FAN_DOMAIN, "set_percentage") char_speed_iid = acc.char_speed.to_HAP()[HAP_REPR_IID] char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] @@ -343,7 +343,7 @@ async def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) -> assert acc.char_speed.value == 50 assert acc.char_active.value == 0 - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") hk_driver.set_characteristics( { @@ -409,11 +409,11 @@ async def test_fan_set_all_one_shot( assert hass.states.get(entity_id).state == STATE_OFF # Set from HomeKit - call_set_percentage = async_mock_service(hass, DOMAIN, "set_percentage") - call_oscillate = async_mock_service(hass, DOMAIN, "oscillate") - call_set_direction = async_mock_service(hass, DOMAIN, "set_direction") - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_set_percentage = async_mock_service(hass, FAN_DOMAIN, "set_percentage") + call_oscillate = async_mock_service(hass, FAN_DOMAIN, "oscillate") + call_set_direction = async_mock_service(hass, FAN_DOMAIN, "set_direction") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, FAN_DOMAIN, "turn_off") char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] char_direction_iid = acc.char_direction.to_HAP()[HAP_REPR_IID] @@ -641,8 +641,8 @@ async def test_fan_multiple_preset_modes( assert acc.preset_mode_chars["auto"].value == 0 assert acc.preset_mode_chars["smart"].value == 1 # Set from HomeKit - call_set_preset_mode = async_mock_service(hass, DOMAIN, "set_preset_mode") - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_set_preset_mode = async_mock_service(hass, FAN_DOMAIN, "set_preset_mode") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") char_auto_iid = acc.preset_mode_chars["auto"].to_HAP()[HAP_REPR_IID] @@ -711,8 +711,8 @@ async def test_fan_single_preset_mode( await hass.async_block_till_done() # Set from HomeKit - call_set_preset_mode = async_mock_service(hass, DOMAIN, "set_preset_mode") - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_set_preset_mode = async_mock_service(hass, FAN_DOMAIN, "set_preset_mode") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") char_target_fan_state_iid = acc.char_target_fan_state.to_HAP()[HAP_REPR_IID] diff --git a/tests/components/homekit/test_type_humidifiers.py b/tests/components/homekit/test_type_humidifiers.py index fbb72333c9b..de563503b23 100644 --- a/tests/components/homekit/test_type_humidifiers.py +++ b/tests/components/homekit/test_type_humidifiers.py @@ -26,7 +26,7 @@ from homeassistant.components.humidifier import ( ATTR_MIN_HUMIDITY, DEFAULT_MAX_HUMIDITY, DEFAULT_MIN_HUMIDITY, - DOMAIN, + DOMAIN as HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, HumidifierDeviceClass, ) @@ -106,7 +106,9 @@ async def test_humidifier(hass: HomeAssistant, hk_driver, events: list[Event]) - assert acc.char_active.value == 0 # Set from HomeKit - call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + call_set_humidity = async_mock_service( + hass, HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY + ) char_target_humidity_iid = 
acc.char_target_humidity.to_HAP()[HAP_REPR_IID] @@ -194,7 +196,9 @@ async def test_dehumidifier( assert acc.char_active.value == 0 # Set from HomeKit - call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + call_set_humidity = async_mock_service( + hass, HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY + ) char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID] @@ -257,7 +261,7 @@ async def test_hygrostat_power_state( assert acc.char_active.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) + call_turn_on = async_mock_service(hass, HUMIDIFIER_DOMAIN, SERVICE_TURN_ON) char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] @@ -281,7 +285,7 @@ async def test_hygrostat_power_state( assert len(events) == 1 assert events[-1].data[ATTR_VALUE] == "Active to 1" - call_turn_off = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) + call_turn_off = async_mock_service(hass, HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF) hk_driver.set_characteristics( { @@ -323,7 +327,9 @@ async def test_hygrostat_get_humidity_range( await hass.async_block_till_done() # Set from HomeKit - call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + call_set_humidity = async_mock_service( + hass, HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY + ) char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID] diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index 0f85e07c0bb..fb059b93a13 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -20,14 +20,14 @@ from homeassistant.components.light import ( ATTR_COLOR_MODE, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_WHITE, - DOMAIN, + DOMAIN as LIGHT_DOMAIN, ColorMode, ) from homeassistant.const import ( @@ -83,8 +83,8 @@ async def test_light_basic(hass: HomeAssistant, hk_driver, events: list[Event]) assert acc.char_on.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") char_on_iid = acc.char_on.to_HAP()[HAP_REPR_IID] @@ -160,8 +160,8 @@ async def test_light_brightness( assert acc.char_brightness.value == 40 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") hk_driver.set_characteristics( { @@ -226,6 +226,24 @@ async def test_light_brightness( assert len(events) == 3 assert events[-1].data[ATTR_VALUE] == f"Set state to 0, brightness at 0{PERCENTAGE}" + hk_driver.set_characteristics( + { + HAP_REPR_CHARS: [ + { + HAP_REPR_AID: acc.aid, + HAP_REPR_IID: char_brightness_iid, + HAP_REPR_VALUE: 0, + }, + ] + }, + "mock_addr", + ) + await _wait_for_light_coalesce(hass) + assert call_turn_off + assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 4 + assert events[-1].data[ATTR_VALUE] == f"Set state to 0, brightness at 0{PERCENTAGE}" + # 0 is a special case for homekit, see "Handle Brightness" # in update_state 
hass.states.async_set( @@ -296,7 +314,7 @@ async def test_light_color_temperature( assert acc.char_color_temp.value == 190 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_color_temp_iid = acc.char_color_temp.to_HAP()[HAP_REPR_IID] @@ -372,7 +390,7 @@ async def test_light_color_temperature_and_rgb_color( char_color_temp_iid = acc.char_color_temp.to_HAP()[HAP_REPR_IID] # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") hk_driver.set_characteristics( { @@ -549,7 +567,7 @@ async def test_light_rgb_color( assert acc.char_saturation.value == 90 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -671,7 +689,7 @@ async def test_light_rgb_with_color_temp( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -791,7 +809,7 @@ async def test_light_rgbwx_with_color_temp_and_brightness( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_color_temp_iid = acc.char_color_temp.to_HAP()[HAP_REPR_IID] char_brightness_iid = acc.char_brightness.to_HAP()[HAP_REPR_IID] @@ -858,7 +876,7 @@ async def test_light_rgb_or_w_lights( assert acc.char_color_temp.value == 153 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -985,7 +1003,7 @@ async def test_light_rgb_with_white_switch_to_temp( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -1100,7 +1118,7 @@ async def test_light_rgbww_with_color_temp_conversion( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -1221,7 +1239,7 @@ async def test_light_rgbw_with_color_temp_conversion( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -1325,7 +1343,7 @@ async def test_light_set_brightness_and_color( assert acc.char_saturation.value == 9 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") hk_driver.set_characteristics( { @@ 
-1373,8 +1391,8 @@ async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver) -> None: { ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP], ATTR_BRIGHTNESS: 255, - ATTR_MAX_MIREDS: 500.5, - ATTR_MIN_MIREDS: 153.5, + ATTR_MIN_COLOR_TEMP_KELVIN: 1999, + ATTR_MAX_COLOR_TEMP_KELVIN: 6499, }, ) await hass.async_block_till_done() @@ -1432,7 +1450,7 @@ async def test_light_set_brightness_and_color_temp( assert acc.char_color_temp.value == 224 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") hk_driver.set_characteristics( { diff --git a/tests/components/homekit/test_type_locks.py b/tests/components/homekit/test_type_locks.py index 31f03b1964f..7691e341dcc 100644 --- a/tests/components/homekit/test_type_locks.py +++ b/tests/components/homekit/test_type_locks.py @@ -1,24 +1,34 @@ """Test different accessory types: Locks.""" +from unittest.mock import MagicMock + import pytest -from homeassistant.components.homekit.const import ATTR_VALUE -from homeassistant.components.homekit.type_locks import Lock -from homeassistant.components.lock import ( - DOMAIN, - STATE_JAMMED, - STATE_LOCKING, - STATE_UNLOCKING, +from homeassistant.components import lock +from homeassistant.components.binary_sensor import BinarySensorDeviceClass +from homeassistant.components.event import EventDeviceClass +from homeassistant.components.homekit.accessories import HomeBridge +from homeassistant.components.homekit.const import ( + ATTR_VALUE, + CHAR_PROGRAMMABLE_SWITCH_EVENT, + CONF_LINKED_DOORBELL_SENSOR, + SERV_DOORBELL, + SERV_STATELESS_PROGRAMMABLE_SWITCH, ) +from homeassistant.components.homekit.type_locks import Lock +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState from homeassistant.const import ( ATTR_CODE, + ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, - STATE_LOCKED, + STATE_OFF, + STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN, - STATE_UNLOCKED, ) from homeassistant.core import Event, HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util from tests.common import async_mock_service @@ -40,27 +50,27 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) assert acc.char_current_state.value == 3 assert acc.char_target_state.value == 1 - hass.states.async_set(entity_id, STATE_LOCKED) + hass.states.async_set(entity_id, LockState.LOCKED) await hass.async_block_till_done() assert acc.char_current_state.value == 1 assert acc.char_target_state.value == 1 - hass.states.async_set(entity_id, STATE_LOCKING) + hass.states.async_set(entity_id, LockState.LOCKING) await hass.async_block_till_done() assert acc.char_current_state.value == 0 assert acc.char_target_state.value == 1 - hass.states.async_set(entity_id, STATE_UNLOCKED) + hass.states.async_set(entity_id, LockState.UNLOCKED) await hass.async_block_till_done() assert acc.char_current_state.value == 0 assert acc.char_target_state.value == 0 - hass.states.async_set(entity_id, STATE_UNLOCKING) + hass.states.async_set(entity_id, LockState.UNLOCKING) await hass.async_block_till_done() assert acc.char_current_state.value == 1 assert acc.char_target_state.value == 0 - hass.states.async_set(entity_id, STATE_JAMMED) + hass.states.async_set(entity_id, LockState.JAMMED) await hass.async_block_till_done() assert acc.char_current_state.value == 2 assert acc.char_target_state.value == 0 @@ -78,7 +88,7 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) 
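# Quick check of the attribute switch above: test_light_min_max_mireds now supplies
# the color range as Kelvin attributes instead of mireds. The scales are reciprocal
# (mired = 1_000_000 / kelvin), so the new 1999-6499 K bounds cover roughly the same
# range as the old 153.5-500.5 mired bounds:
def kelvin_to_mired(kelvin: float) -> float:
    """Convert a color temperature in Kelvin to mireds."""
    return 1_000_000 / kelvin


assert round(kelvin_to_mired(1999), 1) == 500.3  # close to the old 500.5 mired maximum
assert round(kelvin_to_mired(6499), 1) == 153.9  # close to the old 153.5 mired minimum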
assert acc.char_target_state.value == 0 assert acc.available is False - hass.states.async_set(entity_id, STATE_UNLOCKED) + hass.states.async_set(entity_id, LockState.UNLOCKED) await hass.async_block_till_done() assert acc.char_current_state.value == 0 assert acc.char_target_state.value == 0 @@ -98,8 +108,8 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) assert acc.char_target_state.value == 0 # Set from HomeKit - call_lock = async_mock_service(hass, DOMAIN, "lock") - call_unlock = async_mock_service(hass, DOMAIN, "unlock") + call_lock = async_mock_service(hass, LOCK_DOMAIN, "lock") + call_unlock = async_mock_service(hass, LOCK_DOMAIN, "unlock") acc.char_target_state.client_update_value(1) await hass.async_block_till_done() @@ -132,7 +142,7 @@ async def test_no_code( acc = Lock(hass, hk_driver, "Lock", entity_id, 2, config) # Set from HomeKit - call_lock = async_mock_service(hass, DOMAIN, "lock") + call_lock = async_mock_service(hass, LOCK_DOMAIN, "lock") acc.char_target_state.client_update_value(1) await hass.async_block_till_done() @@ -142,3 +152,285 @@ async def test_no_code( assert acc.char_target_state.value == 1 assert len(events) == 1 assert events[-1].data[ATTR_VALUE] is None + + +async def test_lock_with_linked_doorbell_sensor(hass: HomeAssistant, hk_driver) -> None: + """Test a lock with a linked doorbell sensor can update.""" + code = "1234" + await async_setup_component(hass, lock.DOMAIN, {lock.DOMAIN: {"platform": "demo"}}) + await hass.async_block_till_done() + doorbell_entity_id = "binary_sensor.doorbell" + + hass.states.async_set( + doorbell_entity_id, + STATE_ON, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY}, + ) + await hass.async_block_till_done() + entity_id = "lock.demo_lock" + + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = Lock( + hass, + hk_driver, + "Lock", + entity_id, + 2, + { + ATTR_CODE: code, + CONF_LINKED_DOORBELL_SENSOR: doorbell_entity_id, + }, + ) + bridge = HomeBridge("hass", hk_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 6 # DoorLock + + service = acc.get_service(SERV_DOORBELL) + assert service + char = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char + + assert char.value is None + + service2 = acc.get_service(SERV_STATELESS_PROGRAMMABLE_SWITCH) + assert service2 + char2 = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char2 + broker = MagicMock() + char2.broker = broker + assert char2.value is None + + hass.states.async_set( + doorbell_entity_id, + STATE_OFF, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + + char.set_value(True) + char2.set_value(True) + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + STATE_ON, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 2 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + STATE_ON, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY}, + force_update=True, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + STATE_ON, + 
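# Illustrative sketch of the observation trick used in the doorbell tests above:
# a MagicMock is attached as the characteristic's broker so that HomeKit
# notifications can be counted through broker.mock_calls. The Characteristic class
# below is a tiny stand-in for the pyhap object, not the real implementation.
from unittest.mock import MagicMock


class Characteristic:
    """Stand-in that notifies its broker whenever the value is set."""

    def __init__(self) -> None:
        self.broker = None
        self.value = None

    def set_value(self, value) -> None:
        self.value = value
        if self.broker is not None:
            self.broker.publish(value, sender=self)


char = Characteristic()
char.broker = MagicMock()
char.set_value(0)
char.set_value(0)
# Every notification is recorded on the mock, mirroring the
# `assert len(broker.mock_calls) == 2` checks in the tests above.
assert len(char.broker.mock_calls) == 2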
{ATTR_DEVICE_CLASS: BinarySensorDeviceClass.OCCUPANCY, "other": "attr"}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + # Ensure we do not throw when the linked + # doorbell sensor is removed + hass.states.async_remove(doorbell_entity_id) + await hass.async_block_till_done() + acc.run() + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + + +async def test_lock_with_linked_doorbell_event(hass: HomeAssistant, hk_driver) -> None: + """Test a lock with a linked doorbell event can update.""" + await async_setup_component(hass, lock.DOMAIN, {lock.DOMAIN: {"platform": "demo"}}) + await hass.async_block_till_done() + doorbell_entity_id = "event.doorbell" + code = "1234" + + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + entity_id = "lock.demo_lock" + + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = Lock( + hass, + hk_driver, + "Lock", + entity_id, + 2, + { + ATTR_CODE: code, + CONF_LINKED_DOORBELL_SENSOR: doorbell_entity_id, + }, + ) + bridge = HomeBridge("hass", hk_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 6 # DoorLock + + service = acc.get_service(SERV_DOORBELL) + assert service + char = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char + + assert char.value is None + + service2 = acc.get_service(SERV_STATELESS_PROGRAMMABLE_SWITCH) + assert service2 + char2 = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char2 + broker = MagicMock() + char2.broker = broker + assert char2.value is None + + hass.states.async_set( + doorbell_entity_id, + STATE_UNKNOWN, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + + char.set_value(True) + char2.set_value(True) + broker.reset_mock() + + original_time = dt_util.utcnow().isoformat() + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 2 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + force_update=True, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL, "other": "attr"}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + # Ensure we do not throw when the linked + # doorbell sensor is removed + hass.states.async_remove(doorbell_entity_id) + await hass.async_block_till_done() + acc.run() + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + + await hass.async_block_till_done() + hass.states.async_set( + doorbell_entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + # Ensure re-adding does 
not fire an event + assert not broker.mock_calls + broker.reset_mock() + + # going from unavailable to a state should not fire an event + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But a second update does + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + +async def test_lock_with_a_missing_linked_doorbell_sensor( + hass: HomeAssistant, hk_driver +) -> None: + """Test a lock with a configured linked doorbell sensor that is missing.""" + await async_setup_component(hass, lock.DOMAIN, {lock.DOMAIN: {"platform": "demo"}}) + await hass.async_block_till_done() + code = "1234" + doorbell_entity_id = "binary_sensor.doorbell" + entity_id = "lock.demo_lock" + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = Lock( + hass, + hk_driver, + "Lock", + entity_id, + 2, + { + ATTR_CODE: code, + CONF_LINKED_DOORBELL_SENSOR: doorbell_entity_id, + }, + ) + bridge = HomeBridge("hass", hk_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 6 # DoorLock + + assert not acc.get_service(SERV_DOORBELL) + assert not acc.get_service(SERV_STATELESS_PROGRAMMABLE_SWITCH) diff --git a/tests/components/homekit/test_type_media_players.py b/tests/components/homekit/test_type_media_players.py index 14c21f0a5f5..78c35b15790 100644 --- a/tests/components/homekit/test_type_media_players.py +++ b/tests/components/homekit/test_type_media_players.py @@ -25,7 +25,7 @@ from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE_LIST, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, - DOMAIN, + DOMAIN as MEDIA_PLAYER_DOMAIN, MediaPlayerDeviceClass, ) from homeassistant.const import ( @@ -112,12 +112,12 @@ async def test_media_player_set_state( assert acc.chars[FEATURE_PLAY_STOP].value is False # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") - call_media_play = async_mock_service(hass, DOMAIN, "media_play") - call_media_pause = async_mock_service(hass, DOMAIN, "media_pause") - call_media_stop = async_mock_service(hass, DOMAIN, "media_stop") - call_toggle_mute = async_mock_service(hass, DOMAIN, "volume_mute") + call_turn_on = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_off") + call_media_play = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_play") + call_media_pause = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_pause") + call_media_stop = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_stop") + call_toggle_mute = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_mute") acc.chars[FEATURE_ON_OFF].client_update_value(True) await hass.async_block_till_done() @@ -252,16 +252,18 @@ async def test_media_player_television( assert caplog.records[-2].levelname == "DEBUG" # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") - call_media_play = async_mock_service(hass, DOMAIN, "media_play") - call_media_pause = async_mock_service(hass, DOMAIN, "media_pause") - call_media_play_pause = async_mock_service(hass, DOMAIN, "media_play_pause") - 
call_toggle_mute = async_mock_service(hass, DOMAIN, "volume_mute") - call_select_source = async_mock_service(hass, DOMAIN, "select_source") - call_volume_up = async_mock_service(hass, DOMAIN, "volume_up") - call_volume_down = async_mock_service(hass, DOMAIN, "volume_down") - call_volume_set = async_mock_service(hass, DOMAIN, "volume_set") + call_turn_on = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_off") + call_media_play = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_play") + call_media_pause = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_pause") + call_media_play_pause = async_mock_service( + hass, MEDIA_PLAYER_DOMAIN, "media_play_pause" + ) + call_toggle_mute = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_mute") + call_select_source = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "select_source") + call_volume_up = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_up") + call_volume_down = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_down") + call_volume_set = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_set") acc.char_active.client_update_value(1) await hass.async_block_till_done() @@ -634,7 +636,7 @@ async def test_media_player_television_unsafe_chars( await hass.async_block_till_done() assert acc.char_input_source.value == 1 - call_select_source = async_mock_service(hass, DOMAIN, "select_source") + call_select_source = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "select_source") acc.char_input_source.client_update_value(3) await hass.async_block_till_done() diff --git a/tests/components/homekit/test_type_remote.py b/tests/components/homekit/test_type_remote.py index dedf3ae34db..62c45c6ee89 100644 --- a/tests/components/homekit/test_type_remote.py +++ b/tests/components/homekit/test_type_remote.py @@ -16,7 +16,7 @@ from homeassistant.components.remote import ( ATTR_ACTIVITY, ATTR_ACTIVITY_LIST, ATTR_CURRENT_ACTIVITY, - DOMAIN, + DOMAIN as REMOTE_DOMAIN, RemoteEntityFeature, ) from homeassistant.const import ( @@ -91,8 +91,8 @@ async def test_activity_remote( assert acc.char_input_source.value == 1 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, REMOTE_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, REMOTE_DOMAIN, "turn_off") acc.char_active.client_update_value(1) await hass.async_block_till_done() diff --git a/tests/components/homekit/test_type_security_systems.py b/tests/components/homekit/test_type_security_systems.py index 27580949ec2..94b0e68e76d 100644 --- a/tests/components/homekit/test_type_security_systems.py +++ b/tests/components/homekit/test_type_security_systems.py @@ -4,21 +4,16 @@ from pyhap.loader import get_loader import pytest from homeassistant.components.alarm_control_panel import ( - DOMAIN, + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.homekit.const import ATTR_VALUE from homeassistant.components.homekit.type_security_systems import SecuritySystem from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + STATE_UNAVAILABLE, STATE_UNKNOWN, ) from homeassistant.core import Event, HomeAssistant @@ -46,27 +41,27 @@ async def 
test_switch_set_state( assert acc.char_current_state.value == 3 assert acc.char_target_state.value == 3 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_AWAY) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_HOME) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_HOME) await hass.async_block_till_done() assert acc.char_target_state.value == 0 assert acc.char_current_state.value == 0 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_NIGHT) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_NIGHT) await hass.async_block_till_done() assert acc.char_target_state.value == 2 assert acc.char_current_state.value == 2 - hass.states.async_set(entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entity_id, AlarmControlPanelState.DISARMED) await hass.async_block_till_done() assert acc.char_target_state.value == 3 assert acc.char_current_state.value == 3 - hass.states.async_set(entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert acc.char_target_state.value == 3 assert acc.char_current_state.value == 4 @@ -77,10 +72,16 @@ async def test_switch_set_state( assert acc.char_current_state.value == 4 # Set from HomeKit - call_arm_home = async_mock_service(hass, DOMAIN, "alarm_arm_home") - call_arm_away = async_mock_service(hass, DOMAIN, "alarm_arm_away") - call_arm_night = async_mock_service(hass, DOMAIN, "alarm_arm_night") - call_disarm = async_mock_service(hass, DOMAIN, "alarm_disarm") + call_arm_home = async_mock_service( + hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_home" + ) + call_arm_away = async_mock_service( + hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_away" + ) + call_arm_night = async_mock_service( + hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_night" + ) + call_disarm = async_mock_service(hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_disarm") acc.char_target_state.client_update_value(0) await hass.async_block_till_done() @@ -131,7 +132,9 @@ async def test_no_alarm_code( acc = SecuritySystem(hass, hk_driver, "SecuritySystem", entity_id, 2, config) # Set from HomeKit - call_arm_home = async_mock_service(hass, DOMAIN, "alarm_arm_home") + call_arm_home = async_mock_service( + hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_home" + ) acc.char_target_state.client_update_value(0) await hass.async_block_till_done() @@ -153,42 +156,42 @@ async def test_arming(hass: HomeAssistant, hk_driver) -> None: acc.run() await hass.async_block_till_done() - hass.states.async_set(entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_AWAY) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_HOME) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_HOME) await hass.async_block_till_done() assert acc.char_target_state.value == 0 assert acc.char_current_state.value == 0 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_VACATION) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_VACATION) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_NIGHT) + hass.states.async_set(entity_id, 
AlarmControlPanelState.ARMED_NIGHT) await hass.async_block_till_done() assert acc.char_target_state.value == 2 assert acc.char_current_state.value == 2 - hass.states.async_set(entity_id, STATE_ALARM_ARMING) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMING) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 3 - hass.states.async_set(entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entity_id, AlarmControlPanelState.DISARMED) await hass.async_block_till_done() assert acc.char_target_state.value == 3 assert acc.char_current_state.value == 3 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_AWAY) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 4 @@ -309,3 +312,33 @@ async def test_supported_states(hass: HomeAssistant, hk_driver) -> None: for val in valid_target_values.values(): assert val in test_config.get("target_values") + + +@pytest.mark.parametrize( + ("state"), + [ + (None), + ("None"), + (STATE_UNKNOWN), + (STATE_UNAVAILABLE), + ], +) +async def test_handle_non_alarm_states( + hass: HomeAssistant, hk_driver, events: list[Event], state: str +) -> None: + """Test we can handle states that should not raise.""" + code = "1234" + config = {ATTR_CODE: code} + entity_id = "alarm_control_panel.test" + + hass.states.async_set(entity_id, state) + await hass.async_block_till_done() + acc = SecuritySystem(hass, hk_driver, "SecuritySystem", entity_id, 2, config) + acc.run() + await hass.async_block_till_done() + + assert acc.aid == 2 + assert acc.category == 11 # AlarmSystem + + assert acc.char_current_state.value == 3 + assert acc.char_target_state.value == 3 diff --git a/tests/components/homekit/test_type_sensors.py b/tests/components/homekit/test_type_sensors.py index 3e8e05fdcfd..2bfddf4d4c6 100644 --- a/tests/components/homekit/test_type_sensors.py +++ b/tests/components/homekit/test_type_sensors.py @@ -30,10 +30,9 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, PERCENTAGE, - STATE_HOME, - STATE_NOT_HOME, STATE_OFF, STATE_ON, + STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfTemperature, ) @@ -535,11 +534,11 @@ async def test_binary(hass: HomeAssistant, hk_driver) -> None: await hass.async_block_till_done() assert acc.char_detected.value == 0 - hass.states.async_set(entity_id, STATE_HOME, {ATTR_DEVICE_CLASS: "opening"}) + hass.states.async_set(entity_id, STATE_UNKNOWN, {ATTR_DEVICE_CLASS: "opening"}) await hass.async_block_till_done() - assert acc.char_detected.value == 1 + assert acc.char_detected.value == 0 - hass.states.async_set(entity_id, STATE_NOT_HOME, {ATTR_DEVICE_CLASS: "opening"}) + hass.states.async_set(entity_id, STATE_UNAVAILABLE, {ATTR_DEVICE_CLASS: "opening"}) await hass.async_block_till_done() assert acc.char_detected.value == 0 @@ -579,13 +578,15 @@ async def test_motion_uses_bool(hass: HomeAssistant, hk_driver) -> None: assert acc.char_detected.value is False hass.states.async_set( - entity_id, STATE_HOME, {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION} + entity_id, STATE_UNKNOWN, {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION} ) await hass.async_block_till_done() - assert 
acc.char_detected.value is True + assert acc.char_detected.value is False hass.states.async_set( - entity_id, STATE_NOT_HOME, {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION} + entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION}, ) await hass.async_block_till_done() assert acc.char_detected.value is False @@ -655,7 +656,7 @@ async def test_bad_name(hass: HomeAssistant, hk_driver) -> None: assert acc.category == 10 # Sensor assert acc.char_humidity.value == 20 - assert acc.display_name == "--Humid--" + assert acc.display_name == "Humid" async def test_empty_name(hass: HomeAssistant, hk_driver) -> None: diff --git a/tests/components/homekit/test_type_switches.py b/tests/components/homekit/test_type_switches.py index 9b708f18b8a..0d19763e4c7 100644 --- a/tests/components/homekit/test_type_switches.py +++ b/tests/components/homekit/test_type_switches.py @@ -26,8 +26,7 @@ from homeassistant.components.vacuum import ( SERVICE_START, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_CLEANING, - STATE_DOCKED, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.const import ( @@ -295,7 +294,7 @@ async def test_vacuum_set_state_with_returnhome_and_start_support( hass.states.async_set( entity_id, - STATE_CLEANING, + VacuumActivity.CLEANING, { ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.RETURN_HOME | VacuumEntityFeature.START @@ -306,7 +305,7 @@ async def test_vacuum_set_state_with_returnhome_and_start_support( hass.states.async_set( entity_id, - STATE_DOCKED, + VacuumActivity.DOCKED, { ATTR_SUPPORTED_FEATURES: VacuumEntityFeature.RETURN_HOME | VacuumEntityFeature.START diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index 8454610566b..e99db8f6234 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -921,8 +921,8 @@ async def test_thermostat_fahrenheit( await hass.async_block_till_done() assert call_set_temperature[0] assert call_set_temperature[0].data[ATTR_ENTITY_ID] == entity_id - assert call_set_temperature[0].data[ATTR_TARGET_TEMP_HIGH] == 73.5 - assert call_set_temperature[0].data[ATTR_TARGET_TEMP_LOW] == 68 + assert call_set_temperature[0].data[ATTR_TARGET_TEMP_HIGH] == 73.4 + assert call_set_temperature[0].data[ATTR_TARGET_TEMP_LOW] == 68.18 assert len(events) == 1 assert events[-1].data[ATTR_VALUE] == "CoolingThresholdTemperature to 23°C" @@ -942,8 +942,8 @@ async def test_thermostat_fahrenheit( await hass.async_block_till_done() assert call_set_temperature[1] assert call_set_temperature[1].data[ATTR_ENTITY_ID] == entity_id - assert call_set_temperature[1].data[ATTR_TARGET_TEMP_HIGH] == 73.5 - assert call_set_temperature[1].data[ATTR_TARGET_TEMP_LOW] == 71.5 + assert call_set_temperature[1].data[ATTR_TARGET_TEMP_HIGH] == 73.4 + assert call_set_temperature[1].data[ATTR_TARGET_TEMP_LOW] == 71.6 assert len(events) == 2 assert events[-1].data[ATTR_VALUE] == "HeatingThresholdTemperature to 22°C" @@ -962,7 +962,7 @@ async def test_thermostat_fahrenheit( await hass.async_block_till_done() assert call_set_temperature[2] assert call_set_temperature[2].data[ATTR_ENTITY_ID] == entity_id - assert call_set_temperature[2].data[ATTR_TEMPERATURE] == 75.0 + assert call_set_temperature[2].data[ATTR_TEMPERATURE] == 75.2 assert len(events) == 3 assert events[-1].data[ATTR_VALUE] == "TargetTemperature to 24.0°C" diff --git a/tests/components/homekit/test_util.py b/tests/components/homekit/test_util.py index 7f7e3ee0ce0..853db54b992 100644 
--- a/tests/components/homekit/test_util.py +++ b/tests/components/homekit/test_util.py @@ -159,8 +159,20 @@ def test_validate_entity_config() -> None: assert vec({"lock.demo": {}}) == { "lock.demo": {ATTR_CODE: None, CONF_LOW_BATTERY_THRESHOLD: 20} } - assert vec({"lock.demo": {ATTR_CODE: "1234"}}) == { - "lock.demo": {ATTR_CODE: "1234", CONF_LOW_BATTERY_THRESHOLD: 20} + + assert vec( + { + "lock.demo": { + ATTR_CODE: "1234", + CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", + } + } + ) == { + "lock.demo": { + ATTR_CODE: "1234", + CONF_LOW_BATTERY_THRESHOLD: 20, + CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", + } } assert vec({"media_player.demo": {}}) == { @@ -256,6 +268,7 @@ def test_cleanup_name_for_homekit() -> None: """Ensure name sanitize works as expected.""" assert cleanup_name_for_homekit("abc") == "abc" + assert cleanup_name_for_homekit("abc ") == "abc" assert cleanup_name_for_homekit("a b c") == "a b c" assert cleanup_name_for_homekit("ab_c") == "ab c" assert ( @@ -267,14 +280,16 @@ def test_cleanup_name_for_homekit() -> None: def test_temperature_to_homekit() -> None: """Test temperature conversion from HA to HomeKit.""" - assert temperature_to_homekit(20.46, UnitOfTemperature.CELSIUS) == 20.5 - assert temperature_to_homekit(92.1, UnitOfTemperature.FAHRENHEIT) == 33.4 + assert temperature_to_homekit(20.46, UnitOfTemperature.CELSIUS) == 20.46 + assert temperature_to_homekit(92.1, UnitOfTemperature.FAHRENHEIT) == pytest.approx( + 33.388888888888886 + ) def test_temperature_to_states() -> None: """Test temperature conversion from HomeKit to HA.""" assert temperature_to_states(20, UnitOfTemperature.CELSIUS) == 20.0 - assert temperature_to_states(20.2, UnitOfTemperature.FAHRENHEIT) == 68.5 + assert temperature_to_states(20.2, UnitOfTemperature.FAHRENHEIT) == 68.36 def test_density_to_air_quality() -> None: diff --git a/tests/components/homekit_controller/fixtures/u_by_moen_ts3304.json b/tests/components/homekit_controller/fixtures/u_by_moen_ts3304.json new file mode 100644 index 00000000000..a3c24eb85c3 --- /dev/null +++ b/tests/components/homekit_controller/fixtures/u_by_moen_ts3304.json @@ -0,0 +1,378 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "U by Moen-015F44", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Moen Incorporated", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "TS3304", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 6, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pr"], + "format": "string", + "value": "3.3.0", + "description": "Firmware Revision", + "maxLen": 64 + } + ] + }, + { + "iid": 8, + "type": "000000D7-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + 
"value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr"], + "format": "string", + "value": "u by moen", + "description": "Name", + "maxLen": 64 + } + ], + "linked": [11, 17, 22, 27, 32] + }, + { + "iid": 11, + "type": "000000BC-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "00000011-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr", "ev"], + "format": "float", + "value": 21.66666, + "description": "Current Temperature", + "unit": "celsius", + "minValue": 0.0, + "maxValue": 100.0, + "minStep": 0.1 + }, + { + "type": "000000B1-0000-1000-8000-0026BB765291", + "iid": 14, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Current Heater Cooler State", + "minValue": 0, + "maxValue": 3, + "minStep": 1 + }, + { + "type": "000000B2-0000-1000-8000-0026BB765291", + "iid": 15, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Target Heater Cooler State", + "minValue": 0, + "maxValue": 2, + "minStep": 1 + }, + { + "type": "00000012-0000-1000-8000-0026BB765291", + "iid": 16, + "perms": ["pr", "pw", "ev"], + "format": "float", + "value": 37.77777, + "description": "Heating Threshold Temperature", + "unit": "celsius", + "minValue": 15.55556, + "maxValue": 48.88888, + "minStep": 0.1 + } + ] + }, + { + "iid": 17, + "type": "000000D0-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 18, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "000000D2-0000-1000-8000-0026BB765291", + "iid": 19, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "In Use" + }, + { + "type": "000000D5-0000-1000-8000-0026BB765291", + "iid": 20, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Valve Type" + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 21, + "perms": ["pr"], + "format": "string", + "value": "Outlet 1", + "description": "Name", + "maxLen": 64 + } + ] + }, + { + "iid": 22, + "type": "000000D0-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 23, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "000000D2-0000-1000-8000-0026BB765291", + "iid": 24, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "In Use" + }, + { + "type": "000000D5-0000-1000-8000-0026BB765291", + "iid": 25, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Valve Type" + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 26, + "perms": ["pr"], + "format": "string", + "value": "Outlet 2", + "description": "Name", + "maxLen": 64 + } + ] + }, + { + "iid": 27, + "type": "000000D0-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 28, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + 
"type": "000000D2-0000-1000-8000-0026BB765291", + "iid": 29, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "In Use" + }, + { + "type": "000000D5-0000-1000-8000-0026BB765291", + "iid": 30, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Valve Type" + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 31, + "perms": ["pr"], + "format": "string", + "value": "Outlet 3", + "description": "Name", + "maxLen": 64 + } + ] + }, + { + "iid": 32, + "type": "000000D0-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 33, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "000000D2-0000-1000-8000-0026BB765291", + "iid": 34, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "In Use" + }, + { + "type": "000000D5-0000-1000-8000-0026BB765291", + "iid": 35, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Valve Type" + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 36, + "perms": ["pr"], + "format": "string", + "value": "Outlet 4", + "description": "Name", + "maxLen": 64 + } + ] + }, + { + "iid": 37, + "type": "00000010-0000-1000-8000-001D4B474349", + "characteristics": [ + { + "type": "00000011-0000-1000-8000-001D4B474349", + "iid": 38, + "perms": ["pr", "ev", "hd"], + "format": "uint8", + "value": 1 + }, + { + "type": "00000012-0000-1000-8000-001D4B474349", + "iid": 39, + "perms": ["pw", "hd"], + "format": "uint8" + }, + { + "type": "00000013-0000-1000-8000-001D4B474349", + "iid": 40, + "perms": ["pw", "hd"], + "format": "string", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-001D4B474349", + "iid": 41, + "perms": ["pw", "hd"], + "format": "string", + "maxLen": 64 + }, + { + "type": "00000015-0000-1000-8000-001D4B474349", + "iid": 42, + "perms": ["pw", "hd"], + "format": "string", + "maxLen": 64 + } + ] + }, + { + "iid": 43, + "type": "000000A2-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000037-0000-1000-8000-0026BB765291", + "iid": 44, + "perms": ["pr"], + "format": "string", + "value": "1.1.0", + "description": "Version", + "maxLen": 64 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index 6a0fead65d3..2bd5e7faf75 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -1474,7 +1474,7 @@ 'state': dict({ 'attributes': dict({ 'changed_by': None, - 'code_arm_required': True, + 'code_arm_required': False, 'code_format': None, 'friendly_name': 'Aqara-Hub-E1-00A0 Security System', 'supported_features': , @@ -1848,7 +1848,7 @@ 'state': dict({ 'attributes': dict({ 'changed_by': None, - 'code_arm_required': True, + 'code_arm_required': False, 'code_format': None, 'friendly_name': 'Aqara Hub-1563 Security System', 'supported_features': , @@ -11400,15 +11400,15 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 167, - 89, + 168, + 90, ), 'supported_color_modes': list([ , ]), 'supported_features': , 'xy_color': tuple( - 0.524, + 0.522, 0.387, ), }), @@ -11548,15 +11548,15 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 167, - 89, + 168, + 90, ), 'supported_color_modes': list([ , ]), 'supported_features': , 'xy_color': tuple( - 0.524, + 0.522, 0.387, ), }), @@ 
-14883,7 +14883,7 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 141, + 142, 28, ), 'supported_color_modes': list([ @@ -14892,8 +14892,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.589, - 0.385, + 0.588, + 0.386, ), }), 'entity_id': 'light.nanoleaf_strip_3b32_nanoleaf_light_strip', @@ -17758,6 +17758,397 @@ }), ]) # --- +# name: test_snapshots[u_by_moen_ts3304] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Moen Incorporated', + 'model': 'TS3304', + 'model_id': None, + 'name': 'U by Moen-015F44', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '3.3.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.u_by_moen_015f44_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'U by Moen-015F44 Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_6', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'U by Moen-015F44 Identify', + }), + 'entity_id': 'button.u_by_moen_015f44_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'target_temp_step': 1.0, + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.u_by_moen_015f44', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'U by Moen-015F44', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_11', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_temperature': 21.7, + 'friendly_name': 'U by Moen-015F44', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'supported_features': , + 'target_temp_step': 1.0, + 'temperature': None, + }), + 'entity_id': 'climate.u_by_moen_015f44', + 'state': 'off', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.u_by_moen_015f44_current_temperature', + 'has_entity_name': 
False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'U by Moen-015F44 Current Temperature', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_11_13', + 'unit_of_measurement': , + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'temperature', + 'friendly_name': 'U by Moen-015F44 Current Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'entity_id': 'sensor.u_by_moen_015f44_current_temperature', + 'state': '21.66666', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.u_by_moen_015f44', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'U by Moen-015F44', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'U by Moen-015F44', + }), + 'entity_id': 'switch.u_by_moen_015f44', + 'state': 'off', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.u_by_moen_015f44_outlet_1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'U by Moen-015F44 Outlet 1', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'valve', + 'unique_id': '00:00:00:00:00:00_1_17', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'U by Moen-015F44 Outlet 1', + 'in_use': False, + }), + 'entity_id': 'switch.u_by_moen_015f44_outlet_1', + 'state': 'off', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.u_by_moen_015f44_outlet_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'U by Moen-015F44 Outlet 2', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'valve', + 'unique_id': '00:00:00:00:00:00_1_22', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'U by Moen-015F44 Outlet 2', + 'in_use': False, + }), + 'entity_id': 'switch.u_by_moen_015f44_outlet_2', + 'state': 'off', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': 
None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.u_by_moen_015f44_outlet_3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'U by Moen-015F44 Outlet 3', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'valve', + 'unique_id': '00:00:00:00:00:00_1_27', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'U by Moen-015F44 Outlet 3', + 'in_use': False, + }), + 'entity_id': 'switch.u_by_moen_015f44_outlet_3', + 'state': 'off', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.u_by_moen_015f44_outlet_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'U by Moen-015F44 Outlet 4', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'valve', + 'unique_id': '00:00:00:00:00:00_1_32', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'U by Moen-015F44 Outlet 4', + 'in_use': False, + }), + 'entity_id': 'switch.u_by_moen_015f44_outlet_4', + 'state': 'off', + }), + }), + ]), + }), + ]) +# --- # name: test_snapshots[velux_active_netatmo_co2] list([ dict({ @@ -18379,1638 +18770,6 @@ }), ]) # --- -# name: test_snapshots[velux_somfy_venetian_blinds] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:5', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX External Cover', - 'model_id': None, - 'name': 'VELUX External Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '15.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_external_cover_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX External Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_5_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX External Cover Identify', - }), - 'entity_id': 
'button.velux_external_cover_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_external_cover_awning_blinds', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX External Cover Awning Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_5_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'friendly_name': 'VELUX External Cover Awning Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_external_cover_awning_blinds', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:8', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX External Cover', - 'model_id': None, - 'name': 'VELUX External Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_external_cover_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX External Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_8_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX External Cover Identify', - }), - 'entity_id': 'button.velux_external_cover_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_external_cover_awning_blinds_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX External Cover Awning Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_8_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 45, - 
'friendly_name': 'VELUX External Cover Awning Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_external_cover_awning_blinds_2', - 'state': 'open', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:11', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX External Cover', - 'model_id': None, - 'name': 'VELUX External Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '15.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_external_cover_identify_3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX External Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_11_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX External Cover Identify', - }), - 'entity_id': 'button.velux_external_cover_identify_3', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_external_cover_awning_blinds_3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX External Cover Awning Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_11_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'friendly_name': 'VELUX External Cover Awning Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_external_cover_awning_blinds_3', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:12', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX External Cover', - 'model_id': None, - 'name': 'VELUX External Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '15.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': 
list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_external_cover_identify_4', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX External Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_12_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX External Cover Identify', - }), - 'entity_id': 'button.velux_external_cover_identify_4', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_external_cover_awning_blinds_4', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX External Cover Awning Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_12_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'friendly_name': 'VELUX External Cover Awning Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_external_cover_awning_blinds_4', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Gateway', - 'model_id': None, - 'name': 'VELUX Gateway', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '132.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_gateway_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Gateway Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_6', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Gateway Identify', - }), - 'entity_id': 'button.velux_gateway_identify', - 'state': 'unknown', - }), - }), - ]), - }), - dict({ - 
'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:9', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Internal Cover', - 'model_id': None, - 'name': 'VELUX Internal Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_internal_cover_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_9_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Internal Cover Identify', - }), - 'entity_id': 'button.velux_internal_cover_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_internal_cover_venetian_blinds', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Venetian Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_9_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'current_tilt_position': 100, - 'friendly_name': 'VELUX Internal Cover Venetian Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_internal_cover_venetian_blinds', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:13', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Internal Cover', - 'model_id': None, - 'name': 'VELUX Internal Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 
'button', - 'entity_category': , - 'entity_id': 'button.velux_internal_cover_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_13_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Internal Cover Identify', - }), - 'entity_id': 'button.velux_internal_cover_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_internal_cover_venetian_blinds_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Venetian Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_13_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 100, - 'current_tilt_position': 0, - 'friendly_name': 'VELUX Internal Cover Venetian Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_internal_cover_venetian_blinds_2', - 'state': 'open', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:14', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Internal Cover', - 'model_id': None, - 'name': 'VELUX Internal Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_internal_cover_identify_3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_14_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Internal Cover Identify', - }), - 'entity_id': 'button.velux_internal_cover_identify_3', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': 
dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_internal_cover_venetian_blinds_3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Venetian Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_14_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'current_tilt_position': 100, - 'friendly_name': 'VELUX Internal Cover Venetian Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_internal_cover_venetian_blinds_3', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:15', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Internal Cover', - 'model_id': None, - 'name': 'VELUX Internal Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_internal_cover_identify_4', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_15_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Internal Cover Identify', - }), - 'entity_id': 'button.velux_internal_cover_identify_4', - 'state': 'unknown', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:2', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Sensor', - 'model_id': None, - 'name': 'VELUX Sensor', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '16.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_sensor_identify', - 
'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Sensor Identify', - }), - 'entity_id': 'button.velux_sensor_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Carbon Dioxide sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_14', - 'unit_of_measurement': 'ppm', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', - 'state': '1124.0', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_humidity_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Humidity sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_11', - 'unit_of_measurement': '%', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'humidity', - 'friendly_name': 'VELUX Sensor Humidity sensor', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'entity_id': 'sensor.velux_sensor_humidity_sensor', - 'state': '69.0', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_temperature_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Temperature sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_8', - 'unit_of_measurement': , - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 
'temperature', - 'friendly_name': 'VELUX Sensor Temperature sensor', - 'state_class': , - 'unit_of_measurement': , - }), - 'entity_id': 'sensor.velux_sensor_temperature_sensor', - 'state': '23.9', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:3', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Sensor', - 'model_id': None, - 'name': 'VELUX Sensor', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '16.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_sensor_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Sensor Identify', - }), - 'entity_id': 'button.velux_sensor_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Carbon Dioxide sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_14', - 'unit_of_measurement': 'ppm', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor_2', - 'state': '1074.0', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_humidity_sensor_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Humidity sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_11', - 'unit_of_measurement': '%', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'humidity', - 'friendly_name': 'VELUX Sensor Humidity sensor', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'entity_id': 'sensor.velux_sensor_humidity_sensor_2', - 'state': '64.0', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_temperature_sensor_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Temperature sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_8', - 'unit_of_measurement': , - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'temperature', - 'friendly_name': 'VELUX Sensor Temperature sensor', - 'state_class': , - 'unit_of_measurement': , - }), - 'entity_id': 'sensor.velux_sensor_temperature_sensor_2', - 'state': '24.5', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:4', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Window', - 'model_id': None, - 'name': 'VELUX Window', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_window_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_4_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Window Identify', - }), - 'entity_id': 'button.velux_window_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_window_roof_window', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Roof Window', - 'platform': 
'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_4_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'device_class': 'window', - 'friendly_name': 'VELUX Window Roof Window', - 'supported_features': , - }), - 'entity_id': 'cover.velux_window_roof_window', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:7', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Window', - 'model_id': None, - 'name': 'VELUX Window', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_window_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_7_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Window Identify', - }), - 'entity_id': 'button.velux_window_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_window_roof_window_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Roof Window', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_7_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'device_class': 'window', - 'friendly_name': 'VELUX Window Roof Window', - 'supported_features': , - }), - 'entity_id': 'cover.velux_window_roof_window_2', - 'state': 'closed', - }), - }), - ]), - }), - ]) -# --- # name: test_snapshots[velux_window] list([ dict({ diff --git a/tests/components/homekit_controller/test_alarm_control_panel.py b/tests/components/homekit_controller/test_alarm_control_panel.py index 1e9f023fc46..3ab9dc82e41 100644 --- a/tests/components/homekit_controller/test_alarm_control_panel.py +++ b/tests/components/homekit_controller/test_alarm_control_panel.py @@ -6,6 +6,7 @@ from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from 
aiohomekit.model.services import ServicesTypes +from homeassistant.components.alarm_control_panel import ATTR_CODE_ARM_REQUIRED from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -106,6 +107,7 @@ async def test_switch_read_alarm_state( state = await helper.poll_and_get_state() assert state.state == "armed_home" assert state.attributes["battery_level"] == 50 + assert state.attributes[ATTR_CODE_ARM_REQUIRED] is False await helper.async_update( ServicesTypes.SECURITY_SYSTEM, diff --git a/tests/components/homekit_controller/test_climate.py b/tests/components/homekit_controller/test_climate.py index 29033887953..62c73af9977 100644 --- a/tests/components/homekit_controller/test_climate.py +++ b/tests/components/homekit_controller/test_climate.py @@ -6,6 +6,7 @@ from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( ActivationStateValues, CharacteristicsTypes, + CurrentFanStateValues, CurrentHeaterCoolerStateValues, SwingModeValues, TargetHeaterCoolerStateValues, @@ -13,7 +14,7 @@ from aiohomekit.model.characteristics import ( from aiohomekit.model.services import ServicesTypes from homeassistant.components.climate import ( - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, @@ -66,6 +67,9 @@ def create_thermostat_service(accessory: Accessory) -> None: char = service.add_char(CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT) char.value = 0 + char = service.add_char(CharacteristicsTypes.FAN_STATE_CURRENT) + char.value = 0 + def create_thermostat_service_min_max(accessory: Accessory) -> None: """Define thermostat characteristics.""" @@ -113,7 +117,7 @@ async def test_climate_change_thermostat_state( helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -126,7 +130,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, @@ -139,7 +143,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -152,7 +156,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF}, blocking=True, @@ -165,7 +169,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "on"}, blocking=True, @@ -178,7 +182,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "auto"}, blocking=True, @@ -198,7 +202,7 @@ async def test_climate_check_min_max_values_per_mode( helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -208,7 +212,7 @@ async def test_climate_check_min_max_values_per_mode( assert 
climate_state.attributes["max_temp"] == 35 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, @@ -218,7 +222,7 @@ async def test_climate_check_min_max_values_per_mode( assert climate_state.attributes["max_temp"] == 35 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -235,7 +239,7 @@ async def test_climate_change_thermostat_temperature( helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 21}, blocking=True, @@ -248,7 +252,7 @@ async def test_climate_change_thermostat_temperature( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 25}, blocking=True, @@ -268,14 +272,14 @@ async def test_climate_change_thermostat_temperature_range( helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -303,14 +307,14 @@ async def test_climate_change_thermostat_temperature_range_iphone( helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -338,14 +342,14 @@ async def test_climate_cannot_set_thermostat_temp_range_in_wrong_mode( helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -399,7 +403,7 @@ async def test_climate_check_min_max_values_per_mode_sspa_device( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -409,7 +413,7 @@ async def test_climate_check_min_max_values_per_mode_sspa_device( assert climate_state.attributes["max_temp"] == 35 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, @@ -419,7 +423,7 @@ async def test_climate_check_min_max_values_per_mode_sspa_device( assert climate_state.attributes["max_temp"] == 35 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -438,14 +442,14 @@ async def test_climate_set_thermostat_temp_on_sspa_device( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, ) await 
hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 21}, blocking=True, @@ -458,7 +462,7 @@ async def test_climate_set_thermostat_temp_on_sspa_device( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -471,7 +475,7 @@ async def test_climate_set_thermostat_temp_on_sspa_device( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -496,7 +500,7 @@ async def test_climate_set_mode_via_temp( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -514,7 +518,7 @@ async def test_climate_set_mode_via_temp( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -539,7 +543,7 @@ async def test_climate_change_thermostat_humidity( helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HUMIDITY, {"entity_id": "climate.testdevice", "humidity": 50}, blocking=True, @@ -552,7 +556,7 @@ async def test_climate_change_thermostat_humidity( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HUMIDITY, {"entity_id": "climate.testdevice", "humidity": 45}, blocking=True, @@ -648,6 +652,18 @@ async def test_hvac_mode_vs_hvac_action( assert state.state == "heat" assert state.attributes["hvac_action"] == "idle" + # Simulate the fan running while the heat/cool is idle + await helper.async_update( + ServicesTypes.THERMOSTAT, + { + CharacteristicsTypes.FAN_STATE_CURRENT: CurrentFanStateValues.ACTIVE, + }, + ) + + state = await helper.poll_and_get_state() + assert state.state == "heat" + assert state.attributes["hvac_action"] == "fan" + # Simulate that current temperature is below target temp # Heating might be on and hvac_action currently 'heat' await helper.async_update( @@ -768,7 +784,7 @@ async def test_heater_cooler_change_thermostat_state( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -781,7 +797,7 @@ async def test_heater_cooler_change_thermostat_state( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, @@ -794,7 +810,7 @@ async def test_heater_cooler_change_thermostat_state( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -807,7 +823,7 @@ async def test_heater_cooler_change_thermostat_state( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF}, blocking=True, @@ -832,7 +848,7 @@ async def test_can_turn_on_after_off( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF}, blocking=True, @@ -845,7 +861,7 @@ async def test_can_turn_on_after_off( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -868,13 +884,13 @@ async def 
test_heater_cooler_change_thermostat_temperature( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 20}, blocking=True, @@ -887,13 +903,13 @@ async def test_heater_cooler_change_thermostat_temperature( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 26}, blocking=True, @@ -915,13 +931,13 @@ async def test_heater_cooler_change_fan_speed( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "low"}, blocking=True, @@ -933,7 +949,7 @@ async def test_heater_cooler_change_fan_speed( }, ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "medium"}, blocking=True, @@ -945,7 +961,7 @@ async def test_heater_cooler_change_fan_speed( }, ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "high"}, blocking=True, @@ -1121,7 +1137,7 @@ async def test_heater_cooler_change_swing_mode( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {"entity_id": "climate.testdevice", "swing_mode": "vertical"}, blocking=True, @@ -1134,7 +1150,7 @@ async def test_heater_cooler_change_swing_mode( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {"entity_id": "climate.testdevice", "swing_mode": "off"}, blocking=True, diff --git a/tests/components/homekit_controller/test_config_flow.py b/tests/components/homekit_controller/test_config_flow.py index 8c83d8e4b1b..4fb0a80cd26 100644 --- a/tests/components/homekit_controller/test_config_flow.py +++ b/tests/components/homekit_controller/test_config_flow.py @@ -799,7 +799,6 @@ async def test_pair_form_errors_on_finish( "title_placeholders": {"name": "TestDevice", "category": "Outlet"}, "unique_id": "00:00:00:00:00:00", "source": config_entries.SOURCE_ZEROCONF, - "pairing": True, } @@ -850,7 +849,6 @@ async def test_pair_unknown_errors(hass: HomeAssistant, controller) -> None: "title_placeholders": {"name": "TestDevice", "category": "Outlet"}, "unique_id": "00:00:00:00:00:00", "source": config_entries.SOURCE_ZEROCONF, - "pairing": True, } @@ -959,54 +957,6 @@ async def test_user_no_unpaired_devices(hass: HomeAssistant, controller) -> None assert result["reason"] == "no_devices" -async def test_unignore_works(hass: HomeAssistant, controller) -> None: - """Test rediscovery triggered disovers work.""" - device = setup_mock_accessory(controller) - - # Device is unignored - result = await hass.config_entries.flow.async_init( - "homekit_controller", - context={"source": config_entries.SOURCE_UNIGNORE}, - data={"unique_id": device.description.id}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pair" - assert get_flow_context(hass, result) == { - "title_placeholders": {"name": "TestDevice", 
"category": "Other"}, - "unique_id": "00:00:00:00:00:00", - "source": config_entries.SOURCE_UNIGNORE, - } - - # User initiates pairing by clicking on 'configure' - device enters pairing mode and displays code - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pair" - - # Pairing finalized - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"pairing_code": "111-22-333"} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Koogeek-LS1-20833F" - - -async def test_unignore_ignores_missing_devices( - hass: HomeAssistant, controller -) -> None: - """Test rediscovery triggered disovers handle devices that have gone away.""" - setup_mock_accessory(controller) - - # Device is unignored - result = await hass.config_entries.flow.async_init( - "homekit_controller", - context={"source": config_entries.SOURCE_UNIGNORE}, - data={"unique_id": "00:00:00:00:00:01"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "accessory_not_found_error" - - async def test_discovery_dismiss_existing_flow_on_paired( hass: HomeAssistant, controller ) -> None: diff --git a/tests/components/homekit_controller/test_humidifier.py b/tests/components/homekit_controller/test_humidifier.py index 4b429959c67..07bdb8a2e38 100644 --- a/tests/components/homekit_controller/test_humidifier.py +++ b/tests/components/homekit_controller/test_humidifier.py @@ -6,7 +6,11 @@ from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import Service, ServicesTypes -from homeassistant.components.humidifier import DOMAIN, MODE_AUTO, MODE_NORMAL +from homeassistant.components.humidifier import ( + DOMAIN as HUMIDIFIER_DOMAIN, + MODE_AUTO, + MODE_NORMAL, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -74,7 +78,7 @@ async def test_humidifier_active_state( helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) await hass.services.async_call( - DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True + HUMIDIFIER_DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True ) helper.async_assert_service_values( @@ -83,7 +87,7 @@ async def test_humidifier_active_state( ) await hass.services.async_call( - DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True + HUMIDIFIER_DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True ) helper.async_assert_service_values( @@ -101,7 +105,7 @@ async def test_dehumidifier_active_state( ) await hass.services.async_call( - DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True + HUMIDIFIER_DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True ) helper.async_assert_service_values( @@ -110,7 +114,7 @@ async def test_dehumidifier_active_state( ) await hass.services.async_call( - DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True + HUMIDIFIER_DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True ) helper.async_assert_service_values( @@ -208,7 +212,7 @@ async def test_humidifier_set_humidity( helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, "set_humidity", {"entity_id": helper.entity_id, "humidity": 20}, blocking=True, @@ -228,7 +232,7 @@ async def 
test_dehumidifier_set_humidity( ) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, "set_humidity", {"entity_id": helper.entity_id, "humidity": 20}, blocking=True, @@ -246,7 +250,7 @@ async def test_humidifier_set_mode( helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, "set_mode", {"entity_id": helper.entity_id, "mode": MODE_AUTO}, blocking=True, @@ -260,7 +264,7 @@ async def test_humidifier_set_mode( ) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, "set_mode", {"entity_id": helper.entity_id, "mode": MODE_NORMAL}, blocking=True, @@ -283,7 +287,7 @@ async def test_dehumidifier_set_mode( ) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, "set_mode", {"entity_id": helper.entity_id, "mode": MODE_AUTO}, blocking=True, @@ -297,7 +301,7 @@ async def test_dehumidifier_set_mode( ) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, "set_mode", {"entity_id": helper.entity_id, "mode": MODE_NORMAL}, blocking=True, diff --git a/tests/components/homekit_controller/test_init.py b/tests/components/homekit_controller/test_init.py index 2a017b8d592..f74e8ea994e 100644 --- a/tests/components/homekit_controller/test_init.py +++ b/tests/components/homekit_controller/test_init.py @@ -289,6 +289,7 @@ async def test_snapshots( entry.pop("device_id", None) entry.pop("created_at", None) entry.pop("modified_at", None) + entry.pop("_cache", None) entities.append({"entry": entry, "state": state_dict}) @@ -297,6 +298,8 @@ async def test_snapshots( device_dict.pop("via_device_id", None) device_dict.pop("created_at", None) device_dict.pop("modified_at", None) + device_dict.pop("_cache", None) + devices.append({"device": device_dict, "entities": entities}) assert snapshot == devices diff --git a/tests/components/homekit_controller/test_switch.py b/tests/components/homekit_controller/test_switch.py index a2586f7355e..d841323bd59 100644 --- a/tests/components/homekit_controller/test_switch.py +++ b/tests/components/homekit_controller/test_switch.py @@ -27,6 +27,14 @@ def create_switch_service(accessory: Accessory) -> None: outlet_in_use.value = False + +def create_faucet_service(accessory: Accessory) -> None: + """Define faucet characteristics.""" + service = accessory.add_service(ServicesTypes.FAUCET) + + active_char = service.add_char(CharacteristicsTypes.ACTIVE) + active_char.value = False + + def create_valve_service(accessory: Accessory) -> None: """Define valve characteristics.""" service = accessory.add_service(ServicesTypes.VALVE) @@ -115,6 +123,58 @@ async def test_switch_read_outlet_state( assert switch_1.attributes["outlet_in_use"] is True + +async def test_faucet_change_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that we can turn a HomeKit faucet on and off again.""" + helper = await setup_test_component(hass, get_next_aid(), create_faucet_service) + + await hass.services.async_call( + "switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True + ) + helper.async_assert_service_values( + ServicesTypes.FAUCET, + { + CharacteristicsTypes.ACTIVE: 1, + }, + ) + + await hass.services.async_call( + "switch", "turn_off", {"entity_id": "switch.testdevice"}, blocking=True + ) + helper.async_assert_service_values( + ServicesTypes.FAUCET, + { + CharacteristicsTypes.ACTIVE: 0, + }, + ) + + +async def test_faucet_read_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that we can
read the state of a HomeKit faucet accessory.""" + helper = await setup_test_component(hass, get_next_aid(), create_faucet_service) + + # Initial state is that the switch is off + switch_1 = await helper.poll_and_get_state() + assert switch_1.state == "off" + + # Simulate that someone switched on the device in the real world not via HA + switch_1 = await helper.async_update( + ServicesTypes.FAUCET, + {CharacteristicsTypes.ACTIVE: True}, + ) + assert switch_1.state == "on" + + # Simulate that device switched off in the real world not via HA + switch_1 = await helper.async_update( + ServicesTypes.FAUCET, + {CharacteristicsTypes.ACTIVE: False}, + ) + assert switch_1.state == "off" + + async def test_valve_change_active_state( hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: diff --git a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json index e67ffd78467..7a3d3f06b09 100644 --- a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json +++ b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json @@ -1805,93 +1805,164 @@ "updateState": "UP_TO_DATE" }, "3014F7110000000000000049": { - "availableFirmwareVersion": "1.0.8", + "availableFirmwareVersion": "1.4.8", "connectionType": "HMIP_RF", - "firmwareVersion": "1.0.8", - "firmwareVersionInteger": 65544, + "deviceArchetype": "HMIP", + "firmwareVersion": "1.4.8", + "firmwareVersionInteger": 66568, "functionalChannels": { "0": { + "busConfigMismatch": null, "coProFaulty": false, "coProRestartNeeded": false, "coProUpdateFailure": false, - "configPending": false, + "configPending": true, + "controlsMountingOrientation": null, "coolingEmergencyValue": 0.0, + "daliBusState": null, + "defaultLinkedGroup": [], + "deviceCommunicationError": null, + "deviceDriveError": null, + "deviceDriveModeError": null, "deviceId": "3014F7110000000000000049", + "deviceOperationMode": null, "deviceOverheated": false, "deviceOverloaded": false, + "devicePowerFailureDetected": false, "deviceUndervoltage": false, + "displayContrast": null, "dutyCycle": false, "frostProtectionTemperature": 8.0, "functionalChannelType": "DEVICE_BASE_FLOOR_HEATING", "groupIndex": 0, - "groups": [], - "heatingEmergencyValue": 0.25, + "groups": ["00000000-0000-0000-0000-000000000005"], + "heatingEmergencyValue": 0.05, "index": 0, "label": "", + "lockJammed": null, "lowBat": null, "minimumFloorHeatingValvePosition": 0.0, - "pulseWidthModulationAtLowFloorHeatingValvePositionEnabled": true, + "mountingOrientation": null, + "multicastRoutingEnabled": false, + "particulateMatterSensorCommunicationError": null, + "particulateMatterSensorError": null, + "powerShortCircuit": null, + "profilePeriodLimitReached": null, + "pulseWidthModulationAtLowFloorHeatingValvePositionEnabled": false, "routerModuleEnabled": false, "routerModuleSupported": false, - "rssiDeviceValue": -55, + "rssiDeviceValue": -83, "rssiPeerValue": null, + "sensorCommunicationError": null, + "sensorError": null, + "shortCircuitDataLine": null, "supportedOptionalFeatures": { + "IFeatureBusConfigMismatch": false, "IFeatureDeviceCoProError": false, "IFeatureDeviceCoProRestart": false, "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceCommunicationError": false, + "IFeatureDeviceDaliBusError": false, + "IFeatureDeviceDriveError": false, + "IFeatureDeviceDriveModeError": false, + "IFeatureDeviceIdentify": false, "IFeatureDeviceOverheated": false, "IFeatureDeviceOverloaded": false, +
"IFeatureDeviceParticulateMatterSensorCommunicationError": false, + "IFeatureDeviceParticulateMatterSensorError": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceSensorCommunicationError": false, + "IFeatureDeviceSensorError": false, + "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, + "IFeatureDeviceTemperatureHumiditySensorError": false, "IFeatureDeviceTemperatureOutOfRange": false, "IFeatureDeviceUndervoltage": false, "IFeatureMinimumFloorHeatingValvePosition": true, - "IFeaturePulseWidthModulationAtLowFloorHeatingValvePosition": true + "IFeatureMulticastRouter": false, + "IFeaturePowerShortCircuit": false, + "IFeatureProfilePeriodLimit": false, + "IFeaturePulseWidthModulationAtLowFloorHeatingValvePosition": true, + "IFeatureRssiValue": true, + "IFeatureShortCircuitDataLine": false, + "IOptionalFeatureDefaultLinkedGroup": false, + "IOptionalFeatureDeviceErrorLockJammed": false, + "IOptionalFeatureDeviceOperationMode": false, + "IOptionalFeatureDisplayContrast": false, + "IOptionalFeatureDutyCycle": true, + "IOptionalFeatureLowBat": false, + "IOptionalFeatureMountingOrientation": false }, + "temperatureHumiditySensorCommunicationError": null, + "temperatureHumiditySensorError": null, "temperatureOutOfRange": false, "unreach": false, "valveProtectionDuration": 5, "valveProtectionSwitchingInterval": 14 }, "1": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 1, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000022", + "00000000-0000-0000-0000-000000000023" + ], "index": 1, - "label": "", + "label": "Heizkreislauf (1) OG Bad r", + "valvePosition": 0.475, "valveState": "ADAPTION_DONE" }, "10": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 10, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000030", + "00000000-0000-0000-0000-000000000031" + ], "index": 10, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (10) OG AZ rechts", + "valvePosition": 0.385, + "valveState": "ADAPTION_DONE" }, "11": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 11, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000030", + "00000000-0000-0000-0000-000000000031" + ], "index": 11, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (11) OG AZ links", + "valvePosition": 0.385, + "valveState": "ADAPTION_DONE" }, "12": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 12, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000022", + "00000000-0000-0000-0000-000000000023" + ], "index": 12, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (12) OG Bad Heizk\u00f6rper", + "valvePosition": 0.385, + "valveState": "ADAPTION_DONE" }, "13": { "deviceId": "3014F7110000000000000049", "functionalChannelType": "HEAT_DEMAND_CHANNEL", "groupIndex": 0, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000058", + "00000000-0000-0000-0000-000000000059" + ], "index": 13, "label": "" }, @@ -1899,7 +1970,7 @@ "deviceId": "3014F7110000000000000049", 
"functionalChannelType": "DEHUMIDIFIER_DEMAND_CHANNEL", "groupIndex": 0, - "groups": [], + "groups": ["00000000-0000-0000-0000-000000000060"], "index": 14, "label": "" }, @@ -1907,89 +1978,136 @@ "deviceId": "3014F7110000000000000049", "functionalChannelType": "CHANGE_OVER_CHANNEL", "groupIndex": 0, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000061", + "00000000-0000-0000-0000-000000000062", + "00000000-0000-0000-0000-000000000063", + "00000000-0000-0000-0000-000000000064" + ], "index": 15, "label": "" }, "2": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 2, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000022", + "00000000-0000-0000-0000-000000000023" + ], "index": 2, - "label": "", + "label": "Heizkreislauf (2) OG Bad l", + "valvePosition": 0.385, "valveState": "ADAPTION_DONE" }, "3": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 3, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 3, - "label": "", + "label": "Heizkreislauf (3) OG WZ rechts", + "valvePosition": 0.0, "valveState": "ADAPTION_DONE" }, "4": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 4, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 4, - "label": "", + "label": "Heizkreislauf (4) OG WZ Mitte rechts", + "valvePosition": 0.0, "valveState": "ADAPTION_DONE" }, "5": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 5, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 5, - "label": "", + "label": "Heizkreislauf (5) OG WZ Mitte links", + "valvePosition": 0.0, "valveState": "ADAPTION_DONE" }, "6": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 6, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 6, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (6) OG WZ links", + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" }, "7": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 7, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 7, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (7) OG K\u00fcche", + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" }, "8": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 8, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000026", + "00000000-0000-0000-0000-000000000027" + ], "index": 8, - "label": "", - "valveState": 
"ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (8) OG SZ rechts", + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" }, "9": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 9, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000026", + "00000000-0000-0000-0000-000000000027" + ], "index": 9, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (9) OG SZ links", + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" } }, "homeId": "00000000-0000-0000-0000-000000000001", "id": "3014F7110000000000000049", - "label": "Fu\u00dfbodenheizungsaktor OG motorisch", - "lastStatusUpdate": 1577486092047, + "label": "Fu\u00dfbodenheizungsaktor", + "lastStatusUpdate": 1704379652281, "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manuallyUpdateForced": false, "manufacturerCode": 1, + "measuredAttributes": {}, "modelId": 365, "modelType": "HmIP-FALMOT-C12", "oem": "eQ-3", @@ -3237,6 +3355,173 @@ "type": "BRAND_SWITCH_NOTIFICATION_LIGHT", "updateState": "UP_TO_DATE" }, + "3014F711000000000000BSL2": { + "availableFirmwareVersion": "2.0.2", + "connectionType": "HMIP_RF", + "deviceArchetype": "HMIP", + "firmwareVersion": "2.0.2", + "firmwareVersionInteger": 131074, + "functionalChannels": { + "0": { + "busConfigMismatch": null, + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "controlsMountingOrientation": null, + "daliBusState": null, + "defaultLinkedGroup": [], + "deviceCommunicationError": null, + "deviceDriveError": null, + "deviceDriveModeError": null, + "deviceId": "3014F711000000000000BSL2", + "deviceOperationMode": null, + "deviceOverheated": false, + "deviceOverloaded": false, + "devicePowerFailureDetected": false, + "deviceUndervoltage": false, + "displayContrast": null, + "dutyCycle": false, + "functionalChannelType": "DEVICE_BASE", + "groupIndex": 0, + "groups": ["00000000-0000-0000-0000-000000000007"], + "index": 0, + "label": "", + "lockJammed": null, + "lowBat": null, + "mountingOrientation": null, + "multicastRoutingEnabled": false, + "particulateMatterSensorCommunicationError": null, + "particulateMatterSensorError": null, + "powerShortCircuit": null, + "profilePeriodLimitReached": null, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -74, + "rssiPeerValue": -75, + "sensorCommunicationError": null, + "sensorError": null, + "shortCircuitDataLine": null, + "supportedOptionalFeatures": { + "IFeatureBusConfigMismatch": false, + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceCommunicationError": false, + "IFeatureDeviceDaliBusError": false, + "IFeatureDeviceDriveError": false, + "IFeatureDeviceDriveModeError": false, + "IFeatureDeviceIdentify": true, + "IFeatureDeviceOverheated": true, + "IFeatureDeviceOverloaded": false, + "IFeatureDeviceParticulateMatterSensorCommunicationError": false, + "IFeatureDeviceParticulateMatterSensorError": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceSensorCommunicationError": false, + "IFeatureDeviceSensorError": false, + "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, + "IFeatureDeviceTemperatureHumiditySensorError": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false, + "IFeatureMulticastRouter": false, + 
"IFeaturePowerShortCircuit": false, + "IFeatureProfilePeriodLimit": true, + "IFeatureRssiValue": true, + "IFeatureShortCircuitDataLine": false, + "IOptionalFeatureDefaultLinkedGroup": false, + "IOptionalFeatureDeviceErrorLockJammed": false, + "IOptionalFeatureDeviceOperationMode": false, + "IOptionalFeatureDisplayContrast": false, + "IOptionalFeatureDutyCycle": true, + "IOptionalFeatureLowBat": false, + "IOptionalFeatureMountingOrientation": false + }, + "temperatureHumiditySensorCommunicationError": null, + "temperatureHumiditySensorError": null, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "channelRole": null, + "deviceId": "3014F711000000000000BSL2", + "functionalChannelType": "SWITCH_CHANNEL", + "groupIndex": 1, + "groups": [], + "index": 1, + "internalLinkConfiguration": { + "firstInputAction": "OFF", + "internalLinkConfigurationType": "DOUBLE_INPUT_SWITCH", + "longPressOnTimeEnabled": false, + "onTime": 111600.0, + "secondInputAction": "ON" + }, + "label": "", + "on": false, + "powerUpSwitchState": "PERMANENT_OFF", + "profileMode": "AUTOMATIC", + "supportedOptionalFeatures": { + "IFeatureAccessAuthorizationActuatorChannel": false, + "IFeatureGarageGroupActuatorChannel": false, + "IFeatureLightGroupActuatorChannel": false, + "IFeatureLightProfileActuatorChannel": false, + "IOptionalFeatureInternalLinkConfiguration": true, + "IOptionalFeaturePowerUpSwitchState": true + }, + "userDesiredProfileMode": "AUTOMATIC" + }, + "2": { + "channelRole": "NOTIFICATION_LIGHT_DIMMING_ACTUATOR", + "deviceId": "3014F711000000000000BSL2", + "dimLevel": 0.0, + "functionalChannelType": "NOTIFICATION_LIGHT_CHANNEL", + "groupIndex": 2, + "groups": ["00000000-0000-0000-0000-000000000021"], + "index": 2, + "label": "Led Unten", + "on": false, + "opticalSignalBehaviour": "BLINKING_MIDDLE", + "profileMode": "AUTOMATIC", + "simpleRGBColorState": "TURQUOISE", + "supportedOptionalFeatures": { + "IFeatureOpticalSignalBehaviourState": true + }, + "userDesiredProfileMode": "AUTOMATIC" + }, + "3": { + "channelRole": "NOTIFICATION_LIGHT_DIMMING_ACTUATOR", + "deviceId": "3014F711000000000000BSL2", + "dimLevel": 0.25, + "functionalChannelType": "NOTIFICATION_LIGHT_CHANNEL", + "groupIndex": 3, + "groups": ["00000000-0000-0000-0000-000000000021"], + "index": 3, + "label": "Led Oben", + "on": true, + "opticalSignalBehaviour": "BLINKING_MIDDLE", + "profileMode": "AUTOMATIC", + "simpleRGBColorState": "GREEN", + "supportedOptionalFeatures": { + "IFeatureOpticalSignalBehaviourState": true + }, + "userDesiredProfileMode": "AUTOMATIC" + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F711000000000000BSL2", + "label": "BSL2", + "lastStatusUpdate": 1714910246419, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manuallyUpdateForced": false, + "manufacturerCode": 1, + "measuredAttributes": {}, + "modelId": 360, + "modelType": "HmIP-BSL", + "oem": "eQ-3", + "permanentlyReachable": true, + "serializedGlobalTradeItemNumber": "3014F711000000000000BSL2", + "type": "BRAND_SWITCH_NOTIFICATION_LIGHT", + "updateState": "UP_TO_DATE" + }, "3014F711SLO0000000000026": { "availableFirmwareVersion": "0.0.0", "connectionType": "HMIP_RF", diff --git a/tests/components/homematicip_cloud/helper.py b/tests/components/homematicip_cloud/helper.py index 229b3c20251..80081123519 100644 --- a/tests/components/homematicip_cloud/helper.py +++ b/tests/components/homematicip_cloud/helper.py @@ -16,7 +16,7 @@ from homematicip.base.homematicip_object import HomeMaticIPObject from homematicip.home import Home 
from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from homeassistant.components.homematicip_cloud.generic_entity import ( +from homeassistant.components.homematicip_cloud.entity import ( ATTR_IS_GROUP, ATTR_MODEL_TYPE, ) @@ -186,6 +186,10 @@ class HomeTemplate(Home): def _generate_mocks(self): """Generate mocks for groups and devices.""" self.devices = [_get_mock(device) for device in self.devices] + for device in self.devices: + device.functionalChannels = [ + _get_mock(ch) for ch in device.functionalChannels + ] self.groups = [_get_mock(group) for group in self.groups] diff --git a/tests/components/homematicip_cloud/test_alarm_control_panel.py b/tests/components/homematicip_cloud/test_alarm_control_panel.py index cf27aed7a84..094308862f6 100644 --- a/tests/components/homematicip_cloud/test_alarm_control_panel.py +++ b/tests/components/homematicip_cloud/test_alarm_control_panel.py @@ -4,14 +4,9 @@ from homematicip.aio.home import AsyncHome from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + AlarmControlPanelState, ) from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -83,7 +78,7 @@ async def test_hmip_alarm_control_panel( await _async_manipulate_security_zones( hass, home, internal_active=True, external_active=True ) - assert hass.states.get(entity_id).state is STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await hass.services.async_call( "alarm_control_panel", "alarm_arm_home", {"entity_id": entity_id}, blocking=True @@ -91,7 +86,7 @@ async def test_hmip_alarm_control_panel( assert home.mock_calls[-1][0] == "set_security_zones_activation" assert home.mock_calls[-1][1] == (False, True) await _async_manipulate_security_zones(hass, home, external_active=True) - assert hass.states.get(entity_id).state is STATE_ALARM_ARMED_HOME + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME await hass.services.async_call( "alarm_control_panel", "alarm_disarm", {"entity_id": entity_id}, blocking=True @@ -99,7 +94,7 @@ async def test_hmip_alarm_control_panel( assert home.mock_calls[-1][0] == "set_security_zones_activation" assert home.mock_calls[-1][1] == (False, False) await _async_manipulate_security_zones(hass, home) - assert hass.states.get(entity_id).state is STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( "alarm_control_panel", "alarm_arm_away", {"entity_id": entity_id}, blocking=True @@ -109,7 +104,7 @@ async def test_hmip_alarm_control_panel( await _async_manipulate_security_zones( hass, home, internal_active=True, external_active=True, alarm_triggered=True ) - assert hass.states.get(entity_id).state is STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED await hass.services.async_call( "alarm_control_panel", "alarm_arm_home", {"entity_id": entity_id}, blocking=True @@ -119,4 +114,4 @@ async def test_hmip_alarm_control_panel( await _async_manipulate_security_zones( hass, home, external_active=True, alarm_triggered=True ) - assert hass.states.get(entity_id).state is STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == 
AlarmControlPanelState.TRIGGERED diff --git a/tests/components/homematicip_cloud/test_binary_sensor.py b/tests/components/homematicip_cloud/test_binary_sensor.py index d6ea33ed5fb..02e96b10fe8 100644 --- a/tests/components/homematicip_cloud/test_binary_sensor.py +++ b/tests/components/homematicip_cloud/test_binary_sensor.py @@ -16,7 +16,7 @@ from homeassistant.components.homematicip_cloud.binary_sensor import ( ATTR_WATER_LEVEL_DETECTED, ATTR_WINDOW_STATE, ) -from homeassistant.components.homematicip_cloud.generic_entity import ( +from homeassistant.components.homematicip_cloud.entity import ( ATTR_EVENT_DELAY, ATTR_GROUP_MEMBER_UNREACHABLE, ATTR_LOW_BATTERY, diff --git a/tests/components/homematicip_cloud/test_climate.py b/tests/components/homematicip_cloud/test_climate.py index c059ed4b744..d4711440288 100644 --- a/tests/components/homematicip_cloud/test_climate.py +++ b/tests/components/homematicip_cloud/test_climate.py @@ -141,13 +141,6 @@ async def test_hmip_heating_group_heat( ha_state = hass.states.get(entity_id) assert ha_state.attributes[ATTR_PRESET_MODE] == "STD" - # Not required for hmip, but a possibility to send no temperature. - await hass.services.async_call( - "climate", - "set_temperature", - {"entity_id": entity_id, "target_temp_low": 10, "target_temp_high": 10}, - blocking=True, - ) # No new service call should be in mock_calls. assert len(hmip_device.mock_calls) == service_call_counter + 12 # Only fire event from last async_manipulate_test_data available. diff --git a/tests/components/homematicip_cloud/test_cover.py b/tests/components/homematicip_cloud/test_cover.py index 4d32ae547ef..bcafa689172 100644 --- a/tests/components/homematicip_cloud/test_cover.py +++ b/tests/components/homematicip_cloud/test_cover.py @@ -6,9 +6,10 @@ from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, ATTR_CURRENT_TILT_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNKNOWN +from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -50,7 +51,7 @@ async def test_hmip_cover_shutter( assert hmip_device.mock_calls[-1][1] == (0, 1) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -64,7 +65,7 @@ async def test_hmip_cover_shutter( assert hmip_device.mock_calls[-1][1] == (0.5, 1) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -75,7 +76,7 @@ async def test_hmip_cover_shutter( assert hmip_device.mock_calls[-1][1] == (1, 1) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -105,7 +106,7 @@ async def test_hmip_cover_slats( hass, mock_hap, entity_id, entity_name, device_model ) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert 
ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 service_call_counter = len(hmip_device.mock_calls) @@ -119,7 +120,7 @@ async def test_hmip_cover_slats( await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -134,7 +135,7 @@ async def test_hmip_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 1, "slatsLevel": 0.5} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -146,7 +147,7 @@ async def test_hmip_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 1, "slatsLevel": 1} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -185,7 +186,7 @@ async def test_hmip_multi_cover_slats( await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 service_call_counter = len(hmip_device.mock_calls) @@ -199,7 +200,7 @@ async def test_hmip_multi_cover_slats( await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0, channel=4) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -214,7 +215,7 @@ async def test_hmip_multi_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 4, "slatsLevel": 0.5} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -226,7 +227,7 @@ async def test_hmip_multi_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 4, "slatsLevel": 1} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -261,7 +262,7 @@ async def test_hmip_blind_module( hass, mock_hap, entity_id, entity_name, device_model ) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 5 assert 
ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 service_call_counter = len(hmip_device.mock_calls) @@ -287,7 +288,7 @@ async def test_hmip_blind_module( assert hmip_device.mock_calls[-1][2] == {"primaryShadingLevel": 0} ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -310,7 +311,7 @@ async def test_hmip_blind_module( assert hmip_device.mock_calls[-1][0] == "set_primary_shading_level" assert hmip_device.mock_calls[-1][2] == {"primaryShadingLevel": 0.5} ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -331,7 +332,7 @@ async def test_hmip_blind_module( } ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -385,7 +386,7 @@ async def test_hmip_garage_door_tormatic( assert hmip_device.mock_calls[-1][1] == (DoorCommand.OPEN,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.OPEN) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -396,7 +397,7 @@ async def test_hmip_garage_door_tormatic( assert hmip_device.mock_calls[-1][1] == (DoorCommand.CLOSE,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.CLOSED) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -434,7 +435,7 @@ async def test_hmip_garage_door_hoermann( assert hmip_device.mock_calls[-1][1] == (DoorCommand.OPEN,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.OPEN) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -445,7 +446,7 @@ async def test_hmip_garage_door_hoermann( assert hmip_device.mock_calls[-1][1] == (DoorCommand.CLOSE,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.CLOSED) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -481,7 +482,7 @@ async def test_hmip_cover_shutter_group( assert hmip_device.mock_calls[-1][1] == (0,) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -495,7 +496,7 @@ async def test_hmip_cover_shutter_group( assert hmip_device.mock_calls[-1][1] == (0.5,) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert 
ha_state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -506,7 +507,7 @@ async def test_hmip_cover_shutter_group( assert hmip_device.mock_calls[-1][1] == (1,) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -536,7 +537,7 @@ async def test_hmip_cover_slats_group( await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 service_call_counter = len(hmip_device.mock_calls) @@ -557,7 +558,7 @@ async def test_hmip_cover_slats_group( await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -572,7 +573,7 @@ async def test_hmip_cover_slats_group( assert hmip_device.mock_calls[-1][1] == (0.5,) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -584,7 +585,7 @@ async def test_hmip_cover_slats_group( assert hmip_device.mock_calls[-1][1] == (1,) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 25fb31c3c62..5b4993f7314 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -28,7 +28,7 @@ async def test_hmip_load_all_supported_devices( test_devices=None, test_groups=None ) - assert len(mock_hap.hmip_device_by_entity_id) == 293 + assert len(mock_hap.hmip_device_by_entity_id) == 308 async def test_hmip_remove_device( diff --git a/tests/components/homematicip_cloud/test_light.py b/tests/components/homematicip_cloud/test_light.py index 18d490c3786..c0717e81e0d 100644 --- a/tests/components/homematicip_cloud/test_light.py +++ b/tests/components/homematicip_cloud/test_light.py @@ -1,12 +1,14 @@ """Tests for HomematicIP Cloud light.""" -from homematicip.base.enums import RGBColorState +from homematicip.base.enums import OpticalSignalBehaviour, RGBColorState from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_NAME, + ATTR_EFFECT, + ATTR_HS_COLOR, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, ColorMode, @@ -173,6 +175,101 @@ async def test_hmip_notification_light( assert not ha_state.attributes.get(ATTR_BRIGHTNESS) +async def test_hmip_notification_light_2( + hass: HomeAssistant, 
default_mock_hap_factory: HomeFactory +) -> None: + """Test HomematicipNotificationLight.""" + entity_id = "light.led_oben" + entity_name = "Led Oben" + device_model = "HmIP-BSL" + mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_devices=["BSL2"]) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == STATE_ON + assert ha_state.attributes[ATTR_EFFECT] == "BLINKING_MIDDLE" + + functional_channel = hmip_device.functionalChannels[3] + service_call_counter = len(functional_channel.mock_calls) + + # Send all color via service call. + await hass.services.async_call( + "light", + "turn_on", + {"entity_id": entity_id, ATTR_HS_COLOR: [240.0, 100.0], ATTR_BRIGHTNESS: 128}, + blocking=True, + ) + assert functional_channel.mock_calls[-1][0] == "async_set_optical_signal" + assert functional_channel.mock_calls[-1][2] == { + "opticalSignalBehaviour": OpticalSignalBehaviour.BLINKING_MIDDLE, + "rgb": RGBColorState.BLUE, + "dimLevel": 0.5, + } + assert service_call_counter + 1 == len(functional_channel.mock_calls) + + +async def test_hmip_notification_light_2_without_brightness_and_light( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: + """Test HomematicipNotificationLight.""" + entity_id = "light.led_oben" + entity_name = "Led Oben" + device_model = "HmIP-BSL" + mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_devices=["BSL2"]) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + color_before = ha_state.attributes["color_name"] + + functional_channel = hmip_device.functionalChannels[3] + service_call_counter = len(functional_channel.mock_calls) + + # Send all color via service call. + await hass.services.async_call( + "light", + "turn_on", + {"entity_id": entity_id, ATTR_EFFECT: OpticalSignalBehaviour.FLASH_MIDDLE}, + blocking=True, + ) + assert functional_channel.mock_calls[-1][0] == "async_set_optical_signal" + assert functional_channel.mock_calls[-1][2] == { + "opticalSignalBehaviour": OpticalSignalBehaviour.FLASH_MIDDLE, + "rgb": color_before, + "dimLevel": 1, + } + assert service_call_counter + 1 == len(functional_channel.mock_calls) + + +async def test_hmip_notification_light_2_turn_off( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: + """Test HomematicipNotificationLight.""" + entity_id = "light.led_oben" + entity_name = "Led Oben" + device_model = "HmIP-BSL" + mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_devices=["BSL2"]) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + functional_channel = hmip_device.functionalChannels[3] + service_call_counter = len(functional_channel.mock_calls) + + # Send all color via service call. 
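# Note: the two HmIP-BSL tests above pin the expected notification-channel call: an
# hs_color of [240.0, 100.0] with brightness 128 is sent as RGBColorState.BLUE with
# dimLevel 0.5, and turn_on without brightness keeps the previous color and defaults
# dimLevel to 1. The sketch below only illustrates such a mapping, with assumed hue
# buckets and rounding; it is not the integration's actual implementation.

from homematicip.base.enums import RGBColorState

# Assumed hue buckets (degrees) for the RGBColorState values; illustrative only.
_HUE_TO_RGB = [
    (30, RGBColorState.RED),
    (90, RGBColorState.YELLOW),
    (150, RGBColorState.GREEN),
    (210, RGBColorState.TURQUOISE),
    (270, RGBColorState.BLUE),
    (330, RGBColorState.PURPLE),
    (360, RGBColorState.RED),
]


def hs_to_optical_signal(hs_color: tuple[float, float], brightness: int | None = None):
    """Translate an hs_color/brightness pair into the (rgb, dim_level) asserted above."""
    hue = hs_color[0] % 360
    rgb = next(state for limit, state in _HUE_TO_RGB if hue < limit)
    # brightness 128 -> 0.5, missing brightness -> full level, as the tests expect.
    dim_level = round(brightness / 255, 1) if brightness is not None else 1
    return rgb, dim_level


assert hs_to_optical_signal((240.0, 100.0), 128) == (RGBColorState.BLUE, 0.5)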
+ await hass.services.async_call( + "light", + "turn_off", + {"entity_id": entity_id}, + blocking=True, + ) + assert functional_channel.mock_calls[-1][0] == "async_turn_off" + assert service_call_counter + 1 == len(functional_channel.mock_calls) + + async def test_hmip_dimmer( hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: diff --git a/tests/components/homematicip_cloud/test_lock.py b/tests/components/homematicip_cloud/test_lock.py index 7035cf979c4..cb8a0188639 100644 --- a/tests/components/homematicip_cloud/test_lock.py +++ b/tests/components/homematicip_cloud/test_lock.py @@ -2,15 +2,14 @@ from unittest.mock import patch -from homematicip.base.enums import LockState, MotorState +from homematicip.base.enums import LockState as HomematicLockState, MotorState import pytest from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN from homeassistant.components.lock import ( - DOMAIN, - STATE_LOCKING, - STATE_UNLOCKING, + DOMAIN as LOCK_DOMAIN, LockEntityFeature, + LockState, ) from homeassistant.const import ATTR_SUPPORTED_FEATURES from homeassistant.core import HomeAssistant @@ -23,7 +22,7 @@ from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entit async def test_manually_configured_platform(hass: HomeAssistant) -> None: """Test that we do not set up an access point.""" assert await async_setup_component( - hass, DOMAIN, {DOMAIN: {"platform": HMIPC_DOMAIN}} + hass, LOCK_DOMAIN, {LOCK_DOMAIN: {"platform": HMIPC_DOMAIN}} ) assert not hass.data.get(HMIPC_DOMAIN) @@ -52,7 +51,7 @@ async def test_hmip_doorlockdrive( blocking=True, ) assert hmip_device.mock_calls[-1][0] == "set_lock_state" - assert hmip_device.mock_calls[-1][1] == (LockState.OPEN,) + assert hmip_device.mock_calls[-1][1] == (HomematicLockState.OPEN,) await hass.services.async_call( "lock", @@ -61,7 +60,7 @@ async def test_hmip_doorlockdrive( blocking=True, ) assert hmip_device.mock_calls[-1][0] == "set_lock_state" - assert hmip_device.mock_calls[-1][1] == (LockState.LOCKED,) + assert hmip_device.mock_calls[-1][1] == (HomematicLockState.LOCKED,) await hass.services.async_call( "lock", @@ -71,19 +70,19 @@ async def test_hmip_doorlockdrive( ) assert hmip_device.mock_calls[-1][0] == "set_lock_state" - assert hmip_device.mock_calls[-1][1] == (LockState.UNLOCKED,) + assert hmip_device.mock_calls[-1][1] == (HomematicLockState.UNLOCKED,) await async_manipulate_test_data( hass, hmip_device, "motorState", MotorState.CLOSING ) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_LOCKING + assert ha_state.state == LockState.LOCKING await async_manipulate_test_data( hass, hmip_device, "motorState", MotorState.OPENING ) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_UNLOCKING + assert ha_state.state == LockState.UNLOCKING async def test_hmip_doorlockdrive_handle_errors( diff --git a/tests/components/homematicip_cloud/test_sensor.py b/tests/components/homematicip_cloud/test_sensor.py index 4028f6d189e..2dda3116032 100644 --- a/tests/components/homematicip_cloud/test_sensor.py +++ b/tests/components/homematicip_cloud/test_sensor.py @@ -3,7 +3,7 @@ from homematicip.base.enums import ValveState from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from homeassistant.components.homematicip_cloud.generic_entity import ( +from homeassistant.components.homematicip_cloud.entity import ( ATTR_CONFIG_PENDING, ATTR_DEVICE_OVERHEATED, ATTR_DEVICE_OVERLOADED, @@ -12,6 +12,7 @@ from 
homeassistant.components.homematicip_cloud.generic_entity import ( ATTR_RSSI_DEVICE, ATTR_RSSI_PEER, ) +from homeassistant.components.homematicip_cloud.hap import HomematicipHAP from homeassistant.components.homematicip_cloud.sensor import ( ATTR_CURRENT_ILLUMINATION, ATTR_HIGHEST_ILLUMINATION, @@ -22,7 +23,11 @@ from homeassistant.components.homematicip_cloud.sensor import ( ATTR_WIND_DIRECTION, ATTR_WIND_DIRECTION_VARIATION, ) -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, + DOMAIN as SENSOR_DOMAIN, + SensorStateClass, +) from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, LIGHT_LUX, @@ -361,6 +366,7 @@ async def test_hmip_windspeed_sensor( assert ( ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfSpeed.KILOMETERS_PER_HOUR ) + assert ha_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT await async_manipulate_test_data(hass, hmip_device, "windSpeed", 9.4) ha_state = hass.states.get(entity_id) assert ha_state.state == "9.4" @@ -410,6 +416,7 @@ async def test_hmip_today_rain_sensor( assert ha_state.state == "3.9" assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfLength.MILLIMETERS + assert ha_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT await async_manipulate_test_data(hass, hmip_device, "todayRainCounter", 14.2) ha_state = hass.states.get(entity_id) assert ha_state.state == "14.2" @@ -515,6 +522,47 @@ async def test_hmip_passage_detector_delta_counter( assert ha_state.state == "190" +async def test_hmip_floor_terminal_block_mechanic_channel_1_valve_position( + hass: HomeAssistant, default_mock_hap_factory: HomematicipHAP +) -> None: + """Test HomematicipFloorTerminalBlockMechanicChannelValve Channel 1 HmIP-FALMOT-C12.""" + entity_id = "sensor.heizkreislauf_1_og_bad_r" + entity_name = "Heizkreislauf (1) OG Bad r" + device_model = "HmIP-FALMOT-C12" + + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["Fu\u00dfbodenheizungsaktor"] + ) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + hmip_device = mock_hap.hmip_device_by_entity_id.get(entity_id) + + assert ha_state.state == "48" + assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + await async_manipulate_test_data(hass, hmip_device, "valvePosition", 0.36) + ha_state = hass.states.get(entity_id) + assert ha_state.state == "36" + + await async_manipulate_test_data(hass, hmip_device, "configPending", True) + ha_state = hass.states.get(entity_id) + assert ha_state.attributes["icon"] == "mdi:alert-circle" + + await async_manipulate_test_data(hass, hmip_device, "configPending", False) + await async_manipulate_test_data( + hass, hmip_device, "valveState", ValveState.ADAPTION_IN_PROGRESS + ) + ha_state = hass.states.get(entity_id) + assert ha_state.attributes["icon"] == "mdi:alert" + + await async_manipulate_test_data( + hass, hmip_device, "valveState", ValveState.ADAPTION_DONE + ) + ha_state = hass.states.get(entity_id) + assert ha_state.attributes["icon"] == "mdi:heating-coil" + + async def test_hmip_esi_iec_current_power_consumption( hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: diff --git a/tests/components/homematicip_cloud/test_switch.py b/tests/components/homematicip_cloud/test_switch.py index e4b51688ba7..54cdd632d03 100644 --- a/tests/components/homematicip_cloud/test_switch.py +++ b/tests/components/homematicip_cloud/test_switch.py @@ -1,7 +1,7 @@ 
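# Note: alongside the cover and alarm tests earlier in this patch, which move from the
# STATE_OPEN / STATE_ALARM_* constants to the CoverState and AlarmControlPanelState
# enums, the test_lock.py hunk above has to keep apart two different things named
# LockState: the homematicip library enum used for commands sent to the device, and
# Home Assistant's LockState enum that replaces STATE_LOCKING / STATE_UNLOCKING.
# A minimal sketch of the distinction; the imports are taken from the diff, the usage
# is illustrative only.

from homematicip.base.enums import LockState as HomematicLockState
from homeassistant.components.lock import LockState

# Command enum passed to set_lock_state on the HomematicIP door-lock drive (library side).
command = HomematicLockState.LOCKED
# Entity state Home Assistant reports while the lock motor is running (HA side).
entity_state = LockState.LOCKING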
"""Tests for HomematicIP Cloud switch.""" from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from homeassistant.components.homematicip_cloud.generic_entity import ( +from homeassistant.components.homematicip_cloud.entity import ( ATTR_GROUP_MEMBER_UNREACHABLE, ) from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN diff --git a/tests/components/homewizard/conftest.py b/tests/components/homewizard/conftest.py index fcfe1e5c189..dfd92577a04 100644 --- a/tests/components/homewizard/conftest.py +++ b/tests/components/homewizard/conftest.py @@ -4,7 +4,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from homewizard_energy.errors import NotFoundError -from homewizard_energy.models import Data, Device, State, System +from homewizard_energy.v1.models import Data, Device, State, System import pytest from homeassistant.components.homewizard.const import DOMAIN @@ -27,11 +27,11 @@ def mock_homewizardenergy( """Return a mock bridge.""" with ( patch( - "homeassistant.components.homewizard.coordinator.HomeWizardEnergy", + "homeassistant.components.homewizard.coordinator.HomeWizardEnergyV1", autospec=True, ) as homewizard, patch( - "homeassistant.components.homewizard.config_flow.HomeWizardEnergy", + "homeassistant.components.homewizard.config_flow.HomeWizardEnergyV1", new=homewizard, ), ): @@ -77,12 +77,12 @@ def mock_config_entry() -> MockConfigEntry: title="Device", domain=DOMAIN, data={ - "product_name": "Product name", - "product_type": "product_type", - "serial": "aabbccddeeff", + "product_name": "P1 Meter", + "product_type": "HWE-P1", + "serial": "5c2fafabcdef", CONF_IP_ADDRESS: "127.0.0.1", }, - unique_id="aabbccddeeff", + unique_id="HWE-P1_5c2fafabcdef", ) diff --git a/tests/components/homewizard/fixtures/HWE-KWH1/device.json b/tests/components/homewizard/fixtures/HWE-KWH1/device.json index 67f9ddf42cb..2cb20bf1255 100644 --- a/tests/components/homewizard/fixtures/HWE-KWH1/device.json +++ b/tests/components/homewizard/fixtures/HWE-KWH1/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-KWH1", "product_name": "kWh meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-KWH3/device.json b/tests/components/homewizard/fixtures/HWE-KWH3/device.json index e3122c8ff89..a3ba3281a4f 100644 --- a/tests/components/homewizard/fixtures/HWE-KWH3/device.json +++ b/tests/components/homewizard/fixtures/HWE-KWH3/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-KWH3", "product_name": "KWh meter 3-phase", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-P1-invalid-EAN/device.json b/tests/components/homewizard/fixtures/HWE-P1-invalid-EAN/device.json index 4972c491859..a444aa81c30 100644 --- a/tests/components/homewizard/fixtures/HWE-P1-invalid-EAN/device.json +++ b/tests/components/homewizard/fixtures/HWE-P1-invalid-EAN/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-P1", "product_name": "P1 meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.19", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-P1-unused-exports/device.json b/tests/components/homewizard/fixtures/HWE-P1-unused-exports/device.json index 4972c491859..a444aa81c30 100644 --- a/tests/components/homewizard/fixtures/HWE-P1-unused-exports/device.json +++ 
b/tests/components/homewizard/fixtures/HWE-P1-unused-exports/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-P1", "product_name": "P1 meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.19", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json b/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json index 4972c491859..a444aa81c30 100644 --- a/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json +++ b/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-P1", "product_name": "P1 meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.19", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-P1/device.json b/tests/components/homewizard/fixtures/HWE-P1/device.json index 4972c491859..a444aa81c30 100644 --- a/tests/components/homewizard/fixtures/HWE-P1/device.json +++ b/tests/components/homewizard/fixtures/HWE-P1/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-P1", "product_name": "P1 meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.19", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-SKT-11/device.json b/tests/components/homewizard/fixtures/HWE-SKT-11/device.json index bab5a636368..8b768eccb98 100644 --- a/tests/components/homewizard/fixtures/HWE-SKT-11/device.json +++ b/tests/components/homewizard/fixtures/HWE-SKT-11/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-SKT", "product_name": "Energy Socket", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.03", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-SKT-21/device.json b/tests/components/homewizard/fixtures/HWE-SKT-21/device.json index 69b5947351f..a4ab182e7ec 100644 --- a/tests/components/homewizard/fixtures/HWE-SKT-21/device.json +++ b/tests/components/homewizard/fixtures/HWE-SKT-21/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-SKT", "product_name": "Energy Socket", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.07", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-WTR/device.json b/tests/components/homewizard/fixtures/HWE-WTR/device.json index d33e6045299..3f57d7174fc 100644 --- a/tests/components/homewizard/fixtures/HWE-WTR/device.json +++ b/tests/components/homewizard/fixtures/HWE-WTR/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-WTR", "product_name": "Watermeter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "2.03", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/SDM230/SDM630/device.json b/tests/components/homewizard/fixtures/SDM230/SDM630/device.json index b8ec1d18fe8..c7fefd081b5 100644 --- a/tests/components/homewizard/fixtures/SDM230/SDM630/device.json +++ b/tests/components/homewizard/fixtures/SDM230/SDM630/device.json @@ -1,7 +1,7 @@ { "product_type": "SDM630-wifi", "product_name": "KWh meter 3-phase", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/SDM230/device.json b/tests/components/homewizard/fixtures/SDM230/device.json index b6b5c18904e..2dcd391e119 100644 --- a/tests/components/homewizard/fixtures/SDM230/device.json +++ b/tests/components/homewizard/fixtures/SDM230/device.json @@ -1,7 +1,7 @@ { "product_type": "SDM230-wifi", "product_name": "kWh 
meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/SDM630/device.json b/tests/components/homewizard/fixtures/SDM630/device.json index b8ec1d18fe8..c7fefd081b5 100644 --- a/tests/components/homewizard/fixtures/SDM630/device.json +++ b/tests/components/homewizard/fixtures/SDM630/device.json @@ -1,7 +1,7 @@ { "product_type": "SDM630-wifi", "product_name": "KWh meter 3-phase", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/snapshots/test_button.ambr b/tests/components/homewizard/snapshots/test_button.ambr index d5ad9770478..6dd7fcc45d2 100644 --- a/tests/components/homewizard/snapshots/test_button.ambr +++ b/tests/components/homewizard/snapshots/test_button.ambr @@ -42,7 +42,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_identify', + 'unique_id': 'HWE-P1_5c2fafabcdef_identify', 'unit_of_measurement': None, }) # --- @@ -54,7 +54,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -64,7 +64,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, diff --git a/tests/components/homewizard/snapshots/test_config_flow.ambr b/tests/components/homewizard/snapshots/test_config_flow.ambr index 663d9153991..71e70f3a153 100644 --- a/tests/components/homewizard/snapshots/test_config_flow.ambr +++ b/tests/components/homewizard/snapshots/test_config_flow.ambr @@ -3,7 +3,7 @@ FlowResultSnapshot({ 'context': dict({ 'source': 'zeroconf', - 'unique_id': 'HWE-P1_aabbccddeeff', + 'unique_id': 'HWE-P1_5c2fafabcdef', }), 'data': dict({ 'ip_address': '127.0.0.1', @@ -20,6 +20,8 @@ 'ip_address': '127.0.0.1', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'homewizard', 'entry_id': , 'minor_version': 1, @@ -28,10 +30,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'P1 meter', - 'unique_id': 'HWE-P1_aabbccddeeff', + 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -45,7 +51,7 @@ 'title_placeholders': dict({ 'name': 'P1 meter', }), - 'unique_id': 'HWE-P1_aabbccddeeff', + 'unique_id': 'HWE-P1_5c2fafabcdef', }), 'data': dict({ 'ip_address': '127.0.0.1', @@ -62,6 +68,8 @@ 'ip_address': '127.0.0.1', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'homewizard', 'entry_id': , 'minor_version': 1, @@ -70,10 +78,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'P1 meter', - 'unique_id': 'HWE-P1_aabbccddeeff', + 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -85,9 +97,9 @@ 'confirm_only': True, 'source': 'zeroconf', 'title_placeholders': dict({ - 'name': 'Energy Socket (aabbccddeeff)', + 'name': 'Energy Socket (5c2fafabcdef)', }), - 'unique_id': 'HWE-SKT_aabbccddeeff', + 'unique_id': 'HWE-SKT_5c2fafabcdef', }), 'data': dict({ 'ip_address': '127.0.0.1', @@ -104,6 +116,8 @@ 'ip_address': '127.0.0.1', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'homewizard', 'entry_id': , 'minor_version': 1, @@ -112,10 +126,14 @@ 'pref_disable_new_entities': False, 
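# Note: the HomeWizard updates above come down to two test-data changes: the fixture
# serial moves from 3c39e7aabbcc to 5c2fafabcdef, and config entries and entities are
# now keyed by product type plus serial instead of the bare serial, which is why the
# snapshot unique IDs shift from "aabbccddeeff_..." to "HWE-P1_5c2fafabcdef_...".
# The helper below is an illustrative sketch of that assumed key format, not the
# integration's code.

def homewizard_unique_id(product_type: str, serial: str, key: str | None = None) -> str:
    """Build unique IDs in the format the updated fixtures and snapshots use."""
    base = f"{product_type}_{serial}"  # e.g. the config entry unique_id "HWE-P1_5c2fafabcdef"
    return f"{base}_{key}" if key else base  # e.g. "HWE-P1_5c2fafabcdef_active_power_w"


assert homewizard_unique_id("HWE-P1", "5c2fafabcdef") == "HWE-P1_5c2fafabcdef"
assert (
    homewizard_unique_id("HWE-P1", "5c2fafabcdef", "active_power_w")
    == "HWE-P1_5c2fafabcdef_active_power_w"
)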
'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'Energy Socket', - 'unique_id': 'HWE-SKT_aabbccddeeff', + 'unique_id': 'HWE-SKT_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Energy Socket', 'type': , 'version': 1, @@ -125,7 +143,7 @@ FlowResultSnapshot({ 'context': dict({ 'source': 'user', - 'unique_id': 'HWE-P1_3c39e7aabbcc', + 'unique_id': 'HWE-P1_5c2fafabcdef', }), 'data': dict({ 'ip_address': '2.2.2.2', @@ -142,6 +160,8 @@ 'ip_address': '2.2.2.2', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'homewizard', 'entry_id': , 'minor_version': 1, @@ -150,10 +170,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'P1 meter', - 'unique_id': 'HWE-P1_3c39e7aabbcc', + 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'P1 meter', 'type': , 'version': 1, diff --git a/tests/components/homewizard/snapshots/test_diagnostics.ambr b/tests/components/homewizard/snapshots/test_diagnostics.ambr index f8ac80f2536..cb5e7ef1f43 100644 --- a/tests/components/homewizard/snapshots/test_diagnostics.ambr +++ b/tests/components/homewizard/snapshots/test_diagnostics.ambr @@ -82,8 +82,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -171,8 +171,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -311,8 +311,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -404,8 +404,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -497,8 +497,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -586,8 +586,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -675,8 +675,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -764,8 +764,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) diff --git a/tests/components/homewizard/snapshots/test_number.ambr b/tests/components/homewizard/snapshots/test_number.ambr index 768255c7508..b14028cd97c 100644 --- a/tests/components/homewizard/snapshots/test_number.ambr +++ b/tests/components/homewizard/snapshots/test_number.ambr @@ -14,7 +14,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100.0', + 'state': '100', }) # --- # name: test_number_entities[HWE-SKT-11].1 @@ 
-51,7 +51,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'status_light_brightness', - 'unique_id': 'aabbccddeeff_status_light_brightness', + 'unique_id': 'HWE-P1_5c2fafabcdef_status_light_brightness', 'unit_of_measurement': '%', }) # --- @@ -63,7 +63,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -73,7 +73,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -106,7 +106,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100.0', + 'state': '100', }) # --- # name: test_number_entities[HWE-SKT-21].1 @@ -143,7 +143,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'status_light_brightness', - 'unique_id': 'aabbccddeeff_status_light_brightness', + 'unique_id': 'HWE-P1_5c2fafabcdef_status_light_brightness', 'unit_of_measurement': '%', }) # --- @@ -155,7 +155,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -165,7 +165,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index dd50b098d40..c5de96cbf8f 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -1,37 +1,4 @@ # serializer version: 1 -# name: test_gas_meter_migrated[sensor.homewizard_aabbccddeeff_total_gas_m3:entity-registry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.homewizard_aabbccddeeff_total_gas_m3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'homewizard', - 'previous_unique_id': 'aabbccddeeff_total_gas_m3', - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'homewizard_gas_meter_01FFEEDDCCBBAA99887766554433221100', - 'unit_of_measurement': None, - }) -# --- # name: test_sensors[HWE-KWH1-entity_ids7][sensor.device_apparent_power:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, @@ -40,7 +7,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -50,7 +17,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -99,7 +66,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -127,7 +94,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -137,7 +104,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -186,7 +153,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -214,7 +181,7 @@ 'connections': set({ 
tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -224,7 +191,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -273,7 +240,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -301,7 +268,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -311,7 +278,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -360,7 +327,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -388,7 +355,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -398,7 +365,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -447,7 +414,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -475,7 +442,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -485,7 +452,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -537,7 +504,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -565,7 +532,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -575,7 +542,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -624,7 +591,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_factor', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor', 'unit_of_measurement': '%', }) # --- @@ -652,7 +619,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -662,7 +629,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -711,7 +678,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': , }) # --- @@ -739,7 +706,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -749,7 +716,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -798,7 +765,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_voltage_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_v', 'unit_of_measurement': , }) # --- @@ -826,7 +793,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + 
'5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -836,7 +803,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -883,7 +850,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -908,7 +875,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -918,7 +885,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -967,7 +934,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -994,7 +961,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1004,7 +971,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1053,7 +1020,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -1081,7 +1048,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1091,7 +1058,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1140,7 +1107,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l1_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l1_va', 'unit_of_measurement': , }) # --- @@ -1168,7 +1135,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1178,7 +1145,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1227,7 +1194,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l2_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l2_va', 'unit_of_measurement': , }) # --- @@ -1255,7 +1222,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1265,7 +1232,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1314,7 +1281,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l3_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l3_va', 'unit_of_measurement': , }) # --- @@ -1342,7 +1309,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1352,7 +1319,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1401,7 +1368,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -1429,7 +1396,7 @@ 
'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1439,7 +1406,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1488,7 +1455,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -1516,7 +1483,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1526,7 +1493,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1575,7 +1542,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -1603,7 +1570,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1613,7 +1580,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1662,7 +1629,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -1690,7 +1657,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1700,7 +1667,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1749,7 +1716,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -1777,7 +1744,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1787,7 +1754,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1836,7 +1803,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -1864,7 +1831,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1874,7 +1841,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1923,7 +1890,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -1951,7 +1918,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1961,7 +1928,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2013,7 +1980,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 
'unit_of_measurement': , }) # --- @@ -2041,7 +2008,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2051,7 +2018,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2100,7 +2067,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l1', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l1', 'unit_of_measurement': '%', }) # --- @@ -2128,7 +2095,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2138,7 +2105,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2187,7 +2154,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l2', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l2', 'unit_of_measurement': '%', }) # --- @@ -2215,7 +2182,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2225,7 +2192,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2274,7 +2241,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l3', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l3', 'unit_of_measurement': '%', }) # --- @@ -2302,7 +2269,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2312,7 +2279,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2364,7 +2331,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -2392,7 +2359,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2402,7 +2369,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2454,7 +2421,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -2482,7 +2449,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2492,7 +2459,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2544,7 +2511,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -2572,7 +2539,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2582,7 +2549,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2631,7 +2598,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 
'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': , }) # --- @@ -2659,7 +2626,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2669,7 +2636,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2718,7 +2685,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l1_var', 'unit_of_measurement': , }) # --- @@ -2746,7 +2713,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2756,7 +2723,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2805,7 +2772,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l2_var', 'unit_of_measurement': , }) # --- @@ -2833,7 +2800,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2843,7 +2810,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2892,7 +2859,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l3_var', 'unit_of_measurement': , }) # --- @@ -2920,7 +2887,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2930,7 +2897,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2979,7 +2946,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -3007,7 +2974,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3017,7 +2984,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3066,7 +3033,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -3094,7 +3061,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3104,7 +3071,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3153,7 +3120,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -3181,7 +3148,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3191,7 +3158,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + 
'5c2fafabcdef', ), }), 'is_new': False, @@ -3238,7 +3205,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -3263,7 +3230,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3273,7 +3240,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3322,7 +3289,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -3349,7 +3316,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3359,7 +3326,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3406,7 +3373,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_average_w', - 'unique_id': 'aabbccddeeff_active_power_average_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_average_w', 'unit_of_measurement': , }) # --- @@ -3433,7 +3400,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3443,7 +3410,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3492,7 +3459,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -3520,7 +3487,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3530,7 +3497,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3579,7 +3546,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -3607,7 +3574,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3617,7 +3584,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3666,7 +3633,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -3694,7 +3661,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3704,7 +3671,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3751,7 +3718,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dsmr_version', - 'unique_id': 'aabbccddeeff_smr_version', + 'unique_id': 'HWE-P1_5c2fafabcdef_smr_version', 'unit_of_measurement': None, }) # --- @@ -3776,7 +3743,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3786,7 +3753,7 @@ 'identifiers': set({ tuple( 'homewizard', - 
'3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3835,7 +3802,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -3863,7 +3830,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3873,7 +3840,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3922,7 +3889,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t1_kwh', 'unit_of_measurement': , }) # --- @@ -3950,7 +3917,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3960,7 +3927,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4009,7 +3976,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t2_kwh', 'unit_of_measurement': , }) # --- @@ -4037,7 +4004,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4047,7 +4014,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4096,7 +4063,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t3_kwh', 'unit_of_measurement': , }) # --- @@ -4124,7 +4091,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4134,7 +4101,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4183,7 +4150,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t4_kwh', 'unit_of_measurement': , }) # --- @@ -4211,7 +4178,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4221,7 +4188,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4270,7 +4237,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -4298,7 +4265,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4308,7 +4275,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4357,7 +4324,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t1_kwh', 'unit_of_measurement': , }) # --- @@ -4385,7 
+4352,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4395,7 +4362,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4444,7 +4411,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t2_kwh', 'unit_of_measurement': , }) # --- @@ -4472,7 +4439,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4482,7 +4449,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4531,7 +4498,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t3_kwh', 'unit_of_measurement': , }) # --- @@ -4559,7 +4526,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4569,7 +4536,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4618,7 +4585,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t4_kwh', 'unit_of_measurement': , }) # --- @@ -4646,7 +4613,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4656,7 +4623,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4705,7 +4672,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -4733,7 +4700,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4743,7 +4710,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4790,7 +4757,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'long_power_fail_count', - 'unique_id': 'aabbccddeeff_long_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_long_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -4815,7 +4782,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4825,7 +4792,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4872,7 +4839,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'monthly_power_peak_w', - 'unique_id': 'aabbccddeeff_monthly_power_peak_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_monthly_power_peak_w', 'unit_of_measurement': , }) # --- @@ -4899,7 +4866,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4909,7 +4876,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4961,7 +4928,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 
'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -4989,7 +4956,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4999,7 +4966,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5046,7 +5013,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'any_power_fail_count', - 'unique_id': 'aabbccddeeff_any_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_any_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -5071,7 +5038,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5081,7 +5048,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5133,7 +5100,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -5161,7 +5128,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5171,7 +5138,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5223,7 +5190,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -5251,7 +5218,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5261,7 +5228,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5313,7 +5280,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -5341,7 +5308,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5351,7 +5318,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5398,7 +5365,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'unique_meter_id', - 'unique_id': 'aabbccddeeff_unique_meter_id', + 'unique_id': 'HWE-P1_5c2fafabcdef_unique_meter_id', 'unit_of_measurement': None, }) # --- @@ -5423,7 +5390,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5433,7 +5400,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5480,7 +5447,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'meter_model', - 'unique_id': 'aabbccddeeff_meter_model', + 'unique_id': 'HWE-P1_5c2fafabcdef_meter_model', 'unit_of_measurement': None, }) # --- @@ -5505,7 +5472,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5515,7 +5482,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5569,7 +5536,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_tariff', - 'unique_id': 
'aabbccddeeff_active_tariff', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_tariff', 'unit_of_measurement': None, }) # --- @@ -5601,7 +5568,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5611,7 +5578,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5660,7 +5627,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_liter_m3', - 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_liter_m3', 'unit_of_measurement': , }) # --- @@ -5688,7 +5655,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5698,7 +5665,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5747,7 +5714,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -5775,7 +5742,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5785,7 +5752,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5834,7 +5801,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -5862,7 +5829,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5872,7 +5839,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5921,7 +5888,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -5949,7 +5916,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5959,7 +5926,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6006,7 +5973,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l1_count', 'unit_of_measurement': None, }) # --- @@ -6031,7 +5998,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6041,7 +6008,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6088,7 +6055,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l2_count', 'unit_of_measurement': None, }) # --- @@ -6113,7 +6080,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6123,7 +6090,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6170,7 +6137,7 @@ 'previous_unique_id': None, 'supported_features': 0, 
'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l3_count', 'unit_of_measurement': None, }) # --- @@ -6195,7 +6162,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6205,7 +6172,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6252,7 +6219,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l1_count', 'unit_of_measurement': None, }) # --- @@ -6277,7 +6244,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6287,7 +6254,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6334,7 +6301,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l2_count', 'unit_of_measurement': None, }) # --- @@ -6359,7 +6326,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6369,7 +6336,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6416,7 +6383,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l3_count', 'unit_of_measurement': None, }) # --- @@ -6441,7 +6408,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6451,7 +6418,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6500,8 +6467,8 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_liter_lpm', - 'unique_id': 'aabbccddeeff_active_liter_lpm', - 'unit_of_measurement': 'l/min', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-P1-entity_ids0][sensor.device_water_usage:state] @@ -6509,7 +6476,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Water usage', 'state_class': , - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_water_usage', @@ -6527,7 +6494,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6537,7 +6504,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6584,7 +6551,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -6609,7 +6576,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6619,7 +6586,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6668,7 +6635,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 
'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -7109,7 +7076,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7119,7 +7086,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7166,7 +7133,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_average_w', - 'unique_id': 'aabbccddeeff_active_power_average_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_average_w', 'unit_of_measurement': , }) # --- @@ -7193,7 +7160,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7203,7 +7170,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7252,7 +7219,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -7280,7 +7247,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7290,7 +7257,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7339,7 +7306,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -7367,7 +7334,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7377,7 +7344,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7426,7 +7393,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -7454,7 +7421,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7464,7 +7431,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7511,7 +7478,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dsmr_version', - 'unique_id': 'aabbccddeeff_smr_version', + 'unique_id': 'HWE-P1_5c2fafabcdef_smr_version', 'unit_of_measurement': None, }) # --- @@ -7536,7 +7503,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7546,7 +7513,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7595,7 +7562,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -7623,7 +7590,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7633,7 +7600,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7682,7 +7649,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t1_kwh', 'unit_of_measurement': , }) # --- @@ -7710,7 +7677,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7720,7 +7687,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7769,7 +7736,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t2_kwh', 'unit_of_measurement': , }) # --- @@ -7797,7 +7764,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7807,7 +7774,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7856,7 +7823,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t3_kwh', 'unit_of_measurement': , }) # --- @@ -7884,7 +7851,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7894,7 +7861,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7943,7 +7910,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t4_kwh', 'unit_of_measurement': , }) # --- @@ -7971,7 +7938,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7981,7 +7948,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8030,7 +7997,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -8058,7 +8025,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8068,7 +8035,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8117,7 +8084,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t1_kwh', 'unit_of_measurement': , }) # --- @@ -8145,7 +8112,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8155,7 +8122,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8204,7 +8171,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t2_kwh', 'unit_of_measurement': , }) # --- @@ -8232,7 +8199,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8242,7 +8209,7 
@@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8291,7 +8258,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t3_kwh', 'unit_of_measurement': , }) # --- @@ -8319,7 +8286,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8329,7 +8296,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8378,7 +8345,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t4_kwh', 'unit_of_measurement': , }) # --- @@ -8406,7 +8373,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8416,7 +8383,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8465,7 +8432,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -8493,7 +8460,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8503,7 +8470,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8550,7 +8517,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'long_power_fail_count', - 'unique_id': 'aabbccddeeff_long_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_long_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -8575,7 +8542,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8585,7 +8552,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8632,7 +8599,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'monthly_power_peak_w', - 'unique_id': 'aabbccddeeff_monthly_power_peak_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_monthly_power_peak_w', 'unit_of_measurement': , }) # --- @@ -8659,7 +8626,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8669,7 +8636,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8721,7 +8688,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -8749,7 +8716,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8759,7 +8726,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8806,7 +8773,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'any_power_fail_count', - 'unique_id': 'aabbccddeeff_any_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_any_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -8831,7 +8798,7 @@ 'connections': set({ tuple( 'mac', - 
'3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8841,7 +8808,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8893,7 +8860,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -8921,7 +8888,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8931,7 +8898,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8983,7 +8950,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -9011,7 +8978,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9021,7 +8988,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9073,7 +9040,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -9101,7 +9068,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9111,7 +9078,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9158,7 +9125,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'unique_meter_id', - 'unique_id': 'aabbccddeeff_unique_meter_id', + 'unique_id': 'HWE-P1_5c2fafabcdef_unique_meter_id', 'unit_of_measurement': None, }) # --- @@ -9183,7 +9150,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9193,7 +9160,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9240,7 +9207,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'meter_model', - 'unique_id': 'aabbccddeeff_meter_model', + 'unique_id': 'HWE-P1_5c2fafabcdef_meter_model', 'unit_of_measurement': None, }) # --- @@ -9265,7 +9232,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9275,7 +9242,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9329,7 +9296,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_tariff', - 'unique_id': 'aabbccddeeff_active_tariff', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_tariff', 'unit_of_measurement': None, }) # --- @@ -9361,7 +9328,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9371,7 +9338,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9420,7 +9387,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_liter_m3', - 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_liter_m3', 'unit_of_measurement': , }) # --- @@ -9448,7 +9415,7 @@ 'connections': set({ tuple( 'mac', - 
'3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9458,7 +9425,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9507,7 +9474,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -9535,7 +9502,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9545,7 +9512,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9594,7 +9561,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -9622,7 +9589,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9632,7 +9599,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9681,7 +9648,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -9709,7 +9676,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9719,7 +9686,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9766,7 +9733,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l1_count', 'unit_of_measurement': None, }) # --- @@ -9791,7 +9758,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9801,7 +9768,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9848,7 +9815,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l2_count', 'unit_of_measurement': None, }) # --- @@ -9873,7 +9840,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9883,7 +9850,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9930,7 +9897,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l3_count', 'unit_of_measurement': None, }) # --- @@ -9955,7 +9922,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9965,7 +9932,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10012,7 +9979,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l1_count', + 'unique_id': 
'HWE-P1_5c2fafabcdef_voltage_swell_l1_count', 'unit_of_measurement': None, }) # --- @@ -10037,7 +10004,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10047,7 +10014,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10094,7 +10061,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l2_count', 'unit_of_measurement': None, }) # --- @@ -10119,7 +10086,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10129,7 +10096,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10176,7 +10143,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l3_count', 'unit_of_measurement': None, }) # --- @@ -10201,7 +10168,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10211,7 +10178,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10260,8 +10227,8 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_liter_lpm', - 'unique_id': 'aabbccddeeff_active_liter_lpm', - 'unit_of_measurement': 'l/min', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-P1-invalid-EAN-entity_ids9][sensor.device_water_usage:state] @@ -10269,7 +10236,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Water usage', 'state_class': , - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_water_usage', @@ -10287,7 +10254,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10297,7 +10264,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10344,7 +10311,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -10369,7 +10336,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10379,7 +10346,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10428,7 +10395,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -10869,7 +10836,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10879,7 +10846,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10926,7 +10893,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_average_w', - 'unique_id': 'aabbccddeeff_active_power_average_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_average_w', 'unit_of_measurement': , }) # --- @@ 
-10953,7 +10920,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10963,7 +10930,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11012,7 +10979,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -11040,7 +11007,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11050,7 +11017,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11099,7 +11066,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -11127,7 +11094,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11137,7 +11104,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11186,7 +11153,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -11214,7 +11181,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11224,7 +11191,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11273,7 +11240,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -11301,7 +11268,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11311,7 +11278,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11360,7 +11327,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t1_kwh', 'unit_of_measurement': , }) # --- @@ -11388,7 +11355,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11398,7 +11365,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11447,7 +11414,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t2_kwh', 'unit_of_measurement': , }) # --- @@ -11475,7 +11442,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11485,7 +11452,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11534,7 +11501,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t3_kwh', 'unit_of_measurement': , }) # --- @@ -11562,7 +11529,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11572,7 +11539,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11621,7 +11588,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t4_kwh', 'unit_of_measurement': , }) # --- @@ -11649,7 +11616,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11659,7 +11626,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11708,7 +11675,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -11736,7 +11703,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11746,7 +11713,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11795,7 +11762,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t1_kwh', 'unit_of_measurement': , }) # --- @@ -11823,7 +11790,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11833,7 +11800,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11882,7 +11849,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t2_kwh', 'unit_of_measurement': , }) # --- @@ -11910,7 +11877,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11920,7 +11887,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11969,7 +11936,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t3_kwh', 'unit_of_measurement': , }) # --- @@ -11997,7 +11964,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12007,7 +11974,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12056,7 +12023,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t4_kwh', 'unit_of_measurement': , }) # --- @@ -12084,7 +12051,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 
'disabled_by': None, @@ -12094,7 +12061,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12143,7 +12110,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -12171,7 +12138,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12181,7 +12148,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12228,7 +12195,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'long_power_fail_count', - 'unique_id': 'aabbccddeeff_long_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_long_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -12253,7 +12220,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12263,7 +12230,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12315,7 +12282,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -12343,7 +12310,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12353,7 +12320,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12400,7 +12367,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'any_power_fail_count', - 'unique_id': 'aabbccddeeff_any_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_any_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -12425,7 +12392,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12435,7 +12402,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12487,7 +12454,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -12515,7 +12482,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12525,7 +12492,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12577,7 +12544,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -12605,7 +12572,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12615,7 +12582,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12667,7 +12634,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -12695,7 +12662,7 @@ 'connections': set({ tuple( 
'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12705,7 +12672,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12754,7 +12721,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_liter_m3', - 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_liter_m3', 'unit_of_measurement': , }) # --- @@ -12782,7 +12749,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12792,7 +12759,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12841,7 +12808,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -12869,7 +12836,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12879,7 +12846,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12928,7 +12895,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -12956,7 +12923,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12966,7 +12933,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13015,7 +12982,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -13043,7 +13010,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13053,7 +13020,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13100,7 +13067,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l1_count', 'unit_of_measurement': None, }) # --- @@ -13125,7 +13092,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13135,7 +13102,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13182,7 +13149,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l2_count', 'unit_of_measurement': None, }) # --- @@ -13207,7 +13174,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13217,7 +13184,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13264,7 +13231,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l3_count', + 'unique_id': 
'HWE-P1_5c2fafabcdef_voltage_sag_l3_count', 'unit_of_measurement': None, }) # --- @@ -13289,7 +13256,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13299,7 +13266,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13346,7 +13313,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l1_count', 'unit_of_measurement': None, }) # --- @@ -13371,7 +13338,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13381,7 +13348,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13428,7 +13395,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l2_count', 'unit_of_measurement': None, }) # --- @@ -13453,7 +13420,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13463,7 +13430,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13510,7 +13477,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l3_count', 'unit_of_measurement': None, }) # --- @@ -13535,7 +13502,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13545,7 +13512,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13594,8 +13561,8 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_liter_lpm', - 'unique_id': 'aabbccddeeff_active_liter_lpm', - 'unit_of_measurement': 'l/min', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_water_usage:state] @@ -13603,7 +13570,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Water usage', 'state_class': , - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_water_usage', @@ -13621,7 +13588,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13631,7 +13598,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13680,7 +13647,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -13708,7 +13675,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13718,7 +13685,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13767,7 +13734,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 
'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -13795,7 +13762,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13805,7 +13772,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13857,7 +13824,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -13885,7 +13852,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13895,7 +13862,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13947,7 +13914,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -13975,7 +13942,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13985,7 +13952,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14032,7 +13999,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -14057,7 +14024,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14067,7 +14034,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14116,7 +14083,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -14143,7 +14110,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14153,7 +14120,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14202,7 +14169,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -14230,7 +14197,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14240,7 +14207,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14289,7 +14256,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -14317,7 +14284,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14327,7 +14294,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14376,7 +14343,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 
'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -14404,7 +14371,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14414,7 +14381,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14463,7 +14430,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -14491,7 +14458,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14501,7 +14468,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14550,7 +14517,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -14578,7 +14545,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14588,7 +14555,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14640,7 +14607,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -14668,7 +14635,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14678,7 +14645,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14727,7 +14694,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_factor', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor', 'unit_of_measurement': '%', }) # --- @@ -14755,7 +14722,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14765,7 +14732,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14817,7 +14784,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -14845,7 +14812,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14855,7 +14822,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14904,7 +14871,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': , }) # --- @@ -14932,7 +14899,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14942,7 +14909,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14991,7 +14958,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 
'aabbccddeeff_active_voltage_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_v', 'unit_of_measurement': , }) # --- @@ -15019,7 +14986,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15029,7 +14996,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15076,7 +15043,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -15101,7 +15068,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15111,7 +15078,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15160,7 +15127,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -15187,7 +15154,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15197,7 +15164,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15246,7 +15213,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_liter_m3', - 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_liter_m3', 'unit_of_measurement': , }) # --- @@ -15274,7 +15241,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15284,7 +15251,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15333,8 +15300,8 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_liter_lpm', - 'unique_id': 'aabbccddeeff_active_liter_lpm', - 'unit_of_measurement': 'l/min', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-WTR-entity_ids4][sensor.device_water_usage:state] @@ -15342,7 +15309,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Water usage', 'state_class': , - 'unit_of_measurement': 'l/min', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_water_usage', @@ -15360,7 +15327,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15370,7 +15337,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15417,7 +15384,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -15442,7 +15409,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15452,7 +15419,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15501,7 +15468,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -15528,7 +15495,7 @@ 'connections': set({ tuple( 'mac', - 
'3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15538,7 +15505,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15587,7 +15554,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -15615,7 +15582,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15625,7 +15592,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15674,7 +15641,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -15702,7 +15669,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15712,7 +15679,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15761,7 +15728,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -15789,7 +15756,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15799,7 +15766,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15848,7 +15815,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -15876,7 +15843,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15886,7 +15853,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15935,7 +15902,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -15963,7 +15930,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15973,7 +15940,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16025,7 +15992,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -16053,7 +16020,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16063,7 +16030,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16112,7 +16079,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_factor', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor', 'unit_of_measurement': '%', }) # --- @@ -16140,7 +16107,7 @@ 
'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16150,7 +16117,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16199,7 +16166,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': , }) # --- @@ -16227,7 +16194,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16237,7 +16204,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16286,7 +16253,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_voltage_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_v', 'unit_of_measurement': , }) # --- @@ -16314,7 +16281,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16324,7 +16291,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16371,7 +16338,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -16396,7 +16363,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16406,7 +16373,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16455,7 +16422,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -16482,7 +16449,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16492,7 +16459,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16541,7 +16508,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -16569,7 +16536,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16579,7 +16546,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16628,7 +16595,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l1_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l1_va', 'unit_of_measurement': , }) # --- @@ -16656,7 +16623,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16666,7 +16633,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16715,7 +16682,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l2_va', + 'unique_id': 
'HWE-P1_5c2fafabcdef_active_apparent_power_l2_va', 'unit_of_measurement': , }) # --- @@ -16743,7 +16710,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16753,7 +16720,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16802,7 +16769,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l3_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l3_va', 'unit_of_measurement': , }) # --- @@ -16830,7 +16797,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16840,7 +16807,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16889,7 +16856,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -16917,7 +16884,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16927,7 +16894,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16976,7 +16943,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -17004,7 +16971,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17014,7 +16981,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17063,7 +17030,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -17091,7 +17058,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17101,7 +17068,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17150,7 +17117,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -17178,7 +17145,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17188,7 +17155,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17237,7 +17204,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -17265,7 +17232,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17275,7 +17242,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17324,7 +17291,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -17352,7 +17319,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17362,7 +17329,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17411,7 +17378,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -17439,7 +17406,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17449,7 +17416,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17501,7 +17468,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -17529,7 +17496,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17539,7 +17506,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17588,7 +17555,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l1', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l1', 'unit_of_measurement': '%', }) # --- @@ -17616,7 +17583,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17626,7 +17593,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17675,7 +17642,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l2', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l2', 'unit_of_measurement': '%', }) # --- @@ -17703,7 +17670,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17713,7 +17680,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17762,7 +17729,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l3', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l3', 'unit_of_measurement': '%', }) # --- @@ -17790,7 +17757,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17800,7 +17767,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17852,7 +17819,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -17880,7 +17847,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17890,7 +17857,7 @@ 'identifiers': set({ tuple( 'homewizard', - 
'3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17942,7 +17909,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -17970,7 +17937,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17980,7 +17947,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18032,7 +17999,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -18060,7 +18027,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18070,7 +18037,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18119,7 +18086,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': , }) # --- @@ -18147,7 +18114,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18157,7 +18124,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18206,7 +18173,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l1_var', 'unit_of_measurement': , }) # --- @@ -18234,7 +18201,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18244,7 +18211,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18293,7 +18260,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l2_var', 'unit_of_measurement': , }) # --- @@ -18321,7 +18288,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18331,7 +18298,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18380,7 +18347,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l3_var', 'unit_of_measurement': , }) # --- @@ -18408,7 +18375,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18418,7 +18385,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18467,7 +18434,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -18495,7 +18462,7 @@ 
'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18505,7 +18472,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18554,7 +18521,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -18582,7 +18549,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18592,7 +18559,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18641,7 +18608,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -18669,7 +18636,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18679,7 +18646,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18726,7 +18693,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -18751,7 +18718,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18761,7 +18728,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18810,7 +18777,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- diff --git a/tests/components/homewizard/snapshots/test_switch.ambr b/tests/components/homewizard/snapshots/test_switch.ambr index 68a351c1ebb..c2ef87970f3 100644 --- a/tests/components/homewizard/snapshots/test_switch.ambr +++ b/tests/components/homewizard/snapshots/test_switch.ambr @@ -41,7 +41,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -53,7 +53,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -63,7 +63,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -123,7 +123,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -135,7 +135,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -145,7 +145,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -206,7 +206,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_power_on', + 'unique_id': 'HWE-P1_5c2fafabcdef_power_on', 'unit_of_measurement': None, }) # --- @@ -218,7 +218,7 @@ 
'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -228,7 +228,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -288,7 +288,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -300,7 +300,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -310,7 +310,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -370,7 +370,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'switch_lock', - 'unique_id': 'aabbccddeeff_switch_lock', + 'unique_id': 'HWE-P1_5c2fafabcdef_switch_lock', 'unit_of_measurement': None, }) # --- @@ -382,7 +382,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -392,7 +392,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -453,7 +453,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_power_on', + 'unique_id': 'HWE-P1_5c2fafabcdef_power_on', 'unit_of_measurement': None, }) # --- @@ -465,7 +465,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -475,7 +475,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -535,7 +535,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -547,7 +547,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -557,7 +557,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -617,7 +617,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'switch_lock', - 'unique_id': 'aabbccddeeff_switch_lock', + 'unique_id': 'HWE-P1_5c2fafabcdef_switch_lock', 'unit_of_measurement': None, }) # --- @@ -629,7 +629,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -639,7 +639,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -699,7 +699,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -711,7 +711,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -721,7 +721,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -781,7 +781,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -793,7 +793,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), 
}), 'disabled_by': None, @@ -803,7 +803,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -863,7 +863,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -875,7 +875,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -885,7 +885,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, diff --git a/tests/components/homewizard/test_button.py b/tests/components/homewizard/test_button.py index 928e6f21901..d0a6d92b36f 100644 --- a/tests/components/homewizard/test_button.py +++ b/tests/components/homewizard/test_button.py @@ -79,7 +79,7 @@ async def test_identify_button( with pytest.raises( HomeAssistantError, - match=r"^The local API of the HomeWizard device is disabled$", + match=r"^The local API is disabled$", ): await hass.services.async_call( button.DOMAIN, diff --git a/tests/components/homewizard/test_config_flow.py b/tests/components/homewizard/test_config_flow.py index 8d12a8a1787..984fda8e7a4 100644 --- a/tests/components/homewizard/test_config_flow.py +++ b/tests/components/homewizard/test_config_flow.py @@ -8,7 +8,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant import config_entries -from homeassistant.components import zeroconf +from homeassistant.components import dhcp, zeroconf from homeassistant.components.homewizard.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant @@ -66,7 +66,7 @@ async def test_discovery_flow_works( "path": "/api/v1", "product_name": "Energy Socket", "product_type": "HWE-SKT", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -112,7 +112,7 @@ async def test_discovery_flow_during_onboarding( "path": "/api/v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -149,7 +149,7 @@ async def test_discovery_flow_during_onboarding_disabled_api( "path": "/api/v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -193,7 +193,7 @@ async def test_discovery_disabled_api( "path": "/api/v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -228,7 +228,7 @@ async def test_discovery_missing_data_in_service_info(hass: HomeAssistant) -> No "path": "/api/v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -254,7 +254,7 @@ async def test_discovery_invalid_api(hass: HomeAssistant) -> None: "path": "/api/not_v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -263,6 +263,116 @@ async def test_discovery_invalid_api(hass: HomeAssistant) -> None: assert result["reason"] == "unsupported_api_version" +async def test_dhcp_discovery_updates_entry( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test DHCP discovery updates config entries.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + 
data=dhcp.DhcpServiceInfo( + ip="1.0.0.127", + hostname="HW-p1meter-aabbcc", + macaddress="5c2fafabcdef", + ), + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + assert mock_config_entry.data[CONF_IP_ADDRESS] == "1.0.0.127" + + +@pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + ("exception"), + [(DisabledError), (RequestError)], +) +async def test_dhcp_discovery_updates_entry_fails( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, + exception: Exception, +) -> None: + """Test DHCP discovery updates config entries, but fails to connect.""" + mock_homewizardenergy.device.side_effect = exception + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip="1.0.0.127", + hostname="HW-p1meter-aabbcc", + macaddress="5c2fafabcdef", + ), + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "unknown" + + +async def test_dhcp_discovery_ignores_unknown( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, +) -> None: + """Test DHCP discovery is only used for updates. + + Anything else will just abort the flow. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip="127.0.0.1", + hostname="HW-p1meter-aabbcc", + macaddress="5c2fafabcdef", + ), + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "unknown" + + +async def test_discovery_flow_updates_new_ip( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test discovery setup updates new config data.""" + mock_config_entry.add_to_hass(hass) + + # preflight check, see if the ip address is already in use + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.0.0.127"), + ip_addresses=[ip_address("1.0.0.127")], + port=80, + hostname="p1meter-ddeeff.local.", + type="", + name="", + properties={ + "api_enabled": "1", + "path": "/api/v1", + "product_name": "P1 Meter", + "product_type": "HWE-P1", + "serial": "5c2fafabcdef", + }, + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_config_entry.data[CONF_IP_ADDRESS] == "1.0.0.127" + + @pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( ("exception", "reason"), @@ -341,13 +451,7 @@ async def test_reauth_flow( """Test reauth flow while API is enabled.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -367,13 +471,7 @@ async def test_reauth_error( mock_homewizardenergy.device.side_effect = DisabledError mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert 
result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -382,3 +480,131 @@ async def test_reauth_error( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "api_not_enabled"} + + +async def test_reconfigure( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguration.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {} + + # original entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "1.0.0.127", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # changed entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "1.0.0.127" + + +async def test_reconfigure_nochange( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguration without changing values.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {} + + # original entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "127.0.0.1", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # changed entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + + +async def test_reconfigure_wrongdevice( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test entering ip of other device and prevent changing it based on serial.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {} + + # simulate different serial number, as if user entered wrong IP + mock_homewizardenergy.device.return_value.serial = "not_5c2fafabcdef" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "1.0.0.127", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_device" + + # entry should still be original entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + + +@pytest.mark.parametrize( + ("exception", "reason"), + [(DisabledError, "api_not_enabled"), (RequestError, "network_error")], +) +async def test_reconfigure_cannot_connect( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + reason: str, +) -> None: + """Test reconfiguration fails when not able to connect.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {} + + mock_homewizardenergy.device.side_effect = exception + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "1.0.0.127", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": reason} + assert result["data_schema"]({}) == {CONF_IP_ADDRESS: "127.0.0.1"} + + # attempt with valid IP should work + mock_homewizardenergy.device.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "1.0.0.127", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # changed entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "1.0.0.127" diff --git a/tests/components/homewizard/test_init.py b/tests/components/homewizard/test_init.py index 33412900677..a01f075ee61 100644 --- a/tests/components/homewizard/test_init.py +++ b/tests/components/homewizard/test_init.py @@ -1,17 +1,17 @@ """Tests for the homewizard component.""" +from datetime import timedelta from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory from homewizard_energy.errors import DisabledError import pytest from homeassistant.components.homewizard.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed async def test_load_unload( @@ -97,60 +97,36 @@ async def test_load_removes_reauth_flow( assert len(flows) == 0 -@pytest.mark.parametrize( - ("device_fixture", "old_unique_id", "new_unique_id"), - [ - ( - "HWE-P1", - "homewizard_G001", - "homewizard_gas_meter_G001", - ), - ( - "HWE-P1", - "homewizard_W001", - "homewizard_water_meter_W001", - ), - ( - "HWE-P1", - "homewizard_WW001", - "homewizard_warm_water_meter_WW001", - ), - ( - "HWE-P1", - "homewizard_H001", - "homewizard_heat_meter_H001", - ), - ( - "HWE-P1", - "homewizard_IH001", - "homewizard_inlet_heat_meter_IH001", - ), - ], -) @pytest.mark.usefixtures("mock_homewizardenergy") -async def test_external_sensor_migration( +async def test_disablederror_reloads_integration( hass: HomeAssistant, - entity_registry: er.EntityRegistry, mock_config_entry: MockConfigEntry, - old_unique_id: str, - new_unique_id: str, + mock_homewizardenergy: MagicMock, + freezer: FrozenDateTimeFactory, ) -> None: - """Test unique ID or External sensors are migrated.""" + """Test DisabledError reloads integration.""" mock_config_entry.add_to_hass(hass) - - entity: er.RegistryEntry = entity_registry.async_get_or_create( - domain=Platform.SENSOR, - platform=DOMAIN, - unique_id=old_unique_id, - config_entry=mock_config_entry, - ) - - assert entity.unique_id == old_unique_id - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - entity_migrated = entity_registry.async_get(entity.entity_id) - assert entity_migrated - assert entity_migrated.unique_id == new_unique_id - assert entity_migrated.previous_unique_id == old_unique_id + # Make sure current state is loaded and not reauth flow is active + assert mock_config_entry.state is ConfigEntryState.LOADED + flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) + assert len(flows) == 0 + + # Simulate DisabledError and wait for next update + 
mock_homewizardenergy.device.side_effect = DisabledError() + + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # State should be setup retry and reauth flow should be active + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN diff --git a/tests/components/homewizard/test_number.py b/tests/components/homewizard/test_number.py index ff27fb1b257..623ba018dee 100644 --- a/tests/components/homewizard/test_number.py +++ b/tests/components/homewizard/test_number.py @@ -42,7 +42,7 @@ async def test_number_entities( assert snapshot == device_entry # Test unknown handling - assert state.state == "100.0" + assert state.state == "100" mock_homewizardenergy.state.return_value.brightness = None @@ -85,7 +85,7 @@ async def test_number_entities( mock_homewizardenergy.state_set.side_effect = DisabledError with pytest.raises( HomeAssistantError, - match=r"^The local API of the HomeWizard device is disabled$", + match=r"^The local API is disabled$", ): await hass.services.async_call( number.DOMAIN, diff --git a/tests/components/homewizard/test_sensor.py b/tests/components/homewizard/test_sensor.py index abcd6a879c5..60077c2cdf9 100644 --- a/tests/components/homewizard/test_sensor.py +++ b/tests/components/homewizard/test_sensor.py @@ -3,18 +3,17 @@ from unittest.mock import MagicMock from homewizard_energy.errors import RequestError -from homewizard_energy.models import Data +from homewizard_energy.v1.models import Data import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.homewizard import DOMAIN from homeassistant.components.homewizard.const import UPDATE_INTERVAL -from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import async_fire_time_changed pytestmark = [ pytest.mark.usefixtures("init_integration"), @@ -815,49 +814,3 @@ async def test_entities_not_created_for_device( """Ensures entities for a specific device are not created.""" for entity_id in entity_ids: assert not hass.states.get(entity_id) - - -async def test_gas_meter_migrated( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test old gas meter sensor is migrated.""" - entity_registry.async_get_or_create( - Platform.SENSOR, - DOMAIN, - "aabbccddeeff_total_gas_m3", - ) - - await hass.config_entries.async_reload(init_integration.entry_id) - await hass.async_block_till_done() - - entity_id = "sensor.homewizard_aabbccddeeff_total_gas_m3" - - assert (entity_entry := entity_registry.async_get(entity_id)) - assert snapshot(name=f"{entity_id}:entity-registry") == entity_entry - - # Make really sure this happens - assert entity_entry.previous_unique_id == "aabbccddeeff_total_gas_m3" - - -async def test_gas_unique_id_removed( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test old gas meter id sensor is removed.""" - 
entity_registry.async_get_or_create( - Platform.SENSOR, - DOMAIN, - "aabbccddeeff_gas_unique_id", - ) - - await hass.config_entries.async_reload(init_integration.entry_id) - await hass.async_block_till_done() - - entity_id = "sensor.homewizard_aabbccddeeff_gas_unique_id" - - assert not entity_registry.async_get(entity_id) diff --git a/tests/components/homewizard/test_switch.py b/tests/components/homewizard/test_switch.py index b9e812620e8..d9f1ac26b4f 100644 --- a/tests/components/homewizard/test_switch.py +++ b/tests/components/homewizard/test_switch.py @@ -174,7 +174,7 @@ async def test_switch_entities( with pytest.raises( HomeAssistantError, - match=r"^The local API of the HomeWizard device is disabled$", + match=r"^The local API is disabled$", ): await hass.services.async_call( switch.DOMAIN, @@ -185,7 +185,7 @@ async def test_switch_entities( with pytest.raises( HomeAssistantError, - match=r"^The local API of the HomeWizard device is disabled$", + match=r"^The local API is disabled$", ): await hass.services.async_call( switch.DOMAIN, diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index d0693531006..e8c4ab15b3d 100644 --- a/tests/components/homeworks/test_config_flow.py +++ b/tests/components/homeworks/test_config_flow.py @@ -17,7 +17,7 @@ from homeassistant.components.homeworks.const import ( CONF_RELEASE_DELAY, DOMAIN, ) -from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_HOST, CONF_NAME, @@ -241,10 +241,7 @@ async def test_reconfigure_flow( """Test reconfigure flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -309,10 +306,7 @@ async def test_reconfigure_flow_flow_duplicate( ) entry2.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": entry1.entry_id}, - ) + result = await entry1.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -334,10 +328,7 @@ async def test_reconfigure_flow_flow_no_change( """Test reconfigure flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -382,10 +373,7 @@ async def test_reconfigure_flow_credentials_password_only( """Test reconfigure flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" diff --git a/tests/components/honeywell/snapshots/test_climate.ambr b/tests/components/honeywell/snapshots/test_climate.ambr index 25bb73851c6..f26064b335a 100644 --- a/tests/components/honeywell/snapshots/test_climate.ambr +++ 
b/tests/components/honeywell/snapshots/test_climate.ambr @@ -1,7 +1,6 @@ # serializer version: 1 # name: test_static_attributes ReadOnlyDict({ - 'aux_heat': 'off', 'current_humidity': 50, 'current_temperature': 20, 'fan_action': 'idle', @@ -30,7 +29,7 @@ 'away', 'hold', ]), - 'supported_features': , + 'supported_features': , 'target_temp_high': None, 'target_temp_low': None, 'temperature': None, diff --git a/tests/components/honeywell/test_climate.py b/tests/components/honeywell/test_climate.py index 55a55f7d7e7..73c5ff33dbc 100644 --- a/tests/components/honeywell/test_climate.py +++ b/tests/components/honeywell/test_climate.py @@ -5,12 +5,12 @@ from unittest.mock import MagicMock from aiohttp import ClientConnectionError import aiosomecomfort +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from syrupy.filters import props from homeassistant.components.climate import ( - ATTR_AUX_HEAT, ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, @@ -22,7 +22,6 @@ from homeassistant.components.climate import ( FAN_ON, PRESET_AWAY, PRESET_NONE, - SERVICE_SET_AUX_HEAT, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, @@ -31,6 +30,8 @@ from homeassistant.components.climate import ( ) from homeassistant.components.honeywell.climate import ( DOMAIN, + MODE_PERMANENT_HOLD, + MODE_TEMPORARY_HOLD, PRESET_HOLD, RETRY, SCAN_INTERVAL, @@ -40,7 +41,6 @@ from homeassistant.const import ( ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant @@ -221,53 +221,6 @@ async def test_mode_service_calls( ) -async def test_auxheat_service_calls( - hass: HomeAssistant, device: MagicMock, config_entry: MagicMock -) -> None: - """Test controlling the auxheat through service calls.""" - await init_integration(hass, config_entry) - entity_id = f"climate.{device.name}" - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: True}, - blocking=True, - ) - device.set_system_mode.assert_called_once_with("emheat") - - device.set_system_mode.reset_mock() - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: False}, - blocking=True, - ) - device.set_system_mode.assert_called_once_with("heat") - - device.set_system_mode.reset_mock() - device.set_system_mode.side_effect = aiosomecomfort.SomeComfortError - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: True}, - blocking=True, - ) - device.set_system_mode.assert_called_once_with("emheat") - - device.set_system_mode.reset_mock() - device.set_system_mode.side_effect = aiosomecomfort.SomeComfortError - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: False}, - blocking=True, - ) - - async def test_fan_modes_service_calls( hass: HomeAssistant, device: MagicMock, config_entry: MagicMock ) -> None: @@ -1240,37 +1193,6 @@ async def test_async_update_errors( assert state.state == "unavailable" -async def test_aux_heat_off_service_call( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device: MagicMock, - config_entry: MagicMock, -) -> None: - """Test aux heat off turns of system when no heat configured.""" - device.raw_ui_data["SwitchHeatAllowed"] = False - 
device.raw_ui_data["SwitchAutoAllowed"] = False - device.raw_ui_data["SwitchEmergencyHeatAllowed"] = True - - await init_integration(hass, config_entry) - - entity_id = f"climate.{device.name}" - entry = entity_registry.async_get(entity_id) - assert entry - - state = hass.states.get(entity_id) - assert state is not None - assert state.state != STATE_UNAVAILABLE - assert state.state == HVACMode.OFF - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: False}, - blocking=True, - ) - device.set_system_mode.assert_called_once_with("off") - - async def test_unique_id( hass: HomeAssistant, device: MagicMock, @@ -1288,3 +1210,59 @@ async def test_unique_id( await init_integration(hass, config_entry) entity_entry = entity_registry.async_get(f"climate.{device.name}") assert entity_entry.unique_id == str(device.deviceid) + + +async def test_preset_mode( + hass: HomeAssistant, + device: MagicMock, + config_entry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test mode settings properly reflected.""" + await init_integration(hass, config_entry) + entity_id = f"climate.{device.name}" + + device.raw_ui_data["StatusHeat"] = 3 + device.raw_ui_data["StatusCool"] = 3 + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + device.raw_ui_data["StatusHeat"] = MODE_TEMPORARY_HOLD + device.raw_ui_data["StatusCool"] = MODE_TEMPORARY_HOLD + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOLD + + device.raw_ui_data["StatusHeat"] = MODE_PERMANENT_HOLD + device.raw_ui_data["StatusCool"] = MODE_PERMANENT_HOLD + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOLD + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, + blocking=True, + ) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY + + device.raw_ui_data["StatusHeat"] = 3 + device.raw_ui_data["StatusCool"] = 3 + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE diff --git a/tests/components/honeywell/test_config_flow.py b/tests/components/honeywell/test_config_flow.py index 7cd987f0d83..ed9c86f5e10 100644 --- a/tests/components/honeywell/test_config_flow.py +++ b/tests/components/honeywell/test_config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components.honeywell.const import ( CONF_HEAT_AWAY_TEMPERATURE, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigEntryState +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -129,21 +129,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: unique_id="test-username", ) mock_entry.add_to_hass(hass) - with patch( - "homeassistant.components.honeywell.async_setup_entry", - return_value=True, - ): - result = 
await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, - ) - - await hass.async_block_till_done() + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -177,16 +163,7 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, client: MagicMock) -> ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, - ) - await hass.async_block_till_done() + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -226,17 +203,7 @@ async def test_reauth_flow_connnection_error( unique_id="test-username", ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, - ) - await hass.async_block_till_done() - + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/html5/test_config_flow.py b/tests/components/html5/test_config_flow.py new file mode 100644 index 00000000000..ca0b3da0389 --- /dev/null +++ b/tests/components/html5/test_config_flow.py @@ -0,0 +1,203 @@ +"""Test the HTML5 config flow.""" + +from unittest.mock import patch + +import pytest + +from homeassistant import config_entries, data_entry_flow +from homeassistant.components.html5.const import ( + ATTR_VAPID_EMAIL, + ATTR_VAPID_PRV_KEY, + ATTR_VAPID_PUB_KEY, + DOMAIN, +) +from homeassistant.components.html5.issues import ( + FAILED_IMPORT_TRANSLATION_KEY, + SUCCESSFUL_IMPORT_TRANSLATION_KEY, +) +from homeassistant.const import CONF_NAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +import homeassistant.helpers.issue_registry as ir + +MOCK_CONF = { + ATTR_VAPID_EMAIL: "test@example.com", + ATTR_VAPID_PRV_KEY: "h6acSRds8_KR8hT9djD8WucTL06Gfe29XXyZ1KcUjN8", +} +MOCK_CONF_PUB_KEY = "BIUtPN7Rq_8U7RBEqClZrfZ5dR9zPCfvxYPtLpWtRVZTJEc7lzv2dhzDU6Aw1m29Ao0-UA1Uq6XO9Df8KALBKqA" + + +async def test_step_user_success(hass: HomeAssistant) -> None: + """Test a successful user config flow.""" + + with patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data=MOCK_CONF.copy(), + ) + + await hass.async_block_till_done() + + assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["data"] == { + ATTR_VAPID_PRV_KEY: MOCK_CONF[ATTR_VAPID_PRV_KEY], + ATTR_VAPID_PUB_KEY: MOCK_CONF_PUB_KEY, + ATTR_VAPID_EMAIL: MOCK_CONF[ATTR_VAPID_EMAIL], + CONF_NAME: DOMAIN, + } + + assert mock_setup_entry.call_count == 1 + + +async def test_step_user_success_generate(hass: HomeAssistant) -> None: + """Test a successful user config flow, generating a key pair.""" + + with patch( + 
"homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + conf = {ATTR_VAPID_EMAIL: MOCK_CONF[ATTR_VAPID_EMAIL]} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=conf + ) + + await hass.async_block_till_done() + + assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["data"][ATTR_VAPID_EMAIL] == MOCK_CONF[ATTR_VAPID_EMAIL] + + assert mock_setup_entry.call_count == 1 + + +async def test_step_user_new_form(hass: HomeAssistant) -> None: + """Test new user input.""" + + with patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=None + ) + + await hass.async_block_till_done() + + assert result["type"] is data_entry_flow.FlowResultType.FORM + assert mock_setup_entry.call_count == 0 + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], MOCK_CONF + ) + assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY + assert mock_setup_entry.call_count == 1 + + +@pytest.mark.parametrize( + ("key", "value"), + [ + (ATTR_VAPID_PRV_KEY, "invalid"), + ], +) +async def test_step_user_form_invalid_key( + hass: HomeAssistant, key: str, value: str +) -> None: + """Test invalid user input.""" + + with patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + bad_conf = MOCK_CONF.copy() + bad_conf[key] = value + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=bad_conf + ) + + await hass.async_block_till_done() + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert mock_setup_entry.call_count == 0 + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], MOCK_CONF + ) + assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY + assert mock_setup_entry.call_count == 1 + + +async def test_step_import_good( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test valid import input.""" + + with ( + patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + conf = MOCK_CONF.copy() + conf[ATTR_VAPID_PUB_KEY] = MOCK_CONF_PUB_KEY + conf["random_key"] = "random_value" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=conf + ) + + await hass.async_block_till_done() + + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["data"] == { + ATTR_VAPID_PRV_KEY: conf[ATTR_VAPID_PRV_KEY], + ATTR_VAPID_PUB_KEY: MOCK_CONF_PUB_KEY, + ATTR_VAPID_EMAIL: conf[ATTR_VAPID_EMAIL], + CONF_NAME: DOMAIN, + } + + assert mock_setup_entry.call_count == 1 + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) + assert issue + assert issue.translation_key == SUCCESSFUL_IMPORT_TRANSLATION_KEY + + +@pytest.mark.parametrize( + ("key", "value"), + [ + (ATTR_VAPID_PRV_KEY, "invalid"), + ], +) +async def test_step_import_bad( + hass: HomeAssistant, issue_registry: ir.IssueRegistry, key: str, value: str +) -> None: + """Test invalid import input.""" + + with ( + patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + 
bad_conf = MOCK_CONF.copy() + bad_conf[key] = value + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=bad_conf + ) + + await hass.async_block_till_done() + + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert mock_setup_entry.call_count == 0 + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, f"deprecated_yaml_{DOMAIN}") + assert issue + assert issue.translation_key == FAILED_IMPORT_TRANSLATION_KEY diff --git a/tests/components/html5/test_init.py b/tests/components/html5/test_init.py new file mode 100644 index 00000000000..290cb381296 --- /dev/null +++ b/tests/components/html5/test_init.py @@ -0,0 +1,44 @@ +"""Test the HTML5 setup.""" + +from homeassistant.core import HomeAssistant +import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + +NOTIFY_CONF = { + "notify": [ + { + "platform": "html5", + "name": "html5", + "vapid_pub_key": "BIUtPN7Rq_8U7RBEqClZrfZ5dR9zPCfvxYPtLpWtRVZTJEc7lzv2dhzDU6Aw1m29Ao0-UA1Uq6XO9Df8KALBKqA", + "vapid_prv_key": "h6acSRds8_KR8hT9djD8WucTL06Gfe29XXyZ1KcUjN8", + "vapid_email": "test@example.com", + } + ] +} + + +async def test_setup_entry( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test setup of a good config entry.""" + config_entry = MockConfigEntry(domain="html5", data={}) + config_entry.add_to_hass(hass) + assert await async_setup_component(hass, "html5", {}) + + assert len(issue_registry.issues) == 0 + + +async def test_setup_entry_issue( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test setup of an imported config entry with deprecated YAML.""" + config_entry = MockConfigEntry(domain="html5", data={}) + config_entry.add_to_hass(hass) + assert await async_setup_component(hass, "notify", NOTIFY_CONF) + assert await async_setup_component(hass, "html5", NOTIFY_CONF) + + assert len(issue_registry.issues) == 1 diff --git a/tests/components/html5/test_notify.py b/tests/components/html5/test_notify.py index 42ca6067418..0d9388907a9 100644 --- a/tests/components/html5/test_notify.py +++ b/tests/components/html5/test_notify.py @@ -94,7 +94,7 @@ async def test_get_service_with_no_json(hass: HomeAssistant) -> None: await async_setup_component(hass, "http", {}) m = mock_open() with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) assert service is not None @@ -109,7 +109,7 @@ async def test_dismissing_message(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -138,7 +138,7 @@ async def test_sending_message(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -169,7 +169,7 @@ async def test_fcm_key_include(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): 
- service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -194,7 +194,7 @@ async def test_fcm_send_with_unknown_priority(mock_wp, hass: HomeAssistant) -> N m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -219,7 +219,7 @@ async def test_fcm_no_targets(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -244,7 +244,7 @@ async def test_fcm_additional_data(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -479,7 +479,7 @@ async def test_callback_view_with_jwt( mock_wp().send().status_code = 201 await hass.services.async_call( "notify", - "notify", + "html5", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) @@ -495,7 +495,7 @@ async def test_callback_view_with_jwt( assert push_payload["body"] == "Hello" assert push_payload["icon"] == "beer.png" - bearer_token = "Bearer {}".format(push_payload["data"]["jwt"]) + bearer_token = f"Bearer {push_payload['data']['jwt']}" resp = await client.post( PUBLISH_URL, json={"type": "push"}, headers={AUTHORIZATION: bearer_token} @@ -516,7 +516,7 @@ async def test_send_fcm_without_targets( mock_wp().send().status_code = 201 await hass.services.async_call( "notify", - "notify", + "html5", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) @@ -541,7 +541,7 @@ async def test_send_fcm_expired( mock_wp().send().status_code = 410 await hass.services.async_call( "notify", - "notify", + "html5", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) @@ -566,7 +566,7 @@ async def test_send_fcm_expired_save_fails( mock_wp().send().status_code = 410 await hass.services.async_call( "notify", - "notify", + "html5", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) diff --git a/tests/components/http/test_auth.py b/tests/components/http/test_auth.py index 76c512c9686..052c0031469 100644 --- a/tests/components/http/test_auth.py +++ b/tests/components/http/test_auth.py @@ -312,7 +312,7 @@ async def test_auth_access_signed_path_with_refresh_token( assert data["user_id"] == refresh_token.user.id # Use signature on other path - req = await client.get("/another_path?{}".format(signed_path.split("?")[1])) + req = await client.get(f"/another_path?{signed_path.split('?')[1]}") assert req.status == HTTPStatus.UNAUTHORIZED # We only allow GET diff --git a/tests/components/http/test_ban.py b/tests/components/http/test_ban.py index 41f36dad2df..59011de0cfd 100644 --- a/tests/components/http/test_ban.py +++ b/tests/components/http/test_ban.py @@ -3,7 +3,7 @@ from http import HTTPStatus from ipaddress import ip_address import os -from unittest.mock import Mock, mock_open, patch 
+from unittest.mock import AsyncMock, Mock, mock_open, patch from aiohttp import web from aiohttp.web_exceptions import HTTPUnauthorized @@ -34,14 +34,10 @@ BANNED_IPS_WITH_SUPERVISOR = [*BANNED_IPS, SUPERVISOR_IP] @pytest.fixture(name="hassio_env") -def hassio_env_fixture(): +def hassio_env_fixture(supervisor_is_connected: AsyncMock): """Fixture to inject hassio env.""" with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value={"result": "ok", "data": {}}, - ), patch.dict(os.environ, {"SUPERVISOR_TOKEN": "123456"}), ): yield @@ -201,6 +197,7 @@ async def test_access_from_supervisor_ip( hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, hassio_env, + resolution_info: AsyncMock, ) -> None: """Test accessing to server from supervisor IP.""" app = web.Application() @@ -222,17 +219,7 @@ async def test_access_from_supervisor_ip( manager = app[KEY_BAN_MANAGER] - with patch( - "homeassistant.components.hassio.HassIO.get_resolution_info", - return_value={ - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - ): - assert await async_setup_component(hass, "hassio", {"hassio": {}}) + assert await async_setup_component(hass, "hassio", {"hassio": {}}) m_open = mock_open() diff --git a/tests/components/http/test_init.py b/tests/components/http/test_init.py index 2895209b5f9..4d96f2267fa 100644 --- a/tests/components/http/test_init.py +++ b/tests/components/http/test_init.py @@ -12,8 +12,10 @@ from unittest.mock import Mock, patch import pytest from homeassistant.auth.providers.homeassistant import HassAuthProvider -from homeassistant.components import http +from homeassistant.components import cloud, http +from homeassistant.components.cloud import CloudNotAvailable from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.http import KEY_HASS from homeassistant.helpers.network import NoURLAvailableError from homeassistant.setup import async_setup_component @@ -545,3 +547,150 @@ async def test_register_static_paths( "event loop, instead call " "`await hass.http.async_register_static_paths" ) in caplog.text + + +async def test_ssl_issue_if_no_urls_configured( + hass: HomeAssistant, + tmp_path: Path, + issue_registry: ir.IssueRegistry, +) -> None: + """Test raising SSL issue if no external or internal URL is configured.""" + + assert hass.config.external_url is None + assert hass.config.internal_url is None + + cert_path, key_path, _ = await hass.async_add_executor_job( + _setup_empty_ssl_pem_files, tmp_path + ) + + with ( + patch("ssl.SSLContext.load_cert_chain"), + patch( + "homeassistant.util.ssl.server_context_modern", + side_effect=server_context_modern, + ), + ): + assert await async_setup_component( + hass, + "http", + {"http": {"ssl_certificate": cert_path, "ssl_key": key_path}}, + ) + await hass.async_start() + await hass.async_block_till_done() + + assert ("http", "ssl_configured_without_configured_urls") in issue_registry.issues + + +async def test_ssl_issue_if_using_cloud( + hass: HomeAssistant, + tmp_path: Path, + issue_registry: ir.IssueRegistry, +) -> None: + """Test raising no SSL issue if not right configured but using cloud.""" + assert hass.config.external_url is None + assert hass.config.internal_url is None + + cert_path, key_path, _ = await hass.async_add_executor_job( + _setup_empty_ssl_pem_files, tmp_path + ) + + with ( + patch("ssl.SSLContext.load_cert_chain"), + 
patch.object(cloud, "async_remote_ui_url", return_value="https://example.com"), + patch( + "homeassistant.util.ssl.server_context_modern", + side_effect=server_context_modern, + ), + ): + assert await async_setup_component( + hass, + "http", + {"http": {"ssl_certificate": cert_path, "ssl_key": key_path}}, + ) + await hass.async_start() + await hass.async_block_till_done() + + assert ( + "http", + "ssl_configured_without_configured_urls", + ) not in issue_registry.issues + + +async def test_ssl_issue_if_not_connected_to_cloud( + hass: HomeAssistant, + tmp_path: Path, + issue_registry: ir.IssueRegistry, +) -> None: + """Test raising SSL issue if no URLs are configured and cloud is not connected.""" + assert hass.config.external_url is None + assert hass.config.internal_url is None + + cert_path, key_path, _ = await hass.async_add_executor_job( + _setup_empty_ssl_pem_files, tmp_path + ) + + with ( + patch("ssl.SSLContext.load_cert_chain"), + patch( + "homeassistant.util.ssl.server_context_modern", + side_effect=server_context_modern, + ), + patch( + "homeassistant.components.cloud.async_remote_ui_url", + side_effect=CloudNotAvailable, + ), + ): + assert await async_setup_component( + hass, + "http", + {"http": {"ssl_certificate": cert_path, "ssl_key": key_path}}, + ) + await hass.async_start() + await hass.async_block_till_done() + + assert ("http", "ssl_configured_without_configured_urls") in issue_registry.issues + + +@pytest.mark.parametrize( + ("external_url", "internal_url"), + [ + ("https://example.com", "https://example.local"), + (None, "http://example.local"), + ("https://example.com", None), + ], +) +async def test_ssl_issue_urls_configured( + hass: HomeAssistant, + tmp_path: Path, + issue_registry: ir.IssueRegistry, + external_url: str | None, + internal_url: str | None, +) -> None: + """Test no SSL issue is raised if an external or internal URL is configured.""" + + cert_path, key_path, _ = await hass.async_add_executor_job( + _setup_empty_ssl_pem_files, tmp_path + ) + + hass.config.external_url = external_url + hass.config.internal_url = internal_url + + with ( + patch("ssl.SSLContext.load_cert_chain"), + patch( + "homeassistant.util.ssl.server_context_modern", + side_effect=server_context_modern, + ), + ): + assert await async_setup_component( + hass, + "http", + {"http": {"ssl_certificate": cert_path, "ssl_key": key_path}}, + ) + await hass.async_start() + await hass.async_block_till_done() + + assert ( + "http", + "ssl_configured_without_configured_urls", + ) not in issue_registry.issues diff --git a/tests/components/huawei_lte/test_config_flow.py b/tests/components/huawei_lte/test_config_flow.py index 862af02963c..a9a147eb17e 100644 --- a/tests/components/huawei_lte/test_config_flow.py +++ b/tests/components/huawei_lte/test_config_flow.py @@ -385,15 +385,7 @@ async def test_reauth( ) entry.add_to_hass(hass) - context = { - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - } - result = await hass.config_entries.flow.async_init( - DOMAIN, context=context, data=entry.data - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["data_schema"] is not None diff --git a/tests/components/hue/test_light_v2.py b/tests/components/hue/test_light_v2.py index 417670a3769..2b978ffc33f 100644 --- a/tests/components/hue/test_light_v2.py +++ b/tests/components/hue/test_light_v2.py @@ -175,7 +175,7 @@ async def test_light_turn_on_service( assert
len(mock_bridge_v2.mock_requests) == 6 assert mock_bridge_v2.mock_requests[5]["json"]["color_temperature"]["mirek"] == 500 - # test enable effect + # test enable an effect await hass.services.async_call( "light", "turn_on", @@ -184,8 +184,20 @@ async def test_light_turn_on_service( ) assert len(mock_bridge_v2.mock_requests) == 7 assert mock_bridge_v2.mock_requests[6]["json"]["effects"]["effect"] == "candle" + # fire event to update effect in HA state + event = { + "id": "3a6710fa-4474-4eba-b533-5e6e72968feb", + "type": "light", + "effects": {"status": "candle"}, + } + mock_bridge_v2.api.emit_event("update", event) + await hass.async_block_till_done() + test_light = hass.states.get(test_light_id) + assert test_light is not None + assert test_light.attributes["effect"] == "candle" # test disable effect + # it should send a request with effect set to "no_effect" await hass.services.async_call( "light", "turn_on", @@ -194,6 +206,28 @@ async def test_light_turn_on_service( ) assert len(mock_bridge_v2.mock_requests) == 8 assert mock_bridge_v2.mock_requests[7]["json"]["effects"]["effect"] == "no_effect" + # fire event to update effect in HA state + event = { + "id": "3a6710fa-4474-4eba-b533-5e6e72968feb", + "type": "light", + "effects": {"status": "no_effect"}, + } + mock_bridge_v2.api.emit_event("update", event) + await hass.async_block_till_done() + test_light = hass.states.get(test_light_id) + assert test_light is not None + assert test_light.attributes["effect"] == "None" + + # test turn on with useless effect + # it should not send an effect in the request if the device has no effect active + await hass.services.async_call( + "light", + "turn_on", + {"entity_id": test_light_id, "effect": "None"}, + blocking=True, + ) + assert len(mock_bridge_v2.mock_requests) == 9 + assert "effects" not in mock_bridge_v2.mock_requests[8]["json"] # test timed effect await hass.services.async_call( @@ -202,11 +236,11 @@ async def test_light_turn_on_service( {"entity_id": test_light_id, "effect": "sunrise", "transition": 6}, blocking=True, ) - assert len(mock_bridge_v2.mock_requests) == 9 + assert len(mock_bridge_v2.mock_requests) == 10 assert ( - mock_bridge_v2.mock_requests[8]["json"]["timed_effects"]["effect"] == "sunrise" + mock_bridge_v2.mock_requests[9]["json"]["timed_effects"]["effect"] == "sunrise" ) - assert mock_bridge_v2.mock_requests[8]["json"]["timed_effects"]["duration"] == 6000 + assert mock_bridge_v2.mock_requests[9]["json"]["timed_effects"]["duration"] == 6000 # test enabling effect should ignore color temperature await hass.services.async_call( @@ -215,9 +249,9 @@ async def test_light_turn_on_service( {"entity_id": test_light_id, "effect": "candle", "color_temp": 500}, blocking=True, ) - assert len(mock_bridge_v2.mock_requests) == 10 - assert mock_bridge_v2.mock_requests[9]["json"]["effects"]["effect"] == "candle" - assert "color_temperature" not in mock_bridge_v2.mock_requests[9]["json"] + assert len(mock_bridge_v2.mock_requests) == 11 + assert mock_bridge_v2.mock_requests[10]["json"]["effects"]["effect"] == "candle" + assert "color_temperature" not in mock_bridge_v2.mock_requests[10]["json"] # test enabling effect should ignore xy color await hass.services.async_call( @@ -226,9 +260,9 @@ async def test_light_turn_on_service( {"entity_id": test_light_id, "effect": "candle", "xy_color": [0.123, 0.123]}, blocking=True, ) - assert len(mock_bridge_v2.mock_requests) == 11 - assert mock_bridge_v2.mock_requests[10]["json"]["effects"]["effect"] == "candle" - assert "xy_color" not in
mock_bridge_v2.mock_requests[9]["json"] + assert len(mock_bridge_v2.mock_requests) == 12 + assert mock_bridge_v2.mock_requests[11]["json"]["effects"]["effect"] == "candle" + assert "xy_color" not in mock_bridge_v2.mock_requests[11]["json"] async def test_light_turn_off_service( diff --git a/tests/components/humidifier/conftest.py b/tests/components/humidifier/conftest.py new file mode 100644 index 00000000000..9fe1720ffc0 --- /dev/null +++ b/tests/components/humidifier/conftest.py @@ -0,0 +1,69 @@ +"""Fixtures for Humidifier platform tests.""" + +from collections.abc import Generator + +import pytest + +from homeassistant.components.humidifier import DOMAIN as HUMIDIFIER_DOMAIN +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, +) + + +class MockFlow(ConfigFlow): + """Test flow.""" + + +@pytest.fixture +def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: + """Mock config flow.""" + mock_platform(hass, "test.config_flow") + + with mock_config_flow("test", MockFlow): + yield + + +@pytest.fixture +def register_test_integration( + hass: HomeAssistant, config_flow_fixture: None +) -> Generator: + """Provide a mocked integration for tests.""" + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + async def help_async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [HUMIDIFIER_DOMAIN] + ) + return True + + async def help_async_unload_entry( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Unload test config entry.""" + return await hass.config_entries.async_unload_platforms( + config_entry, [Platform.HUMIDIFIER] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + + return config_entry diff --git a/tests/components/humidifier/test_init.py b/tests/components/humidifier/test_init.py index b31750a3a3b..ce54863736b 100644 --- a/tests/components/humidifier/test_init.py +++ b/tests/components/humidifier/test_init.py @@ -1,23 +1,25 @@ """The tests for the humidifier component.""" -from enum import Enum -from types import ModuleType from unittest.mock import MagicMock import pytest -from homeassistant.components import humidifier from homeassistant.components.humidifier import ( - ATTR_MODE, + ATTR_HUMIDITY, + DOMAIN as HUMIDIFIER_DOMAIN, + MODE_ECO, + MODE_NORMAL, + SERVICE_SET_HUMIDITY, HumidifierEntity, HumidifierEntityFeature, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError -from tests.common import help_test_all, import_and_test_deprecated_constant_enum +from tests.common import MockConfigEntry, MockEntity, setup_test_component_platform -class MockHumidifierEntity(HumidifierEntity): +class MockHumidifierEntity(MockEntity, HumidifierEntity): """Mock Humidifier device to use in tests.""" @property @@ -48,56 +50,68 @@ async def test_sync_turn_off(hass: HomeAssistant) -> None: assert humidifier.turn_off.called -def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: - return [(enum_field, constant_prefix) for enum_field in enum] - - -@pytest.mark.parametrize( - "module", - [humidifier,
humidifier.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(humidifier.HumidifierEntityFeature, "SUPPORT_") - + _create_tuples(humidifier.HumidifierDeviceClass, "DEVICE_CLASS_"), -) -@pytest.mark.parametrize(("module"), [humidifier, humidifier.const]) -def test_deprecated_constants( +async def test_humidity_validation( + hass: HomeAssistant, + register_test_integration: MockConfigEntry, caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - module: ModuleType, ) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, constant_prefix, "2025.1" + """Test validation for humidity.""" + + class MockHumidifierEntityHumidity(MockEntity, HumidifierEntity): + """Mock humidifier entity with a settable target humidity.""" + + _attr_supported_features = HumidifierEntityFeature.MODES + _attr_available_modes = [MODE_NORMAL, MODE_ECO] + _attr_mode = MODE_NORMAL + _attr_target_humidity = 50 + _attr_min_humidity = 50 + _attr_max_humidity = 60 + + def set_humidity(self, humidity: int) -> None: + """Set new target humidity.""" + self._attr_target_humidity = humidity + + test_humidifier = MockHumidifierEntityHumidity( + name="Test", + unique_id="unique_humidifier_test", ) + setup_test_component_platform( + hass, HUMIDIFIER_DOMAIN, entities=[test_humidifier], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) + await hass.async_block_till_done() -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" + state = hass.states.get("humidifier.test") + assert state.attributes.get(ATTR_HUMIDITY) == 50 - class MockHumidifierEntity(HumidifierEntity): - _attr_mode = "mode1" + with pytest.raises( + ServiceValidationError, + match="Provided humidity 1 is not valid. Accepted range is 50 to 60", + ) as exc: + await hass.services.async_call( + HUMIDIFIER_DOMAIN, + SERVICE_SET_HUMIDITY, + { + "entity_id": "humidifier.test", + ATTR_HUMIDITY: "1", + }, + blocking=True, + ) - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 + assert exc.value.translation_key == "humidity_out_of_range" + assert "Check valid humidity 1 in range 50 - 60" in caplog.text - entity = MockHumidifierEntity() - assert entity.supported_features_compat is HumidifierEntityFeature(1) - assert "MockHumidifierEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "HumidifierEntityFeature.MODES" in caplog.text - caplog.clear() - assert entity.supported_features_compat is HumidifierEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text - - assert entity.state_attributes[ATTR_MODE] == "mode1" + with pytest.raises( + ServiceValidationError, + match="Provided humidity 70 is not valid.
Accepted range is 50 to 60", + ) as exc: + await hass.services.async_call( + HUMIDIFIER_DOMAIN, + SERVICE_SET_HUMIDITY, + { + "entity_id": "humidifier.test", + ATTR_HUMIDITY: "70", + }, + blocking=True, + ) diff --git a/tests/components/hunterdouglas_powerview/conftest.py b/tests/components/hunterdouglas_powerview/conftest.py index d4433f93dcb..ea40ba4ecc6 100644 --- a/tests/components/hunterdouglas_powerview/conftest.py +++ b/tests/components/hunterdouglas_powerview/conftest.py @@ -33,15 +33,15 @@ def mock_hunterdouglas_hub( """Return a mocked Powerview Hub with all data populated.""" with ( patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data", return_value=load_json_object_fixture(device_json, DOMAIN), ), patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_home_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_home_data", return_value=load_json_object_fixture(home_json, DOMAIN), ), patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_firmware", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_firmware", return_value=load_json_object_fixture(firmware_json, DOMAIN), ), patch( @@ -111,7 +111,7 @@ def firmware_json(api_version: int) -> str: def rooms_json(api_version: int) -> str: """Return the get_resources fixture for a specific device.""" if api_version == 1: - return "gen2/rooms.json" + return "gen1/rooms.json" if api_version == 2: return "gen2/rooms.json" if api_version == 3: @@ -124,7 +124,7 @@ def rooms_json(api_version: int) -> str: def scenes_json(api_version: int) -> str: """Return the get_resources fixture for a specific device.""" if api_version == 1: - return "gen2/scenes.json" + return "gen1/scenes.json" if api_version == 2: return "gen2/scenes.json" if api_version == 3: @@ -137,7 +137,7 @@ def scenes_json(api_version: int) -> str: def shades_json(api_version: int) -> str: """Return the get_resources fixture for a specific device.""" if api_version == 1: - return "gen2/shades.json" + return "gen1/shades.json" if api_version == 2: return "gen2/shades.json" if api_version == 3: diff --git a/tests/components/hunterdouglas_powerview/const.py b/tests/components/hunterdouglas_powerview/const.py index 5a912a63a17..65b03fd5ec2 100644 --- a/tests/components/hunterdouglas_powerview/const.py +++ b/tests/components/hunterdouglas_powerview/const.py @@ -6,6 +6,7 @@ from homeassistant import config_entries from homeassistant.components import dhcp, zeroconf MOCK_MAC = "AA::BB::CC::DD::EE::FF" +MOCK_SERIAL = "A1B2C3D4E5G6H7" HOMEKIT_DISCOVERY_GEN2 = zeroconf.ZeroconfServiceInfo( ip_address="1.2.3.4", @@ -41,7 +42,7 @@ ZEROCONF_DISCOVERY_GEN3 = zeroconf.ZeroconfServiceInfo( ip_address="1.2.3.4", ip_addresses=[IPv4Address("1.2.3.4")], hostname="mock_hostname", - name="Powerview Generation 3._powerview-g3._tcp.local.", + name="Powerview Generation 3._PowerView-G3._tcp.local.", port=None, properties={}, type="mock_type", diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/rooms.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/rooms.json new file mode 100644 index 00000000000..4ddcccd466e --- /dev/null +++ b/tests/components/hunterdouglas_powerview/fixtures/gen1/rooms.json @@ -0,0 +1,13 @@ +{ + "roomIds": [4896], + "roomData": [ + { + "id": 4896, + "name": "U3BpbmRsZQ==", + "order": 0, + "colorId": 11, + "iconId": 77, + "name_unicode": "Spindle" + } + ] +} diff --git 
a/tests/components/hunterdouglas_powerview/fixtures/gen1/scenes.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/scenes.json new file mode 100644 index 00000000000..4b6b7fb9cc3 --- /dev/null +++ b/tests/components/hunterdouglas_powerview/fixtures/gen1/scenes.json @@ -0,0 +1,188 @@ +{ + "sceneIds": [ + 19831, 4068, 55363, 43508, 59372, 48243, 54636, 20625, 4034, 59103, 61648, + 24626, 64679, 22498, 28856, 25458, 51159, 959 + ], + "sceneData": [ + { + "id": 19831, + "networkNumber": 0, + "name": "Q2xvc2UgTG91bmdlIFJvb20=", + "roomId": 4896, + "order": 0, + "colorId": 7, + "iconId": 171, + "name_unicode": "Close Lounge Room" + }, + { + "id": 4068, + "networkNumber": 1, + "name": "Q2xvc2UgQmVkIDQ=", + "roomId": 4896, + "order": 1, + "colorId": 7, + "iconId": 10, + "name_unicode": "Close Bed 4" + }, + { + "id": 55363, + "networkNumber": 2, + "name": "Q2xvc2UgQmVkIDI=", + "roomId": 4896, + "order": 2, + "colorId": 11, + "iconId": 171, + "name_unicode": "Close Bed 2" + }, + { + "id": 43508, + "networkNumber": 3, + "name": "Q2xvc2UgTWFzdGVyIEJlZA==", + "roomId": 4896, + "order": 3, + "colorId": 11, + "iconId": 10, + "name_unicode": "Close Master Bed" + }, + { + "id": 59372, + "networkNumber": 4, + "name": "Q2xvc2UgRmFtaWx5", + "roomId": 4896, + "order": 4, + "colorId": 0, + "iconId": 171, + "name_unicode": "Close Family" + }, + { + "id": 48243, + "networkNumber": 5, + "name": "T3BlbiBCZWQgNA==", + "roomId": 4896, + "order": 5, + "colorId": 0, + "iconId": 10, + "name_unicode": "Open Bed 4" + }, + { + "id": 54636, + "networkNumber": 6, + "name": "T3BlbiBNYXN0ZXIgQmVk", + "roomId": 4896, + "order": 6, + "colorId": 0, + "iconId": 26, + "name_unicode": "Open Master Bed" + }, + { + "id": 20625, + "networkNumber": 7, + "name": "T3BlbiBCZWQgMw==", + "roomId": 4896, + "order": 7, + "colorId": 7, + "iconId": 26, + "name_unicode": "Open Bed 3" + }, + { + "id": 4034, + "networkNumber": 8, + "name": "T3BlbiBGYW1pbHk=", + "roomId": 4896, + "order": 8, + "colorId": 11, + "iconId": 26, + "name_unicode": "Open Family" + }, + { + "id": 59103, + "networkNumber": 9, + "name": "Q2xvc2UgU3R1ZHk=", + "roomId": 4896, + "order": 9, + "colorId": 0, + "iconId": 171, + "name_unicode": "Close Study" + }, + { + "id": 61648, + "networkNumber": 10, + "name": "T3BlbiBBbGw=", + "roomId": 4896, + "order": 10, + "colorId": 11, + "iconId": 26, + "name_unicode": "Open All" + }, + { + "id": 24626, + "networkNumber": 11, + "name": "Q2xvc2UgQWxs", + "roomId": 4896, + "order": 11, + "colorId": 0, + "iconId": 171, + "name_unicode": "Close All" + }, + { + "id": 64679, + "networkNumber": 12, + "name": "T3BlbiBLaXRjaGVu", + "roomId": 4896, + "order": 12, + "colorId": 7, + "iconId": 26, + "name_unicode": "Open Kitchen" + }, + { + "id": 22498, + "networkNumber": 13, + "name": "T3BlbiBMb3VuZ2UgUm9vbQ==", + "roomId": 4896, + "order": 13, + "colorId": 7, + "iconId": 26, + "name_unicode": "Open Lounge Room" + }, + { + "id": 25458, + "networkNumber": 14, + "name": "T3BlbiBCZWQgMg==", + "roomId": 4896, + "order": 14, + "colorId": 0, + "iconId": 26, + "name_unicode": "Open Bed 2" + }, + { + "id": 46225, + "networkNumber": 15, + "name": "Q2xvc2UgQmVkIDM=", + "roomId": 4896, + "order": 15, + "colorId": 0, + "iconId": 26, + "name_unicode": "Close Bed 3" + }, + { + "id": 51159, + "networkNumber": 16, + "name": "Q2xvc2UgS2l0Y2hlbg==", + "roomId": 4896, + "order": 16, + "colorId": 0, + "iconId": 26, + "name_unicode": "Close Kitchen" + }, + { + "id": 959, + "networkNumber": 17, + "name": "T3BlbiBTdHVkeQ==", + "roomId": 4896, + "order": 17, + 
"colorId": 0, + "iconId": 26, + "name_unicode": "Open Study" + } + ] +} diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/shades.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/shades.json new file mode 100644 index 00000000000..6e43c1d788d --- /dev/null +++ b/tests/components/hunterdouglas_powerview/fixtures/gen1/shades.json @@ -0,0 +1,53 @@ +{ + "shadeIds": [36492, 65111, 7003, 53627], + "shadeData": [ + { + "id": 36492, + "name": "S2l0Y2hlbiBOb3J0aA==", + "roomId": 4896, + "groupId": 35661, + "order": 0, + "type": 40, + "batteryStrength": 116, + "batteryStatus": 3, + "positions": { "position1": 65535, "posKind1": 1 }, + "name_unicode": "Kitchen North" + }, + { + "id": 65111, + "name": "S2l0Y2hlbiBXZXN0", + "roomId": 4896, + "groupId": 35661, + "order": 1, + "type": 40, + "batteryStrength": 124, + "batteryStatus": 3, + "positions": { "position1": 65535, "posKind1": 3 }, + "name_unicode": "Kitchen West" + }, + { + "id": 7003, + "name": "QmF0aCBFYXN0", + "roomId": 4896, + "groupId": 35661, + "order": 2, + "type": 40, + "batteryStrength": 94, + "batteryStatus": 1, + "positions": { "position1": 65535, "posKind1": 1 }, + "name_unicode": "Bath East" + }, + { + "id": 53627, + "name": "QmF0aCBTb3V0aA==", + "roomId": 4896, + "groupId": 35661, + "order": 3, + "type": 40, + "batteryStrength": 127, + "batteryStatus": 3, + "positions": { "position1": 65535, "posKind1": 3 }, + "name_unicode": "Bath South" + } + ] +} diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json index 132e2721b05..90b64ee4686 100644 --- a/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json +++ b/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json @@ -1,34 +1,34 @@ { "userData": { - "enableScheduledEvents": true, - "staticIp": false, - "sceneControllerCount": 0, - "accessPointCount": 0, - "shadeCount": 5, - "ip": "192.168.0.20", - "groupCount": 9, - "scheduledEventCount": 0, - "editingEnabled": true, - "roomCount": 5, - "setupCompleted": false, - "sceneCount": 18, - "sceneControllerMemberCount": 0, - "mask": "255.255.255.0", - "hubName": "UG93ZXJ2aWV3IEdlbmVyYXRpb24gMQ==", - "rfID": "0x8B2A", - "remoteConnectEnabled": false, - "multiSceneMemberCount": 0, - "rfStatus": 0, "serialNumber": "A1B2C3D4E5G6H7", - "undefinedShadeCount": 0, - "sceneMemberCount": 18, - "unassignedShadeCount": 0, - "multiSceneCount": 0, - "addressKind": "newPrimary", - "gateway": "192.168.0.1", - "localTimeDataSet": true, - "dns": "192.168.0.1", + "rfID": "0x8B2A", + "rfIDInt": 35626, + "rfStatus": 0, + "hubName": "UG93ZXJ2aWV3IEdlbmVyYXRpb24gMQ==", "macAddress": "AA:BB:CC:DD:EE:FF", - "rfIDInt": 35626 + "roomCount": 1, + "shadeCount": 4, + "groupCount": 5, + "sceneCount": 9, + "sceneMemberCount": 24, + "multiSceneCount": 0, + "multiSceneMemberCount": 0, + "scheduledEventCount": 4, + "sceneControllerCount": 0, + "sceneControllerMemberCount": 0, + "accessPointCount": 0, + "localTimeDataSet": true, + "enableScheduledEvents": true, + "remoteConnectEnabled": true, + "editingEnabled": true, + "setupCompleted": false, + "gateway": "192.168.0.1", + "mask": "255.255.255.0", + "ip": "192.168.0.20", + "dns": "192.168.0.1", + "staticIp": false, + "addressKind": "newPrimary", + "unassignedShadeCount": 0, + "undefinedShadeCount": 0 } } diff --git a/tests/components/hunterdouglas_powerview/test_config_flow.py b/tests/components/hunterdouglas_powerview/test_config_flow.py index b9721f4adb1..42589bb10e0 100644 
--- a/tests/components/hunterdouglas_powerview/test_config_flow.py +++ b/tests/components/hunterdouglas_powerview/test_config_flow.py @@ -10,8 +10,9 @@ from homeassistant.components.hunterdouglas_powerview.const import DOMAIN from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +import homeassistant.helpers.entity_registry as er -from .const import DHCP_DATA, DISCOVERY_DATA, HOMEKIT_DATA +from .const import DHCP_DATA, DISCOVERY_DATA, HOMEKIT_DATA, MOCK_SERIAL from tests.common import MockConfigEntry, load_json_object_fixture @@ -40,7 +41,7 @@ async def test_user_form( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == f"Powerview Generation {api_version}" assert result2["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result2["result"].unique_id == "A1B2C3D4E5G6H7" + assert result2["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -75,7 +76,7 @@ async def test_form_homekit_and_dhcp_cannot_connect( ignored_config_entry.add_to_hass(hass) with patch( - "homeassistant.components.hunterdouglas_powerview.Hub.query_firmware", + "homeassistant.components.hunterdouglas_powerview.util.Hub.query_firmware", side_effect=TimeoutError, ): result = await hass.config_entries.flow.async_init( @@ -100,7 +101,7 @@ async def test_form_homekit_and_dhcp_cannot_connect( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == "A1B2C3D4E5G6H7" + assert result3["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -142,7 +143,7 @@ async def test_form_homekit_and_dhcp( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == f"Powerview Generation {api_version}" assert result2["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result2["result"].unique_id == "A1B2C3D4E5G6H7" + assert result2["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -205,7 +206,7 @@ async def test_form_cannot_connect( # Simulate a timeout error with patch( - "homeassistant.components.hunterdouglas_powerview.Hub.query_firmware", + "homeassistant.components.hunterdouglas_powerview.util.Hub.query_firmware", side_effect=TimeoutError, ): result2 = await hass.config_entries.flow.async_configure( @@ -225,7 +226,7 @@ async def test_form_cannot_connect( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == "A1B2C3D4E5G6H7" + assert result3["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -244,11 +245,11 @@ async def test_form_no_data( with ( patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data", return_value={}, ), patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_home_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_home_data", return_value={}, ), ): @@ -269,7 +270,7 @@ async def test_form_no_data( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview 
Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == "A1B2C3D4E5G6H7" + assert result3["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -288,7 +289,7 @@ async def test_form_unknown_exception( # Simulate a transient error with patch( - "homeassistant.components.hunterdouglas_powerview.config_flow.Hub.query_firmware", + "homeassistant.components.hunterdouglas_powerview.util.Hub.query_firmware", side_effect=SyntaxError, ): result2 = await hass.config_entries.flow.async_configure( @@ -308,7 +309,7 @@ async def test_form_unknown_exception( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == f"Powerview Generation {api_version}" assert result2["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result2["result"].unique_id == "A1B2C3D4E5G6H7" + assert result2["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -327,7 +328,7 @@ async def test_form_unsupported_device( # Simulate a gen 3 secondary hub with patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data", return_value=load_json_object_fixture("gen3/gateway/secondary.json", DOMAIN), ): result2 = await hass.config_entries.flow.async_configure( @@ -347,6 +348,57 @@ async def test_form_unsupported_device( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == "A1B2C3D4E5G6H7" + assert result3["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_hunterdouglas_hub") +@pytest.mark.parametrize("api_version", [1, 2, 3]) +async def test_migrate_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + api_version: int, +) -> None: + """Test migrate to newest version.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={"host": "1.2.3.4"}, + unique_id=MOCK_SERIAL, + version=1, + minor_version=1, + ) + + # Add entries with int unique_id + entity_registry.async_get_or_create( + domain="cover", + platform="hunterdouglas_powerview", + unique_id=123, + config_entry=entry, + ) + # Add entries with a str unique_id not starting with entry.unique_id + entity_registry.async_get_or_create( + domain="cover", + platform="hunterdouglas_powerview", + unique_id="old_unique_id", + config_entry=entry, + ) + + assert entry.version == 1 + assert entry.minor_version == 1 + + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.version == 1 + assert entry.minor_version == 2 + + # Reload the registry entries + registry_entries = er.async_entries_for_config_entry( + entity_registry, entry.entry_id + ) + + # Ensure the IDs have been migrated + for reg_entry in registry_entries: + assert reg_entry.unique_id.startswith(f"{entry.unique_id}_") diff --git a/tests/components/husqvarna_automower/conftest.py b/tests/components/husqvarna_automower/conftest.py index dbb8f3b4c72..0202cec05b9 100644 --- a/tests/components/husqvarna_automower/conftest.py +++ b/tests/components/husqvarna_automower/conftest.py @@ -1,9 +1,11 @@ """Test helpers for Husqvarna Automower.""" +import asyncio from collections.abc import Generator import time from 
unittest.mock import AsyncMock, patch +from aioautomower.model import MowerAttributes from aioautomower.session import AutomowerSession, _MowerCommands from aioautomower.utils import mower_list_to_dictionary_dataclass from aiohttp import ClientWebSocketResponse @@ -16,6 +18,7 @@ from homeassistant.components.application_credentials import ( from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util from .const import CLIENT_ID, CLIENT_SECRET, USER_ID @@ -40,6 +43,21 @@ def mock_scope() -> str: return "iam:read amc:api" +@pytest.fixture(name="mower_time_zone") +async def mock_time_zone(hass: HomeAssistant) -> dict[str, MowerAttributes]: + """Fixture for the time zone used by the mower.""" + return await dt_util.async_get_time_zone("Europe/Berlin") + + +@pytest.fixture(name="values") +def mock_values(mower_time_zone) -> dict[str, MowerAttributes]: + """Fixture for the parsed mower values.""" + return mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN), + mower_time_zone, + ) + + @pytest.fixture def mock_config_entry(jwt: str, expires_at: int, scope: str) -> MockConfigEntry: """Return the default mocked config entry.""" @@ -81,17 +99,20 @@ async def setup_credentials(hass: HomeAssistant) -> None: @pytest.fixture -def mock_automower_client() -> Generator[AsyncMock]: +def mock_automower_client(values) -> Generator[AsyncMock]: """Mock a Husqvarna Automower client.""" - mower_dict = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) + async def listen() -> None: + """Mock listen.""" + listen_block = asyncio.Event() + await listen_block.wait() + pytest.fail("Listen was not cancelled!") mock = AsyncMock(spec=AutomowerSession) mock.auth = AsyncMock(side_effect=ClientWebSocketResponse) mock.commands = AsyncMock(spec_set=_MowerCommands) - mock.get_status.return_value = mower_dict + mock.get_status.return_value = values + mock.start_listening = AsyncMock(side_effect=listen) with patch( "homeassistant.components.husqvarna_automower.AutomowerSession", diff --git a/tests/components/husqvarna_automower/fixtures/empty.json b/tests/components/husqvarna_automower/fixtures/empty.json new file mode 100644 index 00000000000..22f4a272fc1 --- /dev/null +++ b/tests/components/husqvarna_automower/fixtures/empty.json @@ -0,0 +1 @@ +{ "data": [] } diff --git a/tests/components/husqvarna_automower/fixtures/mower.json b/tests/components/husqvarna_automower/fixtures/mower.json index aa8ea2cbef4..8ab2f96e42f 100644 --- a/tests/components/husqvarna_automower/fixtures/mower.json +++ b/tests/components/husqvarna_automower/fixtures/mower.json @@ -6,7 +6,7 @@ "attributes": { "system": { "name": "Test Mower 1", - "model": "450XH-TEST", + "model": "HUSQVARNA AUTOMOWER® 450XH", "serialNumber": 123 }, "battery": { @@ -40,7 +40,8 @@ "thursday": false, "friday": true, "saturday": false, - "sunday": false + "sunday": false, + "workAreaId": 123456 }, { "start": 0, @@ -51,7 +52,32 @@ "thursday": true, "friday": false, "saturday": true, - "sunday": false + "sunday": false, + "workAreaId": 123456 + }, + { + "start": 0, + "duration": 480, + "monday": false, + "tuesday": true, + "wednesday": false, + "thursday":
true, + "friday": false, + "saturday": true, + "sunday": false, + "workAreaId": 654321 } ] }, @@ -64,23 +90,30 @@ }, "metadata": { "connected": true, - "statusTimestamp": 1697669932683 + "statusTimestamp": 1685923200000 }, "workAreas": [ { "workAreaId": 123456, "name": "Front lawn", - "cuttingHeight": 50 + "cuttingHeight": 50, + "enabled": true, + "progress": 40, + "lastTimeCompleted": 1723449269 }, { "workAreaId": 654321, "name": "Back lawn", - "cuttingHeight": 25 + "cuttingHeight": 25, + "enabled": true }, { "workAreaId": 0, "name": "", - "cuttingHeight": 50 + "cuttingHeight": 50, + "enabled": false, + "progress": 20, + "lastTimeCompleted": 1723439269 } ], "positions": [ @@ -173,6 +206,69 @@ } } } + }, + { + "type": "mower", + "id": "1234", + "attributes": { + "system": { + "name": "Test Mower 2", + "model": "HUSQVARNA AUTOMOWER® Aspire R4", + "serialNumber": 123 + }, + "battery": { + "batteryPercent": 50 + }, + "capabilities": { + "canConfirmError": false, + "headlights": false, + "position": false, + "stayOutZones": false, + "workAreas": false + }, + "mower": { + "mode": "MAIN_AREA", + "activity": "PARKED_IN_CS", + "inactiveReason": "NONE", + "state": "RESTRICTED", + "errorCode": 0, + "errorCodeTimestamp": 0 + }, + "calendar": { + "tasks": [ + { + "start": 120, + "duration": 49, + "monday": true, + "tuesday": false, + "wednesday": false, + "thursday": false, + "friday": false, + "saturday": false, + "sunday": false + } + ] + }, + "planner": { + "nextStartTimestamp": 1685991600000, + "override": { + "action": "NOT_ACTIVE" + }, + "restrictedReason": "WEEK_SCHEDULE" + }, + "metadata": { + "connected": true, + "statusTimestamp": 1697669932683 + }, + "positions": [], + "settings": { + "cuttingHeight": null, + "headlight": { + "mode": null + } + }, + "statistics": {} + } } ] } diff --git a/tests/components/husqvarna_automower/snapshots/test_binary_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_binary_sensor.ambr index aaa9c59679f..16d9452e847 100644 --- a/tests/components/husqvarna_automower/snapshots/test_binary_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_binary_sensor.ambr @@ -138,3 +138,142 @@ 'state': 'off', }) # --- +# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_mower_2_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charging', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_battery_charging', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery_charging', + 'friendly_name': 'Test Mower 2 Charging', + }), + 'context': , + 'entity_id': 'binary_sensor.test_mower_2_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_leaving_dock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_mower_2_leaving_dock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Leaving dock', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'leaving_dock', + 'unique_id': '1234_leaving_dock', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_leaving_dock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 2 Leaving dock', + }), + 'context': , + 'entity_id': 'binary_sensor.test_mower_2_leaving_dock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_returning_to_dock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_mower_2_returning_to_dock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Returning to dock', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'returning_to_dock', + 'unique_id': '1234_returning_to_dock', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_returning_to_dock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 2 Returning to dock', + }), + 'context': , + 'entity_id': 'binary_sensor.test_mower_2_returning_to_dock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/husqvarna_automower/snapshots/test_button.ambr b/tests/components/husqvarna_automower/snapshots/test_button.ambr index ab2cb427f1a..2ce3aae3065 100644 --- a/tests/components/husqvarna_automower/snapshots/test_button.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_button.ambr @@ -45,3 +45,95 @@ 'state': 'unavailable', }) # --- +# name: test_button_snapshot[button.test_mower_1_sync_clock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_mower_1_sync_clock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sync clock', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sync_clock', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_sync_clock', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_snapshot[button.test_mower_1_sync_clock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Test Mower 1 Sync clock', + }), + 'context': , + 'entity_id': 'button.test_mower_1_sync_clock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_snapshot[button.test_mower_2_sync_clock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_mower_2_sync_clock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sync clock', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sync_clock', + 'unique_id': '1234_sync_clock', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_snapshot[button.test_mower_2_sync_clock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 2 Sync clock', + }), + 'context': , + 'entity_id': 'button.test_mower_2_sync_clock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/husqvarna_automower/snapshots/test_calendar.ambr b/tests/components/husqvarna_automower/snapshots/test_calendar.ambr new file mode 100644 index 00000000000..7cd8c68b624 --- /dev/null +++ b/tests/components/husqvarna_automower/snapshots/test_calendar.ambr @@ -0,0 +1,88 @@ +# serializer version: 1 +# name: test_calendar_snapshot[start_date0-end_date0] + dict({ + 'calendar.test_mower_1': dict({ + 'events': list([ + dict({ + 'end': '2023-06-05T09:00:00+02:00', + 'start': '2023-06-05T01:00:00+02:00', + 'summary': 'Back lawn schedule 2', + }), + dict({ + 'end': '2023-06-06T00:00:00+02:00', + 'start': '2023-06-05T19:00:00+02:00', + 'summary': 'Front lawn schedule 1', + }), + dict({ + 'end': '2023-06-06T08:00:00+02:00', + 'start': '2023-06-06T00:00:00+02:00', + 'summary': 'Back lawn schedule 1', + }), + dict({ + 'end': '2023-06-06T08:00:00+02:00', + 'start': '2023-06-06T00:00:00+02:00', + 'summary': 'Front lawn schedule 2', + }), + dict({ + 'end': '2023-06-06T09:00:00+02:00', + 'start': '2023-06-06T01:00:00+02:00', + 'summary': 'Back lawn schedule 2', + }), + dict({ + 'end': '2023-06-08T00:00:00+02:00', + 'start': '2023-06-07T19:00:00+02:00', + 'summary': 'Front lawn schedule 1', + }), + dict({ + 'end': '2023-06-08T08:00:00+02:00', + 'start': '2023-06-08T00:00:00+02:00', + 'summary': 'Back lawn schedule 1', + }), + dict({ + 'end': '2023-06-08T08:00:00+02:00', + 'start': '2023-06-08T00:00:00+02:00', + 'summary': 'Front lawn schedule 2', + }), + dict({ + 'end': '2023-06-08T09:00:00+02:00', + 'start': '2023-06-08T01:00:00+02:00', + 'summary': 'Back lawn schedule 2', + }), + dict({ + 'end': '2023-06-10T00:00:00+02:00', + 'start': '2023-06-09T19:00:00+02:00', + 'summary': 'Front lawn schedule 1', + }), + dict({ + 'end': '2023-06-10T08:00:00+02:00', + 'start': '2023-06-10T00:00:00+02:00', + 'summary': 'Back lawn schedule 1', + }), + dict({ + 'end': '2023-06-10T08:00:00+02:00', + 'start': '2023-06-10T00:00:00+02:00', + 'summary': 'Front lawn schedule 2', + }), + dict({ + 'end': '2023-06-10T09:00:00+02:00', + 'start': '2023-06-10T01:00:00+02:00', + 'summary': 'Back lawn schedule 2', + }), + dict({ + 'end': '2023-06-12T09:00:00+02:00', + 'start': '2023-06-12T01:00:00+02:00', 
+ 'summary': 'Back lawn schedule 2', + }), + ]), + }), + 'calendar.test_mower_2': dict({ + 'events': list([ + dict({ + 'end': '2023-06-05T02:49:00+02:00', + 'start': '2023-06-05T02:00:00+02:00', + 'summary': 'Schedule 1', + }), + ]), + }), + }) +# --- diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index 3838f2eb960..2dab82451a6 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -5,26 +5,6 @@ 'battery_percent': 100, }), 'calendar': dict({ - 'events': list([ - dict({ - 'end': '2024-03-02T00:00:00', - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,WE,FR', - 'schedule_no': 1, - 'start': '2024-03-01T19:00:00', - 'uid': '1140_300_MO,WE,FR', - 'work_area_id': None, - 'work_area_name': None, - }), - dict({ - 'end': '2024-03-02T08:00:00', - 'rrule': 'FREQ=WEEKLY;BYDAY=TU,TH,SA', - 'schedule_no': 2, - 'start': '2024-03-02T00:00:00', - 'uid': '0_480_TU,TH,SA', - 'work_area_id': None, - 'work_area_name': None, - }), - ]), 'tasks': list([ dict({ 'duration': 300, @@ -36,8 +16,7 @@ 'thursday': False, 'tuesday': False, 'wednesday': True, - 'work_area_id': None, - 'work_area_name': None, + 'workAreaId': 123456, }), dict({ 'duration': 480, @@ -49,8 +28,31 @@ 'thursday': True, 'tuesday': True, 'wednesday': False, - 'work_area_id': None, - 'work_area_name': None, + 'workAreaId': 123456, + }), + dict({ + 'duration': 480, + 'friday': False, + 'monday': False, + 'saturday': True, + 'start': 0, + 'sunday': False, + 'thursday': True, + 'tuesday': True, + 'wednesday': False, + 'workAreaId': 654321, + }), + dict({ + 'duration': 480, + 'friday': False, + 'monday': True, + 'saturday': True, + 'start': 60, + 'sunday': False, + 'thursday': True, + 'tuesday': True, + 'wednesday': False, + 'workAreaId': 654321, }), ]), }), @@ -63,34 +65,32 @@ }), 'metadata': dict({ 'connected': True, - 'status_dateteime': '2023-10-18T22:58:52.683000+00:00', + 'status_dateteime': '2023-06-05T00:00:00+00:00', }), 'mower': dict({ - 'activity': 'PARKED_IN_CS', + 'activity': 'parked_in_cs', 'error_code': 0, - 'error_datetime_naive': None, + 'error_datetime': None, 'error_key': None, - 'error_timestamp': 0, - 'inactive_reason': 'NONE', + 'inactive_reason': 'none', 'is_error_confirmable': False, - 'mode': 'MAIN_AREA', - 'state': 'RESTRICTED', + 'mode': 'main_area', + 'state': 'restricted', 'work_area_id': 123456, 'work_area_name': 'Front lawn', }), 'planner': dict({ - 'next_start': 1685991600000, - 'next_start_datetime_naive': '2023-06-05T19:00:00', + 'next_start_datetime': '2023-06-05T19:00:00+02:00', 'override': dict({ - 'action': 'NOT_ACTIVE', + 'action': 'not_active', }), - 'restricted_reason': 'WEEK_SCHEDULE', + 'restricted_reason': 'week_schedule', }), 'positions': '**REDACTED**', 'settings': dict({ 'cutting_height': 4, 'headlight': dict({ - 'mode': 'EVENING_ONLY', + 'mode': 'evening_only', }), }), 'statistics': dict({ @@ -117,9 +117,9 @@ }), }), 'system': dict({ - 'model': '450XH-TEST', + 'model': 'HUSQVARNA AUTOMOWER® 450XH', 'name': 'Test Mower 1', - 'serial_number': 123, + 'serial_number': '123', }), 'work_area_dict': dict({ '0': 'my_lawn', @@ -135,15 +135,24 @@ 'work_areas': dict({ '0': dict({ 'cutting_height': 50, + 'enabled': False, + 'last_time_completed': '2024-08-12T05:07:49+02:00', 'name': 'my_lawn', + 'progress': 20, }), '123456': dict({ 'cutting_height': 50, + 'enabled': True, + 'last_time_completed': '2024-08-12T07:54:29+02:00', 
'name': 'Front lawn', + 'progress': 40, }), '654321': dict({ 'cutting_height': 25, + 'enabled': True, + 'last_time_completed': None, 'name': 'Back lawn', + 'progress': None, }), }), }) @@ -154,7 +163,7 @@ 'auth_implementation': 'husqvarna_automower', 'token': dict({ 'access_token': '**REDACTED**', - 'expires_at': 1709208000.0, + 'expires_at': 1685919600.0, 'expires_in': 86399, 'provider': 'husqvarna', 'refresh_token': '**REDACTED**', @@ -164,6 +173,8 @@ }), }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'husqvarna_automower', 'entry_id': 'automower_test', 'minor_version': 1, @@ -172,6 +183,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Husqvarna Automower of Erika Mustermann', 'unique_id': '123', 'version': 1, diff --git a/tests/components/husqvarna_automower/snapshots/test_init.ambr b/tests/components/husqvarna_automower/snapshots/test_init.ambr index ccfb1bf3df4..036783dd6d0 100644 --- a/tests/components/husqvarna_automower/snapshots/test_init.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_init.ambr @@ -20,12 +20,12 @@ 'labels': set({ }), 'manufacturer': 'Husqvarna', - 'model': '450XH-TEST', + 'model': 'AUTOMOWER® 450XH', 'model_id': None, 'name': 'Test Mower 1', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': 123, + 'serial_number': '123', 'suggested_area': 'Garden', 'sw_version': None, 'via_device_id': None, diff --git a/tests/components/husqvarna_automower/snapshots/test_number.ambr b/tests/components/husqvarna_automower/snapshots/test_number.ambr index de8b397f01c..b0ccce5800a 100644 --- a/tests/components/husqvarna_automower/snapshots/test_number.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_number.ambr @@ -32,7 +32,7 @@ 'platform': 'husqvarna_automower', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'work_area_cutting_height', + 'translation_key': 'work_area_cutting_height_work_area', 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_654321_cutting_height_work_area', 'unit_of_measurement': '%', }) @@ -143,7 +143,7 @@ 'platform': 'husqvarna_automower', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'work_area_cutting_height', + 'translation_key': 'work_area_cutting_height_work_area', 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_123456_cutting_height_work_area', 'unit_of_measurement': '%', }) @@ -195,11 +195,11 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'My lawn cutting height ', + 'original_name': 'My lawn cutting height', 'platform': 'husqvarna_automower', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'my_lawn_cutting_height', + 'translation_key': 'my_lawn_cutting_height_work_area', 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_0_cutting_height_work_area', 'unit_of_measurement': '%', }) @@ -207,7 +207,7 @@ # name: test_number_snapshot[number.test_mower_1_my_lawn_cutting_height-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 1 My lawn cutting height ', + 'friendly_name': 'Test Mower 1 My lawn cutting height', 'max': 100.0, 'min': 0.0, 'mode': , diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index c260e6beba6..d57a829a997 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ 
-162,6 +162,9 @@ 'docking_sensor_defect', 'electronic_problem', 'empty_battery', + 'error', + 'error_at_power_up', + 'fatal_error', 'folding_cutting_deck_sensor_defect', 'folding_sensor_activated', 'geofence_problem', @@ -340,6 +343,9 @@ 'docking_sensor_defect', 'electronic_problem', 'empty_battery', + 'error', + 'error_at_power_up', + 'fatal_error', 'folding_cutting_deck_sensor_defect', 'folding_sensor_activated', 'geofence_problem', @@ -442,6 +448,103 @@ 'state': 'no_error', }) # --- +# name: test_sensor_snapshot[sensor.test_mower_1_front_lawn_last_time_completed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_front_lawn_last_time_completed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front lawn last time completed', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'work_area_last_time_completed', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_123456_last_time_completed', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_front_lawn_last_time_completed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Mower 1 Front lawn last time completed', + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_front_lawn_last_time_completed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-08-12T05:54:29+00:00', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_front_lawn_progress-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_front_lawn_progress', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Front lawn progress', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'work_area_progress', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_123456_progress', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_front_lawn_progress-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 1 Front lawn progress', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_front_lawn_progress', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- # name: test_sensor_snapshot[sensor.test_mower_1_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -449,11 +552,11 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'main_area', - 'demo', - 'secondary_area', - 'home', - 'unknown', + , + , + , + , + , ]), }), 'config_entry_id': , @@ -489,11 +592,11 @@ 'device_class': 'enum', 'friendly_name': 'Test Mower 1 Mode', 'options': list([ - 'main_area', - 
'demo', - 'secondary_area', - 'home', - 'unknown', + , + , + , + , + , ]), }), 'context': , @@ -504,6 +607,103 @@ 'state': 'main_area', }) # --- +# name: test_sensor_snapshot[sensor.test_mower_1_my_lawn_last_time_completed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_my_lawn_last_time_completed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'My lawn last time completed', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'my_lawn_last_time_completed', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_0_last_time_completed', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_my_lawn_last_time_completed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Mower 1 My lawn last time completed', + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_my_lawn_last_time_completed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-08-12T03:07:49+00:00', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_my_lawn_progress-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_my_lawn_progress', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'My lawn progress', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'my_lawn_progress', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_0_progress', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_my_lawn_progress-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 1 My lawn progress', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_my_lawn_progress', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- # name: test_sensor_snapshot[sensor.test_mower_1_next_start-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -656,16 +856,16 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'all_work_areas_completed', - 'daily_limit', - 'external', - 'fota', - 'frost', - 'none', - 'not_applicable', - 'park_override', - 'sensor', - 'week_schedule', + , + , + , + , + , + , + , + , + , + , ]), }), 'config_entry_id': , @@ -701,16 +901,16 @@ 'device_class': 'enum', 'friendly_name': 'Test Mower 1 Restricted reason', 'options': list([ - 'all_work_areas_completed', - 'daily_limit', - 'external', - 'fota', - 'frost', - 'none', - 'not_applicable', - 'park_override', - 'sensor', - 'week_schedule', + , + , + , + , + , + , + , + , + , + , ]), }), 'context': , @@ -1056,3 +1256,579 @@ 'state': 'Front lawn', }) # --- +# 
name: test_sensor_snapshot[sensor.test_mower_2_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_2_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_battery_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_2_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Mower 2 Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_mower_2_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_2_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_error', + 'alarm_mower_in_motion', + 'alarm_mower_lifted', + 'alarm_mower_stopped', + 'alarm_mower_switched_off', + 'alarm_mower_tilted', + 'alarm_outside_geofence', + 'angular_sensor_problem', + 'battery_problem', + 'battery_problem', + 'battery_restriction_due_to_ambient_temperature', + 'can_error', + 'charging_current_too_high', + 'charging_station_blocked', + 'charging_system_problem', + 'charging_system_problem', + 'collision_sensor_defect', + 'collision_sensor_error', + 'collision_sensor_problem_front', + 'collision_sensor_problem_rear', + 'com_board_not_available', + 'communication_circuit_board_sw_must_be_updated', + 'complex_working_area', + 'connection_changed', + 'connection_not_changed', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_settings_restored', + 'cutting_drive_motor_1_defect', + 'cutting_drive_motor_2_defect', + 'cutting_drive_motor_3_defect', + 'cutting_height_blocked', + 'cutting_height_problem', + 'cutting_height_problem_curr', + 'cutting_height_problem_dir', + 'cutting_height_problem_drive', + 'cutting_motor_problem', + 'cutting_stopped_slope_too_steep', + 'cutting_system_blocked', + 'cutting_system_blocked', + 'cutting_system_imbalance_warning', + 'cutting_system_major_imbalance', + 'destination_not_reachable', + 'difficult_finding_home', + 'docking_sensor_defect', + 'electronic_problem', + 'empty_battery', + 'error', + 'error_at_power_up', + 'fatal_error', + 'folding_cutting_deck_sensor_defect', + 'folding_sensor_activated', + 'geofence_problem', + 'geofence_problem', + 'gps_navigation_problem', + 'guide_1_not_found', + 'guide_2_not_found', + 'guide_3_not_found', + 'guide_calibration_accomplished', + 'guide_calibration_failed', + 'high_charging_power_loss', + 'high_internal_power_loss', + 'high_internal_temperature', + 'internal_voltage_error', + 'invalid_battery_combination_invalid_combination_of_different_battery_types', + 'invalid_sub_device_combination', + 'invalid_system_configuration', + 'left_brush_motor_overloaded', + 'lift_sensor_defect', + 'lifted', + 
'limited_cutting_height_range', + 'limited_cutting_height_range', + 'loop_sensor_defect', + 'loop_sensor_problem_front', + 'loop_sensor_problem_left', + 'loop_sensor_problem_rear', + 'loop_sensor_problem_right', + 'low_battery', + 'memory_circuit_problem', + 'mower_lifted', + 'mower_tilted', + 'no_accurate_position_from_satellites', + 'no_confirmed_position', + 'no_drive', + 'no_loop_signal', + 'no_power_in_charging_station', + 'no_response_from_charger', + 'outside_working_area', + 'poor_signal_quality', + 'reference_station_communication_problem', + 'right_brush_motor_overloaded', + 'safety_function_faulty', + 'settings_restored', + 'sim_card_locked', + 'sim_card_locked', + 'sim_card_locked', + 'sim_card_locked', + 'sim_card_not_found', + 'sim_card_requires_pin', + 'slipped_mower_has_slipped_situation_not_solved_with_moving_pattern', + 'slope_too_steep', + 'sms_could_not_be_sent', + 'stop_button_problem', + 'stuck_in_charging_station', + 'switch_cord_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'tilt_sensor_problem', + 'too_high_discharge_current', + 'too_high_internal_current', + 'trapped', + 'ultrasonic_problem', + 'ultrasonic_sensor_1_defect', + 'ultrasonic_sensor_2_defect', + 'ultrasonic_sensor_3_defect', + 'ultrasonic_sensor_4_defect', + 'unexpected_cutting_height_adj', + 'unexpected_error', + 'upside_down', + 'weak_gps_signal', + 'wheel_drive_problem_left', + 'wheel_drive_problem_rear_left', + 'wheel_drive_problem_rear_right', + 'wheel_drive_problem_right', + 'wheel_motor_blocked_left', + 'wheel_motor_blocked_rear_left', + 'wheel_motor_blocked_rear_right', + 'wheel_motor_blocked_right', + 'wheel_motor_overloaded_left', + 'wheel_motor_overloaded_rear_left', + 'wheel_motor_overloaded_rear_right', + 'wheel_motor_overloaded_right', + 'work_area_not_valid', + 'wrong_loop_signal', + 'wrong_pin_code', + 'zone_generator_problem', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_2_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Error', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error', + 'unique_id': '1234_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_2_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Mower 2 Error', + 'options': list([ + 'no_error', + 'alarm_mower_in_motion', + 'alarm_mower_lifted', + 'alarm_mower_stopped', + 'alarm_mower_switched_off', + 'alarm_mower_tilted', + 'alarm_outside_geofence', + 'angular_sensor_problem', + 'battery_problem', + 'battery_problem', + 'battery_restriction_due_to_ambient_temperature', + 'can_error', + 'charging_current_too_high', + 'charging_station_blocked', + 'charging_system_problem', + 'charging_system_problem', + 'collision_sensor_defect', + 'collision_sensor_error', + 'collision_sensor_problem_front', + 'collision_sensor_problem_rear', + 'com_board_not_available', + 'communication_circuit_board_sw_must_be_updated', + 'complex_working_area', + 
'connection_changed', + 'connection_not_changed', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_problem', + 'connectivity_settings_restored', + 'cutting_drive_motor_1_defect', + 'cutting_drive_motor_2_defect', + 'cutting_drive_motor_3_defect', + 'cutting_height_blocked', + 'cutting_height_problem', + 'cutting_height_problem_curr', + 'cutting_height_problem_dir', + 'cutting_height_problem_drive', + 'cutting_motor_problem', + 'cutting_stopped_slope_too_steep', + 'cutting_system_blocked', + 'cutting_system_blocked', + 'cutting_system_imbalance_warning', + 'cutting_system_major_imbalance', + 'destination_not_reachable', + 'difficult_finding_home', + 'docking_sensor_defect', + 'electronic_problem', + 'empty_battery', + 'error', + 'error_at_power_up', + 'fatal_error', + 'folding_cutting_deck_sensor_defect', + 'folding_sensor_activated', + 'geofence_problem', + 'geofence_problem', + 'gps_navigation_problem', + 'guide_1_not_found', + 'guide_2_not_found', + 'guide_3_not_found', + 'guide_calibration_accomplished', + 'guide_calibration_failed', + 'high_charging_power_loss', + 'high_internal_power_loss', + 'high_internal_temperature', + 'internal_voltage_error', + 'invalid_battery_combination_invalid_combination_of_different_battery_types', + 'invalid_sub_device_combination', + 'invalid_system_configuration', + 'left_brush_motor_overloaded', + 'lift_sensor_defect', + 'lifted', + 'limited_cutting_height_range', + 'limited_cutting_height_range', + 'loop_sensor_defect', + 'loop_sensor_problem_front', + 'loop_sensor_problem_left', + 'loop_sensor_problem_rear', + 'loop_sensor_problem_right', + 'low_battery', + 'memory_circuit_problem', + 'mower_lifted', + 'mower_tilted', + 'no_accurate_position_from_satellites', + 'no_confirmed_position', + 'no_drive', + 'no_loop_signal', + 'no_power_in_charging_station', + 'no_response_from_charger', + 'outside_working_area', + 'poor_signal_quality', + 'reference_station_communication_problem', + 'right_brush_motor_overloaded', + 'safety_function_faulty', + 'settings_restored', + 'sim_card_locked', + 'sim_card_locked', + 'sim_card_locked', + 'sim_card_locked', + 'sim_card_not_found', + 'sim_card_requires_pin', + 'slipped_mower_has_slipped_situation_not_solved_with_moving_pattern', + 'slope_too_steep', + 'sms_could_not_be_sent', + 'stop_button_problem', + 'stuck_in_charging_station', + 'switch_cord_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'temporary_battery_problem', + 'tilt_sensor_problem', + 'too_high_discharge_current', + 'too_high_internal_current', + 'trapped', + 'ultrasonic_problem', + 'ultrasonic_sensor_1_defect', + 'ultrasonic_sensor_2_defect', + 'ultrasonic_sensor_3_defect', + 'ultrasonic_sensor_4_defect', + 'unexpected_cutting_height_adj', + 'unexpected_error', + 'upside_down', + 'weak_gps_signal', + 'wheel_drive_problem_left', + 'wheel_drive_problem_rear_left', + 'wheel_drive_problem_rear_right', + 'wheel_drive_problem_right', + 'wheel_motor_blocked_left', + 'wheel_motor_blocked_rear_left', + 'wheel_motor_blocked_rear_right', + 'wheel_motor_blocked_right', + 'wheel_motor_overloaded_left', + 'wheel_motor_overloaded_rear_left', + 'wheel_motor_overloaded_rear_right', + 'wheel_motor_overloaded_right', + 'work_area_not_valid', + 'wrong_loop_signal', + 'wrong_pin_code', + 
'zone_generator_problem', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_mower_2_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'no_error', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_2_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_2_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mode', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '1234_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_2_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Mower 2 Mode', + 'options': list([ + , + , + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.test_mower_2_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'main_area', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_2_next_start-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_2_next_start', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Next start', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'next_start_timestamp', + 'unique_id': '1234_next_start_timestamp', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_2_next_start-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Mower 2 Next start', + }), + 'context': , + 'entity_id': 'sensor.test_mower_2_next_start', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-06-05T17:00:00+00:00', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_2_restricted_reason-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + , + , + , + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_2_restricted_reason', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restricted reason', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'restricted_reason', + 'unique_id': '1234_restricted_reason', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_2_restricted_reason-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Mower 2 Restricted reason', + 'options': list([ + , + , + , + , + , + , + , + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.test_mower_2_restricted_reason', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'week_schedule', + }) +# --- diff --git a/tests/components/husqvarna_automower/snapshots/test_switch.ambr b/tests/components/husqvarna_automower/snapshots/test_switch.ambr index f52462496ff..8f8f6b367c0 100644 --- a/tests/components/husqvarna_automower/snapshots/test_switch.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_switch.ambr @@ -91,6 +91,52 @@ 'state': 'on', }) # --- +# name: test_switch_snapshot[switch.test_mower_1_back_lawn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_mower_1_back_lawn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Back lawn', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'work_area_work_area', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_654321_work_area', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_snapshot[switch.test_mower_1_back_lawn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 1 Back lawn', + }), + 'context': , + 'entity_id': 'switch.test_mower_1_back_lawn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_switch_snapshot[switch.test_mower_1_enable_schedule-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -137,3 +183,141 @@ 'state': 'on', }) # --- +# name: test_switch_snapshot[switch.test_mower_1_front_lawn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_mower_1_front_lawn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Front lawn', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'work_area_work_area', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_123456_work_area', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_snapshot[switch.test_mower_1_front_lawn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 1 Front lawn', + }), + 'context': , + 'entity_id': 'switch.test_mower_1_front_lawn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_snapshot[switch.test_mower_1_my_lawn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 
'switch.test_mower_1_my_lawn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'My lawn', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'my_lawn_work_area', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_0_work_area', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_snapshot[switch.test_mower_1_my_lawn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 1 My lawn', + }), + 'context': , + 'entity_id': 'switch.test_mower_1_my_lawn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_snapshot[switch.test_mower_2_enable_schedule-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_mower_2_enable_schedule', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Enable schedule', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'enable_schedule', + 'unique_id': '1234_enable_schedule', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_snapshot[switch.test_mower_2_enable_schedule-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 2 Enable schedule', + }), + 'context': , + 'entity_id': 'switch.test_mower_2_enable_schedule', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/husqvarna_automower/test_binary_sensor.py b/tests/components/husqvarna_automower/test_binary_sensor.py index fceaeee2321..30c9cc1bdd3 100644 --- a/tests/components/husqvarna_automower/test_binary_sensor.py +++ b/tests/components/husqvarna_automower/test_binary_sensor.py @@ -2,12 +2,11 @@ from unittest.mock import AsyncMock, patch -from aioautomower.model import MowerActivities -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerActivities, MowerAttributes from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -16,24 +15,18 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_binary_sensor_states( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test binary sensor states.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) state = hass.states.get("binary_sensor.test_mower_1_charging") assert state is not None @@ -59,6 +52,7 @@ async def test_binary_sensor_states( assert state.state == "on" +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_binary_sensor_snapshot( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/husqvarna_automower/test_button.py b/tests/components/husqvarna_automower/test_button.py index 5cbb9b893a8..25fa64b531f 100644 --- a/tests/components/husqvarna_automower/test_button.py +++ b/tests/components/husqvarna_automower/test_button.py @@ -4,13 +4,12 @@ import datetime from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerAttributes from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.button import SERVICE_PRESS -from homeassistant.components.husqvarna_automower.const import DOMAIN +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, @@ -25,31 +24,24 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -@pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) +@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5, tzinfo=datetime.UTC)) async def test_button_states_and_commands( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: - """Test button commands.""" + """Test error confirm button command.""" entity_id = "button.test_mower_1_confirm_error" await setup_integration(hass, mock_config_entry) state = hass.states.get(entity_id) assert state.name == "Test Mower 1 Confirm error" assert state.state == STATE_UNAVAILABLE - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) values[TEST_MOWER_ID].mower.is_error_confirmable = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) @@ -76,7 +68,7 @@ async def test_button_states_and_commands( mocked_method.assert_called_once_with(TEST_MOWER_ID) await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == "2024-02-29T11:16:00+00:00" + assert state.state == "2023-06-05T00:16:00+00:00" getattr(mock_automower_client.commands, "error_confirm").side_effect = ApiException( "Test error" ) @@ -92,6 +84,46 @@ async def test_button_states_and_commands( ) +@pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) +async def test_sync_clock( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], +) -> None: + """Test sync clock button command.""" + entity_id = "button.test_mower_1_sync_clock" + await setup_integration(hass, mock_config_entry) + state = hass.states.get(entity_id) + assert state.name == "Test Mower 1 Sync clock" + + mock_automower_client.get_status.return_value = values + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mocked_method = mock_automower_client.commands.set_datetime + mocked_method.assert_called_once_with(TEST_MOWER_ID) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == "2024-02-29T11:00:00+00:00" + mock_automower_client.commands.set_datetime.side_effect = ApiException("Test error") + with pytest.raises( + HomeAssistantError, + match="Failed to send command: Test error", + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_button_snapshot( hass: HomeAssistant, diff --git a/tests/components/husqvarna_automower/test_calendar.py b/tests/components/husqvarna_automower/test_calendar.py new file mode 100644 index 00000000000..8138b8c139b --- /dev/null +++ b/tests/components/husqvarna_automower/test_calendar.py @@ -0,0 +1,154 @@ +"""Tests for calendar platform.""" + +from collections.abc import Awaitable, Callable +import datetime +from http import HTTPStatus +from typing import Any +from unittest.mock import AsyncMock +import urllib +import zoneinfo + +from aioautomower.utils import mower_list_to_dictionary_dataclass 
+from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.calendar import ( + DOMAIN as CALENDAR_DOMAIN, + EVENT_END_DATETIME, + EVENT_START_DATETIME, + SERVICE_GET_EVENTS, +) +from homeassistant.components.husqvarna_automower.const import DOMAIN +from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_value_fixture, +) +from tests.typing import ClientSessionGenerator + +TEST_ENTITY = "calendar.test_mower_1" +type GetEventsFn = Callable[[str, str], Awaitable[dict[str, Any]]] + + +@pytest.fixture(name="get_events") +def get_events_fixture( + hass_client: ClientSessionGenerator, +) -> GetEventsFn: + """Fetch calendar events from the HTTP API.""" + + async def _fetch(start: str, end: str) -> list[dict[str, Any]]: + client = await hass_client() + response = await client.get( + f"/api/calendars/{TEST_ENTITY}?start={urllib.parse.quote(start)}&end={urllib.parse.quote(end)}" + ) + assert response.status == HTTPStatus.OK + results = await response.json() + return [{k: event[k] for k in ("summary", "start", "end")} for event in results] + + return _fetch + + +@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5, 12)) +async def test_calendar_state_off( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """State test of the calendar.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("calendar.test_mower_1") + assert state is not None + assert state.state == "off" + + +@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5, 19)) +async def test_calendar_state_on( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """State test of the calendar.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("calendar.test_mower_1") + assert state is not None + assert state.state == "on" + + +@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5)) +async def test_empty_calendar( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + get_events: GetEventsFn, + mower_time_zone: zoneinfo.ZoneInfo, +) -> None: + """State if there is no schedule set.""" + await setup_integration(hass, mock_config_entry) + json_values = load_json_value_fixture("mower.json", DOMAIN) + json_values["data"][0]["attributes"]["calendar"]["tasks"] = [] + values = mower_list_to_dictionary_dataclass( + json_values, + mower_time_zone, + ) + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("calendar.test_mower_1") + assert state is not None + assert state.state == "off" + events = await get_events("2023-06-05T00:00:00", "2023-06-12T00:00:00") + assert events == [] + + +@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5)) +@pytest.mark.parametrize( + ( + "start_date", + "end_date", + ), + [ + ( + 
datetime.datetime(2023, 6, 5, tzinfo=datetime.UTC), + datetime.datetime(2023, 6, 12, tzinfo=datetime.UTC), + ), + ], +) +async def test_calendar_snapshot( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + start_date: datetime, + end_date: datetime, +) -> None: + """Snapshot test of the calendar entity.""" + await setup_integration(hass, mock_config_entry) + events = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + { + ATTR_ENTITY_ID: ["calendar.test_mower_1", "calendar.test_mower_2"], + EVENT_START_DATETIME: start_date, + EVENT_END_DATETIME: end_date, + }, + blocking=True, + return_response=True, + ) + + assert events == snapshot diff --git a/tests/components/husqvarna_automower/test_config_flow.py b/tests/components/husqvarna_automower/test_config_flow.py index 31e8a9afcbd..d91078d80a2 100644 --- a/tests/components/husqvarna_automower/test_config_flow.py +++ b/tests/components/husqvarna_automower/test_config_flow.py @@ -2,6 +2,8 @@ from unittest.mock import AsyncMock, patch +from aioautomower.const import API_BASE_URL +from aioautomower.session import AutomowerEndpoint import pytest from homeassistant import config_entries @@ -18,16 +20,18 @@ from homeassistant.helpers import config_entry_oauth2_flow from . import setup_integration from .const import CLIENT_ID, USER_ID -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @pytest.mark.parametrize( - ("new_scope", "amount"), + ("new_scope", "fixture", "exception", "amount"), [ - ("iam:read amc:api", 1), - ("iam:read", 0), + ("iam:read amc:api", "mower.json", None, 1), + ("iam:read amc:api", "mower.json", Exception, 0), + ("iam:read", "mower.json", None, 0), + ("iam:read amc:api", "empty.json", None, 0), ], ) @pytest.mark.usefixtures("current_request_with_host") @@ -38,6 +42,8 @@ async def test_full_flow( jwt: str, new_scope: str, amount: int, + fixture: str, + exception: Exception | None, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -76,11 +82,17 @@ async def test_full_flow( "expires_at": 1697753347, }, ) - - with patch( - "homeassistant.components.husqvarna_automower.async_setup_entry", - return_value=True, - ) as mock_setup: + aioclient_mock.get( + f"{API_BASE_URL}/{AutomowerEndpoint.mowers}", + text=load_fixture(fixture, DOMAIN), + exc=exception, + ) + with ( + patch( + "homeassistant.components.husqvarna_automower.async_setup_entry", + return_value=True, + ) as mock_setup, + ): await hass.config_entries.flow.async_configure(result["flow_id"]) assert len(hass.config_entries.async_entries(DOMAIN)) == amount diff --git a/tests/components/husqvarna_automower/test_diagnostics.py b/tests/components/husqvarna_automower/test_diagnostics.py index 3166b09f1ee..2b47bff25a4 100644 --- a/tests/components/husqvarna_automower/test_diagnostics.py +++ b/tests/components/husqvarna_automower/test_diagnostics.py @@ -2,6 +2,7 @@ import datetime from unittest.mock import AsyncMock +import zoneinfo import pytest from syrupy.assertion import SnapshotAssertion @@ -21,7 +22,9 @@ from tests.components.diagnostics import ( from tests.typing import ClientSessionGenerator -@pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) +@pytest.mark.freeze_time( + datetime.datetime(2023, 6, 5, 
tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")) +) async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -40,7 +43,9 @@ async def test_entry_diagnostics( assert result == snapshot(exclude=props("created_at", "modified_at")) -@pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) +@pytest.mark.freeze_time( + datetime.datetime(2023, 6, 5, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")) +) async def test_device_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -49,7 +54,7 @@ async def test_device_diagnostics( mock_config_entry: MockConfigEntry, device_registry: dr.DeviceRegistry, ) -> None: - """Test select platform.""" + """Test device diagnostics platform.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/husqvarna_automower/test_init.py b/tests/components/husqvarna_automower/test_init.py index 84fe1b9e891..ae688571d2c 100644 --- a/tests/components/husqvarna_automower/test_init.py +++ b/tests/components/husqvarna_automower/test_init.py @@ -1,23 +1,28 @@ """Tests for init module.""" -from datetime import timedelta +from asyncio import Event +from datetime import datetime import http import time -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ( ApiException, AuthException, HusqvarnaWSServerHandshakeError, + TimeoutException, ) +from aioautomower.model import MowerAttributes, WorkArea from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.husqvarna_automower.const import DOMAIN, OAUTH2_TOKEN +from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util import dt as dt_util from . 
import setup_integration from .const import TEST_MOWER_ID @@ -25,6 +30,10 @@ from .const import TEST_MOWER_ID from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker +ADDITIONAL_NUMBER_ENTITIES = 1 +ADDITIONAL_SENSOR_ENTITIES = 2 +ADDITIONAL_SWITCH_ENTITIES = 1 + async def test_load_unload_entry( hass: HomeAssistant, @@ -120,28 +129,77 @@ async def test_update_failed( assert entry.state is entry_state +@patch( + "homeassistant.components.husqvarna_automower.coordinator.DEFAULT_RECONNECT_TIME", 0 +) +@pytest.mark.parametrize( + ("method_path", "exception", "error_msg"), + [ + ( + ["auth", "websocket_connect"], + HusqvarnaWSServerHandshakeError, + "Failed to connect to websocket.", + ), + ( + ["start_listening"], + TimeoutException, + "Failed to listen to websocket.", + ), + ], +) async def test_websocket_not_available( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, caplog: pytest.LogCaptureFixture, freezer: FrozenDateTimeFactory, + method_path: list[str], + exception: type[Exception], + error_msg: str, ) -> None: - """Test trying reload the websocket.""" - mock_automower_client.start_listening.side_effect = HusqvarnaWSServerHandshakeError( - "Boom" - ) + """Test trying to reload the websocket.""" + calls = [] + mock_called = Event() + mock_stall = Event() + + async def mock_function(): + mock_called.set() + await mock_stall.wait() + # Raise the first time the method is awaited + if not calls: + calls.append(None) + raise exception("Boom") + if mock_side_effect: + await mock_side_effect() + + # Find the method to mock + mock = mock_automower_client + for itm in method_path: + mock = getattr(mock, itm) + mock_side_effect = mock.side_effect + mock.side_effect = mock_function + + # Setup integration and verify log error message await setup_integration(hass, mock_config_entry) - assert "Failed to connect to websocket. 
Trying to reconnect: Boom" in caplog.text - assert mock_automower_client.auth.websocket_connect.call_count == 1 - assert mock_automower_client.start_listening.call_count == 1 - assert mock_config_entry.state is ConfigEntryState.LOADED - freezer.tick(timedelta(seconds=2)) - async_fire_time_changed(hass) + await mock_called.wait() + mock_called.clear() + # Allow the exception to be raised + mock_stall.set() + assert mock.call_count == 1 await hass.async_block_till_done() - assert mock_automower_client.auth.websocket_connect.call_count == 2 - assert mock_automower_client.start_listening.call_count == 2 - assert mock_config_entry.state is ConfigEntryState.LOADED + assert f"{error_msg} Trying to reconnect: Boom" in caplog.text + + # Simulate a successful connection + caplog.clear() + await mock_called.wait() + mock_called.clear() + await hass.async_block_till_done() + assert mock.call_count == 2 + assert "Trying to reconnect: Boom" not in caplog.text + + # Simulate hass shutting down + await hass.async_stop() + assert mock.call_count == 2 async def test_device_info( @@ -160,3 +218,106 @@ async def test_device_info( identifiers={(DOMAIN, TEST_MOWER_ID)}, ) assert reg_device == snapshot + + +async def test_coordinator_automatic_registry_cleanup( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + values: dict[str, MowerAttributes], +) -> None: + """Test automatic registry cleanup.""" + await setup_integration(hass, mock_config_entry) + entry = hass.config_entries.async_entries(DOMAIN)[0] + await hass.async_block_till_done() + + current_entites = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + current_devices = len( + dr.async_entries_for_config_entry(device_registry, entry.entry_id) + ) + + values.pop(TEST_MOWER_ID) + mock_automower_client.get_status.return_value = values + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) + == current_entites - 37 + ) + assert ( + len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) + == current_devices - 1 + ) + + +async def test_add_and_remove_work_area( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + values: dict[str, MowerAttributes], +) -> None: + """Test adding a work area in runtime.""" + await setup_integration(hass, mock_config_entry) + entry = hass.config_entries.async_entries(DOMAIN)[0] + current_entites_start = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + values[TEST_MOWER_ID].work_area_names.append("new work area") + values[TEST_MOWER_ID].work_area_dict.update({1: "new work area"}) + values[TEST_MOWER_ID].work_areas.update( + { + 1: WorkArea( + name="new work area", + cutting_height=12, + enabled=True, + progress=12, + last_time_completed=datetime( + 2024, 10, 1, 11, 11, 0, tzinfo=dt_util.get_default_time_zone() + ), + ) + } + ) + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + current_entites_after_addition = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + assert ( + current_entites_after_addition + == current_entites_start + + 
ADDITIONAL_NUMBER_ENTITIES + + ADDITIONAL_SENSOR_ENTITIES + + ADDITIONAL_SWITCH_ENTITIES + ) + + values[TEST_MOWER_ID].work_area_names.remove("new work area") + del values[TEST_MOWER_ID].work_area_dict[1] + del values[TEST_MOWER_ID].work_areas[1] + values[TEST_MOWER_ID].work_area_names.remove("Front lawn") + del values[TEST_MOWER_ID].work_area_dict[123456] + del values[TEST_MOWER_ID].work_areas[123456] + del values[TEST_MOWER_ID].calendar.tasks[:2] + values[TEST_MOWER_ID].mower.work_area_id = 654321 + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + current_entites_after_deletion = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + assert ( + current_entites_after_deletion + == current_entites_start + - ADDITIONAL_SWITCH_ENTITIES + - ADDITIONAL_NUMBER_ENTITIES + - ADDITIONAL_SENSOR_ENTITIES + ) diff --git a/tests/components/husqvarna_automower/test_lawn_mower.py b/tests/components/husqvarna_automower/test_lawn_mower.py index 2ae427e0e1e..3aca509e865 100644 --- a/tests/components/husqvarna_automower/test_lawn_mower.py +++ b/tests/components/husqvarna_automower/test_lawn_mower.py @@ -4,7 +4,7 @@ from datetime import timedelta from unittest.mock import AsyncMock from aioautomower.exceptions import ApiException -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerActivities, MowerAttributes, MowerStates from freezegun.api import FrozenDateTimeFactory import pytest from voluptuous.error import MultipleInvalid @@ -18,11 +18,7 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, -) +from tests.common import MockConfigEntry, async_fire_time_changed async def test_lawn_mower_states( @@ -30,20 +26,23 @@ async def test_lawn_mower_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test lawn_mower state.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) state = hass.states.get("lawn_mower.test_mower_1") assert state is not None assert state.state == LawnMowerActivity.DOCKED for activity, state, expected_state in ( - ("UNKNOWN", "PAUSED", LawnMowerActivity.PAUSED), - ("MOWING", "NOT_APPLICABLE", LawnMowerActivity.MOWING), - ("NOT_APPLICABLE", "ERROR", LawnMowerActivity.ERROR), + (MowerActivities.UNKNOWN, MowerStates.PAUSED, LawnMowerActivity.PAUSED), + (MowerActivities.MOWING, MowerStates.NOT_APPLICABLE, LawnMowerActivity.MOWING), + (MowerActivities.NOT_APPLICABLE, MowerStates.ERROR, LawnMowerActivity.ERROR), + ( + MowerActivities.GOING_HOME, + MowerStates.IN_OPERATION, + LawnMowerActivity.RETURNING, + ), ): values[TEST_MOWER_ID].mower.activity = activity values[TEST_MOWER_ID].mower.state = state @@ -252,12 +251,10 @@ async def test_lawn_mower_wrong_service_commands( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test lawn_mower commands.""" await setup_integration(hass, mock_config_entry) - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) 
values[TEST_MOWER_ID].capabilities.work_areas = mower_support_wa mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py index 9f2f8793bba..e1f232e7b5c 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -4,15 +4,12 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerAttributes from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import ( - DOMAIN, - EXECUTION_TIME_DELAY, -) +from homeassistant.components.husqvarna_automower.const import EXECUTION_TIME_DELAY from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -21,12 +18,7 @@ from homeassistant.helpers import entity_registry as er from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -68,19 +60,15 @@ async def test_number_workarea_commands( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test number commands.""" entity_id = "number.test_mower_1_front_lawn_cutting_height" await setup_integration(hass, mock_config_entry) - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) values[TEST_MOWER_ID].work_areas[123456].cutting_height = 75 mock_automower_client.get_status.return_value = values mocked_method = AsyncMock() - setattr( - mock_automower_client.commands, "set_cutting_height_workarea", mocked_method - ) + setattr(mock_automower_client.commands, "workarea_settings", mocked_method) await hass.services.async_call( domain="number", service="set_value", @@ -111,31 +99,6 @@ async def test_number_workarea_commands( assert len(mocked_method.mock_calls) == 2 -async def test_workarea_deleted( - hass: HomeAssistant, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test if work area is deleted after removed.""" - - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) - await setup_integration(hass, mock_config_entry) - current_entries = len( - er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) - ) - - del values[TEST_MOWER_ID].work_areas[123456] - mock_automower_client.get_status.return_value = values - await hass.config_entries.async_reload(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert len( - er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) - ) == (current_entries - 1) - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_number_snapshot( hass: HomeAssistant, diff --git a/tests/components/husqvarna_automower/test_select.py b/tests/components/husqvarna_automower/test_select.py index 
e885a4d3487..18d1b0ed21f 100644 --- a/tests/components/husqvarna_automower/test_select.py +++ b/tests/components/husqvarna_automower/test_select.py @@ -3,12 +3,10 @@ from unittest.mock import AsyncMock from aioautomower.exceptions import ApiException -from aioautomower.model import HeadlightModes -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import HeadlightModes, MowerAttributes from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -16,11 +14,7 @@ from homeassistant.exceptions import HomeAssistantError from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, -) +from tests.common import MockConfigEntry, async_fire_time_changed async def test_select_states( @@ -28,11 +22,9 @@ async def test_select_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test states of headlight mode select.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) state = hass.states.get("select.test_mower_1_headlight_mode") assert state is not None diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 1a4f545ac96..08ed5251344 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -1,14 +1,14 @@ """Tests for sensor platform.""" +import datetime from unittest.mock import AsyncMock, patch +import zoneinfo -from aioautomower.model import MowerModes -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerAttributes, MowerModes, MowerStates from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant @@ -17,12 +17,7 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform async def test_sensor_unknown_states( @@ -30,11 +25,9 @@ async def test_sensor_unknown_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test a sensor which returns unknown.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) state = hass.states.get("sensor.test_mower_1_mode") assert state is not None @@ -63,11 +56,15 @@ async def test_cutting_blade_usage_time_sensor( assert state.state == "0.034" +@pytest.mark.freeze_time( + datetime.datetime(2023, 6, 5, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")) +) async def test_next_start_sensor( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test if this sensor is only added, if data is available.""" await setup_integration(hass, mock_config_entry) @@ -75,10 +72,7 @@ async def test_next_start_sensor( assert state is not None assert state.state == "2023-06-05T17:00:00+00:00" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) - values[TEST_MOWER_ID].planner.next_start_datetime_naive = None + values[TEST_MOWER_ID].planner.next_start_datetime = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -92,6 +86,7 @@ async def test_work_area_sensor( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test the work area sensor.""" await setup_integration(hass, mock_config_entry) @@ -99,9 +94,6 @@ async def test_work_area_sensor( assert state is not None assert state.state == "Front lawn" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) values[TEST_MOWER_ID].mower.work_area_id = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) @@ -119,6 +111,7 @@ async def test_work_area_sensor( assert state.state == "my_lawn" +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ("sensor_to_test"), [ @@ -137,13 +130,10 @@ async def test_statistics_not_available( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, sensor_to_test: str, + values: dict[str, MowerAttributes], ) -> None: """Test if this sensor is only added, if data is available.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) - delattr(values[TEST_MOWER_ID].statistics, sensor_to_test) mock_automower_client.get_status.return_value = values await setup_integration(hass, mock_config_entry) @@ -156,18 +146,20 @@ async def test_error_sensor( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test error sensor.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) - for state, expected_state in ( - (None, "no_error"), - 
("can_error", "can_error"), + for state, error_key, expected_state in ( + (MowerStates.IN_OPERATION, None, "no_error"), + (MowerStates.ERROR, "can_error", "can_error"), + (MowerStates.ERROR, None, MowerStates.ERROR.lower()), + (MowerStates.ERROR_AT_POWER_UP, None, MowerStates.ERROR_AT_POWER_UP.lower()), + (MowerStates.FATAL_ERROR, None, MowerStates.FATAL_ERROR.lower()), ): - values[TEST_MOWER_ID].mower.error_key = state + values[TEST_MOWER_ID].mower.state = state + values[TEST_MOWER_ID].mower.error_key = error_key mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -176,6 +168,7 @@ async def test_error_sensor( assert state.state == expected_state +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_snapshot( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/husqvarna_automower/test_switch.py b/tests/components/husqvarna_automower/test_switch.py index 5b4e465e253..100fd9fe3a4 100644 --- a/tests/components/husqvarna_automower/test_switch.py +++ b/tests/components/husqvarna_automower/test_switch.py @@ -2,9 +2,10 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch +import zoneinfo from aioautomower.exceptions import ApiException -from aioautomower.model import MowerModes +from aioautomower.model import MowerAttributes, MowerModes, Zone from aioautomower.utils import mower_list_to_dictionary_dataclass from freezegun.api import FrozenDateTimeFactory import pytest @@ -15,7 +16,14 @@ from homeassistant.components.husqvarna_automower.const import ( EXECUTION_TIME_DELAY, ) from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL -from homeassistant.const import Platform +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er @@ -30,6 +38,8 @@ from tests.common import ( snapshot_platform, ) +TEST_AREA_ID = 0 +TEST_VARIABLE_ZONE_ID = "203F6359-AB56-4D57-A6DC-703095BB695D" TEST_ZONE_ID = "AAAAAAAA-BBBB-CCCC-DDDD-123456789101" @@ -38,11 +48,9 @@ async def test_switch_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test switch state.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) for mode, expected_state in ( @@ -61,9 +69,9 @@ async def test_switch_states( @pytest.mark.parametrize( ("service", "aioautomower_command"), [ - ("turn_off", "park_until_further_notice"), - ("turn_on", "resume_schedule"), - ("toggle", "park_until_further_notice"), + (SERVICE_TURN_OFF, "park_until_further_notice"), + (SERVICE_TURN_ON, "resume_schedule"), + (SERVICE_TOGGLE, "park_until_further_notice"), ], ) async def test_switch_commands( @@ -76,9 +84,9 @@ async def test_switch_commands( """Test switch commands.""" await setup_integration(hass, mock_config_entry) await hass.services.async_call( - domain="switch", + domain=SWITCH_DOMAIN, service=service, - service_data={"entity_id": "switch.test_mower_1_enable_schedule"}, + service_data={ATTR_ENTITY_ID: "switch.test_mower_1_enable_schedule"}, blocking=True, ) mocked_method = 
getattr(mock_automower_client.commands, aioautomower_command) @@ -90,9 +98,9 @@ async def test_switch_commands( match="Failed to send command: Test error", ): await hass.services.async_call( - domain="switch", + domain=SWITCH_DOMAIN, service=service, - service_data={"entity_id": "switch.test_mower_1_enable_schedule"}, + service_data={ATTR_ENTITY_ID: "switch.test_mower_1_enable_schedule"}, blocking=True, ) assert len(mocked_method.mock_calls) == 2 @@ -101,9 +109,9 @@ async def test_switch_commands( @pytest.mark.parametrize( ("service", "boolean", "excepted_state"), [ - ("turn_off", False, "off"), - ("turn_on", True, "on"), - ("toggle", True, "on"), + (SERVICE_TURN_OFF, False, "off"), + (SERVICE_TURN_ON, True, "on"), + (SERVICE_TOGGLE, True, "on"), ], ) async def test_stay_out_zone_switch_commands( @@ -114,21 +122,23 @@ async def test_stay_out_zone_switch_commands( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + mower_time_zone: zoneinfo.ZoneInfo, ) -> None: """Test switch commands.""" entity_id = "switch.test_mower_1_avoid_danger_zone" await setup_integration(hass, mock_config_entry) values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) + load_json_value_fixture("mower.json", DOMAIN), + mower_time_zone, ) values[TEST_MOWER_ID].stay_out_zones.zones[TEST_ZONE_ID].enabled = boolean mock_automower_client.get_status.return_value = values mocked_method = AsyncMock() setattr(mock_automower_client.commands, "switch_stay_out_zone", mocked_method) await hass.services.async_call( - domain="switch", + domain=SWITCH_DOMAIN, service=service, - service_data={"entity_id": entity_id}, + service_data={ATTR_ENTITY_ID: entity_id}, blocking=False, ) freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) @@ -145,37 +155,112 @@ async def test_stay_out_zone_switch_commands( match="Failed to send command: Test error", ): await hass.services.async_call( - domain="switch", + domain=SWITCH_DOMAIN, service=service, - service_data={"entity_id": entity_id}, + service_data={ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert len(mocked_method.mock_calls) == 2 -async def test_zones_deleted( +@pytest.mark.parametrize( + ("service", "boolean", "excepted_state"), + [ + (SERVICE_TURN_OFF, False, "off"), + (SERVICE_TURN_ON, True, "on"), + (SERVICE_TOGGLE, True, "on"), + ], +) +async def test_work_area_switch_commands( + hass: HomeAssistant, + service: str, + boolean: bool, + excepted_state: str, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + mower_time_zone: zoneinfo.ZoneInfo, + values: dict[str, MowerAttributes], +) -> None: + """Test switch commands.""" + entity_id = "switch.test_mower_1_my_lawn" + await setup_integration(hass, mock_config_entry) + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN), + mower_time_zone, + ) + values[TEST_MOWER_ID].work_areas[TEST_AREA_ID].enabled = boolean + mock_automower_client.get_status.return_value = values + mocked_method = AsyncMock() + setattr(mock_automower_client.commands, "workarea_settings", mocked_method) + await hass.services.async_call( + domain=SWITCH_DOMAIN, + service=service, + service_data={ATTR_ENTITY_ID: entity_id}, + blocking=False, + ) + freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mocked_method.assert_called_once_with(TEST_MOWER_ID, TEST_AREA_ID, enabled=boolean) + state = 
hass.states.get(entity_id) + assert state is not None + assert state.state == excepted_state + + mocked_method.side_effect = ApiException("Test error") + with pytest.raises( + HomeAssistantError, + match="Failed to send command: Test error", + ): + await hass.services.async_call( + domain=SWITCH_DOMAIN, + service=service, + service_data={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert len(mocked_method.mock_calls) == 2 + + +async def test_add_stay_out_zone( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, entity_registry: er.EntityRegistry, + values: dict[str, MowerAttributes], ) -> None: - """Test if stay-out-zone is deleted after removed.""" - - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) + """Test adding a stay out zone in runtime.""" await setup_integration(hass, mock_config_entry) - current_entries = len( - er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) + entry = hass.config_entries.async_entries(DOMAIN)[0] + current_entites = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + values[TEST_MOWER_ID].stay_out_zones.zones.update( + { + TEST_VARIABLE_ZONE_ID: Zone( + name="future_zone", + enabled=True, + ) + } ) - - del values[TEST_MOWER_ID].stay_out_zones.zones[TEST_ZONE_ID] mock_automower_client.get_status.return_value = values - await hass.config_entries.async_reload(mock_config_entry.entry_id) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert len( - er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) - ) == (current_entries - 1) + current_entites_after_addition = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + assert current_entites_after_addition == current_entites + 1 + values[TEST_MOWER_ID].stay_out_zones.zones.pop(TEST_VARIABLE_ZONE_ID) + values[TEST_MOWER_ID].stay_out_zones.zones.pop(TEST_ZONE_ID) + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + current_entites_after_deletion = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + assert current_entites_after_deletion == current_entites - 1 async def test_switch_snapshot( diff --git a/tests/components/husqvarna_automower_ble/__init__.py b/tests/components/husqvarna_automower_ble/__init__.py new file mode 100644 index 00000000000..7ca5aea121d --- /dev/null +++ b/tests/components/husqvarna_automower_ble/__init__.py @@ -0,0 +1,74 @@ +"""Tests for the Husqvarna Automower Bluetooth integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + +AUTOMOWER_SERVICE_INFO = BluetoothServiceInfo( + name="305", + address="00000000-0000-0000-0000-000000000003", + rssi=-63, + service_data={}, + manufacturer_data={1062: b"\x05\x04\xbf\xcf\xbb\r"}, + service_uuids=[ + "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", + "00001800-0000-1000-8000-00805f9b34fb", + ], + source="local", +) + +AUTOMOWER_UNNAMED_SERVICE_INFO = BluetoothServiceInfo( + name=None, + address="00000000-0000-0000-0000-000000000004", + rssi=-63, + service_data={}, + 
manufacturer_data={1062: b"\x05\x04\xbf\xcf\xbb\r"}, + service_uuids=[ + "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", + "00001800-0000-1000-8000-00805f9b34fb", + ], + source="local", +) + +AUTOMOWER_MISSING_MANUFACTURER_DATA_SERVICE_INFO = BluetoothServiceInfo( + name="Missing Manufacturer Data", + address="00000000-0000-0000-0002-000000000001", + rssi=-63, + service_data={}, + manufacturer_data={}, + service_uuids=[ + "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", + "00001800-0000-1000-8000-00805f9b34fb", + ], + source="local", +) + +AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO = BluetoothServiceInfo( + name="Unsupported Group", + address="00000000-0000-0000-0002-000000000002", + rssi=-63, + service_data={}, + manufacturer_data={1062: b"\x05\x04\xbf\xcf\xbb\r"}, + service_uuids=[ + "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", + ], + source="local", +) + + +async def setup_entry( + hass: HomeAssistant, mock_entry: MockConfigEntry, platforms: list[Platform] +) -> None: + """Make sure the device is available.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + + with patch("homeassistant.components.husqvarna_automower_ble.PLATFORMS", platforms): + mock_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/husqvarna_automower_ble/conftest.py b/tests/components/husqvarna_automower_ble/conftest.py new file mode 100644 index 00000000000..3a8e881aba0 --- /dev/null +++ b/tests/components/husqvarna_automower_ble/conftest.py @@ -0,0 +1,62 @@ +"""Common fixtures for the Husqvarna Automower Bluetooth tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.husqvarna_automower_ble.const import DOMAIN +from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID + +from . 
import AUTOMOWER_SERVICE_INFO + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.husqvarna_automower_ble.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(autouse=True) +def mock_automower_client(enable_bluetooth: None) -> Generator[AsyncMock]: + """Mock a BleakClient client.""" + with ( + patch( + "homeassistant.components.husqvarna_automower_ble.Mower", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.husqvarna_automower_ble.config_flow.Mower", + new=mock_client, + ), + ): + client = mock_client.return_value + client.connect.return_value = True + client.is_connected.return_value = True + client.get_model.return_value = "305" + client.battery_level.return_value = 100 + client.mower_state.return_value = "pendingStart" + client.mower_activity.return_value = "charging" + client.probe_gatts.return_value = ("Husqvarna", "Automower", "305") + + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Husqvarna AutoMower", + data={ + CONF_ADDRESS: AUTOMOWER_SERVICE_INFO.address, + CONF_CLIENT_ID: 1197489078, + }, + unique_id=AUTOMOWER_SERVICE_INFO.address, + ) diff --git a/tests/components/husqvarna_automower_ble/snapshots/test_init.ambr b/tests/components/husqvarna_automower_ble/snapshots/test_init.ambr new file mode 100644 index 00000000000..1cc54020195 --- /dev/null +++ b/tests/components/husqvarna_automower_ble/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_setup + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'husqvarna_automower_ble', + '00000000-0000-0000-0000-000000000003_1197489078', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Husqvarna', + 'model': None, + 'model_id': '305', + 'name': 'Husqvarna AutoMower', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/husqvarna_automower_ble/test_config_flow.py b/tests/components/husqvarna_automower_ble/test_config_flow.py new file mode 100644 index 00000000000..e053a28b7dd --- /dev/null +++ b/tests/components/husqvarna_automower_ble/test_config_flow.py @@ -0,0 +1,198 @@ +"""Test the Husqvarna Bluetooth config flow.""" + +from unittest.mock import Mock, patch + +from bleak import BleakError +import pytest + +from homeassistant.components.husqvarna_automower_ble.const import DOMAIN +from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER +from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import ( + AUTOMOWER_SERVICE_INFO, + AUTOMOWER_UNNAMED_SERVICE_INFO, + AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO, +) + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + +pytestmark = pytest.mark.usefixtures("mock_setup_entry") + + +@pytest.fixture(autouse=True) +def mock_random() -> Mock: + """Mock random to generate predictable client id.""" + with patch( + "homeassistant.components.husqvarna_automower_ble.config_flow.random" + ) as mock_random: + mock_random.randint.return_value = 1197489078 + yield mock_random + + +async def test_user_selection(hass: HomeAssistant) -> None: + """Test we can select a device.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + inject_bluetooth_service_info(hass, AUTOMOWER_UNNAMED_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_ADDRESS: "00000000-0000-0000-0000-000000000001"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Husqvarna Automower" + assert result["result"].unique_id == "00000000-0000-0000-0000-000000000001" + + assert result["data"] == { + CONF_ADDRESS: "00000000-0000-0000-0000-000000000001", + CONF_CLIENT_ID: 1197489078, + } + + +async def test_bluetooth(hass: HomeAssistant) -> None: + """Test bluetooth device discovery.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + result = hass.config_entries.flow.async_progress_by_handler(DOMAIN)[0] + assert result["step_id"] == "confirm" + assert result["context"]["unique_id"] == "00000000-0000-0000-0000-000000000003" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Husqvarna Automower" + assert result["result"].unique_id == "00000000-0000-0000-0000-000000000003" + + assert result["data"] == { + CONF_ADDRESS: "00000000-0000-0000-0000-000000000003", + CONF_CLIENT_ID: 1197489078, + } + + +async def test_bluetooth_invalid(hass: HomeAssistant) -> None: + """Test bluetooth device discovery with invalid data.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_BLUETOOTH}, + data=AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices_found" + + +async def test_failed_connect( + hass: HomeAssistant, + mock_automower_client: Mock, +) -> None: + """Test we can select a device.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + inject_bluetooth_service_info(hass, AUTOMOWER_UNNAMED_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + mock_automower_client.connect.side_effect = False + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": 
SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_ADDRESS: "00000000-0000-0000-0000-000000000001"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Husqvarna Automower" + assert result["result"].unique_id == "00000000-0000-0000-0000-000000000001" + + assert result["data"] == { + CONF_ADDRESS: "00000000-0000-0000-0000-000000000001", + CONF_CLIENT_ID: 1197489078, + } + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test we can select a device.""" + + mock_config_entry.add_to_hass(hass) + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + + await hass.async_block_till_done(wait_background_tasks=True) + + # Test we should not discover the already configured device + assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 0 + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_ADDRESS: "00000000-0000-0000-0000-000000000003"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_exception_connect( + hass: HomeAssistant, + mock_automower_client: Mock, +) -> None: + """Test we can select a device.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + inject_bluetooth_service_info(hass, AUTOMOWER_UNNAMED_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + mock_automower_client.probe_gatts.side_effect = BleakError + + result = hass.config_entries.flow.async_progress_by_handler(DOMAIN)[0] + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" diff --git a/tests/components/husqvarna_automower_ble/test_init.py b/tests/components/husqvarna_automower_ble/test_init.py new file mode 100644 index 00000000000..3cb4338eca4 --- /dev/null +++ b/tests/components/husqvarna_automower_ble/test_init.py @@ -0,0 +1,71 @@ +"""Test the Husqvarna Automower Bluetooth setup.""" + +from unittest.mock import Mock + +from bleak import BleakError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.husqvarna_automower_ble.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import AUTOMOWER_SERVICE_INFO + +from tests.common import MockConfigEntry + +pytestmark = pytest.mark.usefixtures("mock_automower_client") + + +async def test_setup( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test setup creates expected devices.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, f"{AUTOMOWER_SERVICE_INFO.address}_1197489078")} + ) + + assert device_entry == snapshot + + +async def test_setup_retry_connect( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setup creates expected devices.""" + + mock_automower_client.connect.return_value = False + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_failed_connect( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setup creates expected devices.""" + + mock_automower_client.connect.side_effect = BleakError + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/husqvarna_automower_ble/test_lawn_mower.py b/tests/components/husqvarna_automower_ble/test_lawn_mower.py new file mode 100644 index 00000000000..3f00d3dbff0 --- /dev/null +++ b/tests/components/husqvarna_automower_ble/test_lawn_mower.py @@ -0,0 +1,126 @@ +"""Test the Husqvarna Automower Bluetooth setup.""" + +from datetime import timedelta +from unittest.mock import Mock + +from bleak import BleakError +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, async_fire_time_changed + +pytestmark = pytest.mark.usefixtures("mock_automower_client") + + +@pytest.mark.parametrize( + ( + "is_connected_side_effect", + "is_connected_return_value", + "connect_side_effect", + "connect_return_value", + ), + [ + (None, False, None, False), + (None, False, BleakError, False), + (None, False, None, True), + (BleakError, False, None, True), + ], +) +async def test_setup_disconnect( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + is_connected_side_effect: Exception, + is_connected_return_value: bool, + connect_side_effect: Exception, + connect_return_value: bool, +) -> None: + """Test disconnected device.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + assert hass.states.get("lawn_mower.husqvarna_automower").state != STATE_UNAVAILABLE + + mock_automower_client.is_connected.side_effect = is_connected_side_effect + mock_automower_client.is_connected.return_value = is_connected_return_value + 
mock_automower_client.connect.side_effect = connect_side_effect + mock_automower_client.connect.return_value = connect_return_value + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("lawn_mower.husqvarna_automower").state == STATE_UNAVAILABLE + + +@pytest.mark.parametrize( + ("attribute"), + [ + "mower_activity", + "mower_state", + "battery_level", + ], +) +async def test_invalid_data_received( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + attribute: str, +) -> None: + """Test invalid data received.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + getattr(mock_automower_client, attribute).return_value = None + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("lawn_mower.husqvarna_automower").state == STATE_UNAVAILABLE + + +@pytest.mark.parametrize( + ("attribute"), + [ + "mower_activity", + "mower_state", + "battery_level", + ], +) +async def test_bleak_error_data_update( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + attribute: str, +) -> None: + """Test BleakError during data update.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + getattr(mock_automower_client, attribute).side_effect = BleakError + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("lawn_mower.husqvarna_automower").state == STATE_UNAVAILABLE diff --git a/tests/components/hydrawise/conftest.py b/tests/components/hydrawise/conftest.py index a938322414b..2de7fb1da9a 100644 --- a/tests/components/hydrawise/conftest.py +++ b/tests/components/hydrawise/conftest.py @@ -56,7 +56,6 @@ def mock_legacy_pydrawise( @pytest.fixture def mock_pydrawise( - mock_auth: AsyncMock, user: User, controller: Controller, zones: list[Zone], diff --git a/tests/components/hydrawise/test_binary_sensor.py b/tests/components/hydrawise/test_binary_sensor.py index a42f9b1c044..40cd32920b0 100644 --- a/tests/components/hydrawise/test_binary_sensor.py +++ b/tests/components/hydrawise/test_binary_sensor.py @@ -9,7 +9,7 @@ from freezegun.api import FrozenDateTimeFactory from pydrawise.schema import Controller from syrupy.assertion import SnapshotAssertion -from homeassistant.components.hydrawise.const import SCAN_INTERVAL +from homeassistant.components.hydrawise.const import MAIN_SCAN_INTERVAL from homeassistant.const import STATE_OFF, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -42,7 +42,8 @@ async def test_update_data_fails( # Make the coordinator refresh data. 
mock_pydrawise.get_user.reset_mock(return_value=True) mock_pydrawise.get_user.side_effect = ClientError - freezer.tick(SCAN_INTERVAL + timedelta(seconds=30)) + mock_pydrawise.get_water_use_summary.side_effect = ClientError + freezer.tick(MAIN_SCAN_INTERVAL + timedelta(seconds=30)) async_fire_time_changed(hass) await hass.async_block_till_done() @@ -61,7 +62,7 @@ async def test_controller_offline( """Test the binary_sensor for the controller being online.""" # Make the coordinator refresh data. controller.online = False - freezer.tick(SCAN_INTERVAL + timedelta(seconds=30)) + freezer.tick(MAIN_SCAN_INTERVAL + timedelta(seconds=30)) async_fire_time_changed(hass) await hass.async_block_till_done() diff --git a/tests/components/hydrawise/test_config_flow.py b/tests/components/hydrawise/test_config_flow.py index e85b1b9b249..4d25fd5840b 100644 --- a/tests/components/hydrawise/test_config_flow.py +++ b/tests/components/hydrawise/test_config_flow.py @@ -21,6 +21,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_form( hass: HomeAssistant, mock_setup_entry: AsyncMock, + mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User, ) -> None: @@ -46,11 +47,12 @@ async def test_form( CONF_PASSWORD: "__password__", } assert len(mock_setup_entry.mock_calls) == 1 - mock_pydrawise.get_user.assert_called_once_with(fetch_zones=False) + mock_auth.token.assert_awaited_once_with() + mock_pydrawise.get_user.assert_awaited_once_with(fetch_zones=False) async def test_form_api_error( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User ) -> None: """Test we handle API errors.""" mock_pydrawise.get_user.side_effect = ClientError("XXX") @@ -71,8 +73,29 @@ async def test_form_api_error( assert result2["type"] is FlowResultType.CREATE_ENTRY -async def test_form_connect_timeout( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User +async def test_form_auth_connect_timeout( + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock +) -> None: + """Test we handle API errors.""" + mock_auth.token.side_effect = TimeoutError + init_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + data = {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"} + result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], data + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "timeout_connect"} + + mock_auth.token.reset_mock(side_effect=True) + result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) + assert result2["type"] is FlowResultType.CREATE_ENTRY + + +async def test_form_client_connect_timeout( + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User ) -> None: """Test we handle API errors.""" mock_pydrawise.get_user.side_effect = TimeoutError @@ -94,10 +117,10 @@ async def test_form_connect_timeout( async def test_form_not_authorized_error( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock ) -> None: """Test we handle API errors.""" - mock_pydrawise.get_user.side_effect = NotAuthorizedError + mock_auth.token.side_effect = NotAuthorizedError init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -109,8 +132,7 @@ async def test_form_not_authorized_error( assert result["type"] 
is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} - mock_pydrawise.get_user.reset_mock(side_effect=True) - mock_pydrawise.get_user.return_value = user + mock_auth.token.reset_mock(side_effect=True) result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -118,6 +140,7 @@ async def test_form_not_authorized_error( async def test_reauth( hass: HomeAssistant, user: User, + mock_auth: AsyncMock, mock_pydrawise: AsyncMock, ) -> None: """Test that re-authorization works.""" diff --git a/tests/components/hydrawise/test_entity_availability.py b/tests/components/hydrawise/test_entity_availability.py index 58ded5fe6c3..27587425c31 100644 --- a/tests/components/hydrawise/test_entity_availability.py +++ b/tests/components/hydrawise/test_entity_availability.py @@ -8,7 +8,7 @@ from aiohttp import ClientError from freezegun.api import FrozenDateTimeFactory from pydrawise.schema import Controller -from homeassistant.components.hydrawise.const import SCAN_INTERVAL +from homeassistant.components.hydrawise.const import WATER_USE_SCAN_INTERVAL from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -42,7 +42,8 @@ async def test_api_offline( config_entry = await mock_add_config_entry() mock_pydrawise.get_user.reset_mock(return_value=True) mock_pydrawise.get_user.side_effect = ClientError - freezer.tick(SCAN_INTERVAL + timedelta(seconds=30)) + mock_pydrawise.get_water_use_summary.side_effect = ClientError + freezer.tick(WATER_USE_SCAN_INTERVAL + timedelta(seconds=30)) async_fire_time_changed(hass) await hass.async_block_till_done() _test_availability(hass, config_entry, entity_registry) diff --git a/tests/components/hydrawise/test_sensor.py b/tests/components/hydrawise/test_sensor.py index b9ff99f0013..1c14a07f182 100644 --- a/tests/components/hydrawise/test_sensor.py +++ b/tests/components/hydrawise/test_sensor.py @@ -1,12 +1,18 @@ """Test Hydrawise sensor.""" from collections.abc import Awaitable, Callable -from unittest.mock import patch +from datetime import timedelta +from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory from pydrawise.schema import Controller, ControllerWaterUseSummary, User, Zone import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.hydrawise.const import ( + MAIN_SCAN_INTERVAL, + WATER_USE_SCAN_INTERVAL, +) from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -16,7 +22,7 @@ from homeassistant.util.unit_system import ( UnitSystem, ) -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.freeze_time("2023-10-01 00:00:00+00:00") @@ -50,6 +56,34 @@ async def test_suspended_state( assert next_cycle.state == "unknown" +@pytest.mark.freeze_time("2024-11-01 00:00:00+00:00") +async def test_usage_refresh( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + controller_water_use_summary: ControllerWaterUseSummary, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that water usage summaries refresh less frequently than other data.""" + assert hass.states.get("sensor.zone_one_daily_active_water_use") is not None + mock_pydrawise.get_water_use_summary.assert_called_once() + 
+ # Make the coordinator refresh data. + mock_pydrawise.get_water_use_summary.reset_mock() + freezer.tick(MAIN_SCAN_INTERVAL + timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + # Make sure we didn't fetch water use summary again. + mock_pydrawise.get_water_use_summary.assert_not_called() + + # Wait for enough time to pass for a water use summary fetch. + mock_pydrawise.get_water_use_summary.return_value = controller_water_use_summary + freezer.tick(WATER_USE_SCAN_INTERVAL + timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_pydrawise.get_water_use_summary.assert_called_once() + + async def test_no_sensor_and_water_state( hass: HomeAssistant, controller: Controller, diff --git a/tests/components/hydrawise/test_valve.py b/tests/components/hydrawise/test_valve.py index 918fae00017..7d769f920e6 100644 --- a/tests/components/hydrawise/test_valve.py +++ b/tests/components/hydrawise/test_valve.py @@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, patch from pydrawise.schema import Zone from syrupy.assertion import SnapshotAssertion -from homeassistant.components.valve import DOMAIN +from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_VALVE, @@ -42,7 +42,7 @@ async def test_services( ) -> None: """Test valve services.""" await hass.services.async_call( - DOMAIN, + VALVE_DOMAIN, SERVICE_OPEN_VALVE, service_data={ATTR_ENTITY_ID: "valve.zone_one"}, blocking=True, @@ -51,7 +51,7 @@ async def test_services( mock_pydrawise.reset_mock() await hass.services.async_call( - DOMAIN, + VALVE_DOMAIN, SERVICE_CLOSE_VALVE, service_data={ATTR_ENTITY_ID: "valve.zone_one"}, blocking=True, diff --git a/tests/components/hyperion/__init__.py b/tests/components/hyperion/__init__.py index 72aba96e81f..36137ce0ddd 100644 --- a/tests/components/hyperion/__init__.py +++ b/tests/components/hyperion/__init__.py @@ -124,9 +124,9 @@ def add_test_config_entry( hass: HomeAssistant, data: dict[str, Any] | None = None, options: dict[str, Any] | None = None, -) -> ConfigEntry: +) -> MockConfigEntry: """Add a test config entry.""" - config_entry: MockConfigEntry = MockConfigEntry( + config_entry = MockConfigEntry( entry_id=TEST_CONFIG_ENTRY_ID, domain=DOMAIN, data=data diff --git a/tests/components/hyperion/test_config_flow.py b/tests/components/hyperion/test_config_flow.py index fb4fa1fe671..4109fe0f653 100644 --- a/tests/components/hyperion/test_config_flow.py +++ b/tests/components/hyperion/test_config_flow.py @@ -20,7 +20,7 @@ from homeassistant.components.hyperion.const import ( DOMAIN, ) from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( ATTR_ENTITY_ID, CONF_HOST, @@ -861,12 +861,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: ), patch("homeassistant.components.hyperion.async_setup_entry", return_value=True), ): - result = await _init_flow( - hass, - source=SOURCE_REAUTH, - data=config_data, - ) - await hass.async_block_till_done() + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM result = await _configure_flow( @@ -886,18 +881,13 @@ async def test_reauth_cannot_connect(hass: HomeAssistant) -> None: CONF_PORT: TEST_PORT, } - add_test_config_entry(hass, data=config_data) + config_entry = 
add_test_config_entry(hass, data=config_data) client = create_mock_client() client.async_client_connect = AsyncMock(return_value=False) with patch( "homeassistant.components.hyperion.client.HyperionClient", return_value=client ): - result = await _init_flow( - hass, - source=SOURCE_REAUTH, - data=config_data, - ) - await hass.async_block_till_done() + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" diff --git a/tests/components/iaqualink/test_config_flow.py b/tests/components/iaqualink/test_config_flow.py index 4aaa66416f6..26540eb7308 100644 --- a/tests/components/iaqualink/test_config_flow.py +++ b/tests/components/iaqualink/test_config_flow.py @@ -7,7 +7,8 @@ from iaqualink.exception import ( AqualinkServiceUnauthorizedException, ) -from homeassistant.components.iaqualink import config_flow +from homeassistant.components.iaqualink import DOMAIN, config_flow +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -18,13 +19,12 @@ async def test_already_configured( """Test config flow when iaqualink component is already setup.""" config_entry.add_to_hass(hass) - flow = config_flow.AqualinkFlowHandler() - flow.hass = hass - flow.context = {} - - result = await flow.async_step_user(config_data) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" async def test_without_config(hass: HomeAssistant) -> None: diff --git a/tests/components/ibeacon/test_device_tracker.py b/tests/components/ibeacon/test_device_tracker.py index dcc21b5bfc9..e34cc480cb0 100644 --- a/tests/components/ibeacon/test_device_tracker.py +++ b/tests/components/ibeacon/test_device_tracker.py @@ -11,9 +11,7 @@ from homeassistant.components.bluetooth import ( async_ble_device_from_address, async_last_service_info, ) -from homeassistant.components.bluetooth.const import ( # pylint: disable=hass-component-root-import - UNAVAILABLE_TRACK_SECONDS, -) +from homeassistant.components.bluetooth.const import UNAVAILABLE_TRACK_SECONDS from homeassistant.components.ibeacon.const import ( DOMAIN, UNAVAILABLE_TIMEOUT, diff --git a/tests/components/ibeacon/test_sensor.py b/tests/components/ibeacon/test_sensor.py index e2ddf1dd7bc..f4dba57bced 100644 --- a/tests/components/ibeacon/test_sensor.py +++ b/tests/components/ibeacon/test_sensor.py @@ -4,9 +4,7 @@ from datetime import timedelta import pytest -from homeassistant.components.bluetooth.const import ( # pylint: disable=hass-component-root-import - UNAVAILABLE_TRACK_SECONDS, -) +from homeassistant.components.bluetooth.const import UNAVAILABLE_TRACK_SECONDS from homeassistant.components.ibeacon.const import DOMAIN, UPDATE_INTERVAL from homeassistant.components.sensor import ATTR_STATE_CLASS from homeassistant.const import ( diff --git a/tests/components/icloud/test_config_flow.py b/tests/components/icloud/test_config_flow.py index ec8d11f1135..c0bc5d7ed2e 100644 --- a/tests/components/icloud/test_config_flow.py +++ b/tests/components/icloud/test_config_flow.py @@ -18,7 +18,7 @@ from homeassistant.components.icloud.const import ( DEFAULT_WITH_FAMILY, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from 
homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -386,12 +386,7 @@ async def test_password_update( ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, - data={**MOCK_CONFIG}, - ) - + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( @@ -410,12 +405,7 @@ async def test_password_update_wrong_password(hass: HomeAssistant) -> None: ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, - data={**MOCK_CONFIG}, - ) - + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM with patch( diff --git a/tests/components/idasen_desk/test_cover.py b/tests/components/idasen_desk/test_cover.py index 0110fe7d820..83312c04e72 100644 --- a/tests/components/idasen_desk/test_cover.py +++ b/tests/components/idasen_desk/test_cover.py @@ -10,14 +10,13 @@ from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.const import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_OPEN, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -36,7 +35,7 @@ async def test_cover_available( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 mock_desk_api.connect = AsyncMock() @@ -51,11 +50,11 @@ async def test_cover_available( @pytest.mark.parametrize( ("service", "service_data", "expected_state", "expected_position"), [ - (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 100}, STATE_OPEN, 100), - (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 0}, STATE_CLOSED, 0), - (SERVICE_OPEN_COVER, {}, STATE_OPEN, 100), - (SERVICE_CLOSE_COVER, {}, STATE_CLOSED, 0), - (SERVICE_STOP_COVER, {}, STATE_OPEN, 60), + (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 100}, CoverState.OPEN, 100), + (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 0}, CoverState.CLOSED, 0), + (SERVICE_OPEN_COVER, {}, CoverState.OPEN, 100), + (SERVICE_CLOSE_COVER, {}, CoverState.CLOSED, 0), + (SERVICE_STOP_COVER, {}, CoverState.OPEN, 60), ], ) async def test_cover_services( @@ -71,7 +70,7 @@ async def test_cover_services( await init_integration(hass) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 await hass.services.async_call( COVER_DOMAIN, diff --git a/tests/components/ifttt/test_init.py b/tests/components/ifttt/test_init.py index 44896dc0f2c..c6d24421a8a 100644 --- a/tests/components/ifttt/test_init.py +++ b/tests/components/ifttt/test_init.py @@ -2,8 +2,8 @@ from homeassistant import config_entries from homeassistant.components import ifttt -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from tests.typing import ClientSessionGenerator diff --git a/tests/components/image/conftest.py b/tests/components/image/conftest.py index 8bb5d19b6db..06ef7db9f49 
100644 --- a/tests/components/image/conftest.py +++ b/tests/components/image/conftest.py @@ -52,6 +52,21 @@ class MockImageEntityInvalidContentType(image.ImageEntity): return b"Test" +class MockImageEntityCapitalContentType(image.ImageEntity): + """Mock image entity with correct content type, but capitalized.""" + + _attr_name = "Test" + + async def async_added_to_hass(self): + """Set the update time and assign a capitalized content type.""" + self._attr_content_type = "Image/jpeg" + self._attr_image_last_updated = dt_util.utcnow() + + async def async_image(self) -> bytes | None: + """Return bytes of image.""" + return b"Test" + + class MockURLImageEntity(image.ImageEntity): """Mock image entity.""" @@ -73,6 +88,16 @@ class MockImageNoStateEntity(image.ImageEntity): return b"Test" +class MockImageNoDataEntity(image.ImageEntity): + """Mock image entity.""" + + _attr_name = "Test" + + async def async_image(self) -> bytes | None: + """Return bytes of image.""" + return None + + class MockImageSyncEntity(image.ImageEntity): """Mock image entity.""" diff --git a/tests/components/image/test_init.py b/tests/components/image/test_init.py index 717e82a652d..3bcf0df52e3 100644 --- a/tests/components/image/test_init.py +++ b/tests/components/image/test_init.py @@ -3,7 +3,7 @@ from datetime import datetime from http import HTTPStatus import ssl -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, mock_open, patch from aiohttp import hdrs from freezegun.api import FrozenDateTimeFactory @@ -13,12 +13,16 @@ import respx from homeassistant.components import image from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from .conftest import ( MockImageEntity, + MockImageEntityCapitalContentType, MockImageEntityInvalidContentType, + MockImageNoDataEntity, MockImageNoStateEntity, MockImagePlatform, MockImageSyncEntity, @@ -138,6 +142,32 @@ async def test_no_valid_content_type( assert resp.status == HTTPStatus.INTERNAL_SERVER_ERROR +async def test_valid_but_capitalized_content_type( + hass: HomeAssistant, hass_client: ClientSessionGenerator +) -> None: + """Test a valid but capitalized content type.""" + mock_integration(hass, MockModule(domain="test")) + mock_platform( + hass, "test.image", MockImagePlatform([MockImageEntityCapitalContentType(hass)]) + ) + assert await async_setup_component( + hass, image.DOMAIN, {"image": {"platform": "test"}} + ) + await hass.async_block_till_done() + + client = await hass_client() + + state = hass.states.get("image.test") + access_token = state.attributes["access_token"] + assert state.attributes == { + "access_token": access_token, + "entity_picture": f"/api/image_proxy/image.test?token={access_token}", + "friendly_name": "Test", + } + resp = await client.get(f"/api/image_proxy/image.test?token={access_token}") + assert resp.status == HTTPStatus.OK + + async def test_fetch_image_authenticated( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_image_platform: None ) -> None: @@ -354,3 +384,112 @@ async def test_image_stream( await hass.async_block_till_done() await close_future + + +async def test_snapshot_service(hass: HomeAssistant) -> None: + """Test snapshot service.""" + mopen = mock_open() + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.image", MockImagePlatform([MockImageSyncEntity(hass)])) + assert await 
async_setup_component( + hass, image.DOMAIN, {"image": {"platform": "test"}} + ) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.image.open", mopen, create=True), + patch("homeassistant.components.image.os.makedirs"), + patch.object(hass.config, "is_allowed_path", return_value=True), + ): + await hass.services.async_call( + image.DOMAIN, + image.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "image.test", + image.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) + + mock_write = mopen().write + + assert len(mock_write.mock_calls) == 1 + assert mock_write.mock_calls[0][1][0] == b"Test" + + +async def test_snapshot_service_no_image(hass: HomeAssistant) -> None: + """Test snapshot service with no image.""" + mopen = mock_open() + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.image", MockImagePlatform([MockImageNoDataEntity(hass)])) + assert await async_setup_component( + hass, image.DOMAIN, {"image": {"platform": "test"}} + ) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.image.open", mopen, create=True), + patch( + "homeassistant.components.image.os.makedirs", + ), + patch.object(hass.config, "is_allowed_path", return_value=True), + ): + await hass.services.async_call( + image.DOMAIN, + image.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "image.test", + image.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) + + mock_write = mopen().write + + assert len(mock_write.mock_calls) == 0 + + +async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: + """Test snapshot service with a not allowed path.""" + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.image", MockImagePlatform([MockURLImageEntity(hass)])) + assert await async_setup_component( + hass, image.DOMAIN, {"image": {"platform": "test"}} + ) + await hass.async_block_till_done() + + with pytest.raises(HomeAssistantError, match="/test/snapshot.jpg"): + await hass.services.async_call( + image.DOMAIN, + image.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "image.test", + image.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) + + +async def test_snapshot_service_os_error(hass: HomeAssistant) -> None: + """Test snapshot service with os error.""" + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.image", MockImagePlatform([MockImageSyncEntity(hass)])) + assert await async_setup_component( + hass, image.DOMAIN, {"image": {"platform": "test"}} + ) + await hass.async_block_till_done() + + with ( + patch.object(hass.config, "is_allowed_path", return_value=True), + patch("os.makedirs", side_effect=OSError), + pytest.raises(HomeAssistantError), + ): + await hass.services.async_call( + image.DOMAIN, + image.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "image.test", + image.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) diff --git a/tests/components/image_upload/test_media_source.py b/tests/components/image_upload/test_media_source.py new file mode 100644 index 00000000000..d66e099bdc9 --- /dev/null +++ b/tests/components/image_upload/test_media_source.py @@ -0,0 +1,90 @@ +"""Test image_upload media source.""" + +import tempfile +from unittest.mock import patch + +from aiohttp import ClientSession +import pytest + +from homeassistant.components import media_source +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from . 
import TEST_IMAGE + +from tests.typing import ClientSessionGenerator + + +@pytest.fixture(autouse=True) +async def setup_media_source(hass: HomeAssistant) -> None: + """Set up media source.""" + assert await async_setup_component(hass, "media_source", {}) + + +async def __upload_test_image( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> str: + with ( + tempfile.TemporaryDirectory() as tempdir, + patch.object(hass.config, "path", return_value=tempdir), + ): + assert await async_setup_component(hass, "image_upload", {}) + client: ClientSession = await hass_client() + + file = await hass.async_add_executor_job(TEST_IMAGE.open, "rb") + res = await client.post("/api/image/upload", data={"file": file}) + hass.async_add_executor_job(file.close) + + assert res.status == 200 + item = await res.json() + assert item["content_type"] == "image/png" + assert item["filesize"] == 38847 + return item["id"] + + +async def test_browsing( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test browsing image media source.""" + image_id = await __upload_test_image(hass, hass_client) + + item = await media_source.async_browse_media(hass, "media-source://image_upload") + + assert item is not None + assert item.title == "Image Upload" + assert len(item.children) == 1 + assert item.children[0].media_content_type == "image/png" + assert item.children[0].identifier == image_id + assert item.children[0].thumbnail == f"/api/image/serve/{image_id}/256x256" + + with pytest.raises( + media_source.BrowseError, + match="Unknown item", + ): + await media_source.async_browse_media( + hass, "media-source://image_upload/invalid_path" + ) + + +async def test_resolving( + hass: HomeAssistant, hass_client: ClientSessionGenerator +) -> None: + """Test resolving.""" + image_id = await __upload_test_image(hass, hass_client) + item = await media_source.async_resolve_media( + hass, f"media-source://image_upload/{image_id}", None + ) + assert item is not None + assert item.url == f"/api/image/serve/{image_id}/original" + assert item.mime_type == "image/png" + + invalid_id = "aabbccddeeff" + with pytest.raises( + media_source.Unresolvable, + match=f"Could not resolve media item: {invalid_id}", + ): + await media_source.async_resolve_media( + hass, f"media-source://image_upload/{invalid_id}", None + ) diff --git a/tests/components/imap/const.py b/tests/components/imap/const.py index 037960c9e5d..8f6761bd795 100644 --- a/tests/components/imap/const.py +++ b/tests/components/imap/const.py @@ -141,6 +141,8 @@ TEST_CONTENT_MULTIPART_BASE64_INVALID = ( ) EMPTY_SEARCH_RESPONSE = ("OK", [b"", b"Search completed (0.0001 + 0.000 secs)."]) +EMPTY_SEARCH_RESPONSE_ALT = ("OK", [b"Search completed (0.0001 + 0.000 secs)."]) + BAD_RESPONSE = ("BAD", [b"", b"Unexpected error"]) TEST_SEARCH_RESPONSE = ("OK", [b"1", b"Search completed (0.0001 + 0.000 secs)."]) diff --git a/tests/components/imap/test_config_flow.py b/tests/components/imap/test_config_flow.py index 459cecec4a6..2270030ad4f 100644 --- a/tests/components/imap/test_config_flow.py +++ b/tests/components/imap/test_config_flow.py @@ -15,7 +15,7 @@ from homeassistant.components.imap.const import ( DOMAIN, ) from homeassistant.components.imap.errors import InvalidAuth, InvalidFolder -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -215,18 +215,13 @@ async def 
test_reauth_success(hass: HomeAssistant, mock_setup_entry: AsyncMock) ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {CONF_USERNAME: "email@email.com"} + assert result["description_placeholders"] == { + CONF_USERNAME: "email@email.com", + CONF_NAME: "Mock Title", + } with patch( "homeassistant.components.imap.config_flow.connect_to_server" @@ -256,15 +251,7 @@ async def test_reauth_failed(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -294,15 +281,7 @@ async def test_reauth_failed_conn_error(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/imap/test_diagnostics.py b/tests/components/imap/test_diagnostics.py index 23450104aed..43f837679c8 100644 --- a/tests/components/imap/test_diagnostics.py +++ b/tests/components/imap/test_diagnostics.py @@ -41,7 +41,7 @@ async def test_entry_diagnostics( # Make sure we have had one update (when polling) async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" diff --git a/tests/components/imap/test_init.py b/tests/components/imap/test_init.py index 40c3ce013e4..d4281b9e513 100644 --- a/tests/components/imap/test_init.py +++ b/tests/components/imap/test_init.py @@ -20,6 +20,7 @@ from homeassistant.util.dt import utcnow from .const import ( BAD_RESPONSE, EMPTY_SEARCH_RESPONSE, + EMPTY_SEARCH_RESPONSE_ALT, TEST_BADLY_ENCODED_CONTENT, TEST_FETCH_RESPONSE_BINARY, TEST_FETCH_RESPONSE_HTML, @@ -153,7 +154,7 @@ async def test_receiving_message_successfully( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -202,7 +203,7 @@ async def test_receiving_message_with_invalid_encoding( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -237,7 +238,7 @@ async def 
test_receiving_message_no_subject_to_from( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -273,7 +274,7 @@ async def test_initial_authentication_error( assert await hass.config_entries.async_setup(config_entry.entry_id) == success await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert (state is not None) == success @@ -290,7 +291,7 @@ async def test_initial_invalid_folder_error( assert await hass.config_entries.async_setup(config_entry.entry_id) == success await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert (state is not None) == success @@ -330,7 +331,7 @@ async def test_late_authentication_retry( assert "Authentication failed, retrying" in caplog.text # we still should have an entity with an unavailable state - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -371,7 +372,7 @@ async def test_late_authentication_error( assert "Username or password incorrect, starting reauthentication" in caplog.text # we still should have an entity with an unavailable state - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -415,7 +416,7 @@ async def test_late_folder_error( assert "Selected mailbox folder is invalid" in caplog.text # we still should have an entity with an unavailable state - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -444,7 +445,7 @@ async def test_handle_cleanup_exception( async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have an entity assert state is not None assert state.state == "0" @@ -456,7 +457,7 @@ async def test_handle_cleanup_exception( await hass.async_block_till_done() assert "Error while cleaning up imap connection" in caplog.text - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have an entity with an unavailable state assert state is not None @@ -487,7 +488,7 @@ async def test_lost_connection_with_imap_push( await hass.async_block_till_done() assert "Lost imap.server.com (will attempt to reconnect after 10 s)" in caplog.text - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # Our entity should keep its current state as this assert state is not None assert state.state == "0" @@ -511,12 +512,17 @@ async def test_fetch_number_of_messages( await hass.async_block_till_done() assert "Invalid response for search" in caplog.text - state = hass.states.get("sensor.imap_email_email_com") + state = 
hass.states.get("sensor.imap_email_email_com_messages") # we should have an entity with an unavailable state assert state is not None assert state.state == STATE_UNAVAILABLE +@pytest.mark.parametrize( + "empty_search_response", + [EMPTY_SEARCH_RESPONSE, EMPTY_SEARCH_RESPONSE_ALT], + ids=["regular_empty_search_response", "alt_empty_search_response"], +) @pytest.mark.parametrize("imap_search", [TEST_SEARCH_RESPONSE]) @pytest.mark.parametrize( ("imap_fetch", "valid_date"), @@ -525,7 +531,10 @@ async def test_fetch_number_of_messages( ) @pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"]) async def test_reset_last_message( - hass: HomeAssistant, mock_imap_protocol: MagicMock, valid_date: bool + hass: HomeAssistant, + mock_imap_protocol: MagicMock, + valid_date: bool, + empty_search_response: tuple[str, list[bytes]], ) -> None: """Test receiving a message successfully.""" event = asyncio.Event() # needed for pushed coordinator to make a new loop @@ -556,7 +565,7 @@ async def test_reset_last_message( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have received one message assert state is not None assert state.state == "1" @@ -580,7 +589,7 @@ async def test_reset_last_message( ) # Simulate an update where no messages are found (needed for pushed coordinator) - mock_imap_protocol.search.return_value = Response(*EMPTY_SEARCH_RESPONSE) + mock_imap_protocol.search.return_value = Response(*empty_search_response) # Make sure we have an update async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) @@ -590,7 +599,7 @@ async def test_reset_last_message( await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have message assert state is not None assert state.state == "0" @@ -607,7 +616,7 @@ async def test_reset_last_message( await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have received one message assert state is not None assert state.state == "1" @@ -637,7 +646,7 @@ async def test_event_skipped_message_too_large( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have received one message assert state is not None assert state.state == "1" @@ -667,7 +676,7 @@ async def test_message_is_truncated( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have received one message assert state is not None assert state.state == "1" @@ -702,7 +711,7 @@ async def test_message_data( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should 
have received one message assert state is not None assert state.state == "1" @@ -747,7 +756,7 @@ async def test_custom_template( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -798,7 +807,7 @@ async def test_enforce_polling( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -838,7 +847,7 @@ async def test_services(hass: HomeAssistant, mock_imap_protocol: MagicMock) -> N # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" diff --git a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr index 096e370ab02..f15fc706d7e 100644 --- a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr +++ b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr @@ -6,6 +6,8 @@ 'station_id': '123', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'imgw_pib', 'minor_version': 1, 'options': dict({ @@ -13,6 +15,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'River Name (Station Name)', 'unique_id': '123', 'version': 1, diff --git a/tests/components/improv_ble/__init__.py b/tests/components/improv_ble/__init__.py index 41ea98cda7b..521d0881443 100644 --- a/tests/components/improv_ble/__init__.py +++ b/tests/components/improv_ble/__init__.py @@ -25,6 +25,25 @@ IMPROV_BLE_DISCOVERY_INFO = BluetoothServiceInfoBleak( ) +BAD_IMPROV_BLE_DISCOVERY_INFO = BluetoothServiceInfoBleak( + name="00123456", + address="AA:BB:CC:DD:EE:F0", + rssi=-60, + manufacturer_data={}, + service_uuids=[SERVICE_UUID], + service_data={SERVICE_DATA_UUID: b"\x00\x00\x00\x00\x00\x00"}, + source="local", + device=generate_ble_device(address="AA:BB:CC:DD:EE:F0", name="00123456"), + advertisement=generate_advertisement_data( + service_uuids=[SERVICE_UUID], + service_data={SERVICE_DATA_UUID: b"\x00\x00\x00\x00\x00\x00"}, + ), + time=0, + connectable=True, + tx_power=-127, +) + + PROVISIONED_IMPROV_BLE_DISCOVERY_INFO = BluetoothServiceInfoBleak( name="00123456", address="AA:BB:CC:DD:EE:F0", diff --git a/tests/components/improv_ble/test_config_flow.py b/tests/components/improv_ble/test_config_flow.py index 640a931bee5..2df4be2ba7d 100644 --- a/tests/components/improv_ble/test_config_flow.py +++ b/tests/components/improv_ble/test_config_flow.py @@ -15,6 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType from . 
import ( + BAD_IMPROV_BLE_DISCOVERY_INFO, IMPROV_BLE_DISCOVERY_INFO, NOT_IMPROV_BLE_DISCOVERY_INFO, PROVISIONED_IMPROV_BLE_DISCOVERY_INFO, @@ -649,3 +650,20 @@ async def test_provision_retry(hass: HomeAssistant, exc, error) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "provision" assert result["errors"] == {"base": error} + + +async def test_provision_fails_invalid_data( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test bluetooth flow with error due to invalid data.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=BAD_IMPROV_BLE_DISCOVERY_INFO, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "invalid_improv_data" + assert ( + "Aborting improv flow, device AA:BB:CC:DD:EE:F0 sent invalid improv data: '000000000000'" + in caplog.text + ) diff --git a/tests/components/incomfort/conftest.py b/tests/components/incomfort/conftest.py index f17547a1445..b00e3a638c8 100644 --- a/tests/components/incomfort/conftest.py +++ b/tests/components/incomfort/conftest.py @@ -7,7 +7,7 @@ from unittest.mock import AsyncMock, MagicMock, patch from incomfortclient import DisplayCode import pytest -from homeassistant.components.incomfort import DOMAIN +from homeassistant.components.incomfort.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/tests/components/incomfort/snapshots/test_binary_sensor.ambr b/tests/components/incomfort/snapshots/test_binary_sensor.ambr index 565abcaa26f..2f2319b6a44 100644 --- a/tests/components/incomfort/snapshots/test_binary_sensor.ambr +++ b/tests/components/incomfort/snapshots/test_binary_sensor.ambr @@ -188,147 +188,6 @@ 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , 
- 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -518,147 +377,6 @@ 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -848,147 +566,6 @@ 'state': 'on', }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, 
- 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1178,147 +755,6 @@ 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': 
set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_setup_platform[binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1508,144 +944,3 @@ 'state': 'off', }) # --- -# name: test_setup_platform[binary_sensor.boiler_running-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_running-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: 
test_setup_platform[binary_sensor.boiler_running_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_running_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_running_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_running_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/incomfort/snapshots/test_climate.ambr b/tests/components/incomfort/snapshots/test_climate.ambr index 05b2d4878d0..17adcbb3bab 100644 --- a/tests/components/incomfort/snapshots/test_climate.ambr +++ b/tests/components/incomfort/snapshots/test_climate.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_setup_platform[climate.thermostat_1-entry] +# name: test_setup_platform[legacy_thermostat][climate.thermostat_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -38,7 +38,73 @@ 'unit_of_measurement': None, }) # --- -# name: test_setup_platform[climate.thermostat_1-state] +# name: test_setup_platform[legacy_thermostat][climate.thermostat_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.4, + 'friendly_name': 'Thermostat 1', + 'hvac_action': , + 'hvac_modes': list([ + , + ]), + 'max_temp': 30.0, + 'min_temp': 5.0, + 'status': dict({ + 'override': 0.0, + 'room_temp': 21.42, + 'setpoint': 18.0, + }), + 'supported_features': , + 'temperature': 18.0, + }), + 'context': , + 'entity_id': 'climate.thermostat_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[new_thermostat][climate.thermostat_1-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + ]), + 'max_temp': 30.0, + 'min_temp': 5.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.thermostat_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'c0ffeec0ffee_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_platform[new_thermostat][climate.thermostat_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'current_temperature': 21.4, diff --git a/tests/components/incomfort/test_climate.py b/tests/components/incomfort/test_climate.py index d5f7397aaaf..ae4c1cf31f7 100644 --- a/tests/components/incomfort/test_climate.py +++ b/tests/components/incomfort/test_climate.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock, patch +import pytest from syrupy import SnapshotAssertion from homeassistant.config_entries import ConfigEntry @@ -13,6 +14,14 @@ from tests.common import snapshot_platform @patch("homeassistant.components.incomfort.PLATFORMS", [Platform.CLIMATE]) +@pytest.mark.parametrize( + "mock_room_status", + [ + {"room_temp": 21.42, "setpoint": 18.0, "override": 18.0}, + {"room_temp": 21.42, "setpoint": 18.0, "override": 0.0}, + ], + ids=["new_thermostat", "legacy_thermostat"], +) async def test_setup_platform( hass: HomeAssistant, mock_incomfort: MagicMock, @@ -20,6 +29,10 @@ async def test_setup_platform( snapshot: SnapshotAssertion, mock_config_entry: ConfigEntry, ) -> None: - """Test the incomfort entities are set up correctly.""" + """Test the incomfort entities are set up correctly. + + Legacy thermostats report 0.0 as override if no override is set, + but new thermostats sync the override with the actual setpoint instead. 
+ """ await hass.config_entries.async_setup(mock_config_entry.entry_id) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/incomfort/test_config_flow.py b/tests/components/incomfort/test_config_flow.py index 7a942dab817..287fd85715f 100644 --- a/tests/components/incomfort/test_config_flow.py +++ b/tests/components/incomfort/test_config_flow.py @@ -6,8 +6,8 @@ from aiohttp import ClientResponseError from incomfortclient import IncomfortError, InvalidHeaterList import pytest -from homeassistant.components.incomfort import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.components.incomfort.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -38,50 +38,6 @@ async def test_form( assert len(mock_setup_entry.mock_calls) == 1 -async def test_import( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_incomfort: MagicMock -) -> None: - """Test we van import from YAML.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Intergas InComfort/Intouch Lan2RF gateway" - assert result["data"] == MOCK_CONFIG - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("exc", "abort_reason"), - [ - (IncomfortError(ClientResponseError(None, None, status=401)), "auth_error"), - (IncomfortError(ClientResponseError(None, None, status=404)), "not_found"), - (IncomfortError(ClientResponseError(None, None, status=500)), "unknown"), - (IncomfortError, "unknown"), - (InvalidHeaterList, "no_heaters"), - (ValueError, "unknown"), - (TimeoutError, "timeout_error"), - ], -) -async def test_import_fails( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_incomfort: MagicMock, - exc: Exception, - abort_reason: str, -) -> None: - """Test YAML import fails.""" - mock_incomfort().heaters.side_effect = exc - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == abort_reason - assert len(mock_setup_entry.mock_calls) == 0 - - async def test_entry_already_configured(hass: HomeAssistant) -> None: """Test aborting if the entry is already configured.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG) diff --git a/tests/components/incomfort/test_water_heater.py b/tests/components/incomfort/test_water_heater.py index 5b7aebc50a8..082aecf6d49 100644 --- a/tests/components/incomfort/test_water_heater.py +++ b/tests/components/incomfort/test_water_heater.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock, patch +import pytest from syrupy import SnapshotAssertion from homeassistant.config_entries import ConfigEntry @@ -9,6 +10,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from .conftest import MOCK_HEATER_STATUS + from tests.common import snapshot_platform @@ -23,3 +26,44 @@ async def test_setup_platform( """Test the incomfort entities are set up correctly.""" await hass.config_entries.async_setup(mock_config_entry.entry_id) await 
snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_heater_status", "current_temperature"), + [ + (MOCK_HEATER_STATUS, 35.3), + (MOCK_HEATER_STATUS | {"is_tapping": True}, 30.2), + (MOCK_HEATER_STATUS | {"is_pumping": True}, 35.3), + (MOCK_HEATER_STATUS | {"heater_temp": None}, 30.2), + (MOCK_HEATER_STATUS | {"tap_temp": None}, 35.3), + (MOCK_HEATER_STATUS | {"heater_temp": None, "tap_temp": None}, None), + ], + ids=[ + "both_temps_available_choose_highest", + "is_tapping_choose_tapping_temp", + "is_pumping_choose_heater_temp", + "heater_temp_not_available_choose_tapping_temp", + "tapping_temp_not_available_choose_heater_temp", + "tapping_and_heater_temp_not_available_unknown", + ], +) +@patch("homeassistant.components.incomfort.PLATFORMS", [Platform.WATER_HEATER]) +async def test_current_temperature_cases( + hass: HomeAssistant, + mock_incomfort: MagicMock, + entity_registry: er.EntityRegistry, + mock_config_entry: ConfigEntry, + current_temperature: float | None, +) -> None: + """Test incomfort entities with alternate current temperature calculation. + + The boiler's current temperature is calculated from the test data: + heater_temp: 35.34 + tap_temp: 30.21 + + It is based on the operating mode as the boiler can heat tap water or + the house. + """ + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert (state := hass.states.get("water_heater.boiler")) is not None + assert state.attributes.get("current_temperature") == current_temperature diff --git a/tests/components/insteon/mock_devices.py b/tests/components/insteon/mock_devices.py index 2c385c337fd..05db45d00ac 100644 --- a/tests/components/insteon/mock_devices.py +++ b/tests/components/insteon/mock_devices.py @@ -168,6 +168,14 @@ class MockDevices: yield address await asyncio.sleep(0.01) + def values(self): + """Return the devices.""" + return self._devices.values() + + def items(self): + """Return the address, device pair.""" + return self._devices.items() + def subscribe(self, listener, force_strong_ref=False): """Mock the subscribe function.""" subscribe_topic(listener, DEVICE_LIST_CHANGED) diff --git a/tests/components/insteon/test_api_aldb.py b/tests/components/insteon/test_api_aldb.py index 9f3c78b4b39..bdb749836e2 100644 --- a/tests/components/insteon/test_api_aldb.py +++ b/tests/components/insteon/test_api_aldb.py @@ -1,5 +1,6 @@ """Test the Insteon All-Link Database APIs.""" +import asyncio import json from typing import Any from unittest.mock import patch @@ -332,3 +333,38 @@ async def test_bad_address( msg = await ws_client.receive_json() assert not msg["success"] assert msg["error"]["message"] == INSTEON_DEVICE_NOT_FOUND + + +async def test_notify_on_aldb_loading( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data +) -> None: + """Test tracking changes to ALDB status across all devices.""" + ws_client, devices = await _setup(hass, hass_ws_client, aldb_data) + + with patch.object(insteon.api.aldb, "devices", devices): + await ws_client.send_json_auto_id({TYPE: "insteon/aldb/notify_all"}) + msg = await ws_client.receive_json() + assert msg["success"] + + await asyncio.sleep(0.1) + msg = await ws_client.receive_json() + assert msg["event"]["type"] == "status" + assert not msg["event"]["is_loading"] + + device = devices["333333"] + device.aldb._update_status(ALDBStatus.LOADING) + await asyncio.sleep(0.1) + msg = await ws_client.receive_json() + assert msg["event"]["type"] == "status" + assert msg["event"]["is_loading"] + 
device.aldb._update_status(ALDBStatus.LOADED) + await asyncio.sleep(0.1) + msg = await ws_client.receive_json() + assert msg["event"]["type"] == "status" + assert not msg["event"]["is_loading"] + + await ws_client.client.session.close() + + # Allow lingering tasks to complete + await asyncio.sleep(0.1) diff --git a/tests/components/insteon/test_api_config.py b/tests/components/insteon/test_api_config.py index 7c922338638..9c85ca6a706 100644 --- a/tests/components/insteon/test_api_config.py +++ b/tests/components/insteon/test_api_config.py @@ -1,7 +1,10 @@ """Test the Insteon APIs for configuring the integration.""" +import asyncio +import json from unittest.mock import patch +from homeassistant.components import insteon from homeassistant.components.insteon.api.device import ID, TYPE from homeassistant.components.insteon.const import ( CONF_HUB_VERSION, @@ -18,8 +21,10 @@ from .const import ( MOCK_USER_INPUT_PLM, ) from .mock_connection import mock_failed_connection, mock_successful_connection +from .mock_devices import MockDevices from .mock_setup import async_mock_setup +from tests.common import load_fixture from tests.typing import WebSocketGenerator @@ -389,3 +394,55 @@ async def test_remove_device_override_no_overrides( config_entry = hass.config_entries.async_get_entry("abcde12345") assert not config_entry.options.get(CONF_OVERRIDE) + + +async def test_get_broken_links( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test getting broken ALDB links.""" + + ws_client, _, _, _ = await async_mock_setup(hass, hass_ws_client) + devices = MockDevices() + await devices.async_load() + aldb_data = json.loads(load_fixture("insteon/aldb_data.json")) + devices.fill_aldb("33.33.33", aldb_data) + await asyncio.sleep(1) + with patch.object(insteon.api.config, "devices", devices): + await ws_client.send_json({ID: 2, TYPE: "insteon/config/get_broken_links"}) + msg = await ws_client.receive_json() + assert msg["success"] + + assert len(msg["result"]) == 5 + + +async def test_get_unknown_devices( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test getting unknown Insteon devices.""" + + ws_client, _, _, _ = await async_mock_setup(hass, hass_ws_client) + devices = MockDevices() + await devices.async_load() + aldb_data = { + "4095": { + "memory": 4095, + "in_use": True, + "controller": False, + "high_water_mark": False, + "bit5": True, + "bit4": False, + "group": 0, + "target": "FFFFFF", + "data1": 0, + "data2": 0, + "data3": 0, + }, + } + devices.fill_aldb("33.33.33", aldb_data) + with patch.object(insteon.api.config, "devices", devices): + await ws_client.send_json({ID: 2, TYPE: "insteon/config/get_unknown_devices"}) + msg = await ws_client.receive_json() + assert msg["success"] + + assert len(msg["result"]) == 1 + await asyncio.sleep(0.1) diff --git a/tests/components/insteon/test_api_device.py b/tests/components/insteon/test_api_device.py index 29d601eb3ef..6f1a174f024 100644 --- a/tests/components/insteon/test_api_device.py +++ b/tests/components/insteon/test_api_device.py @@ -16,7 +16,6 @@ from homeassistant.components.insteon.api.device import ( ID, INSTEON_DEVICE_NOT_FOUND, TYPE, - async_device_name, ) from homeassistant.components.insteon.const import ( CONF_OVERRIDE, @@ -24,6 +23,7 @@ from homeassistant.components.insteon.const import ( DOMAIN, MULTIPLE, ) +from homeassistant.components.insteon.utils import async_device_name from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -129,10 +129,6 
@@ async def test_get_ha_device_name( name = await async_device_name(device_reg, "11.11.11") assert name == "Device 11.11.11" - # Test no HA device but a real Insteon device - name = await async_device_name(device_reg, "22.22.22") - assert name == "Device 22.22.22 (2)" - - # Test no HA or Insteon device name = await async_device_name(device_reg, "BB.BB.BB") assert name == "" diff --git a/tests/components/insteon/test_api_properties.py b/tests/components/insteon/test_api_properties.py index 35ff95a5cc8..aeeeeab3d7b 100644 --- a/tests/components/insteon/test_api_properties.py +++ b/tests/components/insteon/test_api_properties.py @@ -1,5 +1,6 @@ """Test the Insteon properties APIs.""" +import asyncio import json from typing import Any from unittest.mock import AsyncMock, patch @@ -156,6 +157,7 @@ async def test_get_read_only_properties( msg = await ws_client.receive_json() assert msg["success"] assert len(msg["result"]["properties"]) == 15 + await asyncio.sleep(1) async def test_get_unknown_properties( diff --git a/tests/components/insteon/test_lock.py b/tests/components/insteon/test_lock.py index a782e006a62..ec236059c74 100644 --- a/tests/components/insteon/test_lock.py +++ b/tests/components/insteon/test_lock.py @@ -7,18 +7,11 @@ import pytest from homeassistant.components import insteon from homeassistant.components.insteon import ( DOMAIN, - insteon_entity, + entity as insteon_entity, utils as insteon_utils, ) -from homeassistant.components.lock import ( # SERVICE_LOCK,; SERVICE_UNLOCK, - DOMAIN as LOCK_DOMAIN, -) -from homeassistant.const import ( # ATTR_ENTITY_ID,; - EVENT_HOMEASSISTANT_STOP, - STATE_LOCKED, - STATE_UNLOCKED, - Platform, -) +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -48,11 +41,7 @@ def patch_setup_and_devices(): patch.object(insteon, "async_close"), patch.object(insteon, "devices", devices), patch.object(insteon_utils, "devices", devices), - patch.object( - insteon_entity, - "devices", - devices, - ), + patch.object(insteon_entity, "devices", devices), ): yield @@ -77,7 +66,7 @@ async def test_lock_lock( try: lock = entity_registry.async_get("lock.device_55_55_55_55_55_55") state = hass.states.get(lock.entity_id) - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED # lock via UI await hass.services.async_call( @@ -106,7 +95,7 @@ async def test_lock_unlock( lock = entity_registry.async_get("lock.device_55_55_55_55_55_55") state = hass.states.get(lock.entity_id) - assert state.state is STATE_LOCKED + assert state.state == LockState.LOCKED # lock via UI await hass.services.async_call( diff --git a/tests/components/intellifire/__init__.py b/tests/components/intellifire/__init__.py index f655ccc2fa4..50497939f7f 100644 --- a/tests/components/intellifire/__init__.py +++ b/tests/components/intellifire/__init__.py @@ -1 +1,13 @@ """Tests for the IntelliFire integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Set up the IntelliFire integration for a test.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/intellifire/conftest.py b/tests/components/intellifire/conftest.py index 
cf1e085c10f..0bd7073ee47 100644 --- a/tests/components/intellifire/conftest.py +++ b/tests/components/intellifire/conftest.py @@ -1,11 +1,37 @@ """Fixtures for IntelliFire integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch -from aiohttp.client_reqrep import ConnectionKey +from intellifire4py.const import IntelliFireApiMode +from intellifire4py.model import ( + IntelliFireCommonFireplaceData, + IntelliFirePollData, + IntelliFireUserData, +) import pytest +from homeassistant.components.intellifire.const import ( + API_MODE_CLOUD, + API_MODE_LOCAL, + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_USER_ID, + CONF_WEB_CLIENT_ID, + DOMAIN, +) +from homeassistant.const import ( + CONF_API_KEY, + CONF_HOST, + CONF_IP_ADDRESS, + CONF_PASSWORD, + CONF_USERNAME, +) + +from tests.common import MockConfigEntry, load_json_object_fixture + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -22,39 +48,201 @@ def mock_fireplace_finder_none() -> Generator[MagicMock]: mock_found_fireplaces = Mock() mock_found_fireplaces.ips = [] with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace" + "homeassistant.components.intellifire.config_flow.UDPFireplaceFinder.search_fireplace" ): yield mock_found_fireplaces @pytest.fixture -def mock_fireplace_finder_single() -> Generator[MagicMock]: - """Mock fireplace finder.""" - mock_found_fireplaces = Mock() - mock_found_fireplaces.ips = ["192.168.1.69"] - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace" - ): - yield mock_found_fireplaces +def mock_config_entry_current() -> MockConfigEntry: + """Return a mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=2, + data={ + CONF_IP_ADDRESS: "192.168.2.108", + CONF_USERNAME: "grumpypanda@china.cn", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_SERIAL: "3FB284769E4736F30C8973A7ED358123", + CONF_WEB_CLIENT_ID: "FA2B1C3045601234D0AE17D72F8E975", + CONF_API_KEY: "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + CONF_AUTH_COOKIE: "B984F21A6378560019F8A1CDE41B6782", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + options={CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_CLOUD}, + unique_id="3FB284769E4736F30C8973A7ED358123", + ) @pytest.fixture -def mock_intellifire_config_flow() -> Generator[MagicMock]: - """Return a mocked IntelliFire client.""" - data_mock = Mock() - data_mock.serial = "12345" +def mock_config_entry_old() -> MockConfigEntry: + """For migration testing.""" + return MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=1, + title="Fireplace 3FB284769E4736F30C8973A7ED358123", + data={ + CONF_HOST: "192.168.2.108", + CONF_USERNAME: "grumpypanda@china.cn", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + ) + +@pytest.fixture +def mock_common_data_local() -> IntelliFireCommonFireplaceData: + """Fixture for mock common data.""" + return IntelliFireCommonFireplaceData( + auth_cookie="B984F21A6378560019F8A1CDE41B6782", + user_id="52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + web_client_id="FA2B1C3045601234D0AE17D72F8E975", + serial="3FB284769E4736F30C8973A7ED358123", + api_key="B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + 
ip_address="192.168.2.108", + read_mode=IntelliFireApiMode.LOCAL, + control_mode=IntelliFireApiMode.LOCAL, + ) + + +@pytest.fixture +def mock_apis_multifp( + mock_cloud_interface, mock_local_interface, mock_fp +) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock]]: + """Multi fireplace version of mocks.""" + return mock_local_interface, mock_cloud_interface, mock_fp + + +@pytest.fixture +def mock_apis_single_fp( + mock_cloud_interface, mock_local_interface, mock_fp +) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock]]: + """Single fire place version of the mocks.""" + data_v1 = IntelliFireUserData( + **load_json_object_fixture("user_data_1.json", DOMAIN) + ) + with patch.object( + type(mock_cloud_interface), "user_data", new_callable=PropertyMock + ) as mock_user_data: + mock_user_data.return_value = data_v1 + yield mock_local_interface, mock_cloud_interface, mock_fp + + +@pytest.fixture +def mock_cloud_interface() -> Generator[AsyncMock]: + """Mock cloud interface to use for testing.""" + user_data = IntelliFireUserData( + **load_json_object_fixture("user_data_3.json", DOMAIN) + ) + + with ( + patch( + "homeassistant.components.intellifire.IntelliFireCloudInterface", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.intellifire.config_flow.IntelliFireCloudInterface", + new=mock_client, + ), + patch( + "intellifire4py.cloud_interface.IntelliFireCloudInterface", + new=mock_client, + ), + ): + # Mock async context manager + mock_client = mock_client.return_value + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + # Mock other async methods if needed + mock_client.login_with_credentials = AsyncMock() + mock_client.poll = AsyncMock() + type(mock_client).user_data = PropertyMock(return_value=user_data) + + yield mock_client # Yielding to the test + + +@pytest.fixture +def mock_local_interface() -> Generator[AsyncMock]: + """Mock version of IntelliFireAPILocal.""" + poll_data = IntelliFirePollData( + **load_json_object_fixture("intellifire/local_poll.json") + ) with patch( - "homeassistant.components.intellifire.config_flow.IntellifireAPILocal", + "homeassistant.components.intellifire.config_flow.IntelliFireAPILocal", autospec=True, - ) as intellifire_mock: - intellifire = intellifire_mock.return_value - intellifire.data = data_mock - yield intellifire + ) as mock_client: + mock_client = mock_client.return_value + # Mock all instances of the class + type(mock_client).data = PropertyMock(return_value=poll_data) + yield mock_client -def mock_api_connection_error() -> ConnectionError: - """Return a fake a ConnectionError for iftapi.net.""" - ret = ConnectionError() - ret.args = [ConnectionKey("iftapi.net", 443, False, None, None, None, None)] - return ret +@pytest.fixture +def mock_fp(mock_common_data_local) -> Generator[AsyncMock]: + """Mock fireplace.""" + + local_poll_data = IntelliFirePollData( + **load_json_object_fixture("local_poll.json", DOMAIN) + ) + + assert local_poll_data.connection_quality == 988451 + + with patch( + "homeassistant.components.intellifire.UnifiedFireplace" + ) as mock_unified_fireplace: + # Create an instance of the mock + mock_instance = mock_unified_fireplace.return_value + + # Mock methods and properties of the instance + mock_instance.perform_cloud_poll = AsyncMock() + mock_instance.perform_local_poll = AsyncMock() + + mock_instance.async_validate_connectivity = AsyncMock(return_value=(True, True)) + + type(mock_instance).is_cloud_polling = 
PropertyMock(return_value=False) + type(mock_instance).is_local_polling = PropertyMock(return_value=True) + + mock_instance.get_user_data_as_json.return_value = '{"mock": "data"}' + + mock_instance.ip_address = "192.168.1.100" + mock_instance.api_key = "mock_api_key" + mock_instance.serial = "mock_serial" + mock_instance.user_id = "mock_user_id" + mock_instance.auth_cookie = "mock_auth_cookie" + mock_instance.web_client_id = "mock_web_client_id" + + # Configure the READ Api + mock_instance.read_api = MagicMock() + mock_instance.read_api.poll = MagicMock(return_value=local_poll_data) + mock_instance.read_api.data = local_poll_data + + mock_instance.control_api = MagicMock() + + mock_instance.local_connectivity = True + mock_instance.cloud_connectivity = False + + mock_instance._read_mode = IntelliFireApiMode.LOCAL + mock_instance.read_mode = IntelliFireApiMode.LOCAL + + mock_instance.control_mode = IntelliFireApiMode.LOCAL + mock_instance._control_mode = IntelliFireApiMode.LOCAL + + mock_instance.data = local_poll_data + + mock_instance.set_read_mode = AsyncMock() + mock_instance.set_control_mode = AsyncMock() + + mock_instance.async_validate_connectivity = AsyncMock( + return_value=(True, False) + ) + + # Patch class methods + with patch( + "homeassistant.components.intellifire.UnifiedFireplace.build_fireplace_from_common", + new_callable=AsyncMock, + return_value=mock_instance, + ): + yield mock_instance diff --git a/tests/components/intellifire/fixtures/local_poll.json b/tests/components/intellifire/fixtures/local_poll.json new file mode 100644 index 00000000000..9dac47c698d --- /dev/null +++ b/tests/components/intellifire/fixtures/local_poll.json @@ -0,0 +1,29 @@ +{ + "name": "", + "serial": "4GC295860E5837G40D9974B7FD459234", + "temperature": 17, + "battery": 0, + "pilot": 1, + "light": 0, + "height": 1, + "fanspeed": 1, + "hot": 0, + "power": 1, + "thermostat": 0, + "setpoint": 0, + "timer": 0, + "timeremaining": 0, + "prepurge": 0, + "feature_light": 0, + "feature_thermostat": 1, + "power_vent": 0, + "feature_fan": 1, + "errors": [], + "fw_version": "0x00030200", + "fw_ver_str": "0.3.2+hw2", + "downtime": 0, + "uptime": 117, + "connection_quality": 988451, + "ecm_latency": 0, + "ipv4_address": "192.168.2.108" +} diff --git a/tests/components/intellifire/fixtures/user_data_1.json b/tests/components/intellifire/fixtures/user_data_1.json new file mode 100644 index 00000000000..501d240662b --- /dev/null +++ b/tests/components/intellifire/fixtures/user_data_1.json @@ -0,0 +1,17 @@ +{ + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "fireplaces": [ + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123" + } + ], + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas" +} diff --git a/tests/components/intellifire/fixtures/user_data_3.json b/tests/components/intellifire/fixtures/user_data_3.json new file mode 100644 index 00000000000..39e9c95abbd --- /dev/null +++ b/tests/components/intellifire/fixtures/user_data_3.json @@ -0,0 +1,33 @@ +{ + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + 
"web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "fireplaces": [ + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123" + }, + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.109", + "api_key": "D4C5EB28BBFF41E1FB21AFF9BFA6CD34", + "serial": "4GC295860E5837G40D9974B7FD459234" + }, + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.110", + "api_key": "E5D6FC39CCED52F1FB21AFF9BFA6DE56", + "serial": "5HD306971F5938H51EAA85C8GE561345" + } + ], + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas" +} diff --git a/tests/components/intellifire/snapshots/test_binary_sensor.ambr b/tests/components/intellifire/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..1b85db51d68 --- /dev/null +++ b/tests/components/intellifire/snapshots/test_binary_sensor.ambr @@ -0,0 +1,813 @@ +# serializer version: 1 +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_accessory_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_accessory_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Accessory error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'accessory_error', + 'unique_id': 'error_accessory_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_accessory_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Accessory error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_accessory_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_cloud_connectivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_cloud_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cloud connectivity', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cloud_connectivity', + 'unique_id': 'cloud_connectivity_mock_serial', + 
'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_cloud_connectivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'connectivity', + 'friendly_name': 'IntelliFire Cloud connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_cloud_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_disabled_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_disabled_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Disabled error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disabled_error', + 'unique_id': 'error_disabled_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_disabled_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Disabled error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_disabled_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_ecm_offline_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_ecm_offline_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ECM offline error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ecm_offline_error', + 'unique_id': 'error_ecm_offline_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_ecm_offline_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire ECM offline error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_ecm_offline_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_delay_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_fan_delay_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fan delay error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_delay_error', + 'unique_id': 'error_fan_delay_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_delay_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Fan delay error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_fan_delay_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_fan_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fan error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_error', + 'unique_id': 'error_fan_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Fan error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_fan_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_flame', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Flame', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flame', + 'unique_id': 'on_off_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Flame', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_flame', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_flame_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flame Error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flame_error', + 'unique_id': 'error_flame_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Flame Error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_flame_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_lights_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_lights_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lights error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lights_error', + 'unique_id': 'error_lights_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_lights_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Lights error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_lights_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_local_connectivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_local_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Local connectivity', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'local_connectivity', + 'unique_id': 'local_connectivity_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_local_connectivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'connectivity', + 'friendly_name': 'IntelliFire Local connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_local_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) 
+# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_maintenance_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_maintenance_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Maintenance error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'maintenance_error', + 'unique_id': 'error_maintenance_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_maintenance_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Maintenance error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_maintenance_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_offline_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_offline_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Offline error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'offline_error', + 'unique_id': 'error_offline_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_offline_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Offline error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_offline_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_flame_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_pilot_flame_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pilot flame error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pilot_flame_error', + 'unique_id': 'error_pilot_flame_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_flame_error-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Pilot flame error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_pilot_flame_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_light_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_pilot_light_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pilot light on', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pilot_light_on', + 'unique_id': 'pilot_light_on_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_light_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Pilot light on', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_pilot_light_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_soft_lock_out_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_soft_lock_out_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soft lock out error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'soft_lock_out_error', + 'unique_id': 'error_soft_lock_out_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_soft_lock_out_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Soft lock out error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_soft_lock_out_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_thermostat_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_thermostat_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat 
on', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_on', + 'unique_id': 'thermostat_on_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_thermostat_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Thermostat on', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_thermostat_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_timer_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_timer_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Timer on', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_on', + 'unique_id': 'timer_on_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_timer_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Timer on', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_timer_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/intellifire/snapshots/test_climate.ambr b/tests/components/intellifire/snapshots/test_climate.ambr new file mode 100644 index 00000000000..36f719d2264 --- /dev/null +++ b/tests/components/intellifire/snapshots/test_climate.ambr @@ -0,0 +1,66 @@ +# serializer version: 1 +# name: test_all_sensor_entities[climate.intellifire_thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 37, + 'min_temp': 0, + 'target_temp_step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.intellifire_thermostat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'climate_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[climate.intellifire_thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'current_temperature': 17.0, + 'friendly_name': 'IntelliFire Thermostat', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 37, + 'min_temp': 0, + 'supported_features': , + 'target_temp_step': 1.0, + 'temperature': 0.0, + }), + 'context': , + 'entity_id': 'climate.intellifire_thermostat', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/intellifire/snapshots/test_sensor.ambr b/tests/components/intellifire/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..d749da216ac --- /dev/null +++ b/tests/components/intellifire/snapshots/test_sensor.ambr @@ -0,0 +1,493 @@ +# serializer version: 1 +# name: test_all_sensor_entities[sensor.intellifire_connection_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_connection_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Connection quality', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'connection_quality', + 'unique_id': 'connection_quality_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_connection_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Connection quality', + }), + 'context': , + 'entity_id': 'sensor.intellifire_connection_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '988451', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_downtime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_downtime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Downtime', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'downtime', + 'unique_id': 'downtime_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_downtime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'timestamp', + 'friendly_name': 'IntelliFire Downtime', + }), + 'context': , + 'entity_id': 'sensor.intellifire_downtime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ecm_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_ecm_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'ECM latency', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ecm_latency', + 'unique_id': 'ecm_latency_mock_serial', + 
'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ecm_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire ECM latency', + }), + 'context': , + 'entity_id': 'sensor.intellifire_ecm_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_fan_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_fan_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fan Speed', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_speed', + 'unique_id': 'fan_speed_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_fan_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Fan Speed', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_fan_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_flame_height-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_flame_height', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Flame height', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flame_height', + 'unique_id': 'flame_height_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_flame_height-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Flame height', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_flame_height', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ip_address-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_ip_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IP address', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 
0, + 'translation_key': 'ipv4_address', + 'unique_id': 'ipv4_address_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ip_address-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire IP address', + }), + 'context': , + 'entity_id': 'sensor.intellifire_ip_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '192.168.2.108', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_target_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_target_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Target temperature', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'target_temp', + 'unique_id': 'target_temp_mock_serial', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_target_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'temperature', + 'friendly_name': 'IntelliFire Target temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_target_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'temperature_mock_serial', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'temperature', + 'friendly_name': 'IntelliFire Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_timer_end-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_timer_end', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timer end', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_end_timestamp', + 'unique_id': 'timer_end_timestamp_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_timer_end-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'timestamp', + 'friendly_name': 'IntelliFire Timer end', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_timer_end', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': 'uptime_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'timestamp', + 'friendly_name': 'IntelliFire Uptime', + }), + 'context': , + 'entity_id': 'sensor.intellifire_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T11:58:03+00:00', + }) +# --- diff --git a/tests/components/intellifire/test_binary_sensor.py b/tests/components/intellifire/test_binary_sensor.py new file mode 100644 index 00000000000..a40f92b84d5 --- /dev/null +++ b/tests/components/intellifire/test_binary_sensor.py @@ -0,0 +1,35 @@ +"""Test IntelliFire Binary Sensors.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_binary_sensor_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry_current: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_apis_single_fp: tuple[AsyncMock, AsyncMock, AsyncMock], +) -> None: + """Test all entities.""" + + with ( + patch( + "homeassistant.components.intellifire.PLATFORMS", [Platform.BINARY_SENSOR] + ), + ): + await setup_integration(hass, mock_config_entry_current) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry_current.entry_id + ) diff --git a/tests/components/intellifire/test_climate.py b/tests/components/intellifire/test_climate.py new file mode 100644 index 00000000000..da1b2864791 --- /dev/null +++ b/tests/components/intellifire/test_climate.py @@ -0,0 +1,34 @@ +"""Test climate.""" + +from unittest.mock import patch + +from freezegun import freeze_time +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@freeze_time("2021-01-01T12:00:00Z") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_sensor_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry_current: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_fp, +) -> None: + """Test all entities.""" + with ( + patch("homeassistant.components.intellifire.PLATFORMS", [Platform.CLIMATE]), + ): + await setup_integration(hass, mock_config_entry_current) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry_current.entry_id + ) diff --git a/tests/components/intellifire/test_config_flow.py b/tests/components/intellifire/test_config_flow.py index ba4e2f039a3..f1465c4dcd4 100644 --- a/tests/components/intellifire/test_config_flow.py +++ b/tests/components/intellifire/test_config_flow.py @@ -1,323 +1,168 @@ """Test the IntelliFire config flow.""" -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock -from intellifire4py.exceptions import LoginException +from intellifire4py.exceptions import LoginError from homeassistant import config_entries from homeassistant.components import dhcp -from homeassistant.components.intellifire.config_flow import MANUAL_ENTRY_STRING -from homeassistant.components.intellifire.const import CONF_USER_ID, DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.components.intellifire.const import CONF_SERIAL, DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .conftest import mock_api_connection_error - from tests.common import MockConfigEntry -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_no_discovery( +async def test_standard_config_with_single_fireplace( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_single_fp, ) -> None: - """Test we should 
get the manual discovery form - because no discovered fireplaces.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=[], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM + """Test standard flow with a user who has only a single fireplace.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM assert result["errors"] == {} - assert result["step_id"] == "manual_device_entry" + assert result["step_id"] == "cloud_api" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], - { - CONF_HOST: "1.1.1.1", - }, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "api_config" - - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Fireplace 12345" - assert result3["data"] == { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test", - CONF_PASSWORD: "AROONIE", - CONF_API_KEY: "key", - CONF_USER_ID: "intellifire", + # For a single fireplace we just create it + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", } - assert len(mock_setup_entry.mock_calls) == 1 -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(side_effect=mock_api_connection_error()), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_single_discovery( +async def test_standard_config_with_pre_configured_fireplace( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_config_entry_current, + mock_apis_single_fp, ) -> None: - """Test single fireplace UDP discovery.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) + """What if we try to configure an already configured fireplace.""" + # Configure an existing entry + mock_config_entry_current.add_to_hass(hass) - await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: "192.168.1.69"} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} ) - await hass.async_block_till_done() - result3 = await hass.config_entries.flow.async_configure( + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "cloud_api" + + result = await 
hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.FORM - assert result3["errors"] == {"base": "iftapi_connect"} + + # For a single fireplace we just create it + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "no_available_devices" -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(side_effect=LoginException), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_single_discovery_loign_error( +async def test_standard_config_with_single_fireplace_and_bad_credentials( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_single_fp, ) -> None: - """Test single fireplace UDP discovery.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: "192.168.1.69"} - ) - await hass.async_block_till_done() - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, - ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.FORM - assert result3["errors"] == {"base": "api_error"} - - -async def test_manual_entry( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test for multiple Fireplace discovery - involving a pick_device step.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["step_id"] == "pick_device" - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: MANUAL_ENTRY_STRING} - ) - - await hass.async_block_till_done() - assert result2["step_id"] == "manual_device_entry" - - -async def test_multi_discovery( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test for multiple fireplace discovery - involving a pick_device step.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["step_id"] == "pick_device" - await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: "192.168.1.33"} - ) - await hass.async_block_till_done() - assert result["step_id"] == "pick_device" - - -async def test_multi_discovery_cannot_connect( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test for multiple fireplace discovery - involving a pick_device step.""" - with patch( - 
"homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], - ): - mock_intellifire_config_flow.poll.side_effect = ConnectionError - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pick_device" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: "192.168.1.33"} - ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - -async def test_form_cannot_connect_manual_entry( - hass: HomeAssistant, - mock_intellifire_config_flow: MagicMock, - mock_fireplace_finder_single: AsyncMock, -) -> None: - """Test we handle cannot connect error.""" - mock_intellifire_config_flow.poll.side_effect = ConnectionError + """Test bad credentials on a login.""" + mock_local_interface, mock_cloud_interface, mock_fp = mock_apis_single_fp + # Set login error + mock_cloud_interface.login_with_credentials.side_effect = LoginError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "manual_device_entry" + assert result["errors"] == {} + assert result["step_id"] == "cloud_api" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], - { - CONF_HOST: "1.1.1.1", - }, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} + # Erase the error + mock_cloud_interface.login_with_credentials.side_effect = None + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "api_error"} + assert result["step_id"] == "cloud_api" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + ) + # For a single fireplace we just create it + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", + } -async def test_picker_already_discovered( +async def test_standard_config_with_multiple_fireplace( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_multifp, ) -> None: - """Test single fireplace UDP discovery.""" - - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "host": "192.168.1.3", - }, - title="Fireplace", - unique_id=44444, - ) - entry.add_to_hass(hass) - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.3"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - await hass.async_block_till_done() - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: 
"192.168.1.4", - }, - ) - assert result2["type"] is FlowResultType.FORM - assert len(mock_setup_entry.mock_calls) == 0 - - -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_reauth_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test the reauth flow.""" - - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "host": "192.168.1.3", - }, - title="Fireplace 1234", - version=1, - unique_id="4444", - ) - entry.add_to_hass(hass) - + """Test multi-fireplace user who must be very rich.""" result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": "reauth", - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, + DOMAIN, context={"source": config_entries.SOURCE_USER} ) + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "cloud_api" - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "api_config" - - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.ABORT - assert entry.data[CONF_PASSWORD] == "AROONIE" - assert entry.data[CONF_USERNAME] == "test" + # When we have multiple fireplaces we get to pick a serial + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "pick_cloud_device" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_SERIAL: "4GC295860E5837G40D9974B7FD459234"}, + ) + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "ip_address": "192.168.2.109", + "api_key": "D4C5EB28BBFF41E1FB21AFF9BFA6CD34", + "serial": "4GC295860E5837G40D9974B7FD459234", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", + } async def test_dhcp_discovery_intellifire_device( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_multifp, ) -> None: """Test successful DHCP Discovery.""" + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -327,26 +172,26 @@ async def test_dhcp_discovery_intellifire_device( hostname="zentrios-Test", ), ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "dhcp_confirm" - result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "dhcp_confirm" - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], user_input={} + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - assert result3["title"] == "Fireplace 12345" - assert result3["data"] == {"host": "1.1.1.1"} + assert result["type"] == 
FlowResultType.CREATE_ENTRY


 async def test_dhcp_discovery_non_intellifire_device(
     hass: HomeAssistant,
-    mock_intellifire_config_flow: MagicMock,
     mock_setup_entry: AsyncMock,
+    mock_apis_multifp,
 ) -> None:
-    """Test failed DHCP Discovery."""
+    """Test DHCP discovery of a device that is not an IntelliFire fireplace."""
 
-    mock_intellifire_config_flow.poll.side_effect = ConnectionError
+    # Patch poll with an exception
+    mock_local_interface, mock_cloud_interface, mock_fp = mock_apis_multifp
+    mock_local_interface.poll.side_effect = ConnectionError
 
     result = await hass.config_entries.flow.async_init(
         DOMAIN,
@@ -357,6 +202,28 @@ async def test_dhcp_discovery_non_intellifire_device(
             hostname="zentrios-Evil",
         ),
     )
-
-    assert result["type"] is FlowResultType.ABORT
+    assert result["type"] == FlowResultType.ABORT
     assert result["reason"] == "not_intellifire_device"
+    # Test is finished - the DHCP scanner detected a hostname that "might" be an IntelliFire device, but it was not.
+
+
+async def test_reauth_flow(
+    hass: HomeAssistant,
+    mock_config_entry_current: MockConfigEntry,
+    mock_apis_single_fp,
+    mock_setup_entry: AsyncMock,
+) -> None:
+    """Test reauth."""
+
+    mock_config_entry_current.add_to_hass(hass)
+    result = await mock_config_entry_current.start_reauth_flow(hass)
+    assert result["type"] == FlowResultType.FORM
+    assert result["step_id"] == "cloud_api"
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"},
+    )
+
+    assert result["type"] == FlowResultType.ABORT
+    assert result["reason"] == "reauth_successful"
diff --git a/tests/components/intellifire/test_init.py b/tests/components/intellifire/test_init.py
new file mode 100644
index 00000000000..6d08fda26c3
--- /dev/null
+++ b/tests/components/intellifire/test_init.py
@@ -0,0 +1,111 @@
+"""Test the IntelliFire integration setup and migration."""
+
+from unittest.mock import AsyncMock, patch
+
+from homeassistant.components.intellifire import CONF_USER_ID
+from homeassistant.components.intellifire.const import (
+    API_MODE_CLOUD,
+    API_MODE_LOCAL,
+    CONF_AUTH_COOKIE,
+    CONF_CONTROL_MODE,
+    CONF_READ_MODE,
+    CONF_SERIAL,
+    CONF_WEB_CLIENT_ID,
+    DOMAIN,
+)
+from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import (
+    CONF_API_KEY,
+    CONF_HOST,
+    CONF_IP_ADDRESS,
+    CONF_PASSWORD,
+    CONF_USERNAME,
+)
+from homeassistant.core import HomeAssistant
+
+from tests.common import MockConfigEntry
+
+
+async def test_minor_migration(
+    hass: HomeAssistant, mock_config_entry_old, mock_apis_single_fp
+) -> None:
+    """With the new library we are going to end up rewriting the config entries."""
+    mock_config_entry_old.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry_old.entry_id)
+
+    assert mock_config_entry_old.data == {
+        "ip_address": "192.168.2.108",
+        "host": "192.168.2.108",
+        "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2",
+        "serial": "3FB284769E4736F30C8973A7ED358123",
+        "auth_cookie": "B984F21A6378560019F8A1CDE41B6782",
+        "web_client_id": "FA2B1C3045601234D0AE17D72F8E975",
+        "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456",
+        "username": "grumpypanda@china.cn",
+        "password": "you-stole-my-pandas",
+    }
+
+
+async def test_minor_migration_error(hass: HomeAssistant, mock_apis_single_fp) -> None:
+    """Test a minor-version migration that fails."""
+    mock_config_entry = MockConfigEntry(
+        domain=DOMAIN,
+        version=1,
+        minor_version=1,
+        title="Fireplace of testing",
+        data={
+            CONF_HOST: "11.168.2.218",
+            CONF_USERNAME: "grumpypanda@china.cn",
+            CONF_PASSWORD: "you-stole-my-pandas",
+            CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456",
+        },
+    )
+
+    mock_config_entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+    assert mock_config_entry.state is ConfigEntryState.MIGRATION_ERROR
+
+
+async def test_init_with_no_username(hass: HomeAssistant, mock_apis_single_fp) -> None:
+    """Test setup when the username is missing from the config entry."""
+    mock_config_entry = MockConfigEntry(
+        domain=DOMAIN,
+        version=1,
+        minor_version=2,
+        data={
+            CONF_IP_ADDRESS: "192.168.2.108",
+            CONF_PASSWORD: "you-stole-my-pandas",
+            CONF_SERIAL: "3FB284769E4736F30C8973A7ED358123",
+            CONF_WEB_CLIENT_ID: "FA2B1C3045601234D0AE17D72F8E975",
+            CONF_API_KEY: "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2",
+            CONF_AUTH_COOKIE: "B984F21A6378560019F8A1CDE41B6782",
+            CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456",
+        },
+        options={CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_CLOUD},
+        unique_id="3FB284769E4736F30C8973A7ED358123",
+    )
+
+    mock_config_entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+    assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
+
+
+async def test_connectivity_bad(
+    hass: HomeAssistant,
+    mock_config_entry_current,
+    mock_apis_single_fp,
+) -> None:
+    """Test a timeout error on the setup flow."""
+
+    with patch(
+        "homeassistant.components.intellifire.UnifiedFireplace.build_fireplace_from_common",
+        new_callable=AsyncMock,
+        side_effect=TimeoutError,
+    ):
+        mock_config_entry_current.add_to_hass(hass)
+        await hass.config_entries.async_setup(mock_config_entry_current.entry_id)
+
+    await hass.async_block_till_done()
+    assert len(hass.states.async_all()) == 0
diff --git a/tests/components/intellifire/test_sensor.py b/tests/components/intellifire/test_sensor.py
new file mode 100644
index 00000000000..96e344d77fc
--- /dev/null
+++ b/tests/components/intellifire/test_sensor.py
@@ -0,0 +1,35 @@
+"""Test IntelliFire Sensors."""
+
+from unittest.mock import AsyncMock, patch
+
+from freezegun import freeze_time
+import pytest
+from syrupy import SnapshotAssertion
+
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from .
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@freeze_time("2021-01-01T12:00:00Z") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_sensor_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry_current: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_apis_single_fp: tuple[AsyncMock, AsyncMock, AsyncMock], +) -> None: + """Test all entities.""" + + with ( + patch("homeassistant.components.intellifire.PLATFORMS", [Platform.SENSOR]), + ): + await setup_integration(hass, mock_config_entry_current) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry_current.entry_id + ) diff --git a/tests/components/intent/test_init.py b/tests/components/intent/test_init.py index 7288c4855af..0db9682d0ad 100644 --- a/tests/components/intent/test_init.py +++ b/tests/components/intent/test_init.py @@ -34,11 +34,11 @@ async def test_http_handle_intent( assert intent_obj.context.user_id == hass_admin_user.id response = intent_obj.create_response() response.async_set_speech( - "I've ordered a {}!".format(intent_obj.slots["type"]["value"]) + f"I've ordered a {intent_obj.slots['type']['value']}!" ) response.async_set_card( "Beer ordered", - "You chose a {}.".format(intent_obj.slots["type"]["value"]), + f"You chose a {intent_obj.slots['type']['value']}.", ) return response @@ -455,3 +455,25 @@ async def test_set_position_intent_unsupported_domain(hass: HomeAssistant) -> No "HassSetPosition", {"name": {"value": "test light"}, "position": {"value": 100}}, ) + + +async def test_intents_with_no_responses(hass: HomeAssistant) -> None: + """Test intents that should not return a response during handling.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "intent", {}) + + # The "respond" intent gets its response text from home-assistant-intents + for intent_name in (intent.INTENT_NEVERMIND, intent.INTENT_RESPOND): + response = await intent.async_handle(hass, "test", intent_name, {}) + assert not response.speech + + +async def test_intents_respond_intent(hass: HomeAssistant) -> None: + """Test HassRespond intent with a response slot value.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "intent", {}) + + response = await intent.async_handle( + hass, "test", intent.INTENT_RESPOND, {"response": {"value": "Hello World"}} + ) + assert response.speech["plain"]["speech"] == "Hello World" diff --git a/tests/components/intent/test_timers.py b/tests/components/intent/test_timers.py index d194d532513..1789e981e2d 100644 --- a/tests/components/intent/test_timers.py +++ b/tests/components/intent/test_timers.py @@ -1587,3 +1587,182 @@ async def test_async_device_supports_timers(hass: HomeAssistant) -> None: # After handler registration assert async_device_supports_timers(hass, device_id) + + +async def test_cancel_all_timers(hass: HomeAssistant, init_components) -> None: + """Test cancelling all timers.""" + device_id = "test_device" + + started_event = asyncio.Event() + num_started = 0 + + @callback + def handle_timer(event_type: TimerEventType, timer: TimerInfo) -> None: + nonlocal num_started + + if event_type == TimerEventType.STARTED: + num_started += 1 + if num_started == 3: + started_event.set() + + async_register_timer_handler(hass, device_id, handle_timer) + + # Start timers + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + 
{"name": {"value": "pizza"}, "minutes": {"value": 10}}, + device_id=device_id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "tv"}, "minutes": {"value": 10}}, + device_id=device_id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + + result2 = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "media"}, "minutes": {"value": 15}}, + device_id=device_id, + ) + assert result2.response_type == intent.IntentResponseType.ACTION_DONE + + # Wait for all timers to start + async with asyncio.timeout(1): + await started_event.wait() + + # Cancel all timers + result = await intent.async_handle( + hass, "test", intent.INTENT_CANCEL_ALL_TIMERS, {}, device_id=device_id + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + assert result.speech_slots.get("canceled", 0) == 3 + + # No timers should be running for test_device + result = await intent.async_handle( + hass, "test", intent.INTENT_TIMER_STATUS, {}, device_id=device_id + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + timers = result.speech_slots.get("timers", []) + assert len(timers) == 0 + + +async def test_cancel_all_timers_area( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test cancelling all timers in an area.""" + entry = MockConfigEntry() + entry.add_to_hass(hass) + + area_kitchen = area_registry.async_create("kitchen") + device_kitchen = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("test", "kitchen-device")}, + ) + device_registry.async_update_device(device_kitchen.id, area_id=area_kitchen.id) + + area_living_room = area_registry.async_create("living room") + device_living_room = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("test", "living_room-device")}, + ) + device_registry.async_update_device( + device_living_room.id, area_id=area_living_room.id + ) + + started_event = asyncio.Event() + num_timers = 3 + num_started = 0 + + @callback + def handle_timer(event_type: TimerEventType, timer: TimerInfo) -> None: + nonlocal num_started + + if event_type == TimerEventType.STARTED: + num_started += 1 + if num_started == num_timers: + started_event.set() + + async_register_timer_handler(hass, device_kitchen.id, handle_timer) + async_register_timer_handler(hass, device_living_room.id, handle_timer) + + # Start timers in different areas + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "pizza"}, "minutes": {"value": 10}}, + device_id=device_kitchen.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "tv"}, "minutes": {"value": 10}}, + device_id=device_living_room.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "media"}, "minutes": {"value": 15}}, + device_id=device_living_room.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + + # Wait for all timers to start + async with asyncio.timeout(1): + await started_event.wait() + + # Cancel 
all timers in kitchen + result = await intent.async_handle( + hass, + "test", + intent.INTENT_CANCEL_ALL_TIMERS, + {"area": {"value": "kitchen"}}, + device_id=device_kitchen.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + assert result.speech_slots.get("canceled", 0) == 1 + assert result.speech_slots.get("area") == "kitchen" + + # No timers should be running in kitchen + result = await intent.async_handle( + hass, + "test", + intent.INTENT_TIMER_STATUS, + {"area": {"value": "kitchen"}}, + device_id=device_kitchen.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + timers = result.speech_slots.get("timers", []) + assert len(timers) == 0 + + # timers should be running in living room + result = await intent.async_handle( + hass, + "test", + intent.INTENT_TIMER_STATUS, + {"area": {"value": "living room"}}, + device_id=device_living_room.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + timers = result.speech_slots.get("timers", []) + assert len(timers) == 2 diff --git a/tests/components/intent_script/test_init.py b/tests/components/intent_script/test_init.py index 26c575f0407..39084b9298b 100644 --- a/tests/components/intent_script/test_init.py +++ b/tests/components/intent_script/test_init.py @@ -4,7 +4,7 @@ from unittest.mock import patch from homeassistant import config as hass_config from homeassistant.components.intent_script import DOMAIN -from homeassistant.const import SERVICE_RELOAD +from homeassistant.const import ATTR_FRIENDLY_NAME, SERVICE_RELOAD from homeassistant.core import HomeAssistant from homeassistant.helpers import ( area_registry as ar, @@ -235,17 +235,31 @@ async def test_intent_script_targets( floor_1 = floor_registry.async_create("first floor") kitchen = area_registry.async_get_or_create("kitchen") area_registry.async_update(kitchen.id, floor_id=floor_1.floor_id) + bathroom = area_registry.async_get_or_create("bathroom") entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" + "light", "demo", "kitchen", suggested_object_id="kitchen" ) entity_registry.async_update_entity("light.kitchen", area_id=kitchen.id) - hass.states.async_set("light.kitchen", "off") + hass.states.async_set( + "light.kitchen", "off", attributes={ATTR_FRIENDLY_NAME: "overhead light"} + ) + entity_registry.async_get_or_create( + "light", "demo", "bathroom", suggested_object_id="bathroom" + ) + entity_registry.async_update_entity("light.bathroom", area_id=bathroom.id) + hass.states.async_set( + "light.bathroom", "off", attributes={ATTR_FRIENDLY_NAME: "overhead light"} + ) response = await intent.async_handle( hass, "test", "Targets", - {"name": {"value": "kitchen"}, "domain": {"value": "light"}}, + { + "name": {"value": "overhead light"}, + "domain": {"value": "light"}, + "preferred_area_id": {"value": "kitchen"}, + }, ) assert len(calls) == 1 assert calls[0].data["targets"] == {"entities": ["light.kitchen"]} diff --git a/tests/components/iotty/conftest.py b/tests/components/iotty/conftest.py index 9f858879cb9..1935a069cca 100644 --- a/tests/components/iotty/conftest.py +++ b/tests/components/iotty/conftest.py @@ -6,7 +6,18 @@ from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientSession from iottycloud.device import Device from iottycloud.lightswitch import LightSwitch -from iottycloud.verbs import LS_DEVICE_TYPE_UID, RESULT, STATUS, STATUS_OFF, STATUS_ON +from iottycloud.shutter import Shutter +from iottycloud.verbs import ( + 
LS_DEVICE_TYPE_UID, + OPEN_PERCENTAGE, + RESULT, + SH_DEVICE_TYPE_UID, + STATUS, + STATUS_OFF, + STATUS_ON, + STATUS_OPENING, + STATUS_STATIONATRY, +) import pytest from homeassistant import setup @@ -48,6 +59,20 @@ test_ls_one_added = [ ls_2, ] +sh_0 = Shutter("TestSH", "TEST_SERIAL_SH_0", SH_DEVICE_TYPE_UID, "[TEST] Shutter 0") +sh_1 = Shutter("TestSH1", "TEST_SERIAL_SH_1", SH_DEVICE_TYPE_UID, "[TEST] Shutter 1") +sh_2 = Shutter("TestSH2", "TEST_SERIAL_SH_2", SH_DEVICE_TYPE_UID, "[TEST] Shutter 2") + +test_sh = [sh_0, sh_1] + +test_sh_one_removed = [sh_0] + +test_sh_one_added = [ + sh_0, + sh_1, + sh_2, +] + @pytest.fixture async def local_oauth_impl(hass: HomeAssistant): @@ -142,7 +167,7 @@ def mock_get_devices_nodevices() -> Generator[AsyncMock]: @pytest.fixture def mock_get_devices_twolightswitches() -> Generator[AsyncMock]: - """Mock for get_devices, returning two objects.""" + """Mock for get_devices, returning two switches.""" with patch( "iottycloud.cloudapi.CloudApi.get_devices", return_value=test_ls @@ -150,6 +175,16 @@ def mock_get_devices_twolightswitches() -> Generator[AsyncMock]: yield mock_fn +@pytest.fixture +def mock_get_devices_twoshutters() -> Generator[AsyncMock]: + """Mock for get_devices, returning two shutters.""" + + with patch( + "iottycloud.cloudapi.CloudApi.get_devices", return_value=test_sh + ) as mock_fn: + yield mock_fn + + @pytest.fixture def mock_command_fn() -> Generator[AsyncMock]: """Mock for command.""" @@ -169,6 +204,39 @@ def mock_get_status_filled_off() -> Generator[AsyncMock]: yield mock_fn +@pytest.fixture +def mock_get_status_filled_stationary_100() -> Generator[AsyncMock]: + """Mock setting up a get_status.""" + + retval = {RESULT: {STATUS: STATUS_STATIONATRY, OPEN_PERCENTAGE: 100}} + with patch( + "iottycloud.cloudapi.CloudApi.get_status", return_value=retval + ) as mock_fn: + yield mock_fn + + +@pytest.fixture +def mock_get_status_filled_stationary_0() -> Generator[AsyncMock]: + """Mock setting up a get_status.""" + + retval = {RESULT: {STATUS: STATUS_STATIONATRY, OPEN_PERCENTAGE: 0}} + with patch( + "iottycloud.cloudapi.CloudApi.get_status", return_value=retval + ) as mock_fn: + yield mock_fn + + +@pytest.fixture +def mock_get_status_filled_opening_50() -> Generator[AsyncMock]: + """Mock setting up a get_status.""" + + retval = {RESULT: {STATUS: STATUS_OPENING, OPEN_PERCENTAGE: 50}} + with patch( + "iottycloud.cloudapi.CloudApi.get_status", return_value=retval + ) as mock_fn: + yield mock_fn + + @pytest.fixture def mock_get_status_filled() -> Generator[AsyncMock]: """Mock setting up a get_status.""" diff --git a/tests/components/iotty/test_cover.py b/tests/components/iotty/test_cover.py new file mode 100644 index 00000000000..c9e1edaa24b --- /dev/null +++ b/tests/components/iotty/test_cover.py @@ -0,0 +1,235 @@ +"""Unit tests the Hass COVER component.""" + +from aiohttp import ClientSession +from freezegun.api import FrozenDateTimeFactory +from iottycloud.verbs import ( + OPEN_PERCENTAGE, + RESULT, + STATUS, + STATUS_CLOSING, + STATUS_OPENING, + STATUS_STATIONATRY, +) + +from homeassistant.components.cover import ( + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_SET_COVER_POSITION, + SERVICE_STOP_COVER, + CoverState, +) +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.components.iotty.coordinator import UPDATE_INTERVAL +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import 
config_entry_oauth2_flow + +from .conftest import test_sh_one_added + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_open_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twoshutters, + mock_get_status_filled_stationary_0, + mock_command_fn, +) -> None: + """Issue an open command.""" + + entity_id = "cover.test_shutter_0_test_serial_sh_0" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSED + + mock_get_status_filled_stationary_0.return_value = { + RESULT: {STATUS: STATUS_OPENING, OPEN_PERCENTAGE: 10} + } + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPENING + + +async def test_close_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twoshutters, + mock_get_status_filled_stationary_100, + mock_command_fn, +) -> None: + """Issue a close command.""" + + entity_id = "cover.test_shutter_0_test_serial_sh_0" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPEN + + mock_get_status_filled_stationary_100.return_value = { + RESULT: {STATUS: STATUS_CLOSING, OPEN_PERCENTAGE: 90} + } + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSING + + +async def test_stop_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twoshutters, + mock_get_status_filled_opening_50, + mock_command_fn, +) -> None: + """Issue a stop command.""" + + entity_id = "cover.test_shutter_0_test_serial_sh_0" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPENING + + mock_get_status_filled_opening_50.return_value = { + RESULT: {STATUS: STATUS_STATIONATRY, OPEN_PERCENTAGE: 60} + } + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPEN + + +async def test_set_position_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twoshutters, + mock_get_status_filled_stationary_0, + mock_command_fn, +) -> None: + """Issue a set position command.""" + + entity_id = "cover.test_shutter_0_test_serial_sh_0" + + 
mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSED + + mock_get_status_filled_stationary_0.return_value = { + RESULT: {STATUS: STATUS_OPENING, OPEN_PERCENTAGE: 50} + } + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 10}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPENING + + +async def test_devices_insertion_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twoshutters, + mock_get_status_filled_stationary_0, + freezer: FrozenDateTimeFactory, +) -> None: + """Test iotty cover insertion.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == [ + "cover.test_shutter_0_test_serial_sh_0", + "cover.test_shutter_1_test_serial_sh_1", + ] + + mock_get_devices_twoshutters.return_value = test_sh_one_added + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should have three devices + assert hass.states.async_entity_ids_count() == 3 + assert hass.states.async_entity_ids() == [ + "cover.test_shutter_0_test_serial_sh_0", + "cover.test_shutter_1_test_serial_sh_1", + "cover.test_shutter_2_test_serial_sh_2", + ] diff --git a/tests/components/ipp/snapshots/test_diagnostics.ambr b/tests/components/ipp/snapshots/test_diagnostics.ambr index 98d0055c982..bd2564c5a40 100644 --- a/tests/components/ipp/snapshots/test_diagnostics.ambr +++ b/tests/components/ipp/snapshots/test_diagnostics.ambr @@ -2,6 +2,7 @@ # name: test_diagnostics dict({ 'data': dict({ + 'booted_at': '2019-11-11T09:10:02+00:00', 'info': dict({ 'command_set': 'ESCPL2,BDC,D4,D4PX,ESCPR7,END4,GENEP,URF', 'location': None, diff --git a/tests/components/ipp/snapshots/test_sensor.ambr b/tests/components/ipp/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..3f910399ad8 --- /dev/null +++ b/tests/components/ipp/snapshots/test_sensor.ambr @@ -0,0 +1,378 @@ +# serializer version: 1 +# name: test_sensors[sensor.test_ha_1000_series-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'idle', + 'printing', + 'stopped', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'printer', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_printer', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'command_set': 'ESCPL2,BDC,D4,D4PX,ESCPR7,END4,GENEP,URF', + 'device_class': 'enum', + 'friendly_name': 'Test HA-1000 Series', + 'info': 'Test HA-1000 Series', + 'location': None, + 'options': list([ + 'idle', + 'printing', + 'stopped', + ]), + 'serial': '555534593035345555', + 'state_message': None, + 'state_reason': None, + 'uri_supported': 'ipps://192.168.1.31:631/ipp/print,ipp://192.168.1.31:631/ipp/print', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_black_ink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series_black_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_black_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Black ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_black_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '58', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_cyan_ink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series_cyan_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_1', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_cyan_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Cyan ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_cyan_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '91', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_magenta_ink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.test_ha_1000_series_magenta_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_2', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_magenta_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Magenta ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_magenta_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '73', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_photo_black_ink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series_photo_black_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Photo black ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_3', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_photo_black_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Photo black ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_photo_black_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '98', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_ha_1000_series_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test HA-1000 Series Uptime', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2019-11-11T09:10:02+00:00', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_yellow_ink-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series_yellow_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_4', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_yellow_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Yellow ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_yellow_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '95', + }) +# --- diff --git a/tests/components/ipp/test_diagnostics.py b/tests/components/ipp/test_diagnostics.py index 08446601e69..d78f066d788 100644 --- a/tests/components/ipp/test_diagnostics.py +++ b/tests/components/ipp/test_diagnostics.py @@ -1,5 +1,6 @@ """Tests for the diagnostics data provided by the Internet Printing Protocol (IPP) integration.""" +import pytest from syrupy import SnapshotAssertion from homeassistant.core import HomeAssistant @@ -9,6 +10,7 @@ from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +@pytest.mark.freeze_time("2019-11-11 09:10:32+00:00") async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, diff --git a/tests/components/ipp/test_sensor.py b/tests/components/ipp/test_sensor.py index 9f0079a4e40..bdbb9a88d35 100644 --- a/tests/components/ipp/test_sensor.py +++ b/tests/components/ipp/test_sensor.py @@ -3,13 +3,12 @@ from unittest.mock import AsyncMock import pytest +from syrupy.assertion import SnapshotAssertion -from homeassistant.components.sensor import ATTR_OPTIONS -from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.freeze_time("2019-11-11 09:10:32+00:00") @@ -17,53 +16,11 @@ from tests.common import MockConfigEntry async def test_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, init_integration: MockConfigEntry, ) -> None: """Test the creation and values of the IPP sensors.""" - state = hass.states.get("sensor.test_ha_1000_series") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == ["idle", "printing", "stopped"] - - entry = entity_registry.async_get("sensor.test_ha_1000_series") - assert entry - assert entry.translation_key == "printer" - - state = hass.states.get("sensor.test_ha_1000_series_black_ink") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "58" - - state = hass.states.get("sensor.test_ha_1000_series_photo_black_ink") 
- assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "98" - - state = hass.states.get("sensor.test_ha_1000_series_cyan_ink") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "91" - - state = hass.states.get("sensor.test_ha_1000_series_yellow_ink") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "95" - - state = hass.states.get("sensor.test_ha_1000_series_magenta_ink") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "73" - - state = hass.states.get("sensor.test_ha_1000_series_uptime") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.state == "2019-11-11T09:10:02+00:00" - - entry = entity_registry.async_get("sensor.test_ha_1000_series_uptime") - - assert entry - assert entry.unique_id == "cfe92100-67c4-11d4-a45f-f8d027761251_uptime" - assert entry.entity_category == EntityCategory.DIAGNOSTIC + await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) async def test_disabled_by_default_sensors( diff --git a/tests/components/iqvia/snapshots/test_diagnostics.ambr b/tests/components/iqvia/snapshots/test_diagnostics.ambr index c46a2cc15e3..41cfedb0e29 100644 --- a/tests/components/iqvia/snapshots/test_diagnostics.ambr +++ b/tests/components/iqvia/snapshots/test_diagnostics.ambr @@ -348,6 +348,8 @@ 'zip_code': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'iqvia', 'entry_id': '690ac4b7e99855fc5ee7b987a758d5cb', 'minor_version': 1, @@ -356,6 +358,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index f489d7b7bb5..9091694e6a5 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -5,7 +5,14 @@ from unittest.mock import AsyncMock, MagicMock, patch from bleak.backends.device import BLEDevice from habluetooth import BluetoothServiceInfoBleak -from pynecil import DeviceInfoResponse, LiveDataResponse, OperatingMode, PowerSource +from pynecil import ( + DeviceInfoResponse, + LatestRelease, + LiveDataResponse, + OperatingMode, + PowerSource, + SettingsDataResponse, +) import pytest from homeassistant.components.iron_os import DOMAIN @@ -107,6 +114,25 @@ def mock_ble_device() -> Generator[MagicMock]: yield ble_device +@pytest.fixture(autouse=True) +def mock_ironosupdate() -> Generator[AsyncMock]: + """Mock IronOSUpdate.""" + + with patch( + "homeassistant.components.iron_os.IronOSUpdate", + autospec=True, + ) as mock_client: + client = mock_client.return_value + client.latest_release.return_value = LatestRelease( + html_url="https://github.com/Ralim/IronOS/releases/tag/v2.22", + name="V2.22 | TS101 & S60 Added | PinecilV2 improved", + tag_name="v2.22", + body="**RELEASE_NOTES**", + ) + + yield client + + @pytest.fixture def mock_pynecil() -> Generator[AsyncMock]: """Mock Pynecil library.""" @@ -122,6 +148,27 @@ def mock_pynecil() -> Generator[AsyncMock]: device_sn="0000c0ffeec0ffee", name=DEFAULT_NAME, ) + client.get_settings.return_value = SettingsDataResponse( + sleep_temp=150, + sleep_timeout=5, + min_dc_voltage_cells=0, + min_volltage_per_cell=3.3, + qc_ideal_voltage=9.0, + accel_sensitivity=7, + 
shutdown_time=10, + keep_awake_pulse_power=0.5, + keep_awake_pulse_delay=4, + keep_awake_pulse_duration=1, + voltage_div=600, + boost_temp=420, + calibration_offset=900, + power_limit=12.0, + temp_increment_long=10, + temp_increment_short=1, + hall_sensitivity=7, + pd_negotiation_timeout=2.0, + display_brightness=3, + ) client.get_live_data.return_value = LiveDataResponse( live_temp=298, setpoint_temp=300, diff --git a/tests/components/iron_os/snapshots/test_binary_sensor.ambr b/tests/components/iron_os/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..17b49c1d687 --- /dev/null +++ b/tests/components/iron_os/snapshots/test_binary_sensor.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.pinecil_soldering_tip-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.pinecil_soldering_tip', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soldering tip', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_connected', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.pinecil_soldering_tip-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Pinecil Soldering tip', + }), + 'context': , + 'entity_id': 'binary_sensor.pinecil_soldering_tip', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/iron_os/snapshots/test_number.ambr b/tests/components/iron_os/snapshots/test_number.ambr index 2f5ee62e37e..24663cc4b0f 100644 --- a/tests/components/iron_os/snapshots/test_number.ambr +++ b/tests/components/iron_os/snapshots/test_number.ambr @@ -1,4 +1,732 @@ # serializer version: 1 +# name: test_state[number.pinecil_boost_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 450, + 'min': 0, + 'mode': , + 'step': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_boost_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Boost temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_boost_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_boost_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Boost temperature', + 'max': 450, + 'min': 0, + 'mode': , + 'step': 10, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_boost_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '420', + }) +# --- +# name: test_state[number.pinecil_calibration_offset-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2500, + 'min': 100, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_calibration_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Calibration offset', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_calibration_offset', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_calibration_offset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Calibration offset', + 'max': 2500, + 'min': 100, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_calibration_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '900', + }) +# --- +# name: test_state[number.pinecil_display_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_display_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display brightness', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_display_brightness', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[number.pinecil_display_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Display brightness', + 'max': 5, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.pinecil_display_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_state[number.pinecil_hall_effect_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_hall_effect_sensitivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hall effect sensitivity', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_hall_sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[number.pinecil_hall_effect_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Hall effect sensitivity', + 'max': 9, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.pinecil_hall_effect_sensitivity', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_delay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 22.5, + 'min': 2.5, + 'mode': , + 'step': 2.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_keep_awake_pulse_delay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Keep-awake pulse delay', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_keep_awake_pulse_delay', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_delay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Keep-awake pulse delay', + 'max': 22.5, + 'min': 2.5, + 'mode': , + 'step': 2.5, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_keep_awake_pulse_delay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2250, + 'min': 250, + 'mode': , + 'step': 250, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_keep_awake_pulse_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Keep-awake pulse duration', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_keep_awake_pulse_duration', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Keep-awake pulse duration', + 'max': 2250, + 'min': 250, + 'mode': , + 'step': 250, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_keep_awake_pulse_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '250', + }) +# --- +# name: test_state[number.pinecil_keep_awake_pulse_intensity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9.9, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_keep_awake_pulse_intensity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Keep-awake pulse intensity', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_keep_awake_pulse_power', + 'unit_of_measurement': , + }) +# --- +# name: 
test_state[number.pinecil_keep_awake_pulse_intensity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Keep-awake pulse intensity', + 'max': 9.9, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_keep_awake_pulse_intensity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.5', + }) +# --- +# name: test_state[number.pinecil_long_press_temperature_step-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 90, + 'min': 5, + 'mode': , + 'step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_long_press_temperature_step', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Long-press temperature step', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_temp_increment_long', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_long_press_temperature_step-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Long-press temperature step', + 'max': 90, + 'min': 5, + 'mode': , + 'step': 5, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_long_press_temperature_step', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_state[number.pinecil_min_voltage_per_cell-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3.8, + 'min': 2.4, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_min_voltage_per_cell', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Min. voltage per cell', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_min_voltage_per_cell', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_min_voltage_per_cell-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Min. 
voltage per cell', + 'max': 3.8, + 'min': 2.4, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_min_voltage_per_cell', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_state[number.pinecil_motion_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_motion_sensitivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion sensitivity', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_accel_sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[number.pinecil_motion_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Motion sensitivity', + 'max': 9, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.pinecil_motion_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_state[number.pinecil_power_delivery_timeout-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5.0, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_power_delivery_timeout', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power Delivery timeout', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_pd_timeout', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_power_delivery_timeout-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Pinecil Power Delivery timeout', + 'max': 5.0, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_power_delivery_timeout', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_state[number.pinecil_power_limit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 12, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_power_limit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power limit', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_power_limit', + 
'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_power_limit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Power limit', + 'max': 12, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_power_limit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.0', + }) +# --- +# name: test_state[number.pinecil_quick_charge_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 22.0, + 'min': 9.0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_quick_charge_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Quick Charge voltage', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_qc_max_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_quick_charge_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Pinecil Quick Charge voltage', + 'max': 22.0, + 'min': 9.0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_quick_charge_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.0', + }) +# --- # name: test_state[number.pinecil_setpoint_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -56,3 +784,284 @@ 'state': '300', }) # --- +# name: test_state[number.pinecil_short_press_temperature_step-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 50, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_short_press_temperature_step', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Short-press temperature step', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_temp_increment_short', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_short_press_temperature_step-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Short-press temperature step', + 'max': 50, + 'min': 1, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_short_press_temperature_step', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_state[number.pinecil_shutdown_timeout-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 60, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 
'number.pinecil_shutdown_timeout', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Shutdown timeout', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_shutdown_timeout', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_shutdown_timeout-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Pinecil Shutdown timeout', + 'max': 60, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_shutdown_timeout', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_state[number.pinecil_sleep_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 450, + 'min': 10, + 'mode': , + 'step': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_sleep_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sleep temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_sleep_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_sleep_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Sleep temperature', + 'max': 450, + 'min': 10, + 'mode': , + 'step': 10, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_sleep_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '150', + }) +# --- +# name: test_state[number.pinecil_sleep_timeout-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 15, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_sleep_timeout', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sleep timeout', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_sleep_timeout', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_sleep_timeout-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Sleep timeout', + 'max': 15, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_sleep_timeout', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_state[number.pinecil_voltage_divider-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 900, + 'min': 360, + 
'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_voltage_divider', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Voltage divider', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_voltage_div', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[number.pinecil_voltage_divider-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Voltage divider', + 'max': 900, + 'min': 360, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.pinecil_voltage_divider', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '600', + }) +# --- diff --git a/tests/components/iron_os/snapshots/test_sensor.ambr b/tests/components/iron_os/snapshots/test_sensor.ambr index 64cb951dacc..9ab5d47eec8 100644 --- a/tests/components/iron_os/snapshots/test_sensor.ambr +++ b/tests/components/iron_os/snapshots/test_sensor.ambr @@ -502,7 +502,7 @@ 'name': None, 'options': dict({ 'sensor': dict({ - 'suggested_display_precision': 3, + 'suggested_display_precision': 0, }), }), 'original_device_class': , @@ -513,7 +513,7 @@ 'supported_features': 0, 'translation_key': , 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_voltage', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.pinecil_raw_tip_voltage-state] @@ -522,7 +522,7 @@ 'device_class': 'voltage', 'friendly_name': 'Pinecil Raw tip voltage', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.pinecil_raw_tip_voltage', @@ -537,7 +537,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -569,6 +571,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Pinecil Tip resistance', + 'state_class': , 'unit_of_measurement': 'Ω', }), 'context': , diff --git a/tests/components/iron_os/snapshots/test_update.ambr b/tests/components/iron_os/snapshots/test_update.ambr new file mode 100644 index 00000000000..e0872d032ec --- /dev/null +++ b/tests/components/iron_os/snapshots/test_update.ambr @@ -0,0 +1,63 @@ +# serializer version: 1 +# name: test_update.2 + '**RELEASE_NOTES**' +# --- +# name: test_update[update.pinecil_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.pinecil_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'c0:ff:ee:c0:ff:ee_firmware', + 'unit_of_measurement': None, + }) +# --- +# name: test_update[update.pinecil_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 
'entity_picture': 'https://brands.home-assistant.io/_/iron_os/icon.png', + 'friendly_name': 'Pinecil Firmware', + 'in_progress': False, + 'installed_version': 'v2.22', + 'latest_version': 'v2.22', + 'release_summary': None, + 'release_url': 'https://github.com/Ralim/IronOS/releases/tag/v2.22', + 'skipped_version': None, + 'supported_features': , + 'title': 'IronOS V2.22 | TS101 & S60 Added | PinecilV2 improved', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.pinecil_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/iron_os/test_binary_sensor.py b/tests/components/iron_os/test_binary_sensor.py new file mode 100644 index 00000000000..291fbf80573 --- /dev/null +++ b/tests/components/iron_os/test_binary_sensor.py @@ -0,0 +1,77 @@ +"""Tests for the Pinecil Binary Sensors.""" + +from collections.abc import AsyncGenerator +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pynecil import LiveDataResponse +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.binary_sensor import STATE_OFF, STATE_ON +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.fixture(autouse=True) +async def binary_sensor_only() -> AsyncGenerator[None]: + """Enable only the binary sensor platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.BINARY_SENSOR], + ): + yield + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_pynecil", "ble_device" +) +async def test_binary_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test the Pinecil binary sensor platform.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "ble_device", "mock_pynecil" +) +async def test_tip_on_off( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test tip_connected binary sensor on/off states.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert hass.states.get("binary_sensor.pinecil_soldering_tip").state == STATE_ON + + mock_pynecil.get_live_data.return_value = LiveDataResponse( + live_temp=479, + max_tip_temp_ability=460, + ) + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.pinecil_soldering_tip").state == STATE_OFF diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py index f7db2a813ec..21194a55eea 100644 --- a/tests/components/iron_os/test_init.py +++ b/tests/components/iron_os/test_init.py @@ -1,14 +1,17 @@ """Test init of IronOS integration.""" +from datetime 
import datetime, timedelta from unittest.mock import AsyncMock +from freezegun.api import FrozenDateTimeFactory from pynecil import CommunicationError import pytest from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.mark.usefixtures("mock_pynecil", "ble_device") @@ -45,16 +48,42 @@ async def test_update_data_config_entry_not_ready( assert config_entry.state is ConfigEntryState.SETUP_RETRY -@pytest.mark.usefixtures("ble_device") +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") async def test_setup_config_entry_not_ready( hass: HomeAssistant, config_entry: MockConfigEntry, mock_pynecil: AsyncMock, + freezer: FrozenDateTimeFactory, ) -> None: """Test config entry not ready.""" + mock_pynecil.get_settings.side_effect = CommunicationError mock_pynecil.get_device_info.side_effect = CommunicationError config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_settings_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test skipping of settings on exception.""" + mock_pynecil.get_settings.side_effect = CommunicationError + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + async_fire_time_changed(hass, datetime.now() + timedelta(seconds=60)) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert (state := hass.states.get("number.pinecil_boost_temperature")) + assert state.state == STATE_UNKNOWN diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py index 781492987ee..e0617a5012f 100644 --- a/tests/components/iron_os/test_number.py +++ b/tests/components/iron_os/test_number.py @@ -1,8 +1,10 @@ """Tests for the IronOS number platform.""" from collections.abc import AsyncGenerator +from datetime import timedelta from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory from pynecil import CharSetting, CommunicationError import pytest from syrupy.assertion import SnapshotAssertion @@ -18,11 +20,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.fixture(autouse=True) -async def sensor_only() -> AsyncGenerator[None]: +async def number_only() -> AsyncGenerator[None]: """Enable only the number platform.""" with patch( "homeassistant.components.iron_os.PLATFORMS", @@ -39,6 +41,7 @@ async def test_state( config_entry: MockConfigEntry, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test the IronOS number platform states.""" config_entry.add_to_hass(hass) @@ -47,14 +50,105 @@ async def test_state( assert config_entry.state is ConfigEntryState.LOADED + 
freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) +@pytest.mark.parametrize( + ("entity_id", "characteristic", "value", "expected_value"), + [ + ( + "number.pinecil_setpoint_temperature", + CharSetting.SETPOINT_TEMP, + 300, + 300, + ), + ( + "number.pinecil_boost_temperature", + CharSetting.BOOST_TEMP, + 420, + 420, + ), + ( + "number.pinecil_calibration_offset", + CharSetting.CALIBRATION_OFFSET, + 600, + 600, + ), + ( + "number.pinecil_display_brightness", + CharSetting.DISPLAY_BRIGHTNESS, + 3, + 3, + ), + ( + "number.pinecil_hall_effect_sensitivity", + CharSetting.HALL_SENSITIVITY, + 7, + 7, + ), + ( + "number.pinecil_keep_awake_pulse_delay", + CharSetting.KEEP_AWAKE_PULSE_DELAY, + 10.0, + 4, + ), + ( + "number.pinecil_keep_awake_pulse_duration", + CharSetting.KEEP_AWAKE_PULSE_DURATION, + 500, + 2, + ), + ( + "number.pinecil_keep_awake_pulse_intensity", + CharSetting.KEEP_AWAKE_PULSE_POWER, + 0.5, + 0.5, + ), + ( + "number.pinecil_long_press_temperature_step", + CharSetting.TEMP_INCREMENT_LONG, + 10, + 10, + ), + ( + "number.pinecil_min_voltage_per_cell", + CharSetting.MIN_VOLTAGE_PER_CELL, + 3.3, + 3.3, + ), + ("number.pinecil_motion_sensitivity", CharSetting.ACCEL_SENSITIVITY, 7, 7), + ( + "number.pinecil_power_delivery_timeout", + CharSetting.PD_NEGOTIATION_TIMEOUT, + 2.0, + 2.0, + ), + ("number.pinecil_power_limit", CharSetting.POWER_LIMIT, 12.0, 12.0), + ("number.pinecil_quick_charge_voltage", CharSetting.QC_IDEAL_VOLTAGE, 9.0, 9.0), + ( + "number.pinecil_short_press_temperature_step", + CharSetting.TEMP_INCREMENT_SHORT, + 1, + 1, + ), + ("number.pinecil_shutdown_timeout", CharSetting.SHUTDOWN_TIME, 10, 10), + ("number.pinecil_sleep_temperature", CharSetting.SLEEP_TEMP, 150, 150), + ("number.pinecil_sleep_timeout", CharSetting.SLEEP_TIMEOUT, 5, 5), + ("number.pinecil_voltage_divider", CharSetting.VOLTAGE_DIV, 600, 600), + ], +) @pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") async def test_set_value( hass: HomeAssistant, config_entry: MockConfigEntry, mock_pynecil: AsyncMock, + entity_id: str, + characteristic: CharSetting, + value: float, + expected_value: float, ) -> None: """Test the IronOS number platform set value service.""" @@ -67,12 +161,12 @@ async def test_set_value( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - service_data={ATTR_VALUE: 300}, - target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, + service_data={ATTR_VALUE: value}, + target={ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert len(mock_pynecil.write.mock_calls) == 1 - mock_pynecil.write.assert_called_once_with(CharSetting.SETPOINT_TEMP, 300) + mock_pynecil.write.assert_called_once_with(characteristic, expected_value) @pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") diff --git a/tests/components/iron_os/test_sensor.py b/tests/components/iron_os/test_sensor.py index 2f79487a7fd..fec111c5799 100644 --- a/tests/components/iron_os/test_sensor.py +++ b/tests/components/iron_os/test_sensor.py @@ -4,13 +4,13 @@ from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, MagicMock, patch from freezegun.api import FrozenDateTimeFactory -from pynecil import CommunicationError +from pynecil import CommunicationError, LiveDataResponse import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.iron_os.coordinator import SCAN_INTERVAL from 
homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -71,3 +71,34 @@ async def test_sensors_unavailable( ) for entity_entry in entity_entries: assert hass.states.get(entity_entry.entity_id).state == STATE_UNAVAILABLE + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "ble_device", "mock_pynecil" +) +async def test_tip_detection( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, + ble_device: MagicMock, +) -> None: + """Test sensor state is unknown when tip is disconnected.""" + + mock_pynecil.get_live_data.return_value = LiveDataResponse( + live_temp=479, + max_tip_temp_ability=460, + ) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + entities = { + "sensor.pinecil_tip_temperature", + "sensor.pinecil_max_tip_temperature", + "sensor.pinecil_raw_tip_voltage", + "sensor.pinecil_tip_resistance", + } + for entity_id in entities: + assert hass.states.get(entity_id).state == STATE_UNKNOWN diff --git a/tests/components/iron_os/test_update.py b/tests/components/iron_os/test_update.py new file mode 100644 index 00000000000..47f3197da0e --- /dev/null +++ b/tests/components/iron_os/test_update.py @@ -0,0 +1,77 @@ +"""Tests for IronOS update platform.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +from pynecil import UpdateException +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform +from tests.typing import WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def update_only() -> AsyncGenerator[None]: + """Enable only the update platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.UPDATE], + ): + yield + + +@pytest.mark.usefixtures("mock_pynecil", "ble_device", "mock_ironosupdate") +async def test_update( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the IronOS update platform.""" + ws_client = await hass_ws_client(hass) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + await ws_client.send_json( + { + "id": 1, + "type": "update/release_notes", + "entity_id": "update.pinecil_firmware", + } + ) + result = await ws_client.receive_json() + assert result["result"] == snapshot + + +@pytest.mark.usefixtures("ble_device", "mock_pynecil") +async def test_update_unavailable( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_ironosupdate: AsyncMock, +) -> None: + """Test update entity unavailable on error.""" + + mock_ironosupdate.latest_release.side_effect = UpdateException + + config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + state = hass.states.get("update.pinecil_firmware") + assert state is not None + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/iskra/__init__.py b/tests/components/iskra/__init__.py new file mode 100644 index 00000000000..ca93572a9e4 --- /dev/null +++ b/tests/components/iskra/__init__.py @@ -0,0 +1 @@ +"""Tests for the Iskra component.""" diff --git a/tests/components/iskra/conftest.py b/tests/components/iskra/conftest.py new file mode 100644 index 00000000000..d9cc6808aaa --- /dev/null +++ b/tests/components/iskra/conftest.py @@ -0,0 +1,46 @@ +"""Fixtures for mocking pyiskra's different protocols. + +Fixtures: +- `mock_pyiskra_rest`: Mock pyiskra Rest API protocol. +- `mock_pyiskra_modbus`: Mock pyiskra Modbus protocol. +""" + +from unittest.mock import patch + +import pytest + +from .const import PQ_MODEL, SERIAL, SG_MODEL + + +class MockBasicInfo: + """Mock BasicInfo class.""" + + def __init__(self, model) -> None: + """Initialize the mock class.""" + self.serial = SERIAL + self.model = model + self.description = "Iskra mock device" + self.location = "imagination" + self.sw_ver = "1.0.0" + + +@pytest.fixture +def mock_pyiskra_rest(): + """Mock Iskra API authenticate with Rest API protocol.""" + + with patch( + "pyiskra.adapters.RestAPI.RestAPI.get_basic_info", + return_value=MockBasicInfo(model=SG_MODEL), + ) as basic_info_mock: + yield basic_info_mock + + +@pytest.fixture +def mock_pyiskra_modbus(): + """Mock Iskra API authenticate with Modbus protocol.""" + + with patch( + "pyiskra.adapters.Modbus.Modbus.get_basic_info", + return_value=MockBasicInfo(model=PQ_MODEL), + ) as basic_info_mock: + yield basic_info_mock diff --git a/tests/components/iskra/const.py b/tests/components/iskra/const.py new file mode 100644 index 00000000000..bf38c9a4a79 --- /dev/null +++ b/tests/components/iskra/const.py @@ -0,0 +1,10 @@ +"""Constants used in the Iskra component tests.""" + +SG_MODEL = "SG-W1" +PQ_MODEL = "MC784" +SERIAL = "XXXXXXX" +HOST = "192.1.0.1" +MODBUS_PORT = 10001 +MODBUS_ADDRESS = 33 +USERNAME = "test_username" +PASSWORD = "test_password" diff --git a/tests/components/iskra/test_config_flow.py b/tests/components/iskra/test_config_flow.py new file mode 100644 index 00000000000..0c128be9850 --- /dev/null +++ b/tests/components/iskra/test_config_flow.py @@ -0,0 +1,300 @@ +"""Tests for the Iskra config flow.""" + +from pyiskra.exceptions import ( + DeviceConnectionError, + DeviceTimeoutError, + InvalidResponseCode, + NotAuthorised, +) +import pytest + +from homeassistant.components.iskra import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import ( + CONF_ADDRESS, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PROTOCOL, + CONF_USERNAME, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import ( + HOST, + MODBUS_ADDRESS, + MODBUS_PORT, + PASSWORD, + PQ_MODEL, + SERIAL, + SG_MODEL, + USERNAME, +) + +from tests.common import MockConfigEntry + + +# Test step_user with Rest API protocol
async def test_user_rest_no_auth(hass: HomeAssistant, mock_pyiskra_rest) -> None: + """Test the user flow with Rest API protocol.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + # Test if user form is provided + assert result["type"] is FlowResultType.FORM +
assert result["step_id"] == "user" + + # Test no authentication required + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + + # Test successful Rest API configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == SG_MODEL + assert result["data"] == {CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"} + + +async def test_user_rest_auth(hass: HomeAssistant, mock_pyiskra_rest) -> None: + """Test the user flow with Rest API protocol and authentication required.""" + mock_pyiskra_rest.side_effect = NotAuthorised + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + # Test if user form is provided + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + # Test if prompted to enter username and password if not authorised + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "authentication" + + # Test failed authentication + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + assert result["step_id"] == "authentication" + + # Test successful authentication + mock_pyiskra_rest.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD}, + ) + + # Test successful Rest API configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == SG_MODEL + assert result["data"] == { + CONF_HOST: HOST, + CONF_PROTOCOL: "rest_api", + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + } + + +async def test_user_modbus(hass: HomeAssistant, mock_pyiskra_modbus) -> None: + """Test the user flow with Modbus TCP protocol.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + # Test if user form is provided + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: HOST, CONF_PROTOCOL: "modbus_tcp"}, + ) + + # Test if prompted to enter port and address + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "modbus_tcp" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + }, + ) + + # Test successful Modbus TCP configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == PQ_MODEL + assert result["data"] == { + CONF_HOST: HOST, + CONF_PROTOCOL: "modbus_tcp", + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + } + + +async def test_modbus_abort_if_already_setup( + hass: HomeAssistant, mock_pyiskra_modbus +) -> None: + """Test we abort if Iskra is already set up.""" + + MockConfigEntry(domain=DOMAIN, unique_id=SERIAL).add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER},
+ data={CONF_HOST: HOST, CONF_PROTOCOL: "modbus_tcp"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "modbus_tcp" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_rest_api_abort_if_already_setup( + hass: HomeAssistant, mock_pyiskra_rest +) -> None: + """Test we abort if Iskra is already set up.""" + + MockConfigEntry(domain=DOMAIN, unique_id=SERIAL).add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("s_effect", "reason"), + [ + (DeviceConnectionError, "cannot_connect"), + (DeviceTimeoutError, "cannot_connect"), + (InvalidResponseCode, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_modbus_device_error( + hass: HomeAssistant, + mock_pyiskra_modbus, + s_effect, + reason, +) -> None: + """Test device error with Modbus TCP protocol.""" + mock_pyiskra_modbus.side_effect = s_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: HOST, CONF_PROTOCOL: "modbus_tcp"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "modbus_tcp" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + }, + ) + + # Test if error returned + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "modbus_tcp" + assert result["errors"] == {"base": reason} + + # Remove side effect + mock_pyiskra_modbus.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + }, + ) + + # Test successful Modbus TCP configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == PQ_MODEL + assert result["data"] == { + CONF_HOST: HOST, + CONF_PROTOCOL: "modbus_tcp", + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + } + + +@pytest.mark.parametrize( + ("s_effect", "reason"), + [ + (DeviceConnectionError, "cannot_connect"), + (DeviceTimeoutError, "cannot_connect"), + (InvalidResponseCode, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_rest_device_error( + hass: HomeAssistant, + mock_pyiskra_rest, + s_effect, + reason, +) -> None: + """Test device error with Rest API protocol.""" + mock_pyiskra_rest.side_effect = s_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + + # Test if error returned + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": reason} + + # Remove side effect + mock_pyiskra_rest.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + + # Test successful Rest API configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert
result["result"].unique_id == SERIAL + assert result["title"] == SG_MODEL + assert result["data"] == {CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"} diff --git a/tests/components/israel_rail/test_sensor.py b/tests/components/israel_rail/test_sensor.py index d044dfe1d7c..85b7328742f 100644 --- a/tests/components/israel_rail/test_sensor.py +++ b/tests/components/israel_rail/test_sensor.py @@ -26,7 +26,6 @@ async def test_valid_config( ) -> None: """Ensure everything starts correctly.""" await init_integration(hass, mock_config_entry) - assert len(hass.states.async_entity_ids()) == 6 await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/ista_ecotrend/test_config_flow.py b/tests/components/ista_ecotrend/test_config_flow.py index b702b0331e8..d6c88c51c99 100644 --- a/tests/components/ista_ecotrend/test_config_flow.py +++ b/tests/components/ista_ecotrend/test_config_flow.py @@ -6,7 +6,7 @@ from pyecotrend_ista import LoginError, ServerError import pytest from homeassistant.components.ista_ecotrend.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -98,15 +98,7 @@ async def test_reauth( ista_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": ista_config_entry.entry_id, - "unique_id": ista_config_entry.unique_id, - }, - ) - + result = await ista_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -148,15 +140,7 @@ async def test_reauth_error_and_recover( ista_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": ista_config_entry.entry_id, - "unique_id": ista_config_entry.unique_id, - }, - ) - + result = await ista_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/isy994/test_config_flow.py b/tests/components/isy994/test_config_flow.py index 411439e2e70..2bc1fff222f 100644 --- a/tests/components/isy994/test_config_flow.py +++ b/tests/components/isy994/test_config_flow.py @@ -644,10 +644,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": MOCK_UUID}, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -701,3 +698,16 @@ async def test_reauth(hass: HomeAssistant) -> None: assert mock_setup_entry.called assert result4["type"] is FlowResultType.ABORT assert result4["reason"] == "reauth_successful" + + +async def test_options_flow(hass: HomeAssistant) -> None: + """Test option flow.""" + entry = MockConfigEntry(domain=DOMAIN) + entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # This should be improved at a later stage to increase test coverage + hass.config_entries.options.async_abort(result["flow_id"]) diff --git 
a/tests/components/ituran/__init__.py b/tests/components/ituran/__init__.py new file mode 100644 index 00000000000..52fccaad138 --- /dev/null +++ b/tests/components/ituran/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Ituran integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Set up the Ituran integration for testing.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/ituran/conftest.py b/tests/components/ituran/conftest.py new file mode 100644 index 00000000000..ef22c90591d --- /dev/null +++ b/tests/components/ituran/conftest.py @@ -0,0 +1,83 @@ +"""Mocks for the Ituran integration.""" + +from collections.abc import Generator +from datetime import datetime +from unittest.mock import AsyncMock, PropertyMock, patch + +import pytest + +from homeassistant.components.ituran.const import ( + CONF_ID_OR_PASSPORT, + CONF_MOBILE_ID, + CONF_PHONE_NUMBER, + DOMAIN, +) + +from .const import MOCK_CONFIG_DATA + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.ituran.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title=f"Ituran {MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT]}", + domain=DOMAIN, + data={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + CONF_MOBILE_ID: MOCK_CONFIG_DATA[CONF_MOBILE_ID], + }, + unique_id=MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + ) + + +class MockVehicle: + """Mock vehicle.""" + + def __init__(self) -> None: + """Initialize mock vehicle.""" + self.license_plate = "12345678" + self.make = "mock make" + self.model = "mock model" + self.mileage = 1000 + self.speed = 20 + self.gps_coordinates = (25.0, -71.0) + self.address = "Bermuda Triangle" + self.heading = 150 + self.last_update = datetime(2024, 1, 1, 0, 0, 0) + + +@pytest.fixture +def mock_ituran() -> Generator[AsyncMock]: + """Return a mocked Ituran client.""" + with ( + patch( + "homeassistant.components.ituran.coordinator.Ituran", + autospec=True, + ) as ituran, + patch( + "homeassistant.components.ituran.config_flow.Ituran", + new=ituran, + ), + ): + mock_ituran = ituran.return_value + mock_ituran.is_authenticated.return_value = False + mock_ituran.authenticate.return_value = True + mock_ituran.get_vehicles.return_value = [MockVehicle()] + type(mock_ituran).mobile_id = PropertyMock( + return_value=MOCK_CONFIG_DATA[CONF_MOBILE_ID] + ) + + yield mock_ituran diff --git a/tests/components/ituran/const.py b/tests/components/ituran/const.py new file mode 100644 index 00000000000..b566caebbbe --- /dev/null +++ b/tests/components/ituran/const.py @@ -0,0 +1,24 @@ +"""Constants for tests of the Ituran component.""" + +from typing import Any + +from homeassistant.components.ituran.const import ( + CONF_ID_OR_PASSPORT, + CONF_MOBILE_ID, + CONF_PHONE_NUMBER, + DOMAIN, +) + +MOCK_CONFIG_DATA: dict[str, str] = { + CONF_ID_OR_PASSPORT: "12345678", + CONF_PHONE_NUMBER: "0501234567", + CONF_MOBILE_ID: "0123456789abcdef", +} + +MOCK_CONFIG_ENTRY: dict[str, Any] = { + "domain": DOMAIN, +
"entry_id": "1", + "source": "user", + "title": MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + "data": MOCK_CONFIG_DATA, +} diff --git a/tests/components/ituran/snapshots/test_device_tracker.ambr b/tests/components/ituran/snapshots/test_device_tracker.ambr new file mode 100644 index 00000000000..3b650f7927f --- /dev/null +++ b/tests/components/ituran/snapshots/test_device_tracker.ambr @@ -0,0 +1,51 @@ +# serializer version: 1 +# name: test_device_tracker[device_tracker.mock_model-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.mock_model', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'car', + 'unique_id': '12345678-device_tracker', + 'unit_of_measurement': None, + }) +# --- +# name: test_device_tracker[device_tracker.mock_model-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'mock model', + 'gps_accuracy': 0, + 'latitude': 25.0, + 'longitude': -71.0, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.mock_model', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- diff --git a/tests/components/ituran/snapshots/test_init.ambr b/tests/components/ituran/snapshots/test_init.ambr new file mode 100644 index 00000000000..1e64ef9e850 --- /dev/null +++ b/tests/components/ituran/snapshots/test_init.ambr @@ -0,0 +1,35 @@ +# serializer version: 1 +# name: test_device + list([ + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'ituran', + '12345678', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'mock make', + 'model': 'mock model', + 'model_id': None, + 'name': 'mock model', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '12345678', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }), + ]) +# --- diff --git a/tests/components/ituran/test_config_flow.py b/tests/components/ituran/test_config_flow.py new file mode 100644 index 00000000000..19253103ad7 --- /dev/null +++ b/tests/components/ituran/test_config_flow.py @@ -0,0 +1,254 @@ +"""Test the Ituran config flow.""" + +from unittest.mock import AsyncMock + +from pyituran.exceptions import IturanApiError, IturanAuthError +import pytest + +from homeassistant.components.ituran.const import ( + CONF_ID_OR_PASSPORT, + CONF_MOBILE_ID, + CONF_OTP, + CONF_PHONE_NUMBER, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import setup_integration +from .const import MOCK_CONFIG_DATA + +from tests.common import MockConfigEntry + + +async def __do_successful_user_step( + hass: HomeAssistant, result: ConfigFlowResult, mock_ituran: AsyncMock +): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "otp" + assert result["errors"] == {} + + return result + + +async def __do_successful_otp_step( + hass: HomeAssistant, + result: ConfigFlowResult, + mock_ituran: AsyncMock, +): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_OTP: "123456", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"Ituran {MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT]}" + assert result["data"][CONF_ID_OR_PASSPORT] == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] + assert result["data"][CONF_PHONE_NUMBER] == MOCK_CONFIG_DATA[CONF_PHONE_NUMBER] + assert result["data"][CONF_MOBILE_ID] is not None + assert result["result"].unique_id == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] + assert len(mock_ituran.is_authenticated.mock_calls) > 0 + assert len(mock_ituran.authenticate.mock_calls) > 0 + + return result + + +async def test_full_user_flow( + hass: HomeAssistant, mock_ituran: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await __do_successful_user_step(hass, result, mock_ituran) + await __do_successful_otp_step(hass, result, mock_ituran) + + +async def test_invalid_auth( + hass: HomeAssistant, mock_ituran: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test invalid credentials configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + mock_ituran.request_otp.side_effect = IturanAuthError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "invalid_auth"} + + mock_ituran.request_otp.side_effect = None + result = await __do_successful_user_step(hass, result, mock_ituran) + await __do_successful_otp_step(hass, result, mock_ituran) + + +async def test_invalid_otp( + hass: HomeAssistant, mock_ituran: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test invalid OTP configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await __do_successful_user_step(hass, result, mock_ituran) + + mock_ituran.authenticate.side_effect = IturanAuthError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_OTP: "123456", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_otp"} + + 
mock_ituran.authenticate.side_effect = None + await __do_successful_otp_step(hass, result, mock_ituran) + + +@pytest.mark.parametrize( + ("exception", "expected_error"), + [(IturanApiError, "cannot_connect"), (Exception, "unknown")], +) +async def test_errors( + hass: HomeAssistant, + mock_ituran: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + expected_error: str, +) -> None: + """Test connection errors during configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + mock_ituran.request_otp.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": expected_error} + + mock_ituran.request_otp.side_effect = None + result = await __do_successful_user_step(hass, result, mock_ituran) + + mock_ituran.authenticate.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_OTP: "123456", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + mock_ituran.authenticate.side_effect = None + await __do_successful_otp_step(hass, result, mock_ituran) + + +async def test_already_authenticated( + hass: HomeAssistant, mock_ituran: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test user already authenticated configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + mock_ituran.is_authenticated.return_value = True + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_ID_OR_PASSPORT: MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT], + CONF_PHONE_NUMBER: MOCK_CONFIG_DATA[CONF_PHONE_NUMBER], + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"Ituran {MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT]}" + assert result["data"][CONF_ID_OR_PASSPORT] == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] + assert result["data"][CONF_PHONE_NUMBER] == MOCK_CONFIG_DATA[CONF_PHONE_NUMBER] + assert result["data"][CONF_MOBILE_ID] == MOCK_CONFIG_DATA[CONF_MOBILE_ID] + assert result["result"].unique_id == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] + + +async def test_reauth( + hass: HomeAssistant, + mock_ituran: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauthenticating.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await __do_successful_user_step(hass, result, mock_ituran) + await __do_successful_otp_step(hass, result, mock_ituran) + + await setup_integration(hass, mock_config_entry) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] is None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "otp" + assert result["errors"] == 
{} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_OTP: "123456", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" diff --git a/tests/components/ituran/test_device_tracker.py b/tests/components/ituran/test_device_tracker.py new file mode 100644 index 00000000000..7bcb314cde7 --- /dev/null +++ b/tests/components/ituran/test_device_tracker.py @@ -0,0 +1,61 @@ +"""Test the Ituran device_tracker.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from pyituran.exceptions import IturanApiError +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.ituran.const import UPDATE_INTERVAL +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_device_tracker( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_ituran: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state of device_tracker.""" + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_availability( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_ituran: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test device is marked as unavailable when we can't reach the Ituran service.""" + entity_id = "device_tracker.mock_model" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get(entity_id) + assert state + assert state.state != STATE_UNAVAILABLE + + mock_ituran.get_vehicles.side_effect = IturanApiError + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNAVAILABLE + + mock_ituran.get_vehicles.side_effect = None + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state != STATE_UNAVAILABLE diff --git a/tests/components/ituran/test_init.py b/tests/components/ituran/test_init.py new file mode 100644 index 00000000000..3dfe946cdf9 --- /dev/null +++ b/tests/components/ituran/test_init.py @@ -0,0 +1,113 @@ +"""Tests for the Ituran integration.""" + +from unittest.mock import AsyncMock + +from pyituran.exceptions import IturanApiError, IturanAuthError +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_ituran: AsyncMock, +) -> None: + """Test the Ituran configuration entry loading/unloading.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_ituran: AsyncMock, + snapshot: SnapshotAssertion, + device_registry: dr.DeviceRegistry, +) -> None: + """Test the device information.""" + await setup_integration(hass, mock_config_entry) + + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + assert device_entries == snapshot + + +async def test_remove_stale_devices( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_ituran: AsyncMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Test that devices not returned by the service are removed.""" + await setup_integration(hass, mock_config_entry) + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 + + mock_ituran.get_vehicles.return_value = [] + await mock_config_entry.runtime_data.async_refresh() + await hass.async_block_till_done() + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 0 + + +async def test_recover_from_errors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_ituran: AsyncMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Verify we can recover from service Errors.""" + + await setup_integration(hass, mock_config_entry) + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 + + mock_ituran.get_vehicles.side_effect = IturanApiError + await mock_config_entry.runtime_data.async_refresh() + await hass.async_block_till_done() + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 + + mock_ituran.get_vehicles.side_effect = IturanAuthError + await mock_config_entry.runtime_data.async_refresh() + await hass.async_block_till_done() + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 + + mock_ituran.get_vehicles.side_effect = None + await mock_config_entry.runtime_data.async_refresh() + await hass.async_block_till_done() + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + + assert len(device_entries) == 1 diff --git a/tests/components/jellyfin/test_config_flow.py b/tests/components/jellyfin/test_config_flow.py index c84a12d26a5..a8ffbcbf46c 100644 --- a/tests/components/jellyfin/test_config_flow.py +++ b/tests/components/jellyfin/test_config_flow.py @@ -222,14 +222,7 @@ async def test_reauth( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, 
- data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -272,14 +265,7 @@ async def test_reauth_cannot_connect( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -339,14 +325,7 @@ async def test_reauth_invalid( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -400,14 +379,7 @@ async def test_reauth_exception( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/jellyfin/test_media_source.py b/tests/components/jellyfin/test_media_source.py index a57d51de1f1..2aca59a4d26 100644 --- a/tests/components/jellyfin/test_media_source.py +++ b/tests/components/jellyfin/test_media_source.py @@ -6,7 +6,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.jellyfin.const import DOMAIN -from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_player import BrowseError from homeassistant.components.media_source import ( DOMAIN as MEDIA_SOURCE_DOMAIN, URI_SCHEME, diff --git a/tests/components/jellyfin/test_remote.py b/tests/components/jellyfin/test_remote.py new file mode 100644 index 00000000000..38390eabdcc --- /dev/null +++ b/tests/components/jellyfin/test_remote.py @@ -0,0 +1,93 @@ +"""Tests for the Jellyfin remote platform.""" + +from unittest.mock import MagicMock + +from homeassistant.components.remote import ( + ATTR_COMMAND, + ATTR_DELAY_SECS, + ATTR_HOLD_SECS, + ATTR_NUM_REPEATS, + DOMAIN as R_DOMAIN, + SERVICE_SEND_COMMAND, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_remote( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + init_integration: MockConfigEntry, + mock_jellyfin: MagicMock, + mock_api: MagicMock, +) -> None: + """Test the Jellyfin remote.""" + state = hass.states.get("remote.jellyfin_device") + state2 = hass.states.get("remote.jellyfin_device_two") + state3 = hass.states.get("remote.jellyfin_device_three") + state4 = hass.states.get("remote.jellyfin_device_four") + + assert state + assert state2 + # Doesn't support remote 
control; remote not created + assert state3 is None + assert state4 + + assert state.state == STATE_ON + + +async def test_services( + hass: HomeAssistant, + init_integration: MockConfigEntry, + mock_jellyfin: MagicMock, + mock_api: MagicMock, +) -> None: + """Test Jellyfin remote services.""" + state = hass.states.get("remote.jellyfin_device") + assert state + + command = "Select" + await hass.services.async_call( + R_DOMAIN, + SERVICE_SEND_COMMAND, + { + ATTR_ENTITY_ID: state.entity_id, + ATTR_COMMAND: command, + ATTR_NUM_REPEATS: 1, + ATTR_DELAY_SECS: 0, + ATTR_HOLD_SECS: 0, + }, + blocking=True, + ) + assert len(mock_api.command.mock_calls) == 1 + assert mock_api.command.mock_calls[0].args == ( + "SESSION-UUID", + command, + ) + + command = "MoveLeft" + await hass.services.async_call( + R_DOMAIN, + SERVICE_SEND_COMMAND, + { + ATTR_ENTITY_ID: state.entity_id, + ATTR_COMMAND: command, + ATTR_NUM_REPEATS: 2, + ATTR_DELAY_SECS: 0, + ATTR_HOLD_SECS: 0, + }, + blocking=True, + ) + assert len(mock_api.command.mock_calls) == 3 + assert mock_api.command.mock_calls[1].args == ( + "SESSION-UUID", + command, + ) + assert mock_api.command.mock_calls[2].args == ( + "SESSION-UUID", + command, + ) diff --git a/tests/components/jellyfin/test_sensor.py b/tests/components/jellyfin/test_sensor.py index 40a3e62a6c0..82d42d7a27a 100644 --- a/tests/components/jellyfin/test_sensor.py +++ b/tests/components/jellyfin/test_sensor.py @@ -4,12 +4,7 @@ from unittest.mock import MagicMock from homeassistant.components.jellyfin.const import DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_FRIENDLY_NAME, - ATTR_ICON, - ATTR_UNIT_OF_MEASUREMENT, -) +from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, ATTR_ICON from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -24,13 +19,12 @@ async def test_watching( mock_jellyfin: MagicMock, ) -> None: """Test the Jellyfin watching sensor.""" - state = hass.states.get("sensor.jellyfin_server") + state = hass.states.get("sensor.jellyfin_server_active_clients") assert state assert state.attributes.get(ATTR_DEVICE_CLASS) is None - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "JELLYFIN-SERVER" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "JELLYFIN-SERVER Active clients" assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_STATE_CLASS) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Watching" assert state.state == "3" entry = entity_registry.async_get(state.entity_id) diff --git a/tests/components/jewish_calendar/test_config_flow.py b/tests/components/jewish_calendar/test_config_flow.py index 466d3a1e4f0..e00fe41749f 100644 --- a/tests/components/jewish_calendar/test_config_flow.py +++ b/tests/components/jewish_calendar/test_config_flow.py @@ -2,8 +2,6 @@ from unittest.mock import AsyncMock -import pytest - from homeassistant import config_entries, setup from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, @@ -20,12 +18,10 @@ from homeassistant.const import ( CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE, - CONF_NAME, CONF_TIME_ZONE, ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -59,49 +55,6 @@ async def test_step_user(hass: HomeAssistant, mock_setup_entry: AsyncMock) 
-> No assert entries[0].data[CONF_TIME_ZONE] == hass.config.time_zone -@pytest.mark.parametrize("diaspora", [True, False]) -@pytest.mark.parametrize("language", ["hebrew", "english"]) -async def test_import_no_options(hass: HomeAssistant, language, diaspora) -> None: - """Test that the import step works.""" - conf = { - DOMAIN: {CONF_NAME: "test", CONF_LANGUAGE: language, CONF_DIASPORA: diaspora} - } - - assert await async_setup_component(hass, DOMAIN, conf.copy()) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - for entry_key, entry_val in entries[0].data.items(): - assert entry_val == conf[DOMAIN][entry_key] - - -async def test_import_with_options(hass: HomeAssistant) -> None: - """Test that the import step works.""" - conf = { - DOMAIN: { - CONF_NAME: "test", - CONF_DIASPORA: DEFAULT_DIASPORA, - CONF_LANGUAGE: DEFAULT_LANGUAGE, - CONF_CANDLE_LIGHT_MINUTES: 20, - CONF_HAVDALAH_OFFSET_MINUTES: 50, - CONF_LATITUDE: 31.76, - CONF_LONGITUDE: 35.235, - } - } - - # Simulate HomeAssistant setting up the component - assert await async_setup_component(hass, DOMAIN, conf.copy()) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - for entry_key, entry_val in entries[0].data.items(): - assert entry_val == conf[DOMAIN][entry_key] - for entry_key, entry_val in entries[0].options.items(): - assert entry_val == conf[DOMAIN][entry_key] - - async def test_single_instance_allowed( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -164,3 +117,28 @@ async def test_options_reconfigure( assert ( mock_config_entry.options[CONF_CANDLE_LIGHT_MINUTES] == DEFAULT_CANDLE_LIGHT + 1 ) + + +async def test_reconfigure( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test starting a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # init user flow + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + # success + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_DIASPORA: not DEFAULT_DIASPORA, + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config_entry.data[CONF_DIASPORA] is not DEFAULT_DIASPORA diff --git a/tests/components/jewish_calendar/test_init.py b/tests/components/jewish_calendar/test_init.py index b8454b41a60..cb982afec0f 100644 --- a/tests/components/jewish_calendar/test_init.py +++ b/tests/components/jewish_calendar/test_init.py @@ -1,76 +1 @@ """Tests for the Jewish Calendar component's init.""" - -from hdate import Location - -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSORS -from homeassistant.components.jewish_calendar import get_unique_prefix -from homeassistant.components.jewish_calendar.const import ( - CONF_CANDLE_LIGHT_MINUTES, - CONF_DIASPORA, - CONF_HAVDALAH_OFFSET_MINUTES, - DEFAULT_DIASPORA, - DEFAULT_LANGUAGE, - DOMAIN, -) -from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME -from homeassistant.core import HomeAssistant -import homeassistant.helpers.entity_registry as er -from homeassistant.setup import async_setup_component - - -async def test_import_unique_id_migration(hass: HomeAssistant) -> None: - """Test 
unique_id migration.""" - yaml_conf = { - DOMAIN: { - CONF_NAME: "test", - CONF_DIASPORA: DEFAULT_DIASPORA, - CONF_LANGUAGE: DEFAULT_LANGUAGE, - CONF_CANDLE_LIGHT_MINUTES: 20, - CONF_HAVDALAH_OFFSET_MINUTES: 50, - CONF_LATITUDE: 31.76, - CONF_LONGITUDE: 35.235, - } - } - - # Create an entry in the entity registry with the data from conf - ent_reg = er.async_get(hass) - location = Location( - latitude=yaml_conf[DOMAIN][CONF_LATITUDE], - longitude=yaml_conf[DOMAIN][CONF_LONGITUDE], - timezone=hass.config.time_zone, - diaspora=DEFAULT_DIASPORA, - ) - old_prefix = get_unique_prefix(location, DEFAULT_LANGUAGE, 20, 50) - sample_entity = ent_reg.async_get_or_create( - BINARY_SENSORS, - DOMAIN, - unique_id=f"{old_prefix}_erev_shabbat_hag", - suggested_object_id=f"{DOMAIN}_erev_shabbat_hag", - ) - # Save the existing unique_id, DEFAULT_LANGUAGE should be part of it - old_unique_id = sample_entity.unique_id - assert DEFAULT_LANGUAGE in old_unique_id - - # Simulate HomeAssistant setting up the component - assert await async_setup_component(hass, DOMAIN, yaml_conf.copy()) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - for entry_key, entry_val in entries[0].data.items(): - assert entry_val == yaml_conf[DOMAIN][entry_key] - for entry_key, entry_val in entries[0].options.items(): - assert entry_val == yaml_conf[DOMAIN][entry_key] - - # Assert that the unique_id was updated - new_unique_id = ent_reg.async_get(sample_entity.entity_id).unique_id - assert new_unique_id != old_unique_id - assert DEFAULT_LANGUAGE not in new_unique_id - - # Confirm that when the component is reloaded, the unique_id is not changed - assert ent_reg.async_get(sample_entity.entity_id).unique_id == new_unique_id - - # Confirm that all the unique_ids are prefixed correctly - await hass.config_entries.async_reload(entries[0].entry_id) - er_entries = er.async_entries_for_config_entry(ent_reg, entries[0].entry_id) - assert all(entry.unique_id.startswith(entries[0].entry_id) for entry in er_entries) diff --git a/tests/components/jewish_calendar/test_sensor.py b/tests/components/jewish_calendar/test_sensor.py index cb054751f67..4897ef7749b 100644 --- a/tests/components/jewish_calendar/test_sensor.py +++ b/tests/components/jewish_calendar/test_sensor.py @@ -93,7 +93,26 @@ TEST_PARAMS = [ "id": "rosh_hashana_i", "type": "YOM_TOV", "type_id": 1, - "options": [h.description.english for h in htables.HOLIDAYS], + "options": htables.get_all_holidays("english"), + }, + ), + ( + dt(2024, 12, 31), + "UTC", + 31.778, + 35.235, + "english", + "holiday", + False, + "Chanukah, Rosh Chodesh", + { + "device_class": "enum", + "friendly_name": "Jewish Calendar Holiday", + "icon": "mdi:calendar-star", + "id": "chanukah, rosh_chodesh", + "type": "MELACHA_PERMITTED_HOLIDAY, ROSH_CHODESH", + "type_id": "4, 10", + "options": htables.get_all_holidays("english"), }, ), ( @@ -180,6 +199,7 @@ TEST_IDS = [ "date_output_hebrew", "holiday", "holiday_english", + "holiday_multiple", "torah_reading", "first_stars_ny", "first_stars_jerusalem", diff --git a/tests/components/justnimbus/test_config_flow.py b/tests/components/justnimbus/test_config_flow.py index f66693a752c..330b05bf48c 100644 --- a/tests/components/justnimbus/test_config_flow.py +++ b/tests/components/justnimbus/test_config_flow.py @@ -125,14 +125,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": 
config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, - data=FIXTURE_OLD_USER_INPUT, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/jvc_projector/test_config_flow.py b/tests/components/jvc_projector/test_config_flow.py index 282411540a4..d7eb0995bbd 100644 --- a/tests/components/jvc_projector/test_config_flow.py +++ b/tests/components/jvc_projector/test_config_flow.py @@ -6,7 +6,7 @@ from jvcprojector import JvcProjectorAuthError, JvcProjectorConnectError import pytest from homeassistant.components.jvc_projector.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -163,14 +163,7 @@ async def test_reauth_config_flow_success( hass: HomeAssistant, mock_device: AsyncMock, mock_integration: MockConfigEntry ) -> None: """Test reauth config flow success.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -194,14 +187,7 @@ async def test_reauth_config_flow_auth_error( """Test reauth config flow when connect fails.""" mock_device.connect.side_effect = JvcProjectorAuthError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -218,14 +204,7 @@ async def test_reauth_config_flow_auth_error( mock_device.connect.side_effect = None - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -249,14 +228,7 @@ async def test_reauth_config_flow_connect_error( """Test reauth config flow when connect fails.""" mock_device.connect.side_effect = JvcProjectorConnectError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -273,14 +245,7 @@ async def test_reauth_config_flow_connect_error( mock_device.connect.side_effect = None - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git 
a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py new file mode 100644 index 00000000000..6a738094ae6 --- /dev/null +++ b/tests/components/kitchen_sink/test_backup.py @@ -0,0 +1,215 @@ +"""Test the Kitchen Sink backup platform.""" + +from collections.abc import AsyncGenerator +from io import StringIO +from unittest.mock import patch + +import pytest + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + Folder, +) +from homeassistant.components.kitchen_sink import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def backup_only() -> AsyncGenerator[None]: + """Enable only the backup platform. + + The backup platform is not an entity platform. + """ + with patch( + "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", + [], + ): + yield + + +@pytest.fixture(autouse=True) +async def setup_integration(hass: HomeAssistant) -> AsyncGenerator[None]: + """Set up Kitchen Sink integration.""" + with patch("homeassistant.components.backup.is_hassio", return_value=False): + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + yield + + +async def test_agents_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": "backup.local"}, {"agent_id": "kitchen_sink.syncer"}], + } + + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agents": [{"agent_id": "backup.local"}]} + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": "backup.local"}, {"agent_id": "kitchen_sink.syncer"}], + } + + +async def test_agents_list_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"]["backups"] == [ + { + "addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], + "agent_ids": ["kitchen_sink.syncer"], + "backup_id": "abc123", + "database_included": False, + "date": "1970-01-01T00:00:00Z", + "failed_agent_ids": [], + "folders": ["media", "share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Kitchen sink syncer", + "protected": False, + "size": 1234, + "with_strategy_settings": False, + } + ] + + +async def test_agents_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test 
downloading a backup.""" + client = await hass_client() + + resp = await client.get("/api/backup/download/abc123?agent_id=kitchen_sink.syncer") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + +async def test_agents_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + hass_supervisor_access_token: str, +) -> None: + """Test agent upload backup.""" + ws_client = await hass_ws_client(hass, hass_supervisor_access_token) + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + + with ( + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + ): + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=kitchen_sink.syncer", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert f"Uploading backup {backup_id}" in caplog.text + + await ws_client.send_json_auto_id({"type": "backup/info"}) + response = await ws_client.receive_json() + + assert response["success"] + backup_list = response["result"]["backups"] + assert len(backup_list) == 2 + assert backup_list[1] == { + "addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], + "agent_ids": ["kitchen_sink.syncer"], + "backup_id": "test-backup", + "database_included": True, + "date": "1970-01-01T00:00:00.000Z", + "failed_agent_ids": [], + "folders": ["media", "share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + "size": 0.0, + "with_strategy_settings": False, + } + + +async def test_agent_delete_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abc123" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert f"Deleted backup {backup_id}" in caplog.text + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + backup_list = response["result"]["backups"] + assert not backup_list diff --git a/tests/components/kitchen_sink/test_init.py b/tests/components/kitchen_sink/test_init.py index 0575141bb3b..b832577a48a 100644 --- a/tests/components/kitchen_sink/test_init.py +++ b/tests/components/kitchen_sink/test_init.py @@ -5,6 +5,7 @@ from http import HTTPStatus from unittest.mock import ANY import pytest +import voluptuous as vol from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.components.recorder import get_instance @@ -324,3 +325,24 @@ async def test_issues_created( }, ] } + + +async def test_service( + hass: HomeAssistant, +) -> None: + """Test we can call the service.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + + with pytest.raises(vol.error.MultipleInvalid): + await 
hass.services.async_call(DOMAIN, "test_service_1", blocking=True) + + await hass.services.async_call( + DOMAIN, "test_service_1", {"field_1": 1, "field_2": "auto"}, blocking=True + ) + + await hass.services.async_call( + DOMAIN, + "test_service_1", + {"field_1": 1, "field_2": "auto", "field_3": 1, "field_4": "forwards"}, + blocking=True, + ) diff --git a/tests/components/kitchen_sink/test_lawn_mower.py b/tests/components/kitchen_sink/test_lawn_mower.py index e1ba201a722..5bd4fc834f8 100644 --- a/tests/components/kitchen_sink/test_lawn_mower.py +++ b/tests/components/kitchen_sink/test_lawn_mower.py @@ -100,7 +100,7 @@ async def test_mower( await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == entity - assert state_changes[0].data["new_state"].state == str(next_activity.value) + assert state_changes[0].data["new_state"].state == next_activity.value @pytest.mark.parametrize( diff --git a/tests/components/kitchen_sink/test_lock.py b/tests/components/kitchen_sink/test_lock.py index e86300a4d35..a626cccd45c 100644 --- a/tests/components/kitchen_sink/test_lock.py +++ b/tests/components/kitchen_sink/test_lock.py @@ -11,17 +11,9 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_LOCKED, - STATE_LOCKING, - STATE_UNLOCKED, - STATE_UNLOCKING, -) -from homeassistant.const import ( - ATTR_ENTITY_ID, - EVENT_STATE_CHANGED, - STATE_OPEN, - Platform, + LockState, ) +from homeassistant.const import ATTR_ENTITY_ID, EVENT_STATE_CHANGED, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -58,7 +50,7 @@ async def test_states(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: async def test_locking(hass: HomeAssistant) -> None: """Test the locking of a lock.""" state = hass.states.get(UNLOCKED_LOCK) - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -68,16 +60,16 @@ async def test_locking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == UNLOCKED_LOCK - assert state_changes[0].data["new_state"].state == STATE_LOCKING + assert state_changes[0].data["new_state"].state == LockState.LOCKING assert state_changes[1].data["entity_id"] == UNLOCKED_LOCK - assert state_changes[1].data["new_state"].state == STATE_LOCKED + assert state_changes[1].data["new_state"].state == LockState.LOCKED async def test_unlocking(hass: HomeAssistant) -> None: """Test the unlocking of a lock.""" state = hass.states.get(LOCKED_LOCK) - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -87,10 +79,10 @@ async def test_unlocking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == LOCKED_LOCK - assert state_changes[0].data["new_state"].state == STATE_UNLOCKING + assert state_changes[0].data["new_state"].state == LockState.UNLOCKING assert state_changes[1].data["entity_id"] == LOCKED_LOCK - assert state_changes[1].data["new_state"].state == STATE_UNLOCKED + assert state_changes[1].data["new_state"].state == LockState.UNLOCKED async def test_opening_mocked(hass: HomeAssistant) -> None: @@ -108,4 +100,4 @@ async def test_opening(hass: HomeAssistant) -> None: LOCK_DOMAIN, SERVICE_OPEN, {ATTR_ENTITY_ID: OPENABLE_LOCK}, blocking=True ) state 
= hass.states.get(OPENABLE_LOCK) - assert state.state == STATE_OPEN + assert state.state == LockState.OPEN diff --git a/tests/components/knx/README.md b/tests/components/knx/README.md index 8778feb2251..ef8398b3d17 100644 --- a/tests/components/knx/README.md +++ b/tests/components/knx/README.md @@ -18,22 +18,22 @@ async def test_something(hass, knx): ## Asserting outgoing telegrams -All outgoing telegrams are pushed to an assertion queue. Assert them in order they were sent. +All outgoing telegrams are appended to an assertion list. Assert them in order they were sent or pass `ignore_order=True` to the assertion method. - `knx.assert_no_telegram` - Asserts that no telegram was sent (assertion queue is empty). + Asserts that no telegram was sent (assertion list is empty). - `knx.assert_telegram_count(count: int)` Asserts that `count` telegrams were sent. -- `knx.assert_read(group_address: str, response: int | tuple[int, ...] | None = None)` +- `knx.assert_read(group_address: str, response: int | tuple[int, ...] | None = None, ignore_order: bool = False)` Asserts that a GroupValueRead telegram was sent to `group_address`. - The telegram will be removed from the assertion queue. + The telegram will be removed from the assertion list. Optionally inject incoming GroupValueResponse telegram after reception to clear the value reader waiting task. This can also be done manually with `knx.receive_response`. -- `knx.assert_response(group_address: str, payload: int | tuple[int, ...])` +- `knx.assert_response(group_address: str, payload: int | tuple[int, ...], ignore_order: bool = False)` Asserts that a GroupValueResponse telegram with `payload` was sent to `group_address`. - The telegram will be removed from the assertion queue. -- `knx.assert_write(group_address: str, payload: int | tuple[int, ...])` + The telegram will be removed from the assertion list. +- `knx.assert_write(group_address: str, payload: int | tuple[int, ...], ignore_order: bool = False)` Asserts that a GroupValueWrite telegram with `payload` was sent to `group_address`. - The telegram will be removed from the assertion queue. + The telegram will be removed from the assertion list. Change some states or call some services and assert outgoing telegrams. 
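Read together with the climate tests added further down in this change, the helpers above compose into a simple pattern: set up the integration, answer the StateUpdater reads, call a service, then assert the outgoing telegram. Below is a minimal sketch of that pattern, including the new `ignore_order` flag; the group addresses, schema keys, and the raw DPT 9 payload constants are illustrative, mirroring the fan-speed tests in this change rather than any real fixture.

```python
"""Sketch: exercising the KNXTestKit assertion helpers (illustrative addresses)."""

from homeassistant.components.climate import HVACMode
from homeassistant.components.knx.schema import ClimateSchema
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant

from .conftest import KNXTestKit

RAW_FLOAT_21_0 = (0x0C, 0x1A)  # DPT 9.001 encoding of 21.0
RAW_FLOAT_22_0 = (0x0C, 0x4C)  # DPT 9.001 encoding of 22.0


async def test_fan_mode_roundtrip(hass: HomeAssistant, knx: KNXTestKit) -> None:
    """Set a fan mode and assert the outgoing GroupValueWrite telegram."""
    await knx.setup_integration(
        {
            ClimateSchema.PLATFORM: {
                CONF_NAME: "test",
                ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3",
                ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4",
                ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5",
                ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6",
                ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7",
                ClimateSchema.CONF_FAN_SPEED_MODE: "step",
                ClimateSchema.CONF_FAN_MAX_STEP: 3,
            }
        }
    )

    # StateUpdater reads current and target temperature on startup; the new
    # `ignore_order` flag lets us assert both reads without relying on order.
    await knx.assert_read("1/2/5", ignore_order=True)
    await knx.assert_read("1/2/3", ignore_order=True)
    await knx.receive_response("1/2/5", RAW_FLOAT_22_0)
    await knx.receive_response("1/2/3", RAW_FLOAT_21_0)

    # Fan speed state is read once the entity is initialized.
    await knx.assert_read("1/2/7")
    await knx.receive_response("1/2/7", (0x01,))
    knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="low")

    # Calling a service must produce exactly one GroupValueWrite telegram.
    await hass.services.async_call(
        "climate",
        "set_fan_mode",
        {"entity_id": "climate.test", "fan_mode": "medium"},
        blocking=True,
    )
    await knx.assert_write("1/2/6", (0x02,))
    await knx.assert_no_telegram()
```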
diff --git a/tests/components/knx/__init__.py b/tests/components/knx/__init__.py index 76ae91a193d..fc19741d190 100644 --- a/tests/components/knx/__init__.py +++ b/tests/components/knx/__init__.py @@ -1,7 +1,8 @@ """Tests for the KNX integration.""" -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine +from typing import Any from homeassistant.helpers import entity_registry as er -KnxEntityGenerator = Callable[..., Awaitable[er.RegistryEntry]] +type KnxEntityGenerator = Callable[..., Coroutine[Any, Any, er.RegistryEntry]] diff --git a/tests/components/knx/conftest.py b/tests/components/knx/conftest.py index 19f2bc4d845..c0ec1dd9b9a 100644 --- a/tests/components/knx/conftest.py +++ b/tests/components/knx/conftest.py @@ -57,9 +57,9 @@ class KNXTestKit: self.hass: HomeAssistant = hass self.mock_config_entry: MockConfigEntry = mock_config_entry self.xknx: XKNX - # outgoing telegrams will be put in the Queue instead of sent to the interface + # outgoing telegrams will be put in the List instead of sent to the interface # telegrams to an InternalGroupAddress won't be queued here - self._outgoing_telegrams: asyncio.Queue = asyncio.Queue() + self._outgoing_telegrams: list[Telegram] = [] def assert_state(self, entity_id: str, state: str, **attributes) -> None: """Assert the state of an entity.""" @@ -76,7 +76,7 @@ class KNXTestKit: async def patch_xknx_start(): """Patch `xknx.start` for unittests.""" self.xknx.cemi_handler.send_telegram = AsyncMock( - side_effect=self._outgoing_telegrams.put + side_effect=self._outgoing_telegrams.append ) # after XKNX.__init__() to not overwrite it by the config entry again # before StateUpdater starts to avoid slow down of tests @@ -117,24 +117,22 @@ class KNXTestKit: ######################## def _list_remaining_telegrams(self) -> str: - """Return a string containing remaining outgoing telegrams in test Queue. One per line.""" - remaining_telegrams = [] - while not self._outgoing_telegrams.empty(): - remaining_telegrams.append(self._outgoing_telegrams.get_nowait()) - return "\n".join(map(str, remaining_telegrams)) + """Return a string containing remaining outgoing telegrams in test List.""" + return "\n".join(map(str, self._outgoing_telegrams)) async def assert_no_telegram(self) -> None: - """Assert if every telegram in test Queue was checked.""" + """Assert if every telegram in test List was checked.""" await self.hass.async_block_till_done() - assert self._outgoing_telegrams.empty(), ( - f"Found remaining unasserted Telegrams: {self._outgoing_telegrams.qsize()}\n" + remaining_telegram_count = len(self._outgoing_telegrams) + assert not remaining_telegram_count, ( + f"Found remaining unasserted Telegrams: {remaining_telegram_count}\n" f"{self._list_remaining_telegrams()}" ) async def assert_telegram_count(self, count: int) -> None: - """Assert outgoing telegram count in test Queue.""" + """Assert outgoing telegram count in test List.""" await self.hass.async_block_till_done() - actual_count = self._outgoing_telegrams.qsize() + actual_count = len(self._outgoing_telegrams) assert actual_count == count, ( f"Outgoing telegrams: {actual_count} - Expected: {count}\n" f"{self._list_remaining_telegrams()}" @@ -149,52 +147,79 @@ class KNXTestKit: group_address: str, payload: int | tuple[int, ...] | None, apci_type: type[APCI], + ignore_order: bool = False, ) -> None: - """Assert outgoing telegram. One by one in timely order.""" + """Assert outgoing telegram. 
Optionally in timely order.""" await self.xknx.telegrams.join() - try: - telegram = self._outgoing_telegrams.get_nowait() - except asyncio.QueueEmpty as err: + if not self._outgoing_telegrams: raise AssertionError( f"No Telegram found. Expected: {apci_type.__name__} -" f" {group_address} - {payload}" - ) from err + ) + _expected_ga = GroupAddress(group_address) + if ignore_order: + for telegram in self._outgoing_telegrams: + if ( + telegram.destination_address == _expected_ga + and isinstance(telegram.payload, apci_type) + and (payload is None or telegram.payload.value.value == payload) + ): + self._outgoing_telegrams.remove(telegram) + return + raise AssertionError( + f"Telegram not found. Expected: {apci_type.__name__} -" + f" {group_address} - {payload}" + f"\nUnasserted telegrams:\n{self._list_remaining_telegrams()}" + ) + + telegram = self._outgoing_telegrams.pop(0) assert isinstance( telegram.payload, apci_type ), f"APCI type mismatch in {telegram} - Expected: {apci_type.__name__}" - assert ( - str(telegram.destination_address) == group_address + telegram.destination_address == _expected_ga ), f"Group address mismatch in {telegram} - Expected: {group_address}" - if payload is not None: assert ( telegram.payload.value.value == payload # type: ignore[attr-defined] ), f"Payload mismatch in {telegram} - Expected: {payload}" async def assert_read( - self, group_address: str, response: int | tuple[int, ...] | None = None + self, + group_address: str, + response: int | tuple[int, ...] | None = None, + ignore_order: bool = False, ) -> None: - """Assert outgoing GroupValueRead telegram. One by one in timely order. + """Assert outgoing GroupValueRead telegram. Optionally in timely order. Optionally inject incoming GroupValueResponse telegram after reception. """ - await self.assert_telegram(group_address, None, GroupValueRead) + await self.assert_telegram(group_address, None, GroupValueRead, ignore_order) if response is not None: await self.receive_response(group_address, response) async def assert_response( - self, group_address: str, payload: int | tuple[int, ...] + self, + group_address: str, + payload: int | tuple[int, ...], + ignore_order: bool = False, ) -> None: - """Assert outgoing GroupValueResponse telegram. One by one in timely order.""" - await self.assert_telegram(group_address, payload, GroupValueResponse) + """Assert outgoing GroupValueResponse telegram. Optionally in timely order.""" + await self.assert_telegram( + group_address, payload, GroupValueResponse, ignore_order + ) async def assert_write( - self, group_address: str, payload: int | tuple[int, ...] + self, + group_address: str, + payload: int | tuple[int, ...], + ignore_order: bool = False, ) -> None: - """Assert outgoing GroupValueWrite telegram. One by one in timely order.""" - await self.assert_telegram(group_address, payload, GroupValueWrite) + """Assert outgoing GroupValueWrite telegram. 
Optionally in timely order.""" + await self.assert_telegram( + group_address, payload, GroupValueWrite, ignore_order + ) #################### # Incoming telegrams diff --git a/tests/components/knx/fixtures/config_store.json b/tests/components/knx/fixtures/config_store.json index 971b692ade1..5eabcfa87f9 100644 --- a/tests/components/knx/fixtures/config_store.json +++ b/tests/components/knx/fixtures/config_store.json @@ -23,7 +23,26 @@ } } }, - "light": {} + "light": { + "knx_es_01J85ZKTFHSZNG4X9DYBE592TF": { + "entity": { + "name": "test", + "device_info": null, + "entity_category": "config" + }, + "knx": { + "color_temp_min": 2700, + "color_temp_max": 6000, + "_light_color_mode_schema": "default", + "ga_switch": { + "write": "1/1/21", + "state": "1/0/21", + "passive": [] + }, + "sync_state": true + } + } + } } } } diff --git a/tests/components/knx/test_button.py b/tests/components/knx/test_button.py index a05752eced1..38ccb36200b 100644 --- a/tests/components/knx/test_button.py +++ b/tests/components/knx/test_button.py @@ -6,7 +6,11 @@ import logging from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.knx.const import CONF_PAYLOAD_LENGTH, DOMAIN, KNX_ADDRESS +from homeassistant.components.knx.const import ( + CONF_PAYLOAD_LENGTH, + KNX_ADDRESS, + KNX_MODULE_KEY, +) from homeassistant.components.knx.schema import ButtonSchema from homeassistant.const import CONF_NAME, CONF_PAYLOAD, CONF_TYPE from homeassistant.core import HomeAssistant @@ -134,4 +138,4 @@ async def test_button_invalid( assert record.levelname == "ERROR" assert "Setup failed for 'knx': Invalid config." in record.message assert hass.states.get("button.test") is None - assert hass.data.get(DOMAIN) is None + assert hass.data.get(KNX_MODULE_KEY) is None diff --git a/tests/components/knx/test_climate.py b/tests/components/knx/test_climate.py index ec0498dc447..8fb348f1724 100644 --- a/tests/components/knx/test_climate.py +++ b/tests/components/knx/test_climate.py @@ -439,3 +439,414 @@ async def test_command_value_idle_mode(hass: HomeAssistant, knx: KNXTestKit) -> knx.assert_state( "climate.test", HVACMode.HEAT, command_value=0, hvac_action=STATE_IDLE ) + + +async def test_fan_speed_3_steps(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX climate fan speed 3 steps.""" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", + ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", + ClimateSchema.CONF_FAN_SPEED_MODE: "step", + ClimateSchema.CONF_FAN_MAX_STEP: 3, + } + } + ) + + # read states state updater + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + + # StateUpdater initialize state + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + + # Query status + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) + knx.assert_state( + "climate.test", + HVACMode.HEAT, + fan_mode="low", + fan_modes=["off", "low", "medium", "high"], + ) + + # set fan mode + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "medium"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x02,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="medium") + + # turn off + await 
hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "off"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x0,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") + + +async def test_fan_speed_2_steps(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX climate fan speed 2 steps.""" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", + ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", + ClimateSchema.CONF_FAN_SPEED_MODE: "step", + ClimateSchema.CONF_FAN_MAX_STEP: 2, + } + } + ) + + # read states state updater + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + + # StateUpdater initialize state + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + + # Query status + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) + knx.assert_state( + "climate.test", HVACMode.HEAT, fan_mode="low", fan_modes=["off", "low", "high"] + ) + + # set fan mode + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "high"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x02,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="high") + + # turn off + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "off"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x0,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") + + +async def test_fan_speed_1_step(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX climate fan speed 1 step.""" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", + ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", + ClimateSchema.CONF_FAN_SPEED_MODE: "step", + ClimateSchema.CONF_FAN_MAX_STEP: 1, + } + } + ) + + # read states state updater + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + + # StateUpdater initialize state + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + + # Query status + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) + knx.assert_state( + "climate.test", HVACMode.HEAT, fan_mode="on", fan_modes=["off", "on"] + ) + + # turn off + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "off"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x0,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") + + +async def test_fan_speed_5_steps(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX climate fan speed 5 steps.""" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", + 
ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", + ClimateSchema.CONF_FAN_SPEED_MODE: "step", + ClimateSchema.CONF_FAN_MAX_STEP: 5, + } + } + ) + + # read states state updater + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + + # StateUpdater initialize state + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + + # Query status + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) + knx.assert_state( + "climate.test", + HVACMode.HEAT, + fan_mode="1", + fan_modes=["off", "1", "2", "3", "4", "5"], + ) + + # set fan mode + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "4"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x04,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="4") + + # turn off + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "off"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x0,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") + + +async def test_fan_speed_percentage(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX climate fan speed percentage.""" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", + ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", + ClimateSchema.CONF_FAN_SPEED_MODE: "percent", + } + } + ) + + # read states state updater + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + + # StateUpdater initialize state + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + + # Query status + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (84,)) # 84 / 255 = 33% + knx.assert_state( + "climate.test", + HVACMode.HEAT, + fan_mode="low", + fan_modes=["off", "low", "medium", "high"], + ) + + # set fan mode + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "medium"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (168,)) # 168 / 255 = 66% + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="medium") + + # turn off + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "off"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x0,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") + + # check fan mode that is not in the fan modes list + await knx.receive_write("1/2/6", (127,)) # 127 / 255 = 50% + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="medium") + + # check FAN_OFF is not picked when fan_speed is closest to zero + await knx.receive_write("1/2/6", (3,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="low") + + +async def test_fan_speed_percentage_4_steps( + hass: HomeAssistant, knx: KNXTestKit +) -> None: + """Test KNX climate fan speed percentage with 4 steps.""" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", + 
ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", + ClimateSchema.CONF_FAN_SPEED_MODE: "percent", + ClimateSchema.CONF_FAN_MAX_STEP: 4, + } + } + ) + + # read states state updater + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + + # StateUpdater initialize state + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + + # Query status + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (64,)) # 64 / 255 = 25% + knx.assert_state( + "climate.test", + HVACMode.HEAT, + fan_mode="25%", + fan_modes=["off", "25%", "50%", "75%", "100%"], + ) + + # set fan mode + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "50%"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (128,)) # 128 / 255 = 50% + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="50%") + + # turn off + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "off"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x0,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") + + # check fan mode that is not in the fan modes list + await knx.receive_write("1/2/6", (168,)) # 168 / 255 = 66% + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="75%") + + +async def test_fan_speed_zero_mode_auto(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX climate fan speed 3 steps.""" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", + ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", + ClimateSchema.CONF_FAN_MAX_STEP: 3, + ClimateSchema.CONF_FAN_SPEED_MODE: "step", + ClimateSchema.CONF_FAN_ZERO_MODE: "auto", + } + } + ) + + # read states state updater + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + + # StateUpdater initialize state + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + + # Query status + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) + knx.assert_state( + "climate.test", + HVACMode.HEAT, + fan_mode="low", + fan_modes=["auto", "low", "medium", "high"], + ) + + # set auto + await hass.services.async_call( + "climate", + "set_fan_mode", + {"entity_id": "climate.test", "fan_mode": "auto"}, + blocking=True, + ) + await knx.assert_write("1/2/6", (0x0,)) + knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="auto") + + +async def test_climate_humidity(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX climate humidity.""" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_HUMIDITY_STATE_ADDRESS: "1/2/16", + } + } + ) + + # read states state updater + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + + # StateUpdater initialize state + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + + # Query status + await knx.assert_read("1/2/16") + await knx.receive_response("1/2/16", (0x14, 0x74)) + knx.assert_state( + "climate.test", + HVACMode.HEAT, + current_humidity=45.6, + ) diff --git 
a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index 78751c7e641..2187721a518 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -913,7 +913,7 @@ async def test_form_with_automatic_connection_handling( CONF_KNX_ROUTE_BACK: False, CONF_KNX_TUNNEL_ENDPOINT_IA: None, CONF_KNX_STATE_UPDATER: True, - CONF_KNX_TELEGRAM_LOG_SIZE: 200, + CONF_KNX_TELEGRAM_LOG_SIZE: 1000, } knx_setup.assert_called_once() @@ -1210,7 +1210,7 @@ async def test_options_flow_connection_type( CONF_KNX_SECURE_DEVICE_AUTHENTICATION: None, CONF_KNX_SECURE_USER_ID: None, CONF_KNX_SECURE_USER_PASSWORD: None, - CONF_KNX_TELEGRAM_LOG_SIZE: 200, + CONF_KNX_TELEGRAM_LOG_SIZE: 1000, } diff --git a/tests/components/knx/test_cover.py b/tests/components/knx/test_cover.py index 2d2b72e9015..0604b575c5b 100644 --- a/tests/components/knx/test_cover.py +++ b/tests/components/knx/test_cover.py @@ -1,7 +1,8 @@ """Test KNX cover.""" +from homeassistant.components.cover import CoverState from homeassistant.components.knx.schema import CoverSchema -from homeassistant.const import CONF_NAME, STATE_CLOSING +from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from .conftest import KNXTestKit @@ -72,7 +73,7 @@ async def test_cover_basic(hass: HomeAssistant, knx: KNXTestKit) -> None: knx.assert_state( "cover.test", - STATE_CLOSING, + CoverState.CLOSING, ) assert len(events) == 1 diff --git a/tests/components/knx/test_date.py b/tests/components/knx/test_date.py index d3b1ff2058e..1e6e5102bcf 100644 --- a/tests/components/knx/test_date.py +++ b/tests/components/knx/test_date.py @@ -1,6 +1,10 @@ """Test KNX date.""" -from homeassistant.components.date import ATTR_DATE, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.date import ( + ATTR_DATE, + DOMAIN as DATE_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.components.knx.const import CONF_RESPOND_TO_READ, KNX_ADDRESS from homeassistant.components.knx.schema import DateSchema from homeassistant.const import CONF_NAME @@ -24,7 +28,7 @@ async def test_date(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # set value await hass.services.async_call( - DOMAIN, + DATE_DOMAIN, SERVICE_SET_VALUE, {"entity_id": "date.test", ATTR_DATE: "1999-03-31"}, blocking=True, diff --git a/tests/components/knx/test_datetime.py b/tests/components/knx/test_datetime.py index 4b66769a8a3..025145ad1a3 100644 --- a/tests/components/knx/test_datetime.py +++ b/tests/components/knx/test_datetime.py @@ -1,6 +1,10 @@ """Test KNX date.""" -from homeassistant.components.datetime import ATTR_DATETIME, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.datetime import ( + ATTR_DATETIME, + DOMAIN as DATETIME_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.components.knx.const import CONF_RESPOND_TO_READ, KNX_ADDRESS from homeassistant.components.knx.schema import DateTimeSchema from homeassistant.const import CONF_NAME @@ -27,7 +31,7 @@ async def test_datetime(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # set value await hass.services.async_call( - DOMAIN, + DATETIME_DOMAIN, SERVICE_SET_VALUE, {"entity_id": "datetime.test", ATTR_DATETIME: "2020-01-02T03:04:05+00:00"}, blocking=True, diff --git a/tests/components/knx/test_device.py b/tests/components/knx/test_device.py index 330fd854a50..04ff02f0611 100644 --- a/tests/components/knx/test_device.py +++ b/tests/components/knx/test_device.py @@ -58,7 +58,8 @@ async def test_remove_device( await knx.setup_integration({}) client = await 
hass_ws_client(hass) - await knx.assert_read("1/0/45", response=True) + await knx.assert_read("1/0/21", response=True, ignore_order=True) # test light + await knx.assert_read("1/0/45", response=True, ignore_order=True) # test switch assert hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") test_device = device_registry.async_get_device( diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index c4d0acf0ce2..0fd790a3e33 100644 --- a/tests/components/knx/test_expose.py +++ b/tests/components/knx/test_expose.py @@ -108,6 +108,11 @@ async def test_expose_attribute(hass: HomeAssistant, knx: KNXTestKit) -> None: await hass.async_block_till_done() await knx.assert_telegram_count(0) + # Ignore "unavailable" state + hass.states.async_set(entity_id, "unavailable", {attribute: None}) + await hass.async_block_till_done() + await knx.assert_telegram_count(0) + async def test_expose_attribute_with_default( hass: HomeAssistant, knx: KNXTestKit @@ -131,7 +136,7 @@ async def test_expose_attribute_with_default( await knx.receive_read("1/1/8") await knx.assert_response("1/1/8", (0,)) - # Change state to "on"; no attribute + # Change state to "on"; no attribute -> default hass.states.async_set(entity_id, "on", {}) await hass.async_block_till_done() await knx.assert_write("1/1/8", (0,)) @@ -146,6 +151,11 @@ async def test_expose_attribute_with_default( await hass.async_block_till_done() await knx.assert_no_telegram() + # Use default for "unavailable" state + hass.states.async_set(entity_id, "unavailable") + await hass.async_block_till_done() + await knx.assert_write("1/1/8", (0,)) + # Change state and attribute hass.states.async_set(entity_id, "on", {attribute: 3}) await hass.async_block_till_done() @@ -290,8 +300,18 @@ async def test_expose_value_template( assert "Error rendering value template for KNX expose" in caplog.text +@pytest.mark.parametrize( + "invalid_attribute", + [ + 101.0, + "invalid", # can't cast to float + ], +) async def test_expose_conversion_exception( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, knx: KNXTestKit + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + knx: KNXTestKit, + invalid_attribute: str, ) -> None: """Test expose throws exception.""" @@ -313,16 +333,17 @@ async def test_expose_conversion_exception( await knx.receive_read("1/1/8") await knx.assert_response("1/1/8", (3,)) + caplog.clear() # Change attribute: Expect no exception hass.states.async_set( entity_id, "on", - {attribute: 101}, + {attribute: invalid_attribute}, ) await hass.async_block_till_done() await knx.assert_no_telegram() assert ( - 'Could not expose fake.entity fake_attribute value "101.0" to KNX:' + f'Could not expose fake.entity fake_attribute value "{invalid_attribute}" to KNX:' in caplog.text ) diff --git a/tests/components/knx/test_light.py b/tests/components/knx/test_light.py index e2e4a673a0d..6ba6090d60d 100644 --- a/tests/components/knx/test_light.py +++ b/tests/components/knx/test_light.py @@ -19,8 +19,9 @@ from homeassistant.components.light import ( ATTR_RGBW_COLOR, ColorMode, ) -from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON +from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON, EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from . 
import KnxEntityGenerator from .conftest import KNXTestKit @@ -40,7 +41,11 @@ async def test_light_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: } ) - knx.assert_state("light.test", STATE_OFF) + knx.assert_state( + "light.test", + STATE_OFF, + supported_color_modes=[ColorMode.ONOFF], + ) # turn on light await hass.services.async_call( "light", @@ -109,6 +114,7 @@ async def test_light_brightness(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=80, + supported_color_modes=[ColorMode.BRIGHTNESS], color_mode=ColorMode.BRIGHTNESS, ) # receive brightness changes from KNX @@ -164,6 +170,7 @@ async def test_light_color_temp_absolute(hass: HomeAssistant, knx: KNXTestKit) - "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.COLOR_TEMP], color_mode=ColorMode.COLOR_TEMP, color_temp=370, color_temp_kelvin=2700, @@ -226,6 +233,7 @@ async def test_light_color_temp_relative(hass: HomeAssistant, knx: KNXTestKit) - "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.COLOR_TEMP], color_mode=ColorMode.COLOR_TEMP, color_temp=250, color_temp_kelvin=4000, @@ -299,6 +307,7 @@ async def test_light_hs_color(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.HS], color_mode=ColorMode.HS, hs_color=(360, 100), ) @@ -374,6 +383,7 @@ async def test_light_xyy_color(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=204, + supported_color_modes=[ColorMode.XY], color_mode=ColorMode.XY, xy_color=(0.8, 0.8), ) @@ -456,6 +466,7 @@ async def test_light_xyy_color_with_brightness( "light.test", STATE_ON, brightness=255, # brightness form xyy_color ignored when extra brightness GA is used + supported_color_modes=[ColorMode.XY], color_mode=ColorMode.XY, xy_color=(0.8, 0.8), ) @@ -542,6 +553,7 @@ async def test_light_rgb_individual(hass: HomeAssistant, knx: KNXTestKit) -> Non "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGB], color_mode=ColorMode.RGB, rgb_color=(255, 255, 255), ) @@ -698,6 +710,7 @@ async def test_light_rgbw_individual( "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGBW], color_mode=ColorMode.RGBW, rgbw_color=(0, 0, 0, 255), ) @@ -852,6 +865,7 @@ async def test_light_rgb(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGB], color_mode=ColorMode.RGB, rgb_color=(255, 255, 255), ) @@ -960,6 +974,7 @@ async def test_light_rgbw(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGBW], color_mode=ColorMode.RGBW, rgbw_color=(255, 101, 102, 103), ) @@ -1077,6 +1092,7 @@ async def test_light_rgbw_brightness(hass: HomeAssistant, knx: KNXTestKit) -> No "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGBW], color_mode=ColorMode.RGBW, rgbw_color=(255, 101, 102, 103), ) @@ -1159,7 +1175,7 @@ async def test_light_ui_create( knx: KNXTestKit, create_ui_entity: KnxEntityGenerator, ) -> None: - """Test creating a switch.""" + """Test creating a light.""" await knx.setup_integration({}) await create_ui_entity( platform=Platform.LIGHT, @@ -1173,8 +1189,12 @@ async def test_light_ui_create( # created entity sends read-request to KNX bus await knx.assert_read("2/2/2") await knx.receive_response("2/2/2", True) - state = hass.states.get("light.test") - assert state.state is STATE_ON + knx.assert_state( + "light.test", + 
STATE_ON, + supported_color_modes=[ColorMode.ONOFF], + color_mode=ColorMode.ONOFF, + ) @pytest.mark.parametrize( @@ -1192,7 +1212,7 @@ async def test_light_ui_color_temp( color_temp_mode: str, raw_ct: tuple[int, ...], ) -> None: - """Test creating a switch.""" + """Test creating a color-temp light.""" await knx.setup_integration({}) await create_ui_entity( platform=Platform.LIGHT, @@ -1215,6 +1235,124 @@ async def test_light_ui_color_temp( blocking=True, ) await knx.assert_write("3/3/3", raw_ct) - state = hass.states.get("light.test") - assert state.state is STATE_ON - assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == pytest.approx(4200, abs=1) + knx.assert_state( + "light.test", + STATE_ON, + supported_color_modes=[ColorMode.COLOR_TEMP], + color_mode=ColorMode.COLOR_TEMP, + color_temp_kelvin=pytest.approx(4200, abs=1), + ) + + +async def test_light_ui_multi_mode( + hass: HomeAssistant, + knx: KNXTestKit, + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test creating a light with multiple color modes.""" + await knx.setup_integration({}) + await create_ui_entity( + platform=Platform.LIGHT, + entity_data={"name": "test"}, + knx_data={ + "color_temp_min": 2700, + "color_temp_max": 6000, + "_light_color_mode_schema": "default", + "ga_switch": { + "write": "1/1/1", + "passive": [], + "state": "2/2/2", + }, + "sync_state": True, + "ga_brightness": { + "write": "0/6/0", + "state": "0/6/1", + "passive": [], + }, + "ga_color_temp": { + "write": "0/6/2", + "dpt": "7.600", + "state": "0/6/3", + "passive": [], + }, + "ga_color": { + "write": "0/6/4", + "dpt": "251.600", + "state": "0/6/5", + "passive": [], + }, + }, + ) + await knx.assert_read("2/2/2", True) + await knx.assert_read("0/6/1", (0xFF,)) + await knx.assert_read("0/6/5", (0xFF, 0x65, 0x66, 0x67, 0x00, 0x0F)) + await knx.assert_read("0/6/3", (0x12, 0x34)) + + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": "light.test", + ATTR_COLOR_NAME: "hotpink", + }, + blocking=True, + ) + await knx.assert_write("0/6/4", (255, 0, 128, 178, 0, 15)) + knx.assert_state( + "light.test", + STATE_ON, + brightness=255, + color_temp_kelvin=None, + rgbw_color=(255, 0, 128, 178), + supported_color_modes=[ + ColorMode.COLOR_TEMP, + ColorMode.RGBW, + ], + color_mode=ColorMode.RGBW, + ) + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": "light.test", + ATTR_COLOR_TEMP_KELVIN: 4200, + }, + blocking=True, + ) + await knx.assert_write("0/6/2", (0x10, 0x68)) + knx.assert_state( + "light.test", + STATE_ON, + brightness=255, + color_temp_kelvin=4200, + rgbw_color=None, + supported_color_modes=[ + ColorMode.COLOR_TEMP, + ColorMode.RGBW, + ], + color_mode=ColorMode.COLOR_TEMP, + ) + + +async def test_light_ui_load( + hass: HomeAssistant, + knx: KNXTestKit, + load_config_store: None, + entity_registry: er.EntityRegistry, +) -> None: + """Test loading a light from storage.""" + await knx.setup_integration({}) + + await knx.assert_read("1/0/21", response=True, ignore_order=True) + # unrelated switch in config store + await knx.assert_read("1/0/45", response=True, ignore_order=True) + + knx.assert_state( + "light.test", + STATE_ON, + supported_color_modes=[ColorMode.ONOFF], + color_mode=ColorMode.ONOFF, + ) + + entity = entity_registry.async_get("light.test") + assert entity.entity_category is EntityCategory.CONFIG diff --git a/tests/components/knx/test_notify.py b/tests/components/knx/test_notify.py index b481675140b..c7e33dd5fe4 100644 --- a/tests/components/knx/test_notify.py +++ 
b/tests/components/knx/test_notify.py @@ -9,74 +9,6 @@ from homeassistant.core import HomeAssistant from .conftest import KNXTestKit -async def test_legacy_notify_service_simple( - hass: HomeAssistant, knx: KNXTestKit -) -> None: - """Test KNX notify can send to one device.""" - await knx.setup_integration( - { - NotifySchema.PLATFORM: { - CONF_NAME: "test", - KNX_ADDRESS: "1/0/0", - } - } - ) - await hass.services.async_call( - "notify", "notify", {"target": "test", "message": "I love KNX"}, blocking=True - ) - await knx.assert_write( - "1/0/0", - (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), - ) - await hass.services.async_call( - "notify", - "notify", - { - "target": "test", - "message": "I love KNX, but this text is too long for KNX, poor KNX", - }, - blocking=True, - ) - await knx.assert_write( - "1/0/0", - (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), - ) - - -async def test_legacy_notify_service_multiple_sends_to_all_with_different_encodings( - hass: HomeAssistant, knx: KNXTestKit -) -> None: - """Test KNX notify `type` configuration.""" - await knx.setup_integration( - { - NotifySchema.PLATFORM: [ - { - CONF_NAME: "ASCII", - KNX_ADDRESS: "1/0/0", - CONF_TYPE: "string", - }, - { - CONF_NAME: "Latin-1", - KNX_ADDRESS: "1/0/1", - CONF_TYPE: "latin_1", - }, - ] - } - ) - await hass.services.async_call( - "notify", "notify", {"message": "Gänsefüßchen"}, blocking=True - ) - await knx.assert_write( - "1/0/0", - # "G?nsef??chen" - (71, 63, 110, 115, 101, 102, 63, 63, 99, 104, 101, 110, 0, 0), - ) - await knx.assert_write( - "1/0/1", - (71, 228, 110, 115, 101, 102, 252, 223, 99, 104, 101, 110, 0, 0), - ) - - async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: """Test KNX notify can send to one device.""" await knx.setup_integration( diff --git a/tests/components/knx/test_repairs.py b/tests/components/knx/test_repairs.py deleted file mode 100644 index 690d6e450cb..00000000000 --- a/tests/components/knx/test_repairs.py +++ /dev/null @@ -1,84 +0,0 @@ -"""Test repairs for KNX integration.""" - -from http import HTTPStatus - -from homeassistant.components.knx.const import DOMAIN, KNX_ADDRESS -from homeassistant.components.knx.schema import NotifySchema -from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) -from homeassistant.const import CONF_NAME -from homeassistant.core import HomeAssistant -import homeassistant.helpers.issue_registry as ir - -from .conftest import KNXTestKit - -from tests.typing import ClientSessionGenerator - - -async def test_knx_notify_service_issue( - hass: HomeAssistant, - knx: KNXTestKit, - hass_client: ClientSessionGenerator, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the legacy notify service still works before migration and repair flow is triggered.""" - await knx.setup_integration( - { - NotifySchema.PLATFORM: { - CONF_NAME: "test", - KNX_ADDRESS: "1/0/0", - } - } - ) - http_client = await hass_client() - - # Assert no issue is present - assert len(issue_registry.issues) == 0 - - # Simulate legacy service being used - assert hass.services.has_service(NOTIFY_DOMAIN, NOTIFY_DOMAIN) - await hass.services.async_call( - NOTIFY_DOMAIN, - NOTIFY_DOMAIN, - service_data={"message": "It is too cold!", "target": "test"}, - blocking=True, - ) - await knx.assert_write( - "1/0/0", - (73, 116, 32, 105, 115, 32, 116, 111, 111, 32, 99, 111, 108, 100), - ) - - # Assert the issue is 
present - assert len(issue_registry.issues) == 1 - assert issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_{DOMAIN}_notify", - ) - - # Test confirm step in repair flow - resp = await http_client.post( - RepairsFlowIndexView.url, - json={"handler": "notify", "issue_id": f"migrate_notify_{DOMAIN}_notify"}, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - resp = await http_client.post( - RepairsFlowResourceView.url.format(flow_id=flow_id), - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data["type"] == "create_entry" - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_{DOMAIN}_notify", - ) - assert len(issue_registry.issues) == 0 diff --git a/tests/components/knx/test_telegrams.py b/tests/components/knx/test_telegrams.py index 2eda718f5ac..883e8ccbb2d 100644 --- a/tests/components/knx/test_telegrams.py +++ b/tests/components/knx/test_telegrams.py @@ -6,8 +6,10 @@ from typing import Any import pytest -from homeassistant.components.knx import DOMAIN -from homeassistant.components.knx.const import CONF_KNX_TELEGRAM_LOG_SIZE +from homeassistant.components.knx.const import ( + CONF_KNX_TELEGRAM_LOG_SIZE, + KNX_MODULE_KEY, +) from homeassistant.components.knx.telegrams import TelegramDict from homeassistant.core import HomeAssistant @@ -39,7 +41,7 @@ MOCK_TELEGRAMS = [ "dpt_name": None, "payload": [1, 2, 3, 4], "source": "0.0.0", - "source_name": "", + "source_name": "Home Assistant", "telegramtype": "GroupValueWrite", "timestamp": MOCK_TIMESTAMP, "unit": None, @@ -76,7 +78,7 @@ async def test_store_telegam_history( ) await knx.assert_write("2/2/2", (1, 2, 3, 4)) - assert len(hass.data[DOMAIN].telegrams.recent_telegrams) == 2 + assert len(hass.data[KNX_MODULE_KEY].telegrams.recent_telegrams) == 2 with pytest.raises(KeyError): hass_storage["knx/telegrams_history.json"] @@ -93,7 +95,7 @@ async def test_load_telegam_history( """Test telegram history restoration.""" hass_storage["knx/telegrams_history.json"] = {"version": 1, "data": MOCK_TELEGRAMS} await knx.setup_integration({}) - loaded_telegrams = hass.data[DOMAIN].telegrams.recent_telegrams + loaded_telegrams = hass.data[KNX_MODULE_KEY].telegrams.recent_telegrams assert assert_telegram_history(loaded_telegrams) # TelegramDict "payload" is a tuple, this shall be restored when loading from JSON assert isinstance(loaded_telegrams[1]["payload"], tuple) @@ -114,4 +116,4 @@ async def test_remove_telegam_history( await knx.setup_integration({}, add_entry_to_hass=False) # Store.async_remove() is mocked by hass_storage - check that data was removed. 
assert "knx/telegrams_history.json" not in hass_storage - assert not hass.data[DOMAIN].telegrams.recent_telegrams + assert not hass.data[KNX_MODULE_KEY].telegrams.recent_telegrams diff --git a/tests/components/knx/test_time.py b/tests/components/knx/test_time.py index 9dc4c401ed8..05f84339742 100644 --- a/tests/components/knx/test_time.py +++ b/tests/components/knx/test_time.py @@ -2,7 +2,11 @@ from homeassistant.components.knx.const import CONF_RESPOND_TO_READ, KNX_ADDRESS from homeassistant.components.knx.schema import TimeSchema -from homeassistant.components.time import ATTR_TIME, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.time import ( + ATTR_TIME, + DOMAIN as TIME_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant, State @@ -24,7 +28,7 @@ async def test_time(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # set value await hass.services.async_call( - DOMAIN, + TIME_DOMAIN, SERVICE_SET_VALUE, {"entity_id": "time.test", ATTR_TIME: "01:02:03"}, blocking=True, diff --git a/tests/components/knx/test_websocket.py b/tests/components/knx/test_websocket.py index 309ea111709..a34f126e4f4 100644 --- a/tests/components/knx/test_websocket.py +++ b/tests/components/knx/test_websocket.py @@ -3,8 +3,11 @@ from typing import Any from unittest.mock import patch -from homeassistant.components.knx import DOMAIN, KNX_ADDRESS, SwitchSchema +import pytest + +from homeassistant.components.knx.const import KNX_ADDRESS, KNX_MODULE_KEY from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY +from homeassistant.components.knx.schema import SwitchSchema from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant @@ -64,7 +67,7 @@ async def test_knx_project_file_process( await knx.setup_integration({}) client = await hass_ws_client(hass) - assert not hass.data[DOMAIN].project.loaded + assert not hass.data[KNX_MODULE_KEY].project.loaded await client.send_json( { @@ -87,7 +90,7 @@ async def test_knx_project_file_process( parse_mock.assert_called_once_with() assert res["success"], res - assert hass.data[DOMAIN].project.loaded + assert hass.data[KNX_MODULE_KEY].project.loaded assert hass_storage[KNX_PROJECT_STORAGE_KEY]["data"] == _parse_result @@ -99,7 +102,7 @@ async def test_knx_project_file_process_error( """Test knx/project_file_process exception handling.""" await knx.setup_integration({}) client = await hass_ws_client(hass) - assert not hass.data[DOMAIN].project.loaded + assert not hass.data[KNX_MODULE_KEY].project.loaded await client.send_json( { @@ -120,7 +123,7 @@ async def test_knx_project_file_process_error( parse_mock.assert_called_once_with() assert res["error"], res - assert not hass.data[DOMAIN].project.loaded + assert not hass.data[KNX_MODULE_KEY].project.loaded async def test_knx_project_file_remove( @@ -134,13 +137,13 @@ async def test_knx_project_file_remove( await knx.setup_integration({}) assert hass_storage[KNX_PROJECT_STORAGE_KEY] client = await hass_ws_client(hass) - assert hass.data[DOMAIN].project.loaded + assert hass.data[KNX_MODULE_KEY].project.loaded await client.send_json({"id": 6, "type": "knx/project_file_remove"}) res = await client.receive_json() assert res["success"], res - assert not hass.data[DOMAIN].project.loaded + assert not hass.data[KNX_MODULE_KEY].project.loaded assert not hass_storage.get(KNX_PROJECT_STORAGE_KEY) @@ -153,7 +156,7 @@ async def test_knx_get_project( """Test retrieval of kxnproject from store.""" await 
knx.setup_integration({}) client = await hass_ws_client(hass) - assert hass.data[DOMAIN].project.loaded + assert hass.data[KNX_MODULE_KEY].project.loaded await client.send_json({"id": 3, "type": "knx/get_knx_project"}) res = await client.receive_json() @@ -177,6 +180,37 @@ async def test_knx_group_monitor_info_command( assert res["result"]["recent_telegrams"] == [] +async def test_knx_group_telegrams_command( + hass: HomeAssistant, knx: KNXTestKit, hass_ws_client: WebSocketGenerator +) -> None: + """Test knx/group_telegrams command.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "knx/group_telegrams"}) + res = await client.receive_json() + assert res["success"], res + assert res["result"] == {} + + # # get some telegrams to populate the cache + await knx.receive_write("1/1/1", True) + await knx.receive_read("2/2/2") # read telegram shall be ignored + await knx.receive_write("3/3/3", 0x34) + + await client.send_json_auto_id({"type": "knx/group_telegrams"}) + res = await client.receive_json() + assert res["success"], res + assert len(res["result"]) == 2 + assert "1/1/1" in res["result"] + assert res["result"]["1/1/1"]["destination"] == "1/1/1" + assert "3/3/3" in res["result"] + assert res["result"]["3/3/3"]["payload"] == 52 + assert res["result"]["3/3/3"]["telegramtype"] == "GroupValueWrite" + assert res["result"]["3/3/3"]["source"] == "1.2.3" + assert res["result"]["3/3/3"]["direction"] == "Incoming" + assert res["result"]["3/3/3"]["timestamp"] is not None + + async def test_knx_subscribe_telegrams_command_recent_telegrams( hass: HomeAssistant, knx: KNXTestKit, hass_ws_client: WebSocketGenerator ) -> None: @@ -355,3 +389,28 @@ async def test_knx_subscribe_telegrams_command_project( ) assert res["event"]["direction"] == "Incoming" assert res["event"]["timestamp"] is not None + + +@pytest.mark.parametrize( + "endpoint", + [ + "knx/info", # sync ws-command + "knx/get_knx_project", # async ws-command + ], +) +async def test_websocket_when_config_entry_unloaded( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + endpoint: str, +) -> None: + """Test websocket connection when config entry is unloaded.""" + await knx.setup_integration({}) + await hass.config_entries.async_unload(knx.mock_config_entry.entry_id) + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": endpoint}) + res = await client.receive_json() + assert not res["success"] + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"] == "KNX integration not loaded." 
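Note on the raw values in the percentage fan-speed tests earlier in this patch (test_fan_speed_percentage and test_fan_speed_percentage_4_steps): the bus payloads follow the 0-255 to 0-100 % scaling that the inline comments such as "84 / 255 = 33%" refer to. Below is a minimal, illustrative sketch of that conversion only; the helper names are ours and the exact rounding rule is an assumption, not the integration's implementation.

    # Illustrative sketch only (not part of the patch): reproduce the
    # byte <-> percent conversions that the test expectations above rely on.

    def raw_to_percent(raw: int) -> int:
        """Convert a raw KNX byte (0-255) to a percentage (0-100)."""
        return round(raw * 100 / 255)

    def percent_to_raw(percent: int) -> int:
        """Convert a percentage (0-100) to a raw KNX byte (0-255)."""
        return round(percent * 255 / 100)

    assert raw_to_percent(84) == 33    # receive_response (84,) -> fan_mode "low"
    assert raw_to_percent(64) == 25    # receive_response (64,) -> fan_mode "25%"
    assert percent_to_raw(66) == 168   # set_fan_mode "medium" -> assert_write (168,)
    assert percent_to_raw(50) == 128   # set_fan_mode "50%" -> assert_write (128,)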
diff --git a/tests/components/konnected/test_init.py b/tests/components/konnected/test_init.py index 1a2da88624d..6fc6b10ff20 100644 --- a/tests/components/konnected/test_init.py +++ b/tests/components/konnected/test_init.py @@ -7,8 +7,8 @@ import pytest from homeassistant.components import konnected from homeassistant.components.konnected import config_flow -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry diff --git a/tests/components/kostal_plenticore/test_diagnostics.py b/tests/components/kostal_plenticore/test_diagnostics.py index 0f358260be7..3a99a7f681d 100644 --- a/tests/components/kostal_plenticore/test_diagnostics.py +++ b/tests/components/kostal_plenticore/test_diagnostics.py @@ -56,6 +56,8 @@ async def test_entry_diagnostics( "disabled_by": None, "created_at": ANY, "modified_at": ANY, + "discovery_keys": {}, + "subentries": [], }, "client": { "version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' sw_version='01.16.05025'", diff --git a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr index 9d880746ff9..640726e2355 100644 --- a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr +++ b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr @@ -15,6 +15,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'lacrosse_view', 'entry_id': 'lacrosse_view_test_entry_id', 'minor_version': 1, @@ -23,6 +25,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/lacrosse_view/test_config_flow.py b/tests/components/lacrosse_view/test_config_flow.py index 5a48b3d15fe..f953d9a3841 100644 --- a/tests/components/lacrosse_view/test_config_flow.py +++ b/tests/components/lacrosse_view/test_config_flow.py @@ -30,7 +30,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: ), patch( "lacrosse_view.LaCrosse.get_locations", - return_value=[Location(id=1, name="Test")], + return_value=[Location(id="1", name="Test")], ), ): result2 = await hass.config_entries.flow.async_configure( @@ -206,7 +206,7 @@ async def test_already_configured_device( ), patch( "lacrosse_view.LaCrosse.get_locations", - return_value=[Location(id=1, name="Test")], + return_value=[Location(id="1", name="Test")], ), ): result2 = await hass.config_entries.flow.async_configure( @@ -251,16 +251,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": mock_config_entry.title}, - "unique_id": mock_config_entry.unique_id, - }, - data=data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -271,7 +262,7 @@ async def test_reauth(hass: HomeAssistant) -> None: patch("lacrosse_view.LaCrosse.login", return_value=True), patch( "lacrosse_view.LaCrosse.get_locations", - return_value=[Location(id=1, name="Test")], + return_value=[Location(id="1", name="Test")], ), ): result2 = 
await hass.config_entries.flow.async_configure( diff --git a/tests/components/lamarzocco/__init__.py b/tests/components/lamarzocco/__init__.py index 4d274d10baa..f6ca0fe40df 100644 --- a/tests/components/lamarzocco/__init__.py +++ b/tests/components/lamarzocco/__init__.py @@ -1,6 +1,6 @@ """Mock inputs for tests.""" -from lmcloud.const import MachineModel +from pylamarzocco.const import MachineModel from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -19,10 +19,10 @@ PASSWORD_SELECTION = { USER_INPUT = PASSWORD_SELECTION | {CONF_USERNAME: "username"} SERIAL_DICT = { - MachineModel.GS3_AV: "GS01234", - MachineModel.GS3_MP: "GS01234", - MachineModel.LINEA_MICRA: "MR01234", - MachineModel.LINEA_MINI: "LM01234", + MachineModel.GS3_AV: "GS012345", + MachineModel.GS3_MP: "GS012345", + MachineModel.LINEA_MICRA: "MR012345", + MachineModel.LINEA_MINI: "LM012345", } WAKE_UP_SLEEP_ENTRY_IDS = ["Os2OswX", "aXFz5bJ"] diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 1a4fbbd4a0c..997fa73604c 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -2,16 +2,22 @@ from collections.abc import Generator import json -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from bleak.backends.device import BLEDevice -from lmcloud.const import FirmwareType, MachineModel, SteamLevel -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoDeviceInfo +from pylamarzocco.const import FirmwareType, MachineModel, SteamLevel +from pylamarzocco.devices.machine import LaMarzoccoMachine +from pylamarzocco.models import LaMarzoccoDeviceInfo import pytest from homeassistant.components.lamarzocco.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_NAME, CONF_TOKEN +from homeassistant.const import ( + CONF_ADDRESS, + CONF_HOST, + CONF_MODEL, + CONF_NAME, + CONF_TOKEN, +) from homeassistant.core import HomeAssistant from . import SERIAL_DICT, USER_INPUT, async_init_integration @@ -19,26 +25,53 @@ from . 
import SERIAL_DICT, USER_INPUT, async_init_integration from tests.common import MockConfigEntry, load_fixture, load_json_object_fixture +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.lamarzocco.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + @pytest.fixture def mock_config_entry( hass: HomeAssistant, mock_lamarzocco: MagicMock ) -> MockConfigEntry: """Return the default mocked config entry.""" - entry = MockConfigEntry( + return MockConfigEntry( title="My LaMarzocco", domain=DOMAIN, version=2, data=USER_INPUT | { CONF_MODEL: mock_lamarzocco.model, + CONF_ADDRESS: "00:00:00:00:00:00", CONF_HOST: "host", CONF_TOKEN: "token", CONF_NAME: "GS3", }, unique_id=mock_lamarzocco.serial_number, ) - entry.add_to_hass(hass) - return entry + + +@pytest.fixture +def mock_config_entry_no_local_connection( + hass: HomeAssistant, mock_lamarzocco: MagicMock +) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="My LaMarzocco", + domain=DOMAIN, + version=2, + data=USER_INPUT + | { + CONF_MODEL: mock_lamarzocco.model, + CONF_TOKEN: "token", + CONF_NAME: "GS3", + }, + unique_id=mock_lamarzocco.serial_number, + ) @pytest.fixture @@ -58,11 +91,11 @@ def device_fixture() -> MachineModel: @pytest.fixture -def mock_device_info() -> LaMarzoccoDeviceInfo: +def mock_device_info(device_fixture: MachineModel) -> LaMarzoccoDeviceInfo: """Return a mocked La Marzocco device info.""" return LaMarzoccoDeviceInfo( - model=MachineModel.GS3_AV, - serial_number="GS01234", + model=device_fixture, + serial_number=SERIAL_DICT[device_fixture], name="GS3", communication_key="token", ) @@ -110,7 +143,7 @@ def mock_lamarzocco(device_fixture: MachineModel) -> Generator[MagicMock]: with ( patch( - "homeassistant.components.lamarzocco.coordinator.LaMarzoccoMachine", + "homeassistant.components.lamarzocco.LaMarzoccoMachine", autospec=True, ) as lamarzocco_mock, ): @@ -131,17 +164,6 @@ def mock_lamarzocco(device_fixture: MachineModel) -> Generator[MagicMock]: yield lamarzocco -@pytest.fixture -def remove_local_connection( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> MockConfigEntry: - """Remove the local connection.""" - data = mock_config_entry.data.copy() - del data[CONF_HOST] - hass.config_entries.async_update_entry(mock_config_entry, data=data) - return mock_config_entry - - @pytest.fixture(autouse=True) def mock_bluetooth(enable_bluetooth: None) -> None: """Auto mock bluetooth.""" @@ -151,5 +173,5 @@ def mock_bluetooth(enable_bluetooth: None) -> None: def mock_ble_device() -> BLEDevice: """Return a mock BLE device.""" return BLEDevice( - "00:00:00:00:00:00", "GS_GS01234", details={"path": "path"}, rssi=50 + "00:00:00:00:00:00", "GS_GS012345", details={"path": "path"}, rssi=50 ) diff --git a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr index df47ac002e6..cda285a7106 100644 --- a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr @@ -1,19 +1,19 @@ # serializer version: 1 -# name: test_binary_sensors[GS01234_backflush_active-binary_sensor] +# name: test_binary_sensors[GS012345_backflush_active-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'GS01234 Backflush active', + 'friendly_name': 'GS012345 Backflush active', }), 
'context': , - 'entity_id': 'binary_sensor.gs01234_backflush_active', + 'entity_id': 'binary_sensor.gs012345_backflush_active', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS01234_backflush_active-entry] +# name: test_binary_sensors[GS012345_backflush_active-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -25,7 +25,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs01234_backflush_active', + 'entity_id': 'binary_sensor.gs012345_backflush_active', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -42,25 +42,25 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'backflush_enabled', - 'unique_id': 'GS01234_backflush_enabled', + 'unique_id': 'GS012345_backflush_enabled', 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[GS01234_brewing_active-binary_sensor] +# name: test_binary_sensors[GS012345_brewing_active-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'GS01234 Brewing active', + 'friendly_name': 'GS012345 Brewing active', }), 'context': , - 'entity_id': 'binary_sensor.gs01234_brewing_active', + 'entity_id': 'binary_sensor.gs012345_brewing_active', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS01234_brewing_active-entry] +# name: test_binary_sensors[GS012345_brewing_active-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -72,7 +72,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs01234_brewing_active', + 'entity_id': 'binary_sensor.gs012345_brewing_active', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -89,25 +89,25 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'brew_active', - 'unique_id': 'GS01234_brew_active', + 'unique_id': 'GS012345_brew_active', 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[GS01234_water_tank_empty-binary_sensor] +# name: test_binary_sensors[GS012345_water_tank_empty-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'GS01234 Water tank empty', + 'friendly_name': 'GS012345 Water tank empty', }), 'context': , - 'entity_id': 'binary_sensor.gs01234_water_tank_empty', + 'entity_id': 'binary_sensor.gs012345_water_tank_empty', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS01234_water_tank_empty-entry] +# name: test_binary_sensors[GS012345_water_tank_empty-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -119,7 +119,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs01234_water_tank_empty', + 'entity_id': 'binary_sensor.gs012345_water_tank_empty', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -136,7 +136,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'water_tank', - 'unique_id': 'GS01234_water_tank', + 'unique_id': 'GS012345_water_tank', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_button.ambr b/tests/components/lamarzocco/snapshots/test_button.ambr index 023039cc6f7..64d47a11072 100644 --- a/tests/components/lamarzocco/snapshots/test_button.ambr +++ b/tests/components/lamarzocco/snapshots/test_button.ambr @@ -2,10 +2,10 @@ # name: test_start_backflush 
StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Start backflush', + 'friendly_name': 'GS012345 Start backflush', }), 'context': , - 'entity_id': 'button.gs01234_start_backflush', + 'entity_id': 'button.gs012345_start_backflush', 'last_changed': , 'last_reported': , 'last_updated': , @@ -24,7 +24,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': None, - 'entity_id': 'button.gs01234_start_backflush', + 'entity_id': 'button.gs012345_start_backflush', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -41,7 +41,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'start_backflush', - 'unique_id': 'GS01234_start_backflush', + 'unique_id': 'GS012345_start_backflush', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_calendar.ambr b/tests/components/lamarzocco/snapshots/test_calendar.ambr index 2fd5dab846a..729eed5879a 100644 --- a/tests/components/lamarzocco/snapshots/test_calendar.ambr +++ b/tests/components/lamarzocco/snapshots/test_calendar.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: test_calendar_edge_cases[start_date0-end_date0] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -15,7 +15,7 @@ # --- # name: test_calendar_edge_cases[start_date1-end_date1] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -29,7 +29,7 @@ # --- # name: test_calendar_edge_cases[start_date2-end_date2] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -43,7 +43,7 @@ # --- # name: test_calendar_edge_cases[start_date3-end_date3] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -57,7 +57,7 @@ # --- # name: test_calendar_edge_cases[start_date4-end_date4] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ ]), }), @@ -65,7 +65,7 @@ # --- # name: test_calendar_edge_cases[start_date5-end_date5] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -83,7 +83,7 @@ }), }) # --- -# name: test_calendar_events[entry.GS01234_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[entry.GS012345_auto_on_off_schedule_axfz5bj] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -95,7 +95,7 @@ 'disabled_by': None, 'domain': 'calendar', 'entity_category': None, - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_axfz5bj', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_axfz5bj', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -112,11 +112,11 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off_schedule', - 
'unique_id': 'GS01234_auto_on_off_schedule_aXFz5bJ', + 'unique_id': 'GS012345_auto_on_off_schedule_aXFz5bJ', 'unit_of_measurement': None, }) # --- -# name: test_calendar_events[entry.GS01234_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[entry.GS012345_auto_on_off_schedule_os2oswx] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -128,7 +128,7 @@ 'disabled_by': None, 'domain': 'calendar', 'entity_category': None, - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_os2oswx', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_os2oswx', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -145,13 +145,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off_schedule', - 'unique_id': 'GS01234_auto_on_off_schedule_Os2OswX', + 'unique_id': 'GS012345_auto_on_off_schedule_Os2OswX', 'unit_of_measurement': None, }) # --- -# name: test_calendar_events[events.GS01234_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[events.GS012345_auto_on_off_schedule_axfz5bj] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -181,9 +181,9 @@ }), }) # --- -# name: test_calendar_events[events.GS01234_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[events.GS012345_auto_on_off_schedule_os2oswx] dict({ - 'calendar.gs01234_auto_on_off_schedule_os2oswx': dict({ + 'calendar.gs012345_auto_on_off_schedule_os2oswx': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -327,38 +327,38 @@ }), }) # --- -# name: test_calendar_events[state.GS01234_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[state.GS012345_auto_on_off_schedule_axfz5bj] StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': False, 'description': 'Machine is scheduled to turn on at the start time and off at the end time', 'end_time': '2024-01-14 07:30:00', - 'friendly_name': 'GS01234 Auto on/off schedule (aXFz5bJ)', + 'friendly_name': 'GS012345 Auto on/off schedule (aXFz5bJ)', 'location': '', 'message': 'Machine My LaMarzocco on', 'start_time': '2024-01-14 07:00:00', }), 'context': , - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_axfz5bj', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_axfz5bj', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_calendar_events[state.GS01234_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[state.GS012345_auto_on_off_schedule_os2oswx] StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': False, 'description': 'Machine is scheduled to turn on at the start time and off at the end time', 'end_time': '2024-01-13 00:00:00', - 'friendly_name': 'GS01234 Auto on/off schedule (Os2OswX)', + 'friendly_name': 'GS012345 Auto on/off schedule (Os2OswX)', 'location': '', 'message': 'Machine My LaMarzocco on', 'start_time': '2024-01-12 22:00:00', }), 'context': , - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_os2oswx', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_os2oswx', 'last_changed': , 'last_reported': , 'last_updated': , @@ -367,7 +367,7 @@ # --- # name: test_no_calendar_events_global_disable dict({ - 'calendar.gs01234_auto_on_off_schedule_os2oswx': dict({ + 'calendar.gs012345_auto_on_off_schedule_os2oswx': dict({ 'events': list([ ]), }), diff --git 
a/tests/components/lamarzocco/snapshots/test_diagnostics.ambr b/tests/components/lamarzocco/snapshots/test_diagnostics.ambr index b185557bd08..b1d8140b2ce 100644 --- a/tests/components/lamarzocco/snapshots/test_diagnostics.ambr +++ b/tests/components/lamarzocco/snapshots/test_diagnostics.ambr @@ -3,6 +3,7 @@ dict({ 'config': dict({ 'backflush_enabled': False, + 'bbw_settings': None, 'boilers': dict({ 'CoffeeBoiler1': dict({ 'current_temperature': 96.5, @@ -44,6 +45,7 @@ }), }), 'prebrew_mode': 'TypeB', + 'scale': None, 'smart_standby': dict({ 'enabled': True, 'minutes': 10, diff --git a/tests/components/lamarzocco/snapshots/test_init.ambr b/tests/components/lamarzocco/snapshots/test_init.ambr new file mode 100644 index 00000000000..519a9301bfd --- /dev/null +++ b/tests/components/lamarzocco/snapshots/test_init.ambr @@ -0,0 +1,41 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'bluetooth', + 'aa:bb:cc:dd:ee:ff', + ), + tuple( + 'mac', + '00:00:00:00:00:00', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'lamarzocco', + 'GS012345', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'La Marzocco', + 'model': , + 'model_id': , + 'name': 'GS012345', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': 'GS012345', + 'suggested_area': None, + 'sw_version': '1.40', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/lamarzocco/snapshots/test_number.ambr b/tests/components/lamarzocco/snapshots/test_number.ambr index 8265e7d7646..b7e42bb425f 100644 --- a/tests/components/lamarzocco/snapshots/test_number.ambr +++ b/tests/components/lamarzocco/snapshots/test_number.ambr @@ -1,9 +1,9 @@ # serializer version: 1 -# name: test_coffee_boiler +# name: test_general_numbers[coffee_target_temperature-94-set_temp-kwargs0] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Coffee target temperature', + 'friendly_name': 'GS012345 Coffee target temperature', 'max': 104, 'min': 85, 'mode': , @@ -11,14 +11,14 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_coffee_target_temperature', + 'entity_id': 'number.gs012345_coffee_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '95', }) # --- -# name: test_coffee_boiler.1 +# name: test_general_numbers[coffee_target_temperature-94-set_temp-kwargs0].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -35,7 +35,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_coffee_target_temperature', + 'entity_id': 'number.gs012345_coffee_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,15 +52,72 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'coffee_temp', - 'unique_id': 'GS01234_coffee_temp', + 'unique_id': 'GS012345_coffee_temp', 'unit_of_measurement': , }) # --- +# name: test_general_numbers[smart_standby_time-23-set_smart_standby-kwargs1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'GS012345 Smart standby time', + 'max': 240, + 'min': 10, + 'mode': , + 'step': 10, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.gs012345_smart_standby_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'10', + }) +# --- +# name: test_general_numbers[smart_standby_time-23-set_smart_standby-kwargs1].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 240, + 'min': 10, + 'mode': , + 'step': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.gs012345_smart_standby_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Smart standby time', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smart_standby_time', + 'unique_id': 'GS012345_smart_standby_time', + 'unit_of_measurement': , + }) +# --- # name: test_gs3_exclusive[steam_target_temperature-131-set_temp-kwargs0-GS3 AV] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Steam target temperature', + 'friendly_name': 'GS012345 Steam target temperature', 'max': 131, 'min': 126, 'mode': , @@ -68,7 +125,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -92,7 +149,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -109,7 +166,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp', - 'unique_id': 'GS01234_steam_temp', + 'unique_id': 'GS012345_steam_temp', 'unit_of_measurement': , }) # --- @@ -117,7 +174,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Steam target temperature', + 'friendly_name': 'GS012345 Steam target temperature', 'max': 131, 'min': 126, 'mode': , @@ -125,7 +182,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -149,7 +206,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -166,7 +223,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp', - 'unique_id': 'GS01234_steam_temp', + 'unique_id': 'GS012345_steam_temp', 'unit_of_measurement': , }) # --- @@ -174,7 +231,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Tea water duration', + 'friendly_name': 'GS012345 Tea water duration', 'max': 30, 'min': 0, 'mode': , @@ -182,7 +239,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'last_changed': , 'last_reported': , 'last_updated': , @@ -206,7 +263,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ 
-223,7 +280,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tea_water_duration', - 'unique_id': 'GS01234_tea_water_duration', + 'unique_id': 'GS012345_tea_water_duration', 'unit_of_measurement': , }) # --- @@ -231,7 +288,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Tea water duration', + 'friendly_name': 'GS012345 Tea water duration', 'max': 30, 'min': 0, 'mode': , @@ -239,7 +296,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'last_changed': , 'last_reported': , 'last_updated': , @@ -263,7 +320,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -280,14 +337,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tea_water_duration', - 'unique_id': 'GS01234_tea_water_duration', + 'unique_id': 'GS012345_tea_water_duration', 'unit_of_measurement': , }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_1-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 1', + 'friendly_name': 'GS012345 Dose Key 1', 'max': 999, 'min': 0, 'mode': , @@ -295,17 +352,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_1', + 'entity_id': 'number.gs012345_dose_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '135', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_2-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 2', + 'friendly_name': 'GS012345 Dose Key 2', 'max': 999, 'min': 0, 'mode': , @@ -313,17 +370,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_2', + 'entity_id': 'number.gs012345_dose_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '97', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_3-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 3', + 'friendly_name': 'GS012345 Dose Key 3', 'max': 999, 'min': 0, 'mode': , @@ -331,17 +388,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_3', + 'entity_id': 'number.gs012345_dose_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '108', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_4-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 4', + 'friendly_name': 'GS012345 Dose Key 4', 'max': 999, 'min': 0, 'mode': , @@ -349,18 +406,18 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_4', + 'entity_id': 
'number.gs012345_dose_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '121', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 1', + 'friendly_name': 'GS012345 Prebrew off time Key 1', 'max': 10, 'min': 1, 'mode': , @@ -368,18 +425,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_1', + 'entity_id': 'number.gs012345_prebrew_off_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 2', + 'friendly_name': 'GS012345 Prebrew off time Key 2', 'max': 10, 'min': 1, 'mode': , @@ -387,18 +444,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_2', + 'entity_id': 'number.gs012345_prebrew_off_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 3', + 'friendly_name': 'GS012345 Prebrew off time Key 3', 'max': 10, 'min': 1, 'mode': , @@ -406,18 +463,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_3', + 'entity_id': 'number.gs012345_prebrew_off_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 4', + 'friendly_name': 'GS012345 Prebrew off time Key 4', 'max': 10, 'min': 1, 'mode': , @@ -425,18 +482,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_4', + 'entity_id': 'number.gs012345_prebrew_off_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '2', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 1', + 'friendly_name': 
'GS012345 Prebrew on time Key 1', 'max': 10, 'min': 2, 'mode': , @@ -444,18 +501,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_1', + 'entity_id': 'number.gs012345_prebrew_on_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 2', + 'friendly_name': 'GS012345 Prebrew on time Key 2', 'max': 10, 'min': 2, 'mode': , @@ -463,18 +520,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_2', + 'entity_id': 'number.gs012345_prebrew_on_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 3', + 'friendly_name': 'GS012345 Prebrew on time Key 3', 'max': 10, 'min': 2, 'mode': , @@ -482,18 +539,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_3', + 'entity_id': 'number.gs012345_prebrew_on_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 4', + 'friendly_name': 'GS012345 Prebrew on time Key 4', 'max': 10, 'min': 2, 'mode': , @@ -501,18 +558,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_4', + 'entity_id': 'number.gs012345_prebrew_on_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '2', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 1', + 'friendly_name': 'GS012345 Preinfusion time Key 1', 'max': 29, 'min': 2, 'mode': , @@ -520,18 +577,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_1', + 'entity_id': 'number.gs012345_preinfusion_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_2-state] +# name: 
test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 2', + 'friendly_name': 'GS012345 Preinfusion time Key 2', 'max': 29, 'min': 2, 'mode': , @@ -539,18 +596,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_2', + 'entity_id': 'number.gs012345_preinfusion_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 3', + 'friendly_name': 'GS012345 Preinfusion time Key 3', 'max': 29, 'min': 2, 'mode': , @@ -558,18 +615,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_3', + 'entity_id': 'number.gs012345_preinfusion_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 4', + 'friendly_name': 'GS012345 Preinfusion time Key 4', 'max': 29, 'min': 2, 'mode': , @@ -577,7 +634,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_4', + 'entity_id': 'number.gs012345_preinfusion_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , @@ -588,7 +645,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM01234 Prebrew off time', + 'friendly_name': 'LM012345 Prebrew off time', 'max': 10, 'min': 1, 'mode': , @@ -596,7 +653,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm01234_prebrew_off_time', + 'entity_id': 'number.lm012345_prebrew_off_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -620,7 +677,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm01234_prebrew_off_time', + 'entity_id': 'number.lm012345_prebrew_off_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -637,7 +694,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_off', - 'unique_id': 'LM01234_prebrew_off', + 'unique_id': 'LM012345_prebrew_off', 'unit_of_measurement': , }) # --- @@ -645,7 +702,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR01234 Prebrew off time', + 'friendly_name': 'MR012345 Prebrew off time', 'max': 10, 'min': 1, 'mode': , @@ -653,7 +710,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr01234_prebrew_off_time', + 'entity_id': 'number.mr012345_prebrew_off_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -677,7 +734,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 
'number.mr01234_prebrew_off_time', + 'entity_id': 'number.mr012345_prebrew_off_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -694,7 +751,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_off', - 'unique_id': 'MR01234_prebrew_off', + 'unique_id': 'MR012345_prebrew_off', 'unit_of_measurement': , }) # --- @@ -702,7 +759,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM01234 Prebrew on time', + 'friendly_name': 'LM012345 Prebrew on time', 'max': 10, 'min': 2, 'mode': , @@ -710,7 +767,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm01234_prebrew_on_time', + 'entity_id': 'number.lm012345_prebrew_on_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -734,7 +791,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm01234_prebrew_on_time', + 'entity_id': 'number.lm012345_prebrew_on_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -751,7 +808,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_on', - 'unique_id': 'LM01234_prebrew_on', + 'unique_id': 'LM012345_prebrew_on', 'unit_of_measurement': , }) # --- @@ -759,7 +816,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR01234 Prebrew on time', + 'friendly_name': 'MR012345 Prebrew on time', 'max': 10, 'min': 2, 'mode': , @@ -767,7 +824,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr01234_prebrew_on_time', + 'entity_id': 'number.mr012345_prebrew_on_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -791,7 +848,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr01234_prebrew_on_time', + 'entity_id': 'number.mr012345_prebrew_on_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -808,7 +865,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_on', - 'unique_id': 'MR01234_prebrew_on', + 'unique_id': 'MR012345_prebrew_on', 'unit_of_measurement': , }) # --- @@ -816,7 +873,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM01234 Preinfusion time', + 'friendly_name': 'LM012345 Preinfusion time', 'max': 29, 'min': 2, 'mode': , @@ -824,7 +881,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm01234_preinfusion_time', + 'entity_id': 'number.lm012345_preinfusion_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -848,7 +905,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm01234_preinfusion_time', + 'entity_id': 'number.lm012345_preinfusion_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -865,7 +922,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preinfusion_off', - 'unique_id': 'LM01234_preinfusion_off', + 'unique_id': 'LM012345_preinfusion_off', 'unit_of_measurement': , }) # --- @@ -873,7 +930,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR01234 Preinfusion time', + 'friendly_name': 'MR012345 Preinfusion time', 'max': 29, 'min': 2, 'mode': , @@ -881,7 +938,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr01234_preinfusion_time', + 'entity_id': 'number.mr012345_preinfusion_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -905,7 +962,7 @@ 'disabled_by': None, 'domain': 'number', 
'entity_category': , - 'entity_id': 'number.mr01234_preinfusion_time', + 'entity_id': 'number.mr012345_preinfusion_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -922,7 +979,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preinfusion_off', - 'unique_id': 'MR01234_preinfusion_off', + 'unique_id': 'MR012345_preinfusion_off', 'unit_of_measurement': , }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_select.ambr b/tests/components/lamarzocco/snapshots/test_select.ambr index be56af2b092..46fa55eff13 100644 --- a/tests/components/lamarzocco/snapshots/test_select.ambr +++ b/tests/components/lamarzocco/snapshots/test_select.ambr @@ -2,7 +2,7 @@ # name: test_pre_brew_infusion_select[GS3 AV] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Prebrew/-infusion mode', + 'friendly_name': 'GS012345 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -10,7 +10,7 @@ ]), }), 'context': , - 'entity_id': 'select.gs01234_prebrew_infusion_mode', + 'entity_id': 'select.gs012345_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -35,7 +35,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.gs01234_prebrew_infusion_mode', + 'entity_id': 'select.gs012345_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,14 +52,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'GS01234_prebrew_infusion_select', + 'unique_id': 'GS012345_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # name: test_pre_brew_infusion_select[Linea Mini] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'LM01234 Prebrew/-infusion mode', + 'friendly_name': 'LM012345 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -67,7 +67,7 @@ ]), }), 'context': , - 'entity_id': 'select.lm01234_prebrew_infusion_mode', + 'entity_id': 'select.lm012345_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -92,7 +92,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.lm01234_prebrew_infusion_mode', + 'entity_id': 'select.lm012345_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -109,14 +109,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'LM01234_prebrew_infusion_select', + 'unique_id': 'LM012345_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # name: test_pre_brew_infusion_select[Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MR01234 Prebrew/-infusion mode', + 'friendly_name': 'MR012345 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -124,7 +124,7 @@ ]), }), 'context': , - 'entity_id': 'select.mr01234_prebrew_infusion_mode', + 'entity_id': 'select.mr012345_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -149,7 +149,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.mr01234_prebrew_infusion_mode', + 'entity_id': 'select.mr012345_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -166,14 +166,69 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'MR01234_prebrew_infusion_select', + 'unique_id': 'MR012345_prebrew_infusion_select', + 
'unit_of_measurement': None, + }) +# --- +# name: test_smart_standby_mode + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'GS012345 Smart standby mode', + 'options': list([ + 'power_on', + 'last_brewing', + ]), + }), + 'context': , + 'entity_id': 'select.gs012345_smart_standby_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'last_brewing', + }) +# --- +# name: test_smart_standby_mode.1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'power_on', + 'last_brewing', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.gs012345_smart_standby_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smart standby mode', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smart_standby_mode', + 'unique_id': 'GS012345_smart_standby_mode', 'unit_of_measurement': None, }) # --- # name: test_steam_boiler_level[Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MR01234 Steam level', + 'friendly_name': 'MR012345 Steam level', 'options': list([ '1', '2', @@ -181,7 +236,7 @@ ]), }), 'context': , - 'entity_id': 'select.mr01234_steam_level', + 'entity_id': 'select.mr012345_steam_level', 'last_changed': , 'last_reported': , 'last_updated': , @@ -206,7 +261,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': None, - 'entity_id': 'select.mr01234_steam_level', + 'entity_id': 'select.mr012345_steam_level', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -223,7 +278,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp_select', - 'unique_id': 'MR01234_steam_temp_select', + 'unique_id': 'MR012345_steam_temp_select', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_sensor.ambr b/tests/components/lamarzocco/snapshots/test_sensor.ambr index 2237a8416e1..da1efbf1eaa 100644 --- a/tests/components/lamarzocco/snapshots/test_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensors[GS01234_current_coffee_temperature-entry] +# name: test_sensors[GS012345_current_coffee_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -13,7 +13,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.gs01234_current_coffee_temperature', + 'entity_id': 'sensor.gs012345_current_coffee_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -33,27 +33,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_temp_coffee', - 'unique_id': 'GS01234_current_temp_coffee', + 'unique_id': 'GS012345_current_temp_coffee', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS01234_current_coffee_temperature-sensor] +# name: test_sensors[GS012345_current_coffee_temperature-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Current coffee temperature', + 'friendly_name': 'GS012345 Current coffee temperature', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs01234_current_coffee_temperature', + 
'entity_id': 'sensor.gs012345_current_coffee_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '96.5', }) # --- -# name: test_sensors[GS01234_current_steam_temperature-entry] +# name: test_sensors[GS012345_current_steam_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -67,7 +67,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.gs01234_current_steam_temperature', + 'entity_id': 'sensor.gs012345_current_steam_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -87,27 +87,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_temp_steam', - 'unique_id': 'GS01234_current_temp_steam', + 'unique_id': 'GS012345_current_temp_steam', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS01234_current_steam_temperature-sensor] +# name: test_sensors[GS012345_current_steam_temperature-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Current steam temperature', + 'friendly_name': 'GS012345 Current steam temperature', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs01234_current_steam_temperature', + 'entity_id': 'sensor.gs012345_current_steam_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '123.800003051758', }) # --- -# name: test_sensors[GS01234_shot_timer-entry] +# name: test_sensors[GS012345_shot_timer-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -121,7 +121,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs01234_shot_timer', + 'entity_id': 'sensor.gs012345_shot_timer', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -138,27 +138,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'shot_timer', - 'unique_id': 'GS01234_shot_timer', + 'unique_id': 'GS012345_shot_timer', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS01234_shot_timer-sensor] +# name: test_sensors[GS012345_shot_timer-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Shot timer', + 'friendly_name': 'GS012345 Shot timer', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs01234_shot_timer', + 'entity_id': 'sensor.gs012345_shot_timer', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0', }) # --- -# name: test_sensors[GS01234_total_coffees_made-entry] +# name: test_sensors[GS012345_total_coffees_made-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -172,7 +172,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs01234_total_coffees_made', + 'entity_id': 'sensor.gs012345_total_coffees_made', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -189,26 +189,26 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drink_stats_coffee', - 'unique_id': 'GS01234_drink_stats_coffee', + 'unique_id': 'GS012345_drink_stats_coffee', 'unit_of_measurement': 'drinks', }) # --- -# name: test_sensors[GS01234_total_coffees_made-sensor] +# name: test_sensors[GS012345_total_coffees_made-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Total coffees made', + 'friendly_name': 'GS012345 Total coffees made', 'state_class': , 'unit_of_measurement': 'drinks', }), 'context': , - 'entity_id': 'sensor.gs01234_total_coffees_made', + 'entity_id': 
'sensor.gs012345_total_coffees_made', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1047', }) # --- -# name: test_sensors[GS01234_total_flushes_made-entry] +# name: test_sensors[GS012345_total_flushes_made-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -222,7 +222,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs01234_total_flushes_made', + 'entity_id': 'sensor.gs012345_total_flushes_made', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -239,19 +239,19 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drink_stats_flushing', - 'unique_id': 'GS01234_drink_stats_flushing', + 'unique_id': 'GS012345_drink_stats_flushing', 'unit_of_measurement': 'drinks', }) # --- -# name: test_sensors[GS01234_total_flushes_made-sensor] +# name: test_sensors[GS012345_total_flushes_made-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Total flushes made', + 'friendly_name': 'GS012345 Total flushes made', 'state_class': , 'unit_of_measurement': 'drinks', }), 'context': , - 'entity_id': 'sensor.gs01234_total_flushes_made', + 'entity_id': 'sensor.gs012345_total_flushes_made', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/lamarzocco/snapshots/test_switch.ambr b/tests/components/lamarzocco/snapshots/test_switch.ambr index 4ec22e3123d..79a305c998f 100644 --- a/tests/components/lamarzocco/snapshots/test_switch.ambr +++ b/tests/components/lamarzocco/snapshots/test_switch.ambr @@ -11,7 +11,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': , - 'entity_id': 'switch.gs01234_auto_on_off_os2oswx', + 'entity_id': 'switch.gs012345_auto_on_off_os2oswx', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off', - 'unique_id': 'GS01234_auto_on_off_Os2OswX', + 'unique_id': 'GS012345_auto_on_off_Os2OswX', 'unit_of_measurement': None, }) # --- @@ -44,7 +44,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': , - 'entity_id': 'switch.gs01234_auto_on_off_axfz5bj', + 'entity_id': 'switch.gs012345_auto_on_off_axfz5bj', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -61,17 +61,17 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off', - 'unique_id': 'GS01234_auto_on_off_aXFz5bJ', + 'unique_id': 'GS012345_auto_on_off_aXFz5bJ', 'unit_of_measurement': None, }) # --- # name: test_auto_on_off_switches[state.auto_on_off_Os2OswX] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Auto on/off (Os2OswX)', + 'friendly_name': 'GS012345 Auto on/off (Os2OswX)', }), 'context': , - 'entity_id': 'switch.gs01234_auto_on_off_os2oswx', + 'entity_id': 'switch.gs012345_auto_on_off_os2oswx', 'last_changed': , 'last_reported': , 'last_updated': , @@ -81,62 +81,30 @@ # name: test_auto_on_off_switches[state.auto_on_off_aXFz5bJ] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Auto on/off (aXFz5bJ)', + 'friendly_name': 'GS012345 Auto on/off (aXFz5bJ)', }), 'context': , - 'entity_id': 'switch.gs01234_auto_on_off_axfz5bj', + 'entity_id': 'switch.gs012345_auto_on_off_axfz5bj', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_device - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': 
None, - 'id': , - 'identifiers': set({ - tuple( - 'lamarzocco', - 'GS01234', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'La Marzocco', - 'model': , - 'model_id': None, - 'name': 'GS01234', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': 'GS01234', - 'suggested_area': None, - 'sw_version': '1.40', - 'via_device_id': None, - }) -# --- -# name: test_switches[-set_power] +# name: test_switches[-set_power-kwargs0] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234', + 'friendly_name': 'GS012345', }), 'context': , - 'entity_id': 'switch.gs01234', + 'entity_id': 'switch.gs012345', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_switches[-set_power].1 +# name: test_switches[-set_power-kwargs0].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -148,7 +116,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.gs01234', + 'entity_id': 'switch.gs012345', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -165,24 +133,70 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'main', - 'unique_id': 'GS01234_main', + 'unique_id': 'GS012345_main', 'unit_of_measurement': None, }) # --- -# name: test_switches[_steam_boiler-set_steam] +# name: test_switches[_smart_standby_enabled-set_smart_standby-kwargs2] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Steam boiler', + 'friendly_name': 'GS012345 Smart standby enabled', }), 'context': , - 'entity_id': 'switch.gs01234_steam_boiler', + 'entity_id': 'switch.gs012345_smart_standby_enabled', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_switches[_steam_boiler-set_steam].1 +# name: test_switches[_smart_standby_enabled-set_smart_standby-kwargs2].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.gs012345_smart_standby_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smart standby enabled', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smart_standby_enabled', + 'unique_id': 'GS012345_smart_standby_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[_steam_boiler-set_steam-kwargs1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'GS012345 Steam boiler', + }), + 'context': , + 'entity_id': 'switch.gs012345_steam_boiler', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switches[_steam_boiler-set_steam-kwargs1].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -194,7 +208,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.gs01234_steam_boiler', + 'entity_id': 'switch.gs012345_steam_boiler', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -211,7 +225,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_boiler', - 'unique_id': 'GS01234_steam_boiler_enable', + 'unique_id': 'GS012345_steam_boiler_enable', 'unit_of_measurement': None, }) # --- diff --git 
a/tests/components/lamarzocco/snapshots/test_update.ambr b/tests/components/lamarzocco/snapshots/test_update.ambr index f08b9249f50..46fa4cff815 100644 --- a/tests/components/lamarzocco/snapshots/test_update.ambr +++ b/tests/components/lamarzocco/snapshots/test_update.ambr @@ -4,8 +4,9 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/lamarzocco/icon.png', - 'friendly_name': 'GS01234 Gateway firmware', + 'friendly_name': 'GS012345 Gateway firmware', 'in_progress': False, 'installed_version': 'v3.1-rc4', 'latest_version': 'v3.5-rc3', @@ -14,9 +15,10 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , - 'entity_id': 'update.gs01234_gateway_firmware', + 'entity_id': 'update.gs012345_gateway_firmware', 'last_changed': , 'last_reported': , 'last_updated': , @@ -35,7 +37,7 @@ 'disabled_by': None, 'domain': 'update', 'entity_category': , - 'entity_id': 'update.gs01234_gateway_firmware', + 'entity_id': 'update.gs012345_gateway_firmware', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,7 +54,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'gateway_firmware', - 'unique_id': 'GS01234_gateway_firmware', + 'unique_id': 'GS012345_gateway_firmware', 'unit_of_measurement': None, }) # --- @@ -61,8 +63,9 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/lamarzocco/icon.png', - 'friendly_name': 'GS01234 Machine firmware', + 'friendly_name': 'GS012345 Machine firmware', 'in_progress': False, 'installed_version': '1.40', 'latest_version': '1.55', @@ -71,9 +74,10 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , - 'entity_id': 'update.gs01234_machine_firmware', + 'entity_id': 'update.gs012345_machine_firmware', 'last_changed': , 'last_reported': , 'last_updated': , @@ -92,7 +96,7 @@ 'disabled_by': None, 'domain': 'update', 'entity_category': , - 'entity_id': 'update.gs01234_machine_firmware', + 'entity_id': 'update.gs012345_machine_firmware', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -109,7 +113,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'machine_firmware', - 'unique_id': 'GS01234_machine_firmware', + 'unique_id': 'GS012345_machine_firmware', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/test_binary_sensor.py b/tests/components/lamarzocco/test_binary_sensor.py index d363b96ca21..956bfe90dd4 100644 --- a/tests/components/lamarzocco/test_binary_sensor.py +++ b/tests/components/lamarzocco/test_binary_sensor.py @@ -4,8 +4,7 @@ from datetime import timedelta from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory -from lmcloud.exceptions import RequestNotSuccessful -import pytest +from pylamarzocco.exceptions import RequestNotSuccessful from syrupy import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE @@ -47,15 +46,14 @@ async def test_binary_sensors( assert entry == snapshot(name=f"{serial_number}_{binary_sensor}-entry") -@pytest.mark.usefixtures("remove_local_connection") async def test_brew_active_does_not_exists( hass: HomeAssistant, mock_lamarzocco: MagicMock, - mock_config_entry: MockConfigEntry, + mock_config_entry_no_local_connection: MockConfigEntry, ) -> None: """Test the La Marzocco 
currently_making_coffee doesn't exist if host not set.""" - await async_init_integration(hass, mock_config_entry) + await async_init_integration(hass, mock_config_entry_no_local_connection) state = hass.states.get(f"sensor.{mock_lamarzocco.serial_number}_brewing_active") assert state is None diff --git a/tests/components/lamarzocco/test_button.py b/tests/components/lamarzocco/test_button.py index e1a036df17a..61b7ba77c22 100644 --- a/tests/components/lamarzocco/test_button.py +++ b/tests/components/lamarzocco/test_button.py @@ -1,13 +1,15 @@ """Tests for the La Marzocco Buttons.""" -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock, patch +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er pytestmark = pytest.mark.usefixtures("init_integration") @@ -31,14 +33,41 @@ async def test_start_backflush( assert entry assert entry == snapshot - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - { - ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", - }, - blocking=True, - ) + with patch( + "homeassistant.components.lamarzocco.button.asyncio.sleep", + new_callable=AsyncMock, + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", + }, + blocking=True, + ) assert len(mock_lamarzocco.start_backflush.mock_calls) == 1 mock_lamarzocco.start_backflush.assert_called_once() + + +async def test_button_error( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, +) -> None: + """Test the La Marzocco button error.""" + serial_number = mock_lamarzocco.serial_number + + state = hass.states.get(f"button.{serial_number}_start_backflush") + assert state + + mock_lamarzocco.start_backflush.side_effect = RequestNotSuccessful("Boom.") + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", + }, + blocking=True, + ) + assert exc_info.value.translation_key == "button_error" diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index 92ecd0a13f4..e25aab39012 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -1,19 +1,24 @@ """Test the La Marzocco config flow.""" -from unittest.mock import MagicMock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch -from lmcloud.exceptions import AuthFail, RequestNotSuccessful -from lmcloud.models import LaMarzoccoDeviceInfo +from pylamarzocco.const import MachineModel +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoDeviceInfo +import pytest +from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import CONF_USE_BLUETOOTH, DOMAIN from homeassistant.config_entries import ( SOURCE_BLUETOOTH, - SOURCE_REAUTH, + SOURCE_DHCP, SOURCE_USER, ConfigEntryState, ) from homeassistant.const import ( + CONF_ADDRESS, CONF_HOST, CONF_MAC, 
CONF_MODEL, @@ -78,6 +83,7 @@ async def test_form( hass: HomeAssistant, mock_cloud_client: MagicMock, mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -132,6 +138,7 @@ async def test_form_invalid_auth( hass: HomeAssistant, mock_device_info: LaMarzoccoDeviceInfo, mock_cloud_client: MagicMock, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test invalid auth error.""" @@ -159,6 +166,7 @@ async def test_form_invalid_host( hass: HomeAssistant, mock_cloud_client: MagicMock, mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test invalid auth error.""" result = await hass.config_entries.flow.async_init( @@ -201,6 +209,7 @@ async def test_form_cannot_connect( hass: HomeAssistant, mock_cloud_client: MagicMock, mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test cannot connect error.""" @@ -247,15 +256,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -272,10 +273,68 @@ assert mock_config_entry.data[CONF_PASSWORD] == "new_password" + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_cloud_client: MagicMock, + mock_config_entry: MockConfigEntry, + mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], +) -> None: + """Testing reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result2 = await __do_successful_user_step(hass, result, mock_cloud_client) + service_info = get_bluetooth_service_info( + mock_device_info.model, mock_device_info.serial_number + ) + + with ( + patch( + "homeassistant.components.lamarzocco.config_flow.LaMarzoccoLocalClient.validate_connection", + return_value=True, + ), + patch( + "homeassistant.components.lamarzocco.config_flow.async_discovered_service_info", + return_value=[service_info], + ), + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + CONF_HOST: "192.168.1.1", + CONF_MACHINE: mock_device_info.serial_number, + }, + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "bluetooth_selection" + + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + {CONF_MAC: service_info.address}, + ) + + assert result4["type"] is FlowResultType.ABORT + assert result4["reason"] == "reconfigure_successful" + + assert mock_config_entry.title == "My LaMarzocco" + assert mock_config_entry.data == { + **mock_config_entry.data, + CONF_MAC: service_info.address, + } + + async def test_bluetooth_discovery( hass: HomeAssistant, mock_lamarzocco: MagicMock, mock_cloud_client: MagicMock, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test bluetooth discovery.""" service_info = get_bluetooth_service_info( @@ -322,11 +381,32 @@ async def test_bluetooth_discovery( } +async def
test_bluetooth_discovery_already_configured( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_setup_entry: Generator[AsyncMock], + mock_config_entry: MockConfigEntry, +) -> None: + """Test bluetooth discovery.""" + mock_config_entry.add_to_hass(hass) + + service_info = get_bluetooth_service_info( + mock_lamarzocco.model, mock_lamarzocco.serial_number + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_bluetooth_discovery_errors( hass: HomeAssistant, mock_lamarzocco: MagicMock, mock_cloud_client: MagicMock, mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test bluetooth discovery errors.""" service_info = get_bluetooth_service_info( @@ -387,10 +467,108 @@ async def test_bluetooth_discovery_errors( } +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.LINEA_MICRA, MachineModel.LINEA_MINI, MachineModel.GS3_AV], +) +async def test_dhcp_discovery( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], +) -> None: + """Test dhcp discovery.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.42", + hostname=mock_lamarzocco.serial_number, + macaddress="aa:bb:cc:dd:ee:ff", + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + with patch( + "homeassistant.components.lamarzocco.config_flow.LaMarzoccoLocalClient.validate_connection", + return_value=True, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + **USER_INPUT, + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_HOST: "192.168.1.42", + CONF_MACHINE: mock_lamarzocco.serial_number, + CONF_MODEL: mock_device_info.model, + CONF_NAME: mock_device_info.name, + CONF_TOKEN: mock_device_info.communication_key, + } + + +async def test_dhcp_discovery_abort_on_hostname_changed( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test dhcp discovery aborts when hostname was changed manually.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.42", + hostname="custom_name", + macaddress="00:00:00:00:00:00", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_dhcp_already_configured_and_update( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test discovered IP address change.""" + old_ip = mock_config_entry.data[CONF_HOST] + old_address = mock_config_entry.data[CONF_ADDRESS] + + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.42", + hostname=mock_lamarzocco.serial_number, + macaddress="aa:bb:cc:dd:ee:ff", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == 
"already_configured" + + assert mock_config_entry.data[CONF_HOST] != old_ip + assert mock_config_entry.data[CONF_HOST] == "192.168.1.42" + + assert mock_config_entry.data[CONF_ADDRESS] != old_address + assert mock_config_entry.data[CONF_ADDRESS] == "aa:bb:cc:dd:ee:ff" + + async def test_options_flow( hass: HomeAssistant, mock_lamarzocco: MagicMock, mock_config_entry: MockConfigEntry, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test options flow.""" await async_init_integration(hass, mock_config_entry) diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 2c812f79438..446c8780b62 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -2,16 +2,28 @@ from unittest.mock import AsyncMock, MagicMock, patch -from lmcloud.const import FirmwareType -from lmcloud.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.const import FirmwareType +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful import pytest +from syrupy import SnapshotAssertion from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, EVENT_HOMEASSISTANT_STOP +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_MODEL, + CONF_NAME, + CONF_TOKEN, + EVENT_HOMEASSISTANT_STOP, +) from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) from . import USER_INPUT, async_init_integration, get_bluetooth_service_info @@ -80,20 +92,22 @@ async def test_invalid_auth( async def test_v1_migration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, mock_cloud_client: MagicMock, mock_lamarzocco: MagicMock, ) -> None: """Test v1 -> v2 Migration.""" + common_data = { + **USER_INPUT, + CONF_HOST: "host", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + } entry_v1 = MockConfigEntry( domain=DOMAIN, version=1, unique_id=mock_lamarzocco.serial_number, data={ - **USER_INPUT, - CONF_HOST: "host", + **common_data, CONF_MACHINE: mock_lamarzocco.serial_number, - CONF_MAC: "aa:bb:cc:dd:ee:ff", }, ) @@ -102,8 +116,11 @@ async def test_v1_migration( await hass.async_block_till_done() assert entry_v1.version == 2 - assert dict(entry_v1.data) == dict(mock_config_entry.data) | { - CONF_MAC: "aa:bb:cc:dd:ee:ff" + assert dict(entry_v1.data) == { + **common_data, + CONF_NAME: "GS3", + CONF_MODEL: mock_lamarzocco.model, + CONF_TOKEN: "token", } @@ -157,9 +174,7 @@ async def test_bluetooth_is_set_from_discovery( "homeassistant.components.lamarzocco.async_discovered_service_info", return_value=[service_info], ) as discovery, - patch( - "homeassistant.components.lamarzocco.coordinator.LaMarzoccoMachine" - ) as init_device, + patch("homeassistant.components.lamarzocco.LaMarzoccoMachine") as init_device, ): await async_init_integration(hass, mock_config_entry) discovery.assert_called_once() @@ -182,7 +197,7 @@ async def test_websocket_closed_on_unload( ) as local_client: client = local_client.return_value client.websocket = AsyncMock() - client.websocket.connected = True + client.websocket.closed = False await async_init_integration(hass, mock_config_entry) hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() @@ -207,3 +222,32 @@ async def 
test_gateway_version_issue( issue_registry = ir.async_get(hass) issue = issue_registry.async_get_issue(DOMAIN, "unsupported_gateway_firmware") assert (issue is not None) == issue_exists + + +async def test_device( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the device.""" + + await async_init_integration(hass, mock_config_entry) + + hass.config_entries.async_update_entry( + mock_config_entry, + data={**mock_config_entry.data, CONF_MAC: "aa:bb:cc:dd:ee:ff"}, + ) + + state = hass.states.get(f"switch.{mock_lamarzocco.serial_number}") + assert state + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry.device_id + + device = device_registry.async_get(entry.device_id) + assert device + assert device == snapshot diff --git a/tests/components/lamarzocco/test_number.py b/tests/components/lamarzocco/test_number.py index 288c78c26dd..710a0220e06 100644 --- a/tests/components/lamarzocco/test_number.py +++ b/tests/components/lamarzocco/test_number.py @@ -1,14 +1,16 @@ """Tests for the La Marzocco number entities.""" +from typing import Any from unittest.mock import MagicMock -from lmcloud.const import ( +from pylamarzocco.const import ( KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey, PrebrewMode, ) +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -19,6 +21,7 @@ from homeassistant.components.number import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from . import async_init_integration @@ -26,20 +29,41 @@ from . 
import async_init_integration from tests.common import MockConfigEntry -async def test_coffee_boiler( +@pytest.mark.parametrize( + ("entity_name", "value", "func_name", "kwargs"), + [ + ( + "coffee_target_temperature", + 94, + "set_temp", + {"boiler": BoilerType.COFFEE, "temperature": 94}, + ), + ( + "smart_standby_time", + 23, + "set_smart_standby", + {"enabled": True, "mode": "LastBrewing", "minutes": 23}, + ), + ], +) +async def test_general_numbers( hass: HomeAssistant, mock_lamarzocco: MagicMock, mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion, + entity_name: str, + value: float, + func_name: str, + kwargs: dict[str, Any], ) -> None: - """Test the La Marzocco coffee temperature Number.""" + """Test the numbers available to all machines.""" await async_init_integration(hass, mock_config_entry) serial_number = mock_lamarzocco.serial_number - state = hass.states.get(f"number.{serial_number}_coffee_target_temperature") + state = hass.states.get(f"number.{serial_number}_{entity_name}") assert state assert state == snapshot @@ -57,16 +81,14 @@ async def test_coffee_boiler( NUMBER_DOMAIN, SERVICE_SET_VALUE, { - ATTR_ENTITY_ID: f"number.{serial_number}_coffee_target_temperature", - ATTR_VALUE: 94, + ATTR_ENTITY_ID: f"number.{serial_number}_{entity_name}", + ATTR_VALUE: value, }, blocking=True, ) - assert len(mock_lamarzocco.set_temp.mock_calls) == 1 - mock_lamarzocco.set_temp.assert_called_once_with( - boiler=BoilerType.COFFEE, temperature=94 - ) + mock_func = getattr(mock_lamarzocco, func_name) + mock_func.assert_called_once_with(**kwargs) @pytest.mark.parametrize("device_fixture", [MachineModel.GS3_AV, MachineModel.GS3_MP]) @@ -379,3 +401,46 @@ async def test_not_existing_key_entities( for key in range(1, KEYS_PER_MODEL[MachineModel.GS3_AV] + 1): state = hass.states.get(f"number.{serial_number}_{entity}_key_{key}") assert state is None + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number_error( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test number entities raise error on service call.""" + await async_init_integration(hass, mock_config_entry) + serial_number = mock_lamarzocco.serial_number + + state = hass.states.get(f"number.{serial_number}_coffee_target_temperature") + assert state + + mock_lamarzocco.set_temp.side_effect = RequestNotSuccessful("Boom") + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: f"number.{serial_number}_coffee_target_temperature", + ATTR_VALUE: 94, + }, + blocking=True, + ) + assert exc_info.value.translation_key == "number_exception" + + state = hass.states.get(f"number.{serial_number}_dose_key_1") + assert state + + mock_lamarzocco.set_dose.side_effect = RequestNotSuccessful("Boom") + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: f"number.{serial_number}_dose_key_1", + ATTR_VALUE: 99, + }, + blocking=True, + ) + assert exc_info.value.translation_key == "number_exception_key" diff --git a/tests/components/lamarzocco/test_select.py b/tests/components/lamarzocco/test_select.py index e3521b473bd..24b96f84f37 100644 --- a/tests/components/lamarzocco/test_select.py +++ b/tests/components/lamarzocco/test_select.py @@ -2,7 +2,8 @@ from unittest.mock import MagicMock -from lmcloud.const 
import MachineModel, PrebrewMode, SteamLevel +from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -13,6 +14,7 @@ from homeassistant.components.select import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er pytestmark = pytest.mark.usefixtures("init_integration") @@ -117,3 +119,63 @@ async def test_pre_brew_infusion_select_none( state = hass.states.get(f"select.{serial_number}_prebrew_infusion_mode") assert state is None + + +async def test_smart_standby_mode( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_lamarzocco: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test the La Marzocco Smart Standby mode select.""" + + serial_number = mock_lamarzocco.serial_number + + state = hass.states.get(f"select.{serial_number}_smart_standby_mode") + + assert state + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry == snapshot + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: f"select.{serial_number}_smart_standby_mode", + ATTR_OPTION: "power_on", + }, + blocking=True, + ) + + mock_lamarzocco.set_smart_standby.assert_called_once_with( + enabled=True, mode=SmartStandbyMode.POWER_ON, minutes=10 + ) + + +async def test_select_errors( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, +) -> None: + """Test select errors.""" + serial_number = mock_lamarzocco.serial_number + + state = hass.states.get(f"select.{serial_number}_prebrew_infusion_mode") + assert state + + mock_lamarzocco.set_prebrew_mode.side_effect = RequestNotSuccessful("Boom") + + # Test setting invalid option + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: f"select.{serial_number}_prebrew_infusion_mode", + ATTR_OPTION: "prebrew", + }, + blocking=True, + ) + assert exc_info.value.translation_key == "select_option_error" diff --git a/tests/components/lamarzocco/test_sensor.py b/tests/components/lamarzocco/test_sensor.py index 1ce56724fa3..6f14d52d1fc 100644 --- a/tests/components/lamarzocco/test_sensor.py +++ b/tests/components/lamarzocco/test_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock -from lmcloud.const import MachineModel +from pylamarzocco.const import MachineModel import pytest from syrupy import SnapshotAssertion @@ -47,15 +47,14 @@ async def test_sensors( assert entry == snapshot(name=f"{serial_number}_{sensor}-entry") -@pytest.mark.usefixtures("remove_local_connection") async def test_shot_timer_not_exists( hass: HomeAssistant, mock_lamarzocco: MagicMock, - mock_config_entry: MockConfigEntry, + mock_config_entry_no_local_connection: MockConfigEntry, ) -> None: """Test the La Marzocco shot timer doesn't exist if host not set.""" - await async_init_integration(hass, mock_config_entry) + await async_init_integration(hass, mock_config_entry_no_local_connection) state = hass.states.get(f"sensor.{mock_lamarzocco.serial_number}_shot_timer") assert state is None diff --git a/tests/components/lamarzocco/test_switch.py b/tests/components/lamarzocco/test_switch.py index 4f60b264a1d..9082e6f4c09 100644 --- a/tests/components/lamarzocco/test_switch.py +++ 
b/tests/components/lamarzocco/test_switch.py @@ -1,7 +1,9 @@ """Tests for La Marzocco switches.""" +from typing import Any from unittest.mock import MagicMock +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -12,7 +14,8 @@ from homeassistant.components.switch import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from . import WAKE_UP_SLEEP_ENTRY_IDS, async_init_integration @@ -23,15 +26,15 @@ from tests.common import MockConfigEntry ( "entity_name", "method_name", + "kwargs", ), [ + ("", "set_power", {}), + ("_steam_boiler", "set_steam", {}), ( - "", - "set_power", - ), - ( - "_steam_boiler", - "set_steam", + "_smart_standby_enabled", + "set_smart_standby", + {"mode": "LastBrewing", "minutes": 10}, ), ], ) @@ -43,6 +46,7 @@ async def test_switches( snapshot: SnapshotAssertion, entity_name: str, method_name: str, + kwargs: dict[str, Any], ) -> None: """Test the La Marzocco switches.""" await async_init_integration(hass, mock_config_entry) @@ -69,7 +73,7 @@ async def test_switches( ) assert len(control_fn.mock_calls) == 1 - control_fn.assert_called_once_with(False) + control_fn.assert_called_once_with(enabled=False, **kwargs) await hass.services.async_call( SWITCH_DOMAIN, @@ -81,31 +85,7 @@ async def test_switches( ) assert len(control_fn.mock_calls) == 2 - control_fn.assert_called_with(True) - - -async def test_device( - hass: HomeAssistant, - mock_lamarzocco: MagicMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the device for one switch.""" - - await async_init_integration(hass, mock_config_entry) - - state = hass.states.get(f"switch.{mock_lamarzocco.serial_number}") - assert state - - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry.device_id - - device = device_registry.async_get(entry.device_id) - assert device - assert device == snapshot + control_fn.assert_called_with(enabled=True, **kwargs) async def test_auto_on_off_switches( @@ -158,3 +138,56 @@ async def test_auto_on_off_switches( ) wake_up_sleep_entry.enabled = True mock_lamarzocco.set_wake_up_sleep.assert_called_with(wake_up_sleep_entry) + + +async def test_switch_exceptions( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the La Marzocco switches.""" + await async_init_integration(hass, mock_config_entry) + + serial_number = mock_lamarzocco.serial_number + + state = hass.states.get(f"switch.{serial_number}") + assert state + + mock_lamarzocco.set_power.side_effect = RequestNotSuccessful("Boom") + + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: f"switch.{serial_number}", + }, + blocking=True, + ) + assert exc_info.value.translation_key == "switch_off_error" + + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: f"switch.{serial_number}", + }, + blocking=True, + ) + assert exc_info.value.translation_key == "switch_on_error" + + state = hass.states.get(f"switch.{serial_number}_auto_on_off_os2oswx") + assert state + + 
mock_lamarzocco.set_wake_up_sleep.side_effect = RequestNotSuccessful("Boom") + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: f"switch.{serial_number}_auto_on_off_os2oswx", + }, + blocking=True, + ) + assert exc_info.value.translation_key == "auto_on_off_error" diff --git a/tests/components/lamarzocco/test_update.py b/tests/components/lamarzocco/test_update.py index 02330daf794..aef37d7c921 100644 --- a/tests/components/lamarzocco/test_update.py +++ b/tests/components/lamarzocco/test_update.py @@ -2,7 +2,8 @@ from unittest.mock import MagicMock -from lmcloud.const import FirmwareType +from pylamarzocco.const import FirmwareType +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -54,17 +55,26 @@ async def test_update_entites( mock_lamarzocco.update_firmware.assert_called_once_with(component) +@pytest.mark.parametrize( + ("attr", "value"), + [ + ("side_effect", RequestNotSuccessful("Boom")), + ("return_value", False), + ], +) async def test_update_error( hass: HomeAssistant, mock_lamarzocco: MagicMock, + attr: str, + value: bool | Exception, ) -> None: """Test error during update.""" state = hass.states.get(f"update.{mock_lamarzocco.serial_number}_machine_firmware") assert state - mock_lamarzocco.update_firmware.return_value = False + setattr(mock_lamarzocco.update_firmware, attr, value) - with pytest.raises(HomeAssistantError, match="Update failed"): + with pytest.raises(HomeAssistantError) as exc_info: await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, @@ -73,3 +83,4 @@ async def test_update_error( }, blocking=True, ) + assert exc_info.value.translation_key == "update_failed" diff --git a/tests/components/lametric/conftest.py b/tests/components/lametric/conftest.py index e8ba727f3db..da86d1bc4de 100644 --- a/tests/components/lametric/conftest.py +++ b/tests/components/lametric/conftest.py @@ -6,7 +6,6 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from demetriek import CloudDevice, Device -from pydantic import parse_raw_as # pylint: disable=no-name-in-module import pytest from homeassistant.components.application_credentials import ( @@ -18,7 +17,7 @@ from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_fixture, load_json_array_fixture @pytest.fixture(autouse=True) @@ -50,8 +49,8 @@ def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch( "homeassistant.components.lametric.async_setup_entry", return_value=True - ) as mock_setup: - yield mock_setup + ): + yield @pytest.fixture @@ -61,9 +60,10 @@ def mock_lametric_cloud() -> Generator[MagicMock]: "homeassistant.components.lametric.config_flow.LaMetricCloud", autospec=True ) as lametric_mock: lametric = lametric_mock.return_value - lametric.devices.return_value = parse_raw_as( - list[CloudDevice], load_fixture("cloud_devices.json", DOMAIN) - ) + lametric.devices.return_value = [ + CloudDevice.from_dict(cloud_device) + for cloud_device in load_json_array_fixture("cloud_devices.json", DOMAIN) + ] yield lametric @@ -89,7 +89,7 @@ def mock_lametric(device_fixture: str) -> Generator[MagicMock]: lametric = lametric_mock.return_value lametric.api_key = "mock-api-key" lametric.host 
= "127.0.0.1" - lametric.device.return_value = Device.parse_raw( + lametric.device.return_value = Device.from_json( load_fixture(f"{device_fixture}.json", DOMAIN) ) yield lametric diff --git a/tests/components/lametric/snapshots/test_diagnostics.ambr b/tests/components/lametric/snapshots/test_diagnostics.ambr index cadd0e37566..7517cfe035e 100644 --- a/tests/components/lametric/snapshots/test_diagnostics.ambr +++ b/tests/components/lametric/snapshots/test_diagnostics.ambr @@ -26,6 +26,10 @@ 'brightness_mode': 'auto', 'display_type': 'mixed', 'height': 8, + 'on': None, + 'screensaver': dict({ + 'enabled': False, + }), 'width': 37, }), 'mode': 'auto', diff --git a/tests/components/lametric/test_config_flow.py b/tests/components/lametric/test_config_flow.py index 2a21423ad03..ccbbe005639 100644 --- a/tests/components/lametric/test_config_flow.py +++ b/tests/components/lametric/test_config_flow.py @@ -20,12 +20,7 @@ from homeassistant.components.ssdp import ( ATTR_UPNP_SERIAL, SsdpServiceInfo, ) -from homeassistant.config_entries import ( - SOURCE_DHCP, - SOURCE_REAUTH, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_DEVICE, CONF_HOST, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -46,12 +41,11 @@ SSDP_DISCOVERY_INFO = SsdpServiceInfo( ) -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") async def test_full_cloud_import_flow_multiple_devices( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, ) -> None: @@ -60,25 +54,24 @@ async def test_full_cloud_import_flow_multiple_devices( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) - assert result2.get("type") is FlowResultType.EXTERNAL_STEP - assert result2.get("url") == ( + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( "https://developer.lametric.com/api/v2/oauth2/authorize" "?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" @@ -101,37 +94,37 @@ async def test_full_cloud_import_flow_multiple_devices( }, ) - result3 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result3.get("type") is FlowResultType.FORM - assert result3.get("step_id") == "cloud_select_device" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == 
"cloud_select_device" - result4 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result4.get("type") is FlowResultType.CREATE_ENTRY - assert result4.get("title") == "Frenck's LaMetric" - assert result4.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result4 - assert result4["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") async def test_full_cloud_import_flow_single_device( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, ) -> None: @@ -140,25 +133,24 @@ async def test_full_cloud_import_flow_single_device( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) - assert result2.get("type") is FlowResultType.EXTERNAL_STEP - assert result2.get("url") == ( + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( "https://developer.lametric.com/api/v2/oauth2/authorize" "?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" @@ -186,27 +178,28 @@ async def test_full_cloud_import_flow_single_device( mock_lametric_cloud.devices.return_value = [ mock_lametric_cloud.devices.return_value[0] ] - result3 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in 
result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_full_manual( hass: HomeAssistant, - mock_setup_entry: MagicMock, mock_lametric: MagicMock, ) -> None: """Check a full flow manual entry.""" @@ -214,31 +207,34 @@ async def test_full_manual( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "manual_entry" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual_entry" - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 @@ -246,15 +242,12 @@ async def test_full_manual( notification: Notification = mock_lametric.notify.mock_calls[0][2]["notification"] assert notification.model.sound == Sound(sound=NotificationSound.WIN) - assert len(mock_setup_entry.mock_calls) == 1 - -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") async def test_full_ssdp_with_cloud_import( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, ) -> None: @@ -263,25 +256,24 @@ async def test_full_ssdp_with_cloud_import( DOMAIN, context={"source": SOURCE_SSDP}, data=SSDP_DISCOVERY_INFO ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert 
result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) - assert result2.get("type") is FlowResultType.EXTERNAL_STEP - assert result2.get("url") == ( + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( "https://developer.lametric.com/api/v2/oauth2/authorize" "?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" @@ -304,27 +296,27 @@ async def test_full_ssdp_with_cloud_import( }, ) - result3 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_full_ssdp_manual_entry( hass: HomeAssistant, - mock_setup_entry: MagicMock, mock_lametric: MagicMock, ) -> None: """Check a full flow triggered by SSDP, with manual API key entry.""" @@ -332,35 +324,35 @@ async def test_full_ssdp_manual_entry( DOMAIN, context={"source": SOURCE_SSDP}, data=SSDP_DISCOVERY_INFO ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "manual_entry" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual_entry" - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_KEY: "mock-api-key"} ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's 
LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 @pytest.mark.parametrize( @@ -390,8 +382,8 @@ async def test_ssdp_abort_invalid_discovery( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_SSDP}, data=data ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == reason + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason @pytest.mark.usefixtures("current_request_with_host") @@ -409,16 +401,15 @@ async def test_cloud_import_updates_existing_entry( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -433,14 +424,14 @@ async def test_cloud_import_updates_existing_entry( "expires_in": 60, }, ) - await hass.config_entries.flow.async_configure(flow_id) + await hass.config_entries.flow.async_configure(result["flow_id"]) - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -463,18 +454,18 @@ async def test_manual_updates_existing_entry( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result3.get("type") is FlowResultType.ABORT - assert result3.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -495,8 +486,8 @@ async def test_discovery_updates_existing_entry( DOMAIN, context={"source": SOURCE_SSDP}, data=SSDP_DISCOVERY_INFO ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", 
CONF_API_KEY: "mock-from-fixture", @@ -515,16 +506,15 @@ async def test_cloud_abort_no_devices( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -542,14 +532,15 @@ async def test_cloud_abort_no_devices( # Stage there are no devices mock_lametric_cloud.devices.return_value = [] - result2 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "no_devices" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices" assert len(mock_lametric_cloud.devices.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( ("side_effect", "reason"), [ @@ -562,7 +553,6 @@ async def test_cloud_abort_no_devices( async def test_manual_errors( hass: HomeAssistant, mock_lametric: MagicMock, - mock_setup_entry: MagicMock, side_effect: Exception, reason: str, ) -> None: @@ -570,46 +560,47 @@ async def test_manual_errors( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) mock_lametric.device.side_effect = side_effect - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "manual_entry" - assert result2.get("errors") == {"base": reason} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual_entry" + assert result["errors"] == {"base": reason} assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric.device.mock_calls) == 2 assert len(mock_lametric.notify.mock_calls) == 
1 - assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") @pytest.mark.parametrize( ("side_effect", "reason"), [ @@ -623,7 +614,6 @@ async def test_cloud_errors( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, side_effect: Exception, @@ -633,16 +623,15 @@ async def test_cloud_errors( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -657,41 +646,41 @@ async def test_cloud_errors( "expires_in": 60, }, ) - await hass.config_entries.flow.async_configure(flow_id) + await hass.config_entries.flow.async_configure(result["flow_id"]) mock_lametric.device.side_effect = side_effect - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "cloud_select_device" - assert result2.get("errors") == {"base": reason} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_select_device" + assert result["errors"] == {"base": reason} assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 2 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 async def test_dhcp_discovery_updates_entry( @@ -711,8 +700,8 @@ async def test_dhcp_discovery_updates_entry( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_API_KEY: "mock-from-fixture", CONF_HOST: "127.0.0.42", @@ -737,8 +726,8 @@ 
async def test_dhcp_unknown_device( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "unknown" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" @pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") @@ -753,26 +742,16 @@ async def test_reauth_cloud_import( """Test reauth flow importing api keys from the cloud.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) - - flow_id = result["flow_id"] + result = await mock_config_entry.start_reauth_flow(hass) await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -789,10 +768,10 @@ async def test_reauth_cloud_import( }, ) - result2 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -817,26 +796,16 @@ async def test_reauth_cloud_abort_device_not_found( mock_config_entry.add_to_hass(hass) hass.config_entries.async_update_entry(mock_config_entry, unique_id="UKNOWN_DEVICE") - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) - - flow_id = result["flow_id"] + result = await mock_config_entry.start_reauth_flow(hass) await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -853,10 +822,10 @@ async def test_reauth_cloud_abort_device_not_found( }, ) - result2 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_device_not_found" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_device_not_found" assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 0 @@ -872,28 +841,18 @@ async def test_reauth_manual( """Test reauth flow with manual entry.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) - - flow_id = result["flow_id"] + result = await mock_config_entry.start_reauth_flow(hass) await hass.config_entries.flow.async_configure( - 
flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_KEY: "mock-api-key"} ) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -914,28 +873,18 @@ async def test_reauth_manual_sky( """Test reauth flow with manual entry for LaMetric Sky.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) - - flow_id = result["flow_id"] + result = await mock_config_entry.start_reauth_flow(hass) await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_KEY: "mock-api-key"} ) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", diff --git a/tests/components/lametric/test_init.py b/tests/components/lametric/test_init.py index 7352721e992..2fd8219ea51 100644 --- a/tests/components/lametric/test_init.py +++ b/tests/components/lametric/test_init.py @@ -74,7 +74,7 @@ async def test_config_entry_authentication_failed( assert len(flows) == 1 flow = flows[0] - assert flow.get("step_id") == "choice_enter_manual_or_fetch_cloud" + assert flow["step_id"] == "choice_enter_manual_or_fetch_cloud" assert flow.get("handler") == DOMAIN assert "context" in flow diff --git a/tests/components/lametric/test_notify.py b/tests/components/lametric/test_notify.py index a46d97f8f81..d30a8c86543 100644 --- a/tests/components/lametric/test_notify.py +++ b/tests/components/lametric/test_notify.py @@ -100,7 +100,7 @@ async def test_notification_options( assert len(notification.model.frames) == 1 frame = notification.model.frames[0] assert type(frame) is Simple - assert frame.icon == 1234 + assert frame.icon == "1234" assert frame.text == "The secret of getting ahead is getting started" diff --git a/tests/components/lametric/test_services.py b/tests/components/lametric/test_services.py index d3fbd0a18e0..b9b5c4c8b3a 100644 --- a/tests/components/lametric/test_services.py +++ b/tests/components/lametric/test_services.py @@ -190,7 +190,7 @@ async def test_service_message( assert len(notification.model.frames) == 1 frame = notification.model.frames[0] assert type(frame) is Simple - assert frame.icon == 6916 + assert frame.icon == "6916" assert frame.text == "Meow!" 
mock_lametric.notify.side_effect = LaMetricError diff --git a/tests/components/laundrify/__init__.py b/tests/components/laundrify/__init__.py index c09c6290adf..cb4ab1ad010 100644 --- a/tests/components/laundrify/__init__.py +++ b/tests/components/laundrify/__init__.py @@ -1,22 +1 @@ """Tests for the laundrify integration.""" - -from homeassistant.components.laundrify import DOMAIN -from homeassistant.const import CONF_ACCESS_TOKEN -from homeassistant.core import HomeAssistant - -from .const import VALID_ACCESS_TOKEN, VALID_ACCOUNT_ID - -from tests.common import MockConfigEntry - - -def create_entry( - hass: HomeAssistant, access_token: str = VALID_ACCESS_TOKEN -) -> MockConfigEntry: - """Create laundrify entry in Home Assistant.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=VALID_ACCOUNT_ID, - data={CONF_ACCESS_TOKEN: access_token}, - ) - entry.add_to_hass(hass) - return entry diff --git a/tests/components/laundrify/conftest.py b/tests/components/laundrify/conftest.py index 2f6496c06a5..4a78a2e9025 100644 --- a/tests/components/laundrify/conftest.py +++ b/tests/components/laundrify/conftest.py @@ -1,52 +1,69 @@ """Configure py.test.""" import json -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from laundrify_aio import LaundrifyAPI, LaundrifyDevice import pytest +from homeassistant.components.laundrify import DOMAIN +from homeassistant.components.laundrify.const import MANUFACTURER +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.core import HomeAssistant + from .const import VALID_ACCESS_TOKEN, VALID_ACCOUNT_ID -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture +from tests.typing import ClientSessionGenerator -@pytest.fixture(name="laundrify_setup_entry") -def laundrify_setup_entry_fixture(): - """Mock laundrify setup entry function.""" - with patch( - "homeassistant.components.laundrify.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry +@pytest.fixture(name="mock_device") +def laundrify_sensor_fixture() -> LaundrifyDevice: + """Return a default Laundrify power sensor mock.""" + # Load test data from machines.json + machine_data = json.loads(load_fixture("laundrify/machines.json"))[0] + + mock_device = AsyncMock(spec=LaundrifyDevice) + mock_device.id = machine_data["id"] + mock_device.manufacturer = MANUFACTURER + mock_device.model = machine_data["model"] + mock_device.name = machine_data["name"] + mock_device.firmwareVersion = machine_data["firmwareVersion"] + return mock_device -@pytest.fixture(name="laundrify_exchange_code") -def laundrify_exchange_code_fixture(): - """Mock laundrify exchange_auth_code function.""" - with patch( - "laundrify_aio.LaundrifyAPI.exchange_auth_code", - return_value=VALID_ACCESS_TOKEN, - ) as exchange_code_mock: - yield exchange_code_mock - - -@pytest.fixture(name="laundrify_validate_token") -def laundrify_validate_token_fixture(): - """Mock laundrify validate_token function.""" - with patch( - "laundrify_aio.LaundrifyAPI.validate_token", - return_value=True, - ) as validate_token_mock: - yield validate_token_mock +@pytest.fixture(name="laundrify_config_entry") +async def laundrify_setup_config_entry( + hass: HomeAssistant, access_token: str = VALID_ACCESS_TOKEN +) -> MockConfigEntry: + """Create laundrify entry in Home Assistant.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=VALID_ACCOUNT_ID, + data={CONF_ACCESS_TOKEN: access_token}, + minor_version=2, + ) + entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return entry @pytest.fixture(name="laundrify_api_mock", autouse=True) -def laundrify_api_fixture(laundrify_exchange_code, laundrify_validate_token): +def laundrify_api_fixture(hass_client: ClientSessionGenerator): """Mock valid laundrify API responses.""" with ( patch( "laundrify_aio.LaundrifyAPI.get_account_id", - return_value=VALID_ACCOUNT_ID, + return_value=1234, + ), + patch( + "laundrify_aio.LaundrifyAPI.validate_token", + return_value=True, + ), + patch( + "laundrify_aio.LaundrifyAPI.exchange_auth_code", + return_value=VALID_ACCESS_TOKEN, ), patch( "laundrify_aio.LaundrifyAPI.get_machines", @@ -54,6 +71,6 @@ def laundrify_api_fixture(laundrify_exchange_code, laundrify_validate_token): LaundrifyDevice(machine, LaundrifyAPI) for machine in json.loads(load_fixture("laundrify/machines.json")) ], - ) as get_machines_mock, + ), ): - yield get_machines_mock + yield LaundrifyAPI(VALID_ACCESS_TOKEN, hass_client) diff --git a/tests/components/laundrify/fixtures/machines.json b/tests/components/laundrify/fixtures/machines.json index 3397212659f..4319e76880e 100644 --- a/tests/components/laundrify/fixtures/machines.json +++ b/tests/components/laundrify/fixtures/machines.json @@ -5,6 +5,7 @@ "status": "OFF", "internalIP": "192.168.0.123", "model": "SU02", - "firmwareVersion": "2.1.0" + "firmwareVersion": "2.1.0", + "totalEnergy": 1337.0 } ] diff --git a/tests/components/laundrify/test_config_flow.py b/tests/components/laundrify/test_config_flow.py index 69a4b957cf5..54e849f79d0 100644 --- a/tests/components/laundrify/test_config_flow.py +++ b/tests/components/laundrify/test_config_flow.py @@ -3,16 +3,17 @@ from laundrify_aio import exceptions from homeassistant.components.laundrify.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CODE, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . 
import create_entry from .const import VALID_ACCESS_TOKEN, VALID_AUTH_CODE, VALID_USER_INPUT +from tests.common import MockConfigEntry -async def test_form(hass: HomeAssistant, laundrify_setup_entry) -> None: + +async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -31,14 +32,12 @@ async def test_form(hass: HomeAssistant, laundrify_setup_entry) -> None: assert result["data"] == { CONF_ACCESS_TOKEN: VALID_ACCESS_TOKEN, } - assert len(laundrify_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "1234" -async def test_form_invalid_format( - hass: HomeAssistant, laundrify_exchange_code -) -> None: +async def test_form_invalid_format(hass: HomeAssistant, laundrify_api_mock) -> None: """Test we handle invalid format.""" - laundrify_exchange_code.side_effect = exceptions.InvalidFormat + laundrify_api_mock.exchange_auth_code.side_effect = exceptions.InvalidFormat result = await hass.config_entries.flow.async_init( DOMAIN, @@ -50,9 +49,9 @@ async def test_form_invalid_format( assert result["errors"] == {CONF_CODE: "invalid_format"} -async def test_form_invalid_auth(hass: HomeAssistant, laundrify_exchange_code) -> None: +async def test_form_invalid_auth(hass: HomeAssistant, laundrify_api_mock) -> None: """Test we handle invalid auth.""" - laundrify_exchange_code.side_effect = exceptions.UnknownAuthCode + laundrify_api_mock.exchange_auth_code.side_effect = exceptions.UnknownAuthCode result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, @@ -63,11 +62,11 @@ async def test_form_invalid_auth(hass: HomeAssistant, laundrify_exchange_code) - assert result["errors"] == {CONF_CODE: "invalid_auth"} -async def test_form_cannot_connect( - hass: HomeAssistant, laundrify_exchange_code -) -> None: +async def test_form_cannot_connect(hass: HomeAssistant, laundrify_api_mock) -> None: """Test we handle cannot connect error.""" - laundrify_exchange_code.side_effect = exceptions.ApiConnectionException + laundrify_api_mock.exchange_auth_code.side_effect = ( + exceptions.ApiConnectionException + ) result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, @@ -78,11 +77,9 @@ async def test_form_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} -async def test_form_unkown_exception( - hass: HomeAssistant, laundrify_exchange_code -) -> None: +async def test_form_unkown_exception(hass: HomeAssistant, laundrify_api_mock) -> None: """Test we handle all other errors.""" - laundrify_exchange_code.side_effect = Exception + laundrify_api_mock.exchange_auth_code.side_effect = Exception result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, @@ -93,11 +90,11 @@ async def test_form_unkown_exception( assert result["errors"] == {"base": "unknown"} -async def test_step_reauth(hass: HomeAssistant) -> None: +async def test_step_reauth( + hass: HomeAssistant, laundrify_config_entry: MockConfigEntry +) -> None: """Test the reauth form is shown.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH} - ) + result = await laundrify_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -111,9 +108,10 @@ async def test_step_reauth(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM -async def test_integration_already_exists(hass: HomeAssistant) -> 
None: +async def test_integration_already_exists( + hass: HomeAssistant, laundrify_config_entry: MockConfigEntry +) -> None: """Test we only allow a single config flow.""" - create_entry(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER} ) diff --git a/tests/components/laundrify/test_coordinator.py b/tests/components/laundrify/test_coordinator.py index 0a395c736de..64b486d1285 100644 --- a/tests/components/laundrify/test_coordinator.py +++ b/tests/components/laundrify/test_coordinator.py @@ -1,52 +1,70 @@ """Test the laundrify coordinator.""" -from laundrify_aio import exceptions +from datetime import timedelta -from homeassistant.components.laundrify.const import DOMAIN -from homeassistant.core import HomeAssistant +from freezegun.api import FrozenDateTimeFactory +from laundrify_aio import LaundrifyDevice, exceptions -from . import create_entry +from homeassistant.components.laundrify.const import DEFAULT_POLL_INTERVAL +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant, State +from homeassistant.util import slugify + +from tests.common import async_fire_time_changed -async def test_coordinator_update_success(hass: HomeAssistant) -> None: +def get_coord_entity(hass: HomeAssistant, mock_device: LaundrifyDevice) -> State: + """Get the coordinated energy sensor entity.""" + device_slug = slugify(mock_device.name, separator="_") + return hass.states.get(f"sensor.{device_slug}_energy") + + +async def test_coordinator_update_success( + hass: HomeAssistant, + laundrify_config_entry, + mock_device: LaundrifyDevice, + freezer: FrozenDateTimeFactory, +) -> None: """Test the coordinator update is performed successfully.""" - config_entry = create_entry(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - coordinator = hass.data[DOMAIN][config_entry.entry_id]["coordinator"] - await coordinator.async_refresh() + freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert coordinator.last_update_success + coord_entity = get_coord_entity(hass, mock_device) + assert coord_entity.state != STATE_UNAVAILABLE async def test_coordinator_update_unauthorized( - hass: HomeAssistant, laundrify_api_mock + hass: HomeAssistant, + laundrify_config_entry, + laundrify_api_mock, + mock_device: LaundrifyDevice, + freezer: FrozenDateTimeFactory, ) -> None: """Test the coordinator update fails if an UnauthorizedException is thrown.""" - config_entry = create_entry(hass) - await hass.config_entries.async_setup(config_entry.entry_id) + laundrify_api_mock.get_machines.side_effect = exceptions.UnauthorizedException + + freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() - coordinator = hass.data[DOMAIN][config_entry.entry_id]["coordinator"] - laundrify_api_mock.side_effect = exceptions.UnauthorizedException - await coordinator.async_refresh() - await hass.async_block_till_done() - - assert not coordinator.last_update_success + coord_entity = get_coord_entity(hass, mock_device) + assert coord_entity.state == STATE_UNAVAILABLE async def test_coordinator_update_connection_failed( - hass: HomeAssistant, laundrify_api_mock + hass: HomeAssistant, + laundrify_config_entry, + laundrify_api_mock, + mock_device: LaundrifyDevice, + freezer: FrozenDateTimeFactory, ) -> None: """Test the coordinator update fails if an ApiConnectionException is thrown.""" 
- config_entry = create_entry(hass) - await hass.config_entries.async_setup(config_entry.entry_id) + laundrify_api_mock.get_machines.side_effect = exceptions.ApiConnectionException + + freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() - coordinator = hass.data[DOMAIN][config_entry.entry_id]["coordinator"] - laundrify_api_mock.side_effect = exceptions.ApiConnectionException - await coordinator.async_refresh() - await hass.async_block_till_done() - - assert not coordinator.last_update_success + coord_entity = get_coord_entity(hass, mock_device) + assert coord_entity.state == STATE_UNAVAILABLE diff --git a/tests/components/laundrify/test_init.py b/tests/components/laundrify/test_init.py index e3ec54a3225..117da661e29 100644 --- a/tests/components/laundrify/test_init.py +++ b/tests/components/laundrify/test_init.py @@ -4,56 +4,71 @@ from laundrify_aio import exceptions from homeassistant.components.laundrify.const import DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant -from . import create_entry +from .const import VALID_ACCESS_TOKEN + +from tests.common import MockConfigEntry async def test_setup_entry_api_unauthorized( - hass: HomeAssistant, laundrify_validate_token + hass: HomeAssistant, + laundrify_api_mock, + laundrify_config_entry: MockConfigEntry, ) -> None: """Test that ConfigEntryAuthFailed is thrown when authentication fails.""" - laundrify_validate_token.side_effect = exceptions.UnauthorizedException - config_entry = create_entry(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + laundrify_api_mock.validate_token.side_effect = exceptions.UnauthorizedException + await hass.config_entries.async_reload(laundrify_config_entry.entry_id) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert config_entry.state is ConfigEntryState.SETUP_ERROR + assert laundrify_config_entry.state is ConfigEntryState.SETUP_ERROR assert not hass.data.get(DOMAIN) async def test_setup_entry_api_cannot_connect( - hass: HomeAssistant, laundrify_validate_token + hass: HomeAssistant, + laundrify_api_mock, + laundrify_config_entry: MockConfigEntry, ) -> None: """Test that ApiConnectionException is thrown when connection fails.""" - laundrify_validate_token.side_effect = exceptions.ApiConnectionException - config_entry = create_entry(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + laundrify_api_mock.validate_token.side_effect = exceptions.ApiConnectionException + await hass.config_entries.async_reload(laundrify_config_entry.entry_id) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert config_entry.state is ConfigEntryState.SETUP_RETRY + assert laundrify_config_entry.state is ConfigEntryState.SETUP_RETRY assert not hass.data.get(DOMAIN) -async def test_setup_entry_successful(hass: HomeAssistant) -> None: +async def test_setup_entry_successful( + hass: HomeAssistant, laundrify_config_entry: MockConfigEntry +) -> None: """Test entry can be setup successfully.""" - config_entry = create_entry(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert config_entry.state is ConfigEntryState.LOADED + assert laundrify_config_entry.state is ConfigEntryState.LOADED -async def 
test_setup_entry_unload(hass: HomeAssistant) -> None: +async def test_setup_entry_unload( + hass: HomeAssistant, laundrify_config_entry: MockConfigEntry +) -> None: """Test unloading the laundrify entry.""" - config_entry = create_entry(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.config_entries.async_unload(config_entry.entry_id) + await hass.config_entries.async_unload(laundrify_config_entry.entry_id) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert config_entry.state is ConfigEntryState.NOT_LOADED + assert laundrify_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None: + """Test migrating a 1.1 config entry to 1.2.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_ACCESS_TOKEN: VALID_ACCESS_TOKEN}, + version=1, + minor_version=1, + unique_id=123456, + ) + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + assert entry.version == 1 + assert entry.minor_version == 2 + assert entry.unique_id == "123456" diff --git a/tests/components/laundrify/test_sensor.py b/tests/components/laundrify/test_sensor.py new file mode 100644 index 00000000000..49b60200c1d --- /dev/null +++ b/tests/components/laundrify/test_sensor.py @@ -0,0 +1,94 @@ +"""Test the laundrify sensor platform.""" + +from datetime import timedelta +import logging +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from laundrify_aio import LaundrifyDevice +from laundrify_aio.exceptions import LaundrifyDeviceException +import pytest + +from homeassistant.components.laundrify.const import ( + DEFAULT_POLL_INTERVAL, + DOMAIN, + MODELS, +) +from homeassistant.components.sensor import SensorDeviceClass +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_UNIT_OF_MEASUREMENT, + STATE_UNKNOWN, + UnitOfPower, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.util import slugify + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_laundrify_sensor_init( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_device: LaundrifyDevice, + laundrify_config_entry: MockConfigEntry, +) -> None: + """Test Laundrify sensor default state.""" + device_slug = slugify(mock_device.name, separator="_") + + state = hass.states.get(f"sensor.{device_slug}_power") + assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.POWER + assert state.state == STATE_UNKNOWN + + device = device_registry.async_get_device({(DOMAIN, mock_device.id)}) + assert device is not None + assert device.name == mock_device.name + assert device.identifiers == {(DOMAIN, mock_device.id)} + assert device.manufacturer == mock_device.manufacturer + assert device.model == MODELS[mock_device.model] + assert device.sw_version == mock_device.firmwareVersion + + +async def test_laundrify_sensor_update( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_device: LaundrifyDevice, + laundrify_config_entry: MockConfigEntry, +) -> None: + """Test Laundrify sensor update.""" + device_slug = slugify(mock_device.name, separator="_") + + state = hass.states.get(f"sensor.{device_slug}_power") + assert state.state == STATE_UNKNOWN + + with patch("laundrify_aio.LaundrifyDevice.get_power", return_value=95): + freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = 
hass.states.get(f"sensor.{device_slug}_power") + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfPower.WATT + assert state.state == "95" + + +async def test_laundrify_sensor_update_failure( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, + mock_device: LaundrifyDevice, + laundrify_config_entry: MockConfigEntry, +) -> None: + """Test that update failures are logged.""" + caplog.set_level(logging.DEBUG) + + # test get_power() to raise a LaundrifyDeviceException + with patch( + "laundrify_aio.LaundrifyDevice.get_power", + side_effect=LaundrifyDeviceException("Raising error to test update failure."), + ): + freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert f"Couldn't load power for {mock_device.id}_power" in caplog.text diff --git a/tests/components/lawn_mower/test_init.py b/tests/components/lawn_mower/test_init.py index 16f32da7e04..0735d4541ff 100644 --- a/tests/components/lawn_mower/test_init.py +++ b/tests/components/lawn_mower/test_init.py @@ -176,4 +176,4 @@ async def test_lawn_mower_state(hass: HomeAssistant) -> None: lawn_mower.hass = hass lawn_mower.start_mowing() - assert lawn_mower.state == str(LawnMowerActivity.MOWING) + assert lawn_mower.state == LawnMowerActivity.MOWING diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index b1f28b28465..3c5979c3c36 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -1,16 +1,16 @@ """Test configuration and mocks for LCN component.""" -from collections.abc import AsyncGenerator import json from typing import Any from unittest.mock import AsyncMock, Mock, patch import pypck -from pypck.connection import PchkConnectionManager import pypck.module from pypck.module import GroupConnection, ModuleConnection import pytest +from homeassistant.components.lcn import PchkConnectionManager +from homeassistant.components.lcn.config_flow import LcnFlowHandler from homeassistant.components.lcn.const import DOMAIN from homeassistant.components.lcn.helpers import AddressType, generate_unique_id from homeassistant.const import CONF_ADDRESS, CONF_DEVICES, CONF_ENTITIES, CONF_HOST @@ -20,6 +20,8 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture +LATEST_CONFIG_ENTRY_VERSION = (LcnFlowHandler.VERSION, LcnFlowHandler.MINOR_VERSION) + class MockModuleConnection(ModuleConnection): """Fake a LCN module connection.""" @@ -54,17 +56,27 @@ class MockPchkConnectionManager(PchkConnectionManager): async def async_close(self) -> None: """Mock closing a connection to PCHK.""" - @patch.object(pypck.connection, "ModuleConnection", MockModuleConnection) - @patch.object(pypck.connection, "GroupConnection", MockGroupConnection) def get_address_conn(self, addr, request_serials=False): """Get LCN address connection.""" return super().get_address_conn(addr, request_serials) + @patch.object(pypck.connection, "ModuleConnection", MockModuleConnection) + def get_module_conn(self, addr, request_serials=False): + """Get LCN module connection.""" + return super().get_module_conn(addr, request_serials) + + @patch.object(pypck.connection, "GroupConnection", MockGroupConnection) + def get_group_conn(self, addr): + """Get LCN group connection.""" + return super().get_group_conn(addr) + scan_modules = AsyncMock() send_command = AsyncMock() -def create_config_entry(name: str) -> MockConfigEntry: +def create_config_entry( + name: str, 
version: tuple[int, int] = LATEST_CONFIG_ENTRY_VERSION +) -> MockConfigEntry: """Set up config entries with configuration data.""" fixture_filename = f"lcn/config_entry_{name}.json" entry_data = json.loads(load_fixture(fixture_filename)) @@ -76,13 +88,14 @@ def create_config_entry(name: str) -> MockConfigEntry: options = {} title = entry_data[CONF_HOST] - unique_id = fixture_filename return MockConfigEntry( + entry_id=fixture_filename, domain=DOMAIN, title=title, - unique_id=unique_id, data=entry_data, options=options, + version=version[0], + minor_version=version[1], ) @@ -98,10 +111,9 @@ def create_config_entry_myhome() -> MockConfigEntry: return create_config_entry("myhome") -@pytest.fixture(name="lcn_connection") async def init_integration( hass: HomeAssistant, entry: MockConfigEntry -) -> AsyncGenerator[MockPchkConnectionManager]: +) -> MockPchkConnectionManager: """Set up the LCN integration in Home Assistant.""" hass.http = Mock() # needs to be mocked as hass.http.register_static_path is called when registering the frontend lcn_connection = None @@ -113,12 +125,13 @@ async def init_integration( entry.add_to_hass(hass) with patch( - "pypck.connection.PchkConnectionManager", + "homeassistant.components.lcn.PchkConnectionManager", side_effect=lcn_connection_factory, ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - yield lcn_connection + + return lcn_connection async def setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/lcn/fixtures/config.json b/tests/components/lcn/fixtures/config.json index 13b3dd5feed..ed3e3500900 100644 --- a/tests/components/lcn/fixtures/config.json +++ b/tests/components/lcn/fixtures/config.json @@ -91,6 +91,35 @@ "motor": "motor1" } ], + "climates": [ + { + "name": "Climate1", + "address": "s0.m7", + "source": "var1", + "setpoint": "r1varsetpoint", + "lockable": true, + "min_temp": 0, + "max_temp": 40, + "unit_of_measurement": "°C" + } + ], + "scenes": [ + { + "name": "Romantic", + "address": "s0.m7", + "register": 0, + "scene": 0, + "outputs": ["output1", "output2", "relay1"] + }, + { + "name": "Romantic Transition", + "address": "s0.m7", + "register": 0, + "scene": 1, + "outputs": ["output1", "output2", "relay1"], + "transition": 10 + } + ], "binary_sensors": [ { "name": "Sensor_LockRegulator1", diff --git a/tests/components/lcn/fixtures/config_entry_myhome.json b/tests/components/lcn/fixtures/config_entry_myhome.json index a0f8e7d3e10..5abc9749b46 100644 --- a/tests/components/lcn/fixtures/config_entry_myhome.json +++ b/tests/components/lcn/fixtures/config_entry_myhome.json @@ -6,6 +6,7 @@ "password": "lcn", "sk_num_tries": 0, "dim_mode": "STEPS200", + "acknowledge": false, "devices": [], "entities": [ { diff --git a/tests/components/lcn/fixtures/config_entry_pchk.json b/tests/components/lcn/fixtures/config_entry_pchk.json index 08ccd194578..068b8757707 100644 --- a/tests/components/lcn/fixtures/config_entry_pchk.json +++ b/tests/components/lcn/fixtures/config_entry_pchk.json @@ -6,6 +6,7 @@ "password": "lcn", "sk_num_tries": 0, "dim_mode": "STEPS200", + "acknowledge": false, "devices": [ { "address": [0, 7, false], @@ -31,7 +32,7 @@ "domain_data": { "output": "OUTPUT1", "dimmable": true, - "transition": 5000.0 + "transition": 5.0 } }, { @@ -42,7 +43,7 @@ "domain_data": { "output": "OUTPUT2", "dimmable": false, - "transition": 0 + "transition": 0.0 } }, { @@ -92,6 +93,24 @@ "output": "RELAY2" } }, + { + "address": [0, 7, false], + "name": "Switch_Regulator1", + "resource": "r1varsetpoint", 
+ "domain": "switch", + "domain_data": { + "output": "R1VARSETPOINT" + } + }, + { + "address": [0, 7, false], + "name": "Switch_KeyLock1", + "resource": "a1", + "domain": "switch", + "domain_data": { + "output": "A1" + } + }, { "address": [0, 5, true], "name": "Switch_Group5", @@ -121,6 +140,44 @@ "reverse_time": "RT1200" } }, + { + "address": [0, 7, false], + "name": "Climate1", + "resource": "var1.r1varsetpoint", + "domain": "climate", + "domain_data": { + "source": "VAR1", + "setpoint": "R1VARSETPOINT", + "lockable": true, + "min_temp": 0.0, + "max_temp": 40.0, + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Romantic", + "resource": "0.0", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 0, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": 0.0 + } + }, + { + "address": [0, 7, false], + "name": "Romantic Transition", + "resource": "0.1", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 1, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": 10.0 + } + }, { "address": [0, 7, false], "name": "Sensor_LockRegulator1", diff --git a/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json b/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json new file mode 100644 index 00000000000..e1893c30b42 --- /dev/null +++ b/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json @@ -0,0 +1,248 @@ +{ + "host": "pchk", + "ip_address": "192.168.2.41", + "port": 4114, + "username": "lcn", + "password": "lcn", + "sk_num_tries": 0, + "dim_mode": "STEPS200", + "devices": [ + { + "address": [0, 7, false], + "name": "TestModule", + "hardware_serial": -1, + "software_serial": -1, + "hardware_type": -1 + }, + { + "address": [0, 5, true], + "name": "TestGroup", + "hardware_serial": -1, + "software_serial": -1, + "hardware_type": -1 + } + ], + "entities": [ + { + "address": [0, 7, false], + "name": "Light_Output1", + "resource": "output1", + "domain": "light", + "domain_data": { + "output": "OUTPUT1", + "dimmable": true, + "transition": 5000.0 + } + }, + { + "address": [0, 7, false], + "name": "Light_Output2", + "resource": "output2", + "domain": "light", + "domain_data": { + "output": "OUTPUT2", + "dimmable": false, + "transition": 0 + } + }, + { + "address": [0, 7, false], + "name": "Light_Relay1", + "resource": "relay1", + "domain": "light", + "domain_data": { + "output": "RELAY1", + "dimmable": false, + "transition": 0.0 + } + }, + { + "address": [0, 7, false], + "name": "Switch_Output1", + "resource": "output1", + "domain": "switch", + "domain_data": { + "output": "OUTPUT1" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Output2", + "resource": "output2", + "domain": "switch", + "domain_data": { + "output": "OUTPUT2" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Relay1", + "resource": "relay1", + "domain": "switch", + "domain_data": { + "output": "RELAY1" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Relay2", + "resource": "relay2", + "domain": "switch", + "domain_data": { + "output": "RELAY2" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Regulator1", + "resource": "r1varsetpoint", + "domain": "switch", + "domain_data": { + "output": "R1VARSETPOINT" + } + }, + { + "address": [0, 7, false], + "name": "Switch_KeyLock1", + "resource": "a1", + "domain": "switch", + "domain_data": { + "output": "A1" + } + }, + { + "address": [0, 5, true], + "name": "Switch_Group5", + "resource": "relay1", + "domain": "switch", + "domain_data": { + "output": "RELAY1" + } + }, + { + 
"address": [0, 7, false], + "name": "Cover_Outputs", + "resource": "outputs", + "domain": "cover", + "domain_data": { + "motor": "OUTPUTS", + "reverse_time": "RT1200" + } + }, + { + "address": [0, 7, false], + "name": "Cover_Relays", + "resource": "motor1", + "domain": "cover", + "domain_data": { + "motor": "MOTOR1", + "reverse_time": "RT1200" + } + }, + { + "address": [0, 7, false], + "name": "Climate1", + "resource": "var1.r1varsetpoint", + "domain": "climate", + "domain_data": { + "source": "VAR1", + "setpoint": "R1VARSETPOINT", + "lockable": true, + "min_temp": 0.0, + "max_temp": 40.0, + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Romantic", + "resource": "0.0", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 0, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": null + } + }, + { + "address": [0, 7, false], + "name": "Romantic Transition", + "resource": "0.1", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 1, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": 10000 + } + }, + { + "address": [0, 7, false], + "name": "Sensor_LockRegulator1", + "resource": "r1varsetpoint", + "domain": "binary_sensor", + "domain_data": { + "source": "R1VARSETPOINT" + } + }, + { + "address": [0, 7, false], + "name": "Binary_Sensor1", + "resource": "binsensor1", + "domain": "binary_sensor", + "domain_data": { + "source": "BINSENSOR1" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_KeyLock", + "resource": "a5", + "domain": "binary_sensor", + "domain_data": { + "source": "A5" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_Var1", + "resource": "var1", + "domain": "sensor", + "domain_data": { + "source": "VAR1", + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_Setpoint1", + "resource": "r1varsetpoint", + "domain": "sensor", + "domain_data": { + "source": "R1VARSETPOINT", + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_Led6", + "resource": "led6", + "domain": "sensor", + "domain_data": { + "source": "LED6", + "unit_of_measurement": "NATIVE" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_LogicOp1", + "resource": "logicop1", + "domain": "sensor", + "domain_data": { + "source": "LOGICOP1", + "unit_of_measurement": "NATIVE" + } + } + ] +} diff --git a/tests/components/lcn/fixtures/config_entry_pchk_v1_2.json b/tests/components/lcn/fixtures/config_entry_pchk_v1_2.json new file mode 100644 index 00000000000..7389079dca9 --- /dev/null +++ b/tests/components/lcn/fixtures/config_entry_pchk_v1_2.json @@ -0,0 +1,249 @@ +{ + "host": "pchk", + "ip_address": "192.168.2.41", + "port": 4114, + "username": "lcn", + "password": "lcn", + "sk_num_tries": 0, + "dim_mode": "STEPS200", + "acknowledge": false, + "devices": [ + { + "address": [0, 7, false], + "name": "TestModule", + "hardware_serial": -1, + "software_serial": -1, + "hardware_type": -1 + }, + { + "address": [0, 5, true], + "name": "TestGroup", + "hardware_serial": -1, + "software_serial": -1, + "hardware_type": -1 + } + ], + "entities": [ + { + "address": [0, 7, false], + "name": "Light_Output1", + "resource": "output1", + "domain": "light", + "domain_data": { + "output": "OUTPUT1", + "dimmable": true, + "transition": 5000.0 + } + }, + { + "address": [0, 7, false], + "name": "Light_Output2", + "resource": "output2", + "domain": "light", + "domain_data": { + "output": "OUTPUT2", + "dimmable": false, + "transition": 0 + } + }, + { + "address": [0, 7, false], + 
"name": "Light_Relay1", + "resource": "relay1", + "domain": "light", + "domain_data": { + "output": "RELAY1", + "dimmable": false, + "transition": 0.0 + } + }, + { + "address": [0, 7, false], + "name": "Switch_Output1", + "resource": "output1", + "domain": "switch", + "domain_data": { + "output": "OUTPUT1" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Output2", + "resource": "output2", + "domain": "switch", + "domain_data": { + "output": "OUTPUT2" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Relay1", + "resource": "relay1", + "domain": "switch", + "domain_data": { + "output": "RELAY1" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Relay2", + "resource": "relay2", + "domain": "switch", + "domain_data": { + "output": "RELAY2" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Regulator1", + "resource": "r1varsetpoint", + "domain": "switch", + "domain_data": { + "output": "R1VARSETPOINT" + } + }, + { + "address": [0, 7, false], + "name": "Switch_KeyLock1", + "resource": "a1", + "domain": "switch", + "domain_data": { + "output": "A1" + } + }, + { + "address": [0, 5, true], + "name": "Switch_Group5", + "resource": "relay1", + "domain": "switch", + "domain_data": { + "output": "RELAY1" + } + }, + { + "address": [0, 7, false], + "name": "Cover_Outputs", + "resource": "outputs", + "domain": "cover", + "domain_data": { + "motor": "OUTPUTS", + "reverse_time": "RT1200" + } + }, + { + "address": [0, 7, false], + "name": "Cover_Relays", + "resource": "motor1", + "domain": "cover", + "domain_data": { + "motor": "MOTOR1", + "reverse_time": "RT1200" + } + }, + { + "address": [0, 7, false], + "name": "Climate1", + "resource": "var1.r1varsetpoint", + "domain": "climate", + "domain_data": { + "source": "VAR1", + "setpoint": "R1VARSETPOINT", + "lockable": true, + "min_temp": 0.0, + "max_temp": 40.0, + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Romantic", + "resource": "0.0", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 0, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": null + } + }, + { + "address": [0, 7, false], + "name": "Romantic Transition", + "resource": "0.1", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 1, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": 10000 + } + }, + { + "address": [0, 7, false], + "name": "Sensor_LockRegulator1", + "resource": "r1varsetpoint", + "domain": "binary_sensor", + "domain_data": { + "source": "R1VARSETPOINT" + } + }, + { + "address": [0, 7, false], + "name": "Binary_Sensor1", + "resource": "binsensor1", + "domain": "binary_sensor", + "domain_data": { + "source": "BINSENSOR1" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_KeyLock", + "resource": "a5", + "domain": "binary_sensor", + "domain_data": { + "source": "A5" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_Var1", + "resource": "var1", + "domain": "sensor", + "domain_data": { + "source": "VAR1", + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_Setpoint1", + "resource": "r1varsetpoint", + "domain": "sensor", + "domain_data": { + "source": "R1VARSETPOINT", + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_Led6", + "resource": "led6", + "domain": "sensor", + "domain_data": { + "source": "LED6", + "unit_of_measurement": "NATIVE" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_LogicOp1", + "resource": "logicop1", + "domain": "sensor", + "domain_data": { 
+ "source": "LOGICOP1", + "unit_of_measurement": "NATIVE" + } + } + ] +} diff --git a/tests/components/lcn/snapshots/test_binary_sensor.ambr b/tests/components/lcn/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..0ad31437dd1 --- /dev/null +++ b/tests/components/lcn/snapshots/test_binary_sensor.ambr @@ -0,0 +1,139 @@ +# serializer version: 1 +# name: test_setup_lcn_binary_sensor[binary_sensor.binary_sensor1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.binary_sensor1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Binary_Sensor1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-binsensor1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_binary_sensor[binary_sensor.binary_sensor1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Binary_Sensor1', + }), + 'context': , + 'entity_id': 'binary_sensor.binary_sensor1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_keylock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.sensor_keylock', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_KeyLock', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-a5', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_keylock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_KeyLock', + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_keylock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_lockregulator1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.sensor_lockregulator1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_LockRegulator1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_lockregulator1-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_LockRegulator1', + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_lockregulator1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_climate.ambr b/tests/components/lcn/snapshots/test_climate.ambr new file mode 100644 index 00000000000..443b13312d1 --- /dev/null +++ b/tests/components/lcn/snapshots/test_climate.ambr @@ -0,0 +1,63 @@ +# serializer version: 1 +# name: test_setup_lcn_climate[climate.climate1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 40.0, + 'min_temp': 0.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.climate1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-var1.r1varsetpoint', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_climate[climate.climate1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Climate1', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 40.0, + 'min_temp': 0.0, + 'supported_features': , + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.climate1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_cover.ambr b/tests/components/lcn/snapshots/test_cover.ambr new file mode 100644 index 00000000000..82a19060d73 --- /dev/null +++ b/tests/components/lcn/snapshots/test_cover.ambr @@ -0,0 +1,97 @@ +# serializer version: 1 +# name: test_setup_lcn_cover[cover.cover_outputs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.cover_outputs', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cover_Outputs', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-outputs', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_cover[cover.cover_outputs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assumed_state': True, + 'friendly_name': 'Cover_Outputs', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.cover_outputs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_setup_lcn_cover[cover.cover_relays-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.cover_relays', + 
'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cover_Relays', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-motor1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_cover[cover.cover_relays-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assumed_state': True, + 'friendly_name': 'Cover_Relays', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.cover_relays', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_light.ambr b/tests/components/lcn/snapshots/test_light.ambr new file mode 100644 index 00000000000..f53d1fdf2dc --- /dev/null +++ b/tests/components/lcn/snapshots/test_light.ambr @@ -0,0 +1,167 @@ +# serializer version: 1 +# name: test_setup_lcn_light[light.light_output1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.light_output1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light_Output1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-output1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_light[light.light_output1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'friendly_name': 'Light_Output1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.light_output1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_light[light.light_output2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.light_output2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light_Output2', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-output2', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_light[light.light_output2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'Light_Output2', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.light_output2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_light[light.light_relay1-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.light_relay1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light_Relay1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-relay1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_light[light.light_relay1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'Light_Relay1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.light_relay1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_scene.ambr b/tests/components/lcn/snapshots/test_scene.ambr new file mode 100644 index 00000000000..c039c4ef951 --- /dev/null +++ b/tests/components/lcn/snapshots/test_scene.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_setup_lcn_scene[scene.romantic-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'scene', + 'entity_category': None, + 'entity_id': 'scene.romantic', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Romantic', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-0.0', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_scene[scene.romantic-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Romantic', + }), + 'context': , + 'entity_id': 'scene.romantic', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_scene[scene.romantic_transition-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'scene', + 'entity_category': None, + 'entity_id': 'scene.romantic_transition', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Romantic Transition', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-0.1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_scene[scene.romantic_transition-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Romantic Transition', + }), + 'context': , + 'entity_id': 'scene.romantic_transition', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- 
diff --git a/tests/components/lcn/snapshots/test_sensor.ambr b/tests/components/lcn/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..56776e3e0f6 --- /dev/null +++ b/tests/components/lcn/snapshots/test_sensor.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_setup_lcn_sensor[sensor.sensor_led6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sensor_led6', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_Led6', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-led6', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_led6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_Led6', + }), + 'context': , + 'entity_id': 'sensor.sensor_led6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_logicop1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sensor_logicop1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_LogicOp1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-logicop1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_logicop1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_LogicOp1', + }), + 'context': , + 'entity_id': 'sensor.sensor_logicop1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_setpoint1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sensor_setpoint1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sensor_Setpoint1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', + 'unit_of_measurement': , + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_setpoint1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Sensor_Setpoint1', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sensor_setpoint1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'unknown', + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_var1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sensor_var1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sensor_Var1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-var1', + 'unit_of_measurement': , + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_var1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Sensor_Var1', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sensor_var1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_switch.ambr b/tests/components/lcn/snapshots/test_switch.ambr new file mode 100644 index 00000000000..36145b8d4fd --- /dev/null +++ b/tests/components/lcn/snapshots/test_switch.ambr @@ -0,0 +1,323 @@ +# serializer version: 1 +# name: test_setup_lcn_switch[switch.switch_group5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_group5', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Group5', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-g000005-relay1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_group5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Group5', + }), + 'context': , + 'entity_id': 'switch.switch_group5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_keylock1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_keylock1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_KeyLock1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-a1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_keylock1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_KeyLock1', + }), + 'context': , + 'entity_id': 'switch.switch_keylock1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_output1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_output1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Output1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-output1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_output1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Output1', + }), + 'context': , + 'entity_id': 'switch.switch_output1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_output2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_output2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Output2', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-output2', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_output2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Output2', + }), + 'context': , + 'entity_id': 'switch.switch_output2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_regulator1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_regulator1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Regulator1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_regulator1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Regulator1', + }), + 'context': , + 'entity_id': 'switch.switch_regulator1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_relay1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 
'entity_category': None, + 'entity_id': 'switch.switch_relay1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Relay1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-relay1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_relay1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Relay1', + }), + 'context': , + 'entity_id': 'switch.switch_relay1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_relay2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_relay2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Relay2', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-relay2', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_relay2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Relay2', + }), + 'context': , + 'entity_id': 'switch.switch_relay2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/lcn/test_binary_sensor.py b/tests/components/lcn/test_binary_sensor.py index 9ba04ac94c7..2f64f421b93 100644 --- a/tests/components/lcn/test_binary_sensor.py +++ b/tests/components/lcn/test_binary_sensor.py @@ -1,68 +1,53 @@ """Test for the LCN binary sensor platform.""" +from unittest.mock import patch + from pypck.inputs import ModStatusBinSensors, ModStatusKeyLocks, ModStatusVar from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import Var, VarValue +import pytest +from syrupy.assertion import SnapshotAssertion +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.lcn import DOMAIN from homeassistant.components.lcn.helpers import get_device_connection -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN -from homeassistant.core import HomeAssistant +from homeassistant.components.script import scripts_with_entity +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component + +from .conftest import MockConfigEntry, init_integration + +from tests.common import snapshot_platform BINARY_SENSOR_LOCKREGULATOR1 = "binary_sensor.sensor_lockregulator1" BINARY_SENSOR_SENSOR1 = "binary_sensor.binary_sensor1" BINARY_SENSOR_KEYLOCK = "binary_sensor.sensor_keylock" -async def test_setup_lcn_binary_sensor(hass: HomeAssistant, lcn_connection) -> None: - 
"""Test the setup of binary sensor.""" - for entity_id in ( - BINARY_SENSOR_LOCKREGULATOR1, - BINARY_SENSOR_SENSOR1, - BINARY_SENSOR_KEYLOCK, - ): - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_UNKNOWN - - -async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: - """Test state of entity.""" - state = hass.states.get(BINARY_SENSOR_LOCKREGULATOR1) - assert state - - state = hass.states.get(BINARY_SENSOR_SENSOR1) - assert state - - state = hass.states.get(BINARY_SENSOR_KEYLOCK) - assert state - - -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +async def test_setup_lcn_binary_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, ) -> None: - """Test the attributes of an entity.""" + """Test the setup of binary sensor.""" + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.BINARY_SENSOR]): + await init_integration(hass, entry) - entity_setpoint1 = entity_registry.async_get(BINARY_SENSOR_LOCKREGULATOR1) - assert entity_setpoint1 - assert entity_setpoint1.unique_id == f"{entry.entry_id}-m000007-r1varsetpoint" - assert entity_setpoint1.original_name == "Sensor_LockRegulator1" - - entity_binsensor1 = entity_registry.async_get(BINARY_SENSOR_SENSOR1) - assert entity_binsensor1 - assert entity_binsensor1.unique_id == f"{entry.entry_id}-m000007-binsensor1" - assert entity_binsensor1.original_name == "Binary_Sensor1" - - entity_keylock = entity_registry.async_get(BINARY_SENSOR_KEYLOCK) - assert entity_keylock - assert entity_keylock.unique_id == f"{entry.entry_id}-m000007-a5" - assert entity_keylock.original_name == "Sensor_KeyLock" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_pushed_lock_setpoint_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, + entry: MockConfigEntry, ) -> None: """Test the lock setpoint sensor changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -86,9 +71,11 @@ async def test_pushed_lock_setpoint_status_change( async def test_pushed_binsensor_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the binary port sensor changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -114,9 +101,11 @@ async def test_pushed_binsensor_status_change( async def test_pushed_keylock_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the keylock sensor changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [[False] * 8 for i in range(4)] @@ -141,9 +130,62 @@ async def test_pushed_keylock_status_change( assert state.state == STATE_ON -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the binary sensor is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await 
hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(BINARY_SENSOR_LOCKREGULATOR1).state == STATE_UNAVAILABLE assert hass.states.get(BINARY_SENSOR_SENSOR1).state == STATE_UNAVAILABLE assert hass.states.get(BINARY_SENSOR_KEYLOCK).state == STATE_UNAVAILABLE + + +@pytest.mark.parametrize( + "entity_id", ["binary_sensor.sensor_lockregulator1", "binary_sensor.sensor_keylock"] +) +async def test_create_issue( + hass: HomeAssistant, + service_calls: list[ServiceCall], + issue_registry: ir.IssueRegistry, + entry: MockConfigEntry, + entity_id, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": {"action": "test.automation"}, + } + }, + ) + + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": { + "condition": "state", + "entity_id": entity_id, + "state": STATE_ON, + } + } + } + }, + ) + + await init_integration(hass, entry) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert issue_registry.async_get_issue( + DOMAIN, f"deprecated_binary_sensor_{entity_id}" + ) + + assert len(issue_registry.issues) == 1 diff --git a/tests/components/lcn/test_climate.py b/tests/components/lcn/test_climate.py new file mode 100644 index 00000000000..7ba263bd597 --- /dev/null +++ b/tests/components/lcn/test_climate.py @@ -0,0 +1,289 @@ +"""Test for the LCN climate platform.""" + +from unittest.mock import patch + +from pypck.inputs import ModStatusVar, Unknown +from pypck.lcn_addr import LcnAddr +from pypck.lcn_defs import Var, VarUnit, VarValue +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_CURRENT_TEMPERATURE, + ATTR_HVAC_MODE, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, + DOMAIN as DOMAIN_CLIMATE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.components.lcn.helpers import get_device_connection +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_TEMPERATURE, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform + + +async def test_setup_lcn_climate( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the setup of climate.""" + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.CLIMATE]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_set_hvac_mode_heat(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the hvac mode is set to heat.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + state = hass.states.get("climate.climate1") + state.state = HVACMode.OFF + + # command failed + lock_regulator.return_value = False + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: 
HVACMode.HEAT}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, False) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state != HVACMode.HEAT + + # command success + lock_regulator.reset_mock(return_value=True) + lock_regulator.return_value = True + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, False) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state == HVACMode.HEAT + + +async def test_set_hvac_mode_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the hvac mode is set off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + state = hass.states.get("climate.climate1") + state.state = HVACMode.HEAT + + # command failed + lock_regulator.return_value = False + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, True) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state != HVACMode.OFF + + # command success + lock_regulator.reset_mock(return_value=True) + lock_regulator.return_value = True + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, True) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state == HVACMode.OFF + + +async def test_set_temperature(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the temperature is set.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "var_abs") as var_abs: + state = hass.states.get("climate.climate1") + state.state = HVACMode.HEAT + + # wrong temperature set via service call with high/low attributes + var_abs.return_value = False + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: "climate.climate1", + ATTR_TARGET_TEMP_LOW: 24.5, + ATTR_TARGET_TEMP_HIGH: 25.5, + }, + blocking=True, + ) + + var_abs.assert_not_awaited() + + # command failed + var_abs.reset_mock(return_value=True) + var_abs.return_value = False + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_TEMPERATURE: 25.5}, + blocking=True, + ) + + var_abs.assert_awaited_with(Var.R1VARSETPOINT, 25.5, VarUnit.CELSIUS) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.attributes[ATTR_TEMPERATURE] != 25.5 + + # command success + var_abs.reset_mock(return_value=True) + var_abs.return_value = True + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_TEMPERATURE: 25.5}, + blocking=True, + ) + + var_abs.assert_awaited_with(Var.R1VARSETPOINT, 25.5, VarUnit.CELSIUS) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.attributes[ATTR_TEMPERATURE] == 25.5 + + +async def test_pushed_current_temperature_status_change( + hass: HomeAssistant, + entry: MockConfigEntry, +) -> None: + """Test the climate 
changes its current temperature on status received."""
+    await init_integration(hass, entry)
+
+    device_connection = get_device_connection(hass, (0, 7, False), entry)
+    address = LcnAddr(0, 7, False)
+
+    temperature = VarValue.from_celsius(25.5)
+
+    inp = ModStatusVar(address, Var.VAR1, temperature)
+    await device_connection.async_process_input(inp)
+    await hass.async_block_till_done()
+
+    state = hass.states.get("climate.climate1")
+    assert state is not None
+    assert state.state == HVACMode.HEAT
+    assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 25.5
+    assert state.attributes[ATTR_TEMPERATURE] is None
+
+
+async def test_pushed_setpoint_status_change(
+    hass: HomeAssistant,
+    entry: MockConfigEntry,
+) -> None:
+    """Test the climate changes its setpoint on status received."""
+    await init_integration(hass, entry)
+
+    device_connection = get_device_connection(hass, (0, 7, False), entry)
+    address = LcnAddr(0, 7, False)
+
+    temperature = VarValue.from_celsius(25.5)
+
+    inp = ModStatusVar(address, Var.R1VARSETPOINT, temperature)
+    await device_connection.async_process_input(inp)
+    await hass.async_block_till_done()
+
+    state = hass.states.get("climate.climate1")
+    assert state is not None
+    assert state.state == HVACMode.HEAT
+    assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None
+    assert state.attributes[ATTR_TEMPERATURE] == 25.5
+
+
+async def test_pushed_lock_status_change(
+    hass: HomeAssistant,
+    entry: MockConfigEntry,
+) -> None:
+    """Test the climate turns off when a regulator lock status is received."""
+    await init_integration(hass, entry)
+
+    device_connection = get_device_connection(hass, (0, 7, False), entry)
+    address = LcnAddr(0, 7, False)
+
+    temperature = VarValue(0x8000)
+
+    inp = ModStatusVar(address, Var.R1VARSETPOINT, temperature)
+    await device_connection.async_process_input(inp)
+    await hass.async_block_till_done()
+
+    state = hass.states.get("climate.climate1")
+    assert state is not None
+    assert state.state == HVACMode.OFF
+    assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None
+    assert state.attributes[ATTR_TEMPERATURE] is None
+
+
+async def test_pushed_wrong_input(
+    hass: HomeAssistant,
+    entry: MockConfigEntry,
+) -> None:
+    """Test the climate handles wrong input correctly."""
+    await init_integration(hass, entry)
+
+    device_connection = get_device_connection(hass, (0, 7, False), entry)
+
+    await device_connection.async_process_input(Unknown("input"))
+    await hass.async_block_till_done()
+
+    state = hass.states.get("climate.climate1")
+    assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None
+    assert state.attributes[ATTR_TEMPERATURE] is None
+
+
+async def test_unload_config_entry(
+    hass: HomeAssistant,
+    entry: MockConfigEntry,
+) -> None:
+    """Test the climate is removed when the config entry is unloaded."""
+    await init_integration(hass, entry)
+
+    await hass.config_entries.async_unload(entry.entry_id)
+    state = hass.states.get("climate.climate1")
+    assert state.state == STATE_UNAVAILABLE
diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py
index d002c5fe625..b7967c247ec 100644
--- a/tests/components/lcn/test_config_flow.py
+++ b/tests/components/lcn/test_config_flow.py
@@ -7,7 +7,12 @@ import pytest
 
 from homeassistant import config_entries, data_entry_flow
 from homeassistant.components.lcn.config_flow import LcnFlowHandler, validate_connection
-from homeassistant.components.lcn.const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DOMAIN
+from homeassistant.components.lcn.const import (
+    CONF_ACKNOWLEDGE,
+    
CONF_DIM_MODE, + CONF_SK_NUM_TRIES, + DOMAIN, +) from homeassistant.const import ( CONF_BASE, CONF_DEVICES, @@ -18,9 +23,7 @@ from homeassistant.const import ( CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -31,6 +34,7 @@ CONFIG_DATA = { CONF_PASSWORD: "lcn", CONF_SK_NUM_TRIES: 0, CONF_DIM_MODE: "STEPS200", + CONF_ACKNOWLEDGE: False, } CONNECTION_DATA = {CONF_HOST: "pchk", **CONFIG_DATA} @@ -42,82 +46,6 @@ IMPORT_DATA = { } -async def test_step_import( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test for import step.""" - - with ( - patch("pypck.connection.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup", return_value=True), - patch("homeassistant.components.lcn.async_setup_entry", return_value=True), - ): - data = IMPORT_DATA.copy() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "pchk" - assert result["data"] == IMPORT_DATA - assert issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - - -async def test_step_import_existing_host( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test for update of config_entry if imported host already exists.""" - - # Create config entry and add it to hass - mock_data = IMPORT_DATA.copy() - mock_data.update({CONF_SK_NUM_TRIES: 3, CONF_DIM_MODE: 50}) - mock_entry = MockConfigEntry(domain=DOMAIN, data=mock_data) - mock_entry.add_to_hass(hass) - # Initialize a config flow with different data but same host address - with patch("pypck.connection.PchkConnectionManager.async_connect"): - imported_data = IMPORT_DATA.copy() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=imported_data - ) - - # Check if config entry was updated - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "existing_configuration_updated" - assert mock_entry.source == config_entries.SOURCE_IMPORT - assert mock_entry.data == IMPORT_DATA - assert issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - - -@pytest.mark.parametrize( - ("error", "reason"), - [ - (PchkAuthenticationError, "authentication_error"), - (PchkLicenseError, "license_error"), - (TimeoutError, "connection_refused"), - ], -) -async def test_step_import_error( - hass: HomeAssistant, issue_registry: ir.IssueRegistry, error, reason -) -> None: - """Test for error in import is handled correctly.""" - with patch( - "pypck.connection.PchkConnectionManager.async_connect", side_effect=error - ): - data = IMPORT_DATA.copy() - data.update({CONF_HOST: "pchk"}) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - assert issue_registry.async_get_issue(DOMAIN, reason) - - async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" flow = LcnFlowHandler() @@ -132,8 +60,7 @@ async def test_show_form(hass: HomeAssistant) -> None: async def test_step_user(hass: 
HomeAssistant) -> None: """Test for user step.""" with ( - patch("pypck.connection.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup", return_value=True), + patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): data = CONNECTION_DATA.copy() @@ -156,7 +83,7 @@ async def test_step_user_existing_host( """Test for user defined host already exists.""" entry.add_to_hass(hass) - with patch("pypck.connection.PchkConnectionManager.async_connect"): + with patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"): config_data = entry.data.copy() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data=config_data @@ -179,7 +106,8 @@ async def test_step_user_error( ) -> None: """Test for error in user step is handled correctly.""" with patch( - "pypck.connection.PchkConnectionManager.async_connect", side_effect=error + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=error, ): data = CONNECTION_DATA.copy() data.update({CONF_HOST: "pchk"}) @@ -196,20 +124,18 @@ async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> entry.add_to_hass(hass) old_entry_data = entry.data.copy() + result = await entry.start_reconfigure_flow(hass) + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "reconfigure" + with ( - patch("pypck.connection.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup", return_value=True), + patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=CONFIG_DATA.copy(), + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + CONFIG_DATA.copy(), ) - assert result["type"] == data_entry_flow.FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" @@ -234,17 +160,18 @@ async def test_step_reconfigure_error( ) -> None: """Test for error in reconfigure step is handled correctly.""" entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "reconfigure" + with patch( - "pypck.connection.PchkConnectionManager.async_connect", side_effect=error + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=error, ): - data = {**CONNECTION_DATA, CONF_HOST: "pchk"} - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=data, + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + CONFIG_DATA.copy(), ) assert result["type"] == data_entry_flow.FlowResultType.FORM @@ -256,8 +183,12 @@ async def test_validate_connection() -> None: data = CONNECTION_DATA.copy() with ( - patch("pypck.connection.PchkConnectionManager.async_connect") as async_connect, - patch("pypck.connection.PchkConnectionManager.async_close") as async_close, + patch( + "homeassistant.components.lcn.PchkConnectionManager.async_connect" + ) as async_connect, + patch( + "homeassistant.components.lcn.PchkConnectionManager.async_close" + ) as 
async_close, ): result = await validate_connection(data=data) diff --git a/tests/components/lcn/test_cover.py b/tests/components/lcn/test_cover.py index f50921c08a1..ff4311b6687 100644 --- a/tests/components/lcn/test_cover.py +++ b/tests/components/lcn/test_cover.py @@ -5,336 +5,334 @@ from unittest.mock import patch from pypck.inputs import ModStatusOutput, ModStatusRelays from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import MotorReverseTime, MotorStateModifier +from syrupy.assertion import SnapshotAssertion -from homeassistant.components.cover import DOMAIN as DOMAIN_COVER +from homeassistant.components.cover import DOMAIN as DOMAIN_COVER, CoverState from homeassistant.components.lcn.helpers import get_device_connection from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockModuleConnection +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform COVER_OUTPUTS = "cover.cover_outputs" COVER_RELAYS = "cover.cover_relays" -async def test_setup_lcn_cover(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_setup_lcn_cover( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: """Test the setup of cover.""" - for entity_id in ( - COVER_OUTPUTS, - COVER_RELAYS, - ): - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_OPEN + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.COVER]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection -) -> None: - """Test the attributes of an entity.""" - - entity_outputs = entity_registry.async_get(COVER_OUTPUTS) - - assert entity_outputs - assert entity_outputs.unique_id == f"{entry.entry_id}-m000007-outputs" - assert entity_outputs.original_name == "Cover_Outputs" - - entity_relays = entity_registry.async_get(COVER_RELAYS) - - assert entity_relays - assert entity_relays.unique_id == f"{entry.entry_id}-m000007-motor1" - assert entity_relays.original_name == "Cover_Relays" - - -@patch.object(MockModuleConnection, "control_motors_outputs") -async def test_outputs_open( - control_motors_outputs, hass: HomeAssistant, lcn_connection -) -> None: +async def test_outputs_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the outputs cover opens.""" - state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_CLOSED + await init_integration(hass, entry) - # command failed - control_motors_outputs.return_value = False + with patch.object( + MockModuleConnection, "control_motors_outputs" + ) as control_motors_outputs: + state = hass.states.get(COVER_OUTPUTS) + state.state = CoverState.CLOSED - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with( - MotorStateModifier.UP, MotorReverseTime.RT1200 - ) + # command failed + control_motors_outputs.return_value = False - state = hass.states.get(COVER_OUTPUTS) - assert state is not None 
- assert state.state != STATE_OPENING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) - # command success - control_motors_outputs.reset_mock(return_value=True) - control_motors_outputs.return_value = True + control_motors_outputs.assert_awaited_with( + MotorStateModifier.UP, MotorReverseTime.RT1200 + ) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with( - MotorStateModifier.UP, MotorReverseTime.RT1200 - ) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state != CoverState.OPENING - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state == STATE_OPENING + # command success + control_motors_outputs.reset_mock(return_value=True) + control_motors_outputs.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + + control_motors_outputs.assert_awaited_with( + MotorStateModifier.UP, MotorReverseTime.RT1200 + ) + + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state == CoverState.OPENING -@patch.object(MockModuleConnection, "control_motors_outputs") -async def test_outputs_close( - control_motors_outputs, hass: HomeAssistant, lcn_connection -) -> None: +async def test_outputs_close(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the outputs cover closes.""" - state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_OPEN + await init_integration(hass, entry) - # command failed - control_motors_outputs.return_value = False + with patch.object( + MockModuleConnection, "control_motors_outputs" + ) as control_motors_outputs: + state = hass.states.get(COVER_OUTPUTS) + state.state = CoverState.OPEN - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with( - MotorStateModifier.DOWN, MotorReverseTime.RT1200 - ) + # command failed + control_motors_outputs.return_value = False - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state != STATE_CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) - # command success - control_motors_outputs.reset_mock(return_value=True) - control_motors_outputs.return_value = True + control_motors_outputs.assert_awaited_with( + MotorStateModifier.DOWN, MotorReverseTime.RT1200 + ) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with( - MotorStateModifier.DOWN, MotorReverseTime.RT1200 - ) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state != CoverState.CLOSING - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state == STATE_CLOSING + # command success + control_motors_outputs.reset_mock(return_value=True) + control_motors_outputs.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + + control_motors_outputs.assert_awaited_with( + 
MotorStateModifier.DOWN, MotorReverseTime.RT1200 + ) + + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state == CoverState.CLOSING -@patch.object(MockModuleConnection, "control_motors_outputs") -async def test_outputs_stop( - control_motors_outputs, hass: HomeAssistant, lcn_connection -) -> None: +async def test_outputs_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the outputs cover stops.""" - state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_CLOSING + await init_integration(hass, entry) - # command failed - control_motors_outputs.return_value = False + with patch.object( + MockModuleConnection, "control_motors_outputs" + ) as control_motors_outputs: + state = hass.states.get(COVER_OUTPUTS) + state.state = CoverState.CLOSING - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) + # command failed + control_motors_outputs.return_value = False - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state == STATE_CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) - # command success - control_motors_outputs.reset_mock(return_value=True) - control_motors_outputs.return_value = True + control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state == CoverState.CLOSING - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state not in (STATE_CLOSING, STATE_OPENING) + # command success + control_motors_outputs.reset_mock(return_value=True) + control_motors_outputs.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + + control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) + + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state not in (CoverState.CLOSING, CoverState.OPENING) -@patch.object(MockModuleConnection, "control_motors_relays") -async def test_relays_open( - control_motors_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relays_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relays cover opens.""" - states = [MotorStateModifier.NOCHANGE] * 4 - states[0] = MotorStateModifier.UP + await init_integration(hass, entry) - state = hass.states.get(COVER_RELAYS) - state.state = STATE_CLOSED + with patch.object( + MockModuleConnection, "control_motors_relays" + ) as control_motors_relays: + states = [MotorStateModifier.NOCHANGE] * 4 + states[0] = MotorStateModifier.UP - # command failed - control_motors_relays.return_value = False + state = hass.states.get(COVER_RELAYS) + state.state = CoverState.CLOSED - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + # command failed + control_motors_relays.return_value = False - state = 
hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state != STATE_OPENING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) - # command success - control_motors_relays.reset_mock(return_value=True) - control_motors_relays.return_value = True + control_motors_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state != CoverState.OPENING - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state == STATE_OPENING + # command success + control_motors_relays.reset_mock(return_value=True) + control_motors_relays.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + + control_motors_relays.assert_awaited_with(states) + + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state == CoverState.OPENING -@patch.object(MockModuleConnection, "control_motors_relays") -async def test_relays_close( - control_motors_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relays_close(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relays cover closes.""" - states = [MotorStateModifier.NOCHANGE] * 4 - states[0] = MotorStateModifier.DOWN + await init_integration(hass, entry) - state = hass.states.get(COVER_RELAYS) - state.state = STATE_OPEN + with patch.object( + MockModuleConnection, "control_motors_relays" + ) as control_motors_relays: + states = [MotorStateModifier.NOCHANGE] * 4 + states[0] = MotorStateModifier.DOWN - # command failed - control_motors_relays.return_value = False + state = hass.states.get(COVER_RELAYS) + state.state = CoverState.OPEN - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + # command failed + control_motors_relays.return_value = False - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state != STATE_CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) - # command success - control_motors_relays.reset_mock(return_value=True) - control_motors_relays.return_value = True + control_motors_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state != CoverState.CLOSING - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state == STATE_CLOSING + # command success + control_motors_relays.reset_mock(return_value=True) + control_motors_relays.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + + control_motors_relays.assert_awaited_with(states) + + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state == 
CoverState.CLOSING -@patch.object(MockModuleConnection, "control_motors_relays") -async def test_relays_stop( - control_motors_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relays_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relays cover stops.""" - states = [MotorStateModifier.NOCHANGE] * 4 - states[0] = MotorStateModifier.STOP + await init_integration(hass, entry) - state = hass.states.get(COVER_RELAYS) - state.state = STATE_CLOSING + with patch.object( + MockModuleConnection, "control_motors_relays" + ) as control_motors_relays: + states = [MotorStateModifier.NOCHANGE] * 4 + states[0] = MotorStateModifier.STOP - # command failed - control_motors_relays.return_value = False + state = hass.states.get(COVER_RELAYS) + state.state = CoverState.CLOSING - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + # command failed + control_motors_relays.return_value = False - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state == STATE_CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) - # command success - control_motors_relays.reset_mock(return_value=True) - control_motors_relays.return_value = True + control_motors_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state == CoverState.CLOSING - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state not in (STATE_CLOSING, STATE_OPENING) + # command success + control_motors_relays.reset_mock(return_value=True) + control_motors_relays.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + + control_motors_relays.assert_awaited_with(states) + + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state not in (CoverState.CLOSING, CoverState.OPENING) async def test_pushed_outputs_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the outputs cover changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_CLOSED + state.state = CoverState.CLOSED # push status "open" inp = ModStatusOutput(address, 0, 100) @@ -343,7 +341,7 @@ async def test_pushed_outputs_status_change( state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # push status "stop" inp = ModStatusOutput(address, 0, 0) @@ -352,7 +350,7 @@ async def test_pushed_outputs_status_change( state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state not in (STATE_OPENING, STATE_CLOSING) + assert state.state not in (CoverState.OPENING, CoverState.CLOSING) # push status "close" inp = ModStatusOutput(address, 1, 100) @@ -361,19 +359,21 @@ async def test_pushed_outputs_status_change( 
state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_pushed_relays_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the relays cover changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 state = hass.states.get(COVER_RELAYS) - state.state = STATE_CLOSED + state.state = CoverState.CLOSED # push status "open" states[0:2] = [True, False] @@ -383,7 +383,7 @@ async def test_pushed_relays_status_change( state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # push status "stop" states[0] = False @@ -393,7 +393,7 @@ async def test_pushed_relays_status_change( state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state not in (STATE_OPENING, STATE_CLOSING) + assert state.state not in (CoverState.OPENING, CoverState.CLOSING) # push status "close" states[0:2] = [True, True] @@ -403,11 +403,13 @@ async def test_pushed_relays_status_change( state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the cover is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(COVER_OUTPUTS).state == STATE_UNAVAILABLE assert hass.states.get(COVER_RELAYS).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_device_trigger.py b/tests/components/lcn/test_device_trigger.py index 6c5ab7d6f4e..6537c108981 100644 --- a/tests/components/lcn/test_device_trigger.py +++ b/tests/components/lcn/test_device_trigger.py @@ -15,15 +15,17 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.setup import async_setup_component -from .conftest import get_device +from .conftest import MockConfigEntry, get_device, init_integration from tests.common import async_get_device_automations async def test_get_triggers_module_device( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected triggers from a LCN module device.""" + await init_integration(hass, entry) + device = get_device(hass, entry, (0, 7, False)) expected_triggers = [ @@ -50,9 +52,11 @@ async def test_get_triggers_module_device( async def test_get_triggers_non_module_device( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, entry, lcn_connection + hass: HomeAssistant, device_registry: dr.DeviceRegistry, entry: MockConfigEntry ) -> None: """Test we get the expected triggers from a LCN non-module device.""" + await init_integration(hass, entry) + not_included_types = ("transmitter", "transponder", "fingerprint", "send_keys") host_device = device_registry.async_get_device( @@ -72,9 +76,10 @@ async def test_get_triggers_non_module_device( async def test_if_fires_on_transponder_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: 
HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for transponder event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -119,9 +124,10 @@ async def test_if_fires_on_transponder_event( async def test_if_fires_on_fingerprint_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for fingerprint event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -166,9 +172,10 @@ async def test_if_fires_on_fingerprint_event( async def test_if_fires_on_codelock_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for codelock event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -213,9 +220,10 @@ async def test_if_fires_on_codelock_event( async def test_if_fires_on_transmitter_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for transmitter event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -269,9 +277,10 @@ async def test_if_fires_on_transmitter_event( async def test_if_fires_on_send_keys_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for send_keys event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -318,9 +327,10 @@ async def test_if_fires_on_send_keys_event( async def test_get_transponder_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected capabilities from a transponder device trigger.""" + await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -341,9 +351,10 @@ async def test_get_transponder_trigger_capabilities( async def test_get_fingerprint_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected capabilities from a fingerprint device trigger.""" + await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -364,9 +375,10 @@ async def test_get_fingerprint_trigger_capabilities( async def test_get_transmitter_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected capabilities from a transmitter device trigger.""" + await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -397,9 +409,10 @@ async def test_get_transmitter_trigger_capabilities( async def test_get_send_keys_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected capabilities 
from a send_keys device trigger.""" + await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -435,9 +448,10 @@ async def test_get_send_keys_trigger_capabilities( async def test_unknown_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get empty capabilities if trigger is unknown.""" + await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) diff --git a/tests/components/lcn/test_events.py b/tests/components/lcn/test_events.py index eb62f820103..c6c3559e821 100644 --- a/tests/components/lcn/test_events.py +++ b/tests/components/lcn/test_events.py @@ -3,10 +3,11 @@ from pypck.inputs import Input, ModSendKeysHost, ModStatusAccessControl from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import AccessControlPeriphery, KeyAction, SendKeyCommand -import pytest from homeassistant.core import HomeAssistant +from .conftest import MockConfigEntry, init_integration + from tests.common import async_capture_events LCN_TRANSPONDER = "lcn_transponder" @@ -15,8 +16,11 @@ LCN_TRANSMITTER = "lcn_transmitter" LCN_SEND_KEYS = "lcn_send_keys" -async def test_fire_transponder_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_transponder_event( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test the transponder event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_TRANSPONDER) inp = ModStatusAccessControl( @@ -33,8 +37,11 @@ async def test_fire_transponder_event(hass: HomeAssistant, lcn_connection) -> No assert events[0].data["code"] == "aabbcc" -async def test_fire_fingerprint_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_fingerprint_event( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test the fingerprint event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_FINGERPRINT) inp = ModStatusAccessControl( @@ -51,8 +58,9 @@ async def test_fire_fingerprint_event(hass: HomeAssistant, lcn_connection) -> No assert events[0].data["code"] == "aabbcc" -async def test_fire_codelock_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_codelock_event(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the codelock event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, "lcn_codelock") inp = ModStatusAccessControl( @@ -69,8 +77,11 @@ async def test_fire_codelock_event(hass: HomeAssistant, lcn_connection) -> None: assert events[0].data["code"] == "aabbcc" -async def test_fire_transmitter_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_transmitter_event( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test the transmitter event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_TRANSMITTER) inp = ModStatusAccessControl( @@ -93,8 +104,9 @@ async def test_fire_transmitter_event(hass: HomeAssistant, lcn_connection) -> No assert events[0].data["action"] == "hit" -async def test_fire_sendkeys_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_sendkeys_event(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the send_keys event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_SEND_KEYS) inp = 
ModSendKeysHost( @@ -122,9 +134,10 @@ async def test_fire_sendkeys_event(hass: HomeAssistant, lcn_connection) -> None: async def test_dont_fire_on_non_module_input( - hass: HomeAssistant, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test for no event is fired if a non-module input is received.""" + lcn_connection = await init_integration(hass, entry) inp = Input() for event_name in ( @@ -139,16 +152,16 @@ async def test_dont_fire_on_non_module_input( assert len(events) == 0 -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_dont_fire_on_unknown_module(hass: HomeAssistant, lcn_connection) -> None: +async def test_dont_fire_on_unknown_module( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test for no event is fired if an input from an unknown module is received.""" + lcn_connection = await init_integration(hass, entry) inp = ModStatusAccessControl( LcnAddr(0, 10, False), # unknown module periphery=AccessControlPeriphery.FINGERPRINT, code="aabbcc", ) - events = async_capture_events(hass, LCN_FINGERPRINT) await lcn_connection.async_process_input(inp) await hass.async_block_till_done() diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index c118b98ecef..2327635e356 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -2,11 +2,8 @@ from unittest.mock import Mock, patch -from pypck.connection import ( - PchkAuthenticationError, - PchkConnectionManager, - PchkLicenseError, -) +from pypck.connection import PchkAuthenticationError, PchkLicenseError +import pytest from homeassistant import config_entries from homeassistant.components.lcn.const import DOMAIN @@ -14,11 +11,18 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from .conftest import MockPchkConnectionManager, setup_component +from .conftest import ( + MockConfigEntry, + MockPchkConnectionManager, + create_config_entry, + init_integration, +) -async def test_async_setup_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_async_setup_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test a successful setup entry and unload of entry.""" + await init_integration(hass, entry) + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert entry.state is ConfigEntryState.LOADED @@ -29,14 +33,16 @@ async def test_async_setup_entry(hass: HomeAssistant, entry, lcn_connection) -> assert not hass.data.get(DOMAIN) -async def test_async_setup_multiple_entries(hass: HomeAssistant, entry, entry2) -> None: +async def test_async_setup_multiple_entries( + hass: HomeAssistant, entry: MockConfigEntry, entry2 +) -> None: """Test a successful setup and unload of multiple entries.""" hass.http = Mock() - with patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager): + with patch( + "homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager + ): for config_entry in (entry, entry2): - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await init_integration(hass, config_entry) assert config_entry.state is ConfigEntryState.LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 2 @@ -54,7 +60,7 @@ async def test_async_setup_entry_update( hass: HomeAssistant, 
device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - entry, + entry: MockConfigEntry, ) -> None: """Test a successful setup entry if entry with same id already exists.""" # setup first entry @@ -76,22 +82,17 @@ async def test_async_setup_entry_update( assert dummy_entity in entity_registry.entities.values() assert dummy_device in device_registry.devices.values() - # setup new entry with same data via import step (should cleanup dummy device) - with patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager): - await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry.data - ) - - assert dummy_device not in device_registry.devices.values() - assert dummy_entity not in entity_registry.entities.values() - +@pytest.mark.parametrize( + "exception", [PchkAuthenticationError, PchkLicenseError, TimeoutError] +) async def test_async_setup_entry_raises_authentication_error( - hass: HomeAssistant, entry + hass: HomeAssistant, entry: MockConfigEntry, exception: Exception ) -> None: """Test that an authentication error is handled properly.""" - with patch.object( - PchkConnectionManager, "async_connect", side_effect=PchkAuthenticationError + with patch( + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=exception, ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -100,38 +101,33 @@ async def test_async_setup_entry_raises_authentication_error( assert entry.state is ConfigEntryState.SETUP_ERROR -async def test_async_setup_entry_raises_license_error( - hass: HomeAssistant, entry -) -> None: - """Test that an authentication error is handled properly.""" - with patch.object( - PchkConnectionManager, "async_connect", side_effect=PchkLicenseError - ): - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_migrate_1_1(hass: HomeAssistant, entry) -> None: + """Test migration config entry.""" + entry_v1_1 = create_config_entry("pchk_v1_1", version=(1, 1)) + entry_v1_1.add_to_hass(hass) - assert entry.state is ConfigEntryState.SETUP_ERROR + await hass.config_entries.async_setup(entry_v1_1.entry_id) + await hass.async_block_till_done() + + entry_migrated = hass.config_entries.async_get_entry(entry_v1_1.entry_id) + assert entry_migrated.state is ConfigEntryState.LOADED + assert entry_migrated.version == 2 + assert entry_migrated.minor_version == 1 + assert entry_migrated.data == entry.data -async def test_async_setup_entry_raises_timeout_error( - hass: HomeAssistant, entry -) -> None: - """Test that an authentication error is handled properly.""" - with patch.object(PchkConnectionManager, "async_connect", side_effect=TimeoutError): - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_migrate_1_2(hass: HomeAssistant, entry) -> None: + """Test migration config entry.""" + entry_v1_2 = create_config_entry("pchk_v1_2", version=(1, 2)) + entry_v1_2.add_to_hass(hass) - assert entry.state is ConfigEntryState.SETUP_ERROR + await hass.config_entries.async_setup(entry_v1_2.entry_id) + await hass.async_block_till_done() - -async def test_async_setup_from_configuration_yaml(hass: HomeAssistant) -> None: - """Test a successful setup using 
data from configuration.yaml.""" - with ( - patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager), - patch("homeassistant.components.lcn.async_setup_entry") as async_setup_entry, - ): - await setup_component(hass) - - assert async_setup_entry.await_count == 2 + entry_migrated = hass.config_entries.async_get_entry(entry_v1_2.entry_id) + assert entry_migrated.state is ConfigEntryState.LOADED + assert entry_migrated.version == 2 + assert entry_migrated.minor_version == 1 + assert entry_migrated.data == entry.data diff --git a/tests/components/lcn/test_light.py b/tests/components/lcn/test_light.py index b91f3d5b17c..4251d997724 100644 --- a/tests/components/lcn/test_light.py +++ b/tests/components/lcn/test_light.py @@ -5,297 +5,278 @@ from unittest.mock import patch from pypck.inputs import ModStatusOutput, ModStatusRelays from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import RelayStateModifier +from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, DOMAIN as DOMAIN_LIGHT, - ColorMode, - LightEntityFeature, ) from homeassistant.const import ( ATTR_ENTITY_ID, - ATTR_SUPPORTED_FEATURES, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockModuleConnection +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform LIGHT_OUTPUT1 = "light.light_output1" LIGHT_OUTPUT2 = "light.light_output2" LIGHT_RELAY1 = "light.light_relay1" -async def test_setup_lcn_light(hass: HomeAssistant, lcn_connection) -> None: +async def test_setup_lcn_light( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: """Test the setup of light.""" - for entity_id in ( - LIGHT_OUTPUT1, - LIGHT_OUTPUT2, - LIGHT_RELAY1, - ): - state = hass.states.get(entity_id) + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.LIGHT]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_output_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the output light turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + # command failed + dim_output.return_value = False + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 100, 9) + + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state != STATE_ON + + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 100, 9) + + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_ON + + +async def test_output_turn_on_with_attributes( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the output light turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + 
dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: LIGHT_OUTPUT1, + ATTR_BRIGHTNESS: 50, + ATTR_TRANSITION: 2, + }, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 19, 6) + + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_ON + + +async def test_output_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the output light turns off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + state = hass.states.get(LIGHT_OUTPUT1) + state.state = STATE_ON + + # command failed + dim_output.return_value = False + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 9) + + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state != STATE_OFF + + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 9) + + state = hass.states.get(LIGHT_OUTPUT1) assert state is not None assert state.state == STATE_OFF -async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: - """Test state of entity.""" - state = hass.states.get(LIGHT_OUTPUT1) - assert state - assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION - assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.BRIGHTNESS] - - state = hass.states.get(LIGHT_OUTPUT2) - assert state - assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION - assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] - - -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection -) -> None: - """Test the attributes of an entity.""" - entity_output = entity_registry.async_get(LIGHT_OUTPUT1) - - assert entity_output - assert entity_output.unique_id == f"{entry.entry_id}-m000007-output1" - assert entity_output.original_name == "Light_Output1" - - entity_relay = entity_registry.async_get(LIGHT_RELAY1) - - assert entity_relay - assert entity_relay.unique_id == f"{entry.entry_id}-m000007-relay1" - assert entity_relay.original_name == "Light_Relay1" - - -@patch.object(MockModuleConnection, "dim_output") -async def test_output_turn_on(dim_output, hass: HomeAssistant, lcn_connection) -> None: - """Test the output light turns on.""" - # command failed - dim_output.return_value = False - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 100, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state != STATE_ON - - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 100, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_ON - - -@patch.object(MockModuleConnection, "dim_output") -async def 
test_output_turn_on_with_attributes( - dim_output, hass: HomeAssistant, lcn_connection -) -> None: - """Test the output light turns on.""" - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: LIGHT_OUTPUT1, - ATTR_BRIGHTNESS: 50, - ATTR_TRANSITION: 2, - }, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 19, 6) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_ON - - -@patch.object(MockModuleConnection, "dim_output") -async def test_output_turn_off(dim_output, hass: HomeAssistant, lcn_connection) -> None: - """Test the output light turns off.""" - state = hass.states.get(LIGHT_OUTPUT1) - state.state = STATE_ON - - # command failed - dim_output.return_value = False - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state != STATE_OFF - - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_OFF - - -@patch.object(MockModuleConnection, "dim_output") async def test_output_turn_off_with_attributes( - dim_output, hass: HomeAssistant, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the output light turns off.""" - dim_output.return_value = True + await init_integration(hass, entry) - state = hass.states.get(LIGHT_OUTPUT1) - state.state = STATE_ON + with patch.object(MockModuleConnection, "dim_output") as dim_output: + dim_output.return_value = True - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: LIGHT_OUTPUT1, - ATTR_TRANSITION: 2, - }, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 6) + state = hass.states.get(LIGHT_OUTPUT1) + state.state = STATE_ON - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_OFF + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: LIGHT_OUTPUT1, + ATTR_TRANSITION: 2, + }, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 6) + + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_OFF -@patch.object(MockModuleConnection, "control_relays") -async def test_relay_turn_on( - control_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relay_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relay light turns on.""" - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.ON + await init_integration(hass, entry) - # command failed - control_relays.return_value = False + with patch.object(MockModuleConnection, "control_relays") as control_relays: + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.ON - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - 
control_relays.assert_awaited_with(states) + # command failed + control_relays.return_value = False - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state != STATE_ON + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state != STATE_ON - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state == STATE_ON + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) + + control_relays.assert_awaited_with(states) + + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state == STATE_ON -@patch.object(MockModuleConnection, "control_relays") -async def test_relay_turn_off( - control_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relay light turns off.""" - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.OFF + await init_integration(hass, entry) - state = hass.states.get(LIGHT_RELAY1) - state.state = STATE_ON + with patch.object(MockModuleConnection, "control_relays") as control_relays: + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.OFF - # command failed - control_relays.return_value = False + state = hass.states.get(LIGHT_RELAY1) + state.state = STATE_ON - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + # command failed + control_relays.return_value = False - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state != STATE_OFF + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state != STATE_OFF - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state == STATE_OFF + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) + + control_relays.assert_awaited_with(states) + + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state == STATE_OFF async def test_pushed_output_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: 
MockConfigEntry ) -> None: """Test the output light changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -320,9 +301,11 @@ async def test_pushed_output_status_change( async def test_pushed_relay_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the relay light changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -348,7 +331,9 @@ async def test_pushed_relay_status_change( assert state.state == STATE_OFF -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the light is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(LIGHT_OUTPUT1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_scene.py b/tests/components/lcn/test_scene.py new file mode 100644 index 00000000000..27e7864df41 --- /dev/null +++ b/tests/components/lcn/test_scene.py @@ -0,0 +1,64 @@ +"""Test for the LCN scene platform.""" + +from unittest.mock import patch + +from pypck.lcn_defs import OutputPort, RelayPort +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.scene import DOMAIN as DOMAIN_SCENE +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_ON, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform + + +async def test_setup_lcn_scene( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the setup of scene.""" + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.SCENE]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_scene_activate( + hass: HomeAssistant, + entry: MockConfigEntry, +) -> None: + """Test the scene is activated.""" + await init_integration(hass, entry) + with patch.object(MockModuleConnection, "activate_scene") as activate_scene: + await hass.services.async_call( + DOMAIN_SCENE, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "scene.romantic"}, + blocking=True, + ) + + state = hass.states.get("scene.romantic") + assert state is not None + + activate_scene.assert_awaited_with( + 0, 0, [OutputPort.OUTPUT1, OutputPort.OUTPUT2], [RelayPort.RELAY1], 0.0 + ) + + +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the scene is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + + await hass.config_entries.async_unload(entry.entry_id) + state = hass.states.get("scene.romantic") + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_sensor.py b/tests/components/lcn/test_sensor.py index cdcd5a195a3..18335f4b073 100644 --- a/tests/components/lcn/test_sensor.py +++ b/tests/components/lcn/test_sensor.py @@ -1,85 +1,46 @@ """Test for the LCN sensor platform.""" +from
unittest.mock import patch + from pypck.inputs import ModStatusLedsAndLogicOps, ModStatusVar from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import LedStatus, LogicOpStatus, Var, VarValue +from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection -from homeassistant.const import ( - ATTR_UNIT_OF_MEASUREMENT, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - UnitOfTemperature, -) +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from .conftest import MockConfigEntry, init_integration + +from tests.common import snapshot_platform + SENSOR_VAR1 = "sensor.sensor_var1" SENSOR_SETPOINT1 = "sensor.sensor_setpoint1" SENSOR_LED6 = "sensor.sensor_led6" SENSOR_LOGICOP1 = "sensor.sensor_logicop1" -async def test_setup_lcn_sensor(hass: HomeAssistant, entry, lcn_connection) -> None: - """Test the setup of sensor.""" - for entity_id in ( - SENSOR_VAR1, - SENSOR_SETPOINT1, - SENSOR_LED6, - SENSOR_LOGICOP1, - ): - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_UNKNOWN - - -async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: - """Test state of entity.""" - state = hass.states.get(SENSOR_VAR1) - assert state - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfTemperature.CELSIUS - - state = hass.states.get(SENSOR_SETPOINT1) - assert state - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfTemperature.CELSIUS - - state = hass.states.get(SENSOR_LED6) - assert state - - state = hass.states.get(SENSOR_LOGICOP1) - assert state - - -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +async def test_setup_lcn_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, ) -> None: - """Test the attributes of an entity.""" + """Test the setup of sensor.""" + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.SENSOR]): + await init_integration(hass, entry) - entity_var1 = entity_registry.async_get(SENSOR_VAR1) - assert entity_var1 - assert entity_var1.unique_id == f"{entry.entry_id}-m000007-var1" - assert entity_var1.original_name == "Sensor_Var1" - - entity_r1varsetpoint = entity_registry.async_get(SENSOR_SETPOINT1) - assert entity_r1varsetpoint - assert entity_r1varsetpoint.unique_id == f"{entry.entry_id}-m000007-r1varsetpoint" - assert entity_r1varsetpoint.original_name == "Sensor_Setpoint1" - - entity_led6 = entity_registry.async_get(SENSOR_LED6) - assert entity_led6 - assert entity_led6.unique_id == f"{entry.entry_id}-m000007-led6" - assert entity_led6.original_name == "Sensor_Led6" - - entity_logicop1 = entity_registry.async_get(SENSOR_LOGICOP1) - assert entity_logicop1 - assert entity_logicop1.unique_id == f"{entry.entry_id}-m000007-logicop1" - assert entity_logicop1.original_name == "Sensor_LogicOp1" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_pushed_variable_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the variable sensor changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -103,9 +64,11 @@ async def test_pushed_variable_status_change( async def 
test_pushed_ledlogicop_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the led and logicop sensor changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -129,8 +92,10 @@ async def test_pushed_ledlogicop_status_change( assert state.state == "all" -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the sensor is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(SENSOR_VAR1).state == STATE_UNAVAILABLE assert hass.states.get(SENSOR_SETPOINT1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_services.py b/tests/components/lcn/test_services.py new file mode 100644 index 00000000000..a4ea559cd72 --- /dev/null +++ b/tests/components/lcn/test_services.py @@ -0,0 +1,439 @@ +"""Test for the LCN services.""" + +from unittest.mock import patch + +import pypck +import pytest + +from homeassistant.components.lcn import DOMAIN +from homeassistant.components.lcn.const import ( + CONF_KEYS, + CONF_LED, + CONF_OUTPUT, + CONF_PCK, + CONF_RELVARREF, + CONF_ROW, + CONF_SETPOINT, + CONF_TABLE, + CONF_TEXT, + CONF_TIME, + CONF_TIME_UNIT, + CONF_TRANSITION, + CONF_VALUE, + CONF_VARIABLE, +) +from homeassistant.components.lcn.services import LcnService +from homeassistant.const import ( + CONF_ADDRESS, + CONF_BRIGHTNESS, + CONF_STATE, + CONF_UNIT_OF_MEASUREMENT, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .conftest import ( + MockConfigEntry, + MockModuleConnection, + MockPchkConnectionManager, + init_integration, +) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_output_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test output_abs service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + await hass.services.async_call( + DOMAIN, + LcnService.OUTPUT_ABS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_OUTPUT: "output1", + CONF_BRIGHTNESS: 100, + CONF_TRANSITION: 5, + }, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 100, 9) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_output_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test output_rel service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "rel_output") as rel_output: + await hass.services.async_call( + DOMAIN, + LcnService.OUTPUT_REL, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_OUTPUT: "output1", + CONF_BRIGHTNESS: 25, + }, + blocking=True, + ) + + rel_output.assert_awaited_with(0, 25) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_output_toggle( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test output_toggle service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with 
patch.object(MockModuleConnection, "toggle_output") as toggle_output: + await hass.services.async_call( + DOMAIN, + LcnService.OUTPUT_TOGGLE, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_OUTPUT: "output1", + CONF_TRANSITION: 5, + }, + blocking=True, + ) + + toggle_output.assert_awaited_with(0, 9) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_relays(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test relays service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "control_relays") as control_relays: + await hass.services.async_call( + DOMAIN, + LcnService.RELAYS, + {CONF_ADDRESS: "pchk.s0.m7", CONF_STATE: "0011TT--"}, + blocking=True, + ) + + states = ["OFF", "OFF", "ON", "ON", "TOGGLE", "TOGGLE", "NOCHANGE", "NOCHANGE"] + relay_states = [pypck.lcn_defs.RelayStateModifier[state] for state in states] + + control_relays.assert_awaited_with(relay_states) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_led(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test led service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "control_led") as control_led: + await hass.services.async_call( + DOMAIN, + LcnService.LED, + {CONF_ADDRESS: "pchk.s0.m7", CONF_LED: "led6", CONF_STATE: "blink"}, + blocking=True, + ) + + led = pypck.lcn_defs.LedPort["LED6"] + led_state = pypck.lcn_defs.LedStatus["BLINK"] + + control_led.assert_awaited_with(led, led_state) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_var_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test var_abs service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "var_abs") as var_abs: + await hass.services.async_call( + DOMAIN, + LcnService.VAR_ABS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_VARIABLE: "var1", + CONF_VALUE: 75, + CONF_UNIT_OF_MEASUREMENT: "%", + }, + blocking=True, + ) + + var_abs.assert_awaited_with( + pypck.lcn_defs.Var["VAR1"], 75, pypck.lcn_defs.VarUnit.parse("%") + ) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_var_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test var_rel service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "var_rel") as var_rel: + await hass.services.async_call( + DOMAIN, + LcnService.VAR_REL, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_VARIABLE: "var1", + CONF_VALUE: 10, + CONF_UNIT_OF_MEASUREMENT: "%", + CONF_RELVARREF: "current", + }, + blocking=True, + ) + + var_rel.assert_awaited_with( + pypck.lcn_defs.Var["VAR1"], + 10, + pypck.lcn_defs.VarUnit.parse("%"), + pypck.lcn_defs.RelVarRef["CURRENT"], + ) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_var_reset(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test var_reset service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "var_reset") as 
var_reset: + await hass.services.async_call( + DOMAIN, + LcnService.VAR_RESET, + {CONF_ADDRESS: "pchk.s0.m7", CONF_VARIABLE: "var1"}, + blocking=True, + ) + + var_reset.assert_awaited_with(pypck.lcn_defs.Var["VAR1"]) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_lock_regulator( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test lock_regulator service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + await hass.services.async_call( + DOMAIN, + LcnService.LOCK_REGULATOR, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_SETPOINT: "r1varsetpoint", + CONF_STATE: True, + }, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, True) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_send_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test send_keys service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "send_keys") as send_keys: + await hass.services.async_call( + DOMAIN, + LcnService.SEND_KEYS, + {CONF_ADDRESS: "pchk.s0.m7", CONF_KEYS: "a1a5d8", CONF_STATE: "hit"}, + blocking=True, + ) + + keys = [[False] * 8 for i in range(4)] + keys[0][0] = True + keys[0][4] = True + keys[3][7] = True + + send_keys.assert_awaited_with(keys, pypck.lcn_defs.SendKeyCommand["HIT"]) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_send_keys_hit_deferred( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test send_keys (hit_deferred) service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + keys = [[False] * 8 for i in range(4)] + keys[0][0] = True + keys[0][4] = True + keys[3][7] = True + + # success + with patch.object( + MockModuleConnection, "send_keys_hit_deferred" + ) as send_keys_hit_deferred: + await hass.services.async_call( + DOMAIN, + LcnService.SEND_KEYS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_KEYS: "a1a5d8", + CONF_TIME: 5, + CONF_TIME_UNIT: "s", + }, + blocking=True, + ) + + send_keys_hit_deferred.assert_awaited_with( + keys, 5, pypck.lcn_defs.TimeUnit.parse("S") + ) + + # wrong key action + with ( + patch.object( + MockModuleConnection, "send_keys_hit_deferred" + ) as send_keys_hit_deferred, + pytest.raises(ValueError), + ): + await hass.services.async_call( + DOMAIN, + LcnService.SEND_KEYS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_KEYS: "a1a5d8", + CONF_STATE: "make", + CONF_TIME: 5, + CONF_TIME_UNIT: "s", + }, + blocking=True, + ) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_lock_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test lock_keys service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_keys") as lock_keys: + await hass.services.async_call( + DOMAIN, + LcnService.LOCK_KEYS, + {CONF_ADDRESS: "pchk.s0.m7", CONF_TABLE: "a", CONF_STATE: "0011TT--"}, + blocking=True, + ) + + states = ["OFF", "OFF", "ON", "ON", "TOGGLE", "TOGGLE", "NOCHANGE", "NOCHANGE"] + lock_states = [pypck.lcn_defs.KeyLockStateModifier[state] for state in states] + + 
lock_keys.assert_awaited_with(0, lock_states) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_lock_keys_tab_a_temporary( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test lock_keys (tab_a_temporary) service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + # success + with patch.object( + MockModuleConnection, "lock_keys_tab_a_temporary" + ) as lock_keys_tab_a_temporary: + await hass.services.async_call( + DOMAIN, + LcnService.LOCK_KEYS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_STATE: "0011TT--", + CONF_TIME: 10, + CONF_TIME_UNIT: "s", + }, + blocking=True, + ) + + states = ["OFF", "OFF", "ON", "ON", "TOGGLE", "TOGGLE", "NOCHANGE", "NOCHANGE"] + lock_states = [pypck.lcn_defs.KeyLockStateModifier[state] for state in states] + + lock_keys_tab_a_temporary.assert_awaited_with( + 10, pypck.lcn_defs.TimeUnit.parse("S"), lock_states + ) + + # wrong table + with ( + patch.object( + MockModuleConnection, "lock_keys_tab_a_temporary" + ) as lock_keys_tab_a_temporary, + pytest.raises(ValueError), + ): + await hass.services.async_call( + DOMAIN, + LcnService.LOCK_KEYS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_TABLE: "b", + CONF_STATE: "0011TT--", + CONF_TIME: 10, + CONF_TIME_UNIT: "s", + }, + blocking=True, + ) + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_dyn_text(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test dyn_text service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dyn_text") as dyn_text: + await hass.services.async_call( + DOMAIN, + LcnService.DYN_TEXT, + {CONF_ADDRESS: "pchk.s0.m7", CONF_ROW: 1, CONF_TEXT: "text in row 1"}, + blocking=True, + ) + + dyn_text.assert_awaited_with(0, "text in row 1") + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_pck(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test pck service.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "pck") as pck: + await hass.services.async_call( + DOMAIN, + LcnService.PCK, + {CONF_ADDRESS: "pchk.s0.m7", CONF_PCK: "PIN4"}, + blocking=True, + ) + + pck.assert_awaited_with("PIN4") + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_called_with_invalid_host_id( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test service was called with non existing host id.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "pck") as pck, pytest.raises(ValueError): + await hass.services.async_call( + DOMAIN, + LcnService.PCK, + {CONF_ADDRESS: "foobar.s0.m7", CONF_PCK: "PIN4"}, + blocking=True, + ) + + pck.assert_not_awaited() diff --git a/tests/components/lcn/test_switch.py b/tests/components/lcn/test_switch.py index f24828c5fcb..15b156aac43 100644 --- a/tests/components/lcn/test_switch.py +++ b/tests/components/lcn/test_switch.py @@ -2,9 +2,15 @@ from unittest.mock import patch -from pypck.inputs import ModStatusOutput, ModStatusRelays +from pypck.inputs import ( + ModStatusKeyLocks, + ModStatusOutput, + ModStatusRelays, + ModStatusVar, +) from 
pypck.lcn_addr import LcnAddr -from pypck.lcn_defs import RelayStateModifier +from pypck.lcn_defs import KeyLockStateModifier, RelayStateModifier, Var, VarValue +from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection from homeassistant.components.switch import DOMAIN as DOMAIN_SWITCH @@ -15,209 +21,366 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockModuleConnection +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform SWITCH_OUTPUT1 = "switch.switch_output1" SWITCH_OUTPUT2 = "switch.switch_output2" SWITCH_RELAY1 = "switch.switch_relay1" SWITCH_RELAY2 = "switch.switch_relay2" +SWITCH_REGULATOR1 = "switch.switch_regulator1" +SWITCH_KEYLOCKK1 = "switch.switch_keylock1" -async def test_setup_lcn_switch(hass: HomeAssistant, lcn_connection) -> None: +async def test_setup_lcn_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: """Test the setup of switch.""" - for entity_id in ( - SWITCH_OUTPUT1, - SWITCH_OUTPUT2, - SWITCH_RELAY1, - SWITCH_RELAY2, - ): - state = hass.states.get(entity_id) - assert state is not None + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.SWITCH]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_output_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the output switch turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + # command failed + dim_output.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 100, 0) + + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_OFF + + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 100, 0) + + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_ON + + +async def test_output_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the output switch turns off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + state = hass.states.get(SWITCH_OUTPUT1) + state.state = STATE_ON + + # command failed + dim_output.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 0) + + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_ON + + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 0) + + state = hass.states.get(SWITCH_OUTPUT1) assert state.state == STATE_OFF -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: 
er.EntityRegistry, entry, lcn_connection -) -> None: - """Test the attributes of an entity.""" - - entity_output = entity_registry.async_get(SWITCH_OUTPUT1) - - assert entity_output - assert entity_output.unique_id == f"{entry.entry_id}-m000007-output1" - assert entity_output.original_name == "Switch_Output1" - - entity_relay = entity_registry.async_get(SWITCH_RELAY1) - - assert entity_relay - assert entity_relay.unique_id == f"{entry.entry_id}-m000007-relay1" - assert entity_relay.original_name == "Switch_Relay1" - - -@patch.object(MockModuleConnection, "dim_output") -async def test_output_turn_on(dim_output, hass: HomeAssistant, lcn_connection) -> None: - """Test the output switch turns on.""" - # command failed - dim_output.return_value = False - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 100, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_OFF - - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 100, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_ON - - -@patch.object(MockModuleConnection, "dim_output") -async def test_output_turn_off(dim_output, hass: HomeAssistant, lcn_connection) -> None: - """Test the output switch turns off.""" - state = hass.states.get(SWITCH_OUTPUT1) - state.state = STATE_ON - - # command failed - dim_output.return_value = False - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_ON - - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_OFF - - -@patch.object(MockModuleConnection, "control_relays") -async def test_relay_turn_on( - control_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relay_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relay switch turns on.""" - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.ON + await init_integration(hass, entry) - # command failed - control_relays.return_value = False + with patch.object(MockModuleConnection, "control_relays") as control_relays: + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.ON - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + # command failed + control_relays.return_value = False - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_OFF + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) - # command success - 
control_relays.reset_mock(return_value=True) - control_relays.return_value = True + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_OFF - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_ON + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) + + control_relays.assert_awaited_with(states) + + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_ON -@patch.object(MockModuleConnection, "control_relays") -async def test_relay_turn_off( - control_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relay switch turns off.""" - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.OFF + await init_integration(hass, entry) - state = hass.states.get(SWITCH_RELAY1) - state.state = STATE_ON + with patch.object(MockModuleConnection, "control_relays") as control_relays: + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.OFF - # command failed - control_relays.return_value = False + state = hass.states.get(SWITCH_RELAY1) + state.state = STATE_ON - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + # command failed + control_relays.return_value = False - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_ON + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_ON - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_OFF + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) + + control_relays.assert_awaited_with(states) + + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_OFF + + +async def test_regulatorlock_turn_on( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the regulator lock switch turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + # command failed + lock_regulator.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, True) + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_OFF + + # command success + 
lock_regulator.reset_mock(return_value=True) + lock_regulator.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, True) + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_ON + + +async def test_regulatorlock_turn_off( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the regulator lock switch turns off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + state = hass.states.get(SWITCH_REGULATOR1) + state.state = STATE_ON + + # command failed + lock_regulator.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, False) + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_ON + + # command success + lock_regulator.reset_mock(return_value=True) + lock_regulator.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, False) + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_OFF + + +async def test_keylock_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the keylock switch turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_keys") as lock_keys: + states = [KeyLockStateModifier.NOCHANGE] * 8 + states[0] = KeyLockStateModifier.ON + + # command failed + lock_keys.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, + blocking=True, + ) + + lock_keys.assert_awaited_with(0, states) + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_OFF + + # command success + lock_keys.reset_mock(return_value=True) + lock_keys.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, + blocking=True, + ) + + lock_keys.assert_awaited_with(0, states) + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_ON + + +async def test_keylock_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the keylock switch turns off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_keys") as lock_keys: + states = [KeyLockStateModifier.NOCHANGE] * 8 + states[0] = KeyLockStateModifier.OFF + + state = hass.states.get(SWITCH_KEYLOCKK1) + state.state = STATE_ON + + # command failed + lock_keys.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, + blocking=True, + ) + + lock_keys.assert_awaited_with(0, states) + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_ON + + # command success + lock_keys.reset_mock(return_value=True) + lock_keys.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, + blocking=True, + ) + + lock_keys.assert_awaited_with(0, states) + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_OFF async def test_pushed_output_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: 
MockConfigEntry ) -> None: """Test the output switch changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -239,9 +402,11 @@ async def test_pushed_output_status_change( async def test_pushed_relay_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the relay switch changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -265,7 +430,67 @@ async def test_pushed_relay_status_change( assert state.state == STATE_OFF -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_pushed_regulatorlock_status_change( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the regulator lock switch changes its state on status received.""" + await init_integration(hass, entry) + + device_connection = get_device_connection(hass, (0, 7, False), entry) + address = LcnAddr(0, 7, False) + states = [False] * 8 + + # push status "on" + states[0] = True + inp = ModStatusVar(address, Var.R1VARSETPOINT, VarValue(0x8000)) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_ON + + # push status "off" + states[0] = False + inp = ModStatusVar(address, Var.R1VARSETPOINT, VarValue(0x7FFF)) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_OFF + + +async def test_pushed_keylock_status_change( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the keylock switch changes its state on status received.""" + await init_integration(hass, entry) + + device_connection = get_device_connection(hass, (0, 7, False), entry) + address = LcnAddr(0, 7, False) + states = [[False] * 8 for i in range(4)] + states[0][0] = True + + # push status "on" + inp = ModStatusKeyLocks(address, states) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_ON + + # push status "off" + states[0][0] = False + inp = ModStatusKeyLocks(address, states) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_OFF + + +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the switch is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(SWITCH_OUTPUT1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_websocket.py b/tests/components/lcn/test_websocket.py index f1f0a19b572..2c5fff89e19 100644 --- a/tests/components/lcn/test_websocket.py +++ b/tests/components/lcn/test_websocket.py @@ -1,8 +1,11 @@ """LCN Websocket Tests.""" +from typing import Any + from pypck.lcn_addr import LcnAddr import pytest +from homeassistant.components.lcn import AddressType from homeassistant.components.lcn.const import CONF_DOMAIN_DATA from homeassistant.components.lcn.helpers import get_device_config, get_resource from homeassistant.const import ( @@ -16,6 
+19,8 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant +from .conftest import MockConfigEntry, init_integration + from tests.typing import WebSocketGenerator DEVICES_PAYLOAD = {CONF_TYPE: "lcn/devices", "entry_id": ""} @@ -52,11 +57,12 @@ ENTITIES_DELETE_PAYLOAD = { async def test_lcn_devices_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/devices command.""" - client = await hass_ws_client(hass) + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id({**DEVICES_PAYLOAD, "entry_id": entry.entry_id}) res = await client.receive_json() @@ -79,11 +85,12 @@ async def test_lcn_devices_command( async def test_lcn_entities_command( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - entry, - lcn_connection, + entry: MockConfigEntry, payload, ) -> None: """Test lcn/entities command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id( { @@ -107,10 +114,11 @@ async def test_lcn_entities_command( async def test_lcn_devices_scan_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/devices/scan command.""" # add new module which is not stored in config_entry + lcn_connection = await init_integration(hass, entry) lcn_connection.get_address_conn(LcnAddr(0, 10, False)) client = await hass_ws_client(hass) @@ -129,9 +137,11 @@ async def test_lcn_devices_scan_command( async def test_lcn_devices_add_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/devices/add command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) assert get_device_config((0, 10, False), entry) is None @@ -144,9 +154,11 @@ async def test_lcn_devices_add_command( async def test_lcn_devices_delete_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/devices/delete command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) assert get_device_config((0, 7, False), entry) @@ -160,9 +172,11 @@ async def test_lcn_devices_delete_command( async def test_lcn_entities_add_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/entities/add command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) entity_config = { @@ -185,9 +199,11 @@ async def test_lcn_entities_add_command( async def test_lcn_entities_delete_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/entities/delete command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) assert ( @@ -239,12 +255,14 @@ async def test_lcn_entities_delete_command( async def test_lcn_command_host_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - lcn_connection, - payload, - entity_id, - 
result, + entry: MockConfigEntry, + payload: dict[str, str], + entity_id: str, + result: bool, ) -> None: """Test lcn commands for unknown host.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id({**payload, "entry_id": entity_id}) @@ -265,13 +283,14 @@ async def test_lcn_command_host_error( async def test_lcn_command_address_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - entry, - lcn_connection, - payload, - address, - result, + entry: MockConfigEntry, + payload: dict[str, Any], + address: AddressType, + result: bool, ) -> None: """Test lcn commands for address error.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id( {**payload, "entry_id": entry.entry_id, CONF_ADDRESS: address} @@ -285,10 +304,11 @@ async def test_lcn_command_address_error( async def test_lcn_entities_add_existing_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - entry, - lcn_connection, + entry: MockConfigEntry, ) -> None: """Test lcn commands for address error.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id( { diff --git a/tests/components/lektrico/__init__.py b/tests/components/lektrico/__init__.py new file mode 100644 index 00000000000..449da2b35c4 --- /dev/null +++ b/tests/components/lektrico/__init__.py @@ -0,0 +1,13 @@ +"""Tests for Lektrico integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/lektrico/conftest.py b/tests/components/lektrico/conftest.py new file mode 100644 index 00000000000..fd840b0c290 --- /dev/null +++ b/tests/components/lektrico/conftest.py @@ -0,0 +1,92 @@ +"""Fixtures for Lektrico Charging Station integration tests.""" + +from collections.abc import Generator +from ipaddress import ip_address +import json +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.lektrico.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.const import ( + ATTR_HW_VERSION, + ATTR_SERIAL_NUMBER, + CONF_HOST, + CONF_TYPE, +) + +from tests.common import MockConfigEntry, load_fixture + +MOCKED_DEVICE_IP_ADDRESS = "192.168.100.10" +MOCKED_DEVICE_SERIAL_NUMBER = "500006" +MOCKED_DEVICE_TYPE = "1p7k" +MOCKED_DEVICE_BOARD_REV = "B" + +MOCKED_DEVICE_ZC_NAME = "Lektrico-1p7k-500006._http._tcp" +MOCKED_DEVICE_ZC_TYPE = "_http._tcp.local." 
+MOCKED_DEVICE_ZEROCONF_DATA = ZeroconfServiceInfo( + ip_address=ip_address(MOCKED_DEVICE_IP_ADDRESS), + ip_addresses=[ip_address(MOCKED_DEVICE_IP_ADDRESS)], + hostname=f"{MOCKED_DEVICE_ZC_NAME.lower()}.local.", + port=80, + type=MOCKED_DEVICE_ZC_TYPE, + name=MOCKED_DEVICE_ZC_NAME, + properties={ + "id": "1p7k_500006", + "fw_id": "20230109-124642/v1.22-36-g56a3edd-develop-dirty", + }, +) + + +@pytest.fixture +def mock_device() -> Generator[AsyncMock]: + """Mock a Lektrico device.""" + with ( + patch( + "homeassistant.components.lektrico.Device", + autospec=True, + ) as mock_device, + patch( + "homeassistant.components.lektrico.config_flow.Device", + new=mock_device, + ), + patch( + "homeassistant.components.lektrico.coordinator.Device", + new=mock_device, + ), + ): + device = mock_device.return_value + + device.device_config.return_value = json.loads( + load_fixture("get_config.json", DOMAIN) + ) + device.device_info.return_value = json.loads( + load_fixture("get_info.json", DOMAIN) + ) + + yield device + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setup entry.""" + with patch( + "homeassistant.components.lektrico.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + CONF_TYPE: MOCKED_DEVICE_TYPE, + ATTR_SERIAL_NUMBER: MOCKED_DEVICE_SERIAL_NUMBER, + ATTR_HW_VERSION: "B", + }, + unique_id=MOCKED_DEVICE_SERIAL_NUMBER, + ) diff --git a/tests/components/lektrico/fixtures/current_measures.json b/tests/components/lektrico/fixtures/current_measures.json new file mode 100644 index 00000000000..1175b49f63c --- /dev/null +++ b/tests/components/lektrico/fixtures/current_measures.json @@ -0,0 +1,16 @@ +{ + "charger_state": "Available", + "charging_time": 0, + "instant_power": 0, + "session_energy": 0.0, + "temperature": 34.5, + "total_charged_energy": 0, + "install_current": 6, + "current_limit_reason": "Installation current", + "voltage_l1": 220.0, + "current_l1": 0.0, + "type": "1p7k", + "serial_number": "500006", + "board_revision": "B", + "fw_version": "1.44" +} diff --git a/tests/components/lektrico/fixtures/get_config.json b/tests/components/lektrico/fixtures/get_config.json new file mode 100644 index 00000000000..175475004ec --- /dev/null +++ b/tests/components/lektrico/fixtures/get_config.json @@ -0,0 +1,5 @@ +{ + "type": "1p7k", + "serial_number": "500006", + "board_revision": "B" +} diff --git a/tests/components/lektrico/fixtures/get_info.json b/tests/components/lektrico/fixtures/get_info.json new file mode 100644 index 00000000000..2b099a666e5 --- /dev/null +++ b/tests/components/lektrico/fixtures/get_info.json @@ -0,0 +1,28 @@ +{ + "charger_state": "available", + "charging_time": 0, + "instant_power": 0, + "session_energy": 0.0, + "temperature": 34.5, + "total_charged_energy": 0, + "install_current": 6, + "current_limit_reason": "installation_current", + "voltage_l1": 220.0, + "current_l1": 0.0, + "fw_version": "1.44", + "led_max_brightness": 20, + "dynamic_current": 32, + "user_current": 32, + "lb_mode": 0, + "require_auth": true, + "state_e_activated": false, + "undervoltage_error": true, + "rcd_error": false, + "meter_fault": false, + "overcurrent": false, + "overtemp": false, + "overvoltage_error": false, + "contactor_failure": false, + "cp_diode_failure": false, + "critical_temp": false +} diff --git 
a/tests/components/lektrico/snapshots/test_binary_sensor.ambr b/tests/components/lektrico/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..6a28e7c60de --- /dev/null +++ b/tests/components/lektrico/snapshots/test_binary_sensor.ambr @@ -0,0 +1,471 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ev diode short', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cp_diode_failure', + 'unique_id': '500006_cp_diode_failure', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Ev diode short', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ev error', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_e_activated', + 'unique_id': '500006_state_e_activated', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Ev error', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_metering_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Metering error', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_fault', + 'unique_id': '500006_meter_fault', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Metering error', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_metering_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_overcurrent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overcurrent', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overcurrent', + 'unique_id': '500006_overcurrent', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Overcurrent', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_overcurrent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overheating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_overheating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overheating', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'critical_temp', + 'unique_id': '500006_critical_temp', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overheating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Overheating', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_overheating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overvoltage', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overvoltage', + 'unique_id': '500006_overvoltage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 
'1p7k_500006 Overvoltage', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rcd error', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rcd_error', + 'unique_id': '500006_rcd_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Rcd error', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Relay contacts welded', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'contactor_failure', + 'unique_id': '500006_contactor_failure', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Relay contacts welded', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Thermal throttling', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overtemp', + 'unique_id': '500006_overtemp', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 
'1p7k_500006 Thermal throttling', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_undervoltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Undervoltage', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'undervoltage', + 'unique_id': '500006_undervoltage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_undervoltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Undervoltage', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/lektrico/snapshots/test_button.ambr b/tests/components/lektrico/snapshots/test_button.ambr new file mode 100644 index 00000000000..5070cd484c4 --- /dev/null +++ b/tests/components/lektrico/snapshots/test_button.ambr @@ -0,0 +1,140 @@ +# serializer version: 1 +# name: test_all_entities[button.1p7k_500006_charge_start-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.1p7k_500006_charge_start', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charge start', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_start', + 'unique_id': '500006-charge_start', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.1p7k_500006_charge_start-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '1p7k_500006 Charge start', + }), + 'context': , + 'entity_id': 'button.1p7k_500006_charge_start', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[button.1p7k_500006_charge_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.1p7k_500006_charge_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charge stop', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_stop', + 'unique_id': '500006-charge_stop', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_all_entities[button.1p7k_500006_charge_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '1p7k_500006 Charge stop', + }), + 'context': , + 'entity_id': 'button.1p7k_500006_charge_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[button.1p7k_500006_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.1p7k_500006_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006-reboot', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.1p7k_500006_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': '1p7k_500006 Restart', + }), + 'context': , + 'entity_id': 'button.1p7k_500006_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lektrico/snapshots/test_init.ambr b/tests/components/lektrico/snapshots/test_init.ambr new file mode 100644 index 00000000000..63739e1c9d8 --- /dev/null +++ b/tests/components/lektrico/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'B', + 'id': , + 'identifiers': set({ + tuple( + 'lektrico', + '500006', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Lektrico', + 'model': '1P7K', + 'model_id': None, + 'name': '1p7k_500006', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '500006', + 'suggested_area': None, + 'sw_version': '1.44', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/lektrico/snapshots/test_number.ambr b/tests/components/lektrico/snapshots/test_number.ambr new file mode 100644 index 00000000000..30a37a25a09 --- /dev/null +++ b/tests/components/lektrico/snapshots/test_number.ambr @@ -0,0 +1,113 @@ +# serializer version: 1 +# name: test_all_entities[number.1p7k_500006_dynamic_limit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 32, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.1p7k_500006_dynamic_limit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dynamic limit', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dynamic_limit', + 'unique_id': '500006_dynamic_limit', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.1p7k_500006_dynamic_limit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': '1p7k_500006 Dynamic limit', + 'max': 32, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.1p7k_500006_dynamic_limit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32', + }) +# --- +# name: test_all_entities[number.1p7k_500006_led_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.1p7k_500006_led_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Led brightness', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_max_brightness', + 'unique_id': '500006_led_max_brightness', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[number.1p7k_500006_led_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '1p7k_500006 Led brightness', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 5, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.1p7k_500006_led_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- diff --git a/tests/components/lektrico/snapshots/test_select.ambr b/tests/components/lektrico/snapshots/test_select.ambr new file mode 100644 index 00000000000..5a964f52ada --- /dev/null +++ b/tests/components/lektrico/snapshots/test_select.ambr @@ -0,0 +1,60 @@ +# serializer version: 1 +# name: test_all_entities[select.1p7k_500006_load_balancing_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'disabled', + 'power', + 'hybrid', + 'green', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.1p7k_500006_load_balancing_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Load balancing mode', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'load_balancing_mode', + 'unique_id': '500006_load_balancing_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[select.1p7k_500006_load_balancing_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '1p7k_500006 Load balancing mode', + 'options': list([ + 'disabled', + 'power', + 'hybrid', + 'green', + ]), + }), + 'context': , + 'entity_id': 'select.1p7k_500006_load_balancing_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disabled', + }) +# --- diff --git a/tests/components/lektrico/snapshots/test_sensor.ambr b/tests/components/lektrico/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..73ec88e6fa1 --- /dev/null +++ b/tests/components/lektrico/snapshots/test_sensor.ambr @@ -0,0 +1,544 @@ +# serializer version: 1 +# name: test_all_entities[sensor.1p7k_500006_charging_time-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_charging_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charging time', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charging_time', + 'unique_id': '500006_charging_time', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_charging_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': '1p7k_500006 Charging time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_charging_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_current', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': '1p7k_500006 Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': '1p7k_500006 Energy', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_installation_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_installation_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Installation current', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'installation_current', + 'unique_id': '500006_installation_current', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_installation_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': '1p7k_500006 Installation current', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_installation_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_lifetime_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_lifetime_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lifetime energy', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_energy', + 'unique_id': '500006_lifetime_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_lifetime_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': '1p7k_500006 Lifetime energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_lifetime_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_limit_reason-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_limit', + 'installation_current', + 'user_limit', + 'dynamic_limit', + 'schedule', + 'em_offline', + 'em', + 'ocpp', + 'overtemperature', + 'switching_phases', + '1p_charging_disabled', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_limit_reason', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Limit reason', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'limit_reason', + 'unique_id': '500006_limit_reason', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_limit_reason-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': '1p7k_500006 Limit reason', + 'options': list([ + 'no_limit', + 'installation_current', + 'user_limit', + 'dynamic_limit', + 'schedule', + 'em_offline', + 'em', + 
'ocpp', + 'overtemperature', + 'switching_phases', + '1p_charging_disabled', + ]), + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_limit_reason', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'installation_current', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': '1p7k_500006 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0000', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'available', + 'charging', + 'connected', + 'error', + 'locked', + 'need_auth', + 'paused', + 'paused_by_scheduler', + 'updating_firmware', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state', + 'unique_id': '500006_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': '1p7k_500006 State', + 'options': list([ + 'available', + 'charging', + 'connected', + 'error', + 'locked', + 'need_auth', + 'paused', + 'paused_by_scheduler', + 'updating_firmware', + ]), + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'available', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'lektrico', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': '1p7k_500006 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.5', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': '1p7k_500006 Voltage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '220.0', + }) +# --- diff --git a/tests/components/lektrico/snapshots/test_switch.ambr b/tests/components/lektrico/snapshots/test_switch.ambr new file mode 100644 index 00000000000..3f4a1693315 --- /dev/null +++ b/tests/components/lektrico/snapshots/test_switch.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_all_entities[switch.1p7k_500006_authentication-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.1p7k_500006_authentication', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Authentication', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'authentication', + 'unique_id': '500006_authentication', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.1p7k_500006_authentication-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '1p7k_500006 Authentication', + }), + 'context': , + 'entity_id': 'switch.1p7k_500006_authentication', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[switch.1p7k_500006_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.1p7k_500006_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), 
+ 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lock', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lock', + 'unique_id': '500006_lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.1p7k_500006_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '1p7k_500006 Lock', + }), + 'context': , + 'entity_id': 'switch.1p7k_500006_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/lektrico/test_binary_sensor.py b/tests/components/lektrico/test_binary_sensor.py new file mode 100644 index 00000000000..d49eac6cc23 --- /dev/null +++ b/tests/components/lektrico/test_binary_sensor.py @@ -0,0 +1,32 @@ +"""Tests for the Lektrico binary sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + + with patch.multiple( + "homeassistant.components.lektrico", + CHARGERS_PLATFORMS=[Platform.BINARY_SENSOR], + LB_DEVICES_PLATFORMS=[Platform.BINARY_SENSOR], + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_button.py b/tests/components/lektrico/test_button.py new file mode 100644 index 00000000000..7bd77848d21 --- /dev/null +++ b/tests/components/lektrico/test_button.py @@ -0,0 +1,32 @@ +"""Tests for the Lektrico button platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + + with patch.multiple( + "homeassistant.components.lektrico", + CHARGERS_PLATFORMS=[Platform.BUTTON], + LB_DEVICES_PLATFORMS=[Platform.BUTTON], + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_config_flow.py b/tests/components/lektrico/test_config_flow.py new file mode 100644 index 00000000000..15ab5f7cdda --- /dev/null +++ b/tests/components/lektrico/test_config_flow.py @@ -0,0 +1,173 @@ +"""Tests for the Lektrico Charging Station config flow.""" + +import dataclasses +from ipaddress import ip_address + +from lektricowifi import DeviceConnectionError + +from homeassistant.components.lektrico.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import ( + ATTR_HW_VERSION, + ATTR_SERIAL_NUMBER, + CONF_HOST, + CONF_TYPE, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import ( + MOCKED_DEVICE_BOARD_REV, + MOCKED_DEVICE_IP_ADDRESS, + MOCKED_DEVICE_SERIAL_NUMBER, + MOCKED_DEVICE_TYPE, + MOCKED_DEVICE_ZEROCONF_DATA, +) + +from tests.common import MockConfigEntry + + +async def test_user_setup(hass: HomeAssistant, mock_device, mock_setup_entry) -> None: + """Test manually setting up.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == SOURCE_USER + assert "flow_id" in result + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + }, + ) + + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == f"{MOCKED_DEVICE_TYPE}_{MOCKED_DEVICE_SERIAL_NUMBER}" + assert result.get("data") == { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + ATTR_SERIAL_NUMBER: MOCKED_DEVICE_SERIAL_NUMBER, + CONF_TYPE: MOCKED_DEVICE_TYPE, + ATTR_HW_VERSION: MOCKED_DEVICE_BOARD_REV, + } + assert "result" in result + assert len(mock_setup_entry.mock_calls) == 1 + assert result.get("result").unique_id == MOCKED_DEVICE_SERIAL_NUMBER + + +async def test_user_setup_already_exists( + hass: HomeAssistant, mock_device, mock_config_entry: MockConfigEntry +) -> None: + """Test manually setting up when the device already exists.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_user_setup_device_offline(hass: HomeAssistant, mock_device) -> None: + """Test manually setting up when device is offline.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] 
== "user" + assert not result["errors"] + + mock_device.device_config.side_effect = DeviceConnectionError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {CONF_HOST: "cannot_connect"} + assert result["step_id"] == "user" + + mock_device.device_config.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_discovered_zeroconf( + hass: HomeAssistant, mock_device, mock_setup_entry +) -> None: + """Test we can setup when discovered from zeroconf.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=MOCKED_DEVICE_ZEROCONF_DATA, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] is None + assert result.get("step_id") == "confirm" + + result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + ATTR_SERIAL_NUMBER: MOCKED_DEVICE_SERIAL_NUMBER, + CONF_TYPE: MOCKED_DEVICE_TYPE, + ATTR_HW_VERSION: MOCKED_DEVICE_BOARD_REV, + } + assert result2["title"] == f"{MOCKED_DEVICE_TYPE}_{MOCKED_DEVICE_SERIAL_NUMBER}" + + +async def test_zeroconf_setup_already_exists( + hass: HomeAssistant, mock_device, mock_config_entry: MockConfigEntry +) -> None: + """Test we abort zeroconf flow if device already configured.""" + mock_config_entry.add_to_hass(hass) + zc_data_new_ip = dataclasses.replace(MOCKED_DEVICE_ZEROCONF_DATA) + zc_data_new_ip.ip_address = ip_address(MOCKED_DEVICE_IP_ADDRESS) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zc_data_new_ip, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_discovered_zeroconf_device_connection_error( + hass: HomeAssistant, mock_device +) -> None: + """Test we can setup when discovered from zeroconf but device went offline.""" + + mock_device.device_config.side_effect = DeviceConnectionError + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=MOCKED_DEVICE_ZEROCONF_DATA, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" diff --git a/tests/components/lektrico/test_init.py b/tests/components/lektrico/test_init.py new file mode 100644 index 00000000000..93068ffe531 --- /dev/null +++ b/tests/components/lektrico/test_init.py @@ -0,0 +1,29 @@ +"""Tests for the Lektrico integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.lektrico.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot diff --git a/tests/components/lektrico/test_number.py b/tests/components/lektrico/test_number.py new file mode 100644 index 00000000000..ade6515ca72 --- /dev/null +++ b/tests/components/lektrico/test_number.py @@ -0,0 +1,31 @@ +"""Tests for the Lektrico number platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch.multiple( + "homeassistant.components.lektrico", + CHARGERS_PLATFORMS=[Platform.NUMBER], + LB_DEVICES_PLATFORMS=[Platform.NUMBER], + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_select.py b/tests/components/lektrico/test_select.py new file mode 100644 index 00000000000..cb09c47535e --- /dev/null +++ b/tests/components/lektrico/test_select.py @@ -0,0 +1,31 @@ +"""Tests for the Lektrico select platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch.multiple( + "homeassistant.components.lektrico", + CHARGERS_PLATFORMS=[Platform.SELECT], + LB_DEVICES_PLATFORMS=[Platform.SELECT], + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_sensor.py b/tests/components/lektrico/test_sensor.py new file mode 100644 index 00000000000..27be7ff1c11 --- /dev/null +++ b/tests/components/lektrico/test_sensor.py @@ -0,0 +1,33 @@ +"""Tests for the Lektrico sensor platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch.multiple( + "homeassistant.components.lektrico", + CHARGERS_PLATFORMS=[Platform.SENSOR], + LB_DEVICES_PLATFORMS=[Platform.SENSOR], + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_switch.py b/tests/components/lektrico/test_switch.py new file mode 100644 index 00000000000..cfa693d9e44 --- /dev/null +++ b/tests/components/lektrico/test_switch.py @@ -0,0 +1,32 @@ +"""Tests for the Lektrico switch platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + + with patch.multiple( + "homeassistant.components.lektrico", + CHARGERS_PLATFORMS=[Platform.SWITCH], + LB_DEVICES_PLATFORMS=[Platform.SWITCH], + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_netcast/test_config_flow.py b/tests/components/lg_netcast/test_config_flow.py index 2ecbadbaf44..02707582484 100644 --- a/tests/components/lg_netcast/test_config_flow.py +++ b/tests/components/lg_netcast/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import DEFAULT, patch from homeassistant import data_entry_flow from homeassistant.components.lg_netcast.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_ACCESS_TOKEN, CONF_HOST, @@ -24,8 +24,6 @@ from . 
import ( _patch_lg_netcast, ) -from tests.common import MockConfigEntry - async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" @@ -146,77 +144,6 @@ async def test_invalid_session_id(hass: HomeAssistant) -> None: assert result2["errors"]["base"] == "cannot_connect" -async def test_import(hass: HomeAssistant) -> None: - """Test that the import works.""" - with _patch_lg_netcast(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - }, - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == UNIQUE_ID - assert result["data"] == { - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - CONF_MODEL: MODEL_NAME, - CONF_ID: UNIQUE_ID, - } - - -async def test_import_not_online(hass: HomeAssistant) -> None: - """Test that the import works.""" - with _patch_lg_netcast(fail_connection=True): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - }, - ) - - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - -async def test_import_duplicate_error(hass: HomeAssistant) -> None: - """Test that errors are shown when duplicates are added during import.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=UNIQUE_ID, - data={ - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - CONF_ID: UNIQUE_ID, - }, - ) - config_entry.add_to_hass(hass) - - with _patch_lg_netcast(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - CONF_ID: UNIQUE_ID, - }, - ) - - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "already_configured" - - async def test_display_access_token_aborted(hass: HomeAssistant) -> None: """Test Access token display is cancelled.""" diff --git a/tests/components/lg_thinq/__init__.py b/tests/components/lg_thinq/__init__.py new file mode 100644 index 00000000000..a5ba55ab1c9 --- /dev/null +++ b/tests/components/lg_thinq/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the LG ThinQ integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/lg_thinq/conftest.py b/tests/components/lg_thinq/conftest.py new file mode 100644 index 00000000000..05cb3164137 --- /dev/null +++ b/tests/components/lg_thinq/conftest.py @@ -0,0 +1,110 @@ +"""Configure tests for the LGThinQ integration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from thinqconnect import ThinQAPIException + +from homeassistant.components.lg_thinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY + +from .const import MOCK_CONNECT_CLIENT_ID, MOCK_COUNTRY, MOCK_PAT, MOCK_UUID + +from 
tests.common import MockConfigEntry, load_json_object_fixture + + +def mock_thinq_api_response( + *, + status: int = 200, + body: dict | None = None, + error_code: str | None = None, + error_message: str | None = None, +) -> MagicMock: + """Create a mock thinq api response.""" + response = MagicMock() + response.status = status + response.body = body + response.error_code = error_code + response.error_message = error_message + return response + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Create a mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=f"Test {DOMAIN}", + unique_id=MOCK_PAT, + data={ + CONF_ACCESS_TOKEN: MOCK_PAT, + CONF_CONNECT_CLIENT_ID: MOCK_CONNECT_CLIENT_ID, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.lg_thinq.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_uuid() -> Generator[AsyncMock]: + """Mock a uuid.""" + with ( + patch("uuid.uuid4", autospec=True, return_value=MOCK_UUID) as mock_uuid, + patch( + "homeassistant.components.lg_thinq.config_flow.uuid.uuid4", + new=mock_uuid, + ), + ): + yield mock_uuid.return_value + + +@pytest.fixture +def mock_thinq_api(mock_thinq_mqtt_client: AsyncMock) -> Generator[AsyncMock]: + """Mock a thinq api.""" + with ( + patch("homeassistant.components.lg_thinq.ThinQApi", autospec=True) as mock_api, + patch( + "homeassistant.components.lg_thinq.config_flow.ThinQApi", + new=mock_api, + ), + ): + thinq_api = mock_api.return_value + thinq_api.async_get_device_list.return_value = [ + load_json_object_fixture("air_conditioner/device.json", DOMAIN) + ] + thinq_api.async_get_device_profile.return_value = load_json_object_fixture( + "air_conditioner/profile.json", DOMAIN + ) + thinq_api.async_get_device_status.return_value = load_json_object_fixture( + "air_conditioner/status.json", DOMAIN + ) + yield thinq_api + + +@pytest.fixture +def mock_thinq_mqtt_client() -> Generator[AsyncMock]: + """Mock a thinq api.""" + with patch( + "homeassistant.components.lg_thinq.mqtt.ThinQMQTTClient", autospec=True + ) as mock_api: + yield mock_api + + +@pytest.fixture +def mock_invalid_thinq_api(mock_thinq_api: AsyncMock) -> AsyncMock: + """Mock an invalid thinq api.""" + mock_thinq_api.async_get_device_list = AsyncMock( + side_effect=ThinQAPIException( + code="1309", message="Not allowed api call", headers=None + ) + ) + return mock_thinq_api diff --git a/tests/components/lg_thinq/const.py b/tests/components/lg_thinq/const.py new file mode 100644 index 00000000000..f46baa61c38 --- /dev/null +++ b/tests/components/lg_thinq/const.py @@ -0,0 +1,8 @@ +"""Constants for lgthinq test.""" + +from typing import Final + +MOCK_PAT: Final[str] = "123abc4567de8f90g123h4ij56klmn789012p345rst6uvw789xy" +MOCK_UUID: Final[str] = "1b3deabc-123d-456d-987d-2a1c7b3bdb67" +MOCK_CONNECT_CLIENT_ID: Final[str] = f"home-assistant-{MOCK_UUID}" +MOCK_COUNTRY: Final[str] = "KR" diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/device.json b/tests/components/lg_thinq/fixtures/air_conditioner/device.json new file mode 100644 index 00000000000..fb931c69929 --- /dev/null +++ b/tests/components/lg_thinq/fixtures/air_conditioner/device.json @@ -0,0 +1,9 @@ +{ + "deviceId": "MW2-2E247F93-B570-46A6-B827-920E9E10F966", + "deviceInfo": { + "deviceType": "DEVICE_AIR_CONDITIONER", + "modelName": "PAC_910604_WW", + "alias": "Test 
air conditioner", + "reportable": true + } +} diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/profile.json b/tests/components/lg_thinq/fixtures/air_conditioner/profile.json new file mode 100644 index 00000000000..0d45dc5c9f4 --- /dev/null +++ b/tests/components/lg_thinq/fixtures/air_conditioner/profile.json @@ -0,0 +1,154 @@ +{ + "notification": { + "push": ["WATER_IS_FULL"] + }, + "property": { + "airConJobMode": { + "currentJobMode": { + "mode": ["r", "w"], + "type": "enum", + "value": { + "r": ["AIR_CLEAN", "COOL", "AIR_DRY"], + "w": ["AIR_CLEAN", "COOL", "AIR_DRY"] + } + } + }, + "airFlow": { + "windStrength": { + "mode": ["r", "w"], + "type": "enum", + "value": { + "r": ["LOW", "HIGH", "MID"], + "w": ["LOW", "HIGH", "MID"] + } + } + }, + "airQualitySensor": { + "PM1": { + "mode": ["r"], + "type": "number" + }, + "PM10": { + "mode": ["r"], + "type": "number" + }, + "PM2": { + "mode": ["r"], + "type": "number" + }, + "humidity": { + "mode": ["r"], + "type": "number" + }, + "monitoringEnabled": { + "mode": ["r", "w"], + "type": "enum", + "value": { + "r": ["ON_WORKING", "ALWAYS"], + "w": ["ON_WORKING", "ALWAYS"] + } + }, + "oder": { + "mode": ["r"], + "type": "number" + }, + "totalPollution": { + "mode": ["r"], + "type": "number" + } + }, + "operation": { + "airCleanOperationMode": { + "mode": ["w"], + "type": "enum", + "value": { + "w": ["START", "STOP"] + } + }, + "airConOperationMode": { + "mode": ["r", "w"], + "type": "enum", + "value": { + "r": ["POWER_ON", "POWER_OFF"], + "w": ["POWER_ON", "POWER_OFF"] + } + } + }, + "powerSave": { + "powerSaveEnabled": { + "mode": ["r", "w"], + "type": "boolean", + "value": { + "r": [false, true], + "w": [false, true] + } + } + }, + "temperature": { + "coolTargetTemperature": { + "mode": ["w"], + "type": "range", + "value": { + "w": { + "max": 30, + "min": 18, + "step": 1 + } + } + }, + "currentTemperature": { + "mode": ["r"], + "type": "number" + }, + "targetTemperature": { + "mode": ["r", "w"], + "type": "range", + "value": { + "r": { + "max": 30, + "min": 18, + "step": 1 + }, + "w": { + "max": 30, + "min": 18, + "step": 1 + } + } + }, + "unit": { + "mode": ["r"], + "type": "enum", + "value": { + "r": ["C", "F"] + } + } + }, + "timer": { + "relativeHourToStart": { + "mode": ["r", "w"], + "type": "number" + }, + "relativeHourToStop": { + "mode": ["r", "w"], + "type": "number" + }, + "relativeMinuteToStart": { + "mode": ["r", "w"], + "type": "number" + }, + "relativeMinuteToStop": { + "mode": ["r", "w"], + "type": "number" + }, + "absoluteHourToStart": { + "mode": ["r", "w"], + "type": "number" + }, + "absoluteMinuteToStart": { + "mode": ["r", "w"], + "type": "number" + } + } + } +} diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/status.json b/tests/components/lg_thinq/fixtures/air_conditioner/status.json new file mode 100644 index 00000000000..90d15d1ae16 --- /dev/null +++ b/tests/components/lg_thinq/fixtures/air_conditioner/status.json @@ -0,0 +1,43 @@ +{ + "airConJobMode": { + "currentJobMode": "COOL" + }, + "airFlow": { + "windStrength": "MID" + }, + "airQualitySensor": { + "PM1": 12, + "PM10": 7, + "PM2": 24, + "humidity": 40, + "monitoringEnabled": "ON_WORKING", + "totalPollution": 3, + "totalPollutionLevel": "GOOD" + }, + "filterInfo": { + "filterLifetime": 540, + "usedTime": 180 + }, + "operation": { + "airConOperationMode": "POWER_ON" + }, + "powerSave": { + "powerSaveEnabled": false + }, + "sleepTimer": { + "relativeStopTimer": "UNSET" + }, + "temperature": { + "currentTemperature": 25, + 
"targetTemperature": 19, + "unit": "C" + }, + "timer": { + "relativeStartTimer": "UNSET", + "relativeStopTimer": "UNSET", + "absoluteStartTimer": "SET", + "absoluteStopTimer": "UNSET", + "absoluteHourToStart": 13, + "absoluteMinuteToStart": 14 + } +} diff --git a/tests/components/lg_thinq/snapshots/test_climate.ambr b/tests/components/lg_thinq/snapshots/test_climate.ambr new file mode 100644 index 00000000000..e9470c3de03 --- /dev/null +++ b/tests/components/lg_thinq/snapshots/test_climate.ambr @@ -0,0 +1,86 @@ +# serializer version: 1 +# name: test_all_entities[climate.test_air_conditioner-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'low', + 'high', + 'mid', + ]), + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 30, + 'min_temp': 18, + 'preset_modes': list([ + 'air_clean', + ]), + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_air_conditioner', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_climate_air_conditioner', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[climate.test_air_conditioner-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 40, + 'current_temperature': 25, + 'fan_mode': 'mid', + 'fan_modes': list([ + 'low', + 'high', + 'mid', + ]), + 'friendly_name': 'Test air conditioner', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 30, + 'min_temp': 18, + 'preset_mode': None, + 'preset_modes': list([ + 'air_clean', + ]), + 'supported_features': , + 'target_temp_step': 1, + 'temperature': 19, + }), + 'context': , + 'entity_id': 'climate.test_air_conditioner', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- diff --git a/tests/components/lg_thinq/snapshots/test_event.ambr b/tests/components/lg_thinq/snapshots/test_event.ambr new file mode 100644 index 00000000000..025f4496aeb --- /dev/null +++ b/tests/components/lg_thinq/snapshots/test_event.ambr @@ -0,0 +1,55 @@ +# serializer version: 1 +# name: test_all_entities[event.test_air_conditioner_notification-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'water_is_full', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.test_air_conditioner_notification', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Notification', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_notification', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[event.test_air_conditioner_notification-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'event_type': None, + 'event_types': list([ + 
'water_is_full', + ]), + 'friendly_name': 'Test air conditioner Notification', + }), + 'context': , + 'entity_id': 'event.test_air_conditioner_notification', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lg_thinq/snapshots/test_number.ambr b/tests/components/lg_thinq/snapshots/test_number.ambr new file mode 100644 index 00000000000..68f01854501 --- /dev/null +++ b/tests/components/lg_thinq/snapshots/test_number.ambr @@ -0,0 +1,113 @@ +# serializer version: 1 +# name: test_all_entities[number.test_air_conditioner_schedule_turn_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.test_air_conditioner_schedule_turn_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Schedule turn-off', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_relative_hour_to_stop', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.test_air_conditioner_schedule_turn_off-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test air conditioner Schedule turn-off', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.test_air_conditioner_schedule_turn_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[number.test_air_conditioner_schedule_turn_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.test_air_conditioner_schedule_turn_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Schedule turn-on', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_relative_hour_to_start', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.test_air_conditioner_schedule_turn_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test air conditioner Schedule turn-on', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.test_air_conditioner_schedule_turn_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lg_thinq/snapshots/test_sensor.ambr b/tests/components/lg_thinq/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..387df916eba --- /dev/null +++ b/tests/components/lg_thinq/snapshots/test_sensor.ambr @@ -0,0 +1,205 @@ +# serializer version: 1 +# 
name: test_all_entities[sensor.test_air_conditioner_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_air_conditioner_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Test air conditioner Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_air_conditioner_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_air_conditioner_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM1', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_pm1', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': 'Test air conditioner PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.test_air_conditioner_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_air_conditioner_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_pm10', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'Test air conditioner PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 
'entity_id': 'sensor.test_air_conditioner_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_air_conditioner_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_pm2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'Test air conditioner PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.test_air_conditioner_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24', + }) +# --- diff --git a/tests/components/lg_thinq/test_climate.py b/tests/components/lg_thinq/test_climate.py new file mode 100644 index 00000000000..24ed3ad230d --- /dev/null +++ b/tests/components/lg_thinq/test_climate.py @@ -0,0 +1,29 @@ +"""Tests for the LG Thinq climate platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.CLIMATE]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_config_flow.py b/tests/components/lg_thinq/test_config_flow.py new file mode 100644 index 00000000000..8c5afb4dac7 --- /dev/null +++ b/tests/components/lg_thinq/test_config_flow.py @@ -0,0 +1,69 @@ +"""Test the lgthinq config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.lg_thinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import MOCK_CONNECT_CLIENT_ID, MOCK_COUNTRY, MOCK_PAT + +from tests.common import MockConfigEntry + + +async def test_config_flow( + hass: HomeAssistant, + mock_thinq_api: AsyncMock, + mock_uuid: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test that a thinq entry is normally created.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_ACCESS_TOKEN: MOCK_PAT, + CONF_COUNTRY: MOCK_COUNTRY, + CONF_CONNECT_CLIENT_ID: MOCK_CONNECT_CLIENT_ID, + } + + mock_thinq_api.async_get_device_list.assert_called_once() + + +async def test_config_flow_invalid_pat( + hass: HomeAssistant, mock_invalid_thinq_api: AsyncMock +) -> None: + """Test that a thinq flow is aborted with an invalid PAT.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] + mock_invalid_thinq_api.async_get_device_list.assert_called_once() + + +async def test_config_flow_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_thinq_api: AsyncMock +) -> None: + """Test that the thinq flow is aborted when already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/lg_thinq/test_event.py b/tests/components/lg_thinq/test_event.py new file mode 100644 index 00000000000..bea758cb943 --- /dev/null +++ b/tests/components/lg_thinq/test_event.py @@ -0,0 +1,29 @@ +"""Tests for the LG Thinq event platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion +
+from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.EVENT]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_init.py b/tests/components/lg_thinq/test_init.py new file mode 100644 index 00000000000..7da7e79fec0 --- /dev/null +++ b/tests/components/lg_thinq/test_init.py @@ -0,0 +1,26 @@ +"""Tests for the LG ThinQ integration.""" + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/lg_thinq/test_number.py b/tests/components/lg_thinq/test_number.py new file mode 100644 index 00000000000..e578e4eba7a --- /dev/null +++ b/tests/components/lg_thinq/test_number.py @@ -0,0 +1,29 @@ +"""Tests for the LG Thinq number platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_sensor.py b/tests/components/lg_thinq/test_sensor.py new file mode 100644 index 00000000000..02b91b4771b --- /dev/null +++ b/tests/components/lg_thinq/test_sensor.py @@ -0,0 +1,29 @@ +"""Tests for the LG Thinq sensor platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lidarr/conftest.py b/tests/components/lidarr/conftest.py index 1024aadc403..bd87fa947bc 100644 --- a/tests/components/lidarr/conftest.py +++ b/tests/components/lidarr/conftest.py @@ -44,10 +44,12 @@ def mock_error( aioclient_mock.get(f"{API_URL}/rootfolder", status=status) aioclient_mock.get(f"{API_URL}/system/status", status=status) aioclient_mock.get(f"{API_URL}/wanted/missing", status=status) + aioclient_mock.get(f"{API_URL}/album", status=status) aioclient_mock.get(f"{API_URL}/queue", exc=ClientError) aioclient_mock.get(f"{API_URL}/rootfolder", exc=ClientError) aioclient_mock.get(f"{API_URL}/system/status", exc=ClientError) aioclient_mock.get(f"{API_URL}/wanted/missing", exc=ClientError) + aioclient_mock.get(f"{API_URL}/album", exc=ClientError) @pytest.fixture @@ -115,6 +117,11 @@ def mock_connection(aioclient_mock: AiohttpClientMocker) -> None: text=load_fixture("lidarr/wanted-missing.json"), headers={"Content-Type": CONTENT_TYPE_JSON}, ) + aioclient_mock.get( + f"{API_URL}/album", + text=load_fixture("lidarr/album.json"), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) aioclient_mock.get( f"{API_URL}/rootfolder", text=load_fixture("lidarr/rootfolder-linux.json"), diff --git a/tests/components/lidarr/fixtures/album.json b/tests/components/lidarr/fixtures/album.json new file mode 100644 index 00000000000..d257cabf1f1 --- /dev/null +++ b/tests/components/lidarr/fixtures/album.json @@ -0,0 +1,155 @@ +[ + { + "id": 0, + "title": "string", + "disambiguation": "string", + "overview": "string", + "artistId": 0, + "foreignAlbumId": "string", + "monitored": true, + "anyReleaseOk": true, + "profileId": 0, + "duration": 0, + "albumType": "string", + "secondaryTypes": ["string"], + "mediumCount": 0, + "ratings": { + "votes": 0, + "value": 0 + }, + "releaseDate": "2024-09-09T20:16:28.493Z", + "releases": [ + { + "id": 0, + "albumId": 0, + "foreignReleaseId": "string", + "title": "string", + "status": "string", + "duration": 0, + "trackCount": 0, + "media": [ + { + "mediumNumber": 0, + "mediumName": "string", + "mediumFormat": "string" + } + ], + "mediumCount": 0, + "disambiguation": "string", + "country": ["string"], + "label": ["string"], + "format": "string", + "monitored": true + } + ], + "genres": ["string"], + "media": [ + { + "mediumNumber": 0, + "mediumName": "string", + "mediumFormat": "string" + } + ], + "artist": { + "id": 0, + "status": "continuing", + "ended": true, + "artistName": "string", + "foreignArtistId": "string", + "mbId": "string", + "tadbId": 0, + "discogsId": 0, + "allMusicId": "string", + "overview": "string", + "artistType": "string", + "disambiguation": "string", + "links": [ + { + "url": "string", + "name": "string" + } + ], + "nextAlbum": "string", + "lastAlbum": "string", + "images": [ + { + "url": "string", + "coverType": "unknown", + "extension": "string", + "remoteUrl": "string" + } + ], + "members": [ + { + "name": "string", + 
"instrument": "string", + "images": [ + { + "url": "string", + "coverType": "unknown", + "extension": "string", + "remoteUrl": "string" + } + ] + } + ], + "remotePoster": "string", + "path": "string", + "qualityProfileId": 0, + "metadataProfileId": 0, + "monitored": true, + "monitorNewItems": "all", + "rootFolderPath": "string", + "folder": "string", + "genres": ["string"], + "cleanName": "string", + "sortName": "string", + "tags": [0], + "added": "2024-09-09T20:16:28.493Z", + "addOptions": { + "monitor": "all", + "albumsToMonitor": ["string"], + "monitored": true, + "searchForMissingAlbums": true + }, + "ratings": { + "votes": 0, + "value": 0 + }, + "statistics": { + "albumCount": 0, + "trackFileCount": 0, + "trackCount": 0, + "totalTrackCount": 0, + "sizeOnDisk": 0, + "percentOfTracks": 0 + } + }, + "images": [ + { + "url": "string", + "coverType": "unknown", + "extension": "string", + "remoteUrl": "string" + } + ], + "links": [ + { + "url": "string", + "name": "string" + } + ], + "statistics": { + "trackFileCount": 0, + "trackCount": 0, + "totalTrackCount": 0, + "sizeOnDisk": 0, + "percentOfTracks": 0 + }, + "addOptions": { + "addType": "automatic", + "searchForNewAlbum": true + }, + "remoteCover": "string" + } +] diff --git a/tests/components/lidarr/test_config_flow.py b/tests/components/lidarr/test_config_flow.py index e44b03cd2a2..0097e66fe24 100644 --- a/tests/components/lidarr/test_config_flow.py +++ b/tests/components/lidarr/test_config_flow.py @@ -1,13 +1,15 @@ """Test Lidarr config flow.""" from homeassistant.components.lidarr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import CONF_DATA, MOCK_INPUT, ComponentSetup +from tests.common import MockConfigEntry + async def test_flow_user_form(hass: HomeAssistant, connection) -> None: """Test that the user set up form is served.""" @@ -95,20 +97,14 @@ async def test_flow_user_unknown_error(hass: HomeAssistant, unknown) -> None: async def test_flow_reauth( - hass: HomeAssistant, setup_integration: ComponentSetup, connection + hass: HomeAssistant, + setup_integration: ComponentSetup, + connection, + config_entry: MockConfigEntry, ) -> None: """Test reauth.""" await setup_integration() - entry = hass.config_entries.async_entries(DOMAIN)[0] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure( @@ -123,4 +119,4 @@ async def test_flow_reauth( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" - assert entry.data[CONF_API_KEY] == "abc123" + assert config_entry.data[CONF_API_KEY] == "abc123" diff --git a/tests/components/lidarr/test_sensor.py b/tests/components/lidarr/test_sensor.py index 0c19355a252..716df21303a 100644 --- a/tests/components/lidarr/test_sensor.py +++ b/tests/components/lidarr/test_sensor.py @@ -25,10 +25,14 @@ async def test_sensors( assert state.state == "2" assert state.attributes.get("string") == "stopped" assert state.attributes.get("string2") == "downloading" 
- assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Albums" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "albums" assert state.attributes.get(CONF_STATE_CLASS) == SensorStateClass.TOTAL state = hass.states.get("sensor.mock_title_wanted") assert state.state == "1" assert state.attributes.get("test") == "test" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Albums" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "albums" + assert state.attributes.get(CONF_STATE_CLASS) == SensorStateClass.TOTAL + state = hass.states.get("sensor.mock_title_albums") + assert state.state == "1" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "albums" assert state.attributes.get(CONF_STATE_CLASS) == SensorStateClass.TOTAL diff --git a/tests/components/lifx/__init__.py b/tests/components/lifx/__init__.py index 432e7673db6..81b913da6ce 100644 --- a/tests/components/lifx/__init__.py +++ b/tests/components/lifx/__init__.py @@ -65,10 +65,13 @@ class MockLifxCommand: """Init command.""" self.bulb = bulb self.calls = [] - self.msg_kwargs = kwargs + self.msg_kwargs = { + k.removeprefix("msg_"): v for k, v in kwargs.items() if k.startswith("msg_") + } for k, v in kwargs.items(): - if k != "callb": - setattr(self.bulb, k, v) + if k.startswith("msg_") or k == "callb": + continue + setattr(self.bulb, k, v) def __call__(self, *args, **kwargs): """Call command.""" @@ -156,9 +159,16 @@ def _mocked_infrared_bulb() -> Light: def _mocked_light_strip() -> Light: bulb = _mocked_bulb() bulb.product = 31 # LIFX Z - bulb.color_zones = [MagicMock(), MagicMock()] + bulb.zones_count = 3 + bulb.color_zones = [MagicMock()] * 3 bulb.effect = {"effect": "MOVE", "speed": 3, "duration": 0, "direction": "RIGHT"} - bulb.get_color_zones = MockLifxCommand(bulb) + bulb.get_color_zones = MockLifxCommand( + bulb, + msg_seq_num=bulb.seq_next(), + msg_count=bulb.zones_count, + msg_index=0, + msg_color=bulb.color_zones, + ) bulb.set_color_zones = MockLifxCommand(bulb) bulb.get_multizone_effect = MockLifxCommand(bulb) bulb.set_multizone_effect = MockLifxCommand(bulb) diff --git a/tests/components/lifx/test_config_flow.py b/tests/components/lifx/test_config_flow.py index 29324d0d19a..d1a6920f84a 100644 --- a/tests/components/lifx/test_config_flow.py +++ b/tests/components/lifx/test_config_flow.py @@ -10,6 +10,7 @@ import pytest from homeassistant import config_entries from homeassistant.components import dhcp, zeroconf from homeassistant.components.lifx import DOMAIN +from homeassistant.components.lifx.config_flow import LifXConfigFlow from homeassistant.components.lifx.const import CONF_SERIAL from homeassistant.const import CONF_DEVICE, CONF_HOST from homeassistant.core import HomeAssistant @@ -369,7 +370,18 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_in_progress" - with _patch_discovery(), _patch_config_flow_try_connect(): + real_is_matching = LifXConfigFlow.is_matching + return_values = [] + + def is_matching(self, other_flow) -> bool: + return_values.append(real_is_matching(self, other_flow)) + return return_values[-1] + + with ( + _patch_discovery(), + _patch_config_flow_try_connect(), + patch.object(LifXConfigFlow, "is_matching", wraps=is_matching, autospec=True), + ): result3 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -380,6 +392,8 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: 
await hass.async_block_till_done() assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "already_in_progress" + # Ensure the is_matching method returned True + assert return_values == [True] with ( _patch_discovery(no_device=True), diff --git a/tests/components/lifx/test_diagnostics.py b/tests/components/lifx/test_diagnostics.py index e3588dd3ed1..22e335612f8 100644 --- a/tests/components/lifx/test_diagnostics.py +++ b/tests/components/lifx/test_diagnostics.py @@ -9,6 +9,7 @@ from . import ( DEFAULT_ENTRY_TITLE, IP_ADDRESS, SERIAL, + MockLifxCommand, _mocked_bulb, _mocked_clean_bulb, _mocked_infrared_bulb, @@ -188,6 +189,22 @@ async def test_legacy_multizone_bulb_diagnostics( ) config_entry.add_to_hass(hass) bulb = _mocked_light_strip() + bulb.get_color_zones = MockLifxCommand( + bulb, + msg_seq_num=0, + msg_count=8, + msg_color=[ + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + ], + msg_index=0, + ) bulb.zones_count = 8 bulb.color_zones = [ (54612, 65535, 65535, 3500), @@ -302,6 +319,22 @@ async def test_multizone_bulb_diagnostics( config_entry.add_to_hass(hass) bulb = _mocked_light_strip() bulb.product = 38 + bulb.get_color_zones = MockLifxCommand( + bulb, + msg_seq_num=0, + msg_count=8, + msg_color=[ + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + ], + msg_index=0, + ) bulb.zones_count = 8 bulb.color_zones = [ (54612, 65535, 65535, 3500), diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index a642347b4e6..ffe819fa2cb 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -9,7 +9,7 @@ import pytest from homeassistant.components import lifx from homeassistant.components.lifx import DOMAIN -from homeassistant.components.lifx.const import ATTR_POWER +from homeassistant.components.lifx.const import _ATTR_COLOR_TEMP, ATTR_POWER from homeassistant.components.lifx.light import ATTR_INFRARED, ATTR_ZONES from homeassistant.components.lifx.manager import ( ATTR_CLOUD_SATURATION_MAX, @@ -31,11 +31,9 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS_PCT, ATTR_COLOR_MODE, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, @@ -192,15 +190,7 @@ async def test_light_strip(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100}, blocking=True, ) - call_dict = bulb.set_color_zones.calls[0][1] - call_dict.pop("callb") - assert call_dict == { - "apply": 0, - "color": [], - "duration": 0, - "end_index": 0, - "start_index": 0, - } + assert len(bulb.set_color_zones.calls) == 0 bulb.set_color_zones.reset_mock() await hass.services.async_call( @@ -209,15 +199,7 @@ async def test_light_strip(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_HS_COLOR: (10, 30)}, blocking=True, ) - call_dict = bulb.set_color_zones.calls[0][1] - call_dict.pop("callb") - assert call_dict == { - "apply": 0, - "color": [], - "duration": 0, - "end_index": 0, - "start_index": 0, - } + assert len(bulb.set_color_zones.calls) == 0 bulb.set_color_zones.reset_mock() bulb.color_zones = [ @@ 
-238,7 +220,7 @@ async def test_light_strip(hass: HomeAssistant) -> None: blocking=True, ) # Single color uses the fast path - assert bulb.set_color.calls[0][0][0] == [1820, 19660, 65535, 3500] + assert bulb.set_color.calls[1][0][0] == [1820, 19660, 65535, 3500] bulb.set_color.reset_mock() assert len(bulb.set_color_zones.calls) == 0 @@ -422,7 +404,9 @@ async def test_light_strip(hass: HomeAssistant) -> None: blocking=True, ) - bulb.get_color_zones = MockLifxCommand(bulb) + bulb.get_color_zones = MockLifxCommand( + bulb, msg_seq_num=0, msg_color=[0, 0, 65535, 3500] * 3, msg_index=0, msg_count=3 + ) bulb.get_color = MockFailingLifxCommand(bulb) with pytest.raises(HomeAssistantError): @@ -587,14 +571,14 @@ async def test_extended_multizone_messages(hass: HomeAssistant) -> None: bulb.set_extended_color_zones.reset_mock() bulb.color_zones = [ - (0, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), + [0, 65535, 65535, 3500], + [54612, 65535, 65535, 3500], + [54612, 65535, 65535, 3500], + [54612, 65535, 65535, 3500], + [46420, 65535, 65535, 3500], + [46420, 65535, 65535, 3500], + [46420, 65535, 65535, 3500], + [46420, 65535, 65535, 3500], ] await hass.services.async_call( @@ -1112,8 +1096,8 @@ async def test_color_light_with_temp( ColorMode.HS, ] assert attributes[ATTR_HS_COLOR] == (30.754, 7.122) - assert attributes[ATTR_RGB_COLOR] == (255, 246, 236) - assert attributes[ATTR_XY_COLOR] == (0.34, 0.339) + assert attributes[ATTR_RGB_COLOR] == (255, 246, 237) + assert attributes[ATTR_XY_COLOR] == (0.339, 0.338) bulb.color = [65535, 65535, 65535, 65535] await hass.services.async_call( @@ -1278,7 +1262,7 @@ async def test_white_bulb(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 400}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) assert bulb.set_color.calls[0][0][0] == [32000, 0, 32000, 2500] @@ -1308,7 +1292,11 @@ async def test_config_zoned_light_strip_fails( def __call__(self, callb=None, *args, **kwargs): """Call command.""" self.call_count += 1 - response = None if self.call_count >= 2 else MockMessage() + response = ( + None + if self.call_count >= 2 + else MockMessage(seq_num=0, color=[], index=0, count=0) + ) if callb: callb(self.bulb, response) @@ -1349,7 +1337,15 @@ async def test_legacy_zoned_light_strip( self.call_count += 1 self.bulb.color_zones = [None] * 12 if callb: - callb(self.bulb, MockMessage()) + callb( + self.bulb, + MockMessage( + seq_num=0, + index=0, + count=self.bulb.zones_count, + color=self.bulb.color_zones, + ), + ) get_color_zones_mock = MockPopulateLifxZonesCommand(light_strip) light_strip.get_color_zones = get_color_zones_mock @@ -1721,7 +1717,7 @@ async def test_lifx_set_state_color(hass: HomeAssistant) -> None: async def test_lifx_set_state_kelvin(hass: HomeAssistant) -> None: - """Test set_state works with old and new kelvin parameter names.""" + """Test set_state works with kelvin parameter names.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=SERIAL ) @@ -1750,15 +1746,6 @@ async def test_lifx_set_state_kelvin(hass: HomeAssistant) -> None: assert bulb.set_power.calls[0][0][0] is False bulb.set_power.reset_mock() - await hass.services.async_call( - DOMAIN, - "set_state", - {ATTR_ENTITY_ID: entity_id, 
ATTR_BRIGHTNESS: 255, ATTR_KELVIN: 3500}, - blocking=True, - ) - assert bulb.set_color.calls[0][0][0] == [32000, 0, 65535, 3500] - bulb.set_color.reset_mock() - await hass.services.async_call( DOMAIN, "set_state", @@ -1771,7 +1758,7 @@ async def test_lifx_set_state_kelvin(hass: HomeAssistant) -> None: await hass.services.async_call( DOMAIN, "set_state", - {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255, ATTR_COLOR_TEMP: 400}, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255, _ATTR_COLOR_TEMP: 400}, blocking=True, ) assert bulb.set_color.calls[0][0][0] == [32000, 0, 65535, 2500] @@ -1946,6 +1933,33 @@ async def test_light_strip_zones_not_populated_yet(hass: HomeAssistant) -> None: bulb.power_level = 65535 bulb.color_zones = None bulb.color = [65535, 65535, 65535, 65535] + bulb.get_color_zones = next( + iter( + [ + MockLifxCommand( + bulb, + msg_seq_num=0, + msg_color=[0, 0, 65535, 3500] * 8, + msg_index=0, + msg_count=16, + ), + MockLifxCommand( + bulb, + msg_seq_num=1, + msg_color=[0, 0, 65535, 3500] * 8, + msg_index=0, + msg_count=16, + ), + MockLifxCommand( + bulb, + msg_seq_num=2, + msg_color=[0, 0, 65535, 3500] * 8, + msg_index=8, + msg_count=16, + ), + ] + ) + ) assert bulb.get_color_zones.calls == [] with ( diff --git a/tests/components/light/common.py b/tests/components/light/common.py index 0ad492a31e9..b29ac0c7c89 100644 --- a/tests/components/light/common.py +++ b/tests/components/light/common.py @@ -10,11 +10,10 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_PROFILE, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -22,9 +21,12 @@ from homeassistant.components.light import ( ATTR_TRANSITION, ATTR_WHITE, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN, ColorMode, LightEntity, + LightEntityFeature, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -34,54 +36,10 @@ from homeassistant.const import ( SERVICE_TURN_ON, ) from homeassistant.core import HomeAssistant -from homeassistant.loader import bind_hass from tests.common import MockToggleEntity -@bind_hass -def turn_on( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - rgbw_color: tuple[int, int, int, int] | None = None, - rgbww_color: tuple[int, int, int, int, int] | None = None, - xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, - white: bool | None = None, -) -> None: - """Turn all or specified light on.""" - hass.add_job( - async_turn_on, - hass, - entity_id, - transition, - brightness, - brightness_pct, - rgb_color, - rgbw_color, - rgbww_color, - xy_color, - hs_color, - color_temp, - kelvin, - profile, - flash, - effect, - color_name, - white, - ) - - async def async_turn_on( hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, @@ -93,13 +51,12 @@ async def async_turn_on( rgbww_color: tuple[int, int, int, int, int] | None = None, xy_color: tuple[float, float] | None = None, hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, + color_temp_kelvin: int | None = None, profile: str | None = None, flash: str | None = 
None, effect: str | None = None, color_name: str | None = None, - white: bool | None = None, + white: int | None = None, ) -> None: """Turn all or specified light on.""" data = { @@ -115,8 +72,7 @@ async def async_turn_on( (ATTR_RGBWW_COLOR, rgbww_color), (ATTR_XY_COLOR, xy_color), (ATTR_HS_COLOR, hs_color), - (ATTR_COLOR_TEMP, color_temp), - (ATTR_KELVIN, kelvin), + (ATTR_COLOR_TEMP_KELVIN, color_temp_kelvin), (ATTR_FLASH, flash), (ATTR_EFFECT, effect), (ATTR_COLOR_NAME, color_name), @@ -128,17 +84,6 @@ async def async_turn_on( await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) -@bind_hass -def turn_off( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - flash: str | None = None, -) -> None: - """Turn all or specified light off.""" - hass.add_job(async_turn_off, hass, entity_id, transition, flash) - - async def async_turn_off( hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, @@ -159,43 +104,6 @@ async def async_turn_off( await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) -@bind_hass -def toggle( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, -) -> None: - """Toggle all or specified light.""" - hass.add_job( - async_toggle, - hass, - entity_id, - transition, - brightness, - brightness_pct, - rgb_color, - xy_color, - hs_color, - color_temp, - kelvin, - profile, - flash, - effect, - color_name, - ) - - async def async_toggle( hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, @@ -205,8 +113,7 @@ async def async_toggle( rgb_color: tuple[int, int, int] | None = None, xy_color: tuple[float, float] | None = None, hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, + color_temp_kelvin: int | None = None, profile: str | None = None, flash: str | None = None, effect: str | None = None, @@ -224,8 +131,7 @@ async def async_toggle( (ATTR_RGB_COLOR, rgb_color), (ATTR_XY_COLOR, xy_color), (ATTR_HS_COLOR, hs_color), - (ATTR_COLOR_TEMP, color_temp), - (ATTR_KELVIN, kelvin), + (ATTR_COLOR_TEMP_KELVIN, color_temp_kelvin), (ATTR_FLASH, flash), (ATTR_EFFECT, effect), (ATTR_COLOR_NAME, color_name), @@ -249,9 +155,9 @@ TURN_ON_ARG_TO_COLOR_MODE = { class MockLight(MockToggleEntity, LightEntity): """Mock light class.""" - _attr_max_color_temp_kelvin = 6500 - _attr_min_color_temp_kelvin = 2000 - supported_features = 0 + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + supported_features = LightEntityFeature(0) brightness = None color_temp_kelvin = None diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index eeb32f1b17a..303bf68f68c 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -1,6 +1,6 @@ """The tests for the Light component.""" -from typing import Literal +from types import ModuleType from unittest.mock import MagicMock, mock_open, patch import pytest @@ -20,6 +20,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from 
homeassistant.exceptions import HomeAssistantError, Unauthorized +from homeassistant.helpers import frame from homeassistant.setup import async_setup_component import homeassistant.util.color as color_util @@ -29,6 +30,9 @@ from tests.common import ( MockEntityPlatform, MockUser, async_mock_service, + help_test_all, + import_and_test_deprecated_constant, + import_and_test_deprecated_constant_enum, setup_test_component_platform, ) @@ -133,13 +137,8 @@ async def test_services( ent3.supported_color_modes = [light.ColorMode.HS] ent1.supported_features = light.LightEntityFeature.TRANSITION ent2.supported_features = ( - light.SUPPORT_COLOR - | light.LightEntityFeature.EFFECT - | light.LightEntityFeature.TRANSITION + light.LightEntityFeature.EFFECT | light.LightEntityFeature.TRANSITION ) - # Set color modes to none to trigger backwards compatibility in LightEntity - ent2.supported_color_modes = None - ent2.color_mode = None ent3.supported_features = ( light.LightEntityFeature.FLASH | light.LightEntityFeature.TRANSITION ) @@ -255,10 +254,7 @@ async def test_services( } _, data = ent2.last_call("turn_on") - assert data == { - light.ATTR_EFFECT: "fun_effect", - light.ATTR_HS_COLOR: (0, 0), - } + assert data == {light.ATTR_EFFECT: "fun_effect"} _, data = ent3.last_call("turn_on") assert data == {light.ATTR_FLASH: "short", light.ATTR_HS_COLOR: (71.059, 100)} @@ -342,8 +338,6 @@ async def test_services( _, data = ent2.last_call("turn_on") assert data == { - light.ATTR_BRIGHTNESS: 100, - light.ATTR_HS_COLOR: profile.hs_color, light.ATTR_TRANSITION: 1, } @@ -921,16 +915,12 @@ async def test_light_brightness_step(hass: HomeAssistant) -> None: setup_test_component_platform(hass, light.DOMAIN, entities) entity0 = entities[0] - entity0.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity0.supported_color_modes = None - entity0.color_mode = None + entity0.supported_color_modes = {light.ColorMode.BRIGHTNESS} + entity0.color_mode = light.ColorMode.BRIGHTNESS entity0.brightness = 100 entity1 = entities[1] - entity1.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity1.supported_color_modes = None - entity1.color_mode = None + entity1.supported_color_modes = {light.ColorMode.BRIGHTNESS} + entity1.color_mode = light.ColorMode.BRIGHTNESS entity1.brightness = 50 assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -991,10 +981,8 @@ async def test_light_brightness_pct_conversion( setup_test_component_platform(hass, light.DOMAIN, mock_light_entities) entity = mock_light_entities[0] - entity.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity.supported_color_modes = None - entity.color_mode = None + entity.supported_color_modes = {light.ColorMode.BRIGHTNESS} + entity.color_mode = light.ColorMode.BRIGHTNESS entity.brightness = 100 assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -1143,167 +1131,6 @@ invalid_no_brightness_no_color_no_transition,,, assert invalid_profile_name not in profiles.data -@pytest.mark.parametrize("light_state", [STATE_ON, STATE_OFF]) -async def test_light_backwards_compatibility_supported_color_modes( - hass: HomeAssistant, light_state: Literal["on", "off"] -) -> None: - """Test supported_color_modes if not implemented by 
the entity.""" - entities = [ - MockLight("Test_0", light_state), - MockLight("Test_1", light_state), - MockLight("Test_2", light_state), - MockLight("Test_3", light_state), - MockLight("Test_4", light_state), - ] - - entity0 = entities[0] - - entity1 = entities[1] - entity1.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity1.supported_color_modes = None - entity1.color_mode = None - - entity2 = entities[2] - entity2.supported_features = light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR_TEMP - # Set color modes to none to trigger backwards compatibility in LightEntity - entity2.supported_color_modes = None - entity2.color_mode = None - - entity3 = entities[3] - entity3.supported_features = light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity3.supported_color_modes = None - entity3.color_mode = None - - entity4 = entities[4] - entity4.supported_features = ( - light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR | light.SUPPORT_COLOR_TEMP - ) - # Set color modes to none to trigger backwards compatibility in LightEntity - entity4.supported_color_modes = None - entity4.color_mode = None - - setup_test_component_platform(hass, light.DOMAIN, entities) - - assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) - await hass.async_block_till_done() - - state = hass.states.get(entity0.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.ONOFF] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.ONOFF - - state = hass.states.get(entity1.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.BRIGHTNESS] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.UNKNOWN - - state = hass.states.get(entity2.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.UNKNOWN - - state = hass.states.get(entity3.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.HS] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.UNKNOWN - - state = hass.states.get(entity4.entity_id) - assert state.attributes["supported_color_modes"] == [ - light.ColorMode.COLOR_TEMP, - light.ColorMode.HS, - ] - if light_state == STATE_OFF: - assert state.attributes["color_mode"] is None - else: - assert state.attributes["color_mode"] == light.ColorMode.UNKNOWN - - -async def test_light_backwards_compatibility_color_mode(hass: HomeAssistant) -> None: - """Test color_mode if not implemented by the entity.""" - entities = [ - MockLight("Test_0", STATE_ON), - MockLight("Test_1", STATE_ON), - MockLight("Test_2", STATE_ON), - MockLight("Test_3", STATE_ON), - MockLight("Test_4", STATE_ON), - ] - - entity0 = entities[0] - - entity1 = entities[1] - entity1.supported_features = light.SUPPORT_BRIGHTNESS - # Set color modes to none to trigger backwards compatibility in LightEntity - entity1.supported_color_modes = None - entity1.color_mode = None - entity1.brightness = 100 - - entity2 = entities[2] - entity2.supported_features = 
light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR_TEMP - # Set color modes to none to trigger backwards compatibility in LightEntity - entity2.supported_color_modes = None - entity2.color_mode = None - entity2.color_temp_kelvin = 10000 - - entity3 = entities[3] - entity3.supported_features = light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity3.supported_color_modes = None - entity3.color_mode = None - entity3.hs_color = (240, 100) - - entity4 = entities[4] - entity4.supported_features = ( - light.SUPPORT_BRIGHTNESS | light.SUPPORT_COLOR | light.SUPPORT_COLOR_TEMP - ) - # Set color modes to none to trigger backwards compatibility in LightEntity - entity4.supported_color_modes = None - entity4.color_mode = None - entity4.hs_color = (240, 100) - entity4.color_temp_kelvin = 10000 - - setup_test_component_platform(hass, light.DOMAIN, entities) - - assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) - await hass.async_block_till_done() - - state = hass.states.get(entity0.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.ONOFF] - assert state.attributes["color_mode"] == light.ColorMode.ONOFF - - state = hass.states.get(entity1.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.BRIGHTNESS] - assert state.attributes["color_mode"] == light.ColorMode.BRIGHTNESS - - state = hass.states.get(entity2.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] - assert state.attributes["color_mode"] == light.ColorMode.COLOR_TEMP - assert state.attributes["rgb_color"] == (201, 218, 255) - assert state.attributes["hs_color"] == (221.575, 20.9) - assert state.attributes["xy_color"] == (0.277, 0.287) - - state = hass.states.get(entity3.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.HS] - assert state.attributes["color_mode"] == light.ColorMode.HS - - state = hass.states.get(entity4.entity_id) - assert state.attributes["supported_color_modes"] == [ - light.ColorMode.COLOR_TEMP, - light.ColorMode.HS, - ] - # hs color prioritized over color_temp, light should report mode ColorMode.HS - assert state.attributes["color_mode"] == light.ColorMode.HS - - async def test_light_service_call_rgbw(hass: HomeAssistant) -> None: """Test rgbw functionality in service calls.""" entity0 = MockLight("Test_rgbw", STATE_ON) @@ -1359,7 +1186,7 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "color_mode": None, "friendly_name": "Test_onoff", "supported_color_modes": [light.ColorMode.ONOFF], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), } state = hass.states.get(entity1.entity_id) @@ -1367,7 +1194,7 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "color_mode": None, "friendly_name": "Test_brightness", "supported_color_modes": [light.ColorMode.BRIGHTNESS], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "brightness": None, } @@ -1376,14 +1203,14 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "color_mode": None, "friendly_name": "Test_ct", "supported_color_modes": [light.ColorMode.COLOR_TEMP], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "brightness": None, "color_temp": None, "color_temp_kelvin": None, "hs_color": None, "rgb_color": None, "xy_color": None, - "max_color_temp_kelvin": 6500, + "max_color_temp_kelvin": 6535, "max_mireds": 500, 
"min_color_temp_kelvin": 2000, "min_mireds": 153, @@ -1394,7 +1221,7 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "color_mode": None, "friendly_name": "Test_rgbw", "supported_color_modes": [light.ColorMode.RGBW], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "brightness": None, "rgbw_color": None, "hs_color": None, @@ -1425,7 +1252,7 @@ async def test_light_state_rgbw(hass: HomeAssistant) -> None: "color_mode": light.ColorMode.RGBW, "friendly_name": "Test_rgbw", "supported_color_modes": [light.ColorMode.RGBW], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "hs_color": (240.0, 25.0), "rgb_color": (3, 3, 4), "rgbw_color": (1, 2, 3, 4), @@ -1456,7 +1283,7 @@ async def test_light_state_rgbww(hass: HomeAssistant) -> None: "color_mode": light.ColorMode.RGBWW, "friendly_name": "Test_rgbww", "supported_color_modes": [light.ColorMode.RGBWW], - "supported_features": 0, + "supported_features": light.LightEntityFeature(0), "hs_color": (60.0, 20.0), "rgb_color": (5, 5, 4), "rgbww_color": (1, 2, 3, 4, 5), @@ -1472,7 +1299,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: MockLight("Test_rgb", STATE_ON), MockLight("Test_xy", STATE_ON), MockLight("Test_all", STATE_ON), - MockLight("Test_legacy", STATE_ON), MockLight("Test_rgbw", STATE_ON), MockLight("Test_rgbww", STATE_ON), MockLight("Test_temperature", STATE_ON), @@ -1496,19 +1322,13 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: } entity4 = entities[4] - entity4.supported_features = light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity4.supported_color_modes = None - entity4.color_mode = None + entity4.supported_color_modes = {light.ColorMode.RGBW} entity5 = entities[5] - entity5.supported_color_modes = {light.ColorMode.RGBW} + entity5.supported_color_modes = {light.ColorMode.RGBWW} entity6 = entities[6] - entity6.supported_color_modes = {light.ColorMode.RGBWW} - - entity7 = entities[7] - entity7.supported_color_modes = {light.ColorMode.COLOR_TEMP} + entity6.supported_color_modes = {light.ColorMode.COLOR_TEMP} assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -1530,15 +1350,12 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: ] state = hass.states.get(entity4.entity_id) - assert state.attributes["supported_color_modes"] == [light.ColorMode.HS] - - state = hass.states.get(entity5.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.RGBW] - state = hass.states.get(entity6.entity_id) + state = hass.states.get(entity5.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.RGBWW] - state = hass.states.get(entity7.entity_id) + state = hass.states.get(entity6.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] await hass.services.async_call( @@ -1553,7 +1370,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 100, "hs_color": (240, 100), @@ -1569,12 +1385,10 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 255, "hs_color": (240.0, 100.0)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 255, "hs_color": 
(240.0, 100.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 255, "rgbw_color": (0, 0, 255, 0)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 255, "rgbww_color": (0, 0, 255, 0, 0)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 255, "color_temp_kelvin": 1739, "color_temp": 575} await hass.services.async_call( @@ -1589,7 +1403,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 100, "hs_color": (240, 0), @@ -1605,13 +1418,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 255, "hs_color": (240.0, 0.0)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 255, "hs_color": (240.0, 0.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 255, "rgbw_color": (0, 0, 0, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint of the white channels is warm, compensated by adding green + blue assert data == {"brightness": 255, "rgbww_color": (0, 76, 141, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 255, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( @@ -1626,7 +1437,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgb_color": (128, 0, 0), @@ -1641,13 +1451,12 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: assert data == {"brightness": 128, "xy_color": (0.701, 0.299)} _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (128, 0, 0)} + _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 100.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (128, 0, 0, 0)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (128, 0, 0, 0, 0)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 6279, "color_temp": 159} await hass.services.async_call( @@ -1662,7 +1471,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgb_color": (255, 255, 255), @@ -1678,13 +1486,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (255, 255, 255)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 0.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (0, 0, 0, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (0, 76, 141, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = 
entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( @@ -1699,7 +1505,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "xy_color": (0.1, 0.8), @@ -1715,12 +1520,10 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "xy_color": (0.1, 0.8)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (125.176, 100.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (0, 255, 22, 0)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (0, 255, 22, 0, 0)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 8645, "color_temp": 115} await hass.services.async_call( @@ -1735,7 +1538,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "xy_color": (0.323, 0.329), @@ -1751,13 +1553,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "xy_color": (0.323, 0.329)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 0.392)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (1, 0, 0, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (0, 75, 140, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( @@ -1772,7 +1572,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgbw_color": (128, 0, 0, 64), @@ -1788,13 +1587,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (128, 43, 43)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 66.406)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (128, 0, 0, 64)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (128, 0, 30, 117, 117)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 3011, "color_temp": 332} await hass.services.async_call( @@ -1809,7 +1606,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgbw_color": (255, 255, 255, 255), @@ -1825,13 
+1621,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (255, 255, 255)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (0.0, 0.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (255, 255, 255, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (0, 76, 141, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( @@ -1846,7 +1640,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgbww_color": (128, 0, 0, 64, 32), @@ -1862,12 +1655,10 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (128, 33, 26)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (4.118, 79.688)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbw_color": (128, 9, 0, 33)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (128, 0, 0, 64, 32)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 3845, "color_temp": 260} await hass.services.async_call( @@ -1882,7 +1673,6 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: entity4.entity_id, entity5.entity_id, entity6.entity_id, - entity7.entity_id, ], "brightness_pct": 50, "rgbww_color": (255, 255, 255, 255, 255), @@ -1898,13 +1688,11 @@ async def test_light_service_call_color_conversion(hass: HomeAssistant) -> None: _, data = entity3.last_call("turn_on") assert data == {"brightness": 128, "rgb_color": (255, 217, 185)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 128, "hs_color": (27.429, 27.451)} - _, data = entity5.last_call("turn_on") # The midpoint the white channels is warm, compensated by decreasing green + blue assert data == {"brightness": 128, "rgbw_color": (96, 44, 0, 255)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (255, 255, 255, 255, 255)} - _, data = entity7.last_call("turn_on") + _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "color_temp_kelvin": 3451, "color_temp": 289} @@ -1917,7 +1705,6 @@ async def test_light_service_call_color_conversion_named_tuple( MockLight("Test_rgb", STATE_ON), MockLight("Test_xy", STATE_ON), MockLight("Test_all", STATE_ON), - MockLight("Test_legacy", STATE_ON), MockLight("Test_rgbw", STATE_ON), MockLight("Test_rgbww", STATE_ON), ] @@ -1940,16 +1727,10 @@ async def test_light_service_call_color_conversion_named_tuple( } entity4 = entities[4] - entity4.supported_features = light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity4.supported_color_modes = None - entity4.color_mode = None + entity4.supported_color_modes 
= {light.ColorMode.RGBW} entity5 = entities[5] - entity5.supported_color_modes = {light.ColorMode.RGBW} - - entity6 = entities[6] - entity6.supported_color_modes = {light.ColorMode.RGBWW} + entity5.supported_color_modes = {light.ColorMode.RGBWW} assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -1965,7 +1746,6 @@ async def test_light_service_call_color_conversion_named_tuple( entity3.entity_id, entity4.entity_id, entity5.entity_id, - entity6.entity_id, ], "brightness_pct": 25, "rgb_color": color_util.RGBColor(128, 0, 0), @@ -1981,10 +1761,8 @@ async def test_light_service_call_color_conversion_named_tuple( _, data = entity3.last_call("turn_on") assert data == {"brightness": 64, "rgb_color": (128, 0, 0)} _, data = entity4.last_call("turn_on") - assert data == {"brightness": 64, "hs_color": (0.0, 100.0)} - _, data = entity5.last_call("turn_on") assert data == {"brightness": 64, "rgbw_color": (128, 0, 0, 0)} - _, data = entity6.last_call("turn_on") + _, data = entity5.last_call("turn_on") assert data == {"brightness": 64, "rgbww_color": (128, 0, 0, 0, 0)} @@ -2065,7 +1843,7 @@ async def test_light_service_call_color_temp_conversion(hass: HomeAssistant) -> assert entity1.min_mireds == 153 assert entity1.max_mireds == 500 assert entity1.min_color_temp_kelvin == 2000 - assert entity1.max_color_temp_kelvin == 6500 + assert entity1.max_color_temp_kelvin == 6535 assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -2078,7 +1856,7 @@ async def test_light_service_call_color_temp_conversion(hass: HomeAssistant) -> assert state.attributes["min_mireds"] == 153 assert state.attributes["max_mireds"] == 500 assert state.attributes["min_color_temp_kelvin"] == 2000 - assert state.attributes["max_color_temp_kelvin"] == 6500 + assert state.attributes["max_color_temp_kelvin"] == 6535 state = hass.states.get(entity1.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.RGBWW] @@ -2353,13 +2131,6 @@ async def test_light_state_color_conversion(hass: HomeAssistant) -> None: entity2.rgb_color = "Invalid" # Should be ignored entity2.xy_color = (0.1, 0.8) - entity3 = entities[3] - entity3.hs_color = (240, 100) - entity3.supported_features = light.SUPPORT_COLOR - # Set color modes to none to trigger backwards compatibility in LightEntity - entity3.supported_color_modes = None - entity3.color_mode = None - assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -2381,12 +2152,6 @@ async def test_light_state_color_conversion(hass: HomeAssistant) -> None: assert state.attributes["rgb_color"] == (0, 255, 22) assert state.attributes["xy_color"] == (0.1, 0.8) - state = hass.states.get(entity3.entity_id) - assert state.attributes["color_mode"] == light.ColorMode.HS - assert state.attributes["hs_color"] == (240, 100) - assert state.attributes["rgb_color"] == (0, 0, 255) - assert state.attributes["xy_color"] == (0.136, 0.04) - async def test_services_filter_parameters( hass: HomeAssistant, @@ -2621,27 +2386,6 @@ def test_filter_supported_color_modes() -> None: assert light.filter_supported_color_modes(supported) == {light.ColorMode.BRIGHTNESS} -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockLightEntityEntity(light.LightEntity): - @property - def supported_features(self) -> int: - """Return supported 
features.""" - return 1 - - entity = MockLightEntityEntity() - assert entity.supported_features_compat is light.LightEntityFeature(1) - assert "MockLightEntityEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "LightEntityFeature" in caplog.text - assert "and color modes" in caplog.text - caplog.clear() - assert entity.supported_features_compat is light.LightEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text - - @pytest.mark.parametrize( ("color_mode", "supported_color_modes", "warning_expected"), [ @@ -2802,3 +2546,195 @@ def test_report_invalid_color_modes( entity._async_calculate_state() expected_warning = "sets invalid supported color modes" assert (expected_warning in caplog.text) is warning_expected + + +@pytest.mark.parametrize( + ("attributes", "expected_warnings", "expected_values"), + [ + ( + { + "_attr_color_temp_kelvin": 4000, + "_attr_min_color_temp_kelvin": 3000, + "_attr_max_color_temp_kelvin": 5000, + }, + {"current": False, "warmest": False, "coldest": False}, + # Just highlighting that the attributes match the + # converted kelvin values, not the mired properties + (3000, 4000, 5000, 200, 250, 333, 153, None, 500), + ), + ( + {"_attr_color_temp": 350, "_attr_min_mireds": 300, "_attr_max_mireds": 400}, + {"current": True, "warmest": True, "coldest": True}, + (2500, 2857, 3333, 300, 350, 400, 300, 350, 400), + ), + ( + {}, + {"current": False, "warmest": True, "coldest": True}, + (2000, None, 6535, 153, None, 500, 153, None, 500), + ), + ], + ids=["with_kelvin", "with_mired_values", "with_mired_defaults"], +) +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +def test_missing_kelvin_property_warnings( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + attributes: dict[str, int | None], + expected_warnings: dict[str, bool], + expected_values: tuple[int, int | None, int], +) -> None: + """Test missing kelvin properties.""" + + class MockLightEntityEntity(light.LightEntity): + _attr_color_mode = light.ColorMode.COLOR_TEMP + _attr_is_on = True + _attr_supported_features = light.LightEntityFeature.EFFECT + _attr_supported_color_modes = {light.ColorMode.COLOR_TEMP} + platform = MockEntityPlatform(hass, platform_name="test") + + entity = MockLightEntityEntity() + for k, v in attributes.items(): + setattr(entity, k, v) + + state = entity._async_calculate_state() + for warning, expected in expected_warnings.items(): + assert ( + f"is using mireds for {warning} light color temperature" in caplog.text + ) is expected, f"Expected {expected} for '{warning}'" + + assert state.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN] == expected_values[0] + assert state.attributes[light.ATTR_COLOR_TEMP_KELVIN] == expected_values[1] + assert state.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN] == expected_values[2] + assert state.attributes[light.ATTR_MIN_MIREDS] == expected_values[3] + assert state.attributes[light.ATTR_COLOR_TEMP] == expected_values[4] + assert state.attributes[light.ATTR_MAX_MIREDS] == expected_values[5] + assert entity.min_mireds == expected_values[6] + assert entity.color_temp == expected_values[7] + assert entity.max_mireds == expected_values[8] + + +@pytest.mark.parametrize( + "module", + [light], +) +def test_all(module: ModuleType) -> None: + """Test module.__all__ is correctly set.""" + help_test_all(module) + + +@pytest.mark.parametrize( + ("constant_name", "constant_value", "constant_replacement"), + [ + 
("SUPPORT_BRIGHTNESS", 1, "supported_color_modes"), + ("SUPPORT_COLOR_TEMP", 2, "supported_color_modes"), + ("SUPPORT_COLOR", 16, "supported_color_modes"), + ("ATTR_COLOR_TEMP", "color_temp", "kelvin equivalent (ATTR_COLOR_TEMP_KELVIN)"), + ("ATTR_KELVIN", "kelvin", "ATTR_COLOR_TEMP_KELVIN"), + ( + "ATTR_MIN_MIREDS", + "min_mireds", + "kelvin equivalent (ATTR_MAX_COLOR_TEMP_KELVIN)", + ), + ( + "ATTR_MAX_MIREDS", + "max_mireds", + "kelvin equivalent (ATTR_MIN_COLOR_TEMP_KELVIN)", + ), + ], +) +def test_deprecated_light_constants( + caplog: pytest.LogCaptureFixture, + constant_name: str, + constant_value: int | str, + constant_replacement: str, +) -> None: + """Test deprecated light constants.""" + import_and_test_deprecated_constant( + caplog, light, constant_name, constant_replacement, constant_value, "2026.1" + ) + + +@pytest.mark.parametrize( + "entity_feature", + list(light.LightEntityFeature), +) +def test_deprecated_support_light_constants_enums( + caplog: pytest.LogCaptureFixture, + entity_feature: light.LightEntityFeature, +) -> None: + """Test deprecated support light constants.""" + import_and_test_deprecated_constant_enum( + caplog, light, entity_feature, "SUPPORT_", "2026.1" + ) + + +@pytest.mark.parametrize( + "entity_feature", + list(light.ColorMode), +) +def test_deprecated_color_mode_constants_enums( + caplog: pytest.LogCaptureFixture, + entity_feature: light.LightEntityFeature, +) -> None: + """Test deprecated support light constants.""" + import_and_test_deprecated_constant_enum( + caplog, light, entity_feature, "COLOR_MODE_", "2026.1" + ) + + +async def test_deprecated_turn_on_arguments( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test color temp conversion in service calls.""" + entity = MockLight("Test_ct", STATE_ON, {light.ColorMode.COLOR_TEMP}) + setup_test_component_platform(hass, light.DOMAIN, [entity]) + + assert await async_setup_component( + hass, light.DOMAIN, {light.DOMAIN: {"platform": "test"}} + ) + await hass.async_block_till_done() + + state = hass.states.get(entity.entity_id) + assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] + + caplog.clear() + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": [entity.entity_id], + "color_temp": 200, + }, + blocking=True, + ) + assert "Got `color_temp` argument in `turn_on` service" in caplog.text + _, data = entity.last_call("turn_on") + assert data == {"color_temp": 200, "color_temp_kelvin": 5000} + + caplog.clear() + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": [entity.entity_id], + "kelvin": 5000, + }, + blocking=True, + ) + assert "Got `kelvin` argument in `turn_on` service" in caplog.text + _, data = entity.last_call("turn_on") + assert data == {"color_temp": 200, "color_temp_kelvin": 5000} + + caplog.clear() + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": [entity.entity_id], + "color_temp_kelvin": 5000, + }, + blocking=True, + ) + _, data = entity.last_call("turn_on") + assert data == {"color_temp": 200, "color_temp_kelvin": 5000} + assert "argument in `turn_on` service" not in caplog.text diff --git a/tests/components/light/test_recorder.py b/tests/components/light/test_recorder.py index f3f87ff6074..d53ece61170 100644 --- a/tests/components/light/test_recorder.py +++ b/tests/components/light/test_recorder.py @@ -9,17 +9,17 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( + _DEPRECATED_ATTR_COLOR_TEMP, + 
_DEPRECATED_ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -66,8 +66,8 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert len(states) >= 1 for entity_states in states.values(): for state in entity_states: - assert ATTR_MIN_MIREDS not in state.attributes - assert ATTR_MAX_MIREDS not in state.attributes + assert _DEPRECATED_ATTR_MIN_MIREDS.value not in state.attributes + assert _DEPRECATED_ATTR_MAX_MIREDS.value not in state.attributes assert ATTR_SUPPORTED_COLOR_MODES not in state.attributes assert ATTR_EFFECT_LIST not in state.attributes assert ATTR_FRIENDLY_NAME in state.attributes @@ -75,7 +75,7 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert ATTR_MIN_COLOR_TEMP_KELVIN not in state.attributes assert ATTR_BRIGHTNESS not in state.attributes assert ATTR_COLOR_MODE not in state.attributes - assert ATTR_COLOR_TEMP not in state.attributes + assert _DEPRECATED_ATTR_COLOR_TEMP.value not in state.attributes assert ATTR_COLOR_TEMP_KELVIN not in state.attributes assert ATTR_EFFECT not in state.attributes assert ATTR_HS_COLOR not in state.attributes diff --git a/tests/components/light/test_reproduce_state.py b/tests/components/light/test_reproduce_state.py index aa698129915..987e97c6eb2 100644 --- a/tests/components/light/test_reproduce_state.py +++ b/tests/components/light/test_reproduce_state.py @@ -10,7 +10,7 @@ from tests.common import async_mock_service VALID_BRIGHTNESS = {"brightness": 180} VALID_EFFECT = {"effect": "random"} -VALID_COLOR_TEMP = {"color_temp": 240} +VALID_COLOR_TEMP_KELVIN = {"color_temp_kelvin": 4200} VALID_HS_COLOR = {"hs_color": (345, 75)} VALID_RGB_COLOR = {"rgb_color": (255, 63, 111)} VALID_RGBW_COLOR = {"rgbw_color": (255, 63, 111, 10)} @@ -19,7 +19,7 @@ VALID_XY_COLOR = {"xy_color": (0.59, 0.274)} NONE_BRIGHTNESS = {"brightness": None} NONE_EFFECT = {"effect": None} -NONE_COLOR_TEMP = {"color_temp": None} +NONE_COLOR_TEMP_KELVIN = {"color_temp_kelvin": None} NONE_HS_COLOR = {"hs_color": None} NONE_RGB_COLOR = {"rgb_color": None} NONE_RGBW_COLOR = {"rgbw_color": None} @@ -34,7 +34,7 @@ async def test_reproducing_states( hass.states.async_set("light.entity_off", "off", {}) hass.states.async_set("light.entity_bright", "on", VALID_BRIGHTNESS) hass.states.async_set("light.entity_effect", "on", VALID_EFFECT) - hass.states.async_set("light.entity_temp", "on", VALID_COLOR_TEMP) + hass.states.async_set("light.entity_temp", "on", VALID_COLOR_TEMP_KELVIN) hass.states.async_set("light.entity_hs", "on", VALID_HS_COLOR) hass.states.async_set("light.entity_rgb", "on", VALID_RGB_COLOR) hass.states.async_set("light.entity_xy", "on", VALID_XY_COLOR) @@ -49,7 +49,7 @@ async def test_reproducing_states( State("light.entity_off", "off"), State("light.entity_bright", "on", VALID_BRIGHTNESS), State("light.entity_effect", "on", VALID_EFFECT), - State("light.entity_temp", "on", VALID_COLOR_TEMP), + State("light.entity_temp", "on", VALID_COLOR_TEMP_KELVIN), State("light.entity_hs", "on", VALID_HS_COLOR), State("light.entity_rgb", "on", VALID_RGB_COLOR), State("light.entity_xy", "on", VALID_XY_COLOR), @@ -73,7 +73,7 @@ async def test_reproducing_states( State("light.entity_xy", "off"), State("light.entity_off", "on", VALID_BRIGHTNESS), 
State("light.entity_bright", "on", VALID_EFFECT), - State("light.entity_effect", "on", VALID_COLOR_TEMP), + State("light.entity_effect", "on", VALID_COLOR_TEMP_KELVIN), State("light.entity_temp", "on", VALID_HS_COLOR), State("light.entity_hs", "on", VALID_RGB_COLOR), State("light.entity_rgb", "on", VALID_XY_COLOR), @@ -92,7 +92,7 @@ async def test_reproducing_states( expected_bright["entity_id"] = "light.entity_bright" expected_calls.append(expected_bright) - expected_effect = dict(VALID_COLOR_TEMP) + expected_effect = dict(VALID_COLOR_TEMP_KELVIN) expected_effect["entity_id"] = "light.entity_effect" expected_calls.append(expected_effect) @@ -146,7 +146,7 @@ async def test_filter_color_modes( """Test filtering of parameters according to color mode.""" hass.states.async_set("light.entity", "off", {}) all_colors = { - **VALID_COLOR_TEMP, + **VALID_COLOR_TEMP_KELVIN, **VALID_HS_COLOR, **VALID_RGB_COLOR, **VALID_RGBW_COLOR, @@ -162,7 +162,7 @@ async def test_filter_color_modes( ) expected_map = { - light.ColorMode.COLOR_TEMP: {**VALID_BRIGHTNESS, **VALID_COLOR_TEMP}, + light.ColorMode.COLOR_TEMP: {**VALID_BRIGHTNESS, **VALID_COLOR_TEMP_KELVIN}, light.ColorMode.BRIGHTNESS: VALID_BRIGHTNESS, light.ColorMode.HS: {**VALID_BRIGHTNESS, **VALID_HS_COLOR}, light.ColorMode.ONOFF: {**VALID_BRIGHTNESS}, @@ -193,12 +193,76 @@ async def test_filter_color_modes( assert len(turn_on_calls) == 1 +async def test_filter_color_modes_missing_attributes( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test warning on missing attribute when filtering for color mode.""" + color_mode = light.ColorMode.COLOR_TEMP + hass.states.async_set("light.entity", "off", {}) + expected_log = ( + "Color mode color_temp specified " + "but attribute color_temp_kelvin missing for: light.entity" + ) + expected_fallback_log = "using color_temp (mireds) as fallback" + + turn_on_calls = async_mock_service(hass, "light", "turn_on") + + all_colors = { + **VALID_COLOR_TEMP_KELVIN, + **VALID_HS_COLOR, + **VALID_RGB_COLOR, + **VALID_RGBW_COLOR, + **VALID_RGBWW_COLOR, + **VALID_XY_COLOR, + **VALID_BRIGHTNESS, + } + + # Test missing `color_temp_kelvin` attribute + stored_attributes = {**all_colors} + stored_attributes.pop("color_temp_kelvin") + caplog.clear() + await async_reproduce_state( + hass, + [State("light.entity", "on", {**stored_attributes, "color_mode": color_mode})], + ) + assert len(turn_on_calls) == 0 + assert expected_log in caplog.text + assert expected_fallback_log not in caplog.text + + # Test with deprecated `color_temp` attribute + stored_attributes["color_temp"] = 250 + expected = {"brightness": 180, "color_temp_kelvin": 4000} + caplog.clear() + await async_reproduce_state( + hass, + [State("light.entity", "on", {**stored_attributes, "color_mode": color_mode})], + ) + + assert len(turn_on_calls) == 1 + assert expected_log in caplog.text + assert expected_fallback_log in caplog.text + + # Test with correct `color_temp_kelvin` attribute + expected = {"brightness": 180, "color_temp_kelvin": 4200} + caplog.clear() + turn_on_calls.clear() + await async_reproduce_state( + hass, + [State("light.entity", "on", {**all_colors, "color_mode": color_mode})], + ) + assert len(turn_on_calls) == 1 + assert turn_on_calls[0].domain == "light" + assert dict(turn_on_calls[0].data) == {"entity_id": "light.entity", **expected} + assert expected_log not in caplog.text + assert expected_fallback_log not in caplog.text + + @pytest.mark.parametrize( "saved_state", [ NONE_BRIGHTNESS, NONE_EFFECT, - NONE_COLOR_TEMP, + 
NONE_COLOR_TEMP_KELVIN, NONE_HS_COLOR, NONE_RGB_COLOR, NONE_RGBW_COLOR, diff --git a/tests/components/light/test_significant_change.py b/tests/components/light/test_significant_change.py index 87a60b58325..cf03f37228e 100644 --- a/tests/components/light/test_significant_change.py +++ b/tests/components/light/test_significant_change.py @@ -2,7 +2,7 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ) @@ -26,10 +26,10 @@ async def test_significant_change() -> None: # Color temp assert not async_check_significant_change( - None, "on", {ATTR_COLOR_TEMP: 60}, "on", {ATTR_COLOR_TEMP: 64} + None, "on", {ATTR_COLOR_TEMP_KELVIN: 2000}, "on", {ATTR_COLOR_TEMP_KELVIN: 2049} ) assert async_check_significant_change( - None, "on", {ATTR_COLOR_TEMP: 60}, "on", {ATTR_COLOR_TEMP: 65} + None, "on", {ATTR_COLOR_TEMP_KELVIN: 2000}, "on", {ATTR_COLOR_TEMP_KELVIN: 2050} ) # Effect diff --git a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr index 2543ca42156..db82f41eb73 100644 --- a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr +++ b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr @@ -63,6 +63,8 @@ 'site_id': 'test-site-id', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'linear_garage_door', 'entry_id': 'acefdd4b3a4a0911067d1cf51414201e', 'minor_version': 1, @@ -71,6 +73,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'test-site-name', 'unique_id': None, 'version': 1, diff --git a/tests/components/linear_garage_door/test_config_flow.py b/tests/components/linear_garage_door/test_config_flow.py index 4599bd24aef..64bdc589194 100644 --- a/tests/components/linear_garage_door/test_config_flow.py +++ b/tests/components/linear_garage_door/test_config_flow.py @@ -6,7 +6,7 @@ from linear_garage_door.errors import InvalidLoginError import pytest from homeassistant.components.linear_garage_door.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -61,16 +61,7 @@ async def test_reauth( ) -> None: """Test reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": mock_config_entry.title}, - "unique_id": mock_config_entry.unique_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/linear_garage_door/test_cover.py b/tests/components/linear_garage_door/test_cover.py index f4593ff4d60..be5ae8f35f7 100644 --- a/tests/components/linear_garage_door/test_cover.py +++ b/tests/components/linear_garage_door/test_cover.py @@ -10,16 +10,10 @@ from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, + CoverState, ) from homeassistant.components.linear_garage_door import DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, 
- Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -109,8 +103,8 @@ async def test_update_cover_state( await setup_integration(hass, mock_config_entry, [Platform.COVER]) - assert hass.states.get("cover.test_garage_1").state == STATE_OPEN - assert hass.states.get("cover.test_garage_2").state == STATE_CLOSED + assert hass.states.get("cover.test_garage_1").state == CoverState.OPEN + assert hass.states.get("cover.test_garage_2").state == CoverState.CLOSED device_states = load_json_object_fixture("get_device_state_1.json", DOMAIN) mock_linear.get_device_state.side_effect = lambda device_id: device_states[ @@ -120,5 +114,5 @@ async def test_update_cover_state( freezer.tick(timedelta(seconds=60)) async_fire_time_changed(hass) - assert hass.states.get("cover.test_garage_1").state == STATE_CLOSING - assert hass.states.get("cover.test_garage_2").state == STATE_OPENING + assert hass.states.get("cover.test_garage_1").state == CoverState.CLOSING + assert hass.states.get("cover.test_garage_2").state == CoverState.OPENING diff --git a/tests/components/linkplay/__init__.py b/tests/components/linkplay/__init__.py index 5962f7fdaba..f825826f196 100644 --- a/tests/components/linkplay/__init__.py +++ b/tests/components/linkplay/__init__.py @@ -1 +1,16 @@ """Tests for the LinkPlay integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/linkplay/conftest.py b/tests/components/linkplay/conftest.py index b3d65422e08..81ae993f6c3 100644 --- a/tests/components/linkplay/conftest.py +++ b/tests/components/linkplay/conftest.py @@ -1,11 +1,22 @@ """Test configuration and mocks for LinkPlay component.""" -from collections.abc import Generator +from collections.abc import Generator, Iterator +from contextlib import contextmanager +from typing import Any +from unittest import mock from unittest.mock import AsyncMock, patch +from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge, LinkPlayDevice import pytest +from homeassistant.components.linkplay.const import DOMAIN +from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_CLOSE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_fixture +from tests.conftest import AiohttpClientMocker + HOST = "10.0.0.150" HOST_REENTRY = "10.0.0.66" UUID = "FF31F09E-5001-FBDE-0546-2DBFFF31F09E" @@ -14,20 +25,24 @@ NAME = "Smart Zone 1_54B9" @pytest.fixture def mock_linkplay_factory_bridge() -> Generator[AsyncMock]: - """Mock for linkplay_factory_bridge.""" + """Mock for linkplay_factory_httpapi_bridge.""" with ( patch( - "homeassistant.components.linkplay.config_flow.linkplay_factory_bridge" - ) as factory, + "homeassistant.components.linkplay.config_flow.async_get_client_session", + return_value=AsyncMock(spec=ClientSession), + ), + patch( + "homeassistant.components.linkplay.config_flow.linkplay_factory_httpapi_bridge", + ) as conf_factory, ): bridge = AsyncMock(spec=LinkPlayBridge) bridge.endpoint = HOST bridge.device = AsyncMock(spec=LinkPlayDevice) bridge.device.uuid = UUID bridge.device.name = NAME - factory.return_value = bridge 
- yield factory + conf_factory.return_value = bridge + yield conf_factory @pytest.fixture @@ -38,3 +53,55 @@ def mock_setup_entry() -> Generator[AsyncMock]: return_value=True, ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=NAME, + data={CONF_HOST: HOST}, + unique_id=UUID, + ) + + +@pytest.fixture +def mock_player_ex( + mock_player_ex: AsyncMock, +) -> AsyncMock: + """Mock a update_status of the LinkPlayPlayer.""" + mock_player_ex.return_value = load_fixture("getPlayerEx.json", DOMAIN) + return mock_player_ex + + +@pytest.fixture +def mock_status_ex( + mock_status_ex: AsyncMock, +) -> AsyncMock: + """Mock a update_status of the LinkPlayDevice.""" + mock_status_ex.return_value = load_fixture("getStatusEx.json", DOMAIN) + return mock_status_ex + + +@contextmanager +def mock_lp_aiohttp_client() -> Iterator[AiohttpClientMocker]: + """Context manager to mock aiohttp client.""" + mocker = AiohttpClientMocker() + + def create_session(hass: HomeAssistant, *args: Any, **kwargs: Any) -> ClientSession: + session = mocker.create_session(hass.loop) + + async def close_session(event): + """Close session.""" + await session.close() + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, close_session) + + return session + + with mock.patch( + "homeassistant.components.linkplay.async_get_client_session", + side_effect=create_session, + ): + yield mocker diff --git a/tests/components/linkplay/fixtures/getPlayerEx.json b/tests/components/linkplay/fixtures/getPlayerEx.json new file mode 100644 index 00000000000..79d09f942df --- /dev/null +++ b/tests/components/linkplay/fixtures/getPlayerEx.json @@ -0,0 +1,19 @@ +{ + "type": "0", + "ch": "0", + "mode": "0", + "loop": "0", + "eq": "0", + "status": "stop", + "curpos": "0", + "offset_pts": "0", + "totlen": "0", + "Title": "", + "Artist": "", + "Album": "", + "alarmflag": "0", + "plicount": "0", + "plicurr": "0", + "vol": "80", + "mute": "0" +} diff --git a/tests/components/linkplay/fixtures/getStatusEx.json b/tests/components/linkplay/fixtures/getStatusEx.json new file mode 100644 index 00000000000..17eda4aeee8 --- /dev/null +++ b/tests/components/linkplay/fixtures/getStatusEx.json @@ -0,0 +1,81 @@ +{ + "uuid": "FF31F09E5001FBDE05462DBFFF31F09E", + "DeviceName": "Smart Zone 1_54B9", + "GroupName": "Smart Zone 1_54B9", + "ssid": "Smart Zone 1_54B9", + "language": "en_us", + "firmware": "4.6.415145", + "hardware": "A31", + "build": "release", + "project": "SMART_ZONE4_AMP", + "priv_prj": "SMART_ZONE4_AMP", + "project_build_name": "a31rakoit", + "Release": "20220427", + "temp_uuid": "97296CE38DE8CC3D", + "hideSSID": "1", + "SSIDStrategy": "2", + "branch": "A31_stable_4.6", + "group": "0", + "wmrm_version": "4.2", + "internet": "1", + "MAC": "00:22:6C:21:7F:1D", + "STA_MAC": "00:00:00:00:00:00", + "CountryCode": "CN", + "CountryRegion": "1", + "netstat": "0", + "essid": "", + "apcli0": "", + "eth2": "192.168.168.197", + "ra0": "10.10.10.254", + "eth_dhcp": "1", + "VersionUpdate": "0", + "NewVer": "0", + "set_dns_enable": "1", + "mcu_ver": "37", + "mcu_ver_new": "0", + "dsp_ver": "0", + "dsp_ver_new": "0", + "date": "2024:10:29", + "time": "17:13:22", + "tz": "1.0000", + "dst_enable": "1", + "region": "unknown", + "prompt_status": "1", + "iot_ver": "1.0.0", + "upnp_version": "1005", + "cap1": "0x305200", + "capability": "0x28e90b80", + "languages": "0x6", + "streams_all": "0x7bff7ffe", + "streams": "0x7b9831fe", + 
"external": "0x0", + "plm_support": "0x40152", + "preset_key": "10", + "spotify_active": "0", + "lbc_support": "0", + "privacy_mode": "0", + "WifiChannel": "11", + "RSSI": "0", + "BSSID": "", + "battery": "0", + "battery_percent": "0", + "securemode": "1", + "auth": "WPAPSKWPA2PSK", + "encry": "AES", + "upnp_uuid": "uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E", + "uart_pass_port": "8899", + "communication_port": "8819", + "web_firmware_update_hide": "0", + "ignore_talkstart": "0", + "web_login_result": "-1", + "silenceOTATime": "", + "ignore_silenceOTATime": "1", + "new_tunein_preset_and_alarm": "1", + "iheartradio_new": "1", + "new_iheart_podcast": "1", + "tidal_version": "2.0", + "service_version": "1.0", + "ETH_MAC": "00:22:6C:21:7F:20", + "security": "https/2.0", + "security_version": "2.0" +} diff --git a/tests/components/linkplay/snapshots/test_diagnostics.ambr b/tests/components/linkplay/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..d8c52a25649 --- /dev/null +++ b/tests/components/linkplay/snapshots/test_diagnostics.ambr @@ -0,0 +1,115 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'device_info': dict({ + 'device': dict({ + 'properties': dict({ + 'BSSID': '', + 'CountryCode': 'CN', + 'CountryRegion': '1', + 'DeviceName': 'Smart Zone 1_54B9', + 'ETH_MAC': '00:22:6C:21:7F:20', + 'GroupName': 'Smart Zone 1_54B9', + 'MAC': '00:22:6C:21:7F:1D', + 'NewVer': '0', + 'RSSI': '0', + 'Release': '20220427', + 'SSIDStrategy': '2', + 'STA_MAC': '00:00:00:00:00:00', + 'VersionUpdate': '0', + 'WifiChannel': '11', + 'apcli0': '', + 'auth': 'WPAPSKWPA2PSK', + 'battery': '0', + 'battery_percent': '0', + 'branch': 'A31_stable_4.6', + 'build': 'release', + 'cap1': '0x305200', + 'capability': '0x28e90b80', + 'communication_port': '8819', + 'date': '2024:10:29', + 'dsp_ver': '0', + 'dsp_ver_new': '0', + 'dst_enable': '1', + 'encry': 'AES', + 'essid': '', + 'eth2': '192.168.168.197', + 'eth_dhcp': '1', + 'external': '0x0', + 'firmware': '4.6.415145', + 'group': '0', + 'hardware': 'A31', + 'hideSSID': '1', + 'ignore_silenceOTATime': '1', + 'ignore_talkstart': '0', + 'iheartradio_new': '1', + 'internet': '1', + 'iot_ver': '1.0.0', + 'language': 'en_us', + 'languages': '0x6', + 'lbc_support': '0', + 'mcu_ver': '37', + 'mcu_ver_new': '0', + 'netstat': '0', + 'new_iheart_podcast': '1', + 'new_tunein_preset_and_alarm': '1', + 'plm_support': '0x40152', + 'preset_key': '10', + 'priv_prj': 'SMART_ZONE4_AMP', + 'privacy_mode': '0', + 'project': 'SMART_ZONE4_AMP', + 'project_build_name': 'a31rakoit', + 'prompt_status': '1', + 'ra0': '10.10.10.254', + 'region': 'unknown', + 'securemode': '1', + 'security': 'https/2.0', + 'security_version': '2.0', + 'service_version': '1.0', + 'set_dns_enable': '1', + 'silenceOTATime': '', + 'spotify_active': '0', + 'ssid': 'Smart Zone 1_54B9', + 'streams': '0x7b9831fe', + 'streams_all': '0x7bff7ffe', + 'temp_uuid': '97296CE38DE8CC3D', + 'tidal_version': '2.0', + 'time': '17:13:22', + 'tz': '1.0000', + 'uart_pass_port': '8899', + 'upnp_uuid': 'uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E', + 'upnp_version': '1005', + 'uuid': 'FF31F09E5001FBDE05462DBFFF31F09E', + 'web_firmware_update_hide': '0', + 'web_login_result': '-1', + 'wmrm_version': '4.2', + }), + }), + 'endpoint': dict({ + 'endpoint': 'https://10.0.0.150', + }), + 'multiroom': None, + 'player': dict({ + 'properties': dict({ + 'Album': '', + 'Artist': '', + 'Title': '', + 'alarmflag': '0', + 'ch': '0', + 'curpos': '0', + 'eq': '0', + 'loop': '0', + 'mode': '0', + 'mute': '0', + 
'offset_pts': '0', + 'plicount': '0', + 'plicurr': '0', + 'status': 'stop', + 'totlen': '0', + 'type': '0', + 'vol': '80', + }), + }), + }), + }) +# --- diff --git a/tests/components/linkplay/test_config_flow.py b/tests/components/linkplay/test_config_flow.py index 641f09893c2..3fd1fbea95e 100644 --- a/tests/components/linkplay/test_config_flow.py +++ b/tests/components/linkplay/test_config_flow.py @@ -3,6 +3,9 @@ from ipaddress import ip_address from unittest.mock import AsyncMock +from linkplay.exceptions import LinkPlayRequestException +import pytest + from homeassistant.components.linkplay.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF @@ -47,10 +50,9 @@ ZEROCONF_DISCOVERY_RE_ENTRY = ZeroconfServiceInfo( ) +@pytest.mark.usefixtures("mock_linkplay_factory_bridge", "mock_setup_entry") async def test_user_flow( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test user setup config flow.""" result = await hass.config_entries.flow.async_init( @@ -74,10 +76,9 @@ async def test_user_flow( assert result["result"].unique_id == UUID +@pytest.mark.usefixtures("mock_linkplay_factory_bridge") async def test_user_flow_re_entry( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test user setup config flow when an entry with the same unique id already exists.""" @@ -105,10 +106,9 @@ async def test_user_flow_re_entry( assert result["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_linkplay_factory_bridge", "mock_setup_entry") async def test_zeroconf_flow( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test Zeroconf flow.""" result = await hass.config_entries.flow.async_init( @@ -133,10 +133,9 @@ async def test_zeroconf_flow( assert result["result"].unique_id == UUID +@pytest.mark.usefixtures("mock_linkplay_factory_bridge") async def test_zeroconf_flow_re_entry( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test Zeroconf flow when an entry with the same unique id already exists.""" @@ -160,16 +159,35 @@ async def test_zeroconf_flow_re_entry( assert result["reason"] == "already_configured" -async def test_flow_errors( +@pytest.mark.usefixtures("mock_setup_entry") +async def test_zeroconf_flow_errors( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, +) -> None: + """Test flow when the device discovered through Zeroconf cannot be reached.""" + + # Temporarily make the mock_linkplay_factory_bridge throw an exception + mock_linkplay_factory_bridge.side_effect = (LinkPlayRequestException("Error"),) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_user_flow_errors( hass: HomeAssistant, mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test flow when the device cannot be reached.""" - # Temporarily store bridge in a separate variable and set factory to return None - bridge = mock_linkplay_factory_bridge.return_value - mock_linkplay_factory_bridge.return_value = None + # Temporarily make the mock_linkplay_factory_bridge throw an exception + 
mock_linkplay_factory_bridge.side_effect = (LinkPlayRequestException("Error"),) result = await hass.config_entries.flow.async_init( DOMAIN, @@ -188,8 +206,8 @@ async def test_flow_errors( assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} - # Make linkplay_factory_bridge return a mock bridge again - mock_linkplay_factory_bridge.return_value = bridge + # Make mock_linkplay_factory_bridge_exception no longer throw an exception + mock_linkplay_factory_bridge.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/linkplay/test_diagnostics.py b/tests/components/linkplay/test_diagnostics.py new file mode 100644 index 00000000000..de60b7ecb3a --- /dev/null +++ b/tests/components/linkplay/test_diagnostics.py @@ -0,0 +1,55 @@ +"""Tests for the LinkPlay diagnostics.""" + +from unittest.mock import patch + +from linkplay.bridge import LinkPlayMultiroom +from linkplay.consts import API_ENDPOINT +from linkplay.endpoint import LinkPlayApiEndpoint +from syrupy import SnapshotAssertion + +from homeassistant.components.linkplay.const import DOMAIN +from homeassistant.core import HomeAssistant + +from . import setup_integration +from .conftest import HOST, mock_lp_aiohttp_client + +from tests.common import MockConfigEntry, load_fixture +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + + with ( + mock_lp_aiohttp_client() as mock_session, + patch.object(LinkPlayMultiroom, "update_status", return_value=None), + ): + endpoints = [ + LinkPlayApiEndpoint( + protocol="https", port=443, endpoint=HOST, session=None + ), + LinkPlayApiEndpoint(protocol="http", port=80, endpoint=HOST, session=None), + ] + for endpoint in endpoints: + mock_session.get( + API_ENDPOINT.format(str(endpoint), "getPlayerStatusEx"), + text=load_fixture("getPlayerEx.json", DOMAIN), + ) + + mock_session.get( + API_ENDPOINT.format(str(endpoint), "getStatusEx"), + text=load_fixture("getStatusEx.json", DOMAIN), + ) + + await setup_integration(hass, mock_config_entry) + + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/litterrobot/test_config_flow.py b/tests/components/litterrobot/test_config_flow.py index 5ffb78c7782..9420d3cb8a8 100644 --- a/tests/components/litterrobot/test_config_flow.py +++ b/tests/components/litterrobot/test_config_flow.py @@ -7,7 +7,7 @@ from pylitterbot.exceptions import LitterRobotException, LitterRobotLoginExcepti from homeassistant import config_entries from homeassistant.components import litterrobot -from homeassistant.const import CONF_PASSWORD, CONF_SOURCE +from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -124,15 +124,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_account: Account) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == 
"reauth_confirm" @@ -164,15 +156,7 @@ async def test_step_reauth_failed(hass: HomeAssistant, mock_account: Account) -> ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/litterrobot/test_init.py b/tests/components/litterrobot/test_init.py index 21b16097603..1c8e0742b26 100644 --- a/tests/components/litterrobot/test_init.py +++ b/tests/components/litterrobot/test_init.py @@ -9,7 +9,7 @@ from homeassistant.components import litterrobot from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, SERVICE_START, - STATE_DOCKED, + VacuumActivity, ) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID @@ -30,7 +30,7 @@ async def test_unload_entry(hass: HomeAssistant, mock_account: MagicMock) -> Non vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum - assert vacuum.state == STATE_DOCKED + assert vacuum.state == VacuumActivity.DOCKED await hass.services.async_call( VACUUM_DOMAIN, diff --git a/tests/components/litterrobot/test_vacuum.py b/tests/components/litterrobot/test_vacuum.py index 735ee6653aa..f18098ccf1d 100644 --- a/tests/components/litterrobot/test_vacuum.py +++ b/tests/components/litterrobot/test_vacuum.py @@ -15,9 +15,7 @@ from homeassistant.components.vacuum import ( DOMAIN as PLATFORM_DOMAIN, SERVICE_START, SERVICE_STOP, - STATE_DOCKED, - STATE_ERROR, - STATE_PAUSED, + VacuumActivity, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -53,7 +51,7 @@ async def test_vacuum( vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum - assert vacuum.state == STATE_DOCKED + assert vacuum.state == VacuumActivity.DOCKED assert vacuum.attributes["is_sleeping"] is False ent_reg_entry = entity_registry.async_get(VACUUM_ENTITY_ID) @@ -95,18 +93,21 @@ async def test_vacuum_with_error( vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum - assert vacuum.state == STATE_ERROR + assert vacuum.state == VacuumActivity.ERROR @pytest.mark.parametrize( ("robot_data", "expected_state"), [ - ({"displayCode": "DC_CAT_DETECT"}, STATE_DOCKED), - ({"isDFIFull": True}, STATE_ERROR), - ({"robotCycleState": "CYCLE_STATE_CAT_DETECT"}, STATE_PAUSED), + ({"displayCode": "DC_CAT_DETECT"}, VacuumActivity.DOCKED), + ({"isDFIFull": True}, VacuumActivity.ERROR), + ( + {"robotCycleState": "CYCLE_STATE_CAT_DETECT"}, + VacuumActivity.PAUSED, + ), ], ) -async def test_vacuum_states( +async def test_activities( hass: HomeAssistant, mock_account_with_litterrobot_4: MagicMock, robot_data: dict[str, str | bool], @@ -150,7 +151,7 @@ async def test_commands( vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum - assert vacuum.state == STATE_DOCKED + assert vacuum.state == VacuumActivity.DOCKED extra = extra or {} data = {ATTR_ENTITY_ID: VACUUM_ENTITY_ID, **extra.get("data", {})} diff --git a/tests/components/livisi/test_config_flow.py b/tests/components/livisi/test_config_flow.py index 9f492b9a45a..cffae711d28 100644 --- a/tests/components/livisi/test_config_flow.py +++ b/tests/components/livisi/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from aiolivisi import errors as livisi_errors +from livisi import errors as livisi_errors import pytest 
from homeassistant.components.livisi.const import DOMAIN diff --git a/tests/components/local_calendar/test_config_flow.py b/tests/components/local_calendar/test_config_flow.py index c76fd9e283d..cf37176a10f 100644 --- a/tests/components/local_calendar/test_config_flow.py +++ b/tests/components/local_calendar/test_config_flow.py @@ -1,10 +1,20 @@ """Test the Local Calendar config flow.""" -from unittest.mock import patch +from collections.abc import Generator, Iterator +from contextlib import contextmanager +from pathlib import Path +from unittest.mock import MagicMock, patch +from uuid import uuid4 + +import pytest from homeassistant import config_entries from homeassistant.components.local_calendar.const import ( + ATTR_CREATE_EMPTY, + ATTR_IMPORT_ICS_FILE, CONF_CALENDAR_NAME, + CONF_ICS_FILE, + CONF_IMPORT, CONF_STORAGE_KEY, DOMAIN, ) @@ -14,6 +24,46 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry +@pytest.fixture +def mock_ics_content(): + """Mock ics file content.""" + return b"""BEGIN:VCALENDAR + VERSION:2.0 + PRODID:-//hacksw/handcal//NONSGML v1.0//EN + END:VCALENDAR + """ + + +@pytest.fixture +def mock_process_uploaded_file( + tmp_path: Path, mock_ics_content: str +) -> Generator[MagicMock]: + """Mock upload ics file.""" + file_id_ics = str(uuid4()) + + @contextmanager + def _mock_process_uploaded_file( + hass: HomeAssistant, uploaded_file_id: str + ) -> Iterator[Path | None]: + with open(tmp_path / uploaded_file_id, "wb") as icsfile: + icsfile.write(mock_ics_content) + yield tmp_path / uploaded_file_id + + with ( + patch( + "homeassistant.components.local_calendar.config_flow.process_uploaded_file", + side_effect=_mock_process_uploaded_file, + ) as mock_upload, + patch( + "shutil.move", + ), + ): + mock_upload.file_id = { + CONF_ICS_FILE: file_id_ics, + } + yield mock_upload + + async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -38,11 +88,44 @@ async def test_form(hass: HomeAssistant) -> None: assert result2["title"] == "My Calendar" assert result2["data"] == { CONF_CALENDAR_NAME: "My Calendar", + CONF_IMPORT: ATTR_CREATE_EMPTY, CONF_STORAGE_KEY: "my_calendar", } assert len(mock_setup_entry.mock_calls) == 1 +async def test_form_import_ics( + hass: HomeAssistant, + mock_process_uploaded_file: MagicMock, +) -> None: + """Test we get the import form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] is None + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_CALENDAR_NAME: "My Calendar", CONF_IMPORT: ATTR_IMPORT_ICS_FILE}, + ) + assert result2["type"] is FlowResultType.FORM + + with patch( + "homeassistant.components.local_calendar.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + file_id = mock_process_uploaded_file.file_id + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ICS_FILE: file_id[CONF_ICS_FILE]}, + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert len(mock_setup_entry.mock_calls) == 1 + + async def test_duplicate_name( hass: HomeAssistant, setup_integration: None, config_entry: MockConfigEntry ) -> None: @@ -65,3 +148,30 @@ async def test_duplicate_name( assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" + + 
+@pytest.mark.parametrize("mock_ics_content", [b"invalid-ics-content"]) +async def test_invalid_ics( + hass: HomeAssistant, + mock_process_uploaded_file: MagicMock, +) -> None: + """Test invalid ics content raises error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] is None + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_CALENDAR_NAME: "My Calendar", CONF_IMPORT: ATTR_IMPORT_ICS_FILE}, + ) + assert result2["type"] is FlowResultType.FORM + + file_id = mock_process_uploaded_file.file_id + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ICS_FILE: file_id[CONF_ICS_FILE]}, + ) + assert result3["type"] is FlowResultType.FORM + assert result3["errors"] == {CONF_ICS_FILE: "invalid_ics_file"} diff --git a/tests/components/local_file/conftest.py b/tests/components/local_file/conftest.py new file mode 100644 index 00000000000..4ec06369c94 --- /dev/null +++ b/tests/components/local_file/conftest.py @@ -0,0 +1,63 @@ +"""Fixtures for the Local file integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from homeassistant.components.local_file.const import DEFAULT_NAME, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_FILE_PATH, CONF_NAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Automatically patch setup.""" + with patch( + "homeassistant.components.local_file.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. 
+ + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return {CONF_NAME: DEFAULT_NAME, CONF_FILE_PATH: "mock.file"} + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the Local file integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/local_file/test_camera.py b/tests/components/local_file/test_camera.py index 4455d47469c..ddfdf4249bd 100644 --- a/tests/components/local_file/test_camera.py +++ b/tests/components/local_file/test_camera.py @@ -1,62 +1,231 @@ """The tests for local file camera component.""" from http import HTTPStatus -from unittest import mock +from typing import Any +from unittest.mock import Mock, mock_open, patch import pytest -from homeassistant.components.local_file.const import DOMAIN, SERVICE_UPDATE_FILE_PATH -from homeassistant.core import HomeAssistant +from homeassistant.components.local_file.const import ( + DEFAULT_NAME, + DOMAIN, + SERVICE_UPDATE_FILE_PATH, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import ATTR_ENTITY_ID, CONF_FILE_PATH +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component +from homeassistant.util import slugify +from tests.common import MockConfigEntry from tests.typing import ClientSessionGenerator async def test_loading_file( - hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + loaded_entry: MockConfigEntry, ) -> None: """Test that it loads image from disk.""" - with ( - mock.patch("os.path.isfile", mock.Mock(return_value=True)), - mock.patch("os.access", mock.Mock(return_value=True)), - mock.patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - mock.Mock(return_value=(None, None)), - ), - ): - await async_setup_component( - hass, - "camera", - { - "camera": { - "name": "config_test", - "platform": "local_file", - "file_path": "mock.file", - } - }, - ) - await hass.async_block_till_done() client = await hass_client() - m_open = mock.mock_open(read_data=b"hello") - with mock.patch( - "homeassistant.components.local_file.camera.open", m_open, create=True - ): - resp = await client.get("/api/camera_proxy/camera.config_test") + m_open = mock_open(read_data=b"hello") + with patch("homeassistant.components.local_file.camera.open", m_open, create=True): + resp = await client.get("/api/camera_proxy/camera.local_file") assert resp.status == HTTPStatus.OK body = await resp.text() assert body == "hello" -async def test_file_not_readable( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture +async def test_file_not_readable_after_setup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + loaded_entry: 
MockConfigEntry, ) -> None: """Test a warning is shown setup when file is not readable.""" + + client = await hass_client() + + with patch( + "homeassistant.components.local_file.camera.open", side_effect=FileNotFoundError + ): + resp = await client.get("/api/camera_proxy/camera.local_file") + + assert resp.status == HTTPStatus.INTERNAL_SERVER_ERROR + assert "Could not read camera Local File image from file: mock.file" in caplog.text + + +@pytest.mark.parametrize( + ("config", "url", "content_type"), + [ + ( + { + "name": "test_jpg", + "file_path": "/path/to/image.jpg", + }, + "/api/camera_proxy/camera.test_jpg", + "image/jpeg", + ), + ( + { + "name": "test_png", + "file_path": "/path/to/image.png", + }, + "/api/camera_proxy/camera.test_png", + "image/png", + ), + ( + { + "name": "test_svg", + "file_path": "/path/to/image.svg", + }, + "/api/camera_proxy/camera.test_svg", + "image/svg+xml", + ), + ( + { + "name": "test_no_ext", + "file_path": "/path/to/image", + }, + "/api/camera_proxy/camera.test_no_ext", + "image/jpeg", + ), + ], +) +async def test_camera_content_type( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + config: dict[str, Any], + url: str, + content_type: str, +) -> None: + """Test local_file camera content_type.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options=config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) with ( - mock.patch("os.path.isfile", mock.Mock(return_value=True)), - mock.patch("os.access", mock.Mock(return_value=False)), + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + client = await hass_client() + + image = "hello" + m_open = mock_open(read_data=image.encode()) + with patch("homeassistant.components.local_file.camera.open", m_open, create=True): + resp_1 = await client.get(url) + + assert resp_1.status == HTTPStatus.OK + assert resp_1.content_type == content_type + body = await resp_1.text() + assert body == image + + +@pytest.mark.parametrize( + "get_config", + [ + { + "name": DEFAULT_NAME, + "file_path": "mock/path.jpg", + } + ], +) +async def test_update_file_path( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test update_file_path service.""" + # Setup platform + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options={ + "name": "local_file_camera_2", + "file_path": "mock/path_2.jpg", + }, + entry_id="2", + ) + + config_entry.add_to_hass(hass) + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + # Fetch state and check motion detection attribute + state = hass.states.get("camera.local_file") + assert state.attributes.get("friendly_name") == "Local File" + assert state.attributes.get("file_path") == "mock/path.jpg" + + service_data = {"entity_id": "camera.local_file", "file_path": "new/path.jpg"} + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_FILE_PATH, + service_data, 
+ blocking=True, + ) + + state = hass.states.get("camera.local_file") + assert state.attributes.get("file_path") == "new/path.jpg" + + # Check that local_file_camera_2 file_path is still as configured + state = hass.states.get("camera.local_file_camera_2") + assert state.attributes.get("file_path") == "mock/path_2.jpg" + + # Assert it fails if file is not readable + service_data = { + ATTR_ENTITY_ID: "camera.local_file", + CONF_FILE_PATH: "new/path2.jpg", + } + with pytest.raises( + ServiceValidationError, match="Path new/path2.jpg is not accessible" + ): + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_FILE_PATH, + service_data, + blocking=True, + ) + + +async def test_import_from_yaml_success( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test import.""" + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), ): await async_setup_component( hass, @@ -71,110 +240,48 @@ async def test_file_not_readable( ) await hass.async_block_till_done() - assert "Could not read" in caplog.text - assert "config_test" in caplog.text - assert "mock.file" in caplog.text + assert hass.config_entries.async_has_entries(DOMAIN) + state = hass.states.get("camera.config_test") + assert state.attributes.get("file_path") == "mock.file" - -async def test_camera_content_type( - hass: HomeAssistant, hass_client: ClientSessionGenerator -) -> None: - """Test local_file camera content_type.""" - cam_config_jpg = { - "name": "test_jpg", - "platform": "local_file", - "file_path": "/path/to/image.jpg", - } - cam_config_png = { - "name": "test_png", - "platform": "local_file", - "file_path": "/path/to/image.png", - } - cam_config_svg = { - "name": "test_svg", - "platform": "local_file", - "file_path": "/path/to/image.svg", - } - cam_config_noext = { - "name": "test_no_ext", - "platform": "local_file", - "file_path": "/path/to/image", - } - - await async_setup_component( - hass, - "camera", - {"camera": [cam_config_jpg, cam_config_png, cam_config_svg, cam_config_noext]}, + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" ) - await hass.async_block_till_done() - - client = await hass_client() - - image = "hello" - m_open = mock.mock_open(read_data=image.encode()) - with mock.patch( - "homeassistant.components.local_file.camera.open", m_open, create=True - ): - resp_1 = await client.get("/api/camera_proxy/camera.test_jpg") - resp_2 = await client.get("/api/camera_proxy/camera.test_png") - resp_3 = await client.get("/api/camera_proxy/camera.test_svg") - resp_4 = await client.get("/api/camera_proxy/camera.test_no_ext") - - assert resp_1.status == HTTPStatus.OK - assert resp_1.content_type == "image/jpeg" - body = await resp_1.text() - assert body == image - - assert resp_2.status == HTTPStatus.OK - assert resp_2.content_type == "image/png" - body = await resp_2.text() - assert body == image - - assert resp_3.status == HTTPStatus.OK - assert resp_3.content_type == "image/svg+xml" - body = await resp_3.text() - assert body == image - - # default mime type - assert resp_4.status == HTTPStatus.OK - assert resp_4.content_type == "image/jpeg" - body = await resp_4.text() - assert body == image + assert issue + assert issue.translation_key == "deprecated_yaml" -async def test_update_file_path(hass: HomeAssistant) -> None: - """Test update_file_path service.""" - # Setup platform +async 
def test_import_from_yaml_fails( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test import fails due to not accessible file.""" + with ( - mock.patch("os.path.isfile", mock.Mock(return_value=True)), - mock.patch("os.access", mock.Mock(return_value=True)), - mock.patch( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=False)), + patch( "homeassistant.components.local_file.camera.mimetypes.guess_type", - mock.Mock(return_value=(None, None)), + Mock(return_value=(None, None)), ), ): - camera_1 = {"platform": "local_file", "file_path": "mock/path.jpg"} - camera_2 = { - "platform": "local_file", - "name": "local_file_camera_2", - "file_path": "mock/path_2.jpg", - } - await async_setup_component(hass, "camera", {"camera": [camera_1, camera_2]}) + await async_setup_component( + hass, + "camera", + { + "camera": { + "name": "config_test", + "platform": "local_file", + "file_path": "mock.file", + } + }, + ) await hass.async_block_till_done() - # Fetch state and check motion detection attribute - state = hass.states.get("camera.local_file") - assert state.attributes.get("friendly_name") == "Local File" - assert state.attributes.get("file_path") == "mock/path.jpg" + assert not hass.config_entries.async_has_entries(DOMAIN) + assert not hass.states.get("camera.config_test") - service_data = {"entity_id": "camera.local_file", "file_path": "new/path.jpg"} - - await hass.services.async_call(DOMAIN, SERVICE_UPDATE_FILE_PATH, service_data) - await hass.async_block_till_done() - - state = hass.states.get("camera.local_file") - assert state.attributes.get("file_path") == "new/path.jpg" - - # Check that local_file_camera_2 file_path is still as configured - state = hass.states.get("camera.local_file_camera_2") - assert state.attributes.get("file_path") == "mock/path_2.jpg" + issue = issue_registry.async_get_issue( + DOMAIN, f"no_access_path_{slugify("mock.file")}" + ) + assert issue + assert issue.translation_key == "no_access_path" diff --git a/tests/components/local_file/test_config_flow.py b/tests/components/local_file/test_config_flow.py new file mode 100644 index 00000000000..dda9d606107 --- /dev/null +++ b/tests/components/local_file/test_config_flow.py @@ -0,0 +1,235 @@ +"""Test the Scrape config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.local_file.const import DEFAULT_NAME, DOMAIN +from homeassistant.const import CONF_FILE_PATH, CONF_NAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form_sensor(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form for sensor.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert 
result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_FILE_PATH: "mock.new.file"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {CONF_NAME: DEFAULT_NAME, CONF_FILE_PATH: "mock.new.file"} + + await hass.async_block_till_done() + + # Check the entity was updated, no new entity was created + assert len(hass.states.async_all()) == 1 + + state = hass.states.get("camera.local_file") + assert state is not None + + +async def test_validation_options( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test validation.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=False)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + }, + ) + await hass.async_block_till_done() + + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "not_readable_path"} + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.new.file", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.new.file", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_entry_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + 
}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_import(hass: HomeAssistant) -> None: + """Test import.""" + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={ + "name": DEFAULT_NAME, + "file_path": "mock/path.jpg", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock/path.jpg", + } + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_import_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test import abort existing entry.""" + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={ + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/local_file/test_init.py b/tests/components/local_file/test_init.py new file mode 100644 index 00000000000..2b8b93e8100 --- /dev/null +++ b/tests/components/local_file/test_init.py @@ -0,0 +1,47 @@ +"""Test Statistics component setup process.""" + +from __future__ import annotations + +from unittest.mock import Mock, patch + +from homeassistant.components.local_file.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_file_not_readable_during_startup( + hass: HomeAssistant, + get_config: dict[str, str], +) -> None: + """Test a warning is shown setup when file is not readable.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + config_entry.add_to_hass(hass) + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=False)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_ERROR diff --git a/tests/components/locative/test_init.py b/tests/components/locative/test_init.py index 8fd239ee398..c41db68e3d6 100644 --- a/tests/components/locative/test_init.py +++ b/tests/components/locative/test_init.py @@ 
-11,8 +11,8 @@ from homeassistant.components import locative from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.locative import DOMAIN, TRACKER_UPDATE -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.dispatcher import DATA_DISPATCHER from homeassistant.setup import async_setup_component @@ -134,9 +134,7 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) - ).state + state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state assert state_name == "home" data["id"] = "HOME" @@ -146,9 +144,7 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) - ).state + state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state assert state_name == "not_home" data["id"] = "hOmE" @@ -158,9 +154,7 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) - ).state + state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state assert state_name == "home" data["trigger"] = "exit" @@ -169,9 +163,7 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) - ).state + state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state assert state_name == "not_home" data["id"] = "work" @@ -181,9 +173,7 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) - ).state + state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state assert state_name == "work" @@ -206,7 +196,7 @@ async def test_exit_after_enter( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") assert state.state == "home" data["id"] = "Work" @@ -216,7 +206,7 @@ async def test_exit_after_enter( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") assert state.state == "work" data["id"] = "Home" @@ -227,7 +217,7 @@ async def test_exit_after_enter( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") assert state.state 
== "work" @@ -250,7 +240,7 @@ async def test_exit_first( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") assert state.state == "not_home" @@ -273,9 +263,7 @@ async def test_two_devices( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["device"]) - ) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_1['device']}") assert state.state == "not_home" # Enter Home @@ -286,13 +274,9 @@ async def test_two_devices( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_2["device"]) - ) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_2['device']}") assert state.state == "home" - state = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["device"]) - ) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_1['device']}") assert state.state == "not_home" @@ -318,7 +302,7 @@ async def test_load_unload_entry( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") assert state.state == "not_home" assert len(hass.data[DATA_DISPATCHER][TRACKER_UPDATE]) == 1 diff --git a/tests/components/lock/test_device_condition.py b/tests/components/lock/test_device_condition.py index 74910e1909f..1818d4933b8 100644 --- a/tests/components/lock/test_device_condition.py +++ b/tests/components/lock/test_device_condition.py @@ -5,17 +5,8 @@ from pytest_unordered import unordered from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.components.lock import DOMAIN -from homeassistant.const import ( - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, - EntityCategory, -) +from homeassistant.components.lock import DOMAIN, LockState +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider @@ -142,7 +133,7 @@ async def test_if_state( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_LOCKED) + hass.states.async_set(entry.entity_id, LockState.LOCKED) assert await async_setup_component( hass, @@ -284,38 +275,38 @@ async def test_if_state( assert len(service_calls) == 1 assert service_calls[0].data["some"] == "is_locked - event - test_event1" - hass.states.async_set(entry.entity_id, STATE_UNLOCKED) + hass.states.async_set(entry.entity_id, LockState.UNLOCKED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(service_calls) == 2 assert service_calls[1].data["some"] == "is_unlocked - event - test_event2" - hass.states.async_set(entry.entity_id, STATE_UNLOCKING) + hass.states.async_set(entry.entity_id, LockState.UNLOCKING) hass.bus.async_fire("test_event3") await hass.async_block_till_done() assert len(service_calls) == 3 assert service_calls[2].data["some"] == "is_unlocking - event - 
test_event3" - hass.states.async_set(entry.entity_id, STATE_LOCKING) + hass.states.async_set(entry.entity_id, LockState.LOCKING) hass.bus.async_fire("test_event4") await hass.async_block_till_done() assert len(service_calls) == 4 assert service_calls[3].data["some"] == "is_locking - event - test_event4" - hass.states.async_set(entry.entity_id, STATE_JAMMED) + hass.states.async_set(entry.entity_id, LockState.JAMMED) hass.bus.async_fire("test_event5") await hass.async_block_till_done() assert len(service_calls) == 5 assert service_calls[4].data["some"] == "is_jammed - event - test_event5" - hass.states.async_set(entry.entity_id, STATE_OPENING) + hass.states.async_set(entry.entity_id, LockState.OPENING) hass.bus.async_fire("test_event6") await hass.async_block_till_done() assert len(service_calls) == 6 assert service_calls[5].data["some"] == "is_opening - event - test_event6" - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, LockState.OPEN) hass.bus.async_fire("test_event7") await hass.async_block_till_done() assert len(service_calls) == 7 @@ -339,7 +330,7 @@ async def test_if_state_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_LOCKED) + hass.states.async_set(entry.entity_id, LockState.LOCKED) assert await async_setup_component( hass, diff --git a/tests/components/lock/test_device_trigger.py b/tests/components/lock/test_device_trigger.py index f64334fa29b..3ecdf2a9bca 100644 --- a/tests/components/lock/test_device_trigger.py +++ b/tests/components/lock/test_device_trigger.py @@ -7,17 +7,8 @@ from pytest_unordered import unordered from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.components.lock import DOMAIN, LockEntityFeature -from homeassistant.const import ( - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, - EntityCategory, -) +from homeassistant.components.lock import DOMAIN, LockEntityFeature, LockState +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider @@ -218,7 +209,7 @@ async def test_if_fires_on_state_change( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_UNLOCKED) + hass.states.async_set(entry.entity_id, LockState.UNLOCKED) assert await async_setup_component( hass, @@ -287,7 +278,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is turning on. - hass.states.async_set(entry.entity_id, STATE_LOCKED) + hass.states.async_set(entry.entity_id, LockState.LOCKED) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -296,7 +287,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is turning off. - hass.states.async_set(entry.entity_id, STATE_UNLOCKED) + hass.states.async_set(entry.entity_id, LockState.UNLOCKED) await hass.async_block_till_done() assert len(service_calls) == 2 assert ( @@ -305,7 +296,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is opens. 
- hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, LockState.OPEN) await hass.async_block_till_done() assert len(service_calls) == 3 assert ( @@ -331,7 +322,7 @@ async def test_if_fires_on_state_change_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_UNLOCKED) + hass.states.async_set(entry.entity_id, LockState.UNLOCKED) assert await async_setup_component( hass, @@ -362,7 +353,7 @@ async def test_if_fires_on_state_change_legacy( ) # Fake that the entity is turning on. - hass.states.async_set(entry.entity_id, STATE_LOCKED) + hass.states.async_set(entry.entity_id, LockState.LOCKED) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -388,7 +379,7 @@ async def test_if_fires_on_state_change_with_for( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_UNLOCKED) + hass.states.async_set(entry.entity_id, LockState.UNLOCKED) assert await async_setup_component( hass, @@ -511,7 +502,7 @@ async def test_if_fires_on_state_change_with_for( await hass.async_block_till_done() assert len(service_calls) == 0 - hass.states.async_set(entry.entity_id, STATE_LOCKED) + hass.states.async_set(entry.entity_id, LockState.LOCKED) await hass.async_block_till_done() assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) @@ -523,7 +514,7 @@ async def test_if_fires_on_state_change_with_for( == f"turn_off device - {entry.entity_id} - unlocked - locked - 0:00:05" ) - hass.states.async_set(entry.entity_id, STATE_UNLOCKING) + hass.states.async_set(entry.entity_id, LockState.UNLOCKING) await hass.async_block_till_done() assert len(service_calls) == 1 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=16)) @@ -535,7 +526,7 @@ async def test_if_fires_on_state_change_with_for( == f"turn_on device - {entry.entity_id} - locked - unlocking - 0:00:05" ) - hass.states.async_set(entry.entity_id, STATE_JAMMED) + hass.states.async_set(entry.entity_id, LockState.JAMMED) await hass.async_block_till_done() assert len(service_calls) == 2 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=21)) @@ -547,7 +538,7 @@ async def test_if_fires_on_state_change_with_for( == f"turn_off device - {entry.entity_id} - unlocking - jammed - 0:00:05" ) - hass.states.async_set(entry.entity_id, STATE_LOCKING) + hass.states.async_set(entry.entity_id, LockState.LOCKING) await hass.async_block_till_done() assert len(service_calls) == 3 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=27)) @@ -559,7 +550,7 @@ async def test_if_fires_on_state_change_with_for( == f"turn_on device - {entry.entity_id} - jammed - locking - 0:00:05" ) - hass.states.async_set(entry.entity_id, STATE_OPENING) + hass.states.async_set(entry.entity_id, LockState.OPENING) await hass.async_block_till_done() assert len(service_calls) == 4 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=27)) diff --git a/tests/components/lock/test_init.py b/tests/components/lock/test_init.py index f0547fbbeae..68af8c7d482 100644 --- a/tests/components/lock/test_init.py +++ b/tests/components/lock/test_init.py @@ -2,6 +2,7 @@ from __future__ import annotations +from enum import Enum import re from typing import Any @@ -15,14 +16,9 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_UNLOCKED, - STATE_UNLOCKING, LockEntityFeature, + 
LockState, ) -from homeassistant.const import STATE_OPEN, STATE_OPENING from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.entity_registry as er @@ -67,37 +63,37 @@ async def test_lock_states(hass: HomeAssistant, mock_lock_entity: MockLock) -> N mock_lock_entity._attr_is_locking = True assert mock_lock_entity.is_locking - assert mock_lock_entity.state == STATE_LOCKING + assert mock_lock_entity.state == LockState.LOCKING mock_lock_entity._attr_is_locked = True mock_lock_entity._attr_is_locking = False assert mock_lock_entity.is_locked - assert mock_lock_entity.state == STATE_LOCKED + assert mock_lock_entity.state == LockState.LOCKED mock_lock_entity._attr_is_unlocking = True assert mock_lock_entity.is_unlocking - assert mock_lock_entity.state == STATE_UNLOCKING + assert mock_lock_entity.state == LockState.UNLOCKING mock_lock_entity._attr_is_locked = False mock_lock_entity._attr_is_unlocking = False assert not mock_lock_entity.is_locked - assert mock_lock_entity.state == STATE_UNLOCKED + assert mock_lock_entity.state == LockState.UNLOCKED mock_lock_entity._attr_is_jammed = True assert mock_lock_entity.is_jammed - assert mock_lock_entity.state == STATE_JAMMED + assert mock_lock_entity.state == LockState.JAMMED assert not mock_lock_entity.is_locked mock_lock_entity._attr_is_jammed = False mock_lock_entity._attr_is_opening = True assert mock_lock_entity.is_opening - assert mock_lock_entity.state == STATE_OPENING + assert mock_lock_entity.state == LockState.OPENING assert mock_lock_entity.is_opening mock_lock_entity._attr_is_opening = False mock_lock_entity._attr_is_open = True assert not mock_lock_entity.is_opening - assert mock_lock_entity.state == STATE_OPEN + assert mock_lock_entity.state == LockState.OPEN assert not mock_lock_entity.is_opening assert mock_lock_entity.is_open @@ -393,27 +389,31 @@ def test_all() -> None: help_test_all(lock) -@pytest.mark.parametrize(("enum"), list(LockEntityFeature)) +def _create_tuples( + enum: type[Enum], constant_prefix: str, remove_in_version: str +) -> list[tuple[Enum, str]]: + return [ + (enum_field, constant_prefix, remove_in_version) + for enum_field in enum + if enum_field + not in [ + lock.LockState.OPEN, + lock.LockState.OPENING, + ] + ] + + +@pytest.mark.parametrize( + ("enum", "constant_prefix", "remove_in_version"), + _create_tuples(lock.LockState, "STATE_", "2025.10"), +) def test_deprecated_constants( caplog: pytest.LogCaptureFixture, - enum: LockEntityFeature, + enum: Enum, + constant_prefix: str, + remove_in_version: str, ) -> None: """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, lock, enum, "SUPPORT_", "2025.1") - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockLockEntity(lock.LockEntity): - _attr_supported_features = 1 - - entity = MockLockEntity() - assert entity.supported_features is lock.LockEntityFeature(1) - assert "MockLockEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "LockEntityFeature.OPEN" in caplog.text - caplog.clear() - assert entity.supported_features is lock.LockEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text + import_and_test_deprecated_constant_enum( + caplog, lock, enum, constant_prefix, remove_in_version + ) diff --git 
a/tests/components/logbook/common.py b/tests/components/logbook/common.py index afa8b7fcde5..abb118467f4 100644 --- a/tests/components/logbook/common.py +++ b/tests/components/logbook/common.py @@ -35,7 +35,7 @@ class MockRow: self.event_data = json.dumps(data, cls=JSONEncoder) self.data = data self.time_fired = dt_util.utcnow() - self.time_fired_ts = dt_util.utc_to_timestamp(self.time_fired) + self.time_fired_ts = self.time_fired.timestamp() self.context_parent_id_bin = ( ulid_to_bytes_or_none(context.parent_id) if context else None ) diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 606c398c31f..841c8ed1247 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -11,6 +11,8 @@ import pytest import voluptuous as vol from homeassistant.components import logbook, recorder + +# pylint: disable-next=hass-component-root-import from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED from homeassistant.components.logbook.models import EventAsRow, LazyEventPartialState @@ -328,7 +330,7 @@ def create_state_changed_event_from_old_new( row_id=1, event_type=PSEUDO_EVENT_STATE_CHANGED, event_data="{}", - time_fired_ts=dt_util.utc_to_timestamp(event_time_fired), + time_fired_ts=event_time_fired.timestamp(), context_id_bin=None, context_user_id_bin=None, context_parent_id_bin=None, diff --git a/tests/components/logbook/test_websocket_api.py b/tests/components/logbook/test_websocket_api.py index e5649564f94..50139d0f4f7 100644 --- a/tests/components/logbook/test_websocket_api.py +++ b/tests/components/logbook/test_websocket_api.py @@ -1181,6 +1181,10 @@ async def test_subscribe_unsubscribe_logbook_stream( await async_wait_recording_done(hass) websocket_client = await hass_ws_client() init_listeners = hass.bus.async_listeners() + init_listeners = { + **init_listeners, + EVENT_HOMEASSISTANT_START: init_listeners[EVENT_HOMEASSISTANT_START] - 1, + } await websocket_client.send_json( {"id": 7, "type": "logbook/event_stream", "start_time": now.isoformat()} ) @@ -2981,8 +2985,8 @@ async def test_live_stream_with_changed_state_change( ] ) - hass.states.async_set("binary_sensor.is_light", "ignored") - hass.states.async_set("binary_sensor.is_light", "init") + hass.states.async_set("binary_sensor.is_light", "unavailable") + hass.states.async_set("binary_sensor.is_light", "unknown") await async_wait_recording_done(hass) @callback @@ -3019,7 +3023,7 @@ async def test_live_stream_with_changed_state_change( # Make sure we get rows back in order assert recieved_rows == [ - {"entity_id": "binary_sensor.is_light", "state": "init", "when": ANY}, + {"entity_id": "binary_sensor.is_light", "state": "unknown", "when": ANY}, {"entity_id": "binary_sensor.is_light", "state": "on", "when": ANY}, {"entity_id": "binary_sensor.is_light", "state": "off", "when": ANY}, ] diff --git a/tests/components/loqed/test_lock.py b/tests/components/loqed/test_lock.py index 5fd00b66c43..89a7888571a 100644 --- a/tests/components/loqed/test_lock.py +++ b/tests/components/loqed/test_lock.py @@ -2,6 +2,7 @@ from loqedAPI import loqed +from homeassistant.components.lock import LockState from homeassistant.components.loqed import LoqedDataCoordinator from homeassistant.components.loqed.const import DOMAIN from homeassistant.const import ( @@ -9,8 +10,6 @@ from homeassistant.const import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_LOCKED, - STATE_UNLOCKED, ) from 
homeassistant.core import HomeAssistant @@ -27,7 +26,7 @@ async def test_lock_entity( state = hass.states.get(entity_id) assert state - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED async def test_lock_responds_to_bolt_state_updates( @@ -43,7 +42,7 @@ async def test_lock_responds_to_bolt_state_updates( state = hass.states.get(entity_id) assert state - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED async def test_lock_transition_to_unlocked( diff --git a/tests/components/lovelace/test_cast.py b/tests/components/lovelace/test_cast.py index c54b31d9297..dc57975701d 100644 --- a/tests/components/lovelace/test_cast.py +++ b/tests/components/lovelace/test_cast.py @@ -8,8 +8,8 @@ import pytest from homeassistant.components.lovelace import cast as lovelace_cast from homeassistant.components.media_player import MediaClass -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component diff --git a/tests/components/lovelace/test_system_health.py b/tests/components/lovelace/test_system_health.py index 4fe248fa950..251153fe419 100644 --- a/tests/components/lovelace/test_system_health.py +++ b/tests/components/lovelace/test_system_health.py @@ -72,6 +72,6 @@ async def test_system_health_info_yaml_not_found(hass: HomeAssistant) -> None: assert info == { "dashboards": 1, "mode": "yaml", - "error": "{} not found".format(hass.config.path("ui-lovelace.yaml")), + "error": f"{hass.config.path('ui-lovelace.yaml')} not found", "resources": 0, } diff --git a/tests/components/lyric/test_config_flow.py b/tests/components/lyric/test_config_flow.py index 1e0ae04f741..e1916924e9f 100644 --- a/tests/components/lyric/test_config_flow.py +++ b/tests/components/lyric/test_config_flow.py @@ -126,9 +126,7 @@ async def test_reauthentication_flow( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=old_entry.data - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/madvr/conftest.py b/tests/components/madvr/conftest.py index 187786c6964..3136e04b06b 100644 --- a/tests/components/madvr/conftest.py +++ b/tests/components/madvr/conftest.py @@ -57,6 +57,7 @@ def mock_config_entry() -> MockConfigEntry: data=MOCK_CONFIG, unique_id=MOCK_MAC, title=DEFAULT_NAME, + entry_id="3bd2acb0e4f0476d40865546d0d91132", ) diff --git a/tests/components/madvr/snapshots/test_diagnostics.ambr b/tests/components/madvr/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..92d0578dba8 --- /dev/null +++ b/tests/components/madvr/snapshots/test_diagnostics.ambr @@ -0,0 +1,30 @@ +# serializer version: 1 +# name: test_entry_diagnostics[positive_payload0] + dict({ + 'config_entry': dict({ + 'data': dict({ + 'host': '**REDACTED**', + 'port': 44077, + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'madvr', + 'entry_id': '3bd2acb0e4f0476d40865546d0d91132', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'envy', + 'unique_id': '00:11:22:33:44:55', + 'version': 1, + }), + 'madvr_data': dict({ + 'is_on': True, + 
}), + }) +# --- diff --git a/tests/components/madvr/test_config_flow.py b/tests/components/madvr/test_config_flow.py index 65eba05c802..7b31ec6c17c 100644 --- a/tests/components/madvr/test_config_flow.py +++ b/tests/components/madvr/test_config_flow.py @@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.madvr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -135,10 +135,7 @@ async def test_reconfigure_flow( ) -> None: """Test reconfigure flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -176,10 +173,7 @@ async def test_reconfigure_new_device( """Test reconfigure flow.""" mock_config_entry.add_to_hass(hass) # test reconfigure with a new device (should fail) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) # define new host new_host = "192.168.1.100" @@ -207,10 +201,7 @@ async def test_reconfigure_flow_errors( """Test error handling in reconfigure flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" diff --git a/tests/components/madvr/test_diagnostics.py b/tests/components/madvr/test_diagnostics.py new file mode 100644 index 00000000000..453eaba8d94 --- /dev/null +++ b/tests/components/madvr/test_diagnostics.py @@ -0,0 +1,48 @@ +"""Test madVR diagnostics.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from . 
import setup_integration +from .conftest import get_update_callback + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.parametrize( + ("positive_payload"), + [ + {"is_on": True}, + ], +) +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_madvr_client: AsyncMock, + snapshot: SnapshotAssertion, + positive_payload: dict, +) -> None: + """Test config entry diagnostics.""" + with patch("homeassistant.components.madvr.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + update_callback = get_update_callback(mock_madvr_client) + + # Add data to test storing diagnostic data + update_callback(positive_payload) + await hass.async_block_till_done() + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/mailbox/__init__.py b/tests/components/mailbox/__init__.py deleted file mode 100644 index 5e212354579..00000000000 --- a/tests/components/mailbox/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The tests for mailbox platforms.""" diff --git a/tests/components/mailbox/test_init.py b/tests/components/mailbox/test_init.py deleted file mode 100644 index 6fcf9176aae..00000000000 --- a/tests/components/mailbox/test_init.py +++ /dev/null @@ -1,225 +0,0 @@ -"""The tests for the mailbox component.""" - -from datetime import datetime -from hashlib import sha1 -from http import HTTPStatus -from typing import Any - -from aiohttp.test_utils import TestClient -import pytest - -from homeassistant.components import mailbox -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util - -from tests.common import MockModule, mock_integration, mock_platform -from tests.typing import ClientSessionGenerator - -MAILBOX_NAME = "TestMailbox" -MEDIA_DATA = b"3f67c4ea33b37d1710f" -MESSAGE_TEXT = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - - -def _create_message(idx: int) -> dict[str, Any]: - """Create a sample message.""" - msgtime = dt_util.as_timestamp(datetime(2010, 12, idx + 1, 13, 17, 00)) - msgtxt = f"Message {idx + 1}. {MESSAGE_TEXT}" - msgsha = sha1(msgtxt.encode("utf-8")).hexdigest() - return { - "info": { - "origtime": int(msgtime), - "callerid": "John Doe <212-555-1212>", - "duration": "10", - }, - "text": msgtxt, - "sha": msgsha, - } - - -class TestMailbox(mailbox.Mailbox): - """Test Mailbox, with 10 sample messages.""" - - # This class doesn't contain any tests! Skip pytest test collection. 
- __test__ = False - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize Test mailbox.""" - super().__init__(hass, name) - self._messages: dict[str, dict[str, Any]] = {} - for idx in range(10): - msg = _create_message(idx) - msgsha = msg["sha"] - self._messages[msgsha] = msg - - @property - def media_type(self) -> str: - """Return the supported media type.""" - return mailbox.CONTENT_TYPE_MPEG - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return True - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return True - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - if msgid not in self._messages: - raise mailbox.StreamError("Message not found") - - return MEDIA_DATA - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - return sorted( - self._messages.values(), - key=lambda item: item["info"]["origtime"], # type: ignore[no-any-return] - reverse=True, - ) - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - if msgid in self._messages: - del self._messages[msgid] - self.async_update() - return True - - -class MockMailbox: - """A mock mailbox platform.""" - - async def async_get_handler( - self, - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> mailbox.Mailbox: - """Set up the Test mailbox.""" - return TestMailbox(hass, MAILBOX_NAME) - - -@pytest.fixture -def mock_mailbox(hass: HomeAssistant) -> None: - """Mock mailbox.""" - mock_integration(hass, MockModule(domain="test")) - mock_platform(hass, "test.mailbox", MockMailbox()) - - -@pytest.fixture -async def mock_http_client( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_mailbox: None -) -> TestClient: - """Start the Home Assistant HTTP component.""" - assert await async_setup_component( - hass, mailbox.DOMAIN, {mailbox.DOMAIN: {"platform": "test"}} - ) - return await hass_client() - - -async def test_get_platforms_from_mailbox(mock_http_client: TestClient) -> None: - """Get platforms from mailbox.""" - url = "/api/mailbox/platforms" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - result = await req.json() - assert len(result) == 1 - assert result[0].get("name") == "TestMailbox" - - -async def test_get_messages_from_mailbox(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - url = "/api/mailbox/messages/TestMailbox" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - result = await req.json() - assert len(result) == 10 - - -async def test_get_media_from_mailbox(mock_http_client: TestClient) -> None: - """Get audio from mailbox.""" - mp3sha = "7cad61312c7b66f619295be2da8c7ac73b4968f1" - msgtxt = "Message 1. Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - msgsha = sha1(msgtxt.encode("utf-8")).hexdigest() - - url = f"/api/mailbox/media/TestMailbox/{msgsha}" - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - data = await req.read() - assert sha1(data).hexdigest() == mp3sha - - -async def test_delete_from_mailbox(mock_http_client: TestClient) -> None: - """Get audio from mailbox.""" - msgtxt1 = "Message 1. Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - msgtxt2 = "Message 3. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
" - msgsha1 = sha1(msgtxt1.encode("utf-8")).hexdigest() - msgsha2 = sha1(msgtxt2.encode("utf-8")).hexdigest() - - for msg in (msgsha1, msgsha2): - url = f"/api/mailbox/delete/TestMailbox/{msg}" - req = await mock_http_client.delete(url) - assert req.status == HTTPStatus.OK - - url = "/api/mailbox/messages/TestMailbox" - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - result = await req.json() - assert len(result) == 8 - - -async def test_get_messages_from_invalid_mailbox(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - url = "/api/mailbox/messages/mailbox.invalid_mailbox" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.NOT_FOUND - - -async def test_get_media_from_invalid_mailbox(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - msgsha = "0000000000000000000000000000000000000000" - url = f"/api/mailbox/media/mailbox.invalid_mailbox/{msgsha}" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.NOT_FOUND - - -async def test_get_media_from_invalid_msgid(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - msgsha = "0000000000000000000000000000000000000000" - url = f"/api/mailbox/media/TestMailbox/{msgsha}" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.INTERNAL_SERVER_ERROR - - -async def test_delete_from_invalid_mailbox(mock_http_client: TestClient) -> None: - """Get audio from mailbox.""" - msgsha = "0000000000000000000000000000000000000000" - url = f"/api/mailbox/delete/mailbox.invalid_mailbox/{msgsha}" - - req = await mock_http_client.delete(url) - assert req.status == HTTPStatus.NOT_FOUND - - -async def test_repair_issue_is_created( - hass: HomeAssistant, issue_registry: ir.IssueRegistry, mock_mailbox: None -) -> None: - """Test repair issue is created.""" - assert await async_setup_component( - hass, mailbox.DOMAIN, {mailbox.DOMAIN: {"platform": "test"}} - ) - await hass.async_block_till_done() - assert ( - mailbox.DOMAIN, - "deprecated_mailbox_test", - ) in issue_registry.issues diff --git a/tests/components/mailgun/test_init.py b/tests/components/mailgun/test_init.py index 2e60c56faa4..7dbde02b10f 100644 --- a/tests/components/mailgun/test_init.py +++ b/tests/components/mailgun/test_init.py @@ -8,9 +8,9 @@ import pytest from homeassistant import config_entries from homeassistant.components import mailgun, webhook -from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_API_KEY, CONF_DOMAIN from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component diff --git a/tests/components/manual/test_alarm_control_panel.py b/tests/components/manual/test_alarm_control_panel.py index 7900dfd1c91..9fc92cd5458 100644 --- a/tests/components/manual/test_alarm_control_panel.py +++ b/tests/components/manual/test_alarm_control_panel.py @@ -7,7 +7,10 @@ from freezegun import freeze_time import pytest from homeassistant.components import alarm_control_panel -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) from homeassistant.components.demo import alarm_control_panel as demo from homeassistant.components.manual.alarm_control_panel import ( 
ATTR_NEXT_STATE, @@ -21,15 +24,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_ARM_VACATION, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import CoreState, HomeAssistant, State from homeassistant.exceptions import ServiceValidationError @@ -53,11 +47,14 @@ async def test_setup_demo_platform(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_no_pending(hass: HomeAssistant, service, expected_state) -> None: @@ -79,7 +76,7 @@ async def test_no_pending(hass: HomeAssistant, service, expected_state) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -94,11 +91,14 @@ async def test_no_pending(hass: HomeAssistant, service, expected_state) -> None: @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_no_pending_when_code_not_req( @@ -123,7 +123,7 @@ async def test_no_pending_when_code_not_req( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -138,11 +138,14 @@ async def test_no_pending_when_code_not_req( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, 
AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_pending(hass: HomeAssistant, service, expected_state) -> None: @@ -164,7 +167,7 @@ async def test_with_pending(hass: HomeAssistant, service, expected_state) -> Non entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -173,7 +176,7 @@ async def test_with_pending(hass: HomeAssistant, service, expected_state) -> Non blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMING + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING state = hass.states.get(entity_id) assert state.attributes["next_state"] == expected_state @@ -203,11 +206,14 @@ async def test_with_pending(hass: HomeAssistant, service, expected_state) -> Non @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) -> None: @@ -229,7 +235,7 @@ async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) - entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await hass.services.async_call( @@ -242,17 +248,20 @@ async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) - blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_template_code(hass: HomeAssistant, service, expected_state) -> None: @@ -274,7 +283,7 @@ async def test_with_template_code(hass: HomeAssistant, service, expected_state) entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( 
alarm_control_panel.DOMAIN, @@ -290,11 +299,14 @@ async def test_with_template_code(hass: HomeAssistant, service, expected_state) @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_specific_pending( @@ -324,7 +336,7 @@ async def test_with_specific_pending( blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMING + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -355,11 +367,11 @@ async def test_trigger_no_pending(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=60) with patch( @@ -370,8 +382,8 @@ async def test_trigger_no_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_delay(hass: HomeAssistant) -> None: @@ -394,17 +406,17 @@ async def test_trigger_with_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -415,8 +427,8 @@ async def test_trigger_with_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_zero_trigger_time(hass: HomeAssistant) -> None: @@ -438,11 +450,11 @@ async def test_trigger_zero_trigger_time(hass: 
HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_zero_trigger_time_with_pending(hass: HomeAssistant) -> None: @@ -464,11 +476,11 @@ async def test_trigger_zero_trigger_time_with_pending(hass: HomeAssistant) -> No entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_pending(hass: HomeAssistant) -> None: @@ -490,14 +502,14 @@ async def test_trigger_with_pending(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING state = hass.states.get(entity_id) - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -508,8 +520,8 @@ async def test_trigger_with_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -520,7 +532,7 @@ async def test_trigger_with_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED async def test_trigger_with_unused_specific_delay(hass: HomeAssistant) -> None: @@ -544,17 +556,17 @@ async def test_trigger_with_unused_specific_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -565,8 +577,8 @@ async def test_trigger_with_unused_specific_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert 
state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_specific_delay(hass: HomeAssistant) -> None: @@ -590,17 +602,17 @@ async def test_trigger_with_specific_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -611,8 +623,8 @@ async def test_trigger_with_specific_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: @@ -635,17 +647,17 @@ async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -656,8 +668,8 @@ async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with patch( @@ -668,8 +680,8 @@ async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> None: @@ -693,17 +705,17 @@ async def 
test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> N entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -714,8 +726,8 @@ async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> N await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with patch( @@ -726,8 +738,8 @@ async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> N await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: @@ -752,7 +764,7 @@ async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -763,8 +775,8 @@ async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -774,7 +786,7 @@ async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_disarm_after_trigger(hass: HomeAssistant) -> None: @@ -796,13 +808,13 @@ async def test_trigger_with_disarm_after_trigger(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == 
AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -812,7 +824,7 @@ async def test_trigger_with_disarm_after_trigger(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_zero_specific_trigger_time(hass: HomeAssistant) -> None: @@ -835,11 +847,11 @@ async def test_trigger_with_zero_specific_trigger_time(hass: HomeAssistant) -> N entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_unused_zero_specific_trigger_time( @@ -864,13 +876,13 @@ async def test_trigger_with_unused_zero_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -880,7 +892,7 @@ async def test_trigger_with_unused_zero_specific_trigger_time( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_specific_trigger_time(hass: HomeAssistant) -> None: @@ -902,13 +914,13 @@ async def test_trigger_with_specific_trigger_time(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -918,7 +930,7 @@ async def test_trigger_with_specific_trigger_time(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_no_disarm_after_trigger(hass: HomeAssistant) -> None: @@ -941,17 +953,17 @@ async def test_trigger_with_no_disarm_after_trigger(hass: HomeAssistant) -> None entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await 
common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -961,7 +973,7 @@ async def test_trigger_with_no_disarm_after_trigger(hass: HomeAssistant) -> None async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY async def test_back_to_back_trigger_with_no_disarm_after_trigger( @@ -986,17 +998,17 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1006,13 +1018,13 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1022,7 +1034,7 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY async def test_disarm_while_pending_trigger(hass: HomeAssistant) -> None: @@ -1043,15 +1055,15 @@ async def test_disarm_while_pending_trigger(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == 
STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1061,7 +1073,7 @@ async def test_disarm_while_pending_trigger(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> None: @@ -1083,7 +1095,7 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED assert ( hass.states.get(entity_id).attributes[alarm_control_panel.ATTR_CODE_FORMAT] == alarm_control_panel.CodeFormat.NUMBER @@ -1091,12 +1103,12 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1107,8 +1119,8 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_disarm_with_template_code(hass: HomeAssistant) -> None: @@ -1130,23 +1142,23 @@ async def test_disarm_with_template_code(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_home(hass, "def") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, "def") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME await common.async_alarm_disarm(hass, "abc") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: @@ -1171,21 +1183,21 @@ async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMING - assert state.attributes["previous_state"] == 
STATE_ALARM_DISARMED - assert state.attributes["next_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMING + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.attributes["next_state"] == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMING - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.attributes["next_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMING + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.attributes["next_state"] == AlarmControlPanelState.ARMED_AWAY future = dt_util.utcnow() + timedelta(seconds=1) with freeze_time(future): @@ -1193,14 +1205,14 @@ async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with freeze_time(future): @@ -1208,19 +1220,19 @@ async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED @pytest.mark.parametrize( "expected_state", [ - (STATE_ALARM_ARMED_AWAY), - (STATE_ALARM_ARMED_CUSTOM_BYPASS), - (STATE_ALARM_ARMED_HOME), - (STATE_ALARM_ARMED_NIGHT), - (STATE_ALARM_ARMED_VACATION), - (STATE_ALARM_DISARMED), + (AlarmControlPanelState.ARMED_AWAY), + (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + (AlarmControlPanelState.ARMED_HOME), + (AlarmControlPanelState.ARMED_NIGHT), + (AlarmControlPanelState.ARMED_VACATION), + (AlarmControlPanelState.DISARMED), ], ) async def test_restore_state(hass: HomeAssistant, expected_state) -> None: @@ -1253,11 +1265,11 @@ async def test_restore_state(hass: HomeAssistant, expected_state) -> None: @pytest.mark.parametrize( "expected_state", [ - (STATE_ALARM_ARMED_AWAY), - (STATE_ALARM_ARMED_CUSTOM_BYPASS), - (STATE_ALARM_ARMED_HOME), - (STATE_ALARM_ARMED_NIGHT), - (STATE_ALARM_ARMED_VACATION), + (AlarmControlPanelState.ARMED_AWAY), + (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + (AlarmControlPanelState.ARMED_HOME), + (AlarmControlPanelState.ARMED_NIGHT), + (AlarmControlPanelState.ARMED_VACATION), ], ) async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None: @@ -1265,7 +1277,7 @@ async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None time = dt_util.utcnow() - timedelta(seconds=15) entity_id = "alarm_control_panel.test" attributes = { - "previous_state": STATE_ALARM_DISARMED, + "previous_state": AlarmControlPanelState.DISARMED, 
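# --- Editor's note (illustrative sketch, not part of the patch) --------------
# The restore-state hunks around here now store AlarmControlPanelState members
# in the "previous_state"/"next_state" attributes instead of the old string
# constants.  Because a StrEnum member is itself a str, it serializes as the
# bare state string (for example when a restore cache is persisted as JSON),
# so the restored attribute compares equal to both the enum member and the
# legacy value.  The enum below is a hypothetical stand-in, not the real class.
import json
from enum import StrEnum


class AlarmPanelStateDemo(StrEnum):
    DISARMED = "disarmed"
    TRIGGERED = "triggered"


attributes = {
    "previous_state": AlarmPanelStateDemo.DISARMED,
    "next_state": AlarmPanelStateDemo.TRIGGERED,
}
restored = json.loads(json.dumps(attributes))
assert restored["next_state"] == "triggered"
assert restored["next_state"] == AlarmPanelStateDemo.TRIGGERED
# ------------------------------------------------------------------------------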
"next_state": expected_state, } mock_restore_cache( @@ -1292,9 +1304,9 @@ async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None state = hass.states.get(entity_id) assert state - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED assert state.attributes["next_state"] == expected_state - assert state.state == STATE_ALARM_ARMING + assert state.state == AlarmControlPanelState.ARMING future = time + timedelta(seconds=61) with freeze_time(future): @@ -1308,12 +1320,12 @@ async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None @pytest.mark.parametrize( "previous_state", [ - (STATE_ALARM_ARMED_AWAY), - (STATE_ALARM_ARMED_CUSTOM_BYPASS), - (STATE_ALARM_ARMED_HOME), - (STATE_ALARM_ARMED_NIGHT), - (STATE_ALARM_ARMED_VACATION), - (STATE_ALARM_DISARMED), + (AlarmControlPanelState.ARMED_AWAY), + (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + (AlarmControlPanelState.ARMED_HOME), + (AlarmControlPanelState.ARMED_NIGHT), + (AlarmControlPanelState.ARMED_VACATION), + (AlarmControlPanelState.DISARMED), ], ) async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> None: @@ -1322,11 +1334,18 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non entity_id = "alarm_control_panel.test" attributes = { "previous_state": previous_state, - "next_state": STATE_ALARM_TRIGGERED, + "next_state": AlarmControlPanelState.TRIGGERED, } mock_restore_cache( hass, - (State(entity_id, STATE_ALARM_TRIGGERED, attributes, last_updated=time),), + ( + State( + entity_id, + AlarmControlPanelState.TRIGGERED, + attributes, + last_updated=time, + ), + ), ) hass.set_state(CoreState.starting) @@ -1351,8 +1370,8 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non state = hass.states.get(entity_id) assert state assert state.attributes["previous_state"] == previous_state - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED - assert state.state == STATE_ALARM_PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == AlarmControlPanelState.PENDING future = time + timedelta(seconds=61) with freeze_time(future): @@ -1360,7 +1379,7 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED future = time + timedelta(seconds=121) with freeze_time(future): @@ -1374,12 +1393,12 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non @pytest.mark.parametrize( "previous_state", [ - (STATE_ALARM_ARMED_AWAY), - (STATE_ALARM_ARMED_CUSTOM_BYPASS), - (STATE_ALARM_ARMED_HOME), - (STATE_ALARM_ARMED_NIGHT), - (STATE_ALARM_ARMED_VACATION), - (STATE_ALARM_DISARMED), + (AlarmControlPanelState.ARMED_AWAY), + (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + (AlarmControlPanelState.ARMED_HOME), + (AlarmControlPanelState.ARMED_NIGHT), + (AlarmControlPanelState.ARMED_VACATION), + (AlarmControlPanelState.DISARMED), ], ) async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> None: @@ -1391,7 +1410,14 @@ async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> N } mock_restore_cache( hass, - (State(entity_id, STATE_ALARM_TRIGGERED, attributes, last_updated=time),), + ( + State( + entity_id, + 
AlarmControlPanelState.TRIGGERED, + attributes, + last_updated=time, + ), + ), ) hass.set_state(CoreState.starting) @@ -1417,7 +1443,7 @@ async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> N assert state assert state.attributes[ATTR_PREVIOUS_STATE] == previous_state assert state.attributes[ATTR_NEXT_STATE] is None - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED future = time + timedelta(seconds=121) with freeze_time(future): @@ -1433,11 +1459,18 @@ async def test_restore_state_triggered_long_ago(hass: HomeAssistant) -> None: time = dt_util.utcnow() - timedelta(seconds=125) entity_id = "alarm_control_panel.test" attributes = { - "previous_state": STATE_ALARM_ARMED_AWAY, + "previous_state": AlarmControlPanelState.ARMED_AWAY, } mock_restore_cache( hass, - (State(entity_id, STATE_ALARM_TRIGGERED, attributes, last_updated=time),), + ( + State( + entity_id, + AlarmControlPanelState.TRIGGERED, + attributes, + last_updated=time, + ), + ), ) hass.set_state(CoreState.starting) @@ -1460,7 +1493,7 @@ async def test_restore_state_triggered_long_ago(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED async def test_default_arming_states(hass: HomeAssistant) -> None: diff --git a/tests/components/manual_mqtt/test_alarm_control_panel.py b/tests/components/manual_mqtt/test_alarm_control_panel.py index a1c913135a7..2b401cb10a0 100644 --- a/tests/components/manual_mqtt/test_alarm_control_panel.py +++ b/tests/components/manual_mqtt/test_alarm_control_panel.py @@ -7,6 +7,7 @@ from freezegun import freeze_time import pytest from homeassistant.components import alarm_control_panel +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, @@ -15,14 +16,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_ARM_VACATION, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -78,11 +71,14 @@ async def test_fail_setup_without_command_topic( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_no_pending( @@ -111,7 +107,7 @@ async def test_no_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -126,11 +122,14 @@ async def test_no_pending( 
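# --- Editor's note (illustrative sketch, not part of the patch) --------------
# The manual_mqtt hunks below apply the same substitution inside the
# @pytest.mark.parametrize tables: each (service, expected_state) pair now
# carries an AlarmControlPanelState member as the expected value.  A minimal,
# self-contained version of that pattern follows; the enum and service names
# are stand-ins, not the real Home Assistant objects.
from enum import StrEnum

import pytest


class AlarmPanelStateDemo(StrEnum):
    ARMED_AWAY = "armed_away"
    ARMED_HOME = "armed_home"


@pytest.mark.parametrize(
    ("service", "expected_state"),
    [
        ("alarm_arm_away", AlarmPanelStateDemo.ARMED_AWAY),
        ("alarm_arm_home", AlarmPanelStateDemo.ARMED_HOME),
    ],
)
def test_expected_state_matches_raw_string(service, expected_state) -> None:
    """The enum member is interchangeable with the raw state string."""
    assert expected_state == expected_state.value
# ------------------------------------------------------------------------------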
@pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_no_pending_when_code_not_req( @@ -160,7 +159,7 @@ async def test_no_pending_when_code_not_req( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -175,11 +174,14 @@ async def test_no_pending_when_code_not_req( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_pending( @@ -208,7 +210,7 @@ async def test_with_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -217,7 +219,7 @@ async def test_with_pending( blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING state = hass.states.get(entity_id) assert state.attributes["post_pending_state"] == expected_state @@ -247,11 +249,14 @@ async def test_with_pending( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_invalid_code( @@ -280,7 +285,7 @@ async def test_with_invalid_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code 
provided$"): await hass.services.async_call( @@ -290,17 +295,20 @@ async def test_with_invalid_code( blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_template_code( @@ -329,7 +337,7 @@ async def test_with_template_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -345,11 +353,14 @@ async def test_with_template_code( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_specific_pending( @@ -384,7 +395,7 @@ async def test_with_specific_pending( blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -419,12 +430,12 @@ async def test_trigger_no_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=60) with patch( @@ -434,7 +445,7 @@ async def test_trigger_no_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_delay( @@ -461,17 +472,17 @@ async def test_trigger_with_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == 
STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -482,7 +493,7 @@ async def test_trigger_with_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_zero_trigger_time( @@ -508,11 +519,11 @@ async def test_trigger_zero_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_zero_trigger_time_with_pending( @@ -538,11 +549,11 @@ async def test_trigger_zero_trigger_time_with_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_pending( @@ -568,14 +579,14 @@ async def test_trigger_with_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING state = hass.states.get(entity_id) - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -585,7 +596,7 @@ async def test_trigger_with_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -595,7 +606,7 @@ async def test_trigger_with_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_disarm_after_trigger( @@ -621,11 +632,11 @@ async def test_trigger_with_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = 
dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -635,7 +646,7 @@ async def test_trigger_with_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_zero_specific_trigger_time( @@ -662,11 +673,11 @@ async def test_trigger_with_zero_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_unused_zero_specific_trigger_time( @@ -693,11 +704,11 @@ async def test_trigger_with_unused_zero_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -707,7 +718,7 @@ async def test_trigger_with_unused_zero_specific_trigger_time( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_specific_trigger_time( @@ -733,11 +744,11 @@ async def test_trigger_with_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -747,7 +758,7 @@ async def test_trigger_with_specific_trigger_time( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_back_to_back_trigger_with_no_disarm_after_trigger( @@ -773,15 +784,15 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -791,11 +802,11 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await 
hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -805,7 +816,7 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY async def test_disarm_while_pending_trigger( @@ -830,15 +841,15 @@ async def test_disarm_while_pending_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -848,7 +859,7 @@ async def test_disarm_while_pending_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_disarm_during_trigger_with_invalid_code( @@ -874,7 +885,7 @@ async def test_disarm_during_trigger_with_invalid_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED assert ( hass.states.get(entity_id).attributes[alarm_control_panel.ATTR_CODE_FORMAT] == alarm_control_panel.CodeFormat.NUMBER @@ -882,12 +893,12 @@ async def test_disarm_during_trigger_with_invalid_code( await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING with pytest.raises(HomeAssistantError, match=r"Invalid alarm code provided$"): await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -897,7 +908,7 @@ async def test_disarm_during_trigger_with_invalid_code( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_unused_specific_delay( @@ -925,17 +936,17 @@ async def test_trigger_with_unused_specific_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert 
hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -946,7 +957,7 @@ async def test_trigger_with_unused_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_specific_delay( @@ -974,17 +985,17 @@ async def test_trigger_with_specific_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -995,7 +1006,7 @@ async def test_trigger_with_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_pending_and_delay( @@ -1023,17 +1034,17 @@ async def test_trigger_with_pending_and_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -1044,8 +1055,8 @@ async def test_trigger_with_pending_and_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with patch( @@ -1056,7 +1067,7 @@ async def test_trigger_with_pending_and_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_pending_and_specific_delay( @@ -1085,17 +1096,17 @@ async def 
test_trigger_with_pending_and_specific_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -1106,8 +1117,8 @@ async def test_trigger_with_pending_and_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with patch( @@ -1118,7 +1129,7 @@ async def test_trigger_with_pending_and_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_specific_pending( @@ -1147,7 +1158,7 @@ async def test_trigger_with_specific_pending( await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -1157,7 +1168,7 @@ async def test_trigger_with_specific_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1167,7 +1178,7 @@ async def test_trigger_with_specific_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_no_disarm_after_trigger( @@ -1194,15 +1205,15 @@ async def test_trigger_with_no_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1212,7 +1223,7 @@ async def test_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert 
hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY async def test_arm_away_after_disabled_disarmed( @@ -1241,21 +1252,21 @@ async def test_arm_away_after_disabled_disarmed( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["pre_pending_state"] == STATE_ALARM_DISARMED - assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["pre_pending_state"] == AlarmControlPanelState.DISARMED + assert state.attributes["post_pending_state"] == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["pre_pending_state"] == STATE_ALARM_DISARMED - assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["pre_pending_state"] == AlarmControlPanelState.DISARMED + assert state.attributes["post_pending_state"] == AlarmControlPanelState.ARMED_AWAY future = dt_util.utcnow() + timedelta(seconds=1) with freeze_time(future): @@ -1263,14 +1274,18 @@ async def test_arm_away_after_disabled_disarmed( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["pre_pending_state"] == STATE_ALARM_ARMED_AWAY - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert ( + state.attributes["pre_pending_state"] == AlarmControlPanelState.ARMED_AWAY + ) + assert ( + state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + ) future += timedelta(seconds=1) with freeze_time(future): @@ -1278,7 +1293,7 @@ async def test_arm_away_after_disabled_disarmed( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_disarm_with_template_code( @@ -1304,33 +1319,33 @@ async def test_disarm_with_template_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_home(hass, "def") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME with pytest.raises(HomeAssistantError, match=r"Invalid alarm code provided$"): await common.async_alarm_disarm(hass, "def") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME await common.async_alarm_disarm(hass, "abc") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED @pytest.mark.parametrize( ("config", "expected_state"), [ - ("payload_arm_away", STATE_ALARM_ARMED_AWAY), - 
("payload_arm_custom_bypass", STATE_ALARM_ARMED_CUSTOM_BYPASS), - ("payload_arm_home", STATE_ALARM_ARMED_HOME), - ("payload_arm_night", STATE_ALARM_ARMED_NIGHT), - ("payload_arm_vacation", STATE_ALARM_ARMED_VACATION), + ("payload_arm_away", AlarmControlPanelState.ARMED_AWAY), + ("payload_arm_custom_bypass", AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + ("payload_arm_home", AlarmControlPanelState.ARMED_HOME), + ("payload_arm_night", AlarmControlPanelState.ARMED_NIGHT), + ("payload_arm_vacation", AlarmControlPanelState.ARMED_VACATION), ], ) async def test_arm_via_command_topic( @@ -1359,12 +1374,12 @@ async def test_arm_via_command_topic( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED # Fire the arm command via MQTT; ensure state changes to arming async_fire_mqtt_message(hass, "alarm/command", command) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) @@ -1400,18 +1415,18 @@ async def test_disarm_pending_via_command_topic( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING # Now that we're pending, receive a command to disarm async_fire_mqtt_message(hass, "alarm/command", "DISARM") await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_state_changes_are_published_to_mqtt( @@ -1437,7 +1452,7 @@ async def test_state_changes_are_published_to_mqtt( # Component should send disarmed alarm state on startup await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_DISARMED, 0, True + "alarm/state", AlarmControlPanelState.DISARMED, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1445,7 +1460,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_arm_home(hass, "1234") await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_PENDING, 0, True + "alarm/state", AlarmControlPanelState.PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit @@ -1457,7 +1472,7 @@ async def test_state_changes_are_published_to_mqtt( async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_ARMED_HOME, 0, True + "alarm/state", AlarmControlPanelState.ARMED_HOME, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1465,7 +1480,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_arm_away(hass, "1234") await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_PENDING, 0, True + "alarm/state", AlarmControlPanelState.PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit @@ -1477,7 +1492,7 @@ async def test_state_changes_are_published_to_mqtt( 
async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_ARMED_AWAY, 0, True + "alarm/state", AlarmControlPanelState.ARMED_AWAY, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1485,7 +1500,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_arm_night(hass, "1234") await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_PENDING, 0, True + "alarm/state", AlarmControlPanelState.PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit @@ -1497,7 +1512,7 @@ async def test_state_changes_are_published_to_mqtt( async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_ARMED_NIGHT, 0, True + "alarm/state", AlarmControlPanelState.ARMED_NIGHT, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1505,7 +1520,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_disarm(hass) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_DISARMED, 0, True + "alarm/state", AlarmControlPanelState.DISARMED, 0, True ) diff --git a/tests/components/map/__init__.py b/tests/components/map/__init__.py deleted file mode 100644 index 142afc0d5c9..00000000000 --- a/tests/components/map/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for Map.""" diff --git a/tests/components/map/test_init.py b/tests/components/map/test_init.py deleted file mode 100644 index 217550852bd..00000000000 --- a/tests/components/map/test_init.py +++ /dev/null @@ -1,118 +0,0 @@ -"""Test the Map initialization.""" - -from collections.abc import Generator -from typing import Any -from unittest.mock import MagicMock, patch - -import pytest - -from homeassistant.components.map import DOMAIN -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from tests.common import MockModule, mock_integration - - -@pytest.fixture -def mock_onboarding_not_done() -> Generator[MagicMock]: - """Mock that Home Assistant is currently onboarding.""" - with patch( - "homeassistant.components.onboarding.async_is_onboarded", - return_value=False, - ) as mock_onboarding: - yield mock_onboarding - - -@pytest.fixture -def mock_onboarding_done() -> Generator[MagicMock]: - """Mock that Home Assistant is currently onboarding.""" - with patch( - "homeassistant.components.onboarding.async_is_onboarded", - return_value=True, - ) as mock_onboarding: - yield mock_onboarding - - -@pytest.fixture -def mock_create_map_dashboard() -> Generator[MagicMock]: - """Mock the create map dashboard function.""" - with patch( - "homeassistant.components.map._create_map_dashboard", - ) as mock_create_map_dashboard: - yield mock_create_map_dashboard - - -async def test_create_dashboards_when_onboarded( - hass: HomeAssistant, - hass_storage: dict[str, Any], - mock_onboarding_done, - mock_create_map_dashboard, -) -> None: - """Test we create map dashboard when onboarded.""" - # Mock the lovelace integration to prevent it from creating a map dashboard - mock_integration(hass, MockModule("lovelace")) - - assert await async_setup_component(hass, DOMAIN, {}) - - mock_create_map_dashboard.assert_called_once() - assert hass_storage[DOMAIN]["data"] == {"migrated": True} - - -async def 
test_create_dashboards_once_when_onboarded( - hass: HomeAssistant, - hass_storage: dict[str, Any], - mock_onboarding_done, - mock_create_map_dashboard, -) -> None: - """Test we create map dashboard once when onboarded.""" - hass_storage[DOMAIN] = { - "version": 1, - "minor_version": 1, - "key": "map", - "data": {"migrated": True}, - } - - # Mock the lovelace integration to prevent it from creating a map dashboard - mock_integration(hass, MockModule("lovelace")) - - assert await async_setup_component(hass, DOMAIN, {}) - - mock_create_map_dashboard.assert_not_called() - assert hass_storage[DOMAIN]["data"] == {"migrated": True} - - -async def test_create_dashboards_when_not_onboarded( - hass: HomeAssistant, - hass_storage: dict[str, Any], - mock_onboarding_not_done, - mock_create_map_dashboard, -) -> None: - """Test we do not create map dashboard when not onboarded.""" - # Mock the lovelace integration to prevent it from creating a map dashboard - mock_integration(hass, MockModule("lovelace")) - - assert await async_setup_component(hass, DOMAIN, {}) - - mock_create_map_dashboard.assert_not_called() - assert hass_storage[DOMAIN]["data"] == {"migrated": True} - - -async def test_create_issue_when_not_manually_configured( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test creating issue registry issues.""" - assert await async_setup_component(hass, DOMAIN, {}) - - assert not issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_map" - ) - - -async def test_create_issue_when_manually_configured( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test creating issue registry issues.""" - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) - - assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, "deprecated_yaml_map") diff --git a/tests/components/mastodon/conftest.py b/tests/components/mastodon/conftest.py index c64de44d496..ac23141be55 100644 --- a/tests/components/mastodon/conftest.py +++ b/tests/components/mastodon/conftest.py @@ -1,7 +1,7 @@ """Mastodon tests configuration.""" from collections.abc import Generator -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -9,7 +9,6 @@ from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET from tests.common import MockConfigEntry, load_json_object_fixture -from tests.components.smhi.common import AsyncMock @pytest.fixture diff --git a/tests/components/mastodon/test_config_flow.py b/tests/components/mastodon/test_config_flow.py index 073a6534d7d..33f73812348 100644 --- a/tests/components/mastodon/test_config_flow.py +++ b/tests/components/mastodon/test_config_flow.py @@ -47,6 +47,39 @@ async def test_full_flow( assert result["result"].unique_id == "trwnh_mastodon_social" +async def test_full_flow_with_path( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test full flow, where a path is accidentally specified.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social/home", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + assert result["type"] 
is FlowResultType.CREATE_ENTRY + assert result["title"] == "@trwnh@mastodon.social" + assert result["data"] == { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + } + assert result["result"].unique_id == "trwnh_mastodon_social" + + @pytest.mark.parametrize( ("exception", "error"), [ diff --git a/tests/components/matrix/conftest.py b/tests/components/matrix/conftest.py index 0b84aff5434..f0f16787f77 100644 --- a/tests/components/matrix/conftest.py +++ b/tests/components/matrix/conftest.py @@ -267,7 +267,9 @@ def mock_load_json(): @pytest.fixture def mock_allowed_path(): """Allow using NamedTemporaryFile for mock image.""" - with patch("homeassistant.core.Config.is_allowed_path", return_value=True) as mock: + with patch( + "homeassistant.core_config.Config.is_allowed_path", return_value=True + ) as mock: yield mock diff --git a/tests/components/matter/common.py b/tests/components/matter/common.py index 541f7383f1d..519b4c4027d 100644 --- a/tests/components/matter/common.py +++ b/tests/components/matter/common.py @@ -10,8 +10,11 @@ from unittest.mock import MagicMock from matter_server.client.models.node import MatterNode from matter_server.common.helpers.util import dataclass_from_dict from matter_server.common.models import EventType, MatterNodeData +from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, load_fixture @@ -34,15 +37,7 @@ async def setup_integration_with_node_fixture( override_attributes: dict[str, Any] | None = None, ) -> MatterNode: """Set up Matter integration with fixture as node.""" - node_data = load_and_parse_node_fixture(node_fixture) - if override_attributes: - node_data["attributes"].update(override_attributes) - node = MatterNode( - dataclass_from_dict( - MatterNodeData, - node_data, - ) - ) + node = create_node_from_fixture(node_fixture, override_attributes) client.get_nodes.return_value = [node] client.get_node.return_value = node config_entry = MockConfigEntry( @@ -56,6 +51,21 @@ async def setup_integration_with_node_fixture( return node +def create_node_from_fixture( + node_fixture: str, override_attributes: dict[str, Any] | None = None +) -> MatterNode: + """Create a node from a fixture.""" + node_data = load_and_parse_node_fixture(node_fixture) + if override_attributes: + node_data["attributes"].update(override_attributes) + return MatterNode( + dataclass_from_dict( + MatterNodeData, + node_data, + ) + ) + + def set_node_attribute( node: MatterNode, endpoint: int, @@ -82,3 +92,17 @@ async def trigger_subscription_callback( if event_filter in (None, event): callback(event, data) await hass.async_block_till_done() + + +def snapshot_matter_entities( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + platform: Platform, +) -> None: + """Snapshot Matter entities.""" + entities = hass.states.async_all(platform) + for entity_state in entities: + entity_entry = entity_registry.async_get(entity_state.entity_id) + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert entity_state == snapshot(name=f"{entity_entry.entity_id}-state") diff --git a/tests/components/matter/conftest.py b/tests/components/matter/conftest.py index b4af00a0b47..bbafec48e10 100644 --- a/tests/components/matter/conftest.py +++ b/tests/components/matter/conftest.py 
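The snapshot_matter_entities helper added to tests/components/matter/common.py above is intended to be called from per-platform snapshot tests. A hedged sketch of how such a test could look; the matter_devices fixture referenced here is the parametrized fixture introduced in the conftest.py changes that follow, while this particular test function is illustrative and not part of this diff:

# Sketch only: a hypothetical platform snapshot test built on the new helper.
import pytest
from syrupy import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .common import snapshot_matter_entities


@pytest.mark.usefixtures("matter_devices")
async def test_sensors(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    snapshot: SnapshotAssertion,
) -> None:
    """Snapshot the sensor entities created for each parametrized node fixture."""
    snapshot_matter_entities(hass, entity_registry, snapshot, Platform.SENSOR)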
@@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from matter_server.client.models.node import MatterNode @@ -70,29 +71,73 @@ async def integration_fixture( return entry -@pytest.fixture(name="door_lock") -async def door_lock_fixture( - hass: HomeAssistant, matter_client: MagicMock +@pytest.fixture( + params=[ + "air_purifier", + "air_quality_sensor", + "color_temperature_light", + "dimmable_light", + "dimmable_plugin_unit", + "door_lock", + "door_lock_with_unbolt", + "eve_contact_sensor", + "eve_energy_plug", + "eve_energy_plug_patched", + "eve_thermo", + "eve_weather_sensor", + "extended_color_light", + "fan", + "flow_sensor", + "generic_switch", + "generic_switch_multi", + "humidity_sensor", + "leak_sensor", + "light_sensor", + "microwave_oven", + "multi_endpoint_light", + "occupancy_sensor", + "on_off_plugin_unit", + "onoff_light", + "onoff_light_alt_name", + "onoff_light_no_name", + "onoff_light_with_levelcontrol_present", + "pressure_sensor", + "room_airconditioner", + "silabs_dishwasher", + "smoke_detector", + "switch_unit", + "temperature_sensor", + "thermostat", + "vacuum_cleaner", + "valve", + "window_covering_full", + "window_covering_lift", + "window_covering_pa_lift", + "window_covering_pa_tilt", + "window_covering_tilt", + ] +) +async def matter_devices( + hass: HomeAssistant, matter_client: MagicMock, request: pytest.FixtureRequest ) -> MatterNode: - """Fixture for a door lock node.""" - return await setup_integration_with_node_fixture(hass, "door-lock", matter_client) + """Fixture for a Matter device.""" + return await setup_integration_with_node_fixture(hass, request.param, matter_client) -@pytest.fixture(name="door_lock_with_unbolt") -async def door_lock_with_unbolt_fixture( - hass: HomeAssistant, matter_client: MagicMock +@pytest.fixture +def attributes() -> dict[str, Any]: + """Return common attributes for all nodes.""" + return {} + + +@pytest.fixture +async def matter_node( + hass: HomeAssistant, + matter_client: MagicMock, + node_fixture: str, + attributes: dict[str, Any], ) -> MatterNode: - """Fixture for a door lock node with unbolt feature.""" + """Fixture for a Matter node.""" return await setup_integration_with_node_fixture( - hass, "door-lock-with-unbolt", matter_client - ) - - -@pytest.fixture(name="eve_contact_sensor_node") -async def eve_contact_sensor_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a contact sensor node.""" - return await setup_integration_with_node_fixture( - hass, "eve-contact-sensor", matter_client + hass, node_fixture, matter_client, attributes ) diff --git a/tests/components/matter/fixtures/config_entry_diagnostics.json b/tests/components/matter/fixtures/config_entry_diagnostics.json index 000b0d4e2e6..8cc9d068caf 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics.json @@ -647,7 +647,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, diff --git a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json index 95447783bbc..28c93de5e11 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json +++ 
b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json @@ -460,7 +460,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, diff --git a/tests/components/matter/fixtures/nodes/air-purifier.json b/tests/components/matter/fixtures/nodes/air_purifier.json similarity index 100% rename from tests/components/matter/fixtures/nodes/air-purifier.json rename to tests/components/matter/fixtures/nodes/air_purifier.json diff --git a/tests/components/matter/fixtures/nodes/air-quality-sensor.json b/tests/components/matter/fixtures/nodes/air_quality_sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/air-quality-sensor.json rename to tests/components/matter/fixtures/nodes/air_quality_sensor.json diff --git a/tests/components/matter/fixtures/nodes/color-temperature-light.json b/tests/components/matter/fixtures/nodes/color_temperature_light.json similarity index 100% rename from tests/components/matter/fixtures/nodes/color-temperature-light.json rename to tests/components/matter/fixtures/nodes/color_temperature_light.json diff --git a/tests/components/matter/fixtures/nodes/device_diagnostics.json b/tests/components/matter/fixtures/nodes/device_diagnostics.json index 1d1d450e1f0..5600a7e801b 100644 --- a/tests/components/matter/fixtures/nodes/device_diagnostics.json +++ b/tests/components/matter/fixtures/nodes/device_diagnostics.json @@ -444,7 +444,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, diff --git a/tests/components/matter/fixtures/nodes/dimmable-light.json b/tests/components/matter/fixtures/nodes/dimmable_light.json similarity index 98% rename from tests/components/matter/fixtures/nodes/dimmable-light.json rename to tests/components/matter/fixtures/nodes/dimmable_light.json index 58c22f1b807..f8a3b28fb9e 100644 --- a/tests/components/matter/fixtures/nodes/dimmable-light.json +++ b/tests/components/matter/fixtures/nodes/dimmable_light.json @@ -305,13 +305,6 @@ "0/65/65528": [], "0/65/65529": [], "0/65/65531": [0, 65528, 65529, 65531, 65532, 65533], - "1/3/0": 0, - "1/3/1": 0, - "1/3/65532": 0, - "1/3/65533": 4, - "1/3/65528": [], - "1/3/65529": [0, 64], - "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/4/0": 128, "1/4/65532": 1, "1/4/65533": 4, diff --git a/tests/components/matter/fixtures/nodes/dimmable-plugin-unit.json b/tests/components/matter/fixtures/nodes/dimmable_plugin_unit.json similarity index 100% rename from tests/components/matter/fixtures/nodes/dimmable-plugin-unit.json rename to tests/components/matter/fixtures/nodes/dimmable_plugin_unit.json diff --git a/tests/components/matter/fixtures/nodes/door-lock.json b/tests/components/matter/fixtures/nodes/door_lock.json similarity index 99% rename from tests/components/matter/fixtures/nodes/door-lock.json rename to tests/components/matter/fixtures/nodes/door_lock.json index b6231e04af4..acd327ac56c 100644 --- a/tests/components/matter/fixtures/nodes/door-lock.json +++ b/tests/components/matter/fixtures/nodes/door_lock.json @@ -495,7 +495,7 @@ "1/257/48": 3, "1/257/49": 10, "1/257/51": false, - "1/257/65532": 3507, + "1/257/65532": 0, "1/257/65533": 6, "1/257/65528": [12, 15, 18, 28, 35, 37], "1/257/65529": [ diff --git a/tests/components/matter/fixtures/nodes/door-lock-with-unbolt.json b/tests/components/matter/fixtures/nodes/door_lock_with_unbolt.json similarity 
index 100% rename from tests/components/matter/fixtures/nodes/door-lock-with-unbolt.json rename to tests/components/matter/fixtures/nodes/door_lock_with_unbolt.json diff --git a/tests/components/matter/fixtures/nodes/eve-contact-sensor.json b/tests/components/matter/fixtures/nodes/eve_contact_sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/eve-contact-sensor.json rename to tests/components/matter/fixtures/nodes/eve_contact_sensor.json diff --git a/tests/components/matter/fixtures/nodes/eve-energy-plug.json b/tests/components/matter/fixtures/nodes/eve_energy_plug.json similarity index 100% rename from tests/components/matter/fixtures/nodes/eve-energy-plug.json rename to tests/components/matter/fixtures/nodes/eve_energy_plug.json diff --git a/tests/components/matter/fixtures/nodes/eve_energy_plug_patched.json b/tests/components/matter/fixtures/nodes/eve_energy_plug_patched.json new file mode 100644 index 00000000000..18c4a8c68ef --- /dev/null +++ b/tests/components/matter/fixtures/nodes/eve_energy_plug_patched.json @@ -0,0 +1,396 @@ +{ + "node_id": 183, + "date_commissioned": "2023-11-30T14:39:37.020026", + "last_interview": "2023-11-30T14:39:37.020029", + "interview_version": 5, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 42, 48, 49, 51, 53, 60, 62, 63], + "0/29/2": [41], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 1, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "254": 1 + }, + { + "254": 2 + }, + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 5 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 3, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 1, + "0/40/1": "Eve Systems", + "0/40/2": 4874, + "0/40/3": "Eve Energy Plug Patched", + "0/40/4": 80, + "0/40/5": "", + "0/40/6": "XX", + "0/40/7": 1, + "0/40/8": "1.3", + "0/40/9": 6650, + "0/40/10": "3.2.1", + "0/40/15": "RV44L221A00081", + "0/40/18": "26E822F90561D17C42", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/65532": 0, + "0/40/65533": 1, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 18, 19, 65528, 65529, 65531, 65532, + 65533 + ], + "0/42/0": [ + { + "1": 2312386028615903905, + "2": 0, + "254": 1 + } + ], + "0/42/1": true, + "0/42/2": 1, + "0/42/3": null, + "0/42/65532": 0, + "0/42/65533": 1, + "0/42/65528": [], + "0/42/65529": [0], + "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "cfUKbvsdfsBjT+0=", + "1": true + } + ], + "0/49/2": 10, + "0/49/3": 20, + "0/49/4": true, + "0/49/5": 0, + "0/49/6": "cfUKbvBjdsffwT+0=", + "0/49/7": null, + "0/49/65532": 2, + "0/49/65533": 1, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 3, 4, 6, 8], + "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], + "0/51/0": [], + "0/51/1": 95, + "0/51/2": 268574, + "0/51/3": 4406, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 1, + "0/51/65528": [], + "0/51/65529": [0], + 
"0/51/65531": [0, 1, 2, 3, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533], + "0/53/0": 25, + "0/53/1": 5, + "0/53/2": "MyHome23", + "0/53/3": 14707, + "0/53/4": 8211480967175688173, + "0/53/5": "aabbccdd", + "0/53/6": 0, + "0/53/7": [], + "0/53/8": [], + "0/53/9": 1828774034, + "0/53/10": 68, + "0/53/11": 237, + "0/53/12": 170, + "0/53/13": 23, + "0/53/14": 2, + "0/53/15": 1, + "0/53/16": 2, + "0/53/17": 0, + "0/53/18": 0, + "0/53/19": 2, + "0/53/20": 0, + "0/53/21": 0, + "0/53/22": 293884, + "0/53/23": 278934, + "0/53/24": 14950, + "0/53/25": 278894, + "0/53/26": 278468, + "0/53/27": 14990, + "0/53/28": 293844, + "0/53/29": 0, + "0/53/30": 40, + "0/53/31": 0, + "0/53/32": 0, + "0/53/33": 65244, + "0/53/34": 426, + "0/53/35": 0, + "0/53/36": 87, + "0/53/37": 0, + "0/53/38": 0, + "0/53/39": 6687540, + "0/53/40": 142626, + "0/53/41": 106835, + "0/53/42": 246171, + "0/53/43": 0, + "0/53/44": 541, + "0/53/45": 40, + "0/53/46": 0, + "0/53/47": 0, + "0/53/48": 6360718, + "0/53/49": 2141, + "0/53/50": 35259, + "0/53/51": 4374, + "0/53/52": 0, + "0/53/53": 568, + "0/53/54": 18599, + "0/53/55": 19143, + "0/53/59": { + "0": 672, + "1": 8335 + }, + "0/53/60": "AB//wA==", + "0/53/61": { + "0": true, + "1": false, + "2": true, + "3": true, + "4": true, + "5": true, + "6": false, + "7": true, + "8": true, + "9": true, + "10": true, + "11": true + }, + "0/53/62": [0, 0, 0, 0], + "0/53/65532": 15, + "0/53/65533": 1, + "0/53/65528": [], + "0/53/65529": [0], + "0/53/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 59, + 60, 61, 62, 65528, 65529, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 5, + "0/62/3": 3, + "0/62/4": [ + "FTABAQAkAgE3AycUxofpv3kE1HwkFQEYJgS2Ty8rJgU2gxAtNwYnFMaH6b95BNR8JBUBGCQHASQIATAJQQSG0eCLvAjSHcSkZEo029SymN58wmxVcA645EXuFg6KwojGRyZsqWVtuMAYAB8TaPA9NEFsNvZZbvBR9XjrZhyKNwo1ASkBGCQCYDAEFNnFRJ+9qQIJtsM+LRdMdmCY3bQ4MAUU2cVEn72pAgm2wz4tF0x2YJjdtDgYMAtAFDv6Ouh7ugAGLiCjBQaEXCIAe0AkaaN8dBPskCZXOODjuZ1DCr4/f5IYg0rN2zFDUDTvG3GCxoI1+A7BvSjiNRg=", + "FTABAQAkAgE3AycUjuqR8vTQCmEkFQIYJgTFTy8rJgVFgxAtNwYnFI7qkfL00AphJBUCGCQHASQIATAJQQS5ZOLouMEkPsc/PYweZwUUFFWHWPR9nQVGsBl1VMWtm7CodpPAh4o79bZM9XU4T1wPVCvIzgGfuzIvsuwT7gHINwo1ASkBGCQCYDAEFKEEplpzAvCzsc5ga6CFmqmsv5onMAUUoQSmWnMC8LOxzmBroIWaqay/micYMAtAYkkA8OZFIGpxBEYYT+3A7Okba4WOq4NtwctIIZvCM48VU8pxQNjVvHMcJWPOP1Wh2Bw1VH7/Sg9lt9DL4DAwjBg=", + "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEECDlp5HtG4UpmG6QLEwaCUJ3TR0qWHEarwFuN7JkKUrPmQ3Zi3Nq/TFayJYQRvez268whgWhBhQudIm84xNwPXjcKNQEpARgkAmAwBBTJ3+WZAQkWgZboUpiyZL3FV8R8UzAFFMnf5ZkBCRaBluhSmLJkvcVXxHxTGDALQO9QSAdvJkM6b/wIc07MCw1ma46lTyGYG8nvpn0ICI73nuD3QeaWwGIQTkVGEpzF+TuDK7gtTz7YUrR+PSnvMk8Y" + ], + "0/62/5": 5, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 3, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 1, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 2, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": 
[0], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/4/0": 128, + "1/4/65532": 1, + "1/4/65533": 4, + "1/4/65528": [0, 1, 2, 3], + "1/4/65529": [0, 1, 2, 3, 4, 5], + "1/4/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/6/0": false, + "1/6/16384": true, + "1/6/16385": 0, + "1/6/16386": 0, + "1/6/16387": null, + "1/6/65532": 1, + "1/6/65533": 4, + "1/6/65528": [], + "1/6/65529": [0, 1, 2, 64, 65, 66], + "1/6/65531": [ + 0, 16384, 16385, 16386, 16387, 65528, 65529, 65531, 65532, 65533 + ], + "1/29/0": [ + { + "0": 266, + "1": 1 + } + ], + "1/29/1": [3, 4, 6, 29, 319486977], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 1, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/319486977/319422464": "AAFQCwIAAAMC+xkEDFJWNDRMMUEwMDA4MZwBAP8EAQIA1PkBAWABZNAEAAAAAEUFBQAAAABGCQUAAAAOAABCBkkGBQwIEIABRBEFFAAFAzwAAAAAAAAAAAAAAEcRBSoh/CGWImgjeAAAADwAAABIBgUAAAAAAEoGBQAAAAAA", + "1/319486977/319422466": "BEZiAQAAAAAAAAAABgsCDAINAgcCDgEBAn4PABAAWgAAs8c+AQEA", + "1/319486977/319422467": "EgtaAAB74T4BDwAANwkAAAAA", + "1/319486977/319422471": 0, + "1/319486977/319422472": 238.8000030517578, + "1/319486977/319422473": 0.0, + "1/319486977/319422474": 0.0, + "1/319486977/319422475": 0.2200000286102295, + "1/319486977/319422476": 0, + "1/319486977/319422478": 0, + "1/319486977/319422481": false, + "1/319486977/319422482": 54272, + "1/319486977/65533": 1, + "1/319486977/65528": [], + "1/319486977/65529": [], + "1/319486977/65531": [ + 65528, 65529, 65531, 319422464, 319422465, 319422466, 319422467, + 319422468, 319422469, 319422471, 319422472, 319422473, 319422474, + 319422475, 319422476, 319422478, 319422481, 319422482, 65533 + ], + "2/29/0": [ + { + "0": 1296, + "1": 1 + } + ], + "2/29/1": [3, 29, 144, 145, 156], + "2/29/2": [], + "2/29/3": [], + "2/29/65532": 0, + "2/29/65533": 2, + "2/29/65528": [], + "2/29/65529": [], + "2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "2/144/0": 2, + "2/144/1": 3, + "2/144/2": [ + { + "0": 1, + "1": true, + "2": 0, + "3": 100, + "4": [ + { + "0": 0, + "1": 4611686018427387904 + } + ] + }, + { + "0": 2, + "1": true, + "2": 0, + "3": 100, + "4": [ + { + "0": 0, + "1": 4611686018427387904 + } + ] + }, + { + "0": 5, + "1": true, + "2": 0, + "3": 100, + "4": [ + { + "0": 0, + "1": 4611686018427387904 + } + ] + } + ], + "2/144/4": 220000, + "2/144/5": 2000, + "2/144/8": 550000, + "2/144/65533": 1, + "2/144/65532": 2, + "2/144/65531": [0, 1, 2, 4, 5, 8, 65528, 65529, 65530, 65531, 65532, 65533], + "2/144/65530": [], + "2/144/65529": [], + "2/144/65528": [], + "2/145/0": { + "0": 14, + "1": true, + "2": 0, + "3": 0, + "4": [ + { + "0": 0, + "1": 4611686018427387904 + } + ] + }, + "2/145/65533": 1, + "2/145/65532": 7, + "2/145/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], + "2/145/65530": [0], + "2/145/65529": [], + "2/145/65528": [], + "2/145/1": { + "0": 2500 + }, + "2/145/2": null + }, + "attribute_subscriptions": [], + "last_subscription_attempt": 0 +} diff --git a/tests/components/matter/fixtures/nodes/eve_thermo.json b/tests/components/matter/fixtures/nodes/eve_thermo.json new file mode 100644 index 00000000000..e00b55d2cfc --- /dev/null +++ b/tests/components/matter/fixtures/nodes/eve_thermo.json @@ -0,0 +1,406 @@ +{ + "node_id": 33, + "date_commissioned": "2024-09-11T05:47:53.888591", + "last_interview": "2024-09-11T05:48:45.828762", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 18, + 
"1": 1 + }, + { + "0": 17, + "1": 1 + }, + { + "0": 22, + "1": 2 + } + ], + "0/29/1": [29, 31, 40, 42, 47, 48, 49, 50, 51, 52, 53, 56, 60, 62, 63, 70], + "0/29/2": [41], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "254": 1 + }, + { + "254": 1 + }, + { + "254": 2 + }, + { + "254": 3 + }, + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 4 + } + ], + "0/31/1": [], + "0/31/2": 10, + "0/31/3": 3, + "0/31/4": 5, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "Eve Systems", + "0/40/2": 4874, + "0/40/3": "Eve Thermo", + "0/40/4": 79, + "0/40/5": "", + "0/40/6": "**REDACTED**", + "0/40/7": 1, + "0/40/8": "1.1", + "0/40/9": 9217, + "0/40/10": "3.5.0", + "0/40/15": "**REDACTED**", + "0/40/18": "**REDACTED**", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/21": 16973824, + "0/40/22": 1, + "0/40/65532": 0, + "0/40/65533": 3, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 18, 19, 21, 22, 65528, 65529, 65531, + 65532, 65533 + ], + "0/42/0": [ + { + "1": 556220604, + "2": 0, + "254": 1 + } + ], + "0/42/1": true, + "0/42/2": 1, + "0/42/3": null, + "0/42/65532": 0, + "0/42/65533": 1, + "0/42/65528": [], + "0/42/65529": [0], + "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/47/0": 1, + "0/47/1": 0, + "0/47/2": "Battery", + "0/47/11": 3050, + "0/47/12": 200, + "0/47/14": 0, + "0/47/15": false, + "0/47/16": 2, + "0/47/18": [], + "0/47/19": "", + "0/47/25": 1, + "0/47/31": [], + "0/47/65532": 10, + "0/47/65533": 2, + "0/47/65528": [], + "0/47/65529": [], + "0/47/65531": [ + 0, 1, 2, 11, 12, 14, 15, 16, 18, 19, 25, 31, 65528, 65529, 65531, 65532, + 65533 + ], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [], + "0/49/2": 10, + "0/49/3": 20, + "0/49/4": true, + "0/49/5": 0, + "0/49/6": "**REDACTED**", + "0/49/7": null, + "0/49/9": 4, + "0/49/10": 4, + "0/49/65532": 2, + "0/49/65533": 2, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 3, 4, 6, 8], + "0/49/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 65528, 65529, 65531, 65532, 65533 + ], + "0/50/65532": 0, + "0/50/65533": 1, + "0/50/65528": [1], + "0/50/65529": [0], + "0/50/65531": [65528, 65529, 65531, 65532, 65533], + "0/51/0": [], + "0/51/1": 2, + "0/51/2": 306352, + "0/51/3": 85, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 2, + "0/51/65528": [2], + "0/51/65529": [0, 1], + "0/51/65531": [0, 1, 2, 3, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533], + "0/52/1": 10168, + "0/52/2": 1948, + "0/52/65532": 0, + "0/52/65533": 1, + "0/52/65528": [], + "0/52/65529": [], + "0/52/65531": [1, 2, 65528, 65529, 65531, 65532, 65533], + "0/53/0": 25, + "0/53/1": 2, + "0/53/2": "**REDACTED**", + "0/53/3": 4660, + "0/53/4": 12054125955590472924, + "0/53/5": "**REDACTED**", + "0/53/6": 0, + "0/53/7": [], + "0/53/8": [], + "0/53/9": 867525816, + "0/53/10": 68, + "0/53/11": 127, + "0/53/12": 197, + "0/53/13": 17, + "0/53/14": 4, + "0/53/15": 4, + "0/53/16": 0, + "0/53/17": 0, + "0/53/18": 13, + "0/53/19": 3, + "0/53/20": 0, + "0/53/21": 3, + 
"0/53/22": 167566, + "0/53/23": 167438, + "0/53/24": 128, + "0/53/25": 167438, + "0/53/26": 167326, + "0/53/27": 128, + "0/53/28": 14672, + "0/53/29": 152900, + "0/53/30": 0, + "0/53/31": 0, + "0/53/32": 0, + "0/53/33": 30814, + "0/53/34": 63, + "0/53/35": 0, + "0/53/36": 37, + "0/53/37": 0, + "0/53/38": 0, + "0/53/39": 16473, + "0/53/40": 7569, + "0/53/41": 23, + "0/53/42": 7273, + "0/53/43": 0, + "0/53/44": 0, + "0/53/45": 0, + "0/53/46": 0, + "0/53/47": 0, + "0/53/48": 6541, + "0/53/49": 319, + "0/53/50": 105, + "0/53/51": 1500, + "0/53/52": 0, + "0/53/53": 0, + "0/53/54": 681, + "0/53/55": 54, + "0/53/59": { + "0": 672, + "1": 8335 + }, + "0/53/60": "AB//4A==", + "0/53/61": { + "0": true, + "1": false, + "2": true, + "3": true, + "4": true, + "5": true, + "6": false, + "7": true, + "8": true, + "9": true, + "10": true, + "11": true + }, + "0/53/62": [], + "0/53/65532": 15, + "0/53/65533": 2, + "0/53/65528": [], + "0/53/65529": [0], + "0/53/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 59, + 60, 61, 62, 65528, 65529, 65531, 65532, 65533 + ], + "0/56/0": 779348920474853, + "0/56/1": 4, + "0/56/2": 2, + "0/56/3": null, + "0/56/5": [ + { + "0": 3600, + "1": 0, + "2": "Europe/Paris" + } + ], + "0/56/6": [ + { + "0": 3600, + "1": 0, + "2": 783306000000000 + }, + { + "0": 0, + "1": 783306000000000, + "2": 796611600000000 + } + ], + "0/56/7": 779356121143951, + "0/56/8": 2, + "0/56/10": 2, + "0/56/11": 2, + "0/56/65532": 9, + "0/56/65533": 2, + "0/56/65528": [3], + "0/56/65529": [0, 1, 2, 4], + "0/56/65531": [ + 0, 1, 2, 3, 5, 6, 7, 8, 10, 11, 65528, 65529, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 1, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 5, + "0/62/3": 4, + "0/62/4": [], + "0/62/5": 4, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/70/0": 120, + "0/70/1": 300, + "0/70/2": 2000, + "0/70/65532": 0, + "0/70/65533": 2, + "0/70/65528": [], + "0/70/65529": [], + "0/70/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 4, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 769, + "1": 3 + } + ], + "1/29/1": [3, 29, 30, 513, 516, 319486977], + "1/29/2": [1026], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/30/0": [], + "1/30/65532": 0, + "1/30/65533": 1, + "1/30/65528": [], + "1/30/65529": [], + "1/30/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/513/0": 2100, + "1/513/3": 1000, + "1/513/4": 3000, + "1/513/16": 0, + "1/513/18": 1700, + "1/513/21": 1000, + "1/513/22": 3000, + "1/513/26": 0, + "1/513/27": 2, + "1/513/28": 4, + "1/513/65532": 1, + "1/513/65533": 6, + "1/513/65528": [], + "1/513/65529": [0], + "1/513/65531": [ + 0, 3, 
4, 16, 18, 21, 22, 26, 27, 28, 65528, 65529, 65531, 65532, 65533 + ], + "1/516/0": 0, + "1/516/1": 0, + "1/516/2": 0, + "1/516/65532": 0, + "1/516/65533": 2, + "1/516/65528": [], + "1/516/65529": [], + "1/516/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "1/319486977/319422464": "AAFPCwIAAAMCEyQEDENNMzRNMUE0NzgxNZwBAP8EAQIIMPkBAR0BAD4AOwhTVEVHVDIxMjwBADcBAD8BACYBAScBHk8GAAAgICoq/wMjAQBFDQUCAAAAAAACAYk0BaVGVAXKISyfJEkCAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEkGBQwIEIABRBEFHAAFAzwAAADhKT5Ch1orv0cRBSoh/CGWImgjtAAAADwAAABIBgUAAAAAAEoGBQAAAAAA/ygiCRABAAAAAAAAAAIb0XT3kNTbRpuy/pzwUAklhFBhciBkw6lmYXV0", + "1/319486977/319422466": "xqwEAFjkAwBNnpAsBgECEQIQARIBHQEjAgwCABAAAAAAEQAAAAEAAA==", + "1/319486977/319422467": "EwoCAAC8rAQAPwIIKAoUAQADDAwLAgAAvKwEACDqCw==", + "1/319486977/319422476": 0, + "1/319486977/319422482": 12296, + "1/319486977/319422487": false, + "1/319486977/319422488": 10, + "1/319486977/319422489": 30240, + "1/319486977/319422490": 0, + "1/319486977/65532": 0, + "1/319486977/65533": 1, + "1/319486977/65528": [], + "1/319486977/65529": [319422464], + "1/319486977/65531": [ + 65528, 65529, 65531, 319422464, 319422465, 319422466, 319422467, + 319422468, 319422469, 319422476, 319422482, 319422487, 319422488, + 319422489, 319422490, 65532, 65533 + ] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/fixtures/nodes/eve_weather_sensor.json b/tests/components/matter/fixtures/nodes/eve_weather_sensor.json new file mode 100644 index 00000000000..dacba8d336b --- /dev/null +++ b/tests/components/matter/fixtures/nodes/eve_weather_sensor.json @@ -0,0 +1,322 @@ +{ + "node_id": 29, + "date_commissioned": "2024-09-10T13:34:48.252332", + "last_interview": "2024-09-10T13:34:48.252334", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 42, 47, 48, 49, 51, 53, 60, 62, 63], + "0/29/2": [41], + "0/29/3": [1, 2], + "0/29/65532": 0, + "0/29/65533": 1, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 4 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 1, + "0/40/1": "Eve Systems", + "0/40/2": 4874, + "0/40/3": "Eve Weather", + "0/40/4": 87, + "0/40/5": "", + "0/40/6": "**REDACTED**", + "0/40/7": 1, + "0/40/8": "1.1", + "0/40/9": 7143, + "0/40/10": "3.3.0", + "0/40/15": "**REDACTED**", + "0/40/18": "**REDACTED**", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/65532": 0, + "0/40/65533": 1, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 18, 19, 65528, 65529, 65531, 65532, + 65533 + ], + "0/42/0": [], + "0/42/1": true, + "0/42/2": 1, + "0/42/3": null, + "0/42/65532": 0, + "0/42/65533": 1, + "0/42/65528": [], + "0/42/65529": [0], + "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/47/0": 1, + "0/47/1": 0, + "0/47/2": "Battery", + "0/47/11": 2956, + "0/47/12": 200, + "0/47/14": 0, + "0/47/15": false, + "0/47/16": 2, + "0/47/18": [], + "0/47/19": "", + "0/47/25": 1, + "0/47/65532": 10, + "0/47/65533": 1, + "0/47/65528": [], + "0/47/65529": [], + "0/47/65531": [ + 0, 1, 2, 11, 12, 14, 15, 16, 18, 19, 25, 65528, 65529, 65531, 65532, 65533 
+ ], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [], + "0/49/2": 10, + "0/49/3": 20, + "0/49/4": true, + "0/49/5": 0, + "0/49/6": "**REDACTED**", + "0/49/7": null, + "0/49/65532": 2, + "0/49/65533": 1, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 3, 4, 6, 8], + "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], + "0/51/0": [], + "0/51/1": 1, + "0/51/2": 3416207, + "0/51/3": 948, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 1, + "0/51/65528": [], + "0/51/65529": [0], + "0/51/65531": [0, 1, 2, 3, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533], + "0/53/0": 25, + "0/53/1": 2, + "0/53/2": "**REDACTED**", + "0/53/3": 4660, + "0/53/4": 12054125955590472924, + "0/53/5": "**REDACTED**", + "0/53/6": 0, + "0/53/7": [], + "0/53/8": [], + "0/53/9": 867525816, + "0/53/10": 68, + "0/53/11": 127, + "0/53/12": 197, + "0/53/13": 17, + "0/53/14": 244, + "0/53/15": 243, + "0/53/16": 0, + "0/53/17": 0, + "0/53/18": 334, + "0/53/19": 6, + "0/53/20": 0, + "0/53/21": 221, + "0/53/22": 1814103, + "0/53/23": 1812208, + "0/53/24": 1895, + "0/53/25": 1812220, + "0/53/26": 1806871, + "0/53/27": 1895, + "0/53/28": 144123, + "0/53/29": 1670020, + "0/53/30": 0, + "0/53/31": 0, + "0/53/32": 0, + "0/53/33": 515245, + "0/53/34": 1061, + "0/53/35": 0, + "0/53/36": 25, + "0/53/37": 0, + "0/53/38": 0, + "0/53/39": 310675, + "0/53/40": 180775, + "0/53/41": 783, + "0/53/42": 171240, + "0/53/43": 0, + "0/53/44": 4, + "0/53/45": 0, + "0/53/46": 0, + "0/53/47": 0, + "0/53/48": 110041, + "0/53/49": 10200, + "0/53/50": 818, + "0/53/51": 11698, + "0/53/52": 0, + "0/53/53": 114, + "0/53/54": 6189, + "0/53/55": 371, + "0/53/59": { + "0": 672, + "1": 8335 + }, + "0/53/60": "AB//4A==", + "0/53/61": { + "0": true, + "1": false, + "2": true, + "3": true, + "4": true, + "5": true, + "6": false, + "7": true, + "8": true, + "9": true, + "10": true, + "11": true + }, + "0/53/62": [0, 0, 0, 0], + "0/53/65532": 15, + "0/53/65533": 1, + "0/53/65528": [], + "0/53/65529": [0], + "0/53/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 59, + 60, 61, 62, 65528, 65529, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 5, + "0/62/3": 4, + "0/62/4": [], + "0/62/5": 4, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 1, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 4, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 770, + "1": 2 + } + ], + "1/29/1": [3, 29, 1026, 319486977], + "1/29/2": [], + "1/29/3": [], + 
"1/29/65532": 0, + "1/29/65533": 1, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/1026/0": 1603, + "1/1026/1": -4000, + "1/1026/2": 8500, + "1/1026/65532": 0, + "1/1026/65533": 4, + "1/1026/65528": [], + "1/1026/65529": [], + "1/1026/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "1/319486977/319422464": "AAFXCwIAAAMC/xsEDFNWNDNMMUEwMzg2MJwBAP8EAQJsNPkBAR0BACUE9griHksEfgeAA1EBAA==", + "1/319486977/319422466": "Ps00AOODMwBqe48sBgECAgIDAicBLwEjAlAPABAABwAA6gERAAEA", + "1/319486977/319422467": "EiMTAACLYy0AH74Fwx88JwQOEiQTAADjZS0AH7wFzB87JwQOEiUTAAA7aC0AH7oF1B86JwQOEiYTAACTai0AH7kF5x86JwQOEicTAADrbC0AH7sF8B85JwQOEigTAABDby0AH7wFAiA4JwQOEikTAACbcS0AH7sFFCA3JwQOEioTAADzcy0AH7EFMiA1JwQOEisTAABLdi0AH6gFVyA0JwQOEiwTAACjeC0AH6gFaiAzJwQOEi0TAAD7ei0AH6YFfCAyJwQOEi4TAABTfS0AH6YFgCAzJwQOEi8TAACrfy0AH6MFhyA0JwQOEjATAAADgi0AH58FnSA1JwQOEjETAABbhC0AH58FtSA1JwQOEjITAACzhi0AH5wFwSA0JwQOEjMTAAALiS0AH5cF1SA0JwQOEjQTAABjiy0AH58F3yA0JwIGEjUTAAC7jS0AH6EF7yA0JwIGEjYTAAATkC0AH60F+yAzJwIGEjcTAABrki0AH68FAiEyJwIGEjgTAADDlC0AH7kFACEyJwIGEjkTAAAbly0AH8QF7SAyJwIGEjoTAABzmS0AH9QF1SAzJwIGEjsTAADLmy0AH98FvyAzJwIG", + "1/319486977/319422482": 13420, + "1/319486977/319422483": 40.0, + "1/319486977/319422484": 1008.5, + "1/319486977/319422485": 6, + "1/319486977/319422486": 0, + "1/319486977/65533": 1, + "1/319486977/65528": [], + "1/319486977/65529": [], + "1/319486977/65531": [ + 65528, 65529, 65531, 319422464, 319422465, 319422466, 319422467, + 319422468, 319422469, 319422482, 319422483, 319422484, 319422485, + 319422486, 65533 + ], + "2/3/0": 0, + "2/3/1": 4, + "2/3/65532": 0, + "2/3/65533": 4, + "2/3/65528": [], + "2/3/65529": [0], + "2/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "2/29/0": [ + { + "0": 775, + "1": 2 + } + ], + "2/29/1": [3, 29, 1029], + "2/29/2": [], + "2/29/3": [], + "2/29/65532": 0, + "2/29/65533": 1, + "2/29/65528": [], + "2/29/65529": [], + "2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "2/1029/0": 8066, + "2/1029/1": 0, + "2/1029/2": 10000, + "2/1029/65532": 0, + "2/1029/65533": 3, + "2/1029/65528": [], + "2/1029/65529": [], + "2/1029/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/fixtures/nodes/extended-color-light.json b/tests/components/matter/fixtures/nodes/extended_color_light.json similarity index 100% rename from tests/components/matter/fixtures/nodes/extended-color-light.json rename to tests/components/matter/fixtures/nodes/extended_color_light.json diff --git a/tests/components/matter/fixtures/nodes/flow-sensor.json b/tests/components/matter/fixtures/nodes/flow_sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/flow-sensor.json rename to tests/components/matter/fixtures/nodes/flow_sensor.json diff --git a/tests/components/matter/fixtures/nodes/generic-switch.json b/tests/components/matter/fixtures/nodes/generic_switch.json similarity index 100% rename from tests/components/matter/fixtures/nodes/generic-switch.json rename to tests/components/matter/fixtures/nodes/generic_switch.json diff --git a/tests/components/matter/fixtures/nodes/generic-switch-multi.json b/tests/components/matter/fixtures/nodes/generic_switch_multi.json similarity index 100% rename from tests/components/matter/fixtures/nodes/generic-switch-multi.json rename to tests/components/matter/fixtures/nodes/generic_switch_multi.json diff --git a/tests/components/matter/fixtures/nodes/humidity-sensor.json 
b/tests/components/matter/fixtures/nodes/humidity_sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/humidity-sensor.json rename to tests/components/matter/fixtures/nodes/humidity_sensor.json diff --git a/tests/components/matter/fixtures/nodes/leak-sensor.json b/tests/components/matter/fixtures/nodes/leak_sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/leak-sensor.json rename to tests/components/matter/fixtures/nodes/leak_sensor.json diff --git a/tests/components/matter/fixtures/nodes/light-sensor.json b/tests/components/matter/fixtures/nodes/light_sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/light-sensor.json rename to tests/components/matter/fixtures/nodes/light_sensor.json diff --git a/tests/components/matter/fixtures/nodes/microwave-oven.json b/tests/components/matter/fixtures/nodes/microwave_oven.json similarity index 100% rename from tests/components/matter/fixtures/nodes/microwave-oven.json rename to tests/components/matter/fixtures/nodes/microwave_oven.json diff --git a/tests/components/matter/fixtures/nodes/multi-endpoint-light.json b/tests/components/matter/fixtures/nodes/multi_endpoint_light.json similarity index 99% rename from tests/components/matter/fixtures/nodes/multi-endpoint-light.json rename to tests/components/matter/fixtures/nodes/multi_endpoint_light.json index e3a01da9e7c..3b9be24d9ab 100644 --- a/tests/components/matter/fixtures/nodes/multi-endpoint-light.json +++ b/tests/components/matter/fixtures/nodes/multi_endpoint_light.json @@ -1620,7 +1620,7 @@ "6/768/16385": 0, "6/768/16394": 25, "6/768/16395": 0, - "6/768/16396": 65279, + "6/768/16396": 0, "6/768/16397": 0, "6/768/16400": 0, "6/768/65532": 25, diff --git a/tests/components/matter/fixtures/nodes/occupancy-sensor.json b/tests/components/matter/fixtures/nodes/occupancy_sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/occupancy-sensor.json rename to tests/components/matter/fixtures/nodes/occupancy_sensor.json diff --git a/tests/components/matter/fixtures/nodes/on-off-plugin-unit.json b/tests/components/matter/fixtures/nodes/on_off_plugin_unit.json similarity index 100% rename from tests/components/matter/fixtures/nodes/on-off-plugin-unit.json rename to tests/components/matter/fixtures/nodes/on_off_plugin_unit.json diff --git a/tests/components/matter/fixtures/nodes/onoff-light.json b/tests/components/matter/fixtures/nodes/onoff_light.json similarity index 100% rename from tests/components/matter/fixtures/nodes/onoff-light.json rename to tests/components/matter/fixtures/nodes/onoff_light.json diff --git a/tests/components/matter/fixtures/nodes/onoff-light-alt-name.json b/tests/components/matter/fixtures/nodes/onoff_light_alt_name.json similarity index 99% rename from tests/components/matter/fixtures/nodes/onoff-light-alt-name.json rename to tests/components/matter/fixtures/nodes/onoff_light_alt_name.json index 46575640adf..ac462cd7951 100644 --- a/tests/components/matter/fixtures/nodes/onoff-light-alt-name.json +++ b/tests/components/matter/fixtures/nodes/onoff_light_alt_name.json @@ -384,7 +384,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, diff --git a/tests/components/matter/fixtures/nodes/onoff-light-no-name.json b/tests/components/matter/fixtures/nodes/onoff_light_no_name.json similarity index 99% rename from 
tests/components/matter/fixtures/nodes/onoff-light-no-name.json rename to tests/components/matter/fixtures/nodes/onoff_light_no_name.json index a6c73564af0..19cd58bf5cb 100644 --- a/tests/components/matter/fixtures/nodes/onoff-light-no-name.json +++ b/tests/components/matter/fixtures/nodes/onoff_light_no_name.json @@ -384,7 +384,7 @@ "1/768/16390": 0, "1/768/16394": 31, "1/768/16395": 0, - "1/768/16396": 65279, + "1/768/16396": 0, "1/768/16397": 0, "1/768/16400": 0, "1/768/65532": 31, diff --git a/tests/components/matter/fixtures/nodes/onoff-light-with-levelcontrol-present.json b/tests/components/matter/fixtures/nodes/onoff_light_with_levelcontrol_present.json similarity index 100% rename from tests/components/matter/fixtures/nodes/onoff-light-with-levelcontrol-present.json rename to tests/components/matter/fixtures/nodes/onoff_light_with_levelcontrol_present.json diff --git a/tests/components/matter/fixtures/nodes/pressure-sensor.json b/tests/components/matter/fixtures/nodes/pressure_sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/pressure-sensor.json rename to tests/components/matter/fixtures/nodes/pressure_sensor.json diff --git a/tests/components/matter/fixtures/nodes/room-airconditioner.json b/tests/components/matter/fixtures/nodes/room_airconditioner.json similarity index 100% rename from tests/components/matter/fixtures/nodes/room-airconditioner.json rename to tests/components/matter/fixtures/nodes/room_airconditioner.json diff --git a/tests/components/matter/fixtures/nodes/silabs_dishwasher.json b/tests/components/matter/fixtures/nodes/silabs_dishwasher.json new file mode 100644 index 00000000000..c5015bc1c34 --- /dev/null +++ b/tests/components/matter/fixtures/nodes/silabs_dishwasher.json @@ -0,0 +1,657 @@ +{ + "node_id": 54, + "date_commissioned": "2024-08-15T07:14:29.055273", + "last_interview": "2024-08-15T11:36:27.830863", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [ + 29, 31, 40, 42, 43, 44, 45, 48, 49, 50, 51, 52, 53, 60, 62, 63, 64, 65 + ], + "0/29/2": [41], + "0/29/3": [1, 2], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "254": 1 + }, + { + "254": 1 + }, + { + "254": 2 + }, + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 3 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "Silabs", + "0/40/2": 65521, + "0/40/3": "Dishwasher", + "0/40/4": 32773, + "0/40/5": "", + "0/40/6": "**REDACTED**", + "0/40/7": 1, + "0/40/8": "TEST_VERSION", + "0/40/9": 1, + "0/40/10": "1", + "0/40/11": "20200101", + "0/40/12": "Dishwasher", + "0/40/13": "Dishwasher", + "0/40/14": "", + "0/40/15": "", + "0/40/16": false, + "0/40/18": "**REDACTED**", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/21": 16973824, + "0/40/22": 1, + "0/40/65532": 0, + "0/40/65533": 3, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22, + 65528, 65529, 65531, 65532, 65533 + ], + "0/42/0": [ + { + "1": 556220604, + "2": 0, + "254": 1 + } + ], + "0/42/1": true, + "0/42/2": 1, + "0/42/3": null, + "0/42/65532": 0, + "0/42/65533": 1, + "0/42/65528": [], + "0/42/65529": [0], + "0/42/65531": 
[0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/43/0": "en-US", + "0/43/1": [ + "en-US", + "de-DE", + "fr-FR", + "en-GB", + "es-ES", + "zh-CN", + "it-IT", + "ja-JP" + ], + "0/43/65532": 0, + "0/43/65533": 1, + "0/43/65528": [], + "0/43/65529": [], + "0/43/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "0/44/0": 0, + "0/44/1": 0, + "0/44/2": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 7], + "0/44/65532": 0, + "0/44/65533": 1, + "0/44/65528": [], + "0/44/65529": [], + "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/45/0": 1, + "0/45/65532": 0, + "0/45/65533": 1, + "0/45/65528": [], + "0/45/65529": [], + "0/45/65531": [0, 65528, 65529, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [], + "0/49/2": 10, + "0/49/3": 20, + "0/49/4": true, + "0/49/5": 0, + "0/49/6": "**REDACTED**", + "0/49/7": null, + "0/49/9": 10, + "0/49/10": 4, + "0/49/65532": 2, + "0/49/65533": 2, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 3, 4, 6, 8], + "0/49/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 65528, 65529, 65531, 65532, 65533 + ], + "0/50/65532": 0, + "0/50/65533": 1, + "0/50/65528": [1], + "0/50/65529": [0], + "0/50/65531": [65528, 65529, 65531, 65532, 65533], + "0/51/0": [], + "0/51/1": 6, + "0/51/2": 10, + "0/51/3": 4, + "0/51/4": 1, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 2, + "0/51/65528": [2], + "0/51/65529": [0, 1], + "0/51/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 + ], + "0/52/0": [ + { + "0": 3, + "1": "UART", + "3": 128 + }, + { + "0": 9, + "1": "DishWash", + "3": 766 + }, + { + "0": 2, + "1": "OT Stack", + "3": 719 + }, + { + "0": 12, + "1": "Bluetoot", + "3": 40 + }, + { + "0": 1, + "1": "Bluetoot", + "3": 282 + }, + { + "0": 11, + "1": "Bluetoot", + "3": 210 + }, + { + "0": 8, + "1": "shell", + "3": 323 + }, + { + "0": 6, + "1": "Tmr Svc", + "3": 594 + }, + { + "0": 5, + "1": "IDLE", + "3": 266 + }, + { + "0": 7, + "1": "CHIP", + "3": 705 + } + ], + "0/52/1": 100824, + "0/52/2": 16984, + "0/52/3": 4294959062, + "0/52/65532": 1, + "0/52/65533": 1, + "0/52/65528": [], + "0/52/65529": [0], + "0/52/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/53/0": 25, + "0/53/1": 5, + "0/53/2": "**REDACTED**", + "0/53/3": 39055, + "0/53/4": 12054125955590472924, + "0/53/5": "**REDACTED**", + "0/53/6": 0, + "0/53/7": [], + "0/53/8": [], + "0/53/9": 1773502518, + "0/53/10": 64, + "0/53/11": 88, + "0/53/12": 225, + "0/53/13": 22, + "0/53/14": 1, + "0/53/15": 0, + "0/53/16": 1, + "0/53/17": 0, + "0/53/18": 0, + "0/53/19": 1, + "0/53/20": 0, + "0/53/21": 0, + "0/53/22": 693, + "0/53/23": 686, + "0/53/24": 7, + "0/53/25": 686, + "0/53/26": 686, + "0/53/27": 7, + "0/53/28": 693, + "0/53/29": 0, + "0/53/30": 0, + "0/53/31": 0, + "0/53/32": 0, + "0/53/33": 61, + "0/53/34": 0, + "0/53/35": 0, + "0/53/36": 2, + "0/53/37": 0, + "0/53/38": 0, + "0/53/39": 87, + "0/53/40": 87, + "0/53/41": 0, + "0/53/42": 86, + "0/53/43": 0, + "0/53/44": 0, + "0/53/45": 0, + "0/53/46": 0, + "0/53/47": 0, + "0/53/48": 0, + "0/53/49": 1, + "0/53/50": 0, + "0/53/51": 0, + "0/53/52": 0, + "0/53/53": 0, + "0/53/54": 0, + "0/53/55": 0, + "0/53/56": 0, + "0/53/57": 0, + "0/53/58": 0, + "0/53/59": { + "0": 672, + "1": 8335 + }, + "0/53/60": "AB//wA==", + "0/53/61": { 
+ "0": true, + "1": false, + "2": true, + "3": true, + "4": true, + "5": true, + "6": false, + "7": true, + "8": true, + "9": true, + "10": true, + "11": true + }, + "0/53/62": [], + "0/53/65532": 15, + "0/53/65533": 2, + "0/53/65528": [], + "0/53/65529": [0], + "0/53/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 65528, 65529, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 5, + "0/62/3": 3, + "0/62/4": [], + "0/62/5": 3, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/64/0": [ + { + "0": "room", + "1": "bedroom 2" + }, + { + "0": "orientation", + "1": "North" + }, + { + "0": "floor", + "1": "2" + }, + { + "0": "direction", + "1": "up" + } + ], + "0/64/65532": 0, + "0/64/65533": 1, + "0/64/65528": [], + "0/64/65529": [], + "0/64/65531": [0, 65528, 65529, 65531, 65532, 65533], + "0/65/0": [], + "0/65/65532": 0, + "0/65/65533": 1, + "0/65/65528": [], + "0/65/65529": [], + "0/65/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 2, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0, 64], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 117, + "1": 1 + } + ], + "1/29/1": [3, 29, 30, 89, 96], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/30/0": [], + "1/30/65532": 0, + "1/30/65533": 1, + "1/30/65528": [], + "1/30/65529": [], + "1/30/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/89/0": null, + "1/89/1": null, + "1/89/65532": null, + "1/89/65533": 2, + "1/89/65528": [1], + "1/89/65529": [0], + "1/89/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/96/0": null, + "1/96/1": null, + "1/96/3": [ + { + "0": 0 + }, + { + "0": 1 + }, + { + "0": 2 + }, + { + "0": 3 + }, + { + "0": 8, + "1": "Extra state" + } + ], + "1/96/4": 0, + "1/96/5": { + "0": 0 + }, + "1/96/65532": 0, + "1/96/65533": 1, + "1/96/65528": [4], + "1/96/65529": [0, 1, 2], + "1/96/65531": [0, 1, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "2/29/0": [ + { + "0": 1296, + "1": 1 + } + ], + "2/29/1": [29, 144, 145, 156], + "2/29/2": [], + "2/29/3": [], + "2/29/65532": 0, + "2/29/65533": 2, + "2/29/65528": [], + "2/29/65529": [], + "2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "2/144/0": 2, + "2/144/1": 3, + "2/144/2": [ + { + "0": 5, + "1": true, + "2": -50000000, + "3": 50000000, + "4": [ + { + "0": -50000000, + "1": -10000000, + "2": 5000, + "3": 2000, + "4": 3000 + }, + { + "0": -9999999, + "1": 9999999, + "2": 1000, + "3": 100, + "4": 500 + }, + { + "0": 10000000, + "1": 50000000, + "2": 5000, + "3": 2000, + "4": 3000 + } + ] + }, + { + "0": 2, + "1": true, + "2": -100000, + "3": 100000, + "4": [ + { + "0": -100000, + 
"1": -5000, + "2": 5000, + "3": 2000, + "4": 3000 + }, + { + "0": -4999, + "1": 4999, + "2": 1000, + "3": 100, + "4": 500 + }, + { + "0": 5000, + "1": 100000, + "2": 5000, + "3": 2000, + "4": 3000 + } + ] + }, + { + "0": 1, + "1": true, + "2": -500000, + "3": 500000, + "4": [ + { + "0": -500000, + "1": -100000, + "2": 5000, + "3": 2000, + "4": 3000 + }, + { + "0": -99999, + "1": 99999, + "2": 1000, + "3": 100, + "4": 500 + }, + { + "0": 100000, + "1": 500000, + "2": 5000, + "3": 2000, + "4": 3000 + } + ] + } + ], + "2/144/3": [ + { + "0": 0, + "1": 0, + "2": 300, + "7": 101, + "8": 101, + "9": 101, + "10": 101 + }, + { + "0": 1, + "1": 0, + "2": 500, + "7": 101, + "8": 101, + "9": 101, + "10": 101 + }, + { + "0": 2, + "1": 0, + "2": 1000, + "7": 101, + "8": 101, + "9": 101, + "10": 101 + } + ], + "2/144/4": 120000, + "2/144/5": 0, + "2/144/6": 0, + "2/144/7": 0, + "2/144/8": 0, + "2/144/9": 0, + "2/144/10": 0, + "2/144/11": 120000, + "2/144/12": 0, + "2/144/13": 0, + "2/144/14": 60, + "2/144/15": [ + { + "0": 1, + "1": 100000 + } + ], + "2/144/16": [ + { + "0": 1, + "1": 100000 + } + ], + "2/144/17": 9800, + "2/144/18": 0, + "2/144/65532": 31, + "2/144/65533": 1, + "2/144/65528": [], + "2/144/65529": [], + "2/144/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 65528, + 65529, 65531, 65532, 65533 + ], + "2/145/0": { + "0": 14, + "1": true, + "2": 0, + "3": 1000000000000000, + "4": [ + { + "0": 0, + "1": 1000000000000000, + "2": 500, + "3": 50 + } + ] + }, + "2/145/1": { + "0": 0, + "1": 9, + "2": 12, + "3": 9649, + "4": 12530 + }, + "2/145/5": { + "0": 0, + "1": 0, + "2": 0, + "3": 0 + }, + "2/145/65532": 5, + "2/145/65533": 1, + "2/145/65528": [], + "2/145/65529": [], + "2/145/65531": [0, 1, 5, 65528, 65529, 65531, 65532, 65533], + "2/156/0": [0, 1, 2], + "2/156/1": null, + "2/156/65532": 12, + "2/156/65533": 1, + "2/156/65528": [], + "2/156/65529": [], + "2/156/65531": [0, 1, 65528, 65529, 65531, 65532, 65533] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/fixtures/nodes/smoke_detector.json b/tests/components/matter/fixtures/nodes/smoke_detector.json new file mode 100644 index 00000000000..7ba525a7552 --- /dev/null +++ b/tests/components/matter/fixtures/nodes/smoke_detector.json @@ -0,0 +1,238 @@ +{ + "node_id": 1, + "date_commissioned": "2024-09-13T20:07:21.672257", + "last_interview": "2024-09-13T21:10:36.026041", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 2 + } + ], + "0/29/1": [29, 31, 40, 42, 48, 49, 51, 60, 62, 63, 70], + "0/29/2": [41], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65530": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 3 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65530": [0, 1], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "HEIMAN", + "0/40/2": 4619, + "0/40/3": "Smoke sensor", + "0/40/4": 4099, + "0/40/5": "", + "0/40/6": "**REDACTED**", + "0/40/7": 0, + "0/40/8": "0.0", + "0/40/9": 16, + "0/40/10": "1.0", + "0/40/11": "20240403", + "0/40/14": "", + "0/40/15": "2404034099000007", + "0/40/16": false, + "0/40/18": "redacted", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/65532": 0, + "0/40/65533": 2, 
+ "0/40/65528": [], + "0/40/65529": [], + "0/40/65530": [0, 2], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 14, 15, 16, 18, 19, 65528, 65529, + 65530, 65531, 65532, 65533 + ], + "0/42/0": [], + "0/42/1": true, + "0/42/2": 1, + "0/42/3": null, + "0/42/65532": 0, + "0/42/65533": 1, + "0/42/65528": [], + "0/42/65529": [0], + "0/42/65530": [0, 1, 2], + "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65530": [], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "+uApc5vSQm4=", + "1": true + } + ], + "0/49/2": 10, + "0/49/3": 20, + "0/49/4": true, + "0/49/5": 0, + "0/49/6": "+uApc5vSQm4=", + "0/49/7": null, + "0/49/65532": 2, + "0/49/65533": 1, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 3, 4, 6, 8], + "0/49/65530": [], + "0/49/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/51/0": [], + "0/51/1": 1, + "0/51/2": 247340, + "0/51/4": 0, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 1, + "0/51/65528": [], + "0/51/65529": [0], + "0/51/65530": [3], + "0/51/65531": [ + 0, 1, 2, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65530": [], + "0/60/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 5, + "0/62/3": 3, + "0/62/4": [], + "0/62/5": 3, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65530": [], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65530": [], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "0/70/0": 300, + "0/70/1": 6000, + "0/70/2": 500, + "0/70/3": [], + "0/70/4": 0, + "0/70/5": 2, + "0/70/65532": 1, + "0/70/65533": 1, + "0/70/65528": [1], + "0/70/65529": [0, 2, 3], + "0/70/65530": [], + "0/70/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 2, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0], + "1/3/65530": [], + "1/3/65531": [0, 1, 65528, 65529, 65530, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 118, + "1": 1 + } + ], + "1/29/1": [3, 29, 47, 92], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65530": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "1/47/0": 0, + "1/47/1": 2, + "1/47/2": "B2", + "1/47/11": 0, + "1/47/12": 188, + "1/47/14": 0, + "1/47/15": false, + "1/47/16": 0, + "1/47/19": "CR123A", + "1/47/20": 0, + "1/47/24": 0, + "1/47/25": 0, + "1/47/31": [], + "1/47/65532": 10, + "1/47/65533": 2, + "1/47/65528": [], + "1/47/65529": [], + "1/47/65530": [1], + "1/47/65531": [ + 0, 1, 2, 11, 12, 14, 15, 16, 19, 20, 24, 25, 31, 65528, 65529, 65530, + 65531, 65532, 65533 + ], + "1/92/0": 0, + "1/92/1": 0, + "1/92/3": 0, + "1/92/4": 0, + "1/92/5": false, + "1/92/6": false, + "1/92/7": 0, + "1/92/65532": 1, + "1/92/65533": 
1, + "1/92/65528": [], + "1/92/65529": [0], + "1/92/65530": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + "1/92/65531": [ + 0, 1, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533 + ] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/fixtures/nodes/switch-unit.json b/tests/components/matter/fixtures/nodes/switch_unit.json similarity index 100% rename from tests/components/matter/fixtures/nodes/switch-unit.json rename to tests/components/matter/fixtures/nodes/switch_unit.json diff --git a/tests/components/matter/fixtures/nodes/temperature-sensor.json b/tests/components/matter/fixtures/nodes/temperature_sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/temperature-sensor.json rename to tests/components/matter/fixtures/nodes/temperature_sensor.json diff --git a/tests/components/matter/fixtures/nodes/vacuum_cleaner.json b/tests/components/matter/fixtures/nodes/vacuum_cleaner.json new file mode 100644 index 00000000000..d6268144ffd --- /dev/null +++ b/tests/components/matter/fixtures/nodes/vacuum_cleaner.json @@ -0,0 +1,309 @@ +{ + "node_id": 66, + "date_commissioned": "2024-10-29T08:27:39.860951", + "last_interview": "2024-10-29T08:27:39.860959", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 48, 49, 50, 51, 60, 62, 63], + "0/29/2": [], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "TEST_VENDOR", + "0/40/2": 65521, + "0/40/3": "Mock Vacuum", + "0/40/4": 32769, + "0/40/5": "Mock Vacuum", + "0/40/6": "**REDACTED**", + "0/40/7": 0, + "0/40/8": "TEST_VERSION", + "0/40/9": 1, + "0/40/10": "1.0", + "0/40/11": "20200101", + "0/40/12": "", + "0/40/13": "", + "0/40/14": "", + "0/40/15": "TEST_SN", + "0/40/16": false, + "0/40/18": "F0D59DFAAEAD6E76", + "0/40/19": { + "0": 3, + "1": 65535 + }, + "0/40/21": 16973824, + "0/40/22": 1, + "0/40/65532": 0, + "0/40/65533": 3, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22, + 65528, 65529, 65531, 65532, 65533 + ], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 2, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "ZW5kMA==", + "1": true + } + ], + "0/49/2": 0, + "0/49/3": 0, + "0/49/4": true, + "0/49/5": null, + "0/49/6": null, + "0/49/7": null, + "0/49/65532": 4, + "0/49/65533": 2, + "0/49/65528": [], + "0/49/65529": [], + "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], + "0/50/65532": 0, + "0/50/65533": 1, + "0/50/65528": [1], + "0/50/65529": [0], + "0/50/65531": [65528, 65529, 65531, 65532, 65533], + "0/51/0": [], + "0/51/1": 1, + "0/51/2": 47, + "0/51/3": 0, + "0/51/4": 0, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 2, + "0/51/65528": [2], + "0/51/65529": [0, 1], + "0/51/65531": [ + 0, 1, 2, 3, 
4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 16, + "0/62/3": 1, + "0/62/4": [], + "0/62/5": 1, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 116, + "1": 1 + } + ], + "1/29/1": [3, 29, 84, 85, 97], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/84/0": [ + { + "0": "Idle", + "1": 0, + "2": [ + { + "1": 16384 + } + ] + }, + { + "0": "Cleaning", + "1": 1, + "2": [ + { + "1": 16385 + } + ] + }, + { + "0": "Mapping", + "1": 2, + "2": [ + { + "1": 16386 + } + ] + } + ], + "1/84/1": 0, + "1/84/65532": 0, + "1/84/65533": 2, + "1/84/65528": [1], + "1/84/65529": [0], + "1/84/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/85/0": [ + { + "0": "Quick", + "1": 0, + "2": [ + { + "1": 16385 + }, + { + "1": 1 + } + ] + }, + { + "0": "Auto", + "1": 1, + "2": [ + { + "1": 0 + }, + { + "1": 16385 + } + ] + }, + { + "0": "Deep Clean", + "1": 2, + "2": [ + { + "1": 16386 + }, + { + "1": 16384 + }, + { + "1": 16385 + } + ] + }, + { + "0": "Quiet", + "1": 3, + "2": [ + { + "1": 2 + }, + { + "1": 16385 + } + ] + }, + { + "0": "Max Vac", + "1": 4, + "2": [ + { + "1": 16385 + }, + { + "1": 16384 + } + ] + } + ], + "1/85/1": 0, + "1/85/65532": 0, + "1/85/65533": 2, + "1/85/65528": [1], + "1/85/65529": [0], + "1/85/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/97/0": null, + "1/97/1": null, + "1/97/3": [ + { + "0": 0 + }, + { + "0": 1 + }, + { + "0": 2 + }, + { + "0": 3 + }, + { + "0": 64 + }, + { + "0": 65 + }, + { + "0": 66 + } + ], + "1/97/4": 0, + "1/97/5": { + "0": 0 + }, + "1/97/65532": 0, + "1/97/65533": 1, + "1/97/65528": [4], + "1/97/65529": [0, 3, 128], + "1/97/65531": [0, 1, 3, 4, 5, 65528, 65529, 65531, 65532, 65533] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/fixtures/nodes/valve.json b/tests/components/matter/fixtures/nodes/valve.json new file mode 100644 index 00000000000..5ba06412ca9 --- /dev/null +++ b/tests/components/matter/fixtures/nodes/valve.json @@ -0,0 +1,260 @@ +{ + "node_id": 75, + "date_commissioned": "2024-09-02T09:32:00.380607", + "last_interview": "2024-09-02T09:32:00.380611", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 43, 48, 49, 50, 51, 60, 62, 63], + "0/29/2": [], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 18, + "0/40/1": "Mock", + "0/40/2": 65521, + 
"0/40/3": "Valve", + "0/40/4": 32768, + "0/40/5": "", + "0/40/6": "**REDACTED**", + "0/40/7": 0, + "0/40/8": "TEST_VERSION", + "0/40/9": 1, + "0/40/10": "1.0", + "0/40/11": "20200101", + "0/40/12": "", + "0/40/13": "", + "0/40/14": "", + "0/40/15": "TEST_SN", + "0/40/16": false, + "0/40/18": "A3586AC56A2CCCDB", + "0/40/19": { + "0": 3, + "1": 65535 + }, + "0/40/21": 17039360, + "0/40/22": 1, + "0/40/65532": 0, + "0/40/65533": 2, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22, + 65528, 65529, 65531, 65532, 65533 + ], + "0/43/0": "en-US", + "0/43/1": [ + "en-US", + "de-DE", + "fr-FR", + "en-GB", + "es-ES", + "zh-CN", + "it-IT", + "ja-JP" + ], + "0/43/65532": 0, + "0/43/65533": 1, + "0/43/65528": [], + "0/43/65529": [], + "0/43/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 2, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "ZW5zMzM=", + "1": true + } + ], + "0/49/2": 0, + "0/49/3": 0, + "0/49/4": true, + "0/49/5": null, + "0/49/6": null, + "0/49/7": null, + "0/49/65532": 4, + "0/49/65533": 2, + "0/49/65528": [], + "0/49/65529": [], + "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], + "0/50/65532": 0, + "0/50/65533": 1, + "0/50/65528": [1], + "0/50/65529": [0], + "0/50/65531": [65528, 65529, 65531, 65532, 65533], + "0/51/0": [ + { + "0": "ens33", + "1": true, + "2": null, + "3": null, + "4": "AAwpp2CV", + "5": ["wKgBjg=="], + "6": [ + "/adI27DsyURo2mqau/5wuw==", + "/adI27DsyUSOe4PwnMXbYg==", + "KgEOCgKzOZD9M4Fh8k4Abg==", + "KgEOCgKzOZCNpPnLBN7MTQ==", + "/oAAAAAAAADvX1kMcjUM+w==" + ], + "7": 2 + }, + { + "0": "lo", + "1": true, + "2": null, + "3": null, + "4": "AAAAAAAA", + "5": ["fwAAAQ=="], + "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], + "7": 0 + } + ], + "0/51/1": 1, + "0/51/2": 77, + "0/51/3": 0, + "0/51/4": 0, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 2, + "0/51/65528": [2], + "0/51/65529": [0, 1], + "0/51/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [ + { + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRSxgkBwEkCAEwCUEEPt5xWN1i0R+dLM+MnDvosL8hjyrRoHq5ja+iCtZbpXTIXt17ueMKWDc7pgeEvHn9opOCiFvmqjEZ1L4hDk27MTcKNQEoARgkAgE2AwQCBAEYMAQUUPvMnV9FkGhfQedEwlqazBFbVfUwBRQ1L3KS8MJ5RVnuryNgRxdXueDAoxgwC0CA4m5xhFuvxC4iDehajKmbdNvZdo2alIbL8hGTor2jMFIPAowJeA0ZaS0+ocRsA6xxHRrpmmF095qUHbSONrPIGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEBjOABseGNfeoeNqgBxhNV78q8SfQP8putY2hpTVwmJVaWzyqw4F/OhdJRHTZjXkSV87jHOZ58ivEb3GjFiT+OTcKNQEpARgkAmAwBBQ1L3KS8MJ5RVnuryNgRxdXueDAozAFFM2vLItbAuvwSMsedKJS5Tw7Aa2pGDALQCPtpgnYiXc8JmJmEi25z0BIPFYaf27j9yhVSmm45vjpdSZd3p8uOGjHd23m8w/22q2eWvkzU02qTVLgnV42cgkY", + "254": 1 + } + ], + "0/62/1": [ + { + "1": "BPUiJZj+BQknF7mbNOh2d9ZtKB+gQJLND+2qjIAAaMJb+2BW+xFhqDYYiA8p9YegdTb0wHA1NQY8TXMPyDwoP9Q=", + "2": 4939, + "3": 2, + "4": 75, + "5": "", + "254": 1 + } + ], + "0/62/2": 16, + "0/62/3": 1, + "0/62/4": [ + 
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEE9SIlmP4FCScXuZs06HZ31m0oH6BAks0P7aqMgABowlv7YFb7EWGoNhiIDyn1h6B1NvTAcDU1BjxNcw/IPCg/1DcKNQEpARgkAmAwBBTNryyLWwLr8EjLHnSiUuU8OwGtqTAFFM2vLItbAuvwSMsedKJS5Tw7Aa2pGDALQKL0AGnKE3ezVrBBzJA+9INd8GTFOC3oX/EeCpI4CSKlc7LijfauiDVtJ5gfqR0gf1TKLcWfSUe7mIIvXzzvg0UY" + ], + "0/62/5": 1, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 0, + "1/3/65532": 0, + "1/3/65533": 2, + "1/3/65528": [], + "1/3/65529": [0, 64], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/4/0": 128, + "1/4/65532": 1, + "1/4/65533": 3, + "1/4/65528": [0, 1, 2, 3], + "1/4/65529": [0, 1, 2, 3, 4, 5], + "1/4/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 66, + "1": 1 + } + ], + "1/29/1": [3, 4, 29, 129], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/129/0": 0, + "1/129/1": 0, + "1/129/2": 0, + "1/129/3": null, + "1/129/4": 0, + "1/129/5": 0, + "1/129/6": 0, + "1/129/7": 0, + "1/129/8": 100, + "1/129/9": 0, + "1/129/10": 0, + "1/129/65532": 0, + "1/129/65533": 1, + "1/129/65528": [], + "1/129/65529": [0, 1], + "1/129/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 65528, 65529, 65531, 65532, 65533 + ] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/fixtures/nodes/window-covering_full.json b/tests/components/matter/fixtures/nodes/window_covering_full.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window-covering_full.json rename to tests/components/matter/fixtures/nodes/window_covering_full.json diff --git a/tests/components/matter/fixtures/nodes/window-covering_lift.json b/tests/components/matter/fixtures/nodes/window_covering_lift.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window-covering_lift.json rename to tests/components/matter/fixtures/nodes/window_covering_lift.json diff --git a/tests/components/matter/fixtures/nodes/window-covering_pa-lift.json b/tests/components/matter/fixtures/nodes/window_covering_pa_lift.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window-covering_pa-lift.json rename to tests/components/matter/fixtures/nodes/window_covering_pa_lift.json diff --git a/tests/components/matter/fixtures/nodes/window-covering_pa-tilt.json b/tests/components/matter/fixtures/nodes/window_covering_pa_tilt.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window-covering_pa-tilt.json rename to tests/components/matter/fixtures/nodes/window_covering_pa_tilt.json diff --git a/tests/components/matter/fixtures/nodes/window-covering_tilt.json b/tests/components/matter/fixtures/nodes/window_covering_tilt.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window-covering_tilt.json rename to tests/components/matter/fixtures/nodes/window_covering_tilt.json diff --git a/tests/components/matter/snapshots/test_binary_sensor.ambr b/tests/components/matter/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..82dcc166f13 --- /dev/null +++ 
b/tests/components/matter/snapshots/test_binary_sensor.ambr @@ -0,0 +1,658 @@ +# serializer version: 1 +# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_door_lock_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-BatteryChargeLevel-47-14', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Mock Door Lock Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_door_lock_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_door_lock_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-BatteryChargeLevel-47-14', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Mock Door Lock Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_door_lock_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_door_lock_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Door', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-LockDoorStateSensor-257-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_door-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Mock Door Lock Door', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_door_lock_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[eve_contact_sensor][binary_sensor.eve_door_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.eve_door_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Door', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-ContactSensor-69-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[eve_contact_sensor][binary_sensor.eve_door_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Eve Door Door', + }), + 'context': , + 'entity_id': 'binary_sensor.eve_door_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[leak_sensor][binary_sensor.water_leak_detector_water_leak-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.water_leak_detector_water_leak', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water leak', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_leak', + 'unique_id': '00000000000004D2-0000000000000020-MatterNodeDevice-1-WaterLeakDetector-69-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[leak_sensor][binary_sensor.water_leak_detector_water_leak-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Water Leak Detector Water leak', + }), + 'context': , + 'entity_id': 'binary_sensor.water_leak_detector_water_leak', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[occupancy_sensor][binary_sensor.mock_occupancy_sensor_occupancy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_occupancy_sensor_occupancy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Occupancy', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'00000000000004D2-0000000000000001-MatterNodeDevice-1-OccupancySensor-1030-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[occupancy_sensor][binary_sensor.mock_occupancy_sensor_occupancy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'occupancy', + 'friendly_name': 'Mock Occupancy Sensor Occupancy', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_occupancy_sensor_occupancy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[onoff_light_alt_name][binary_sensor.mock_onoff_light_occupancy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_onoff_light_occupancy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Occupancy', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-OccupancySensor-1030-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[onoff_light_alt_name][binary_sensor.mock_onoff_light_occupancy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'occupancy', + 'friendly_name': 'Mock OnOff Light Occupancy', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_onoff_light_occupancy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[onoff_light_no_name][binary_sensor.mock_light_occupancy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_light_occupancy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Occupancy', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-OccupancySensor-1030-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[onoff_light_no_name][binary_sensor.mock_light_occupancy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'occupancy', + 'friendly_name': 'Mock Light Occupancy', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_light_occupancy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_battery_alert-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.smoke_sensor_battery_alert', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery alert', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_alert', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmBatteryAlertSensor-92-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_battery_alert-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Smoke sensor Battery alert', + }), + 'context': , + 'entity_id': 'binary_sensor.smoke_sensor_battery_alert', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_end_of_service-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.smoke_sensor_end_of_service', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'End of service', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'end_of_service', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmEndfOfServiceSensor-92-7', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_end_of_service-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Smoke sensor End of service', + }), + 'context': , + 'entity_id': 'binary_sensor.smoke_sensor_end_of_service', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_hardware_fault-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.smoke_sensor_hardware_fault', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hardware fault', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hardware_fault', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmHardwareFaultAlertSensor-92-6', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_hardware_fault-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Smoke sensor Hardware fault', + }), + 'context': , + 'entity_id': 'binary_sensor.smoke_sensor_hardware_fault', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_muted-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.smoke_sensor_muted', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Muted', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'muted', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmDeviceMutedSensor-92-4', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_muted-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smoke sensor Muted', + }), + 'context': , + 'entity_id': 'binary_sensor.smoke_sensor_muted', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_smoke-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.smoke_sensor_smoke', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Smoke', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmSmokeStateSensor-92-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_smoke-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'Smoke sensor Smoke', + }), + 'context': , + 'entity_id': 'binary_sensor.smoke_sensor_smoke', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_test_in_progress-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.smoke_sensor_test_in_progress', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Test in progress', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'test_in_progress', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmTestInProgressSensor-92-5', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_test_in_progress-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Smoke sensor Test in progress', + }), + 'context': , + 'entity_id': 'binary_sensor.smoke_sensor_test_in_progress', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git 
a/tests/components/matter/snapshots/test_button.ambr b/tests/components/matter/snapshots/test_button.ambr new file mode 100644 index 00000000000..10792b58d28 --- /dev/null +++ b/tests/components/matter/snapshots/test_button.ambr @@ -0,0 +1,2812 @@ +# serializer version: 1 +# name: test_buttons[air_purifier][button.air_purifier_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (1)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (2)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (2)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (3)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-3-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_buttons[air_purifier][button.air_purifier_identify_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (3)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (4)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-4-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (4)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (5)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-5-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (5)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.air_purifier_reset_filter_condition', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter condition', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter_condition', + 'unique_id': 
'00000000000004D2-000000000000008F-MatterNodeDevice-1-HepaFilterMonitoringResetButton-113-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Air Purifier Reset filter condition', + }), + 'context': , + 'entity_id': 'button.air_purifier_reset_filter_condition', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.air_purifier_reset_filter_condition_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter condition', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter_condition', + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-ActivatedCarbonFilterMonitoringResetButton-114-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Air Purifier Reset filter condition', + }), + 'context': , + 'entity_id': 'button.air_purifier_reset_filter_condition_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_quality_sensor][button.lightfi_aq1_air_quality_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.lightfi_aq1_air_quality_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_quality_sensor][button.lightfi_aq1_air_quality_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'lightfi-aq1-air-quality-sensor Identify', + }), + 'context': , + 'entity_id': 'button.lightfi_aq1_air_quality_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[color_temperature_light][button.mock_color_temperature_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_color_temperature_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[color_temperature_light][button.mock_color_temperature_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Color Temperature Light Identify', + }), + 'context': , + 'entity_id': 'button.mock_color_temperature_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[dimmable_plugin_unit][button.dimmable_plugin_unit_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dimmable_plugin_unit_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[dimmable_plugin_unit][button.dimmable_plugin_unit_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Dimmable Plugin Unit Identify', + }), + 'context': , + 'entity_id': 'button.dimmable_plugin_unit_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[door_lock][button.mock_door_lock_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_door_lock_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[door_lock][button.mock_door_lock_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Door Lock Identify', + }), + 'context': , + 'entity_id': 'button.mock_door_lock_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[door_lock_with_unbolt][button.mock_door_lock_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , 
+ 'entity_id': 'button.mock_door_lock_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[door_lock_with_unbolt][button.mock_door_lock_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Door Lock Identify', + }), + 'context': , + 'entity_id': 'button.mock_door_lock_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_contact_sensor][button.eve_door_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_door_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_contact_sensor][button.eve_door_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Door Identify', + }), + 'context': , + 'entity_id': 'button.eve_door_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_energy_plug][button.eve_energy_plug_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_energy_plug_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_energy_plug][button.eve_energy_plug_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Energy Plug Identify', + }), + 'context': , + 'entity_id': 'button.eve_energy_plug_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_energy_plug_patched][button.eve_energy_plug_patched_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_energy_plug_patched_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_energy_plug_patched][button.eve_energy_plug_patched_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Energy Plug Patched Identify', + }), + 'context': , + 'entity_id': 'button.eve_energy_plug_patched_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_thermo][button.eve_thermo_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_thermo_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_thermo][button.eve_thermo_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Thermo Identify', + }), + 'context': , + 'entity_id': 'button.eve_thermo_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_weather_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Weather Identify (1)', + }), + 'context': , + 'entity_id': 'button.eve_weather_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_weather_identify_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (2)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Weather Identify (2)', + }), + 'context': , + 'entity_id': 'button.eve_weather_identify_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[extended_color_light][button.mock_extended_color_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_extended_color_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[extended_color_light][button.mock_extended_color_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Extended Color Light Identify', + }), + 'context': , + 'entity_id': 'button.mock_extended_color_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[fan][button.mocked_fan_switch_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mocked_fan_switch_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[fan][button.mocked_fan_switch_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mocked Fan Switch Identify', + }), + 'context': , + 'entity_id': 'button.mocked_fan_switch_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[flow_sensor][button.mock_flow_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_flow_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[flow_sensor][button.mock_flow_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Flow Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_flow_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[generic_switch][button.mock_generic_switch_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_generic_switch_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[generic_switch][button.mock_generic_switch_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Generic Switch Identify', + }), + 'context': , + 'entity_id': 'button.mock_generic_switch_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[generic_switch_multi][button.mock_generic_switch_fancy_button-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_generic_switch_fancy_button', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fancy Button', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[generic_switch_multi][button.mock_generic_switch_fancy_button-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Generic Switch Fancy Button', + }), + 'context': , + 'entity_id': 'button.mock_generic_switch_fancy_button', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) 
+# --- +# name: test_buttons[generic_switch_multi][button.mock_generic_switch_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_generic_switch_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[generic_switch_multi][button.mock_generic_switch_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Generic Switch Identify (1)', + }), + 'context': , + 'entity_id': 'button.mock_generic_switch_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[humidity_sensor][button.mock_humidity_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_humidity_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[humidity_sensor][button.mock_humidity_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Humidity Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_humidity_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[light_sensor][button.mock_light_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_light_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[light_sensor][button.mock_light_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Light Sensor Identify', + }), + 'context': , + 'entity_id': 
'button.mock_light_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.microwave_oven_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Microwave Oven Identify', + }), + 'context': , + 'entity_id': 'button.microwave_oven_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_pause-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_oven_pause', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pause', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pause', + 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStatePauseButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_pause-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Oven Pause', + }), + 'context': , + 'entity_id': 'button.microwave_oven_pause', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_resume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_oven_resume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Resume', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'resume', + 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStateResumeButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_resume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Oven Resume', + }), + 'context': , + 'entity_id': 
'button.microwave_oven_resume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_start-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_oven_start', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'start', + 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStateStartButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_start-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Oven Start', + }), + 'context': , + 'entity_id': 'button.microwave_oven_start', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_oven_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStateStopButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Oven Stop', + }), + 'context': , + 'entity_id': 'button.microwave_oven_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_config-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_config', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Config', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_config-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Config', + }), + 'context': , + 'entity_id': 'button.inovelli_config', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_down-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_down', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Down', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_down-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Down', + }), + 'context': , + 'entity_id': 'button.inovelli_down', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Identify (1)', + }), + 'context': , + 'entity_id': 'button.inovelli_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_identify_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (2)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Identify (2)', + }), + 'context': , + 'entity_id': 'button.inovelli_identify_2', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_identify_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (6)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Identify (6)', + }), + 'context': , + 'entity_id': 'button.inovelli_identify_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_up-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_up', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Up', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-3-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_up-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Up', + }), + 'context': , + 'entity_id': 'button.inovelli_up', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[occupancy_sensor][button.mock_occupancy_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_occupancy_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[occupancy_sensor][button.mock_occupancy_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Occupancy Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_occupancy_sensor_identify', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[on_off_plugin_unit][button.mock_onoffpluginunit_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_onoffpluginunit_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[on_off_plugin_unit][button.mock_onoffpluginunit_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock OnOffPluginUnit Identify', + }), + 'context': , + 'entity_id': 'button.mock_onoffpluginunit_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[onoff_light][button.mock_onoff_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_onoff_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[onoff_light][button.mock_onoff_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock OnOff Light Identify', + }), + 'context': , + 'entity_id': 'button.mock_onoff_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[onoff_light_alt_name][button.mock_onoff_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_onoff_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[onoff_light_alt_name][button.mock_onoff_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock OnOff Light Identify', + }), 
+ 'context': , + 'entity_id': 'button.mock_onoff_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[onoff_light_no_name][button.mock_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[onoff_light_no_name][button.mock_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Light Identify', + }), + 'context': , + 'entity_id': 'button.mock_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[onoff_light_with_levelcontrol_present][button.d215s_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.d215s_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[onoff_light_with_levelcontrol_present][button.d215s_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'D215S Identify', + }), + 'context': , + 'entity_id': 'button.d215s_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[pressure_sensor][button.mock_pressure_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_pressure_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[pressure_sensor][button.mock_pressure_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 
'friendly_name': 'Mock Pressure Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_pressure_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.room_airconditioner_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Room AirConditioner Identify (1)', + }), + 'context': , + 'entity_id': 'button.room_airconditioner_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.room_airconditioner_identify_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (2)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Room AirConditioner Identify (2)', + }), + 'context': , + 'entity_id': 'button.room_airconditioner_identify_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dishwasher_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_buttons[silabs_dishwasher][button.dishwasher_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Dishwasher Identify', + }), + 'context': , + 'entity_id': 'button.dishwasher_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_pause-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.dishwasher_pause', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pause', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pause', + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalStatePauseButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_pause-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Pause', + }), + 'context': , + 'entity_id': 'button.dishwasher_pause', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_start-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.dishwasher_start', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'start', + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalStateStartButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_start-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Start', + }), + 'context': , + 'entity_id': 'button.dishwasher_start', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.dishwasher_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalStateStopButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_buttons[silabs_dishwasher][button.dishwasher_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Stop', + }), + 'context': , + 'entity_id': 'button.dishwasher_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[smoke_detector][button.smoke_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.smoke_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[smoke_detector][button.smoke_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Smoke sensor Identify', + }), + 'context': , + 'entity_id': 'button.smoke_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[switch_unit][button.mock_switchunit_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_switchunit_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[switch_unit][button.mock_switchunit_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock SwitchUnit Identify', + }), + 'context': , + 'entity_id': 'button.mock_switchunit_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[temperature_sensor][button.mock_temperature_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_temperature_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_buttons[temperature_sensor][button.mock_temperature_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Temperature Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_temperature_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[thermostat][button.longan_link_hvac_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.longan_link_hvac_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[thermostat][button.longan_link_hvac_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Longan link HVAC Identify', + }), + 'context': , + 'entity_id': 'button.longan_link_hvac_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[valve][button.valve_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.valve_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[valve][button.valve_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Valve Identify', + }), + 'context': , + 'entity_id': 'button.valve_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_full][button.mock_full_window_covering_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_full_window_covering_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': 
None, + }) +# --- +# name: test_buttons[window_covering_full][button.mock_full_window_covering_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Full Window Covering Identify', + }), + 'context': , + 'entity_id': 'button.mock_full_window_covering_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_lift][button.mock_lift_window_covering_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_lift_window_covering_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[window_covering_lift][button.mock_lift_window_covering_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Lift Window Covering Identify', + }), + 'context': , + 'entity_id': 'button.mock_lift_window_covering_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_pa_lift][button.longan_link_wncv_da01_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.longan_link_wncv_da01_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[window_covering_pa_lift][button.longan_link_wncv_da01_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Longan link WNCV DA01 Identify', + }), + 'context': , + 'entity_id': 'button.longan_link_wncv_da01_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_pa_tilt][button.mock_pa_tilt_window_covering_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_pa_tilt_window_covering_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 
'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[window_covering_pa_tilt][button.mock_pa_tilt_window_covering_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock PA Tilt Window Covering Identify', + }), + 'context': , + 'entity_id': 'button.mock_pa_tilt_window_covering_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_tilt][button.mock_tilt_window_covering_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_tilt_window_covering_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[window_covering_tilt][button.mock_tilt_window_covering_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Tilt Window Covering Identify', + }), + 'context': , + 'entity_id': 'button.mock_tilt_window_covering_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/matter/snapshots/test_climate.ambr b/tests/components/matter/snapshots/test_climate.ambr new file mode 100644 index 00000000000..25f5ca06f62 --- /dev/null +++ b/tests/components/matter/snapshots/test_climate.ambr @@ -0,0 +1,263 @@ +# serializer version: 1 +# name: test_climates[air_purifier][climate.air_purifier-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 30.0, + 'min_temp': 5.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.air_purifier', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-5-MatterThermostat-513-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climates[air_purifier][climate.air_purifier-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Air Purifier', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 30.0, + 'min_temp': 5.0, + 'supported_features': , + 'temperature': 20.0, + }), + 'context': , + 'entity_id': 'climate.air_purifier', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- 
+# name: test_climates[eve_thermo][climate.eve_thermo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 30.0, + 'min_temp': 10.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.eve_thermo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-MatterThermostat-513-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climates[eve_thermo][climate.eve_thermo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.0, + 'friendly_name': 'Eve Thermo', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 30.0, + 'min_temp': 10.0, + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.eve_thermo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climates[room_airconditioner][climate.room_airconditioner-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + , + , + , + ]), + 'max_temp': 32.0, + 'min_temp': 16.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.room_airconditioner', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterThermostat-513-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climates[room_airconditioner][climate.room_airconditioner-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Room AirConditioner', + 'hvac_modes': list([ + , + , + , + , + , + , + ]), + 'max_temp': 32.0, + 'min_temp': 16.0, + 'supported_features': , + 'temperature': 20.0, + }), + 'context': , + 'entity_id': 'climate.room_airconditioner', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_climates[thermostat][climate.longan_link_hvac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.longan_link_hvac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 
'00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterThermostat-513-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climates[thermostat][climate.longan_link_hvac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 28.3, + 'friendly_name': 'Longan link HVAC', + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.longan_link_hvac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- diff --git a/tests/components/matter/snapshots/test_cover.ambr b/tests/components/matter/snapshots/test_cover.ambr new file mode 100644 index 00000000000..7d036d35983 --- /dev/null +++ b/tests/components/matter/snapshots/test_cover.ambr @@ -0,0 +1,245 @@ +# serializer version: 1 +# name: test_covers[window_covering_full][cover.mock_full_window_covering-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.mock_full_window_covering', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCoverPositionAwareLiftAndTilt-258-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_full][cover.mock_full_window_covering-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 100, + 'current_tilt_position': 100, + 'device_class': 'awning', + 'friendly_name': 'Mock Full Window Covering', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.mock_full_window_covering', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_covers[window_covering_lift][cover.mock_lift_window_covering-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.mock_lift_window_covering', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCover-258-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_lift][cover.mock_lift_window_covering-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'awning', + 'friendly_name': 'Mock Lift Window Covering', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.mock_lift_window_covering', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_covers[window_covering_pa_lift][cover.longan_link_wncv_da01-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.longan_link_wncv_da01', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterCoverPositionAwareLift-258-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_pa_lift][cover.longan_link_wncv_da01-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 51, + 'device_class': 'awning', + 'friendly_name': 'Longan link WNCV DA01', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.longan_link_wncv_da01', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_covers[window_covering_pa_tilt][cover.mock_pa_tilt_window_covering-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.mock_pa_tilt_window_covering', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCoverPositionAwareTilt-258-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_pa_tilt][cover.mock_pa_tilt_window_covering-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_tilt_position': 100, + 'device_class': 'awning', + 'friendly_name': 'Mock PA Tilt Window Covering', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.mock_pa_tilt_window_covering', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_covers[window_covering_tilt][cover.mock_tilt_window_covering-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.mock_tilt_window_covering', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCover-258-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_tilt][cover.mock_tilt_window_covering-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'awning', + 'friendly_name': 'Mock Tilt Window Covering', + 'supported_features': , + }), + 'context': , + 
'entity_id': 'cover.mock_tilt_window_covering', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/matter/snapshots/test_event.ambr b/tests/components/matter/snapshots/test_event.ambr new file mode 100644 index 00000000000..031e8e9d24f --- /dev/null +++ b/tests/components/matter/snapshots/test_event.ambr @@ -0,0 +1,385 @@ +# serializer version: 1 +# name: test_events[generic_switch][event.mock_generic_switch_button-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'initial_press', + 'short_release', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.mock_generic_switch_button', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Button', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[generic_switch][event.mock_generic_switch_button-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'initial_press', + 'short_release', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Mock Generic Switch Button', + }), + 'context': , + 'entity_id': 'event.mock_generic_switch_button', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[generic_switch_multi][event.mock_generic_switch_button_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.mock_generic_switch_button_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Button (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[generic_switch_multi][event.mock_generic_switch_button_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Mock Generic Switch Button (1)', + }), + 'context': , + 'entity_id': 'event.mock_generic_switch_button_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[generic_switch_multi][event.mock_generic_switch_fancy_button-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 
'multi_press_2', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.mock_generic_switch_fancy_button', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fancy Button', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-2-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[generic_switch_multi][event.mock_generic_switch_fancy_button-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Mock Generic Switch Fancy Button', + }), + 'context': , + 'entity_id': 'event.mock_generic_switch_fancy_button', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_config-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.inovelli_config', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Config', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_config-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Inovelli Config', + }), + 'context': , + 'entity_id': 'event.inovelli_config', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_down-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.inovelli_down', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Down', + 'platform': 'matter', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_down-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Inovelli Down', + }), + 'context': , + 'entity_id': 'event.inovelli_down', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_up-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.inovelli_up', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Up', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-3-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_up-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Inovelli Up', + }), + 'context': , + 'entity_id': 'event.inovelli_up', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/matter/snapshots/test_fan.ambr b/tests/components/matter/snapshots/test_fan.ambr new file mode 100644 index 00000000000..7f1fe7d42db --- /dev/null +++ b/tests/components/matter/snapshots/test_fan.ambr @@ -0,0 +1,263 @@ +# serializer version: 1 +# name: test_fans[air_purifier][fan.air_purifier-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'natural_wind', + 'sleep_wind', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.air_purifier', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-MatterFan-514-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[air_purifier][fan.air_purifier-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'direction': 'forward', + 'friendly_name': 'Air Purifier', + 'oscillating': False, + 
'percentage': None, + 'percentage_step': 10.0, + 'preset_mode': 'auto', + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'natural_wind', + 'sleep_wind', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.air_purifier', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_fans[fan][fan.mocked_fan_switch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'natural_wind', + 'sleep_wind', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.mocked_fan_switch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-MatterFan-514-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[fan][fan.mocked_fan_switch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mocked Fan Switch', + 'percentage': 0, + 'percentage_step': 33.333333333333336, + 'preset_mode': None, + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'natural_wind', + 'sleep_wind', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.mocked_fan_switch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_fans[room_airconditioner][fan.room_airconditioner-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'sleep_wind', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.room_airconditioner', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterFan-514-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[room_airconditioner][fan.room_airconditioner-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Room AirConditioner', + 'percentage': 0, + 'percentage_step': 33.333333333333336, + 'preset_mode': None, + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'sleep_wind', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.room_airconditioner', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_fans[thermostat][fan.longan_link_hvac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 
'entity_id': 'fan.longan_link_hvac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterFan-514-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[thermostat][fan.longan_link_hvac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Longan link HVAC', + 'preset_mode': None, + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.longan_link_hvac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/matter/snapshots/test_light.ambr b/tests/components/matter/snapshots/test_light.ambr new file mode 100644 index 00000000000..eff5820d27d --- /dev/null +++ b/tests/components/matter/snapshots/test_light.ambr @@ -0,0 +1,660 @@ +# serializer version: 1 +# name: test_lights[color_temperature_light][light.mock_color_temperature_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_color_temperature_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[color_temperature_light][light.mock_color_temperature_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 128, + 'color_mode': , + 'color_temp': 284, + 'color_temp_kelvin': 3521, + 'friendly_name': 'Mock Color Temperature Light', + 'hs_color': tuple( + 27.152, + 44.32, + ), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 193, + 142, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.452, + 0.373, + ), + }), + 'context': , + 'entity_id': 'light.mock_color_temperature_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[dimmable_light][light.mock_dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_dimmable_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[dimmable_light][light.mock_dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 51, + 'color_mode': , + 'friendly_name': 'Mock Dimmable Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[dimmable_plugin_unit][light.dimmable_plugin_unit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_plugin_unit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[dimmable_plugin_unit][light.dimmable_plugin_unit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable Plugin Unit', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_plugin_unit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[extended_color_light][light.mock_extended_color_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_extended_color_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[extended_color_light][light.mock_extended_color_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 128, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'friendly_name': 'Mock Extended Color Light', + 'hs_color': tuple( + 51.024, + 20.079, + ), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 247, + 204, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 
0.362, + 0.373, + ), + }), + 'context': , + 'entity_id': 'light.mock_extended_color_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[multi_endpoint_light][light.inovelli_light_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.inovelli_light_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'light', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[multi_endpoint_light][light.inovelli_light_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'friendly_name': 'Inovelli Light (1)', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.inovelli_light_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_lights[multi_endpoint_light][light.inovelli_light_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.inovelli_light_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light (6)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'light', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[multi_endpoint_light][light.inovelli_light_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'color_temp': None, + 'color_temp_kelvin': None, + 'friendly_name': 'Inovelli Light (6)', + 'hs_color': None, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': None, + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': None, + }), + 'context': , + 'entity_id': 'light.inovelli_light_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_lights[onoff_light][light.mock_onoff_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_onoff_light', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[onoff_light][light.mock_onoff_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'Mock OnOff Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_onoff_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[onoff_light_alt_name][light.mock_onoff_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_onoff_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[onoff_light_alt_name][light.mock_onoff_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'friendly_name': 'Mock OnOff Light', + 'hs_color': None, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': None, + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': None, + }), + 'context': , + 'entity_id': 'light.mock_onoff_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[onoff_light_no_name][light.mock_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[onoff_light_no_name][light.mock_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 
'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'friendly_name': 'Mock Light', + 'hs_color': None, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': None, + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': None, + }), + 'context': , + 'entity_id': 'light.mock_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[onoff_light_with_levelcontrol_present][light.d215s-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.d215s', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[onoff_light_with_levelcontrol_present][light.d215s-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'D215S', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.d215s', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/matter/snapshots/test_lock.ambr b/tests/components/matter/snapshots/test_lock.ambr new file mode 100644 index 00000000000..bf34ac267d7 --- /dev/null +++ b/tests/components/matter/snapshots/test_lock.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_locks[door_lock][lock.mock_door_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.mock_door_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLock-257-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_locks[door_lock][lock.mock_door_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.mock_door_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- +# name: test_locks[door_lock_with_unbolt][lock.mock_door_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.mock_door_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLock-257-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_locks[door_lock_with_unbolt][lock.mock_door_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.mock_door_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'locked', + }) +# --- diff --git a/tests/components/matter/snapshots/test_number.ambr b/tests/components/matter/snapshots/test_number.ambr new file mode 100644 index 00000000000..9d51bb92e51 --- /dev/null +++ b/tests/components/matter/snapshots/test_number.ambr @@ -0,0 +1,1560 @@ +# serializer version: 1 +# name: test_numbers[color_temperature_light][number.mock_color_temperature_light_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_color_temperature_light_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[color_temperature_light][number.mock_color_temperature_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Color Temperature Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_color_temperature_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_dimmable_light_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , 
+ 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_dimmable_light_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_dimmable_light_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_dimmable_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_dimmable_light_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_dimmable_light_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_dimmable_light_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_dimmable_light_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.dimmable_plugin_unit_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dimmable Plugin Unit On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.dimmable_plugin_unit_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.dimmable_plugin_unit_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dimmable Plugin Unit On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.dimmable_plugin_unit_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '1.0', + }) +# --- +# name: test_numbers[eve_weather_sensor][number.eve_weather_altitude_above_sea_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9000, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.eve_weather_altitude_above_sea_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Altitude above Sea Level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'altitude', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-EveWeatherAltitude-319486977-319422483', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[eve_weather_sensor][number.eve_weather_altitude_above_sea_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Eve Weather Altitude above Sea Level', + 'max': 9000, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.eve_weather_altitude_above_sea_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.0', + }) +# --- +# name: test_numbers[extended_color_light][number.mock_extended_color_light_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_extended_color_light_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[extended_color_light][number.mock_extended_color_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Extended Color Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_extended_color_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.inovelli_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.inovelli_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.5', + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.inovelli_on_level_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli On level (1)', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.inovelli_on_level_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '137', + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.inovelli_on_level_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level (6)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli On level (6)', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.inovelli_on_level_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '254', + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 
'number.inovelli_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.inovelli_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.5', + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.inovelli_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-on_transition_time-8-18', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.inovelli_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.5', + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoffpluginunit_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit Off transition time', + 'max': 65534, + 'min': 0, + 
'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoffpluginunit_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoffpluginunit_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_onoffpluginunit_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoffpluginunit_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoffpluginunit_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoffpluginunit_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoffpluginunit_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoff_light_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoff_light_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoff_light_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_onoff_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# 
--- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoff_light_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoff_light_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoff_light_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoff_light_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_light_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_light_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_light_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_light_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_light_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 
'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_light_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_light_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.d215s_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'D215S On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.d215s_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.d215s_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'D215S On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.d215s_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- diff --git a/tests/components/matter/snapshots/test_select.ambr b/tests/components/matter/snapshots/test_select.ambr new file mode 100644 index 00000000000..663b0cdaf51 --- /dev/null +++ b/tests/components/matter/snapshots/test_select.ambr @@ -0,0 +1,1636 @@ +# serializer version: 1 +# name: test_selects[color_temperature_light][select.mock_color_temperature_light_lighting-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Dark', + 'Medium', + 'Light', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_color_temperature_light_lighting', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lighting', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[color_temperature_light][select.mock_color_temperature_light_lighting-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Color Temperature Light Lighting', + 'options': list([ + 'Dark', + 'Medium', + 'Light', + ]), + }), + 'context': , + 'entity_id': 'select.mock_color_temperature_light_lighting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Dark', + }) +# --- +# name: test_selects[color_temperature_light][select.mock_color_temperature_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_color_temperature_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[color_temperature_light][select.mock_color_temperature_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Color Temperature Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_color_temperature_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: 
test_selects[dimmable_light][select.mock_dimmable_light_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Red', + 'Orange', + 'Lemon', + 'Lime', + 'Green', + 'Teal', + 'Cyan', + 'Aqua', + 'Blue', + 'Violet', + 'Magenta', + 'Pink', + 'White', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_dimmable_light_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED Color', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-6-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[dimmable_light][select.mock_dimmable_light_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light LED Color', + 'options': list([ + 'Red', + 'Orange', + 'Lemon', + 'Lime', + 'Green', + 'Teal', + 'Cyan', + 'Aqua', + 'Blue', + 'Violet', + 'Magenta', + 'Pink', + 'White', + ]), + }), + 'context': , + 'entity_id': 'select.mock_dimmable_light_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Aqua', + }) +# --- +# name: test_selects[dimmable_light][select.mock_dimmable_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_dimmable_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[dimmable_light][select.mock_dimmable_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_dimmable_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[dimmable_plugin_unit][select.dimmable_plugin_unit_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.dimmable_plugin_unit_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[dimmable_plugin_unit][select.dimmable_plugin_unit_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dimmable Plugin Unit Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.dimmable_plugin_unit_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[door_lock][select.mock_door_lock_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[door_lock][select.mock_door_lock_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_selects[door_lock_with_unbolt][select.mock_door_lock_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[door_lock_with_unbolt][select.mock_door_lock_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Power-on 
behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_selects[eve_energy_plug][select.eve_energy_plug_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.eve_energy_plug_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[eve_energy_plug][select.eve_energy_plug_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Energy Plug Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.eve_energy_plug_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[eve_energy_plug_patched][select.eve_energy_plug_patched_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.eve_energy_plug_patched_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[eve_energy_plug_patched][select.eve_energy_plug_patched_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Energy Plug Patched Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.eve_energy_plug_patched_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[extended_color_light][select.mock_extended_color_light_lighting-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Dark', + 'Medium', + 'Light', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_extended_color_light_lighting', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lighting', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[extended_color_light][select.mock_extended_color_light_lighting-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Extended Color Light Lighting', + 'options': list([ + 'Dark', + 'Medium', + 'Light', + ]), + }), + 'context': , + 'entity_id': 'select.mock_extended_color_light_lighting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Dark', + }) +# --- +# name: test_selects[extended_color_light][select.mock_extended_color_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_extended_color_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[extended_color_light][select.mock_extended_color_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Extended Color Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_extended_color_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_dimming_edge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Leading', + 'Trailing', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_dimming_edge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimming Edge', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-3-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_dimming_edge-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Dimming Edge', + 'options': list([ + 'Leading', + 'Trailing', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_dimming_edge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Leading', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_dimming_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Instant', + '500ms', + '800ms', + '1s', + '1.5s', + '2s', + '2.5s', + '3s', + '3.5s', + '4s', + '5s', + '6s', + '7s', + '8s', + '10s', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_dimming_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimming Speed', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_dimming_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Dimming Speed', + 'options': list([ + 'Instant', + '500ms', + '800ms', + '1s', + '1.5s', + '2s', + '2.5s', + '3s', + '3.5s', + '4s', + '5s', + '6s', + '7s', + '8s', + '10s', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_dimming_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2s', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Red', + 'Orange', + 'Lemon', + 'Lime', + 'Green', + 'Teal', + 'Cyan', + 'Aqua', + 'Blue', + 'Violet', + 'Magenta', + 'Pink', + 'White', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED Color', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli LED Color', + 'options': list([ + 'Red', + 'Orange', + 'Lemon', + 'Lime', + 'Green', + 'Teal', + 'Cyan', + 'Aqua', + 'Blue', + 'Violet', + 'Magenta', + 'Pink', + 'White', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Lemon', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 
'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_power_on_behavior_on_startup_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Power-on behavior on startup (1)', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_power_on_behavior_on_startup_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_power_on_behavior_on_startup_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup (6)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Power-on behavior on startup (6)', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_power_on_behavior_on_startup_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_relay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Relay Click Enable', + 'Relay Click Disable', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_relay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relay', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_selects[multi_endpoint_light][select.inovelli_relay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Relay', + 'options': list([ + 'Relay Click Enable', + 'Relay Click Disable', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_relay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Relay Click Disable', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_smart_bulb_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Smart Bulb Disable', + 'Smart Bulb Enable', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_smart_bulb_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smart Bulb Mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-2-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_smart_bulb_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Smart Bulb Mode', + 'options': list([ + 'Smart Bulb Disable', + 'Smart Bulb Enable', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_smart_bulb_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Smart Bulb Disable', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_switch_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'OnOff+Single', + 'OnOff+Dumb', + 'OnOff+AUX', + 'OnOff+Full Wave', + 'Dimmer+Single', + 'Dimmer+Dumb', + 'Dimmer+Aux', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_switch_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch Mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_switch_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Switch Mode', + 'options': list([ + 'OnOff+Single', + 'OnOff+Dumb', + 'OnOff+AUX', + 'OnOff+Full Wave', + 'Dimmer+Single', + 'Dimmer+Dumb', + 'Dimmer+Aux', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_switch_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Dimmer+Single', + }) +# --- +# name: test_selects[on_off_plugin_unit][select.mock_onoffpluginunit_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_onoffpluginunit_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[on_off_plugin_unit][select.mock_onoffpluginunit_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_onoffpluginunit_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[onoff_light][select.mock_onoff_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[onoff_light][select.mock_onoff_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[onoff_light_alt_name][select.mock_onoff_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': 
'00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[onoff_light_alt_name][select.mock_onoff_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[onoff_light_no_name][select.mock_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[onoff_light_no_name][select.mock_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[onoff_light_with_levelcontrol_present][select.d215s_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.d215s_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[onoff_light_with_levelcontrol_present][select.d215s_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'D215S Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.d215s_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: 
test_selects[silabs_dishwasher][select.dishwasher_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.dishwasher_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-MatterDishwasherMode-89-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[silabs_dishwasher][select.dishwasher_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Mode', + 'options': list([ + ]), + }), + 'context': , + 'entity_id': 'select.dishwasher_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_selects[switch_unit][select.mock_switchunit_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_switchunit_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[switch_unit][select.mock_switchunit_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock SwitchUnit Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_switchunit_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[vacuum_cleaner][select.mock_vacuum_clean_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Quick', + 'Auto', + 'Deep Clean', + 'Quiet', + 'Max Vac', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.mock_vacuum_clean_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clean mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clean_mode', + 'unique_id': '00000000000004D2-0000000000000042-MatterNodeDevice-1-MatterRvcCleanMode-85-1', + 
'unit_of_measurement': None, + }) +# --- +# name: test_selects[vacuum_cleaner][select.mock_vacuum_clean_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Vacuum Clean mode', + 'options': list([ + 'Quick', + 'Auto', + 'Deep Clean', + 'Quiet', + 'Max Vac', + ]), + }), + 'context': , + 'entity_id': 'select.mock_vacuum_clean_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Quick', + }) +# --- diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..60a3d33a130 --- /dev/null +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -0,0 +1,3115 @@ +# serializer version: 1 +# name: test_sensors[air_purifier][sensor.air_purifier_activated_carbon_filter_condition-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_activated_carbon_filter_condition', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Activated carbon filter condition', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'activated_carbon_filter_condition', + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-ActivatedCarbonFilterCondition-114-0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_activated_carbon_filter_condition-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Air Purifier Activated carbon filter condition', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_activated_carbon_filter_condition', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'extremely_poor', + 'very_poor', + 'poor', + 'fair', + 'good', + 'moderate', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Air quality', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_quality', + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-AirQuality-91-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Air Purifier Air quality', + 'options': list([ + 'extremely_poor', + 'very_poor', + 'poor', + 'fair', + 'good', + 'moderate', + ]), + }), + 'context': , + 'entity_id': 'sensor.air_purifier_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'good', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_carbon_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_carbon_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon dioxide', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-CarbonDioxideSensor-1037-0', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_carbon_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'Air Purifier Carbon dioxide', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_carbon_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_carbon_monoxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_carbon_monoxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon monoxide', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-CarbonMonoxideSensor-1036-0', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_carbon_monoxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_monoxide', + 'friendly_name': 'Air Purifier Carbon monoxide', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_carbon_monoxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_hepa_filter_condition-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_hepa_filter_condition', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hepa filter condition', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hepa_filter_condition', + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-HepaFilterCondition-113-0', + 'unit_of_measurement': '%', + }) +# 
--- +# name: test_sensors[air_purifier][sensor.air_purifier_hepa_filter_condition-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Air Purifier Hepa filter condition', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_hepa_filter_condition', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-4-HumiditySensor-1029-0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Air Purifier Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.0', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_nitrogen_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_nitrogen_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Nitrogen dioxide', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-NitrogenDioxideSensor-1043-0', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_nitrogen_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'nitrogen_dioxide', + 'friendly_name': 'Air Purifier Nitrogen dioxide', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_nitrogen_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_ozone-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_ozone', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 
None, + 'original_name': 'Ozone', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-OzoneConcentrationSensor-1045-0', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_ozone-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'ozone', + 'friendly_name': 'Air Purifier Ozone', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_ozone', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM1', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-PM1Sensor-1068-0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': 'Air Purifier PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-PM10Sensor-1069-0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'Air Purifier PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-PM25Sensor-1066-0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'Air Purifier PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-3-TemperatureSensor-1026-0', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Air Purifier Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.air_purifier_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.0', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_vocs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.air_purifier_vocs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VOCs', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-TotalVolatileOrganicCompoundsSensor-1070-0', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[air_purifier][sensor.air_purifier_vocs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds_parts', + 'friendly_name': 'Air Purifier VOCs', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.air_purifier_vocs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'options': list([ + 'extremely_poor', + 'very_poor', + 'poor', + 'fair', + 'good', + 'moderate', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Air quality', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_quality', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-AirQuality-91-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'lightfi-aq1-air-quality-sensor Air quality', + 'options': list([ + 'extremely_poor', + 'very_poor', + 'poor', + 'fair', + 'good', + 'moderate', + ]), + }), + 'context': , + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon dioxide', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-CarbonDioxideSensor-1037-0', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'lightfi-aq1-air-quality-sensor Carbon dioxide', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '678.0', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'00000000000004D2-0000000000000001-MatterNodeDevice-1-HumiditySensor-1029-0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'lightfi-aq1-air-quality-sensor Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.75', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_nitrogen_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_nitrogen_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Nitrogen dioxide', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-NitrogenDioxideSensor-1043-0', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_nitrogen_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'nitrogen_dioxide', + 'friendly_name': 'lightfi-aq1-air-quality-sensor Nitrogen dioxide', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_nitrogen_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM1', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PM1Sensor-1068-0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': 'lightfi-aq1-air-quality-sensor PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.0', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PM10Sensor-1069-0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'lightfi-aq1-air-quality-sensor PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.0', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PM25Sensor-1066-0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'lightfi-aq1-air-quality-sensor PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.0', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-TemperatureSensor-1026-0', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 
'lightfi-aq1-air-quality-sensor Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.08', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_vocs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_vocs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VOCs', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-TotalVolatileOrganicCompoundsSensor-1070-0', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_vocs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds_parts', + 'friendly_name': 'lightfi-aq1-air-quality-sensor VOCs', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_vocs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '189.0', + }) +# --- +# name: test_sensors[door_lock][sensor.mock_door_lock_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[door_lock][sensor.mock_door_lock_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Battery type', + }), + 'context': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- +# name: test_sensors[door_lock_with_unbolt][sensor.mock_door_lock_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[door_lock_with_unbolt][sensor.mock_door_lock_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Battery type', + }), + 'context': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_door_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSource-47-12', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Eve Door Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.eve_door_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_door_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Door Battery type', + }), + 'context': , + 'entity_id': 'sensor.eve_door_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_door_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatVoltage-47-11', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Eve Door Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_door_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.558', + }) +# --- +# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_energy_plug_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-EveEnergySensorWattCurrent-319486977-319422473', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Eve Energy Plug Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_energy_plug_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_energy_plug_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-EveEnergySensorWattAccumulated-319486977-319422475', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eve Energy Plug Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_energy_plug_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.220000028610229', + }) +# --- +# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_energy_plug_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-EveEnergySensorWatt-319486977-319422474', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Eve Energy Plug Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_energy_plug_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_energy_plug_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-EveEnergySensorVoltage-319486977-319422472', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Eve Energy Plug Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_energy_plug_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '238.800003051758', + }) +# --- +# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_energy_plug_patched_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-2-ElectricalPowerMeasurementActiveCurrent-144-5', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Eve Energy Plug Patched Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_energy_plug_patched_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_energy_plug_patched_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-2-ElectricalEnergyMeasurementCumulativeEnergyImported-145-1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eve Energy Plug Patched Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_energy_plug_patched_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0025', + }) +# --- +# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_energy_plug_patched_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-2-ElectricalPowerMeasurementWatt-144-8', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Eve Energy Plug Patched Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_energy_plug_patched_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '550.0', + }) +# --- +# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_energy_plug_patched_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-2-ElectricalPowerMeasurementVoltage-144-4', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Eve Energy Plug Patched Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_energy_plug_patched_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '220.0', + }) +# --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_thermo_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-0-PowerSource-47-12', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Eve Thermo Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.eve_thermo_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_thermo_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-0-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Thermo Battery type', + }), + 'context': , + 'entity_id': 'sensor.eve_thermo_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- 
+# name: test_sensors[eve_thermo][sensor.eve_thermo_valve_position-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eve_thermo_valve_position', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Valve position', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'valve_position', + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-EveThermoValvePosition-319486977-319422488', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_valve_position-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Thermo Valve position', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.eve_thermo_valve_position', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_thermo_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-0-PowerSourceBatVoltage-47-11', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Eve Thermo Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_thermo_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.05', + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_weather_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-0-PowerSource-47-12', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Eve Weather Battery', + 'state_class': , + 'unit_of_measurement': '%', + 
}), + 'context': , + 'entity_id': 'sensor.eve_weather_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_weather_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-0-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Weather Battery type', + }), + 'context': , + 'entity_id': 'sensor.eve_weather_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eve_weather_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-HumiditySensor-1029-0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Eve Weather Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.eve_weather_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80.66', + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eve_weather_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-EveWeatherPressure-319486977-319422484', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Eve Weather Pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_weather_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1008.5', + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eve_weather_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-TemperatureSensor-1026-0', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Eve Weather Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_weather_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16.03', + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_weather_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-0-PowerSourceBatVoltage-47-11', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Eve Weather Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eve_weather_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.956', + }) +# --- +# name: test_sensors[flow_sensor][sensor.mock_flow_sensor_flow-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_flow_sensor_flow', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Flow', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flow', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-FlowSensor-1028-0', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[flow_sensor][sensor.mock_flow_sensor_flow-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Flow Sensor Flow', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_flow_sensor_flow', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[humidity_sensor][sensor.mock_humidity_sensor_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_humidity_sensor_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-HumiditySensor-1029-0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[humidity_sensor][sensor.mock_humidity_sensor_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Mock Humidity Sensor Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.mock_humidity_sensor_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[light_sensor][sensor.mock_light_sensor_illuminance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_light_sensor_illuminance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Illuminance', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-LightSensor-1024-0', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_sensors[light_sensor][sensor.mock_light_sensor_illuminance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'illuminance', + 'friendly_name': 'Mock Light Sensor Illuminance', + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.mock_light_sensor_illuminance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.3', + }) +# --- +# name: test_sensors[microwave_oven][sensor.microwave_oven_operational_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stopped', + 'running', + 'paused', + 'error', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.microwave_oven_operational_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Operational state', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operational_state', + 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalState-96-4', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[microwave_oven][sensor.microwave_oven_operational_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Microwave Oven Operational state', + 'options': list([ + 'stopped', + 'running', + 'paused', + 'error', + ]), + }), + 'context': , + 'entity_id': 'sensor.microwave_oven_operational_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stopped', + }) +# --- +# name: test_sensors[pressure_sensor][sensor.mock_pressure_sensor_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_pressure_sensor_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PressureSensor-1027-0', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[pressure_sensor][sensor.mock_pressure_sensor_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Mock Pressure Sensor Pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_pressure_sensor_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[room_airconditioner][sensor.room_airconditioner_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_airconditioner_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-2-TemperatureSensor-1026-0', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[room_airconditioner][sensor.room_airconditioner_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Room AirConditioner Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.room_airconditioner_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dishwasher_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-2-ElectricalPowerMeasurementActiveCurrent-144-5', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Dishwasher Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dishwasher_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dishwasher_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-2-ElectricalEnergyMeasurementCumulativeEnergyImported-145-1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Dishwasher Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dishwasher_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_operational_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stopped', + 'running', + 'paused', + 'error', + 'extra_state', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.dishwasher_operational_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Operational state', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operational_state', + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalState-96-4', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_operational_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Dishwasher Operational state', + 'options': list([ + 'stopped', + 'running', + 'paused', + 'error', + 'extra_state', + ]), + }), + 'context': , + 'entity_id': 'sensor.dishwasher_operational_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stopped', + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dishwasher_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-2-ElectricalPowerMeasurementWatt-144-8', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dishwasher Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dishwasher_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dishwasher_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-2-ElectricalPowerMeasurementVoltage-144-4', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[silabs_dishwasher][sensor.dishwasher_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Dishwasher Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dishwasher_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '120.0', + }) +# --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smoke_sensor_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSource-47-12', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Smoke sensor Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.smoke_sensor_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '94', + }) +# --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smoke_sensor_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smoke sensor Battery type', + }), + 'context': , + 'entity_id': 'sensor.smoke_sensor_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'CR123A', + }) +# --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smoke_sensor_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatVoltage-47-11', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smoke sensor Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smoke_sensor_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# 
name: test_sensors[temperature_sensor][sensor.mock_temperature_sensor_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_temperature_sensor_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-TemperatureSensor-1026-0', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[temperature_sensor][sensor.mock_temperature_sensor_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Temperature Sensor Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_temperature_sensor_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.0', + }) +# --- diff --git a/tests/components/matter/snapshots/test_switch.ambr b/tests/components/matter/snapshots/test_switch.ambr new file mode 100644 index 00000000000..9396dccd245 --- /dev/null +++ b/tests/components/matter/snapshots/test_switch.ambr @@ -0,0 +1,377 @@ +# serializer version: 1 +# name: test_switches[door_lock][switch.mock_door_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_door_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterSwitch-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[door_lock][switch.mock_door_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Mock Door Lock', + }), + 'context': , + 'entity_id': 'switch.mock_door_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[door_lock_with_unbolt][switch.mock_door_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_door_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterSwitch-6-0', + 'unit_of_measurement': None, + }) +# --- 
+# name: test_switches[door_lock_with_unbolt][switch.mock_door_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Mock Door Lock', + }), + 'context': , + 'entity_id': 'switch.mock_door_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[eve_energy_plug][switch.eve_energy_plug-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.eve_energy_plug', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-MatterPlug-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[eve_energy_plug][switch.eve_energy_plug-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Eve Energy Plug', + }), + 'context': , + 'entity_id': 'switch.eve_energy_plug', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[eve_energy_plug_patched][switch.eve_energy_plug_patched-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.eve_energy_plug_patched', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-1-MatterPlug-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[eve_energy_plug_patched][switch.eve_energy_plug_patched-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Eve Energy Plug Patched', + }), + 'context': , + 'entity_id': 'switch.eve_energy_plug_patched', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[on_off_plugin_unit][switch.mock_onoffpluginunit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_onoffpluginunit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterPlug-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_switches[on_off_plugin_unit][switch.mock_onoffpluginunit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Mock OnOffPluginUnit', + }), + 'context': , + 'entity_id': 'switch.mock_onoffpluginunit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[room_airconditioner][switch.room_airconditioner_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.room_airconditioner_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power', + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterPowerToggle-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[room_airconditioner][switch.room_airconditioner_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Room AirConditioner Power', + }), + 'context': , + 'entity_id': 'switch.room_airconditioner_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[switch_unit][switch.mock_switchunit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_switchunit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterSwitch-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch_unit][switch.mock_switchunit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Mock SwitchUnit', + }), + 'context': , + 'entity_id': 'switch.mock_switchunit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[thermostat][switch.longan_link_hvac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.longan_link_hvac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterSwitch-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_switches[thermostat][switch.longan_link_hvac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Longan link HVAC', + }), + 'context': , + 'entity_id': 'switch.longan_link_hvac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/matter/snapshots/test_vacuum.ambr b/tests/components/matter/snapshots/test_vacuum.ambr new file mode 100644 index 00000000000..9e6b52ed572 --- /dev/null +++ b/tests/components/matter/snapshots/test_vacuum.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_vacuum[vacuum_cleaner][vacuum.mock_vacuum-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'vacuum', + 'entity_category': None, + 'entity_id': 'vacuum.mock_vacuum', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000042-MatterNodeDevice-1-MatterVacuumCleaner-84-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_vacuum[vacuum_cleaner][vacuum.mock_vacuum-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Vacuum', + 'supported_features': , + }), + 'context': , + 'entity_id': 'vacuum.mock_vacuum', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- diff --git a/tests/components/matter/snapshots/test_valve.ambr b/tests/components/matter/snapshots/test_valve.ambr new file mode 100644 index 00000000000..98634635476 --- /dev/null +++ b/tests/components/matter/snapshots/test_valve.ambr @@ -0,0 +1,49 @@ +# serializer version: 1 +# name: test_valves[valve][valve.valve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'valve', + 'entity_category': None, + 'entity_id': 'valve.valve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-MatterValve-129-4', + 'unit_of_measurement': None, + }) +# --- +# name: test_valves[valve][valve.valve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Valve', + 'supported_features': , + }), + 'context': , + 'entity_id': 'valve.valve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- diff --git a/tests/components/matter/test_adapter.py b/tests/components/matter/test_adapter.py index da2ef179c44..01dff3b7899 100644 --- a/tests/components/matter/test_adapter.py +++ b/tests/components/matter/test_adapter.py @@ -4,9 +4,7 @@ from __future__ import annotations from unittest.mock import MagicMock -from matter_server.client.models.node import MatterNode -from matter_server.common.helpers.util import dataclass_from_dict -from 
matter_server.common.models import EventType, MatterNodeData +from matter_server.common.models import EventType import pytest from homeassistant.components.matter.adapter import get_clean_name @@ -14,33 +12,26 @@ from homeassistant.components.matter.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .common import load_and_parse_node_fixture, setup_integration_with_node_fixture +from .common import create_node_from_fixture + +from tests.common import MockConfigEntry -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_node") @pytest.mark.parametrize( ("node_fixture", "name"), [ - ("onoff-light", "Mock OnOff Light"), - ("onoff-light-alt-name", "Mock OnOff Light"), - ("onoff-light-no-name", "Mock Light"), + ("onoff_light", "Mock OnOff Light"), + ("onoff_light_alt_name", "Mock OnOff Light"), + ("onoff_light_no_name", "Mock Light"), ], ) async def test_device_registry_single_node_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - matter_client: MagicMock, - node_fixture: str, name: str, ) -> None: """Test bridge devices are set up correctly with via_device.""" - await setup_integration_with_node_fixture( - hass, - node_fixture, - matter_client, - ) - entry = device_registry.async_get_device( identifiers={ (DOMAIN, "deviceid_00000000000004D2-0000000000000001-MatterNodeDevice") @@ -54,25 +45,19 @@ async def test_device_registry_single_node_device( assert entry.name == name assert entry.manufacturer == "Nabu Casa" assert entry.model == "Mock Light" + assert entry.model_id == "32768" assert entry.hw_version == "v1.0" assert entry.sw_version == "v1.0" assert entry.serial_number == "12345678" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_node") +@pytest.mark.parametrize("node_fixture", ["on_off_plugin_unit"]) async def test_device_registry_single_node_device_alt( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - matter_client: MagicMock, ) -> None: """Test additional device with different attribute values.""" - await setup_integration_with_node_fixture( - hass, - "on-off-plugin-unit", - matter_client, - ) - entry = device_registry.async_get_device( identifiers={ (DOMAIN, "deviceid_00000000000004D2-0000000000000001-MatterNodeDevice") @@ -88,19 +73,14 @@ async def test_device_registry_single_node_device_alt( assert entry.serial_number is None +@pytest.mark.usefixtures("matter_node") @pytest.mark.skip("Waiting for a new test fixture") +@pytest.mark.parametrize("node_fixture", ["fake_bridge_two_light"]) async def test_device_registry_bridge( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - matter_client: MagicMock, ) -> None: """Test bridge devices are set up correctly with via_device.""" - await setup_integration_with_node_fixture( - hass, - "fake-bridge-two-light", - matter_client, - ) - # Validate bridge bridge_entry = device_registry.async_get_device( identifiers={(DOMAIN, "mock-hub-id")} @@ -140,12 +120,10 @@ async def test_device_registry_bridge( assert device2_entry.sw_version == "1.49.1" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("integration") async def test_node_added_subscription( hass: HomeAssistant, matter_client: MagicMock, - integration: MagicMock, ) -> None: 
"""Test subscription to new devices work.""" assert matter_client.subscribe_events.call_count == 5 @@ -155,48 +133,32 @@ async def test_node_added_subscription( ) node_added_callback = matter_client.subscribe_events.call_args.kwargs["callback"] - node_data = load_and_parse_node_fixture("onoff-light") - node = MatterNode( - dataclass_from_dict( - MatterNodeData, - node_data, - ) - ) + node = create_node_from_fixture("onoff_light") - entity_state = hass.states.get("light.mock_onoff_light_light") + entity_state = hass.states.get("light.mock_onoff_light") assert not entity_state node_added_callback(EventType.NODE_ADDED, node) await hass.async_block_till_done() - entity_state = hass.states.get("light.mock_onoff_light_light") + entity_state = hass.states.get("light.mock_onoff_light") assert entity_state +@pytest.mark.usefixtures("matter_node") +@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_device_registry_single_node_composed_device( hass: HomeAssistant, - matter_client: MagicMock, + device_registry: dr.DeviceRegistry, ) -> None: """Test that a composed device within a standalone node only creates one HA device entry.""" - await setup_integration_with_node_fixture( - hass, - "air-purifier", - matter_client, - ) - dev_reg = dr.async_get(hass) - assert len(dev_reg.devices) == 1 + assert len(device_registry.devices) == 1 -async def test_multi_endpoint_name( - hass: HomeAssistant, - matter_client: MagicMock, -) -> None: +@pytest.mark.usefixtures("matter_node") +@pytest.mark.parametrize("node_fixture", ["multi_endpoint_light"]) +async def test_multi_endpoint_name(hass: HomeAssistant) -> None: """Test that the entity name gets postfixed if the device has multiple primary endpoints.""" - await setup_integration_with_node_fixture( - hass, - "multi-endpoint-light", - matter_client, - ) entity_state = hass.states.get("light.inovelli_light_1") assert entity_state assert entity_state.name == "Inovelli Light (1)" @@ -205,7 +167,7 @@ async def test_multi_endpoint_name( assert entity_state.name == "Inovelli Light (6)" -async def test_get_clean_name_() -> None: +async def test_get_clean_name() -> None: """Test get_clean_name helper. 
Test device names that are assigned to `null` @@ -217,3 +179,27 @@ async def test_get_clean_name_() -> None: assert get_clean_name("") is None assert get_clean_name("Mock device") == "Mock device" assert get_clean_name("Mock device \x00") == "Mock device" + + +async def test_bad_node_not_crash_integration( + hass: HomeAssistant, + matter_client: MagicMock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that a bad node does not crash the integration.""" + good_node = create_node_from_fixture("onoff_light") + bad_node = create_node_from_fixture("onoff_light") + del bad_node.endpoints[0].node + matter_client.get_nodes.return_value = [good_node, bad_node] + config_entry = MockConfigEntry( + domain="matter", data={"url": "http://mock-matter-server-url"} + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert matter_client.get_nodes.call_count == 1 + assert hass.states.get("light.mock_onoff_light") is not None + assert len(hass.states.async_all("light")) == 1 + assert "Error setting up node" in caplog.text diff --git a/tests/components/matter/test_api.py b/tests/components/matter/test_api.py index 853da113e21..b131ca9eb19 100644 --- a/tests/components/matter/test_api.py +++ b/tests/components/matter/test_api.py @@ -23,14 +23,10 @@ from homeassistant.components.matter.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .common import setup_integration_with_node_fixture - from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_commission( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -67,8 +63,6 @@ async def test_commission( matter_client.commission_with_code.assert_called_once_with("12345678", False) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_commission_on_network( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -105,8 +99,6 @@ async def test_commission_on_network( matter_client.commission_on_network.assert_called_once_with(1234, "1.2.3.4") -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_set_thread_dataset( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -147,8 +139,6 @@ async def test_set_thread_dataset( matter_client.set_thread_operational_dataset.assert_called_once_with("test_dataset") -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_set_wifi_credentials( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -197,8 +187,9 @@ async def test_set_wifi_credentials( ) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_node") +# setup (mock) integration with a random node fixture +@pytest.mark.parametrize("node_fixture", ["onoff_light"]) async def test_node_diagnostics( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -206,12 +197,6 @@ async def test_node_diagnostics( matter_client: MagicMock, ) -> None: """Test the node diagnostics command.""" - # setup (mock) integration with a random node fixture - await 
setup_integration_with_node_fixture( - hass, - "onoff-light", - matter_client, - ) # get the device registry entry for the mocked node entry = device_registry.async_get_device( identifiers={ @@ -271,8 +256,9 @@ async def test_node_diagnostics( assert msg["error"]["code"] == ERROR_NODE_NOT_FOUND -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_node") +# setup (mock) integration with a random node fixture +@pytest.mark.parametrize("node_fixture", ["onoff_light"]) async def test_ping_node( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -280,12 +266,6 @@ async def test_ping_node( matter_client: MagicMock, ) -> None: """Test the ping_node command.""" - # setup (mock) integration with a random node fixture - await setup_integration_with_node_fixture( - hass, - "onoff-light", - matter_client, - ) # get the device registry entry for the mocked node entry = device_registry.async_get_device( identifiers={ @@ -331,8 +311,9 @@ async def test_ping_node( assert msg["error"]["code"] == ERROR_NODE_NOT_FOUND -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_node") +# setup (mock) integration with a random node fixture +@pytest.mark.parametrize("node_fixture", ["onoff_light"]) async def test_open_commissioning_window( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -340,12 +321,6 @@ async def test_open_commissioning_window( matter_client: MagicMock, ) -> None: """Test the open_commissioning_window command.""" - # setup (mock) integration with a random node fixture - await setup_integration_with_node_fixture( - hass, - "onoff-light", - matter_client, - ) # get the device registry entry for the mocked node entry = device_registry.async_get_device( identifiers={ @@ -397,8 +372,9 @@ async def test_open_commissioning_window( assert msg["error"]["code"] == ERROR_NODE_NOT_FOUND -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_node") +# setup (mock) integration with a random node fixture +@pytest.mark.parametrize("node_fixture", ["onoff_light"]) async def test_remove_matter_fabric( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -406,12 +382,6 @@ async def test_remove_matter_fabric( matter_client: MagicMock, ) -> None: """Test the remove_matter_fabric command.""" - # setup (mock) integration with a random node fixture - await setup_integration_with_node_fixture( - hass, - "onoff-light", - matter_client, - ) # get the device registry entry for the mocked node entry = device_registry.async_get_device( identifiers={ @@ -453,8 +423,9 @@ async def test_remove_matter_fabric( assert msg["error"]["code"] == ERROR_NODE_NOT_FOUND -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_node") +# setup (mock) integration with a random node fixture +@pytest.mark.parametrize("node_fixture", ["onoff_light"]) async def test_interview_node( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -462,12 +433,6 @@ async def test_interview_node( matter_client: MagicMock, ) -> None: """Test the interview_node command.""" - # setup (mock) integration with a random node fixture - await setup_integration_with_node_fixture( - hass, - "onoff-light", - matter_client, - ) # get the 
device registry entry for the mocked node entry = device_registry.async_get_device( identifiers={ diff --git a/tests/components/matter/test_binary_sensor.py b/tests/components/matter/test_binary_sensor.py index f419a12c59f..cddee975ac8 100644 --- a/tests/components/matter/test_binary_sensor.py +++ b/tests/components/matter/test_binary_sensor.py @@ -4,18 +4,20 @@ from collections.abc import Generator from unittest.mock import MagicMock, patch from matter_server.client.models.node import MatterNode +from matter_server.common.models import EventType import pytest +from syrupy import SnapshotAssertion from homeassistant.components.matter.binary_sensor import ( DISCOVERY_SCHEMAS as BINARY_SENSOR_SCHEMAS, ) -from homeassistant.const import EntityCategory, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - setup_integration_with_node_fixture, + snapshot_matter_entities, trigger_subscription_callback, ) @@ -32,31 +34,30 @@ def binary_sensor_platform() -> Generator[None]: yield -@pytest.fixture(name="occupancy_sensor_node") -async def occupancy_sensor_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a occupancy sensor node.""" - return await setup_integration_with_node_fixture( - hass, "occupancy-sensor", matter_client - ) +@pytest.mark.usefixtures("matter_devices") +async def test_binary_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test binary sensors.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.BINARY_SENSOR) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["occupancy_sensor"]) async def test_occupancy_sensor( hass: HomeAssistant, matter_client: MagicMock, - occupancy_sensor_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test occupancy sensor.""" state = hass.states.get("binary_sensor.mock_occupancy_sensor_occupancy") assert state assert state.state == "on" - set_node_attribute(occupancy_sensor_node, 1, 1030, 0, 0) + set_node_attribute(matter_node, 1, 1030, 0, 0) await trigger_subscription_callback( - hass, matter_client, data=(occupancy_sensor_node.node_id, "1/1030/0", 0) + hass, matter_client, data=(matter_node.node_id, "1/1030/0", 0) ) state = hass.states.get("binary_sensor.mock_occupancy_sensor_occupancy") @@ -64,36 +65,29 @@ async def test_occupancy_sensor( assert state.state == "off" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("eve-contact-sensor", "binary_sensor.eve_door_door"), - ("leak-sensor", "binary_sensor.water_leak_detector_water_leak"), + ("eve_contact_sensor", "binary_sensor.eve_door_door"), + ("leak_sensor", "binary_sensor.water_leak_detector_water_leak"), ], ) async def test_boolean_state_sensors( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test if binary sensors get created from devices with Boolean State cluster.""" - node = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) state = hass.states.get(entity_id) assert state assert state.state == "on" # invert the value - cur_attr_value = 
node.get_attribute_value(1, 69, 0) - set_node_attribute(node, 1, 69, 0, not cur_attr_value) + cur_attr_value = matter_node.get_attribute_value(1, 69, 0) + set_node_attribute(matter_node, 1, 69, 0, not cur_attr_value) await trigger_subscription_callback( - hass, matter_client, data=(node.node_id, "1/69/0", not cur_attr_value) + hass, matter_client, data=(matter_node.node_id, "1/69/0", not cur_attr_value) ) state = hass.states.get(entity_id) @@ -101,13 +95,12 @@ async def test_boolean_state_sensors( assert state.state == "off" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["door_lock"]) async def test_battery_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, matter_client: MagicMock, - door_lock: MatterNode, + matter_node: MatterNode, ) -> None: """Test battery sensor.""" entity_id = "binary_sensor.mock_door_lock_battery" @@ -115,16 +108,42 @@ async def test_battery_sensor( assert state assert state.state == "off" - set_node_attribute(door_lock, 1, 47, 14, 1) + set_node_attribute(matter_node, 1, 47, 14, 1) await trigger_subscription_callback( - hass, matter_client, data=(door_lock.node_id, "1/47/14", 1) + hass, matter_client, data=(matter_node.node_id, "1/47/14", 1) ) state = hass.states.get(entity_id) assert state assert state.state == "on" - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.entity_category == EntityCategory.DIAGNOSTIC +@pytest.mark.parametrize("node_fixture", ["door_lock"]) +async def test_optional_sensor_from_featuremap( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test discovery of optional doorsensor in doorlock featuremap.""" + entity_id = "binary_sensor.mock_door_lock_door" + state = hass.states.get(entity_id) + assert state is None + + # update the feature map to include the optional door sensor feature + # and fire a node updated event + set_node_attribute(matter_node, 1, 257, 65532, 32) + await trigger_subscription_callback( + hass, matter_client, event=EventType.NODE_UPDATED, data=matter_node + ) + # this should result in a new binary sensor entity being discovered + state = hass.states.get(entity_id) + assert state + assert state.state == "off" + # now test the reverse, by removing the feature from the feature map + set_node_attribute(matter_node, 1, 257, 65532, 0) + await trigger_subscription_callback( + hass, matter_client, data=(matter_node.node_id, "1/257/65532", 0) + ) + state = hass.states.get(entity_id) + assert state is None diff --git a/tests/components/matter/test_button.py b/tests/components/matter/test_button.py new file mode 100644 index 00000000000..cbf62dd80c7 --- /dev/null +++ b/tests/components/matter/test_button.py @@ -0,0 +1,82 @@ +"""Test Matter buttons.""" + +from unittest.mock import MagicMock, call + +from chip.clusters import Objects as clusters +from matter_server.client.models.node import MatterNode +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import snapshot_matter_entities + + +@pytest.mark.usefixtures("matter_devices") +async def test_buttons( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test buttons.""" + snapshot_matter_entities(hass, entity_registry,
snapshot, Platform.BUTTON) + + +@pytest.mark.parametrize("node_fixture", ["eve_energy_plug"]) +async def test_identify_button( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test button entity is created for a Matter Identify Cluster.""" + state = hass.states.get("button.eve_energy_plug_identify") + assert state + assert state.attributes["friendly_name"] == "Eve Energy Plug Identify" + # test press action + await hass.services.async_call( + "button", + "press", + { + "entity_id": "button.eve_energy_plug_identify", + }, + blocking=True, + ) + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.Identify.Commands.Identify(identifyTime=15), + ) + + +@pytest.mark.parametrize("node_fixture", ["silabs_dishwasher"]) +async def test_operational_state_buttons( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test if button entities are created for operational state commands.""" + assert hass.states.get("button.dishwasher_pause") + assert hass.states.get("button.dishwasher_start") + assert hass.states.get("button.dishwasher_stop") + + # resume may not be discovered as it's missing in the supported command list + assert hass.states.get("button.dishwasher_resume") is None + + # test press action + await hass.services.async_call( + "button", + "press", + { + "entity_id": "button.dishwasher_pause", + }, + blocking=True, + ) + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.OperationalState.Commands.Pause(), + ) diff --git a/tests/components/matter/test_climate.py b/tests/components/matter/test_climate.py index 4d6978edfde..037ec4e7626 100644 --- a/tests/components/matter/test_climate.py +++ b/tests/components/matter/test_climate.py @@ -6,45 +6,39 @@ from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode from matter_server.common.helpers.util import create_attribute_path_from_attribute import pytest +from syrupy import SnapshotAssertion from homeassistant.components.climate import ClimateEntityFeature, HVACAction, HVACMode +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - setup_integration_with_node_fixture, + snapshot_matter_entities, trigger_subscription_callback, ) -@pytest.fixture(name="thermostat") -async def thermostat_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a thermostat node.""" - return await setup_integration_with_node_fixture(hass, "thermostat", matter_client) +@pytest.mark.usefixtures("matter_devices") +async def test_climates( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test climates.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.CLIMATE) -@pytest.fixture(name="room_airconditioner") -async def room_airconditioner( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a room air conditioner node.""" - return await setup_integration_with_node_fixture( - hass, "room-airconditioner", matter_client - ) - - -# This tests needs to be adjusted to remove lingering tasks 
-@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["thermostat"]) async def test_thermostat_base( hass: HomeAssistant, matter_client: MagicMock, - thermostat: MatterNode, + matter_node: MatterNode, ) -> None: """Test thermostat base attributes and state updates.""" # test entity attributes - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["min_temp"] == 7 assert state.attributes["max_temp"] == 35 @@ -61,12 +55,12 @@ async def test_thermostat_base( assert state.attributes["supported_features"] & mask == mask # test common state updates from device - set_node_attribute(thermostat, 1, 513, 3, 1600) - set_node_attribute(thermostat, 1, 513, 4, 3000) - set_node_attribute(thermostat, 1, 513, 5, 1600) - set_node_attribute(thermostat, 1, 513, 6, 3000) + set_node_attribute(matter_node, 1, 513, 3, 1600) + set_node_attribute(matter_node, 1, 513, 4, 3000) + set_node_attribute(matter_node, 1, 513, 5, 1600) + set_node_attribute(matter_node, 1, 513, 6, 3000) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["min_temp"] == 16 assert state.attributes["max_temp"] == 30 @@ -78,95 +72,94 @@ async def test_thermostat_base( ] # test system mode update from device - set_node_attribute(thermostat, 1, 513, 28, 0) + set_node_attribute(matter_node, 1, 513, 28, 0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.OFF # test running state update from device - set_node_attribute(thermostat, 1, 513, 41, 1) + set_node_attribute(matter_node, 1, 513, 41, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.HEATING - set_node_attribute(thermostat, 1, 513, 41, 8) + set_node_attribute(matter_node, 1, 513, 41, 8) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.HEATING - set_node_attribute(thermostat, 1, 513, 41, 2) + set_node_attribute(matter_node, 1, 513, 41, 2) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.COOLING - set_node_attribute(thermostat, 1, 513, 41, 16) + set_node_attribute(matter_node, 1, 513, 41, 16) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.COOLING - set_node_attribute(thermostat, 1, 513, 41, 4) + set_node_attribute(matter_node, 1, 513, 41, 4) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert 
state.attributes["hvac_action"] == HVACAction.FAN - set_node_attribute(thermostat, 1, 513, 41, 32) + set_node_attribute(matter_node, 1, 513, 41, 32) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.FAN - set_node_attribute(thermostat, 1, 513, 41, 64) + set_node_attribute(matter_node, 1, 513, 41, 64) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.FAN - set_node_attribute(thermostat, 1, 513, 41, 66) + set_node_attribute(matter_node, 1, 513, 41, 66) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.OFF # change system mode to heat - set_node_attribute(thermostat, 1, 513, 28, 4) + set_node_attribute(matter_node, 1, 513, 28, 4) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.HEAT # change occupied heating setpoint to 20 - set_node_attribute(thermostat, 1, 513, 18, 2000) + set_node_attribute(matter_node, 1, 513, 18, 2000) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["temperature"] == 20 -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["thermostat"]) async def test_thermostat_service_calls( hass: HomeAssistant, matter_client: MagicMock, - thermostat: MatterNode, + matter_node: MatterNode, ) -> None: """Test climate platform service calls.""" # test single-setpoint temperature adjustment when cool mode is active - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.COOL await hass.services.async_call( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "temperature": 25, }, blocking=True, @@ -174,20 +167,20 @@ async def test_thermostat_service_calls( assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=thermostat.node_id, + node_id=matter_node.node_id, attribute_path="1/513/17", value=2500, ) matter_client.write_attribute.reset_mock() # ensure that no command is executed when the temperature is the same - set_node_attribute(thermostat, 1, 513, 17, 2500) + set_node_attribute(matter_node, 1, 513, 17, 2500) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "temperature": 25, }, blocking=True, @@ -197,9 +190,9 @@ async def test_thermostat_service_calls( matter_client.write_attribute.reset_mock() # test single-setpoint temperature adjustment when heat mode is active - 
set_node_attribute(thermostat, 1, 513, 28, 4) + set_node_attribute(matter_node, 1, 513, 28, 4) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.HEAT @@ -207,7 +200,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "temperature": 20, }, blocking=True, @@ -215,16 +208,16 @@ async def test_thermostat_service_calls( assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=thermostat.node_id, + node_id=matter_node.node_id, attribute_path="1/513/18", value=2000, ) matter_client.write_attribute.reset_mock() # test dual setpoint temperature adjustments when heat_cool mode is active - set_node_attribute(thermostat, 1, 513, 28, 1) + set_node_attribute(matter_node, 1, 513, 28, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.HEAT_COOL @@ -232,7 +225,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "target_temp_low": 10, "target_temp_high": 30, }, @@ -241,12 +234,12 @@ async def test_thermostat_service_calls( assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=thermostat.node_id, + node_id=matter_node.node_id, attribute_path="1/513/18", value=1000, ) assert matter_client.write_attribute.call_args_list[1] == call( - node_id=thermostat.node_id, + node_id=matter_node.node_id, attribute_path="1/513/17", value=3000, ) @@ -257,7 +250,7 @@ async def test_thermostat_service_calls( "climate", "set_hvac_mode", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "hvac_mode": HVACMode.HEAT, }, blocking=True, @@ -265,7 +258,7 @@ async def test_thermostat_service_calls( assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=thermostat.node_id, + node_id=matter_node.node_id, attribute_path=create_attribute_path_from_attribute( endpoint_id=1, attribute=clusters.Thermostat.Attributes.SystemMode, @@ -281,7 +274,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "temperature": 22, "hvac_mode": HVACMode.COOL, }, @@ -289,7 +282,7 @@ async def test_thermostat_service_calls( ) assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=thermostat.node_id, + node_id=matter_node.node_id, attribute_path=create_attribute_path_from_attribute( endpoint_id=1, attribute=clusters.Thermostat.Attributes.SystemMode, @@ -297,22 +290,21 @@ async def test_thermostat_service_calls( value=3, ) assert matter_client.write_attribute.call_args_list[1] == call( - node_id=thermostat.node_id, + node_id=matter_node.node_id, attribute_path="1/513/17", value=2200, ) matter_client.write_attribute.reset_mock() -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) 
+@pytest.mark.parametrize("node_fixture", ["room_airconditioner"]) async def test_room_airconditioner( hass: HomeAssistant, matter_client: MagicMock, - room_airconditioner: MatterNode, + matter_node: MatterNode, ) -> None: """Test if a climate entity is created for a Room Airconditioner device.""" - state = hass.states.get("climate.room_airconditioner_thermostat") + state = hass.states.get("climate.room_airconditioner") assert state assert state.attributes["current_temperature"] == 20 # room airconditioner has mains power on OnOff cluster with value set to False @@ -324,9 +316,9 @@ async def test_room_airconditioner( assert state.attributes["supported_features"] & mask == mask # set mains power to ON (OnOff cluster) - set_node_attribute(room_airconditioner, 1, 6, 0, True) + set_node_attribute(matter_node, 1, 6, 0, True) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner_thermostat") + state = hass.states.get("climate.room_airconditioner") # test supported HVAC modes include fan and dry modes assert state.attributes["hvac_modes"] == [ @@ -338,21 +330,21 @@ async def test_room_airconditioner( HVACMode.HEAT_COOL, ] # test fan-only hvac mode - set_node_attribute(room_airconditioner, 1, 513, 28, 7) + set_node_attribute(matter_node, 1, 513, 28, 7) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner_thermostat") + state = hass.states.get("climate.room_airconditioner") assert state assert state.state == HVACMode.FAN_ONLY # test dry hvac mode - set_node_attribute(room_airconditioner, 1, 513, 28, 8) + set_node_attribute(matter_node, 1, 513, 28, 8) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner_thermostat") + state = hass.states.get("climate.room_airconditioner") assert state assert state.state == HVACMode.DRY # test featuremap update - set_node_attribute(room_airconditioner, 1, 513, 65532, 1) + set_node_attribute(matter_node, 1, 513, 65532, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner_thermostat") + state = hass.states.get("climate.room_airconditioner") assert state.attributes["supported_features"] & ClimateEntityFeature.TURN_ON diff --git a/tests/components/matter/test_config_flow.py b/tests/components/matter/test_config_flow.py index a4ddc18802f..eed776c132e 100644 --- a/tests/components/matter/test_config_flow.py +++ b/tests/components/matter/test_config_flow.py @@ -5,16 +5,19 @@ from __future__ import annotations from collections.abc import Generator from ipaddress import ip_address from unittest.mock import AsyncMock, MagicMock, call, patch +from uuid import uuid4 +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import Discovery from matter_server.client.exceptions import CannotConnect, InvalidServerVersion import pytest from homeassistant import config_entries -from homeassistant.components.hassio import HassioAPIError, HassioServiceInfo from homeassistant.components.matter.const import ADDON_SLUG, DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry @@ -289,7 +292,19 @@ async def test_zeroconf_discovery_not_onboarded_not_supervisor( @pytest.mark.parametrize("zeroconf_info", 
[ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_zeroconf_not_onboarded_already_discovered( hass: HomeAssistant, supervisor: MagicMock, @@ -327,7 +342,19 @@ async def test_zeroconf_not_onboarded_already_discovered( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_zeroconf_not_onboarded_running( hass: HomeAssistant, supervisor: MagicMock, @@ -359,7 +386,19 @@ async def test_zeroconf_not_onboarded_running( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_zeroconf_not_onboarded_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -380,7 +419,7 @@ async def test_zeroconf_not_onboarded_installed( await hass.async_block_till_done() assert addon_info.call_count == 1 - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" @@ -393,7 +432,19 @@ async def test_zeroconf_not_onboarded_installed( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_zeroconf_not_onboarded_not_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -417,8 +468,8 @@ async def test_zeroconf_not_onboarded_not_installed( assert addon_info.call_count == 0 assert addon_store_info.call_count == 2 - assert install_addon.call_args == call(hass, "core_matter_server") - assert start_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" @@ -430,7 +481,19 @@ async def test_zeroconf_not_onboarded_not_installed( assert setup_entry.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_supervisor_discovery( hass: HomeAssistant, supervisor: MagicMock, @@ -468,7 +531,19 @@ async def test_supervisor_discovery( @pytest.mark.parametrize( ("discovery_info", "error"), - [({"config": ADDON_DISCOVERY_INFO}, HassioAPIError())], + [ + ( + [ + Discovery( + addon="core_matter_server", + service="matter", + 
uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), + ) + ], ) async def test_supervisor_discovery_addon_info_failed( hass: HomeAssistant, @@ -501,7 +576,19 @@ async def test_supervisor_discovery_addon_info_failed( assert result["reason"] == "addon_info_failed" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_clean_supervisor_discovery_on_user_create( hass: HomeAssistant, supervisor: MagicMock, @@ -682,7 +769,7 @@ async def test_supervisor_discovery_addon_not_running( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" @@ -732,7 +819,7 @@ async def test_supervisor_discovery_addon_not_installed( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -740,7 +827,7 @@ async def test_supervisor_discovery_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" @@ -792,7 +879,19 @@ async def test_not_addon( assert setup_entry.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_running( hass: HomeAssistant, supervisor: MagicMock, @@ -838,8 +937,15 @@ async def test_addon_running( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, - HassioAPIError(), + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), None, None, "addon_get_discovery_info_failed", @@ -847,7 +953,14 @@ async def test_addon_running( False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, CannotConnect(Exception("Boom")), None, @@ -856,7 +969,7 @@ async def test_addon_running( True, ), ( - None, + [], None, None, None, @@ -865,10 +978,17 @@ async def test_addon_running( False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, None, - HassioAPIError(), + SupervisorError(), "addon_info_failed", False, False, @@ -924,8 +1044,15 @@ async def test_addon_running_failures( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, - HassioAPIError(), + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + 
config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), None, None, "addon_get_discovery_info_failed", @@ -933,7 +1060,14 @@ async def test_addon_running_failures( False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, CannotConnect(Exception("Boom")), None, @@ -942,7 +1076,7 @@ async def test_addon_running_failures( True, ), ( - None, + [], None, None, None, @@ -951,10 +1085,17 @@ async def test_addon_running_failures( False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, None, - HassioAPIError(), + SupervisorError(), "addon_info_failed", False, False, @@ -995,7 +1136,19 @@ async def test_addon_running_failures_zeroconf( assert result["reason"] == abort_reason -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_running_already_configured( hass: HomeAssistant, supervisor: MagicMock, @@ -1033,7 +1186,19 @@ async def test_addon_running_already_configured( assert setup_entry.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -1062,7 +1227,7 @@ async def test_addon_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" assert result["data"] == { @@ -1083,21 +1248,35 @@ async def test_addon_installed( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, - HassioAPIError(), + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), None, False, False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, CannotConnect(Exception("Boom")), True, True, ), ( - None, + [], None, None, True, @@ -1140,7 +1319,7 @@ async def test_addon_installed_failures( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert get_addon_discovery_info.called is discovery_info_called assert client_connect.called is client_connect_called assert result["type"] is FlowResultType.ABORT @@ -1158,21 +1337,35 @@ async def test_addon_installed_failures( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, - HassioAPIError(), + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), None, False, False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + 
config=ADDON_DISCOVERY_INFO, + ) + ], None, CannotConnect(Exception("Boom")), True, True, ), ( - None, + [], None, None, True, @@ -1205,14 +1398,26 @@ async def test_addon_installed_failures_zeroconf( await hass.async_block_till_done() assert addon_info.call_count == 1 - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert get_addon_discovery_info.called is discovery_info_called assert client_connect.called is client_connect_called assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_start_failed" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_installed_already_configured( hass: HomeAssistant, supervisor: MagicMock, @@ -1250,7 +1455,7 @@ async def test_addon_installed_already_configured( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfiguration_successful" assert entry.data["url"] == "ws://host1:5581/ws" @@ -1258,7 +1463,19 @@ async def test_addon_installed_already_configured( assert setup_entry.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_not_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -1290,7 +1507,7 @@ async def test_addon_not_installed( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -1298,7 +1515,7 @@ async def test_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" assert result["data"] == { @@ -1317,7 +1534,7 @@ async def test_addon_not_installed_failures( install_addon: AsyncMock, ) -> None: """Test add-on install failure.""" - install_addon.side_effect = HassioAPIError() + install_addon.side_effect = SupervisorError() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -1337,7 +1554,7 @@ async def test_addon_not_installed_failures( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert addon_info.call_count == 0 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" @@ -1354,20 +1571,32 @@ async def test_addon_not_installed_failures_zeroconf( 
zeroconf_info: ZeroconfServiceInfo, ) -> None: """Test add-on install failure.""" - install_addon.side_effect = HassioAPIError() + install_addon.side_effect = SupervisorError() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf_info ) await hass.async_block_till_done() - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert addon_info.call_count == 0 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_not_installed_already_configured( hass: HomeAssistant, supervisor: MagicMock, @@ -1409,7 +1638,7 @@ async def test_addon_not_installed_already_configured( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -1417,7 +1646,7 @@ async def test_addon_not_installed_already_configured( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfiguration_successful" diff --git a/tests/components/matter/test_cover.py b/tests/components/matter/test_cover.py index f526205234d..224aabd9082 100644 --- a/tests/components/matter/test_cover.py +++ b/tests/components/matter/test_cover.py @@ -4,50 +4,50 @@ from math import floor from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters +from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.cover import ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - CoverEntityFeature, -) +from homeassistant.components.cover import CoverEntityFeature, CoverState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - setup_integration_with_node_fixture, + snapshot_matter_entities, trigger_subscription_callback, ) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_devices") +async def test_covers( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test covers.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.COVER) + + @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("window-covering_lift", "cover.mock_lift_window_covering_cover"), - ("window-covering_pa-lift", "cover.longan_link_wncv_da01_cover"), - ("window-covering_tilt", "cover.mock_tilt_window_covering_cover"), - ("window-covering_pa-tilt", "cover.mock_pa_tilt_window_covering_cover"), 
- ("window-covering_full", "cover.mock_full_window_covering_cover"), + ("window_covering_lift", "cover.mock_lift_window_covering"), + ("window_covering_pa_lift", "cover.longan_link_wncv_da01"), + ("window_covering_tilt", "cover.mock_tilt_window_covering"), + ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering"), + ("window_covering_full", "cover.mock_full_window_covering"), ], ) async def test_cover( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test window covering commands that always are implemented.""" - window_covering = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - await hass.services.async_call( "cover", "close_cover", @@ -59,7 +59,7 @@ async def test_cover( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=window_covering.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.DownOrClose(), ) @@ -76,7 +76,7 @@ async def test_cover( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=window_covering.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.StopMotion(), ) @@ -93,37 +93,28 @@ async def test_cover( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=window_covering.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.UpOrOpen(), ) matter_client.send_device_command.reset_mock() -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("window-covering_lift", "cover.mock_lift_window_covering_cover"), - ("window-covering_pa-lift", "cover.longan_link_wncv_da01_cover"), - ("window-covering_full", "cover.mock_full_window_covering_cover"), + ("window_covering_lift", "cover.mock_lift_window_covering"), + ("window_covering_pa_lift", "cover.longan_link_wncv_da01"), + ("window_covering_full", "cover.mock_full_window_covering"), ], ) async def test_cover_lift( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test window covering devices with lift and position aware lift features.""" - - window_covering = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - await hass.services.async_call( "cover", "set_cover_position", @@ -136,65 +127,57 @@ async def test_cover_lift( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=window_covering.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.GoToLiftPercentage(5000), ) matter_client.send_device_command.reset_mock() - set_node_attribute(window_covering, 1, 258, 10, 0b001010) + set_node_attribute(matter_node, 1, 258, 10, 0b001010) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING - set_node_attribute(window_covering, 1, 258, 10, 0b000101) + set_node_attribute(matter_node, 1, 258, 10, 0b000101) await trigger_subscription_callback(hass, matter_client) state = 
hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("window-covering_lift", "cover.mock_lift_window_covering_cover"), + ("window_covering_lift", "cover.mock_lift_window_covering"), ], ) async def test_cover_lift_only( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test window covering devices with lift feature and without position aware lift feature.""" - window_covering = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - - set_node_attribute(window_covering, 1, 258, 14, None) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, None) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.state == "unknown" - set_node_attribute(window_covering, 1, 258, 65529, [0, 1, 2]) + set_node_attribute(matter_node, 1, 258, 65529, [0, 1, 2]) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.attributes["supported_features"] & CoverEntityFeature.SET_POSITION == 0 - set_node_attribute(window_covering, 1, 258, 65529, [0, 1, 2, 5]) + set_node_attribute(matter_node, 1, 258, 65529, [0, 1, 2, 5]) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -202,28 +185,20 @@ async def test_cover_lift_only( assert state.attributes["supported_features"] & CoverEntityFeature.SET_POSITION != 0 -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("window-covering_pa-lift", "cover.longan_link_wncv_da01_cover"), + ("window_covering_pa_lift", "cover.longan_link_wncv_da01"), ], ) async def test_cover_position_aware_lift( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test window covering devices with position aware lift features.""" - window_covering = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - state = hass.states.get(entity_id) assert state mask = ( @@ -235,49 +210,41 @@ async def test_cover_position_aware_lift( assert state.attributes["supported_features"] & mask == mask for position in (0, 9999): - set_node_attribute(window_covering, 1, 258, 14, position) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, position) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.attributes["current_position"] == 100 - floor(position / 100) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN - set_node_attribute(window_covering, 1, 258, 14, 10000) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, 10000) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) 
assert state assert state.attributes["current_position"] == 0 - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("window-covering_tilt", "cover.mock_tilt_window_covering_cover"), - ("window-covering_pa-tilt", "cover.mock_pa_tilt_window_covering_cover"), - ("window-covering_full", "cover.mock_full_window_covering_cover"), + ("window_covering_tilt", "cover.mock_tilt_window_covering"), + ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering"), + ("window_covering_full", "cover.mock_full_window_covering"), ], ) async def test_cover_tilt( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test window covering devices with tilt and position aware tilt features.""" - window_covering = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - await hass.services.async_call( "cover", "set_cover_tilt_position", @@ -290,7 +257,7 @@ async def test_cover_tilt( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=window_covering.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.GoToTiltPercentage(5000), ) @@ -298,43 +265,35 @@ async def test_cover_tilt( await trigger_subscription_callback(hass, matter_client) - set_node_attribute(window_covering, 1, 258, 10, 0b100010) + set_node_attribute(matter_node, 1, 258, 10, 0b100010) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING - set_node_attribute(window_covering, 1, 258, 10, 0b010001) + set_node_attribute(matter_node, 1, 258, 10, 0b010001) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("window-covering_tilt", "cover.mock_tilt_window_covering_cover"), + ("window_covering_tilt", "cover.mock_tilt_window_covering"), ], ) async def test_cover_tilt_only( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test window covering devices with tilt feature and without position aware tilt feature.""" - window_covering = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - - set_node_attribute(window_covering, 1, 258, 65529, [0, 1, 2]) + set_node_attribute(matter_node, 1, 258, 65529, [0, 1, 2]) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -344,7 +303,7 @@ async def test_cover_tilt_only( == 0 ) - set_node_attribute(window_covering, 1, 258, 65529, [0, 1, 2, 8]) + set_node_attribute(matter_node, 1, 258, 65529, [0, 1, 2, 8]) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -355,28 +314,20 @@ async def test_cover_tilt_only( ) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", 
[True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("window-covering_pa-tilt", "cover.mock_pa_tilt_window_covering_cover"), + ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering"), ], ) async def test_cover_position_aware_tilt( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test window covering devices with position aware tilt feature.""" - window_covering = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - state = hass.states.get(entity_id) assert state mask = ( @@ -388,8 +339,8 @@ async def test_cover_position_aware_tilt( assert state.attributes["supported_features"] & mask == mask for tilt_position in (0, 9999, 10000): - set_node_attribute(window_covering, 1, 258, 15, tilt_position) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 15, tilt_position) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -399,18 +350,14 @@ async def test_cover_position_aware_tilt( ) +@pytest.mark.parametrize("node_fixture", ["window_covering_full"]) async def test_cover_full_features( hass: HomeAssistant, matter_client: MagicMock, + matter_node: MatterNode, ) -> None: """Test window covering devices with all the features.""" - - window_covering = await setup_integration_with_node_fixture( - hass, - "window-covering_full", - matter_client, - ) - entity_id = "cover.mock_full_window_covering_cover" + entity_id = "cover.mock_full_window_covering" state = hass.states.get(entity_id) assert state @@ -423,77 +370,77 @@ async def test_cover_full_features( ) assert state.attributes["supported_features"] & mask == mask - set_node_attribute(window_covering, 1, 258, 14, 10000) - set_node_attribute(window_covering, 1, 258, 15, 10000) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, 10000) + set_node_attribute(matter_node, 1, 258, 15, 10000) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED - set_node_attribute(window_covering, 1, 258, 14, 5000) - set_node_attribute(window_covering, 1, 258, 15, 10000) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, 5000) + set_node_attribute(matter_node, 1, 258, 15, 10000) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN - set_node_attribute(window_covering, 1, 258, 14, 10000) - set_node_attribute(window_covering, 1, 258, 15, 5000) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, 10000) + set_node_attribute(matter_node, 1, 258, 15, 5000) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED - set_node_attribute(window_covering, 1, 258, 14, 5000) - set_node_attribute(window_covering, 1, 258, 15, 5000) - set_node_attribute(window_covering, 1, 258, 10, 
0b000000) + set_node_attribute(matter_node, 1, 258, 14, 5000) + set_node_attribute(matter_node, 1, 258, 15, 5000) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN - set_node_attribute(window_covering, 1, 258, 14, 5000) - set_node_attribute(window_covering, 1, 258, 15, None) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, 5000) + set_node_attribute(matter_node, 1, 258, 15, None) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN - set_node_attribute(window_covering, 1, 258, 14, None) - set_node_attribute(window_covering, 1, 258, 15, 5000) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, None) + set_node_attribute(matter_node, 1, 258, 15, 5000) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.state == "unknown" - set_node_attribute(window_covering, 1, 258, 14, 10000) - set_node_attribute(window_covering, 1, 258, 15, None) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, 10000) + set_node_attribute(matter_node, 1, 258, 15, None) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED - set_node_attribute(window_covering, 1, 258, 14, None) - set_node_attribute(window_covering, 1, 258, 15, 10000) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, None) + set_node_attribute(matter_node, 1, 258, 15, 10000) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.state == "unknown" - set_node_attribute(window_covering, 1, 258, 14, None) - set_node_attribute(window_covering, 1, 258, 15, None) - set_node_attribute(window_covering, 1, 258, 10, 0b000000) + set_node_attribute(matter_node, 1, 258, 14, None) + set_node_attribute(matter_node, 1, 258, 15, None) + set_node_attribute(matter_node, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state diff --git a/tests/components/matter/test_diagnostics.py b/tests/components/matter/test_diagnostics.py index 6863619e145..cfdf305a361 100644 --- a/tests/components/matter/test_diagnostics.py +++ b/tests/components/matter/test_diagnostics.py @@ -6,6 +6,7 @@ import json from typing import Any from unittest.mock import MagicMock +from matter_server.client.models.node import MatterNode from matter_server.common.helpers.util import dataclass_from_dict from matter_server.common.models import ServerDiagnostics import pytest @@ -15,8 +16,6 @@ from homeassistant.components.matter.diagnostics import redact_matter_attributes from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .common import setup_integration_with_node_fixture - from 
tests.common import MockConfigEntry, load_fixture from tests.components.diagnostics import ( get_diagnostics_for_config_entry, @@ -57,8 +56,6 @@ async def test_matter_attribute_redact(device_diagnostics: dict[str, Any]) -> No assert redacted_device_diagnostics == device_diagnostics -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_config_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -77,8 +74,7 @@ async def test_config_entry_diagnostics( assert diagnostics == config_entry_diagnostics_redacted -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["device_diagnostics"]) async def test_device_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -86,9 +82,9 @@ async def test_device_diagnostics( matter_client: MagicMock, config_entry_diagnostics: dict[str, Any], device_diagnostics: dict[str, Any], + matter_node: MatterNode, ) -> None: """Test the device diagnostics.""" - await setup_integration_with_node_fixture(hass, "device_diagnostics", matter_client) system_info_dict = config_entry_diagnostics["info"] device_diagnostics_redacted = { "server_info": system_info_dict, diff --git a/tests/components/matter/test_event.py b/tests/components/matter/test_event.py index 183867642f5..f3a318c4e8b 100644 --- a/tests/components/matter/test_event.py +++ b/tests/components/matter/test_event.py @@ -5,39 +5,31 @@ from unittest.mock import MagicMock from matter_server.client.models.node import MatterNode from matter_server.common.models import EventType, MatterNodeEvent import pytest +from syrupy import SnapshotAssertion from homeassistant.components.event import ATTR_EVENT_TYPE, ATTR_EVENT_TYPES +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .common import setup_integration_with_node_fixture, trigger_subscription_callback +from .common import snapshot_matter_entities, trigger_subscription_callback -@pytest.fixture(name="generic_switch_node") -async def switch_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a GenericSwitch node.""" - return await setup_integration_with_node_fixture( - hass, "generic-switch", matter_client - ) +@pytest.mark.usefixtures("matter_devices") +async def test_events( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test events.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.EVENT) -@pytest.fixture(name="generic_switch_multi_node") -async def multi_switch_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a GenericSwitch node with multiple buttons.""" - return await setup_integration_with_node_fixture( - hass, "generic-switch-multi", matter_client - ) - - -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["generic_switch"]) async def test_generic_switch_node( hass: HomeAssistant, matter_client: MagicMock, - generic_switch_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test event entity for a GenericSwitch node.""" state = hass.states.get("event.mock_generic_switch_button") @@ -57,7 +49,7 @@ async def test_generic_switch_node( 
matter_client, EventType.NODE_EVENT, MatterNodeEvent( - node_id=generic_switch_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, cluster_id=59, event_id=1, @@ -72,12 +64,11 @@ async def test_generic_switch_node( assert state.attributes[ATTR_EVENT_TYPE] == "initial_press" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["generic_switch_multi"]) async def test_generic_switch_multi_node( hass: HomeAssistant, matter_client: MagicMock, - generic_switch_multi_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test event entity for a GenericSwitch node with multiple buttons.""" state_button_1 = hass.states.get("event.mock_generic_switch_button_1") @@ -105,7 +96,7 @@ async def test_generic_switch_multi_node( matter_client, EventType.NODE_EVENT, MatterNodeEvent( - node_id=generic_switch_multi_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, cluster_id=59, event_id=6, diff --git a/tests/components/matter/test_fan.py b/tests/components/matter/test_fan.py index 690209b1165..6ed95b0ecc2 100644 --- a/tests/components/matter/test_fan.py +++ b/tests/components/matter/test_fan.py @@ -1,10 +1,10 @@ """Test Matter Fan platform.""" -from typing import Any from unittest.mock import MagicMock, call from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion from homeassistant.components.fan import ( ATTR_DIRECTION, @@ -18,43 +18,40 @@ from homeassistant.components.fan import ( SERVICE_SET_DIRECTION, FanEntityFeature, ) -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - setup_integration_with_node_fixture, + snapshot_matter_entities, trigger_subscription_callback, ) -@pytest.fixture(name="fan_node") -async def simple_fan_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a Fan node.""" - return await setup_integration_with_node_fixture(hass, "fan", matter_client) +@pytest.mark.usefixtures("matter_devices") +async def test_fans( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test fans.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.FAN) -@pytest.fixture(name="air_purifier") -async def air_purifier_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a Air Purifier node (containing Fan cluster).""" - return await setup_integration_with_node_fixture( - hass, "air-purifier", matter_client - ) - - -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_fan_base( hass: HomeAssistant, matter_client: MagicMock, - air_purifier: MatterNode, + matter_node: MatterNode, ) -> None: """Test Fan platform.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" state = hass.states.get(entity_id) assert state assert state.attributes["preset_modes"] == [ @@ -78,50 +75,51 @@ async def test_fan_base( ) assert state.attributes["supported_features"] & mask == mask # handle fan mode update - set_node_attribute(air_purifier, 1, 514, 0, 1) + 
set_node_attribute(matter_node, 1, 514, 0, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["preset_mode"] == "low" # handle direction update - set_node_attribute(air_purifier, 1, 514, 11, 1) + set_node_attribute(matter_node, 1, 514, 11, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["direction"] == "reverse" # handle rock/oscillation update - set_node_attribute(air_purifier, 1, 514, 8, 1) + set_node_attribute(matter_node, 1, 514, 8, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["oscillating"] is True # handle wind mode active translates to correct preset - set_node_attribute(air_purifier, 1, 514, 10, 2) + set_node_attribute(matter_node, 1, 514, 10, 2) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["preset_mode"] == "natural_wind" - set_node_attribute(air_purifier, 1, 514, 10, 1) + set_node_attribute(matter_node, 1, 514, 10, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["preset_mode"] == "sleep_wind" # set mains power to OFF (OnOff cluster) - set_node_attribute(air_purifier, 1, 6, 0, False) + set_node_attribute(matter_node, 1, 6, 0, False) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["preset_mode"] is None assert state.attributes["percentage"] == 0 # test featuremap update - set_node_attribute(air_purifier, 1, 514, 65532, 1) + set_node_attribute(matter_node, 1, 514, 65532, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["supported_features"] & FanEntityFeature.SET_SPEED @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_fan_turn_on_with_percentage( hass: HomeAssistant, matter_client: MagicMock, - air_purifier: MatterNode, + matter_node: MatterNode, ) -> None: """Test turning on the fan with a specific percentage.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, @@ -130,7 +128,7 @@ async def test_fan_turn_on_with_percentage( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=matter_node.node_id, attribute_path="1/514/2", value=50, ) @@ -145,20 +143,21 @@ async def test_fan_turn_on_with_percentage( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=matter_node.node_id, attribute_path="1/514/2", value=255, ) @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["fan"]) async def test_fan_turn_on_with_preset_mode( hass: HomeAssistant, matter_client: MagicMock, - fan_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test turning on the fan with a specific preset mode.""" - entity_id = "fan.mocked_fan_switch_fan" + entity_id = "fan.mocked_fan_switch" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, @@ -167,7 +166,7 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == 
call( - node_id=fan_node.node_id, + node_id=matter_node.node_id, attribute_path="1/514/0", value=2, ) @@ -182,13 +181,13 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=fan_node.node_id, + node_id=matter_node.node_id, attribute_path="1/514/10", value=value, ) # test again if wind mode is explicitly turned off when we set a new preset mode matter_client.write_attribute.reset_mock() - set_node_attribute(fan_node, 1, 514, 10, 2) + set_node_attribute(matter_node, 1, 514, 10, 2) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( FAN_DOMAIN, @@ -198,20 +197,20 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=fan_node.node_id, + node_id=matter_node.node_id, attribute_path="1/514/10", value=0, ) assert matter_client.write_attribute.call_args == call( - node_id=fan_node.node_id, + node_id=matter_node.node_id, attribute_path="1/514/0", value=2, ) # test again where preset_mode is omitted in the service call # which should select the last active preset matter_client.write_attribute.reset_mock() - set_node_attribute(fan_node, 1, 514, 0, 1) - set_node_attribute(fan_node, 1, 514, 10, 0) + set_node_attribute(matter_node, 1, 514, 0, 1) + set_node_attribute(matter_node, 1, 514, 10, 0) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( FAN_DOMAIN, @@ -221,19 +220,20 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=fan_node.node_id, + node_id=matter_node.node_id, attribute_path="1/514/0", value=1, ) +@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_fan_turn_off( hass: HomeAssistant, matter_client: MagicMock, - air_purifier: MatterNode, + matter_node: MatterNode, ) -> None: """Test turning off the fan.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_OFF, @@ -242,13 +242,13 @@ async def test_fan_turn_off( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=matter_node.node_id, attribute_path="1/514/0", value=0, ) matter_client.write_attribute.reset_mock() # test again if wind mode is turned off - set_node_attribute(air_purifier, 1, 514, 10, 2) + set_node_attribute(matter_node, 1, 514, 10, 2) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( FAN_DOMAIN, @@ -258,24 +258,25 @@ async def test_fan_turn_off( ) assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=air_purifier.node_id, + node_id=matter_node.node_id, attribute_path="1/514/10", value=0, ) assert matter_client.write_attribute.call_args_list[1] == call( - node_id=air_purifier.node_id, + node_id=matter_node.node_id, attribute_path="1/514/0", value=0, ) +@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_fan_oscillate( hass: HomeAssistant, matter_client: MagicMock, - air_purifier: MatterNode, + matter_node: MatterNode, ) -> None: """Test oscillating the fan.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" for oscillating, value in ((True, 1), (False, 0)): 
await hass.services.async_call( FAN_DOMAIN, @@ -285,20 +286,21 @@ async def test_fan_oscillate( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=matter_node.node_id, attribute_path="1/514/8", value=value, ) matter_client.write_attribute.reset_mock() +@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_fan_set_direction( hass: HomeAssistant, matter_client: MagicMock, - air_purifier: MatterNode, + matter_node: MatterNode, ) -> None: """Test oscillating the fan.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" for direction, value in ((DIRECTION_FORWARD, 0), (DIRECTION_REVERSE, 1)): await hass.services.async_call( FAN_DOMAIN, @@ -308,7 +310,7 @@ async def test_fan_set_direction( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=matter_node.node_id, attribute_path="1/514/11", value=value, ) @@ -317,11 +319,11 @@ async def test_fan_set_direction( @pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id", "attributes", "features"), + ("node_fixture", "entity_id", "attributes", "features"), [ ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", { "1/514/65532": 0, }, @@ -329,7 +331,7 @@ async def test_fan_set_direction( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", { "1/514/65532": 1, }, @@ -341,7 +343,7 @@ async def test_fan_set_direction( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", { "1/514/65532": 4, }, @@ -353,7 +355,7 @@ async def test_fan_set_direction( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", { "1/514/65532": 36, }, @@ -369,13 +371,11 @@ async def test_fan_set_direction( async def test_fan_supported_features( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, - attributes: dict[str, Any], features: int, ) -> None: """Test if the correct features get discovered from featuremap.""" - await setup_integration_with_node_fixture(hass, fixture, matter_client, attributes) state = hass.states.get(entity_id) assert state assert state.attributes["supported_features"] & features == features @@ -383,11 +383,11 @@ async def test_fan_supported_features( @pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id", "attributes", "preset_modes"), + ("node_fixture", "entity_id", "attributes", "preset_modes"), [ ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 0, "1/514/65532": 0}, [ "low", @@ -397,7 +397,7 @@ async def test_fan_supported_features( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 1, "1/514/65532": 0}, [ "low", @@ -406,25 +406,25 @@ async def test_fan_supported_features( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 2, "1/514/65532": 0}, ["low", "medium", "high", "auto"], ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 4, "1/514/65532": 0}, ["high", "auto"], ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 5, "1/514/65532": 0}, ["high"], ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 5, "1/514/65532": 8, "1/514/9": 3}, ["high", "natural_wind", "sleep_wind"], ), @@ -433,13 +433,11 @@ async def 
test_fan_supported_features( async def test_fan_features( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, - attributes: dict[str, Any], preset_modes: list[str], ) -> None: """Test if the correct presets get discovered from fanmodesequence.""" - await setup_integration_with_node_fixture(hass, fixture, matter_client, attributes) state = hass.states.get(entity_id) assert state assert state.attributes["preset_modes"] == preset_modes diff --git a/tests/components/matter/test_helpers.py b/tests/components/matter/test_helpers.py index a4b5e165a93..2f89f3703ef 100644 --- a/tests/components/matter/test_helpers.py +++ b/tests/components/matter/test_helpers.py @@ -4,6 +4,7 @@ from __future__ import annotations from unittest.mock import MagicMock +from matter_server.client.models.node import MatterNode import pytest from homeassistant.components.matter.const import DOMAIN @@ -19,23 +20,18 @@ from .common import setup_integration_with_node_fixture from tests.common import MockConfigEntry -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["device_diagnostics"]) async def test_get_device_id( hass: HomeAssistant, matter_client: MagicMock, + matter_node: MatterNode, ) -> None: """Test get_device_id.""" - node = await setup_integration_with_node_fixture( - hass, "device_diagnostics", matter_client - ) - device_id = get_device_id(matter_client.server_info, node.endpoints[0]) + device_id = get_device_id(matter_client.server_info, matter_node.endpoints[0]) assert device_id == "00000000000004D2-0000000000000005-MatterNodeDevice" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_node_from_device_entry( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/matter/test_init.py b/tests/components/matter/test_init.py index cd5ef307cd3..f6576689413 100644 --- a/tests/components/matter/test_init.py +++ b/tests/components/matter/test_init.py @@ -6,15 +6,14 @@ import asyncio from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, call, patch +from aiohasupervisor import SupervisorError from matter_server.client.exceptions import ( CannotConnect, + NotConnected, ServerVersionTooNew, ServerVersionTooOld, ) -from matter_server.client.models.node import MatterNode from matter_server.common.errors import MatterError -from matter_server.common.helpers.util import dataclass_from_dict -from matter_server.common.models import MatterNodeData import pytest from homeassistant.components.hassio import HassioAPIError @@ -29,7 +28,7 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from .common import load_and_parse_node_fixture, setup_integration_with_node_fixture +from .common import create_node_from_fixture, setup_integration_with_node_fixture from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator @@ -56,13 +55,7 @@ async def test_entry_setup_unload( matter_client: MagicMock, ) -> None: """Test the integration set up and unload.""" - node_data = load_and_parse_node_fixture("onoff-light") - node = MatterNode( - dataclass_from_dict( - MatterNodeData, - node_data, - ) - ) + node = create_node_from_fixture("onoff_light") matter_client.get_nodes.return_value = [node] matter_client.get_node.return_value = node entry = 
MockConfigEntry(domain="matter", data={"url": "ws://localhost:5580/ws"}) @@ -72,8 +65,9 @@ async def test_entry_setup_unload( await hass.async_block_till_done() assert matter_client.connect.call_count == 1 + assert matter_client.set_default_fabric_label.call_count == 1 assert entry.state is ConfigEntryState.LOADED - entity_state = hass.states.get("light.mock_onoff_light_light") + entity_state = hass.states.get("light.mock_onoff_light") assert entity_state assert entity_state.state != STATE_UNAVAILABLE @@ -81,13 +75,11 @@ async def test_entry_setup_unload( assert matter_client.disconnect.call_count == 1 assert entry.state is ConfigEntryState.NOT_LOADED - entity_state = hass.states.get("light.mock_onoff_light_light") + entity_state = hass.states.get("light.mock_onoff_light") assert entity_state assert entity_state.state == STATE_UNAVAILABLE -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_home_assistant_stop( hass: HomeAssistant, matter_client: MagicMock, @@ -116,6 +108,26 @@ async def test_connect_failed( assert entry.state is ConfigEntryState.SETUP_RETRY +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_set_default_fabric_label_failed( + hass: HomeAssistant, + matter_client: MagicMock, +) -> None: + """Test entry setup retry when setting the default fabric label fails.""" + entry = MockConfigEntry(domain=DOMAIN, data={"url": "ws://localhost:5580/ws"}) + entry.add_to_hass(hass) + + matter_client.set_default_fabric_label.side_effect = NotConnected() + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert matter_client.connect.call_count == 1 + assert matter_client.set_default_fabric_label.call_count == 1 + + assert entry.state is ConfigEntryState.SETUP_RETRY + + async def test_connect_timeout( hass: HomeAssistant, matter_client: MagicMock, @@ -234,10 +246,10 @@ async def test_raise_addon_task_in_progress( install_addon_original_side_effect = install_addon.side_effect - async def install_addon_side_effect(hass: HomeAssistant, slug: str) -> None: + async def install_addon_side_effect(slug: str) -> None: """Mock install add-on.""" await install_event.wait() - await install_addon_original_side_effect(hass, slug) + await install_addon_original_side_effect(slug) install_addon.side_effect = install_addon_side_effect @@ -298,7 +310,7 @@ async def test_start_addon( assert addon_info.call_count == 1 assert install_addon.call_count == 0 assert start_addon.call_count == 1 - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") async def test_install_addon( @@ -325,9 +337,9 @@ async def test_install_addon( assert entry.state is ConfigEntryState.SETUP_RETRY assert addon_store_info.call_count == 3 assert install_addon.call_count == 1 - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert start_addon.call_count == 1 - assert start_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call("core_matter_server") async def test_addon_info_failure( @@ -338,7 +350,7 @@ async def test_addon_info_failure( start_addon: AsyncMock, ) -> None: """Test failure to get add-on info for Matter add-on during entry setup.""" - addon_info.side_effect = HassioAPIError("Boom") + addon_info.side_effect = SupervisorError("Boom") entry = MockConfigEntry( domain=DOMAIN, title="Matter", @@ -377,7 +389,7 @@ async 
def test_addon_info_failure( True, 1, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), None, ServerVersionTooOld("Invalid version"), ), @@ -411,8 +423,8 @@ async def test_update_addon( connect_side_effect: Exception, ) -> None: """Test update the Matter add-on during entry setup.""" - addon_info.return_value["version"] = addon_version - addon_info.return_value["update_available"] = update_available + addon_info.return_value.version = addon_version + addon_info.return_value.update_available = update_available create_backup.side_effect = create_backup_side_effect update_addon.side_effect = update_addon_side_effect matter_client.connect.side_effect = connect_side_effect @@ -434,8 +446,6 @@ async def test_update_addon( assert update_addon.call_count == update_calls -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( ( "connect_side_effect", @@ -492,7 +502,7 @@ async def test_issue_registry_invalid_version( ("stop_addon_side_effect", "entry_state"), [ (None, ConfigEntryState.NOT_LOADED), - (HassioAPIError("Boom"), ConfigEntryState.LOADED), + (SupervisorError("Boom"), ConfigEntryState.LOADED), ], ) async def test_stop_addon( @@ -531,7 +541,7 @@ async def test_stop_addon( assert entry.state == entry_state assert stop_addon.call_count == 1 - assert stop_addon.call_args == call(hass, "core_matter_server") + assert stop_addon.call_args == call("core_matter_server") async def test_remove_entry( @@ -570,7 +580,7 @@ async def test_remove_entry( await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call(hass, "core_matter_server") + assert stop_addon.call_args == call("core_matter_server") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -578,7 +588,7 @@ async def test_remove_entry( partial=True, ) assert uninstall_addon.call_count == 1 - assert uninstall_addon.call_args == call(hass, "core_matter_server") + assert uninstall_addon.call_args == call("core_matter_server") assert entry.state is ConfigEntryState.NOT_LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 0 stop_addon.reset_mock() @@ -588,12 +598,12 @@ async def test_remove_entry( # test add-on stop failure entry.add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - stop_addon.side_effect = HassioAPIError() + stop_addon.side_effect = SupervisorError() await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call(hass, "core_matter_server") + assert stop_addon.call_args == call("core_matter_server") assert create_backup.call_count == 0 assert uninstall_addon.call_count == 0 assert entry.state is ConfigEntryState.NOT_LOADED @@ -612,7 +622,7 @@ async def test_remove_entry( await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call(hass, "core_matter_server") + assert stop_addon.call_args == call("core_matter_server") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -631,12 +641,12 @@ async def test_remove_entry( # test add-on uninstall failure entry.add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - uninstall_addon.side_effect = HassioAPIError() + uninstall_addon.side_effect = SupervisorError() await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args 
== call(hass, "core_matter_server") + assert stop_addon.call_args == call("core_matter_server") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -644,14 +654,12 @@ async def test_remove_entry( partial=True, ) assert uninstall_addon.call_count == 1 - assert uninstall_addon.call_args == call(hass, "core_matter_server") + assert uninstall_addon.call_args == call("core_matter_server") assert entry.state is ConfigEntryState.NOT_LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 0 assert "Failed to uninstall the Matter Server add-on" in caplog.text -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_remove_config_entry_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -668,7 +676,7 @@ async def test_remove_config_entry_device( device_entry = dr.async_entries_for_config_entry( device_registry, config_entry.entry_id )[0] - entity_id = "light.m5stamp_lighting_app_light" + entity_id = "light.m5stamp_lighting_app" assert device_entry assert entity_registry.async_get(entity_id) @@ -684,8 +692,6 @@ async def test_remove_config_entry_device( assert not hass.states.get(entity_id) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_remove_config_entry_device_no_node( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/matter/test_light.py b/tests/components/matter/test_light.py index 4fd73b6457b..c49b47c9106 100644 --- a/tests/components/matter/test_light.py +++ b/tests/components/matter/test_light.py @@ -3,55 +3,61 @@ from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters +from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion from homeassistant.components.light import ColorMode +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - setup_integration_with_node_fixture, + snapshot_matter_entities, trigger_subscription_callback, ) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_devices") +async def test_lights( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test lights.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.LIGHT) + + @pytest.mark.parametrize( - ("fixture", "entity_id", "supported_color_modes"), + ("node_fixture", "entity_id", "supported_color_modes"), [ ( - "extended-color-light", - "light.mock_extended_color_light_light", + "extended_color_light", + "light.mock_extended_color_light", ["color_temp", "hs", "xy"], ), ( - "color-temperature-light", - "light.mock_color_temperature_light_light", + "color_temperature_light", + "light.mock_color_temperature_light", ["color_temp"], ), - ("dimmable-light", "light.mock_dimmable_light_light", ["brightness"]), - ("onoff-light", "light.mock_onoff_light_light", ["onoff"]), - ("onoff-light-with-levelcontrol-present", "light.d215s_light", ["onoff"]), + ("dimmable_light", "light.mock_dimmable_light", ["brightness"]), + ("onoff_light", "light.mock_onoff_light", ["onoff"]), + ("onoff_light_with_levelcontrol_present", "light.d215s", ["onoff"]), ], ) async def test_light_turn_on_off( hass: 
HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, supported_color_modes: list[str], ) -> None: """Test basic light discovery and turn on/off.""" - light_node = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - # Test that the light is off - set_node_attribute(light_node, 1, 6, 0, False) + set_node_attribute(matter_node, 1, 6, 0, False) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -64,7 +70,7 @@ async def test_light_turn_on_off( assert state.attributes["supported_color_modes"] == supported_color_modes # Test that the light is on - set_node_attribute(light_node, 1, 6, 0, True) + set_node_attribute(matter_node, 1, 6, 0, True) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -83,7 +89,7 @@ async def test_light_turn_on_off( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.Off(), ) @@ -101,40 +107,32 @@ async def test_light_turn_on_off( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ) matter_client.send_device_command.reset_mock() -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("extended-color-light", "light.mock_extended_color_light_light"), - ("color-temperature-light", "light.mock_color_temperature_light_light"), - ("dimmable-light", "light.mock_dimmable_light_light"), - ("dimmable-plugin-unit", "light.dimmable_plugin_unit_light"), + ("extended_color_light", "light.mock_extended_color_light"), + ("color_temperature_light", "light.mock_color_temperature_light"), + ("dimmable_light", "light.mock_dimmable_light"), + ("dimmable_plugin_unit", "light.dimmable_plugin_unit"), ], ) async def test_dimmable_light( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test a dimmable light.""" - light_node = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - # Test that the light brightness is 50 (out of 254) - set_node_attribute(light_node, 1, 8, 0, 50) + set_node_attribute(matter_node, 1, 8, 0, 50) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -155,11 +153,11 @@ async def test_dimmable_light( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.LevelControl.Commands.MoveToLevelWithOnOff( level=128, - transitionTime=2, + transitionTime=0, ), ) matter_client.send_device_command.reset_mock() @@ -174,7 +172,7 @@ async def test_dimmable_light( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.LevelControl.Commands.MoveToLevelWithOnOff( level=128, @@ -184,32 +182,23 @@ async def test_dimmable_light( matter_client.send_device_command.reset_mock() -# This 
tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("extended-color-light", "light.mock_extended_color_light_light"), - ("color-temperature-light", "light.mock_color_temperature_light_light"), + ("extended_color_light", "light.mock_extended_color_light"), + ("color_temperature_light", "light.mock_color_temperature_light"), ], ) async def test_color_temperature_light( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test a color temperature light.""" - - light_node = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - # Test that the light color temperature is 3000 (out of 50000) - set_node_attribute(light_node, 1, 768, 8, 2) - set_node_attribute(light_node, 1, 768, 7, 3000) + set_node_attribute(matter_node, 1, 768, 8, 2) + set_node_attribute(matter_node, 1, 768, 7, 3000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -233,17 +222,17 @@ async def test_color_temperature_light( matter_client.send_device_command.assert_has_calls( [ call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.ColorControl.Commands.MoveToColorTemperature( colorTemperatureMireds=300, - transitionTime=2, + transitionTime=0, optionsMask=1, optionsOverride=1, ), ), call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -263,7 +252,7 @@ async def test_color_temperature_light( matter_client.send_device_command.assert_has_calls( [ call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.ColorControl.Commands.MoveToColorTemperature( colorTemperatureMireds=300, @@ -273,7 +262,7 @@ async def test_color_temperature_light( ), ), call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -282,32 +271,24 @@ async def test_color_temperature_light( matter_client.send_device_command.reset_mock() -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("fixture", "entity_id"), + ("node_fixture", "entity_id"), [ - ("extended-color-light", "light.mock_extended_color_light_light"), + ("extended_color_light", "light.mock_extended_color_light"), ], ) async def test_extended_color_light( hass: HomeAssistant, matter_client: MagicMock, - fixture: str, + matter_node: MatterNode, entity_id: str, ) -> None: """Test an extended color light.""" - light_node = await setup_integration_with_node_fixture( - hass, - fixture, - matter_client, - ) - # Test that the XY color changes - set_node_attribute(light_node, 1, 768, 8, 1) - set_node_attribute(light_node, 1, 768, 3, 50) - set_node_attribute(light_node, 1, 768, 4, 100) + set_node_attribute(matter_node, 1, 768, 8, 1) + set_node_attribute(matter_node, 1, 768, 3, 50) + set_node_attribute(matter_node, 1, 768, 4, 100) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -317,9 +298,9 @@ async def test_extended_color_light( assert state.attributes["xy_color"] == (0.0007630, 0.001526) # Test that the HS color changes - set_node_attribute(light_node, 1, 768, 8, 0) - set_node_attribute(light_node, 1, 768, 1, 50) - set_node_attribute(light_node, 1, 768, 0, 
100) + set_node_attribute(matter_node, 1, 768, 8, 0) + set_node_attribute(matter_node, 1, 768, 1, 50) + set_node_attribute(matter_node, 1, 768, 0, 100) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -343,18 +324,18 @@ async def test_extended_color_light( matter_client.send_device_command.assert_has_calls( [ call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.ColorControl.Commands.MoveToColor( colorX=0.5 * 65536, colorY=0.5 * 65536, - transitionTime=2, + transitionTime=0, optionsMask=1, optionsOverride=1, ), ), call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -374,7 +355,7 @@ async def test_extended_color_light( matter_client.send_device_command.assert_has_calls( [ call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.ColorControl.Commands.MoveToColor( colorX=0.5 * 65536, @@ -385,7 +366,7 @@ async def test_extended_color_light( ), ), call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -413,13 +394,13 @@ async def test_extended_color_light( command=clusters.ColorControl.Commands.MoveToHueAndSaturation( hue=167, saturation=254, - transitionTime=2, + transitionTime=0, optionsMask=1, optionsOverride=1, ), ), call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -454,7 +435,7 @@ async def test_extended_color_light( ), ), call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), diff --git a/tests/components/matter/test_lock.py b/tests/components/matter/test_lock.py index f279430b393..7bcfd381d6c 100644 --- a/tests/components/matter/test_lock.py +++ b/tests/components/matter/test_lock.py @@ -5,41 +5,50 @@ from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.lock import ( - STATE_LOCKED, - STATE_OPEN, - STATE_UNLOCKED, - LockEntityFeature, -) -from homeassistant.const import ATTR_CODE, STATE_LOCKING, STATE_OPENING, STATE_UNKNOWN +from homeassistant.components.lock import LockEntityFeature, LockState +from homeassistant.const import ATTR_CODE, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.entity_registry as er -from .common import set_node_attribute, trigger_subscription_callback +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("matter_devices") +async def test_locks( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test locks.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.LOCK) + + +@pytest.mark.parametrize("node_fixture", ["door_lock"]) async def test_lock( hass: HomeAssistant, matter_client: MagicMock, - door_lock: MatterNode, + matter_node: MatterNode, ) -> None: """Test door lock.""" await hass.services.async_call( "lock", "unlock", { - "entity_id": "lock.mock_door_lock_lock", + "entity_id": 
"lock.mock_door_lock", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=door_lock.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.UnlockDoor(), timed_request_timeout_ms=1000, @@ -50,14 +59,14 @@ async def test_lock( "lock", "lock", { - "entity_id": "lock.mock_door_lock_lock", + "entity_id": "lock.mock_door_lock", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=door_lock.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.LockDoor(), timed_request_timeout_ms=1000, @@ -65,51 +74,50 @@ async def test_lock( matter_client.send_device_command.reset_mock() await hass.async_block_till_done() - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state - assert state.state == STATE_LOCKING + assert state.state == LockState.LOCKING - set_node_attribute(door_lock, 1, 257, 0, 0) + set_node_attribute(matter_node, 1, 257, 0, 0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED - set_node_attribute(door_lock, 1, 257, 0, 2) + set_node_attribute(matter_node, 1, 257, 0, 2) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED - set_node_attribute(door_lock, 1, 257, 0, 1) + set_node_attribute(matter_node, 1, 257, 0, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED - set_node_attribute(door_lock, 1, 257, 0, None) + set_node_attribute(matter_node, 1, 257, 0, None) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state assert state.state == STATE_UNKNOWN # test featuremap update - set_node_attribute(door_lock, 1, 257, 65532, 4096) + set_node_attribute(matter_node, 1, 257, 65532, 4096) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state.attributes["supported_features"] & LockEntityFeature.OPEN -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["door_lock"]) async def test_lock_requires_pin( hass: HomeAssistant, matter_client: MagicMock, - door_lock: MatterNode, + matter_node: MatterNode, entity_registry: er.EntityRegistry, ) -> None: """Test door lock with PINCode.""" @@ -117,9 +125,9 @@ async def test_lock_requires_pin( code = "1234567" # set RequirePINforRemoteOperation - set_node_attribute(door_lock, 1, 257, 51, True) + set_node_attribute(matter_node, 1, 257, 51, True) # set door state to unlocked - set_node_attribute(door_lock, 1, 257, 0, 2) + set_node_attribute(matter_node, 1, 257, 0, 2) await trigger_subscription_callback(hass, 
matter_client) with pytest.raises(ServiceValidationError): @@ -127,7 +135,7 @@ async def test_lock_requires_pin( await hass.services.async_call( "lock", "lock", - {"entity_id": "lock.mock_door_lock_lock", ATTR_CODE: "1234"}, + {"entity_id": "lock.mock_door_lock", ATTR_CODE: "1234"}, blocking=True, ) @@ -136,12 +144,12 @@ async def test_lock_requires_pin( await hass.services.async_call( "lock", "lock", - {"entity_id": "lock.mock_door_lock_lock", ATTR_CODE: code}, + {"entity_id": "lock.mock_door_lock", ATTR_CODE: code}, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=door_lock.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.LockDoor(code.encode()), timed_request_timeout_ms=1000, @@ -150,49 +158,48 @@ async def test_lock_requires_pin( # Lock door using default code default_code = "7654321" entity_registry.async_update_entity_options( - "lock.mock_door_lock_lock", "lock", {"default_code": default_code} + "lock.mock_door_lock", "lock", {"default_code": default_code} ) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( "lock", "lock", - {"entity_id": "lock.mock_door_lock_lock"}, + {"entity_id": "lock.mock_door_lock"}, blocking=True, ) assert matter_client.send_device_command.call_count == 2 assert matter_client.send_device_command.call_args == call( - node_id=door_lock.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.LockDoor(default_code.encode()), timed_request_timeout_ms=1000, ) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["door_lock_with_unbolt"]) async def test_lock_with_unbolt( hass: HomeAssistant, matter_client: MagicMock, - door_lock_with_unbolt: MatterNode, + matter_node: MatterNode, ) -> None: """Test door lock.""" - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED assert state.attributes["supported_features"] & LockEntityFeature.OPEN # test unlock/unbolt await hass.services.async_call( "lock", "unlock", { - "entity_id": "lock.mock_door_lock_lock", + "entity_id": "lock.mock_door_lock", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 # unlock should unbolt on a lock with unbolt feature assert matter_client.send_device_command.call_args == call( - node_id=door_lock_with_unbolt.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.UnboltDoor(), timed_request_timeout_ms=1000, @@ -203,33 +210,33 @@ async def test_lock_with_unbolt( "lock", "open", { - "entity_id": "lock.mock_door_lock_lock", + "entity_id": "lock.mock_door_lock", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=door_lock_with_unbolt.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.UnlockDoor(), timed_request_timeout_ms=1000, ) await hass.async_block_till_done() - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state - assert state.state == STATE_OPENING + assert state.state == LockState.OPENING - set_node_attribute(door_lock_with_unbolt, 1, 257, 0, 0) + set_node_attribute(matter_node, 1, 257, 0, 
0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED - set_node_attribute(door_lock_with_unbolt, 1, 257, 0, 3) + set_node_attribute(matter_node, 1, 257, 0, 3) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state - assert state.state == STATE_OPEN + assert state.state == LockState.OPEN diff --git a/tests/components/matter/test_number.py b/tests/components/matter/test_number.py index 917f8138c7a..86e1fbbf419 100644 --- a/tests/components/matter/test_number.py +++ b/tests/components/matter/test_number.py @@ -1,35 +1,39 @@ """Test Matter number entities.""" -from unittest.mock import MagicMock +from unittest.mock import MagicMock, call from matter_server.client.models.node import MatterNode +from matter_server.common import custom_clusters +from matter_server.common.helpers.util import create_attribute_path_from_attribute import pytest +from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - setup_integration_with_node_fixture, + snapshot_matter_entities, trigger_subscription_callback, ) -@pytest.fixture(name="light_node") -async def dimmable_light_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a flow sensor node.""" - return await setup_integration_with_node_fixture( - hass, "dimmable-light", matter_client - ) +@pytest.mark.usefixtures("matter_devices") +async def test_numbers( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test numbers.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.NUMBER) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) async def test_level_control_config_entities( hass: HomeAssistant, matter_client: MagicMock, - light_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test number entities are created for the LevelControl cluster (config) attributes.""" state = hass.states.get("number.mock_dimmable_light_on_level") @@ -48,9 +52,48 @@ async def test_level_control_config_entities( assert state assert state.state == "0.0" - set_node_attribute(light_node, 1, 0x00000008, 0x0011, 20) + set_node_attribute(matter_node, 1, 0x00000008, 0x0011, 20) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("number.mock_dimmable_light_on_level") assert state assert state.state == "20" + + +@pytest.mark.parametrize("node_fixture", ["eve_weather_sensor"]) +async def test_eve_weather_sensor_altitude( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test the altitude number entity created from the (Eve) custom cluster.""" + # altitude number entity on the Eve custom cluster + state = hass.states.get("number.eve_weather_altitude_above_sea_level") + assert state + assert state.state == "40.0" + + set_node_attribute(matter_node, 1, 319486977, 319422483, 800) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("number.eve_weather_altitude_above_sea_level") + 
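# Illustrative aside (hedged sketch, not part of the committed test): the raw IDs
# poked above -- cluster 319486977, attribute 319422483 -- are the Eve vendor
# cluster and its Altitude attribute, which the write assertion further below in
# this test addresses symbolically. Only helpers already imported in this file
# are used; the literal path string is an inference from the IDs above, not a
# value taken from the library.
from matter_server.common import custom_clusters
from matter_server.common.helpers.util import create_attribute_path_from_attribute

altitude_path = create_attribute_path_from_attribute(
    endpoint_id=1,
    attribute=custom_clusters.EveCluster.Attributes.Altitude,
)
# Expected to resolve to "1/319486977/319422483" (assumption), matching the
# set_node_attribute(matter_node, 1, 319486977, 319422483, ...) call above.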
assert state + assert state.state == "800.0" + + # test set value + await hass.services.async_call( + "number", + "set_value", + { + "entity_id": "number.eve_weather_altitude_above_sea_level", + "value": 500, + }, + blocking=True, + ) + assert matter_client.write_attribute.call_count == 1 + assert matter_client.write_attribute.call_args_list[0] == call( + node_id=matter_node.node_id, + attribute_path=create_attribute_path_from_attribute( + endpoint_id=1, + attribute=custom_clusters.EveCluster.Attributes.Altitude, + ), + value=500, + ) diff --git a/tests/components/matter/test_select.py b/tests/components/matter/test_select.py index f84e5870392..ffe996fd840 100644 --- a/tests/components/matter/test_select.py +++ b/tests/components/matter/test_select.py @@ -5,32 +5,34 @@ from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - setup_integration_with_node_fixture, + snapshot_matter_entities, trigger_subscription_callback, ) -@pytest.fixture(name="light_node") -async def dimmable_light_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a dimmable light node.""" - return await setup_integration_with_node_fixture( - hass, "dimmable-light", matter_client - ) +@pytest.mark.usefixtures("matter_devices") +async def test_selects( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test selects.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.SELECT) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) async def test_mode_select_entities( hass: HomeAssistant, matter_client: MagicMock, - light_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test select entities are created for the ModeSelect cluster attributes.""" state = hass.states.get("select.mock_dimmable_light_led_color") @@ -53,7 +55,7 @@ async def test_mode_select_entities( ] # name should be derived from description attribute assert state.attributes["friendly_name"] == "Mock Dimmable Light LED Color" - set_node_attribute(light_node, 6, 80, 3, 1) + set_node_attribute(matter_node, 6, 80, 3, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("select.mock_dimmable_light_led_color") assert state.state == "Orange" @@ -70,30 +72,34 @@ async def test_mode_select_entities( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=light_node.node_id, + node_id=matter_node.node_id, endpoint_id=6, command=clusters.ModeSelect.Commands.ChangeToMode(newMode=3), ) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) async def test_attribute_select_entities( hass: HomeAssistant, matter_client: MagicMock, - light_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test select entities are created for attribute based discovery schema(s).""" entity_id = "select.mock_dimmable_light_power_on_behavior_on_startup" state = 
hass.states.get(entity_id) assert state - assert state.state == "Previous" - assert state.attributes["options"] == ["On", "Off", "Toggle", "Previous"] + assert state.state == "previous" + assert state.attributes["options"] == ["on", "off", "toggle", "previous"] assert ( state.attributes["friendly_name"] - == "Mock Dimmable Light Power-on behavior on Startup" + == "Mock Dimmable Light Power-on behavior on startup" ) - set_node_attribute(light_node, 1, 6, 16387, 1) + set_node_attribute(matter_node, 1, 6, 16387, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) - assert state.state == "On" + assert state.state == "on" + # test that an invalid value (e.g. 253) leads to an unknown state + set_node_attribute(matter_node, 1, 6, 16387, 253) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state.state == "unknown" diff --git a/tests/components/matter/test_sensor.py b/tests/components/matter/test_sensor.py index 2c9bfae94ce..3215ec58116 100644 --- a/tests/components/matter/test_sensor.py +++ b/tests/components/matter/test_sensor.py @@ -4,109 +4,41 @@ from unittest.mock import MagicMock from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion -from homeassistant.const import EntityCategory +from homeassistant.const import EntityCategory, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - setup_integration_with_node_fixture, + snapshot_matter_entities, trigger_subscription_callback, ) -@pytest.fixture(name="flow_sensor_node") -async def flow_sensor_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a flow sensor node.""" - return await setup_integration_with_node_fixture(hass, "flow-sensor", matter_client) +@pytest.mark.usefixtures("matter_devices") +async def test_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test sensors.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.SENSOR) -@pytest.fixture(name="humidity_sensor_node") -async def humidity_sensor_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a humidity sensor node.""" - return await setup_integration_with_node_fixture( - hass, "humidity-sensor", matter_client - ) - - -@pytest.fixture(name="light_sensor_node") -async def light_sensor_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a light sensor node.""" - return await setup_integration_with_node_fixture( - hass, "light-sensor", matter_client - ) - - -@pytest.fixture(name="pressure_sensor_node") -async def pressure_sensor_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a pressure sensor node.""" - return await setup_integration_with_node_fixture( - hass, "pressure-sensor", matter_client - ) - - -@pytest.fixture(name="temperature_sensor_node") -async def temperature_sensor_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a temperature sensor node.""" - return await setup_integration_with_node_fixture( - hass, "temperature-sensor", matter_client - ) - - -@pytest.fixture(name="eve_energy_plug_node") -async def eve_energy_plug_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture 
for a Eve Energy Plug node.""" - return await setup_integration_with_node_fixture( - hass, "eve-energy-plug", matter_client - ) - - -@pytest.fixture(name="air_quality_sensor_node") -async def air_quality_sensor_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for an air quality sensor (LightFi AQ1) node.""" - return await setup_integration_with_node_fixture( - hass, "air-quality-sensor", matter_client - ) - - -@pytest.fixture(name="air_purifier_node") -async def air_purifier_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for an air purifier node.""" - return await setup_integration_with_node_fixture( - hass, "air-purifier", matter_client - ) - - -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["flow_sensor"]) async def test_sensor_null_value( hass: HomeAssistant, matter_client: MagicMock, - flow_sensor_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test flow sensor.""" state = hass.states.get("sensor.mock_flow_sensor_flow") assert state assert state.state == "0.0" - set_node_attribute(flow_sensor_node, 1, 1028, 0, None) + set_node_attribute(matter_node, 1, 1028, 0, None) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_flow_sensor_flow") @@ -114,19 +46,18 @@ async def test_sensor_null_value( assert state.state == "unknown" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["flow_sensor"]) async def test_flow_sensor( hass: HomeAssistant, matter_client: MagicMock, - flow_sensor_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test flow sensor.""" state = hass.states.get("sensor.mock_flow_sensor_flow") assert state assert state.state == "0.0" - set_node_attribute(flow_sensor_node, 1, 1028, 0, 20) + set_node_attribute(matter_node, 1, 1028, 0, 20) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_flow_sensor_flow") @@ -134,19 +65,18 @@ async def test_flow_sensor( assert state.state == "2.0" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["humidity_sensor"]) async def test_humidity_sensor( hass: HomeAssistant, matter_client: MagicMock, - humidity_sensor_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test humidity sensor.""" state = hass.states.get("sensor.mock_humidity_sensor_humidity") assert state assert state.state == "0.0" - set_node_attribute(humidity_sensor_node, 1, 1029, 0, 4000) + set_node_attribute(matter_node, 1, 1029, 0, 4000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_humidity_sensor_humidity") @@ -154,19 +84,18 @@ async def test_humidity_sensor( assert state.state == "40.0" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["light_sensor"]) async def test_light_sensor( hass: HomeAssistant, matter_client: MagicMock, - light_sensor_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test light sensor.""" state = hass.states.get("sensor.mock_light_sensor_illuminance") assert state assert state.state == "1.3" - set_node_attribute(light_sensor_node, 1, 1024, 0, 
3000) + set_node_attribute(matter_node, 1, 1024, 0, 3000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_light_sensor_illuminance") @@ -174,39 +103,18 @@ async def test_light_sensor( assert state.state == "2.0" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_pressure_sensor( - hass: HomeAssistant, - matter_client: MagicMock, - pressure_sensor_node: MatterNode, -) -> None: - """Test pressure sensor.""" - state = hass.states.get("sensor.mock_pressure_sensor_pressure") - assert state - assert state.state == "0.0" - - set_node_attribute(pressure_sensor_node, 1, 1027, 0, 1010) - await trigger_subscription_callback(hass, matter_client) - - state = hass.states.get("sensor.mock_pressure_sensor_pressure") - assert state - assert state.state == "101.0" - - -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["temperature_sensor"]) async def test_temperature_sensor( hass: HomeAssistant, matter_client: MagicMock, - temperature_sensor_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test temperature sensor.""" state = hass.states.get("sensor.mock_temperature_sensor_temperature") assert state assert state.state == "21.0" - set_node_attribute(temperature_sensor_node, 1, 1026, 0, 2500) + set_node_attribute(matter_node, 1, 1026, 0, 2500) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_temperature_sensor_temperature") @@ -214,13 +122,12 @@ async def test_temperature_sensor( assert state.state == "25.0" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["eve_contact_sensor"]) async def test_battery_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, matter_client: MagicMock, - eve_contact_sensor_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test battery sensor.""" entity_id = "sensor.eve_door_battery" @@ -228,7 +135,7 @@ async def test_battery_sensor( assert state assert state.state == "100" - set_node_attribute(eve_contact_sensor_node, 1, 47, 12, 100) + set_node_attribute(matter_node, 1, 47, 12, 100) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -241,59 +148,115 @@ async def test_battery_sensor( assert entry.entity_category == EntityCategory.DIAGNOSTIC -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_eve_energy_sensors( +@pytest.mark.parametrize("node_fixture", ["eve_contact_sensor"]) +async def test_battery_sensor_voltage( hass: HomeAssistant, entity_registry: er.EntityRegistry, matter_client: MagicMock, - eve_energy_plug_node: MatterNode, + matter_node: MatterNode, ) -> None: - """Test Energy sensors created from Eve Energy custom cluster.""" - # power sensor - entity_id = "sensor.eve_energy_plug_power" + """Test battery voltage sensor.""" + entity_id = "sensor.eve_door_voltage" state = hass.states.get(entity_id) assert state + assert state.state == "3.558" + + set_node_attribute(matter_node, 1, 47, 11, 4234) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get(entity_id) + assert state + assert state.state == "4.234" + + entry = entity_registry.async_get(entity_id) + + assert entry + assert 
entry.entity_category == EntityCategory.DIAGNOSTIC + + +@pytest.mark.parametrize("node_fixture", ["smoke_detector"]) +async def test_battery_sensor_description( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test battery replacement description sensor.""" + state = hass.states.get("sensor.smoke_sensor_battery_type") + assert state + assert state.state == "CR123A" + + set_node_attribute(matter_node, 1, 47, 19, "CR2032") + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("sensor.smoke_sensor_battery_type") + assert state + assert state.state == "CR2032" + + +@pytest.mark.parametrize("node_fixture", ["eve_thermo"]) +async def test_eve_thermo_sensor( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test Eve Thermo.""" + # Valve position + state = hass.states.get("sensor.eve_thermo_valve_position") + assert state + assert state.state == "10" + + set_node_attribute(matter_node, 1, 319486977, 319422488, 0) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("sensor.eve_thermo_valve_position") + assert state + assert state.state == "0" + + +@pytest.mark.parametrize("node_fixture", ["pressure_sensor"]) +async def test_pressure_sensor( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test pressure sensor.""" + state = hass.states.get("sensor.mock_pressure_sensor_pressure") + assert state assert state.state == "0.0" - assert state.attributes["unit_of_measurement"] == "W" - assert state.attributes["device_class"] == "power" - assert state.attributes["friendly_name"] == "Eve Energy Plug Power" - # voltage sensor - entity_id = "sensor.eve_energy_plug_voltage" - state = hass.states.get(entity_id) + set_node_attribute(matter_node, 1, 1027, 0, 1010) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("sensor.mock_pressure_sensor_pressure") assert state - assert state.state == "238.800003051758" - assert state.attributes["unit_of_measurement"] == "V" - assert state.attributes["device_class"] == "voltage" - assert state.attributes["friendly_name"] == "Eve Energy Plug Voltage" + assert state.state == "101.0" - # energy sensor - entity_id = "sensor.eve_energy_plug_energy" - state = hass.states.get(entity_id) + +@pytest.mark.parametrize("node_fixture", ["eve_weather_sensor"]) +async def test_eve_weather_sensor_custom_cluster( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test weather sensor created from (Eve) custom cluster.""" + # pressure sensor on Eve custom cluster + state = hass.states.get("sensor.eve_weather_pressure") assert state - assert state.state == "0.220000028610229" - assert state.attributes["unit_of_measurement"] == "kWh" - assert state.attributes["device_class"] == "energy" - assert state.attributes["friendly_name"] == "Eve Energy Plug Energy" - assert state.attributes["state_class"] == "total_increasing" + assert state.state == "1008.5" - # current sensor - entity_id = "sensor.eve_energy_plug_current" - state = hass.states.get(entity_id) + set_node_attribute(matter_node, 1, 319486977, 319422484, 800) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("sensor.eve_weather_pressure") assert state - assert state.state == "0.0" - assert state.attributes["unit_of_measurement"] == "A" - assert state.attributes["device_class"] == "current" - 
assert state.attributes["friendly_name"] == "Eve Energy Plug Current" + assert state.state == "800.0" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["air_quality_sensor"]) async def test_air_quality_sensor( hass: HomeAssistant, matter_client: MagicMock, - air_quality_sensor_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test air quality sensor.""" # Carbon Dioxide @@ -301,7 +264,7 @@ async def test_air_quality_sensor( assert state assert state.state == "678.0" - set_node_attribute(air_quality_sensor_node, 1, 1037, 0, 789) + set_node_attribute(matter_node, 1, 1037, 0, 789) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide") @@ -313,7 +276,7 @@ async def test_air_quality_sensor( assert state assert state.state == "3.0" - set_node_attribute(air_quality_sensor_node, 1, 1068, 0, 50) + set_node_attribute(matter_node, 1, 1068, 0, 50) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.lightfi_aq1_air_quality_sensor_pm1") @@ -325,7 +288,7 @@ async def test_air_quality_sensor( assert state assert state.state == "3.0" - set_node_attribute(air_quality_sensor_node, 1, 1066, 0, 50) + set_node_attribute(matter_node, 1, 1066, 0, 50) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.lightfi_aq1_air_quality_sensor_pm2_5") @@ -337,7 +300,7 @@ async def test_air_quality_sensor( assert state assert state.state == "3.0" - set_node_attribute(air_quality_sensor_node, 1, 1069, 0, 50) + set_node_attribute(matter_node, 1, 1069, 0, 50) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.lightfi_aq1_air_quality_sensor_pm10") @@ -345,108 +308,28 @@ async def test_air_quality_sensor( assert state.state == "50.0" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_air_purifier_sensor( +@pytest.mark.parametrize("node_fixture", ["silabs_dishwasher"]) +async def test_operational_state_sensor( hass: HomeAssistant, matter_client: MagicMock, - air_purifier_node: MatterNode, + matter_node: MatterNode, ) -> None: - """Test Air quality sensors are creayted for air purifier device.""" - # Carbon Dioxide - state = hass.states.get("sensor.air_purifier_carbon_dioxide") + """Test dishwasher sensor.""" + # OperationalState Cluster / OperationalState attribute (1/96/4) + state = hass.states.get("sensor.dishwasher_operational_state") assert state - assert state.state == "2.0" - - # PM1 - state = hass.states.get("sensor.air_purifier_pm1") - assert state - assert state.state == "2.0" - - # PM2.5 - state = hass.states.get("sensor.air_purifier_pm2_5") - assert state - assert state.state == "2.0" - - # PM10 - state = hass.states.get("sensor.air_purifier_pm10") - assert state - assert state.state == "2.0" - - # Temperature - state = hass.states.get("sensor.air_purifier_temperature") - assert state - assert state.state == "20.0" - - # Humidity - state = hass.states.get("sensor.air_purifier_humidity") - assert state - assert state.state == "50.0" - - # VOCS - state = hass.states.get("sensor.air_purifier_vocs") - assert state - assert state.state == "2.0" - assert state.attributes["state_class"] == "measurement" - assert state.attributes["unit_of_measurement"] == "ppm" - assert state.attributes["device_class"] == 
"volatile_organic_compounds_parts" - assert state.attributes["friendly_name"] == "Air Purifier VOCs" - - # Air Quality - state = hass.states.get("sensor.air_purifier_air_quality") - assert state - assert state.state == "good" - expected_options = [ - "extremely_poor", - "very_poor", - "poor", - "fair", - "good", - "moderate", - "unknown", + assert state.state == "stopped" + assert state.attributes["options"] == [ + "stopped", + "running", + "paused", + "error", + "extra_state", ] - assert set(state.attributes["options"]) == set(expected_options) - assert state.attributes["device_class"] == "enum" - assert state.attributes["friendly_name"] == "Air Purifier Air quality" - # Carbon MonoOxide - state = hass.states.get("sensor.air_purifier_carbon_monoxide") - assert state - assert state.state == "2.0" - assert state.attributes["state_class"] == "measurement" - assert state.attributes["unit_of_measurement"] == "ppm" - assert state.attributes["device_class"] == "carbon_monoxide" - assert state.attributes["friendly_name"] == "Air Purifier Carbon monoxide" + set_node_attribute(matter_node, 1, 96, 4, 8) + await trigger_subscription_callback(hass, matter_client) - # Nitrogen Dioxide - state = hass.states.get("sensor.air_purifier_nitrogen_dioxide") + state = hass.states.get("sensor.dishwasher_operational_state") assert state - assert state.state == "2.0" - assert state.attributes["state_class"] == "measurement" - assert state.attributes["unit_of_measurement"] == "ppm" - assert state.attributes["device_class"] == "nitrogen_dioxide" - assert state.attributes["friendly_name"] == "Air Purifier Nitrogen dioxide" - - # Ozone Concentration - state = hass.states.get("sensor.air_purifier_ozone") - assert state - assert state.state == "2.0" - assert state.attributes["state_class"] == "measurement" - assert state.attributes["unit_of_measurement"] == "ppm" - assert state.attributes["device_class"] == "ozone" - assert state.attributes["friendly_name"] == "Air Purifier Ozone" - - # Hepa Filter Condition - state = hass.states.get("sensor.air_purifier_hepa_filter_condition") - assert state - assert state.state == "100" - assert state.attributes["state_class"] == "measurement" - assert state.attributes["unit_of_measurement"] == "%" - assert state.attributes["friendly_name"] == "Air Purifier Hepa filter condition" - - # Activated Carbon Filter Condition - state = hass.states.get("sensor.air_purifier_activated_carbon_filter_condition") - assert state - assert state.state == "100" - assert state.attributes["state_class"] == "measurement" - assert state.attributes["unit_of_measurement"] == "%" + assert state.state == "extra_state" diff --git a/tests/components/matter/test_switch.py b/tests/components/matter/test_switch.py index 0327e9ea5fe..d7a6a700cde 100644 --- a/tests/components/matter/test_switch.py +++ b/tests/components/matter/test_switch.py @@ -5,43 +5,37 @@ from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - setup_integration_with_node_fixture, + snapshot_matter_entities, trigger_subscription_callback, ) -@pytest.fixture(name="powerplug_node") -async def powerplug_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a Powerplug node.""" - return 
await setup_integration_with_node_fixture( - hass, "on-off-plugin-unit", matter_client - ) +@pytest.mark.usefixtures("matter_devices") +async def test_switches( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test switches.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.SWITCH) -@pytest.fixture(name="switch_unit") -async def switch_unit_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a Switch Unit node.""" - return await setup_integration_with_node_fixture(hass, "switch-unit", matter_client) - - -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["on_off_plugin_unit"]) async def test_turn_on( hass: HomeAssistant, matter_client: MagicMock, - powerplug_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test turning on a switch.""" - state = hass.states.get("switch.mock_onoffpluginunit_switch") + state = hass.states.get("switch.mock_onoffpluginunit") assert state assert state.state == "off" @@ -49,35 +43,34 @@ async def test_turn_on( "switch", "turn_on", { - "entity_id": "switch.mock_onoffpluginunit_switch", + "entity_id": "switch.mock_onoffpluginunit", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=powerplug_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ) - set_node_attribute(powerplug_node, 1, 6, 0, True) + set_node_attribute(matter_node, 1, 6, 0, True) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("switch.mock_onoffpluginunit_switch") + state = hass.states.get("switch.mock_onoffpluginunit") assert state assert state.state == "on" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize("node_fixture", ["on_off_plugin_unit"]) async def test_turn_off( hass: HomeAssistant, matter_client: MagicMock, - powerplug_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test turning off a switch.""" - state = hass.states.get("switch.mock_onoffpluginunit_switch") + state = hass.states.get("switch.mock_onoffpluginunit") assert state assert state.state == "off" @@ -85,46 +78,34 @@ async def test_turn_off( "switch", "turn_off", { - "entity_id": "switch.mock_onoffpluginunit_switch", + "entity_id": "switch.mock_onoffpluginunit", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=powerplug_node.node_id, + node_id=matter_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.Off(), ) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_switch_unit( - hass: HomeAssistant, - matter_client: MagicMock, - switch_unit: MatterNode, -) -> None: +@pytest.mark.parametrize("node_fixture", ["switch_unit"]) +async def test_switch_unit(hass: HomeAssistant, matter_node: MatterNode) -> None: """Test if a switch entity is discovered from any (non-light) OnOf cluster device.""" # A switch entity should be discovered as fallback for ANY Matter device (endpoint) # that has the OnOff cluster and does not fall into an explicit discovery schema # by another platform (e.g. light, lock etc.). 
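# The fallback rule described above is exercised below with the "switch_unit"
# fixture. A minimal sketch of the numeric IDs these switch tests pass to
# set_node_attribute(node, endpoint_id, cluster_id, attribute_id, value),
# assuming cluster 6 / attribute 0 correspond to the Matter OnOff cluster and
# its OnOff attribute (the constant names here are illustrative only, not part
# of the test helpers):
ON_OFF_CLUSTER_ID = 6  # clusters.OnOff.id
ON_OFF_ATTRIBUTE_ID = 0  # clusters.OnOff.Attributes.OnOff
# e.g. test_turn_on above reports the new device state with
#   set_node_attribute(matter_node, 1, ON_OFF_CLUSTER_ID, ON_OFF_ATTRIBUTE_ID, True)
# and then awaits trigger_subscription_callback(hass, matter_client) so the
# switch entity flips to "on".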
- state = hass.states.get("switch.mock_switchunit_switch") + state = hass.states.get("switch.mock_switchunit") assert state assert state.state == "off" - assert state.attributes["friendly_name"] == "Mock SwitchUnit Switch" + assert state.attributes["friendly_name"] == "Mock SwitchUnit" -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_power_switch( - hass: HomeAssistant, - matter_client: MagicMock, -) -> None: +@pytest.mark.parametrize("node_fixture", ["room_airconditioner"]) +async def test_power_switch(hass: HomeAssistant, matter_node: MatterNode) -> None: """Test if a Power switch entity is created for a device that supports that.""" - await setup_integration_with_node_fixture( - hass, "room-airconditioner", matter_client - ) state = hass.states.get("switch.room_airconditioner_power") assert state assert state.state == "off" diff --git a/tests/components/matter/test_update.py b/tests/components/matter/test_update.py index 19c57b0f3c7..92576fa69e2 100644 --- a/tests/components/matter/test_update.py +++ b/tests/components/matter/test_update.py @@ -78,21 +78,12 @@ async def update_node_fixture(matter_client: MagicMock) -> AsyncMock: return matter_client.update_node -@pytest.fixture(name="updateable_node") -async def updateable_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a flow sensor node.""" - return await setup_integration_with_node_fixture( - hass, "dimmable-light", matter_client - ) - - +@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) async def test_update_entity( hass: HomeAssistant, matter_client: MagicMock, check_node_update: AsyncMock, - updateable_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test update entity exists and update check got made.""" state = hass.states.get("update.mock_dimmable_light") @@ -102,11 +93,12 @@ async def test_update_entity( assert matter_client.check_node_update.call_count == 1 +@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) async def test_update_check_service( hass: HomeAssistant, matter_client: MagicMock, check_node_update: AsyncMock, - updateable_node: MatterNode, + matter_node: MatterNode, ) -> None: """Test check device update through service call.""" state = hass.states.get("update.mock_dimmable_light") @@ -149,11 +141,12 @@ async def test_update_check_service( ) +@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) async def test_update_install( hass: HomeAssistant, matter_client: MagicMock, check_node_update: AsyncMock, - updateable_node: MatterNode, + matter_node: MatterNode, freezer: FrozenDateTimeFactory, ) -> None: """Test device update with Matter attribute changes influence progress.""" @@ -199,7 +192,7 @@ async def test_update_install( ) set_node_attribute_typed( - updateable_node, + matter_node, 0, clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateState, clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kDownloading, @@ -209,10 +202,11 @@ async def test_update_install( state = hass.states.get("update.mock_dimmable_light") assert state assert state.state == STATE_ON - assert state.attributes.get("in_progress") + assert state.attributes["in_progress"] is True + assert state.attributes["update_percentage"] is None set_node_attribute_typed( - updateable_node, + matter_node, 0, clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateStateProgress, 50, @@ -222,22 +216,23 @@ async def test_update_install( state = 
hass.states.get("update.mock_dimmable_light") assert state assert state.state == STATE_ON - assert state.attributes.get("in_progress") == 50 + assert state.attributes["in_progress"] is True + assert state.attributes["update_percentage"] == 50 set_node_attribute_typed( - updateable_node, + matter_node, 0, clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateState, clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kIdle, ) set_node_attribute_typed( - updateable_node, + matter_node, 0, clusters.BasicInformation.Attributes.SoftwareVersion, 2, ) set_node_attribute_typed( - updateable_node, + matter_node, 0, clusters.BasicInformation.Attributes.SoftwareVersionString, "v2.0", @@ -249,12 +244,13 @@ async def test_update_install( assert state.attributes.get("installed_version") == "v2.0" +@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) async def test_update_install_failure( hass: HomeAssistant, matter_client: MagicMock, check_node_update: AsyncMock, update_node: AsyncMock, - updateable_node: MatterNode, + matter_node: MatterNode, freezer: FrozenDateTimeFactory, ) -> None: """Test update entity service call errors.""" @@ -317,12 +313,13 @@ async def test_update_install_failure( ) +@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) async def test_update_state_save_and_restore( hass: HomeAssistant, hass_storage: dict[str, Any], matter_client: MagicMock, check_node_update: AsyncMock, - updateable_node: MatterNode, + matter_node: MatterNode, freezer: FrozenDateTimeFactory, ) -> None: """Test latest update information is retained across reload/restart.""" @@ -392,7 +389,7 @@ async def test_update_state_restore( ), ), ) - await setup_integration_with_node_fixture(hass, "dimmable-light", matter_client) + await setup_integration_with_node_fixture(hass, "dimmable_light", matter_client) assert check_node_update.call_count == 0 diff --git a/tests/components/matter/test_vacuum.py b/tests/components/matter/test_vacuum.py new file mode 100644 index 00000000000..1b33f6a2fe2 --- /dev/null +++ b/tests/components/matter/test_vacuum.py @@ -0,0 +1,213 @@ +"""Test Matter vacuum.""" + +from unittest.mock import MagicMock, call + +from chip.clusters import Objects as clusters +from matter_server.client.models.node import MatterNode +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceNotSupported +from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component + +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_vacuum( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that the correct entities get created for a vacuum device.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.VACUUM) + + +@pytest.mark.parametrize("node_fixture", ["vacuum_cleaner"]) +async def test_vacuum_actions( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test vacuum entity actions.""" + # Fetch translations + await async_setup_component(hass, "homeassistant", {}) + entity_id = "vacuum.mock_vacuum" + state = hass.states.get(entity_id) + assert state + + # test return_to_base action + await hass.services.async_call( + "vacuum", + "return_to_base", + { + "entity_id": entity_id, + }, + 
blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.RvcOperationalState.Commands.GoHome(), + ) + matter_client.send_device_command.reset_mock() + + # test start/resume action + await hass.services.async_call( + "vacuum", + "start", + { + "entity_id": entity_id, + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.RvcOperationalState.Commands.Resume(), + ) + matter_client.send_device_command.reset_mock() + + # test pause action + await hass.services.async_call( + "vacuum", + "pause", + { + "entity_id": entity_id, + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.OperationalState.Commands.Pause(), + ) + matter_client.send_device_command.reset_mock() + + # test stop action + # stop command is not supported by the vacuum fixture + with pytest.raises( + ServiceNotSupported, + match="Entity vacuum.mock_vacuum does not support action vacuum.stop", + ): + await hass.services.async_call( + "vacuum", + "stop", + { + "entity_id": entity_id, + }, + blocking=True, + ) + + # update accepted command list to add support for stop command + set_node_attribute( + matter_node, 1, 97, 65529, [clusters.OperationalState.Commands.Stop.command_id] + ) + await trigger_subscription_callback(hass, matter_client) + await hass.services.async_call( + "vacuum", + "stop", + { + "entity_id": entity_id, + }, + blocking=True, + ) + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.OperationalState.Commands.Stop(), + ) + matter_client.send_device_command.reset_mock() + + +@pytest.mark.parametrize("node_fixture", ["vacuum_cleaner"]) +async def test_vacuum_updates( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test vacuum entity updates.""" + entity_id = "vacuum.mock_vacuum" + state = hass.states.get(entity_id) + assert state + # confirm initial state is idle (as stored in the fixture) + assert state.state == "idle" + + # confirm state is 'docked' by setting the operational state to 0x42 + set_node_attribute(matter_node, 1, 97, 4, 0x42) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "docked" + + # confirm state is 'docked' by setting the operational state to 0x41 + set_node_attribute(matter_node, 1, 97, 4, 0x41) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "docked" + + # confirm state is 'returning' by setting the operational state to 0x40 + set_node_attribute(matter_node, 1, 97, 4, 0x40) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "returning" + + # confirm state is 'error' by setting the operational state to 0x01 + set_node_attribute(matter_node, 1, 97, 4, 0x01) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "error" + 
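# A compact sketch of the state mapping this test asserts, assuming cluster 97
# is RvcOperationalState (attribute 4 = OperationalState) and cluster 84 is
# RvcRunMode (attribute 1 = CurrentMode); the dict below is illustrative only
# and not part of the integration's API:
RVC_OPERATIONAL_STATE_TO_HA_STATE = {
    0x40: "returning",  # SeekingCharger
    0x41: "docked",     # Charging
    0x42: "docked",     # Docked
    0x01: "error",
    0x02: "error",
    # 0x00 defers to the current RvcRunMode mode: a mode carrying a cleaning
    # tag yields "cleaning", an idle tag yields "idle", anything else "unknown",
    # as asserted in the remaining steps of this test.
}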
+ # confirm state is 'error' by setting the operational state to 0x02 + set_node_attribute(matter_node, 1, 97, 4, 0x02) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "error" + + # confirm state is 'cleaning' by setting; + # - the operational state to 0x00 + # - the run mode is set to a mode which has cleaning tag + set_node_attribute(matter_node, 1, 97, 4, 0) + set_node_attribute(matter_node, 1, 84, 1, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "cleaning" + + # confirm state is 'idle' by setting; + # - the operational state to 0x00 + # - the run mode is set to a mode which has idle tag + set_node_attribute(matter_node, 1, 97, 4, 0) + set_node_attribute(matter_node, 1, 84, 1, 0) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "idle" + + # confirm state is 'unknown' by setting; + # - the operational state to 0x00 + # - the run mode is set to a mode which has neither cleaning or idle tag + set_node_attribute(matter_node, 1, 97, 4, 0) + set_node_attribute(matter_node, 1, 84, 1, 2) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "unknown" diff --git a/tests/components/matter/test_valve.py b/tests/components/matter/test_valve.py new file mode 100644 index 00000000000..9c4429dda65 --- /dev/null +++ b/tests/components/matter/test_valve.py @@ -0,0 +1,135 @@ +"""Test Matter valve.""" + +from unittest.mock import MagicMock, call + +from chip.clusters import Objects as clusters +from matter_server.client.models.node import MatterNode +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_valves( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test valves.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.VALVE) + + +@pytest.mark.parametrize("node_fixture", ["valve"]) +async def test_valve( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test valve entity is created for a Matter ValveConfigurationAndControl Cluster.""" + entity_id = "valve.valve" + state = hass.states.get(entity_id) + assert state + assert state.state == "closed" + assert state.attributes["friendly_name"] == "Valve" + + # test close_valve action + await hass.services.async_call( + "valve", + "close_valve", + { + "entity_id": entity_id, + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.ValveConfigurationAndControl.Commands.Close(), + ) + matter_client.send_device_command.reset_mock() + + # test open_valve action + await hass.services.async_call( + "valve", + "open_valve", + { + "entity_id": entity_id, + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + 
endpoint_id=1, + command=clusters.ValveConfigurationAndControl.Commands.Open(), + ) + matter_client.send_device_command.reset_mock() + + # set changing state to 'opening' + set_node_attribute(matter_node, 1, 129, 4, 2) + set_node_attribute(matter_node, 1, 129, 5, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "opening" + + # set changing state to 'closing' + set_node_attribute(matter_node, 1, 129, 4, 2) + set_node_attribute(matter_node, 1, 129, 5, 0) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "closing" + + # set changing state to 'open' + set_node_attribute(matter_node, 1, 129, 4, 1) + set_node_attribute(matter_node, 1, 129, 5, 0) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "open" + + # add support for setting position by updating the featuremap + set_node_attribute(matter_node, 1, 129, 65532, 2) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.attributes["current_position"] == 0 + + # update current position + set_node_attribute(matter_node, 1, 129, 6, 50) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.attributes["current_position"] == 50 + + # test set_position action + await hass.services.async_call( + "valve", + "set_valve_position", + { + "entity_id": entity_id, + "position": 100, + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.ValveConfigurationAndControl.Commands.Open(targetLevel=100), + ) + matter_client.send_device_command.reset_mock() diff --git a/tests/components/maxcube/test_maxcube_climate.py b/tests/components/maxcube/test_maxcube_climate.py index 48e616f8fd2..8b56ee6a6de 100644 --- a/tests/components/maxcube/test_maxcube_climate.py +++ b/tests/components/maxcube/test_maxcube_climate.py @@ -216,7 +216,7 @@ async def test_thermostat_set_no_temperature( hass: HomeAssistant, cube: MaxCube, thermostat: MaxThermostat ) -> None: """Set hvac mode to heat.""" - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, diff --git a/tests/components/mealie/conftest.py b/tests/components/mealie/conftest.py index ba42d16e56e..8e724e4d8ea 100644 --- a/tests/components/mealie/conftest.py +++ b/tests/components/mealie/conftest.py @@ -1,7 +1,7 @@ """Mealie tests configuration.""" from collections.abc import Generator -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from aiomealie import ( About, @@ -20,7 +20,6 @@ from homeassistant.components.mealie.const import DOMAIN from homeassistant.const import CONF_API_TOKEN, CONF_HOST from tests.common import MockConfigEntry, load_fixture -from tests.components.smhi.common import AsyncMock SHOPPING_LIST_ID = "list-id-1" SHOPPING_ITEM_NOTE = "Shopping Item 1" diff --git a/tests/components/mealie/snapshots/test_diagnostics.ambr b/tests/components/mealie/snapshots/test_diagnostics.ambr index e6c72c950cc..a694c72fcf6 100644 --- a/tests/components/mealie/snapshots/test_diagnostics.ambr +++ b/tests/components/mealie/snapshots/test_diagnostics.ambr @@ 
-10,6 +10,7 @@ 'description': None, 'entry_type': 'breakfast', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -18,6 +19,7 @@ 'recipe': dict({ 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'JeQ2', 'name': 'Roast Chicken', 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', @@ -35,6 +37,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -43,6 +46,7 @@ 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -58,6 +62,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -66,6 +71,7 @@ 'recipe': dict({ 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'En9o', 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', @@ -81,6 +87,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -89,6 +96,7 @@ 'recipe': dict({ 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. 
These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'Kn62', 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', @@ -104,6 +112,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -112,6 +121,7 @@ 'recipe': dict({ 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'ibL6', 'name': 'Pampered Chef Double Chocolate Mocha Trifle', 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', @@ -127,6 +137,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -135,6 +146,7 @@ 'recipe': dict({ 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! These sliders are quick and easy plus they are make-ahead and reheat really well.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'beGq', 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', @@ -150,6 +162,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -158,6 +171,7 @@ 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -173,6 +187,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -181,6 +196,7 @@ 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', @@ -196,6 +212,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -204,6 +221,7 @@ 'recipe': dict({ 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '5G1v', 'name': 'Miso Udon Noodles with Spinach and Tofu', 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', @@ -219,6 +237,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -227,6 +246,7 @@ 'recipe': dict({ 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'rrNL', 'name': 'Mousse de saumon', 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', @@ -242,6 +262,7 @@ 'description': 'Dineren met de boys', 'entry_type': 'dinner', 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-21', @@ -257,6 +278,7 @@ 'description': None, 'entry_type': 'lunch', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -265,6 +287,7 @@ 'recipe': dict({ 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. 
Ingredientes, tiempo de...', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'INQz', 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', @@ -280,6 +303,7 @@ 'description': None, 'entry_type': 'lunch', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -288,6 +312,7 @@ 'recipe': dict({ 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nj5M', 'name': 'Boeuf bourguignon : la vraie recette (2)', 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', @@ -303,6 +328,7 @@ 'description': None, 'entry_type': 'lunch', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -311,6 +337,7 @@ 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -328,6 +355,7 @@ 'description': None, 'entry_type': 'side', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -336,6 +364,7 @@ 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', diff --git a/tests/components/mealie/snapshots/test_services.ambr b/tests/components/mealie/snapshots/test_services.ambr index 3ae158f1d2d..4f9ee6a5c09 100644 --- a/tests/components/mealie/snapshots/test_services.ambr +++ b/tests/components/mealie/snapshots/test_services.ambr @@ -5,6 +5,7 @@ 'date_added': datetime.date(2024, 6, 29), 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'household_id': None, 'image': 'SuPW', 'ingredients': list([ dict({ @@ -196,11 +197,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? 
Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -216,11 +219,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 229, 'recipe': dict({ 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'JeQ2', 'name': 'Roast Chicken', 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', @@ -236,11 +241,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 226, 'recipe': dict({ 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'INQz', 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', @@ -256,11 +263,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 224, 'recipe': dict({ 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nj5M', 'name': 'Boeuf bourguignon : la vraie recette (2)', 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', @@ -276,11 +285,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 222, 'recipe': dict({ 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. 
Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'En9o', 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', @@ -296,11 +307,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 221, 'recipe': dict({ 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'Kn62', 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', @@ -316,11 +329,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 220, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', @@ -336,11 +351,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 219, 'recipe': dict({ 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'ibL6', 'name': 'Pampered Chef Double Chocolate Mocha Trifle', 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', @@ -356,11 +373,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 217, 'recipe': dict({ 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! 
These sliders are quick and easy plus they are make-ahead and reheat really well.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'beGq', 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', @@ -376,11 +395,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 216, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -396,11 +417,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 212, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -416,11 +439,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 211, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', @@ -436,11 +461,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 196, 'recipe': dict({ 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '5G1v', 'name': 'Miso Udon Noodles with Spinach and Tofu', 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', @@ -456,11 +483,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 195, 'recipe': dict({ 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. 
On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'rrNL', 'name': 'Mousse de saumon', 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', @@ -476,6 +505,7 @@ 'description': 'Dineren met de boys', 'entry_type': , 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 21), 'mealplan_id': 1, 'recipe': None, @@ -491,6 +521,7 @@ 'date_added': datetime.date(2024, 6, 29), 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'household_id': None, 'image': 'SuPW', 'ingredients': list([ dict({ @@ -681,11 +712,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -705,11 +738,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. 
Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -729,11 +764,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', diff --git a/tests/components/mealie/test_config_flow.py b/tests/components/mealie/test_config_flow.py index f2886578744..628f0290f43 100644 --- a/tests/components/mealie/test_config_flow.py +++ b/tests/components/mealie/test_config_flow.py @@ -6,7 +6,7 @@ from aiomealie import About, MealieAuthenticationError, MealieConnectionError import pytest from homeassistant.components.mealie.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -85,6 +85,40 @@ async def test_flow_errors( assert result["type"] is FlowResultType.CREATE_ENTRY +async def test_ingress_host( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test disallow ingress host.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "http://homeassistant/hassio/ingress/db21ed7f_mealie", + CONF_API_TOKEN: "token", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "ingress_url"} + + mock_mealie_client.get_user_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://homeassistant:9001", CONF_API_TOKEN: "token"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + @pytest.mark.parametrize( ("version"), [ @@ -152,11 +186,7 @@ async def test_reauth_flow( """Test reauth flow.""" await setup_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -179,11 +209,7 @@ async def test_reauth_flow_wrong_account( """Test reauth flow with wrong account.""" await setup_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -218,11 +244,7 @@ async def test_reauth_flow_exceptions( await setup_integration(hass, mock_config_entry) mock_mealie_client.get_user_info.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -254,13 +276,9 @@ async def test_reconfigure_flow( """Test reconfigure 
flow.""" await setup_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -287,13 +305,9 @@ async def test_reconfigure_flow_wrong_account( """Test reconfigure flow with wrong account.""" await setup_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" @@ -326,13 +340,9 @@ async def test_reconfigure_flow_exceptions( await setup_integration(hass, mock_config_entry) mock_mealie_client.get_user_info.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -340,7 +350,7 @@ async def test_reconfigure_flow_exceptions( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": error} mock_mealie_client.get_user_info.side_effect = None diff --git a/tests/components/meater/test_config_flow.py b/tests/components/meater/test_config_flow.py index b8c1be15268..9049cf4ac9a 100644 --- a/tests/components/meater/test_config_flow.py +++ b/tests/components/meater/test_config_flow.py @@ -123,11 +123,7 @@ async def test_reauth_flow(hass: HomeAssistant, mock_meater) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/media_extractor/test_config_flow.py b/tests/components/media_extractor/test_config_flow.py index bfee5ec4879..786341fd553 100644 --- a/tests/components/media_extractor/test_config_flow.py +++ b/tests/components/media_extractor/test_config_flow.py @@ -1,7 +1,7 @@ """Tests for the Media extractor config flow.""" from homeassistant.components.media_extractor.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -41,16 +41,3 @@ async def test_single_instance_allowed(hass: HomeAssistant) -> None: assert result.get("type") is FlowResultType.ABORT assert result.get("reason") == "single_instance_allowed" - - 
-async def test_import_flow(hass: HomeAssistant, mock_setup_entry) -> None: - """Test import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT} - ) - - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result.get("title") == "Media extractor" - assert result.get("data") == {} - assert result.get("options") == {} - assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/media_extractor/test_init.py b/tests/components/media_extractor/test_init.py index bc80e063697..21fab6f875c 100644 --- a/tests/components/media_extractor/test_init.py +++ b/tests/components/media_extractor/test_init.py @@ -22,12 +22,15 @@ from homeassistant.setup import async_setup_component from . import YOUTUBE_EMPTY_PLAYLIST, YOUTUBE_PLAYLIST, YOUTUBE_VIDEO, MockYoutubeDL from .const import NO_FORMATS_RESPONSE, SOUNDCLOUD_TRACK -from tests.common import load_json_object_fixture +from tests.common import MockConfigEntry, load_json_object_fixture async def test_play_media_service_is_registered(hass: HomeAssistant) -> None: """Test play media service is registered.""" - await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + mock_config_entry = MockConfigEntry(domain=DOMAIN) + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert hass.services.has_service(DOMAIN, SERVICE_PLAY_MEDIA) diff --git a/tests/components/media_player/test_browse_media.py b/tests/components/media_player/test_browse_media.py index 2b7e40923bf..ea684ea2bc2 100644 --- a/tests/components/media_player/test_browse_media.py +++ b/tests/components/media_player/test_browse_media.py @@ -7,8 +7,8 @@ import pytest from homeassistant.components.media_player.browse_media import ( async_process_play_media_url, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.network import NoURLAvailableError diff --git a/tests/components/media_player/test_init.py b/tests/components/media_player/test_init.py index 11898edfc36..7c64f846df1 100644 --- a/tests/components/media_player/test_init.py +++ b/tests/components/media_player/test_init.py @@ -1,11 +1,14 @@ """Test the base functions of the media player.""" +from enum import Enum from http import HTTPStatus +from types import ModuleType from unittest.mock import patch import pytest import voluptuous as vol +from homeassistant.components import media_player from homeassistant.components.media_player import ( BrowseMedia, MediaClass, @@ -18,6 +21,7 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import help_test_all, import_and_test_deprecated_constant_enum from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -28,6 +32,111 @@ async def setup_homeassistant(hass: HomeAssistant): await async_setup_component(hass, "homeassistant", {}) +def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: + return [ + (enum_field, constant_prefix) + for enum_field in enum + if enum_field + not in [ + MediaPlayerEntityFeature.MEDIA_ANNOUNCE, + MediaPlayerEntityFeature.MEDIA_ENQUEUE, + ] + ] + + +@pytest.mark.parametrize( + 
"module", + [media_player, media_player.const], +) +def test_all(module: ModuleType) -> None: + """Test module.__all__ is correctly set.""" + help_test_all(module) + + +@pytest.mark.parametrize( + ("enum", "constant_prefix"), + _create_tuples(media_player.MediaPlayerEntityFeature, "SUPPORT_") + + _create_tuples(media_player.MediaPlayerDeviceClass, "DEVICE_CLASS_"), +) +@pytest.mark.parametrize( + "module", + [media_player], +) +def test_deprecated_constants( + caplog: pytest.LogCaptureFixture, + enum: Enum, + constant_prefix: str, + module: ModuleType, +) -> None: + """Test deprecated constants.""" + import_and_test_deprecated_constant_enum( + caplog, module, enum, constant_prefix, "2025.10" + ) + + +@pytest.mark.parametrize( + ("enum", "constant_prefix"), + _create_tuples(media_player.MediaClass, "MEDIA_CLASS_") + + _create_tuples(media_player.MediaPlayerEntityFeature, "SUPPORT_") + + _create_tuples(media_player.MediaType, "MEDIA_TYPE_") + + _create_tuples(media_player.RepeatMode, "REPEAT_MODE_"), +) +@pytest.mark.parametrize( + "module", + [media_player.const], +) +def test_deprecated_constants_const( + caplog: pytest.LogCaptureFixture, + enum: Enum, + constant_prefix: str, + module: ModuleType, +) -> None: + """Test deprecated constants.""" + import_and_test_deprecated_constant_enum( + caplog, module, enum, constant_prefix, "2025.10" + ) + + +@pytest.mark.parametrize( + "property_suffix", + [ + "play", + "pause", + "stop", + "seek", + "volume_set", + "volume_mute", + "previous_track", + "next_track", + "play_media", + "select_source", + "select_sound_mode", + "clear_playlist", + "shuffle_set", + "grouping", + ], +) +def test_support_properties(property_suffix: str) -> None: + """Test support_*** properties explicitly.""" + + all_features = media_player.MediaPlayerEntityFeature(653887) + feature = media_player.MediaPlayerEntityFeature[property_suffix.upper()] + + entity1 = MediaPlayerEntity() + entity1._attr_supported_features = media_player.MediaPlayerEntityFeature(0) + entity2 = MediaPlayerEntity() + entity2._attr_supported_features = all_features + entity3 = MediaPlayerEntity() + entity3._attr_supported_features = feature + entity4 = MediaPlayerEntity() + entity4._attr_supported_features = all_features & ~feature + + assert getattr(entity1, f"support_{property_suffix}") is False + assert getattr(entity2, f"support_{property_suffix}") is True + assert getattr(entity3, f"support_{property_suffix}") is True + assert getattr(entity4, f"support_{property_suffix}") is False + + async def test_get_image_http( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator ) -> None: @@ -298,10 +407,20 @@ async def test_enqueue_alert_exclusive(hass: HomeAssistant) -> None: ) +@pytest.mark.parametrize( + "media_content_id", + [ + "a/b c/d+e%2Fg{}", + "a/b c/d+e%2D", + "a/b c/d+e%2E", + "2012-06%20Pool%20party%20%2F%20BBQ", + ], +) async def test_get_async_get_browse_image_quoting( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, + media_content_id: str, ) -> None: """Test get browse image using media_content_id with special characters. @@ -325,27 +444,6 @@ async def test_get_async_get_browse_image_quoting( "homeassistant.components.media_player.MediaPlayerEntity." 
"async_get_browse_image", ) as mock_browse_image: - media_content_id = "a/b c/d+e%2Fg{}" url = player.get_browse_image_url("album", media_content_id) await client.get(url) mock_browse_image.assert_called_with("album", media_content_id, None) - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockMediaPlayerEntity(MediaPlayerEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockMediaPlayerEntity() - assert entity.supported_features_compat is MediaPlayerEntityFeature(1) - assert "MockMediaPlayerEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "MediaPlayerEntityFeature.PAUSE" in caplog.text - caplog.clear() - assert entity.supported_features_compat is MediaPlayerEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text diff --git a/tests/components/media_source/test_local_source.py b/tests/components/media_source/test_local_source.py index de90f229a85..d3ae95736a5 100644 --- a/tests/components/media_source/test_local_source.py +++ b/tests/components/media_source/test_local_source.py @@ -11,8 +11,8 @@ import pytest from homeassistant.components import media_source, websocket_api from homeassistant.components.media_source import const -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockUser diff --git a/tests/components/melcloud/snapshots/test_diagnostics.ambr b/tests/components/melcloud/snapshots/test_diagnostics.ambr index 7b0173c240e..671f5afcc52 100644 --- a/tests/components/melcloud/snapshots/test_diagnostics.ambr +++ b/tests/components/melcloud/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'data': dict({ }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'melcloud', 'entry_id': 'TEST_ENTRY_ID', 'minor_version': 1, @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'melcloud', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/melcloud/test_config_flow.py b/tests/components/melcloud/test_config_flow.py index c1c6c10ac4c..3f6e42ac264 100644 --- a/tests/components/melcloud/test_config_flow.py +++ b/tests/components/melcloud/test_config_flow.py @@ -9,7 +9,6 @@ import pytest from homeassistant import config_entries from homeassistant.components.melcloud.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -166,15 +165,7 @@ async def test_token_reauthentication( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -212,15 +203,7 @@ async def test_form_errors_reauthentication( ) mock_entry.add_to_hass(hass) - result = await 
hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) with patch( "homeassistant.components.melcloud.async_setup_entry", @@ -270,15 +253,7 @@ async def test_client_errors_reauthentication( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) with patch( "homeassistant.components.melcloud.async_setup_entry", @@ -328,15 +303,7 @@ async def test_reconfigure_flow( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM @@ -395,15 +362,7 @@ async def test_form_errors_reconfigure( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reconfigure_flow(hass) with patch( "homeassistant.components.melcloud.async_setup_entry", diff --git a/tests/components/meraki/test_device_tracker.py b/tests/components/meraki/test_device_tracker.py index c3126f7b76a..139396a0689 100644 --- a/tests/components/meraki/test_device_tracker.py +++ b/tests/components/meraki/test_device_tracker.py @@ -142,12 +142,8 @@ async def test_data_will_be_saved( req = await meraki_client.post(URL, data=json.dumps(data)) assert req.status == HTTPStatus.OK await hass.async_block_till_done() - state_name = hass.states.get( - "{}.{}".format("device_tracker", "00_26_ab_b8_a9_a4") - ).state + state_name = hass.states.get("device_tracker.00_26_ab_b8_a9_a4").state assert state_name == "home" - state_name = hass.states.get( - "{}.{}".format("device_tracker", "00_26_ab_b8_a9_a5") - ).state + state_name = hass.states.get("device_tracker.00_26_ab_b8_a9_a5").state assert state_name == "home" diff --git a/tests/components/met/conftest.py b/tests/components/met/conftest.py index 699c1c81795..92b81d3d320 100644 --- a/tests/components/met/conftest.py +++ b/tests/components/met/conftest.py @@ -17,8 +17,9 @@ def mock_weather(): "pressure": 100, "humidity": 50, "wind_speed": 10, - "wind_bearing": "NE", + "wind_bearing": 90, "dew_point": 12.1, + "uv_index": 1.1, } mock_data.get_forecast.return_value = {} yield mock_data diff --git a/tests/components/met/test_config_flow.py b/tests/components/met/test_config_flow.py index c7f0311edef..1a2485615d7 100644 --- a/tests/components/met/test_config_flow.py +++ b/tests/components/met/test_config_flow.py @@ -8,9 +8,9 @@ import pytest from homeassistant import config_entries from homeassistant.components.met.const import DOMAIN, HOME_LOCATION_NAME -from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_ELEVATION, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from . 
import init_integration diff --git a/tests/components/met/test_init.py b/tests/components/met/test_init.py index b329e2ff01c..54f6930513b 100644 --- a/tests/components/met/test_init.py +++ b/tests/components/met/test_init.py @@ -7,9 +7,9 @@ from homeassistant.components.met.const import ( DEFAULT_HOME_LONGITUDE, DOMAIN, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import device_registry as dr from . import init_integration diff --git a/tests/components/met/test_weather.py b/tests/components/met/test_weather.py index 80820ef0186..ac3904684e3 100644 --- a/tests/components/met/test_weather.py +++ b/tests/components/met/test_weather.py @@ -2,10 +2,22 @@ from homeassistant import config_entries from homeassistant.components.met import DOMAIN -from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN +from homeassistant.components.weather import ( + ATTR_CONDITION_CLOUDY, + ATTR_WEATHER_DEW_POINT, + ATTR_WEATHER_HUMIDITY, + ATTR_WEATHER_PRESSURE, + ATTR_WEATHER_TEMPERATURE, + ATTR_WEATHER_UV_INDEX, + ATTR_WEATHER_WIND_BEARING, + ATTR_WEATHER_WIND_SPEED, + DOMAIN as WEATHER_DOMAIN, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from . import init_integration + async def test_new_config_entry( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_weather @@ -36,6 +48,25 @@ async def test_legacy_config_entry( assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1 +async def test_weather(hass: HomeAssistant, mock_weather) -> None: + """Test states of the weather.""" + + await init_integration(hass) + assert len(hass.states.async_entity_ids("weather")) == 1 + entity_id = hass.states.async_entity_ids("weather")[0] + + state = hass.states.get(entity_id) + assert state + assert state.state == ATTR_CONDITION_CLOUDY + assert state.attributes[ATTR_WEATHER_TEMPERATURE] == 15 + assert state.attributes[ATTR_WEATHER_PRESSURE] == 100 + assert state.attributes[ATTR_WEATHER_HUMIDITY] == 50 + assert state.attributes[ATTR_WEATHER_WIND_SPEED] == 10 + assert state.attributes[ATTR_WEATHER_WIND_BEARING] == 90 + assert state.attributes[ATTR_WEATHER_DEW_POINT] == 12.1 + assert state.attributes[ATTR_WEATHER_UV_INDEX] == 1.1 + + async def test_tracking_home(hass: HomeAssistant, mock_weather) -> None: """Test we track home.""" await hass.config_entries.flow.async_init("met", context={"source": "onboarding"}) diff --git a/tests/components/microbees/test_config_flow.py b/tests/components/microbees/test_config_flow.py index d168dcd5017..f4e074d000d 100644 --- a/tests/components/microbees/test_config_flow.py +++ b/tests/components/microbees/test_config_flow.py @@ -6,7 +6,7 @@ from microBeesPy import MicroBeesException import pytest from homeassistant.components.microbees.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -144,14 +144,7 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - 
context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -205,14 +198,7 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, config_entry) microbees.return_value.getMyProfile.return_value.id = 12345 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/microsoft/test_tts.py b/tests/components/microsoft/test_tts.py index 0f11501843e..e10ec589113 100644 --- a/tests/components/microsoft/test_tts.py +++ b/tests/components/microsoft/test_tts.py @@ -10,8 +10,8 @@ import pytest from homeassistant.components import tts from homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID from homeassistant.components.microsoft.tts import SUPPORTED_LANGUAGES -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import ServiceNotFound from homeassistant.setup import async_setup_component diff --git a/tests/components/mikrotik/test_config_flow.py b/tests/components/mikrotik/test_config_flow.py index f34fde0c9a5..f65c7f0dfc5 100644 --- a/tests/components/mikrotik/test_config_flow.py +++ b/tests/components/mikrotik/test_config_flow.py @@ -14,6 +14,7 @@ from homeassistant.components.mikrotik.const import ( ) from homeassistant.const import ( CONF_HOST, + CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, @@ -175,18 +176,14 @@ async def test_reauth_success(hass: HomeAssistant, api) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=DEMO_USER_INPUT, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {CONF_USERNAME: "username"} + assert result["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "username", + } result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -207,14 +204,7 @@ async def test_reauth_failed(hass: HomeAssistant, auth_error) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=DEMO_USER_INPUT, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -240,14 +230,7 @@ async def test_reauth_failed_conn_error(hass: HomeAssistant, conn_error) -> None ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=DEMO_USER_INPUT, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git 
a/tests/components/mobile_app/test_device_tracker.py b/tests/components/mobile_app/test_device_tracker.py index d1cbc21c36b..92a956ab629 100644 --- a/tests/components/mobile_app/test_device_tracker.py +++ b/tests/components/mobile_app/test_device_tracker.py @@ -15,7 +15,7 @@ async def test_sending_location( ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": { @@ -48,7 +48,7 @@ async def test_sending_location( assert state.attributes["vertical_accuracy"] == 80 resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": { @@ -87,7 +87,7 @@ async def test_restoring_location( ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": { diff --git a/tests/components/mobile_app/test_init.py b/tests/components/mobile_app/test_init.py index e1c7ed27cf9..a4edbea6ecf 100644 --- a/tests/components/mobile_app/test_init.py +++ b/tests/components/mobile_app/test_init.py @@ -226,3 +226,37 @@ async def test_delete_cloud_hook( await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED assert (CONF_CLOUDHOOK_URL in config_entry.data) == should_cloudhook_exist + + +async def test_remove_entry_on_user_remove( + hass: HomeAssistant, + hass_admin_user: MockUser, +) -> None: + """Test removing related config entry, when a user gets removed from HA.""" + + config_entry = MockConfigEntry( + data={ + **REGISTER_CLEARTEXT, + CONF_WEBHOOK_ID: "test-webhook-id", + ATTR_DEVICE_NAME: "Test", + ATTR_DEVICE_ID: "Test", + CONF_USER_ID: hass_admin_user.id, + CONF_CLOUDHOOK_URL: "https://hook-url-already-exists", + }, + domain=DOMAIN, + title="Test", + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + + await hass.auth.async_remove_user(hass_admin_user) + await hass.async_block_till_done() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 0 diff --git a/tests/components/mobile_app/test_sensor.py b/tests/components/mobile_app/test_sensor.py index 6411274fc4e..fb124797523 100644 --- a/tests/components/mobile_app/test_sensor.py +++ b/tests/components/mobile_app/test_sensor.py @@ -622,3 +622,78 @@ async def test_updating_disabled_sensor( json = await update_resp.json() assert json["battery_state"]["success"] is True assert json["battery_state"]["is_disabled"] is True + + +async def test_recreate_correct_from_entity_registry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, +) -> None: + """Test that sensors can be re-created from entity registry.""" + webhook_id = create_registrations[1]["webhook_id"] + webhook_url = f"/api/webhook/{webhook_id}" + + reg_resp = await webhook_client.post( + webhook_url, + json={ + "type": "register_sensor", + "data": { + "device_class": "battery", + "icon": 
"mdi:battery", + "name": "Battery State", + "state": 100, + "type": "sensor", + "unique_id": "battery_state", + "unit_of_measurement": PERCENTAGE, + "state_class": "measurement", + }, + }, + ) + + assert reg_resp.status == HTTPStatus.CREATED + + update_resp = await webhook_client.post( + webhook_url, + json={ + "type": "update_sensor_states", + "data": [ + { + "icon": "mdi:battery-unknown", + "state": 123, + "type": "sensor", + "unique_id": "battery_state", + }, + ], + }, + ) + + assert update_resp.status == HTTPStatus.OK + + entity = hass.states.get("sensor.test_1_battery_state") + + assert entity is not None + entity_entry = entity_registry.async_get("sensor.test_1_battery_state") + assert entity_entry is not None + + assert entity_entry.capabilities == { + "state_class": "measurement", + } + + entry = hass.config_entries.async_entries("mobile_app")[1] + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("sensor.test_1_battery_state").state == STATE_UNAVAILABLE + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity_entry = entity_registry.async_get("sensor.test_1_battery_state") + assert entity_entry is not None + assert hass.states.get("sensor.test_1_battery_state") is not None + + assert entity_entry.capabilities == { + "state_class": "measurement", + } diff --git a/tests/components/mobile_app/test_webhook.py b/tests/components/mobile_app/test_webhook.py index 61e342a45ce..dda5f369ad5 100644 --- a/tests/components/mobile_app/test_webhook.py +++ b/tests/components/mobile_app/test_webhook.py @@ -101,7 +101,7 @@ async def test_webhook_handle_render_template( ) -> None: """Test that we render templates properly.""" resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "render_template", "data": { @@ -133,7 +133,7 @@ async def test_webhook_handle_call_services( calls = async_mock_service(hass, "test", "mobile_app") resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json=CALL_SERVICE, ) @@ -158,7 +158,7 @@ async def test_webhook_handle_fire_event( hass.bus.async_listen("test_event", store_event) resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json=FIRE_EVENT + f"/api/webhook/{create_registrations[1]['webhook_id']}", json=FIRE_EVENT ) assert resp.status == HTTPStatus.OK @@ -224,7 +224,7 @@ async def test_webhook_handle_get_zones( await hass.services.async_call(ZONE_DOMAIN, "reload", blocking=True) resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={"type": "get_zones"}, ) @@ -317,7 +317,7 @@ async def test_webhook_returns_error_incorrect_json( ) -> None: """Test that an error is returned when JSON is invalid.""" resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), data="not json" + f"/api/webhook/{create_registrations[1]['webhook_id']}", data="not json" ) assert resp.status == HTTPStatus.BAD_REQUEST @@ -350,7 +350,7 @@ async def test_webhook_handle_decryption( container = {"type": msg["type"], "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - 
"/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -374,7 +374,7 @@ async def test_webhook_handle_decryption_legacy( container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -399,7 +399,7 @@ async def test_webhook_handle_decryption_fail( data = encrypt_payload(key, RENDER_TEMPLATE["data"]) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -412,7 +412,7 @@ async def test_webhook_handle_decryption_fail( data = encrypt_payload(key, "{not_valid", encode_json=False) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -424,7 +424,7 @@ async def test_webhook_handle_decryption_fail( data = encrypt_payload(key[::-1], RENDER_TEMPLATE["data"]) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -444,7 +444,7 @@ async def test_webhook_handle_decryption_legacy_fail( data = encrypt_payload_legacy(key, RENDER_TEMPLATE["data"]) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -457,7 +457,7 @@ async def test_webhook_handle_decryption_legacy_fail( data = encrypt_payload_legacy(key, "{not_valid", encode_json=False) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -469,7 +469,7 @@ async def test_webhook_handle_decryption_legacy_fail( data = encrypt_payload_legacy(key[::-1], RENDER_TEMPLATE["data"]) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -490,7 +490,7 @@ async def test_webhook_handle_decryption_legacy_upgrade( container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + 
f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -508,7 +508,7 @@ async def test_webhook_handle_decryption_legacy_upgrade( container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -526,7 +526,7 @@ async def test_webhook_handle_decryption_legacy_upgrade( container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container ) assert resp.status == HTTPStatus.OK @@ -539,7 +539,7 @@ async def test_webhook_requires_encryption( ) -> None: """Test that encrypted registrations only accept encrypted data.""" resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), + f"/api/webhook/{create_registrations[0]['webhook_id']}", json=RENDER_TEMPLATE, ) @@ -560,7 +560,7 @@ async def test_webhook_update_location_without_locations( # start off with a location set by name resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": {"location_name": STATE_HOME}, @@ -575,7 +575,7 @@ async def test_webhook_update_location_without_locations( # set location to an 'unknown' state resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": {"altitude": 123}, @@ -597,7 +597,7 @@ async def test_webhook_update_location_with_gps( ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": {"gps": [1, 2], "gps_accuracy": 10, "altitude": -10}, @@ -621,7 +621,7 @@ async def test_webhook_update_location_with_gps_without_accuracy( ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": {"gps": [1, 2]}, @@ -659,7 +659,7 @@ async def test_webhook_update_location_with_location_name( await hass.services.async_call(ZONE_DOMAIN, "reload", blocking=True) resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": {"location_name": "zone_name"}, @@ -672,7 +672,7 @@ async def test_webhook_update_location_with_location_name( assert state.state == "zone_name" resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": {"location_name": STATE_HOME}, @@ -685,7 +685,7 @@ async def test_webhook_update_location_with_location_name( assert state.state == STATE_HOME resp = await webhook_client.post( - 
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "update_location", "data": {"location_name": STATE_NOT_HOME}, @@ -876,7 +876,7 @@ async def test_webhook_handle_scan_tag( events = async_capture_events(hass, EVENT_TAG_SCANNED) resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={"type": "scan_tag", "data": {"tag_id": "mock-tag-id"}}, ) @@ -1052,7 +1052,7 @@ async def test_webhook_handle_conversation_process( return_value=mock_conversation_agent, ): resp = await webhook_client.post( - "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), + f"/api/webhook/{create_registrations[1]['webhook_id']}", json={ "type": "conversation_process", "data": { diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index 5c612f9f8ad..cdea046ceea 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -57,7 +57,7 @@ def check_config_loaded_fixture(): @pytest.fixture(name="register_words") def register_words_fixture(): """Set default for register_words.""" - return [0x00, 0x00] + return [0x00] @pytest.fixture(name="config_addon") diff --git a/tests/components/modbus/test_binary_sensor.py b/tests/components/modbus/test_binary_sensor.py index 6aae0e7feae..24293377174 100644 --- a/tests/components/modbus/test_binary_sensor.py +++ b/tests/components/modbus/test_binary_sensor.py @@ -3,6 +3,7 @@ import pytest from homeassistant.components.binary_sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_DISCRETE, @@ -15,10 +16,12 @@ from homeassistant.components.modbus.const import ( MODBUS_DOMAIN, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_BINARY_SENSORS, CONF_DEVICE_CLASS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, CONF_UNIQUE_ID, @@ -26,7 +29,7 @@ from homeassistant.const import ( STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -212,14 +215,20 @@ async def test_service_binary_sensor_update( """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -428,7 +437,7 @@ async def test_no_discovery_info_binary_sensor( assert await async_setup_component( hass, SENSOR_DOMAIN, - {SENSOR_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {SENSOR_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert SENSOR_DOMAIN in hass.config.components diff --git 
a/tests/components/modbus/test_climate.py b/tests/components/modbus/test_climate.py index 5578234ee6e..d34846639b5 100644 --- a/tests/components/modbus/test_climate.py +++ b/tests/components/modbus/test_climate.py @@ -20,6 +20,10 @@ from homeassistant.components.climate import ( FAN_OFF, FAN_ON, FAN_TOP, + SERVICE_SET_FAN_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_SWING_MODE, + SERVICE_SET_TEMPERATURE, SWING_BOTH, SWING_HORIZONTAL, SWING_OFF, @@ -27,6 +31,7 @@ from homeassistant.components.climate import ( SWING_VERTICAL, HVACMode, ) +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CONF_CLIMATES, CONF_DATA_TYPE, @@ -66,15 +71,17 @@ from homeassistant.components.modbus.const import ( DataType, ) from homeassistant.const import ( + ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_ADDRESS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -152,13 +159,13 @@ ENTITY_ID = f"{CLIMATE_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") CONF_HVAC_MODE_REGISTER: { CONF_ADDRESS: 11, CONF_HVAC_MODE_VALUES: { - "state_off": 0, - "state_heat": 1, - "state_cool": 2, - "state_heat_cool": 3, - "state_dry": 4, - "state_fan_only": 5, - "state_auto": 6, + CONF_HVAC_MODE_OFF: 0, + CONF_HVAC_MODE_HEAT: 1, + CONF_HVAC_MODE_COOL: 2, + CONF_HVAC_MODE_HEAT_COOL: 3, + CONF_HVAC_MODE_DRY: 4, + CONF_HVAC_MODE_FAN_ONLY: 5, + CONF_HVAC_MODE_AUTO: 6, }, }, } @@ -176,13 +183,13 @@ ENTITY_ID = f"{CLIMATE_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") CONF_ADDRESS: 11, CONF_WRITE_REGISTERS: True, CONF_HVAC_MODE_VALUES: { - "state_off": 0, - "state_heat": 1, - "state_cool": 2, - "state_heat_cool": 3, - "state_dry": 4, - "state_fan_only": 5, - "state_auto": 6, + CONF_HVAC_MODE_OFF: 0, + CONF_HVAC_MODE_HEAT: 1, + CONF_HVAC_MODE_COOL: 2, + CONF_HVAC_MODE_HEAT_COOL: 3, + CONF_HVAC_MODE_DRY: 4, + CONF_HVAC_MODE_FAN_ONLY: 5, + CONF_HVAC_MODE_AUTO: 6, }, }, } @@ -501,7 +508,10 @@ async def test_service_climate_update( """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == result @@ -616,7 +626,10 @@ async def test_service_climate_fan_update( """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).attributes[ATTR_FAN_MODE] == result @@ -756,7 +769,10 @@ async def test_service_climate_swing_update( """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + 
SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).attributes[ATTR_SWING_MODE] == result @@ -850,9 +866,9 @@ async def test_service_climate_set_temperature( mock_modbus_ha.read_holding_registers.return_value = ReadResult(result) await hass.services.async_call( CLIMATE_DOMAIN, - "set_temperature", + SERVICE_SET_TEMPERATURE, { - "entity_id": ENTITY_ID, + ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature, }, blocking=True, @@ -961,9 +977,9 @@ async def test_service_set_hvac_mode( await hass.services.async_call( CLIMATE_DOMAIN, - "set_hvac_mode", + SERVICE_SET_HVAC_MODE, { - "entity_id": ENTITY_ID, + ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode, }, blocking=True, @@ -1024,9 +1040,9 @@ async def test_service_set_fan_mode( mock_modbus_ha.read_holding_registers.return_value = ReadResult(result) await hass.services.async_call( CLIMATE_DOMAIN, - "set_fan_mode", + SERVICE_SET_FAN_MODE, { - "entity_id": ENTITY_ID, + ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode, }, blocking=True, @@ -1087,9 +1103,9 @@ async def test_service_set_swing_mode( mock_modbus_ha.read_holding_registers.return_value = ReadResult(result) await hass.services.async_call( CLIMATE_DOMAIN, - "set_swing_mode", + SERVICE_SET_SWING_MODE, { - "entity_id": ENTITY_ID, + ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode, }, blocking=True, @@ -1174,7 +1190,7 @@ async def test_no_discovery_info_climate( assert await async_setup_component( hass, CLIMATE_DOMAIN, - {CLIMATE_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {CLIMATE_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert CLIMATE_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_cover.py b/tests/components/modbus/test_cover.py index 0860b3136ba..ae709f483e1 100644 --- a/tests/components/modbus/test_cover.py +++ b/tests/components/modbus/test_cover.py @@ -3,7 +3,8 @@ from pymodbus.exceptions import ModbusException import pytest -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverState +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_REGISTER_HOLDING, @@ -18,18 +19,18 @@ from homeassistant.components.modbus.const import ( MODBUS_DOMAIN, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COVERS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -99,23 +100,23 @@ async def test_config_cover(hass: HomeAssistant, mock_modbus) -> None: [ ( [0x00], - STATE_CLOSED, + CoverState.CLOSED, ), ( [0x80], - STATE_CLOSED, + CoverState.CLOSED, ), ( [0xFE], - STATE_CLOSED, + CoverState.CLOSED, ), ( [0xFF], - STATE_OPEN, + CoverState.OPEN, ), ( [0x01], - STATE_OPEN, + CoverState.OPEN, ), ], ) @@ -143,23 +144,23 @@ async def test_coil_cover(hass: HomeAssistant, expected, mock_do_cycle) -> None: [ ( [0x00], - STATE_CLOSED, + CoverState.CLOSED, ), ( [0x80], - STATE_OPEN, + CoverState.OPEN, ), ( [0xFE], - STATE_OPEN, + CoverState.OPEN, ), ( [0xFF], - 
STATE_OPEN, + CoverState.OPEN, ), ( [0x01], - STATE_OPEN, + CoverState.OPEN, ), ], ) @@ -185,23 +186,29 @@ async def test_register_cover(hass: HomeAssistant, expected, mock_do_cycle) -> N async def test_service_cover_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + "update_entity", + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) - assert hass.states.get(ENTITY_ID).state == STATE_CLOSED + assert hass.states.get(ENTITY_ID).state == CoverState.CLOSED mock_modbus_ha.read_holding_registers.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) - assert hass.states.get(ENTITY_ID).state == STATE_OPEN + assert hass.states.get(ENTITY_ID).state == CoverState.OPEN @pytest.mark.parametrize( "mock_test_state", [ - (State(ENTITY_ID, STATE_CLOSED),), - (State(ENTITY_ID, STATE_CLOSING),), - (State(ENTITY_ID, STATE_OPENING),), - (State(ENTITY_ID, STATE_OPEN),), + (State(ENTITY_ID, CoverState.CLOSED),), + (State(ENTITY_ID, CoverState.CLOSING),), + (State(ENTITY_ID, CoverState.OPENING),), + (State(ENTITY_ID, CoverState.OPEN),), ], indirect=True, ) @@ -260,27 +267,27 @@ async def test_service_cover_move(hass: HomeAssistant, mock_modbus_ha) -> None: mock_modbus_ha.read_holding_registers.return_value = ReadResult([0x01]) await hass.services.async_call( - "cover", "open_cover", {"entity_id": ENTITY_ID}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True ) - assert hass.states.get(ENTITY_ID).state == STATE_OPEN + assert hass.states.get(ENTITY_ID).state == CoverState.OPEN mock_modbus_ha.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - "cover", "close_cover", {"entity_id": ENTITY_ID}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True ) - assert hass.states.get(ENTITY_ID).state == STATE_CLOSED + assert hass.states.get(ENTITY_ID).state == CoverState.CLOSED await mock_modbus_ha.reset() mock_modbus_ha.read_holding_registers.side_effect = ModbusException("fail write_") await hass.services.async_call( - "cover", "close_cover", {"entity_id": ENTITY_ID}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True ) assert mock_modbus_ha.read_holding_registers.called assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE mock_modbus_ha.read_coils.side_effect = ModbusException("fail write_") await hass.services.async_call( - "cover", "close_cover", {"entity_id": ENTITY_ID2}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID2}, blocking=True ) assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE @@ -293,7 +300,7 @@ async def test_no_discovery_info_cover( assert await async_setup_component( hass, COVER_DOMAIN, - {COVER_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {COVER_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert COVER_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_fan.py b/tests/components/modbus/test_fan.py index d52b9dc309a..2afc6314048 100644 --- a/tests/components/modbus/test_fan.py +++ b/tests/components/modbus/test_fan.py @@ -4,6 +4,7 @@ from pymodbus.exceptions import 
ModbusException import pytest from homeassistant.components.fan import DOMAIN as FAN_DOMAIN +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_DISCRETE, @@ -19,17 +20,21 @@ from homeassistant.components.modbus.const import ( MODBUS_DOMAIN, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -269,12 +274,12 @@ async def test_fan_service_turn( assert hass.states.get(ENTITY_ID).state == STATE_OFF await hass.services.async_call( - "fan", "turn_on", service_data={"entity_id": ENTITY_ID} + FAN_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON await hass.services.async_call( - "fan", "turn_off", service_data={"entity_id": ENTITY_ID} + FAN_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -282,26 +287,26 @@ async def test_fan_service_turn( mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) assert hass.states.get(ENTITY_ID2).state == STATE_OFF await hass.services.async_call( - "fan", "turn_on", service_data={"entity_id": ENTITY_ID2} + FAN_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_ON mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - "fan", "turn_off", service_data={"entity_id": ENTITY_ID2} + FAN_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_OFF mock_modbus.write_register.side_effect = ModbusException("fail write_") await hass.services.async_call( - "fan", "turn_on", service_data={"entity_id": ENTITY_ID2} + FAN_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE mock_modbus.write_coil.side_effect = ModbusException("fail write_") await hass.services.async_call( - "fan", "turn_off", service_data={"entity_id": ENTITY_ID} + FAN_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE @@ -325,12 +330,18 @@ async def test_fan_service_turn( async def test_service_fan_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: 
ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -343,7 +354,7 @@ async def test_no_discovery_info_fan( assert await async_setup_component( hass, FAN_DOMAIN, - {FAN_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {FAN_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert FAN_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index d4dc5b05fac..0cfa7ba8b24 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -19,7 +19,7 @@ from unittest import mock from freezegun.api import FrozenDateTimeFactory from pymodbus.exceptions import ModbusException -from pymodbus.pdu import ExceptionResponse, IllegalFunctionRequest +from pymodbus.pdu import ExceptionResponse import pytest import voluptuous as vol @@ -52,7 +52,6 @@ from homeassistant.components.modbus.const import ( CONF_INPUT_TYPE, CONF_MSG_WAIT, CONF_PARITY, - CONF_RETRIES, CONF_SLAVE_COUNT, CONF_STOPBITS, CONF_SWAP, @@ -68,7 +67,6 @@ from homeassistant.components.modbus.const import ( MODBUS_DOMAIN as DOMAIN, RTUOVERTCP, SERIAL, - SERVICE_RESTART, SERVICE_STOP, SERVICE_WRITE_COIL, SERVICE_WRITE_REGISTER, @@ -573,18 +571,6 @@ async def test_no_duplicate_names(hass: HomeAssistant, do_config) -> None: } ], }, - { - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_RETRIES: 3, - CONF_SENSORS: [ - { - CONF_NAME: "dummy", - CONF_ADDRESS: 9999, - } - ], - }, { CONF_TYPE: TCP, CONF_HOST: TEST_MODBUS_HOST, @@ -834,7 +820,6 @@ SERVICE = "service" [ {VALUE: ReadResult([0x0001]), DATA: ""}, {VALUE: ExceptionResponse(0x06), DATA: "Pymodbus:"}, - {VALUE: IllegalFunctionRequest(0x06), DATA: "Pymodbus:"}, {VALUE: ModbusException("fail write_"), DATA: "Pymodbus:"}, ], ) @@ -942,7 +927,6 @@ async def mock_modbus_read_pymodbus_fixture( ("do_return", "do_exception", "do_expect_state", "do_expect_value"), [ (ReadResult([1]), None, STATE_ON, "1"), - (IllegalFunctionRequest(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE), (ExceptionResponse(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE), ( ReadResult([1]), @@ -1149,61 +1133,6 @@ async def test_shutdown( assert caplog.text == "" -@pytest.mark.parametrize( - "do_config", - [ - { - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 51, - CONF_SLAVE: 0, - } - ] - }, - ], -) -async def test_stop_restart( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus -) -> None: - """Run test for service stop.""" - - caplog.set_level(logging.INFO) - entity_id = f"{SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") - assert hass.states.get(entity_id).state in (STATE_UNKNOWN, STATE_UNAVAILABLE) - hass.states.async_set(entity_id, 17) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "17" - - mock_modbus.reset_mock() - caplog.clear() - data = { - ATTR_HUB: TEST_MODBUS_NAME, - } - await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - assert mock_modbus.close.called - assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text - - mock_modbus.reset_mock() - caplog.clear() - await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True) - await hass.async_block_till_done() - assert not mock_modbus.close.called - assert mock_modbus.connect.called - assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text - - 
mock_modbus.reset_mock() - caplog.clear() - await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True) - await hass.async_block_till_done() - assert mock_modbus.close.called - assert mock_modbus.connect.called - assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text - assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text - - @pytest.mark.parametrize("do_config", [{}]) async def test_write_no_client(hass: HomeAssistant, mock_modbus) -> None: """Run test for service stop and write without client.""" @@ -1234,7 +1163,7 @@ async def test_integration_reload( ) -> None: """Run test for integration reload.""" - caplog.set_level(logging.INFO) + caplog.set_level(logging.DEBUG) caplog.clear() yaml_path = get_fixture_path("configuration.yaml", "modbus") @@ -1253,7 +1182,7 @@ async def test_integration_reload_failed( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus ) -> None: """Run test for integration connect failure on reload.""" - caplog.set_level(logging.INFO) + caplog.set_level(logging.DEBUG) caplog.clear() yaml_path = get_fixture_path("configuration.yaml", "modbus") diff --git a/tests/components/modbus/test_light.py b/tests/components/modbus/test_light.py index e74da085180..745249ff866 100644 --- a/tests/components/modbus/test_light.py +++ b/tests/components/modbus/test_light.py @@ -3,6 +3,7 @@ from pymodbus.exceptions import ModbusException import pytest +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, @@ -18,18 +19,22 @@ from homeassistant.components.modbus.const import ( MODBUS_DOMAIN, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_LIGHTS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -269,12 +274,12 @@ async def test_light_service_turn( assert hass.states.get(ENTITY_ID).state == STATE_OFF await hass.services.async_call( - "light", "turn_on", service_data={"entity_id": ENTITY_ID} + LIGHT_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON await hass.services.async_call( - "light", "turn_off", service_data={"entity_id": ENTITY_ID} + LIGHT_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -282,20 +287,20 @@ async def test_light_service_turn( mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) assert hass.states.get(ENTITY_ID2).state == STATE_OFF await hass.services.async_call( - "light", "turn_on", service_data={"entity_id": ENTITY_ID2} + LIGHT_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_ON mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - "light", "turn_off", service_data={"entity_id": ENTITY_ID2} + LIGHT_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await 
hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_OFF mock_modbus.write_register.side_effect = ModbusException("fail write_") await hass.services.async_call( - "light", "turn_on", service_data={"entity_id": ENTITY_ID2} + LIGHT_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE @@ -319,12 +324,18 @@ async def test_light_service_turn( async def test_service_light_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -337,7 +348,7 @@ async def test_no_discovery_info_light( assert await async_setup_component( hass, LIGHT_DOMAIN, - {LIGHT_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {LIGHT_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert LIGHT_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_sensor.py b/tests/components/modbus/test_sensor.py index 87015fa634c..fc63a300c5c 100644 --- a/tests/components/modbus/test_sensor.py +++ b/tests/components/modbus/test_sensor.py @@ -4,13 +4,13 @@ import struct import pytest +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_REGISTER_HOLDING, CALL_TYPE_REGISTER_INPUT, CONF_DATA_TYPE, CONF_DEVICE_ADDRESS, CONF_INPUT_TYPE, - CONF_LAZY_ERROR, CONF_MAX_VALUE, CONF_MIN_VALUE, CONF_NAN_VALUE, @@ -32,11 +32,13 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COUNT, CONF_DEVICE_CLASS, CONF_NAME, CONF_OFFSET, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SENSORS, CONF_SLAVE, @@ -45,7 +47,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -166,17 +168,6 @@ SLAVE_UNIQUE_ID = "ground_floor_sensor" } ] }, - { - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 51, - CONF_DATA_TYPE: DataType.INT32, - CONF_VIRTUAL_COUNT: 5, - CONF_LAZY_ERROR: 3, - } - ] - }, { CONF_SENSORS: [ { @@ -1395,12 +1386,18 @@ async def test_service_sensor_update(hass: HomeAssistant, mock_modbus_ha) -> Non """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_input_registers.return_value = ReadResult([27]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == "27" mock_modbus_ha.read_input_registers.return_value = ReadResult([32]) await hass.services.async_call( - "homeassistant", "update_entity", 
{"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == "32" @@ -1413,7 +1410,7 @@ async def test_no_discovery_info_sensor( assert await async_setup_component( hass, SENSOR_DOMAIN, - {SENSOR_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {SENSOR_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert SENSOR_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_switch.py b/tests/components/modbus/test_switch.py index bdb95c667c7..4e0ad0841ea 100644 --- a/tests/components/modbus/test_switch.py +++ b/tests/components/modbus/test_switch.py @@ -6,6 +6,7 @@ from unittest import mock from pymodbus.exceptions import ModbusException import pytest +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_DISCRETE, @@ -21,20 +22,24 @@ from homeassistant.components.modbus.const import ( ) from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_DELAY, CONF_DEVICE_CLASS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, CONF_SWITCHES, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -44,6 +49,7 @@ from tests.common import async_fire_time_changed ENTITY_ID = f"{SWITCH_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") ENTITY_ID2 = f"{ENTITY_ID}_2" +ENTITY_ID3 = f"{ENTITY_ID}_3" @pytest.mark.parametrize( @@ -74,7 +80,7 @@ ENTITY_ID2 = f"{ENTITY_ID}_2" CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, CONF_ADDRESS: 1235, @@ -92,7 +98,7 @@ ENTITY_ID2 = f"{ENTITY_ID}_2" CONF_DEVICE_ADDRESS: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, CONF_ADDRESS: 1235, @@ -110,7 +116,7 @@ ENTITY_ID2 = f"{ENTITY_ID}_2" CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_REGISTER_INPUT, CONF_ADDRESS: 1235, @@ -129,7 +135,7 @@ ENTITY_ID2 = f"{ENTITY_ID}_2" CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_DISCRETE, CONF_ADDRESS: 1235, @@ -147,12 +153,48 @@ ENTITY_ID2 = f"{ENTITY_ID}_2" CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_SCAN_INTERVAL: 0, CONF_VERIFY: None, } ] }, + { + CONF_SWITCHES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1234, + CONF_DEVICE_ADDRESS: 10, + CONF_COMMAND_OFF: 0x00, + CONF_COMMAND_ON: 0x01, + CONF_DEVICE_CLASS: SWITCH_DOMAIN, + CONF_VERIFY: { + CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, + CONF_ADDRESS: 1235, + CONF_STATE_OFF: 0, + CONF_STATE_ON: [1, 2, 3], + }, + } + ] + }, + { + CONF_SWITCHES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1236, + 
CONF_DEVICE_ADDRESS: 10, + CONF_COMMAND_OFF: 0x00, + CONF_COMMAND_ON: 0x01, + CONF_DEVICE_CLASS: SWITCH_DOMAIN, + CONF_VERIFY: { + CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, + CONF_ADDRESS: 1235, + CONF_STATE_OFF: [0, 5, 6], + CONF_STATE_ON: 1, + }, + } + ] + }, ], ) async def test_config_switch(hass: HomeAssistant, mock_modbus) -> None: @@ -218,6 +260,18 @@ async def test_config_switch(hass: HomeAssistant, mock_modbus) -> None: None, STATE_OFF, ), + ( + [0x03], + False, + {CONF_VERIFY: {CONF_STATE_ON: [1, 3]}}, + STATE_ON, + ), + ( + [0x04], + False, + {CONF_VERIFY: {CONF_STATE_OFF: [0, 4]}}, + STATE_OFF, + ), ], ) async def test_all_switch(hass: HomeAssistant, mock_do_cycle, expected) -> None: @@ -269,6 +323,13 @@ async def test_restore_state_switch( CONF_SCAN_INTERVAL: 0, CONF_VERIFY: {}, }, + { + CONF_NAME: f"{TEST_ENTITY_NAME} 3", + CONF_ADDRESS: 18, + CONF_WRITE_TYPE: CALL_TYPE_REGISTER_HOLDING, + CONF_SCAN_INTERVAL: 0, + CONF_VERIFY: {CONF_STATE_ON: [1, 3]}, + }, ], }, ], @@ -283,12 +344,12 @@ async def test_switch_service_turn( assert hass.states.get(ENTITY_ID).state == STATE_OFF await hass.services.async_call( - "switch", "turn_on", service_data={"entity_id": ENTITY_ID} + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON await hass.services.async_call( - "switch", "turn_off", service_data={"entity_id": ENTITY_ID} + SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -296,29 +357,48 @@ async def test_switch_service_turn( mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) assert hass.states.get(ENTITY_ID2).state == STATE_OFF await hass.services.async_call( - "switch", "turn_on", service_data={"entity_id": ENTITY_ID2} + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_ON mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - "switch", "turn_off", service_data={"entity_id": ENTITY_ID2} + SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_OFF + mock_modbus.read_holding_registers.return_value = ReadResult([0x03]) + assert hass.states.get(ENTITY_ID3).state == STATE_OFF + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID3} + ) + await hass.async_block_till_done() + assert hass.states.get(ENTITY_ID3).state == STATE_ON + mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID3} + ) + await hass.async_block_till_done() + assert hass.states.get(ENTITY_ID3).state == STATE_OFF mock_modbus.write_register.side_effect = ModbusException("fail write_") await hass.services.async_call( - "switch", "turn_on", service_data={"entity_id": ENTITY_ID2} + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE mock_modbus.write_coil.side_effect = ModbusException("fail write_") await hass.services.async_call( - "switch", "turn_off", service_data={"entity_id": ENTITY_ID} + SWITCH_DOMAIN, SERVICE_TURN_OFF, 
service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE + mock_modbus.write_register.side_effect = ModbusException("fail write_") + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID3} + ) + await hass.async_block_till_done() + assert hass.states.get(ENTITY_ID3).state == STATE_UNAVAILABLE @pytest.mark.parametrize( @@ -334,17 +414,43 @@ async def test_switch_service_turn( } ] }, + { + CONF_SWITCHES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1236, + CONF_WRITE_TYPE: CALL_TYPE_COIL, + CONF_VERIFY: {CONF_STATE_ON: [1, 3]}, + } + ] + }, + { + CONF_SWITCHES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1235, + CONF_WRITE_TYPE: CALL_TYPE_COIL, + CONF_VERIFY: {CONF_STATE_OFF: [0, 5]}, + } + ] + }, ], ) async def test_service_switch_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -372,7 +478,7 @@ async def test_delay_switch(hass: HomeAssistant, mock_modbus) -> None: mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) now = dt_util.utcnow() await hass.services.async_call( - "switch", "turn_on", service_data={"entity_id": ENTITY_ID} + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -391,7 +497,7 @@ async def test_no_discovery_info_switch( assert await async_setup_component( hass, SWITCH_DOMAIN, - {SWITCH_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {SWITCH_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert SWITCH_DOMAIN in hass.config.components diff --git a/tests/components/modern_forms/__init__.py b/tests/components/modern_forms/__init__.py index ae4e5bd9862..5882eaf1ec9 100644 --- a/tests/components/modern_forms/__init__.py +++ b/tests/components/modern_forms/__init__.py @@ -62,7 +62,9 @@ async def init_integration( ) entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "192.168.1.123", CONF_MAC: "AA:BB:CC:DD:EE:FF"} + domain=DOMAIN, + data={CONF_HOST: "192.168.1.123", CONF_MAC: "AA:BB:CC:DD:EE:FF"}, + unique_id="AA:BB:CC:DD:EE:FF", ) entry.add_to_hass(hass) diff --git a/tests/components/modern_forms/snapshots/test_diagnostics.ambr b/tests/components/modern_forms/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..1b4090ca5a4 --- /dev/null +++ b/tests/components/modern_forms/snapshots/test_diagnostics.ambr @@ -0,0 +1,54 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'host': '192.168.1.123', + 'mac': '**REDACTED**', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'modern_forms', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ 
+ ]), + 'title': 'Mock Title', + 'unique_id': 'AA:BB:CC:DD:EE:FF', + 'version': 1, + }), + 'device': dict({ + 'info': dict({ + 'client_id': 'MF_000000000000', + 'device_name': 'ModernFormsFan', + 'fan_motor_type': 'DC125X25', + 'fan_type': '1818-56', + 'federated_identity': 'us-east-1:f3da237b-c19c-4f61-b387-0e6dde2e470b', + 'firmware_url': '', + 'firmware_version': '01.03.0025', + 'light_type': 'F6IN-120V-R1-30', + 'mac_address': '**REDACTED**', + 'main_mcu_firmware_version': '01.03.3008', + 'owner': '**REDACTED**', + 'product_sku': '', + 'production_lot_number': '', + }), + 'status': dict({ + 'adaptive_learning_enabled': False, + 'away_mode_enabled': False, + 'fan_direction': 'forward', + 'fan_on': True, + 'fan_sleep_timer': 0, + 'fan_speed': 3, + 'light_brightness': 50, + 'light_on': True, + 'light_sleep_timer': 0, + }), + }), + }) +# --- diff --git a/tests/components/modern_forms/test_config_flow.py b/tests/components/modern_forms/test_config_flow.py index 4c39f83f688..5b10d4d729e 100644 --- a/tests/components/modern_forms/test_config_flow.py +++ b/tests/components/modern_forms/test_config_flow.py @@ -84,10 +84,9 @@ async def test_full_zeroconf_flow_implementation( assert result.get("step_id") == "zeroconf_confirm" assert result.get("type") is FlowResultType.FORM - flow = flows[0] - assert "context" in flow - assert flow["context"][CONF_HOST] == "192.168.1.123" - assert flow["context"][CONF_NAME] == "example" + flow = hass.config_entries.flow._progress[flows[0]["flow_id"]] + assert flow.host == "192.168.1.123" + assert flow.name == "example" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} @@ -114,7 +113,11 @@ async def test_connection_error( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "example.com"}, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "example.com"}, ) assert result.get("type") is FlowResultType.FORM @@ -194,24 +197,14 @@ async def test_user_device_exists_abort( await init_integration(hass, aioclient_mock, skip_setup=True) - await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - "host": "192.168.1.123", - "hostname": "example.local.", - "properties": {CONF_MAC: "AA:BB:CC:DD:EE:FF"}, - }, - ) - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={ - "host": "192.168.1.123", - "hostname": "example.local.", - "properties": {CONF_MAC: "AA:BB:CC:DD:EE:FF"}, - }, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "192.168.1.123"}, ) assert result.get("type") is FlowResultType.ABORT @@ -224,16 +217,6 @@ async def test_zeroconf_with_mac_device_exists_abort( """Test we abort zeroconf flow if a Modern Forms device already configured.""" await init_integration(hass, aioclient_mock, skip_setup=True) - await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - "host": "192.168.1.123", - "hostname": "example.local.", - "properties": {CONF_MAC: "AA:BB:CC:DD:EE:FF"}, - }, - ) - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, diff --git a/tests/components/modern_forms/test_diagnostics.py b/tests/components/modern_forms/test_diagnostics.py new file mode 100644 index 00000000000..9eb2e4efa94 --- /dev/null +++ b/tests/components/modern_forms/test_diagnostics.py @@ -0,0 +1,26 @@ 
+"""Tests for the Modern Forms diagnostics platform.""" + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . import init_integration + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation and values of the Modern Forms fans.""" + entry = await init_integration(hass, aioclient_mock) + + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert result == snapshot(exclude=props("created_at", "modified_at", "entry_id")) diff --git a/tests/components/mold_indicator/conftest.py b/tests/components/mold_indicator/conftest.py new file mode 100644 index 00000000000..11f07e1db35 --- /dev/null +++ b/tests/components/mold_indicator/conftest.py @@ -0,0 +1,90 @@ +"""Fixtures for the Mold indicator integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.mold_indicator.const import ( + CONF_CALIBRATION_FACTOR, + CONF_INDOOR_HUMIDITY, + CONF_INDOOR_TEMP, + CONF_OUTDOOR_TEMP, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import ( + ATTR_UNIT_OF_MEASUREMENT, + CONF_NAME, + PERCENTAGE, + UnitOfTemperature, +) +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Automatically path mold indicator.""" + with patch( + "homeassistant.components.mold_indicator.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. 
+ + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + } + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the Mold indicator integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options=get_config, + entry_id="1", + title=DEFAULT_NAME, + ) + + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + hass.states.async_set( + "sensor.indoor_temp", + "10", + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + hass.states.async_set( + "sensor.outdoor_temp", + "10", + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + hass.states.async_set( + "sensor.indoor_humidity", "0", {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE} + ) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/mold_indicator/snapshots/test_config_flow.ambr b/tests/components/mold_indicator/snapshots/test_config_flow.ambr new file mode 100644 index 00000000000..a7986ad051e --- /dev/null +++ b/tests/components/mold_indicator/snapshots/test_config_flow.ambr @@ -0,0 +1,49 @@ +# serializer version: 1 +# name: test_config_flow_preview_success[missing_calibration_factor] + dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'friendly_name': 'Mold Indicator', + 'state_class': 'measurement', + 'unit_of_measurement': '%', + }), + 'state': 'unavailable', + }) +# --- +# name: test_config_flow_preview_success[missing_humidity_entity] + dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'friendly_name': 'Mold Indicator', + 'state_class': 'measurement', + 'unit_of_measurement': '%', + }), + 'state': 'unavailable', + }) +# --- +# name: test_config_flow_preview_success[success] + dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'dewpoint': 12.01, + 'estimated_critical_temp': 19.5, + 'friendly_name': 'Mold Indicator', + 'state_class': 'measurement', + 'unit_of_measurement': '%', + }), + 'state': '61', + }) +# --- +# name: test_options_flow_preview + dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'dewpoint': 12.01, + 'estimated_critical_temp': 19.5, + 'friendly_name': 'Mold Indicator', + 'state_class': 'measurement', + 'unit_of_measurement': '%', + }), + 'state': '61', + }) +# --- diff --git a/tests/components/mold_indicator/test_config_flow.py b/tests/components/mold_indicator/test_config_flow.py new file mode 100644 index 00000000000..bb8362b5e0d --- /dev/null +++ b/tests/components/mold_indicator/test_config_flow.py @@ -0,0 +1,389 @@ +"""Test the Mold indicator config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant import config_entries +from homeassistant.components.mold_indicator.const import ( + CONF_CALIBRATION_FACTOR, + CONF_INDOOR_HUMIDITY, + CONF_INDOOR_TEMP, + CONF_OUTDOOR_TEMP, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow 
import FlowResultType + +from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator + + +async def test_form_sensor(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form for sensor.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 3.0, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 3.0, + } + + await hass.async_block_till_done() + + # Check the entity was updated, no new entity was created + # 3 input entities + resulting mold indicator sensor + assert len(hass.states.async_all()) == 4 + + state = hass.states.get("sensor.mold_indicator") + assert state is not None + + +async def test_calibration_factor_not_zero(hass: HomeAssistant) -> None: + """Test calibration factor is not zero.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 0.0, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "calibration_is_zero"} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 1.0, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: 
"sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 1.0, + } + + +async def test_entry_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + "user_input", + [ + ( + { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + } + ), + ( + { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + } + ), + ( + { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + } + ), + ], + ids=("success", "missing_calibration_factor", "missing_humidity_entity"), +) +async def test_config_flow_preview_success( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + user_input: str, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow preview.""" + client = await hass_ws_client(hass) + + # add state for the tests + hass.states.async_set( + "sensor.indoor_temp", + 23, + {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + hass.states.async_set( + "sensor.indoor_humidity", + 50, + {CONF_UNIT_OF_MEASUREMENT: "%"}, + ) + hass.states.async_set( + "sensor.outdoor_temp", + 16, + {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] is None + assert result["preview"] == "mold_indicator" + + await client.send_json_auto_id( + { + "type": "mold_indicator/start_preview", + "flow_id": result["flow_id"], + "flow_type": "config_flow", + "user_input": user_input, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + + msg = await client.receive_json() + assert msg["event"] == snapshot + assert len(hass.states.async_all()) == 3 + + +async def test_options_flow_preview( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the options flow preview.""" + client = await hass_ws_client(hass) + + # add state for the tests + hass.states.async_set( + "sensor.indoor_temp", + 23, + {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + hass.states.async_set( + "sensor.indoor_humidity", + 50, + {CONF_UNIT_OF_MEASUREMENT: "%"}, + ) + hass.states.async_set( + "sensor.outdoor_temp", + 16, + {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + + # Setup the config entry + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + 
CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + }, + title="Test Sensor", + ) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == FlowResultType.FORM + assert result["errors"] is None + assert result["preview"] == "mold_indicator" + + await client.send_json_auto_id( + { + "type": "mold_indicator/start_preview", + "flow_id": result["flow_id"], + "flow_type": "options_flow", + "user_input": { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + }, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + + msg = await client.receive_json() + assert msg["event"] == snapshot + assert len(hass.states.async_all()) == 4 + + +async def test_options_flow_sensor_preview_config_entry_removed( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test the option flow preview where the config entry is removed.""" + client = await hass_ws_client(hass) + + hass.states.async_set( + "sensor.indoor_temp", + 23, + {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + hass.states.async_set( + "sensor.indoor_humidity", + 50, + {CONF_UNIT_OF_MEASUREMENT: "%"}, + ) + hass.states.async_set( + "sensor.outdoor_temp", + 16, + {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + + # Setup the config entry + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + }, + title="Test Sensor", + ) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == FlowResultType.FORM + assert result["errors"] is None + assert result["preview"] == "mold_indicator" + + await hass.config_entries.async_remove(config_entry.entry_id) + + await client.send_json_auto_id( + { + "type": "mold_indicator/start_preview", + "flow_id": result["flow_id"], + "flow_type": "options_flow", + "user_input": { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + }, + } + ) + msg = await client.receive_json() + assert not msg["success"] + assert msg["error"] == { + "code": "home_assistant_error", + "message": "Config entry not found", + } diff --git a/tests/components/mold_indicator/test_init.py b/tests/components/mold_indicator/test_init.py new file mode 100644 index 00000000000..5fd6b11c8fe --- /dev/null +++ b/tests/components/mold_indicator/test_init.py @@ -0,0 +1,17 @@ +"""Test Mold indicator component setup process.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test unload an entry.""" + 
+ assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/mold_indicator/test_sensor.py b/tests/components/mold_indicator/test_sensor.py index 2de1d34b403..bb3f7c4fc93 100644 --- a/tests/components/mold_indicator/test_sensor.py +++ b/tests/components/mold_indicator/test_sensor.py @@ -16,6 +16,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry + @pytest.fixture(autouse=True) def init_sensors_fixture(hass: HomeAssistant) -> None: @@ -52,6 +54,16 @@ async def test_setup(hass: HomeAssistant) -> None: assert moldind.attributes.get("unit_of_measurement") == PERCENTAGE +async def test_setup_from_config_entry( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test the mold indicator sensor setup from a config entry.""" + + moldind = hass.states.get("sensor.mold_indicator") + assert moldind + assert moldind.attributes.get("unit_of_measurement") == PERCENTAGE + + async def test_invalidcalib(hass: HomeAssistant) -> None: """Test invalid sensor values.""" hass.states.async_set( diff --git a/tests/components/monarch_money/__init__.py b/tests/components/monarch_money/__init__.py new file mode 100644 index 00000000000..f08addf2ec6 --- /dev/null +++ b/tests/components/monarch_money/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Monarch Money integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/monarch_money/conftest.py b/tests/components/monarch_money/conftest.py new file mode 100644 index 00000000000..7d6a965a009 --- /dev/null +++ b/tests/components/monarch_money/conftest.py @@ -0,0 +1,79 @@ +"""Common fixtures for the Monarch Money tests.""" + +from collections.abc import Generator +import json +from typing import Any +from unittest.mock import AsyncMock, PropertyMock, patch + +import pytest +from typedmonarchmoney.models import ( + MonarchAccount, + MonarchCashflowSummary, + MonarchSubscription, +) + +from homeassistant.components.monarch_money.const import DOMAIN +from homeassistant.const import CONF_TOKEN + +from tests.common import MockConfigEntry, load_fixture, load_json_object_fixture + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.monarch_money.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +async def mock_config_entry() -> MockConfigEntry: + """Fixture for mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={CONF_TOKEN: "fake_token_of_doom"}, + unique_id="222260252323873333", + version=1, + ) + + +@pytest.fixture +def mock_config_api() -> Generator[AsyncMock]: + """Mock the MonarchMoney class.""" + + account_json: dict[str, Any] = load_json_object_fixture("get_accounts.json", DOMAIN) + account_data = [MonarchAccount(data) for data in account_json["accounts"]] + account_data_dict: dict[str, MonarchAccount] = { + acc["id"]: 
MonarchAccount(acc) for acc in account_json["accounts"] + } + + cashflow_json: dict[str, Any] = json.loads( + load_fixture("get_cashflow_summary.json", DOMAIN) + ) + cashflow_summary = MonarchCashflowSummary(cashflow_json) + subscription_details = MonarchSubscription( + json.loads(load_fixture("get_subscription_details.json", DOMAIN)) + ) + + with ( + patch( + "homeassistant.components.monarch_money.config_flow.TypedMonarchMoney", + autospec=True, + ) as mock_class, + patch( + "homeassistant.components.monarch_money.TypedMonarchMoney", new=mock_class + ), + ): + instance = mock_class.return_value + type(instance).token = PropertyMock(return_value="mocked_token") + instance.login = AsyncMock(return_value=None) + instance.multi_factor_authenticate = AsyncMock(return_value=None) + instance.get_subscription_details = AsyncMock(return_value=subscription_details) + instance.get_accounts = AsyncMock(return_value=account_data) + instance.get_accounts_as_dict_with_id_key = AsyncMock( + return_value=account_data_dict + ) + instance.get_cashflow_summary = AsyncMock(return_value=cashflow_summary) + instance.get_subscription_details = AsyncMock(return_value=subscription_details) + yield mock_class diff --git a/tests/components/monarch_money/fixtures/get_accounts.json b/tests/components/monarch_money/fixtures/get_accounts.json new file mode 100644 index 00000000000..ddaecc1721b --- /dev/null +++ b/tests/components/monarch_money/fixtures/get_accounts.json @@ -0,0 +1,516 @@ +{ + "accounts": [ + { + "id": "900000000", + "displayName": "Brokerage", + "syncDisabled": false, + "deactivatedAt": null, + "isHidden": false, + "isAsset": true, + "mask": "0189", + "createdAt": "2021-10-15T01:32:33.809450+00:00", + "updatedAt": "2022-05-26T00:56:41.322045+00:00", + "displayLastUpdatedAt": "2022-05-26T00:56:41.321928+00:00", + "currentBalance": 1000.5, + "displayBalance": 1000.5, + "includeInNetWorth": true, + "hideFromList": true, + "hideTransactionsFromReports": false, + "includeBalanceInNetWorth": false, + "includeInGoalBalance": false, + "dataProvider": "plaid", + "dataProviderAccountId": "testProviderAccountId", + "isManual": false, + "transactionsCount": 0, + "holdingsCount": 0, + "manualInvestmentsTrackingMethod": null, + "order": 11, + "icon": "trending-up", + "logoUrl": "base64Nonce", + "type": { + "name": "brokerage", + "display": "Investments", + "__typename": "AccountType" + }, + "subtype": { + "name": "brokerage", + "display": "Brokerage", + "__typename": "AccountSubtype" + }, + "credential": { + "id": "900000001", + "updateRequired": false, + "disconnectedFromDataProviderAt": null, + "dataProvider": "PLAID", + "institution": { + "id": "700000000", + "plaidInstitutionId": "ins_0", + "name": "Rando Brokerage", + "status": "DEGRADED", + "logo": "base64Nonce", + "__typename": "Institution" + }, + "__typename": "Credential" + }, + "institution": { + "id": "700000000", + "name": "Rando Brokerage", + "logo": "base64Nonce", + "primaryColor": "#0075a3", + "url": "https://rando.brokerage/", + "__typename": "Institution" + }, + "__typename": "Account" + }, + { + "id": "900000002", + "displayName": "Checking", + "syncDisabled": false, + "deactivatedAt": null, + "isHidden": false, + "isAsset": true, + "mask": "2602", + "createdAt": "2021-10-15T01:32:33.900521+00:00", + "updatedAt": "2024-02-17T11:21:05.228959+00:00", + "displayLastUpdatedAt": "2024-02-17T11:21:05.228721+00:00", + "currentBalance": 1000.02, + "displayBalance": 1000.02, + "includeInNetWorth": true, + "hideFromList": false, + 
"hideTransactionsFromReports": false, + "includeBalanceInNetWorth": true, + "includeInGoalBalance": true, + "dataProvider": "plaid", + "dataProviderAccountId": "testProviderAccountId", + "isManual": false, + "transactionsCount": 1403, + "holdingsCount": 0, + "manualInvestmentsTrackingMethod": null, + "order": 0, + "icon": "dollar-sign", + "logoUrl": "data:image/png;base64,base64Nonce", + "type": { + "name": "depository", + "display": "Cash", + "__typename": "AccountType" + }, + "subtype": { + "name": "checking", + "display": "Checking", + "__typename": "AccountSubtype" + }, + "credential": { + "id": "900000003", + "updateRequired": false, + "disconnectedFromDataProviderAt": null, + "dataProvider": "PLAID", + "institution": { + "id": "7000000002", + "plaidInstitutionId": "ins_01", + "name": "Rando Bank", + "status": "DEGRADED", + "logo": "base64Nonce", + "__typename": "Institution" + }, + "__typename": "Credential" + }, + "institution": { + "id": "7000000005", + "name": "Rando Bank", + "logo": "base64Nonce", + "primaryColor": "#0075a3", + "url": "https://rando.bank/", + "__typename": "Institution" + }, + "__typename": "Account" + }, + + { + "id": "121212192626186051", + "displayName": "2050 Toyota RAV8", + "syncDisabled": false, + "deactivatedAt": null, + "isHidden": false, + "isAsset": true, + "mask": null, + "createdAt": "2024-08-16T17:37:21.885036+00:00", + "updatedAt": "2024-08-16T17:37:21.885057+00:00", + "displayLastUpdatedAt": "2024-08-16T17:37:21.885057+00:00", + "currentBalance": 11075.58, + "displayBalance": 11075.58, + "includeInNetWorth": true, + "hideFromList": false, + "hideTransactionsFromReports": false, + "includeBalanceInNetWorth": true, + "includeInGoalBalance": false, + "dataProvider": "vin_audit", + "dataProviderAccountId": "1111111v5cw252004", + "isManual": false, + "transactionsCount": 0, + "holdingsCount": 0, + "manualInvestmentsTrackingMethod": null, + "order": 0, + "logoUrl": "https://api.monarchmoney.com/cdn-cgi/image/width=128/images/institution/159427559853802644", + "type": { + "name": "vehicle", + "display": "Vehicles", + "__typename": "AccountType" + }, + "subtype": { + "name": "car", + "display": "Car", + "__typename": "AccountSubtype" + }, + "credential": null, + "institution": { + "id": "123456789853802644", + "name": "VinAudit", + "primaryColor": "#74ab16", + "url": "https://www.vinaudit.com/", + "__typename": "Institution" + }, + "__typename": "Account" + }, + { + "id": "9000000007", + "displayName": "Credit Card", + "syncDisabled": true, + "deactivatedAt": null, + "isHidden": true, + "isAsset": false, + "mask": "3542", + "createdAt": "2021-10-15T01:33:46.646459+00:00", + "updatedAt": "2022-12-10T18:17:06.129456+00:00", + "displayLastUpdatedAt": "2022-10-15T08:34:34.815239+00:00", + "currentBalance": -200.0, + "displayBalance": -200.0, + "includeInNetWorth": true, + "hideFromList": false, + "hideTransactionsFromReports": false, + "includeBalanceInNetWorth": false, + "includeInGoalBalance": true, + "dataProvider": "finicity", + "dataProviderAccountId": "50001", + "isManual": false, + "transactionsCount": 1138, + "holdingsCount": 0, + "manualInvestmentsTrackingMethod": null, + "order": 1, + "icon": "credit-card", + "logoUrl": "data:image/png;base64,base64Nonce", + "type": { + "name": "credit", + "display": "Credit Cards", + "__typename": "AccountType" + }, + "subtype": { + "name": "credit_card", + "display": "Credit Card", + "__typename": "AccountSubtype" + }, + "credential": { + "id": "9000000009", + "updateRequired": true, + 
"disconnectedFromDataProviderAt": null, + "dataProvider": "FINICITY", + "institution": { + "id": "7000000002", + "plaidInstitutionId": "ins_9", + "name": "Rando Credit", + "status": null, + "logo": "base64Nonce", + "__typename": "Institution" + }, + "__typename": "Credential" + }, + "institution": { + "id": "70000000010", + "name": "Rando Credit", + "logo": "base64Nonce", + "primaryColor": "#004966", + "url": "https://rando.credit/", + "__typename": "Institution" + }, + "__typename": "Account" + }, + { + "id": "900000000012", + "displayName": "Roth IRA", + "syncDisabled": false, + "deactivatedAt": null, + "isHidden": false, + "isAsset": true, + "mask": "1052", + "createdAt": "2021-10-15T01:35:59.299450+00:00", + "updatedAt": "2024-02-17T13:32:21.072711+00:00", + "displayLastUpdatedAt": "2024-02-17T13:32:21.072453+00:00", + "currentBalance": 10000.43, + "displayBalance": 10000.43, + "includeInNetWorth": true, + "hideFromList": false, + "hideTransactionsFromReports": false, + "includeBalanceInNetWorth": true, + "includeInGoalBalance": false, + "dataProvider": "plaid", + "dataProviderAccountId": "testProviderAccountId", + "isManual": false, + "transactionsCount": 28, + "holdingsCount": 24, + "manualInvestmentsTrackingMethod": null, + "order": 4, + "icon": "trending-up", + "logoUrl": "data:image/png;base64,base64Nonce", + "type": { + "name": "brokerage", + "display": "Investments", + "__typename": "AccountType" + }, + "subtype": { + "name": "roth", + "display": "Roth IRA", + "__typename": "AccountSubtype" + }, + "credential": { + "id": "90000000014", + "updateRequired": false, + "disconnectedFromDataProviderAt": null, + "dataProvider": "PLAID", + "institution": { + "id": "70000000016", + "plaidInstitutionId": "ins_02", + "name": "Rando Investments", + "status": null, + "logo": "base64Nonce", + "__typename": "Institution" + }, + "__typename": "Credential" + }, + "institution": { + "id": "70000000018", + "name": "Rando Investments", + "logo": "base64Nonce", + "primaryColor": "#40a829", + "url": "https://rando.investments/", + "__typename": "Institution" + }, + "__typename": "Account" + }, + { + "id": "90000000020", + "displayName": "House", + "syncDisabled": false, + "deactivatedAt": null, + "isHidden": false, + "isAsset": true, + "mask": null, + "createdAt": "2021-10-15T01:39:29.370279+00:00", + "updatedAt": "2024-02-12T09:00:25.451425+00:00", + "displayLastUpdatedAt": "2024-02-12T09:00:25.451425+00:00", + "currentBalance": 123000.0, + "displayBalance": 123000.0, + "includeInNetWorth": true, + "hideFromList": false, + "hideTransactionsFromReports": false, + "includeBalanceInNetWorth": true, + "includeInGoalBalance": false, + "dataProvider": "zillow", + "dataProviderAccountId": "testProviderAccountId", + "isManual": false, + "transactionsCount": 0, + "holdingsCount": 0, + "manualInvestmentsTrackingMethod": null, + "order": 2, + "icon": "home", + "logoUrl": "data:image/png;base64,base64Nonce", + "type": { + "name": "real_estate", + "display": "Real Estate", + "__typename": "AccountType" + }, + "subtype": { + "name": "primary_home", + "display": "Primary Home", + "__typename": "AccountSubtype" + }, + "credential": null, + "institution": { + "id": "800000000", + "name": "Zillow", + "logo": "base64Nonce", + "primaryColor": "#006AFF", + "url": "https://www.zillow.com/", + "__typename": "Institution" + }, + "__typename": "Account" + }, + { + "id": "90000000022", + "displayName": "401.k", + "syncDisabled": false, + "deactivatedAt": null, + "isHidden": false, + "isAsset": true, + "mask": null, + 
"createdAt": "2021-10-15T01:41:54.593239+00:00", + "updatedAt": "2024-02-17T08:13:10.554296+00:00", + "displayLastUpdatedAt": "2024-02-17T08:13:10.554029+00:00", + "currentBalance": 100000.35, + "displayBalance": 100000.35, + "includeInNetWorth": true, + "hideFromList": false, + "hideTransactionsFromReports": false, + "includeBalanceInNetWorth": true, + "includeInGoalBalance": false, + "dataProvider": "finicity", + "dataProviderAccountId": "testProviderAccountId", + "isManual": false, + "transactionsCount": 0, + "holdingsCount": 100, + "manualInvestmentsTrackingMethod": null, + "order": 3, + "icon": "trending-up", + "logoUrl": "data:image/png;base64,base64Nonce", + "type": { + "name": "brokerage", + "display": "Investments", + "__typename": "AccountType" + }, + "subtype": { + "name": "st_401k", + "display": "401k", + "__typename": "AccountSubtype" + }, + "credential": { + "id": "90000000024", + "updateRequired": false, + "disconnectedFromDataProviderAt": null, + "dataProvider": "FINICITY", + "institution": { + "id": "70000000026", + "plaidInstitutionId": "ins_03", + "name": "Rando Employer Investments", + "status": "HEALTHY", + "logo": "base64Nonce", + "__typename": "Institution" + }, + "__typename": "Credential" + }, + "institution": { + "id": "70000000028", + "name": "Rando Employer Investments", + "logo": "base64Nonce", + "primaryColor": "#408800", + "url": "https://rando-employer.investments/", + "__typename": "Institution" + }, + "__typename": "Account" + }, + { + "id": "90000000030", + "displayName": "Mortgage", + "syncDisabled": true, + "deactivatedAt": "2023-08-15", + "isHidden": true, + "isAsset": false, + "mask": "0973", + "createdAt": "2021-10-15T01:45:25.244570+00:00", + "updatedAt": "2023-08-16T01:41:36.115588+00:00", + "displayLastUpdatedAt": "2023-08-15T18:11:09.134874+00:00", + "currentBalance": 0.0, + "displayBalance": -0.0, + "includeInNetWorth": true, + "hideFromList": false, + "hideTransactionsFromReports": false, + "includeBalanceInNetWorth": false, + "includeInGoalBalance": false, + "dataProvider": "plaid", + "dataProviderAccountId": "testProviderAccountId", + "isManual": false, + "transactionsCount": 0, + "holdingsCount": 0, + "manualInvestmentsTrackingMethod": null, + "order": 1, + "icon": "home", + "logoUrl": "data:image/png;base64,base64Nonce", + "type": { + "name": "loan", + "display": "Loans", + "__typename": "AccountType" + }, + "subtype": { + "name": "mortgage", + "display": "Mortgage", + "__typename": "AccountSubtype" + }, + "credential": { + "id": "90000000032", + "updateRequired": false, + "disconnectedFromDataProviderAt": null, + "dataProvider": "PLAID", + "institution": { + "id": "70000000034", + "plaidInstitutionId": "ins_04", + "name": "Rando Mortgage", + "status": "HEALTHY", + "logo": "base64Nonce", + "__typename": "Institution" + }, + "__typename": "Credential" + }, + "institution": { + "id": "70000000036", + "name": "Rando Mortgage", + "logo": "base64Nonce", + "primaryColor": "#095aa6", + "url": "https://rando.mortgage/", + "__typename": "Institution" + }, + "__typename": "Account" + }, + { + "id": "186321412999033223", + "displayName": "Wallet", + "syncDisabled": false, + "deactivatedAt": null, + "isHidden": false, + "isAsset": true, + "mask": null, + "createdAt": "2024-08-16T14:22:10.440514+00:00", + "updatedAt": "2024-08-16T14:22:10.512731+00:00", + "displayLastUpdatedAt": "2024-08-16T14:22:10.512731+00:00", + "currentBalance": 20.0, + "displayBalance": 20.0, + "includeInNetWorth": true, + "hideFromList": false, + "hideTransactionsFromReports": 
false, + "includeBalanceInNetWorth": true, + "includeInGoalBalance": true, + "dataProvider": "", + "dataProviderAccountId": null, + "isManual": true, + "transactionsCount": 0, + "holdingsCount": 0, + "manualInvestmentsTrackingMethod": null, + "order": 14, + "logoUrl": null, + "type": { + "name": "depository", + "display": "Cash", + "__typename": "AccountType" + }, + "subtype": { + "name": "prepaid", + "display": "Prepaid", + "__typename": "AccountSubtype" + }, + "credential": null, + "institution": null, + "__typename": "Account" + } + ], + "householdPreferences": { + "id": "900000000022", + "accountGroupOrder": [], + "__typename": "HouseholdPreferences" + } +} diff --git a/tests/components/monarch_money/fixtures/get_cashflow_summary.json b/tests/components/monarch_money/fixtures/get_cashflow_summary.json new file mode 100644 index 00000000000..a223782469a --- /dev/null +++ b/tests/components/monarch_money/fixtures/get_cashflow_summary.json @@ -0,0 +1,14 @@ +{ + "summary": [ + { + "summary": { + "sumIncome": 15000.0, + "sumExpense": -9000.0, + "savings": 6000.0, + "savingsRate": 0.4, + "__typename": "TransactionsSummary" + }, + "__typename": "AggregateData" + } + ] +} diff --git a/tests/components/monarch_money/fixtures/get_subscription_details.json b/tests/components/monarch_money/fixtures/get_subscription_details.json new file mode 100644 index 00000000000..16f90a2ca38 --- /dev/null +++ b/tests/components/monarch_money/fixtures/get_subscription_details.json @@ -0,0 +1,10 @@ +{ + "subscription": { + "id": "222260252323873333", + "paymentSource": "STRIPE", + "referralCode": "go3dpvrdmw", + "isOnFreeTrial": true, + "hasPremiumEntitlement": true, + "__typename": "HouseholdSubscription" + } +} diff --git a/tests/components/monarch_money/snapshots/test_sensor.ambr b/tests/components/monarch_money/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..cf7e0cb7b2f --- /dev/null +++ b/tests/components/monarch_money/snapshots/test_sensor.ambr @@ -0,0 +1,1112 @@ +# serializer version: 1 +# name: test_all_entities[sensor.cashflow_expense_year_to_date-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cashflow_expense_year_to_date', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Expense year to date', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sum_expense', + 'unique_id': '222260252323873333_cashflow_sum_expense', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.cashflow_expense_year_to_date-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'monetary', + 'friendly_name': 'Cashflow Expense year to date', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.cashflow_expense_year_to_date', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-9000.0', + }) +# --- +# name: test_all_entities[sensor.cashflow_income_year_to_date-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': 
, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cashflow_income_year_to_date', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Income year to date', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sum_income', + 'unique_id': '222260252323873333_cashflow_sum_income', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.cashflow_income_year_to_date-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'monetary', + 'friendly_name': 'Cashflow Income year to date', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.cashflow_income_year_to_date', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15000.0', + }) +# --- +# name: test_all_entities[sensor.cashflow_savings_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cashflow_savings_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Savings rate', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'savings_rate', + 'unique_id': '222260252323873333_cashflow_savings_rate', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.cashflow_savings_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cashflow Savings rate', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.cashflow_savings_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.0', + }) +# --- +# name: test_all_entities[sensor.cashflow_savings_year_to_date-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cashflow_savings_year_to_date', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Savings year to date', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'savings', + 'unique_id': '222260252323873333_cashflow_savings', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.cashflow_savings_year_to_date-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'monetary', + 'friendly_name': 'Cashflow Savings year to date', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.cashflow_savings_year_to_date', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6000.0', + }) +# --- +# name: test_all_entities[sensor.manual_entry_wallet_balance-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.manual_entry_wallet_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Balance', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': '222260252323873333_186321412999033223_balance', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.manual_entry_wallet_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via Manual entry', + 'device_class': 'monetary', + 'friendly_name': 'Manual entry Wallet Balance', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.manual_entry_wallet_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.0', + }) +# --- +# name: test_all_entities[sensor.manual_entry_wallet_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.manual_entry_wallet_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': '222260252323873333_186321412999033223_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.manual_entry_wallet_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via Manual entry', + 'device_class': 'timestamp', + 'friendly_name': 'Manual entry Wallet Data age', + }), + 'context': , + 'entity_id': 'sensor.manual_entry_wallet_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-08-16T14:22:10+00:00', + }) +# --- +# name: test_all_entities[sensor.rando_bank_checking_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rando_bank_checking_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Balance', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': '222260252323873333_900000002_balance', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.rando_bank_checking_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via PLAID', + 'device_class': 'monetary', + 
'entity_picture': 'data:image/png;base64,base64Nonce', + 'friendly_name': 'Rando Bank Checking Balance', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.rando_bank_checking_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1000.02', + }) +# --- +# name: test_all_entities[sensor.rando_bank_checking_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.rando_bank_checking_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': '222260252323873333_900000002_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.rando_bank_checking_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via PLAID', + 'device_class': 'timestamp', + 'friendly_name': 'Rando Bank Checking Data age', + }), + 'context': , + 'entity_id': 'sensor.rando_bank_checking_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-02-17T11:21:05+00:00', + }) +# --- +# name: test_all_entities[sensor.rando_brokerage_brokerage_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rando_brokerage_brokerage_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Balance', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': '222260252323873333_900000000_balance', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.rando_brokerage_brokerage_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via PLAID', + 'device_class': 'monetary', + 'entity_picture': 'base64Nonce', + 'friendly_name': 'Rando Brokerage Brokerage Balance', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.rando_brokerage_brokerage_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1000.5', + }) +# --- +# name: test_all_entities[sensor.rando_brokerage_brokerage_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.rando_brokerage_brokerage_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 
'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': '222260252323873333_900000000_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.rando_brokerage_brokerage_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via PLAID', + 'device_class': 'timestamp', + 'friendly_name': 'Rando Brokerage Brokerage Data age', + }), + 'context': , + 'entity_id': 'sensor.rando_brokerage_brokerage_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2022-05-26T00:56:41+00:00', + }) +# --- +# name: test_all_entities[sensor.rando_credit_credit_card_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rando_credit_credit_card_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Balance', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': '222260252323873333_9000000007_balance', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.rando_credit_credit_card_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via FINICITY', + 'device_class': 'monetary', + 'entity_picture': 'data:image/png;base64,base64Nonce', + 'friendly_name': 'Rando Credit Credit Card Balance', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.rando_credit_credit_card_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-200.0', + }) +# --- +# name: test_all_entities[sensor.rando_credit_credit_card_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.rando_credit_credit_card_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': '222260252323873333_9000000007_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.rando_credit_credit_card_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via FINICITY', + 'device_class': 'timestamp', + 'friendly_name': 'Rando Credit Credit Card Data age', + }), + 'context': , + 'entity_id': 'sensor.rando_credit_credit_card_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2022-12-10T18:17:06+00:00', + }) +# --- +# name: test_all_entities[sensor.rando_employer_investments_401_k_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rando_employer_investments_401_k_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Balance', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': '222260252323873333_90000000022_balance', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.rando_employer_investments_401_k_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via FINICITY', + 'device_class': 'monetary', + 'entity_picture': 'data:image/png;base64,base64Nonce', + 'friendly_name': 'Rando Employer Investments 401.k Balance', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.rando_employer_investments_401_k_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100000.35', + }) +# --- +# name: test_all_entities[sensor.rando_employer_investments_401_k_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.rando_employer_investments_401_k_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': '222260252323873333_90000000022_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.rando_employer_investments_401_k_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via FINICITY', + 'device_class': 'timestamp', + 'friendly_name': 'Rando Employer Investments 401.k Data age', + }), + 'context': , + 'entity_id': 'sensor.rando_employer_investments_401_k_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-02-17T08:13:10+00:00', + }) +# --- +# name: test_all_entities[sensor.rando_investments_roth_ira_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rando_investments_roth_ira_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Balance', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': '222260252323873333_900000000012_balance', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.rando_investments_roth_ira_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch 
Money API via PLAID', + 'device_class': 'monetary', + 'entity_picture': 'data:image/png;base64,base64Nonce', + 'friendly_name': 'Rando Investments Roth IRA Balance', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.rando_investments_roth_ira_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10000.43', + }) +# --- +# name: test_all_entities[sensor.rando_investments_roth_ira_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.rando_investments_roth_ira_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': '222260252323873333_900000000012_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.rando_investments_roth_ira_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via PLAID', + 'device_class': 'timestamp', + 'friendly_name': 'Rando Investments Roth IRA Data age', + }), + 'context': , + 'entity_id': 'sensor.rando_investments_roth_ira_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-02-17T13:32:21+00:00', + }) +# --- +# name: test_all_entities[sensor.rando_mortgage_mortgage_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rando_mortgage_mortgage_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Balance', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': '222260252323873333_90000000030_balance', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.rando_mortgage_mortgage_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via PLAID', + 'device_class': 'monetary', + 'entity_picture': 'data:image/png;base64,base64Nonce', + 'friendly_name': 'Rando Mortgage Mortgage Balance', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.rando_mortgage_mortgage_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[sensor.rando_mortgage_mortgage_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.rando_mortgage_mortgage_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': '222260252323873333_90000000030_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.rando_mortgage_mortgage_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via PLAID', + 'device_class': 'timestamp', + 'friendly_name': 'Rando Mortgage Mortgage Data age', + }), + 'context': , + 'entity_id': 'sensor.rando_mortgage_mortgage_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-08-16T01:41:36+00:00', + }) +# --- +# name: test_all_entities[sensor.vinaudit_2050_toyota_rav8_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.vinaudit_2050_toyota_rav8_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': '222260252323873333_121212192626186051_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.vinaudit_2050_toyota_rav8_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via Manual entry', + 'device_class': 'timestamp', + 'friendly_name': 'VinAudit 2050 Toyota RAV8 Data age', + }), + 'context': , + 'entity_id': 'sensor.vinaudit_2050_toyota_rav8_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-08-16T17:37:21+00:00', + }) +# --- +# name: test_all_entities[sensor.vinaudit_2050_toyota_rav8_value-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.vinaudit_2050_toyota_rav8_value', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Value', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'value', + 'unique_id': '222260252323873333_121212192626186051_value', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.vinaudit_2050_toyota_rav8_value-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via Manual entry', + 'device_class': 'monetary', + 'entity_picture': 'https://api.monarchmoney.com/cdn-cgi/image/width=128/images/institution/159427559853802644', + 'friendly_name': 'VinAudit 2050 Toyota RAV8 Value', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.vinaudit_2050_toyota_rav8_value', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11075.58', + }) +# --- +# name: 
test_all_entities[sensor.zillow_house_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.zillow_house_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Balance', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': '222260252323873333_90000000020_balance', + 'unit_of_measurement': '$', + }) +# --- +# name: test_all_entities[sensor.zillow_house_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via Manual entry', + 'device_class': 'monetary', + 'entity_picture': 'data:image/png;base64,base64Nonce', + 'friendly_name': 'Zillow House Balance', + 'state_class': , + 'unit_of_measurement': '$', + }), + 'context': , + 'entity_id': 'sensor.zillow_house_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '123000.0', + }) +# --- +# name: test_all_entities[sensor.zillow_house_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.zillow_house_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'monarch_money', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': '222260252323873333_90000000020_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.zillow_house_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Monarch Money API via Manual entry', + 'device_class': 'timestamp', + 'friendly_name': 'Zillow House Data age', + }), + 'context': , + 'entity_id': 'sensor.zillow_house_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-02-12T09:00:25+00:00', + }) +# --- diff --git a/tests/components/monarch_money/test_config_flow.py b/tests/components/monarch_money/test_config_flow.py new file mode 100644 index 00000000000..03f0df0c526 --- /dev/null +++ b/tests/components/monarch_money/test_config_flow.py @@ -0,0 +1,166 @@ +"""Test the Monarch Money config flow.""" + +from unittest.mock import AsyncMock + +from monarchmoney import LoginFailedException, RequireMFAException + +from homeassistant.components.monarch_money.const import CONF_MFA_CODE, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + + +async def test_form_simple( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_api: AsyncMock +) -> None: + """Test simple case (no MFA / no errors).""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert 
result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Monarch Money" + assert result["data"] == { + CONF_TOKEN: "mocked_token", + } + assert result["result"].unique_id == "222260252323873333" + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_add_duplicate_entry( + hass: HomeAssistant, + mock_config_entry, + mock_setup_entry: AsyncMock, + mock_config_api: AsyncMock, +) -> None: + """Test a duplicate error config flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_form_invalid_auth( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_api: AsyncMock +) -> None: + """Test config flow with a login error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + # Change the login mock to raise an MFA required error + mock_config_api.return_value.login.side_effect = LoginFailedException( + "Invalid Auth" + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + + mock_config_api.return_value.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Monarch Money" + assert result["data"] == { + CONF_TOKEN: "mocked_token", + } + assert result["context"]["unique_id"] == "222260252323873333" + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_mfa( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_api: AsyncMock +) -> None: + """Test MFA enabled on account configuration.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + # Change the login mock to raise an MFA required error + mock_config_api.return_value.login.side_effect = RequireMFAException("mfa_required") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "mfa_required"} + assert result["step_id"] == "user" + + # Add a bad MFA Code response + mock_config_api.return_value.multi_factor_authenticate.side_effect = KeyError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_MFA_CODE: "123456", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == 
{"base": "bad_mfa"} + assert result["step_id"] == "user" + + # Use a good MFA Code - Clear mock + mock_config_api.return_value.multi_factor_authenticate.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_MFA_CODE: "123456", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Monarch Money" + assert result["data"] == { + CONF_TOKEN: "mocked_token", + } + assert result["result"].unique_id == "222260252323873333" + + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/monarch_money/test_sensor.py b/tests/components/monarch_money/test_sensor.py new file mode 100644 index 00000000000..aac1eaefb2d --- /dev/null +++ b/tests/components/monarch_money/test_sensor.py @@ -0,0 +1,27 @@ +"""Test sensors.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_config_api: AsyncMock, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.monarch_money.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/monzo/test_config_flow.py b/tests/components/monzo/test_config_flow.py index b7d0de9cdc3..7630acfc1cf 100644 --- a/tests/components/monzo/test_config_flow.py +++ b/tests/components/monzo/test_config_flow.py @@ -1,10 +1,7 @@ """Tests for config flow.""" -from datetime import timedelta from unittest.mock import AsyncMock, patch -from freezegun.api import FrozenDateTimeFactory -from monzopy import AuthorisationExpiredError import pytest from homeassistant.components.monzo.application_credentials import ( @@ -12,7 +9,7 @@ from homeassistant.components.monzo.application_credentials import ( OAUTH2_TOKEN, ) from homeassistant.components.monzo.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -20,7 +17,7 @@ from homeassistant.helpers import config_entry_oauth2_flow from . 
import setup_integration from .conftest import CLIENT_ID, USER_ID -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -154,14 +151,7 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -223,14 +213,7 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -269,25 +252,3 @@ async def test_config_reauth_wrong_account( assert result assert result["type"] is FlowResultType.ABORT assert result["reason"] == "wrong_account" - - -async def test_api_can_trigger_reauth( - hass: HomeAssistant, - polling_config_entry: MockConfigEntry, - monzo: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test reauth an existing profile reauthenticates the config entry.""" - await setup_integration(hass, polling_config_entry) - - monzo.user_account.accounts.side_effect = AuthorisationExpiredError() - freezer.tick(timedelta(minutes=10)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - flows = hass.config_entries.flow.async_progress() - - assert len(flows) == 1 - flow = flows[0] - assert flow["step_id"] == "reauth_confirm" - assert flow["handler"] == DOMAIN - assert flow["context"]["source"] == SOURCE_REAUTH diff --git a/tests/components/monzo/test_init.py b/tests/components/monzo/test_init.py new file mode 100644 index 00000000000..b24fb6ff86e --- /dev/null +++ b/tests/components/monzo/test_init.py @@ -0,0 +1,37 @@ +"""Tests for component initialisation.""" + +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from monzopy import AuthorisationExpiredError + +from homeassistant.components.monzo.const import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH +from homeassistant.core import HomeAssistant + +from . 
import setup_integration
+
+from tests.common import MockConfigEntry, async_fire_time_changed
+
+
+async def test_api_can_trigger_reauth(
+    hass: HomeAssistant,
+    polling_config_entry: MockConfigEntry,
+    monzo: AsyncMock,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test that an API authorisation error triggers reauth of the config entry."""
+    await setup_integration(hass, polling_config_entry)
+
+    monzo.user_account.accounts.side_effect = AuthorisationExpiredError()
+    freezer.tick(timedelta(minutes=10))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+    flows = hass.config_entries.flow.async_progress()
+
+    assert len(flows) == 1
+    flow = flows[0]
+    assert flow["step_id"] == "reauth_confirm"
+    assert flow["handler"] == DOMAIN
+    assert flow["context"]["source"] == SOURCE_REAUTH
diff --git a/tests/components/monzo/test_sensor.py b/tests/components/monzo/test_sensor.py
index bf88ce14931..a57466fdbd4 100644
--- a/tests/components/monzo/test_sensor.py
+++ b/tests/components/monzo/test_sensor.py
@@ -5,6 +5,7 @@ from typing import Any
 from unittest.mock import AsyncMock
 
 from freezegun.api import FrozenDateTimeFactory
+from monzopy import InvalidMonzoAPIResponseError
 import pytest
 from syrupy import SnapshotAssertion
@@ -123,15 +124,22 @@ async def test_update_failed(
     monzo: AsyncMock,
     polling_config_entry: MockConfigEntry,
     freezer: FrozenDateTimeFactory,
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test all entities."""
     await setup_integration(hass, polling_config_entry)
 
-    monzo.user_account.accounts.side_effect = Exception
+    monzo.user_account.accounts.side_effect = InvalidMonzoAPIResponseError(
+        {"acc_id": None}, "account_id"
+    )
     freezer.tick(timedelta(minutes=10))
     async_fire_time_changed(hass)
     await hass.async_block_till_done()
 
+    assert "Invalid Monzo API response."
in caplog.text + assert "account_id" in caplog.text + assert "acc_id" in caplog.text + entity_id = await async_get_entity_id( hass, TEST_ACCOUNTS[0]["id"], ACCOUNT_SENSORS[0] ) diff --git a/tests/components/motionblinds_ble/conftest.py b/tests/components/motionblinds_ble/conftest.py index f89cf4f305d..ef4f2e1e15d 100644 --- a/tests/components/motionblinds_ble/conftest.py +++ b/tests/components/motionblinds_ble/conftest.py @@ -19,6 +19,11 @@ from tests.common import MockConfigEntry from tests.components.bluetooth import generate_advertisement_data, generate_ble_device +@pytest.fixture(autouse=True) +def mock_bluetooth(enable_bluetooth: None) -> None: + """Auto mock bluetooth.""" + + @pytest.fixture def address() -> str: """Address fixture.""" @@ -109,6 +114,7 @@ def mock_config_entry( return MockConfigEntry( title="mock_title", domain=DOMAIN, + entry_id="mock_entry_id", unique_id=address, data={ CONF_ADDRESS: address, diff --git a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..d042dc02ac3 --- /dev/null +++ b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr @@ -0,0 +1,38 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'device': dict({ + 'blind_type': 'Roller blind', + 'calibration_type': None, + 'connection_type': 'disconnected', + 'end_position_info': None, + 'position': None, + 'tilt': None, + 'timezone': None, + }), + 'entry': dict({ + 'data': dict({ + 'address': 'cc:cc:cc:cc:cc:cc', + 'blind_type': 'roller', + 'local_name': 'Motionblind CCCC', + 'mac_code': 'CCCC', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'motionblinds_ble', + 'entry_id': 'mock_entry_id', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': '**REDACTED**', + 'unique_id': '**REDACTED**', + 'version': 1, + }), + }) +# --- diff --git a/tests/components/motionblinds_ble/test_cover.py b/tests/components/motionblinds_ble/test_cover.py index 2f6b33b3017..009bd1d0fa3 100644 --- a/tests/components/motionblinds_ble/test_cover.py +++ b/tests/components/motionblinds_ble/test_cover.py @@ -18,10 +18,7 @@ from homeassistant.components.cover import ( SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -74,8 +71,8 @@ async def test_cover_service( [ (None, "unknown"), (MotionRunningType.STILL, "unknown"), - (MotionRunningType.OPENING, STATE_OPENING), - (MotionRunningType.CLOSING, STATE_CLOSING), + (MotionRunningType.OPENING, CoverState.OPENING), + (MotionRunningType.CLOSING, CoverState.CLOSING), ], ) async def test_cover_update_running( @@ -101,9 +98,9 @@ async def test_cover_update_running( ("position", "tilt", "state"), [ (None, None, "unknown"), - (0, 0, STATE_OPEN), - (50, 90, STATE_OPEN), - (100, 180, STATE_CLOSED), + (0, 0, CoverState.OPEN), + (50, 90, CoverState.OPEN), + (100, 180, CoverState.CLOSED), ], ) async def test_cover_update_position( diff --git a/tests/components/motionblinds_ble/test_diagnostics.py b/tests/components/motionblinds_ble/test_diagnostics.py new file mode 100644 index 00000000000..878d2caa326 --- /dev/null +++ b/tests/components/motionblinds_ble/test_diagnostics.py @@ -0,0 +1,27 @@ +"""Test 
Motionblinds Bluetooth diagnostics.""" + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + + await setup_integration(hass, mock_config_entry) + + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot(exclude=props("created_at", "modified_at", "repr")) diff --git a/tests/components/motionblinds_ble/test_entity.py b/tests/components/motionblinds_ble/test_entity.py index 1bfd3b185e5..00369ba1e22 100644 --- a/tests/components/motionblinds_ble/test_entity.py +++ b/tests/components/motionblinds_ble/test_entity.py @@ -23,6 +23,7 @@ from . import setup_integration from tests.common import MockConfigEntry +@pytest.mark.usefixtures("motionblinds_ble_connect") @pytest.mark.parametrize( ("platform", "entity"), [ diff --git a/tests/components/motioneye/__init__.py b/tests/components/motioneye/__init__.py index 183d1b3e6bf..842d862a222 100644 --- a/tests/components/motioneye/__init__.py +++ b/tests/components/motioneye/__init__.py @@ -7,12 +7,12 @@ from unittest.mock import AsyncMock, Mock, patch from motioneye_client.const import DEFAULT_PORT -from homeassistant.components.motioneye import get_motioneye_entity_unique_id from homeassistant.components.motioneye.const import DOMAIN -from homeassistant.config import async_process_ha_core_config +from homeassistant.components.motioneye.entity import get_motioneye_entity_unique_id from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry diff --git a/tests/components/motioneye/test_camera.py b/tests/components/motioneye/test_camera.py index 0f3a7d6f904..8ef58cc968d 100644 --- a/tests/components/motioneye/test_camera.py +++ b/tests/components/motioneye/test_camera.py @@ -3,7 +3,6 @@ from asyncio import AbstractEventLoop from collections.abc import Callable import copy -from typing import cast from unittest.mock import AsyncMock, Mock, call from aiohttp import web @@ -46,6 +45,7 @@ from homeassistant.const import ATTR_DEVICE_ID, ATTR_ENTITY_ID, CONF_URL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util.aiohttp import MockRequest import homeassistant.util.dt as dt_util from . 
import ( @@ -231,7 +231,7 @@ async def test_get_still_image_from_camera( ) -> None: """Test getting a still image.""" - image_handler = AsyncMock(return_value="") + image_handler = AsyncMock(return_value=web.Response(body="")) app = web.Application() app.add_routes( @@ -273,7 +273,8 @@ async def test_get_stream_from_camera( ) -> None: """Test getting a stream.""" - stream_handler = AsyncMock(return_value="") + stream_handler = AsyncMock(return_value=web.Response(body="")) + app = web.Application() app.add_routes([web.get("/", stream_handler)]) stream_server = await aiohttp_server(app) @@ -297,12 +298,7 @@ async def test_get_stream_from_camera( ) await hass.async_block_till_done() - # It won't actually get a stream from the dummy handler, so just catch - # the expected exception, then verify the right handler was called. - with pytest.raises(HTTPBadGateway): - await async_get_mjpeg_stream( - hass, cast(web.Request, None), TEST_CAMERA_ENTITY_ID - ) + await async_get_mjpeg_stream(hass, MockRequest(b"", "test"), TEST_CAMERA_ENTITY_ID) assert stream_handler.called @@ -358,7 +354,8 @@ async def test_camera_option_stream_url_template( """Verify camera with a stream URL template option.""" client = create_mock_motioneye_client() - stream_handler = AsyncMock(return_value="") + stream_handler = AsyncMock(return_value=web.Response(body="")) + app = web.Application() app.add_routes([web.get(f"/{TEST_CAMERA_NAME}/{TEST_CAMERA_ID}", stream_handler)]) stream_server = await aiohttp_server(app) @@ -384,10 +381,7 @@ async def test_camera_option_stream_url_template( ) await hass.async_block_till_done() - # It won't actually get a stream from the dummy handler, so just catch - # the expected exception, then verify the right handler was called. - with pytest.raises(HTTPBadGateway): - await async_get_mjpeg_stream(hass, Mock(), TEST_CAMERA_ENTITY_ID) + await async_get_mjpeg_stream(hass, MockRequest(b"", "test"), TEST_CAMERA_ENTITY_ID) assert AsyncMock.called assert not client.get_camera_stream_url.called diff --git a/tests/components/motioneye/test_config_flow.py b/tests/components/motioneye/test_config_flow.py index 816fb31933a..8d942e7a2a1 100644 --- a/tests/components/motioneye/test_config_flow.py +++ b/tests/components/motioneye/test_config_flow.py @@ -9,7 +9,6 @@ from motioneye_client.client import ( ) from homeassistant import config_entries -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.motioneye.const import ( CONF_ADMIN_PASSWORD, CONF_ADMIN_USERNAME, @@ -23,6 +22,7 @@ from homeassistant.components.motioneye.const import ( from homeassistant.const import CONF_URL, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from . 
import TEST_URL, create_mock_motioneye_client, create_mock_motioneye_config_entry @@ -264,14 +264,7 @@ async def test_reauth(hass: HomeAssistant) -> None: config_entry = create_mock_motioneye_config_entry(hass, data=config_data) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert not result["errors"] diff --git a/tests/components/mqtt/conftest.py b/tests/components/mqtt/conftest.py index 7395767aeae..22f0416a2c6 100644 --- a/tests/components/mqtt/conftest.py +++ b/tests/components/mqtt/conftest.py @@ -4,7 +4,7 @@ import asyncio from collections.abc import AsyncGenerator, Generator from random import getrandbits from typing import Any -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -87,7 +87,8 @@ async def setup_with_birth_msg_client_mock( patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0), ): entry = MockConfigEntry( - domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"} + domain=mqtt.DOMAIN, + data=mqtt_config_entry_data or {mqtt.CONF_BROKER: "test-broker"}, ) entry.add_to_hass(hass) hass.config.components.add(mqtt.DOMAIN) @@ -121,3 +122,10 @@ def record_calls(recorded_calls: list[ReceiveMessage]) -> MessageCallbackType: recorded_calls.append(msg) return record_calls + + +@pytest.fixture +def tag_mock() -> Generator[AsyncMock]: + """Fixture to mock tag.""" + with patch("homeassistant.components.tag.async_scan_tag") as mock_tag: + yield mock_tag diff --git a/tests/components/mqtt/test_alarm_control_panel.py b/tests/components/mqtt/test_alarm_control_panel.py index 07ebb671e37..b46829650f6 100644 --- a/tests/components/mqtt/test_alarm_control_panel.py +++ b/tests/components/mqtt/test_alarm_control_panel.py @@ -9,7 +9,10 @@ from unittest.mock import patch import pytest from homeassistant.components import alarm_control_panel, mqtt -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) from homeassistant.components.mqtt.alarm_control_panel import ( MQTT_ALARM_ATTRIBUTES_BLOCKED, ) @@ -25,16 +28,6 @@ from homeassistant.const import ( SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, SERVICE_RELOAD, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -57,6 +50,7 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -213,23 +207,23 @@ async def test_update_state_via_state_topic( assert hass.states.get(entity_id).state == STATE_UNKNOWN for state in ( - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_PENDING, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, + 
AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.PENDING, + AlarmControlPanelState.ARMING, + AlarmControlPanelState.DISARMING, + AlarmControlPanelState.TRIGGERED, ): async_fire_mqtt_message(hass, "alarm/state", state) assert hass.states.get(entity_id).state == state - # Ignore empty payload (last state is STATE_ALARM_TRIGGERED) + # Ignore empty payload (last state is AlarmControlPanelState.TRIGGERED) async_fire_mqtt_message(hass, "alarm/state", "") - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # Reset state on `None` payload async_fire_mqtt_message(hass, "alarm/state", "None") @@ -769,7 +763,7 @@ async def test_update_state_via_state_topic_template( async_fire_mqtt_message(hass, "test-topic", "100") state = hass.states.get("alarm_control_panel.test") - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY @pytest.mark.parametrize( @@ -1287,6 +1281,18 @@ async def test_entity_name( ) +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = alarm_control_panel.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( "hass_config", [ @@ -1306,7 +1312,11 @@ async def test_entity_name( @pytest.mark.parametrize( ("topic", "payload1", "payload2"), [ - ("test-topic", STATE_ALARM_DISARMED, STATE_ALARM_ARMED_HOME), + ( + "test-topic", + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_HOME, + ), ("availability-topic", "online", "offline"), ("json-attributes-topic", '{"attr1": "val1"}', '{"attr1": "val2"}'), ], diff --git a/tests/components/mqtt/test_binary_sensor.py b/tests/components/mqtt/test_binary_sensor.py index e2c168bd46e..d27163c3423 100644 --- a/tests/components/mqtt/test_binary_sensor.py +++ b/tests/components/mqtt/test_binary_sensor.py @@ -40,6 +40,7 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -1133,7 +1134,7 @@ async def test_skip_restoring_state_with_over_due_expire_trigger( freezer.move_to("2022-02-02 12:02:00+01:00") domain = binary_sensor.DOMAIN - config3 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][domain]) + config3: ConfigType = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][domain]) config3["name"] = "test3" config3["expire_after"] = 10 config3["state_topic"] = "test-topic3" @@ -1193,6 +1194,18 @@ async def test_entity_name( ) +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = binary_sensor.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_button.py b/tests/components/mqtt/test_button.py index d85ead6ecee..f147b33c88b 100644 --- 
a/tests/components/mqtt/test_button.py
+++ b/tests/components/mqtt/test_button.py
@@ -25,6 +25,7 @@ from .test_common import (
     help_test_entity_device_info_update,
     help_test_entity_device_info_with_connection,
     help_test_entity_device_info_with_identifier,
+    help_test_entity_icon_and_entity_picture,
     help_test_entity_id_update_discovery_update,
     help_test_entity_name,
     help_test_publishing_with_custom_encoding,
@@ -534,3 +535,15 @@ async def test_entity_name(
     await help_test_entity_name(
         hass, mqtt_mock_entry, domain, config, expected_friendly_name, device_class
     )
+
+
+async def test_entity_icon_and_entity_picture(
+    hass: HomeAssistant,
+    mqtt_mock_entry: MqttMockHAClientGenerator,
+) -> None:
+    """Test the entity icon or picture setup."""
+    domain = button.DOMAIN
+    config = DEFAULT_CONFIG
+    await help_test_entity_icon_and_entity_picture(
+        hass, mqtt_mock_entry, domain, config
+    )
diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py
index dcded7d187a..4bfcde752ae 100644
--- a/tests/components/mqtt/test_client.py
+++ b/tests/components/mqtt/test_client.py
@@ -1,9 +1,10 @@
 """The tests for the MQTT client."""
 
 import asyncio
-from datetime import datetime, timedelta
+from datetime import timedelta
 import socket
 import ssl
+import time
 from typing import Any
 from unittest.mock import MagicMock, Mock, call, patch
@@ -37,11 +38,6 @@ from tests.common import (
 from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, MqttMockPahoClient
 
 
-@pytest.fixture(autouse=True)
-def mock_storage(hass_storage: dict[str, Any]) -> None:
-    """Autouse hass_storage for the TestCase tests."""
-
-
 def help_assert_message(
     msg: ReceiveMessage,
     topic: str | None = None,
@@ -301,10 +297,13 @@ async def test_subscribe_mqtt_config_entry_disabled(
     mqtt_mock.connected = True
     mqtt_config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0]
-    assert mqtt_config_entry.state is ConfigEntryState.LOADED
+
+    mqtt_config_entry_state = mqtt_config_entry.state
+    assert mqtt_config_entry_state is ConfigEntryState.LOADED
     assert await hass.config_entries.async_unload(mqtt_config_entry.entry_id)
-    assert mqtt_config_entry.state is ConfigEntryState.NOT_LOADED
+    mqtt_config_entry_state = mqtt_config_entry.state
+    assert mqtt_config_entry_state is ConfigEntryState.NOT_LOADED
     await hass.config_entries.async_set_disabled_by(
         mqtt_config_entry.entry_id, ConfigEntryDisabler.USER
@@ -1046,10 +1045,17 @@ async def test_restore_subscriptions_on_reconnect(
     mqtt_client_mock.reset_mock()
     mqtt_client_mock.on_disconnect(None, None, 0)
+    # Test subscribing to another topic while the client is not connected
+    await mqtt.async_subscribe(hass, "test/other", record_calls)
+    async_fire_time_changed(hass, utcnow() + timedelta(seconds=3))  # cooldown
+    assert ("test/other", 0) not in help_all_subscribe_calls(mqtt_client_mock)
+
     mock_debouncer.clear()
     mqtt_client_mock.on_connect(None, None, None, 0)
     await mock_debouncer.wait()
+    # Assert all subscriptions are performed at the broker
     assert ("test/state", 0) in help_all_subscribe_calls(mqtt_client_mock)
+    assert ("test/other", 0) in help_all_subscribe_calls(mqtt_client_mock)
 @pytest.mark.parametrize(
@@ -1284,7 +1290,7 @@ async def test_handle_message_callback(
         callbacks.append(args)
     msg = ReceiveMessage(
-        "some-topic", b"test-payload", 1, False, "some-topic", datetime.now()
+        "some-topic", b"test-payload", 1, False, "some-topic", time.monotonic()
     )
     mock_debouncer.clear()
     await mqtt.async_subscribe(hass, "some-topic", _callback)
@@ -1717,6 +1723,97 @@ async def test_mqtt_subscribes_topics_on_connect(
     assert ("still/pending", 1) in subscribe_calls
+
+@pytest.mark.parametrize("mqtt_config_entry_data", [ENTRY_DEFAULT_BIRTH_MESSAGE])
+async def test_mqtt_subscribes_wildcard_topics_in_correct_order(
+    hass: HomeAssistant,
+    mock_debouncer: asyncio.Event,
+    setup_with_birth_msg_client_mock: MqttMockPahoClient,
+    record_calls: MessageCallbackType,
+) -> None:
+    """Test subscription to wildcard topics on connect in the order of subscription."""
+    mqtt_client_mock = setup_with_birth_msg_client_mock
+
+    mock_debouncer.clear()
+    await mqtt.async_subscribe(hass, "integration/test#", record_calls)
+    await mqtt.async_subscribe(hass, "integration/kitchen_sink#", record_calls)
+    await mock_debouncer.wait()
+
+    def _assert_subscription_order():
+        discovery_subscribes = [
+            f"homeassistant/{platform}/+/config" for platform in SUPPORTED_COMPONENTS
+        ]
+        discovery_subscribes.extend(
+            [
+                f"homeassistant/{platform}/+/+/config"
+                for platform in SUPPORTED_COMPONENTS
+            ]
+        )
+        discovery_subscribes.extend(
+            ["homeassistant/device/+/config", "homeassistant/device/+/+/config"]
+        )
+        discovery_subscribes.extend(["integration/test#", "integration/kitchen_sink#"])
+
+        expected_discovery_subscribes = discovery_subscribes.copy()
+
+        # Assert we see the expected subscribes and in the correct order
+        actual_subscribes = [
+            discovery_subscribes.pop(0)
+            for call in help_all_subscribe_calls(mqtt_client_mock)
+            if discovery_subscribes and discovery_subscribes[0] == call[0]
+        ]
+
+        # Assert we have processed all items and that they are in the correct order
+        assert len(discovery_subscribes) == 0
+        assert actual_subscribes == expected_discovery_subscribes
+
+    # Assert the initial wildcard topic subscription order
+    _assert_subscription_order()
+
+    mqtt_client_mock.on_disconnect(Mock(), None, 0)
+
+    mqtt_client_mock.reset_mock()
+
+    mock_debouncer.clear()
+    mqtt_client_mock.on_connect(Mock(), None, 0, 0)
+    await mock_debouncer.wait()
+
+    # Assert the wildcard topic subscription order after a reconnect
+    _assert_subscription_order()
+
+
+@pytest.mark.parametrize(
+    "mqtt_config_entry_data",
+    [ENTRY_DEFAULT_BIRTH_MESSAGE | {mqtt.CONF_DISCOVERY: False}],
+)
+async def test_mqtt_discovery_not_subscribes_when_disabled(
+    hass: HomeAssistant,
+    mock_debouncer: asyncio.Event,
+    setup_with_birth_msg_client_mock: MqttMockPahoClient,
+) -> None:
+    """Test discovery subscriptions are not performed when discovery is disabled."""
+    mqtt_client_mock = setup_with_birth_msg_client_mock
+
+    await mock_debouncer.wait()
+
+    subscribe_calls = help_all_subscribe_calls(mqtt_client_mock)
+    for component in SUPPORTED_COMPONENTS:
+        assert (f"homeassistant/{component}/+/config", 0) not in subscribe_calls
+        assert (f"homeassistant/{component}/+/+/config", 0) not in subscribe_calls
+
+    mqtt_client_mock.on_disconnect(Mock(), None, 0)
+
+    mqtt_client_mock.reset_mock()
+
+    mock_debouncer.clear()
+    mqtt_client_mock.on_connect(Mock(), None, 0, 0)
+    await mock_debouncer.wait()
+
+    subscribe_calls = help_all_subscribe_calls(mqtt_client_mock)
+    for component in SUPPORTED_COMPONENTS:
+        assert (f"homeassistant/{component}/+/config", 0) not in subscribe_calls
+        assert (f"homeassistant/{component}/+/+/config", 0) not in subscribe_calls
+
+
 @pytest.mark.parametrize(
     "mqtt_config_entry_data",
     [ENTRY_DEFAULT_BIRTH_MESSAGE],
diff --git a/tests/components/mqtt/test_climate.py b/tests/components/mqtt/test_climate.py
index 13bd6b5feda..5edd73e3f5a 100644
--- a/tests/components/mqtt/test_climate.py
+++ 
b/tests/components/mqtt/test_climate.py @@ -53,6 +53,7 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_publishing_with_custom_encoding, @@ -202,7 +203,7 @@ async def test_set_operation_bad_attr_and_state( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_hvac_mode(hass, None, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, None, ENTITY_CLIMATE) # type:ignore[arg-type] assert ( "expected HVACMode or one of 'off', 'heat', 'cool', 'heat_cool', 'auto', 'dry'," " 'fan_only' for dictionary value @ data['hvac_mode']" in str(excinfo.value) @@ -220,10 +221,9 @@ async def test_set_operation( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" - assert state.state == "cool" mqtt_mock.async_publish.assert_called_once_with("mode-topic", "cool", 0, False) @@ -245,7 +245,7 @@ async def test_set_operation_pessimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.state == STATE_UNKNOWN - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == STATE_UNKNOWN @@ -287,7 +287,7 @@ async def test_set_operation_optimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" @@ -316,13 +316,13 @@ async def test_set_operation_with_power_command( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" mqtt_mock.async_publish.assert_has_calls([call("mode-topic", "cool", 0, False)]) mqtt_mock.async_publish.reset_mock() - await common.async_set_hvac_mode(hass, "off", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.OFF, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" mqtt_mock.async_publish.assert_has_calls([call("mode-topic", "off", 0, False)]) @@ -358,12 +358,12 @@ async def test_turn_on_and_off_optimistic_with_power_command( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" mqtt_mock.async_publish.assert_has_calls([call("mode-topic", "cool", 0, False)]) mqtt_mock.async_publish.reset_mock() - await common.async_set_hvac_mode(hass, "off", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.OFF, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" @@ -374,7 +374,7 @@ async def test_turn_on_and_off_optimistic_with_power_command( 
mqtt_mock.async_publish.assert_has_calls([call("power-command", "ON", 0, False)]) mqtt_mock.async_publish.reset_mock() - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" await common.async_turn_off(hass, ENTITY_CLIMATE) @@ -433,7 +433,7 @@ async def test_turn_on_and_off_without_power_command( else: mqtt_mock.async_publish.assert_has_calls([]) - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" mqtt_mock.async_publish.reset_mock() @@ -460,7 +460,7 @@ async def test_set_fan_mode_bad_attr( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("fan_mode") == "low" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_fan_mode(hass, None, ENTITY_CLIMATE) + await common.async_set_fan_mode(hass, None, ENTITY_CLIMATE) # type:ignore[arg-type] assert "string value is None for dictionary value @ data['fan_mode']" in str( excinfo.value ) @@ -555,7 +555,7 @@ async def test_set_swing_mode_bad_attr( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("swing_mode") == "off" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_swing_mode(hass, None, ENTITY_CLIMATE) + await common.async_set_swing_mode(hass, None, ENTITY_CLIMATE) # type:ignore[arg-type] assert "string value is None for dictionary value @ data['swing_mode']" in str( excinfo.value ) @@ -649,7 +649,7 @@ async def test_set_target_temperature( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") == 21 - await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.HEAT, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "heat" mqtt_mock.async_publish.assert_called_once_with("mode-topic", "heat", 0, False) @@ -712,7 +712,7 @@ async def test_set_target_temperature_pessimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None - await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.HEAT, ENTITY_CLIMATE) await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None @@ -744,7 +744,7 @@ async def test_set_target_temperature_optimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") == 21 - await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.HEAT, ENTITY_CLIMATE) await common.async_set_temperature(hass, temperature=17, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") == 17 @@ -1547,14 +1547,14 @@ async def test_set_and_templates( assert state.attributes.get("preset_mode") == PRESET_ECO # Mode - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) mqtt_mock.async_publish.assert_any_call("mode-topic", "mode: cool", 0, False) assert mqtt_mock.async_publish.call_count == 1 mqtt_mock.async_publish.reset_mock() state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" - await common.async_set_hvac_mode(hass, "off", ENTITY_CLIMATE) + 
await common.async_set_hvac_mode(hass, HVACMode.OFF, ENTITY_CLIMATE) mqtt_mock.async_publish.assert_any_call("mode-topic", "mode: off", 0, False) assert mqtt_mock.async_publish.call_count == 1 mqtt_mock.async_publish.reset_mock() @@ -2449,3 +2449,15 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) + + +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = climate.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index c135c29ebc5..95a26daf562 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -20,7 +20,7 @@ from homeassistant.components.mqtt.const import ( MQTT_CONNECTION_STATE, SUPPORTED_COMPONENTS, ) -from homeassistant.components.mqtt.mixins import MQTT_ATTRIBUTES_BLOCKED +from homeassistant.components.mqtt.entity import MQTT_ATTRIBUTES_BLOCKED from homeassistant.components.mqtt.models import PublishPayloadType from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( @@ -69,10 +69,14 @@ DEFAULT_CONFIG_DEVICE_INFO_MAC = { _SENTINEL = object() DISCOVERY_COUNT = len(MQTT) +DEVICE_DISCOVERY_COUNT = 2 type _MqttMessageType = list[tuple[str, str]] type _AttributesType = list[tuple[str, Any]] -type _StateDataType = list[tuple[_MqttMessageType, str | None, _AttributesType | None]] +type _StateDataType = ( + list[tuple[_MqttMessageType, str, _AttributesType | None]] + | list[tuple[_MqttMessageType, str, None]] +) def help_all_subscribe_calls(mqtt_client_mock: MqttMockPahoClient) -> list[Any]: @@ -106,7 +110,7 @@ def help_custom_config( ) base.update(instance) entity_instances.append(base) - config[mqtt.DOMAIN][mqtt_entity_domain]: list[ConfigType] = entity_instances + config[mqtt.DOMAIN][mqtt_entity_domain] = entity_instances return config @@ -1186,7 +1190,10 @@ async def help_test_entity_id_update_subscriptions( assert state is not None assert ( mqtt_mock.async_subscribe.call_count - == len(topics) + 2 * len(SUPPORTED_COMPONENTS) + DISCOVERY_COUNT + == len(topics) + + 2 * len(SUPPORTED_COMPONENTS) + + DISCOVERY_COUNT + + DEVICE_DISCOVERY_COUNT ) for topic in topics: mqtt_mock.async_subscribe.assert_any_call( @@ -1360,11 +1367,11 @@ async def help_test_entity_debug_info_message( mqtt_mock_entry: MqttMockHAClientGenerator, domain: str, config: ConfigType, - service: str, + service: str | None, command_topic: str | None = None, command_payload: str | None = None, state_topic: str | object | None = _SENTINEL, - state_payload: str | None = None, + state_payload: bytes | str | None = None, service_parameters: dict[str, Any] | None = None, ) -> None: """Test debug_info.
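# ---------------------------------------------------------------------------
# Editorial sketch (not part of the patch): the new
# test_entity_icon_and_entity_picture tests drive the helper defined in the
# next hunk, which publishes discovery payloads that may carry an "icon" or
# "entity_picture" key and then checks the resulting state attributes.
# A minimal payload of that shape (the entity values here are made up; the
# topic layout and keys follow the helper below):
import json

domain = "sensor"
unique_id = "veryunique2"
discovery_topic = f"homeassistant/{domain}/{unique_id}/config"
payload = json.dumps(
    {
        "name": "Example",
        "state_topic": "example/state",
        "unique_id": unique_id,
        "device": {"identifiers": ["0AFFD2"]},
        # Optional presentation hints exercised by the helper:
        "entity_picture": "https://example.com/mypicture.png",
        "icon": "mdi:emoji-happy-outline",
    }
)
# In a test this would be delivered with:
#   async_fire_mqtt_message(hass, discovery_topic, payload)
# ---------------------------------------------------------------------------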
@@ -1665,6 +1672,61 @@ async def help_test_entity_category( assert not ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) +async def help_test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + domain: str, + config: ConfigType, + default_entity_picture: str | None = None, +) -> None: + """Test entity picture and icon.""" + await mqtt_mock_entry() + # Add device settings to config + config = copy.deepcopy(config[mqtt.DOMAIN][domain]) + config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) + + ent_registry = er.async_get(hass) + + # Discover an entity without entity icon or picture + unique_id = "veryunique1" + config["unique_id"] = unique_id + data = json.dumps(config) + async_fire_mqtt_message(hass, f"homeassistant/{domain}/{unique_id}/config", data) + await hass.async_block_till_done() + entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) + state = hass.states.get(entity_id) + assert entity_id is not None and state + assert state.attributes.get("icon") is None + assert state.attributes.get("entity_picture") == default_entity_picture + + # Discover an entity with an entity picture set + unique_id = "veryunique2" + config["entity_picture"] = "https://example.com/mypicture.png" + config["unique_id"] = unique_id + data = json.dumps(config) + async_fire_mqtt_message(hass, f"homeassistant/{domain}/{unique_id}/config", data) + await hass.async_block_till_done() + entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) + state = hass.states.get(entity_id) + assert entity_id is not None and state + assert state.attributes.get("icon") is None + assert state.attributes.get("entity_picture") == "https://example.com/mypicture.png" + config.pop("entity_picture") + + # Discover an entity with an entity icon set + unique_id = "veryunique3" + config["icon"] = "mdi:emoji-happy-outline" + config["unique_id"] = unique_id + data = json.dumps(config) + async_fire_mqtt_message(hass, f"homeassistant/{domain}/{unique_id}/config", data) + await hass.async_block_till_done() + entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) + state = hass.states.get(entity_id) + assert entity_id is not None and state + assert state.attributes.get("icon") == "mdi:emoji-happy-outline" + assert state.attributes.get("entity_picture") == default_entity_picture + + async def help_test_publishing_with_custom_encoding( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, @@ -1938,7 +2000,7 @@ async def help_test_skipped_async_ha_write_state( ) -> None: """Test entity.async_ha_write_state is only called on changes.""" with patch( - "homeassistant.components.mqtt.mixins.MqttEntity.async_write_ha_state" + "homeassistant.components.mqtt.entity.MqttEntity.async_write_ha_state" ) as mock_async_ha_write_state: assert len(mock_async_ha_write_state.mock_calls) == 0 async_fire_mqtt_message(hass, topic, payload1) diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 907e3ef9946..38dbda50cdd 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -8,14 +8,14 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from uuid import uuid4 +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import Discovery import pytest import voluptuous as vol from homeassistant import config_entries from homeassistant.components import mqtt -from 
homeassistant.components.hassio import HassioServiceInfo -from homeassistant.components.hassio.addon_manager import AddonError -from homeassistant.components.hassio.handler import HassioAPIError +from homeassistant.components.hassio import AddonError from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED from homeassistant.const import ( CONF_CLIENT_ID, @@ -26,6 +26,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient @@ -250,7 +251,7 @@ async def test_user_connection_works( assert len(mock_finish_setup.mock_calls) == 1 -@pytest.mark.usefixtures("mqtt_client_mock", "supervisor") +@pytest.mark.usefixtures("mqtt_client_mock", "supervisor", "supervisor_client") async def test_user_connection_works_with_supervisor( hass: HomeAssistant, mock_try_connection: MagicMock, @@ -417,7 +418,7 @@ async def test_hassio_already_configured(hass: HomeAssistant) -> None: "mqtt", context={"source": config_entries.SOURCE_HASSIO} ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "single_instance_allowed" async def test_hassio_ignored(hass: HomeAssistant) -> None: @@ -443,7 +444,7 @@ async def test_hassio_ignored(hass: HomeAssistant) -> None: ) assert result assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result.get("reason") == "single_instance_allowed" async def test_hassio_confirm( @@ -452,8 +453,6 @@ async def test_hassio_confirm( mock_finish_setup: MagicMock, ) -> None: """Test we can finish a config flow.""" - mock_try_connection.return_value = True - result = await hass.config_entries.flow.async_init( "mqtt", data=HassioServiceInfo( @@ -531,7 +530,19 @@ async def test_hassio_cannot_connect( @pytest.mark.usefixtures( "mqtt_client_mock", "supervisor", "addon_info", "addon_running" ) -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) async def test_addon_flow_with_supervisor_addon_running( hass: HomeAssistant, mock_try_connection_success: MagicMock, @@ -573,7 +584,19 @@ async def test_addon_flow_with_supervisor_addon_running( @pytest.mark.usefixtures( "mqtt_client_mock", "supervisor", "addon_info", "addon_installed", "start_addon" ) -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) async def test_addon_flow_with_supervisor_addon_installed( hass: HomeAssistant, mock_try_connection_success: MagicMock, @@ -628,7 +651,19 @@ async def test_addon_flow_with_supervisor_addon_installed( @pytest.mark.usefixtures( "mqtt_client_mock", "supervisor", "addon_info", "addon_running" ) -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) async def 
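# ---------------------------------------------------------------------------
# Editorial sketch (not part of the patch): the parametrizations in the
# config-flow hunks around here construct aiohasupervisor Discovery objects
# with the fields shown below.  A condensed, hypothetical factory of the same
# shape (the function name and its defaults are illustrative only):
from typing import Any
from uuid import uuid4

from aiohasupervisor.models import Discovery


def make_mqtt_addon_discovery(config: dict[str, Any]) -> list[Discovery]:
    """Return a one-item Supervisor discovery list for the Mosquitto add-on."""
    return [
        Discovery(
            addon="core_mosquitto",
            service="mqtt",
            uuid=uuid4(),
            config=dict(config),
        )
    ]
# A parametrize call could then pass:
#   make_mqtt_addon_discovery(ADD_ON_DISCOVERY_INFO)
# ---------------------------------------------------------------------------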
test_addon_flow_with_supervisor_addon_running_connection_fails( hass: HomeAssistant, mock_try_connection: MagicMock, @@ -669,7 +704,7 @@ async def test_addon_not_running_api_error( Case: The Mosquitto add-on start fails on a API error. """ - start_addon.side_effect = HassioAPIError() + start_addon.side_effect = SupervisorError() result = await hass.config_entries.flow.async_init( "mqtt", context={"source": config_entries.SOURCE_USER} @@ -756,7 +791,7 @@ async def test_addon_info_error( Case: The Mosquitto add-on info could not be retrieved. """ - addon_info.side_effect = AddonError() + addon_info.side_effect = SupervisorError() result = await hass.config_entries.flow.async_init( "mqtt", context={"source": config_entries.SOURCE_USER} @@ -783,7 +818,19 @@ async def test_addon_info_error( "install_addon", "start_addon", ) -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) async def test_addon_flow_with_supervisor_addon_not_installed( hass: HomeAssistant, mock_try_connection_success: MagicMock, @@ -855,7 +902,7 @@ async def test_addon_not_installed_failures( Case: The Mosquitto add-on install fails. """ - install_addon.side_effect = HassioAPIError() + install_addon.side_effect = SupervisorError() result = await hass.config_entries.flow.async_init( "mqtt", context={"source": config_entries.SOURCE_USER} @@ -1024,7 +1071,6 @@ async def test_bad_certificate( test_input.pop(mqtt.CONF_CLIENT_KEY) mqtt_mock = await mqtt_mock_entry() - mock_try_connection.return_value = True config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] # Add at least one advanced option to get the full form hass.config_entries.async_update_entry( @@ -1273,7 +1319,7 @@ async def test_invalid_discovery_prefix( def get_default(schema: vol.Schema, key: str) -> Any | None: """Get default value for key in voluptuous schema.""" - for schema_key in schema: + for schema_key in schema: # type:ignore[attr-defined] if schema_key == key: if schema_key.default == vol.UNDEFINED: return None @@ -1283,7 +1329,7 @@ def get_default(schema: vol.Schema, key: str) -> Any | None: def get_suggested(schema: vol.Schema, key: str) -> Any | None: """Get suggested value for key in voluptuous schema.""" - for schema_key in schema: + for schema_key in schema: # type:ignore[attr-defined] if schema_key == key: if ( schema_key.description is None @@ -1551,14 +1597,7 @@ async def test_step_reauth( assert result["context"]["source"] == "reauth" # Show the form - result = await hass.config_entries.flow.async_init( - mqtt.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -1587,7 +1626,19 @@ async def test_step_reauth( await hass.async_block_till_done() -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) @pytest.mark.usefixtures( "mqtt_client_mock", "mock_reload_after_entry_update", "supervisor", "addon_running" ) @@ -1636,8 +1687,30 @@ async def test_step_hassio_reauth( @pytest.mark.parametrize( 
("discovery_info", "discovery_info_side_effect", "broker"), [ - ({"config": ADD_ON_DISCOVERY_INFO.copy()}, AddonError, "core-mosquitto"), - ({"config": ADD_ON_DISCOVERY_INFO.copy()}, None, "broker-not-addon"), + ( + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ], + AddonError, + "core-mosquitto", + ), + ( + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ], + None, + "broker-not-addon", + ), ], ) @pytest.mark.usefixtures( @@ -2089,7 +2162,7 @@ async def test_setup_with_advanced_settings( async def test_change_websockets_transport_to_tcp( hass: HomeAssistant, mock_try_connection: MagicMock ) -> None: - """Test option flow setup with websockets transport settings.""" + """Test reconfiguration flow changing websockets transport settings.""" config_entry = MockConfigEntry(domain=mqtt.DOMAIN) config_entry.add_to_hass(hass) hass.config_entries.async_update_entry( @@ -2105,7 +2178,7 @@ async def test_change_websockets_transport_to_tcp( mock_try_connection.return_value = True - result = await hass.config_entries.options.async_init(config_entry.entry_id) + result = await config_entry.start_reconfigure_flow(hass, show_advanced_options=True) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "broker" assert result["data_schema"].schema["transport"] @@ -2113,7 +2186,7 @@ async def test_change_websockets_transport_to_tcp( assert result["data_schema"].schema["ws_headers"] # Change transport to tcp - result = await hass.config_entries.options.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ mqtt.CONF_BROKER: "test-broker", @@ -2123,23 +2196,61 @@ async def test_change_websockets_transport_to_tcp( mqtt.CONF_WS_PATH: "/some_path", }, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "options" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - mqtt.CONF_DISCOVERY: True, - mqtt.CONF_DISCOVERY_PREFIX: "homeassistant_test", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # Check config entry result assert config_entry.data == { mqtt.CONF_BROKER: "test-broker", CONF_PORT: 1234, mqtt.CONF_TRANSPORT: "tcp", - mqtt.CONF_DISCOVERY: True, - mqtt.CONF_DISCOVERY_PREFIX: "homeassistant_test", } + + +@pytest.mark.usefixtures("mock_ssl_context", "mock_process_uploaded_file") +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ + { + mqtt.CONF_BROKER: "test-broker", + CONF_PORT: 1234, + mqtt.CONF_TRANSPORT: "websockets", + mqtt.CONF_WS_HEADERS: {"header_1": "custom_header1"}, + mqtt.CONF_WS_PATH: "/some_path", + } + ], +) +async def test_reconfigure_flow_form( + hass: HomeAssistant, + mock_try_connection: MagicMock, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test reconfigure flow.""" + await mqtt_mock_entry() + entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + result = await entry.start_reconfigure_flow(hass, show_advanced_options=True) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "broker" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_BROKER: "10.10.10,10", + CONF_PORT: 1234, + mqtt.CONF_TRANSPORT: "websockets", + 
mqtt.CONF_WS_HEADERS: '{"header_1": "custom_header1"}', + mqtt.CONF_WS_PATH: "/some_new_path", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + mqtt.CONF_BROKER: "10.10.10,10", + CONF_PORT: 1234, + mqtt.CONF_TRANSPORT: "websockets", + mqtt.CONF_WS_HEADERS: {"header_1": "custom_header1"}, + mqtt.CONF_WS_PATH: "/some_new_path", + } + await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/mqtt/test_cover.py b/tests/components/mqtt/test_cover.py index 451665de96a..ee74b78be81 100644 --- a/tests/components/mqtt/test_cover.py +++ b/tests/components/mqtt/test_cover.py @@ -12,6 +12,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, + CoverState, ) from homeassistant.components.mqtt.const import CONF_STATE_TOPIC from homeassistant.components.mqtt.cover import ( @@ -39,9 +40,7 @@ from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, STATE_CLOSED, - STATE_CLOSING, STATE_OPEN, - STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -63,6 +62,7 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_publishing_with_custom_encoding, @@ -116,12 +116,12 @@ async def test_state_via_state_topic( async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "state-topic", STATE_OPEN) state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "None") @@ -162,17 +162,17 @@ async def test_opening_and_closing_state_via_custom_state_payload( async_fire_mqtt_message(hass, "state-topic", "34") state = hass.states.get("cover.test") - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async_fire_mqtt_message(hass, "state-topic", "--43") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -197,11 +197,11 @@ async def test_opening_and_closing_state_via_custom_state_payload( @pytest.mark.parametrize( ("position", "assert_state"), [ - (0, STATE_CLOSED), - (1, STATE_OPEN), - (30, STATE_OPEN), - (99, STATE_OPEN), - (100, STATE_OPEN), + (0, CoverState.CLOSED), + (1, CoverState.OPEN), + (30, CoverState.OPEN), + (99, CoverState.OPEN), + (100, CoverState.OPEN), ], ) async def test_open_closed_state_from_position_optimistic( @@ -253,13 +253,13 @@ async def test_open_closed_state_from_position_optimistic( @pytest.mark.parametrize( ("position", "assert_state"), [ - (0, STATE_CLOSED), - (1, STATE_CLOSED), - (10, STATE_CLOSED), - (11, STATE_OPEN), - (30, STATE_OPEN), - (99, STATE_OPEN), - (100, STATE_OPEN), + (0, CoverState.CLOSED), + (1, CoverState.CLOSED), + (10, CoverState.CLOSED), + (11, CoverState.OPEN), + (30, CoverState.OPEN), + (99, CoverState.OPEN), + (100, CoverState.OPEN), ], ) async def 
test_open_closed_state_from_position_optimistic_alt_positions( @@ -449,12 +449,12 @@ async def test_position_via_position_topic( async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN @pytest.mark.parametrize( @@ -490,12 +490,12 @@ async def test_state_via_template( async_fire_mqtt_message(hass, "state-topic", "10000") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "99") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -532,13 +532,13 @@ async def test_state_via_template_and_entity_id( async_fire_mqtt_message(hass, "state-topic", "invalid") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "closed") async_fire_mqtt_message(hass, "state-topic", "invalid") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -571,14 +571,14 @@ async def test_state_via_template_with_json_value( async_fire_mqtt_message(hass, "state-topic", '{ "Var1": "open", "Var2": "other" }') state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message( hass, "state-topic", '{ "Var1": "closed", "Var2": "other" }' ) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "state-topic", '{ "Var2": "other" }') assert ( @@ -741,7 +741,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await hass.services.async_call( cover.DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True @@ -750,7 +750,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED await hass.services.async_call( cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True @@ -759,7 +759,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await hass.services.async_call( cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True @@ -767,7 +767,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -804,7 +804,7 @@ async def test_optimistic_state_change_with_position( 
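# ---------------------------------------------------------------------------
# Editorial sketch (not part of the patch): the cover hunks in this file
# compare state-machine values against CoverState members instead of the
# STATE_* constants.  This works because CoverState is a string enum, so a
# member compares equal to the raw state string.  Standalone illustration with
# a stand-in enum mirroring the values used by homeassistant.components.cover:
from enum import StrEnum


class CoverStateSketch(StrEnum):
    """Stand-in for the CoverState enum used in the tests above."""

    OPEN = "open"
    OPENING = "opening"
    CLOSED = "closed"
    CLOSING = "closing"


state_value = "open"  # what hass.states.get(...).state returns
assert state_value == CoverStateSketch.OPEN
assert CoverStateSketch.CLOSING == "closing"
assert state_value in (CoverStateSketch.OPEN, CoverStateSketch.OPENING)
# ---------------------------------------------------------------------------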
mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 await hass.services.async_call( @@ -814,7 +814,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 await hass.services.async_call( @@ -824,7 +824,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 await hass.services.async_call( @@ -833,7 +833,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 @@ -1026,35 +1026,35 @@ async def test_current_cover_position_inverted( ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 0 - assert hass.states.get("cover.test").state == STATE_CLOSED + assert hass.states.get("cover.test").state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "0") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 100 - assert hass.states.get("cover.test").state == STATE_OPEN + assert hass.states.get("cover.test").state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "50") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 50 - assert hass.states.get("cover.test").state == STATE_OPEN + assert hass.states.get("cover.test").state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "non-numeric") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 50 - assert hass.states.get("cover.test").state == STATE_OPEN + assert hass.states.get("cover.test").state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "101") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 0 - assert hass.states.get("cover.test").state == STATE_CLOSED + assert hass.states.get("cover.test").state == CoverState.CLOSED @pytest.mark.parametrize( @@ -2738,32 +2738,32 @@ async def test_state_and_position_topics_state_not_set_via_position_topic( async_fire_mqtt_message(hass, "state-topic", "OPEN") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, 
"get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "CLOSE") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -2800,27 +2800,27 @@ async def test_set_state_via_position_using_stopped_state( async_fire_mqtt_message(hass, "state-topic", "OPEN") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN @pytest.mark.parametrize( @@ -3136,32 +3136,32 @@ async def test_set_state_via_stopped_state_no_position_topic( async_fire_mqtt_message(hass, "state-topic", "OPEN") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "OPENING") state = hass.states.get("cover.test") - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "CLOSING") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -3549,3 +3549,15 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) + + +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity name setup.""" + domain = cover.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 1acfe8dd9f5..5cdfb14a5cf 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -2,6 +2,7 @@ import json from 
typing import Any +from unittest.mock import patch import pytest from pytest_unordered import unordered @@ -26,26 +27,46 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.mark.parametrize( + ("discovery_topic", "data"), + [ + ( + "homeassistant/device_automation/0AFFD2/bla/config", + '{ "automation_type":"trigger",' + ' "device":{"identifiers":["0AFFD2"]},' + ' "payload": "short_press",' + ' "topic": "foobar/triggers/button1",' + ' "type": "button_short_press",' + ' "subtype": "button_1" }', + ), + ( + "homeassistant/device/0AFFD2/config", + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"}, "cmps": ' + '{ "bla": {' + ' "automation_type":"trigger", ' + ' "payload": "short_press",' + ' "topic": "foobar/triggers/button1",' + ' "type": "button_short_press",' + ' "subtype": "button_1",' + ' "platform":"device_automation"}}}', + ), + ], +) async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_topic: str, + data: str, ) -> None: """Test we get the expected triggers from a discovered mqtt device.""" await mqtt_mock_entry() - data1 = ( - '{ "automation_type":"trigger",' - ' "device":{"identifiers":["0AFFD2"]},' - ' "payload": "short_press",' - ' "topic": "foobar/triggers/button1",' - ' "type": "button_short_press",' - ' "subtype": "button_1" }' - ) - async_fire_mqtt_message(hass, "homeassistant/device_automation/bla/config", data1) + async_fire_mqtt_message(hass, discovery_topic, data) await hass.async_block_till_done() device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - expected_triggers = [ + expected_triggers: list[dict[str, Any]] = [ { "platform": "device", "domain": DOMAIN, @@ -165,7 +186,7 @@ async def test_discover_bad_triggers( await hass.async_block_till_done() device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - expected_triggers = [ + expected_triggers: list[dict[str, Any]] = [ { "platform": "device", "domain": DOMAIN, @@ -226,7 +247,7 @@ async def test_update_remove_triggers( device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry.name == "milk" - expected_triggers1 = [ + expected_triggers1: list[dict[str, Any]] = [ { "platform": "device", "domain": DOMAIN, @@ -1263,7 +1284,7 @@ async def test_entity_device_info_update( """Test device registry update.""" await mqtt_mock_entry() - config = { + config: dict[str, Any] = { "automation_type": "trigger", "topic": "test-topic", "type": "foo", @@ -1672,14 +1693,19 @@ async def test_trigger_debug_info( assert debug_info_data["triggers"][0]["discovery_data"]["payload"] == config2 -@pytest.mark.usefixtures("mqtt_mock") +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.0) async def test_unload_entry( hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test unloading the MQTT entry.""" + await mqtt_mock_entry() data1 = ( '{ "automation_type":"trigger",' ' "device":{"identifiers":["0AFFD2"]},' @@ -1713,6 +1739,7 @@ async def test_unload_entry( ] }, ) + await hass.async_block_till_done() # Fake short press 1 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 7f58fc75dae..8a674a4e1cd 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -3,14 +3,17 @@ import asyncio import copy import json +import logging from pathlib import Path import re -from unittest.mock import AsyncMock, call, patch +from typing import Any +from unittest.mock import ANY, AsyncMock, call, patch import pytest from homeassistant import config_entries from homeassistant.components import mqtt +from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.mqtt.abbreviations import ( ABBREVIATIONS, DEVICE_ABBREVIATIONS, @@ -33,7 +36,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.data_entry_flow import FlowResult +from homeassistant.data_entry_flow import AbortFlow, FlowResult from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, @@ -45,12 +48,16 @@ from homeassistant.util.signal_type import SignalTypeFormat from .conftest import ENTRY_DEFAULT_BIRTH_MESSAGE from .test_common import help_all_subscribe_calls, help_test_unload_config_entry +from .test_tag import DEFAULT_TAG_ID, DEFAULT_TAG_SCAN from tests.common import ( MockConfigEntry, + MockModule, async_capture_events, async_fire_mqtt_message, + async_get_device_automations, mock_config_flow, + mock_integration, mock_platform, ) from tests.typing import ( @@ -59,6 +66,133 @@ from tests.typing import ( WebSocketGenerator, ) +TEST_SINGLE_CONFIGS = [ + ( + "homeassistant/device_automation/0AFFD2/bla1/config", + { + "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, + "o": {"name": "Foo2Mqtt", "sw": "1.40.2", "url": "https://www.foo2mqtt.io"}, + "automation_type": "trigger", + "payload": "short_press", + "topic": "foobar/triggers/button1", + "type": "button_short_press", + "subtype": "button_1", + }, + ), + ( + "homeassistant/sensor/0AFFD2/bla2/config", + { + "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, + "o": {"name": "Foo2Mqtt", "sw": "1.40.2", "url": "https://www.foo2mqtt.io"}, + "state_topic": "foobar/sensors/bla2/state", + "unique_id": "bla002", + }, + ), + ( + "homeassistant/tag/0AFFD2/bla3/config", + { + "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, + "o": {"name": "Foo2Mqtt", "sw": "1.40.2", "url": "https://www.foo2mqtt.io"}, + "topic": "foobar/tags/bla3/see", + }, + ), +] +TEST_DEVICE_CONFIG = { + "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, + "o": {"name": "Foo2Mqtt", "sw": "1.50.0", "url": "https://www.foo2mqtt.io"}, + "cmps": { + "bla1": { + "platform": "device_automation", + "automation_type": "trigger", + "payload": "short_press", + "topic": "foobar/triggers/button1", + "type": "button_short_press", + "subtype": "button_1", + }, + "bla2": { + "platform": "sensor", + "state_topic": "foobar/sensors/bla2/state", + "unique_id": "bla002", + "name": "mqtt_sensor", + }, + "bla3": { + "platform": "tag", + "topic": "foobar/tags/bla3/see", + }, + }, +} +TEST_DEVICE_DISCOVERY_TOPIC = "homeassistant/device/0AFFD2/config" + + +async def help_check_discovered_items( + hass: HomeAssistant, device_registry: dr.DeviceRegistry, tag_mock: AsyncMock +) -> None: + """Help checking discovered test items are still available.""" + + # Check 
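# ---------------------------------------------------------------------------
# Editorial sketch (not part of the patch): shape of the device-based
# discovery payload used by TEST_DEVICE_CONFIG above.  A single retained
# message on homeassistant/device/<object_id>/config describes the device
# ("device"/"dev"), the publishing origin ("o") and every component ("cmps"),
# each tagged with a "platform".  Reduced here to one sensor component:
import json

device_discovery_topic = "homeassistant/device/0AFFD2/config"
device_payload = json.dumps(
    {
        "device": {"identifiers": ["0AFFD2"], "name": "test_device"},
        "o": {"name": "Foo2Mqtt", "sw": "1.50.0", "url": "https://www.foo2mqtt.io"},
        "cmps": {
            "bla2": {
                "platform": "sensor",
                "state_topic": "foobar/sensors/bla2/state",
                "unique_id": "bla002",
                "name": "mqtt_sensor",
            },
        },
    }
)
# In a test this would be delivered with:
#   async_fire_mqtt_message(hass, device_discovery_topic, device_payload)
# ---------------------------------------------------------------------------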
the device_trigger was discovered + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + triggers = await async_get_device_automations( + hass, DeviceAutomationType.TRIGGER, device_entry.id + ) + assert len(triggers) == 1 + # Check the sensor was discovered + state = hass.states.get("sensor.test_device_mqtt_sensor") + assert state is not None + + # Check the tag works + async_fire_mqtt_message(hass, "foobar/tags/bla3/see", DEFAULT_TAG_SCAN) + await hass.async_block_till_done() + tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id) + tag_mock.reset_mock() + + +@pytest.fixture +def mqtt_data_flow_calls() -> list[MqttServiceInfo]: + """Return a list to capture MQTT data flow calls.""" + return [] + + +@pytest.fixture +async def mock_mqtt_flow( + hass: HomeAssistant, mqtt_data_flow_calls: list[MqttServiceInfo] +) -> config_entries.ConfigFlow: + """Test fixture for mqtt integration flow. + + The topic is used as a unique ID. + The component test domain used is: `comp`. + + Creates an entry if it does not exist. + Updates an entry if it exists, and there is an updated payload. + """ + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + async def async_step_mqtt(self, discovery_info: MqttServiceInfo) -> FlowResult: + """Test mqtt step.""" + await asyncio.sleep(0) + mqtt_data_flow_calls.append(discovery_info) + # Abort a flow if there is an update for the existing entry + if entry := self.hass.config_entries.async_entry_for_domain_unique_id( + "comp", discovery_info.topic + ): + hass.config_entries.async_update_entry( + entry, + data={ + "name": discovery_info.topic, + "payload": discovery_info.payload, + }, + ) + raise AbortFlow("already_configured") + await self.async_set_unique_id(discovery_info.topic) + return self.async_create_entry( + title="Test", + data={"name": discovery_info.topic, "payload": discovery_info.payload}, + ) + + return TestFlow + @pytest.mark.parametrize( "mqtt_config_entry_data", [ @@ -85,6 +219,8 @@ async def test_subscribing_config_topic( [ ("homeassistant/binary_sensor/bla/not_config", False), ("homeassistant/binary_sensor/rörkrökare/config", True), + ("homeassistant/device/bla/not_config", False), + ("homeassistant/device/rörkrökare/config", True), ], ) async def test_invalid_topic( @@ -113,10 +249,15 @@ async def test_invalid_topic( caplog.clear() +@pytest.mark.parametrize( + "discovery_topic", + ["homeassistant/binary_sensor/bla/config", "homeassistant/device/bla/config"], +) async def test_invalid_json( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, + discovery_topic: str, ) -> None: """Test sending in invalid JSON.""" await mqtt_mock_entry() @@ -125,9 +266,7 @@ ) as mock_dispatcher_send: mock_dispatcher_send = AsyncMock(return_value=None) - async_fire_mqtt_message( - hass, "homeassistant/binary_sensor/bla/config", "not json" - ) + async_fire_mqtt_message(hass, discovery_topic, "not json") await hass.async_block_till_done() assert "Unable to parse JSON" in caplog.text assert not mock_dispatcher_send.called @@ -176,6 +315,56 @@ async def test_invalid_config( assert "Error 'expected int for dictionary value @ data['qos']'" in caplog.text + +async def test_invalid_device_discovery_config( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test sending in JSON that violates the discovery schema if device or platform key is
missing.""" + await mqtt_mock_entry() + async_fire_mqtt_message( + hass, + "homeassistant/device/bla/config", + '{ "o": {"name": "foobar"}, "cmps": ' + '{ "acp1": {"name": "abc", "state_topic": "home/alarm", ' + '"unique_id": "very_unique",' + '"command_topic": "home/alarm/set", ' + '"platform":"alarm_control_panel"}}}', + ) + await hass.async_block_till_done() + assert ( + "Invalid MQTT device discovery payload for bla, " + "required key not provided @ data['device']" in caplog.text + ) + + caplog.clear() + async_fire_mqtt_message( + hass, + "homeassistant/device/bla/config", + '{ "o": {"name": "foobar"}, "dev": {"identifiers": ["ABDE03"]}, ' + '"cmps": { "acp1": {"name": "abc", "state_topic": "home/alarm", ' + '"command_topic": "home/alarm/set" }}}', + ) + await hass.async_block_till_done() + assert ( + "Invalid MQTT device discovery payload for bla, " + "required key not provided @ data['components']['acp1']['platform']" + in caplog.text + ) + + caplog.clear() + async_fire_mqtt_message( + hass, + "homeassistant/device/bla/config", + '{ "o": {"name": "foobar"}, "dev": {"identifiers": ["ABDE03"]}, ' '"cmps": ""}', + ) + await hass.async_block_till_done() + assert ( + "Invalid MQTT device discovery payload for bla, " + "expected a dictionary for dictionary value @ data['components']" in caplog.text + ) + + async def test_only_valid_components( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, @@ -218,27 +407,70 @@ async def test_correct_config_discovery( assert ("binary_sensor", "bla") in hass.data["mqtt"].discovery_already_discovered +@pytest.mark.parametrize( + ("discovery_topic", "payloads", "discovery_id"), + [ + ( + "homeassistant/binary_sensor/bla/config", + ( + '{"name":"Beer","state_topic": "test-topic",' + '"unique_id": "very_unique1",' + '"o":{"name":"bla2mqtt","sw":"1.0"},' + '"dev":{"identifiers":["bla"],"name": "bla"}}', + '{"name":"Milk","state_topic": "test-topic",' + '"unique_id": "very_unique1",' + '"o":{"name":"bla2mqtt","sw":"1.1",' + '"url":"https://bla2mqtt.example.com/support"},' + '"dev":{"identifiers":["bla"],"name": "bla"}}', + ), + "bla", + ), + ( + "homeassistant/device/bla/config", + ( + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"unique_id": "very_unique1",' + '"name":"Beer","state_topic": "test-topic"}},' + '"o":{"name":"bla2mqtt","sw":"1.0"},' + '"dev":{"identifiers":["bla"],"name": "bla"}}', + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"unique_id": "very_unique1",' + '"name":"Milk","state_topic": "test-topic"}},' + '"o":{"name":"bla2mqtt","sw":"1.1",' + '"url":"https://bla2mqtt.example.com/support"},' + '"dev":{"identifiers":["bla"],"name": "bla"}}', + ), + "bla bin_sens1", + ), + ], +) async def test_discovery_integration_info( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, + discovery_topic: str, + payloads: tuple[str, str], + discovery_id: str, ) -> None: - """Test logging discovery of new and updated items.""" + """Test discovery of integration info.""" await mqtt_mock_entry() async_fire_mqtt_message( hass, - "homeassistant/binary_sensor/bla/config", - '{ "name": "Beer", "state_topic": "test-topic", "o": {"name": "bla2mqtt", "sw": "1.0" } }', + discovery_topic, + payloads[0], ) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.beer") + state = hass.states.get("binary_sensor.bla_beer") assert state is not None - assert state.name == "Beer" + assert state.name == "bla Beer" assert ( - "Found new component: binary_sensor bla from external 
application bla2mqtt, version: 1.0" + "Processing device discovery for 'bla' from external " + "application bla2mqtt, version: 1.0" + in caplog.text + or f"Found new component: binary_sensor {discovery_id} from external application bla2mqtt, version: 1.0" in caplog.text ) caplog.clear() @@ -246,47 +478,635 @@ async def test_discovery_integration_info( # Send an update and add support url async_fire_mqtt_message( hass, - "homeassistant/binary_sensor/bla/config", - '{ "name": "Milk", "state_topic": "test-topic", "o": {"name": "bla2mqtt", "sw": "1.1", "url": "https://bla2mqtt.example.com/support" } }', + discovery_topic, + payloads[1], ) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.beer") + state = hass.states.get("binary_sensor.bla_beer") assert state is not None - assert state.name == "Milk" + assert state.name == "bla Milk" assert ( - "Component has already been discovered: binary_sensor bla, sending update from external application bla2mqtt, version: 1.1, support URL: https://bla2mqtt.example.com/support" + f"Component has already been discovered: binary_sensor {discovery_id}" in caplog.text ) @pytest.mark.parametrize( - "config_message", + ("single_configs", "device_discovery_topic", "device_config"), + [(TEST_SINGLE_CONFIGS, TEST_DEVICE_DISCOVERY_TOPIC, TEST_DEVICE_CONFIG)], +) +async def test_discovery_migration_to_device_base( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, + caplog: pytest.LogCaptureFixture, + single_configs: list[tuple[str, dict[str, Any]]], + device_discovery_topic: str, + device_config: dict[str, Any], +) -> None: + """Test the migration of single discovery to device discovery.""" + await mqtt_mock_entry() + + # Discovery single config schema + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Try to migrate to device based discovery without migrate_discovery flag + payload = json.dumps(device_config) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + assert ( + "Received a conflicting MQTT discovery message for device_automation " + "'0AFFD2 bla1' which was previously discovered on topic homeassistant/" + "device_automation/0AFFD2/bla1/config from external application Foo2Mqtt, " + "version: 1.40.2; the conflicting discovery message was received on topic " + "homeassistant/device/0AFFD2/config from external application Foo2Mqtt, " + "version: 1.50.0; for support visit https://www.foo2mqtt.io" in caplog.text + ) + assert ( + "Received a conflicting MQTT discovery message for entity sensor." 
+ "test_device_mqtt_sensor; the entity was previously discovered on topic " + "homeassistant/sensor/0AFFD2/bla2/config from external application Foo2Mqtt, " + "version: 1.40.2; the conflicting discovery message was received on topic " + "homeassistant/device/0AFFD2/config from external application Foo2Mqtt, " + "version: 1.50.0; for support visit https://www.foo2mqtt.io" in caplog.text + ) + assert ( + "Received a conflicting MQTT discovery message for tag '0AFFD2 bla3' which " + "was previously discovered on topic homeassistant/tag/0AFFD2/bla3/config " + "from external application Foo2Mqtt, version: 1.40.2; the conflicting " + "discovery message was received on topic homeassistant/device/0AFFD2/config " + "from external application Foo2Mqtt, version: 1.50.0; for support visit " + "https://www.foo2mqtt.io" in caplog.text + ) + + # Check we still have our mqtt items + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Test Enable discovery migration + # Discovery single config schema + caplog.clear() + for discovery_topic, _ in single_configs: + # migr_discvry is abbreviation for migrate_discovery + payload = json.dumps({"migr_discvry": True}) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Assert we still have our device entry + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + # Check our trigger was unloaden + triggers = await async_get_device_automations( + hass, DeviceAutomationType.TRIGGER, device_entry.id + ) + assert len(triggers) == 0 + # Check the sensor was unloaded + state = hass.states.get("sensor.test_device_mqtt_sensor") + assert state is None + # Check the entity registry entry is retained + assert entity_registry.async_is_registered("sensor.test_device_mqtt_sensor") + + assert ( + "Migration to MQTT device discovery schema started for device_automation " + "'0AFFD2 bla1' from external application Foo2Mqtt, version: 1.40.2 on topic " + "homeassistant/device_automation/0AFFD2/bla1/config. To complete migration, " + "publish a device discovery message with device_automation '0AFFD2 bla1'. " + "After completed migration, publish an empty (retained) payload to " + "homeassistant/device_automation/0AFFD2/bla1/config" in caplog.text + ) + assert ( + "Migration to MQTT device discovery schema started for entity sensor." + "test_device_mqtt_sensor from external application Foo2Mqtt, version: 1.40.2 " + "on topic homeassistant/sensor/0AFFD2/bla2/config. To complete migration, " + "publish a device discovery message with sensor entity '0AFFD2 bla2'. 
After " + "completed migration, publish an empty (retained) payload to " + "homeassistant/sensor/0AFFD2/bla2/config" in caplog.text + ) + + # Migrate to device based discovery + caplog.clear() + payload = json.dumps(device_config) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + + caplog.clear() + for _ in range(2): + # Test publishing an empty payload twice to the migrated discovery topics + # does not remove the migrated items + for discovery_topic, _ in single_configs: + async_fire_mqtt_message( + hass, + discovery_topic, + "", + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Check we still have our mqtt items after publishing an + # empty payload to the old discovery topics + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Check we cannot accidentally migrate back and remove the items + caplog.clear() + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert ( + "Received a conflicting MQTT discovery message for device_automation " + "'0AFFD2 bla1' which was previously discovered on topic homeassistant/device" + "/0AFFD2/config from external application Foo2Mqtt, version: 1.50.0; the " + "conflicting discovery message was received on topic homeassistant/" + "device_automation/0AFFD2/bla1/config from external application Foo2Mqtt, " + "version: 1.40.2; for support visit https://www.foo2mqtt.io" in caplog.text + ) + assert ( + "Received a conflicting MQTT discovery message for entity sensor." + "test_device_mqtt_sensor; the entity was previously discovered on topic " + "homeassistant/device/0AFFD2/config from external application Foo2Mqtt, " + "version: 1.50.0; the conflicting discovery message was received on topic " + "homeassistant/sensor/0AFFD2/bla2/config from external application Foo2Mqtt, " + "version: 1.40.2; for support visit https://www.foo2mqtt.io" in caplog.text + ) + assert ( + "Received a conflicting MQTT discovery message for tag '0AFFD2 bla3' which was " + "previously discovered on topic homeassistant/device/0AFFD2/config from " + "external application Foo2Mqtt, version: 1.50.0; the conflicting discovery " + "message was received on topic homeassistant/tag/0AFFD2/bla3/config from " + "external application Foo2Mqtt, version: 1.40.2; for support visit " + "https://www.foo2mqtt.io" in caplog.text + ) + + caplog.clear() + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + "", + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Check we still have our mqtt items after publishing an + # empty payload to the old discovery topics + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Check we can remove the config using the new discovery topic + async_fire_mqtt_message( + hass, + device_discovery_topic, + "", + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + # Check the device was removed as all device components were removed + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is None + await hass.async_block_till_done(wait_background_tasks=True) + + +@pytest.mark.parametrize( + "config", [ - '{ "name": "Beer", "state_topic": "test-topic", "o": "bla2mqtt" }', - '{ 
"name": "Beer", "state_topic": "test-topic", "o": 2.0 }', - '{ "name": "Beer", "state_topic": "test-topic", "o": null }', - '{ "name": "Beer", "state_topic": "test-topic", "o": {"sw": "bla2mqtt"} }', + {"state_topic": "foobar/sensors/bla2/state", "name": "none_test"}, + { + "state_topic": "foobar/sensors/bla2/state", + "name": "none_test", + "unique_id": "very_unique", + }, + { + "state_topic": "foobar/sensors/bla2/state", + "device": {"identifiers": ["0AFFD2"], "name": "none_test"}, + }, + ], +) +async def test_discovery_migration_unique_id( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, + config: dict[str, Any], +) -> None: + """Test entity has a unique_id and device context when migrating.""" + await mqtt_mock_entry() + + discovery_topic = "homeassistant/sensor/0AFFD2/bla2/config" + + # Discovery with single config schema + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Try discovery migration + payload = json.dumps({"migr_discvry": True}) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Assert the migration attempt fails + assert "Discovery migration is not possible" in caplog.text + + +@pytest.mark.parametrize( + ("single_configs", "device_discovery_topic", "device_config"), + [(TEST_SINGLE_CONFIGS, TEST_DEVICE_DISCOVERY_TOPIC, TEST_DEVICE_CONFIG)], +) +async def test_discovery_rollback_to_single_base( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, + caplog: pytest.LogCaptureFixture, + single_configs: list[tuple[str, dict[str, Any]]], + device_discovery_topic: str, + device_config: dict[str, Any], +) -> None: + """Test the rollback of device discovery to a single component discovery.""" + await mqtt_mock_entry() + + # Start device based discovery + # any single component discovery will be migrated + payload = json.dumps(device_config) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Migrate to single component discovery + # Test the schema + caplog.clear() + payload = json.dumps({"migrate_discovery": "invalid"}) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + assert "Invalid MQTT device discovery payload for 0AFFD2" in caplog.text + + # Set the correct migrate_discovery flag in the device payload + # to allow rollback + payload = json.dumps({"migrate_discovery": True}) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + + # Check the log messages + assert ( + "Rollback to MQTT platform discovery schema started for entity sensor." + "test_device_mqtt_sensor from external application Foo2Mqtt, version: 1.50.0 " + "on topic homeassistant/device/0AFFD2/config. To complete rollback, publish a " + "platform discovery message with sensor entity '0AFFD2 bla2'. 
After completed " + "rollback, publish an empty (retained) payload to " + "homeassistant/device/0AFFD2/config" in caplog.text + ) + assert ( + "Rollback to MQTT platform discovery schema started for device_automation " + "'0AFFD2 bla1' from external application Foo2Mqtt, version: 1.50.0 on topic " + "homeassistant/device/0AFFD2/config. To complete rollback, publish a platform " + "discovery message with device_automation '0AFFD2 bla1'. After completed " + "rollback, publish an empty (retained) payload to " + "homeassistant/device/0AFFD2/config" in caplog.text + ) + + # Assert we still have our device entry + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + # Check our trigger was unloaded + triggers = await async_get_device_automations( + hass, DeviceAutomationType.TRIGGER, device_entry.id + ) + assert len(triggers) == 0 + # Check the sensor was unloaded + state = hass.states.get("sensor.test_device_mqtt_sensor") + assert state is None + # Check the entity registry entry is retained + assert entity_registry.async_is_registered("sensor.test_device_mqtt_sensor") + + # Publish the new component based payloads + # to switch back to component based discovery + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Check we still have our mqtt items + # await help_check_discovered_items(hass, device_registry, tag_mock) + + for _ in range(2): + # Test publishing an empty payload twice to the migrated discovery topic + # does not remove the migrated items + async_fire_mqtt_message( + hass, + device_discovery_topic, + "", + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Check we still have our mqtt items after publishing an + # empty payload to the old discovery topics + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Check we cannot accidentally migrate back and remove the items + payload = json.dumps(device_config) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Check we still have our mqtt items after publishing an + # empty payload to the old discovery topics + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Check we can remove the the config using the new discovery topics + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + "", + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + # Check the device was removed as all device components were removed + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is None + + +@pytest.mark.parametrize( + ("discovery_topic", "payload"), + [ + ( + "homeassistant/binary_sensor/bla/config", + '{"state_topic": "test-topic",' + '"name":"bla","unique_id":"very_unique1",' + '"avty": {"topic": "avty-topic"},' + '"o":{"name":"bla2mqtt","sw":"1.0"},' + '"dev":{"identifiers":["bla"],"name":"Beer"}}', + ), + ( + "homeassistant/device/bla/config", + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"name":"bla","unique_id":"very_unique1",' + '"state_topic": "test-topic"}},' + '"avty": {"topic": "avty-topic"},' + '"o":{"name":"bla2mqtt","sw":"1.0"},' + 
'"dev":{"identifiers":["bla"],"name":"Beer"}}', + ), + ], + ids=["component", "device"], +) +async def test_discovery_availability( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_topic: str, + payload: str, +) -> None: + """Test device discovery with shared availability mapping.""" + await mqtt_mock_entry() + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.beer_bla") + assert state is not None + assert state.name == "Beer bla" + assert state.state == STATE_UNAVAILABLE + + async_fire_mqtt_message( + hass, + "avty-topic", + "online", + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.beer_bla") + assert state is not None + assert state.state == STATE_UNKNOWN + + async_fire_mqtt_message( + hass, + "test-topic", + "ON", + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.beer_bla") + assert state is not None + assert state.state == STATE_ON + + +@pytest.mark.parametrize( + ("discovery_topic", "payload"), + [ + ( + "homeassistant/device/bla/config", + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"unique_id":"very_unique",' + '"avty": {"topic": "avty-topic-component"},' + '"name":"Beer","state_topic": "test-topic"}},' + '"avty": {"topic": "avty-topic-device"},' + '"o":{"name":"bla2mqtt","sw":"1.0"},"dev":{"identifiers":["bla"]}}', + ), + ( + "homeassistant/device/bla/config", + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"unique_id":"very_unique",' + '"availability_topic": "avty-topic-component",' + '"name":"Beer","state_topic": "test-topic"}},' + '"availability_topic": "avty-topic-device",' + '"o":{"name":"bla2mqtt","sw":"1.0"},"dev":{"identifiers":["bla"]}}', + ), + ], + ids=["test1", "test2"], +) +async def test_discovery_component_availability_overridden( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_topic: str, + payload: str, +) -> None: + """Test device discovery with overridden shared availability mapping.""" + await mqtt_mock_entry() + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.none_beer") + assert state is not None + assert state.name == "Beer" + assert state.state == STATE_UNAVAILABLE + + async_fire_mqtt_message( + hass, + "avty-topic-device", + "online", + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.none_beer") + assert state is not None + assert state.state == STATE_UNAVAILABLE + + async_fire_mqtt_message( + hass, + "avty-topic-component", + "online", + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.none_beer") + assert state is not None + assert state.state == STATE_UNKNOWN + + async_fire_mqtt_message( + hass, + "test-topic", + "ON", + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.none_beer") + assert state is not None + assert state.state == STATE_ON + + +@pytest.mark.parametrize( + ("discovery_topic", "config_message", "error_message"), + [ + ( + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "unique_id": "very_unique", ' + '"state_topic": "test-topic", "o": "bla2mqtt" }', + "Unable to parse origin information from discovery message", + ), + ( + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "unique_id": "very_unique", ' + '"state_topic": "test-topic", "o": 2.0 }', + "Unable to parse origin information 
from discovery message", + ), + ( + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "unique_id": "very_unique", ' + '"state_topic": "test-topic", "o": null }', + "Unable to parse origin information from discovery message", + ), + ( + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "unique_id": "very_unique", ' + '"state_topic": "test-topic", "o": {"sw": "bla2mqtt"} }', + "Unable to parse origin information from discovery message", + ), + ( + "homeassistant/device/bla/config", + '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' + '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' + '"state_topic":"test-topic"}},"o": "bla2mqtt"}', + "Invalid MQTT device discovery payload for bla, " + "expected a dictionary for dictionary value @ data['origin']", + ), + ( + "homeassistant/device/bla/config", + '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' + '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' + '"state_topic":"test-topic"}},"o": 2.0}', + "Invalid MQTT device discovery payload for bla, " + "expected a dictionary for dictionary value @ data['origin']", + ), + ( + "homeassistant/device/bla/config", + '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' + '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' + '"state_topic":"test-topic"}},"o": null}', + "Invalid MQTT device discovery payload for bla, " + "expected a dictionary for dictionary value @ data['origin']", + ), + ( + "homeassistant/device/bla/config", + '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' + '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' + '"state_topic":"test-topic"}},"o": {"sw": "bla2mqtt"}}', + "Invalid MQTT device discovery payload for bla, " + "required key not provided @ data['origin']['name']", + ), ], ) async def test_discovery_with_invalid_integration_info( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, + discovery_topic: str, config_message: str, + error_message: str, ) -> None: """Test sending in correct JSON.""" await mqtt_mock_entry() - async_fire_mqtt_message( - hass, "homeassistant/binary_sensor/bla/config", config_message - ) + async_fire_mqtt_message(hass, discovery_topic, config_message) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.beer") + state = hass.states.get("binary_sensor.none_beer") assert state is None - assert "Unable to parse origin information from discovery message" in caplog.text + assert error_message in caplog.text async def test_discover_fan( @@ -805,43 +1625,86 @@ async def test_duplicate_removal( assert "Component has already been discovered: binary_sensor bla" not in caplog.text +@pytest.mark.parametrize( + ("discovery_payloads", "entity_ids"), + [ + ( + { + "homeassistant/sensor/sens1/config": "{" + '"device":{"identifiers":["0AFFD2"]},' + '"state_topic": "foobar/sensor1",' + '"unique_id": "unique1",' + '"name": "sensor1"' + "}", + "homeassistant/sensor/sens2/config": "{" + '"device":{"identifiers":["0AFFD2"]},' + '"state_topic": "foobar/sensor2",' + '"unique_id": "unique2",' + '"name": "sensor2"' + "}", + }, + ["sensor.none_sensor1", "sensor.none_sensor2"], + ), + ( + { + "homeassistant/device/bla/config": "{" + '"device":{"identifiers":["0AFFD2"]},' + '"o": {"name": "foobar"},' + '"cmps": {"sens1": {' + '"platform": "sensor",' + '"name": "sensor1",' + '"state_topic": "foobar/sensor1",' + '"unique_id": "unique1"' + '},"sens2": {' + '"platform": "sensor",' + '"name": "sensor2",' + '"state_topic": 
"foobar/sensor2",' + '"unique_id": "unique2"' + "}}}" + }, + ["sensor.none_sensor1", "sensor.none_sensor2"], + ), + ], +) async def test_cleanup_device_manual( hass: HomeAssistant, + mock_debouncer: asyncio.Event, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_payloads: dict[str, str], + entity_ids: list[str], ) -> None: """Test discovered device is cleaned up when entry removed from device.""" mqtt_mock = await mqtt_mock_entry() assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - data = ( - '{ "device":{"identifiers":["0AFFD2"]},' - ' "state_topic": "foobar/sensor",' - ' "unique_id": "unique" }' - ) - - async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) - await hass.async_block_till_done() + mock_debouncer.clear() + for discovery_topic, discovery_payload in discovery_payloads.items(): + async_fire_mqtt_message(hass, discovery_topic, discovery_payload) + await mock_debouncer.wait() # Verify device and registry entries are created device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry is not None - entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") - assert entity_entry is not None - state = hass.states.get("sensor.none_mqtt_sensor") - assert state is not None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + + state = hass.states.get(entity_id) + assert state is not None # Remove MQTT from the device mqtt_config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + mock_debouncer.clear() response = await ws_client.remove_device( device_entry.id, mqtt_config_entry.entry_id ) assert response["success"] - await hass.async_block_till_done() + await mock_debouncer.wait() await hass.async_block_till_done() # Verify device and registry entries are cleared @@ -851,60 +1714,224 @@ async def test_cleanup_device_manual( assert entity_entry is None # Verify state is removed - state = hass.states.get("sensor.none_mqtt_sensor") - assert state is None - await hass.async_block_till_done() + for entity_id in entity_ids: + state = hass.states.get(entity_id) + assert state is None - # Verify retained discovery topic has been cleared - mqtt_mock.async_publish.assert_called_once_with( - "homeassistant/sensor/bla/config", None, 0, True + # Verify retained discovery topics have been cleared + mqtt_mock.async_publish.assert_has_calls( + [call(discovery_topic, None, 0, True) for discovery_topic in discovery_payloads] ) + await hass.async_block_till_done(wait_background_tasks=True) + +@pytest.mark.parametrize( + ("discovery_topic", "discovery_payload", "entity_ids"), + [ + ( + "homeassistant/sensor/bla/config", + '{ "device":{"identifiers":["0AFFD2"]},' + ' "state_topic": "foobar/sensor",' + ' "unique_id": "unique" }', + ["sensor.none_mqtt_sensor"], + ), + ( + "homeassistant/device/bla/config", + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "cmps": {"sens1": {' + ' "platform": "sensor",' + ' "name": "sensor1",' + ' "state_topic": "foobar/sensor1",' + ' "unique_id": "unique1"' + ' },"sens2": {' + ' "platform": "sensor",' + ' "name": "sensor2",' + ' "state_topic": "foobar/sensor2",' + ' "unique_id": "unique2"' + "}}}", + ["sensor.none_sensor1", "sensor.none_sensor2"], + ), + ], +) async def test_cleanup_device_mqtt( hass: HomeAssistant, device_registry: dr.DeviceRegistry, 
entity_registry: er.EntityRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_topic: str, + discovery_payload: str, + entity_ids: list[str], ) -> None: - """Test discvered device is cleaned up when removed through MQTT.""" + """Test discovered device is cleaned up when removed through MQTT.""" mqtt_mock = await mqtt_mock_entry() - data = ( - '{ "device":{"identifiers":["0AFFD2"]},' - ' "state_topic": "foobar/sensor",' - ' "unique_id": "unique" }' - ) - async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) + # set up an existing sensor first + data = ( + '{ "device":{"identifiers":["0AFFD3"]},' + ' "name": "sensor_base",' + ' "state_topic": "foobar/sensor",' + ' "unique_id": "unique_base" }' + ) + base_discovery_topic = "homeassistant/sensor/bla_base/config" + base_entity_id = "sensor.none_sensor_base" + async_fire_mqtt_message(hass, base_discovery_topic, data) + await hass.async_block_till_done() + + # Verify the base entity has been created and it has a state + base_device_entry = device_registry.async_get_device( + identifiers={("mqtt", "0AFFD3")} + ) + assert base_device_entry is not None + entity_entry = entity_registry.async_get(base_entity_id) + assert entity_entry is not None + state = hass.states.get(base_entity_id) + assert state is not None + + async_fire_mqtt_message(hass, discovery_topic, discovery_payload) await hass.async_block_till_done() # Verify device and registry entries are created device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry is not None - entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") - assert entity_entry is not None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None - state = hass.states.get("sensor.none_mqtt_sensor") - assert state is not None + state = hass.states.get(entity_id) + assert state is not None - async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", "") + async_fire_mqtt_message(hass, discovery_topic, "") await hass.async_block_till_done() await hass.async_block_till_done() # Verify device and registry entries are cleared device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry is None - entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") - assert entity_entry is None - # Verify state is removed - state = hass.states.get("sensor.none_mqtt_sensor") - assert state is None - await hass.async_block_till_done() + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is None + + # Verify state is removed + state = hass.states.get(entity_id) + assert state is None + await hass.async_block_till_done() # Verify retained discovery topics have not been cleared again mqtt_mock.async_publish.assert_not_called() + # Verify the base entity still exists and it has a state + base_device_entry = device_registry.async_get_device( + identifiers={("mqtt", "0AFFD3")} + ) + assert base_device_entry is not None + entity_entry = entity_registry.async_get(base_entity_id) + assert entity_entry is not None + state = hass.states.get(base_entity_id) + assert state is not None + + +async def test_cleanup_device_mqtt_device_discovery( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test discovered device is cleaned up partly when removed 
through MQTT.""" + await mqtt_mock_entry() + + discovery_topic = "homeassistant/device/bla/config" + discovery_payload = ( + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "cmps": {"sens1": {' + ' "p": "sensor",' + ' "name": "sensor1",' + ' "state_topic": "foobar/sensor1",' + ' "unique_id": "unique1"' + ' },"sens2": {' + ' "p": "sensor",' + ' "name": "sensor2",' + ' "state_topic": "foobar/sensor2",' + ' "unique_id": "unique2"' + "}}}" + ) + entity_ids = ["sensor.none_sensor1", "sensor.none_sensor2"] + async_fire_mqtt_message(hass, discovery_topic, discovery_payload) + await hass.async_block_till_done() + + # Verify device and registry entries are created + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + + state = hass.states.get(entity_id) + assert state is not None + + # Do update and remove sensor 2 from device + discovery_payload_update1 = ( + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "cmps": {"sens1": {' + ' "p": "sensor",' + ' "name": "sensor1",' + ' "state_topic": "foobar/sensor1",' + ' "unique_id": "unique1"' + ' },"sens2": {' + ' "p": "sensor"' + "}}}" + ) + async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update1) + await hass.async_block_till_done() + state = hass.states.get(entity_ids[0]) + assert state is not None + state = hass.states.get(entity_ids[1]) + assert state is None + + # Repeating the update + async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update1) + await hass.async_block_till_done() + state = hass.states.get(entity_ids[0]) + assert state is not None + state = hass.states.get(entity_ids[1]) + assert state is None + + # Removing last sensor + discovery_payload_update2 = ( + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "cmps": {"sens1": {' + ' "p": "sensor"' + ' },"sens2": {' + ' "p": "sensor"' + "}}}" + ) + async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update2) + await hass.async_block_till_done() + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + # Verify the device entry was removed with the last sensor + assert device_entry is None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is None + + state = hass.states.get(entity_id) + assert state is None + + # Repeating the update + async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update2) + await hass.async_block_till_done() + + # Clear the empty discovery payload and verify there was nothing to cleanup + async_fire_mqtt_message(hass, discovery_topic, "") + await hass.async_block_till_done() + assert "No device components to cleanup" in caplog.text + async def test_cleanup_device_multiple_config_entries( hass: HomeAssistant, @@ -1444,17 +2471,22 @@ async def test_complex_discovery_topic_prefix( @patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.0) -async def test_mqtt_integration_discovery_subscribe_unsubscribe( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient +@pytest.mark.parametrize( + "reason", ["single_instance_allowed", "already_configured", "some_abort_error"] +) +async def 
test_mqtt_integration_discovery_flow_filtering_on_redundant_payload( hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient, reason: str ) -> None: - """Check MQTT integration discovery subscribe and unsubscribe.""" + """Check MQTT integration discovery starts a flow once.""" + flow_calls: list[MqttServiceInfo] = [] class TestFlow(config_entries.ConfigFlow): """Test flow.""" async def async_step_mqtt(self, discovery_info: MqttServiceInfo) -> FlowResult: """Test mqtt step.""" - return self.async_abort(reason="already_configured") + flow_calls.append(discovery_info) + return self.async_abort(reason=reason) mock_platform(hass, "comp.config_flow", None) @@ -1465,13 +2497,6 @@ async def test_mqtt_integration_discovery_subscribe_unsubscribe( """Handle birth message.""" birth.set() - wait_unsub = asyncio.Event() - - @callback - def _mock_unsubscribe(topics: list[str]) -> tuple[int, int]: - wait_unsub.set() - return (0, 0) - entry = MockConfigEntry(domain=mqtt.DOMAIN, data=ENTRY_DEFAULT_BIRTH_MESSAGE) entry.add_to_hass(hass) with ( @@ -1480,7 +2505,6 @@ async def test_mqtt_integration_discovery_subscribe_unsubscribe( return_value={"comp": ["comp/discovery/#"]}, ), mock_config_flow("comp", TestFlow), - patch.object(mqtt_client_mock, "unsubscribe", side_effect=_mock_unsubscribe), ): assert await hass.config_entries.async_setup(entry.entry_id) await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) @@ -1490,31 +2514,45 @@ async def test_mqtt_integration_discovery_subscribe_unsubscribe( assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) assert not mqtt_client_mock.unsubscribe.called mqtt_client_mock.reset_mock() + assert len(flow_calls) == 0 await hass.async_block_till_done(wait_background_tasks=True) - async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") - await wait_unsub.wait() - mqtt_client_mock.unsubscribe.assert_called_once_with(["comp/discovery/#"]) + async_fire_mqtt_message(hass, "comp/discovery/bla/config", "initial message") await hass.async_block_till_done(wait_background_tasks=True) + assert len(flow_calls) == 1 + + # A redundant message does not start a new flow + await hass.async_block_till_done(wait_background_tasks=True) + async_fire_mqtt_message(hass, "comp/discovery/bla/config", "initial message") + await hass.async_block_till_done(wait_background_tasks=True) + assert len(flow_calls) == 1 + + # An updated message starts a new flow + await hass.async_block_till_done(wait_background_tasks=True) + async_fire_mqtt_message(hass, "comp/discovery/bla/config", "update message") + await hass.async_block_till_done(wait_background_tasks=True) + assert len(flow_calls) == 2 @patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.0) -async def test_mqtt_discovery_unsubscribe_once( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient +async def test_mqtt_discovery_flow_starts_once( + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + caplog: pytest.LogCaptureFixture, + mock_mqtt_flow: config_entries.ConfigFlow, + mqtt_data_flow_calls: list[MqttServiceInfo], ) -> None: - """Check MQTT integration discovery unsubscribe once.""" - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - async def async_step_mqtt(self, discovery_info: MqttServiceInfo) -> FlowResult: - """Test mqtt 
step.""" - await asyncio.sleep(0) - return self.async_abort(reason="already_configured") + """Check MQTT integration discovery starts a flow once. + A flow should be started once after discovery, + and after an entry was removed, to trigger re-discovery. + """ + mock_integration( + hass, MockModule(domain="comp", async_setup_entry=AsyncMock(return_value=True)) + ) mock_platform(hass, "comp.config_flow", None) birth = asyncio.Event() @@ -1524,13 +2562,6 @@ async def test_mqtt_discovery_unsubscribe_once( """Handle birth message.""" birth.set() - wait_unsub = asyncio.Event() - - @callback - def _mock_unsubscribe(topics: list[str]) -> tuple[int, int]: - wait_unsub.set() - return (0, 0) - entry = MockConfigEntry(domain=mqtt.DOMAIN, data=ENTRY_DEFAULT_BIRTH_MESSAGE) entry.add_to_hass(hass) @@ -1539,8 +2570,7 @@ async def test_mqtt_discovery_unsubscribe_once( "homeassistant.components.mqtt.discovery.async_get_mqtt", return_value={"comp": ["comp/discovery/#"]}, ), - mock_config_flow("comp", TestFlow), - patch.object(mqtt_client_mock, "unsubscribe", side_effect=_mock_unsubscribe), + mock_config_flow("comp", mock_mqtt_flow), ): assert await hass.config_entries.async_setup(entry.entry_id) await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) @@ -1548,17 +2578,86 @@ async def test_mqtt_discovery_unsubscribe_once( await birth.wait() assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) - assert not mqtt_client_mock.unsubscribe.called + # Test the initial flow + async_fire_mqtt_message(hass, "comp/discovery/bla/config1", "initial message") await hass.async_block_till_done(wait_background_tasks=True) - async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") - async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") - await wait_unsub.wait() - await asyncio.sleep(0) + assert len(mqtt_data_flow_calls) == 1 + assert mqtt_data_flow_calls[0].topic == "comp/discovery/bla/config1" + assert mqtt_data_flow_calls[0].payload == "initial message" + + # Test we can ignore updates if they are the same + with caplog.at_level(logging.DEBUG): + async_fire_mqtt_message( + hass, "comp/discovery/bla/config1", "initial message" + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Ignoring already processed discovery message" in caplog.text + assert len(mqtt_data_flow_calls) == 1 + + # Test we can apply updates + async_fire_mqtt_message(hass, "comp/discovery/bla/config1", "update message") await hass.async_block_till_done(wait_background_tasks=True) - mqtt_client_mock.unsubscribe.assert_called_once_with(["comp/discovery/#"]) + + assert len(mqtt_data_flow_calls) == 2 + assert mqtt_data_flow_calls[1].topic == "comp/discovery/bla/config1" + assert mqtt_data_flow_calls[1].payload == "update message" + + # Test we set up multiple entries + async_fire_mqtt_message(hass, "comp/discovery/bla/config2", "initial message") await hass.async_block_till_done(wait_background_tasks=True) + assert len(mqtt_data_flow_calls) == 3 + assert mqtt_data_flow_calls[2].topic == "comp/discovery/bla/config2" + assert mqtt_data_flow_calls[2].payload == "initial message" + + # Test we update multiple entries + async_fire_mqtt_message(hass, "comp/discovery/bla/config2", "update message") + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(mqtt_data_flow_calls) == 4 + assert mqtt_data_flow_calls[3].topic == "comp/discovery/bla/config2" + assert mqtt_data_flow_calls[3].payload == "update message" + + # Test an empty message triggers a flow to allow cleanup 
(if needed) + async_fire_mqtt_message(hass, "comp/discovery/bla/config2", "") + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(mqtt_data_flow_calls) == 5 + assert mqtt_data_flow_calls[4].topic == "comp/discovery/bla/config2" + assert mqtt_data_flow_calls[4].payload == "" + + # Clean up the second entry + assert ( + entry := hass.config_entries.async_entry_for_domain_unique_id( + "comp", "comp/discovery/bla/config2" + ) + ) is not None + await hass.config_entries.async_remove(entry.entry_id) + assert len(hass.config_entries.async_entries(domain="comp")) == 1 + + # Remove remaining entry1 and assert this triggers an + # automatic re-discovery flow with latest config + assert ( + entry := hass.config_entries.async_entry_for_domain_unique_id( + "comp", "comp/discovery/bla/config1" + ) + ) is not None + assert entry.unique_id == "comp/discovery/bla/config1" + await hass.config_entries.async_remove(entry.entry_id) + assert len(hass.config_entries.async_entries(domain="comp")) == 0 + + # Wait for re-discovery flow to complete + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mqtt_data_flow_calls) == 6 + assert mqtt_data_flow_calls[5].topic == "comp/discovery/bla/config1" + assert mqtt_data_flow_calls[5].payload == "update message" + + # Re-discovery triggered the config flow + assert len(hass.config_entries.async_entries(domain="comp")) == 1 + + assert not mqtt_client_mock.unsubscribe.called + async def test_clear_config_topic_disabled_entity( hass: HomeAssistant, @@ -1814,3 +2913,213 @@ async def test_discovery_dispatcher_signal_type_messages( assert len(calls) == 1 assert calls[0] == test_data unsub() + + +@pytest.mark.parametrize( + ("discovery_topic", "discovery_payload", "entity_ids"), + [ + ( + "homeassistant/device/bla/config", + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "state_topic": "foobar/sensor-shared",' + ' "cmps": {"sens1": {' + ' "platform": "sensor",' + ' "name": "sensor1",' + ' "unique_id": "unique1"' + ' },"sens2": {' + ' "platform": "sensor",' + ' "name": "sensor2",' + ' "unique_id": "unique2"' + ' },"sens3": {' + ' "platform": "sensor",' + ' "name": "sensor3",' + ' "state_topic": "foobar/sensor3",' + ' "unique_id": "unique3"' + "}}}", + ["sensor.none_sensor1", "sensor.none_sensor2", "sensor.none_sensor3"], + ), + ], +) +async def test_shared_state_topic( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_topic: str, + discovery_payload: str, + entity_ids: list[str], +) -> None: + """Test a shared state_topic can be used.""" + await mqtt_mock_entry() + + async_fire_mqtt_message(hass, discovery_topic, discovery_payload) + await hass.async_block_till_done() + + # Verify device and registry entries are created + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_UNKNOWN + + async_fire_mqtt_message(hass, "foobar/sensor-shared", "New state") + + entity_id = entity_ids[0] + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "New state" + entity_id = entity_ids[1] + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "New state" + entity_id = 
entity_ids[2] + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_UNKNOWN + + async_fire_mqtt_message(hass, "foobar/sensor3", "New state3") + entity_id = entity_ids[2] + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "New state3" + + +@pytest.mark.parametrize("single_configs", [copy.deepcopy(TEST_SINGLE_CONFIGS)]) +async def test_discovery_with_late_via_device_discovery( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, + single_configs: list[tuple[str, dict[str, Any]]], +) -> None: + """Test a via device is available and the discovery of the via device is late.""" + await mqtt_mock_entry() + + await hass.async_block_till_done() + await hass.async_block_till_done() + + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is None + # Discovery single config schema + for discovery_topic, config in single_configs: + config["device"]["via_device"] = "id_via_very_unique" + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is not None + assert via_device_entry.name is None + + await hass.async_block_till_done() + + # Now discover the via device (a switch) + via_device_config = { + "name": None, + "command_topic": "test-switch-topic", + "unique_id": "very_unique_switch", + "device": {"identifiers": ["id_via_very_unique"], "name": "My Switch"}, + } + payload = json.dumps(via_device_config) + via_device_discovery_topic = "homeassistant/switch/very_unique/config" + async_fire_mqtt_message( + hass, + via_device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is not None + assert via_device_entry.name == "My Switch" + + await help_check_discovered_items(hass, device_registry, tag_mock) + + +@pytest.mark.parametrize("single_configs", [copy.deepcopy(TEST_SINGLE_CONFIGS)]) +async def test_discovery_with_late_via_device_update( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, + single_configs: list[tuple[str, dict[str, Any]]], +) -> None: + """Test a via device is available and the discovery of the via device is set via an update.""" + await mqtt_mock_entry() + + await hass.async_block_till_done() + await hass.async_block_till_done() + + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is None + # Discovery single config schema without via device + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + assert via_device_entry is None + + # Resend the discovery update to set the via device + for discovery_topic, config in single_configs: + config["device"]["via_device"] = "id_via_very_unique" + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + via_device_entry = 
device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is not None + assert via_device_entry.name is None + + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Now discover the via device (a switch) + via_device_config = { + "name": None, + "command_topic": "test-switch-topic", + "unique_id": "very_unique_switch", + "device": {"identifiers": ["id_via_very_unique"], "name": "My Switch"}, + } + payload = json.dumps(via_device_config) + via_device_discovery_topic = "homeassistant/switch/very_unique/config" + async_fire_mqtt_message( + hass, + via_device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is not None + assert via_device_entry.name == "My Switch" + + await help_check_discovered_items(hass, device_registry, tag_mock) diff --git a/tests/components/mqtt/test_event.py b/tests/components/mqtt/test_event.py index 3d4847a406a..41049ed0887 100644 --- a/tests/components/mqtt/test_event.py +++ b/tests/components/mqtt/test_event.py @@ -37,6 +37,7 @@ from .test_common import ( help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_disabled_by_default, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -90,7 +91,7 @@ async def test_multiple_events_are_all_updating_the_state( """Test all events are respected and trigger a state write.""" await mqtt_mock_entry() with patch( - "homeassistant.components.mqtt.mixins.MqttEntity.async_write_ha_state" + "homeassistant.components.mqtt.entity.MqttEntity.async_write_ha_state" ) as mock_async_ha_write_state: async_fire_mqtt_message( hass, "test-topic", '{"event_type": "press", "duration": "short" }' @@ -109,7 +110,7 @@ async def test_handling_retained_event_payloads( """Test if event messages with a retained flag are ignored.""" await mqtt_mock_entry() with patch( - "homeassistant.components.mqtt.mixins.MqttEntity.async_write_ha_state" + "homeassistant.components.mqtt.entity.MqttEntity.async_write_ha_state" ) as mock_async_ha_write_state: async_fire_mqtt_message( hass, @@ -705,6 +706,18 @@ async def test_entity_name( ) +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = event.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( "hass_config", [ @@ -752,7 +765,7 @@ async def test_skipped_async_ha_write_state2( payload1 = '{"event_type": "press"}' payload2 = '{"event_type": "unknown"}' with patch( - "homeassistant.components.mqtt.mixins.MqttEntity.async_write_ha_state" + "homeassistant.components.mqtt.entity.MqttEntity.async_write_ha_state" ) as mock_async_ha_write_state: assert len(mock_async_ha_write_state.mock_calls) == 0 async_fire_mqtt_message(hass, topic, payload1) diff --git a/tests/components/mqtt/test_fan.py b/tests/components/mqtt/test_fan.py index 1d0cc809fd6..6c8afe8c1b4 100644 --- a/tests/components/mqtt/test_fan.py +++ b/tests/components/mqtt/test_fan.py @@ -1486,7 +1486,7 @@ async def test_encoding_subscribable_topics( attribute_value: Any, ) -> None: """Test handling of incoming encoded payload.""" - 
config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][fan.DOMAIN]) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][fan.DOMAIN]) config[ATTR_PRESET_MODES] = ["eco", "auto"] config[CONF_PRESET_MODE_COMMAND_TOPIC] = "fan/some_preset_mode_command_topic" config[CONF_PERCENTAGE_COMMAND_TOPIC] = "fan/some_percentage_command_topic" @@ -2201,7 +2201,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = fan.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) if topic == "preset_mode_command_topic": config[mqtt.DOMAIN][domain]["preset_modes"] = ["auto", "eco"] diff --git a/tests/components/mqtt/test_humidifier.py b/tests/components/mqtt/test_humidifier.py index 62de371af4b..20ca89181eb 100644 --- a/tests/components/mqtt/test_humidifier.py +++ b/tests/components/mqtt/test_humidifier.py @@ -12,7 +12,6 @@ from homeassistant.components.humidifier import ( ATTR_CURRENT_HUMIDITY, ATTR_HUMIDITY, ATTR_MODE, - DOMAIN, SERVICE_SET_HUMIDITY, SERVICE_SET_MODE, HumidifierAction, @@ -87,7 +86,9 @@ async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) """Turn all or specified humidifier on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} - await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) + await hass.services.async_call( + humidifier.DOMAIN, SERVICE_TURN_ON, data, blocking=True + ) async def async_turn_off( @@ -96,7 +97,9 @@ async def async_turn_off( """Turn all or specified humidier off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} - await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) + await hass.services.async_call( + humidifier.DOMAIN, SERVICE_TURN_OFF, data, blocking=True + ) async def async_set_mode( @@ -109,7 +112,9 @@ async def async_set_mode( if value is not None } - await hass.services.async_call(DOMAIN, SERVICE_SET_MODE, data, blocking=True) + await hass.services.async_call( + humidifier.DOMAIN, SERVICE_SET_MODE, data, blocking=True + ) async def async_set_humidity( @@ -122,7 +127,9 @@ async def async_set_humidity( if value is not None } - await hass.services.async_call(DOMAIN, SERVICE_SET_HUMIDITY, data, blocking=True) + await hass.services.async_call( + humidifier.DOMAIN, SERVICE_SET_HUMIDITY, data, blocking=True + ) @pytest.mark.parametrize( @@ -855,7 +862,9 @@ async def test_encoding_subscribable_topics( attribute_value: Any, ) -> None: """Test handling of incoming encoded payload.""" - config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][humidifier.DOMAIN]) + config: dict[str, Any] = copy.deepcopy( + DEFAULT_CONFIG[mqtt.DOMAIN][humidifier.DOMAIN] + ) config["modes"] = ["eco", "auto"] config[CONF_MODE_COMMAND_TOPIC] = "humidifier/some_mode_command_topic" await help_test_encoding_subscribable_topics( @@ -1466,7 +1475,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = humidifier.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) if topic == "mode_command_topic": config[mqtt.DOMAIN][domain]["modes"] = ["auto", "eco"] diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 5dab5689518..2ab664f5041 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -77,11 +77,6 @@ class _DebugInfo(TypedDict): config: _DebugDeviceInfo -@pytest.fixture(autouse=True) 
-def mock_storage(hass_storage: dict[str, Any]) -> None: - """Autouse hass_storage for the TestCase tests.""" - - async def test_command_template_value(hass: HomeAssistant) -> None: """Test the rendering of MQTT command template.""" @@ -235,7 +230,7 @@ async def test_value_template_fails(hass: HomeAssistant) -> None: ) with pytest.raises(MqttValueTemplateException) as exc: val_tpl.async_render_with_possible_json_value( - '{"some_var": null }', default=100 + '{"some_var": null }', default="100" ) assert str(exc.value) == ( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' " @@ -260,6 +255,26 @@ async def test_service_call_without_topic_does_not_publish( assert not mqtt_mock.async_publish.called +async def test_service_call_mqtt_entry_does_not_publish( + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient +) -> None: + """Test the publish service call fails if the MQTT config entry is not set up.""" + assert await async_setup_component(hass, mqtt.DOMAIN, {}) + with pytest.raises( + ServiceValidationError, + match='Cannot publish to topic "test_topic", make sure MQTT is set up correctly', + ): + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test_topic", + mqtt.ATTR_PAYLOAD: "payload", + }, + blocking=True, + ) + + # The use of a topic_template in an mqtt publish action call # has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 async def test_mqtt_publish_action_call_with_topic_and_topic_template_does_not_publish( @@ -840,7 +855,7 @@ async def test_receiving_message_with_non_utf8_topic_gets_logged( msg.payload = b"Payload" msg.qos = 2 msg.retain = True - msg.timestamp = time.monotonic() + msg.timestamp = time.monotonic() # type:ignore[assignment] mqtt_data: MqttData = hass.data["mqtt"] assert mqtt_data.client @@ -1202,7 +1217,6 @@ async def test_mqtt_ws_get_device_debug_info( } data_sensor = json.dumps(config_sensor) data_trigger = json.dumps(config_trigger) - config_sensor["platform"] = config_trigger["platform"] = mqtt.DOMAIN async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data_sensor) async_fire_mqtt_message( @@ -1259,7 +1273,6 @@ async def test_mqtt_ws_get_device_debug_info_binary( "unique_id": "unique", } data = json.dumps(config) - config["platform"] = mqtt.DOMAIN async_fire_mqtt_message(hass, "homeassistant/camera/bla/config", data) await hass.async_block_till_done() @@ -1494,7 +1507,7 @@ async def test_debug_info_non_mqtt( """Test we get empty debug_info for a device with non MQTT entities.""" await mqtt_mock_entry() domain = "sensor" - setup_test_component_platform(hass, domain, mock_sensor_entities) + setup_test_component_platform(hass, domain, mock_sensor_entities.values()) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) @@ -1829,11 +1842,17 @@ async def test_subscribe_connection_status( async def test_unload_config_entry( hass: HomeAssistant, - setup_with_birth_msg_client_mock: MqttMockPahoClient, + mqtt_client_mock: MqttMockPahoClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test unloading the MQTT entry.""" - mqtt_client_mock = setup_with_birth_msg_client_mock + entry = MockConfigEntry( + domain=mqtt.DOMAIN, + data={mqtt.CONF_BROKER: "test-broker"}, + ) + entry.add_to_hass(hass) + + assert await async_setup_component(hass, mqtt.DOMAIN, {}) assert hass.services.has_service(mqtt.DOMAIN, "dump") assert hass.services.has_service(mqtt.DOMAIN, "publish") @@ -1850,8 +1869,8 @@ 
mqtt_client_mock.publish.assert_any_call("just_in_time", "published", 0, False) assert new_mqtt_config_entry.state is ConfigEntryState.NOT_LOADED await hass.async_block_till_done(wait_background_tasks=True) - assert not hass.services.has_service(mqtt.DOMAIN, "dump") - assert not hass.services.has_service(mqtt.DOMAIN, "publish") + assert hass.services.has_service(mqtt.DOMAIN, "dump") + assert hass.services.has_service(mqtt.DOMAIN, "publish") assert "No ACK from MQTT server" not in caplog.text @@ -1859,6 +1878,9 @@ async def test_publish_or_subscribe_without_valid_config_entry( hass: HomeAssistant, record_calls: MessageCallbackType ) -> None: """Test internal publish function with bad use cases.""" + assert await async_setup_component(hass, mqtt.DOMAIN, {}) + assert hass.services.has_service(mqtt.DOMAIN, "dump") + assert hass.services.has_service(mqtt.DOMAIN, "publish") with pytest.raises(HomeAssistantError): await mqtt.async_publish( hass, "some-topic", "test-payload", qos=0, retain=False, encoding=None @@ -1899,7 +1921,7 @@ async def test_disabling_and_enabling_entry( config_light = '{"name": "test_new", "command_topic": "test-topic_new"}' with patch( - "homeassistant.components.mqtt.mixins.mqtt_config_entry_enabled", + "homeassistant.components.mqtt.entity.mqtt_config_entry_enabled", return_value=False, ): # Discovery of mqtt tag diff --git a/tests/components/mqtt/test_lawn_mower.py b/tests/components/mqtt/test_lawn_mower.py index 101a45787ef..0bef4196ef2 100644 --- a/tests/components/mqtt/test_lawn_mower.py +++ b/tests/components/mqtt/test_lawn_mower.py @@ -802,7 +802,9 @@ async def test_encoding_subscribable_topics( attribute_value: Any, ) -> None: """Test handling of incoming encoded payload.""" - config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][lawn_mower.DOMAIN]) + config: dict[str, Any] = copy.deepcopy( + DEFAULT_CONFIG[mqtt.DOMAIN][lawn_mower.DOMAIN] + ) config["actions"] = ["milk", "beer"] await help_test_encoding_subscribable_topics( hass, diff --git a/tests/components/mqtt/test_legacy_vacuum.py b/tests/components/mqtt/test_legacy_vacuum.py deleted file mode 100644 index 9b45b65d2cc..00000000000 --- a/tests/components/mqtt/test_legacy_vacuum.py +++ /dev/null @@ -1,83 +0,0 @@ -"""The tests for the Legacy Mqtt vacuum platform.""" - -# The legacy schema for MQTT vacuum was deprecated with HA Core 2023.8.0 -# and was removed with HA Core 2024.2.0 -# cleanup is planned with HA Core 2025.2 - -import json - -import pytest - -from homeassistant.components import mqtt, vacuum -from homeassistant.core import HomeAssistant -from homeassistant.helpers.typing import DiscoveryInfoType - -from tests.common import async_fire_mqtt_message -from tests.typing import MqttMockHAClientGenerator - -DEFAULT_CONFIG = {mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}} - - -@pytest.mark.parametrize( - ("hass_config", "removed"), - [ - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "legacy"}}}, True), - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}}, False), - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "state"}}}, True), - ], -) -async def test_removed_support_yaml( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, - removed: bool, -) -> None: - """Test that the removed support validation for the legacy schema works.""" - assert await mqtt_mock_entry() - entity = hass.states.get("vacuum.test") - - if removed: - assert entity is None - assert ( - "The 'schema' option has been removed, " - "please remove it from your 
configuration" in caplog.text - ) - else: - assert entity is not None - - -@pytest.mark.parametrize( - ("config", "removed"), - [ - ({"name": "test", "schema": "legacy"}, True), - ({"name": "test"}, False), - ({"name": "test", "schema": "state"}, True), - ], -) -async def test_removed_support_discovery( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, - config: DiscoveryInfoType, - removed: bool, -) -> None: - """Test that the removed support validation for the legacy schema works.""" - assert await mqtt_mock_entry() - - config_payload = json.dumps(config) - async_fire_mqtt_message(hass, "homeassistant/vacuum/test/config", config_payload) - await hass.async_block_till_done() - - entity = hass.states.get("vacuum.test") - assert entity is not None - - if removed: - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" in caplog.text - ) - else: - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" not in caplog.text - ) diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index 18815281f63..dbca09e803c 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -270,7 +270,7 @@ async def test_no_color_brightness_color_temp_hs_white_xy_if_no_topics( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None @@ -285,7 +285,7 @@ async def test_no_color_brightness_color_temp_hs_white_xy_if_no_topics( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None @@ -350,7 +350,7 @@ async def test_controlling_state_via_topic( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -366,7 +366,7 @@ async def test_controlling_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -649,7 +649,7 @@ async def test_invalid_state_via_topic( assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert 
state.attributes.get("hs_color") is None assert state.attributes.get("xy_color") is None @@ -665,7 +665,7 @@ async def test_invalid_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") == (0, 0) assert state.attributes.get("xy_color") == (0.323, 0.329) @@ -721,16 +721,16 @@ async def test_invalid_state_via_topic( state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("rgb_color") == (255, 254, 250) + assert state.attributes.get("rgb_color") == (255, 255, 251) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") == 153 + assert state.attributes.get("color_temp_kelvin") == 6535 assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") == (54.768, 1.6) - assert state.attributes.get("xy_color") == (0.326, 0.333) + assert state.attributes.get("xy_color") == (0.325, 0.333) async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "") light_state = hass.states.get("light.test") - assert light_state.attributes["color_temp"] == 153 + assert light_state.attributes["color_temp_kelvin"] == 6535 @pytest.mark.parametrize( @@ -939,7 +939,7 @@ async def test_controlling_state_via_topic_with_templates( hass, "test_light_rgb/color_temp/status", '{"hello": "300"}' ) state = hass.states.get("light.test") - assert state.attributes.get("color_temp") == 300 + assert state.attributes.get("color_temp_kelvin") == 3333 assert state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -1008,7 +1008,7 @@ async def test_sending_mqtt_commands_and_optimistic( "brightness": 95, "hs_color": [100, 100], "effect": "random", - "color_temp": 100, + "color_temp_kelvin": 100000, "color_mode": "hs", }, ) @@ -1021,7 +1021,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("brightness") == 95 assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -1053,7 +1053,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( - hass, "light.test", brightness=10, rgb_color=[80, 40, 20] + hass, "light.test", brightness=10, rgb_color=(80, 40, 20) ) mqtt_mock.async_publish.assert_has_calls( [ @@ -1073,7 +1073,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( - hass, "light.test", brightness=20, rgbw_color=[80, 40, 20, 10] + hass, "light.test", brightness=20, rgbw_color=(80, 40, 20, 10) ) mqtt_mock.async_publish.assert_has_calls( [ @@ -1093,7 +1093,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( - hass, "light.test", brightness=40, rgbww_color=[80, 
40, 20, 10, 8] + hass, "light.test", brightness=40, rgbww_color=(80, 40, 20, 10, 8) ) mqtt_mock.async_publish.assert_has_calls( [ @@ -1112,7 +1112,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_COLOR_MODE) == "rgbww" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/set", "on", 2, False), @@ -1130,7 +1130,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - await common.async_turn_on(hass, "light.test", brightness=60, xy_color=[0.2, 0.3]) + await common.async_turn_on(hass, "light.test", brightness=60, xy_color=(0.2, 0.3)) mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/set", "on", 2, False), @@ -1148,7 +1148,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_COLOR_MODE) == "xy" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - await common.async_turn_on(hass, "light.test", color_temp=125) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=8000) mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/color_temp/set", "125", 2, False), @@ -1160,7 +1160,7 @@ async def test_sending_mqtt_commands_and_optimistic( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 60 - assert state.attributes.get("color_temp") == 125 + assert state.attributes.get("color_temp_kelvin") == 8000 assert state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -1193,7 +1193,7 @@ async def test_sending_mqtt_rgb_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 64]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 64)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1236,7 +1236,7 @@ async def test_sending_mqtt_rgbw_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 64, 32]) + await common.async_turn_on(hass, "light.test", rgbw_color=(255, 128, 64, 32)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1279,7 +1279,7 @@ async def test_sending_mqtt_rgbww_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 64, 32, 16]) + await common.async_turn_on(hass, "light.test", rgbww_color=(255, 128, 64, 32, 16)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1321,7 +1321,7 @@ async def test_sending_mqtt_color_temp_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", color_temp=100) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=10000) mqtt_mock.async_publish.assert_has_calls( [ @@ -1469,7 +1469,7 @@ async def test_on_command_brightness( # Turn on w/ just a color to ensure brightness gets # added and sent. 
- await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1545,7 +1545,7 @@ async def test_on_command_brightness_scaled( # Turn on w/ just a color to ensure brightness gets # added and sent. - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1626,7 +1626,7 @@ async def test_on_command_rgb( mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1722,7 +1722,7 @@ async def test_on_command_rgbw( mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. - await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 0, 16]) + await common.async_turn_on(hass, "light.test", rgbw_color=(255, 128, 0, 16)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1818,7 +1818,7 @@ async def test_on_command_rgbww( mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. - await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 0, 16, 32]) + await common.async_turn_on(hass, "light.test", rgbww_color=(255, 128, 0, 16, 32)) mqtt_mock.async_publish.assert_has_calls( [ @@ -2103,7 +2103,7 @@ async def test_explicit_color_mode( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -2119,7 +2119,7 @@ async def test_explicit_color_mode( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -2248,7 +2248,7 @@ async def test_explicit_color_mode_templated( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) is None assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -2258,7 +2258,7 @@ async def test_explicit_color_mode_templated( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -3262,7 +3262,7 @@ 
async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = light.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) if topic == "effect_command_topic": config[mqtt.DOMAIN][domain]["effect_list"] = ["random", "color_loop"] elif topic == "white_command_topic": @@ -3333,7 +3333,7 @@ async def test_encoding_subscribable_topics( init_payload: tuple[str, str] | None, ) -> None: """Test handling of incoming encoded payload.""" - config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) config[CONF_EFFECT_COMMAND_TOPIC] = "light/CONF_EFFECT_COMMAND_TOPIC" config[CONF_RGB_COMMAND_TOPIC] = "light/CONF_RGB_COMMAND_TOPIC" config[CONF_BRIGHTNESS_COMMAND_TOPIC] = "light/CONF_BRIGHTNESS_COMMAND_TOPIC" diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 829222e0304..988cce85653 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -99,7 +99,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.json import json_dumps -from homeassistant.util.json import JsonValueType, json_loads +from homeassistant.util.json import json_loads from .test_common import ( help_custom_config, @@ -172,11 +172,11 @@ COLOR_MODES_CONFIG = { class JsonValidator: """Helper to compare JSON.""" - def __init__(self, jsondata: JsonValueType) -> None: + def __init__(self, jsondata: bytes | str) -> None: """Initialize JSON validator.""" self.jsondata = jsondata - def __eq__(self, other: JsonValueType) -> bool: + def __eq__(self, other: bytes | str) -> bool: # type:ignore[override] """Compare JSON data.""" return json_loads(self.jsondata) == json_loads(other) @@ -423,7 +423,9 @@ async def test_single_color_mode( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", brightness=50, color_temp=192) + await common.async_turn_on( + hass, "light.test", brightness=50, color_temp_kelvin=5208 + ) async_fire_mqtt_message( hass, @@ -435,7 +437,7 @@ async def test_single_color_mode( assert state.state == STATE_ON assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - assert state.attributes.get(light.ATTR_COLOR_TEMP) == 192 + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208 assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50 assert state.attributes.get(light.ATTR_COLOR_MODE) == color_modes[0] @@ -454,15 +456,17 @@ async def test_turn_on_with_unknown_color_mode_optimistic( state = hass.states.get("light.test") assert state.attributes.get("color_mode") == light.ColorMode.UNKNOWN assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.state == STATE_ON # Turn on the light with brightness or color_temp attributes - await common.async_turn_on(hass, "light.test", brightness=50, color_temp=192) + await common.async_turn_on( + hass, "light.test", brightness=50, color_temp_kelvin=5208 + ) state = hass.states.get("light.test") assert state.attributes.get("color_mode") == light.ColorMode.COLOR_TEMP assert state.attributes.get("brightness") == 50 - assert state.attributes.get("color_temp") == 192 + assert state.attributes.get("color_temp_kelvin") == 5208 
assert state.state == STATE_ON @@ -494,7 +498,7 @@ async def test_controlling_state_with_unknown_color_mode( ) state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get(light.ATTR_COLOR_TEMP) is None + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) is None assert state.attributes.get(light.ATTR_BRIGHTNESS) is None assert state.attributes.get(light.ATTR_COLOR_MODE) == light.ColorMode.UNKNOWN @@ -507,7 +511,7 @@ async def test_controlling_state_with_unknown_color_mode( state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get(light.ATTR_COLOR_TEMP) == 192 + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208 assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50 assert state.attributes.get(light.ATTR_COLOR_MODE) == light.ColorMode.COLOR_TEMP @@ -567,7 +571,7 @@ async def test_no_color_brightness_color_temp_if_no_topics( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") is None assert state.attributes.get("hs_color") is None @@ -578,7 +582,7 @@ async def test_no_color_brightness_color_temp_if_no_topics( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") is None assert state.attributes.get("hs_color") is None @@ -632,7 +636,7 @@ async def test_controlling_state_via_topic( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") is None assert state.attributes.get("hs_color") is None @@ -653,7 +657,7 @@ async def test_controlling_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None # rgb color has priority + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority assert state.attributes.get("effect") == "colorloop" assert state.attributes.get("xy_color") == (0.323, 0.329) assert state.attributes.get("hs_color") == (0.0, 0.0) @@ -674,12 +678,12 @@ async def test_controlling_state_via_topic( assert state.attributes.get("rgb_color") == ( 255, 253, - 248, + 249, ) # temp converted to color assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") == 155 + assert state.attributes.get("color_temp_kelvin") == 6451 assert state.attributes.get("effect") == "colorloop" - assert state.attributes.get("xy_color") == (0.328, 0.334) # temp converted to color + assert state.attributes.get("xy_color") == (0.328, 0.333) # temp converted to color assert state.attributes.get("hs_color") == (44.098, 2.43) # temp converted to color # Turn the light off @@ -706,7 +710,7 @@ async def 
test_controlling_state_via_topic( ) light_state = hass.states.get("light.test") - assert light_state.attributes.get("xy_color") == (0.141, 0.14) + assert light_state.attributes.get("xy_color") == (0.141, 0.141) async_fire_mqtt_message( hass, "test_light_rgb", '{"state":"ON", "color":{"h":180,"s":50}}' @@ -794,7 +798,7 @@ async def test_controlling_state_via_topic2( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("brightness") is None assert state.attributes.get("color_mode") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -820,7 +824,7 @@ async def test_controlling_state_via_topic2( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_mode") == "rgbww" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") == "colorloop" assert state.attributes.get("hs_color") == (20.552, 70.98) assert state.attributes.get("rgb_color") == (255, 136, 74) @@ -886,7 +890,7 @@ async def test_controlling_state_via_topic2( ) state = hass.states.get("light.test") assert state.attributes.get("color_mode") == "color_temp" - assert state.attributes.get("color_temp") == 155 + assert state.attributes.get("color_temp_kelvin") == 6451 # White async_fire_mqtt_message( @@ -965,7 +969,7 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("brightness") is None assert state.attributes.get("color_mode") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -990,7 +994,7 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_mode") == "hs" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") == (15.765, 100.0) assert state.attributes.get("rgb_color") == (255, 67, 0) @@ -1012,13 +1016,13 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_mode") == "color_temp" - assert state.attributes.get("color_temp") == 353 + assert state.attributes.get("color_temp_kelvin") == 2832 assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") == (28.125, 61.661) - assert state.attributes.get("rgb_color") == (255, 171, 97) + assert state.attributes.get("rgb_color") == (255, 171, 98) assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None - assert state.attributes.get("xy_color") == (0.513, 0.386) + assert state.attributes.get("xy_color") == (0.512, 0.385) @pytest.mark.parametrize( @@ -1053,7 +1057,7 @@ async def test_sending_mqtt_commands_and_optimistic( "brightness": 95, "hs_color": [100, 100], "effect": "random", - "color_temp": 100, + 
"color_temp_kelvin": 10000, }, ) mock_restore_cache(hass, (fake_state,)) @@ -1065,7 +1069,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("brightness") == 95 assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" - assert state.attributes.get("color_temp") is None # hs_color has priority + assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority color_modes = [light.ColorMode.COLOR_TEMP, light.ColorMode.HS] assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes expected_features = ( @@ -1083,7 +1087,7 @@ async def test_sending_mqtt_commands_and_optimistic( state = hass.states.get("light.test") assert state.state == STATE_ON - await common.async_turn_on(hass, "light.test", color_temp=90) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=11111) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", @@ -1095,7 +1099,7 @@ async def test_sending_mqtt_commands_and_optimistic( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("color_mode") == light.ColorMode.COLOR_TEMP - assert state.attributes.get("color_temp") == 90 + assert state.attributes.get("color_temp_kelvin") == 11111 await common.async_turn_off(hass, "light.test") @@ -1108,13 +1112,13 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.reset_mock() await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator( - '{"state": "ON", "color": {"r": 0, "g": 123, "b": 255,' - ' "x": 0.14, "y": 0.131, "h": 210.824, "s": 100.0},' + '{"state": "ON", "color": {"r": 0, "g": 124, "b": 255,' + ' "x": 0.14, "y": 0.133, "h": 210.824, "s": 100.0},' ' "brightness": 50}' ), 2, @@ -1125,10 +1129,10 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("color_mode") == light.ColorMode.HS assert state.attributes["brightness"] == 50 assert state.attributes["hs_color"] == (210.824, 100.0) - assert state.attributes["rgb_color"] == (0, 123, 255) - assert state.attributes["xy_color"] == (0.14, 0.131) + assert state.attributes["rgb_color"] == (0, 124, 255) + assert state.attributes["xy_color"] == (0.14, 0.133) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator( @@ -1148,7 +1152,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes["rgb_color"] == (255, 56, 59) assert state.attributes["xy_color"] == (0.654, 0.301) - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator( @@ -1205,7 +1209,7 @@ async def test_sending_mqtt_commands_and_optimistic2( "on", { "brightness": 95, - "color_temp": 100, + "color_temp_kelvin": 10000, "color_mode": "rgb", "effect": "random", "hs_color": [100, 100], @@ -1223,7 +1227,7 @@ async def test_sending_mqtt_commands_and_optimistic2( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("brightness") == 95 assert state.attributes.get("color_mode") == "rgb" - assert 
state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") == "random" assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -1244,7 +1248,7 @@ async def test_sending_mqtt_commands_and_optimistic2( assert state.state == STATE_ON # Turn the light on with color temperature - await common.async_turn_on(hass, "light.test", color_temp=90) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=11111) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator('{"state":"ON","color_temp":90}'), @@ -1265,7 +1269,7 @@ async def test_sending_mqtt_commands_and_optimistic2( assert state.state == STATE_OFF # Set hs color - await common.async_turn_on(hass, "light.test", brightness=75, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=75, hs_color=(359, 78)) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1286,7 +1290,7 @@ async def test_sending_mqtt_commands_and_optimistic2( mqtt_mock.async_publish.reset_mock() # Set rgb color - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1305,7 +1309,7 @@ async def test_sending_mqtt_commands_and_optimistic2( mqtt_mock.async_publish.reset_mock() # Set rgbw color - await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 0, 123]) + await common.async_turn_on(hass, "light.test", rgbw_color=(255, 128, 0, 123)) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1326,7 +1330,7 @@ async def test_sending_mqtt_commands_and_optimistic2( mqtt_mock.async_publish.reset_mock() # Set rgbww color - await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 0, 45, 32]) + await common.async_turn_on(hass, "light.test", rgbww_color=(255, 128, 0, 45, 32)) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1348,7 +1352,7 @@ async def test_sending_mqtt_commands_and_optimistic2( # Set xy color await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.223] + hass, "light.test", brightness=50, xy_color=(0.123, 0.223) ) state = hass.states.get("light.test") assert state.state == STATE_ON @@ -1435,10 +1439,10 @@ async def test_sending_hs_color( mqtt_mock.reset_mock() await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1497,11 +1501,11 @@ async def test_sending_rgb_color_no_brightness( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, 
"light.test", brightness=50, hs_color=(359, 78)) await common.async_turn_on( - hass, "light.test", rgb_color=[255, 128, 0], brightness=255 + hass, "light.test", rgb_color=(255, 128, 0), brightness=255 ) mqtt_mock.async_publish.assert_has_calls( @@ -1514,7 +1518,7 @@ async def test_sending_rgb_color_no_brightness( ), call( "test_light_rgb/set", - JsonValidator('{"state": "ON", "color": {"r": 50, "g": 11, "b": 11}}'), + JsonValidator('{"state": "ON", "color": {"r": 50, "g": 11, "b": 12}}'), 0, False, ), @@ -1555,17 +1559,17 @@ async def test_sending_rgb_color_no_brightness2( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) await common.async_turn_on( - hass, "light.test", rgb_color=[255, 128, 0], brightness=255 + hass, "light.test", rgb_color=(255, 128, 0), brightness=255 ) await common.async_turn_on( - hass, "light.test", rgbw_color=[128, 64, 32, 16], brightness=128 + hass, "light.test", rgbw_color=(128, 64, 32, 16), brightness=128 ) await common.async_turn_on( - hass, "light.test", rgbww_color=[128, 64, 32, 16, 8], brightness=64 + hass, "light.test", rgbww_color=(128, 64, 32, 16, 8), brightness=64 ) mqtt_mock.async_publish.assert_has_calls( @@ -1635,18 +1639,18 @@ async def test_sending_rgb_color_with_brightness( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=255, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=255, hs_color=(359, 78)) await common.async_turn_on(hass, "light.test", brightness=1) - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ call( "test_light_rgb/set", JsonValidator( - '{"state": "ON", "color": {"r": 0, "g": 123, "b": 255},' + '{"state": "ON", "color": {"r": 0, "g": 124, "b": 255},' ' "brightness": 50}' ), 0, @@ -1705,18 +1709,18 @@ async def test_sending_rgb_color_with_scaled_brightness( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=255, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=255, hs_color=(359, 78)) await common.async_turn_on(hass, "light.test", brightness=1) - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ call( "test_light_rgb/set", JsonValidator( - '{"state": "ON", "color": {"r": 0, "g": 123, "b": 255},' + '{"state": "ON", "color": {"r": 0, "g": 124, "b": 255},' ' "brightness": 20}' ), 0, @@ -1820,17 +1824,17 @@ async def test_sending_xy_color( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) - 
await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ call( "test_light_rgb/set", JsonValidator( - '{"state": "ON", "color": {"x": 0.14, "y": 0.131},' + '{"state": "ON", "color": {"x": 0.14, "y": 0.133},' ' "brightness": 50}' ), 0, @@ -2181,7 +2185,9 @@ async def test_white_scale( ], ) async def test_invalid_values( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that invalid color/brightness/etc. values are ignored.""" await mqtt_mock_entry() @@ -2194,7 +2200,7 @@ async def test_invalid_values( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) # Turn on the light @@ -2212,7 +2218,7 @@ async def test_invalid_values( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None # Empty color value async_fire_mqtt_message( hass, @@ -2277,17 +2283,21 @@ async def test_invalid_values( ) state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 100 + assert state.attributes.get("color_temp_kelvin") == 10000 # Bad color temperature async_fire_mqtt_message( hass, "test_light_rgb", '{"state":"ON", "color_temp": "badValue"}' ) + assert ( + "Invalid color temp value 'badValue' received for entity light.test" + in caplog.text + ) # Color temperature should not have changed state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 100 + assert state.attributes.get("color_temp_kelvin") == 10000 @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) @@ -2629,7 +2639,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = light.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) if topic == "effect_command_topic": config[mqtt.DOMAIN][domain]["effect_list"] = ["random", "color_loop"] @@ -2680,7 +2690,7 @@ async def test_encoding_subscribable_topics( init_payload: tuple[str, str] | None, ) -> None: """Test handling of incoming encoded payload.""" - config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) config["color_mode"] = True config["supported_color_modes"] = [ "color_temp", diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index d570454a6bf..4d2b93ff159 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -205,14 +205,16 @@ async def test_single_color_mode( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", brightness=50, color_temp=192) + await 
common.async_turn_on( + hass, "light.test", brightness=50, color_temp_kelvin=5208 + ) async_fire_mqtt_message(hass, "test_light", "on,50,192") color_modes = [light.ColorMode.COLOR_TEMP] state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - assert state.attributes.get(light.ATTR_COLOR_TEMP) == 192 + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208 assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50 assert state.attributes.get(light.ATTR_COLOR_MODE) == color_modes[0] @@ -250,7 +252,7 @@ async def test_state_change_via_topic( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_light_rgb", "on") @@ -259,7 +261,7 @@ async def test_state_change_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None async_fire_mqtt_message(hass, "test_light_rgb", "off") @@ -314,7 +316,7 @@ async def test_state_brightness_color_effect_temp_change_via_topic( assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("effect") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) # turn on the light @@ -322,9 +324,9 @@ async def test_state_brightness_color_effect_temp_change_via_topic( state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("rgb_color") == (255, 128, 63) + assert state.attributes.get("rgb_color") == (255, 128, 64) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None # rgb color has priority + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority assert state.attributes.get("effect") is None # turn on the light @@ -338,7 +340,7 @@ async def test_state_brightness_color_effect_temp_change_via_topic( 255, ) # temp converted to color assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") == 145 + assert state.attributes.get("color_temp_kelvin") == 6896 assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") == (0.317, 0.317) # temp converted to color assert state.attributes.get("hs_color") == ( @@ -432,7 +434,7 @@ async def test_sending_mqtt_commands_and_optimistic( "brightness": 95, "hs_color": [100, 100], "effect": "random", - "color_temp": 100, + "color_temp_kelvin": 10000, }, ) mock_restore_cache(hass, (fake_state,)) @@ -443,7 +445,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" - assert state.attributes.get("color_temp") is None # hs_color has priority + assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_off(hass, "light.test") @@ -463,14 +465,14 @@ async def test_sending_mqtt_commands_and_optimistic( assert 
state.state == STATE_ON # Set color_temp - await common.async_turn_on(hass, "light.test", color_temp=70) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=14285) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,70,--,-", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 70 + assert state.attributes.get("color_temp_kelvin") == 14285 # Set full brightness await common.async_turn_on(hass, "light.test", brightness=255) @@ -482,7 +484,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON # Full brightness - no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-128-0,30.118-100.0", 2, False ) @@ -492,14 +494,14 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("rgb_color") == (255, 128, 0) # Full brightness - normalization of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[128, 64, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(128, 64, 0)) mqtt_mock.async_publish.assert_called_once_with( - "test_light_rgb/set", "on,,,255-127-0,30.0-100.0", 2, False + "test_light_rgb/set", "on,,,255-128-0,30.0-100.0", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("rgb_color") == (255, 127, 0) + assert state.attributes.get("rgb_color") == (255, 128, 0) # Set half brightness await common.async_turn_on(hass, "light.test", brightness=128) @@ -511,7 +513,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON # Half brightness - scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[0, 255, 128]) + await common.async_turn_on(hass, "light.test", rgb_color=(0, 255, 128)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-128-64,150.118-100.0", 2, False ) @@ -521,14 +523,14 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("rgb_color") == (0, 255, 128) # Half brightness - normalization+scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[0, 32, 16]) + await common.async_turn_on(hass, "light.test", rgb_color=(0, 32, 16)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-128-64,150.0-100.0", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("rgb_color") == (0, 255, 127) + assert state.attributes.get("rgb_color") == (0, 255, 128) @pytest.mark.parametrize( @@ -594,7 +596,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( assert state.state == STATE_UNKNOWN # Set color_temp - await common.async_turn_on(hass, "light.test", color_temp=70) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=14285) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,70,--,-", 0, False ) @@ -614,7 +616,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( assert not state.attributes.get("brightness") # Full brightness - no scaling of RGB values sent over MQTT - await 
common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-128-0,30.118-100.0", 0, False ) @@ -624,9 +626,9 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( assert not state.attributes.get("rgb_color") # Full brightness - normalization of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[128, 64, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(128, 64, 0)) mqtt_mock.async_publish.assert_called_once_with( - "test_light_rgb/set", "on,,,255-127-0,30.0-100.0", 0, False + "test_light_rgb/set", "on,,,255-128-0,30.0-100.0", 0, False ) mqtt_mock.async_publish.reset_mock() @@ -638,7 +640,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( mqtt_mock.async_publish.reset_mock() # Half brightness - no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[0, 255, 128]) + await common.async_turn_on(hass, "light.test", rgb_color=(0, 255, 128)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-255-128,150.118-100.0", 0, False ) @@ -646,9 +648,9 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( state = hass.states.get("light.test") # Half brightness - normalization but no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[0, 32, 16]) + await common.async_turn_on(hass, "light.test", rgb_color=(0, 32, 16)) mqtt_mock.async_publish.assert_called_once_with( - "test_light_rgb/set", "on,,,0-255-127,150.0-100.0", 0, False + "test_light_rgb/set", "on,,,0-255-128,150.0-100.0", 0, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") @@ -846,7 +848,7 @@ async def test_invalid_values( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) @@ -856,7 +858,7 @@ async def test_invalid_values( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None # hs_color has priority + assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("effect") == "rainbow" @@ -885,14 +887,14 @@ async def test_invalid_values( async_fire_mqtt_message(hass, "test_light_rgb", "on,,215,None-None-None") state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 215 + assert state.attributes.get("color_temp_kelvin") == 4651 # bad color temp values async_fire_mqtt_message(hass, "test_light_rgb", "on,,off,") # color temp should not have changed state = hass.states.get("light.test") - assert state.attributes.get("color_temp") == 215 + assert state.attributes.get("color_temp_kelvin") == 4651 # bad effect value async_fire_mqtt_message(hass, "test_light_rgb", "on,255,a-b-c,white") @@ -1259,7 +1261,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = light.DOMAIN - config = 
copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) if topic == "effect_command_topic": config[mqtt.DOMAIN][domain]["effect_list"] = ["random", "color_loop"] diff --git a/tests/components/mqtt/test_lock.py b/tests/components/mqtt/test_lock.py index 331f21a0a7c..034f9b5ff6e 100644 --- a/tests/components/mqtt/test_lock.py +++ b/tests/components/mqtt/test_lock.py @@ -10,14 +10,8 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, - STATE_OPEN, - STATE_OPENING, - STATE_UNLOCKED, - STATE_UNLOCKING, LockEntityFeature, + LockState, ) from homeassistant.components.mqtt.lock import MQTT_LOCK_ATTRIBUTES_BLOCKED from homeassistant.const import ( @@ -89,12 +83,12 @@ CONFIG_WITH_STATES = { @pytest.mark.parametrize( ("hass_config", "payload", "lock_state"), [ - (CONFIG_WITH_STATES, "closed", STATE_LOCKED), - (CONFIG_WITH_STATES, "closing", STATE_LOCKING), - (CONFIG_WITH_STATES, "open", STATE_OPEN), - (CONFIG_WITH_STATES, "opening", STATE_OPENING), - (CONFIG_WITH_STATES, "unlocked", STATE_UNLOCKED), - (CONFIG_WITH_STATES, "unlocking", STATE_UNLOCKING), + (CONFIG_WITH_STATES, "closed", LockState.LOCKED), + (CONFIG_WITH_STATES, "closing", LockState.LOCKING), + (CONFIG_WITH_STATES, "open", LockState.OPEN), + (CONFIG_WITH_STATES, "opening", LockState.OPENING), + (CONFIG_WITH_STATES, "unlocked", LockState.UNLOCKED), + (CONFIG_WITH_STATES, "unlocking", LockState.UNLOCKING), ], ) async def test_controlling_state_via_topic( @@ -115,18 +109,18 @@ async def test_controlling_state_via_topic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state is lock_state + assert state.state == lock_state @pytest.mark.parametrize( ("hass_config", "payload", "lock_state"), [ - (CONFIG_WITH_STATES, "closed", STATE_LOCKED), - (CONFIG_WITH_STATES, "closing", STATE_LOCKING), - (CONFIG_WITH_STATES, "open", STATE_OPEN), - (CONFIG_WITH_STATES, "opening", STATE_OPENING), - (CONFIG_WITH_STATES, "unlocked", STATE_UNLOCKED), - (CONFIG_WITH_STATES, "unlocking", STATE_UNLOCKING), + (CONFIG_WITH_STATES, "closed", LockState.LOCKED), + (CONFIG_WITH_STATES, "closing", LockState.LOCKING), + (CONFIG_WITH_STATES, "open", LockState.OPEN), + (CONFIG_WITH_STATES, "opening", LockState.OPENING), + (CONFIG_WITH_STATES, "unlocked", LockState.UNLOCKED), + (CONFIG_WITH_STATES, "unlocking", LockState.UNLOCKING), (CONFIG_WITH_STATES, "None", STATE_UNKNOWN), ], ) @@ -146,13 +140,13 @@ async def test_controlling_non_default_state_via_topic( async_fire_mqtt_message(hass, "state-topic", payload) state = hass.states.get("lock.test") - assert state.state is lock_state + assert state.state == lock_state # Empty state is ignored async_fire_mqtt_message(hass, "state-topic", "") state = hass.states.get("lock.test") - assert state.state is lock_state + assert state.state == lock_state @pytest.mark.parametrize( @@ -165,7 +159,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"closed"}', - STATE_LOCKED, + LockState.LOCKED, ), ( help_custom_config( @@ -174,7 +168,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"closing"}', - STATE_LOCKING, + LockState.LOCKING, ), ( help_custom_config( @@ -183,7 +177,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"unlocking"}', - STATE_UNLOCKING, + LockState.UNLOCKING, ), ( 
help_custom_config( @@ -192,7 +186,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"open"}', - STATE_OPEN, + LockState.OPEN, ), ( help_custom_config( @@ -201,7 +195,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"opening"}', - STATE_OPENING, + LockState.OPENING, ), ( help_custom_config( @@ -210,7 +204,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"unlocked"}', - STATE_UNLOCKED, + LockState.UNLOCKED, ), ( help_custom_config( @@ -238,7 +232,7 @@ async def test_controlling_state_via_topic_and_json_message( async_fire_mqtt_message(hass, "state-topic", payload) state = hass.states.get("lock.test") - assert state.state is lock_state + assert state.state == lock_state @pytest.mark.parametrize( @@ -251,7 +245,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"closed"}', - STATE_LOCKED, + LockState.LOCKED, ), ( help_custom_config( @@ -260,7 +254,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"closing"}', - STATE_LOCKING, + LockState.LOCKING, ), ( help_custom_config( @@ -269,7 +263,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"open"}', - STATE_OPEN, + LockState.OPEN, ), ( help_custom_config( @@ -278,7 +272,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"opening"}', - STATE_OPENING, + LockState.OPENING, ), ( help_custom_config( @@ -287,7 +281,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"unlocked"}', - STATE_UNLOCKED, + LockState.UNLOCKED, ), ( help_custom_config( @@ -296,7 +290,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"unlocking"}', - STATE_UNLOCKING, + LockState.UNLOCKING, ), ], ) @@ -315,7 +309,7 @@ async def test_controlling_non_default_state_via_topic_and_json_message( async_fire_mqtt_message(hass, "state-topic", payload) state = hass.states.get("lock.test") - assert state.state is lock_state + assert state.state == lock_state @pytest.mark.parametrize( @@ -342,7 +336,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -352,7 +346,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_LOCKED + assert state.state == LockState.LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -362,7 +356,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -393,7 +387,7 @@ async def 
test_sending_mqtt_commands_with_template( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -408,7 +402,7 @@ async def test_sending_mqtt_commands_with_template( ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_LOCKED + assert state.state == LockState.LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -423,7 +417,7 @@ async def test_sending_mqtt_commands_with_template( ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -453,7 +447,7 @@ async def test_sending_mqtt_commands_and_explicit_optimistic( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -463,7 +457,7 @@ async def test_sending_mqtt_commands_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_LOCKED + assert state.state == LockState.LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -473,7 +467,7 @@ async def test_sending_mqtt_commands_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -502,7 +496,7 @@ async def test_sending_mqtt_commands_support_open_and_optimistic( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == LockEntityFeature.OPEN @@ -513,7 +507,7 @@ async def test_sending_mqtt_commands_support_open_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_LOCKED + assert state.state == LockState.LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -523,7 +517,7 @@ async def test_sending_mqtt_commands_support_open_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -533,7 +527,7 @@ async def test_sending_mqtt_commands_support_open_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_OPEN + assert state.state == LockState.OPEN assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -564,7 +558,7 @@ async def 
test_sending_mqtt_commands_support_open_and_explicit_optimistic( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == LockEntityFeature.OPEN @@ -575,7 +569,7 @@ async def test_sending_mqtt_commands_support_open_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_LOCKED + assert state.state == LockState.LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -585,7 +579,7 @@ async def test_sending_mqtt_commands_support_open_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -595,7 +589,7 @@ async def test_sending_mqtt_commands_support_open_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state is STATE_OPEN + assert state.state == LockState.OPEN assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -644,7 +638,7 @@ async def test_sending_mqtt_commands_pessimistic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state is STATE_LOCKED + assert state.state == LockState.LOCKED await hass.services.async_call( lock.DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: "lock.test"}, blocking=True @@ -658,7 +652,7 @@ async def test_sending_mqtt_commands_pessimistic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED await hass.services.async_call( lock.DOMAIN, SERVICE_OPEN, {ATTR_ENTITY_ID: "lock.test"}, blocking=True @@ -672,7 +666,7 @@ async def test_sending_mqtt_commands_pessimistic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state is STATE_UNLOCKED + assert state.state == LockState.UNLOCKED # send lock command to lock await hass.services.async_call( @@ -688,21 +682,21 @@ async def test_sending_mqtt_commands_pessimistic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state is STATE_LOCKING + assert state.state == LockState.LOCKING # receive jammed state from lock async_fire_mqtt_message(hass, "state-topic", "JAMMED") await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state is STATE_JAMMED + assert state.state == LockState.JAMMED # receive solved state from lock async_fire_mqtt_message(hass, "state-topic", "LOCKED") await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state is STATE_LOCKED + assert state.state == LockState.LOCKED @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) diff --git a/tests/components/mqtt/test_number.py b/tests/components/mqtt/test_number.py index 44652681fc3..48aaa11f672 100644 --- a/tests/components/mqtt/test_number.py +++ b/tests/components/mqtt/test_number.py @@ -47,6 +47,7 @@ from .test_common import ( help_test_entity_device_info_update, 
help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -1100,6 +1101,18 @@ async def test_entity_name( ) +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = number.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_select.py b/tests/components/mqtt/test_select.py index 60eb4893760..8d79a3ce609 100644 --- a/tests/components/mqtt/test_select.py +++ b/tests/components/mqtt/test_select.py @@ -610,7 +610,7 @@ def _test_options_attributes_options_config( @pytest.mark.parametrize( ("hass_config", "options"), - _test_options_attributes_options_config((["milk", "beer"], ["milk"], [])), + _test_options_attributes_options_config((["milk", "beer"], ["milk"], [])), # type:ignore[arg-type] ) async def test_options_attributes( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, options: list[str] ) diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index a62c36404ca..7f418864872 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -53,6 +53,7 @@ from .test_common import ( help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_disabled_by_default, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -299,6 +300,17 @@ async def test_setting_sensor_to_long_state_via_mqtt_message( STATE_UNKNOWN, True, ), + ( + help_custom_config( + sensor.DOMAIN, + DEFAULT_CONFIG, + ({"device_class": sensor.SensorDeviceClass.TIMESTAMP},), + ), + sensor.SensorDeviceClass.TIMESTAMP, + "None", + STATE_UNKNOWN, + False, + ), ( help_custom_config( sensor.DOMAIN, @@ -702,7 +714,7 @@ async def test_force_update_disabled( def test_callback(event: Event) -> None: events.append(event) - hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) + hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) # type:ignore[arg-type] async_fire_mqtt_message(hass, "test-topic", "100") await hass.async_block_till_done() @@ -740,7 +752,7 @@ async def test_force_update_enabled( def test_callback(event: Event) -> None: events.append(event) - hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) + hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) # type:ignore[arg-type] async_fire_mqtt_message(hass, "test-topic", "100") await hass.async_block_till_done() @@ -945,7 +957,7 @@ async def test_invalid_state_class( } } }, - "The option `options` can only be used together with " + "The option `options` must be used together with " "device class `enum`, got `device_class` 'gas'", ), ( @@ -1572,6 +1584,18 @@ async def test_entity_name( ) +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = sensor.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( "hass_config", [ diff --git
a/tests/components/mqtt/test_siren.py b/tests/components/mqtt/test_siren.py index 3f720e3ee3c..58a5cb735f9 100644 --- a/tests/components/mqtt/test_siren.py +++ b/tests/components/mqtt/test_siren.py @@ -594,7 +594,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, siren.DOMAIN, DEFAULT_CONFIG, {} + hass, mqtt_mock_entry, siren.DOMAIN, DEFAULT_CONFIG, None ) @@ -974,7 +974,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with command templates and different encoding.""" domain = siren.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) config[mqtt.DOMAIN][domain][siren.ATTR_AVAILABLE_TONES] = ["siren", "xylophone"] await help_test_publishing_with_custom_encoding( diff --git a/tests/components/mqtt/test_switch.py b/tests/components/mqtt/test_switch.py index fddbfd8fbe2..dceeff07377 100644 --- a/tests/components/mqtt/test_switch.py +++ b/tests/components/mqtt/test_switch.py @@ -403,7 +403,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, switch.DOMAIN, DEFAULT_CONFIG, {} + hass, mqtt_mock_entry, switch.DOMAIN, DEFAULT_CONFIG, None ) diff --git a/tests/components/mqtt/test_tag.py b/tests/components/mqtt/test_tag.py index adebd157588..41c417fe3e9 100644 --- a/tests/components/mqtt/test_tag.py +++ b/tests/components/mqtt/test_tag.py @@ -1,9 +1,9 @@ """The tests for MQTT tag scanner.""" -from collections.abc import Generator import copy import json -from unittest.mock import ANY, AsyncMock, patch +from typing import Any +from unittest.mock import ANY, AsyncMock import pytest @@ -46,13 +46,6 @@ DEFAULT_TAG_SCAN_JSON = ( ) -@pytest.fixture -def tag_mock() -> Generator[AsyncMock]: - """Fixture to mock tag.""" - with patch("homeassistant.components.tag.async_scan_tag") as mock_tag: - yield mock_tag - - @pytest.mark.no_fail_on_log_exception async def test_discover_bad_tag( hass: HomeAssistant, @@ -504,7 +497,7 @@ async def test_entity_device_info_update( """Test device registry update.""" await mqtt_mock_entry() - config = { + config: dict[str, Any] = { "topic": "test-topic", "device": { "identifiers": ["helloworld"], diff --git a/tests/components/mqtt/test_text.py b/tests/components/mqtt/test_text.py index ebcb835844d..96924030279 100644 --- a/tests/components/mqtt/test_text.py +++ b/tests/components/mqtt/test_text.py @@ -469,7 +469,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, text.DOMAIN, DEFAULT_CONFIG, {} + hass, mqtt_mock_entry, text.DOMAIN, DEFAULT_CONFIG, None ) diff --git a/tests/components/mqtt/test_update.py b/tests/components/mqtt/test_update.py index 937b8cdebd0..4ca10cbe8b2 100644 --- a/tests/components/mqtt/test_update.py +++ b/tests/components/mqtt/test_update.py @@ -25,6 +25,7 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_reloadable, 
help_test_setting_attribute_via_mqtt_json_message, @@ -313,6 +314,60 @@ async def test_empty_json_state_message( } ], ) +async def test_invalid_json_state_message( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test an invalid JSON payload.""" + state_topic = "test/state-topic" + await mqtt_mock_entry() + + async_fire_mqtt_message( + hass, + state_topic, + '{"installed_version":"1.9.0","latest_version":"1.9.0",' + '"title":"Test Update 1 Title","release_url":"https://example.com/release1",' + '"release_summary":"Test release summary 1",' + '"entity_picture": "https://example.com/icon1.png"}', + ) + + await hass.async_block_till_done() + + state = hass.states.get("update.test_update") + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "1.9.0" + assert state.attributes.get("latest_version") == "1.9.0" + assert state.attributes.get("release_summary") == "Test release summary 1" + assert state.attributes.get("release_url") == "https://example.com/release1" + assert state.attributes.get("title") == "Test Update 1 Title" + assert state.attributes.get("entity_picture") == "https://example.com/icon1.png" + + # Test update schema validation with invalid value in JSON update + async_fire_mqtt_message(hass, state_topic, '{"update_percentage":101}') + + await hass.async_block_till_done() + assert ( + "Schema violation after processing payload '{\"update_percentage\":101}' on " + "topic 'test/state-topic' for entity 'update.test_update': value must be at " + "most 100 for dictionary value @ data['update_percentage']" in caplog.text + ) + + +@pytest.mark.parametrize( + "hass_config", + [ + { + mqtt.DOMAIN: { + update.DOMAIN: { + "state_topic": "test/state-topic", + "name": "Test Update", + "display_precision": 1, + } + } + } + ], +) async def test_json_state_message( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: @@ -354,6 +409,45 @@ async def test_json_state_message( assert state.attributes.get("installed_version") == "1.9.0" assert state.attributes.get("latest_version") == "2.0.0" assert state.attributes.get("entity_picture") == "https://example.com/icon2.png" + assert state.attributes.get("in_progress") is False + assert state.attributes.get("update_percentage") is None + + # Test in_progress status + async_fire_mqtt_message(hass, state_topic, '{"in_progress":true}') + await hass.async_block_till_done() + + state = hass.states.get("update.test_update") + assert state.state == STATE_ON + assert state.attributes.get("installed_version") == "1.9.0" + assert state.attributes.get("latest_version") == "2.0.0" + assert state.attributes.get("entity_picture") == "https://example.com/icon2.png" + assert state.attributes.get("in_progress") is True + assert state.attributes.get("update_percentage") is None + + async_fire_mqtt_message(hass, state_topic, '{"in_progress":false}') + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state.attributes.get("in_progress") is False + + # Test update_percentage status + async_fire_mqtt_message(hass, state_topic, '{"update_percentage":51.75}') + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state.attributes.get("in_progress") is True + assert state.attributes.get("update_percentage") == 51.75 + assert state.attributes.get("display_precision") == 1 + + async_fire_mqtt_message(hass, state_topic, '{"update_percentage":100}') + await
hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state.attributes.get("in_progress") is True + assert state.attributes.get("update_percentage") == 100 + + async_fire_mqtt_message(hass, state_topic, '{"update_percentage":null}') + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state.attributes.get("in_progress") is False + assert state.attributes.get("update_percentage") is None @pytest.mark.parametrize( @@ -724,6 +818,10 @@ async def test_reloadable( '{"entity_picture": "https://example.com/icon1.png"}', '{"entity_picture": "https://example.com/icon2.png"}', ), + ("test-topic", '{"in_progress": true}', '{"in_progress": false}'), + ("test-topic", '{"update_percentage": 0}', '{"update_percentage": 50}'), + ("test-topic", '{"update_percentage": 50}', '{"update_percentage": 100}'), + ("test-topic", '{"update_percentage": 100}', '{"update_percentage": null}'), ("availability-topic", "online", "offline"), ("json-attributes-topic", '{"attr1": "val1"}', '{"attr1": "val2"}'), ], @@ -775,3 +873,19 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) + + +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = update.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, + mqtt_mock_entry, + domain, + config, + default_entity_picture="https://brands.home-assistant.io/_/mqtt/icon.png", + ) diff --git a/tests/components/mqtt/test_util.py b/tests/components/mqtt/test_util.py index a3802de69da..37bf6982b7a 100644 --- a/tests/components/mqtt/test_util.py +++ b/tests/components/mqtt/test_util.py @@ -236,8 +236,7 @@ async def test_waiting_for_client_not_loaded( unsubs: list[Callable[[], None]] = [] - async def _async_just_in_time_subscribe() -> Callable[[], None]: - nonlocal unsub + async def _async_just_in_time_subscribe() -> None: assert await mqtt.async_wait_for_mqtt_client(hass) # Awaiting a second time should work too and return True assert await mqtt.async_wait_for_mqtt_client(hass) @@ -261,12 +260,12 @@ async def test_waiting_for_client_loaded( """Test waiting for client where mqtt entry is loaded.""" unsub: Callable[[], None] | None = None - async def _async_just_in_time_subscribe() -> Callable[[], None]: + async def _async_just_in_time_subscribe() -> None: nonlocal unsub assert await mqtt.async_wait_for_mqtt_client(hass) unsub = await mqtt.async_subscribe(hass, "test_topic", lambda msg: None) - entry = hass.config_entries.async_entries(mqtt.DATA_MQTT)[0] + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] assert entry.state is ConfigEntryState.LOADED await _async_just_in_time_subscribe() @@ -290,7 +289,7 @@ async def test_waiting_for_client_entry_fails( ) entry.add_to_hass(hass) - async def _async_just_in_time_subscribe() -> Callable[[], None]: + async def _async_just_in_time_subscribe() -> None: assert not await mqtt.async_wait_for_mqtt_client(hass) hass.async_create_task(_async_just_in_time_subscribe()) @@ -300,7 +299,7 @@ async def test_waiting_for_client_entry_fails( side_effect=Exception, ): await hass.config_entries.async_setup(entry.entry_id) - assert entry.state is ConfigEntryState.SETUP_ERROR + assert entry.state is ConfigEntryState.SETUP_ERROR # type:ignore[comparison-overlap] async def test_waiting_for_client_setup_fails( @@ -318,7 +317,7 @@ 
async def test_waiting_for_client_setup_fails( ) entry.add_to_hass(hass) - async def _async_just_in_time_subscribe() -> Callable[[], None]: + async def _async_just_in_time_subscribe() -> None: assert not await mqtt.async_wait_for_mqtt_client(hass) hass.async_create_task(_async_just_in_time_subscribe()) @@ -327,7 +326,7 @@ async def test_waiting_for_client_setup_fails( # Simulate MQTT setup fails before the client would become available mqtt_client_mock.connect.side_effect = Exception assert not await hass.config_entries.async_setup(entry.entry_id) - assert entry.state is ConfigEntryState.SETUP_ERROR + assert entry.state is ConfigEntryState.SETUP_ERROR # type:ignore[comparison-overlap] @patch("homeassistant.components.mqtt.util.AVAILABILITY_TIMEOUT", 0.01) diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index 7fc4ff981fd..c1c662048d7 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -2,7 +2,6 @@ from copy import deepcopy import json -import logging from typing import Any from unittest.mock import patch @@ -22,15 +21,13 @@ from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, - DOMAIN, SERVICE_CLEAN_SPOT, SERVICE_LOCATE, SERVICE_PAUSE, SERVICE_RETURN_TO_BASE, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_DOCKED, + VacuumActivity, ) from homeassistant.const import CONF_NAME, ENTITY_MATCH_ALL, STATE_UNKNOWN from homeassistant.core import HomeAssistant @@ -102,32 +99,6 @@ CONFIG_ALL_SERVICES = help_custom_config( ) -async def test_warning_schema_option( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the warning on use of deprecated schema option.""" - await mqtt_mock_entry() - # Send discovery message with deprecated schema option - async_fire_mqtt_message( - hass, - f"homeassistant/{vacuum.DOMAIN}/bla/config", - '{"name": "test", "schema": "state", "o": {"name": "Bla2MQTT", "sw": "0.99", "url":"https://example.com/support"}}', - ) - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) - - state = hass.states.get("vacuum.test") - # We do not fail if the schema option is still in the payload, but we log an error - assert state is not None - with caplog.at_level(logging.WARNING): - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" in caplog.text - ) - - @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_default_supported_features( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator @@ -149,31 +120,34 @@ async def test_all_commands( mqtt_mock = await mqtt_mock_entry() await hass.services.async_call( - DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "start", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "stop", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) 
mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "pause", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "locate", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, + SERVICE_CLEAN_SPOT, + {"entity_id": ENTITY_MATCH_ALL}, + blocking=True, ) mqtt_mock.async_publish.assert_called_once_with( COMMAND_TOPIC, "clean_spot", 0, False @@ -181,7 +155,10 @@ async def test_all_commands( mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, + SERVICE_RETURN_TO_BASE, + {"entity_id": ENTITY_MATCH_ALL}, + blocking=True, ) mqtt_mock.async_publish.assert_called_once_with( COMMAND_TOPIC, "return_to_base", 0, False @@ -232,37 +209,43 @@ async def test_commands_without_supported_features( mqtt_mock = await mqtt_mock_entry() await hass.services.async_call( - DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, + SERVICE_RETURN_TO_BASE, + {"entity_id": ENTITY_MATCH_ALL}, + blocking=True, ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, + SERVICE_CLEAN_SPOT, + {"entity_id": ENTITY_MATCH_ALL}, + blocking=True, ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() @@ -308,7 +291,7 @@ async def test_command_without_command_topic( mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() - await common.async_send_command(hass, "some command", "vacuum.test") + await common.async_send_command(hass, "some command", entity_id="vacuum.test") mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() @@ -329,7 +312,7 @@ async def test_status( }""" async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert state.attributes.get(ATTR_BATTERY_LEVEL) == 54 assert 
state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-50" assert state.attributes.get(ATTR_FAN_SPEED) == "max" @@ -342,7 +325,7 @@ async def test_status( async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-charging-60" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 61 assert state.attributes.get(ATTR_FAN_SPEED) == "min" @@ -382,7 +365,7 @@ async def test_no_fan_vacuum( }""" async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None assert state.attributes.get(ATTR_BATTERY_LEVEL) == 54 @@ -396,7 +379,7 @@ async def test_no_fan_vacuum( async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert state.attributes.get(ATTR_FAN_SPEED) is None assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None @@ -410,7 +393,7 @@ async def test_no_fan_vacuum( async_fire_mqtt_message(hass, "vacuum/state", message) state = hass.states.get("vacuum.mqtttest") - assert state.state == STATE_DOCKED + assert state.state == VacuumActivity.DOCKED assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-charging-60" assert state.attributes.get(ATTR_BATTERY_LEVEL) == 61 diff --git a/tests/components/mqtt/test_valve.py b/tests/components/mqtt/test_valve.py index 53a7190eaf3..6dd0102b8a3 100644 --- a/tests/components/mqtt/test_valve.py +++ b/tests/components/mqtt/test_valve.py @@ -14,6 +14,7 @@ from homeassistant.components.valve import ( ATTR_CURRENT_POSITION, ATTR_POSITION, SERVICE_SET_VALVE_POSITION, + ValveState, ) from homeassistant.const import ( ATTR_ASSUMED_STATE, @@ -22,10 +23,6 @@ from homeassistant.const import ( SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE, SERVICE_STOP_VALVE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -103,14 +100,14 @@ DEFAULT_CONFIG_REPORTS_POSITION = { @pytest.mark.parametrize( ("message", "asserted_state"), [ - ("open", STATE_OPEN), - ("closed", STATE_CLOSED), - ("closing", STATE_CLOSING), - ("opening", STATE_OPENING), - ('{"state" : "open"}', STATE_OPEN), - ('{"state" : "closed"}', STATE_CLOSED), - ('{"state" : "closing"}', STATE_CLOSING), - ('{"state" : "opening"}', STATE_OPENING), + ("open", ValveState.OPEN), + ("closed", ValveState.CLOSED), + ("closing", ValveState.CLOSING), + ("opening", ValveState.OPENING), + ('{"state" : "open"}', ValveState.OPEN), + ('{"state" : "closed"}', ValveState.CLOSED), + ('{"state" : "closing"}', ValveState.CLOSING), + ('{"state" : "opening"}', ValveState.OPENING), ], ) async def test_state_via_state_topic_no_position( @@ -155,10 +152,10 @@ async def test_state_via_state_topic_no_position( @pytest.mark.parametrize( ("message", "asserted_state"), [ - ('{"state":"open"}', STATE_OPEN), - ('{"state":"closed"}', STATE_CLOSED), - ('{"state":"closing"}', STATE_CLOSING), - ('{"state":"opening"}', STATE_OPENING), + ('{"state":"open"}', ValveState.OPEN), + ('{"state":"closed"}', ValveState.CLOSED), + ('{"state":"closing"}', ValveState.CLOSING), + ('{"state":"opening"}', ValveState.OPENING), ], ) async def 
test_state_via_state_topic_with_template( @@ -199,9 +196,9 @@ async def test_state_via_state_topic_with_template( @pytest.mark.parametrize( ("message", "asserted_state"), [ - ('{"position":100}', STATE_OPEN), - ('{"position":50.0}', STATE_OPEN), - ('{"position":0}', STATE_CLOSED), + ('{"position":100}', ValveState.OPEN), + ('{"position":50.0}', ValveState.OPEN), + ('{"position":0}', ValveState.CLOSED), ('{"position":null}', STATE_UNKNOWN), ('{"position":"non_numeric"}', STATE_UNKNOWN), ('{"ignored":12}', STATE_UNKNOWN), @@ -245,23 +242,23 @@ async def test_state_via_state_topic_with_position_template( ("message", "asserted_state", "valve_position"), [ ("invalid", STATE_UNKNOWN, None), - ("0", STATE_CLOSED, 0), - ("opening", STATE_OPENING, None), - ("50", STATE_OPEN, 50), - ("closing", STATE_CLOSING, None), - ("100", STATE_OPEN, 100), + ("0", ValveState.CLOSED, 0), + ("opening", ValveState.OPENING, None), + ("50", ValveState.OPEN, 50), + ("closing", ValveState.CLOSING, None), + ("100", ValveState.OPEN, 100), ("open", STATE_UNKNOWN, None), ("closed", STATE_UNKNOWN, None), - ("-10", STATE_CLOSED, 0), - ("110", STATE_OPEN, 100), - ('{"position": 0, "state": "opening"}', STATE_OPENING, 0), - ('{"position": 10, "state": "opening"}', STATE_OPENING, 10), - ('{"position": 50, "state": "open"}', STATE_OPEN, 50), - ('{"position": 100, "state": "closing"}', STATE_CLOSING, 100), - ('{"position": 90, "state": "closing"}', STATE_CLOSING, 90), - ('{"position": 0, "state": "closed"}', STATE_CLOSED, 0), - ('{"position": -10, "state": "closed"}', STATE_CLOSED, 0), - ('{"position": 110, "state": "open"}', STATE_OPEN, 100), + ("-10", ValveState.CLOSED, 0), + ("110", ValveState.OPEN, 100), + ('{"position": 0, "state": "opening"}', ValveState.OPENING, 0), + ('{"position": 10, "state": "opening"}', ValveState.OPENING, 10), + ('{"position": 50, "state": "open"}', ValveState.OPEN, 50), + ('{"position": 100, "state": "closing"}', ValveState.CLOSING, 100), + ('{"position": 90, "state": "closing"}', ValveState.CLOSING, 90), + ('{"position": 0, "state": "closed"}', ValveState.CLOSED, 0), + ('{"position": -10, "state": "closed"}', ValveState.CLOSED, 0), + ('{"position": 110, "state": "open"}', ValveState.OPEN, 100), ], ) async def test_state_via_state_topic_through_position( @@ -319,18 +316,18 @@ async def test_opening_closing_state_is_reset( assert not state.attributes.get(ATTR_ASSUMED_STATE) messages = [ - ('{"position": 0, "state": "opening"}', STATE_OPENING, 0), - ('{"position": 50, "state": "opening"}', STATE_OPENING, 50), - ('{"position": 60}', STATE_OPENING, 60), - ('{"position": 100, "state": "opening"}', STATE_OPENING, 100), - ('{"position": 100, "state": null}', STATE_OPEN, 100), - ('{"position": 90, "state": "closing"}', STATE_CLOSING, 90), - ('{"position": 40}', STATE_CLOSING, 40), - ('{"position": 0}', STATE_CLOSED, 0), - ('{"position": 10}', STATE_OPEN, 10), - ('{"position": 0, "state": "opening"}', STATE_OPENING, 0), - ('{"position": 0, "state": "closing"}', STATE_CLOSING, 0), - ('{"position": 0}', STATE_CLOSED, 0), + ('{"position": 0, "state": "opening"}', ValveState.OPENING, 0), + ('{"position": 50, "state": "opening"}', ValveState.OPENING, 50), + ('{"position": 60}', ValveState.OPENING, 60), + ('{"position": 100, "state": "opening"}', ValveState.OPENING, 100), + ('{"position": 100, "state": null}', ValveState.OPEN, 100), + ('{"position": 90, "state": "closing"}', ValveState.CLOSING, 90), + ('{"position": 40}', ValveState.CLOSING, 40), + ('{"position": 0}', ValveState.CLOSED, 0), + ('{"position": 10}', 
ValveState.OPEN, 10), + ('{"position": 0, "state": "opening"}', ValveState.OPENING, 0), + ('{"position": 0, "state": "closing"}', ValveState.CLOSING, 0), + ('{"position": 0}', ValveState.CLOSED, 0), ] for message, asserted_state, valve_position in messages: @@ -416,19 +413,19 @@ async def test_invalid_state_updates( @pytest.mark.parametrize( ("message", "asserted_state", "valve_position"), [ - ("-128", STATE_CLOSED, 0), - ("0", STATE_OPEN, 50), - ("127", STATE_OPEN, 100), - ("-130", STATE_CLOSED, 0), - ("130", STATE_OPEN, 100), - ('{"position": -128, "state": "opening"}', STATE_OPENING, 0), - ('{"position": -30, "state": "opening"}', STATE_OPENING, 38), - ('{"position": 30, "state": "open"}', STATE_OPEN, 61), - ('{"position": 127, "state": "closing"}', STATE_CLOSING, 100), - ('{"position": 100, "state": "closing"}', STATE_CLOSING, 89), - ('{"position": -128, "state": "closed"}', STATE_CLOSED, 0), - ('{"position": -130, "state": "closed"}', STATE_CLOSED, 0), - ('{"position": 130, "state": "open"}', STATE_OPEN, 100), + ("-128", ValveState.CLOSED, 0), + ("0", ValveState.OPEN, 50), + ("127", ValveState.OPEN, 100), + ("-130", ValveState.CLOSED, 0), + ("130", ValveState.OPEN, 100), + ('{"position": -128, "state": "opening"}', ValveState.OPENING, 0), + ('{"position": -30, "state": "opening"}', ValveState.OPENING, 38), + ('{"position": 30, "state": "open"}', ValveState.OPEN, 61), + ('{"position": 127, "state": "closing"}', ValveState.CLOSING, 100), + ('{"position": 100, "state": "closing"}', ValveState.CLOSING, 89), + ('{"position": -128, "state": "closed"}', ValveState.CLOSED, 0), + ('{"position": -130, "state": "closed"}', ValveState.CLOSED, 0), + ('{"position": 130, "state": "open"}', ValveState.OPEN, 100), ], ) async def test_state_via_state_trough_position_with_alt_range( @@ -632,8 +629,8 @@ async def test_open_close_payload_config_not_allowed( @pytest.mark.parametrize( ("service", "asserted_message", "asserted_state"), [ - (SERVICE_CLOSE_VALVE, "CLOSE", STATE_CLOSED), - (SERVICE_OPEN_VALVE, "OPEN", STATE_OPEN), + (SERVICE_CLOSE_VALVE, "CLOSE", ValveState.CLOSED), + (SERVICE_OPEN_VALVE, "OPEN", ValveState.OPEN), ], ) async def test_controlling_valve_by_state_optimistic( @@ -782,9 +779,9 @@ async def test_controlling_valve_by_set_valve_position( @pytest.mark.parametrize( ("position", "asserted_message", "asserted_position", "asserted_state"), [ - (0, "0", 0, STATE_CLOSED), - (30, "30", 30, STATE_OPEN), - (100, "100", 100, STATE_OPEN), + (0, "0", 0, ValveState.CLOSED), + (30, "30", 30, ValveState.OPEN), + (100, "100", 100, ValveState.OPEN), ], ) async def test_controlling_valve_optimistic_by_set_valve_position( @@ -947,8 +944,8 @@ async def test_controlling_valve_with_alt_range_by_position( @pytest.mark.parametrize( ("service", "asserted_message", "asserted_state", "asserted_position"), [ - (SERVICE_CLOSE_VALVE, "0", STATE_CLOSED, 0), - (SERVICE_OPEN_VALVE, "100", STATE_OPEN, 100), + (SERVICE_CLOSE_VALVE, "0", ValveState.CLOSED, 0), + (SERVICE_OPEN_VALVE, "100", ValveState.OPEN, 100), ], ) async def test_controlling_valve_by_position_optimistic( @@ -1004,10 +1001,10 @@ async def test_controlling_valve_by_position_optimistic( @pytest.mark.parametrize( ("position", "asserted_message", "asserted_position", "asserted_state"), [ - (0, "-128", 0, STATE_CLOSED), - (30, "-52", 30, STATE_OPEN), - (50, "0", 50, STATE_OPEN), - (100, "127", 100, STATE_OPEN), + (0, "-128", 0, ValveState.CLOSED), + (30, "-52", 30, ValveState.OPEN), + (50, "0", 50, ValveState.OPEN), + (100, "127", 100, ValveState.OPEN), ], ) 
async def test_controlling_valve_optimistic_alt_range_by_set_valve_position( diff --git a/tests/components/mqtt/test_water_heater.py b/tests/components/mqtt/test_water_heater.py index 7bab4a5e233..02ae54c1a85 100644 --- a/tests/components/mqtt/test_water_heater.py +++ b/tests/components/mqtt/test_water_heater.py @@ -162,7 +162,7 @@ async def test_set_operation_mode_bad_attr_and_state( state = hass.states.get(ENTITY_WATER_HEATER) assert state.state == "off" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_operation_mode(hass, None, ENTITY_WATER_HEATER) + await common.async_set_operation_mode(hass, None, ENTITY_WATER_HEATER) # type:ignore[arg-type] assert "string value is None for dictionary value @ data['operation_mode']" in str( excinfo.value ) diff --git a/tests/components/music_assistant/__init__.py b/tests/components/music_assistant/__init__.py new file mode 100644 index 00000000000..6893b862e2d --- /dev/null +++ b/tests/components/music_assistant/__init__.py @@ -0,0 +1 @@ +"""The tests for the Music Assistant component.""" diff --git a/tests/components/music_assistant/common.py b/tests/components/music_assistant/common.py new file mode 100644 index 00000000000..c8293b5622f --- /dev/null +++ b/tests/components/music_assistant/common.py @@ -0,0 +1,159 @@ +"""Provide common test tools.""" + +from __future__ import annotations + +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +from music_assistant_models.enums import EventType +from music_assistant_models.media_items import Album, Artist, Playlist, Radio, Track +from music_assistant_models.player import Player +from music_assistant_models.player_queue import PlayerQueue +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, load_json_object_fixture + +MASS_DOMAIN = "music_assistant" +MOCK_URL = "http://mock-music_assistant-server-url" + + +def load_and_parse_fixture(fixture: str) -> dict[str, Any]: + """Load and parse a fixture.""" + data = load_json_object_fixture(f"music_assistant/{fixture}.json") + return data[fixture] + + +async def setup_integration_from_fixtures( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Set up MusicAssistant integration with fixture data.""" + players = create_players_from_fixture() + music_assistant_client.players._players = {x.player_id: x for x in players} + player_queues = create_player_queues_from_fixture() + music_assistant_client.player_queues._queues = { + x.queue_id: x for x in player_queues + } + config_entry = MockConfigEntry( + domain=MASS_DOMAIN, + data={"url": MOCK_URL}, + unique_id=music_assistant_client.server_info.server_id, + ) + music = music_assistant_client.music + library_artists = create_library_artists_from_fixture() + music.get_library_artists = AsyncMock(return_value=library_artists) + library_artist_albums = create_library_artist_albums_from_fixture() + music.get_artist_albums = AsyncMock(return_value=library_artist_albums) + library_albums = create_library_albums_from_fixture() + music.get_library_albums = AsyncMock(return_value=library_albums) + library_album_tracks = create_library_album_tracks_from_fixture() + music.get_album_tracks = AsyncMock(return_value=library_album_tracks) + library_tracks = create_library_tracks_from_fixture() + music.get_library_tracks = AsyncMock(return_value=library_tracks) + library_playlists = 
create_library_playlists_from_fixture() + music.get_library_playlists = AsyncMock(return_value=library_playlists) + library_playlist_tracks = create_library_playlist_tracks_from_fixture() + music.get_playlist_tracks = AsyncMock(return_value=library_playlist_tracks) + library_radios = create_library_radios_from_fixture() + music.get_library_radios = AsyncMock(return_value=library_radios) + music.get_item_by_uri = AsyncMock() + + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +def create_players_from_fixture() -> list[Player]: + """Create MA Players from fixture.""" + fixture_data = load_and_parse_fixture("players") + return [Player.from_dict(player_data) for player_data in fixture_data] + + +def create_player_queues_from_fixture() -> list[PlayerQueue]: + """Create MA PlayerQueues from fixture.""" + fixture_data = load_and_parse_fixture("player_queues") + return [ + PlayerQueue.from_dict(player_queue_data) for player_queue_data in fixture_data + ] + + +def create_library_albums_from_fixture() -> list[Album]: + """Create MA Albums from fixture.""" + fixture_data = load_and_parse_fixture("library_albums") + return [Album.from_dict(album_data) for album_data in fixture_data] + + +def create_library_album_tracks_from_fixture() -> list[Track]: + """Create MA Tracks from fixture.""" + fixture_data = load_and_parse_fixture("library_album_tracks") + return [Track.from_dict(track_data) for track_data in fixture_data] + + +def create_library_tracks_from_fixture() -> list[Track]: + """Create MA Tracks from fixture.""" + fixture_data = load_and_parse_fixture("library_tracks") + return [Track.from_dict(track_data) for track_data in fixture_data] + + +def create_library_artists_from_fixture() -> list[Artist]: + """Create MA Artists from fixture.""" + fixture_data = load_and_parse_fixture("library_artists") + return [Artist.from_dict(artist_data) for artist_data in fixture_data] + + +def create_library_artist_albums_from_fixture() -> list[Album]: + """Create MA Albums from fixture.""" + fixture_data = load_and_parse_fixture("library_artist_albums") + return [Album.from_dict(album_data) for album_data in fixture_data] + + +def create_library_playlists_from_fixture() -> list[Playlist]: + """Create MA Playlists from fixture.""" + fixture_data = load_and_parse_fixture("library_playlists") + return [Playlist.from_dict(playlist_data) for playlist_data in fixture_data] + + +def create_library_playlist_tracks_from_fixture() -> list[Track]: + """Create MA Tracks from fixture.""" + fixture_data = load_and_parse_fixture("library_playlist_tracks") + return [Track.from_dict(track_data) for track_data in fixture_data] + + +def create_library_radios_from_fixture() -> list[Radio]: + """Create MA Radios from fixture.""" + fixture_data = load_and_parse_fixture("library_radios") + return [Radio.from_dict(radio_data) for radio_data in fixture_data] + + +async def trigger_subscription_callback( + hass: HomeAssistant, + client: MagicMock, + event: EventType = EventType.PLAYER_UPDATED, + data: Any = None, +) -> None: + """Trigger a subscription callback.""" + # trigger callback on all subscribers + for sub in client.subscribe_events.call_args_list: + callback = sub.kwargs["callback"] + event_filter = sub.kwargs.get("event_filter") + if event_filter in (None, event): + callback(event, data) + await hass.async_block_till_done() + + +def snapshot_music_assistant_entities( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot:
SnapshotAssertion, + platform: Platform, +) -> None: + """Snapshot MusicAssistant entities.""" + entities = hass.states.async_all(platform) + for entity_state in entities: + entity_entry = entity_registry.async_get(entity_state.entity_id) + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert entity_state == snapshot(name=f"{entity_entry.entity_id}-state") diff --git a/tests/components/music_assistant/conftest.py b/tests/components/music_assistant/conftest.py new file mode 100644 index 00000000000..2df43defe62 --- /dev/null +++ b/tests/components/music_assistant/conftest.py @@ -0,0 +1,83 @@ +"""Music Assistant test fixtures.""" + +import asyncio +from collections.abc import AsyncGenerator, Generator +from unittest.mock import MagicMock, patch + +from music_assistant_client.music import Music +from music_assistant_client.player_queues import PlayerQueues +from music_assistant_client.players import Players +from music_assistant_models.api import ServerInfoMessage +import pytest + +from homeassistant.components.music_assistant.config_flow import CONF_URL +from homeassistant.components.music_assistant.const import DOMAIN + +from tests.common import AsyncMock, MockConfigEntry, load_fixture + +MOCK_SERVER_ID = "1234" + + +@pytest.fixture +def mock_get_server_info() -> Generator[AsyncMock]: + """Mock the function to get server info.""" + with patch( + "homeassistant.components.music_assistant.config_flow.get_server_info" + ) as mock_get_server_info: + mock_get_server_info.return_value = ServerInfoMessage.from_json( + load_fixture("server_info_message.json", DOMAIN) + ) + yield mock_get_server_info + + +@pytest.fixture(name="music_assistant_client") +async def music_assistant_client_fixture() -> AsyncGenerator[MagicMock]: + """Fixture for a Music Assistant client.""" + with patch( + "homeassistant.components.music_assistant.MusicAssistantClient", autospec=True + ) as client_class: + client = client_class.return_value + + async def connect() -> None: + """Mock connect.""" + await asyncio.sleep(0) + + async def listen(init_ready: asyncio.Event | None) -> None: + """Mock listen.""" + if init_ready is not None: + init_ready.set() + listen_block = asyncio.Event() + await listen_block.wait() + pytest.fail("Listen was not cancelled!") + + client.connect = AsyncMock(side_effect=connect) + client.start_listening = AsyncMock(side_effect=listen) + client.server_info = ServerInfoMessage( + server_id=MOCK_SERVER_ID, + server_version="0.0.0", + schema_version=1, + min_supported_schema_version=1, + base_url="http://localhost:8095", + homeassistant_addon=False, + onboard_done=True, + ) + client.connection = MagicMock() + client.connection.connected = True + client.players = Players(client) + client.player_queues = PlayerQueues(client) + client.music = Music(client) + client.server_url = client.server_info.base_url + client.get_media_item_image_url = MagicMock(return_value=None) + + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Music Assistant", + data={CONF_URL: "http://localhost:8095"}, + unique_id="1234", + ) diff --git a/tests/components/music_assistant/fixtures/library_album_tracks.json b/tests/components/music_assistant/fixtures/library_album_tracks.json new file mode 100644 index 00000000000..562ee84fe35 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_album_tracks.json @@ -0,0 +1,364 @@ +{ + "library_album_tracks": [ + { + "item_id": "247", + 
"provider": "library", + "name": "Le Mirage", + "version": "", + "sort_name": "mirage, le", + "uri": "library://track/247", + "external_ids": [["isrc", "FR10S1794640"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "70953631", + "provider_domain": "tidal", + "provider_instance": "tidal--63Pkq9Aw", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/70953631", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "Dana Murray", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 35, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 352, + "artists": [ + { + "item_id": 195, + "provider": "library", + "name": "Dana Jean Phoenix", + "version": "", + "sort_name": "dana jean phoenix", + "uri": "library://artist/195", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 95, + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "", + "sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 1 + }, + { + "item_id": "362", + "provider": "library", + "name": "Rabbit in the Headlights", + "version": "", + "sort_name": "rabbit in the headlights", + "uri": "library://track/362", + "external_ids": [["isrc", "GBLFP1645070"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "70953636", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/70953636", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "Michael Oakley", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 34, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 253, + "artists": [ + { + "item_id": 90, + "provider": "library", + "name": "Michael Oakley", + "version": "", + "sort_name": "michael oakley", + "uri": "library://artist/90", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 95, + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "", + 
"sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 6 + }, + { + "item_id": "1", + "provider": "library", + "name": "1988 Girls", + "version": "", + "sort_name": "1988 girls", + "uri": "library://track/1", + "external_ids": [["isrc", "DEBL60768604"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "70953637", + "provider_domain": "tidal", + "provider_instance": "tidal--56X5qDS7", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/70953637", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "Kiez Beats", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 14, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 258, + "artists": [ + { + "item_id": 110, + "provider": "library", + "name": "Futurecop!", + "version": "", + "sort_name": "futurecop!", + "uri": "library://artist/110", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 95, + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "", + "sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 7 + }, + { + "item_id": "495", + "provider": "library", + "name": "Timmy Goes to Space", + "version": "", + "sort_name": "timmy goes to space", + "uri": "library://track/495", + "external_ids": [["isrc", "NO2D81710001"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "70953643", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/70953643", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "Jens Kristian Espevik", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 4, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 212, + 
"artists": [ + { + "item_id": 453, + "provider": "library", + "name": "Mr. Maen", + "version": "", + "sort_name": "mr. maen", + "uri": "library://artist/453", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 95, + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "", + "sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 13 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_albums.json b/tests/components/music_assistant/fixtures/library_albums.json new file mode 100644 index 00000000000..6936a96adc8 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_albums.json @@ -0,0 +1,148 @@ +{ + "library_albums": [ + { + "item_id": "396", + "provider": "library", + "name": "Synth Punk EP", + "version": "", + "sort_name": "synth punk ep", + "uri": "library://album/396", + "external_ids": [["barcode", "872133626743"]], + "media_type": "album", + "provider_mappings": [ + { + "item_id": "48563817", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/album/48563817", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/99c8bc2f/ed43/4fb2/adfb/e7e3157089d2/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "586446 Records DK", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 7, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null, + "year": 2015, + "artists": [ + { + "item_id": 289, + "provider": "library", + "name": "A Space Love Adventure", + "version": "", + "sort_name": "space love adventure, a", + "uri": "library://artist/289", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album_type": "ep" + }, + { + "item_id": "95", + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "The 80S Revival", + "sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [["barcode", "3614974086112"]], + "media_type": "album", + "provider_mappings": [ + { + "item_id": "70953630", + "provider_domain": "tidal", + "provider_instance": "tidal--56X5qDS7", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/album/70953630", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": 
null, + "copyright": "Kiez Beats", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 43, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null, + "year": 2017, + "artists": [ + { + "item_id": 96, + "provider": "library", + "name": "Various Artists", + "version": "", + "sort_name": "various artists", + "uri": "library://artist/96", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album_type": "compilation" + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_artist_albums.json b/tests/components/music_assistant/fixtures/library_artist_albums.json new file mode 100644 index 00000000000..31885528734 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_artist_albums.json @@ -0,0 +1,88 @@ +{ + "library_artist_albums": [ + { + "item_id": "115", + "provider": "library", + "name": "A Sea of Stars", + "version": "", + "sort_name": "sea of stars, a", + "uri": "library://album/115", + "external_ids": [["barcode", "859741010126"]], + "media_type": "album", + "provider_mappings": [ + { + "item_id": "157401232", + "provider_domain": "tidal", + "provider_instance": "tidal--56X5qDS7", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/album/157401232", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/f55c749b/6642/40e3/a291/ff01fd2915cf/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "2021 NRW Records, under exclusive license to NewRetroWave, LLC", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 0, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null, + "year": 2021, + "artists": [ + { + "item_id": 127, + "provider": "library", + "name": "W O L F C L U B", + "version": "", + "sort_name": "w o l f c l u b", + "uri": "library://artist/127", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + }, + { + "item_id": 128, + "provider": "library", + "name": "Dora Pereli", + "version": "", + "sort_name": "dora pereli", + "uri": "library://artist/128", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album_type": "single" + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_artists.json b/tests/components/music_assistant/fixtures/library_artists.json new file mode 100644 index 00000000000..803ce003b6c --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_artists.json @@ -0,0 +1,60 @@ +{ + "library_artists": [ + { + "item_id": "127", + "provider": "library", + "name": "W O L F C L U B", + "version": "", + "sort_name": "w o l f c l u b", + "uri": "library://artist/127", + "external_ids": [], + "media_type": "artist", + "provider_mappings": [ + { + "item_id": "8741977", + "provider_domain": "tidal", + "provider_instance": "tidal--56X5qDS7", + "available": 1, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "?", + "bit_rate": 0 + 
}, + "url": "https://tidal.com/artist/8741977", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/1e01cdb6/f15d/4d8b/8440/a047976c1cac/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": null, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_playlist_tracks.json b/tests/components/music_assistant/fixtures/library_playlist_tracks.json new file mode 100644 index 00000000000..1fb1c330957 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_playlist_tracks.json @@ -0,0 +1,262 @@ +{ + "library_playlist_tracks": [ + { + "item_id": "77616130", + "provider": "tidal--Ah76MuMg", + "name": "Won't Get Fooled Again", + "version": "", + "sort_name": "won't get fooled again", + "uri": "tidal--Ah76MuMg://track/77616130", + "external_ids": [["isrc", "GBUM71405419"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "77616130", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": true, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 24, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/77616130", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/3496a8ad/ea69/4d7e/bbda/045417ab59e1/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 1971 Polydor Ltd. 
(UK)", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 30, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": 0, + "duration": 516, + "artists": [ + { + "item_id": "24915", + "provider": "tidal--Ah76MuMg", + "name": "The Who", + "version": "", + "sort_name": "who, the", + "uri": "tidal--Ah76MuMg://artist/24915", + "external_ids": [], + "media_type": "artist", + "provider_mappings": [ + { + "item_id": "24915", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": true, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "?", + "bit_rate": 0 + }, + "url": "https://tidal.com/artist/24915", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/0f782232/18c8/40b7/bb13/91c6039e40e6/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": null, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null + } + ], + "album": { + "item_id": "77616121", + "provider": "tidal--Ah76MuMg", + "name": "Who's Next", + "version": "", + "sort_name": "who's next", + "uri": "tidal--Ah76MuMg://album/77616121", + "external_ids": [], + "media_type": "album", + "available": true, + "image": null + }, + "disc_number": 1, + "track_number": 9 + }, + { + "item_id": "153795", + "provider": "tidal--Ah76MuMg", + "name": "We're An American Band", + "version": "Remastered 2002", + "sort_name": "we're an american band", + "uri": "tidal--Ah76MuMg://track/153795", + "external_ids": [["isrc", "USCA20200334"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "153795", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": true, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/153795", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/a6d86e02/84c1/41f7/84f5/41be8571fc40/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2002 Capitol Records, LLC", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 48, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": 1, + "duration": 207, + "artists": [ + { + "item_id": "9380", + "provider": "tidal--Ah76MuMg", + "name": "Grand Funk Railroad", + "version": "", + "sort_name": "grand funk railroad", + "uri": "tidal--Ah76MuMg://artist/9380", + "external_ids": [], + "media_type": "artist", + "provider_mappings": [ + { + "item_id": "9380", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": true, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "?", + 
"bit_rate": 0 + }, + "url": "https://tidal.com/artist/9380", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/6535bf95/a06d/4d23/8262/604fa41d8126/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": null, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null + } + ], + "album": { + "item_id": "153794", + "provider": "tidal--Ah76MuMg", + "name": "We're An American Band (Expanded Edition / Remastered 2002)", + "version": "", + "sort_name": "we're an american band (expanded edition / remastered 2002)", + "uri": "tidal--Ah76MuMg://album/153794", + "external_ids": [], + "media_type": "album", + "available": true, + "image": null + }, + "disc_number": 1, + "track_number": 1 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_playlists.json b/tests/components/music_assistant/fixtures/library_playlists.json new file mode 100644 index 00000000000..7f88c5f3e24 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_playlists.json @@ -0,0 +1,63 @@ +{ + "library_playlists": [ + { + "item_id": "40", + "provider": "library", + "name": "1970s Rock Hits", + "version": "", + "sort_name": "1970s rock hits", + "uri": "library://playlist/40", + "external_ids": [], + "media_type": "playlist", + "provider_mappings": [ + { + "item_id": "30da0578-0ca0-4716-b66e-5f02bcd96702", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "?", + "bit_rate": 0 + }, + "url": "https://tidal.com/browse/playlist/30da0578-0ca0-4716-b66e-5f02bcd96702", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/95913801/41c1/4cc9/bf94/a0fba657bba5/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": null, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "owner": "TIDAL", + "is_editable": 0, + "cache_checksum": "2023-10-09 07: 09: 23.446000+00: 00" + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_radios.json b/tests/components/music_assistant/fixtures/library_radios.json new file mode 100644 index 00000000000..1a6a4666ce4 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_radios.json @@ -0,0 +1,66 @@ +{ + "library_radios": [ + { + "item_id": "1", + "provider": "library", + "name": "fm4 | ORF | HQ", + "version": "", + "sort_name": "fm4 | orf | hq", + "uri": "library://radio/1", + "external_ids": [], + "media_type": "radio", + "provider_mappings": [ + { + "item_id": "1e13ed4e-daa9-4728-8550-e08d89c1c8e7", + "provider_domain": "radiobrowser", + "provider_instance": "radiobrowser--FRc3pD3t", + "available": 1, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + 
"output_format_str": "?", + "bit_rate": 0 + }, + "url": null, + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://tubestatic.orf.at/mojo/1_3/storyserver//tube/fm4/images/touch-icon-iphone-retina.png", + "provider": "radiobrowser", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": [ + { + "type": "website", + "url": "https://fm4.orf.at/" + } + ], + "performers": null, + "preview": null, + "popularity": 166, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 172800 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_tracks.json b/tests/components/music_assistant/fixtures/library_tracks.json new file mode 100644 index 00000000000..c4ed83e9342 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_tracks.json @@ -0,0 +1,556 @@ +{ + "library_tracks": [ + { + "item_id": "456", + "provider": "library", + "name": "Tennessee Whiskey", + "version": "", + "sort_name": "tennessee whiskey", + "uri": "library://track/456", + "external_ids": [["isrc", "USUM71418088"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "44832786", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/44832786", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/4894ff62/9de2/4ed8/a7b9/69e217bbbdda/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2015 Mercury Records, a Division of UMG Recordings, Inc.", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 33, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 293, + "artists": [ + { + "item_id": 433, + "provider": "library", + "name": "Chris Stapleton", + "version": "", + "sort_name": "chris stapleton", + "uri": "library://artist/433", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 463, + "provider": "library", + "name": "Traveller", + "version": "", + "sort_name": "traveller", + "uri": "library://album/463", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/4894ff62/9de2/4ed8/a7b9/69e217bbbdda/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 3 + }, + { + "item_id": "467", + "provider": "library", + "name": "Thelma + Louise", + "version": "", + "sort_name": "thelma + louise", + "uri": "library://track/467", + "external_ids": [["isrc", "GBUM72104380"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "194027388", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 24, + "channels": 2, 
+ "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/194027388", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/04fc7c3c/b814/4855/874c/a2e456205b65/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2021 Virgin Records Limited", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 20, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 137, + "artists": [ + { + "item_id": 81, + "provider": "library", + "name": "Bastille", + "version": "", + "sort_name": "bastille", + "uri": "library://artist/81", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 471, + "provider": "library", + "name": "Thelma + Louise", + "version": "", + "sort_name": "thelma + louise", + "uri": "library://album/471", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/04fc7c3c/b814/4855/874c/a2e456205b65/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 1 + }, + { + "item_id": "485", + "provider": "library", + "name": "They Don't Care About Us", + "version": "", + "sort_name": "they don't care about us", + "uri": "library://track/485", + "external_ids": [["isrc", "USSM19500629"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "5279069", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 24, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/5279069", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/a2fa5815/851d/4d2d/b6a7/17a365c838f9/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "(P) 1995 MJJ Productions Inc.", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 27, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 284, + "artists": [ + { + "item_id": 30, + "provider": "library", + "name": "Michael Jackson", + "version": "", + "sort_name": "michael jackson", + "uri": "library://artist/30", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 486, + "provider": "library", + "name": "HIStory - PAST, PRESENT AND FUTURE - BOOK I", + "version": "", + "sort_name": "history - past, present and future - book i", + "uri": "library://album/486", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/a2fa5815/851d/4d2d/b6a7/17a365c838f9/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 2, + "track_number": 2 + }, + { + "item_id": "486", + "provider": "library", + 
"name": "They Don't Give A F**** About Us", + "version": "", + "sort_name": "they don't give a f**** about us", + "uri": "library://track/486", + "external_ids": [["isrc", "USIR10211795"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "44066854", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/44066854", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": true, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/6b7b2b58/5dc2/4d0c/8979/7b30bb779d6f/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2002 Amaru Entertainment, Inc., Under exclusive license to Interscope Records", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 34, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 306, + "artists": [ + { + "item_id": 159, + "provider": "library", + "name": "2Pac", + "version": "", + "sort_name": "2pac", + "uri": "library://artist/159", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + }, + { + "item_id": 451, + "provider": "library", + "name": "The Outlawz", + "version": "", + "sort_name": "outlawz, the", + "uri": "library://artist/451", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 487, + "provider": "library", + "name": "Better Dayz", + "version": "", + "sort_name": "better dayz", + "uri": "library://album/487", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/6b7b2b58/5dc2/4d0c/8979/7b30bb779d6f/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 2, + "track_number": 13 + }, + { + "item_id": "487", + "provider": "library", + "name": "Things We Lost In The Fire", + "version": "TORN Remix", + "sort_name": "things we lost in the fire", + "uri": "library://track/487", + "external_ids": [["isrc", "GBUM71304903"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "22627902", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/22627902", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/de277fd3/cc29/4d63/a60f/13b501c5f3d0/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2013 Virgin Records Limited", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 10, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 323, + "artists": [ + { + "item_id": 81, + "provider": "library", + "name": "Bastille", + 
"version": "", + "sort_name": "bastille", + "uri": "library://artist/81", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 488, + "provider": "library", + "name": "Things We Lost In The Fire", + "version": "", + "sort_name": "things we lost in the fire", + "uri": "library://album/488", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/de277fd3/cc29/4d63/a60f/13b501c5f3d0/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 3 + }, + { + "item_id": "488", + "provider": "library", + "name": "Those Nights", + "version": "", + "sort_name": "those nights", + "uri": "library://track/488", + "external_ids": [["isrc", "GBUM71803866"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "110750762", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 24, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/110750762", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/713805f3/c08c/4c0f/8199/d63e6badac0d/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2019 Virgin Records Limited", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 21, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 270, + "artists": [ + { + "item_id": 81, + "provider": "library", + "name": "Bastille", + "version": "", + "sort_name": "bastille", + "uri": "library://artist/81", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 489, + "provider": "library", + "name": "Doom Days", + "version": "", + "sort_name": "doom days", + "uri": "library://album/489", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/713805f3/c08c/4c0f/8199/d63e6badac0d/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 10 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/player_queues.json b/tests/components/music_assistant/fixtures/player_queues.json new file mode 100644 index 00000000000..5251560365c --- /dev/null +++ b/tests/components/music_assistant/fixtures/player_queues.json @@ -0,0 +1,328 @@ +{ + "player_queues": [ + { + "queue_id": "00:00:00:00:00:01", + "active": false, + "display_name": "Test Player 1", + "available": true, + "items": 0, + "shuffle_enabled": false, + "repeat_mode": "off", + "dont_stop_the_music_enabled": false, + "current_index": null, + "index_in_buffer": null, + "elapsed_time": 0, + "elapsed_time_last_updated": 1730118302.163217, + "state": "idle", + "current_item": null, + "next_item": null, + "radio_source": [], + "flow_mode": false, + "resume_pos": 0 + }, + { + "queue_id": "00:00:00:00:00:02", + "active": false, + "display_name": "My Super Test Player 2", + "available": true, + "items": 0, + "shuffle_enabled": false, + 
"repeat_mode": "off", + "dont_stop_the_music_enabled": false, + "current_index": null, + "index_in_buffer": null, + "elapsed_time": 0, + "elapsed_time_last_updated": 0, + "state": "idle", + "current_item": null, + "next_item": null, + "radio_source": [], + "flow_mode": false, + "resume_pos": 0 + }, + { + "queue_id": "test_group_player_1", + "active": true, + "display_name": "Test Group Player 1", + "available": true, + "items": 1094, + "shuffle_enabled": true, + "repeat_mode": "all", + "dont_stop_the_music_enabled": true, + "current_index": 26, + "index_in_buffer": 26, + "elapsed_time": 232.08810877799988, + "elapsed_time_last_updated": 1730313109.5659513, + "state": "playing", + "current_item": { + "queue_id": "test_group_player_1", + "queue_item_id": "5d95dc5be77e4f7eb4939f62cfef527b", + "name": "Guns N' Roses - November Rain", + "duration": 536, + "sort_index": 2109, + "streamdetails": { + "provider": "spotify", + "item_id": "3YRCqOhFifThpSRFJ1VWFM", + "audio_format": { + "content_type": "ogg", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "ogg", + "bit_rate": 0 + }, + "media_type": "track", + "stream_type": "custom", + "stream_title": null, + "duration": 536, + "size": null, + "can_seek": true, + "loudness": -12.47, + "loudness_album": null, + "prefer_album_loudness": false, + "volume_normalization_mode": "fallback_dynamic", + "target_loudness": -17, + "strip_silence_begin": false, + "strip_silence_end": true, + "stream_error": null + }, + "media_item": { + "item_id": "3YRCqOhFifThpSRFJ1VWFM", + "provider": "spotify", + "name": "November Rain", + "version": "", + "sort_name": "november rain", + "uri": "spotify://track/3YRCqOhFifThpSRFJ1VWFM", + "external_ids": [["isrc", "USGF19141510"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "3YRCqOhFifThpSRFJ1VWFM", + "provider_domain": "spotify", + "provider_instance": "spotify", + "available": true, + "audio_format": { + "content_type": "ogg", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "ogg", + "bit_rate": 320 + }, + "url": "https://open.spotify.com/track/3YRCqOhFifThpSRFJ1VWFM", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://i.scdn.co/image/ab67616d0000b273e44963b8bb127552ac761873", + "provider": "spotify", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "chapters": null, + "performers": null, + "preview": "https://p.scdn.co/mp3-preview/98deb9c370bbaa350be058b3470fbe3bc1e28d9d?cid=2eb96f9b37494be1824999d58028a305", + "popularity": 77, + "last_refresh": null + }, + "favorite": false, + "position": 1372, + "duration": 536, + "artists": [ + { + "item_id": "3qm84nBOXUEQ2vnTfUTTFC", + "provider": "spotify", + "name": "Guns N' Roses", + "version": "", + "sort_name": "guns n' roses", + "uri": "spotify://artist/3qm84nBOXUEQ2vnTfUTTFC", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": "0CxPbTRARqKUYighiEY9Sz", + "provider": "spotify", + "name": "Use Your Illusion I", + "version": "", + "sort_name": "use your illusion i", + "uri": "spotify://album/0CxPbTRARqKUYighiEY9Sz", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://i.scdn.co/image/ab67616d0000b273e44963b8bb127552ac761873", + 
"provider": "spotify", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 10 + }, + "image": { + "type": "thumb", + "path": "https://i.scdn.co/image/ab67616d0000b273e44963b8bb127552ac761873", + "provider": "spotify", + "remotely_accessible": true + }, + "index": 0 + }, + "next_item": { + "queue_id": "test_group_player_1", + "queue_item_id": "990ae8f29cdf4fb588d679b115621f55", + "name": "The Stranglers - Golden Brown", + "duration": 207, + "sort_index": 1138, + "streamdetails": { + "provider": "qobuz", + "item_id": "1004735", + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "media_type": "track", + "stream_type": "http", + "stream_title": null, + "duration": 207, + "size": null, + "can_seek": true, + "loudness": -14.23, + "loudness_album": null, + "prefer_album_loudness": true, + "volume_normalization_mode": "fallback_dynamic", + "target_loudness": -17, + "strip_silence_begin": true, + "strip_silence_end": true, + "stream_error": null + }, + "media_item": { + "item_id": "1004735", + "provider": "qobuz", + "name": "Golden Brown", + "version": "", + "sort_name": "golden brown", + "uri": "qobuz://track/1004735", + "external_ids": [["isrc", "GBAYE8100053"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "1004735", + "provider_domain": "qobuz", + "provider_instance": "qobuz", + "available": true, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://open.qobuz.com/track/1004735", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://static.qobuz.com/images/covers/59/88/0724353468859_600.jpg", + "provider": "qobuz", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "© 2001 Parlophone Records Ltd, a Warner Music Group Company ℗ 1981 Parlophone Records Ltd, a Warner Music Group Company", + "lyrics": null, + "label": null, + "links": null, + "chapters": null, + "performers": [ + "Dave Greenfield, Composer, Producer, Keyboards, Vocals", + "Jean", + "Hugh Cornwell, Composer, Producer, Guitar, Vocals", + "Jean Jacques Burnel, Producer, Bass Guitar, Vocals", + "Jet Black, Composer, Producer, Drums, Percussion", + "Jacques Burnell, Composer", + "The Stranglers, MainArtist" + ], + "preview": null, + "popularity": null, + "last_refresh": null + }, + "favorite": false, + "position": 183, + "duration": 207, + "artists": [ + { + "item_id": "26779", + "provider": "qobuz", + "name": "The Stranglers", + "version": "", + "sort_name": "stranglers, the", + "uri": "qobuz://artist/26779", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": "0724353468859", + "provider": "qobuz", + "name": "La Folie", + "version": "", + "sort_name": "folie, la", + "uri": "qobuz://album/0724353468859", + "external_ids": [["barcode", "0724353468859"]], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://static.qobuz.com/images/covers/59/88/0724353468859_600.jpg", + "provider": "qobuz", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 9 + }, + "image": { + "type": "thumb", + "path": "https://static.qobuz.com/images/covers/59/88/0724353468859_600.jpg", + "provider": 
"qobuz", + "remotely_accessible": true + }, + "index": 0 + }, + "radio_source": [], + "flow_mode": false, + "resume_pos": 0 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/players.json b/tests/components/music_assistant/fixtures/players.json new file mode 100644 index 00000000000..2d8b88d0e8e --- /dev/null +++ b/tests/components/music_assistant/fixtures/players.json @@ -0,0 +1,149 @@ +{ + "players": [ + { + "player_id": "00:00:00:00:00:01", + "provider": "test", + "type": "player", + "name": "Test Player 1", + "available": true, + "powered": false, + "device_info": { + "model": "Test Model", + "address": "192.168.1.1", + "manufacturer": "Test Manufacturer" + }, + "supported_features": [ + "volume_set", + "volume_mute", + "pause", + "set_members", + "power", + "enqueue" + ], + "elapsed_time": 0, + "elapsed_time_last_updated": 0, + "state": "idle", + "volume_level": 20, + "volume_muted": false, + "group_childs": [], + "active_source": "00:00:00:00:00:01", + "active_group": null, + "current_media": null, + "synced_to": null, + "enabled_by_default": true, + "needs_poll": false, + "poll_interval": 30, + "enabled": true, + "hidden": false, + "icon": "mdi-speaker", + "group_volume": 20, + "display_name": "Test Player 1", + "extra_data": {}, + "announcement_in_progress": false + }, + { + "player_id": "00:00:00:00:00:02", + "provider": "test", + "type": "player", + "name": "Test Player 2", + "available": true, + "powered": true, + "device_info": { + "model": "Test Model", + "address": "192.168.1.2", + "manufacturer": "Test Manufacturer" + }, + "supported_features": [ + "volume_set", + "volume_mute", + "pause", + "set_members", + "power", + "enqueue" + ], + "elapsed_time": 0, + "elapsed_time_last_updated": 0, + "state": "playing", + "volume_level": 20, + "volume_muted": false, + "group_childs": [], + "active_source": "spotify", + "active_group": null, + "current_media": { + "uri": "spotify://track/5d95dc5be77e4f7eb4939f62cfef527b", + "media_type": "track", + "title": "Test Track", + "artist": "Test Artist", + "album": "Test Album", + "image_url": null, + "duration": 300, + "queue_id": null, + "queue_item_id": null, + "custom_data": null + }, + "synced_to": null, + "enabled_by_default": true, + "needs_poll": false, + "poll_interval": 30, + "enabled": true, + "hidden": false, + "icon": "mdi-speaker", + "group_volume": 20, + "display_name": "My Super Test Player 2", + "extra_data": {}, + "announcement_in_progress": false + }, + { + "player_id": "test_group_player_1", + "provider": "player_group", + "type": "group", + "name": "Test Group Player 1", + "available": true, + "powered": true, + "device_info": { + "model": "Sync Group", + "address": "", + "manufacturer": "Test" + }, + "supported_features": [ + "volume_set", + "volume_mute", + "pause", + "set_members", + "power", + "enqueue" + ], + "elapsed_time": 0.0, + "elapsed_time_last_updated": 1730315437.9904983, + "state": "idle", + "volume_level": 6, + "volume_muted": false, + "group_childs": ["00:00:00:00:00:01", "00:00:00:00:00:02"], + "active_source": "test_group_player_1", + "active_group": null, + "current_media": { + "uri": "http://192.168.1.1:8097/single/test_group_player_1/5d95dc5be77e4f7eb4939f62cfef527b.flac?ts=1730313038", + "media_type": "unknown", + "title": null, + "artist": null, + "album": null, + "image_url": null, + "duration": null, + "queue_id": "test_group_player_1", + "queue_item_id": "5d95dc5be77e4f7eb4939f62cfef527b", + "custom_data": null + }, + "synced_to": null, + "enabled_by_default": true, + "needs_poll": 
true, + "poll_interval": 30, + "enabled": true, + "hidden": false, + "icon": "mdi-speaker-multiple", + "group_volume": 6, + "display_name": "Test Group Player 1", + "extra_data": {}, + "announcement_in_progress": false + } + ] +} diff --git a/tests/components/music_assistant/fixtures/server_info_message.json b/tests/components/music_assistant/fixtures/server_info_message.json new file mode 100644 index 00000000000..907ec8af820 --- /dev/null +++ b/tests/components/music_assistant/fixtures/server_info_message.json @@ -0,0 +1,9 @@ +{ + "server_id": "1234", + "server_version": "0.0.0", + "schema_version": 23, + "min_supported_schema_version": 23, + "base_url": "http://localhost:8095", + "homeassistant_addon": false, + "onboard_done": false +} diff --git a/tests/components/music_assistant/snapshots/test_media_player.ambr b/tests/components/music_assistant/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..e3d7a4a0cbc --- /dev/null +++ b/tests/components/music_assistant/snapshots/test_media_player.ambr @@ -0,0 +1,190 @@ +# serializer version: 1 +# name: test_media_player[media_player.my_super_test_player_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.my_super_test_player_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:speaker', + 'original_name': None, + 'platform': 'music_assistant', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_media_player[media_player.my_super_test_player_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'active_queue': None, + 'app_id': 'spotify', + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'My Super Test Player 2', + 'group_members': list([ + ]), + 'icon': 'mdi:speaker', + 'is_volume_muted': False, + 'mass_player_type': 'player', + 'media_album_name': 'Test Album', + 'media_artist': 'Test Artist', + 'media_content_id': 'spotify://track/5d95dc5be77e4f7eb4939f62cfef527b', + 'media_content_type': , + 'media_duration': 300, + 'media_position': 0, + 'media_title': 'Test Track', + 'supported_features': , + 'volume_level': 0.2, + }), + 'context': , + 'entity_id': 'media_player.my_super_test_player_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_media_player[media_player.test_group_player_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.test_group_player_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:speaker-multiple', + 'original_name': None, + 'platform': 'music_assistant', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'test_group_player_1', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_media_player[media_player.test_group_player_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'active_queue': 'test_group_player_1', + 'app_id': 'music_assistant', + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Test Group Player 1', + 'group_members': list([ + 'media_player.my_super_test_player_2', + 'media_player.test_player_1', + ]), + 'icon': 'mdi:speaker-multiple', + 'is_volume_muted': False, + 'mass_player_type': 'group', + 'media_album_name': 'Use Your Illusion I', + 'media_artist': "Guns N' Roses", + 'media_content_id': 'spotify://track/3YRCqOhFifThpSRFJ1VWFM', + 'media_content_type': , + 'media_duration': 536, + 'media_position': 232, + 'media_position_updated_at': datetime.datetime(2024, 10, 30, 18, 31, 49, 565951, tzinfo=datetime.timezone.utc), + 'media_title': 'November Rain', + 'repeat': 'all', + 'shuffle': True, + 'supported_features': , + 'volume_level': 0.06, + }), + 'context': , + 'entity_id': 'media_player.test_group_player_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_media_player[media_player.test_player_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.test_player_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:speaker', + 'original_name': None, + 'platform': 'music_assistant', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_media_player[media_player.test_player_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'active_queue': '00:00:00:00:00:01', + 'device_class': 'speaker', + 'friendly_name': 'Test Player 1', + 'group_members': list([ + ]), + 'icon': 'mdi:speaker', + 'mass_player_type': 'player', + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.test_player_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/music_assistant/test_config_flow.py b/tests/components/music_assistant/test_config_flow.py new file mode 100644 index 00000000000..c700060889c --- /dev/null +++ b/tests/components/music_assistant/test_config_flow.py @@ -0,0 +1,217 @@ +"""Define tests for the Music Assistant Integration config flow.""" + +from copy import deepcopy +from ipaddress import ip_address +from unittest import mock +from unittest.mock import AsyncMock + +from music_assistant_client.exceptions import ( + CannotConnect, + InvalidServerVersion, + MusicAssistantClientException, +) +from music_assistant_models.api import ServerInfoMessage +import pytest + +from homeassistant.components.music_assistant.config_flow import CONF_URL +from homeassistant.components.music_assistant.const import DEFAULT_NAME, DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry, load_fixture + +SERVER_INFO = { + "server_id": "1234", + "base_url": 
"http://localhost:8095", + "server_version": "0.0.0", + "schema_version": 23, + "min_supported_schema_version": 23, + "homeassistant_addon": True, +} + +ZEROCONF_DATA = ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + hostname="mock_hostname", + port=None, + type=mock.ANY, + name=mock.ANY, + properties=SERVER_INFO, +) + + +async def test_full_flow( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == { + CONF_URL: "http://localhost:8095", + } + assert result["result"].unique_id == "1234" + + +async def test_zero_conf_flow( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test zeroconf flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DATA, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == { + CONF_URL: "http://localhost:8095", + } + assert result["result"].unique_id == "1234" + + +async def test_zero_conf_missing_server_id( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test zeroconf flow with missing server id.""" + bad_zero_conf_data = deepcopy(ZEROCONF_DATA) + bad_zero_conf_data.properties.pop("server_id") + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=bad_zero_conf_data, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "missing_server_id" + + +async def test_duplicate_user( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate user flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_duplicate_zeroconf( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate zeroconf flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DATA, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error_message"), + [ + (InvalidServerVersion("invalid_server_version"), 
"invalid_server_version"), + (CannotConnect("cannot_connect"), "cannot_connect"), + (MusicAssistantClientException("unknown"), "unknown"), + ], +) +async def test_flow_user_server_version_invalid( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, + exception: MusicAssistantClientException, + error_message: str, +) -> None: + """Test user flow when server url is invalid.""" + mock_get_server_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + await hass.async_block_till_done() + assert result["errors"] == {"base": error_message} + + mock_get_server_info.side_effect = None + mock_get_server_info.return_value = ServerInfoMessage.from_json( + load_fixture("server_info_message.json", DOMAIN) + ) + + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_flow_zeroconf_connect_issue( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test zeroconf flow when server connect be reached.""" + mock_get_server_info.side_effect = CannotConnect("cannot_connect") + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DATA, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" diff --git a/tests/components/music_assistant/test_media_browser.py b/tests/components/music_assistant/test_media_browser.py new file mode 100644 index 00000000000..96fd54962d8 --- /dev/null +++ b/tests/components/music_assistant/test_media_browser.py @@ -0,0 +1,65 @@ +"""Test Music Assistant media browser implementation.""" + +from unittest.mock import MagicMock + +import pytest + +from homeassistant.components.media_player import BrowseError, BrowseMedia, MediaType +from homeassistant.components.music_assistant.const import DOMAIN +from homeassistant.components.music_assistant.media_browser import ( + LIBRARY_ALBUMS, + LIBRARY_ARTISTS, + LIBRARY_PLAYLISTS, + LIBRARY_RADIO, + LIBRARY_TRACKS, + async_browse_media, +) +from homeassistant.core import HomeAssistant + +from .common import setup_integration_from_fixtures + + +@pytest.mark.parametrize( + ("media_content_id", "media_content_type", "expected"), + [ + (LIBRARY_PLAYLISTS, MediaType.PLAYLIST, "library://playlist/40"), + (LIBRARY_ARTISTS, MediaType.ARTIST, "library://artist/127"), + (LIBRARY_ALBUMS, MediaType.ALBUM, "library://album/396"), + (LIBRARY_TRACKS, MediaType.TRACK, "library://track/486"), + (LIBRARY_RADIO, DOMAIN, "library://radio/1"), + ("artist", MediaType.ARTIST, "library://album/115"), + ("album", MediaType.ALBUM, "library://track/247"), + ("playlist", DOMAIN, "tidal--Ah76MuMg://track/77616130"), + (None, None, "artists"), + ], +) +async def test_browse_media_root( + hass: HomeAssistant, + music_assistant_client: MagicMock, + media_content_id: str, + media_content_type: str, + expected: str, +) -> None: + """Test the async_browse_media method.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = 
"media_player.test_player_1" + state = hass.states.get(entity_id) + assert state + browse_item: BrowseMedia = await async_browse_media( + hass, music_assistant_client, media_content_id, media_content_type + ) + assert browse_item.children[0].media_content_id == expected + + +async def test_browse_media_not_found( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test the async_browse_media method when media is not found.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + state = hass.states.get(entity_id) + assert state + + with pytest.raises(BrowseError, match="Media not found: unknown / unknown"): + await async_browse_media(hass, music_assistant_client, "unknown", "unknown") diff --git a/tests/components/music_assistant/test_media_player.py b/tests/components/music_assistant/test_media_player.py new file mode 100644 index 00000000000..13716b6a479 --- /dev/null +++ b/tests/components/music_assistant/test_media_player.py @@ -0,0 +1,585 @@ +"""Test Music Assistant media player entities.""" + +from unittest.mock import MagicMock, call + +from music_assistant_models.enums import MediaType, QueueOption +from music_assistant_models.media_items import Track +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.media_player import ( + ATTR_GROUP_MEMBERS, + ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, + ATTR_MEDIA_VOLUME_MUTED, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, + SERVICE_JOIN, + SERVICE_UNJOIN, +) +from homeassistant.components.music_assistant.const import DOMAIN as MASS_DOMAIN +from homeassistant.components.music_assistant.media_player import ( + ATTR_ALBUM, + ATTR_ANNOUNCE_VOLUME, + ATTR_ARTIST, + ATTR_AUTO_PLAY, + ATTR_MEDIA_ID, + ATTR_MEDIA_TYPE, + ATTR_RADIO_MODE, + ATTR_SOURCE_PLAYER, + ATTR_URL, + ATTR_USE_PRE_ANNOUNCE, + SERVICE_PLAY_ANNOUNCEMENT, + SERVICE_PLAY_MEDIA_ADVANCED, + SERVICE_TRANSFER_QUEUE, +) +from homeassistant.config_entries import HomeAssistantError +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_STOP, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import setup_integration_from_fixtures, snapshot_music_assistant_entities + +from tests.common import AsyncMock + +MOCK_TRACK = Track( + item_id="1", + provider="library", + name="Test Track", + provider_mappings={}, +) + + +async def test_media_player( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + music_assistant_client: MagicMock, +) -> None: + """Test media player.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + snapshot_music_assistant_entities( + hass, entity_registry, snapshot, Platform.MEDIA_PLAYER + ) + + +async def test_media_player_basic_actions( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity basic actions (play/stop/pause etc.).""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = 
hass.states.get(entity_id) + assert state + for action, cmd in ( + (SERVICE_MEDIA_PLAY, "play"), + (SERVICE_MEDIA_PAUSE, "pause"), + (SERVICE_MEDIA_STOP, "stop"), + (SERVICE_MEDIA_PREVIOUS_TRACK, "previous"), + (SERVICE_MEDIA_NEXT_TRACK, "next"), + (SERVICE_VOLUME_UP, "volume_up"), + (SERVICE_VOLUME_DOWN, "volume_down"), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + action, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + f"players/cmd/{cmd}", player_id=mass_player_id + ) + music_assistant_client.send_command.reset_mock() + + +async def test_media_player_seek_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity seek action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + "media_seek", + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_SEEK_POSITION: 100, + }, + blocking=True, + ) + + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/seek", player_id=mass_player_id, position=100 + ) + + +async def test_media_player_volume_set_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity volume_set action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_VOLUME_LEVEL: 0.5, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/volume_set", player_id=mass_player_id, volume_level=50 + ) + + +async def test_media_player_volume_mute_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity volume_mute action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_VOLUME_MUTED: True, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/volume_mute", player_id=mass_player_id, muted=True + ) + + +async def test_media_player_turn_on_off_actions( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity turn_on/turn_off actions.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + for action, pwr in ( + (SERVICE_TURN_ON, True), + (SERVICE_TURN_OFF, False), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + action, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert 
music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/power", player_id=mass_player_id, powered=pwr + ) + music_assistant_client.send_command.reset_mock() + + +async def test_media_player_shuffle_set_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity shuffle_set action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_SHUFFLE: True, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/shuffle", queue_id=mass_player_id, shuffle_enabled=True + ) + + +async def test_media_player_repeat_set_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity repeat_set action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_REPEAT: "one", + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/repeat", queue_id=mass_player_id, repeat_mode="one" + ) + + +async def test_media_player_join_players_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity join_players action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: entity_id, + ATTR_GROUP_MEMBERS: ["media_player.my_super_test_player_2"], + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/group_many", + target_player=mass_player_id, + child_player_ids=["00:00:00:00:00:02"], + ) + # test again with invalid source player + music_assistant_client.send_command.reset_mock() + with pytest.raises( + HomeAssistantError, match="Entity media_player.blah_blah not found" + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: entity_id, + ATTR_GROUP_MEMBERS: ["media_player.blah_blah"], + }, + blocking=True, + ) + + +async def test_media_player_unjoin_player_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity unjoin player action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_UNJOIN, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( 
+ "players/cmd/ungroup", player_id=mass_player_id + ) + + +async def test_media_player_clear_playlist_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity clear_playlist action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/clear", queue_id=mass_player_id + ) + + +async def test_media_player_play_media_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player (advanced) play_media action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + + # test simple play_media call with URI as media_id and no media type + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "spotify://track/1234", + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/play_media", + queue_id=mass_player_id, + media=["spotify://track/1234"], + option=None, + radio_mode=False, + start_item=None, + ) + + # test simple play_media call with URI and enqueue specified + music_assistant_client.send_command.reset_mock() + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "spotify://track/1234", + ATTR_MEDIA_ENQUEUE: "add", + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/play_media", + queue_id=mass_player_id, + media=["spotify://track/1234"], + option=QueueOption.ADD, + radio_mode=False, + start_item=None, + ) + + # test basic play_media call with URL and radio mode specified + music_assistant_client.send_command.reset_mock() + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "spotify://track/1234", + ATTR_RADIO_MODE: True, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/play_media", + queue_id=mass_player_id, + media=["spotify://track/1234"], + option=None, + radio_mode=True, + start_item=None, + ) + + # test play_media call with media id and media type specified + music_assistant_client.send_command.reset_mock() + music_assistant_client.music.get_item = AsyncMock(return_value=MOCK_TRACK) + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "1", + ATTR_MEDIA_TYPE: "track", + }, + blocking=True, + ) + assert music_assistant_client.music.get_item.call_count == 1 + assert music_assistant_client.music.get_item.call_args == call( + MediaType.TRACK, "1", "library" + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( 
+ "player_queues/play_media", + queue_id=mass_player_id, + media=[MOCK_TRACK.uri], + option=None, + radio_mode=False, + start_item=None, + ) + + # test play_media call by name + music_assistant_client.send_command.reset_mock() + music_assistant_client.music.get_item_by_name = AsyncMock(return_value=MOCK_TRACK) + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "test", + ATTR_ARTIST: "artist", + ATTR_ALBUM: "album", + }, + blocking=True, + ) + assert music_assistant_client.music.get_item_by_name.call_count == 1 + assert music_assistant_client.music.get_item_by_name.call_args == call( + name="test", + artist="artist", + album="album", + media_type=None, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/play_media", + queue_id=mass_player_id, + media=[MOCK_TRACK.uri], + option=None, + radio_mode=False, + start_item=None, + ) + + +async def test_media_player_play_announcement_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player play_announcement action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_ANNOUNCEMENT, + { + ATTR_ENTITY_ID: entity_id, + ATTR_URL: "http://blah.com/announcement.mp3", + ATTR_USE_PRE_ANNOUNCE: True, + ATTR_ANNOUNCE_VOLUME: 50, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/play_announcement", + player_id=mass_player_id, + url="http://blah.com/announcement.mp3", + use_pre_announce=True, + volume_level=50, + ) + + +async def test_media_player_transfer_queue_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player transfer_queu action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_TRANSFER_QUEUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_SOURCE_PLAYER: "media_player.my_super_test_player_2", + ATTR_AUTO_PLAY: True, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/transfer", + source_queue_id="00:00:00:00:00:02", + target_queue_id="00:00:00:00:00:01", + auto_play=True, + require_schema=25, + ) + # test again with invalid source player + music_assistant_client.send_command.reset_mock() + with pytest.raises(HomeAssistantError, match="Source player not available."): + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_TRANSFER_QUEUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_SOURCE_PLAYER: "media_player.blah_blah", + }, + blocking=True, + ) + # test again with no source player specified (which picks first playing playerqueue) + music_assistant_client.send_command.reset_mock() + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_TRANSFER_QUEUE, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/transfer", + 
source_queue_id="test_group_player_1", + target_queue_id="00:00:00:00:00:01", + auto_play=None, + require_schema=25, + ) diff --git a/tests/components/mysensors/conftest.py b/tests/components/mysensors/conftest.py index b6fce35a4c7..1d407815db0 100644 --- a/tests/components/mysensors/conftest.py +++ b/tests/components/mysensors/conftest.py @@ -141,7 +141,7 @@ async def integration_fixture( config: dict[str, Any] = {} config_entry.add_to_hass(hass) with patch( - "homeassistant.components.mysensors.device.Debouncer", autospec=True + "homeassistant.components.mysensors.entity.Debouncer", autospec=True ) as debouncer_class: def debouncer( diff --git a/tests/components/mysensors/test_cover.py b/tests/components/mysensors/test_cover.py index e056bff80fa..a063aa8f8d8 100644 --- a/tests/components/mysensors/test_cover.py +++ b/tests/components/mysensors/test_cover.py @@ -15,10 +15,7 @@ from homeassistant.components.cover import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.const import ATTR_BATTERY_LEVEL, ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -36,7 +33,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 assert state.attributes[ATTR_BATTERY_LEVEL] == 0 @@ -57,7 +54,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 transport_write.reset_mock() @@ -79,7 +76,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 50 transport_write.reset_mock() @@ -102,7 +99,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 75 receive_message("1;1;1;0;29;0\n") @@ -112,7 +109,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 transport_write.reset_mock() @@ -134,7 +131,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING assert state.attributes[ATTR_CURRENT_POSITION] == 50 receive_message("1;1;1;0;30;0\n") @@ -144,7 +141,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 transport_write.reset_mock() @@ -165,7 +162,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 25 @@ -181,7 +178,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED await hass.services.async_call( COVER_DOMAIN, @@ 
-200,7 +197,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING transport_write.reset_mock() @@ -220,7 +217,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN transport_write.reset_mock() @@ -241,7 +238,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING receive_message("1;1;1;0;29;0\n") receive_message("1;1;1;0;2;1\n") @@ -250,7 +247,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN transport_write.reset_mock() @@ -270,7 +267,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING receive_message("1;1;1;0;30;0\n") receive_message("1;1;1;0;2;0\n") @@ -279,4 +276,4 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED diff --git a/tests/components/myuplink/conftest.py b/tests/components/myuplink/conftest.py index 9ede11146ef..3ab186b61a8 100644 --- a/tests/components/myuplink/conftest.py +++ b/tests/components/myuplink/conftest.py @@ -15,10 +15,11 @@ from homeassistant.components.application_credentials import ( ) from homeassistant.components.myuplink.const import DOMAIN from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.setup import async_setup_component from homeassistant.util.json import json_loads -from .const import CLIENT_ID, CLIENT_SECRET +from .const import CLIENT_ID, CLIENT_SECRET, UNIQUE_ID from tests.common import MockConfigEntry, load_fixture @@ -33,7 +34,7 @@ def mock_expires_at() -> float: def mock_config_entry(hass: HomeAssistant, expires_at: float) -> MockConfigEntry: """Return the default mocked config entry.""" config_entry = MockConfigEntry( - version=1, + minor_version=2, domain=DOMAIN, title="myUplink test", data={ @@ -48,6 +49,7 @@ def mock_config_entry(hass: HomeAssistant, expires_at: float) -> MockConfigEntry }, }, entry_id="myuplink_test", + unique_id=UNIQUE_ID, ) config_entry.add_to_hass(hass) return config_entry @@ -189,3 +191,21 @@ async def setup_platform( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() yield + + +@pytest.fixture +async def access_token(hass: HomeAssistant) -> str: + """Return a valid access token.""" + return config_entry_oauth2_flow._encode_jwt( + hass, + { + "sub": UNIQUE_ID, + "aud": [], + "scp": [ + "WRITESYSTEM", + "READSYSTEM", + "offline_access", + ], + "ou_code": "NA", + }, + ) diff --git a/tests/components/myuplink/const.py b/tests/components/myuplink/const.py index 6001cb151c0..4cb6db952f1 100644 --- a/tests/components/myuplink/const.py +++ b/tests/components/myuplink/const.py @@ -2,3 +2,4 @@ CLIENT_ID = "12345" CLIENT_SECRET = "67890" +UNIQUE_ID = "uid" diff --git a/tests/components/myuplink/fixtures/device_points_nibe_f730.json b/tests/components/myuplink/fixtures/device_points_nibe_f730.json index 9ec5db0ea3b..0a61ab05f21 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_f730.json +++ 
b/tests/components/myuplink/fixtures/device_points_nibe_f730.json @@ -989,5 +989,124 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "147641", + "parameterName": "Start Wednesday", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:52:01+00:00", + "value": 0, + "strVal": "0", + "smartHomeCategories": [], + "minValue": 0, + "maxValue": 86400, + "stepValue": 900, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072", + "parameterName": "start diff additional heat", + "parameterUnit": "DM", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072r", + "parameterName": "r start diff additional heat", + "parameterUnit": "DM", + "writable": false, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47011", + "parameterName": "Heating offset climate system 1", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47007", + "parameterName": "Excluded", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "99000", + "parameterName": "Excluded 2", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": "Hello", + "strVal": "Hello", + "smartHomeCategories": [], + "minValue": "", + "maxValue": "", + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47398", + "parameterName": "Room sensor set point value heating climate system 1", + "parameterUnit": "°C", + "writable": true, + "timestamp": "2024-12-11T13:23:12+00:00", + "value": 14.5, + "strVal": "14.5°C", + "smartHomeCategories": [], + "minValue": 50.0, + "maxValue": 350.0, + "stepValue": 5.0, + "enumValues": [], + "scaleValue": "0.1", + "zoneId": null } ] diff --git a/tests/components/myuplink/snapshots/test_binary_sensor.ambr b/tests/components/myuplink/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..755cae3c623 --- /dev/null +++ b/tests/components/myuplink/snapshots/test_binary_sensor.ambr @@ -0,0 +1,326 @@ +# serializer version: 1 +# name: test_binary_sensor_states[binary_sensor.gotham_city_alarm-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_alarm', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alarm', + 'unique_id': '123456-7890-1234-has_alarm', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_alarm-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Gotham City Alarm', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_alarm', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_connectivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connectivity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-connection_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_connectivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Gotham City Connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_connectivity_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_connectivity_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connectivity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-connection_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_connectivity_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Gotham City Connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_connectivity_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_extern_adjustment_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 
'entity_id': 'binary_sensor.gotham_city_extern_adjustment_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Extern. adjust\xadment climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43161', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_extern_adjustment_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Extern. adjust\xadment climate system 1', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_extern_adjustment_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_extern_adjustment_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_extern_adjustment_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Extern. adjust\xadment climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43161', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_extern_adjustment_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Extern. 
adjust\xadment climate system 1', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_extern_adjustment_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_pump_heating_medium_gp1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_pump_heating_medium_gp1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pump: Heating medium (GP1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49995', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_pump_heating_medium_gp1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Pump: Heating medium (GP1)', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_pump_heating_medium_gp1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_pump_heating_medium_gp1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gotham_city_pump_heating_medium_gp1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pump: Heating medium (GP1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49995', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_states[binary_sensor.gotham_city_pump_heating_medium_gp1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Pump: Heating medium (GP1)', + }), + 'context': , + 'entity_id': 'binary_sensor.gotham_city_pump_heating_medium_gp1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/myuplink/snapshots/test_diagnostics.ambr b/tests/components/myuplink/snapshots/test_diagnostics.ambr index 9160fd3b365..6fe6becff11 100644 --- a/tests/components/myuplink/snapshots/test_diagnostics.ambr +++ b/tests/components/myuplink/snapshots/test_diagnostics.ambr @@ -1050,6 +1050,125 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "147641", + "parameterName": "Start Wednesday", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:52:01+00:00", + "value": 0, + "strVal": "0", + "smartHomeCategories": [], + "minValue": 0, + "maxValue": 86400, + "stepValue": 900, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072", + 
"parameterName": "start diff additional heat", + "parameterUnit": "DM", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072r", + "parameterName": "r start diff additional heat", + "parameterUnit": "DM", + "writable": false, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47011", + "parameterName": "Heating offset climate system 1", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47007", + "parameterName": "Excluded", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "99000", + "parameterName": "Excluded 2", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": "Hello", + "strVal": "Hello", + "smartHomeCategories": [], + "minValue": "", + "maxValue": "", + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47398", + "parameterName": "Room sensor set point value heating climate system 1", + "parameterUnit": "°C", + "writable": true, + "timestamp": "2024-12-11T13:23:12+00:00", + "value": 14.5, + "strVal": "14.5°C", + "smartHomeCategories": [], + "minValue": 50.0, + "maxValue": 350.0, + "stepValue": 5.0, + "enumValues": [], + "scaleValue": "0.1", + "zoneId": null } ] @@ -2093,6 +2212,125 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "147641", + "parameterName": "Start Wednesday", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:52:01+00:00", + "value": 0, + "strVal": "0", + "smartHomeCategories": [], + "minValue": 0, + "maxValue": 86400, + "stepValue": 900, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072", + "parameterName": "start diff additional heat", + "parameterUnit": "DM", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072r", + "parameterName": "r start diff additional heat", + "parameterUnit": "DM", + "writable": false, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": 
"47011", + "parameterName": "Heating offset climate system 1", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47007", + "parameterName": "Excluded", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "99000", + "parameterName": "Excluded 2", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": "Hello", + "strVal": "Hello", + "smartHomeCategories": [], + "minValue": "", + "maxValue": "", + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47398", + "parameterName": "Room sensor set point value heating climate system 1", + "parameterUnit": "°C", + "writable": true, + "timestamp": "2024-12-11T13:23:12+00:00", + "value": 14.5, + "strVal": "14.5°C", + "smartHomeCategories": [], + "minValue": 50.0, + "maxValue": 350.0, + "stepValue": 5.0, + "enumValues": [], + "scaleValue": "0.1", + "zoneId": null } ] diff --git a/tests/components/myuplink/snapshots/test_number.ambr b/tests/components/myuplink/snapshots/test_number.ambr new file mode 100644 index 00000000000..c47d3c60295 --- /dev/null +++ b/tests/components/myuplink/snapshots/test_number.ambr @@ -0,0 +1,445 @@ +# serializer version: 1 +# name: test_number_states[platforms0][number.gotham_city_degree_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_degree_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_degree_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_degree_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_degree_minutes_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 
'entity_id': 'number.gotham_city_degree_minutes_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_degree_minutes_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_degree_minutes_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating offset climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47011', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating offset climate system 1', + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating offset climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47011', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating offset climate system 1', + 'max': 10.0, + 'min': 
-10.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Room sensor set point value heating climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47398', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Room sensor set point value heating climate system 1', + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'context': , + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.5', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Room sensor set point value heating climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47398', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Room sensor set point value heating climate system 1', + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'context': , + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.5', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 10.0, + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_start_diff_additional_heat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City start diff additional heat', + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 10.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_start_diff_additional_heat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '700.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 10.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_start_diff_additional_heat_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City start diff additional heat', + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 10.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_start_diff_additional_heat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '700.0', + }) +# --- diff --git a/tests/components/myuplink/snapshots/test_select.ambr b/tests/components/myuplink/snapshots/test_select.ambr new file mode 100644 index 00000000000..eff06bc7f2d --- /dev/null +++ b/tests/components/myuplink/snapshots/test_select.ambr @@ -0,0 +1,119 @@ +# serializer version: 1 +# name: test_select_states[platforms0][select.gotham_city_comfort_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.gotham_city_comfort_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'comfort mode', + 'platform': 
'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041', + 'unit_of_measurement': None, + }) +# --- +# name: test_select_states[platforms0][select.gotham_city_comfort_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City comfort mode', + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'context': , + 'entity_id': 'select.gotham_city_comfort_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Economy', + }) +# --- +# name: test_select_states[platforms0][select.gotham_city_comfort_mode_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.gotham_city_comfort_mode_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'comfort mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041', + 'unit_of_measurement': None, + }) +# --- +# name: test_select_states[platforms0][select.gotham_city_comfort_mode_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City comfort mode', + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'context': , + 'entity_id': 'select.gotham_city_comfort_mode_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Economy', + }) +# --- diff --git a/tests/components/myuplink/snapshots/test_sensor.ambr b/tests/components/myuplink/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..a5469dc9a77 --- /dev/null +++ b/tests/components/myuplink/snapshots/test_sensor.ambr @@ -0,0 +1,4767 @@ +# serializer version: 1 +# name: test_sensor_states[sensor.gotham_city_average_outdoor_temp_bt1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_average_outdoor_temp_bt1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Average outdoor temp (BT1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40067', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_average_outdoor_temp_bt1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Average outdoor temp (BT1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_average_outdoor_temp_bt1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'-12.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_average_outdoor_temp_bt1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_average_outdoor_temp_bt1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Average outdoor temp (BT1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40067', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_average_outdoor_temp_bt1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Average outdoor temp (BT1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_average_outdoor_temp_bt1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-12.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_calculated_supply_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_calculated_supply_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Calculated supply climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43009', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_calculated_supply_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Calculated supply climate system 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_calculated_supply_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_calculated_supply_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_calculated_supply_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Calculated supply climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43009', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_calculated_supply_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Calculated supply climate system 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_calculated_supply_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_condenser_bt12-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_condenser_bt12', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Condenser (BT12)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40017', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_condenser_bt12-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Condenser (BT12)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_condenser_bt12', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_condenser_bt12_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_condenser_bt12_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Condenser (BT12)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40017', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_condenser_bt12_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Condenser (BT12)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_condenser_bt12_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40079', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40079', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE2)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40081', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE2)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be2_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.gotham_city_current_be2_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE2)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40081', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be2_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE2)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be2_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE3)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40083', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE3)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be3_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_be3_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current (BE3)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40083', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_be3_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gotham City Current (BE3)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_be3_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_compressor_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , 
+ }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_compressor_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-41778', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_compressor_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Current compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_compressor_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_compressor_frequency_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_compressor_frequency_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-41778', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_compressor_frequency_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Current compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_compressor_frequency_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_fan_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_fan_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current fan mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_mode', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43108', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_fan_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Current fan mode', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_fan_mode', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_fan_mode_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_fan_mode_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current fan mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_mode', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43108', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_fan_mode_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Current fan mode', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_fan_mode_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_hot_water_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_hot_water_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current hot water mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43109', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_hot_water_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Current hot water mode', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_hot_water_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_hot_water_mode_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_hot_water_mode_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current hot water mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43109', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_hot_water_mode_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Current hot water mode', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 
'sensor.gotham_city_current_hot_water_mode_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_outd_temp_bt1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_outd_temp_bt1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current outd temp (BT1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40004', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_outd_temp_bt1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Current outd temp (BT1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_outd_temp_bt1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-9.3', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_outd_temp_bt1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_current_outd_temp_bt1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current outd temp (BT1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40004', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_current_outd_temp_bt1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Current outd temp (BT1)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_current_outd_temp_bt1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-9.3', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_decrease_from_reference_value-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_decrease_from_reference_value', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Decrease from reference value', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43125', + 
'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_decrease_from_reference_value-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Decrease from reference value', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_decrease_from_reference_value', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-1.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_decrease_from_reference_value_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_decrease_from_reference_value_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Decrease from reference value', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43125', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_decrease_from_reference_value_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Decrease from reference value', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_decrease_from_reference_value_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-1.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_defrosting_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_defrosting_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Defrosting time', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43066', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_defrosting_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Gotham City Defrosting time', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_defrosting_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_defrosting_time_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_defrosting_time_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Defrosting time', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43066', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_defrosting_time_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Gotham City Defrosting time', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_defrosting_time_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_degree_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_degree_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_degree_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_degree_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_degree_minutes_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_degree_minutes_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_degree_minutes_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_degree_minutes_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_desired_humidity', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Desired humidity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-42770', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Desired humidity', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_desired_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_desired_humidity_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Desired humidity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49633', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Desired humidity', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_desired_humidity_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_desired_humidity_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Desired humidity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-42770', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Desired humidity', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_desired_humidity_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_desired_humidity_4', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Desired humidity', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49633', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_desired_humidity_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Desired humidity', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_desired_humidity_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_discharge_bt14-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_discharge_bt14', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Discharge (BT14)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40018', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_discharge_bt14-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Discharge (BT14)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_discharge_bt14', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '89.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_discharge_bt14_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_discharge_bt14_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Discharge (BT14)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40018', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_discharge_bt14_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Discharge (BT14)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_discharge_bt14_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '89.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_dt_inverter_exh_air_bt20-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_dt_inverter_exh_air_bt20', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'dT Inverter - exh air (BT20)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43146', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_dt_inverter_exh_air_bt20-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City dT Inverter - exh air (BT20)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_dt_inverter_exh_air_bt20', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_dt_inverter_exh_air_bt20_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_dt_inverter_exh_air_bt20_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'dT Inverter - exh air (BT20)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43146', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_dt_inverter_exh_air_bt20_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City dT Inverter - exh air (BT20)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_dt_inverter_exh_air_bt20_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_evaporator_bt16-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_evaporator_bt16', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Evaporator (BT16)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40020', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_evaporator_bt16-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Evaporator (BT16)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.gotham_city_evaporator_bt16', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-14.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_evaporator_bt16_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_evaporator_bt16_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Evaporator (BT16)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40020', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_evaporator_bt16_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Evaporator (BT16)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_evaporator_bt16_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-14.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_exhaust_air_bt20-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_exhaust_air_bt20', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Exhaust air (BT20)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40025', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_exhaust_air_bt20-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Exhaust air (BT20)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_exhaust_air_bt20', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.5', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_exhaust_air_bt20_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_exhaust_air_bt20_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Exhaust air (BT20)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40025', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor_states[sensor.gotham_city_exhaust_air_bt20_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Exhaust air (BT20)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_exhaust_air_bt20_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.5', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_extract_air_bt21-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_extract_air_bt21', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Extract air (BT21)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40026', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_extract_air_bt21-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Extract air (BT21)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_extract_air_bt21', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-12.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_extract_air_bt21_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_extract_air_bt21_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Extract air (BT21)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40026', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_extract_air_bt21_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Extract air (BT21)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_extract_air_bt21_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-12.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_heating_medium_pump_speed_gp1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_heating_medium_pump_speed_gp1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 
'Heating medium pump speed (GP1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43437', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_heating_medium_pump_speed_gp1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating medium pump speed (GP1)', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_heating_medium_pump_speed_gp1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '79', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_heating_medium_pump_speed_gp1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_heating_medium_pump_speed_gp1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating medium pump speed (GP1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43437', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_heating_medium_pump_speed_gp1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating medium pump speed (GP1)', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_heating_medium_pump_speed_gp1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '79', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_current_value_bt12_bt63-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charge_current_value_bt12_bt63', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water: charge current value ((BT12 | BT63))', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43116', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_current_value_bt12_bt63-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water: charge current value ((BT12 | BT63))', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charge_current_value_bt12_bt63', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_current_value_bt12_bt63_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charge_current_value_bt12_bt63_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water: charge current value ((BT12 | BT63))', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43116', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_current_value_bt12_bt63_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water: charge current value ((BT12 | BT63))', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charge_current_value_bt12_bt63_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_set_point_value-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charge_set_point_value', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water: charge set point value', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43115', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_set_point_value-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water: charge set point value', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charge_set_point_value', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charge_set_point_value_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charge_set_point_value_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water: charge set point value', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43115', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor_states[sensor.gotham_city_hot_water_charge_set_point_value_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water: charge set point value', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charge_set_point_value_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charging_bt6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charging_bt6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water charging (BT6)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40014', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charging_bt6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water charging (BT6)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charging_bt6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '44.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charging_bt6_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_charging_bt6_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water charging (BT6)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40014', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_charging_bt6_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water charging (BT6)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_charging_bt6_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '44.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_top_bt7-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_top_bt7', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water top (BT7)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40013', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_top_bt7-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water top (BT7)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_top_bt7', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '46', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_top_bt7_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_hot_water_top_bt7_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water top (BT7)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40013', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_hot_water_top_bt7_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Hot water top (BT7)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_hot_water_top_bt7_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '46', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Alarm', + 'Alarm', + 'Active', + 'Off', + 'Blocked', + 'Off', + 'Active', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_int_elec_add_heat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Int elec add heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49993', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Int elec add heat', + 'options': list([ + 'Alarm', + 'Alarm', + 'Active', + 'Off', + 'Blocked', + 'Off', + 'Active', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_int_elec_add_heat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Active', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_2-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Alarm', + 'Alarm', + 'Active', + 'Off', + 'Blocked', + 'Off', + 'Active', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Int elec add heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49993', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Int elec add heat', + 'options': list([ + 'Alarm', + 'Alarm', + 'Active', + 'Off', + 'Blocked', + 'Off', + 'Active', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Active', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_raw-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_raw', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Int elec add heat raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49993-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_raw-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Int elec add heat raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_raw', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_raw_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_raw_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Int elec add heat raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'elect_add', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49993-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_int_elec_add_heat_raw_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City 
Int elec add heat raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_int_elec_add_heat_raw_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_inverter_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_inverter_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inverter temperature', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43140', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_inverter_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Inverter temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_inverter_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_inverter_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_inverter_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inverter temperature', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43140', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_inverter_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Inverter temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_inverter_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_liquid_line_bt15-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_liquid_line_bt15', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Liquid line (BT15)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40019', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_liquid_line_bt15-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Liquid line (BT15)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_liquid_line_bt15', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_liquid_line_bt15_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_liquid_line_bt15_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Liquid line (BT15)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40019', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_liquid_line_bt15_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Liquid line (BT15)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_liquid_line_bt15_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_max_compressor_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_max_compressor_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43123', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_max_compressor_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Max compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_max_compressor_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_max_compressor_frequency_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_max_compressor_frequency_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43123', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_max_compressor_frequency_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Max compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_max_compressor_frequency_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_min_compressor_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_min_compressor_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Min compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43122', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_min_compressor_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Min compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_min_compressor_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_min_compressor_frequency_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_min_compressor_frequency_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Min compressor frequency', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43122', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_min_compressor_frequency_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gotham City Min compressor frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_min_compressor_frequency_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_bt29-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_oil_temperature_bt29', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oil temperature (BT29)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40146', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_bt29-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Oil temperature (BT29)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_oil_temperature_bt29', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_bt29_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_oil_temperature_bt29_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oil temperature (BT29)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40146', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_bt29_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Oil temperature (BT29)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_oil_temperature_bt29_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_ep15_bt29-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_oil_temperature_ep15_bt29', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oil temperature (EP15-BT29)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40145', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_ep15_bt29-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham 
City Oil temperature (EP15-BT29)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_oil_temperature_ep15_bt29', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_ep15_bt29_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_oil_temperature_ep15_bt29_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Oil temperature (EP15-BT29)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40145', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_oil_temperature_ep15_bt29_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Oil temperature (EP15-BT29)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_oil_temperature_ep15_bt29_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Off', + 'Hot water', + 'Heating', + 'Pool', + 'Pool 2', + 'Trans\xadfer', + 'Cooling', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_priority', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Priority', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'priority', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49994', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Priority', + 'options': list([ + 'Off', + 'Hot water', + 'Heating', + 'Pool', + 'Pool 2', + 'Trans\xadfer', + 'Cooling', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_priority', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Heating', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Off', + 'Hot water', + 'Heating', + 'Pool', + 'Pool 2', + 'Trans\xadfer', + 'Cooling', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_priority_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Priority', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'priority', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49994', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Priority', + 'options': list([ + 'Off', + 'Hot water', + 'Heating', + 'Pool', + 'Pool 2', + 'Trans\xadfer', + 'Cooling', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_priority_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Heating', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_raw-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_priority_raw', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Prior\xadity raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'priority', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49994-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_raw-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Prior\xadity raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_priority_raw', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_raw_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_priority_raw_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Prior\xadity raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'priority', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-49994-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_priority_raw_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Prior\xadity raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_priority_raw_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_r_start_diff_additional_heat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_r_start_diff_additional_heat', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'r start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072r', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_r_start_diff_additional_heat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City r start diff additional heat', + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_r_start_diff_additional_heat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '700', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_r_start_diff_additional_heat_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_r_start_diff_additional_heat_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'r start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072r', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_r_start_diff_additional_heat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City r start diff additional heat', + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_r_start_diff_additional_heat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '700', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_reference_air_speed_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_reference_air_speed_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reference, air speed sensor', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'airflow', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43124', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_reference_air_speed_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'Gotham City Reference, air speed sensor', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_reference_air_speed_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '127.6', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_reference_air_speed_sensor_2-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_reference_air_speed_sensor_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reference, air speed sensor', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'airflow', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43124', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_reference_air_speed_sensor_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'Gotham City Reference, air speed sensor', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_reference_air_speed_sensor_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '127.6', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_return_line_bt3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return line (BT3)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40012', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Return line (BT3)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_return_line_bt3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt3_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_return_line_bt3_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return line (BT3)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40012', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt3_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Return line (BT3)', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_return_line_bt3_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.4', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt62-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_return_line_bt62', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return line (BT62)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40048', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt62-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Return line (BT62)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_return_line_bt62', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt62_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_return_line_bt62_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return line (BT62)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40048', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_return_line_bt62_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Return line (BT62)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_return_line_bt62_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_room_temperature_bt50-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_room_temperature_bt50', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Room temperature (BT50)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40033', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_room_temperature_bt50-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Room temperature (BT50)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_room_temperature_bt50', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_room_temperature_bt50_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_room_temperature_bt50_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Room temperature (BT50)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40033', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_room_temperature_bt50_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Room temperature (BT50)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_room_temperature_bt50_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.2', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Off', + 'Starts', + 'Runs', + 'Stops', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_status_compressor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status compressor', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_compressor', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43427', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Status compressor', + 'options': list([ + 'Off', + 'Starts', + 'Runs', + 'Stops', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_status_compressor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Runs', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Off', + 'Starts', + 'Runs', + 'Stops', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.gotham_city_status_compressor_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status compressor', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_compressor', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43427', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gotham City Status compressor', + 'options': list([ + 'Off', + 'Starts', + 'Runs', + 'Stops', + ]), + }), + 'context': , + 'entity_id': 'sensor.gotham_city_status_compressor_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Runs', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_raw-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_status_compressor_raw', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status com\xadpressor raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_compressor', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43427-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_raw-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Status com\xadpressor raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_status_compressor_raw', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_raw_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_status_compressor_raw_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status com\xadpressor raw', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_compressor', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43427-raw', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_states[sensor.gotham_city_status_compressor_raw_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Status com\xadpressor raw', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_status_compressor_raw_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_suction_gas_bt17-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_suction_gas_bt17', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Suction gas (BT17)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40022', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_suction_gas_bt17-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Suction gas (BT17)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_suction_gas_bt17', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-1.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_suction_gas_bt17_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_suction_gas_bt17_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Suction gas (BT17)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40022', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_suction_gas_bt17_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Suction gas (BT17)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_suction_gas_bt17_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-1.1', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_supply_line_bt2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply line (BT2)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40008', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Supply line (BT2)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_supply_line_bt2', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '39.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt2_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_supply_line_bt2_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply line (BT2)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40008', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt2_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Supply line (BT2)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_supply_line_bt2_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '39.7', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt61-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_supply_line_bt61', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply line (BT61)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40047', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt61-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Supply line (BT61)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_supply_line_bt61', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt61_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_supply_line_bt61_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply line (BT61)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40047', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_states[sensor.gotham_city_supply_line_bt61_2-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gotham City Supply line (BT61)', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gotham_city_supply_line_bt61_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_time_factor_add_heat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_time_factor_add_heat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Time factor add heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43081', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_time_factor_add_heat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Time factor add heat', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_time_factor_add_heat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1686.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_time_factor_add_heat_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_time_factor_add_heat_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Time factor add heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-43081', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_time_factor_add_heat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Time factor add heat', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_time_factor_add_heat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1686.9', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_value_air_velocity_sensor_bs1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_value_air_velocity_sensor_bs1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Value, air velocity sensor (BS1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40050', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_value_air_velocity_sensor_bs1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Value, air velocity sensor (BS1)', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_value_air_velocity_sensor_bs1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '101.5', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_value_air_velocity_sensor_bs1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gotham_city_value_air_velocity_sensor_bs1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Value, air velocity sensor (BS1)', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40050', + 'unit_of_measurement': '', + }) +# --- +# name: test_sensor_states[sensor.gotham_city_value_air_velocity_sensor_bs1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Value, air velocity sensor (BS1)', + 'unit_of_measurement': '', + }), + 'context': , + 'entity_id': 'sensor.gotham_city_value_air_velocity_sensor_bs1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '101.5', + }) +# --- diff --git a/tests/components/myuplink/snapshots/test_switch.ambr b/tests/components/myuplink/snapshots/test_switch.ambr new file mode 100644 index 00000000000..5d621e661ee --- /dev/null +++ b/tests/components/myuplink/snapshots/test_switch.ambr @@ -0,0 +1,185 @@ +# serializer version: 1 +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_increased_ventilation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'In\xadcreased venti\xadlation', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost_ventilation', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50005', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City In\xadcreased venti\xadlation', + }), + 'context': , + 'entity_id': 'switch.gotham_city_increased_ventilation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_increased_ventilation_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'In\xadcreased venti\xadlation', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost_ventilation', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50005', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City In\xadcreased venti\xadlation', + }), + 'context': , + 'entity_id': 'switch.gotham_city_increased_ventilation_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_temporary_lux', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tempo\xadrary lux', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temporary_lux', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50004', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Tempo\xadrary lux', + }), + 'context': , + 'entity_id': 'switch.gotham_city_temporary_lux', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_temporary_lux_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tempo\xadrary lux', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temporary_lux', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50004', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Tempo\xadrary lux', + }), + 'context': , + 'entity_id': 'switch.gotham_city_temporary_lux_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/myuplink/test_binary_sensor.py b/tests/components/myuplink/test_binary_sensor.py index 128a4ebdde9..160530bcdab 100644 --- 
a/tests/components/myuplink/test_binary_sensor.py +++ b/tests/components/myuplink/test_binary_sensor.py @@ -1,57 +1,28 @@ -"""Tests for myuplink sensor module.""" +"""Tests for myuplink binary sensor module.""" -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch -import pytest +from syrupy import SnapshotAssertion -from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from . import setup_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform -# Test one entity from each of binary_sensor classes. -@pytest.mark.parametrize( - ("entity_id", "friendly_name", "test_attributes", "expected_state"), - [ - ( - "binary_sensor.gotham_city_pump_heating_medium_gp1", - "Gotham City Pump: Heating medium (GP1)", - True, - STATE_ON, - ), - ( - "binary_sensor.gotham_city_connectivity", - "Gotham City Connectivity", - False, - STATE_ON, - ), - ( - "binary_sensor.gotham_city_alarm", - "Gotham City Pump: Alarm", - False, - STATE_OFF, - ), - ], -) -async def test_sensor_states( +async def test_binary_sensor_states( hass: HomeAssistant, mock_myuplink_client: MagicMock, mock_config_entry: MockConfigEntry, - entity_id: str, - friendly_name: str, - test_attributes: bool, - expected_state: str, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, ) -> None: - """Test sensor state.""" - await setup_integration(hass, mock_config_entry) + """Test binary sensor state.""" - state = hass.states.get(entity_id) - assert state is not None - assert state.state == expected_state - if test_attributes: - assert state.attributes == { - "friendly_name": friendly_name, - } + with patch("homeassistant.components.myuplink.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/myuplink/test_config_flow.py b/tests/components/myuplink/test_config_flow.py index 3ae32575257..0b8d0dba17a 100644 --- a/tests/components/myuplink/test_config_flow.py +++ b/tests/components/myuplink/test_config_flow.py @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow -from .const import CLIENT_ID +from .const import CLIENT_ID, UNIQUE_ID from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -29,6 +29,7 @@ async def test_full_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + access_token: str, setup_credentials, ) -> None: """Check full flow.""" @@ -59,7 +60,7 @@ async def test_full_flow( OAUTH2_TOKEN, json={ "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", + "access_token": access_token, "type": "Bearer", "expires_in": 60, }, @@ -68,51 +69,70 @@ async def test_full_flow( with patch( f"homeassistant.components.{DOMAIN}.async_setup_entry", return_value=True ) as mock_setup: - await hass.config_entries.flow.async_configure(result["flow_id"]) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup.mock_calls) == 1 + assert result["data"]["auth_implementation"] == DOMAIN + 
assert result["data"]["token"]["refresh_token"] == "mock-refresh-token" + assert result["result"].unique_id == UNIQUE_ID + @pytest.mark.usefixtures("current_request_with_host") -async def test_flow_reauth( +@pytest.mark.parametrize( + ("unique_id", "scope", "expected_reason"), + [ + ( + UNIQUE_ID, + CURRENT_SCOPE, + "reauth_successful", + ), + ( + "wrong_uid", + CURRENT_SCOPE, + "account_mismatch", + ), + ( + UNIQUE_ID, + "READSYSTEM offline_access", + "reauth_successful", + ), + ], + ids=["reauth_only", "account_mismatch", "wrong_scope"], +) +async def test_flow_reauth_abort( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, setup_credentials: None, mock_config_entry: MockConfigEntry, + access_token: str, expires_at: float, + unique_id: str, + scope: str, + expected_reason: str, ) -> None: - """Test reauth step.""" + """Test reauth step with correct params and mismatches.""" - OLD_SCOPE = "READSYSTEM offline_access" - OLD_SCOPE_TOKEN = { + CURRENT_TOKEN = { "auth_implementation": DOMAIN, "token": { - "access_token": "Fake_token", - "scope": OLD_SCOPE, + "access_token": access_token, + "scope": scope, "expires_in": 86399, "refresh_token": "3012bc9f-7a65-4240-b817-9154ffdcc30f", "token_type": "Bearer", "expires_at": expires_at, }, } - assert mock_config_entry.data["token"]["scope"] == CURRENT_SCOPE assert hass.config_entries.async_update_entry( - mock_config_entry, data=OLD_SCOPE_TOKEN + mock_config_entry, data=CURRENT_TOKEN, unique_id=unique_id ) - assert mock_config_entry.data["token"]["scope"] == OLD_SCOPE - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" @@ -144,7 +164,7 @@ async def test_flow_reauth( OAUTH2_TOKEN, json={ "refresh_token": "updated-refresh-token", - "access_token": "updated-access-token", + "access_token": access_token, "type": "Bearer", "expires_in": "60", "scope": CURRENT_SCOPE, @@ -153,13 +173,104 @@ async def test_flow_reauth( with patch( f"homeassistant.components.{DOMAIN}.async_setup_entry", return_value=True - ) as mock_setup: + ): result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "reauth_successful" + assert result.get("reason") == expected_reason + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.parametrize( + ("unique_id", "scope", "expected_reason"), + [ + ( + UNIQUE_ID, + CURRENT_SCOPE, + "reconfigure_successful", + ), + ( + "wrong_uid", + CURRENT_SCOPE, + "account_mismatch", + ), + ], + ids=["reauth_only", "account_mismatch"], +) +async def test_flow_reconfigure_abort( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + setup_credentials: None, + mock_config_entry: MockConfigEntry, + access_token: str, + expires_at: float, + unique_id: str, + scope: str, + expected_reason: str, +) -> None: + """Test reauth step with correct params and mismatches.""" + + CURRENT_TOKEN = { + "auth_implementation": DOMAIN, + "token": { + "access_token": access_token, + "scope": scope, + "expires_in": 86399, + "refresh_token": 
"3012bc9f-7a65-4240-b817-9154ffdcc30f", + "token_type": "Bearer", + "expires_at": expires_at, + }, + } + assert hass.config_entries.async_update_entry( + mock_config_entry, data=CURRENT_TOKEN, unique_id=unique_id + ) + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["step_id"] == "auth" + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT_URL, + }, + ) + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + f"&redirect_uri={REDIRECT_URL}" + f"&state={state}" + f"&scope={CURRENT_SCOPE.replace(' ', '+')}" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "updated-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": "60", + "scope": CURRENT_SCOPE, + }, + ) + + with patch( + f"homeassistant.components.{DOMAIN}.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == expected_reason assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - assert mock_config_entry.data["token"]["scope"] == CURRENT_SCOPE diff --git a/tests/components/myuplink/test_init.py b/tests/components/myuplink/test_init.py index b474db731d1..fda0d3526f9 100644 --- a/tests/components/myuplink/test_init.py +++ b/tests/components/myuplink/test_init.py @@ -4,17 +4,21 @@ import http import time from unittest.mock import MagicMock +from aiohttp import ClientConnectionError import pytest from homeassistant.components.myuplink.const import DOMAIN, OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component from . 
import setup_integration
+from .const import UNIQUE_ID
 
 from tests.common import MockConfigEntry, load_fixture
 from tests.test_util.aiohttp import AiohttpClientMocker
+from tests.typing import WebSocketGenerator
 
 
 async def test_load_unload_entry(
@@ -70,6 +74,37 @@ async def test_expired_token_refresh_failure(
     assert mock_config_entry.state is expected_state
 
 
+@pytest.mark.parametrize(
+    ("expires_at", "expected_state"),
+    [
+        (
+            time.time() - 3600,
+            ConfigEntryState.SETUP_RETRY,
+        ),
+    ],
+    ids=[
+        "client_connection_error",
+    ],
+)
+async def test_expired_token_refresh_connection_failure(
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+    aioclient_mock: AiohttpClientMocker,
+    expected_state: ConfigEntryState,
+) -> None:
+    """Test failure while refreshing token with a ClientError."""
+
+    aioclient_mock.clear_requests()
+    aioclient_mock.post(
+        OAUTH2_TOKEN,
+        exc=ClientConnectionError(),
+    )
+
+    await setup_integration(hass, mock_config_entry)
+
+    assert mock_config_entry.state is expected_state
+
+
 @pytest.mark.parametrize(
     "load_systems_file",
     [load_fixture("systems.json", DOMAIN)],
@@ -92,7 +127,90 @@ async def test_devices_multiple_created_count(
     mock_myuplink_client: MagicMock,
     mock_config_entry: MockConfigEntry,
 ) -> None:
-    """Test that multiple device are created."""
+    """Test that multiple devices are created."""
 
     await setup_integration(hass, mock_config_entry)
 
     assert len(device_registry.devices) == 2
+
+
+async def test_migrate_config_entry(
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+    mock_myuplink_client: MagicMock,
+    expires_at: float,
+    access_token: str,
+) -> None:
+    """Test migration of config entry."""
+    mock_entry_v1_1 = MockConfigEntry(
+        version=1,
+        minor_version=1,
+        domain=DOMAIN,
+        title="myUplink test",
+        data={
+            "auth_implementation": DOMAIN,
+            "token": {
+                "access_token": access_token,
+                "scope": "WRITESYSTEM READSYSTEM offline_access",
+                "expires_in": 86399,
+                "refresh_token": "3012bc9f-7a65-4240-b817-9154ffdcc30f",
+                "token_type": "Bearer",
+                "expires_at": expires_at,
+            },
+        },
+        entry_id="myuplink_test",
+    )
+
+    await setup_integration(hass, mock_entry_v1_1)
+    assert mock_entry_v1_1.version == 1
+    assert mock_entry_v1_1.minor_version == 2
+    assert mock_entry_v1_1.unique_id == UNIQUE_ID
+
+
+async def test_oauth2_scope_failure(
+    hass: HomeAssistant,
+    mock_myuplink_client: MagicMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test that an incorrect OAuth2 scope fails."""
+
+    mock_config_entry.data["token"]["scope"] = "wrong_scope"
+    await setup_integration(hass, mock_config_entry)
+
+    assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
+
+
+async def test_device_remove_devices(
+    hass: HomeAssistant,
+    hass_ws_client: WebSocketGenerator,
+    mock_config_entry: MockConfigEntry,
+    mock_myuplink_client: MagicMock,
+    device_registry: dr.DeviceRegistry,
+) -> None:
+    """Test we can only remove a device that no longer exists."""
+    assert await async_setup_component(hass, "config", {})
+
+    mock_config_entry.add_to_hass(hass)
+
+    assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    device_entry = device_registry.async_get_device(
+        identifiers={
+            (
+                DOMAIN,
+                "batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff",
+            )
+        },
+    )
+    client = await hass_ws_client(hass)
+    response = await client.remove_device(device_entry.id, mock_config_entry.entry_id)
+    assert not response["success"]
+
+    old_device_entry = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id, + identifiers={(DOMAIN, "OLD-DEVICE-UUID")}, + ) + response = await client.remove_device( + old_device_entry.id, mock_config_entry.entry_id + ) + assert response["success"] diff --git a/tests/components/myuplink/test_number.py b/tests/components/myuplink/test_number.py index 273c35ab749..ef7b1749782 100644 --- a/tests/components/myuplink/test_number.py +++ b/tests/components/myuplink/test_number.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock from aiohttp import ClientError import pytest +from syrupy import SnapshotAssertion from homeassistant.components.number import SERVICE_SET_VALUE from homeassistant.const import ATTR_ENTITY_ID, Platform @@ -11,12 +12,14 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from tests.common import MockConfigEntry, snapshot_platform + TEST_PLATFORM = Platform.NUMBER pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) -ENTITY_ID = "number.gotham_city_degree_minutes" -ENTITY_FRIENDLY_NAME = "Gotham City Degree minutes" -ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940" +ENTITY_ID = "number.gotham_city_heating_offset_climate_system_1" +ENTITY_FRIENDLY_NAME = "Gotham City Heating offset climate system 1" +ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47011" async def test_entity_registry( @@ -31,25 +34,6 @@ async def test_entity_registry( assert entry.unique_id == ENTITY_UID -async def test_attributes( - hass: HomeAssistant, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test the switch attributes are correct.""" - - state = hass.states.get(ENTITY_ID) - assert state.state == "-875.0" - assert state.attributes == { - "friendly_name": ENTITY_FRIENDLY_NAME, - "min": -3000, - "max": 3000, - "mode": "auto", - "step": 1.0, - "unit_of_measurement": "DM", - } - - async def test_set_value( hass: HomeAssistant, mock_myuplink_client: MagicMock, @@ -60,7 +44,7 @@ async def test_set_value( await hass.services.async_call( TEST_PLATFORM, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: ENTITY_ID, "value": -125}, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, blocking=True, ) await hass.async_block_till_done() @@ -79,7 +63,7 @@ async def test_api_failure( await hass.services.async_call( TEST_PLATFORM, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: ENTITY_ID, "value": -125}, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, blocking=True, ) mock_myuplink_client.async_set_device_points.assert_called_once() @@ -99,3 +83,16 @@ async def test_entity_registry_smo20( entry = entity_registry.async_get("number.gotham_city_change_in_curve") assert entry.unique_id == "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47028" + + +async def test_number_states( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + setup_platform: None, +) -> None: + """Test number entity state.""" + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/myuplink/test_select.py b/tests/components/myuplink/test_select.py index 7ad2d17cb5d..f1797ebe5ad 100644 --- a/tests/components/myuplink/test_select.py +++ b/tests/components/myuplink/test_select.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock from aiohttp import ClientError import pytest +from syrupy import SnapshotAssertion from homeassistant.const import ( ATTR_ENTITY_ID, 
@@ -15,6 +16,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from tests.common import MockConfigEntry, snapshot_platform + TEST_PLATFORM = Platform.SELECT pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) @@ -23,27 +26,6 @@ ENTITY_FRIENDLY_NAME = "Gotham City comfort mode" ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041" -async def test_select_entity( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test that the entities are registered in the entity registry.""" - - entry = entity_registry.async_get(ENTITY_ID) - assert entry.unique_id == ENTITY_UID - - # Test the select attributes are correct. - - state = hass.states.get(ENTITY_ID) - assert state.state == "Economy" - assert state.attributes == { - "options": ["Smart control", "Economy", "Normal", "Luxury"], - "friendly_name": ENTITY_FRIENDLY_NAME, - } - - async def test_selecting( hass: HomeAssistant, mock_myuplink_client: MagicMock, @@ -87,3 +69,16 @@ async def test_entity_registry_smo20( entry = entity_registry.async_get("select.gotham_city_all") assert entry.unique_id == "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47660" + + +async def test_select_states( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + setup_platform: None, +) -> None: + """Test select entity state.""" + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/myuplink/test_sensor.py b/tests/components/myuplink/test_sensor.py index 8fecb787122..98cdfc322da 100644 --- a/tests/components/myuplink/test_sensor.py +++ b/tests/components/myuplink/test_sensor.py @@ -1,28 +1,30 @@ """Tests for myuplink sensor module.""" -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_states( hass: HomeAssistant, mock_myuplink_client: MagicMock, mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, ) -> None: """Test sensor state.""" - await setup_integration(hass, mock_config_entry) - state = hass.states.get("sensor.gotham_city_average_outdoor_temp_bt1") - assert state is not None - assert state.state == "-12.2" - assert state.attributes == { - "friendly_name": "Gotham City Average outdoor temp (BT1)", - "device_class": "temperature", - "state_class": "measurement", - "unit_of_measurement": "°C", - } + with patch("homeassistant.components.myuplink.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/myuplink/test_switch.py b/tests/components/myuplink/test_switch.py index 5e309e7152e..82d381df7fc 100644 --- a/tests/components/myuplink/test_switch.py +++ b/tests/components/myuplink/test_switch.py @@ -4,18 +4,20 @@ from unittest.mock import MagicMock from aiohttp import ClientError import pytest +from syrupy import SnapshotAssertion from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_OFF, Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from tests.common import MockConfigEntry, snapshot_platform + TEST_PLATFORM = Platform.SWITCH pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) @@ -36,20 +38,6 @@ async def test_entity_registry( assert entry.unique_id == ENTITY_UID -async def test_attributes( - hass: HomeAssistant, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test the switch attributes are correct.""" - - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_OFF - assert state.attributes == { - "friendly_name": ENTITY_FRIENDLY_NAME, - } - - @pytest.mark.parametrize( ("service"), [ @@ -109,3 +97,16 @@ async def test_entity_registry_smo20( entry = entity_registry.async_get(ENTITY_ID) assert entry.unique_id == ENTITY_UID + + +async def test_switch_states( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + setup_platform: None, +) -> None: + """Test switch entity state.""" + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/nam/test_config_flow.py b/tests/components/nam/test_config_flow.py index b96eddfd18b..6c11399c888 100644 --- a/tests/components/nam/test_config_flow.py +++ b/tests/components/nam/test_config_flow.py @@ -8,12 +8,7 @@ import pytest from homeassistant.components import zeroconf from homeassistant.components.nam.const import DOMAIN -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_USER, - SOURCE_ZEROCONF, -) +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -122,6 +117,9 @@ async def test_reauth_successful(hass: 
HomeAssistant) -> None: data={"host": "10.10.2.3"}, ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -133,15 +131,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: return_value="aa:bb:cc:dd:ee:ff", ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=VALID_AUTH, @@ -160,20 +149,14 @@ async def test_reauth_unsuccessful(hass: HomeAssistant) -> None: data={"host": "10.10.2.3"}, ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.nam.NettigoAirMonitor.async_check_credentials", side_effect=ApiError("API Error"), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=VALID_AUTH, @@ -459,17 +442,10 @@ async def test_reconfigure_successful(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with ( patch( @@ -509,17 +485,10 @@ async def test_reconfigure_not_successful(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with patch( "homeassistant.components.nam.NettigoAirMonitor.async_check_credentials", @@ -531,7 +500,7 @@ async def test_reconfigure_not_successful(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": "cannot_connect"} with ( @@ -572,17 +541,10 @@ async def test_reconfigure_not_the_same_device(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with ( patch( diff --git a/tests/components/nanoleaf/test_config_flow.py b/tests/components/nanoleaf/test_config_flow.py index eaa1c60dcd4..97a314b0bf4 100644 --- a/tests/components/nanoleaf/test_config_flow.py +++ 
b/tests/components/nanoleaf/test_config_flow.py @@ -297,15 +297,7 @@ async def test_reauth(hass: HomeAssistant) -> None: return_value=True, ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "link" diff --git a/tests/components/nasweb/__init__.py b/tests/components/nasweb/__init__.py new file mode 100644 index 00000000000..d4906d710d5 --- /dev/null +++ b/tests/components/nasweb/__init__.py @@ -0,0 +1 @@ +"""Tests for the NASweb integration.""" diff --git a/tests/components/nasweb/conftest.py b/tests/components/nasweb/conftest.py new file mode 100644 index 00000000000..7757f40ee44 --- /dev/null +++ b/tests/components/nasweb/conftest.py @@ -0,0 +1,61 @@ +"""Common fixtures for the NASweb tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.nasweb.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +BASE_CONFIG_FLOW = "homeassistant.components.nasweb.config_flow." +BASE_NASWEB_DATA = "homeassistant.components.nasweb.nasweb_data." +BASE_COORDINATOR = "homeassistant.components.nasweb.coordinator." +TEST_SERIAL_NUMBER = "0011223344556677" + + +@pytest.fixture +def validate_input_all_ok() -> Generator[dict[str, AsyncMock | MagicMock]]: + """Yield dictionary of mocked functions required for successful test_form execution.""" + with ( + patch( + BASE_CONFIG_FLOW + "WebioAPI.check_connection", + return_value=True, + ) as check_connection, + patch( + BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info", + return_value=True, + ) as refresh_device_info, + patch( + BASE_NASWEB_DATA + "NASwebData.get_webhook_url", + return_value="http://127.0.0.1:8123/api/webhook/de705e77291402afa0dd961426e9f19bb53631a9f2a106c52cfd2d2266913c04", + ) as get_webhook_url, + patch( + BASE_CONFIG_FLOW + "WebioAPI.get_serial_number", + return_value=TEST_SERIAL_NUMBER, + ) as get_serial, + patch( + BASE_CONFIG_FLOW + "WebioAPI.status_subscription", + return_value=True, + ) as status_subscription, + patch( + BASE_NASWEB_DATA + "NotificationCoordinator.check_connection", + return_value=True, + ) as check_status_confirmation, + ): + yield { + BASE_CONFIG_FLOW + "WebioAPI.check_connection": check_connection, + BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info": refresh_device_info, + BASE_NASWEB_DATA + "NASwebData.get_webhook_url": get_webhook_url, + BASE_CONFIG_FLOW + "WebioAPI.get_serial_number": get_serial, + BASE_CONFIG_FLOW + "WebioAPI.status_subscription": status_subscription, + BASE_NASWEB_DATA + + "NotificationCoordinator.check_connection": check_status_confirmation, + } diff --git a/tests/components/nasweb/test_config_flow.py b/tests/components/nasweb/test_config_flow.py new file mode 100644 index 00000000000..a5f2dca680d --- /dev/null +++ b/tests/components/nasweb/test_config_flow.py @@ -0,0 +1,208 @@ +"""Test the NASweb config flow.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from webio_api.api_client import AuthError + +from homeassistant import config_entries +from homeassistant.components.nasweb.const import DOMAIN +from homeassistant.config_entries import 
ConfigFlowResult
+from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
+from homeassistant.core import HomeAssistant
+from homeassistant.data_entry_flow import FlowResultType
+from homeassistant.helpers.network import NoURLAvailableError
+
+from .conftest import (
+    BASE_CONFIG_FLOW,
+    BASE_COORDINATOR,
+    BASE_NASWEB_DATA,
+    TEST_SERIAL_NUMBER,
+)
+
+pytestmark = pytest.mark.usefixtures("mock_setup_entry")
+
+
+TEST_USER_INPUT = {
+    CONF_HOST: "1.1.1.1",
+    CONF_USERNAME: "test-username",
+    CONF_PASSWORD: "test-password",
+}
+
+
+async def _add_test_config_entry(hass: HomeAssistant) -> ConfigFlowResult:
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": config_entries.SOURCE_USER}
+    )
+    assert result.get("type") == FlowResultType.FORM
+    assert not result.get("errors")
+
+    result2 = await hass.config_entries.flow.async_configure(
+        result["flow_id"], TEST_USER_INPUT
+    )
+    await hass.async_block_till_done()
+    return result2
+
+
+async def test_form(
+    hass: HomeAssistant,
+    mock_setup_entry: AsyncMock,
+    validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+    """Test the form."""
+    result = await _add_test_config_entry(hass)
+
+    assert result.get("type") == FlowResultType.CREATE_ENTRY
+    assert result.get("title") == "1.1.1.1"
+    assert result.get("data") == TEST_USER_INPUT
+
+    config_entry = result.get("result")
+    assert config_entry is not None
+    assert config_entry.unique_id == TEST_SERIAL_NUMBER
+    assert len(mock_setup_entry.mock_calls) == 1
+
+
+async def test_form_cannot_connect(
+    hass: HomeAssistant,
+    validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+    """Test cannot connect error."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": config_entries.SOURCE_USER}
+    )
+
+    with patch(BASE_CONFIG_FLOW + "WebioAPI.check_connection", return_value=False):
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"], TEST_USER_INPUT
+        )
+
+    assert result2.get("type") == FlowResultType.FORM
+    assert result2.get("errors") == {"base": "cannot_connect"}
+
+
+async def test_form_invalid_auth(
+    hass: HomeAssistant,
+    validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+    """Test invalid auth."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": config_entries.SOURCE_USER}
+    )
+
+    with patch(
+        BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info",
+        side_effect=AuthError,
+    ):
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"], TEST_USER_INPUT
+        )
+
+    assert result2.get("type") == FlowResultType.FORM
+    assert result2.get("errors") == {"base": "invalid_auth"}
+
+
+async def test_form_missing_internal_url(
+    hass: HomeAssistant,
+    validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+    """Test missing internal url."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": config_entries.SOURCE_USER}
+    )
+
+    with patch(
+        BASE_NASWEB_DATA + "NASwebData.get_webhook_url", side_effect=NoURLAvailableError
+    ):
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"], TEST_USER_INPUT
+        )
+    assert result2.get("type") == FlowResultType.FORM
+    assert result2.get("errors") == {"base": "missing_internal_url"}
+
+
+async def test_form_missing_nasweb_data(
+    hass: HomeAssistant,
+    validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+    """Test missing NASweb data."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
context={"source": config_entries.SOURCE_USER} + ) + + with patch( + BASE_CONFIG_FLOW + "WebioAPI.get_serial_number", + return_value=None, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "missing_nasweb_data"} + with patch(BASE_CONFIG_FLOW + "WebioAPI.status_subscription", return_value=False): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "missing_nasweb_data"} + + +async def test_missing_status( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test missing status update.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + BASE_COORDINATOR + "NotificationCoordinator.check_connection", + return_value=False, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "missing_status"} + + +async def test_form_exception( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test other exceptions.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.nasweb.config_flow.validate_input", + side_effect=Exception, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "unknown"} + + +async def test_form_already_configured( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test already configured device.""" + result = await _add_test_config_entry(hass) + config_entry = result.get("result") + assert config_entry is not None + assert config_entry.unique_id == TEST_SERIAL_NUMBER + + result2_1 = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + result2_2 = await hass.config_entries.flow.async_configure( + result2_1["flow_id"], TEST_USER_INPUT + ) + await hass.async_block_till_done() + + assert result2_2.get("type") == FlowResultType.ABORT + assert result2_2.get("reason") == "already_configured" diff --git a/tests/components/neato/test_config_flow.py b/tests/components/neato/test_config_flow.py index 1b86c4e9980..c5289927d91 100644 --- a/tests/components/neato/test_config_flow.py +++ b/tests/components/neato/test_config_flow.py @@ -111,16 +111,15 @@ async def test_reauth( hass, NEATO_DOMAIN, ClientCredential(CLIENT_ID, CLIENT_SECRET) ) - MockConfigEntry( + entry = MockConfigEntry( entry_id="my_entry", domain=NEATO_DOMAIN, data={"username": "abcdef", "password": "123456", "vendor": "neato"}, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Should show form - result = await hass.config_entries.flow.async_init( - "neato", context={"source": config_entries.SOURCE_REAUTH} - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/ness_alarm/test_init.py b/tests/components/ness_alarm/test_init.py index fb003d253de..48821d3e68d 
100644 --- a/tests/components/ness_alarm/test_init.py +++ b/tests/components/ness_alarm/test_init.py @@ -6,6 +6,7 @@ from nessclient import ArmingMode, ArmingState import pytest from homeassistant.components import alarm_control_panel +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.ness_alarm import ( ATTR_CODE, ATTR_OUTPUT_ID, @@ -24,13 +25,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -90,7 +84,9 @@ async def test_dispatch_state_change(hass: HomeAssistant, mock_nessclient) -> No on_state_change(ArmingState.ARMING, None) await hass.async_block_till_done() - assert hass.states.is_state("alarm_control_panel.alarm_panel", STATE_ALARM_ARMING) + assert hass.states.is_state( + "alarm_control_panel.alarm_panel", AlarmControlPanelState.ARMING + ) async def test_alarm_disarm(hass: HomeAssistant, mock_nessclient) -> None: @@ -178,15 +174,27 @@ async def test_arming_state_change(hass: HomeAssistant, mock_nessclient) -> None """Test arming state change handing.""" states = [ (ArmingState.UNKNOWN, None, STATE_UNKNOWN), - (ArmingState.DISARMED, None, STATE_ALARM_DISARMED), - (ArmingState.ARMING, None, STATE_ALARM_ARMING), - (ArmingState.EXIT_DELAY, None, STATE_ALARM_ARMING), - (ArmingState.ARMED, None, STATE_ALARM_ARMED_AWAY), - (ArmingState.ARMED, ArmingMode.ARMED_AWAY, STATE_ALARM_ARMED_AWAY), - (ArmingState.ARMED, ArmingMode.ARMED_HOME, STATE_ALARM_ARMED_HOME), - (ArmingState.ARMED, ArmingMode.ARMED_NIGHT, STATE_ALARM_ARMED_NIGHT), - (ArmingState.ENTRY_DELAY, None, STATE_ALARM_PENDING), - (ArmingState.TRIGGERED, None, STATE_ALARM_TRIGGERED), + (ArmingState.DISARMED, None, AlarmControlPanelState.DISARMED), + (ArmingState.ARMING, None, AlarmControlPanelState.ARMING), + (ArmingState.EXIT_DELAY, None, AlarmControlPanelState.ARMING), + (ArmingState.ARMED, None, AlarmControlPanelState.ARMED_AWAY), + ( + ArmingState.ARMED, + ArmingMode.ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, + ), + ( + ArmingState.ARMED, + ArmingMode.ARMED_HOME, + AlarmControlPanelState.ARMED_HOME, + ), + ( + ArmingState.ARMED, + ArmingMode.ARMED_NIGHT, + AlarmControlPanelState.ARMED_NIGHT, + ), + (ArmingState.ENTRY_DELAY, None, AlarmControlPanelState.PENDING), + (ArmingState.TRIGGERED, None, AlarmControlPanelState.TRIGGERED), ] await async_setup_component(hass, DOMAIN, VALID_CONFIG) diff --git a/tests/components/nest/common.py b/tests/components/nest/common.py index 9c8de0224f0..8f1f0a2f074 100644 --- a/tests/components/nest/common.py +++ b/tests/components/nest/common.py @@ -4,8 +4,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Generator import copy -from dataclasses import dataclass, field -import time +from dataclasses import dataclass from typing import Any from google_nest_sdm.auth import AbstractAuth @@ -30,13 +29,13 @@ CLIENT_ID = "some-client-id" CLIENT_SECRET = "some-client-secret" CLOUD_PROJECT_ID = "cloud-id-9876" SUBSCRIBER_ID = "projects/cloud-id-9876/subscriptions/subscriber-id-9876" +SUBSCRIPTION_NAME = "projects/cloud-id-9876/subscriptions/subscriber-id-9876" @dataclass class NestTestConfig: """Holder for integration configuration.""" - config: dict[str, Any] = field(default_factory=dict) config_entry_data: dict[str, 
Any] | None = None credential: ClientCredential | None = None @@ -53,37 +52,18 @@ TEST_CONFIG_APP_CREDS = NestTestConfig( credential=ClientCredential(CLIENT_ID, CLIENT_SECRET), ) TEST_CONFIGFLOW_APP_CREDS = NestTestConfig( - config=TEST_CONFIG_APP_CREDS.config, credential=ClientCredential(CLIENT_ID, CLIENT_SECRET), ) -TEST_CONFIG_LEGACY = NestTestConfig( - config={ - "nest": { - "client_id": "some-client-id", - "client_secret": "some-client-secret", - }, - }, +TEST_CONFIG_NEW_SUBSCRIPTION = NestTestConfig( config_entry_data={ - "auth_implementation": "local", - "tokens": { - "expires_at": time.time() + 86400, - "access_token": { - "token": "some-token", - }, - }, - }, -) -TEST_CONFIG_ENTRY_LEGACY = NestTestConfig( - config_entry_data={ - "auth_implementation": "local", - "tokens": { - "expires_at": time.time() + 86400, - "access_token": { - "token": "some-token", - }, - }, + "sdm": {}, + "project_id": PROJECT_ID, + "cloud_project_id": CLOUD_PROJECT_ID, + "subscription_name": SUBSCRIPTION_NAME, + "auth_implementation": "imported-cred", }, + credential=ClientCredential(CLIENT_ID, CLIENT_SECRET), ) @@ -95,6 +75,7 @@ class FakeSubscriber(GoogleNestSubscriber): def __init__(self) -> None: # pylint: disable=super-init-not-called """Initialize Fake Subscriber.""" self._device_manager = DeviceManager() + self._subscriber_name = "fake-name" def set_update_callback(self, target: Callable[[EventMessage], Awaitable[None]]): """Capture the callback set by Home Assistant.""" diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index 85c64aff379..84f22e17e78 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -22,6 +22,7 @@ from homeassistant.components.application_credentials import ( ) from homeassistant.components.nest import DOMAIN from homeassistant.components.nest.const import CONF_SUBSCRIBER_ID, SDM_SCOPES +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -201,20 +202,6 @@ def nest_test_config() -> NestTestConfig: return TEST_CONFIG_APP_CREDS -@pytest.fixture -def config( - subscriber_id: str | None, nest_test_config: NestTestConfig -) -> dict[str, Any]: - """Fixture that sets up the configuration.yaml for the test.""" - config = copy.deepcopy(nest_test_config.config) - if CONF_SUBSCRIBER_ID in config.get(DOMAIN, {}): - if subscriber_id: - config[DOMAIN][CONF_SUBSCRIBER_ID] = subscriber_id - else: - del config[DOMAIN][CONF_SUBSCRIBER_ID] - return config - - @pytest.fixture def config_entry_unique_id() -> str: """Fixture to set ConfigEntry unique id.""" @@ -274,19 +261,19 @@ async def credential(hass: HomeAssistant, nest_test_config: NestTestConfig) -> N async def setup_base_platform( hass: HomeAssistant, platforms: list[str], - config: dict[str, Any], config_entry: MockConfigEntry | None, ) -> YieldFixture[PlatformSetup]: """Fixture to setup the integration platform.""" - if config_entry: - config_entry.add_to_hass(hass) + config_entry.add_to_hass(hass) with patch("homeassistant.components.nest.PLATFORMS", platforms): async def _setup_func() -> bool: - assert await async_setup_component(hass, DOMAIN, config) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() yield _setup_func + if config_entry.state == ConfigEntryState.LOADED: + await hass.config_entries.async_unload(config_entry.entry_id) @pytest.fixture diff --git a/tests/components/nest/test_camera.py 
b/tests/components/nest/test_camera.py index 6aa25134563..698e9b7a274 100644 --- a/tests/components/nest/test_camera.py +++ b/tests/components/nest/test_camera.py @@ -15,7 +15,7 @@ from google_nest_sdm.event import EventMessage import pytest from homeassistant.components import camera -from homeassistant.components.camera import STATE_IDLE, STATE_STREAMING, StreamType +from homeassistant.components.camera import CameraState, StreamType from homeassistant.components.nest.const import DOMAIN from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.const import ATTR_FRIENDLY_NAME @@ -28,7 +28,7 @@ from .common import DEVICE_ID, CreateDevice, FakeSubscriber, PlatformSetup from .conftest import FakeAuth from tests.common import async_fire_time_changed -from tests.typing import WebSocketGenerator +from tests.typing import MockHAClientWebSocket, WebSocketGenerator PLATFORM = "camera" CAMERA_DEVICE_TYPE = "sdm.devices.types.CAMERA" @@ -176,6 +176,20 @@ async def async_get_image( return image.content +async def async_frontend_stream_types( + client: MockHAClientWebSocket, entity_id: str +) -> list[str] | None: + """Get the frontend stream types supported.""" + await client.send_json_auto_id( + {"type": "camera/capabilities", "entity_id": entity_id} + ) + msg = await client.receive_json() + assert msg.get("type") == TYPE_RESULT + assert msg.get("success") + assert msg.get("result") + return msg["result"].get("frontend_stream_types") + + async def fire_alarm(hass: HomeAssistant, point_in_time: datetime.datetime) -> None: """Fire an alarm and wait for callbacks to run.""" with freeze_time(point_in_time): @@ -218,7 +232,7 @@ async def test_camera_device( assert len(hass.states.async_all()) == 1 camera = hass.states.get("camera.my_camera") assert camera is not None - assert camera.state == STATE_STREAMING + assert camera.state == CameraState.STREAMING assert camera.attributes.get(ATTR_FRIENDLY_NAME) == "My Camera" entry = entity_registry.async_get("camera.my_camera") @@ -237,6 +251,7 @@ async def test_camera_stream( camera_device: None, auth: FakeAuth, mock_create_stream: Mock, + hass_ws_client: WebSocketGenerator, ) -> None: """Test a basic camera and fetch its live stream.""" auth.responses = [make_stream_url_response()] @@ -245,8 +260,12 @@ async def test_camera_stream( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING - assert cam.attributes["frontend_stream_type"] == StreamType.HLS + assert cam.state == CameraState.STREAMING + client = await hass_ws_client(hass) + frontend_stream_types = await async_frontend_stream_types( + client, "camera.my_camera" + ) + assert frontend_stream_types == [StreamType.HLS] stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") assert stream_source == "rtsp://some/url?auth=g.0.streamingToken" @@ -267,10 +286,13 @@ async def test_camera_ws_stream( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING - assert cam.attributes["frontend_stream_type"] == StreamType.HLS - + assert cam.state == CameraState.STREAMING client = await hass_ws_client(hass) + frontend_stream_types = await async_frontend_stream_types( + client, "camera.my_camera" + ) + assert frontend_stream_types == [StreamType.HLS] + await client.send_json( { "id": 2, @@ -300,7 +322,7 @@ async def test_camera_ws_stream_failure( assert len(hass.states.async_all()) == 1 cam = 
hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING client = await hass_ws_client(hass) await client.send_json( @@ -322,7 +344,7 @@ async def test_camera_ws_stream_failure( async def test_camera_stream_missing_trait( hass: HomeAssistant, setup_platform, create_device ) -> None: - """Test fetching a video stream when not supported by the API.""" + """Test that cameras missing a live stream are not supported.""" create_device.create( { "sdm.devices.traits.Info": { @@ -338,16 +360,7 @@ async def test_camera_stream_missing_trait( ) await setup_platform() - assert len(hass.states.async_all()) == 1 - cam = hass.states.get("camera.my_camera") - assert cam is not None - assert cam.state == STATE_IDLE - - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source is None - - # Fallback to placeholder image - await async_get_image(hass) + assert len(hass.states.async_all()) == 0 async def test_refresh_expired_stream_token( @@ -375,7 +388,7 @@ async def test_refresh_expired_stream_token( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING # Request a stream for the camera entity to exercise nest cam + camera interaction # and shutdown on url expiration @@ -446,7 +459,7 @@ async def test_stream_response_already_expired( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING # The stream is expired, but we return it anyway stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") @@ -459,6 +472,50 @@ async def test_stream_response_already_expired( assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" +async def test_extending_stream_already_expired( + hass: HomeAssistant, + auth: FakeAuth, + setup_platform: PlatformSetup, + camera_device: None, +) -> None: + """Test a API response when extending the stream returns an expired stream url.""" + now = utcnow() + stream_1_expiration = now + datetime.timedelta(seconds=180) + stream_2_expiration = now + datetime.timedelta(seconds=30) # Will be in the past + stream_3_expiration = now + datetime.timedelta(seconds=600) + auth.responses = [ + make_stream_url_response(stream_1_expiration, token_num=1), + make_stream_url_response(stream_2_expiration, token_num=2), + make_stream_url_response(stream_3_expiration, token_num=3), + ] + await setup_platform() + + assert len(hass.states.async_all()) == 1 + cam = hass.states.get("camera.my_camera") + assert cam is not None + assert cam.state == CameraState.STREAMING + + # The stream is expired, but we return it anyway + stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") + assert stream_source == "rtsp://some/url?auth=g.1.streamingToken" + + # Jump to when the stream will be refreshed + await fire_alarm(hass, now + datetime.timedelta(seconds=160)) + stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") + assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" + + # The stream will have expired in the past, but 1 minute min refresh interval is applied. + # The stream token is not updated. 
+ await fire_alarm(hass, now + datetime.timedelta(seconds=170)) + stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") + assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" + + # Now go past the min update interval and the stream is refreshed + await fire_alarm(hass, now + datetime.timedelta(seconds=225)) + stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") + assert stream_source == "rtsp://some/url?auth=g.3.streamingToken" + + async def test_camera_removed( hass: HomeAssistant, auth: FakeAuth, @@ -474,7 +531,7 @@ async def test_camera_removed( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING # Start a stream, exercising cleanup on remove auth.responses = [ @@ -502,7 +559,7 @@ async def test_camera_remove_failure( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING # Start a stream, exercising cleanup on remove auth.responses = [ @@ -543,7 +600,7 @@ async def test_refresh_expired_stream_failure( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING # Request an HLS stream with patch("homeassistant.components.camera.create_stream") as create_stream: @@ -577,11 +634,11 @@ async def test_refresh_expired_stream_failure( assert create_stream.called +@pytest.mark.usefixtures("webrtc_camera_device") async def test_camera_web_rtc( hass: HomeAssistant, auth, hass_ws_client: WebSocketGenerator, - webrtc_camera_device, setup_platform, ) -> None: """Test a basic camera that supports web rtc.""" @@ -602,35 +659,50 @@ async def test_camera_web_rtc( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING - assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC + assert cam.state == CameraState.STREAMING + client = await hass_ws_client(hass) + assert await async_frontend_stream_types(client, "camera.my_camera") == [ + StreamType.WEB_RTC + ] client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( { - "id": 5, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) - msg = await client.receive_json() - assert msg["id"] == 5 - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"]["answer"] == "v=0\r\ns=-\r\n" + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": "v=0\r\ns=-\r\n", + } # Nest WebRTC cameras return a placeholder await async_get_image(hass) await async_get_image(hass, width=1024, height=768) +@pytest.mark.usefixtures("auth", "camera_device") async def test_camera_web_rtc_unsupported( hass: HomeAssistant, - auth, 
hass_ws_client: WebSocketGenerator, - camera_device, setup_platform, ) -> None: """Test a basic camera that supports web rtc.""" @@ -639,32 +711,35 @@ async def test_camera_web_rtc_unsupported( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING - assert cam.attributes["frontend_stream_type"] == StreamType.HLS + assert cam.state == CameraState.STREAMING client = await hass_ws_client(hass) - await client.send_json( + assert await async_frontend_stream_types(client, "camera.my_camera") == [ + StreamType.HLS + ] + + await client.send_json_auto_id( { - "id": 5, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) msg = await client.receive_json() - assert msg["id"] == 5 assert msg["type"] == TYPE_RESULT assert not msg["success"] - assert msg["error"]["code"] == "web_rtc_offer_failed" - assert msg["error"]["message"].startswith("Camera does not support WebRTC") + assert msg["error"] == { + "code": "webrtc_offer_failed", + "message": "Camera does not support WebRTC, frontend_stream_types={}", + } +@pytest.mark.usefixtures("webrtc_camera_device") async def test_camera_web_rtc_offer_failure( hass: HomeAssistant, auth, hass_ws_client: WebSocketGenerator, - webrtc_camera_device, setup_platform, ) -> None: """Test a basic camera that supports web rtc.""" @@ -676,39 +751,50 @@ async def test_camera_web_rtc_offer_failure( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( { - "id": 5, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) - msg = await client.receive_json() - assert msg["id"] == 5 - assert msg["type"] == TYPE_RESULT - assert not msg["success"] - assert msg["error"]["code"] == "web_rtc_offer_failed" - assert msg["error"]["message"].startswith("Nest API error") + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": "Nest API error: Bad Request response from API (400)", + } +@pytest.mark.usefixtures("mock_create_stream") async def test_camera_multiple_streams( hass: HomeAssistant, auth, hass_ws_client: WebSocketGenerator, create_device, setup_platform, - mock_create_stream, ) -> None: """Test a camera supporting multiple stream types.""" expiration = utcnow() + datetime.timedelta(seconds=100) auth.responses = [ - # RTSP response - make_stream_url_response(), # WebRTC response aiohttp.web.json_response( { @@ -741,27 +827,139 @@ async def test_camera_multiple_streams( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING # Prefer WebRTC over RTSP/HLS assert cam.attributes["frontend_stream_type"] == 
StreamType.WEB_RTC + client = await hass_ws_client(hass) + assert await async_frontend_stream_types(client, "camera.my_camera") == [ + StreamType.WEB_RTC + ] - # RTSP stream + # RTSP stream is not supported stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.0.streamingToken" + assert not stream_source # WebRTC stream client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( { - "id": 5, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) - msg = await client.receive_json() - assert msg["id"] == 5 - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"]["answer"] == "v=0\r\ns=-\r\n" + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": "v=0\r\ns=-\r\n", + } + + +@pytest.mark.usefixtures("webrtc_camera_device") +async def test_webrtc_refresh_expired_stream( + hass: HomeAssistant, + setup_platform: PlatformSetup, + hass_ws_client: WebSocketGenerator, + auth: FakeAuth, +) -> None: + """Test a camera webrtc expiration and refresh.""" + now = utcnow() + + stream_1_expiration = now + datetime.timedelta(seconds=90) + stream_2_expiration = now + datetime.timedelta(seconds=180) + auth.responses = [ + aiohttp.web.json_response( + { + "results": { + "answerSdp": "v=0\r\ns=-\r\n", + "mediaSessionId": "yP2grqz0Y1V_wgiX9KEbMWHoLd...", + "expiresAt": stream_1_expiration.isoformat(timespec="seconds"), + }, + } + ), + aiohttp.web.json_response( + { + "results": { + "mediaSessionId": "yP2grqz0Y1V_wgiX9KEbMWHoLd...", + "expiresAt": stream_2_expiration.isoformat(timespec="seconds"), + }, + } + ), + ] + await setup_platform() + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 1 + cam = hass.states.get("camera.my_camera") + assert cam is not None + assert cam.state == CameraState.STREAMING + assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC + client = await hass_ws_client(hass) + assert await async_frontend_stream_types(client, "camera.my_camera") == [ + StreamType.WEB_RTC + ] + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.my_camera", + "offer": "a=recvonly", + } + ) + + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": "v=0\r\ns=-\r\n", + } + + assert len(auth.captured_requests) == 1 + assert ( + auth.captured_requests[0][2].get("command") + == "sdm.devices.commands.CameraLiveStream.GenerateWebRtcStream" + ) + + # Fire alarm before 
stream_1_expiration. The stream url is not refreshed + next_update = now + datetime.timedelta(seconds=25) + await fire_alarm(hass, next_update) + assert len(auth.captured_requests) == 1 + + # Alarm is near stream_1_expiration which causes the stream extension + next_update = now + datetime.timedelta(seconds=60) + await fire_alarm(hass, next_update) + + assert len(auth.captured_requests) >= 2 + assert ( + auth.captured_requests[1][2].get("command") + == "sdm.devices.commands.CameraLiveStream.ExtendWebRtcStream" + ) diff --git a/tests/components/nest/test_config_flow.py b/tests/components/nest/test_config_flow.py index b6e84ce358f..807e299b79c 100644 --- a/tests/components/nest/test_config_flow.py +++ b/tests/components/nest/test_config_flow.py @@ -6,11 +6,7 @@ from http import HTTPStatus from typing import Any from unittest.mock import patch -from google_nest_sdm.exceptions import ( - AuthException, - ConfigurationException, - SubscriberException, -) +from google_nest_sdm.exceptions import AuthException from google_nest_sdm.structure import Structure import pytest @@ -31,7 +27,6 @@ from .common import ( TEST_CONFIGFLOW_APP_CREDS, FakeSubscriber, NestTestConfig, - PlatformSetup, ) from tests.common import MockConfigEntry @@ -40,7 +35,7 @@ from tests.typing import ClientSessionGenerator WEB_REDIRECT_URL = "https://example.com/auth/external/callback" APP_REDIRECT_URL = "urn:ietf:wg:oauth:2.0:oob" - +RAND_SUBSCRIBER_SUFFIX = "ABCDEF" FAKE_DHCP_DATA = dhcp.DhcpServiceInfo( ip="127.0.0.2", macaddress="001122334455", hostname="fake_hostname" @@ -53,6 +48,16 @@ def nest_test_config() -> NestTestConfig: return TEST_CONFIGFLOW_APP_CREDS +@pytest.fixture(autouse=True) +def mock_rand_topic_name_fixture() -> None: + """Set the topic name random string to a constant.""" + with patch( + "homeassistant.components.nest.config_flow.get_random_string", + return_value=RAND_SUBSCRIBER_SUFFIX, + ): + yield + + class OAuthFixture: """Simulate the oauth flow used by the config flow.""" @@ -158,6 +163,43 @@ class OAuthFixture: }, ) + async def async_complete_pubsub_flow( + self, + result: dict, + selected_topic: str, + selected_subscription: str = "create_new_subscription", + user_input: dict | None = None, + ) -> ConfigEntry: + """Fixture to walk through the Pub/Sub topic and subscription steps. + + This picks a simple set of steps that are reusable for most flows without + exercising the corner cases. 
+ """ + + # Validate Pub/Sub topics are shown + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert not result.get("errors") + + # Select Pub/Sub topic the show available subscriptions (none) + result = await self.async_configure( + result, + { + "topic_name": selected_topic, + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert not result.get("errors") + + # Create the subscription and end the flow + return await self.async_finish_setup( + result, + { + "subscription_name": selected_subscription, + }, + ) + async def async_finish_setup( self, result: dict, user_input: dict | None = None ) -> ConfigEntry: @@ -179,15 +221,6 @@ class OAuthFixture: user_input, ) - async def async_pubsub_flow(self, result: dict, cloud_project_id="") -> None: - """Verify the pubsub creation step.""" - # Render form with a link to get an auth token - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pubsub" - assert "description_placeholders" in result - assert "url" in result["description_placeholders"] - assert result["data_schema"]({}) == {"cloud_project_id": cloud_project_id} - def get_config_entry(self) -> ConfigEntry: """Get the config entry.""" entries = self.hass.config_entries.async_entries(DOMAIN) @@ -206,32 +239,143 @@ async def oauth( return OAuthFixture(hass, hass_client_no_auth, aioclient_mock) +@pytest.fixture(name="sdm_managed_topic") +def mock_sdm_managed_topic() -> bool: + """Fixture to configure fake server responses for SDM owend Pub/Sub topics.""" + return False + + +@pytest.fixture(name="user_managed_topics") +def mock_user_managed_topics() -> list[str]: + """Fixture to configure fake server response for user owned Pub/Sub topics.""" + return [] + + +@pytest.fixture(name="subscriptions") +def mock_subscriptions() -> list[tuple[str, str]]: + """Fixture to configure fake server response for user subscriptions that exist.""" + return [] + + +@pytest.fixture(name="device_access_project_id") +def mock_device_access_project_id() -> str: + """Fixture to configure the device access console project id used in tests.""" + return PROJECT_ID + + +@pytest.fixture(name="cloud_project_id") +def mock_cloud_project_id() -> str: + """Fixture to configure the cloud console project id used in tests.""" + return CLOUD_PROJECT_ID + + +@pytest.fixture(name="create_subscription_status") +def mock_create_subscription_status() -> str: + """Fixture to configure the return code when creating the subscription.""" + return HTTPStatus.OK + + +@pytest.fixture(name="list_topics_status") +def mock_list_topics_status() -> str: + """Fixture to configure the return code when listing topics.""" + return HTTPStatus.OK + + +@pytest.fixture(name="list_subscriptions_status") +def mock_list_subscriptions_status() -> str: + """Fixture to configure the return code when listing subscriptions.""" + return HTTPStatus.OK + + +@pytest.fixture(autouse=True) +def mock_pubsub_api_responses( + aioclient_mock: AiohttpClientMocker, + sdm_managed_topic: bool, + user_managed_topics: list[str], + subscriptions: list[tuple[str, str]], + device_access_project_id: str, + cloud_project_id: str, + create_subscription_status: HTTPStatus, + list_topics_status: HTTPStatus, + list_subscriptions_status: HTTPStatus, +) -> None: + """Configure a server response for an SDM managed Pub/Sub topic. 
+ + We check for a topic created by the SDM Device Access Console (but note we don't have permission to read it) + or the user has created one themselves in the Google Cloud Project. + """ + aioclient_mock.get( + f"https://pubsub.googleapis.com/v1/projects/sdm-prod/topics/enterprise-{device_access_project_id}", + status=HTTPStatus.FORBIDDEN if sdm_managed_topic else HTTPStatus.NOT_FOUND, + ) + aioclient_mock.get( + f"https://pubsub.googleapis.com/v1/projects/{cloud_project_id}/topics", + json={ + "topics": [ + { + "name": topic_name, + } + for topic_name in user_managed_topics or () + ] + }, + status=list_topics_status, + ) + # We check for a topic created by the SDM Device Access Console (but note we don't have permission to read it) + # or the user has created one themselves in the Google Cloud Project. + aioclient_mock.get( + f"https://pubsub.googleapis.com/v1/projects/{cloud_project_id}/subscriptions", + json={ + "subscriptions": [ + { + "name": subscription_name, + "topic": topic, + "pushConfig": {}, + "ackDeadlineSeconds": 10, + "messageRetentionDuration": "604800s", + "expirationPolicy": {"ttl": "2678400s"}, + "state": "ACTIVE", + } + for (subscription_name, topic) in subscriptions or () + ] + }, + status=list_subscriptions_status, + ) + aioclient_mock.put( + f"https://pubsub.googleapis.com/v1/projects/{cloud_project_id}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}", + json={}, + status=create_subscription_status, + ) + + +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_app_credentials( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Check full flow.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result) + result = await oauth.async_configure(result, None) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") - assert "subscriber_id" in data - assert f"projects/{CLOUD_PROJECT_ID}/subscriptions" in data["subscriber_id"] - data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": CLOUD_PROJECT_ID, "project_id": PROJECT_ID, + "subscription_name": f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}", + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -240,12 +384,12 @@ async def test_app_credentials( } -async def test_config_flow_restart( - hass: HomeAssistant, oauth, subscriber, setup_platform -) -> None: +@pytest.mark.parametrize( + ("sdm_managed_topic", "device_access_project_id", "cloud_project_id"), + [(True, "new-project-id", "new-cloud-project-id")], +) +async def test_config_flow_restart(hass: HomeAssistant, oauth, subscriber) -> None: """Check with auth implementation is re-initialized when aborting the flow.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -272,20 +416,22 @@ async def test_config_flow_restart( await oauth.async_oauth_web_flow(result, "new-project-id") oauth.async_mock_refresh() - entry = await 
oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic="projects/sdm-prod/topics/enterprise-new-project-id" + ) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") - assert "subscriber_id" in data - assert "projects/new-cloud-project-id/subscriptions" in data["subscriber_id"] - data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": "new-cloud-project-id", "project_id": "new-project-id", + "subscription_name": "projects/new-cloud-project-id/subscriptions/home-assistant-ABCDEF", + "topic_name": "projects/sdm-prod/topics/enterprise-new-project-id", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -294,12 +440,13 @@ async def test_config_flow_restart( } +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_flow_wrong_project_id( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Check the case where the wrong project ids are entered.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -324,20 +471,22 @@ async def test_config_flow_wrong_project_id( await hass.async_block_till_done() oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic="projects/sdm-prod/topics/enterprise-some-project-id" + ) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") - assert "subscriber_id" in data - assert f"projects/{CLOUD_PROJECT_ID}/subscriptions" in data["subscriber_id"] - data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": CLOUD_PROJECT_ID, "project_id": PROJECT_ID, + "subscription_name": "projects/cloud-id-9876/subscriptions/home-assistant-ABCDEF", + "topic_name": "projects/sdm-prod/topics/enterprise-some-project-id", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -346,51 +495,101 @@ async def test_config_flow_wrong_project_id( } +@pytest.mark.parametrize( + ("sdm_managed_topic", "create_subscription_status"), [(True, HTTPStatus.NOT_FOUND)] +) async def test_config_flow_pubsub_configuration_error( hass: HomeAssistant, oauth, - setup_platform, mock_subscriber, ) -> None: """Check full flow fails with configuration error.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - mock_subscriber.create_subscription.side_effect = ConfigurationException result = await oauth.async_configure(result, {"code": "1234"}) - assert result["type"] is FlowResultType.FORM - assert "errors" in result - assert "cloud_project_id" in result["errors"] - assert result["errors"]["cloud_project_id"] == "bad_project_id" + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert result.get("data_schema")({}) == { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + } + + # Select Pub/Sub topic the show available 
subscriptions (none) + result = await oauth.async_configure( + result, + { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("data_schema")({}) == { + "subscription_name": "create_new_subscription", + } + + # Failure when creating the subscription + result = await oauth.async_configure( + result, + { + "subscription_name": "create_new_subscription", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": "pubsub_api_error"} +@pytest.mark.parametrize( + ("sdm_managed_topic", "create_subscription_status"), + [(True, HTTPStatus.INTERNAL_SERVER_ERROR)], +) async def test_config_flow_pubsub_subscriber_error( - hass: HomeAssistant, oauth, setup_platform, mock_subscriber + hass: HomeAssistant, oauth, mock_subscriber ) -> None: """Check full flow with a subscriber error.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - - mock_subscriber.create_subscription.side_effect = SubscriberException() result = await oauth.async_configure(result, {"code": "1234"}) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert result.get("data_schema")({}) == { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + } - assert result["type"] is FlowResultType.FORM - assert "errors" in result - assert "cloud_project_id" in result["errors"] - assert result["errors"]["cloud_project_id"] == "subscriber_error" + # Select Pub/Sub topic then show available subscriptions (none) + result = await oauth.async_configure( + result, + { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("data_schema")({}) == { + "subscription_name": "create_new_subscription", + } + + # Failure when creating the subscription + result = await oauth.async_configure( + result, + { + "subscription_name": "create_new_subscription", + }, + ) + + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": "pubsub_api_error"} -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) +@pytest.mark.parametrize( + ("nest_test_config", "sdm_managed_topic", "device_access_project_id"), + [(TEST_CONFIG_APP_CREDS, True, "project-id-2")], +) async def test_multiple_config_entries( hass: HomeAssistant, oauth, setup_platform ) -> None: @@ -405,7 +604,10 @@ async def test_multiple_config_entries( ) await oauth.async_app_creds_flow(result, project_id="project-id-2") oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result) + result = await oauth.async_configure(result, user_input={}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic="projects/sdm-prod/topics/enterprise-project-id-2" + ) assert entry.title == "Mock Title" assert "token" in entry.data @@ -413,7 +615,9 @@ async def test_multiple_config_entries( assert len(entries) == 2 -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) +@pytest.mark.parametrize( + ("nest_test_config", "sdm_managed_topic"), [(TEST_CONFIG_APP_CREDS, True)] +) async def test_duplicate_config_entries( hass: HomeAssistant, oauth, setup_platform ) -> None: @@ -438,7
+642,9 @@ async def test_duplicate_config_entries( assert result.get("reason") == "already_configured" -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) +@pytest.mark.parametrize( + ("nest_test_config", "sdm_managed_topic"), [(TEST_CONFIG_APP_CREDS, True)] +) async def test_reauth_multiple_config_entries( hass: HomeAssistant, oauth, setup_platform, config_entry ) -> None: @@ -489,12 +695,11 @@ async def test_reauth_multiple_config_entries( assert entry.data.get("extra_data") +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_pubsub_subscription_strip_whitespace( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, oauth, subscriber ) -> None: """Check that project id has whitespace stripped on entry.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -502,8 +707,10 @@ async def test_pubsub_subscription_strip_whitespace( result, cloud_project_id=" " + CLOUD_PROJECT_ID + " " ) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) - + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic="projects/sdm-prod/topics/enterprise-some-project-id" + ) assert entry.title == "Import from configuration.yaml" assert "token" in entry.data entry.data["token"].pop("expires_at") @@ -514,31 +721,59 @@ "type": "Bearer", "expires_in": 60, } - assert "subscriber_id" in entry.data + assert "subscription_name" in entry.data assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID +@pytest.mark.parametrize( + ("sdm_managed_topic", "create_subscription_status"), + [(True, HTTPStatus.UNAUTHORIZED)], +) async def test_pubsub_subscription_auth_failure( - hass: HomeAssistant, oauth, setup_platform, mock_subscriber + hass: HomeAssistant, oauth, mock_subscriber ) -> None: """Check flow that creates a pub/sub subscription.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_subscriber.create_subscription.side_effect = AuthException() - await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() result = await oauth.async_configure(result, {"code": "1234"}) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert result.get("data_schema")({}) == { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + } - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "invalid_access_token" + # Select Pub/Sub topic then show available subscriptions (none) + result = await oauth.async_configure( + result, + { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("data_schema")({}) == { + "subscription_name": "create_new_subscription", + } + + # Failure when creating the subscription + result = await oauth.async_configure( + result, + { + "subscription_name": "create_new_subscription", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("errors") == {"base": "pubsub_api_error"} -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) +@pytest.mark.parametrize( +
("nest_test_config", "sdm_managed_topic"), [(TEST_CONFIG_APP_CREDS, True)] +) async def test_pubsub_subscriber_config_entry_reauth( hass: HomeAssistant, oauth, @@ -568,8 +803,9 @@ async def test_pubsub_subscriber_config_entry_reauth( assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_entry_title_from_home( - hass: HomeAssistant, oauth, setup_platform, subscriber + hass: HomeAssistant, oauth, subscriber ) -> None: """Test that the Google Home name is used for the config entry title.""" @@ -587,23 +823,32 @@ async def test_config_entry_title_from_home( ) ) - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) assert entry.title == "Example Home" assert "token" in entry.data - assert "subscriber_id" in entry.data - assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID + assert entry.data.get("cloud_project_id") == CLOUD_PROJECT_ID + assert ( + entry.data.get("subscription_name") + == f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}" + ) + assert ( + entry.data.get("topic_name") + == f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_entry_title_multiple_homes( - hass: HomeAssistant, oauth, setup_platform, subscriber + hass: HomeAssistant, oauth, subscriber ) -> None: """Test handling of multiple Google Homes authorized.""" @@ -633,24 +878,24 @@ async def test_config_entry_title_multiple_homes( ) ) - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) assert entry.title == "Example Home #1, Example Home #2" +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_title_failure_fallback( - hass: HomeAssistant, oauth, setup_platform, mock_subscriber + hass: HomeAssistant, oauth, mock_subscriber ) -> None: """Test exception handling when determining the structure names.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -658,16 +903,27 @@ async def test_title_failure_fallback( oauth.async_mock_refresh() mock_subscriber.async_get_device_manager.side_effect = AuthException() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) + assert entry.title == "Import from configuration.yaml" assert "token" in entry.data - assert "subscriber_id" in entry.data - assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID + assert entry.data.get("cloud_project_id") == 
CLOUD_PROJECT_ID + assert ( + entry.data.get("subscription_name") + == f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}" + ) + assert ( + entry.data.get("topic_name") + == f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) -async def test_structure_missing_trait( - hass: HomeAssistant, oauth, setup_platform, subscriber -) -> None: +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) +async def test_structure_missing_trait(hass: HomeAssistant, oauth, subscriber) -> None: """Test handling the case where a structure has no name set.""" device_manager = await subscriber.async_get_device_manager() @@ -681,15 +937,16 @@ async def test_structure_missing_trait( ) ) - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) # Fallback to default name assert entry.title == "Import from configuration.yaml" @@ -713,12 +970,13 @@ async def test_dhcp_discovery( assert result.get("reason") == "missing_credentials" +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_dhcp_discovery_with_creds( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Exercise discovery dhcp with no config present (can't run).""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -735,21 +993,23 @@ async def test_dhcp_discovery_with_creds( result = await oauth.async_configure(result, {"project_id": PROJECT_ID}) await oauth.async_oauth_web_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) - await hass.async_block_till_done() + + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") - assert "subscriber_id" in data - assert f"projects/{CLOUD_PROJECT_ID}/subscriptions" in data["subscriber_id"] - data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": CLOUD_PROJECT_ID, "project_id": PROJECT_ID, + "subscription_name": f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}", + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -770,13 +1030,10 @@ async def test_token_error( hass: HomeAssistant, oauth: OAuthFixture, subscriber: FakeSubscriber, - setup_platform: PlatformSetup, status_code: HTTPStatus, error_reason: str, ) -> None: """Check full flow.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -789,3 +1046,133 @@ async def test_token_error( result = await oauth.async_configure(result, user_input=None) assert result.get("type") is FlowResultType.ABORT assert result.get("reason") == error_reason + + +@pytest.mark.parametrize( + 
("user_managed_topics", "subscriptions"), + [ + ( + [f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id"], + [ + ( + f"projects/{CLOUD_PROJECT_ID}/subscriptions/some-subscription-id", + f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id", + ) + ], + ) + ], +) +async def test_existing_topic_and_subscription( + hass: HomeAssistant, + oauth, + subscriber, +) -> None: + """Test selecting existing user managed topic and subscription.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await oauth.async_app_creds_flow(result) + oauth.async_mock_refresh() + + result = await oauth.async_configure(result, None) + entry = await oauth.async_complete_pubsub_flow( + result, + selected_topic=f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id", + selected_subscription=f"projects/{CLOUD_PROJECT_ID}/subscriptions/some-subscription-id", + ) + + data = dict(entry.data) + assert "token" in data + data["token"].pop("expires_in") + data["token"].pop("expires_at") + assert data == { + "sdm": {}, + "auth_implementation": "imported-cred", + "cloud_project_id": CLOUD_PROJECT_ID, + "project_id": PROJECT_ID, + "subscription_name": f"projects/{CLOUD_PROJECT_ID}/subscriptions/some-subscription-id", + "subscriber_id_imported": True, + "topic_name": f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id", + "token": { + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + }, + } + + +async def test_no_eligible_topics( + hass: HomeAssistant, + oauth, + subscriber, +) -> None: + """Test the case where there are no eligible pub/sub topics.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await oauth.async_app_creds_flow(result) + oauth.async_mock_refresh() + + result = await oauth.async_configure(result, None) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub" + assert result.get("errors") == {"base": "no_pubsub_topics"} + + +@pytest.mark.parametrize( + ("list_topics_status"), + [ + (HTTPStatus.INTERNAL_SERVER_ERROR), + ], +) +async def test_list_topics_failure( + hass: HomeAssistant, + oauth, + subscriber, +) -> None: + """Test selecting existing user managed topic and subscription.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await oauth.async_app_creds_flow(result) + oauth.async_mock_refresh() + + result = await oauth.async_configure(result, None) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub" + assert result.get("errors") == {"base": "pubsub_api_error"} + + +@pytest.mark.parametrize( + ("sdm_managed_topic", "list_subscriptions_status"), + [ + (True, HTTPStatus.INTERNAL_SERVER_ERROR), + ], +) +async def test_list_subscriptions_failure( + hass: HomeAssistant, + oauth, + subscriber, +) -> None: + """Test selecting existing user managed topic and subscription.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await oauth.async_app_creds_flow(result) + oauth.async_mock_refresh() + + result = await oauth.async_configure(result, None) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert not result.get("errors") + + # Select Pub/Sub topic the show available subscriptions (none) + result = await oauth.async_configure( + result, + { + "topic_name": 
f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("errors") == {"base": "pubsub_api_error"} diff --git a/tests/components/nest/test_events.py b/tests/components/nest/test_events.py index 643a2614bbc..e746e5f263f 100644 --- a/tests/components/nest/test_events.py +++ b/tests/components/nest/test_events.py @@ -122,28 +122,28 @@ def create_events(events, device_id=DEVICE_ID, timestamp=None): [ ( "sdm.devices.types.DOORBELL", - ["sdm.devices.traits.DoorbellChime"], + ["sdm.devices.traits.DoorbellChime", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.DoorbellChime.Chime", "Doorbell", "doorbell_chime", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraMotion"], + ["sdm.devices.traits.CameraMotion", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.CameraMotion.Motion", "Camera", "camera_motion", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraPerson"], + ["sdm.devices.traits.CameraPerson", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.CameraPerson.Person", "Camera", "camera_person", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraSound"], + ["sdm.devices.traits.CameraSound", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.CameraSound.Sound", "Camera", "camera_sound", @@ -234,6 +234,41 @@ async def test_camera_multiple_event( } +@pytest.mark.parametrize( + "device_traits", + [(["sdm.devices.traits.CameraMotion"])], +) +async def test_media_not_supported( + hass: HomeAssistant, entity_registry: er.EntityRegistry, subscriber, setup_platform +) -> None: + """Test a pubsub message for a camera person event.""" + events = async_capture_events(hass, NEST_EVENT) + await setup_platform() + entry = entity_registry.async_get("camera.front") + assert entry is not None + + event_map = { + "sdm.devices.events.CameraMotion.Motion": { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + }, + } + + timestamp = utcnow() + await subscriber.async_receive_event(create_events(event_map, timestamp=timestamp)) + await hass.async_block_till_done() + + event_time = timestamp.replace(microsecond=0) + assert len(events) == 1 + assert event_view(events[0].data) == { + "device_id": entry.device_id, + "type": "camera_motion", + "timestamp": event_time, + } + # Media fetching not supported by this device + assert "attachment" not in events[0].data + + async def test_unknown_event(hass: HomeAssistant, subscriber, setup_platform) -> None: """Test a pubsub message for an unknown event type.""" events = async_capture_events(hass, NEST_EVENT) diff --git a/tests/components/nest/test_init.py b/tests/components/nest/test_init.py index f3226c936fb..17ddc485e85 100644 --- a/tests/components/nest/test_init.py +++ b/tests/components/nest/test_init.py @@ -24,21 +24,16 @@ import pytest from homeassistant.components.nest import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .common import ( PROJECT_ID, SUBSCRIBER_ID, - TEST_CONFIG_ENTRY_LEGACY, - TEST_CONFIG_LEGACY, - TEST_CONFIGFLOW_APP_CREDS, + TEST_CONFIG_NEW_SUBSCRIPTION, FakeSubscriber, PlatformSetup, YieldFixture, ) -from tests.common import MockConfigEntry - PLATFORM = "sensor" @@ -97,6 +92,19 @@ async def test_setup_success( assert entries[0].state is ConfigEntryState.LOADED +@pytest.mark.parametrize("nest_test_config", 
[(TEST_CONFIG_NEW_SUBSCRIPTION)]) +async def test_setup_success_new_subscription_format( + hass: HomeAssistant, error_caplog: pytest.LogCaptureFixture, setup_platform +) -> None: + """Test successful setup.""" + await setup_platform() + assert not error_caplog.records + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + assert entries[0].state is ConfigEntryState.LOADED + + @pytest.mark.parametrize("subscriber_id", [("invalid-subscriber-format")]) async def test_setup_configuration_failure( hass: HomeAssistant, @@ -171,19 +179,6 @@ async def test_subscriber_auth_failure( assert flows[0]["step_id"] == "reauth_confirm" -@pytest.mark.parametrize("subscriber_id", [(None)]) -async def test_setup_missing_subscriber_id( - hass: HomeAssistant, warning_caplog: pytest.LogCaptureFixture, setup_base_platform -) -> None: - """Test missing subscriber id from configuration.""" - await setup_base_platform() - assert "Configuration option" in warning_caplog.text - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - assert entries[0].state is ConfigEntryState.SETUP_ERROR - - @pytest.mark.parametrize("subscriber_side_effect", [(ConfigurationException())]) async def test_subscriber_configuration_failure( hass: HomeAssistant, @@ -200,18 +195,6 @@ async def test_subscriber_configuration_failure( assert entries[0].state is ConfigEntryState.SETUP_ERROR -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIGFLOW_APP_CREDS]) -async def test_empty_config( - hass: HomeAssistant, error_caplog: pytest.LogCaptureFixture, config, setup_platform -) -> None: - """Test setup is a no-op with not config.""" - await setup_platform() - assert not error_caplog.records - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 0 - - async def test_unload_entry(hass: HomeAssistant, setup_platform) -> None: """Test successful unload of a ConfigEntry.""" await setup_platform() @@ -317,26 +300,3 @@ async def test_migrate_unique_id( assert config_entry.state is ConfigEntryState.LOADED assert config_entry.unique_id == PROJECT_ID - - -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_LEGACY]) -async def test_legacy_works_with_nest_yaml( - hass: HomeAssistant, - config: dict[str, Any], - config_entry: MockConfigEntry, -) -> None: - """Test integration won't start with legacy works with nest yaml config.""" - config_entry.add_to_hass(hass) - assert not await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_ENTRY_LEGACY]) -async def test_legacy_works_with_nest_cleanup( - hass: HomeAssistant, setup_platform -) -> None: - """Test legacy works with nest config entries are silently removed once yaml is removed.""" - await setup_platform() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 0 diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index 4bc3559e308..2526bfdf975 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -17,7 +17,7 @@ from google_nest_sdm.event import EventMessage import numpy as np import pytest -from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_player import BrowseError from homeassistant.components.media_source import ( URI_SCHEME, Unresolvable, @@ -48,6 +48,9 @@ CAMERA_TRAITS = { "customName": DEVICE_NAME, }, "sdm.devices.traits.CameraImage": 
{}, + "sdm.devices.traits.CameraLiveStream": { + "supportedProtocols": ["RTSP"], + }, "sdm.devices.traits.CameraEventImage": {}, "sdm.devices.traits.CameraPerson": {}, "sdm.devices.traits.CameraMotion": {}, @@ -57,7 +60,9 @@ BATTERY_CAMERA_TRAITS = { "customName": DEVICE_NAME, }, "sdm.devices.traits.CameraClipPreview": {}, - "sdm.devices.traits.CameraLiveStream": {}, + "sdm.devices.traits.CameraLiveStream": { + "supportedProtocols": ["WEB_RTC"], + }, "sdm.devices.traits.CameraPerson": {}, "sdm.devices.traits.CameraMotion": {}, } diff --git a/tests/components/netatmo/snapshots/test_climate.ambr b/tests/components/netatmo/snapshots/test_climate.ambr index b9a92882b9e..aeae1fd71c7 100644 --- a/tests/components/netatmo/snapshots/test_climate.ambr +++ b/tests/components/netatmo/snapshots/test_climate.ambr @@ -14,8 +14,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -41,7 +41,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '222452125-DeviceType.OTM', 'unit_of_measurement': None, }) @@ -60,8 +60,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'supported_features': , 'target_temp_step': 0.5, @@ -89,8 +89,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -116,7 +116,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '2940411577-DeviceType.NRV', 'unit_of_measurement': None, }) @@ -135,12 +135,12 @@ ]), 'max_temp': 30, 'min_temp': 7, - 'preset_mode': 'Frost Guard', + 'preset_mode': 'frost_guard', 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'selected_schedule': 'Default', 'supported_features': , @@ -170,8 +170,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -197,7 +197,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '1002003001-DeviceType.BNS', 'unit_of_measurement': None, }) @@ -215,12 +215,12 @@ ]), 'max_temp': 30, 'min_temp': 7, - 'preset_mode': 'Schedule', + 'preset_mode': 'schedule', 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'selected_schedule': 'Default', 'supported_features': , @@ -250,8 +250,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -277,7 +277,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '2833524037-DeviceType.NRV', 'unit_of_measurement': None, }) @@ -296,12 +296,12 @@ ]), 'max_temp': 30, 'min_temp': 7, - 'preset_mode': 'Frost Guard', + 'preset_mode': 'frost_guard', 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'selected_schedule': 'Default', 'supported_features': , @@ -332,8 +332,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -359,7 +359,7 @@ 'platform': 
'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '2746182631-DeviceType.NATherm1', 'unit_of_measurement': None, }) @@ -382,8 +382,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'selected_schedule': 'Default', 'supported_features': , diff --git a/tests/components/netatmo/snapshots/test_diagnostics.ambr b/tests/components/netatmo/snapshots/test_diagnostics.ambr index 35cd0bfbf47..4ea7e30bcf9 100644 --- a/tests/components/netatmo/snapshots/test_diagnostics.ambr +++ b/tests/components/netatmo/snapshots/test_diagnostics.ambr @@ -608,6 +608,8 @@ 'webhook_id': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'netatmo', 'minor_version': 1, 'options': dict({ @@ -644,6 +646,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'netatmo', 'version': 1, diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index bc2a18d918d..ba18c2ca21a 100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -1159,59 +1159,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity[sensor.cold_water_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.cold_water_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#8-12:34:56:00:16:0e#8-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.cold_water_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Cold water Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.cold_water_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.consumption_meter_none-entry] @@ -1412,58 +1360,6 @@ 'state': 'unavailable', }) # --- -# name: test_entity[sensor.ecocompteur_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ecocompteur_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e-12:34:56:00:16:0e-power', - 'unit_of_measurement': , - }) -# 
--- -# name: test_entity[sensor.ecocompteur_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Écocompteur Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ecocompteur_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.gas_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1508,59 +1404,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity[sensor.gas_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gas_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#6-12:34:56:00:16:0e#6-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.gas_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Gas Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.gas_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.home_avg_atmospheric_pressure-entry] @@ -3257,59 +3101,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity[sensor.hot_water_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.hot_water_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#7-12:34:56:00:16:0e#7-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.hot_water_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Hot water Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.hot_water_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.kitchen_atmospheric_pressure-entry] @@ -3896,59 +3688,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity[sensor.line_1_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 
'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_1_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#0-12:34:56:00:16:0e#0-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_1_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Line 1 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.line_1_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_2_none-entry] @@ -3995,59 +3735,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity[sensor.line_2_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_2_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#1-12:34:56:00:16:0e#1-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_2_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Line 2 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.line_2_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_3_none-entry] @@ -4094,59 +3782,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity[sensor.line_3_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_3_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#2-12:34:56:00:16:0e#2-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_3_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 
'Line 3 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.line_3_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_4_none-entry] @@ -4193,59 +3829,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity[sensor.line_4_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_4_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#3-12:34:56:00:16:0e#3-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_4_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Line 4 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.line_4_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_5_none-entry] @@ -4292,59 +3876,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity[sensor.line_5_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_5_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#4-12:34:56:00:16:0e#4-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_5_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Line 5 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.line_5_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.livingroom_atmospheric_pressure-entry] @@ -5622,59 +5154,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity[sensor.total_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.total_power', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#5-12:34:56:00:16:0e#5-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.total_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Total Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.total_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.valve1_battery-entry] diff --git a/tests/components/netatmo/test_camera.py b/tests/components/netatmo/test_camera.py index c7398d64e1d..43904ed8f71 100644 --- a/tests/components/netatmo/test_camera.py +++ b/tests/components/netatmo/test_camera.py @@ -9,7 +9,7 @@ import pytest from syrupy import SnapshotAssertion from homeassistant.components import camera -from homeassistant.components.camera import STATE_STREAMING +from homeassistant.components.camera import CameraState from homeassistant.components.netatmo.const import ( NETATMO_EVENT, SERVICE_SET_CAMERA_LIGHT, @@ -176,7 +176,7 @@ async def test_camera_image_local( cam = hass.states.get(camera_entity_indoor) assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING assert cam.name == "Hall" stream_source = await camera.async_get_stream_source(hass, camera_entity_indoor) @@ -204,7 +204,7 @@ async def test_camera_image_vpn( cam = hass.states.get(camera_entity_indoor) assert cam is not None - assert cam.state == STATE_STREAMING + assert cam.state == CameraState.STREAMING stream_source = await camera.async_get_stream_source(hass, camera_entity_indoor) assert stream_source == stream_uri diff --git a/tests/components/netatmo/test_climate.py b/tests/components/netatmo/test_climate.py index 4b908580346..dc0312f7acd 100644 --- a/tests/components/netatmo/test_climate.py +++ b/tests/components/netatmo/test_climate.py @@ -282,7 +282,7 @@ async def test_service_preset_mode_frost_guard_thermostat( assert hass.states.get(climate_entity_livingroom).state == "auto" assert ( hass.states.get(climate_entity_livingroom).attributes["preset_mode"] - == "Frost Guard" + == "frost_guard" ) # Test service setting the preset mode to "frost guard" @@ -779,7 +779,7 @@ async def test_service_preset_mode_already_boost_valves( assert hass.states.get(climate_entity_entrada).state == "auto" assert ( hass.states.get(climate_entity_entrada).attributes["preset_mode"] - == "Frost Guard" + == "frost_guard" ) assert hass.states.get(climate_entity_entrada).attributes["temperature"] == 7 diff --git a/tests/components/netatmo/test_config_flow.py b/tests/components/netatmo/test_config_flow.py index 29a065c3be3..436f75b12ec 100644 --- a/tests/components/netatmo/test_config_flow.py +++ b/tests/components/netatmo/test_config_flow.py @@ -23,7 +23,7 @@ from homeassistant.helpers import config_entry_oauth2_flow from .conftest import CLIENT_ID -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, start_reauth_flow from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -282,9 +282,7 @@ async def test_reauth( assert len(mock_setup.mock_calls) == 1 
     # Should show form
-    result = await hass.config_entries.flow.async_init(
-        "netatmo", context={"source": config_entries.SOURCE_REAUTH}
-    )
+    result = await start_reauth_flow(hass, new_entry)
     assert result["type"] is FlowResultType.FORM
     assert result["step_id"] == "reauth_confirm"
diff --git a/tests/components/network/test_init.py b/tests/components/network/test_init.py
index 57a12868d0a..dca31106dba 100644
--- a/tests/components/network/test_init.py
+++ b/tests/components/network/test_init.py
@@ -886,3 +886,42 @@ async def test_async_get_announce_addresses_no_source_ip(hass: HomeAssistant) ->
         "172.16.1.5",
         "fe80::dead:beef:dead:beef",
     ]
+
+
+async def test_websocket_network_url(
+    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
+) -> None:
+    """Test the network/url websocket command."""
+    assert await async_setup_component(hass, "network", {})
+
+    client = await hass_ws_client(hass)
+
+    with (
+        patch(
+            "homeassistant.helpers.network._get_internal_url", return_value="internal"
+        ),
+        patch("homeassistant.helpers.network._get_cloud_url", return_value="cloud"),
+    ):
+        await client.send_json({"id": 1, "type": "network/url"})
+        msg = await client.receive_json()
+        assert msg["success"]
+        assert msg["result"] == {
+            "internal": "internal",
+            "external": "cloud",
+            "cloud": "cloud",
+        }
+
+    # Test with no cloud URL
+    with (
+        patch(
+            "homeassistant.helpers.network._get_internal_url", return_value="internal"
+        ),
+    ):
+        await client.send_json({"id": 2, "type": "network/url"})
+        msg = await client.receive_json()
+        assert msg["success"]
+        assert msg["result"] == {
+            "internal": "internal",
+            "external": None,
+            "cloud": None,
+        }
diff --git a/tests/components/nexia/test_init.py b/tests/components/nexia/test_init.py
index 5984a0af721..4e5c5118d6b 100644
--- a/tests/components/nexia/test_init.py
+++ b/tests/components/nexia/test_init.py
@@ -1,15 +1,19 @@
 """The init tests for the nexia platform."""
 
+from unittest.mock import patch
+
 import aiohttp
 
 from homeassistant.components.nexia.const import DOMAIN
 from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.setup import async_setup_component
 
 from .util import async_init_integration
 
+from tests.common import MockConfigEntry
 from tests.typing import WebSocketGenerator
 
@@ -48,3 +52,20 @@ async def test_device_remove_devices(
     )
     response = await client.remove_device(dead_device_entry.id, entry_id)
     assert response["success"]
+
+
+async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None:
+    """Test migrating a 1.1 config entry to 1.2."""
+    with patch("homeassistant.components.nexia.async_setup_entry", return_value=True):
+        entry = MockConfigEntry(
+            domain=DOMAIN,
+            data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"},
+            version=1,
+            minor_version=1,
+            unique_id=123456,
+        )
+        entry.add_to_hass(hass)
+        assert await hass.config_entries.async_setup(entry.entry_id)
+        assert entry.version == 1
+        assert entry.minor_version == 2
+        assert entry.unique_id == "123456"
diff --git a/tests/components/nexia/util.py b/tests/components/nexia/util.py
index 98d5312f0a1..1104ffad63d 100644
--- a/tests/components/nexia/util.py
+++ b/tests/components/nexia/util.py
@@ -54,7 +54,10 @@ async def async_init_integration(
         text=load_fixture(set_fan_speed_fixture),
     )
     entry = MockConfigEntry(
-        domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}
+        domain=DOMAIN,
+        data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"},
+        minor_version=2,
+        unique_id="123456",
     )
     entry.add_to_hass(hass)
 
diff --git a/tests/components/nextbus/__init__.py b/tests/components/nextbus/__init__.py
index 609e0bb574b..e0af11965c4 100644
--- a/tests/components/nextbus/__init__.py
+++ b/tests/components/nextbus/__init__.py
@@ -1 +1,34 @@
 """The tests for the nexbus component."""
+
+from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN
+from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import CONF_STOP
+from homeassistant.core import HomeAssistant
+
+from .const import VALID_AGENCY_TITLE, VALID_ROUTE_TITLE, VALID_STOP_TITLE
+
+from tests.common import MockConfigEntry
+
+
+async def assert_setup_sensor(
+    hass: HomeAssistant,
+    config: dict[str, dict[str, str]],
+    expected_state=ConfigEntryState.LOADED,
+    route_title: str = VALID_ROUTE_TITLE,
+) -> MockConfigEntry:
+    """Set up the sensor and assert it's been created."""
+    unique_id = f"{config[DOMAIN][CONF_AGENCY]}_{config[DOMAIN][CONF_ROUTE]}_{config[DOMAIN][CONF_STOP]}"
+    config_entry = MockConfigEntry(
+        domain=DOMAIN,
+        data=config[DOMAIN],
+        title=f"{VALID_AGENCY_TITLE} {route_title} {VALID_STOP_TITLE}",
+        unique_id=unique_id,
+    )
+    config_entry.add_to_hass(hass)
+
+    await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert config_entry.state is expected_state
+
+    return config_entry
diff --git a/tests/components/nextbus/conftest.py b/tests/components/nextbus/conftest.py
index 231faccf907..3f687989313 100644
--- a/tests/components/nextbus/conftest.py
+++ b/tests/components/nextbus/conftest.py
@@ -1,10 +1,13 @@
 """Test helpers for NextBus tests."""
 
+from collections.abc import Generator
 from typing import Any
-from unittest.mock import MagicMock
+from unittest.mock import MagicMock, patch
 
 import pytest
 
+from .const import BASIC_RESULTS
+
 
 @pytest.fixture(
     params=[
@@ -41,7 +44,7 @@ import pytest
 def route_config_direction(request: pytest.FixtureRequest) -> Any:
     """Generate alternative directions values.
 
-    When only on edirection is returned, it is not returned as a list, but instead an object.
+    When only one direction is returned, it is not returned as a list, but instead an object.
     """
     return request.param
@@ -75,42 +78,74 @@ def mock_nextbus_lists(
             "hidden": False,
             "timestamp": "2024-06-23T03:06:58Z",
         },
+        {
+            "id": "G",
+            "rev": 1057,
+            "title": "F Market & Wharves",
+            "description": "7am-10pm daily",
+            "color": "",
+            "textColor": "",
+            "hidden": False,
+            "timestamp": "2024-06-23T03:06:58Z",
+        },
     ]
-    instance.route_details.return_value = {
-        "id": "F",
-        "rev": 1057,
-        "title": "F Market & Wharves",
-        "description": "7am-10pm daily",
-        "color": "",
-        "textColor": "",
-        "hidden": False,
-        "boundingBox": {},
-        "stops": [
-            {
-                "id": "5184",
-                "lat": 37.8071299,
-                "lon": -122.41732,
-                "name": "Jones St & Beach St",
-                "code": "15184",
-                "hidden": False,
-                "showDestinationSelector": True,
-                "directions": ["F_0_var1", "F_0_var0"],
-            },
-            {
-                "id": "5651",
-                "lat": 37.8071299,
-                "lon": -122.41732,
-                "name": "Jones St & Beach St",
-                "code": "15651",
-                "hidden": False,
-                "showDestinationSelector": True,
-                "directions": ["F_0_var1", "F_0_var0"],
-            },
-        ],
-        "directions": route_config_direction,
-        "paths": [],
-        "timestamp": "2024-06-23T03:06:58Z",
-    }
+    def route_details_side_effect(agency: str, route: str) -> dict:
+        route = route.upper()
+        return {
+            "id": route,
+            "rev": 1057,
+            "title": f"{route} Market & Wharves",
+            "description": "7am-10pm daily",
+            "color": "",
+            "textColor": "",
+            "hidden": False,
+            "boundingBox": {},
+            "stops": [
+                {
+                    "id": "5184",
+                    "lat": 37.8071299,
+                    "lon": -122.41732,
+                    "name": "Jones St & Beach St",
+                    "code": "15184",
+                    "hidden": False,
+                    "showDestinationSelector": True,
+                    "directions": ["F_0_var1", "F_0_var0"],
+                },
+                {
+                    "id": "5651",
+                    "lat": 37.8071299,
+                    "lon": -122.41732,
+                    "name": "Jones St & Beach St",
+                    "code": "15651",
+                    "hidden": False,
+                    "showDestinationSelector": True,
+                    "directions": ["F_0_var1", "F_0_var0"],
+                },
+            ],
+            "directions": route_config_direction,
+            "paths": [],
+            "timestamp": "2024-06-23T03:06:58Z",
+        }
+
+    instance.route_details.side_effect = route_details_side_effect
 
     return instance
+
+
+@pytest.fixture
+def mock_nextbus() -> Generator[MagicMock]:
+    """Create a mock py_nextbus module."""
+    with patch("homeassistant.components.nextbus.coordinator.NextBusClient") as client:
+        yield client
+
+
+@pytest.fixture
+def mock_nextbus_predictions(
+    mock_nextbus: MagicMock,
+) -> Generator[MagicMock]:
+    """Create a mock of NextBusClient predictions."""
+    instance = mock_nextbus.return_value
+    instance.predictions_for_stop.return_value = BASIC_RESULTS
+
+    return instance.predictions_for_stop
diff --git a/tests/components/nextbus/const.py b/tests/components/nextbus/const.py
new file mode 100644
index 00000000000..66eb3635ca9
--- /dev/null
+++ b/tests/components/nextbus/const.py
@@ -0,0 +1,101 @@
+"""Constants for NextBus tests."""
+
+from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN
+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
+from homeassistant.const import CONF_STOP
+
+VALID_AGENCY = "sfmta-cis"
+VALID_ROUTE = "F"
+VALID_STOP = "5184"
+VALID_COORDINATOR_KEY = f"{VALID_AGENCY}-{VALID_STOP}"
+VALID_AGENCY_TITLE = "San Francisco Muni"
+VALID_ROUTE_TITLE = "F-Market & Wharves"
+VALID_STOP_TITLE = "Market St & 7th St"
+SENSOR_ID = "sensor.san_francisco_muni_f_market_wharves_market_st_7th_st"
+
+ROUTE_2 = "G"
+ROUTE_TITLE_2 = "G-Market & Wharves"
+SENSOR_ID_2 = "sensor.san_francisco_muni_g_market_wharves_market_st_7th_st"
+
+PLATFORM_CONFIG = {
+    SENSOR_DOMAIN: {
+        "platform": DOMAIN,
+        CONF_AGENCY: VALID_AGENCY,
+        CONF_ROUTE: VALID_ROUTE,
+        CONF_STOP: VALID_STOP,
+    },
+}
+
+
+CONFIG_BASIC = {
+    DOMAIN: {
+        CONF_AGENCY: VALID_AGENCY,
+        CONF_ROUTE: VALID_ROUTE,
+        CONF_STOP: VALID_STOP,
+    }
+}
+
+CONFIG_BASIC_2 = {
+    DOMAIN: {
+        CONF_AGENCY: VALID_AGENCY,
+        CONF_ROUTE: ROUTE_2,
+        CONF_STOP: VALID_STOP,
+    }
+}
+
+BASIC_RESULTS = [
+    {
+        "route": {
+            "title": VALID_ROUTE_TITLE,
+            "id": VALID_ROUTE,
+        },
+        "stop": {
+            "name": VALID_STOP_TITLE,
+            "id": VALID_STOP,
+        },
+        "values": [
+            {"minutes": 1, "timestamp": 1553807371000},
+            {"minutes": 2, "timestamp": 1553807372000},
+            {"minutes": 3, "timestamp": 1553807373000},
+            {"minutes": 10, "timestamp": 1553807380000},
+        ],
+    },
+    {
+        "route": {
+            "title": ROUTE_TITLE_2,
+            "id": ROUTE_2,
+        },
+        "stop": {
+            "name": VALID_STOP_TITLE,
+            "id": VALID_STOP,
+        },
+        "values": [
+            {"minutes": 90, "timestamp": 1553807379000},
+        ],
+    },
+]
+
+NO_UPCOMING = [
+    {
+        "route": {
+            "title": VALID_ROUTE_TITLE,
+            "id": VALID_ROUTE,
+        },
+        "stop": {
+            "name": VALID_STOP_TITLE,
+            "id": VALID_STOP,
+        },
+        "values": [],
+    },
+    {
+        "route": {
+            "title": ROUTE_TITLE_2,
+            "id": ROUTE_2,
+        },
+        "stop": {
+            "name": VALID_STOP_TITLE,
+            "id": VALID_STOP,
+        },
+        "values": [],
+    },
+]
diff --git a/tests/components/nextbus/test_init.py b/tests/components/nextbus/test_init.py
new file mode 100644
index 00000000000..d44b8d1ecc0
--- /dev/null
+++ b/tests/components/nextbus/test_init.py
@@ -0,0 +1,27 @@
+"""The init tests for the nextbus component."""
+
+from unittest.mock import MagicMock
+from urllib.error import HTTPError
+
+from homeassistant.components.nextbus.coordinator import NextBusHTTPError
+from homeassistant.config_entries import ConfigEntryState
+from homeassistant.core import HomeAssistant
+
+from . import assert_setup_sensor
+from .const import CONFIG_BASIC
+
+
+async def test_setup_retry(
+    hass: HomeAssistant,
+    mock_nextbus: MagicMock,
+    mock_nextbus_lists: MagicMock,
+    mock_nextbus_predictions: MagicMock,
+) -> None:
+    """Verify that setup is retried when predictions fail with an HTTP error."""
+
+    mock_nextbus_predictions.side_effect = NextBusHTTPError(
+        "failed", HTTPError("url", 500, "error", MagicMock(), None)
+    )
+    await assert_setup_sensor(
+        hass, CONFIG_BASIC, expected_state=ConfigEntryState.SETUP_RETRY
+    )
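Aside on the retry path exercised by test_setup_retry above: a config entry ends up in ConfigEntryState.SETUP_RETRY when its async_setup_entry raises ConfigEntryNotReady, and coordinator-based integrations usually get there via DataUpdateCoordinator.async_config_entry_first_refresh(), which converts a failed first refresh into ConfigEntryNotReady. The sketch below is illustrative only, not the actual homeassistant/components/nextbus setup code, and the _async_first_fetch helper is hypothetical.

# Illustrative sketch only -- not the nextbus implementation.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady


async def _async_first_fetch(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Hypothetical stand-in for the integration's first upstream request."""
    raise OSError("HTTP 500")  # simulate the mocked NextBusHTTPError scenario


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry, deferring setup while the backend is unreachable."""
    try:
        await _async_first_fetch(hass, entry)
    except OSError as err:
        # Raising ConfigEntryNotReady moves the entry to
        # ConfigEntryState.SETUP_RETRY and schedules an automatic retry.
        raise ConfigEntryNotReady("NextBus API not reachable") from err
    return True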
diff --git a/tests/components/nextbus/test_sensor.py b/tests/components/nextbus/test_sensor.py
index dd0346c3e7a..04140a17c4f 100644
--- a/tests/components/nextbus/test_sensor.py
+++ b/tests/components/nextbus/test_sensor.py
@@ -1,121 +1,36 @@
 """The tests for the nexbus sensor component."""
 
-from collections.abc import Generator
 from copy import deepcopy
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock
 from urllib.error import HTTPError
 
+from freezegun.api import FrozenDateTimeFactory
 from py_nextbus.client import NextBusFormatError, NextBusHTTPError
 import pytest
 
-from homeassistant.components import sensor
-from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN
+from homeassistant.components.nextbus.const import DOMAIN
 from homeassistant.components.nextbus.coordinator import NextBusDataUpdateCoordinator
 from homeassistant.config_entries import ConfigEntryState
-from homeassistant.const import CONF_NAME, CONF_STOP
+from homeassistant.const import CONF_NAME
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import UpdateFailed
 
-from tests.common import MockConfigEntry
+from . import assert_setup_sensor
+from .const import (
+    BASIC_RESULTS,
+    CONFIG_BASIC,
+    CONFIG_BASIC_2,
+    NO_UPCOMING,
+    ROUTE_TITLE_2,
+    SENSOR_ID,
+    SENSOR_ID_2,
+    VALID_AGENCY,
+    VALID_COORDINATOR_KEY,
+    VALID_ROUTE_TITLE,
+    VALID_STOP_TITLE,
+)
-VALID_AGENCY = "sfmta-cis"
-VALID_ROUTE = "F"
-VALID_STOP = "5184"
-VALID_AGENCY_TITLE = "San Francisco Muni"
-VALID_ROUTE_TITLE = "F-Market & Wharves"
-VALID_STOP_TITLE = "Market St & 7th St"
-SENSOR_ID = "sensor.san_francisco_muni_f_market_wharves_market_st_7th_st"
-
-PLATFORM_CONFIG = {
-    sensor.DOMAIN: {
-        "platform": DOMAIN,
-        CONF_AGENCY: VALID_AGENCY,
-        CONF_ROUTE: VALID_ROUTE,
-        CONF_STOP: VALID_STOP,
-    },
-}
-
-
-CONFIG_BASIC = {
-    DOMAIN: {
-        CONF_AGENCY: VALID_AGENCY,
-        CONF_ROUTE: VALID_ROUTE,
-        CONF_STOP: VALID_STOP,
-    }
-}
-
-BASIC_RESULTS = [
-    {
-        "route": {
-            "title": VALID_ROUTE_TITLE,
-            "id": VALID_ROUTE,
-        },
-        "stop": {
-            "name": VALID_STOP_TITLE,
-            "id": VALID_STOP,
-        },
-        "values": [
-            {"minutes": 1, "timestamp": 1553807371000},
-            {"minutes": 2, "timestamp": 1553807372000},
-            {"minutes": 3, "timestamp": 1553807373000},
-            {"minutes": 10, "timestamp": 1553807380000},
-        ],
-    }
-]
-
-NO_UPCOMING = [
-    {
-        "route": {
-            "title": VALID_ROUTE_TITLE,
-            "id": VALID_ROUTE,
-        },
-        "stop": {
-            "name": VALID_STOP_TITLE,
-            "id": VALID_STOP,
-        },
-        "values": [],
-    }
-]
-
-
-@pytest.fixture
-def mock_nextbus() -> Generator[MagicMock]:
-    """Create a mock py_nextbus module."""
-    with patch("homeassistant.components.nextbus.coordinator.NextBusClient") as client:
-        yield client
-
-
-@pytest.fixture
-def mock_nextbus_predictions(
-    mock_nextbus: MagicMock,
-) -> Generator[MagicMock]:
-    """Create a mock of NextBusClient predictions."""
-    instance = mock_nextbus.return_value
-    instance.predictions_for_stop.return_value = BASIC_RESULTS
-
-    return instance.predictions_for_stop
-
-
-async def assert_setup_sensor(
-    hass: HomeAssistant,
-    config: dict[str, dict[str, str]],
-    expected_state=ConfigEntryState.LOADED,
-) -> MockConfigEntry:
-    """Set up the sensor and assert it's been created."""
-    config_entry = MockConfigEntry(
-        domain=DOMAIN,
-        data=config[DOMAIN],
-        title=f"{VALID_AGENCY_TITLE} {VALID_ROUTE_TITLE} {VALID_STOP_TITLE}",
-        unique_id=f"{VALID_AGENCY}_{VALID_ROUTE}_{VALID_STOP}",
-    )
-    config_entry.add_to_hass(hass)
-
-    await hass.config_entries.async_setup(config_entry.entry_id)
-    await hass.async_block_till_done()
-
-    assert config_entry.state is expected_state
-
-    return config_entry
+from tests.common import async_fire_time_changed
 
 
 async def test_predictions(
@@ -153,7 +68,7 @@ async def test_prediction_exceptions(
 ) -> None:
     """Test that some coodinator exceptions raise UpdateFailed exceptions."""
     await assert_setup_sensor(hass, CONFIG_BASIC)
-    coordinator: NextBusDataUpdateCoordinator = hass.data[DOMAIN][VALID_AGENCY]
+    coordinator: NextBusDataUpdateCoordinator = hass.data[DOMAIN][VALID_COORDINATOR_KEY]
     mock_nextbus_predictions.side_effect = client_exception
     with pytest.raises(UpdateFailed):
         await coordinator._async_update_data()
@@ -205,3 +120,54 @@ async def test_verify_no_upcoming(
     assert state is not None
     assert state.attributes["upcoming"] == "No upcoming predictions"
     assert state.state == "unknown"
+
+
+async def test_unload_entry(
+    hass: HomeAssistant,
+    mock_nextbus: MagicMock,
+    mock_nextbus_lists: MagicMock,
+    mock_nextbus_predictions: MagicMock,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test that the sensor can be unloaded."""
+    config_entry1 = await assert_setup_sensor(hass, CONFIG_BASIC)
+    await assert_setup_sensor(hass, CONFIG_BASIC_2, route_title=ROUTE_TITLE_2)
+
+    # Verify the first sensor
+    state = hass.states.get(SENSOR_ID)
+    assert state is not None
+    assert state.state == "2019-03-28T21:09:31+00:00"
+    assert state.attributes["agency"] == VALID_AGENCY
+    assert state.attributes["route"] == VALID_ROUTE_TITLE
+    assert state.attributes["stop"] == VALID_STOP_TITLE
+    assert state.attributes["upcoming"] == "1, 2, 3, 10"
+
+    # Verify the second sensor
+    state = hass.states.get(SENSOR_ID_2)
+    assert state is not None
+    assert state.state == "2019-03-28T21:09:39+00:00"
+    assert state.attributes["agency"] == VALID_AGENCY
+    assert state.attributes["route"] == ROUTE_TITLE_2
+    assert state.attributes["stop"] == VALID_STOP_TITLE
+    assert state.attributes["upcoming"] == "90"
+
+    # Update mock to return new predictions
+    new_predictions = deepcopy(BASIC_RESULTS)
+    new_predictions[1]["values"] = [{"minutes": 5, "timestamp": 1553807375000}]
+    mock_nextbus_predictions.return_value = new_predictions
+
+    # Unload config entry 1
+    await hass.config_entries.async_unload(config_entry1.entry_id)
+    await hass.async_block_till_done()
+    assert config_entry1.state is ConfigEntryState.NOT_LOADED
+
+    # Skip ahead in time
+    freezer.tick(120)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done(wait_background_tasks=True)
+
+    # Check update for new predictions
+    state = hass.states.get(SENSOR_ID_2)
+    assert state is not None
+    assert state.attributes["upcoming"] == "5"
+    assert state.state == "2019-03-28T21:09:35+00:00"
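The literal states asserted in test_unload_entry (for example "2019-03-28T21:09:35+00:00") are the fixture's epoch-millisecond prediction timestamps rendered as UTC ISO-8601 strings, which is what the sensor state assertions above rely on. A quick standalone check of that conversion:

from datetime import datetime, timezone

# 1553807375000 ms is the "minutes": 5 prediction injected after the unload.
assert (
    datetime.fromtimestamp(1553807375000 / 1000, tz=timezone.utc).isoformat()
    == "2019-03-28T21:09:35+00:00"
)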
diff --git a/tests/components/nextcloud/__init__.py b/tests/components/nextcloud/__init__.py
index e2102ed8c25..4bc5a041650 100644
--- a/tests/components/nextcloud/__init__.py
+++ b/tests/components/nextcloud/__init__.py
@@ -1 +1,38 @@
 """Tests for the Nextcloud integration."""
+
+from unittest.mock import Mock, patch
+
+from homeassistant.components.nextcloud.const import DOMAIN
+from homeassistant.const import CONF_URL
+from homeassistant.core import HomeAssistant
+
+from .const import MOCKED_ENTRY_ID
+
+from tests.common import MockConfigEntry
+
+
+def mock_config_entry(config: dict) -> MockConfigEntry:
+    """Return a mocked config entry."""
+    return MockConfigEntry(
+        domain=DOMAIN, title=config[CONF_URL], data=config, entry_id=MOCKED_ENTRY_ID
+    )
+
+
+async def init_integration(
+    hass: HomeAssistant, config: dict, data: dict
+) -> MockConfigEntry:
+    """Set up the nextcloud integration."""
+    entry = mock_config_entry(config)
+    entry.add_to_hass(hass)
+
+    with (
+        patch(
+            "homeassistant.components.nextcloud.NextcloudMonitor",
+        ) as mock_nextcloud_monitor,
+    ):
+        mock_nextcloud_monitor.update = Mock(return_value=True)
+        mock_nextcloud_monitor.return_value.data = data
+        assert await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+
+    return entry
diff --git a/tests/components/nextcloud/conftest.py b/tests/components/nextcloud/conftest.py
index cf3eda55fe1..3234e3773b8 100644
--- a/tests/components/nextcloud/conftest.py
+++ b/tests/components/nextcloud/conftest.py
@@ -1,19 +1,11 @@
 """Fixtrues for the Nextcloud integration tests."""
 
 from collections.abc import Generator
-from unittest.mock import AsyncMock, Mock, patch
+from unittest.mock import AsyncMock, patch
 
 import pytest
 
 
-@pytest.fixture
-def mock_nextcloud_monitor() -> Mock:
-    """Mock of NextcloudMonitor."""
-    return Mock(
-        update=Mock(return_value=True),
-    )
-
-
 @pytest.fixture
 def mock_setup_entry() -> Generator[AsyncMock]:
     """Override async_setup_entry."""
diff --git
a/tests/components/nextcloud/const.py b/tests/components/nextcloud/const.py new file mode 100644 index 00000000000..2d328292b6f --- /dev/null +++ b/tests/components/nextcloud/const.py @@ -0,0 +1,182 @@ +"""Constants for nextcloud tests.""" + +from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL + +MOCKED_ENTRY_ID = "1234567890abcdef" + +VALID_CONFIG = { + CONF_URL: "https://my.nc_url.local", + CONF_USERNAME: "nc_user", + CONF_PASSWORD: "nc_pass", + CONF_VERIFY_SSL: True, +} + +NC_DATA = { + "nextcloud": { + "system": { + "version": "28.0.4.1", + "theme": "", + "enable_avatars": "yes", + "enable_previews": "yes", + "memcache.local": "\\OC\\Memcache\\APCu", + "memcache.distributed": "none", + "filelocking.enabled": "yes", + "memcache.locking": "none", + "debug": "no", + "freespace": 32769138688, + "cpuload": [2.06640625, 1.58447265625, 1.45263671875], + "mem_total": 30728192, + "mem_free": 6753280, + "swap_total": 10484736, + "swap_free": 10484736, + "apps": { + "num_installed": 41, + "num_updates_available": 0, + "app_updates": [], + }, + "update": {"lastupdatedat": 1713048517, "available": False}, + }, + "storage": { + "num_users": 2, + "num_files": 6783, + "num_storages": 4, + "num_storages_local": 1, + "num_storages_home": 2, + "num_storages_other": 1, + }, + "shares": { + "num_shares": 2, + "num_shares_user": 0, + "num_shares_groups": 0, + "num_shares_link": 2, + "num_shares_mail": 0, + "num_shares_room": 0, + "num_shares_link_no_password": 2, + "num_fed_shares_sent": 0, + "num_fed_shares_received": 1, + "permissions_3_17": 1, + "permissions_3_31": 1, + }, + }, + "server": { + "webserver": "Apache/2.4.57 (Debian)", + "php": { + "version": "8.2.18", + "memory_limit": 536870912, + "max_execution_time": 3600, + "upload_max_filesize": 536870912, + "opcache_revalidate_freq": 60, + "opcache": { + "opcache_enabled": True, + "cache_full": False, + "restart_pending": False, + "restart_in_progress": False, + "memory_usage": { + "used_memory": 72027112, + "free_memory": 62190616, + "wasted_memory": 0, + "current_wasted_percentage": 0, + }, + "interned_strings_usage": { + "buffer_size": 33554432, + "used_memory": 12630360, + "free_memory": 20924072, + "number_of_strings": 69242, + }, + "opcache_statistics": { + "num_cached_scripts": 1406, + "num_cached_keys": 2654, + "max_cached_keys": 16229, + "hits": 9739971, + "start_time": 1722222008, + "last_restart_time": 0, + "oom_restarts": 0, + "hash_restarts": 0, + "manual_restarts": 0, + "misses": 1406, + "blacklist_misses": 0, + "blacklist_miss_ratio": 0, + "opcache_hit_rate": 99.9855667222406, + }, + "jit": { + "enabled": True, + "on": True, + "kind": 5, + "opt_level": 5, + "opt_flags": 6, + "buffer_size": 134217712, + "buffer_free": 133190688, + }, + }, + "apcu": { + "cache": { + "num_slots": 4099, + "ttl": 0, + "num_hits": 590911, + "num_misses": 55250, + "num_inserts": 55421, + "num_entries": 102, + "expunges": 0, + "start_time": 1722222008, + "mem_size": 175296, + "memory_type": "mmap", + }, + "sma": {"num_seg": 1, "seg_size": 33554312, "avail_mem": 33342368}, + }, + "extensions": [ + "Core", + "date", + "libxml", + "openssl", + "pcre", + "sqlite3", + "zlib", + "ctype", + "curl", + "dom", + "fileinfo", + "filter", + "hash", + "iconv", + "json", + "mbstring", + "SPL", + "session", + "PDO", + "pdo_sqlite", + "standard", + "posix", + "random", + "Reflection", + "Phar", + "SimpleXML", + "tokenizer", + "xml", + "xmlreader", + "xmlwriter", + "mysqlnd", + "apache2handler", + "apcu", + "bcmath", + "exif", + "ftp", + "gd", 
+ "gmp", + "imagick", + "intl", + "ldap", + "memcached", + "pcntl", + "pdo_mysql", + "pdo_pgsql", + "redis", + "sodium", + "sysvsem", + "zip", + "Zend OPcache", + ], + }, + "database": {"type": "sqlite3", "version": "3.40.1", "size": "4784128"}, + }, + "activeUsers": {"last5minutes": 0, "last1hour": 0, "last24hours": 0}, +} diff --git a/tests/components/nextcloud/snapshots/test_binary_sensor.ambr b/tests/components/nextcloud/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..1831419af52 --- /dev/null +++ b/tests/components/nextcloud/snapshots/test_binary_sensor.ambr @@ -0,0 +1,277 @@ +# serializer version: 1 +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_avatars_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_avatars_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Avatars enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_enable_avatars', + 'unique_id': '1234567890abcdef#system_enable_avatars', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_avatars_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Avatars enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_avatars_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_debug_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_debug_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Debug enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_debug', + 'unique_id': '1234567890abcdef#system_debug', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_debug_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Debug enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_debug_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_filelocking_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_filelocking_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Filelocking enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_filelocking_enabled', + 'unique_id': '1234567890abcdef#system_filelocking.enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_filelocking_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Filelocking enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_filelocking_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT active', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_on', + 'unique_id': '1234567890abcdef#jit_on', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT active', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_enabled', + 'unique_id': '1234567890abcdef#jit_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_previews_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_previews_enabled', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Previews enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_enable_previews', + 'unique_id': '1234567890abcdef#system_enable_previews', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_previews_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Previews enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_previews_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextcloud/snapshots/test_config_flow.ambr b/tests/components/nextcloud/snapshots/test_config_flow.ambr index 06c4ce216db..e87db0a25c0 100644 --- a/tests/components/nextcloud/snapshots/test_config_flow.ambr +++ b/tests/components/nextcloud/snapshots/test_config_flow.ambr @@ -2,7 +2,7 @@ # name: test_reauth dict({ 'password': 'other_password', - 'url': 'nc_url', + 'url': 'https://my.nc_url.local', 'username': 'other_user', 'verify_ssl': True, }) @@ -10,7 +10,7 @@ # name: test_user_create_entry dict({ 'password': 'nc_pass', - 'url': 'nc_url', + 'url': 'https://my.nc_url.local', 'username': 'nc_user', 'verify_ssl': True, }) diff --git a/tests/components/nextcloud/snapshots/test_sensor.ambr b/tests/components/nextcloud/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c49ba3496da --- /dev/null +++ b/tests/components/nextcloud/snapshots/test_sensor.ambr @@ -0,0 +1,3973 @@ +# serializer version: 1 +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_5_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_5_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of active users last 5 minutes', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_activeusers_last5minutes', + 'unique_id': '1234567890abcdef#activeUsers_last5minutes', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_5_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of active users last 5 minutes', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_5_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_day', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of active users last day', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_activeusers_last24hours', + 'unique_id': '1234567890abcdef#activeUsers_last24hours', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of active users last day', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_hour-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_hour', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of active users last hour', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_activeusers_last1hour', + 'unique_id': '1234567890abcdef#activeUsers_last1hour', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_hour-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of active users last hour', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_hour', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_files-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_files', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of files', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_files', + 'unique_id': '1234567890abcdef#storage_num_files', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_files-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of files', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_files', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6783', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_amount_of_group_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_group_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of group shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_groups', + 'unique_id': '1234567890abcdef#shares_num_shares_groups', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_group_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of group shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_group_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_link_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_link_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of link shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_link', + 'unique_id': '1234567890abcdef#shares_num_shares_link', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_link_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of link shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_link_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_local_storages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_local_storages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of local storages', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages_local', + 'unique_id': '1234567890abcdef#storage_num_storages_local', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_local_storages-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of local storages', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_local_storages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_mail_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_mail_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of mail shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_mail', + 'unique_id': '1234567890abcdef#shares_num_shares_mail', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_mail_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of mail shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_mail_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_other_storages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_other_storages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of other storages', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages_other', + 'unique_id': '1234567890abcdef#storage_num_storages_other', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_other_storages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of other storages', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_other_storages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_passwordless_link_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_passwordless_link_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of passwordless 
link shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_link_no_password', + 'unique_id': '1234567890abcdef#shares_num_shares_link_no_password', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_passwordless_link_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of passwordless link shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_passwordless_link_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_room_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_room_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of room shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_room', + 'unique_id': '1234567890abcdef#shares_num_shares_room', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_room_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of room shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_room_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares', + 'unique_id': '1234567890abcdef#shares_num_shares', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_received-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_received', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of shares received', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_fed_shares_received', + 'unique_id': '1234567890abcdef#shares_num_fed_shares_received', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_received-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of shares received', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_received', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_sent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_sent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of shares sent', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_fed_shares_sent', + 'unique_id': '1234567890abcdef#shares_num_fed_shares_sent', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_sent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of shares sent', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_sent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of storages', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages', + 'unique_id': '1234567890abcdef#storage_num_storages', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of storages', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages_at_home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages_at_home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of storages at home', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages_home', + 'unique_id': '1234567890abcdef#storage_num_storages_home', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages_at_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of storages at home', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages_at_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_user', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of user', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_users', + 'unique_id': '1234567890abcdef#storage_num_users', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of user', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_user', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_user_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of user shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_user', + 'unique_id': '1234567890abcdef#shares_num_shares_user', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'my.nc_url.local Amount of user shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_user_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_apps_installed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_apps_installed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Apps installed', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_apps_num_installed', + 'unique_id': '1234567890abcdef#system_apps_num_installed', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_apps_installed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Apps installed', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_apps_installed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '41', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_expunges-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_expunges', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache expunges', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_expunges', + 'unique_id': '1234567890abcdef#cache_expunges', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_expunges-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache expunges', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_expunges', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_memory_type', + 'unique_id': '1234567890abcdef#cache_memory_type', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_cache_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache memory', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'mmap', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cache memory size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_mem_size', + 'unique_id': '1234567890abcdef#cache_mem_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Cache memory size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.175296', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_entires-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_entires', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of entires', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_entries', + 'unique_id': '1234567890abcdef#cache_num_entries', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_entires-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of entires', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_entires', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '102', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_hits-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_hits', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of hits', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_hits', + 'unique_id': '1234567890abcdef#cache_num_hits', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_hits-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of hits', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_hits', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '590911', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_inserts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_inserts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of inserts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_inserts', + 'unique_id': '1234567890abcdef#cache_num_inserts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_inserts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of inserts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_inserts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55421', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_misses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_misses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of misses', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_misses', + 'unique_id': '1234567890abcdef#cache_num_misses', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_misses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of misses', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_misses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55250', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_slots-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_slots', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of slots', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_slots', + 'unique_id': '1234567890abcdef#cache_num_slots', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_slots-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of slots', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_slots', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4099', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_start_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_start_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cache start time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_start_time', + 'unique_id': '1234567890abcdef#cache_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_start_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my.nc_url.local Cache start time', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_start_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-07-29T03:00:08+00:00', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_ttl-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_ttl', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache ttl', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_ttl', + 'unique_id': '1234567890abcdef#cache_ttl', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_ttl-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache ttl', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_ttl', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_15_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_15_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU Load last 15 minutes', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_cpuload_15', + 'unique_id': '1234567890abcdef#system_cpuload_15', + 'unit_of_measurement': 'load', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_15_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local CPU Load last 15 minutes', + 'unit_of_measurement': 'load', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_15_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.45263671875', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_1_minute-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_1_minute', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU Load last 1 minute', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_cpuload_1', + 'unique_id': '1234567890abcdef#system_cpuload_1', + 'unit_of_measurement': 'load', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_1_minute-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local CPU Load last 1 minute', + 'unit_of_measurement': 'load', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_1_minute', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.06640625', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_5_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_5_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU Load last 5 minutes', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_cpuload_5', + 'unique_id': '1234567890abcdef#system_cpuload_5', + 'unit_of_measurement': 'load', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_5_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'my.nc_url.local CPU Load last 5 minutes', + 'unit_of_measurement': 'load', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_5_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.58447265625', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_database_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Database size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_database_size', + 'unique_id': '1234567890abcdef#database_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Database size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_database_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.784128', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_database_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Database type', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_database_type', + 'unique_id': '1234567890abcdef#database_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Database type', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_database_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'sqlite3', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_database_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Database version', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_database_version', + 'unique_id': 
'1234567890abcdef#database_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Database version', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_database_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.40.1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_free_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_mem_free', + 'unique_id': '1234567890abcdef#system_mem_free', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Free memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_free_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.75328', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free space', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_freespace', + 'unique_id': '1234567890abcdef#system_freespace', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Free space', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.769138688', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_swap_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_free_swap_memory', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free swap memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_swap_free', + 'unique_id': '1234567890abcdef#system_swap_free', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_swap_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Free swap memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_free_swap_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.484736', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_buffer_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_buffer_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Interned buffer size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_buffer_size', + 'unique_id': '1234567890abcdef#interned_strings_usage_buffer_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_buffer_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Interned buffer size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_buffer_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33.554432', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_free_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_free_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Interned free memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_free_memory', + 'unique_id': '1234567890abcdef#interned_strings_usage_free_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_free_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'data_size', + 'friendly_name': 'my.nc_url.local Interned free memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_free_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.924072', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_number_of_strings-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_number_of_strings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Interned number of strings', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_number_of_strings', + 'unique_id': '1234567890abcdef#interned_strings_usage_number_of_strings', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_number_of_strings-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Interned number of strings', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_number_of_strings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '69242', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_used_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_used_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Interned used memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_used_memory', + 'unique_id': '1234567890abcdef#interned_strings_usage_used_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_used_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Interned used memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_used_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.63036', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_free-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_free', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'JIT buffer free', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_buffer_free', + 'unique_id': '1234567890abcdef#jit_buffer_free', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_free-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local JIT buffer free', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_free', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '133.190688', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'JIT buffer size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_buffer_size', + 'unique_id': '1234567890abcdef#jit_buffer_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local JIT buffer size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '134.217712', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_kind-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_kind', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT kind', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_kind', + 'unique_id': '1234567890abcdef#jit_kind', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_kind-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT kind', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_kind', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_flags-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_flags', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT opt flags', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_opt_flags', + 'unique_id': '1234567890abcdef#jit_opt_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_flags-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT opt flags', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_flags', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT opt level', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_opt_level', + 'unique_id': '1234567890abcdef#jit_opt_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT opt level', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_miss_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_miss_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache blacklist miss ratio', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_blacklist_miss_ratio', + 'unique_id': '1234567890abcdef#opcache_statistics_blacklist_miss_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_miss_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache blacklist miss ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_miss_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_misses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_misses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache blacklist misses', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_blacklist_misses', + 'unique_id': '1234567890abcdef#opcache_statistics_blacklist_misses', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_misses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache blacklist misses', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_misses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_keys-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_keys', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache cached keys', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_num_cached_keys', + 'unique_id': '1234567890abcdef#opcache_statistics_num_cached_keys', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_keys-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache cached keys', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_keys', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2654', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_scripts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_scripts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache cached scripts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_num_cached_scripts', + 'unique_id': '1234567890abcdef#opcache_statistics_num_cached_scripts', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_scripts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache cached scripts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_scripts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1406', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_current_wasted_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_current_wasted_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache current wasted percentage', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_current_wasted_percentage', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_current_wasted_percentage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_current_wasted_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache current wasted percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_current_wasted_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_free_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_free_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache free memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_free_memory', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_free_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_free_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Opcache free memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_free_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '62.190616', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hash_restarts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hash_restarts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache hash restarts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_hash_restarts', + 'unique_id': '1234567890abcdef#opcache_statistics_hash_restarts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hash_restarts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache hash restarts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hash_restarts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hit_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hit_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache hit rate', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_opcache_hit_rate', + 'unique_id': '1234567890abcdef#opcache_statistics_opcache_hit_rate', + 'unit_of_measurement': '%', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hit_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache hit rate', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hit_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '99.9855667222406', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hits-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hits', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache hits', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_hits', + 'unique_id': '1234567890abcdef#opcache_statistics_hits', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hits-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache hits', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hits', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '9739971', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_last_restart_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_last_restart_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache last restart time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_last_restart_time', + 'unique_id': '1234567890abcdef#opcache_statistics_last_restart_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_last_restart_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my.nc_url.local Opcache last restart time', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_last_restart_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:00+00:00', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_manual_restarts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_manual_restarts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache manual restarts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_manual_restarts', + 'unique_id': '1234567890abcdef#opcache_statistics_manual_restarts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_manual_restarts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache manual restarts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_manual_restarts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_max_cached_keys-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_max_cached_keys', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache max cached keys', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_max_cached_keys', + 'unique_id': 
'1234567890abcdef#opcache_statistics_max_cached_keys', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_max_cached_keys-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache max cached keys', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_max_cached_keys', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16229', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_misses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_misses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache misses', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_misses', + 'unique_id': '1234567890abcdef#opcache_statistics_misses', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_misses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache misses', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_misses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1406', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_out_of_memory_restarts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_out_of_memory_restarts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache out of memory restarts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_oom_restarts', + 'unique_id': '1234567890abcdef#opcache_statistics_oom_restarts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_out_of_memory_restarts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache out of memory restarts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_out_of_memory_restarts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_start_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_start_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache start time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_start_time', + 'unique_id': '1234567890abcdef#opcache_statistics_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_start_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my.nc_url.local Opcache start time', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_start_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-07-29T03:00:08+00:00', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_used_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_used_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache used memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_used_memory', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_used_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_used_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Opcache used memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_used_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '72.027112', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_wasted_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_wasted_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache wasted memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_wasted_memory', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_wasted_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_wasted_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Opcache wasted memory', + 'unit_of_measurement': , + }), + 'context': , 
+ 'entity_id': 'sensor.my_nc_url_local_opcache_wasted_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_max_execution_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_max_execution_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PHP max execution time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_max_execution_time', + 'unique_id': '1234567890abcdef#server_php_max_execution_time', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_max_execution_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'my.nc_url.local PHP max execution time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_max_execution_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3600', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_memory_limit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_memory_limit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PHP memory limit', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_memory_limit', + 'unique_id': '1234567890abcdef#server_php_memory_limit', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_memory_limit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local PHP memory limit', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_memory_limit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '536.870912', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_upload_maximum_filesize-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_upload_maximum_filesize', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'PHP upload maximum filesize', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_upload_max_filesize', + 'unique_id': '1234567890abcdef#server_php_upload_max_filesize', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_upload_maximum_filesize-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local PHP upload maximum filesize', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_upload_maximum_filesize', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '536.870912', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'PHP version', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_version', + 'unique_id': '1234567890abcdef#server_php_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local PHP version', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.2.18', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_available_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_sma_available_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SMA available memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_sma_avail_mem', + 'unique_id': '1234567890abcdef#sma_avail_mem', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_available_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local SMA available memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_sma_available_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33.342368', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_number_of_segments-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_sma_number_of_segments', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SMA number of segments', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_sma_num_seg', + 'unique_id': '1234567890abcdef#sma_num_seg', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_number_of_segments-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local SMA number of segments', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_sma_number_of_segments', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_segment_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_sma_segment_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SMA segment size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_sma_seg_size', + 'unique_id': '1234567890abcdef#sma_seg_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_segment_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local SMA segment size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_sma_segment_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33.554312', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_distributed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_distributed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System memcache distributed', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_memcache_distributed', + 'unique_id': '1234567890abcdef#system_memcache.distributed', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_distributed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System memcache distributed', + }), + 'context': , + 'entity_id': 
'sensor.my_nc_url_local_system_memcache_distributed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_local-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_local', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System memcache local', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_memcache_local', + 'unique_id': '1234567890abcdef#system_memcache.local', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_local-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System memcache local', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_local', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '\\OC\\Memcache\\APCu', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_locking-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_locking', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System memcache locking', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_memcache_locking', + 'unique_id': '1234567890abcdef#system_memcache.locking', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_locking-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System memcache locking', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_locking', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_theme-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_system_theme', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System theme', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_theme', + 'unique_id': '1234567890abcdef#system_theme', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_theme-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System theme', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_theme', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_system_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System version', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_version', + 'unique_id': '1234567890abcdef#system_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System version', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.0.4.1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_total_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_mem_total', + 'unique_id': '1234567890abcdef#system_mem_total', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Total memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_total_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.728192', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_swap_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_total_swap_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total swap memory', + 'platform': 'nextcloud', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_swap_total', + 'unique_id': '1234567890abcdef#system_swap_total', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_swap_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Total swap memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_total_swap_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.484736', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_updates_available-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_updates_available', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Updates available', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_apps_num_updates_available', + 'unique_id': '1234567890abcdef#system_apps_num_updates_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_updates_available-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Updates available', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_updates_available', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_webserver-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_webserver', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Webserver', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_webserver', + 'unique_id': '1234567890abcdef#server_webserver', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_webserver-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Webserver', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_webserver', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Apache/2.4.57 (Debian)', + }) +# --- diff --git a/tests/components/nextcloud/snapshots/test_update.ambr b/tests/components/nextcloud/snapshots/test_update.ambr new file mode 100644 index 00000000000..484106580b1 --- /dev/null +++ b/tests/components/nextcloud/snapshots/test_update.ambr @@ -0,0 +1,59 @@ +# serializer version: 1 +# name: test_async_setup_entry[update.my_nc_url_local_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.my_nc_url_local_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234567890abcdef#update', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[update.my_nc_url_local_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/nextcloud/icon.png', + 'friendly_name': 'my.nc_url.local None', + 'in_progress': False, + 'installed_version': '28.0.4.1', + 'latest_version': '28.0.4.1', + 'release_summary': None, + 'release_url': 'https://nextcloud.com/changelog/#28-0-4', + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.my_nc_url_local_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/nextcloud/test_binary_sensor.py b/tests/components/nextcloud/test_binary_sensor.py new file mode 100644 index 00000000000..dd53f4fb2cf --- /dev/null +++ b/tests/components/nextcloud/test_binary_sensor.py @@ -0,0 +1,30 @@ +"""Tests for the Nextcloud binary sensors.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import init_integration +from .const import NC_DATA, VALID_CONFIG + +from tests.common import snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a successful setup entry.""" + with patch( + "homeassistant.components.nextcloud.PLATFORMS", [Platform.BINARY_SENSOR] + ): + entry = await init_integration(hass, VALID_CONFIG, NC_DATA) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/nextcloud/test_config_flow.py b/tests/components/nextcloud/test_config_flow.py index c02516fdc99..16b6bf3bc04 100644 --- a/tests/components/nextcloud/test_config_flow.py +++ b/tests/components/nextcloud/test_config_flow.py @@ -1,6 +1,6 @@ """Tests for the Nextcloud config flow.""" -from unittest.mock import Mock, patch +from unittest.mock import patch from nextcloudmonitor import ( NextcloudMonitorAuthorizationError, @@ -11,25 +11,20 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.nextcloud.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .const import VALID_CONFIG + from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -VALID_CONFIG = { - CONF_URL: "nc_url", - CONF_USERNAME: "nc_user", - CONF_PASSWORD: "nc_pass", - CONF_VERIFY_SSL: True, -} - async def test_user_create_entry( - hass: HomeAssistant, mock_nextcloud_monitor: Mock, snapshot: SnapshotAssertion + hass: HomeAssistant, snapshot: SnapshotAssertion ) -> None: """Test that the user step works.""" # start user flow @@ -85,7 +80,7 @@ async def test_user_create_entry( # test success with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=mock_nextcloud_monitor, + return_value=True, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -94,17 +89,15 @@ async def test_user_create_entry( await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "nc_url" + assert result["title"] == "https://my.nc_url.local" assert result["data"] == snapshot -async def test_user_already_configured( - hass: HomeAssistant, mock_nextcloud_monitor: Mock -) -> None: +async def test_user_already_configured(hass: HomeAssistant) -> None: """Test that errors are shown when duplicates are added.""" entry = MockConfigEntry( domain=DOMAIN, - title="nc_url", + title="https://my.nc_url.local", unique_id="nc_url", data=VALID_CONFIG, ) @@ -119,7 +112,7 @@ async def test_user_already_configured( with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=mock_nextcloud_monitor, + return_value=True, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -131,24 +124,18 @@ async def test_user_already_configured( assert result["reason"] == "already_configured" -async def test_reauth( - hass: HomeAssistant, mock_nextcloud_monitor: Mock, snapshot: SnapshotAssertion -) -> None: +async def test_reauth(hass: HomeAssistant, snapshot: SnapshotAssertion) -> 
None:
     """Test that the re-auth flow works."""
     entry = MockConfigEntry(
         domain=DOMAIN,
-        title="nc_url",
+        title="https://my.nc_url.local",
         unique_id="nc_url",
         data=VALID_CONFIG,
     )
     entry.add_to_hass(hass)

     # start reauth flow
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN,
-        context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id},
-        data=entry.data,
-    )
+    result = await entry.start_reauth_flow(hass)
     assert result["type"] is FlowResultType.FORM
     assert result["step_id"] == "reauth_confirm"
@@ -206,7 +193,7 @@ async def test_reauth(
     # test success
     with patch(
         "homeassistant.components.nextcloud.config_flow.NextcloudMonitor",
-        return_value=mock_nextcloud_monitor,
+        return_value=True,
     ):
         result = await hass.config_entries.flow.async_configure(
             result["flow_id"],
diff --git a/tests/components/nextcloud/test_coordinator.py b/tests/components/nextcloud/test_coordinator.py
new file mode 100644
index 00000000000..91f7e7967a3
--- /dev/null
+++ b/tests/components/nextcloud/test_coordinator.py
@@ -0,0 +1,69 @@
+"""Tests for the Nextcloud coordinator."""
+
+from unittest.mock import Mock, patch
+
+from freezegun.api import FrozenDateTimeFactory
+from nextcloudmonitor import (
+    NextcloudMonitor,
+    NextcloudMonitorAuthorizationError,
+    NextcloudMonitorConnectionError,
+    NextcloudMonitorError,
+    NextcloudMonitorRequestError,
+)
+import pytest
+
+from homeassistant.components.nextcloud.const import DEFAULT_SCAN_INTERVAL
+from homeassistant.const import STATE_UNAVAILABLE
+from homeassistant.core import HomeAssistant
+
+from . import mock_config_entry
+from .const import NC_DATA, VALID_CONFIG
+
+from tests.common import async_fire_time_changed
+
+
+@pytest.mark.parametrize(
+    ("error"),
+    [
+        (NextcloudMonitorAuthorizationError),
+        (NextcloudMonitorConnectionError),
+        (NextcloudMonitorRequestError),
+    ],
+)
+async def test_data_update(
+    hass: HomeAssistant, freezer: FrozenDateTimeFactory, error: NextcloudMonitorError
+) -> None:
+    """Test coordinator data updates."""
+    entry = mock_config_entry(VALID_CONFIG)
+    entry.add_to_hass(hass)
+
+    with (
+        patch(
+            "homeassistant.components.nextcloud.NextcloudMonitor", spec=NextcloudMonitor
+        ) as mock_nextcloud_monitor,
+    ):
+        mock_nextcloud_monitor.return_value.update = Mock(
+            return_value=True,
+            side_effect=[None, error, None],
+        )
+        mock_nextcloud_monitor.return_value.data = NC_DATA
+        assert await hass.config_entries.async_setup(entry.entry_id)
+
+    # Test successful setup and first data fetch
+    await hass.async_block_till_done(wait_background_tasks=True)
+    states = hass.states.async_all()
+    assert all(state.state != STATE_UNAVAILABLE for state in states)
+
+    # Test states get unavailable on error
+    freezer.tick(DEFAULT_SCAN_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done(wait_background_tasks=True)
+    states = hass.states.async_all()
+    assert all(state.state == STATE_UNAVAILABLE for state in states)
+
+    # Test successful data fetch
+    freezer.tick(DEFAULT_SCAN_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done(wait_background_tasks=True)
+    states = hass.states.async_all()
+    assert all(state.state != STATE_UNAVAILABLE for state in states)
diff --git a/tests/components/nextcloud/test_init.py b/tests/components/nextcloud/test_init.py
new file mode 100644
index 00000000000..70c8f545c6b
--- /dev/null
+++ b/tests/components/nextcloud/test_init.py
@@ -0,0 +1,95 @@
+"""Tests for the Nextcloud init."""
+
+from unittest.mock import Mock, patch
+
+from nextcloudmonitor import (
+    NextcloudMonitorAuthorizationError,
+    NextcloudMonitorConnectionError,
+    NextcloudMonitorError,
+    NextcloudMonitorRequestError,
+)
+import pytest
+
+from homeassistant.components.nextcloud.const import DOMAIN
+from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import CONF_URL, Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from . import init_integration, mock_config_entry
+from .const import MOCKED_ENTRY_ID, NC_DATA, VALID_CONFIG
+
+
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_async_setup_entry(
+    hass: HomeAssistant,
+) -> None:
+    """Test a successful setup entry."""
+    assert await init_integration(hass, VALID_CONFIG, NC_DATA)
+
+
+async def test_unique_id_migration(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Test migration of unique ids to stable ones."""
+
+    object_id = "my_nc_url_local_system_version"
+    entity_id = f"{Platform.SENSOR}.{object_id}"
+
+    entry = mock_config_entry(VALID_CONFIG)
+    entry.add_to_hass(hass)
+
+    entity = entity_registry.async_get_or_create(
+        Platform.SENSOR,
+        DOMAIN,
+        f"{VALID_CONFIG[CONF_URL]}#nextcloud_system_version",
+        suggested_object_id=object_id,
+        config_entry=entry,
+    )
+
+    # test old unique id
+    assert entity.entity_id == entity_id
+    assert entity.unique_id == f"{VALID_CONFIG[CONF_URL]}#nextcloud_system_version"
+
+    with (
+        patch(
+            "homeassistant.components.nextcloud.NextcloudMonitor"
+        ) as mock_nextcloud_monitor,
+    ):
+        mock_nextcloud_monitor.update = Mock(return_value=True)
+        mock_nextcloud_monitor.return_value.data = NC_DATA
+        await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+
+    # test migrated unique id
+    reg_entry = entity_registry.async_get(entity_id)
+    assert reg_entry.unique_id == f"{MOCKED_ENTRY_ID}#system_version"
+
+
+@pytest.mark.parametrize(
+    ("exception", "expected_entry_state"),
+    [
+        (NextcloudMonitorAuthorizationError, ConfigEntryState.SETUP_ERROR),
+        (NextcloudMonitorConnectionError, ConfigEntryState.SETUP_RETRY),
+        (NextcloudMonitorRequestError, ConfigEntryState.SETUP_RETRY),
+    ],
+)
+async def test_setup_entry_errors(
+    hass: HomeAssistant,
+    exception: NextcloudMonitorError,
+    expected_entry_state: ConfigEntryState,
+) -> None:
+    """Test config entry setup errors."""
+
+    entry = mock_config_entry(VALID_CONFIG)
+    entry.add_to_hass(hass)
+
+    with (
+        patch(
+            "homeassistant.components.nextcloud.NextcloudMonitor", side_effect=exception
+        ),
+    ):
+        await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+    assert entry.state == expected_entry_state
diff --git a/tests/components/nextcloud/test_sensor.py b/tests/components/nextcloud/test_sensor.py
new file mode 100644
index 00000000000..2ccaf2b7770
--- /dev/null
+++ b/tests/components/nextcloud/test_sensor.py
@@ -0,0 +1,28 @@
+"""Tests for the Nextcloud sensors."""
+
+from unittest.mock import patch
+
+import pytest
+from syrupy.assertion import SnapshotAssertion
+
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from .
import init_integration +from .const import NC_DATA, VALID_CONFIG + +from tests.common import snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a successful setup entry.""" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass, VALID_CONFIG, NC_DATA) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/nextcloud/test_update.py b/tests/components/nextcloud/test_update.py new file mode 100644 index 00000000000..ed9b65ee55f --- /dev/null +++ b/tests/components/nextcloud/test_update.py @@ -0,0 +1,77 @@ +"""Tests for the Nextcloud update entity.""" + +from copy import deepcopy +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import init_integration +from .const import NC_DATA, VALID_CONFIG + +from tests.common import snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a successful setup entry.""" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + entry = await init_integration(hass, VALID_CONFIG, NC_DATA) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_setup_entity_without_update( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test update entity is created w/o available update.""" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + await init_integration(hass, VALID_CONFIG, NC_DATA) + + states = hass.states.async_all() + assert len(states) == 1 + assert states[0].state == STATE_OFF + assert states[0].attributes["installed_version"] == "28.0.4.1" + assert states[0].attributes["latest_version"] == "28.0.4.1" + assert ( + states[0].attributes["release_url"] == "https://nextcloud.com/changelog/#28-0-4" + ) + + +async def test_setup_entity_with_update( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test update entity is created with available update.""" + data = deepcopy(NC_DATA) + data["nextcloud"]["system"]["update"]["available"] = True + data["nextcloud"]["system"]["update"]["available_version"] = "30.0.0.0" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + await init_integration(hass, VALID_CONFIG, data) + + states = hass.states.async_all() + assert len(states) == 1 + assert states[0].state == STATE_ON + assert states[0].attributes["installed_version"] == "28.0.4.1" + assert states[0].attributes["latest_version"] == "30.0.0.0" + assert ( + states[0].attributes["release_url"] == "https://nextcloud.com/changelog/#30-0-0" + ) + + +async def test_setup_no_entity(hass: HomeAssistant) -> None: + """Test no update entity is created, when no data available.""" + data = deepcopy(NC_DATA) + data["nextcloud"]["system"].pop("update") # only nc<28.0.0 + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + await init_integration(hass, VALID_CONFIG, data) + + states = hass.states.async_all() + 
assert len(states) == 0 diff --git a/tests/components/nextdns/snapshots/test_diagnostics.ambr b/tests/components/nextdns/snapshots/test_diagnostics.ambr index 5040c6e052e..23f42fee077 100644 --- a/tests/components/nextdns/snapshots/test_diagnostics.ambr +++ b/tests/components/nextdns/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'profile_id': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'nextdns', 'entry_id': 'd9aa37407ddac7b964a99e86312288d6', 'minor_version': 1, @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Fake Profile', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/nextdns/test_config_flow.py b/tests/components/nextdns/test_config_flow.py index 7571eef347e..27a6cf1e7e0 100644 --- a/tests/components/nextdns/test_config_flow.py +++ b/tests/components/nextdns/test_config_flow.py @@ -12,7 +12,7 @@ from homeassistant.const import CONF_API_KEY, CONF_PROFILE_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import PROFILES, init_integration +from . import PROFILES, init_integration, mock_nextdns async def test_form_create_entry(hass: HomeAssistant) -> None: @@ -101,3 +101,60 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth_successful(hass: HomeAssistant) -> None: + """Test starting a reauthentication flow.""" + entry = await init_integration(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + with ( + patch( + "homeassistant.components.nextdns.NextDns.get_profiles", + return_value=PROFILES, + ), + mock_nextdns(), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_API_KEY: "new_api_key"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +@pytest.mark.parametrize( + ("exc", "base_error"), + [ + (ApiError("API Error"), "cannot_connect"), + (InvalidApiKeyError, "invalid_api_key"), + (RetryError("Retry Error"), "cannot_connect"), + (TimeoutError, "cannot_connect"), + (ValueError, "unknown"), + ], +) +async def test_reauth_errors( + hass: HomeAssistant, exc: Exception, base_error: str +) -> None: + """Test reauthentication flow with errors.""" + entry = await init_integration(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + with patch( + "homeassistant.components.nextdns.NextDns.get_profiles", side_effect=exc + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_API_KEY: "new_api_key"}, + ) + await hass.async_block_till_done() + + assert result["errors"] == {"base": base_error} diff --git a/tests/components/nextdns/test_coordinator.py b/tests/components/nextdns/test_coordinator.py new file mode 100644 index 00000000000..f2b353ea2c5 --- /dev/null +++ b/tests/components/nextdns/test_coordinator.py @@ -0,0 +1,76 @@ +"""Tests for NextDNS coordinator.""" + +from datetime import timedelta +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from nextdns import InvalidApiKeyError + +from 
homeassistant.components.nextdns.const import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import init_integration + +from tests.common import async_fire_time_changed + + +async def test_auth_error( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test authentication error when polling data.""" + entry = await init_integration(hass) + + assert entry.state is ConfigEntryState.LOADED + + freezer.tick(timedelta(minutes=10)) + with ( + patch( + "homeassistant.components.nextdns.NextDns.get_profiles", + side_effect=InvalidApiKeyError, + ), + patch( + "homeassistant.components.nextdns.NextDns.get_analytics_status", + side_effect=InvalidApiKeyError, + ), + patch( + "homeassistant.components.nextdns.NextDns.get_analytics_encryption", + side_effect=InvalidApiKeyError, + ), + patch( + "homeassistant.components.nextdns.NextDns.get_analytics_dnssec", + side_effect=InvalidApiKeyError, + ), + patch( + "homeassistant.components.nextdns.NextDns.get_analytics_ip_versions", + side_effect=InvalidApiKeyError, + ), + patch( + "homeassistant.components.nextdns.NextDns.get_analytics_protocols", + side_effect=InvalidApiKeyError, + ), + patch( + "homeassistant.components.nextdns.NextDns.get_settings", + side_effect=InvalidApiKeyError, + ), + patch( + "homeassistant.components.nextdns.NextDns.connection_status", + side_effect=InvalidApiKeyError, + ), + ): + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nextdns/test_init.py b/tests/components/nextdns/test_init.py index 61a487d917c..0a0bf3fc487 100644 --- a/tests/components/nextdns/test_init.py +++ b/tests/components/nextdns/test_init.py @@ -2,12 +2,12 @@ from unittest.mock import patch -from nextdns import ApiError +from nextdns import ApiError, InvalidApiKeyError import pytest from tenacity import RetryError from homeassistant.components.nextdns.const import CONF_PROFILE_ID, DOMAIN -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import CONF_API_KEY, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -59,3 +59,33 @@ async def test_unload_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.NOT_LOADED assert not hass.data.get(DOMAIN) + + +async def test_config_auth_failed(hass: HomeAssistant) -> None: + """Test for setup failure if the auth fails.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="Fake Profile", + unique_id="xyz12", + data={CONF_API_KEY: "fake_api_key", CONF_PROFILE_ID: "xyz12"}, + ) + entry.add_to_hass(hass) + + with patch( + "homeassistant.components.nextdns.NextDns.get_profiles", + side_effect=InvalidApiKeyError, + ): + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert 
flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nice_go/conftest.py b/tests/components/nice_go/conftest.py index 31b21083c05..cf85cd7e092 100644 --- a/tests/components/nice_go/conftest.py +++ b/tests/components/nice_go/conftest.py @@ -1,6 +1,7 @@ """Common fixtures for the Nice G.O. tests.""" from collections.abc import Generator +from datetime import datetime from unittest.mock import AsyncMock, patch from nice_go import Barrier, BarrierState, ConnectionState @@ -51,7 +52,9 @@ def mock_nice_go() -> Generator[AsyncMock]: attr=barrier["attr"], state=BarrierState( **barrier["state"], - connectionState=ConnectionState(**barrier["connectionState"]), + connectionState=ConnectionState(**barrier["connectionState"]) + if barrier.get("connectionState") + else None, ), api=client, ) @@ -71,7 +74,7 @@ def mock_config_entry() -> MockConfigEntry: CONF_EMAIL: "test-email", CONF_PASSWORD: "test-password", CONF_REFRESH_TOKEN: "test-refresh-token", - CONF_REFRESH_TOKEN_CREATION_TIME: 1722184160.738171, + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), }, version=1, unique_id="test-email", diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json index adb0fb4bacd..5a7607612c1 100644 --- a/tests/components/nice_go/fixtures/get_all_barriers.json +++ b/tests/components/nice_go/fixtures/get_all_barriers.json @@ -11,7 +11,6 @@ ], "state": { "deviceId": "1", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 1", "autoDisabled": false, @@ -42,7 +41,6 @@ ], "state": { "deviceId": "2", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 2", "autoDisabled": false, @@ -60,5 +58,59 @@ "connected": true, "updatedTimestamp": "123" } + }, + { + "id": "3", + "type": "Mms100", + "controlLevel": "Owner", + "attr": [ + { + "key": "organization", + "value": "test_organization" + } + ], + "state": { + "deviceId": "3", + "reported": { + "displayName": "Test Garage 3", + "autoDisabled": false, + "migrationStatus": "DONE", + "deviceId": "3", + "deviceFwVersion": "1.2.3.4.5.6", + "barrierStatus": "1,100,0,0,1,0,0,0", + "radioConnected": 1, + "powerLevel": "LOW" + }, + "timestamp": null, + "version": null + }, + "connectionState": null + }, + { + "id": "4", + "type": "unknown-device-type", + "controlLevel": "Owner", + "attr": [ + { + "key": "organization", + "value": "test_organization" + } + ], + "state": { + "deviceId": "4", + "reported": { + "displayName": "Test Garage 4", + "autoDisabled": false, + "migrationStatus": "DONE", + "deviceId": "4", + "deviceFwVersion": "1.2.3.4.5.6", + "barrierStatus": "1,100,0,0,1,0,0,0", + "radioConnected": 1, + "powerLevel": "LOW" + }, + "timestamp": null, + "version": null + }, + "connectionState": null } ] diff --git a/tests/components/nice_go/snapshots/test_cover.ambr b/tests/components/nice_go/snapshots/test_cover.ambr index 391d91584bf..49b5267df56 100644 --- a/tests/components/nice_go/snapshots/test_cover.ambr +++ b/tests/components/nice_go/snapshots/test_cover.ambr @@ -117,21 +117,21 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, 'original_name': None, - 'platform': 'linear_garage_door', + 'platform': 'nice_go', 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'test3-GDO', + 'unique_id': '3', 'unit_of_measurement': None, }) # --- # name: 
test_covers[cover.test_garage_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'garage', + 'device_class': 'gate', 'friendly_name': 'Test Garage 3', 'supported_features': , }), @@ -140,7 +140,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'opening', + 'state': 'open', }) # --- # name: test_covers[cover.test_garage_4-entry] @@ -168,11 +168,11 @@ 'original_device_class': , 'original_icon': None, 'original_name': None, - 'platform': 'linear_garage_door', + 'platform': 'nice_go', 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'test4-GDO', + 'unique_id': '4', 'unit_of_measurement': None, }) # --- @@ -188,6 +188,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'closing', + 'state': 'open', }) # --- diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr index abd3b3103d1..b33726d2b72 100644 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -9,6 +9,7 @@ 'id': '1', 'light_status': True, 'name': 'Test Garage 1', + 'type': 'WallStation', 'vacation_mode': False, }), '2': dict({ @@ -18,17 +19,39 @@ 'id': '2', 'light_status': False, 'name': 'Test Garage 2', + 'type': 'WallStation', 'vacation_mode': True, }), + '3': dict({ + 'barrier_status': 'open', + 'connected': True, + 'fw_version': '1.2.3.4.5.6', + 'id': '3', + 'light_status': None, + 'name': 'Test Garage 3', + 'type': 'Mms100', + 'vacation_mode': None, + }), + '4': dict({ + 'barrier_status': 'open', + 'connected': True, + 'fw_version': '1.2.3.4.5.6', + 'id': '4', + 'light_status': None, + 'name': 'Test Garage 4', + 'type': 'unknown-device-type', + 'vacation_mode': None, + }), }), 'entry': dict({ 'data': dict({ 'email': '**REDACTED**', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', - 'refresh_token_creation_time': 1722184160.738171, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'nice_go', 'entry_id': 'acefdd4b3a4a0911067d1cf51414201e', 'minor_version': 1, @@ -37,6 +60,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/nice_go/snapshots/test_light.ambr b/tests/components/nice_go/snapshots/test_light.ambr index 2e29d9589dd..529df95a570 100644 --- a/tests/components/nice_go/snapshots/test_light.ambr +++ b/tests/components/nice_go/snapshots/test_light.ambr @@ -109,115 +109,3 @@ 'state': 'off', }) # --- -# name: test_data[light.test_garage_3_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.test_garage_3_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'linear_garage_door', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': 'test3-Light', - 'unit_of_measurement': None, - }) -# --- -# name: test_data[light.test_garage_3_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'brightness': None, - 'color_mode': None, - 'friendly_name': 'Test Garage 3 Light', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.test_garage_3_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_data[light.test_garage_4_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.test_garage_4_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'linear_garage_door', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': 'test4-Light', - 'unit_of_measurement': None, - }) -# --- -# name: test_data[light.test_garage_4_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Test Garage 4 Light', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.test_garage_4_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/nice_go/test_config_flow.py b/tests/components/nice_go/test_config_flow.py index 67930b9f752..9c25a640c75 100644 --- a/tests/components/nice_go/test_config_flow.py +++ b/tests/components/nice_go/test_config_flow.py @@ -16,6 +16,8 @@ from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . 
import setup_integration + from tests.common import MockConfigEntry @@ -109,3 +111,71 @@ async def test_duplicate_device( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_nice_go: AsyncMock, +) -> None: + """Test reauth flow.""" + + await setup_integration(hass, mock_config_entry, []) + + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-email", + CONF_PASSWORD: "other-fake-password", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [(AuthFailedError, "invalid_auth"), (Exception, "unknown")], +) +async def test_reauth_exceptions( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_nice_go: AsyncMock, + side_effect: Exception, + expected_error: str, +) -> None: + """Test we handle invalid auth.""" + mock_nice_go.authenticate.side_effect = side_effect + await setup_integration(hass, mock_config_entry, []) + + result = await mock_config_entry.start_reauth_flow(hass) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-email", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + mock_nice_go.authenticate.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-email", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert len(hass.config_entries.async_entries()) == 1 diff --git a/tests/components/nice_go/test_cover.py b/tests/components/nice_go/test_cover.py index a6eb9bd27fb..f90c2d438b0 100644 --- a/tests/components/nice_go/test_cover.py +++ b/tests/components/nice_go/test_cover.py @@ -2,24 +2,22 @@ from unittest.mock import AsyncMock +from aiohttp import ClientError from freezegun.api import FrozenDateTimeFactory +from nice_go import ApiError +import pytest from syrupy import SnapshotAssertion from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, + CoverState, ) from homeassistant.components.nice_go.const import DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import setup_integration @@ -103,13 +101,56 @@ async def test_update_cover_state( await setup_integration(hass, mock_config_entry, [Platform.COVER]) - assert hass.states.get("cover.test_garage_1").state == STATE_CLOSED - assert hass.states.get("cover.test_garage_2").state == STATE_OPEN + assert hass.states.get("cover.test_garage_1").state == CoverState.CLOSED + assert hass.states.get("cover.test_garage_2").state == CoverState.OPEN device_update = load_json_object_fixture("device_state_update.json", DOMAIN) await mock_config_entry.runtime_data.on_data(device_update) device_update_1 = load_json_object_fixture("device_state_update_1.json", DOMAIN) await mock_config_entry.runtime_data.on_data(device_update_1) - assert hass.states.get("cover.test_garage_1").state == STATE_OPENING - assert hass.states.get("cover.test_garage_2").state == STATE_CLOSING + assert hass.states.get("cover.test_garage_1").state == CoverState.OPENING + assert hass.states.get("cover.test_garage_2").state == CoverState.CLOSING + + +@pytest.mark.parametrize( + ("action", "error", "entity_id", "expected_error"), + [ + ( + SERVICE_OPEN_COVER, + ApiError, + "cover.test_garage_1", + "Error opening the barrier", + ), + ( + SERVICE_CLOSE_COVER, + ClientError, + "cover.test_garage_2", + "Error closing the barrier", + ), + ], +) +async def test_cover_exceptions( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + action: str, + error: Exception, + entity_id: str, + expected_error: str, +) -> None: + """Test that closing the cover works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + mock_nice_go.open_barrier.side_effect = error + mock_nice_go.close_barrier.side_effect = error + + with pytest.raises(HomeAssistantError, match=expected_error): + await hass.services.async_call( + COVER_DOMAIN, + action, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) diff --git a/tests/components/nice_go/test_diagnostics.py b/tests/components/nice_go/test_diagnostics.py index 1c88c6a8dc6..5c8647f3d6e 100644 --- a/tests/components/nice_go/test_diagnostics.py +++ b/tests/components/nice_go/test_diagnostics.py @@ -2,6 +2,7 @@ from unittest.mock import AsyncMock +import pytest from syrupy import SnapshotAssertion from syrupy.filters import props @@ -14,6 +15,7 @@ from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +@pytest.mark.freeze_time("2024-08-27") async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -26,4 +28,6 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot( + exclude=props("created_at", "modified_at", "refresh_token_creation_time") + ) diff --git a/tests/components/nice_go/test_event.py b/tests/components/nice_go/test_event.py index 0038b2882ad..1c1b70532f4 100644 --- a/tests/components/nice_go/test_event.py +++ b/tests/components/nice_go/test_event.py @@ -19,10 +19,10 @@ async def test_barrier_obstructed( mock_config_entry: MockConfigEntry, ) -> None: """Test barrier obstructed.""" - mock_nice_go.event = MagicMock() + mock_nice_go.listen = MagicMock() await setup_integration(hass, mock_config_entry, [Platform.EVENT]) - await mock_nice_go.event.call_args_list[2][0][0]({"deviceId": "1"}) + await 
mock_nice_go.listen.call_args_list[3][0][1]({"deviceId": "1"}) await hass.async_block_till_done() event_state = hass.states.get("event.test_garage_1_barrier_obstructed") diff --git a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py index 249622d23b0..051c6623b23 100644 --- a/tests/components/nice_go/test_init.py +++ b/tests/components/nice_go/test_init.py @@ -1,7 +1,8 @@ """Test Nice G.O. init.""" +import asyncio from datetime import timedelta -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock, MagicMock, patch from freezegun.api import FrozenDateTimeFactory from nice_go import ApiError, AuthFailedError, Barrier, BarrierState @@ -9,9 +10,9 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.nice_go.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import issue_registry as ir from . import setup_integration @@ -32,29 +33,32 @@ async def test_unload_entry( assert mock_config_entry.state is ConfigEntryState.NOT_LOADED -@pytest.mark.parametrize( - ("side_effect", "entry_state"), - [ - ( - AuthFailedError(), - ConfigEntryState.SETUP_ERROR, - ), - (ApiError(), ConfigEntryState.SETUP_RETRY), - ], -) -async def test_setup_failure( +async def test_setup_failure_api_error( hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry, - side_effect: Exception, - entry_state: ConfigEntryState, ) -> None: """Test reauth trigger setup.""" - mock_nice_go.authenticate_refresh.side_effect = side_effect + mock_nice_go.authenticate_refresh.side_effect = ApiError() await setup_integration(hass, mock_config_entry, []) - assert mock_config_entry.state is entry_state + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_failure_auth_failed( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth trigger setup.""" + + mock_nice_go.authenticate_refresh.side_effect = AuthFailedError() + + await setup_integration(hass, mock_config_entry, []) + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + assert any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) async def test_firmware_update_required( @@ -77,7 +81,6 @@ async def test_firmware_update_required( "displayName": "test-display-name", "migrationStatus": "NOT_STARTED", }, - desired=None, connectionState=None, version=None, timestamp=None, @@ -110,7 +113,7 @@ async def test_update_refresh_token( assert mock_nice_go.authenticate.call_count == 0 mock_nice_go.authenticate.return_value = "new-refresh-token" - freezer.tick(timedelta(days=30)) + freezer.tick(timedelta(days=30, seconds=1)) async_fire_time_changed(hass) assert await hass.config_entries.async_reload(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -175,6 +178,8 @@ async def test_update_refresh_token_auth_failed( assert mock_nice_go.get_all_barriers.call_count == 1 assert mock_config_entry.data["refresh_token"] == "test-refresh-token" assert "Authentication failed" in caplog.text + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + assert 
any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) async def test_client_listen_api_error( @@ -209,11 +214,11 @@ async def test_on_data_none_parsed( ) -> None: """Test on data with None parsed.""" - mock_nice_go.event = MagicMock() + mock_nice_go.listen = MagicMock() await setup_integration(hass, mock_config_entry, [Platform.COVER]) - await mock_nice_go.event.call_args[0][0]( + await mock_nice_go.listen.call_args_list[1][0][1]( { "data": { "devicesStatesUpdateFeed": { @@ -243,18 +248,74 @@ async def test_on_connected( ) -> None: """Test on connected.""" - mock_nice_go.event = MagicMock() + mock_nice_go.listen = MagicMock() await setup_integration(hass, mock_config_entry, [Platform.COVER]) - assert mock_nice_go.event.call_count == 2 + assert mock_nice_go.listen.call_count == 3 mock_nice_go.subscribe = AsyncMock() - await mock_nice_go.event.call_args_list[0][0][0]() + await mock_nice_go.listen.call_args_list[0][0][1]() assert mock_nice_go.subscribe.call_count == 1 +async def test_on_connection_lost( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test on connection lost.""" + + mock_nice_go.listen = MagicMock() + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.listen.call_count == 3 + + with patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0): + await mock_nice_go.listen.call_args_list[2][0][1]( + {"exception": ValueError("test")} + ) + + assert hass.states.get("cover.test_garage_1").state == "unavailable" + + # Now fire connected + + mock_nice_go.subscribe = AsyncMock() + + await mock_nice_go.listen.call_args_list[0][0][1]() + + assert mock_nice_go.subscribe.call_count == 1 + + assert hass.states.get("cover.test_garage_1").state == "closed" + + +async def test_on_connection_lost_reconnect( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test on connection lost with reconnect.""" + + mock_nice_go.listen = MagicMock() + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.listen.call_count == 3 + + assert hass.states.get("cover.test_garage_1").state == "closed" + + with patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0): + await mock_nice_go.listen.call_args_list[2][0][1]( + {"exception": ValueError("test")} + ) + + assert hass.states.get("cover.test_garage_1").state == "unavailable" + + async def test_no_connection_state( hass: HomeAssistant, mock_nice_go: AsyncMock, @@ -262,13 +323,13 @@ async def test_no_connection_state( ) -> None: """Test parsing barrier with no connection state.""" - mock_nice_go.event = MagicMock() + mock_nice_go.listen = MagicMock() await setup_integration(hass, mock_config_entry, [Platform.COVER]) - assert mock_nice_go.event.call_count == 2 + assert mock_nice_go.listen.call_count == 3 - await mock_nice_go.event.call_args[0][0]( + await mock_nice_go.listen.call_args_list[1][0][1]( { "data": { "devicesStatesUpdateFeed": { @@ -285,4 +346,66 @@ async def test_no_connection_state( } ) - assert hass.states.get("cover.test_garage_1").state == "unavailable" + assert hass.states.get("cover.test_garage_1").state == "open" + + +async def test_connection_attempts_exhausted( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test 
connection attempts exhausted.""" + + mock_nice_go.connect.side_effect = ApiError + + with ( + patch("homeassistant.components.nice_go.coordinator.RECONNECT_ATTEMPTS", 1), + patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0), + ): + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert "API error" in caplog.text + assert "Error requesting Nice G.O. data" in caplog.text + + +async def test_reconnect_hass_stopping( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test reconnect with hass stopping.""" + + mock_nice_go.listen = MagicMock() + mock_nice_go.connect.side_effect = ApiError + + wait_for_hass = asyncio.Event() + + @callback + def _async_ha_stop(event: Event) -> None: + """Stop reconnecting if hass is stopping.""" + wait_for_hass.set() + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_ha_stop) + + with ( + patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0.1), + patch("homeassistant.components.nice_go.coordinator.RECONNECT_ATTEMPTS", 20), + ): + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + await hass.async_block_till_done() + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await wait_for_hass.wait() + await hass.async_block_till_done(wait_background_tasks=True) + + assert mock_nice_go.connect.call_count < 10 + + assert len(hass._background_tasks) == 0 + + assert "API error" in caplog.text + assert ( + "Failed to connect to the websocket, reconnect attempts exhausted" + not in caplog.text + ) diff --git a/tests/components/nice_go/test_light.py b/tests/components/nice_go/test_light.py index e1852581fe6..b170a0ee3ab 100644 --- a/tests/components/nice_go/test_light.py +++ b/tests/components/nice_go/test_light.py @@ -2,6 +2,9 @@ from unittest.mock import AsyncMock +from aiohttp import ClientError +from nice_go import ApiError +import pytest from syrupy import SnapshotAssertion from homeassistant.components.light import ( @@ -12,6 +15,7 @@ from homeassistant.components.light import ( from homeassistant.components.nice_go.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import setup_integration @@ -78,6 +82,7 @@ async def test_update_light_state( assert hass.states.get("light.test_garage_1_light").state == STATE_ON assert hass.states.get("light.test_garage_2_light").state == STATE_OFF + assert hass.states.get("light.test_garage_3_light") is None device_update = load_json_object_fixture("device_state_update.json", DOMAIN) await mock_config_entry.runtime_data.on_data(device_update) @@ -86,3 +91,72 @@ async def test_update_light_state( assert hass.states.get("light.test_garage_1_light").state == STATE_OFF assert hass.states.get("light.test_garage_2_light").state == STATE_ON + assert hass.states.get("light.test_garage_3_light") is None + + +@pytest.mark.parametrize( + ("action", "error", "entity_id", "expected_error"), + [ + ( + SERVICE_TURN_OFF, + ApiError, + "light.test_garage_1_light", + "Error while turning off the light", + ), + ( + SERVICE_TURN_ON, + ClientError, + "light.test_garage_2_light", + "Error while turning on the light", + ), + ], +) +async def test_error( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + action: str, + error: Exception, + entity_id: str, + expected_error: str, +) -> None: + """Test that errors are handled appropriately.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + mock_nice_go.light_on.side_effect = error + mock_nice_go.light_off.side_effect = error + + with pytest.raises(HomeAssistantError, match=expected_error): + await hass.services.async_call( + LIGHT_DOMAIN, + action, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + +async def test_unsupported_device_type( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that unsupported device types are handled appropriately.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + assert hass.states.get("light.test_garage_4_light") is None + assert ( + "Device 'Test Garage 4' has unknown device type 'unknown-device-type'" + in caplog.text + ) + assert "which is not supported by this integration" in caplog.text + assert ( + "We try to support it with a cover and event entity, but nothing else." + in caplog.text + ) + assert ( + "Please create an issue with your device model in additional info" + in caplog.text + ) diff --git a/tests/components/nice_go/test_switch.py b/tests/components/nice_go/test_switch.py index f34cba495c9..d3a2141eb2b 100644 --- a/tests/components/nice_go/test_switch.py +++ b/tests/components/nice_go/test_switch.py @@ -2,6 +2,10 @@ from unittest.mock import AsyncMock +from aiohttp import ClientError +from nice_go import ApiError +import pytest + from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -9,6 +13,7 @@ from homeassistant.components.switch import ( ) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from . 
import setup_integration @@ -41,3 +46,45 @@ async def test_turn_off( blocking=True, ) mock_nice_go.vacation_mode_off.assert_called_once_with("2") + + +@pytest.mark.parametrize( + ("action", "error", "entity_id", "expected_error"), + [ + ( + SERVICE_TURN_OFF, + ApiError, + "switch.test_garage_1_vacation_mode", + "Error while turning off the switch", + ), + ( + SERVICE_TURN_ON, + ClientError, + "switch.test_garage_2_vacation_mode", + "Error while turning on the switch", + ), + ], +) +async def test_error( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + action: str, + error: Exception, + entity_id: str, + expected_error: str, +) -> None: + """Test that errors are handled appropriately.""" + + await setup_integration(hass, mock_config_entry, [Platform.SWITCH]) + + mock_nice_go.vacation_mode_on.side_effect = error + mock_nice_go.vacation_mode_off.side_effect = error + + with pytest.raises(HomeAssistantError, match=expected_error): + await hass.services.async_call( + SWITCH_DOMAIN, + action, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) diff --git a/tests/components/niko_home_control/__init__.py b/tests/components/niko_home_control/__init__.py new file mode 100644 index 00000000000..f6e8187bf0f --- /dev/null +++ b/tests/components/niko_home_control/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the niko_home_control integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Set up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/niko_home_control/conftest.py b/tests/components/niko_home_control/conftest.py new file mode 100644 index 00000000000..932480ac710 --- /dev/null +++ b/tests/components/niko_home_control/conftest.py @@ -0,0 +1,43 @@ +"""niko_home_control integration tests configuration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.niko_home_control.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override integration setup.""" + with patch( + "homeassistant.components.niko_home_control.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_niko_home_control_connection() -> Generator[AsyncMock]: + """Mock a NHC client.""" + with ( + patch( + "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + autospec=True, + ) as mock_client, + ): + client = mock_client.return_value + client.return_value = True + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, title="Niko Home Control", data={CONF_HOST: "192.168.0.123"} + ) diff --git a/tests/components/niko_home_control/test_config_flow.py b/tests/components/niko_home_control/test_config_flow.py new file mode 100644 index 00000000000..8220ee15e02 --- /dev/null +++ b/tests/components/niko_home_control/test_config_flow.py @@ -0,0 +1,140 @@ +"""Test niko_home_control config flow.""" + +from unittest.mock import AsyncMock, patch + +from homeassistant.components.niko_home_control.const import DOMAIN 
+from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Niko Home Control" + assert result["data"] == {CONF_HOST: "192.168.0.123"} + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_cannot_connect(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test the cannot connect error.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + side_effect=Exception, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + with patch( + "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection" + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate_entry( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test uniqueness.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import_flow( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the import flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: "192.168.0.123"} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Niko Home Control" + assert result["data"] == {CONF_HOST: "192.168.0.123"} + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import_cannot_connect( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test the cannot connect error.""" + + with patch( + "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + side_effect=Exception, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: "192.168.0.123"} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def 
test_duplicate_import_entry( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test uniqueness.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: "192.168.0.123"} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/nina/test_binary_sensor.py b/tests/components/nina/test_binary_sensor.py index a7f9a980960..6ed1aee7e9d 100644 --- a/tests/components/nina/test_binary_sensor.py +++ b/tests/components/nina/test_binary_sensor.py @@ -17,6 +17,7 @@ from homeassistant.components.nina.const import ( ATTR_SENT, ATTR_SEVERITY, ATTR_START, + ATTR_WEB, DOMAIN, ) from homeassistant.config_entries import ConfigEntryState @@ -77,6 +78,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w1.attributes.get(ATTR_SENDER) == "Deutscher Wetterdienst" assert state_w1.attributes.get(ATTR_SEVERITY) == "Minor" assert state_w1.attributes.get(ATTR_RECOMMENDED_ACTIONS) == "" + assert state_w1.attributes.get(ATTR_WEB) == "https://www.wettergefahren.de" assert ( state_w1.attributes.get(ATTR_AFFECTED_AREAS) == "Gemeinde Oberreichenbach, Gemeinde Neuweiler, Stadt Nagold, Stadt Neubulach, Gemeinde Schömberg, Gemeinde Simmersfeld, Gemeinde Simmozheim, Gemeinde Rohrdorf, Gemeinde Ostelsheim, Gemeinde Ebhausen, Gemeinde Egenhausen, Gemeinde Dobel, Stadt Bad Liebenzell, Stadt Solingen, Stadt Haiterbach, Stadt Bad Herrenalb, Gemeinde Höfen an der Enz, Gemeinde Gechingen, Gemeinde Enzklösterle, Gemeinde Gutach (Schwarzwaldbahn) und 3392 weitere." @@ -98,6 +100,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w2.attributes.get(ATTR_SENDER) is None assert state_w2.attributes.get(ATTR_SEVERITY) is None assert state_w2.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w2.attributes.get(ATTR_WEB) is None assert state_w2.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w2.attributes.get(ATTR_ID) is None assert state_w2.attributes.get(ATTR_SENT) is None @@ -116,6 +119,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w3.attributes.get(ATTR_SENDER) is None assert state_w3.attributes.get(ATTR_SEVERITY) is None assert state_w3.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w3.attributes.get(ATTR_WEB) is None assert state_w3.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w3.attributes.get(ATTR_ID) is None assert state_w3.attributes.get(ATTR_SENT) is None @@ -134,6 +138,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w4.attributes.get(ATTR_SENDER) is None assert state_w4.attributes.get(ATTR_SEVERITY) is None assert state_w4.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w4.attributes.get(ATTR_WEB) is None assert state_w4.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w4.attributes.get(ATTR_ID) is None assert state_w4.attributes.get(ATTR_SENT) is None @@ -152,6 +157,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w5.attributes.get(ATTR_SENDER) is None assert state_w5.attributes.get(ATTR_SEVERITY) is None assert state_w5.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w5.attributes.get(ATTR_WEB) is None assert state_w5.attributes.get(ATTR_AFFECTED_AREAS) is None assert 
state_w5.attributes.get(ATTR_ID) is None assert state_w5.attributes.get(ATTR_SENT) is None @@ -199,6 +205,7 @@ async def test_sensors_without_corona_filter( state_w1.attributes.get(ATTR_RECOMMENDED_ACTIONS) == "Waschen sich regelmäßig und gründlich die Hände." ) + assert state_w1.attributes.get(ATTR_WEB) == "" assert ( state_w1.attributes.get(ATTR_AFFECTED_AREAS) == "Bundesland: Freie Hansestadt Bremen, Land Berlin, Land Hessen, Land Nordrhein-Westfalen, Land Brandenburg, Freistaat Bayern, Land Mecklenburg-Vorpommern, Land Rheinland-Pfalz, Freistaat Sachsen, Land Schleswig-Holstein, Freie und Hansestadt Hamburg, Freistaat Thüringen, Land Niedersachsen, Land Saarland, Land Sachsen-Anhalt, Land Baden-Württemberg" @@ -227,6 +234,7 @@ async def test_sensors_without_corona_filter( assert state_w2.attributes.get(ATTR_SENDER) == "Deutscher Wetterdienst" assert state_w2.attributes.get(ATTR_SEVERITY) == "Minor" assert state_w2.attributes.get(ATTR_RECOMMENDED_ACTIONS) == "" + assert state_w2.attributes.get(ATTR_WEB) == "https://www.wettergefahren.de" assert state_w2.attributes.get(ATTR_ID) == "mow.DE-NW-BN-SE030-20201014-30-000" assert state_w2.attributes.get(ATTR_SENT) == "2021-10-11T05:20:00+01:00" assert state_w2.attributes.get(ATTR_START) == "2021-11-01T05:20:00+01:00" @@ -244,6 +252,7 @@ async def test_sensors_without_corona_filter( assert state_w3.attributes.get(ATTR_SENDER) is None assert state_w3.attributes.get(ATTR_SEVERITY) is None assert state_w3.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w3.attributes.get(ATTR_WEB) is None assert state_w3.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w3.attributes.get(ATTR_ID) is None assert state_w3.attributes.get(ATTR_SENT) is None @@ -262,6 +271,7 @@ async def test_sensors_without_corona_filter( assert state_w4.attributes.get(ATTR_SENDER) is None assert state_w4.attributes.get(ATTR_SEVERITY) is None assert state_w4.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w4.attributes.get(ATTR_WEB) is None assert state_w4.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w4.attributes.get(ATTR_ID) is None assert state_w4.attributes.get(ATTR_SENT) is None @@ -280,6 +290,7 @@ async def test_sensors_without_corona_filter( assert state_w5.attributes.get(ATTR_SENDER) is None assert state_w5.attributes.get(ATTR_SEVERITY) is None assert state_w5.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w5.attributes.get(ATTR_WEB) is None assert state_w5.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w5.attributes.get(ATTR_ID) is None assert state_w5.attributes.get(ATTR_SENT) is None diff --git a/tests/components/nina/test_config_flow.py b/tests/components/nina/test_config_flow.py index 23ee8cbf797..309c8860c20 100644 --- a/tests/components/nina/test_config_flow.py +++ b/tests/components/nina/test_config_flow.py @@ -89,7 +89,9 @@ async def test_step_user_unexpected_exception(hass: HomeAssistant) -> None: DOMAIN, context={"source": SOURCE_USER}, data=deepcopy(DUMMY_DATA) ) - assert result["type"] is FlowResultType.ABORT + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + hass.config_entries.flow.async_abort(result["flow_id"]) async def test_step_user(hass: HomeAssistant) -> None: @@ -188,7 +190,7 @@ async def test_options_flow_init(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] is None + assert result["data"] == {} assert dict(config_entry.data) == { CONF_HEADLINE_FILTER: 
deepcopy(DUMMY_DATA[CONF_HEADLINE_FILTER]), @@ -300,7 +302,9 @@ async def test_options_flow_unexpected_exception(hass: HomeAssistant) -> None: result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.ABORT + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + hass.config_entries.options.async_abort(result["flow_id"]) async def test_options_flow_entity_removal( diff --git a/tests/components/nordpool/__init__.py b/tests/components/nordpool/__init__.py new file mode 100644 index 00000000000..20d74d38486 --- /dev/null +++ b/tests/components/nordpool/__init__.py @@ -0,0 +1,9 @@ +"""Tests for the Nord Pool integration.""" + +from homeassistant.components.nordpool.const import CONF_AREAS +from homeassistant.const import CONF_CURRENCY + +ENTRY_CONFIG = { + CONF_AREAS: ["SE3", "SE4"], + CONF_CURRENCY: "SEK", +} diff --git a/tests/components/nordpool/conftest.py b/tests/components/nordpool/conftest.py new file mode 100644 index 00000000000..9b7ab4b2afa --- /dev/null +++ b/tests/components/nordpool/conftest.py @@ -0,0 +1,83 @@ +"""Fixtures for the Nord Pool integration.""" + +from __future__ import annotations + +from collections.abc import AsyncGenerator +from datetime import datetime +import json +from typing import Any +from unittest.mock import patch + +from pynordpool import NordPoolClient +from pynordpool.const import Currency +from pynordpool.model import DeliveryPeriodData +import pytest + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from . import ENTRY_CONFIG + +from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture(autouse=True) +async def no_sleep() -> AsyncGenerator[None]: + """No sleeping.""" + with patch("homeassistant.components.nordpool.coordinator.asyncio.sleep"): + yield + + +@pytest.fixture +async def load_int( + hass: HomeAssistant, get_data: DeliveryPeriodData +) -> MockConfigEntry: + """Set up the Nord Pool integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry + + +@pytest.fixture(name="get_data") +async def get_data_from_library( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, load_json: dict[str, Any] +) -> DeliveryPeriodData: + """Retrieve data from Nord Pool library.""" + + client = NordPoolClient(aioclient_mock.create_session(hass.loop)) + with patch("pynordpool.NordPoolClient._get", return_value=load_json): + output = await client.async_get_delivery_period( + datetime(2024, 11, 5, 13, tzinfo=dt_util.UTC), Currency.SEK, ["SE3", "SE4"] + ) + await client._session.close() + return output + + +@pytest.fixture(name="load_json") +def load_json_from_fixture(load_data: str) -> dict[str, Any]: + """Load fixture with json data and return.""" + return json.loads(load_data) + + +@pytest.fixture(name="load_data", scope="package") +def load_data_from_fixture() -> str: + """Load fixture with fixture data and return.""" + return 
load_fixture("delivery_period.json", DOMAIN) diff --git a/tests/components/nordpool/fixtures/delivery_period.json b/tests/components/nordpool/fixtures/delivery_period.json new file mode 100644 index 00000000000..77d51dc9433 --- /dev/null +++ b/tests/components/nordpool/fixtures/delivery_period.json @@ -0,0 +1,272 @@ +{ + "deliveryDateCET": "2024-11-05", + "version": 3, + "updatedAt": "2024-11-04T12:15:03.9456464Z", + "deliveryAreas": ["SE3", "SE4"], + "market": "DayAhead", + "multiAreaEntries": [ + { + "deliveryStart": "2024-11-04T23:00:00Z", + "deliveryEnd": "2024-11-05T00:00:00Z", + "entryPerArea": { + "SE3": 250.73, + "SE4": 283.79 + } + }, + { + "deliveryStart": "2024-11-05T00:00:00Z", + "deliveryEnd": "2024-11-05T01:00:00Z", + "entryPerArea": { + "SE3": 76.36, + "SE4": 81.36 + } + }, + { + "deliveryStart": "2024-11-05T01:00:00Z", + "deliveryEnd": "2024-11-05T02:00:00Z", + "entryPerArea": { + "SE3": 73.92, + "SE4": 79.15 + } + }, + { + "deliveryStart": "2024-11-05T02:00:00Z", + "deliveryEnd": "2024-11-05T03:00:00Z", + "entryPerArea": { + "SE3": 61.69, + "SE4": 65.19 + } + }, + { + "deliveryStart": "2024-11-05T03:00:00Z", + "deliveryEnd": "2024-11-05T04:00:00Z", + "entryPerArea": { + "SE3": 64.6, + "SE4": 68.44 + } + }, + { + "deliveryStart": "2024-11-05T04:00:00Z", + "deliveryEnd": "2024-11-05T05:00:00Z", + "entryPerArea": { + "SE3": 453.27, + "SE4": 516.71 + } + }, + { + "deliveryStart": "2024-11-05T05:00:00Z", + "deliveryEnd": "2024-11-05T06:00:00Z", + "entryPerArea": { + "SE3": 996.28, + "SE4": 1240.85 + } + }, + { + "deliveryStart": "2024-11-05T06:00:00Z", + "deliveryEnd": "2024-11-05T07:00:00Z", + "entryPerArea": { + "SE3": 1406.14, + "SE4": 1648.25 + } + }, + { + "deliveryStart": "2024-11-05T07:00:00Z", + "deliveryEnd": "2024-11-05T08:00:00Z", + "entryPerArea": { + "SE3": 1346.54, + "SE4": 1570.5 + } + }, + { + "deliveryStart": "2024-11-05T08:00:00Z", + "deliveryEnd": "2024-11-05T09:00:00Z", + "entryPerArea": { + "SE3": 1150.28, + "SE4": 1345.37 + } + }, + { + "deliveryStart": "2024-11-05T09:00:00Z", + "deliveryEnd": "2024-11-05T10:00:00Z", + "entryPerArea": { + "SE3": 1031.32, + "SE4": 1206.51 + } + }, + { + "deliveryStart": "2024-11-05T10:00:00Z", + "deliveryEnd": "2024-11-05T11:00:00Z", + "entryPerArea": { + "SE3": 927.37, + "SE4": 1085.8 + } + }, + { + "deliveryStart": "2024-11-05T11:00:00Z", + "deliveryEnd": "2024-11-05T12:00:00Z", + "entryPerArea": { + "SE3": 925.05, + "SE4": 1081.72 + } + }, + { + "deliveryStart": "2024-11-05T12:00:00Z", + "deliveryEnd": "2024-11-05T13:00:00Z", + "entryPerArea": { + "SE3": 949.49, + "SE4": 1130.38 + } + }, + { + "deliveryStart": "2024-11-05T13:00:00Z", + "deliveryEnd": "2024-11-05T14:00:00Z", + "entryPerArea": { + "SE3": 1042.03, + "SE4": 1256.91 + } + }, + { + "deliveryStart": "2024-11-05T14:00:00Z", + "deliveryEnd": "2024-11-05T15:00:00Z", + "entryPerArea": { + "SE3": 1258.89, + "SE4": 1765.82 + } + }, + { + "deliveryStart": "2024-11-05T15:00:00Z", + "deliveryEnd": "2024-11-05T16:00:00Z", + "entryPerArea": { + "SE3": 1816.45, + "SE4": 2522.55 + } + }, + { + "deliveryStart": "2024-11-05T16:00:00Z", + "deliveryEnd": "2024-11-05T17:00:00Z", + "entryPerArea": { + "SE3": 2512.65, + "SE4": 3533.03 + } + }, + { + "deliveryStart": "2024-11-05T17:00:00Z", + "deliveryEnd": "2024-11-05T18:00:00Z", + "entryPerArea": { + "SE3": 1819.83, + "SE4": 2524.06 + } + }, + { + "deliveryStart": "2024-11-05T18:00:00Z", + "deliveryEnd": "2024-11-05T19:00:00Z", + "entryPerArea": { + "SE3": 1011.77, + "SE4": 1804.46 + } + }, + { + "deliveryStart": 
"2024-11-05T19:00:00Z", + "deliveryEnd": "2024-11-05T20:00:00Z", + "entryPerArea": { + "SE3": 835.53, + "SE4": 1112.57 + } + }, + { + "deliveryStart": "2024-11-05T20:00:00Z", + "deliveryEnd": "2024-11-05T21:00:00Z", + "entryPerArea": { + "SE3": 796.19, + "SE4": 1051.69 + } + }, + { + "deliveryStart": "2024-11-05T21:00:00Z", + "deliveryEnd": "2024-11-05T22:00:00Z", + "entryPerArea": { + "SE3": 522.3, + "SE4": 662.44 + } + }, + { + "deliveryStart": "2024-11-05T22:00:00Z", + "deliveryEnd": "2024-11-05T23:00:00Z", + "entryPerArea": { + "SE3": 289.14, + "SE4": 349.21 + } + } + ], + "blockPriceAggregates": [ + { + "blockName": "Off-peak 1", + "deliveryStart": "2024-11-04T23:00:00Z", + "deliveryEnd": "2024-11-05T07:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 422.87, + "min": 61.69, + "max": 1406.14 + }, + "SE4": { + "average": 497.97, + "min": 65.19, + "max": 1648.25 + } + } + }, + { + "blockName": "Peak", + "deliveryStart": "2024-11-05T07:00:00Z", + "deliveryEnd": "2024-11-05T19:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 1315.97, + "min": 925.05, + "max": 2512.65 + }, + "SE4": { + "average": 1735.59, + "min": 1081.72, + "max": 3533.03 + } + } + }, + { + "blockName": "Off-peak 2", + "deliveryStart": "2024-11-05T19:00:00Z", + "deliveryEnd": "2024-11-05T23:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 610.79, + "min": 289.14, + "max": 835.53 + }, + "SE4": { + "average": 793.98, + "min": 349.21, + "max": 1112.57 + } + } + } + ], + "currency": "SEK", + "exchangeRate": 11.6402, + "areaStates": [ + { + "state": "Final", + "areas": ["SE3", "SE4"] + } + ], + "areaAverages": [ + { + "areaCode": "SE3", + "price": 900.74 + }, + { + "areaCode": "SE4", + "price": 1166.12 + } + ] +} diff --git a/tests/components/nordpool/snapshots/test_diagnostics.ambr b/tests/components/nordpool/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..dde2eca0022 --- /dev/null +++ b/tests/components/nordpool/snapshots/test_diagnostics.ambr @@ -0,0 +1,283 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'raw': dict({ + 'areaAverages': list([ + dict({ + 'areaCode': 'SE3', + 'price': 900.74, + }), + dict({ + 'areaCode': 'SE4', + 'price': 1166.12, + }), + ]), + 'areaStates': list([ + dict({ + 'areas': list([ + 'SE3', + 'SE4', + ]), + 'state': 'Final', + }), + ]), + 'blockPriceAggregates': list([ + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 422.87, + 'max': 1406.14, + 'min': 61.69, + }), + 'SE4': dict({ + 'average': 497.97, + 'max': 1648.25, + 'min': 65.19, + }), + }), + 'blockName': 'Off-peak 1', + 'deliveryEnd': '2024-11-05T07:00:00Z', + 'deliveryStart': '2024-11-04T23:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 1315.97, + 'max': 2512.65, + 'min': 925.05, + }), + 'SE4': dict({ + 'average': 1735.59, + 'max': 3533.03, + 'min': 1081.72, + }), + }), + 'blockName': 'Peak', + 'deliveryEnd': '2024-11-05T19:00:00Z', + 'deliveryStart': '2024-11-05T07:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 610.79, + 'max': 835.53, + 'min': 289.14, + }), + 'SE4': dict({ + 'average': 793.98, + 'max': 1112.57, + 'min': 349.21, + }), + }), + 'blockName': 'Off-peak 2', + 'deliveryEnd': '2024-11-05T23:00:00Z', + 'deliveryStart': '2024-11-05T19:00:00Z', + }), + ]), + 'currency': 'SEK', + 'deliveryAreas': list([ + 'SE3', + 'SE4', + ]), + 'deliveryDateCET': '2024-11-05', + 'exchangeRate': 11.6402, + 'market': 'DayAhead', + 'multiAreaEntries': list([ + dict({ + 'deliveryEnd': 
'2024-11-05T00:00:00Z', + 'deliveryStart': '2024-11-04T23:00:00Z', + 'entryPerArea': dict({ + 'SE3': 250.73, + 'SE4': 283.79, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T01:00:00Z', + 'deliveryStart': '2024-11-05T00:00:00Z', + 'entryPerArea': dict({ + 'SE3': 76.36, + 'SE4': 81.36, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T02:00:00Z', + 'deliveryStart': '2024-11-05T01:00:00Z', + 'entryPerArea': dict({ + 'SE3': 73.92, + 'SE4': 79.15, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T03:00:00Z', + 'deliveryStart': '2024-11-05T02:00:00Z', + 'entryPerArea': dict({ + 'SE3': 61.69, + 'SE4': 65.19, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T04:00:00Z', + 'deliveryStart': '2024-11-05T03:00:00Z', + 'entryPerArea': dict({ + 'SE3': 64.6, + 'SE4': 68.44, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T05:00:00Z', + 'deliveryStart': '2024-11-05T04:00:00Z', + 'entryPerArea': dict({ + 'SE3': 453.27, + 'SE4': 516.71, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T06:00:00Z', + 'deliveryStart': '2024-11-05T05:00:00Z', + 'entryPerArea': dict({ + 'SE3': 996.28, + 'SE4': 1240.85, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T07:00:00Z', + 'deliveryStart': '2024-11-05T06:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1406.14, + 'SE4': 1648.25, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T08:00:00Z', + 'deliveryStart': '2024-11-05T07:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1346.54, + 'SE4': 1570.5, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T09:00:00Z', + 'deliveryStart': '2024-11-05T08:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1150.28, + 'SE4': 1345.37, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T10:00:00Z', + 'deliveryStart': '2024-11-05T09:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1031.32, + 'SE4': 1206.51, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T11:00:00Z', + 'deliveryStart': '2024-11-05T10:00:00Z', + 'entryPerArea': dict({ + 'SE3': 927.37, + 'SE4': 1085.8, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T12:00:00Z', + 'deliveryStart': '2024-11-05T11:00:00Z', + 'entryPerArea': dict({ + 'SE3': 925.05, + 'SE4': 1081.72, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T13:00:00Z', + 'deliveryStart': '2024-11-05T12:00:00Z', + 'entryPerArea': dict({ + 'SE3': 949.49, + 'SE4': 1130.38, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T14:00:00Z', + 'deliveryStart': '2024-11-05T13:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1042.03, + 'SE4': 1256.91, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T15:00:00Z', + 'deliveryStart': '2024-11-05T14:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1258.89, + 'SE4': 1765.82, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T16:00:00Z', + 'deliveryStart': '2024-11-05T15:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1816.45, + 'SE4': 2522.55, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T17:00:00Z', + 'deliveryStart': '2024-11-05T16:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2512.65, + 'SE4': 3533.03, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T18:00:00Z', + 'deliveryStart': '2024-11-05T17:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1819.83, + 'SE4': 2524.06, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T19:00:00Z', + 'deliveryStart': '2024-11-05T18:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1011.77, + 'SE4': 1804.46, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T20:00:00Z', + 'deliveryStart': '2024-11-05T19:00:00Z', + 'entryPerArea': dict({ + 'SE3': 835.53, + 'SE4': 1112.57, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T21:00:00Z', + 'deliveryStart': '2024-11-05T20:00:00Z', + 'entryPerArea': dict({ + 
'SE3': 796.19, + 'SE4': 1051.69, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T22:00:00Z', + 'deliveryStart': '2024-11-05T21:00:00Z', + 'entryPerArea': dict({ + 'SE3': 522.3, + 'SE4': 662.44, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T23:00:00Z', + 'deliveryStart': '2024-11-05T22:00:00Z', + 'entryPerArea': dict({ + 'SE3': 289.14, + 'SE4': 349.21, + }), + }), + ]), + 'updatedAt': '2024-11-04T12:15:03.9456464Z', + 'version': 3, + }), + }) +# --- diff --git a/tests/components/nordpool/snapshots/test_sensor.ambr b/tests/components/nordpool/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..01600352861 --- /dev/null +++ b/tests/components/nordpool/snapshots/test_sensor.ambr @@ -0,0 +1,2215 @@ +# serializer version: 1 +# name: test_sensor[sensor.nord_pool_se3_currency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se3_currency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Currency', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'currency', + 'unique_id': 'SE3-currency', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_currency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Currency', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_currency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'SEK', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_current_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_current_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_price', + 'unique_id': 'SE3-current_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_current_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Current price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_current_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.01177', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_daily_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_daily_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_average', + 'unique_id': 'SE3-daily_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_daily_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Daily average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_daily_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.90074', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_exchange_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se3_exchange_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Exchange rate', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'exchange_rate', + 'unique_id': 'SE3-exchange_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_exchange_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Exchange rate', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_exchange_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.6402', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_last_updated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se3_last_updated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last updated', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'updated_at', + 'unique_id': 'SE3-updated_at', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_last_updated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Last updated', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_last_updated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T12:15:03+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_next_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_next_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Next price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'next_price', + 'unique_id': 'SE3-next_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_next_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Next price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_next_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.83553', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_1-SE3-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 1 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.42287', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_1-SE3-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 1 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.40614', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_1-SE3-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 1 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.06169', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_1-SE3-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 1 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_1-SE3-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 1 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_2-SE3-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 2 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.61079', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_2-SE3-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 2 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.83553', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_2-SE3-block_min', + 
'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 2 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.28914', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_2-SE3-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 2 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_2-SE3-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 2 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Peak average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'peak-SE3-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Peak average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.31597', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'peak-SE3-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Peak highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.51265', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'peak-SE3-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Peak lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.92505', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.nord_pool_se3_peak_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'peak-SE3-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Peak time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'peak-SE3-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Peak time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_previous_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_previous_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Previous price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_price', + 'unique_id': 'SE3-last_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_previous_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Previous price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_previous_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.81983', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_currency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , 
+ 'entity_id': 'sensor.nord_pool_se4_currency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Currency', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'currency', + 'unique_id': 'SE4-currency', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_currency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Currency', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_currency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'SEK', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_current_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_current_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_price', + 'unique_id': 'SE4-current_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_current_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Current price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_current_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.80446', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_daily_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_daily_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_average', + 'unique_id': 'SE4-daily_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_daily_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Daily average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_daily_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.16612', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_exchange_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se4_exchange_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Exchange rate', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'exchange_rate', + 'unique_id': 'SE4-exchange_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_exchange_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Exchange rate', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_exchange_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.6402', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_last_updated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se4_last_updated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last updated', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'updated_at', + 'unique_id': 'SE4-updated_at', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_last_updated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Last updated', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_last_updated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T12:15:03+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_next_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_next_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Next price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'next_price', + 'unique_id': 'SE4-next_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_next_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Next price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_next_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.11257', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_1-SE4-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 1 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.49797', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_1-SE4-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 1 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.64825', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_1-SE4-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 1 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 
'sensor.nord_pool_se4_off_peak_1_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.06519', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_1-SE4-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 1 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_1-SE4-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 1 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_2-SE4-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: 
test_sensor[sensor.nord_pool_se4_off_peak_2_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 2 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.79398', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_2-SE4-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 2 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.11257', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_2-SE4-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 2 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.34921', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_2-SE4-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 2 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_2-SE4-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 2 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'peak-SE4-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Peak average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.73559', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.nord_pool_se4_peak_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'peak-SE4-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Peak highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.53303', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'peak-SE4-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Peak lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.08172', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'peak-SE4-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Peak time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'peak-SE4-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Peak time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_previous_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_previous_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Previous price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_price', + 'unique_id': 'SE4-last_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_previous_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Previous price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_previous_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.52406', + }) +# --- diff --git a/tests/components/nordpool/test_config_flow.py b/tests/components/nordpool/test_config_flow.py new file mode 100644 index 00000000000..cfdfc63aca7 --- /dev/null +++ b/tests/components/nordpool/test_config_flow.py @@ -0,0 +1,206 @@ +"""Test the Nord Pool config flow.""" + +from __future__ import annotations + +from unittest.mock import patch + +from pynordpool import ( + DeliveryPeriodData, + NordPoolConnectionError, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) +import pytest + +from homeassistant import config_entries +from homeassistant.components.nordpool.const import CONF_AREAS, DOMAIN +from homeassistant.const import CONF_CURRENCY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import ENTRY_CONFIG + +from tests.common import MockConfigEntry + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + ENTRY_CONFIG, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["title"] == "Nord Pool" + assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_single_config_entry( + hass: HomeAssistant, load_int: None, get_data: DeliveryPeriodData +) -> None: + """Test abort for single config entry.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +@pytest.mark.parametrize( + ("error_message", "p_error"), + [ + (NordPoolConnectionError, "cannot_connect"), + (NordPoolEmptyResponseError, "no_data"), + (NordPoolError, "cannot_connect"), + (NordPoolResponseError, "cannot_connect"), + ], +) +async def test_cannot_connect( + hass: HomeAssistant, + get_data: DeliveryPeriodData, + error_message: Exception, + p_error: str, +) -> None: + """Test cannot connect error.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == config_entries.SOURCE_USER + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=error_message, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=ENTRY_CONFIG, + ) + + assert result["errors"] == {"base": p_error} + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=ENTRY_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Nord Pool" + assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_reconfigure( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, +) -> None: + """Test reconfiguration.""" + + result = await load_int.start_reconfigure_flow(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert load_int.data == { + "areas": [ + "SE3", + ], + "currency": "EUR", + } + + 
+@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +@pytest.mark.parametrize( + ("error_message", "p_error"), + [ + (NordPoolConnectionError, "cannot_connect"), + (NordPoolEmptyResponseError, "no_data"), + (NordPoolError, "cannot_connect"), + (NordPoolResponseError, "cannot_connect"), + ], +) +async def test_reconfigure_cannot_connect( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, + error_message: Exception, + p_error: str, +) -> None: + """Test cannot connect error in a reeconfigure flow.""" + + result = await load_int.start_reconfigure_flow(hass) + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=error_message, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["errors"] == {"base": p_error} + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert load_int.data == { + "areas": [ + "SE3", + ], + "currency": "EUR", + } diff --git a/tests/components/nordpool/test_coordinator.py b/tests/components/nordpool/test_coordinator.py new file mode 100644 index 00000000000..68534237dee --- /dev/null +++ b/tests/components/nordpool/test_coordinator.py @@ -0,0 +1,106 @@ +"""The test for the Nord Pool coordinator.""" + +from __future__ import annotations + +from datetime import timedelta +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from pynordpool import ( + DeliveryPeriodData, + NordPoolAuthenticationError, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) +import pytest + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . 
import ENTRY_CONFIG + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") +async def test_coordinator( + hass: HomeAssistant, + get_data: DeliveryPeriodData, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the Nord Pool coordinator with errors.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + ) as mock_data, + ): + mock_data.return_value = get_data + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "0.92737" + mock_data.reset_mock() + + mock_data.side_effect = NordPoolError("error") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + assert mock_data.call_count == 4 + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + mock_data.reset_mock() + + assert "Authentication error" not in caplog.text + mock_data.side_effect = NordPoolAuthenticationError("Authentication error") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + assert mock_data.call_count == 4 + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + assert "Authentication error" in caplog.text + mock_data.reset_mock() + + assert "Empty response" not in caplog.text + mock_data.side_effect = NordPoolEmptyResponseError("Empty response") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + assert mock_data.call_count == 4 + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + assert "Empty response" in caplog.text + mock_data.reset_mock() + + assert "Response error" not in caplog.text + mock_data.side_effect = NordPoolResponseError("Response error") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + assert mock_data.call_count == 4 + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + assert "Response error" in caplog.text + mock_data.reset_mock() + + mock_data.return_value = get_data + mock_data.side_effect = None + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "1.81645" diff --git a/tests/components/nordpool/test_diagnostics.py b/tests/components/nordpool/test_diagnostics.py new file mode 100644 index 00000000000..4639186ecf1 --- /dev/null +++ b/tests/components/nordpool/test_diagnostics.py @@ -0,0 +1,23 @@ +"""Test Nord Pool diagnostics.""" + +from __future__ import annotations + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + 
+async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + load_int: ConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test generating diagnostics for a config entry.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, load_int) == snapshot + ) diff --git a/tests/components/nordpool/test_init.py b/tests/components/nordpool/test_init.py new file mode 100644 index 00000000000..ebebb8b60c1 --- /dev/null +++ b/tests/components/nordpool/test_init.py @@ -0,0 +1,78 @@ +"""Test for Nord Pool component Init.""" + +from __future__ import annotations + +from unittest.mock import patch + +from pynordpool import ( + DeliveryPeriodData, + NordPoolConnectionError, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) +import pytest + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import ENTRY_CONFIG + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: + """Test load and unload an entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + assert entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("error"), + [ + (NordPoolConnectionError), + (NordPoolEmptyResponseError), + (NordPoolError), + (NordPoolResponseError), + ], +) +async def test_initial_startup_fails( + hass: HomeAssistant, get_data: DeliveryPeriodData, error: Exception +) -> None: + """Test that setup retries when the initial startup fails.""" + entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=error, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py new file mode 100644 index 00000000000..5c2d138cb34 --- /dev/null +++ b/tests/components/nordpool/test_sensor.py @@ -0,0 +1,62 @@ +"""The test for the Nord Pool sensor platform.""" + +from __future__ import annotations + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import snapshot_platform + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor( + hass: HomeAssistant, + load_int: ConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Nord Pool sensor.""" + + await snapshot_platform(hass, entity_registry, snapshot,
load_int.entry_id) + + +@pytest.mark.freeze_time("2024-11-05T23:00:00+00:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_no_next_price(hass: HomeAssistant, load_int: ConfigEntry) -> None: + """Test the Nord Pool sensor.""" + + current_price = hass.states.get("sensor.nord_pool_se3_current_price") + last_price = hass.states.get("sensor.nord_pool_se3_previous_price") + next_price = hass.states.get("sensor.nord_pool_se3_next_price") + + assert current_price is not None + assert last_price is not None + assert next_price is not None + assert current_price.state == "0.28914" + assert last_price.state == "0.28914" + assert next_price.state == STATE_UNKNOWN + + +@pytest.mark.freeze_time("2024-11-05T00:00:00+01:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_no_previous_price( + hass: HomeAssistant, load_int: ConfigEntry +) -> None: + """Test the Nord Pool sensor.""" + + current_price = hass.states.get("sensor.nord_pool_se3_current_price") + last_price = hass.states.get("sensor.nord_pool_se3_previous_price") + next_price = hass.states.get("sensor.nord_pool_se3_next_price") + + assert current_price is not None + assert last_price is not None + assert next_price is not None + assert current_price.state == "0.25073" + assert last_price.state == STATE_UNKNOWN + assert next_price.state == "0.07636" diff --git a/tests/components/notify/test_legacy.py b/tests/components/notify/test_legacy.py index 79a1b75dcae..eeacf915b03 100644 --- a/tests/components/notify/test_legacy.py +++ b/tests/components/notify/test_legacy.py @@ -19,7 +19,7 @@ from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from tests.common import MockPlatform, async_get_persistent_notifications, mock_platform +from tests.common import MockPlatform, mock_platform class NotificationService(notify.BaseNotificationService): @@ -186,24 +186,6 @@ async def test_remove_targets(hass: HomeAssistant) -> None: assert test.registered_targets == {"test_c": 1} -async def test_warn_template( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test warning when template used.""" - assert await async_setup_component(hass, "notify", {}) - - await hass.services.async_call( - "notify", - "persistent_notification", - {"message": "{{ 1 + 1 }}", "title": "Test notif {{ 1 + 1 }}"}, - blocking=True, - ) - # We should only log it once - assert caplog.text.count("Passing templates to notify service is deprecated") == 1 - notifications = async_get_persistent_notifications(hass) - assert len(notifications) == 1 - - async def test_invalid_platform( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: @@ -550,27 +532,11 @@ async def test_sending_none_message(hass: HomeAssistant, tmp_path: Path) -> None notify.DOMAIN, notify.SERVICE_NOTIFY, {notify.ATTR_MESSAGE: None} ) assert ( - str(exc.value) - == "template value is None for dictionary value @ data['message']" + str(exc.value) == "string value is None for dictionary value @ data['message']" ) send_message_mock.assert_not_called() -async def test_sending_templated_message(hass: HomeAssistant, tmp_path: Path) -> None: - """Send a templated message.""" - send_message_mock = await help_setup_notify(hass, tmp_path) - hass.states.async_set("sensor.temperature", 10) - data = { - notify.ATTR_MESSAGE: "{{states.sensor.temperature.state}}", - 
notify.ATTR_TITLE: "{{ states.sensor.temperature.name }}", - } - await hass.services.async_call(notify.DOMAIN, notify.SERVICE_NOTIFY, data) - await hass.async_block_till_done() - send_message_mock.assert_called_once_with( - "10", {"title": "temperature", "data": None} - ) - - async def test_method_forwards_correct_data( hass: HomeAssistant, tmp_path: Path ) -> None: diff --git a/tests/components/notify/test_repairs.py b/tests/components/notify/test_repairs.py index fef5818e1e6..e77da5cea6f 100644 --- a/tests/components/notify/test_repairs.py +++ b/tests/components/notify/test_repairs.py @@ -1,6 +1,5 @@ """Test repairs for notify entity component.""" -from http import HTTPStatus from unittest.mock import AsyncMock import pytest @@ -9,18 +8,16 @@ from homeassistant.components.notify import ( DOMAIN as NOTIFY_DOMAIN, migrate_notify_issue, ) -from homeassistant.components.repairs.issue_handler import ( - async_process_repairs_platforms, -) -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, MockModule, mock_integration +from tests.components.repairs import ( + async_process_repairs_platforms, + process_repair_fix_flow, + start_repair_fix_flow, +) from tests.typing import ClientSessionGenerator THERMOSTAT_ID = 0 @@ -66,20 +63,12 @@ async def test_notify_migration_repair_flow( ) assert len(issue_registry.issues) == 1 - url = RepairsFlowIndexView.url - resp = await http_client.post( - url, json={"handler": NOTIFY_DOMAIN, "issue_id": translation_key} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(http_client, NOTIFY_DOMAIN, translation_key) flow_id = data["flow_id"] assert data["step_id"] == "confirm" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await http_client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(http_client, flow_id) assert data["type"] == "create_entry" # Test confirm step in repair flow await hass.async_block_till_done() diff --git a/tests/components/notify_events/test_notify.py b/tests/components/notify_events/test_notify.py index dbfc354404b..df6df078de1 100644 --- a/tests/components/notify_events/test_notify.py +++ b/tests/components/notify_events/test_notify.py @@ -1,6 +1,10 @@ """The tests for notify_events.""" -from homeassistant.components.notify import ATTR_DATA, ATTR_MESSAGE, DOMAIN +from homeassistant.components.notify import ( + ATTR_DATA, + ATTR_MESSAGE, + DOMAIN as NOTIFY_DOMAIN, +) from homeassistant.components.notify_events.notify import ( ATTR_LEVEL, ATTR_PRIORITY, @@ -13,10 +17,10 @@ from tests.common import async_mock_service async def test_send_msg(hass: HomeAssistant) -> None: """Test notify.events service.""" - notify_calls = async_mock_service(hass, DOMAIN, "events") + notify_calls = async_mock_service(hass, NOTIFY_DOMAIN, "events") await hass.services.async_call( - DOMAIN, + NOTIFY_DOMAIN, "events", { ATTR_MESSAGE: "message content", @@ -32,7 +36,7 @@ async def test_send_msg(hass: HomeAssistant) -> None: assert len(notify_calls) == 1 call = notify_calls[-1] - assert call.domain == DOMAIN + assert call.domain == NOTIFY_DOMAIN assert call.service == "events" assert call.data.get(ATTR_MESSAGE) == "message content" assert 
call.data.get(ATTR_DATA).get(ATTR_TOKEN) == "XYZ" diff --git a/tests/components/notion/test_config_flow.py b/tests/components/notion/test_config_flow.py index 2cc5e3f04b7..15c211c19cb 100644 --- a/tests/components/notion/test_config_flow.py +++ b/tests/components/notion/test_config_flow.py @@ -6,13 +6,15 @@ from aionotion.errors import InvalidCredentialsError, NotionError import pytest from homeassistant.components.notion import CONF_REFRESH_TOKEN, CONF_USER_UUID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_REFRESH_TOKEN, TEST_USER_UUID, TEST_USERNAME +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -90,21 +92,13 @@ async def test_duplicate_error(hass: HomeAssistant, config, config_entry) -> Non async def test_reauth( hass: HomeAssistant, config, - config_entry, + config_entry: MockConfigEntry, errors, get_client_with_exception, mock_aionotion, ) -> None: """Test that re-auth works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data=config, - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" # Test errors that can arise when getting a Notion API client: diff --git a/tests/components/notion/test_diagnostics.py b/tests/components/notion/test_diagnostics.py index 4d87b6292e4..c1d1bd1bb2e 100644 --- a/tests/components/notion/test_diagnostics.py +++ b/tests/components/notion/test_diagnostics.py @@ -36,6 +36,8 @@ async def test_entry_diagnostics( "disabled_by": None, "created_at": ANY, "modified_at": ANY, + "discovery_keys": {}, + "subentries": [], }, "data": { "bridges": [ diff --git a/tests/components/nuki/snapshots/test_binary_sensor.ambr b/tests/components/nuki/snapshots/test_binary_sensor.ambr index 4a122fa78f2..55976bcb433 100644 --- a/tests/components/nuki/snapshots/test_binary_sensor.ambr +++ b/tests/components/nuki/snapshots/test_binary_sensor.ambr @@ -83,7 +83,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Community door Ring Action', - 'nuki_id': 2, }), 'context': , 'entity_id': 'binary_sensor.community_door_ring_action', @@ -131,7 +130,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'door', 'friendly_name': 'Home', - 'nuki_id': 1, }), 'context': , 'entity_id': 'binary_sensor.home', diff --git a/tests/components/nuki/snapshots/test_lock.ambr b/tests/components/nuki/snapshots/test_lock.ambr index a0013fc37c1..24c80e7b487 100644 --- a/tests/components/nuki/snapshots/test_lock.ambr +++ b/tests/components/nuki/snapshots/test_lock.ambr @@ -35,9 +35,7 @@ # name: test_locks[lock.community_door-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'battery_critical': False, 'friendly_name': 'Community door', - 'nuki_id': 2, 'supported_features': , }), 'context': , @@ -84,9 +82,7 @@ # name: test_locks[lock.home-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'battery_critical': False, 'friendly_name': 'Home', - 'nuki_id': 1, 'supported_features': , }), 'context': , diff --git a/tests/components/nuki/snapshots/test_sensor.ambr b/tests/components/nuki/snapshots/test_sensor.ambr index 3c1159aecba..a319104fbc3 100644 --- 
a/tests/components/nuki/snapshots/test_sensor.ambr +++ b/tests/components/nuki/snapshots/test_sensor.ambr @@ -37,7 +37,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Home Battery', - 'nuki_id': 1, 'unit_of_measurement': '%', }), 'context': , diff --git a/tests/components/nuki/test_config_flow.py b/tests/components/nuki/test_config_flow.py index cdd429c40c5..d4ddc261f1e 100644 --- a/tests/components/nuki/test_config_flow.py +++ b/tests/components/nuki/test_config_flow.py @@ -210,9 +210,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: """Test starting a reauthentication flow.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -241,9 +239,7 @@ async def test_reauth_invalid_auth(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with invalid auth.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -265,9 +261,7 @@ async def test_reauth_cannot_connect(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with cannot connect.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -289,9 +283,7 @@ async def test_reauth_unknown_exception(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with an unknown exception.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/number/test_const.py b/tests/components/number/test_const.py deleted file mode 100644 index 13d94e2eeaf..00000000000 --- a/tests/components/number/test_const.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Test the number const module.""" - -import pytest - -from homeassistant.components.number import const - -from tests.common import help_test_all, import_and_test_deprecated_constant_enum - - -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(const) - - -@pytest.mark.parametrize(("enum"), list(const.NumberMode)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: const.NumberMode, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, const, enum, "MODE_", "2025.1") diff --git a/tests/components/number/test_init.py b/tests/components/number/test_init.py index 721b531e8cd..31d99dc55d7 100644 --- a/tests/components/number/test_init.py +++ b/tests/components/number/test_init.py @@ -2,7 +2,7 @@ from collections.abc import Generator from typing import Any -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch import pytest @@ -836,6 +836,69 @@ 
async def test_custom_unit_change( assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == default_unit + +async def test_translated_unit( + hass: HomeAssistant, +) -> None: + """Test translated unit.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.number.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = common.MockNumberEntity( + name="Test", + native_value=123, + unique_id="very_unique", + ) + entity0.entity_description = NumberEntityDescription( + "test", + translation_key="test_translation_key", + ) + setup_test_component_platform(hass, DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "number", {"number": {"platform": "test"}} + ) + await hass.async_block_till_done() + + entity_id = entity0.entity_id + state = hass.states.get(entity_id) + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == "Tests" + + +async def test_translated_unit_with_native_unit_raises( + hass: HomeAssistant, +) -> None: + """Test that a translated unit together with a native unit raises.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.number.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = common.MockNumberEntity( + name="Test", + native_value=123, + unique_id="very_unique", + ) + entity0.entity_description = NumberEntityDescription( + "test", + translation_key="test_translation_key", + native_unit_of_measurement="bad_unit", + ) + setup_test_component_platform(hass, DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "number", {"number": {"platform": "test"}} + ) + await hass.async_block_till_done() + # Setup fails so entity_id is None + assert entity0.entity_id is None + + def test_device_classes_aligned() -> None: """Make sure all sensor device classes are also available in NumberDeviceClass.""" diff --git a/tests/components/nut/test_diagnostics.py b/tests/components/nut/test_diagnostics.py index f91269f5196..2586f224d73 100644 --- a/tests/components/nut/test_diagnostics.py +++ b/tests/components/nut/test_diagnostics.py @@ -39,5 +39,5 @@ async def test_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result["entry"] == entry_dict + assert result["entry"] == entry_dict | {"discovery_keys": {}} assert result["nut_data"] == nut_data_dict diff --git a/tests/components/nut/test_init.py b/tests/components/nut/test_init.py index 61a5187407b..d5d85daa336 100644 --- a/tests/components/nut/test_init.py +++ b/tests/components/nut/test_init.py @@ -8,8 +8,9 @@ from homeassistant.components.nut.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, CONF_PORT, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr -from .util import _get_mock_nutclient +from .util import _get_mock_nutclient, async_init_integration from tests.common import MockConfigEntry @@ -96,3 +97,53 @@ async def test_auth_fails(hass: HomeAssistant) -> None: flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 assert flows[0]["context"]["source"] == "reauth" + + +async def test_serial_number(hass: HomeAssistant) -> None: + """Test for serial number set on device.""" + mock_serial_number = "A00000000000" + await async_init_integration( + hass, + username="someuser", + password="somepassword", + list_vars={"ups.serial": mock_serial_number}, +
list_ups={"ups1": "UPS 1"}, + list_commands_return_value=[], + ) + + device_registry = dr.async_get(hass) + assert device_registry is not None + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_serial_number)} + ) + + assert device_entry is not None + assert device_entry.serial_number == mock_serial_number + + +async def test_device_location(hass: HomeAssistant) -> None: + """Test for suggested location on device.""" + mock_serial_number = "A00000000000" + mock_device_location = "XYZ Location" + await async_init_integration( + hass, + username="someuser", + password="somepassword", + list_vars={ + "ups.serial": mock_serial_number, + "device.location": mock_device_location, + }, + list_ups={"ups1": "UPS 1"}, + list_commands_return_value=[], + ) + + device_registry = dr.async_get(hass) + assert device_registry is not None + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_serial_number)} + ) + + assert device_entry is not None + assert device_entry.suggested_area == mock_device_location diff --git a/tests/components/nyt_games/__init__.py b/tests/components/nyt_games/__init__.py new file mode 100644 index 00000000000..46dff12e5a1 --- /dev/null +++ b/tests/components/nyt_games/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the NYT Games integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/nyt_games/conftest.py b/tests/components/nyt_games/conftest.py new file mode 100644 index 00000000000..1004b6eb42a --- /dev/null +++ b/tests/components/nyt_games/conftest.py @@ -0,0 +1,57 @@ +"""NYTGames tests configuration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from nyt_games.models import ConnectionsStats, WordleStats +import pytest + +from homeassistant.components.nyt_games.const import DOMAIN +from homeassistant.const import CONF_TOKEN + +from tests.common import MockConfigEntry, load_fixture + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.nyt_games.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_nyt_games_client() -> Generator[AsyncMock]: + """Mock an NYTGames client.""" + with ( + patch( + "homeassistant.components.nyt_games.NYTGamesClient", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.nyt_games.config_flow.NYTGamesClient", + new=mock_client, + ), + ): + client = mock_client.return_value + client.get_latest_stats.return_value = WordleStats.from_json( + load_fixture("latest.json", DOMAIN) + ).player.stats + client.get_user_id.return_value = 218886794 + client.get_connections.return_value = ConnectionsStats.from_json( + load_fixture("connections.json", DOMAIN) + ).player.stats + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="NYTGames", + data={CONF_TOKEN: "token"}, + unique_id="218886794", + ) diff --git a/tests/components/nyt_games/fixtures/connections.json b/tests/components/nyt_games/fixtures/connections.json new file mode 100644 
index 00000000000..8c1ea18199a --- /dev/null +++ b/tests/components/nyt_games/fixtures/connections.json @@ -0,0 +1,24 @@ +{ + "states": [], + "user_id": 218886794, + "player": { + "user_id": 218886794, + "last_updated": 1727097528, + "stats": { + "connections": { + "puzzles_completed": 9, + "puzzles_won": 3, + "last_played_print_date": "2024-09-23", + "current_streak": 0, + "max_streak": 2, + "mistakes": { + "0": 2, + "1": 0, + "2": 1, + "3": 0, + "4": 6 + } + } + } + } +} diff --git a/tests/components/nyt_games/fixtures/latest.json b/tests/components/nyt_games/fixtures/latest.json new file mode 100644 index 00000000000..73a6f440fc0 --- /dev/null +++ b/tests/components/nyt_games/fixtures/latest.json @@ -0,0 +1,69 @@ +{ + "states": [], + "user_id": 218886794, + "player": { + "user_id": 218886794, + "last_updated": 1726831978, + "stats": { + "spelling_bee": { + "puzzles_started": 87, + "total_words": 362, + "total_pangrams": 15, + "longest_word": { + "word": "checkable", + "center_letter": "b", + "print_date": "2024-07-27" + }, + "ranks": { + "Beginner": 23, + "Good": 21, + "Good Start": 14, + "Moving Up": 16, + "Nice": 4, + "Solid": 9 + } + }, + "wordle": { + "legacyStats": { + "gamesPlayed": 70, + "gamesWon": 51, + "guesses": { + "1": 0, + "2": 1, + "3": 7, + "4": 11, + "5": 20, + "6": 12, + "fail": 19 + }, + "currentStreak": 1, + "maxStreak": 5, + "lastWonDayOffset": 1189, + "hasPlayed": true, + "autoOptInTimestamp": 1708273168957, + "hasMadeStatsChoice": false, + "timestamp": 1726831978 + }, + "calculatedStats": { + "gamesPlayed": 33, + "gamesWon": 26, + "guesses": { + "1": 0, + "2": 1, + "3": 4, + "4": 7, + "5": 10, + "6": 4, + "fail": 7 + }, + "currentStreak": 1, + "maxStreak": 5, + "lastWonPrintDate": "2024-09-20", + "lastCompletedPrintDate": "2024-09-20", + "hasPlayed": true, + "generation": 1 + } + } + } + } +} diff --git a/tests/components/nyt_games/fixtures/new_account.json b/tests/components/nyt_games/fixtures/new_account.json new file mode 100644 index 00000000000..ad4d8e2e416 --- /dev/null +++ b/tests/components/nyt_games/fixtures/new_account.json @@ -0,0 +1,51 @@ +{ + "states": [], + "user_id": 260705259, + "player": { + "user_id": 260705259, + "last_updated": 1727358123, + "stats": { + "wordle": { + "legacyStats": { + "gamesPlayed": 1, + "gamesWon": 1, + "guesses": { + "1": 0, + "2": 0, + "3": 0, + "4": 0, + "5": 1, + "6": 0, + "fail": 0 + }, + "currentStreak": 0, + "maxStreak": 1, + "lastWonDayOffset": 1118, + "hasPlayed": true, + "autoOptInTimestamp": 1727357874700, + "hasMadeStatsChoice": false, + "timestamp": 1727358123 + }, + "calculatedStats": { + "gamesPlayed": 0, + "gamesWon": 0, + "guesses": { + "1": 0, + "2": 0, + "3": 0, + "4": 0, + "5": 0, + "6": 0, + "fail": 0 + }, + "currentStreak": 0, + "maxStreak": 1, + "lastWonPrintDate": "", + "lastCompletedPrintDate": "", + "hasPlayed": false, + "generation": 1 + } + } + } + } +} diff --git a/tests/components/nyt_games/snapshots/test_init.ambr b/tests/components/nyt_games/snapshots/test_init.ambr new file mode 100644 index 00000000000..383bed0e106 --- /dev/null +++ b/tests/components/nyt_games/snapshots/test_init.ambr @@ -0,0 +1,97 @@ +# serializer version: 1 +# name: test_device_info[device_connections] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'nyt_games', + '218886794_connections', + ), + }), + 'is_new': False, + 'labels': set({ 
+ }), + 'manufacturer': 'New York Times', + 'model': None, + 'model_id': None, + 'name': 'Connections', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_device_info[device_spelling_bee] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'nyt_games', + '218886794_spelling_bee', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'New York Times', + 'model': None, + 'model_id': None, + 'name': 'Spelling Bee', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_device_info[device_wordle] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'nyt_games', + '218886794_wordle', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'New York Times', + 'model': None, + 'model_id': None, + 'name': 'Wordle', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/nyt_games/snapshots/test_sensor.ambr b/tests/components/nyt_games/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..84b74a26f0d --- /dev/null +++ b/tests/components/nyt_games/snapshots/test_sensor.ambr @@ -0,0 +1,602 @@ +# serializer version: 1 +# name: test_all_entities[sensor.connections_current_streak-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.connections_current_streak', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current streak', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'streak', + 'unique_id': '218886794-connections-connections_streak', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.connections_current_streak-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Connections Current streak', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.connections_current_streak', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.connections_highest_streak-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.connections_highest_streak', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Highest streak', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_streak', + 'unique_id': '218886794-connections-connections_max_streak', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.connections_highest_streak-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Connections Highest streak', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.connections_highest_streak', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_entities[sensor.connections_last_played-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.connections_last_played', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last played', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_played', + 'unique_id': '218886794-connections-connections_last_played', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.connections_last_played-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'date', + 'friendly_name': 'Connections Last played', + }), + 'context': , + 'entity_id': 'sensor.connections_last_played', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-09-23', + }) +# --- +# name: test_all_entities[sensor.connections_played-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.connections_played', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Played', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'connections_played', + 'unique_id': '218886794-connections-connections_played', + 'unit_of_measurement': 'games', + }) +# --- +# name: test_all_entities[sensor.connections_played-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Connections Played', + 'state_class': , + 'unit_of_measurement': 'games', + }), + 'context': , + 'entity_id': 'sensor.connections_played', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9', + }) +# --- +# name: test_all_entities[sensor.connections_won-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.connections_won', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, 
+ 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Won', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'won', + 'unique_id': '218886794-connections-connections_won', + 'unit_of_measurement': 'games', + }) +# --- +# name: test_all_entities[sensor.connections_won-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Connections Won', + 'state_class': , + 'unit_of_measurement': 'games', + }), + 'context': , + 'entity_id': 'sensor.connections_won', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_all_entities[sensor.spelling_bee_played-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spelling_bee_played', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Played', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'spelling_bees_played', + 'unique_id': '218886794-spelling_bee-spelling_bees_played', + 'unit_of_measurement': 'games', + }) +# --- +# name: test_all_entities[sensor.spelling_bee_played-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spelling Bee Played', + 'state_class': , + 'unit_of_measurement': 'games', + }), + 'context': , + 'entity_id': 'sensor.spelling_bee_played', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '87', + }) +# --- +# name: test_all_entities[sensor.spelling_bee_total_pangrams_found-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spelling_bee_total_pangrams_found', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total pangrams found', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_pangrams', + 'unique_id': '218886794-spelling_bee-spelling_bees_total_pangrams', + 'unit_of_measurement': 'pangrams', + }) +# --- +# name: test_all_entities[sensor.spelling_bee_total_pangrams_found-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spelling Bee Total pangrams found', + 'state_class': , + 'unit_of_measurement': 'pangrams', + }), + 'context': , + 'entity_id': 'sensor.spelling_bee_total_pangrams_found', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_all_entities[sensor.spelling_bee_total_words_found-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spelling_bee_total_words_found', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total words found', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_words', + 'unique_id': '218886794-spelling_bee-spelling_bees_total_words', + 'unit_of_measurement': 'words', + }) +# --- +# name: test_all_entities[sensor.spelling_bee_total_words_found-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spelling Bee Total words found', + 'state_class': , + 'unit_of_measurement': 'words', + }), + 'context': , + 'entity_id': 'sensor.spelling_bee_total_words_found', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '362', + }) +# --- +# name: test_all_entities[sensor.wordle_current_streak-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wordle_current_streak', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current streak', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'streak', + 'unique_id': '218886794-wordle-wordles_streak', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.wordle_current_streak-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Wordle Current streak', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wordle_current_streak', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_all_entities[sensor.wordle_highest_streak-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wordle_highest_streak', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Highest streak', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_streak', + 'unique_id': '218886794-wordle-wordles_max_streak', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.wordle_highest_streak-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Wordle Highest streak', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wordle_highest_streak', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_all_entities[sensor.wordle_played-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', 
+ 'entity_category': None, + 'entity_id': 'sensor.wordle_played', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Played', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wordles_played', + 'unique_id': '218886794-wordle-wordles_played', + 'unit_of_measurement': 'games', + }) +# --- +# name: test_all_entities[sensor.wordle_played-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wordle Played', + 'state_class': , + 'unit_of_measurement': 'games', + }), + 'context': , + 'entity_id': 'sensor.wordle_played', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70', + }) +# --- +# name: test_all_entities[sensor.wordle_won-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wordle_won', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Won', + 'platform': 'nyt_games', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'won', + 'unique_id': '218886794-wordle-wordles_won', + 'unit_of_measurement': 'games', + }) +# --- +# name: test_all_entities[sensor.wordle_won-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wordle Won', + 'state_class': , + 'unit_of_measurement': 'games', + }), + 'context': , + 'entity_id': 'sensor.wordle_won', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '51', + }) +# --- diff --git a/tests/components/nyt_games/test_config_flow.py b/tests/components/nyt_games/test_config_flow.py new file mode 100644 index 00000000000..bd17724887e --- /dev/null +++ b/tests/components/nyt_games/test_config_flow.py @@ -0,0 +1,125 @@ +"""Tests for the NYT Games config flow.""" + +from unittest.mock import AsyncMock + +from nyt_games import NYTGamesAuthenticationError, NYTGamesError +import pytest + +from homeassistant.components.nyt_games.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, + mock_nyt_games_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: "token"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "NYT Games" + assert result["data"] == {CONF_TOKEN: "token"} + assert result["result"].unique_id == "218886794" + + +async def test_stripping_token( + hass: HomeAssistant, + mock_nyt_games_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test stripping token.""" + result = await 
hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: " token "}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {CONF_TOKEN: "token"} + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (NYTGamesAuthenticationError, "invalid_auth"), + (NYTGamesError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_flow_errors( + hass: HomeAssistant, + mock_nyt_games_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test flow errors.""" + mock_nyt_games_client.get_user_id.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + + mock_nyt_games_client.get_user_id.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: "token"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate( + hass: HomeAssistant, + mock_nyt_games_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/nyt_games/test_init.py b/tests/components/nyt_games/test_init.py new file mode 100644 index 00000000000..2e1a8c92f90 --- /dev/null +++ b/tests/components/nyt_games/test_init.py @@ -0,0 +1,30 @@ +"""Tests for the NYT Games integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.nyt_games.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_nyt_games_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + for entity in ("wordle", "spelling_bee", "connections"): + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, f"{mock_config_entry.unique_id}_{entity}")} + ) + assert device_entry is not None + assert device_entry == snapshot(name=f"device_{entity}") diff --git a/tests/components/nyt_games/test_sensor.py b/tests/components/nyt_games/test_sensor.py new file mode 100644 index 00000000000..f35caf20b57 --- /dev/null +++ b/tests/components/nyt_games/test_sensor.py @@ -0,0 +1,77 @@ +"""Tests for the NYT Games sensor platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from nyt_games import NYTGamesError, WordleStats +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.nyt_games.const import DOMAIN +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_nyt_games_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_updating_exception( + hass: HomeAssistant, + mock_nyt_games_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test handling an exception during update.""" + await setup_integration(hass, mock_config_entry) + + mock_nyt_games_client.get_latest_stats.side_effect = NYTGamesError + + freezer.tick(timedelta(minutes=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.wordle_played").state == STATE_UNAVAILABLE + + mock_nyt_games_client.get_latest_stats.side_effect = None + + freezer.tick(timedelta(minutes=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.wordle_played").state != STATE_UNAVAILABLE + + +async def test_new_account( + hass: HomeAssistant, + mock_nyt_games_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test handling an exception during update.""" + mock_nyt_games_client.get_latest_stats.return_value = WordleStats.from_json( + load_fixture("new_account.json", DOMAIN) + ).player.stats + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("sensor.spelling_bee_played") is None diff --git a/tests/components/octoprint/test_config_flow.py b/tests/components/octoprint/test_config_flow.py index 738fbea0887..e0696486718 100644 --- a/tests/components/octoprint/test_config_flow.py +++ b/tests/components/octoprint/test_config_flow.py @@ -580,15 +580,7 @@ async def test_reauth_form(hass: HomeAssistant) -> None: unique_id="1234", ) 
entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "entry_id": entry.entry_id, - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert not result["errors"] diff --git a/tests/components/ohme/__init__.py b/tests/components/ohme/__init__.py new file mode 100644 index 00000000000..7c00bedbd1e --- /dev/null +++ b/tests/components/ohme/__init__.py @@ -0,0 +1,14 @@ +"""Tests for the Ohme integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the Ohme integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/ohme/conftest.py b/tests/components/ohme/conftest.py new file mode 100644 index 00000000000..90395feeb6b --- /dev/null +++ b/tests/components/ohme/conftest.py @@ -0,0 +1,64 @@ +"""Provide common fixtures.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from ohme import ChargerPower, ChargerStatus +import pytest + +from homeassistant.components.ohme.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.ohme.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="test@example.com", + domain=DOMAIN, + version=1, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter2", + }, + ) + + +@pytest.fixture +def mock_client(): + """Fixture to mock the OhmeApiClient.""" + with ( + patch( + "homeassistant.components.ohme.config_flow.OhmeApiClient", + autospec=True, + ) as client, + patch( + "homeassistant.components.ohme.OhmeApiClient", + new=client, + ), + ): + client = client.return_value + client.async_login.return_value = True + client.status = ChargerStatus.CHARGING + client.power = ChargerPower(0, 0, 0, 0) + client.serial = "chargerid" + client.ct_connected = True + client.energy = 1000 + client.device_info = { + "name": "Ohme Home Pro", + "model": "Home Pro", + "sw_version": "v2.65", + } + yield client diff --git a/tests/components/ohme/snapshots/test_button.ambr b/tests/components/ohme/snapshots/test_button.ambr new file mode 100644 index 00000000000..32de16208f4 --- /dev/null +++ b/tests/components/ohme/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_buttons[button.ohme_home_pro_approve_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.ohme_home_pro_approve_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Approve charge', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'approve', + 'unique_id': 'chargerid_approve', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.ohme_home_pro_approve_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohme Home Pro Approve charge', + }), + 'context': , + 'entity_id': 'button.ohme_home_pro_approve_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- diff --git a/tests/components/ohme/snapshots/test_init.ambr b/tests/components/ohme/snapshots/test_init.ambr new file mode 100644 index 00000000000..e3ed339b78a --- /dev/null +++ b/tests/components/ohme/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'ohme', + 'chargerid', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Ohme', + 'model': 'Home Pro', + 'model_id': None, + 'name': 'Ohme Home Pro', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': 'chargerid', + 'suggested_area': None, + 'sw_version': 'v2.65', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/ohme/snapshots/test_sensor.ambr b/tests/components/ohme/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..fbffa5b7e5d --- /dev/null +++ b/tests/components/ohme/snapshots/test_sensor.ambr @@ -0,0 +1,268 @@ +# serializer version: 1 +# name: test_sensors[sensor.ohme_home_pro_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CT current', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ct_current', + 'unique_id': 'chargerid_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Ohme Home Pro CT current', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, 
+ 'unique_id': 'chargerid_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Ohme Home Pro Current', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'chargerid_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Ohme Home Pro Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'chargerid_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Ohme Home Pro Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'unplugged', + 'pending_approval', + 'charging', + 'plugged_in', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , 
+ 'original_icon': None, + 'original_name': 'Status', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': 'chargerid_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Ohme Home Pro Status', + 'options': list([ + 'unplugged', + 'pending_approval', + 'charging', + 'plugged_in', + ]), + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'charging', + }) +# --- diff --git a/tests/components/ohme/test_button.py b/tests/components/ohme/test_button.py new file mode 100644 index 00000000000..1728563b2e9 --- /dev/null +++ b/tests/components/ohme/test_button.py @@ -0,0 +1,79 @@ +"""Tests for sensors.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from ohme import ChargerStatus +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_buttons( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the Ohme buttons.""" + with patch("homeassistant.components.ohme.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_button_available( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test that button shows as unavailable when a charge is not pending approval.""" + mock_client.status = ChargerStatus.PENDING_APPROVAL + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("button.ohme_home_pro_approve_charge") + assert state.state == STATE_UNKNOWN + + mock_client.status = ChargerStatus.PLUGGED_IN + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("button.ohme_home_pro_approve_charge") + assert state.state == STATE_UNAVAILABLE + + +async def test_button_press( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the button press action.""" + mock_client.status = ChargerStatus.PENDING_APPROVAL + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.ohme_home_pro_approve_charge", + }, + blocking=True, + ) + + assert len(mock_client.async_approve_charge.mock_calls) == 1 diff --git a/tests/components/ohme/test_config_flow.py b/tests/components/ohme/test_config_flow.py new file mode 100644 index 00000000000..bb7ecc00bdc --- /dev/null +++ b/tests/components/ohme/test_config_flow.py @@ -0,0 +1,184 @@ +"""Tests for the config flow.""" + +from unittest.mock import AsyncMock, MagicMock + +from 
ohme import ApiException, AuthException +import pytest + +from homeassistant.components.ohme.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_config_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_client: MagicMock +) -> None: + """Test config flow.""" + + # Initial form load + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Successful login + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "test@example.com", CONF_PASSWORD: "hunter2"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test@example.com" + assert result["data"] == { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter2", + } + + +@pytest.mark.parametrize( + ("test_exception", "expected_error"), + [(AuthException, "invalid_auth"), (ApiException, "unknown")], +) +async def test_config_flow_fail( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_client: MagicMock, + test_exception: Exception, + expected_error: str, +) -> None: + """Test config flow errors.""" + + # Initial form load + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Failed login + mock_client.async_login.side_effect = test_exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "test@example.com", CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + # End with CREATE_ENTRY + mock_client.async_login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "test@example.com", CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test@example.com" + assert result["data"] == { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + } + + +async def test_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Ensure we can't add the same account twice.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter3", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_reauth_form(hass: HomeAssistant, mock_client: MagicMock) -> None: + """Test reauth form.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + assert not result["errors"] + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter2"}, + ) + await 
hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +@pytest.mark.parametrize( + ("test_exception", "expected_error"), + [(AuthException, "invalid_auth"), (ApiException, "unknown")], +) +async def test_reauth_fail( + hass: HomeAssistant, + mock_client: MagicMock, + test_exception: Exception, + expected_error: str, +) -> None: + """Test reauth errors.""" + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + + # Initial form load + result = await entry.start_reauth_flow(hass) + + assert result["step_id"] == "reauth_confirm" + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Failed login + mock_client.async_login.side_effect = test_exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + # End with success + mock_client.async_login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter2"}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" diff --git a/tests/components/ohme/test_init.py b/tests/components/ohme/test_init.py new file mode 100644 index 00000000000..0f4c7cd64ee --- /dev/null +++ b/tests/components/ohme/test_init.py @@ -0,0 +1,47 @@ +"""Test init of Ohme integration.""" + +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.ohme.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test loading and unloading the integration.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device( + mock_client: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Snapshot the device from registry.""" + await setup_integration(hass, mock_config_entry) + + device = device_registry.async_get_device({(DOMAIN, mock_client.serial)}) + assert device + assert device == snapshot diff --git a/tests/components/ohme/test_sensor.py b/tests/components/ohme/test_sensor.py new file mode 100644 index 00000000000..21f9f06f963 --- /dev/null +++ b/tests/components/ohme/test_sensor.py @@ -0,0 +1,59 @@ +"""Tests for sensors.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from ohme import ApiException +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the Ohme sensors.""" + with patch("homeassistant.components.ohme.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_sensors_unavailable( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test that sensors show as unavailable after a coordinator failure.""" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.ohme_home_pro_energy") + assert state.state == "1.0" + + mock_client.async_get_charge_session.side_effect = ApiException + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.ohme_home_pro_energy") + assert state.state == STATE_UNAVAILABLE + + mock_client.async_get_charge_session.side_effect = None + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.ohme_home_pro_energy") + assert state.state == "1.0" diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index f10805e747d..66dc8a0c603 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -121,7 +121,7 @@ async def test_template_variables( ("tool_args", "expected_tool_args"), [ ({"param1": 
"test_value"}, {"param1": "test_value"}), - ({"param1": 2}, {"param1": 2}), + ({"param2": 2}, {"param2": 2}), ( {"param1": "test_value", "floor": ""}, {"param1": "test_value"}, # Omit empty arguments @@ -153,7 +153,8 @@ async def test_function_call( mock_tool.name = "test_tool" mock_tool.description = "Test function" mock_tool.parameters = vol.Schema( - {vol.Optional("param1", description="Test parameters"): str} + {vol.Optional("param1", description="Test parameters"): str}, + extra=vol.ALLOW_EXTRA, ) mock_tool.async_call.return_value = "Test response" @@ -482,8 +483,10 @@ async def test_message_history_unlimited( "ollama.AsyncClient.chat", return_value={"message": {"role": "assistant", "content": "test response"}}, ), - patch.object(mock_config_entry, "options", {ollama.CONF_MAX_HISTORY: 0}), ): + hass.config_entries.async_update_entry( + mock_config_entry, options={ollama.CONF_MAX_HISTORY: 0} + ) for i in range(100): result = await conversation.async_converse( hass, diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index dd53d6cbce6..35f6b7d739c 100644 --- a/tests/components/onboarding/test_views.py +++ b/tests/components/onboarding/test_views.py @@ -5,7 +5,7 @@ from collections.abc import AsyncGenerator from http import HTTPStatus import os from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest @@ -70,23 +70,13 @@ async def no_rpi_fixture( @pytest.fixture(name="mock_supervisor") async def mock_supervisor_fixture( aioclient_mock: AiohttpClientMocker, + store_info: AsyncMock, + supervisor_is_connected: AsyncMock, + resolution_info: AsyncMock, ) -> AsyncGenerator[None]: """Mock supervisor.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -99,10 +89,6 @@ async def mock_supervisor_fixture( ) with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=True, - ), patch( "homeassistant.components.hassio.HassIO.get_info", return_value={}, @@ -111,10 +97,6 @@ async def mock_supervisor_fixture( "homeassistant.components.hassio.HassIO.get_host_info", return_value={}, ), - patch( - "homeassistant.components.hassio.HassIO.get_store", - return_value={}, - ), patch( "homeassistant.components.hassio.HassIO.get_supervisor_info", return_value={"diagnostics": True}, diff --git a/tests/components/ondilo_ico/conftest.py b/tests/components/ondilo_ico/conftest.py index a847c1df069..d35e5ac0003 100644 --- a/tests/components/ondilo_ico/conftest.py +++ b/tests/components/ondilo_ico/conftest.py @@ -46,37 +46,37 @@ def mock_ondilo_client( yield client -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def pool1() -> list[dict[str, Any]]: """First pool description.""" return [load_json_object_fixture("pool1.json", DOMAIN)] -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def pool2() -> list[dict[str, Any]]: """Second pool description.""" return [load_json_object_fixture("pool2.json", DOMAIN)] -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def ico_details1() -> dict[str, Any]: 
"""ICO details of first pool.""" return load_json_object_fixture("ico_details1.json", DOMAIN) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def ico_details2() -> dict[str, Any]: """ICO details of second pool.""" return load_json_object_fixture("ico_details2.json", DOMAIN) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def last_measures() -> list[dict[str, Any]]: """Pool measurements.""" return load_json_array_fixture("last_measures.json", DOMAIN) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def two_pools( pool1: list[dict[str, Any]], pool2: list[dict[str, Any]] ) -> list[dict[str, Any]]: diff --git a/tests/components/ondilo_ico/test_init.py b/tests/components/ondilo_ico/test_init.py index 707022e9145..67f68f27b3e 100644 --- a/tests/components/ondilo_ico/test_init.py +++ b/tests/components/ondilo_ico/test_init.py @@ -3,6 +3,8 @@ from typing import Any from unittest.mock import MagicMock +from ondilo import OndiloError +import pytest from syrupy import SnapshotAssertion from homeassistant.config_entries import ConfigEntryState @@ -35,6 +37,29 @@ async def test_devices( assert device_entry == snapshot(name=f"{identifier[0]}-{identifier[1]}") +async def test_get_pools_error( + hass: HomeAssistant, + mock_ondilo_client: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test get pools errors.""" + mock_ondilo_client.get_pools.side_effect = OndiloError( + 502, + ( + " 502 Bad Gateway " + "
<center><h1>502 Bad Gateway</h1></center> </body> </html>
" + ), + ) + await setup_integration(hass, config_entry, mock_ondilo_client) + + # No sensor should be created + assert not hass.states.async_all() + # We should not have tried to retrieve pool measures + assert mock_ondilo_client.get_ICO_details.call_count == 0 + assert mock_ondilo_client.get_last_pool_measures.call_count == 0 + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + async def test_init_with_no_ico_attached( hass: HomeAssistant, mock_ondilo_client: MagicMock, @@ -53,3 +78,77 @@ async def test_init_with_no_ico_attached( # We should not have tried to retrieve pool measures mock_ondilo_client.get_last_pool_measures.assert_not_called() assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize("api", ["get_ICO_details", "get_last_pool_measures"]) +async def test_details_error_all_pools( + hass: HomeAssistant, + mock_ondilo_client: MagicMock, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + pool1: dict[str, Any], + api: str, +) -> None: + """Test details and measures error for all pools.""" + mock_ondilo_client.get_pools.return_value = pool1 + client_api = getattr(mock_ondilo_client, api) + client_api.side_effect = OndiloError(400, "error") + + await setup_integration(hass, config_entry, mock_ondilo_client) + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + + assert not device_entries + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_details_error_one_pool( + hass: HomeAssistant, + mock_ondilo_client: MagicMock, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + ico_details2: dict[str, Any], +) -> None: + """Test details error for one pool and success for the other.""" + mock_ondilo_client.get_ICO_details.side_effect = [ + OndiloError( + 404, + "Not Found", + ), + ico_details2, + ] + + await setup_integration(hass, config_entry, mock_ondilo_client) + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + + assert len(device_entries) == 1 + + +async def test_measures_error_one_pool( + hass: HomeAssistant, + mock_ondilo_client: MagicMock, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + last_measures: list[dict[str, Any]], +) -> None: + """Test measures error for one pool and success for the other.""" + mock_ondilo_client.get_last_pool_measures.side_effect = [ + OndiloError( + 404, + "Not Found", + ), + last_measures, + ] + + await setup_integration(hass, config_entry, mock_ondilo_client) + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + + assert len(device_entries) == 1 diff --git a/tests/components/onewire/snapshots/test_sensor.ambr b/tests/components/onewire/snapshots/test_sensor.ambr index 5ad4cf2ef4b..261b081060c 100644 --- a/tests/components/onewire/snapshots/test_sensor.ambr +++ b/tests/components/onewire/snapshots/test_sensor.ambr @@ -363,7 +363,7 @@ 'supported_features': 0, 'translation_key': 'counter_id', 'unique_id': '/1D.111111111111/counter.A', - 'unit_of_measurement': 'count', + 'unit_of_measurement': None, }), EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -396,7 +396,7 @@ 'supported_features': 0, 'translation_key': 'counter_id', 'unique_id': '/1D.111111111111/counter.B', - 'unit_of_measurement': 'count', + 'unit_of_measurement': None, }), ]) # --- @@ -408,7 +408,6 @@ 'friendly_name': '1D.111111111111 Counter A', 'raw_value': 251123.0, 'state_class': , - 'unit_of_measurement': 
'count', }), 'context': , 'entity_id': 'sensor.1d_111111111111_counter_a', @@ -423,7 +422,6 @@ 'friendly_name': '1D.111111111111 Counter B', 'raw_value': 248125.0, 'state_class': , - 'unit_of_measurement': 'count', }), 'context': , 'entity_id': 'sensor.1d_111111111111_counter_b', @@ -531,7 +529,7 @@ 'supported_features': 0, 'translation_key': 'counter_id', 'unique_id': '/1D.111111111111/counter.A', - 'unit_of_measurement': 'count', + 'unit_of_measurement': None, }), EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -564,7 +562,7 @@ 'supported_features': 0, 'translation_key': 'counter_id', 'unique_id': '/1D.111111111111/counter.B', - 'unit_of_measurement': 'count', + 'unit_of_measurement': None, }), ]) # --- @@ -576,7 +574,6 @@ 'friendly_name': '1D.111111111111 Counter A', 'raw_value': 251123.0, 'state_class': , - 'unit_of_measurement': 'count', }), 'context': , 'entity_id': 'sensor.1d_111111111111_counter_a', @@ -591,7 +588,6 @@ 'friendly_name': '1D.111111111111 Counter B', 'raw_value': 248125.0, 'state_class': , - 'unit_of_measurement': 'count', }), 'context': , 'entity_id': 'sensor.1d_111111111111_counter_b', diff --git a/tests/components/onewire/test_config_flow.py b/tests/components/onewire/test_config_flow.py index c147a522a59..029e1278c86 100644 --- a/tests/components/onewire/test_config_flow.py +++ b/tests/components/onewire/test_config_flow.py @@ -263,4 +263,4 @@ async def test_user_options_no_devices( result = await hass.config_entries.options.async_init(config_entry.entry_id) await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "No configurable devices found." + assert result["reason"] == "no_configurable_devices" diff --git a/tests/components/onkyo/__init__.py b/tests/components/onkyo/__init__.py new file mode 100644 index 00000000000..064075d109e --- /dev/null +++ b/tests/components/onkyo/__init__.py @@ -0,0 +1,88 @@ +"""Tests for the Onkyo integration.""" + +from unittest.mock import AsyncMock, Mock, patch + +from homeassistant.components.onkyo.receiver import Receiver, ReceiverInfo +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +def create_receiver_info(id: int) -> ReceiverInfo: + """Create an empty receiver info object for testing.""" + return ReceiverInfo( + host=f"host {id}", + port=id, + model_name=f"type {id}", + identifier=f"id{id}", + ) + + +def create_connection(id: int) -> Mock: + """Create an mock connection object for testing.""" + connection = Mock() + connection.host = f"host {id}" + connection.port = 0 + connection.name = f"type {id}" + connection.identifier = f"id{id}" + return connection + + +def create_config_entry_from_info(info: ReceiverInfo) -> MockConfigEntry: + """Create a config entry from receiver info.""" + data = {CONF_HOST: info.host} + options = { + "volume_resolution": 80, + "input_sources": {"12": "tv"}, + "max_volume": 100, + } + + return MockConfigEntry( + data=data, + options=options, + title=info.model_name, + domain="onkyo", + unique_id=info.identifier, + ) + + +def create_empty_config_entry() -> MockConfigEntry: + """Create an empty config entry for use in unit tests.""" + data = {CONF_HOST: ""} + options = { + "volume_resolution": 80, + "input_sources": {"12": "tv"}, + "max_volume": 100, + } + + return MockConfigEntry( + data=data, + options=options, + title="Unit test Onkyo", + domain="onkyo", + unique_id="onkyo_unique_id", + ) + + +async def setup_integration( + hass: HomeAssistant, 
config_entry: MockConfigEntry, receiver_info: ReceiverInfo +) -> None: + """Fixture for setting up the component.""" + + config_entry.add_to_hass(hass) + + mock_receiver = AsyncMock() + mock_receiver.conn.close = Mock() + mock_receiver.callbacks.connect = Mock() + mock_receiver.callbacks.update = Mock() + + with ( + patch( + "homeassistant.components.onkyo.async_interview", + return_value=receiver_info, + ), + patch.object(Receiver, "async_create", return_value=mock_receiver), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/onkyo/conftest.py b/tests/components/onkyo/conftest.py new file mode 100644 index 00000000000..abbe39dd966 --- /dev/null +++ b/tests/components/onkyo/conftest.py @@ -0,0 +1,74 @@ +"""Configure tests for the Onkyo integration.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components.onkyo.const import DOMAIN + +from . import create_connection + +from tests.common import MockConfigEntry + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Create Onkyo entry in Home Assistant.""" + return MockConfigEntry( + domain=DOMAIN, + title="Onkyo", + data={}, + ) + + +@pytest.fixture(autouse=True) +def patch_timeouts(): + """Patch timeouts to avoid tests waiting.""" + with patch.multiple( + "homeassistant.components.onkyo.receiver", + DEVICE_INTERVIEW_TIMEOUT=0, + DEVICE_DISCOVERY_TIMEOUT=0, + ): + yield + + +@pytest.fixture +async def default_mock_discovery(): + """Mock discovery with a single device.""" + + async def mock_discover(host=None, discovery_callback=None, timeout=0): + await discovery_callback(create_connection(1)) + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + yield + + +@pytest.fixture +async def stub_mock_discovery(): + """Mock discovery with no devices.""" + + async def mock_discover(host=None, discovery_callback=None, timeout=0): + pass + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + yield + + +@pytest.fixture +async def empty_mock_discovery(): + """Mock discovery with an empty connection.""" + + async def mock_discover(host=None, discovery_callback=None, timeout=0): + await discovery_callback(None) + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + yield diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py new file mode 100644 index 00000000000..1ee0bfdf9c5 --- /dev/null +++ b/tests/components/onkyo/test_config_flow.py @@ -0,0 +1,494 @@ +"""Test Onkyo config flow.""" + +from typing import Any +from unittest.mock import patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.onkyo import InputSource +from homeassistant.components.onkyo.config_flow import OnkyoConfigFlow +from homeassistant.components.onkyo.const import ( + DOMAIN, + OPTION_MAX_VOLUME, + OPTION_VOLUME_RESOLUTION, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType, InvalidData + +from . 
import ( + create_config_entry_from_info, + create_connection, + create_empty_config_entry, + create_receiver_info, + setup_integration, +) + +from tests.common import MockConfigEntry + + +async def test_user_initial_menu(hass: HomeAssistant) -> None: + """Test initial menu.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert init_result["type"] is FlowResultType.MENU + # Check if the values are there, but ignore order + assert not set(init_result["menu_options"]) ^ {"manual", "eiscp_discovery"} + + +async def test_manual_valid_host(hass: HomeAssistant, default_mock_discovery) -> None: + """Test valid host entered.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "host 1"}, + ) + + assert select_result["step_id"] == "configure_receiver" + assert select_result["description_placeholders"]["name"] == "type 1 (host 1)" + + +async def test_manual_invalid_host(hass: HomeAssistant, stub_mock_discovery) -> None: + """Test invalid host entered.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + host_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + assert host_result["step_id"] == "manual" + assert host_result["errors"]["base"] == "cannot_connect" + + +async def test_manual_valid_host_unexpected_error( + hass: HomeAssistant, empty_mock_discovery +) -> None: + """Test valid host entered.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + host_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + assert host_result["step_id"] == "manual" + assert host_result["errors"]["base"] == "unknown" + + +async def test_discovery_and_no_devices_discovered( + hass: HomeAssistant, stub_mock_discovery +) -> None: + """Test initial menu.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + assert form_result["type"] is FlowResultType.ABORT + assert form_result["reason"] == "no_devices_found" + + +async def test_discovery_with_exception( + hass: HomeAssistant, empty_mock_discovery +) -> None: + """Test discovery which throws an unexpected exception.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + assert form_result["type"] is FlowResultType.ABORT + assert form_result["reason"] == "unknown" + + +async def test_discovery_with_new_and_existing_found(hass: HomeAssistant) -> None: + """Test discovery with a new and an 
existing entry.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + async def mock_discover(discovery_callback, timeout): + await discovery_callback(create_connection(1)) + await discovery_callback(create_connection(2)) + + with ( + patch("pyeiscp.Connection.discover", new=mock_discover), + # Fake it like the first entry was already added + patch.object(OnkyoConfigFlow, "_async_current_ids", return_value=["id1"]), + ): + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + assert form_result["type"] is FlowResultType.FORM + + assert form_result["data_schema"] is not None + schema = form_result["data_schema"].schema + container = schema["device"].container + assert container == {"id2": "type 2 (host 2)"} + + +async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: + """Test discovery after a selection.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + async def mock_discover(discovery_callback, timeout): + await discovery_callback(create_connection(42)) + await discovery_callback(create_connection(0)) + + with patch("pyeiscp.Connection.discover", new=mock_discover): + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={"device": "id42"}, + ) + + assert select_result["step_id"] == "configure_receiver" + assert select_result["description_placeholders"]["name"] == "type 42 (host 42)" + + +async def test_configure_empty_source_list( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test receiver configuration with no sources set.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + configure_result = await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 200, "input_sources": []}, + ) + + assert configure_result["errors"] == {"input_sources": "empty_input_source_list"} + + +async def test_configure_no_resolution( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test receiver configure with no resolution set.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + with pytest.raises(InvalidData): + await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"input_sources": ["TV"]}, + ) + + +async def test_configure_resolution_set( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test receiver configure with specified resolution.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + 
init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + configure_result = await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 200, "input_sources": ["TV"]}, + ) + + assert configure_result["type"] is FlowResultType.CREATE_ENTRY + assert configure_result["options"]["volume_resolution"] == 200 + + +async def test_configure_invalid_resolution_set( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test receiver configure with invalid resolution.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + with pytest.raises(InvalidData): + await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 42, "input_sources": ["TV"]}, + ) + + +async def test_reconfigure(hass: HomeAssistant, default_mock_discovery) -> None: + """Test the reconfigure config flow.""" + receiver_info = create_receiver_info(1) + config_entry = create_config_entry_from_info(receiver_info) + await setup_integration(hass, config_entry, receiver_info) + + old_host = config_entry.data[CONF_HOST] + old_max_volume = config_entry.options[OPTION_MAX_VOLUME] + + result = await config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"host": receiver_info.host} + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "configure_receiver" + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"volume_resolution": 200, "input_sources": ["TUNER"]}, + ) + + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reconfigure_successful" + + assert config_entry.data[CONF_HOST] == old_host + assert config_entry.options[OPTION_VOLUME_RESOLUTION] == 200 + assert config_entry.options[OPTION_MAX_VOLUME] == old_max_volume + + +async def test_reconfigure_new_device(hass: HomeAssistant) -> None: + """Test the reconfigure config flow with new device.""" + receiver_info = create_receiver_info(1) + config_entry = create_config_entry_from_info(receiver_info) + await setup_integration(hass, config_entry, receiver_info) + + old_unique_id = receiver_info.identifier + + result = await config_entry.start_reconfigure_flow(hass) + + mock_connection = create_connection(2) + + # Create mock discover that calls callback immediately + async def mock_discover(host, discovery_callback, timeout): + await discovery_callback(mock_connection) + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"host": mock_connection.host} + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "unique_id_mismatch" + + # unique id should remain unchanged + assert config_entry.unique_id 
== old_unique_id + + +@pytest.mark.parametrize( + ("user_input", "exception", "error"), + [ + ( + # No host, and thus no host reachable + { + CONF_HOST: None, + "receiver_max_volume": 100, + "max_volume": 100, + "sources": {}, + }, + None, + "cannot_connect", + ), + ( + # No host, and connection exception + { + CONF_HOST: None, + "receiver_max_volume": 100, + "max_volume": 100, + "sources": {}, + }, + Exception(), + "cannot_connect", + ), + ], +) +async def test_import_fail( + hass: HomeAssistant, + user_input: dict[str, Any], + exception: Exception, + error: str, +) -> None: + """Test import flow failed.""" + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + side_effect=exception, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error + + +async def test_import_success( + hass: HomeAssistant, +) -> None: + """Test import flow succeeded.""" + info = create_receiver_info(1) + + user_input = { + CONF_HOST: info.host, + "receiver_max_volume": 80, + "max_volume": 110, + "sources": { + InputSource("00"): "Auxiliary", + InputSource("01"): "Video", + }, + "info": info, + } + + import_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input + ) + await hass.async_block_till_done() + + assert import_result["type"] is FlowResultType.CREATE_ENTRY + assert import_result["data"]["host"] == "host 1" + assert import_result["options"]["volume_resolution"] == 80 + assert import_result["options"]["max_volume"] == 100 + assert import_result["options"]["input_sources"] == { + "00": "Auxiliary", + "01": "Video", + } + + +@pytest.mark.parametrize( + "ignore_translations", + [ + [ # The schema is dynamically created from input sources + "component.onkyo.options.step.init.data.TV", + "component.onkyo.options.step.init.data_description.TV", + ] + ], +) +async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Test options flow.""" + + receiver_info = create_receiver_info(1) + config_entry = create_empty_config_entry() + await setup_integration(hass, config_entry, receiver_info) + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + "max_volume": 42, + "TV": "television", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + "volume_resolution": 80, + "max_volume": 42.0, + "input_sources": { + "12": "television", + }, + } diff --git a/tests/components/onkyo/test_init.py b/tests/components/onkyo/test_init.py new file mode 100644 index 00000000000..17086a3088e --- /dev/null +++ b/tests/components/onkyo/test_init.py @@ -0,0 +1,72 @@ +"""Test Onkyo component setup process.""" + +from __future__ import annotations + +from unittest.mock import patch + +import pytest + +from homeassistant.components.onkyo import async_setup_entry +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +from . 
import create_empty_config_entry, create_receiver_info, setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + + config_entry = create_empty_config_entry() + receiver_info = create_receiver_info(1) + await setup_integration(hass, config_entry, receiver_info) + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_update_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test update options.""" + + with patch.object(hass.config_entries, "async_reload", return_value=True): + config_entry = create_empty_config_entry() + receiver_info = create_receiver_info(1) + await setup_integration(hass, config_entry, receiver_info) + + # Force option change + assert hass.config_entries.async_update_entry( + config_entry, options={"option": "new_value"} + ) + await hass.async_block_till_done() + + hass.config_entries.async_reload.assert_called_with(config_entry.entry_id) + + +async def test_no_connection( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test update options.""" + + config_entry = create_empty_config_entry() + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.onkyo.async_interview", + return_value=None, + ), + pytest.raises(ConfigEntryNotReady), + ): + await async_setup_entry(hass, config_entry) diff --git a/tests/components/onvif/snapshots/test_diagnostics.ambr b/tests/components/onvif/snapshots/test_diagnostics.ambr index 68c92ec755d..c3938efcbb6 100644 --- a/tests/components/onvif/snapshots/test_diagnostics.ambr +++ b/tests/components/onvif/snapshots/test_diagnostics.ambr @@ -11,6 +11,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'onvif', 'entry_id': '1', 'minor_version': 1, @@ -22,6 +24,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git a/tests/components/onvif/test_config_flow.py b/tests/components/onvif/test_config_flow.py index c0e5a6fe545..5c01fb2d200 100644 --- a/tests/components/onvif/test_config_flow.py +++ b/tests/components/onvif/test_config_flow.py @@ -9,7 +9,7 @@ from homeassistant import config_entries from homeassistant.components import dhcp from homeassistant.components.onvif import DOMAIN, config_flow from homeassistant.config_entries import SOURCE_DHCP -from homeassistant.const import CONF_HOST, CONF_USERNAME +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr @@ -769,11 +769,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: """Test reauthenticate.""" entry, _, _ = await setup_onvif_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert ( @@ -807,7 +803,8 @@ async def test_form_reauth(hass: HomeAssistant) -> 
None: assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {config_flow.CONF_PASSWORD: "auth_failed"} assert result2["description_placeholders"] == { - "error": "not authorized (subcodes:NotAuthorized)" + CONF_NAME: "Mock Title", + "error": "not authorized (subcodes:NotAuthorized)", } with ( diff --git a/tests/components/onvif/test_parsers.py b/tests/components/onvif/test_parsers.py new file mode 100644 index 00000000000..209e7cbccef --- /dev/null +++ b/tests/components/onvif/test_parsers.py @@ -0,0 +1,335 @@ +"""Test ONVIF parsers.""" + +import datetime +import os + +import onvif +import onvif.settings +from zeep import Client +from zeep.transports import Transport + +from homeassistant.components.onvif import models, parsers +from homeassistant.core import HomeAssistant + +TEST_UID = "test-unique-id" + + +async def get_event(notification_data: dict) -> models.Event: + """Take in a zeep dict, run it through the parser, and return an Event. + + When the parser encounters an unknown topic that it doesn't know how to parse, + it outputs a message 'No registered handler for event from ...' along with a + print out of the serialized xml message from zeep. If it tries to parse and + can't, it prints out 'Unable to parse event from ...' along with the same + serialized message. This method can take the output directly from these log + messages and run them through the parser, which makes it easy to add new unit + tests that verify the message can now be parsed. + """ + zeep_client = Client( + f"{os.path.dirname(onvif.__file__)}/wsdl/events.wsdl", + wsse=None, + transport=Transport(), + ) + + notif_msg_type = zeep_client.get_type("ns5:NotificationMessageHolderType") + assert notif_msg_type is not None + notif_msg = notif_msg_type(**notification_data) + assert notif_msg is not None + + # The xsd:any type embedded inside the message doesn't parse, so parse it manually. 
+ msg_elem = zeep_client.get_element("ns8:Message") + assert msg_elem is not None + msg_data = msg_elem(**notification_data["Message"]["_value_1"]) + assert msg_data is not None + notif_msg.Message._value_1 = msg_data + + parser = parsers.PARSERS.get(notif_msg.Topic._value_1) + assert parser is not None + + return await parser(TEST_UID, notif_msg) + + +async def test_line_detector_crossed(hass: HomeAssistant) -> None: + """Tests tns1:RuleEngine/LineDetector/Crossed.""" + event = await get_event( + { + "SubscriptionReference": { + "Address": {"_value_1": None, "_attr_1": None}, + "ReferenceParameters": None, + "Metadata": None, + "_value_1": None, + "_attr_1": None, + }, + "Topic": { + "_value_1": "tns1:RuleEngine/LineDetector/Crossed", + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + }, + "ProducerReference": { + "Address": { + "_value_1": "xx.xx.xx.xx/onvif/event/alarm", + "_attr_1": None, + }, + "ReferenceParameters": None, + "Metadata": None, + "_value_1": None, + "_attr_1": None, + }, + "Message": { + "_value_1": { + "Source": { + "SimpleItem": [ + { + "Name": "VideoSourceConfigurationToken", + "Value": "video_source_config1", + }, + { + "Name": "VideoAnalyticsConfigurationToken", + "Value": "analytics_video_source", + }, + {"Name": "Rule", "Value": "MyLineDetectorRule"}, + ], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Key": None, + "Data": { + "SimpleItem": [{"Name": "ObjectId", "Value": "0"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Extension": None, + "UtcTime": datetime.datetime(2020, 5, 24, 7, 24, 47), + "PropertyOperation": "Initialized", + "_attr_1": {}, + } + }, + } + ) + + assert event is not None + assert event.name == "Line Detector Crossed" + assert event.platform == "sensor" + assert event.value == "0" + assert event.uid == ( + f"{TEST_UID}_tns1:RuleEngine/LineDetector/" + "Crossed_video_source_config1_analytics_video_source_MyLineDetectorRule" + ) + + +async def test_tapo_vehicle(hass: HomeAssistant) -> None: + """Tests tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent - vehicle.""" + event = await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsVehicle", "Value": "true"}], + "_attr_1": None, + }, + "Extension": None, + "Key": None, + "PropertyOperation": "Changed", + "Source": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [ + { + "Name": "VideoSourceConfigurationToken", + "Value": "vsconf", + }, + { + "Name": "VideoAnalyticsConfigurationToken", + "Value": "VideoAnalyticsToken", + }, + { + "Name": "Rule", + "Value": "MyTPSmartEventDetectorRule", + }, + ], + "_attr_1": None, + }, + "UtcTime": datetime.datetime( + 2024, 11, 2, 0, 33, 11, tzinfo=datetime.UTC + ), + "_attr_1": {}, + } + }, + "ProducerReference": { + "Address": { + "_attr_1": None, + "_value_1": "http://192.168.56.127:5656/event", + }, + "Metadata": None, + "ReferenceParameters": None, + "_attr_1": None, + "_value_1": None, + }, + "SubscriptionReference": { + "Address": { + "_attr_1": None, + "_value_1": "http://192.168.56.127:2020/event-0_2020", + }, + "Metadata": None, + "ReferenceParameters": None, + "_attr_1": None, + "_value_1": None, + }, + "Topic": { + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + "_value_1": "tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent", + }, + } + ) + + assert event is not None + assert event.name == "Vehicle Detection" + assert 
event.platform == "binary_sensor" + assert event.device_class == "motion" + assert event.value + assert event.uid == ( + f"{TEST_UID}_tns1:RuleEngine/TPSmartEventDetector/" + "TPSmartEvent_VideoSourceToken_VideoAnalyticsToken_MyTPSmartEventDetectorRule" + ) + + +async def test_tapo_person(hass: HomeAssistant) -> None: + """Tests tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent - person.""" + event = await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsPeople", "Value": "true"}], + "_attr_1": None, + }, + "Extension": None, + "Key": None, + "PropertyOperation": "Changed", + "Source": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [ + { + "Name": "VideoSourceConfigurationToken", + "Value": "vsconf", + }, + { + "Name": "VideoAnalyticsConfigurationToken", + "Value": "VideoAnalyticsToken", + }, + {"Name": "Rule", "Value": "MyPeopleDetectorRule"}, + ], + "_attr_1": None, + }, + "UtcTime": datetime.datetime( + 2024, 11, 3, 18, 40, 43, tzinfo=datetime.UTC + ), + "_attr_1": {}, + } + }, + "ProducerReference": { + "Address": { + "_attr_1": None, + "_value_1": "http://192.168.56.127:5656/event", + }, + "Metadata": None, + "ReferenceParameters": None, + "_attr_1": None, + "_value_1": None, + }, + "SubscriptionReference": { + "Address": { + "_attr_1": None, + "_value_1": "http://192.168.56.127:2020/event-0_2020", + }, + "Metadata": None, + "ReferenceParameters": None, + "_attr_1": None, + "_value_1": None, + }, + "Topic": { + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + "_value_1": "tns1:RuleEngine/PeopleDetector/People", + }, + } + ) + + assert event is not None + assert event.name == "Person Detection" + assert event.platform == "binary_sensor" + assert event.device_class == "motion" + assert event.value + assert event.uid == ( + f"{TEST_UID}_tns1:RuleEngine/PeopleDetector/" + "People_VideoSourceToken_VideoAnalyticsToken_MyPeopleDetectorRule" + ) + + +async def test_tapo_missing_attributes(hass: HomeAssistant) -> None: + """Tests async_parse_tplink_detector with missing fields.""" + event = await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsPeople", "Value": "true"}], + "_attr_1": None, + }, + } + }, + "Topic": { + "_value_1": "tns1:RuleEngine/PeopleDetector/People", + }, + } + ) + + assert event is None + + +async def test_tapo_unknown_type(hass: HomeAssistant) -> None: + """Tests async_parse_tplink_detector with unknown event type.""" + event = await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsNotPerson", "Value": "true"}], + "_attr_1": None, + }, + "Source": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [ + { + "Name": "VideoSourceConfigurationToken", + "Value": "vsconf", + }, + { + "Name": "VideoAnalyticsConfigurationToken", + "Value": "VideoAnalyticsToken", + }, + {"Name": "Rule", "Value": "MyPeopleDetectorRule"}, + ], + }, + } + }, + "Topic": { + "_value_1": "tns1:RuleEngine/PeopleDetector/People", + }, + } + ) + + assert event is None diff --git a/tests/components/openai_conversation/snapshots/test_conversation.ambr b/tests/components/openai_conversation/snapshots/test_conversation.ambr index e4dd7cd00bb..eaa3a9de64c 100644 --- a/tests/components/openai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/openai_conversation/snapshots/test_conversation.ambr 
@@ -20,7 +20,7 @@ speech=dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Error preparing LLM API: API non-existing not found', + 'speech': 'Error preparing LLM API', }), }), speech_slots=dict({ diff --git a/tests/components/openexchangerates/test_config_flow.py b/tests/components/openexchangerates/test_config_flow.py index ec06c662201..0d4744c057a 100644 --- a/tests/components/openexchangerates/test_config_flow.py +++ b/tests/components/openexchangerates/test_config_flow.py @@ -200,16 +200,7 @@ async def test_reauth( ) -> None: """Test we can reauthenticate the config entry.""" mock_config_entry.add_to_hass(hass) - flow_context = { - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": mock_config_entry.title}, - "unique_id": mock_config_entry.unique_id, - } - - result = await hass.config_entries.flow.async_init( - DOMAIN, context=flow_context, data=mock_config_entry.data - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/opentherm_gw/conftest.py b/tests/components/opentherm_gw/conftest.py new file mode 100644 index 00000000000..9c90c74b04b --- /dev/null +++ b/tests/components/opentherm_gw/conftest.py @@ -0,0 +1,62 @@ +"""Test configuration for opentherm_gw.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from pyotgw.vars import OTGW, OTGW_ABOUT +import pytest + +from homeassistant.components.opentherm_gw import DOMAIN +from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_NAME + +from tests.common import MockConfigEntry + +VERSION_TEST = "4.2.5" +MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_TEST}"}} +MOCK_GATEWAY_ID = "mock_gateway" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.opentherm_gw.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_pyotgw() -> Generator[MagicMock]: + """Mock a pyotgw.OpenThermGateway object.""" + with ( + patch( + "homeassistant.components.opentherm_gw.OpenThermGateway", + return_value=MagicMock( + connect=AsyncMock(return_value=MINIMAL_STATUS), + set_control_setpoint=AsyncMock(), + set_max_relative_mod=AsyncMock(), + disconnect=AsyncMock(), + ), + ) as mock_gateway, + patch( + "homeassistant.components.opentherm_gw.config_flow.pyotgw.OpenThermGateway", + new=mock_gateway, + ), + ): + yield mock_gateway + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock an OpenTherm Gateway config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Mock Gateway", + data={ + CONF_NAME: "Mock Gateway", + CONF_DEVICE: "/dev/null", + CONF_ID: MOCK_GATEWAY_ID, + }, + options={}, + ) diff --git a/tests/components/opentherm_gw/test_button.py b/tests/components/opentherm_gw/test_button.py new file mode 100644 index 00000000000..b02a9d9fef0 --- /dev/null +++ b/tests/components/opentherm_gw/test_button.py @@ -0,0 +1,50 @@ +"""Test opentherm_gw buttons.""" + +from unittest.mock import AsyncMock, MagicMock + +from pyotgw.vars import OTGW_MODE_RESET + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN +from homeassistant.components.opentherm_gw.const import OpenThermDeviceIdentifier +from homeassistant.const import 
ATTR_ENTITY_ID, CONF_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import MINIMAL_STATUS + +from tests.common import MockConfigEntry + + +async def test_restart_button( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, +) -> None: + """Test restart button.""" + + mock_pyotgw.return_value.set_mode = AsyncMock(return_value=MINIMAL_STATUS) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + button_entity_id := entity_registry.async_get_entity_id( + BUTTON_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-restart_button", + ) + ) is not None + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: button_entity_id, + }, + blocking=True, + ) + + mock_pyotgw.return_value.set_mode.assert_awaited_once_with(OTGW_MODE_RESET) diff --git a/tests/components/opentherm_gw/test_config_flow.py b/tests/components/opentherm_gw/test_config_flow.py index e61a87bb55e..57bea4e55dc 100644 --- a/tests/components/opentherm_gw/test_config_flow.py +++ b/tests/components/opentherm_gw/test_config_flow.py @@ -1,14 +1,12 @@ """Test the Opentherm Gateway config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, MagicMock -from pyotgw.vars import OTGW, OTGW_ABOUT from serial import SerialException from homeassistant import config_entries from homeassistant.components.opentherm_gw.const import ( CONF_FLOOR_TEMP, - CONF_PRECISION, CONF_READ_PRECISION, CONF_SET_PRECISION, CONF_TEMPORARY_OVRD_MODE, @@ -26,10 +24,12 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: "OpenTherm Gateway 4.2.5"}} - -async def test_form_user(hass: HomeAssistant) -> None: +async def test_form_user( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -38,27 +38,10 @@ async def test_form_user(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with ( - patch( - "homeassistant.components.opentherm_gw.async_setup", - return_value=True, - ) as mock_setup, - patch( - "homeassistant.components.opentherm_gw.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS - ) as mock_pyotgw_connect, - patch( - "pyotgw.OpenThermGateway.disconnect", return_value=None - ) as mock_pyotgw_disconnect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} - ) - await hass.async_block_till_done() + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test Entry 1" @@ -67,37 +50,22 @@ async def test_form_user(hass: HomeAssistant) -> None: CONF_DEVICE: "/dev/ttyUSB0", CONF_ID: "test_entry_1", } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - assert 
len(mock_pyotgw_connect.mock_calls) == 1 - assert len(mock_pyotgw_disconnect.mock_calls) == 1 + assert mock_pyotgw.return_value.connect.await_count == 1 + assert mock_pyotgw.return_value.disconnect.await_count == 1 -async def test_form_import(hass: HomeAssistant) -> None: +# Deprecated import from configuration.yaml, can be removed in 2025.4.0 +async def test_form_import( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test import from existing config.""" - - with ( - patch( - "homeassistant.components.opentherm_gw.async_setup", - return_value=True, - ) as mock_setup, - patch( - "homeassistant.components.opentherm_gw.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS - ) as mock_pyotgw_connect, - patch( - "pyotgw.OpenThermGateway.disconnect", return_value=None - ) as mock_pyotgw_disconnect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_ID: "legacy_gateway", CONF_DEVICE: "/dev/ttyUSB1"}, - ) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_ID: "legacy_gateway", CONF_DEVICE: "/dev/ttyUSB1"}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "legacy_gateway" @@ -106,13 +74,15 @@ async def test_form_import(hass: HomeAssistant) -> None: CONF_DEVICE: "/dev/ttyUSB1", CONF_ID: "legacy_gateway", } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_pyotgw_connect.mock_calls) == 1 - assert len(mock_pyotgw_disconnect.mock_calls) == 1 + assert mock_pyotgw.return_value.connect.await_count == 1 + assert mock_pyotgw.return_value.disconnect.await_count == 1 -async def test_form_duplicate_entries(hass: HomeAssistant) -> None: +async def test_form_duplicate_entries( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test duplicate device or id errors.""" flow1 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -124,139 +94,76 @@ async def test_form_duplicate_entries(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "homeassistant.components.opentherm_gw.async_setup", - return_value=True, - ) as mock_setup, - patch( - "homeassistant.components.opentherm_gw.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS - ) as mock_pyotgw_connect, - patch( - "pyotgw.OpenThermGateway.disconnect", return_value=None - ) as mock_pyotgw_disconnect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result1 = await hass.config_entries.flow.async_configure( - flow1["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} - ) - result2 = await hass.config_entries.flow.async_configure( - flow2["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB1"} - ) - result3 = await hass.config_entries.flow.async_configure( - flow3["flow_id"], {CONF_NAME: "Test Entry 2", CONF_DEVICE: "/dev/ttyUSB0"} - ) + result1 = await hass.config_entries.flow.async_configure( + flow1["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} + ) assert result1["type"] is 
FlowResultType.CREATE_ENTRY + + result2 = await hass.config_entries.flow.async_configure( + flow2["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB1"} + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "id_exists"} + + result3 = await hass.config_entries.flow.async_configure( + flow3["flow_id"], {CONF_NAME: "Test Entry 2", CONF_DEVICE: "/dev/ttyUSB0"} + ) assert result3["type"] is FlowResultType.FORM assert result3["errors"] == {"base": "already_configured"} - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_pyotgw_connect.mock_calls) == 1 - assert len(mock_pyotgw_disconnect.mock_calls) == 1 + + assert mock_pyotgw.return_value.connect.await_count == 1 + assert mock_pyotgw.return_value.disconnect.await_count == 1 -async def test_form_connection_timeout(hass: HomeAssistant) -> None: +async def test_form_connection_timeout( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test we handle connection timeout.""" - result = await hass.config_entries.flow.async_init( + flow = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "pyotgw.OpenThermGateway.connect", side_effect=(TimeoutError) - ) as mock_connect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_NAME: "Test Entry 1", CONF_DEVICE: "socket://192.0.2.254:1234"}, - ) + mock_pyotgw.return_value.connect.side_effect = TimeoutError - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "timeout_connect"} - assert len(mock_connect.mock_calls) == 1 + result = await hass.config_entries.flow.async_configure( + flow["flow_id"], + {CONF_NAME: "Test Entry 1", CONF_DEVICE: "socket://192.0.2.254:1234"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "timeout_connect"} + + assert mock_pyotgw.return_value.connect.await_count == 1 -async def test_form_connection_error(hass: HomeAssistant) -> None: +async def test_form_connection_error( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test we handle serial connection error.""" - result = await hass.config_entries.flow.async_init( + flow = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "pyotgw.OpenThermGateway.connect", side_effect=(SerialException) - ) as mock_connect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} - ) + mock_pyotgw.return_value.connect.side_effect = SerialException - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - assert len(mock_connect.mock_calls) == 1 - - -async def test_options_migration(hass: HomeAssistant) -> None: - """Test migration of precision option after update.""" - entry = MockConfigEntry( - domain=DOMAIN, - title="Mock Gateway", - data={ - CONF_NAME: "Test Entry 1", - CONF_DEVICE: "/dev/ttyUSB0", - CONF_ID: "test_entry_1", - }, - options={ - CONF_FLOOR_TEMP: True, - CONF_PRECISION: PRECISION_TENTHS, - }, + result = await hass.config_entries.flow.async_configure( + flow["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: 
"/dev/ttyUSB0"} ) - entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayHub.connect_and_subscribe", - return_value=True, - ), - patch( - "homeassistant.components.opentherm_gw.async_setup", - return_value=True, - ), - patch( - "pyotgw.status.StatusManager._process_updates", - return_value=None, - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init( - entry.entry_id, context={"source": config_entries.SOURCE_USER}, data=None - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_READ_PRECISION] == PRECISION_TENTHS - assert result["data"][CONF_SET_PRECISION] == PRECISION_TENTHS - assert result["data"][CONF_FLOOR_TEMP] is True + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + assert mock_pyotgw.return_value.connect.await_count == 1 -async def test_options_form(hass: HomeAssistant) -> None: +async def test_options_form( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test the options form.""" entry = MockConfigEntry( domain=DOMAIN, @@ -270,23 +177,17 @@ async def test_options_form(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - with ( - patch("homeassistant.components.opentherm_gw.async_setup", return_value=True), - patch( - "homeassistant.components.opentherm_gw.async_setup_entry", return_value=True - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - result = await hass.config_entries.options.async_init( + flow = await hass.config_entries.options.async_init( entry.entry_id, context={"source": "test"}, data=None ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" + assert flow["type"] is FlowResultType.FORM + assert flow["step_id"] == "init" result = await hass.config_entries.options.async_configure( - result["flow_id"], + flow["flow_id"], user_input={ CONF_FLOOR_TEMP: True, CONF_READ_PRECISION: PRECISION_HALVES, @@ -301,12 +202,12 @@ async def test_options_form(hass: HomeAssistant) -> None: assert result["data"][CONF_TEMPORARY_OVRD_MODE] is True assert result["data"][CONF_FLOOR_TEMP] is True - result = await hass.config_entries.options.async_init( + flow = await hass.config_entries.options.async_init( entry.entry_id, context={"source": "test"}, data=None ) result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={CONF_READ_PRECISION: 0} + flow["flow_id"], user_input={CONF_READ_PRECISION: 0} ) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -315,12 +216,12 @@ async def test_options_form(hass: HomeAssistant) -> None: assert result["data"][CONF_TEMPORARY_OVRD_MODE] is True assert result["data"][CONF_FLOOR_TEMP] is True - result = await hass.config_entries.options.async_init( + flow = await hass.config_entries.options.async_init( entry.entry_id, context={"source": "test"}, data=None ) result = await hass.config_entries.options.async_configure( - result["flow_id"], + flow["flow_id"], user_input={ CONF_FLOOR_TEMP: False, CONF_READ_PRECISION: PRECISION_TENTHS, diff --git 
a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index a466f788f1a..3e85afbf782 100644 --- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -1,84 +1,177 @@ """Test Opentherm Gateway init.""" -from unittest.mock import patch +from unittest.mock import MagicMock from pyotgw.vars import OTGW, OTGW_ABOUT -import pytest from homeassistant import setup -from homeassistant.components.opentherm_gw.const import DOMAIN -from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_NAME +from homeassistant.components.opentherm_gw.const import ( + DOMAIN, + OpenThermDeviceIdentifier, +) +from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) + +from .conftest import MOCK_GATEWAY_ID, VERSION_TEST from tests.common import MockConfigEntry -VERSION_OLD = "4.2.5" VERSION_NEW = "4.2.8.1" -MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_OLD}"}} MINIMAL_STATUS_UPD = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_NEW}"}} -MOCK_GATEWAY_ID = "mock_gateway" -MOCK_CONFIG_ENTRY = MockConfigEntry( - domain=DOMAIN, - title="Mock Gateway", - data={ - CONF_NAME: "Mock Gateway", - CONF_DEVICE: "/dev/null", - CONF_ID: MOCK_GATEWAY_ID, - }, - options={}, -) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_device_registry_insert( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, ) -> None: """Test that the device registry is initialized correctly.""" - MOCK_CONFIG_ENTRY.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayHub.cleanup", - return_value=None, - ), - patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS), - ): - await setup.async_setup_component(hass, DOMAIN, {}) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - gw_dev = device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) - assert gw_dev.sw_version == VERSION_OLD + gw_dev = device_registry.async_get_device( + identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} + ) + assert gw_dev is not None + assert gw_dev.sw_version == VERSION_TEST -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_device_registry_update( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, ) -> None: """Test that the device registry is updated correctly.""" - MOCK_CONFIG_ENTRY.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) device_registry.async_get_or_create( - config_entry_id=MOCK_CONFIG_ENTRY.entry_id, - identifiers={(DOMAIN, MOCK_GATEWAY_ID)}, + config_entry_id=mock_config_entry.entry_id, + identifiers={ + (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}") + }, name="Mock Gateway", manufacturer="Schelte Bron", model="OpenTherm Gateway", - sw_version=VERSION_OLD, + sw_version=VERSION_TEST, ) - with ( - patch( - 
"homeassistant.components.opentherm_gw.OpenThermGatewayHub.cleanup", - return_value=None, - ), - patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS_UPD), - ): - await setup.async_setup_component(hass, DOMAIN, {}) + mock_pyotgw.return_value.connect.return_value = MINIMAL_STATUS_UPD + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + gw_dev = device_registry.async_get_device( + identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} + ) + assert gw_dev is not None + assert gw_dev.sw_version == VERSION_NEW + + +# Device migration test can be removed in 2025.4.0 +async def test_device_migration( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, +) -> None: + """Test that the device registry is updated correctly.""" + mock_config_entry.add_to_hass(hass) + + device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={ + (DOMAIN, MOCK_GATEWAY_ID), + }, + name="Mock Gateway", + manufacturer="Schelte Bron", + model="OpenTherm Gateway", + sw_version=VERSION_TEST, + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) + is None + ) + + gw_dev = device_registry.async_get_device( + identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} + ) + assert gw_dev is not None + + assert ( + device_registry.async_get_device( + identifiers={ + (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.BOILER}") + } + ) + is not None + ) + + assert ( + device_registry.async_get_device( + identifiers={ + (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.THERMOSTAT}") + } + ) + is not None + ) + + +# Entity migration test can be removed in 2025.4.0 +async def test_climate_entity_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, +) -> None: + """Test that the climate entity unique_id gets migrated correctly.""" + mock_config_entry.add_to_hass(hass) + entry = entity_registry.async_get_or_create( + domain="climate", + platform="opentherm_gw", + unique_id=mock_config_entry.data[CONF_ID], + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + updated_entry = entity_registry.async_get(entry.entity_id) + assert updated_entry is not None + assert ( + updated_entry.unique_id + == f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity" + ) + + +# Deprecation test, can be removed in 2025.4.0 +async def test_configuration_yaml_deprecation( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, +) -> None: + """Test that existing configuration in configuration.yaml creates an issue.""" + + await setup.async_setup_component( + hass, DOMAIN, {DOMAIN: {"legacy_gateway": {"device": "/dev/null"}}} + ) await hass.async_block_till_done() - gw_dev = device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) - assert gw_dev.sw_version == VERSION_NEW + assert ( + issue_registry.async_get_issue( + DOMAIN, "deprecated_import_from_configuration_yaml" + ) + is not None + ) diff --git a/tests/components/opentherm_gw/test_select.py b/tests/components/opentherm_gw/test_select.py new file mode 100644 
index 00000000000..f89224b3874 --- /dev/null +++ b/tests/components/opentherm_gw/test_select.py @@ -0,0 +1,226 @@ +"""Test opentherm_gw select entities.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +from pyotgw.vars import ( + OTGW_GPIO_A, + OTGW_GPIO_B, + OTGW_LED_A, + OTGW_LED_B, + OTGW_LED_C, + OTGW_LED_D, + OTGW_LED_E, + OTGW_LED_F, +) +import pytest + +from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN +from homeassistant.components.opentherm_gw.const import ( + DATA_GATEWAYS, + DATA_OPENTHERM_GW, + OpenThermDeviceIdentifier, +) +from homeassistant.components.opentherm_gw.select import ( + OpenThermSelectGPIOMode, + OpenThermSelectLEDMode, + PyotgwGPIOMode, + PyotgwLEDMode, +) +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, CONF_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.dispatcher import async_dispatcher_send + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ( + "entity_key", + "target_func_name", + "target_param_1", + "target_param_2", + "resulting_state", + ), + [ + ( + OTGW_GPIO_A, + "set_gpio_mode", + "A", + PyotgwGPIOMode.VCC, + OpenThermSelectGPIOMode.VCC, + ), + ( + OTGW_GPIO_B, + "set_gpio_mode", + "B", + PyotgwGPIOMode.HOME, + OpenThermSelectGPIOMode.HOME, + ), + ( + OTGW_LED_A, + "set_led_mode", + "A", + PyotgwLEDMode.TX_ANY, + OpenThermSelectLEDMode.TX_ANY, + ), + ( + OTGW_LED_B, + "set_led_mode", + "B", + PyotgwLEDMode.RX_ANY, + OpenThermSelectLEDMode.RX_ANY, + ), + ( + OTGW_LED_C, + "set_led_mode", + "C", + PyotgwLEDMode.BOILER_TRAFFIC, + OpenThermSelectLEDMode.BOILER_TRAFFIC, + ), + ( + OTGW_LED_D, + "set_led_mode", + "D", + PyotgwLEDMode.THERMOSTAT_TRAFFIC, + OpenThermSelectLEDMode.THERMOSTAT_TRAFFIC, + ), + ( + OTGW_LED_E, + "set_led_mode", + "E", + PyotgwLEDMode.FLAME_ON, + OpenThermSelectLEDMode.FLAME_ON, + ), + ( + OTGW_LED_F, + "set_led_mode", + "F", + PyotgwLEDMode.BOILER_MAINTENANCE_REQUIRED, + OpenThermSelectLEDMode.BOILER_MAINTENANCE_REQUIRED, + ), + ], +) +async def test_select_change_value( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, + entity_key: str, + target_func_name: str, + target_param_1: str, + target_param_2: str | int, + resulting_state: str, +) -> None: + """Test GPIO mode selector.""" + + setattr( + mock_pyotgw.return_value, + target_func_name, + AsyncMock(return_value=target_param_2), + ) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + select_entity_id := entity_registry.async_get_entity_id( + SELECT_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", + ) + ) is not None + assert hass.states.get(select_entity_id).state == STATE_UNKNOWN + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: select_entity_id, ATTR_OPTION: resulting_state}, + blocking=True, + ) + assert hass.states.get(select_entity_id).state == resulting_state + + target = getattr(mock_pyotgw.return_value, target_func_name) + target.assert_awaited_once_with(target_param_1, target_param_2) + + +@pytest.mark.parametrize( + ("entity_key", "test_value", "resulting_state"), + [ + 
(OTGW_GPIO_A, PyotgwGPIOMode.AWAY, OpenThermSelectGPIOMode.AWAY), + (OTGW_GPIO_B, PyotgwGPIOMode.LED_F, OpenThermSelectGPIOMode.LED_F), + ( + OTGW_LED_A, + PyotgwLEDMode.SETPOINT_OVERRIDE_ACTIVE, + OpenThermSelectLEDMode.SETPOINT_OVERRIDE_ACTIVE, + ), + ( + OTGW_LED_B, + PyotgwLEDMode.CENTRAL_HEATING_ON, + OpenThermSelectLEDMode.CENTRAL_HEATING_ON, + ), + (OTGW_LED_C, PyotgwLEDMode.HOT_WATER_ON, OpenThermSelectLEDMode.HOT_WATER_ON), + ( + OTGW_LED_D, + PyotgwLEDMode.COMFORT_MODE_ON, + OpenThermSelectLEDMode.COMFORT_MODE_ON, + ), + ( + OTGW_LED_E, + PyotgwLEDMode.TX_ERROR_DETECTED, + OpenThermSelectLEDMode.TX_ERROR_DETECTED, + ), + ( + OTGW_LED_F, + PyotgwLEDMode.RAISED_POWER_MODE_ACTIVE, + OpenThermSelectLEDMode.RAISED_POWER_MODE_ACTIVE, + ), + ], +) +async def test_select_state_update( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, + entity_key: str, + test_value: Any, + resulting_state: str, +) -> None: + """Test GPIO mode selector.""" + + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + select_entity_id := entity_registry.async_get_entity_id( + SELECT_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", + ) + ) is not None + assert hass.states.get(select_entity_id).state == STATE_UNKNOWN + + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][ + mock_config_entry.data[CONF_ID] + ] + async_dispatcher_send( + hass, + gw_hub.update_signal, + { + OpenThermDeviceIdentifier.BOILER: {}, + OpenThermDeviceIdentifier.GATEWAY: {entity_key: test_value}, + OpenThermDeviceIdentifier.THERMOSTAT: {}, + }, + ) + await hass.async_block_till_done() + + assert hass.states.get(select_entity_id).state == resulting_state diff --git a/tests/components/opentherm_gw/test_switch.py b/tests/components/opentherm_gw/test_switch.py new file mode 100644 index 00000000000..5eb8e906892 --- /dev/null +++ b/tests/components/opentherm_gw/test_switch.py @@ -0,0 +1,111 @@ +"""Test opentherm_gw switches.""" + +from unittest.mock import AsyncMock, MagicMock, call + +import pytest + +from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN +from homeassistant.components.opentherm_gw.const import OpenThermDeviceIdentifier +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + CONF_ID, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + "entity_key", ["central_heating_1_override", "central_heating_2_override"] +) +async def test_switch_added_disabled( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, + entity_key: str, +) -> None: + """Test switch gets added in disabled state.""" + + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + switch_entity_id := entity_registry.async_get_entity_id( + SWITCH_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", + ) + ) is not None + + assert (entity_entry := 
entity_registry.async_get(switch_entity_id)) is not None + assert entity_entry.disabled_by == er.RegistryEntryDisabler.INTEGRATION + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("entity_key", "target_func"), + [ + ("central_heating_1_override", "set_ch_enable_bit"), + ("central_heating_2_override", "set_ch2_enable_bit"), + ], +) +async def test_ch_override_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, + entity_key: str, + target_func: str, +) -> None: + """Test central heating override switch.""" + + setattr(mock_pyotgw.return_value, target_func, AsyncMock(side_effect=[0, 1])) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + switch_entity_id := entity_registry.async_get_entity_id( + SWITCH_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", + ) + ) is not None + assert hass.states.get(switch_entity_id).state == STATE_UNKNOWN + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: switch_entity_id, + }, + blocking=True, + ) + assert hass.states.get(switch_entity_id).state == STATE_OFF + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: switch_entity_id, + }, + blocking=True, + ) + assert hass.states.get(switch_entity_id).state == STATE_ON + + mock_func = getattr(mock_pyotgw.return_value, target_func) + assert mock_func.await_count == 2 + mock_func.assert_has_awaits([call(0), call(1)]) diff --git a/tests/components/openuv/test_config_flow.py b/tests/components/openuv/test_config_flow.py index 3d31cf53250..182f66c887f 100644 --- a/tests/components/openuv/test_config_flow.py +++ b/tests/components/openuv/test_config_flow.py @@ -7,7 +7,7 @@ import pytest import voluptuous as vol from homeassistant.components.openuv import CONF_FROM_WINDOW, CONF_TO_WINDOW, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_API_KEY, CONF_ELEVATION, @@ -19,6 +19,8 @@ from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_API_KEY, TEST_ELEVATION, TEST_LATITUDE, TEST_LONGITUDE +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -105,12 +107,10 @@ async def test_options_flow( async def test_step_reauth( - hass: HomeAssistant, config, config_entry, setup_config_entry + hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=config - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/openuv/test_diagnostics.py b/tests/components/openuv/test_diagnostics.py index 4fe851eea53..03b392b3e7b 100644 --- a/tests/components/openuv/test_diagnostics.py +++ b/tests/components/openuv/test_diagnostics.py @@ -38,6 +38,8 @@ async def test_entry_diagnostics( "disabled_by": None, "created_at": ANY, "modified_at": ANY, + "discovery_keys": {}, + "subentries": [], }, "data": { "protection_window": { diff --git 
a/tests/components/openweathermap/test_config_flow.py b/tests/components/openweathermap/test_config_flow.py index f18aa432e2f..aec34360754 100644 --- a/tests/components/openweathermap/test_config_flow.py +++ b/tests/components/openweathermap/test_config_flow.py @@ -7,6 +7,7 @@ from pyopenweathermap import ( CurrentWeather, DailyTemperature, DailyWeatherForecast, + MinutelyWeatherForecast, RequestError, WeatherCondition, WeatherReport, @@ -105,7 +106,12 @@ def _create_mocked_owm_factory(is_valid: bool): rain=0, snow=0, ) - weather_report = WeatherReport(current_weather, [], [daily_weather_forecast]) + minutely_weather_forecast = MinutelyWeatherForecast( + date_time=1728672360, precipitation=2.54 + ) + weather_report = WeatherReport( + current_weather, [minutely_weather_forecast], [], [daily_weather_forecast] + ) mocked_owm_client = MagicMock() mocked_owm_client.validate_key = AsyncMock(return_value=is_valid) diff --git a/tests/components/osoenergy/conftest.py b/tests/components/osoenergy/conftest.py new file mode 100644 index 00000000000..bb14fec0241 --- /dev/null +++ b/tests/components/osoenergy/conftest.py @@ -0,0 +1,90 @@ +"""Common fixtures for the OSO Energy tests.""" + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +from apyosoenergyapi.waterheater import OSOEnergyWaterHeaterData +import pytest + +from homeassistant.components.osoenergy.const import DOMAIN +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonObjectType + +from tests.common import MockConfigEntry, load_json_object_fixture + +MOCK_CONFIG = { + CONF_API_KEY: "secret_api_key", +} +TEST_USER_EMAIL = "test_user_email@domain.com" + + +@pytest.fixture +def water_heater_fixture() -> JsonObjectType: + """Load the water heater fixture.""" + return load_json_object_fixture("water_heater.json", DOMAIN) + + +@pytest.fixture +def mock_water_heater(water_heater_fixture) -> MagicMock: + """Water heater mock object.""" + mock_heater = MagicMock(OSOEnergyWaterHeaterData) + for key, value in water_heater_fixture.items(): + setattr(mock_heater, key, value) + return mock_heater + + +@pytest.fixture +def mock_entry_data() -> dict[str, Any]: + """Mock config entry data for fixture.""" + return MOCK_CONFIG + + +@pytest.fixture +def mock_config_entry( + hass: HomeAssistant, mock_entry_data: dict[str, Any] +) -> ConfigEntry: + """Mock a config entry setup for the OSO Energy integration.""" + entry = MockConfigEntry(domain=DOMAIN, data=mock_entry_data) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +async def mock_osoenergy_client(mock_water_heater) -> Generator[AsyncMock]: + """Mock an OSO Energy client.""" + + with ( + patch( + "homeassistant.components.osoenergy.OSOEnergy", MagicMock() + ) as mock_client, + patch( + "homeassistant.components.osoenergy.config_flow.OSOEnergy", new=mock_client + ), + ): + mock_session = MagicMock() + mock_session.device_list = {"water_heater": [mock_water_heater]} + mock_session.start_session = AsyncMock( + return_value={"water_heater": [mock_water_heater]} + ) + mock_session.update_data = AsyncMock(return_value=True) + + mock_client().session = mock_session + + mock_hotwater = MagicMock() + mock_hotwater.get_water_heater = AsyncMock(return_value=mock_water_heater) + mock_hotwater.set_profile = AsyncMock(return_value=True) + mock_hotwater.set_v40_min = AsyncMock(return_value=True) + 
mock_hotwater.turn_on = AsyncMock(return_value=True) + mock_hotwater.turn_off = AsyncMock(return_value=True) + + mock_client().hotwater = mock_hotwater + + mock_client().get_user_email = AsyncMock(return_value=TEST_USER_EMAIL) + mock_client().start_session = AsyncMock( + return_value={"water_heater": [mock_water_heater]} + ) + + yield mock_client diff --git a/tests/components/osoenergy/fixtures/water_heater.json b/tests/components/osoenergy/fixtures/water_heater.json new file mode 100644 index 00000000000..82bdafb5d8a --- /dev/null +++ b/tests/components/osoenergy/fixtures/water_heater.json @@ -0,0 +1,20 @@ +{ + "device_id": "osoenergy_water_heater", + "device_type": "SAGA S200", + "device_name": "TEST DEVICE", + "current_temperature": 60, + "min_temperature": 10, + "max_temperature": 75, + "target_temperature": 60, + "target_temperature_low": 57, + "target_temperature_high": 63, + "available": true, + "online": true, + "current_operation": "on", + "optimization_mode": "oso", + "heater_mode": "auto", + "profile": [ + 10, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60 + ] +} diff --git a/tests/components/osoenergy/snapshots/test_water_heater.ambr b/tests/components/osoenergy/snapshots/test_water_heater.ambr new file mode 100644 index 00000000000..5ebac405144 --- /dev/null +++ b/tests/components/osoenergy/snapshots/test_water_heater.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_water_heater[water_heater.test_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 75, + 'min_temp': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.test_device', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'osoenergy', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'osoenergy_water_heater', + 'unit_of_measurement': None, + }) +# --- +# name: test_water_heater[water_heater.test_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 60, + 'friendly_name': 'TEST DEVICE', + 'max_temp': 75, + 'min_temp': 10, + 'supported_features': , + 'target_temp_high': 63, + 'target_temp_low': 57, + 'temperature': 60, + }), + 'context': , + 'entity_id': 'water_heater.test_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'eco', + }) +# --- diff --git a/tests/components/osoenergy/test_config_flow.py b/tests/components/osoenergy/test_config_flow.py index d9db5888cc3..0d77781a538 100644 --- a/tests/components/osoenergy/test_config_flow.py +++ b/tests/components/osoenergy/test_config_flow.py @@ -65,18 +65,11 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: "homeassistant.components.osoenergy.config_flow.OSOEnergy.get_user_email", return_value=None, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config.unique_id, - "entry_id": mock_config.entry_id, - }, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_auth"} + assert result["step_id"] == "user" + 
assert result["errors"] is None with patch( "homeassistant.components.osoenergy.config_flow.OSOEnergy.get_user_email", diff --git a/tests/components/osoenergy/test_water_heater.py b/tests/components/osoenergy/test_water_heater.py new file mode 100644 index 00000000000..851e710fa1c --- /dev/null +++ b/tests/components/osoenergy/test_water_heater.py @@ -0,0 +1,276 @@ +"""The water heater tests for the OSO Energy platform.""" + +from unittest.mock import ANY, MagicMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.osoenergy.const import DOMAIN +from homeassistant.components.osoenergy.water_heater import ( + ATTR_UNTIL_TEMP_LIMIT, + ATTR_V40MIN, + SERVICE_GET_PROFILE, + SERVICE_SET_PROFILE, + SERVICE_SET_V40MIN, +) +from homeassistant.components.water_heater import ( + DOMAIN as WATER_HEATER_DOMAIN, + SERVICE_SET_TEMPERATURE, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_TEMPERATURE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import snapshot_platform + + +@patch("homeassistant.components.osoenergy.PLATFORMS", [Platform.WATER_HEATER]) +async def test_water_heater( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_osoenergy_client: MagicMock, + snapshot: SnapshotAssertion, + mock_config_entry: ConfigEntry, +) -> None: + """Test states of the water heater.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.freeze_time("2024-10-10 00:00:00") +async def test_get_profile( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test getting the heater profile.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + profile = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PROFILE, + {ATTR_ENTITY_ID: "water_heater.test_device"}, + blocking=True, + return_response=True, + ) + + # The profile is returned in UTC format + # Each index represents an hour from the current day (0-23). For example index 2 - 02:00 UTC + # Depending on the time zone and the DST the UTC hour is converted to local time and the value is placed in the correct index + # Example: time zone 'US/Pacific' and DST (-7 hours difference) - index 9 (09:00 UTC) will be converted to index 2 (02:00 Local) + assert profile == { + "water_heater.test_device": { + "profile": [ + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 10, + 60, + 60, + 60, + 60, + 60, + 60, + ], + }, + } + + +@pytest.mark.freeze_time("2024-10-10 00:00:00") +async def test_set_profile( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test setting the heater profile.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + DOMAIN, + SERVICE_SET_PROFILE, + {ATTR_ENTITY_ID: "water_heater.test_device", "hour_01": 45}, + blocking=True, + ) + + # The server expects to receive the profile in UTC format + # Each field represents an hour from the current day (0-23). 
For example field hour_01 - 01:00 Local time + # Depending on the time zone and the DST the Local hour is converted to UTC time and the value is placed in the correct index + # Example: time zone 'US/Pacific' and DST (-7 hours difference) - index 1 (01:00 Local) will be converted to index 8 (08:00 UTC) + mock_osoenergy_client().hotwater.set_profile.assert_called_once_with( + ANY, + [ + 10, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 45, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + ], + ) + + +async def test_set_v40_min( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test setting the V40 min value.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + DOMAIN, + SERVICE_SET_V40MIN, + {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_V40MIN: 300}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.set_v40_min.assert_called_once_with(ANY, 300) + + +async def test_set_temperature( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test setting the target temperature.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + WATER_HEATER_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_TEMPERATURE: 45}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.set_profile.assert_called_once_with( + ANY, + [ + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + ], + ) + + +async def test_turn_on( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test turning the heater on.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + WATER_HEATER_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "water_heater.test_device"}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.turn_on.assert_called_once_with(ANY, True) + + +async def test_turn_off( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test turning the heater off.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + WATER_HEATER_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "water_heater.test_device"}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.turn_off.assert_called_once_with(ANY, True) + + +async def test_oso_turn_on( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test turning the heater on.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_UNTIL_TEMP_LIMIT: False}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.turn_on.assert_called_once_with(ANY, False) + + +async def test_oso_turn_off( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test turning the heater off using the OSO Energy service.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_UNTIL_TEMP_LIMIT: False}, + blocking=True, + ) + + 
mock_osoenergy_client().hotwater.turn_off.assert_called_once_with(ANY, False) diff --git a/tests/components/otbr/__init__.py b/tests/components/otbr/__init__.py index 2c9daa127c2..7d52318b477 100644 --- a/tests/components/otbr/__init__.py +++ b/tests/components/otbr/__init__.py @@ -31,6 +31,7 @@ DATASET_INSECURE_PASSPHRASE = bytes.fromhex( TEST_BORDER_AGENT_EXTENDED_ADDRESS = bytes.fromhex("AEEB2F594B570BBF") TEST_BORDER_AGENT_ID = bytes.fromhex("230C6A1AC57F6F4BE262ACF32E5EF52C") +TEST_BORDER_AGENT_ID_2 = bytes.fromhex("230C6A1AC57F6F4BE262ACF32E5EF52D") ROUTER_DISCOVERY_HASS = { "type_": "_meshcop._udp.local.", diff --git a/tests/components/otbr/conftest.py b/tests/components/otbr/conftest.py index 3811ff66ebb..5ab3e442183 100644 --- a/tests/components/otbr/conftest.py +++ b/tests/components/otbr/conftest.py @@ -77,16 +77,18 @@ async def otbr_config_entry_multipan_fixture( get_active_dataset_tlvs: AsyncMock, get_border_agent_id: AsyncMock, get_extended_address: AsyncMock, -) -> None: +) -> str: """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, domain=otbr.DOMAIN, options={}, title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) + return config_entry.entry_id @pytest.fixture(name="otbr_config_entry_thread") @@ -102,6 +104,7 @@ async def otbr_config_entry_thread_fixture( domain=otbr.DOMAIN, options={}, title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/otbr/test_config_flow.py b/tests/components/otbr/test_config_flow.py index c4972bb5f83..cd02c14e4eb 100644 --- a/tests/components/otbr/test_config_flow.py +++ b/tests/components/otbr/test_config_flow.py @@ -3,28 +3,29 @@ import asyncio from http import HTTPStatus from typing import Any -from unittest.mock import patch +from unittest.mock import AsyncMock, Mock, patch import aiohttp import pytest import python_otbr_api -from homeassistant.components import hassio, otbr +from homeassistant.components import otbr from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo -from . import DATASET_CH15, DATASET_CH16 +from . 
import DATASET_CH15, DATASET_CH16, TEST_BORDER_AGENT_ID, TEST_BORDER_AGENT_ID_2 from tests.common import MockConfigEntry, MockModule, mock_integration from tests.test_util.aiohttp import AiohttpClientMocker -HASSIO_DATA = hassio.HassioServiceInfo( +HASSIO_DATA = HassioServiceInfo( config={"host": "core-silabs-multiprotocol", "port": 8081}, name="Silicon Labs Multiprotocol", slug="otbr", uuid="12345", ) -HASSIO_DATA_2 = hassio.HassioServiceInfo( +HASSIO_DATA_2 = HassioServiceInfo( config={"host": "core-silabs-multiprotocol_2", "port": 8082}, name="Silicon Labs Multiprotocol", slug="other_addon", @@ -32,21 +33,16 @@ HASSIO_DATA_2 = hassio.HassioServiceInfo( ) -@pytest.fixture(name="addon_info") -def addon_info_fixture(): - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.otbr.config_flow.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": True, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info +@pytest.fixture(name="otbr_addon_info") +def otbr_addon_info_fixture(addon_info: AsyncMock, addon_installed) -> AsyncMock: + """Mock Supervisor otbr add-on info.""" + addon_info.return_value.available = True + addon_info.return_value.hostname = "" + addon_info.return_value.options = {} + addon_info.return_value.state = "unknown" + addon_info.return_value.update_available = False + addon_info.return_value.version = None + return addon_info @pytest.mark.parametrize( @@ -57,12 +53,91 @@ def addon_info_fixture(): "http://custom_url:1234//", ], ) +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_border_agent_id", +) async def test_user_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, url: str ) -> None: """Test the user flow.""" + await _finish_user_flow(hass, url) + + +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_extended_address", +) +async def test_user_flow_additional_entry( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test more than a single entry is allowed.""" + url1 = "http://custom_url:1234" + url2 = "http://custom_url_2:1234" + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) + + mock_integration(hass, MockModule("hassio")) + + # Setup a config entry + config_entry = MockConfigEntry( + data={"url": url2}, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_ID_2.hex(), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Do a user flow + await _finish_user_flow(hass) + + +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_extended_address", +) +async def test_user_flow_additional_entry_fail_get_address( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test more than a single entry is allowed. + + This tests the behavior when we can't read the extended address from the existing + config entry. 
+ """ + url1 = "http://custom_url:1234" + url2 = "http://custom_url_2:1234" + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) + + mock_integration(hass, MockModule("hassio")) + + # Setup a config entry + config_entry = MockConfigEntry( + data={"url": url2}, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_ID_2.hex(), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Do a user flow + aioclient_mock.clear_requests() + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", status=HTTPStatus.NOT_FOUND) + await _finish_user_flow(hass) + assert f"Could not read border agent id from {url2}" in caplog.text + + +async def _finish_user_flow( + hass: HomeAssistant, url: str = "http://custom_url:1234" +) -> None: + """Finish a user flow.""" stripped_url = "http://custom_url:1234" - aioclient_mock.get(f"{stripped_url}/node/dataset/active", text="aa") result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": "user"} ) @@ -88,13 +163,56 @@ async def test_user_flow( assert result["options"] == {} assert len(mock_setup_entry.mock_calls) == 1 - config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] + config_entry = result["result"] assert config_entry.data == expected_data assert config_entry.options == {} assert config_entry.title == "Open Thread Border Router" - assert config_entry.unique_id == otbr.DOMAIN + assert config_entry.unique_id == TEST_BORDER_AGENT_ID.hex() +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_border_agent_id", + "get_extended_address", +) +async def test_user_flow_additional_entry_same_address( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test more than a single entry is allowed.""" + mock_integration(hass, MockModule("hassio")) + + # Setup a config entry + config_entry = MockConfigEntry( + data={"url": "http://custom_url:1234"}, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_ID.hex(), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Start user flow + url = "http://custom_url:1234" + aioclient_mock.get(f"{url}/node/dataset/active", text="aa") + result = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "user"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "url": url, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "already_configured"} + + +@pytest.mark.usefixtures("get_border_agent_id") async def test_user_flow_router_not_setup( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: @@ -158,10 +276,11 @@ async def test_user_flow_router_not_setup( assert config_entry.data == expected_data assert config_entry.options == {} assert config_entry.title == "Open Thread Border Router" - assert config_entry.unique_id == otbr.DOMAIN + assert config_entry.unique_id == TEST_BORDER_AGENT_ID.hex() -async def test_user_flow_404( +@pytest.mark.usefixtures("get_border_agent_id") +async def test_user_flow_get_dataset_404( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test the user flow.""" @@ -192,7 +311,30 @@ async def test_user_flow_404( aiohttp.ClientError, ], ) -async def 
test_user_flow_connect_error(hass: HomeAssistant, error) -> None: +async def test_user_flow_get_ba_id_connect_error( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, error +) -> None: + """Test the user flow.""" + await _test_user_flow_connect_error(hass, "get_border_agent_id", error) + + +@pytest.mark.usefixtures("get_border_agent_id") +@pytest.mark.parametrize( + "error", + [ + TimeoutError, + python_otbr_api.OTBRError, + aiohttp.ClientError, + ], +) +async def test_user_flow_get_dataset_connect_error( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, error +) -> None: + """Test the user flow.""" + await _test_user_flow_connect_error(hass, "get_active_dataset_tlvs", error) + + +async def _test_user_flow_connect_error(hass: HomeAssistant, func, error) -> None: """Test the user flow.""" result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": "user"} @@ -201,7 +343,7 @@ async def test_user_flow_connect_error(hass: HomeAssistant, error) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=error): + with patch(f"python_otbr_api.OTBR.{func}", side_effect=error): result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -212,8 +354,9 @@ async def test_user_flow_connect_error(hass: HomeAssistant, error) -> None: assert result["errors"] == {"base": "cannot_connect"} +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info ) -> None: """Test the hassio discovery flow.""" url = "http://core-silabs-multiprotocol:8081" @@ -244,21 +387,16 @@ async def test_hassio_discovery_flow( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_yellow( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info ) -> None: """Test the hassio discovery flow.""" url = "http://core-silabs-multiprotocol:8081" aioclient_mock.get(f"{url}/node/dataset/active", text="aa") - addon_info.return_value = { - "available": True, - "hostname": None, - "options": {"device": "/dev/ttyAMA1"}, - "state": None, - "update_available": False, - "version": None, - } + otbr_addon_info.return_value.available = True + otbr_addon_info.return_value.options = {"device": "/dev/ttyAMA1"} with ( patch( @@ -301,25 +439,20 @@ async def test_hassio_discovery_flow_yellow( ), ], ) +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_sky_connect( device: str, title: str, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - addon_info, + otbr_addon_info, ) -> None: """Test the hassio discovery flow.""" url = "http://core-silabs-multiprotocol:8081" aioclient_mock.get(f"{url}/node/dataset/active", text="aa") - addon_info.return_value = { - "available": True, - "hostname": None, - "options": {"device": device}, - "state": None, - "update_available": False, - "version": None, - } + otbr_addon_info.return_value.available = True + otbr_addon_info.return_value.options = {"device": device} with patch( "homeassistant.components.otbr.async_setup_entry", @@ -346,59 +479,133 @@ async def test_hassio_discovery_flow_sky_connect( assert config_entry.unique_id == HASSIO_DATA.uuid 
+@pytest.mark.usefixtures("get_active_dataset_tlvs", "get_extended_address") async def test_hassio_discovery_flow_2x_addons( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info ) -> None: """Test the hassio discovery flow when the user has 2 addons with otbr support.""" url1 = "http://core-silabs-multiprotocol:8081" url2 = "http://core-silabs-multiprotocol_2:8081" aioclient_mock.get(f"{url1}/node/dataset/active", text="aa") aioclient_mock.get(f"{url2}/node/dataset/active", text="bb") + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) - async def _addon_info(hass: HomeAssistant, slug: str) -> dict[str, Any]: + async def _addon_info(slug: str) -> Mock: await asyncio.sleep(0) if slug == "otbr": - return { - "available": True, - "hostname": None, - "options": { - "device": ( - "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" - "9e2adbd75b8beb119fe564a0f320645d-if00-port0" - ) - }, - "state": None, - "update_available": False, - "version": None, - } - return { - "available": True, - "hostname": None, - "options": { - "device": ( - "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" - "9e2adbd75b8beb119fe564a0f320645d-if00-port1" - ) - }, - "state": None, - "update_available": False, - "version": None, - } - - addon_info.side_effect = _addon_info - - with patch( - "homeassistant.components.otbr.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result1 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA - ) - result2 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 + device = ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port0" + ) + else: + device = ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port1" + ) + return Mock( + available=True, + hostname=otbr_addon_info.return_value.hostname, + options={"device": device}, + state=otbr_addon_info.return_value.state, + update_available=otbr_addon_info.return_value.update_available, + version=otbr_addon_info.return_value.version, ) - results = [result1, result2] + otbr_addon_info.side_effect = _addon_info + + result1 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA + ) + result2 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 + ) + + results = [result1, result2] + + expected_data = { + "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", + } + expected_data_2 = { + "url": f"http://{HASSIO_DATA_2.config['host']}:{HASSIO_DATA_2.config['port']}", + } + + assert results[0]["type"] is FlowResultType.CREATE_ENTRY + assert ( + results[0]["title"] == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert results[0]["data"] == expected_data + assert results[0]["options"] == {} + + assert results[1]["type"] is FlowResultType.CREATE_ENTRY + assert ( + results[1]["title"] == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert results[1]["data"] == expected_data_2 + assert results[1]["options"] == {} + + assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 + + config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] + assert config_entry.data == 
expected_data + assert config_entry.options == {} + assert ( + config_entry.title == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert config_entry.unique_id == HASSIO_DATA.uuid + + config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[1] + assert config_entry.data == expected_data_2 + assert config_entry.options == {} + assert ( + config_entry.title == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert config_entry.unique_id == HASSIO_DATA_2.uuid + + +@pytest.mark.usefixtures("get_active_dataset_tlvs", "get_extended_address") +async def test_hassio_discovery_flow_2x_addons_same_ext_address( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info +) -> None: + """Test the hassio discovery flow when the user has 2 addons with otbr support.""" + url1 = "http://core-silabs-multiprotocol:8081" + url2 = "http://core-silabs-multiprotocol_2:8081" + aioclient_mock.get(f"{url1}/node/dataset/active", text="aa") + aioclient_mock.get(f"{url2}/node/dataset/active", text="bb") + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + + async def _addon_info(slug: str) -> Mock: + await asyncio.sleep(0) + if slug == "otbr": + device = ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port0" + ) + else: + device = ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port1" + ) + return Mock( + available=True, + hostname=otbr_addon_info.return_value.hostname, + options={"device": device}, + state=otbr_addon_info.return_value.state, + update_available=otbr_addon_info.return_value.update_available, + version=otbr_addon_info.return_value.version, + ) + + otbr_addon_info.side_effect = _addon_info + + result1 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA + ) + result2 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 + ) + + results = [result1, result2] expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -411,9 +618,8 @@ async def test_hassio_discovery_flow_2x_addons( assert results[0]["data"] == expected_data assert results[0]["options"] == {} assert results[1]["type"] is FlowResultType.ABORT - assert results[1]["reason"] == "single_instance_allowed" + assert results[1]["reason"] == "already_configured" assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 1 - assert len(mock_setup_entry.mock_calls) == 1 config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] assert config_entry.data == expected_data @@ -424,8 +630,9 @@ async def test_hassio_discovery_flow_2x_addons( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info ) -> None: """Test the hassio discovery flow when the border router has no dataset. 
@@ -481,8 +688,9 @@ async def test_hassio_discovery_flow_router_not_setup( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup_has_preferred( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info ) -> None: """Test the hassio discovery flow when the border router has no dataset. @@ -533,11 +741,12 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup_has_preferred_2( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, multiprotocol_addon_manager_mock, - addon_info, + otbr_addon_info, ) -> None: """Test the hassio discovery flow when the border router has no dataset. @@ -596,6 +805,7 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred_2( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_404( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: @@ -610,6 +820,7 @@ async def test_hassio_discovery_flow_404( assert result["reason"] == "unknown" +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_new_port_missing_unique_id( hass: HomeAssistant, ) -> None: @@ -633,7 +844,7 @@ async def test_hassio_discovery_flow_new_port_missing_unique_id( ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + assert result["reason"] == "already_configured" expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -642,6 +853,7 @@ async def test_hassio_discovery_flow_new_port_missing_unique_id( assert config_entry.data == expected_data +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: """Test the port can be updated.""" mock_integration(hass, MockModule("hassio")) @@ -664,7 +876,7 @@ async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + assert result["reason"] == "already_configured" expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -673,6 +885,12 @@ async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: assert config_entry.data == expected_data +@pytest.mark.usefixtures( + "otbr_addon_info", + "get_active_dataset_tlvs", + "get_border_agent_id", + "get_extended_address", +) async def test_hassio_discovery_flow_new_port_other_addon(hass: HomeAssistant) -> None: """Test the port is not updated if we get data for another addon hosting OTBR.""" mock_integration(hass, MockModule("hassio")) @@ -691,22 +909,34 @@ async def test_hassio_discovery_flow_new_port_other_addon(hass: HomeAssistant) - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + # Another entry will be created + assert result["type"] is FlowResultType.CREATE_ENTRY - # Make sure the data was not updated + # Make sure the data of the existing entry was not updated expected_data = { "url": f"http://openthread_border_router:{HASSIO_DATA.config['port']+1}", } - config_entry = 
hass.config_entries.async_entries(otbr.DOMAIN)[0] + config_entry = hass.config_entries.async_get_entry(config_entry.entry_id) assert config_entry.data == expected_data -@pytest.mark.parametrize(("source", "data"), [("hassio", HASSIO_DATA), ("user", None)]) -async def test_config_flow_single_entry( - hass: HomeAssistant, source: str, data: Any +@pytest.mark.parametrize( + ("source", "data", "expected_result"), + [ + ("hassio", HASSIO_DATA, FlowResultType.CREATE_ENTRY), + ("user", None, FlowResultType.FORM), + ], +) +@pytest.mark.usefixtures( + "otbr_addon_info", + "get_active_dataset_tlvs", + "get_border_agent_id", + "get_extended_address", +) +async def test_config_flow_additional_entry( + hass: HomeAssistant, source: str, data: Any, expected_result: FlowResultType ) -> None: - """Test only a single entry is allowed.""" + """Test more than a single entry is allowed.""" mock_integration(hass, MockModule("hassio")) # Setup the config entry @@ -719,13 +949,11 @@ async def test_config_flow_single_entry( config_entry.add_to_hass(hass) with patch( - "homeassistant.components.homeassistant_yellow.async_setup_entry", + "homeassistant.components.otbr.async_setup_entry", return_value=True, - ) as mock_setup_entry: + ): result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": source}, data=data ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" - mock_setup_entry.assert_not_called() + assert result["type"] is expected_result diff --git a/tests/components/otbr/test_init.py b/tests/components/otbr/test_init.py index 86bab71cbda..faf13786107 100644 --- a/tests/components/otbr/test_init.py +++ b/tests/components/otbr/test_init.py @@ -11,6 +11,7 @@ from zeroconf.asyncio import AsyncServiceInfo from homeassistant.components import otbr, thread from homeassistant.components.thread import discovery +from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component @@ -18,7 +19,6 @@ from homeassistant.setup import async_setup_component from . 
import ( BASE_URL, CONFIG_ENTRY_DATA_MULTIPAN, - CONFIG_ENTRY_DATA_THREAD, DATASET_CH15, DATASET_CH16, DATASET_INSECURE_NW_KEY, @@ -47,6 +47,7 @@ def enable_mocks_fixture( """Enable API mocks.""" +@pytest.mark.usefixtures("supervisor_client") async def test_import_dataset( hass: HomeAssistant, mock_async_zeroconf: MagicMock, @@ -71,6 +72,7 @@ async def test_import_dataset( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) @@ -138,6 +140,7 @@ async def test_import_share_radio_channel_collision( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( @@ -177,6 +180,7 @@ async def test_import_share_radio_no_channel_collision( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( @@ -198,6 +202,7 @@ async def test_import_share_radio_no_channel_collision( ) +@pytest.mark.usefixtures("supervisor_client") @pytest.mark.parametrize("enable_compute_pskc", [True]) @pytest.mark.parametrize( "dataset", [DATASET_INSECURE_NW_KEY, DATASET_INSECURE_PASSPHRASE] @@ -214,6 +219,7 @@ async def test_import_insecure_dataset( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( @@ -252,6 +258,7 @@ async def test_config_entry_not_ready( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) get_active_dataset_tlvs.side_effect = error @@ -268,6 +275,7 @@ async def test_border_agent_id_not_supported( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) get_border_agent_id.side_effect = python_otbr_api.GetBorderAgentIdNotSupportedError @@ -281,6 +289,7 @@ async def test_config_entry_update(hass: HomeAssistant) -> None: domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) mock_api = MagicMock() @@ -303,6 +312,7 @@ async def test_config_entry_update(hass: HomeAssistant) -> None: mock_otrb_api.assert_called_once_with(new_config_entry_data["url"], ANY, ANY) +@pytest.mark.usefixtures("supervisor_client") async def test_remove_entry( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan ) -> None: @@ -314,25 +324,33 @@ async def test_remove_entry( await hass.config_entries.async_remove(config_entry.entry_id) -async def test_remove_extra_entries( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +@pytest.mark.parametrize( + ("source", "unique_id", "updated_unique_id"), + [ + (SOURCE_HASSIO, None, None), + (SOURCE_HASSIO, "abcd", "abcd"), + (SOURCE_USER, None, TEST_BORDER_AGENT_ID.hex()), + (SOURCE_USER, "abcd", TEST_BORDER_AGENT_ID.hex()), + ], +) +async def test_update_unique_id( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + source: str, + unique_id: str | None, + updated_unique_id: str | None, ) -> None: - """Test we remove additional config entries.""" + """Test we update the unique id if extended address has changed.""" - config_entry1 = MockConfigEntry( + config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, domain=otbr.DOMAIN, options={}, + source=source, title="Open Thread Border Router", + unique_id=unique_id, ) - config_entry2 = MockConfigEntry( - 
data=CONFIG_ENTRY_DATA_THREAD, - domain=otbr.DOMAIN, - options={}, - title="Open Thread Border Router", - ) - config_entry1.add_to_hass(hass) - config_entry2.add_to_hass(hass) - assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 + config_entry.add_to_hass(hass) assert await async_setup_component(hass, otbr.DOMAIN, {}) - assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 1 + config_entry = hass.config_entries.async_get_entry(config_entry.entry_id) + assert config_entry.unique_id == updated_unique_id diff --git a/tests/components/otbr/test_silabs_multiprotocol.py b/tests/components/otbr/test_silabs_multiprotocol.py index e842f40ad4c..c4123c25660 100644 --- a/tests/components/otbr/test_silabs_multiprotocol.py +++ b/tests/components/otbr/test_silabs_multiprotocol.py @@ -1,11 +1,10 @@ """Test OTBR Silicon Labs Multiprotocol support.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest from python_otbr_api import ActiveDataSet, tlv_parser -from homeassistant.components import otbr from homeassistant.components.otbr import ( silabs_multiprotocol as otbr_silabs_multiprotocol, ) @@ -32,6 +31,11 @@ DATASET_CH16_PENDING = ( ) +@pytest.fixture(autouse=True) +def mock_supervisor_client(supervisor_client: AsyncMock) -> None: + """Mock supervisor client.""" + + async def test_async_change_channel( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: @@ -127,10 +131,11 @@ async def test_async_change_channel_no_otbr(hass: HomeAssistant) -> None: async def test_async_change_channel_non_matching_url( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test async_change_channel when otbr is not configured.""" - hass.data[otbr.DATA_OTBR].url = OTBR_NON_MULTIPAN_URL + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL with patch("python_otbr_api.OTBR.set_channel") as mock_set_channel: await otbr_silabs_multiprotocol.async_change_channel(hass, 16, delay=0) mock_set_channel.assert_not_awaited() @@ -184,10 +189,11 @@ async def test_async_get_channel_no_otbr(hass: HomeAssistant) -> None: async def test_async_get_channel_non_matching_url( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test async_change_channel when otbr is not configured.""" - hass.data[otbr.DATA_OTBR].url = OTBR_NON_MULTIPAN_URL + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL with patch("python_otbr_api.OTBR.get_active_dataset") as mock_get_active_dataset: assert await otbr_silabs_multiprotocol.async_get_channel(hass) is None mock_get_active_dataset.assert_not_awaited() @@ -198,10 +204,11 @@ async def test_async_get_channel_non_matching_url( [(OTBR_MULTIPAN_URL, True), (OTBR_NON_MULTIPAN_URL, False)], ) async def test_async_using_multipan( - hass: HomeAssistant, otbr_config_entry_multipan, url: str, expected: bool + hass: HomeAssistant, otbr_config_entry_multipan: str, url: str, expected: bool ) -> None: """Test async_change_channel when otbr is not configured.""" - hass.data[otbr.DATA_OTBR].url = url + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = url assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is expected @@ -213,8 +220,9 @@ async def test_async_using_multipan_no_otbr(hass: 
HomeAssistant) -> None: async def test_async_using_multipan_non_matching_url( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test async_change_channel when otbr is not configured.""" - hass.data[otbr.DATA_OTBR].url = OTBR_NON_MULTIPAN_URL + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is False diff --git a/tests/components/otbr/test_util.py b/tests/components/otbr/test_util.py index ec325b8819e..c11d8fe5736 100644 --- a/tests/components/otbr/test_util.py +++ b/tests/components/otbr/test_util.py @@ -1,6 +1,6 @@ """Test OTBR Utility functions.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest import python_otbr_api @@ -13,6 +13,11 @@ OTBR_MULTIPAN_URL = "http://core-silabs-multiprotocol:8081" OTBR_NON_MULTIPAN_URL = "/dev/ttyAMA1" +@pytest.fixture(autouse=True) +def mock_supervisor_client(supervisor_client: AsyncMock) -> None: + """Mock supervisor client.""" + + async def test_get_allowed_channel( hass: HomeAssistant, multiprotocol_addon_manager_mock ) -> None: @@ -31,24 +36,37 @@ async def test_get_allowed_channel( assert await otbr.util.get_allowed_channel(hass, OTBR_NON_MULTIPAN_URL) is None -async def test_factory_reset(hass: HomeAssistant, otbr_config_entry_multipan) -> None: +async def test_factory_reset( + hass: HomeAssistant, + otbr_config_entry_multipan: str, + get_border_agent_id: AsyncMock, +) -> None: """Test factory_reset.""" + new_ba_id = b"new_ba_id" + get_border_agent_id.return_value = new_ba_id + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + assert config_entry.unique_id != new_ba_id.hex() with ( patch("python_otbr_api.OTBR.factory_reset") as factory_reset_mock, patch( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await hass.data[otbr.DATA_OTBR].factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() + # Check the unique_id is updated + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + assert config_entry.unique_id == new_ba_id.hex() + async def test_factory_reset_not_supported( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test factory_reset.""" + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -58,16 +76,17 @@ async def test_factory_reset_not_supported( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await hass.data[otbr.DATA_OTBR].factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() async def test_factory_reset_error_1( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test factory_reset.""" + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -80,16 +99,17 @@ async def test_factory_reset_error_1( HomeAssistantError, ), ): - await hass.data[otbr.DATA_OTBR].factory_reset() + await config_entry.runtime_data.factory_reset(hass) 
delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() async def test_factory_reset_error_2( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test factory_reset.""" + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -103,7 +123,7 @@ async def test_factory_reset_error_2( HomeAssistantError, ), ): - await hass.data[otbr.DATA_OTBR].factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() diff --git a/tests/components/otbr/test_websocket_api.py b/tests/components/otbr/test_websocket_api.py index 5361b56c688..7311b194df4 100644 --- a/tests/components/otbr/test_websocket_api.py +++ b/tests/components/otbr/test_websocket_api.py @@ -1,6 +1,6 @@ """Test OTBR Websocket API.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest import python_otbr_api @@ -29,6 +29,11 @@ async def websocket_client( return await hass_ws_client(hass) +@pytest.fixture(autouse=True) +def mock_supervisor_client(supervisor_client: AsyncMock) -> None: + """Mock supervisor client.""" + + async def test_get_info( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, diff --git a/tests/components/overkiz/test_config_flow.py b/tests/components/overkiz/test_config_flow.py index 50870ae85fe..cef5ef350a9 100644 --- a/tests/components/overkiz/test_config_flow.py +++ b/tests/components/overkiz/test_config_flow.py @@ -573,15 +573,7 @@ async def test_cloud_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "cloud" @@ -623,15 +615,7 @@ async def test_cloud_reauth_wrong_account(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "cloud" @@ -672,15 +656,7 @@ async def test_local_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "local_or_cloud" @@ -731,15 +707,7 @@ async def test_local_reauth_wrong_account(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == 
"local_or_cloud" diff --git a/tests/components/ovo_energy/test_config_flow.py b/tests/components/ovo_energy/test_config_flow.py index 00899e745b9..cfe679a254a 100644 --- a/tests/components/ovo_energy/test_config_flow.py +++ b/tests/components/ovo_energy/test_config_flow.py @@ -117,23 +117,23 @@ async def test_full_flow_implementation(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"][CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] assert result2["data"][CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] + assert result2["data"][CONF_ACCOUNT] == FIXTURE_USER_INPUT[CONF_ACCOUNT] async def test_reauth_authorization_error(hass: HomeAssistant) -> None: """Test we show user form on authorization error.""" + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + ) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" - result2 = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, @@ -141,25 +141,26 @@ async def test_reauth_authorization_error(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "authorization_error"} async def test_reauth_connection_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + ) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {} + with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", side_effect=aiohttp.ClientError, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" - result2 = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, @@ -167,29 +168,32 @@ async def test_reauth_connection_error(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "connection_error"} async def test_reauth_flow(hass: HomeAssistant) -> None: """Test reauth works.""" + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + ) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {} + with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): - mock_config = 
MockConfigEntry( - domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + FIXTURE_REAUTH_INPUT, ) - mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" + assert result["step_id"] == "reauth_confirm" assert result["errors"] == {"base": "authorization_error"} with ( diff --git a/tests/components/owntracks/test_config_flow.py b/tests/components/owntracks/test_config_flow.py index b1172eb4a31..a80685e9b1e 100644 --- a/tests/components/owntracks/test_config_flow.py +++ b/tests/components/owntracks/test_config_flow.py @@ -8,9 +8,9 @@ from homeassistant import config_entries from homeassistant.components.owntracks import config_flow from homeassistant.components.owntracks.config_flow import CONF_CLOUDHOOK, CONF_SECRET from homeassistant.components.owntracks.const import DOMAIN -from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -94,13 +94,14 @@ async def test_import_setup(hass: HomeAssistant) -> None: async def test_abort_if_already_setup(hass: HomeAssistant) -> None: """Test that we can't add more than one instance.""" - flow = await init_config_flow(hass) - MockConfigEntry(domain=DOMAIN, data={}).add_to_hass(hass) assert hass.config_entries.async_entries(DOMAIN) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + # Should fail, already setup (flow) - result = await flow.async_step_user({}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/owntracks/test_device_tracker.py b/tests/components/owntracks/test_device_tracker.py index 2f35139c021..93f40d0ae3d 100644 --- a/tests/components/owntracks/test_device_tracker.py +++ b/tests/components/owntracks/test_device_tracker.py @@ -1540,7 +1540,7 @@ async def test_encrypted_payload_wrong_topic_key( async def test_encrypted_payload_no_topic_key(hass: HomeAssistant, setup_comp) -> None: """Test encrypted payload with no topic key.""" await setup_owntracks( - hass, {CONF_SECRET: {"owntracks/{}/{}".format(USER, "otherdevice"): "foobar"}} + hass, {CONF_SECRET: {f"owntracks/{USER}/otherdevice": "foobar"}} ) await send_message(hass, LOCATION_TOPIC, MOCK_ENCRYPTED_LOCATION_MESSAGE) assert hass.states.get(DEVICE_TRACKER_STATE) is None diff --git a/tests/components/p1_monitor/conftest.py b/tests/components/p1_monitor/conftest.py index 1d5f349f858..fbd39914536 100644 --- a/tests/components/p1_monitor/conftest.py +++ b/tests/components/p1_monitor/conftest.py @@ -7,7 +7,7 @@ from p1monitor import Phases, Settings, SmartMeter, WaterMeter import pytest from homeassistant.components.p1_monitor.const import DOMAIN -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -19,8 +19,9 @@ def mock_config_entry() -> MockConfigEntry: return MockConfigEntry( title="monitor", domain=DOMAIN, - data={CONF_HOST: "example"}, 
+ data={CONF_HOST: "example", CONF_PORT: 80}, unique_id="unique_thingy", + version=2, ) diff --git a/tests/components/p1_monitor/snapshots/test_init.ambr b/tests/components/p1_monitor/snapshots/test_init.ambr new file mode 100644 index 00000000000..83684e153c9 --- /dev/null +++ b/tests/components/p1_monitor/snapshots/test_init.ambr @@ -0,0 +1,49 @@ +# serializer version: 1 +# name: test_migration + ConfigEntrySnapshot({ + 'data': dict({ + 'host': 'example', + 'port': 80, + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'p1_monitor', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': 'unique_thingy', + 'version': 2, + }) +# --- +# name: test_port_migration + ConfigEntrySnapshot({ + 'data': dict({ + 'host': 'example', + 'port': 80, + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'p1_monitor', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': 'unique_thingy', + 'version': 2, + }) +# --- diff --git a/tests/components/p1_monitor/test_config_flow.py b/tests/components/p1_monitor/test_config_flow.py index 12a6a6f5d11..cbd89320074 100644 --- a/tests/components/p1_monitor/test_config_flow.py +++ b/tests/components/p1_monitor/test_config_flow.py @@ -6,7 +6,7 @@ from p1monitor import P1MonitorError from homeassistant.components.p1_monitor.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -30,12 +30,13 @@ async def test_full_user_flow(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={CONF_HOST: "example.com"}, + user_input={CONF_HOST: "example.com", CONF_PORT: 80}, ) assert result2.get("type") is FlowResultType.CREATE_ENTRY assert result2.get("title") == "P1 Monitor" - assert result2.get("data") == {CONF_HOST: "example.com"} + assert result2.get("data") == {CONF_HOST: "example.com", CONF_PORT: 80} + assert isinstance(result2["data"][CONF_PORT], int) assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_p1monitor.mock_calls) == 1 @@ -50,7 +51,7 @@ async def test_api_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "example.com"}, + data={CONF_HOST: "example.com", CONF_PORT: 80}, ) assert result.get("type") is FlowResultType.FORM diff --git a/tests/components/p1_monitor/test_diagnostics.py b/tests/components/p1_monitor/test_diagnostics.py index 55d4ccc5e67..396a3d3bd0d 100644 --- a/tests/components/p1_monitor/test_diagnostics.py +++ b/tests/components/p1_monitor/test_diagnostics.py @@ -21,6 +21,7 @@ async def test_diagnostics( "title": "monitor", "data": { "host": REDACTED, + "port": REDACTED, }, }, "data": { diff --git a/tests/components/p1_monitor/test_init.py b/tests/components/p1_monitor/test_init.py index 02888b5ae97..3b7426051d4 100644 --- a/tests/components/p1_monitor/test_init.py +++ b/tests/components/p1_monitor/test_init.py @@ -3,9 +3,11 @@ from unittest.mock import AsyncMock, MagicMock, patch from 
p1monitor import P1MonitorConnectionError +from syrupy import SnapshotAssertion from homeassistant.components.p1_monitor.const import DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -24,7 +26,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED @@ -44,3 +45,35 @@ async def test_config_entry_not_ready( assert mock_request.call_count == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_migration(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: + """Test config entry version 1 -> 2 migration.""" + mock_config_entry = MockConfigEntry( + unique_id="unique_thingy", + domain=DOMAIN, + data={CONF_HOST: "example"}, + version=1, + ) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) == snapshot + + +async def test_port_migration(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: + """Test migration of host:port to separate host and port.""" + mock_config_entry = MockConfigEntry( + unique_id="unique_thingy", + domain=DOMAIN, + data={CONF_HOST: "example:80"}, + version=1, + ) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) == snapshot diff --git a/tests/components/palazzetti/__init__.py b/tests/components/palazzetti/__init__.py new file mode 100644 index 00000000000..0aafdf553ad --- /dev/null +++ b/tests/components/palazzetti/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Palazzetti integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/palazzetti/conftest.py b/tests/components/palazzetti/conftest.py new file mode 100644 index 00000000000..fad535df914 --- /dev/null +++ b/tests/components/palazzetti/conftest.py @@ -0,0 +1,137 @@ +"""Fixtures for Palazzetti integration tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from pypalazzetti.temperature import TemperatureDefinition, TemperatureDescriptionKey +import pytest + +from homeassistant.components.palazzetti.const import DOMAIN +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.palazzetti.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="palazzetti", + domain=DOMAIN, + data={CONF_HOST: "127.0.0.1"}, + 
unique_id="11:22:33:44:55:66", + ) + + +@pytest.fixture +def mock_palazzetti_client() -> Generator[AsyncMock]: + """Return a mocked PalazzettiClient.""" + with ( + patch( + "homeassistant.components.palazzetti.coordinator.PalazzettiClient", + autospec=True, + ) as client, + patch( + "homeassistant.components.palazzetti.config_flow.PalazzettiClient", + new=client, + ), + ): + mock_client = client.return_value + mock_client.mac = "11:22:33:44:55:66" + mock_client.name = "Stove" + mock_client.sw_version = "0.0.0" + mock_client.hw_version = "1.1.1" + mock_client.to_dict.return_value = { + "host": "XXXXXXXXXX", + "connected": True, + "properties": {}, + "attributes": {}, + } + mock_client.fan_speed_min = 1 + mock_client.fan_speed_max = 5 + mock_client.has_fan_silent = True + mock_client.has_fan_high = True + mock_client.has_fan_auto = True + mock_client.has_on_off_switch = True + mock_client.has_pellet_level = False + mock_client.connected = True + mock_client.status = 6 + mock_client.is_heating = True + mock_client.room_temperature = 18 + mock_client.T1 = 21.5 + mock_client.T2 = 25.1 + mock_client.T3 = 45 + mock_client.T4 = 0 + mock_client.T5 = 0 + mock_client.target_temperature = 21 + mock_client.target_temperature_min = 5 + mock_client.target_temperature_max = 50 + mock_client.pellet_quantity = 1248 + mock_client.pellet_level = 0 + mock_client.fan_speed = 3 + mock_client.connect.return_value = True + mock_client.update_state.return_value = True + mock_client.set_on.return_value = True + mock_client.set_target_temperature.return_value = True + mock_client.set_fan_speed.return_value = True + mock_client.set_fan_silent.return_value = True + mock_client.set_fan_high.return_value = True + mock_client.set_fan_auto.return_value = True + mock_client.set_power_mode.return_value = True + mock_client.power_mode = 3 + mock_client.list_temperatures.return_value = [ + TemperatureDefinition( + description_key=TemperatureDescriptionKey.ROOM_TEMP, + state_property="T1", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.RETURN_WATER_TEMP, + state_property="T4", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.TANK_WATER_TEMP, + state_property="T5", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.WOOD_COMBUSTION_TEMP, + state_property="T3", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.AIR_OUTLET_TEMP, + state_property="T2", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.T1_HYDRO_TEMP, + state_property="T1", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.T2_HYDRO_TEMP, + state_property="T2", + ), + ] + yield mock_client + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_palazzetti_client: MagicMock, +) -> MockConfigEntry: + """Set up the Palazzetti integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry diff --git a/tests/components/palazzetti/snapshots/test_climate.ambr b/tests/components/palazzetti/snapshots/test_climate.ambr new file mode 100644 index 00000000000..e7cea3749a1 --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_climate.ambr @@ -0,0 +1,87 @@ +# serializer version: 1 +# name: test_all_entities[climate.stove-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'fan_modes': list([ + 'silent', + '1', + '2', + '3', + '4', + '5', + 'high', + 'auto', + ]), + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 50, + 'min_temp': 5, + 'target_temp_step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.stove', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'palazzetti', + 'unique_id': '11:22:33:44:55:66', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[climate.stove-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 18, + 'fan_mode': '3', + 'fan_modes': list([ + 'silent', + '1', + '2', + '3', + '4', + '5', + 'high', + 'auto', + ]), + 'friendly_name': 'Stove', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 50, + 'min_temp': 5, + 'supported_features': , + 'target_temp_step': 1.0, + 'temperature': 21, + }), + 'context': , + 'entity_id': 'climate.stove', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/palazzetti/snapshots/test_diagnostics.ambr b/tests/components/palazzetti/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e3f2d7430e5 --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_diagnostics.ambr @@ -0,0 +1,13 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'api_data': dict({ + 'attributes': dict({ + }), + 'connected': True, + 'host': 'XXXXXXXXXX', + 'properties': dict({ + }), + }), + }) +# --- diff --git a/tests/components/palazzetti/snapshots/test_init.ambr b/tests/components/palazzetti/snapshots/test_init.ambr new file mode 100644 index 00000000000..abdee6b7f6f --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '11:22:33:44:55:66', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.1.1', + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Palazzetti', + 'model': None, + 'model_id': None, + 'name': 'Stove', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '0.0.0', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/palazzetti/snapshots/test_number.ambr b/tests/components/palazzetti/snapshots/test_number.ambr new file mode 100644 index 00000000000..0a25a1cfa8b --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_number.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_all_entities[number.stove_combustion_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.stove_combustion_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Combustion power', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'combustion_power', + 'unique_id': '11:22:33:44:55:66-combustion_power', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[number.stove_combustion_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Stove Combustion power', + 'max': 5, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.stove_combustion_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- diff --git a/tests/components/palazzetti/snapshots/test_sensor.ambr b/tests/components/palazzetti/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..aa98f3a4f59 --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_sensor.ambr @@ -0,0 +1,555 @@ +# serializer version: 1 +# name: test_all_entities[sensor.stove_air_outlet_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_air_outlet_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Air outlet temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_outlet_temperature', + 'unique_id': '11:22:33:44:55:66-air_outlet_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_air_outlet_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Air outlet temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_air_outlet_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.1', + }) +# --- +# name: test_all_entities[sensor.stove_hydro_temperature_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_hydro_temperature_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hydro temperature 1', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 't1_hydro', + 'unique_id': '11:22:33:44:55:66-t1_hydro', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_hydro_temperature_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Hydro temperature 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_hydro_temperature_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.5', + }) +# --- +# name: 
test_all_entities[sensor.stove_hydro_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_hydro_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hydro temperature 2', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 't2_hydro', + 'unique_id': '11:22:33:44:55:66-t2_hydro', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_hydro_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Hydro temperature 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_hydro_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.1', + }) +# --- +# name: test_all_entities[sensor.stove_pellet_quantity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_pellet_quantity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pellet quantity', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pellet_quantity', + 'unique_id': '11:22:33:44:55:66-pellet_quantity', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_pellet_quantity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'weight', + 'friendly_name': 'Stove Pellet quantity', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_pellet_quantity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1248', + }) +# --- +# name: test_all_entities[sensor.stove_return_water_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_return_water_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return water temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'return_water_temperature', + 'unique_id': '11:22:33:44:55:66-return_water_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_return_water_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Return water temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.stove_return_water_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.stove_room_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_room_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Room temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'room_temperature', + 'unique_id': '11:22:33:44:55:66-room_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_room_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Room temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_room_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.5', + }) +# --- +# name: test_all_entities[sensor.stove_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'off_timer', + 'test_fire', + 'heatup', + 'fueling', + 'ign_test', + 'burning', + 'burning_mod', + 'unknown', + 'cool_fluid', + 'fire_stop', + 'clean_fire', + 'cooling', + 'cleanup', + 'ecomode', + 'chimney_alarm', + 'grate_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'off', + 'fueling', + 'ign_test', + 'burning', + 'firewood_finished', + 'cooling', + 'clean_fire', + 'general_error', + 'general_error', + 'door_open', + 'temp_too_high', + 'cleaning_warning', + 'fuel_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'general_error', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': '11:22:33:44:55:66-status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.stove_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Stove Status', + 'options': list([ + 'off', + 'off_timer', + 'test_fire', + 'heatup', + 'fueling', + 'ign_test', + 'burning', + 'burning_mod', + 'unknown', + 'cool_fluid', + 'fire_stop', + 'clean_fire', + 'cooling', + 'cleanup', + 'ecomode', + 'chimney_alarm', + 'grate_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 
'exhaust_temp_high', + 'pellet_finished', + 'off', + 'fueling', + 'ign_test', + 'burning', + 'firewood_finished', + 'cooling', + 'clean_fire', + 'general_error', + 'general_error', + 'door_open', + 'temp_too_high', + 'cleaning_warning', + 'fuel_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'general_error', + ]), + }), + 'context': , + 'entity_id': 'sensor.stove_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'burning', + }) +# --- +# name: test_all_entities[sensor.stove_tank_water_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_tank_water_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tank water temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tank_water_temperature', + 'unique_id': '11:22:33:44:55:66-tank_water_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_tank_water_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Tank water temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_tank_water_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.stove_wood_combustion_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_wood_combustion_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wood combustion temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wood_combustion_temperature', + 'unique_id': '11:22:33:44:55:66-wood_combustion_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_wood_combustion_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Wood combustion temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_wood_combustion_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45', + }) +# --- diff --git a/tests/components/palazzetti/test_climate.py b/tests/components/palazzetti/test_climate.py new file mode 100644 index 00000000000..78af8f00bdb --- /dev/null +++ b/tests/components/palazzetti/test_climate.py @@ -0,0 +1,174 @@ +"""Tests for the Palazzetti climate platform.""" + +from unittest.mock import AsyncMock, patch + +from pypalazzetti.exceptions import 
CommunicationError, ValidationError +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_FAN_MODE, + ATTR_HVAC_MODE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.components.palazzetti.const import FAN_AUTO, FAN_HIGH, FAN_SILENT +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +ENTITY_ID = "climate.stove" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.palazzetti.PLATFORMS", [Platform.CLIMATE]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_async_set_data( + hass: HomeAssistant, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting climate data via service call.""" + await setup_integration(hass, mock_config_entry) + + # Set HVAC Mode: Success + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + mock_palazzetti_client.set_on.assert_called_once_with(True) + mock_palazzetti_client.set_on.reset_mock() + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + mock_palazzetti_client.set_on.assert_called_once_with(False) + mock_palazzetti_client.set_on.reset_mock() + + # Set HVAC Mode: Error + mock_palazzetti_client.set_on.side_effect = CommunicationError() + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + + mock_palazzetti_client.set_on.side_effect = ValidationError() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + + # Set Temperature: Success + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 22}, + blocking=True, + ) + mock_palazzetti_client.set_target_temperature.assert_called_once_with(22) + mock_palazzetti_client.set_target_temperature.reset_mock() + + # Set Temperature: Error + mock_palazzetti_client.set_target_temperature.side_effect = CommunicationError() + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 22}, + blocking=True, + ) + + mock_palazzetti_client.set_target_temperature.side_effect = ValidationError() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 22}, + blocking=True, + ) + + # Set Fan Mode: Success + 
await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: FAN_SILENT}, + blocking=True, + ) + mock_palazzetti_client.set_fan_silent.assert_called_once() + mock_palazzetti_client.set_fan_silent.reset_mock() + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: FAN_HIGH}, + blocking=True, + ) + mock_palazzetti_client.set_fan_high.assert_called_once() + mock_palazzetti_client.set_fan_high.reset_mock() + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: FAN_AUTO}, + blocking=True, + ) + mock_palazzetti_client.set_fan_auto.assert_called_once() + mock_palazzetti_client.set_fan_auto.reset_mock() + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: "3"}, + blocking=True, + ) + mock_palazzetti_client.set_fan_speed.assert_called_once_with(3) + mock_palazzetti_client.set_fan_speed.reset_mock() + + # Set Fan Mode: Error + mock_palazzetti_client.set_fan_speed.side_effect = CommunicationError() + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: 3}, + blocking=True, + ) + + mock_palazzetti_client.set_fan_speed.side_effect = ValidationError() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: 3}, + blocking=True, + ) diff --git a/tests/components/palazzetti/test_config_flow.py b/tests/components/palazzetti/test_config_flow.py new file mode 100644 index 00000000000..03c56c33d0c --- /dev/null +++ b/tests/components/palazzetti/test_config_flow.py @@ -0,0 +1,140 @@ +"""Test the Palazzetti config flow.""" + +from unittest.mock import AsyncMock + +from pypalazzetti.exceptions import CommunicationError + +from homeassistant.components import dhcp +from homeassistant.components.palazzetti.const import DOMAIN +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_user_flow( + hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "192.168.1.1"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Stove" + assert result["data"] == {CONF_HOST: "192.168.1.1"} + assert result["result"].unique_id == "11:22:33:44:55:66" + assert len(mock_palazzetti_client.connect.mock_calls) > 0 + + +async def test_invalid_host( + hass: HomeAssistant, + mock_palazzetti_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test cannot connect error.""" + + mock_palazzetti_client.connect.side_effect = CommunicationError() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == 
"user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "192.168.1.1"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_palazzetti_client.connect.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "192.168.1.1"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate( + hass: HomeAssistant, + mock_palazzetti_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.1.1"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_dhcp_flow( + hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the DHCP flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + data=dhcp.DhcpServiceInfo( + hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66" + ), + context={"source": SOURCE_DHCP}, + ) + + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Stove" + assert result["result"].unique_id == "11:22:33:44:55:66" + + +async def test_dhcp_flow_error( + hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the DHCP flow.""" + mock_palazzetti_client.connect.side_effect = CommunicationError() + + result = await hass.config_entries.flow.async_init( + DOMAIN, + data=dhcp.DhcpServiceInfo( + hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66" + ), + context={"source": SOURCE_DHCP}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" diff --git a/tests/components/palazzetti/test_diagnostics.py b/tests/components/palazzetti/test_diagnostics.py new file mode 100644 index 00000000000..80d021be511 --- /dev/null +++ b/tests/components/palazzetti/test_diagnostics.py @@ -0,0 +1,22 @@ +"""Test Palazzetti diagnostics.""" + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + init_integration: MockConfigEntry, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot + ) diff --git a/tests/components/palazzetti/test_init.py b/tests/components/palazzetti/test_init.py new file mode 100644 index 00000000000..710144b2b7b --- /dev/null +++ b/tests/components/palazzetti/test_init.py @@ -0,0 +1,46 @@ +"""Tests 
for the Palazzetti integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_palazzetti_client: AsyncMock, +) -> None: + """Test the Palazzetti configuration entry loading/unloading.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_palazzetti_client: AsyncMock, + snapshot: SnapshotAssertion, + device_registry: dr.DeviceRegistry, +) -> None: + """Test the device information.""" + await setup_integration(hass, mock_config_entry) + + device = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, "11:22:33:44:55:66")} + ) + assert device is not None + assert device == snapshot diff --git a/tests/components/palazzetti/test_number.py b/tests/components/palazzetti/test_number.py new file mode 100644 index 00000000000..939c7c72c19 --- /dev/null +++ b/tests/components/palazzetti/test_number.py @@ -0,0 +1,72 @@ +"""Tests for the Palazzetti sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from pypalazzetti.exceptions import CommunicationError, ValidationError +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +ENTITY_ID = "number.stove_combustion_power" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.palazzetti.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_async_set_data( + hass: HomeAssistant, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting number data via service call.""" + await setup_integration(hass, mock_config_entry) + + # Set value: Success + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, + blocking=True, + ) + mock_palazzetti_client.set_power_mode.assert_called_once_with(1) + mock_palazzetti_client.set_on.reset_mock() + + # Set value: Error + mock_palazzetti_client.set_power_mode.side_effect = CommunicationError() + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, + blocking=True, + ) + mock_palazzetti_client.set_on.reset_mock() + + mock_palazzetti_client.set_power_mode.side_effect = ValidationError() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, + blocking=True, + ) diff --git a/tests/components/palazzetti/test_sensor.py b/tests/components/palazzetti/test_sensor.py new file mode 100644 index 00000000000..c7d7317bb0b --- /dev/null +++ b/tests/components/palazzetti/test_sensor.py @@ -0,0 +1,27 @@ +"""Tests for the Palazzetti sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.palazzetti.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/panel_iframe/__init__.py b/tests/components/panel_iframe/__init__.py deleted file mode 100644 index df7115d9e97..00000000000 --- a/tests/components/panel_iframe/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the panel_iframe component.""" diff --git a/tests/components/panel_iframe/test_init.py b/tests/components/panel_iframe/test_init.py deleted file mode 100644 index 74e1b642df5..00000000000 --- a/tests/components/panel_iframe/test_init.py +++ /dev/null @@ -1,154 +0,0 @@ -"""The tests for the panel_iframe component.""" - -from typing import Any - -import pytest - -from homeassistant.components.panel_iframe import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from tests.typing import WebSocketGenerator - -TEST_CONFIG = { - "router": { - "icon": "mdi:network-wireless", - "title": "Router", - "url": "http://192.168.1.1", - "require_admin": True, - }, - "weather": { - "icon": "mdi:weather", - "title": "Weather", - "url": "https://www.wunderground.com/us/ca/san-diego", - "require_admin": True, - }, - "api": {"icon": "mdi:weather", "title": "Api", "url": "/api"}, - "ftp": { - "icon": "mdi:weather", - "title": "FTP", - "url": "ftp://some/ftp", - }, -} - - -@pytest.mark.parametrize( - "config_to_try", - [ - {"invalid space": {"url": "https://home-assistant.io"}}, - {"router": {"url": "not-a-url"}}, - ], -) -async def test_wrong_config(hass: HomeAssistant, config_to_try) -> None: - """Test setup with wrong configuration.""" - assert not await async_setup_component( - hass, "panel_iframe", {"panel_iframe": config_to_try} - ) - - -async def test_import_config( - hass: HomeAssistant, - hass_storage: dict[str, Any], - hass_ws_client: WebSocketGenerator, -) -> None: - """Test import config.""" - client = await hass_ws_client(hass) - - assert await async_setup_component( - hass, - "panel_iframe", - {"panel_iframe": TEST_CONFIG}, - ) - - # List dashboards - await client.send_json_auto_id({"type": "lovelace/dashboards/list"}) - response = await client.receive_json() - assert response["success"] - assert response["result"] == [ - { - "icon": "mdi:network-wireless", - "id": "router", - "mode": "storage", - "require_admin": True, - "show_in_sidebar": True, - "title": "Router", - "url_path": "router", - }, - { - "icon": "mdi:weather", - "id": "weather", - "mode": "storage", - "require_admin": True, - "show_in_sidebar": True, - "title": "Weather", - "url_path": "weather", - }, - { - "icon": "mdi:weather", - "id": "api", - "mode": "storage", - "require_admin": False, - "show_in_sidebar": True, - "title": "Api", - "url_path": "api", - }, - { - "icon": "mdi:weather", - "id": "ftp", - "mode": "storage", - "require_admin": False, - "show_in_sidebar": True, - "title": "FTP", - "url_path": "ftp", - }, - ] - - for url_path in ("api", "ftp", "router", "weather"): - await client.send_json_auto_id( - {"type": 
"lovelace/config", "url_path": url_path} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == { - "strategy": {"type": "iframe", "url": TEST_CONFIG[url_path]["url"]} - } - - assert hass_storage[DOMAIN]["data"] == {"migrated": True} - - -async def test_import_config_once( - hass: HomeAssistant, - hass_storage: dict[str, Any], - hass_ws_client: WebSocketGenerator, -) -> None: - """Test import config only happens once.""" - client = await hass_ws_client(hass) - - hass_storage[DOMAIN] = { - "version": 1, - "minor_version": 1, - "key": "map", - "data": {"migrated": True}, - } - - assert await async_setup_component( - hass, - "panel_iframe", - {"panel_iframe": TEST_CONFIG}, - ) - - # List dashboards - await client.send_json_auto_id({"type": "lovelace/dashboards/list"}) - response = await client.receive_json() - assert response["success"] - assert response["result"] == [] - - -async def test_create_issue_when_manually_configured( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test creating issue registry issues.""" - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) - - assert issue_registry.async_get_issue(DOMAIN, "deprecated_yaml") diff --git a/tests/components/peco/test_sensor.py b/tests/components/peco/test_sensor.py index 9cbef9fa1e6..4c9a3fca104 100644 --- a/tests/components/peco/test_sensor.py +++ b/tests/components/peco/test_sensor.py @@ -39,7 +39,7 @@ async def test_sensor_available( "peco.PecoOutageApi.get_outage_totals", return_value=OutageResults( customers_out=123, - percent_customers_out=15.589, + percent_customers_out=15, outage_count=456, customers_served=789, ), @@ -74,7 +74,7 @@ async def test_sensor_available( "peco.PecoOutageApi.get_outage_count", return_value=OutageResults( customers_out=123, - percent_customers_out=15.589, + percent_customers_out=15, outage_count=456, customers_served=789, ), diff --git a/tests/components/pegel_online/snapshots/test_diagnostics.ambr b/tests/components/pegel_online/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..d0fdc81acb4 --- /dev/null +++ b/tests/components/pegel_online/snapshots/test_diagnostics.ambr @@ -0,0 +1,41 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'data': dict({ + 'air_temperature': None, + 'clearance_height': None, + 'oxygen_level': None, + 'ph_value': None, + 'water_flow': dict({ + 'uom': 'm³/s', + 'value': 88.4, + }), + 'water_level': dict({ + 'uom': 'cm', + 'value': 62, + }), + 'water_speed': None, + 'water_temperature': None, + }), + 'entry': dict({ + 'data': dict({ + 'station': '70272185-xxxx-xxxx-xxxx-43bea330dcae', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'pegel_online', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': '70272185-xxxx-xxxx-xxxx-43bea330dcae', + 'version': 1, + }), + }) +# --- diff --git a/tests/components/pegel_online/test_diagnostics.py b/tests/components/pegel_online/test_diagnostics.py new file mode 100644 index 00000000000..220f244b751 --- /dev/null +++ b/tests/components/pegel_online/test_diagnostics.py @@ -0,0 +1,44 @@ +"""Test pegel_online diagnostics.""" + +from unittest.mock import patch + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.components.pegel_online.const import CONF_STATION, DOMAIN +from homeassistant.core 
import HomeAssistant + +from . import PegelOnlineMock +from .const import ( + MOCK_CONFIG_ENTRY_DATA_DRESDEN, + MOCK_STATION_DETAILS_DRESDEN, + MOCK_STATION_MEASUREMENT_DRESDEN, +) + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + entry = MockConfigEntry( + domain=DOMAIN, + data=MOCK_CONFIG_ENTRY_DATA_DRESDEN, + unique_id=MOCK_CONFIG_ENTRY_DATA_DRESDEN[CONF_STATION], + ) + entry.add_to_hass(hass) + with patch("homeassistant.components.pegel_online.PegelOnline") as pegelonline: + pegelonline.return_value = PegelOnlineMock( + station_details=MOCK_STATION_DETAILS_DRESDEN, + station_measurements=MOCK_STATION_MEASUREMENT_DRESDEN, + ) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/pegel_online/test_init.py b/tests/components/pegel_online/test_init.py index ee2e78af7cf..c1b8f1861c4 100644 --- a/tests/components/pegel_online/test_init.py +++ b/tests/components/pegel_online/test_init.py @@ -3,6 +3,7 @@ from unittest.mock import patch from aiohttp.client_exceptions import ClientError +import pytest from homeassistant.components.pegel_online.const import ( CONF_STATION, @@ -23,7 +24,9 @@ from .const import ( from tests.common import MockConfigEntry, async_fire_time_changed -async def test_update_error(hass: HomeAssistant) -> None: +async def test_update_error( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Tests error during update entity.""" entry = MockConfigEntry( domain=DOMAIN, @@ -43,9 +46,11 @@ async def test_update_error(hass: HomeAssistant) -> None: state = hass.states.get("sensor.dresden_elbe_water_level") assert state - pegelonline().override_side_effect(ClientError) + pegelonline().override_side_effect(ClientError("Boom")) async_fire_time_changed(hass, utcnow() + MIN_TIME_BETWEEN_UPDATES) await hass.async_block_till_done() + assert "Failed to communicate with API: Boom" in caplog.text + state = hass.states.get("sensor.dresden_elbe_water_level") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/permobil/test_config_flow.py b/tests/components/permobil/test_config_flow.py index ea39e678459..7067566a74d 100644 --- a/tests/components/permobil/test_config_flow.py +++ b/tests/components/permobil/test_config_flow.py @@ -284,23 +284,21 @@ async def test_config_flow_reauth_success( "homeassistant.components.permobil.config_flow.MyPermobil", return_value=my_permobil, ): - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": "reauth", "entry_id": mock_entry.entry_id}, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "email_code" assert result["errors"] == {} - # request request new token + # request new token result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_CODE: reauth_code}, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + 
assert mock_entry.data == { CONF_EMAIL: MOCK_EMAIL, CONF_REGION: MOCK_URL, CONF_CODE: reauth_code, @@ -326,10 +324,7 @@ async def test_config_flow_reauth_fail_invalid_code( "homeassistant.components.permobil.config_flow.MyPermobil", return_value=my_permobil, ): - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": "reauth", "entry_id": mock_entry.entry_id}, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "email_code" @@ -357,16 +352,11 @@ async def test_config_flow_reauth_fail_code_request( ) mock_entry.add_to_hass(hass) # test the reauth and have request_application_code fail leading to an abort - my_permobil.request_application_code.side_effect = MyPermobilAPIException - reauth_entry = hass.config_entries.async_entries(config_flow.DOMAIN)[0] with patch( "homeassistant.components.permobil.config_flow.MyPermobil", return_value=my_permobil, ): - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": "reauth", "entry_id": reauth_entry.entry_id}, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "unknown" diff --git a/tests/components/philips_js/snapshots/test_diagnostics.ambr b/tests/components/philips_js/snapshots/test_diagnostics.ambr index 5cff47c7d62..53db95f0534 100644 --- a/tests/components/philips_js/snapshots/test_diagnostics.ambr +++ b/tests/components/philips_js/snapshots/test_diagnostics.ambr @@ -85,6 +85,8 @@ }), }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'philips_js', 'minor_version': 1, 'options': dict({ @@ -92,6 +94,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/philips_js/test_config_flow.py b/tests/components/philips_js/test_config_flow.py index d7f539db9cf..4b8048a8ebe 100644 --- a/tests/components/philips_js/test_config_flow.py +++ b/tests/components/philips_js/test_config_flow.py @@ -60,7 +60,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry) -> None: async def test_reauth( - hass: HomeAssistant, mock_setup_entry, mock_config_entry, mock_tv + hass: HomeAssistant, mock_setup_entry, mock_config_entry: MockConfigEntry, mock_tv ) -> None: """Test we get the form.""" @@ -69,15 +69,7 @@ async def test_reauth( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) assert len(mock_setup_entry.mock_calls) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -163,6 +155,7 @@ async def test_pairing(hass: HomeAssistant, mock_tv_pairable, mock_setup_entry) "version": 1, "options": {}, "minor_version": 1, + "subentries": (), } await hass.async_block_till_done() diff --git a/tests/components/pi_hole/snapshots/test_diagnostics.ambr b/tests/components/pi_hole/snapshots/test_diagnostics.ambr index 865494b5e9f..2d6f6687d04 100644 --- a/tests/components/pi_hole/snapshots/test_diagnostics.ambr +++ b/tests/components/pi_hole/snapshots/test_diagnostics.ambr @@ -23,6 +23,8 @@ 'verify_ssl': True, }), 
'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'pi_hole', 'entry_id': 'pi_hole_mock_entry', 'minor_version': 1, @@ -31,6 +33,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/picnic/test_config_flow.py b/tests/components/picnic/test_config_flow.py index 9ba18dac9a9..8d668b28c16 100644 --- a/tests/components/picnic/test_config_flow.py +++ b/tests/components/picnic/test_config_flow.py @@ -170,16 +170,15 @@ async def test_step_reauth(hass: HomeAssistant, picnic_api) -> None: # Create a mocked config entry conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id=picnic_api().get_user()["user_id"], data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Init a re-auth flow - result_init = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf - ) + result_init = await entry.start_reauth_flow(hass) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" @@ -210,16 +209,15 @@ async def test_step_reauth_failed(hass: HomeAssistant) -> None: user_id = "f29-2a6-o32n" conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id=user_id, data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Init a re-auth flow - result_init = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf - ) + result_init = await entry.start_reauth_flow(hass) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" @@ -249,16 +247,15 @@ async def test_step_reauth_different_account(hass: HomeAssistant, picnic_api) -> # Create a mocked config entry, unique_id should be different that the user id in the api response conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id="3fpawh-ues-af3ho", data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Init a re-auth flow - result_init = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf - ) + result_init = await entry.start_reauth_flow(hass) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" diff --git a/tests/components/picnic/test_todo.py b/tests/components/picnic/test_todo.py index 2db5bc90159..3a6e09f7ac0 100644 --- a/tests/components/picnic/test_todo.py +++ b/tests/components/picnic/test_todo.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, Mock import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import ATTR_ITEM, DOMAIN, TodoServices +from homeassistant.components.todo import ATTR_ITEM, DOMAIN as TODO_DOMAIN, TodoServices from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -91,7 +91,7 @@ async def test_create_todo_list_item( mock_picnic_api.add_product = Mock() await hass.services.async_call( - DOMAIN, + TODO_DOMAIN, TodoServices.ADD_ITEM, {ATTR_ITEM: "Melk"}, target={ATTR_ENTITY_ID: ENTITY_ID}, @@ -119,7 +119,7 @@ async def test_create_todo_list_item_not_found( with 
pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + TODO_DOMAIN, TodoServices.ADD_ITEM, {ATTR_ITEM: "Melk"}, target={ATTR_ENTITY_ID: ENTITY_ID}, diff --git a/tests/components/ping/snapshots/test_binary_sensor.ambr b/tests/components/ping/snapshots/test_binary_sensor.ambr index 24717938874..0196c2cbbfb 100644 --- a/tests/components/ping/snapshots/test_binary_sensor.ambr +++ b/tests/components/ping/snapshots/test_binary_sensor.ambr @@ -36,10 +36,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'connectivity', 'friendly_name': '10.10.10.10', - 'round_trip_time_avg': 4.8, - 'round_trip_time_max': 10, - 'round_trip_time_mdev': None, - 'round_trip_time_min': 1, }), 'context': , 'entity_id': 'binary_sensor.10_10_10_10', @@ -54,10 +50,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'connectivity', 'friendly_name': '10.10.10.10', - 'round_trip_time_avg': None, - 'round_trip_time_max': None, - 'round_trip_time_mdev': None, - 'round_trip_time_min': None, }), 'context': , 'entity_id': 'binary_sensor.10_10_10_10', diff --git a/tests/components/ping/test_config_flow.py b/tests/components/ping/test_config_flow.py index 8204a000f29..bc13030647e 100644 --- a/tests/components/ping/test_config_flow.py +++ b/tests/components/ping/test_config_flow.py @@ -13,11 +13,15 @@ from tests.common import MockConfigEntry @pytest.mark.parametrize( - ("host", "expected_title"), - [("192.618.178.1", "192.618.178.1")], + ("host", "expected"), + [ + ("192.618.178.1", "192.618.178.1"), + (" 192.618.178.1 ", "192.618.178.1"), + (" demo.host ", "demo.host"), + ], ) @pytest.mark.usefixtures("patch_setup") -async def test_form(hass: HomeAssistant, host, expected_title) -> None: +async def test_form(hass: HomeAssistant, host, expected) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -35,21 +39,25 @@ async def test_form(hass: HomeAssistant, host, expected_title) -> None: await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == expected_title + assert result["title"] == expected assert result["data"] == {} assert result["options"] == { "count": 5, - "host": host, + "host": expected, "consider_home": 180, } @pytest.mark.parametrize( - ("host", "count", "expected_title"), - [("192.618.178.1", 10, "192.618.178.1")], + ("host", "expected_host"), + [ + ("192.618.178.1", "192.618.178.1"), + (" 192.618.178.1 ", "192.618.178.1"), + (" demo.host ", "demo.host"), + ], ) @pytest.mark.usefixtures("patch_setup") -async def test_options(hass: HomeAssistant, host, count, expected_title) -> None: +async def test_options(hass: HomeAssistant, host: str, expected_host: str) -> None: """Test options flow.""" config_entry = MockConfigEntry( @@ -57,8 +65,8 @@ async def test_options(hass: HomeAssistant, host, count, expected_title) -> None source=config_entries.SOURCE_USER, data={}, domain=DOMAIN, - options={"count": count, "host": host, "consider_home": 180}, - title=expected_title, + options={"count": 1, "host": "192.168.1.1", "consider_home": 180}, + title="192.168.1.1", ) config_entry.add_to_hass(hass) @@ -72,15 +80,15 @@ async def test_options(hass: HomeAssistant, host, count, expected_title) -> None result = await hass.config_entries.options.async_configure( result["flow_id"], { - "host": "10.10.10.1", - "count": count, + "host": host, + "count": 10, }, ) await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { - "count": count, - "host": "10.10.10.1", + 
"count": 10, + "host": expected_host, "consider_home": 180, } diff --git a/tests/components/plex/test_config_flow.py b/tests/components/plex/test_config_flow.py index 202d62d70e0..c4ec108bb6b 100644 --- a/tests/components/plex/test_config_flow.py +++ b/tests/components/plex/test_config_flow.py @@ -26,7 +26,6 @@ from homeassistant.components.plex.const import ( ) from homeassistant.config_entries import ( SOURCE_INTEGRATION_DISCOVERY, - SOURCE_REAUTH, SOURCE_USER, ConfigEntryState, ) @@ -744,11 +743,7 @@ async def test_reauth( """Test setup and reauthorization of a Plex token.""" entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flow_id = result["flow_id"] with ( @@ -795,11 +790,7 @@ async def test_reauth_multiple_servers_available( entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flow_id = result["flow_id"] diff --git a/tests/components/plex/test_media_search.py b/tests/components/plex/test_media_search.py index 8219cbe27b6..04d91e8825c 100644 --- a/tests/components/plex/test_media_search.py +++ b/tests/components/plex/test_media_search.py @@ -57,6 +57,31 @@ async def test_media_lookups( ) assert "Media for key 123 not found" in str(excinfo.value) + # Search with a different specified username + with ( + patch( + "plexapi.library.LibrarySection.search", + __qualname__="search", + ) as search, + patch( + "plexapi.myplex.MyPlexAccount.user", + __qualname__="user", + ) as plex_account_user, + ): + plex_account_user.return_value.get_token.return_value = "token" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: media_player_id, + ATTR_MEDIA_CONTENT_TYPE: MediaType.EPISODE, + ATTR_MEDIA_CONTENT_ID: '{"library_name": "TV Shows", "show_name": "TV Show", "username": "Kids"}', + }, + True, + ) + search.assert_called_with(**{"show.title": "TV Show", "libtype": "show"}) + plex_account_user.assert_called_with("Kids") + # TV show searches with pytest.raises(MediaNotFound) as excinfo: await hass.services.async_call( diff --git a/tests/components/plugwise/conftest.py b/tests/components/plugwise/conftest.py index ec857a965e5..dead58e0581 100644 --- a/tests/components/plugwise/conftest.py +++ b/tests/components/plugwise/conftest.py @@ -7,6 +7,7 @@ import json from typing import Any from unittest.mock import AsyncMock, MagicMock, patch +from packaging.version import Version from plugwise import PlugwiseData import pytest @@ -65,15 +66,16 @@ def mock_smile_config_flow() -> Generator[MagicMock]: smile = smile_mock.return_value smile.smile_hostname = "smile12345" smile.smile_model = "Test Model" + smile.smile_model_id = "Test Model ID" smile.smile_name = "Test Smile Name" - smile.connect.return_value = True + smile.connect.return_value = Version("4.3.2") yield smile @pytest.fixture def mock_smile_adam() -> Generator[MagicMock]: """Create a Mock Adam environment for testing exceptions.""" - chosen_env = "adam_multiple_devices_per_zone" + chosen_env = "m_adam_multiple_devices_per_zone" with patch( "homeassistant.components.plugwise.coordinator.Smile", autospec=True @@ -86,11 +88,12 @@ def mock_smile_adam() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" + smile.smile_model_id = "smile_open_therm" 
smile.smile_name = "Adam" - smile.connect.return_value = True + smile.connect.return_value = Version("3.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -112,11 +115,12 @@ def mock_smile_adam_2() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" + smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = True + smile.connect.return_value = Version("3.6.4") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -138,11 +142,12 @@ def mock_smile_adam_3() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" + smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = True + smile.connect.return_value = Version("3.6.4") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -164,11 +169,12 @@ def mock_smile_adam_4() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" + smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = True + smile.connect.return_value = Version("3.2.8") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -189,11 +195,12 @@ def mock_smile_anna() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" + smile.smile_model_id = "smile_thermo" smile.smile_name = "Smile Anna" - smile.connect.return_value = True + smile.connect.return_value = Version("4.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -214,11 +221,12 @@ def mock_smile_anna_2() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" + smile.smile_model_id = "smile_thermo" smile.smile_name = "Smile Anna" - smile.connect.return_value = True + smile.connect.return_value = Version("4.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -239,11 +247,12 @@ def mock_smile_anna_3() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" + smile.smile_model_id = "smile_thermo" smile.smile_name = "Smile Anna" - smile.connect.return_value = True + smile.connect.return_value = Version("4.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -264,11 +273,12 @@ def mock_smile_p1() -> Generator[MagicMock]: smile.smile_type = "power" smile.smile_hostname = "smile98765" 
smile.smile_model = "Gateway" + smile.smile_model_id = "smile" smile.smile_name = "Smile P1" - smile.connect.return_value = True + smile.connect.return_value = Version("4.4.2") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -289,11 +299,38 @@ def mock_smile_p1_2() -> Generator[MagicMock]: smile.smile_type = "power" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" + smile.smile_model_id = "smile" smile.smile_name = "Smile P1" - smile.connect.return_value = True + smile.connect.return_value = Version("4.4.2") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] + ) + + yield smile + + +@pytest.fixture +def mock_smile_legacy_anna() -> Generator[MagicMock]: + """Create a Mock legacy Anna environment for testing exceptions.""" + chosen_env = "legacy_anna" + with patch( + "homeassistant.components.plugwise.coordinator.Smile", autospec=True + ) as smile_mock: + smile = smile_mock.return_value + + smile.gateway_id = "0000aaaa0000aaaa0000aaaa0000aa00" + smile.heater_id = "04e4cbfe7f4340f090f85ec3b9e6a950" + smile.smile_version = "1.8.22" + smile.smile_type = "thermostat" + smile.smile_hostname = "smile98765" + smile.smile_model = "Gateway" + smile.smile_model_id = None + smile.smile_name = "Smile Anna" + smile.connect.return_value = Version("1.8.22") + all_data = _read_json(chosen_env, "all_data") + smile.async_update.return_value = PlugwiseData( + all_data["devices"], all_data["gateway"] ) yield smile @@ -314,11 +351,12 @@ def mock_stretch() -> Generator[MagicMock]: smile.smile_type = "stretch" smile.smile_hostname = "stretch98765" smile.smile_model = "Gateway" + smile.smile_model_id = None smile.smile_name = "Stretch" - smile.connect.return_value = True + smile.connect.return_value = Version("3.1.11") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile diff --git a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json index 5088281404a..3a54c3fb9a2 100644 --- a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json +++ b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json @@ -10,6 +10,7 @@ "location": "a57efe5f145f498c9be62a9b63626fbf", "mac_address": "012345670001", "model": "Gateway", + "model_id": "smile_thermo", "name": "Smile Anna", "sensors": { "outdoor_temperature": 20.2 @@ -60,11 +61,12 @@ "3cb70739631c4d17a86b8b12e8a5161b": { "active_preset": "home", "available_schedules": ["standaard", "off"], + "climate_mode": "auto", + "control_state": "heating", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", "location": "c784ee9fdab44e1395b8dee7d7a497d5", - "mode": "auto", "model": "ThermoTouch", "name": "Anna", "preset_modes": ["no_frost", "home", "away", "asleep", "vacation"], @@ -97,7 +99,7 @@ "cooling_present": true, "gateway_id": "015ae9ea3f964e668e490fa39da3870b", "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", - "item_count": 66, + "item_count": 67, "notifications": {}, "reboot": true, "smile_name": "Smile Anna" diff --git a/tests/components/plugwise/fixtures/legacy_anna/all_data.json 
b/tests/components/plugwise/fixtures/legacy_anna/all_data.json new file mode 100644 index 00000000000..9275b82cde9 --- /dev/null +++ b/tests/components/plugwise/fixtures/legacy_anna/all_data.json @@ -0,0 +1,69 @@ +{ + "devices": { + "0000aaaa0000aaaa0000aaaa0000aa00": { + "dev_class": "gateway", + "firmware": "1.8.22", + "location": "0000aaaa0000aaaa0000aaaa0000aa00", + "mac_address": "01:23:45:67:89:AB", + "model": "Gateway", + "name": "Smile Anna", + "vendor": "Plugwise" + }, + "04e4cbfe7f4340f090f85ec3b9e6a950": { + "binary_sensors": { + "flame_state": true, + "heating_state": true + }, + "dev_class": "heater_central", + "location": "0000aaaa0000aaaa0000aaaa0000aa00", + "maximum_boiler_temperature": { + "lower_bound": 50.0, + "resolution": 1.0, + "setpoint": 50.0, + "upper_bound": 90.0 + }, + "model": "Generic heater", + "name": "OpenTherm", + "sensors": { + "dhw_temperature": 51.2, + "intended_boiler_temperature": 17.0, + "modulation_level": 0.0, + "return_temperature": 21.7, + "water_pressure": 1.2, + "water_temperature": 23.6 + }, + "vendor": "Bosch Thermotechniek B.V." + }, + "0d266432d64443e283b5d708ae98b455": { + "active_preset": "home", + "climate_mode": "heat", + "control_state": "heating", + "dev_class": "thermostat", + "firmware": "2017-03-13T11:54:58+01:00", + "hardware": "6539-1301-500", + "location": "0000aaaa0000aaaa0000aaaa0000aa00", + "model": "ThermoTouch", + "name": "Anna", + "preset_modes": ["away", "vacation", "asleep", "home", "no_frost"], + "sensors": { + "illuminance": 150.8, + "setpoint": 20.5, + "temperature": 20.4 + }, + "thermostat": { + "lower_bound": 4.0, + "resolution": 0.1, + "setpoint": 20.5, + "upper_bound": 30.0 + }, + "vendor": "Plugwise" + } + }, + "gateway": { + "cooling_present": false, + "gateway_id": "0000aaaa0000aaaa0000aaaa0000aa00", + "heater_id": "04e4cbfe7f4340f090f85ec3b9e6a950", + "item_count": 41, + "smile_name": "Smile Anna" + } +} diff --git a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json index 759d0094dbb..af6d4b83380 100644 --- a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json @@ -28,14 +28,19 @@ }, "1772a4ea304041adb83f357b751341ff": { "available": true, - "dev_class": "thermo_sensor", + "binary_sensors": { + "low_battery": false + }, + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "f871b8c4d63549319221e294e4f88074", "model": "Tom/Floor", + "model_id": "106-03", "name": "Tom Badkamer", "sensors": { "battery": 99, + "setpoint": 18.0, "temperature": 21.6, "temperature_difference": -0.2, "valve_position": 100 @@ -50,33 +55,16 @@ "zigbee_mac_address": "000D6F000C8FF5EE" }, "ad4838d7d35c4d6ea796ee12ae5aedf8": { - "active_preset": "home", "available": true, - "available_schedules": [ - "Badkamer", - "Test", - "Vakantie", - "Weekschema", - "off" - ], - "control_state": "cooling", "dev_class": "thermostat", "location": "f2bf9048bef64cc5b6d5110154e33c81", - "mode": "cool", "model": "ThermoTouch", + "model_id": "143.1", "name": "Anna", - "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "off", "sensors": { "setpoint": 23.5, "temperature": 25.8 }, - "thermostat": { - "lower_bound": 1.0, - "resolution": 0.01, - "setpoint": 23.5, - "upper_bound": 35.0 - }, "vendor": "Plugwise" }, "da224107914542988a88561b4452b0f6": { @@ -90,6 +78,7 @@ "location": 
"bc93488efab249e5bc54fd7e175a6f91", "mac_address": "012345679891", "model": "Gateway", + "model_id": "smile_open_therm", "name": "Adam", "regulation_modes": [ "bleeding_hot", @@ -107,27 +96,19 @@ "zigbee_mac_address": "000D6F000D5A168D" }, "e2f4322d57924fa090fbbc48b3a140dc": { - "active_preset": "home", "available": true, - "available_schedules": [ - "Badkamer", - "Test", - "Vakantie", - "Weekschema", - "off" - ], - "control_state": "preheating", + "binary_sensors": { + "low_battery": true + }, "dev_class": "zone_thermostat", "firmware": "2016-10-10T02:00:00+02:00", "hardware": "255", "location": "f871b8c4d63549319221e294e4f88074", - "mode": "auto", "model": "Lisa", + "model_id": "158-01", "name": "Lisa Badkamer", - "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "Badkamer", "sensors": { - "battery": 38, + "battery": 14, "setpoint": 23.5, "temperature": 23.9 }, @@ -137,12 +118,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 25.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "000D6F000C869B61" }, @@ -156,14 +131,81 @@ "name": "Test", "switches": { "relay": true - } + }, + "vendor": "Plugwise" + }, + "f2bf9048bef64cc5b6d5110154e33c81": { + "active_preset": "home", + "available_schedules": [ + "Badkamer", + "Test", + "Vakantie", + "Weekschema", + "off" + ], + "climate_mode": "cool", + "control_state": "cooling", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Living room", + "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], + "select_schedule": "off", + "sensors": { + "electricity_consumed": 149.9, + "electricity_produced": 0.0, + "temperature": 25.8 + }, + "thermostat": { + "lower_bound": 1.0, + "resolution": 0.01, + "setpoint": 23.5, + "upper_bound": 35.0 + }, + "thermostats": { + "primary": ["ad4838d7d35c4d6ea796ee12ae5aedf8"], + "secondary": [] + }, + "vendor": "Plugwise" + }, + "f871b8c4d63549319221e294e4f88074": { + "active_preset": "home", + "available_schedules": [ + "Badkamer", + "Test", + "Vakantie", + "Weekschema", + "off" + ], + "climate_mode": "auto", + "control_state": "cooling", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Bathroom", + "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], + "select_schedule": "Badkamer", + "sensors": { + "electricity_consumed": 0.0, + "electricity_produced": 0.0, + "temperature": 23.9 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 25.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["e2f4322d57924fa090fbbc48b3a140dc"], + "secondary": ["1772a4ea304041adb83f357b751341ff"] + }, + "vendor": "Plugwise" } }, "gateway": { "cooling_present": true, "gateway_id": "da224107914542988a88561b4452b0f6", "heater_id": "056ee145a816487eaa69243c3280f8bf", - "item_count": 147, + "item_count": 89, "notifications": {}, "reboot": true, "smile_name": "Adam" diff --git a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json index e2c23df42d6..bb24faeebfa 100644 --- a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json @@ -33,14 +33,19 @@ }, "1772a4ea304041adb83f357b751341ff": { "available": true, - "dev_class": "thermo_sensor", + "binary_sensors": { + "low_battery": false + }, + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", 
"hardware": "1", "location": "f871b8c4d63549319221e294e4f88074", "model": "Tom/Floor", + "model_id": "106-03", "name": "Tom Badkamer", "sensors": { "battery": 99, + "setpoint": 18.0, "temperature": 18.6, "temperature_difference": -0.2, "valve_position": 100 @@ -55,33 +60,16 @@ "zigbee_mac_address": "000D6F000C8FF5EE" }, "ad4838d7d35c4d6ea796ee12ae5aedf8": { - "active_preset": "home", "available": true, - "available_schedules": [ - "Badkamer", - "Test", - "Vakantie", - "Weekschema", - "off" - ], - "control_state": "preheating", "dev_class": "thermostat", "location": "f2bf9048bef64cc5b6d5110154e33c81", - "mode": "heat", "model": "ThermoTouch", + "model_id": "143.1", "name": "Anna", - "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "off", "sensors": { "setpoint": 20.0, "temperature": 19.1 }, - "thermostat": { - "lower_bound": 1.0, - "resolution": 0.01, - "setpoint": 20.0, - "upper_bound": 35.0 - }, "vendor": "Plugwise" }, "da224107914542988a88561b4452b0f6": { @@ -95,6 +83,7 @@ "location": "bc93488efab249e5bc54fd7e175a6f91", "mac_address": "012345679891", "model": "Gateway", + "model_id": "smile_open_therm", "name": "Adam", "regulation_modes": ["bleeding_hot", "bleeding_cold", "off", "heating"], "select_gateway_mode": "full", @@ -106,27 +95,19 @@ "zigbee_mac_address": "000D6F000D5A168D" }, "e2f4322d57924fa090fbbc48b3a140dc": { - "active_preset": "home", "available": true, - "available_schedules": [ - "Badkamer", - "Test", - "Vakantie", - "Weekschema", - "off" - ], - "control_state": "off", + "binary_sensors": { + "low_battery": true + }, "dev_class": "zone_thermostat", "firmware": "2016-10-10T02:00:00+02:00", "hardware": "255", "location": "f871b8c4d63549319221e294e4f88074", - "mode": "auto", "model": "Lisa", + "model_id": "158-01", "name": "Lisa Badkamer", - "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "Badkamer", "sensors": { - "battery": 38, + "battery": 14, "setpoint": 15.0, "temperature": 17.9 }, @@ -136,12 +117,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 15.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "000D6F000C869B61" }, @@ -155,14 +130,81 @@ "name": "Test", "switches": { "relay": true - } + }, + "vendor": "Plugwise" + }, + "f2bf9048bef64cc5b6d5110154e33c81": { + "active_preset": "home", + "available_schedules": [ + "Badkamer", + "Test", + "Vakantie", + "Weekschema", + "off" + ], + "climate_mode": "heat", + "control_state": "preheating", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Living room", + "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], + "select_schedule": "off", + "sensors": { + "electricity_consumed": 149.9, + "electricity_produced": 0.0, + "temperature": 19.1 + }, + "thermostat": { + "lower_bound": 1.0, + "resolution": 0.01, + "setpoint": 20.0, + "upper_bound": 35.0 + }, + "thermostats": { + "primary": ["ad4838d7d35c4d6ea796ee12ae5aedf8"], + "secondary": [] + }, + "vendor": "Plugwise" + }, + "f871b8c4d63549319221e294e4f88074": { + "active_preset": "home", + "available_schedules": [ + "Badkamer", + "Test", + "Vakantie", + "Weekschema", + "off" + ], + "climate_mode": "auto", + "control_state": "idle", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Bathroom", + "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], + "select_schedule": "Badkamer", + "sensors": { + "electricity_consumed": 0.0, + "electricity_produced": 0.0, + 
"temperature": 17.9 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 15.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["e2f4322d57924fa090fbbc48b3a140dc"], + "secondary": ["1772a4ea304041adb83f357b751341ff"] + }, + "vendor": "Plugwise" } }, "gateway": { "cooling_present": false, "gateway_id": "da224107914542988a88561b4452b0f6", "heater_id": "056ee145a816487eaa69243c3280f8bf", - "item_count": 147, + "item_count": 89, "notifications": {}, "reboot": true, "smile_name": "Adam" diff --git a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json index 7888d777804..1a3ef66c147 100644 --- a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json @@ -1,17 +1,62 @@ { "devices": { - "1346fbd8498d4dbcab7e18d51b771f3d": { + "06aecb3d00354375924f50c47af36bd2": { "active_preset": "no_frost", + "climate_mode": "off", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Slaapkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 24.2 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 13.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["1346fbd8498d4dbcab7e18d51b771f3d"], + "secondary": ["356b65335e274d769c338223e7af9c33"] + }, + "vendor": "Plugwise" + }, + "13228dab8ce04617af318a2888b3c548": { + "active_preset": "home", + "climate_mode": "heat", + "control_state": "idle", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Woonkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 27.4 + }, + "thermostat": { + "lower_bound": 4.0, + "resolution": 0.01, + "setpoint": 9.0, + "upper_bound": 30.0 + }, + "thermostats": { + "primary": ["f61f1a2535f54f52ad006a3d18e459ca"], + "secondary": ["833de10f269c4deab58fb9df69901b4e"] + }, + "vendor": "Plugwise" + }, + "1346fbd8498d4dbcab7e18d51b771f3d": { "available": true, - "control_state": "off", + "binary_sensors": { + "low_battery": false + }, "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "06aecb3d00354375924f50c47af36bd2", - "mode": "off", "model": "Lisa", + "model_id": "158-01", "name": "Slaapkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { "battery": 92, "setpoint": 13.0, @@ -23,22 +68,17 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 13.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A03" }, "1da4d325838e4ad8aac12177214505c9": { "available": true, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "d58fec52899f4f1c92e4f8fad6d8c48c", "model": "Tom/Floor", + "model_id": "106-03", "name": "Tom Logeerkamer", "sensors": { "setpoint": 13.0, @@ -57,11 +97,12 @@ }, "356b65335e274d769c338223e7af9c33": { "available": true, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "06aecb3d00354375924f50c47af36bd2", "model": "Tom/Floor", + "model_id": "106-03", "name": "Tom Slaapkamer", "sensors": { "setpoint": 13.0, @@ -80,9 +121,10 @@ }, "457ce8414de24596a2d5e7dbc9c7682f": { "available": true, - "dev_class": "zz_misc", + 
"dev_class": "zz_misc_plug", "location": "9e4433a9d69f40b3aefd15e74395eaec", - "model": "lumi.plug.maeu01", + "model": "Aqara Smart Plug", + "model_id": "lumi.plug.maeu01", "name": "Plug", "sensors": { "electricity_consumed_interval": 0.0 @@ -95,17 +137,17 @@ "zigbee_mac_address": "ABCD012345670A06" }, "6f3e9d7084214c21b9dfa46f6eeb8700": { - "active_preset": "home", "available": true, - "control_state": "off", + "binary_sensors": { + "low_battery": false + }, "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "d27aede973b54be484f6842d1b2802ad", - "mode": "heat", "model": "Lisa", + "model_id": "158-01", "name": "Kinderkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { "battery": 79, "setpoint": 13.0, @@ -117,22 +159,17 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 13.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A02" }, "833de10f269c4deab58fb9df69901b4e": { "available": true, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "13228dab8ce04617af318a2888b3c548", "model": "Tom/Floor", + "model_id": "106-03", "name": "Tom Woonkamer", "sensors": { "setpoint": 9.0, @@ -150,17 +187,17 @@ "zigbee_mac_address": "ABCD012345670A09" }, "a6abc6a129ee499c88a4d420cc413b47": { - "active_preset": "home", "available": true, - "control_state": "off", + "binary_sensors": { + "low_battery": false + }, "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "d58fec52899f4f1c92e4f8fad6d8c48c", - "mode": "heat", "model": "Lisa", + "model_id": "158-01", "name": "Logeerkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { "battery": 80, "setpoint": 13.0, @@ -172,12 +209,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 13.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A01" }, @@ -192,6 +223,7 @@ "location": "9e4433a9d69f40b3aefd15e74395eaec", "mac_address": "012345670001", "model": "Gateway", + "model_id": "smile_open_therm", "name": "Adam", "regulation_modes": ["heating", "off", "bleeding_cold", "bleeding_hot"], "select_gateway_mode": "full", @@ -202,13 +234,37 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670101" }, + "d27aede973b54be484f6842d1b2802ad": { + "active_preset": "home", + "climate_mode": "heat", + "control_state": "idle", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Kinderkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 30.0 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 13.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["6f3e9d7084214c21b9dfa46f6eeb8700"], + "secondary": ["d4496250d0e942cfa7aea3476e9070d5"] + }, + "vendor": "Plugwise" + }, "d4496250d0e942cfa7aea3476e9070d5": { "available": true, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "d27aede973b54be484f6842d1b2802ad", "model": "Tom/Floor", + "model_id": "106-03", "name": "Tom Kinderkamer", "sensors": { "setpoint": 13.0, @@ -225,6 +281,29 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A04" }, + 
"d58fec52899f4f1c92e4f8fad6d8c48c": { + "active_preset": "home", + "climate_mode": "heat", + "control_state": "idle", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Logeerkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 30.0 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 13.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["a6abc6a129ee499c88a4d420cc413b47"], + "secondary": ["1da4d325838e4ad8aac12177214505c9"] + }, + "vendor": "Plugwise" + }, "e4684553153b44afbef2200885f379dc": { "available": true, "binary_sensors": { @@ -246,7 +325,8 @@ "setpoint": 90.0, "upper_bound": 90.0 }, - "model": "10.20", + "model": "Generic heater", + "model_id": "10.20", "name": "OpenTherm", "sensors": { "intended_boiler_temperature": 0.0, @@ -261,17 +341,17 @@ "vendor": "Remeha B.V." }, "f61f1a2535f54f52ad006a3d18e459ca": { - "active_preset": "home", "available": true, - "control_state": "off", + "binary_sensors": { + "low_battery": false + }, "dev_class": "zone_thermometer", "firmware": "2020-09-01T02:00:00+02:00", "hardware": "1", "location": "13228dab8ce04617af318a2888b3c548", - "mode": "heat", "model": "Jip", + "model_id": "168-01", "name": "Woonkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { "battery": 100, "humidity": 56.2, @@ -284,12 +364,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 4.0, - "resolution": 0.01, - "setpoint": 9.0, - "upper_bound": 30.0 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A08" } @@ -298,7 +372,7 @@ "cooling_present": false, "gateway_id": "b5c2386c6f6342669e50fe49dd05b188", "heater_id": "e4684553153b44afbef2200885f379dc", - "item_count": 213, + "item_count": 244, "notifications": {}, "reboot": true, "smile_name": "Adam" diff --git a/tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json similarity index 76% rename from tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json rename to tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json index 9c17df5072d..8da184a7a3e 100644 --- a/tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json @@ -2,10 +2,11 @@ "devices": { "02cf28bfec924855854c544690a609ef": { "available": true, - "dev_class": "vcr", + "dev_class": "vcr_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", + "model_id": "160-01", "name": "NVR", "sensors": { "electricity_consumed": 34.0, @@ -20,12 +21,82 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A15" }, + "08963fec7c53423ca5680aa4cb502c63": { + "active_preset": "away", + "available_schedules": [ + "CV Roan", + "Bios Schema met Film Avond", + "GF7 Woonkamer", + "Badkamer Schema", + "CV Jessie", + "off" + ], + "climate_mode": "auto", + "control_state": "idle", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Badkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "select_schedule": "Badkamer Schema", + "sensors": { + "temperature": 18.9 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 14.0, + "upper_bound": 100.0 + }, + "thermostats": { + "primary": [ + "f1fee6043d3642a9b0a65297455f008e", + 
"680423ff840043738f42cc7f1ff97a36" + ], + "secondary": [] + }, + "vendor": "Plugwise" + }, + "12493538af164a409c6a1c79e38afe1c": { + "active_preset": "away", + "available_schedules": [ + "CV Roan", + "Bios Schema met Film Avond", + "GF7 Woonkamer", + "Badkamer Schema", + "CV Jessie", + "off" + ], + "climate_mode": "heat", + "control_state": "idle", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Bios", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "select_schedule": "off", + "sensors": { + "electricity_consumed": 0.0, + "electricity_produced": 0.0, + "temperature": 16.5 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 13.0, + "upper_bound": 100.0 + }, + "thermostats": { + "primary": ["df4a4a8169904cdb9c03d61a21f42140"], + "secondary": ["a2c3583e0a6349358998b760cea82d2a"] + }, + "vendor": "Plugwise" + }, "21f2b542c49845e6bb416884c55778d6": { "available": true, - "dev_class": "game_console", + "dev_class": "game_console_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", + "model_id": "160-01", "name": "Playstation Smart Plug", "sensors": { "electricity_consumed": 84.1, @@ -40,12 +111,36 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A12" }, + "446ac08dd04d4eff8ac57489757b7314": { + "active_preset": "no_frost", + "climate_mode": "heat", + "control_state": "idle", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Garage", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 15.6 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 5.5, + "upper_bound": 100.0 + }, + "thermostats": { + "primary": ["e7693eb9582644e5b865dba8d4447cf1"], + "secondary": [] + }, + "vendor": "Plugwise" + }, "4a810418d5394b3f82727340b91ba740": { "available": true, - "dev_class": "router", + "dev_class": "router_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", + "model_id": "160-01", "name": "USG Smart Plug", "sensors": { "electricity_consumed": 8.5, @@ -62,10 +157,11 @@ }, "675416a629f343c495449970e2ca37b5": { "available": true, - "dev_class": "router", + "dev_class": "router_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", + "model_id": "160-01", "name": "Ziggo Modem", "sensors": { "electricity_consumed": 12.2, @@ -82,12 +178,16 @@ }, "680423ff840043738f42cc7f1ff97a36": { "available": true, - "dev_class": "thermo_sensor", + "binary_sensors": { + "low_battery": false + }, + "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "08963fec7c53423ca5680aa4cb502c63", "model": "Tom/Floor", - "name": "Thermostatic Radiator Badkamer", + "model_id": "106-03", + "name": "Thermostatic Radiator Badkamer 1", "sensors": { "battery": 51, "setpoint": 14.0, @@ -105,25 +205,17 @@ "zigbee_mac_address": "ABCD012345670A17" }, "6a3bf693d05e48e0b460c815a4fdd09d": { - "active_preset": "asleep", "available": true, - "available_schedules": [ - "CV Roan", - "Bios Schema met Film Avond", - "GF7 Woonkamer", - "Badkamer Schema", - "CV Jessie", - "off" - ], + "binary_sensors": { + "low_battery": false + }, "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "82fa13f017d240daa0d0ea1775420f24", - "mode": "auto", "model": "Lisa", + "model_id": "158-01", "name": "Zone Thermostat 
Jessie", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "CV Jessie", "sensors": { "battery": 37, "setpoint": 15.0, @@ -135,21 +227,16 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 15.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A03" }, "78d1126fc4c743db81b61c20e88342a7": { "available": true, - "dev_class": "central_heating_pump", + "dev_class": "central_heating_pump_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "c50f167537524366a5af7aa3942feb1e", "model": "Plug", + "model_id": "160-01", "name": "CV Pomp", "sensors": { "electricity_consumed": 35.6, @@ -163,6 +250,38 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A05" }, + "82fa13f017d240daa0d0ea1775420f24": { + "active_preset": "asleep", + "available_schedules": [ + "CV Roan", + "Bios Schema met Film Avond", + "GF7 Woonkamer", + "Badkamer Schema", + "CV Jessie", + "off" + ], + "climate_mode": "auto", + "control_state": "idle", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Jessie", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "select_schedule": "CV Jessie", + "sensors": { + "temperature": 17.2 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 15.0, + "upper_bound": 100.0 + }, + "thermostats": { + "primary": ["6a3bf693d05e48e0b460c815a4fdd09d"], + "secondary": ["d3da73bde12a47d5a6b8f9dad971f2ec"] + }, + "vendor": "Plugwise" + }, "90986d591dcd426cae3ec3e8111ff730": { "binary_sensors": { "heating_state": true @@ -179,10 +298,11 @@ }, "a28f588dc4a049a483fd03a30361ad3a": { "available": true, - "dev_class": "settop", + "dev_class": "settop_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", + "model_id": "160-01", "name": "Fibaro HC2", "sensors": { "electricity_consumed": 12.5, @@ -199,11 +319,15 @@ }, "a2c3583e0a6349358998b760cea82d2a": { "available": true, - "dev_class": "thermo_sensor", + "binary_sensors": { + "low_battery": false + }, + "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "12493538af164a409c6a1c79e38afe1c", "model": "Tom/Floor", + "model_id": "106-03", "name": "Bios Cv Thermostatic Radiator ", "sensors": { "battery": 62, @@ -223,11 +347,12 @@ }, "b310b72a0e354bfab43089919b9a88bf": { "available": true, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "c50f167537524366a5af7aa3942feb1e", "model": "Tom/Floor", + "model_id": "106-03", "name": "Floor kraan", "sensors": { "setpoint": 21.5, @@ -245,25 +370,17 @@ "zigbee_mac_address": "ABCD012345670A02" }, "b59bcebaf94b499ea7d46e4a66fb62d8": { - "active_preset": "home", "available": true, - "available_schedules": [ - "CV Roan", - "Bios Schema met Film Avond", - "GF7 Woonkamer", - "Badkamer Schema", - "CV Jessie", - "off" - ], + "binary_sensors": { + "low_battery": false + }, "dev_class": "zone_thermostat", "firmware": "2016-08-02T02:00:00+02:00", "hardware": "255", "location": "c50f167537524366a5af7aa3942feb1e", - "mode": "auto", "model": "Lisa", + "model_id": "158-01", "name": "Zone Lisa WK", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "GF7 Woonkamer", "sensors": { "battery": 34, "setpoint": 21.5, @@ -275,21 +392,50 @@ "setpoint": 0.0, "upper_bound": 2.0 }, + 
"vendor": "Plugwise", + "zigbee_mac_address": "ABCD012345670A07" + }, + "c50f167537524366a5af7aa3942feb1e": { + "active_preset": "home", + "available_schedules": [ + "CV Roan", + "Bios Schema met Film Avond", + "GF7 Woonkamer", + "Badkamer Schema", + "CV Jessie", + "off" + ], + "climate_mode": "auto", + "control_state": "heating", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Woonkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "select_schedule": "GF7 Woonkamer", + "sensors": { + "electricity_consumed": 35.6, + "electricity_produced": 0.0, + "temperature": 20.9 + }, "thermostat": { "lower_bound": 0.0, "resolution": 0.01, "setpoint": 21.5, - "upper_bound": 99.9 + "upper_bound": 100.0 }, - "vendor": "Plugwise", - "zigbee_mac_address": "ABCD012345670A07" + "thermostats": { + "primary": ["b59bcebaf94b499ea7d46e4a66fb62d8"], + "secondary": ["b310b72a0e354bfab43089919b9a88bf"] + }, + "vendor": "Plugwise" }, "cd0ddb54ef694e11ac18ed1cbce5dbbd": { "available": true, - "dev_class": "vcr", + "dev_class": "vcr_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", + "model_id": "160-01", "name": "NAS", "sensors": { "electricity_consumed": 16.5, @@ -306,11 +452,15 @@ }, "d3da73bde12a47d5a6b8f9dad971f2ec": { "available": true, - "dev_class": "thermo_sensor", + "binary_sensors": { + "low_battery": false + }, + "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "82fa13f017d240daa0d0ea1775420f24", "model": "Tom/Floor", + "model_id": "106-03", "name": "Thermostatic Radiator Jessie", "sensors": { "battery": 62, @@ -329,25 +479,17 @@ "zigbee_mac_address": "ABCD012345670A10" }, "df4a4a8169904cdb9c03d61a21f42140": { - "active_preset": "away", "available": true, - "available_schedules": [ - "CV Roan", - "Bios Schema met Film Avond", - "GF7 Woonkamer", - "Badkamer Schema", - "CV Jessie", - "off" - ], + "binary_sensors": { + "low_battery": false + }, "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "12493538af164a409c6a1c79e38afe1c", - "mode": "heat", "model": "Lisa", + "model_id": "158-01", "name": "Zone Lisa Bios", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "off", "sensors": { "battery": 67, "setpoint": 13.0, @@ -359,35 +501,21 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 13.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A06" }, "e7693eb9582644e5b865dba8d4447cf1": { - "active_preset": "no_frost", "available": true, - "available_schedules": [ - "CV Roan", - "Bios Schema met Film Avond", - "GF7 Woonkamer", - "Badkamer Schema", - "CV Jessie", - "off" - ], + "binary_sensors": { + "low_battery": false + }, "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "446ac08dd04d4eff8ac57489757b7314", - "mode": "heat", "model": "Tom/Floor", + "model_id": "106-03", "name": "CV Kraan Garage", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "off", "sensors": { "battery": 68, "setpoint": 5.5, @@ -401,35 +529,21 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 5.5, - "upper_bound": 100.0 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A11" }, 
"f1fee6043d3642a9b0a65297455f008e": { - "active_preset": "away", "available": true, - "available_schedules": [ - "CV Roan", - "Bios Schema met Film Avond", - "GF7 Woonkamer", - "Badkamer Schema", - "CV Jessie", - "off" - ], - "dev_class": "zone_thermostat", + "binary_sensors": { + "low_battery": false + }, + "dev_class": "thermostatic_radiator_valve", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "08963fec7c53423ca5680aa4cb502c63", - "mode": "auto", "model": "Lisa", - "name": "Zone Thermostat Badkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "Badkamer Schema", + "model_id": "158-01", + "name": "Thermostatic Radiator Badkamer 2", "sensors": { "battery": 92, "setpoint": 14.0, @@ -441,12 +555,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 14.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A08" }, @@ -460,6 +568,7 @@ "location": "1f9dcf83fd4e4b66b72ff787957bfe5d", "mac_address": "012345670001", "model": "Gateway", + "model_id": "smile_open_therm", "name": "Adam", "select_regulation_mode": "heating", "sensors": { @@ -473,7 +582,7 @@ "cooling_present": false, "gateway_id": "fe799307f1624099878210aa0b9f1475", "heater_id": "90986d591dcd426cae3ec3e8111ff730", - "item_count": 315, + "item_count": 369, "notifications": { "af82e4ccf9c548528166d38e560662a4": { "warning": "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device." diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json index cb30b919797..eaa42facf10 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json @@ -10,6 +10,7 @@ "location": "a57efe5f145f498c9be62a9b63626fbf", "mac_address": "012345670001", "model": "Gateway", + "model_id": "smile_thermo", "name": "Smile Anna", "sensors": { "outdoor_temperature": 28.2 @@ -60,11 +61,12 @@ "3cb70739631c4d17a86b8b12e8a5161b": { "active_preset": "home", "available_schedules": ["standaard", "off"], + "climate_mode": "auto", + "control_state": "cooling", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", "location": "c784ee9fdab44e1395b8dee7d7a497d5", - "mode": "auto", "model": "ThermoTouch", "name": "Anna", "preset_modes": ["no_frost", "home", "away", "asleep", "vacation"], @@ -97,7 +99,7 @@ "cooling_present": true, "gateway_id": "015ae9ea3f964e668e490fa39da3870b", "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", - "item_count": 66, + "item_count": 67, "notifications": {}, "reboot": true, "smile_name": "Smile Anna" diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json index 660f6b5a76b..52645b0f317 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json @@ -10,6 +10,7 @@ "location": "a57efe5f145f498c9be62a9b63626fbf", "mac_address": "012345670001", "model": "Gateway", + "model_id": "smile_thermo", "name": "Smile Anna", "sensors": { "outdoor_temperature": 28.2 @@ -60,11 +61,12 @@ "3cb70739631c4d17a86b8b12e8a5161b": { "active_preset": "home", 
"available_schedules": ["standaard", "off"], + "climate_mode": "auto", + "control_state": "idle", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", "location": "c784ee9fdab44e1395b8dee7d7a497d5", - "mode": "auto", "model": "ThermoTouch", "name": "Anna", "preset_modes": ["no_frost", "home", "away", "asleep", "vacation"], @@ -97,7 +99,7 @@ "cooling_present": true, "gateway_id": "015ae9ea3f964e668e490fa39da3870b", "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", - "item_count": 66, + "item_count": 67, "notifications": {}, "reboot": true, "smile_name": "Smile Anna" diff --git a/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json b/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json index 7f152779252..3ea4bb01be2 100644 --- a/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json +++ b/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json @@ -10,6 +10,7 @@ "location": "a455b61e52394b2db5081ce025a430f3", "mac_address": "012345670001", "model": "Gateway", + "model_id": "smile", "name": "Smile P1", "vendor": "Plugwise" }, @@ -42,7 +43,7 @@ }, "gateway": { "gateway_id": "a455b61e52394b2db5081ce025a430f3", - "item_count": 31, + "item_count": 32, "notifications": {}, "reboot": true, "smile_name": "Smile P1" diff --git a/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json b/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json index 582c883a3a7..b7476b24a1e 100644 --- a/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json +++ b/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json @@ -10,6 +10,7 @@ "location": "03e65b16e4b247a29ae0d75a78cb492e", "mac_address": "012345670001", "model": "Gateway", + "model_id": "smile", "name": "Smile P1", "vendor": "Plugwise" }, @@ -51,7 +52,7 @@ }, "gateway": { "gateway_id": "03e65b16e4b247a29ae0d75a78cb492e", - "item_count": 40, + "item_count": 41, "notifications": { "97a04c0c263049b29350a660b4cdd01e": { "warning": "The Smile P1 is not connected to a smart meter." 
diff --git a/tests/components/plugwise/fixtures/stretch_v31/all_data.json b/tests/components/plugwise/fixtures/stretch_v31/all_data.json index a875324fc13..b1675116bdf 100644 --- a/tests/components/plugwise/fixtures/stretch_v31/all_data.json +++ b/tests/components/plugwise/fixtures/stretch_v31/all_data.json @@ -96,7 +96,8 @@ "name": "Schakel", "switches": { "relay": true - } + }, + "vendor": "Plugwise" }, "d950b314e9d8499f968e6db8d82ef78c": { "dev_class": "report", @@ -111,7 +112,8 @@ "name": "Stroomvreters", "switches": { "relay": true - } + }, + "vendor": "Plugwise" }, "e1c884e7dede431dadee09506ec4f859": { "dev_class": "refrigerator", diff --git a/tests/components/plugwise/snapshots/test_diagnostics.ambr b/tests/components/plugwise/snapshots/test_diagnostics.ambr index 44f4023d014..806c92fe7cb 100644 --- a/tests/components/plugwise/snapshots/test_diagnostics.ambr +++ b/tests/components/plugwise/snapshots/test_diagnostics.ambr @@ -4,10 +4,11 @@ 'devices': dict({ '02cf28bfec924855854c544690a609ef': dict({ 'available': True, - 'dev_class': 'vcr', + 'dev_class': 'vcr_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', + 'model_id': '160-01', 'name': 'NVR', 'sensors': dict({ 'electricity_consumed': 34.0, @@ -22,12 +23,99 @@ 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A15', }), + '08963fec7c53423ca5680aa4cb502c63': dict({ + 'active_preset': 'away', + 'available_schedules': list([ + 'CV Roan', + 'Bios Schema met Film Avond', + 'GF7 Woonkamer', + 'Badkamer Schema', + 'CV Jessie', + 'off', + ]), + 'climate_mode': 'auto', + 'control_state': 'idle', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Badkamer', + 'preset_modes': list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'select_schedule': 'Badkamer Schema', + 'sensors': dict({ + 'temperature': 18.9, + }), + 'thermostat': dict({ + 'lower_bound': 0.0, + 'resolution': 0.01, + 'setpoint': 14.0, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + 'f1fee6043d3642a9b0a65297455f008e', + '680423ff840043738f42cc7f1ff97a36', + ]), + 'secondary': list([ + ]), + }), + 'vendor': 'Plugwise', + }), + '12493538af164a409c6a1c79e38afe1c': dict({ + 'active_preset': 'away', + 'available_schedules': list([ + 'CV Roan', + 'Bios Schema met Film Avond', + 'GF7 Woonkamer', + 'Badkamer Schema', + 'CV Jessie', + 'off', + ]), + 'climate_mode': 'heat', + 'control_state': 'idle', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Bios', + 'preset_modes': list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'select_schedule': 'off', + 'sensors': dict({ + 'electricity_consumed': 0.0, + 'electricity_produced': 0.0, + 'temperature': 16.5, + }), + 'thermostat': dict({ + 'lower_bound': 0.0, + 'resolution': 0.01, + 'setpoint': 13.0, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + 'df4a4a8169904cdb9c03d61a21f42140', + ]), + 'secondary': list([ + 'a2c3583e0a6349358998b760cea82d2a', + ]), + }), + 'vendor': 'Plugwise', + }), '21f2b542c49845e6bb416884c55778d6': dict({ 'available': True, - 'dev_class': 'game_console', + 'dev_class': 'game_console_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', + 'model_id': '160-01', 'name': 'Playstation Smart Plug', 'sensors': dict({ 'electricity_consumed': 84.1, @@ -42,12 +130,45 @@ 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A12', }), + '446ac08dd04d4eff8ac57489757b7314': dict({ 
+ 'active_preset': 'no_frost', + 'climate_mode': 'heat', + 'control_state': 'idle', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Garage', + 'preset_modes': list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'sensors': dict({ + 'temperature': 15.6, + }), + 'thermostat': dict({ + 'lower_bound': 0.0, + 'resolution': 0.01, + 'setpoint': 5.5, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + 'e7693eb9582644e5b865dba8d4447cf1', + ]), + 'secondary': list([ + ]), + }), + 'vendor': 'Plugwise', + }), '4a810418d5394b3f82727340b91ba740': dict({ 'available': True, - 'dev_class': 'router', + 'dev_class': 'router_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', + 'model_id': '160-01', 'name': 'USG Smart Plug', 'sensors': dict({ 'electricity_consumed': 8.5, @@ -64,10 +185,11 @@ }), '675416a629f343c495449970e2ca37b5': dict({ 'available': True, - 'dev_class': 'router', + 'dev_class': 'router_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', + 'model_id': '160-01', 'name': 'Ziggo Modem', 'sensors': dict({ 'electricity_consumed': 12.2, @@ -84,12 +206,16 @@ }), '680423ff840043738f42cc7f1ff97a36': dict({ 'available': True, - 'dev_class': 'thermo_sensor', + 'binary_sensors': dict({ + 'low_battery': False, + }), + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '08963fec7c53423ca5680aa4cb502c63', 'model': 'Tom/Floor', - 'name': 'Thermostatic Radiator Badkamer', + 'model_id': '106-03', + 'name': 'Thermostatic Radiator Badkamer 1', 'sensors': dict({ 'battery': 51, 'setpoint': 14.0, @@ -107,31 +233,17 @@ 'zigbee_mac_address': 'ABCD012345670A17', }), '6a3bf693d05e48e0b460c815a4fdd09d': dict({ - 'active_preset': 'asleep', 'available': True, - 'available_schedules': list([ - 'CV Roan', - 'Bios Schema met Film Avond', - 'GF7 Woonkamer', - 'Badkamer Schema', - 'CV Jessie', - 'off', - ]), + 'binary_sensors': dict({ + 'low_battery': False, + }), 'dev_class': 'zone_thermostat', 'firmware': '2016-10-27T02:00:00+02:00', 'hardware': '255', 'location': '82fa13f017d240daa0d0ea1775420f24', - 'mode': 'auto', 'model': 'Lisa', + 'model_id': '158-01', 'name': 'Zone Thermostat Jessie', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), - 'select_schedule': 'CV Jessie', 'sensors': dict({ 'battery': 37, 'setpoint': 15.0, @@ -143,21 +255,16 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), - 'thermostat': dict({ - 'lower_bound': 0.0, - 'resolution': 0.01, - 'setpoint': 15.0, - 'upper_bound': 99.9, - }), 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A03', }), '78d1126fc4c743db81b61c20e88342a7': dict({ 'available': True, - 'dev_class': 'central_heating_pump', + 'dev_class': 'central_heating_pump_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'c50f167537524366a5af7aa3942feb1e', 'model': 'Plug', + 'model_id': '160-01', 'name': 'CV Pomp', 'sensors': dict({ 'electricity_consumed': 35.6, @@ -171,6 +278,48 @@ 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A05', }), + '82fa13f017d240daa0d0ea1775420f24': dict({ + 'active_preset': 'asleep', + 'available_schedules': list([ + 'CV Roan', + 'Bios Schema met Film Avond', + 'GF7 Woonkamer', + 'Badkamer Schema', + 'CV Jessie', + 'off', + ]), + 'climate_mode': 'auto', + 'control_state': 'idle', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Jessie', + 'preset_modes': 
list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'select_schedule': 'CV Jessie', + 'sensors': dict({ + 'temperature': 17.2, + }), + 'thermostat': dict({ + 'lower_bound': 0.0, + 'resolution': 0.01, + 'setpoint': 15.0, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + '6a3bf693d05e48e0b460c815a4fdd09d', + ]), + 'secondary': list([ + 'd3da73bde12a47d5a6b8f9dad971f2ec', + ]), + }), + 'vendor': 'Plugwise', + }), '90986d591dcd426cae3ec3e8111ff730': dict({ 'binary_sensors': dict({ 'heating_state': True, @@ -187,10 +336,11 @@ }), 'a28f588dc4a049a483fd03a30361ad3a': dict({ 'available': True, - 'dev_class': 'settop', + 'dev_class': 'settop_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', + 'model_id': '160-01', 'name': 'Fibaro HC2', 'sensors': dict({ 'electricity_consumed': 12.5, @@ -207,11 +357,15 @@ }), 'a2c3583e0a6349358998b760cea82d2a': dict({ 'available': True, - 'dev_class': 'thermo_sensor', + 'binary_sensors': dict({ + 'low_battery': False, + }), + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '12493538af164a409c6a1c79e38afe1c', 'model': 'Tom/Floor', + 'model_id': '106-03', 'name': 'Bios Cv Thermostatic Radiator ', 'sensors': dict({ 'battery': 62, @@ -231,11 +385,12 @@ }), 'b310b72a0e354bfab43089919b9a88bf': dict({ 'available': True, - 'dev_class': 'thermo_sensor', + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': 'c50f167537524366a5af7aa3942feb1e', 'model': 'Tom/Floor', + 'model_id': '106-03', 'name': 'Floor kraan', 'sensors': dict({ 'setpoint': 21.5, @@ -253,31 +408,17 @@ 'zigbee_mac_address': 'ABCD012345670A02', }), 'b59bcebaf94b499ea7d46e4a66fb62d8': dict({ - 'active_preset': 'home', 'available': True, - 'available_schedules': list([ - 'CV Roan', - 'Bios Schema met Film Avond', - 'GF7 Woonkamer', - 'Badkamer Schema', - 'CV Jessie', - 'off', - ]), + 'binary_sensors': dict({ + 'low_battery': False, + }), 'dev_class': 'zone_thermostat', 'firmware': '2016-08-02T02:00:00+02:00', 'hardware': '255', 'location': 'c50f167537524366a5af7aa3942feb1e', - 'mode': 'auto', 'model': 'Lisa', + 'model_id': '158-01', 'name': 'Zone Lisa WK', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), - 'select_schedule': 'GF7 Woonkamer', 'sensors': dict({ 'battery': 34, 'setpoint': 21.5, @@ -289,21 +430,60 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), + 'vendor': 'Plugwise', + 'zigbee_mac_address': 'ABCD012345670A07', + }), + 'c50f167537524366a5af7aa3942feb1e': dict({ + 'active_preset': 'home', + 'available_schedules': list([ + 'CV Roan', + 'Bios Schema met Film Avond', + 'GF7 Woonkamer', + 'Badkamer Schema', + 'CV Jessie', + 'off', + ]), + 'climate_mode': 'auto', + 'control_state': 'heating', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Woonkamer', + 'preset_modes': list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'select_schedule': 'GF7 Woonkamer', + 'sensors': dict({ + 'electricity_consumed': 35.6, + 'electricity_produced': 0.0, + 'temperature': 20.9, + }), 'thermostat': dict({ 'lower_bound': 0.0, 'resolution': 0.01, 'setpoint': 21.5, - 'upper_bound': 99.9, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + 'b59bcebaf94b499ea7d46e4a66fb62d8', + ]), + 'secondary': list([ + 'b310b72a0e354bfab43089919b9a88bf', + ]), }), 'vendor': 'Plugwise', - 'zigbee_mac_address': 
'ABCD012345670A07', }), 'cd0ddb54ef694e11ac18ed1cbce5dbbd': dict({ 'available': True, - 'dev_class': 'vcr', + 'dev_class': 'vcr_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', + 'model_id': '160-01', 'name': 'NAS', 'sensors': dict({ 'electricity_consumed': 16.5, @@ -320,11 +500,15 @@ }), 'd3da73bde12a47d5a6b8f9dad971f2ec': dict({ 'available': True, - 'dev_class': 'thermo_sensor', + 'binary_sensors': dict({ + 'low_battery': False, + }), + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '82fa13f017d240daa0d0ea1775420f24', 'model': 'Tom/Floor', + 'model_id': '106-03', 'name': 'Thermostatic Radiator Jessie', 'sensors': dict({ 'battery': 62, @@ -343,31 +527,17 @@ 'zigbee_mac_address': 'ABCD012345670A10', }), 'df4a4a8169904cdb9c03d61a21f42140': dict({ - 'active_preset': 'away', 'available': True, - 'available_schedules': list([ - 'CV Roan', - 'Bios Schema met Film Avond', - 'GF7 Woonkamer', - 'Badkamer Schema', - 'CV Jessie', - 'off', - ]), + 'binary_sensors': dict({ + 'low_battery': False, + }), 'dev_class': 'zone_thermostat', 'firmware': '2016-10-27T02:00:00+02:00', 'hardware': '255', 'location': '12493538af164a409c6a1c79e38afe1c', - 'mode': 'heat', 'model': 'Lisa', + 'model_id': '158-01', 'name': 'Zone Lisa Bios', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), - 'select_schedule': 'off', 'sensors': dict({ 'battery': 67, 'setpoint': 13.0, @@ -379,41 +549,21 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), - 'thermostat': dict({ - 'lower_bound': 0.0, - 'resolution': 0.01, - 'setpoint': 13.0, - 'upper_bound': 99.9, - }), 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A06', }), 'e7693eb9582644e5b865dba8d4447cf1': dict({ - 'active_preset': 'no_frost', 'available': True, - 'available_schedules': list([ - 'CV Roan', - 'Bios Schema met Film Avond', - 'GF7 Woonkamer', - 'Badkamer Schema', - 'CV Jessie', - 'off', - ]), + 'binary_sensors': dict({ + 'low_battery': False, + }), 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '446ac08dd04d4eff8ac57489757b7314', - 'mode': 'heat', 'model': 'Tom/Floor', + 'model_id': '106-03', 'name': 'CV Kraan Garage', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), - 'select_schedule': 'off', 'sensors': dict({ 'battery': 68, 'setpoint': 5.5, @@ -427,41 +577,21 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), - 'thermostat': dict({ - 'lower_bound': 0.0, - 'resolution': 0.01, - 'setpoint': 5.5, - 'upper_bound': 100.0, - }), 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A11', }), 'f1fee6043d3642a9b0a65297455f008e': dict({ - 'active_preset': 'away', 'available': True, - 'available_schedules': list([ - 'CV Roan', - 'Bios Schema met Film Avond', - 'GF7 Woonkamer', - 'Badkamer Schema', - 'CV Jessie', - 'off', - ]), - 'dev_class': 'zone_thermostat', + 'binary_sensors': dict({ + 'low_battery': False, + }), + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2016-10-27T02:00:00+02:00', 'hardware': '255', 'location': '08963fec7c53423ca5680aa4cb502c63', - 'mode': 'auto', 'model': 'Lisa', - 'name': 'Zone Thermostat Badkamer', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), - 'select_schedule': 'Badkamer Schema', + 'model_id': '158-01', + 'name': 'Thermostatic Radiator Badkamer 2', 'sensors': dict({ 'battery': 92, 'setpoint': 14.0, @@ -473,12 
+603,6 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), - 'thermostat': dict({ - 'lower_bound': 0.0, - 'resolution': 0.01, - 'setpoint': 14.0, - 'upper_bound': 99.9, - }), 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A08', }), @@ -492,6 +616,7 @@ 'location': '1f9dcf83fd4e4b66b72ff787957bfe5d', 'mac_address': '012345670001', 'model': 'Gateway', + 'model_id': 'smile_open_therm', 'name': 'Adam', 'select_regulation_mode': 'heating', 'sensors': dict({ @@ -505,7 +630,7 @@ 'cooling_present': False, 'gateway_id': 'fe799307f1624099878210aa0b9f1475', 'heater_id': '90986d591dcd426cae3ec3e8111ff730', - 'item_count': 315, + 'item_count': 369, 'notifications': dict({ 'af82e4ccf9c548528166d38e560662a4': dict({ 'warning': "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device.", diff --git a/tests/components/plugwise/test_binary_sensor.py b/tests/components/plugwise/test_binary_sensor.py index 878300bddb4..5c0e3fbdd2e 100644 --- a/tests/components/plugwise/test_binary_sensor.py +++ b/tests/components/plugwise/test_binary_sensor.py @@ -56,7 +56,7 @@ async def test_anna_climate_binary_sensor_change( async def test_adam_climate_binary_sensor_change( hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry ) -> None: - """Test change of climate related binary_sensor entities.""" + """Test of a climate related plugwise-notification binary_sensor.""" state = hass.states.get("binary_sensor.adam_plugwise_notification") assert state assert state.state == STATE_ON @@ -64,3 +64,14 @@ async def test_adam_climate_binary_sensor_change( assert "unreachable" in state.attributes["warning_msg"][0] assert not state.attributes.get("error_msg") assert not state.attributes.get("other_msg") + + +async def test_p1_v4_binary_sensor_entity( + hass: HomeAssistant, mock_smile_p1_2: MagicMock, init_integration: MockConfigEntry +) -> None: + """Test of a Smile P1 related plugwise-notification binary_sensor.""" + state = hass.states.get("binary_sensor.smile_p1_plugwise_notification") + assert state + assert state.state == STATE_ON + assert "warning_msg" in state.attributes + assert "connected" in state.attributes["warning_msg"][0] diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index 70cef16bcdc..8368af8e5cc 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -3,19 +3,38 @@ from datetime import timedelta from unittest.mock import MagicMock, patch +from freezegun.api import FrozenDateTimeFactory from plugwise.exceptions import PlugwiseError import pytest from homeassistant.components.climate import ( + ATTR_CURRENT_TEMPERATURE, + ATTR_HVAC_ACTION, + ATTR_HVAC_MODE, + ATTR_HVAC_MODES, + ATTR_MAX_TEMP, + ATTR_MIN_TEMP, + ATTR_PRESET_MODE, + ATTR_PRESET_MODES, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, + ATTR_TARGET_TEMP_STEP, DOMAIN as CLIMATE_DOMAIN, + PRESET_AWAY, + PRESET_HOME, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, SERVICE_SET_TEMPERATURE, + HVACAction, HVACMode, ) +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_SUPPORTED_FEATURES, + ATTR_TEMPERATURE, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed @@ -28,61 +47,57 @@ async def test_adam_climate_entity_attributes( hass: HomeAssistant, 
mock_smile_adam: MagicMock, init_integration: MockConfigEntry ) -> None: """Test creation of adam climate device environment.""" - state = hass.states.get("climate.zone_lisa_wk") + state = hass.states.get("climate.woonkamer") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] - # hvac_action is not asserted as the fixture is not in line with recent firmware functionality + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.AUTO, HVACMode.HEAT] + assert ATTR_PRESET_MODES in state.attributes + assert "no_frost" in state.attributes[ATTR_PRESET_MODES] + assert PRESET_HOME in state.attributes[ATTR_PRESET_MODES] + assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOME + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.9 + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 17 + assert state.attributes[ATTR_TEMPERATURE] == 21.5 + assert state.attributes[ATTR_MIN_TEMP] == 0.0 + assert state.attributes[ATTR_MAX_TEMP] == 35.0 + assert state.attributes[ATTR_TARGET_TEMP_STEP] == 0.1 - assert "preset_modes" in state.attributes - assert "no_frost" in state.attributes["preset_modes"] - assert "home" in state.attributes["preset_modes"] - - assert state.attributes["current_temperature"] == 20.9 - assert state.attributes["preset_mode"] == "home" - assert state.attributes["supported_features"] == 17 - assert state.attributes["temperature"] == 21.5 - assert state.attributes["min_temp"] == 0.0 - assert state.attributes["max_temp"] == 35.0 - assert state.attributes["target_temp_step"] == 0.1 - - state = hass.states.get("climate.zone_thermostat_jessie") + state = hass.states.get("climate.jessie") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] - # hvac_action is not asserted as the fixture is not in line with recent firmware functionality - - assert "preset_modes" in state.attributes - assert "no_frost" in state.attributes["preset_modes"] - assert "home" in state.attributes["preset_modes"] - - assert state.attributes["current_temperature"] == 17.2 - assert state.attributes["preset_mode"] == "asleep" - assert state.attributes["temperature"] == 15.0 - assert state.attributes["min_temp"] == 0.0 - assert state.attributes["max_temp"] == 35.0 - assert state.attributes["target_temp_step"] == 0.1 + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.AUTO, HVACMode.HEAT] + assert ATTR_PRESET_MODES in state.attributes + assert "no_frost" in state.attributes[ATTR_PRESET_MODES] + assert PRESET_HOME in state.attributes[ATTR_PRESET_MODES] + assert state.attributes[ATTR_PRESET_MODE] == "asleep" + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 17.2 + assert state.attributes[ATTR_TEMPERATURE] == 15.0 + assert state.attributes[ATTR_MIN_TEMP] == 0.0 + assert state.attributes[ATTR_MAX_TEMP] == 35.0 + assert state.attributes[ATTR_TARGET_TEMP_STEP] == 0.1 async def test_adam_2_climate_entity_attributes( hass: HomeAssistant, mock_smile_adam_2: MagicMock, init_integration: MockConfigEntry ) -> None: """Test creation of adam climate device environment.""" - state = hass.states.get("climate.anna") + state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.HEAT - assert state.attributes["hvac_action"] == "preheating" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == 
HVACAction.PREHEATING + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.OFF, HVACMode.AUTO, HVACMode.HEAT, ] - state = hass.states.get("climate.lisa_badkamer") + state = hass.states.get("climate.bathroom") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "idle" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.OFF, HVACMode.AUTO, HVACMode.HEAT, @@ -90,15 +105,17 @@ async def test_adam_2_climate_entity_attributes( async def test_adam_3_climate_entity_attributes( - hass: HomeAssistant, mock_smile_adam_3: MagicMock, init_integration: MockConfigEntry + hass: HomeAssistant, + mock_smile_adam_3: MagicMock, + init_integration: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test creation of adam climate device environment.""" - state = hass.states.get("climate.anna") - + state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.COOL - assert state.attributes["hvac_action"] == "cooling" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.OFF, HVACMode.AUTO, HVACMode.COOL, @@ -107,7 +124,9 @@ async def test_adam_3_climate_entity_attributes( data.devices["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = ( "heating" ) - data.devices["ad4838d7d35c4d6ea796ee12ae5aedf8"]["control_state"] = "heating" + data.devices["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = ( + HVACAction.HEATING + ) data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ "cooling_state" ] = False @@ -115,22 +134,27 @@ async def test_adam_3_climate_entity_attributes( "heating_state" ] = True with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() - state = hass.states.get("climate.anna") - assert state - assert state.state == HVACMode.HEAT - assert state.attributes["hvac_action"] == "heating" - assert state.attributes["hvac_modes"] == [ - HVACMode.OFF, - HVACMode.AUTO, - HVACMode.HEAT, - ] + + state = hass.states.get("climate.living_room") + assert state + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + assert state.attributes[ATTR_HVAC_MODES] == [ + HVACMode.OFF, + HVACMode.AUTO, + HVACMode.HEAT, + ] + data = mock_smile_adam_3.async_update.return_value data.devices["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = ( "cooling" ) - data.devices["ad4838d7d35c4d6ea796ee12ae5aedf8"]["control_state"] = "cooling" + data.devices["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = ( + HVACAction.COOLING + ) data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ "cooling_state" ] = True @@ -138,30 +162,32 @@ async def test_adam_3_climate_entity_attributes( "heating_state" ] = False with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() - state = hass.states.get("climate.anna") - assert state - assert state.state == HVACMode.COOL - assert state.attributes["hvac_action"] == "cooling" - assert state.attributes["hvac_modes"] == [ - HVACMode.OFF, - 
HVACMode.AUTO, - HVACMode.COOL, - ] + + state = hass.states.get("climate.living_room") + assert state + assert state.state == HVACMode.COOL + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING + assert state.attributes[ATTR_HVAC_MODES] == [ + HVACMode.OFF, + HVACMode.AUTO, + HVACMode.COOL, + ] async def test_adam_climate_adjust_negative_testing( hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry ) -> None: - """Test exceptions of climate entities.""" + """Test PlugwiseError exception.""" mock_smile_adam.set_temperature.side_effect = PlugwiseError with pytest.raises(HomeAssistantError): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.zone_lisa_wk", "temperature": 25}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_TEMPERATURE: 25}, blocking=True, ) @@ -173,7 +199,7 @@ async def test_adam_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.zone_lisa_wk", "temperature": 25}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_TEMPERATURE: 25}, blocking=True, ) assert mock_smile_adam.set_temperature.call_count == 1 @@ -185,9 +211,9 @@ async def test_adam_climate_entity_climate_changes( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { - "entity_id": "climate.zone_lisa_wk", - "hvac_mode": "heat", - "temperature": 25, + ATTR_ENTITY_ID: "climate.woonkamer", + ATTR_HVAC_MODE: HVACMode.HEAT, + ATTR_TEMPERATURE: 25, }, blocking=True, ) @@ -196,43 +222,43 @@ async def test_adam_climate_entity_climate_changes( "c50f167537524366a5af7aa3942feb1e", {"setpoint": 25.0} ) - with pytest.raises(ServiceValidationError): + with pytest.raises(ServiceValidationError, match="Accepted range"): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.zone_lisa_wk", "temperature": 150}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_TEMPERATURE: 150}, blocking=True, ) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, - {"entity_id": "climate.zone_lisa_wk", "preset_mode": "away"}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_PRESET_MODE: PRESET_AWAY}, blocking=True, ) assert mock_smile_adam.set_preset.call_count == 1 mock_smile_adam.set_preset.assert_called_with( - "c50f167537524366a5af7aa3942feb1e", "away" + "c50f167537524366a5af7aa3942feb1e", PRESET_AWAY ) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {"entity_id": "climate.zone_lisa_wk", "hvac_mode": "heat"}, + {ATTR_ENTITY_ID: "climate.woonkamer", ATTR_HVAC_MODE: HVACMode.HEAT}, blocking=True, ) assert mock_smile_adam.set_schedule_state.call_count == 2 mock_smile_adam.set_schedule_state.assert_called_with( - "c50f167537524366a5af7aa3942feb1e", "off" + "c50f167537524366a5af7aa3942feb1e", HVACMode.OFF ) - with pytest.raises(HomeAssistantError): + with pytest.raises(ServiceValidationError, match="valid modes are"): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - "entity_id": "climate.zone_thermostat_jessie", - "hvac_mode": "dry", + ATTR_ENTITY_ID: "climate.jessie", + ATTR_HVAC_MODE: HVACMode.DRY, }, blocking=True, ) @@ -251,8 +277,8 @@ async def test_adam_climate_off_mode_change( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - "entity_id": "climate.slaapkamer", - "hvac_mode": "heat", + ATTR_ENTITY_ID: "climate.slaapkamer", + ATTR_HVAC_MODE: HVACMode.HEAT, }, blocking=True, ) @@ -267,8 +293,8 @@ async def test_adam_climate_off_mode_change( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - "entity_id": 
"climate.kinderkamer", - "hvac_mode": "off", + ATTR_ENTITY_ID: "climate.kinderkamer", + ATTR_HVAC_MODE: HVACMode.OFF, }, blocking=True, ) @@ -283,8 +309,8 @@ async def test_adam_climate_off_mode_change( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - "entity_id": "climate.logeerkamer", - "hvac_mode": "heat", + ATTR_ENTITY_ID: "climate.logeerkamer", + ATTR_HVAC_MODE: HVACMode.HEAT, }, blocking=True, ) @@ -301,20 +327,20 @@ async def test_anna_climate_entity_attributes( state = hass.states.get("climate.anna") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "heating" - assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT_COOL] + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.AUTO, HVACMode.HEAT_COOL] - assert "no_frost" in state.attributes["preset_modes"] - assert "home" in state.attributes["preset_modes"] + assert "no_frost" in state.attributes[ATTR_PRESET_MODES] + assert PRESET_HOME in state.attributes[ATTR_PRESET_MODES] - assert state.attributes["current_temperature"] == 19.3 - assert state.attributes["preset_mode"] == "home" - assert state.attributes["supported_features"] == 18 - assert state.attributes["target_temp_high"] == 30 - assert state.attributes["target_temp_low"] == 20.5 - assert state.attributes["min_temp"] == 4 - assert state.attributes["max_temp"] == 30 - assert state.attributes["target_temp_step"] == 0.1 + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 19.3 + assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOME + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 18 + assert state.attributes[ATTR_TARGET_TEMP_HIGH] == 30 + assert state.attributes[ATTR_TARGET_TEMP_LOW] == 20.5 + assert state.attributes[ATTR_MIN_TEMP] == 4 + assert state.attributes[ATTR_MAX_TEMP] == 30 + assert state.attributes[ATTR_TARGET_TEMP_STEP] == 0.1 async def test_anna_2_climate_entity_attributes( @@ -326,14 +352,14 @@ async def test_anna_2_climate_entity_attributes( state = hass.states.get("climate.anna") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "cooling" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.AUTO, HVACMode.HEAT_COOL, ] - assert state.attributes["supported_features"] == 18 - assert state.attributes["target_temp_high"] == 30 - assert state.attributes["target_temp_low"] == 20.5 + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 18 + assert state.attributes[ATTR_TARGET_TEMP_HIGH] == 30 + assert state.attributes[ATTR_TARGET_TEMP_LOW] == 20.5 async def test_anna_3_climate_entity_attributes( @@ -345,8 +371,8 @@ async def test_anna_3_climate_entity_attributes( state = hass.states.get("climate.anna") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_action"] == "idle" - assert state.attributes["hvac_modes"] == [ + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert state.attributes[ATTR_HVAC_MODES] == [ HVACMode.AUTO, HVACMode.HEAT_COOL, ] @@ -356,12 +382,17 @@ async def test_anna_climate_entity_climate_changes( hass: HomeAssistant, mock_smile_anna: MagicMock, init_integration: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test handling of user requests in anna climate device environment.""" await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.anna", "target_temp_high": 30, 
"target_temp_low": 20}, + { + ATTR_ENTITY_ID: "climate.anna", + ATTR_TARGET_TEMP_HIGH: 30, + ATTR_TARGET_TEMP_LOW: 20, + }, blocking=True, ) assert mock_smile_anna.set_temperature.call_count == 1 @@ -373,18 +404,18 @@ async def test_anna_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, - {"entity_id": "climate.anna", "preset_mode": "away"}, + {ATTR_ENTITY_ID: "climate.anna", ATTR_PRESET_MODE: PRESET_AWAY}, blocking=True, ) assert mock_smile_anna.set_preset.call_count == 1 mock_smile_anna.set_preset.assert_called_with( - "c784ee9fdab44e1395b8dee7d7a497d5", "away" + "c784ee9fdab44e1395b8dee7d7a497d5", PRESET_AWAY ) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {"entity_id": "climate.anna", "hvac_mode": "auto"}, + {ATTR_ENTITY_ID: "climate.anna", ATTR_HVAC_MODE: HVACMode.AUTO}, blocking=True, ) # hvac_mode is already auto so not called. @@ -393,18 +424,21 @@ async def test_anna_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {"entity_id": "climate.anna", "hvac_mode": "heat_cool"}, + {ATTR_ENTITY_ID: "climate.anna", ATTR_HVAC_MODE: HVACMode.HEAT_COOL}, blocking=True, ) assert mock_smile_anna.set_schedule_state.call_count == 1 mock_smile_anna.set_schedule_state.assert_called_with( - "c784ee9fdab44e1395b8dee7d7a497d5", "off" + "c784ee9fdab44e1395b8dee7d7a497d5", HVACMode.OFF ) + data = mock_smile_anna.async_update.return_value data.devices["3cb70739631c4d17a86b8b12e8a5161b"].pop("available_schedules") with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() + state = hass.states.get("climate.anna") assert state.state == HVACMode.HEAT - assert state.attributes["hvac_modes"] == [HVACMode.HEAT_COOL] + assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.HEAT_COOL] diff --git a/tests/components/plugwise/test_config_flow.py b/tests/components/plugwise/test_config_flow.py index 4b7c567baa8..9e1e29f4a48 100644 --- a/tests/components/plugwise/test_config_flow.py +++ b/tests/components/plugwise/test_config_flow.py @@ -1,19 +1,18 @@ """Test the Plugwise config flow.""" from ipaddress import ip_address -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, MagicMock from plugwise.exceptions import ( ConnectionFailedError, InvalidAuthentication, InvalidSetupError, InvalidXMLError, - ResponseError, UnsupportedDeviceError, ) import pytest -from homeassistant.components.plugwise.const import API, DEFAULT_PORT, DOMAIN, PW_TYPE +from homeassistant.components.plugwise.const import DEFAULT_PORT, DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import ( @@ -36,6 +35,7 @@ TEST_PASSWORD = "test_password" TEST_PORT = 81 TEST_USERNAME = "smile" TEST_USERNAME2 = "stretch" +MOCK_SMILE_ID = "smile12345" TEST_DISCOVERY = ZeroconfServiceInfo( ip_address=ip_address(TEST_HOST), @@ -95,22 +95,6 @@ TEST_DISCOVERY_ADAM = ZeroconfServiceInfo( ) -@pytest.fixture(name="mock_smile") -def mock_smile(): - """Create a Mock Smile for testing exceptions.""" - with patch( - "homeassistant.components.plugwise.config_flow.Smile", - ) as smile_mock: - smile_mock.ConnectionFailedError = ConnectionFailedError - smile_mock.InvalidAuthentication = InvalidAuthentication - 
smile_mock.InvalidSetupError = InvalidSetupError - smile_mock.InvalidXMLError = InvalidXMLError - smile_mock.ResponseError = ResponseError - smile_mock.UnsupportedDeviceError = UnsupportedDeviceError - smile_mock.return_value.connect.return_value = True - yield smile_mock.return_value - - async def test_form( hass: HomeAssistant, mock_setup_entry: AsyncMock, @@ -140,12 +124,13 @@ async def test_form( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: DEFAULT_PORT, CONF_USERNAME: TEST_USERNAME, - PW_TYPE: API, } assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smile_config_flow.connect.mock_calls) == 1 + assert result2["result"].unique_id == MOCK_SMILE_ID + @pytest.mark.parametrize( ("discovery", "username"), @@ -165,11 +150,12 @@ async def test_zeroconf_flow( result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_ZEROCONF}, - data=discovery, + data=TEST_DISCOVERY, ) assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} assert result.get("step_id") == "user" + assert "flow_id" in result result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -183,13 +169,14 @@ async def test_zeroconf_flow( CONF_HOST: TEST_HOST, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: DEFAULT_PORT, - CONF_USERNAME: username, - PW_TYPE: API, + CONF_USERNAME: TEST_USERNAME, } assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smile_config_flow.connect.mock_calls) == 1 + assert result2["result"].unique_id == MOCK_SMILE_ID + async def test_zeroconf_flow_stretch( hass: HomeAssistant, @@ -205,6 +192,7 @@ async def test_zeroconf_flow_stretch( assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} assert result.get("step_id") == "user" + assert "flow_id" in result result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -219,7 +207,6 @@ async def test_zeroconf_flow_stretch( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: DEFAULT_PORT, CONF_USERNAME: TEST_USERNAME2, - PW_TYPE: API, } assert len(mock_setup_entry.mock_calls) == 1 @@ -276,7 +263,6 @@ async def test_zercoconf_discovery_update_configuration( (InvalidAuthentication, "invalid_auth"), (InvalidSetupError, "invalid_setup"), (InvalidXMLError, "response_error"), - (ResponseError, "response_error"), (RuntimeError, "unknown"), (UnsupportedDeviceError, "unsupported"), ], @@ -296,6 +282,7 @@ async def test_flow_errors( assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} assert result.get("step_id") == "user" + assert "flow_id" in result mock_smile_config_flow.connect.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( @@ -323,13 +310,75 @@ async def test_flow_errors( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: DEFAULT_PORT, CONF_USERNAME: TEST_USERNAME, - PW_TYPE: API, } assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smile_config_flow.connect.mock_calls) == 2 +async def test_user_abort_existing_anna( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_smile_config_flow: MagicMock, +) -> None: + """Test the full user configuration flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + title=CONF_NAME, + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + }, + unique_id=MOCK_SMILE_ID, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_USER} + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + 
CONF_HOST: TEST_HOST, + CONF_PASSWORD: TEST_PASSWORD, + }, + ) + await hass.async_block_till_done() + + assert result2.get("type") is FlowResultType.ABORT + assert result2.get("reason") == "already_configured" + + +async def test_zeroconf_abort_existing_anna( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_smile_config_flow: MagicMock, +) -> None: + """Test the full user configuration flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + title=CONF_NAME, + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + }, + unique_id=TEST_HOSTNAME, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_ZEROCONF}, + data=TEST_DISCOVERY_ANNA, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + async def test_zeroconf_abort_anna_with_existing_config_entries( hass: HomeAssistant, mock_smile_adam: MagicMock, @@ -355,9 +404,9 @@ async def test_zeroconf_abort_anna_with_adam(hass: HomeAssistant) -> None: assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "user" - flows_in_progress = hass.config_entries.flow.async_progress() + flows_in_progress = hass.config_entries.flow._handler_progress_index[DOMAIN] assert len(flows_in_progress) == 1 - assert flows_in_progress[0]["context"]["product"] == "smile_thermo" + assert list(flows_in_progress)[0].product == "smile_thermo" # Discover Adam, Anna should be aborted and no longer present result2 = await hass.config_entries.flow.async_init( @@ -369,9 +418,9 @@ async def test_zeroconf_abort_anna_with_adam(hass: HomeAssistant) -> None: assert result2.get("type") is FlowResultType.FORM assert result2.get("step_id") == "user" - flows_in_progress = hass.config_entries.flow.async_progress() + flows_in_progress = hass.config_entries.flow._handler_progress_index[DOMAIN] assert len(flows_in_progress) == 1 - assert flows_in_progress[0]["context"]["product"] == "smile_open_therm" + assert list(flows_in_progress)[0].product == "smile_open_therm" # Discover Anna again, Anna should be aborted directly result3 = await hass.config_entries.flow.async_init( @@ -383,6 +432,6 @@ async def test_zeroconf_abort_anna_with_adam(hass: HomeAssistant) -> None: assert result3.get("reason") == "anna_with_adam" # Adam should still be there - flows_in_progress = hass.config_entries.flow.async_progress() + flows_in_progress = hass.config_entries.flow._handler_progress_index[DOMAIN] assert len(flows_in_progress) == 1 - assert flows_in_progress[0]["context"]["product"] == "smile_open_therm" + assert list(flows_in_progress)[0].product == "smile_open_therm" diff --git a/tests/components/plugwise/test_init.py b/tests/components/plugwise/test_init.py index 26aedf864dc..014003d29d0 100644 --- a/tests/components/plugwise/test_init.py +++ b/tests/components/plugwise/test_init.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import MagicMock, patch +from freezegun.api import FrozenDateTimeFactory from plugwise.exceptions import ( ConnectionFailedError, InvalidAuthentication, @@ -18,8 +19,6 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry, 
async_fire_time_changed @@ -34,14 +33,18 @@ SECONDARY_ID = ( TOM = { "01234567890abcdefghijklmnopqrstu": { "available": True, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "f871b8c4d63549319221e294e4f88074", "model": "Tom/Floor", - "name": "Tom Zolder", + "name": "Tom Badkamer 2", + "binary_sensors": { + "low_battery": False, + }, "sensors": { "battery": 99, + "setpoint": 18.0, "temperature": 18.6, "temperature_difference": 2.3, "valve_position": 0.0, @@ -74,7 +77,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED @@ -107,6 +109,28 @@ async def test_gateway_config_entry_not_ready( assert mock_config_entry.state is entry_state +async def test_device_in_dr( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smile_p1: MagicMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Test Gateway device registry data.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "a455b61e52394b2db5081ce025a430f3")} + ) + assert device_entry.hw_version == "AME Smile 2.0 board" + assert device_entry.manufacturer == "Plugwise" + assert device_entry.model == "Gateway" + assert device_entry.model_id == "smile" + assert device_entry.name == "Smile P1" + assert device_entry.sw_version == "4.4.2" + + @pytest.mark.parametrize( ("entitydata", "old_unique_id", "new_unique_id"), [ @@ -206,13 +230,13 @@ async def test_update_device( mock_smile_adam_2: MagicMock, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test a clean-up of the device_registry.""" - utcnow = dt_util.utcnow() data = mock_smile_adam_2.async_update.return_value mock_config_entry.add_to_hass(hass) - assert await async_setup_component(hass, DOMAIN, {}) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert ( @@ -221,7 +245,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 29 + == 38 ) assert ( len( @@ -229,13 +253,22 @@ async def test_update_device( device_registry, mock_config_entry.entry_id ) ) - == 6 + == 8 ) # Add a 2nd Tom/Floor data.devices.update(TOM) + data.devices["f871b8c4d63549319221e294e4f88074"]["thermostats"].update( + { + "secondary": [ + "01234567890abcdefghijklmnopqrstu", + "1772a4ea304041adb83f357b751341ff", + ] + } + ) with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - async_fire_time_changed(hass, utcnow + timedelta(minutes=1)) + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert ( @@ -244,7 +277,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 34 + == 45 ) assert ( len( @@ -252,7 +285,7 @@ async def test_update_device( device_registry, mock_config_entry.entry_id ) ) - == 7 + == 9 ) item_list: list[str] = [] for device_entry in list(device_registry.devices.values()): @@ -260,9 +293,13 @@ async def test_update_device( assert "01234567890abcdefghijklmnopqrstu" in item_list # Remove the existing Tom/Floor + data.devices["f871b8c4d63549319221e294e4f88074"]["thermostats"].update( + 
{"secondary": ["01234567890abcdefghijklmnopqrstu"]} + ) data.devices.pop("1772a4ea304041adb83f357b751341ff") with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - async_fire_time_changed(hass, utcnow + timedelta(minutes=1)) + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert ( @@ -271,7 +308,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 29 + == 38 ) assert ( len( @@ -279,7 +316,7 @@ async def test_update_device( device_registry, mock_config_entry.entry_id ) ) - == 6 + == 8 ) item_list: list[str] = [] for device_entry in list(device_registry.devices.values()): diff --git a/tests/components/plugwise/test_number.py b/tests/components/plugwise/test_number.py index e10a7caa9e9..fdceb042669 100644 --- a/tests/components/plugwise/test_number.py +++ b/tests/components/plugwise/test_number.py @@ -2,6 +2,8 @@ from unittest.mock import MagicMock +import pytest + from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, @@ -9,6 +11,7 @@ from homeassistant.components.number import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from tests.common import MockConfigEntry @@ -101,3 +104,19 @@ async def test_adam_temperature_offset_change( mock_smile_adam.set_number.assert_called_with( "6a3bf693d05e48e0b460c815a4fdd09d", "temperature_offset", 1.0 ) + + +async def test_adam_temperature_offset_out_of_bounds_change( + hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry +) -> None: + """Test changing of the temperature_offset number beyond limits.""" + with pytest.raises(ServiceValidationError, match="valid range"): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: "number.zone_thermostat_jessie_temperature_offset", + ATTR_VALUE: 3.0, + }, + blocking=True, + ) diff --git a/tests/components/plugwise/test_select.py b/tests/components/plugwise/test_select.py index b9dec283bc4..8891a88bb91 100644 --- a/tests/components/plugwise/test_select.py +++ b/tests/components/plugwise/test_select.py @@ -2,6 +2,8 @@ from unittest.mock import MagicMock +import pytest + from homeassistant.components.select import ( ATTR_OPTION, DOMAIN as SELECT_DOMAIN, @@ -9,6 +11,7 @@ from homeassistant.components.select import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from tests.common import MockConfigEntry @@ -18,7 +21,7 @@ async def test_adam_select_entities( ) -> None: """Test a thermostat Select.""" - state = hass.states.get("select.zone_lisa_wk_thermostat_schedule") + state = hass.states.get("select.woonkamer_thermostat_schedule") assert state assert state.state == "GF7 Woonkamer" @@ -32,7 +35,7 @@ async def test_adam_change_select_entity( SELECT_DOMAIN, SERVICE_SELECT_OPTION, { - ATTR_ENTITY_ID: "select.zone_lisa_wk_thermostat_schedule", + ATTR_ENTITY_ID: "select.woonkamer_thermostat_schedule", ATTR_OPTION: "Badkamer Schema", }, blocking=True, @@ -65,8 +68,8 @@ async def test_adam_select_regulation_mode( SELECT_DOMAIN, SERVICE_SELECT_OPTION, { - "entity_id": "select.adam_regulation_mode", - "option": "heating", + ATTR_ENTITY_ID: "select.adam_regulation_mode", + ATTR_OPTION: "heating", }, blocking=True, ) @@ -77,3 +80,29 @@ async def test_adam_select_regulation_mode( "heating", "on", ) + + +async def 
test_legacy_anna_select_entities( + hass: HomeAssistant, + mock_smile_legacy_anna: MagicMock, + init_integration: MockConfigEntry, +) -> None: + """Test not creating a select-entity for a legacy Anna without a thermostat-schedule.""" + assert not hass.states.get("select.anna_thermostat_schedule") + + +async def test_adam_select_unavailable_regulation_mode( + hass: HomeAssistant, mock_smile_anna: MagicMock, init_integration: MockConfigEntry +) -> None: + """Test a regulation_mode non-available preset.""" + + with pytest.raises(ServiceValidationError, match="valid options"): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.anna_thermostat_schedule", + ATTR_OPTION: "freezing", + }, + blocking=True, + ) diff --git a/tests/components/plugwise/test_sensor.py b/tests/components/plugwise/test_sensor.py index 9a20a37824d..f10f3f00933 100644 --- a/tests/components/plugwise/test_sensor.py +++ b/tests/components/plugwise/test_sensor.py @@ -2,11 +2,13 @@ from unittest.mock import MagicMock +import pytest + from homeassistant.components.plugwise.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity +import homeassistant.helpers.entity_registry as er from tests.common import MockConfigEntry @@ -58,7 +60,7 @@ async def test_unique_id_migration_humidity( # Entry to migrate entity_registry.async_get_or_create( - Platform.SENSOR, + SENSOR_DOMAIN, DOMAIN, "f61f1a2535f54f52ad006a3d18e459ca-relative_humidity", config_entry=mock_config_entry, @@ -67,7 +69,7 @@ async def test_unique_id_migration_humidity( ) # Entry not needing migration entity_registry.async_get_or_create( - Platform.SENSOR, + SENSOR_DOMAIN, DOMAIN, "f61f1a2535f54f52ad006a3d18e459ca-battery", config_entry=mock_config_entry, @@ -135,6 +137,7 @@ async def test_p1_dsmr_sensor_entities( assert not state +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_p1_3ph_dsmr_sensor_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -154,21 +157,23 @@ async def test_p1_3ph_dsmr_sensor_entities( assert state assert int(state.state) == 2080 - entity_id = "sensor.p1_voltage_phase_one" - state = hass.states.get(entity_id) - assert not state - - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done() - - await hass.config_entries.async_reload(init_integration.entry_id) - await hass.async_block_till_done() - + # Default disabled sensor test state = hass.states.get("sensor.p1_voltage_phase_one") assert state assert float(state.state) == 233.2 +async def test_p1_3ph_dsmr_sensor_disabled_entities( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_smile_p1_2: MagicMock, + init_integration: MockConfigEntry, +) -> None: + """Test disabled power related sensor entities intent.""" + state = hass.states.get("sensor.p1_voltage_phase_one") + assert not state + + async def test_stretch_sensor_entities( hass: HomeAssistant, mock_stretch: MagicMock, init_integration: MockConfigEntry ) -> None: diff --git a/tests/components/plugwise/test_switch.py b/tests/components/plugwise/test_switch.py index 5da76bb0ebd..fa8a8a434e7 100644 --- a/tests/components/plugwise/test_switch.py +++ b/tests/components/plugwise/test_switch.py @@ -8,14 +8,16 @@ import pytest from 
homeassistant.components.plugwise.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( + ATTR_ENTITY_ID, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, + STATE_OFF, STATE_ON, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.entity_registry as er from tests.common import MockConfigEntry @@ -43,26 +45,26 @@ async def test_adam_climate_switch_negative_testing( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {"entity_id": "switch.cv_pomp_relay"}, + {ATTR_ENTITY_ID: "switch.cv_pomp_relay"}, blocking=True, ) assert mock_smile_adam.set_switch_state.call_count == 1 mock_smile_adam.set_switch_state.assert_called_with( - "78d1126fc4c743db81b61c20e88342a7", None, "relay", "off" + "78d1126fc4c743db81b61c20e88342a7", None, "relay", STATE_OFF ) with pytest.raises(HomeAssistantError): await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {"entity_id": "switch.fibaro_hc2_relay"}, + {ATTR_ENTITY_ID: "switch.fibaro_hc2_relay"}, blocking=True, ) assert mock_smile_adam.set_switch_state.call_count == 2 mock_smile_adam.set_switch_state.assert_called_with( - "a28f588dc4a049a483fd03a30361ad3a", None, "relay", "on" + "a28f588dc4a049a483fd03a30361ad3a", None, "relay", STATE_ON ) @@ -73,37 +75,37 @@ async def test_adam_climate_switch_changes( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {"entity_id": "switch.cv_pomp_relay"}, + {ATTR_ENTITY_ID: "switch.cv_pomp_relay"}, blocking=True, ) assert mock_smile_adam.set_switch_state.call_count == 1 mock_smile_adam.set_switch_state.assert_called_with( - "78d1126fc4c743db81b61c20e88342a7", None, "relay", "off" + "78d1126fc4c743db81b61c20e88342a7", None, "relay", STATE_OFF ) await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TOGGLE, - {"entity_id": "switch.fibaro_hc2_relay"}, + {ATTR_ENTITY_ID: "switch.fibaro_hc2_relay"}, blocking=True, ) assert mock_smile_adam.set_switch_state.call_count == 2 mock_smile_adam.set_switch_state.assert_called_with( - "a28f588dc4a049a483fd03a30361ad3a", None, "relay", "off" + "a28f588dc4a049a483fd03a30361ad3a", None, "relay", STATE_OFF ) await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {"entity_id": "switch.fibaro_hc2_relay"}, + {ATTR_ENTITY_ID: "switch.fibaro_hc2_relay"}, blocking=True, ) assert mock_smile_adam.set_switch_state.call_count == 3 mock_smile_adam.set_switch_state.assert_called_with( - "a28f588dc4a049a483fd03a30361ad3a", None, "relay", "on" + "a28f588dc4a049a483fd03a30361ad3a", None, "relay", STATE_ON ) @@ -127,34 +129,34 @@ async def test_stretch_switch_changes( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {"entity_id": "switch.koelkast_92c4a_relay"}, + {ATTR_ENTITY_ID: "switch.koelkast_92c4a_relay"}, blocking=True, ) assert mock_stretch.set_switch_state.call_count == 1 mock_stretch.set_switch_state.assert_called_with( - "e1c884e7dede431dadee09506ec4f859", None, "relay", "off" + "e1c884e7dede431dadee09506ec4f859", None, "relay", STATE_OFF ) await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TOGGLE, - {"entity_id": "switch.droger_52559_relay"}, + {ATTR_ENTITY_ID: "switch.droger_52559_relay"}, blocking=True, ) assert mock_stretch.set_switch_state.call_count == 2 mock_stretch.set_switch_state.assert_called_with( - "cfe95cf3de1948c0b8955125bf754614", None, "relay", "off" + "cfe95cf3de1948c0b8955125bf754614", None, "relay", 
STATE_OFF ) await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {"entity_id": "switch.droger_52559_relay"}, + {ATTR_ENTITY_ID: "switch.droger_52559_relay"}, blocking=True, ) assert mock_stretch.set_switch_state.call_count == 3 mock_stretch.set_switch_state.assert_called_with( - "cfe95cf3de1948c0b8955125bf754614", None, "relay", "on" + "cfe95cf3de1948c0b8955125bf754614", None, "relay", STATE_ON ) diff --git a/tests/components/point/__init__.py b/tests/components/point/__init__.py index 9fb6eea9ac7..254eef2e936 100644 --- a/tests/components/point/__init__.py +++ b/tests/components/point/__init__.py @@ -1 +1,12 @@ """Tests for the Point component.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/point/test_config_flow.py b/tests/components/point/test_config_flow.py index 71f3f31ce8d..bd1e3cfac29 100644 --- a/tests/components/point/test_config_flow.py +++ b/tests/components/point/test_config_flow.py @@ -1,153 +1,172 @@ -"""Tests for the Point config flow.""" +"""Test the Minut Point config flow.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest -from homeassistant.components.point import DOMAIN, config_flow -from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET +from homeassistant import config_entries +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.point.const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + +CLIENT_ID = "1234" +CLIENT_SECRET = "5678" + +REDIRECT_URL = "https://example.com/auth/external/callback" -def init_config_flow( - hass: HomeAssistant, side_effect: type[Exception] | None = None -) -> config_flow.PointFlowHandler: - """Init a configuration flow.""" - config_flow.register_flow_implementation(hass, DOMAIN, "id", "secret") - flow = config_flow.PointFlowHandler() - flow._get_authorization_url = AsyncMock( - return_value="https://example.com", side_effect=side_effect +@pytest.fixture(autouse=True) +async def setup_credentials(hass: HomeAssistant) -> None: + """Fixture to setup credentials.""" + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential(CLIENT_ID, CLIENT_SECRET), ) - flow.hass = hass - return flow -@pytest.fixture -def is_authorized() -> bool: - """Set PointSession authorized.""" - return True - - -@pytest.fixture -def mock_pypoint(is_authorized): - """Mock pypoint.""" - with patch( - "homeassistant.components.point.config_flow.PointSession" - ) as PointSession: - PointSession.return_value.get_access_token = AsyncMock( - return_value={"access_token": "boo"} - ) - PointSession.return_value.is_authorized = is_authorized - PointSession.return_value.user = AsyncMock( - 
return_value={"email": "john.doe@example.com"} - ) - yield PointSession - - -async def test_abort_if_no_implementation_registered(hass: HomeAssistant) -> None: - """Test we abort if no implementation is registered.""" - flow = config_flow.PointFlowHandler() - flow.hass = hass - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_flows" - - -async def test_abort_if_already_setup(hass: HomeAssistant) -> None: - """Test we abort if Point is already setup.""" - flow = init_config_flow(hass) - - with patch.object(hass.config_entries, "async_entries", return_value=[{}]): - result = await flow.async_step_user() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_setup" - - with patch.object(hass.config_entries, "async_entries", return_value=[{}]): - result = await flow.async_step_import() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_setup" - - -async def test_full_flow_implementation(hass: HomeAssistant, mock_pypoint) -> None: - """Test registering an implementation and finishing flow works.""" - config_flow.register_flow_implementation(hass, "test-other", None, None) - flow = init_config_flow(hass) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await flow.async_step_user({"flow_impl": "test"}) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - assert result["description_placeholders"] == { - "authorization_url": "https://example.com" - } - - result = await flow.async_step_code("123ABC") - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"]["refresh_args"] == { - CONF_CLIENT_ID: "id", - CONF_CLIENT_SECRET: "secret", - } - assert result["title"] == "john.doe@example.com" - assert result["data"]["token"] == {"access_token": "boo"} - - -async def test_step_import(hass: HomeAssistant, mock_pypoint) -> None: - """Test that we trigger import when configuring with client.""" - flow = init_config_flow(hass) - - result = await flow.async_step_import() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - - -@pytest.mark.parametrize("is_authorized", [False]) -async def test_wrong_code_flow_implementation( - hass: HomeAssistant, mock_pypoint +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, ) -> None: - """Test wrong code.""" - flow = init_config_flow(hass) + """Check full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( # noqa: SLF001 + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT_URL, + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + f"&redirect_uri={REDIRECT_URL}" + f"&state={state}" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + "user_id": "abcd", + }, + ) + + with patch( + 
"homeassistant.components.point.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == "abcd" + assert result["result"].data["token"]["user_id"] == "abcd" + assert result["result"].data["token"]["type"] == "Bearer" + assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" + assert result["result"].data["token"]["expires_in"] == 60 + assert result["result"].data["token"]["access_token"] == "mock-access-token" + assert "webhook_id" in result["result"].data + + +@pytest.mark.parametrize( + ("unique_id", "expected", "expected_unique_id"), + [ + ("abcd", "reauth_successful", "abcd"), + (None, "reauth_successful", "abcd"), + ("abcde", "wrong_account", "abcde"), + ], + ids=("correct-unique_id", "missing-unique_id", "wrong-unique_id-abort"), +) +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauthentication_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + unique_id: str | None, + expected: str, + expected_unique_id: str, +) -> None: + """Test reauthentication flow.""" + old_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=unique_id, + version=1, + data={"id": "timmo", "auth_implementation": DOMAIN}, + ) + old_entry.add_to_hass(hass) + + result = await old_entry.start_reauth_flow(hass) + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT_URL, + }, + ) + client = await hass_client_no_auth() + await client.get(f"/auth/external/callback?code=abcd&state={state}") + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + "user_id": "abcd", + }, + ) + + with ( + patch("homeassistant.components.point.api.AsyncConfigEntryAuth"), + patch( + f"homeassistant.components.{DOMAIN}.async_setup_entry", return_value=True + ), + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - result = await flow.async_step_code("123ABC") assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "auth_error" + assert result["reason"] == expected + assert old_entry.unique_id == expected_unique_id -async def test_not_pick_implementation_if_only_one(hass: HomeAssistant) -> None: - """Test we allow picking implementation if we have one flow_imp.""" - flow = init_config_flow(hass) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - - -async def test_abort_if_timeout_generating_auth_url(hass: HomeAssistant) -> None: - """Test we abort if generating authorize url fails.""" - flow = init_config_flow(hass, side_effect=TimeoutError) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "authorize_url_timeout" - - -async def test_abort_if_exception_generating_auth_url(hass: HomeAssistant) -> None: - """Test we abort if generating authorize url blows up.""" - flow = init_config_flow(hass, side_effect=ValueError) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.ABORT - 
assert result["reason"] == "unknown_authorize_url_generation" - - -async def test_abort_no_code(hass: HomeAssistant) -> None: - """Test if no code is given to step_code.""" - flow = init_config_flow(hass) - - result = await flow.async_step_code() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_code" +async def test_import_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test import flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT} + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "pick_implementation" diff --git a/tests/components/powerfox/__init__.py b/tests/components/powerfox/__init__.py new file mode 100644 index 00000000000..d24e52eba9b --- /dev/null +++ b/tests/components/powerfox/__init__.py @@ -0,0 +1,14 @@ +"""Tests for the Powerfox integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +MOCK_DIRECT_HOST = "1.1.1.1" + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the integration.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/powerfox/conftest.py b/tests/components/powerfox/conftest.py new file mode 100644 index 00000000000..14ccc5996e5 --- /dev/null +++ b/tests/components/powerfox/conftest.py @@ -0,0 +1,87 @@ +"""Common fixtures for the Powerfox tests.""" + +from collections.abc import Generator +from datetime import UTC, datetime +from unittest.mock import AsyncMock, patch + +from powerfox import Device, DeviceType, PowerMeter, WaterMeter +import pytest + +from homeassistant.components.powerfox.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.powerfox.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_powerfox_client() -> Generator[AsyncMock]: + """Mock a Powerfox client.""" + with ( + patch( + "homeassistant.components.powerfox.Powerfox", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.powerfox.config_flow.Powerfox", + new=mock_client, + ), + ): + client = mock_client.return_value + client.all_devices.return_value = [ + Device( + id="9x9x1f12xx3x", + date_added=datetime(2024, 11, 26, 9, 22, 35, tzinfo=UTC), + main_device=True, + bidirectional=True, + type=DeviceType.POWER_METER, + name="Poweropti", + ), + Device( + id="9x9x1f12xx4x", + date_added=datetime(2024, 11, 26, 9, 22, 35, tzinfo=UTC), + main_device=False, + bidirectional=False, + type=DeviceType.COLD_WATER_METER, + name="Wateropti", + ), + ] + client.device.side_effect = [ + PowerMeter( + outdated=False, + timestamp=datetime(2024, 11, 26, 10, 48, 51, tzinfo=UTC), + power=111, + energy_usage=1111.111, + energy_return=111.111, + energy_usage_high_tariff=111.111, + energy_usage_low_tariff=111.111, + ), + WaterMeter( + outdated=False, + timestamp=datetime(2024, 11, 26, 10, 48, 51, tzinfo=UTC), + cold_water=1111.111, + warm_water=0.0, + ), + ] + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a Powerfox config entry.""" + return MockConfigEntry( + domain=DOMAIN, + 
title="Powerfox", + data={ + CONF_EMAIL: "test@powerfox.test", + CONF_PASSWORD: "test-password", + }, + ) diff --git a/tests/components/powerfox/snapshots/test_diagnostics.ambr b/tests/components/powerfox/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..781e7b8c0d5 --- /dev/null +++ b/tests/components/powerfox/snapshots/test_diagnostics.ambr @@ -0,0 +1,26 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'devices': list([ + dict({ + 'power_meter': dict({ + 'energy_return': 111.111, + 'energy_usage': 1111.111, + 'energy_usage_high_tariff': 111.111, + 'energy_usage_low_tariff': 111.111, + 'outdated': False, + 'power': 111, + 'timestamp': '2024-11-26 10:48:51', + }), + }), + dict({ + 'water_meter': dict({ + 'cold_water': 1111.111, + 'outdated': False, + 'timestamp': '2024-11-26 10:48:51', + 'warm_water': 0.0, + }), + }), + ]), + }) +# --- diff --git a/tests/components/powerfox/snapshots/test_sensor.ambr b/tests/components/powerfox/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..dda162d4eeb --- /dev/null +++ b/tests/components/powerfox/snapshots/test_sensor.ambr @@ -0,0 +1,358 @@ +# serializer version: 1 +# name: test_all_sensors[sensor.poweropti_energy_return-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_energy_return', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy return', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_return', + 'unique_id': '9x9x1f12xx3x_energy_return', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_return-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Poweropti Energy return', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_energy_return', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111.111', + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_energy_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy usage', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_usage', + 'unique_id': '9x9x1f12xx3x_energy_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Poweropti Energy usage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_energy_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , 
+ 'state': '1111.111', + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage_high_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_energy_usage_high_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy usage high tariff', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_usage_high_tariff', + 'unique_id': '9x9x1f12xx3x_energy_usage_high_tariff', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage_high_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Poweropti Energy usage high tariff', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_energy_usage_high_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111.111', + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage_low_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_energy_usage_low_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy usage low tariff', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_usage_low_tariff', + 'unique_id': '9x9x1f12xx3x_energy_usage_low_tariff', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_energy_usage_low_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Poweropti Energy usage low tariff', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_energy_usage_low_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111.111', + }) +# --- +# name: test_all_sensors[sensor.poweropti_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.poweropti_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '9x9x1f12xx3x_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.poweropti_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Poweropti 
Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.poweropti_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_all_sensors[sensor.wateropti_cold_water-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wateropti_cold_water', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cold water', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cold_water', + 'unique_id': '9x9x1f12xx4x_cold_water', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.wateropti_cold_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Wateropti Cold water', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wateropti_cold_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1111.111', + }) +# --- +# name: test_all_sensors[sensor.wateropti_warm_water-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wateropti_warm_water', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Warm water', + 'platform': 'powerfox', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'warm_water', + 'unique_id': '9x9x1f12xx4x_warm_water', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.wateropti_warm_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Wateropti Warm water', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wateropti_warm_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- diff --git a/tests/components/powerfox/test_config_flow.py b/tests/components/powerfox/test_config_flow.py new file mode 100644 index 00000000000..a38f316faf3 --- /dev/null +++ b/tests/components/powerfox/test_config_flow.py @@ -0,0 +1,323 @@ +"""Test the Powerfox config flow.""" + +from unittest.mock import AsyncMock, patch + +from powerfox import PowerfoxAuthenticationError, PowerfoxConnectionError +import pytest + +from homeassistant.components import zeroconf +from homeassistant.components.powerfox.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import MOCK_DIRECT_HOST + +from tests.common import MockConfigEntry + +MOCK_ZEROCONF_DISCOVERY_INFO = zeroconf.ZeroconfServiceInfo( + ip_address=MOCK_DIRECT_HOST, + ip_addresses=[MOCK_DIRECT_HOST], + hostname="powerfox.local", + name="Powerfox", + port=443, + type="_http._tcp", + properties={}, +) + + +async def test_full_user_flow( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "test@powerfox.test" + assert result.get("data") == { + CONF_EMAIL: "test@powerfox.test", + CONF_PASSWORD: "test-password", + } + assert len(mock_powerfox_client.all_devices.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_zeroconf_discovery( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test zeroconf discovery.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=MOCK_ZEROCONF_DISCOVERY_INFO, + ) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "test@powerfox.test" + assert result.get("data") == { + CONF_EMAIL: "test@powerfox.test", + CONF_PASSWORD: "test-password", + } + assert len(mock_powerfox_client.all_devices.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_powerfox_client: AsyncMock, +) -> None: + """Test abort when setting up duplicate entry.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + +async def test_duplicate_entry_reconfiguration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_powerfox_client: AsyncMock, +) -> None: + """Test abort when setting up duplicate entry on reconfiguration.""" + # Add two config entries + mock_config_entry.add_to_hass(hass) + mock_config_entry_2 = MockConfigEntry( + domain=DOMAIN, + data={CONF_EMAIL: "new@powerfox.test", CONF_PASSWORD: "new-password"}, + ) + mock_config_entry_2.add_to_hass(hass) + assert len(hass.config_entries.async_entries()) == 2 + + # Reconfigure the second entry + result = await mock_config_entry_2.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (PowerfoxConnectionError, "cannot_connect"), + (PowerfoxAuthenticationError, "invalid_auth"), + ], +) +async def test_exceptions( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test exceptions during config flow.""" + mock_powerfox_client.all_devices.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + # Recover from error + mock_powerfox_client.all_devices.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.CREATE_ENTRY + + +async def test_step_reauth( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test re-authentication flow.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + + with patch( + "homeassistant.components.powerfox.config_flow.Powerfox", + autospec=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (PowerfoxConnectionError, "cannot_connect"), + (PowerfoxAuthenticationError, "invalid_auth"), + ], +) +async def test_step_reauth_exceptions( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test exceptions during re-authentication flow.""" + mock_powerfox_client.all_devices.side_effect = exception + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + # Recover from error + mock_powerfox_client.all_devices.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PASSWORD: "new-password"}, + ) + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +async def test_reconfigure( + 
hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguration of existing entry.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_EMAIL: "new-email@powerfox.test", + CONF_PASSWORD: "new-password", + }, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reconfigure_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_EMAIL] == "new-email@powerfox.test" + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (PowerfoxConnectionError, "cannot_connect"), + (PowerfoxAuthenticationError, "invalid_auth"), + ], +) +async def test_reconfigure_exceptions( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test exceptions during reconfiguration flow.""" + mock_powerfox_client.all_devices.side_effect = exception + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_EMAIL: "new-email@powerfox.test", + CONF_PASSWORD: "new-password", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + # Recover from error + mock_powerfox_client.all_devices.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_EMAIL: "new-email@powerfox.test", + CONF_PASSWORD: "new-password", + }, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reconfigure_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_EMAIL] == "new-email@powerfox.test" + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" diff --git a/tests/components/powerfox/test_diagnostics.py b/tests/components/powerfox/test_diagnostics.py new file mode 100644 index 00000000000..7dc2c3c7263 --- /dev/null +++ b/tests/components/powerfox/test_diagnostics.py @@ -0,0 +1,30 @@ +"""Test for PowerFox diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the PowerFox entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot diff --git a/tests/components/powerfox/test_init.py b/tests/components/powerfox/test_init.py new file mode 100644 index 00000000000..1ad60babc04 --- /dev/null +++ b/tests/components/powerfox/test_init.py @@ -0,0 +1,62 @@ +"""Test the Powerfox init module.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from powerfox import PowerfoxAuthenticationError, PowerfoxConnectionError + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Powerfox configuration entry not ready.""" + mock_powerfox_client.all_devices.side_effect = PowerfoxConnectionError + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_entry_exception( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test ConfigEntryNotReady when API raises an exception during entry setup.""" + mock_config_entry.add_to_hass(hass) + mock_powerfox_client.device.side_effect = PowerfoxAuthenticationError + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" diff --git a/tests/components/powerfox/test_sensor.py b/tests/components/powerfox/test_sensor.py new file mode 100644 index 00000000000..547d8de202c --- /dev/null +++ b/tests/components/powerfox/test_sensor.py @@ -0,0 +1,53 @@ +"""Test the sensors provided by the Powerfox integration.""" + +from __future__ import annotations + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from powerfox import PowerfoxConnectionError +from syrupy import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import 
entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_all_sensors( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Powerfox sensors.""" + with patch("homeassistant.components.powerfox.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_update_failed( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test entities become unavailable after failed update.""" + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + assert hass.states.get("sensor.poweropti_energy_usage").state is not None + + mock_powerfox_client.device.side_effect = PowerfoxConnectionError + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.poweropti_energy_usage").state == STATE_UNAVAILABLE diff --git a/tests/components/powerwall/test_config_flow.py b/tests/components/powerwall/test_config_flow.py index db0ef2e9884..1ff1470f81c 100644 --- a/tests/components/powerwall/test_config_flow.py +++ b/tests/components/powerwall/test_config_flow.py @@ -336,13 +336,14 @@ async def test_form_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} + flow = hass.config_entries.flow.async_get(result["flow_id"]) + assert flow["context"]["title_placeholders"] == { + "ip_address": VALID_CONFIG[CONF_IP_ADDRESS], + "name": entry.title, + } mock_powerwall = await _mock_powerwall_site_name(hass, "My site") diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 3f0e0b92056..84314b7b22c 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -5,6 +5,7 @@ from functools import lru_cache import logging import os from pathlib import Path +import sys from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory @@ -70,6 +71,9 @@ async def test_basic_usage(hass: HomeAssistant, tmp_path: Path) -> None: await hass.async_block_till_done() +@pytest.mark.skipif( + sys.version_info >= (3, 13), reason="not yet available on Python 3.13" +) async def test_memory_usage(hass: HomeAssistant, tmp_path: Path) -> None: """Test we can setup and the service is registered.""" test_dir = tmp_path / "profiles" @@ -101,6 +105,24 @@ async def test_memory_usage(hass: HomeAssistant, tmp_path: Path) -> None: await hass.async_block_till_done() +@pytest.mark.skipif(sys.version_info < (3, 13), reason="still works on python 3.12") +async def test_memory_usage_py313(hass: HomeAssistant, tmp_path: Path) -> None: + """Test raise an error on python3.13.""" + entry = MockConfigEntry(domain=DOMAIN) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert hass.services.has_service(DOMAIN, 
SERVICE_MEMORY) + with pytest.raises( + HomeAssistantError, + match="Memory profiling is not supported on Python 3.13. Please use Python 3.12.", + ): + await hass.services.async_call( + DOMAIN, SERVICE_MEMORY, {CONF_SECONDS: 0.000001}, blocking=True + ) + + async def test_object_growth_logging( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -189,9 +211,10 @@ async def test_dump_log_object( assert hass.services.has_service(DOMAIN, SERVICE_DUMP_LOG_OBJECTS) - await hass.services.async_call( - DOMAIN, SERVICE_DUMP_LOG_OBJECTS, {CONF_TYPE: "DumpLogDummy"}, blocking=True - ) + with patch("objgraph.by_type", return_value=[obj1, obj2]): + await hass.services.async_call( + DOMAIN, SERVICE_DUMP_LOG_OBJECTS, {CONF_TYPE: "DumpLogDummy"}, blocking=True + ) assert "" in caplog.text assert "Failed to serialize" in caplog.text diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index 0dfa3210671..043a9cc4389 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -3,11 +3,12 @@ from dataclasses import dataclass import datetime from http import HTTPStatus -from typing import Any +from typing import Any, Self from unittest import mock from freezegun import freeze_time import prometheus_client +from prometheus_client.utils import floatToGoString import pytest from homeassistant.components import ( @@ -30,6 +31,7 @@ from homeassistant.components import ( switch, update, ) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, @@ -50,6 +52,7 @@ from homeassistant.components.fan import ( DIRECTION_REVERSE, ) from homeassistant.components.humidifier import ATTR_AVAILABLE_MODES +from homeassistant.components.lock import LockState from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( ATTR_BATTERY_LEVEL, @@ -62,19 +65,16 @@ from homeassistant.const import ( CONTENT_TYPE_TEXT_PLAIN, DEGREE, PERCENTAGE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, STATE_CLOSED, STATE_CLOSING, STATE_HOME, - STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, STATE_OPENING, STATE_UNAVAILABLE, - STATE_UNLOCKED, + STATE_UNKNOWN, UnitOfEnergy, UnitOfTemperature, ) @@ -88,6 +88,77 @@ from tests.typing import ClientSessionGenerator PROMETHEUS_PATH = "homeassistant.components.prometheus" +class EntityMetric: + """Represents a Prometheus metric for a Home Assistant entity.""" + + metric_name: str + labels: dict[str, str] + + @classmethod + def required_labels(cls) -> list[str]: + """List of all required labels for a Prometheus metric.""" + return [ + "domain", + "friendly_name", + "entity", + ] + + def __init__(self, metric_name: str, **kwargs: Any) -> None: + """Create a new EntityMetric based on metric name and labels.""" + self.metric_name = metric_name + self.labels = kwargs + + # Labels that are required for all entities. 
+ for labelname in self.required_labels(): + assert labelname in self.labels + assert self.labels[labelname] != "" + + def withValue(self, value: float) -> Self: + """Return a metric with value.""" + return EntityMetricWithValue(self, value) + + @property + def _metric_name_string(self) -> str: + """Return a full metric name as a string.""" + labels = ",".join( + f'{key}="{value}"' for key, value in sorted(self.labels.items()) + ) + return f"{self.metric_name}{{{labels}}}" + + def _in_metrics(self, metrics: list[str]) -> bool: + """Report whether this metric exists in the provided Prometheus output.""" + return any(line.startswith(self._metric_name_string) for line in metrics) + + def assert_in_metrics(self, metrics: list[str]) -> None: + """Assert that this metric exists in the provided Prometheus output.""" + assert self._in_metrics(metrics) + + def assert_not_in_metrics(self, metrics: list[str]) -> None: + """Assert that this metric does not exist in Prometheus output.""" + assert not self._in_metrics(metrics) + + +class EntityMetricWithValue(EntityMetric): + """Represents a Prometheus metric with a value.""" + + value: float + + def __init__(self, metric: EntityMetric, value: float) -> None: + """Create a new metric with a value based on a metric.""" + super().__init__(metric.metric_name, **metric.labels) + self.value = value + + @property + def _metric_string(self) -> str: + """Return a full metric string.""" + value = floatToGoString(self.value) + return f"{self._metric_name_string} {value}" + + def assert_in_metrics(self, metrics: list[str]) -> None: + """Assert that this metric exists in the provided Prometheus output.""" + assert self._metric_string in metrics + + @dataclass class FilterTest: """Class for capturing a filter test.""" @@ -96,6 +167,299 @@ class FilterTest: should_pass: bool +def test_entity_metric_generates_metric_name_string_without_value() -> None: + """Test using EntityMetric to format a simple metric string without any value.""" + domain = "sensor" + object_id = "outside_temperature" + entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain=domain, + friendly_name="Outside Temperature", + entity=f"{domain}.{object_id}", + ) + assert entity_metric._metric_name_string == ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"}' + ) + + +def test_entity_metric_generates_metric_string_with_value() -> None: + """Test using EntityMetric to format a simple metric string but with a metric value included.""" + domain = "sensor" + object_id = "outside_temperature" + entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain=domain, + friendly_name="Outside Temperature", + entity=f"{domain}.{object_id}", + ).withValue(17.2) + assert entity_metric._metric_string == ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"}' + " 17.2" + ) + + +def test_entity_metric_raises_exception_without_required_labels() -> None: + """Test using EntityMetric to raise exception when required labels are missing.""" + domain = "sensor" + object_id = "outside_temperature" + test_kwargs = { + "metric_name": "homeassistant_sensor_temperature_celsius", + "domain": domain, + "friendly_name": "Outside Temperature", + "entity": f"{domain}.{object_id}", + } + + assert len(EntityMetric.required_labels()) > 0 + + for labelname in 
EntityMetric.required_labels(): + label_kwargs = dict(test_kwargs) + # Delete the required label and ensure we get an exception + del label_kwargs[labelname] + with pytest.raises(AssertionError): + EntityMetric(**label_kwargs) + + +def test_entity_metric_raises_exception_if_required_label_is_empty_string() -> None: + """Test using EntityMetric to raise exception when required label value is empty string.""" + domain = "sensor" + object_id = "outside_temperature" + test_kwargs = { + "metric_name": "homeassistant_sensor_temperature_celsius", + "domain": domain, + "friendly_name": "Outside Temperature", + "entity": f"{domain}.{object_id}", + } + + assert len(EntityMetric.required_labels()) > 0 + + for labelname in EntityMetric.required_labels(): + label_kwargs = dict(test_kwargs) + # Replace the required label with "" and ensure we get an exception + label_kwargs[labelname] = "" + with pytest.raises(AssertionError): + EntityMetric(**label_kwargs) + + +def test_entity_metric_generates_alphabetically_ordered_labels() -> None: + """Test using EntityMetric to format a simple metric string with labels alphabetically ordered.""" + domain = "sensor" + object_id = "outside_temperature" + + static_metric_string = ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature",' + 'zed_label="foo"' + "}" + " 17.2" + ) + + ordered_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain=domain, + entity=f"{domain}.{object_id}", + friendly_name="Outside Temperature", + zed_label="foo", + ).withValue(17.2) + assert ordered_entity_metric._metric_string == static_metric_string + + unordered_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + zed_label="foo", + entity=f"{domain}.{object_id}", + friendly_name="Outside Temperature", + domain=domain, + ).withValue(17.2) + assert unordered_entity_metric._metric_string == static_metric_string + + +def test_entity_metric_generates_metric_string_with_non_required_labels() -> None: + """Test using EntityMetric to format a simple metric string but with extra labels and values included.""" + mode_entity_metric = EntityMetric( + metric_name="climate_preset_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="away", + ).withValue(1) + assert mode_entity_metric._metric_string == ( + "climate_preset_mode{" + 'domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="away"' + "}" + " 1.0" + ) + + action_entity_metric = EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="heating", + ).withValue(1) + assert action_entity_metric._metric_string == ( + "climate_action{" + 'action="heating",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"' + "}" + " 1.0" + ) + + state_entity_metric = EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name="Curtain", + entity="cover.curtain", + state="open", + ).withValue(1) + assert state_entity_metric._metric_string == ( + "cover_state{" + 'domain="cover",' + 'entity="cover.curtain",' + 'friendly_name="Curtain",' + 'state="open"' + "}" + " 1.0" + ) + + foo_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + foo="bar", + ).withValue(17.2) + assert 
foo_entity_metric._metric_string == ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'foo="bar",' + 'friendly_name="Outside Temperature"' + "}" + " 17.2" + ) + + +def test_entity_metric_assert_helpers() -> None: + """Test using EntityMetric for both assert_in_metrics and assert_not_in_metrics.""" + temp_metric = ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'foo="bar",' + 'friendly_name="Outside Temperature"' + "}" + ) + climate_metric = ( + "climate_preset_mode{" + 'domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="away"' + "}" + ) + excluded_cover_metric = ( + "cover_state{" + 'domain="cover",' + 'entity="cover.curtain",' + 'friendly_name="Curtain",' + 'state="open"' + "}" + ) + metrics = [ + temp_metric, + climate_metric, + ] + # First make sure the excluded metric is not present + assert excluded_cover_metric not in metrics + # Now check for the actual metrics + temp_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + foo="bar", + ) + assert temp_entity_metric._metric_name_string == temp_metric + temp_entity_metric.assert_in_metrics(metrics) + + climate_entity_metric = EntityMetric( + metric_name="climate_preset_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="away", + ) + assert climate_entity_metric._metric_name_string == climate_metric + climate_entity_metric.assert_in_metrics(metrics) + + excluded_cover_entity_metric = EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name="Curtain", + entity="cover.curtain", + state="open", + ) + assert excluded_cover_entity_metric._metric_name_string == excluded_cover_metric + excluded_cover_entity_metric.assert_not_in_metrics(metrics) + + +def test_entity_metric_with_value_assert_helpers() -> None: + """Test using EntityMetricWithValue helpers, which only provide assert_in_metrics.""" + temp_metric = ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'foo="bar",' + 'friendly_name="Outside Temperature"' + "}" + " 17.2" + ) + climate_metric = ( + "climate_preset_mode{" + 'domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="away"' + "}" + " 1.0" + ) + metrics = [ + temp_metric, + climate_metric, + ] + temp_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + foo="bar", + ).withValue(17.2) + assert temp_entity_metric._metric_string == temp_metric + temp_entity_metric.assert_in_metrics(metrics) + + climate_entity_metric = EntityMetric( + metric_name="climate_preset_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="away", + ).withValue(1) + assert climate_entity_metric._metric_string == climate_metric + climate_entity_metric.assert_in_metrics(metrics) + + @pytest.fixture(name="client") async def setup_prometheus_client( hass: HomeAssistant, @@ -154,16 +518,18 @@ async def test_setup_enumeration( suggested_object_id="outside_temperature", original_name="Outside Temperature", ) - set_state_with_entry(hass, sensor_1, 12.3, {}) + state = 12.3 + set_state_with_entry(hass, sensor_1, state, {}) assert await async_setup_component(hass, prometheus.DOMAIN, 
{prometheus.DOMAIN: {}}) client = await hass_client() body = await generate_latest_metrics(client) - assert ( - 'homeassistant_sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 12.3' in body - ) + EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(state).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -179,17 +545,19 @@ async def test_view_empty_namespace( "Objects collected during gc" in body ) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.radio_energy",' - 'friendly_name="Radio Energy"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Radio Energy", + entity="sensor.radio_energy", + ).withValue(1).assert_in_metrics(body) - assert ( - 'last_updated_time_seconds{domain="sensor",' - 'entity="sensor.radio_energy",' - 'friendly_name="Radio Energy"} 86400.0' in body - ) + EntityMetric( + metric_name="last_updated_time_seconds", + domain="sensor", + friendly_name="Radio Energy", + entity="sensor.radio_energy", + ).withValue(86400.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [None]) @@ -205,11 +573,12 @@ async def test_view_default_namespace( "Objects collected during gc" in body ) - assert ( - 'homeassistant_sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -219,29 +588,33 @@ async def test_sensor_unit( """Test prometheus metrics for sensors with a unit.""" body = await generate_latest_metrics(client) - assert ( - 'sensor_unit_kwh{domain="sensor",' - 'entity="sensor.television_energy",' - 'friendly_name="Television Energy"} 74.0' in body - ) + EntityMetric( + metric_name="sensor_unit_kwh", + domain="sensor", + friendly_name="Television Energy", + entity="sensor.television_energy", + ).withValue(74.0).assert_in_metrics(body) - assert ( - 'sensor_unit_sek_per_kwh{domain="sensor",' - 'entity="sensor.electricity_price",' - 'friendly_name="Electricity price"} 0.123' in body - ) + EntityMetric( + metric_name="sensor_unit_sek_per_kwh", + domain="sensor", + friendly_name="Electricity price", + entity="sensor.electricity_price", + ).withValue(0.123).assert_in_metrics(body) - assert ( - 'sensor_unit_u0xb0{domain="sensor",' - 'entity="sensor.wind_direction",' - 'friendly_name="Wind Direction"} 25.0' in body - ) + EntityMetric( + metric_name="sensor_unit_u0xb0", + domain="sensor", + friendly_name="Wind Direction", + entity="sensor.wind_direction", + ).withValue(25.0).assert_in_metrics(body) - assert ( - 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' - 'entity="sensor.sps30_pm_1um_weight_concentration",' - 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body - ) + EntityMetric( + metric_name="sensor_unit_u0xb5g_per_mu0xb3", + domain="sensor", + friendly_name="SPS30 PM <1µm Weight concentration", + entity="sensor.sps30_pm_1um_weight_concentration", + ).withValue(3.7069).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -251,23 +624,26 @@ async def test_sensor_without_unit( """Test prometheus metrics 
for sensors without a unit.""" body = await generate_latest_metrics(client) - assert ( - 'sensor_state{domain="sensor",' - 'entity="sensor.trend_gradient",' - 'friendly_name="Trend Gradient"} 0.002' in body - ) + EntityMetric( + metric_name="sensor_state", + domain="sensor", + friendly_name="Trend Gradient", + entity="sensor.trend_gradient", + ).withValue(0.002).assert_in_metrics(body) - assert ( - 'sensor_state{domain="sensor",' - 'entity="sensor.text",' - 'friendly_name="Text"} 0' not in body - ) + EntityMetric( + metric_name="sensor_state", + domain="sensor", + friendly_name="Text", + entity="sensor.text", + ).assert_not_in_metrics(body) - assert ( - 'sensor_unit_text{domain="sensor",' - 'entity="sensor.text_unit",' - 'friendly_name="Text Unit"} 0' not in body - ) + EntityMetric( + metric_name="sensor_unit_text", + domain="sensor", + friendly_name="Text Unit", + entity="sensor.text_unit", + ).assert_not_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -277,35 +653,40 @@ async def test_sensor_device_class( """Test prometheus metrics for sensor with a device_class.""" body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.fahrenheit",' - 'friendly_name="Fahrenheit"} 10.0' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Fahrenheit", + entity="sensor.fahrenheit", + ).withValue(10.0).assert_in_metrics(body) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'sensor_power_kwh{domain="sensor",' - 'entity="sensor.radio_energy",' - 'friendly_name="Radio Energy"} 14.0' in body - ) + EntityMetric( + metric_name="sensor_power_kwh", + domain="sensor", + friendly_name="Radio Energy", + entity="sensor.radio_energy", + ).withValue(14.0).assert_in_metrics(body) - assert ( - 'sensor_timestamp_seconds{domain="sensor",' - 'entity="sensor.timestamp",' - 'friendly_name="Timestamp"} 1.691445808136036e+09' in body - ) + EntityMetric( + metric_name="sensor_timestamp_seconds", + domain="sensor", + friendly_name="Timestamp", + entity="sensor.timestamp", + ).withValue(1.691445808136036e09).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -315,23 +696,33 @@ async def test_input_number( """Test prometheus metrics for input_number.""" body = await generate_latest_metrics(client) - assert ( - 'input_number_state{domain="input_number",' - 'entity="input_number.threshold",' - 'friendly_name="Threshold"} 5.2' in body - ) + EntityMetric( + metric_name="input_number_state", + domain="input_number", + friendly_name="Threshold", + entity="input_number.threshold", + ).withValue(5.2).assert_in_metrics(body) - assert ( - 'input_number_state{domain="input_number",' - 'entity="input_number.brightness",' - 'friendly_name="None"} 60.0' in body - ) + EntityMetric( + metric_name="input_number_state", + 
domain="input_number", + friendly_name="None", + entity="input_number.brightness", + ).withValue(60.0).assert_in_metrics(body) - assert ( - 'input_number_state_celsius{domain="input_number",' - 'entity="input_number.target_temperature",' - 'friendly_name="Target temperature"} 22.7' in body - ) + EntityMetric( + metric_name="input_number_state_celsius", + domain="input_number", + friendly_name="Target temperature", + entity="input_number.target_temperature", + ).withValue(22.7).assert_in_metrics(body) + + EntityMetric( + metric_name="input_number_state_celsius", + domain="input_number", + friendly_name="Converted temperature", + entity="input_number.converted_temperature", + ).withValue(100).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -341,23 +732,26 @@ async def test_number( """Test prometheus metrics for number.""" body = await generate_latest_metrics(client) - assert ( - 'number_state{domain="number",' - 'entity="number.threshold",' - 'friendly_name="Threshold"} 5.2' in body - ) + EntityMetric( + metric_name="number_state", + domain="number", + friendly_name="Threshold", + entity="number.threshold", + ).withValue(5.2).assert_in_metrics(body) - assert ( - 'number_state{domain="number",' - 'entity="number.brightness",' - 'friendly_name="None"} 60.0' in body - ) + EntityMetric( + metric_name="number_state", + domain="number", + friendly_name="None", + entity="number.brightness", + ).withValue(60.0).assert_in_metrics(body) - assert ( - 'number_state_celsius{domain="number",' - 'entity="number.target_temperature",' - 'friendly_name="Target temperature"} 22.7' in body - ) + EntityMetric( + metric_name="number_state_celsius", + domain="number", + friendly_name="Target temperature", + entity="number.target_temperature", + ).withValue(22.7).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -367,11 +761,12 @@ async def test_battery( """Test prometheus metrics for battery.""" body = await generate_latest_metrics(client) - assert ( - 'battery_level_percent{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 12.0' in body - ) + EntityMetric( + metric_name="battery_level_percent", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(12.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -382,47 +777,56 @@ async def test_climate( """Test prometheus metrics for climate entities.""" body = await generate_latest_metrics(client) - assert ( - 'climate_current_temperature_celsius{domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 25.0' in body - ) + EntityMetric( + metric_name="climate_current_temperature_celsius", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + ).withValue(25.0).assert_in_metrics(body) - assert ( - 'climate_target_temperature_celsius{domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 20.0' in body - ) + EntityMetric( + metric_name="climate_target_temperature_celsius", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + ).withValue(20.0).assert_in_metrics(body) - assert ( - 'climate_target_temperature_low_celsius{domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee"} 21.0' in body - ) + EntityMetric( + metric_name="climate_target_temperature_low_celsius", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + ).withValue(21.0).assert_in_metrics(body) - assert ( - 
'climate_target_temperature_high_celsius{domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee"} 24.0' in body - ) + EntityMetric( + metric_name="climate_target_temperature_high_celsius", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + ).withValue(24.0).assert_in_metrics(body) - assert ( - 'climate_target_temperature_celsius{domain="climate",' - 'entity="climate.fritzdect",' - 'friendly_name="Fritz!DECT"} 0.0' in body - ) - assert ( - 'climate_preset_mode{domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee",' - 'mode="away"} 1.0' in body - ) - assert ( - 'climate_fan_mode{domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee",' - 'mode="auto"} 1.0' in body - ) + EntityMetric( + metric_name="climate_target_temperature_celsius", + domain="climate", + friendly_name="Fritz!DECT", + entity="climate.fritzdect", + ).withValue(0.0).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_preset_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="away", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_fan_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="auto", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -433,30 +837,35 @@ async def test_humidifier( """Test prometheus metrics for humidifier entities.""" body = await generate_latest_metrics(client) - assert ( - 'humidifier_target_humidity_percent{domain="humidifier",' - 'entity="humidifier.humidifier",' - 'friendly_name="Humidifier"} 68.0' in body - ) + EntityMetric( + metric_name="humidifier_target_humidity_percent", + domain="humidifier", + friendly_name="Humidifier", + entity="humidifier.humidifier", + ).withValue(68.0).assert_in_metrics(body) - assert ( - 'humidifier_state{domain="humidifier",' - 'entity="humidifier.dehumidifier",' - 'friendly_name="Dehumidifier"} 1.0' in body - ) + EntityMetric( + metric_name="humidifier_state", + domain="humidifier", + friendly_name="Dehumidifier", + entity="humidifier.dehumidifier", + ).withValue(1).assert_in_metrics(body) - assert ( - 'humidifier_mode{domain="humidifier",' - 'entity="humidifier.hygrostat",' - 'friendly_name="Hygrostat",' - 'mode="home"} 1.0' in body - ) - assert ( - 'humidifier_mode{domain="humidifier",' - 'entity="humidifier.hygrostat",' - 'friendly_name="Hygrostat",' - 'mode="eco"} 0.0' in body - ) + EntityMetric( + metric_name="humidifier_mode", + domain="humidifier", + friendly_name="Hygrostat", + entity="humidifier.hygrostat", + mode="home", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="humidifier_mode", + domain="humidifier", + friendly_name="Hygrostat", + entity="humidifier.hygrostat", + mode="eco", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -467,29 +876,33 @@ async def test_attributes( """Test prometheus metrics for entity attributes.""" body = await generate_latest_metrics(client) - assert ( - 'switch_state{domain="switch",' - 'entity="switch.boolean",' - 'friendly_name="Boolean"} 1.0' in body - ) + EntityMetric( + metric_name="switch_state", + domain="switch", + friendly_name="Boolean", + entity="switch.boolean", + ).withValue(1).assert_in_metrics(body) - assert ( - 'switch_attr_boolean{domain="switch",' - 'entity="switch.boolean",' - 'friendly_name="Boolean"} 1.0' in body - ) + EntityMetric( + metric_name="switch_attr_boolean", + domain="switch", + 
friendly_name="Boolean", + entity="switch.boolean", + ).withValue(1).assert_in_metrics(body) - assert ( - 'switch_state{domain="switch",' - 'entity="switch.number",' - 'friendly_name="Number"} 0.0' in body - ) + EntityMetric( + metric_name="switch_state", + domain="switch", + friendly_name="Number", + entity="switch.number", + ).withValue(0.0).assert_in_metrics(body) - assert ( - 'switch_attr_number{domain="switch",' - 'entity="switch.number",' - 'friendly_name="Number"} 10.2' in body - ) + EntityMetric( + metric_name="switch_attr_number", + domain="switch", + friendly_name="Number", + entity="switch.number", + ).withValue(10.2).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -499,17 +912,19 @@ async def test_binary_sensor( """Test prometheus metrics for binary_sensor.""" body = await generate_latest_metrics(client) - assert ( - 'binary_sensor_state{domain="binary_sensor",' - 'entity="binary_sensor.door",' - 'friendly_name="Door"} 1.0' in body - ) + EntityMetric( + metric_name="binary_sensor_state", + domain="binary_sensor", + friendly_name="Door", + entity="binary_sensor.door", + ).withValue(1).assert_in_metrics(body) - assert ( - 'binary_sensor_state{domain="binary_sensor",' - 'entity="binary_sensor.window",' - 'friendly_name="Window"} 0.0' in body - ) + EntityMetric( + metric_name="binary_sensor_state", + domain="binary_sensor", + friendly_name="Window", + entity="binary_sensor.window", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -519,17 +934,19 @@ async def test_input_boolean( """Test prometheus metrics for input_boolean.""" body = await generate_latest_metrics(client) - assert ( - 'input_boolean_state{domain="input_boolean",' - 'entity="input_boolean.test",' - 'friendly_name="Test"} 1.0' in body - ) + EntityMetric( + metric_name="input_boolean_state", + domain="input_boolean", + friendly_name="Test", + entity="input_boolean.test", + ).withValue(1).assert_in_metrics(body) - assert ( - 'input_boolean_state{domain="input_boolean",' - 'entity="input_boolean.helper",' - 'friendly_name="Helper"} 0.0' in body - ) + EntityMetric( + metric_name="input_boolean_state", + domain="input_boolean", + friendly_name="Helper", + entity="input_boolean.helper", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -539,35 +956,40 @@ async def test_light( """Test prometheus metrics for lights.""" body = await generate_latest_metrics(client) - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.desk",' - 'friendly_name="Desk"} 100.0' in body - ) + EntityMetric( + metric_name="light_brightness_percent", + domain="light", + friendly_name="Desk", + entity="light.desk", + ).withValue(100.0).assert_in_metrics(body) - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.wall",' - 'friendly_name="Wall"} 0.0' in body - ) + EntityMetric( + metric_name="light_brightness_percent", + domain="light", + friendly_name="Wall", + entity="light.wall", + ).withValue(0.0).assert_in_metrics(body) - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.tv",' - 'friendly_name="TV"} 100.0' in body - ) + EntityMetric( + metric_name="light_brightness_percent", + domain="light", + friendly_name="TV", + entity="light.tv", + ).withValue(100.0).assert_in_metrics(body) - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.pc",' - 'friendly_name="PC"} 70.58823529411765' in body - ) + EntityMetric( + metric_name="light_brightness_percent", + domain="light", + 
friendly_name="PC", + entity="light.pc", + ).withValue(70.58823529411765).assert_in_metrics(body) - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.hallway",' - 'friendly_name="Hallway"} 100.0' in body - ) + EntityMetric( + metric_name="light_brightness_percent", + domain="light", + friendly_name="Hallway", + entity="light.hallway", + ).withValue(100.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -577,17 +999,19 @@ async def test_lock( """Test prometheus metrics for lock.""" body = await generate_latest_metrics(client) - assert ( - 'lock_state{domain="lock",' - 'entity="lock.front_door",' - 'friendly_name="Front Door"} 1.0' in body - ) + EntityMetric( + metric_name="lock_state", + domain="lock", + friendly_name="Front Door", + entity="lock.front_door", + ).withValue(1).assert_in_metrics(body) - assert ( - 'lock_state{domain="lock",' - 'entity="lock.kitchen_door",' - 'friendly_name="Kitchen Door"} 0.0' in body - ) + EntityMetric( + metric_name="lock_state", + domain="lock", + friendly_name="Kitchen Door", + entity="lock.kitchen_door", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -597,42 +1021,48 @@ async def test_fan( """Test prometheus metrics for fan.""" body = await generate_latest_metrics(client) - assert ( - 'fan_state{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1"} 1.0' in body - ) + EntityMetric( + metric_name="fan_state", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + ).withValue(1).assert_in_metrics(body) - assert ( - 'fan_speed_percent{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1"} 33.0' in body - ) + EntityMetric( + metric_name="fan_speed_percent", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + ).withValue(33.0).assert_in_metrics(body) - assert ( - 'fan_is_oscillating{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1"} 1.0' in body - ) + EntityMetric( + metric_name="fan_is_oscillating", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + ).withValue(1).assert_in_metrics(body) - assert ( - 'fan_direction_reversed{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1"} 0.0' in body - ) + EntityMetric( + metric_name="fan_direction_reversed", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + ).withValue(0.0).assert_in_metrics(body) - assert ( - 'fan_preset_mode{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1",' - 'mode="LO"} 1.0' in body - ) + EntityMetric( + metric_name="fan_preset_mode", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + mode="LO", + ).withValue(1).assert_in_metrics(body) - assert ( - 'fan_direction_reversed{domain="fan",' - 'entity="fan.fan_2",' - 'friendly_name="Reverse Fan"} 1.0' in body - ) + EntityMetric( + metric_name="fan_direction_reversed", + domain="fan", + friendly_name="Reverse Fan", + entity="fan.fan_2", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -643,33 +1073,37 @@ async def test_alarm_control_panel( """Test prometheus metrics for alarm control panel.""" body = await generate_latest_metrics(client) - assert ( - 'alarm_control_panel_state{domain="alarm_control_panel",' - 'entity="alarm_control_panel.alarm_control_panel_1",' - 'friendly_name="Alarm Control Panel 1",' - 'state="armed_away"} 1.0' in body - ) + EntityMetric( + metric_name="alarm_control_panel_state", + domain="alarm_control_panel", + friendly_name="Alarm Control Panel 1", + 
entity="alarm_control_panel.alarm_control_panel_1", + state="armed_away", + ).withValue(1).assert_in_metrics(body) - assert ( - 'alarm_control_panel_state{domain="alarm_control_panel",' - 'entity="alarm_control_panel.alarm_control_panel_1",' - 'friendly_name="Alarm Control Panel 1",' - 'state="disarmed"} 0.0' in body - ) + EntityMetric( + metric_name="alarm_control_panel_state", + domain="alarm_control_panel", + friendly_name="Alarm Control Panel 1", + entity="alarm_control_panel.alarm_control_panel_1", + state="disarmed", + ).withValue(0.0).assert_in_metrics(body) - assert ( - 'alarm_control_panel_state{domain="alarm_control_panel",' - 'entity="alarm_control_panel.alarm_control_panel_2",' - 'friendly_name="Alarm Control Panel 2",' - 'state="armed_home"} 1.0' in body - ) + EntityMetric( + metric_name="alarm_control_panel_state", + domain="alarm_control_panel", + friendly_name="Alarm Control Panel 2", + entity="alarm_control_panel.alarm_control_panel_2", + state="armed_home", + ).withValue(1).assert_in_metrics(body) - assert ( - 'alarm_control_panel_state{domain="alarm_control_panel",' - 'entity="alarm_control_panel.alarm_control_panel_2",' - 'friendly_name="Alarm Control Panel 2",' - 'state="armed_away"} 0.0' in body - ) + EntityMetric( + metric_name="alarm_control_panel_state", + domain="alarm_control_panel", + friendly_name="Alarm Control Panel 2", + entity="alarm_control_panel.alarm_control_panel_2", + state="armed_away", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -682,55 +1116,61 @@ async def test_cover( open_covers = ["cover_open", "cover_position", "cover_tilt_position"] for testcover in data: - open_metric = ( - f'cover_state{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}",' - f'state="open"}} {1.0 if cover_entities[testcover].unique_id in open_covers else 0.0}' - ) - assert open_metric in body + EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + state="open", + ).withValue( + 1.0 if cover_entities[testcover].unique_id in open_covers else 0.0 + ).assert_in_metrics(body) - closed_metric = ( - f'cover_state{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}",' - f'state="closed"}} {1.0 if cover_entities[testcover].unique_id == "cover_closed" else 0.0}' - ) - assert closed_metric in body + EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + state="closed", + ).withValue( + 1.0 if cover_entities[testcover].unique_id == "cover_closed" else 0.0 + ).assert_in_metrics(body) - opening_metric = ( - f'cover_state{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}",' - f'state="opening"}} {1.0 if cover_entities[testcover].unique_id == "cover_opening" else 0.0}' - ) - assert opening_metric in body + EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + state="opening", + ).withValue( + 1.0 if cover_entities[testcover].unique_id == "cover_opening" else 0.0 + ).assert_in_metrics(body) - closing_metric = ( - f'cover_state{{domain="cover",' - 
f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}",' - f'state="closing"}} {1.0 if cover_entities[testcover].unique_id == "cover_closing" else 0.0}' - ) - assert closing_metric in body + EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + state="closing", + ).withValue( + 1.0 if cover_entities[testcover].unique_id == "cover_closing" else 0.0 + ).assert_in_metrics(body) if testcover == "cover_position": - position_metric = ( - f'cover_position{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}"' - f"}} 50.0" - ) - assert position_metric in body + EntityMetric( + metric_name="cover_position", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + ).withValue(50.0).assert_in_metrics(body) if testcover == "cover_tilt_position": - tilt_position_metric = ( - f'cover_tilt_position{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}"' - f"}} 50.0" - ) - assert tilt_position_metric in body + EntityMetric( + metric_name="cover_tilt_position", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + ).withValue(50.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -740,16 +1180,40 @@ async def test_device_tracker( """Test prometheus metrics for device_tracker.""" body = await generate_latest_metrics(client) - assert ( - 'device_tracker_state{domain="device_tracker",' - 'entity="device_tracker.phone",' - 'friendly_name="Phone"} 1.0' in body - ) - assert ( - 'device_tracker_state{domain="device_tracker",' - 'entity="device_tracker.watch",' - 'friendly_name="Watch"} 0.0' in body - ) + EntityMetric( + metric_name="device_tracker_state", + domain="device_tracker", + friendly_name="Phone", + entity="device_tracker.phone", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="device_tracker_state", + domain="device_tracker", + friendly_name="Watch", + entity="device_tracker.watch", + ).withValue(0.0).assert_in_metrics(body) + + +@pytest.mark.parametrize("namespace", [""]) +async def test_person( + client: ClientSessionGenerator, person_entities: dict[str, er.RegistryEntry] +) -> None: + """Test prometheus metrics for person.""" + body = await generate_latest_metrics(client) + + EntityMetric( + metric_name="person_state", + domain="person", + friendly_name="Bob", + entity="person.bob", + ).withValue(1).assert_in_metrics(body) + EntityMetric( + metric_name="person_state", + domain="person", + friendly_name="Alice", + entity="person.alice", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -759,11 +1223,12 @@ async def test_counter( """Test prometheus metrics for counter.""" body = await generate_latest_metrics(client) - assert ( - 'counter_value{domain="counter",' - 'entity="counter.counter",' - 'friendly_name="None"} 2.0' in body - ) + EntityMetric( + metric_name="counter_value", + domain="counter", + friendly_name="None", + entity="counter.counter", + ).withValue(2.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -773,16 +1238,18 @@ async def test_update( """Test prometheus metrics for update.""" body = await 
generate_latest_metrics(client) - assert ( - 'update_state{domain="update",' - 'entity="update.firmware",' - 'friendly_name="Firmware"} 1.0' in body - ) - assert ( - 'update_state{domain="update",' - 'entity="update.addon",' - 'friendly_name="Addon"} 0.0' in body - ) + EntityMetric( + metric_name="update_state", + domain="update", + friendly_name="Firmware", + entity="update.firmware", + ).withValue(1).assert_in_metrics(body) + EntityMetric( + metric_name="update_state", + domain="update", + friendly_name="Addon", + entity="update.addon", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -797,43 +1264,49 @@ async def test_renaming_entity_name( data = {**sensor_entities, **climate_entities} body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) - assert ( - 'climate_action{action="heating",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 1.0' in body - ) + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="heating", + ).withValue(1).assert_in_metrics(body) - assert ( - 'climate_action{action="cooling",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 0.0' in body - ) + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="cooling", + ).withValue(0.0).assert_in_metrics(body) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -871,44 +1344,50 @@ async def test_renaming_entity_name( assert 'friendly_name="HeatPump"' not in body_line # Check if new metrics created - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature Renamed"} 15.6' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature Renamed", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 
'friendly_name="Outside Temperature Renamed"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature Renamed", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) - assert ( - 'climate_action{action="heating",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump Renamed"} 1.0' in body - ) + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump Renamed", + entity="climate.heatpump", + action="heating", + ).withValue(1).assert_in_metrics(body) - assert ( - 'climate_action{action="cooling",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump Renamed"} 0.0' in body - ) + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump Renamed", + entity="climate.heatpump", + action="cooling", + ).withValue(0.0).assert_in_metrics(body) # Keep other sensors - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -923,29 +1402,33 @@ async def test_renaming_entity_id( data = {**sensor_entities, **climate_entities} body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -965,30 +1448,33 @@ async def test_renaming_entity_id( assert 'entity="sensor.outside_temperature"' not in body_line # Check if new metrics created - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 
'entity="sensor.outside_temperature_renamed",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature_renamed", + ).withValue(15.6).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature_renamed",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature_renamed", + ).withValue(1).assert_in_metrics(body) # Keep other sensors - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -1003,43 +1489,49 @@ async def test_deleting_entity( data = {**sensor_entities, **climate_entities} body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) - assert ( - 'climate_action{action="heating",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 1.0' in body - ) + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="heating", + ).withValue(1).assert_in_metrics(body) - assert ( - 'climate_action{action="cooling",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 0.0' in body - ) + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="cooling", + 
).withValue(0.0).assert_in_metrics(body) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -1057,17 +1549,19 @@ async def test_deleting_entity( assert 'friendly_name="HeatPump"' not in body_line # Keep other sensors - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -1084,50 +1578,56 @@ async def test_disabling_entity( await hass.async_block_till_done() body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) - assert any( - 'state_change_created{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"}' in metric - for metric in body - ) + EntityMetric( + metric_name="state_change_created", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_in_metrics(body) - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) - assert ( - 'climate_action{action="heating",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 1.0' in body - ) + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="heating", + ).withValue(1).assert_in_metrics(body) - assert ( - 'climate_action{action="cooling",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 0.0' in body - ) + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="cooling", + ).withValue(0.0).assert_in_metrics(body) assert "sensor.outside_temperature" in 
entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -1151,137 +1651,191 @@ async def test_disabling_entity( assert 'friendly_name="HeatPump"' not in body_line # Keep other sensors - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) -async def test_entity_becomes_unavailable_with_export( +@pytest.mark.parametrize("unavailable_state", [STATE_UNAVAILABLE, STATE_UNKNOWN]) +async def test_entity_becomes_unavailable( hass: HomeAssistant, entity_registry: er.EntityRegistry, client: ClientSessionGenerator, sensor_entities: dict[str, er.RegistryEntry], + unavailable_state: str, ) -> None: - """Test an entity that becomes unavailable is still exported.""" + """Test an entity that becomes unavailable/unknown is no longer exported.""" data = {**sensor_entities} await hass.async_block_till_done() body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="last_updated_time_seconds", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_in_metrics(body) - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="battery_level_percent", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(12.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + 
).withValue(54.0).assert_in_metrics(body) - # Make sensor_1 unavailable. + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) + + # Make sensor_1 unavailable/unknown. set_state_with_entry( - hass, data["sensor_1"], STATE_UNAVAILABLE, data["sensor_1_attributes"] + hass, data["sensor_1"], unavailable_state, data["sensor_1_attributes"] ) await hass.async_block_till_done() body = await generate_latest_metrics(client) - # Check that only the availability changed on sensor_1. - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + # Check that the availability changed on sensor_1 and the metric with the value is gone. + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_not_in_metrics(body) - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 2.0' in body - ) + EntityMetric( + metric_name="battery_level_percent", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_not_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 0.0' in body - ) + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(2.0).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(0.0).assert_in_metrics(body) + + EntityMetric( + metric_name="last_updated_time_seconds", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_in_metrics(body) # The other sensor should be unchanged. - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) - # Bring sensor_1 back and check that it is correct. - set_state_with_entry(hass, data["sensor_1"], 200.0, data["sensor_1_attributes"]) + # Bring sensor_1 back and check that it returned. 
+ set_state_with_entry(hass, data["sensor_1"], 201.0, data["sensor_1_attributes"]) await hass.async_block_till_done() body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 200.0' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(201.0).assert_in_metrics(body) - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 3.0' in body - ) + EntityMetric( + metric_name="battery_level_percent", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(12.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(3.0).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) @pytest.fixture(name="sensor_entities") @@ -1571,7 +2125,7 @@ async def lock_fixture( suggested_object_id="front_door", original_name="Front Door", ) - set_state_with_entry(hass, lock_1, STATE_LOCKED) + set_state_with_entry(hass, lock_1, LockState.LOCKED) data["lock_1"] = lock_1 lock_2 = entity_registry.async_get_or_create( @@ -1581,7 +2135,7 @@ async def lock_fixture( suggested_object_id="kitchen_door", original_name="Kitchen Door", ) - set_state_with_entry(hass, lock_2, STATE_UNLOCKED) + set_state_with_entry(hass, lock_2, LockState.UNLOCKED) data["lock_2"] = lock_2 await hass.async_block_till_done() @@ -1698,6 +2252,17 @@ async def input_number_fixture( set_state_with_entry(hass, input_number_3, 22.7) data["input_number_3"] = input_number_3 + input_number_4 = entity_registry.async_get_or_create( + domain=input_number.DOMAIN, + platform="test", + unique_id="input_number_4", + suggested_object_id="converted_temperature", + original_name="Converted temperature", + unit_of_measurement=UnitOfTemperature.FAHRENHEIT, + ) + set_state_with_entry(hass, input_number_4, 212) + data["input_number_4"] = input_number_4 + await hass.async_block_till_done() return data @@ -1956,7 +2521,7 @@ async def alarm_control_panel_fixture( suggested_object_id="alarm_control_panel_1", original_name="Alarm Control Panel 1", ) - set_state_with_entry(hass, alarm_control_panel_1, STATE_ALARM_ARMED_AWAY) + set_state_with_entry(hass, alarm_control_panel_1, AlarmControlPanelState.ARMED_AWAY) data["alarm_control_panel_1"] = alarm_control_panel_1 alarm_control_panel_2 = entity_registry.async_get_or_create( @@ -1966,7 +2531,7 @@ async def alarm_control_panel_fixture( suggested_object_id="alarm_control_panel_2", original_name="Alarm Control Panel 2", ) - set_state_with_entry(hass, alarm_control_panel_2, STATE_ALARM_ARMED_HOME) + set_state_with_entry(hass, alarm_control_panel_2, AlarmControlPanelState.ARMED_HOME) data["alarm_control_panel_2"] = alarm_control_panel_2 await hass.async_block_till_done() diff --git a/tests/components/prosegur/test_alarm_control_panel.py b/tests/components/prosegur/test_alarm_control_panel.py index f66d070f218..4e3dcdc3fd8 
100644 --- a/tests/components/prosegur/test_alarm_control_panel.py +++ b/tests/components/prosegur/test_alarm_control_panel.py @@ -6,7 +6,10 @@ from unittest.mock import AsyncMock, patch from pyprosegur.installation import Status import pytest -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -14,9 +17,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -93,9 +93,13 @@ async def test_connection_error( @pytest.mark.parametrize( ("code", "alarm_service", "alarm_state"), [ - (Status.ARMED, SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (Status.PARTIALLY, SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (Status.DISARMED, SERVICE_ALARM_DISARM, STATE_ALARM_DISARMED), + (Status.ARMED, SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + Status.PARTIALLY, + SERVICE_ALARM_ARM_HOME, + AlarmControlPanelState.ARMED_HOME, + ), + (Status.DISARMED, SERVICE_ALARM_DISARM, AlarmControlPanelState.DISARMED), ], ) async def test_arm( diff --git a/tests/components/prosegur/test_config_flow.py b/tests/components/prosegur/test_config_flow.py index 9362cecc289..7c3f399ee09 100644 --- a/tests/components/prosegur/test_config_flow.py +++ b/tests/components/prosegur/test_config_flow.py @@ -143,15 +143,7 @@ async def test_reauth_flow(hass: HomeAssistant, mock_list_contracts) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -208,15 +200,7 @@ async def test_reauth_flow_error(hass: HomeAssistant, exception, base_error) -> ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.prosegur.config_flow.Installation.list", diff --git a/tests/components/proximity/snapshots/test_diagnostics.ambr b/tests/components/proximity/snapshots/test_diagnostics.ambr index 68270dc3297..42ec74710f9 100644 --- a/tests/components/proximity/snapshots/test_diagnostics.ambr +++ b/tests/components/proximity/snapshots/test_diagnostics.ambr @@ -93,6 +93,8 @@ 'zone': 'zone.home', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'proximity', 'minor_version': 1, 'options': dict({ @@ -100,6 +102,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'home', 'unique_id': 'proximity_home', 'version': 1, diff --git a/tests/components/proximity/test_config_flow.py b/tests/components/proximity/test_config_flow.py index 3ed9f5cba27..853026928bc 100644 --- a/tests/components/proximity/test_config_flow.py +++ b/tests/components/proximity/test_config_flow.py @@ -10,8 +10,8 @@ from homeassistant.components.proximity.const 
import ( CONF_TRACKED_ENTITIES, DOMAIN, ) -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_ZONE +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -120,42 +120,6 @@ async def test_options_flow(hass: HomeAssistant) -> None: } -async def test_import_flow(hass: HomeAssistant) -> None: - """Test import of yaml configuration.""" - with patch( - "homeassistant.components.proximity.async_setup_entry", return_value=True - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_NAME: "home", - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: ["device_tracker.test1"], - CONF_IGNORED_ZONES: ["zone.work"], - CONF_TOLERANCE: 10, - CONF_UNIT_OF_MEASUREMENT: "km", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_NAME: "home", - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: ["device_tracker.test1"], - CONF_IGNORED_ZONES: ["zone.work"], - CONF_TOLERANCE: 10, - CONF_UNIT_OF_MEASUREMENT: "km", - } - - zone = hass.states.get("zone.home") - assert result["title"] == zone.name - - await hass.async_block_till_done() - - assert mock_setup_entry.called - - async def test_abort_duplicated_entry(hass: HomeAssistant) -> None: """Test if we abort on duplicate user input data.""" DATA = { @@ -211,7 +175,7 @@ async def test_avoid_duplicated_title(hass: HomeAssistant) -> None: CONF_IGNORED_ZONES: ["zone.work"], CONF_TOLERANCE: 10, }, - unique_id=f"{DOMAIN}_home", + unique_id=f"{DOMAIN}_home_3", ).add_to_hass(hass) with patch( diff --git a/tests/components/prusalink/test_button.py b/tests/components/prusalink/test_button.py index 54f3854161c..f85e0232c74 100644 --- a/tests/components/prusalink/test_button.py +++ b/tests/components/prusalink/test_button.py @@ -93,7 +93,7 @@ async def test_button_resume_cancel( with ( patch(f"pyprusalink.PrusaLink.{method}") as mock_meth, patch( - "homeassistant.components.prusalink.PrusaLinkUpdateCoordinator._fetch_data" + "homeassistant.components.prusalink.coordinator.PrusaLinkUpdateCoordinator._fetch_data" ), ): await hass.services.async_call( diff --git a/tests/components/ps4/test_init.py b/tests/components/ps4/test_init.py index 3a9aac38646..24d45fee5b9 100644 --- a/tests/components/ps4/test_init.py +++ b/tests/components/ps4/test_init.py @@ -52,6 +52,7 @@ MOCK_FLOW_RESULT = { "title": "test_ps4", "data": MOCK_DATA, "options": {}, + "subentries": (), } MOCK_ENTRY_ID = "SomeID" @@ -269,9 +270,7 @@ async def test_send_command(hass: HomeAssistant) -> None: """Test send_command service.""" await setup_mock_component(hass) - mock_func = "{}{}".format( - "homeassistant.components.ps4", ".media_player.PS4Device.async_send_command" - ) + mock_func = "homeassistant.components.ps4.media_player.PS4Device.async_send_command" mock_devices = hass.data[PS4_DATA].devices assert len(mock_devices) == 1 diff --git a/tests/components/ps4/test_media_player.py b/tests/components/ps4/test_media_player.py index 5268306c87a..737cc3c9f1b 100644 --- a/tests/components/ps4/test_media_player.py +++ b/tests/components/ps4/test_media_player.py @@ -194,10 +194,7 @@ async def test_state_standby_is_set(hass: HomeAssistant) -> None: async def test_state_playing_is_set(hass: HomeAssistant) -> None: """Test that state is set to playing.""" 
mock_entity_id = await setup_mock_component(hass) - mock_func = "{}{}".format( - "homeassistant.components.ps4.media_player.", - "pyps4.Ps4Async.async_get_ps_store_data", - ) + mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.async_get_ps_store_data" with patch(mock_func, return_value=None): await mock_ddp_response(hass, MOCK_STATUS_PLAYING) @@ -224,10 +221,7 @@ async def test_state_none_is_set(hass: HomeAssistant) -> None: async def test_media_attributes_are_fetched(hass: HomeAssistant) -> None: """Test that media attributes are fetched.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "{}{}".format( - "homeassistant.components.ps4.media_player.", - "pyps4.Ps4Async.async_get_ps_store_data", - ) + mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.async_get_ps_store_data" # Mock result from fetching data. mock_result = MagicMock() @@ -276,8 +270,7 @@ async def test_media_attributes_are_loaded( patch_load_json_object.return_value = {MOCK_TITLE_ID: MOCK_GAMES_DATA_LOCKED} with patch( - "homeassistant.components.ps4.media_player." - "pyps4.Ps4Async.async_get_ps_store_data", + "homeassistant.components.ps4.media_player.pyps4.Ps4Async.async_get_ps_store_data", return_value=None, ) as mock_fetch: await mock_ddp_response(hass, MOCK_STATUS_PLAYING) @@ -381,9 +374,7 @@ async def test_device_info_assummed_works( async def test_turn_on(hass: HomeAssistant) -> None: """Test that turn on service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "{}{}".format( - "homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.wakeup" - ) + mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.wakeup" with patch(mock_func) as mock_call: await hass.services.async_call( @@ -397,9 +388,7 @@ async def test_turn_on(hass: HomeAssistant) -> None: async def test_turn_off(hass: HomeAssistant) -> None: """Test that turn off service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "{}{}".format( - "homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.standby" - ) + mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.standby" with patch(mock_func) as mock_call: await hass.services.async_call( @@ -413,9 +402,7 @@ async def test_turn_off(hass: HomeAssistant) -> None: async def test_toggle(hass: HomeAssistant) -> None: """Test that toggle service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "{}{}".format( - "homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.toggle" - ) + mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.toggle" with patch(mock_func) as mock_call: await hass.services.async_call( @@ -429,8 +416,8 @@ async def test_toggle(hass: HomeAssistant) -> None: async def test_media_pause(hass: HomeAssistant) -> None: """Test that media pause service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "{}{}".format( - "homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.remote_control" + mock_func = ( + "homeassistant.components.ps4.media_player.pyps4.Ps4Async.remote_control" ) with patch(mock_func) as mock_call: @@ -445,8 +432,8 @@ async def test_media_pause(hass: HomeAssistant) -> None: async def test_media_stop(hass: HomeAssistant) -> None: """Test that media stop service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "{}{}".format( - "homeassistant.components.ps4.media_player.", 
"pyps4.Ps4Async.remote_control" + mock_func = ( + "homeassistant.components.ps4.media_player.pyps4.Ps4Async.remote_control" ) with patch(mock_func) as mock_call: diff --git a/tests/components/pure_energie/test_init.py b/tests/components/pure_energie/test_init.py index 0dbd8a753e6..c0d07248664 100644 --- a/tests/components/pure_energie/test_init.py +++ b/tests/components/pure_energie/test_init.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from gridnet import GridNetConnectionError import pytest -from homeassistant.components.pure_energie.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -32,7 +31,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/purpleair/conftest.py b/tests/components/purpleair/conftest.py index 3d6776dd12e..1809b16bd75 100644 --- a/tests/components/purpleair/conftest.py +++ b/tests/components/purpleair/conftest.py @@ -73,7 +73,7 @@ def config_entry_options_fixture() -> dict[str, Any]: @pytest.fixture(name="get_sensors_response", scope="package") def get_sensors_response_fixture() -> GetSensorsResponse: """Define a fixture to mock an aiopurpleair GetSensorsResponse object.""" - return GetSensorsResponse.parse_raw( + return GetSensorsResponse.model_validate_json( load_fixture("get_sensors_response.json", "purpleair") ) diff --git a/tests/components/purpleair/test_config_flow.py b/tests/components/purpleair/test_config_flow.py index 2345d98b5e1..998cb2b7878 100644 --- a/tests/components/purpleair/test_config_flow.py +++ b/tests/components/purpleair/test_config_flow.py @@ -6,13 +6,15 @@ from aiopurpleair.errors import InvalidApiKeyError, PurpleAirError import pytest from homeassistant.components.purpleair import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr from .conftest import TEST_API_KEY, TEST_SENSOR_INDEX1, TEST_SENSOR_INDEX2 +from tests.common import MockConfigEntry + TEST_LATITUDE = 51.5285582 TEST_LONGITUDE = -0.2416796 @@ -127,19 +129,11 @@ async def test_reauth( mock_aiopurpleair, check_api_key_errors, check_api_key_mock, - config_entry, + config_entry: MockConfigEntry, setup_config_entry, ) -> None: """Test re-auth (including errors).""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data={"api_key": TEST_API_KEY}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/purpleair/test_diagnostics.py b/tests/components/purpleair/test_diagnostics.py index 599549bb723..6271a63d652 100644 --- a/tests/components/purpleair/test_diagnostics.py +++ b/tests/components/purpleair/test_diagnostics.py @@ -37,6 +37,8 @@ async def test_entry_diagnostics( "disabled_by": None, "created_at": ANY, "modified_at": ANY, + "discovery_keys": {}, + "subentries": [], }, "data": { "fields": [ diff --git a/tests/components/push/test_camera.py 
b/tests/components/push/test_camera.py index df296e7cb57..0088aa6a9c2 100644 --- a/tests/components/push/test_camera.py +++ b/tests/components/push/test_camera.py @@ -4,8 +4,8 @@ from datetime import timedelta from http import HTTPStatus import io -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util diff --git a/tests/components/pushover/test_config_flow.py b/tests/components/pushover/test_config_flow.py index 14347084288..58485bfb427 100644 --- a/tests/components/pushover/test_config_flow.py +++ b/tests/components/pushover/test_config_flow.py @@ -149,14 +149,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -180,14 +173,7 @@ async def test_reauth_failed(hass: HomeAssistant, mock_pushover: MagicMock) -> N ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -223,14 +209,7 @@ async def test_reauth_with_existing_config(hass: HomeAssistant) -> None: ) entry2.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) + result = await entry2.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/pvoutput/test_config_flow.py b/tests/components/pvoutput/test_config_flow.py index 20e99f8e497..fc4335de00d 100644 --- a/tests/components/pvoutput/test_config_flow.py +++ b/tests/components/pvoutput/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock from pvo import PVOutputAuthenticationError, PVOutputConnectionError from homeassistant.components.pvoutput.const import CONF_SYSTEM_ID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -150,15 +150,7 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -192,15 +184,7 @@ async def test_reauth_with_authentication_error( """ mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": 
mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -244,15 +228,7 @@ async def test_reauth_api_error( """Test API error during reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" diff --git a/tests/components/pyload/test_config_flow.py b/tests/components/pyload/test_config_flow.py index 8c775412371..5ada856d78e 100644 --- a/tests/components/pyload/test_config_flow.py +++ b/tests/components/pyload/test_config_flow.py @@ -6,12 +6,7 @@ from pyloadapi.exceptions import CannotConnect, InvalidAuth, ParserError import pytest from homeassistant.components.pyload.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import ( - SOURCE_IMPORT, - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -180,14 +175,7 @@ async def test_reauth( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -222,14 +210,7 @@ async def test_reauth_errors( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -266,17 +247,10 @@ async def test_reconfiguration( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - ) + result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -308,17 +282,10 @@ async def test_reconfigure_errors( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - ) + result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" mock_pyloadapi.login.side_effect = side_effect result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/python_script/test_init.py 
b/tests/components/python_script/test_init.py index c4dc00c448a..2d151b4b81e 100644 --- a/tests/components/python_script/test_init.py +++ b/tests/components/python_script/test_init.py @@ -688,3 +688,27 @@ async def test_prohibited_augmented_assignment_operations( hass.async_add_executor_job(execute, hass, "aug_assign_prohibited.py", case, {}) await hass.async_block_till_done(wait_background_tasks=True) assert error in caplog.text + + +async def test_import_allow_strptime( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test calling datetime.datetime.strptime works.""" + source = """ +test_date = datetime.datetime.strptime('2024-04-01', '%Y-%m-%d') +logger.info(f'Date {test_date}') + """ + hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Error executing script: Not allowed to import _strptime" not in caplog.text + assert "Date 2024-04-01 00:00:00" in caplog.text + + +async def test_no_other_imports_allowed( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test imports are not allowed.""" + source = "import sys" + hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Error executing script: Not allowed to import sys" in caplog.text diff --git a/tests/components/qnap_qsw/test_sensor.py b/tests/components/qnap_qsw/test_sensor.py index 646058add62..16335e878fd 100644 --- a/tests/components/qnap_qsw/test_sensor.py +++ b/tests/components/qnap_qsw/test_sensor.py @@ -1,19 +1,27 @@ """The sensor tests for the QNAP QSW platform.""" +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.qnap_qsw.const import ATTR_MAX +from homeassistant.components.qnap_qsw.const import ATTR_MAX, DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir -from .util import async_init_integration +from .util import async_init_integration, init_config_entry @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_qnap_qsw_create_sensors( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, ) -> None: """Test creation of sensors.""" + await hass.config.async_set_time_zone("UTC") + freezer.move_to("2024-07-25 12:00:00+00:00") await async_init_integration(hass) state = hass.states.get("sensor.qsw_m408_4c_fan_1_speed") @@ -45,8 +53,8 @@ async def test_qnap_qsw_create_sensors( state = hass.states.get("sensor.qsw_m408_4c_tx_speed") assert state.state == "0" - state = hass.states.get("sensor.qsw_m408_4c_uptime") - assert state.state == "91" + state = hass.states.get("sensor.qsw_m408_4c_uptime_timestamp") + assert state.state == "2024-07-25T11:58:29+00:00" # LACP Ports state = hass.states.get("sensor.qsw_m408_4c_lacp_port_1_link_speed") @@ -373,3 +381,60 @@ async def test_qnap_qsw_create_sensors( state = hass.states.get("sensor.qsw_m408_4c_port_12_tx_speed") assert state.state == "0" + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_deprecated_uptime_seconds( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, +) -> None: + """Test deprecation warning of the Uptime seconds sensor entity.""" + original_id = "sensor.qsw_m408_4c_uptime" + domain = Platform.SENSOR + + config_entry = init_config_entry(hass) + + entity = 
entity_registry.async_get_or_create( + domain=domain, + platform=DOMAIN, + unique_id=original_id, + config_entry=config_entry, + suggested_object_id=original_id, + disabled_by=None, + ) + + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + + with patch( + "homeassistant.components.qnap_qsw.sensor.automations_with_entity", + return_value=["item"], + ): + await async_init_integration(hass, config_entry=config_entry) + assert issue_registry.async_get_issue( + DOMAIN, f"uptime_seconds_deprecated_{entity.entity_id}_item" + ) + + +async def test_cleanup_deprecated_uptime_seconds( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Test cleanup of the Uptime seconds sensor entity.""" + original_id = "sensor.qsw_m408_4c_uptime_seconds" + domain = Platform.SENSOR + + config_entry = init_config_entry(hass) + + entity_registry.async_get_or_create( + domain=domain, + platform=DOMAIN, + unique_id=original_id, + config_entry=config_entry, + suggested_object_id=original_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + + await async_init_integration(hass, config_entry=config_entry) diff --git a/tests/components/qnap_qsw/util.py b/tests/components/qnap_qsw/util.py index 63238bb30a1..5132c1061ec 100644 --- a/tests/components/qnap_qsw/util.py +++ b/tests/components/qnap_qsw/util.py @@ -491,11 +491,10 @@ USERS_VERIFICATION_MOCK = { } -async def async_init_integration( +def init_config_entry( hass: HomeAssistant, -) -> None: - """Set up the QNAP QSW integration in Home Assistant.""" - +) -> MockConfigEntry: + """Set up the QNAP QSW entry in Home Assistant.""" config_entry = MockConfigEntry( data=CONFIG, domain=DOMAIN, @@ -503,6 +502,18 @@ async def async_init_integration( ) config_entry.add_to_hass(hass) + return config_entry + + +async def async_init_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry | None = None, +) -> None: + """Set up the QNAP QSW integration in Home Assistant.""" + + if config_entry is None: + config_entry = init_config_entry(hass) + with ( patch( "homeassistant.components.qnap_qsw.QnapQswApi.get_firmware_condition", diff --git a/tests/components/rachio/test_config_flow.py b/tests/components/rachio/test_config_flow.py index 1eaec1bc46e..586b31b092f 100644 --- a/tests/components/rachio/test_config_flow.py +++ b/tests/components/rachio/test_config_flow.py @@ -183,3 +183,16 @@ async def test_form_homekit_ignored(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_options_flow(hass: HomeAssistant) -> None: + """Test option flow.""" + entry = MockConfigEntry(domain=DOMAIN, data={CONF_API_KEY: "api_key"}) + entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # This should be improved at a later stage to increase test coverage + hass.config_entries.options.async_abort(result["flow_id"]) diff --git a/tests/components/radarr/test_config_flow.py b/tests/components/radarr/test_config_flow.py index 407b7b50c48..096c78e1c4a 100644 --- a/tests/components/radarr/test_config_flow.py +++ b/tests/components/radarr/test_config_flow.py @@ -6,7 +6,7 @@ from aiopyarr import exceptions import pytest from homeassistant.components.radarr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER 
+from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -137,21 +137,30 @@ async def test_zero_conf(hass: HomeAssistant) -> None: assert result["data"] == CONF_DATA +async def test_url_rewrite(hass: HomeAssistant) -> None: + """Test auth flow url rewrite.""" + with patch( + "homeassistant.components.radarr.config_flow.RadarrClient.async_try_zeroconf", + return_value=("v3", API_KEY, "/test"), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + data={CONF_URL: "https://192.168.1.100/test", CONF_VERIFY_SSL: False}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"][CONF_URL] == "https://192.168.1.100:443/test" + + @pytest.mark.freeze_time("2021-12-03 00:00:00+00:00") async def test_full_reauth_flow_implementation( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test the manual reauth flow from start to finish.""" entry = await setup_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/rainbird/test_config_flow.py b/tests/components/rainbird/test_config_flow.py index 87506ad656c..6e76943f202 100644 --- a/tests/components/rainbird/test_config_flow.py +++ b/tests/components/rainbird/test_config_flow.py @@ -56,7 +56,7 @@ async def mock_setup() -> AsyncGenerator[AsyncMock]: yield mock_setup -async def complete_flow(hass: HomeAssistant) -> FlowResult: +async def complete_flow(hass: HomeAssistant, password: str = PASSWORD) -> FlowResult: """Start the config flow and enter the host and password.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -268,6 +268,59 @@ async def test_controller_cannot_connect( assert not mock_setup.mock_calls +async def test_controller_invalid_auth( + hass: HomeAssistant, + mock_setup: Mock, + responses: list[AiohttpClientMockResponse], + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test an invalid password.""" + + responses.clear() + responses.extend( + [ + # Incorrect password response + AiohttpClientMockResponse("POST", URL, status=HTTPStatus.FORBIDDEN), + AiohttpClientMockResponse("POST", URL, status=HTTPStatus.FORBIDDEN), + # Second attempt with the correct password + mock_response(SERIAL_RESPONSE), + mock_json_response(WIFI_PARAMS_RESPONSE), + ] + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") + assert "flow_id" in result + + # Simulate authentication error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST, CONF_PASSWORD: "wrong-password"}, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert result.get("errors") == {"base": "invalid_auth"} + + assert not mock_setup.mock_calls + + # Correct the form and enter the password again 
and setup completes + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST, CONF_PASSWORD: PASSWORD}, + ) + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == HOST + assert "result" in result + assert dict(result["result"].data) == CONFIG_ENTRY_DATA + assert result["result"].unique_id == MAC_ADDRESS_UNIQUE_ID + + assert len(mock_setup.mock_calls) == 1 + + async def test_controller_timeout( hass: HomeAssistant, mock_setup: Mock, @@ -286,6 +339,67 @@ async def test_controller_timeout( assert not mock_setup.mock_calls +@pytest.mark.parametrize( + ("responses", "config_entry_data"), + [ + ( + [ + # First attempt simulate the wrong password + AiohttpClientMockResponse("POST", URL, status=HTTPStatus.FORBIDDEN), + AiohttpClientMockResponse("POST", URL, status=HTTPStatus.FORBIDDEN), + # Second attempt simulate the correct password + mock_response(SERIAL_RESPONSE), + mock_json_response(WIFI_PARAMS_RESPONSE), + ], + { + **CONFIG_ENTRY_DATA, + CONF_PASSWORD: "old-password", + }, + ), + ], +) +async def test_reauth_flow( + hass: HomeAssistant, + mock_setup: Mock, + config_entry: MockConfigEntry, +) -> None: + """Test the controller is setup correctly.""" + assert config_entry.data.get(CONF_PASSWORD) == "old-password" + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result.get("step_id") == "reauth_confirm" + assert not result.get("errors") + + # Simulate the wrong password + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "incorrect_password"}, + ) + assert result.get("type") == FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + assert result.get("errors") == {"base": "invalid_auth"} + + # Enter the correct password and complete the flow + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: PASSWORD}, + ) + assert result.get("type") == FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + entry = entries[0] + assert entry.unique_id == MAC_ADDRESS_UNIQUE_ID + assert entry.data.get(CONF_PASSWORD) == PASSWORD + + assert len(mock_setup.mock_calls) == 1 + + async def test_options_flow(hass: HomeAssistant, mock_setup: Mock) -> None: """Test config flow options.""" diff --git a/tests/components/rainbird/test_init.py b/tests/components/rainbird/test_init.py index 5b2e2ea6d1b..01e0c4458e4 100644 --- a/tests/components/rainbird/test_init.py +++ b/tests/components/rainbird/test_init.py @@ -45,17 +45,19 @@ async def test_init_success( @pytest.mark.parametrize( - ("config_entry_data", "responses", "config_entry_state"), + ("config_entry_data", "responses", "config_entry_state", "config_flow_steps"), [ ( CONFIG_ENTRY_DATA, [mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE)], ConfigEntryState.SETUP_RETRY, + [], ), ( CONFIG_ENTRY_DATA, [mock_response_error(HTTPStatus.INTERNAL_SERVER_ERROR)], ConfigEntryState.SETUP_RETRY, + [], ), ( CONFIG_ENTRY_DATA, [ @@ -64,6 +66,7 @@ async def test_init_success( mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE), ], ConfigEntryState.SETUP_RETRY, + [], ), ( CONFIG_ENTRY_DATA, [ @@ -72,6 +75,13 @@ async def test_init_success( mock_response_error(HTTPStatus.INTERNAL_SERVER_ERROR), ], ConfigEntryState.SETUP_RETRY, + [], + ), + ( + CONFIG_ENTRY_DATA, + 
[mock_response_error(HTTPStatus.FORBIDDEN)], + ConfigEntryState.SETUP_ERROR, + ["reauth_confirm"], ), ], ids=[ @@ -79,17 +89,22 @@ async def test_init_success( "server-error", "coordinator-unavailable", "coordinator-server-error", + "forbidden", ], ) async def test_communication_failure( hass: HomeAssistant, config_entry: MockConfigEntry, config_entry_state: list[ConfigEntryState], + config_flow_steps: list[str], ) -> None: """Test unable to talk to device on startup, which fails setup.""" await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state == config_entry_state + flows = hass.config_entries.flow.async_progress() + assert [flow["step_id"] for flow in flows] == config_flow_steps + @pytest.mark.parametrize( ("config_entry_unique_id", "config_entry_data"), diff --git a/tests/components/rainforest_eagle/test_diagnostics.py b/tests/components/rainforest_eagle/test_diagnostics.py index ed13c33f7b8..5aa460415b3 100644 --- a/tests/components/rainforest_eagle/test_diagnostics.py +++ b/tests/components/rainforest_eagle/test_diagnostics.py @@ -27,7 +27,7 @@ async def test_entry_diagnostics( config_entry_dict["data"][CONF_CLOUD_ID] = REDACTED assert result == { - "config_entry": config_entry_dict, + "config_entry": config_entry_dict | {"discovery_keys": {}}, "data": { var["Name"]: var["Value"] for var in MOCK_200_RESPONSE_WITHOUT_PRICE.values() diff --git a/tests/components/rainforest_raven/__init__.py b/tests/components/rainforest_raven/__init__.py index 9d40652b42d..ead1bb2ad3f 100644 --- a/tests/components/rainforest_raven/__init__.py +++ b/tests/components/rainforest_raven/__init__.py @@ -1,5 +1,7 @@ """Tests for the Rainforest RAVEn component.""" +from unittest.mock import AsyncMock + from homeassistant.components.rainforest_raven.const import DOMAIN from homeassistant.const import CONF_DEVICE, CONF_MAC @@ -14,7 +16,7 @@ from .const import ( SUMMATION, ) -from tests.common import AsyncMock, MockConfigEntry +from tests.common import MockConfigEntry def create_mock_device() -> AsyncMock: @@ -42,4 +44,5 @@ def create_mock_entry(no_meters: bool = False) -> MockConfigEntry: CONF_DEVICE: DISCOVERY_INFO.device, CONF_MAC: [] if no_meters else [METER_INFO[None].meter_mac_id.hex()], }, + entry_id="01JADXBJSPYEBAFPKGXDJWZBQ8", ) diff --git a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..abf8e380916 --- /dev/null +++ b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr @@ -0,0 +1,111 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'device': '/dev/ttyACM0', + 'mac': '**REDACTED**', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'rainforest_raven', + 'entry_id': '01JADXBJSPYEBAFPKGXDJWZBQ8', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'data': dict({ + 'Meters': dict({ + '**REDACTED0**': dict({ + 'CurrentSummationDelivered': dict({ + 'device_mac_id': '**REDACTED**', + 'meter_mac_id': '**REDACTED**', + 'summation_delivered': '23456.7890', + 'summation_received': '00000.0000', + 'time_stamp': None, + }), + 'InstantaneousDemand': dict({ + 'demand': '1.2345', + 'device_mac_id': '**REDACTED**', + 'meter_mac_id': '**REDACTED**', + 'time_stamp': None, + }), + 
'PriceCluster': dict({ + 'currency': dict({ + '__type': "", + 'repr': "", + }), + 'device_mac_id': '**REDACTED**', + 'meter_mac_id': '**REDACTED**', + 'price': '0.10', + 'rate_label': 'Set by user', + 'tier': 3, + 'tier_label': 'Set by user', + 'time_stamp': None, + }), + }), + }), + 'NetworkInfo': dict({ + 'channel': 13, + 'coord_mac_id': None, + 'description': None, + 'device_mac_id': '**REDACTED**', + 'ext_pan_id': None, + 'link_strength': 100, + 'short_addr': None, + 'status': None, + 'status_code': None, + }), + }), + }) +# --- +# name: test_entry_diagnostics_no_meters + dict({ + 'config_entry': dict({ + 'data': dict({ + 'device': '/dev/ttyACM0', + 'mac': '**REDACTED**', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'rainforest_raven', + 'entry_id': '01JADXBJSPYEBAFPKGXDJWZBQ8', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'data': dict({ + 'Meters': dict({ + }), + 'NetworkInfo': dict({ + 'channel': 13, + 'coord_mac_id': None, + 'description': None, + 'device_mac_id': '**REDACTED**', + 'ext_pan_id': None, + 'link_strength': 100, + 'short_addr': None, + 'status': None, + 'status_code': None, + }), + }), + }) +# --- diff --git a/tests/components/rainforest_raven/snapshots/test_init.ambr b/tests/components/rainforest_raven/snapshots/test_init.ambr new file mode 100644 index 00000000000..768bbc729d4 --- /dev/null +++ b/tests/components/rainforest_raven/snapshots/test_init.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_device_registry[None-0] + list([ + ]) +# --- +# name: test_device_registry[device_info0-1] + list([ + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '2.7.3', + 'id': , + 'identifiers': set({ + tuple( + 'rainforest_raven', + 'abcdef0123456789', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Rainforest Automation, Inc.', + 'model': 'Z105-2-EMU2-LEDD_JM', + 'model_id': 'Z105-2-EMU2-LEDD_JM', + 'name': 'RAVEn Device', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '2.0.0 (7400)', + 'via_device_id': None, + }), + ]) +# --- diff --git a/tests/components/rainforest_raven/snapshots/test_sensor.ambr b/tests/components/rainforest_raven/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..34a5e031885 --- /dev/null +++ b/tests/components/rainforest_raven/snapshots/test_sensor.ambr @@ -0,0 +1,257 @@ +# serializer version: 1 +# name: test_sensors[sensor.raven_device_meter_power_demand-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.raven_device_meter_power_demand', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter power demand', + 'platform': 'rainforest_raven', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_demand', + 'unique_id': 
'1234567890abcdef.InstantaneousDemand.demand', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.raven_device_meter_power_demand-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'RAVEn Device Meter power demand', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.raven_device_meter_power_demand', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2345', + }) +# --- +# name: test_sensors[sensor.raven_device_meter_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.raven_device_meter_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter price', + 'platform': 'rainforest_raven', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_price', + 'unique_id': '1234567890abcdef.PriceCluster.price', + 'unit_of_measurement': 'USD/kWh', + }) +# --- +# name: test_sensors[sensor.raven_device_meter_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'RAVEn Device Meter price', + 'rate_label': 'Set by user', + 'state_class': , + 'tier': 3, + 'unit_of_measurement': 'USD/kWh', + }), + 'context': , + 'entity_id': 'sensor.raven_device_meter_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.10', + }) +# --- +# name: test_sensors[sensor.raven_device_meter_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.raven_device_meter_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter signal strength', + 'platform': 'rainforest_raven', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'signal_strength', + 'unique_id': 'abcdef0123456789.NetworkInfo.link_strength', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.raven_device_meter_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'channel': 13, + 'friendly_name': 'RAVEn Device Meter signal strength', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.raven_device_meter_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[sensor.raven_device_total_meter_energy_delivered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.raven_device_total_meter_energy_delivered', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total meter energy delivered', + 'platform': 'rainforest_raven', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_delivered', + 'unique_id': '1234567890abcdef.CurrentSummationDelivered.summation_delivered', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.raven_device_total_meter_energy_delivered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'RAVEn Device Total meter energy delivered', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.raven_device_total_meter_energy_delivered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23456.7890', + }) +# --- +# name: test_sensors[sensor.raven_device_total_meter_energy_received-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.raven_device_total_meter_energy_received', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total meter energy received', + 'platform': 'rainforest_raven', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_received', + 'unique_id': '1234567890abcdef.CurrentSummationDelivered.summation_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.raven_device_total_meter_energy_received-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'RAVEn Device Total meter energy received', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.raven_device_total_meter_energy_received', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '00000.0000', + }) +# --- diff --git a/tests/components/rainforest_raven/test_coordinator.py b/tests/components/rainforest_raven/test_coordinator.py deleted file mode 100644 index db70118f7b9..00000000000 --- a/tests/components/rainforest_raven/test_coordinator.py +++ /dev/null @@ -1,109 +0,0 @@ -"""Tests for the Rainforest RAVEn data coordinator.""" - -import asyncio -import functools -from unittest.mock import AsyncMock - -from aioraven.device import RAVEnConnectionError -import pytest - -from homeassistant.components.rainforest_raven.coordinator import RAVEnDataCoordinator -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady - -from . 
import create_mock_entry - - -@pytest.mark.usefixtures("mock_device") -async def test_coordinator_device_info(hass: HomeAssistant) -> None: - """Test reporting device information from the coordinator.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - assert coordinator.device_fw_version is None - assert coordinator.device_hw_version is None - assert coordinator.device_info is None - assert coordinator.device_mac_address is None - assert coordinator.device_manufacturer is None - assert coordinator.device_model is None - assert coordinator.device_name == "RAVEn Device" - - await coordinator.async_config_entry_first_refresh() - - assert coordinator.device_fw_version == "2.0.0 (7400)" - assert coordinator.device_hw_version == "2.7.3" - assert coordinator.device_info - assert coordinator.device_mac_address - assert coordinator.device_manufacturer == "Rainforest Automation, Inc." - assert coordinator.device_model == "Z105-2-EMU2-LEDD_JM" - assert coordinator.device_name == "RAVEn Device" - - -async def test_coordinator_cache_device( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test that the device isn't re-opened for subsequent refreshes.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - await coordinator.async_config_entry_first_refresh() - assert mock_device.get_network_info.call_count == 1 - assert mock_device.open.call_count == 1 - - await coordinator.async_refresh() - assert mock_device.get_network_info.call_count == 2 - assert mock_device.open.call_count == 1 - - -async def test_coordinator_device_error_setup( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test handling of a device error during initialization.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - mock_device.get_network_info.side_effect = RAVEnConnectionError - with pytest.raises(ConfigEntryNotReady): - await coordinator.async_config_entry_first_refresh() - - -async def test_coordinator_device_error_update( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test handling of a device error during an update.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - await coordinator.async_config_entry_first_refresh() - assert coordinator.last_update_success is True - - mock_device.get_network_info.side_effect = RAVEnConnectionError - await coordinator.async_refresh() - assert coordinator.last_update_success is False - - -async def test_coordinator_device_timeout_update( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test handling of a device timeout during an update.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - await coordinator.async_config_entry_first_refresh() - assert coordinator.last_update_success is True - - mock_device.get_network_info.side_effect = functools.partial(asyncio.sleep, 10) - await coordinator.async_refresh() - assert coordinator.last_update_success is False - - -async def test_coordinator_comm_error( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test handling of an error parsing or reading raw device data.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - mock_device.synchronize.side_effect = RAVEnConnectionError - with pytest.raises(ConfigEntryNotReady): - await coordinator.async_config_entry_first_refresh() diff --git a/tests/components/rainforest_raven/test_diagnostics.py 
b/tests/components/rainforest_raven/test_diagnostics.py index 86a86032ac6..ae231b3c8c2 100644 --- a/tests/components/rainforest_raven/test_diagnostics.py +++ b/tests/components/rainforest_raven/test_diagnostics.py @@ -1,22 +1,24 @@ """Test the Rainforest Eagle diagnostics.""" -from dataclasses import asdict +from unittest.mock import AsyncMock import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props -from homeassistant.components.diagnostics import REDACTED -from homeassistant.const import CONF_MAC from homeassistant.core import HomeAssistant from . import create_mock_entry -from .const import DEMAND, NETWORK_INFO, PRICE_CLUSTER, SUMMATION +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @pytest.fixture -async def mock_entry_no_meters(hass: HomeAssistant, mock_device): +async def mock_entry_no_meters( + hass: HomeAssistant, mock_device: AsyncMock +) -> MockConfigEntry: """Mock a RAVEn config entry with no meters.""" mock_entry = create_mock_entry(True) mock_entry.add_to_hass(hass) @@ -28,61 +30,23 @@ async def mock_entry_no_meters(hass: HomeAssistant, mock_device): async def test_entry_diagnostics_no_meters( hass: HomeAssistant, hass_client: ClientSessionGenerator, - mock_device, - mock_entry_no_meters, + mock_entry_no_meters: MockConfigEntry, + snapshot: SnapshotAssertion, ) -> None: """Test RAVEn diagnostics before the coordinator has updated.""" result = await get_diagnostics_for_config_entry( hass, hass_client, mock_entry_no_meters ) - - config_entry_dict = mock_entry_no_meters.as_dict() - config_entry_dict["data"][CONF_MAC] = REDACTED - - assert result == { - "config_entry": config_entry_dict, - "data": { - "Meters": {}, - "NetworkInfo": {**asdict(NETWORK_INFO), "device_mac_id": REDACTED}, - }, - } + assert result == snapshot(exclude=props("created_at", "modified_at")) async def test_entry_diagnostics( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_device, mock_entry + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_entry: MockConfigEntry, + snapshot: SnapshotAssertion, ) -> None: """Test RAVEn diagnostics.""" result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) - config_entry_dict = mock_entry.as_dict() - config_entry_dict["data"][CONF_MAC] = REDACTED - - assert result == { - "config_entry": config_entry_dict, - "data": { - "Meters": { - "**REDACTED0**": { - "CurrentSummationDelivered": { - **asdict(SUMMATION), - "device_mac_id": REDACTED, - "meter_mac_id": REDACTED, - }, - "InstantaneousDemand": { - **asdict(DEMAND), - "device_mac_id": REDACTED, - "meter_mac_id": REDACTED, - }, - "PriceCluster": { - **asdict(PRICE_CLUSTER), - "device_mac_id": REDACTED, - "meter_mac_id": REDACTED, - "currency": { - "__type": str(type(PRICE_CLUSTER.currency)), - "repr": repr(PRICE_CLUSTER.currency), - }, - }, - }, - }, - "NetworkInfo": {**asdict(NETWORK_INFO), "device_mac_id": REDACTED}, - }, - } + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/rainforest_raven/test_init.py b/tests/components/rainforest_raven/test_init.py index 974c45150a6..acd1f606a07 100644 --- a/tests/components/rainforest_raven/test_init.py +++ b/tests/components/rainforest_raven/test_init.py @@ -1,8 +1,19 @@ """Tests for the Rainforest RAVEn component initialisation.""" +from unittest.mock import AsyncMock + +from aioraven.data import DeviceInfo as RAVenDeviceInfo 
+from aioraven.device import RAVEnConnectionError +import pytest +from syrupy.assertion import SnapshotAssertion + from homeassistant.components.rainforest_raven.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import create_mock_entry +from .const import DEVICE_INFO from tests.common import MockConfigEntry @@ -18,4 +29,55 @@ async def test_load_unload_entry( await hass.async_block_till_done() assert mock_entry.state is ConfigEntryState.NOT_LOADED - assert not hass.data.get(DOMAIN) + + +@pytest.mark.parametrize( + ("device_info", "device_count"), + [(DEVICE_INFO, 1), (None, 0)], +) +async def test_device_registry( + hass: HomeAssistant, + mock_device: AsyncMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, + device_info: RAVenDeviceInfo | None, + device_count: int, +) -> None: + """Test device registry, including if get_device_info returns None.""" + mock_device.get_device_info.return_value = device_info + entry = create_mock_entry() + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.LOADED + + assert len(hass.states.async_all()) == 5 + + entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id) + assert len(entries) == device_count + assert entries == snapshot + + +async def test_synchronize_error(hass: HomeAssistant, mock_device: AsyncMock) -> None: + """Test handling of an error parsing or reading raw device data.""" + entry = create_mock_entry() + entry.add_to_hass(hass) + + mock_device.synchronize.side_effect = RAVEnConnectionError + + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_get_network_info_error( + hass: HomeAssistant, mock_device: AsyncMock +) -> None: + """Test handling of a device error during initialization.""" + entry = create_mock_entry() + entry.add_to_hass(hass) + + mock_device.get_network_info.side_effect = RAVEnConnectionError + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/rainforest_raven/test_sensor.py b/tests/components/rainforest_raven/test_sensor.py index 3b859621cb4..2319b628374 100644 --- a/tests/components/rainforest_raven/test_sensor.py +++ b/tests/components/rainforest_raven/test_sensor.py @@ -1,36 +1,102 @@ """Tests for the Rainforest RAVEn sensors.""" -import pytest +from datetime import timedelta +from unittest.mock import AsyncMock +from aioraven.device import RAVEnConnectionError +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .const import NETWORK_INFO + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.usefixtures("mock_entry") -async def test_sensors(hass: HomeAssistant) -> None: +async def test_sensors( + hass: HomeAssistant, + mock_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: """Test the sensors.""" assert len(hass.states.async_all()) == 5 - demand = hass.states.get("sensor.raven_device_meter_power_demand") - assert demand is not None - assert demand.state == "1.2345" - assert 
demand.attributes["unit_of_measurement"] == "kW" + await snapshot_platform(hass, entity_registry, snapshot, mock_entry.entry_id) - delivered = hass.states.get("sensor.raven_device_total_meter_energy_delivered") - assert delivered is not None - assert delivered.state == "23456.7890" - assert delivered.attributes["unit_of_measurement"] == "kWh" - received = hass.states.get("sensor.raven_device_total_meter_energy_received") - assert received is not None - assert received.state == "00000.0000" - assert received.attributes["unit_of_measurement"] == "kWh" +@pytest.mark.usefixtures("mock_entry") +async def test_device_update_error( + hass: HomeAssistant, + mock_device: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test handling of a device error during an update.""" + mock_device.get_network_info.side_effect = (RAVEnConnectionError, NETWORK_INFO) - price = hass.states.get("sensor.raven_device_meter_price") - assert price is not None - assert price.state == "0.10" - assert price.attributes["unit_of_measurement"] == "USD/kWh" + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state != STATE_UNAVAILABLE for state in states) - signal = hass.states.get("sensor.raven_device_meter_signal_strength") - assert signal is not None - assert signal.state == "100" - assert signal.attributes["unit_of_measurement"] == "%" + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state == STATE_UNAVAILABLE for state in states) + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state != STATE_UNAVAILABLE for state in states) + + +@pytest.mark.usefixtures("mock_entry") +async def test_device_update_timeout( + hass: HomeAssistant, mock_device: AsyncMock, freezer: FrozenDateTimeFactory +) -> None: + """Test handling of a device timeout during an update.""" + mock_device.get_network_info.side_effect = (TimeoutError, NETWORK_INFO) + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state != STATE_UNAVAILABLE for state in states) + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state == STATE_UNAVAILABLE for state in states) + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state != STATE_UNAVAILABLE for state in states) + + +@pytest.mark.usefixtures("mock_entry") +async def test_device_cache( + hass: HomeAssistant, mock_device: AsyncMock, freezer: FrozenDateTimeFactory +) -> None: + """Test that the device isn't re-opened for subsequent refreshes.""" + assert mock_device.get_network_info.call_count == 1 + assert mock_device.open.call_count == 1 + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_device.get_network_info.call_count == 2 + assert mock_device.open.call_count == 1 diff --git a/tests/components/rainmachine/snapshots/test_diagnostics.ambr b/tests/components/rainmachine/snapshots/test_diagnostics.ambr index 9b5b5edc0c4..681805996f1 100644 --- a/tests/components/rainmachine/snapshots/test_diagnostics.ambr +++ b/tests/components/rainmachine/snapshots/test_diagnostics.ambr @@ -1131,6 +1131,8 @@ 
'ssl': True, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'rainmachine', 'entry_id': '81bd010ed0a63b705f6da8407cb26d4b', 'minor_version': 1, @@ -1142,6 +1144,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, @@ -2260,6 +2264,8 @@ 'ssl': True, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'rainmachine', 'entry_id': '81bd010ed0a63b705f6da8407cb26d4b', 'minor_version': 1, @@ -2271,6 +2277,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/recollect_waste/test_diagnostics.py b/tests/components/recollect_waste/test_diagnostics.py index 2b92892b1d1..a57e289ec04 100644 --- a/tests/components/recollect_waste/test_diagnostics.py +++ b/tests/components/recollect_waste/test_diagnostics.py @@ -33,6 +33,8 @@ async def test_entry_diagnostics( "disabled_by": None, "created_at": ANY, "modified_at": ANY, + "discovery_keys": {}, + "subentries": [], }, "data": [ { diff --git a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py index a2cf41578c7..9e287d13594 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py +++ b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py @@ -189,6 +189,9 @@ async def test_delete_metadata_duplicates( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( "homeassistant.components.recorder.core.create_engine", new=_create_engine_28, @@ -306,6 +309,9 @@ async def test_delete_metadata_duplicates_many( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( "homeassistant.components.recorder.core.create_engine", new=_create_engine_28, diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index 18e58d9e572..fbb0991c960 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -428,14 +428,6 @@ def get_schema_module_path(schema_version_postfix: str) -> str: return f"tests.components.recorder.db_schema_{schema_version_postfix}" -@dataclass(slots=True) -class MockMigrationTask(migration.MigrationTask): - """Mock migration task which does nothing.""" - - def run(self, instance: Recorder) -> None: - """Run migration task.""" - - @contextmanager def old_db_schema(schema_version_postfix: str) -> Iterator[None]: """Fixture to initialize the db with the old schema.""" @@ -445,16 +437,14 @@ def old_db_schema(schema_version_postfix: str) -> Iterator[None]: with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, 
"Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch( CREATE_ENGINE_TARGET, new=partial( diff --git a/tests/components/recorder/db_schema_16.py b/tests/components/recorder/db_schema_16.py index ffee438f2e9..d7ca35c9341 100644 --- a/tests/components/recorder/db_schema_16.py +++ b/tests/components/recorder/db_schema_16.py @@ -348,8 +348,6 @@ class LazyState(State): __slots__ = [ "_row", - "entity_id", - "state", "_attributes", "_last_changed", "_last_updated", diff --git a/tests/components/recorder/db_schema_18.py b/tests/components/recorder/db_schema_18.py index 09cd41d9e33..adb71dffb9e 100644 --- a/tests/components/recorder/db_schema_18.py +++ b/tests/components/recorder/db_schema_18.py @@ -361,8 +361,6 @@ class LazyState(State): __slots__ = [ "_row", - "entity_id", - "state", "_attributes", "_last_changed", "_last_updated", diff --git a/tests/components/recorder/db_schema_22.py b/tests/components/recorder/db_schema_22.py index d05cb48ff6f..c0d607b12a7 100644 --- a/tests/components/recorder/db_schema_22.py +++ b/tests/components/recorder/db_schema_22.py @@ -480,8 +480,6 @@ class LazyState(State): __slots__ = [ "_row", - "entity_id", - "state", "_attributes", "_last_changed", "_last_updated", diff --git a/tests/components/recorder/db_schema_23.py b/tests/components/recorder/db_schema_23.py index 9dffadaa0cc..f60b7b49df4 100644 --- a/tests/components/recorder/db_schema_23.py +++ b/tests/components/recorder/db_schema_23.py @@ -470,8 +470,6 @@ class LazyState(State): __slots__ = [ "_row", - "entity_id", - "state", "_attributes", "_last_changed", "_last_updated", diff --git a/tests/components/recorder/db_schema_23_with_newer_columns.py b/tests/components/recorder/db_schema_23_with_newer_columns.py index 4343f53d00d..4cc1074de41 100644 --- a/tests/components/recorder/db_schema_23_with_newer_columns.py +++ b/tests/components/recorder/db_schema_23_with_newer_columns.py @@ -594,8 +594,6 @@ class LazyState(State): __slots__ = [ "_row", - "entity_id", - "state", "_attributes", "_last_changed", "_last_updated", diff --git a/tests/components/recorder/db_schema_30.py b/tests/components/recorder/db_schema_30.py index 2668f610dfd..97c33334111 100644 --- a/tests/components/recorder/db_schema_30.py +++ b/tests/components/recorder/db_schema_30.py @@ -9,7 +9,6 @@ from __future__ import annotations from collections.abc import Callable from datetime import datetime, timedelta import logging -import time from typing import Any, Self, TypedDict, cast, overload import ciso8601 @@ -381,7 +380,7 @@ class States(Base): # type: ignore[misc,valid-type] ) # *** Not originally in v30, only added for recorder to startup ok last_updated = Column(DATETIME_TYPE, default=dt_util.utcnow, index=True) last_updated_ts = Column( - TIMESTAMP_TYPE, default=time.time, index=True + TIMESTAMP_TYPE, index=True ) # *** Not originally in v30, only added for recorder to startup ok old_state_id = Column(Integer, ForeignKey("states.state_id"), index=True) attributes_id = Column( diff --git a/tests/components/recorder/db_schema_32.py b/tests/components/recorder/db_schema_32.py index 60f4f733ec0..39ddb8e3148 100644 --- a/tests/components/recorder/db_schema_32.py +++ b/tests/components/recorder/db_schema_32.py @@ -224,7 +224,7 @@ class Events(Base): # type: ignore[misc,valid-type] data_id = Column(Integer, ForeignKey("event_data.data_id"), index=True) context_id_bin = Column( LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH) - ) 
# *** Not originally in v3v320, only added for recorder to startup ok + ) # *** Not originally in v32, only added for recorder to startup ok context_user_id_bin = Column( LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH) ) # *** Not originally in v32, only added for recorder to startup ok @@ -254,7 +254,7 @@ class Events(Base): # type: ignore[misc,valid-type] event_data=None, origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin), time_fired=None, - time_fired_ts=dt_util.utc_to_timestamp(event.time_fired), + time_fired_ts=event.time_fired.timestamp(), context_id=event.context.id, context_user_id=event.context.user_id, context_parent_id=event.context.parent_id, @@ -429,16 +429,16 @@ class States(Base): # type: ignore[misc,valid-type] # None state means the state was removed from the state machine if state is None: dbstate.state = "" - dbstate.last_updated_ts = dt_util.utc_to_timestamp(event.time_fired) + dbstate.last_updated_ts = event.time_fired.timestamp() dbstate.last_changed_ts = None return dbstate dbstate.state = state.state - dbstate.last_updated_ts = dt_util.utc_to_timestamp(state.last_updated) + dbstate.last_updated_ts = state.last_updated.timestamp() if state.last_updated == state.last_changed: dbstate.last_changed_ts = None else: - dbstate.last_changed_ts = dt_util.utc_to_timestamp(state.last_changed) + dbstate.last_changed_ts = state.last_changed.timestamp() return dbstate @@ -565,6 +565,7 @@ class StatisticsBase: id = Column(Integer, Identity(), primary_key=True) created = Column(DATETIME_TYPE, default=dt_util.utcnow) + # *** Not originally in v32, only added for recorder to startup ok created_ts = Column(TIMESTAMP_TYPE, default=time.time) metadata_id = Column( Integer, @@ -572,11 +573,13 @@ class StatisticsBase: index=True, ) start = Column(DATETIME_TYPE, index=True) + # *** Not originally in v32, only added for recorder to startup ok start_ts = Column(TIMESTAMP_TYPE, index=True) mean = Column(DOUBLE_TYPE) min = Column(DOUBLE_TYPE) max = Column(DOUBLE_TYPE) last_reset = Column(DATETIME_TYPE) + # *** Not originally in v32, only added for recorder to startup ok last_reset_ts = Column(TIMESTAMP_TYPE) state = Column(DOUBLE_TYPE) sum = Column(DOUBLE_TYPE) diff --git a/tests/components/recorder/db_schema_42.py b/tests/components/recorder/db_schema_42.py index 99bdbb28f2c..efeade46562 100644 --- a/tests/components/recorder/db_schema_42.py +++ b/tests/components/recorder/db_schema_42.py @@ -687,7 +687,7 @@ class StatisticsBase: created=None, created_ts=time.time(), start=None, - start_ts=dt_util.utc_to_timestamp(stats["start"]), + start_ts=stats["start"].timestamp(), mean=stats.get("mean"), min=stats.get("min"), max=stats.get("max"), diff --git a/tests/components/recorder/db_schema_43.py b/tests/components/recorder/db_schema_43.py index 26d8ecd6856..8e77e8782ee 100644 --- a/tests/components/recorder/db_schema_43.py +++ b/tests/components/recorder/db_schema_43.py @@ -697,7 +697,7 @@ class StatisticsBase: created=None, created_ts=time.time(), start=None, - start_ts=dt_util.utc_to_timestamp(stats["start"]), + start_ts=stats["start"].timestamp(), mean=stats.get("mean"), min=stats.get("min"), max=stats.get("max"), diff --git a/tests/components/recorder/test_history.py b/tests/components/recorder/test_history.py index 3923c72107a..28b8275247c 100644 --- a/tests/components/recorder/test_history.py +++ b/tests/components/recorder/test_history.py @@ -5,30 +5,21 @@ from __future__ import annotations from copy import copy from datetime import datetime, timedelta import json -from unittest.mock import patch, 
sentinel +from unittest.mock import sentinel from freezegun import freeze_time import pytest -from sqlalchemy import text from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder, get_instance, history +from homeassistant.components.recorder import Recorder, history from homeassistant.components.recorder.db_schema import ( - Events, - RecorderRuns, StateAttributes, States, StatesMeta, ) from homeassistant.components.recorder.filters import Filters -from homeassistant.components.recorder.history import legacy from homeassistant.components.recorder.models import process_timestamp -from homeassistant.components.recorder.models.legacy import ( - LegacyLazyState, - LegacyLazyStatePreSchema31, -) from homeassistant.components.recorder.util import session_scope -import homeassistant.core as ha from homeassistant.core import HomeAssistant, State from homeassistant.helpers.json import JSONEncoder import homeassistant.util.dt as dt_util @@ -57,77 +48,6 @@ def setup_recorder(recorder_mock: Recorder) -> recorder.Recorder: """Set up recorder.""" -async def _async_get_states( - hass: HomeAssistant, - utc_point_in_time: datetime, - entity_ids: list[str] | None = None, - run: RecorderRuns | None = None, - no_attributes: bool = False, -): - """Get states from the database.""" - - def _get_states_with_session(): - with session_scope(hass=hass, read_only=True) as session: - attr_cache = {} - pre_31_schema = get_instance(hass).schema_version < 31 - return [ - LegacyLazyStatePreSchema31(row, attr_cache, None) - if pre_31_schema - else LegacyLazyState( - row, - attr_cache, - None, - row.entity_id, - ) - for row in legacy._get_rows_with_session( - hass, - session, - utc_point_in_time, - entity_ids, - run, - no_attributes, - ) - ] - - return await recorder.get_instance(hass).async_add_executor_job( - _get_states_with_session - ) - - -def _add_db_entries( - hass: ha.HomeAssistant, point: datetime, entity_ids: list[str] -) -> None: - with session_scope(hass=hass) as session: - for idx, entity_id in enumerate(entity_ids): - session.add( - Events( - event_id=1001 + idx, - event_type="state_changed", - event_data="{}", - origin="LOCAL", - time_fired=point, - ) - ) - session.add( - States( - entity_id=entity_id, - state="on", - attributes='{"name":"the light"}', - last_changed=None, - last_updated=point, - event_id=1001 + idx, - attributes_id=1002 + idx, - ) - ) - session.add( - StateAttributes( - shared_attrs='{"name":"the shared light"}', - hash=1234 + idx, - attributes_id=1002 + idx, - ) - ) - - async def test_get_full_significant_states_with_session_entity_no_matches( hass: HomeAssistant, ) -> None: @@ -891,184 +811,6 @@ def record_states( return zero, four, states -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_state_changes_during_period_query_during_migration_to_schema_25( - hass: HomeAssistant, - recorder_db_url: str, -) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25. - - This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the - state_attributes table. 
- """ - - instance = recorder.get_instance(hass) - - with patch.object(instance.states_meta_manager, "active", False): - start = dt_util.utcnow() - point = start + timedelta(seconds=1) - end = point + timedelta(seconds=1) - entity_id = "light.test" - await recorder.get_instance(hass).async_add_executor_job( - _add_db_entries, hass, point, [entity_id] - ) - - no_attributes = True - hist = history.state_changes_during_period( - hass, start, end, entity_id, no_attributes, include_start_time_state=False - ) - state = hist[entity_id][0] - assert state.attributes == {} - - no_attributes = False - hist = history.state_changes_during_period( - hass, start, end, entity_id, no_attributes, include_start_time_state=False - ) - state = hist[entity_id][0] - assert state.attributes == {"name": "the shared light"} - - with instance.engine.connect() as conn: - conn.execute(text("update states set attributes_id=NULL;")) - conn.execute(text("drop table state_attributes;")) - conn.commit() - - with patch.object(instance, "schema_version", 24): - instance.states_meta_manager.active = False - no_attributes = True - hist = history.state_changes_during_period( - hass, - start, - end, - entity_id, - no_attributes, - include_start_time_state=False, - ) - state = hist[entity_id][0] - assert state.attributes == {} - - no_attributes = False - hist = history.state_changes_during_period( - hass, - start, - end, - entity_id, - no_attributes, - include_start_time_state=False, - ) - state = hist[entity_id][0] - assert state.attributes == {"name": "the light"} - - -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_get_states_query_during_migration_to_schema_25( - hass: HomeAssistant, - recorder_db_url: str, -) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25. - - This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the - state_attributes table. 
- """ - - instance = recorder.get_instance(hass) - - start = dt_util.utcnow() - point = start + timedelta(seconds=1) - end = point + timedelta(seconds=1) - entity_id = "light.test" - await instance.async_add_executor_job(_add_db_entries, hass, point, [entity_id]) - assert instance.states_meta_manager.active - - no_attributes = True - hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) - state = hist[0] - assert state.attributes == {} - - no_attributes = False - hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) - state = hist[0] - assert state.attributes == {"name": "the shared light"} - - with instance.engine.connect() as conn: - conn.execute(text("update states set attributes_id=NULL;")) - conn.execute(text("drop table state_attributes;")) - conn.commit() - - with patch.object(instance, "schema_version", 24): - instance.states_meta_manager.active = False - no_attributes = True - hist = await _async_get_states( - hass, end, [entity_id], no_attributes=no_attributes - ) - state = hist[0] - assert state.attributes == {} - - no_attributes = False - hist = await _async_get_states( - hass, end, [entity_id], no_attributes=no_attributes - ) - state = hist[0] - assert state.attributes == {"name": "the light"} - - -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_get_states_query_during_migration_to_schema_25_multiple_entities( - hass: HomeAssistant, - recorder_db_url: str, -) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25. - - This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the - state_attributes table. - """ - - instance = recorder.get_instance(hass) - - start = dt_util.utcnow() - point = start + timedelta(seconds=1) - end = point + timedelta(seconds=1) - entity_id_1 = "light.test" - entity_id_2 = "switch.test" - entity_ids = [entity_id_1, entity_id_2] - - await instance.async_add_executor_job(_add_db_entries, hass, point, entity_ids) - assert instance.states_meta_manager.active - - no_attributes = True - hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) - assert hist[0].attributes == {} - assert hist[1].attributes == {} - - no_attributes = False - hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) - assert hist[0].attributes == {"name": "the shared light"} - assert hist[1].attributes == {"name": "the shared light"} - - with instance.engine.connect() as conn: - conn.execute(text("update states set attributes_id=NULL;")) - conn.execute(text("drop table state_attributes;")) - conn.commit() - - with patch.object(instance, "schema_version", 24): - instance.states_meta_manager.active = False - no_attributes = True - hist = await _async_get_states( - hass, end, entity_ids, no_attributes=no_attributes - ) - assert hist[0].attributes == {} - assert hist[1].attributes == {} - - no_attributes = False - hist = await _async_get_states( - hass, end, entity_ids, no_attributes=no_attributes - ) - assert hist[0].attributes == {"name": "the light"} - assert hist[1].attributes == {"name": "the light"} - - async def test_get_full_significant_states_handles_empty_last_changed( hass: HomeAssistant, ) -> None: diff --git a/tests/components/recorder/test_history_db_schema_30.py b/tests/components/recorder/test_history_db_schema_30.py deleted file mode 100644 index 0e5f6cf7f79..00000000000 --- a/tests/components/recorder/test_history_db_schema_30.py +++ 
/dev/null @@ -1,713 +0,0 @@ -"""The tests the History component.""" - -from __future__ import annotations - -from copy import copy -from datetime import datetime, timedelta -import json -from unittest.mock import patch, sentinel - -from freezegun import freeze_time -import pytest - -from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder, history -from homeassistant.components.recorder.filters import Filters -from homeassistant.components.recorder.models import process_timestamp -from homeassistant.components.recorder.util import session_scope -from homeassistant.core import HomeAssistant, State -from homeassistant.helpers.json import JSONEncoder -import homeassistant.util.dt as dt_util - -from .common import ( - assert_dict_of_states_equal_without_context_and_last_changed, - assert_multiple_states_equal_without_context, - assert_multiple_states_equal_without_context_and_last_changed, - assert_states_equal_without_context, - async_wait_recording_done, - old_db_schema, -) - -from tests.typing import RecorderInstanceGenerator - - -@pytest.fixture -async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - - -@pytest.fixture(autouse=True) -def db_schema_30(): - """Fixture to initialize the db with the old schema 30.""" - with old_db_schema("30"): - yield - - -@pytest.fixture(autouse=True) -def setup_recorder(db_schema_30, recorder_mock: Recorder) -> recorder.Recorder: - """Set up recorder.""" - - -async def test_get_full_significant_states_with_session_entity_no_matches( - hass: HomeAssistant, -) -> None: - """Test getting states at a specific point in time for entities that never have been recorded.""" - now = dt_util.utcnow() - time_before_recorder_ran = now - timedelta(days=1000) - instance = recorder.get_instance(hass) - with ( - session_scope(hass=hass) as session, - patch.object(instance.states_meta_manager, "active", False), - ): - assert ( - history.get_full_significant_states_with_session( - hass, session, time_before_recorder_ran, now, entity_ids=["demo.id"] - ) - == {} - ) - assert ( - history.get_full_significant_states_with_session( - hass, - session, - time_before_recorder_ran, - now, - entity_ids=["demo.id", "demo.id2"], - ) - == {} - ) - - -async def test_significant_states_with_session_entity_minimal_response_no_matches( - hass: HomeAssistant, -) -> None: - """Test getting states at a specific point in time for entities that never have been recorded.""" - now = dt_util.utcnow() - time_before_recorder_ran = now - timedelta(days=1000) - instance = recorder.get_instance(hass) - with ( - session_scope(hass=hass) as session, - patch.object(instance.states_meta_manager, "active", False), - ): - assert ( - history.get_significant_states_with_session( - hass, - session, - time_before_recorder_ran, - now, - entity_ids=["demo.id"], - minimal_response=True, - ) - == {} - ) - assert ( - history.get_significant_states_with_session( - hass, - session, - time_before_recorder_ran, - now, - entity_ids=["demo.id", "demo.id2"], - minimal_response=True, - ) - == {} - ) - - -@pytest.mark.parametrize( - ("attributes", "no_attributes", "limit"), - [ - ({"attr": True}, False, 5000), - ({}, True, 5000), - ({"attr": True}, False, 3), - ({}, True, 3), - ], -) -async def test_state_changes_during_period( - hass: HomeAssistant, attributes, no_attributes, limit -) -> None: - """Test state change during period.""" - entity_id = "media_player.test" - instance = recorder.get_instance(hass) - with 
patch.object(instance.states_meta_manager, "active", False): - - def set_state(state): - """Set the state.""" - hass.states.async_set(entity_id, state, attributes) - return hass.states.get(entity_id) - - start = dt_util.utcnow() - point = start + timedelta(seconds=1) - end = point + timedelta(seconds=1) - - with freeze_time(start) as freezer: - set_state("idle") - set_state("YouTube") - - freezer.move_to(point) - states = [ - set_state("idle"), - set_state("Netflix"), - set_state("Plex"), - set_state("YouTube"), - ] - - freezer.move_to(end) - set_state("Netflix") - set_state("Plex") - await async_wait_recording_done(hass) - - hist = history.state_changes_during_period( - hass, start, end, entity_id, no_attributes, limit=limit - ) - - assert_multiple_states_equal_without_context(states[:limit], hist[entity_id]) - - -async def test_state_changes_during_period_descending( - hass: HomeAssistant, -) -> None: - """Test state change during period descending.""" - entity_id = "media_player.test" - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - - def set_state(state): - """Set the state.""" - hass.states.async_set(entity_id, state, {"any": 1}) - return hass.states.get(entity_id) - - start = dt_util.utcnow() - point = start + timedelta(seconds=1) - point2 = start + timedelta(seconds=1, microseconds=2) - point3 = start + timedelta(seconds=1, microseconds=3) - point4 = start + timedelta(seconds=1, microseconds=4) - end = point + timedelta(seconds=1) - - with freeze_time(start) as freezer: - set_state("idle") - set_state("YouTube") - - freezer.move_to(point) - - states = [set_state("idle")] - freezer.move_to(point2) - - states.append(set_state("Netflix")) - - freezer.move_to(point3) - states.append(set_state("Plex")) - - freezer.move_to(point4) - states.append(set_state("YouTube")) - - freezer.move_to(end) - set_state("Netflix") - set_state("Plex") - await async_wait_recording_done(hass) - - hist = history.state_changes_during_period( - hass, start, end, entity_id, no_attributes=False, descending=False - ) - assert_multiple_states_equal_without_context(states, hist[entity_id]) - - hist = history.state_changes_during_period( - hass, start, end, entity_id, no_attributes=False, descending=True - ) - assert_multiple_states_equal_without_context( - states, list(reversed(list(hist[entity_id]))) - ) - - -async def test_get_last_state_changes(hass: HomeAssistant) -> None: - """Test number of state changes.""" - entity_id = "sensor.test" - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - - def set_state(state): - """Set the state.""" - hass.states.async_set(entity_id, state) - return hass.states.get(entity_id) - - start = dt_util.utcnow() - timedelta(minutes=2) - point = start + timedelta(minutes=1) - point2 = point + timedelta(minutes=1, seconds=1) - states = [] - - with freeze_time(start) as freezer: - set_state("1") - - freezer.move_to(point) - states.append(set_state("2")) - - freezer.move_to(point2) - states.append(set_state("3")) - await async_wait_recording_done(hass) - - hist = history.get_last_state_changes(hass, 2, entity_id) - - assert_multiple_states_equal_without_context(states, hist[entity_id]) - - -async def test_ensure_state_can_be_copied( - hass: HomeAssistant, -) -> None: - """Ensure a state can pass though copy(). - - The filter integration uses copy() on states - from history. 
- """ - entity_id = "sensor.test" - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - - def set_state(state): - """Set the state.""" - hass.states.async_set(entity_id, state) - return hass.states.get(entity_id) - - start = dt_util.utcnow() - timedelta(minutes=2) - point = start + timedelta(minutes=1) - - with freeze_time(start) as freezer: - set_state("1") - - freezer.move_to(point) - set_state("2") - await async_wait_recording_done(hass) - - hist = history.get_last_state_changes(hass, 2, entity_id) - - assert_states_equal_without_context( - copy(hist[entity_id][0]), hist[entity_id][0] - ) - assert_states_equal_without_context( - copy(hist[entity_id][1]), hist[entity_id][1] - ) - - -async def test_get_significant_states(hass: HomeAssistant) -> None: - """Test that only significant states are returned. - - We should get back every thermostat change that - includes an attribute change, but only the state updates for - media player (attribute changes are not significant and not returned). - """ - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - zero, four, states = record_states(hass) - await async_wait_recording_done(hass) - - hist = history.get_significant_states(hass, zero, four, entity_ids=list(states)) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def test_get_significant_states_minimal_response(hass: HomeAssistant) -> None: - """Test that only significant states are returned. - - When minimal responses is set only the first and - last states return a complete state. - We should get back every thermostat change that - includes an attribute change, but only the state updates for - media player (attribute changes are not significant and not returned). - """ - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - zero, four, states = record_states(hass) - await async_wait_recording_done(hass) - - hist = history.get_significant_states( - hass, zero, four, minimal_response=True, entity_ids=list(states) - ) - entites_with_reducable_states = [ - "media_player.test", - "media_player.test3", - ] - - # All states for media_player.test state are reduced - # down to last_changed and state when minimal_response - # is set except for the first state. - # is set. 
We use JSONEncoder to make sure that are - # pre-encoded last_changed is always the same as what - # will happen with encoding a native state - for entity_id in entites_with_reducable_states: - entity_states = states[entity_id] - for state_idx in range(1, len(entity_states)): - input_state = entity_states[state_idx] - orig_last_changed = json.dumps( - process_timestamp(input_state.last_changed), - cls=JSONEncoder, - ).replace('"', "") - orig_state = input_state.state - entity_states[state_idx] = { - "last_changed": orig_last_changed, - "state": orig_state, - } - - assert len(hist) == len(states) - assert_states_equal_without_context( - states["media_player.test"][0], hist["media_player.test"][0] - ) - assert states["media_player.test"][1] == hist["media_player.test"][1] - assert states["media_player.test"][2] == hist["media_player.test"][2] - - assert_multiple_states_equal_without_context( - states["media_player.test2"], hist["media_player.test2"] - ) - assert_states_equal_without_context( - states["media_player.test3"][0], hist["media_player.test3"][0] - ) - assert states["media_player.test3"][1] == hist["media_player.test3"][1] - - assert_multiple_states_equal_without_context( - states["script.can_cancel_this_one"], hist["script.can_cancel_this_one"] - ) - assert_multiple_states_equal_without_context_and_last_changed( - states["thermostat.test"], hist["thermostat.test"] - ) - assert_multiple_states_equal_without_context_and_last_changed( - states["thermostat.test2"], hist["thermostat.test2"] - ) - - -async def test_get_significant_states_with_initial(hass: HomeAssistant) -> None: - """Test that only significant states are returned. - - We should get back every thermostat change that - includes an attribute change, but only the state updates for - media player (attribute changes are not significant and not returned). - """ - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - zero, four, states = record_states(hass) - await async_wait_recording_done(hass) - - one = zero + timedelta(seconds=1) - one_with_microsecond = zero + timedelta(seconds=1, microseconds=1) - one_and_half = zero + timedelta(seconds=1.5) - for entity_id in states: - if entity_id == "media_player.test": - states[entity_id] = states[entity_id][1:] - for state in states[entity_id]: - if state.last_changed in (one, one_with_microsecond): - state.last_changed = one_and_half - state.last_updated = one_and_half - - hist = history.get_significant_states( - hass, - one_and_half, - four, - include_start_time_state=True, - entity_ids=list(states), - ) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def test_get_significant_states_without_initial(hass: HomeAssistant) -> None: - """Test that only significant states are returned. - - We should get back every thermostat change that - includes an attribute change, but only the state updates for - media player (attribute changes are not significant and not returned). 
- """ - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - zero, four, states = record_states(hass) - await async_wait_recording_done(hass) - - one = zero + timedelta(seconds=1) - one_with_microsecond = zero + timedelta(seconds=1, microseconds=1) - one_and_half = zero + timedelta(seconds=1.5) - for entity_id in states: - states[entity_id] = [ - s - for s in states[entity_id] - if s.last_changed not in (one, one_with_microsecond) - ] - del states["media_player.test2"] - - hist = history.get_significant_states( - hass, - one_and_half, - four, - include_start_time_state=False, - entity_ids=list(states), - ) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def test_get_significant_states_entity_id(hass: HomeAssistant) -> None: - """Test that only significant states are returned for one entity.""" - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - zero, four, states = record_states(hass) - await async_wait_recording_done(hass) - - del states["media_player.test2"] - del states["media_player.test3"] - del states["thermostat.test"] - del states["thermostat.test2"] - del states["script.can_cancel_this_one"] - - hist = history.get_significant_states(hass, zero, four, ["media_player.test"]) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def test_get_significant_states_multiple_entity_ids(hass: HomeAssistant) -> None: - """Test that only significant states are returned for one entity.""" - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - zero, four, states = record_states(hass) - await async_wait_recording_done(hass) - - del states["media_player.test2"] - del states["media_player.test3"] - del states["thermostat.test2"] - del states["script.can_cancel_this_one"] - - hist = history.get_significant_states( - hass, - zero, - four, - ["media_player.test", "thermostat.test"], - ) - assert_multiple_states_equal_without_context_and_last_changed( - states["media_player.test"], hist["media_player.test"] - ) - assert_multiple_states_equal_without_context_and_last_changed( - states["thermostat.test"], hist["thermostat.test"] - ) - - -async def test_get_significant_states_are_ordered(hass: HomeAssistant) -> None: - """Test order of results from get_significant_states. - - When entity ids are given, the results should be returned with the data - in the same order. 
- """ - - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - zero, four, _states = record_states(hass) - await async_wait_recording_done(hass) - - entity_ids = ["media_player.test", "media_player.test2"] - hist = history.get_significant_states(hass, zero, four, entity_ids) - assert list(hist.keys()) == entity_ids - entity_ids = ["media_player.test2", "media_player.test"] - hist = history.get_significant_states(hass, zero, four, entity_ids) - assert list(hist.keys()) == entity_ids - - -async def test_get_significant_states_only(hass: HomeAssistant) -> None: - """Test significant states when significant_states_only is set.""" - entity_id = "sensor.test" - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - - def set_state(state, **kwargs): - """Set the state.""" - hass.states.async_set(entity_id, state, **kwargs) - return hass.states.get(entity_id) - - start = dt_util.utcnow() - timedelta(minutes=4) - points = [start + timedelta(minutes=i) for i in range(1, 4)] - - states = [] - with freeze_time(start) as freezer: - set_state("123", attributes={"attribute": 10.64}) - - freezer.move_to(points[0]) - # Attributes are different, state not - states.append(set_state("123", attributes={"attribute": 21.42})) - - freezer.move_to(points[1]) - # state is different, attributes not - states.append(set_state("32", attributes={"attribute": 21.42})) - - freezer.move_to(points[2]) - # everything is different - states.append(set_state("412", attributes={"attribute": 54.23})) - await async_wait_recording_done(hass) - - hist = history.get_significant_states( - hass, - start, - significant_changes_only=True, - entity_ids=list({state.entity_id for state in states}), - ) - - assert len(hist[entity_id]) == 2 - assert not any( - state.last_updated == states[0].last_updated for state in hist[entity_id] - ) - assert any( - state.last_updated == states[1].last_updated for state in hist[entity_id] - ) - assert any( - state.last_updated == states[2].last_updated for state in hist[entity_id] - ) - - hist = history.get_significant_states( - hass, - start, - significant_changes_only=False, - entity_ids=list({state.entity_id for state in states}), - ) - - assert len(hist[entity_id]) == 3 - assert_multiple_states_equal_without_context_and_last_changed( - states, hist[entity_id] - ) - - -def record_states( - hass: HomeAssistant, -) -> tuple[datetime, datetime, dict[str, list[State]]]: - """Record some test states. - - We inject a bunch of state updates from media player, zone and - thermostat. 
- """ - mp = "media_player.test" - mp2 = "media_player.test2" - mp3 = "media_player.test3" - therm = "thermostat.test" - therm2 = "thermostat.test2" - zone = "zone.home" - script_c = "script.can_cancel_this_one" - - def set_state(entity_id, state, **kwargs): - """Set the state.""" - hass.states.async_set(entity_id, state, **kwargs) - return hass.states.get(entity_id) - - zero = dt_util.utcnow() - one = zero + timedelta(seconds=1) - two = one + timedelta(seconds=1) - three = two + timedelta(seconds=1) - four = three + timedelta(seconds=1) - - states = {therm: [], therm2: [], mp: [], mp2: [], mp3: [], script_c: []} - with freeze_time(one) as freezer: - states[mp].append( - set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)}) - ) - states[mp2].append( - set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)}) - ) - states[mp3].append( - set_state(mp3, "idle", attributes={"media_title": str(sentinel.mt1)}) - ) - states[therm].append( - set_state(therm, 20, attributes={"current_temperature": 19.5}) - ) - - freezer.move_to(one + timedelta(microseconds=1)) - states[mp].append( - set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)}) - ) - - freezer.move_to(two) - # This state will be skipped only different in time - set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)}) - # This state will be skipped because domain is excluded - set_state(zone, "zoning") - states[script_c].append( - set_state(script_c, "off", attributes={"can_cancel": True}) - ) - states[therm].append( - set_state(therm, 21, attributes={"current_temperature": 19.8}) - ) - states[therm2].append( - set_state(therm2, 20, attributes={"current_temperature": 19}) - ) - - freezer.move_to(three) - states[mp].append( - set_state(mp, "Netflix", attributes={"media_title": str(sentinel.mt4)}) - ) - states[mp3].append( - set_state(mp3, "Netflix", attributes={"media_title": str(sentinel.mt3)}) - ) - # Attributes changed even though state is the same - states[therm].append( - set_state(therm, 21, attributes={"current_temperature": 20}) - ) - - return zero, four, states - - -async def test_state_changes_during_period_multiple_entities_single_test( - hass: HomeAssistant, -) -> None: - """Test state change during period with multiple entities in the same test. - - This test ensures the sqlalchemy query cache does not - generate incorrect results. 
- """ - instance = recorder.get_instance(hass) - with patch.object(instance.states_meta_manager, "active", False): - start = dt_util.utcnow() - test_entites = {f"sensor.{i}": str(i) for i in range(30)} - for entity_id, value in test_entites.items(): - hass.states.async_set(entity_id, value) - await async_wait_recording_done(hass) - - end = dt_util.utcnow() - - for entity_id, value in test_entites.items(): - hist = history.state_changes_during_period(hass, start, end, entity_id) - assert len(hist) == 1 - assert hist[entity_id][0].state == value - - -def test_get_significant_states_without_entity_ids_raises(hass: HomeAssistant) -> None: - """Test at least one entity id is required for get_significant_states.""" - now = dt_util.utcnow() - with pytest.raises(ValueError, match="entity_ids must be provided"): - history.get_significant_states(hass, now, None) - - -def test_state_changes_during_period_without_entity_ids_raises( - hass: HomeAssistant, -) -> None: - """Test at least one entity id is required for state_changes_during_period.""" - now = dt_util.utcnow() - with pytest.raises(ValueError, match="entity_id must be provided"): - history.state_changes_during_period(hass, now, None) - - -def test_get_significant_states_with_filters_raises(hass: HomeAssistant) -> None: - """Test passing filters is no longer supported.""" - now = dt_util.utcnow() - with pytest.raises(NotImplementedError, match="Filters are no longer supported"): - history.get_significant_states( - hass, now, None, ["media_player.test"], Filters() - ) - - -def test_get_significant_states_with_non_existent_entity_ids_returns_empty( - hass: HomeAssistant, -) -> None: - """Test get_significant_states returns an empty dict when entities not in the db.""" - now = dt_util.utcnow() - assert history.get_significant_states(hass, now, None, ["nonexistent.entity"]) == {} - - -def test_state_changes_during_period_with_non_existent_entity_ids_returns_empty( - hass: HomeAssistant, -) -> None: - """Test state_changes_during_period returns an empty dict when entities not in the db.""" - now = dt_util.utcnow() - assert ( - history.state_changes_during_period(hass, now, None, "nonexistent.entity") == {} - ) - - -def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty( - hass: HomeAssistant, -) -> None: - """Test get_last_state_changes returns an empty dict when entities not in the db.""" - assert history.get_last_state_changes(hass, 1, "nonexistent.entity") == {} diff --git a/tests/components/recorder/test_history_db_schema_32.py b/tests/components/recorder/test_history_db_schema_32.py index 3ee6edd8e1e..666626ff688 100644 --- a/tests/components/recorder/test_history_db_schema_32.py +++ b/tests/components/recorder/test_history_db_schema_32.py @@ -38,6 +38,17 @@ async def mock_recorder_before_hass( """Set up recorder.""" +@pytest.fixture +def disable_states_meta_manager(): + """Disable the states meta manager.""" + with patch.object( + recorder.table_managers.states_meta.StatesMetaManager, + "active", + False, + ): + yield + + @pytest.fixture(autouse=True) def db_schema_32(): """Fixture to initialize the db with the old schema 32.""" @@ -46,7 +57,9 @@ def db_schema_32(): @pytest.fixture(autouse=True) -def setup_recorder(db_schema_32, recorder_mock: Recorder) -> recorder.Recorder: +def setup_recorder( + db_schema_32, disable_states_meta_manager, recorder_mock: Recorder +) -> recorder.Recorder: """Set up recorder.""" diff --git a/tests/components/recorder/test_history_db_schema_42.py 
b/tests/components/recorder/test_history_db_schema_42.py index 5d9444e9cfe..85badeea281 100644 --- a/tests/components/recorder/test_history_db_schema_42.py +++ b/tests/components/recorder/test_history_db_schema_42.py @@ -5,21 +5,15 @@ from __future__ import annotations from copy import copy from datetime import datetime, timedelta import json -from unittest.mock import patch, sentinel +from unittest.mock import sentinel from freezegun import freeze_time import pytest -from sqlalchemy import text from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder, get_instance, history +from homeassistant.components.recorder import Recorder, history from homeassistant.components.recorder.filters import Filters -from homeassistant.components.recorder.history import legacy from homeassistant.components.recorder.models import process_timestamp -from homeassistant.components.recorder.models.legacy import ( - LegacyLazyState, - LegacyLazyStatePreSchema31, -) from homeassistant.components.recorder.util import session_scope import homeassistant.core as ha from homeassistant.core import HomeAssistant, State @@ -35,7 +29,7 @@ from .common import ( async_wait_recording_done, old_db_schema, ) -from .db_schema_42 import Events, RecorderRuns, StateAttributes, States, StatesMeta +from .db_schema_42 import StateAttributes, States, StatesMeta from tests.typing import RecorderInstanceGenerator @@ -59,77 +53,6 @@ def setup_recorder(db_schema_42, recorder_mock: Recorder) -> recorder.Recorder: """Set up recorder.""" -async def _async_get_states( - hass: HomeAssistant, - utc_point_in_time: datetime, - entity_ids: list[str] | None = None, - run: RecorderRuns | None = None, - no_attributes: bool = False, -): - """Get states from the database.""" - - def _get_states_with_session(): - with session_scope(hass=hass, read_only=True) as session: - attr_cache = {} - pre_31_schema = get_instance(hass).schema_version < 31 - return [ - LegacyLazyStatePreSchema31(row, attr_cache, None) - if pre_31_schema - else LegacyLazyState( - row, - attr_cache, - None, - row.entity_id, - ) - for row in legacy._get_rows_with_session( - hass, - session, - utc_point_in_time, - entity_ids, - run, - no_attributes, - ) - ] - - return await recorder.get_instance(hass).async_add_executor_job( - _get_states_with_session - ) - - -def _add_db_entries( - hass: ha.HomeAssistant, point: datetime, entity_ids: list[str] -) -> None: - with session_scope(hass=hass) as session: - for idx, entity_id in enumerate(entity_ids): - session.add( - Events( - event_id=1001 + idx, - event_type="state_changed", - event_data="{}", - origin="LOCAL", - time_fired=point, - ) - ) - session.add( - States( - entity_id=entity_id, - state="on", - attributes='{"name":"the light"}', - last_changed=None, - last_updated=point, - event_id=1001 + idx, - attributes_id=1002 + idx, - ) - ) - session.add( - StateAttributes( - shared_attrs='{"name":"the shared light"}', - hash=1234 + idx, - attributes_id=1002 + idx, - ) - ) - - async def test_get_full_significant_states_with_session_entity_no_matches( hass: HomeAssistant, ) -> None: @@ -893,184 +816,6 @@ def record_states( return zero, four, states -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_state_changes_during_period_query_during_migration_to_schema_25( - hass: HomeAssistant, - recorder_db_url: str, -) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25. 
- - This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the - state_attributes table. - """ - - instance = recorder.get_instance(hass) - - with patch.object(instance.states_meta_manager, "active", False): - start = dt_util.utcnow() - point = start + timedelta(seconds=1) - end = point + timedelta(seconds=1) - entity_id = "light.test" - await recorder.get_instance(hass).async_add_executor_job( - _add_db_entries, hass, point, [entity_id] - ) - - no_attributes = True - hist = history.state_changes_during_period( - hass, start, end, entity_id, no_attributes, include_start_time_state=False - ) - state = hist[entity_id][0] - assert state.attributes == {} - - no_attributes = False - hist = history.state_changes_during_period( - hass, start, end, entity_id, no_attributes, include_start_time_state=False - ) - state = hist[entity_id][0] - assert state.attributes == {"name": "the shared light"} - - with instance.engine.connect() as conn: - conn.execute(text("update states set attributes_id=NULL;")) - conn.execute(text("drop table state_attributes;")) - conn.commit() - - with patch.object(instance, "schema_version", 24): - instance.states_meta_manager.active = False - no_attributes = True - hist = history.state_changes_during_period( - hass, - start, - end, - entity_id, - no_attributes, - include_start_time_state=False, - ) - state = hist[entity_id][0] - assert state.attributes == {} - - no_attributes = False - hist = history.state_changes_during_period( - hass, - start, - end, - entity_id, - no_attributes, - include_start_time_state=False, - ) - state = hist[entity_id][0] - assert state.attributes == {"name": "the light"} - - -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_get_states_query_during_migration_to_schema_25( - hass: HomeAssistant, - recorder_db_url: str, -) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25. - - This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the - state_attributes table. 
- """ - - instance = recorder.get_instance(hass) - - start = dt_util.utcnow() - point = start + timedelta(seconds=1) - end = point + timedelta(seconds=1) - entity_id = "light.test" - await instance.async_add_executor_job(_add_db_entries, hass, point, [entity_id]) - assert instance.states_meta_manager.active - - no_attributes = True - hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) - state = hist[0] - assert state.attributes == {} - - no_attributes = False - hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) - state = hist[0] - assert state.attributes == {"name": "the shared light"} - - with instance.engine.connect() as conn: - conn.execute(text("update states set attributes_id=NULL;")) - conn.execute(text("drop table state_attributes;")) - conn.commit() - - with patch.object(instance, "schema_version", 24): - instance.states_meta_manager.active = False - no_attributes = True - hist = await _async_get_states( - hass, end, [entity_id], no_attributes=no_attributes - ) - state = hist[0] - assert state.attributes == {} - - no_attributes = False - hist = await _async_get_states( - hass, end, [entity_id], no_attributes=no_attributes - ) - state = hist[0] - assert state.attributes == {"name": "the light"} - - -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_get_states_query_during_migration_to_schema_25_multiple_entities( - hass: HomeAssistant, - recorder_db_url: str, -) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25. - - This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the - state_attributes table. - """ - - instance = recorder.get_instance(hass) - - start = dt_util.utcnow() - point = start + timedelta(seconds=1) - end = point + timedelta(seconds=1) - entity_id_1 = "light.test" - entity_id_2 = "switch.test" - entity_ids = [entity_id_1, entity_id_2] - - await instance.async_add_executor_job(_add_db_entries, hass, point, entity_ids) - assert instance.states_meta_manager.active - - no_attributes = True - hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) - assert hist[0].attributes == {} - assert hist[1].attributes == {} - - no_attributes = False - hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) - assert hist[0].attributes == {"name": "the shared light"} - assert hist[1].attributes == {"name": "the shared light"} - - with instance.engine.connect() as conn: - conn.execute(text("update states set attributes_id=NULL;")) - conn.execute(text("drop table state_attributes;")) - conn.commit() - - with patch.object(instance, "schema_version", 24): - instance.states_meta_manager.active = False - no_attributes = True - hist = await _async_get_states( - hass, end, entity_ids, no_attributes=no_attributes - ) - assert hist[0].attributes == {} - assert hist[1].attributes == {} - - no_attributes = False - hist = await _async_get_states( - hass, end, entity_ids, no_attributes=no_attributes - ) - assert hist[0].attributes == {"name": "the light"} - assert hist[1].attributes == {"name": "the light"} - - async def test_get_full_significant_states_handles_empty_last_changed( hass: HomeAssistant, ) -> None: diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 3bbc78e21ce..d16712e0c70 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -17,6 +17,7 @@ from 
sqlalchemy.exc import DatabaseError, OperationalError, SQLAlchemyError from sqlalchemy.pool import QueuePool from homeassistant.components import recorder +from homeassistant.components.lock import LockState from homeassistant.components.recorder import ( CONF_AUTO_PURGE, CONF_AUTO_REPACK, @@ -69,8 +70,6 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, MATCH_ALL, - STATE_LOCKED, - STATE_UNLOCKED, ) from homeassistant.core import Context, CoreState, Event, HomeAssistant, State, callback from homeassistant.helpers import ( @@ -834,8 +833,8 @@ async def test_saving_state_and_removing_entity( ) -> None: """Test saving the state of a removed entity.""" entity_id = "lock.mine" - hass.states.async_set(entity_id, STATE_LOCKED) - hass.states.async_set(entity_id, STATE_UNLOCKED) + hass.states.async_set(entity_id, LockState.LOCKED) + hass.states.async_set(entity_id, LockState.UNLOCKED) hass.states.async_remove(entity_id) await async_wait_recording_done(hass) @@ -848,9 +847,9 @@ async def test_saving_state_and_removing_entity( ) assert len(states) == 3 assert states[0].entity_id == entity_id - assert states[0].state == STATE_LOCKED + assert states[0].state == LockState.LOCKED assert states[1].entity_id == entity_id - assert states[1].state == STATE_UNLOCKED + assert states[1].state == LockState.UNLOCKED assert states[2].entity_id == entity_id assert states[2].state is None diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 0e473b702ef..14978bee5a9 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -95,7 +95,13 @@ async def test_schema_update_calls( hass, engine, session_maker, - migration.SchemaValidationStatus(0, True, set(), 0), + migration.SchemaValidationStatus( + current_version=0, + migration_needed=True, + non_live_data_migration_needed=True, + schema_errors=set(), + start_version=0, + ), 42, ), call( @@ -103,7 +109,13 @@ async def test_schema_update_calls( hass, engine, session_maker, - migration.SchemaValidationStatus(42, True, set(), 0), + migration.SchemaValidationStatus( + current_version=42, + migration_needed=True, + non_live_data_migration_needed=True, + schema_errors=set(), + start_version=0, + ), db_schema.SCHEMA_VERSION, ), ] diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index b2a83ae8313..e42cd22e952 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -3,6 +3,7 @@ import datetime import importlib import sys +import threading from typing import Any from unittest.mock import patch import uuid @@ -24,6 +25,7 @@ from homeassistant.components.recorder import ( from homeassistant.components.recorder.db_schema import ( Events, EventTypes, + MigrationChanges, States, StatesMeta, ) @@ -32,9 +34,9 @@ from homeassistant.components.recorder.queries import ( get_migration_changes, select_event_type_ids, ) -from homeassistant.components.recorder.tasks import EntityIDPostMigrationTask from homeassistant.components.recorder.util import ( execute_stmt_lambda_element, + get_index_by_name, session_scope, ) from homeassistant.core import HomeAssistant @@ -42,16 +44,17 @@ import homeassistant.util.dt as dt_util from homeassistant.util.ulid import bytes_to_ulid, ulid_at_time, ulid_to_bytes from .common import ( - MockMigrationTask, async_attach_db_engine, 
async_recorder_block_till_done, async_wait_recording_done, ) +from .conftest import instrument_migration +from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" -SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" @pytest.fixture @@ -77,8 +80,8 @@ def _create_engine_test(*args, **kwargs): This simulates an existing db with the old schema. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] engine = create_engine(*args, **kwargs) old_db_schema.Base.metadata.create_all(engine) with Session(engine) as session: @@ -94,37 +97,36 @@ def _create_engine_test(*args, **kwargs): return engine -@pytest.fixture(autouse=True) +@pytest.fixture def db_schema_32(): """Fixture to initialize the db with the old schema.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): yield -@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_events_context_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] test_uuid = uuid.uuid4() uuid_hex = test_uuid.hex @@ -215,18 +217,28 @@ async def test_migrate_events_context_ids( ) ) - await recorder_mock.async_add_executor_job(_insert_events) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventsContextIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_events) - await async_wait_recording_done(hass) - now = dt_util.utcnow() - expected_ulid_fallback_start = ulid_to_bytes(ulid_at_time(now.timestamp()))[0:6] - await 
_async_wait_migration_done(hass) + await async_wait_recording_done(hass) + now = dt_util.utcnow() + expected_ulid_fallback_start = ulid_to_bytes(ulid_at_time(now.timestamp()))[ + 0:6 + ] + await _async_wait_migration_done(hass) - with freeze_time(now): - # This is a threadsafe way to add a task to the recorder - migrator = migration.EventsContextIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) + await hass.async_stop() + await hass.async_block_till_done() def _object_as_dict(obj): return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} @@ -252,7 +264,38 @@ async def test_migrate_events_context_ids( assert len(events) == 6 return {event.event_type: _object_as_dict(event) for event in events} - events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) + # Run again with new schema, let migration run + async with async_test_home_assistant() as hass: + with freeze_time(now), instrument_migration(hass) as instrumented_migration: + async with async_test_recorder( + hass, wait_recorder=False, wait_recorder_setup=False + ) as instance: + # Check the context ID migrator is considered non-live + assert recorder.util.async_migration_is_live(hass) is False + instrumented_migration.migration_stall.set() + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + events_by_type = await instance.async_add_executor_job( + _fetch_migrated_events + ) + + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + + # Check the index which will be removed by the migrator no longer exists + with session_scope(hass=hass) as session: + assert ( + get_index_by_name(session, "events", "ix_events_context_id") + is None + ) + + await hass.async_stop() + await hass.async_block_till_done() old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"] assert old_uuid_context_id_event["context_id"] is None @@ -323,23 +366,129 @@ async def test_migrate_events_context_ids( event_with_garbage_context_id_no_time_fired_ts["context_parent_id_bin"] is None ) - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) assert ( migration_changes[migration.EventsContextIDMigration.migration_id] == migration.EventsContextIDMigration.migration_version ) -@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_finish_migrate_events_context_ids( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Test we re migrate old uuid context ids and ulid context ids to binary format. + + Before PR https://github.com/home-assistant/core/pull/125214, the migrator would + mark the migration as done before ensuring unused indices were dropped. This + test makes sure we drop the unused indices. 
+ """ + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] + + def _insert_migration(): + with session_scope(hass=hass) as session: + session.merge( + MigrationChanges( + migration_id=migration.EventsContextIDMigration.migration_id, + version=1, + ) + ) + + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventsContextIDMigration, "migrate_data"), + patch.object( + migration.EventIDPostMigration, + "needs_migrate_impl", + return_value=migration.DataMigrationStatus( + needs_migrate=False, migration_done=True + ), + ), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + + # Check the index which will be removed by the migrator exists + with session_scope(hass=hass) as session: + assert get_index_by_name(session, "events", "ix_events_context_id") + + await hass.async_stop() + await hass.async_block_till_done() + + # Run once with new schema, fake migration did not complete + with ( + patch.object(migration.EventsContextIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + # Fake migration ran with old version + await instance.async_add_executor_job(_insert_migration) + await async_wait_recording_done(hass) + + # Check the index which will be removed by the migrator exists + with session_scope(hass=hass) as session: + assert get_index_by_name(session, "events", "ix_events_context_id") + + await hass.async_stop() + await hass.async_block_till_done() + + # Run again with new schema, let migration complete + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + # Check migration ran again + assert ( + migration_changes[migration.EventsContextIDMigration.migration_id] + == migration.EventsContextIDMigration.migration_version + ) + + # Check the index which will be removed by the migrator no longer exists + with session_scope(hass=hass) as session: + assert get_index_by_name(session, "events", "ix_events_context_id") is None + + await hass.async_stop() + await hass.async_block_till_done() + + +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_states_context_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - 
old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] test_uuid = uuid.uuid4() uuid_hex = test_uuid.hex @@ -412,12 +561,24 @@ async def test_migrate_states_context_ids( ) ) - await recorder_mock.async_add_executor_job(_insert_states) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.StatesContextIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_states) - await async_wait_recording_done(hass) - migrator = migration.StatesContextIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _object_as_dict(obj): return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} @@ -443,9 +604,38 @@ async def test_migrate_states_context_ids( assert len(events) == 6 return {state.entity_id: _object_as_dict(state) for state in events} - states_by_entity_id = await recorder_mock.async_add_executor_job( - _fetch_migrated_states - ) + # Run again with new schema, let migration run + async with async_test_home_assistant() as hass: + with instrument_migration(hass) as instrumented_migration: + async with async_test_recorder( + hass, wait_recorder=False, wait_recorder_setup=False + ) as instance: + # Check the context ID migrator is considered non-live + assert recorder.util.async_migration_is_live(hass) is False + instrumented_migration.migration_stall.set() + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + states_by_entity_id = await instance.async_add_executor_job( + _fetch_migrated_states + ) + + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + + # Check the index which will be removed by the migrator no longer exists + with session_scope(hass=hass) as session: + assert ( + get_index_by_name(session, "states", "ix_states_context_id") + is None + ) + + await hass.async_stop() + await hass.async_block_till_done() old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"] assert old_uuid_context_id["context_id"] is None @@ -520,23 +710,129 @@ async def test_migrate_states_context_ids( == b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee" ) - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) assert ( migration_changes[migration.StatesContextIDMigration.migration_id] == migration.StatesContextIDMigration.migration_version ) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_finish_migrate_states_context_ids( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Test we re migrate old uuid context ids and ulid context ids to binary format. 
+ + Before PR https://github.com/home-assistant/core/pull/125214, the migrator would + mark the migration as done before ensuring unused indices were dropped. This + test makes sure we drop the unused indices. + """ + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] + + def _insert_migration(): + with session_scope(hass=hass) as session: + session.merge( + MigrationChanges( + migration_id=migration.StatesContextIDMigration.migration_id, + version=1, + ) + ) + + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.StatesContextIDMigration, "migrate_data"), + patch.object( + migration.EventIDPostMigration, + "needs_migrate_impl", + return_value=migration.DataMigrationStatus( + needs_migrate=False, migration_done=True + ), + ), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + + # Check the index which will be removed by the migrator exists + with session_scope(hass=hass) as session: + assert get_index_by_name(session, "states", "ix_states_context_id") + + await hass.async_stop() + await hass.async_block_till_done() + + # Run once with new schema, fake migration did not complete + with ( + patch.object(migration.StatesContextIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + # Fake migration ran with old version + await instance.async_add_executor_job(_insert_migration) + await async_wait_recording_done(hass) + + # Check the index which will be removed by the migrator exists + with session_scope(hass=hass) as session: + assert get_index_by_name(session, "states", "ix_states_context_id") + + await hass.async_stop() + await hass.async_block_till_done() + + # Run again with new schema, let migration complete + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + # Check migration ran again + assert ( + migration_changes[migration.StatesContextIDMigration.migration_id] + == migration.StatesContextIDMigration.migration_version + ) + + # Check the index which will be removed by the migrator no longer exists + with session_scope(hass=hass) as session: + assert get_index_by_name(session, "states", "ix_states_context_id") is None + + await hass.async_stop() + await hass.async_block_till_done() + + +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_event_type_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: 
RecorderInstanceGenerator, ) -> None: """Test we can migrate event_types to the EventTypes table.""" - await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_events(): with session_scope(hass=hass) as session: @@ -560,13 +856,24 @@ async def test_migrate_event_type_ids( ) ) - await recorder_mock.async_add_executor_job(_insert_events) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventTypeIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_events) - await async_wait_recording_done(hass) - # This is a threadsafe way to add a task to the recorder - migrator = migration.EventTypeIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_events(): with session_scope(hass=hass, read_only=True) as session: @@ -597,35 +904,53 @@ async def test_migrate_event_type_ids( ) return result - events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) - assert len(events_by_type["event_type_one"]) == 2 - assert len(events_by_type["event_type_two"]) == 1 - def _get_many(): with session_scope(hass=hass, read_only=True) as session: - return recorder_mock.event_type_manager.get_many( + return instance.event_type_manager.get_many( ("event_type_one", "event_type_two"), session ) - mapped = await recorder_mock.async_add_executor_job(_get_many) + # Run again with new schema, let migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + mapped = await instance.async_add_executor_job(_get_many) + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + + await hass.async_stop() + await hass.async_block_till_done() + + assert len(events_by_type["event_type_one"]) == 2 + assert len(events_by_type["event_type_two"]) == 1 + assert mapped["event_type_one"] is not None assert mapped["event_type_two"] is not None - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version ) +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) -> None: +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_migrate_entity_ids( + async_test_recorder: RecorderInstanceGenerator, +) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - await async_wait_recording_done(hass) - 
importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_states(): with session_scope(hass=hass) as session: @@ -649,13 +974,24 @@ async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) ) ) - await recorder_mock.async_add_executor_job(_insert_states) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EntityIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_states) - await _async_wait_migration_done(hass) - # This is a threadsafe way to add a task to the recorder - migrator = migration.EntityIDMigration(None, None) - recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_states(): with session_scope(hass=hass, read_only=True) as session: @@ -681,29 +1017,45 @@ async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) ) return result - states_by_entity_id = await recorder_mock.async_add_executor_job( - _fetch_migrated_states - ) + # Run again with new schema, let migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + states_by_entity_id = await instance.async_add_executor_job( + _fetch_migrated_states + ) + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + + await hass.async_stop() + await hass.async_block_till_done() + assert len(states_by_entity_id["sensor.two"]) == 2 assert len(states_by_entity_id["sensor.one"]) == 1 - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) assert ( migration_changes[migration.EntityIDMigration.migration_id] == migration.EntityIDMigration.migration_version ) +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_post_migrate_entity_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_events(): with session_scope(hass=hass) as session: @@ -727,12 +1079,25 @@ async def test_post_migrate_entity_ids( ) ) - await recorder_mock.async_add_executor_job(_insert_events) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EntityIDMigration, "migrate_data"), + 
patch.object(migration.EntityIDPostMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_events) - await _async_wait_migration_done(hass) - # This is a threadsafe way to add a task to the recorder - recorder_mock.queue_task(EntityIDPostMigrationTask()) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_states(): with session_scope(hass=hass, read_only=True) as session: @@ -743,20 +1108,36 @@ async def test_post_migrate_entity_ids( assert len(states) == 3 return {state.state: state.entity_id for state in states} - states_by_state = await recorder_mock.async_add_executor_job(_fetch_migrated_states) + # Run again with new schema, let migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + states_by_state = await instance.async_add_executor_job(_fetch_migrated_states) + + await hass.async_stop() + await hass.async_block_till_done() + assert states_by_state["one_1"] is None assert states_by_state["two_2"] is None assert states_by_state["two_1"] is None +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_null_entity_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_states(): with session_scope(hass=hass) as session: @@ -783,13 +1164,24 @@ async def test_migrate_null_entity_ids( ), ) - await recorder_mock.async_add_executor_job(_insert_states) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EntityIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_states) - await _async_wait_migration_done(hass) - # This is a threadsafe way to add a task to the recorder - migrator = migration.EntityIDMigration(None, None) - recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_states(): with session_scope(hass=hass, read_only=True) as session: @@ -815,31 +1207,47 @@ async def test_migrate_null_entity_ids( ) return result - states_by_entity_id = await recorder_mock.async_add_executor_job( - _fetch_migrated_states - ) - assert 
len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000 - assert len(states_by_entity_id["sensor.one"]) == 2 - def _get_migration_id(): with session_scope(hass=hass, read_only=True) as session: return dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) + # Run again with new schema, let migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + states_by_entity_id = await instance.async_add_executor_job( + _fetch_migrated_states + ) + migration_changes = await instance.async_add_executor_job(_get_migration_id) + + await hass.async_stop() + await hass.async_block_till_done() + + assert len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000 + assert len(states_by_entity_id["sensor.one"]) == 2 + assert ( migration_changes[migration.EntityIDMigration.migration_id] == migration.EntityIDMigration.migration_version ) +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_null_event_type_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate event_types to the EventTypes table when the event_type is NULL.""" - await async_wait_recording_done(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] def _insert_events(): with session_scope(hass=hass) as session: @@ -866,13 +1274,24 @@ async def test_migrate_null_event_type_ids( ), ) - await recorder_mock.async_add_executor_job(_insert_events) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventTypeIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_events) - await _async_wait_migration_done(hass) - # This is a threadsafe way to add a task to the recorder - migrator = migration.EventTypeIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _fetch_migrated_events(): with session_scope(hass=hass, read_only=True) as session: @@ -903,29 +1322,44 @@ async def test_migrate_null_event_type_ids( ) return result - events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) - assert len(events_by_type["event_type_one"]) == 2 - assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000 - def _get_migration_id(): with session_scope(hass=hass, read_only=True) as session: return dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) + # Run again with new schema, let 
migration run + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + migration_changes = await instance.async_add_executor_job(_get_migration_id) + + await hass.async_stop() + await hass.async_block_till_done() + + assert len(events_by_type["event_type_one"]) == 2 + assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000 assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version ) +@pytest.mark.usefixtures("db_schema_32") async def test_stats_timestamp_conversion_is_reentrant( hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test stats migration is reentrant.""" await async_wait_recording_done(hass) await async_attach_db_engine(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_year_ago = now - datetime.timedelta(days=365) six_months_ago = now - datetime.timedelta(days=180) @@ -1070,14 +1504,15 @@ async def test_stats_timestamp_conversion_is_reentrant( ] +@pytest.mark.usefixtures("db_schema_32") async def test_stats_timestamp_with_one_by_one( hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test stats migration with one by one.""" await async_wait_recording_done(hass) await async_attach_db_engine(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_year_ago = now - datetime.timedelta(days=365) six_months_ago = now - datetime.timedelta(days=180) @@ -1289,14 +1724,15 @@ async def test_stats_timestamp_with_one_by_one( ] +@pytest.mark.usefixtures("db_schema_32") async def test_stats_timestamp_with_one_by_one_removes_duplicates( hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test stats migration with one by one removes duplicates.""" await async_wait_recording_done(hass) await async_attach_db_engine(hass) - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_year_ago = now - datetime.timedelta(days=365) six_months_ago = now - datetime.timedelta(days=180) @@ -1483,3 +1919,159 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "sum": None, }, ] + + +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_stats_migrate_times( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test we can migrate times in the statistics tables.""" + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] + now = dt_util.utcnow() + now_timestamp = now.timestamp() + + statistics_kwargs = { + "created": now, + "mean": 0, + "metadata_id": 1, + "min": 0, + "max": 0, + "last_reset": now, + "start": now, + "state": 0, + "sum": 0, + } + mock_metadata = old_db_schema.StatisticMetaData( + has_mean=False, + has_sum=False, + name="Test", + 
source="sensor", + statistic_id="sensor.test", + unit_of_measurement="cats", + ) + number_of_migrations = 5 + + def _get_index_names(table): + with session_scope(hass=hass) as session: + return inspect(session.connection()).get_indexes(table) + + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + def _add_data(): + with session_scope(hass=hass) as session: + session.add(old_db_schema.StatisticsMeta.from_meta(mock_metadata)) + with session_scope(hass=hass) as session: + session.add(old_db_schema.Statistics(**statistics_kwargs)) + session.add(old_db_schema.StatisticsShortTerm(**statistics_kwargs)) + + await instance.async_add_executor_job(_add_data) + await hass.async_block_till_done() + await instance.async_block_till_done() + + statistics_indexes = await instance.async_add_executor_job( + _get_index_names, "statistics" + ) + statistics_short_term_indexes = await instance.async_add_executor_job( + _get_index_names, "statistics_short_term" + ) + statistics_index_names = {index["name"] for index in statistics_indexes} + statistics_short_term_index_names = { + index["name"] for index in statistics_short_term_indexes + } + + await hass.async_stop() + await hass.async_block_till_done() + + assert "ix_statistics_statistic_id_start" in statistics_index_names + assert ( + "ix_statistics_short_term_statistic_id_start" + in statistics_short_term_index_names + ) + + # Test that the times are migrated during migration from schema 32 + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. 
+ for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + def _get_test_data_from_db(): + with session_scope(hass=hass) as session: + statistics_result = list( + session.query(recorder.db_schema.Statistics) + .join( + recorder.db_schema.StatisticsMeta, + recorder.db_schema.Statistics.metadata_id + == recorder.db_schema.StatisticsMeta.id, + ) + .where( + recorder.db_schema.StatisticsMeta.statistic_id == "sensor.test" + ) + ) + statistics_short_term_result = list( + session.query(recorder.db_schema.StatisticsShortTerm) + .join( + recorder.db_schema.StatisticsMeta, + recorder.db_schema.StatisticsShortTerm.metadata_id + == recorder.db_schema.StatisticsMeta.id, + ) + .where( + recorder.db_schema.StatisticsMeta.statistic_id == "sensor.test" + ) + ) + session.expunge_all() + return statistics_result, statistics_short_term_result + + ( + statistics_result, + statistics_short_term_result, + ) = await instance.async_add_executor_job(_get_test_data_from_db) + + for results in (statistics_result, statistics_short_term_result): + assert len(results) == 1 + assert results[0].created is None + assert results[0].created_ts == now_timestamp + assert results[0].last_reset is None + assert results[0].last_reset_ts == now_timestamp + assert results[0].start is None + assert results[0].start_ts == now_timestamp + + statistics_indexes = await instance.async_add_executor_job( + _get_index_names, "statistics" + ) + statistics_short_term_indexes = await instance.async_add_executor_job( + _get_index_names, "statistics_short_term" + ) + statistics_index_names = {index["name"] for index in statistics_indexes} + statistics_short_term_index_names = { + index["name"] for index in statistics_short_term_indexes + } + + assert "ix_statistics_statistic_id_start" not in statistics_index_names + assert ( + "ix_statistics_short_term_statistic_id_start" + not in statistics_short_term_index_names + ) + + await hass.async_stop() diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index bdd881a3a7b..7a333b0a2f5 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -19,11 +19,7 @@ from homeassistant.components.recorder.util import ( from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant -from .common import ( - MockMigrationTask, - async_recorder_block_till_done, - async_wait_recording_done, -) +from .common import async_recorder_block_till_done, async_wait_recording_done from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator @@ -72,7 +68,7 @@ def _create_engine_test(*args, **kwargs): return engine -@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migration_changes_prevent_trying_to_migrate_again( @@ -94,16 +90,14 @@ async def test_migration_changes_prevent_trying_to_migrate_again( # Start with db schema that needs migration (version 32) with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, 
"SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): async with ( @@ -173,4 +167,6 @@ async def test_migration_changes_prevent_trying_to_migrate_again( await hass.async_stop() for task in tasks: - assert not isinstance(task, MigrationTask) + if not isinstance(task, MigrationTask): + continue + assert not isinstance(task.migrator, migration.StatesContextIDMigration) diff --git a/tests/components/recorder/test_models.py b/tests/components/recorder/test_models.py index 975d67a8e99..b2894883ff2 100644 --- a/tests/components/recorder/test_models.py +++ b/tests/components/recorder/test_models.py @@ -3,7 +3,6 @@ from datetime import datetime, timedelta from unittest.mock import PropertyMock -from freezegun import freeze_time import pytest from homeassistant.components.recorder.const import SupportedDialect @@ -15,15 +14,14 @@ from homeassistant.components.recorder.db_schema import ( ) from homeassistant.components.recorder.models import ( LazyState, - process_datetime_to_timestamp, process_timestamp, process_timestamp_to_utc_isoformat, ) from homeassistant.const import EVENT_STATE_CHANGED import homeassistant.core as ha -from homeassistant.core import HomeAssistant from homeassistant.exceptions import InvalidEntityFormatError from homeassistant.util import dt as dt_util +from homeassistant.util.json import json_loads def test_from_event_to_db_event() -> None: @@ -44,6 +42,18 @@ def test_from_event_to_db_event() -> None: assert event.as_dict() == db_event.to_native().as_dict() +def test_from_event_to_db_event_with_null() -> None: + """Test converting event to EventData with a null with PostgreSQL.""" + event = ha.Event( + "test_event", + {"some_data": "withnull\0terminator"}, + ) + dialect = SupportedDialect.POSTGRESQL + event_data = EventData.shared_data_bytes_from_event(event, dialect) + decoded = json_loads(event_data) + assert decoded["some_data"] == "withnull" + + def test_from_event_to_db_state() -> None: """Test converting event to db state.""" state = ha.State( @@ -81,6 +91,21 @@ def test_from_event_to_db_state_attributes() -> None: assert db_attrs.to_native() == attrs +def test_from_event_to_db_state_attributes_with_null() -> None: + """Test converting a state to StateAttributes with a null with PostgreSQL.""" + attrs = {"this_attr": "withnull\0terminator"} + state = ha.State("sensor.temperature", "18", attrs) + event = ha.Event( + EVENT_STATE_CHANGED, + {"entity_id": "sensor.temperature", "old_state": None, "new_state": state}, + context=state.context, + ) + dialect = SupportedDialect.POSTGRESQL + shared_attrs = StateAttributes.shared_attrs_bytes_from_event(event, dialect) + decoded = json_loads(shared_attrs) + assert decoded["this_attr"] == "withnull" + + def test_repr() -> None: """Test converting event to db state repr.""" attrs = {"this_attr": True} @@ -300,6 +325,7 @@ async def test_lazy_state_handles_different_last_updated_and_last_changed( state="off", attributes='{"shared":true}', last_updated_ts=now.timestamp(), + 
last_reported_ts=now.timestamp(), last_changed_ts=(now - timedelta(seconds=60)).timestamp(), ) lstate = LazyState( @@ -314,6 +340,7 @@ async def test_lazy_state_handles_different_last_updated_and_last_changed( } assert lstate.last_updated.timestamp() == row.last_updated_ts assert lstate.last_changed.timestamp() == row.last_changed_ts + assert lstate.last_reported.timestamp() == row.last_updated_ts assert lstate.as_dict() == { "attributes": {"shared": True}, "entity_id": "sensor.valid", @@ -321,6 +348,9 @@ async def test_lazy_state_handles_different_last_updated_and_last_changed( "last_updated": "2021-06-12T03:04:01.000323+00:00", "state": "off", } + assert lstate.last_changed_timestamp == row.last_changed_ts + assert lstate.last_updated_timestamp == row.last_updated_ts + assert lstate.last_reported_timestamp == row.last_updated_ts async def test_lazy_state_handles_same_last_updated_and_last_changed( @@ -334,6 +364,7 @@ async def test_lazy_state_handles_same_last_updated_and_last_changed( attributes='{"shared":true}', last_updated_ts=now.timestamp(), last_changed_ts=now.timestamp(), + last_reported_ts=None, ) lstate = LazyState( row, {}, None, row.entity_id, row.state, row.last_updated_ts, False @@ -347,6 +378,7 @@ async def test_lazy_state_handles_same_last_updated_and_last_changed( } assert lstate.last_updated.timestamp() == row.last_updated_ts assert lstate.last_changed.timestamp() == row.last_changed_ts + assert lstate.last_reported.timestamp() == row.last_updated_ts assert lstate.as_dict() == { "attributes": {"shared": True}, "entity_id": "sensor.valid", @@ -354,75 +386,37 @@ async def test_lazy_state_handles_same_last_updated_and_last_changed( "last_updated": "2021-06-12T03:04:01.000323+00:00", "state": "off", } + assert lstate.last_changed_timestamp == row.last_changed_ts + assert lstate.last_updated_timestamp == row.last_updated_ts + assert lstate.last_reported_timestamp == row.last_updated_ts -@pytest.mark.parametrize( - "time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii", "UTC"] -) -async def test_process_datetime_to_timestamp(time_zone, hass: HomeAssistant) -> None: - """Test we can handle processing database datatimes to timestamps.""" - await hass.config.async_set_time_zone(time_zone) - utc_now = dt_util.utcnow() - assert process_datetime_to_timestamp(utc_now) == utc_now.timestamp() - now = dt_util.now() - assert process_datetime_to_timestamp(now) == now.timestamp() - - -@pytest.mark.parametrize( - "time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii", "UTC"] -) -async def test_process_datetime_to_timestamp_freeze_time( - time_zone, hass: HomeAssistant +async def test_lazy_state_handles_different_last_reported( + caplog: pytest.LogCaptureFixture, ) -> None: - """Test we can handle processing database datatimes to timestamps. - - This test freezes time to make sure everything matches. 
- """ - await hass.config.async_set_time_zone(time_zone) - utc_now = dt_util.utcnow() - with freeze_time(utc_now): - epoch = utc_now.timestamp() - assert process_datetime_to_timestamp(dt_util.utcnow()) == epoch - now = dt_util.now() - assert process_datetime_to_timestamp(now) == epoch - - -@pytest.mark.parametrize( - "time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii", "UTC"] -) -async def test_process_datetime_to_timestamp_mirrors_utc_isoformat_behavior( - time_zone, hass: HomeAssistant -) -> None: - """Test process_datetime_to_timestamp mirrors process_timestamp_to_utc_isoformat.""" - await hass.config.async_set_time_zone(time_zone) - datetime_with_tzinfo = datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt_util.UTC) - datetime_without_tzinfo = datetime(2016, 7, 9, 11, 0, 0) - est = dt_util.get_time_zone("US/Eastern") - datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est) - est = dt_util.get_time_zone("US/Eastern") - datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est) - nst = dt_util.get_time_zone("Canada/Newfoundland") - datetime_nst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=nst) - hst = dt_util.get_time_zone("US/Hawaii") - datetime_hst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=hst) - - assert ( - process_datetime_to_timestamp(datetime_with_tzinfo) - == dt_util.parse_datetime("2016-07-09T11:00:00+00:00").timestamp() + """Test that the LazyState handles last_reported different from last_updated.""" + now = datetime(2021, 6, 12, 3, 4, 1, 323, tzinfo=dt_util.UTC) + row = PropertyMock( + entity_id="sensor.valid", + state="off", + attributes='{"shared":true}', + last_updated_ts=(now - timedelta(seconds=60)).timestamp(), + last_reported_ts=now.timestamp(), + last_changed_ts=(now - timedelta(seconds=60)).timestamp(), ) - assert ( - process_datetime_to_timestamp(datetime_without_tzinfo) - == dt_util.parse_datetime("2016-07-09T11:00:00+00:00").timestamp() - ) - assert ( - process_datetime_to_timestamp(datetime_est_timezone) - == dt_util.parse_datetime("2016-07-09T15:00:00+00:00").timestamp() - ) - assert ( - process_datetime_to_timestamp(datetime_nst_timezone) - == dt_util.parse_datetime("2016-07-09T13:30:00+00:00").timestamp() - ) - assert ( - process_datetime_to_timestamp(datetime_hst_timezone) - == dt_util.parse_datetime("2016-07-09T21:00:00+00:00").timestamp() + lstate = LazyState( + row, {}, None, row.entity_id, row.state, row.last_updated_ts, False ) + assert lstate.as_dict() == { + "attributes": {"shared": True}, + "entity_id": "sensor.valid", + "last_changed": "2021-06-12T03:03:01.000323+00:00", + "last_updated": "2021-06-12T03:03:01.000323+00:00", + "state": "off", + } + assert lstate.last_updated.timestamp() == row.last_updated_ts + assert lstate.last_changed.timestamp() == row.last_changed_ts + assert lstate.last_reported.timestamp() == row.last_reported_ts + assert lstate.last_changed_timestamp == row.last_changed_ts + assert lstate.last_updated_timestamp == row.last_updated_ts + assert lstate.last_reported_timestamp == row.last_reported_ts diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 245acf4603d..ea764b14401 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -352,6 +352,8 @@ async def test_purge_old_recorder_runs( with session_scope(hass=hass) as session: recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 7 + # Make sure we have a run that is not closed + assert sum(run.end is None for run in recorder_runs) == 
1 purge_before = dt_util.utcnow() @@ -376,7 +378,9 @@ async def test_purge_old_recorder_runs( with session_scope(hass=hass) as session: recorder_runs = session.query(RecorderRuns) - assert recorder_runs.count() == 1 + assert recorder_runs.count() == 3 + # Make sure we did not purge the unclosed run + assert sum(run.end is None for run in recorder_runs) == 1 async def test_purge_old_statistics_runs( @@ -543,7 +547,7 @@ async def test_purge_edge_case( event_type="EVENT_TEST_PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) session.add( @@ -551,8 +555,8 @@ async def test_purge_edge_case( entity_id="test.recorder2", state="purgeme", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=1001, attributes_id=1002, ) @@ -614,7 +618,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - event_type="KEEP", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp_keep), + time_fired_ts=timestamp_keep.timestamp(), ) ) session.add( @@ -622,8 +626,8 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - entity_id="test.cutoff", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp_keep), - last_updated_ts=dt_util.utc_to_timestamp(timestamp_keep), + last_changed_ts=timestamp_keep.timestamp(), + last_updated_ts=timestamp_keep.timestamp(), event_id=1000, attributes_id=1000, ) @@ -642,7 +646,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - event_type="PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp_purge), + time_fired_ts=timestamp_purge.timestamp(), ) ) session.add( @@ -650,8 +654,8 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - entity_id="test.cutoff", state="purge", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp_purge), - last_updated_ts=dt_util.utc_to_timestamp(timestamp_purge), + last_changed_ts=timestamp_purge.timestamp(), + last_updated_ts=timestamp_purge.timestamp(), event_id=1000 + row, attributes_id=1000 + row, ) @@ -800,8 +804,8 @@ async def test_purge_filtered_states( entity_id="sensor.excluded", state="purgeme", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), ) ) # Add states and state_changed events that should be keeped @@ -826,8 +830,8 @@ async def test_purge_filtered_states( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=1, state_attributes=state_attrs, ) @@ -836,8 +840,8 @@ async def test_purge_filtered_states( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=2, state_attributes=state_attrs, ) @@ -845,8 +849,8 @@ async def test_purge_filtered_states( 
entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=62, # keep state_attributes=state_attrs, ) @@ -858,7 +862,7 @@ async def test_purge_filtered_states( event_type="EVENT_KEEP", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) convert_pending_states_to_meta(recorder_mock, session) @@ -964,6 +968,171 @@ async def test_purge_filtered_states( assert session.query(StateAttributes).count() == 0 +@pytest.mark.parametrize( + "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}] +) +async def test_purge_filtered_states_multiple_rounds( + hass: HomeAssistant, + recorder_mock: Recorder, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test filtered states are purged when there are multiple rounds to purge.""" + assert recorder_mock.entity_filter("sensor.excluded") is False + + def _add_db_entries(hass: HomeAssistant) -> None: + with session_scope(hass=hass) as session: + # Add states and state_changed events that should be purged + for days in range(1, 4): + timestamp = dt_util.utcnow() - timedelta(days=days) + for event_id in range(1000, 1020): + _add_state_with_state_attributes( + session, + "sensor.excluded", + "purgeme", + timestamp, + event_id * days, + ) + # Add state **without** state_changed event that should be purged + timestamp = dt_util.utcnow() - timedelta(days=1) + session.add( + States( + entity_id="sensor.excluded", + state="purgeme", + attributes="{}", + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), + ) + ) + # Add states and state_changed events that should be keeped + timestamp = dt_util.utcnow() - timedelta(days=2) + for event_id in range(200, 210): + _add_state_with_state_attributes( + session, + "sensor.keep", + "keep", + timestamp, + event_id, + ) + # Add states with linked old_state_ids that need to be handled + timestamp = dt_util.utcnow() - timedelta(days=0) + state_attrs = StateAttributes( + hash=0, + shared_attrs=json.dumps( + {"sensor.linked_old_state_id": "sensor.linked_old_state_id"} + ), + ) + state_1 = States( + entity_id="sensor.linked_old_state_id", + state="keep", + attributes="{}", + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), + old_state_id=1, + state_attributes=state_attrs, + ) + timestamp = dt_util.utcnow() - timedelta(days=4) + state_2 = States( + entity_id="sensor.linked_old_state_id", + state="keep", + attributes="{}", + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), + old_state_id=2, + state_attributes=state_attrs, + ) + state_3 = States( + entity_id="sensor.linked_old_state_id", + state="keep", + attributes="{}", + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), + old_state_id=62, # keep + state_attributes=state_attrs, + ) + session.add_all((state_attrs, state_1, state_2, state_3)) + # Add event that should be keeped + session.add( + Events( + event_id=100, + event_type="EVENT_KEEP", + event_data="{}", + origin="LOCAL", + time_fired_ts=timestamp.timestamp(), + ) + ) + convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(recorder_mock, session) + + service_data = {"keep_days": 10, "apply_filter": True} + _add_db_entries(hass) + + with session_scope(hass=hass) as 
session: + states = session.query(States) + assert states.count() == 74 + events_keep = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(("EVENT_KEEP",))) + ) + assert events_keep.count() == 1 + + await hass.services.async_call( + RECORDER_DOMAIN, SERVICE_PURGE, service_data, blocking=True + ) + + for _ in range(2): + # Make sure the second round of purging runs + await async_recorder_block_till_done(hass) + await async_wait_purge_done(hass) + + assert "Cleanup filtered data hasn't fully completed yet" in caplog.text + caplog.clear() + + with session_scope(hass=hass) as session: + states = session.query(States) + assert states.count() == 13 + events_keep = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(("EVENT_KEEP",))) + ) + assert events_keep.count() == 1 + + states_sensor_excluded = ( + session.query(States) + .outerjoin(StatesMeta, States.metadata_id == StatesMeta.metadata_id) + .filter(StatesMeta.entity_id == "sensor.excluded") + ) + assert states_sensor_excluded.count() == 0 + query = session.query(States) + + assert query.filter(States.state_id == 72).first().old_state_id is None + assert query.filter(States.state_id == 72).first().attributes_id == 71 + assert query.filter(States.state_id == 73).first().old_state_id is None + assert query.filter(States.state_id == 73).first().attributes_id == 71 + + final_keep_state = session.query(States).filter(States.state_id == 74).first() + assert final_keep_state.old_state_id == 62 # should have been kept + assert final_keep_state.attributes_id == 71 + + assert session.query(StateAttributes).count() == 11 + + # Do it again to make sure nothing changes + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await async_recorder_block_till_done(hass) + await async_wait_purge_done(hass) + + with session_scope(hass=hass) as session: + final_keep_state = session.query(States).filter(States.state_id == 74).first() + assert final_keep_state.old_state_id == 62 # should have been kept + assert final_keep_state.attributes_id == 71 + + assert session.query(StateAttributes).count() == 11 + + for _ in range(2): + # Make sure the second round of purging runs + await async_recorder_block_till_done(hass) + await async_wait_purge_done(hass) + + assert "Cleanup filtered data hasn't fully completed yet" not in caplog.text + + @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) @pytest.mark.parametrize( "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}] @@ -1042,8 +1211,8 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( entity_id="sensor.old_format", state=STATE_ON, attributes=json.dumps({"old": "not_using_state_attributes"}), - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=event_id, state_attributes=None, ) @@ -1054,7 +1223,7 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( event_type=EVENT_STATE_CHANGED, event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) session.add( @@ -1063,7 +1232,7 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( event_type=EVENT_THEMES_UPDATED, event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) convert_pending_states_to_meta(recorder_mock, 
session) @@ -1118,7 +1287,7 @@ async def test_purge_filtered_events( event_type="EVENT_PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) @@ -1225,7 +1394,7 @@ async def test_purge_filtered_events_state_changed( event_type="EVENT_KEEP", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) # Add states with linked old_state_ids that need to be handled @@ -1234,8 +1403,8 @@ async def test_purge_filtered_events_state_changed( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=1, ) timestamp = dt_util.utcnow() - timedelta(days=4) @@ -1243,16 +1412,16 @@ async def test_purge_filtered_events_state_changed( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=2, ) state_3 = States( entity_id="sensor.linked_old_state_id", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), old_state_id=62, # keep ) session.add_all((state_1, state_2, state_3)) @@ -1262,7 +1431,7 @@ async def test_purge_filtered_events_state_changed( event_type="excluded_event", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) session.add( @@ -1270,8 +1439,8 @@ async def test_purge_filtered_events_state_changed( entity_id="sensor.old_format", state="remove", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), ) ) convert_pending_events_to_event_types(recorder_mock, session) @@ -1637,8 +1806,8 @@ def _add_state_without_event_linkage( entity_id=entity_id, state=state, attributes=None, - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=None, state_attributes=state_attrs, ) @@ -1662,8 +1831,8 @@ def _add_state_with_state_attributes( entity_id=entity_id, state=state, attributes=None, - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=event_id, state_attributes=state_attrs, ) @@ -1744,8 +1913,6 @@ async def test_purge_old_events_purges_the_event_type_ids( hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old events purges event type ids.""" - assert recorder_mock.event_type_manager.active is True - utcnow = dt_util.utcnow() five_days_ago = utcnow - timedelta(days=5) eleven_days_ago = utcnow - timedelta(days=11) @@ -1785,7 +1952,7 @@ async def test_purge_old_events_purges_the_event_type_ids( Events( event_type=None, event_type_id=event_type.event_type_id, - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + 
time_fired_ts=timestamp.timestamp(), ) ) return recorder_mock.event_type_manager.get_many( @@ -1915,7 +2082,7 @@ async def test_purge_old_states_purges_the_state_metadata_ids( States( metadata_id=metadata_id, state="any", - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_updated_ts=timestamp.timestamp(), ) ) return recorder_mock.states_meta_manager.get_many( diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index 0754b2e911c..2bd1e7fd7f7 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -347,7 +347,7 @@ async def test_purge_old_recorder_runs( with session_scope(hass=hass) as session: recorder_runs = session.query(RecorderRuns) - assert recorder_runs.count() == 1 + assert recorder_runs.count() == 3 async def test_purge_old_statistics_runs( @@ -509,7 +509,7 @@ async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: event_type="EVENT_TEST_PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) session.add( @@ -517,8 +517,8 @@ async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: entity_id="test.recorder2", state="purgeme", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=1001, attributes_id=1002, ) @@ -576,7 +576,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - event_type="KEEP", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp_keep), + time_fired_ts=timestamp_keep.timestamp(), ) ) session.add( @@ -584,8 +584,8 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - entity_id="test.cutoff", state="keep", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp_keep), - last_updated_ts=dt_util.utc_to_timestamp(timestamp_keep), + last_changed_ts=timestamp_keep.timestamp(), + last_updated_ts=timestamp_keep.timestamp(), event_id=1000, attributes_id=1000, ) @@ -604,7 +604,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - event_type="PURGE", event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp_purge), + time_fired_ts=timestamp_purge.timestamp(), ) ) session.add( @@ -612,8 +612,8 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - entity_id="test.cutoff", state="purge", attributes="{}", - last_changed_ts=dt_util.utc_to_timestamp(timestamp_purge), - last_updated_ts=dt_util.utc_to_timestamp(timestamp_purge), + last_changed_ts=timestamp_purge.timestamp(), + last_updated_ts=timestamp_purge.timestamp(), event_id=1000 + row, attributes_id=1000 + row, ) @@ -771,7 +771,7 @@ async def _add_test_events(hass: HomeAssistant, iterations: int = 1): event_type=event_type, event_data=json.dumps(event_data), origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) @@ -808,7 +808,7 @@ async def _add_events_with_event_data(hass: HomeAssistant, iterations: int = 1): Events( event_type=event_type, origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), event_data_rel=event_data, ) ) @@ -910,8 +910,8 @@ def _add_state_without_event_linkage( entity_id=entity_id, 
state=state, attributes=None, - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=None, state_attributes=state_attrs, ) @@ -935,8 +935,8 @@ def _add_state_and_state_changed_event( entity_id=entity_id, state=state, attributes=None, - last_changed_ts=dt_util.utc_to_timestamp(timestamp), - last_updated_ts=dt_util.utc_to_timestamp(timestamp), + last_changed_ts=timestamp.timestamp(), + last_updated_ts=timestamp.timestamp(), event_id=event_id, state_attributes=state_attrs, ) @@ -947,7 +947,7 @@ def _add_state_and_state_changed_event( event_type=EVENT_STATE_CHANGED, event_data="{}", origin="LOCAL", - time_fired_ts=dt_util.utc_to_timestamp(timestamp), + time_fired_ts=timestamp.timestamp(), ) ) diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 5cbb29afc91..6b1e1a655db 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -337,12 +337,12 @@ def mock_from_stats(): counter = 0 real_from_stats = StatisticsShortTerm.from_stats - def from_stats(metadata_id, stats): + def from_stats(metadata_id, stats, now_timestamp): nonlocal counter if counter == 0 and metadata_id == 2: counter += 1 return None - return real_from_stats(metadata_id, stats) + return real_from_stats(metadata_id, stats, now_timestamp) with patch( "homeassistant.components.recorder.statistics.StatisticsShortTerm.from_stats", @@ -2512,6 +2512,7 @@ async def test_recorder_platform_with_statistics( recorder_platform = Mock( compile_statistics=Mock(wraps=_mock_compile_statistics), list_statistic_ids=Mock(wraps=_mock_list_statistic_ids), + update_statistics_issues=Mock(), validate_statistics=Mock(wraps=_mock_validate_statistics), ) @@ -2523,16 +2524,20 @@ async def test_recorder_platform_with_statistics( recorder_platform.compile_statistics.assert_not_called() recorder_platform.list_statistic_ids.assert_not_called() + recorder_platform.update_statistics_issues.assert_not_called() recorder_platform.validate_statistics.assert_not_called() - # Test compile statistics - zero = get_start_time(dt_util.utcnow()) + # Test compile statistics + update statistics issues + # Issues are updated hourly when minutes = 50, trigger one hour later to make + # sure statistics is not suppressed by an existing row in StatisticsRuns + zero = get_start_time(dt_util.utcnow()).replace(minute=50) + timedelta(hours=1) do_adhoc_statistics(hass, start=zero) await async_wait_recording_done(hass) recorder_platform.compile_statistics.assert_called_once_with( hass, ANY, zero, zero + timedelta(minutes=5) ) + recorder_platform.update_statistics_issues.assert_called_once_with(hass, ANY) recorder_platform.list_statistic_ids.assert_not_called() recorder_platform.validate_statistics.assert_not_called() @@ -2542,6 +2547,7 @@ async def test_recorder_platform_with_statistics( recorder_platform.list_statistic_ids.assert_called_once_with( hass, statistic_ids=None, statistic_type=None ) + recorder_platform.update_statistics_issues.assert_called_once() recorder_platform.validate_statistics.assert_not_called() # Test validate statistics @@ -2551,6 +2557,7 @@ async def test_recorder_platform_with_statistics( ) recorder_platform.compile_statistics.assert_called_once() recorder_platform.list_statistic_ids.assert_called_once() + recorder_platform.update_statistics_issues.assert_called_once() 
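For illustration only (this note and sketch are not part of the patch): the start-time arithmetic used above for the statistics-issues tests can be reproduced with a minimal, stdlib-only sketch. The `get_start_time` below is a simplified stand-in for the recorder helper of the same name, assumed here to return the start of the previous 5-minute short-term statistics period; the date used is arbitrary.

    from datetime import datetime, timedelta, timezone

    def get_start_time(now: datetime) -> datetime:
        # Simplified stand-in: start of the previous 5-minute statistics period.
        floored = now.replace(minute=now.minute - now.minute % 5, second=0, microsecond=0)
        return floored - timedelta(minutes=5)

    now = datetime(2024, 5, 1, 12, 7, tzinfo=timezone.utc)
    # Mirror the test: move to minute 50 of the next hour so the ad-hoc run both
    # compiles statistics and reaches the hourly issue-update trigger, and is not
    # suppressed by an existing StatisticsRuns row for the current period.
    zero = get_start_time(now).replace(minute=50) + timedelta(hours=1)
    print(zero)  # 2024-05-01 13:50:00+00:00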
recorder_platform.validate_statistics.assert_called_once_with(hass) @@ -2575,6 +2582,7 @@ async def test_recorder_platform_without_statistics( [ ("compile_statistics",), ("list_statistic_ids",), + ("update_statistics_issues",), ("validate_statistics",), ], ) @@ -2601,6 +2609,7 @@ async def test_recorder_platform_with_partial_statistics_support( mock_impl = { "compile_statistics": _mock_compile_statistics, "list_statistic_ids": _mock_list_statistic_ids, + "update_statistics_issues": None, "validate_statistics": _mock_validate_statistics, } @@ -2620,8 +2629,10 @@ async def test_recorder_platform_with_partial_statistics_support( for meth in supported_methods: getattr(recorder_platform, meth).assert_not_called() - # Test compile statistics - zero = get_start_time(dt_util.utcnow()) + # Test compile statistics + update statistics issues + # Issues are updated hourly when minutes = 50, trigger one hour later to make + # sure statistics is not suppressed by an existing row in StatisticsRuns + zero = get_start_time(dt_util.utcnow()).replace(minute=50) + timedelta(hours=1) do_adhoc_statistics(hass, start=zero) await async_wait_recording_done(hass) diff --git a/tests/components/recorder/test_statistics_v23_migration.py b/tests/components/recorder/test_statistics_v23_migration.py index 53c59635e8c..1f9be0cabee 100644 --- a/tests/components/recorder/test_statistics_v23_migration.py +++ b/tests/components/recorder/test_statistics_v23_migration.py @@ -168,6 +168,9 @@ async def test_delete_duplicates( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -352,6 +355,9 @@ async def test_delete_duplicates_many( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -515,6 +521,9 @@ async def test_delete_duplicates_non_identical( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -638,6 +647,9 @@ async def test_delete_duplicates_short_term( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( CREATE_ENGINE_TARGET, new=partial( diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index d850778d214..99bd5083489 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -1,12 +1,15 @@ """Test util methods.""" +from contextlib import AbstractContextManager, nullcontext as does_not_raise from datetime import UTC, datetime, timedelta import os from pathlib import Path import sqlite3 import threading +from typing import Any from unittest.mock import MagicMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from sqlalchemy import lambda_stmt, text from sqlalchemy.engine.result import ChunkedIteratorResult @@ -16,7 +19,11 @@ from sqlalchemy.sql.lambdas import StatementLambdaElement from homeassistant.components import recorder from homeassistant.components.recorder import Recorder, util -from homeassistant.components.recorder.const import DOMAIN, SQLITE_URL_PREFIX +from 
homeassistant.components.recorder.const import ( + DOMAIN, + SQLITE_URL_PREFIX, + SupportedDialect, +) from homeassistant.components.recorder.db_schema import RecorderRuns from homeassistant.components.recorder.history.modern import ( _get_single_entity_start_time_stmt, @@ -27,10 +34,14 @@ from homeassistant.components.recorder.models import ( ) from homeassistant.components.recorder.util import ( MIN_VERSION_SQLITE, + RETRYABLE_MYSQL_ERRORS, UPCOMING_MIN_VERSION_SQLITE, + database_job_retry_wrapper, end_incomplete_runs, is_second_sunday, resolve_period, + retryable_database_job, + retryable_database_job_method, session_scope, ) from homeassistant.const import EVENT_HOMEASSISTANT_STOP @@ -1042,55 +1053,82 @@ async def test_execute_stmt_lambda_element( assert rows == ["mock_row"] -@pytest.mark.freeze_time(datetime(2022, 10, 21, 7, 25, tzinfo=UTC)) -async def test_resolve_period(hass: HomeAssistant) -> None: - """Test statistic_during_period.""" +@pytest.mark.parametrize( + ("start_time", "periods"), + [ + ( + # Test 00:25 local time, during DST + datetime(2022, 10, 21, 7, 25, 50, 123, tzinfo=UTC), + { + ("hour", 0): ("2022-10-21T07:00:00", "2022-10-21T08:00:00"), + ("hour", -1): ("2022-10-21T06:00:00", "2022-10-21T07:00:00"), + ("hour", 1): ("2022-10-21T08:00:00", "2022-10-21T09:00:00"), + ("day", 0): ("2022-10-21T07:00:00", "2022-10-22T07:00:00"), + ("day", -1): ("2022-10-20T07:00:00", "2022-10-21T07:00:00"), + ("day", 1): ("2022-10-22T07:00:00", "2022-10-23T07:00:00"), + ("week", 0): ("2022-10-17T07:00:00", "2022-10-24T07:00:00"), + ("week", -1): ("2022-10-10T07:00:00", "2022-10-17T07:00:00"), + ("week", 1): ("2022-10-24T07:00:00", "2022-10-31T07:00:00"), + ("month", 0): ("2022-10-01T07:00:00", "2022-11-01T07:00:00"), + ("month", -1): ("2022-09-01T07:00:00", "2022-10-01T07:00:00"), + ("month", -12): ("2021-10-01T07:00:00", "2021-11-01T07:00:00"), + ("month", 1): ("2022-11-01T07:00:00", "2022-12-01T08:00:00"), + ("month", 2): ("2022-12-01T08:00:00", "2023-01-01T08:00:00"), + ("month", 3): ("2023-01-01T08:00:00", "2023-02-01T08:00:00"), + ("month", 12): ("2023-10-01T07:00:00", "2023-11-01T07:00:00"), + ("month", 13): ("2023-11-01T07:00:00", "2023-12-01T08:00:00"), + ("month", 14): ("2023-12-01T08:00:00", "2024-01-01T08:00:00"), + ("year", 0): ("2022-01-01T08:00:00", "2023-01-01T08:00:00"), + ("year", -1): ("2021-01-01T08:00:00", "2022-01-01T08:00:00"), + ("year", 1): ("2023-01-01T08:00:00", "2024-01-01T08:00:00"), + }, + ), + ( + # Test 00:25 local time, standard time, February 28th a leap year + datetime(2024, 2, 28, 8, 25, 50, 123, tzinfo=UTC), + { + ("hour", 0): ("2024-02-28T08:00:00", "2024-02-28T09:00:00"), + ("hour", -1): ("2024-02-28T07:00:00", "2024-02-28T08:00:00"), + ("hour", 1): ("2024-02-28T09:00:00", "2024-02-28T10:00:00"), + ("day", 0): ("2024-02-28T08:00:00", "2024-02-29T08:00:00"), + ("day", -1): ("2024-02-27T08:00:00", "2024-02-28T08:00:00"), + ("day", 1): ("2024-02-29T08:00:00", "2024-03-01T08:00:00"), + ("week", 0): ("2024-02-26T08:00:00", "2024-03-04T08:00:00"), + ("week", -1): ("2024-02-19T08:00:00", "2024-02-26T08:00:00"), + ("week", 1): ("2024-03-04T08:00:00", "2024-03-11T07:00:00"), + ("month", 0): ("2024-02-01T08:00:00", "2024-03-01T08:00:00"), + ("month", -1): ("2024-01-01T08:00:00", "2024-02-01T08:00:00"), + ("month", -2): ("2023-12-01T08:00:00", "2024-01-01T08:00:00"), + ("month", -3): ("2023-11-01T07:00:00", "2023-12-01T08:00:00"), + ("month", -12): ("2023-02-01T08:00:00", "2023-03-01T08:00:00"), + ("month", -13): ("2023-01-01T08:00:00", "2023-02-01T08:00:00"), 
+ ("month", -14): ("2022-12-01T08:00:00", "2023-01-01T08:00:00"), + ("month", 1): ("2024-03-01T08:00:00", "2024-04-01T07:00:00"), + ("year", 0): ("2024-01-01T08:00:00", "2025-01-01T08:00:00"), + ("year", -1): ("2023-01-01T08:00:00", "2024-01-01T08:00:00"), + ("year", 1): ("2025-01-01T08:00:00", "2026-01-01T08:00:00"), + }, + ), + ], +) +async def test_resolve_period( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + start_time: datetime, + periods: dict[tuple[str, int], tuple[str, str]], +) -> None: + """Test resolve_period.""" + assert hass.config.time_zone == "US/Pacific" + freezer.move_to(start_time) now = dt_util.utcnow() - start_t, end_t = resolve_period({"calendar": {"period": "hour"}}) - assert start_t.isoformat() == "2022-10-21T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T08:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "hour"}}) - assert start_t.isoformat() == "2022-10-21T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T08:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "hour", "offset": -1}}) - assert start_t.isoformat() == "2022-10-21T06:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T07:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "day"}}) - assert start_t.isoformat() == "2022-10-21T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-22T07:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "day", "offset": -1}}) - assert start_t.isoformat() == "2022-10-20T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T07:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "week"}}) - assert start_t.isoformat() == "2022-10-17T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-24T07:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "week", "offset": -1}}) - assert start_t.isoformat() == "2022-10-10T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-17T07:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "month"}}) - assert start_t.isoformat() == "2022-10-01T07:00:00+00:00" - assert end_t.isoformat() == "2022-11-01T07:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "month", "offset": -1}}) - assert start_t.isoformat() == "2022-09-01T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-01T07:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "year"}}) - assert start_t.isoformat() == "2022-01-01T08:00:00+00:00" - assert end_t.isoformat() == "2023-01-01T08:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "year", "offset": -1}}) - assert start_t.isoformat() == "2021-01-01T08:00:00+00:00" - assert end_t.isoformat() == "2022-01-01T08:00:00+00:00" + for period_def, expected_period in periods.items(): + start_t, end_t = resolve_period( + {"calendar": {"period": period_def[0], "offset": period_def[1]}} + ) + assert start_t.isoformat() == f"{expected_period[0]}+00:00" + assert end_t.isoformat() == f"{expected_period[1]}+00:00" # Fixed period assert resolve_period({}) == (None, None) @@ -1117,3 +1155,129 @@ async def test_resolve_period(hass: HomeAssistant) -> None: } } ) == (now - timedelta(hours=1, minutes=25), now - timedelta(minutes=25)) + + +NonRetryable = OperationalError(None, None, BaseException()) +Retryable = OperationalError(None, None, BaseException(RETRYABLE_MYSQL_ERRORS[0], "")) + + +@pytest.mark.parametrize( + ("side_effect", "dialect", "retval", "expected_result", "num_calls"), + [ + (None, 
SupportedDialect.MYSQL, None, does_not_raise(), 1), + (ValueError, SupportedDialect.MYSQL, None, pytest.raises(ValueError), 1), + ( + NonRetryable, + SupportedDialect.MYSQL, + None, + pytest.raises(OperationalError), + 1, + ), + (Retryable, SupportedDialect.MYSQL, None, pytest.raises(OperationalError), 5), + ( + NonRetryable, + SupportedDialect.SQLITE, + None, + pytest.raises(OperationalError), + 1, + ), + (Retryable, SupportedDialect.SQLITE, None, pytest.raises(OperationalError), 1), + ], +) +def test_database_job_retry_wrapper( + side_effect: Any, + dialect: str, + retval: Any, + expected_result: AbstractContextManager, + num_calls: int, +) -> None: + """Test database_job_retry_wrapper.""" + + instance = Mock() + instance.db_retry_wait = 0 + instance.engine.dialect.name = dialect + mock_job = Mock(side_effect=side_effect) + + @database_job_retry_wrapper("test", 5) + def job(instance, *args, **kwargs) -> None: + mock_job() + return retval + + with expected_result: + assert job(instance) == retval + + assert len(mock_job.mock_calls) == num_calls + + +@pytest.mark.parametrize( + ("side_effect", "dialect", "retval", "expected_result"), + [ + (None, SupportedDialect.MYSQL, False, does_not_raise()), + (None, SupportedDialect.MYSQL, True, does_not_raise()), + (ValueError, SupportedDialect.MYSQL, False, pytest.raises(ValueError)), + (NonRetryable, SupportedDialect.MYSQL, True, does_not_raise()), + (Retryable, SupportedDialect.MYSQL, False, does_not_raise()), + (NonRetryable, SupportedDialect.SQLITE, True, does_not_raise()), + (Retryable, SupportedDialect.SQLITE, True, does_not_raise()), + ], +) +def test_retryable_database_job( + side_effect: Any, + retval: bool, + expected_result: AbstractContextManager, + dialect: str, +) -> None: + """Test retryable_database_job.""" + + instance = Mock() + instance.db_retry_wait = 0 + instance.engine.dialect.name = dialect + mock_job = Mock(side_effect=side_effect) + + @retryable_database_job(description="test") + def job(instance, *args, **kwargs) -> bool: + mock_job() + return retval + + with expected_result: + assert job(instance) == retval + + assert len(mock_job.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("side_effect", "dialect", "retval", "expected_result"), + [ + (None, SupportedDialect.MYSQL, False, does_not_raise()), + (None, SupportedDialect.MYSQL, True, does_not_raise()), + (ValueError, SupportedDialect.MYSQL, False, pytest.raises(ValueError)), + (NonRetryable, SupportedDialect.MYSQL, True, does_not_raise()), + (Retryable, SupportedDialect.MYSQL, False, does_not_raise()), + (NonRetryable, SupportedDialect.SQLITE, True, does_not_raise()), + (Retryable, SupportedDialect.SQLITE, True, does_not_raise()), + ], +) +def test_retryable_database_job_method( + side_effect: Any, + retval: bool, + expected_result: AbstractContextManager, + dialect: str, +) -> None: + """Test retryable_database_job_method.""" + + instance = Mock() + instance.db_retry_wait = 0 + instance.engine.dialect.name = dialect + mock_job = Mock(side_effect=side_effect) + + class Test: + @retryable_database_job_method(description="test") + def job(self, instance, *args, **kwargs) -> bool: + mock_job() + return retval + + test = Test() + with expected_result: + assert test.job(instance) == retval + + assert len(mock_job.mock_calls) == 1 diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 1006a03f4ec..d59486b61f0 100644 --- a/tests/components/recorder/test_v32_migration.py +++ 
b/tests/components/recorder/test_v32_migration.py @@ -1,5 +1,6 @@ """The tests for recorder platform migrating data from v30.""" +from collections.abc import Callable from datetime import timedelta import importlib import sys @@ -25,32 +26,42 @@ from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" -SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +SCHEMA_MODULE_30 = "tests.components.recorder.db_schema_30" +SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" -def _create_engine_test(*args, **kwargs): +def _create_engine_test(schema_module: str) -> Callable: """Test version of create_engine that initializes with old schema. This simulates an existing db with the old schema. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] - engine = create_engine(*args, **kwargs) - old_db_schema.Base.metadata.create_all(engine) - with Session(engine) as session: - session.add( - recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) - ) - session.add( - recorder.db_schema.SchemaChanges( - schema_version=old_db_schema.SCHEMA_VERSION + + def _create_engine_test(*args, **kwargs): + """Test version of create_engine that initializes with old schema. + + This simulates an existing db with the old schema. + """ + importlib.import_module(schema_module) + old_db_schema = sys.modules[schema_module] + engine = create_engine(*args, **kwargs) + old_db_schema.Base.metadata.create_all(engine) + with Session(engine) as session: + session.add( + recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) ) - ) - session.commit() - return engine + session.add( + recorder.db_schema.SchemaChanges( + schema_version=old_db_schema.SCHEMA_VERSION + ) + ) + session.commit() + return engine + + return _create_engine_test -@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) @@ -59,9 +70,9 @@ async def test_migrate_times( async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, ) -> None: - """Test we can migrate times.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + """Test we can migrate times in the events and states tables.""" + importlib.import_module(SCHEMA_MODULE_30) + old_db_schema = sys.modules[SCHEMA_MODULE_30] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) now_timestamp = now.timestamp() @@ -99,20 +110,19 @@ async def test_migrate_times( with ( patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), + patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(migration.EventsContextIDMigration, "migrate_data"), patch.object(migration.StatesContextIDMigration, "migrate_data"), patch.object(migration.EventTypeIDMigration, "migrate_data"), patch.object(migration.EntityIDMigration, "migrate_data"), + patch.object(migration.EventIDPostMigration, "migrate_data"), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), 
patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_30)), ): async with ( async_test_home_assistant() as hass, @@ -182,9 +192,12 @@ async def test_migrate_times( assert len(events_result) == 1 assert events_result[0].time_fired_ts == now_timestamp + assert events_result[0].time_fired is None assert len(states_result) == 1 assert states_result[0].last_changed_ts == one_second_past_timestamp assert states_result[0].last_updated_ts == now_timestamp + assert states_result[0].last_changed is None + assert states_result[0].last_updated is None def _get_events_index_names(): with session_scope(hass=hass) as session: @@ -208,6 +221,7 @@ async def test_migrate_times( await hass.async_stop() +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_can_resume_entity_id_post_migration( @@ -216,8 +230,8 @@ async def test_migrate_can_resume_entity_id_post_migration( recorder_db_url: str, ) -> None: """Test we resume the entity id post migration after a restart.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) mock_state = State( @@ -251,19 +265,16 @@ async def test_migrate_can_resume_entity_id_post_migration( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "non_live_data_migration_needed", return_value=False), + patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), ): async with ( async_test_home_assistant() as hass, @@ -314,6 +325,7 @@ async def test_migrate_can_resume_entity_id_post_migration( await hass.async_stop() +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.parametrize("enable_migrate_event_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage @@ -327,8 +339,8 @@ async def test_migrate_can_resume_ix_states_event_id_removed( This case tests the migration still happens if ix_states_event_id is 
removed from the states table. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) mock_state = State( @@ -373,19 +385,16 @@ async def test_migrate_can_resume_ix_states_event_id_removed( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "non_live_data_migration_needed", return_value=False), + patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), ): async with ( async_test_home_assistant() as hass, @@ -463,8 +472,8 @@ async def test_out_of_disk_space_while_rebuild_states_table( This case tests the migration still happens if ix_states_event_id is removed from the states table. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) mock_state = State( @@ -509,19 +518,16 @@ async def test_out_of_disk_space_while_rebuild_states_table( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "non_live_data_migration_needed", return_value=False), + patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), ): async with ( async_test_home_assistant() as hass, @@ -626,6 +632,7 @@ async def test_out_of_disk_space_while_rebuild_states_table( @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.parametrize("enable_migrate_event_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage @@ -641,10 +648,10 @@ async def 
test_out_of_disk_space_while_removing_foreign_key( Note that the test is somewhat forced; the states.event_id foreign key constraint is removed when migrating to schema version 46, inspecting the schema in - cleanup_legacy_states_event_ids is not likely to fail. + EventIDPostMigration.migrate_data, is not likely to fail. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) mock_state = State( @@ -689,19 +696,16 @@ async def test_out_of_disk_space_while_removing_foreign_key( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "non_live_data_migration_needed", return_value=False), + patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), ): async with ( async_test_home_assistant() as hass, diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index 8efbf226bc1..403384aee9f 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -51,6 +51,16 @@ async def mock_recorder_before_hass( """Set up recorder.""" +AREA_SENSOR_FT_ATTRIBUTES = { + "device_class": "area", + "state_class": "measurement", + "unit_of_measurement": "ft²", +} +AREA_SENSOR_M_ATTRIBUTES = { + "device_class": "area", + "state_class": "measurement", + "unit_of_measurement": "m²", +} DISTANCE_SENSOR_FT_ATTRIBUTES = { "device_class": "distance", "state_class": "measurement", @@ -1247,6 +1257,9 @@ async def test_statistic_during_period_calendar( @pytest.mark.parametrize( ("attributes", "state", "value", "custom_units", "converted_value"), [ + (AREA_SENSOR_M_ATTRIBUTES, 10, 10, {"area": "cm²"}, 100000), + (AREA_SENSOR_M_ATTRIBUTES, 10, 10, {"area": "m²"}, 10), + (AREA_SENSOR_M_ATTRIBUTES, 10, 10, {"area": "ft²"}, 107.639), (DISTANCE_SENSOR_M_ATTRIBUTES, 10, 10, {"distance": "cm"}, 1000), (DISTANCE_SENSOR_M_ATTRIBUTES, 10, 10, {"distance": "m"}, 10), (DISTANCE_SENSOR_M_ATTRIBUTES, 10, 10, {"distance": "in"}, 10 / 0.0254), @@ -1434,6 +1447,7 @@ async def test_sum_statistics_during_period_unit_conversion( "custom_units", [ {"distance": "L"}, + {"area": "L"}, {"energy": "W"}, {"power": "Pa"}, {"pressure": "K"}, @@ -1678,6 +1692,8 @@ async def test_statistics_during_period_empty_statistic_ids( @pytest.mark.parametrize( ("units", "attributes", "display_unit", "statistics_unit", "unit_class"), [ + (US_CUSTOMARY_SYSTEM, AREA_SENSOR_M_ATTRIBUTES, "m²", "m²", "area"), + (METRIC_SYSTEM, AREA_SENSOR_M_ATTRIBUTES, "m²", "m²", "area"), (US_CUSTOMARY_SYSTEM, DISTANCE_SENSOR_M_ATTRIBUTES, "m", 
"m", "distance"), (METRIC_SYSTEM, DISTANCE_SENSOR_M_ATTRIBUTES, "m", "m", "distance"), ( @@ -1852,6 +1868,13 @@ async def test_list_statistic_ids( @pytest.mark.parametrize( ("attributes", "attributes2", "display_unit", "statistics_unit", "unit_class"), [ + ( + AREA_SENSOR_M_ATTRIBUTES, + AREA_SENSOR_FT_ATTRIBUTES, + "ft²", + "m²", + "area", + ), ( DISTANCE_SENSOR_M_ATTRIBUTES, DISTANCE_SENSOR_FT_ATTRIBUTES, @@ -1984,6 +2007,18 @@ async def test_validate_statistics( await assert_validation_result(client, {}) +async def test_update_statistics_issues( + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test update_statistics_issues can be called.""" + + client = await hass_ws_client() + await client.send_json_auto_id({"type": "recorder/update_statistics_issues"}) + response = await client.receive_json() + assert response["success"] + assert response["result"] is None + + async def test_clear_statistics( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -2104,6 +2139,30 @@ async def test_clear_statistics( assert response["result"] == {"sensor.test2": expected_response["sensor.test2"]} +async def test_clear_statistics_time_out( + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test removing statistics with time-out error.""" + client = await hass_ws_client() + + with ( + patch.object(recorder.tasks.ClearStatisticsTask, "run"), + patch.object(recorder.websocket_api, "CLEAR_STATISTICS_TIME_OUT", 0), + ): + await client.send_json_auto_id( + { + "type": "recorder/clear_statistics", + "statistic_ids": ["sensor.test"], + } + ) + response = await client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "timeout", + "message": "clear_statistics timed out", + } + + @pytest.mark.parametrize( ("new_unit", "new_unit_class", "new_display_unit"), [("dogs", None, "dogs"), (None, "unitless", None), ("W", "power", "kW")], @@ -2204,6 +2263,31 @@ async def test_update_statistics_metadata( } +async def test_update_statistics_metadata_time_out( + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test update statistics metadata with time-out error.""" + client = await hass_ws_client() + + with ( + patch.object(recorder.tasks.UpdateStatisticsMetadataTask, "run"), + patch.object(recorder.websocket_api, "UPDATE_STATISTICS_METADATA_TIME_OUT", 0), + ): + await client.send_json_auto_id( + { + "type": "recorder/update_statistics_metadata", + "statistic_id": "sensor.test", + "unit_of_measurement": "dogs", + } + ) + response = await client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "timeout", + "message": "update_statistics_metadata timed out", + } + + async def test_change_statistics_unit( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: diff --git a/tests/components/remote/test_init.py b/tests/components/remote/test_init.py index 575e69015fe..51728d02ef3 100644 --- a/tests/components/remote/test_init.py +++ b/tests/components/remote/test_init.py @@ -1,7 +1,5 @@ """The tests for the Remote component, adapted from Light Test.""" -import pytest - from homeassistant.components import remote from homeassistant.components.remote import ( ATTR_ALTERNATIVE, @@ -23,11 +21,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from tests.common import ( - async_mock_service, - 
help_test_all, - import_and_test_deprecated_constant_enum, -) +from tests.common import async_mock_service TEST_PLATFORM = {DOMAIN: {CONF_PLATFORM: "test"}} SERVICE_SEND_COMMAND = "send_command" @@ -146,37 +140,3 @@ async def test_delete_command(hass: HomeAssistant) -> None: assert call.domain == remote.DOMAIN assert call.service == SERVICE_DELETE_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_ID - - -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(remote) - - -@pytest.mark.parametrize(("enum"), list(remote.RemoteEntityFeature)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: remote.RemoteEntityFeature, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, remote, enum, "SUPPORT_", "2025.1") - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockRemote(remote.RemoteEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 - - entity = MockRemote() - assert entity.supported_features_compat is remote.RemoteEntityFeature(1) - assert "MockRemote" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "RemoteEntityFeature.LEARN_COMMAND" in caplog.text - caplog.clear() - assert entity.supported_features_compat is remote.RemoteEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text diff --git a/tests/components/renault/__init__.py b/tests/components/renault/__init__.py index 86fddfd5bac..a7c6b314ccb 100644 --- a/tests/components/renault/__init__.py +++ b/tests/components/renault/__init__.py @@ -10,9 +10,9 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, ATTR_STATE, - ATTR_SW_VERSION, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -46,7 +46,7 @@ def check_device_registry( assert registry_entry.manufacturer == expected_device[ATTR_MANUFACTURER] assert registry_entry.name == expected_device[ATTR_NAME] assert registry_entry.model == expected_device[ATTR_MODEL] - assert registry_entry.sw_version == expected_device[ATTR_SW_VERSION] + assert registry_entry.model_id == expected_device[ATTR_MODEL_ID] def check_entities( diff --git a/tests/components/renault/const.py b/tests/components/renault/const.py index 19c40f6ec20..c552321ef97 100644 --- a/tests/components/renault/const.py +++ b/tests/components/renault/const.py @@ -19,9 +19,9 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, ATTR_STATE, - ATTR_SW_VERSION, ATTR_UNIT_OF_MEASUREMENT, CONF_PASSWORD, CONF_USERNAME, @@ -74,7 +74,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Zoe", ATTR_NAME: "REG-NUMBER", - ATTR_SW_VERSION: "X101VE", + ATTR_MODEL_ID: "X101VE", }, "endpoints": { "battery_status": "battery_status_charging.json", @@ -246,7 +246,13 @@ MOCK_VEHICLES = { ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, ATTR_ENTITY_ID: "sensor.reg_number_plug_state", ATTR_ICON: "mdi:power-plug", - ATTR_OPTIONS: ["unplugged", "plugged", "plug_error", "plug_unknown"], + ATTR_OPTIONS: [ + "unplugged", + "plugged", + "plugged_waiting_for_charge", + "plug_error", + "plug_unknown", + ], ATTR_STATE: "plugged", ATTR_UNIQUE_ID: "vf1aaaaa555777999_plug_state", }, @@ -269,7 +275,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", 
ATTR_MODEL: "Zoe", ATTR_NAME: "REG-NUMBER", - ATTR_SW_VERSION: "X102VE", + ATTR_MODEL_ID: "X102VE", }, "endpoints": { "battery_status": "battery_status_not_charging.json", @@ -487,7 +493,13 @@ MOCK_VEHICLES = { ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, ATTR_ENTITY_ID: "sensor.reg_number_plug_state", ATTR_ICON: "mdi:power-plug-off", - ATTR_OPTIONS: ["unplugged", "plugged", "plug_error", "plug_unknown"], + ATTR_OPTIONS: [ + "unplugged", + "plugged", + "plugged_waiting_for_charge", + "plug_error", + "plug_unknown", + ], ATTR_STATE: "unplugged", ATTR_UNIQUE_ID: "vf1aaaaa555777999_plug_state", }, @@ -517,7 +529,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Captur ii", ATTR_NAME: "REG-NUMBER", - ATTR_SW_VERSION: "XJB1SU", + ATTR_MODEL_ID: "XJB1SU", }, "endpoints": { "battery_status": "battery_status_charging.json", @@ -725,7 +737,13 @@ MOCK_VEHICLES = { ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, ATTR_ENTITY_ID: "sensor.reg_number_plug_state", ATTR_ICON: "mdi:power-plug", - ATTR_OPTIONS: ["unplugged", "plugged", "plug_error", "plug_unknown"], + ATTR_OPTIONS: [ + "unplugged", + "plugged", + "plugged_waiting_for_charge", + "plug_error", + "plug_unknown", + ], ATTR_STATE: "plugged", ATTR_UNIQUE_ID: "vf1aaaaa555777123_plug_state", }, @@ -755,7 +773,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Captur ii", ATTR_NAME: "REG-NUMBER", - ATTR_SW_VERSION: "XJB1SU", + ATTR_MODEL_ID: "XJB1SU", }, "endpoints": { "cockpit": "cockpit_fuel.json", diff --git a/tests/components/renault/fixtures/action.set_ac_schedules.json b/tests/components/renault/fixtures/action.set_ac_schedules.json new file mode 100644 index 00000000000..601c1f6cf2d --- /dev/null +++ b/tests/components/renault/fixtures/action.set_ac_schedules.json @@ -0,0 +1,20 @@ +{ + "data": { + "type": "HvacSchedule", + "id": "guid", + "attributes": { + "schedules": [ + { + "id": 1, + "activated": true, + "tuesday": { "readyAtTime": "T04:30Z" }, + "wednesday": { "readyAtTime": "T22:30Z" }, + "thursday": { "readyAtTime": "T22:00Z" }, + "friday": { "readyAtTime": "T23:30Z" }, + "saturday": { "readyAtTime": "T18:30Z" }, + "sunday": { "readyAtTime": "T12:45Z" } + } + ] + } + } +} diff --git a/tests/components/renault/fixtures/hvac_settings.json b/tests/components/renault/fixtures/hvac_settings.json new file mode 100644 index 00000000000..8dd37e56af4 --- /dev/null +++ b/tests/components/renault/fixtures/hvac_settings.json @@ -0,0 +1,41 @@ +{ + "data": { + "type": "Car", + "id": "VF1AAAAA555777999", + "attributes": { + "dateTime": "2020-12-24T20:00:00.000Z", + "mode": "scheduled", + "schedules": [ + { + "id": 1, + "activated": false + }, + { + "id": 2, + "activated": true, + "wednesday": { "readyAtTime": "T15:15Z" }, + "friday": { "readyAtTime": "T15:15Z" } + }, + { + "id": 3, + "activated": false, + "monday": { "readyAtTime": "T23:30Z" }, + "tuesday": { "readyAtTime": "T23:30Z" }, + "wednesday": { "readyAtTime": "T23:30Z" }, + "thursday": { "readyAtTime": "T23:30Z" }, + "friday": { "readyAtTime": "T23:30Z" }, + "saturday": { "readyAtTime": "T23:30Z" }, + "sunday": { "readyAtTime": "T23:30Z" } + }, + { + "id": 4, + "activated": false + }, + { + "id": 5, + "activated": false + } + ] + } + } +} diff --git a/tests/components/renault/snapshots/test_binary_sensor.ambr b/tests/components/renault/snapshots/test_binary_sensor.ambr index 9dac0c323ce..7142608b977 100644 --- a/tests/components/renault/snapshots/test_binary_sensor.ambr +++ b/tests/components/renault/snapshots/test_binary_sensor.ambr @@ -22,13 +22,13 @@ }), 
'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -322,13 +322,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -708,13 +708,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -878,13 +878,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1306,13 +1306,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1606,13 +1606,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1992,13 +1992,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -2162,13 +2162,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_button.ambr b/tests/components/renault/snapshots/test_button.ambr index c4732ad1458..e61255372c1 100644 --- a/tests/components/renault/snapshots/test_button.ambr +++ b/tests/components/renault/snapshots/test_button.ambr @@ -22,13 +22,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -106,13 +106,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -274,13 +274,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -442,13 +442,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -610,13 +610,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -694,13 +694,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -862,13 +862,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1030,13 +1030,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_device_tracker.ambr b/tests/components/renault/snapshots/test_device_tracker.ambr index 5e7813316a2..f90cb92cc63 100644 --- a/tests/components/renault/snapshots/test_device_tracker.ambr +++ b/tests/components/renault/snapshots/test_device_tracker.ambr @@ -22,13 +22,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -107,13 +107,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -192,13 +192,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -234,13 +234,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -319,13 +319,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -407,13 
+407,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -495,13 +495,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -537,13 +537,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_select.ambr b/tests/components/renault/snapshots/test_select.ambr index ccdc76f0130..9974e21be75 100644 --- a/tests/components/renault/snapshots/test_select.ambr +++ b/tests/components/renault/snapshots/test_select.ambr @@ -22,13 +22,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -64,13 +64,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -161,13 +161,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -258,13 +258,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -355,13 +355,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -397,13 +397,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -494,13 +494,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -591,13 +591,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_sensor.ambr b/tests/components/renault/snapshots/test_sensor.ambr index e4bb2d74297..b092222c9f3 100644 --- a/tests/components/renault/snapshots/test_sensor.ambr +++ b/tests/components/renault/snapshots/test_sensor.ambr @@ -22,13 +22,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -332,13 +332,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -494,6 +494,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -921,6 +922,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -1087,13 +1089,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1249,6 +1251,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -1674,6 +1677,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -1838,13 +1842,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -2000,6 +2004,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -2456,6 +2461,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -2632,13 +2638,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -2942,13 +2948,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -3104,6 +3110,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -3531,6 +3538,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -3697,13 +3705,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 
'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -3859,6 +3867,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -4284,6 +4293,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -4448,13 +4458,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -4610,6 +4620,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -5066,6 +5077,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), diff --git a/tests/components/renault/snapshots/test_services.ambr b/tests/components/renault/snapshots/test_services.ambr new file mode 100644 index 00000000000..882b2ffbe34 --- /dev/null +++ b/tests/components/renault/snapshots/test_services.ambr @@ -0,0 +1,757 @@ +# serializer version: 1 +# name: test_service_set_ac_schedule[zoe_40] + list([ + dict({ + 'activated': False, + 'friday': None, + 'id': 1, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 1, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': True, + 'friday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T15:15Z', + }), + 'readyAtTime': 'T15:15Z', + }), + 'id': 2, + 'monday': None, + 'raw_data': dict({ + 'activated': True, + 'friday': dict({ + 'readyAtTime': 'T15:15Z', + }), + 'id': 2, + 'wednesday': dict({ + 'readyAtTime': 'T15:15Z', + }), + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T15:15Z', + }), + 'readyAtTime': 'T15:15Z', + }), + }), + dict({ + 'activated': False, + 'friday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'readyAtTime': 'T23:30Z', + }), + 'id': 3, + 'monday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'readyAtTime': 'T23:30Z', + }), + 'raw_data': dict({ + 'activated': False, + 'friday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'id': 3, + 'monday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'saturday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'sunday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'thursday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'wednesday': dict({ + 'readyAtTime': 'T23:30Z', + }), + }), + 'saturday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'readyAtTime': 'T23:30Z', + }), + 'sunday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'readyAtTime': 'T23:30Z', + }), + 'thursday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'readyAtTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'readyAtTime': 'T23:30Z', + }), + 'wednesday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'readyAtTime': 'T23:30Z', + }), + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 4, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 4, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 
'friday': None, + 'id': 5, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 5, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + ]) +# --- +# name: test_service_set_ac_schedule_multi[zoe_40] + list([ + dict({ + 'activated': False, + 'friday': None, + 'id': 1, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 1, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': True, + 'friday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T15:15Z', + }), + 'readyAtTime': 'T15:15Z', + }), + 'id': 2, + 'monday': None, + 'raw_data': dict({ + 'activated': True, + 'friday': dict({ + 'readyAtTime': 'T15:15Z', + }), + 'id': 2, + 'wednesday': dict({ + 'readyAtTime': 'T15:15Z', + }), + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T15:15Z', + }), + 'readyAtTime': 'T15:15Z', + }), + }), + dict({ + 'activated': True, + 'friday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T12:00Z', + }), + 'readyAtTime': 'T12:00Z', + }), + 'id': 3, + 'monday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T12:00Z', + }), + 'readyAtTime': 'T12:00Z', + }), + 'raw_data': dict({ + 'activated': False, + 'friday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'id': 3, + 'monday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'saturday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'sunday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'thursday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'wednesday': dict({ + 'readyAtTime': 'T23:30Z', + }), + }), + 'saturday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T12:00Z', + }), + 'readyAtTime': 'T12:00Z', + }), + 'sunday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T12:00Z', + }), + 'readyAtTime': 'T12:00Z', + }), + 'thursday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T23:30Z', + }), + 'readyAtTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'raw_data': dict({ + 'readyAtTime': 'T12:00Z', + }), + 'readyAtTime': 'T12:00Z', + }), + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 4, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 4, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 5, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 5, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + ]) +# --- +# name: test_service_set_charge_schedule[zoe_40] + list([ + dict({ + 'activated': True, + 'friday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'id': 1, + 'monday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'raw_data': dict({ + 'activated': True, + 'friday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'id': 1, + 'monday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'saturday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'sunday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'thursday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'tuesday': dict({ + 'duration': 450, + 'startTime': 
'T00:00Z', + }), + 'wednesday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + }), + 'saturday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'sunday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'thursday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'tuesday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'wednesday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + }), + dict({ + 'activated': True, + 'friday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'id': 2, + 'monday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'raw_data': dict({ + 'activated': True, + 'friday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'id': 2, + 'monday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'saturday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'sunday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'thursday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'wednesday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + }), + 'saturday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'sunday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'thursday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'wednesday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 3, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 3, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 4, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 4, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 5, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 5, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + ]) +# --- +# name: test_service_set_charge_schedule_multi[zoe_40] + list([ + dict({ + 'activated': True, + 'friday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'id': 1, + 'monday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'raw_data': dict({ 
+ 'activated': True, + 'friday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'id': 1, + 'monday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'saturday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'sunday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'thursday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'tuesday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'wednesday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + }), + 'saturday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'sunday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'thursday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'tuesday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'wednesday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + }), + dict({ + 'activated': True, + 'friday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'id': 2, + 'monday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'raw_data': dict({ + 'activated': True, + 'friday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'id': 2, + 'monday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'saturday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'sunday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'thursday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'wednesday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + }), + 'saturday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'sunday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'thursday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 3, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 3, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 4, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 4, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 5, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 5, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + ]) +# --- diff --git a/tests/components/renault/test_config_flow.py 
b/tests/components/renault/test_config_flow.py index 7d40cf69314..781b7efe226 100644 --- a/tests/components/renault/test_config_flow.py +++ b/tests/components/renault/test_config_flow.py @@ -2,6 +2,7 @@ from unittest.mock import AsyncMock, PropertyMock, patch +import aiohttp import pytest from renault_api.gigya.exceptions import InvalidCredentialsException from renault_api.kamereon import schemas @@ -13,33 +14,45 @@ from homeassistant.components.renault.const import ( CONF_LOCALE, DOMAIN, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import aiohttp_client -from .const import MOCK_CONFIG - -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture pytestmark = pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + ("exception", "error"), + [ + (Exception, "unknown"), + (aiohttp.ClientConnectionError, "cannot_connect"), + ( + InvalidCredentialsException(403042, "invalid loginID or password"), + "invalid_credentials", + ), + ], +) async def test_config_flow_single_account( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + exception: Exception | type[Exception], + error: str, ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} + assert result["step_id"] == "user" + assert not result["errors"] - # Failed credentials + # Raise error with patch( "renault_api.renault_session.RenaultSession.login", - side_effect=InvalidCredentialsException(403042, "invalid loginID or password"), + side_effect=exception, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -51,7 +64,8 @@ async def test_config_flow_single_account( ) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_credentials"} + assert result["step_id"] == "user" + assert result["errors"] == {"base": error} renault_account = AsyncMock() type(renault_account).account_id = PropertyMock(return_value="account_id_1") @@ -87,6 +101,7 @@ async def test_config_flow_single_account( assert result["data"][CONF_PASSWORD] == "test" assert result["data"][CONF_KAMEREON_ACCOUNT_ID] == "account_id_1" assert result["data"][CONF_LOCALE] == "fr_FR" + assert result["context"]["unique_id"] == "account_id_1" assert len(mock_setup_entry.mock_calls) == 1 @@ -175,6 +190,7 @@ async def test_config_flow_multiple_accounts( assert result["data"][CONF_PASSWORD] == "test" assert result["data"][CONF_KAMEREON_ACCOUNT_ID] == "account_id_2" assert result["data"][CONF_LOCALE] == "fr_FR" + assert result["context"]["unique_id"] == "account_id_2" assert len(mock_setup_entry.mock_calls) == 1 @@ -220,22 +236,17 @@ async def test_config_flow_duplicate( assert len(mock_setup_entry.mock_calls) == 0 -async def test_reauth(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test the start of the config flow.""" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - 
"entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data=MOCK_CONFIG, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["description_placeholders"] == {CONF_USERNAME: "email@test.com"} + assert result["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "email@test.com", + } assert result["errors"] == {} # Failed credentials @@ -249,7 +260,10 @@ async def test_reauth(hass: HomeAssistant, config_entry: ConfigEntry) -> None: ) assert result2["type"] is FlowResultType.FORM - assert result2["description_placeholders"] == {CONF_USERNAME: "email@test.com"} + assert result2["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "email@test.com", + } assert result2["errors"] == {"base": "invalid_credentials"} # Valid credentials @@ -261,3 +275,6 @@ async def test_reauth(hass: HomeAssistant, config_entry: ConfigEntry) -> None: assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" + + assert config_entry.data[CONF_USERNAME] == "email@test.com" + assert config_entry.data[CONF_PASSWORD] == "any" diff --git a/tests/components/renault/test_init.py b/tests/components/renault/test_init.py index 0f9d9cbaf5b..a71192dda47 100644 --- a/tests/components/renault/test_init.py +++ b/tests/components/renault/test_init.py @@ -9,7 +9,7 @@ import pytest from renault_api.gigya.exceptions import GigyaException, InvalidCredentialsException from homeassistant.components.renault.const import DOMAIN -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -62,6 +62,11 @@ async def test_setup_entry_bad_password( assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert config_entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["source"] == SOURCE_REAUTH + assert flows[0]["context"]["entry_id"] == config_entry.entry_id + @pytest.mark.parametrize("side_effect", [aiohttp.ClientConnectionError, GigyaException]) async def test_setup_entry_exception( diff --git a/tests/components/renault/test_services.py b/tests/components/renault/test_services.py index 4e3460b9afa..970d7cf4ad8 100644 --- a/tests/components/renault/test_services.py +++ b/tests/components/renault/test_services.py @@ -7,7 +7,8 @@ from unittest.mock import patch import pytest from renault_api.exceptions import RenaultException from renault_api.kamereon import schemas -from renault_api.kamereon.models import ChargeSchedule +from renault_api.kamereon.models import ChargeSchedule, HvacSchedule +from syrupy import SnapshotAssertion from homeassistant.components.renault.const import DOMAIN from homeassistant.components.renault.services import ( @@ -16,6 +17,7 @@ from homeassistant.components.renault.services import ( ATTR_VEHICLE, ATTR_WHEN, SERVICE_AC_CANCEL, + SERVICE_AC_SET_SCHEDULES, SERVICE_AC_START, SERVICE_CHARGE_SET_SCHEDULES, ) @@ -24,11 +26,11 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, - ATTR_SW_VERSION, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import 
HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr from .const import MOCK_VEHICLES @@ -143,7 +145,7 @@ async def test_service_set_ac_start_with_date( async def test_service_set_charge_schedule( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test that service invokes renault_api with correct data.""" await hass.config_entries.async_setup(config_entry.entry_id) @@ -176,11 +178,11 @@ async def test_service_set_charge_schedule( ) assert len(mock_action.mock_calls) == 1 mock_call_data: list[ChargeSchedule] = mock_action.mock_calls[0][1][0] - assert mock_action.mock_calls[0][1] == (mock_call_data,) + assert mock_call_data == snapshot async def test_service_set_charge_schedule_multi( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test that service invokes renault_api with correct data.""" await hass.config_entries.async_setup(config_entry.entry_id) @@ -225,7 +227,7 @@ async def test_service_set_charge_schedule_multi( ) assert len(mock_action.mock_calls) == 1 mock_call_data: list[ChargeSchedule] = mock_action.mock_calls[0][1][0] - assert mock_action.mock_calls[0][1] == (mock_call_data,) + assert mock_call_data == snapshot # Monday updated with new values assert mock_call_data[1].monday.startTime == "T12:00Z" @@ -237,6 +239,101 @@ async def test_service_set_charge_schedule_multi( assert mock_call_data[1].thursday.duration == 15 +async def test_service_set_ac_schedule( + hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion +) -> None: + """Test that service invokes renault_api with correct data.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + schedules = {"id": 2} + data = { + ATTR_VEHICLE: get_device_id(hass), + ATTR_SCHEDULES: schedules, + } + + with ( + patch( + "renault_api.renault_vehicle.RenaultVehicle.get_hvac_settings", + return_value=schemas.KamereonVehicleDataResponseSchema.loads( + load_fixture("renault/hvac_settings.json") + ).get_attributes(schemas.KamereonVehicleHvacSettingsDataSchema), + ), + patch( + "renault_api.renault_vehicle.RenaultVehicle.set_hvac_schedules", + return_value=( + schemas.KamereonVehicleHvacScheduleActionDataSchema.loads( + load_fixture("renault/action.set_ac_schedules.json") + ) + ), + ) as mock_action, + ): + await hass.services.async_call( + DOMAIN, SERVICE_AC_SET_SCHEDULES, service_data=data, blocking=True + ) + assert len(mock_action.mock_calls) == 1 + mock_call_data: list[ChargeSchedule] = mock_action.mock_calls[0][1][0] + assert mock_call_data == snapshot + + +async def test_service_set_ac_schedule_multi( + hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion +) -> None: + """Test that service invokes renault_api with correct data.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + schedules = [ + { + "id": 3, + "activated": True, + "monday": {"readyAtTime": "T12:00Z"}, + "tuesday": {"readyAtTime": "T12:00Z"}, + "wednesday": None, + "friday": {"readyAtTime": "T12:00Z"}, + "saturday": {"readyAtTime": "T12:00Z"}, + "sunday": {"readyAtTime": "T12:00Z"}, + }, + {"id": 4}, + ] + data = { + ATTR_VEHICLE: get_device_id(hass), + ATTR_SCHEDULES: schedules, + } + + with ( + patch( + "renault_api.renault_vehicle.RenaultVehicle.get_hvac_settings", + 
return_value=schemas.KamereonVehicleDataResponseSchema.loads( + load_fixture("renault/hvac_settings.json") + ).get_attributes(schemas.KamereonVehicleHvacSettingsDataSchema), + ), + patch( + "renault_api.renault_vehicle.RenaultVehicle.set_hvac_schedules", + return_value=( + schemas.KamereonVehicleHvacScheduleActionDataSchema.loads( + load_fixture("renault/action.set_ac_schedules.json") + ) + ), + ) as mock_action, + ): + await hass.services.async_call( + DOMAIN, SERVICE_AC_SET_SCHEDULES, service_data=data, blocking=True + ) + assert len(mock_action.mock_calls) == 1 + mock_call_data: list[HvacSchedule] = mock_action.mock_calls[0][1][0] + assert mock_call_data == snapshot + + # Schedule is activated now + assert mock_call_data[2].activated is True + # Monday updated with new values + assert mock_call_data[2].monday.readyAtTime == "T12:00Z" + # Wednesday has original values cleared + assert mock_call_data[2].wednesday is None + # Thursday keeps original values + assert mock_call_data[2].thursday.readyAtTime == "T23:30Z" + + async def test_service_invalid_device_id( hass: HomeAssistant, config_entry: ConfigEntry ) -> None: @@ -244,12 +341,14 @@ async def test_service_invalid_device_id( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - data = {ATTR_VEHICLE: "VF1AAAAA555777999"} + data = {ATTR_VEHICLE: "some_random_id"} - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError) as err: await hass.services.async_call( DOMAIN, SERVICE_AC_CANCEL, service_data=data, blocking=True ) + assert err.value.translation_key == "invalid_device_id" + assert err.value.translation_placeholders == {"device_id": "some_random_id"} async def test_service_invalid_device_id2( @@ -267,7 +366,7 @@ async def test_service_invalid_device_id2( manufacturer=extra_vehicle[ATTR_MANUFACTURER], name=extra_vehicle[ATTR_NAME], model=extra_vehicle[ATTR_MODEL], - sw_version=extra_vehicle[ATTR_SW_VERSION], + model_id=extra_vehicle[ATTR_MODEL_ID], ) device_id = device_registry.async_get_device( identifiers=extra_vehicle[ATTR_IDENTIFIERS] @@ -275,7 +374,9 @@ async def test_service_invalid_device_id2( data = {ATTR_VEHICLE: device_id} - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError) as err: await hass.services.async_call( DOMAIN, SERVICE_AC_CANCEL, service_data=data, blocking=True ) + assert err.value.translation_key == "no_config_entry_for_device" + assert err.value.translation_placeholders == {"device_id": "REG-NUMBER"} diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index ddea36cb292..81865d98801 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -1,13 +1,15 @@ """Setup the Reolink tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import pytest from reolink_aio.api import Chime +from reolink_aio.baichuan import Baichuan +from reolink_aio.exceptions import ReolinkError -from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL +from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -33,11 +35,14 @@ TEST_UID = "ABC1234567D89EFG" TEST_UID_CAM = "DEF7654321D89GHT" TEST_PORT = 1234 TEST_NVR_NAME = "test_reolink_name" +TEST_CAM_NAME = "test_reolink_cam" TEST_NVR_NAME2 = "test2_reolink_name" 
+TEST_CAM_NAME = "test_reolink_cam" TEST_USE_HTTPS = True TEST_HOST_MODEL = "RLN8-410" TEST_ITEM_NUMBER = "P000" TEST_CAM_MODEL = "RLC-123" +TEST_DUO_MODEL = "Reolink Duo PoE" @pytest.fixture @@ -49,7 +54,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture +@pytest.fixture(scope="module") def reolink_connect_class() -> Generator[MagicMock]: """Mock reolink connection and return both the host_mock and host_mock_class.""" with ( @@ -63,6 +68,7 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.check_new_firmware.return_value = False host_mock.unsubscribe.return_value = True host_mock.logout.return_value = True + host_mock.is_hub = False host_mock.mac_address = TEST_MAC host_mock.uid = TEST_UID host_mock.onvif_enabled = True @@ -76,9 +82,11 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.protocol = "rtsp" host_mock.channels = [0] host_mock.stream_channels = [0] + host_mock.new_devices = False host_mock.sw_version_update_required = False host_mock.hardware_version = "IPC_00000" host_mock.sw_version = "v1.0.0.0.0.0000" + host_mock.sw_upload_progress.return_value = 100 host_mock.manufacturer = "Reolink" host_mock.model = TEST_HOST_MODEL host_mock.item_number = TEST_ITEM_NUMBER @@ -88,6 +96,7 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.camera_sw_version.return_value = "v1.1.0.0.0.0000" host_mock.camera_sw_version_update_required.return_value = False host_mock.camera_uid.return_value = TEST_UID_CAM + host_mock.camera_online.return_value = True host_mock.channel_for_uid.return_value = 0 host_mock.get_encoding.return_value = "h264" host_mock.firmware_update_available.return_value = False @@ -112,6 +121,12 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.doorbell_led_list.return_value = ["stayoff", "auto"] host_mock.auto_track_method.return_value = 3 host_mock.daynight_state.return_value = "Black&White" + + # Baichuan + host_mock.baichuan = create_autospec(Baichuan) + # Disable tcp push by default for tests + host_mock.baichuan.events_active = False + host_mock.baichuan.subscribe_events.side_effect = ReolinkError("Test error") yield host_mock_class @@ -134,14 +149,14 @@ def reolink_platforms() -> Generator[None]: def config_entry(hass: HomeAssistant) -> MockConfigEntry: """Add the reolink mock config entry to hass.""" config_entry = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, diff --git a/tests/components/reolink/snapshots/test_diagnostics.ambr b/tests/components/reolink/snapshots/test_diagnostics.ambr index 00363023d14..71c5397fbd1 100644 --- a/tests/components/reolink/snapshots/test_diagnostics.ambr +++ b/tests/components/reolink/snapshots/test_diagnostics.ambr @@ -77,6 +77,10 @@ '0': 1, 'null': 1, }), + 'GetDeviceAudioCfg': dict({ + '0': 2, + 'null': 4, + }), 'GetEmail': dict({ '0': 1, 'null': 2, @@ -114,8 +118,8 @@ 'null': 2, }), 'GetPtzCurPos': dict({ - '0': 1, - 'null': 1, + '0': 2, + 'null': 2, }), 'GetPtzGuard': dict({ '0': 2, @@ -133,6 +137,9 @@ '0': 1, 'null': 2, }), + 'GetStateLight': dict({ + 'null': 1, + }), 'GetWhiteLed': dict({ '0': 3, 'null': 3, diff --git a/tests/components/reolink/test_binary_sensor.py b/tests/components/reolink/test_binary_sensor.py index e02742afe1d..71318c27b25 100644 --- 
a/tests/components/reolink/test_binary_sensor.py +++ b/tests/components/reolink/test_binary_sensor.py @@ -1,16 +1,16 @@ """Test the Reolink binary sensor platform.""" +from collections.abc import Callable from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory -from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL, const +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .conftest import TEST_NVR_NAME, TEST_UID +from .conftest import TEST_DUO_MODEL, TEST_HOST_MODEL, TEST_NVR_NAME from tests.common import MockConfigEntry, async_fire_time_changed from tests.typing import ClientSessionGenerator @@ -22,10 +22,9 @@ async def test_motion_sensor( freezer: FrozenDateTimeFactory, config_entry: MockConfigEntry, reolink_connect: MagicMock, - entity_registry: er.EntityRegistry, ) -> None: """Test binary sensor entity with motion sensor.""" - reolink_connect.model = "Reolink Duo PoE" + reolink_connect.model = TEST_DUO_MODEL reolink_connect.motion_detected.return_value = True with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -42,11 +41,51 @@ async def test_motion_sensor( assert hass.states.get(entity_id).state == STATE_OFF - # test webhook callback + # test ONVIF webhook callback reolink_connect.motion_detected.return_value = True reolink_connect.ONVIF_event_callback.return_value = [0] - webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + webhook_id = config_entry.runtime_data.host.webhook_id client = await hass_client_no_auth() await client.post(f"/api/webhook/{webhook_id}", data="test_data") assert hass.states.get(entity_id).state == STATE_ON + + +async def test_tcp_callback( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test tcp callback using motion sensor.""" + + class callback_mock_class: + callback_func = None + + def register_callback( + self, callback_id: str, callback: Callable[[], None], *args, **key_args + ) -> None: + if callback_id.endswith("_motion"): + self.callback_func = callback + + callback_mock = callback_mock_class() + + reolink_connect.model = TEST_HOST_MODEL + reolink_connect.baichuan.events_active = True + reolink_connect.baichuan.subscribe_events.reset_mock(side_effect=True) + reolink_connect.baichuan.register_callback = callback_mock.register_callback + reolink_connect.motion_detected.return_value = True + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion" + assert hass.states.get(entity_id).state == STATE_ON + + # simulate a TCP push callback + reolink_connect.motion_detected.return_value = False + assert callback_mock.callback_func is not None + callback_mock.callback_func() + + assert hass.states.get(entity_id).state == STATE_OFF diff --git a/tests/components/reolink/test_button.py b/tests/components/reolink/test_button.py new file mode 100644 index 00000000000..126fbb6b29a --- /dev/null +++ b/tests/components/reolink/test_button.py @@ -0,0 +1,118 @@ 
+"""Test the Reolink button platform.""" + +from unittest.mock import MagicMock, patch + +import pytest +from reolink_aio.exceptions import ReolinkError + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.reolink.button import ATTR_SPEED, SERVICE_PTZ_MOVE +from homeassistant.components.reolink.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +async def test_button( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test button entity with ptz up.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BUTTON}.{TEST_NVR_NAME}_ptz_up" + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_ptz_command.assert_called_once() + + reolink_connect.set_ptz_command.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + reolink_connect.set_ptz_command.reset_mock(side_effect=True) + + +async def test_ptz_move_service( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test ptz_move entity service using PTZ button entity.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BUTTON}.{TEST_NVR_NAME}_ptz_up" + + await hass.services.async_call( + DOMAIN, + SERVICE_PTZ_MOVE, + {ATTR_ENTITY_ID: entity_id, ATTR_SPEED: 5}, + blocking=True, + ) + reolink_connect.set_ptz_command.assert_called_with(0, command="Up", speed=5) + + reolink_connect.set_ptz_command.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_PTZ_MOVE, + {ATTR_ENTITY_ID: entity_id, ATTR_SPEED: 5}, + blocking=True, + ) + + reolink_connect.set_ptz_command.reset_mock(side_effect=True) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_host_button( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test host button entity with reboot.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BUTTON}.{TEST_NVR_NAME}_restart" + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.reboot.assert_called_once() + + reolink_connect.reboot.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + 
{ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + reolink_connect.reboot.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_camera.py b/tests/components/reolink/test_camera.py new file mode 100644 index 00000000000..4f18f769e02 --- /dev/null +++ b/tests/components/reolink/test_camera.py @@ -0,0 +1,69 @@ +"""Test the Reolink camera platform.""" + +from unittest.mock import MagicMock, patch + +import pytest +from reolink_aio.exceptions import ReolinkError + +from homeassistant.components.camera import ( + CameraState, + async_get_image, + async_get_stream_source, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import TEST_DUO_MODEL, TEST_NVR_NAME + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + + +async def test_camera( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test camera entity with fluent.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.CAMERA}.{TEST_NVR_NAME}_fluent" + assert hass.states.get(entity_id).state == CameraState.IDLE + + # check getting a image from the camera + reolink_connect.get_snapshot.return_value = b"image" + assert (await async_get_image(hass, entity_id)).content == b"image" + + reolink_connect.get_snapshot.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await async_get_image(hass, entity_id) + + # check getting the stream source + assert await async_get_stream_source(hass, entity_id) is not None + + reolink_connect.get_snapshot.reset_mock(side_effect=True) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_camera_no_stream_source( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test camera entity with no stream source.""" + reolink_connect.model = TEST_DUO_MODEL + reolink_connect.get_stream_source.return_value = None + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.CAMERA}.{TEST_NVR_NAME}_snapshots_fluent_lens_0" + assert hass.states.get(entity_id).state == CameraState.IDLE diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 716e66b8d6c..59342934c1c 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -2,16 +2,23 @@ import json from typing import Any -from unittest.mock import AsyncMock, MagicMock, call +from unittest.mock import ANY, AsyncMock, MagicMock, call +from aiohttp import ClientSession from freezegun.api import FrozenDateTimeFactory import pytest -from reolink_aio.exceptions import ApiError, CredentialsInvalidError, ReolinkError +from reolink_aio.exceptions import ( + ApiError, + CredentialsInvalidError, + LoginFirmwareError, + ReolinkError, +) from homeassistant import config_entries from homeassistant.components import dhcp -from 
homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL, const +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL +from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN from homeassistant.components.reolink.exceptions import ReolinkWebhookException from homeassistant.components.reolink.host import DEFAULT_TIMEOUT from homeassistant.config_entries import ConfigEntryState @@ -50,7 +57,7 @@ async def test_config_flow_manual_success( ) -> None: """Successful flow manually initialized by the user.""" result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM @@ -73,11 +80,12 @@ async def test_config_flow_manual_success( CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, } + assert result["result"].unique_id == TEST_MAC async def test_config_flow_errors( @@ -85,7 +93,7 @@ async def test_config_flow_errors( ) -> None: """Successful flow manually initialized by the user after some errors.""" result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM @@ -94,6 +102,8 @@ async def test_config_flow_errors( reolink_connect.is_admin = False reolink_connect.user_level = "guest" + reolink_connect.unsubscribe.side_effect = ReolinkError("Test error") + reolink_connect.logout.side_effect = ReolinkError("Test error") result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -167,6 +177,20 @@ async def test_config_flow_errors( assert result["step_id"] == "user" assert result["errors"] == {CONF_PASSWORD: "invalid_auth"} + reolink_connect.get_host_data.side_effect = LoginFirmwareError("Test error") + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_HOST: TEST_HOST, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "update_needed"} + reolink_connect.valid_password.return_value = False result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -196,7 +220,7 @@ async def test_config_flow_errors( assert result["step_id"] == "user" assert result["errors"] == {CONF_HOST: "api_error"} - reolink_connect.get_host_data.side_effect = None + reolink_connect.get_host_data.reset_mock(side_effect=True) result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -204,7 +228,7 @@ async def test_config_flow_errors( CONF_PASSWORD: TEST_PASSWORD, CONF_HOST: TEST_HOST, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, ) @@ -215,24 +239,27 @@ async def test_config_flow_errors( CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, } + reolink_connect.unsubscribe.reset_mock(side_effect=True) + reolink_connect.logout.reset_mock(side_effect=True) + async def test_options_flow(hass: 
HomeAssistant, mock_setup_entry: MagicMock) -> None: """Test specifying non default settings using options flow.""" config_entry = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: "rtsp", @@ -260,62 +287,17 @@ async def test_options_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> } -async def test_change_connection_settings( - hass: HomeAssistant, mock_setup_entry: MagicMock -) -> None: - """Test changing connection settings by issuing a second user config flow.""" - config_entry = MockConfigEntry( - domain=const.DOMAIN, - unique_id=format_mac(TEST_MAC), - data={ - CONF_HOST: TEST_HOST, - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, - }, - options={ - CONF_PROTOCOL: DEFAULT_PROTOCOL, - }, - title=TEST_NVR_NAME, - ) - config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: TEST_HOST2, - CONF_USERNAME: TEST_USERNAME2, - CONF_PASSWORD: TEST_PASSWORD2, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == TEST_HOST2 - assert config_entry.data[CONF_USERNAME] == TEST_USERNAME2 - assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2 - - async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: """Test a reauth flow.""" config_entry = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -327,24 +309,7 @@ async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - const.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "title_placeholders": {"name": TEST_NVR_NAME}, - "unique_id": format_mac(TEST_MAC), - }, - data=config_entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -365,6 +330,55 @@ async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2 +async def test_reauth_abort_unique_id_mismatch( + hass: HomeAssistant, mock_setup_entry: MagicMock, reolink_connect: MagicMock +) -> None: + """Test a reauth flow.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=format_mac(TEST_MAC), + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: 
TEST_PASSWORD, + CONF_PORT: TEST_PORT, + CONF_USE_HTTPS: TEST_USE_HTTPS, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + reolink_connect.mac_address = "aa:aa:aa:aa:aa:aa" + + result = await config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: TEST_USERNAME2, + CONF_PASSWORD: TEST_PASSWORD2, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" + assert config_entry.data[CONF_HOST] == TEST_HOST + assert config_entry.data[CONF_USERNAME] == TEST_USERNAME + assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD + + reolink_connect.mac_address = TEST_MAC + + async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: """Successful flow from DHCP discovery.""" dhcp_data = dhcp.DhcpServiceInfo( @@ -374,7 +388,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No ) result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data + DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data ) assert result["type"] is FlowResultType.FORM @@ -396,67 +410,29 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, } -@pytest.mark.parametrize( - ("last_update_success", "attr", "value", "expected", "host_call_list"), - [ - ( - False, - None, - None, - TEST_HOST2, - [TEST_HOST, TEST_HOST2], - ), - ( - True, - None, - None, - TEST_HOST, - [TEST_HOST], - ), - ( - False, - "get_state", - AsyncMock(side_effect=ReolinkError("Test error")), - TEST_HOST, - [TEST_HOST, TEST_HOST2], - ), - ( - False, - "mac_address", - "aa:aa:aa:aa:aa:aa", - TEST_HOST, - [TEST_HOST, TEST_HOST2], - ), - ], -) -async def test_dhcp_ip_update( +async def test_dhcp_ip_update_aborted_if_wrong_mac( hass: HomeAssistant, freezer: FrozenDateTimeFactory, reolink_connect_class: MagicMock, reolink_connect: MagicMock, - last_update_success: bool, - attr: str, - value: Any, - expected: str, - host_call_list: list[str], ) -> None: - """Test dhcp discovery aborts if already configured where the IP is updated if appropriate.""" + """Test dhcp discovery does not update the IP if the mac address does not match.""" config_entry = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -469,12 +445,108 @@ async def test_dhcp_ip_update( await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED - if not last_update_success: - # ensure the last_update_succes is False for the device_coordinator. 
- reolink_connect.get_states.side_effect = ReolinkError("Test error") - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() + # ensure the last_update_succes is False for the device_coordinator. + reolink_connect.get_states.side_effect = ReolinkError("Test error") + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + dhcp_data = dhcp.DhcpServiceInfo( + ip=TEST_HOST2, + hostname="Reolink", + macaddress=DHCP_FORMATTED_MAC, + ) + + reolink_connect.mac_address = "aa:aa:aa:aa:aa:aa" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data + ) + + for host in (TEST_HOST, TEST_HOST2): + expected_call = call( + host, + TEST_USERNAME, + TEST_PASSWORD, + port=TEST_PORT, + use_https=TEST_USE_HTTPS, + protocol=DEFAULT_PROTOCOL, + timeout=DEFAULT_TIMEOUT, + aiohttp_get_session_callback=ANY, + ) + assert expected_call in reolink_connect_class.call_args_list + + for exc_call in reolink_connect_class.call_args_list: + assert exc_call[0][0] in [TEST_HOST, TEST_HOST2] + get_session = exc_call[1]["aiohttp_get_session_callback"] + assert isinstance(get_session(), ClientSession) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + await hass.async_block_till_done() + # Check that IP was not updated + assert config_entry.data[CONF_HOST] == TEST_HOST + + reolink_connect.get_states.side_effect = None + reolink_connect_class.reset_mock() + reolink_connect.mac_address = TEST_MAC + + +@pytest.mark.parametrize( + ("attr", "value", "expected", "host_call_list"), + [ + ( + None, + None, + TEST_HOST2, + [TEST_HOST, TEST_HOST2], + ), + ( + "get_state", + AsyncMock(side_effect=ReolinkError("Test error")), + TEST_HOST, + [TEST_HOST, TEST_HOST2], + ), + ], +) +async def test_dhcp_ip_update( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + reolink_connect_class: MagicMock, + reolink_connect: MagicMock, + attr: str, + value: Any, + expected: str, + host_call_list: list[str], +) -> None: + """Test dhcp discovery aborts if already configured where the IP is updated if appropriate.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=format_mac(TEST_MAC), + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_PORT: TEST_PORT, + CONF_USE_HTTPS: TEST_USE_HTTPS, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + # ensure the last_update_succes is False for the device_coordinator. 
+    reolink_connect.get_states.side_effect = ReolinkError("Test error")
+    freezer.tick(DEVICE_UPDATE_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()

     dhcp_data = dhcp.DhcpServiceInfo(
         ip=TEST_HOST2,
@@ -483,10 +555,11 @@ async def test_dhcp_ip_update(
     )

     if attr is not None:
+        original = getattr(reolink_connect, attr)
         setattr(reolink_connect, attr, value)

     result = await hass.config_entries.flow.async_init(
-        const.DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data
+        DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data
     )

     for host in host_call_list:
@@ -498,14 +571,181 @@ async def test_dhcp_ip_update(
             use_https=TEST_USE_HTTPS,
             protocol=DEFAULT_PROTOCOL,
             timeout=DEFAULT_TIMEOUT,
+            aiohttp_get_session_callback=ANY,
         )
         assert expected_call in reolink_connect_class.call_args_list

     for exc_call in reolink_connect_class.call_args_list:
         assert exc_call[0][0] in host_call_list
+        get_session = exc_call[1]["aiohttp_get_session_callback"]
+        assert isinstance(get_session(), ClientSession)

     assert result["type"] is FlowResultType.ABORT
     assert result["reason"] == "already_configured"

     await hass.async_block_till_done()
     assert config_entry.data[CONF_HOST] == expected
+
+    reolink_connect.get_states.side_effect = None
+    reolink_connect_class.reset_mock()
+    if attr is not None:
+        setattr(reolink_connect, attr, original)
+
+
+async def test_dhcp_ip_update_ignored_if_still_connected(
+    hass: HomeAssistant,
+    freezer: FrozenDateTimeFactory,
+    reolink_connect_class: MagicMock,
+    reolink_connect: MagicMock,
+) -> None:
+    """Test dhcp discovery is ignored when the camera is still properly connected to HA."""
+    config_entry = MockConfigEntry(
+        domain=DOMAIN,
+        unique_id=format_mac(TEST_MAC),
+        data={
+            CONF_HOST: TEST_HOST,
+            CONF_USERNAME: TEST_USERNAME,
+            CONF_PASSWORD: TEST_PASSWORD,
+            CONF_PORT: TEST_PORT,
+            CONF_USE_HTTPS: TEST_USE_HTTPS,
+        },
+        options={
+            CONF_PROTOCOL: DEFAULT_PROTOCOL,
+        },
+        title=TEST_NVR_NAME,
+    )
+    config_entry.add_to_hass(hass)
+
+    assert await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    dhcp_data = dhcp.DhcpServiceInfo(
+        ip=TEST_HOST2,
+        hostname="Reolink",
+        macaddress=DHCP_FORMATTED_MAC,
+    )
+
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data
+    )
+
+    expected_call = call(
+        TEST_HOST,
+        TEST_USERNAME,
+        TEST_PASSWORD,
+        port=TEST_PORT,
+        use_https=TEST_USE_HTTPS,
+        protocol=DEFAULT_PROTOCOL,
+        timeout=DEFAULT_TIMEOUT,
+        aiohttp_get_session_callback=ANY,
+    )
+    assert expected_call in reolink_connect_class.call_args_list
+
+    for exc_call in reolink_connect_class.call_args_list:
+        assert exc_call[0][0] == TEST_HOST
+        get_session = exc_call[1]["aiohttp_get_session_callback"]
+        assert isinstance(get_session(), ClientSession)
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "already_configured"
+
+    await hass.async_block_till_done()
+    assert config_entry.data[CONF_HOST] == TEST_HOST
+
+    reolink_connect.get_states.side_effect = None
+    reolink_connect_class.reset_mock()
+
+
+async def test_reconfig(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None:
+    """Test a reconfiguration flow."""
+    config_entry = MockConfigEntry(
+        domain=DOMAIN,
+        unique_id=format_mac(TEST_MAC),
+        data={
+            CONF_HOST: TEST_HOST,
+            CONF_USERNAME: TEST_USERNAME,
+            CONF_PASSWORD: TEST_PASSWORD,
+            CONF_PORT: TEST_PORT,
+            CONF_USE_HTTPS:
TEST_USE_HTTPS, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: TEST_HOST2, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert config_entry.data[CONF_HOST] == TEST_HOST2 + assert config_entry.data[CONF_USERNAME] == TEST_USERNAME + assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD + + +async def test_reconfig_abort_unique_id_mismatch( + hass: HomeAssistant, mock_setup_entry: MagicMock, reolink_connect: MagicMock +) -> None: + """Test a reconfiguration flow aborts if the unique id does not match.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=format_mac(TEST_MAC), + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_PORT: TEST_PORT, + CONF_USE_HTTPS: TEST_USE_HTTPS, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + reolink_connect.mac_address = "aa:aa:aa:aa:aa:aa" + + result = await config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: TEST_HOST2, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" + assert config_entry.data[CONF_HOST] == TEST_HOST + assert config_entry.data[CONF_USERNAME] == TEST_USERNAME + assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD + + reolink_connect.mac_address = TEST_MAC diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py index c4096a4582f..c777e4064f0 100644 --- a/tests/components/reolink/test_host.py +++ b/tests/components/reolink/test_host.py @@ -1,50 +1,129 @@ """Test the Reolink host.""" from asyncio import CancelledError -from unittest.mock import AsyncMock, MagicMock +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientResponseError +from freezegun.api import FrozenDateTimeFactory import pytest +from reolink_aio.enums import SubType +from reolink_aio.exceptions import NotSupportedError, ReolinkError, SubscriptionError -from homeassistant.components.reolink import const +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL +from homeassistant.components.reolink.host import ( + FIRST_ONVIF_LONG_POLL_TIMEOUT, + FIRST_ONVIF_TIMEOUT, + FIRST_TCP_PUSH_TIMEOUT, + LONG_POLL_COOLDOWN, + LONG_POLL_ERROR_COOLDOWN, + POLL_INTERVAL_NO_PUSH, +) from homeassistant.components.webhook import async_handle_webhook from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers 
import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.network import NoURLAvailableError from homeassistant.util.aiohttp import MockRequest -from .conftest import TEST_UID +from .conftest import TEST_NVR_NAME -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +async def test_setup_with_tcp_push( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test successful setup of the integration with TCP push callbacks.""" + reolink_connect.baichuan.events_active = True + reolink_connect.baichuan.subscribe_events.reset_mock(side_effect=True) + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + freezer.tick(timedelta(seconds=FIRST_TCP_PUSH_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # ONVIF push subscription not called + assert not reolink_connect.subscribe.called + + reolink_connect.baichuan.events_active = False + reolink_connect.baichuan.subscribe_events.side_effect = ReolinkError("Test error") + + +async def test_unloading_with_tcp_push( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test successful unloading of the integration with TCP push callbacks.""" + reolink_connect.baichuan.events_active = True + reolink_connect.baichuan.subscribe_events.reset_mock(side_effect=True) + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + reolink_connect.baichuan.unsubscribe_events.side_effect = ReolinkError("Test error") + + # Unload the config entry + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED + + reolink_connect.baichuan.events_active = False + reolink_connect.baichuan.subscribe_events.side_effect = ReolinkError("Test error") + reolink_connect.baichuan.unsubscribe_events.reset_mock(side_effect=True) + + async def test_webhook_callback( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, config_entry: MockConfigEntry, reolink_connect: MagicMock, entity_registry: er.EntityRegistry, ) -> None: """Test webhook callback with motion sensor.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) + reolink_connect.motion_detected.return_value = False + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED - webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion" + webhook_id = config_entry.runtime_data.host.webhook_id + unique_id = config_entry.runtime_data.host.unique_id signal_all = MagicMock() signal_ch = MagicMock() - async_dispatcher_connect(hass, 
f"{webhook_id}_all", signal_all) - async_dispatcher_connect(hass, f"{webhook_id}_0", signal_ch) + async_dispatcher_connect(hass, f"{unique_id}_all", signal_all) + async_dispatcher_connect(hass, f"{unique_id}_0", signal_ch) client = await hass_client_no_auth() + assert hass.states.get(entity_id).state == STATE_OFF + # test webhook callback success all channels + reolink_connect.motion_detected.return_value = True reolink_connect.ONVIF_event_callback.return_value = None await client.post(f"/api/webhook/{webhook_id}") signal_all.assert_called_once() + assert hass.states.get(entity_id).state == STATE_ON + + freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() # test webhook callback all channels with failure to read motion_state signal_all.reset_mock() @@ -52,10 +131,14 @@ async def test_webhook_callback( await client.post(f"/api/webhook/{webhook_id}") signal_all.assert_not_called() + assert hass.states.get(entity_id).state == STATE_ON + # test webhook callback success single channel + reolink_connect.motion_detected.return_value = False reolink_connect.ONVIF_event_callback.return_value = [0] await client.post(f"/api/webhook/{webhook_id}", data="test_data") signal_ch.assert_called_once() + assert hass.states.get(entity_id).state == STATE_OFF # test webhook callback single channel with error in event callback signal_ch.reset_mock() @@ -69,15 +152,325 @@ async def test_webhook_callback( content=bytes("test", "utf-8"), mock_source="test", ) - request.read = AsyncMock(side_effect=ConnectionResetError("Test error")) + request.read = AsyncMock() + request.read.side_effect = ConnectionResetError("Test error") await async_handle_webhook(hass, webhook_id, request) signal_all.assert_not_called() - request.read = AsyncMock(side_effect=ClientResponseError("Test error", "Test")) + request.read.side_effect = ClientResponseError("Test error", "Test") await async_handle_webhook(hass, webhook_id, request) signal_all.assert_not_called() - request.read = AsyncMock(side_effect=CancelledError("Test error")) + request.read.side_effect = CancelledError("Test error") with pytest.raises(CancelledError): await async_handle_webhook(hass, webhook_id, request) signal_all.assert_not_called() + + reolink_connect.ONVIF_event_callback.reset_mock(side_effect=True) + + +async def test_no_mac( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test setup of host with no mac.""" + original = reolink_connect.mac_address + reolink_connect.mac_address = None + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + reolink_connect.mac_address = original + + +async def test_subscribe_error( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test error when subscribing to ONVIF does not block startup.""" + reolink_connect.subscribe.side_effect = ReolinkError("Test Error") + reolink_connect.subscribed.return_value = False + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + reolink_connect.subscribe.reset_mock(side_effect=True) + + +async def test_subscribe_unsuccesfull( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test that a unsuccessful ONVIF subscription does not block startup.""" 
+ reolink_connect.subscribed.return_value = False + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + +async def test_initial_ONVIF_not_supported( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test setup when initial ONVIF is not supported.""" + + def test_supported(ch, key): + """Test supported function.""" + if key == "initial_ONVIF_state": + return False + return True + + reolink_connect.supported = test_supported + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + +async def test_ONVIF_not_supported( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test setup is not blocked when ONVIF API returns NotSupportedError.""" + + def test_supported(ch, key): + """Test supported function.""" + if key == "initial_ONVIF_state": + return False + return True + + reolink_connect.supported = test_supported + reolink_connect.subscribed.return_value = False + reolink_connect.subscribe.side_effect = NotSupportedError("Test error") + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + reolink_connect.subscribe.reset_mock(side_effect=True) + reolink_connect.subscribed.return_value = True + + +async def test_renew( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test renew of the ONVIF subscription.""" + reolink_connect.renewtimer.return_value = 1 + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + reolink_connect.renew.assert_called() + + reolink_connect.renew.side_effect = SubscriptionError("Test error") + + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + reolink_connect.subscribe.assert_called() + + reolink_connect.subscribe.reset_mock() + reolink_connect.subscribe.side_effect = SubscriptionError("Test error") + + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + reolink_connect.subscribe.assert_called() + + reolink_connect.renew.reset_mock(side_effect=True) + reolink_connect.subscribe.reset_mock(side_effect=True) + + +async def test_long_poll_renew_fail( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test ONVIF long polling errors while renewing.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + reolink_connect.subscribe.side_effect = NotSupportedError("Test error") + + freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # ensure long polling continues + reolink_connect.pull_point_request.assert_called() + + reolink_connect.subscribe.reset_mock(side_effect=True) + + +async def test_register_webhook_errors( + hass: HomeAssistant, + config_entry: 
MockConfigEntry,
+    reolink_connect: MagicMock,
+) -> None:
+    """Test errors while registering the webhook."""
+    with patch(
+        "homeassistant.components.reolink.host.get_url",
+        side_effect=NoURLAvailableError("Test error"),
+    ):
+        assert await hass.config_entries.async_setup(config_entry.entry_id) is False
+        await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.SETUP_RETRY
+
+
+async def test_long_poll_stop_when_push(
+    hass: HomeAssistant,
+    hass_client_no_auth: ClientSessionGenerator,
+    freezer: FrozenDateTimeFactory,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+) -> None:
+    """Test ONVIF long polling stops when ONVIF push comes in."""
+    assert await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    # start ONVIF long polling because ONVIF push did not come in
+    freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    # simulate ONVIF push callback
+    client = await hass_client_no_auth()
+    reolink_connect.ONVIF_event_callback.return_value = None
+    webhook_id = config_entry.runtime_data.host.webhook_id
+    await client.post(f"/api/webhook/{webhook_id}")
+
+    freezer.tick(DEVICE_UPDATE_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    reolink_connect.unsubscribe.assert_called_with(sub_type=SubType.long_poll)
+
+
+async def test_long_poll_errors(
+    hass: HomeAssistant,
+    freezer: FrozenDateTimeFactory,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+) -> None:
+    """Test errors during ONVIF long polling."""
+    reolink_connect.pull_point_request.reset_mock()
+
+    assert await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    reolink_connect.pull_point_request.side_effect = ReolinkError("Test error")
+
+    # start ONVIF long polling because ONVIF push did not come in
+    freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    reolink_connect.pull_point_request.assert_called_once()
+    reolink_connect.pull_point_request.side_effect = Exception("Test error")
+
+    freezer.tick(timedelta(seconds=LONG_POLL_ERROR_COOLDOWN))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    freezer.tick(timedelta(seconds=LONG_POLL_COOLDOWN))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    reolink_connect.unsubscribe.assert_called_with(sub_type=SubType.long_poll)
+
+    reolink_connect.pull_point_request.reset_mock(side_effect=True)
+
+
+async def test_fast_polling_errors(
+    hass: HomeAssistant,
+    freezer: FrozenDateTimeFactory,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+) -> None:
+    """Test errors during ONVIF fast polling."""
+    reolink_connect.get_motion_state_all_ch.reset_mock()
+    reolink_connect.get_motion_state_all_ch.side_effect = ReolinkError("Test error")
+    reolink_connect.pull_point_request.side_effect = ReolinkError("Test error")
+
+    assert await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    # start ONVIF long polling because ONVIF push did not come in
+    freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    # start ONVIF fast polling because ONVIF long polling did not come in
+    freezer.tick(timedelta(seconds=FIRST_ONVIF_LONG_POLL_TIMEOUT))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert reolink_connect.get_motion_state_all_ch.call_count == 1
+
+    freezer.tick(timedelta(seconds=POLL_INTERVAL_NO_PUSH))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    # fast polling continues despite errors
+    assert reolink_connect.get_motion_state_all_ch.call_count == 2
+
+    reolink_connect.get_motion_state_all_ch.reset_mock(side_effect=True)
+    reolink_connect.pull_point_request.reset_mock(side_effect=True)
+
+
+async def test_diagnostics_event_connection(
+    hass: HomeAssistant,
+    hass_client: ClientSessionGenerator,
+    hass_client_no_auth: ClientSessionGenerator,
+    freezer: FrozenDateTimeFactory,
+    reolink_connect: MagicMock,
+    config_entry: MockConfigEntry,
+) -> None:
+    """Test Reolink diagnostics event connection return values."""
+    assert await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry)
+    assert diag["event connection"] == "Fast polling"
+
+    # start ONVIF long polling because ONVIF push did not come in
+    freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry)
+    assert diag["event connection"] == "ONVIF long polling"
+
+    # simulate ONVIF push callback
+    client = await hass_client_no_auth()
+    reolink_connect.ONVIF_event_callback.return_value = None
+    webhook_id = config_entry.runtime_data.host.webhook_id
+    await client.post(f"/api/webhook/{webhook_id}")
+
+    diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry)
+    assert diag["event connection"] == "ONVIF push"
+
+    # set TCP push as active
+    reolink_connect.baichuan.events_active = True
+    diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry)
+    assert diag["event connection"] == "TCP push"
diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py
index fd54f298966..f851e13c91d 100644
--- a/tests/components/reolink/test_init.py
+++ b/tests/components/reolink/test_init.py
@@ -13,12 +13,12 @@ from homeassistant.components.reolink import (
     DEVICE_UPDATE_INTERVAL,
     FIRMWARE_UPDATE_INTERVAL,
     NUM_CRED_ERRORS,
-    const,
 )
-from homeassistant.config import async_process_ha_core_config
+from homeassistant.components.reolink.const import DOMAIN
 from homeassistant.config_entries import ConfigEntryState
-from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform
+from homeassistant.const import CONF_PORT, STATE_OFF, STATE_UNAVAILABLE, Platform
 from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
+from homeassistant.core_config import async_process_ha_core_config
 from homeassistant.helpers import (
     device_registry as dr,
     entity_registry as er,
@@ -31,6 +31,7 @@ from .conftest import (
     TEST_HOST_MODEL,
     TEST_MAC,
     TEST_NVR_NAME,
+    TEST_PORT,
     TEST_UID,
     TEST_UID_CAM,
 )
@@ -92,6 +93,7 @@ async def test_failures_parametrized(
     expected: ConfigEntryState,
 ) -> None:
     """Test outcomes when changing errors."""
+    original = getattr(reolink_connect, attr)
     setattr(reolink_connect, attr, value)
     assert await hass.config_entries.async_setup(config_entry.entry_id) is (
         expected is ConfigEntryState.LOADED
@@ -100,6 +102,8 @@ async def
test_failures_parametrized( assert config_entry.state == expected + setattr(reolink_connect, attr, original) + async def test_firmware_error_twice( hass: HomeAssistant, @@ -124,6 +128,8 @@ async def test_firmware_error_twice( assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + reolink_connect.check_new_firmware.reset_mock(side_effect=True) + async def test_credential_error_three( hass: HomeAssistant, @@ -140,7 +146,7 @@ async def test_credential_error_three( reolink_connect.get_states.side_effect = CredentialsInvalidError("Test error") - issue_id = f"config_entry_reauth_{const.DOMAIN}_{config_entry.entry_id}" + issue_id = f"config_entry_reauth_{DOMAIN}_{config_entry.entry_id}" for _ in range(NUM_CRED_ERRORS): assert (HOMEASSISTANT_DOMAIN, issue_id) not in issue_registry.issues freezer.tick(DEVICE_UPDATE_INTERVAL) @@ -149,6 +155,8 @@ async def test_credential_error_three( assert (HOMEASSISTANT_DOMAIN, issue_id) in issue_registry.issues + reolink_connect.get_states.reset_mock(side_effect=True) + async def test_entry_reloading( hass: HomeAssistant, @@ -157,6 +165,7 @@ async def test_entry_reloading( ) -> None: """Test the entry is reloaded correctly when settings change.""" reolink_connect.is_nvr = False + reolink_connect.logout.reset_mock() assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -169,6 +178,8 @@ async def test_entry_reloading( assert reolink_connect.logout.call_count == 1 assert config_entry.title == "New Name" + reolink_connect.is_nvr = True + @pytest.mark.parametrize( ("attr", "value", "expected_models"), @@ -224,6 +235,7 @@ async def test_removing_disconnected_cams( # Try to remove the device after 'disconnecting' a camera. if attr is not None: + original = getattr(reolink_connect, attr) setattr(reolink_connect, attr, value) expected_success = TEST_CAM_MODEL not in expected_models for device in device_entries: @@ -237,6 +249,9 @@ async def test_removing_disconnected_cams( device_models = [device.model for device in device_entries] assert sorted(device_models) == sorted(expected_models) + if attr is not None: + setattr(reolink_connect, attr, original) + @pytest.mark.parametrize( ("attr", "value", "expected_models"), @@ -414,14 +429,14 @@ async def test_migrate_entity_ids( reolink_connect.supported = mock_supported dev_entry = device_registry.async_get_or_create( - identifiers={(const.DOMAIN, original_dev_id)}, + identifiers={(DOMAIN, original_dev_id)}, config_entry_id=config_entry.entry_id, disabled_by=None, ) entity_registry.async_get_or_create( domain=domain, - platform=const.DOMAIN, + platform=DOMAIN, unique_id=original_id, config_entry=config_entry, suggested_object_id=original_id, @@ -429,16 +444,13 @@ async def test_migrate_entity_ids( device_id=dev_entry.id, ) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, new_id) is None + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id) is None - assert device_registry.async_get_device( - identifiers={(const.DOMAIN, original_dev_id)} - ) + assert device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)}) if new_dev_id != original_dev_id: assert ( - device_registry.async_get_device(identifiers={(const.DOMAIN, new_dev_id)}) - is None + device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)}) is None ) # setup CH 0 and host entities/device @@ -446,19 +458,125 
@@ async def test_migrate_entity_ids( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert ( - entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) is None - ) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, new_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None + assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id) if new_dev_id != original_dev_id: assert ( - device_registry.async_get_device( - identifiers={(const.DOMAIN, original_dev_id)} - ) + device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)}) is None ) - assert device_registry.async_get_device(identifiers={(const.DOMAIN, new_dev_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)}) + + +async def test_migrate_with_already_existing_device( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device ids that need to be migrated while the new ids already exist.""" + original_dev_id = f"{TEST_MAC}_ch0" + new_dev_id = f"{TEST_UID}_{TEST_UID_CAM}" + domain = Platform.SWITCH + + def mock_supported(ch, capability): + if capability == "UID" and ch is None: + return True + if capability == "UID": + return True + return True + + reolink_connect.channels = [0] + reolink_connect.supported = mock_supported + + device_registry.async_get_or_create( + identifiers={(DOMAIN, new_dev_id)}, + config_entry_id=config_entry.entry_id, + disabled_by=None, + ) + + device_registry.async_get_or_create( + identifiers={(DOMAIN, original_dev_id)}, + config_entry_id=config_entry.entry_id, + disabled_by=None, + ) + + assert device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)}) + + # setup CH 0 and host entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [domain]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)}) + is None + ) + assert device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)}) + + +async def test_migrate_with_already_existing_entity( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test entity ids that need to be migrated while the new ids already exist.""" + original_id = f"{TEST_UID}_0_record_audio" + new_id = f"{TEST_UID}_{TEST_UID_CAM}_record_audio" + dev_id = f"{TEST_UID}_{TEST_UID_CAM}" + domain = Platform.SWITCH + + def mock_supported(ch, capability): + if capability == "UID" and ch is None: + return True + if capability == "UID": + return True + return True + + reolink_connect.channels = [0] + reolink_connect.supported = mock_supported + + dev_entry = device_registry.async_get_or_create( + identifiers={(DOMAIN, dev_id)}, + config_entry_id=config_entry.entry_id, + disabled_by=None, + ) + + entity_registry.async_get_or_create( + domain=domain, + platform=DOMAIN, + unique_id=new_id, + config_entry=config_entry, + suggested_object_id=new_id, + disabled_by=None, + device_id=dev_entry.id, + ) + + entity_registry.async_get_or_create( + domain=domain, + platform=DOMAIN, + unique_id=original_id, + config_entry=config_entry, + 
suggested_object_id=original_id, + disabled_by=None, + device_id=dev_entry.id, + ) + + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id) + + # setup CH 0 and host entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [domain]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None + assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id) async def test_no_repair_issue( @@ -472,11 +590,11 @@ async def test_no_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "https_webhook") not in issue_registry.issues - assert (const.DOMAIN, "webhook_url") not in issue_registry.issues - assert (const.DOMAIN, "enable_port") not in issue_registry.issues - assert (const.DOMAIN, "firmware_update") not in issue_registry.issues - assert (const.DOMAIN, "ssl") not in issue_registry.issues + assert (DOMAIN, "https_webhook") not in issue_registry.issues + assert (DOMAIN, "webhook_url") not in issue_registry.issues + assert (DOMAIN, "enable_port") not in issue_registry.issues + assert (DOMAIN, "firmware_update") not in issue_registry.issues + assert (DOMAIN, "ssl") not in issue_registry.issues async def test_https_repair_issue( @@ -503,7 +621,7 @@ async def test_https_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "https_webhook") in issue_registry.issues + assert (DOMAIN, "https_webhook") in issue_registry.issues async def test_ssl_repair_issue( @@ -533,7 +651,7 @@ async def test_ssl_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "ssl") in issue_registry.issues + assert (DOMAIN, "ssl") in issue_registry.issues @pytest.mark.parametrize("protocol", ["rtsp", "rtmp"]) @@ -553,7 +671,9 @@ async def test_port_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "enable_port") in issue_registry.issues + assert (DOMAIN, "enable_port") in issue_registry.issues + + reolink_connect.set_net_port.reset_mock(side_effect=True) async def test_webhook_repair_issue( @@ -576,7 +696,7 @@ async def test_webhook_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "webhook_url") in issue_registry.issues + assert (DOMAIN, "webhook_url") in issue_registry.issues async def test_firmware_repair_issue( @@ -590,4 +710,42 @@ async def test_firmware_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "firmware_update_host") in issue_registry.issues + assert (DOMAIN, "firmware_update_host") in issue_registry.issues + + +async def test_new_device_discovered( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the entry is reloaded when a new camera or chime is detected.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + 
reolink_connect.logout.reset_mock() + + assert reolink_connect.logout.call_count == 0 + reolink_connect.new_devices = True + + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert reolink_connect.logout.call_count == 1 + + +async def test_port_changed( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test config_entry port update when it has changed during initial login.""" + assert config_entry.data[CONF_PORT] == TEST_PORT + reolink_connect.port = 4567 + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.data[CONF_PORT] == 4567 diff --git a/tests/components/reolink/test_light.py b/tests/components/reolink/test_light.py new file mode 100644 index 00000000000..948a7fce0fe --- /dev/null +++ b/tests/components/reolink/test_light.py @@ -0,0 +1,249 @@ +"""Test the Reolink light platform.""" + +from unittest.mock import MagicMock, call, patch + +import pytest +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +async def test_light_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test light entity state with floodlight.""" + reolink_connect.whiteled_state.return_value = True + reolink_connect.whiteled_brightness.return_value = 100 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes["brightness"] == 255 + + +async def test_light_brightness_none( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test light entity with floodlight and brightness returning None.""" + reolink_connect.whiteled_state.return_value = True + reolink_connect.whiteled_brightness.return_value = None + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes["brightness"] is None + + +async def test_light_turn_off( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test light turn off service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" + + await 
hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_whiteled.assert_called_with(0, state=False) + + reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + reolink_connect.set_whiteled.reset_mock(side_effect=True) + + +async def test_light_turn_on( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test light turn on service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 51}, + blocking=True, + ) + reolink_connect.set_whiteled.assert_has_calls( + [call(0, brightness=20), call(0, state=True)] + ) + + reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 51}, + blocking=True, + ) + + reolink_connect.set_whiteled.side_effect = InvalidParameterError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 51}, + blocking=True, + ) + + reolink_connect.set_whiteled.reset_mock(side_effect=True) + + +async def test_host_light_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test host light entity state with status led.""" + reolink_connect.state_light = True + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_status_led" + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + +async def test_host_light_turn_off( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test host light turn off service.""" + + def mock_supported(ch, capability): + if capability == "power_led": + return False + return True + + reolink_connect.supported = mock_supported + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_status_led" + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_state_light.assert_called_with(False) + + reolink_connect.set_state_light.side_effect = ReolinkError("Test error") + with 
pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + reolink_connect.set_state_light.reset_mock(side_effect=True) + + +async def test_host_light_turn_on( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test host light turn on service.""" + + def mock_supported(ch, capability): + if capability == "power_led": + return False + return True + + reolink_connect.supported = mock_supported + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_status_led" + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_state_light.assert_called_with(True) + + reolink_connect.set_state_light.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index cbc9bf51705..32afd1f73ca 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -10,13 +10,12 @@ from reolink_aio.exceptions import ReolinkError from homeassistant.components.media_source import ( DOMAIN as MEDIA_SOURCE_DOMAIN, URI_SCHEME, + Unresolvable, async_browse_media, async_resolve_media, ) -from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL -from homeassistant.components.reolink.const import DOMAIN +from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN from homeassistant.components.stream import DOMAIN as MEDIA_STREAM_DOMAIN from homeassistant.const import ( CONF_HOST, @@ -33,6 +32,7 @@ from homeassistant.setup import async_setup_component from .conftest import ( TEST_HOST2, + TEST_HOST_MODEL, TEST_MAC2, TEST_NVR_NAME, TEST_NVR_NAME2, @@ -130,7 +130,7 @@ async def test_browsing( ) -> None: """Test browsing the Reolink three.""" entry_id = config_entry.entry_id - reolink_connect.api_version.return_value = 1 + reolink_connect.supported.return_value = 1 reolink_connect.model = "Reolink TrackMix PoE" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): @@ -162,7 +162,7 @@ async def test_browsing( browse_res_AT_sub_id = f"RES|{entry_id}|{TEST_CHANNEL}|autotrack_sub" browse_res_AT_main_id = f"RES|{entry_id}|{TEST_CHANNEL}|autotrack_main" assert browse.domain == DOMAIN - assert browse.title == TEST_NVR_NAME + assert browse.title == f"{TEST_NVR_NAME} lens 0" assert browse.identifier == browse_resolution_id assert browse.children[0].identifier == browse_res_sub_id assert browse.children[1].identifier == browse_res_main_id @@ -178,19 +178,19 @@ async def test_browsing( browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_sub_id}") assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} Low res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 Low res." 
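+    # The mocked model is a dual-lens camera at this point in the test, so the
+    # browse titles are expected to include the lens index ("lens 0") from here on.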
browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_AT_sub_id}" ) assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} Autotrack low res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 Autotrack low res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_AT_main_id}" ) assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} Autotrack high res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 Autotrack high res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_main_id}" @@ -200,7 +200,7 @@ async def test_browsing( browse_day_0_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY}" browse_day_1_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY2}" assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} High res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 High res." assert browse.identifier == browse_days_id assert browse.children[0].identifier == browse_day_0_id assert browse.children[1].identifier == browse_day_1_id @@ -220,11 +220,14 @@ async def test_browsing( browse_file_id = f"FILE|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_FILE_NAME}" assert browse.domain == DOMAIN assert ( - browse.title == f"{TEST_NVR_NAME} High res. {TEST_YEAR}/{TEST_MONTH}/{TEST_DAY}" + browse.title + == f"{TEST_NVR_NAME} lens 0 High res. {TEST_YEAR}/{TEST_MONTH}/{TEST_DAY}" ) assert browse.identifier == browse_files_id assert browse.children[0].identifier == browse_file_id + reolink_connect.model = TEST_HOST_MODEL + async def test_browsing_unsupported_encoding( hass: HomeAssistant, @@ -272,7 +275,7 @@ async def test_browsing_rec_playback_unsupported( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera which does not support playback of recordings.""" - reolink_connect.api_version.return_value = 0 + reolink_connect.supported.return_value = 0 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -293,7 +296,7 @@ async def test_browsing_errors( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera errors.""" - reolink_connect.api_version.return_value = 1 + reolink_connect.supported.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -312,7 +315,7 @@ async def test_browsing_not_loaded( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera integration which is not loaded.""" - reolink_connect.api_version.return_value = 1 + reolink_connect.supported.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -320,14 +323,14 @@ async def test_browsing_not_loaded( reolink_connect.get_host_data.side_effect = ReolinkError("Test error") config_entry2 = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC2), data={ CONF_HOST: TEST_HOST2, CONF_USERNAME: TEST_USERNAME2, CONF_PASSWORD: TEST_PASSWORD2, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -345,3 +348,5 @@ async def test_browsing_not_loaded( assert browse.title == "Reolink" assert browse.identifier is None assert len(browse.children) == 1 + + 
reolink_connect.get_host_data.side_effect = None diff --git a/tests/components/reolink/test_number.py b/tests/components/reolink/test_number.py index e9abcec946c..c6507fa36c1 100644 --- a/tests/components/reolink/test_number.py +++ b/tests/components/reolink/test_number.py @@ -64,6 +64,52 @@ async def test_number( blocking=True, ) + reolink_connect.set_volume.reset_mock(side_effect=True) + + +async def test_host_number( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test number entity with volume.""" + reolink_connect.alarm_volume = 85 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_alarm_volume" + + assert hass.states.get(entity_id).state == "85" + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 45}, + blocking=True, + ) + reolink_connect.set_hub_audio.assert_called_with(alarm_volume=45) + + reolink_connect.set_hub_audio.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 45}, + blocking=True, + ) + + reolink_connect.set_hub_audio.side_effect = InvalidParameterError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 45}, + blocking=True, + ) + async def test_chime_number( hass: HomeAssistant, @@ -109,3 +155,5 @@ async def test_chime_number( {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 1}, blocking=True, ) + + test_chime.set_option.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 0534f36f4c5..7910174380a 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -74,6 +74,8 @@ async def test_floodlight_mode_select( assert hass.states.get(entity_id).state == STATE_UNKNOWN + reolink_connect.set_whiteled.reset_mock(side_effect=True) + async def test_play_quick_reply_message( hass: HomeAssistant, @@ -99,6 +101,8 @@ async def test_play_quick_reply_message( ) reolink_connect.play_quick_reply.assert_called_once() + reolink_connect.quick_reply_dict = MagicMock() + async def test_chime_select( hass: HomeAssistant, @@ -153,3 +157,5 @@ async def test_chime_select( await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_UNKNOWN + + test_chime.set_tone.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_sensor.py b/tests/components/reolink/test_sensor.py new file mode 100644 index 00000000000..df164634355 --- /dev/null +++ b/tests/components/reolink/test_sensor.py @@ -0,0 +1,62 @@ +"""Test the Reolink sensor platform.""" + +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + 
"""Test sensor entities.""" + reolink_connect.ptz_pan_position.return_value = 1200 + reolink_connect.wifi_connection = True + reolink_connect.wifi_signal = 3 + reolink_connect.hdd_list = [0] + reolink_connect.hdd_storage.return_value = 95 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_ptz_pan_position" + assert hass.states.get(entity_id).state == "1200" + + entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_wi_fi_signal" + assert hass.states.get(entity_id).state == "3" + + entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_sd_0_storage" + assert hass.states.get(entity_id).state == "95" + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_hdd_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test hdd sensor entity.""" + reolink_connect.hdd_list = [0] + reolink_connect.hdd_type.return_value = "HDD" + reolink_connect.hdd_storage.return_value = 85 + reolink_connect.hdd_available.return_value = False + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_hdd_0_storage" + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/reolink/test_siren.py b/tests/components/reolink/test_siren.py index 0d9d3e0b800..f6ba8e0ea77 100644 --- a/tests/components/reolink/test_siren.py +++ b/tests/components/reolink/test_siren.py @@ -61,7 +61,6 @@ async def test_siren( reolink_connect.set_siren.assert_called_with(0, True, 2) # test siren turn off - reolink_connect.set_siren.side_effect = None await hass.services.async_call( SIREN_DOMAIN, SERVICE_TURN_OFF, @@ -101,6 +100,7 @@ async def test_siren_turn_on_errors( entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" + original = getattr(reolink_connect, attr) setattr(reolink_connect, attr, value) with pytest.raises(expected): await hass.services.async_call( @@ -110,6 +110,8 @@ async def test_siren_turn_on_errors( blocking=True, ) + setattr(reolink_connect, attr, original) + async def test_siren_turn_off_errors( hass: HomeAssistant, @@ -132,3 +134,5 @@ async def test_siren_turn_off_errors( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) + + reolink_connect.set_siren.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_switch.py b/tests/components/reolink/test_switch.py index ebf805b593d..b2e82040ad4 100644 --- a/tests/components/reolink/test_switch.py +++ b/tests/components/reolink/test_switch.py @@ -1,18 +1,35 @@ """Test the Reolink switch platform.""" -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch -from homeassistant.components.reolink import const -from homeassistant.const import Platform +from freezegun.api import FrozenDateTimeFactory +import pytest +from reolink_aio.api import Chime +from reolink_aio.exceptions import ReolinkError + +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL +from homeassistant.components.reolink.const import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from 
homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, + Platform, +) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er, issue_registry as ir -from .conftest import TEST_UID +from .conftest import TEST_CAM_NAME, TEST_NVR_NAME, TEST_UID -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed -async def test_cleanup_hdr_switch_( +async def test_cleanup_hdr_switch( hass: HomeAssistant, config_entry: MockConfigEntry, reolink_connect: MagicMock, @@ -27,24 +44,93 @@ async def test_cleanup_hdr_switch_( entity_registry.async_get_or_create( domain=domain, - platform=const.DOMAIN, + platform=DOMAIN, unique_id=original_id, config_entry=config_entry, suggested_object_id=original_id, disabled_by=er.RegistryEntryDisabler.USER, ) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) # setup CH 0 and host entities/device with patch("homeassistant.components.reolink.PLATFORMS", [domain]): assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert ( - entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) is None + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None + + +@pytest.mark.parametrize( + ( + "original_id", + "capability", + ), + [ + ( + f"{TEST_UID}_record", + "recording", + ), + ( + f"{TEST_UID}_ftp_upload", + "ftp", + ), + ( + f"{TEST_UID}_push_notifications", + "push", + ), + ( + f"{TEST_UID}_email", + "email", + ), + ( + f"{TEST_UID}_buzzer", + "buzzer", + ), + ], +) +async def test_cleanup_hub_switches( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, + original_id: str, + capability: str, +) -> None: + """Test entity ids that need to be migrated.""" + + def mock_supported(ch, cap): + if cap == capability: + return False + return True + + domain = Platform.SWITCH + + reolink_connect.channels = [0] + reolink_connect.is_hub = True + reolink_connect.supported = mock_supported + + entity_registry.async_get_or_create( + domain=domain, + platform=DOMAIN, + unique_id=original_id, + config_entry=config_entry, + suggested_object_id=original_id, + disabled_by=er.RegistryEntryDisabler.USER, ) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + + # setup CH 0 and host entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [domain]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None + + reolink_connect.is_hub = False + reolink_connect.supported.return_value = True + async def test_hdr_switch_deprecated_repair_issue( hass: HomeAssistant, @@ -62,20 +148,295 @@ async def test_hdr_switch_deprecated_repair_issue( entity_registry.async_get_or_create( domain=domain, - platform=const.DOMAIN, + platform=DOMAIN, unique_id=original_id, config_entry=config_entry, suggested_object_id=original_id, disabled_by=None, ) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) # setup CH 0 and host entities/device with 
patch("homeassistant.components.reolink.PLATFORMS", [domain]): assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - assert (const.DOMAIN, "hdr_switch_deprecated") in issue_registry.issues + assert (DOMAIN, "hdr_switch_deprecated") in issue_registry.issues + + +@pytest.mark.parametrize( + ( + "original_id", + "capability", + ), + [ + ( + f"{TEST_UID}_record", + "recording", + ), + ( + f"{TEST_UID}_ftp_upload", + "ftp", + ), + ( + f"{TEST_UID}_push_notifications", + "push", + ), + ( + f"{TEST_UID}_email", + "email", + ), + ( + f"{TEST_UID}_buzzer", + "buzzer", + ), + ], +) +async def test_hub_switches_repair_issue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + original_id: str, + capability: str, +) -> None: + """Test entity ids that need to be migrated.""" + + def mock_supported(ch, cap): + if cap == capability: + return False + return True + + domain = Platform.SWITCH + + reolink_connect.channels = [0] + reolink_connect.is_hub = True + reolink_connect.supported = mock_supported + + entity_registry.async_get_or_create( + domain=domain, + platform=DOMAIN, + unique_id=original_id, + config_entry=config_entry, + suggested_object_id=original_id, + disabled_by=None, + ) + + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + + # setup CH 0 and host entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [domain]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + assert (DOMAIN, "hub_switch_deprecated") in issue_registry.issues + + reolink_connect.is_hub = False + reolink_connect.supported.return_value = True + + +async def test_switch( + hass: HomeAssistant, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + reolink_connect: MagicMock, +) -> None: + """Test switch entity.""" + reolink_connect.camera_name.return_value = TEST_CAM_NAME + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SWITCH}.{TEST_CAM_NAME}_record" + assert hass.states.get(entity_id).state == STATE_ON + + reolink_connect.recording_enabled.return_value = False + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + # test switch turn on + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_recording.assert_called_with(0, True) + + reolink_connect.set_recording.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + # test switch turn off + reolink_connect.set_recording.reset_mock(side_effect=True) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + 
reolink_connect.set_recording.assert_called_with(0, False)
+
+    reolink_connect.set_recording.side_effect = ReolinkError("Test error")
+    with pytest.raises(HomeAssistantError):
+        await hass.services.async_call(
+            SWITCH_DOMAIN,
+            SERVICE_TURN_OFF,
+            {ATTR_ENTITY_ID: entity_id},
+            blocking=True,
+        )
+
+    reolink_connect.set_recording.reset_mock(side_effect=True)
+
+    reolink_connect.camera_online.return_value = False
+    freezer.tick(DEVICE_UPDATE_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
+
+    reolink_connect.camera_online.return_value = True
+
+
+async def test_host_switch(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    freezer: FrozenDateTimeFactory,
+    reolink_connect: MagicMock,
+) -> None:
+    """Test host switch entity."""
+    reolink_connect.camera_name.return_value = TEST_CAM_NAME
+    reolink_connect.recording_enabled.return_value = True
+
+    with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    entity_id = f"{Platform.SWITCH}.{TEST_NVR_NAME}_record"
+    assert hass.states.get(entity_id).state == STATE_ON
+
+    reolink_connect.recording_enabled.return_value = False
+    freezer.tick(DEVICE_UPDATE_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id).state == STATE_OFF
+
+    # test switch turn on
+    await hass.services.async_call(
+        SWITCH_DOMAIN,
+        SERVICE_TURN_ON,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+    reolink_connect.set_recording.assert_called_with(None, True)
+
+    reolink_connect.set_recording.side_effect = ReolinkError("Test error")
+    with pytest.raises(HomeAssistantError):
+        await hass.services.async_call(
+            SWITCH_DOMAIN,
+            SERVICE_TURN_ON,
+            {ATTR_ENTITY_ID: entity_id},
+            blocking=True,
+        )
+
+    # test switch turn off
+    reolink_connect.set_recording.reset_mock(side_effect=True)
+    await hass.services.async_call(
+        SWITCH_DOMAIN,
+        SERVICE_TURN_OFF,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+    reolink_connect.set_recording.assert_called_with(None, False)
+
+    reolink_connect.set_recording.side_effect = ReolinkError("Test error")
+    with pytest.raises(HomeAssistantError):
+        await hass.services.async_call(
+            SWITCH_DOMAIN,
+            SERVICE_TURN_OFF,
+            {ATTR_ENTITY_ID: entity_id},
+            blocking=True,
+        )
+
+    reolink_connect.set_recording.reset_mock(side_effect=True)
+
+
+async def test_chime_switch(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    freezer: FrozenDateTimeFactory,
+    reolink_connect: MagicMock,
+    test_chime: Chime,
+) -> None:
+    """Test chime switch entity."""
+    with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    entity_id = f"{Platform.SWITCH}.test_chime_led"
+    assert hass.states.get(entity_id).state == STATE_ON
+
+    test_chime.led_state = False
+    freezer.tick(DEVICE_UPDATE_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id).state == STATE_OFF
+
+    # test switch turn on
+    test_chime.set_option = AsyncMock()
+    await hass.services.async_call(
+        SWITCH_DOMAIN,
+        SERVICE_TURN_ON,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+
test_chime.set_option.assert_called_with(led=True) + + test_chime.set_option.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + # test switch turn off + test_chime.set_option.reset_mock(side_effect=True) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + test_chime.set_option.assert_called_with(led=False) + + test_chime.set_option.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + test_chime.set_option.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_update.py b/tests/components/reolink/test_update.py new file mode 100644 index 00000000000..a6cfe862963 --- /dev/null +++ b/tests/components/reolink/test_update.py @@ -0,0 +1,206 @@ +"""Test the Reolink update platform.""" + +import asyncio +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from reolink_aio.exceptions import ReolinkError +from reolink_aio.software_version import NewSoftwareVersion + +from homeassistant.components.reolink.update import POLL_AFTER_INSTALL, POLL_PROGRESS +from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.util.dt import utcnow + +from .conftest import TEST_CAM_NAME, TEST_NVR_NAME + +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import WebSocketGenerator + +TEST_DOWNLOAD_URL = "https://reolink.com/test" +TEST_RELEASE_NOTES = "bugfix 1, bugfix 2" + + +@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME]) +async def test_no_update( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_name: str, +) -> None: + """Test update state when no update available.""" + reolink_connect.camera_name.return_value = TEST_CAM_NAME + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.UPDATE}.{entity_name}_firmware" + assert hass.states.get(entity_id).state == STATE_OFF + + +@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME]) +async def test_update_str( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_name: str, +) -> None: + """Test update state when update available with string from API.""" + reolink_connect.camera_name.return_value = TEST_CAM_NAME + reolink_connect.firmware_update_available.return_value = "New firmware available" + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.UPDATE}.{entity_name}_firmware" + assert hass.states.get(entity_id).state 
== STATE_ON + + +@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME]) +async def test_update_firm( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + entity_name: str, +) -> None: + """Test update state when update available with firmware info from reolink.com.""" + reolink_connect.camera_name.return_value = TEST_CAM_NAME + reolink_connect.sw_upload_progress.return_value = 100 + reolink_connect.camera_sw_version.return_value = "v1.1.0.0.0.0000" + new_firmware = NewSoftwareVersion( + version_string="v3.3.0.226_23031644", + download_url=TEST_DOWNLOAD_URL, + release_notes=TEST_RELEASE_NOTES, + ) + reolink_connect.firmware_update_available.return_value = new_firmware + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.UPDATE}.{entity_name}_firmware" + assert hass.states.get(entity_id).state == STATE_ON + assert not hass.states.get(entity_id).attributes["in_progress"] + assert hass.states.get(entity_id).attributes["update_percentage"] is None + + # release notes + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + await client.send_json( + { + "id": 1, + "type": "update/release_notes", + "entity_id": entity_id, + } + ) + result = await client.receive_json() + assert TEST_DOWNLOAD_URL in result["result"] + assert TEST_RELEASE_NOTES in result["result"] + + # test install + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.update_firmware.assert_called() + + reolink_connect.sw_upload_progress.return_value = 50 + freezer.tick(POLL_PROGRESS) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).attributes["in_progress"] + assert hass.states.get(entity_id).attributes["update_percentage"] == 50 + + reolink_connect.sw_upload_progress.return_value = 100 + freezer.tick(POLL_AFTER_INSTALL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert not hass.states.get(entity_id).attributes["in_progress"] + assert hass.states.get(entity_id).attributes["update_percentage"] is None + + reolink_connect.update_firmware.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + # test _async_update_future + reolink_connect.camera_sw_version.return_value = "v3.3.0.226_23031644" + reolink_connect.firmware_update_available.return_value = False + freezer.tick(POLL_AFTER_INSTALL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + reolink_connect.update_firmware.side_effect = None + + +@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME]) +async def test_update_firm_keeps_available( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + hass_ws_client: WebSocketGenerator, + entity_name: str, +) -> None: + """Test update entity keeps being available during update.""" + reolink_connect.camera_name.return_value = TEST_CAM_NAME + reolink_connect.camera_sw_version.return_value = "v1.1.0.0.0.0000" + new_firmware = 
NewSoftwareVersion(
+        version_string="v3.3.0.226_23031644",
+        download_url=TEST_DOWNLOAD_URL,
+        release_notes=TEST_RELEASE_NOTES,
+    )
+    reolink_connect.firmware_update_available.return_value = new_firmware
+
+    with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    entity_id = f"{Platform.UPDATE}.{entity_name}_firmware"
+    assert hass.states.get(entity_id).state == STATE_ON
+
+    async def mock_update_firmware(*args, **kwargs) -> None:
+        await asyncio.sleep(0.000005)
+
+    reolink_connect.update_firmware = mock_update_firmware
+
+    # test install
+    with patch("homeassistant.components.reolink.update.POLL_PROGRESS", 0.000001):
+        await hass.services.async_call(
+            UPDATE_DOMAIN,
+            SERVICE_INSTALL,
+            {ATTR_ENTITY_ID: entity_id},
+            blocking=True,
+        )
+
+    reolink_connect.session_active = False
+    async_fire_time_changed(hass, utcnow() + timedelta(seconds=1))
+    await hass.async_block_till_done()
+
+    # still available
+    assert hass.states.get(entity_id).state == STATE_ON
+
+    reolink_connect.session_active = True
diff --git a/tests/components/repairs/__init__.py b/tests/components/repairs/__init__.py
index a6786db9685..e787d657e5c 100644
--- a/tests/components/repairs/__init__.py
+++ b/tests/components/repairs/__init__.py
@@ -1,5 +1,17 @@
 """Tests for the repairs integration."""
 
+from http import HTTPStatus
+from typing import Any
+
+from aiohttp.test_utils import TestClient
+
+from homeassistant.components.repairs.issue_handler import (  # noqa: F401
+    async_process_repairs_platforms,
+)
+from homeassistant.components.repairs.websocket_api import (
+    RepairsFlowIndexView,
+    RepairsFlowResourceView,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.setup import async_setup_component
 
@@ -27,3 +39,23 @@ async def get_repairs(
     assert msg["result"]
 
     return msg["result"]["issues"]
+
+
+async def start_repair_fix_flow(
+    client: TestClient, handler: str, issue_id: str
+) -> dict[str, Any]:
+    """Start a flow from an issue."""
+    url = RepairsFlowIndexView.url
+    resp = await client.post(url, json={"handler": handler, "issue_id": issue_id})
+    assert resp.status == HTTPStatus.OK
+    return await resp.json()
+
+
+async def process_repair_fix_flow(
+    client: TestClient, flow_id: str, json: dict[str, Any] | None = None
+) -> dict[str, Any]:
+    """Process a step of a repair fix flow."""
+    url = RepairsFlowResourceView.url.format(flow_id=flow_id)
+    resp = await client.post(url, json=json)
+    assert resp.status == HTTPStatus.OK
+    return await resp.json()
diff --git a/tests/components/repairs/test_init.py b/tests/components/repairs/test_init.py
index edb6e509841..e78563503f1 100644
--- a/tests/components/repairs/test_init.py
+++ b/tests/components/repairs/test_init.py
@@ -21,6 +21,16 @@ from tests.common import mock_platform
 from tests.typing import WebSocketGenerator
 
 
+@pytest.mark.parametrize(
+    "ignore_translations",
+    [
+        [
+            "component.test.issues.even_worse.title",
+            "component.test.issues.even_worse.description",
+            "component.test.issues.abc_123.title",
+        ]
+    ],
+)
 @pytest.mark.freeze_time("2022-07-19 07:53:05")
 async def test_create_update_issue(
     hass: HomeAssistant, hass_ws_client: WebSocketGenerator
@@ -160,6 +170,14 @@ async def test_create_issue_invalid_version(
     assert msg["result"] == {"issues": []}
 
 
+@pytest.mark.parametrize(
+    "ignore_translations",
+    [
+        [
+            "component.test.issues.abc_123.title",
+        ]
+    ],
+)
@pytest.mark.freeze_time("2022-07-19 07:53:05") async def test_ignore_issue( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -329,6 +347,10 @@ async def test_ignore_issue( } +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) @pytest.mark.freeze_time("2022-07-19 07:53:05") async def test_delete_issue( hass: HomeAssistant, @@ -483,6 +505,10 @@ async def test_non_compliant_platform( assert list(hass.data[DOMAIN]["platforms"].keys()) == ["fake_integration"] +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) @pytest.mark.freeze_time("2022-07-21 08:22:00") async def test_sync_methods( hass: HomeAssistant, diff --git a/tests/components/repairs/test_websocket_api.py b/tests/components/repairs/test_websocket_api.py index bb3d50f9eb5..399292fb83f 100644 --- a/tests/components/repairs/test_websocket_api.py +++ b/tests/components/repairs/test_websocket_api.py @@ -151,6 +151,10 @@ async def mock_repairs_integration(hass: HomeAssistant) -> None: ) +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) async def test_dismiss_issue( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -234,6 +238,10 @@ async def test_dismiss_issue( } +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) async def test_fix_non_existing_issue( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -281,10 +289,20 @@ async def test_fix_non_existing_issue( @pytest.mark.parametrize( - ("domain", "step", "description_placeholders"), + ("domain", "step", "description_placeholders", "ignore_translations"), [ - ("fake_integration", "custom_step", None), - ("fake_integration_default_handler", "confirm", {"abc": "123"}), + ( + "fake_integration", + "custom_step", + None, + ["component.fake_integration.issues.abc_123.title"], + ), + ( + "fake_integration_default_handler", + "confirm", + {"abc": "123"}, + ["component.fake_integration_default_handler.issues.abc_123.title"], + ), ], ) async def test_fix_issue( @@ -380,6 +398,10 @@ async def test_fix_issue_unauth( assert resp.status == HTTPStatus.UNAUTHORIZED +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) async def test_get_progress_unauth( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -411,6 +433,10 @@ async def test_get_progress_unauth( assert resp.status == HTTPStatus.UNAUTHORIZED +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) async def test_step_unauth( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -442,6 +468,16 @@ async def test_step_unauth( assert resp.status == HTTPStatus.UNAUTHORIZED +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.test.issues.even_worse.title", + "component.test.issues.even_worse.description", + "component.test.issues.abc_123.title", + ] + ], +) @pytest.mark.freeze_time("2022-07-19 07:53:05") async def test_list_issues( hass: HomeAssistant, @@ -533,6 +569,15 @@ async def test_list_issues( } +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.fake_integration.issues.abc_123.title", + "component.fake_integration.issues.abc_123.fix_flow.abort.not_given", + ] + ], +) async def test_fix_issue_aborted( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -594,6 +639,16 @@ async def 
test_fix_issue_aborted( assert msg["result"]["issues"][0] == first_issue +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.test.issues.abc_123.title", + "component.test.issues.even_worse.title", + "component.test.issues.even_worse.description", + ] + ], +) @pytest.mark.freeze_time("2022-07-19 07:53:05") async def test_get_issue_data( hass: HomeAssistant, hass_ws_client: WebSocketGenerator diff --git a/tests/components/rest/test_init.py b/tests/components/rest/test_init.py index 02dfe6364ff..c401362d604 100644 --- a/tests/components/rest/test_init.py +++ b/tests/components/rest/test_init.py @@ -12,6 +12,7 @@ from homeassistant import config as hass_config from homeassistant.components.rest.const import DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, + CONF_PACKAGES, SERVICE_RELOAD, STATE_UNAVAILABLE, UnitOfInformation, @@ -468,7 +469,7 @@ async def test_config_schema_via_packages(hass: HomeAssistant) -> None: "pack_11": {"rest": {"resource": "http://url1"}}, "pack_list": {"rest": [{"resource": "http://url2"}]}, } - config = {HOMEASSISTANT_DOMAIN: {hass_config.CONF_PACKAGES: packages}} + config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}} await hass_config.merge_packages_config(hass, config, packages) assert len(config) == 2 diff --git a/tests/components/rflink/test_cover.py b/tests/components/rflink/test_cover.py index 0f14e76620f..578221c7051 100644 --- a/tests/components/rflink/test_cover.py +++ b/tests/components/rflink/test_cover.py @@ -7,14 +7,9 @@ control of RFLink cover devices. import pytest -from homeassistant.components.rflink import EVENT_BUTTON_PRESSED -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_CLOSE_COVER, - SERVICE_OPEN_COVER, - STATE_CLOSED, - STATE_OPEN, -) +from homeassistant.components.cover import CoverState +from homeassistant.components.rflink.entity import EVENT_BUTTON_PRESSED +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER from homeassistant.core import CoreState, HomeAssistant, State, callback from .test_init import mock_rflink @@ -53,7 +48,7 @@ async def test_default_setup( # test default state of cover loaded from config cover_initial = hass.states.get(f"{DOMAIN}.test") - assert cover_initial.state == STATE_CLOSED + assert cover_initial.state == CoverState.CLOSED assert cover_initial.attributes["assumed_state"] # cover should follow state of the hardware device by interpreting @@ -64,7 +59,7 @@ async def test_default_setup( await hass.async_block_till_done() cover_after_first_command = hass.states.get(f"{DOMAIN}.test") - assert cover_after_first_command.state == STATE_OPEN + assert cover_after_first_command.state == CoverState.OPEN # not sure why, but cover have always assumed_state=true assert cover_after_first_command.attributes.get("assumed_state") @@ -72,34 +67,34 @@ async def test_default_setup( event_callback({"id": "protocol_0_0", "command": "down"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # should respond to group command event_callback({"id": "protocol_0_0", "command": "allon"}) await hass.async_block_till_done() cover_after_first_command = hass.states.get(f"{DOMAIN}.test") - assert cover_after_first_command.state == STATE_OPEN + assert cover_after_first_command.state == CoverState.OPEN # should respond to group command event_callback({"id": "protocol_0_0", "command": "alloff"}) await hass.async_block_till_done() - assert 
hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test following aliases # mock incoming command event for this device alias event_callback({"id": "test_alias_0_0", "command": "up"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN # test changing state from HA propagates to RFLink await hass.services.async_call( DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: f"{DOMAIN}.test"} ) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[0][0][0] == "protocol_0_0" assert protocol.send_command_ack.call_args_list[0][0][1] == "DOWN" @@ -107,7 +102,7 @@ async def test_default_setup( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: f"{DOMAIN}.test"} ) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[1][0][1] == "UP" @@ -269,19 +264,19 @@ async def test_group_alias( # setup mocking rflink module event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch) - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to group alias event_callback({"id": "test_group_0_0", "command": "allon"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN # test sending group command to group alias event_callback({"id": "test_group_0_0", "command": "down"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN async def test_nogroup_alias( @@ -304,19 +299,19 @@ async def test_nogroup_alias( # setup mocking rflink module event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch) - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to nogroup alias event_callback({"id": "test_nogroup_0_0", "command": "allon"}) await hass.async_block_till_done() # should not affect state - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to nogroup alias event_callback({"id": "test_nogroup_0_0", "command": "up"}) await hass.async_block_till_done() # should affect state - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN async def test_nogroup_device_id( @@ -334,19 +329,19 @@ async def test_nogroup_device_id( # setup mocking rflink module event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch) - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to nogroup event_callback({"id": "test_nogroup_0_0", "command": "allon"}) await hass.async_block_till_done() # should not affect state - assert hass.states.get(f"{DOMAIN}.test").state == 
STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to nogroup event_callback({"id": "test_nogroup_0_0", "command": "up"}) await hass.async_block_till_done() # should affect state - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN async def test_restore_state( @@ -367,7 +362,11 @@ async def test_restore_state( } mock_restore_cache( - hass, (State(f"{DOMAIN}.c1", STATE_OPEN), State(f"{DOMAIN}.c2", STATE_CLOSED)) + hass, + ( + State(f"{DOMAIN}.c1", CoverState.OPEN), + State(f"{DOMAIN}.c2", CoverState.CLOSED), + ), ) hass.set_state(CoreState.starting) @@ -377,20 +376,20 @@ async def test_restore_state( state = hass.states.get(f"{DOMAIN}.c1") assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN state = hass.states.get(f"{DOMAIN}.c2") assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED state = hass.states.get(f"{DOMAIN}.c3") assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED # not cached cover must default values state = hass.states.get(f"{DOMAIN}.c4") assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes["assumed_state"] @@ -435,7 +434,7 @@ async def test_inverted_cover( # test default state of cover loaded from config standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_standard") - assert standard_cover.state == STATE_CLOSED + assert standard_cover.state == CoverState.CLOSED assert standard_cover.attributes["assumed_state"] # mock incoming up command event for nonkaku_device_1 @@ -443,7 +442,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_standard") - assert standard_cover.state == STATE_OPEN + assert standard_cover.state == CoverState.OPEN assert standard_cover.attributes.get("assumed_state") # mock incoming up command event for nonkaku_device_2 @@ -451,7 +450,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_none") - assert standard_cover.state == STATE_OPEN + assert standard_cover.state == CoverState.OPEN assert standard_cover.attributes.get("assumed_state") # mock incoming up command event for nonkaku_device_3 @@ -460,7 +459,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming up command event for newkaku_device_4 @@ -469,7 +468,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming up command event for newkaku_device_5 @@ -478,7 +477,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming up command event for newkaku_device_6 @@ -487,7 +486,7 @@ async def test_inverted_cover( await 
hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for nonkaku_device_1 @@ -496,7 +495,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_standard") - assert standard_cover.state == STATE_CLOSED + assert standard_cover.state == CoverState.CLOSED assert standard_cover.attributes.get("assumed_state") # mock incoming down command event for nonkaku_device_2 @@ -505,7 +504,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_none") - assert standard_cover.state == STATE_CLOSED + assert standard_cover.state == CoverState.CLOSED assert standard_cover.attributes.get("assumed_state") # mock incoming down command event for nonkaku_device_3 @@ -514,7 +513,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for newkaku_device_4 @@ -523,7 +522,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for newkaku_device_5 @@ -532,7 +531,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for newkaku_device_6 @@ -541,7 +540,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED assert inverted_cover.attributes.get("assumed_state") # We are only testing the 'inverted' devices, the 'standard' devices @@ -553,7 +552,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED # should respond to group command event_callback({"id": "nonkaku_device_3", "command": "allon"}) @@ -561,7 +560,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN # should respond to group command event_callback({"id": "newkaku_device_4", "command": "alloff"}) @@ -569,7 +568,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED # should respond to group command event_callback({"id": "newkaku_device_4", "command": "allon"}) @@ -577,7 +576,7 @@ async def 
test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN # should respond to group command event_callback({"id": "newkaku_device_5", "command": "alloff"}) @@ -585,7 +584,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED # should respond to group command event_callback({"id": "newkaku_device_5", "command": "allon"}) @@ -593,7 +592,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN # should respond to group command event_callback({"id": "newkaku_device_6", "command": "alloff"}) @@ -601,7 +600,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED # should respond to group command event_callback({"id": "newkaku_device_6", "command": "allon"}) @@ -609,7 +608,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN # Sending the close command from HA should result # in an 'DOWN' command sent to a non-newkaku device @@ -622,7 +621,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[0][0][0] == "nonkaku_device_1" assert protocol.send_command_ack.call_args_list[0][0][1] == "DOWN" @@ -637,7 +636,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[1][0][0] == "nonkaku_device_1" assert protocol.send_command_ack.call_args_list[1][0][1] == "UP" @@ -650,7 +649,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[2][0][0] == "nonkaku_device_2" assert protocol.send_command_ack.call_args_list[2][0][1] == "DOWN" @@ -663,7 +662,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[3][0][0] == "nonkaku_device_2" assert protocol.send_command_ack.call_args_list[3][0][1] == "UP" @@ -678,7 +677,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == CoverState.CLOSED assert 
protocol.send_command_ack.call_args_list[4][0][0] == "nonkaku_device_3" assert protocol.send_command_ack.call_args_list[4][0][1] == "UP" @@ -693,7 +692,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[5][0][0] == "nonkaku_device_3" assert protocol.send_command_ack.call_args_list[5][0][1] == "DOWN" @@ -708,7 +707,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[6][0][0] == "newkaku_device_4" assert protocol.send_command_ack.call_args_list[6][0][1] == "DOWN" @@ -723,7 +722,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[7][0][0] == "newkaku_device_4" assert protocol.send_command_ack.call_args_list[7][0][1] == "UP" @@ -736,7 +735,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[8][0][0] == "newkaku_device_5" assert protocol.send_command_ack.call_args_list[8][0][1] == "UP" @@ -749,7 +748,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[9][0][0] == "newkaku_device_5" assert protocol.send_command_ack.call_args_list[9][0][1] == "DOWN" @@ -764,7 +763,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[10][0][0] == "newkaku_device_6" assert protocol.send_command_ack.call_args_list[10][0][1] == "UP" @@ -779,6 +778,6 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[11][0][0] == "newkaku_device_6" assert protocol.send_command_ack.call_args_list[11][0][1] == "DOWN" diff --git a/tests/components/rflink/test_light.py b/tests/components/rflink/test_light.py index ceb2b19e192..e76d5b4f783 100644 --- a/tests/components/rflink/test_light.py +++ b/tests/components/rflink/test_light.py @@ -8,7 +8,7 @@ control of RFLink switch devices. 
import pytest from homeassistant.components.light import ATTR_BRIGHTNESS -from homeassistant.components.rflink import EVENT_BUTTON_PRESSED +from homeassistant.components.rflink.entity import EVENT_BUTTON_PRESSED from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, diff --git a/tests/components/rflink/test_switch.py b/tests/components/rflink/test_switch.py index 2aab145f847..f81c41f03d5 100644 --- a/tests/components/rflink/test_switch.py +++ b/tests/components/rflink/test_switch.py @@ -7,7 +7,7 @@ control of Rflink switch devices. import pytest -from homeassistant.components.rflink import EVENT_BUTTON_PRESSED +from homeassistant.components.rflink.entity import EVENT_BUTTON_PRESSED from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, diff --git a/tests/components/ridwell/snapshots/test_diagnostics.ambr b/tests/components/ridwell/snapshots/test_diagnostics.ambr index d32b1d3f446..4b4dda7227d 100644 --- a/tests/components/ridwell/snapshots/test_diagnostics.ambr +++ b/tests/components/ridwell/snapshots/test_diagnostics.ambr @@ -34,6 +34,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'ridwell', 'entry_id': '11554ec901379b9cc8f5a6c1d11ce978', 'minor_version': 1, @@ -42,6 +44,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/ridwell/test_config_flow.py b/tests/components/ridwell/test_config_flow.py index 601ac182670..6dd00344c5b 100644 --- a/tests/components/ridwell/test_config_flow.py +++ b/tests/components/ridwell/test_config_flow.py @@ -13,6 +13,8 @@ from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_USERNAME +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("get_client_response", "errors"), @@ -65,12 +67,10 @@ async def test_duplicate_error(hass: HomeAssistant, config, setup_config_entry) async def test_step_reauth( - hass: HomeAssistant, config, config_entry, setup_config_entry + hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test a full reauth flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=config - ) + result = await config_entry.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_PASSWORD: "new_password"}, diff --git a/tests/components/ring/common.py b/tests/components/ring/common.py index 3b78adf0e09..22fa1c2bf32 100644 --- a/tests/components/ring/common.py +++ b/tests/components/ring/common.py @@ -2,6 +2,7 @@ from unittest.mock import patch +from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN from homeassistant.components.ring import DOMAIN from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -12,9 +13,25 @@ from tests.common import MockConfigEntry async def setup_platform(hass: HomeAssistant, platform: Platform) -> None: """Set up the ring platform and prerequisites.""" - MockConfigEntry(domain=DOMAIN, data={"username": "foo", "token": {}}).add_to_hass( - hass - ) + if not hass.config_entries.async_has_entries(DOMAIN): + MockConfigEntry( + domain=DOMAIN, data={"username": "foo", "token": {}} + ).add_to_hass(hass) with patch("homeassistant.components.ring.PLATFORMS", [platform]): assert await async_setup_component(hass, DOMAIN, 
{}) await hass.async_block_till_done(wait_background_tasks=True) + + +async def setup_automation(hass: HomeAssistant, alias: str, entity_id: str) -> None: + """Set up an automation for tests.""" + assert await async_setup_component( + hass, + AUTOMATION_DOMAIN, + { + AUTOMATION_DOMAIN: { + "alias": alias, + "trigger": {"platform": "state", "entity_id": entity_id, "to": "on"}, + "action": {"action": "notify.notify", "metadata": {}, "data": {}}, + } + }, + ) diff --git a/tests/components/ring/conftest.py b/tests/components/ring/conftest.py index 4456a9daa26..1296c2f58c5 100644 --- a/tests/components/ring/conftest.py +++ b/tests/components/ring/conftest.py @@ -8,14 +8,17 @@ import pytest import ring_doorbell from homeassistant.components.ring import DOMAIN -from homeassistant.const import CONF_USERNAME +from homeassistant.components.ring.const import CONF_CONFIG_ENTRY_MINOR_VERSION +from homeassistant.const import CONF_DEVICE_ID, CONF_USERNAME from homeassistant.core import HomeAssistant -from .device_mocks import get_active_alerts, get_devices_data, get_mock_devices +from .device_mocks import get_devices_data, get_mock_devices from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 +MOCK_HARDWARE_ID = "foo-bar" + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -103,7 +106,7 @@ def mock_ring_client(mock_ring_auth, mock_ring_devices): mock_client = create_autospec(ring_doorbell.Ring) mock_client.return_value.devices_data = get_devices_data() mock_client.return_value.devices.return_value = mock_ring_devices - mock_client.return_value.active_alerts.side_effect = get_active_alerts + mock_client.return_value.active_alerts.return_value = [] with patch("homeassistant.components.ring.Ring", new=mock_client): yield mock_client.return_value @@ -116,10 +119,13 @@ def mock_config_entry() -> MockConfigEntry: title="Ring", domain=DOMAIN, data={ + CONF_DEVICE_ID: MOCK_HARDWARE_ID, CONF_USERNAME: "foo@bar.com", "token": {"access_token": "mock-token"}, }, unique_id="foo@bar.com", + version=1, + minor_version=CONF_CONFIG_ENTRY_MINOR_VERSION, ) @@ -135,3 +141,14 @@ async def mock_added_config_entry( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() return mock_config_entry + + +@pytest.fixture(autouse=True) +def mock_ring_event_listener_class(): + """Fixture to mock the ring event listener.""" + + with patch( + "homeassistant.components.ring.coordinator.RingEventListener", autospec=True + ) as mock_ring_listener: + mock_ring_listener.return_value.started = True + yield mock_ring_listener diff --git a/tests/components/ring/device_mocks.py b/tests/components/ring/device_mocks.py index d2671c3896d..a1833aaa8bd 100644 --- a/tests/components/ring/device_mocks.py +++ b/tests/components/ring/device_mocks.py @@ -7,9 +7,8 @@ Each device entry in the devices.json will have a MagicMock instead of the RingO Mocks the api calls on the devices such as history() and health(). 
""" -from copy import deepcopy from datetime import datetime -from time import time +from functools import partial from unittest.mock import AsyncMock, MagicMock from ring_doorbell import ( @@ -19,6 +18,7 @@ from ring_doorbell import ( RingOther, RingStickUpCam, ) +from ring_doorbell.const import DOORBELL_EXISTING_TYPE from homeassistant.components.ring.const import DOMAIN from homeassistant.util import dt as dt_util @@ -30,7 +30,12 @@ DOORBOT_HISTORY = load_json_value_fixture("doorbot_history.json", DOMAIN) INTERCOM_HISTORY = load_json_value_fixture("intercom_history.json", DOMAIN) DOORBOT_HEALTH = load_json_value_fixture("doorbot_health_attrs.json", DOMAIN) CHIME_HEALTH = load_json_value_fixture("chime_health_attrs.json", DOMAIN) -DEVICE_ALERTS = load_json_value_fixture("ding_active.json", DOMAIN) + +FRONT_DOOR_DEVICE_ID = 987654 +INGRESS_DEVICE_ID = 185036587 +FRONT_DEVICE_ID = 765432 +INTERNAL_DEVICE_ID = 345678 +DOWNSTAIRS_DEVICE_ID = 123456 def get_mock_devices(): @@ -54,14 +59,6 @@ def get_devices_data(): } -def get_active_alerts(): - """Return active alerts set to now.""" - dings_fixture = deepcopy(DEVICE_ALERTS) - for ding in dings_fixture: - ding["now"] = time() - return dings_fixture - - DEVICE_TYPES = { "doorbots": RingDoorBell, "authorized_doorbots": RingDoorBell, @@ -76,6 +73,7 @@ DEVICE_CAPABILITIES = { RingCapability.VOLUME, RingCapability.MOTION_DETECTION, RingCapability.VIDEO, + RingCapability.DING, RingCapability.HISTORY, ], RingStickUpCam: [ @@ -88,7 +86,7 @@ DEVICE_CAPABILITIES = { RingCapability.LIGHT, ], RingChime: [RingCapability.VOLUME], - RingOther: [RingCapability.OPEN, RingCapability.HISTORY], + RingOther: [RingCapability.OPEN, RingCapability.HISTORY, RingCapability.DING], } @@ -149,6 +147,9 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): mock_device.configure_mock( motion_detection=device_dict["settings"].get("motion_detection_enabled"), ) + mock_device.async_set_motion_detection.side_effect = ( + lambda i: mock_device.configure_mock(motion_detection=i) + ) if has_capability(RingCapability.LIGHT): mock_device.configure_mock(lights=device_dict.get("led_status")) @@ -159,11 +160,17 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): "doorbell_volume", device_dict["settings"].get("volume") ) ) + mock_device.async_set_volume.side_effect = lambda i: mock_device.configure_mock( + volume=i + ) if has_capability(RingCapability.SIREN): mock_device.configure_mock( siren=device_dict["siren_status"].get("seconds_remaining") ) + mock_device.async_set_siren.side_effect = lambda i: mock_device.configure_mock( + siren=i + ) if has_capability(RingCapability.BATTERY): mock_device.configure_mock( @@ -172,11 +179,30 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): ) ) - if device_family == "other": + if device_family == "doorbots": mock_device.configure_mock( - doorbell_volume=device_dict["settings"].get("doorbell_volume"), - mic_volume=device_dict["settings"].get("mic_volume"), - voice_volume=device_dict["settings"].get("voice_volume"), + existing_doorbell_type=DOORBELL_EXISTING_TYPE[ + device_dict["settings"]["chime_settings"].get("type", 2) + ] + ) + mock_device.configure_mock( + existing_doorbell_type_enabled=device_dict["settings"][ + "chime_settings" + ].get("enable", False) + ) + mock_device.async_set_existing_doorbell_type_enabled.side_effect = ( + lambda i: mock_device.configure_mock(existing_doorbell_type_enabled=i) ) + if device_family == "other": + for prop in 
("doorbell_volume", "mic_volume", "voice_volume"): + mock_device.configure_mock( + **{ + prop: device_dict["settings"].get(prop), + f"async_set_{prop}.side_effect": partial( + setattr, mock_device, prop + ), + } + ) + return mock_device diff --git a/tests/components/ring/snapshots/test_binary_sensor.ambr b/tests/components/ring/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..2f8e4d8a219 --- /dev/null +++ b/tests/components/ring/snapshots/test_binary_sensor.ambr @@ -0,0 +1,241 @@ +# serializer version: 1 +# name: test_states[binary_sensor.front_door_ding-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.front_door_ding', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ding', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ding', + 'unique_id': '987654-ding', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.front_door_ding-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'occupancy', + 'friendly_name': 'Front Door Ding', + }), + 'context': , + 'entity_id': 'binary_sensor.front_door_ding', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[binary_sensor.front_door_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.front_door_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion', + 'unique_id': '987654-motion', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.front_door_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'motion', + 'friendly_name': 'Front Door Motion', + }), + 'context': , + 'entity_id': 'binary_sensor.front_door_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[binary_sensor.front_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.front_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion', + 'unique_id': '765432-motion', + 'unit_of_measurement': None, + 
}) +# --- +# name: test_states[binary_sensor.front_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'motion', + 'friendly_name': 'Front Motion', + }), + 'context': , + 'entity_id': 'binary_sensor.front_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[binary_sensor.ingress_ding-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.ingress_ding', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ding', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ding', + 'unique_id': '185036587-ding', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.ingress_ding-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'occupancy', + 'friendly_name': 'Ingress Ding', + }), + 'context': , + 'entity_id': 'binary_sensor.ingress_ding', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[binary_sensor.internal_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.internal_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion', + 'unique_id': '345678-motion', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.internal_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'motion', + 'friendly_name': 'Internal Motion', + }), + 'context': , + 'entity_id': 'binary_sensor.internal_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/ring/snapshots/test_button.ambr b/tests/components/ring/snapshots/test_button.ambr new file mode 100644 index 00000000000..01f6525450b --- /dev/null +++ b/tests/components/ring/snapshots/test_button.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_states[button.ingress_open_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.ingress_open_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Open door', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'open_door', + 'unique_id': '185036587-open_door', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[button.ingress_open_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Ingress Open door', + }), + 'context': , + 'entity_id': 'button.ingress_open_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/ring/snapshots/test_camera.ambr b/tests/components/ring/snapshots/test_camera.ambr new file mode 100644 index 00000000000..ec285b438b3 --- /dev/null +++ b/tests/components/ring/snapshots/test_camera.ambr @@ -0,0 +1,318 @@ +# serializer version: 1 +# name: test_states[camera.front_door_last_recording-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.front_door_last_recording', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Last recording', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_recording', + 'unique_id': '987654-last_recording', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.front_door_last_recording-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': '/api/camera_proxy/camera.front_door_last_recording?token=1caab5c3b3', + 'friendly_name': 'Front Door Last recording', + 'last_video_id': None, + 'motion_detection': True, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.front_door_last_recording', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[camera.front_door_live_view-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.front_door_live_view', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Live view', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'live_view', + 'unique_id': '987654-live_view', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.front_door_live_view-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': '/api/camera_proxy/camera.front_door_live_view?token=1caab5c3b3', + 'friendly_name': 'Front Door Live view', + 'frontend_stream_type': , + 'last_video_id': None, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.front_door_live_view', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[camera.front_last_recording-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.front_last_recording', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Last recording', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_recording', + 'unique_id': '765432-last_recording', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.front_last_recording-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': '/api/camera_proxy/camera.front_last_recording?token=1caab5c3b3', + 'friendly_name': 'Front Last recording', + 'last_video_id': None, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.front_last_recording', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[camera.front_live_view-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.front_live_view', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Live view', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'live_view', + 'unique_id': '765432-live_view', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.front_live_view-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': '/api/camera_proxy/camera.front_live_view?token=1caab5c3b3', + 'friendly_name': 'Front Live view', + 'frontend_stream_type': , + 'last_video_id': None, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.front_live_view', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[camera.internal_last_recording-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.internal_last_recording', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Last recording', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_recording', + 'unique_id': '345678-last_recording', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.internal_last_recording-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': 
'/api/camera_proxy/camera.internal_last_recording?token=1caab5c3b3', + 'friendly_name': 'Internal Last recording', + 'last_video_id': None, + 'motion_detection': True, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.internal_last_recording', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[camera.internal_live_view-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.internal_live_view', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Live view', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'live_view', + 'unique_id': '345678-live_view', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.internal_live_view-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': '/api/camera_proxy/camera.internal_live_view?token=1caab5c3b3', + 'friendly_name': 'Internal Live view', + 'frontend_stream_type': , + 'last_video_id': None, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.internal_live_view', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- diff --git a/tests/components/ring/snapshots/test_event.ambr b/tests/components/ring/snapshots/test_event.ambr new file mode 100644 index 00000000000..e97a01516bb --- /dev/null +++ b/tests/components/ring/snapshots/test_event.ambr @@ -0,0 +1,337 @@ +# serializer version: 1 +# name: test_states[event.front_door_ding-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'ding', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.front_door_ding', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ding', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ding', + 'unique_id': '987654-ding', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[event.front_door_ding-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'doorbell', + 'event_type': None, + 'event_types': list([ + 'ding', + ]), + 'friendly_name': 'Front Door Ding', + }), + 'context': , + 'entity_id': 'event.front_door_ding', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[event.front_door_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'motion', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.front_door_motion', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion', + 'unique_id': '987654-motion', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[event.front_door_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'motion', + 'event_type': None, + 'event_types': list([ + 'motion', + ]), + 'friendly_name': 'Front Door Motion', + }), + 'context': , + 'entity_id': 'event.front_door_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[event.front_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'motion', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.front_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion', + 'unique_id': '765432-motion', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[event.front_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'motion', + 'event_type': None, + 'event_types': list([ + 'motion', + ]), + 'friendly_name': 'Front Motion', + }), + 'context': , + 'entity_id': 'event.front_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[event.ingress_ding-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'ding', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.ingress_ding', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ding', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ding', + 'unique_id': '185036587-ding', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[event.ingress_ding-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'doorbell', + 'event_type': None, + 'event_types': list([ + 'ding', + ]), + 'friendly_name': 'Ingress Ding', + }), + 'context': , + 'entity_id': 'event.ingress_ding', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[event.ingress_intercom_unlock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'intercom_unlock', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'event', + 'entity_category': None, + 'entity_id': 'event.ingress_intercom_unlock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Intercom unlock', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'intercom_unlock', + 'unique_id': '185036587-intercom_unlock', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[event.ingress_intercom_unlock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'intercom_unlock', + ]), + 'friendly_name': 'Ingress Intercom unlock', + }), + 'context': , + 'entity_id': 'event.ingress_intercom_unlock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[event.internal_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'motion', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.internal_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion', + 'unique_id': '345678-motion', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[event.internal_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'motion', + 'event_type': None, + 'event_types': list([ + 'motion', + ]), + 'friendly_name': 'Internal Motion', + }), + 'context': , + 'entity_id': 'event.internal_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/ring/snapshots/test_light.ambr b/tests/components/ring/snapshots/test_light.ambr new file mode 100644 index 00000000000..73874fda259 --- /dev/null +++ b/tests/components/ring/snapshots/test_light.ambr @@ -0,0 +1,113 @@ +# serializer version: 1 +# name: test_states[light.front_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.front_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': '765432', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[light.front_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'color_mode': None, + 'friendly_name': 'Front Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 
'light.front_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[light.internal_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.internal_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': '345678', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[light.internal_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'color_mode': , + 'friendly_name': 'Internal Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.internal_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/ring/snapshots/test_number.ambr b/tests/components/ring/snapshots/test_number.ambr new file mode 100644 index 00000000000..0873319b837 --- /dev/null +++ b/tests/components/ring/snapshots/test_number.ambr @@ -0,0 +1,393 @@ +# serializer version: 1 +# name: test_states[number.downstairs_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.downstairs_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '123456-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.downstairs_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Downstairs Volume', + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.downstairs_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_states[number.front_door_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.front_door_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '987654-volume', + 
'unit_of_measurement': None, + }) +# --- +# name: test_states[number.front_door_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Door Volume', + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.front_door_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_states[number.front_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.front_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '765432-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.front_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Volume', + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.front_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.0', + }) +# --- +# name: test_states[number.ingress_doorbell_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 8, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.ingress_doorbell_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Doorbell volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doorbell_volume', + 'unique_id': '185036587-doorbell_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.ingress_doorbell_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Ingress Doorbell volume', + 'max': 8, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.ingress_doorbell_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.0', + }) +# --- +# name: test_states[number.ingress_mic_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.ingress_mic_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mic volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 
0, + 'translation_key': 'mic_volume', + 'unique_id': '185036587-mic_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.ingress_mic_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Ingress Mic volume', + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.ingress_mic_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.0', + }) +# --- +# name: test_states[number.ingress_voice_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.ingress_voice_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Voice volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voice_volume', + 'unique_id': '185036587-voice_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.ingress_voice_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Ingress Voice volume', + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.ingress_voice_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.0', + }) +# --- +# name: test_states[number.internal_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.internal_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '345678-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.internal_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Internal Volume', + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.internal_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.0', + }) +# --- diff --git a/tests/components/ring/snapshots/test_sensor.ambr b/tests/components/ring/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..9fd1ac7ba84 --- /dev/null +++ b/tests/components/ring/snapshots/test_sensor.ambr @@ -0,0 +1,1116 @@ +# serializer version: 1 +# name: test_states[sensor.downstairs_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.downstairs_volume', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '123456-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.downstairs_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Downstairs Volume', + }), + 'context': , + 'entity_id': 'sensor.downstairs_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_states[sensor.downstairs_wifi_signal_category-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.downstairs_wifi_signal_category', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi signal category', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_category', + 'unique_id': '123456-wifi_signal_category', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.downstairs_wifi_signal_category-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Downstairs Wi-Fi signal category', + }), + 'context': , + 'entity_id': 'sensor.downstairs_wifi_signal_category', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.downstairs_wifi_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.downstairs_wifi_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wi-Fi signal strength', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_strength', + 'unique_id': '123456-wifi_signal_strength', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_states[sensor.downstairs_wifi_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'signal_strength', + 'friendly_name': 'Downstairs Wi-Fi signal strength', + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.downstairs_wifi_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.front_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.front_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '765432-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_states[sensor.front_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'battery', + 'friendly_name': 'Front Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.front_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80', + }) +# --- +# name: test_states[sensor.front_door_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.front_door_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '987654-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_states[sensor.front_door_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'battery', + 'friendly_name': 'Front Door Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.front_door_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_states[sensor.front_door_last_activity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.front_door_last_activity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last activity', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_activity', + 'unique_id': '987654-last_activity', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.front_door_last_activity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'timestamp', + 'friendly_name': 'Front Door Last activity', + }), + 'context': , + 'entity_id': 'sensor.front_door_last_activity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.front_door_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.front_door_volume', + 'has_entity_name': True, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '765432-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.front_door_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Volume', + }), + 'context': , + 'entity_id': 'sensor.front_door_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11', + }) +# --- +# name: test_states[sensor.front_door_wifi_signal_category-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.front_door_wifi_signal_category', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi signal category', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_category', + 'unique_id': '987654-wifi_signal_category', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.front_door_wifi_signal_category-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Door Wi-Fi signal category', + }), + 'context': , + 'entity_id': 'sensor.front_door_wifi_signal_category', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.front_door_wifi_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.front_door_wifi_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wi-Fi signal strength', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_strength', + 'unique_id': '987654-wifi_signal_strength', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_states[sensor.front_door_wifi_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'signal_strength', + 'friendly_name': 'Front Door Wi-Fi signal strength', + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.front_door_wifi_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.front_last_activity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.front_last_activity', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last activity', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_activity', + 'unique_id': '765432-last_activity', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.front_last_activity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'timestamp', + 'friendly_name': 'Front Last activity', + }), + 'context': , + 'entity_id': 'sensor.front_last_activity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.front_wifi_signal_category-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.front_wifi_signal_category', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi signal category', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_category', + 'unique_id': '765432-wifi_signal_category', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.front_wifi_signal_category-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Wi-Fi signal category', + }), + 'context': , + 'entity_id': 'sensor.front_wifi_signal_category', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.front_wifi_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.front_wifi_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wi-Fi signal strength', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_strength', + 'unique_id': '765432-wifi_signal_strength', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_states[sensor.front_wifi_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'signal_strength', + 'friendly_name': 'Front Wi-Fi signal strength', + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.front_wifi_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.ingress_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ingress_battery', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '185036587-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_states[sensor.ingress_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'battery', + 'friendly_name': 'Ingress Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.ingress_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '52', + }) +# --- +# name: test_states[sensor.ingress_doorbell_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ingress_doorbell_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Doorbell volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doorbell_volume', + 'unique_id': '185036587-doorbell_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.ingress_doorbell_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Ingress Doorbell volume', + }), + 'context': , + 'entity_id': 'sensor.ingress_doorbell_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_states[sensor.ingress_last_activity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ingress_last_activity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last activity', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_activity', + 'unique_id': '185036587-last_activity', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.ingress_last_activity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'timestamp', + 'friendly_name': 'Ingress Last activity', + }), + 'context': , + 'entity_id': 'sensor.ingress_last_activity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.ingress_mic_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ingress_mic_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mic volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mic_volume', + 'unique_id': '185036587-mic_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.ingress_mic_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Ingress Mic volume', + }), + 'context': , + 'entity_id': 'sensor.ingress_mic_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11', + }) +# --- +# name: test_states[sensor.ingress_voice_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ingress_voice_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Voice volume', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voice_volume', + 'unique_id': '185036587-voice_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.ingress_voice_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Ingress Voice volume', + }), + 'context': , + 'entity_id': 'sensor.ingress_voice_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11', + }) +# --- +# name: test_states[sensor.ingress_wifi_signal_category-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ingress_wifi_signal_category', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi signal category', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_category', + 'unique_id': '185036587-wifi_signal_category', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.ingress_wifi_signal_category-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Ingress Wi-Fi signal category', + }), + 'context': , + 'entity_id': 'sensor.ingress_wifi_signal_category', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.ingress_wifi_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ingress_wifi_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Wi-Fi signal strength', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_strength', + 'unique_id': '185036587-wifi_signal_strength', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_states[sensor.ingress_wifi_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'signal_strength', + 'friendly_name': 'Ingress Wi-Fi signal strength', + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.ingress_wifi_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.internal_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.internal_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '345678-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_states[sensor.internal_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'battery', + 'friendly_name': 'Internal Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.internal_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80', + }) +# --- +# name: test_states[sensor.internal_last_activity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.internal_last_activity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last activity', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_activity', + 'unique_id': '345678-last_activity', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.internal_last_activity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'timestamp', + 'friendly_name': 'Internal Last activity', + }), + 'context': , + 'entity_id': 'sensor.internal_last_activity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.internal_wifi_signal_category-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.internal_wifi_signal_category', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, 
+ 'original_icon': None, + 'original_name': 'Wi-Fi signal category', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_category', + 'unique_id': '345678-wifi_signal_category', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.internal_wifi_signal_category-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Internal Wi-Fi signal category', + }), + 'context': , + 'entity_id': 'sensor.internal_wifi_signal_category', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[sensor.internal_wifi_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.internal_wifi_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wi-Fi signal strength', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_signal_strength', + 'unique_id': '345678-wifi_signal_strength', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_states[sensor.internal_wifi_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'device_class': 'signal_strength', + 'friendly_name': 'Internal Wi-Fi signal strength', + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.internal_wifi_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/ring/snapshots/test_siren.ambr b/tests/components/ring/snapshots/test_siren.ambr new file mode 100644 index 00000000000..c49ab2cb30f --- /dev/null +++ b/tests/components/ring/snapshots/test_siren.ambr @@ -0,0 +1,154 @@ +# serializer version: 1 +# name: test_states[siren.downstairs_siren-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'available_tones': list([ + 'ding', + 'motion', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'siren', + 'entity_category': None, + 'entity_id': 'siren.downstairs_siren', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Siren', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'siren', + 'unique_id': '123456-siren', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[siren.downstairs_siren-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'available_tones': list([ + 'ding', + 'motion', + ]), + 'friendly_name': 'Downstairs Siren', + 'supported_features': , + }), + 'context': , + 'entity_id': 'siren.downstairs_siren', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[siren.front_siren-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'siren', + 'entity_category': None, + 'entity_id': 'siren.front_siren', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Siren', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'siren', + 'unique_id': '765432', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[siren.front_siren-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Siren', + 'supported_features': , + }), + 'context': , + 'entity_id': 'siren.front_siren', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[siren.internal_siren-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'siren', + 'entity_category': None, + 'entity_id': 'siren.internal_siren', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Siren', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'siren', + 'unique_id': '345678', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[siren.internal_siren-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Internal Siren', + 'supported_features': , + }), + 'context': , + 'entity_id': 'siren.internal_siren', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/ring/snapshots/test_switch.ambr b/tests/components/ring/snapshots/test_switch.ambr new file mode 100644 index 00000000000..57c27cfedfa --- /dev/null +++ b/tests/components/ring/snapshots/test_switch.ambr @@ -0,0 +1,283 @@ +# serializer version: 1 +# name: test_states[switch.front_door_in_home_chime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.front_door_in_home_chime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'In-home chime', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'in_home_chime', + 'unique_id': '987654-in_home_chime', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.front_door_in_home_chime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Door In-home chime', + }), + 'context': , + 'entity_id': 'switch.front_door_in_home_chime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_states[switch.front_door_motion_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.front_door_motion_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion detection', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion_detection', + 'unique_id': '987654-motion_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.front_door_motion_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Door Motion detection', + }), + 'context': , + 'entity_id': 'switch.front_door_motion_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_states[switch.front_motion_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.front_motion_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion detection', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion_detection', + 'unique_id': '765432-motion_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.front_motion_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Motion detection', + }), + 'context': , + 'entity_id': 'switch.front_motion_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[switch.front_siren-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.front_siren', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Siren', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'siren', + 'unique_id': '765432-siren', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.front_siren-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Front Siren', + }), + 'context': , + 'entity_id': 'switch.front_siren', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[switch.internal_motion_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 
'entity_id': 'switch.internal_motion_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion detection', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion_detection', + 'unique_id': '345678-motion_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.internal_motion_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Internal Motion detection', + }), + 'context': , + 'entity_id': 'switch.internal_motion_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_states[switch.internal_siren-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.internal_siren', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Siren', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'siren', + 'unique_id': '345678-siren', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.internal_siren-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Ring.com', + 'friendly_name': 'Internal Siren', + }), + 'context': , + 'entity_id': 'switch.internal_siren', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/ring/test_binary_sensor.py b/tests/components/ring/test_binary_sensor.py index 16bc6e872c1..81d7d6e6687 100644 --- a/tests/components/ring/test_binary_sensor.py +++ b/tests/components/ring/test_binary_sensor.py @@ -1,24 +1,243 @@ """The tests for the Ring binary sensor platform.""" -from homeassistant.const import Platform +import time +from unittest.mock import Mock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from ring_doorbell import Ring +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.ring.binary_sensor import RingEvent +from homeassistant.components.ring.const import DOMAIN +from homeassistant.components.ring.coordinator import RingEventListener +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component -from .common import setup_platform +from .common import MockConfigEntry, setup_automation, setup_platform +from .device_mocks import ( + FRONT_DEVICE_ID, + FRONT_DOOR_DEVICE_ID, + INGRESS_DEVICE_ID, + INTERNAL_DEVICE_ID, +) + +from tests.common import async_fire_time_changed, snapshot_platform -async def test_binary_sensor(hass: HomeAssistant, mock_ring_client) -> None: - """Test the Ring binary sensors.""" +@pytest.fixture +def create_deprecated_binary_sensor_entities( + hass: HomeAssistant, 
+ mock_config_entry: ConfigEntry, + entity_registry: er.EntityRegistry, +): + """Create the entity so it is not ignored by the deprecation check.""" + mock_config_entry.add_to_hass(hass) + + def create_entry(device_name, device_id, key): + unique_id = f"{device_id}-{key}" + + entity_registry.async_get_or_create( + domain=BINARY_SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=f"{device_name}_{key}", + config_entry=mock_config_entry, + ) + + create_entry("front", FRONT_DEVICE_ID, "motion") + create_entry("front_door", FRONT_DOOR_DEVICE_ID, "motion") + create_entry("internal", INTERNAL_DEVICE_ID, "motion") + + create_entry("ingress", INGRESS_DEVICE_ID, "ding") + create_entry("front_door", FRONT_DOOR_DEVICE_ID, "ding") + + +async def test_states( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + create_deprecated_binary_sensor_entities, +) -> None: + """Test states.""" await setup_platform(hass, Platform.BINARY_SENSOR) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - motion_state = hass.states.get("binary_sensor.front_door_motion") - assert motion_state is not None - assert motion_state.state == "on" - assert motion_state.attributes["device_class"] == "motion" - front_ding_state = hass.states.get("binary_sensor.front_door_ding") - assert front_ding_state is not None - assert front_ding_state.state == "off" +@pytest.mark.parametrize( + ("device_id", "device_name", "alert_kind", "device_class"), + [ + pytest.param( + FRONT_DOOR_DEVICE_ID, + "front_door", + "motion", + "motion", + id="front_door_motion", + ), + pytest.param( + FRONT_DOOR_DEVICE_ID, + "front_door", + "ding", + "occupancy", + id="front_door_ding", + ), + pytest.param( + INGRESS_DEVICE_ID, "ingress", "ding", "occupancy", id="ingress_ding" + ), + ], +) +async def test_binary_sensor( + hass: HomeAssistant, + mock_config_entry: ConfigEntry, + mock_ring_client: Ring, + mock_ring_event_listener_class: RingEventListener, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + device_id: int, + device_name: str, + alert_kind: str, + device_class: str, +) -> None: + """Test the Ring binary sensors.""" + # Create the entity so it is not ignored by the deprecation check + mock_config_entry.add_to_hass(hass) - ingress_ding_state = hass.states.get("binary_sensor.ingress_ding") - assert ingress_ding_state is not None - assert ingress_ding_state.state == "off" + entity_id = f"binary_sensor.{device_name}_{alert_kind}" + unique_id = f"{device_id}-{alert_kind}" + + entity_registry.async_get_or_create( + domain=BINARY_SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=f"{device_name}_{alert_kind}", + config_entry=mock_config_entry, + ) + with patch("homeassistant.components.ring.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await async_setup_component(hass, DOMAIN, {}) + + on_event_cb = mock_ring_event_listener_class.return_value.add_notification_callback.call_args.args[ + 0 + ] + + # Default state is set to off + + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_OFF + assert state.attributes["device_class"] == device_class + + # A new alert sets to on + event = RingEvent( + 1234546, device_id, "Foo", "Bar", time.time(), 180, kind=alert_kind, state=None + ) + mock_ring_client.active_alerts.return_value = [event] + on_event_cb(event) + state = hass.states.get(entity_id) + assert state 
is not None + assert state.state == STATE_ON + + # Test that another event resets the expiry callback + freezer.tick(60) + async_fire_time_changed(hass) + await hass.async_block_till_done() + event = RingEvent( + 1234546, device_id, "Foo", "Bar", time.time(), 180, kind=alert_kind, state=None + ) + mock_ring_client.active_alerts.return_value = [event] + on_event_cb(event) + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_ON + + freezer.tick(120) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_ON + + # Test the second alert has expired + freezer.tick(60) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_OFF + + +async def test_binary_sensor_not_exists_with_deprecation( + hass: HomeAssistant, + mock_config_entry: ConfigEntry, + mock_ring_client: Ring, + entity_registry: er.EntityRegistry, +) -> None: + """Test the deprecated Ring binary sensors are deleted or raise issues.""" + mock_config_entry.add_to_hass(hass) + + entity_id = "binary_sensor.front_door_motion" + + assert not hass.states.get(entity_id) + with patch("homeassistant.components.ring.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await async_setup_component(hass, DOMAIN, {}) + + assert not entity_registry.async_get(entity_id) + assert not er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + assert not hass.states.get(entity_id) + + +@pytest.mark.parametrize( + ("entity_disabled", "entity_has_automations"), + [ + pytest.param(False, False, id="without-automations"), + pytest.param(False, True, id="with-automations"), + pytest.param(True, False, id="disabled"), + ], +) +async def test_binary_sensor_exists_with_deprecation( + hass: HomeAssistant, + mock_config_entry: ConfigEntry, + mock_ring_client: Ring, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + entity_disabled: bool, + entity_has_automations: bool, +) -> None: + """Test the deprecated Ring binary sensors are deleted or raise issues.""" + mock_config_entry.add_to_hass(hass) + + entity_id = "binary_sensor.front_door_motion" + unique_id = f"{FRONT_DOOR_DEVICE_ID}-motion" + issue_id = f"deprecated_entity_{entity_id}_automation.test_automation" + + if entity_has_automations: + await setup_automation(hass, "test_automation", entity_id) + + entity = entity_registry.async_get_or_create( + domain=BINARY_SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id="front_door_motion", + config_entry=mock_config_entry, + disabled_by=er.RegistryEntryDisabler.USER if entity_disabled else None, + ) + assert entity.entity_id == entity_id + assert not hass.states.get(entity_id) + with patch("homeassistant.components.ring.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await async_setup_component(hass, DOMAIN, {}) + + entity = entity_registry.async_get(entity_id) + # entity and state will be none if removed from registry + assert (entity is None) == entity_disabled + assert (hass.states.get(entity_id) is None) == entity_disabled + + assert ( + issue_registry.async_get_issue(DOMAIN, issue_id) is not None + ) == entity_has_automations diff --git a/tests/components/ring/test_button.py b/tests/components/ring/test_button.py index 946a893c8ad..ada02f206f5 100644 --- a/tests/components/ring/test_button.py +++ b/tests/components/ring/test_button.py @@ 
-1,22 +1,29 @@ """The tests for the Ring button platform.""" +from unittest.mock import Mock + +from syrupy.assertion import SnapshotAssertion + from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_platform +from .common import MockConfigEntry, setup_platform + +from tests.common import snapshot_platform -async def test_entity_registry( +async def test_states( hass: HomeAssistant, - mock_ring_client, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: - """Tests that the devices are registered in the entity registry.""" + """Test states.""" + mock_config_entry.add_to_hass(hass) await setup_platform(hass, Platform.BUTTON) - - entry = entity_registry.async_get("button.ingress_open_door") - assert entry.unique_id == "185036587-open_door" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) async def test_button_opens_door( diff --git a/tests/components/ring/test_camera.py b/tests/components/ring/test_camera.py index 619fb52846c..4b4f019fdf7 100644 --- a/tests/components/ring/test_camera.py +++ b/tests/components/ring/test_camera.py @@ -1,13 +1,22 @@ """The tests for the Ring switch platform.""" -from unittest.mock import AsyncMock, patch +import logging +from unittest.mock import AsyncMock, Mock, patch from aiohttp.test_utils import make_mocked_request from freezegun.api import FrozenDateTimeFactory import pytest import ring_doorbell +from ring_doorbell.webrtcstream import RingWebRtcMessage +from syrupy.assertion import SnapshotAssertion -from homeassistant.components import camera +from homeassistant.components.camera import ( + CameraEntityFeature, + StreamType, + async_get_image, + async_get_mjpeg_stream, + get_camera_from_entity_id, +) from homeassistant.components.ring.camera import FORCE_REFRESH_INTERVAL from homeassistant.components.ring.const import SCAN_INTERVAL from homeassistant.config_entries import SOURCE_REAUTH @@ -17,9 +26,11 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.util.aiohttp import MockStreamReader -from .common import setup_platform +from .common import MockConfigEntry, setup_platform +from .device_mocks import FRONT_DEVICE_ID -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform +from tests.typing import WebSocketGenerator SMALLEST_VALID_JPEG = ( "ffd8ffe000104a46494600010101004800480000ffdb00430003020202020203020202030303030406040404040408060" @@ -29,29 +40,31 @@ SMALLEST_VALID_JPEG = ( SMALLEST_VALID_JPEG_BYTES = bytes.fromhex(SMALLEST_VALID_JPEG) -async def test_entity_registry( +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_states( hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, - mock_ring_client, + snapshot: SnapshotAssertion, ) -> None: - """Tests that the devices are registered in the entity registry.""" - await setup_platform(hass, Platform.CAMERA) - - entry = entity_registry.async_get("camera.front") - assert entry.unique_id == "765432" - - entry = entity_registry.async_get("camera.internal") - assert entry.unique_id == "345678" + """Test states.""" + mock_config_entry.add_to_hass(hass) + # Patch getrandbits so the access_token doesn't change on camera attributes + 
with patch("random.SystemRandom.getrandbits", return_value=123123123123): + await setup_platform(hass, Platform.CAMERA) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) @pytest.mark.parametrize( ("entity_name", "expected_state", "friendly_name"), [ - ("camera.internal", True, "Internal"), - ("camera.front", None, "Front"), + ("camera.internal_last_recording", True, "Internal Last recording"), + ("camera.front_last_recording", None, "Front Last recording"), ], ids=["On", "Off"], ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_camera_motion_detection_state_reports_correctly( hass: HomeAssistant, mock_ring_client, @@ -67,40 +80,43 @@ async def test_camera_motion_detection_state_reports_correctly( assert state.attributes.get("friendly_name") == friendly_name +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_camera_motion_detection_can_be_turned_on_and_off( - hass: HomeAssistant, mock_ring_client + hass: HomeAssistant, + mock_ring_client, ) -> None: """Tests the siren turns on correctly.""" await setup_platform(hass, Platform.CAMERA) - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is not True await hass.services.async_call( "camera", "enable_motion_detection", - {"entity_id": "camera.front"}, + {"entity_id": "camera.front_last_recording"}, blocking=True, ) await hass.async_block_till_done() - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is True await hass.services.async_call( "camera", "disable_motion_detection", - {"entity_id": "camera.front"}, + {"entity_id": "camera.front_last_recording"}, blocking=True, ) await hass.async_block_till_done() - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is None +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_camera_motion_detection_not_supported( hass: HomeAssistant, mock_ring_client, @@ -120,43 +136,25 @@ async def test_camera_motion_detection_not_supported( await setup_platform(hass, Platform.CAMERA) - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is None await hass.services.async_call( "camera", "enable_motion_detection", - {"entity_id": "camera.front"}, + {"entity_id": "camera.front_last_recording"}, blocking=True, ) await hass.async_block_till_done() - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is None assert ( - "Entity camera.front does not have motion detection capability" in caplog.text + "Entity camera.front_last_recording does not have motion detection capability" + in caplog.text ) -async def test_updates_work( - hass: HomeAssistant, mock_ring_client, mock_ring_devices -) -> None: - """Tests the update service works correctly.""" - await setup_platform(hass, Platform.CAMERA) - state = hass.states.get("camera.internal") - assert state.attributes.get("motion_detection") is True - - internal_camera_mock = mock_ring_devices.get_device(345678) - internal_camera_mock.motion_detection = False - - await hass.services.async_call("ring", "update", {}, blocking=True) - - await hass.async_block_till_done() - - state = 
hass.states.get("camera.internal") - assert state.attributes.get("motion_detection") is not True - - @pytest.mark.parametrize( ("exception_type", "reauth_expected"), [ @@ -166,6 +164,7 @@ async def test_updates_work( ], ids=["Authentication", "Timeout", "Other"], ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_motion_detection_errors_when_turned_on( hass: HomeAssistant, mock_ring_client, @@ -186,7 +185,7 @@ async def test_motion_detection_errors_when_turned_on( await hass.services.async_call( "camera", "enable_motion_detection", - {"entity_id": "camera.front"}, + {"entity_id": "camera.front_last_recording"}, blocking=True, ) await hass.async_block_till_done() @@ -201,6 +200,7 @@ async def test_motion_detection_errors_when_turned_on( ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_camera_handle_mjpeg_stream( hass: HomeAssistant, mock_ring_client, @@ -213,7 +213,7 @@ async def test_camera_handle_mjpeg_stream( front_camera_mock = mock_ring_devices.get_device(765432) front_camera_mock.async_recording_url.return_value = None - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state is not None mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) @@ -221,7 +221,9 @@ async def test_camera_handle_mjpeg_stream( # history not updated yet front_camera_mock.async_history.assert_not_called() front_camera_mock.async_recording_url.assert_not_called() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is None # Video url will be none so no stream @@ -229,9 +231,11 @@ async def test_camera_handle_mjpeg_stream( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) front_camera_mock.async_history.assert_called_once() - front_camera_mock.async_recording_url.assert_called_once() + front_camera_mock.async_recording_url.assert_called() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is None # Stop the history updating so we can update the values manually @@ -240,8 +244,10 @@ async def test_camera_handle_mjpeg_stream( freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.async_recording_url.assert_called_once() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + front_camera_mock.async_recording_url.assert_called() + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is None # If the history id hasn't changed the camera will not check again for the video url @@ -253,13 +259,15 @@ async def test_camera_handle_mjpeg_stream( await hass.async_block_till_done(wait_background_tasks=True) front_camera_mock.async_recording_url.assert_not_called() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is None freezer.tick(FORCE_REFRESH_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.async_recording_url.assert_called_once() + front_camera_mock.async_recording_url.assert_called() # Now 
the stream should be returned stream_reader = MockStreamReader(SMALLEST_VALID_JPEG_BYTES) @@ -268,7 +276,9 @@ async def test_camera_handle_mjpeg_stream( mock_camera.return_value.open_camera = AsyncMock() mock_camera.return_value.close = AsyncMock() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is not None # Check the stream has been read assert not await stream_reader.read(-1) @@ -285,7 +295,7 @@ async def test_camera_image( front_camera_mock = mock_ring_devices.get_device(765432) - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_live_view") assert state is not None # history not updated yet @@ -298,7 +308,7 @@ async def test_camera_image( ), pytest.raises(HomeAssistantError), ): - image = await camera.async_get_image(hass, "camera.front") + image = await async_get_image(hass, "camera.front_live_view") freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -311,5 +321,145 @@ async def test_camera_image( "homeassistant.components.ring.camera.ffmpeg.async_get_image", return_value=SMALLEST_VALID_JPEG_BYTES, ): - image = await camera.async_get_image(hass, "camera.front") + image = await async_get_image(hass, "camera.front_live_view") assert image.content == SMALLEST_VALID_JPEG_BYTES + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_camera_stream_attributes( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test stream attributes.""" + await setup_platform(hass, Platform.CAMERA) + + # Live view + state = hass.states.get("camera.front_live_view") + supported_features = state.attributes.get("supported_features") + assert supported_features is CameraEntityFeature.STREAM + camera = get_camera_from_entity_id(hass, "camera.front_live_view") + assert camera.camera_capabilities.frontend_stream_types == {StreamType.WEB_RTC} + + # Last recording + state = hass.states.get("camera.front_last_recording") + supported_features = state.attributes.get("supported_features") + assert supported_features is CameraEntityFeature(0) + camera = get_camera_from_entity_id(hass, "camera.front_last_recording") + assert camera.camera_capabilities.frontend_stream_types == set() + + +async def test_camera_webrtc( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_ring_devices, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test WebRTC interactions.""" + caplog.set_level(logging.ERROR) + await setup_platform(hass, Platform.CAMERA) + client = await hass_ws_client(hass) + + # sdp offer + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.front_live_view", + "offer": "v=0\r\n", + } + ) + response = await client.receive_json() + assert response + assert response.get("success") is True + subscription_id = response["id"] + assert not caplog.text + + front_camera_mock = mock_ring_devices.get_device(FRONT_DEVICE_ID) + front_camera_mock.generate_async_webrtc_stream.assert_called_once() + args = front_camera_mock.generate_async_webrtc_stream.call_args.args + session_id = args[1] + on_message = args[2] + + # receive session + response = await client.receive_json() + event = response.get("event") + assert event + assert event.get("type") 
== "session" + assert not caplog.text + + # Ring candidate + on_message(RingWebRtcMessage(candidate="candidate", sdp_m_line_index=1)) + response = await client.receive_json() + event = response.get("event") + assert event + assert event.get("type") == "candidate" + assert not caplog.text + + # Error message + on_message(RingWebRtcMessage(error_code=1, error_message="error")) + response = await client.receive_json() + event = response.get("event") + assert event + assert event.get("type") == "error" + assert not caplog.text + + # frontend candidate + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.front_live_view", + "session_id": session_id, + "candidate": {"candidate": "candidate", "sdpMLineIndex": 1}, + } + ) + response = await client.receive_json() + assert response + assert response.get("success") is True + assert not caplog.text + front_camera_mock.on_webrtc_candidate.assert_called_once() + + # Invalid frontend candidate + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.front_live_view", + "session_id": session_id, + "candidate": {"candidate": "candidate", "sdpMid": "1"}, + } + ) + response = await client.receive_json() + assert response + assert response.get("success") is False + assert response["error"]["code"] == "home_assistant_error" + msg = "The sdp_m_line_index is required for ring webrtc streaming" + assert msg in response["error"].get("message") + assert msg in caplog.text + front_camera_mock.on_webrtc_candidate.assert_called_once() + + # Answer message + caplog.clear() + on_message(RingWebRtcMessage(answer="v=0\r\n")) + response = await client.receive_json() + event = response.get("event") + assert event + assert event.get("type") == "answer" + assert not caplog.text + + # Unsubscribe/Close session + front_camera_mock.sync_close_webrtc_stream.assert_not_called() + await client.send_json_auto_id( + { + "type": "unsubscribe_events", + "subscription": subscription_id, + } + ) + + response = await client.receive_json() + assert response + assert response.get("success") is True + front_camera_mock.sync_close_webrtc_stream.assert_called_once() diff --git a/tests/components/ring/test_config_flow.py b/tests/components/ring/test_config_flow.py index bbaec2e37c4..409cdac55aa 100644 --- a/tests/components/ring/test_config_flow.py +++ b/tests/components/ring/test_config_flow.py @@ -1,15 +1,19 @@ """Test the Ring config flow.""" -from unittest.mock import AsyncMock, Mock +from unittest.mock import AsyncMock, Mock, patch import pytest import ring_doorbell from homeassistant import config_entries +from homeassistant.components import dhcp from homeassistant.components.ring import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_DEVICE_ID, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import device_registry as dr + +from .conftest import MOCK_HARDWARE_ID from tests.common import MockConfigEntry @@ -27,17 +31,19 @@ async def test_form( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "hello@home-assistant.io", "password": "test-password"}, - ) - await hass.async_block_till_done() + with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): + result2 = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + {"username": "hello@home-assistant.io", "password": "test-password"}, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "hello@home-assistant.io" assert result2["data"] == { - "username": "hello@home-assistant.io", - "token": {"access_token": "mock-token"}, + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: "hello@home-assistant.io", + CONF_TOKEN: {"access_token": "mock-token"}, } assert len(mock_setup_entry.mock_calls) == 1 @@ -80,13 +86,14 @@ async def test_form_2fa( assert result["errors"] == {} mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "foo@bar.com", - CONF_PASSWORD: "fake-password", - }, - ) + with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "foo@bar.com", + CONF_PASSWORD: "fake-password", + }, + ) await hass.async_block_till_done() mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "fake-password", None @@ -107,8 +114,9 @@ async def test_form_2fa( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "foo@bar.com" assert result3["data"] == { - "username": "foo@bar.com", - "token": "new-foobar", + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: "foo@bar.com", + CONF_TOKEN: "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 @@ -154,8 +162,9 @@ async def test_reauth( assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" assert mock_added_config_entry.data == { - "username": "foo@bar.com", - "token": "new-foobar", + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: "foo@bar.com", + CONF_TOKEN: "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 @@ -216,7 +225,185 @@ async def test_reauth_error( assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" assert mock_added_config_entry.data == { - "username": "foo@bar.com", - "token": "new-foobar", + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: "foo@bar.com", + CONF_TOKEN: "new-foobar", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_account_configured( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_added_config_entry: Mock, +) -> None: + """Test that user cannot configure the same account twice.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": "foo@bar.com", "password": "test-password"}, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + +async def test_dhcp_discovery( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ring_client: Mock, + device_registry: dr.DeviceRegistry, +) -> None: + """Test discovery by dhcp.""" + mac_address = "1234567890abcd" + hostname = "Ring-90abcd" + ip_address = "127.0.0.1" + username = "hello@home-assistant.io" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip=ip_address, macaddress=mac_address, hostname=hostname + ), + ) + assert result["type"] is FlowResultType.FORM + 
assert result["errors"] == {} + assert result["step_id"] == "user" + with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": username, "password": "test-password"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "hello@home-assistant.io" + assert result["data"] == { + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: username, + CONF_TOKEN: {"access_token": "mock-token"}, + } + + config_entry = hass.config_entries.async_entry_for_domain_unique_id( + DOMAIN, username + ) + assert config_entry + + # Create a device entry under the config entry just created + device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, mac_address)}, + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip=ip_address, macaddress=mac_address, hostname=hostname + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_reconfigure( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ring_client: Mock, + mock_added_config_entry: MockConfigEntry, +) -> None: + """Test the reconfigure config flow.""" + + assert mock_added_config_entry.data[CONF_DEVICE_ID] == MOCK_HARDWARE_ID + + result = await mock_added_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + with patch("uuid.uuid4", return_value="new-hardware-id"): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"password": "test-password"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert mock_added_config_entry.data[CONF_DEVICE_ID] == "new-hardware-id" + + +@pytest.mark.parametrize( + ("error_type", "errors_msg"), + [ + (ring_doorbell.AuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], + ids=["invalid-auth", "unknown-error"], +) +async def test_reconfigure_errors( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_ring_auth: Mock, + error_type, + errors_msg, +) -> None: + """Test errors during the reconfigure config flow.""" + result = await mock_added_config_entry.start_reconfigure_flow(hass) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + mock_ring_auth.async_fetch_token.side_effect = error_type + with patch("uuid.uuid4", return_value="new-hardware-id"): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "error_fake_password", + }, + ) + await hass.async_block_till_done() + mock_ring_auth.async_fetch_token.assert_called_with( + "foo@bar.com", "error_fake_password", None + ) + mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_PASSWORD: "other_fake_password", + }, + ) + + mock_ring_auth.async_fetch_token.assert_called_with( + "foo@bar.com", "other_fake_password", None + ) + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "2fa" + + # Now test reconfigure can go on to succeed + 
mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) + mock_ring_auth.async_fetch_token.return_value = "new-foobar" + + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"2fa": "123456"}, + ) + + mock_ring_auth.async_fetch_token.assert_called_with( + "foo@bar.com", "other_fake_password", "123456" + ) + + assert result4["type"] is FlowResultType.ABORT + assert result4["reason"] == "reconfigure_successful" + assert mock_added_config_entry.data == { + CONF_DEVICE_ID: "new-hardware-id", + CONF_USERNAME: "foo@bar.com", + CONF_TOKEN: "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/ring/test_event.py b/tests/components/ring/test_event.py new file mode 100644 index 00000000000..5cd60382a97 --- /dev/null +++ b/tests/components/ring/test_event.py @@ -0,0 +1,98 @@ +"""The tests for the Ring event platform.""" + +from datetime import datetime +import time +from unittest.mock import Mock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from ring_doorbell import Ring +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.ring.binary_sensor import RingEvent +from homeassistant.components.ring.coordinator import RingEventListener +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import MockConfigEntry, setup_platform +from .device_mocks import FRONT_DOOR_DEVICE_ID, INGRESS_DEVICE_ID + +from tests.common import snapshot_platform + + +async def test_states( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test states.""" + mock_config_entry.add_to_hass(hass) + await setup_platform(hass, Platform.EVENT) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("device_id", "device_name", "alert_kind", "device_class"), + [ + pytest.param( + FRONT_DOOR_DEVICE_ID, + "front_door", + "motion", + "motion", + id="front_door_motion", + ), + pytest.param( + FRONT_DOOR_DEVICE_ID, "front_door", "ding", "doorbell", id="front_door_ding" + ), + pytest.param( + INGRESS_DEVICE_ID, "ingress", "ding", "doorbell", id="ingress_ding" + ), + pytest.param( + INGRESS_DEVICE_ID, + "ingress", + "intercom_unlock", + "button", + id="ingress_unlock", + ), + ], +) +async def test_event( + hass: HomeAssistant, + mock_ring_client: Ring, + mock_ring_event_listener_class: RingEventListener, + freezer: FrozenDateTimeFactory, + device_id: int, + device_name: str, + alert_kind: str, + device_class: str, +) -> None: + """Test the Ring event platforms.""" + + await setup_platform(hass, Platform.EVENT) + + start_time_str = "2024-09-04T15:32:53.892+00:00" + start_time = datetime.strptime(start_time_str, "%Y-%m-%dT%H:%M:%S.%f%z") + freezer.move_to(start_time) + on_event_cb = mock_ring_event_listener_class.return_value.add_notification_callback.call_args.args[ + 0 + ] + + # Default state is unknown + entity_id = f"event.{device_name}_{alert_kind}" + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "unknown" + assert state.attributes["device_class"] == device_class + + # A new alert sets to on + event = RingEvent( + 1234546, device_id, "Foo", "Bar", time.time(), 180, kind=alert_kind, state=None + ) + mock_ring_client.active_alerts.return_value = [event] + on_event_cb(event) + state = 
hass.states.get(entity_id) + assert state is not None + assert state.state == start_time_str diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index 4ab3e1bd366..27d4813f02d 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -1,20 +1,31 @@ """The tests for the Ring component.""" +from unittest.mock import AsyncMock, patch + from freezegun.api import FrozenDateTimeFactory import pytest -from ring_doorbell import AuthenticationError, RingError, RingTimeout +from ring_doorbell import AuthenticationError, Ring, RingError, RingTimeout from homeassistant.components import ring +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.ring import DOMAIN -from homeassistant.components.ring.const import SCAN_INTERVAL +from homeassistant.components.ring.const import ( + CONF_CONFIG_ENTRY_MINOR_VERSION, + CONF_LISTEN_CREDENTIALS, + SCAN_INTERVAL, +) +from homeassistant.components.ring.coordinator import RingEventListener from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import CONF_TOKEN, CONF_USERNAME +from homeassistant.const import CONF_DEVICE_ID, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component +from .conftest import MOCK_HARDWARE_ID +from .device_mocks import FRONT_DOOR_DEVICE_ID + from tests.common import MockConfigEntry, async_fire_time_changed @@ -186,7 +197,7 @@ async def test_error_on_global_update( assert log_msg in caplog.text - assert mock_config_entry.entry_id in hass.data[DOMAIN] + assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) @pytest.mark.parametrize( @@ -226,46 +237,18 @@ async def test_error_on_device_update( await hass.async_block_till_done(wait_background_tasks=True) assert log_msg in caplog.text - assert mock_config_entry.entry_id in hass.data[DOMAIN] - - -async def test_issue_deprecated_service_ring_update( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - caplog: pytest.LogCaptureFixture, - mock_ring_client, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the issue is raised on deprecated service ring.update.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - await hass.services.async_call(DOMAIN, "update", {}, blocking=True) - - issue = issue_registry.async_get_issue("ring", "deprecated_service_ring_update") - assert issue - assert issue.issue_domain == "ring" - assert issue.issue_id == "deprecated_service_ring_update" - assert issue.translation_key == "deprecated_service_ring_update" - - assert ( - "Detected use of service 'ring.update'. " - "This is deprecated and will stop working in Home Assistant 2024.10. 
" - "Use 'homeassistant.update_entity' instead which updates all ring entities" - ) in caplog.text + assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) @pytest.mark.parametrize( - ("domain", "old_unique_id"), + ("domain", "old_unique_id", "new_unique_id"), [ - ( - LIGHT_DOMAIN, - 123456, - ), - ( + pytest.param(LIGHT_DOMAIN, 123456, "123456", id="Light integer"), + pytest.param( CAMERA_DOMAIN, 654321, + "654321-last_recording", + id="Camera integer", ), ], ) @@ -276,6 +259,7 @@ async def test_update_unique_id( mock_ring_client, domain: str, old_unique_id: int | str, + new_unique_id: str, ) -> None: """Test unique_id update of integration.""" entry = MockConfigEntry( @@ -286,6 +270,7 @@ async def test_update_unique_id( "token": {"access_token": "mock-token"}, }, unique_id="foo@bar.com", + minor_version=1, ) entry.add_to_hass(hass) @@ -301,8 +286,9 @@ async def test_update_unique_id( entity_migrated = entity_registry.async_get(entity.entity_id) assert entity_migrated - assert entity_migrated.unique_id == str(old_unique_id) + assert entity_migrated.unique_id == new_unique_id assert (f"Fixing non string unique id {old_unique_id}") in caplog.text + assert entry.minor_version == CONF_CONFIG_ENTRY_MINOR_VERSION async def test_update_unique_id_existing( @@ -321,6 +307,7 @@ async def test_update_unique_id_existing( "token": {"access_token": "mock-token"}, }, unique_id="foo@bar.com", + minor_version=1, ) entry.add_to_hass(hass) @@ -351,16 +338,17 @@ async def test_update_unique_id_existing( f"already exists for '{entity_existing.entity_id}', " "You may have to delete unavailable ring entities" ) in caplog.text + assert entry.minor_version == CONF_CONFIG_ENTRY_MINOR_VERSION -async def test_update_unique_id_no_update( +async def test_update_unique_id_camera_update( hass: HomeAssistant, entity_registry: er.EntityRegistry, caplog: pytest.LogCaptureFixture, mock_ring_client, ) -> None: - """Test unique_id update of integration.""" - correct_unique_id = "123456" + """Test camera unique id with no suffix is updated.""" + correct_unique_id = "123456-last_recording" entry = MockConfigEntry( title="Ring", domain=DOMAIN, @@ -369,6 +357,7 @@ async def test_update_unique_id_no_update( "token": {"access_token": "mock-token"}, }, unique_id="foo@bar.com", + minor_version=1, ) entry.add_to_hass(hass) @@ -378,14 +367,16 @@ async def test_update_unique_id_no_update( unique_id="123456", config_entry=entry, ) - assert entity.unique_id == correct_unique_id + assert entity.unique_id == "123456" assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() entity_migrated = entity_registry.async_get(entity.entity_id) assert entity_migrated assert entity_migrated.unique_id == correct_unique_id + assert entity.disabled is False assert "Fixing non string unique id" not in caplog.text + assert entry.minor_version == CONF_CONFIG_ENTRY_MINOR_VERSION async def test_token_updated( @@ -413,3 +404,92 @@ async def test_token_updated( async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_config_entry.data[CONF_TOKEN] == {"access_token": "new-mock-token"} + + +async def test_listen_token_updated( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_ring_client, + mock_ring_event_listener_class, +) -> None: + """Test that the listener token value is updated in the config entry. + + This simulates the api calling the callback. 
+ """ + mock_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert mock_ring_event_listener_class.call_count == 1 + token_updater = mock_ring_event_listener_class.call_args.args[2] + + assert mock_config_entry.data.get(CONF_LISTEN_CREDENTIALS) is None + token_updater({"listen_access_token": "mock-token"}) + assert mock_config_entry.data.get(CONF_LISTEN_CREDENTIALS) == { + "listen_access_token": "mock-token" + } + + +async def test_no_listen_start( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + entity_registry: er.EntityRegistry, + mock_ring_event_listener_class: type[RingEventListener], + mock_ring_client: Ring, +) -> None: + """Test behaviour if listener doesn't start.""" + mock_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + data={"username": "foo", "token": {}}, + ) + # Create a binary sensor entity so it is not ignored by the deprecation check + # and the listener will start + entity_registry.async_get_or_create( + domain=BINARY_SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=f"{FRONT_DOOR_DEVICE_ID}-motion", + suggested_object_id=f"{FRONT_DOOR_DEVICE_ID}_motion", + config_entry=mock_entry, + ) + mock_ring_event_listener_class.do_not_start = True + + mock_ring_event_listener_class.return_value.started = False + + mock_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + assert "Ring event listener failed to start after 10 seconds" in [ + record.message for record in caplog.records if record.levelname == "WARNING" + ] + + +async def test_migrate_create_device_id( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test migration creates new device id created.""" + entry = MockConfigEntry( + title="Ring", + domain=DOMAIN, + data={ + CONF_USERNAME: "foo@bar.com", + "token": {"access_token": "mock-token"}, + }, + unique_id="foo@bar.com", + version=1, + minor_version=1, + ) + entry.add_to_hass(hass) + with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.minor_version == CONF_CONFIG_ENTRY_MINOR_VERSION + assert CONF_DEVICE_ID in entry.data + assert entry.data[CONF_DEVICE_ID] == MOCK_HARDWARE_ID + + assert "Migration to version 1.2 complete" in caplog.text diff --git a/tests/components/ring/test_light.py b/tests/components/ring/test_light.py index 22ed4a31cf8..0be314c3135 100644 --- a/tests/components/ring/test_light.py +++ b/tests/components/ring/test_light.py @@ -1,7 +1,10 @@ """The tests for the Ring light platform.""" +from unittest.mock import Mock + import pytest import ring_doorbell +from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import Platform @@ -9,22 +12,22 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from .common import setup_platform +from .common import MockConfigEntry, setup_platform + +from tests.common import snapshot_platform -async def test_entity_registry( +async def test_states( hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, - mock_ring_client, + snapshot: SnapshotAssertion, ) -> None: - """Tests that the devices are registered in the entity registry.""" + """Test states.""" + 
mock_config_entry.add_to_hass(hass) await setup_platform(hass, Platform.LIGHT) - - entry = entity_registry.async_get("light.front_light") - assert entry.unique_id == "765432" - - entry = entity_registry.async_get("light.internal_light") - assert entry.unique_id == "345678" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) async def test_light_off_reports_correctly( @@ -65,25 +68,6 @@ async def test_light_can_be_turned_on(hass: HomeAssistant, mock_ring_client) -> assert state.state == "on" -async def test_updates_work( - hass: HomeAssistant, mock_ring_client, mock_ring_devices -) -> None: - """Tests the update service works correctly.""" - await setup_platform(hass, Platform.LIGHT) - state = hass.states.get("light.front_light") - assert state.state == "off" - - front_light_mock = mock_ring_devices.get_device(765432) - front_light_mock.lights = "on" - - await hass.services.async_call("ring", "update", {}, blocking=True) - - await hass.async_block_till_done() - - state = hass.states.get("light.front_light") - assert state.state == "on" - - @pytest.mark.parametrize( ("exception_type", "reauth_expected"), [ diff --git a/tests/components/ring/test_number.py b/tests/components/ring/test_number.py new file mode 100644 index 00000000000..aa484c6a7b2 --- /dev/null +++ b/tests/components/ring/test_number.py @@ -0,0 +1,95 @@ +"""The tests for the Ring number platform.""" + +from unittest.mock import Mock + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import MockConfigEntry, setup_platform + +from tests.common import snapshot_platform + + +@pytest.mark.parametrize( + ("entity_id", "unique_id"), + [ + ("number.downstairs_volume", "123456-volume"), + ("number.front_door_volume", "987654-volume"), + ("number.ingress_doorbell_volume", "185036587-doorbell_volume"), + ("number.ingress_mic_volume", "185036587-mic_volume"), + ("number.ingress_voice_volume", "185036587-voice_volume"), + ], +) +async def test_entity_registry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_ring_client: Mock, + entity_id: str, + unique_id: str, +) -> None: + """Tests that the devices are registered in the entity registry.""" + await setup_platform(hass, Platform.NUMBER) + + entry = entity_registry.async_get(entity_id) + assert entry is not None and entry.unique_id == unique_id + + +async def test_states( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test states.""" + + mock_config_entry.add_to_hass(hass) + await setup_platform(hass, Platform.NUMBER) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "new_value"), + [ + ("number.downstairs_volume", "4.0"), + ("number.front_door_volume", "3.0"), + ("number.ingress_doorbell_volume", "7.0"), + ("number.ingress_mic_volume", "2.0"), + ("number.ingress_voice_volume", "5.0"), + ], +) +async def test_volume_can_be_changed( + hass: HomeAssistant, + mock_ring_client: Mock, + entity_id: str, + new_value: str, +) -> None: + """Tests the volume can be changed correctly.""" + await setup_platform(hass, Platform.NUMBER) + + state 
= hass.states.get(entity_id) + assert state is not None + old_value = state.state + + # otherwise this test would be pointless + assert old_value != new_value + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: new_value}, + blocking=True, + ) + + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state is not None and state.state == new_value diff --git a/tests/components/ring/test_sensor.py b/tests/components/ring/test_sensor.py index 1f05c120251..48f679c4524 100644 --- a/tests/components/ring/test_sensor.py +++ b/tests/components/ring/test_sensor.py @@ -1,52 +1,84 @@ """The tests for the Ring sensor platform.""" import logging +from unittest.mock import Mock, patch from freezegun.api import FrozenDateTimeFactory import pytest +from ring_doorbell import Ring +from syrupy.assertion import SnapshotAssertion -from homeassistant.components.ring.const import SCAN_INTERVAL -from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass +from homeassistant.components.ring.const import DOMAIN, SCAN_INTERVAL +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component -from .common import setup_platform +from .common import MockConfigEntry, setup_platform +from .device_mocks import ( + DOWNSTAIRS_DEVICE_ID, + FRONT_DEVICE_ID, + FRONT_DOOR_DEVICE_ID, + INGRESS_DEVICE_ID, + INTERNAL_DEVICE_ID, +) -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform -async def test_sensor(hass: HomeAssistant, mock_ring_client) -> None: - """Test the Ring sensors.""" - await setup_platform(hass, "sensor") +@pytest.fixture +def create_deprecated_and_disabled_sensor_entities( + hass: HomeAssistant, + mock_config_entry: ConfigEntry, + entity_registry: er.EntityRegistry, +): + """Create the entity so it is not ignored by the deprecation check.""" + mock_config_entry.add_to_hass(hass) - front_battery_state = hass.states.get("sensor.front_battery") - assert front_battery_state is not None - assert front_battery_state.state == "80" - assert ( - front_battery_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT - ) + def create_entry( + device_name, + description, + device_id, + ): + unique_id = f"{device_id}-{description}" + entity_registry.async_get_or_create( + domain=SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=f"{device_name}_{description}", + config_entry=mock_config_entry, + ) - front_door_battery_state = hass.states.get("sensor.front_door_battery") - assert front_door_battery_state is not None - assert front_door_battery_state.state == "100" - assert ( - front_door_battery_state.attributes[ATTR_STATE_CLASS] - == SensorStateClass.MEASUREMENT - ) + # Deprecated + create_entry("downstairs", "volume", DOWNSTAIRS_DEVICE_ID) + create_entry("front_door", "volume", FRONT_DEVICE_ID) + create_entry("ingress", "doorbell_volume", INGRESS_DEVICE_ID) + create_entry("ingress", "mic_volume", INGRESS_DEVICE_ID) + create_entry("ingress", "voice_volume", INGRESS_DEVICE_ID) - downstairs_volume_state = hass.states.get("sensor.downstairs_volume") - assert downstairs_volume_state is not None - assert downstairs_volume_state.state == "2" + # Disabled + for desc in 
("wifi_signal_category", "wifi_signal_strength"): + create_entry("downstairs", desc, DOWNSTAIRS_DEVICE_ID) + create_entry("front", desc, FRONT_DEVICE_ID) + create_entry("ingress", desc, INGRESS_DEVICE_ID) + create_entry("front_door", desc, FRONT_DOOR_DEVICE_ID) + create_entry("internal", desc, INTERNAL_DEVICE_ID) - ingress_mic_volume_state = hass.states.get("sensor.ingress_mic_volume") - assert ingress_mic_volume_state.state == "11" - ingress_doorbell_volume_state = hass.states.get("sensor.ingress_doorbell_volume") - assert ingress_doorbell_volume_state.state == "8" - - ingress_voice_volume_state = hass.states.get("sensor.ingress_voice_volume") - assert ingress_voice_volume_state.state == "11" +async def test_states( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + create_deprecated_and_disabled_sensor_entities, +) -> None: + """Test states.""" + mock_config_entry.add_to_hass(hass) + await setup_platform(hass, Platform.SENSOR) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) @pytest.mark.parametrize( @@ -107,13 +139,23 @@ async def test_health_sensor( @pytest.mark.parametrize( - ("device_name", "sensor_name", "expected_value"), + ("device_id", "device_name", "sensor_name", "expected_value"), [ - ("front_door", "last_motion", "2017-03-05T15:03:40+00:00"), - ("front_door", "last_ding", "2018-03-05T15:03:40+00:00"), - ("front_door", "last_activity", "2018-03-05T15:03:40+00:00"), - ("front", "last_motion", "2017-03-05T15:03:40+00:00"), - ("ingress", "last_activity", "2024-02-02T11:21:24+00:00"), + ( + FRONT_DOOR_DEVICE_ID, + "front_door", + "last_motion", + "2017-03-05T15:03:40+00:00", + ), + (FRONT_DOOR_DEVICE_ID, "front_door", "last_ding", "2018-03-05T15:03:40+00:00"), + ( + FRONT_DOOR_DEVICE_ID, + "front_door", + "last_activity", + "2018-03-05T15:03:40+00:00", + ), + (FRONT_DEVICE_ID, "front", "last_motion", "2017-03-05T15:03:40+00:00"), + (INGRESS_DEVICE_ID, "ingress", "last_activity", "2024-02-02T11:21:24+00:00"), ], ids=[ "doorbell-motion", @@ -125,14 +167,31 @@ async def test_health_sensor( ) async def test_history_sensor( hass: HomeAssistant, - mock_ring_client, + mock_ring_client: Ring, + mock_config_entry: ConfigEntry, + entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, - device_name, - sensor_name, - expected_value, + device_id: int, + device_name: str, + sensor_name: str, + expected_value: str, ) -> None: """Test the Ring sensors.""" - await setup_platform(hass, "sensor") + # Create the entity so it is not ignored by the deprecation check + mock_config_entry.add_to_hass(hass) + + entity_id = f"sensor.{device_name}_{sensor_name}" + unique_id = f"{device_id}-{sensor_name}" + + entity_registry.async_get_or_create( + domain=SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=f"{device_name}_{sensor_name}", + config_entry=mock_config_entry, + ) + with patch("homeassistant.components.ring.PLATFORMS", [Platform.SENSOR]): + assert await async_setup_component(hass, DOMAIN, {}) entity_id = f"sensor.{device_name}_{sensor_name}" sensor_state = hass.states.get(entity_id) diff --git a/tests/components/ring/test_siren.py b/tests/components/ring/test_siren.py index e71dd1e6e77..6cfe8aecd57 100644 --- a/tests/components/ring/test_siren.py +++ b/tests/components/ring/test_siren.py @@ -1,15 +1,28 @@ """The tests for the Ring button platform.""" +from unittest.mock import Mock + import pytest import ring_doorbell 
+from syrupy.assertion import SnapshotAssertion +from homeassistant.components.siren import DOMAIN as SIREN_DOMAIN from homeassistant.config_entries import SOURCE_REAUTH -from homeassistant.const import Platform +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from .common import setup_platform +from .common import MockConfigEntry, setup_platform + +from tests.common import snapshot_platform async def test_entity_registry( @@ -24,6 +37,20 @@ async def test_entity_registry( assert entry.unique_id == "123456-siren" +async def test_states( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test states.""" + + mock_config_entry.add_to_hass(hass) + await setup_platform(hass, Platform.SIREN) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + async def test_sirens_report_correctly(hass: HomeAssistant, mock_ring_client) -> None: """Tests that the initial state of a device that should be on is correct.""" await setup_platform(hass, Platform.SIREN) @@ -165,3 +192,44 @@ async def test_siren_errors_when_turned_on( ) == reauth_expected ) + + +async def test_camera_siren_on_off( + hass: HomeAssistant, mock_ring_client, mock_ring_devices +) -> None: + """Tests siren on a ring camera turns on and off.""" + await setup_platform(hass, Platform.SIREN) + + entity_id = "siren.front_siren" + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + downstairs_chime_mock = mock_ring_devices.get_device(765432) + downstairs_chime_mock.async_set_siren.assert_called_once_with(1) + + downstairs_chime_mock.async_set_siren.reset_mock() + + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + downstairs_chime_mock.async_set_siren.assert_called_once_with(0) + + assert state.state == STATE_OFF diff --git a/tests/components/ring/test_switch.py b/tests/components/ring/test_switch.py index f7aa885342a..22b90253c23 100644 --- a/tests/components/ring/test_switch.py +++ b/tests/components/ring/test_switch.py @@ -1,35 +1,72 @@ """The tests for the Ring switch platform.""" +from unittest.mock import Mock + import pytest import ring_doorbell +from syrupy.assertion import SnapshotAssertion -from homeassistant.config_entries import SOURCE_REAUTH -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.components.ring.const import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component -from .common import 
setup_platform +from .common import MockConfigEntry, setup_platform + +from tests.common import snapshot_platform -async def test_entity_registry( +@pytest.fixture +def create_deprecated_siren_entity( hass: HomeAssistant, + mock_config_entry: ConfigEntry, entity_registry: er.EntityRegistry, - mock_ring_client, +): + """Create the entity so it is not ignored by the deprecation check.""" + mock_config_entry.add_to_hass(hass) + + def create_entry(device_name, device_id): + unique_id = f"{device_id}-siren" + + entity_registry.async_get_or_create( + domain=SWITCH_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=f"{device_name}_siren", + config_entry=mock_config_entry, + ) + + create_entry("front", 765432) + create_entry("internal", 345678) + + +async def test_states( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + create_deprecated_siren_entity, ) -> None: - """Tests that the devices are registered in the entity registry.""" + """Test states.""" + + mock_config_entry.add_to_hass(hass) await setup_platform(hass, Platform.SWITCH) - - entry = entity_registry.async_get("switch.front_siren") - assert entry.unique_id == "765432-siren" - - entry = entity_registry.async_get("switch.internal_siren") - assert entry.unique_id == "345678-siren" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) async def test_siren_off_reports_correctly( - hass: HomeAssistant, mock_ring_client + hass: HomeAssistant, mock_ring_client, create_deprecated_siren_entity ) -> None: """Tests that the initial state of a device that should be off is correct.""" await setup_platform(hass, Platform.SWITCH) @@ -40,7 +77,7 @@ async def test_siren_off_reports_correctly( async def test_siren_on_reports_correctly( - hass: HomeAssistant, mock_ring_client + hass: HomeAssistant, mock_ring_client, create_deprecated_siren_entity ) -> None: """Tests that the initial state of a device that should be on is correct.""" await setup_platform(hass, Platform.SWITCH) @@ -50,45 +87,46 @@ async def test_siren_on_reports_correctly( assert state.attributes.get("friendly_name") == "Internal Siren" -async def test_siren_can_be_turned_on(hass: HomeAssistant, mock_ring_client) -> None: - """Tests the siren turns on correctly.""" - await setup_platform(hass, Platform.SWITCH) - - state = hass.states.get("switch.front_siren") - assert state.state == "off" - - await hass.services.async_call( - "switch", "turn_on", {"entity_id": "switch.front_siren"}, blocking=True - ) - - await hass.async_block_till_done() - state = hass.states.get("switch.front_siren") - assert state.state == "on" - - -async def test_updates_work( - hass: HomeAssistant, mock_ring_client, mock_ring_devices +@pytest.mark.parametrize( + ("entity_id"), + [ + ("switch.front_siren"), + ("switch.front_door_in_home_chime"), + ("switch.front_motion_detection"), + ], +) +async def test_switch_can_be_turned_on_and_off( + hass: HomeAssistant, + mock_ring_client, + create_deprecated_siren_entity, + entity_id, ) -> None: - """Tests the update service works correctly.""" + """Tests the switch turns on and off correctly.""" await setup_platform(hass, Platform.SWITCH) - state = hass.states.get("switch.front_siren") - assert state.state == "off" - front_siren_mock = mock_ring_devices.get_device(765432) - front_siren_mock.siren = 20 + assert hass.states.get(entity_id) - await async_setup_component(hass, "homeassistant", {}) await 
hass.services.async_call( - "homeassistant", - "update_entity", - {ATTR_ENTITY_ID: ["switch.front_siren"]}, + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON - state = hass.states.get("switch.front_siren") - assert state.state == "on" + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_OFF @pytest.mark.parametrize( @@ -106,6 +144,7 @@ async def test_switch_errors_when_turned_on( mock_ring_devices, exception_type, reauth_expected, + create_deprecated_siren_entity, ) -> None: """Tests the switch turns on correctly.""" await setup_platform(hass, Platform.SWITCH) diff --git a/tests/components/risco/test_alarm_control_panel.py b/tests/components/risco/test_alarm_control_panel.py index 9b554ddbf28..8caef1fbfc4 100644 --- a/tests/components/risco/test_alarm_control_panel.py +++ b/tests/components/risco/test_alarm_control_panel.py @@ -9,6 +9,7 @@ import pytest from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.risco import CannotConnectError, UnauthorizedError from homeassistant.components.risco.const import DOMAIN @@ -18,13 +19,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -42,25 +36,25 @@ SECOND_LOCAL_ENTITY_ID = "alarm_control_panel.name_1" CODES_REQUIRED_OPTIONS = {"code_arm_required": True, "code_disarm_required": True} TEST_RISCO_TO_HA = { - "arm": STATE_ALARM_ARMED_AWAY, - "partial_arm": STATE_ALARM_ARMED_HOME, - "A": STATE_ALARM_ARMED_HOME, - "B": STATE_ALARM_ARMED_HOME, - "C": STATE_ALARM_ARMED_NIGHT, - "D": STATE_ALARM_ARMED_NIGHT, + "arm": AlarmControlPanelState.ARMED_AWAY, + "partial_arm": AlarmControlPanelState.ARMED_HOME, + "A": AlarmControlPanelState.ARMED_HOME, + "B": AlarmControlPanelState.ARMED_HOME, + "C": AlarmControlPanelState.ARMED_NIGHT, + "D": AlarmControlPanelState.ARMED_NIGHT, } TEST_FULL_RISCO_TO_HA = { **TEST_RISCO_TO_HA, - "D": STATE_ALARM_ARMED_CUSTOM_BYPASS, + "D": AlarmControlPanelState.ARMED_CUSTOM_BYPASS, } TEST_HA_TO_RISCO = { - STATE_ALARM_ARMED_AWAY: "arm", - STATE_ALARM_ARMED_HOME: "partial_arm", - STATE_ALARM_ARMED_NIGHT: "C", + AlarmControlPanelState.ARMED_AWAY: "arm", + AlarmControlPanelState.ARMED_HOME: "partial_arm", + AlarmControlPanelState.ARMED_NIGHT: "C", } TEST_FULL_HA_TO_RISCO = { **TEST_HA_TO_RISCO, - STATE_ALARM_ARMED_CUSTOM_BYPASS: "D", + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: "D", } CUSTOM_MAPPING_OPTIONS = { "risco_states_to_ha": TEST_RISCO_TO_HA, @@ -210,7 +204,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "triggered", - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.TRIGGERED, entity_id, partition_id, ) @@ -218,7 +212,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "arming", - STATE_ALARM_ARMING, + AlarmControlPanelState.ARMING, entity_id, partition_id, ) @@ -226,7 +220,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "armed", - STATE_ALARM_ARMED_AWAY, + 
AlarmControlPanelState.ARMED_AWAY, entity_id, partition_id, ) @@ -234,7 +228,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "partially_armed", - STATE_ALARM_ARMED_HOME, + AlarmControlPanelState.ARMED_HOME, entity_id, partition_id, ) @@ -242,7 +236,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "disarmed", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, entity_id, partition_id, ) @@ -257,7 +251,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "partially_armed", - STATE_ALARM_ARMED_NIGHT, + AlarmControlPanelState.ARMED_NIGHT, entity_id, partition_id, ) @@ -595,7 +589,7 @@ async def test_local_states( hass, two_part_local_alarm, "triggered", - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.TRIGGERED, entity_id, partition_id, callback, @@ -604,7 +598,7 @@ async def test_local_states( hass, two_part_local_alarm, "arming", - STATE_ALARM_ARMING, + AlarmControlPanelState.ARMING, entity_id, partition_id, callback, @@ -613,7 +607,7 @@ async def test_local_states( hass, two_part_local_alarm, "armed", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, entity_id, partition_id, callback, @@ -622,7 +616,7 @@ async def test_local_states( hass, two_part_local_alarm, "partially_armed", - STATE_ALARM_ARMED_HOME, + AlarmControlPanelState.ARMED_HOME, entity_id, partition_id, callback, @@ -631,7 +625,7 @@ async def test_local_states( hass, two_part_local_alarm, "disarmed", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, entity_id, partition_id, callback, @@ -647,7 +641,7 @@ async def test_local_states( hass, two_part_local_alarm, "partially_armed", - STATE_ALARM_ARMED_NIGHT, + AlarmControlPanelState.ARMED_NIGHT, entity_id, partition_id, callback, diff --git a/tests/components/risco/test_config_flow.py b/tests/components/risco/test_config_flow.py index 9fade18ea96..cff5f80e6c4 100644 --- a/tests/components/risco/test_config_flow.py +++ b/tests/components/risco/test_config_flow.py @@ -154,14 +154,12 @@ async def test_form_cloud_already_exists(hass: HomeAssistant) -> None: assert result3["reason"] == "already_configured" -async def test_form_reauth(hass: HomeAssistant, cloud_config_entry) -> None: +async def test_form_reauth( + hass: HomeAssistant, cloud_config_entry: MockConfigEntry +) -> None: """Test reauthenticate.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=cloud_config_entry.data, - ) + result = await cloud_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -194,15 +192,11 @@ async def test_form_reauth(hass: HomeAssistant, cloud_config_entry) -> None: async def test_form_reauth_with_new_username( - hass: HomeAssistant, cloud_config_entry + hass: HomeAssistant, cloud_config_entry: MockConfigEntry ) -> None: """Test reauthenticate with new username.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=cloud_config_entry.data, - ) + result = await cloud_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/rituals_perfume_genie/test_select.py b/tests/components/rituals_perfume_genie/test_select.py index 17612edfd97..a4d97ab83fd 100644 --- a/tests/components/rituals_perfume_genie/test_select.py +++ b/tests/components/rituals_perfume_genie/test_select.py @@ -9,10 +9,10 @@ from homeassistant.components.select import ( DOMAIN as 
SELECT_DOMAIN, ) from homeassistant.const import ( - AREA_SQUARE_METERS, ATTR_ENTITY_ID, SERVICE_SELECT_OPTION, EntityCategory, + UnitOfArea, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -38,7 +38,7 @@ async def test_select_entity( entry = entity_registry.async_get("select.genie_room_size") assert entry assert entry.unique_id == f"{diffuser.hublot}-room_size_square_meter" - assert entry.unit_of_measurement == AREA_SQUARE_METERS + assert entry.unit_of_measurement == UnitOfArea.SQUARE_METERS assert entry.entity_category == EntityCategory.CONFIG diff --git a/tests/components/rmvtransport/test_sensor.py b/tests/components/rmvtransport/test_sensor.py index c17eaac2105..47728be438c 100644 --- a/tests/components/rmvtransport/test_sensor.py +++ b/tests/components/rmvtransport/test_sensor.py @@ -32,6 +32,23 @@ VALID_CONFIG_MISC = { } VALID_CONFIG_DEST = { + "sensor": { + "platform": "rmvtransport", + "next_departure": [ + { + "station": "3000010", + "destinations": [ + "Frankfurt (Main) Flughafen Regionalbahnhof", + "Frankfurt (Main) Stadion", + ], + "lines": [12, "S8"], + "time_offset": 15, + } + ], + } +} + +VALID_CONFIG_DEST_ONLY = { "sensor": { "platform": "rmvtransport", "next_departure": [ @@ -144,6 +161,19 @@ def get_departures_mock(): "info_long": None, "icon": "https://products/32_pic.png", }, + { + "product": "Bus", + "number": 12, + "trainId": "1234568", + "direction": "Frankfurt (Main) Hugo-Junkers-Straße/Schleife", + "departure_time": datetime.datetime(2018, 8, 6, 14, 30), + "minutes": 16, + "delay": 0, + "stops": ["Frankfurt (Main) Stadion"], + "info": None, + "info_long": None, + "icon": "https://products/32_pic.png", + }, ], } @@ -215,6 +245,26 @@ async def test_rmvtransport_dest_config(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "sensor", VALID_CONFIG_DEST) await hass.async_block_till_done() + state = hass.states.get("sensor.frankfurt_main_hauptbahnhof") + assert state is not None + assert state.state == "16" + assert ( + state.attributes["direction"] == "Frankfurt (Main) Hugo-Junkers-Straße/Schleife" + ) + assert state.attributes["line"] == 12 + assert state.attributes["minutes"] == 16 + assert state.attributes["departure_time"] == datetime.datetime(2018, 8, 6, 14, 30) + + +async def test_rmvtransport_dest_only_config(hass: HomeAssistant) -> None: + """Test destination configuration.""" + with patch( + "RMVtransport.RMVtransport.get_departures", + return_value=get_departures_mock(), + ): + assert await async_setup_component(hass, "sensor", VALID_CONFIG_DEST_ONLY) + await hass.async_block_till_done() + state = hass.states.get("sensor.frankfurt_main_hauptbahnhof") assert state.state == "11" assert ( diff --git a/tests/components/roborock/snapshots/test_diagnostics.ambr b/tests/components/roborock/snapshots/test_diagnostics.ambr index 805a498041a..26ecb729312 100644 --- a/tests/components/roborock/snapshots/test_diagnostics.ambr +++ b/tests/components/roborock/snapshots/test_diagnostics.ambr @@ -102,6 +102,7 @@ 'id': '120', 'mode': 'ro', 'name': '错误代码', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -109,6 +110,7 @@ 'id': '121', 'mode': 'ro', 'name': '设备状态', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -116,6 +118,7 @@ 'id': '122', 'mode': 'ro', 'name': '设备电量', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -123,6 +126,7 @@ 'id': '123', 'mode': 'rw', 'name': '清扫模式', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -130,6 +134,7 @@ 'id': 
'124', 'mode': 'rw', 'name': '拖地模式', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -137,6 +142,7 @@ 'id': '125', 'mode': 'rw', 'name': '主刷寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -144,6 +150,7 @@ 'id': '126', 'mode': 'rw', 'name': '边刷寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -151,6 +158,7 @@ 'id': '127', 'mode': 'rw', 'name': '滤网寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -381,6 +389,7 @@ 'id': '120', 'mode': 'ro', 'name': '错误代码', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -388,6 +397,7 @@ 'id': '121', 'mode': 'ro', 'name': '设备状态', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -395,6 +405,7 @@ 'id': '122', 'mode': 'ro', 'name': '设备电量', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -402,6 +413,7 @@ 'id': '123', 'mode': 'rw', 'name': '清扫模式', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -409,6 +421,7 @@ 'id': '124', 'mode': 'rw', 'name': '拖地模式', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -416,6 +429,7 @@ 'id': '125', 'mode': 'rw', 'name': '主刷寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -423,6 +437,7 @@ 'id': '126', 'mode': 'rw', 'name': '边刷寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -430,6 +445,7 @@ 'id': '127', 'mode': 'rw', 'name': '滤网寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ diff --git a/tests/components/roborock/test_button.py b/tests/components/roborock/test_button.py index 88cf5beab15..43ef043f79c 100644 --- a/tests/components/roborock/test_button.py +++ b/tests/components/roborock/test_button.py @@ -3,9 +3,11 @@ from unittest.mock import patch import pytest +import roborock from homeassistant.components.button import SERVICE_PRESS from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry @@ -16,7 +18,7 @@ from tests.common import MockConfigEntry ("button.roborock_s7_maxv_reset_sensor_consumable"), ("button.roborock_s7_maxv_reset_air_filter_consumable"), ("button.roborock_s7_maxv_reset_side_brush_consumable"), - "button.roborock_s7_maxv_reset_main_brush_consumable", + ("button.roborock_s7_maxv_reset_main_brush_consumable"), ], ) @pytest.mark.freeze_time("2023-10-30 08:50:00") @@ -41,3 +43,37 @@ async def test_update_success( ) assert mock_send_message.assert_called_once assert hass.states.get(entity_id).state == "2023-10-30T08:50:00+00:00" + + +@pytest.mark.parametrize( + ("entity_id"), + [ + ("button.roborock_s7_maxv_reset_air_filter_consumable"), + ], +) +@pytest.mark.freeze_time("2023-10-30 08:50:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_update_failure( + hass: HomeAssistant, + bypass_api_fixture, + setup_entry: MockConfigEntry, + entity_id: str, +) -> None: + """Test failure while pressing the button entity.""" + # Ensure that the entity exist, as these test can pass even if there is no entity. 
+ assert hass.states.get(entity_id).state == "unknown" + with ( + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.send_message", + side_effect=roborock.exceptions.RoborockTimeout, + ) as mock_send_message, + pytest.raises(HomeAssistantError, match="Error while calling RESET_CONSUMABLE"), + ): + await hass.services.async_call( + "button", + SERVICE_PRESS, + blocking=True, + target={"entity_id": entity_id}, + ) + assert mock_send_message.assert_called_once + assert hass.states.get(entity_id).state == "2023-10-30T08:50:00+00:00" diff --git a/tests/components/roborock/test_number.py b/tests/components/roborock/test_number.py index 3291dd2a7dc..7e87b49253e 100644 --- a/tests/components/roborock/test_number.py +++ b/tests/components/roborock/test_number.py @@ -3,9 +3,11 @@ from unittest.mock import patch import pytest +import roborock from homeassistant.components.number import ATTR_VALUE, SERVICE_SET_VALUE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry @@ -37,3 +39,36 @@ async def test_update_success( target={"entity_id": entity_id}, ) assert mock_send_message.assert_called_once + + +@pytest.mark.parametrize( + ("entity_id", "value"), + [ + ("number.roborock_s7_maxv_volume", 3.0), + ], +) +async def test_update_failed( + hass: HomeAssistant, + bypass_api_fixture, + setup_entry: MockConfigEntry, + entity_id: str, + value: float, +) -> None: + """Test allowed changing values for number entities.""" + # Ensure that the entity exist, as these test can pass even if there is no entity. + assert hass.states.get(entity_id) is not None + with ( + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.send_message", + side_effect=roborock.exceptions.RoborockTimeout, + ) as mock_send_message, + pytest.raises(HomeAssistantError, match="Failed to update Roborock options"), + ): + await hass.services.async_call( + "number", + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: value}, + blocking=True, + target={"entity_id": entity_id}, + ) + assert mock_send_message.assert_called_once diff --git a/tests/components/roborock/test_select.py b/tests/components/roborock/test_select.py index ce846107d93..784150e24c7 100644 --- a/tests/components/roborock/test_select.py +++ b/tests/components/roborock/test_select.py @@ -59,7 +59,7 @@ async def test_update_failure( "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.send_message", side_effect=RoborockException(), ), - pytest.raises(HomeAssistantError), + pytest.raises(HomeAssistantError, match="Error while calling SET_MOP_MOD"), ): await hass.services.async_call( "select", diff --git a/tests/components/roborock/test_switch.py b/tests/components/roborock/test_switch.py index 3afa72b319d..5de3c208c1e 100644 --- a/tests/components/roborock/test_switch.py +++ b/tests/components/roborock/test_switch.py @@ -3,9 +3,11 @@ from unittest.mock import patch import pytest +import roborock from homeassistant.components.switch import SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry @@ -49,3 +51,37 @@ async def test_update_success( target={"entity_id": entity_id}, ) assert mock_send_message.assert_called_once + + +@pytest.mark.parametrize( + ("entity_id", "service"), + [ + ("switch.roborock_s7_maxv_status_indicator_light", SERVICE_TURN_ON), + ("switch.roborock_s7_maxv_status_indicator_light", 
SERVICE_TURN_OFF), + ], +) +async def test_update_failed( + hass: HomeAssistant, + bypass_api_fixture, + setup_entry: MockConfigEntry, + entity_id: str, + service: str, +) -> None: + """Test a failure while updating a switch.""" + # Ensure that the entity exists, as these tests can pass even if there is no entity. + assert hass.states.get(entity_id) is not None + with ( + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1._send_command", + side_effect=roborock.exceptions.RoborockTimeout, + ) as mock_send_message, + pytest.raises(HomeAssistantError, match="Failed to update Roborock options"), + ): + await hass.services.async_call( + "switch", + service, + service_data=None, + blocking=True, + target={"entity_id": entity_id}, + ) + mock_send_message.assert_called_once() diff --git a/tests/components/roborock/test_time.py b/tests/components/roborock/test_time.py index ca6507f887b..836a86bd114 100644 --- a/tests/components/roborock/test_time.py +++ b/tests/components/roborock/test_time.py @@ -4,9 +4,11 @@ from datetime import time from unittest.mock import patch import pytest +import roborock from homeassistant.components.time import SERVICE_SET_VALUE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry @@ -38,3 +40,35 @@ async def test_update_success( target={"entity_id": entity_id}, ) assert mock_send_message.assert_called_once + + +@pytest.mark.parametrize( + ("entity_id"), + [ + ("time.roborock_s7_maxv_do_not_disturb_begin"), + ], +) +async def test_update_failure( + hass: HomeAssistant, + bypass_api_fixture, + setup_entry: MockConfigEntry, + entity_id: str, +) -> None: + """Test a failure while setting the time entity.""" + # Ensure that the entity exists, as these tests can pass even if there is no entity. 
+ assert hass.states.get(entity_id) is not None + with ( + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1._send_command", + side_effect=roborock.exceptions.RoborockTimeout, + ) as mock_send_message, + pytest.raises(HomeAssistantError, match="Failed to update Roborock options"), + ): + await hass.services.async_call( + "time", + SERVICE_SET_VALUE, + service_data={"time": time(hour=1, minute=1)}, + blocking=True, + target={"entity_id": entity_id}, + ) + mock_send_message.assert_called_once() diff --git a/tests/components/roborock/test_vacuum.py b/tests/components/roborock/test_vacuum.py index 15a64cbecf3..5080711d0f9 100644 --- a/tests/components/roborock/test_vacuum.py +++ b/tests/components/roborock/test_vacuum.py @@ -24,7 +24,7 @@ from homeassistant.components.vacuum import ( from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from .mock_data import PROP @@ -38,12 +38,17 @@ DEVICE_ID = "abc123" async def test_registry_entries( hass: HomeAssistant, entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, bypass_api_fixture, setup_entry: MockConfigEntry, ) -> None: """Tests devices are registered in the entity registry.""" - entry = entity_registry.async_get(ENTITY_ID) - assert entry.unique_id == DEVICE_ID + entity_entry = entity_registry.async_get(ENTITY_ID) + assert entity_entry.unique_id == DEVICE_ID + + device_entry = device_registry.async_get(entity_entry.device_id) + assert device_entry is not None + assert device_entry.model_id == "roborock.vacuum.a27" @pytest.mark.parametrize( diff --git a/tests/components/roku/test_config_flow.py b/tests/components/roku/test_config_flow.py index 3cf5627f342..57ddf5d51a6 100644 --- a/tests/components/roku/test_config_flow.py +++ b/tests/components/roku/test_config_flow.py @@ -1,13 +1,18 @@ """Test the Roku config flow.""" import dataclasses -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest -from rokuecp import RokuConnectionError +from rokuecp import Device as RokuDevice, RokuConnectionError -from homeassistant.components.roku.const import DOMAIN -from homeassistant.config_entries import SOURCE_HOMEKIT, SOURCE_SSDP, SOURCE_USER +from homeassistant.components.roku.const import CONF_PLAY_MEDIA_APP_ID, DOMAIN +from homeassistant.config_entries import ( + SOURCE_HOMEKIT, + SOURCE_SSDP, + SOURCE_USER, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -23,6 +28,8 @@ from . 
import ( from tests.common import MockConfigEntry +RECONFIGURE_HOST = "192.168.1.190" + async def test_duplicate_error( hass: HomeAssistant, @@ -254,3 +261,78 @@ async def test_ssdp_discovery( assert result["data"] assert result["data"][CONF_HOST] == HOST assert result["data"][CONF_NAME] == UPNP_FRIENDLY_NAME + + +async def test_options_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test options config flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "init" + + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_PLAY_MEDIA_APP_ID: "782875"}, + ) + + assert result2.get("type") is FlowResultType.CREATE_ENTRY + assert result2.get("data") == { + CONF_PLAY_MEDIA_APP_ID: "782875", + } + + +async def _start_reconfigure_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "user" + + return await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + {CONF_HOST: RECONFIGURE_HOST}, + ) + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_roku_config_flow: MagicMock, +) -> None: + """Test reconfigure flow.""" + result = await _start_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.data == { + CONF_HOST: RECONFIGURE_HOST, + } + + +async def test_reconfigure_unique_id_mismatch( + hass: HomeAssistant, + mock_device: RokuDevice, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_roku_config_flow: MagicMock, +) -> None: + """Ensure reconfigure flow aborts when the device changes.""" + mock_device.info.serial_number = "RECONFIG" + + result = await _start_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_device" diff --git a/tests/components/roku/test_init.py b/tests/components/roku/test_init.py index a4fc8477ac3..9c414bcf62a 100644 --- a/tests/components/roku/test_init.py +++ b/tests/components/roku/test_init.py @@ -4,7 +4,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from rokuecp import RokuConnectionError -from homeassistant.components.roku.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -38,12 +37,7 @@ async def test_config_entry_no_unique_id( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.LOADED - assert ( - hass.data[DOMAIN][mock_config_entry.entry_id].device_id - == mock_config_entry.entry_id - ) async def test_load_unload_config_entry( @@ -56,10 +50,9 @@ async def test_load_unload_config_entry( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - assert 
mock_config_entry.entry_id in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.LOADED await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id not in hass.data[DOMAIN] + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/roku/test_media_player.py b/tests/components/roku/test_media_player.py index 9aff8f581d7..5f8a41d16ac 100644 --- a/tests/components/roku/test_media_player.py +++ b/tests/components/roku/test_media_player.py @@ -32,12 +32,12 @@ from homeassistant.components.roku.const import ( ATTR_FORMAT, ATTR_KEYWORD, ATTR_MEDIA_TYPE, + DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN, SERVICE_SEARCH, ) from homeassistant.components.stream import FORMAT_CONTENT_TYPE, HLS_PROVIDER from homeassistant.components.websocket_api import TYPE_RESULT -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, @@ -59,6 +59,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -495,7 +496,7 @@ async def test_services_play_media( blocking=True, ) - assert mock_roku.play_on_roku.call_count == 0 + assert mock_roku.launch.call_count == 0 await hass.services.async_call( MP_DOMAIN, @@ -509,7 +510,7 @@ async def test_services_play_media( blocking=True, ) - assert mock_roku.play_on_roku.call_count == 0 + assert mock_roku.launch.call_count == 0 @pytest.mark.parametrize( @@ -546,9 +547,10 @@ async def test_services_play_media_audio( }, blocking=True, ) - mock_roku.play_on_roku.assert_called_once_with( - content_id, + mock_roku.launch.assert_called_once_with( + DEFAULT_PLAY_MEDIA_APP_ID, { + "u": content_id, "t": "a", "songName": resolved_name, "songFormat": resolved_format, @@ -591,9 +593,11 @@ async def test_services_play_media_video( }, blocking=True, ) - mock_roku.play_on_roku.assert_called_once_with( - content_id, + mock_roku.launch.assert_called_once_with( + DEFAULT_PLAY_MEDIA_APP_ID, { + "u": content_id, + "t": "v", "videoName": resolved_name, "videoFormat": resolved_format, }, @@ -617,10 +621,12 @@ async def test_services_camera_play_stream( blocking=True, ) - assert mock_roku.play_on_roku.call_count == 1 - mock_roku.play_on_roku.assert_called_with( - "https://awesome.tld/api/hls/api_token/master_playlist.m3u8", + assert mock_roku.launch.call_count == 1 + mock_roku.launch.assert_called_with( + DEFAULT_PLAY_MEDIA_APP_ID, { + "u": "https://awesome.tld/api/hls/api_token/master_playlist.m3u8", + "t": "v", "videoName": "Camera Stream", "videoFormat": "hls", }, @@ -653,14 +659,21 @@ async def test_services_play_media_local_source( blocking=True, ) - assert mock_roku.play_on_roku.call_count == 1 - assert mock_roku.play_on_roku.call_args - call_args = mock_roku.play_on_roku.call_args.args - assert "/local/Epic%20Sax%20Guy%2010%20Hours.mp4?authSig=" in call_args[0] - assert call_args[1] == { - "videoFormat": "mp4", - "videoName": "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4", - } + assert mock_roku.launch.call_count == 1 + assert mock_roku.launch.call_args + call_args = mock_roku.launch.call_args.args + assert call_args[0] == DEFAULT_PLAY_MEDIA_APP_ID + assert "u" in call_args[1] + assert 
"/local/Epic%20Sax%20Guy%2010%20Hours.mp4?authSig=" in call_args[1]["u"] + assert "t" in call_args[1] + assert call_args[1]["t"] == "v" + assert "videoFormat" in call_args[1] + assert call_args[1]["videoFormat"] == "mp4" + assert "videoName" in call_args[1] + assert ( + call_args[1]["videoName"] + == "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4" + ) @pytest.mark.parametrize("mock_device", ["roku/rokutv-7820x.json"], indirect=True) diff --git a/tests/components/roomba/test_config_flow.py b/tests/components/roomba/test_config_flow.py index e5f882afa36..dedccc14249 100644 --- a/tests/components/roomba/test_config_flow.py +++ b/tests/components/roomba/test_config_flow.py @@ -8,7 +8,12 @@ from roombapy import RoombaConnectionError, RoombaInfo from homeassistant.components import dhcp, zeroconf from homeassistant.components.roomba import config_flow -from homeassistant.components.roomba.const import CONF_BLID, CONF_CONTINUOUS, DOMAIN +from homeassistant.components.roomba.const import ( + CONF_BLID, + CONF_CONTINUOUS, + DEFAULT_DELAY, + DOMAIN, +) from homeassistant.config_entries import ( SOURCE_DHCP, SOURCE_IGNORE, @@ -206,7 +211,7 @@ async def test_form_user_discovery_and_password_fetch(hass: HomeAssistant) -> No assert result3["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -331,7 +336,7 @@ async def test_form_user_discovery_manual_and_auto_password_fetch( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -468,7 +473,7 @@ async def test_form_user_discovery_no_devices_found_and_auto_password_fetch( assert result3["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -541,7 +546,7 @@ async def test_form_user_discovery_no_devices_found_and_password_fetch_fails( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -677,7 +682,7 @@ async def test_form_user_discovery_and_password_fetch_gets_connection_refused( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -738,7 +743,7 @@ async def test_dhcp_discovery_and_roomba_discovery_finds( assert result2["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -816,7 +821,7 @@ async def test_dhcp_discovery_falls_back_to_manual( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -886,7 +891,7 @@ async def test_dhcp_discovery_no_devices_falls_back_to_manual( assert result3["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -1055,6 +1060,43 @@ async def test_dhcp_discovery_partial_hostname(hass: HomeAssistant) -> None: assert current_flows[0]["flow_id"] == result2["flow_id"] +async def test_dhcp_discovery_when_user_flow_in_progress(hass: HomeAssistant) -> None: + """Test discovery flow when user flow is in progress.""" + + # Start a DHCP flow + with patch( + 
"homeassistant.components.roomba.config_flow.RoombaDiscovery", _mocked_discovery + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + # Start a user flow - unique ID not set + with patch( + "homeassistant.components.roomba.config_flow.RoombaDiscovery", _mocked_discovery + ): + result2 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip=MOCK_IP, + macaddress="aabbccddeeff", + hostname="irobot-blidthatislonger", + ), + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "link" + + current_flows = hass.config_entries.flow.async_progress() + assert len(current_flows) == 2 + + async def test_options_flow( hass: HomeAssistant, ) -> None: @@ -1082,10 +1124,10 @@ async def test_options_flow( result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={CONF_CONTINUOUS: True, CONF_DELAY: 1}, + user_input={CONF_CONTINUOUS: True, CONF_DELAY: DEFAULT_DELAY}, ) await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {CONF_CONTINUOUS: True, CONF_DELAY: 1} - assert config_entry.options == {CONF_CONTINUOUS: True, CONF_DELAY: 1} + assert result["data"] == {CONF_CONTINUOUS: True, CONF_DELAY: DEFAULT_DELAY} + assert config_entry.options == {CONF_CONTINUOUS: True, CONF_DELAY: DEFAULT_DELAY} diff --git a/tests/components/rova/test_config_flow.py b/tests/components/rova/test_config_flow.py index d9d1df3e188..608f4ec105b 100644 --- a/tests/components/rova/test_config_flow.py +++ b/tests/components/rova/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.components.rova.const import ( CONF_ZIP_CODE, DOMAIN, ) -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -167,104 +167,3 @@ async def test_abort_if_api_throws_exception( CONF_HOUSE_NUMBER: HOUSE_NUMBER, CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, } - - -async def test_import(hass: HomeAssistant, mock_rova: MagicMock) -> None: - """Test import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_ZIP_CODE: ZIP_CODE, - CONF_HOUSE_NUMBER: HOUSE_NUMBER, - CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"{ZIP_CODE} {HOUSE_NUMBER} {HOUSE_NUMBER_SUFFIX}" - assert result["data"] == { - CONF_ZIP_CODE: ZIP_CODE, - CONF_HOUSE_NUMBER: HOUSE_NUMBER, - CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, - } - - -async def test_import_already_configured( - hass: HomeAssistant, mock_rova: MagicMock -) -> None: - """Test we abort import flow when entry is already configured.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=f"{ZIP_CODE}{HOUSE_NUMBER}{HOUSE_NUMBER_SUFFIX}", - data={ - CONF_ZIP_CODE: ZIP_CODE, - CONF_HOUSE_NUMBER: HOUSE_NUMBER, - CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, - }, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_ZIP_CODE: ZIP_CODE, - CONF_HOUSE_NUMBER: HOUSE_NUMBER, - CONF_HOUSE_NUMBER_SUFFIX: 
HOUSE_NUMBER_SUFFIX, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_import_if_not_rova_area( - hass: HomeAssistant, mock_rova: MagicMock -) -> None: - """Test we abort if rova does not collect at the given address.""" - - # test with area where rova does not collect - mock_rova.return_value.is_rova_area.return_value = False - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_ZIP_CODE: ZIP_CODE, - CONF_HOUSE_NUMBER: HOUSE_NUMBER, - CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, - }, - ) - - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "invalid_rova_area" - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (ConnectTimeout(), "cannot_connect"), - (HTTPError(), "cannot_connect"), - ], -) -async def test_import_connection_errors( - hass: HomeAssistant, exception: Exception, error: str, mock_rova: MagicMock -) -> None: - """Test import connection errors flow.""" - - # test with HTTPError - mock_rova.return_value.is_rova_area.side_effect = exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_ZIP_CODE: ZIP_CODE, - CONF_HOUSE_NUMBER: HOUSE_NUMBER, - CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, - }, - ) - - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == error diff --git a/tests/components/rpi_power/test_binary_sensor.py b/tests/components/rpi_power/test_binary_sensor.py index 865d7c035b8..a5776a22fb0 100644 --- a/tests/components/rpi_power/test_binary_sensor.py +++ b/tests/components/rpi_power/test_binary_sensor.py @@ -68,6 +68,6 @@ async def test_new_detected( assert state.state == STATE_OFF assert ( binary_sensor.__name__, - logging.INFO, + logging.DEBUG, DESCRIPTION_NORMALIZED, ) in caplog.record_tuples diff --git a/tests/components/rtsp_to_webrtc/test_config_flow.py b/tests/components/rtsp_to_webrtc/test_config_flow.py index 5daf9400396..d3afa80b0b4 100644 --- a/tests/components/rtsp_to_webrtc/test_config_flow.py +++ b/tests/components/rtsp_to_webrtc/test_config_flow.py @@ -7,11 +7,11 @@ from unittest.mock import patch import rtsp_to_webrtc from homeassistant import config_entries -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.rtsp_to_webrtc import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .conftest import ComponentSetup diff --git a/tests/components/rtsp_to_webrtc/test_init.py b/tests/components/rtsp_to_webrtc/test_init.py index 3071c3d9d08..985e76fa1d1 100644 --- a/tests/components/rtsp_to_webrtc/test_init.py +++ b/tests/components/rtsp_to_webrtc/test_init.py @@ -10,10 +10,11 @@ import aiohttp import pytest import rtsp_to_webrtc -from homeassistant.components.rtsp_to_webrtc import CONF_STUN_SERVER, DOMAIN +from homeassistant.components.rtsp_to_webrtc import DOMAIN from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from .conftest import SERVER_URL, STREAM_SOURCE, ComponentSetup @@ -34,15 +35,28 @@ async def 
setup_homeassistant(hass: HomeAssistant): await async_setup_component(hass, "homeassistant", {}) +@pytest.mark.usefixtures("rtsp_to_webrtc_client") async def test_setup_success( - hass: HomeAssistant, rtsp_to_webrtc_client: Any, setup_integration: ComponentSetup + hass: HomeAssistant, + config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, ) -> None: """Test successful setup and unload.""" - await setup_integration() + config_entry.add_to_hass(hass) + + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + assert issue_registry.async_get_issue(DOMAIN, "deprecated") entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 assert entries[0].state is ConfigEntryState.LOADED + await hass.config_entries.async_unload(entries[0].entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert entries[0].state is ConfigEntryState.NOT_LOADED + assert not issue_registry.async_get_issue(DOMAIN, "deprecated") @pytest.mark.parametrize("config_entry_data", [{}]) @@ -87,12 +101,11 @@ async def test_setup_communication_failure( assert entries[0].state is ConfigEntryState.SETUP_RETRY +@pytest.mark.usefixtures("mock_camera", "rtsp_to_webrtc_client") async def test_offer_for_stream_source( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - mock_camera: Any, - rtsp_to_webrtc_client: Any, setup_integration: ComponentSetup, ) -> None: """Test successful response from RTSPtoWebRTC server.""" @@ -104,21 +117,33 @@ async def test_offer_for_stream_source( ) client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( { - "id": 1, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.demo_camera", "offer": OFFER_SDP, } ) + response = await client.receive_json() - assert response.get("id") == 1 - assert response.get("type") == TYPE_RESULT - assert response.get("success") - assert "result" in response - assert response["result"].get("answer") == ANSWER_SDP - assert "error" not in response + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": ANSWER_SDP, + } # Validate request parameters were sent correctly assert len(aioclient_mock.mock_calls) == 1 @@ -128,12 +153,11 @@ async def test_offer_for_stream_source( } +@pytest.mark.usefixtures("mock_camera", "rtsp_to_webrtc_client") async def test_offer_failure( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - mock_camera: Any, - rtsp_to_webrtc_client: Any, setup_integration: ComponentSetup, ) -> None: """Test a transient failure talking to RTSPtoWebRTC server.""" @@ -145,86 +169,31 @@ async def test_offer_failure( ) client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( { - "id": 2, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.demo_camera", "offer": OFFER_SDP, } ) + response = await client.receive_json() - assert response.get("id") == 2 - assert response.get("type") == TYPE_RESULT - assert "success" in 
response - assert not response.get("success") - assert "error" in response - assert response["error"].get("code") == "web_rtc_offer_failed" - assert "message" in response["error"] - assert "RTSPtoWebRTC server communication failure" in response["error"]["message"] + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] - -async def test_no_stun_server( - hass: HomeAssistant, - rtsp_to_webrtc_client: Any, - setup_integration: ComponentSetup, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test successful setup and unload.""" - await setup_integration() - - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 2, - "type": "rtsp_to_webrtc/get_settings", - } - ) + # Session id response = await client.receive_json() - assert response.get("id") == 2 - assert response.get("type") == TYPE_RESULT - assert "result" in response - assert response["result"].get("stun_server") == "" + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" - -@pytest.mark.parametrize( - "config_entry_options", [{CONF_STUN_SERVER: "example.com:1234"}] -) -async def test_stun_server( - hass: HomeAssistant, - rtsp_to_webrtc_client: Any, - setup_integration: ComponentSetup, - config_entry: MockConfigEntry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test successful setup and unload.""" - await setup_integration() - - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 3, - "type": "rtsp_to_webrtc/get_settings", - } - ) + # Answer response = await client.receive_json() - assert response.get("id") == 3 - assert response.get("type") == TYPE_RESULT - assert "result" in response - assert response["result"].get("stun_server") == "example.com:1234" - - # Simulate an options flow change, clearing the stun server and verify the change is reflected - hass.config_entries.async_update_entry(config_entry, options={}) - await hass.async_block_till_done() - - await client.send_json( - { - "id": 4, - "type": "rtsp_to_webrtc/get_settings", - } - ) - response = await client.receive_json() - assert response.get("id") == 4 - assert response.get("type") == TYPE_RESULT - assert "result" in response - assert response["result"].get("stun_server") == "" + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": "RTSPtoWebRTC server communication failure: ", + } diff --git a/tests/components/ruckus_unleashed/__init__.py b/tests/components/ruckus_unleashed/__init__.py index ccbf404cce0..b6c9c86953a 100644 --- a/tests/components/ruckus_unleashed/__init__.py +++ b/tests/components/ruckus_unleashed/__init__.py @@ -1,4 +1,4 @@ -"""Tests for the Ruckus Unleashed integration.""" +"""Tests for the Ruckus integration.""" from __future__ import annotations @@ -78,7 +78,7 @@ DEFAULT_UNIQUEID = DEFAULT_SYSTEM_INFO[API_SYS_SYSINFO][API_SYS_SYSINFO_SERIAL] def mock_config_entry() -> MockConfigEntry: - """Return a Ruckus Unleashed mock config entry.""" + """Return a Ruckus mock config entry.""" return MockConfigEntry( domain=DOMAIN, title=DEFAULT_TITLE, @@ -89,7 +89,7 @@ def mock_config_entry() -> MockConfigEntry: async def init_integration(hass: HomeAssistant) -> MockConfigEntry: - """Set up the Ruckus Unleashed integration in Home Assistant.""" + """Set up the Ruckus integration in Home Assistant.""" entry = mock_config_entry() entry.add_to_hass(hass) # Make device tied 
to other integration so device tracker entities get enabled diff --git a/tests/components/ruckus_unleashed/test_config_flow.py b/tests/components/ruckus_unleashed/test_config_flow.py index 5bfe2d941d5..61f689f3030 100644 --- a/tests/components/ruckus_unleashed/test_config_flow.py +++ b/tests/components/ruckus_unleashed/test_config_flow.py @@ -1,4 +1,4 @@ -"""Test the Ruckus Unleashed config flow.""" +"""Test the config flow.""" from copy import deepcopy from datetime import timedelta @@ -83,15 +83,7 @@ async def test_form_user_reauth(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -121,15 +113,7 @@ async def test_form_user_reauth_different_unique_id(hass: HomeAssistant) -> None entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -161,15 +145,7 @@ async def test_form_user_reauth_invalid_auth(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -201,15 +177,7 @@ async def test_form_user_reauth_cannot_connect(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -241,15 +209,7 @@ async def test_form_user_reauth_general_exception(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/ruckus_unleashed/test_device_tracker.py b/tests/components/ruckus_unleashed/test_device_tracker.py index 79d7c2dfda4..460c64c9651 100644 --- a/tests/components/ruckus_unleashed/test_device_tracker.py +++ b/tests/components/ruckus_unleashed/test_device_tracker.py @@ -1,4 +1,4 @@ -"""The sensor tests for the Ruckus Unleashed platform.""" +"""The sensor tests for the Ruckus platform.""" from datetime import timedelta from unittest.mock import AsyncMock diff --git a/tests/components/ruckus_unleashed/test_init.py b/tests/components/ruckus_unleashed/test_init.py index 8147f040bde..a7514677f20 100644 --- a/tests/components/ruckus_unleashed/test_init.py +++ 
b/tests/components/ruckus_unleashed/test_init.py @@ -1,4 +1,4 @@ -"""Test the Ruckus Unleashed config flow.""" +"""Test the Ruckus config flow.""" from unittest.mock import AsyncMock diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py index 96171071907..d8764285dd3 100644 --- a/tests/components/russound_rio/__init__.py +++ b/tests/components/russound_rio/__init__.py @@ -1 +1,25 @@ """Tests for the Russound RIO integration.""" + +from unittest.mock import AsyncMock + +from aiorussound.models import CallbackType + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +async def mock_state_update( + client: AsyncMock, callback_type: CallbackType = CallbackType.STATE +) -> None: + """Trigger a callback in the media player.""" + for callback in client.register_state_update_callbacks.call_args_list: + await callback[0][0](client, callback_type) diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index a87d0a74fa8..5522c1e6ea2 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -1,16 +1,19 @@ """Test fixtures for Russound RIO integration.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch +from aiorussound import Controller, RussoundTcpConnectionHandler, Source +from aiorussound.rio import ZoneControlSurface +from aiorussound.util import controller_device_str, zone_device_str import pytest from homeassistant.components.russound_rio.const import DOMAIN from homeassistant.core import HomeAssistant -from .const import HARDWARE_MAC, MOCK_CONFIG, MOCK_CONTROLLERS, MODEL +from .const import API_VERSION, HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -25,24 +28,49 @@ def mock_setup_entry(): @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a Russound RIO config entry.""" - entry = MockConfigEntry( + return MockConfigEntry( domain=DOMAIN, data=MOCK_CONFIG, unique_id=HARDWARE_MAC, title=MODEL ) - entry.add_to_hass(hass) - return entry @pytest.fixture -def mock_russound() -> Generator[AsyncMock]: +def mock_russound_client() -> Generator[AsyncMock]: """Mock the Russound RIO client.""" with ( patch( - "homeassistant.components.russound_rio.Russound", autospec=True + "homeassistant.components.russound_rio.RussoundClient", autospec=True ) as mock_client, patch( - "homeassistant.components.russound_rio.config_flow.Russound", - return_value=mock_client, + "homeassistant.components.russound_rio.config_flow.RussoundClient", + new=mock_client, ), ): - mock_client.enumerate_controllers.return_value = MOCK_CONTROLLERS - yield mock_client + client = mock_client.return_value + zones = { + int(k): ZoneControlSurface.from_dict(v) + for k, v in load_json_object_fixture("get_zones.json", DOMAIN).items() + } + client.sources = { + int(k): Source.from_dict(v) + for k, v in load_json_object_fixture("get_sources.json", DOMAIN).items() + } + client.state = load_json_object_fixture("get_state.json", DOMAIN) + for k, v in 
zones.items(): + v.device_str = zone_device_str(1, k) + v.fetch_current_source = Mock( + side_effect=lambda current_source=v.current_source: client.sources.get( + int(current_source) + ) + ) + + client.controllers = { + 1: Controller( + 1, "MCA-C5", client, controller_device_str(1), HARDWARE_MAC, None, zones + ) + } + client.connection_handler = RussoundTcpConnectionHandler(HOST, PORT) + client.is_connected = Mock(return_value=True) + client.unregister_state_update_callbacks.return_value = True + client.rio_version = API_VERSION + + yield client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 527f4fe3377..8f8ae7b59ea 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -2,10 +2,13 @@ from collections import namedtuple +from homeassistant.components.media_player import DOMAIN as MP_DOMAIN + HOST = "127.0.0.1" PORT = 9621 MODEL = "MCA-C5" HARDWARE_MAC = "00:11:22:33:44:55" +API_VERSION = "1.08.00" MOCK_CONFIG = { "host": HOST, @@ -14,3 +17,7 @@ MOCK_CONFIG = { _CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} + +DEVICE_NAME = "mca_c5" +NAME_ZONE_1 = "backyard" +ENTITY_ID_ZONE_1 = f"{MP_DOMAIN}.{DEVICE_NAME}_{NAME_ZONE_1}" diff --git a/tests/components/russound_rio/fixtures/get_sources.json b/tests/components/russound_rio/fixtures/get_sources.json new file mode 100644 index 00000000000..e39d702b8a1 --- /dev/null +++ b/tests/components/russound_rio/fixtures/get_sources.json @@ -0,0 +1,10 @@ +{ + "1": { + "name": "Aux", + "type": "Miscellaneous Audio" + }, + "2": { + "name": "Spotify", + "type": "Russound Media Streamer" + } +} diff --git a/tests/components/russound_rio/fixtures/get_state.json b/tests/components/russound_rio/fixtures/get_state.json new file mode 100644 index 00000000000..931b7611d01 --- /dev/null +++ b/tests/components/russound_rio/fixtures/get_state.json @@ -0,0 +1,75 @@ +{ + "S": { + "3": { + "name": "Streamer", + "type": "Misc Audio" + }, + "2": { + "name": "Liv. Rm TV", + "type": "Misc Audio" + }, + "5": { + "name": "Source 5", + "type": null + }, + "4": { + "name": "Basement TV", + "type": null + }, + "1": { + "name": "Tuner", + "type": "DMS-3.1 Media Streamer", + "channelName": null, + "coverArtURL": null, + "mode": "Unknown", + "shuffleMode": null, + "repeatMode": null, + "volume": "0", + "rating": null, + "playlistName": "Please Wait...", + "artistName": null, + "albumName": null, + "songName": "Connecting to media source." 
+ }, + "6": { + "name": "Source 6", + "type": null + }, + "8": { + "name": "Source 8", + "type": null + }, + "7": { + "name": "Source 7", + "type": null + } + }, + "System": { + "status": "OFF" + }, + "C": { + "1": { + "Z": { + "1": { + "name": "Deck", + "treble": "0", + "balance": "0", + "loudness": "OFF", + "turnOnVolume": "10", + "doNotDisturb": "OFF", + "currentSource": "2", + "volume": "0", + "status": "OFF", + "mute": "OFF", + "partyMode": "OFF", + "bass": "0", + "page": "OFF", + "sharedSource": "OFF", + "sleepTimeRemaining": "0", + "lastError": null, + "enabled_sources": [3, 2] + } + } + } + } +} diff --git a/tests/components/russound_rio/fixtures/get_zones.json b/tests/components/russound_rio/fixtures/get_zones.json new file mode 100644 index 00000000000..396310339b3 --- /dev/null +++ b/tests/components/russound_rio/fixtures/get_zones.json @@ -0,0 +1,22 @@ +{ + "1": { + "name": "Backyard", + "volume": "10", + "status": "ON", + "enabled": "True", + "current_source": "1" + }, + "2": { + "name": "Kitchen", + "volume": "50", + "status": "OFF", + "enabled": "True", + "current_source": "2" + }, + "3": { + "name": "Bedroom", + "volume": "10", + "status": "OFF", + "enabled": "False" + } +} diff --git a/tests/components/russound_rio/snapshots/test_diagnostics.ambr b/tests/components/russound_rio/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..ff3a8bf757f --- /dev/null +++ b/tests/components/russound_rio/snapshots/test_diagnostics.ambr @@ -0,0 +1,81 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'C': dict({ + '1': dict({ + 'Z': dict({ + '1': dict({ + 'balance': '0', + 'bass': '0', + 'currentSource': '2', + 'doNotDisturb': 'OFF', + 'enabled_sources': list([ + 3, + 2, + ]), + 'lastError': None, + 'loudness': 'OFF', + 'mute': 'OFF', + 'name': 'Deck', + 'page': 'OFF', + 'partyMode': 'OFF', + 'sharedSource': 'OFF', + 'sleepTimeRemaining': '0', + 'status': 'OFF', + 'treble': '0', + 'turnOnVolume': '10', + 'volume': '0', + }), + }), + }), + }), + 'S': dict({ + '1': dict({ + 'albumName': None, + 'artistName': None, + 'channelName': None, + 'coverArtURL': None, + 'mode': 'Unknown', + 'name': 'Tuner', + 'playlistName': 'Please Wait...', + 'rating': None, + 'repeatMode': None, + 'shuffleMode': None, + 'songName': 'Connecting to media source.', + 'type': 'DMS-3.1 Media Streamer', + 'volume': '0', + }), + '2': dict({ + 'name': 'Liv. 
Rm TV', + 'type': 'Misc Audio', + }), + '3': dict({ + 'name': 'Streamer', + 'type': 'Misc Audio', + }), + '4': dict({ + 'name': 'Basement TV', + 'type': None, + }), + '5': dict({ + 'name': 'Source 5', + 'type': None, + }), + '6': dict({ + 'name': 'Source 6', + 'type': None, + }), + '7': dict({ + 'name': 'Source 7', + 'type': None, + }), + '8': dict({ + 'name': 'Source 8', + 'type': None, + }), + }), + 'System': dict({ + 'status': 'OFF', + }), + }) +# --- diff --git a/tests/components/russound_rio/snapshots/test_init.ambr b/tests/components/russound_rio/snapshots/test_init.ambr new file mode 100644 index 00000000000..fcd59dd06f7 --- /dev/null +++ b/tests/components/russound_rio/snapshots/test_init.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.1', + 'connections': set({ + tuple( + 'mac', + '00:11:22:33:44:55', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'russound_rio', + '00:11:22:33:44:55', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Russound', + 'model': 'MCA-C5', + 'model_id': None, + 'name': 'MCA-C5', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py index 8bc7bd738a1..28cbf7eda5e 100644 --- a/tests/components/russound_rio/test_config_flow.py +++ b/tests/components/russound_rio/test_config_flow.py @@ -7,11 +7,13 @@ from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import MOCK_CONFIG, MOCK_CONTROLLERS, MODEL +from .const import MOCK_CONFIG, MODEL + +from tests.common import MockConfigEntry async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -29,16 +31,17 @@ async def test_form( assert result["title"] == MODEL assert result["data"] == MOCK_CONFIG assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "00:11:22:33:44:55" async def test_form_cannot_connect( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - mock_russound.connect.side_effect = TimeoutError + mock_russound_client.connect.side_effect = TimeoutError result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_CONFIG, @@ -48,7 +51,7 @@ async def test_form_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} # Recover with correct information - mock_russound.connect.side_effect = None + mock_russound_client.connect.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_CONFIG, @@ -60,39 +63,33 @@ async def test_form_cannot_connect( assert len(mock_setup_entry.mock_calls) == 1 -async def test_no_primary_controller( - hass: HomeAssistant, 
mock_setup_entry: AsyncMock, mock_russound: AsyncMock +async def test_duplicate( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: - """Test we handle no primary controller error.""" - mock_russound.enumerate_controllers.return_value = {} + """Test duplicate flow.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={"source": SOURCE_USER}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - user_input = MOCK_CONFIG - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "no_primary_controller"} - - # Recover with correct information - mock_russound.enumerate_controllers.return_value = MOCK_CONTROLLERS result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_CONFIG, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == MODEL - assert result["data"] == MOCK_CONFIG - assert len(mock_setup_entry.mock_calls) == 1 + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_import( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: """Test we import a config entry.""" result = await hass.config_entries.flow.async_init( @@ -105,13 +102,14 @@ async def test_import( assert result["title"] == MODEL assert result["data"] == MOCK_CONFIG assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "00:11:22:33:44:55" async def test_import_cannot_connect( - hass: HomeAssistant, mock_russound: AsyncMock + hass: HomeAssistant, mock_russound_client: AsyncMock ) -> None: """Test we handle import cannot connect error.""" - mock_russound.connect.side_effect = TimeoutError + mock_russound_client.connect.side_effect = TimeoutError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG @@ -119,17 +117,3 @@ async def test_import_cannot_connect( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" - - -async def test_import_no_primary_controller( - hass: HomeAssistant, mock_russound: AsyncMock -) -> None: - """Test import with no primary controller error.""" - mock_russound.enumerate_controllers.return_value = {} - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_primary_controller" diff --git a/tests/components/russound_rio/test_diagnostics.py b/tests/components/russound_rio/test_diagnostics.py new file mode 100644 index 00000000000..c6c5441128d --- /dev/null +++ b/tests/components/russound_rio/test_diagnostics.py @@ -0,0 +1,29 @@ +"""Tests for the diagnostics data provided by the Russound RIO integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + assert result == snapshot diff --git a/tests/components/russound_rio/test_init.py b/tests/components/russound_rio/test_init.py new file mode 100644 index 00000000000..e7022fa6ac1 --- /dev/null +++ b/tests/components/russound_rio/test_init.py @@ -0,0 +1,66 @@ +"""Tests for the Russound RIO integration.""" + +from unittest.mock import AsyncMock, Mock + +from aiorussound.models import CallbackType +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import mock_state_update, setup_integration + +from tests.common import MockConfigEntry + + +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, +) -> None: + """Test the Russound RIO configuration entry not ready.""" + mock_russound_client.connect.side_effect = TimeoutError + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + mock_russound_client.connect = AsyncMock(return_value=True) + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_russound_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot + + +async def test_disconnect_reconnect_log( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_russound_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test disconnect and reconnect logging.""" + await setup_integration(hass, mock_config_entry) + + mock_russound_client.is_connected = Mock(return_value=False) + await mock_state_update(mock_russound_client, CallbackType.CONNECTION) + assert "Disconnected from device at 127.0.0.1" in caplog.text + + mock_russound_client.is_connected = Mock(return_value=True) + await mock_state_update(mock_russound_client, CallbackType.CONNECTION) + assert "Reconnected to device at 127.0.0.1" in caplog.text diff --git a/tests/components/russound_rio/test_media_player.py b/tests/components/russound_rio/test_media_player.py new file mode 100644 index 00000000000..c740ec4f39e --- /dev/null +++ b/tests/components/russound_rio/test_media_player.py @@ -0,0 +1,52 @@ +"""Tests for the Russound RIO media player.""" + +from unittest.mock import AsyncMock + +from aiorussound.models import PlayStatus +import pytest + +from homeassistant.const import ( + STATE_BUFFERING, + STATE_IDLE, + STATE_OFF, + 
STATE_ON, + STATE_PAUSED, + STATE_PLAYING, +) +from homeassistant.core import HomeAssistant + +from . import mock_state_update, setup_integration +from .const import ENTITY_ID_ZONE_1 + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("zone_status", "source_play_status", "media_player_state"), + [ + (True, None, STATE_ON), + (True, PlayStatus.PLAYING, STATE_PLAYING), + (True, PlayStatus.PAUSED, STATE_PAUSED), + (True, PlayStatus.TRANSITIONING, STATE_BUFFERING), + (True, PlayStatus.STOPPED, STATE_IDLE), + (False, None, STATE_OFF), + (False, PlayStatus.STOPPED, STATE_OFF), + ], +) +async def test_entity_state( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + mock_config_entry: MockConfigEntry, + zone_status: bool, + source_play_status: PlayStatus | None, + media_player_state: str, +) -> None: + """Test media player state.""" + await setup_integration(hass, mock_config_entry) + mock_russound_client.controllers[1].zones[1].status = zone_status + mock_russound_client.sources[1].play_status = source_play_status + await mock_state_update(mock_russound_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID_ZONE_1) + assert state.state == media_player_state diff --git a/tests/components/rympro/test_config_flow.py b/tests/components/rympro/test_config_flow.py index e92b7c23357..7770889bdeb 100644 --- a/tests/components/rympro/test_config_flow.py +++ b/tests/components/rympro/test_config_flow.py @@ -160,17 +160,10 @@ async def test_form_already_exists(hass: HomeAssistant, config_entry) -> None: assert result2["reason"] == "already_configured" -async def test_form_reauth(hass: HomeAssistant, config_entry) -> None: +async def test_form_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test reauthentication.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -203,17 +196,12 @@ async def test_form_reauth(hass: HomeAssistant, config_entry) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_reauth_with_new_account(hass: HomeAssistant, config_entry) -> None: +async def test_form_reauth_with_new_account( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test reauthentication with new account.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/sabnzbd/conftest.py b/tests/components/sabnzbd/conftest.py index b5450e5134f..6fa3d14e880 100644 --- a/tests/components/sabnzbd/conftest.py +++ b/tests/components/sabnzbd/conftest.py @@ -5,6 +5,13 @@ from unittest.mock import AsyncMock, patch import pytest +from homeassistant.components.sabnzbd import DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry, load_json_object_fixture + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -13,3 +20,41 @@ def mock_setup_entry() -> Generator[AsyncMock]: 
"homeassistant.components.sabnzbd.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture(name="sabnzbd", autouse=True) +def mock_sabnzbd() -> Generator[AsyncMock]: + """Mock the Sabnzbd API.""" + with patch( + "homeassistant.components.sabnzbd.helpers.SabnzbdApi", autospec=True + ) as mock_sabnzbd: + mock = mock_sabnzbd.return_value + mock.return_value.check_available = True + mock.queue = load_json_object_fixture("queue.json", DOMAIN) + yield mock + + +@pytest.fixture(name="config_entry") +async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return a MockConfigEntry for testing.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + title="Sabnzbd", + entry_id="01JD2YVVPBC62D620DGYNG2R8H", + data={ + CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", + CONF_URL: "http://localhost:8080", + }, + ) + config_entry.add_to_hass(hass) + + return config_entry + + +@pytest.fixture(name="setup_integration") +async def mock_setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Fixture for setting up the component.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() diff --git a/tests/components/sabnzbd/fixtures/queue.json b/tests/components/sabnzbd/fixtures/queue.json new file mode 100644 index 00000000000..7acef65f2e9 --- /dev/null +++ b/tests/components/sabnzbd/fixtures/queue.json @@ -0,0 +1,39 @@ +{ + "total_size": 1638.4, + "month_size": 38.8, + "week_size": 9.4, + "day_size": 9.4, + "version": "4.3.3", + "paused": true, + "pause_int": "0", + "paused_all": false, + "diskspace1": "444.95", + "diskspace2": "3127.88", + "diskspace1_norm": "445.0 G", + "diskspace2_norm": "3.1 T", + "diskspacetotal1": "465.76", + "diskspacetotal2": "7448.42", + "speedlimit": "85", + "speedlimit_abs": "22282240", + "have_warnings": "1", + "finishaction": null, + "quota": "0 ", + "have_quota": false, + "left_quota": "0 ", + "cache_art": "0", + "cache_size": "0 B", + "kbpersec": "0.00", + "speed": "0 ", + "mbleft": "0.00", + "mb": "0.00", + "sizeleft": "0 B", + "size": "0 B", + "noofslots_total": 0, + "noofslots": 0, + "start": 0, + "limit": 10, + "finish": 10, + "status": "Paused", + "timeleft": "0:00:00", + "slots": [] +} diff --git a/tests/components/sabnzbd/snapshots/test_binary_sensor.ambr b/tests/components/sabnzbd/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..9f3087df3d1 --- /dev/null +++ b/tests/components/sabnzbd/snapshots/test_binary_sensor.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_sensor[binary_sensor.sabnzbd_warnings-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.sabnzbd_warnings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Warnings', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'warnings', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_warnings', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[binary_sensor.sabnzbd_warnings-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Sabnzbd Warnings', + }), + 
'context': , + 'entity_id': 'binary_sensor.sabnzbd_warnings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/sabnzbd/snapshots/test_button.ambr b/tests/components/sabnzbd/snapshots/test_button.ambr new file mode 100644 index 00000000000..9b965e10518 --- /dev/null +++ b/tests/components/sabnzbd/snapshots/test_button.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_button_setup[button.sabnzbd_pause-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.sabnzbd_pause', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pause', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pause', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_pause', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.sabnzbd_pause-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Pause', + }), + 'context': , + 'entity_id': 'button.sabnzbd_pause', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.sabnzbd_resume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.sabnzbd_resume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Resume', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'resume', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_resume', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.sabnzbd_resume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Resume', + }), + 'context': , + 'entity_id': 'button.sabnzbd_resume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/sabnzbd/snapshots/test_number.ambr b/tests/components/sabnzbd/snapshots/test_number.ambr new file mode 100644 index 00000000000..6a370797264 --- /dev/null +++ b/tests/components/sabnzbd/snapshots/test_number.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_number_setup[number.sabnzbd_speedlimit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.sabnzbd_speedlimit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Speedlimit', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'speedlimit', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_speedlimit', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number_setup[number.sabnzbd_speedlimit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Speedlimit', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.sabnzbd_speedlimit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '85', + }) +# --- diff --git a/tests/components/sabnzbd/snapshots/test_sensor.ambr b/tests/components/sabnzbd/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..8b977e69aa6 --- /dev/null +++ b/tests/components/sabnzbd/snapshots/test_sensor.ambr @@ -0,0 +1,576 @@ +# serializer version: 1 +# name: test_sensor[sensor.sabnzbd_daily_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_daily_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily total', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_total', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_day_size', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_daily_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Daily total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_daily_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.4', + }) +# --- +# name: test_sensor[sensor.sabnzbd_free_disk_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_free_disk_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free disk space', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'free_disk_space', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_diskspace1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_free_disk_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Free disk space', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_free_disk_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '444.95', + }) +# --- +# name: test_sensor[sensor.sabnzbd_left_to_download-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_left_to_download', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Left to download', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'left', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_mbleft', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_left_to_download-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Left to download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_left_to_download', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00', + }) +# --- +# name: test_sensor[sensor.sabnzbd_monthly_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_monthly_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly total', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'monthly_total', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_month_size', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_monthly_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Monthly total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_monthly_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38.8', + }) +# --- +# name: test_sensor[sensor.sabnzbd_overall_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_overall_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overall total', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overall_total', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_total_size', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_overall_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Overall total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_overall_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1638.4', + }) +# --- +# name: test_sensor[sensor.sabnzbd_queue-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_queue', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Queue', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'queue', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_mb', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_queue-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Queue', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_queue', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00', + }) +# --- +# name: test_sensor[sensor.sabnzbd_queue_count-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_queue_count', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Queue count', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'queue_count', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_noofslots_total', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sabnzbd_queue_count-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Queue count', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_queue_count', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[sensor.sabnzbd_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'speed', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_kbpersec', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Sabnzbd Speed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[sensor.sabnzbd_status-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sabnzbd_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Status', + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Paused', + }) +# --- +# name: test_sensor[sensor.sabnzbd_total_disk_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_total_disk_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total disk space', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_disk_space', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_diskspacetotal1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_total_disk_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Total disk space', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_total_disk_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '465.76', + }) +# --- +# name: test_sensor[sensor.sabnzbd_weekly_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_weekly_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly total', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'weekly_total', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_week_size', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_weekly_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Weekly total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_weekly_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.4', + }) +# --- diff --git a/tests/components/sabnzbd/test_binary_sensor.py 
b/tests/components/sabnzbd/test_binary_sensor.py new file mode 100644 index 00000000000..48a3c006488 --- /dev/null +++ b/tests/components/sabnzbd/test_binary_sensor.py @@ -0,0 +1,23 @@ +"""Binary sensor tests for the Sabnzbd component.""" + +from unittest.mock import patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@patch("homeassistant.components.sabnzbd.PLATFORMS", [Platform.BINARY_SENSOR]) +async def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test binary sensor setup.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) diff --git a/tests/components/sabnzbd/test_button.py b/tests/components/sabnzbd/test_button.py new file mode 100644 index 00000000000..199d8eb03a0 --- /dev/null +++ b/tests/components/sabnzbd/test_button.py @@ -0,0 +1,116 @@ +"""Button tests for the SABnzbd component.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pysabnzbd import SabnzbdApiException +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@patch("homeassistant.components.sabnzbd.PLATFORMS", [Platform.BUTTON]) +async def test_button_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test button setup.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("button", "called_function"), + [("resume", "resume_queue"), ("pause", "pause_queue")], +) +@pytest.mark.usefixtures("setup_integration") +async def test_button_presses( + hass: HomeAssistant, + sabnzbd: AsyncMock, + button: str, + called_function: str, +) -> None: + """Test the sabnzbd button presses.""" + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.sabnzbd_{button}", + }, + blocking=True, + ) + + function = getattr(sabnzbd, called_function) + function.assert_called_once() + + +@pytest.mark.parametrize( + ("button", "called_function"), + [("resume", "resume_queue"), ("pause", "pause_queue")], +) +@pytest.mark.usefixtures("setup_integration") +async def test_buttons_exception( + hass: HomeAssistant, + sabnzbd: AsyncMock, + button: str, + called_function: str, +) -> None: + """Test the button handles errors.""" + function = getattr(sabnzbd, called_function) + function.side_effect = SabnzbdApiException("Boom") + + with pytest.raises( + HomeAssistantError, + match="Unable to send command to SABnzbd due to a connection error, try again later", + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: 
f"button.sabnzbd_{button}", + }, + blocking=True, + ) + + function.assert_called_once() + + +@pytest.mark.parametrize( + "button", + ["resume", "pause"], +) +@pytest.mark.usefixtures("setup_integration") +async def test_buttons_unavailable( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + sabnzbd: AsyncMock, + button: str, +) -> None: + """Test the button is unavailable when coordinator can't update data.""" + state = hass.states.get(f"button.sabnzbd_{button}") + assert state + assert state.state == STATE_UNKNOWN + + sabnzbd.refresh_data.side_effect = Exception("Boom") + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(f"button.sabnzbd_{button}") + assert state + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/sabnzbd/test_config_flow.py b/tests/components/sabnzbd/test_config_flow.py index 7f5394902b4..797af63c096 100644 --- a/tests/components/sabnzbd/test_config_flow.py +++ b/tests/components/sabnzbd/test_config_flow.py @@ -1,38 +1,24 @@ """Define tests for the Sabnzbd config flow.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock from pysabnzbd import SabnzbdApiException import pytest from homeassistant import config_entries from homeassistant.components.sabnzbd import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_NAME, - CONF_PORT, - CONF_SSL, - CONF_URL, -) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_API_KEY, CONF_URL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + VALID_CONFIG = { - CONF_NAME: "Sabnzbd", CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", CONF_URL: "http://localhost:8080", } -VALID_CONFIG_OLD = { - CONF_NAME: "Sabnzbd", - CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", - CONF_HOST: "localhost", - CONF_PORT: 8080, - CONF_SSL: False, -} - pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -44,57 +30,140 @@ async def test_create_entry(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch( - "homeassistant.components.sabnzbd.sab.SabnzbdApi.check_available", - return_value=True, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - VALID_CONFIG, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "edc3eee7330e" - assert result2["data"] == { - CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", - CONF_NAME: "Sabnzbd", - CONF_URL: "http://localhost:8080", - } - assert len(mock_setup_entry.mock_calls) == 1 + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "localhost" + assert result["data"] == { + CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", + CONF_URL: "http://localhost:8080", + } + assert len(mock_setup_entry.mock_calls) == 1 -async def test_auth_error(hass: HomeAssistant) -> None: - """Test that the user step fails.""" - with patch( - "homeassistant.components.sabnzbd.sab.SabnzbdApi.check_available", - side_effect=SabnzbdApiException("Some error"), - ): - result = await hass.config_entries.flow.async_init( - 
DOMAIN, - context={"source": SOURCE_USER}, - data=VALID_CONFIG, - ) +async def test_auth_error(hass: HomeAssistant, sabnzbd: AsyncMock) -> None: + """Test when the user step fails and if we can recover.""" + sabnzbd.check_available.side_effect = SabnzbdApiException("Some error") - assert result["errors"] == {"base": "cannot_connect"} + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data=VALID_CONFIG, + ) + + assert result["errors"] == {"base": "cannot_connect"} + + # reset side effect and check if we can recover + sabnzbd.check_available.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + await hass.async_block_till_done() + + assert "errors" not in result + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "localhost" + assert result["data"] == { + CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", + CONF_URL: "http://localhost:8080", + } -async def test_import_flow(hass: HomeAssistant) -> None: - """Test the import configuration flow.""" - with patch( - "homeassistant.components.sabnzbd.sab.SabnzbdApi.check_available", - return_value=True, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=VALID_CONFIG_OLD, - ) +async def test_reconfigure_successful( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test reconfiguring a SABnzbd entry.""" + result = await config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "edc3eee7330e" - assert result["data"][CONF_NAME] == "Sabnzbd" - assert result["data"][CONF_API_KEY] == "edc3eee7330e4fdda04489e3fbc283d0" - assert result["data"][CONF_HOST] == "localhost" - assert result["data"][CONF_PORT] == 8080 - assert result["data"][CONF_SSL] is False + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_URL: "http://10.10.10.10:8080", CONF_API_KEY: "new_key"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert config_entry.data == { + CONF_URL: "http://10.10.10.10:8080", + CONF_API_KEY: "new_key", + } + + +async def test_reconfigure_error( + hass: HomeAssistant, config_entry: MockConfigEntry, sabnzbd: AsyncMock +) -> None: + """Test reconfiguring a SABnzbd entry.""" + result = await config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + # set side effect and check if error is handled + sabnzbd.check_available.side_effect = SabnzbdApiException("Some error") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_URL: "http://10.10.10.10:8080", CONF_API_KEY: "new_key"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + + # reset side effect and check if we can recover + sabnzbd.check_available.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_URL: "http://10.10.10.10:8080", CONF_API_KEY: "new_key"}, + ) + + assert "errors" not in result + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert config_entry.data == { + CONF_URL: 
"http://10.10.10.10:8080", + CONF_API_KEY: "new_key", + } + + +async def test_abort_already_configured( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test that the flow aborts if SABnzbd instance is already configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_abort_reconfigure_already_configured( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test that the reconfigure flow aborts if SABnzbd instance is already configured.""" + result = await config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/sabnzbd/test_init.py b/tests/components/sabnzbd/test_init.py index e666f9f1d3e..9b833875bbc 100644 --- a/tests/components/sabnzbd/test_init.py +++ b/tests/components/sabnzbd/test_init.py @@ -1,77 +1,42 @@ """Tests for the SABnzbd Integration.""" -from unittest.mock import patch +import pytest -from homeassistant.components.sabnzbd import DEFAULT_NAME, DOMAIN, OLD_SENSOR_KEYS -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_URL -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er - -from tests.common import MockConfigEntry - -MOCK_ENTRY_ID = "mock_entry_id" - -MOCK_UNIQUE_ID = "someuniqueid" - -MOCK_DEVICE_ID = "somedeviceid" - -MOCK_DATA_VERSION_1 = { - CONF_API_KEY: "api_key", - CONF_URL: "http://127.0.0.1:8080", - CONF_NAME: "name", -} - -MOCK_ENTRY_VERSION_1 = MockConfigEntry( - domain=DOMAIN, data=MOCK_DATA_VERSION_1, entry_id=MOCK_ENTRY_ID, version=1 +from homeassistant.components.sabnzbd.const import ( + ATTR_API_KEY, + DOMAIN, + SERVICE_PAUSE, + SERVICE_RESUME, + SERVICE_SET_SPEED, ) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir -async def test_unique_id_migrate( +@pytest.mark.parametrize( + ("service", "issue_id"), + [ + (SERVICE_RESUME, "resume_action_deprecated"), + (SERVICE_PAUSE, "pause_action_deprecated"), + (SERVICE_SET_SPEED, "set_speed_action_deprecated"), + ], +) +@pytest.mark.usefixtures("setup_integration") +async def test_deprecated_service_creates_issue( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + service: str, + issue_id: str, ) -> None: - """Test that config flow entry is migrated correctly.""" - # Start with the config entry at Version 1. 
- mock_entry = MOCK_ENTRY_VERSION_1 - mock_entry.add_to_hass(hass) - - mock_d_entry = device_registry.async_get_or_create( - config_entry_id=mock_entry.entry_id, - identifiers={(DOMAIN, DOMAIN)}, - name=DEFAULT_NAME, - entry_type=dr.DeviceEntryType.SERVICE, + """Test that deprecated actions creates an issue.""" + await hass.services.async_call( + DOMAIN, + service, + {ATTR_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0"}, + blocking=True, ) - entity_id_sensor_key = [] - - for sensor_key in OLD_SENSOR_KEYS: - mock_entity_id = f"{SENSOR_DOMAIN}.{DOMAIN}_{sensor_key}" - entity_registry.async_get_or_create( - SENSOR_DOMAIN, - DOMAIN, - unique_id=sensor_key, - config_entry=mock_entry, - device_id=mock_d_entry.id, - ) - entity = entity_registry.async_get(mock_entity_id) - assert entity.entity_id == mock_entity_id - assert entity.unique_id == sensor_key - entity_id_sensor_key.append((mock_entity_id, sensor_key)) - - with patch( - "homeassistant.components.sabnzbd.sab.SabnzbdApi.check_available", - return_value=True, - ): - await hass.config_entries.async_setup(mock_entry.entry_id) - - await hass.async_block_till_done() - - for mock_entity_id, sensor_key in entity_id_sensor_key: - entity = entity_registry.async_get(mock_entity_id) - assert entity.unique_id == f"{MOCK_ENTRY_ID}_{sensor_key}" - - assert device_registry.async_get(mock_d_entry.id).identifiers == { - (DOMAIN, MOCK_ENTRY_ID) - } + issue = issue_registry.async_get_issue(domain=DOMAIN, issue_id=issue_id) + assert issue + assert issue.severity == ir.IssueSeverity.WARNING + assert issue.breaks_in_ha_version == "2025.6" diff --git a/tests/components/sabnzbd/test_number.py b/tests/components/sabnzbd/test_number.py new file mode 100644 index 00000000000..61f7ea45ab1 --- /dev/null +++ b/tests/components/sabnzbd/test_number.py @@ -0,0 +1,123 @@ +"""Number tests for the SABnzbd component.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pysabnzbd import SabnzbdApiException +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@patch("homeassistant.components.sabnzbd.PLATFORMS", [Platform.NUMBER]) +async def test_number_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test number setup.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("number", "input_number", "called_function", "expected_state"), + [ + ("speedlimit", 50.0, "set_speed_limit", 50), + ], +) +@pytest.mark.usefixtures("setup_integration") +async def test_number_set( + hass: HomeAssistant, + sabnzbd: AsyncMock, + number: str, + input_number: float, + called_function: str, + expected_state: str, +) -> None: + """Test the sabnzbd number set.""" + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_VALUE: input_number, + ATTR_ENTITY_ID: f"number.sabnzbd_{number}", + }, + blocking=True, + ) + + function = 
getattr(sabnzbd, called_function) + function.assert_called_with(int(input_number)) + + +@pytest.mark.parametrize( + ("number", "input_number", "called_function"), + [("speedlimit", 55.0, "set_speed_limit")], +) +@pytest.mark.usefixtures("setup_integration") +async def test_number_exception( + hass: HomeAssistant, + sabnzbd: AsyncMock, + number: str, + input_number: float, + called_function: str, +) -> None: + """Test the number entity handles errors.""" + function = getattr(sabnzbd, called_function) + function.side_effect = SabnzbdApiException("Boom") + + with pytest.raises( + HomeAssistantError, + match="Unable to send command to SABnzbd due to a connection error, try again later", + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_VALUE: input_number, + ATTR_ENTITY_ID: f"number.sabnzbd_{number}", + }, + blocking=True, + ) + + function.assert_called_once() + + +@pytest.mark.parametrize( + ("number", "initial_state"), + [("speedlimit", "85")], +) +@pytest.mark.usefixtures("setup_integration") +async def test_number_unavailable( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + sabnzbd: AsyncMock, + number: str, + initial_state: str, +) -> None: + """Test the number is unavailable when coordinator can't update data.""" + state = hass.states.get(f"number.sabnzbd_{number}") + assert state + assert state.state == initial_state + + sabnzbd.refresh_data.side_effect = Exception("Boom") + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(f"number.sabnzbd_{number}") + assert state + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/sabnzbd/test_sensor.py b/tests/components/sabnzbd/test_sensor.py new file mode 100644 index 00000000000..31c0868a5a7 --- /dev/null +++ b/tests/components/sabnzbd/test_sensor.py @@ -0,0 +1,25 @@ +"""Sensor tests for the Sabnzbd component.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@patch("homeassistant.components.sabnzbd.PLATFORMS", [Platform.SENSOR]) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test sensor setup.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) diff --git a/tests/components/samsungtv/snapshots/test_init.ambr b/tests/components/samsungtv/snapshots/test_init.ambr index 061b5bc1836..017a2bc3e60 100644 --- a/tests/components/samsungtv/snapshots/test_init.ambr +++ b/tests/components/samsungtv/snapshots/test_init.ambr @@ -72,7 +72,7 @@ }), 'manufacturer': None, 'model': '82GXARRS', - 'model_id': None, + 'model_id': '82GXARRS', 'name': 'fake', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/samsungtv/test_config_flow.py b/tests/components/samsungtv/test_config_flow.py index 6c325ae3b04..eb78332b7b3 100644 --- a/tests/components/samsungtv/test_config_flow.py +++ b/tests/components/samsungtv/test_config_flow.py @@ -14,14 +14,11 @@ from samsungtvws.exceptions import ( UnauthorizedError, ) from websockets import frames -from websockets.exceptions 
import ( - ConnectionClosedError, - WebSocketException, - WebSocketProtocolError, -) +from websockets.exceptions import ConnectionClosedError, WebSocketException from homeassistant import config_entries from homeassistant.components import dhcp, ssdp, zeroconf +from homeassistant.components.samsungtv.config_flow import SamsungTVConfigFlow from homeassistant.components.samsungtv.const import ( CONF_MANUFACTURER, CONF_SESSION_ID, @@ -56,7 +53,7 @@ from homeassistant.const import ( CONF_TOKEN, ) from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.data_entry_flow import BaseServiceInfo, FlowResultType from homeassistant.setup import async_setup_component from .const import ( @@ -400,7 +397,7 @@ async def test_user_websocket_not_supported(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.samsungtv.bridge.SamsungTVWSAsyncRemote.open", - side_effect=WebSocketProtocolError("Boom"), + side_effect=WebSocketException("Boom"), ), ): # websocket device not supported @@ -783,12 +780,12 @@ async def test_ssdp_websocket_cannot_connect(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.samsungtv.bridge.SamsungTVEncryptedWSAsyncRemote.start_listening", - side_effect=WebSocketProtocolError("Boom"), + side_effect=WebSocketException("Boom"), ), patch( "homeassistant.components.samsungtv.bridge.SamsungTVWSAsyncRemote", ) as remotews, - patch.object(remotews, "open", side_effect=WebSocketProtocolError("Boom")), + patch.object(remotews, "open", side_effect=WebSocketException("Boom")), ): # device not supported result = await hass.config_entries.flow.async_init( @@ -982,6 +979,78 @@ async def test_dhcp_wired(hass: HomeAssistant, rest_api: Mock) -> None: assert result["result"].unique_id == "be9554b9-c9fb-41f4-8920-22da015376a4" +@pytest.mark.usefixtures("remotews", "rest_api_non_ssl_only", "remoteencws_failing") +@pytest.mark.parametrize( + ("source1", "data1", "source2", "data2", "is_matching_result"), + [ + ( + config_entries.SOURCE_DHCP, + MOCK_DHCP_DATA, + config_entries.SOURCE_DHCP, + MOCK_DHCP_DATA, + True, + ), + ( + config_entries.SOURCE_DHCP, + MOCK_DHCP_DATA, + config_entries.SOURCE_ZEROCONF, + MOCK_ZEROCONF_DATA, + False, + ), + ( + config_entries.SOURCE_ZEROCONF, + MOCK_ZEROCONF_DATA, + config_entries.SOURCE_DHCP, + MOCK_DHCP_DATA, + False, + ), + ( + config_entries.SOURCE_ZEROCONF, + MOCK_ZEROCONF_DATA, + config_entries.SOURCE_ZEROCONF, + MOCK_ZEROCONF_DATA, + True, + ), + ], +) +async def test_dhcp_zeroconf_already_in_progress( + hass: HomeAssistant, + source1: str, + data1: BaseServiceInfo, + source2: str, + data2: BaseServiceInfo, + is_matching_result: bool, +) -> None: + """Test starting a flow from dhcp or zeroconf when already in progress.""" + # confirm to add the entry + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": source1}, data=data1 + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + real_is_matching = SamsungTVConfigFlow.is_matching + return_values = [] + + def is_matching(self, other_flow) -> bool: + return_values.append(real_is_matching(self, other_flow)) + return return_values[-1] + + with patch.object( + SamsungTVConfigFlow, "is_matching", wraps=is_matching, autospec=True + ): + # confirm to add the entry + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": source2}, data=data2 + ) + await hass.async_block_till_done() + assert result["type"] 
is FlowResultType.ABORT + assert result["reason"] == RESULT_ALREADY_IN_PROGRESS + # Ensure the is_matching method returned the expected value + assert return_values == [is_matching_result] + + @pytest.mark.usefixtures("remotews", "rest_api", "remoteencws_failing") async def test_zeroconf(hass: HomeAssistant) -> None: """Test starting a flow from zeroconf.""" @@ -1666,7 +1735,7 @@ async def test_update_legacy_missing_mac_from_dhcp_no_unique_id( ), patch( "homeassistant.components.samsungtv.bridge.SamsungTVEncryptedWSAsyncRemote.start_listening", - side_effect=WebSocketProtocolError("Boom"), + side_effect=WebSocketException("Boom"), ), ): result = await hass.config_entries.flow.async_init( @@ -1749,11 +1818,7 @@ async def test_form_reauth_legacy(hass: HomeAssistant) -> None: """Test reauthenticate legacy.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_OLD_ENTRY) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1773,11 +1838,7 @@ async def test_form_reauth_websocket(hass: HomeAssistant) -> None: entry.add_to_hass(hass) assert entry.state is ConfigEntryState.NOT_LOADED - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1798,11 +1859,7 @@ async def test_form_reauth_websocket_cannot_connect( """Test reauthenticate websocket when we cannot connect on the first attempt.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_ENTRYDATA_WS) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1830,11 +1887,7 @@ async def test_form_reauth_websocket_not_supported(hass: HomeAssistant) -> None: """Test reauthenticate websocket when the device is not supported.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_ENTRYDATA_WS) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1863,11 +1916,7 @@ async def test_form_reauth_encrypted(hass: HomeAssistant) -> None: entry.add_to_hass(hass) assert entry.state is ConfigEntryState.NOT_LOADED - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/samsungtv/test_device_trigger.py b/tests/components/samsungtv/test_device_trigger.py index acc7ecb904d..fa6efd08076 100644 --- a/tests/components/samsungtv/test_device_trigger.py +++ b/tests/components/samsungtv/test_device_trigger.py @@ -7,7 +7,8 @@ from homeassistant.components.device_automation import DeviceAutomationType from 
homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, ) -from homeassistant.components.samsungtv import DOMAIN, device_trigger +from homeassistant.components.samsungtv import device_trigger +from homeassistant.components.samsungtv.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError diff --git a/tests/components/samsungtv/test_diagnostics.py b/tests/components/samsungtv/test_diagnostics.py index b1bdf034bc1..e8e0b699a7e 100644 --- a/tests/components/samsungtv/test_diagnostics.py +++ b/tests/components/samsungtv/test_diagnostics.py @@ -42,6 +42,7 @@ async def test_entry_diagnostics( "token": REDACTED, }, "disabled_by": None, + "discovery_keys": {}, "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, @@ -50,6 +51,7 @@ async def test_entry_diagnostics( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -81,6 +83,7 @@ async def test_entry_diagnostics_encrypted( "session_id": REDACTED, }, "disabled_by": None, + "discovery_keys": {}, "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, @@ -89,6 +92,7 @@ async def test_entry_diagnostics_encrypted( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -119,6 +123,7 @@ async def test_entry_diagnostics_encrypte_offline( "session_id": REDACTED, }, "disabled_by": None, + "discovery_keys": {}, "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, @@ -127,6 +132,7 @@ async def test_entry_diagnostics_encrypte_offline( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, diff --git a/tests/components/samsungtv/test_media_player.py b/tests/components/samsungtv/test_media_player.py index ef7e58251e8..1a7c8713b17 100644 --- a/tests/components/samsungtv/test_media_player.py +++ b/tests/components/samsungtv/test_media_player.py @@ -76,7 +76,8 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceNotSupported +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . 
import async_wait_config_entry_reload, setup_samsungtv_entry @@ -1021,8 +1022,9 @@ async def test_turn_on_wol(hass: HomeAssistant) -> None: async def test_turn_on_without_turnon(hass: HomeAssistant, remote: Mock) -> None: """Test turn on.""" + await async_setup_component(hass, "homeassistant", {}) await setup_samsungtv_entry(hass, MOCK_CONFIG) - with pytest.raises(HomeAssistantError, match="does not support this service"): + with pytest.raises(ServiceNotSupported, match="does not support action"): await hass.services.async_call( MP_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) diff --git a/tests/components/samsungtv/test_trigger.py b/tests/components/samsungtv/test_trigger.py index 8076ceb2807..e1d26043bb0 100644 --- a/tests/components/samsungtv/test_trigger.py +++ b/tests/components/samsungtv/test_trigger.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from homeassistant.components import automation -from homeassistant.components.samsungtv import DOMAIN +from homeassistant.components.samsungtv.const import DOMAIN from homeassistant.const import SERVICE_RELOAD, SERVICE_TURN_ON from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr diff --git a/tests/components/schedule/test_init.py b/tests/components/schedule/test_init.py index 7cd59f19033..18346122bfd 100644 --- a/tests/components/schedule/test_init.py +++ b/tests/components/schedule/test_init.py @@ -12,6 +12,7 @@ import pytest from homeassistant.components.schedule import STORAGE_VERSION, STORAGE_VERSION_MINOR from homeassistant.components.schedule.const import ( ATTR_NEXT_EVENT, + CONF_DATA, CONF_FRIDAY, CONF_FROM, CONF_MONDAY, @@ -66,13 +67,21 @@ def schedule_setup( CONF_NAME: "from storage", CONF_ICON: "mdi:party-popper", CONF_FRIDAY: [ - {CONF_FROM: "17:00:00", CONF_TO: "23:59:59"}, + { + CONF_FROM: "17:00:00", + CONF_TO: "23:59:59", + CONF_DATA: {"party_level": "epic"}, + }, ], CONF_SATURDAY: [ {CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}, ], CONF_SUNDAY: [ - {CONF_FROM: "00:00:00", CONF_TO: "24:00:00"}, + { + CONF_FROM: "00:00:00", + CONF_TO: "24:00:00", + CONF_DATA: {"entry": "VIPs only"}, + }, ], } ] @@ -95,9 +104,21 @@ def schedule_setup( CONF_TUESDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], CONF_WEDNESDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], CONF_THURSDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], - CONF_FRIDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], + CONF_FRIDAY: [ + { + CONF_FROM: "00:00:00", + CONF_TO: "23:59:59", + CONF_DATA: {"party_level": "epic"}, + } + ], CONF_SATURDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], - CONF_SUNDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], + CONF_SUNDAY: [ + { + CONF_FROM: "00:00:00", + CONF_TO: "23:59:59", + CONF_DATA: {"entry": "VIPs only"}, + } + ], } } } @@ -557,13 +578,13 @@ async def test_ws_list( assert len(result) == 1 assert result["from_storage"][ATTR_NAME] == "from storage" assert result["from_storage"][CONF_FRIDAY] == [ - {CONF_FROM: "17:00:00", CONF_TO: "23:59:59"} + {CONF_FROM: "17:00:00", CONF_TO: "23:59:59", CONF_DATA: {"party_level": "epic"}} ] assert result["from_storage"][CONF_SATURDAY] == [ {CONF_FROM: "00:00:00", CONF_TO: "23:59:59"} ] assert result["from_storage"][CONF_SUNDAY] == [ - {CONF_FROM: "00:00:00", CONF_TO: "24:00:00"} + {CONF_FROM: "00:00:00", CONF_TO: "24:00:00", CONF_DATA: {"entry": "VIPs only"}} ] assert "from_yaml" not in result diff --git a/tests/components/schedule/test_recorder.py 
b/tests/components/schedule/test_recorder.py index a7410472a44..85aef3e1990 100644 --- a/tests/components/schedule/test_recorder.py +++ b/tests/components/schedule/test_recorder.py @@ -4,6 +4,7 @@ from __future__ import annotations from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.recorder.history import get_significant_states @@ -18,8 +19,11 @@ from tests.components.recorder.common import async_wait_recording_done @pytest.mark.usefixtures("recorder_mock", "enable_custom_integrations") -async def test_exclude_attributes(hass: HomeAssistant) -> None: +async def test_exclude_attributes( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test attributes to be excluded.""" + freezer.move_to("2024-08-02 06:30:00-07:00") # Before Friday event now = dt_util.utcnow() assert await async_setup_component( hass, @@ -33,9 +37,13 @@ async def test_exclude_attributes(hass: HomeAssistant) -> None: "tuesday": [{"from": "2:00", "to": "3:00"}], "wednesday": [{"from": "3:00", "to": "4:00"}], "thursday": [{"from": "5:00", "to": "6:00"}], - "friday": [{"from": "7:00", "to": "8:00"}], + "friday": [ + {"from": "7:00", "to": "8:00", "data": {"party_level": "epic"}} + ], "saturday": [{"from": "9:00", "to": "10:00"}], - "sunday": [{"from": "11:00", "to": "12:00"}], + "sunday": [ + {"from": "11:00", "to": "12:00", "data": {"entry": "VIPs only"}} + ], } } }, @@ -48,8 +56,25 @@ async def test_exclude_attributes(hass: HomeAssistant) -> None: assert state.attributes[ATTR_ICON] assert state.attributes[ATTR_NEXT_EVENT] + # Move to during Friday event + freezer.move_to("2024-08-02 07:30:00-07:00") + async_fire_time_changed(hass, fire_all=True) await hass.async_block_till_done() - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) + state = hass.states.get("schedule.test") + assert "entry" not in state.attributes + assert state.attributes["party_level"] == "epic" + + # Move to during Sunday event + freezer.move_to("2024-08-04 11:30:00-07:00") + async_fire_time_changed(hass, fire_all=True) + await hass.async_block_till_done() + state = hass.states.get("schedule.test") + assert "party_level" not in state.attributes + assert state.attributes["entry"] == "VIPs only" + + await hass.async_block_till_done() + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) await hass.async_block_till_done() await async_wait_recording_done(hass) @@ -63,3 +88,5 @@ async def test_exclude_attributes(hass: HomeAssistant) -> None: assert ATTR_FRIENDLY_NAME in state.attributes assert ATTR_ICON in state.attributes assert ATTR_NEXT_EVENT not in state.attributes + assert "entry" not in state.attributes + assert "party_level" not in state.attributes diff --git a/tests/components/schlage/__init__.py b/tests/components/schlage/__init__.py index c6cd3fec0bc..613621b2fb8 100644 --- a/tests/components/schlage/__init__.py +++ b/tests/components/schlage/__init__.py @@ -1 +1,7 @@ """Tests for the Schlage integration.""" + +from homeassistant.components.schlage.coordinator import SchlageDataUpdateCoordinator + +from tests.common import MockConfigEntry + +type MockSchlageConfigEntry = MockConfigEntry[SchlageDataUpdateCoordinator] diff --git a/tests/components/schlage/conftest.py b/tests/components/schlage/conftest.py index 9d61bb877d9..6695191dcf0 100644 --- a/tests/components/schlage/conftest.py +++ b/tests/components/schlage/conftest.py @@ -1,6 +1,7 @@ """Common fixtures for the Schlage tests.""" from collections.abc import Generator 
+from typing import Any from unittest.mock import AsyncMock, Mock, create_autospec, patch from pyschlage.lock import Lock @@ -10,11 +11,13 @@ from homeassistant.components.schlage.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant +from . import MockSchlageConfigEntry + from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry() -> MockConfigEntry: +def mock_config_entry() -> MockSchlageConfigEntry: """Mock ConfigEntry.""" return MockConfigEntry( title="asdf@asdf.com", @@ -30,11 +33,11 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture async def mock_added_config_entry( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, mock_lock: Mock, -) -> MockConfigEntry: +) -> MockSchlageConfigEntry: """Mock ConfigEntry that's been added to HA.""" mock_schlage.locks.return_value = [mock_lock] mock_schlage.users.return_value = [] @@ -70,21 +73,28 @@ def mock_pyschlage_auth() -> Mock: @pytest.fixture -def mock_lock() -> Mock: +def mock_lock(mock_lock_attrs: dict[str, Any]) -> Mock: """Mock Lock fixture.""" mock_lock = create_autospec(Lock) - mock_lock.configure_mock( - device_id="test", - name="Vault Door", - model_name="", - is_locked=False, - is_jammed=False, - battery_level=20, - firmware_version="1.0", - lock_and_leave_enabled=True, - beeper_enabled=True, - ) + mock_lock.configure_mock(**mock_lock_attrs) mock_lock.logs.return_value = [] mock_lock.last_changed_by.return_value = "thumbturn" mock_lock.keypad_disabled.return_value = False return mock_lock + + +@pytest.fixture +def mock_lock_attrs() -> dict[str, Any]: + """Attributes for a mock lock.""" + return { + "device_id": "test", + "name": "Vault Door", + "model_name": "", + "is_locked": False, + "is_jammed": False, + "battery_level": 20, + "auto_lock_time": 15, + "firmware_version": "1.0", + "lock_and_leave_enabled": True, + "beeper_enabled": True, + } diff --git a/tests/components/schlage/snapshots/test_init.ambr b/tests/components/schlage/snapshots/test_init.ambr new file mode 100644 index 00000000000..c7049443ab7 --- /dev/null +++ b/tests/components/schlage/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_lock_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'schlage', + 'test', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Schlage', + 'model': '', + 'model_id': None, + 'name': 'Vault Door', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/schlage/test_binary_sensor.py b/tests/components/schlage/test_binary_sensor.py index 97f11577b86..a073097f755 100644 --- a/tests/components/schlage/test_binary_sensor.py +++ b/tests/components/schlage/test_binary_sensor.py @@ -3,37 +3,48 @@ from datetime import timedelta from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory from pyschlage.exceptions import UnknownError from homeassistant.components.binary_sensor import BinarySensorDeviceClass -from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_ON from homeassistant.core import 
HomeAssistant -from homeassistant.util.dt import utcnow + +from . import MockSchlageConfigEntry from tests.common import async_fire_time_changed async def test_keypad_disabled_binary_sensor( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_schlage: Mock, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test the keypad_disabled binary_sensor.""" mock_lock.keypad_disabled.reset_mock() mock_lock.keypad_disabled.return_value = True # Make the coordinator refresh data. - async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) keypad = hass.states.get("binary_sensor.vault_door_keypad_disabled") assert keypad is not None - assert keypad.state == "on" + assert keypad.state == STATE_ON assert keypad.attributes["device_class"] == BinarySensorDeviceClass.PROBLEM mock_lock.keypad_disabled.assert_called_once_with([]) async def test_keypad_disabled_binary_sensor_use_previous_logs_on_failure( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_schlage: Mock, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test the keypad_disabled binary_sensor.""" mock_lock.keypad_disabled.reset_mock() @@ -42,12 +53,13 @@ async def test_keypad_disabled_binary_sensor_use_previous_logs_on_failure( mock_lock.logs.side_effect = UnknownError("Cannot load logs") # Make the coordinator refresh data. - async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) keypad = hass.states.get("binary_sensor.vault_door_keypad_disabled") assert keypad is not None - assert keypad.state == "on" + assert keypad.state == STATE_ON assert keypad.attributes["device_class"] == BinarySensorDeviceClass.PROBLEM mock_lock.keypad_disabled.assert_called_once_with([]) diff --git a/tests/components/schlage/test_config_flow.py b/tests/components/schlage/test_config_flow.py index 15ef3858c0c..3161ebe4097 100644 --- a/tests/components/schlage/test_config_flow.py +++ b/tests/components/schlage/test_config_flow.py @@ -10,13 +10,25 @@ from homeassistant.components.schlage.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . 
import MockSchlageConfigEntry + from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + "username", + [ + "test-username", + "TEST-USERNAME", + ], +) async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_pyschlage_auth: Mock + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_pyschlage_auth: Mock, + username: str, ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -28,7 +40,7 @@ async def test_form( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", + "username": username, "password": "test-password", }, ) @@ -44,6 +56,32 @@ async def test_form( assert len(mock_setup_entry.mock_calls) == 1 +async def test_form_requires_unique_id( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pyschlage_auth: Mock, +) -> None: + """Test entries have unique ids.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert init_result["type"] is FlowResultType.FORM + assert init_result["errors"] == {} + + create_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + { + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + + mock_pyschlage_auth.authenticate.assert_called_once_with() + assert create_result["type"] is FlowResultType.ABORT + assert create_result["reason"] == "already_configured" + + async def test_form_invalid_auth( hass: HomeAssistant, mock_pyschlage_auth: Mock ) -> None: @@ -85,8 +123,7 @@ async def test_form_unknown(hass: HomeAssistant, mock_pyschlage_auth: Mock) -> N async def test_reauth( hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, + mock_added_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, ) -> None: """Test reauth flow.""" @@ -94,8 +131,7 @@ async def test_reauth( await hass.async_block_till_done() flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - [result] = flows + result = flows[-1] assert result["step_id"] == "reauth_confirm" result2 = await hass.config_entries.flow.async_configure( @@ -111,12 +147,11 @@ async def test_reauth( "username": "asdf@asdf.com", "password": "new-password", } - assert len(mock_setup_entry.mock_calls) == 1 async def test_reauth_invalid_auth( hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, mock_setup_entry: AsyncMock, mock_pyschlage_auth: Mock, ) -> None: @@ -144,7 +179,7 @@ async def test_reauth_invalid_auth( async def test_reauth_wrong_account( hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, mock_setup_entry: AsyncMock, mock_pyschlage_auth: Mock, ) -> None: diff --git a/tests/components/schlage/test_diagnostics.py b/tests/components/schlage/test_diagnostics.py index 15b2316bf38..0b0dc856c1a 100644 --- a/tests/components/schlage/test_diagnostics.py +++ b/tests/components/schlage/test_diagnostics.py @@ -4,7 +4,8 @@ from unittest.mock import Mock from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry +from . 
import MockSchlageConfigEntry + from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -12,7 +13,7 @@ from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - mock_added_config_entry: MockConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, mock_lock: Mock, ) -> None: """Test Schlage diagnostics.""" diff --git a/tests/components/schlage/test_init.py b/tests/components/schlage/test_init.py index 0fe7af1982b..57a139e582e 100644 --- a/tests/components/schlage/test_init.py +++ b/tests/components/schlage/test_init.py @@ -1,14 +1,23 @@ """Tests for the Schlage integration.""" -from unittest.mock import Mock, patch +from typing import Any +from unittest.mock import Mock, create_autospec, patch +from freezegun.api import FrozenDateTimeFactory from pycognito.exceptions import WarrantException from pyschlage.exceptions import Error, NotAuthorizedError +from pyschlage.lock import Lock +from syrupy.assertion import SnapshotAssertion +from homeassistant.components.schlage.const import DOMAIN, UPDATE_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +import homeassistant.helpers.device_registry as dr +from homeassistant.helpers.device_registry import DeviceRegistry -from tests.common import MockConfigEntry +from . import MockSchlageConfigEntry + +from tests.common import async_fire_time_changed @patch( @@ -16,7 +25,7 @@ from tests.common import MockConfigEntry side_effect=WarrantException, ) async def test_auth_failed( - mock_auth: Mock, hass: HomeAssistant, mock_config_entry: MockConfigEntry + mock_auth: Mock, hass: HomeAssistant, mock_config_entry: MockSchlageConfigEntry ) -> None: """Test failed auth on setup.""" mock_config_entry.add_to_hass(hass) @@ -29,7 +38,7 @@ async def test_auth_failed( async def test_update_data_fails( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, ) -> None: @@ -45,7 +54,7 @@ async def test_update_data_fails( async def test_update_data_auth_error( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, ) -> None: @@ -61,7 +70,7 @@ async def test_update_data_auth_error( async def test_update_data_get_logs_auth_error( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, mock_lock: Mock, @@ -80,7 +89,7 @@ async def test_update_data_get_logs_auth_error( async def test_load_unload_config_entry( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry: MockSchlageConfigEntry, mock_pyschlage_auth: Mock, mock_schlage: Mock, ) -> None: @@ -94,3 +103,74 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_lock_device_registry( + hass: HomeAssistant, + device_registry: DeviceRegistry, + mock_added_config_entry: MockSchlageConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test lock is added to device registry.""" + device = device_registry.async_get_device(identifiers={(DOMAIN, "test")}) + assert device == snapshot + + +async def test_auto_add_device( + hass: 
HomeAssistant, + device_registry: DeviceRegistry, + mock_added_config_entry: MockSchlageConfigEntry, + mock_schlage: Mock, + mock_lock: Mock, + mock_lock_attrs: dict[str, Any], + freezer: FrozenDateTimeFactory, +) -> None: + """Test new devices are auto-added to the device registry.""" + device = device_registry.async_get_device(identifiers={(DOMAIN, "test")}) + assert device is not None + all_devices = dr.async_entries_for_config_entry( + device_registry, mock_added_config_entry.entry_id + ) + assert len(all_devices) == 1 + + mock_lock_attrs["device_id"] = "test2" + new_mock_lock = create_autospec(Lock) + new_mock_lock.configure_mock(**mock_lock_attrs) + mock_schlage.locks.return_value = [mock_lock, new_mock_lock] + + # Make the coordinator refresh data. + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + new_device = device_registry.async_get_device(identifiers={(DOMAIN, "test2")}) + assert new_device is not None + + all_devices = dr.async_entries_for_config_entry( + device_registry, mock_added_config_entry.entry_id + ) + assert len(all_devices) == 2 + + +async def test_auto_remove_device( + hass: HomeAssistant, + device_registry: DeviceRegistry, + mock_added_config_entry: MockSchlageConfigEntry, + mock_schlage: Mock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test new devices are auto-added to the device registry.""" + assert device_registry.async_get_device(identifiers={(DOMAIN, "test")}) is not None + + mock_schlage.locks.return_value = [] + + # Make the coordinator refresh data. + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert device_registry.async_get_device(identifiers={(DOMAIN, "test")}) is None + all_devices = dr.async_entries_for_config_entry( + device_registry, mock_added_config_entry.entry_id + ) + assert len(all_devices) == 0 diff --git a/tests/components/schlage/test_lock.py b/tests/components/schlage/test_lock.py index 6c06f124693..6a3bb799213 100644 --- a/tests/components/schlage/test_lock.py +++ b/tests/components/schlage/test_lock.py @@ -3,31 +3,45 @@ from datetime import timedelta from unittest.mock import Mock -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN -from homeassistant.config_entries import ConfigEntry +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState from homeassistant.const import ATTR_ENTITY_ID, SERVICE_LOCK, SERVICE_UNLOCK from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from homeassistant.util.dt import utcnow + +from . 
import MockSchlageConfigEntry from tests.common import async_fire_time_changed -async def test_lock_device_registry( +async def test_lock_attributes( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_added_config_entry: ConfigEntry, + mock_added_config_entry: MockSchlageConfigEntry, + mock_schlage: Mock, + mock_lock: Mock, + freezer: FrozenDateTimeFactory, ) -> None: - """Test lock is added to device registry.""" - device = device_registry.async_get_device(identifiers={("schlage", "test")}) - assert device.model == "" - assert device.sw_version == "1.0" - assert device.name == "Vault Door" - assert device.manufacturer == "Schlage" + """Test lock attributes.""" + lock = hass.states.get("lock.vault_door") + assert lock is not None + assert lock.state == LockState.UNLOCKED + assert lock.attributes["changed_by"] == "thumbturn" + + mock_lock.is_locked = False + mock_lock.is_jammed = True + # Make the coordinator refresh data. + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + lock = hass.states.get("lock.vault_door") + assert lock is not None + assert lock.state == LockState.JAMMED async def test_lock_services( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, ) -> None: """Test lock services.""" await hass.services.async_call( @@ -52,16 +66,20 @@ async def test_lock_services( async def test_changed_by( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test population of the changed_by attribute.""" mock_lock.last_changed_by.reset_mock() mock_lock.last_changed_by.return_value = "access code - foo" # Make the coordinator refresh data. - async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - mock_lock.last_changed_by.assert_called_once_with() + mock_lock.last_changed_by.assert_called_with() lock_device = hass.states.get("lock.vault_door") assert lock_device is not None diff --git a/tests/components/schlage/test_select.py b/tests/components/schlage/test_select.py new file mode 100644 index 00000000000..59ff065d449 --- /dev/null +++ b/tests/components/schlage/test_select.py @@ -0,0 +1,34 @@ +"""Test Schlage select.""" + +from unittest.mock import Mock + +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from . 
import MockSchlageConfigEntry + + +async def test_select( + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, +) -> None: + """Test the auto-lock time select entity.""" + entity_id = "select.vault_door_auto_lock_time" + + select = hass.states.get(entity_id) + assert select is not None + assert select.state == "15" + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "30"}, + blocking=True, + ) + mock_lock.set_auto_lock_time.assert_called_once_with(30) diff --git a/tests/components/schlage/test_sensor.py b/tests/components/schlage/test_sensor.py index 2c0cabbb1e8..9a489f6ff73 100644 --- a/tests/components/schlage/test_sensor.py +++ b/tests/components/schlage/test_sensor.py @@ -1,27 +1,14 @@ """Test schlage sensor.""" from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -async def test_sensor_device_registry( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_added_config_entry: ConfigEntry, -) -> None: - """Test sensor is added to device registry.""" - device = device_registry.async_get_device(identifiers={("schlage", "test")}) - assert device.model == "" - assert device.sw_version == "1.0" - assert device.name == "Vault Door" - assert device.manufacturer == "Schlage" +from . import MockSchlageConfigEntry async def test_battery_sensor( - hass: HomeAssistant, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, mock_added_config_entry: MockSchlageConfigEntry ) -> None: """Test the battery sensor.""" battery_sensor = hass.states.get("sensor.vault_door_battery") diff --git a/tests/components/schlage/test_switch.py b/tests/components/schlage/test_switch.py index f1cded3ce22..fc5acc4399f 100644 --- a/tests/components/schlage/test_switch.py +++ b/tests/components/schlage/test_switch.py @@ -3,27 +3,16 @@ from unittest.mock import Mock from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -async def test_switch_device_registry( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_added_config_entry: ConfigEntry, -) -> None: - """Test switch is added to device registry.""" - device = device_registry.async_get_device(identifiers={("schlage", "test")}) - assert device.model == "" - assert device.sw_version == "1.0" - assert device.name == "Vault Door" - assert device.manufacturer == "Schlage" +from . 
import MockSchlageConfigEntry async def test_beeper_services( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, ) -> None: """Test BeeperSwitch services.""" await hass.services.async_call( @@ -49,7 +38,9 @@ async def test_beeper_services( async def test_lock_and_leave_services( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: MockSchlageConfigEntry, ) -> None: """Test LockAndLeaveSwitch services.""" await hass.services.async_call( diff --git a/tests/components/screenlogic/snapshots/test_diagnostics.ambr b/tests/components/screenlogic/snapshots/test_diagnostics.ambr index 534c77223d6..c7db7a33959 100644 --- a/tests/components/screenlogic/snapshots/test_diagnostics.ambr +++ b/tests/components/screenlogic/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'port': 80, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'screenlogic', 'entry_id': 'screenlogictest', 'minor_version': 1, @@ -16,6 +18,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Pentair: DD-EE-FF', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git a/tests/components/screenlogic/test_services.py b/tests/components/screenlogic/test_services.py index 0fc79fad0e5..8a414ba2596 100644 --- a/tests/components/screenlogic/test_services.py +++ b/tests/components/screenlogic/test_services.py @@ -18,11 +18,9 @@ from homeassistant.components.screenlogic.const import ( SERVICE_STOP_SUPER_CHLORINATION, ) from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_AREA_ID, ATTR_DEVICE_ID, ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr -from homeassistant.util import slugify from . import ( DATA_FULL_CHEM, @@ -102,22 +100,6 @@ async def setup_screenlogic_services_fixture( }, None, ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_AREA_ID: MOCK_DEVICE_AREA, - }, - ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_ENTITY_ID: f"{Platform.SENSOR}.{slugify(f'{MOCK_ADAPTER_NAME} Air Temperature')}", - }, - ), ], ) async def test_service_set_color_mode( @@ -148,30 +130,6 @@ async def test_service_set_color_mode( mocked_async_set_color_lights.assert_awaited_once() -async def test_service_set_color_mode_with_device( - hass: HomeAssistant, - service_fixture: dict[str, Any], -) -> None: - """Test set_color_mode service with a device target.""" - mocked_async_set_color_lights: AsyncMock = service_fixture["gateway"][ - "async_set_color_lights" - ] - - assert hass.services.has_service(DOMAIN, SERVICE_SET_COLOR_MODE) - - sl_device: dr.DeviceEntry = service_fixture["device"] - - await hass.services.async_call( - DOMAIN, - SERVICE_SET_COLOR_MODE, - service_data={ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower()}, - blocking=True, - target={ATTR_DEVICE_ID: sl_device.id}, - ) - - mocked_async_set_color_lights.assert_awaited_once() - - @pytest.mark.parametrize( ("data", "target", "error_msg"), [ @@ -193,36 +151,6 @@ async def test_service_set_color_mode_with_device( f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. 
Config entry " "'test' is not a screenlogic config", ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_AREA_ID: "invalidareaid", - }, - f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. Config entry for " - "target not found", - ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_DEVICE_ID: "invaliddeviceid", - }, - f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. Config entry for " - "target not found", - ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_ENTITY_ID: "sensor.invalidentityid", - }, - f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. Config entry for " - "target not found", - ), ], ) async def test_service_set_color_mode_error( diff --git a/tests/components/script/test_blueprint.py b/tests/components/script/test_blueprint.py index aef22b93bcf..7f03a89c548 100644 --- a/tests/components/script/test_blueprint.py +++ b/tests/components/script/test_blueprint.py @@ -9,7 +9,11 @@ from unittest.mock import patch import pytest from homeassistant.components import script -from homeassistant.components.blueprint.models import Blueprint, DomainBlueprints +from homeassistant.components.blueprint import ( + BLUEPRINT_SCHEMA, + Blueprint, + DomainBlueprints, +) from homeassistant.config_entries import ConfigEntryState from homeassistant.core import Context, HomeAssistant, callback from homeassistant.helpers import device_registry as dr, template @@ -33,7 +37,10 @@ def patch_blueprint(blueprint_path: str, data_path: str) -> Iterator[None]: return orig_load(self, path) return Blueprint( - yaml.load_yaml(data_path), expected_domain=self.domain, path=path + yaml.load_yaml(data_path), + expected_domain=self.domain, + path=path, + schema=BLUEPRINT_SCHEMA, ) with patch( @@ -109,7 +116,6 @@ async def test_confirmable_notification( assert len(mock_call_action.mock_calls) == 1 _hass, config, variables, _context = mock_call_action.mock_calls[0][1] - template.attach(hass, config) rendered_config = template.render_complex(config, variables) assert rendered_config == { diff --git a/tests/components/search/test_init.py b/tests/components/search/test_init.py index 9b2b959e0dd..2c00c3bf6f2 100644 --- a/tests/components/search/test_init.py +++ b/tests/components/search/test_init.py @@ -250,7 +250,7 @@ async def test_search( { "id": "unique_id", "alias": "blueprint_automation_1", - "trigger": {"platform": "template", "value_template": "true"}, + "triggers": {"platform": "template", "value_template": "true"}, "use_blueprint": { "path": "test_event_service.yaml", "input": { @@ -262,7 +262,7 @@ async def test_search( }, { "alias": "blueprint_automation_2", - "trigger": {"platform": "template", "value_template": "true"}, + "triggers": {"platform": "template", "value_template": "true"}, "use_blueprint": { "path": "test_event_service.yaml", "input": { diff --git a/tests/components/sense/__init__.py b/tests/components/sense/__init__.py index bf0a87737b9..d604bcba737 100644 --- a/tests/components/sense/__init__.py +++ b/tests/components/sense/__init__.py @@ -1 +1,23 @@ """Tests for the Sense integration.""" + +from unittest.mock import patch + +from homeassistant.components.sense.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, platform: Platform +) -> MockConfigEntry: + """Set up the 
Sense platform.""" + config_entry.add_to_hass(hass) + + with patch("homeassistant.components.sense.PLATFORMS", [platform]): + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/sense/conftest.py b/tests/components/sense/conftest.py new file mode 100644 index 00000000000..7cf1626f40e --- /dev/null +++ b/tests/components/sense/conftest.py @@ -0,0 +1,84 @@ +"""Common methods for Sense.""" + +from __future__ import annotations + +from collections.abc import Generator +import datetime +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch + +import pytest +from sense_energy import Scale + +from homeassistant.components.sense.binary_sensor import SenseDevice +from homeassistant.components.sense.const import DOMAIN + +from .const import ( + DEVICE_1_DAY_ENERGY, + DEVICE_1_ID, + DEVICE_1_NAME, + DEVICE_1_POWER, + DEVICE_2_DAY_ENERGY, + DEVICE_2_ID, + DEVICE_2_NAME, + DEVICE_2_POWER, + MOCK_CONFIG, + MONITOR_ID, +) + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.sense.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def config_entry() -> MockConfigEntry: + """Mock sense config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=MOCK_CONFIG, + unique_id="test-email", + ) + + +@pytest.fixture +def mock_sense() -> Generator[MagicMock]: + """Mock an ASyncSenseable object with a split foundation.""" + with patch("homeassistant.components.sense.ASyncSenseable", autospec=True) as mock: + gateway = mock.return_value + gateway.sense_monitor_id = MONITOR_ID + gateway.get_monitor_data.return_value = None + gateway.update_realtime.return_value = None + gateway.fetch_devices.return_value = None + gateway.update_trend_data.return_value = None + + type(gateway).active_power = PropertyMock(return_value=100) + type(gateway).active_solar_power = PropertyMock(return_value=500) + type(gateway).active_voltage = PropertyMock(return_value=[120, 240]) + gateway.get_stat.return_value = 15 + gateway.trend_start.return_value = datetime.datetime.fromisoformat( + "2024-01-01 01:01:00+00:00" + ) + + device_1 = SenseDevice(DEVICE_1_ID) + device_1.name = DEVICE_1_NAME + device_1.icon = "car" + device_1.is_on = False + device_1.power_w = DEVICE_1_POWER + device_1.energy_kwh[Scale.DAY] = DEVICE_1_DAY_ENERGY + + device_2 = SenseDevice(DEVICE_2_ID) + device_2.name = DEVICE_2_NAME + device_2.icon = "stove" + device_2.is_on = False + device_2.power_w = DEVICE_2_POWER + device_2.energy_kwh[Scale.DAY] = DEVICE_2_DAY_ENERGY + type(gateway).devices = PropertyMock(return_value=[device_1, device_2]) + + yield gateway diff --git a/tests/components/sense/const.py b/tests/components/sense/const.py new file mode 100644 index 00000000000..d040c0bc38c --- /dev/null +++ b/tests/components/sense/const.py @@ -0,0 +1,29 @@ +"""Cosntants for the Sense integration tests.""" + +MONITOR_ID = "456" + +MOCK_CONFIG = { + "timeout": 6, + "email": "test-email", + "password": "test-password", + "access_token": "ABC", + "user_id": "123", + "monitor_id": MONITOR_ID, + "device_id": "789", + "refresh_token": "XYZ", +} + + +DEVICE_1_NAME = "Car" +DEVICE_1_ID = "abc123" +DEVICE_1_ICON = "car-electric" +DEVICE_1_POWER = 100.0 +DEVICE_1_DAY_ENERGY = 500 + +DEVICE_2_NAME = "Oven" +DEVICE_2_ID = "def456" +DEVICE_2_ICON = "stove" +DEVICE_2_POWER = 50.0 
+DEVICE_2_DAY_ENERGY = 42 + +MONITOR_ID = "12345" diff --git a/tests/components/sense/snapshots/test_binary_sensor.ambr b/tests/components/sense/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..339830b16d3 --- /dev/null +++ b/tests/components/sense/snapshots/test_binary_sensor.ambr @@ -0,0 +1,99 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.car_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.car_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Power', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-abc123', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.car_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Car Power', + 'icon': 'mdi:car-electric', + }), + 'context': , + 'entity_id': 'binary_sensor.car_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.oven_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.oven_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Power', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-def456', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.oven_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Oven Power', + 'icon': 'mdi:stove', + }), + 'context': , + 'entity_id': 'binary_sensor.oven_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/sense/snapshots/test_sensor.ambr b/tests/components/sense/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..4a3507880a1 --- /dev/null +++ b/tests/components/sense/snapshots/test_sensor.ambr @@ -0,0 +1,2680 @@ +# serializer version: 1 +# name: test_sensors[sensor.car_bill_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_bill_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 
'mdi:car-electric', + 'original_name': 'Bill energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bill_energy', + 'unique_id': '12345-abc123-bill-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_bill_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Bill energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_bill_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.car_daily_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_daily_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Daily energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_energy', + 'unique_id': '12345-abc123-daily-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_daily_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Daily energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_daily_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '500', + }) +# --- +# name: test_sensors[sensor.car_monthly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_monthly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Monthly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'monthly_energy', + 'unique_id': '12345-abc123-monthly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_monthly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Monthly energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_monthly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.car_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Power', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-abc123-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Car Power', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100.0', + }) +# --- +# name: test_sensors[sensor.car_weekly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_weekly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Weekly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'weekly_energy', + 'unique_id': '12345-abc123-weekly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_weekly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Weekly energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_weekly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.car_yearly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_yearly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Yearly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yearly_energy', + 'unique_id': '12345-abc123-yearly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_yearly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Yearly energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_yearly_energy', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.oven_bill_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_bill_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Bill energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bill_energy', + 'unique_id': '12345-def456-bill-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_bill_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Bill energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_bill_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.oven_daily_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_daily_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Daily energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_energy', + 'unique_id': '12345-def456-daily-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_daily_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Daily energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_daily_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '42', + }) +# --- +# name: test_sensors[sensor.oven_monthly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_monthly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Monthly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'monthly_energy', + 'unique_id': '12345-def456-monthly-energy', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[sensor.oven_monthly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Monthly energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_monthly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.oven_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Power', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-def456-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Oven Power', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.0', + }) +# --- +# name: test_sensors[sensor.oven_weekly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_weekly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Weekly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'weekly_energy', + 'unique_id': '12345-def456-weekly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_weekly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Weekly energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_weekly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.oven_yearly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_yearly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 
'original_name': 'Yearly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yearly_energy', + 'unique_id': '12345-def456-yearly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_yearly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Yearly energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_yearly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill Energy', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, 
+ 'entity_id': 'sensor.sense_12345_bill_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Bill Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Bill Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- 
+# name: test_sensors[sensor.sense_12345_bill_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Bill Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Bill Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_to_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily Energy', 
+ 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': 
None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Daily Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Daily Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_to_grid', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-active-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Sense 12345 Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[sensor.sense_12345_l1_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_l1_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'L1 Voltage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-L1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_l1_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'voltage', + 'friendly_name': 'Sense 12345 L1 Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_l1_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '120', + }) +# --- +# name: test_sensors[sensor.sense_12345_l2_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_l2_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'L2 Voltage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-L2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_l2_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'voltage', + 'friendly_name': 'Sense 12345 L2 Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_l2_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '240', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly Energy', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: 
test_sensors[sensor.sense_12345_monthly_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Monthly Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Monthly Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Monthly Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Monthly Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_to_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-active-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Sense 12345 Production', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '500', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly Energy', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.sense_12345_weekly_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Weekly Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Weekly Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Weekly Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Weekly Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_to_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yearly Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 
'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly Energy', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yearly From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yearly Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Yearly Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Yearly Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yearly Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yearly Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Yearly Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_to_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yearly To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- diff --git a/tests/components/sense/test_binary_sensor.py b/tests/components/sense/test_binary_sensor.py new file mode 100644 index 00000000000..ae91b7a9a21 --- /dev/null +++ b/tests/components/sense/test_binary_sensor.py @@ -0,0 +1,68 @@ +"""The tests for Sense binary sensor platform.""" + +from datetime import timedelta +from unittest.mock import MagicMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.sense.const import ACTIVE_UPDATE_RATE +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util.dt import utcnow + +from . 
import setup_platform +from .const import DEVICE_1_NAME, DEVICE_2_NAME + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_binary_sensors( + hass: HomeAssistant, + mock_sense: MagicMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Sense binary sensors against snapshots.""" + await setup_platform(hass, config_entry, Platform.BINARY_SENSOR) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +async def test_on_off_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense binary sensors.""" + await setup_platform(hass, config_entry, BINARY_SENSOR_DOMAIN) + device_1, device_2 = mock_sense.devices + + state = hass.states.get(f"binary_sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == STATE_OFF + + state = hass.states.get(f"binary_sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == STATE_OFF + + device_1.is_on = True + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"binary_sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == STATE_ON + + state = hass.states.get(f"binary_sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == STATE_OFF + + device_1.is_on = False + device_2.is_on = True + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"binary_sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == STATE_OFF + + state = hass.states.get(f"binary_sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == STATE_ON diff --git a/tests/components/sense/test_config_flow.py b/tests/components/sense/test_config_flow.py index e564603ea87..acef82dd0ba 100644 --- a/tests/components/sense/test_config_flow.py +++ b/tests/components/sense/test_config_flow.py @@ -16,18 +16,9 @@ from homeassistant.const import CONF_CODE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry +from .const import MOCK_CONFIG -MOCK_CONFIG = { - "timeout": 6, - "email": "test-email", - "password": "test-password", - "access_token": "ABC", - "user_id": "123", - "monitor_id": "456", - "device_id": "789", - "refresh_token": "XYZ", -} +from tests.common import MockConfigEntry @pytest.fixture(name="mock_sense") @@ -268,9 +259,7 @@ async def test_reauth_no_form(hass: HomeAssistant, mock_sense) -> None: "homeassistant.config_entries.ConfigEntries.async_reload", return_value=True, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=MOCK_CONFIG - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" @@ -288,9 +277,7 @@ async def test_reauth_password(hass: HomeAssistant, mock_sense) -> None: mock_sense.return_value.authenticate.side_effect = SenseAuthenticationException # Reauth success without user input - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM mock_sense.return_value.authenticate.side_effect = None diff --git
a/tests/components/sense/test_sensor.py b/tests/components/sense/test_sensor.py new file mode 100644 index 00000000000..d43b422ec38 --- /dev/null +++ b/tests/components/sense/test_sensor.py @@ -0,0 +1,234 @@ +"""The tests for Sense sensor platform.""" + +from datetime import timedelta +from unittest.mock import MagicMock, PropertyMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from sense_energy import Scale +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.sense.const import ACTIVE_UPDATE_RATE, TREND_UPDATE_RATE +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util.dt import utcnow + +from . import setup_platform +from .const import ( + DEVICE_1_DAY_ENERGY, + DEVICE_1_NAME, + DEVICE_2_DAY_ENERGY, + DEVICE_2_NAME, + DEVICE_2_POWER, + MONITOR_ID, +) + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + mock_sense: MagicMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Sense sensors against snapshots.""" + await setup_platform(hass, config_entry, Platform.SENSOR) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +async def test_device_power_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense device power sensors.""" + device_1, device_2 = mock_sense.devices + device_1.power_w = 0 + device_2.power_w = 0 + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + device_1, device_2 = mock_sense.devices + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == "0" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == "0" + + device_2.power_w = DEVICE_2_POWER + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == "0" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == f"{DEVICE_2_POWER:.1f}" + + +async def test_device_energy_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Sense device energy sensors.""" + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + device_1, device_2 = mock_sense.devices + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_daily_energy") + assert state.state == f"{DEVICE_1_DAY_ENERGY:.0f}" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_daily_energy") + assert state.state == f"{DEVICE_2_DAY_ENERGY:.0f}" + + device_1.energy_kwh[Scale.DAY] = 0 + device_2.energy_kwh[Scale.DAY] = 0 + freezer.tick(timedelta(seconds=TREND_UPDATE_RATE)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_daily_energy") + assert state.state == "0" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_daily_energy") + assert state.state == "0" + + device_2.energy_kwh[Scale.DAY] = DEVICE_1_DAY_ENERGY +
freezer.tick(timedelta(seconds=TREND_UPDATE_RATE)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_daily_energy") + assert state.state == "0" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_daily_energy") + assert state.state == f"{DEVICE_1_DAY_ENERGY:.0f}" + + +async def test_voltage_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense voltage sensors.""" + + type(mock_sense).active_voltage = PropertyMock(return_value=[120, 121]) + + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l1_voltage") + assert state.state == "120" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l2_voltage") + assert state.state == "121" + + type(mock_sense).active_voltage = PropertyMock(return_value=[122, 123]) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l1_voltage") + assert state.state == "122" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l2_voltage") + assert state.state == "123" + + +async def test_active_power_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense active power sensors.""" + + type(mock_sense).active_power = PropertyMock(return_value=400) + type(mock_sense).active_solar_power = PropertyMock(return_value=500) + + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_energy") + assert state.state == "400" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_production") + assert state.state == "500" + + type(mock_sense).active_power = PropertyMock(return_value=600) + type(mock_sense).active_solar_power = PropertyMock(return_value=700) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_energy") + assert state.state == "600" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_production") + assert state.state == "700" + + +async def test_trend_energy_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense trend energy sensors.""" + mock_sense.get_stat.side_effect = lambda sensor_type, variant: { + (Scale.DAY, "usage"): 100, + (Scale.DAY, "production"): 200, + (Scale.DAY, "from_grid"): 300, + (Scale.DAY, "to_grid"): 400, + (Scale.DAY, "net_production"): 500, + (Scale.DAY, "production_pct"): 600, + (Scale.DAY, "solar_powered"): 700, + }.get((sensor_type, variant), 0) + + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_energy") + assert state.state == "100" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_production") + assert state.state == "200" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_from_grid") + assert state.state == "300" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_to_grid") + assert state.state == "400" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_net_production") + assert state.state == "500" + + mock_sense.get_stat.side_effect = lambda sensor_type, variant: { +
(Scale.DAY, "usage"): 1000, + (Scale.DAY, "production"): 2000, + (Scale.DAY, "from_grid"): 3000, + (Scale.DAY, "to_grid"): 4000, + (Scale.DAY, "net_production"): 5000, + (Scale.DAY, "production_pct"): 6000, + (Scale.DAY, "solar_powered"): 7000, + }.get((sensor_type, variant), 0) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=600)) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_energy") + assert state.state == "1000" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_production") + assert state.state == "2000" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_from_grid") + assert state.state == "3000" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_to_grid") + assert state.state == "4000" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_net_production") + assert state.state == "5000" diff --git a/tests/components/sensibo/conftest.py b/tests/components/sensibo/conftest.py index 1c835cd8001..eaa42e47257 100644 --- a/tests/components/sensibo/conftest.py +++ b/tests/components/sensibo/conftest.py @@ -10,8 +10,9 @@ from pysensibo import SensiboClient from pysensibo.model import SensiboData import pytest -from homeassistant.components.sensibo.const import DOMAIN +from homeassistant.components.sensibo.const import DOMAIN, PLATFORMS from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from . import ENTRY_CONFIG @@ -20,8 +21,18 @@ from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.fixture(name="load_platforms") +async def patch_platform_constant() -> list[Platform]: + """Return list of platforms to load.""" + return PLATFORMS + + @pytest.fixture -async def load_int(hass: HomeAssistant, get_data: SensiboData) -> MockConfigEntry: +async def load_int( + hass: HomeAssistant, + get_data: SensiboData, + load_platforms: list[Platform], +) -> MockConfigEntry: """Set up the Sensibo integration in Home Assistant.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -35,6 +46,7 @@ async def load_int(hass: HomeAssistant, get_data: SensiboData) -> MockConfigEntr config_entry.add_to_hass(hass) with ( + patch("homeassistant.components.sensibo.PLATFORMS", load_platforms), patch( "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data", return_value=get_data, diff --git a/tests/components/sensibo/snapshots/test_binary_sensor.ambr b/tests/components/sensibo/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..110a6ae8174 --- /dev/null +++ b/tests/components/sensibo/snapshots/test_binary_sensor.ambr @@ -0,0 +1,705 @@ +# serializer version: 1 +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_filter_clean_required-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.bedroom_filter_clean_required', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter clean required', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_clean', + 'unique_id': 
'BBZZBBZZ-filter_clean', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_filter_clean_required-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Bedroom Filter clean required', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_filter_clean_required', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_ac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_ac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with AC', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_ac_integration', + 'unique_id': 'BBZZBBZZ-pure_ac_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_ac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Bedroom Pure Boost linked with AC', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_ac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_indoor_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_indoor_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with indoor air quality', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_measure_integration', + 'unique_id': 'BBZZBBZZ-pure_measure_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_indoor_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Bedroom Pure Boost linked with indoor air quality', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_indoor_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_outdoor_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 
'binary_sensor.bedroom_pure_boost_linked_with_outdoor_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with outdoor air quality', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_prime_integration', + 'unique_id': 'BBZZBBZZ-pure_prime_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_outdoor_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Bedroom Pure Boost linked with outdoor air quality', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_outdoor_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_presence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with presence', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_geo_integration', + 'unique_id': 'BBZZBBZZ-pure_geo_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_presence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Bedroom Pure Boost linked with presence', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_presence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_filter_clean_required-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.hallway_filter_clean_required', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter clean required', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_clean', + 'unique_id': 'ABC999111-filter_clean', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_filter_clean_required-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Hallway Filter clean required', + }), + 'context': , + 'entity_id': 'binary_sensor.hallway_filter_clean_required', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_connectivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.hallway_motion_sensor_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connectivity', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AABBCC-alive', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_connectivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Hallway Motion Sensor Connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.hallway_motion_sensor_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_main_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.hallway_motion_sensor_main_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Main sensor', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_main_sensor', + 'unique_id': 'AABBCC-is_main_sensor', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_main_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Motion Sensor Main sensor', + }), + 'context': , + 'entity_id': 'binary_sensor.hallway_motion_sensor_main_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.hallway_motion_sensor_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AABBCC-motion', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'Hallway Motion Sensor Motion', + }), + 'context': , + 'entity_id': 
'binary_sensor.hallway_motion_sensor_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_room_occupied-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.hallway_room_occupied', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Room occupied', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'room_occupied', + 'unique_id': 'ABC999111-room_occupied', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_room_occupied-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'Hallway Room occupied', + }), + 'context': , + 'entity_id': 'binary_sensor.hallway_room_occupied', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_filter_clean_required-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.kitchen_filter_clean_required', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter clean required', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_clean', + 'unique_id': 'AAZZAAZZ-filter_clean', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_filter_clean_required-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Kitchen Filter clean required', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_filter_clean_required', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_ac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_ac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with AC', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_ac_integration', + 'unique_id': 'AAZZAAZZ-pure_ac_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_ac-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Kitchen Pure Boost linked with AC', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_ac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with indoor air quality', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_measure_integration', + 'unique_id': 'AAZZAAZZ-pure_measure_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Kitchen Pure Boost linked with indoor air quality', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with outdoor air quality', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_prime_integration', + 'unique_id': 'AAZZAAZZ-pure_prime_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Kitchen Pure Boost linked with outdoor air quality', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_presence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with presence', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_geo_integration', + 'unique_id': 'AAZZAAZZ-pure_geo_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_presence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Kitchen Pure Boost linked with presence', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_presence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_button.ambr b/tests/components/sensibo/snapshots/test_button.ambr new file mode 100644 index 00000000000..7ef6d56c714 --- /dev/null +++ b/tests/components/sensibo/snapshots/test_button.ambr @@ -0,0 +1,139 @@ +# serializer version: 1 +# name: test_button[load_platforms0][button.bedroom_reset_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.bedroom_reset_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter', + 'unique_id': 'BBZZBBZZ-reset_filter', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[load_platforms0][button.bedroom_reset_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Bedroom Reset filter', + }), + 'context': , + 'entity_id': 'button.bedroom_reset_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[load_platforms0][button.hallway_reset_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.hallway_reset_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter', + 'unique_id': 'ABC999111-reset_filter', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[load_platforms0][button.hallway_reset_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Reset filter', + }), + 'context': , + 'entity_id': 'button.hallway_reset_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[load_platforms0][button.kitchen_reset_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.kitchen_reset_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter', + 'unique_id': 'AAZZAAZZ-reset_filter', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[load_platforms0][button.kitchen_reset_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kitchen Reset filter', + }), + 'context': , + 'entity_id': 'button.kitchen_reset_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_climate.ambr b/tests/components/sensibo/snapshots/test_climate.ambr index 1e02ee63a9a..e3b27332932 100644 --- a/tests/components/sensibo/snapshots/test_climate.ambr +++ b/tests/components/sensibo/snapshots/test_climate.ambr @@ -1,33 +1,230 @@ # serializer version: 1 -# name: test_climate - ReadOnlyDict({ - 'current_humidity': 32.9, - 'current_temperature': 21.2, - 'fan_mode': 'high', - 'fan_modes': list([ - 'quiet', - 'low', - 'medium', - ]), - 'friendly_name': 'Hallway', - 'hvac_modes': list([ - , - , - , - , - , - , - ]), - 'max_temp': 20, - 'min_temp': 10, - 'supported_features': , - 'swing_mode': 'stopped', - 'swing_modes': list([ - 'stopped', - 'fixedtop', - 'fixedmiddletop', - ]), - 'target_temp_step': 1, - 'temperature': 25, +# name: test_climate[load_platforms0][climate.bedroom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + ]), + 'max_temp': 1, + 'min_temp': 0, + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.bedroom', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_device', + 'unique_id': 'BBZZBBZZ', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[load_platforms0][climate.bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Bedroom', + 'hvac_modes': list([ + , + ]), + 'max_temp': 1, + 'min_temp': 0, + 'supported_features': , + 'target_temp_step': 1, + }), + 'context': , + 'entity_id': 'climate.bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_climate[load_platforms0][climate.hallway-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'quiet', + 'low', + 'medium', + ]), + 'hvac_modes': list([ + , + , + , + , + , + , + ]), + 'max_temp': 20, + 'min_temp': 10, + 'swing_modes': list([ + 'stopped', + 'fixedtop', + 'fixedmiddletop', + ]), + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 
'entity_id': 'climate.hallway', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_device', + 'unique_id': 'ABC999111', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[load_platforms0][climate.hallway-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 32.9, + 'current_temperature': 21.2, + 'fan_mode': 'high', + 'fan_modes': list([ + 'quiet', + 'low', + 'medium', + ]), + 'friendly_name': 'Hallway', + 'hvac_modes': list([ + , + , + , + , + , + , + ]), + 'max_temp': 20, + 'min_temp': 10, + 'supported_features': , + 'swing_mode': 'stopped', + 'swing_modes': list([ + 'stopped', + 'fixedtop', + 'fixedmiddletop', + ]), + 'target_temp_step': 1, + 'temperature': 25, + }), + 'context': , + 'entity_id': 'climate.hallway', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate[load_platforms0][climate.kitchen-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'low', + 'high', + ]), + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 1, + 'min_temp': 0, + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.kitchen', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_device', + 'unique_id': 'AAZZAAZZ', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[load_platforms0][climate.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'fan_mode': 'low', + 'fan_modes': list([ + 'low', + 'high', + ]), + 'friendly_name': 'Kitchen', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 1, + 'min_temp': 0, + 'supported_features': , + 'target_temp_step': 1, + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', }) # --- diff --git a/tests/components/sensibo/snapshots/test_number.ambr b/tests/components/sensibo/snapshots/test_number.ambr new file mode 100644 index 00000000000..b632b95f1be --- /dev/null +++ b/tests/components/sensibo/snapshots/test_number.ambr @@ -0,0 +1,343 @@ +# serializer version: 1 +# name: test_number[load_platforms0][number.bedroom_humidity_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.bedroom_humidity_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity calibration', + 'platform': 'sensibo', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_humidity', + 'unique_id': 'BBZZBBZZ-calibration_hum', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[load_platforms0][number.bedroom_humidity_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Bedroom Humidity calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.bedroom_humidity_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_number[load_platforms0][number.bedroom_temperature_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.bedroom_temperature_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_temperature', + 'unique_id': 'BBZZBBZZ-calibration_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_number[load_platforms0][number.bedroom_temperature_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Bedroom Temperature calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.bedroom_temperature_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_number[load_platforms0][number.hallway_humidity_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.hallway_humidity_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_humidity', + 'unique_id': 'ABC999111-calibration_hum', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[load_platforms0][number.hallway_humidity_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Hallway Humidity calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.hallway_humidity_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_number[load_platforms0][number.hallway_temperature_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, 
+ 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.hallway_temperature_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_temperature', + 'unique_id': 'ABC999111-calibration_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_number[load_platforms0][number.hallway_temperature_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Hallway Temperature calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.hallway_temperature_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1', + }) +# --- +# name: test_number[load_platforms0][number.kitchen_humidity_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.kitchen_humidity_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_humidity', + 'unique_id': 'AAZZAAZZ-calibration_hum', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[load_platforms0][number.kitchen_humidity_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Kitchen Humidity calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.kitchen_humidity_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_number[load_platforms0][number.kitchen_temperature_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.kitchen_temperature_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_temperature', + 'unique_id': 'AAZZAAZZ-calibration_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_number[load_platforms0][number.kitchen_temperature_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 
'friendly_name': 'Kitchen Temperature calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.kitchen_temperature_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_select.ambr b/tests/components/sensibo/snapshots/test_select.ambr new file mode 100644 index 00000000000..bdafc8654ff --- /dev/null +++ b/tests/components/sensibo/snapshots/test_select.ambr @@ -0,0 +1,170 @@ +# serializer version: 1 +# name: test_select[load_platforms0][select.hallway_horizontal_swing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stopped', + 'fixedleft', + 'fixedcenterleft', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.hallway_horizontal_swing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Horizontal swing', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'horizontalswing', + 'unique_id': 'ABC999111-horizontalSwing', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[load_platforms0][select.hallway_horizontal_swing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Horizontal swing', + 'options': list([ + 'stopped', + 'fixedleft', + 'fixedcenterleft', + ]), + }), + 'context': , + 'entity_id': 'select.hallway_horizontal_swing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stopped', + }) +# --- +# name: test_select[load_platforms0][select.hallway_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.hallway_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'ABC999111-light', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[load_platforms0][select.hallway_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Light', + 'options': list([ + 'on', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.hallway_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_select[load_platforms0][select.kitchen_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'dim', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.kitchen_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'AAZZAAZZ-light', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[load_platforms0][select.kitchen_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kitchen Light', + 'options': list([ + 'on', + 'dim', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.kitchen_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_sensor.ambr b/tests/components/sensibo/snapshots/test_sensor.ambr index cd8d510b6cc..31e579d9929 100644 --- a/tests/components/sensibo/snapshots/test_sensor.ambr +++ b/tests/components/sensibo/snapshots/test_sensor.ambr @@ -1,28 +1,818 @@ # serializer version: 1 -# name: test_sensor - ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Kitchen Pure AQI', - 'options': list([ - 'good', - 'moderate', - 'bad', - ]), +# name: test_sensor[load_platforms0][sensor.bedroom_filter_last_reset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bedroom_filter_last_reset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter last reset', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_last_reset', + 'unique_id': 'BBZZBBZZ-filter_last_reset', + 'unit_of_measurement': None, }) # --- -# name: test_sensor.1 - ReadOnlyDict({ - 'device_class': 'temperature', - 'fanlevel': 'low', - 'friendly_name': 'Hallway Climate React low temperature threshold', - 'horizontalswing': 'stopped', - 'light': 'on', - 'mode': 'heat', - 'on': True, - 'state_class': , - 'swing': 'stopped', - 'targettemperature': 21, - 'temperatureunit': 'c', +# name: test_sensor[load_platforms0][sensor.bedroom_filter_last_reset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Bedroom Filter last reset', + }), + 'context': , + 'entity_id': 'sensor.bedroom_filter_last_reset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2022-04-23T15:58:45+00:00', + }) +# --- +# name: test_sensor[load_platforms0][sensor.bedroom_pure_aqi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bedroom_pure_aqi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure AQI', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pm25_pure', + 'unique_id': 'BBZZBBZZ-pm25', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[load_platforms0][sensor.bedroom_pure_aqi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Bedroom Pure AQI', + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), + }), + 'context': , + 'entity_id': 'sensor.bedroom_pure_aqi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[load_platforms0][sensor.bedroom_pure_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bedroom_pure_sensitivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pure sensitivity', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sensitivity', + 'unique_id': 'BBZZBBZZ-pure_sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.bedroom_pure_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Bedroom Pure sensitivity', + }), + 'context': , + 'entity_id': 'sensor.bedroom_pure_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'n', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_high_temperature_threshold-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_climate_react_high_temperature_threshold', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Climate React high temperature threshold', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_react_high', + 'unique_id': 'ABC999111-climate_react_high', 'unit_of_measurement': , }) # --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_high_temperature_threshold-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'fanlevel': 'high', + 'friendly_name': 'Hallway Climate React high temperature threshold', + 'horizontalswing': 'stopped', + 'light': 'on', + 'mode': 'cool', + 'on': True, + 'state_class': , + 'swing': 'stopped', + 'targettemperature': 21, + 'temperatureunit': 'c', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_climate_react_high_temperature_threshold', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '27.5', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_low_temperature_threshold-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_climate_react_low_temperature_threshold', + 'has_entity_name': True, + 
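
The paired `<entity_id>-entry` / `<entity_id>-state` names in these .ambr files are produced by the shared `snapshot_platform` helper from `tests.common`, which the rewritten Sensibo tests further down call instead of hand-written `hass.states.get(...)` assertions. A minimal sketch of what such a helper is assumed to do — the function name and body below are illustrative, not the helper's actual source:

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from syrupy.assertion import SnapshotAssertion


async def snapshot_platform_sketch(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    snapshot: SnapshotAssertion,
    config_entry_id: str,
) -> None:
    """Illustrative sketch: snapshot every registry entry and state of a config entry."""
    entries = er.async_entries_for_config_entry(entity_registry, config_entry_id)
    assert entries, "the config entry should have created at least one entity"
    for entry in entries:
        # Yields the "<entity_id>-entry" blocks recorded in this file.
        assert entry == snapshot(name=f"{entry.entity_id}-entry")
        state = hass.states.get(entry.entity_id)
        assert state is not None
        # Yields the matching "<entity_id>-state" blocks.
        assert state == snapshot(name=f"{entry.entity_id}-state")
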
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Climate React low temperature threshold', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_react_low', + 'unique_id': 'ABC999111-climate_react_low', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_low_temperature_threshold-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'fanlevel': 'low', + 'friendly_name': 'Hallway Climate React low temperature threshold', + 'horizontalswing': 'stopped', + 'light': 'on', + 'mode': 'heat', + 'on': True, + 'state_class': , + 'swing': 'stopped', + 'targettemperature': 21, + 'temperatureunit': 'c', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_climate_react_low_temperature_threshold', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_climate_react_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate React type', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smart_type', + 'unique_id': 'ABC999111-climate_react_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Climate React type', + }), + 'context': , + 'entity_id': 'sensor.hallway_climate_react_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'temperature', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_filter_last_reset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_filter_last_reset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter last reset', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_last_reset', + 'unique_id': 'ABC999111-filter_last_reset', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_filter_last_reset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Hallway Filter last reset', + }), + 'context': , + 'entity_id': 'sensor.hallway_filter_last_reset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2022-03-12T15:24:26+00:00', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_battery_voltage-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hallway_motion_sensor_battery_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery voltage', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_voltage', + 'unique_id': 'AABBCC-battery_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_battery_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Hallway Motion Sensor Battery voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_motion_sensor_battery_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3000', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_motion_sensor_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AABBCC-humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Hallway Motion Sensor Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hallway_motion_sensor_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_rssi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hallway_motion_sensor_rssi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'RSSI', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rssi', + 'unique_id': 'AABBCC-rssi', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_rssi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'Hallway Motion Sensor RSSI', + 'state_class': , + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 
'sensor.hallway_motion_sensor_rssi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-72', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_motion_sensor_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AABBCC-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Hallway Motion Sensor Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_motion_sensor_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.9', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_temperature_feels_like-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_temperature_feels_like', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature feels like', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'feels_like', + 'unique_id': 'ABC999111-feels_like', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_temperature_feels_like-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Hallway Temperature feels like', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_temperature_feels_like', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.2', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_timer_end_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_timer_end_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timer end time', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_time', + 'unique_id': 'ABC999111-timer_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_timer_end_time-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Hallway Timer end time', + 'id': None, + 'turn_on': None, + }), + 'context': , + 'entity_id': 'sensor.hallway_timer_end_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_filter_last_reset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kitchen_filter_last_reset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter last reset', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_last_reset', + 'unique_id': 'AAZZAAZZ-filter_last_reset', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_filter_last_reset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Kitchen Filter last reset', + }), + 'context': , + 'entity_id': 'sensor.kitchen_filter_last_reset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2022-04-23T15:58:45+00:00', + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_pure_aqi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kitchen_pure_aqi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure AQI', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pm25_pure', + 'unique_id': 'AAZZAAZZ-pm25', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_pure_aqi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kitchen Pure AQI', + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), + }), + 'context': , + 'entity_id': 'sensor.kitchen_pure_aqi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_pure_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kitchen_pure_sensitivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pure sensitivity', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sensitivity', + 'unique_id': 'AAZZAAZZ-pure_sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[load_platforms0][sensor.kitchen_pure_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kitchen Pure sensitivity', + }), + 'context': , + 'entity_id': 'sensor.kitchen_pure_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'n', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_switch.ambr b/tests/components/sensibo/snapshots/test_switch.ambr new file mode 100644 index 00000000000..13cb73cef7a --- /dev/null +++ b/tests/components/sensibo/snapshots/test_switch.ambr @@ -0,0 +1,192 @@ +# serializer version: 1 +# name: test_switch[load_platforms0][switch.bedroom_pure_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.bedroom_pure_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_boost_switch', + 'unique_id': 'BBZZBBZZ-pure_boost_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.bedroom_pure_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Bedroom Pure Boost', + }), + 'context': , + 'entity_id': 'switch.bedroom_pure_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[load_platforms0][switch.hallway_climate_react-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.hallway_climate_react', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Climate React', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_react_switch', + 'unique_id': 'ABC999111-climate_react_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.hallway_climate_react-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Hallway Climate React', + 'type': 'temperature', + }), + 'context': , + 'entity_id': 'switch.hallway_climate_react', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[load_platforms0][switch.hallway_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.hallway_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timer', + 'platform': 'sensibo', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'timer_on_switch', + 'unique_id': 'ABC999111-timer_on_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.hallway_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Hallway Timer', + 'id': None, + 'turn_on': None, + }), + 'context': , + 'entity_id': 'switch.hallway_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[load_platforms0][switch.kitchen_pure_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.kitchen_pure_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_boost_switch', + 'unique_id': 'AAZZAAZZ-pure_boost_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.kitchen_pure_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Kitchen Pure Boost', + }), + 'context': , + 'entity_id': 'switch.kitchen_pure_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_update.ambr b/tests/components/sensibo/snapshots/test_update.ambr new file mode 100644 index 00000000000..3eb69c9a812 --- /dev/null +++ b/tests/components/sensibo/snapshots/test_update.ambr @@ -0,0 +1,178 @@ +# serializer version: 1 +# name: test_update[load_platforms0][update.bedroom_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.bedroom_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'BBZZBBZZ-fw_ver_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_update[load_platforms0][update.bedroom_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/sensibo/icon.png', + 'friendly_name': 'Bedroom Firmware', + 'in_progress': False, + 'installed_version': 'PUR00111', + 'latest_version': 'PUR00111', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': 'pure', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.bedroom_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_update[load_platforms0][update.hallway_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.hallway_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ABC999111-fw_ver_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_update[load_platforms0][update.hallway_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/sensibo/icon.png', + 'friendly_name': 'Hallway Firmware', + 'in_progress': False, + 'installed_version': 'SKY30046', + 'latest_version': 'SKY30048', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': 'skyv2', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.hallway_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update[load_platforms0][update.kitchen_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.kitchen_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AAZZAAZZ-fw_ver_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_update[load_platforms0][update.kitchen_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/sensibo/icon.png', + 'friendly_name': 'Kitchen Firmware', + 'in_progress': False, + 'installed_version': 'PUR00111', + 'latest_version': 'PUR00111', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': 'pure', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.kitchen_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/sensibo/test_binary_sensor.py b/tests/components/sensibo/test_binary_sensor.py index 61b62226679..dbc3e87a236 100644 --- a/tests/components/sensibo/test_binary_sensor.py +++ b/tests/components/sensibo/test_binary_sensor.py @@ -7,39 +7,33 @@ from unittest.mock import patch from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, 
snapshot_platform @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.BINARY_SENSOR]], +) async def test_binary_sensor( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test the Sensibo binary sensor.""" - state1 = hass.states.get("binary_sensor.hallway_motion_sensor_connectivity") - state2 = hass.states.get("binary_sensor.hallway_motion_sensor_main_sensor") - state3 = hass.states.get("binary_sensor.hallway_motion_sensor_motion") - state4 = hass.states.get("binary_sensor.hallway_room_occupied") - state5 = hass.states.get( - "binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality" - ) - state6 = hass.states.get( - "binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality" - ) - assert state1.state == "on" - assert state2.state == "on" - assert state3.state == "on" - assert state4.state == "on" - assert state5.state == "on" - assert state6.state == "off" + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr( get_data.parsed["ABC999111"].motion_sensors["AABBCC"], "alive", False diff --git a/tests/components/sensibo/test_button.py b/tests/components/sensibo/test_button.py index 6d7ce442562..5c36fe9e94d 100644 --- a/tests/components/sensibo/test_button.py +++ b/tests/components/sensibo/test_button.py @@ -5,21 +5,47 @@ from __future__ import annotations from datetime import datetime, timedelta from unittest.mock import patch +from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform +@freeze_time("2022-03-12T15:24:26+00:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.BUTTON]], +) async def test_button( + hass: HomeAssistant, + load_int: ConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Sensibo button.""" + + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) + + +async def test_button_update( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, diff --git a/tests/components/sensibo/test_climate.py b/tests/components/sensibo/test_climate.py index b5a7be7bde0..7916727e57a 100644 --- a/tests/components/sensibo/test_climate.py +++ b/tests/components/sensibo/test_climate.py @@ -54,12 +54,14 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_UNKNOWN, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er from homeassistant.util 
import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform async def test_climate_find_valid_targets() -> None: @@ -77,26 +79,22 @@ async def test_climate_find_valid_targets() -> None: assert _find_valid_target_temp(25, valid_targets) == 20 +@pytest.mark.parametrize( + "load_platforms", + [[Platform.CLIMATE]], +) async def test_climate( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, get_data: SensiboData, load_int: ConfigEntry, + entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: """Test the Sensibo climate.""" - state1 = hass.states.get("climate.hallway") - state2 = hass.states.get("climate.kitchen") - state3 = hass.states.get("climate.bedroom") + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) - assert state1.state == "heat" - assert state1.attributes == snapshot - - assert state2.state == "off" - - assert state3 - assert state3.state == "off" found_log = False logs = caplog.get_records("setup") for log in logs: @@ -349,6 +347,17 @@ async def test_climate_temperatures( state2 = hass.states.get("climate.hallway") assert state2.attributes["temperature"] == 20 + with patch( + "homeassistant.components.sensibo.coordinator.SensiboClient.async_set_ac_state_property", + ) as mock_call: + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: state1.entity_id, ATTR_TEMPERATURE: 20}, + blocking=True, + ) + assert not mock_call.called + with ( patch( "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data", diff --git a/tests/components/sensibo/test_config_flow.py b/tests/components/sensibo/test_config_flow.py index e994402b09f..d6edb1c7ae0 100644 --- a/tests/components/sensibo/test_config_flow.py +++ b/tests/components/sensibo/test_config_flow.py @@ -192,15 +192,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -254,15 +246,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", @@ -338,15 +322,7 @@ async def test_flow_reauth_no_username_or_device( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -372,3 +348,171 @@ async def test_flow_reauth_no_username_or_device( assert result2["step_id"] == "reauth_confirm" assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": p_error} + + +async def test_reconfigure_flow(hass: HomeAssistant) -> None: + """Test a reconfigure flow.""" + entry = 
MockConfigEntry( + version=2, + domain=DOMAIN, + unique_id="username", + data={"api_key": "1234567890"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", + return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, + ), + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_me", + return_value={"result": {"username": "username"}}, + ) as mock_sensibo, + patch( + "homeassistant.components.sensibo.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567891"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert entry.data == {"api_key": "1234567891"} + + assert len(mock_sensibo.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("sideeffect", "p_error"), + [ + (aiohttp.ClientConnectionError, "cannot_connect"), + (TimeoutError, "cannot_connect"), + (AuthenticationError, "invalid_auth"), + (SensiboError, "cannot_connect"), + ], +) +async def test_reconfigure_flow_error( + hass: HomeAssistant, sideeffect: Exception, p_error: str +) -> None: + """Test a reconfigure flow with error.""" + entry = MockConfigEntry( + version=2, + domain=DOMAIN, + unique_id="username", + data={"api_key": "1234567890"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", + side_effect=sideeffect, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567890"}, + ) + await hass.async_block_till_done() + + assert result2["step_id"] == "reconfigure" + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": p_error} + + with ( + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", + return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, + ), + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_me", + return_value={"result": {"username": "username"}}, + ), + patch( + "homeassistant.components.sensibo.async_setup_entry", + return_value=True, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567891"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert entry.data == {"api_key": "1234567891"} + + +@pytest.mark.parametrize( + ("get_devices", "get_me", "p_error"), + [ + ( + {"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, + {"result": {}}, + "no_username", + ), + ( + {"result": []}, + {"result": {"username": "username"}}, + "no_devices", + ), + ( + {"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, + {"result": {"username": "username2"}}, + "incorrect_api_key", + ), + ], +) +async def test_flow_reconfigure_no_username_or_device( + hass: HomeAssistant, + get_devices: dict[str, Any], + get_me: dict[str, Any], + p_error: str, +) -> None: + """Test config flow get no username from api.""" + entry = MockConfigEntry( + version=2, + domain=DOMAIN, + unique_id="username", + 
data={"api_key": "1234567890"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + with ( + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", + return_value=get_devices, + ), + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_me", + return_value=get_me, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_API_KEY: "1234567890", + }, + ) + await hass.async_block_till_done() + + assert result2["step_id"] == "reconfigure" + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": p_error} diff --git a/tests/components/sensibo/test_number.py b/tests/components/sensibo/test_number.py index de369698f50..95836ba023c 100644 --- a/tests/components/sensibo/test_number.py +++ b/tests/components/sensibo/test_number.py @@ -7,6 +7,7 @@ from unittest.mock import patch from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.number import ( ATTR_VALUE, @@ -14,27 +15,31 @@ from homeassistant.components.number import ( SERVICE_SET_VALUE, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform +@pytest.mark.parametrize( + "load_platforms", + [[Platform.NUMBER]], +) @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_number( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test the Sensibo number.""" - state1 = hass.states.get("number.hallway_temperature_calibration") - state2 = hass.states.get("number.hallway_humidity_calibration") - assert state1.state == "0.1" - assert state2.state == "0.0" + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr(get_data.parsed["ABC999111"], "calibration_temp", 0.2) diff --git a/tests/components/sensibo/test_select.py b/tests/components/sensibo/test_select.py index 7a9c89ef612..2e4a1cb507c 100644 --- a/tests/components/sensibo/test_select.py +++ b/tests/components/sensibo/test_select.py @@ -7,6 +7,7 @@ from unittest.mock import patch from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.select import ( ATTR_OPTION, @@ -14,24 +15,30 @@ from homeassistant.components.select import ( SERVICE_SELECT_OPTION, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform +@pytest.mark.parametrize( + "load_platforms", + 
[[Platform.SELECT]], +) async def test_select( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test the Sensibo select.""" - state1 = hass.states.get("select.hallway_horizontal_swing") - assert state1.state == "stopped" + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr( get_data.parsed["ABC999111"], "horizontal_swing_mode", "fixedleft" diff --git a/tests/components/sensibo/test_sensor.py b/tests/components/sensibo/test_sensor.py index 5fc761f178a..32794e266b0 100644 --- a/tests/components/sensibo/test_sensor.py +++ b/tests/components/sensibo/test_sensor.py @@ -5,37 +5,37 @@ from __future__ import annotations from datetime import timedelta from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from pysensibo.model import PureAQI, SensiboData import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.SENSOR]], +) async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, ) -> None: """Test the Sensibo sensor.""" - state1 = hass.states.get("sensor.hallway_motion_sensor_battery_voltage") - state2 = hass.states.get("sensor.kitchen_pure_aqi") - state3 = hass.states.get("sensor.kitchen_pure_sensitivity") - state4 = hass.states.get("sensor.hallway_climate_react_low_temperature_threshold") - assert state1.state == "3000" - assert state2.state == "good" - assert state3.state == "n" - assert state4.state == "0.0" - assert state2.attributes == snapshot - assert state4.attributes == snapshot + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr(get_data.parsed["AAZZAAZZ"], "pm25_pure", PureAQI(2)) diff --git a/tests/components/sensibo/test_switch.py b/tests/components/sensibo/test_switch.py index cc3c8881bec..f260af7baaa 100644 --- a/tests/components/sensibo/test_switch.py +++ b/tests/components/sensibo/test_switch.py @@ -7,6 +7,7 @@ from unittest.mock import patch from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntry @@ -16,12 +17,29 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_OFF, STATE_ON, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.SWITCH]], +) +async def test_switch( + hass: HomeAssistant, + load_int: ConfigEntry, + 
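
Several of the converted tests now take the `freezer: FrozenDateTimeFactory` fixture and advance the frozen clock with `freezer.tick(...)` instead of computing `dt_util.utcnow() + timedelta(...)` by hand (see the `test_update` hunk below). The coordinator-refresh pattern, with the patch target copied from this diff, looks like this minimal sketch:

from datetime import timedelta
from unittest.mock import patch

from freezegun.api import FrozenDateTimeFactory
from pysensibo.model import SensiboData

from homeassistant.core import HomeAssistant

from tests.common import async_fire_time_changed


async def refresh_coordinator_sketch(
    hass: HomeAssistant, freezer: FrozenDateTimeFactory, get_data: SensiboData
) -> None:
    """Advance frozen time past the update interval so the coordinator polls again."""
    with patch(
        "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
        return_value=get_data,
    ):
        freezer.tick(timedelta(minutes=5))  # move the frozen clock forward
        async_fire_time_changed(hass)  # fire time-changed at the new frozen "now"
        await hass.async_block_till_done()
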
entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Sensibo switch.""" + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) async def test_switch_timer( diff --git a/tests/components/sensibo/test_update.py b/tests/components/sensibo/test_update.py index 23b2719d5b5..a4eb9751243 100644 --- a/tests/components/sensibo/test_update.py +++ b/tests/components/sensibo/test_update.py @@ -5,32 +5,36 @@ from __future__ import annotations from datetime import timedelta from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.const import STATE_OFF, Platform from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util +from homeassistant.helpers import entity_registry as er -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.UPDATE]], +) async def test_update( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, ) -> None: """Test the Sensibo update.""" - state1 = hass.states.get("update.hallway_firmware") - state2 = hass.states.get("update.kitchen_firmware") - assert state1.state == STATE_ON - assert state1.attributes["installed_version"] == "SKY30046" - assert state1.attributes["latest_version"] == "SKY30048" - assert state1.attributes["title"] == "skyv2" - assert state2.state == STATE_OFF + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr(get_data.parsed["ABC999111"], "fw_ver", "SKY30048") @@ -38,10 +42,8 @@ async def test_update( "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data", return_value=get_data, ): - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(minutes=5), - ) + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) await hass.async_block_till_done() state1 = hass.states.get("update.hallway_firmware") diff --git a/tests/components/sensor/test_device_condition.py b/tests/components/sensor/test_device_condition.py index d9a9900b8b1..a9781e0b800 100644 --- a/tests/components/sensor/test_device_condition.py +++ b/tests/components/sensor/test_device_condition.py @@ -51,7 +51,6 @@ def test_matches_device_classes(device_class: SensorDeviceClass) -> None: SensorDeviceClass.BATTERY: "CONF_IS_BATTERY_LEVEL", SensorDeviceClass.CO: "CONF_IS_CO", SensorDeviceClass.CO2: "CONF_IS_CO2", - SensorDeviceClass.CONDUCTIVITY: "CONF_IS_CONDUCTIVITY", SensorDeviceClass.ENERGY_STORAGE: "CONF_IS_ENERGY", SensorDeviceClass.VOLUME_STORAGE: "CONF_IS_VOLUME", }.get(device_class, f"CONF_IS_{device_class.value.upper()}") @@ -60,7 +59,6 @@ def test_matches_device_classes(device_class: SensorDeviceClass) -> None: # Ensure it has correct value constant_value = { SensorDeviceClass.BATTERY: "is_battery_level", - SensorDeviceClass.CONDUCTIVITY: "is_conductivity", SensorDeviceClass.ENERGY_STORAGE: "is_energy", SensorDeviceClass.VOLUME_STORAGE: "is_volume", }.get(device_class, f"is_{device_class.value}") diff --git 
a/tests/components/sensor/test_device_trigger.py b/tests/components/sensor/test_device_trigger.py index bb560c824d3..f50e92bc9df 100644 --- a/tests/components/sensor/test_device_trigger.py +++ b/tests/components/sensor/test_device_trigger.py @@ -55,7 +55,6 @@ def test_matches_device_classes(device_class: SensorDeviceClass) -> None: SensorDeviceClass.BATTERY: "CONF_BATTERY_LEVEL", SensorDeviceClass.CO: "CONF_CO", SensorDeviceClass.CO2: "CONF_CO2", - SensorDeviceClass.CONDUCTIVITY: "CONF_CONDUCTIVITY", SensorDeviceClass.ENERGY_STORAGE: "CONF_ENERGY", SensorDeviceClass.VOLUME_STORAGE: "CONF_VOLUME", }.get(device_class, f"CONF_{device_class.value.upper()}") @@ -64,7 +63,6 @@ def test_matches_device_classes(device_class: SensorDeviceClass) -> None: # Ensure it has correct value constant_value = { SensorDeviceClass.BATTERY: "battery_level", - SensorDeviceClass.CONDUCTIVITY: "conductivity", SensorDeviceClass.ENERGY_STORAGE: "energy", SensorDeviceClass.VOLUME_STORAGE: "volume", }.get(device_class, device_class.value) diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 2504ea80d84..0ea46a41273 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -5,8 +5,8 @@ from __future__ import annotations from collections.abc import Generator from datetime import UTC, date, datetime from decimal import Decimal -from types import ModuleType from typing import Any +from unittest.mock import patch import pytest @@ -30,6 +30,7 @@ from homeassistant.const import ( PERCENTAGE, STATE_UNKNOWN, EntityCategory, + UnitOfArea, UnitOfDataRate, UnitOfEnergy, UnitOfLength, @@ -58,8 +59,6 @@ from tests.common import ( MockModule, MockPlatform, async_mock_restore_state_shutdown_restart, - help_test_all, - import_and_test_deprecated_constant_enum, mock_config_flow, mock_integration, mock_platform, @@ -483,6 +482,108 @@ async def test_restore_sensor_restore_state( assert entity0.native_unit_of_measurement == uom +async def test_translated_unit( + hass: HomeAssistant, +) -> None: + """Test translated unit.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = MockSensor( + name="Test", + native_value="123", + unique_id="very_unique", + ) + entity0.entity_description = SensorEntityDescription( + "test", + translation_key="test_translation_key", + ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "sensor", {"sensor": {"platform": "test"}} + ) + await hass.async_block_till_done() + + entity_id = entity0.entity_id + state = hass.states.get(entity_id) + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == "Tests" + + +async def test_translated_unit_with_native_unit_raises( + hass: HomeAssistant, +) -> None: + """Test that translated unit.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = MockSensor( + name="Test", + native_value="123", + unique_id="very_unique", + ) + entity0.entity_description = SensorEntityDescription( + "test", + translation_key="test_translation_key", + native_unit_of_measurement="bad_unit", + ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "sensor", {"sensor": {"platform": "test"}} + ) + 
await hass.async_block_till_done() + # Setup fails so entity_id is None + assert entity0.entity_id is None + + +async def test_unit_translation_key_without_platform_raises( + hass: HomeAssistant, +) -> None: + """Test that unit translation key property raises if the entity has no platform yet.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = MockSensor( + name="Test", + native_value="123", + unique_id="very_unique", + ) + entity0.entity_description = SensorEntityDescription( + "test", + translation_key="test_translation_key", + ) + with pytest.raises( + ValueError, + match="cannot have a translation key for unit of measurement before " + "being added to the entity platform", + ): + unit = entity0.unit_of_measurement # noqa: F841 + + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "sensor", {"sensor": {"platform": "test"}} + ) + await hass.async_block_till_done() + + # Should not raise after being added to the platform + unit = entity0.unit_of_measurement # noqa: F841 + assert unit == "Tests" + + @pytest.mark.parametrize( ( "device_class", @@ -651,6 +752,34 @@ async def test_custom_unit( "device_class", ), [ + # Area + ( + UnitOfArea.SQUARE_KILOMETERS, + UnitOfArea.SQUARE_MILES, + UnitOfArea.SQUARE_MILES, + 1000, + "1000", + "386", + SensorDeviceClass.AREA, + ), + ( + UnitOfArea.SQUARE_CENTIMETERS, + UnitOfArea.SQUARE_INCHES, + UnitOfArea.SQUARE_INCHES, + 7.24, + "7.24", + "1.12", + SensorDeviceClass.AREA, + ), + ( + UnitOfArea.SQUARE_KILOMETERS, + "peer_distance", + UnitOfArea.SQUARE_KILOMETERS, + 1000, + "1000", + "1000", + SensorDeviceClass.AREA, + ), # Distance ( UnitOfLength.KILOMETERS, @@ -1834,6 +1963,7 @@ async def test_non_numeric_device_class_with_unit_of_measurement( [ SensorDeviceClass.APPARENT_POWER, SensorDeviceClass.AQI, + SensorDeviceClass.AREA, SensorDeviceClass.ATMOSPHERIC_PRESSURE, SensorDeviceClass.BATTERY, SensorDeviceClass.CO, @@ -2549,71 +2679,6 @@ async def test_entity_category_config_raises_error( assert not hass.states.get("sensor.test") -@pytest.mark.parametrize( - "module", - [sensor, sensor.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize(("enum"), list(sensor.SensorStateClass)) -@pytest.mark.parametrize(("module"), [sensor, sensor.const]) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: sensor.SensorStateClass, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, "STATE_CLASS_", "2025.1" - ) - - -@pytest.mark.parametrize( - ("enum"), - [ - sensor.SensorDeviceClass.AQI, - sensor.SensorDeviceClass.BATTERY, - sensor.SensorDeviceClass.CO, - sensor.SensorDeviceClass.CO2, - sensor.SensorDeviceClass.CURRENT, - sensor.SensorDeviceClass.DATE, - sensor.SensorDeviceClass.ENERGY, - sensor.SensorDeviceClass.FREQUENCY, - sensor.SensorDeviceClass.GAS, - sensor.SensorDeviceClass.HUMIDITY, - sensor.SensorDeviceClass.ILLUMINANCE, - sensor.SensorDeviceClass.MONETARY, - sensor.SensorDeviceClass.NITROGEN_DIOXIDE, - sensor.SensorDeviceClass.NITROGEN_MONOXIDE, - sensor.SensorDeviceClass.NITROUS_OXIDE, - sensor.SensorDeviceClass.OZONE, - sensor.SensorDeviceClass.PM1, - sensor.SensorDeviceClass.PM10, - sensor.SensorDeviceClass.PM25, - 
sensor.SensorDeviceClass.POWER_FACTOR, - sensor.SensorDeviceClass.POWER, - sensor.SensorDeviceClass.PRESSURE, - sensor.SensorDeviceClass.SIGNAL_STRENGTH, - sensor.SensorDeviceClass.SULPHUR_DIOXIDE, - sensor.SensorDeviceClass.TEMPERATURE, - sensor.SensorDeviceClass.TIMESTAMP, - sensor.SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - sensor.SensorDeviceClass.VOLTAGE, - ], -) -def test_deprecated_constants_sensor_device_class( - caplog: pytest.LogCaptureFixture, - enum: sensor.SensorStateClass, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, sensor, enum, "DEVICE_CLASS_", "2025.1" - ) - - @pytest.mark.parametrize( ("device_class", "native_unit"), [ diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 4d271785114..44eaa9fde0d 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -1,10 +1,11 @@ """The tests for sensor recorder platform.""" +from collections.abc import Iterable from datetime import datetime, timedelta import math from statistics import mean from typing import Any, Literal -from unittest.mock import patch +from unittest.mock import ANY, patch from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory @@ -37,6 +38,7 @@ from homeassistant.components.recorder.util import get_instance, session_scope from homeassistant.components.sensor import ATTR_OPTIONS, DOMAIN, SensorDeviceClass from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM @@ -110,6 +112,24 @@ def setup_recorder(recorder_mock: Recorder) -> Recorder: """Set up recorder.""" +@pytest.fixture(autouse=True) +def disable_mariadb_issue() -> None: + """Disable creating issue about outdated MariaDB version.""" + with patch( + "homeassistant.components.recorder.util._async_create_mariadb_range_index_regression_issue" + ): + yield + + +@pytest.fixture(autouse=True) +def disable_sqlite_issue() -> None: + """Disable creating issue about outdated SQLite version.""" + with patch( + "homeassistant.components.recorder.util._async_create_issue_deprecated_version" + ): + yield + + async def async_list_statistic_ids( hass: HomeAssistant, statistic_ids: set[str] | None = None, @@ -137,15 +157,61 @@ async def assert_statistic_ids( ) +def assert_issues( + hass: HomeAssistant, + expected_issues: dict[str, dict[str, Any]], +) -> None: + """Assert statistics issues.""" + issue_registry = ir.async_get(hass) + assert len(issue_registry.issues) == len(expected_issues) + for issue_id, expected_issue_data in expected_issues.items(): + expected_translation_placeholders = dict(expected_issue_data) + expected_translation_placeholders.pop("issue_type") + expected_issue = ir.IssueEntry( + active=True, + breaks_in_ha_version=None, + created=ANY, + data=expected_issue_data, + dismissed_version=None, + domain=DOMAIN, + is_fixable=False, + is_persistent=False, + issue_domain=None, + issue_id=issue_id, + learn_more_url=None, + severity=ir.IssueSeverity.WARNING, + translation_key=expected_issue_data["issue_type"], + translation_placeholders=expected_translation_placeholders, + ) + assert (DOMAIN, issue_id) in issue_registry.issues + assert issue_registry.issues[(DOMAIN, issue_id)] == expected_issue + 
+ async def assert_validation_result( + hass: HomeAssistant, client: MockHAClientWebSocket, - expected_result: dict[str, list[dict[str, Any]]], + expected_validation_result: dict[str, list[dict[str, Any]]], + expected_issues: Iterable[str], ) -> None: """Assert statistics validation result.""" await client.send_json_auto_id({"type": "recorder/validate_statistics"}) response = await client.receive_json() assert response["success"] - assert response["result"] == expected_result + assert response["result"] == expected_validation_result + await hass.async_block_till_done() + + # Check we get corresponding issues + await client.send_json_auto_id({"type": "recorder/update_statistics_issues"}) + response = await client.receive_json() + assert response["success"] + expected_issue_registry_issues = { + f"{issue['type']}_{statistic_id}": issue["data"] | {"issue_type": issue["type"]} + for statistic_id, issues in expected_validation_result.items() + for issue in issues + if issue["type"] in expected_issues + } + + assert_issues(hass, expected_issue_registry_issues) @pytest.mark.parametrize( @@ -161,6 +227,8 @@ async def assert_validation_result( ), [ (None, "%", "%", "%", "unitless", 13.050847, -10, 30), + ("area", "m²", "m²", "m²", "area", 13.050847, -10, 30), + ("area", "mi²", "mi²", "mi²", "area", 13.050847, -10, 30), ("battery", "%", "%", "%", "unitless", 13.050847, -10, 30), ("battery", None, None, None, "unitless", 13.050847, -10, 30), ("distance", "m", "m", "m", "distance", 13.050847, -10, 30), @@ -848,6 +916,8 @@ async def test_compile_hourly_statistics_wrong_unit( "factor", ), [ + (US_CUSTOMARY_SYSTEM, "area", "m²", "m²", "m²", "area", 1), + (US_CUSTOMARY_SYSTEM, "area", "mi²", "mi²", "mi²", "area", 1), (US_CUSTOMARY_SYSTEM, "distance", "m", "m", "m", "distance", 1), (US_CUSTOMARY_SYSTEM, "distance", "mi", "mi", "mi", "distance", 1), (US_CUSTOMARY_SYSTEM, "energy", "kWh", "kWh", "kWh", "energy", 1), @@ -860,6 +930,8 @@ async def test_compile_hourly_statistics_wrong_unit( (US_CUSTOMARY_SYSTEM, "volume", "ft³", "ft³", "ft³", "volume", 1), (US_CUSTOMARY_SYSTEM, "weight", "g", "g", "g", "mass", 1), (US_CUSTOMARY_SYSTEM, "weight", "oz", "oz", "oz", "mass", 1), + (METRIC_SYSTEM, "area", "m²", "m²", "m²", "area", 1), + (METRIC_SYSTEM, "area", "mi²", "mi²", "mi²", "area", 1), (METRIC_SYSTEM, "distance", "m", "m", "m", "distance", 1), (METRIC_SYSTEM, "distance", "mi", "mi", "mi", "distance", 1), (METRIC_SYSTEM, "energy", "kWh", "kWh", "kWh", "energy", 1), @@ -2162,6 +2234,8 @@ async def test_compile_hourly_energy_statistics_multiple( [ ("battery", "%", 30), ("battery", None, 30), + ("area", "m²", 30), + ("area", "mi²", 30), ("distance", "m", 30), ("distance", "mi", 30), ("humidity", "%", 30), @@ -2270,6 +2344,8 @@ async def test_compile_hourly_statistics_partially_unavailable( [ ("battery", "%", 30), ("battery", None, 30), + ("area", "m²", 30), + ("area", "mi²", 30), ("distance", "m", 30), ("distance", "mi", 30), ("humidity", "%", 30), @@ -2372,6 +2448,10 @@ async def test_compile_hourly_statistics_fails( "statistic_type", ), [ + ("measurement", "area", "m²", "m²", "m²", "area", "mean"), + ("measurement", "area", "mi²", "mi²", "mi²", "area", "mean"), + ("total", "area", "m²", "m²", "m²", "area", "sum"), + ("total", "area", "mi²", "mi²", "mi²", "area", "sum"), ("measurement", "battery", "%", "%", "%", "unitless", "mean"), ("measurement", "battery", None, None, None, "unitless", "mean"), ("measurement", "distance", "m", "m", "m", "distance", "mean"), @@ -4167,8 +4247,8 @@ async def async_record_states( 
@pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4219,7 +4299,7 @@ async def test_validate_unit_change_convertible( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, unit in state matching device class - empty response hass.states.async_set( @@ -4229,7 +4309,7 @@ async def test_validate_unit_change_convertible( timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, unit in state not matching device class - empty response hass.states.async_set( @@ -4239,7 +4319,7 @@ async def test_validate_unit_change_convertible( timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Statistics has run, incompatible unit - expect error await async_recorder_block_till_done(hass) @@ -4264,7 +4344,27 @@ async def test_validate_unit_change_convertible( } ], } - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {"units_changed"}) + + # Unavailable state - empty response + hass.states.async_set( + "sensor.test", + "unavailable", + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) + + # Unknown state - empty response + hass.states.async_set( + "sensor.test", + "unknown", + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) # Valid state - empty response hass.states.async_set( @@ -4274,12 +4374,12 @@ async def test_validate_unit_change_convertible( timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Valid state, statistic runs again - empty response do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Valid state in compatible unit - empty response hass.states.async_set( @@ -4289,12 +4389,12 @@ async def test_validate_unit_change_convertible( timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Valid state, statistic runs again - empty response do_adhoc_statistics(hass, start=now + timedelta(hours=2)) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Remove the state - expect error about missing state hass.states.async_remove("sensor.test") @@ -4306,7 +4406,7 @@ async def test_validate_unit_change_convertible( } ], } - await 
assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {}) @pytest.mark.parametrize( @@ -4333,7 +4433,7 @@ async def test_validate_statistics_unit_ignore_device_class( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, no device class - empty response initial_attributes = {"state_class": "measurement", "unit_of_measurement": "dogs"} @@ -4341,7 +4441,7 @@ async def test_validate_statistics_unit_ignore_device_class( "sensor.test", 10, attributes=initial_attributes, timestamp=now.timestamp() ) await hass.async_block_till_done() - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Statistics has run, device class set not matching unit - empty response do_adhoc_statistics(hass, start=now) @@ -4353,14 +4453,14 @@ async def test_validate_statistics_unit_ignore_device_class( timestamp=now.timestamp(), ) await hass.async_block_till_done() - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4418,7 +4518,7 @@ async def test_validate_statistics_unit_change_no_device_class( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, sensor state set - empty response hass.states.async_set( @@ -4428,7 +4528,7 @@ async def test_validate_statistics_unit_change_no_device_class( timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, sensor state set to an incompatible unit - empty response hass.states.async_set( @@ -4438,7 +4538,7 @@ async def test_validate_statistics_unit_change_no_device_class( timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Statistics has run, incompatible unit - expect error await async_recorder_block_till_done(hass) @@ -4463,7 +4563,27 @@ async def test_validate_statistics_unit_change_no_device_class( } ], } - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {"units_changed"}) + + # Unavailable state - empty response + hass.states.async_set( + "sensor.test", + "unavailable", + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) + + # Unknown state - empty response + hass.states.async_set( + "sensor.test", + "unknown", + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) # Valid state - empty response hass.states.async_set( @@ -4473,12 +4593,12 @@ async def 
test_validate_statistics_unit_change_no_device_class( timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Valid state, statistic runs again - empty response do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Valid state in compatible unit - empty response hass.states.async_set( @@ -4488,12 +4608,12 @@ async def test_validate_statistics_unit_change_no_device_class( timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Valid state, statistic runs again - empty response do_adhoc_statistics(hass, start=now + timedelta(hours=2)) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Remove the state - expect error about missing state hass.states.async_remove("sensor.test") @@ -4505,7 +4625,7 @@ async def test_validate_statistics_unit_change_no_device_class( } ], } - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {}) @pytest.mark.parametrize( @@ -4514,7 +4634,7 @@ async def test_validate_statistics_unit_change_no_device_class( (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"), ], ) -async def test_validate_statistics_unsupported_state_class( +async def test_validate_statistics_state_class_removed( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, units, @@ -4530,19 +4650,19 @@ async def test_validate_statistics_unsupported_state_class( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, valid state - empty response hass.states.async_set( "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() ) await hass.async_block_till_done() - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Statistics has run, empty response do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # State update with invalid state class, expect error _attributes = dict(attributes) @@ -4554,15 +4674,85 @@ async def test_validate_statistics_unsupported_state_class( expected = { "sensor.test": [ { - "data": { - "state_class": None, - "statistic_id": "sensor.test", - }, - "type": "unsupported_state_class", + "data": {"statistic_id": "sensor.test"}, + "type": "state_class_removed", } ], } - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {"state_class_removed"}) + + # Unavailable state - empty response + hass.states.async_set( + "sensor.test", "unavailable", attributes=_attributes, timestamp=now.timestamp() + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) + + # Unknown state - empty response + hass.states.async_set( + "sensor.test", "unknown", attributes=_attributes, timestamp=now.timestamp() + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) + + +@pytest.mark.parametrize( + ("units", "attributes", 
"unit"), + [ + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"), + ], +) +async def test_validate_statistics_state_class_removed_issue_cleaned_up( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + units, + attributes, + unit, +) -> None: + """Test validate_statistics.""" + now = get_start_time(dt_util.utcnow()) + + hass.config.units = units + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + # No statistics, no state - empty response + await assert_validation_result(hass, client, {}, {}) + + # No statistics, valid state - empty response + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) + await hass.async_block_till_done() + await assert_validation_result(hass, client, {}, {}) + + # Statistics has run, empty response + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) + + # State update with invalid state class, expect error + _attributes = dict(attributes) + _attributes.pop("state_class") + hass.states.async_set( + "sensor.test", 12, attributes=_attributes, timestamp=now.timestamp() + ) + await hass.async_block_till_done() + expected = { + "sensor.test": [ + { + "data": {"statistic_id": "sensor.test"}, + "type": "state_class_removed", + } + ], + } + await assert_validation_result(hass, client, expected, {"state_class_removed"}) + + # Remove the statistics - empty response + get_instance(hass).async_clear_statistics(["sensor.test"]) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) @pytest.mark.parametrize( @@ -4587,19 +4777,19 @@ async def test_validate_statistics_sensor_no_longer_recorded( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, valid state - empty response hass.states.async_set( "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() ) await hass.async_block_till_done() - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Statistics has run, empty response do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Sensor no longer recorded, expect error expected = { @@ -4616,7 +4806,7 @@ async def test_validate_statistics_sensor_no_longer_recorded( "entity_filter", return_value=False, ): - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {}) @pytest.mark.parametrize( @@ -4641,7 +4831,7 @@ async def test_validate_statistics_sensor_not_recorded( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Sensor not recorded, expect error expected = { @@ -4662,12 +4852,12 @@ async def test_validate_statistics_sensor_not_recorded( "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() ) await hass.async_block_till_done() - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {}) # Statistics has run, expect same error do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_validation_result(client, expected) + 
await assert_validation_result(hass, client, expected, {}) @pytest.mark.parametrize( @@ -4692,19 +4882,19 @@ async def test_validate_statistics_sensor_removed( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, valid state - empty response hass.states.async_set( "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() ) await hass.async_block_till_done() - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Statistics has run, empty response do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Sensor removed, expect error hass.states.async_remove("sensor.test") @@ -4716,7 +4906,7 @@ async def test_validate_statistics_sensor_removed( } ], } - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {}) @pytest.mark.parametrize( @@ -4741,7 +4931,7 @@ async def test_validate_statistics_unit_change_no_conversion( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, original unit - empty response hass.states.async_set( @@ -4750,7 +4940,7 @@ async def test_validate_statistics_unit_change_no_conversion( attributes={**attributes, "unit_of_measurement": unit1}, timestamp=now.timestamp(), ) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, changed unit - empty response hass.states.async_set( @@ -4759,7 +4949,7 @@ async def test_validate_statistics_unit_change_no_conversion( attributes={**attributes, "unit_of_measurement": unit2}, timestamp=now.timestamp(), ) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Run statistics, no statistics will be generated because of conflicting units await async_recorder_block_till_done(hass) @@ -4774,7 +4964,7 @@ async def test_validate_statistics_unit_change_no_conversion( attributes={**attributes, "unit_of_measurement": unit1}, timestamp=now.timestamp(), ) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Run statistics one hour later, only the state with unit1 will be considered await async_recorder_block_till_done(hass) @@ -4783,7 +4973,7 @@ async def test_validate_statistics_unit_change_no_conversion( await assert_statistic_ids( hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Change unit - expect error hass.states.async_set( @@ -4806,7 +4996,27 @@ async def test_validate_statistics_unit_change_no_conversion( } ], } - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {"units_changed"}) + + # Unavailable state - empty response + hass.states.async_set( + "sensor.test", + "unavailable", + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) + + # Unknown state - empty response + hass.states.async_set( + "sensor.test", + "unknown", + attributes={**attributes, 
"unit_of_measurement": unit2}, + timestamp=now.timestamp(), + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) # Original unit - empty response hass.states.async_set( @@ -4816,13 +5026,13 @@ async def test_validate_statistics_unit_change_no_conversion( timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Valid state, statistic runs again - empty response await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now + timedelta(hours=2)) await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Remove the state - expect error hass.states.async_remove("sensor.test") @@ -4834,7 +5044,7 @@ async def test_validate_statistics_unit_change_no_conversion( } ], } - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {}) @pytest.mark.parametrize( @@ -4864,7 +5074,7 @@ async def test_validate_statistics_unit_change_equivalent_units( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, original unit - empty response hass.states.async_set( @@ -4873,7 +5083,7 @@ async def test_validate_statistics_unit_change_equivalent_units( attributes={**attributes, "unit_of_measurement": unit1}, timestamp=now.timestamp(), ) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Run statistics await async_recorder_block_till_done(hass) @@ -4890,7 +5100,7 @@ async def test_validate_statistics_unit_change_equivalent_units( attributes={**attributes, "unit_of_measurement": unit2}, timestamp=now.timestamp() + 1, ) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Run statistics one hour later, metadata will be updated await async_recorder_block_till_done(hass) @@ -4899,7 +5109,7 @@ async def test_validate_statistics_unit_change_equivalent_units( await assert_statistic_ids( hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit2}] ) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) @pytest.mark.parametrize( @@ -4928,7 +5138,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # No statistics, original unit - empty response hass.states.async_set( @@ -4937,7 +5147,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( attributes={**attributes, "unit_of_measurement": unit1}, timestamp=now.timestamp(), ) - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) # Run statistics await async_recorder_block_till_done(hass) @@ -4967,7 +5177,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( } ], } - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {"units_changed"}) # Run statistics one hour later, metadata will not be updated await async_recorder_block_till_done(hass) @@ -4976,7 +5186,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( await assert_statistic_ids( 
hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) - await assert_validation_result(client, expected) + await assert_validation_result(hass, client, expected, {"units_changed"}) async def test_validate_statistics_other_domain( @@ -5009,7 +5219,67 @@ async def test_validate_statistics_other_domain( await async_recorder_block_till_done(hass) # We should not get complains about the missing number entity - await assert_validation_result(client, {}) + await assert_validation_result(hass, client, {}, {}) + + +@pytest.mark.parametrize( + ("units", "attributes", "unit"), + [ + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"), + ], +) +async def test_update_statistics_issues( + hass: HomeAssistant, + units, + attributes, + unit, +) -> None: + """Test update_statistics_issues.""" + + async def one_hour_stats(start: datetime) -> datetime: + """Generate 5-minute statistics for one hour.""" + for _ in range(12): + do_adhoc_statistics(hass, start=start) + await async_wait_recording_done(hass) + start += timedelta(minutes=5) + return start + + now = get_start_time(dt_util.utcnow()) + + hass.config.units = units + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + + # No statistics, no state - no issues + now = await one_hour_stats(now) + assert_issues(hass, {}) + + # Statistics, valid state - no issues + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) + await hass.async_block_till_done() + now = await one_hour_stats(now) + assert_issues(hass, {}) + + # State update with invalid state class, statistics did not run again + _attributes = dict(attributes) + _attributes.pop("state_class") + hass.states.async_set( + "sensor.test", 12, attributes=_attributes, timestamp=now.timestamp() + ) + await hass.async_block_till_done() + assert_issues(hass, {}) + + # Let statistics run for one hour, expect issue + now = await one_hour_stats(now) + expected = { + "state_class_removed_sensor.test": { + "issue_type": "state_class_removed", + "statistic_id": "sensor.test", + } + } + assert_issues(hass, expected) async def async_record_meter_states( @@ -5174,3 +5444,62 @@ async def test_exclude_attributes(hass: HomeAssistant) -> None: assert len(states) == 1 assert ATTR_OPTIONS not in states[0].attributes assert ATTR_FRIENDLY_NAME in states[0].attributes + + +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.test.issues..title", + "component.test.issues..description", + "component.sensor.issues..title", + "component.sensor.issues..description", + ] + ], +) +async def test_clean_up_repairs( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test cleaning up repairs.""" + await async_setup_component(hass, "sensor", {}) + issue_registry = ir.async_get(hass) + client = await hass_ws_client() + + # Create some issues + def create_issue(domain: str, issue_id: str, data: dict | None) -> None: + ir.async_create_issue( + hass, + domain, + issue_id, + data=data, + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key="", + ) + + create_issue("test", "test_issue", None) + create_issue(DOMAIN, "test_issue_1", None) + create_issue(DOMAIN, "test_issue_2", {"issue_type": "another_issue"}) + create_issue(DOMAIN, "test_issue_3", {"issue_type": "state_class_removed"}) + create_issue(DOMAIN, "test_issue_4", {"issue_type": "units_changed"}) + + # Check the issues + assert set(issue_registry.issues) == { + ("test", "test_issue"), + ("sensor", "test_issue_1"), + 
("sensor", "test_issue_2"), + ("sensor", "test_issue_3"), + ("sensor", "test_issue_4"), + } + + # Request update of issues + await client.send_json_auto_id({"type": "recorder/update_statistics_issues"}) + response = await client.receive_json() + assert response["success"] + + # Check the issues + assert set(issue_registry.issues) == { + ("test", "test_issue"), + ("sensor", "test_issue_1"), + ("sensor", "test_issue_2"), + } diff --git a/tests/components/sensor/test_websocket_api.py b/tests/components/sensor/test_websocket_api.py index 6f4eeb252e2..b1dafa04c94 100644 --- a/tests/components/sensor/test_websocket_api.py +++ b/tests/components/sensor/test_websocket_api.py @@ -36,11 +36,13 @@ async def test_device_class_units( "ft/s", "in/d", "in/h", + "in/s", "km/h", "kn", "m/s", "mm/d", "mm/h", + "mm/s", "mph", ] } diff --git a/tests/components/sensoterra/__init__.py b/tests/components/sensoterra/__init__.py new file mode 100644 index 00000000000..f70fede6c09 --- /dev/null +++ b/tests/components/sensoterra/__init__.py @@ -0,0 +1 @@ +"""Tests for the Sensoterra integration.""" diff --git a/tests/components/sensoterra/conftest.py b/tests/components/sensoterra/conftest.py new file mode 100644 index 00000000000..0f6b7a3014b --- /dev/null +++ b/tests/components/sensoterra/conftest.py @@ -0,0 +1,32 @@ +"""Common fixtures for the Sensoterra tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from .const import API_TOKEN + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.sensoterra.async_setup_entry", + return_value=True, + ) as mock_entry: + yield mock_entry + + +@pytest.fixture +def mock_customer_api_client() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with ( + patch( + "homeassistant.components.sensoterra.config_flow.CustomerApi", + autospec=True, + ) as mock_client, + ): + mock = mock_client.return_value + mock.get_token.return_value = API_TOKEN + yield mock diff --git a/tests/components/sensoterra/const.py b/tests/components/sensoterra/const.py new file mode 100644 index 00000000000..cc80610645d --- /dev/null +++ b/tests/components/sensoterra/const.py @@ -0,0 +1,6 @@ +"""Constants for the test Sensoterra integration.""" + +API_TOKEN = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE4NTYzMDQwMDAsInN1YiI6IjM5In0.yxdXXlc1DqopqDRHfAVzFrMqZJl6nKLpu1dV8alHvVY" +API_EMAIL = "test-email@example.com" +API_PASSWORD = "test-password" +HASS_UUID = "phony-unique-id" diff --git a/tests/components/sensoterra/test_config_flow.py b/tests/components/sensoterra/test_config_flow.py new file mode 100644 index 00000000000..20921406883 --- /dev/null +++ b/tests/components/sensoterra/test_config_flow.py @@ -0,0 +1,124 @@ +"""Test the Sensoterra config flow.""" + +from unittest.mock import AsyncMock + +from jwt import DecodeError +import pytest +from sensoterra.customerapi import InvalidAuth as StInvalidAuth, Timeout as StTimeout + +from homeassistant.components.sensoterra.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import API_EMAIL, API_PASSWORD, API_TOKEN, HASS_UUID + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, + mock_customer_api_client: AsyncMock, + mock_setup_entry: 
AsyncMock, +) -> None: + """Test we can finish a config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + hass.data["core.uuid"] = HASS_UUID + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: API_EMAIL, + CONF_PASSWORD: API_PASSWORD, + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == API_EMAIL + assert result["data"] == { + CONF_TOKEN: API_TOKEN, + CONF_EMAIL: API_EMAIL, + } + + assert len(mock_customer_api_client.mock_calls) == 1 + + +async def test_form_unique_id( + hass: HomeAssistant, mock_customer_api_client: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + hass.data["core.uuid"] = HASS_UUID + + entry = MockConfigEntry(unique_id="39", domain=DOMAIN) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: API_EMAIL, + CONF_PASSWORD: API_PASSWORD, + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert len(mock_customer_api_client.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (StTimeout, "cannot_connect"), + (StInvalidAuth("Invalid credentials"), "invalid_auth"), + (DecodeError("Bad API token"), "invalid_access_token"), + ], +) +async def test_form_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_customer_api_client: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test we handle config form exceptions.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + hass.data["core.uuid"] = HASS_UUID + + mock_customer_api_client.get_token.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: API_EMAIL, + CONF_PASSWORD: API_PASSWORD, + }, + ) + assert result["errors"] == {"base": error} + assert result["type"] is FlowResultType.FORM + + mock_customer_api_client.get_token.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: API_EMAIL, + CONF_PASSWORD: API_PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == API_EMAIL + assert result["data"] == { + CONF_TOKEN: API_TOKEN, + CONF_EMAIL: API_EMAIL, + } + assert len(mock_customer_api_client.mock_calls) == 2 diff --git a/tests/components/seventeentrack/conftest.py b/tests/components/seventeentrack/conftest.py index e2493319b69..0d02a7ab5f1 100644 --- a/tests/components/seventeentrack/conftest.py +++ b/tests/components/seventeentrack/conftest.py @@ -40,6 +40,11 @@ NEW_SUMMARY_DATA = { "Returned": 1, } +ARCHIVE_PACKAGE_NUMBER = "123" +CONFIG_ENTRY_ID_KEY = "config_entry_id" +PACKAGE_TRACKING_NUMBER_KEY = "package_tracking_number" +PACKAGE_STATE_KEY = "package_state" + VALID_CONFIG = { CONF_USERNAME: "test", CONF_PASSWORD: "test", diff --git a/tests/components/seventeentrack/snapshots/test_services.ambr b/tests/components/seventeentrack/snapshots/test_services.ambr index 202c5a3d667..e172a2de594 100644 --- a/tests/components/seventeentrack/snapshots/test_services.ambr +++ b/tests/components/seventeentrack/snapshots/test_services.ambr @@ -10,7 +10,7 @@ 'origin_country': 'Belgium', 
'package_type': 'Registered Parcel', 'status': 'Expired', - 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'timestamp': '2020-08-10T10:32:00+00:00', 'tracking_info_language': 'Unknown', 'tracking_number': '123', }), @@ -22,7 +22,7 @@ 'origin_country': 'Belgium', 'package_type': 'Registered Parcel', 'status': 'In Transit', - 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'timestamp': '2020-08-10T10:32:00+00:00', 'tracking_info_language': 'Unknown', 'tracking_number': '456', }), @@ -34,7 +34,7 @@ 'origin_country': 'Belgium', 'package_type': 'Registered Parcel', 'status': 'Delivered', - 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'timestamp': '2020-08-10T10:32:00+00:00', 'tracking_info_language': 'Unknown', 'tracking_number': '789', }), @@ -52,7 +52,7 @@ 'origin_country': 'Belgium', 'package_type': 'Registered Parcel', 'status': 'In Transit', - 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'timestamp': '2020-08-10T10:32:00+00:00', 'tracking_info_language': 'Unknown', 'tracking_number': '456', }), @@ -64,7 +64,36 @@ 'origin_country': 'Belgium', 'package_type': 'Registered Parcel', 'status': 'Delivered', - 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'timestamp': '2020-08-10T10:32:00+00:00', + 'tracking_info_language': 'Unknown', + 'tracking_number': '789', + }), + ]), + }) +# --- +# name: test_packages_with_none_timestamp + dict({ + 'packages': list([ + dict({ + 'destination_country': 'Belgium', + 'friendly_name': 'friendly name 1', + 'info_text': 'info text 1', + 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', + 'status': 'In Transit', + 'tracking_info_language': 'Unknown', + 'tracking_number': '456', + }), + dict({ + 'destination_country': 'Belgium', + 'friendly_name': 'friendly name 2', + 'info_text': 'info text 1', + 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', + 'status': 'Delivered', + 'timestamp': '2020-08-10T10:32:00+00:00', 'tracking_info_language': 'Unknown', 'tracking_number': '789', }), diff --git a/tests/components/seventeentrack/test_config_flow.py b/tests/components/seventeentrack/test_config_flow.py index 0a7c4ca918c..9ad592419c3 100644 --- a/tests/components/seventeentrack/test_config_flow.py +++ b/tests/components/seventeentrack/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.components.seventeentrack.const import ( CONF_SHOW_ARCHIVED, CONF_SHOW_DELIVERED, ) -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -105,55 +105,6 @@ async def test_flow_fails( } -async def test_import_flow(hass: HomeAssistant, mock_seventeentrack: AsyncMock) -> None: - """Test the import configuration flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=VALID_CONFIG_OLD, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "someemail@gmail.com" - assert result["data"][CONF_USERNAME] == "someemail@gmail.com" - assert result["data"][CONF_PASSWORD] == "edc3eee7330e4fdda04489e3fbc283d0" - - -@pytest.mark.parametrize( - ("return_value", "side_effect", "error"), - [ - ( - False, - None, - "invalid_auth", - ), - ( - True, - SeventeenTrackError(), - "cannot_connect", - ), - ], -) 
-async def test_import_flow_cannot_connect_error( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - return_value, - side_effect, - error, -) -> None: - """Test the import configuration flow with error.""" - mock_seventeentrack.return_value.profile.login.return_value = return_value - mock_seventeentrack.return_value.profile.login.side_effect = side_effect - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=VALID_CONFIG_OLD, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == error - - async def test_option_flow(hass: HomeAssistant, mock_seventeentrack: AsyncMock) -> None: """Test option flow.""" entry = MockConfigEntry( @@ -181,28 +132,3 @@ async def test_option_flow(hass: HomeAssistant, mock_seventeentrack: AsyncMock) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"][CONF_SHOW_ARCHIVED] assert not result["data"][CONF_SHOW_DELIVERED] - - -async def test_import_flow_already_configured( - hass: HomeAssistant, mock_seventeentrack: AsyncMock -) -> None: - """Test the import configuration flow with error.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=VALID_CONFIG, - unique_id=ACCOUNT_ID, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - result_aborted = await hass.config_entries.flow.async_configure( - result["flow_id"], - VALID_CONFIG, - ) - await hass.async_block_till_done() - - assert result_aborted["type"] is FlowResultType.ABORT - assert result_aborted["reason"] == "already_configured" diff --git a/tests/components/seventeentrack/test_repairs.py b/tests/components/seventeentrack/test_repairs.py index 0f697c1ad49..44d1f078432 100644 --- a/tests/components/seventeentrack/test_repairs.py +++ b/tests/components/seventeentrack/test_repairs.py @@ -1,12 +1,10 @@ """Tests for the seventeentrack repair flow.""" -from http import HTTPStatus from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN -from homeassistant.components.repairs.websocket_api import RepairsFlowIndexView from homeassistant.components.seventeentrack import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir @@ -16,6 +14,7 @@ from . 
import goto_future, init_integration from .conftest import DEFAULT_SUMMARY_LENGTH, get_package from tests.common import MockConfigEntry +from tests.components.repairs import process_repair_fix_flow, start_repair_fix_flow from tests.typing import ClientSessionGenerator @@ -49,13 +48,7 @@ async def test_repair( client = await hass_client() - resp = await client.post( - RepairsFlowIndexView.url, - json={"handler": DOMAIN, "issue_id": repair_issue.issue_id}, - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, repair_issue.issue_id) flow_id = data["flow_id"] assert data == { @@ -70,9 +63,7 @@ async def test_repair( "preview": None, } - resp = await client.post(RepairsFlowIndexView.url + f"/{flow_id}") - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) flow_id = data["flow_id"] assert data == { diff --git a/tests/components/seventeentrack/test_sensor.py b/tests/components/seventeentrack/test_sensor.py index ca16fc64833..a631996b4eb 100644 --- a/tests/components/seventeentrack/test_sensor.py +++ b/tests/components/seventeentrack/test_sensor.py @@ -8,7 +8,6 @@ from freezegun.api import FrozenDateTimeFactory from pyseventeentrack.errors import SeventeenTrackError from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from . import goto_future, init_integration @@ -306,15 +305,3 @@ async def test_non_valid_platform_config( assert await async_setup_component(hass, "sensor", VALID_PLATFORM_CONFIG_FULL) await hass.async_block_till_done() assert len(hass.states.async_entity_ids()) == 0 - - -async def test_full_valid_platform_config( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - issue_registry: ir.IssueRegistry, -) -> None: - """Ensure everything starts correctly.""" - assert await async_setup_component(hass, "sensor", VALID_PLATFORM_CONFIG_FULL) - await hass.async_block_till_done() - assert len(hass.states.async_entity_ids()) == len(DEFAULT_SUMMARY.keys()) - assert len(issue_registry.issues) == 2 diff --git a/tests/components/seventeentrack/test_services.py b/tests/components/seventeentrack/test_services.py index 4347189a5c0..bbd5644ad63 100644 --- a/tests/components/seventeentrack/test_services.py +++ b/tests/components/seventeentrack/test_services.py @@ -5,14 +5,24 @@ from unittest.mock import AsyncMock import pytest from syrupy import SnapshotAssertion -from homeassistant.components.seventeentrack import DOMAIN, SERVICE_GET_PACKAGES +from homeassistant.components.seventeentrack import DOMAIN +from homeassistant.components.seventeentrack.const import ( + SERVICE_ARCHIVE_PACKAGE, + SERVICE_GET_PACKAGES, +) from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from . 
import init_integration -from .conftest import get_package +from .conftest import ( + ARCHIVE_PACKAGE_NUMBER, + CONFIG_ENTRY_ID_KEY, + PACKAGE_STATE_KEY, + PACKAGE_TRACKING_NUMBER_KEY, + get_package, +) from tests.common import MockConfigEntry @@ -30,8 +40,8 @@ async def test_get_packages_from_list( DOMAIN, SERVICE_GET_PACKAGES, { - "config_entry_id": mock_config_entry.entry_id, - "package_state": ["in_transit", "delivered"], + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, + PACKAGE_STATE_KEY: ["in_transit", "delivered"], }, blocking=True, return_response=True, @@ -53,7 +63,7 @@ async def test_get_all_packages( DOMAIN, SERVICE_GET_PACKAGES, { - "config_entry_id": mock_config_entry.entry_id, + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, }, blocking=True, return_response=True, @@ -76,7 +86,7 @@ async def test_service_called_with_unloaded_entry( DOMAIN, SERVICE_GET_PACKAGES, { - "config_entry_id": mock_config_entry.entry_id, + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, }, blocking=True, return_response=True, @@ -110,13 +120,58 @@ async def test_service_called_with_non_17track_device( DOMAIN, SERVICE_GET_PACKAGES, { - "config_entry_id": device_entry.id, + CONFIG_ENTRY_ID_KEY: device_entry.id, }, blocking=True, return_response=True, ) +async def test_archive_package( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure service archives package.""" + await _mock_packages(mock_seventeentrack) + await init_integration(hass, mock_config_entry) + await hass.services.async_call( + DOMAIN, + SERVICE_ARCHIVE_PACKAGE, + { + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, + PACKAGE_TRACKING_NUMBER_KEY: ARCHIVE_PACKAGE_NUMBER, + }, + blocking=True, + ) + mock_seventeentrack.return_value.profile.archive_package.assert_called_once_with( + ARCHIVE_PACKAGE_NUMBER + ) + + +async def test_packages_with_none_timestamp( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure service returns all packages when non provided.""" + await _mock_invalid_packages(mock_seventeentrack) + await init_integration(hass, mock_config_entry) + service_response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PACKAGES, + { + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + + assert service_response == snapshot + + async def _mock_packages(mock_seventeentrack): package1 = get_package(status=10) package2 = get_package( @@ -134,3 +189,19 @@ async def _mock_packages(mock_seventeentrack): package2, package3, ] + + +async def _mock_invalid_packages(mock_seventeentrack): + package1 = get_package( + status=10, + timestamp=None, + ) + package2 = get_package( + tracking_number="789", + friendly_name="friendly name 2", + status=40, + ) + mock_seventeentrack.return_value.profile.packages.return_value = [ + package1, + package2, + ] diff --git a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr index 0023f65c90e..15308fad91f 100644 --- a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr @@ -22,7 +22,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': None, + 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , @@ -150,7 +150,7 @@ }), 'manufacturer': None, 'model': 
'NB6VAC-FXC-r0', - 'model_id': None, + 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/snapshots/test_button.ambr b/tests/components/sfr_box/snapshots/test_button.ambr index df097b58c51..67b2198fd2b 100644 --- a/tests/components/sfr_box/snapshots/test_button.ambr +++ b/tests/components/sfr_box/snapshots/test_button.ambr @@ -22,7 +22,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': None, + 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/snapshots/test_sensor.ambr b/tests/components/sfr_box/snapshots/test_sensor.ambr index 46b22448d25..7645a4ad8bf 100644 --- a/tests/components/sfr_box/snapshots/test_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_sensor.ambr @@ -22,7 +22,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': None, + 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/test_config_flow.py b/tests/components/sfr_box/test_config_flow.py index 08c12e9817b..6bf610de661 100644 --- a/tests/components/sfr_box/test_config_flow.py +++ b/tests/components/sfr_box/test_config_flow.py @@ -207,15 +207,7 @@ async def test_reauth(hass: HomeAssistant, config_entry_with_auth: ConfigEntry) """Test the start of the config flow.""" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry_with_auth.entry_id, - "unique_id": config_entry_with_auth.unique_id, - }, - data=config_entry_with_auth.data, - ) + result = await config_entry_with_auth.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} diff --git a/tests/components/sharkiq/test_config_flow.py b/tests/components/sharkiq/test_config_flow.py index cf75bff1686..22a77678c0d 100644 --- a/tests/components/sharkiq/test_config_flow.py +++ b/tests/components/sharkiq/test_config_flow.py @@ -96,18 +96,18 @@ async def test_form_error(hass: HomeAssistant, exc: Exception, base_error: str) async def test_reauth_success(hass: HomeAssistant) -> None: """Test reauth flow.""" - with patch("sharkiq.AylaApi.async_sign_in", return_value=True): - mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) - mock_config.add_to_hass(hass) + mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) + mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, - data=CONFIG, + result = await mock_config.start_reauth_flow(hass) + + with patch("sharkiq.AylaApi.async_sign_in", return_value=True): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=CONFIG ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" @pytest.mark.parametrize( @@ -127,13 +127,15 @@ async def test_reauth( msg: str, ) -> None: """Test reauth failures.""" - with patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, - data=CONFIG, - ) + 
mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + + with patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=CONFIG + ) msg_value = result[msg_field] if msg_field == "errors": msg_value = msg_value.get("base") diff --git a/tests/components/sharkiq/test_vacuum.py b/tests/components/sharkiq/test_vacuum.py index 3748cfd6dc4..bfb2176026b 100644 --- a/tests/components/sharkiq/test_vacuum.py +++ b/tests/components/sharkiq/test_vacuum.py @@ -35,10 +35,7 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.const import ( @@ -160,7 +157,7 @@ async def test_simple_properties( assert entity assert state - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert entity.unique_id == "AC000Wxxxxxxxxx" @@ -189,10 +186,10 @@ async def test_initial_attributes( @pytest.mark.parametrize( ("service", "target_state"), [ - (SERVICE_STOP, STATE_IDLE), - (SERVICE_PAUSE, STATE_PAUSED), - (SERVICE_RETURN_TO_BASE, STATE_RETURNING), - (SERVICE_START, STATE_CLEANING), + (SERVICE_STOP, VacuumActivity.IDLE), + (SERVICE_PAUSE, VacuumActivity.PAUSED), + (SERVICE_RETURN_TO_BASE, VacuumActivity.RETURNING), + (SERVICE_START, VacuumActivity.CLEANING), ], ) async def test_cleaning_states( diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index a983cbbcda9..d453d25698c 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -226,9 +226,9 @@ MOCK_STATUS_COAP = { "update": { "status": "pending", "has_update": True, - "beta_version": "some_beta_version", - "new_version": "some_new_version", - "old_version": "some_old_version", + "beta_version": "20231107-162609/v1.14.1-rc1-g0617c15", + "new_version": "20230913-111730/v1.14.0-gcb84623", + "old_version": "20230913-111730/v1.14.0-gcb84623", }, "uptime": 5 * REST_SENSORS_UPDATE_INTERVAL, "wifi_sta": {"rssi": -64}, diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index 1156d7e0ed5..aeeeca30edd 100644 --- a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -13,8 +13,6 @@ from homeassistant.components.climate import ( ATTR_HVAC_ACTION, ATTR_HVAC_MODE, ATTR_PRESET_MODE, - ATTR_TARGET_TEMP_HIGH, - ATTR_TARGET_TEMP_LOW, DOMAIN as CLIMATE_DOMAIN, PRESET_NONE, SERVICE_SET_HVAC_MODE, @@ -138,19 +136,6 @@ async def test_climate_set_temperature( assert state.state == HVACMode.OFF assert state.attributes[ATTR_TEMPERATURE] == 4 - # Test set temperature without target temperature - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_TARGET_TEMP_LOW: 20, - ATTR_TARGET_TEMP_HIGH: 30, - }, - blocking=True, - ) - mock_block_device.http_request.assert_not_called() - # Test set temperature await hass.services.async_call( CLIMATE_DOMAIN, @@ -609,23 +594,25 @@ async def test_rpc_climate_hvac_mode( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test climate hvac mode service.""" + entity_id = "climate.test_name_thermostat_0" + await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(ENTITY_ID) + state = hass.states.get(entity_id) assert 
state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 23 assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING assert state.attributes[ATTR_CURRENT_HUMIDITY] == 44.4 - entry = entity_registry.async_get(ENTITY_ID) + entry = entity_registry.async_get(entity_id) assert entry assert entry.unique_id == "123456789ABC-thermostat:0" monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "output", False) mock_rpc_device.mock_update() - state = hass.states.get(ENTITY_ID) + state = hass.states.get(entity_id) assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE assert state.attributes[ATTR_CURRENT_HUMIDITY] == 44.4 @@ -633,7 +620,7 @@ async def test_rpc_climate_hvac_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, + {ATTR_ENTITY_ID: entity_id, ATTR_HVAC_MODE: HVACMode.OFF}, blocking=True, ) mock_rpc_device.mock_update() @@ -641,7 +628,7 @@ async def test_rpc_climate_hvac_mode( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "enable": False}} ) - state = hass.states.get(ENTITY_ID) + state = hass.states.get(entity_id) assert state.state == HVACMode.OFF @@ -652,20 +639,21 @@ async def test_rpc_climate_without_humidity( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test climate entity without the humidity value.""" + entity_id = "climate.test_name_thermostat_0" new_status = deepcopy(mock_rpc_device.status) new_status.pop("humidity:0") monkeypatch.setattr(mock_rpc_device, "status", new_status) await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(ENTITY_ID) + state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 23 assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING assert ATTR_CURRENT_HUMIDITY not in state.attributes - entry = entity_registry.async_get(ENTITY_ID) + entry = entity_registry.async_get(entity_id) assert entry assert entry.unique_id == "123456789ABC-thermostat:0" @@ -674,29 +662,18 @@ async def test_rpc_climate_set_temperature( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate set target temperature.""" + entity_id = "climate.test_name_thermostat_0" + await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(ENTITY_ID) + state = hass.states.get(entity_id) assert state.attributes[ATTR_TEMPERATURE] == 23 - # test set temperature without target temperature - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_TARGET_TEMP_LOW: 20, - ATTR_TARGET_TEMP_HIGH: 30, - }, - blocking=True, - ) - mock_rpc_device.call_rpc.assert_not_called() - monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "target_C", 28) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 28}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 28}, blocking=True, ) mock_rpc_device.mock_update() @@ -704,7 +681,7 @@ async def test_rpc_climate_set_temperature( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "target_C": 28}} ) - state = hass.states.get(ENTITY_ID) + state = hass.states.get(entity_id) assert state.attributes[ATTR_TEMPERATURE] == 28 @@ -712,13 +689,14 @@ async def 
test_rpc_climate_hvac_mode_cool( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate with hvac mode cooling.""" + entity_id = "climate.test_name_thermostat_0" new_config = deepcopy(mock_rpc_device.config) new_config["thermostat:0"]["type"] = "cooling" monkeypatch.setattr(mock_rpc_device, "config", new_config) await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(ENTITY_ID) + state = hass.states.get(entity_id) assert state.state == HVACMode.COOL assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING @@ -730,7 +708,7 @@ async def test_wall_display_thermostat_mode( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test Wall Display in thermostat mode.""" - climate_entity_id = "climate.test_name" + climate_entity_id = "climate.test_name_thermostat_0" switch_entity_id = "switch.test_switch_0" await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) @@ -757,7 +735,7 @@ async def test_wall_display_thermostat_mode_external_actuator( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test Wall Display in thermostat mode with an external actuator.""" - climate_entity_id = "climate.test_name" + climate_entity_id = "climate.test_name_thermostat_0" switch_entity_id = "switch.test_switch_0" new_status = deepcopy(mock_rpc_device.status) diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index c0c089f469a..d9945706182 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -16,14 +16,13 @@ import pytest from homeassistant import config_entries from homeassistant.components import zeroconf -from homeassistant.components.shelly import config_flow +from homeassistant.components.shelly import MacAddressMismatchError, config_flow from homeassistant.components.shelly.const import ( CONF_BLE_SCANNER_MODE, DOMAIN, BLEScannerMode, ) from homeassistant.components.shelly.coordinator import ENTRY_RELOAD_COOLDOWN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -332,6 +331,7 @@ async def test_form_missing_model_key_zeroconf( ("exc", "base_error"), [ (DeviceConnectionError, "cannot_connect"), + (MacAddressMismatchError, "mac_address_mismatch"), (ValueError, "unknown"), ], ) @@ -437,6 +437,7 @@ async def test_user_setup_ignored_device( [ (InvalidAuthError, "invalid_auth"), (DeviceConnectionError, "cannot_connect"), + (MacAddressMismatchError, "mac_address_mismatch"), (ValueError, "unknown"), ], ) @@ -474,6 +475,7 @@ async def test_form_auth_errors_test_connection_gen1( [ (DeviceConnectionError, "cannot_connect"), (InvalidAuthError, "invalid_auth"), + (MacAddressMismatchError, "mac_address_mismatch"), (ValueError, "unknown"), ], ) @@ -819,20 +821,15 @@ async def test_reauth_successful( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is 
FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, @@ -850,14 +847,28 @@ async def test_reauth_successful( (3, {"password": "test2 password"}), ], ) +@pytest.mark.parametrize( + ("exc", "abort_reason"), + [ + (DeviceConnectionError, "reauth_unsuccessful"), + (MacAddressMismatchError, "mac_address_mismatch"), + ], +) async def test_reauth_unsuccessful( - hass: HomeAssistant, gen: int, user_input: dict[str, str] + hass: HomeAssistant, + gen: int, + user_input: dict[str, str], + exc: Exception, + abort_reason: str, ) -> None: """Test reauthentication flow failed.""" entry = MockConfigEntry( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -865,30 +876,17 @@ async def test_reauth_unsuccessful( return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, ), patch( - "aioshelly.block_device.BlockDevice.create", - new=AsyncMock(side_effect=InvalidAuthError), - ), - patch( - "aioshelly.rpc_device.RpcDevice.create", - new=AsyncMock(side_effect=InvalidAuthError), + "aioshelly.block_device.BlockDevice.create", new=AsyncMock(side_effect=exc) ), + patch("aioshelly.rpc_device.RpcDevice.create", new=AsyncMock(side_effect=exc)), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_unsuccessful" + assert result["reason"] == abort_reason async def test_reauth_get_info_error(hass: HomeAssistant) -> None: @@ -897,20 +895,14 @@ async def test_reauth_get_info_error(hass: HomeAssistant) -> None: domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": 2} ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", side_effect=DeviceConnectionError, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"password": "test2 password"}, @@ -1379,17 +1371,10 @@ async def test_reconfigure_successful( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with patch( "homeassistant.components.shelly.config_flow.get_info", @@ -1418,17 +1403,10 @@ async def test_reconfigure_unsuccessful( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, 
- "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with patch( "homeassistant.components.shelly.config_flow.get_info", @@ -1462,17 +1440,10 @@ async def test_reconfigure_with_exception( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with patch("homeassistant.components.shelly.config_flow.get_info", side_effect=exc): result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index bb9694cf9b4..090c5e7207f 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -10,6 +10,7 @@ import pytest from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.shelly import MacAddressMismatchError from homeassistant.components.shelly.const import ( ATTR_CHANNEL, ATTR_CLICK_TYPE, @@ -254,11 +255,13 @@ async def test_block_polling_connection_error( assert get_entity_state(hass, "switch.test_name_channel_1") == STATE_UNAVAILABLE +@pytest.mark.parametrize("exc", [DeviceConnectionError, MacAddressMismatchError]) async def test_block_rest_update_connection_error( hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch, + exc: Exception, ) -> None: """Test block REST update connection error.""" entity_id = register_entity(hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud") @@ -269,11 +272,7 @@ async def test_block_rest_update_connection_error( await mock_rest_update(hass, freezer) assert get_entity_state(hass, entity_id) == STATE_ON - monkeypatch.setattr( - mock_block_device, - "update_shelly", - AsyncMock(side_effect=DeviceConnectionError), - ) + monkeypatch.setattr(mock_block_device, "update_shelly", AsyncMock(side_effect=exc)) await mock_rest_update(hass, freezer) assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE @@ -678,7 +677,7 @@ async def test_rpc_polling_auth_error( monkeypatch.setattr( mock_rpc_device, - "update_status", + "poll", AsyncMock( side_effect=InvalidAuthError, ), @@ -702,11 +701,13 @@ async def test_rpc_polling_auth_error( assert flow["context"].get("entry_id") == entry.entry_id +@pytest.mark.parametrize("exc", [DeviceConnectionError, MacAddressMismatchError]) async def test_rpc_reconnect_error( hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, + exc: Exception, ) -> None: """Test RPC reconnect error.""" await init_integration(hass, 2) @@ -714,13 +715,7 @@ async def test_rpc_reconnect_error( assert get_entity_state(hass, "switch.test_switch_0") == STATE_ON monkeypatch.setattr(mock_rpc_device, "connected", False) - monkeypatch.setattr( - mock_rpc_device, - "initialize", - AsyncMock( - side_effect=DeviceConnectionError, - ), - ) + monkeypatch.setattr(mock_rpc_device, "initialize", AsyncMock(side_effect=exc)) # Move time to generate reconnect 
freezer.tick(timedelta(seconds=RPC_RECONNECT_INTERVAL)) @@ -768,7 +763,7 @@ async def test_rpc_polling_connection_error( monkeypatch.setattr( mock_rpc_device, - "update_status", + "poll", AsyncMock( side_effect=DeviceConnectionError, ), diff --git a/tests/components/shelly/test_cover.py b/tests/components/shelly/test_cover.py index cd5efb76cfe..40a364fd435 100644 --- a/tests/components/shelly/test_cover.py +++ b/tests/components/shelly/test_cover.py @@ -1,21 +1,25 @@ """Tests for Shelly cover platform.""" +from copy import deepcopy from unittest.mock import Mock import pytest from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, + ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, + ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, + SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, + SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, + SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + SERVICE_STOP_COVER_TILT, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -52,7 +56,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING await hass.services.async_call( COVER_DOMAIN, @@ -60,7 +64,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await hass.services.async_call( COVER_DOMAIN, @@ -68,7 +72,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED entry = entity_registry.async_get(entity_id) assert entry @@ -82,11 +86,11 @@ async def test_block_device_update( monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 0) await init_integration(hass, 1) - assert hass.states.get("cover.test_name").state == STATE_CLOSED + assert hass.states.get("cover.test_name").state == CoverState.CLOSED monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 100) mock_block_device.mock_update() - assert hass.states.get("cover.test_name").state == STATE_OPEN + assert hass.states.get("cover.test_name").state == CoverState.OPEN async def test_block_device_no_roller_blocks( @@ -127,7 +131,7 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING mutate_rpc_device_status( monkeypatch, mock_rpc_device, "cover:0", "state", "closing" @@ -139,7 +143,7 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await hass.services.async_call( @@ -149,7 +153,7 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED entry = entity_registry.async_get(entity_id) assert entry @@ -171,11 +175,11 @@ async def test_rpc_device_update( """Test RPC device 
update.""" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == STATE_CLOSED + assert hass.states.get("cover.test_cover_0").state == CoverState.CLOSED mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "open") mock_rpc_device.mock_update() - assert hass.states.get("cover.test_cover_0").state == STATE_OPEN + assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN async def test_rpc_device_no_position_control( @@ -186,4 +190,73 @@ async def test_rpc_device_no_position_control( monkeypatch, mock_rpc_device, "cover:0", "pos_control", False ) await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == STATE_OPEN + assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN + + +async def test_rpc_cover_tilt( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + entity_registry: EntityRegistry, +) -> None: + """Test RPC cover that supports tilt.""" + entity_id = "cover.test_cover_0" + + config = deepcopy(mock_rpc_device.config) + config["cover:0"]["slat"] = {"enable": True} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["cover:0"]["slat_pos"] = 0 + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-cover:0" + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_TILT_POSITION, + {ATTR_ENTITY_ID: entity_id, ATTR_TILT_POSITION: 50}, + blocking=True, + ) + mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 50) + mock_rpc_device.mock_update() + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 100) + mock_rpc_device.mock_update() + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER_TILT, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 10) + mock_rpc_device.mock_update() + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 10 diff --git a/tests/components/shelly/test_diagnostics.py b/tests/components/shelly/test_diagnostics.py index 395c7ccfeaf..f576524ba60 100644 --- a/tests/components/shelly/test_diagnostics.py +++ b/tests/components/shelly/test_diagnostics.py @@ -45,7 +45,7 @@ async def test_block_config_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, entry) assert result == { - "entry": entry_dict, + "entry": entry_dict | {"discovery_keys": {}}, "bluetooth": "not initialized", "device_info": { "name": "Test name", @@ -105,7 +105,7 @@ async def test_rpc_config_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, entry) assert result == { 
- "entry": entry_dict, + "entry": entry_dict | {"discovery_keys": {}}, "bluetooth": { "scanner": { "connectable": False, diff --git a/tests/components/shelly/test_init.py b/tests/components/shelly/test_init.py index 46698c23c0a..b5516485501 100644 --- a/tests/components/shelly/test_init.py +++ b/tests/components/shelly/test_init.py @@ -310,6 +310,52 @@ async def test_sleeping_rpc_device_online_new_firmware( assert entry.data["sleep_period"] == 1500 +async def test_sleeping_rpc_device_online_during_setup( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test sleeping device Gen2 woke up by user during setup.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + await init_integration(hass, 2, sleep_period=1000) + await hass.async_block_till_done(wait_background_tasks=True) + + assert "will resume when device is online" in caplog.text + assert "is online (source: setup)" in caplog.text + assert hass.states.get("sensor.test_name_temperature") is not None + + +async def test_sleeping_rpc_device_offline_during_setup( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test sleeping device Gen2 woke up by user during setup.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + monkeypatch.setattr( + mock_rpc_device, "initialize", AsyncMock(side_effect=DeviceConnectionError) + ) + + # Init integration, should fail since device is offline + await init_integration(hass, 2, sleep_period=1000) + await hass.async_block_till_done(wait_background_tasks=True) + + assert "will resume when device is online" in caplog.text + assert "is online (source: setup)" in caplog.text + assert hass.states.get("sensor.test_name_temperature") is None + + # Create an online event and verify that device is init successfully + monkeypatch.setattr(mock_rpc_device, "initialize", AsyncMock()) + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + assert hass.states.get("sensor.test_name_temperature") is not None + + @pytest.mark.parametrize( ("gen", "entity_id"), [ diff --git a/tests/components/shelly/test_light.py b/tests/components/shelly/test_light.py index 2c464a8c39c..482821aa966 100644 --- a/tests/components/shelly/test_light.py +++ b/tests/components/shelly/test_light.py @@ -1,5 +1,6 @@ """Tests for Shelly light platform.""" +from copy import deepcopy from unittest.mock import AsyncMock, Mock from aioshelly.const import ( @@ -15,10 +16,13 @@ import pytest from homeassistant.components.light import ( ATTR_BRIGHTNESS, + ATTR_BRIGHTNESS_PCT, ATTR_COLOR_MODE, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_SUPPORTED_COLOR_MODES, @@ -29,7 +33,6 @@ from homeassistant.components.light import ( ColorMode, LightEntityFeature, ) -from homeassistant.components.shelly.const import SHELLY_PLUS_RGBW_CHANNELS from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, @@ -37,13 +40,21 @@ from homeassistant.const import ( STATE_ON, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry -from . 
import get_entity, init_integration, mutate_rpc_device_status, register_entity +from . import ( + get_entity, + init_integration, + mutate_rpc_device_status, + register_device, + register_entity, +) from .conftest import mock_white_light_set_state RELAY_BLOCK_ID = 0 LIGHT_BLOCK_ID = 2 +SHELLY_PLUS_RGBW_CHANNELS = 4 async def test_block_device_rgbw_bulb( @@ -682,21 +693,39 @@ async def test_rpc_rgbw_device_light_mode_remove_others( hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, + device_registry: DeviceRegistry, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test Shelly RPC RGBW device in light mode removes RGB/RGBW entities.""" - # register lights monkeypatch.delitem(mock_rpc_device.status, "rgb:0") monkeypatch.delitem(mock_rpc_device.status, "rgbw:0") - register_entity(hass, LIGHT_DOMAIN, "test_rgb_0", "rgb:0") - register_entity(hass, LIGHT_DOMAIN, "test_rgbw_0", "rgbw:0") + + # register rgb and rgbw lights + config_entry = await init_integration(hass, 2, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + register_entity( + hass, + LIGHT_DOMAIN, + "test_rgb_0", + "rgb:0", + config_entry, + device_id=device_entry.id, + ) + register_entity( + hass, + LIGHT_DOMAIN, + "test_rgbw_0", + "rgbw:0", + config_entry, + device_id=device_entry.id, + ) # verify RGB & RGBW entities created assert get_entity(hass, LIGHT_DOMAIN, "rgb:0") is not None assert get_entity(hass, LIGHT_DOMAIN, "rgbw:0") is not None - # init to remove RGB & RGBW - await init_integration(hass, 2) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() # verify we have 4 lights for i in range(SHELLY_PLUS_RGBW_CHANNELS): @@ -722,27 +751,45 @@ async def test_rpc_rgbw_device_rgb_w_modes_remove_others( hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, + device_registry: DeviceRegistry, monkeypatch: pytest.MonkeyPatch, active_mode: str, removed_mode: str, ) -> None: """Test Shelly RPC RGBW device in RGB/W modes other lights.""" removed_key = f"{removed_mode}:0" + config_entry = await init_integration(hass, 2, skip_setup=True) + device_entry = register_device(device_registry, config_entry) # register lights for i in range(SHELLY_PLUS_RGBW_CHANNELS): monkeypatch.delitem(mock_rpc_device.status, f"light:{i}") entity_id = f"light.test_light_{i}" - register_entity(hass, LIGHT_DOMAIN, entity_id, f"light:{i}") + register_entity( + hass, + LIGHT_DOMAIN, + entity_id, + f"light:{i}", + config_entry, + device_id=device_entry.id, + ) monkeypatch.delitem(mock_rpc_device.status, f"{removed_mode}:0") - register_entity(hass, LIGHT_DOMAIN, f"test_{removed_key}", removed_key) + register_entity( + hass, + LIGHT_DOMAIN, + f"test_{removed_key}", + removed_key, + config_entry, + device_id=device_entry.id, + ) # verify lights entities created for i in range(SHELLY_PLUS_RGBW_CHANNELS): assert get_entity(hass, LIGHT_DOMAIN, f"light:{i}") is not None assert get_entity(hass, LIGHT_DOMAIN, removed_key) is not None - await init_integration(hass, 2) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() # verify we have RGB/w light entity_id = f"light.test_{active_mode}_0" @@ -755,3 +802,126 @@ async def test_rpc_rgbw_device_rgb_w_modes_remove_others( for i in range(SHELLY_PLUS_RGBW_CHANNELS): assert get_entity(hass, LIGHT_DOMAIN, f"light:{i}") is None assert get_entity(hass, LIGHT_DOMAIN, removed_key) is None + + +async def test_rpc_cct_light( + hass: HomeAssistant, + mock_rpc_device: Mock, 
+ entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test RPC CCT light.""" + entity_id = f"{LIGHT_DOMAIN}.test_name_cct_light_0" + + config = deepcopy(mock_rpc_device.config) + config["cct:0"] = {"id": 0, "name": None, "ct_range": [3333, 5555]} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["cct:0"] = {"id": 0, "output": False, "brightness": 77, "ct": 3666} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 2) + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-cct:0" + + # Turn off + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": False}) + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + + # Turn on + mock_rpc_device.call_rpc.reset_mock() + mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cct:0", "output", True) + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + mock_rpc_device.mock_update() + mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": True}) + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP + assert state.attributes[ATTR_BRIGHTNESS] == 196 # 77% of 255 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3666 + assert state.attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 3333 + assert state.attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 5555 + + # Turn on, brightness = 88 + mock_rpc_device.call_rpc.reset_mock() + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS_PCT: 88}, + blocking=True, + ) + + mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cct:0", "brightness", 88) + mock_rpc_device.mock_update() + + mock_rpc_device.call_rpc.assert_called_once_with( + "CCT.Set", {"id": 0, "on": True, "brightness": 88} + ) + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_BRIGHTNESS] == 224 # 88% of 255 + + # Turn on, color temp = 4444 K + mock_rpc_device.call_rpc.reset_mock() + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 4444}, + blocking=True, + ) + + mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cct:0", "ct", 4444) + + mock_rpc_device.mock_update() + + mock_rpc_device.call_rpc.assert_called_once_with( + "CCT.Set", {"id": 0, "on": True, "ct": 4444} + ) + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 4444 + + +async def test_rpc_remove_cct_light( + hass: HomeAssistant, + mock_rpc_device: Mock, + device_registry: DeviceRegistry, +) -> None: + """Test Shelly RPC remove orphaned CCT light entity.""" + # register CCT light entity + config_entry = await init_integration(hass, 2, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + register_entity( + hass, + LIGHT_DOMAIN, + "cct_light_0", + "cct:0", + config_entry, + device_id=device_entry.id, + ) + + # verify CCT light entity created + assert get_entity(hass, LIGHT_DOMAIN, "cct:0") is not None + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + # 
there is no cct:0 in the status, so the CCT light entity should be removed + assert get_entity(hass, LIGHT_DOMAIN, "cct:0") is None diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index ef8a609998a..18c3d874c55 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -729,14 +729,14 @@ async def test_rpc_analog_input_sensors( await init_integration(hass, 2) - entity_id = f"{SENSOR_DOMAIN}.test_name_analog_input" + entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" assert hass.states.get(entity_id).state == "89" entry = entity_registry.async_get(entity_id) assert entry assert entry.unique_id == "123456789ABC-input:1-analoginput" - entity_id = f"{SENSOR_DOMAIN}.test_name_analog_value" + entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" state = hass.states.get(entity_id) assert state assert state.state == "8.9" @@ -757,10 +757,10 @@ async def test_rpc_disabled_analog_input_sensors( await init_integration(hass, 2) - entity_id = f"{SENSOR_DOMAIN}.test_name_analog_input" + entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" assert hass.states.get(entity_id) is None - entity_id = f"{SENSOR_DOMAIN}.test_name_analog_value" + entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" assert hass.states.get(entity_id) is None @@ -777,10 +777,10 @@ async def test_rpc_disabled_xpercent( ) await init_integration(hass, 2) - entity_id = f"{SENSOR_DOMAIN}.test_name_analog_input" + entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" assert hass.states.get(entity_id).state == "89" - entity_id = f"{SENSOR_DOMAIN}.test_name_analog_value" + entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" assert hass.states.get(entity_id) is None @@ -1293,7 +1293,7 @@ async def test_rpc_rgbw_sensors( await init_integration(hass, 2) - entity_id = "sensor.test_name_power" + entity_id = f"sensor.test_name_{light_type}_light_0_power" state = hass.states.get(entity_id) assert state @@ -1304,7 +1304,7 @@ async def test_rpc_rgbw_sensors( assert entry assert entry.unique_id == f"123456789ABC-{light_type}:0-power_{light_type}" - entity_id = "sensor.test_name_energy" + entity_id = f"sensor.test_name_{light_type}_light_0_energy" state = hass.states.get(entity_id) assert state @@ -1315,7 +1315,7 @@ async def test_rpc_rgbw_sensors( assert entry assert entry.unique_id == f"123456789ABC-{light_type}:0-energy_{light_type}" - entity_id = "sensor.test_name_current" + entity_id = f"sensor.test_name_{light_type}_light_0_current" state = hass.states.get(entity_id) assert state @@ -1328,7 +1328,7 @@ async def test_rpc_rgbw_sensors( assert entry assert entry.unique_id == f"123456789ABC-{light_type}:0-current_{light_type}" - entity_id = "sensor.test_name_voltage" + entity_id = f"sensor.test_name_{light_type}_light_0_voltage" state = hass.states.get(entity_id) assert state @@ -1341,7 +1341,7 @@ async def test_rpc_rgbw_sensors( assert entry assert entry.unique_id == f"123456789ABC-{light_type}:0-voltage_{light_type}" - entity_id = "sensor.test_name_device_temperature" + entity_id = f"sensor.test_name_{light_type}_light_0_device_temperature" state = hass.states.get(entity_id) assert state diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index c891d1d7b2d..5c7933afd7e 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -572,3 +572,62 @@ async def test_rpc_remove_virtual_switch_when_orphaned( entry = entity_registry.async_get(entity_id) 
assert not entry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_rpc_device_script_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test a script switch for RPC device.""" + config = deepcopy(mock_rpc_device.config) + key = "script:1" + script_name = "aioshelly_ble_integration" + entity_id = f"switch.test_name_{script_name}" + config[key] = { + "id": 1, + "name": script_name, + "enable": False, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status[key] = { + "running": True, + } + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{key}-script" + + monkeypatch.setitem(mock_rpc_device.status[key], "running", False) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + monkeypatch.setitem(mock_rpc_device.status[key], "running", True) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index c6434c0b988..cd4cdf877a5 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -16,6 +16,7 @@ from homeassistant.components.update import ( ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, ATTR_RELEASE_URL, + ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, UpdateEntityFeature, @@ -53,17 +54,18 @@ async def test_block_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test block device update entity.""" - entity_id = "update.test_name_firmware_update" - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1") - monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2") + entity_id = "update.test_name_firmware" + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1.0.0") + monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2.0.0") monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1" - assert state.attributes[ATTR_LATEST_VERSION] == "2" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" + assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None supported_feat = state.attributes[ATTR_SUPPORTED_FEATURES] assert supported_feat == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS @@ -77,19 +79,21 @@ async def test_block_update( state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1" - assert state.attributes[ATTR_LATEST_VERSION] == "2" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" + assert 
state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] == GEN1_RELEASE_URL - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2") + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0") await mock_rest_update(hass, freezer) state = hass.states.get(entity_id) assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "2" - assert state.attributes[ATTR_LATEST_VERSION] == "2" + assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0" + assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -105,27 +109,31 @@ async def test_block_beta_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test block device beta update entity.""" - entity_id = "update.test_name_beta_firmware_update" - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1") - monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2") + entity_id = "update.test_name_beta_firmware" + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1.0.0") + monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2.0.0") monkeypatch.setitem(mock_block_device.status["update"], "beta_version", "") monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) state = hass.states.get(entity_id) assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "1" - assert state.attributes[ATTR_LATEST_VERSION] == "1" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" + assert state.attributes[ATTR_LATEST_VERSION] == "1.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - monkeypatch.setitem(mock_block_device.status["update"], "beta_version", "2b") + monkeypatch.setitem( + mock_block_device.status["update"], "beta_version", "2.0.0-beta" + ) await mock_rest_update(hass, freezer) state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1" - assert state.attributes[ATTR_LATEST_VERSION] == "2b" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" + assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] is None await hass.services.async_call( @@ -138,18 +146,20 @@ async def test_block_beta_update( state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1" - assert state.attributes[ATTR_LATEST_VERSION] == "2b" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" + assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2b") + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0-beta") await mock_rest_update(hass, freezer) state = hass.states.get(entity_id) assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "2b" - assert 
state.attributes[ATTR_LATEST_VERSION] == "2b" + assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0-beta" + assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -164,8 +174,8 @@ async def test_block_update_connection_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test block device update connection error.""" - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1") - monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2") + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1.0.0") + monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2.0.0") monkeypatch.setattr( mock_block_device, "trigger_ota_update", @@ -177,7 +187,7 @@ async def test_block_update_connection_error( await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_name_firmware_update"}, + {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) assert "Error starting OTA update" in str(excinfo.value) @@ -190,8 +200,8 @@ async def test_block_update_auth_error( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test block device update authentication error.""" - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1") - monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2") + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1.0.0") + monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2.0.0") monkeypatch.setattr( mock_block_device, "trigger_ota_update", @@ -204,7 +214,7 @@ async def test_block_update_auth_error( await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_name_firmware_update"}, + {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) @@ -222,6 +232,51 @@ async def test_block_update_auth_error( assert flow["context"].get("entry_id") == entry.entry_id +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_block_version_compare( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_block_device: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test block device custom firmware version comparison.""" + + STABLE = "20230913-111730/v1.14.0-gcb84623" + BETA = "20231107-162609/v1.14.1-rc1-g0617c15" + + entity_id_beta = "update.test_name_beta_firmware" + entity_id_latest = "update.test_name_firmware" + monkeypatch.setitem(mock_block_device.status["update"], "old_version", STABLE) + monkeypatch.setitem(mock_block_device.status["update"], "new_version", "") + monkeypatch.setitem(mock_block_device.status["update"], "beta_version", BETA) + monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) + await init_integration(hass, 1) + + state = hass.states.get(entity_id_latest) + assert state.state == STATE_OFF + assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE + assert state.attributes[ATTR_LATEST_VERSION] == STABLE + state = hass.states.get(entity_id_beta) + assert state.state == STATE_ON + assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE + assert state.attributes[ATTR_LATEST_VERSION] == BETA + + monkeypatch.setitem(mock_block_device.status["update"], "old_version", BETA) + monkeypatch.setitem(mock_block_device.status["update"], "new_version", STABLE) + 
monkeypatch.setitem(mock_block_device.status["update"], "beta_version", BETA) + await mock_rest_update(hass, freezer) + + state = hass.states.get(entity_id_latest) + assert state.state == STATE_OFF + assert state.attributes[ATTR_INSTALLED_VERSION] == BETA + assert state.attributes[ATTR_LATEST_VERSION] == STABLE + state = hass.states.get(entity_id_beta) + assert state.state == STATE_OFF + assert state.attributes[ATTR_INSTALLED_VERSION] == BETA + assert state.attributes[ATTR_LATEST_VERSION] == BETA + + async def test_rpc_update( hass: HomeAssistant, mock_rpc_device: Mock, @@ -229,7 +284,7 @@ async def test_rpc_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC device update entity.""" - entity_id = "update.test_name_firmware_update" + entity_id = "update.test_name_firmware" monkeypatch.setitem(mock_rpc_device.shelly, "ver", "1") monkeypatch.setitem( mock_rpc_device.status["sys"], @@ -245,6 +300,7 @@ async def test_rpc_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None supported_feat = state.attributes[ATTR_SUPPORTED_FEATURES] assert supported_feat == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS @@ -262,6 +318,7 @@ async def test_rpc_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL inject_rpc_device_event( @@ -279,7 +336,9 @@ async def test_rpc_update( }, ) - assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 0 + state = hass.states.get(entity_id) + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 0 inject_rpc_device_event( monkeypatch, @@ -297,7 +356,9 @@ async def test_rpc_update( }, ) - assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 50 + state = hass.states.get(entity_id) + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 inject_rpc_device_event( monkeypatch, @@ -321,6 +382,7 @@ async def test_rpc_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -344,7 +406,7 @@ async def test_rpc_sleeping_update( "stable": {"version": "2"}, }, ) - entity_id = f"{UPDATE_DOMAIN}.test_name_firmware_update" + entity_id = f"{UPDATE_DOMAIN}.test_name_firmware" await init_integration(hass, 2, sleep_period=1000) # Entity should be created when device is online @@ -359,6 +421,7 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL @@ -370,6 +433,7 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert 
state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) entry = entity_registry.async_get(entity_id) @@ -389,7 +453,7 @@ async def test_rpc_restored_sleeping_update( entity_id = register_entity( hass, UPDATE_DOMAIN, - "test_name_firmware_update", + "test_name_firmware", "sys-fwupdate", entry, device_id=device.id, @@ -409,6 +473,7 @@ async def test_rpc_restored_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) # Make device online @@ -425,6 +490,7 @@ async def test_rpc_restored_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) @@ -448,7 +514,7 @@ async def test_rpc_restored_sleeping_update_no_last_state( entity_id = register_entity( hass, UPDATE_DOMAIN, - "test_name_firmware_update", + "test_name_firmware", "sys-fwupdate", entry, device_id=device.id, @@ -475,6 +541,7 @@ async def test_rpc_restored_sleeping_update_no_last_state( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) @@ -487,7 +554,7 @@ async def test_rpc_beta_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC device beta update entity.""" - entity_id = "update.test_name_beta_firmware_update" + entity_id = "update.test_name_beta_firmware" monkeypatch.setitem(mock_rpc_device.shelly, "ver", "1") monkeypatch.setitem( mock_rpc_device.status["sys"], @@ -504,6 +571,7 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "1" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] is None monkeypatch.setitem( @@ -521,6 +589,7 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None await hass.services.async_call( UPDATE_DOMAIN, @@ -549,7 +618,8 @@ async def test_rpc_beta_update( assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" - assert state.attributes[ATTR_IN_PROGRESS] == 0 + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 0 inject_rpc_device_event( monkeypatch, @@ -567,7 +637,9 @@ async def test_rpc_beta_update( }, ) - assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 40 + state = hass.states.get(entity_id) + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 40 inject_rpc_device_event( monkeypatch, @@ -591,6 +663,7 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2b" assert 
state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -632,7 +705,7 @@ async def test_rpc_update_errors( await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_name_firmware_update"}, + {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) assert error in str(excinfo.value) @@ -667,7 +740,7 @@ async def test_rpc_update_auth_error( await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_name_firmware_update"}, + {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) diff --git a/tests/components/shelly/test_utils.py b/tests/components/shelly/test_utils.py index 5891f250fae..17bcd6e3d40 100644 --- a/tests/components/shelly/test_utils.py +++ b/tests/components/shelly/test_utils.py @@ -236,7 +236,42 @@ async def test_get_block_input_triggers( async def test_get_rpc_channel_name(mock_rpc_device: Mock) -> None: """Test get RPC channel name.""" assert get_rpc_channel_name(mock_rpc_device, "input:0") == "Test name input 0" - assert get_rpc_channel_name(mock_rpc_device, "input:3") == "Test name input_3" + assert get_rpc_channel_name(mock_rpc_device, "input:3") == "Test name Input 3" + + +@pytest.mark.parametrize( + ("component", "expected"), + [ + ("cover", "Cover"), + ("input", "Input"), + ("light", "Light"), + ("rgb", "RGB light"), + ("rgbw", "RGBW light"), + ("switch", "Switch"), + ("thermostat", "Thermostat"), + ], +) +async def test_get_rpc_channel_name_multiple_components( + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + component: str, + expected: str, +) -> None: + """Test get RPC channel name when there is more components of the same type.""" + config = { + f"{component}:0": {"name": None}, + f"{component}:1": {"name": None}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + assert ( + get_rpc_channel_name(mock_rpc_device, f"{component}:0") + == f"Test name {expected} 0" + ) + assert ( + get_rpc_channel_name(mock_rpc_device, f"{component}:1") + == f"Test name {expected} 1" + ) async def test_get_rpc_input_triggers( diff --git a/tests/components/shelly/test_valve.py b/tests/components/shelly/test_valve.py index 58b55e4f2dd..b35ce98b664 100644 --- a/tests/components/shelly/test_valve.py +++ b/tests/components/shelly/test_valve.py @@ -5,16 +5,8 @@ from unittest.mock import Mock from aioshelly.const import MODEL_GAS import pytest -from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_CLOSE_VALVE, - SERVICE_OPEN_VALVE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, -) +from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN, ValveState +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -37,7 +29,7 @@ async def test_block_device_gas_valve( assert entry assert entry.unique_id == "123456789ABC-valve_0-valve" - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == ValveState.CLOSED await hass.services.async_call( VALVE_DOMAIN, @@ -48,7 +40,7 @@ async def test_block_device_gas_valve( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == ValveState.OPENING 
monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "opened") mock_block_device.mock_update() @@ -56,7 +48,7 @@ async def test_block_device_gas_valve( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == ValveState.OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -67,7 +59,7 @@ async def test_block_device_gas_valve( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSING + assert state.state == ValveState.CLOSING monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "closed") mock_block_device.mock_update() @@ -75,4 +67,4 @@ async def test_block_device_gas_valve( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == ValveState.CLOSED diff --git a/tests/components/shopping_list/test_init.py b/tests/components/shopping_list/test_init.py index 4e758764e3d..276602f794e 100644 --- a/tests/components/shopping_list/test_init.py +++ b/tests/components/shopping_list/test_init.py @@ -32,8 +32,10 @@ async def test_add_item(hass: HomeAssistant, sl_setup) -> None: """Test adding an item intent.""" response = await intent.async_handle( - hass, "test", "HassShoppingListAddItem", {"item": {"value": "beer"}} + hass, "test", "HassShoppingListAddItem", {"item": {"value": " beer "}} ) + assert len(hass.data[DOMAIN].items) == 1 + assert hass.data[DOMAIN].items[0]["name"] == "beer" # name was trimmed # Response text is now handled by default conversation agent assert response.response_type == intent.IntentResponseType.ACTION_DONE diff --git a/tests/components/simplefin/snapshots/test_binary_sensor.ambr b/tests/components/simplefin/snapshots/test_binary_sensor.ambr index be26ae1a03d..44fe2a10b78 100644 --- a/tests/components/simplefin/snapshots/test_binary_sensor.ambr +++ b/tests/components/simplefin/snapshots/test_binary_sensor.ambr @@ -47,54 +47,6 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.investments_dr_evil_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.investments_dr_evil_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.investments_dr_evil_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Investments Dr Evil Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.investments_dr_evil_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_all_entities[binary_sensor.investments_my_checking_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -143,54 +95,6 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.investments_my_checking_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.investments_my_checking_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.investments_my_checking_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Investments My Checking Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.investments_my_checking_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -239,54 +143,6 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Investments NerdCorp Series B Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -335,54 +191,6 @@ 'state': 'off', }) # --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'possible_error', - 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Mythical RandomSavings Castle Mortgage Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -431,54 +239,6 @@ 'state': 'off', }) # --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Mythical RandomSavings Unicorn Pot Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -527,54 +287,6 @@ 'state': 'off', }) # --- -# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Problem', - }), - 'context': , - 
'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -623,54 +335,6 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'The Bank of Go PRIME SAVINGS Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -719,51 +383,3 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'The Bank of Go The Bank Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/simplisafe/test_config_flow.py b/tests/components/simplisafe/test_config_flow.py index dde7e37b891..9270fc43c30 100644 --- a/tests/components/simplisafe/test_config_flow.py +++ b/tests/components/simplisafe/test_config_flow.py @@ -8,11 +8,13 @@ from simplipy.errors import InvalidCredentialsError, SimplipyError from homeassistant.components.simplisafe import DOMAIN from 
homeassistant.components.simplisafe.config_flow import CONF_AUTH_CODE -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_CODE, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + VALID_AUTH_CODE = "code12345123451234512345123451234512345123451" @@ -90,13 +92,11 @@ async def test_options_flow(config_entry, hass: HomeAssistant) -> None: assert config_entry.options == {CONF_CODE: "4321"} -async def test_step_reauth(config_entry, hass: HomeAssistant, setup_simplisafe) -> None: +async def test_step_reauth( + config_entry: MockConfigEntry, hass: HomeAssistant, setup_simplisafe +) -> None: """Test the re-auth step.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data={CONF_USERNAME: "12345", CONF_TOKEN: "token123"}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "user" with ( @@ -118,14 +118,10 @@ async def test_step_reauth(config_entry, hass: HomeAssistant, setup_simplisafe) @pytest.mark.parametrize("unique_id", ["some_other_id"]) async def test_step_reauth_wrong_account( - config_entry, hass: HomeAssistant, setup_simplisafe + config_entry: MockConfigEntry, hass: HomeAssistant, setup_simplisafe ) -> None: """Test the re-auth step where the wrong account is used during login.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data={CONF_USERNAME: "12345", CONF_TOKEN: "token123"}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "user" with ( diff --git a/tests/components/simplisafe/test_diagnostics.py b/tests/components/simplisafe/test_diagnostics.py index 31bd44c6146..13c1e28aa36 100644 --- a/tests/components/simplisafe/test_diagnostics.py +++ b/tests/components/simplisafe/test_diagnostics.py @@ -31,6 +31,8 @@ async def test_entry_diagnostics( "disabled_by": None, "created_at": ANY, "modified_at": ANY, + "discovery_keys": {}, + "subentries": [], }, "subscription_data": { "12345": { diff --git a/tests/components/simulated/__init__.py b/tests/components/simulated/__init__.py deleted file mode 100644 index 501fbab603a..00000000000 --- a/tests/components/simulated/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the simulated component.""" diff --git a/tests/components/simulated/test_sensor.py b/tests/components/simulated/test_sensor.py deleted file mode 100644 index b167147367a..00000000000 --- a/tests/components/simulated/test_sensor.py +++ /dev/null @@ -1,50 +0,0 @@ -"""The tests for the simulated sensor.""" - -from homeassistant.components.simulated.sensor import ( - CONF_AMP, - CONF_FWHM, - CONF_MEAN, - CONF_PERIOD, - CONF_PHASE, - CONF_RELATIVE_TO_EPOCH, - CONF_SEED, - CONF_UNIT, - DEFAULT_AMP, - DEFAULT_FWHM, - DEFAULT_MEAN, - DEFAULT_NAME, - DEFAULT_PHASE, - DEFAULT_RELATIVE_TO_EPOCH, - DEFAULT_SEED, - DOMAIN, -) -from homeassistant.const import CONF_FRIENDLY_NAME -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - - -async def test_simulated_sensor_default_config( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test default config.""" - config = {"sensor": {"platform": "simulated"}} - assert await async_setup_component(hass, "sensor", config) - 
await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids()) == 1 - state = hass.states.get("sensor.simulated") - - assert state.attributes.get(CONF_FRIENDLY_NAME) == DEFAULT_NAME - assert state.attributes.get(CONF_AMP) == DEFAULT_AMP - assert state.attributes.get(CONF_UNIT) is None - assert state.attributes.get(CONF_MEAN) == DEFAULT_MEAN - assert state.attributes.get(CONF_PERIOD) == 60.0 - assert state.attributes.get(CONF_PHASE) == DEFAULT_PHASE - assert state.attributes.get(CONF_FWHM) == DEFAULT_FWHM - assert state.attributes.get(CONF_SEED) == DEFAULT_SEED - assert state.attributes.get(CONF_RELATIVE_TO_EPOCH) == DEFAULT_RELATIVE_TO_EPOCH - - issue = issue_registry.async_get_issue(DOMAIN, DOMAIN) - assert issue.issue_id == DOMAIN - assert issue.translation_key == "simulated_deprecation" diff --git a/tests/components/siren/test_init.py b/tests/components/siren/test_init.py index 475b32540b4..b78d25366fa 100644 --- a/tests/components/siren/test_init.py +++ b/tests/components/siren/test_init.py @@ -1,11 +1,9 @@ """The tests for the siren component.""" -from types import ModuleType from unittest.mock import MagicMock import pytest -from homeassistant.components import siren from homeassistant.components.siren import ( SirenEntity, SirenEntityDescription, @@ -14,8 +12,6 @@ from homeassistant.components.siren import ( from homeassistant.components.siren.const import SirenEntityFeature from homeassistant.core import HomeAssistant -from tests.common import help_test_all, import_and_test_deprecated_constant_enum - class MockSirenEntity(SirenEntity): """Mock siren device to use in tests.""" @@ -109,40 +105,3 @@ async def test_missing_tones_dict(hass: HomeAssistant) -> None: siren.hass = hass with pytest.raises(ValueError): process_turn_on_params(siren, {"tone": 3}) - - -@pytest.mark.parametrize( - "module", - [siren, siren.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize(("enum"), list(SirenEntityFeature)) -@pytest.mark.parametrize(("module"), [siren, siren.const]) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: SirenEntityFeature, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, module, enum, "SUPPORT_", "2025.1") - - -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" - - class MockSirenEntity(siren.SirenEntity): - _attr_supported_features = 1 - - entity = MockSirenEntity() - assert entity.supported_features is siren.SirenEntityFeature(1) - assert "MockSirenEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "SirenEntityFeature.TURN_ON" in caplog.text - caplog.clear() - assert entity.supported_features is siren.SirenEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text diff --git a/tests/components/sky_remote/__init__.py b/tests/components/sky_remote/__init__.py new file mode 100644 index 00000000000..83d68330d5b --- /dev/null +++ b/tests/components/sky_remote/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Sky Remote component.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_mock_entry(hass: HomeAssistant, entry: MockConfigEntry): + """Initialize a mock config entry.""" + 
entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + await hass.async_block_till_done() diff --git a/tests/components/sky_remote/conftest.py b/tests/components/sky_remote/conftest.py new file mode 100644 index 00000000000..d6c453d81f7 --- /dev/null +++ b/tests/components/sky_remote/conftest.py @@ -0,0 +1,47 @@ +"""Test mocks and fixtures.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from homeassistant.components.sky_remote.const import DEFAULT_PORT, DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT + +from tests.common import MockConfigEntry + +SAMPLE_CONFIG = {CONF_HOST: "example.com", CONF_PORT: DEFAULT_PORT} + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry(domain=DOMAIN, data=SAMPLE_CONFIG) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Stub out setup function.""" + with patch( + "homeassistant.components.sky_remote.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_remote_control(request: pytest.FixtureRequest) -> Generator[MagicMock]: + """Mock skyboxremote library.""" + with ( + patch( + "homeassistant.components.sky_remote.RemoteControl" + ) as mock_remote_control, + patch( + "homeassistant.components.sky_remote.config_flow.RemoteControl", + mock_remote_control, + ), + ): + mock_remote_control._instance_mock = MagicMock(host="example.com") + mock_remote_control._instance_mock.check_connectable = AsyncMock(True) + mock_remote_control.return_value = mock_remote_control._instance_mock + yield mock_remote_control diff --git a/tests/components/sky_remote/test_config_flow.py b/tests/components/sky_remote/test_config_flow.py new file mode 100644 index 00000000000..aaeda20788c --- /dev/null +++ b/tests/components/sky_remote/test_config_flow.py @@ -0,0 +1,125 @@ +"""Test the Sky Remote config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +import pytest +from skyboxremote import LEGACY_PORT, SkyBoxConnectionError + +from homeassistant.components.sky_remote.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import SAMPLE_CONFIG + + +async def test_user_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_remote_control +) -> None: + """Test we can setup an entry.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == SAMPLE_CONFIG + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_device_exists_abort( + hass: HomeAssistant, mock_config_entry, mock_remote_control +) -> None: + """Test we abort flow if device already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: mock_config_entry.data[CONF_HOST]}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == 
"already_configured" + + +@pytest.mark.parametrize("mock_remote_control", [LEGACY_PORT], indirect=True) +async def test_user_flow_legacy_device( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_remote_control, +) -> None: + """Test we can setup an entry with a legacy port.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + async def mock_check_connectable(): + if mock_remote_control.call_args[0][1] == LEGACY_PORT: + return True + raise SkyBoxConnectionError("Wrong port") + + mock_remote_control._instance_mock.check_connectable = mock_check_connectable + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {**SAMPLE_CONFIG, CONF_PORT: LEGACY_PORT} + + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize("mock_remote_control", [6], indirect=True) +async def test_user_flow_unconnectable( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_remote_control, +) -> None: + """Test we can setup an entry.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + mock_remote_control._instance_mock.check_connectable = AsyncMock( + side_effect=SkyBoxConnectionError("Example") + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + assert len(mock_setup_entry.mock_calls) == 0 + + mock_remote_control._instance_mock.check_connectable = AsyncMock(True) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == SAMPLE_CONFIG + + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/sky_remote/test_init.py b/tests/components/sky_remote/test_init.py new file mode 100644 index 00000000000..fe316baa6bf --- /dev/null +++ b/tests/components/sky_remote/test_init.py @@ -0,0 +1,59 @@ +"""Tests for the Sky Remote component.""" + +from unittest.mock import AsyncMock + +from skyboxremote import SkyBoxConnectionError + +from homeassistant.components.sky_remote.const import DEFAULT_PORT, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_mock_entry + +from tests.common import MockConfigEntry + + +async def test_setup_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_remote_control, + device_registry: dr.DeviceRegistry, +) -> None: + """Test successful setup of entry.""" + await setup_mock_entry(hass, mock_config_entry) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + mock_remote_control.assert_called_once_with("example.com", DEFAULT_PORT) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.entry_id)} + ) + assert device_entry is not None + assert device_entry.name == "example.com" + + +async def test_setup_unconnectable_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_remote_control, +) -> None: + """Test unsuccessful setup of entry.""" + mock_remote_control._instance_mock.check_connectable = AsyncMock( + side_effect=SkyBoxConnectionError() + ) + + await setup_mock_entry(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_unload_entry( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_remote_control +) -> None: + """Test unload an entry.""" + await setup_mock_entry(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/sky_remote/test_remote.py b/tests/components/sky_remote/test_remote.py new file mode 100644 index 00000000000..301375bc039 --- /dev/null +++ b/tests/components/sky_remote/test_remote.py @@ -0,0 +1,46 @@ +"""Test sky_remote remote.""" + +import pytest + +from homeassistant.components.remote import ( + ATTR_COMMAND, + DOMAIN as REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from . 
import setup_mock_entry + +ENTITY_ID = "remote.example_com" + + +async def test_send_command( + hass: HomeAssistant, mock_config_entry, mock_remote_control +) -> None: + """Test "send_command" method.""" + await setup_mock_entry(hass, mock_config_entry) + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_COMMAND: ["sky"]}, + blocking=True, + ) + mock_remote_control._instance_mock.send_keys.assert_called_once_with(["sky"]) + + +async def test_send_invalid_command( + hass: HomeAssistant, mock_config_entry, mock_remote_control +) -> None: + """Test "send_command" method.""" + await setup_mock_entry(hass, mock_config_entry) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_COMMAND: ["apple"]}, + blocking=True, + ) + mock_remote_control._instance_mock.send_keys.assert_not_called() diff --git a/tests/components/skybell/test_config_flow.py b/tests/components/skybell/test_config_flow.py index cb62f808efc..f415fef077e 100644 --- a/tests/components/skybell/test_config_flow.py +++ b/tests/components/skybell/test_config_flow.py @@ -5,10 +5,9 @@ from unittest.mock import patch from aioskybell import exceptions import pytest -from homeassistant import config_entries from homeassistant.components.skybell.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_SOURCE +from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -104,15 +103,7 @@ async def test_step_reauth(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -130,15 +121,7 @@ async def test_step_reauth_failed(hass: HomeAssistant, skybell_mock) -> None: entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/sleepiq/test_binary_sensor.py b/tests/components/sleepiq/test_binary_sensor.py index 65654de74ac..689834aba35 100644 --- a/tests/components/sleepiq/test_binary_sensor.py +++ b/tests/components/sleepiq/test_binary_sensor.py @@ -1,6 +1,9 @@ """The tests for SleepIQ binary sensor platform.""" -from homeassistant.components.binary_sensor import DOMAIN, BinarySensorDeviceClass +from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, + BinarySensorDeviceClass, +) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, @@ -28,7 +31,7 @@ async def test_binary_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ binary sensors.""" - await setup_platform(hass, DOMAIN) + await 
setup_platform(hass, BINARY_SENSOR_DOMAIN) state = hass.states.get( f"binary_sensor.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_is_in_bed" diff --git a/tests/components/sleepiq/test_button.py b/tests/components/sleepiq/test_button.py index 33ad4d72b46..e1c4203c937 100644 --- a/tests/components/sleepiq/test_button.py +++ b/tests/components/sleepiq/test_button.py @@ -1,6 +1,6 @@ """The tests for SleepIQ binary sensor platform.""" -from homeassistant.components.button import DOMAIN +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -12,7 +12,7 @@ async def test_button_calibrate( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ calibrate button.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, BUTTON_DOMAIN) state = hass.states.get(f"button.sleepnumber_{BED_NAME_LOWER}_calibrate") assert ( @@ -24,7 +24,7 @@ async def test_button_calibrate( assert entity.unique_id == f"{BED_ID}-calibrate" await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, "press", {ATTR_ENTITY_ID: f"button.sleepnumber_{BED_NAME_LOWER}_calibrate"}, blocking=True, @@ -38,7 +38,7 @@ async def test_button_stop_pump( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ stop pump button.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, BUTTON_DOMAIN) state = hass.states.get(f"button.sleepnumber_{BED_NAME_LOWER}_stop_pump") assert ( @@ -50,7 +50,7 @@ async def test_button_stop_pump( assert entity.unique_id == f"{BED_ID}-stop-pump" await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, "press", {ATTR_ENTITY_ID: f"button.sleepnumber_{BED_NAME_LOWER}_stop_pump"}, blocking=True, diff --git a/tests/components/sleepiq/test_config_flow.py b/tests/components/sleepiq/test_config_flow.py index af08f5aa9fe..26007d42e7d 100644 --- a/tests/components/sleepiq/test_config_flow.py +++ b/tests/components/sleepiq/test_config_flow.py @@ -101,19 +101,7 @@ async def test_reauth_password(hass: HomeAssistant) -> None: # set up initially entry = await setup_platform(hass) - with patch( - "homeassistant.components.sleepiq.config_flow.AsyncSleepIQ.login", - side_effect=SleepIQLoginException, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.sleepiq.config_flow.AsyncSleepIQ.login", diff --git a/tests/components/sleepiq/test_light.py b/tests/components/sleepiq/test_light.py index 9564bca7a99..d1284dc3e41 100644 --- a/tests/components/sleepiq/test_light.py +++ b/tests/components/sleepiq/test_light.py @@ -1,6 +1,6 @@ """The tests for SleepIQ light platform.""" -from homeassistant.components.light import DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.sleepiq.coordinator import LONGER_UPDATE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant @@ -16,7 +16,7 @@ async def test_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test for successfully setting up the SleepIQ platform.""" - entry = await 
setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, LIGHT_DOMAIN) assert len(entity_registry.entities) == 2 @@ -33,10 +33,10 @@ async def test_setup( async def test_light_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test light change.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, LIGHT_DOMAIN) await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: f"light.sleepnumber_{BED_NAME_LOWER}_light_1"}, blocking=True, @@ -45,7 +45,7 @@ async def test_light_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: mock_asyncsleepiq.beds[BED_ID].foundation.lights[0].turn_on.assert_called_once() await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: f"light.sleepnumber_{BED_NAME_LOWER}_light_1"}, blocking=True, @@ -56,7 +56,7 @@ async def test_light_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: async def test_switch_get_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test light update.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, LIGHT_DOMAIN) assert ( hass.states.get(f"light.sleepnumber_{BED_NAME_LOWER}_light_1").state diff --git a/tests/components/sleepiq/test_number.py b/tests/components/sleepiq/test_number.py index 52df2eb27aa..f0739aabc9d 100644 --- a/tests/components/sleepiq/test_number.py +++ b/tests/components/sleepiq/test_number.py @@ -5,7 +5,7 @@ from homeassistant.components.number import ( ATTR_MIN, ATTR_STEP, ATTR_VALUE, - DOMAIN, + DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_ICON @@ -30,7 +30,7 @@ async def test_firmness( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ firmness number values for a bed with two sides.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get( f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_firmness" @@ -71,7 +71,7 @@ async def test_firmness( assert entry.unique_id == f"{SLEEPER_R_ID}_firmness" await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_firmness", @@ -89,7 +89,7 @@ async def test_actuators( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ actuator position values for a bed with adjustable head and foot.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get(f"number.sleepnumber_{BED_NAME_LOWER}_right_head_position") assert state.state == "60.0" @@ -143,7 +143,7 @@ async def test_actuators( assert entry.unique_id == f"{BED_ID}_F" await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: f"number.sleepnumber_{BED_NAME_LOWER}_right_head_position", @@ -165,7 +165,7 @@ async def test_foot_warmer_timer( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ foot warmer number values for a bed with two sides.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get( f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warming_timer" @@ -187,7 +187,7 @@ async def test_foot_warmer_timer( assert entry.unique_id == f"{BED_ID}_L_foot_warming_timer" await hass.services.async_call( - DOMAIN, + 
NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warming_timer", diff --git a/tests/components/sleepiq/test_select.py b/tests/components/sleepiq/test_select.py index ef4c7fb6df0..bbfb612e9cb 100644 --- a/tests/components/sleepiq/test_select.py +++ b/tests/components/sleepiq/test_select.py @@ -4,7 +4,10 @@ from unittest.mock import MagicMock from asyncsleepiq import FootWarmingTemps -from homeassistant.components.select import DOMAIN, SERVICE_SELECT_OPTION +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -37,7 +40,7 @@ async def test_split_foundation_preset( mock_asyncsleepiq: MagicMock, ) -> None: """Test the SleepIQ select entity for split foundation presets.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SELECT_DOMAIN) state = hass.states.get( f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset_right" @@ -72,7 +75,7 @@ async def test_split_foundation_preset( assert entry.unique_id == f"{BED_ID}_preset_L" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset_left", @@ -94,7 +97,7 @@ async def test_single_foundation_preset( mock_asyncsleepiq_single_foundation: MagicMock, ) -> None: """Test the SleepIQ select entity for single foundation presets.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SELECT_DOMAIN) state = hass.states.get(f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset") assert state.state == PRESET_R_STATE @@ -111,7 +114,7 @@ async def test_single_foundation_preset( assert entry.unique_id == f"{BED_ID}_preset" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset", @@ -135,7 +138,7 @@ async def test_foot_warmer( mock_asyncsleepiq: MagicMock, ) -> None: """Test the SleepIQ select entity for foot warmers.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SELECT_DOMAIN) state = hass.states.get( f"select.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warmer" @@ -154,7 +157,7 @@ async def test_foot_warmer( assert entry.unique_id == f"{SLEEPER_L_ID}_foot_warmer" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warmer", @@ -185,7 +188,7 @@ async def test_foot_warmer( assert entry.unique_id == f"{SLEEPER_R_ID}_foot_warmer" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_R_NAME_LOWER}_foot_warmer", diff --git a/tests/components/sleepiq/test_sensor.py b/tests/components/sleepiq/test_sensor.py index ae25958419c..eb558850fb3 100644 --- a/tests/components/sleepiq/test_sensor.py +++ b/tests/components/sleepiq/test_sensor.py @@ -1,6 +1,6 @@ """The tests for SleepIQ sensor platform.""" -from homeassistant.components.sensor import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_ICON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -22,7 +22,7 @@ async def test_sleepnumber_sensors( hass: HomeAssistant, entity_registry: 
er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ sleepnumber for a bed with two sides.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SENSOR_DOMAIN) state = hass.states.get( f"sensor.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_sleepnumber" @@ -61,7 +61,7 @@ async def test_pressure_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ pressure for a bed with two sides.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SENSOR_DOMAIN) state = hass.states.get( f"sensor.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_pressure" diff --git a/tests/components/sleepiq/test_switch.py b/tests/components/sleepiq/test_switch.py index 7c41b6b9d19..5dd3e77fd66 100644 --- a/tests/components/sleepiq/test_switch.py +++ b/tests/components/sleepiq/test_switch.py @@ -1,7 +1,7 @@ """The tests for SleepIQ switch platform.""" from homeassistant.components.sleepiq.coordinator import LONGER_UPDATE_INTERVAL -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -16,7 +16,7 @@ async def test_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test for successfully setting up the SleepIQ platform.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SWITCH_DOMAIN) assert len(entity_registry.entities) == 1 @@ -28,10 +28,10 @@ async def test_setup( async def test_switch_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test button press.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, "turn_off", {ATTR_ENTITY_ID: f"switch.sleepnumber_{BED_NAME_LOWER}_pause_mode"}, blocking=True, @@ -40,7 +40,7 @@ async def test_switch_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None mock_asyncsleepiq.beds[BED_ID].set_pause_mode.assert_called_with(False) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, "turn_on", {ATTR_ENTITY_ID: f"switch.sleepnumber_{BED_NAME_LOWER}_pause_mode"}, blocking=True, @@ -51,7 +51,7 @@ async def test_switch_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None async def test_switch_get_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test button press.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) assert ( hass.states.get(f"switch.sleepnumber_{BED_NAME_LOWER}_pause_mode").state diff --git a/tests/components/slide_local/__init__.py b/tests/components/slide_local/__init__.py new file mode 100644 index 00000000000..cd7bd6cb6d1 --- /dev/null +++ b/tests/components/slide_local/__init__.py @@ -0,0 +1,21 @@ +"""Tests for the slide_local integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> MockConfigEntry: + """Set up the slide local integration.""" + config_entry.add_to_hass(hass) + + with patch("homeassistant.components.slide_local.PLATFORMS", platforms): + await hass.config_entries.async_setup(config_entry.entry_id) + await 
hass.async_block_till_done() + + return config_entry diff --git a/tests/components/slide_local/conftest.py b/tests/components/slide_local/conftest.py new file mode 100644 index 00000000000..ad2734bbb64 --- /dev/null +++ b/tests/components/slide_local/conftest.py @@ -0,0 +1,61 @@ +"""Test fixtures for Slide local.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_MAC + +from .const import HOST, SLIDE_INFO_DATA + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="slide", + data={ + CONF_HOST: HOST, + CONF_API_VERSION: 2, + CONF_MAC: "12:34:56:78:90:ab", + }, + options={ + CONF_INVERT_POSITION: False, + }, + minor_version=1, + unique_id="12:34:56:78:90:ab", + entry_id="ce5f5431554d101905d31797e1232da8", + ) + + +@pytest.fixture +def mock_slide_api() -> Generator[AsyncMock]: + """Build a fixture for the SlideLocalApi that connects successfully and returns one device.""" + + with ( + patch( + "homeassistant.components.slide_local.coordinator.SlideLocalApi", + autospec=True, + ) as mock_slide_local_api, + patch( + "homeassistant.components.slide_local.config_flow.SlideLocalApi", + new=mock_slide_local_api, + ), + ): + client = mock_slide_local_api.return_value + client.slide_info.return_value = SLIDE_INFO_DATA + yield client + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.slide_local.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry diff --git a/tests/components/slide_local/const.py b/tests/components/slide_local/const.py new file mode 100644 index 00000000000..edf45753407 --- /dev/null +++ b/tests/components/slide_local/const.py @@ -0,0 +1,8 @@ +"""Common const used across tests for slide_local.""" + +from homeassistant.components.slide_local.const import DOMAIN + +from tests.common import load_json_object_fixture + +HOST = "127.0.0.2" +SLIDE_INFO_DATA = load_json_object_fixture("slide_1.json", DOMAIN) diff --git a/tests/components/slide_local/fixtures/slide_1.json b/tests/components/slide_local/fixtures/slide_1.json new file mode 100644 index 00000000000..6367b94f243 --- /dev/null +++ b/tests/components/slide_local/fixtures/slide_1.json @@ -0,0 +1,11 @@ +{ + "slide_id": "slide_1234567890ab", + "mac": "1234567890ab", + "board_rev": 1, + "device_name": "slide bedroom", + "zone_name": "bedroom", + "curtain_type": 0, + "calib_time": 10239, + "pos": 0.0, + "touch_go": true +} diff --git a/tests/components/slide_local/snapshots/test_button.ambr b/tests/components/slide_local/snapshots/test_button.ambr new file mode 100644 index 00000000000..549538f1361 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[button.slide_bedroom_calibrate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.slide_bedroom_calibrate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Calibrate', + 'platform': 'slide_local', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibrate', + 'unique_id': '1234567890ab-calibrate', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.slide_bedroom_calibrate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'slide bedroom Calibrate', + }), + 'context': , + 'entity_id': 'button.slide_bedroom_calibrate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/slide_local/snapshots/test_init.ambr b/tests/components/slide_local/snapshots/test_init.ambr new file mode 100644 index 00000000000..d90f72e4b05 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.2', + 'connections': set({ + tuple( + 'mac', + '12:34:56:78:90:ab', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 1, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Innovation in Motion', + 'model': None, + 'model_id': None, + 'name': 'slide bedroom', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '1234567890ab', + 'suggested_area': None, + 'sw_version': 2, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/slide_local/snapshots/test_switch.ambr b/tests/components/slide_local/snapshots/test_switch.ambr new file mode 100644 index 00000000000..e19467c283e --- /dev/null +++ b/tests/components/slide_local/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_all_entities[switch.slide_bedroom_touchgo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.slide_bedroom_touchgo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'TouchGo', + 'platform': 'slide_local', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'touchgo', + 'unique_id': '1234567890ab-touchgo', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.slide_bedroom_touchgo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'slide bedroom TouchGo', + }), + 'context': , + 'entity_id': 'switch.slide_bedroom_touchgo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/slide_local/test_button.py b/tests/components/slide_local/test_button.py new file mode 100644 index 00000000000..646c8fd7ef3 --- /dev/null +++ b/tests/components/slide_local/test_button.py @@ -0,0 +1,46 @@ +"""Tests for the Slide Local button platform.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from 
homeassistant.helpers import entity_registry as er + +from . import setup_platform + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_platform(hass, mock_config_entry, [Platform.BUTTON]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_pressing_button( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test pressing button.""" + await setup_platform(hass, mock_config_entry, [Platform.BUTTON]) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.slide_bedroom_calibrate", + }, + blocking=True, + ) + mock_slide_api.slide_calibrate.assert_called_once() diff --git a/tests/components/slide_local/test_config_flow.py b/tests/components/slide_local/test_config_flow.py new file mode 100644 index 00000000000..48be7dd7850 --- /dev/null +++ b/tests/components/slide_local/test_config_flow.py @@ -0,0 +1,398 @@ +"""Test the slide_local config flow.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock + +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) +import pytest + +from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_PASSWORD, Platform +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import setup_platform +from .const import HOST, SLIDE_INFO_DATA + +from tests.common import MockConfigEntry + +MOCK_ZEROCONF_DATA = ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.2"), + ip_addresses=[ip_address("127.0.0.2")], + hostname="Slide-1234567890AB.local.", + name="Slide-1234567890AB._http._tcp.local.", + port=80, + properties={ + "id": "slide-1234567890AB", + "arch": "esp32", + "app": "slide", + "fw_version": "2.0.0-1683059251", + "fw_id": "20230502-202745", + }, + type="mock_type", +) + + +async def test_user( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == HOST + assert result2["data"][CONF_HOST] == HOST + assert result2["data"][CONF_PASSWORD] == "pwd" + assert result2["data"][CONF_API_VERSION] == 2 + assert result2["result"].unique_id == "12:34:56:78:90:ab" + assert not result2["options"][CONF_INVERT_POSITION] + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_api_1( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = [None, SLIDE_INFO_DATA] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == HOST + assert result2["data"][CONF_HOST] == HOST + assert result2["data"][CONF_PASSWORD] == "pwd" + assert result2["data"][CONF_API_VERSION] == 1 + assert result2["result"].unique_id == "12:34:56:78:90:ab" + assert not result2["options"][CONF_INVERT_POSITION] + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_api_error( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = [None, None] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"]["base"] == "unknown" + + mock_slide_api.slide_info.side_effect = [None, SLIDE_INFO_DATA] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == HOST + assert result2["data"][CONF_HOST] == HOST + assert result2["data"][CONF_PASSWORD] == "pwd" + assert result2["data"][CONF_API_VERSION] == 1 + assert result2["result"].unique_id == "12:34:56:78:90:ab" + assert not result2["options"][CONF_INVERT_POSITION] + assert 
len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (ClientConnectionError, "cannot_connect"), + (ClientTimeoutError, "cannot_connect"), + (AuthenticationFailed, "invalid_auth"), + (DigestAuthCalcError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_api_1_exceptions( + hass: HomeAssistant, + exception: Exception, + error: str, + mock_slide_api: AsyncMock, +) -> None: + """Test we can handle Form exceptions for api 1.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = [None, exception] + + # tests with connection error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"]["base"] == error + + # tests with all provided + mock_slide_api.slide_info.side_effect = [None, SLIDE_INFO_DATA] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (ClientConnectionError, "cannot_connect"), + (ClientTimeoutError, "cannot_connect"), + (AuthenticationFailed, "invalid_auth"), + (DigestAuthCalcError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_api_2_exceptions( + hass: HomeAssistant, + exception: Exception, + error: str, + mock_slide_api: AsyncMock, +) -> None: + """Test we can handle Form exceptions for api 2.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = exception + + # tests with connection error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"]["base"] == error + + # tests with all provided + mock_slide_api.slide_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_abort_if_already_setup( + hass: HomeAssistant, + mock_slide_api: AsyncMock, +) -> None: + """Test we abort if the device is already setup.""" + + MockConfigEntry(domain=DOMAIN, unique_id="12:34:56:78:90:ab").add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_zeroconf( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test starting a flow from discovery.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": 
SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "127.0.0.2" + assert result["data"][CONF_HOST] == "127.0.0.2" + assert not result["options"][CONF_INVERT_POSITION] + assert result["result"].unique_id == "12:34:56:78:90:ab" + + +async def test_zeroconf_duplicate_entry( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test discovery aborts when the device is already configured.""" + + MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: HOST}, unique_id="12:34:56:78:90:ab" + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries[0].data[CONF_HOST] == HOST + + +async def test_zeroconf_update_duplicate_entry( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test updating an existing entry from discovery.""" + + MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.3"}, unique_id="12:34:56:78:90:ab" + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries[0].data[CONF_HOST] == HOST + + +@pytest.mark.parametrize( + ("exception"), + [ + (ClientConnectionError), + (ClientTimeoutError), + (AuthenticationFailed), + (DigestAuthCalcError), + (Exception), + ], +) +async def test_zeroconf_connection_error( + hass: HomeAssistant, + exception: Exception, + mock_slide_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test discovery aborts when the device cannot be reached.""" + + MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "slide_host"}, unique_id="12:34:56:78:90:cd" + ).add_to_hass(hass) + + mock_slide_api.slide_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "discovery_connection_failed" + + +async def test_options_flow( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test options flow works correctly.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_INVERT_POSITION: True, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_config_entry.options == { + CONF_INVERT_POSITION: True, + } diff --git a/tests/components/slide_local/test_init.py b/tests/components/slide_local/test_init.py new file mode 100644 index 00000000000..7b0a2d83164 --- /dev/null +++ b/tests/components/slide_local/test_init.py @@ -0,0 +1,29 @@ +"""Tests for the Slide Local integration.""" + +from unittest.mock
import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_platform + +from tests.common import MockConfigEntry + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, "1234567890ab")} + ) + assert device_entry is not None + assert device_entry == snapshot diff --git a/tests/components/slide_local/test_switch.py b/tests/components/slide_local/test_switch.py new file mode 100644 index 00000000000..0ac9820ca10 --- /dev/null +++ b/tests/components/slide_local/test_switch.py @@ -0,0 +1,61 @@ +"""Tests for the Slide Local switch platform.""" + +from unittest.mock import AsyncMock + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_platform + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_platform(hass, mock_config_entry, [Platform.SWITCH]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_TOGGLE, + ], +) +async def test_services( + hass: HomeAssistant, + service: str, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test switch.""" + await setup_platform(hass, mock_config_entry, [Platform.SWITCH]) + + await hass.services.async_call( + SWITCH_DOMAIN, + service, + { + ATTR_ENTITY_ID: "switch.slide_bedroom_touchgo", + }, + blocking=True, + ) + mock_slide_api.slide_set_touchgo.assert_called_once() diff --git a/tests/components/sma/__init__.py b/tests/components/sma/__init__.py index aefb99cf1b1..80837c718a9 100644 --- a/tests/components/sma/__init__.py +++ b/tests/components/sma/__init__.py @@ -6,7 +6,7 @@ MOCK_DEVICE = { "manufacturer": "SMA", "name": "SMA Device Name", "type": "Sunny Boy 3.6", - "serial": "123456789", + "serial": 123456789, } MOCK_USER_INPUT = { diff --git a/tests/components/sma/conftest.py b/tests/components/sma/conftest.py index a54f478a31d..dd47a0f1055 100644 --- a/tests/components/sma/conftest.py +++ b/tests/components/sma/conftest.py @@ -22,9 +22,10 @@ def mock_config_entry() -> MockConfigEntry: return MockConfigEntry( domain=DOMAIN, title=MOCK_DEVICE["name"], - unique_id=MOCK_DEVICE["serial"], + unique_id=str(MOCK_DEVICE["serial"]), data=MOCK_USER_INPUT, source=config_entries.SOURCE_IMPORT, + minor_version=2, ) diff --git a/tests/components/sma/test_init.py b/tests/components/sma/test_init.py new file mode 100644 index 00000000000..0cc82f49a41 --- /dev/null +++ b/tests/components/sma/test_init.py @@ -0,0 +1,27 @@ +"""Test 
the sma init file.""" + +from homeassistant.components.sma.const import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT +from homeassistant.core import HomeAssistant + +from . import MOCK_DEVICE, MOCK_USER_INPUT, _patch_async_setup_entry + +from tests.common import MockConfigEntry + + +async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None: + """Test migrating a 1.1 config entry to 1.2.""" + with _patch_async_setup_entry(): + entry = MockConfigEntry( + domain=DOMAIN, + title=MOCK_DEVICE["name"], + unique_id=MOCK_DEVICE["serial"], # Not converted to str + data=MOCK_USER_INPUT, + source=SOURCE_IMPORT, + minor_version=1, + ) + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + assert entry.version == 1 + assert entry.minor_version == 2 + assert entry.unique_id == str(MOCK_DEVICE["serial"]) diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 70fd9db0744..71a36c7885a 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -38,7 +38,6 @@ from homeassistant.components.smartthings.const import ( STORAGE_KEY, STORAGE_VERSION, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import ( CONF_ACCESS_TOKEN, @@ -47,6 +46,7 @@ from homeassistant.const import ( CONF_WEBHOOK_ID, ) from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry diff --git a/tests/components/smartthings/test_climate.py b/tests/components/smartthings/test_climate.py index e4b8cb6d373..d39ee2d6bed 100644 --- a/tests/components/smartthings/test_climate.py +++ b/tests/components/smartthings/test_climate.py @@ -88,6 +88,26 @@ def basic_thermostat_fixture(device_factory): return device +@pytest.fixture(name="minimal_thermostat") +def minimal_thermostat_fixture(device_factory): + """Fixture returns a minimal thermostat without cooling.""" + device = device_factory( + "Minimal Thermostat", + capabilities=[ + Capability.temperature_measurement, + Capability.thermostat_heating_setpoint, + Capability.thermostat_mode, + ], + status={ + Attribute.heating_setpoint: 68, + Attribute.thermostat_mode: "off", + Attribute.supported_thermostat_modes: ["off", "heat"], + }, + ) + device.status.attributes[Attribute.temperature] = Status(70, "F", None) + return device + + @pytest.fixture(name="thermostat") def thermostat_fixture(device_factory): """Fixture returns a fully-featured thermostat.""" @@ -310,6 +330,28 @@ async def test_basic_thermostat_entity_state( assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 21.1 # celsius +async def test_minimal_thermostat_entity_state( + hass: HomeAssistant, minimal_thermostat +) -> None: + """Tests the state attributes properly match the thermostat type.""" + await setup_platform(hass, CLIMATE_DOMAIN, devices=[minimal_thermostat]) + state = hass.states.get("climate.minimal_thermostat") + assert state.state == HVACMode.OFF + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TURN_ON + ) + assert ATTR_HVAC_ACTION not in state.attributes + assert sorted(state.attributes[ATTR_HVAC_MODES]) == [ + HVACMode.HEAT, + HVACMode.OFF, + ] + assert 
state.attributes[ATTR_CURRENT_TEMPERATURE] == 21.1 # celsius + + async def test_thermostat_entity_state(hass: HomeAssistant, thermostat) -> None: """Tests the state attributes properly match the thermostat type.""" await setup_platform(hass, CLIMATE_DOMAIN, devices=[thermostat]) diff --git a/tests/components/smartthings/test_config_flow.py b/tests/components/smartthings/test_config_flow.py index 49444e47780..3621e58bc3d 100644 --- a/tests/components/smartthings/test_config_flow.py +++ b/tests/components/smartthings/test_config_flow.py @@ -16,9 +16,9 @@ from homeassistant.components.smartthings.const import ( CONF_LOCATION_ID, DOMAIN, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry diff --git a/tests/components/smartthings/test_cover.py b/tests/components/smartthings/test_cover.py index bb292b53ee8..31443c12ab2 100644 --- a/tests/components/smartthings/test_cover.py +++ b/tests/components/smartthings/test_cover.py @@ -13,10 +13,7 @@ from homeassistant.components.cover import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE from homeassistant.config_entries import ConfigEntryState @@ -87,7 +84,7 @@ async def test_open(hass: HomeAssistant, device_factory) -> None: for entity_id in entity_ids: state = hass.states.get(entity_id) assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async def test_close(hass: HomeAssistant, device_factory) -> None: @@ -112,7 +109,7 @@ async def test_close(hass: HomeAssistant, device_factory) -> None: for entity_id in entity_ids: state = hass.states.get(entity_id) assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_set_cover_position_switch_level( @@ -136,7 +133,7 @@ async def test_set_cover_position_switch_level( state = hass.states.get("cover.shade") # Result of call does not update state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_BATTERY_LEVEL] == 95 assert state.attributes[ATTR_CURRENT_POSITION] == 10 # Ensure API called @@ -167,7 +164,7 @@ async def test_set_cover_position(hass: HomeAssistant, device_factory) -> None: state = hass.states.get("cover.shade") # Result of call does not update state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_BATTERY_LEVEL] == 95 assert state.attributes[ATTR_CURRENT_POSITION] == 10 # Ensure API called @@ -208,14 +205,14 @@ async def test_update_to_open_from_signal(hass: HomeAssistant, device_factory) - ) await setup_platform(hass, COVER_DOMAIN, devices=[device]) device.status.update_attribute_value(Attribute.door, "open") - assert hass.states.get("cover.garage").state == STATE_OPENING + assert hass.states.get("cover.garage").state == CoverState.OPENING # Act async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id]) # Assert await hass.async_block_till_done() state = hass.states.get("cover.garage") assert state is not None - assert state.state == STATE_OPEN + assert 
state.state == CoverState.OPEN async def test_update_to_closed_from_signal( @@ -228,14 +225,14 @@ async def test_update_to_closed_from_signal( ) await setup_platform(hass, COVER_DOMAIN, devices=[device]) device.status.update_attribute_value(Attribute.door, "closed") - assert hass.states.get("cover.garage").state == STATE_CLOSING + assert hass.states.get("cover.garage").state == CoverState.CLOSING # Act async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id]) # Assert await hass.async_block_till_done() state = hass.states.get("cover.garage") assert state is not None - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async def test_unload_config_entry(hass: HomeAssistant, device_factory) -> None: diff --git a/tests/components/smartthings/test_init.py b/tests/components/smartthings/test_init.py index fa30fa258cf..e518f84aecb 100644 --- a/tests/components/smartthings/test_init.py +++ b/tests/components/smartthings/test_init.py @@ -23,8 +23,8 @@ from homeassistant.components.smartthings.const import ( PLATFORMS, SIGNAL_SMARTTHINGS_UPDATE, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_connect diff --git a/tests/components/smartthings/test_light.py b/tests/components/smartthings/test_light.py index 22b181a3645..b46188b5b5f 100644 --- a/tests/components/smartthings/test_light.py +++ b/tests/components/smartthings/test_light.py @@ -9,7 +9,7 @@ import pytest from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, @@ -101,8 +101,8 @@ async def test_entity_state(hass: HomeAssistant, light_devices) -> None: assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION assert state.attributes[ATTR_BRIGHTNESS] == 255 assert ATTR_HS_COLOR not in state.attributes[ATTR_HS_COLOR] - assert isinstance(state.attributes[ATTR_COLOR_TEMP], int) - assert state.attributes[ATTR_COLOR_TEMP] == 222 + assert isinstance(state.attributes[ATTR_COLOR_TEMP_KELVIN], int) + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 4500 async def test_entity_and_device_attributes( @@ -273,7 +273,7 @@ async def test_turn_on_with_color_temp(hass: HomeAssistant, light_devices) -> No await hass.services.async_call( "light", "turn_on", - {ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_COLOR_TEMP: 300}, + {ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_COLOR_TEMP_KELVIN: 3333}, blocking=True, ) # This test schedules and update right after the call @@ -282,7 +282,7 @@ async def test_turn_on_with_color_temp(hass: HomeAssistant, light_devices) -> No state = hass.states.get("light.color_dimmer_2") assert state is not None assert state.state == "on" - assert state.attributes[ATTR_COLOR_TEMP] == 300 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3333 async def test_update_from_signal(hass: HomeAssistant, device_factory) -> None: diff --git a/tests/components/smarttub/test_config_flow.py b/tests/components/smarttub/test_config_flow.py index c625f217405..5832841641c 100644 --- a/tests/components/smarttub/test_config_flow.py +++ b/tests/components/smarttub/test_config_flow.py @@ -66,15 +66,7 @@ async def test_reauth_success(hass: HomeAssistant, smarttub_api, account) -> Non ) mock_entry.add_to_hass(hass) - result = await 
hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -107,15 +99,7 @@ async def test_reauth_wrong_account(hass: HomeAssistant, smarttub_api, account) # we try to reauth account #2, and the user successfully authenticates to account #1 account.id = mock_entry1.unique_id - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry2.unique_id, - "entry_id": mock_entry2.entry_id, - }, - data=mock_entry2.data, - ) + result = await mock_entry2.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/smarty/__init__.py b/tests/components/smarty/__init__.py new file mode 100644 index 00000000000..c5ae7f2d382 --- /dev/null +++ b/tests/components/smarty/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Smarty integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Set up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/smarty/conftest.py b/tests/components/smarty/conftest.py new file mode 100644 index 00000000000..a9b518d88f4 --- /dev/null +++ b/tests/components/smarty/conftest.py @@ -0,0 +1,64 @@ +"""Smarty tests configuration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.smarty import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override integration setup.""" + with patch( + "homeassistant.components.smarty.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_smarty() -> Generator[AsyncMock]: + """Mock a Smarty client.""" + with ( + patch( + "homeassistant.components.smarty.coordinator.Smarty", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.smarty.config_flow.Smarty", + new=mock_client, + ), + ): + client = mock_client.return_value + client.update.return_value = True + client.fan_speed = 100 + client.warning = False + client.alarm = False + client.boost = False + client.enable_boost.return_value = True + client.disable_boost.return_value = True + client.supply_air_temperature = 20 + client.extract_air_temperature = 23 + client.outdoor_air_temperature = 24 + client.supply_fan_speed = 66 + client.extract_fan_speed = 100 + client.filter_timer = 31 + client.get_configuration_version.return_value = 111 + client.get_software_version.return_value = 127 + client.reset_filters_timer.return_value = True + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "192.168.0.2"}, + entry_id="01JAZ5DPW8C62D620DGYNG2R8H", + ) diff --git a/tests/components/smarty/snapshots/test_binary_sensor.ambr 
b/tests/components/smarty/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..2f943a25012 --- /dev/null +++ b/tests/components/smarty/snapshots/test_binary_sensor.ambr @@ -0,0 +1,141 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.mock_title_alarm-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_title_alarm', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alarm', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_alarm', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_alarm-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Alarm', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_alarm', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_boost_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_title_boost_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Boost state', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost_state', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_boost_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Boost state', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_boost_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_warning-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_title_warning', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Warning', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'warning', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_warning', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_warning-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Warning', + }), + 'context': , + 'entity_id': 
'binary_sensor.mock_title_warning', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smarty/snapshots/test_button.ambr b/tests/components/smarty/snapshots/test_button.ambr new file mode 100644 index 00000000000..38849bd2b2e --- /dev/null +++ b/tests/components/smarty/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[button.mock_title_reset_filters_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.mock_title_reset_filters_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filters timer', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filters_timer', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_reset_filters_timer', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.mock_title_reset_filters_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Reset filters timer', + }), + 'context': , + 'entity_id': 'button.mock_title_reset_filters_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/smarty/snapshots/test_fan.ambr b/tests/components/smarty/snapshots/test_fan.ambr new file mode 100644 index 00000000000..8ca95beeb86 --- /dev/null +++ b/tests/components/smarty/snapshots/test_fan.ambr @@ -0,0 +1,54 @@ +# serializer version: 1 +# name: test_all_entities[fan.mock_title-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': None, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.mock_title', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'fan', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[fan.mock_title-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title', + 'percentage': 0, + 'percentage_step': 33.333333333333336, + 'preset_mode': None, + 'preset_modes': None, + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.mock_title', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smarty/snapshots/test_init.ambr b/tests/components/smarty/snapshots/test_init.ambr new file mode 100644 index 00000000000..b25cdb9dc3a --- /dev/null +++ b/tests/components/smarty/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 111, + 'id': , + 
'identifiers': set({ + tuple( + 'smarty', + '01JAZ5DPW8C62D620DGYNG2R8H', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Salda', + 'model': None, + 'model_id': None, + 'name': 'Mock Title', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 127, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/smarty/snapshots/test_sensor.ambr b/tests/components/smarty/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..2f713db7f83 --- /dev/null +++ b/tests/components/smarty/snapshots/test_sensor.ambr @@ -0,0 +1,286 @@ +# serializer version: 1 +# name: test_all_entities[sensor.mock_title_extract_air_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_extract_air_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Extract air temperature', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'extract_air_temperature', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_extract_air_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.mock_title_extract_air_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Extract air temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_extract_air_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23', + }) +# --- +# name: test_all_entities[sensor.mock_title_extract_fan_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_extract_fan_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Extract fan speed', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'extract_fan_speed', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_extract_fan_speed', + 'unit_of_measurement': 'rpm', + }) +# --- +# name: test_all_entities[sensor.mock_title_extract_fan_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Extract fan speed', + 'unit_of_measurement': 'rpm', + }), + 'context': , + 'entity_id': 'sensor.mock_title_extract_fan_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_all_entities[sensor.mock_title_filter_days_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_filter_days_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter days left', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_days_left', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_filter_days_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.mock_title_filter_days_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Filter days left', + }), + 'context': , + 'entity_id': 'sensor.mock_title_filter_days_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-11-21T01:00:00+00:00', + }) +# --- +# name: test_all_entities[sensor.mock_title_outdoor_air_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_outdoor_air_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outdoor air temperature', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outdoor_air_temperature', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_outdoor_air_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.mock_title_outdoor_air_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Outdoor air temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_outdoor_air_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24', + }) +# --- +# name: test_all_entities[sensor.mock_title_supply_air_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_supply_air_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply air temperature', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_air_temperature', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_supply_air_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.mock_title_supply_air_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Supply air temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_supply_air_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_all_entities[sensor.mock_title_supply_fan_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_supply_fan_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Supply fan speed', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_fan_speed', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_supply_fan_speed', + 'unit_of_measurement': 'rpm', + }) +# --- +# name: test_all_entities[sensor.mock_title_supply_fan_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Supply fan speed', + 'unit_of_measurement': 'rpm', + }), + 'context': , + 'entity_id': 'sensor.mock_title_supply_fan_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '66', + }) +# --- diff --git a/tests/components/smarty/snapshots/test_switch.ambr b/tests/components/smarty/snapshots/test_switch.ambr new file mode 100644 index 00000000000..be1da7c6961 --- /dev/null +++ b/tests/components/smarty/snapshots/test_switch.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[switch.mock_title_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Boost', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.mock_title_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Boost', + }), + 'context': , + 'entity_id': 'switch.mock_title_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smarty/test_binary_sensor.py b/tests/components/smarty/test_binary_sensor.py new file mode 100644 index 00000000000..d28fb44e1ce --- /dev/null +++ b/tests/components/smarty/test_binary_sensor.py @@ -0,0 +1,27 @@ +"""Tests for the Smarty binary sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/smarty/test_button.py b/tests/components/smarty/test_button.py new file mode 100644 index 00000000000..0a7b67f2be6 --- /dev/null +++ b/tests/components/smarty/test_button.py @@ -0,0 +1,45 @@ +"""Tests for the Smarty button platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + target={ATTR_ENTITY_ID: "button.mock_title_reset_filters_timer"}, + blocking=True, + ) + mock_smarty.reset_filters_timer.assert_called_once_with() diff --git a/tests/components/smarty/test_config_flow.py b/tests/components/smarty/test_config_flow.py new file mode 100644 index 00000000000..fad4f27ca1c --- /dev/null +++ b/tests/components/smarty/test_config_flow.py @@ -0,0 +1,165 @@ +"""Test the smarty config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.smarty.const import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the full flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "192.168.0.2" + assert result["data"] == {CONF_HOST: "192.168.0.2"} + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_cannot_connect( + hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle 
cannot connect error.""" + + mock_smarty.update.return_value = False + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_smarty.update.return_value = True + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_unknown_error( + hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle unknown error.""" + + mock_smarty.update.side_effect = Exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + mock_smarty.update.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_existing_entry( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test we handle existing entry.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import_flow( + hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the import flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "192.168.0.2", CONF_NAME: "Smarty"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Smarty" + assert result["data"] == {CONF_HOST: "192.168.0.2"} + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import_cannot_connect( + hass: HomeAssistant, mock_smarty: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + + mock_smarty.update.return_value = False + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "192.168.0.2", CONF_NAME: "Smarty"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_import_unknown_error( + hass: HomeAssistant, mock_smarty: AsyncMock +) -> None: + """Test we handle unknown error.""" + + mock_smarty.update.side_effect = Exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "192.168.0.2", CONF_NAME: "Smarty"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" diff --git a/tests/components/smarty/test_fan.py 
b/tests/components/smarty/test_fan.py new file mode 100644 index 00000000000..2c0135b7aa2 --- /dev/null +++ b/tests/components/smarty/test_fan.py @@ -0,0 +1,27 @@ +"""Tests for the Smarty fan platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.FAN]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/smarty/test_init.py b/tests/components/smarty/test_init.py new file mode 100644 index 00000000000..0366ea9eade --- /dev/null +++ b/tests/components/smarty/test_init.py @@ -0,0 +1,82 @@ +"""Tests for the Smarty component.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.smarty import DOMAIN +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import device_registry as dr, issue_registry as ir +from homeassistant.setup import async_setup_component + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_import_flow( + hass: HomeAssistant, + mock_smarty: AsyncMock, + issue_registry: ir.IssueRegistry, + mock_setup_entry: AsyncMock, +) -> None: + """Test import flow.""" + assert await async_setup_component( + hass, DOMAIN, {DOMAIN: {CONF_HOST: "192.168.0.2", CONF_NAME: "smarty"}} + ) + await hass.async_block_till_done() + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert (HOMEASSISTANT_DOMAIN, "deprecated_yaml_smarty") in issue_registry.issues + + +async def test_import_flow_already_exists( + hass: HomeAssistant, + mock_smarty: AsyncMock, + issue_registry: ir.IssueRegistry, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test import flow when entry already exists.""" + mock_config_entry.add_to_hass(hass) + assert await async_setup_component( + hass, DOMAIN, {DOMAIN: {CONF_HOST: "192.168.0.2", CONF_NAME: "smarty"}} + ) + await hass.async_block_till_done() + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert (HOMEASSISTANT_DOMAIN, "deprecated_yaml_smarty") in issue_registry.issues + + +async def test_import_flow_error( + hass: HomeAssistant, + mock_smarty: AsyncMock, + issue_registry: ir.IssueRegistry, + mock_setup_entry: AsyncMock, +) -> None: + """Test import flow when error occurs.""" + mock_smarty.update.return_value = False + assert await async_setup_component( + hass, DOMAIN, {DOMAIN: {CONF_HOST: "192.168.0.2", CONF_NAME: "smarty"}} + ) + await hass.async_block_till_done() + assert len(hass.config_entries.async_entries(DOMAIN)) == 0 + assert ( + DOMAIN, + "deprecated_yaml_import_issue_cannot_connect", + ) in issue_registry.issues + + +async def test_device( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> 
None: + """Test device.""" + await setup_integration(hass, mock_config_entry) + device = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.entry_id)} + ) + assert device + assert device == snapshot diff --git a/tests/components/smarty/test_sensor.py b/tests/components/smarty/test_sensor.py new file mode 100644 index 00000000000..a534a2ebb0f --- /dev/null +++ b/tests/components/smarty/test_sensor.py @@ -0,0 +1,29 @@ +"""Tests for the Smarty sensor platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.freeze_time("2023-10-21") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/smarty/test_switch.py b/tests/components/smarty/test_switch.py new file mode 100644 index 00000000000..1a6748e2d23 --- /dev/null +++ b/tests/components/smarty/test_switch.py @@ -0,0 +1,58 @@ +"""Tests for the Smarty switch platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + target={ATTR_ENTITY_ID: "switch.mock_title_boost"}, + blocking=True, + ) + mock_smarty.enable_boost.assert_called_once_with() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + target={ATTR_ENTITY_ID: "switch.mock_title_boost"}, + blocking=True, + ) + mock_smarty.disable_boost.assert_called_once_with() diff --git a/tests/components/smhi/common.py b/tests/components/smhi/common.py deleted file mode 100644 index 7339ba76ac1..00000000000 --- a/tests/components/smhi/common.py +++ /dev/null @@ -1,11 +0,0 @@ -"""Common test utilities.""" - -from unittest.mock import Mock - - -class AsyncMock(Mock): - """Implements Mock async.""" - - async def __call__(self, *args, **kwargs): - """Hack for async support for Mock.""" - return super().__call__(*args, **kwargs) diff --git a/tests/components/smhi/snapshots/test_weather.ambr b/tests/components/smhi/snapshots/test_weather.ambr index 9ab0375df83..2c0884d804d 100644 --- a/tests/components/smhi/snapshots/test_weather.ambr +++ b/tests/components/smhi/snapshots/test_weather.ambr @@ -9,9 +9,9 @@ 'datetime': '2023-08-08T00:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, - 'pressure': 992.0, - 'temperature': 18.0, - 'templow': 18.0, + 'pressure': 992.4, + 'temperature': 18.2, + 'templow': 18.2, 'wind_bearing': 103, 'wind_gust_speed': 23.76, 'wind_speed': 9.72, @@ -22,9 +22,9 @@ 'datetime': '2023-08-08T01:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, - 'pressure': 992.0, - 'temperature': 18.0, - 'templow': 18.0, + 'pressure': 992.4, + 'temperature': 17.5, + 'templow': 17.5, 'wind_bearing': 104, 'wind_gust_speed': 27.36, 'wind_speed': 9.72, @@ -35,9 +35,9 @@ 'datetime': '2023-08-08T02:00:00+00:00', 'humidity': 97, 'precipitation': 0.0, - 'pressure': 992.0, - 'temperature': 18.0, - 'templow': 18.0, + 'pressure': 992.2, + 'temperature': 17.6, + 'templow': 17.6, 'wind_bearing': 109, 'wind_gust_speed': 32.4, 'wind_speed': 12.96, @@ -48,9 +48,9 @@ 'datetime': '2023-08-08T03:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, - 'pressure': 991.0, - 'temperature': 17.0, - 'templow': 17.0, + 'pressure': 991.7, + 'temperature': 17.1, + 'templow': 17.1, 'wind_bearing': 114, 'wind_gust_speed': 32.76, 'wind_speed': 10.08, @@ -66,10 +66,10 @@ 'friendly_name': 'test', 'humidity': 100, 'precipitation_unit': , - 'pressure': 992.0, + 'pressure': 992.4, 'pressure_unit': , 'supported_features': , - 'temperature': 18.0, + 'temperature': 18.4, 'temperature_unit': , 'thunder_probability': 37, 'visibility': 0.4, @@ -90,9 +90,9 @@ 'datetime': '2023-08-07T12:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, - 'pressure': 991.0, - 'temperature': 18.0, - 'templow': 15.0, + 'pressure': 991.7, + 'temperature': 18.4, + 'templow': 14.8, 
'wind_bearing': 114, 'wind_gust_speed': 32.76, 'wind_speed': 10.08, @@ -103,9 +103,9 @@ 'datetime': '2023-08-08T12:00:00+00:00', 'humidity': 97, 'precipitation': 10.6, - 'pressure': 984.0, - 'temperature': 15.0, - 'templow': 11.0, + 'pressure': 984.1, + 'temperature': 14.8, + 'templow': 10.6, 'wind_bearing': 183, 'wind_gust_speed': 27.36, 'wind_speed': 11.16, @@ -116,8 +116,8 @@ 'datetime': '2023-08-09T12:00:00+00:00', 'humidity': 95, 'precipitation': 6.3, - 'pressure': 1001.0, - 'temperature': 12.0, + 'pressure': 1001.4, + 'temperature': 12.5, 'templow': 11.0, 'wind_bearing': 166, 'wind_gust_speed': 48.24, @@ -129,9 +129,9 @@ 'datetime': '2023-08-10T12:00:00+00:00', 'humidity': 75, 'precipitation': 4.8, - 'pressure': 1011.0, - 'temperature': 14.0, - 'templow': 10.0, + 'pressure': 1011.1, + 'temperature': 13.9, + 'templow': 10.4, 'wind_bearing': 174, 'wind_gust_speed': 29.16, 'wind_speed': 11.16, @@ -142,9 +142,9 @@ 'datetime': '2023-08-11T12:00:00+00:00', 'humidity': 69, 'precipitation': 0.6, - 'pressure': 1015.0, - 'temperature': 18.0, - 'templow': 12.0, + 'pressure': 1015.3, + 'temperature': 17.6, + 'templow': 11.7, 'wind_bearing': 197, 'wind_gust_speed': 27.36, 'wind_speed': 10.08, @@ -157,7 +157,7 @@ 'precipitation': 0.0, 'pressure': 1014.0, 'temperature': 17.0, - 'templow': 12.0, + 'templow': 12.3, 'wind_bearing': 225, 'wind_gust_speed': 28.08, 'wind_speed': 8.64, @@ -168,9 +168,9 @@ 'datetime': '2023-08-13T12:00:00+00:00', 'humidity': 59, 'precipitation': 0.0, - 'pressure': 1013.0, + 'pressure': 1013.6, 'temperature': 20.0, - 'templow': 14.0, + 'templow': 13.6, 'wind_bearing': 234, 'wind_gust_speed': 35.64, 'wind_speed': 14.76, @@ -181,9 +181,9 @@ 'datetime': '2023-08-14T12:00:00+00:00', 'humidity': 56, 'precipitation': 0.0, - 'pressure': 1015.0, - 'temperature': 21.0, - 'templow': 14.0, + 'pressure': 1015.3, + 'temperature': 20.8, + 'templow': 13.5, 'wind_bearing': 216, 'wind_gust_speed': 33.12, 'wind_speed': 13.68, @@ -194,9 +194,9 @@ 'datetime': '2023-08-15T12:00:00+00:00', 'humidity': 64, 'precipitation': 3.6, - 'pressure': 1014.0, - 'temperature': 20.0, - 'templow': 14.0, + 'pressure': 1014.3, + 'temperature': 20.4, + 'templow': 14.3, 'wind_bearing': 226, 'wind_gust_speed': 33.12, 'wind_speed': 13.68, @@ -208,8 +208,8 @@ 'humidity': 61, 'precipitation': 2.4, 'pressure': 1014.0, - 'temperature': 20.0, - 'templow': 14.0, + 'temperature': 20.2, + 'templow': 13.8, 'wind_bearing': 233, 'wind_gust_speed': 33.48, 'wind_speed': 14.04, @@ -225,9 +225,9 @@ 'datetime': '2023-08-07T12:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, - 'pressure': 991.0, - 'temperature': 18.0, - 'templow': 15.0, + 'pressure': 991.7, + 'temperature': 18.4, + 'templow': 14.8, 'wind_bearing': 114, 'wind_gust_speed': 32.76, 'wind_speed': 10.08, @@ -240,9 +240,9 @@ 'datetime': '2023-08-13T12:00:00+00:00', 'humidity': 59, 'precipitation': 0.0, - 'pressure': 1013.0, + 'pressure': 1013.6, 'temperature': 20.0, - 'templow': 14.0, + 'templow': 13.6, 'wind_bearing': 234, 'wind_gust_speed': 35.64, 'wind_speed': 14.76, @@ -255,9 +255,9 @@ 'datetime': '2023-08-07T09:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, - 'pressure': 992.0, - 'temperature': 18.0, - 'templow': 18.0, + 'pressure': 992.4, + 'temperature': 18.2, + 'templow': 18.2, 'wind_bearing': 103, 'wind_gust_speed': 23.76, 'wind_speed': 9.72, @@ -270,9 +270,9 @@ 'datetime': '2023-08-07T15:00:00+00:00', 'humidity': 89, 'precipitation': 0.0, - 'pressure': 991.0, - 'temperature': 16.0, - 'templow': 16.0, + 'pressure': 991.7, + 'temperature': 16.2, + 
'templow': 16.2, 'wind_bearing': 108, 'wind_gust_speed': 31.68, 'wind_speed': 12.24, @@ -285,10 +285,10 @@ 'friendly_name': 'test', 'humidity': 100, 'precipitation_unit': , - 'pressure': 992.0, + 'pressure': 992.4, 'pressure_unit': , 'supported_features': , - 'temperature': 18.0, + 'temperature': 18.4, 'temperature_unit': , 'thunder_probability': 37, 'visibility': 0.4, diff --git a/tests/components/smhi/test_config_flow.py b/tests/components/smhi/test_config_flow.py index a771bcc1e1d..4195d1e5d52 100644 --- a/tests/components/smhi/test_config_flow.py +++ b/tests/components/smhi/test_config_flow.py @@ -217,13 +217,7 @@ async def test_reconfigure_flow( name=entry.title, ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM with patch( diff --git a/tests/components/smlight/__init__.py b/tests/components/smlight/__init__.py index 37184226507..e518e0573ba 100644 --- a/tests/components/smlight/__init__.py +++ b/tests/components/smlight/__init__.py @@ -1 +1,21 @@ """Tests for the SMLIGHT Zigbee adapter integration.""" + +from collections.abc import Callable +from unittest.mock import MagicMock + +from pysmlight.const import Events as SmEvents +from pysmlight.sse import MessageEvent + + +def get_mock_event_function( + mock: MagicMock, event: SmEvents +) -> Callable[[MessageEvent], None]: + """Extract event function from mock call_args.""" + return next( + ( + call_args[0][1] + for call_args in mock.sse.register_callback.call_args_list + if call_args[0][0] == event + ), + None, + ) diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py index 0338bf4b672..665a55ba880 100644 --- a/tests/components/smlight/conftest.py +++ b/tests/components/smlight/conftest.py @@ -1,16 +1,22 @@ """Common fixtures for the SMLIGHT Zigbee tests.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, MagicMock, patch -from pysmlight.web import Info, Sensors +from pysmlight.sse import sseClient +from pysmlight.web import CmdWrapper, Firmware, Info, Sensors import pytest +from homeassistant.components.smlight import PLATFORMS from homeassistant.components.smlight.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import ( + MockConfigEntry, + load_json_array_fixture, + load_json_object_fixture, +) MOCK_HOST = "slzb-06.local" MOCK_USERNAME = "test-user" @@ -32,7 +38,32 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_config_entry_host() -> MockConfigEntry: + """Return the default mocked config entry, no credentials.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: MOCK_HOST, + }, + unique_id="aa:bb:cc:dd:ee:ff", + ) + + +@pytest.fixture +def platforms() -> list[Platform]: + """Platforms, which should be loaded during the test.""" + return PLATFORMS + + +@pytest.fixture(autouse=True) +async def mock_patch_platforms(platforms: list[str]) -> AsyncGenerator[None]: + """Fixture to set up platforms for tests.""" + with 
patch(f"homeassistant.components.{DOMAIN}.PLATFORMS", platforms): + yield + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.smlight.async_setup_entry", return_value=True @@ -44,9 +75,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: def mock_smlight_client(request: pytest.FixtureRequest) -> Generator[MagicMock]: """Mock the SMLIGHT API client.""" with ( - patch( - "homeassistant.components.smlight.coordinator.Api2", autospec=True - ) as smlight_mock, + patch("homeassistant.components.smlight.Api2", autospec=True) as smlight_mock, patch("homeassistant.components.smlight.config_flow.Api2", new=smlight_mock), ): api = smlight_mock.return_value @@ -58,13 +87,32 @@ def mock_smlight_client(request: pytest.FixtureRequest) -> Generator[MagicMock]: load_json_object_fixture("sensors.json", DOMAIN) ) + def get_firmware_side_effect(*args, **kwargs) -> list[Firmware]: + """Return the firmware version.""" + fw_list = [] + if kwargs.get("mode") == "zigbee": + fw_list = load_json_array_fixture("zb_firmware.json", DOMAIN) + else: + fw_list = load_json_array_fixture("esp_firmware.json", DOMAIN) + + return [Firmware.from_dict(fw) for fw in fw_list] + + api.get_firmware_version.side_effect = get_firmware_side_effect + api.check_auth_needed.return_value = False api.authenticate.return_value = True + api.cmds = AsyncMock(spec_set=CmdWrapper) + api.set_toggle = AsyncMock() + api.sse = MagicMock(spec_set=sseClient) + yield api -async def setup_integration(hass: HomeAssistant, mock_config_entry: MockConfigEntry): +async def setup_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> MockConfigEntry: """Set up the integration.""" mock_config_entry.add_to_hass(hass) diff --git a/tests/components/smlight/fixtures/esp_firmware.json b/tests/components/smlight/fixtures/esp_firmware.json new file mode 100644 index 00000000000..6ea0e1a8b44 --- /dev/null +++ b/tests/components/smlight/fixtures/esp_firmware.json @@ -0,0 +1,35 @@ +[ + { + "mode": "ESP", + "type": null, + "notes": "CHANGELOG (Current 2.5.2 vs. Previous 2.3.6):\\r\\nFixed incorrect device type detection for some devices\\r\\nFixed web interface not working on some devices\\r\\nFixed disabled SSID/pass fields\\r\\n", + "rev": "20240830", + "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/core/slzb-06-v2.5.2-ota.bin", + "ver": "v2.5.2", + "dev": false, + "prod": true, + "baud": null + }, + { + "mode": "ESP", + "type": null, + "notes": "Read/write IEEE for CC chips\\r\\nDefault black theme\\r\\nAdd device mac to MDNS ZeroConf\\r\\nBreaking change! 
socket_uptime in /ha_sensors and /metrics now in seconds\\r\\nNew 5 languages\\r\\nAdd manual ZB OTA for 06M\\r\\nAdd warning modal for ZB manual OTA\\r\\nWireGuard can now use hostname instead of IP\\r\\nWiFi AP fixes and improvements\\r\\nImproved management of socket clients\\r\\nFix \"Disable web server when socket is connected\"\\r\\nFix events tag for log\\r\\nFix ZB maual OTA header text\\r\\nFix feedback page stack overflow\\r\\nFix sta drop in AP mode after scan start", + "rev": "20240815", + "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/core/slzb-06-v2.3.6-ota.bin", + "ver": "v2.3.6", + "dev": false, + "prod": true, + "baud": null + }, + { + "mode": "ESP", + "type": null, + "notes": "release of previous version", + "rev": "10112023", + "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/core/slzb-06-0.9.9-ota.bin", + "ver": "0.9.9", + "dev": false, + "prod": true, + "baud": null + } +] diff --git a/tests/components/smlight/fixtures/info.json b/tests/components/smlight/fixtures/info.json index 72bb7c1ed9b..e3defb4410e 100644 --- a/tests/components/smlight/fixtures/info.json +++ b/tests/components/smlight/fixtures/info.json @@ -3,14 +3,17 @@ "device_ip": "192.168.1.161", "fs_total": 3456, "fw_channel": "dev", + "legacy_api": 0, + "hostname": "SLZB-06p7", "MAC": "AA:BB:CC:DD:EE:FF", "model": "SLZB-06p7", "ram_total": 296, - "sw_version": "v2.3.1.dev", + "sw_version": "v2.3.6", "wifi_mode": 0, "zb_flash_size": 704, + "zb_channel": 0, "zb_hw": "CC2652P7", "zb_ram_size": 152, - "zb_version": -1, - "zb_type": -1 + "zb_version": "20240314", + "zb_type": 0 } diff --git a/tests/components/smlight/fixtures/logs.txt b/tests/components/smlight/fixtures/logs.txt new file mode 100644 index 00000000000..f04dc881514 --- /dev/null +++ b/tests/components/smlight/fixtures/logs.txt @@ -0,0 +1 @@ +[04:28:51] setup | Starting firmware: v2.3.6\n[04:28:52] ConfigHelper | LittleFS mounted\n[04:28:52] ConfigHelper | load config\n[04:28:52] ConfigHelper | config open: Ok\n[04:28:52] setup | Config loaded\n[04:28:52] setup | Reboot reason: 3\n[04:28:52] setup | Coordinator mode: LAN\n[04:28:52] setup | Device type: SLZB-06P10\n[04:28:52] setup | Radio mode: \"ZB COORD\" Radio FW version: 20240716 Radio FW CH: PROD\n[04:28:52] Network | init\n[04:28:52] L_Y,L_B | status: 1\n[04:28:54] Network | EVENT_ETH_START\n[04:28:54] Network | EVENT_ETH_CONNECTED\n[04:28:54] Network | [MDNS] Started\n[04:28:54] Network | EVENT_ETH_GOT_IP\n[04:28:54] Network | ETH MAC: AA:BB:CC:DD:EE:FF IPv4: 192.168.0.11 GW: 192.168.0.1 Speed: 100Mbps DNS1: 192.168.0.1 DNS2: 0.0.0.0\n[04:28:54] Network | fireNetworkUp\n[04:28:54] taskZB | Waiting for zbChk\n[04:28:54] Web | Webserver started \ No newline at end of file diff --git a/tests/components/smlight/fixtures/sensors.json b/tests/components/smlight/fixtures/sensors.json index 0b2f9055e01..ea1fb9c1899 100644 --- a/tests/components/smlight/fixtures/sensors.json +++ b/tests/components/smlight/fixtures/sensors.json @@ -9,6 +9,8 @@ "wifi_connected": false, "wifi_status": 255, "disable_leds": false, - "night_mode": false, - "auto_zigbee": false + "night_mode": true, + "auto_zigbee": false, + "vpn_enabled": false, + "vpn_status": true } diff --git a/tests/components/smlight/fixtures/zb_firmware.json b/tests/components/smlight/fixtures/zb_firmware.json new file mode 100644 index 00000000000..ca9d10f87ac --- /dev/null +++ b/tests/components/smlight/fixtures/zb_firmware.json @@ -0,0 +1,46 @@ +[ + { + "mode": "ZB", + "type": 0, + "notes": "SMLIGHT latest Coordinator 
release for CC2674P10 chips [16-Jul-2024]: - +20dB TRANSMIT POWER SUPPORT; - SDK 7.41 based (latest);", + "rev": "20240716", + "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/znp-SLZB-06P10-20240716.bin", + "ver": "20240716", + "dev": false, + "prod": true, + "baud": 115200 + }, + { + "mode": "ZB", + "type": 1, + "notes": "SMLIGHT latest ROUTER release for CC2674P10 chips [16-Jul-2024]: - SDK 7.41 based (latest); Terms of use", + "rev": "20240716", + "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/zr-ZR_SLZB-06P10-20240716.bin", + "ver": "20240716", + "dev": false, + "prod": true, + "baud": 0 + }, + { + "mode": "ZB", + "type": 0, + "notes": "SMLIGHT Coordinator release for CC2674P10 chips [15-Mar-2024]: - Engineering (dev) version, not recommended (INT); - SDK 7.40 based (latest); - Baudrate: 115200; Terms of use", + "rev": "20240315", + "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/znp_LP_EM_CC2674P10_SM_tirtos7_ticlangNR.bin", + "ver": "20240315", + "dev": false, + "prod": false, + "baud": 115200 + }, + { + "mode": "ZB", + "type": 0, + "notes": "SMLIGHT Coordinator release for CC2674P10 chips [14-Mar-2024]: - Factory flashed firmware (EXT); - SDK 7.40 based (latest); - Baudrate: 115200;
Terms of use", + "rev": "20240314", + "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/znp_LP_EM_CC2674P10_SM_tirtos7_ticlangNP.bin", + "ver": "20240314", + "dev": false, + "prod": false, + "baud": 115200 + } +] diff --git a/tests/components/smlight/snapshots/test_binary_sensor.ambr b/tests/components/smlight/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..8becf5b2567 --- /dev/null +++ b/tests/components/smlight/snapshots/test_binary_sensor.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_all_binary_sensors[binary_sensor.mock_title_ethernet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_ethernet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ethernet', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ethernet', + 'unique_id': 'aa:bb:cc:dd:ee:ff_ethernet', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_ethernet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Mock Title Ethernet', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_ethernet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_internet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_internet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Internet', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'internet', + 'unique_id': 'aa:bb:cc:dd:ee:ff_internet', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_internet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Mock Title Internet', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_internet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_vpn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_vpn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VPN', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vpn', + 
'unique_id': 'aa:bb:cc:dd:ee:ff_vpn', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_vpn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Mock Title VPN', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_vpn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_wi_fi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_wi_fi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wi-Fi', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi', + 'unique_id': 'aa:bb:cc:dd:ee:ff_wifi', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_wi_fi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Mock Title Wi-Fi', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_wi_fi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smlight/snapshots/test_diagnostics.ambr b/tests/components/smlight/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..97177de1704 --- /dev/null +++ b/tests/components/smlight/snapshots/test_diagnostics.ambr @@ -0,0 +1,27 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'info': dict({ + 'MAC': 'AA:BB:CC:DD:EE:FF', + 'coord_mode': 0, + 'device_ip': '192.168.1.161', + 'fs_total': 3456, + 'fw_channel': 'dev', + 'hostname': 'SLZB-06p7', + 'legacy_api': 0, + 'model': 'SLZB-06p7', + 'ram_total': 296, + 'sw_version': 'v2.3.6', + 'wifi_mode': 0, + 'zb_channel': 0, + 'zb_flash_size': 704, + 'zb_hw': 'CC2652P7', + 'zb_ram_size': 152, + 'zb_type': 0, + 'zb_version': '20240314', + }), + 'log': list([ + '[04:28:51] setup | Starting firmware: v2.3.6\\n[04:28:52] ConfigHelper | LittleFS mounted\\n[04:28:52] ConfigHelper | load config\\n[04:28:52] ConfigHelper | config open: Ok\\n[04:28:52] setup | Config loaded\\n[04:28:52] setup | Reboot reason: 3\\n[04:28:52] setup | Coordinator mode: LAN\\n[04:28:52] setup | Device type: SLZB-06P10\\n[04:28:52] setup | Radio mode: \\"ZB COORD\\" Radio FW version: 20240716 Radio FW CH: PROD\\n[04:28:52] Network | init\\n[04:28:52] L_Y,L_B | status: 1\\n[04:28:54] Network | EVENT_ETH_START\\n[04:28:54] Network | EVENT_ETH_CONNECTED\\n[04:28:54] Network | [MDNS] Started\\n[04:28:54] Network | EVENT_ETH_GOT_IP\\n[04:28:54] Network | ETH MAC: AA:BB:CC:DD:EE:FF IPv4: 192.168.0.11 GW: 192.168.0.1 Speed: 100Mbps DNS1: 192.168.0.1 DNS2: 0.0.0.0\\n[04:28:54] Network | fireNetworkUp\\n[04:28:54] taskZB | Waiting for zbChk\\n[04:28:54] Web | Webserver started', + ]), + }) +# --- diff --git a/tests/components/smlight/snapshots/test_init.ambr b/tests/components/smlight/snapshots/test_init.ambr index 528a7b7b340..598166e537b 100644 --- a/tests/components/smlight/snapshots/test_init.ambr +++ b/tests/components/smlight/snapshots/test_init.ambr @@ -27,7 +27,7 @@ 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, - 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'sw_version': 'core: v2.3.6 / zigbee: 20240314', 'via_device_id': None, }) # --- diff --git a/tests/components/smlight/snapshots/test_sensor.ambr b/tests/components/smlight/snapshots/test_sensor.ambr index 0ff3d37b735..262ecfe1544 100644 --- a/tests/components/smlight/snapshots/test_sensor.ambr +++ b/tests/components/smlight/snapshots/test_sensor.ambr @@ -1,4 +1,62 @@ # serializer version: 1 +# name: test_sensors[sensor.mock_title_connection_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'eth', + 'wifi', + 'usb', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_connection_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connection mode', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_mode', + 'unique_id': 'aa:bb:cc:dd:ee:ff_device_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.mock_title_connection_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Mock Title Connection mode', + 'options': list([ + 'eth', + 'wifi', + 'usb', + ]), + }), + 'context': , + 'entity_id': 'sensor.mock_title_connection_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'eth', + }) +# --- # name: test_sensors[sensor.mock_title_core_chip_temp-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -53,6 +111,53 @@ 'state': '35.0', }) # --- +# name: test_sensors[sensor.mock_title_core_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_core_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core uptime', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'core_uptime', + 'unique_id': 'aa:bb:cc:dd:ee:ff_core_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.mock_title_core_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Core uptime', + }), + 'context': , + 'entity_id': 'sensor.mock_title_core_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-06-25T02:51:15+00:00', + }) +# --- # name: test_sensors[sensor.mock_title_filesystem_usage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -101,6 +206,62 @@ 'state': '188', }) # --- +# name: test_sensors[sensor.mock_title_firmware_channel-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'dev', + 'release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 
'entity_id': 'sensor.mock_title_firmware_channel', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware channel', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'firmware_channel', + 'unique_id': 'aa:bb:cc:dd:ee:ff_firmware_channel', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.mock_title_firmware_channel-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Mock Title Firmware channel', + 'options': list([ + 'dev', + 'release', + ]), + }), + 'context': , + 'entity_id': 'sensor.mock_title_firmware_channel', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'dev', + }) +# --- # name: test_sensors[sensor.mock_title_ram_usage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -203,13 +364,17 @@ 'state': '32.7', }) # --- -# name: test_sensors[sensor.slzb_06_core_chip_temp-entry] +# name: test_sensors[sensor.mock_title_zigbee_type-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'options': list([ + 'coordinator', + 'router', + 'thread', + ]), }), 'config_entry_id': , 'device_class': None, @@ -217,7 +382,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'entity_id': 'sensor.mock_title_zigbee_type', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -226,124 +391,38 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, - 'original_name': 'Core chip temp', + 'original_name': 'Zigbee type', 'platform': 'smlight', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'core_temperature', - 'unique_id': 'aa:bb:cc:dd:ee:ff_core_temperature', - 'unit_of_measurement': , + 'translation_key': 'zigbee_type', + 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_type', + 'unit_of_measurement': None, }) # --- -# name: test_sensors[sensor.slzb_06_core_chip_temp-state] +# name: test_sensors[sensor.mock_title_zigbee_type-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'slzb-06 Core chip temp', - 'state_class': , - 'unit_of_measurement': , + 'device_class': 'enum', + 'friendly_name': 'Mock Title Zigbee type', + 'options': list([ + 'coordinator', + 'router', + 'thread', + ]), }), 'context': , - 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'entity_id': 'sensor.mock_title_zigbee_type', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '35.0', + 'state': 'coordinator', }) # --- -# name: test_sensors[sensor.slzb_06_core_chip_temp] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'slzb-06 Core chip temp', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.slzb_06_core_chip_temp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '35.0', - }) -# --- -# name: test_sensors[sensor.slzb_06_core_chip_temp].1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', 
- 'entity_category': , - 'entity_id': 'sensor.slzb_06_core_chip_temp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Core chip temp', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'core_temperature', - 'unique_id': 'aa:bb:cc:dd:ee:ff_core_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.slzb_06_core_chip_temp].2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://slzb-06.local', - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'SMLIGHT', - 'model': 'SLZB-06p7', - 'model_id': None, - 'name': 'slzb-06', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': 'core: v2.3.1.dev / zigbee: -1', - 'via_device_id': None, - }) -# --- -# name: test_sensors[sensor.slzb_06_filesystem_usage-entry] +# name: test_sensors[sensor.mock_title_zigbee_uptime-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -355,7 +434,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'entity_id': 'sensor.mock_title_zigbee_uptime', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -365,377 +444,28 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, - 'original_name': 'Filesystem usage', + 'original_name': 'Zigbee uptime', 'platform': 'smlight', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'fs_usage', - 'unique_id': 'aa:bb:cc:dd:ee:ff_fs_usage', - 'unit_of_measurement': , + 'translation_key': 'socket_uptime', + 'unique_id': 'aa:bb:cc:dd:ee:ff_socket_uptime', + 'unit_of_measurement': None, }) # --- -# name: test_sensors[sensor.slzb_06_filesystem_usage-state] +# name: test_sensors[sensor.mock_title_zigbee_uptime-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'slzb-06 Filesystem usage', - 'unit_of_measurement': , + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Zigbee uptime', }), 'context': , - 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'entity_id': 'sensor.mock_title_zigbee_uptime', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '188', - }) -# --- -# name: test_sensors[sensor.slzb_06_filesystem_usage] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'slzb-06 Filesystem usage', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.slzb_06_filesystem_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '188', - }) -# --- -# name: test_sensors[sensor.slzb_06_filesystem_usage].1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.slzb_06_filesystem_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - 
}), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Filesystem usage', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'fs_usage', - 'unique_id': 'aa:bb:cc:dd:ee:ff_fs_usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.slzb_06_filesystem_usage].2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://slzb-06.local', - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'SMLIGHT', - 'model': 'SLZB-06p7', - 'model_id': None, - 'name': 'slzb-06', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': 'core: v2.3.1.dev / zigbee: -1', - 'via_device_id': None, - }) -# --- -# name: test_sensors[sensor.slzb_06_ram_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.slzb_06_ram_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'RAM usage', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ram_usage', - 'unique_id': 'aa:bb:cc:dd:ee:ff_ram_usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.slzb_06_ram_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'slzb-06 RAM usage', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.slzb_06_ram_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '99', - }) -# --- -# name: test_sensors[sensor.slzb_06_ram_usage] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'slzb-06 RAM usage', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.slzb_06_ram_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '99', - }) -# --- -# name: test_sensors[sensor.slzb_06_ram_usage].1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.slzb_06_ram_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'RAM usage', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ram_usage', - 'unique_id': 'aa:bb:cc:dd:ee:ff_ram_usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.slzb_06_ram_usage].2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://slzb-06.local', - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': 
, - 'identifiers': set({ - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'SMLIGHT', - 'model': 'SLZB-06p7', - 'model_id': None, - 'name': 'slzb-06', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': 'core: v2.3.1.dev / zigbee: -1', - 'via_device_id': None, - }) -# --- -# name: test_sensors[sensor.slzb_06_zigbee_chip_temp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Zigbee chip temp', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'zigbee_temperature', - 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.slzb_06_zigbee_chip_temp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'slzb-06 Zigbee chip temp', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '32.7', - }) -# --- -# name: test_sensors[sensor.slzb_06_zigbee_chip_temp] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'slzb-06 Zigbee chip temp', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '32.7', - }) -# --- -# name: test_sensors[sensor.slzb_06_zigbee_chip_temp].1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Zigbee chip temp', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'zigbee_temperature', - 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.slzb_06_zigbee_chip_temp].2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://slzb-06.local', - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'SMLIGHT', - 'model': 'SLZB-06p7', - 'model_id': None, - 'name': 'slzb-06', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': 'core: v2.3.1.dev / zigbee: 
-1', - 'via_device_id': None, + 'state': '2024-06-30T23:57:53+00:00', }) # --- diff --git a/tests/components/smlight/snapshots/test_switch.ambr b/tests/components/smlight/snapshots/test_switch.ambr new file mode 100644 index 00000000000..733d002be0f --- /dev/null +++ b/tests/components/smlight/snapshots/test_switch.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_switch_setup[switch.mock_title_auto_zigbee_update-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_auto_zigbee_update', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Auto Zigbee update', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_zigbee_update', + 'unique_id': 'aa:bb:cc:dd:ee:ff-auto_zigbee_update', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[switch.mock_title_auto_zigbee_update-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title Auto Zigbee update', + }), + 'context': , + 'entity_id': 'switch.mock_title_auto_zigbee_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_setup[switch.mock_title_disable_leds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_disable_leds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Disable LEDs', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disable_led', + 'unique_id': 'aa:bb:cc:dd:ee:ff-disable_led', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[switch.mock_title_disable_leds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title Disable LEDs', + }), + 'context': , + 'entity_id': 'switch.mock_title_disable_leds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_setup[switch.mock_title_led_night_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_led_night_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'LED night mode', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'night_mode', + 'unique_id': 'aa:bb:cc:dd:ee:ff-night_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[switch.mock_title_led_night_mode-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title LED night mode', + }), + 'context': , + 'entity_id': 'switch.mock_title_led_night_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[switch.mock_title_vpn_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_vpn_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VPN enabled', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vpn_enabled', + 'unique_id': 'aa:bb:cc:dd:ee:ff-vpn_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[switch.mock_title_vpn_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title VPN enabled', + }), + 'context': , + 'entity_id': 'switch.mock_title_vpn_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smlight/snapshots/test_update.ambr b/tests/components/smlight/snapshots/test_update.ambr new file mode 100644 index 00000000000..ed0085dcdc8 --- /dev/null +++ b/tests/components/smlight/snapshots/test_update.ambr @@ -0,0 +1,119 @@ +# serializer version: 1 +# name: test_update_setup[update.mock_title_core_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.mock_title_core_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core firmware', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'core_update', + 'unique_id': 'aa:bb:cc:dd:ee:ff-core_update', + 'unit_of_measurement': None, + }) +# --- +# name: test_update_setup[update.mock_title_core_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smlight/icon.png', + 'friendly_name': 'Mock Title Core firmware', + 'in_progress': False, + 'installed_version': 'v2.3.6', + 'latest_version': 'v2.5.2', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.mock_title_core_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_setup[update.mock_title_zigbee_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.mock_title_zigbee_firmware', 
+ 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Zigbee firmware', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'zigbee_update', + 'unique_id': 'aa:bb:cc:dd:ee:ff-zigbee_update', + 'unit_of_measurement': None, + }) +# --- +# name: test_update_setup[update.mock_title_zigbee_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smlight/icon.png', + 'friendly_name': 'Mock Title Zigbee firmware', + 'in_progress': False, + 'installed_version': '20240314', + 'latest_version': '20240716', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.mock_title_zigbee_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/smlight/test_binary_sensor.py b/tests/components/smlight/test_binary_sensor.py new file mode 100644 index 00000000000..b1d72b66dcf --- /dev/null +++ b/tests/components/smlight/test_binary_sensor.py @@ -0,0 +1,107 @@ +"""Tests for the SMLIGHT binary sensor platform.""" + +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +from pysmlight.const import Events +from pysmlight.sse import MessageEvent +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.smlight.const import SCAN_INTERNET_INTERVAL +from homeassistant.const import STATE_ON, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import get_mock_event_function +from .conftest import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +pytestmark = [ + pytest.mark.usefixtures( + "mock_smlight_client", + ) +] + +MOCK_INET_STATE = MessageEvent( + type="EVENT_INET_STATE", + message="EVENT_INET_STATE", + data="ok", + origin="http://slzb-06.local", + last_event_id="", +) + + +@pytest.fixture +def platforms() -> list[Platform]: + """Platforms, which should be loaded during the test.""" + return [Platform.BINARY_SENSOR] + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_binary_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the SMLIGHT binary sensors.""" + entry = await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + await hass.config_entries.async_unload(entry.entry_id) + + +async def test_disabled_by_default_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test wifi sensor is disabled by default .""" + await setup_integration(hass, mock_config_entry) + + for sensor in ("wi_fi", "vpn"): + assert not hass.states.get(f"binary_sensor.mock_title_{sensor}") + + assert ( + entry := entity_registry.async_get(f"binary_sensor.mock_title_{sensor}") + ) + assert entry.disabled + assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + +async def test_internet_sensor_event( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test internet sensor event.""" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("binary_sensor.mock_title_internet") + assert state is not None + assert state.state == STATE_UNKNOWN + + assert len(mock_smlight_client.get_param.mock_calls) == 1 + mock_smlight_client.get_param.assert_called_with("inetState") + + freezer.tick(SCAN_INTERNET_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert len(mock_smlight_client.get_param.mock_calls) == 2 + mock_smlight_client.get_param.assert_called_with("inetState") + + event_function = get_mock_event_function( + mock_smlight_client, Events.EVENT_INET_STATE + ) + + event_function(MOCK_INET_STATE) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.mock_title_internet") + assert state is not None + assert state.state == STATE_ON diff --git a/tests/components/smlight/test_button.py b/tests/components/smlight/test_button.py new file mode 100644 index 00000000000..3721ee815e6 --- /dev/null +++ b/tests/components/smlight/test_button.py @@ -0,0 +1,117 @@ +"""Tests for SMLIGHT SLZB-06 button entities.""" + +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +from pysmlight import Info +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.smlight.const import SCAN_INTERVAL +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.fixture +def platforms() -> Platform | list[Platform]: + 
"""Platforms, which should be loaded during the test.""" + return [Platform.BUTTON] + + +MOCK_ROUTER = Info(MAC="AA:BB:CC:DD:EE:FF", zb_type=1) + + +@pytest.mark.parametrize( + ("entity_id", "method"), + [ + ("core_restart", "reboot"), + ("zigbee_flash_mode", "zb_bootloader"), + ("zigbee_restart", "zb_restart"), + ("reconnect_zigbee_router", "zb_router"), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_buttons( + hass: HomeAssistant, + entity_id: str, + entity_registry: er.EntityRegistry, + method: str, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test creation of button entities.""" + mock_smlight_client.get_info.return_value = MOCK_ROUTER + await setup_integration(hass, mock_config_entry) + + state = hass.states.get(f"button.mock_title_{entity_id}") + assert state is not None + assert state.state == STATE_UNKNOWN + + entry = entity_registry.async_get(f"button.mock_title_{entity_id}") + assert entry is not None + assert entry.unique_id == f"aa:bb:cc:dd:ee:ff-{entity_id}" + + mock_method = getattr(mock_smlight_client.cmds, method) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: f"button.mock_title_{entity_id}"}, + blocking=True, + ) + + assert len(mock_method.mock_calls) == 1 + mock_method.assert_called_with() + + +@pytest.mark.parametrize("entity_id", ["zigbee_flash_mode", "reconnect_zigbee_router"]) +async def test_disabled_by_default_buttons( + hass: HomeAssistant, + entity_id: str, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test the disabled by default buttons.""" + mock_smlight_client.get_info.return_value = MOCK_ROUTER + await setup_integration(hass, mock_config_entry) + + assert not hass.states.get(f"button.mock_{entity_id}") + + assert (entry := entity_registry.async_get(f"button.mock_title_{entity_id}")) + assert entry.disabled + assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + +async def test_remove_router_reconnect( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test removal of orphaned router reconnect button.""" + save_mock = mock_smlight_client.get_info.return_value + mock_smlight_client.get_info.return_value = MOCK_ROUTER + mock_config_entry = await setup_integration(hass, mock_config_entry) + + entities = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + assert len(entities) == 4 + assert entities[3].unique_id == "aa:bb:cc:dd:ee:ff-reconnect_zigbee_router" + + mock_smlight_client.get_info.return_value = save_mock + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + + await hass.async_block_till_done() + + entity = entity_registry.async_get("button.mock_title_reconnect_zigbee_router") + assert entity is None diff --git a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py index 9a23a8de753..2fd39f75704 100644 --- a/tests/components/smlight/test_config_flow.py +++ b/tests/components/smlight/test_config_flow.py @@ -91,7 +91,7 @@ async def test_zeroconf_flow( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["context"]["source"] == "zeroconf" assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert result2["title"] == "SLZB-06p7" + assert result2["title"] == "slzb-06" assert result2["data"] == { 
CONF_HOST: MOCK_HOST, } @@ -143,7 +143,7 @@ async def test_zeroconf_flow_auth( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["context"]["source"] == "zeroconf" assert result3["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert result3["title"] == "SLZB-06p7" + assert result3["title"] == "slzb-06" assert result3["data"] == { CONF_USERNAME: MOCK_USERNAME, CONF_PASSWORD: MOCK_PASSWORD, @@ -336,6 +336,22 @@ async def test_zeroconf_cannot_connect( assert result2["reason"] == "cannot_connect" +async def test_zeroconf_legacy_cannot_connect( + hass: HomeAssistant, mock_smlight_client: MagicMock +) -> None: + """Test we abort flow on zeroconf discovery unsupported firmware.""" + mock_smlight_client.get_info.side_effect = SmlightConnectionError + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=DISCOVERY_INFO_LEGACY, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + @pytest.mark.usefixtures("mock_smlight_client") async def test_zeroconf_legacy_mac( hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock @@ -356,10 +372,123 @@ async def test_zeroconf_legacy_mac( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["context"]["source"] == "zeroconf" assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert result2["title"] == "SLZB-06p7" + assert result2["title"] == "slzb-06" assert result2["data"] == { CONF_HOST: MOCK_HOST, } assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smlight_client.get_info.mock_calls) == 2 + + +async def test_reauth_flow( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test reauth flow completes successfully.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + assert mock_config_entry.data == { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + CONF_HOST: MOCK_HOST, + } + + assert len(mock_smlight_client.authenticate.mock_calls) == 1 + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +async def test_reauth_auth_error( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test reauth flow with authentication error.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_smlight_client.authenticate.side_effect = SmlightAuthError + + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: "test-bad", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "reauth_confirm" + + mock_smlight_client.authenticate.side_effect = None + result3 = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" + + assert mock_config_entry.data == { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + CONF_HOST: MOCK_HOST, + } + + assert len(mock_smlight_client.authenticate.mock_calls) == 2 + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +async def test_reauth_connect_error( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow with error.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_smlight_client.authenticate.side_effect = SmlightConnectionError + + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "cannot_connect" + assert len(mock_smlight_client.authenticate.mock_calls) == 1 diff --git a/tests/components/smlight/test_diagnostics.py b/tests/components/smlight/test_diagnostics.py new file mode 100644 index 00000000000..d0c756bfd87 --- /dev/null +++ b/tests/components/smlight/test_diagnostics.py @@ -0,0 +1,30 @@ +"""Test SMLIGHT diagnostics.""" + +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.smlight.const import DOMAIN +from homeassistant.core import HomeAssistant + +from .conftest import setup_integration + +from tests.common import MockConfigEntry, load_fixture +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + mock_smlight_client.get.return_value = load_fixture("logs.txt", DOMAIN) + entry = await setup_integration(hass, mock_config_entry) + + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert result == snapshot diff --git a/tests/components/smlight/test_init.py b/tests/components/smlight/test_init.py index 682993cb943..afc53932fb0 100644 --- a/tests/components/smlight/test_init.py +++ b/tests/components/smlight/test_init.py @@ -3,15 +3,17 @@ from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory -from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError +from pysmlight import Info +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError, SmlightError import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.smlight.const import SCAN_INTERVAL +from homeassistant.components.smlight.const import DOMAIN, SCAN_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.issue_registry import IssueRegistry from .conftest import setup_integration @@ -55,19 +57,37 @@ async def test_async_setup_auth_failed( 
     assert entry.state is ConfigEntryState.NOT_LOADED
 
 
+async def test_async_setup_missing_credentials(
+    hass: HomeAssistant,
+    mock_config_entry_host: MockConfigEntry,
+    mock_smlight_client: MagicMock,
+) -> None:
+    """Test we trigger reauth when credentials are missing."""
+    mock_smlight_client.check_auth_needed.return_value = True
+
+    await setup_integration(hass, mock_config_entry_host)
+
+    progress = hass.config_entries.flow.async_progress()
+    assert len(progress) == 1
+    assert progress[0]["step_id"] == "reauth_confirm"
+    assert progress[0]["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff"
+
+
+@pytest.mark.parametrize("error", [SmlightConnectionError, SmlightAuthError])
 async def test_update_failed(
     hass: HomeAssistant,
     mock_config_entry: MockConfigEntry,
     mock_smlight_client: MagicMock,
     freezer: FrozenDateTimeFactory,
+    error: SmlightError,
 ) -> None:
-    """Test update failed due to connection error."""
+    """Test update failed due to error."""
     await setup_integration(hass, mock_config_entry)
 
     entity = hass.states.get("sensor.mock_title_core_chip_temp")
     assert entity.state is not STATE_UNAVAILABLE
 
-    mock_smlight_client.get_info.side_effect = SmlightConnectionError
+    mock_smlight_client.get_info.side_effect = error
 
     freezer.tick(SCAN_INTERVAL)
     async_fire_time_changed(hass)
@@ -92,3 +112,33 @@ async def test_device_info(
     )
     assert device_entry is not None
     assert device_entry == snapshot
+
+
+async def test_device_legacy_firmware(
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+    mock_smlight_client: MagicMock,
+    device_registry: dr.DeviceRegistry,
+    issue_registry: IssueRegistry,
+) -> None:
+    """Test device setup for old firmware that doesn't support the required API."""
+    LEGACY_VERSION = "v0.9.9"
+    mock_smlight_client.get_sensors.side_effect = SmlightError
+    mock_smlight_client.get_info.return_value = Info(
+        legacy_api=2, sw_version=LEGACY_VERSION, MAC="AA:BB:CC:DD:EE:FF"
+    )
+    entry = await setup_integration(hass, mock_config_entry)
+
+    assert entry.unique_id == "aa:bb:cc:dd:ee:ff"
+
+    device_entry = device_registry.async_get_device(
+        connections={(dr.CONNECTION_NETWORK_MAC, entry.unique_id)}
+    )
+    assert LEGACY_VERSION in device_entry.sw_version
+
+    issue = issue_registry.async_get_issue(
+        domain=DOMAIN, issue_id="unsupported_firmware"
+    )
+    assert issue is not None
+    assert issue.domain == DOMAIN
+    assert issue.issue_id == "unsupported_firmware"
diff --git a/tests/components/smlight/test_sensor.py b/tests/components/smlight/test_sensor.py
index 4d16a73a0a7..f130d7ccf30 100644
--- a/tests/components/smlight/test_sensor.py
+++ b/tests/components/smlight/test_sensor.py
@@ -1,9 +1,12 @@
 """Tests for the SMLIGHT sensor platform."""
 
+from unittest.mock import MagicMock
+
+from pysmlight import Sensors
 import pytest
 from syrupy.assertion import SnapshotAssertion
 
-from homeassistant.const import Platform
+from homeassistant.const import STATE_UNKNOWN, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr, entity_registry as er
 
@@ -19,12 +22,13 @@ pytestmark = [
 
 
 @pytest.fixture
-def platforms() -> Platform | list[Platform]:
+def platforms() -> list[Platform]:
     """Platforms, which should be loaded during the test."""
-    return Platform.SENSOR
+    return [Platform.SENSOR]
 
 
 @pytest.mark.usefixtures("entity_registry_enabled_by_default")
+@pytest.mark.freeze_time("2024-07-01 00:00:00+00:00")
 async def test_sensors(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
@@ -46,9 +50,26 @@ async def test_disabled_by_default_sensors(
"""Test the disabled by default SMLIGHT sensors.""" await setup_integration(hass, mock_config_entry) - for sensor in ("ram_usage", "filesystem_usage"): + for sensor in ("core_uptime", "filesystem_usage", "ram_usage", "zigbee_uptime"): assert not hass.states.get(f"sensor.mock_title_{sensor}") assert (entry := entity_registry.async_get(f"sensor.mock_title_{sensor}")) assert entry.disabled assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_zigbee_uptime_disconnected( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test for uptime when zigbee socket is disconnected. + + In this case zigbee uptime state should be unknown. + """ + mock_smlight_client.get_sensors.return_value = Sensors(socket_uptime=0) + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.mock_title_zigbee_uptime") + assert state.state == STATE_UNKNOWN diff --git a/tests/components/smlight/test_switch.py b/tests/components/smlight/test_switch.py new file mode 100644 index 00000000000..da02814a1c5 --- /dev/null +++ b/tests/components/smlight/test_switch.py @@ -0,0 +1,132 @@ +"""Tests for the SMLIGHT switch platform.""" + +from collections.abc import Callable +from unittest.mock import MagicMock + +from pysmlight import SettingsEvent +from pysmlight.const import Settings +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +pytestmark = [ + pytest.mark.usefixtures( + "mock_smlight_client", + ) +] + + +@pytest.fixture +def platforms() -> list[Platform]: + """Platforms, which should be loaded during the test.""" + return [Platform.SWITCH] + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_switch_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test setup of SMLIGHT switches.""" + entry = await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_disabled_by_default_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test vpn enabled switch is disabled by default .""" + await setup_integration(hass, mock_config_entry) + for entity in ("vpn_enabled", "auto_zigbee_update"): + assert not hass.states.get(f"switch.mock_title_{entity}") + + assert (entry := entity_registry.async_get(f"switch.mock_title_{entity}")) + assert entry.disabled + assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("entity", "setting"), + [ + ("disable_leds", Settings.DISABLE_LEDS), + ("led_night_mode", Settings.NIGHT_MODE), + ("auto_zigbee_update", Settings.ZB_AUTOUPDATE), + ("vpn_enabled", Settings.ENABLE_VPN), + ], +) +async def test_switches( + hass: HomeAssistant, + entity: str, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, + 
setting: Settings, +) -> None: + """Test the SMLIGHT switches.""" + await setup_integration(hass, mock_config_entry) + + _page, _toggle = setting.value + + entity_id = f"switch.mock_title_{entity}" + state = hass.states.get(entity_id) + assert state is not None + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(mock_smlight_client.set_toggle.mock_calls) == 1 + mock_smlight_client.set_toggle.assert_called_once_with(_page, _toggle, True) + + event_function: Callable[[SettingsEvent], None] = next( + ( + call_args[0][1] + for call_args in mock_smlight_client.sse.register_settings_cb.call_args_list + if setting == call_args[0][0] + ), + None, + ) + + async def _call_event_function(state: bool = True): + event_function(SettingsEvent(page=_page, origin="ha", setting={_toggle: state})) + await hass.async_block_till_done() + + await _call_event_function(state=True) + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(mock_smlight_client.set_toggle.mock_calls) == 2 + mock_smlight_client.set_toggle.assert_called_with(_page, _toggle, False) + + await _call_event_function(state=False) + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF diff --git a/tests/components/smlight/test_update.py b/tests/components/smlight/test_update.py new file mode 100644 index 00000000000..0bb2e34d7ca --- /dev/null +++ b/tests/components/smlight/test_update.py @@ -0,0 +1,313 @@ +"""Tests for the SMLIGHT update platform.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pysmlight import Firmware, Info +from pysmlight.const import Events as SmEvents +from pysmlight.sse import MessageEvent +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.smlight.const import SCAN_FIRMWARE_INTERVAL +from homeassistant.components.update import ( + ATTR_IN_PROGRESS, + ATTR_INSTALLED_VERSION, + ATTR_LATEST_VERSION, + ATTR_UPDATE_PERCENTAGE, + DOMAIN as PLATFORM, + SERVICE_INSTALL, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . 
import get_mock_event_function
+from .conftest import setup_integration
+
+from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
+from tests.typing import WebSocketGenerator
+
+pytestmark = [
+    pytest.mark.usefixtures(
+        "mock_smlight_client",
+    )
+]
+
+MOCK_FIRMWARE_DONE = MessageEvent(
+    type="FW_UPD_done",
+    message="FW_UPD_done",
+    data="",
+    origin="http://slzb-06p10.local",
+    last_event_id="",
+)
+
+MOCK_FIRMWARE_PROGRESS = MessageEvent(
+    type="ZB_FW_prgs",
+    message="ZB_FW_prgs",
+    data="50",
+    origin="http://slzb-06p10.local",
+    last_event_id="",
+)
+
+MOCK_FIRMWARE_FAIL = MessageEvent(
+    type="ZB_FW_err",
+    message="ZB_FW_err",
+    data="",
+    origin="http://slzb-06p10.local",
+    last_event_id="",
+)
+
+MOCK_FIRMWARE_NOTES = [
+    Firmware(
+        ver="v2.3.6",
+        mode="ESP",
+        notes=None,
+    )
+]
+
+
+@pytest.fixture
+def platforms() -> list[Platform]:
+    """Platforms, which should be loaded during the test."""
+    return [Platform.UPDATE]
+
+
+async def test_update_setup(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    mock_config_entry: MockConfigEntry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test setup of the SMLIGHT update entity."""
+    entry = await setup_integration(hass, mock_config_entry)
+
+    await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)
+
+    await hass.config_entries.async_unload(entry.entry_id)
+
+
+@patch("homeassistant.components.smlight.update.asyncio.sleep", return_value=None)
+async def test_update_firmware(
+    mock_sleep: MagicMock,
+    hass: HomeAssistant,
+    freezer: FrozenDateTimeFactory,
+    mock_config_entry: MockConfigEntry,
+    mock_smlight_client: MagicMock,
+) -> None:
+    """Test firmware updates."""
+    await setup_integration(hass, mock_config_entry)
+    entity_id = "update.mock_title_core_firmware"
+    state = hass.states.get(entity_id)
+    assert state.state == STATE_ON
+    assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6"
+    assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2"
+
+    await hass.services.async_call(
+        PLATFORM,
+        SERVICE_INSTALL,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=False,
+    )
+
+    assert len(mock_smlight_client.fw_update.mock_calls) == 1
+
+    event_function = get_mock_event_function(mock_smlight_client, SmEvents.ZB_FW_prgs)
+
+    event_function(MOCK_FIRMWARE_PROGRESS)
+    state = hass.states.get(entity_id)
+    assert state.attributes[ATTR_IN_PROGRESS] is True
+    assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50
+
+    event_function = get_mock_event_function(mock_smlight_client, SmEvents.FW_UPD_done)
+
+    event_function(MOCK_FIRMWARE_DONE)
+
+    mock_smlight_client.get_info.return_value = Info(
+        sw_version="v2.5.2",
+    )
+
+    freezer.tick(timedelta(seconds=5))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    state = hass.states.get(entity_id)
+    assert state.state == STATE_OFF
+    assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.5.2"
+    assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2"
+
+
+async def test_update_legacy_firmware_v2(
+    hass: HomeAssistant,
+    freezer: FrozenDateTimeFactory,
+    mock_config_entry: MockConfigEntry,
+    mock_smlight_client: MagicMock,
+) -> None:
+    """Test firmware update for legacy v2 firmware."""
+    mock_smlight_client.get_info.return_value = Info(
+        sw_version="v2.0.18",
+        legacy_api=1,
+        MAC="AA:BB:CC:DD:EE:FF",
+    )
+    await setup_integration(hass, mock_config_entry)
+    entity_id = "update.mock_title_core_firmware"
+    state = hass.states.get(entity_id)
+    assert state.state == STATE_ON
+    assert state.attributes[ATTR_INSTALLED_VERSION] ==
"v2.0.18" + assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" + + await hass.services.async_call( + PLATFORM, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=False, + ) + + assert len(mock_smlight_client.fw_update.mock_calls) == 1 + + event_function = get_mock_event_function(mock_smlight_client, SmEvents.ESP_UPD_done) + + event_function(MOCK_FIRMWARE_DONE) + + mock_smlight_client.get_info.return_value = Info( + sw_version="v2.5.2", + ) + + freezer.tick(SCAN_FIRMWARE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.5.2" + assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" + + +async def test_update_firmware_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test firmware updates.""" + await setup_integration(hass, mock_config_entry) + entity_id = "update.mock_title_core_firmware" + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6" + assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" + + await hass.services.async_call( + PLATFORM, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=False, + ) + + assert len(mock_smlight_client.fw_update.mock_calls) == 1 + + event_function = get_mock_event_function(mock_smlight_client, SmEvents.ZB_FW_err) + + async def _call_event_function(event: MessageEvent): + event_function(event) + + with pytest.raises(HomeAssistantError): + await _call_event_function(MOCK_FIRMWARE_FAIL) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None + + +@patch("homeassistant.components.smlight.const.LOGGER.warning") +async def test_update_reboot_timeout( + mock_warning: MagicMock, + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test firmware updates.""" + await setup_integration(hass, mock_config_entry) + entity_id = "update.mock_title_core_firmware" + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6" + assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" + + with ( + patch( + "homeassistant.components.smlight.update.asyncio.timeout", + side_effect=TimeoutError, + ), + patch( + "homeassistant.components.smlight.update.asyncio.sleep", + return_value=None, + ), + ): + await hass.services.async_call( + PLATFORM, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=False, + ) + + assert len(mock_smlight_client.fw_update.mock_calls) == 1 + + event_function = get_mock_event_function( + mock_smlight_client, SmEvents.FW_UPD_done + ) + + event_function(MOCK_FIRMWARE_DONE) + + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_warning.assert_called_once() + + +async def test_update_release_notes( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test firmware release notes.""" + await setup_integration(hass, mock_config_entry) + ws_client = await hass_ws_client(hass) + await hass.async_block_till_done() + entity_id = "update.mock_title_core_firmware" + + state = 
hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + + await ws_client.send_json( + { + "id": 1, + "type": "update/release_notes", + "entity_id": entity_id, + } + ) + result = await ws_client.receive_json() + assert result["result"] is not None + + mock_smlight_client.get_firmware_version.side_effect = None + mock_smlight_client.get_firmware_version.return_value = MOCK_FIRMWARE_NOTES + + freezer.tick(SCAN_FIRMWARE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + await ws_client.send_json( + { + "id": 2, + "type": "update/release_notes", + "entity_id": entity_id, + } + ) + result = await ws_client.receive_json() + await hass.async_block_till_done() + assert result["result"] is None diff --git a/tests/components/snooz/__init__.py b/tests/components/snooz/__init__.py index c314fde5c90..f27ef91fe5a 100644 --- a/tests/components/snooz/__init__.py +++ b/tests/components/snooz/__init__.py @@ -6,7 +6,7 @@ from dataclasses import dataclass from unittest.mock import patch from pysnooz.commands import SnoozCommandData -from pysnooz.device import DisconnectionReason +from pysnooz.device import DisconnectionReason, SnoozConnectionStatus from pysnooz.testing import MockSnoozDevice as ParentMockSnoozDevice from homeassistant.components.snooz.const import DOMAIN @@ -70,13 +70,31 @@ class SnoozFixture: class MockSnoozDevice(ParentMockSnoozDevice): """Used for testing integration with Bleak. - Adjusted for https://github.com/AustinBrunkhorst/pysnooz/issues/6 + Adjusted for https://github.com/AustinBrunkhorst/pysnooz/pull/19 """ - def _on_device_disconnected(self, e) -> None: - if self._is_manually_disconnecting: - e.kwargs.set("reason", DisconnectionReason.USER) - return super()._on_device_disconnected(e) + async def async_disconnect(self) -> None: + """Disconnect from the device.""" + self._is_manually_disconnecting = True + try: + self._cancel_current_command() + if ( + self._reconnection_task is not None + and not self._reconnection_task.done() + ): + self._reconnection_task.cancel() + + if self._connection_task is not None and not self._connection_task.done(): + self._connection_task.cancel() + + if self._api is not None: + await self._api.async_disconnect() + + if self.connection_status != SnoozConnectionStatus.DISCONNECTED: + self._machine.device_disconnected(reason=DisconnectionReason.USER) + + finally: + self._is_manually_disconnecting = False async def create_mock_snooz( diff --git a/tests/components/snooz/test_fan.py b/tests/components/snooz/test_fan.py index ddc93a4ba1f..127895d7de7 100644 --- a/tests/components/snooz/test_fan.py +++ b/tests/components/snooz/test_fan.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import timedelta -from unittest.mock import Mock +from unittest.mock import Mock, patch from pysnooz.api import SnoozDeviceState, UnknownSnoozState from pysnooz.commands import SnoozCommandResult, SnoozCommandResultStatus @@ -32,6 +32,8 @@ from homeassistant.helpers import entity_registry as er from . 
import SnoozFixture, create_mock_snooz, create_mock_snooz_config_entry +from tests.components.bluetooth import generate_ble_device + async def test_turn_on(hass: HomeAssistant, snooz_fan_entity_id: str) -> None: """Test turning on the device.""" @@ -149,8 +151,6 @@ async def test_transition_off(hass: HomeAssistant, snooz_fan_entity_id: str) -> assert ATTR_ASSUMED_STATE not in state.attributes -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_push_events( hass: HomeAssistant, mock_connected_snooz: SnoozFixture, snooz_fan_entity_id: str ) -> None: @@ -174,9 +174,10 @@ async def test_push_events( state = hass.states.get(snooz_fan_entity_id) assert state.attributes[ATTR_ASSUMED_STATE] is True + # Don't attempt to reconnect + await mock_connected_snooz.device.async_disconnect() + -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_restore_state( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: @@ -201,7 +202,14 @@ async def test_restore_state( assert state.state == STATE_UNAVAILABLE # reload entry - await create_mock_snooz_config_entry(hass, device) + with ( + patch("homeassistant.components.snooz.SnoozDevice", return_value=device), + patch( + "homeassistant.components.snooz.async_ble_device_from_address", + return_value=generate_ble_device(device.address, device.name), + ), + ): + await hass.config_entries.async_setup(entry.entry_id) # should match last known state state = hass.states.get(entity_id) @@ -226,7 +234,14 @@ async def test_restore_unknown_state( assert state.state == STATE_UNAVAILABLE # reload entry - await create_mock_snooz_config_entry(hass, device) + with ( + patch("homeassistant.components.snooz.SnoozDevice", return_value=device), + patch( + "homeassistant.components.snooz.async_ble_device_from_address", + return_value=generate_ble_device(device.address, device.name), + ), + ): + await hass.config_entries.async_setup(entry.entry_id) # should match last known state state = hass.states.get(entity_id) diff --git a/tests/components/snooz/test_init.py b/tests/components/snooz/test_init.py index b1ab06fcc8e..edcd7913792 100644 --- a/tests/components/snooz/test_init.py +++ b/tests/components/snooz/test_init.py @@ -2,15 +2,11 @@ from __future__ import annotations -import pytest - from homeassistant.core import HomeAssistant from . 
import SnoozFixture -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_removing_entry_cleans_up_connections( hass: HomeAssistant, mock_connected_snooz: SnoozFixture ) -> None: @@ -21,8 +17,6 @@ async def test_removing_entry_cleans_up_connections( assert not mock_connected_snooz.device.is_connected -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_reloading_entry_cleans_up_connections( hass: HomeAssistant, mock_connected_snooz: SnoozFixture ) -> None: diff --git a/tests/components/solarlog/__init__.py b/tests/components/solarlog/__init__.py index 74b19bd297e..c2c0296d9e2 100644 --- a/tests/components/solarlog/__init__.py +++ b/tests/components/solarlog/__init__.py @@ -17,3 +17,5 @@ async def setup_platform( with patch("homeassistant.components.solarlog.PLATFORMS", platforms): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/solarlog/conftest.py b/tests/components/solarlog/conftest.py index c34d0c011a3..caa3621b9bb 100644 --- a/tests/components/solarlog/conftest.py +++ b/tests/components/solarlog/conftest.py @@ -1,17 +1,34 @@ """Test helpers.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest +from solarlog_cli.solarlog_models import InverterData, SolarlogData -from homeassistant.components.solarlog.const import DOMAIN as SOLARLOG_DOMAIN -from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.components.solarlog.const import ( + CONF_HAS_PWD, + DOMAIN as SOLARLOG_DOMAIN, +) +from homeassistant.const import CONF_HOST, CONF_PASSWORD -from .const import HOST, NAME +from .const import HOST from tests.common import MockConfigEntry, load_json_object_fixture +DEVICE_LIST = { + 0: InverterData(name="Inverter 1", enabled=True), + 1: InverterData(name="Inverter 2", enabled=True), +} +INVERTER_DATA = { + 0: InverterData( + name="Inverter 1", enabled=True, consumption_year=354687, current_power=5 + ), + 1: InverterData( + name="Inverter 2", enabled=True, consumption_year=354, current_power=6 + ), +} + @pytest.fixture def mock_config_entry() -> MockConfigEntry: @@ -21,10 +38,10 @@ def mock_config_entry() -> MockConfigEntry: title="solarlog", data={ CONF_HOST: HOST, - CONF_NAME: NAME, - "extended_data": True, + CONF_HAS_PWD: True, + CONF_PASSWORD: "pwd", }, - minor_version=2, + minor_version=3, entry_id="ce5f5431554d101905d31797e1232da8", ) @@ -33,11 +50,23 @@ def mock_config_entry() -> MockConfigEntry: def mock_solarlog_connector(): """Build a fixture for the SolarLog API that connects successfully and returns one device.""" - mock_solarlog_api = AsyncMock() - mock_solarlog_api.test_connection = AsyncMock(return_value=True) - mock_solarlog_api.update_data.return_value = load_json_object_fixture( - "solarlog_data.json", SOLARLOG_DOMAIN + data = SolarlogData.from_dict( + load_json_object_fixture("solarlog_data.json", SOLARLOG_DOMAIN) ) + data.inverter_data = INVERTER_DATA + + mock_solarlog_api = AsyncMock() + mock_solarlog_api.set_enabled_devices = MagicMock() + mock_solarlog_api.test_connection.return_value = True + mock_solarlog_api.test_extended_data_available.return_value = True + mock_solarlog_api.extended_data.return_value = True + mock_solarlog_api.update_data.return_value = data + 
mock_solarlog_api.update_device_list.return_value = DEVICE_LIST + mock_solarlog_api.update_inverter_data.return_value = INVERTER_DATA + mock_solarlog_api.device_name = {0: "Inverter 1", 1: "Inverter 2"}.get + mock_solarlog_api.device_enabled = {0: True, 1: True}.get + mock_solarlog_api.password.return_value = "pwd" + with ( patch( "homeassistant.components.solarlog.coordinator.SolarLogConnector", diff --git a/tests/components/solarlog/const.py b/tests/components/solarlog/const.py index e23633c80ae..1294a376b01 100644 --- a/tests/components/solarlog/const.py +++ b/tests/components/solarlog/const.py @@ -1,4 +1,3 @@ """Common const used across tests for SolarLog.""" -NAME = "Solarlog test 1 2 3" HOST = "http://1.1.1.1" diff --git a/tests/components/solarlog/fixtures/solarlog_data.json b/tests/components/solarlog/fixtures/solarlog_data.json index 4976f4fa8b7..339ab4a4dfc 100644 --- a/tests/components/solarlog/fixtures/solarlog_data.json +++ b/tests/components/solarlog/fixtures/solarlog_data.json @@ -17,8 +17,9 @@ "total_power": 120, "self_consumption_year": 545, "alternator_loss": 2, - "efficiency": 0.9804, - "usage": 0.5487, + "efficiency": 98.1, + "usage": 54.8, "power_available": 45.13, - "capacity": 0.85 + "capacity": 85.5, + "last_updated": "2024-08-01T15:20:45Z" } diff --git a/tests/components/solarlog/snapshots/test_diagnostics.ambr b/tests/components/solarlog/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..6aef72ebbd5 --- /dev/null +++ b/tests/components/solarlog/snapshots/test_diagnostics.ambr @@ -0,0 +1,68 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'has_password': True, + 'host': '**REDACTED**', + 'password': 'pwd', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'solarlog', + 'entry_id': 'ce5f5431554d101905d31797e1232da8', + 'minor_version': 3, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'solarlog', + 'unique_id': None, + 'version': 1, + }), + 'solarlog_data': dict({ + 'alternator_loss': 2.0, + 'capacity': 85.5, + 'consumption_ac': 54.87, + 'consumption_day': 5.31, + 'consumption_month': 758.0, + 'consumption_total': 354687.0, + 'consumption_year': 4587.0, + 'consumption_yesterday': 7.34, + 'efficiency': 98.1, + 'inverter_data': dict({ + '0': dict({ + 'consumption_year': 354687, + 'current_power': 5, + 'enabled': True, + 'name': 'Inverter 1', + }), + '1': dict({ + 'consumption_year': 354, + 'current_power': 6, + 'enabled': True, + 'name': 'Inverter 2', + }), + }), + 'last_updated': '2024-08-01T15:20:45+00:00', + 'power_ac': 100.0, + 'power_available': 45.13, + 'power_dc': 102.0, + 'production_year': None, + 'self_consumption_year': 545.0, + 'total_power': 120.0, + 'usage': 54.8, + 'voltage_ac': 100.0, + 'voltage_dc': 100.0, + 'yield_day': 4.21, + 'yield_month': 515.0, + 'yield_total': 56513.0, + 'yield_year': 1023.0, + 'yield_yesterday': 5.21, + }), + }) +# --- diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index df154a5eb9b..06bc01f9d39 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -1,4 +1,220 @@ # serializer version: 1 +# name: test_all_entities[sensor.inverter_1_consumption_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_consumption_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption year', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_1_consumption_year', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.inverter_1_consumption_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter 1 Consumption year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1_consumption_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '354.687', + }) +# --- +# name: test_all_entities[sensor.inverter_1_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_1_current_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.inverter_1_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_all_entities[sensor.inverter_2_consumption_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_2_consumption_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption year', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_2_consumption_year', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.inverter_2_consumption_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'energy', + 'friendly_name': 'Inverter 2 Consumption year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_2_consumption_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.354', + }) +# --- +# name: test_all_entities[sensor.inverter_2_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_2_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_2_current_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.inverter_2_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 2 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_2_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- # name: test_all_entities[sensor.solarlog_alternator_loss-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -38,7 +254,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Alternator loss', + 'friendly_name': 'SolarLog Alternator loss', 'state_class': , 'unit_of_measurement': , }), @@ -47,7 +263,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2', + 'state': '2.0', }) # --- # name: test_all_entities[sensor.solarlog_capacity-entry] @@ -73,6 +289,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), 'original_device_class': , 'original_icon': None, @@ -89,7 +308,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'solarlog Capacity', + 'friendly_name': 'SolarLog Capacity', 'state_class': , 'unit_of_measurement': '%', }), @@ -98,7 +317,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '85.0', + 'state': '85.5', }) # --- # name: test_all_entities[sensor.solarlog_consumption_ac-entry] @@ -140,7 +359,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Consumption AC', + 'friendly_name': 'SolarLog Consumption AC', 'state_class': , 'unit_of_measurement': , }), @@ -157,7 +376,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -173,6 +394,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -189,7 +416,8 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption day', + 'friendly_name': 'SolarLog Consumption day', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -197,7 +425,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 
'0.005', + 'state': '0.00531', }) # --- # name: test_all_entities[sensor.solarlog_consumption_month-entry] @@ -205,7 +433,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -221,6 +451,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -237,7 +473,8 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption month', + 'friendly_name': 'SolarLog Consumption month', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -271,6 +508,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -287,7 +530,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption total', + 'friendly_name': 'SolarLog Consumption total', 'state_class': , 'unit_of_measurement': , }), @@ -304,7 +547,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -320,6 +565,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -336,7 +587,8 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption year', + 'friendly_name': 'SolarLog Consumption year', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -368,6 +620,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -384,7 +642,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Consumption yesterday', + 'friendly_name': 'SolarLog Consumption yesterday', 'unit_of_measurement': , }), 'context': , @@ -392,7 +650,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.007', + 'state': '0.00734', }) # --- # name: test_all_entities[sensor.solarlog_efficiency-entry] @@ -418,6 +676,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), 'original_device_class': , 'original_icon': None, @@ -434,7 +695,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'solarlog Efficiency', + 'friendly_name': 'SolarLog Efficiency', 'state_class': , 'unit_of_measurement': '%', }), @@ -443,7 +704,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '98.0', + 'state': '98.1', }) # --- # name: test_all_entities[sensor.solarlog_installed_peak_power-entry] @@ -451,7 +712,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -483,7 +746,8 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Installed peak power', + 
'friendly_name': 'SolarLog Installed peak power', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -491,7 +755,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '120', + 'state': '120.0', }) # --- # name: test_all_entities[sensor.solarlog_last_update-entry] @@ -531,14 +795,14 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'timestamp', - 'friendly_name': 'solarlog Last update', + 'friendly_name': 'SolarLog Last update', }), 'context': , 'entity_id': 'sensor.solarlog_last_update', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2024-08-01T15:20:45+00:00', }) # --- # name: test_all_entities[sensor.solarlog_power_ac-entry] @@ -580,7 +844,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Power AC', + 'friendly_name': 'SolarLog Power AC', 'state_class': , 'unit_of_measurement': , }), @@ -589,7 +853,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '100.0', }) # --- # name: test_all_entities[sensor.solarlog_power_available-entry] @@ -631,7 +895,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Power available', + 'friendly_name': 'SolarLog Power available', 'state_class': , 'unit_of_measurement': , }), @@ -682,7 +946,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'solarlog Power DC', + 'friendly_name': 'SolarLog Power DC', 'state_class': , 'unit_of_measurement': , }), @@ -691,7 +955,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '102', + 'state': '102.0', }) # --- # name: test_all_entities[sensor.solarlog_self_consumption_year-entry] @@ -733,7 +997,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Self-consumption year', + 'friendly_name': 'SolarLog Self-consumption year', 'state_class': , 'unit_of_measurement': , }), @@ -742,7 +1006,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '545', + 'state': '545.0', }) # --- # name: test_all_entities[sensor.solarlog_usage-entry] @@ -768,6 +1032,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), 'original_device_class': , 'original_icon': None, @@ -784,7 +1051,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'solarlog Usage', + 'friendly_name': 'SolarLog Usage', 'state_class': , 'unit_of_measurement': '%', }), @@ -793,7 +1060,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '54.9', + 'state': '54.8', }) # --- # name: test_all_entities[sensor.solarlog_voltage_ac-entry] @@ -835,7 +1102,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', - 'friendly_name': 'solarlog Voltage AC', + 'friendly_name': 'SolarLog Voltage AC', 'state_class': , 'unit_of_measurement': , }), @@ -844,7 +1111,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '100.0', }) # --- # name: test_all_entities[sensor.solarlog_voltage_dc-entry] @@ -886,7 +1153,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', - 'friendly_name': 'solarlog Voltage DC', + 'friendly_name': 'SolarLog Voltage DC', 'state_class': , 'unit_of_measurement': , }), @@ -895,7 +1162,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '100.0', }) # --- # name: 
test_all_entities[sensor.solarlog_yield_day-entry] @@ -903,7 +1170,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -919,6 +1188,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -935,7 +1210,8 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield day', + 'friendly_name': 'SolarLog Yield day', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -943,7 +1219,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.004', + 'state': '0.00421', }) # --- # name: test_all_entities[sensor.solarlog_yield_month-entry] @@ -951,7 +1227,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -967,6 +1245,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -983,7 +1267,8 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield month', + 'friendly_name': 'SolarLog Yield month', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -1017,6 +1302,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1033,7 +1324,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield total', + 'friendly_name': 'SolarLog Yield total', 'state_class': , 'unit_of_measurement': , }), @@ -1050,7 +1341,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1066,6 +1359,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1082,7 +1378,8 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield year', + 'friendly_name': 'SolarLog Yield year', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -1090,7 +1387,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1.023', + 'state': '1.0230', }) # --- # name: test_all_entities[sensor.solarlog_yield_yesterday-entry] @@ -1114,6 +1411,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1130,7 +1433,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'solarlog Yield yesterday', + 'friendly_name': 'SolarLog Yield yesterday', 'unit_of_measurement': , }), 'context': , @@ -1138,6 +1441,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.005', + 'state': '0.00521', }) # --- diff --git a/tests/components/solarlog/test_config_flow.py 
b/tests/components/solarlog/test_config_flow.py index f71282a7c9b..58a5faa0772 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -1,57 +1,47 @@ """Test the solarlog config flow.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock import pytest -from solarlog_cli.solarlog_exceptions import SolarLogConnectionError, SolarLogError +from solarlog_cli.solarlog_exceptions import ( + SolarLogAuthenticationError, + SolarLogConnectionError, + SolarLogError, +) -from homeassistant import config_entries -from homeassistant.components.solarlog import config_flow -from homeassistant.components.solarlog.const import DEFAULT_HOST, DOMAIN -from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.components.solarlog.const import CONF_HAS_PWD, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import HOST, NAME +from .const import HOST from tests.common import MockConfigEntry +@pytest.mark.usefixtures("test_connect") async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with ( - patch( - "homeassistant.components.solarlog.config_flow.SolarLogConfigFlow._test_connection", - return_value=True, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: HOST, CONF_NAME: NAME, "extended_data": False}, - ) - await hass.async_block_till_done() + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST, CONF_HAS_PWD: False}, + ) assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "solarlog_test_1_2_3" + assert result2["title"] == HOST assert result2["data"][CONF_HOST] == "http://1.1.1.1" - assert result2["data"]["extended_data"] is False + assert result2["data"][CONF_HAS_PWD] is False assert len(mock_setup_entry.mock_calls) == 1 -def init_config_flow(hass: HomeAssistant) -> config_flow.SolarLogConfigFlow: - """Init a configuration flow.""" - flow = config_flow.SolarLogConfigFlow() - flow.hass = hass - return flow - - @pytest.mark.usefixtures("test_connect") async def test_user( hass: HomeAssistant, @@ -60,165 +50,211 @@ async def test_user( ) -> None: """Test user config.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} # tests with all provided result = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: HOST, CONF_NAME: NAME, "extended_data": False} + result["flow_id"], {CONF_HOST: HOST, CONF_HAS_PWD: False} ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" + assert result["title"] == HOST assert result["data"][CONF_HOST] == HOST assert len(mock_setup_entry.mock_calls) == 1 @pytest.mark.parametrize( - ("exception", "error"), + ("exception1", "error1", "exception2", "error2"), [ - (SolarLogConnectionError, {CONF_HOST: 
"cannot_connect"}), - (SolarLogError, {CONF_HOST: "unknown"}), + ( + SolarLogConnectionError, + {CONF_HOST: "cannot_connect"}, + SolarLogAuthenticationError, + {CONF_HOST: "password_error"}, + ), + (SolarLogError, {CONF_HOST: "unknown"}, SolarLogError, {CONF_HOST: "unknown"}), ], ) async def test_form_exceptions( hass: HomeAssistant, - exception: Exception, - error: dict[str, str], + exception1: Exception, + error1: dict[str, str], + exception2: Exception, + error2: dict[str, str], mock_solarlog_connector: AsyncMock, ) -> None: """Test we can handle Form exceptions.""" - flow = init_config_flow(hass) - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" + assert result["errors"] == {} - mock_solarlog_connector.test_connection.side_effect = exception + mock_solarlog_connector.test_connection.side_effect = exception1 # tests with connection error - result = await flow.async_step_user( - {CONF_NAME: NAME, CONF_HOST: HOST, "extended_data": False} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_HOST: HOST, CONF_HAS_PWD: False} ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert result["errors"] == error + assert result["errors"] == error1 + # tests with password error mock_solarlog_connector.test_connection.side_effect = None + mock_solarlog_connector.test_extended_data_available.side_effect = exception2 + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_HOST: HOST, CONF_HAS_PWD: True} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "password" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_PASSWORD: "pwd"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "password" + assert result["errors"] == error2 + + mock_solarlog_connector.test_extended_data_available.side_effect = None # tests with all provided - result = await flow.async_step_user( - {CONF_NAME: NAME, CONF_HOST: HOST, "extended_data": False} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_PASSWORD: "pwd"} ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_HOST] == HOST - assert result["data"]["extended_data"] is False + assert result["title"] == HOST + assert result["data"][CONF_PASSWORD] == "pwd" -async def test_import(hass: HomeAssistant, test_connect) -> None: - """Test import step.""" - flow = init_config_flow(hass) - - # import with only host - result = await flow.async_step_import({CONF_HOST: HOST}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog" - assert result["data"][CONF_HOST] == HOST - - # import with only name - result = await flow.async_step_import({CONF_NAME: NAME}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_HOST] == DEFAULT_HOST - - # import with host and name - result = await flow.async_step_import({CONF_HOST: HOST, CONF_NAME: NAME}) - await hass.async_block_till_done() - - assert 
result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_HOST] == HOST - - -async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None: +async def test_abort_if_already_setup(hass: HomeAssistant, test_connect: None) -> None: """Test we abort if the device is already setup.""" - flow = init_config_flow(hass) - MockConfigEntry( - domain="solarlog", data={CONF_NAME: NAME, CONF_HOST: HOST} - ).add_to_hass(hass) - # Should fail, same HOST different NAME (default) - result = await flow.async_step_import( - {CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} + MockConfigEntry(domain=DOMAIN, data={CONF_HOST: HOST}).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST, CONF_HAS_PWD: False}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - # Should fail, same HOST and NAME - result = await flow.async_step_user({CONF_HOST: HOST, CONF_NAME: NAME}) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {CONF_HOST: "already_configured"} - - # SHOULD pass, diff HOST (without http://), different NAME - result = await flow.async_step_import( - {CONF_HOST: "2.2.2.2", CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_7_8_9" - assert result["data"][CONF_HOST] == "http://2.2.2.2" - - # SHOULD pass, diff HOST, same NAME - result = await flow.async_step_import( - {CONF_HOST: "http://2.2.2.2", CONF_NAME: NAME, "extended_data": False} - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_HOST] == "http://2.2.2.2" - +@pytest.mark.parametrize( + ("has_password", "password"), + [ + (True, "pwd"), + (False, ""), + ], +) async def test_reconfigure_flow( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_solarlog_connector: AsyncMock, + has_password: bool, + password: str, ) -> None: """Test config flow options.""" entry = MockConfigEntry( domain=DOMAIN, - title="solarlog_test_1_2_3", + title=HOST, data={ CONF_HOST: HOST, - "extended_data": False, + CONF_HAS_PWD: False, }, + minor_version=3, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" + # test with all data provided result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"extended_data": True} + result["flow_id"], {CONF_HAS_PWD: True, CONF_PASSWORD: password} ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" assert len(mock_setup_entry.mock_calls) == 1 + + entry = hass.config_entries.async_get_entry(entry.entry_id) + assert entry + assert entry.title == HOST + assert entry.data[CONF_HAS_PWD] == has_password + assert 
entry.data[CONF_PASSWORD] == password + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (SolarLogAuthenticationError, {CONF_HOST: "password_error"}), + (SolarLogError, {CONF_HOST: "unknown"}), + ], +) +async def test_reauth( + hass: HomeAssistant, + exception: Exception, + error: dict[str, str], + mock_solarlog_connector: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test that the reauth flow works.""" + + entry = MockConfigEntry( + domain=DOMAIN, + title=HOST, + data={ + CONF_HOST: HOST, + CONF_HAS_PWD: True, + CONF_PASSWORD: "pwd", + }, + minor_version=3, + ) + entry.add_to_hass(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_solarlog_connector.test_extended_data_available.side_effect = exception + + # tests with authentication/unknown error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "other_pwd"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == error + + mock_solarlog_connector.test_extended_data_available.side_effect = None + + # tests with all information provided + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "other_pwd"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert entry.data[CONF_PASSWORD] == "other_pwd" diff --git a/tests/components/solarlog/test_diagnostics.py b/tests/components/solarlog/test_diagnostics.py new file mode 100644 index 00000000000..bc0b020462d --- /dev/null +++ b/tests/components/solarlog/test_diagnostics.py @@ -0,0 +1,32 @@ +"""Test Solarlog diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .
import setup_platform + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_solarlog_connector: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/solarlog/test_init.py b/tests/components/solarlog/test_init.py index 0044d09f20e..a9a595f8962 100644 --- a/tests/components/solarlog/test_init.py +++ b/tests/components/solarlog/test_init.py @@ -2,17 +2,24 @@ from unittest.mock import AsyncMock -from solarlog_cli.solarlog_exceptions import SolarLogConnectionError +import pytest +from solarlog_cli.solarlog_exceptions import ( + SolarLogAuthenticationError, + SolarLogConnectionError, + SolarLogError, + SolarLogUpdateError, +) -from homeassistant.components.solarlog.const import DOMAIN +from homeassistant.components.solarlog.const import CONF_HAS_PWD, DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry from . import setup_platform -from .const import HOST, NAME +from .const import HOST from tests.common import MockConfigEntry @@ -32,37 +39,113 @@ async def test_load_unload( assert mock_config_entry.state is ConfigEntryState.NOT_LOADED -async def test_raise_config_entry_not_ready_when_offline( +@pytest.mark.parametrize( + ("exception", "error"), + [ + (SolarLogAuthenticationError, ConfigEntryState.SETUP_ERROR), + (SolarLogUpdateError, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_setup_error( hass: HomeAssistant, + exception: SolarLogError, + error: str, mock_config_entry: MockConfigEntry, mock_solarlog_connector: AsyncMock, ) -> None: - """Config entry state is SETUP_RETRY when Solarlog is offline.""" + """Test errors in setting up coordinator (i.e. 
login error).""" - mock_solarlog_connector.update_data.side_effect = SolarLogConnectionError + mock_solarlog_connector.login.side_effect = exception await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) await hass.async_block_till_done() - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + assert mock_config_entry.state == error + + if error == ConfigEntryState.SETUP_RETRY: + assert len(hass.config_entries.flow.async_progress()) == 0 + + +@pytest.mark.parametrize( + ("login_side_effect", "login_return_value", "entry_state"), + [ + (SolarLogAuthenticationError, False, ConfigEntryState.SETUP_ERROR), + (ConfigEntryNotReady, False, ConfigEntryState.SETUP_RETRY), + (None, False, ConfigEntryState.SETUP_ERROR), + (None, True, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_auth_error_during_first_refresh( + hass: HomeAssistant, + login_side_effect: Exception | None, + login_return_value: bool, + entry_state: str, + mock_config_entry: MockConfigEntry, + mock_solarlog_connector: AsyncMock, +) -> None: + """Test the correct exceptions are thrown for auth error during first refresh.""" + + mock_solarlog_connector.password.return_value = "" + mock_solarlog_connector.update_data.side_effect = SolarLogAuthenticationError + + mock_solarlog_connector.login.return_value = login_return_value + mock_solarlog_connector.login.side_effect = login_side_effect + + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + await hass.async_block_till_done() + + assert mock_config_entry.state == entry_state + + +@pytest.mark.parametrize( + ("exception"), + [ + (SolarLogConnectionError), + (SolarLogUpdateError), + ], +) +async def test_other_exceptions_during_first_refresh( + hass: HomeAssistant, + exception: SolarLogError, + mock_config_entry: MockConfigEntry, + mock_solarlog_connector: AsyncMock, +) -> None: + """Test the correct exceptions are thrown during first refresh.""" + + mock_solarlog_connector.update_data.side_effect = exception + + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + await hass.async_block_till_done() + + assert mock_config_entry.state == ConfigEntryState.SETUP_RETRY assert len(hass.config_entries.flow.async_progress()) == 0 +@pytest.mark.parametrize( + ("minor_version", "suffix"), + [ + (1, "time"), + (2, "last_updated"), + ], +) async def test_migrate_config_entry( hass: HomeAssistant, + minor_version: int, + suffix: str, device_registry: DeviceRegistry, entity_registry: EntityRegistry, + mock_solarlog_connector: AsyncMock, ) -> None: """Test successful migration of entry data.""" entry = MockConfigEntry( domain=DOMAIN, - title=NAME, + title=HOST, data={ CONF_HOST: HOST, }, version=1, - minor_version=1, + minor_version=minor_version, ) entry.add_to_hass(hass) @@ -72,17 +155,19 @@ async def test_migrate_config_entry( manufacturer="Solar-Log", name="solarlog", ) + uid = f"{entry.entry_id}_{suffix}" + sensor_entity = entity_registry.async_get_or_create( config_entry=entry, platform=DOMAIN, domain=Platform.SENSOR, - unique_id=f"{entry.entry_id}_time", + unique_id=uid, device_id=device.id, ) assert entry.version == 1 - assert entry.minor_version == 1 - assert sensor_entity.unique_id == f"{entry.entry_id}_time" + assert entry.minor_version == minor_version + assert sensor_entity.unique_id == f"{entry.entry_id}_{suffix}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -92,6 +177,6 @@ async def test_migrate_config_entry( assert entity_migrated.unique_id == f"{entry.entry_id}_last_updated" assert 
entry.version == 1 - assert entry.minor_version == 2 + assert entry.minor_version == 3 assert entry.data[CONF_HOST] == HOST - assert entry.data["extended_data"] is False + assert entry.data[CONF_HAS_PWD] is False diff --git a/tests/components/solarlog/test_sensor.py b/tests/components/solarlog/test_sensor.py index bc90e8b25c0..77aa0308cda 100644 --- a/tests/components/solarlog/test_sensor.py +++ b/tests/components/solarlog/test_sensor.py @@ -9,11 +9,13 @@ from solarlog_cli.solarlog_exceptions import ( SolarLogConnectionError, SolarLogUpdateError, ) +from solarlog_cli.solarlog_models import InverterData from syrupy import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.entity_registry import EntityRegistry from . import setup_platform @@ -25,7 +27,7 @@ async def test_all_entities( snapshot: SnapshotAssertion, mock_solarlog_connector: AsyncMock, mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, + entity_registry: EntityRegistry, ) -> None: """Test all entities.""" @@ -33,6 +35,49 @@ async def test_all_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) +async def test_add_remove_entities( + hass: HomeAssistant, + mock_solarlog_connector: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: DeviceRegistry, + entity_registry: EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test if entities are added and old are removed.""" + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + + assert hass.states.get("sensor.inverter_1_consumption_year").state == "354.687" + + # test no changes (coordinator.py line 114) + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_solarlog_connector.update_device_list.return_value = { + 0: InverterData(name="Inv 1", enabled=True), + 2: InverterData(name="Inverter 3", enabled=True), + } + mock_solarlog_connector.update_inverter_data.return_value = { + 0: InverterData( + name="Inv 1", enabled=True, consumption_year=354687, current_power=5 + ), + 2: InverterData( + name="Inverter 3", enabled=True, consumption_year=454, current_power=7 + ), + } + mock_solarlog_connector.device_name = {0: "Inv 1", 2: "Inverter 3"}.get + mock_solarlog_connector.device_enabled = {0: True, 2: True}.get + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.inverter_1_consumption_year") is None + assert hass.states.get("sensor.inv_1_consumption_year").state == "354.687" + assert hass.states.get("sensor.inverter_2_consumption_year") is None + assert hass.states.get("sensor.inverter_3_consumption_year").state == "0.454" + + @pytest.mark.parametrize( "exception", [ diff --git a/tests/components/soma/test_config_flow.py b/tests/components/soma/test_config_flow.py index 8b8548bfe3e..67109e37c6d 100644 --- a/tests/components/soma/test_config_flow.py +++ b/tests/components/soma/test_config_flow.py @@ -5,7 +5,8 @@ from unittest.mock import patch from api.soma_api import SomaApi from requests import RequestException -from homeassistant.components.soma import DOMAIN, config_flow +from homeassistant.components.soma import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from 
homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -17,57 +18,66 @@ MOCK_PORT = 3000 async def test_form(hass: HomeAssistant) -> None: """Test user form showing.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM async def test_import_abort(hass: HomeAssistant) -> None: """Test configuration from YAML aborting with existing entity.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass MockConfigEntry(domain=DOMAIN).add_to_hass(hass) - result = await flow.async_step_import() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT} + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_setup" async def test_import_create(hass: HomeAssistant) -> None: """Test configuration from YAML.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "success"}): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY async def test_error_status(hass: HomeAssistant) -> None: """Test Connect successfully returning error status.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "error"}): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "result_error" async def test_key_error(hass: HomeAssistant) -> None: """Test Connect returning empty string.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass + with patch.object(SomaApi, "list_devices", return_value={}): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "connection_error" async def test_exception(hass: HomeAssistant) -> None: """Test if RequestException fires when no connection can be made.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", side_effect=RequestException()): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "connection_error" @@ -75,8 +85,10 @@ async def test_exception(hass: HomeAssistant) -> None: async def test_full_flow(hass: HomeAssistant) -> None: """Check classic use case.""" hass.data[DOMAIN] = {} - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "success"}): - result = await flow.async_step_user({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await 
hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY diff --git a/tests/components/sonarr/__init__.py b/tests/components/sonarr/__init__.py index b6050808a34..660102ed082 100644 --- a/tests/components/sonarr/__init__.py +++ b/tests/components/sonarr/__init__.py @@ -5,6 +5,6 @@ from homeassistant.const import CONF_API_KEY, CONF_URL MOCK_REAUTH_INPUT = {CONF_API_KEY: "test-api-key-reauth"} MOCK_USER_INPUT = { - CONF_URL: "http://192.168.1.189:8989", + CONF_URL: "http://192.168.1.189:8989/", CONF_API_KEY: "MOCK_API_KEY", } diff --git a/tests/components/sonarr/test_config_flow.py b/tests/components/sonarr/test_config_flow.py index 6bd14e8b581..efbfbd749b3 100644 --- a/tests/components/sonarr/test_config_flow.py +++ b/tests/components/sonarr/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.components.sonarr.const import ( DEFAULT_WANTED_MAX_ITEMS, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -50,6 +50,34 @@ async def test_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} +async def test_url_rewrite( + hass: HomeAssistant, + mock_sonarr_config_flow: MagicMock, + mock_setup_entry: None, +) -> None: + """Test that the entered URL is rewritten with default port and trailing slash.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + user_input = MOCK_USER_INPUT.copy() + user_input[CONF_URL] = "https://192.168.1.189" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "192.168.1.189" + + assert result["data"] + assert result["data"][CONF_URL] == "https://192.168.1.189:443/" + + async def test_invalid_auth( hass: HomeAssistant, mock_sonarr_config_flow: MagicMock ) -> None: @@ -96,15 +124,7 @@ async def test_full_reauth_flow_implementation( """Test the manual reauth flow from start to finish.""" entry = init_integration - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -153,7 +173,7 @@ async def test_full_user_flow_implementation( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" async def test_full_user_flow_advanced_options( @@ -183,7 +203,7 @@ async def test_full_user_flow_advanced_options( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" assert result["data"][CONF_VERIFY_SSL] diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 6abb010557e..04b35e2c021 100644 --- a/tests/components/sonos/conftest.py +++
b/tests/components/sonos/conftest.py @@ -10,7 +10,12 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from soco import SoCo from soco.alarms import Alarms -from soco.data_structures import DidlFavorite, DidlPlaylistContainer, SearchResult +from soco.data_structures import ( + DidlFavorite, + DidlMusicTrack, + DidlPlaylistContainer, + SearchResult, +) from soco.events_base import Event as SonosEvent from homeassistant.components import ssdp, zeroconf @@ -185,6 +190,7 @@ class SoCoMockFactory: battery_info, alarm_clock, sonos_playlists: SearchResult, + sonos_queue: list[DidlMusicTrack], ) -> None: """Initialize the mock factory.""" self.mock_list: dict[str, MockSoCo] = {} @@ -194,6 +200,7 @@ class SoCoMockFactory: self.battery_info = battery_info self.alarm_clock = alarm_clock self.sonos_playlists = sonos_playlists + self.sonos_queue = sonos_queue def cache_mock( self, mock_soco: MockSoCo, ip_address: str, name: str = "Zone A" @@ -207,6 +214,7 @@ class SoCoMockFactory: mock_soco.get_current_track_info.return_value = self.current_track_info mock_soco.music_source_from_uri = SoCo.music_source_from_uri mock_soco.get_sonos_playlists.return_value = self.sonos_playlists + mock_soco.get_queue.return_value = self.sonos_queue my_speaker_info = self.speaker_info.copy() my_speaker_info["zone_name"] = name my_speaker_info["uid"] = mock_soco.uid @@ -277,6 +285,7 @@ def soco_factory( alarm_clock, sonos_playlists: SearchResult, sonos_websocket, + sonos_queue: list[DidlMusicTrack], ): """Create factory for instantiating SoCo mocks.""" factory = SoCoMockFactory( @@ -286,6 +295,7 @@ def soco_factory( battery_info, alarm_clock, sonos_playlists, + sonos_queue=sonos_queue, ) with ( patch("homeassistant.components.sonos.SoCo", new=factory.get_mock), @@ -370,6 +380,13 @@ def sonos_playlists_fixture() -> SearchResult: return SearchResult(playlists_list, "sonos_playlists", 1, 1, 0) +@pytest.fixture(name="sonos_queue") +def sonos_queue() -> list[DidlMusicTrack]: + """Create sonos queue fixture.""" + queue = load_json_value_fixture("sonos_queue.json", "sonos") + return [DidlMusicTrack.from_dict(track) for track in queue] + + class MockMusicServiceItem: """Mocks a Soco MusicServiceItem.""" diff --git a/tests/components/sonos/fixtures/sonos_queue.json b/tests/components/sonos/fixtures/sonos_queue.json new file mode 100644 index 00000000000..ffe08fc2b08 --- /dev/null +++ b/tests/components/sonos/fixtures/sonos_queue.json @@ -0,0 +1,42 @@ +[ + { + "title": "Something", + "album": "Abbey Road", + "creator": "The Beatles", + "item_id": "Q:0/1", + "parent_id": "Q:0", + "original_track_number": 3, + "resources": [ + { + "uri": "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] + }, + { + "title": "Come Together", + "album": "Abbey Road", + "creator": "The Beatles", + "item_id": "Q:0/2", + "parent_id": "Q:0", + "original_track_number": 1, + "resources": [ + { + "uri": "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] + }, + { + "title": "Track with no album or creator", + "item_id": "Q:0/3", + "parent_id": "Q:0", + "original_track_number": 1, + "resources": [ + { + "uri": "x-file-cifs://192.168.42.10/music/TrackWithNoAlbumOrCreator.mp3", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] + } +] diff --git a/tests/components/sonos/snapshots/test_media_player.ambr b/tests/components/sonos/snapshots/test_media_player.ambr index 
9c43bceb43b..8ef298de3db 100644 --- a/tests/components/sonos/snapshots/test_media_player.ambr +++ b/tests/components/sonos/snapshots/test_media_player.ambr @@ -56,3 +56,27 @@ 'state': 'idle', }) # --- +# name: test_media_get_queue + dict({ + 'media_player.zone_a': list([ + dict({ + 'media_album_name': 'Abbey Road', + 'media_artist': 'The Beatles', + 'media_content_id': 'x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3', + 'media_title': 'Something', + }), + dict({ + 'media_album_name': 'Abbey Road', + 'media_artist': 'The Beatles', + 'media_content_id': 'x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3', + 'media_title': 'Come Together', + }), + dict({ + 'media_album_name': None, + 'media_artist': None, + 'media_content_id': 'x-file-cifs://192.168.42.10/music/TrackWithNoAlbumOrCreator.mp3', + 'media_title': 'Track with no album or creator', + }), + ]), + }) +# --- diff --git a/tests/components/sonos/test_init.py b/tests/components/sonos/test_init.py index 85ab8f4dd5a..36a6571f3b0 100644 --- a/tests/components/sonos/test_init.py +++ b/tests/components/sonos/test_init.py @@ -138,7 +138,7 @@ async def test_async_poll_manual_hosts_warnings( await manager.async_poll_manual_hosts() assert len(caplog.messages) == 1 record = caplog.records[0] - assert record.levelname == "INFO" + assert record.levelname == "WARNING" assert "Connection reestablished to Sonos device" in record.message assert mock_async_call_later.call_count == 3 diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index fa77293fbde..63b2c8889ec 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -32,6 +32,7 @@ from homeassistant.components.sonos.const import ( ) from homeassistant.components.sonos.media_player import ( LONG_SERVICE_TIMEOUT, + SERVICE_GET_QUEUE, SERVICE_RESTORE, SERVICE_SNAPSHOT, VOLUME_INCREMENT, @@ -71,6 +72,7 @@ async def test_device_registry( ) assert reg_device is not None assert reg_device.model == "Model Name" + assert reg_device.model_id == "S12" assert reg_device.sw_version == "13.1" assert reg_device.connections == { (CONNECTION_NETWORK_MAC, "00:11:22:33:44:55"), @@ -231,6 +233,45 @@ async def test_play_media_library( ) +@pytest.mark.parametrize( + ("media_content_type", "media_content_id", "message"), + [ + ( + "artist", + "A:ALBUM/UnknowAlbum", + "Could not find media in library: A:ALBUM/UnknowAlbum", + ), + ( + "UnknownContent", + "A:ALBUM/UnknowAlbum", + "Sonos does not support media content type: UnknownContent", + ), + ], +) +async def test_play_media_library_content_error( + hass: HomeAssistant, + async_autosetup_sonos, + media_content_type, + media_content_id, + message, +) -> None: + """Test playing local library errors on content and content type.""" + with pytest.raises( + ServiceValidationError, + match=message, + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: media_content_type, + ATTR_MEDIA_CONTENT_ID: media_content_id, + }, + blocking=True, + ) + + _track_url = "S://192.168.42.100/music/iTunes/The%20Beatles/A%20Hard%20Day%2fs%I%20Should%20Have%20Known%20Better.mp3" @@ -1121,3 +1162,46 @@ async def test_play_media_announce( blocking=True, ) assert sonos_websocket.play_clip.call_count == 1 + + # Test speakers that do not support announce. 
This + # will result in playing the clip directly via play_uri + sonos_websocket.play_clip.reset_mock() + sonos_websocket.play_clip.side_effect = None + retval = {"success": 0, "type": "globalError"} + sonos_websocket.play_clip.return_value = [retval, {}] + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_CONTENT_ID: content_id, + ATTR_MEDIA_ANNOUNCE: True, + }, + blocking=True, + ) + assert sonos_websocket.play_clip.call_count == 1 + soco.play_uri.assert_called_with(content_id, force_radio=False) + + +async def test_media_get_queue( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + soco_factory, + snapshot: SnapshotAssertion, +) -> None: + """Test getting the media queue.""" + soco_mock = soco_factory.mock_list.get("192.168.42.2") + result = await hass.services.async_call( + SONOS_DOMAIN, + SERVICE_GET_QUEUE, + { + ATTR_ENTITY_ID: "media_player.zone_a", + }, + blocking=True, + return_response=True, + ) + soco_mock.get_queue.assert_called_with(max_items=0) + assert result == snapshot diff --git a/tests/components/spaceapi/test_init.py b/tests/components/spaceapi/test_init.py index 0de96d05605..8c0e897947a 100644 --- a/tests/components/spaceapi/test_init.py +++ b/tests/components/spaceapi/test_init.py @@ -6,7 +6,12 @@ from unittest.mock import patch from aiohttp.test_utils import TestClient import pytest -from homeassistant.components.spaceapi import DOMAIN, SPACEAPI_VERSION, URL_API_SPACEAPI +from homeassistant.components.spaceapi import ( + ATTR_SENSOR_LOCATION, + DOMAIN, + SPACEAPI_VERSION, + URL_API_SPACEAPI, +) from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -27,7 +32,7 @@ CONFIG = { "icon_closed": "https://home-assistant.io/close.png", }, "sensors": { - "temperature": ["test.temp1", "test.temp2"], + "temperature": ["test.temp1", "test.temp2", "test.temp3"], "humidity": ["test.hum1"], }, "spacefed": {"spacenet": True, "spacesaml": False, "spacephone": True}, @@ -67,17 +72,23 @@ SENSOR_OUTPUT = { "location": "Home", "name": "temp1", "unit": UnitOfTemperature.CELSIUS, - "value": "25", + "value": 25.0, + }, + { + "location": "outside", + "name": "temp2", + "unit": UnitOfTemperature.CELSIUS, + "value": 23.0, }, { "location": "Home", - "name": "temp2", + "name": "temp3", "unit": UnitOfTemperature.CELSIUS, - "value": "23", + "value": None, }, ], "humidity": [ - {"location": "Home", "name": "hum1", "unit": PERCENTAGE, "value": "88"} + {"location": "Home", "name": "hum1", "unit": PERCENTAGE, "value": 88.0} ], } @@ -96,6 +107,19 @@ def mock_client(hass: HomeAssistant, hass_client: ClientSessionGenerator) -> Tes hass.states.async_set( "test.temp2", 23, + attributes={ + ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, + ATTR_SENSOR_LOCATION: "outside", + }, + ) + hass.states.async_set( + "test.temp3", + "foo", + attributes={ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + hass.states.async_set( + "test.temp3", + "foo", attributes={ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, ) hass.states.async_set( diff --git a/tests/components/spc/test_alarm_control_panel.py b/tests/components/spc/test_alarm_control_panel.py index 7b1ab4ff947..12fb885b92b 100644 --- a/tests/components/spc/test_alarm_control_panel.py +++ b/tests/components/spc/test_alarm_control_panel.py @@ -4,7 +4,7 @@ from unittest.mock import 
AsyncMock from pyspcwebgw.const import AreaMode -from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -19,7 +19,7 @@ async def test_update_alarm_device(hass: HomeAssistant, mock_client: AsyncMock) entity_id = "alarm_control_panel.house" - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY assert hass.states.get(entity_id).attributes["changed_by"] == "Sven" mock_area = mock_client.return_value.areas["1"] @@ -30,5 +30,5 @@ async def test_update_alarm_device(hass: HomeAssistant, mock_client: AsyncMock) await mock_client.call_args_list[0][1]["async_callback"](mock_area) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" diff --git a/tests/components/spider/__init__.py b/tests/components/spider/__init__.py index d145f4efc09..4d9139a501e 100644 --- a/tests/components/spider/__init__.py +++ b/tests/components/spider/__init__.py @@ -1 +1 @@ -"""Tests for the Spider component.""" +"""Tests for the Spider integration.""" diff --git a/tests/components/spider/test_config_flow.py b/tests/components/spider/test_config_flow.py deleted file mode 100644 index 69f97130f8c..00000000000 --- a/tests/components/spider/test_config_flow.py +++ /dev/null @@ -1,112 +0,0 @@ -"""Tests for the Spider config flow.""" - -from unittest.mock import Mock, patch - -import pytest - -from homeassistant import config_entries -from homeassistant.components.spider.const import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - -USERNAME = "spider-username" -PASSWORD = "spider-password" - -SPIDER_USER_DATA = { - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, -} - - -@pytest.fixture(name="spider") -def spider_fixture() -> Mock: - """Patch libraries.""" - with patch("homeassistant.components.spider.config_flow.SpiderApi") as spider: - yield spider - - -async def test_user(hass: HomeAssistant, spider) -> None: - """Test user config.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - with ( - patch( - "homeassistant.components.spider.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.spider.async_setup_entry", return_value=True - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=SPIDER_USER_DATA - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DOMAIN - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert not result["result"].unique_id - - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_import(hass: HomeAssistant, spider) -> None: - """Test import step.""" - - with ( - patch( - "homeassistant.components.spider.async_setup", - 
return_value=True, - ) as mock_setup, - patch( - "homeassistant.components.spider.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=SPIDER_USER_DATA, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DOMAIN - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert not result["result"].unique_id - - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_abort_if_already_setup(hass: HomeAssistant, spider) -> None: - """Test we abort if Spider is already setup.""" - MockConfigEntry(domain=DOMAIN, data=SPIDER_USER_DATA).add_to_hass(hass) - - # Should fail, config exist (import) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER}, data=SPIDER_USER_DATA - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" - - # Should fail, config exist (flow) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=SPIDER_USER_DATA - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/spider/test_init.py b/tests/components/spider/test_init.py new file mode 100644 index 00000000000..6d1d87cfa6a --- /dev/null +++ b/tests/components/spider/test_init.py @@ -0,0 +1,50 @@ +"""Tests for the Spider integration.""" + +from homeassistant.components.spider import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from tests.common import MockConfigEntry + + +async def test_spider_repair_issue( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test the Spider configuration entry loading/unloading handles the repair.""" + config_entry_1 = MockConfigEntry( + title="Example 1", + domain=DOMAIN, + ) + config_entry_1.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_1.entry_id) + await hass.async_block_till_done() + assert config_entry_1.state is ConfigEntryState.LOADED + + # Add a second one + config_entry_2 = MockConfigEntry( + title="Example 2", + domain=DOMAIN, + ) + config_entry_2.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_2.entry_id) + await hass.async_block_till_done() + + assert config_entry_2.state is ConfigEntryState.LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) + + # Remove the first one + await hass.config_entries.async_remove(config_entry_1.entry_id) + await hass.async_block_till_done() + + assert config_entry_1.state is ConfigEntryState.NOT_LOADED + assert config_entry_2.state is ConfigEntryState.LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) + + # Remove the second one + await hass.config_entries.async_remove(config_entry_2.entry_id) + await hass.async_block_till_done() + + assert config_entry_1.state is ConfigEntryState.NOT_LOADED + assert config_entry_2.state is ConfigEntryState.NOT_LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None diff --git a/tests/components/spotify/__init__.py b/tests/components/spotify/__init__.py index 51e3404d3ad..4730530b4f3 100644 --- 
a/tests/components/spotify/__init__.py +++ b/tests/components/spotify/__init__.py @@ -1 +1,13 @@ -"""Tests for the Spotify integration.""" +"""Tests for the Spotify component.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Set up the component.""" + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/spotify/conftest.py b/tests/components/spotify/conftest.py index 3f248b54529..67d4eac3960 100644 --- a/tests/components/spotify/conftest.py +++ b/tests/components/spotify/conftest.py @@ -1,128 +1,150 @@ """Common test fixtures.""" from collections.abc import Generator -from typing import Any -from unittest.mock import MagicMock, patch +import time +from unittest.mock import AsyncMock, patch import pytest +from spotifyaio.models import ( + Album, + Artist, + ArtistResponse, + Devices, + NewReleasesResponse, + NewReleasesResponseInner, + PlaybackState, + PlayedTrackResponse, + Playlist, + PlaylistResponse, + SavedAlbumResponse, + SavedShowResponse, + SavedTrackResponse, + Show, + ShowEpisodesResponse, + TopArtistsResponse, + TopTracksResponse, + UserProfile, +) from homeassistant.components.application_credentials import ( ClientCredential, async_import_client_credential, ) -from homeassistant.components.spotify import DOMAIN +from homeassistant.components.spotify.const import DOMAIN, SPOTIFY_SCOPES from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_fixture + +SCOPES = " ".join(SPOTIFY_SCOPES) + + +@pytest.fixture(name="expires_at") +def mock_expires_at() -> int: + """Fixture to set the oauth token expiration time.""" + return time.time() + 3600 @pytest.fixture -def mock_config_entry_1() -> MockConfigEntry: - """Mock a config entry with an upper case entry id.""" +def mock_config_entry(expires_at: int) -> MockConfigEntry: + """Create Spotify entry in Home Assistant.""" return MockConfigEntry( domain=DOMAIN, title="spotify_1", + unique_id="1112264111", data={ - "auth_implementation": "spotify_c95e4090d4d3438b922331e7428f8171", + "auth_implementation": DOMAIN, "token": { - "access_token": "AccessToken", - "token_type": "Bearer", - "expires_in": 3600, - "refresh_token": "RefreshToken", - "scope": "playlist-read-private ...", - "expires_at": 1724198975.8829377, + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": SCOPES, }, - "id": "32oesphrnacjcf7vw5bf6odx3oiu", + "id": "1112264111", "name": "spotify_account_1", }, - unique_id="84fce612f5b8", entry_id="01J5TX5A0FF6G5V0QJX6HBC94T", ) @pytest.fixture -def mock_config_entry_2() -> MockConfigEntry: - """Mock a config entry with a lower case entry id.""" - return MockConfigEntry( - domain=DOMAIN, - title="spotify_2", - data={ - "auth_implementation": "spotify_c95e4090d4d3438b922331e7428f8171", - "token": { - "access_token": "AccessToken", - "token_type": "Bearer", - "expires_in": 3600, - "refresh_token": "RefreshToken", - "scope": "playlist-read-private ...", - "expires_at": 1724198975.8829377, - }, - "id": "55oesphrnacjcf7vw5bf6odx3oiu", - "name": "spotify_account_2", - }, - unique_id="99fce612f5b8", - entry_id="32oesphrnacjcf7vw5bf6odx3", +async def setup_credentials(hass: HomeAssistant) -> 
None: + """Fixture to set up credentials.""" + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential("CLIENT_ID", "CLIENT_SECRET"), + DOMAIN, ) -@pytest.fixture -def spotify_playlists() -> dict[str, Any]: - """Mock the return from getting a list of playlists.""" - return { - "href": "https://api.spotify.com/v1/users/31oesphrnacjcf7vw5bf6odx3oiu/playlists?offset=0&limit=48", - "limit": 48, - "next": None, - "offset": 0, - "previous": None, - "total": 1, - "items": [ - { - "collaborative": False, - "description": "", - "id": "unique_identifier_00", - "name": "Playlist1", - "type": "playlist", - "uri": "spotify:playlist:unique_identifier_00", - } - ], - } - - -@pytest.fixture -def spotify_mock(spotify_playlists: dict[str, Any]) -> Generator[MagicMock]: - """Mock the Spotify API.""" - with patch("homeassistant.components.spotify.Spotify") as spotify_mock: - mock = MagicMock() - mock.current_user_playlists.return_value = spotify_playlists - spotify_mock.return_value = mock - yield spotify_mock - - -@pytest.fixture -async def spotify_setup( - hass: HomeAssistant, - spotify_mock: MagicMock, - mock_config_entry_1: MockConfigEntry, - mock_config_entry_2: MockConfigEntry, -): - """Set up the spotify integration.""" - with patch( - "homeassistant.components.spotify.OAuth2Session.async_ensure_token_valid" - ): - await async_setup_component(hass, "application_credentials", {}) - await hass.async_block_till_done() - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential("CLIENT_ID", "CLIENT_SECRET"), - "spotify_c95e4090d4d3438b922331e7428f8171", - ) - await hass.async_block_till_done() - mock_config_entry_1.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry_1.entry_id) - mock_config_entry_2.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry_2.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - await async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done(wait_background_tasks=True) +@pytest.fixture(autouse=True) +async def patch_sleep() -> Generator[AsyncMock]: + """Fixture to skip the sleep after media player requests.""" + with patch("homeassistant.components.spotify.media_player.AFTER_REQUEST_SLEEP", 0): yield + + +@pytest.fixture +def mock_spotify() -> Generator[AsyncMock]: + """Mock the Spotify API.""" + with ( + patch( + "homeassistant.components.spotify.SpotifyClient", autospec=True + ) as spotify_mock, + patch( + "homeassistant.components.spotify.config_flow.SpotifyClient", + new=spotify_mock, + ), + ): + client = spotify_mock.return_value + # All these fixtures can be retrieved using the Web API client at + # https://developer.spotify.com/documentation/web-api + for fixture, method, obj in ( + ( + "current_user_playlist.json", + "get_playlists_for_current_user", + PlaylistResponse, + ), + ("saved_albums.json", "get_saved_albums", SavedAlbumResponse), + ("saved_tracks.json", "get_saved_tracks", SavedTrackResponse), + ("saved_shows.json", "get_saved_shows", SavedShowResponse), + ( + "recently_played_tracks.json", + "get_recently_played_tracks", + PlayedTrackResponse, + ), + ("top_artists.json", "get_top_artists", TopArtistsResponse), + ("top_tracks.json", "get_top_tracks", TopTracksResponse), + ("show_episodes.json", "get_show_episodes", ShowEpisodesResponse), + ("artist_albums.json", "get_artist_albums", NewReleasesResponseInner), + ): + getattr(client, method).return_value = obj.from_json( + load_fixture(fixture,
DOMAIN) + ).items + for fixture, method, obj in ( + ( + "playback.json", + "get_playback", + PlaybackState, + ), + ("current_user.json", "get_current_user", UserProfile), + ("playlist.json", "get_playlist", Playlist), + ("album.json", "get_album", Album), + ("artist.json", "get_artist", Artist), + ("show.json", "get_show", Show), + ): + getattr(client, method).return_value = obj.from_json( + load_fixture(fixture, DOMAIN) + ) + client.get_followed_artists.return_value = ArtistResponse.from_json( + load_fixture("followed_artists.json", DOMAIN) + ).artists.items + client.get_new_releases.return_value = NewReleasesResponse.from_json( + load_fixture("new_releases.json", DOMAIN) + ).albums.items + client.get_devices.return_value = Devices.from_json( + load_fixture("devices.json", DOMAIN) + ).devices + yield spotify_mock diff --git a/tests/components/spotify/fixtures/album.json b/tests/components/spotify/fixtures/album.json new file mode 100644 index 00000000000..d7240298e9f --- /dev/null +++ b/tests/components/spotify/fixtures/album.json @@ -0,0 +1,128 @@ +{ + "album_type": "album", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3jULn43a6xfzqleyeFjPIq" + }, + "href": "https://api.spotify.com/v1/artists/3jULn43a6xfzqleyeFjPIq", + "id": "3jULn43a6xfzqleyeFjPIq", + "name": "Area 11", + "type": "artist", + "uri": "spotify:artist:3jULn43a6xfzqleyeFjPIq" + } + ], + "available_markets": [], + "copyrights": [ + { + "text": "2020 Smihilism Records", + "type": "C" + }, + { + "text": "2020 Smihilism Records", + "type": "P" + } + ], + "external_ids": { + "upc": "195916707034" + }, + "external_urls": { + "spotify": "https://open.spotify.com/album/3IqzqH6ShrRtie9Yd2ODyG" + }, + "genres": [], + "href": "https://api.spotify.com/v1/albums/3IqzqH6ShrRtie9Yd2ODyG", + "id": "3IqzqH6ShrRtie9Yd2ODyG", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273a61a28c2f084761f8833bce6", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02a61a28c2f084761f8833bce6", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851a61a28c2f084761f8833bce6", + "width": 64 + } + ], + "label": "Smihilism Records", + "name": "SINGLARITY", + "popularity": 29, + "release_date": "2020-12-18", + "release_date_precision": "day", + "total_tracks": 11, + "tracks": { + "href": "https://api.spotify.com/v1/albums/3IqzqH6ShrRtie9Yd2ODyG/tracks?offset=0&limit=50&locale=en-US,en;q=0.5", + "items": [ + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3jULn43a6xfzqleyeFjPIq" + }, + "href": "https://api.spotify.com/v1/artists/3jULn43a6xfzqleyeFjPIq", + "id": "3jULn43a6xfzqleyeFjPIq", + "name": "Area 11", + "type": "artist", + "uri": "spotify:artist:3jULn43a6xfzqleyeFjPIq" + } + ], + "available_markets": [], + "disc_number": 1, + "duration_ms": 260372, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/6akJGriy4njdP8fZTPGjwz" + }, + "href": "https://api.spotify.com/v1/tracks/6akJGriy4njdP8fZTPGjwz", + "id": "6akJGriy4njdP8fZTPGjwz", + "is_local": false, + "name": "All Your Friends", + "preview_url": "https://p.scdn.co/mp3-preview/484344e579edfdb8e8f872d73299aff2c3d0369d?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:6akJGriy4njdP8fZTPGjwz" + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3jULn43a6xfzqleyeFjPIq" + }, + "href": 
"https://api.spotify.com/v1/artists/3jULn43a6xfzqleyeFjPIq", + "id": "3jULn43a6xfzqleyeFjPIq", + "name": "Area 11", + "type": "artist", + "uri": "spotify:artist:3jULn43a6xfzqleyeFjPIq" + } + ], + "available_markets": [], + "disc_number": 1, + "duration_ms": 206613, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/7N02bJK1amhplZ8yAapRS5" + }, + "href": "https://api.spotify.com/v1/tracks/7N02bJK1amhplZ8yAapRS5", + "id": "7N02bJK1amhplZ8yAapRS5", + "is_local": false, + "name": "New Magiks", + "preview_url": "https://p.scdn.co/mp3-preview/b59a5a73ed2e9a61be471822993e91210d5f255a?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 2, + "type": "track", + "uri": "spotify:track:7N02bJK1amhplZ8yAapRS5" + } + ], + "limit": 50, + "next": null, + "offset": 0, + "previous": null, + "total": 11 + }, + "type": "album", + "uri": "spotify:album:3IqzqH6ShrRtie9Yd2ODyG" +} diff --git a/tests/components/spotify/fixtures/artist.json b/tests/components/spotify/fixtures/artist.json new file mode 100644 index 00000000000..e60429fa030 --- /dev/null +++ b/tests/components/spotify/fixtures/artist.json @@ -0,0 +1,33 @@ +{ + "external_urls": { + "spotify": "https://open.spotify.com/artist/0TnOYISbd1XYRBk9myaseg" + }, + "followers": { + "href": null, + "total": 10817055 + }, + "genres": ["dance pop", "miami hip hop", "pop"], + "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg?locale=en-US%2Cen%3Bq%3D0.5", + "id": "0TnOYISbd1XYRBk9myaseg", + "images": [ + { + "url": "https://i.scdn.co/image/ab6761610000e5ebee07b5820dd91d15d397e29c", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616100005174ee07b5820dd91d15d397e29c", + "height": 320, + "width": 320 + }, + { + "url": "https://i.scdn.co/image/ab6761610000f178ee07b5820dd91d15d397e29c", + "height": 160, + "width": 160 + } + ], + "name": "Pitbull", + "popularity": 85, + "type": "artist", + "uri": "spotify:artist:0TnOYISbd1XYRBk9myaseg" +} diff --git a/tests/components/spotify/fixtures/artist_albums.json b/tests/components/spotify/fixtures/artist_albums.json new file mode 100644 index 00000000000..2cc66d1ac0b --- /dev/null +++ b/tests/components/spotify/fixtures/artist_albums.json @@ -0,0 +1,472 @@ +{ + "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg/albums?offset=0&limit=20&locale=en-US,en;q%3D0.5&include_groups=album,single,compilation,appears_on", + "limit": 20, + "next": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg/albums?offset=20&limit=20&locale=en-US,en;q%3D0.5&include_groups=album,single,compilation,appears_on", + "offset": 0, + "previous": null, + "total": 903, + "items": [ + { + "album_type": "album", + "total_tracks": 7, + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + 
"BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/56jg3KJcYmfL7RzYmG2O1Q" + }, + "href": "https://api.spotify.com/v1/albums/56jg3KJcYmfL7RzYmG2O1Q", + "id": "56jg3KJcYmfL7RzYmG2O1Q", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b273a0bac1996f26274685db1520", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e02a0bac1996f26274685db1520", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d00004851a0bac1996f26274685db1520", + "height": 64, + "width": 64 + } + ], + "name": "Trackhouse (Daytona 500 Edition)", + "release_date": "2024-02-16", + "release_date_precision": "day", + "type": "album", + "uri": "spotify:album:56jg3KJcYmfL7RzYmG2O1Q", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0TnOYISbd1XYRBk9myaseg" + }, + "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg", + "id": "0TnOYISbd1XYRBk9myaseg", + "name": "Pitbull", + "type": "artist", + "uri": "spotify:artist:0TnOYISbd1XYRBk9myaseg" + } + ], + "album_group": "album" + }, + { + "album_type": "album", + "total_tracks": 14, + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/1l86t4bTNT2j1X0ZBCIv6R" + }, + "href": "https://api.spotify.com/v1/albums/1l86t4bTNT2j1X0ZBCIv6R", + "id": "1l86t4bTNT2j1X0ZBCIv6R", + "images": [ + { + 
"url": "https://i.scdn.co/image/ab67616d0000b27333a4ba8f73271a749c5d953d", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e0233a4ba8f73271a749c5d953d", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d0000485133a4ba8f73271a749c5d953d", + "height": 64, + "width": 64 + } + ], + "name": "Trackhouse", + "release_date": "2023-10-06", + "release_date_precision": "day", + "type": "album", + "uri": "spotify:album:1l86t4bTNT2j1X0ZBCIv6R", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0TnOYISbd1XYRBk9myaseg" + }, + "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg", + "id": "0TnOYISbd1XYRBk9myaseg", + "name": "Pitbull", + "type": "artist", + "uri": "spotify:artist:0TnOYISbd1XYRBk9myaseg" + } + ], + "album_group": "album" + } + ] +} diff --git a/tests/components/spotify/fixtures/current_user.json b/tests/components/spotify/fixtures/current_user.json new file mode 100644 index 00000000000..a4f95b6c33e --- /dev/null +++ b/tests/components/spotify/fixtures/current_user.json @@ -0,0 +1,33 @@ +{ + "display_name": "Henk", + "external_urls": { + "spotify": "https://open.spotify.com/user/1112264111" + }, + "href": "https://api.spotify.com/v1/users/1112264111", + "id": "1112264111", + "images": [ + { + "url": "https://i.scdn.co/image/ab67757000003b8246569a64d252247acb1491bc", + "height": 64, + "width": 64 + }, + { + "url": "https://i.scdn.co/image/ab6775700000ee8546569a64d252247acb1491bc", + "height": 300, + "width": 300 + } + ], + "type": "user", + "uri": "spotify:user:1112264111", + "followers": { + "href": null, + "total": 21 + }, + "country": "NL", + "product": "premium", + "explicit_content": { + "filter_enabled": false, + "filter_locked": false + }, + "email": "henk@outlook.com" +} diff --git a/tests/components/spotify/fixtures/current_user_playlist.json b/tests/components/spotify/fixtures/current_user_playlist.json new file mode 100644 index 00000000000..c9d306504db --- /dev/null +++ b/tests/components/spotify/fixtures/current_user_playlist.json @@ -0,0 +1,92 @@ +{ + "href": "https://api.spotify.com/v1/users/1112264111/playlists?offset=0&limit=20", + "items": [ + { + "collaborative": false, + "description": "", + "external_urls": { + "spotify": "https://open.spotify.com/playlist/4WkWJ0EjHEFASDevhM8oPw" + }, + "href": "https://api.spotify.com/v1/playlists/4WkWJ0EjHEFASDevhM8oPw", + "id": "4WkWJ0EjHEFASDevhM8oPw", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273d061f5bfae8d38558f3698c1", + "width": 640 + } + ], + "name": "Hyper", + "owner": { + "display_name": "Henk", + "external_urls": { + "spotify": "https://open.spotify.com/user/1112264111" + }, + "href": "https://api.spotify.com/v1/users/1112264111", + "id": "1112264111", + "type": "user", + "uri": "spotify:user:1112264111" + }, + "primary_color": null, + "public": true, + "snapshot_id": "Myw2ZjkyN2Q1ZWEwMjU1YWJjM2EwOWQ5YzA2ZDJjYjIzNTEzNzVmYmVl", + "tracks": { + "href": "https://api.spotify.com/v1/playlists/4WkWJ0EjHEFASDevhM8oPw/tracks", + "total": 1 + }, + "type": "playlist", + "uri": "spotify:playlist:4WkWJ0EjHEFASDevhM8oPw" + }, + { + "collaborative": false, + "description": "", + "external_urls": { + "spotify": "https://open.spotify.com/playlist/1RHirWgH1weMsBLi4KOK9d" + }, + "href": "https://api.spotify.com/v1/playlists/1RHirWgH1weMsBLi4KOK9d", + "id": "1RHirWgH1weMsBLi4KOK9d", + "images": [ + { + "height": 640, + "url": 
"https://mosaic.scdn.co/640/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6", + "width": 640 + }, + { + "height": 300, + "url": "https://mosaic.scdn.co/300/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6", + "width": 300 + }, + { + "height": 60, + "url": "https://mosaic.scdn.co/60/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6", + "width": 60 + } + ], + "name": "Ain’t got shit on me", + "owner": { + "display_name": "Rens Boeser", + "external_urls": { + "spotify": "https://open.spotify.com/user/317g2sbpe3ccycu45fes6lfr5lpe" + }, + "href": "https://api.spotify.com/v1/users/317g2sbpe3ccycu45fes6lfr5lpe", + "id": "317g2sbpe3ccycu45fes6lfr5lpe", + "type": "user", + "uri": "spotify:user:317g2sbpe3ccycu45fes6lfr5lpe" + }, + "primary_color": null, + "public": false, + "snapshot_id": "MjksMTdlMGU4ZGIxZWY5NWRkNjVkMzQ1YzUxYjk3YWZkMDdhNzRjNWE0Zg==", + "tracks": { + "href": "https://api.spotify.com/v1/playlists/1RHirWgH1weMsBLi4KOK9d/tracks", + "total": 28 + }, + "type": "playlist", + "uri": "spotify:playlist:1RHirWgH1weMsBLi4KOK9d" + } + ], + "limit": 18, + "next": "https://api.spotify.com/v1/users/1112264111/playlists?offset=18&limit=20", + "offset": 0, + "previous": null, + "total": 101 +} diff --git a/tests/components/spotify/fixtures/devices.json b/tests/components/spotify/fixtures/devices.json new file mode 100644 index 00000000000..2dd8dfd7c3b --- /dev/null +++ b/tests/components/spotify/fixtures/devices.json @@ -0,0 +1,14 @@ +{ + "devices": [ + { + "id": "21dac6b0e0a1f181870fdc9749b2656466557666", + "is_active": false, + "is_private_session": false, + "is_restricted": false, + "name": "DESKTOP-BKC5SIK", + "supports_volume": true, + "type": "Computer", + "volume_percent": 69 + } + ] +} diff --git a/tests/components/spotify/fixtures/followed_artists.json b/tests/components/spotify/fixtures/followed_artists.json new file mode 100644 index 00000000000..4e03ed8291b --- /dev/null +++ b/tests/components/spotify/fixtures/followed_artists.json @@ -0,0 +1,87 @@ +{ + "artists": { + "items": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0lLY20XpZ9yDobkbHI7u1y" + }, + "followers": { + "href": null, + "total": 349437 + }, + "genres": [ + "brostep", + "complextro", + "danish electronic", + "edm", + "electro house", + "glitch", + "speedrun" + ], + "href": "https://api.spotify.com/v1/artists/0lLY20XpZ9yDobkbHI7u1y", + "id": "0lLY20XpZ9yDobkbHI7u1y", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6761610000e5eb0fb1220e7e3ace47ebad023e", + "width": 640 + }, + { + "height": 320, + "url": "https://i.scdn.co/image/ab676161000051740fb1220e7e3ace47ebad023e", + "width": 320 + }, + { + "height": 160, + "url": "https://i.scdn.co/image/ab6761610000f1780fb1220e7e3ace47ebad023e", + "width": 160 + } + ], + "name": "Pegboard Nerds", + "popularity": 52, + "type": "artist", + "uri": "spotify:artist:0lLY20XpZ9yDobkbHI7u1y" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0p4nmQO2msCgU4IF37Wi3j" + }, + "followers": { + "href": null, + "total": 11296082 + }, + "genres": ["canadian pop", "candy pop", "dance pop", "pop"], + "href": "https://api.spotify.com/v1/artists/0p4nmQO2msCgU4IF37Wi3j", + "id": 
"0p4nmQO2msCgU4IF37Wi3j", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6761610000e5eb5c3349ddba6b8e064c1bab16", + "width": 640 + }, + { + "height": 320, + "url": "https://i.scdn.co/image/ab676161000051745c3349ddba6b8e064c1bab16", + "width": 320 + }, + { + "height": 160, + "url": "https://i.scdn.co/image/ab6761610000f1785c3349ddba6b8e064c1bab16", + "width": 160 + } + ], + "name": "Avril Lavigne", + "popularity": 78, + "type": "artist", + "uri": "spotify:artist:0p4nmQO2msCgU4IF37Wi3j" + } + ], + "next": "https://api.spotify.com/v1/me/following?type=artist&limit=20&locale=en-US,en;q=0.5&after=2NZMqINcyfepvLxQJdzcZk", + "total": 74, + "cursors": { + "after": "2NZMqINcyfepvLxQJdzcZk" + }, + "limit": 20, + "href": "https://api.spotify.com/v1/me/following?type=artist&limit=20&locale=en-US,en;q=0.5" + } +} diff --git a/tests/components/spotify/fixtures/new_releases.json b/tests/components/spotify/fixtures/new_releases.json new file mode 100644 index 00000000000..b6948ef79a5 --- /dev/null +++ b/tests/components/spotify/fixtures/new_releases.json @@ -0,0 +1,469 @@ +{ + "albums": { + "href": "https://api.spotify.com/v1/browse/new-releases?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "album_type": "album", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4gzpq5DPGxSnKTe4SA8HAU" + }, + "href": "https://api.spotify.com/v1/artists/4gzpq5DPGxSnKTe4SA8HAU", + "id": "4gzpq5DPGxSnKTe4SA8HAU", + "name": "Coldplay", + "type": "artist", + "uri": "spotify:artist:4gzpq5DPGxSnKTe4SA8HAU" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/5SGtrmYbIo0Dsg4kJ4qjM6" + }, + "href": "https://api.spotify.com/v1/albums/5SGtrmYbIo0Dsg4kJ4qjM6", + "id": "5SGtrmYbIo0Dsg4kJ4qjM6", + "images": [ + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e0209ba52a5116e0c3e8461f58b", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d0000485109ba52a5116e0c3e8461f58b", + "width": 64 + }, + { + "height": 640, + 
"url": "https://i.scdn.co/image/ab67616d0000b27309ba52a5116e0c3e8461f58b", + "width": 640 + } + ], + "name": "Moon Music", + "release_date": "2024-10-04", + "release_date_precision": "day", + "total_tracks": 10, + "type": "album", + "uri": "spotify:album:5SGtrmYbIo0Dsg4kJ4qjM6" + }, + { + "album_type": "album", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4U9nsRTH2mr9L4UXEWqG5e" + }, + "href": "https://api.spotify.com/v1/artists/4U9nsRTH2mr9L4UXEWqG5e", + "id": "4U9nsRTH2mr9L4UXEWqG5e", + "name": "Bente", + "type": "artist", + "uri": "spotify:artist:4U9nsRTH2mr9L4UXEWqG5e" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/713lZ7AF55fEFSQgcttj9y" + }, + "href": "https://api.spotify.com/v1/albums/713lZ7AF55fEFSQgcttj9y", + "id": "713lZ7AF55fEFSQgcttj9y", + "images": [ + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02ab9953b1d18f8233f6b26027", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851ab9953b1d18f8233f6b26027", + "width": 64 + }, + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273ab9953b1d18f8233f6b26027", + "width": 640 + } + ], + "name": "drift", + "release_date": "2024-10-03", + "release_date_precision": "day", + "total_tracks": 14, + "type": "album", + "uri": "spotify:album:713lZ7AF55fEFSQgcttj9y" + } + ], + "limit": 20, + "next": "https://api.spotify.com/v1/browse/new-releases?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 100 + } +} diff --git a/tests/components/spotify/fixtures/playback.json b/tests/components/spotify/fixtures/playback.json new file mode 100644 index 00000000000..d0bf8e0478a --- /dev/null +++ b/tests/components/spotify/fixtures/playback.json @@ -0,0 +1,106 @@ +{ + "device": { + "id": "a19f7a03a25aff3e43f457a328a8ba67a8c44789", + "is_active": true, + "is_private_session": false, + "is_restricted": false, + "name": "Master Bathroom Speaker", + "type": "Speaker", + "volume_percent": 25 + }, + "shuffle_state": 
false, + "repeat_state": "off", + "timestamp": 1689639030791, + "context": { + "external_urls": { + "spotify": "https://open.spotify.com/playlist/2r35vbe6hHl6yDSMfjKgmm" + }, + "href": "https://api.spotify.com/v1/playlists/2r35vbe6hHl6yDSMfjKgmm", + "type": "playlist", + "uri": "spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm" + }, + "progress_ms": 249367, + "item": { + "album": { + "album_type": "album", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2Hkut4rAAyrQxRdof7FVJq" + }, + "href": "https://api.spotify.com/v1/artists/2Hkut4rAAyrQxRdof7FVJq", + "id": "2Hkut4rAAyrQxRdof7FVJq", + "name": "Rush", + "type": "artist", + "uri": "spotify:artist:2Hkut4rAAyrQxRdof7FVJq" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/3nUNxSh2szhmN7iifAKv5i" + }, + "href": "https://api.spotify.com/v1/albums/3nUNxSh2szhmN7iifAKv5i", + "id": "3nUNxSh2szhmN7iifAKv5i", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b27306c0d7ebcabad0c39b566983", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e0206c0d7ebcabad0c39b566983", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d0000485106c0d7ebcabad0c39b566983", + "width": 64 + } + ], + "name": "Permanent Waves", + "release_date": "1980-01-01", + "release_date_precision": "day", + "total_tracks": 6, + "type": "album", + "uri": "spotify:album:3nUNxSh2szhmN7iifAKv5i" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2Hkut4rAAyrQxRdof7FVJq" + }, + "href": "https://api.spotify.com/v1/artists/2Hkut4rAAyrQxRdof7FVJq", + "id": "2Hkut4rAAyrQxRdof7FVJq", + "name": "Rush", + "type": "artist", + "uri": "spotify:artist:2Hkut4rAAyrQxRdof7FVJq" + } + ], + "disc_number": 1, + "duration_ms": 296466, + "explicit": false, + "external_ids": { + "isrc": "USMR18070028" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/4e9hUiLsN4mx61ARosFi7p" + }, + "href": "https://api.spotify.com/v1/tracks/4e9hUiLsN4mx61ARosFi7p", + "id": "4e9hUiLsN4mx61ARosFi7p", + "is_local": false, + "name": "The Spirit Of Radio", + "popularity": 68, + "preview_url": "https://p.scdn.co/mp3-preview/75cc52f458b2416f33f15c499783c51119ba9a93?cid=20bbc62823a3412ba5267ea5398e52d0", + "track_number": 1, + "type": "track", + "uri": "spotify:track:4e9hUiLsN4mx61ARosFi7p" + }, + "currently_playing_type": "track", + "actions": { + "disallows": { + "skipping_prev": true, + "toggling_repeat_track": true + } + }, + "is_playing": true +} diff --git a/tests/components/spotify/fixtures/playback_episode.json b/tests/components/spotify/fixtures/playback_episode.json new file mode 100644 index 00000000000..6a9de50a534 --- /dev/null +++ b/tests/components/spotify/fixtures/playback_episode.json @@ -0,0 +1,110 @@ +{ + "device": { + "id": null, + "is_active": true, + "is_private_session": false, + "is_restricted": true, + "name": "Sonos Roam SL", + "supports_volume": true, + "type": "Speaker", + "volume_percent": 46 + }, + "shuffle_state": false, + "smart_shuffle": false, + "repeat_state": "off", + "timestamp": 1728219605131, + "context": { + "external_urls": { + "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" + }, + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD", + "type": "show", + "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD" + }, + "progress_ms": 5410, + "item": { + "audio_preview_url": 
"https://podz-content.spotifycdn.com/audio/clips/06lRxUmh8UNVTByuyxLYqh/clip_132296_192296.mp3", + "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 3690161, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW" + }, + "href": "https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW", + "html_description": "

Patreon: https://www.patreon.com/safetythird Merch: https://safetythird.shop YouTube: https://www.youtube.com/@safetythird/ Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "id": "3o0RYoo5iOMKSmEbunsbvW", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "My Squirrel Has Brain Damage - Safety Third 119", + "release_date": "2024-07-26", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "show": { + "copyrights": [], + "description": "Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.", + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" + }, + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD", + "html_description": "

Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.
", + "id": "1Y9ExMgMxoBVrgrfU7u0nD", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8b", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "languages": ["en-US"], + "media_type": "audio", + "name": "Safety Third", + "publisher": "Safety Third ", + "total_episodes": 120, + "type": "show", + "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD" + }, + "type": "episode", + "uri": "spotify:episode:3o0RYoo5iOMKSmEbunsbvW" + }, + "currently_playing_type": "episode", + "actions": { + "disallows": { + "resuming": true + } + }, + "is_playing": true +} diff --git a/tests/components/spotify/fixtures/playlist.json b/tests/components/spotify/fixtures/playlist.json new file mode 100644 index 00000000000..5680ac9109c --- /dev/null +++ b/tests/components/spotify/fixtures/playlist.json @@ -0,0 +1,986 @@ +{ + "collaborative": false, + "external_urls": { + "spotify": "https://open.spotify.com/playlist/3cEYpjA9oz9GiPac4AsH4n" + }, + "followers": { + "href": null, + "total": 562 + }, + "href": "https://api.spotify.com/v1/playlists/3cEYpjA9oz9GiPac4AsH4n?locale=en-US%2Cen%3Bq%3D0.5", + "id": "3cEYpjA9oz9GiPac4AsH4n", + "images": [ + { + "url": "https://i.scdn.co/image/ab67706c0000da848d0ce13d55f634e290f744ba", + "height": null, + "width": null + } + ], + "primary_color": null, + "name": "Spotify Web API Testing playlist", + "description": "A playlist for testing pourposes", + "type": "playlist", + "uri": "spotify:playlist:3cEYpjA9oz9GiPac4AsH4n", + "owner": { + "href": "https://api.spotify.com/v1/users/jmperezperez", + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "display_name": "JMPerez²", + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + } + }, + "public": true, + "snapshot_id": "MTgsZWFmNmZiNTIzYTg4ODM0OGQzZWQzOGI4NTdkNTJlMjU0OWFkYTUxMA==", + "tracks": { + "limit": 100, + "next": null, + "offset": 0, + "previous": null, + "href": "https://api.spotify.com/v1/playlists/3cEYpjA9oz9GiPac4AsH4n/tracks?offset=0&limit=100&locale=en-US%2Cen%3Bq%3D0.5", + "total": 5, + "items": [ + { + "added_at": "2015-01-15T12:39:22Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/04599a1fe12ffac01d2bcb08340f84c0dd2cc335?cid=c7c59b798aab4892ac040a25f7dd1575", + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "type": "album", + "album_type": "compilation", + "href": "https://api.spotify.com/v1/albums/2pANdqPvxInB0YvcDiw4ko", + "id": "2pANdqPvxInB0YvcDiw4ko", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b273ce6d0eef0c1ce77e5f95bbbc", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e02ce6d0eef0c1ce77e5f95bbbc", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d00004851ce6d0eef0c1ce77e5f95bbbc", + "width": 64, + "height": 64 + } + ], + "name": "Progressive Psy Trance Picks Vol.8", + "release_date": "2012-04-02", 
+ "release_date_precision": "day", + "uri": "spotify:album:2pANdqPvxInB0YvcDiw4ko", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0LyfQWJT6nXafLPZqxe9Of" + }, + "href": "https://api.spotify.com/v1/artists/0LyfQWJT6nXafLPZqxe9Of", + "id": "0LyfQWJT6nXafLPZqxe9Of", + "name": "Various Artists", + "type": "artist", + "uri": "spotify:artist:0LyfQWJT6nXafLPZqxe9Of" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/2pANdqPvxInB0YvcDiw4ko" + }, + "total_tracks": 20 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6eSdhw46riw2OUHgMwR8B5" + }, + "href": "https://api.spotify.com/v1/artists/6eSdhw46riw2OUHgMwR8B5", + "id": "6eSdhw46riw2OUHgMwR8B5", + "name": "Odiseo", + "type": "artist", + "uri": "spotify:artist:6eSdhw46riw2OUHgMwR8B5" + } + ], + "disc_number": 1, + "track_number": 10, + "duration_ms": 376000, + "external_ids": { + "isrc": "DEKC41200989" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/4rzfv0JLZfVhOhbSQ8o5jZ" + }, + "href": "https://api.spotify.com/v1/tracks/4rzfv0JLZfVhOhbSQ8o5jZ", + "id": "4rzfv0JLZfVhOhbSQ8o5jZ", + "name": "Api", + "popularity": 2, + "uri": "spotify:track:4rzfv0JLZfVhOhbSQ8o5jZ", + "is_local": false + } + }, + { + "added_at": "2015-01-15T12:40:03Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/d61fbb7016904624373008ea056d45e6df891071?cid=c7c59b798aab4892ac040a25f7dd1575", + "available_markets": [], + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "available_markets": [], + "type": "album", + "album_type": "compilation", + "href": "https://api.spotify.com/v1/albums/6nlfkk5GoXRL1nktlATNsy", + "id": "6nlfkk5GoXRL1nktlATNsy", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b273aa2ff29970d9a63a49dfaeb2", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e02aa2ff29970d9a63a49dfaeb2", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d00004851aa2ff29970d9a63a49dfaeb2", + "width": 64, + "height": 64 + } + ], + "name": "Wellness & Dreaming Source", + "release_date": "2015-01-09", + "release_date_precision": "day", + "uri": "spotify:album:6nlfkk5GoXRL1nktlATNsy", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0LyfQWJT6nXafLPZqxe9Of" + }, + "href": "https://api.spotify.com/v1/artists/0LyfQWJT6nXafLPZqxe9Of", + "id": "0LyfQWJT6nXafLPZqxe9Of", + "name": "Various Artists", + "type": "artist", + "uri": "spotify:artist:0LyfQWJT6nXafLPZqxe9Of" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/6nlfkk5GoXRL1nktlATNsy" + }, + "total_tracks": 25 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5VQE4WOzPu9h3HnGLuBoA6" + }, + "href": "https://api.spotify.com/v1/artists/5VQE4WOzPu9h3HnGLuBoA6", + "id": "5VQE4WOzPu9h3HnGLuBoA6", + "name": "Vlasta Marek", + "type": "artist", + "uri": "spotify:artist:5VQE4WOzPu9h3HnGLuBoA6" + } + ], + "disc_number": 1, + "track_number": 21, + "duration_ms": 730066, + "external_ids": { + "isrc": "FR2X41475057" + }, + "external_urls": { + "spotify": 
"https://open.spotify.com/track/5o3jMYOSbaVz3tkgwhELSV" + }, + "href": "https://api.spotify.com/v1/tracks/5o3jMYOSbaVz3tkgwhELSV", + "id": "5o3jMYOSbaVz3tkgwhELSV", + "name": "Is", + "popularity": 0, + "uri": "spotify:track:5o3jMYOSbaVz3tkgwhELSV", + "is_local": false + } + }, + { + "added_at": "2015-01-15T12:22:30Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/cc680ec0f5fd5ff21f0cd11ac47e10d3cbb92190?cid=c7c59b798aab4892ac040a25f7dd1575", + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "type": "album", + "album_type": "album", + "href": "https://api.spotify.com/v1/albums/4hnqM0JK4CM1phwfq1Ldyz", + "id": "4hnqM0JK4CM1phwfq1Ldyz", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b273ee0d0dce888c6c8a70db6e8b", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e02ee0d0dce888c6c8a70db6e8b", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d00004851ee0d0dce888c6c8a70db6e8b", + "width": 64, + "height": 64 + } + ], + "name": "This Is Happening", + "release_date": "2010-05-17", + "release_date_precision": "day", + "uri": "spotify:album:4hnqM0JK4CM1phwfq1Ldyz", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/066X20Nz7iquqkkCW6Jxy6" + }, + "href": "https://api.spotify.com/v1/artists/066X20Nz7iquqkkCW6Jxy6", + "id": "066X20Nz7iquqkkCW6Jxy6", + "name": "LCD Soundsystem", + "type": "artist", + "uri": "spotify:artist:066X20Nz7iquqkkCW6Jxy6" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/4hnqM0JK4CM1phwfq1Ldyz" + }, + "total_tracks": 9 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/066X20Nz7iquqkkCW6Jxy6" + }, + "href": "https://api.spotify.com/v1/artists/066X20Nz7iquqkkCW6Jxy6", + "id": "066X20Nz7iquqkkCW6Jxy6", + "name": "LCD Soundsystem", + "type": "artist", + "uri": "spotify:artist:066X20Nz7iquqkkCW6Jxy6" + } + ], + "disc_number": 1, + "track_number": 4, + "duration_ms": 401440, + "external_ids": { + "isrc": "US4GE1000022" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/4Cy0NHJ8Gh0xMdwyM9RkQm" + }, + "href": "https://api.spotify.com/v1/tracks/4Cy0NHJ8Gh0xMdwyM9RkQm", + "id": "4Cy0NHJ8Gh0xMdwyM9RkQm", + "name": "All I Want", + "popularity": 45, + "uri": "spotify:track:4Cy0NHJ8Gh0xMdwyM9RkQm", + "is_local": false + } + }, + { + "added_at": "2015-01-15T12:40:35Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/d6ecf1f98d0b1fdc8c535de8e2010d0d8b8d040b?cid=c7c59b798aab4892ac040a25f7dd1575", + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "type": "album", + "album_type": "album", + "href": "https://api.spotify.com/v1/albums/2usKFntxa98WHMcyW6xJBz", + "id": "2usKFntxa98WHMcyW6xJBz", + "images": [ + { + "url": 
"https://i.scdn.co/image/ab67616d0000b2738b7447ac3daa1da18811cf7b", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e028b7447ac3daa1da18811cf7b", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d000048518b7447ac3daa1da18811cf7b", + "width": 64, + "height": 64 + } + ], + "name": "Glenn Horiuchi Trio / Gelenn Horiuchi Quartet: Mercy / Jump Start / Endpoints / Curl Out / Earthworks / Mind Probe / Null Set / Another Space (A)", + "release_date": "2011-04-01", + "release_date_precision": "day", + "uri": "spotify:album:2usKFntxa98WHMcyW6xJBz", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/272ArH9SUAlslQqsSgPJA2" + }, + "href": "https://api.spotify.com/v1/artists/272ArH9SUAlslQqsSgPJA2", + "id": "272ArH9SUAlslQqsSgPJA2", + "name": "Glenn Horiuchi Trio", + "type": "artist", + "uri": "spotify:artist:272ArH9SUAlslQqsSgPJA2" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/2usKFntxa98WHMcyW6xJBz" + }, + "total_tracks": 8 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/272ArH9SUAlslQqsSgPJA2" + }, + "href": "https://api.spotify.com/v1/artists/272ArH9SUAlslQqsSgPJA2", + "id": "272ArH9SUAlslQqsSgPJA2", + "name": "Glenn Horiuchi Trio", + "type": "artist", + "uri": "spotify:artist:272ArH9SUAlslQqsSgPJA2" + } + ], + "disc_number": 1, + "track_number": 2, + "duration_ms": 358760, + "external_ids": { + "isrc": "USB8U1025969" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/6hvFrZNocdt2FcKGCSY5NI" + }, + "href": "https://api.spotify.com/v1/tracks/6hvFrZNocdt2FcKGCSY5NI", + "id": "6hvFrZNocdt2FcKGCSY5NI", + "name": "Endpoints", + "popularity": 0, + "uri": "spotify:track:6hvFrZNocdt2FcKGCSY5NI", + "is_local": false + } + }, + { + "added_at": "2015-01-15T12:41:10Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/47b974e463b1e862c7b3c18fa2ceedc513f2106b?cid=c7c59b798aab4892ac040a25f7dd1575", + "available_markets": [], + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "available_markets": [], + "type": "album", + "album_type": "album", + "href": "https://api.spotify.com/v1/albums/0ivM6kSawaug0j3tZVusG2", + "id": "0ivM6kSawaug0j3tZVusG2", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e0204e57d181ff062f8339d6c71", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d0000485104e57d181ff062f8339d6c71", + "width": 64, + "height": 64 + } + ], + "name": "All The Best (Spanish Version)", + "release_date": "2007-01-01", + "release_date_precision": "day", + "uri": "spotify:album:0ivM6kSawaug0j3tZVusG2", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2KftmGt9sk1yLjsAoloC3M" + }, + "href": "https://api.spotify.com/v1/artists/2KftmGt9sk1yLjsAoloC3M", + "id": "2KftmGt9sk1yLjsAoloC3M", + "name": "Zucchero", + "type": "artist", + "uri": "spotify:artist:2KftmGt9sk1yLjsAoloC3M" + } + ], + "external_urls": { + "spotify": 
"https://open.spotify.com/album/0ivM6kSawaug0j3tZVusG2" + }, + "total_tracks": 18 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2KftmGt9sk1yLjsAoloC3M" + }, + "href": "https://api.spotify.com/v1/artists/2KftmGt9sk1yLjsAoloC3M", + "id": "2KftmGt9sk1yLjsAoloC3M", + "name": "Zucchero", + "type": "artist", + "uri": "spotify:artist:2KftmGt9sk1yLjsAoloC3M" + } + ], + "disc_number": 1, + "track_number": 18, + "duration_ms": 176093, + "external_ids": { + "isrc": "ITUM70701043" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/2E2znCPaS8anQe21GLxcvJ" + }, + "href": "https://api.spotify.com/v1/tracks/2E2znCPaS8anQe21GLxcvJ", + "id": "2E2znCPaS8anQe21GLxcvJ", + "name": "You Are So Beautiful", + "popularity": 0, + "uri": "spotify:track:2E2znCPaS8anQe21GLxcvJ", + "is_local": false + } + }, + { + "added_at": "2024-11-28T11:20:58Z", + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/1112264649" + }, + "href": "https://api.spotify.com/v1/users/1112264649", + "id": "1112264649", + "type": "user", + "uri": "spotify:user:1112264649" + }, + "is_local": false, + "primary_color": null, + "track": { + "explicit": false, + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/06lRxUmh8UNVTByuyxLYqh/clip_132296_192296.mp3", + "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 3690161, + "episode": true, + "external_urls": { + "spotify": "https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW" + }, + "href": "https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW", + "html_description": "

Patreon: https://www.patreon.com/safetythird Merch: https://safetythird.shop YouTube: https://www.youtube.com/@safetythird/ Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "id": "3o0RYoo5iOMKSmEbunsbvW", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "language": "en-US", + "languages": ["en-US"], + "name": "My Squirrel Has Brain Damage - Safety Third 119", + "release_date": "2024-07-26", + "release_date_precision": "day", + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "show": { + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", 
+ "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "copyrights": [], + "description": "Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.", + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" + }, + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD", + "html_description": "

Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.
", + "id": "1Y9ExMgMxoBVrgrfU7u0nD", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "languages": ["en-US"], + "media_type": "audio", + "name": "Safety Third", + "publisher": "Safety Third ", + "total_episodes": 120, + "type": "show", + "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD" + }, + "track": false, + "type": "episode", + "uri": "spotify:episode:3o0RYoo5iOMKSmEbunsbvW" + }, + "video_thumbnail": { + "url": null + } + } + ] + } +} diff --git a/tests/components/spotify/fixtures/recently_played_tracks.json b/tests/components/spotify/fixtures/recently_played_tracks.json new file mode 100644 index 00000000000..f000d76a52f --- /dev/null +++ b/tests/components/spotify/fixtures/recently_played_tracks.json @@ -0,0 +1,964 @@ +{ + "items": [ + { + "track": { + "album": { + "album_type": "single", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" + }, + "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", + "id": "6emHCSoB4tJxTVXakbrpPz", + "name": "Karen O", + "type": "artist", + "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" + }, + "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", + "id": "2dBj3prW7gP9bCCOIQeDUf", + "name": "Danger Mouse", + "type": "artist", + "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/6Ab1VSoMD5fvlagOW2QDOJ" + }, + "href": "https://api.spotify.com/v1/albums/6Ab1VSoMD5fvlagOW2QDOJ", + "id": "6Ab1VSoMD5fvlagOW2QDOJ", + "images": [ + { + "height": 640, + "url": 
"https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02cdac047e7894fb56a0dfdcde", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851cdac047e7894fb56a0dfdcde", + "width": 64 + } + ], + "name": "Super Breath", + "release_date": "2024-07-24", + "release_date_precision": "day", + "total_tracks": 1, + "type": "album", + "uri": "spotify:album:6Ab1VSoMD5fvlagOW2QDOJ" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" + }, + "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", + "id": "6emHCSoB4tJxTVXakbrpPz", + "name": "Karen O", + "type": "artist", + "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" + }, + "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", + "id": "2dBj3prW7gP9bCCOIQeDUf", + "name": "Danger Mouse", + "type": "artist", + "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 211800, + "explicit": false, + "external_ids": { + "isrc": "QMB622409101" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/71dMjqJ8UJV700zYs5YZCh" + }, + "href": "https://api.spotify.com/v1/tracks/71dMjqJ8UJV700zYs5YZCh", + "id": "71dMjqJ8UJV700zYs5YZCh", + "is_local": false, + "name": "Super Breath", + "popularity": 58, + "preview_url": "https://p.scdn.co/mp3-preview/f1ee3ade75c6eb5cb227ed8c96de8674d8ce581f?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:71dMjqJ8UJV700zYs5YZCh" + }, + "played_at": "2024-10-06T18:09:18.556Z", + "context": null + }, + { + "track": { + "album": { + "album_type": "single", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" + }, + "href": 
"https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", + "id": "6emHCSoB4tJxTVXakbrpPz", + "name": "Karen O", + "type": "artist", + "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" + }, + "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", + "id": "2dBj3prW7gP9bCCOIQeDUf", + "name": "Danger Mouse", + "type": "artist", + "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/6Ab1VSoMD5fvlagOW2QDOJ" + }, + "href": "https://api.spotify.com/v1/albums/6Ab1VSoMD5fvlagOW2QDOJ", + "id": "6Ab1VSoMD5fvlagOW2QDOJ", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02cdac047e7894fb56a0dfdcde", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851cdac047e7894fb56a0dfdcde", + "width": 64 + } + ], + "name": "Super Breath", + "release_date": "2024-07-24", + "release_date_precision": "day", + "total_tracks": 1, + "type": "album", + "uri": "spotify:album:6Ab1VSoMD5fvlagOW2QDOJ" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" + }, + "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", + "id": "6emHCSoB4tJxTVXakbrpPz", + "name": "Karen O", + "type": "artist", + "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" + }, + "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", + "id": "2dBj3prW7gP9bCCOIQeDUf", + "name": "Danger Mouse", + "type": "artist", + "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + 
"EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 211800, + "explicit": false, + "external_ids": { + "isrc": "QMB622409101" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/71dMjqJ8UJV700zYs5YZCh" + }, + "href": "https://api.spotify.com/v1/tracks/71dMjqJ8UJV700zYs5YZCh", + "id": "71dMjqJ8UJV700zYs5YZCh", + "is_local": false, + "name": "Super Breath", + "popularity": 58, + "preview_url": "https://p.scdn.co/mp3-preview/f1ee3ade75c6eb5cb227ed8c96de8674d8ce581f?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:71dMjqJ8UJV700zYs5YZCh" + }, + "played_at": "2024-10-06T18:05:33.902Z", + "context": { + "type": "album", + "href": "https://api.spotify.com/v1/albums/57MSBg5pBQZH5bfLVDmeuP", + "external_urls": { + "spotify": "https://open.spotify.com/album/57MSBg5pBQZH5bfLVDmeuP" + }, + "uri": "spotify:album:57MSBg5pBQZH5bfLVDmeuP" + } + } + ], + "next": "https://api.spotify.com/v1/me/player/recently-played?before=1728234176022", + "cursors": { + "after": "1728238158556", + "before": "1728234176022" + }, + "limit": 20, + "href": "https://api.spotify.com/v1/me/player/recently-played" +} diff --git a/tests/components/spotify/fixtures/saved_albums.json b/tests/components/spotify/fixtures/saved_albums.json new file mode 100644 index 00000000000..0d58ecb89ea --- /dev/null +++ b/tests/components/spotify/fixtures/saved_albums.json @@ -0,0 +1,7637 @@ +{ + "href": "https://api.spotify.com/v1/me/albums?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "added_at": "2024-09-19T22:00:00Z", + "album": { + "album_type": "album", + "total_tracks": 12, + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", 
+ "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/57MSBg5pBQZH5bfLVDmeuP" + }, + "href": "https://api.spotify.com/v1/albums/57MSBg5pBQZH5bfLVDmeuP?locale=en-US%2Cen%3Bq%3D0.5", + "id": "57MSBg5pBQZH5bfLVDmeuP", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b2733126a95bb7ed4146a80c7fc6", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e023126a95bb7ed4146a80c7fc6", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d000048513126a95bb7ed4146a80c7fc6", + "height": 64, + "width": 64 + } + ], + "name": "In Waves", + "release_date": "2024-09-20", + "release_date_precision": "day", + "type": "album", + "uri": "spotify:album:57MSBg5pBQZH5bfLVDmeuP", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "tracks": { + "href": "https://api.spotify.com/v1/albums/57MSBg5pBQZH5bfLVDmeuP/tracks?offset=0&limit=50&locale=en-US,en;q%3D0.5", + "limit": 50, + "next": null, + "offset": 0, + "previous": null, + "total": 12, + "items": [ + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + 
"MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 135835, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/7uLBdV19ad7kAjU2oB1l6p" + }, + "href": "https://api.spotify.com/v1/tracks/7uLBdV19ad7kAjU2oB1l6p", + "id": "7uLBdV19ad7kAjU2oB1l6p", + "name": "Wanna", + "preview_url": "https://p.scdn.co/mp3-preview/fc112f83fe770b09e4c1bd586e5b9c144e384bd7?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:7uLBdV19ad7kAjU2oB1l6p", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 240580, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3pjX4hC8adabkXGu3X9GTC" + }, + "href": "https://api.spotify.com/v1/tracks/3pjX4hC8adabkXGu3X9GTC", + "id": "3pjX4hC8adabkXGu3X9GTC", + "name": "Treat Each Other Right", + "preview_url": "https://p.scdn.co/mp3-preview/a518fdb34284daa9a2298fd5491d6cede24a3e01?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 2, + "type": "track", + "uri": "spotify:track:3pjX4hC8adabkXGu3X9GTC", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": 
"https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3X2DdnmoANw8Rg8luHyZQb" + }, + "href": "https://api.spotify.com/v1/artists/3X2DdnmoANw8Rg8luHyZQb", + "id": "3X2DdnmoANw8Rg8luHyZQb", + "name": "Romy", + "type": "artist", + "uri": "spotify:artist:3X2DdnmoANw8Rg8luHyZQb" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4KDu9uqzqseVCpQXMa8Pvm" + }, + "href": "https://api.spotify.com/v1/artists/4KDu9uqzqseVCpQXMa8Pvm", + "id": "4KDu9uqzqseVCpQXMa8Pvm", + "name": "Oliver Sim", + "type": "artist", + "uri": "spotify:artist:4KDu9uqzqseVCpQXMa8Pvm" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3iOvXCl6edW5Um0fXEBRXy" + }, + "href": "https://api.spotify.com/v1/artists/3iOvXCl6edW5Um0fXEBRXy", + "id": "3iOvXCl6edW5Um0fXEBRXy", + "name": "The xx", + "type": "artist", + "uri": "spotify:artist:3iOvXCl6edW5Um0fXEBRXy" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 208334, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/4gBniy3TwR9o2JDBx48TlD" + }, + "href": "https://api.spotify.com/v1/tracks/4gBniy3TwR9o2JDBx48TlD", + "id": "4gBniy3TwR9o2JDBx48TlD", + "name": "Waited All Night", + "preview_url": "https://p.scdn.co/mp3-preview/b7820ac10349ca374242240f69887c073a4980f2?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 3, + "type": "track", + "uri": "spotify:track:4gBniy3TwR9o2JDBx48TlD", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0XfQBWgzisaS9ltDV9bXAS" 
+ }, + "href": "https://api.spotify.com/v1/artists/0XfQBWgzisaS9ltDV9bXAS", + "id": "0XfQBWgzisaS9ltDV9bXAS", + "name": "Honey Dijon", + "type": "artist", + "uri": "spotify:artist:0XfQBWgzisaS9ltDV9bXAS" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 222315, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/79gWc6dZ1dXH7rC67DTunz" + }, + "href": "https://api.spotify.com/v1/tracks/79gWc6dZ1dXH7rC67DTunz", + "id": "79gWc6dZ1dXH7rC67DTunz", + "name": "Baddy On The Floor", + "preview_url": "https://p.scdn.co/mp3-preview/c260664dd5adc2290fce52cb51aa8667e39c2118?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 4, + "type": "track", + "uri": "spotify:track:79gWc6dZ1dXH7rC67DTunz", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0fEfMW5bypHZ0A8eLnhwj5" + }, + "href": "https://api.spotify.com/v1/artists/0fEfMW5bypHZ0A8eLnhwj5", + "id": "0fEfMW5bypHZ0A8eLnhwj5", + "name": "Kelsey Lu", + "type": "artist", + "uri": "spotify:artist:0fEfMW5bypHZ0A8eLnhwj5" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0FNfiTQCR5o3ounOlWzm1d" + }, + "href": "https://api.spotify.com/v1/artists/0FNfiTQCR5o3ounOlWzm1d", + "id": "0FNfiTQCR5o3ounOlWzm1d", + "name": "John Glacier", + "type": "artist", + "uri": "spotify:artist:0FNfiTQCR5o3ounOlWzm1d" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/1R84VlXnFFULOsWWV8IrCQ" + }, + "href": "https://api.spotify.com/v1/artists/1R84VlXnFFULOsWWV8IrCQ", + "id": "1R84VlXnFFULOsWWV8IrCQ", + "name": "Panda Bear", + "type": "artist", + "uri": "spotify:artist:1R84VlXnFFULOsWWV8IrCQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + 
"BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 212339, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1gRMKwvMvp6LcQVMpMXQg2" + }, + "href": "https://api.spotify.com/v1/tracks/1gRMKwvMvp6LcQVMpMXQg2", + "id": "1gRMKwvMvp6LcQVMpMXQg2", + "name": "Dafodil", + "preview_url": "https://p.scdn.co/mp3-preview/173fad98e5e51a6cfb02b3cb394ab46c70d44303?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 5, + "type": "track", + "uri": "spotify:track:1gRMKwvMvp6LcQVMpMXQg2", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + 
"TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 205638, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/27D9YN3uHPD3PTXvzNtbto" + }, + "href": "https://api.spotify.com/v1/tracks/27D9YN3uHPD3PTXvzNtbto", + "id": "27D9YN3uHPD3PTXvzNtbto", + "name": "Still Summer", + "preview_url": "https://p.scdn.co/mp3-preview/e959ae6394e9d19e00cd474ed2b76bb43b6063d9?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 6, + "type": "track", + "uri": "spotify:track:27D9YN3uHPD3PTXvzNtbto", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6UE7nl9mha6s8z0wFQFIZ2" + }, + "href": "https://api.spotify.com/v1/artists/6UE7nl9mha6s8z0wFQFIZ2", + "id": "6UE7nl9mha6s8z0wFQFIZ2", + "name": "Robyn", + "type": "artist", + "uri": "spotify:artist:6UE7nl9mha6s8z0wFQFIZ2" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 202648, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/0pMj03SiaZ9bkFlXQWNhtZ" + }, + "href": "https://api.spotify.com/v1/tracks/0pMj03SiaZ9bkFlXQWNhtZ", + "id": "0pMj03SiaZ9bkFlXQWNhtZ", + "name": "Life", + "preview_url": "https://p.scdn.co/mp3-preview/261bc3bd3192ef4158b1ca42e95262113241a326?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 7, + "type": "track", + "uri": "spotify:track:0pMj03SiaZ9bkFlXQWNhtZ", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": 
"https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 222365, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/7gb0pekqHQYTGo6NWLBvT5" + }, + "href": "https://api.spotify.com/v1/tracks/7gb0pekqHQYTGo6NWLBvT5", + "id": "7gb0pekqHQYTGo6NWLBvT5", + "name": "The Feeling I Get From You", + "preview_url": "https://p.scdn.co/mp3-preview/da24fadc4bca20394435e53f5d61e8f6c36f9614?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 8, + "type": "track", + "uri": "spotify:track:7gb0pekqHQYTGo6NWLBvT5", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + 
"FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 376918, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/6pOzbdJKEr4hvXkX7VkfY6" + }, + "href": "https://api.spotify.com/v1/tracks/6pOzbdJKEr4hvXkX7VkfY6", + "id": "6pOzbdJKEr4hvXkX7VkfY6", + "name": "Breather", + "preview_url": "https://p.scdn.co/mp3-preview/dc7cd612c205968f5d6cb32696305656ae7ad888?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 9, + "type": "track", + "uri": "spotify:track:6pOzbdJKEr4hvXkX7VkfY6", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3C8RpaI3Go0yFF9whvKoED" + }, + "href": "https://api.spotify.com/v1/artists/3C8RpaI3Go0yFF9whvKoED", + "id": "3C8RpaI3Go0yFF9whvKoED", + "name": "The Avalanches", + "type": "artist", + "uri": "spotify:artist:3C8RpaI3Go0yFF9whvKoED" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 254142, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3cfgisz6DhZmooQk08P4Eu" + }, + "href": "https://api.spotify.com/v1/tracks/3cfgisz6DhZmooQk08P4Eu", + "id": "3cfgisz6DhZmooQk08P4Eu", + "name": "All You 
Children", + "preview_url": "https://p.scdn.co/mp3-preview/ff3fc064f340e47347d4677332daf6da8155ae38?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 10, + "type": "track", + "uri": "spotify:track:3cfgisz6DhZmooQk08P4Eu", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 71680, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1wpcJ6TCrKpH6KdBmrp9yN" + }, + "href": "https://api.spotify.com/v1/tracks/1wpcJ6TCrKpH6KdBmrp9yN", + "id": "1wpcJ6TCrKpH6KdBmrp9yN", + "name": "Every Single Weekend - Interlude", + "preview_url": "https://p.scdn.co/mp3-preview/2c46e4cea66da846807b70c7974d19b7837eba52?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 11, + "type": "track", + "uri": "spotify:track:1wpcJ6TCrKpH6KdBmrp9yN", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2Q4FR4Ss0mh6EvbiQBHEOU" + }, + "href": "https://api.spotify.com/v1/artists/2Q4FR4Ss0mh6EvbiQBHEOU", + "id": "2Q4FR4Ss0mh6EvbiQBHEOU", + "name": "Oona Doherty", + "type": "artist", + "uri": "spotify:artist:2Q4FR4Ss0mh6EvbiQBHEOU" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + 
"MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 337414, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/08Jhu8OZ6gCIGWQn6vP3uI" + }, + "href": "https://api.spotify.com/v1/tracks/08Jhu8OZ6gCIGWQn6vP3uI", + "id": "08Jhu8OZ6gCIGWQn6vP3uI", + "name": "Falling Together", + "preview_url": "https://p.scdn.co/mp3-preview/2fa5fc5e733495719170f672a07b172bf678a89f?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 12, + "type": "track", + "uri": "spotify:track:08Jhu8OZ6gCIGWQn6vP3uI", + "is_local": false + } + ] + }, + "copyrights": [ + { + "text": "2024 Young", + "type": "C" + }, + { + "text": "2024 Young", + "type": "P" + } + ], + "external_ids": { + "upc": "889030035653" + }, + "genres": [], + "label": "Young", + "popularity": 73 + } + }, + { + "added_at": "2024-09-05T22:00:00Z", + "album": { + "album_type": "album", + "total_tracks": 20, + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + 
"IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/3DQueEd1Ft9PHWgovDzPKh" + }, + "href": "https://api.spotify.com/v1/albums/3DQueEd1Ft9PHWgovDzPKh?locale=en-US%2Cen%3Bq%3D0.5", + "id": "3DQueEd1Ft9PHWgovDzPKh", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b2736b8a4828e057b7dc1c4a4d39", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e026b8a4828e057b7dc1c4a4d39", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d000048516b8a4828e057b7dc1c4a4d39", + "height": 64, + "width": 64 + } + ], + "name": "ten days", + "release_date": "2024-09-06", + "release_date_precision": "day", + "type": "album", + "uri": "spotify:album:3DQueEd1Ft9PHWgovDzPKh", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "tracks": { + "href": "https://api.spotify.com/v1/albums/3DQueEd1Ft9PHWgovDzPKh/tracks?offset=0&limit=50&locale=en-US,en;q%3D0.5", + "limit": 50, + "next": null, + "offset": 0, + "previous": null, + "total": 20, + "items": [ + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 30857, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/00nDbqJkHBGUFdim9M0xGc" + }, + "href": "https://api.spotify.com/v1/tracks/00nDbqJkHBGUFdim9M0xGc", + "id": "00nDbqJkHBGUFdim9M0xGc", + "name": ".one", + "preview_url": 
"https://p.scdn.co/mp3-preview/52224422e178fa35baa9ffbf097372b7031fbecf?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:00nDbqJkHBGUFdim9M0xGc", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6l7R1jntPahGxwJt7Tky8h" + }, + "href": "https://api.spotify.com/v1/artists/6l7R1jntPahGxwJt7Tky8h", + "id": "6l7R1jntPahGxwJt7Tky8h", + "name": "Obongjayar", + "type": "artist", + "uri": "spotify:artist:6l7R1jntPahGxwJt7Tky8h" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 220653, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1rf4SX7dduNbrNnOmupLzi" + }, + "href": "https://api.spotify.com/v1/tracks/1rf4SX7dduNbrNnOmupLzi", + "id": "1rf4SX7dduNbrNnOmupLzi", + "name": "adore u", + "preview_url": "https://p.scdn.co/mp3-preview/49ddf22bfe3925899cbb9ecf5d5157525becdcb4?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 2, + "type": "track", + "uri": "spotify:track:1rf4SX7dduNbrNnOmupLzi", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + 
"PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 10670, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/0lt9clHEwYyheuC9rik9UH" + }, + "href": "https://api.spotify.com/v1/tracks/0lt9clHEwYyheuC9rik9UH", + "id": "0lt9clHEwYyheuC9rik9UH", + "name": ".two", + "preview_url": "https://p.scdn.co/mp3-preview/59a26651d9742fa1856469cf1c0f8c7c55819525?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 3, + "type": "track", + "uri": "spotify:track:0lt9clHEwYyheuC9rik9UH", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6Ja6zFB5d7XRihhfMo6KzY" + }, + "href": "https://api.spotify.com/v1/artists/6Ja6zFB5d7XRihhfMo6KzY", + "id": "6Ja6zFB5d7XRihhfMo6KzY", + "name": "Jozzy", + "type": "artist", + "uri": "spotify:artist:6Ja6zFB5d7XRihhfMo6KzY" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7IrBqZo6diq3hV3GpUhrs2" + }, + "href": "https://api.spotify.com/v1/artists/7IrBqZo6diq3hV3GpUhrs2", + "id": "7IrBqZo6diq3hV3GpUhrs2", + "name": "Jim Legxacy", + "type": "artist", + "uri": "spotify:artist:7IrBqZo6diq3hV3GpUhrs2" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + 
"GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 181545, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/6twB0uYXJYW9t5GHfYaQ3i" + }, + "href": "https://api.spotify.com/v1/tracks/6twB0uYXJYW9t5GHfYaQ3i", + "id": "6twB0uYXJYW9t5GHfYaQ3i", + "name": "ten", + "preview_url": "https://p.scdn.co/mp3-preview/99fc4c0f25e64d30af9e619ea820bed60aa2b1c6?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 4, + "type": "track", + "uri": "spotify:track:6twB0uYXJYW9t5GHfYaQ3i", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 15034, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/6G7TRmzTt9tnrM0QqSVpJW" + }, + "href": "https://api.spotify.com/v1/tracks/6G7TRmzTt9tnrM0QqSVpJW", + "id": "6G7TRmzTt9tnrM0QqSVpJW", + "name": ".three", + "preview_url": "https://p.scdn.co/mp3-preview/7aeb75b213d74995df23a41d86494834bc801d78?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 5, + "type": "track", + "uri": "spotify:track:6G7TRmzTt9tnrM0QqSVpJW", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" 
+ }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2WoVwexZuODvclzULjPQtm" + }, + "href": "https://api.spotify.com/v1/artists/2WoVwexZuODvclzULjPQtm", + "id": "2WoVwexZuODvclzULjPQtm", + "name": "Sampha", + "type": "artist", + "uri": "spotify:artist:2WoVwexZuODvclzULjPQtm" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 214469, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/4IHblO52meh2jwqES1BA7X" + }, + "href": "https://api.spotify.com/v1/tracks/4IHblO52meh2jwqES1BA7X", + "id": "4IHblO52meh2jwqES1BA7X", + "name": "fear less", + "preview_url": "https://p.scdn.co/mp3-preview/c0952ae5c7423cc08ca7a53f0f182a6f20586cde?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 6, + "type": "track", + "uri": "spotify:track:4IHblO52meh2jwqES1BA7X", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + 
"AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 9856, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1wU9pfdw6ht8HKfxz6wMNq" + }, + "href": "https://api.spotify.com/v1/tracks/1wU9pfdw6ht8HKfxz6wMNq", + "id": "1wU9pfdw6ht8HKfxz6wMNq", + "name": ".four", + "preview_url": "https://p.scdn.co/mp3-preview/a4a6f591cb0cf93a7d57df33ad70ac1d8b7db349?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 7, + "type": "track", + "uri": "spotify:track:1wU9pfdw6ht8HKfxz6wMNq", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4PLsMEk2DCRVlVL2a9aZAv" + }, + "href": "https://api.spotify.com/v1/artists/4PLsMEk2DCRVlVL2a9aZAv", + "id": "4PLsMEk2DCRVlVL2a9aZAv", + "name": "SOAK", + "type": "artist", + "uri": "spotify:artist:4PLsMEk2DCRVlVL2a9aZAv" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 260997, + "explicit": false, + "external_urls": { 
+ "spotify": "https://open.spotify.com/track/2D9a9CXeo3HFtVeaNlzp4a" + }, + "href": "https://api.spotify.com/v1/tracks/2D9a9CXeo3HFtVeaNlzp4a", + "id": "2D9a9CXeo3HFtVeaNlzp4a", + "name": "just stand there", + "preview_url": "https://p.scdn.co/mp3-preview/06a95f2285831e3f4848718f5c8c2f7deeafaf80?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 8, + "type": "track", + "uri": "spotify:track:2D9a9CXeo3HFtVeaNlzp4a", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 15254, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3vTHKAYJy0hY1OkVv1qLNM" + }, + "href": "https://api.spotify.com/v1/tracks/3vTHKAYJy0hY1OkVv1qLNM", + "id": "3vTHKAYJy0hY1OkVv1qLNM", + "name": ".five", + "preview_url": "https://p.scdn.co/mp3-preview/29846c63d0cf33c05ee69ea92d412a2f473e1604?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 9, + "type": "track", + "uri": "spotify:track:3vTHKAYJy0hY1OkVv1qLNM", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3jK9MiCrA42lLAdMGUZpwa" + }, + "href": "https://api.spotify.com/v1/artists/3jK9MiCrA42lLAdMGUZpwa", + "id": "3jK9MiCrA42lLAdMGUZpwa", + "name": "Anderson .Paak", + "type": "artist", + "uri": "spotify:artist:3jK9MiCrA42lLAdMGUZpwa" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6UtYvUtXnmg5EtllDFlWp8" + }, + "href": 
"https://api.spotify.com/v1/artists/6UtYvUtXnmg5EtllDFlWp8", + "id": "6UtYvUtXnmg5EtllDFlWp8", + "name": "CHIKA", + "type": "artist", + "uri": "spotify:artist:6UtYvUtXnmg5EtllDFlWp8" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 224073, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/1qfJ6OvxrspQTmcvdIEoX6" + }, + "href": "https://api.spotify.com/v1/tracks/1qfJ6OvxrspQTmcvdIEoX6", + "id": "1qfJ6OvxrspQTmcvdIEoX6", + "name": "places to be", + "preview_url": "https://p.scdn.co/mp3-preview/5c1c520365bbd3c9e2e84be42d9d70b0ec71ed01?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 10, + "type": "track", + "uri": "spotify:track:1qfJ6OvxrspQTmcvdIEoX6", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", 
+ "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 28836, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/13H2XgH3k8SEptaoD5qeLG" + }, + "href": "https://api.spotify.com/v1/tracks/13H2XgH3k8SEptaoD5qeLG", + "id": "13H2XgH3k8SEptaoD5qeLG", + "name": ".six", + "preview_url": "https://p.scdn.co/mp3-preview/e630a09889f8e86bca24bcb54a6448e8c969936f?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 11, + "type": "track", + "uri": "spotify:track:13H2XgH3k8SEptaoD5qeLG", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/59MDSNIYoOY0WRYuodzJPD" + }, + "href": "https://api.spotify.com/v1/artists/59MDSNIYoOY0WRYuodzJPD", + "id": "59MDSNIYoOY0WRYuodzJPD", + "name": "Duskus", + "type": "artist", + "uri": "spotify:artist:59MDSNIYoOY0WRYuodzJPD" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7Eu1txygG6nJttLHbZdQOh" + }, + "href": "https://api.spotify.com/v1/artists/7Eu1txygG6nJttLHbZdQOh", + "id": "7Eu1txygG6nJttLHbZdQOh", + "name": "Four Tet", + "type": "artist", + "uri": "spotify:artist:7Eu1txygG6nJttLHbZdQOh" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3pK4EcflBpG1Kpmjk5LK2R" + }, + "href": "https://api.spotify.com/v1/artists/3pK4EcflBpG1Kpmjk5LK2R", + "id": "3pK4EcflBpG1Kpmjk5LK2R", + "name": "Joy Anonymous", + "type": "artist", + "uri": "spotify:artist:3pK4EcflBpG1Kpmjk5LK2R" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5he5w2lnU9x7JFhnwcekXX" + }, + "href": "https://api.spotify.com/v1/artists/5he5w2lnU9x7JFhnwcekXX", + "id": "5he5w2lnU9x7JFhnwcekXX", + "name": "Skrillex", + "type": "artist", + "uri": "spotify:artist:5he5w2lnU9x7JFhnwcekXX" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + 
"MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 453068, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3i9QKRl5Ql3pgUfNdYBVTc" + }, + "href": "https://api.spotify.com/v1/tracks/3i9QKRl5Ql3pgUfNdYBVTc", + "id": "3i9QKRl5Ql3pgUfNdYBVTc", + "name": "glow", + "preview_url": "https://p.scdn.co/mp3-preview/4ddd31cf8fe9f76b8aa72e2a1b5d51ccc9e00e5a?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 12, + "type": "track", + "uri": "spotify:track:3i9QKRl5Ql3pgUfNdYBVTc", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 31749, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/2OLH9ukOFDVBMuVUuy2sFW" + }, + "href": "https://api.spotify.com/v1/tracks/2OLH9ukOFDVBMuVUuy2sFW", + "id": "2OLH9ukOFDVBMuVUuy2sFW", + "name": ".seven", + "preview_url": "https://p.scdn.co/mp3-preview/cc0e8af8b91eff643b65fefdbc6b32fe2a7ad7db?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 13, + "type": "track", + "uri": "spotify:track:2OLH9ukOFDVBMuVUuy2sFW", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": 
"https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 220656, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3DzWFxyzsAVblVNndiU9CW" + }, + "href": "https://api.spotify.com/v1/tracks/3DzWFxyzsAVblVNndiU9CW", + "id": "3DzWFxyzsAVblVNndiU9CW", + "name": "i saw you", + "preview_url": "https://p.scdn.co/mp3-preview/e2b23e98a35b1ccbce037d34c2c38c49b2371142?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 14, + "type": "track", + "uri": "spotify:track:3DzWFxyzsAVblVNndiU9CW", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + 
"MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 15037, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1aTcAf7K1ym8lBcuu8nmJA" + }, + "href": "https://api.spotify.com/v1/tracks/1aTcAf7K1ym8lBcuu8nmJA", + "id": "1aTcAf7K1ym8lBcuu8nmJA", + "name": ".eight", + "preview_url": "https://p.scdn.co/mp3-preview/d2910a98ace82ead87c06aad442b0f8104263feb?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 15, + "type": "track", + "uri": "spotify:track:1aTcAf7K1ym8lBcuu8nmJA", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5s6TJEuHTr9GR894wc6VfP" + }, + "href": "https://api.spotify.com/v1/artists/5s6TJEuHTr9GR894wc6VfP", + "id": "5s6TJEuHTr9GR894wc6VfP", + "name": "Emmylou Harris", + "type": "artist", + "uri": "spotify:artist:5s6TJEuHTr9GR894wc6VfP" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 200737, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/4S05mkyTtAiWy5l4umch0X" + }, + "href": "https://api.spotify.com/v1/tracks/4S05mkyTtAiWy5l4umch0X", + "id": "4S05mkyTtAiWy5l4umch0X", + "name": "where will i be", + "preview_url": 
"https://p.scdn.co/mp3-preview/c8b398eaced8e21a97b1460480ab58a2c44364dd?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 16, + "type": "track", + "uri": "spotify:track:4S05mkyTtAiWy5l4umch0X", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 19060, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/5aNwAqN5Gk5oZIwW5KfhXN" + }, + "href": "https://api.spotify.com/v1/tracks/5aNwAqN5Gk5oZIwW5KfhXN", + "id": "5aNwAqN5Gk5oZIwW5KfhXN", + "name": ".nine", + "preview_url": "https://p.scdn.co/mp3-preview/d444f5f0921bee7a12beff1649a3cf295a822c76?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 17, + "type": "track", + "uri": "spotify:track:5aNwAqN5Gk5oZIwW5KfhXN", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3pK4EcflBpG1Kpmjk5LK2R" + }, + "href": "https://api.spotify.com/v1/artists/3pK4EcflBpG1Kpmjk5LK2R", + "id": "3pK4EcflBpG1Kpmjk5LK2R", + "name": "Joy Anonymous", + "type": "artist", + "uri": "spotify:artist:3pK4EcflBpG1Kpmjk5LK2R" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + 
"PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 344068, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/4A8tKYA7gwZzQ4jVwIv1sv" + }, + "href": "https://api.spotify.com/v1/tracks/4A8tKYA7gwZzQ4jVwIv1sv", + "id": "4A8tKYA7gwZzQ4jVwIv1sv", + "name": "peace u need", + "preview_url": "https://p.scdn.co/mp3-preview/d333ce79ff70629051c9db4c5850b2b22288df71?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 18, + "type": "track", + "uri": "spotify:track:4A8tKYA7gwZzQ4jVwIv1sv", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + 
"duration_ms": 29540, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/2feEZkLf7dZUueeVBNsdor" + }, + "href": "https://api.spotify.com/v1/tracks/2feEZkLf7dZUueeVBNsdor", + "id": "2feEZkLf7dZUueeVBNsdor", + "name": ".ten", + "preview_url": "https://p.scdn.co/mp3-preview/72d66fa681d50abf590a9cca9553b112fa03c1ee?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 19, + "type": "track", + "uri": "spotify:track:2feEZkLf7dZUueeVBNsdor", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3IunaFjvNKj98JW89JYv9u" + }, + "href": "https://api.spotify.com/v1/artists/3IunaFjvNKj98JW89JYv9u", + "id": "3IunaFjvNKj98JW89JYv9u", + "name": "The Japanese House", + "type": "artist", + "uri": "spotify:artist:3IunaFjvNKj98JW89JYv9u" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6M98IZJK2tx6x2YVyHua9K" + }, + "href": "https://api.spotify.com/v1/artists/6M98IZJK2tx6x2YVyHua9K", + "id": "6M98IZJK2tx6x2YVyHua9K", + "name": "Scott Hardkiss", + "type": "artist", + "uri": "spotify:artist:6M98IZJK2tx6x2YVyHua9K" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 314007, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/61pyjiweMDS1h930OgS0XO" + }, + "href": "https://api.spotify.com/v1/tracks/61pyjiweMDS1h930OgS0XO", + "id": "61pyjiweMDS1h930OgS0XO", + "name": "backseat", + "preview_url": "https://p.scdn.co/mp3-preview/f14667711679c1f2c09e356ed12f1a1fad7464ac?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 20, + "type": "track", + "uri": "spotify:track:61pyjiweMDS1h930OgS0XO", + "is_local": false + } + ] + }, + "copyrights": [ + { + "text": "Under exclusive licence to Warner 
Music UK Limited. An Atlantic Records UK., © 2024 Fred Gibson", + "type": "C" + }, + { + "text": "Under exclusive licence to Warner Music UK Limited. An Atlantic Records UK., ℗ 2024 Fred Gibson", + "type": "P" + } + ], + "external_ids": { + "upc": "5021732457110" + }, + "genres": [], + "label": "Atlantic Records UK", + "popularity": 80 + } + } + ], + "limit": 20, + "next": "https://api.spotify.com/v1/me/albums?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 34 +} diff --git a/tests/components/spotify/fixtures/saved_shows.json b/tests/components/spotify/fixtures/saved_shows.json new file mode 100644 index 00000000000..acfd5a1b465 --- /dev/null +++ b/tests/components/spotify/fixtures/saved_shows.json @@ -0,0 +1,462 @@ +{ + "href": "https://api.spotify.com/v1/me/shows?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "added_at": "2023-08-10T08:17:09Z", + "show": { + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "copyrights": [], + "description": "We’ll all giggle along at naughty jokes, your dating horror stories and give questionable recommendations on movies, food and relationships. This podcast is hot, fun garbage and we (Toni Lodge and Ryan Jon here in Melbourne, Australia) would love you to climb aboard and be our friends. Hosted on Acast. See acast.com/privacy for more information.", + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/show/5OzkclFjD6iAjtAuo7aIYt" + }, + "href": "https://api.spotify.com/v1/shows/5OzkclFjD6iAjtAuo7aIYt", + "html_description": "We’ll all giggle along at naughty jokes, your dating horror stories and give questionable recommendations on movies, food and relationships. This podcast is hot, fun garbage and we (Toni Lodge and Ryan Jon here in Melbourne, Australia) would love you to climb aboard and be our friends.
Hosted on Acast. See acast.com/privacy for more information.
", + "id": "5OzkclFjD6iAjtAuo7aIYt", + "images": [ + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68db5f65a943ef4f707bf79949b", + "width": 64 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fb5f65a943ef4f707bf79949b", + "width": 300 + }, + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ab5f65a943ef4f707bf79949b", + "width": 640 + } + ], + "is_externally_hosted": false, + "languages": ["en"], + "media_type": "audio", + "name": "Toni and Ryan", + "publisher": "Toni Lodge and Ryan Jon", + "total_episodes": 741, + "type": "show", + "uri": "spotify:show:5OzkclFjD6iAjtAuo7aIYt" + } + }, + { + "added_at": "2022-09-15T23:48:23Z", + "show": { + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "copyrights": [], + "description": "Welcome to BLAST Push To Talk, Counter-Strike like you’ve never heard it before.Join our host Moses and our field reporters Scrawny and Launders as they interview pro players, share their hot takes on the latest and greatest news in the CS world courtesy of EPOS.", + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/show/6XYRres0KZtnTqKcLavWR2" + }, + "href": "https://api.spotify.com/v1/shows/6XYRres0KZtnTqKcLavWR2", + "html_description": "Welcome to BLAST Push To Talk, Counter-Strike like you’ve never heard it before.
Join our host Moses and our field reporters Scrawny and Launders as they interview pro players, share their hot takes on the latest and greatest news in the CS world courtesy of EPOS.", + "id": "6XYRres0KZtnTqKcLavWR2", + "images": [ + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68d5fccb05c5685c081d5c2ad9c", + "width": 64 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1f5fccb05c5685c081d5c2ad9c", + "width": 300 + }, + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8a5fccb05c5685c081d5c2ad9c", + "width": 640 + } + ], + "is_externally_hosted": false, + "languages": ["en"], + "media_type": "audio", + "name": "BLAST Push To Talk", + "publisher": "BLAST Premier", + "total_episodes": 19, + "type": "show", + "uri": "spotify:show:6XYRres0KZtnTqKcLavWR2" + } + } + ], + "limit": 20, + "next": null, + "offset": 0, + "previous": null, + "total": 10 +} diff --git a/tests/components/spotify/fixtures/saved_tracks.json b/tests/components/spotify/fixtures/saved_tracks.json new file mode 100644 index 00000000000..e80d5b39dcd --- /dev/null +++ b/tests/components/spotify/fixtures/saved_tracks.json @@ -0,0 +1,978 @@ +{ + "href": "https://api.spotify.com/v1/me/tracks?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "added_at": "2024-10-06T11:35:02Z", + "track": { + "album": { + "album_type": "single", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7zrkALJ9ayRjzysp4QYoEg" + }, + "href": "https://api.spotify.com/v1/artists/7zrkALJ9ayRjzysp4QYoEg", + "id": "7zrkALJ9ayRjzysp4QYoEg", + "name": "Maribou State", + "type": "artist", + "uri": "spotify:artist:7zrkALJ9ayRjzysp4QYoEg" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5vssQp6TyMHsx4mihKVAsC" + }, + "href": "https://api.spotify.com/v1/artists/5vssQp6TyMHsx4mihKVAsC", + "id": "5vssQp6TyMHsx4mihKVAsC", + "name": "Holly Walker", + "type": "artist", + "uri": "spotify:artist:5vssQp6TyMHsx4mihKVAsC" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + 
"spotify": "https://open.spotify.com/album/3BYf1IG8EqDbhzdpljcFWY" + }, + "href": "https://api.spotify.com/v1/albums/3BYf1IG8EqDbhzdpljcFWY", + "id": "3BYf1IG8EqDbhzdpljcFWY", + "images": [ + { + "height": 640, + "width": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273ac9dd449e38e5e8952fd22ad" + }, + { + "height": 300, + "width": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02ac9dd449e38e5e8952fd22ad" + }, + { + "height": 64, + "width": 64, + "url": "https://i.scdn.co/image/ab67616d00004851ac9dd449e38e5e8952fd22ad" + } + ], + "is_playable": true, + "name": "Otherside", + "release_date": "2024-10-02", + "release_date_precision": "day", + "total_tracks": 2, + "type": "album", + "uri": "spotify:album:3BYf1IG8EqDbhzdpljcFWY" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7zrkALJ9ayRjzysp4QYoEg" + }, + "href": "https://api.spotify.com/v1/artists/7zrkALJ9ayRjzysp4QYoEg", + "id": "7zrkALJ9ayRjzysp4QYoEg", + "name": "Maribou State", + "type": "artist", + "uri": "spotify:artist:7zrkALJ9ayRjzysp4QYoEg" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5vssQp6TyMHsx4mihKVAsC" + }, + "href": "https://api.spotify.com/v1/artists/5vssQp6TyMHsx4mihKVAsC", + "id": "5vssQp6TyMHsx4mihKVAsC", + "name": "Holly Walker", + "type": "artist", + "uri": "spotify:artist:5vssQp6TyMHsx4mihKVAsC" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 233211, + "explicit": false, + "external_ids": { + "isrc": "GBCFB2300767" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/2pj2A25YQK4uMxhZheNx7R" + }, + "href": "https://api.spotify.com/v1/tracks/2pj2A25YQK4uMxhZheNx7R", + "id": "2pj2A25YQK4uMxhZheNx7R", + "is_local": false, + "is_playable": true, + "name": "Otherside", + "popularity": 47, + "preview_url": "https://p.scdn.co/mp3-preview/f18011c5d9a973f85ed8dce6d698e6043efdcf60?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:2pj2A25YQK4uMxhZheNx7R" + } + }, + { + "added_at": 
"2024-10-06T07:37:53Z", + "track": { + "album": { + "album_type": "single", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0HHa7ZJZxUQlg5l2mB0N0f" + }, + "href": "https://api.spotify.com/v1/artists/0HHa7ZJZxUQlg5l2mB0N0f", + "id": "0HHa7ZJZxUQlg5l2mB0N0f", + "name": "Marlon Hoffstadt", + "type": "artist", + "uri": "spotify:artist:0HHa7ZJZxUQlg5l2mB0N0f" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/68sTQgQtPe9e4Bb7OtoqET" + }, + "href": "https://api.spotify.com/v1/artists/68sTQgQtPe9e4Bb7OtoqET", + "id": "68sTQgQtPe9e4Bb7OtoqET", + "name": "Crybaby", + "type": "artist", + "uri": "spotify:artist:68sTQgQtPe9e4Bb7OtoqET" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4lBSzo2LS8asEzoePv6VLM" + }, + "href": "https://api.spotify.com/v1/artists/4lBSzo2LS8asEzoePv6VLM", + "id": "4lBSzo2LS8asEzoePv6VLM", + "name": "DJ Daddy Trance", + "type": "artist", + "uri": "spotify:artist:4lBSzo2LS8asEzoePv6VLM" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/1ElP3WFqq5sgMcc3ScIR4l" + }, + "href": "https://api.spotify.com/v1/albums/1ElP3WFqq5sgMcc3ScIR4l", + "id": "1ElP3WFqq5sgMcc3ScIR4l", + "images": [ + { + "height": 640, + "width": 640, + "url": "https://i.scdn.co/image/ab67616d0000b2733d710ab088ff797e80cc5aed" + }, + { + "height": 300, + "width": 300, + "url": "https://i.scdn.co/image/ab67616d00001e023d710ab088ff797e80cc5aed" + }, + { + "height": 64, + "width": 64, + "url": "https://i.scdn.co/image/ab67616d000048513d710ab088ff797e80cc5aed" + } + ], + "is_playable": true, + "name": "I Think I Need A DJ", + "release_date": "2024-09-20", + "release_date_precision": "day", + "total_tracks": 1, + "type": "album", + "uri": "spotify:album:1ElP3WFqq5sgMcc3ScIR4l" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0HHa7ZJZxUQlg5l2mB0N0f" + }, + "href": "https://api.spotify.com/v1/artists/0HHa7ZJZxUQlg5l2mB0N0f", + "id": "0HHa7ZJZxUQlg5l2mB0N0f", + "name": "Marlon 
Hoffstadt", + "type": "artist", + "uri": "spotify:artist:0HHa7ZJZxUQlg5l2mB0N0f" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/68sTQgQtPe9e4Bb7OtoqET" + }, + "href": "https://api.spotify.com/v1/artists/68sTQgQtPe9e4Bb7OtoqET", + "id": "68sTQgQtPe9e4Bb7OtoqET", + "name": "Crybaby", + "type": "artist", + "uri": "spotify:artist:68sTQgQtPe9e4Bb7OtoqET" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4lBSzo2LS8asEzoePv6VLM" + }, + "href": "https://api.spotify.com/v1/artists/4lBSzo2LS8asEzoePv6VLM", + "id": "4lBSzo2LS8asEzoePv6VLM", + "name": "DJ Daddy Trance", + "type": "artist", + "uri": "spotify:artist:4lBSzo2LS8asEzoePv6VLM" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 155000, + "explicit": false, + "external_ids": { + "isrc": "DEKF22400978" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/2lKOI1nwP5qZtZC7TGQVY8" + }, + "href": "https://api.spotify.com/v1/tracks/2lKOI1nwP5qZtZC7TGQVY8", + "id": "2lKOI1nwP5qZtZC7TGQVY8", + "is_local": false, + "is_playable": true, + "name": "I Think I Need A DJ", + "popularity": 53, + "preview_url": "https://p.scdn.co/mp3-preview/ad1c9d47d0f5ed500118e9dfc2558bd77612cae3?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:2lKOI1nwP5qZtZC7TGQVY8" + } + } + ], + "limit": 2, + "next": "https://api.spotify.com/v1/me/tracks?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 4816 +} diff --git a/tests/components/spotify/fixtures/show.json b/tests/components/spotify/fixtures/show.json new file mode 100644 index 00000000000..d9a89b2cc8d --- /dev/null +++ b/tests/components/spotify/fixtures/show.json @@ -0,0 +1,317 @@ +{ + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", 
+ "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "copyrights": [], + "description": "Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.", + "html_description": "
Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.
", + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" + }, + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD?locale=en-US%2Cen%3Bq%3D0.5", + "id": "1Y9ExMgMxoBVrgrfU7u0nD", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "languages": ["en-US"], + "media_type": "audio", + "name": "Safety Third", + "publisher": "Safety Third ", + "type": "show", + "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD", + "total_episodes": 120, + "episodes": { + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD/episodes?offset=0&limit=50&locale=en-US,en;q%3D0.5", + "limit": 50, + "next": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD/episodes?offset=50&limit=50&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 120, + "items": [ + { + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/06lRxUmh8UNVTByuyxLYqh/clip_132296_192296.mp3", + "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "html_description": "
Patreon: https://www.patreon.com/safetythird
Merch: https://safetythird.shop
YouTube: https://www.youtube.com/@safetythird/
Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 3690161, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW" + }, + "href": "https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW", + "id": "3o0RYoo5iOMKSmEbunsbvW", + "images": [ + { + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "height": 64, + "width": 64 + } + ], + "is_externally_hosted": true, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "My Squirrel Has Brain Damage - Safety Third 119", + "release_date": "2024-07-26", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "type": "episode", + "uri": "spotify:episode:3o0RYoo5iOMKSmEbunsbvW" + }, + { + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/6msRFio3561me28DofTad7/clip_570865_630865.mp3", + "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "html_description": "
Patreon: https://www.patreon.com/safetythird
Merch: https://safetythird.shop
YouTube: https://www.youtube.com/@safetythird/
Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 5690591, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/7CbsFHQq8ljztiUSGw46Fj" + }, + "href": "https://api.spotify.com/v1/episodes/7CbsFHQq8ljztiUSGw46Fj", + "id": "7CbsFHQq8ljztiUSGw46Fj", + "images": [ + { + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "height": 64, + "width": 64 + } + ], + "is_externally_hosted": true, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "Math Haters vs Math Nerd - Safety Third 118", + "release_date": "2024-07-18", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "type": "episode", + "uri": "spotify:episode:7CbsFHQq8ljztiUSGw46Fj" + } + ] + } +} diff --git a/tests/components/spotify/fixtures/show_episodes.json b/tests/components/spotify/fixtures/show_episodes.json new file mode 100644 index 00000000000..0189fb10c11 --- /dev/null +++ b/tests/components/spotify/fixtures/show_episodes.json @@ -0,0 +1,94 @@ +{ + "href": "https://api.spotify.com/v1/shows/0e30iIgSffe6xJhFKe35Db/episodes?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/2O4OLlf7wsvLzCeUbNB3UK/clip_1204000_1256300.mp3", + "description": "The Great War of 2077 and how the Fallout world diverged from our own.Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecastBuy cool stuff and support the show!Fallout 76: https://amzn.to/3h99B3UFallout Cookbook: https://amzn.to/3aGjeodFallout Boardgame: https://amzn.to/2EgmBq3The Art of Fallout 4: https://amzn.to/3gfQST3Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zGFallout Funco Pop Figures: https://amzn.to/3gcYsOcLinks: Live Shows every Monday Night and game streams: twitch.tv/robotsradioFallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hubTalk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhMStay plugged in on Twitter: twitter.com/falloutlorecastRobots Radio Youtube: youtube.com/c/r0b0tsSend me a note! Email: falloutlorecast@gmail.com www.robotsradio.netOur Sponsors:* Check out Bandai Namco: unknown9.com/FALLOUTLOREAdvertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 2117616, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/3ssmxnilHYaKhwRWoBGMbU" + }, + "href": "https://api.spotify.com/v1/episodes/3ssmxnilHYaKhwRWoBGMbU", + "html_description": "
The Great War of 2077 and how the Fallout world diverged from our own.
Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecast
Buy cool stuff and support the show!
Fallout 76: https://amzn.to/3h99B3U
Fallout Cookbook: https://amzn.to/3aGjeod
Fallout Boardgame: https://amzn.to/2EgmBq3
The Art of Fallout 4: https://amzn.to/3gfQST3
Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zG
Fallout Funco Pop Figures: https://amzn.to/3gcYsOc
Links: Live Shows every Monday Night and game streams: twitch.tv/robotsradio
Fallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hub
Talk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhM
Stay plugged in on Twitter: twitter.com/falloutlorecast
Robots Radio Youtube: youtube.com/c/r0b0ts
Send me a note! Email: falloutlorecast@gmail.com www.robotsradio.net
Our Sponsors:
* Check out Bandai Namco: unknown9.com/FALLOUTLORE
Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "id": "3ssmxnilHYaKhwRWoBGMbU", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8af44e9ef63c2d6fb44cb0c9bf", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1ff44e9ef63c2d6fb44cb0c9bf", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68df44e9ef63c2d6fb44cb0c9bf", + "width": 64 + } + ], + "is_externally_hosted": false, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "The Great War - Fallout Lorecast EP 1", + "release_date": "2019-01-09", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "type": "episode", + "uri": "spotify:episode:3ssmxnilHYaKhwRWoBGMbU" + }, + { + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/0PGDORXTYiO2Til9131l6X/clip_310950_371500.mp3", + "description": "Support the show to keep it going, plus get great rewards at patreon.com/falloutlorecast Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecast Audiobooks.com - Get 3 FREE Audiobooks! https://www.dpbolvw.net/click-100173810-11099382?sid=flore Gamefly - Want 2 months of rentals for the price of 1 at Gamefly? https://www.dpbolvw.net/click-100173810-10495782?sid=flore Loot Crate - 15% off Loot Crate. Click the link and use coupon code: ROBOTSRADIO https://www.dpbolvw.net/click-100173810-13902093?sid=flore GreenMan Gaming - Get awesome discounts on games. https://www.dpbolvw.net/click-100173810-13764551?sid=flore NordVPN - Stay Safe on the Internet and get 68% off. https://www.dpbolvw.net/click-100173810-12814552?sid=flore Buy cool stuff and support the show! Fallout 76: https://amzn.to/3h99B3U Fallout Cookbook: https://amzn.to/3aGjeod Fallout Boardgame: https://amzn.to/2EgmBq3 The Art of Fallout 4: https://amzn.to/3gfQST3 Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zG Fallout Funco Pop Figures: https://amzn.to/3gcYsOc Links: Live Shows every Monday Night and game streams: twitch.tv/robotsradio Fallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hub Talk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhM Stay plugged in on Twitter: twitter.com/falloutlorecast Robots Radio Youtube: youtube.com/c/r0b0ts Send me a note! Email: falloutlorecast@gmail.com www.robotsradio.netOur Sponsors:* Check out Bandai Namco: unknown9.com/FALLOUTLOREAdvertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 2376881, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/1bbj9aqeeZ3UMUlcWN0S03" + }, + "href": "https://api.spotify.com/v1/episodes/1bbj9aqeeZ3UMUlcWN0S03", + "html_description": "

Support the show to keep it going, plus get great rewards at patreon.com/falloutlorecast Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecast Audiobooks.com - Get 3 FREE Audiobooks! https://www.dpbolvw.net/click-100173810-11099382?sid=flore Gamefly - Want 2 months of rentals for the price of 1 at Gamefly? https://www.dpbolvw.net/click-100173810-10495782?sid=flore Loot Crate - 15% off Loot Crate. Click the link and use coupon code: ROBOTSRADIO https://www.dpbolvw.net/click-100173810-13902093?sid=flore GreenMan Gaming - Get awesome discounts on games. https://www.dpbolvw.net/click-100173810-13764551?sid=flore NordVPN - Stay Safe on the Internet and get 68% off. https://www.dpbolvw.net/click-100173810-12814552?sid=flore Buy cool stuff and support the show! Fallout 76: https://amzn.to/3h99B3U Fallout Cookbook: https://amzn.to/3aGjeod Fallout Boardgame: https://amzn.to/2EgmBq3 The Art of Fallout 4: https://amzn.to/3gfQST3 Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zG Fallout Funco Pop Figures: https://amzn.to/3gcYsOc Links: Live Shows every Monday Night and game streams: twitch.tv/robotsradio Fallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hub Talk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhM Stay plugged in on Twitter: twitter.com/falloutlorecast Robots Radio Youtube: youtube.com/c/r0b0ts Send me a note! Email: falloutlorecast@gmail.com www.robotsradio.net



Our Sponsors:
* Check out Bandai Namco: unknown9.com/FALLOUTLORE


Advertising Inquiries: https://redcircle.com/brands

Privacy & Opt-Out: https://redcircle.com/privacy", + "id": "1bbj9aqeeZ3UMUlcWN0S03", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8a655b54a66471089d27dbb03f", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1f655b54a66471089d27dbb03f", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68d655b54a66471089d27dbb03f", + "width": 64 + } + ], + "is_externally_hosted": false, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "Who Dropped the First Bomb?", + "release_date": "2019-01-15", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "type": "episode", + "uri": "spotify:episode:1bbj9aqeeZ3UMUlcWN0S03" + } + ], + "limit": 20, + "next": "https://api.spotify.com/v1/shows/0e30iIgSffe6xJhFKe35Db/episodes?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 323 +} diff --git a/tests/components/spotify/fixtures/top_artists.json b/tests/components/spotify/fixtures/top_artists.json new file mode 100644 index 00000000000..cd39d57e4ee --- /dev/null +++ b/tests/components/spotify/fixtures/top_artists.json @@ -0,0 +1,76 @@ +{ + "items": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/74Yus6IHfa3tWZzXXAYtS2" + }, + "followers": { + "href": null, + "total": 488 + }, + "genres": [], + "href": "https://api.spotify.com/v1/artists/74Yus6IHfa3tWZzXXAYtS2", + "id": "74Yus6IHfa3tWZzXXAYtS2", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6761610000e5ebf749f53f8bb5ffccf6105ce3", + "width": 640 + }, + { + "height": 320, + "url": "https://i.scdn.co/image/ab67616100005174f749f53f8bb5ffccf6105ce3", + "width": 320 + }, + { + "height": 160, + "url": "https://i.scdn.co/image/ab6761610000f178f749f53f8bb5ffccf6105ce3", + "width": 160 + } + ], + "name": "Onkruid", + "popularity": 7, + "type": "artist", + "uri": "spotify:artist:74Yus6IHfa3tWZzXXAYtS2" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6s5ubAp65wXoTZefE01RNR" + }, + "followers": { + "href": null, + "total": 805497 + }, + "genres": [], + "href": "https://api.spotify.com/v1/artists/6s5ubAp65wXoTZefE01RNR", + "id": "6s5ubAp65wXoTZefE01RNR", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6761610000e5eb8e750249623067fe3c557cf0", + "width": 640 + }, + { + "height": 320, + "url": "https://i.scdn.co/image/ab676161000051748e750249623067fe3c557cf0", + "width": 320 + }, + { + "height": 160, + "url": "https://i.scdn.co/image/ab6761610000f1788e750249623067fe3c557cf0", + "width": 160 + } + ], + "name": "Joost", + "popularity": 69, + "type": "artist", + "uri": "spotify:artist:6s5ubAp65wXoTZefE01RNR" + } + ], + "total": 192, + "limit": 20, + "offset": 0, + "href": "https://api.spotify.com/v1/me/top/artists?locale=en-US,en;q%3D0.5", + "next": "https://api.spotify.com/v1/me/top/artists?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "previous": null +} diff --git a/tests/components/spotify/fixtures/top_tracks.json b/tests/components/spotify/fixtures/top_tracks.json new file mode 100644 index 00000000000..9b99b5974f3 --- /dev/null +++ b/tests/components/spotify/fixtures/top_tracks.json @@ -0,0 +1,922 @@ +{ + "items": [ + { + "album": { + "album_type": "SINGLE", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0PCCGZ0wGLizHt2KZ7hhA2" + }, + "href": 
"https://api.spotify.com/v1/artists/0PCCGZ0wGLizHt2KZ7hhA2", + "id": "0PCCGZ0wGLizHt2KZ7hhA2", + "name": "Artemas", + "type": "artist", + "uri": "spotify:artist:0PCCGZ0wGLizHt2KZ7hhA2" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/45Qix7gFNajr6IofEIhhE4" + }, + "href": "https://api.spotify.com/v1/albums/45Qix7gFNajr6IofEIhhE4", + "id": "45Qix7gFNajr6IofEIhhE4", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273c88e6a4447087f41eb388b14", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02c88e6a4447087f41eb388b14", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851c88e6a4447087f41eb388b14", + "width": 64 + } + ], + "name": "i like the way you kiss me (burnt)", + "release_date": "2024-03-26", + "release_date_precision": "day", + "total_tracks": 2, + "type": "album", + "uri": "spotify:album:45Qix7gFNajr6IofEIhhE4" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0PCCGZ0wGLizHt2KZ7hhA2" + }, + "href": "https://api.spotify.com/v1/artists/0PCCGZ0wGLizHt2KZ7hhA2", + "id": "0PCCGZ0wGLizHt2KZ7hhA2", + "name": "Artemas", + "type": "artist", + "uri": "spotify:artist:0PCCGZ0wGLizHt2KZ7hhA2" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + 
"LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 142514, + "explicit": false, + "external_ids": { + "isrc": "QZJ842400387" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/3oRoMXsP2NRzm51lldj1RO" + }, + "href": "https://api.spotify.com/v1/tracks/3oRoMXsP2NRzm51lldj1RO", + "id": "3oRoMXsP2NRzm51lldj1RO", + "is_local": false, + "name": "i like the way you kiss me", + "popularity": 51, + "preview_url": "https://p.scdn.co/mp3-preview/6ce9233edb212fe7cf02273f4369d2c60c28e887?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 2, + "type": "track", + "uri": "spotify:track:3oRoMXsP2NRzm51lldj1RO" + }, + { + "album": { + "album_type": "SINGLE", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4YLtscXsxbVgi031ovDDdh" + }, + "href": "https://api.spotify.com/v1/artists/4YLtscXsxbVgi031ovDDdh", + "id": "4YLtscXsxbVgi031ovDDdh", + "name": "Chris Stapleton", + "type": "artist", + "uri": "spotify:artist:4YLtscXsxbVgi031ovDDdh" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6M2wZ9GZgrQXHCFfjv46we" + }, + "href": "https://api.spotify.com/v1/artists/6M2wZ9GZgrQXHCFfjv46we", + "id": "6M2wZ9GZgrQXHCFfjv46we", + "name": "Dua Lipa", + "type": "artist", + "uri": "spotify:artist:6M2wZ9GZgrQXHCFfjv46we" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": 
"https://open.spotify.com/album/3pjMBXbDLg2oGL7HtVxWgY" + }, + "href": "https://api.spotify.com/v1/albums/3pjMBXbDLg2oGL7HtVxWgY", + "id": "3pjMBXbDLg2oGL7HtVxWgY", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b27386f028311a5a746aa46b412f", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e0286f028311a5a746aa46b412f", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d0000485186f028311a5a746aa46b412f", + "width": 64 + } + ], + "name": "Think I'm In Love With You (With Dua Lipa) (Live From The 59th ACM Awards)", + "release_date": "2024-05-01", + "release_date_precision": "day", + "total_tracks": 1, + "type": "album", + "uri": "spotify:album:3pjMBXbDLg2oGL7HtVxWgY" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4YLtscXsxbVgi031ovDDdh" + }, + "href": "https://api.spotify.com/v1/artists/4YLtscXsxbVgi031ovDDdh", + "id": "4YLtscXsxbVgi031ovDDdh", + "name": "Chris Stapleton", + "type": "artist", + "uri": "spotify:artist:4YLtscXsxbVgi031ovDDdh" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6M2wZ9GZgrQXHCFfjv46we" + }, + "href": "https://api.spotify.com/v1/artists/6M2wZ9GZgrQXHCFfjv46we", + "id": "6M2wZ9GZgrQXHCFfjv46we", + "name": "Dua Lipa", + "type": "artist", + "uri": "spotify:artist:6M2wZ9GZgrQXHCFfjv46we" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 277066, + "explicit": false, + "external_ids": { + "isrc": "USUG12403278" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/69zgu5rlAie3IPZOEXLxyS" + }, + "href": "https://api.spotify.com/v1/tracks/69zgu5rlAie3IPZOEXLxyS", + "id": "69zgu5rlAie3IPZOEXLxyS", + "is_local": false, + "name": "Think I'm In Love With You (With Dua Lipa) (Live From The 59th ACM Awards)", + "popularity": 60, + "preview_url": "https://p.scdn.co/mp3-preview/c4fa0377538248e0a3c7e92bcf5a58be2f32b342?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": 
"spotify:track:69zgu5rlAie3IPZOEXLxyS" + } + ], + "total": 2951, + "limit": 20, + "offset": 0, + "href": "https://api.spotify.com/v1/me/top/tracks?locale=en-US,en;q%3D0.5", + "next": "https://api.spotify.com/v1/me/top/tracks?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "previous": null +} diff --git a/tests/components/spotify/snapshots/test_diagnostics.ambr b/tests/components/spotify/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..0ac375d18e3 --- /dev/null +++ b/tests/components/spotify/snapshots/test_diagnostics.ambr @@ -0,0 +1,481 @@ +# serializer version: 1 +# name: test_diagnostics_polling_instance + dict({ + 'devices': list([ + dict({ + 'device_id': '21dac6b0e0a1f181870fdc9749b2656466557666', + 'device_type': 'Computer', + 'is_active': False, + 'is_private_session': False, + 'is_restricted': False, + 'name': 'DESKTOP-BKC5SIK', + 'supports_volume': True, + 'volume_percent': 69, + }), + ]), + 'playback': dict({ + 'current_playback': dict({ + 'context': dict({ + 'context_type': 'playlist', + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/playlist/2r35vbe6hHl6yDSMfjKgmm', + }), + 'href': 'https://api.spotify.com/v1/playlists/2r35vbe6hHl6yDSMfjKgmm', + 'uri': 'spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm', + }), + 'currently_playing_type': 'track', + 'device': dict({ + 'device_id': 'a19f7a03a25aff3e43f457a328a8ba67a8c44789', + 'device_type': 'Speaker', + 'is_active': True, + 'is_private_session': False, + 'is_restricted': False, + 'name': 'Master Bathroom Speaker', + 'supports_volume': True, + 'volume_percent': 25, + }), + 'is_playing': True, + 'item': dict({ + 'album': dict({ + 'album_id': '3nUNxSh2szhmN7iifAKv5i', + 'album_type': 'album', + 'artists': list([ + dict({ + 'artist_id': '2Hkut4rAAyrQxRdof7FVJq', + 'name': 'Rush', + 'uri': 'spotify:artist:2Hkut4rAAyrQxRdof7FVJq', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b27306c0d7ebcabad0c39b566983', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e0206c0d7ebcabad0c39b566983', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d0000485106c0d7ebcabad0c39b566983', + 'width': 64, + }), + ]), + 'name': 'Permanent Waves', + 'release_date': '1980-01-01', + 'release_date_precision': 'day', + 'total_tracks': 6, + 'uri': 'spotify:album:3nUNxSh2szhmN7iifAKv5i', + }), + 'artists': list([ + dict({ + 'artist_id': '2Hkut4rAAyrQxRdof7FVJq', + 'name': 'Rush', + 'uri': 'spotify:artist:2Hkut4rAAyrQxRdof7FVJq', + }), + ]), + 'disc_number': 1, + 'duration_ms': 296466, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/4e9hUiLsN4mx61ARosFi7p', + }), + 'href': 'https://api.spotify.com/v1/tracks/4e9hUiLsN4mx61ARosFi7p', + 'is_local': False, + 'name': 'The Spirit Of Radio', + 'track_id': '4e9hUiLsN4mx61ARosFi7p', + 'track_number': 1, + 'type': 'track', + 'uri': 'spotify:track:4e9hUiLsN4mx61ARosFi7p', + }), + 'progress_ms': 249367, + 'repeat_mode': 'off', + 'shuffle': False, + }), + 'dj_playlist': False, + 'playlist': dict({ + 'collaborative': False, + 'description': 'A playlist for testing pourposes', + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/playlist/3cEYpjA9oz9GiPac4AsH4n', + }), + 'images': list([ + dict({ + 'height': None, + 'url': 'https://i.scdn.co/image/ab67706c0000da848d0ce13d55f634e290f744ba', + 'width': None, + }), + ]), + 'name': 'Spotify Web API Testing playlist', + 'object_type': 'playlist', + 
'owner': dict({ + 'display_name': 'JMPerez²', + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/user/jmperezperez', + }), + 'href': 'https://api.spotify.com/v1/users/jmperezperez', + 'object_type': 'user', + 'owner_id': 'jmperezperez', + 'uri': 'spotify:user:jmperezperez', + }), + 'playlist_id': '3cEYpjA9oz9GiPac4AsH4n', + 'public': True, + 'tracks': dict({ + 'items': list([ + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '2pANdqPvxInB0YvcDiw4ko', + 'album_type': 'compilation', + 'artists': list([ + dict({ + 'artist_id': '0LyfQWJT6nXafLPZqxe9Of', + 'name': 'Various Artists', + 'uri': 'spotify:artist:0LyfQWJT6nXafLPZqxe9Of', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b273ce6d0eef0c1ce77e5f95bbbc', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e02ce6d0eef0c1ce77e5f95bbbc', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d00004851ce6d0eef0c1ce77e5f95bbbc', + 'width': 64, + }), + ]), + 'name': 'Progressive Psy Trance Picks Vol.8', + 'release_date': '2012-04-02', + 'release_date_precision': 'day', + 'total_tracks': 20, + 'uri': 'spotify:album:2pANdqPvxInB0YvcDiw4ko', + }), + 'artists': list([ + dict({ + 'artist_id': '6eSdhw46riw2OUHgMwR8B5', + 'name': 'Odiseo', + 'uri': 'spotify:artist:6eSdhw46riw2OUHgMwR8B5', + }), + ]), + 'disc_number': 1, + 'duration_ms': 376000, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/4rzfv0JLZfVhOhbSQ8o5jZ', + }), + 'href': 'https://api.spotify.com/v1/tracks/4rzfv0JLZfVhOhbSQ8o5jZ', + 'is_local': False, + 'name': 'Api', + 'track_id': '4rzfv0JLZfVhOhbSQ8o5jZ', + 'track_number': 10, + 'type': 'track', + 'uri': 'spotify:track:4rzfv0JLZfVhOhbSQ8o5jZ', + }), + }), + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '6nlfkk5GoXRL1nktlATNsy', + 'album_type': 'compilation', + 'artists': list([ + dict({ + 'artist_id': '0LyfQWJT6nXafLPZqxe9Of', + 'name': 'Various Artists', + 'uri': 'spotify:artist:0LyfQWJT6nXafLPZqxe9Of', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b273aa2ff29970d9a63a49dfaeb2', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e02aa2ff29970d9a63a49dfaeb2', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d00004851aa2ff29970d9a63a49dfaeb2', + 'width': 64, + }), + ]), + 'name': 'Wellness & Dreaming Source', + 'release_date': '2015-01-09', + 'release_date_precision': 'day', + 'total_tracks': 25, + 'uri': 'spotify:album:6nlfkk5GoXRL1nktlATNsy', + }), + 'artists': list([ + dict({ + 'artist_id': '5VQE4WOzPu9h3HnGLuBoA6', + 'name': 'Vlasta Marek', + 'uri': 'spotify:artist:5VQE4WOzPu9h3HnGLuBoA6', + }), + ]), + 'disc_number': 1, + 'duration_ms': 730066, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/5o3jMYOSbaVz3tkgwhELSV', + }), + 'href': 'https://api.spotify.com/v1/tracks/5o3jMYOSbaVz3tkgwhELSV', + 'is_local': False, + 'name': 'Is', + 'track_id': '5o3jMYOSbaVz3tkgwhELSV', + 'track_number': 21, + 'type': 'track', + 'uri': 'spotify:track:5o3jMYOSbaVz3tkgwhELSV', + }), + }), + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '4hnqM0JK4CM1phwfq1Ldyz', + 'album_type': 'album', + 'artists': list([ + dict({ + 'artist_id': '066X20Nz7iquqkkCW6Jxy6', + 'name': 'LCD Soundsystem', + 'uri': 'spotify:artist:066X20Nz7iquqkkCW6Jxy6', + }), + ]), + 'images': list([ 
+ dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b273ee0d0dce888c6c8a70db6e8b', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e02ee0d0dce888c6c8a70db6e8b', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d00004851ee0d0dce888c6c8a70db6e8b', + 'width': 64, + }), + ]), + 'name': 'This Is Happening', + 'release_date': '2010-05-17', + 'release_date_precision': 'day', + 'total_tracks': 9, + 'uri': 'spotify:album:4hnqM0JK4CM1phwfq1Ldyz', + }), + 'artists': list([ + dict({ + 'artist_id': '066X20Nz7iquqkkCW6Jxy6', + 'name': 'LCD Soundsystem', + 'uri': 'spotify:artist:066X20Nz7iquqkkCW6Jxy6', + }), + ]), + 'disc_number': 1, + 'duration_ms': 401440, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/4Cy0NHJ8Gh0xMdwyM9RkQm', + }), + 'href': 'https://api.spotify.com/v1/tracks/4Cy0NHJ8Gh0xMdwyM9RkQm', + 'is_local': False, + 'name': 'All I Want', + 'track_id': '4Cy0NHJ8Gh0xMdwyM9RkQm', + 'track_number': 4, + 'type': 'track', + 'uri': 'spotify:track:4Cy0NHJ8Gh0xMdwyM9RkQm', + }), + }), + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '2usKFntxa98WHMcyW6xJBz', + 'album_type': 'album', + 'artists': list([ + dict({ + 'artist_id': '272ArH9SUAlslQqsSgPJA2', + 'name': 'Glenn Horiuchi Trio', + 'uri': 'spotify:artist:272ArH9SUAlslQqsSgPJA2', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b2738b7447ac3daa1da18811cf7b', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e028b7447ac3daa1da18811cf7b', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d000048518b7447ac3daa1da18811cf7b', + 'width': 64, + }), + ]), + 'name': 'Glenn Horiuchi Trio / Gelenn Horiuchi Quartet: Mercy / Jump Start / Endpoints / Curl Out / Earthworks / Mind Probe / Null Set / Another Space (A)', + 'release_date': '2011-04-01', + 'release_date_precision': 'day', + 'total_tracks': 8, + 'uri': 'spotify:album:2usKFntxa98WHMcyW6xJBz', + }), + 'artists': list([ + dict({ + 'artist_id': '272ArH9SUAlslQqsSgPJA2', + 'name': 'Glenn Horiuchi Trio', + 'uri': 'spotify:artist:272ArH9SUAlslQqsSgPJA2', + }), + ]), + 'disc_number': 1, + 'duration_ms': 358760, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/6hvFrZNocdt2FcKGCSY5NI', + }), + 'href': 'https://api.spotify.com/v1/tracks/6hvFrZNocdt2FcKGCSY5NI', + 'is_local': False, + 'name': 'Endpoints', + 'track_id': '6hvFrZNocdt2FcKGCSY5NI', + 'track_number': 2, + 'type': 'track', + 'uri': 'spotify:track:6hvFrZNocdt2FcKGCSY5NI', + }), + }), + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '0ivM6kSawaug0j3tZVusG2', + 'album_type': 'album', + 'artists': list([ + dict({ + 'artist_id': '2KftmGt9sk1yLjsAoloC3M', + 'name': 'Zucchero', + 'uri': 'spotify:artist:2KftmGt9sk1yLjsAoloC3M', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e0204e57d181ff062f8339d6c71', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d0000485104e57d181ff062f8339d6c71', + 'width': 64, + }), + ]), + 'name': 'All The Best (Spanish Version)', + 'release_date': '2007-01-01', + 'release_date_precision': 'day', + 'total_tracks': 18, + 'uri': 'spotify:album:0ivM6kSawaug0j3tZVusG2', + }), + 
'artists': list([ + dict({ + 'artist_id': '2KftmGt9sk1yLjsAoloC3M', + 'name': 'Zucchero', + 'uri': 'spotify:artist:2KftmGt9sk1yLjsAoloC3M', + }), + ]), + 'disc_number': 1, + 'duration_ms': 176093, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/2E2znCPaS8anQe21GLxcvJ', + }), + 'href': 'https://api.spotify.com/v1/tracks/2E2znCPaS8anQe21GLxcvJ', + 'is_local': False, + 'name': 'You Are So Beautiful', + 'track_id': '2E2znCPaS8anQe21GLxcvJ', + 'track_number': 18, + 'type': 'track', + 'uri': 'spotify:track:2E2znCPaS8anQe21GLxcvJ', + }), + }), + dict({ + 'track': dict({ + 'description': 'Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy', + 'duration_ms': 3690161, + 'episode_id': '3o0RYoo5iOMKSmEbunsbvW', + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW', + }), + 'href': 'https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW', + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a', + 'width': 64, + }), + ]), + 'name': 'My Squirrel Has Brain Damage - Safety Third 119', + 'release_date': '2024-07-26', + 'release_date_precision': 'day', + 'show': dict({ + 'description': 'Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". 
Sometimes we have guests, sometimes it\'s just us, but always: safety is our number three priority.', + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD', + }), + 'href': 'https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD', + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a', + 'width': 64, + }), + ]), + 'name': 'Safety Third', + 'publisher': 'Safety Third ', + 'show_id': '1Y9ExMgMxoBVrgrfU7u0nD', + 'total_episodes': 120, + 'uri': 'spotify:show:1Y9ExMgMxoBVrgrfU7u0nD', + }), + 'type': 'episode', + 'uri': 'spotify:episode:3o0RYoo5iOMKSmEbunsbvW', + }), + }), + ]), + }), + 'uri': 'spotify:playlist:3cEYpjA9oz9GiPac4AsH4n', + }), + }), + }) +# --- diff --git a/tests/components/spotify/snapshots/test_media_browser.ambr b/tests/components/spotify/snapshots/test_media_browser.ambr index 4236fcb2e79..6b217977227 100644 --- a/tests/components/spotify/snapshots/test_media_browser.ambr +++ b/tests/components/spotify/snapshots/test_media_browser.ambr @@ -84,26 +84,6 @@ 'thumbnail': None, 'title': 'Top Tracks', }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/categories', - 'media_content_type': 'spotify://categories', - 'thumbnail': None, - 'title': 'Categories', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/featured_playlists', - 'media_content_type': 'spotify://featured_playlists', - 'thumbnail': None, - 'title': 'Featured Playlists', - }), dict({ 'can_expand': True, 'can_play': False, @@ -124,31 +104,6 @@ 'title': 'Media Library', }) # --- -# name: test_browse_media_playlists - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:unique_identifier_00', - 'media_content_type': 'spotify://playlist', - 'thumbnail': None, - 'title': 'Playlist1', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_playlists', - 'media_content_type': 'spotify://current_user_playlists', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Playlists', - }) -# --- # name: test_browse_media_playlists[01J5TX5A0FF6G5V0QJX6HBC94T] dict({ 'can_expand': True, @@ -159,10 +114,20 @@ 'can_play': True, 'children_media_class': , 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:unique_identifier_00', + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:4WkWJ0EjHEFASDevhM8oPw', 'media_content_type': 'spotify://playlist', - 'thumbnail': None, - 'title': 'Playlist1', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273d061f5bfae8d38558f3698c1', + 'title': 'Hyper', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:1RHirWgH1weMsBLi4KOK9d', + 'media_content_type': 'spotify://playlist', + 
'thumbnail': 'https://mosaic.scdn.co/640/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6', + 'title': 'Ain’t got shit on me', }), ]), 'children_media_class': , @@ -184,10 +149,20 @@ 'can_play': True, 'children_media_class': , 'media_class': , - 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3/spotify:playlist:unique_identifier_00', + 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3/spotify:playlist:4WkWJ0EjHEFASDevhM8oPw', 'media_content_type': 'spotify://playlist', - 'thumbnail': None, - 'title': 'Playlist1', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273d061f5bfae8d38558f3698c1', + 'title': 'Hyper', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3/spotify:playlist:1RHirWgH1weMsBLi4KOK9d', + 'media_content_type': 'spotify://playlist', + 'thumbnail': 'https://mosaic.scdn.co/640/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6', + 'title': 'Ain’t got shit on me', }), ]), 'children_media_class': , @@ -234,3 +209,498 @@ 'title': 'Spotify', }) # --- +# name: test_browsing[album-spotify:album:3IqzqH6ShrRtie9Yd2ODyG] + dict({ + 'can_expand': True, + 'can_play': True, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:6akJGriy4njdP8fZTPGjwz', + 'media_content_type': 'spotify://track', + 'thumbnail': None, + 'title': 'All Your Friends', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:7N02bJK1amhplZ8yAapRS5', + 'media_content_type': 'spotify://track', + 'thumbnail': None, + 'title': 'New Magiks', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:3IqzqH6ShrRtie9Yd2ODyG', + 'media_content_type': 'spotify://album', + 'not_shown': 0, + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273a61a28c2f084761f8833bce6', + 'title': 'SINGLARITY', + }) +# --- +# name: test_browsing[artist-spotify:artist:0TnOYISbd1XYRBk9myaseg] + dict({ + 'can_expand': True, + 'can_play': True, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:56jg3KJcYmfL7RzYmG2O1Q', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273a0bac1996f26274685db1520', + 'title': 'Trackhouse (Daytona 500 Edition)', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:1l86t4bTNT2j1X0ZBCIv6R', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27333a4ba8f73271a749c5d953d', + 'title': 'Trackhouse', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:0TnOYISbd1XYRBk9myaseg', + 'media_content_type': 'spotify://artist', + 'not_shown': 0, + 'thumbnail': 
'https://i.scdn.co/image/ab6761610000e5ebee07b5820dd91d15d397e29c', + 'title': 'Pitbull', + }) +# --- +# name: test_browsing[current_user_followed_artists-current_user_followed_artists] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:0lLY20XpZ9yDobkbHI7u1y', + 'media_content_type': 'spotify://artist', + 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5eb0fb1220e7e3ace47ebad023e', + 'title': 'Pegboard Nerds', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:0p4nmQO2msCgU4IF37Wi3j', + 'media_content_type': 'spotify://artist', + 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5eb5c3349ddba6b8e064c1bab16', + 'title': 'Avril Lavigne', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_followed_artists', + 'media_content_type': 'spotify://current_user_followed_artists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Artists', + }) +# --- +# name: test_browsing[current_user_playlists-current_user_playlists] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:4WkWJ0EjHEFASDevhM8oPw', + 'media_content_type': 'spotify://playlist', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273d061f5bfae8d38558f3698c1', + 'title': 'Hyper', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:1RHirWgH1weMsBLi4KOK9d', + 'media_content_type': 'spotify://playlist', + 'thumbnail': 'https://mosaic.scdn.co/640/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6', + 'title': 'Ain’t got shit on me', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_playlists', + 'media_content_type': 'spotify://current_user_playlists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Playlists', + }) +# --- +# name: test_browsing[current_user_recently_played-current_user_recently_played] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:71dMjqJ8UJV700zYs5YZCh', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde', + 'title': 'Super Breath', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:71dMjqJ8UJV700zYs5YZCh', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde', + 'title': 'Super Breath', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_recently_played', + 
'media_content_type': 'spotify://current_user_recently_played', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Recently played', + }) +# --- +# name: test_browsing[current_user_saved_albums-current_user_saved_albums] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:57MSBg5pBQZH5bfLVDmeuP', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2733126a95bb7ed4146a80c7fc6', + 'title': 'In Waves', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:3DQueEd1Ft9PHWgovDzPKh', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2736b8a4828e057b7dc1c4a4d39', + 'title': 'ten days', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_albums', + 'media_content_type': 'spotify://current_user_saved_albums', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Albums', + }) +# --- +# name: test_browsing[current_user_saved_shows-current_user_saved_shows] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:show:5OzkclFjD6iAjtAuo7aIYt', + 'media_content_type': 'spotify://show', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000f68db5f65a943ef4f707bf79949b', + 'title': 'Toni and Ryan', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:show:6XYRres0KZtnTqKcLavWR2', + 'media_content_type': 'spotify://show', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000f68d5fccb05c5685c081d5c2ad9c', + 'title': 'BLAST Push To Talk', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_shows', + 'media_content_type': 'spotify://current_user_saved_shows', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Podcasts', + }) +# --- +# name: test_browsing[current_user_saved_tracks-current_user_saved_tracks] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:2pj2A25YQK4uMxhZheNx7R', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273ac9dd449e38e5e8952fd22ad', + 'title': 'Otherside', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:2lKOI1nwP5qZtZC7TGQVY8', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2733d710ab088ff797e80cc5aed', + 'title': 'I Think I Need A DJ', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_tracks', + 'media_content_type': 'spotify://current_user_saved_tracks', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Tracks', + 
}) +# --- +# name: test_browsing[current_user_top_artists-current_user_top_artists] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:74Yus6IHfa3tWZzXXAYtS2', + 'media_content_type': 'spotify://artist', + 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5ebf749f53f8bb5ffccf6105ce3', + 'title': 'Onkruid', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:6s5ubAp65wXoTZefE01RNR', + 'media_content_type': 'spotify://artist', + 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5eb8e750249623067fe3c557cf0', + 'title': 'Joost', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_artists', + 'media_content_type': 'spotify://current_user_top_artists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Top Artists', + }) +# --- +# name: test_browsing[current_user_top_tracks-current_user_top_tracks] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:3oRoMXsP2NRzm51lldj1RO', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273c88e6a4447087f41eb388b14', + 'title': 'i like the way you kiss me', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:69zgu5rlAie3IPZOEXLxyS', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27386f028311a5a746aa46b412f', + 'title': "Think I'm In Love With You (With Dua Lipa) (Live From The 59th ACM Awards)", + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_tracks', + 'media_content_type': 'spotify://current_user_top_tracks', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Top Tracks', + }) +# --- +# name: test_browsing[new_releases-new_releases] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:5SGtrmYbIo0Dsg4kJ4qjM6', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d00001e0209ba52a5116e0c3e8461f58b', + 'title': 'Moon Music', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:713lZ7AF55fEFSQgcttj9y', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d00001e02ab9953b1d18f8233f6b26027', + 'title': 'drift', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/new_releases', + 'media_content_type': 'spotify://new_releases', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'New Releases', + }) +# --- +# name: test_browsing[playlist-spotify:playlist:3cEYpjA9oz9GiPac4AsH4n] + dict({ + 'can_expand': True, + 
'can_play': True, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:4rzfv0JLZfVhOhbSQ8o5jZ', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273ce6d0eef0c1ce77e5f95bbbc', + 'title': 'Api', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:5o3jMYOSbaVz3tkgwhELSV', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273aa2ff29970d9a63a49dfaeb2', + 'title': 'Is', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:4Cy0NHJ8Gh0xMdwyM9RkQm', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273ee0d0dce888c6c8a70db6e8b', + 'title': 'All I Want', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:6hvFrZNocdt2FcKGCSY5NI', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2738b7447ac3daa1da18811cf7b', + 'title': 'Endpoints', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:2E2znCPaS8anQe21GLxcvJ', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71', + 'title': 'You Are So Beautiful', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:episode:3o0RYoo5iOMKSmEbunsbvW', + 'media_content_type': 'spotify://episode', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'title': 'My Squirrel Has Brain Damage - Safety Third 119', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:3cEYpjA9oz9GiPac4AsH4n', + 'media_content_type': 'spotify://playlist', + 'not_shown': 0, + 'thumbnail': 'https://i.scdn.co/image/ab67706c0000da848d0ce13d55f634e290f744ba', + 'title': 'Spotify Web API Testing playlist', + }) +# --- +# name: test_browsing[show-spotify:show:1Y9ExMgMxoBVrgrfU7u0nD] + dict({ + 'can_expand': True, + 'can_play': True, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:episode:3ssmxnilHYaKhwRWoBGMbU', + 'media_content_type': 'spotify://episode', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8af44e9ef63c2d6fb44cb0c9bf', + 'title': 'The Great War - Fallout Lorecast EP 1', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:episode:1bbj9aqeeZ3UMUlcWN0S03', + 'media_content_type': 'spotify://episode', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8a655b54a66471089d27dbb03f', + 'title': 'Who Dropped the First Bomb?', + }), + ]), + 'children_media_class': , + 'media_class': , + 
'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:show:1Y9ExMgMxoBVrgrfU7u0nD', + 'media_content_type': 'spotify://show', + 'not_shown': 0, + 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'title': 'Safety Third', + }) +# --- diff --git a/tests/components/spotify/snapshots/test_media_player.ambr b/tests/components/spotify/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..9692d59cfd1 --- /dev/null +++ b/tests/components/spotify/snapshots/test_media_player.ambr @@ -0,0 +1,137 @@ +# serializer version: 1 +# name: test_entities[media_player.spotify_spotify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'DESKTOP-BKC5SIK', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.spotify_spotify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'spotify', + 'unique_id': '1112264111', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[media_player.spotify_spotify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': '/api/media_player_proxy/media_player.spotify_spotify_1?token=mock-token&cache=7bb89748322acb6c', + 'friendly_name': 'Spotify spotify_1', + 'media_album_name': 'Permanent Waves', + 'media_artist': 'Rush', + 'media_content_id': 'spotify:track:4e9hUiLsN4mx61ARosFi7p', + 'media_content_type': , + 'media_duration': 296, + 'media_playlist': 'Spotify Web API Testing playlist', + 'media_position': 249, + 'media_position_updated_at': HAFakeDatetime(2023, 10, 21, 0, 0, tzinfo=datetime.timezone.utc), + 'media_title': 'The Spirit Of Radio', + 'media_track': 1, + 'repeat': , + 'shuffle': False, + 'source': 'Master Bathroom Speaker', + 'source_list': list([ + 'DESKTOP-BKC5SIK', + ]), + 'supported_features': , + 'volume_level': 0.25, + }), + 'context': , + 'entity_id': 'media_player.spotify_spotify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_podcast[media_player.spotify_spotify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'DESKTOP-BKC5SIK', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.spotify_spotify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'spotify', + 'unique_id': '1112264111', + 'unit_of_measurement': None, + }) +# --- +# name: test_podcast[media_player.spotify_spotify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': '/api/media_player_proxy/media_player.spotify_spotify_1?token=mock-token&cache=cf1e6e1e830f08d3', + 'friendly_name': 'Spotify spotify_1', + 'media_album_name': 'Safety Third', + 
'media_artist': 'Safety Third ', + 'media_content_id': 'spotify:episode:3o0RYoo5iOMKSmEbunsbvW', + 'media_content_type': , + 'media_duration': 3690, + 'media_position': 5, + 'media_position_updated_at': HAFakeDatetime(2023, 10, 21, 0, 0, tzinfo=datetime.timezone.utc), + 'media_title': 'My Squirrel Has Brain Damage - Safety Third 119', + 'repeat': , + 'shuffle': False, + 'source': 'Sonos Roam SL', + 'source_list': list([ + 'DESKTOP-BKC5SIK', + ]), + 'supported_features': , + 'volume_level': 0.46, + }), + 'context': , + 'entity_id': 'media_player.spotify_spotify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- diff --git a/tests/components/spotify/test_config_flow.py b/tests/components/spotify/test_config_flow.py index 6040fcd84f2..cb942a63568 100644 --- a/tests/components/spotify/test_config_flow.py +++ b/tests/components/spotify/test_config_flow.py @@ -2,22 +2,17 @@ from http import HTTPStatus from ipaddress import ip_address -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest -from spotipy import SpotifyException +from spotifyaio import SpotifyConnectionError from homeassistant.components import zeroconf -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) from homeassistant.components.spotify.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -34,19 +29,6 @@ BLANK_ZEROCONF_INFO = zeroconf.ZeroconfServiceInfo( ) -@pytest.fixture -async def component_setup(hass: HomeAssistant) -> None: - """Fixture for setting up the integration.""" - result = await async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done() - - await async_import_client_credential( - hass, DOMAIN, ClientCredential("client", "secret"), "cred" - ) - - assert result - - async def test_abort_if_no_configuration(hass: HomeAssistant) -> None: """Check flow aborts when no configuration is present.""" result = await hass.config_entries.flow.async_init( @@ -77,11 +59,12 @@ async def test_zeroconf_abort_if_existing_entry(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("setup_credentials") async def test_full_flow( hass: HomeAssistant, - component_setup, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + mock_spotify: MagicMock, ) -> None: """Check a full flow.""" result = await hass.config_entries.flow.async_init( @@ -99,7 +82,7 @@ async def test_full_flow( assert result["type"] is FlowResultType.EXTERNAL_STEP assert result["url"] == ( "https://accounts.spotify.com/authorize" - "?response_type=code&client_id=client" + "?response_type=code&client_id=CLIENT_ID" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" "&scope=user-modify-playback-state,user-read-playback-state,user-read-private," @@ -112,6 +95,7 @@ async def test_full_flow( assert resp.status == HTTPStatus.OK assert resp.headers["content-type"] == "text/html; charset=utf-8" + aioclient_mock.clear_requests() aioclient_mock.post( 
"https://accounts.spotify.com/api/token", json={ @@ -124,31 +108,31 @@ async def test_full_flow( with ( patch("homeassistant.components.spotify.async_setup_entry", return_value=True), - patch("homeassistant.components.spotify.config_flow.Spotify") as spotify_mock, ): - spotify_mock.return_value.current_user.return_value = { - "id": "fake_id", - "display_name": "frenck", - } result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["data"]["auth_implementation"] == "cred" + assert result["type"] is FlowResultType.CREATE_ENTRY + assert len(hass.config_entries.async_entries(DOMAIN)) == 1, result + + assert result["type"] is FlowResultType.CREATE_ENTRY result["data"]["token"].pop("expires_at") - assert result["data"]["name"] == "frenck" + assert result["data"]["name"] == "Henk" assert result["data"]["token"] == { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", "type": "Bearer", "expires_in": 60, } + assert result["result"].unique_id == "1112264111" @pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("setup_credentials") async def test_abort_if_spotify_error( hass: HomeAssistant, - component_setup, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + mock_spotify: MagicMock, ) -> None: """Check Spotify errors causes flow to abort.""" result = await hass.config_entries.flow.async_init( @@ -175,46 +159,32 @@ async def test_abort_if_spotify_error( }, ) - with patch( - "homeassistant.components.spotify.config_flow.Spotify.current_user", - side_effect=SpotifyException(400, -1, "message"), - ): - result = await hass.config_entries.flow.async_configure(result["flow_id"]) + mock_spotify.return_value.get_current_user.side_effect = SpotifyConnectionError + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "connection_error" @pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("setup_credentials") async def test_reauthentication( hass: HomeAssistant, - component_setup, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test Spotify reauthentication.""" - old_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=123, - version=1, - data={"id": "frenck", "auth_implementation": "cred"}, - ) - old_entry.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) state = config_entry_oauth2_flow._encode_jwt( hass, @@ -229,8 +199,8 @@ async def test_reauthentication( aioclient_mock.post( "https://accounts.spotify.com/api/token", json={ - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", + "refresh_token": "new-refresh-token", + "access_token": "new-access-token", "type": "Bearer", "expires_in": 60, }, @@ -238,49 +208,35 @@ async def 
test_reauthentication( with ( patch("homeassistant.components.spotify.async_setup_entry", return_value=True), - patch("homeassistant.components.spotify.config_flow.Spotify") as spotify_mock, ): - spotify_mock.return_value.current_user.return_value = {"id": "frenck"} result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["data"]["auth_implementation"] == "cred" - result["data"]["token"].pop("expires_at") - assert result["data"]["token"] == { - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + mock_config_entry.data["token"].pop("expires_at") + assert mock_config_entry.data["token"] == { + "refresh_token": "new-refresh-token", + "access_token": "new-access-token", "type": "Bearer", "expires_in": 60, } @pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("setup_credentials") async def test_reauth_account_mismatch( hass: HomeAssistant, - component_setup, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test Spotify reauthentication with different account.""" - old_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=123, - version=1, - data={"id": "frenck", "auth_implementation": "cred"}, - ) - old_entry.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) - flows = hass.config_entries.flow.async_progress() - result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) state = config_entry_oauth2_flow._encode_jwt( hass, @@ -302,19 +258,10 @@ async def test_reauth_account_mismatch( }, ) - with patch("homeassistant.components.spotify.config_flow.Spotify") as spotify_mock: - spotify_mock.return_value.current_user.return_value = {"id": "fake_id"} - result = await hass.config_entries.flow.async_configure(result["flow_id"]) + mock_spotify.return_value.get_current_user.return_value.user_id = ( + "different_user_id" + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_account_mismatch" - - -async def test_abort_if_no_reauth_entry(hass: HomeAssistant) -> None: - """Check flow aborts when no entry is known when entring reauth confirmation.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "reauth_confirm"} - ) - - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "reauth_account_mismatch" diff --git a/tests/components/spotify/test_diagnostics.py b/tests/components/spotify/test_diagnostics.py new file mode 100644 index 00000000000..6744ca11a00 --- /dev/null +++ b/tests/components/spotify/test_diagnostics.py @@ -0,0 +1,31 @@ +"""Tests for the diagnostics data provided by the Spotify integration.""" + +from unittest.mock import AsyncMock + +import pytest +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("setup_credentials") +async def test_diagnostics_polling_instance( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_spotify: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + await setup_integration(hass, mock_config_entry) + + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot(exclude=props("position_updated_at")) diff --git a/tests/components/spotify/test_init.py b/tests/components/spotify/test_init.py new file mode 100644 index 00000000000..21129d20c07 --- /dev/null +++ b/tests/components/spotify/test_init.py @@ -0,0 +1,50 @@ +"""Tests for the Spotify initialization.""" + +from unittest.mock import MagicMock + +import pytest +from spotifyaio import SpotifyConnectionError + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("setup_credentials") +async def test_setup( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify setup.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.usefixtures("setup_credentials") +@pytest.mark.parametrize( + "method", + [ + "get_current_user", + "get_devices", + ], +) +async def test_setup_with_required_calls_failing( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + method: str, +) -> None: + """Test the Spotify setup with required calls failing.""" + getattr(mock_spotify.return_value, method).side_effect = SpotifyConnectionError + mock_config_entry.add_to_hass(hass) + + assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/spotify/test_media_browser.py b/tests/components/spotify/test_media_browser.py index 2b47aed9ee3..ff3404dcfe9 100644 --- a/tests/components/spotify/test_media_browser.py +++ b/tests/components/spotify/test_media_browser.py @@ -1,44 +1,66 @@ """Test the media browser interface.""" +from unittest.mock import MagicMock + import pytest from syrupy import SnapshotAssertion +from homeassistant.components.media_player import BrowseError from homeassistant.components.spotify import DOMAIN from homeassistant.components.spotify.browse_media import async_browse_media +from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component + +from . 
import setup_integration +from .conftest import SCOPES from tests.common import MockConfigEntry -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - await async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done(wait_background_tasks=True) - - +@pytest.mark.usefixtures("setup_credentials") async def test_browse_media_root( hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, snapshot: SnapshotAssertion, - spotify_setup, + expires_at: int, ) -> None: """Test browsing the root.""" + await setup_integration(hass, mock_config_entry) + # We add a second config entry to test that lowercase entry_ids also work + config_entry = MockConfigEntry( + domain=DOMAIN, + title="spotify_2", + unique_id="second_fake_id", + data={ + CONF_ID: "second_fake_id", + "name": "spotify_account_2", + "auth_implementation": DOMAIN, + "token": { + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": SCOPES, + }, + }, + entry_id="32oesphrnacjcf7vw5bf6odx3", + ) + await setup_integration(hass, config_entry) response = await async_browse_media(hass, None, None) assert response.as_dict() == snapshot +@pytest.mark.usefixtures("setup_credentials") async def test_browse_media_categories( hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, snapshot: SnapshotAssertion, - spotify_setup, ) -> None: """Test browsing categories.""" + await setup_integration(hass, mock_config_entry) response = await async_browse_media( - hass, "spotify://library", "spotify://01J5TX5A0FF6G5V0QJX6HBC94T" + hass, "spotify://library", f"spotify://{mock_config_entry.entry_id}" ) assert response.as_dict() == snapshot @@ -46,16 +68,110 @@ async def test_browse_media_categories( @pytest.mark.parametrize( ("config_entry_id"), [("01J5TX5A0FF6G5V0QJX6HBC94T"), ("32oesphrnacjcf7vw5bf6odx3")] ) +@pytest.mark.usefixtures("setup_credentials") async def test_browse_media_playlists( hass: HomeAssistant, - snapshot: SnapshotAssertion, config_entry_id: str, - spotify_setup, + mock_spotify: MagicMock, + snapshot: SnapshotAssertion, + expires_at: int, ) -> None: """Test browsing playlists for the two config entries.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + title="Spotify", + unique_id="1112264649", + data={ + "auth_implementation": DOMAIN, + "token": { + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": SCOPES, + }, + }, + entry_id=config_entry_id, + ) + await setup_integration(hass, mock_config_entry) response = await async_browse_media( hass, "spotify://current_user_playlists", f"spotify://{config_entry_id}/current_user_playlists", ) assert response.as_dict() == snapshot + + +@pytest.mark.parametrize( + ("media_content_type", "media_content_id"), + [ + ("current_user_playlists", "current_user_playlists"), + ("current_user_followed_artists", "current_user_followed_artists"), + ("current_user_saved_albums", "current_user_saved_albums"), + ("current_user_saved_tracks", "current_user_saved_tracks"), + ("current_user_saved_shows", "current_user_saved_shows"), + ("current_user_recently_played", "current_user_recently_played"), + ("current_user_top_artists", "current_user_top_artists"), + 
("current_user_top_tracks", "current_user_top_tracks"), + ("new_releases", "new_releases"), + ("playlist", "spotify:playlist:3cEYpjA9oz9GiPac4AsH4n"), + ("album", "spotify:album:3IqzqH6ShrRtie9Yd2ODyG"), + ("artist", "spotify:artist:0TnOYISbd1XYRBk9myaseg"), + ("show", "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD"), + ], +) +@pytest.mark.usefixtures("setup_credentials") +async def test_browsing( + hass: HomeAssistant, + mock_spotify: MagicMock, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + media_content_type: str, + media_content_id: str, +) -> None: + """Test browsing playlists for the two config entries.""" + await setup_integration(hass, mock_config_entry) + response = await async_browse_media( + hass, + f"spotify://{media_content_type}", + f"spotify://{mock_config_entry.entry_id}/{media_content_id}", + ) + assert response.as_dict() == snapshot + + +@pytest.mark.parametrize( + ("media_content_id"), + [ + "artist", + None, + ], +) +@pytest.mark.usefixtures("setup_credentials") +async def test_invalid_spotify_url( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + media_content_id: str | None, +) -> None: + """Test browsing with an invalid Spotify URL.""" + await setup_integration(hass, mock_config_entry) + with pytest.raises(BrowseError, match="Invalid Spotify URL specified"): + await async_browse_media( + hass, + "spotify://artist", + media_content_id, + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_browsing_not_loaded_entry( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test browsing with an unloaded config entry.""" + with pytest.raises(BrowseError, match="Invalid Spotify account specified"): + await async_browse_media( + hass, + "spotify://artist", + f"spotify://{mock_config_entry.entry_id}/spotify:artist:0TnOYISbd1XYRBk9myaseg", + ) diff --git a/tests/components/spotify/test_media_player.py b/tests/components/spotify/test_media_player.py new file mode 100644 index 00000000000..55e0ea8f1d8 --- /dev/null +++ b/tests/components/spotify/test_media_player.py @@ -0,0 +1,643 @@ +"""Tests for the Spotify media player platform.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from spotifyaio import ( + PlaybackState, + ProductType, + RepeatMode as SpotifyRepeatMode, + SpotifyConnectionError, + SpotifyNotFoundError, +) +from syrupy import SnapshotAssertion + +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_INPUT_SOURCE_LIST, + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + SERVICE_SELECT_SOURCE, + MediaPlayerEnqueue, + MediaPlayerEntityFeature, + MediaPlayerState, + MediaType, + RepeatMode, +) +from homeassistant.components.spotify import DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_ENTITY_PICTURE, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_SEEK, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_VOLUME_SET, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_entities( + hass: HomeAssistant, + mock_spotify: MagicMock, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Spotify entities.""" + freezer.move_to("2023-10-21") + with ( + patch("secrets.token_hex", return_value="mock-token"), + patch("homeassistant.components.spotify.PLATFORMS", [Platform.MEDIA_PLAYER]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_podcast( + hass: HomeAssistant, + mock_spotify: MagicMock, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Spotify entities while listening a podcast.""" + freezer.move_to("2023-10-21") + mock_spotify.return_value.get_playback.return_value = PlaybackState.from_json( + load_fixture("playback_episode.json", DOMAIN) + ) + with ( + patch("secrets.token_hex", return_value="mock-token"), + patch("homeassistant.components.spotify.PLATFORMS", [Platform.MEDIA_PLAYER]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_free_account( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify entities with a free account.""" + mock_spotify.return_value.get_current_user.return_value.product = ProductType.FREE + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["supported_features"] == 0 + + +@pytest.mark.usefixtures("setup_credentials") +async def test_restricted_device( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify entities with a restricted device.""" + mock_spotify.return_value.get_playback.return_value.device.is_restricted = True + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert ( + state.attributes["supported_features"] == MediaPlayerEntityFeature.SELECT_SOURCE + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_spotify_dj_list( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Spotify entities with a Spotify DJ playlist.""" + mock_spotify.return_value.get_playback.return_value.context.uri = ( + "spotify:playlist:37i9dQZF1EYkqdzj48dyYq" + ) + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "DJ" + + mock_spotify.return_value.get_playlist.assert_not_called() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "DJ" + + 
mock_spotify.return_value.get_playlist.assert_not_called() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_normal_playlist( + hass: HomeAssistant, + mock_spotify: MagicMock, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, +) -> None: + """Test normal playlist switching.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "Spotify Web API Testing playlist" + + mock_spotify.return_value.get_playlist.assert_called_once_with( + "spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm" + ) + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "Spotify Web API Testing playlist" + + mock_spotify.return_value.get_playlist.assert_called_once_with( + "spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm" + ) + + mock_spotify.return_value.get_playback.return_value.context.uri = ( + "spotify:playlist:123123123123123" + ) + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playlist.assert_called_with( + "spotify:playlist:123123123123123" + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_fetching_playlist_does_not_fail( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test failing fetching playlist does not fail update.""" + mock_spotify.return_value.get_playlist.side_effect = SpotifyConnectionError + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert "media_playlist" not in state.attributes + + mock_spotify.return_value.get_playlist.assert_called_once() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_spotify.return_value.get_playlist.call_count == 2 + + +@pytest.mark.usefixtures("setup_credentials") +async def test_fetching_playlist_once( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that not being able to find a playlist doesn't retry.""" + mock_spotify.return_value.get_playlist.side_effect = SpotifyNotFoundError + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert "media_playlist" not in state.attributes + + mock_spotify.return_value.get_playlist.assert_called_once() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert "media_playlist" not in state.attributes + + mock_spotify.return_value.get_playlist.assert_called_once() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_idle( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify entities in idle state.""" + mock_spotify.return_value.get_playback.return_value = {} + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.state == MediaPlayerState.IDLE + assert ( + 
state.attributes["supported_features"] == MediaPlayerEntityFeature.SELECT_SOURCE + ) + + +@pytest.mark.usefixtures("setup_credentials") +@pytest.mark.parametrize( + ("service", "method"), + [ + (SERVICE_MEDIA_PLAY, "start_playback"), + (SERVICE_MEDIA_PAUSE, "pause_playback"), + (SERVICE_MEDIA_PREVIOUS_TRACK, "previous_track"), + (SERVICE_MEDIA_NEXT_TRACK, "next_track"), + ], +) +async def test_simple_actions( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + service: str, + method: str, +) -> None: + """Test the Spotify media player.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + service, + {ATTR_ENTITY_ID: "media_player.spotify_spotify_1"}, + blocking=True, + ) + getattr(mock_spotify.return_value, method).assert_called_once_with() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_repeat_mode( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player repeat mode.""" + await setup_integration(hass, mock_config_entry) + for mode, spotify_mode in ( + (RepeatMode.ALL, SpotifyRepeatMode.CONTEXT), + (RepeatMode.ONE, SpotifyRepeatMode.TRACK), + (RepeatMode.OFF, SpotifyRepeatMode.OFF), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + {ATTR_ENTITY_ID: "media_player.spotify_spotify_1", ATTR_MEDIA_REPEAT: mode}, + blocking=True, + ) + mock_spotify.return_value.set_repeat.assert_called_once_with(spotify_mode) + mock_spotify.return_value.set_repeat.reset_mock() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_shuffle( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player shuffle.""" + await setup_integration(hass, mock_config_entry) + for shuffle in (True, False): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_SHUFFLE: shuffle, + }, + blocking=True, + ) + mock_spotify.return_value.set_shuffle.assert_called_once_with(state=shuffle) + mock_spotify.return_value.set_shuffle.reset_mock() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_volume_level( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player volume level.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_VOLUME_LEVEL: 0.5, + }, + blocking=True, + ) + mock_spotify.return_value.set_volume.assert_called_with(50) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_seek( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player seeking.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_SEEK, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_SEEK_POSITION: 100, + }, + blocking=True, + ) + mock_spotify.return_value.seek_track.assert_called_with(100000) + + +@pytest.mark.usefixtures("setup_credentials") +@pytest.mark.parametrize( + ("media_type", "media_id"), + [ + ("spotify://track", "spotify:track:3oRoMXsP2NRzm51lldj1RO"), + ("spotify://episode", "spotify:episode:3oRoMXsP2NRzm51lldj1RO"), + 
(MediaType.MUSIC, "spotify:track:3oRoMXsP2NRzm51lldj1RO"), + ], +) +async def test_play_media_in_queue( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + media_type: str, + media_id: str, +) -> None: + """Test the Spotify media player play media.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_CONTENT_TYPE: media_type, + ATTR_MEDIA_CONTENT_ID: media_id, + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, + }, + blocking=True, + ) + mock_spotify.return_value.add_to_queue.assert_called_with(media_id, None) + + +@pytest.mark.usefixtures("setup_credentials") +@pytest.mark.parametrize( + ("media_type", "media_id", "called_with"), + [ + ( + "spotify://artist", + "spotify:artist:74Yus6IHfa3tWZzXXAYtS2", + {"context_uri": "spotify:artist:74Yus6IHfa3tWZzXXAYtS2"}, + ), + ( + "spotify://playlist", + "spotify:playlist:74Yus6IHfa3tWZzXXAYtS2", + {"context_uri": "spotify:playlist:74Yus6IHfa3tWZzXXAYtS2"}, + ), + ( + "spotify://album", + "spotify:album:74Yus6IHfa3tWZzXXAYtS2", + {"context_uri": "spotify:album:74Yus6IHfa3tWZzXXAYtS2"}, + ), + ( + "spotify://show", + "spotify:show:74Yus6IHfa3tWZzXXAYtS2", + {"context_uri": "spotify:show:74Yus6IHfa3tWZzXXAYtS2"}, + ), + ( + MediaType.MUSIC, + "spotify:track:3oRoMXsP2NRzm51lldj1RO", + {"uris": ["spotify:track:3oRoMXsP2NRzm51lldj1RO"]}, + ), + ( + "spotify://track", + "spotify:track:3oRoMXsP2NRzm51lldj1RO", + {"uris": ["spotify:track:3oRoMXsP2NRzm51lldj1RO"]}, + ), + ( + "spotify://episode", + "spotify:episode:3oRoMXsP2NRzm51lldj1RO", + {"uris": ["spotify:episode:3oRoMXsP2NRzm51lldj1RO"]}, + ), + ], +) +async def test_play_media( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + media_type: str, + media_id: str, + called_with: dict, +) -> None: + """Test the Spotify media player play media.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_CONTENT_TYPE: media_type, + ATTR_MEDIA_CONTENT_ID: media_id, + }, + blocking=True, + ) + mock_spotify.return_value.start_playback.assert_called_with(**called_with) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_add_unsupported_media_to_queue( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player add unsupported media to queue.""" + await setup_integration(hass, mock_config_entry) + with pytest.raises( + ValueError, match="Media type playlist is not supported when enqueue is ADD" + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_CONTENT_TYPE: "spotify://playlist", + ATTR_MEDIA_CONTENT_ID: "spotify:playlist:74Yus6IHfa3tWZzXXAYtS2", + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, + }, + blocking=True, + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_play_unsupported_media( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player play media.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_CONTENT_TYPE: 
MediaType.COMPOSER, + ATTR_MEDIA_CONTENT_ID: "spotify:track:3oRoMXsP2NRzm51lldj1RO", + }, + blocking=True, + ) + assert mock_spotify.return_value.start_playback.call_count == 0 + assert mock_spotify.return_value.add_to_queue.call_count == 0 + + +@pytest.mark.usefixtures("setup_credentials") +async def test_select_source( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player source select.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_INPUT_SOURCE: "DESKTOP-BKC5SIK", + }, + blocking=True, + ) + mock_spotify.return_value.transfer_playback.assert_called_with( + "21dac6b0e0a1f181870fdc9749b2656466557666" + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_source_devices( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Spotify media player available source devices.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + + assert state.attributes[ATTR_INPUT_SOURCE_LIST] == ["DESKTOP-BKC5SIK"] + + mock_spotify.return_value.get_devices.side_effect = SpotifyConnectionError + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.state != STATE_UNAVAILABLE + assert state.attributes[ATTR_INPUT_SOURCE_LIST] == ["DESKTOP-BKC5SIK"] + + +@pytest.mark.usefixtures("setup_credentials") +async def test_paused_playback( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player with paused playback.""" + mock_spotify.return_value.get_playback.return_value.is_playing = False + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.state == MediaPlayerState.PAUSED + + +@pytest.mark.usefixtures("setup_credentials") +async def test_fallback_show_image( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player with a fallback image.""" + playback = PlaybackState.from_json(load_fixture("playback_episode.json", DOMAIN)) + playback.item.images = [] + mock_spotify.return_value.get_playback.return_value = playback + with patch("secrets.token_hex", return_value="mock-token"): + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert ( + state.attributes[ATTR_ENTITY_PICTURE] + == "/api/media_player_proxy/media_player.spotify_spotify_1?token=mock-token&cache=16ff384dbae94fea" + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_no_episode_images( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player with no episode images.""" + playback = PlaybackState.from_json(load_fixture("playback_episode.json", DOMAIN)) + playback.item.images = [] + playback.item.show.images = [] + mock_spotify.return_value.get_playback.return_value = playback + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert 
ATTR_ENTITY_PICTURE not in state.attributes + + +@pytest.mark.usefixtures("setup_credentials") +async def test_no_album_images( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player with no album images.""" + mock_spotify.return_value.get_playback.return_value.item.album.images = [] + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert ATTR_ENTITY_PICTURE not in state.attributes diff --git a/tests/components/sql/test_config_flow.py b/tests/components/sql/test_config_flow.py index cb990e454b7..3f2400c0a32 100644 --- a/tests/components/sql/test_config_flow.py +++ b/tests/components/sql/test_config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from pathlib import Path from unittest.mock import patch from sqlalchemy.exc import SQLAlchemyError @@ -597,9 +598,6 @@ async def test_options_flow_db_url_empty( "homeassistant.components.sql.async_setup_entry", return_value=True, ), - patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ), ): result = await hass.config_entries.options.async_configure( result["flow_id"], @@ -621,7 +619,9 @@ async def test_options_flow_db_url_empty( async def test_full_flow_not_recorder_db( - recorder_mock: Recorder, hass: HomeAssistant + recorder_mock: Recorder, + hass: HomeAssistant, + tmp_path: Path, ) -> None: """Test full config flow with not using recorder db.""" result = await hass.config_entries.flow.async_init( @@ -629,20 +629,19 @@ async def test_full_flow_not_recorder_db( ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} + db_path = tmp_path / "db.db" + db_path_str = f"sqlite:///{db_path}" with ( patch( "homeassistant.components.sql.async_setup_entry", return_value=True, ), - patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "name": "Get Value", "query": "SELECT 5 as value", "column": "value", @@ -654,7 +653,7 @@ async def test_full_flow_not_recorder_db( assert result2["title"] == "Get Value" assert result2["options"] == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", } @@ -671,15 +670,12 @@ async def test_full_flow_not_recorder_db( "homeassistant.components.sql.async_setup_entry", return_value=True, ), - patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ), ): result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ "query": "SELECT 5 as value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "column": "value", "unit_of_measurement": "MiB", }, @@ -689,7 +685,7 @@ async def test_full_flow_not_recorder_db( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", "unit_of_measurement": "MiB", @@ -697,24 +693,22 @@ async def test_full_flow_not_recorder_db( # Need to test same again to mitigate issue with db_url removal result = await hass.config_entries.options.async_init(entry.entry_id) - with patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ): - result = await hass.config_entries.options.async_configure( - 
result["flow_id"], - user_input={ - "query": "SELECT 5 as value", - "db_url": "sqlite://path/to/db.db", - "column": "value", - "unit_of_measurement": "MB", - }, - ) - await hass.async_block_till_done() + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + "query": "SELECT 5 as value", + "db_url": db_path_str, + "column": "value", + "unit_of_measurement": "MB", + }, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", "unit_of_measurement": "MB", @@ -722,7 +716,7 @@ async def test_full_flow_not_recorder_db( assert entry.options == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", "unit_of_measurement": "MB", diff --git a/tests/components/sql/test_sensor.py b/tests/components/sql/test_sensor.py index b219ad47f3a..6b4032323d0 100644 --- a/tests/components/sql/test_sensor.py +++ b/tests/components/sql/test_sensor.py @@ -3,12 +3,13 @@ from __future__ import annotations from datetime import timedelta +from pathlib import Path +import sqlite3 from typing import Any from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from sqlalchemy import text as sql_text from sqlalchemy.exc import SQLAlchemyError from homeassistant.components.recorder import Recorder @@ -143,29 +144,37 @@ async def test_query_no_value( assert text in caplog.text -async def test_query_mssql_no_result( - recorder_mock: Recorder, hass: HomeAssistant, caplog: pytest.LogCaptureFixture +async def test_query_on_disk_sqlite_no_result( + recorder_mock: Recorder, + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + tmp_path: Path, ) -> None: """Test the SQL sensor with a query that returns no value.""" - config = { - "db_url": "mssql://", - "query": "SELECT 5 as value where 1=2", - "column": "value", - "name": "count_tables", - } - with ( - patch("homeassistant.components.sql.sensor.sqlalchemy"), - patch( - "homeassistant.components.sql.sensor.sqlalchemy.text", - return_value=sql_text("SELECT TOP 1 5 as value where 1=2"), - ), - ): - await init_integration(hass, config) + db_path = tmp_path / "test.db" + db_path_str = f"sqlite:///{db_path}" - state = hass.states.get("sensor.count_tables") + def make_test_db(): + """Create a test database.""" + conn = sqlite3.connect(db_path) + conn.execute("CREATE TABLE users (value INTEGER)") + conn.commit() + conn.close() + + await hass.async_add_executor_job(make_test_db) + + config = { + "db_url": db_path_str, + "query": "SELECT value from users", + "column": "value", + "name": "count_users", + } + await init_integration(hass, config) + + state = hass.states.get("sensor.count_users") assert state.state == STATE_UNKNOWN - text = "SELECT TOP 1 5 AS VALUE WHERE 1=2 returned no results" + text = "SELECT value from users LIMIT 1; returned no results" assert text in caplog.text diff --git a/tests/components/squeezebox/conftest.py b/tests/components/squeezebox/conftest.py new file mode 100644 index 00000000000..2dc0cabeaa6 --- /dev/null +++ b/tests/components/squeezebox/conftest.py @@ -0,0 +1,289 @@ +"""Setup the squeezebox tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from homeassistant.components.media_player import MediaType +from 
homeassistant.components.squeezebox import const +from homeassistant.components.squeezebox.browse_media import ( + MEDIA_TYPE_TO_SQUEEZEBOX, + SQUEEZEBOX_ID_BY_TYPE, +) +from homeassistant.components.squeezebox.const import ( + STATUS_QUERY_LIBRARYNAME, + STATUS_QUERY_MAC, + STATUS_QUERY_UUID, + STATUS_QUERY_VERSION, + STATUS_SENSOR_INFO_TOTAL_ALBUMS, + STATUS_SENSOR_INFO_TOTAL_ARTISTS, + STATUS_SENSOR_INFO_TOTAL_DURATION, + STATUS_SENSOR_INFO_TOTAL_GENRES, + STATUS_SENSOR_INFO_TOTAL_SONGS, + STATUS_SENSOR_LASTSCAN, + STATUS_SENSOR_OTHER_PLAYER_COUNT, + STATUS_SENSOR_PLAYER_COUNT, + STATUS_SENSOR_RESCAN, +) +from homeassistant.const import CONF_HOST, CONF_PORT, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import format_mac + +# from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry + +TEST_HOST = "1.2.3.4" +TEST_PORT = "9000" +TEST_USE_HTTPS = False +SERVER_UUIDS = [ + "12345678-1234-1234-1234-123456789012", + "87654321-4321-4321-4321-210987654321", +] +TEST_MAC = ["aa:bb:cc:dd:ee:ff", "ff:ee:dd:cc:bb:aa"] +TEST_PLAYER_NAME = "Test Player" +TEST_SERVER_NAME = "Test Server" +FAKE_VALID_ITEM_ID = "1234" +FAKE_INVALID_ITEM_ID = "4321" + +FAKE_IP = "42.42.42.42" +FAKE_MAC = "deadbeefdead" +FAKE_UUID = "deadbeefdeadbeefbeefdeafbeef42" +FAKE_PORT = 9000 +FAKE_VERSION = "42.0" + +FAKE_QUERY_RESPONSE = { + STATUS_QUERY_UUID: FAKE_UUID, + STATUS_QUERY_MAC: FAKE_MAC, + STATUS_QUERY_VERSION: FAKE_VERSION, + STATUS_SENSOR_RESCAN: 1, + STATUS_SENSOR_LASTSCAN: 0, + STATUS_QUERY_LIBRARYNAME: "FakeLib", + STATUS_SENSOR_INFO_TOTAL_ALBUMS: 4, + STATUS_SENSOR_INFO_TOTAL_ARTISTS: 2, + STATUS_SENSOR_INFO_TOTAL_DURATION: 500, + STATUS_SENSOR_INFO_TOTAL_GENRES: 1, + STATUS_SENSOR_INFO_TOTAL_SONGS: 42, + STATUS_SENSOR_PLAYER_COUNT: 10, + STATUS_SENSOR_OTHER_PLAYER_COUNT: 0, + "players_loop": [ + { + "isplaying": 0, + "name": "SqueezeLite-HA-Addon", + "seq_no": 0, + "modelname": "SqueezeLite-HA-Addon", + "playerindex": "status", + "model": "squeezelite", + "uuid": FAKE_UUID, + "canpoweroff": 1, + "ip": "192.168.78.86:57700", + "displaytype": "none", + "playerid": "f9:23:cd:37:c5:ff", + "power": 0, + "isplayer": 1, + "connected": 1, + "firmware": "v2.0.0-1488", + } + ], + "count": 1, +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.squeezebox.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Add the squeezebox mock config entry to hass.""" + config_entry = MockConfigEntry( + domain=const.DOMAIN, + unique_id=SERVER_UUIDS[0], + data={ + CONF_HOST: TEST_HOST, + CONF_PORT: TEST_PORT, + const.CONF_HTTPS: TEST_USE_HTTPS, + }, + ) + config_entry.add_to_hass(hass) + return config_entry + + +async def mock_async_browse( + media_type: MediaType, limit: int, browse_id: tuple | None = None +) -> dict | None: + """Mock the async_browse method of pysqueezebox.Player.""" + child_types = { + "favorites": "favorites", + "new music": "album", + "albums": "album", + "album": "track", + "genres": "genre", + "genre": "album", + "artists": "artist", + "artist": "album", + "titles": "title", + "title": "title", + "playlists": "playlist", + "playlist": "title", + } + fake_items = [ + { + "title": "Fake Item 1", + "id": FAKE_VALID_ITEM_ID, + "hasitems": False, + "item_type": child_types[media_type], + "artwork_track_id": 
"b35bb9e9", + "url": "file:///var/lib/squeezeboxserver/music/track_1.mp3", + }, + { + "title": "Fake Item 2", + "id": FAKE_VALID_ITEM_ID + "_2", + "hasitems": media_type == "favorites", + "item_type": child_types[media_type], + "image_url": "http://lms.internal:9000/html/images/favorites.png", + "url": "file:///var/lib/squeezeboxserver/music/track_2.mp3", + }, + { + "title": "Fake Item 3", + "id": FAKE_VALID_ITEM_ID + "_3", + "hasitems": media_type == "favorites", + "album_id": FAKE_VALID_ITEM_ID if media_type == "favorites" else None, + "url": "file:///var/lib/squeezeboxserver/music/track_3.mp3", + }, + ] + + if browse_id: + search_type, search_id = browse_id + if search_id: + if search_type == "playlist_id": + return ( + { + "title": "Fake Item 1", + "items": fake_items, + } + if search_id == FAKE_VALID_ITEM_ID + else None + ) + if search_type in SQUEEZEBOX_ID_BY_TYPE.values(): + for item in fake_items: + if item["id"] == search_id: + return { + "title": item["title"], + "items": [item], + } + return None + if search_type in SQUEEZEBOX_ID_BY_TYPE.values(): + return { + "title": search_type, + "items": fake_items, + } + return None + if media_type in MEDIA_TYPE_TO_SQUEEZEBOX.values(): + return { + "title": media_type, + "items": fake_items, + } + return None + + +@pytest.fixture +def player() -> MagicMock: + """Return a mock player.""" + return mock_pysqueezebox_player() + + +@pytest.fixture +def player_factory() -> MagicMock: + """Return a factory for creating mock players.""" + return mock_pysqueezebox_player + + +def mock_pysqueezebox_player(uuid: str) -> MagicMock: + """Mock a Lyrion Media Server player.""" + with patch( + "homeassistant.components.squeezebox.Player", autospec=True + ) as mock_player: + mock_player.async_browse = AsyncMock(side_effect=mock_async_browse) + mock_player.generate_image_url_from_track_id = MagicMock( + return_value="http://lms.internal:9000/html/images/favorites.png" + ) + mock_player.name = TEST_PLAYER_NAME + mock_player.player_id = uuid + mock_player.mode = "stop" + mock_player.playlist = None + mock_player.album = None + mock_player.artist = None + mock_player.remote_title = None + mock_player.title = None + mock_player.image_url = None + mock_player.model = "SqueezeLite" + + return mock_player + + +@pytest.fixture +def lms_factory(player_factory: MagicMock) -> MagicMock: + """Return a factory for creating mock Lyrion Media Servers with arbitrary number of players.""" + return lambda player_count, uuid: mock_pysqueezebox_server( + player_factory, player_count, uuid + ) + + +@pytest.fixture +def lms(player_factory: MagicMock) -> MagicMock: + """Mock a Lyrion Media Server with one mock player attached.""" + return mock_pysqueezebox_server(player_factory, 1, uuid=TEST_MAC[0]) + + +def mock_pysqueezebox_server( + player_factory: MagicMock, player_count: int, uuid: str +) -> MagicMock: + """Create a mock Lyrion Media Server with the given number of mock players attached.""" + with patch("homeassistant.components.squeezebox.Server", autospec=True) as mock_lms: + players = [player_factory(TEST_MAC[index]) for index in range(player_count)] + mock_lms.async_get_players = AsyncMock(return_value=players) + + mock_lms.uuid = uuid + mock_lms.name = TEST_SERVER_NAME + mock_lms.async_query = AsyncMock(return_value={"uuid": format_mac(uuid)}) + mock_lms.async_status = AsyncMock(return_value={"uuid": format_mac(uuid)}) + return mock_lms + + +async def configure_squeezebox_media_player_platform( + hass: HomeAssistant, + config_entry: MockConfigEntry, + lms: MagicMock, +) 
-> None: + """Configure a squeezebox config entry with appropriate mocks for media_player.""" + with ( + patch("homeassistant.components.squeezebox.PLATFORMS", [Platform.MEDIA_PLAYER]), + patch("homeassistant.components.squeezebox.Server", return_value=lms), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + +@pytest.fixture +async def configured_player( + hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock +) -> MagicMock: + """Fixture mocking calls to pysqueezebox Player from a configured squeezebox.""" + await configure_squeezebox_media_player_platform(hass, config_entry, lms) + return (await lms.async_get_players())[0] + + +@pytest.fixture +async def configured_players( + hass: HomeAssistant, config_entry: MockConfigEntry, lms_factory: MagicMock +) -> list[MagicMock]: + """Fixture mocking calls to two pysqueezebox Players from a configured squeezebox.""" + lms = lms_factory(2, uuid=SERVER_UUIDS[0]) + await configure_squeezebox_media_player_platform(hass, config_entry, lms) + return await lms.async_get_players() diff --git a/tests/components/squeezebox/snapshots/test_media_player.ambr b/tests/components/squeezebox/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..ddd5b9868a1 --- /dev/null +++ b/tests/components/squeezebox/snapshots/test_media_player.ambr @@ -0,0 +1,99 @@ +# serializer version: 1 +# name: test_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'squeezebox', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Ralph Irving', + 'model': 'SqueezeLite', + 'model_id': None, + 'name': 'Test Player', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- +# name: test_entity_registry[media_player.test_player-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.test_player', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'squeezebox', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[media_player.test_player-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Player', + 'group_members': list([ + ]), + 'is_volume_muted': True, + 'media_album_name': 'None', + 'media_artist': 'None', + 'media_channel': 'None', + 'media_duration': 1, + 'media_position': 1, + 'media_title': 'None', + 'query_result': dict({ + }), + 'repeat': , + 'shuffle': False, + 'supported_features': , + 'volume_level': 0.01, + }), + 'context': , + 'entity_id': 'media_player.test_player', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- diff --git 
a/tests/components/squeezebox/test_binary_sensor.py b/tests/components/squeezebox/test_binary_sensor.py new file mode 100644 index 00000000000..71cb5ceb105 --- /dev/null +++ b/tests/components/squeezebox/test_binary_sensor.py @@ -0,0 +1,35 @@ +"""Test squeezebox binary sensors.""" + +from copy import deepcopy +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .conftest import FAKE_QUERY_RESPONSE + +from tests.common import MockConfigEntry + + +async def test_binary_sensor( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test binary sensor states and attributes.""" + with ( + patch( + "homeassistant.components.squeezebox.PLATFORMS", + [Platform.BINARY_SENSOR], + ), + patch( + "homeassistant.components.squeezebox.Server.async_query", + return_value=deepcopy(FAKE_QUERY_RESPONSE), + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("binary_sensor.fakelib_needs_restart") + + assert state is not None + assert state.state == "off" diff --git a/tests/components/squeezebox/test_init.py b/tests/components/squeezebox/test_init.py new file mode 100644 index 00000000000..9074f57cdcb --- /dev/null +++ b/tests/components/squeezebox/test_init.py @@ -0,0 +1,23 @@ +"""Test squeezebox initialization.""" + +from unittest.mock import patch + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_init_api_fail( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test init fail due to API fail.""" + + # Setup component to fail... + with ( + patch( + "homeassistant.components.squeezebox.Server.async_query", + return_value=False, + ), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/squeezebox/test_media_browser.py b/tests/components/squeezebox/test_media_browser.py new file mode 100644 index 00000000000..c03c1b6344d --- /dev/null +++ b/tests/components/squeezebox/test_media_browser.py @@ -0,0 +1,216 @@ +"""Test the media browser interface.""" + +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + BrowseError, + MediaType, +) +from homeassistant.components.squeezebox.browse_media import ( + LIBRARY, + MEDIA_TYPE_TO_SQUEEZEBOX, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock +) -> None: + """Fixture for setting up the component.""" + with ( + patch("homeassistant.components.squeezebox.Server", return_value=lms), + patch( + "homeassistant.components.squeezebox.PLATFORMS", + [Platform.MEDIA_PLAYER], + ), + patch( + "homeassistant.components.squeezebox.media_player.start_server_discovery" + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + +async def test_async_browse_media_root( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the async_browse_media function at the root 
level.""" + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "", + "media_content_type": "library", + } + ) + response = await client.receive_json() + assert response["success"] + result = response["result"] + for idx, item in enumerate(result["children"]): + assert item["title"] == LIBRARY[idx] + + +async def test_async_browse_media_with_subitems( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test each category with subitems.""" + for category in ( + "Favorites", + "Artists", + "Albums", + "Playlists", + "Genres", + "New Music", + ): + with patch( + "homeassistant.components.squeezebox.browse_media.is_internal_request", + return_value=False, + ): + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "", + "media_content_type": category, + } + ) + response = await client.receive_json() + assert response["success"] + category_level = response["result"] + assert category_level["title"] == MEDIA_TYPE_TO_SQUEEZEBOX[category] + assert category_level["children"][0]["title"] == "Fake Item 1" + + # Look up a subitem + search_type = category_level["children"][0]["media_content_type"] + search_id = category_level["children"][0]["media_content_id"] + await client.send_json( + { + "id": 2, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": search_id, + "media_content_type": search_type, + } + ) + response = await client.receive_json() + assert response["success"] + search = response["result"] + assert search["title"] == "Fake Item 1" + + +async def test_async_browse_tracks( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test tracks (no subitems).""" + with patch( + "homeassistant.components.squeezebox.browse_media.is_internal_request", + return_value=True, + ): + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "", + "media_content_type": "Tracks", + } + ) + response = await client.receive_json() + assert response["success"] + tracks = response["result"] + assert tracks["title"] == "titles" + assert len(tracks["children"]) == 3 + + +async def test_async_browse_error( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Search for a non-existent item and assert error.""" + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "0", + "media_content_type": MediaType.ALBUM, + } + ) + response = await client.receive_json() + assert not response["success"] + + +async def test_play_browse_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test play browse item.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: "1234", + ATTR_MEDIA_CONTENT_TYPE: "album", + }, + ) + + +async def test_play_browse_item_nonexistent( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test trying to play an item that doesn't exist.""" + with 
pytest.raises(BrowseError): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: "0", + ATTR_MEDIA_CONTENT_TYPE: "album", + }, + blocking=True, + ) + + +async def test_play_browse_item_bad_category( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test trying to play an item whose category doesn't exist.""" + with pytest.raises(BrowseError): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: "1234", + ATTR_MEDIA_CONTENT_TYPE: "bad_category", + }, + blocking=True, + ) diff --git a/tests/components/squeezebox/test_media_player.py b/tests/components/squeezebox/test_media_player.py new file mode 100644 index 00000000000..080a2161b4d --- /dev/null +++ b/tests/components/squeezebox/test_media_player.py @@ -0,0 +1,816 @@ +"""Tests for the squeezebox media player component.""" + +from datetime import timedelta +import json +from unittest.mock import AsyncMock, MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.media_player import ( + ATTR_GROUP_MEMBERS, + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_POSITION, + ATTR_MEDIA_POSITION_UPDATED_AT, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, + ATTR_MEDIA_VOLUME_MUTED, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, + SERVICE_JOIN, + SERVICE_PLAY_MEDIA, + SERVICE_UNJOIN, + MediaPlayerEnqueue, + MediaPlayerState, + MediaType, + RepeatMode, +) +from homeassistant.components.squeezebox.const import ( + DISCOVERY_INTERVAL, + DOMAIN, + PLAYER_UPDATE_INTERVAL, + SENSOR_UPDATE_INTERVAL, +) +from homeassistant.components.squeezebox.media_player import ( + ATTR_PARAMETERS, + SERVICE_CALL_METHOD, + SERVICE_CALL_QUERY, +) +from homeassistant.const import ( + ATTR_COMMAND, + ATTR_ENTITY_ID, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PLAY_PAUSE, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_SEEK, + SERVICE_MEDIA_STOP, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + STATE_UNAVAILABLE, + STATE_UNKNOWN, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.entity_registry import EntityRegistry +from homeassistant.util.dt import utcnow + +from .conftest import FAKE_VALID_ITEM_ID, TEST_MAC + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_device_registry( + hass: HomeAssistant, + device_registry: DeviceRegistry, + configured_player: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test squeezebox device registered in the device registry.""" + reg_device = device_registry.async_get_device(identifiers={(DOMAIN, TEST_MAC[0])}) + assert reg_device is not None + assert reg_device == snapshot + + +async def test_entity_registry( + hass: HomeAssistant, + entity_registry: EntityRegistry, + configured_player: MagicMock, + snapshot: SnapshotAssertion, + config_entry: MockConfigEntry, +) -> None: + """Test squeezebox media_player entity registered in the entity 
registry.""" + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +async def test_squeezebox_player_rediscovery( + hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory +) -> None: + """Test rediscovery of a squeezebox player.""" + + assert hass.states.get("media_player.test_player").state == MediaPlayerState.IDLE + + # Make the player appear unavailable + configured_player.connected = False + freezer.tick(timedelta(seconds=PLAYER_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("media_player.test_player").state == STATE_UNAVAILABLE + + # Make the player available again + configured_player.connected = True + freezer.tick(timedelta(seconds=DISCOVERY_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + freezer.tick(timedelta(seconds=PLAYER_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("media_player.test_player").state == MediaPlayerState.IDLE + + +async def test_squeezebox_turn_on( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test turn on service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_set_power.assert_called_once_with(True) + + +async def test_squeezebox_turn_off( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test turn off service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_set_power.assert_called_once_with(False) + + +async def test_squeezebox_state( + hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory +) -> None: + """Test determining the MediaPlayerState.""" + + configured_player.power = True + configured_player.mode = "stop" + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("media_player.test_player").state == MediaPlayerState.IDLE + + configured_player.mode = "play" + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("media_player.test_player").state == MediaPlayerState.PLAYING + + configured_player.mode = "pause" + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("media_player.test_player").state == MediaPlayerState.PAUSED + + configured_player.power = False + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("media_player.test_player").state == MediaPlayerState.OFF + + +async def test_squeezebox_volume_up( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test volume up service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_set_volume.assert_called_once_with("+5") + + +async def test_squeezebox_volume_down( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test volume down service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + 
SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_set_volume.assert_called_once_with("-5") + + +async def test_squeezebox_volume_set( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test volume set service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_VOLUME_LEVEL: 0.5}, + blocking=True, + ) + configured_player.async_set_volume.assert_called_once_with("50") + + +async def test_squeezebox_volume_property( + hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory +) -> None: + """Test volume property.""" + + configured_player.volume = 50 + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_VOLUME_LEVEL] + == 0.5 + ) + + configured_player.volume = None + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + ATTR_MEDIA_VOLUME_LEVEL + not in hass.states.get("media_player.test_player").attributes + ) + + +async def test_squeezebox_mute( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test mute service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, + {ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_VOLUME_MUTED: True}, + blocking=True, + ) + configured_player.async_set_muting.assert_called_once_with(True) + + +async def test_squeezebox_unmute( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test unmute service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, + {ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_VOLUME_MUTED: False}, + blocking=True, + ) + configured_player.async_set_muting.assert_called_once_with(False) + + +async def test_squeezebox_mute_property( + hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory +) -> None: + """Test the mute property.""" + + configured_player.muting = True + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_VOLUME_MUTED] + is True + ) + + configured_player.muting = False + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_VOLUME_MUTED] + is False + ) + + +async def test_squeezebox_repeat_mode( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test set repeat mode service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_REPEAT: RepeatMode.ALL, + }, + blocking=True, + ) + configured_player.async_set_repeat.assert_called_once_with("playlist") + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_REPEAT: RepeatMode.ONE, + }, + blocking=True, + ) + configured_player.async_set_repeat.assert_called_with("song") + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.test_player", + 
ATTR_MEDIA_REPEAT: RepeatMode.OFF, + }, + blocking=True, + ) + configured_player.async_set_repeat.assert_called_with("none") + + +async def test_squeezebox_repeat_mode_property( + hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory +) -> None: + """Test the repeat mode property.""" + configured_player.repeat = "playlist" + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_REPEAT] + == RepeatMode.ALL + ) + + configured_player.repeat = "song" + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_REPEAT] + == RepeatMode.ONE + ) + + configured_player.repeat = "none" + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_REPEAT] + == RepeatMode.OFF + ) + + +async def test_squeezebox_shuffle( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test set shuffle service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_SHUFFLE: True, + }, + blocking=True, + ) + configured_player.async_set_shuffle.assert_called_once_with("song") + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_SHUFFLE: False, + }, + blocking=True, + ) + configured_player.async_set_shuffle.assert_called_with("none") + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_SHUFFLE] + is False + ) + + +async def test_squeezebox_shuffle_property( + hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory +) -> None: + """Test the shuffle property.""" + + configured_player.shuffle = "song" + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_SHUFFLE] + is True + ) + + configured_player.shuffle = "none" + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_SHUFFLE] + is False + ) + + +async def test_squeezebox_play( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test play service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PLAY, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_play.assert_called_once() + + +async def test_squeezebox_play_pause( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test play/pause service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PLAY_PAUSE, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_toggle_pause.assert_called_once() + + +async def test_squeezebox_pause( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test pause service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PAUSE, + {ATTR_ENTITY_ID: 
"media_player.test_player"}, + blocking=True, + ) + configured_player.async_pause.assert_called_once() + + +async def test_squeezebox_seek( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test seek service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + }, + blocking=True, + ) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_SEEK, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_SEEK_POSITION: 100, + }, + blocking=True, + ) + configured_player.async_time.assert_called_once_with(100) + + +async def test_squeezebox_stop( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test stop service call.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_STOP, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_stop.assert_called_once() + + +async def test_squeezebox_load_playlist( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test load a playlist.""" + # load a playlist by number + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, + ATTR_MEDIA_CONTENT_TYPE: MediaType.PLAYLIST, + }, + blocking=True, + ) + assert configured_player.async_load_playlist.call_count == 1 + + # load a list of urls + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: json.dumps( + { + "urls": [ + {"url": FAKE_VALID_ITEM_ID}, + {"url": FAKE_VALID_ITEM_ID + "_2"}, + ], + "index": "0", + } + ), + ATTR_MEDIA_CONTENT_TYPE: MediaType.PLAYLIST, + }, + blocking=True, + ) + assert configured_player.async_load_playlist.call_count == 2 + + # clear the playlist + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_clear_playlist.assert_called_once() + + +async def test_squeezebox_enqueue( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test the various enqueue service calls.""" + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, + }, + blocking=True, + ) + configured_player.async_load_url.assert_called_once_with(FAKE_VALID_ITEM_ID, "add") + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, + }, + blocking=True, + ) + configured_player.async_load_url.assert_called_with(FAKE_VALID_ITEM_ID, "insert") + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, + }, + blocking=True, + ) + configured_player.async_load_url.assert_called_with(FAKE_VALID_ITEM_ID, "play_now") + + +async def test_squeezebox_skip_tracks( + 
hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test track skipping service calls.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, + ATTR_MEDIA_CONTENT_TYPE: MediaType.PLAYLIST, + }, + blocking=True, + ) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_index.assert_called_once_with("+1") + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PREVIOUS_TRACK, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_index.assert_called_with("-1") + + +async def test_squeezebox_call_query( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test query service call.""" + await hass.services.async_call( + DOMAIN, + SERVICE_CALL_QUERY, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_COMMAND: "test_command", + ATTR_PARAMETERS: ["param1", "param2"], + }, + blocking=True, + ) + configured_player.async_query.assert_called_once_with( + "test_command", "param1", "param2" + ) + + +async def test_squeezebox_call_method( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test method call service call.""" + await hass.services.async_call( + DOMAIN, + SERVICE_CALL_METHOD, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_COMMAND: "test_command", + ATTR_PARAMETERS: ["param1", "param2"], + }, + blocking=True, + ) + configured_player.async_query.assert_called_once_with( + "test_command", "param1", "param2" + ) + + +async def test_squeezebox_invalid_state( + hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory +) -> None: + """Test handling an unexpected state from pysqueezebox.""" + configured_player.mode = "invalid" + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("media_player.test_player").state == STATE_UNKNOWN + + +async def test_squeezebox_server_discovery( + hass: HomeAssistant, + lms: MagicMock, + lms_factory: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test discovery of a squeezebox server.""" + + async def mock_async_discover(callback): + """Mock the async_discover function of pysqueezebox.""" + return callback(lms_factory(2)) + + with ( + patch( + "homeassistant.components.squeezebox.Server", + return_value=lms, + ), + patch( + "homeassistant.components.squeezebox.media_player.async_discover", + mock_async_discover, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + # how do we check that a config flow started? 
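# A minimal sketch answering the open question above ("how do we check that a
# config flow started?"). This is illustrative only and not part of the patch:
# the config-flow manager tracks flows in progress, so after the mocked
# discovery callback has run, the test could look for an open flow owned by
# this integration. It assumes the discovered server does not match an already
# configured entry, so its discovery flow is not immediately aborted.
flows = [
    flow
    for flow in hass.config_entries.flow.async_progress()
    if flow["handler"] == DOMAIN
]
assert len(flows) == 1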
+ + +async def test_squeezebox_join(hass: HomeAssistant, configured_players: list) -> None: + """Test joining a squeezebox player.""" + + # join a valid player + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_GROUP_MEMBERS: ["media_player.test_player_2"], + }, + blocking=True, + ) + configured_players[0].async_sync.assert_called_once_with( + configured_players[1].player_id + ) + + # try to join an invalid player + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_GROUP_MEMBERS: ["media_player.invalid"], + }, + blocking=True, + ) + + +async def test_squeezebox_unjoin( + hass: HomeAssistant, configured_player: MagicMock +) -> None: + """Test unjoining a squeezebox player.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_UNJOIN, + {ATTR_ENTITY_ID: "media_player.test_player"}, + blocking=True, + ) + configured_player.async_unsync.assert_called_once() + + +async def test_squeezebox_media_content_properties( + hass: HomeAssistant, + configured_player: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test media_content_id and media_content_type properties.""" + playlist_urls = [ + {"url": "test_title"}, + {"url": "test_title_2"}, + ] + configured_player.current_index = 0 + configured_player.playlist = playlist_urls + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("media_player.test_player").attributes[ + ATTR_MEDIA_CONTENT_ID + ] == json.dumps({"index": 0, "urls": playlist_urls}) + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_CONTENT_TYPE] + == MediaType.PLAYLIST + ) + + configured_player.url = "test_url" + configured_player.playlist = [{"url": "test_url"}] + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_CONTENT_ID] + == "test_url" + ) + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_CONTENT_TYPE] + == MediaType.MUSIC + ) + + configured_player.playlist = None + configured_player.url = None + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + ATTR_MEDIA_CONTENT_ID + not in hass.states.get("media_player.test_player").attributes + ) + assert ( + ATTR_MEDIA_CONTENT_TYPE + not in hass.states.get("media_player.test_player").attributes + ) + + +async def test_squeezebox_media_position_property( + hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory +) -> None: + """Test media_position property.""" + configured_player.time = 100 + configured_player.async_update = AsyncMock( + side_effect=lambda: setattr(configured_player, "time", 105) + ) + last_update = utcnow() + freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_POSITION] + == 105 + ) + assert ( + ( + hass.states.get("media_player.test_player").attributes[ + ATTR_MEDIA_POSITION_UPDATED_AT + ] + ) + > last_update + ) diff --git a/tests/components/squeezebox/test_sensor.py b/tests/components/squeezebox/test_sensor.py new file mode 100644 
index 00000000000..c262c2a0e7c --- /dev/null +++ b/tests/components/squeezebox/test_sensor.py @@ -0,0 +1,34 @@ +"""Test squeezebox sensors.""" + +from copy import deepcopy +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .conftest import FAKE_QUERY_RESPONSE + +from tests.common import MockConfigEntry + + +async def test_sensor(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Test sensor states and attributes.""" + + # Setup component + with ( + patch( + "homeassistant.components.squeezebox.PLATFORMS", + [Platform.SENSOR], + ), + patch( + "homeassistant.components.squeezebox.Server.async_query", + return_value=deepcopy(FAKE_QUERY_RESPONSE), + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.fakelib_player_count") + + assert state is not None + assert state.state == "10" diff --git a/tests/components/ssdp/test_init.py b/tests/components/ssdp/test_init.py index d10496500d2..7dc0f0095d4 100644 --- a/tests/components/ssdp/test_init.py +++ b/tests/components/ssdp/test_init.py @@ -18,10 +18,16 @@ from homeassistant.const import ( MATCH_ALL, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import ( + MockConfigEntry, + MockModule, + async_fire_time_changed, + mock_integration, +) from tests.test_util.aiohttp import AiohttpClientMocker @@ -65,7 +71,8 @@ async def test_ssdp_flow_dispatched_on_st( assert len(mock_flow_init.mock_calls) == 1 assert mock_flow_init.mock_calls[0][1][0] == "mock-domain" assert mock_flow_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_SSDP + "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1), + "source": config_entries.SOURCE_SSDP, } mock_call_data: ssdp.SsdpServiceInfo = mock_flow_init.mock_calls[0][2]["data"] assert mock_call_data.ssdp_st == "mock-st" @@ -108,7 +115,8 @@ async def test_ssdp_flow_dispatched_on_manufacturer_url( assert len(mock_flow_init.mock_calls) == 1 assert mock_flow_init.mock_calls[0][1][0] == "mock-domain" assert mock_flow_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_SSDP + "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1), + "source": config_entries.SOURCE_SSDP, } mock_call_data: ssdp.SsdpServiceInfo = mock_flow_init.mock_calls[0][2]["data"] assert mock_call_data.ssdp_st == "mock-st" @@ -163,7 +171,8 @@ async def test_scan_match_upnp_devicedesc_manufacturer( assert len(mock_flow_init.mock_calls) == 1 assert mock_flow_init.mock_calls[0][1][0] == "mock-domain" assert mock_flow_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_SSDP + "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1), + "source": config_entries.SOURCE_SSDP, } @@ -208,7 +217,8 @@ async def test_scan_match_upnp_devicedesc_devicetype( assert len(mock_flow_init.mock_calls) == 1 assert mock_flow_init.mock_calls[0][1][0] == "mock-domain" assert mock_flow_init.mock_calls[0][2]["context"] == { - "source": config_entries.SOURCE_SSDP + "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1), + "source": config_entries.SOURCE_SSDP, } @@ -339,7 +349,14 @@ async def test_flow_start_only_alive( 
await hass.async_block_till_done(wait_background_tasks=True) mock_flow_init.assert_awaited_once_with( - "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY + "mock-domain", + context={ + "discovery_key": DiscoveryKey( + domain="ssdp", key="uuid:mock-udn", version=1 + ), + "source": config_entries.SOURCE_SSDP, + }, + data=ANY, ) # ssdp:alive advertisement should start a flow @@ -356,7 +373,14 @@ async def test_flow_start_only_alive( ssdp_listener._on_alive(mock_ssdp_advertisement) await hass.async_block_till_done() mock_flow_init.assert_awaited_once_with( - "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY + "mock-domain", + context={ + "discovery_key": DiscoveryKey( + domain="ssdp", key="uuid:mock-udn", version=1 + ), + "source": config_entries.SOURCE_SSDP, + }, + data=ANY, ) # ssdp:byebye advertisement should not start a flow @@ -372,7 +396,14 @@ async def test_flow_start_only_alive( ssdp_listener._on_update(mock_ssdp_advertisement) await hass.async_block_till_done() mock_flow_init.assert_awaited_once_with( - "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY + "mock-domain", + context={ + "discovery_key": DiscoveryKey( + domain="ssdp", key="uuid:mock-udn", version=1 + ), + "source": config_entries.SOURCE_SSDP, + }, + data=ANY, ) @@ -824,7 +855,14 @@ async def test_flow_dismiss_on_byebye( await hass.async_block_till_done(wait_background_tasks=True) mock_flow_init.assert_awaited_once_with( - "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY + "mock-domain", + context={ + "discovery_key": DiscoveryKey( + domain="ssdp", key="uuid:mock-udn", version=1 + ), + "source": config_entries.SOURCE_SSDP, + }, + data=ANY, ) # ssdp:alive advertisement should start a flow @@ -841,7 +879,14 @@ async def test_flow_dismiss_on_byebye( ssdp_listener._on_alive(mock_ssdp_advertisement) await hass.async_block_till_done(wait_background_tasks=True) mock_flow_init.assert_awaited_once_with( - "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY + "mock-domain", + context={ + "discovery_key": DiscoveryKey( + domain="ssdp", key="uuid:mock-udn", version=1 + ), + "source": config_entries.SOURCE_SSDP, + }, + data=ANY, ) mock_ssdp_advertisement["nts"] = "ssdp:byebye" @@ -859,3 +904,193 @@ async def test_flow_dismiss_on_byebye( assert len(mock_async_progress_by_init_data_type.mock_calls) == 1 assert mock_async_abort.mock_calls[0][1][0] == "mock_flow_id" + + +@patch( + "homeassistant.components.ssdp.async_get_ssdp", + return_value={"mock-domain": [{"st": "mock-st"}]}, +) +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + ), + [ + # Matching discovery key + ( + "mock-domain", + {"ssdp": (DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1),)}, + ), + # Matching discovery key + ( + "mock-domain", + { + "ssdp": (DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1),), + "other": (DiscoveryKey(domain="other", key="blah", version=1),), + }, + ), + # Matching discovery key, other domain + # Note: Rediscovery is not currently restricted to the domain of the removed + # entry. Such a check can be added if needed. 
+ ( + "comp", + {"ssdp": (DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1),)}, + ), + ], +) +@pytest.mark.parametrize( + "entry_source", + [ + config_entries.SOURCE_IGNORE, + config_entries.SOURCE_SSDP, + config_entries.SOURCE_USER, + ], +) +async def test_ssdp_rediscover( + mock_get_ssdp, + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + mock_flow_init, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, +) -> None: + """Test we reinitiate flows when an ignored config entry is removed.""" + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id="mock-unique-id", + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + mock_ssdp_search_response = _ssdp_headers( + { + "st": "mock-st", + "location": "http://1.1.1.1", + "usn": "uuid:mock-udn::mock-st", + "server": "mock-server", + "ext": "", + "_source": "search", + } + ) + aioclient_mock.get( + "http://1.1.1.1", + text=""" + + + Paulus + Paulus + + + """, + ) + ssdp_listener = await init_ssdp_component(hass) + ssdp_listener._on_search(mock_ssdp_search_response) + await hass.async_block_till_done() + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + + expected_context = { + "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1), + "source": config_entries.SOURCE_SSDP, + } + assert len(mock_flow_init.mock_calls) == 1 + assert mock_flow_init.mock_calls[0][1][0] == "mock-domain" + assert mock_flow_init.mock_calls[0][2]["context"] == expected_context + mock_call_data: ssdp.SsdpServiceInfo = mock_flow_init.mock_calls[0][2]["data"] + assert mock_call_data.ssdp_st == "mock-st" + assert mock_call_data.ssdp_location == "http://1.1.1.1" + + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_flow_init.mock_calls) == 2 + assert mock_flow_init.mock_calls[1][1][0] == "mock-domain" + assert mock_flow_init.mock_calls[1][2]["context"] == expected_context + assert ( + mock_flow_init.mock_calls[1][2]["data"] + == mock_flow_init.mock_calls[0][2]["data"] + ) + + +@patch( + "homeassistant.components.ssdp.async_get_ssdp", + return_value={"mock-domain": [{"st": "mock-st"}]}, +) +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + "entry_source", + "entry_unique_id", + ), + [ + # Discovery key from other domain + ( + "mock-domain", + {"dhcp": (DiscoveryKey(domain="dhcp", key="uuid:mock-udn", version=1),)}, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + # Discovery key from the future + ( + "mock-domain", + {"ssdp": (DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=2),)}, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + ], +) +async def test_ssdp_rediscover_no_match( + mock_get_ssdp, + hass: HomeAssistant, + mock_flow_init, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, + entry_unique_id: str, +) -> None: + """Test we don't reinitiate flows when a non matching config entry is removed.""" + mock_integration(hass, MockModule(entry_domain)) + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id=entry_unique_id, + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + mock_ssdp_search_response = _ssdp_headers( + { + "st": "mock-st", + "location": "http://1.1.1.1", + "usn": 
"uuid:mock-udn::mock-st", + "server": "mock-server", + "ext": "", + "_source": "search", + } + ) + ssdp_listener = await init_ssdp_component(hass) + ssdp_listener._on_search(mock_ssdp_search_response) + await hass.async_block_till_done() + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + + expected_context = { + "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1), + "source": config_entries.SOURCE_SSDP, + } + assert len(mock_flow_init.mock_calls) == 1 + assert mock_flow_init.mock_calls[0][1][0] == "mock-domain" + assert mock_flow_init.mock_calls[0][2]["context"] == expected_context + mock_call_data: ssdp.SsdpServiceInfo = mock_flow_init.mock_calls[0][2]["data"] + assert mock_call_data.ssdp_st == "mock-st" + assert mock_call_data.ssdp_location == "http://1.1.1.1" + + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_flow_init.mock_calls) == 1 diff --git a/tests/components/starlink/fixtures/history_stats_success.json b/tests/components/starlink/fixtures/history_stats_success.json new file mode 100644 index 00000000000..5a228dd34af --- /dev/null +++ b/tests/components/starlink/fixtures/history_stats_success.json @@ -0,0 +1,112 @@ +[ + { + "samples": 900, + "end_counter": 119395 + }, + { + "total_ping_drop": 2.4592087380588055, + "count_full_ping_drop": 0, + "count_obstructed": 0, + "total_obstructed_ping_drop": 0, + "count_full_obstructed_ping_drop": 0, + "count_unscheduled": 0, + "total_unscheduled_ping_drop": 0, + "count_full_unscheduled_ping_drop": 0 + }, + { + "init_run_fragment": 0, + "final_run_fragment": 0, + "run_seconds[1,]": [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ], + "run_minutes[1,]": [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ] + }, + { + "mean_all_ping_latency": 31.55747121333472, + "deciles_all_ping_latency[]": [ + 21.005102157592773, 22.67989158630371, 25.310760498046875, + 26.85667610168457, 27.947458267211914, 29.192155838012695, + 31.26323890686035, 34.226768493652344, 38.54373550415039, + 42.308048248291016, 60.11151885986328 + ], + "mean_full_ping_latency": 31.526783029284427, + "deciles_full_ping_latency[]": [ + 21.070240020751953, 22.841461181640625, 25.34041976928711, + 26.908039093017578, 27.947458267211914, 29.135879516601562, + 31.122955322265625, 34.1280403137207, 38.49388122558594, + 42.308048248291016, 60.11151885986328 + ], + "stdev_full_ping_latency": 7.8141330200011785 + }, + { + "load_bucket_samples[]": [738, 24, 39, 55, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + "load_bucket_min_latency[]": [ + 21.070240020751953, + 21.35713768005371, + 21.156545639038086, + 24.763751983642578, + 24.7109317779541, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ], + "load_bucket_median_latency[]": [ + 29.2450590133667, + 27.031108856201172, + 25.726211547851562, + 31.845806121826172, + 28.919479370117188, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ], + "load_bucket_max_latency[]": [ + 60.11151885986328, + 40.572628021240234, + 48.063961029052734, + 53.505126953125, + 38.7435302734375, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + }, + { + "download_usage": 
72504227, + "upload_usage": 5719755 + }, + { + "latest_power": 27.54502296447754, + "mean_power": 31.449254739549424, + "min_power": 21.826229095458984, + "max_power": 41.71160888671875, + "total_energy": 0.007862313684887356 + } +] diff --git a/tests/components/starlink/patchers.py b/tests/components/starlink/patchers.py index f8179f07bed..08e82548ef8 100644 --- a/tests/components/starlink/patchers.py +++ b/tests/components/starlink/patchers.py @@ -24,6 +24,11 @@ SLEEP_DATA_SUCCESS_PATCHER = patch( return_value=json.loads(load_fixture("sleep_data_success.json", "starlink")), ) +HISTORY_STATS_SUCCESS_PATCHER = patch( + "homeassistant.components.starlink.coordinator.history_stats", + return_value=json.loads(load_fixture("history_stats_success.json", "starlink")), +) + DEVICE_FOUND_PATCHER = patch( "homeassistant.components.starlink.config_flow.get_id", return_value="some-valid-id" ) diff --git a/tests/components/starlink/snapshots/test_diagnostics.ambr b/tests/components/starlink/snapshots/test_diagnostics.ambr index 4c85ad84ca7..c54e0b2df6d 100644 --- a/tests/components/starlink/snapshots/test_diagnostics.ambr +++ b/tests/components/starlink/snapshots/test_diagnostics.ambr @@ -16,6 +16,13 @@ 'alert_thermal_throttle': False, 'alert_unexpected_location': False, }), + 'consumption': dict({ + 'latest_power': 27.54502296447754, + 'max_power': 41.71160888671875, + 'mean_power': 31.449254739549424, + 'min_power': 21.826229095458984, + 'total_energy': 0.007862313684887356, + }), 'location': dict({ 'altitude': '**REDACTED**', 'latitude': '**REDACTED**', @@ -79,5 +86,9 @@ 'uplink_throughput_bps': 11802.771484375, 'uptime': 804138, }), + 'usage': dict({ + 'download_usage': 72504227, + 'upload_usage': 5719755, + }), }) # --- diff --git a/tests/components/starlink/test_diagnostics.py b/tests/components/starlink/test_diagnostics.py index c5876e5e9f2..cd36dd0367e 100644 --- a/tests/components/starlink/test_diagnostics.py +++ b/tests/components/starlink/test_diagnostics.py @@ -7,6 +7,7 @@ from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant from .patchers import ( + HISTORY_STATS_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, STATUS_DATA_SUCCESS_PATCHER, @@ -32,6 +33,7 @@ async def test_diagnostics( STATUS_DATA_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, + HISTORY_STATS_SUCCESS_PATCHER, ): entry.add_to_hass(hass) diff --git a/tests/components/starlink/test_init.py b/tests/components/starlink/test_init.py index 62a1ee41236..7e04c21562a 100644 --- a/tests/components/starlink/test_init.py +++ b/tests/components/starlink/test_init.py @@ -6,6 +6,7 @@ from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant from .patchers import ( + HISTORY_STATS_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, STATUS_DATA_SUCCESS_PATCHER, @@ -25,6 +26,7 @@ async def test_successful_entry(hass: HomeAssistant) -> None: STATUS_DATA_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, + HISTORY_STATS_SUCCESS_PATCHER, ): entry.add_to_hass(hass) @@ -46,6 +48,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None: STATUS_DATA_SUCCESS_PATCHER, LOCATION_DATA_SUCCESS_PATCHER, SLEEP_DATA_SUCCESS_PATCHER, + HISTORY_STATS_SUCCESS_PATCHER, ): entry.add_to_hass(hass) diff --git a/tests/components/statistics/snapshots/test_config_flow.ambr b/tests/components/statistics/snapshots/test_config_flow.ambr new file mode 100644 index 00000000000..5f79c56dec7 
--- /dev/null +++ b/tests/components/statistics/snapshots/test_config_flow.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_config_flow_preview_success[missing_size_and_age] + dict({ + 'attributes': dict({ + 'friendly_name': 'Statistical characteristic', + 'icon': 'mdi:calculator', + }), + 'state': 'unavailable', + }) +# --- +# name: test_config_flow_preview_success[success] + dict({ + 'attributes': dict({ + 'buffer_usage_ratio': 0.1, + 'friendly_name': 'Statistical characteristic', + 'icon': 'mdi:calculator', + 'source_value_valid': True, + 'state_class': 'measurement', + }), + 'state': '16.0', + }) +# --- +# name: test_options_flow_preview + dict({ + 'attributes': dict({ + 'age_coverage_ratio': 0.0, + 'buffer_usage_ratio': 0.05, + 'friendly_name': 'Statistical characteristic', + 'icon': 'mdi:calculator', + 'source_value_valid': True, + 'state_class': 'measurement', + }), + 'state': '16.0', + }) +# --- +# name: test_options_flow_preview[updated] + dict({ + 'attributes': dict({ + 'age_coverage_ratio': 0.0, + 'buffer_usage_ratio': 0.1, + 'friendly_name': 'Statistical characteristic', + 'icon': 'mdi:calculator', + 'source_value_valid': True, + 'state_class': 'measurement', + }), + 'state': '20.0', + }) +# --- diff --git a/tests/components/statistics/test_config_flow.py b/tests/components/statistics/test_config_flow.py index 7c9ed5bed47..77ccba5ba4c 100644 --- a/tests/components/statistics/test_config_flow.py +++ b/tests/components/statistics/test_config_flow.py @@ -4,7 +4,11 @@ from __future__ import annotations from unittest.mock import AsyncMock +import pytest +from syrupy import SnapshotAssertion + from homeassistant import config_entries +from homeassistant.components.recorder import Recorder from homeassistant.components.statistics import DOMAIN from homeassistant.components.statistics.sensor import ( CONF_KEEP_LAST_SAMPLE, @@ -16,12 +20,14 @@ from homeassistant.components.statistics.sensor import ( DEFAULT_NAME, STAT_AVERAGE_LINEAR, STAT_COUNT, + STAT_VALUE_MAX, ) from homeassistant.const import CONF_ENTITY_ID, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator async def test_form_sensor(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: @@ -271,3 +277,204 @@ async def test_entry_already_exist( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + "user_input", + [ + ( + { + CONF_SAMPLES_MAX_BUFFER_SIZE: 10.0, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50, + CONF_PRECISION: 2, + } + ), + ( + { + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50, + CONF_PRECISION: 2, + } + ), + ], + ids=("success", "missing_size_and_age"), +) +async def test_config_flow_preview_success( + recorder_mock: Recorder, + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + user_input: str, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow preview.""" + client = await hass_ws_client(hass) + + # add state for the tests + hass.states.async_set("sensor.test_monitored", "16") + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] is None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: 
"sensor.test_monitored", + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_STATE_CHARACTERISTIC: STAT_VALUE_MAX, + }, + ) + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "options" + assert result["errors"] is None + assert result["preview"] == "statistics" + + await client.send_json_auto_id( + { + "type": "statistics/start_preview", + "flow_id": result["flow_id"], + "flow_type": "config_flow", + "user_input": user_input, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + + msg = await client.receive_json() + assert msg["event"] == snapshot + assert len(hass.states.async_all()) == 1 + + +async def test_options_flow_preview( + recorder_mock: Recorder, + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the options flow preview.""" + client = await hass_ws_client(hass) + + # add state for the tests + hass.states.async_set("sensor.test_monitored", "16") + + # Setup the config entry + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "sensor.test_monitored", + CONF_STATE_CHARACTERISTIC: STAT_VALUE_MAX, + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + }, + title=DEFAULT_NAME, + ) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == FlowResultType.FORM + assert result["errors"] is None + assert result["preview"] == "statistics" + + await client.send_json_auto_id( + { + "type": "statistics/start_preview", + "flow_id": result["flow_id"], + "flow_type": "options_flow", + "user_input": { + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + }, + } + ) + + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + + msg = await client.receive_json() + assert msg["event"] == snapshot + assert len(hass.states.async_all()) == 2 + + # add state for the tests + hass.states.async_set("sensor.test_monitored", "20") + await hass.async_block_till_done() + + msg = await client.receive_json() + assert msg["event"] == snapshot(name="updated") + + +async def test_options_flow_sensor_preview_config_entry_removed( + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test the option flow preview where the config entry is removed.""" + client = await hass_ws_client(hass) + + # Setup the config entry + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "sensor.test_monitored", + CONF_STATE_CHARACTERISTIC: STAT_AVERAGE_LINEAR, + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + }, + title=DEFAULT_NAME, + ) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await 
hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == FlowResultType.FORM + assert result["errors"] is None + assert result["preview"] == "statistics" + + await hass.config_entries.async_remove(config_entry.entry_id) + + await client.send_json_auto_id( + { + "type": "statistics/start_preview", + "flow_id": result["flow_id"], + "flow_type": "options_flow", + "user_input": { + CONF_SAMPLES_MAX_BUFFER_SIZE: 25.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + }, + } + ) + msg = await client.receive_json() + assert not msg["success"] + assert msg["error"] == { + "code": "home_assistant_error", + "message": "Config entry not found", + } diff --git a/tests/components/statistics/test_sensor.py b/tests/components/statistics/test_sensor.py index c90d685714c..1dff13bb21a 100644 --- a/tests/components/statistics/test_sensor.py +++ b/tests/components/statistics/test_sensor.py @@ -2,9 +2,11 @@ from __future__ import annotations +from asyncio import Event as AsyncioEvent from collections.abc import Sequence from datetime import datetime, timedelta import statistics +from threading import Event from typing import Any from unittest.mock import patch @@ -12,7 +14,7 @@ from freezegun import freeze_time import pytest from homeassistant import config as hass_config -from homeassistant.components.recorder import Recorder +from homeassistant.components.recorder import Recorder, history from homeassistant.components.sensor import ( ATTR_STATE_CLASS, SensorDeviceClass, @@ -50,6 +52,7 @@ from tests.components.recorder.common import async_wait_recording_done VALUES_BINARY = ["on", "off", "on", "off", "on", "off", "on", "off", "on"] VALUES_NUMERIC = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] +VALUES_NUMERIC_LINEAR = [1, 2, 3, 4, 5, 6, 7, 8, 9] async def test_unique_id( @@ -115,7 +118,6 @@ async def test_sensor_defaults_numeric(hass: HomeAssistant) -> None: assert state.attributes.get("buffer_usage_ratio") == round(9 / 20, 2) assert state.attributes.get("source_value_valid") is True assert "age_coverage_ratio" not in state.attributes - # Source sensor turns unavailable, then available with valid value, # statistics sensor should follow state = hass.states.get("sensor.test") @@ -247,8 +249,15 @@ async def test_sensor_defaults_binary(hass: HomeAssistant) -> None: assert "age_coverage_ratio" not in state.attributes -async def test_sensor_source_with_force_update(hass: HomeAssistant) -> None: - """Test the behavior of the sensor when the source sensor force-updates with same value.""" +async def test_sensor_state_reported(hass: HomeAssistant) -> None: + """Test the behavior of the sensor with a sequence of identical values. + + Forced updates no longer make a difference, since the statistics are now reacting not + only to state change events but also to state report events (EVENT_STATE_REPORTED). + This means repeating values will be added to the buffer repeatedly in both cases. + This fixes problems with time based averages and some other functions that behave + differently when repeating values are reported. 
+ """ repeating_values = [18, 0, 0, 0, 0, 0, 0, 0, 9] assert await async_setup_component( hass, @@ -291,9 +300,9 @@ async def test_sensor_source_with_force_update(hass: HomeAssistant) -> None: state_normal = hass.states.get("sensor.test_normal") state_force = hass.states.get("sensor.test_force") assert state_normal and state_force - assert state_normal.state == str(round(sum(repeating_values) / 3, 2)) + assert state_normal.state == str(round(sum(repeating_values) / 9, 2)) assert state_force.state == str(round(sum(repeating_values) / 9, 2)) - assert state_normal.attributes.get("buffer_usage_ratio") == round(3 / 20, 2) + assert state_normal.attributes.get("buffer_usage_ratio") == round(9 / 20, 2) assert state_force.attributes.get("buffer_usage_ratio") == round(9 / 20, 2) @@ -566,7 +575,7 @@ async def test_age_limit_expiry(hass: HomeAssistant) -> None: assert state is not None assert state.state == STATE_UNKNOWN assert state.attributes.get("buffer_usage_ratio") == round(0 / 20, 2) - assert state.attributes.get("age_coverage_ratio") is None + assert state.attributes.get("age_coverage_ratio") == 0 async def test_age_limit_expiry_with_keep_last_sample(hass: HomeAssistant) -> None: @@ -1013,7 +1022,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "average_linear", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 6.0, "value_9": 10.68, "unit": "°C", }, @@ -1021,7 +1030,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "average_step", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 6.0, "value_9": 11.36, "unit": "°C", }, @@ -1113,7 +1122,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "distance_95_percent_of_values", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float(round(2 * 1.96 * statistics.stdev(VALUES_NUMERIC), 2)), "unit": "°C", }, @@ -1121,7 +1130,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "distance_99_percent_of_values", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float(round(2 * 2.58 * statistics.stdev(VALUES_NUMERIC), 2)), "unit": "°C", }, @@ -1161,7 +1170,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "noisiness", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float(round(sum([3, 4.8, 10.2, 1.2, 5.4, 2.5, 7.3, 8]) / 8, 2)), "unit": "°C", }, @@ -1169,7 +1178,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "percentile", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 6.0, "value_9": 9.2, "unit": "°C", }, @@ -1177,7 +1186,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "standard_deviation", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float(round(statistics.stdev(VALUES_NUMERIC), 2)), "unit": "°C", }, @@ -1193,7 +1202,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "sum_differences", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float( sum( [ @@ -1214,7 +1223,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: 
"source_sensor_domain": "sensor", "name": "sum_differences_nonnegative", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float( sum( [ @@ -1259,7 +1268,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "variance", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float(round(statistics.variance(VALUES_NUMERIC), 2)), "unit": "°C²", }, @@ -1267,7 +1276,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "binary_sensor", "name": "average_step", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 100.0, "value_9": 50.0, "unit": "%", }, @@ -1701,3 +1710,382 @@ async def test_device_id( statistics_entity = entity_registry.async_get("sensor.statistics") assert statistics_entity is not None assert statistics_entity.device_id == source_entity.device_id + + +async def test_update_before_load(recorder_mock: Recorder, hass: HomeAssistant) -> None: + """Verify that updates happening before reloading from the database are handled correctly.""" + + current_time = dt_util.utcnow() + + # enable and pre-fill the recorder + await hass.async_block_till_done() + await async_wait_recording_done(hass) + + with ( + freeze_time(current_time) as freezer, + ): + for value in VALUES_NUMERIC_LINEAR: + hass.states.async_set( + "sensor.test_monitored", + str(value), + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + await hass.async_block_till_done() + current_time += timedelta(seconds=1) + freezer.move_to(current_time) + + await async_wait_recording_done(hass) + + # some synchronisation is needed to prevent that loading from the database finishes too soon + # we want this to take long enough to be able to try to add a value BEFORE loading is done + state_changes_during_period_called_evt = AsyncioEvent() + state_changes_during_period_stall_evt = Event() + real_state_changes_during_period = history.state_changes_during_period + + def mock_state_changes_during_period(*args, **kwargs): + states = real_state_changes_during_period(*args, **kwargs) + hass.loop.call_soon_threadsafe(state_changes_during_period_called_evt.set) + state_changes_during_period_stall_evt.wait() + return states + + # create the statistics component, get filled from database + with patch( + "homeassistant.components.statistics.sensor.history.state_changes_during_period", + mock_state_changes_during_period, + ): + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": "sensor.test_monitored", + "state_characteristic": "average_step", + "max_age": {"seconds": 10}, + }, + ] + }, + ) + # adding this value is going to be ignored, since loading from the database hasn't finished yet + # if this value would be added before loading from the database is done + # it would mess up the order of the internal queue which is supposed to be sorted by time + await state_changes_during_period_called_evt.wait() + hass.states.async_set( + "sensor.test_monitored", + "10", + {ATTR_UNIT_OF_MEASUREMENT: DEGREE}, + ) + state_changes_during_period_stall_evt.set() + await hass.async_block_till_done() + + # we will end up with a buffer of [1 .. 
9] (10 wasn't added) + # so the computed average_step is 1+2+3+4+5+6+7+8/8 = 4.5 + assert float(hass.states.get("sensor.test").state) == pytest.approx(4.5) + + +async def test_average_linear_unevenly_timed(hass: HomeAssistant) -> None: + """Test the average_linear state characteristic with unevenly distributed values. + + This also implicitly tests the correct timing of repeating values. + """ + values_and_times = [[5.0, 2], [10.0, 1], [10.0, 1], [10.0, 2], [5.0, 1]] + + current_time = dt_util.utcnow() + + with ( + freeze_time(current_time) as freezer, + ): + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test_sensor_average_linear", + "entity_id": "sensor.test_monitored", + "state_characteristic": "average_linear", + "max_age": {"seconds": 10}, + }, + ] + }, + ) + await hass.async_block_till_done() + + for value_and_time in values_and_times: + hass.states.async_set( + "sensor.test_monitored", + str(value_and_time[0]), + {ATTR_UNIT_OF_MEASUREMENT: DEGREE}, + ) + current_time += timedelta(seconds=value_and_time[1]) + freezer.move_to(current_time) + + await hass.async_block_till_done() + + state = hass.states.get("sensor.test_sensor_average_linear") + assert state is not None + assert state.state == "8.33", ( + "value mismatch for characteristic 'sensor/average_linear' - " + f"assert {state.state} == 8.33" + ) + + +async def test_sensor_unit_gets_removed(hass: HomeAssistant) -> None: + """Test when input lose its unit of measurement.""" + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": "sensor.test_monitored", + "state_characteristic": "mean", + "sampling_size": 10, + }, + ] + }, + ) + await hass.async_block_till_done() + + input_attributes = { + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, + ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, + } + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + input_attributes, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + hass.states.async_set( + "sensor.test_monitored", + str(VALUES_NUMERIC[0]), + { + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "11.39" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None + # Temperature device class is not valid with no unit of measurement + assert state.attributes.get(ATTR_DEVICE_CLASS) is None + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + input_attributes, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "11.39" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) == 
SensorDeviceClass.TEMPERATURE + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + +async def test_sensor_device_class_gets_removed(hass: HomeAssistant) -> None: + """Test when device class gets removed.""" + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": "sensor.test_monitored", + "state_characteristic": "mean", + "sampling_size": 10, + }, + ] + }, + ) + await hass.async_block_till_done() + + input_attributes = { + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, + ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, + } + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + input_attributes, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + hass.states.async_set( + "sensor.test_monitored", + str(VALUES_NUMERIC[0]), + { + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "11.39" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) is None + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + input_attributes, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "11.39" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + +async def test_not_valid_device_class(hass: HomeAssistant) -> None: + """Test when not valid device class.""" + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": "sensor.test_monitored", + "state_characteristic": "mean", + "sampling_size": 10, + }, + ] + }, + ) + await hass.async_block_till_done() + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + { + ATTR_DEVICE_CLASS: SensorDeviceClass.DATE, + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None + assert state.attributes.get(ATTR_DEVICE_CLASS) is None + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + hass.states.async_set( + "sensor.test_monitored", + str(10), + { + ATTR_DEVICE_CLASS: "not_exist", + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "10.69" + assert 
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None + assert state.attributes.get(ATTR_DEVICE_CLASS) is None + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + +async def test_attributes_remains(recorder_mock: Recorder, hass: HomeAssistant) -> None: + """Test attributes are always present.""" + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + await hass.async_block_till_done() + await async_wait_recording_done(hass) + + current_time = dt_util.utcnow() + with freeze_time(current_time) as freezer: + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": "sensor.test_monitored", + "state_characteristic": "mean", + "max_age": {"seconds": 10}, + }, + ] + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) + assert state.attributes == { + "age_coverage_ratio": 0.0, + "friendly_name": "test", + "icon": "mdi:calculator", + "source_value_valid": True, + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": "°C", + } + + freezer.move_to(current_time + timedelta(minutes=1)) + async_fire_time_changed(hass) + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == STATE_UNKNOWN + assert state.attributes == { + "age_coverage_ratio": 0, + "friendly_name": "test", + "icon": "mdi:calculator", + "source_value_valid": True, + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": "°C", + } diff --git a/tests/components/steam_online/test_config_flow.py b/tests/components/steam_online/test_config_flow.py index a5bce80d890..140a8309ff9 100644 --- a/tests/components/steam_online/test_config_flow.py +++ b/tests/components/steam_online/test_config_flow.py @@ -5,8 +5,8 @@ from unittest.mock import patch import steam from homeassistant.components.steam_online.const import CONF_ACCOUNTS, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER -from homeassistant.const import CONF_API_KEY, CONF_SOURCE +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import entity_registry as er @@ -111,18 +111,10 @@ async def test_flow_user_already_configured(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test reauth step.""" entry = create_entry(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch_interface(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={}, diff --git a/tests/components/stookalert/__init__.py b/tests/components/stookalert/__init__.py deleted file mode 100644 index 3785c76639a..00000000000 --- a/tests/components/stookalert/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Stookalert integration.""" diff --git 
a/tests/components/stookalert/test_config_flow.py b/tests/components/stookalert/test_config_flow.py deleted file mode 100644 index 3664527cbcf..00000000000 --- a/tests/components/stookalert/test_config_flow.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Tests for the Stookalert config flow.""" - -from unittest.mock import patch - -from homeassistant.components.stookalert.const import CONF_PROVINCE, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_full_user_flow(hass: HomeAssistant) -> None: - """Test the full user configuration flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - - with patch( - "homeassistant.components.stookalert.async_setup_entry", return_value=True - ) as mock_setup_entry: - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PROVINCE: "Overijssel", - }, - ) - - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "Overijssel" - assert result2.get("data") == { - CONF_PROVINCE: "Overijssel", - } - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_already_configured(hass: HomeAssistant) -> None: - """Test we abort if the Stookalert province is already configured.""" - MockConfigEntry( - domain=DOMAIN, data={CONF_PROVINCE: "Overijssel"}, unique_id="Overijssel" - ).add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PROVINCE: "Overijssel", - }, - ) - - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "already_configured" diff --git a/tests/components/stookwijzer/conftest.py b/tests/components/stookwijzer/conftest.py new file mode 100644 index 00000000000..3f7303e97f6 --- /dev/null +++ b/tests/components/stookwijzer/conftest.py @@ -0,0 +1,99 @@ +"""Fixtures for Stookwijzer integration tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from homeassistant.components.stookwijzer.const import DOMAIN +from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="Stookwijzer", + domain=DOMAIN, + data={ + CONF_LATITUDE: 200000.1234567890, + CONF_LONGITUDE: 450000.1234567890, + }, + version=2, + entry_id="12345", + ) + + +@pytest.fixture +def mock_v1_config_entry() -> MockConfigEntry: + """Return the default mocked version 1 config entry.""" + return MockConfigEntry( + title="Stookwijzer", + domain=DOMAIN, + data={ + CONF_LOCATION: { + CONF_LATITUDE: 1.0, + CONF_LONGITUDE: 1.1, + }, + }, + version=1, + entry_id="12345", + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.stookwijzer.async_setup_entry", return_value=True + ) as mock_setup: + yield mock_setup + + +@pytest.fixture +def mock_stookwijzer() -> Generator[MagicMock]: + 
"""Return a mocked Stookwijzer client.""" + with ( + patch( + "homeassistant.components.stookwijzer.Stookwijzer", + autospec=True, + ) as stookwijzer_mock, + patch( + "homeassistant.components.stookwijzer.coordinator.Stookwijzer", + new=stookwijzer_mock, + ), + patch( + "homeassistant.components.stookwijzer.config_flow.Stookwijzer", + new=stookwijzer_mock, + ), + ): + stookwijzer_mock.async_transform_coordinates.return_value = ( + 200000.123456789, + 450000.123456789, + ) + + client = stookwijzer_mock.return_value + client.lki = 2 + client.windspeed_ms = 2.5 + client.windspeed_bft = 2 + client.advice = "code_yellow" + + yield stookwijzer_mock + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, +) -> MockConfigEntry: + """Set up the Stookwijzer integration for testing.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry diff --git a/tests/components/stookwijzer/snapshots/test_diagnostics.ambr b/tests/components/stookwijzer/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e2535d54466 --- /dev/null +++ b/tests/components/stookwijzer/snapshots/test_diagnostics.ambr @@ -0,0 +1,8 @@ +# serializer version: 1 +# name: test_get_diagnostics + dict({ + 'advice': 'code_yellow', + 'air_quality_index': 2, + 'windspeed_ms': 2.5, + }) +# --- diff --git a/tests/components/stookwijzer/snapshots/test_sensor.ambr b/tests/components/stookwijzer/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..f6751a84f22 --- /dev/null +++ b/tests/components/stookwijzer/snapshots/test_sensor.ambr @@ -0,0 +1,169 @@ +# serializer version: 1 +# name: test_entities[sensor.stookwijzer_advice_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'code_yellow', + 'code_orange', + 'code_red', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stookwijzer_advice_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Advice code', + 'platform': 'stookwijzer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'advice', + 'unique_id': '12345_advice', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.stookwijzer_advice_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by atlasleefomgeving.nl', + 'device_class': 'enum', + 'friendly_name': 'Stookwijzer Advice code', + 'options': list([ + 'code_yellow', + 'code_orange', + 'code_red', + ]), + }), + 'context': , + 'entity_id': 'sensor.stookwijzer_advice_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'code_yellow', + }) +# --- +# name: test_entities[sensor.stookwijzer_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stookwijzer_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Air quality index', + 'platform': 'stookwijzer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345_air_quality_index', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.stookwijzer_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by atlasleefomgeving.nl', + 'device_class': 'aqi', + 'friendly_name': 'Stookwijzer Air quality index', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.stookwijzer_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_entities[sensor.stookwijzer_wind_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stookwijzer_wind_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind speed', + 'platform': 'stookwijzer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345_windspeed', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor.stookwijzer_wind_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by atlasleefomgeving.nl', + 'device_class': 'wind_speed', + 'friendly_name': 'Stookwijzer Wind speed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stookwijzer_wind_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- diff --git a/tests/components/stookwijzer/test_config_flow.py b/tests/components/stookwijzer/test_config_flow.py index 732e8abfc98..6dddf83c27a 100644 --- a/tests/components/stookwijzer/test_config_flow.py +++ b/tests/components/stookwijzer/test_config_flow.py @@ -1,6 +1,8 @@ """Tests for the Stookwijzer config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, MagicMock + +import pytest from homeassistant.components.stookwijzer.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -9,35 +11,65 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -async def test_full_user_flow(hass: HomeAssistant) -> None: +async def test_full_user_flow( + hass: HomeAssistant, + mock_stookwijzer: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test the full user configuration flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert "flow_id" in result + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - with patch( - "homeassistant.components.stookwijzer.async_setup_entry", return_value=True - ) as mock_setup_entry: - result2 = await hass.config_entries.flow.async_configure( - 
result["flow_id"], - user_input={ - CONF_LOCATION: { - CONF_LATITUDE: 1.0, - CONF_LONGITUDE: 1.1, - } - }, - ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LOCATION: {CONF_LATITUDE: 1.0, CONF_LONGITUDE: 1.1}}, + ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("data") == { - "location": { - "latitude": 1.0, - "longitude": 1.1, - }, + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Stookwijzer" + assert result["data"] == { + CONF_LATITUDE: 200000.123456789, + CONF_LONGITUDE: 450000.123456789, } assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_stookwijzer.async_transform_coordinates.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_connection_error( + hass: HomeAssistant, + mock_stookwijzer: MagicMock, +) -> None: + """Test user configuration flow while connection fails.""" + original_return_value = mock_stookwijzer.async_transform_coordinates.return_value + mock_stookwijzer.async_transform_coordinates.return_value = (None, None) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LOCATION: {CONF_LATITUDE: 1.0, CONF_LONGITUDE: 1.1}}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + # Ensure we can continue the flow, when it now works + mock_stookwijzer.async_transform_coordinates.return_value = original_return_value + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LOCATION: {CONF_LATITUDE: 1.0, CONF_LONGITUDE: 1.1}}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY diff --git a/tests/components/stookwijzer/test_diagnostics.py b/tests/components/stookwijzer/test_diagnostics.py new file mode 100644 index 00000000000..f40165020c1 --- /dev/null +++ b/tests/components/stookwijzer/test_diagnostics.py @@ -0,0 +1,22 @@ +"""Test the Stookwijzer diagnostics.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_get_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Stookwijzer diagnostics.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot + ) diff --git a/tests/components/stookwijzer/test_init.py b/tests/components/stookwijzer/test_init.py new file mode 100644 index 00000000000..0df9b55d1a9 --- /dev/null +++ b/tests/components/stookwijzer/test_init.py @@ -0,0 +1,134 @@ +"""Test the Stookwijzer init.""" + +from unittest.mock import MagicMock + +import pytest + +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.stookwijzer.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir + +from tests.common import MockConfigEntry + + 
+async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, +) -> None: + """Test the Stookwijzer configuration entry loading and unloading.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert len(mock_stookwijzer.return_value.async_update.mock_calls) == 1 + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, +) -> None: + """Test the Stookwijzer configuration entry is not ready when no data is available.""" + mock_stookwijzer.return_value.advice = None + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + assert len(mock_stookwijzer.return_value.async_update.mock_calls) == 1 + + +async def test_migrate_entry( + hass: HomeAssistant, + mock_v1_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, +) -> None: + """Test successful migration of entry data.""" + assert mock_v1_config_entry.version == 1 + + mock_v1_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_v1_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_v1_config_entry.state is ConfigEntryState.LOADED + assert len(mock_stookwijzer.async_transform_coordinates.mock_calls) == 1 + + assert mock_v1_config_entry.version == 2 + assert mock_v1_config_entry.data == { + CONF_LATITUDE: 200000.123456789, + CONF_LONGITUDE: 450000.123456789, + } + + +async def test_entry_migration_failure( + hass: HomeAssistant, + mock_v1_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test failed migration of entry data.""" + assert mock_v1_config_entry.version == 1 + + # Failed getting the transformed coordinates + mock_stookwijzer.async_transform_coordinates.return_value = (None, None) + + mock_v1_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_v1_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_v1_config_entry.state is ConfigEntryState.MIGRATION_ERROR + assert issue_registry.async_get_issue(DOMAIN, "location_migration_failed") + + assert len(mock_stookwijzer.async_transform_coordinates.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_stookwijzer") +async def test_entity_entry_migration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test successful migration of the entity unique ID.""" + entity = entity_registry.async_get_or_create( + suggested_object_id="advice", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=mock_config_entry.entry_id, + config_entry=mock_config_entry, + ) + + assert entity.unique_id == mock_config_entry.entry_id + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, + DOMAIN, + mock_config_entry.entry_id, + ) + is None + ) + + assert ( + entity_registry.async_get_entity_id( 
SENSOR_DOMAIN, + DOMAIN, + f"{mock_config_entry.entry_id}_advice", + ) + == "sensor.advice" + ) diff --git a/tests/components/stookwijzer/test_sensor.py b/tests/components/stookwijzer/test_sensor.py new file mode 100644 index 00000000000..10eeef72d74 --- /dev/null +++ b/tests/components/stookwijzer/test_sensor.py @@ -0,0 +1,20 @@ +"""Tests for the Stookwijzer sensor platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Stookwijzer entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/stt/common.py b/tests/components/stt/common.py index e6c36c5b350..f964fca6b67 100644 --- a/tests/components/stt/common.py +++ b/tests/components/stt/common.py @@ -2,11 +2,22 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine +from collections.abc import AsyncIterable, Callable, Coroutine from pathlib import Path from typing import Any -from homeassistant.components.stt import Provider +from homeassistant.components.stt import ( + AudioBitRates, + AudioChannels, + AudioCodecs, + AudioFormats, + AudioSampleRates, + Provider, + SpeechMetadata, + SpeechResult, + SpeechResultState, + SpeechToTextEntity, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -14,6 +25,80 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from tests.common import MockPlatform, mock_platform +TEST_DOMAIN = "test" + + +class BaseProvider: + """Mock STT provider.""" + + fail_process_audio = False + + def __init__( + self, *, supported_languages: list[str] | None = None, text: str = "test_result" + ) -> None: + """Init test provider.""" + self._supported_languages = supported_languages or ["de", "de-CH", "en"] + self.calls: list[tuple[SpeechMetadata, AsyncIterable[bytes]]] = [] + self.received: list[bytes] = [] + self.text = text + + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return self._supported_languages + + @property + def supported_formats(self) -> list[AudioFormats]: + """Return a list of supported formats.""" + return [AudioFormats.WAV, AudioFormats.OGG] + + @property + def supported_codecs(self) -> list[AudioCodecs]: + """Return a list of supported codecs.""" + return [AudioCodecs.PCM, AudioCodecs.OPUS] + + @property + def supported_bit_rates(self) -> list[AudioBitRates]: + """Return a list of supported bitrates.""" + return [AudioBitRates.BITRATE_16] + + @property + def supported_sample_rates(self) -> list[AudioSampleRates]: + """Return a list of supported samplerates.""" + return [AudioSampleRates.SAMPLERATE_16000] + + @property + def supported_channels(self) -> list[AudioChannels]: + """Return a list of supported channels.""" + return [AudioChannels.CHANNEL_MONO] + + async def async_process_audio_stream( + self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] + ) -> SpeechResult: + """Process an audio stream.""" + self.calls.append((metadata, stream)) + async for data in stream: + if not data: 
+ break + self.received.append(data) + if self.fail_process_audio: + return SpeechResult(None, SpeechResultState.ERROR) + + return SpeechResult(self.text, SpeechResultState.SUCCESS) + + +class MockSTTProvider(BaseProvider, Provider): + """Mock provider.""" + + url_path = TEST_DOMAIN + + +class MockSTTProviderEntity(BaseProvider, SpeechToTextEntity): + """Mock provider entity.""" + + url_path = "stt.test" + _attr_name = "test" + class MockSTTPlatform(MockPlatform): """Help to set up test stt service.""" diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index e5d75d3c4a5..3d5daab2bec 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -1,6 +1,6 @@ """Test STT component setup.""" -from collections.abc import AsyncIterable, Generator, Iterable +from collections.abc import Generator, Iterable from contextlib import ExitStack from http import HTTPStatus from pathlib import Path @@ -10,16 +10,6 @@ import pytest from homeassistant.components.stt import ( DOMAIN, - AudioBitRates, - AudioChannels, - AudioCodecs, - AudioFormats, - AudioSampleRates, - Provider, - SpeechMetadata, - SpeechResult, - SpeechResultState, - SpeechToTextEntity, async_default_engine, async_get_provider, async_get_speech_to_text_engine, @@ -29,7 +19,13 @@ from homeassistant.core import HomeAssistant, State from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component -from .common import mock_stt_entity_platform, mock_stt_platform +from .common import ( + TEST_DOMAIN, + MockSTTProvider, + MockSTTProviderEntity, + mock_stt_entity_platform, + mock_stt_platform, +) from tests.common import ( MockConfigEntry, @@ -38,85 +34,21 @@ from tests.common import ( mock_integration, mock_platform, mock_restore_cache, + reset_translation_cache, ) from tests.typing import ClientSessionGenerator, WebSocketGenerator -TEST_DOMAIN = "test" - - -class BaseProvider: - """Mock provider.""" - - fail_process_audio = False - - def __init__(self) -> None: - """Init test provider.""" - self.calls: list[tuple[SpeechMetadata, AsyncIterable[bytes]]] = [] - - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return ["de", "de-CH", "en"] - - @property - def supported_formats(self) -> list[AudioFormats]: - """Return a list of supported formats.""" - return [AudioFormats.WAV, AudioFormats.OGG] - - @property - def supported_codecs(self) -> list[AudioCodecs]: - """Return a list of supported codecs.""" - return [AudioCodecs.PCM, AudioCodecs.OPUS] - - @property - def supported_bit_rates(self) -> list[AudioBitRates]: - """Return a list of supported bitrates.""" - return [AudioBitRates.BITRATE_16] - - @property - def supported_sample_rates(self) -> list[AudioSampleRates]: - """Return a list of supported samplerates.""" - return [AudioSampleRates.SAMPLERATE_16000] - - @property - def supported_channels(self) -> list[AudioChannels]: - """Return a list of supported channels.""" - return [AudioChannels.CHANNEL_MONO] - - async def async_process_audio_stream( - self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] - ) -> SpeechResult: - """Process an audio stream.""" - self.calls.append((metadata, stream)) - if self.fail_process_audio: - return SpeechResult(None, SpeechResultState.ERROR) - - return SpeechResult("test_result", SpeechResultState.SUCCESS) - - -class MockProvider(BaseProvider, Provider): - """Mock provider.""" - - url_path = TEST_DOMAIN - - -class 
MockProviderEntity(BaseProvider, SpeechToTextEntity): - """Mock provider entity.""" - - url_path = "stt.test" - _attr_name = "test" - @pytest.fixture -def mock_provider() -> MockProvider: +def mock_provider() -> MockSTTProvider: """Test provider fixture.""" - return MockProvider() + return MockSTTProvider() @pytest.fixture -def mock_provider_entity() -> MockProviderEntity: +def mock_provider_entity() -> MockSTTProviderEntity: """Test provider entity fixture.""" - return MockProviderEntity() + return MockSTTProviderEntity() class STTFlow(ConfigFlow): @@ -148,14 +80,14 @@ async def setup_fixture( hass: HomeAssistant, tmp_path: Path, request: pytest.FixtureRequest, -) -> MockProvider | MockProviderEntity: +) -> MockSTTProvider | MockSTTProviderEntity: """Set up the test environment.""" - provider: MockProvider | MockProviderEntity + provider: MockSTTProvider | MockSTTProviderEntity if request.param == "mock_setup": - provider = MockProvider() + provider = MockSTTProvider() await mock_setup(hass, tmp_path, provider) elif request.param == "mock_config_entry_setup": - provider = MockProviderEntity() + provider = MockSTTProviderEntity() await mock_config_entry_setup(hass, tmp_path, provider) else: raise RuntimeError("Invalid setup fixture") @@ -166,7 +98,7 @@ async def setup_fixture( async def mock_setup( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, ) -> None: """Set up a test provider.""" mock_stt_platform( @@ -182,7 +114,7 @@ async def mock_setup( async def mock_config_entry_setup( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockProviderEntity, + mock_provider_entity: MockSTTProviderEntity, test_domain: str = TEST_DOMAIN, ) -> MockConfigEntry: """Set up a test provider via config entry.""" @@ -234,7 +166,7 @@ async def mock_config_entry_setup( async def test_get_provider_info( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test engine that doesn't exist.""" client = await hass_client() @@ -256,7 +188,7 @@ async def test_get_provider_info( async def test_non_existing_provider( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test streaming to engine that doesn't exist.""" client = await hass_client() @@ -282,7 +214,7 @@ async def test_non_existing_provider( async def test_stream_audio( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test streaming audio and getting response.""" client = await hass_client() @@ -343,7 +275,7 @@ async def test_metadata_errors( header: str | None, status: int, error: str, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test metadata errors.""" client = await hass_client() @@ -359,7 +291,7 @@ async def test_metadata_errors( async def test_get_provider( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, ) -> None: """Test we can get STT providers.""" await mock_setup(hass, tmp_path, mock_provider) @@ -370,7 +302,7 @@ async def test_get_provider( async def test_config_entry_unload( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: 
MockSTTProviderEntity ) -> None: """Test we can unload config entry.""" config_entry = await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) @@ -382,7 +314,7 @@ async def test_config_entry_unload( async def test_restore_state( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockProviderEntity, + mock_provider_entity: MockSTTProviderEntity, ) -> None: """Test we restore state in the integration.""" entity_id = f"{DOMAIN}.{TEST_DOMAIN}" @@ -399,15 +331,19 @@ async def test_restore_state( @pytest.mark.parametrize( - ("setup", "engine_id"), - [("mock_setup", "test"), ("mock_config_entry_setup", "stt.test")], + ("setup", "engine_id", "extra_data"), + [ + ("mock_setup", "test", {"name": "test"}), + ("mock_config_entry_setup", "stt.test", {}), + ], indirect=["setup"], ) async def test_ws_list_engines( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, engine_id: str, + extra_data: dict[str, str], ) -> None: """Test listing speech-to-text engines.""" client = await hass_ws_client() @@ -419,6 +355,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de", "de-CH", "en"]} + | extra_data ] } @@ -427,7 +364,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": []}] + "providers": [{"engine_id": engine_id, "supported_languages": []} | extra_data] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "en"}) @@ -435,7 +372,9 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["en"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["en"]} | extra_data + ] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "en-UK"}) @@ -443,7 +382,9 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["en"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["en"]} | extra_data + ] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "de"}) @@ -451,7 +392,10 @@ async def test_ws_list_engines( assert msg["type"] == "result" assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["de", "de-CH"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["de", "de-CH"]} + | extra_data + ] } await client.send_json_auto_id( @@ -461,7 +405,10 @@ async def test_ws_list_engines( assert msg["type"] == "result" assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["de-CH", "de"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["de-CH", "de"]} + | extra_data + ] } @@ -476,7 +423,7 @@ async def test_default_engine_none(hass: HomeAssistant, tmp_path: Path) -> None: async def test_default_engine( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, ) -> None: """Test async_default_engine.""" mock_stt_platform( @@ -492,7 +439,7 @@ async def test_default_engine( async def test_default_engine_entity( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: 
MockProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity ) -> None: """Test async_default_engine.""" await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) @@ -504,8 +451,8 @@ async def test_default_engine_entity( async def test_default_engine_prefer_entity( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockProviderEntity, - mock_provider: MockProvider, + mock_provider_entity: MockSTTProviderEntity, + mock_provider: MockSTTProvider, config_flow_test_domains: str, ) -> None: """Test async_default_engine. @@ -543,7 +490,7 @@ async def test_default_engine_prefer_entity( async def test_default_engine_prefer_cloud_entity( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, config_flow_test_domains: str, ) -> None: """Test async_default_engine. @@ -554,7 +501,7 @@ async def test_default_engine_prefer_cloud_entity( """ await mock_setup(hass, tmp_path, mock_provider) for domain in config_flow_test_domains: - entity = MockProviderEntity() + entity = MockSTTProviderEntity() entity.url_path = f"stt.{domain}" entity._attr_name = f"{domain} STT entity" await mock_config_entry_setup(hass, tmp_path, entity, test_domain=domain) @@ -572,9 +519,12 @@ async def test_default_engine_prefer_cloud_entity( assert provider_engine.name == "test" assert async_default_engine(hass) == "stt.cloud_stt_entity" + # Reset the `cloud` translations cache to avoid flaky translation checks + reset_translation_cache(hass, ["cloud"]) + async def test_get_engine_legacy( - hass: HomeAssistant, tmp_path: Path, mock_provider: MockProvider + hass: HomeAssistant, tmp_path: Path, mock_provider: MockSTTProvider ) -> None: """Test async_get_speech_to_text_engine.""" mock_stt_platform( @@ -599,7 +549,7 @@ async def test_get_engine_legacy( async def test_get_engine_entity( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity ) -> None: """Test async_get_speech_to_text_engine.""" await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 6abc544c92a..0b45546902b 100644 --- a/tests/components/subaru/test_config_flow.py +++ b/tests/components/subaru/test_config_flow.py @@ -136,6 +136,7 @@ async def test_user_form_pin_not_required( "data": deepcopy(TEST_CONFIG), "options": {}, "minor_version": 1, + "subentries": (), } expected["data"][CONF_PIN] = None @@ -341,6 +342,7 @@ async def test_pin_form_success(hass: HomeAssistant, pin_form) -> None: "data": TEST_CONFIG, "options": {}, "minor_version": 1, + "subentries": (), } result["data"][CONF_DEVICE_ID] = TEST_DEVICE_ID assert result == expected diff --git a/tests/components/suez_water/__init__.py b/tests/components/suez_water/__init__.py index 4605e06344a..a90df738454 100644 --- a/tests/components/suez_water/__init__.py +++ b/tests/components/suez_water/__init__.py @@ -1 +1,15 @@ """Tests for the Suez Water integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Init suez water integration.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/suez_water/conftest.py 
b/tests/components/suez_water/conftest.py index f218fb7d833..f634a053c65 100644 --- a/tests/components/suez_water/conftest.py +++ b/tests/components/suez_water/conftest.py @@ -3,8 +3,31 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from pysuez import AggregatedData, PriceResult +from pysuez.const import ATTRIBUTION import pytest +from homeassistant.components.suez_water.const import DOMAIN + +from tests.common import MockConfigEntry + +MOCK_DATA = { + "username": "test-username", + "password": "test-password", + "counter_id": "test-counter", +} + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Create mock config_entry needed by suez_water integration.""" + return MockConfigEntry( + unique_id=MOCK_DATA["username"], + domain=DOMAIN, + title="Suez mock device", + data=MOCK_DATA, + ) + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -13,3 +36,45 @@ def mock_setup_entry() -> Generator[AsyncMock]: "homeassistant.components.suez_water.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture(name="suez_client") +def mock_suez_client() -> Generator[AsyncMock]: + """Create mock for suez_water external api.""" + with ( + patch( + "homeassistant.components.suez_water.coordinator.SuezClient", autospec=True + ) as mock_client, + patch( + "homeassistant.components.suez_water.config_flow.SuezClient", + new=mock_client, + ), + ): + suez_client = mock_client.return_value + suez_client.check_credentials.return_value = True + + result = AggregatedData( + value=160, + current_month={ + "2024-01-01": 130, + "2024-01-02": 145, + }, + previous_month={ + "2024-12-01": 154, + "2024-12-02": 166, + }, + current_year=1500, + previous_year=1000, + attribution=ATTRIBUTION, + highest_monthly_consumption=2558, + history={ + "2024-01-01": 130, + "2024-01-02": 145, + "2024-12-01": 154, + "2024-12-02": 166, + }, + ) + + suez_client.fetch_aggregated_data.return_value = result + suez_client.get_price.return_value = PriceResult("4.74") + yield suez_client diff --git a/tests/components/suez_water/snapshots/test_sensor.ambr b/tests/components/suez_water/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..da0ed3df7dd --- /dev/null +++ b/tests/components/suez_water/snapshots/test_sensor.ambr @@ -0,0 +1,116 @@ +# serializer version: 1 +# name: test_sensors_valid_state[sensor.suez_mock_device_water_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.suez_mock_device_water_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water price', + 'platform': 'suez_water', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_price', + 'unique_id': 'test-counter_water_price', + 'unit_of_measurement': '€', + }) +# --- +# name: test_sensors_valid_state[sensor.suez_mock_device_water_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by toutsurmoneau.fr', + 'device_class': 'monetary', + 'friendly_name': 'Suez mock device Water price', + 'unit_of_measurement': '€', + }), + 'context': , + 'entity_id': 'sensor.suez_mock_device_water_price', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.74', + }) +# --- +# name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.suez_mock_device_water_usage_yesterday', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water usage yesterday', + 'platform': 'suez_water', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_usage_yesterday', + 'unique_id': 'test-counter_water_usage_yesterday', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by toutsurmoneau.fr', + 'device_class': 'water', + 'friendly_name': 'Suez mock device Water usage yesterday', + 'highest_monthly_consumption': 2558, + 'history': dict({ + '2024-01-01': 130, + '2024-01-02': 145, + '2024-12-01': 154, + '2024-12-02': 166, + }), + 'last_year_overall': 1000, + 'previous_month_consumption': dict({ + '2024-12-01': 154, + '2024-12-02': 166, + }), + 'this_month_consumption': dict({ + '2024-01-01': 130, + '2024-01-02': 145, + }), + 'this_year_overall': 1500, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.suez_mock_device_water_usage_yesterday', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '160', + }) +# --- diff --git a/tests/components/suez_water/test_config_flow.py b/tests/components/suez_water/test_config_flow.py index 3170a6779f0..6779b4c7d02 100644 --- a/tests/components/suez_water/test_config_flow.py +++ b/tests/components/suez_water/test_config_flow.py @@ -1,25 +1,23 @@ """Test the Suez Water config flow.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock -from pysuez.client import PySuezError +from pysuez.exception import PySuezError import pytest from homeassistant import config_entries -from homeassistant.components.suez_water.const import DOMAIN +from homeassistant.components.suez_water.const import CONF_COUNTER_ID, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import MOCK_DATA + from tests.common import MockConfigEntry -MOCK_DATA = { - "username": "test-username", - "password": "test-password", - "counter_id": "test-counter", -} - -async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: +async def test_form( + hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -27,12 +25,11 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch("homeassistant.components.suez_water.config_flow.SuezClient"): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + 
MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -42,37 +39,28 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: async def test_form_invalid_auth( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock ) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "homeassistant.components.suez_water.config_flow.SuezClient.__init__", - return_value=None, - ), - patch( - "homeassistant.components.suez_water.config_flow.SuezClient.check_credentials", - return_value=False, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + suez_client.check_credentials.return_value = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} - with patch("homeassistant.components.suez_water.config_flow.SuezClient"): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + suez_client.check_credentials.return_value = True + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -108,34 +96,71 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: ("exception", "error"), [(PySuezError, "cannot_connect"), (Exception, "unknown")] ) async def test_form_error( - hass: HomeAssistant, mock_setup_entry: AsyncMock, exception: Exception, error: str + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + exception: Exception, + suez_client: AsyncMock, + error: str, ) -> None: """Test we handle errors.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "homeassistant.components.suez_water.config_flow.SuezClient", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + suez_client.check_credentials.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": error} - with patch( - "homeassistant.components.suez_water.config_flow.SuezClient", - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + suez_client.check_credentials.return_value = True + suez_client.check_credentials.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" assert result["data"] == MOCK_DATA assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_auto_counter( + hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock +) -> None: + """Test form set counter if not set by user.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] 
== {} + + partial_form = {**MOCK_DATA} + partial_form.pop(CONF_COUNTER_ID) + suez_client.find_counter.side_effect = PySuezError("test counter not found") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + partial_form, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "counter_not_found"} + + suez_client.find_counter.side_effect = None + suez_client.find_counter.return_value = MOCK_DATA[CONF_COUNTER_ID] + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + partial_form, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["result"].unique_id == "test-username" + assert result["data"] == MOCK_DATA + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/suez_water/test_init.py b/tests/components/suez_water/test_init.py new file mode 100644 index 00000000000..78d086af38f --- /dev/null +++ b/tests/components/suez_water/test_init.py @@ -0,0 +1,37 @@ +"""Test Suez_water integration initialization.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.suez_water.coordinator import PySuezError +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_initialization_invalid_credentials( + hass: HomeAssistant, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that suez_water can't be loaded with invalid credentials.""" + + suez_client.check_credentials.return_value = False + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_initialization_setup_api_error( + hass: HomeAssistant, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that suez_water needs to retry loading if api failed to connect.""" + + suez_client.check_credentials.side_effect = PySuezError("Test failure") + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/suez_water/test_sensor.py b/tests/components/suez_water/test_sensor.py new file mode 100644 index 00000000000..cb578432f62 --- /dev/null +++ b/tests/components/suez_water/test_sensor.py @@ -0,0 +1,67 @@ +"""Test Suez_water sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.suez_water.const import DATA_REFRESH_INTERVAL +from homeassistant.components.suez_water.coordinator import PySuezError +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_sensors_valid_state( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test that suez_water sensor is loaded and in a valid state.""" + with patch("homeassistant.components.suez_water.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize("method", [("fetch_aggregated_data"), ("get_price")]) +async def test_sensors_failed_update( + hass: HomeAssistant, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + method: str, +) -> None: + """Test that suez_water sensor reflect failure when api fails.""" + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + entity_ids = await hass.async_add_executor_job(hass.states.entity_ids) + assert len(entity_ids) == 2 + + for entity in entity_ids: + state = hass.states.get(entity) + assert entity + assert state.state != STATE_UNAVAILABLE + + getattr(suez_client, method).side_effect = PySuezError("Should fail to update") + + freezer.tick(DATA_REFRESH_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(True) + + for entity in entity_ids: + state = hass.states.get(entity) + assert entity + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/sunweg/test_config_flow.py b/tests/components/sunweg/test_config_flow.py index 80b6a946749..8103003d7fb 100644 --- a/tests/components/sunweg/test_config_flow.py +++ b/tests/components/sunweg/test_config_flow.py @@ -69,14 +69,7 @@ async def test_reauth(hass: HomeAssistant, plant_fixture, inverter_fixture) -> N assert entries[0].data[CONF_USERNAME] == SUNWEG_MOCK_ENTRY.data[CONF_USERNAME] assert entries[0].data[CONF_PASSWORD] == SUNWEG_MOCK_ENTRY.data[CONF_PASSWORD] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/sunweg/test_init.py b/tests/components/sunweg/test_init.py index 41edda38a5a..6cbe38a128b 100644 --- a/tests/components/sunweg/test_init.py +++ b/tests/components/sunweg/test_init.py @@ -7,7 +7,7 @@ from sunweg.api import APIHelper, SunWegApiError from homeassistant.components.sunweg import SunWEGData from homeassistant.components.sunweg.const import DOMAIN, DeviceType -from homeassistant.components.sunweg.sensor_types.sensor_entity_description import ( +from homeassistant.components.sunweg.sensor.sensor_entity_description import ( SunWEGSensorEntityDescription, ) from homeassistant.config_entries import ConfigEntryState diff --git a/tests/components/surepetcare/test_config_flow.py b/tests/components/surepetcare/test_config_flow.py index c3c13195aca..1140a2c54ef 100644 --- a/tests/components/surepetcare/test_config_flow.py +++ b/tests/components/surepetcare/test_config_flow.py @@ -6,6 +6,7 @@ from surepy.exceptions import SurePetcareAuthenticationError, SurePetcareError from homeassistant import config_entries 
from homeassistant.components.surepetcare.const import DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -24,7 +25,7 @@ async def test_form(hass: HomeAssistant, surepetcare: NonCallableMagicMock) -> N DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["errors"] is None + assert not result["errors"] with patch( "homeassistant.components.surepetcare.async_setup_entry", @@ -146,42 +147,44 @@ async def test_flow_entry_already_exists( assert result["reason"] == "already_configured" -async def test_reauthentication(hass: HomeAssistant) -> None: +async def test_reauthentication( + hass: HomeAssistant, surepetcare: NonCallableMagicMock +) -> None: """Test surepetcare reauthentication.""" old_entry = MockConfigEntry( domain="surepetcare", - data=INPUT_DATA, + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_TOKEN: "token", + }, unique_id="test-username", ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} assert result["step_id"] == "reauth_confirm" - with patch( - "homeassistant.components.surepetcare.config_flow.surepy.client.SureAPIClient.get_token", - return_value={"token": "token"}, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"password": "test-password"}, - ) - await hass.async_block_till_done() + surepetcare.get_token.return_value = "token2" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"password": "test-password2"}, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" + assert old_entry.data == { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password2", + CONF_TOKEN: "token2", + } + async def test_reauthentication_failure(hass: HomeAssistant) -> None: """Test surepetcare reauthentication failure.""" @@ -192,15 +195,7 @@ async def test_reauthentication_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -230,15 +225,7 @@ async def test_reauthentication_cannot_connect(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -268,15 +255,7 @@ async def test_reauthentication_unknown_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - 
"unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/swiss_public_transport/fixtures/connections.json b/tests/components/swiss_public_transport/fixtures/connections.json index 4edead56f14..7e61206c366 100644 --- a/tests/components/swiss_public_transport/fixtures/connections.json +++ b/tests/components/swiss_public_transport/fixtures/connections.json @@ -5,7 +5,8 @@ "platform": 0, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:04:00+0100", @@ -13,7 +14,8 @@ "platform": 1, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": null }, { "departure": "2024-01-06T18:05:00+0100", @@ -21,7 +23,8 @@ "platform": 2, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:06:00+0100", @@ -29,7 +32,8 @@ "platform": 3, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:07:00+0100", @@ -37,7 +41,8 @@ "platform": 4, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:08:00+0100", @@ -45,7 +50,8 @@ "platform": 5, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:09:00+0100", @@ -53,7 +59,8 @@ "platform": 6, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:10:00+0100", @@ -61,7 +68,8 @@ "platform": 7, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:11:00+0100", @@ -69,7 +77,8 @@ "platform": 8, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:12:00+0100", @@ -77,7 +86,8 @@ "platform": 9, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:13:00+0100", @@ -85,15 +95,17 @@ "platform": 10, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { - "departure": "2024-01-06T18:14:00+0100", + "departure": "invalid", "number": 11, "platform": 11, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:15:00+0100", @@ -101,7 +113,8 @@ "platform": 12, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:16:00+0100", @@ -109,7 +122,8 @@ "platform": 13, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:17:00+0100", @@ -117,7 +131,8 @@ "platform": 14, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" }, { "departure": "2024-01-06T18:18:00+0100", @@ -125,6 +140,7 @@ "platform": 15, "transfers": 0, "duration": "10", - "delay": 0 + "delay": 0, + "line": "T10" } ] diff --git a/tests/components/swiss_public_transport/test_config_flow.py b/tests/components/swiss_public_transport/test_config_flow.py index 027336e28a6..7c17b0d4c30 100644 --- a/tests/components/swiss_public_transport/test_config_flow.py +++ b/tests/components/swiss_public_transport/test_config_flow.py @@ -12,6 +12,10 @@ from homeassistant.components.swiss_public_transport import config_flow from homeassistant.components.swiss_public_transport.const import ( CONF_DESTINATION, CONF_START, + 
CONF_TIME_FIXED, + CONF_TIME_MODE, + CONF_TIME_OFFSET, + CONF_TIME_STATION, CONF_VIA, MAX_VIA, ) @@ -23,40 +27,86 @@ from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -MOCK_DATA_STEP = { +MOCK_USER_DATA_STEP = { CONF_START: "test_start", CONF_DESTINATION: "test_destination", + CONF_TIME_STATION: "departure", + CONF_TIME_MODE: "now", } -MOCK_DATA_STEP_ONE_VIA = { - **MOCK_DATA_STEP, +MOCK_USER_DATA_STEP_ONE_VIA = { + **MOCK_USER_DATA_STEP, CONF_VIA: ["via_station"], } -MOCK_DATA_STEP_MANY_VIA = { - **MOCK_DATA_STEP, +MOCK_USER_DATA_STEP_MANY_VIA = { + **MOCK_USER_DATA_STEP, CONF_VIA: ["via_station_1", "via_station_2", "via_station_3"], } -MOCK_DATA_STEP_TOO_MANY_STATIONS = { - **MOCK_DATA_STEP, - CONF_VIA: MOCK_DATA_STEP_ONE_VIA[CONF_VIA] * (MAX_VIA + 1), +MOCK_USER_DATA_STEP_TOO_MANY_STATIONS = { + **MOCK_USER_DATA_STEP, + CONF_VIA: MOCK_USER_DATA_STEP_ONE_VIA[CONF_VIA] * (MAX_VIA + 1), +} + +MOCK_USER_DATA_STEP_ARRIVAL = { + **MOCK_USER_DATA_STEP, + CONF_TIME_STATION: "arrival", +} + +MOCK_USER_DATA_STEP_TIME_FIXED = { + **MOCK_USER_DATA_STEP, + CONF_TIME_MODE: "fixed", +} + +MOCK_USER_DATA_STEP_TIME_FIXED_OFFSET = { + **MOCK_USER_DATA_STEP, + CONF_TIME_MODE: "offset", +} + +MOCK_USER_DATA_STEP_BAD = { + **MOCK_USER_DATA_STEP, + CONF_TIME_MODE: "bad", +} + +MOCK_ADVANCED_DATA_STEP_TIME = { + CONF_TIME_FIXED: "18:03:00", +} + +MOCK_ADVANCED_DATA_STEP_TIME_OFFSET = { + CONF_TIME_OFFSET: {"hours": 0, "minutes": 10, "seconds": 0}, } @pytest.mark.parametrize( - ("user_input", "config_title"), + ("user_input", "time_mode_input", "config_title"), [ - (MOCK_DATA_STEP, "test_start test_destination"), - (MOCK_DATA_STEP_ONE_VIA, "test_start test_destination via via_station"), + (MOCK_USER_DATA_STEP, None, "test_start test_destination"), ( - MOCK_DATA_STEP_MANY_VIA, + MOCK_USER_DATA_STEP_ONE_VIA, + None, + "test_start test_destination via via_station", + ), + ( + MOCK_USER_DATA_STEP_MANY_VIA, + None, "test_start test_destination via via_station_1, via_station_2, via_station_3", ), + (MOCK_USER_DATA_STEP_ARRIVAL, None, "test_start test_destination arrival"), + ( + MOCK_USER_DATA_STEP_TIME_FIXED, + MOCK_ADVANCED_DATA_STEP_TIME, + "test_start test_destination at 18:03:00", + ), + ( + MOCK_USER_DATA_STEP_TIME_FIXED_OFFSET, + MOCK_ADVANCED_DATA_STEP_TIME_OFFSET, + "test_start test_destination in 00:10:00", + ), ], ) async def test_flow_user_init_data_success( - hass: HomeAssistant, user_input, config_title + hass: HomeAssistant, user_input, time_mode_input, config_title ) -> None: """Test success response.""" result = await hass.config_entries.flow.async_init( @@ -66,48 +116,56 @@ async def test_flow_user_init_data_success( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["handler"] == "swiss_public_transport" - assert result["data_schema"] == config_flow.DATA_SCHEMA + assert result["data_schema"] == config_flow.USER_DATA_SCHEMA with patch( "homeassistant.components.swiss_public_transport.config_flow.OpendataTransport.async_get_data", autospec=True, return_value=True, ): - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, context={"source": "user"} - ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, ) + if time_mode_input: + assert result["type"] == FlowResultType.FORM + if CONF_TIME_FIXED in time_mode_input: + assert result["step_id"] == "time_fixed" + if CONF_TIME_OFFSET in time_mode_input: + assert result["step_id"] == "time_offset" + 
result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=time_mode_input, + ) + assert result["type"] == FlowResultType.CREATE_ENTRY assert result["result"].title == config_title - assert result["data"] == user_input + assert result["data"] == {**user_input, **(time_mode_input or {})} @pytest.mark.parametrize( ("raise_error", "text_error", "user_input_error"), [ - (OpendataTransportConnectionError(), "cannot_connect", MOCK_DATA_STEP), - (OpendataTransportError(), "bad_config", MOCK_DATA_STEP), - (None, "too_many_via_stations", MOCK_DATA_STEP_TOO_MANY_STATIONS), - (IndexError(), "unknown", MOCK_DATA_STEP), + (OpendataTransportConnectionError(), "cannot_connect", MOCK_USER_DATA_STEP), + (OpendataTransportError(), "bad_config", MOCK_USER_DATA_STEP), + (None, "too_many_via_stations", MOCK_USER_DATA_STEP_TOO_MANY_STATIONS), + (IndexError(), "unknown", MOCK_USER_DATA_STEP), ], ) -async def test_flow_user_init_data_error_and_recover( +async def test_flow_user_init_data_error_and_recover_on_step_1( hass: HomeAssistant, raise_error, text_error, user_input_error ) -> None: - """Test unknown errors.""" + """Test errors in user step.""" + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, context={"source": "user"} + ) with patch( "homeassistant.components.swiss_public_transport.config_flow.OpendataTransport.async_get_data", autospec=True, side_effect=raise_error, ) as mock_OpendataTransport: - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, context={"source": "user"} - ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input_error, @@ -121,13 +179,75 @@ async def test_flow_user_init_data_error_and_recover( mock_OpendataTransport.return_value = True result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=MOCK_DATA_STEP, + user_input=MOCK_USER_DATA_STEP, ) assert result["type"] == FlowResultType.CREATE_ENTRY assert result["result"].title == "test_start test_destination" - assert result["data"] == MOCK_DATA_STEP + assert result["data"] == MOCK_USER_DATA_STEP + + +@pytest.mark.parametrize( + ("raise_error", "text_error", "user_input"), + [ + ( + OpendataTransportConnectionError(), + "cannot_connect", + MOCK_ADVANCED_DATA_STEP_TIME, + ), + (OpendataTransportError(), "bad_config", MOCK_ADVANCED_DATA_STEP_TIME), + (IndexError(), "unknown", MOCK_ADVANCED_DATA_STEP_TIME), + ], +) +async def test_flow_user_init_data_error_and_recover_on_step_2( + hass: HomeAssistant, raise_error, text_error, user_input +) -> None: + """Test errors in time mode step.""" + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, context={"source": "user"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "swiss_public_transport" + assert result["data_schema"] == config_flow.USER_DATA_SCHEMA + + with patch( + "homeassistant.components.swiss_public_transport.config_flow.OpendataTransport.async_get_data", + autospec=True, + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_USER_DATA_STEP_TIME_FIXED, + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "time_fixed" + + with patch( + "homeassistant.components.swiss_public_transport.config_flow.OpendataTransport.async_get_data", + autospec=True, + side_effect=raise_error, + ) as mock_OpendataTransport: + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_OpendataTransport.side_effect = None + mock_OpendataTransport.return_value = True + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["result"].title == "test_start test_destination at 18:03:00" async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> None: @@ -135,8 +255,8 @@ async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> No entry = MockConfigEntry( domain=config_flow.DOMAIN, - data=MOCK_DATA_STEP, - unique_id=unique_id_from_config(MOCK_DATA_STEP), + data=MOCK_USER_DATA_STEP, + unique_id=unique_id_from_config(MOCK_USER_DATA_STEP), ) entry.add_to_hass(hass) @@ -151,7 +271,7 @@ async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> No result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=MOCK_DATA_STEP, + user_input=MOCK_USER_DATA_STEP, ) assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/swiss_public_transport/test_init.py b/tests/components/swiss_public_transport/test_init.py index 7ee8b696499..963f5e6fa40 100644 --- a/tests/components/swiss_public_transport/test_init.py +++ b/tests/components/swiss_public_transport/test_init.py @@ -7,6 +7,9 @@ import pytest from homeassistant.components.swiss_public_transport.const import ( CONF_DESTINATION, CONF_START, + CONF_TIME_FIXED, + CONF_TIME_OFFSET, + CONF_TIME_STATION, CONF_VIA, DOMAIN, ) @@ -28,6 +31,17 @@ MOCK_DATA_STEP_VIA = { CONF_VIA: ["via_station"], } +MOCK_DATA_STEP_TIME_FIXED = { + **MOCK_DATA_STEP_VIA, + CONF_TIME_FIXED: "18:03:00", +} + +MOCK_DATA_STEP_TIME_OFFSET = { + **MOCK_DATA_STEP_VIA, + CONF_TIME_OFFSET: {"hours": 0, "minutes": 10, "seconds": 0}, + CONF_TIME_STATION: "arrival", +} + CONNECTIONS = [ { "departure": "2024-01-06T18:03:00+0100", @@ -36,6 +50,7 @@ CONNECTIONS = [ "transfers": 0, "duration": "10", "delay": 0, + "line": "T10", }, { "departure": "2024-01-06T18:04:00+0100", @@ -44,6 +59,7 @@ CONNECTIONS = [ "transfers": 0, "duration": "10", "delay": 0, + "line": "T10", }, { "departure": "2024-01-06T18:05:00+0100", @@ -52,6 +68,7 @@ CONNECTIONS = [ "transfers": 0, "duration": "10", "delay": 0, + "line": "T10", }, ] @@ -67,6 +84,8 @@ CONNECTIONS = [ (1, 1, MOCK_DATA_STEP_BASE, "None_departure"), (1, 2, MOCK_DATA_STEP_BASE, None), (2, 1, MOCK_DATA_STEP_VIA, None), + (3, 1, MOCK_DATA_STEP_TIME_FIXED, None), + (3, 1, MOCK_DATA_STEP_TIME_OFFSET, None), ], ) async def test_migration_from( @@ -110,7 +129,7 @@ async def test_migration_from( ) # Check change in config entry and verify most recent version - assert config_entry.version == 2 + assert config_entry.version == 3 assert config_entry.minor_version == 1 assert config_entry.unique_id == unique_id @@ -127,7 +146,7 @@ async def test_migrate_error_from_future(hass: HomeAssistant) -> None: mock_entry = MockConfigEntry( domain=DOMAIN, - version=3, + version=4, minor_version=1, unique_id="some_crazy_future_unique_id", data=MOCK_DATA_STEP_BASE, diff --git a/tests/components/switch/test_init.py b/tests/components/switch/test_init.py index 989b10c11d6..f52c455dabd 100644 --- a/tests/components/switch/test_init.py +++ b/tests/components/switch/test_init.py @@ -11,12 +11,7 @@ from homeassistant.setup import 
async_setup_component from . import common from .common import MockSwitch -from tests.common import ( - MockUser, - help_test_all, - import_and_test_deprecated_constant_enum, - setup_test_component_platform, -) +from tests.common import MockUser, setup_test_component_platform @pytest.fixture(autouse=True) @@ -87,19 +82,3 @@ async def test_switch_context( assert state2 is not None assert state.state != state2.state assert state2.context.user_id == hass_admin_user.id - - -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(switch) - - -@pytest.mark.parametrize(("enum"), list(switch.SwitchDeviceClass)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: switch.SwitchDeviceClass, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, switch, enum, "DEVICE_CLASS_", "2025.1" - ) diff --git a/tests/components/switch_as_x/__init__.py b/tests/components/switch_as_x/__init__.py index de6f1bac790..2addb832462 100644 --- a/tests/components/switch_as_x/__init__.py +++ b/tests/components/switch_as_x/__init__.py @@ -1,14 +1,7 @@ """The tests for Switch as X platforms.""" -from homeassistant.const import ( - STATE_CLOSED, - STATE_LOCKED, - STATE_OFF, - STATE_ON, - STATE_OPEN, - STATE_UNLOCKED, - Platform, -) +from homeassistant.components.lock import LockState +from homeassistant.const import STATE_CLOSED, STATE_OFF, STATE_ON, STATE_OPEN, Platform PLATFORMS_TO_TEST = ( Platform.COVER, @@ -24,7 +17,7 @@ STATE_MAP = { Platform.COVER: {STATE_ON: STATE_OPEN, STATE_OFF: STATE_CLOSED}, Platform.FAN: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, Platform.LIGHT: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, - Platform.LOCK: {STATE_ON: STATE_UNLOCKED, STATE_OFF: STATE_LOCKED}, + Platform.LOCK: {STATE_ON: LockState.UNLOCKED, STATE_OFF: LockState.LOCKED}, Platform.SIREN: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, Platform.VALVE: {STATE_ON: STATE_OPEN, STATE_OFF: STATE_CLOSED}, }, @@ -32,7 +25,7 @@ STATE_MAP = { Platform.COVER: {STATE_ON: STATE_CLOSED, STATE_OFF: STATE_OPEN}, Platform.FAN: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, Platform.LIGHT: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, - Platform.LOCK: {STATE_ON: STATE_LOCKED, STATE_OFF: STATE_UNLOCKED}, + Platform.LOCK: {STATE_ON: LockState.LOCKED, STATE_OFF: LockState.UNLOCKED}, Platform.SIREN: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, Platform.VALVE: {STATE_ON: STATE_CLOSED, STATE_OFF: STATE_OPEN}, }, diff --git a/tests/components/switch_as_x/test_cover.py b/tests/components/switch_as_x/test_cover.py index 78a76c20beb..acb382a635a 100644 --- a/tests/components/switch_as_x/test_cover.py +++ b/tests/components/switch_as_x/test_cover.py @@ -1,6 +1,6 @@ """Tests for the Switch as X Cover platform.""" -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverState from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.switch_as_x.config_flow import SwitchAsXConfigFlowHandler from homeassistant.components.switch_as_x.const import ( @@ -15,10 +15,8 @@ from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_CLOSED, STATE_OFF, STATE_ON, - STATE_OPEN, Platform, ) from homeassistant.core import HomeAssistant @@ -71,7 +69,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert 
hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -81,7 +79,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( COVER_DOMAIN, @@ -91,7 +89,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -101,7 +99,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -111,7 +109,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -121,7 +119,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -131,7 +129,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN async def test_service_calls_inverted(hass: HomeAssistant) -> None: @@ -154,7 +152,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( COVER_DOMAIN, @@ -164,7 +162,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -174,7 +172,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -184,7 +182,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - 
assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -194,7 +192,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -204,7 +202,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -214,4 +212,4 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED diff --git a/tests/components/switch_as_x/test_init.py b/tests/components/switch_as_x/test_init.py index e250cacb7ac..cd80fab69bc 100644 --- a/tests/components/switch_as_x/test_init.py +++ b/tests/components/switch_as_x/test_init.py @@ -7,6 +7,7 @@ from unittest.mock import patch import pytest from homeassistant.components.homeassistant import exposed_entities +from homeassistant.components.lock import LockState from homeassistant.components.switch_as_x.config_flow import SwitchAsXConfigFlowHandler from homeassistant.components.switch_as_x.const import ( CONF_INVERT, @@ -17,11 +18,9 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( CONF_ENTITY_ID, STATE_CLOSED, - STATE_LOCKED, STATE_OFF, STATE_ON, STATE_OPEN, - STATE_UNLOCKED, EntityCategory, Platform, ) @@ -74,7 +73,7 @@ async def test_config_entry_unregistered_uuid( (Platform.COVER, STATE_OPEN, STATE_CLOSED), (Platform.FAN, STATE_ON, STATE_OFF), (Platform.LIGHT, STATE_ON, STATE_OFF), - (Platform.LOCK, STATE_UNLOCKED, STATE_LOCKED), + (Platform.LOCK, LockState.UNLOCKED, LockState.LOCKED), (Platform.SIREN, STATE_ON, STATE_OFF), (Platform.VALVE, STATE_OPEN, STATE_CLOSED), ], diff --git a/tests/components/switch_as_x/test_light.py b/tests/components/switch_as_x/test_light.py index 5e48b7db965..5f724a2d7e7 100644 --- a/tests/components/switch_as_x/test_light.py +++ b/tests/components/switch_as_x/test_light.py @@ -3,7 +3,7 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, @@ -57,7 +57,7 @@ async def test_default_state(hass: HomeAssistant) -> None: assert state.attributes["supported_features"] == 0 assert state.attributes.get(ATTR_BRIGHTNESS) is None assert state.attributes.get(ATTR_HS_COLOR) is None - assert state.attributes.get(ATTR_COLOR_TEMP) is None + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None assert state.attributes.get(ATTR_EFFECT_LIST) is None assert state.attributes.get(ATTR_EFFECT) is None assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ColorMode.ONOFF] diff --git a/tests/components/switch_as_x/test_lock.py b/tests/components/switch_as_x/test_lock.py index f7d61cf6895..c2a0806778d 100644 --- a/tests/components/switch_as_x/test_lock.py +++ 
b/tests/components/switch_as_x/test_lock.py @@ -1,6 +1,6 @@ """Tests for the Switch as X Lock platform.""" -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.switch_as_x.config_flow import SwitchAsXConfigFlowHandler from homeassistant.components.switch_as_x.const import ( @@ -15,10 +15,8 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_UNLOCK, - STATE_LOCKED, STATE_OFF, STATE_ON, - STATE_UNLOCKED, Platform, ) from homeassistant.core import HomeAssistant @@ -70,7 +68,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -80,7 +78,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -90,7 +88,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -100,7 +98,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -110,7 +108,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -120,7 +118,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED async def test_service_calls_inverted(hass: HomeAssistant) -> None: @@ -143,7 +141,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -153,7 +151,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED await hass.services.async_call( 
LOCK_DOMAIN, @@ -163,7 +161,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -173,7 +171,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -183,7 +181,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -193,4 +191,4 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED + assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED diff --git a/tests/components/switch_as_x/test_valve.py b/tests/components/switch_as_x/test_valve.py index 854f693404f..6f6ef719ae1 100644 --- a/tests/components/switch_as_x/test_valve.py +++ b/tests/components/switch_as_x/test_valve.py @@ -7,7 +7,7 @@ from homeassistant.components.switch_as_x.const import ( CONF_TARGET_DOMAIN, DOMAIN, ) -from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN +from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN, ValveState from homeassistant.const import ( CONF_ENTITY_ID, SERVICE_CLOSE_VALVE, @@ -15,10 +15,8 @@ from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_CLOSED, STATE_OFF, STATE_ON, - STATE_OPEN, Platform, ) from homeassistant.core import HomeAssistant @@ -71,7 +69,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("valve.decorative_lights").state == STATE_OPEN + assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -81,7 +79,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED + assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED await hass.services.async_call( VALVE_DOMAIN, @@ -91,7 +89,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == STATE_OPEN + assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -101,7 +99,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED + assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED await 
hass.services.async_call( SWITCH_DOMAIN, @@ -111,7 +109,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == STATE_OPEN + assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -121,7 +119,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED + assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -131,7 +129,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == STATE_OPEN + assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN async def test_service_calls_inverted(hass: HomeAssistant) -> None: @@ -154,7 +152,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED + assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED await hass.services.async_call( VALVE_DOMAIN, @@ -164,7 +162,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == STATE_OPEN + assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -174,7 +172,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == STATE_OPEN + assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -184,7 +182,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED + assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -194,7 +192,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED + assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -204,7 +202,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == STATE_OPEN + assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -214,4 +212,4 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED + assert 
hass.states.get("valve.decorative_lights").state == ValveState.CLOSED diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index b2a8445546e..bd3985ff062 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -205,3 +205,28 @@ NOT_SWITCHBOT_INFO = BluetoothServiceInfoBleak( connectable=True, tx_power=-127, ) + + +WOMETERTHPC_SERVICE_INFO = BluetoothServiceInfoBleak( + name="WoTHPc", + manufacturer_data={ + 2409: b"\xb0\xe9\xfeT2\x15\xb7\xe4\x07\x9b\xa4\x007\x02\xd5\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"5\x00d"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:AA", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="WoTHPc", + manufacturer_data={ + 2409: b"\xb0\xe9\xfeT2\x15\xb7\xe4\x07\x9b\xa4\x007\x02\xd5\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"5\x00d"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:AA", "WoTHPc"), + time=0, + connectable=True, + tx_power=-127, +) diff --git a/tests/components/switchbot/test_config_flow.py b/tests/components/switchbot/test_config_flow.py index 182e9457f22..b0fba2a5f18 100644 --- a/tests/components/switchbot/test_config_flow.py +++ b/tests/components/switchbot/test_config_flow.py @@ -7,6 +7,7 @@ from switchbot import SwitchbotAccountConnectionError, SwitchbotAuthenticationEr from homeassistant.components.switchbot.const import ( CONF_ENCRYPTION_KEY, CONF_KEY_ID, + CONF_LOCK_NIGHTLATCH, CONF_RETRY_COUNT, ) from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER @@ -782,3 +783,65 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 assert entry.options[CONF_RETRY_COUNT] == 6 + + +async def test_options_flow_lock_pro(hass: HomeAssistant) -> None: + """Test updating options for a lock pro device.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_NAME: "test-name", + CONF_PASSWORD: "test-password", + CONF_SENSOR_TYPE: "lock_pro", + }, + options={CONF_RETRY_COUNT: 10}, + unique_id="aabbccddeeff", + ) + entry.add_to_hass(hass) + + # Test that force night_latch is disabled by default. + with patch_async_setup_entry() as mock_setup_entry: + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"] is None + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_RETRY_COUNT: 3, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_LOCK_NIGHTLATCH] is False + + assert len(mock_setup_entry.mock_calls) == 1 + + # Test setting force night_latch to enabled.
+ + with patch_async_setup_entry() as mock_setup_entry: + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"] is None + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_LOCK_NIGHTLATCH: True, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_LOCK_NIGHTLATCH] is True + + assert len(mock_setup_entry.mock_calls) == 0 + + assert entry.options[CONF_LOCK_NIGHTLATCH] is True diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 030a477596c..3adeaef936c 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -15,7 +15,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from . import WOHAND_SERVICE_INFO +from . import WOHAND_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO from tests.common import MockConfigEntry from tests.components.bluetooth import inject_bluetooth_service_info @@ -59,3 +59,49 @@ async def test_sensors(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_co2_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the co2 sensor for a WoTHPc.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, WOMETERTHPC_SERVICE_INFO) + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "AA:BB:CC:DD:EE:AA", + CONF_NAME: "test-name", + CONF_PASSWORD: "test-password", + CONF_SENSOR_TYPE: "hygrometer_co2", + }, + unique_id="aabbccddeeaa", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 5 + + battery_sensor = hass.states.get("sensor.test_name_battery") + battery_sensor_attrs = battery_sensor.attributes + assert battery_sensor.state == "100" + assert battery_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Battery" + assert battery_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert battery_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal") + rssi_sensor_attrs = rssi_sensor.attributes + assert rssi_sensor.state == "-60" + assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal" + assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm" + + co2_sensor = hass.states.get("sensor.test_name_carbon_dioxide") + co2_sensor_attrs = co2_sensor.attributes + assert co2_sensor.state == "725" + assert co2_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Carbon dioxide" + assert co2_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "ppm" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/switchbot_cloud/conftest.py b/tests/components/switchbot_cloud/conftest.py index b559930dedb..09c953da06b 100644 --- a/tests/components/switchbot_cloud/conftest.py +++ b/tests/components/switchbot_cloud/conftest.py @@ -5,6 +5,8 @@ from unittest.mock import AsyncMock, patch import pytest +from homeassistant.components.switchbot_cloud import SwitchBotAPI + @pytest.fixture def 
mock_setup_entry() -> Generator[AsyncMock]: @@ -14,3 +16,17 @@ def mock_setup_entry() -> Generator[AsyncMock]: return_value=True, ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture +def mock_list_devices(): + """Mock list_devices.""" + with patch.object(SwitchBotAPI, "list_devices") as mock_list_devices: + yield mock_list_devices + + +@pytest.fixture +def mock_get_status(): + """Mock get_status.""" + with patch.object(SwitchBotAPI, "get_status") as mock_get_status: + yield mock_get_status diff --git a/tests/components/switchbot_cloud/test_init.py b/tests/components/switchbot_cloud/test_init.py index 25ea370efe5..43431ae04c0 100644 --- a/tests/components/switchbot_cloud/test_init.py +++ b/tests/components/switchbot_cloud/test_init.py @@ -50,6 +50,18 @@ async def test_setup_entry_success( remoteType="DIY Plug", hubDeviceId="test-hub-id", ), + Remote( + deviceId="meter-pro-1", + deviceName="meter-pro-name-1", + deviceType="MeterPro(CO2)", + hubDeviceId="test-hub-id", + ), + Remote( + deviceId="hub2-1", + deviceName="hub2-name-1", + deviceType="Hub 2", + hubDeviceId="test-hub-id", + ), ] mock_get_status.return_value = {"power": PowerState.ON.value} entry = configure_integration(hass) diff --git a/tests/components/switchbot_cloud/test_lock.py b/tests/components/switchbot_cloud/test_lock.py new file mode 100644 index 00000000000..a09d7241794 --- /dev/null +++ b/tests/components/switchbot_cloud/test_lock.py @@ -0,0 +1,48 @@ +"""Test for the switchbot_cloud lock.""" + +from unittest.mock import patch + +from switchbot_api import Device + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.components.switchbot_cloud import SwitchBotAPI +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_LOCK, SERVICE_UNLOCK +from homeassistant.core import HomeAssistant + +from . 
import configure_integration + + +async def test_lock(hass: HomeAssistant, mock_list_devices, mock_get_status) -> None: + """Test locking and unlocking.""" + mock_list_devices.return_value = [ + Device( + deviceId="lock-id-1", + deviceName="lock-1", + deviceType="Smart Lock", + hubDeviceId="test-hub-id", + ), + ] + + mock_get_status.return_value = {"lockState": "locked"} + + entry = configure_integration(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + lock_id = "lock.lock_1" + assert hass.states.get(lock_id).state == LockState.LOCKED + + with patch.object(SwitchBotAPI, "send_command"): + await hass.services.async_call( + LOCK_DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: lock_id}, blocking=True + ) + assert hass.states.get(lock_id).state == LockState.UNLOCKED + + with patch.object(SwitchBotAPI, "send_command"): + await hass.services.async_call( + LOCK_DOMAIN, SERVICE_LOCK, {ATTR_ENTITY_ID: lock_id}, blocking=True + ) + assert hass.states.get(lock_id).state == LockState.LOCKED diff --git a/tests/components/switcher_kis/__init__.py b/tests/components/switcher_kis/__init__.py index 3f08afcbc9f..b9b44eb6d72 100644 --- a/tests/components/switcher_kis/__init__.py +++ b/tests/components/switcher_kis/__init__.py @@ -1,14 +1,23 @@ """Test cases and object for the Switcher integration tests.""" from homeassistant.components.switcher_kis.const import DOMAIN +from homeassistant.const import CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -async def init_integration(hass: HomeAssistant) -> MockConfigEntry: +async def init_integration( + hass: HomeAssistant, username: str | None = None, token: str | None = None +) -> MockConfigEntry: """Set up the Switcher integration in Home Assistant.""" - entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) + data = {} + if username is not None: + data[CONF_USERNAME] = username + if token is not None: + data[CONF_TOKEN] = token + + entry = MockConfigEntry(domain=DOMAIN, data=data, unique_id=DOMAIN) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/switcher_kis/conftest.py b/tests/components/switcher_kis/conftest.py index 2cf123af2b0..58172a6962d 100644 --- a/tests/components/switcher_kis/conftest.py +++ b/tests/components/switcher_kis/conftest.py @@ -60,19 +60,11 @@ def mock_api(): patchers = [ patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.connect", + "homeassistant.components.switcher_kis.entity.SwitcherApi.connect", new=api_mock, ), patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.disconnect", - new=api_mock, - ), - patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.connect", - new=api_mock, - ), - patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.disconnect", + "homeassistant.components.switcher_kis.entity.SwitcherApi.disconnect", new=api_mock, ), ] diff --git a/tests/components/switcher_kis/consts.py b/tests/components/switcher_kis/consts.py index ffeef64b5d7..defe970c674 100644 --- a/tests/components/switcher_kis/consts.py +++ b/tests/components/switcher_kis/consts.py @@ -3,9 +3,13 @@ from aioswitcher.device import ( DeviceState, DeviceType, + ShutterChildLock, ShutterDirection, + SwitcherDualShutterSingleLight, + SwitcherLight, SwitcherPowerPlug, SwitcherShutter, + SwitcherSingleShutterDualLight, SwitcherThermostat, SwitcherWaterHeater, 
ThermostatFanLevel, @@ -19,14 +23,29 @@ DUMMY_DEVICE_ID1 = "a123bc" DUMMY_DEVICE_ID2 = "cafe12" DUMMY_DEVICE_ID3 = "bada77" DUMMY_DEVICE_ID4 = "bbd164" +DUMMY_DEVICE_ID5 = "bcdb64" +DUMMY_DEVICE_ID6 = "bcdc64" +DUMMY_DEVICE_ID7 = "bcdd64" +DUMMY_DEVICE_ID8 = "bcde64" +DUMMY_DEVICE_ID9 = "bcdf64" DUMMY_DEVICE_KEY1 = "18" DUMMY_DEVICE_KEY2 = "01" DUMMY_DEVICE_KEY3 = "12" DUMMY_DEVICE_KEY4 = "07" +DUMMY_DEVICE_KEY5 = "15" +DUMMY_DEVICE_KEY6 = "16" +DUMMY_DEVICE_KEY7 = "17" +DUMMY_DEVICE_KEY8 = "18" +DUMMY_DEVICE_KEY9 = "19" DUMMY_DEVICE_NAME1 = "Plug 23BC" DUMMY_DEVICE_NAME2 = "Heater FE12" DUMMY_DEVICE_NAME3 = "Breeze AB39" DUMMY_DEVICE_NAME4 = "Runner DD77" +DUMMY_DEVICE_NAME5 = "RunnerS11 6CF5" +DUMMY_DEVICE_NAME6 = "RunnerS12 A9BE" +DUMMY_DEVICE_NAME7 = "Light 36BB" +DUMMY_DEVICE_NAME8 = "Light 36CB" +DUMMY_DEVICE_NAME9 = "Light 36DB" DUMMY_DEVICE_PASSWORD = "12345678" DUMMY_ELECTRIC_CURRENT1 = 0.5 DUMMY_ELECTRIC_CURRENT2 = 12.8 @@ -34,14 +53,29 @@ DUMMY_IP_ADDRESS1 = "192.168.100.157" DUMMY_IP_ADDRESS2 = "192.168.100.158" DUMMY_IP_ADDRESS3 = "192.168.100.159" DUMMY_IP_ADDRESS4 = "192.168.100.160" +DUMMY_IP_ADDRESS5 = "192.168.100.161" +DUMMY_IP_ADDRESS6 = "192.168.100.162" +DUMMY_IP_ADDRESS7 = "192.168.100.163" +DUMMY_IP_ADDRESS8 = "192.168.100.164" +DUMMY_IP_ADDRESS9 = "192.168.100.165" DUMMY_MAC_ADDRESS1 = "A1:B2:C3:45:67:D8" DUMMY_MAC_ADDRESS2 = "A1:B2:C3:45:67:D9" DUMMY_MAC_ADDRESS3 = "A1:B2:C3:45:67:DA" DUMMY_MAC_ADDRESS4 = "A1:B2:C3:45:67:DB" +DUMMY_MAC_ADDRESS5 = "A1:B2:C3:45:67:DC" +DUMMY_MAC_ADDRESS6 = "A1:B2:C3:45:67:DD" +DUMMY_MAC_ADDRESS7 = "A1:B2:C3:45:67:DE" +DUMMY_MAC_ADDRESS8 = "A1:B2:C3:45:67:DF" +DUMMY_MAC_ADDRESS9 = "A1:B2:C3:45:67:DG" DUMMY_TOKEN_NEEDED1 = False DUMMY_TOKEN_NEEDED2 = False DUMMY_TOKEN_NEEDED3 = False DUMMY_TOKEN_NEEDED4 = False +DUMMY_TOKEN_NEEDED5 = True +DUMMY_TOKEN_NEEDED6 = True +DUMMY_TOKEN_NEEDED7 = True +DUMMY_TOKEN_NEEDED8 = True +DUMMY_TOKEN_NEEDED9 = True DUMMY_PHONE_ID = "1234" DUMMY_POWER_CONSUMPTION1 = 100 DUMMY_POWER_CONSUMPTION2 = 2780 @@ -53,8 +87,17 @@ DUMMY_TARGET_TEMPERATURE = 23 DUMMY_FAN_LEVEL = ThermostatFanLevel.LOW DUMMY_SWING = ThermostatSwing.OFF DUMMY_REMOTE_ID = "ELEC7001" -DUMMY_POSITION = 54 -DUMMY_DIRECTION = ShutterDirection.SHUTTER_STOP +DUMMY_POSITION = [54] +DUMMY_POSITION_2 = [54, 54] +DUMMY_DIRECTION = [ShutterDirection.SHUTTER_STOP] +DUMMY_DIRECTION_2 = [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP] +DUMMY_CHILD_LOCK = [ShutterChildLock.OFF] +DUMMY_CHILD_LOCK_2 = [ShutterChildLock.OFF, ShutterChildLock.OFF] +DUMMY_USERNAME = "email" +DUMMY_TOKEN = "zvVvd7JxtN7CgvkD1Psujw==" +DUMMY_LIGHT = [DeviceState.ON] +DUMMY_LIGHT_2 = [DeviceState.ON, DeviceState.ON] +DUMMY_LIGHT_3 = [DeviceState.ON, DeviceState.ON, DeviceState.ON] DUMMY_PLUG_DEVICE = SwitcherPowerPlug( DeviceType.POWER_PLUG, @@ -95,6 +138,37 @@ DUMMY_SHUTTER_DEVICE = SwitcherShutter( DUMMY_TOKEN_NEEDED4, DUMMY_POSITION, DUMMY_DIRECTION, + DUMMY_CHILD_LOCK, +) + +DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE = SwitcherSingleShutterDualLight( + DeviceType.RUNNER_S11, + DeviceState.ON, + DUMMY_DEVICE_ID5, + DUMMY_DEVICE_KEY5, + DUMMY_IP_ADDRESS5, + DUMMY_MAC_ADDRESS5, + DUMMY_DEVICE_NAME5, + DUMMY_TOKEN_NEEDED5, + DUMMY_POSITION, + DUMMY_DIRECTION, + DUMMY_CHILD_LOCK, + DUMMY_LIGHT_2, +) + +DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE = SwitcherDualShutterSingleLight( + DeviceType.RUNNER_S12, + DeviceState.ON, + DUMMY_DEVICE_ID6, + DUMMY_DEVICE_KEY6, + DUMMY_IP_ADDRESS6, + DUMMY_MAC_ADDRESS6, + DUMMY_DEVICE_NAME6, + DUMMY_TOKEN_NEEDED6, + DUMMY_POSITION_2, + 
DUMMY_DIRECTION_2, + DUMMY_CHILD_LOCK_2, + DUMMY_LIGHT, ) DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat( @@ -114,4 +188,46 @@ DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat( DUMMY_REMOTE_ID, ) +DUMMY_LIGHT_DEVICE = SwitcherLight( + DeviceType.LIGHT_SL01, + DeviceState.ON, + DUMMY_DEVICE_ID7, + DUMMY_DEVICE_KEY7, + DUMMY_IP_ADDRESS7, + DUMMY_MAC_ADDRESS7, + DUMMY_DEVICE_NAME7, + DUMMY_TOKEN_NEEDED7, + DUMMY_LIGHT, +) + +DUMMY_DUAL_LIGHT_DEVICE = SwitcherLight( + DeviceType.LIGHT_SL02, + DeviceState.ON, + DUMMY_DEVICE_ID8, + DUMMY_DEVICE_KEY8, + DUMMY_IP_ADDRESS8, + DUMMY_MAC_ADDRESS8, + DUMMY_DEVICE_NAME8, + DUMMY_TOKEN_NEEDED8, + DUMMY_LIGHT_2, +) + +DUMMY_TRIPLE_LIGHT_DEVICE = SwitcherLight( + DeviceType.LIGHT_SL03, + DeviceState.ON, + DUMMY_DEVICE_ID9, + DUMMY_DEVICE_KEY9, + DUMMY_IP_ADDRESS9, + DUMMY_MAC_ADDRESS9, + DUMMY_DEVICE_NAME9, + DUMMY_TOKEN_NEEDED9, + DUMMY_LIGHT_3, +) + DUMMY_SWITCHER_DEVICES = [DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE] + +DUMMY_SWITCHER_SENSORS_DEVICES = [ + DUMMY_PLUG_DEVICE, + DUMMY_WATER_HEATER_DEVICE, + DUMMY_THERMOSTAT_DEVICE, +] diff --git a/tests/components/switcher_kis/test_button.py b/tests/components/switcher_kis/test_button.py index d0604487370..6ebd82363e4 100644 --- a/tests/components/switcher_kis/test_button.py +++ b/tests/components/switcher_kis/test_button.py @@ -42,7 +42,7 @@ async def test_assume_button( assert hass.states.get(SWING_OFF_EID) is None with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( BUTTON_DOMAIN, @@ -79,7 +79,7 @@ async def test_swing_button( assert hass.states.get(SWING_OFF_EID) is not None with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( BUTTON_DOMAIN, @@ -103,7 +103,7 @@ async def test_control_device_fail( # Test exception during set hvac mode with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -130,7 +130,7 @@ async def test_control_device_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_climate.py b/tests/components/switcher_kis/test_climate.py index 5da9684bf2a..72a25d20d04 100644 --- a/tests/components/switcher_kis/test_climate.py +++ b/tests/components/switcher_kis/test_climate.py @@ -49,7 +49,7 @@ async def test_climate_hvac_mode( # Test set hvac mode heat with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -71,7 +71,7 @@ async def test_climate_hvac_mode( # Test set hvac mode off with patch( - 
"homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -98,13 +98,17 @@ async def test_climate_temperature( await init_integration(hass) assert mock_bridge + monkeypatch.setattr(DEVICE, "mode", ThermostatMode.HEAT) + mock_bridge.mock_callbacks([DEVICE]) + await hass.async_block_till_done() + # Test initial target temperature state = hass.states.get(ENTITY_ID) assert state.attributes["temperature"] == 23 # Test set target temperature with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -124,9 +128,9 @@ async def test_climate_temperature( # Test set target temperature - incorrect params with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, @@ -156,7 +160,7 @@ async def test_climate_fan_level( # Test set fan level to high with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -191,7 +195,7 @@ async def test_climate_swing( # Test set swing mode on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -214,7 +218,7 @@ async def test_climate_swing( # Test set swing mode off with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -245,7 +249,7 @@ async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) - # Test exception during set hvac mode with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -272,7 +276,7 @@ async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) - # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_config_flow.py b/tests/components/switcher_kis/test_config_flow.py index e42b8ac484d..48cc0beacb8 100644 --- a/tests/components/switcher_kis/test_config_flow.py +++ b/tests/components/switcher_kis/test_config_flow.py @@ -6,10 +6,18 @@ import pytest from homeassistant 
import config_entries from homeassistant.components.switcher_kis.const import DOMAIN +from homeassistant.const import CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .consts import DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE +from .consts import ( + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE, + DUMMY_PLUG_DEVICE, + DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE, + DUMMY_TOKEN, + DUMMY_USERNAME, + DUMMY_WATER_HEATER_DEVICE, +) from tests.common import MockConfigEntry @@ -43,13 +51,98 @@ async def test_user_setup( assert mock_bridge.is_running is False assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Switcher" - assert result2["result"].data == {} + assert result2["result"].data == {CONF_USERNAME: None, CONF_TOKEN: None} await hass.async_block_till_done() assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.parametrize( + "mock_bridge", + [ + [ + DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE, + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE, + ] + ], + indirect=True, +) +async def test_user_setup_found_token_device_valid_token( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_bridge +) -> None: + """Test we can finish a config flow with token device found.""" + with patch("homeassistant.components.switcher_kis.utils.DISCOVERY_TIME_SEC", 0): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + + assert mock_bridge.is_running is False + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "credentials" + + with patch( + "homeassistant.components.switcher_kis.config_flow.validate_token", + return_value=True, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + {CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}, + ) + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Switcher" + assert result3["result"].data == { + CONF_USERNAME: DUMMY_USERNAME, + CONF_TOKEN: DUMMY_TOKEN, + } + + +@pytest.mark.parametrize( + "mock_bridge", + [ + [ + DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE, + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE, + ] + ], + indirect=True, +) +async def test_user_setup_found_token_device_invalid_token( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_bridge +) -> None: + """Test a config flow with a token device found and an invalid token.""" + with patch("homeassistant.components.switcher_kis.utils.DISCOVERY_TIME_SEC", 0): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "credentials" + + with patch( + "homeassistant.components.switcher_kis.config_flow.validate_token", + return_value=False, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + {CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}, + ) + + assert result3["type"] is FlowResultType.FORM + assert result3["errors"] == {"base": "invalid_auth"} + + async def test_user_setup_abort_no_devices_found( hass: HomeAssistant, mock_setup_entry: AsyncMock, 
mock_bridge ) -> None: @@ -84,3 +177,62 @@ async def test_single_instance(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" + + +@pytest.mark.parametrize( + ("user_input"), + [ + ({CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}), + ], +) +async def test_reauth_successful( + hass: HomeAssistant, + user_input: dict[str, str], +) -> None: + """Test starting a reauthentication flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + with patch( + "homeassistant.components.switcher_kis.config_flow.validate_token", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +async def test_reauth_invalid_auth(hass: HomeAssistant) -> None: + """Test reauthentication flow with invalid credentials.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + with patch( + "homeassistant.components.switcher_kis.config_flow.validate_token", + return_value=False, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "invalid_user", CONF_TOKEN: "invalid_token"}, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index c228da6b556..5829d6345ef 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -14,10 +14,7 @@ from homeassistant.components.cover import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -25,163 +22,264 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util import slugify from . 
import init_integration -from .consts import DUMMY_SHUTTER_DEVICE as DEVICE +from .consts import ( + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE as DEVICE3, + DUMMY_SHUTTER_DEVICE as DEVICE, + DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE as DEVICE2, + DUMMY_TOKEN as TOKEN, + DUMMY_USERNAME as USERNAME, +) ENTITY_ID = f"{COVER_DOMAIN}.{slugify(DEVICE.name)}" +ENTITY_ID2 = f"{COVER_DOMAIN}.{slugify(DEVICE2.name)}" +ENTITY_ID3 = f"{COVER_DOMAIN}.{slugify(DEVICE3.name)}_cover_1" +ENTITY_ID3_2 = f"{COVER_DOMAIN}.{slugify(DEVICE3.name)}_cover_2" -@pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) +@pytest.mark.parametrize( + ( + "device", + "entity_id", + "cover_id", + "position_open", + "position_close", + "direction_open", + "direction_close", + "direction_stop", + ), + [ + ( + DEVICE, + ENTITY_ID, + 0, + [77], + [0], + [ShutterDirection.SHUTTER_UP], + [ShutterDirection.SHUTTER_DOWN], + [ShutterDirection.SHUTTER_STOP], + ), + ( + DEVICE2, + ENTITY_ID2, + 0, + [77], + [0], + [ShutterDirection.SHUTTER_UP], + [ShutterDirection.SHUTTER_DOWN], + [ShutterDirection.SHUTTER_STOP], + ), + ( + DEVICE3, + ENTITY_ID3, + 0, + [77, 0], + [0, 0], + [ShutterDirection.SHUTTER_UP, ShutterDirection.SHUTTER_STOP], + [ShutterDirection.SHUTTER_DOWN, ShutterDirection.SHUTTER_STOP], + [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP], + ), + ( + DEVICE3, + ENTITY_ID3_2, + 1, + [0, 77], + [0, 0], + [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_UP], + [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_DOWN], + [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP], + ), + ], +) +@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2, DEVICE3]], indirect=True) async def test_cover( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, + mock_bridge, + mock_api, + monkeypatch: pytest.MonkeyPatch, + device, + entity_id: str, + cover_id: int, + position_open: list[int], + position_close: list[int], + direction_open: list[ShutterDirection], + direction_close: list[ShutterDirection], + direction_stop: list[ShutterDirection], ) -> None: """Test cover services.""" - await init_integration(hass) + await init_integration(hass, USERNAME, TOKEN) assert mock_bridge # Test initial state - open - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_OPEN + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN # Test set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_POSITION: 77}, + {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 77}, blocking=True, ) - monkeypatch.setattr(DEVICE, "position", 77) - mock_bridge.mock_callbacks([DEVICE]) + monkeypatch.setattr(device, "position", position_open) + mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() assert mock_api.call_count == 2 - mock_control_device.assert_called_once_with(77) - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_OPEN + mock_control_device.assert_called_once_with(77, cover_id) + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 77 # Test open with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position" + 
"homeassistant.components.switcher_kis.entity.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: ENTITY_ID}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - monkeypatch.setattr(DEVICE, "direction", ShutterDirection.SHUTTER_UP) - mock_bridge.mock_callbacks([DEVICE]) + monkeypatch.setattr(device, "direction", direction_open) + mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() assert mock_api.call_count == 4 - mock_control_device.assert_called_once_with(100) - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_OPENING + mock_control_device.assert_called_once_with(100, cover_id) + state = hass.states.get(entity_id) + assert state.state == CoverState.OPENING # Test close with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: ENTITY_ID}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - monkeypatch.setattr(DEVICE, "direction", ShutterDirection.SHUTTER_DOWN) - mock_bridge.mock_callbacks([DEVICE]) + monkeypatch.setattr(device, "direction", direction_close) + mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() assert mock_api.call_count == 6 - mock_control_device.assert_called_once_with(0) - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_CLOSING + mock_control_device.assert_called_once_with(0, cover_id) + state = hass.states.get(entity_id) + assert state.state == CoverState.CLOSING # Test stop with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop_shutter" + "homeassistant.components.switcher_kis.entity.SwitcherApi.stop_shutter" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: ENTITY_ID}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - monkeypatch.setattr(DEVICE, "direction", ShutterDirection.SHUTTER_STOP) - mock_bridge.mock_callbacks([DEVICE]) + monkeypatch.setattr(device, "direction", direction_stop) + mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() assert mock_api.call_count == 8 - mock_control_device.assert_called_once() - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_OPEN + mock_control_device.assert_called_once_with(cover_id) + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN # Test closed on position == 0 - monkeypatch.setattr(DEVICE, "position", 0) - mock_bridge.mock_callbacks([DEVICE]) + monkeypatch.setattr(device, "position", position_close) + mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_CLOSED + state = hass.states.get(entity_id) + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 -@pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) -async def test_cover_control_fail(hass: HomeAssistant, mock_bridge, mock_api) -> None: +@pytest.mark.parametrize( + ("device", "entity_id", "cover_id"), + [ + (DEVICE, ENTITY_ID, 0), + (DEVICE2, ENTITY_ID2, 0), + (DEVICE3, ENTITY_ID3, 0), + (DEVICE3, ENTITY_ID3_2, 1), + ], +) +@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2, DEVICE3]], indirect=True) +async def test_cover_control_fail( + hass: HomeAssistant, + mock_bridge, + mock_api, + device, + entity_id: str, + 
cover_id: int, +) -> None: """Test cover control fail.""" - await init_integration(hass) + await init_integration(hass, USERNAME, TOKEN) assert mock_bridge # Test initial state - open - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_OPEN + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN # Test exception during set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): await hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_POSITION: 44}, + {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 44}, blocking=True, ) assert mock_api.call_count == 2 - mock_control_device.assert_called_once_with(44) - state = hass.states.get(ENTITY_ID) + mock_control_device.assert_called_once_with(44, cover_id) + state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE # Make device available again - mock_bridge.mock_callbacks([DEVICE]) + mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_OPEN + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN # Test error response during set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): await hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_POSITION: 27}, + {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 27}, blocking=True, ) assert mock_api.call_count == 4 - mock_control_device.assert_called_once_with(27) - state = hass.states.get(ENTITY_ID) + mock_control_device.assert_called_once_with(27, cover_id) + state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE + + +@pytest.mark.parametrize("mock_bridge", [[DEVICE2, DEVICE3]], indirect=True) +async def test_cover2_no_token( + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test cover with token needed without token specified.""" + await init_integration(hass) + assert mock_bridge + + assert mock_api.call_count == 0 diff --git a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index 89bcefa5138..f59958420c4 100644 --- a/tests/components/switcher_kis/test_diagnostics.py +++ b/tests/components/switcher_kis/test_diagnostics.py @@ -68,5 +68,7 @@ async def test_diagnostics( "disabled_by": None, "created_at": ANY, "modified_at": ANY, + "discovery_keys": {}, + "subentries": [], }, } diff --git a/tests/components/switcher_kis/test_light.py b/tests/components/switcher_kis/test_light.py new file mode 100644 index 00000000000..51d0eb6332f --- /dev/null +++ b/tests/components/switcher_kis/test_light.py @@ -0,0 +1,195 @@ +"""Test the Switcher light platform.""" + +from unittest.mock import patch + +from aioswitcher.api import SwitcherBaseResponse +from aioswitcher.device import DeviceState +import pytest + +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, +) 
+from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.util import slugify + +from . import init_integration +from .consts import ( + DUMMY_DUAL_LIGHT_DEVICE as DEVICE4, + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE as DEVICE2, + DUMMY_LIGHT_DEVICE as DEVICE3, + DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE as DEVICE, + DUMMY_TOKEN as TOKEN, + DUMMY_TRIPLE_LIGHT_DEVICE as DEVICE5, + DUMMY_USERNAME as USERNAME, +) + +ENTITY_ID = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_1" +ENTITY_ID_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_2" +ENTITY_ID2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE2.name)}" +ENTITY_ID3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE3.name)}" +ENTITY_ID4 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_1" +ENTITY_ID4_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_2" +ENTITY_ID5 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_1" +ENTITY_ID5_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_2" +ENTITY_ID5_3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_3" + + +@pytest.mark.parametrize( + ("device", "entity_id", "light_id", "device_state"), + [ + (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), + (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), + (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_2, 1, [DeviceState.ON, DeviceState.OFF, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), + ], +) +@pytest.mark.parametrize( + "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True +) +async def test_light( + hass: HomeAssistant, + mock_bridge, + mock_api, + monkeypatch: pytest.MonkeyPatch, + device, + entity_id: str, + light_id: int, + device_state: list[DeviceState], +) -> None: + """Test the light.""" + await init_integration(hass, USERNAME, TOKEN) + assert mock_bridge + + # Test initial state - light on + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + # Test state change on --> off for light + monkeypatch.setattr(device, "light", device_state) + mock_bridge.mock_callbacks([device]) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + + # Test turning on light + with patch( + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light", + ) as mock_set_light: + await hass.services.async_call( + LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + + assert mock_api.call_count == 2 + mock_set_light.assert_called_once_with(DeviceState.ON, light_id) + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + # Test turning off light + with patch( + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light" + ) as mock_set_light: + await hass.services.async_call( + LIGHT_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + + assert mock_api.call_count == 4 + mock_set_light.assert_called_once_with(DeviceState.OFF, light_id) + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + + +@pytest.mark.parametrize( + ("device", "entity_id", "light_id", "device_state"), + [ + (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), + 
(DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), + (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), + (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_2, 1, [DeviceState.ON, DeviceState.OFF, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), + ], +) +@pytest.mark.parametrize( + "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True +) +async def test_light_control_fail( + hass: HomeAssistant, + mock_bridge, + mock_api, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, + device, + entity_id: str, + light_id: int, + device_state: list[DeviceState], +) -> None: + """Test light control fail.""" + await init_integration(hass, USERNAME, TOKEN) + assert mock_bridge + + # Test initial state - light off + monkeypatch.setattr(device, "light", device_state) + mock_bridge.mock_callbacks([device]) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + + # Test exception during turn on + with patch( + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light", + side_effect=RuntimeError("fake error"), + ) as mock_control_device: + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_api.call_count == 2 + mock_control_device.assert_called_once_with(DeviceState.ON, light_id) + state = hass.states.get(entity_id) + assert state.state == STATE_UNAVAILABLE + + # Make device available again + mock_bridge.mock_callbacks([device]) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + + # Test error response during turn on + with patch( + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light", + return_value=SwitcherBaseResponse(None), + ) as mock_control_device: + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_api.call_count == 4 + mock_control_device.assert_called_once_with(DeviceState.ON, light_id) + state = hass.states.get(entity_id) + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/switcher_kis/test_sensor.py b/tests/components/switcher_kis/test_sensor.py index 8ccc33f2d37..f99d91bd9a3 100644 --- a/tests/components/switcher_kis/test_sensor.py +++ b/tests/components/switcher_kis/test_sensor.py @@ -7,7 +7,12 @@ from homeassistant.helpers import entity_registry as er from homeassistant.util import slugify from . 
import init_integration -from .consts import DUMMY_PLUG_DEVICE, DUMMY_SWITCHER_DEVICES, DUMMY_WATER_HEATER_DEVICE +from .consts import ( + DUMMY_PLUG_DEVICE, + DUMMY_SWITCHER_SENSORS_DEVICES, + DUMMY_THERMOSTAT_DEVICE, + DUMMY_WATER_HEATER_DEVICE, +) DEVICE_SENSORS_TUPLE = ( ( @@ -25,17 +30,23 @@ DEVICE_SENSORS_TUPLE = ( ("remaining_time", "remaining_time"), ], ), + ( + DUMMY_THERMOSTAT_DEVICE, + [ + ("current_temperature", "temperature"), + ], + ), ) -@pytest.mark.parametrize("mock_bridge", [DUMMY_SWITCHER_DEVICES], indirect=True) +@pytest.mark.parametrize("mock_bridge", [DUMMY_SWITCHER_SENSORS_DEVICES], indirect=True) async def test_sensor_platform(hass: HomeAssistant, mock_bridge) -> None: """Test sensor platform.""" entry = await init_integration(hass) assert mock_bridge assert mock_bridge.is_running is True - assert len(entry.runtime_data) == 2 + assert len(entry.runtime_data) == 3 for device, sensors in DEVICE_SENSORS_TUPLE: for sensor, field in sensors: diff --git a/tests/components/switcher_kis/test_services.py b/tests/components/switcher_kis/test_services.py index 26c54ee53ed..b4a8168419f 100644 --- a/tests/components/switcher_kis/test_services.py +++ b/tests/components/switcher_kis/test_services.py @@ -16,6 +16,7 @@ from homeassistant.components.switcher_kis.const import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.config_validation import time_period_str from homeassistant.util import slugify @@ -48,7 +49,7 @@ async def test_turn_on_with_timer_service( assert state.state == STATE_OFF with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device" + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device" ) as mock_control_device: await hass.services.async_call( DOMAIN, @@ -78,7 +79,7 @@ async def test_set_auto_off_service(hass: HomeAssistant, mock_bridge, mock_api) entity_id = f"{SWITCH_DOMAIN}.{slugify(device.name)}" with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.set_auto_shutdown" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_auto_shutdown" ) as mock_set_auto_shutdown: await hass.services.async_call( DOMAIN, @@ -95,7 +96,7 @@ async def test_set_auto_off_service(hass: HomeAssistant, mock_bridge, mock_api) @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) async def test_set_auto_off_service_fail( - hass: HomeAssistant, mock_bridge, mock_api, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, mock_bridge, mock_api ) -> None: """Test set auto off service failed.""" await init_integration(hass) @@ -105,24 +106,21 @@ async def test_set_auto_off_service_fail( entity_id = f"{SWITCH_DOMAIN}.{slugify(device.name)}" with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.set_auto_shutdown", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_auto_shutdown", return_value=None, ) as mock_set_auto_shutdown: - await hass.services.async_call( - DOMAIN, - SERVICE_SET_AUTO_OFF_NAME, - {ATTR_ENTITY_ID: entity_id, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, - blocking=True, - ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_AUTO_OFF_NAME, + {ATTR_ENTITY_ID: entity_id, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, + blocking=True, + ) assert mock_api.call_count == 2 mock_set_auto_shutdown.assert_called_once_with( 
time_period_str(DUMMY_AUTO_OFF_SET) ) - assert ( - f"Call api for {device.name} failed, api: 'set_auto_shutdown'" - in caplog.text - ) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/switcher_kis/test_switch.py b/tests/components/switcher_kis/test_switch.py index f14a8f5b1ca..9bfe11fe202 100644 --- a/tests/components/switcher_kis/test_switch.py +++ b/tests/components/switcher_kis/test_switch.py @@ -16,6 +16,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.util import slugify from . import init_integration @@ -47,7 +48,7 @@ async def test_switch( # Test turning on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device", ) as mock_control_device: await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -60,7 +61,7 @@ async def test_switch( # Test turning off with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device" + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device" ) as mock_control_device: await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -78,7 +79,6 @@ async def test_switch_control_fail( mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, ) -> None: """Test switch control fail.""" await init_integration(hass) @@ -97,18 +97,19 @@ async def test_switch_control_fail( # Test exception during turn on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: - await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) assert mock_api.call_count == 2 mock_control_device.assert_called_once_with(Command.ON) - assert ( - f"Call api for {device.name} failed, api: 'control_device'" in caplog.text - ) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE @@ -121,17 +122,18 @@ async def test_switch_control_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: - await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) assert mock_api.call_count == 4 mock_control_device.assert_called_once_with(Command.ON) - assert ( - f"Call api for {device.name} failed, api: 'control_device'" in caplog.text - ) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/synology_dsm/test_config_flow.py b/tests/components/synology_dsm/test_config_flow.py index 1574526a701..e5494b7179f 100644 --- 
a/tests/components/synology_dsm/test_config_flow.py +++ b/tests/components/synology_dsm/test_config_flow.py @@ -21,12 +21,7 @@ from homeassistant.components.synology_dsm.const import ( DEFAULT_SNAPSHOT_QUALITY, DOMAIN, ) -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_SSDP, - SOURCE_USER, - SOURCE_ZEROCONF, -) +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -297,24 +292,7 @@ async def test_reauth(hass: HomeAssistant, service: MagicMock) -> None: ) entry.add_to_hass(hass) - with patch( - "homeassistant.config_entries.ConfigEntries.async_reload", - return_value=True, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - "title_placeholders": {"name": entry.title}, - }, - data={ - CONF_HOST: HOST, - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/system_bridge/test_config_flow.py b/tests/components/system_bridge/test_config_flow.py index 727d93de893..ada44de2d12 100644 --- a/tests/components/system_bridge/test_config_flow.py +++ b/tests/components/system_bridge/test_config_flow.py @@ -259,9 +259,12 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: async def test_reauth_authorization_error(hass: HomeAssistant) -> None: """Test we show user form on authorization error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=FIXTURE_UUID, data=FIXTURE_USER_INPUT ) + mock_config.add_to_hass(hass) + + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -291,9 +294,12 @@ async def test_reauth_authorization_error(hass: HomeAssistant) -> None: async def test_reauth_connection_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=FIXTURE_UUID, data=FIXTURE_USER_INPUT ) + mock_config.add_to_hass(hass) + + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -336,9 +342,12 @@ async def test_reauth_connection_error(hass: HomeAssistant) -> None: async def test_reauth_connection_closed_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=FIXTURE_UUID, data=FIXTURE_USER_INPUT ) + mock_config.add_to_hass(hass) + + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -373,9 +382,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM 
assert result["step_id"] == "authenticate" diff --git a/tests/components/system_bridge/test_media_source.py b/tests/components/system_bridge/test_media_source.py index 161d69569b6..58ee4ebe05c 100644 --- a/tests/components/system_bridge/test_media_source.py +++ b/tests/components/system_bridge/test_media_source.py @@ -4,7 +4,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from syrupy.filters import paths -from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_player import BrowseError from homeassistant.components.media_source import ( DOMAIN as MEDIA_SOURCE_DOMAIN, URI_SCHEME, diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index 328065f6098..afa508cc004 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -34,6 +34,8 @@ 'data': dict({ }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'systemmonitor', 'minor_version': 3, 'options': dict({ @@ -54,6 +56,65 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), + 'title': 'System Monitor', + 'unique_id': None, + 'version': 1, + }), + }) +# --- +# name: test_diagnostics_missing_items[test_diagnostics_missing_items] + dict({ + 'coordinators': dict({ + 'data': dict({ + 'addresses': None, + 'boot_time': '2024-02-24 15:00:00+00:00', + 'cpu_percent': '10.0', + 'disk_usage': dict({ + '/': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)', + '/home/notexist/': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)', + '/media/share': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)', + }), + 'io_counters': None, + 'load': '(1, 2, 3)', + 'memory': 'VirtualMemory(total=104857600, available=41943040, percent=40.0, used=62914560, free=31457280)', + 'processes': "[tests.components.systemmonitor.conftest.MockProcess(pid=1, name='python3', status='sleeping', started='2024-02-23 15:00:00'), tests.components.systemmonitor.conftest.MockProcess(pid=1, name='pip', status='sleeping', started='2024-02-23 15:00:00')]", + 'swap': 'sswap(total=104857600, used=62914560, free=41943040, percent=60.0, sin=1, sout=1)', + 'temperatures': dict({ + 'cpu0-thermal': "[shwtemp(label='cpu0-thermal', current=50.0, high=60.0, critical=70.0)]", + }), + }), + 'last_update_success': True, + }), + 'entry': dict({ + 'data': dict({ + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'systemmonitor', + 'minor_version': 3, + 'options': dict({ + 'binary_sensor': dict({ + 'process': list([ + 'python3', + 'pip', + ]), + }), + 'resources': list([ + 'disk_use_percent_/', + 'disk_use_percent_/home/notexist/', + 'memory_free_', + 'network_out_eth0', + 'process_python3', + ]), + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), 'title': 'System Monitor', 'unique_id': None, 'version': 1, diff --git a/tests/components/systemmonitor/snapshots/test_repairs.ambr b/tests/components/systemmonitor/snapshots/test_repairs.ambr deleted file mode 100644 index dc659918b5f..00000000000 --- a/tests/components/systemmonitor/snapshots/test_repairs.ambr +++ /dev/null @@ -1,73 +0,0 @@ -# serializer version: 1 -# name: test_migrate_process_sensor[after_migration] - list([ - 
ConfigEntrySnapshot({ - 'data': dict({ - }), - 'disabled_by': None, - 'domain': 'systemmonitor', - 'entry_id': , - 'minor_version': 2, - 'options': dict({ - 'binary_sensor': dict({ - 'process': list([ - 'python3', - 'pip', - ]), - }), - 'resources': list([ - 'disk_use_percent_/', - 'disk_use_percent_/home/notexist/', - 'memory_free_', - 'network_out_eth0', - 'process_python3', - ]), - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'System Monitor', - 'unique_id': None, - 'version': 1, - }), - ]) -# --- -# name: test_migrate_process_sensor[before_migration] - list([ - ConfigEntrySnapshot({ - 'data': dict({ - }), - 'disabled_by': None, - 'domain': 'systemmonitor', - 'entry_id': , - 'minor_version': 2, - 'options': dict({ - 'binary_sensor': dict({ - 'process': list([ - 'python3', - 'pip', - ]), - }), - 'resources': list([ - 'disk_use_percent_/', - 'disk_use_percent_/home/notexist/', - 'memory_free_', - 'network_out_eth0', - 'process_python3', - ]), - 'sensor': dict({ - 'process': list([ - 'python3', - 'pip', - ]), - }), - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'System Monitor', - 'unique_id': None, - 'version': 1, - }), - ]) -# --- diff --git a/tests/components/systemmonitor/test_diagnostics.py b/tests/components/systemmonitor/test_diagnostics.py index b0f4fca3d0c..26e421e6574 100644 --- a/tests/components/systemmonitor/test_diagnostics.py +++ b/tests/components/systemmonitor/test_diagnostics.py @@ -2,6 +2,7 @@ from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion from syrupy.filters import props @@ -24,3 +25,26 @@ async def test_diagnostics( assert await get_diagnostics_for_config_entry( hass, hass_client, mock_added_config_entry ) == snapshot(exclude=props("last_update", "entry_id", "created_at", "modified_at")) + + +async def test_diagnostics_missing_items( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_psutil: Mock, + mock_os: Mock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test diagnostics.""" + mock_psutil.net_if_addrs.return_value = None + mock_psutil.net_io_counters.return_value = None + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot( + exclude=props("last_update", "entry_id", "created_at", "modified_at"), + name="test_diagnostics_missing_items", + ) diff --git a/tests/components/tado/fixtures/home.json b/tests/components/tado/fixtures/home.json new file mode 100644 index 00000000000..3431c1c2471 --- /dev/null +++ b/tests/components/tado/fixtures/home.json @@ -0,0 +1,47 @@ +{ + "id": 1, + "name": "My Home", + "dateTimeZone": "Europe/Berlin", + "dateCreated": "2019-03-24T16:16:19.541Z", + "temperatureUnit": "CELSIUS", + "partner": null, + "simpleSmartScheduleEnabled": true, + "awayRadiusInMeters": 100.0, + "installationCompleted": true, + "incidentDetection": { "supported": true, "enabled": true }, + "generation": "PRE_LINE_X", + "zonesCount": 7, + "language": "de-DE", + "skills": ["AUTO_ASSIST"], + "christmasModeEnabled": true, + "showAutoAssistReminders": true, + "contactDetails": { + "name": "Max Mustermann", + "email": "max@example.com", + "phone": "+493023125431" + }, + "address": { + "addressLine1": "Musterstrasse 123", + 
"addressLine2": null, + "zipCode": "12345", + "city": "Berlin", + "state": null, + "country": "DEU" + }, + "geolocation": { "latitude": 52.0, "longitude": 13.0 }, + "consentGrantSkippable": true, + "enabledFeatures": [ + "EIQ_SETTINGS_AS_WEBVIEW", + "HIDE_BOILER_REPAIR_SERVICE", + "INTERCOM_ENABLED", + "MORE_AS_WEBVIEW", + "OWD_SETTINGS_AS_WEBVIEW", + "SETTINGS_OVERVIEW_AS_WEBVIEW" + ], + "isAirComfortEligible": true, + "isBalanceAcEligible": false, + "isEnergyIqEligible": true, + "isHeatSourceInstalled": false, + "isHeatPumpInstalled": false, + "supportsFlowTemperatureOptimization": false +} diff --git a/tests/components/tado/test_config_flow.py b/tests/components/tado/test_config_flow.py index 4f5f4180fb5..63b17dad13e 100644 --- a/tests/components/tado/test_config_flow.py +++ b/tests/components/tado/test_config_flow.py @@ -295,13 +295,7 @@ async def test_reconfigure_flow( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM diff --git a/tests/components/tado/util.py b/tests/components/tado/util.py index de4fd515e5a..a76858ab98e 100644 --- a/tests/components/tado/util.py +++ b/tests/components/tado/util.py @@ -20,6 +20,7 @@ async def async_init_integration( mobile_devices_fixture = "tado/mobile_devices.json" me_fixture = "tado/me.json" weather_fixture = "tado/weather.json" + home_fixture = "tado/home.json" home_state_fixture = "tado/home_state.json" zones_fixture = "tado/zones.json" zone_states_fixture = "tado/zone_states.json" @@ -65,6 +66,10 @@ async def async_init_integration( "https://my.tado.com/api/v2/me", text=load_fixture(me_fixture), ) + m.get( + "https://my.tado.com/api/v2/homes/1/", + text=load_fixture(home_fixture), + ) m.get( "https://my.tado.com/api/v2/homes/1/weather", text=load_fixture(weather_fixture), diff --git a/tests/components/tag/snapshots/test_init.ambr b/tests/components/tag/snapshots/test_init.ambr index 29a9a2665b8..caa88b8ca9a 100644 --- a/tests/components/tag/snapshots/test_init.ambr +++ b/tests/components/tag/snapshots/test_init.ambr @@ -5,8 +5,6 @@ 'items': list([ dict({ 'id': 'test tag id', - 'migrated': True, - 'name': 'test tag name', }), dict({ 'device_id': 'some_scanner', @@ -23,3 +21,24 @@ 'version': 1, }) # --- +# name: test_tag_scanned + dict({ + 'data': dict({ + 'items': list([ + dict({ + 'id': 'test tag id', + }), + dict({ + 'id': 'test tag id 2', + }), + dict({ + 'device_id': 'some_scanner', + 'id': 'new tag', + }), + ]), + }), + 'key': 'tag', + 'minor_version': 3, + 'version': 1, + }) +# --- diff --git a/tests/components/tag/test_init.py b/tests/components/tag/test_init.py index 6f309391d2b..ac862e59f2d 100644 --- a/tests/components/tag/test_init.py +++ b/tests/components/tag/test_init.py @@ -6,6 +6,7 @@ from typing import Any from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.tag import DOMAIN, _create_entry, async_scan_tag from homeassistant.const import CONF_NAME, STATE_UNKNOWN @@ -165,7 +166,9 @@ async def test_tag_scanned( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], storage_setup, + snapshot: SnapshotAssertion, ) -> None: """Test scanning tags.""" assert await storage_setup() @@ -205,6 +208,12 @@ async def test_tag_scanned( }, ] 
+ # Trigger store + freezer.tick(11) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass_storage[DOMAIN] == snapshot(exclude=props("last_scanned")) + def track_changes(coll: collection.ObservableCollection): """Create helper to track changes in a collection.""" @@ -294,6 +303,10 @@ async def test_entity_created_and_removed( assert item["id"] == "1234567890" assert item["name"] == "Kitchen tag" + await hass.async_block_till_done() + er_entity = entity_registry.async_get("tag.kitchen_tag") + assert er_entity.name == "Kitchen tag" + entity = hass.states.get("tag.kitchen_tag") assert entity assert entity.state == STATE_UNKNOWN diff --git a/tests/components/tailscale/test_config_flow.py b/tests/components/tailscale/test_config_flow.py index 86daa40d8dc..3a67f46a496 100644 --- a/tests/components/tailscale/test_config_flow.py +++ b/tests/components/tailscale/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock from tailscale import TailscaleAuthenticationError, TailscaleConnectionError from homeassistant.components.tailscale.const import CONF_TAILNET, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -128,15 +128,7 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -170,15 +162,7 @@ async def test_reauth_with_authentication_error( """ mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -222,15 +206,7 @@ async def test_reauth_api_error( """Test API error during reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" diff --git a/tests/components/tailwind/snapshots/test_config_flow.ambr b/tests/components/tailwind/snapshots/test_config_flow.ambr deleted file mode 100644 index 5c01f35e09c..00000000000 --- a/tests/components/tailwind/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,85 +0,0 @@ -# serializer version: 1 -# name: test_user_flow - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': '3c:e9:0e:6d:21:84', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'tailwind', - 
'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'disabled_by': None, - 'domain': 'tailwind', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Tailwind iQ3', - 'unique_id': '3c:e9:0e:6d:21:84', - 'version': 1, - }), - 'title': 'Tailwind iQ3', - 'type': , - 'version': 1, - }) -# --- -# name: test_zeroconf_flow - FlowResultSnapshot({ - 'context': dict({ - 'configuration_url': 'https://web.gotailwind.com/client/integration/local-control-key', - 'source': 'zeroconf', - 'title_placeholders': dict({ - 'name': 'Tailwind iQ3', - }), - 'unique_id': '3c:e9:0e:6d:21:84', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'tailwind', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'disabled_by': None, - 'domain': 'tailwind', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'zeroconf', - 'title': 'Tailwind iQ3', - 'unique_id': '3c:e9:0e:6d:21:84', - 'version': 1, - }), - 'title': 'Tailwind iQ3', - 'type': , - 'version': 1, - }) -# --- diff --git a/tests/components/tailwind/test_config_flow.py b/tests/components/tailwind/test_config_flow.py index f70ab6e27ff..ca6fbacf0fc 100644 --- a/tests/components/tailwind/test_config_flow.py +++ b/tests/components/tailwind/test_config_flow.py @@ -14,12 +14,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import zeroconf from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.tailwind.const import DOMAIN -from homeassistant.config_entries import ( - SOURCE_DHCP, - SOURCE_REAUTH, - SOURCE_USER, - SOURCE_ZEROCONF, -) +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -30,20 +25,17 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.usefixtures("mock_tailwind") -async def test_user_flow( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: +async def test_user_flow(hass: HomeAssistant) -> None: """Test the full happy path user flow from start to finish.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_HOST: "127.0.0.1", @@ -51,8 +43,15 @@ async def test_user_flow( }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_TOKEN: "987654", + } + assert not config_entry.options @pytest.mark.parametrize( @@ -81,19 +80,27 @@ async def 
test_user_flow_errors( }, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert result.get("errors") == expected_error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == expected_error mock_tailwind.status.side_effect = None - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_HOST: "127.0.0.2", CONF_TOKEN: "123456", }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_TOKEN: "123456", + } + assert not config_entry.options async def test_user_flow_unsupported_firmware_version( @@ -110,8 +117,8 @@ async def test_user_flow_unsupported_firmware_version( }, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "unsupported_firmware" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unsupported_firmware" @pytest.mark.usefixtures("mock_tailwind") @@ -134,8 +141,8 @@ async def test_user_flow_already_configured( }, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" assert mock_config_entry.data[CONF_TOKEN] == "987654" @@ -165,19 +172,26 @@ async def test_zeroconf_flow( ), ) - assert result.get("step_id") == "zeroconf_confirm" - assert result.get("type") is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + assert result["type"] is FlowResultType.FORM progress = hass.config_entries.flow.async_progress() assert len(progress) == 1 assert progress[0].get("flow_id") == result["flow_id"] - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "987654"} ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_TOKEN: "987654", + } + assert not config_entry.options @pytest.mark.parametrize( @@ -205,8 +219,8 @@ async def test_zeroconf_flow_abort_incompatible_properties( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == expected_reason + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == expected_reason @pytest.mark.parametrize( @@ -245,25 +259,33 @@ async def test_zeroconf_flow_errors( ), ) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "zeroconf_confirm" - assert result2.get("errors") == expected_error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + assert result["errors"] == expected_error mock_tailwind.status.side_effect = None - result3 = await hass.config_entries.flow.async_configure( + result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_TOKEN: "123456", + } + assert not config_entry.options @pytest.mark.usefixtures("mock_tailwind") @@ -297,8 +319,8 @@ async def test_zeroconf_flow_not_discovered_again( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" @@ -311,26 +333,18 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) assert mock_config_entry.data[CONF_TOKEN] == "123456" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "reauth_confirm" + result = await mock_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_TOKEN: "987654"}, ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data[CONF_TOKEN] == "987654" @@ -354,37 +368,29 @@ async def test_reauth_flow_errors( mock_config_entry.add_to_hass(hass) mock_tailwind.status.side_effect = side_effect - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "reauth_confirm" - assert result2.get("errors") == expected_error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == expected_error mock_tailwind.status.side_effect = None - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result3.get("type") is FlowResultType.ABORT - assert result3.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" async def test_dhcp_discovery_updates_entry( @@ -405,8 +411,8 @@ async def test_dhcp_discovery_updates_entry( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert 
mock_config_entry.data[CONF_HOST] == "127.0.0.1" @@ -425,5 +431,5 @@ async def test_dhcp_discovery_ignores_unknown(hass: HomeAssistant) -> None: ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "unknown" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" diff --git a/tests/components/tailwind/test_cover.py b/tests/components/tailwind/test_cover.py index 8ccb8947624..a658f842885 100644 --- a/tests/components/tailwind/test_cover.py +++ b/tests/components/tailwind/test_cover.py @@ -3,6 +3,7 @@ from unittest.mock import ANY, MagicMock from gotailwind import ( + TailwindDoorAlreadyInStateError, TailwindDoorDisabledError, TailwindDoorLockedOutError, TailwindDoorOperationCommand, @@ -181,3 +182,28 @@ async def test_cover_operations( ) assert excinfo.value.translation_domain == DOMAIN assert excinfo.value.translation_key == "communication_error" + + # Test door already in state + mock_tailwind.operate.side_effect = TailwindDoorAlreadyInStateError( + "Door is already in the requested state" + ) + + # This call should not raise an exception + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + { + ATTR_ENTITY_ID: "cover.door_1", + }, + blocking=True, + ) + + # This call should not raise an exception + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + { + ATTR_ENTITY_ID: "cover.door_1", + }, + blocking=True, + ) diff --git a/tests/components/tailwind/test_init.py b/tests/components/tailwind/test_init.py index 8ea5f1108f4..8e075a26279 100644 --- a/tests/components/tailwind/test_init.py +++ b/tests/components/tailwind/test_init.py @@ -66,8 +66,8 @@ async def test_config_entry_authentication_failed( assert len(flows) == 1 flow = flows[0] - assert flow.get("step_id") == "reauth_confirm" - assert flow.get("handler") == DOMAIN + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN assert "context" in flow assert flow["context"].get("source") == SOURCE_REAUTH diff --git a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr index f52cb3a88a5..b5b33d7c246 100644 --- a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr +++ b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr @@ -26,6 +26,8 @@ ]), }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'tankerkoenig', 'entry_id': '8036b4412f2fae6bb9dbab7fe8e37f87', 'minor_version': 1, @@ -35,6 +37,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/tankerkoenig/test_config_flow.py b/tests/components/tankerkoenig/test_config_flow.py index 022b49fd3f8..bb1e943bbb9 100644 --- a/tests/components/tankerkoenig/test_config_flow.py +++ b/tests/components/tankerkoenig/test_config_flow.py @@ -9,7 +9,7 @@ from homeassistant.components.tankerkoenig.const import ( CONF_STATIONS, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, @@ -162,6 +162,10 @@ async def test_user_no_stations(hass: HomeAssistant) -> None: async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test starting a flow by user to re-auth.""" config_entry.add_to_hass(hass) + # re-auth initialized + result = await 
config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -171,15 +175,6 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non "homeassistant.components.tankerkoenig.config_flow.Tankerkoenig.nearby_stations", ) as mock_nearby_stations, ): - # re-auth initialized - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, - data=config_entry.data, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - # re-auth unsuccessful mock_nearby_stations.side_effect = TankerkoenigInvalidKeyError("Booom!") result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/tasmota/test_light.py b/tests/components/tasmota/test_light.py index f5802c509bf..4f4daee1301 100644 --- a/tests/components/tasmota/test_light.py +++ b/tests/components/tasmota/test_light.py @@ -1108,7 +1108,7 @@ async def test_sending_mqtt_commands_rgbww( ) mqtt_mock.async_publish.reset_mock() - await common.async_turn_on(hass, "light.tasmota_test", color_temp=200) + await common.async_turn_on(hass, "light.tasmota_test", color_temp_kelvin=5000) mqtt_mock.async_publish.assert_called_once_with( "tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 ON;NoDelay;CT 200", @@ -1350,7 +1350,9 @@ async def test_transition( assert state.attributes.get("color_temp") == 153 # Set color_temp of the light from 153 to 500 @ 50%: Speed should be 6*2*2=24 - await common.async_turn_on(hass, "light.tasmota_test", color_temp=500, transition=6) + await common.async_turn_on( + hass, "light.tasmota_test", color_temp_kelvin=2000, transition=6 + ) mqtt_mock.async_publish.assert_called_once_with( "tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade2 1;NoDelay;Speed2 24;NoDelay;Power1 ON;NoDelay;CT 500", @@ -1369,7 +1371,9 @@ async def test_transition( assert state.attributes.get("color_temp") == 500 # Set color_temp of the light from 500 to 326 @ 50%: Speed should be 6*2*2*2=48->40 - await common.async_turn_on(hass, "light.tasmota_test", color_temp=326, transition=6) + await common.async_turn_on( + hass, "light.tasmota_test", color_temp_kelvin=3067, transition=6 + ) mqtt_mock.async_publish.assert_called_once_with( "tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade2 1;NoDelay;Speed2 40;NoDelay;Power1 ON;NoDelay;CT 326", diff --git a/tests/components/tautulli/test_config_flow.py b/tests/components/tautulli/test_config_flow.py index ca563cfad77..722fd0a7616 100644 --- a/tests/components/tautulli/test_config_flow.py +++ b/tests/components/tautulli/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from pytautulli import exceptions from homeassistant.components.tautulli.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -156,15 +156,7 @@ async def test_flow_reauth( """Test reauth flow.""" with patch("homeassistant.components.tautulli.PLATFORMS", []): entry = await setup_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) + result = await 
entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -193,14 +185,7 @@ async def test_flow_reauth_error( """Test reauth flow with invalid authentication.""" with patch("homeassistant.components.tautulli.PLATFORMS", []): entry = await setup_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) with patch_config_flow_tautulli(AsyncMock()) as tautullimock: tautullimock.side_effect = exceptions.PyTautulliAuthenticationException result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/tcp/test_binary_sensor.py b/tests/components/tcp/test_binary_sensor.py index 05aa2a471db..c84a36016ad 100644 --- a/tests/components/tcp/test_binary_sensor.py +++ b/tests/components/tcp/test_binary_sensor.py @@ -23,9 +23,9 @@ TEST_ENTITY = "binary_sensor.test_name" def mock_socket_fixture(): """Mock the socket.""" with ( - patch("homeassistant.components.tcp.common.socket.socket") as mock_socket, + patch("homeassistant.components.tcp.entity.socket.socket") as mock_socket, patch( - "homeassistant.components.tcp.common.select.select", + "homeassistant.components.tcp.entity.select.select", return_value=(True, False, False), ), ): diff --git a/tests/components/tcp/test_sensor.py b/tests/components/tcp/test_sensor.py index 04fbb2c667e..27003df46cd 100644 --- a/tests/components/tcp/test_sensor.py +++ b/tests/components/tcp/test_sensor.py @@ -43,7 +43,7 @@ socket_test_value = "123" @pytest.fixture(name="mock_socket") def mock_socket_fixture(mock_select): """Mock socket.""" - with patch("homeassistant.components.tcp.common.socket.socket") as mock_socket: + with patch("homeassistant.components.tcp.entity.socket.socket") as mock_socket: socket_instance = mock_socket.return_value.__enter__.return_value socket_instance.recv.return_value = socket_test_value.encode() yield socket_instance @@ -53,7 +53,7 @@ def mock_socket_fixture(mock_select): def mock_select_fixture(): """Mock select.""" with patch( - "homeassistant.components.tcp.common.select.select", + "homeassistant.components.tcp.entity.select.select", return_value=(True, False, False), ) as mock_select: yield mock_select @@ -63,7 +63,7 @@ def mock_select_fixture(): def mock_ssl_context_fixture(): """Mock select.""" with patch( - "homeassistant.components.tcp.common.ssl.create_default_context", + "homeassistant.components.tcp.entity.ssl.create_default_context", ) as mock_ssl_context: mock_ssl_context.return_value.wrap_socket.return_value.recv.return_value = ( socket_test_value + "567" diff --git a/tests/components/tedee/__init__.py b/tests/components/tedee/__init__.py index a72b1fbdd6a..0bff030d2df 100644 --- a/tests/components/tedee/__init__.py +++ b/tests/components/tedee/__init__.py @@ -1 +1,14 @@ """Add tests for Tedee components.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the Tedee integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/tedee/conftest.py b/tests/components/tedee/conftest.py index 68444de640c..d659560ee61 100644 --- 
a/tests/components/tedee/conftest.py +++ b/tests/components/tedee/conftest.py @@ -6,14 +6,16 @@ from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch -from pytedee_async.bridge import TedeeBridge -from pytedee_async.lock import TedeeLock +from aiotedee.bridge import TedeeBridge +from aiotedee.lock import TedeeLock import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant +from . import setup_integration + from tests.common import MockConfigEntry, load_fixture WEBHOOK_ID = "bq33efxmdi3vxy55q2wbnudbra7iv8mjrq9x0gea33g4zqtd87093pwveg8xcb33" @@ -84,8 +86,6 @@ async def init_integration( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock ) -> MockConfigEntry: """Set up the Tedee integration for testing.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) return mock_config_entry diff --git a/tests/components/tedee/snapshots/test_binary_sensor.ambr b/tests/components/tedee/snapshots/test_binary_sensor.ambr index 385e4ac9bc1..e3238dacda1 100644 --- a/tests/components/tedee/snapshots/test_binary_sensor.ambr +++ b/tests/components/tedee/snapshots/test_binary_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_binary_sensors[entry-charging] +# name: test_binary_sensors[binary_sensor.lock_1a2b_charging-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -32,7 +32,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[entry-lock_uncalibrated] +# name: test_binary_sensors[binary_sensor.lock_1a2b_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery_charging', + 'friendly_name': 'Lock-1A2B Charging', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_1a2b_lock_uncalibrated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -65,7 +79,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[entry-pullspring_enabled] +# name: test_binary_sensors[binary_sensor.lock_1a2b_lock_uncalibrated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Lock-1A2B Lock uncalibrated', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_1a2b_pullspring_enabled-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -98,7 +126,20 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[entry-semi_locked] +# name: test_binary_sensors[binary_sensor.lock_1a2b_pullspring_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-1A2B Pullspring enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_pullspring_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_1a2b_semi_locked-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -131,48 +172,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[state-charging] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery_charging', - 'friendly_name': 'Lock-1A2B Charging', - }), - 'context': , - 'entity_id': 'binary_sensor.lock_1a2b_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[state-lock_uncalibrated] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Lock-1A2B Lock uncalibrated', - }), - 'context': , - 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[state-pullspring_enabled] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Lock-1A2B Pullspring enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.lock_1a2b_pullspring_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[state-semi_locked] +# name: test_binary_sensors[binary_sensor.lock_1a2b_semi_locked-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Lock-1A2B Semi locked', @@ -185,3 +185,189 @@ 'state': 'off', }) # --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charging', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '98765-charging', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery_charging', + 'friendly_name': 'Lock-2C3D Charging', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_lock_uncalibrated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_lock_uncalibrated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lock uncalibrated', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uncalibrated', + 'unique_id': '98765-uncalibrated', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_lock_uncalibrated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Lock-2C3D Lock uncalibrated', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_lock_uncalibrated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_binary_sensors[binary_sensor.lock_2c3d_pullspring_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_pullspring_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pullspring enabled', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pullspring_enabled', + 'unique_id': '98765-pullspring_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_pullspring_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-2C3D Pullspring enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_pullspring_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_semi_locked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_semi_locked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Semi locked', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'semi_locked', + 'unique_id': '98765-semi_locked', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_semi_locked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-2C3D Semi locked', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_semi_locked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/tedee/snapshots/test_init.ambr b/tests/components/tedee/snapshots/test_init.ambr index 20d6bfcdc2a..af559f561b2 100644 --- a/tests/components/tedee/snapshots/test_init.ambr +++ b/tests/components/tedee/snapshots/test_init.ambr @@ -31,3 +31,35 @@ 'via_device_id': None, }) # --- +# name: test_lock_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tedee', + '12345', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Tedee', + 'model': 'Tedee PRO', + 'model_id': 'Tedee PRO', + 'name': 'Lock-1A2B', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/tedee/snapshots/test_lock.ambr b/tests/components/tedee/snapshots/test_lock.ambr index 14913e32ba5..cca988663d2 100644 --- a/tests/components/tedee/snapshots/test_lock.ambr +++ b/tests/components/tedee/snapshots/test_lock.ambr @@ -1,83 +1,4 @@ # serializer version: 1 -# name: test_lock - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'friendly_name': 'Lock-1A2B', - 'supported_features': , - }), - 'context': , - 'entity_id': 'lock.lock_1a2b', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unlocked', - }) -# --- -# name: test_lock.1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'lock', - 'entity_category': None, - 'entity_id': 'lock.lock_1a2b', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'tedee', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '12345-lock', - 'unit_of_measurement': None, - }) -# --- -# name: test_lock.2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'tedee', - '12345', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Tedee', - 'model': 'Tedee PRO', - 'model_id': None, - 'name': 'Lock-1A2B', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': , - }) -# --- # name: test_lock_without_pullspring StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -147,7 +68,7 @@ }), 'manufacturer': 'Tedee', 'model': 'Tedee GO', - 'model_id': None, + 'model_id': 'Tedee GO', 'name': 'Lock-2C3D', 'name_by_user': None, 'primary_config_entry': , @@ -157,3 +78,97 @@ 'via_device_id': , }) # --- +# name: test_locks[lock.lock_1a2b-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.lock_1a2b', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '12345-lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_locks[lock.lock_1a2b-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-1A2B', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.lock_1a2b', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- +# name: test_locks[lock.lock_2c3d-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.lock_2c3d', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '98765-lock', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_locks[lock.lock_2c3d-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-2C3D', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.lock_2c3d', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- diff --git a/tests/components/tedee/snapshots/test_sensor.ambr b/tests/components/tedee/snapshots/test_sensor.ambr index d5f4c8361c3..297fe9b0d37 100644 --- a/tests/components/tedee/snapshots/test_sensor.ambr +++ b/tests/components/tedee/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensors[entry-battery] +# name: test_sensors[sensor.lock_1a2b_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -34,7 +34,23 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[entry-pullspring_duration] +# name: test_sensors[sensor.lock_1a2b_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Lock-1A2B Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lock_1a2b_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70', + }) +# --- +# name: test_sensors[sensor.lock_1a2b_pullspring_duration-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -69,23 +85,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[state-battery] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Lock-1A2B Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.lock_1a2b_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '70', - }) -# --- -# name: test_sensors[state-pullspring_duration] +# name: test_sensors[sensor.lock_1a2b_pullspring_duration-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -101,3 +101,105 @@ 'state': '2', }) # --- +# name: test_sensors[sensor.lock_2c3d_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.lock_2c3d_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '98765-battery_sensor', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.lock_2c3d_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Lock-2C3D Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lock_2c3d_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70', + }) +# --- +# name: test_sensors[sensor.lock_2c3d_pullspring_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.lock_2c3d_pullspring_duration', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pullspring duration', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pullspring_duration', + 'unique_id': '98765-pullspring_duration', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.lock_2c3d_pullspring_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Lock-2C3D Pullspring duration', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.lock_2c3d_pullspring_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- diff --git a/tests/components/tedee/test_binary_sensor.py b/tests/components/tedee/test_binary_sensor.py index 788d31c84d2..ccfd12440ea 100644 --- a/tests/components/tedee/test_binary_sensor.py +++ b/tests/components/tedee/test_binary_sensor.py @@ -1,19 +1,20 @@ """Tests for the Tedee Binary Sensors.""" from datetime import timedelta -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch +from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory -from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import async_fire_time_changed +from . import setup_integration -pytestmark = pytest.mark.usefixtures("init_integration") +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalibrated") @@ -22,21 +23,19 @@ BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalib async def test_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: """Test tedee binary sensor.""" - for key in BINARY_SENSORS: - state = hass.states.get(f"binary_sensor.lock_1a2b_{key}") - assert state - assert state == snapshot(name=f"state-{key}") + with patch("homeassistant.components.tedee.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry == snapshot(name=f"entry-{key}") + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.usefixtures("init_integration") async def test_new_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, diff --git a/tests/components/tedee/test_config_flow.py b/tests/components/tedee/test_config_flow.py index d5dc5d4efcf..825e01aca70 100644 --- a/tests/components/tedee/test_config_flow.py +++ b/tests/components/tedee/test_config_flow.py @@ -2,15 +2,16 @@ from unittest.mock import MagicMock, patch -from pytedee_async import ( +from aiotedee import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) +from aiotedee.bridge import TedeeBridge import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER, 
ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -122,18 +123,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data={ - CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, - CONF_HOST: "192.168.1.42", - }, - ) + reauth_result = await mock_config_entry.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], @@ -145,33 +135,29 @@ async def test_reauth_flow( assert result["reason"] == "reauth_successful" +async def __do_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure" + + return await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + {CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_HOST: "192.168.1.43"}, + ) + + async def test_reconfigure_flow( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock ) -> None: """Test that the reconfigure flow works.""" - mock_config_entry.add_to_hass(hass) - - reconfigure_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_RECONFIGURE, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data={ - CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, - CONF_HOST: "192.168.1.42", - }, - ) - - assert reconfigure_result["type"] is FlowResultType.FORM - assert reconfigure_result["step_id"] == "reconfigure_confirm" - - result = await hass.config_entries.flow.async_configure( - reconfigure_result["flow_id"], - {CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_HOST: "192.168.1.43"}, - ) + result = await __do_reconfigure_flow(hass, mock_config_entry) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" @@ -184,3 +170,18 @@ async def test_reconfigure_flow( CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_WEBHOOK_ID: WEBHOOK_ID, } + + +async def test_reconfigure_unique_id_mismatch( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock +) -> None: + """Ensure reconfigure flow aborts when the bridge changes.""" + + mock_tedee.get_local_bridge.return_value = TedeeBridge( + 0, "1111-1111", "Bridge-R2D2" + ) + + result = await __do_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" diff --git a/tests/components/tedee/test_init.py b/tests/components/tedee/test_init.py index d4ac1c9d290..71bf5262f00 100644 --- a/tests/components/tedee/test_init.py +++ b/tests/components/tedee/test_init.py @@ -5,7 +5,7 @@ from typing import Any from unittest.mock import MagicMock, patch from urllib.parse import urlparse -from pytedee_async.exception import ( +from aiotedee.exception import ( TedeeAuthException, TedeeClientException, TedeeWebhookException, @@ -20,6 +20,7 @@ from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID, EVENT_HOMEASSISTANT_ from homeassistant.core import HomeAssistant from 
homeassistant.helpers import device_registry as dr +from . import setup_integration from .conftest import WEBHOOK_ID from tests.common import MockConfigEntry @@ -32,9 +33,7 @@ async def test_load_unload_config_entry( mock_tedee: MagicMock, ) -> None: """Test loading and unloading the integration.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -56,9 +55,7 @@ async def test_config_entry_not_ready( """Test the Tedee configuration entry not ready.""" mock_tedee.get_locks.side_effect = side_effect - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert len(mock_tedee.get_locks.mock_calls) == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY @@ -70,9 +67,7 @@ async def test_cleanup_on_shutdown( mock_tedee: MagicMock, ) -> None: """Test the webhook is cleaned up on shutdown.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -88,9 +83,7 @@ async def test_webhook_cleanup_errors( caplog: pytest.LogCaptureFixture, ) -> None: """Test the webhook is cleaned up on shutdown.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -110,9 +103,7 @@ async def test_webhook_registration_errors( ) -> None: """Test the webhook is cleaned up on shutdown.""" mock_tedee.register_webhook.side_effect = TedeeWebhookException("") - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -128,9 +119,7 @@ async def test_webhook_registration_cleanup_errors( ) -> None: """Test the errors during webhook cleanup during registration.""" mock_tedee.cleanup_webhooks_by_host.side_effect = TedeeWebhookException("") - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -138,6 +127,21 @@ async def test_webhook_registration_cleanup_errors( assert "Failed to cleanup Tedee webhooks by host:" in caplog.text +async def test_lock_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_tedee: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure the lock device is registered.""" + await setup_integration(hass, mock_config_entry) + + device = device_registry.async_get_device({(mock_config_entry.domain, "12345")}) + assert device + assert device == snapshot + + async def test_bridge_device( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -146,9 +150,7 @@ async def test_bridge_device( snapshot: SnapshotAssertion, ) -> None: """Ensure the bridge device is registered.""" - 
mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) device = device_registry.async_get_device( {(mock_config_entry.domain, mock_tedee.get_local_bridge.return_value.serial)} @@ -192,9 +194,7 @@ async def test_webhook_post( ) -> None: """Test webhook callback.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) client = await hass_client_no_auth() webhook_url = async_generate_url(hass, WEBHOOK_ID) @@ -241,9 +241,7 @@ async def test_migration( "homeassistant.components.tedee.webhook_generate_id", return_value=WEBHOOK_ID, ): - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.version == 1 assert mock_config_entry.minor_version == 2 diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index 741bc3156cb..e0fe9673a46 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -1,16 +1,16 @@ """Tests for tedee lock.""" from datetime import timedelta -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from urllib.parse import urlparse -from freezegun.api import FrozenDateTimeFactory -from pytedee_async import TedeeLock, TedeeLockState -from pytedee_async.exception import ( +from aiotedee import TedeeLock, TedeeLockState +from aiotedee.exception import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion @@ -19,48 +19,47 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - STATE_LOCKED, - STATE_LOCKING, - STATE_UNLOCKED, - STATE_UNLOCKING, + LockState, ) from homeassistant.components.webhook import async_generate_url -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceNotSupported from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component +from . 
import setup_integration from .conftest import WEBHOOK_ID -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform from tests.typing import ClientSessionGenerator -pytestmark = pytest.mark.usefixtures("init_integration") - -async def test_lock( +async def test_locks( hass: HomeAssistant, mock_tedee: MagicMock, - device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, +) -> None: + """Test tedee locks.""" + with patch("homeassistant.components.tedee.PLATFORMS", [Platform.LOCK]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("init_integration") +async def test_lock_service_calls( + hass: HomeAssistant, + mock_tedee: MagicMock, ) -> None: """Test the tedee lock.""" - mock_tedee.lock.return_value = None - mock_tedee.unlock.return_value = None - mock_tedee.open.return_value = None - - state = hass.states.get("lock.lock_1a2b") - assert state - assert state == snapshot - - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry == snapshot - assert entry.device_id - - device = device_registry.async_get(entry.device_id) - assert device == snapshot await hass.services.async_call( LOCK_DOMAIN, @@ -75,7 +74,7 @@ async def test_lock( mock_tedee.lock.assert_called_once_with(12345) state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == STATE_LOCKING + assert state.state == LockState.LOCKING await hass.services.async_call( LOCK_DOMAIN, @@ -90,7 +89,7 @@ async def test_lock( mock_tedee.unlock.assert_called_once_with(12345) state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == STATE_UNLOCKING + assert state.state == LockState.UNLOCKING await hass.services.async_call( LOCK_DOMAIN, @@ -105,9 +104,10 @@ async def test_lock( mock_tedee.open.assert_called_once_with(12345) state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == STATE_UNLOCKING + assert state.state == LockState.UNLOCKING +@pytest.mark.usefixtures("init_integration") async def test_lock_without_pullspring( hass: HomeAssistant, mock_tedee: MagicMock, @@ -116,9 +116,8 @@ async def test_lock_without_pullspring( snapshot: SnapshotAssertion, ) -> None: """Test the tedee lock without pullspring.""" - mock_tedee.lock.return_value = None - mock_tedee.unlock.return_value = None - mock_tedee.open.return_value = None + # Fetch translations + await async_setup_component(hass, "homeassistant", {}) state = hass.states.get("lock.lock_2c3d") assert state @@ -134,8 +133,8 @@ async def test_lock_without_pullspring( assert device == snapshot with pytest.raises( - HomeAssistantError, - match="Entity lock.lock_2c3d does not support this service.", + ServiceNotSupported, + match=f"Entity lock.lock_2c3d does not support action {LOCK_DOMAIN}.{SERVICE_OPEN}", ): await hass.services.async_call( LOCK_DOMAIN, @@ -149,13 +148,14 @@ async def test_lock_without_pullspring( assert len(mock_tedee.open.mock_calls) == 0 +@pytest.mark.usefixtures("init_integration") async def test_lock_errors( hass: HomeAssistant, mock_tedee: MagicMock, ) -> None: """Test event errors.""" mock_tedee.lock.side_effect = TedeeClientException("Boom") - with pytest.raises(HomeAssistantError, match="Failed to lock the door. 
Lock 12345"): + with pytest.raises(HomeAssistantError) as exc_info: await hass.services.async_call( LOCK_DOMAIN, SERVICE_LOCK, @@ -164,11 +164,10 @@ async def test_lock_errors( }, blocking=True, ) + assert exc_info.value.translation_key == "lock_failed" mock_tedee.unlock.side_effect = TedeeClientException("Boom") - with pytest.raises( - HomeAssistantError, match="Failed to unlock the door. Lock 12345" - ): + with pytest.raises(HomeAssistantError) as exc_info: await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, @@ -177,11 +176,10 @@ async def test_lock_errors( }, blocking=True, ) + assert exc_info.value.translation_key == "unlock_failed" mock_tedee.open.side_effect = TedeeClientException("Boom") - with pytest.raises( - HomeAssistantError, match="Failed to unlatch the door. Lock 12345" - ): + with pytest.raises(HomeAssistantError) as exc_info: await hass.services.async_call( LOCK_DOMAIN, SERVICE_OPEN, @@ -190,8 +188,10 @@ async def test_lock_errors( }, blocking=True, ) + assert exc_info.value.translation_key == "open_failed" +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( "side_effect", [ @@ -218,6 +218,7 @@ async def test_update_failed( assert state.state == STATE_UNAVAILABLE +@pytest.mark.usefixtures("init_integration") async def test_cleanup_removed_locks( hass: HomeAssistant, mock_tedee: MagicMock, @@ -248,6 +249,7 @@ async def test_cleanup_removed_locks( assert "Lock-1A2B" not in locks +@pytest.mark.usefixtures("init_integration") async def test_new_lock( hass: HomeAssistant, mock_tedee: MagicMock, @@ -276,10 +278,11 @@ async def test_new_lock( assert state +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( ("lib_state", "expected_state"), [ - (TedeeLockState.LOCKED, STATE_LOCKED), + (TedeeLockState.LOCKED, LockState.LOCKED), (TedeeLockState.HALF_OPEN, STATE_UNKNOWN), (TedeeLockState.UNKNOWN, STATE_UNKNOWN), (TedeeLockState.UNCALIBRATED, STATE_UNAVAILABLE), @@ -296,7 +299,7 @@ async def test_webhook_update( state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED webhook_data = {"dummystate": lib_state.value} # is updated in the lib, so mock and assert below diff --git a/tests/components/tedee/test_sensor.py b/tests/components/tedee/test_sensor.py index 72fbd9cbe8d..3c03d340100 100644 --- a/tests/components/tedee/test_sensor.py +++ b/tests/components/tedee/test_sensor.py @@ -1,20 +1,20 @@ """Tests for the Tedee Sensors.""" from datetime import timedelta -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch +from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory -from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import async_fire_time_changed - -pytestmark = pytest.mark.usefixtures("init_integration") +from . 
import setup_integration +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform SENSORS = ( "battery", @@ -25,21 +25,18 @@ SENSORS = ( async def test_sensors( hass: HomeAssistant, mock_tedee: MagicMock, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: """Test tedee sensors.""" - for key in SENSORS: - state = hass.states.get(f"sensor.lock_1a2b_{key}") - assert state - assert state == snapshot(name=f"state-{key}") + with patch("homeassistant.components.tedee.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry.device_id - assert entry == snapshot(name=f"entry-{key}") + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) +@pytest.mark.usefixtures("init_integration") async def test_new_sensors( hass: HomeAssistant, mock_tedee: MagicMock, diff --git a/tests/components/telegram_bot/conftest.py b/tests/components/telegram_bot/conftest.py index 1afe70dcb8a..93137c3815e 100644 --- a/tests/components/telegram_bot/conftest.py +++ b/tests/components/telegram_bot/conftest.py @@ -6,7 +6,7 @@ from typing import Any from unittest.mock import patch import pytest -from telegram import Chat, Message, User +from telegram import Bot, Chat, Message, User from telegram.constants import ChatType from homeassistant.components.telegram_bot import ( @@ -89,23 +89,22 @@ def mock_external_calls() -> Generator[None]: date=datetime.now(), chat=Chat(id=123456, type=ChatType.PRIVATE), ) + + class BotMock(Bot): + """Mock bot class.""" + + __slots__ = () + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Initialize BotMock instance.""" + super().__init__(*args, **kwargs) + self._bot_user = test_user + with ( - patch( - "telegram.Bot.get_me", - return_value=test_user, - ), - patch( - "telegram.Bot._bot_user", - test_user, - ), - patch( - "telegram.Bot.bot", - test_user, - ), - patch( - "telegram.Bot.send_message", - return_value=message, - ), + patch("homeassistant.components.telegram_bot.Bot", BotMock), + patch.object(BotMock, "get_me", return_value=test_user), + patch.object(BotMock, "bot", test_user), + patch.object(BotMock, "send_message", return_value=message), patch("telegram.ext.Updater._bootstrap"), ): yield diff --git a/tests/components/telegram_bot/test_telegram_bot.py b/tests/components/telegram_bot/test_telegram_bot.py index aad758827ca..bdf6ba72fcc 100644 --- a/tests/components/telegram_bot/test_telegram_bot.py +++ b/tests/components/telegram_bot/test_telegram_bot.py @@ -1,8 +1,11 @@ """Tests for the telegram_bot component.""" -from unittest.mock import AsyncMock, patch +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch +import pytest from telegram import Update +from telegram.error import NetworkError, RetryAfter, TelegramError, TimedOut from homeassistant.components.telegram_bot import ( ATTR_MESSAGE, @@ -11,6 +14,7 @@ from homeassistant.components.telegram_bot import ( SERVICE_SEND_MESSAGE, ) from homeassistant.components.telegram_bot.webhooks import TELEGRAM_WEBHOOK_URL +from homeassistant.const import EVENT_HOMEASSISTANT_START from homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component @@ -188,6 +192,103 @@ async def test_polling_platform_message_text_update( assert isinstance(events[0].context, Context) +@pytest.mark.parametrize( + ("error", "log_message"), + [ + ( + 
TelegramError("Telegram error"), + 'caused error: "Telegram error"', + ), + (NetworkError("Network error"), ""), + (RetryAfter(42), ""), + (TimedOut("TimedOut error"), ""), + ], +) +async def test_polling_platform_add_error_handler( + hass: HomeAssistant, + config_polling: dict[str, Any], + update_message_text: dict[str, Any], + caplog: pytest.LogCaptureFixture, + error: Exception, + log_message: str, +) -> None: + """Test polling add error handler.""" + with patch( + "homeassistant.components.telegram_bot.polling.ApplicationBuilder" + ) as application_builder_class: + await async_setup_component( + hass, + DOMAIN, + config_polling, + ) + await hass.async_block_till_done() + + application = ( + application_builder_class.return_value.bot.return_value.build.return_value + ) + application.updater.stop = AsyncMock() + application.stop = AsyncMock() + application.shutdown = AsyncMock() + process_error = application.add_error_handler.call_args[0][0] + application.bot.defaults.tzinfo = None + update = Update.de_json(update_message_text, application.bot) + + await process_error(update, MagicMock(error=error)) + + assert log_message in caplog.text + + +@pytest.mark.parametrize( + ("error", "log_message"), + [ + ( + TelegramError("Telegram error"), + "TelegramError: Telegram error", + ), + (NetworkError("Network error"), ""), + (RetryAfter(42), ""), + (TimedOut("TimedOut error"), ""), + ], +) +async def test_polling_platform_start_polling_error_callback( + hass: HomeAssistant, + config_polling: dict[str, Any], + caplog: pytest.LogCaptureFixture, + error: Exception, + log_message: str, +) -> None: + """Test polling add error handler.""" + with patch( + "homeassistant.components.telegram_bot.polling.ApplicationBuilder" + ) as application_builder_class: + await async_setup_component( + hass, + DOMAIN, + config_polling, + ) + await hass.async_block_till_done() + + application = ( + application_builder_class.return_value.bot.return_value.build.return_value + ) + application.initialize = AsyncMock() + application.updater.start_polling = AsyncMock() + application.start = AsyncMock() + application.updater.stop = AsyncMock() + application.stop = AsyncMock() + application.shutdown = AsyncMock() + + hass.bus.async_fire(EVENT_HOMEASSISTANT_START) + await hass.async_block_till_done() + error_callback = application.updater.start_polling.call_args.kwargs[ + "error_callback" + ] + + error_callback(error) + + assert log_message in caplog.text + + async def test_webhook_endpoint_unauthorized_update_doesnt_generate_telegram_text_event( hass: HomeAssistant, webhook_platform, diff --git a/tests/components/template/conftest.py b/tests/components/template/conftest.py index b400d443be7..bdca84ba071 100644 --- a/tests/components/template/conftest.py +++ b/tests/components/template/conftest.py @@ -3,6 +3,7 @@ import pytest from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component from tests.common import assert_setup_component, async_mock_service @@ -16,8 +17,8 @@ def calls(hass: HomeAssistant) -> list[ServiceCall]: @pytest.fixture async def start_ha( - hass: HomeAssistant, count, domain, config, caplog: pytest.LogCaptureFixture -): + hass: HomeAssistant, count: int, domain: str, config: ConfigType +) -> None: """Do setup of integration.""" with assert_setup_component(count, domain): assert await async_setup_component( @@ -35,3 +36,8 @@ async def start_ha( async def caplog_setup_text(caplog: pytest.LogCaptureFixture) -> 
str: """Return setup log of integration.""" return caplog.text + + +@pytest.fixture(autouse=True, name="stub_blueprint_populate") +def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: + """Stub copying the blueprints to the config folder.""" diff --git a/tests/components/template/snapshots/test_alarm_control_panel.ambr b/tests/components/template/snapshots/test_alarm_control_panel.ambr new file mode 100644 index 00000000000..9772c31220e --- /dev/null +++ b/tests/components/template/snapshots/test_alarm_control_panel.ambr @@ -0,0 +1,18 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': True, + 'code_format': , + 'friendly_name': 'My template', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'armed_away', + }) +# --- diff --git a/tests/components/template/test_alarm_control_panel.py b/tests/components/template/test_alarm_control_panel.py index ea63d7b9926..4b259fabac2 100644 --- a/tests/components/template/test_alarm_control_panel.py +++ b/tests/components/template/test_alarm_control_panel.py @@ -1,24 +1,26 @@ """The tests for the Template alarm control panel platform.""" import pytest +from syrupy.assertion import SnapshotAssertion -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components import template +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.const import ( ATTR_DOMAIN, ATTR_ENTITY_ID, ATTR_SERVICE_DATA, EVENT_CALL_SERVICE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, + STATE_UNAVAILABLE, + STATE_UNKNOWN, ) -from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant, State, callback +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry, assert_setup_component, mock_restore_cache TEMPLATE_NAME = "alarm_control_panel.test_template_panel" PANEL_NAME = "alarm_control_panel.test" @@ -100,19 +102,20 @@ TEMPLATE_ALARM_CONFIG = { }, ], ) -async def test_template_state_text(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_state_text(hass: HomeAssistant) -> None: """Test the state text of a template.""" for set_state in ( - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMING, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.PENDING, + AlarmControlPanelState.TRIGGERED, ): hass.states.async_set(PANEL_NAME, set_state) await hass.async_block_till_done() @@ -125,6 +128,41 @@ async def test_template_state_text(hass: HomeAssistant, start_ha) -> None: assert state.state == 
"unknown" +async def test_setup_config_entry( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test the config flow.""" + value_template = "{{ states('alarm_control_panel.one') }}" + + hass.states.async_set("alarm_control_panel.one", "armed_away", {}) + + template_config_entry = MockConfigEntry( + data={}, + domain=template.DOMAIN, + options={ + "name": "My template", + "value_template": value_template, + "template_type": "alarm_control_panel", + "code_arm_required": True, + "code_format": "number", + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("alarm_control_panel.my_template") + assert state is not None + assert state == snapshot + + hass.states.async_set("alarm_control_panel.one", "disarmed", {}) + await hass.async_block_till_done() + state = hass.states.get("alarm_control_panel.my_template") + assert state.state == AlarmControlPanelState.DISARMED + + @pytest.mark.parametrize(("count", "domain"), [(1, "alarm_control_panel")]) @pytest.mark.parametrize( "config", @@ -137,7 +175,8 @@ async def test_template_state_text(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_optimistic_states(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_optimistic_states(hass: HomeAssistant) -> None: """Test the optimistic state.""" state = hass.states.get(TEMPLATE_NAME) @@ -145,13 +184,13 @@ async def test_optimistic_states(hass: HomeAssistant, start_ha) -> None: assert state.state == "unknown" for service, set_state in ( - ("alarm_arm_away", STATE_ALARM_ARMED_AWAY), - ("alarm_arm_home", STATE_ALARM_ARMED_HOME), - ("alarm_arm_night", STATE_ALARM_ARMED_NIGHT), - ("alarm_arm_vacation", STATE_ALARM_ARMED_VACATION), - ("alarm_arm_custom_bypass", STATE_ALARM_ARMED_CUSTOM_BYPASS), - ("alarm_disarm", STATE_ALARM_DISARMED), - ("alarm_trigger", STATE_ALARM_TRIGGERED), + ("alarm_arm_away", AlarmControlPanelState.ARMED_AWAY), + ("alarm_arm_home", AlarmControlPanelState.ARMED_HOME), + ("alarm_arm_night", AlarmControlPanelState.ARMED_NIGHT), + ("alarm_arm_vacation", AlarmControlPanelState.ARMED_VACATION), + ("alarm_arm_custom_bypass", AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + ("alarm_disarm", AlarmControlPanelState.DISARMED), + ("alarm_trigger", AlarmControlPanelState.TRIGGERED), ): await hass.services.async_call( ALARM_DOMAIN, @@ -227,8 +266,9 @@ async def test_optimistic_states(hass: HomeAssistant, start_ha) -> None: ), ], ) +@pytest.mark.usefixtures("start_ha") async def test_template_syntax_error( - hass: HomeAssistant, msg, start_ha, caplog_setup_text + hass: HomeAssistant, msg, caplog_setup_text ) -> None: """Test templating syntax error.""" assert len(hass.states.async_all("alarm_control_panel")) == 0 @@ -253,7 +293,8 @@ async def test_template_syntax_error( }, ], ) -async def test_name(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_name(hass: HomeAssistant) -> None: """Test the accessibility of the name attribute.""" state = hass.states.get(TEMPLATE_NAME) assert state is not None @@ -284,8 +325,9 @@ async def test_name(hass: HomeAssistant, start_ha) -> None: "alarm_trigger", ], ) +@pytest.mark.usefixtures("start_ha") async def test_actions( - hass: HomeAssistant, service, start_ha, call_service_events: list[Event] + hass: HomeAssistant, service, call_service_events: list[Event] ) -> None: """Test alarm actions.""" 
await hass.services.async_call( @@ -321,7 +363,8 @@ async def test_actions( }, ], ) -async def test_unique_id(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_unique_id(hass: HomeAssistant) -> None: """Test unique_id option only creates one alarm control panel per id.""" assert len(hass.states.async_all()) == 1 @@ -393,10 +436,130 @@ async def test_unique_id(hass: HomeAssistant, start_ha) -> None: ), ], ) -async def test_code_config( - hass: HomeAssistant, code_format, code_arm_required, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_code_config(hass: HomeAssistant, code_format, code_arm_required) -> None: """Test configuration options related to alarm code.""" state = hass.states.get(TEMPLATE_NAME) assert state.attributes.get("code_format") == code_format assert state.attributes.get("code_arm_required") == code_arm_required + + +@pytest.mark.parametrize(("count", "domain"), [(1, "alarm_control_panel")]) +@pytest.mark.parametrize( + "config", + [ + { + "alarm_control_panel": { + "platform": "template", + "panels": {"test_template_panel": TEMPLATE_ALARM_CONFIG}, + } + }, + ], +) +@pytest.mark.parametrize( + ("restored_state", "initial_state"), + [ + ( + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, + ), + ( + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + ( + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_HOME, + ), + ( + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_NIGHT, + ), + ( + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_VACATION, + ), + (AlarmControlPanelState.ARMING, AlarmControlPanelState.ARMING), + (AlarmControlPanelState.DISARMED, AlarmControlPanelState.DISARMED), + (AlarmControlPanelState.PENDING, AlarmControlPanelState.PENDING), + ( + AlarmControlPanelState.TRIGGERED, + AlarmControlPanelState.TRIGGERED, + ), + (STATE_UNAVAILABLE, STATE_UNKNOWN), + (STATE_UNKNOWN, STATE_UNKNOWN), + ("faulty_state", STATE_UNKNOWN), + ], +) +async def test_restore_state( + hass: HomeAssistant, + count, + domain, + config, + restored_state, + initial_state, +) -> None: + """Test restoring template alarm control panel.""" + + fake_state = State( + "alarm_control_panel.test_template_panel", + restored_state, + {}, + ) + mock_restore_cache(hass, (fake_state,)) + with assert_setup_component(count, domain): + assert await async_setup_component( + hass, + domain, + config, + ) + + await hass.async_block_till_done() + + await hass.async_start() + await hass.async_block_till_done() + + state = hass.states.get("alarm_control_panel.test_template_panel") + assert state.state == initial_state + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for alarm control panel template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=template.DOMAIN, + options={ + "name": "My template", + "value_template": "disarmed", + "template_type": "alarm_control_panel", + "code_arm_required": True, + "code_format":
"number", + "device_id": device_entry.id, + }, + title="My template", + ) + + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("alarm_control_panel.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_binary_sensor.py b/tests/components/template/test_binary_sensor.py index eb51b3f53b4..3e3a629b4be 100644 --- a/tests/components/template/test_binary_sensor.py +++ b/tests/components/template/test_binary_sensor.py @@ -33,9 +33,6 @@ from tests.common import ( mock_restore_cache_with_extra_data, ) -ON = "on" -OFF = "off" - @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( @@ -72,14 +69,13 @@ OFF = "off" ), ], ) -async def test_setup_minimal( - hass: HomeAssistant, start_ha, entity_id, name, attributes -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_setup_minimal(hass: HomeAssistant, entity_id, name, attributes) -> None: """Test the setup.""" state = hass.states.get(entity_id) assert state is not None assert state.name == name - assert state.state == ON + assert state.state == STATE_ON assert state.attributes == attributes @@ -118,12 +114,13 @@ async def test_setup_minimal( ), ], ) -async def test_setup(hass: HomeAssistant, start_ha, entity_id) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_setup(hass: HomeAssistant, entity_id) -> None: """Test the setup.""" state = hass.states.get(entity_id) assert state is not None assert state.name == "virtual thingy" - assert state.state == ON + assert state.state == STATE_ON assert state.attributes["device_class"] == "motion" @@ -234,7 +231,8 @@ async def test_setup_config_entry( ), ], ) -async def test_setup_invalid_sensors(hass: HomeAssistant, count, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_setup_invalid_sensors(hass: HomeAssistant, count) -> None: """Test setup with no sensors.""" assert len(hass.states.async_entity_ids("binary_sensor")) == count @@ -252,7 +250,7 @@ async def test_setup_invalid_sensors(hass: HomeAssistant, count, start_ha) -> No "value_template": "{{ states.sensor.xyz.state }}", "icon_template": "{% if " "states.binary_sensor.test_state.state == " - "'Works' %}" + "'on' %}" "mdi:check" "{% endif %}", }, @@ -269,7 +267,7 @@ async def test_setup_invalid_sensors(hass: HomeAssistant, count, start_ha) -> No "state": "{{ states.sensor.xyz.state }}", "icon": "{% if " "states.binary_sensor.test_state.state == " - "'Works' %}" + "'on' %}" "mdi:check" "{% endif %}", }, @@ -280,12 +278,13 @@ async def test_setup_invalid_sensors(hass: HomeAssistant, count, start_ha) -> No ), ], ) -async def test_icon_template(hass: HomeAssistant, start_ha, entity_id) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_icon_template(hass: HomeAssistant, entity_id) -> None: """Test icon template.""" state = hass.states.get(entity_id) assert state.attributes.get("icon") == "" - hass.states.async_set("binary_sensor.test_state", "Works") + hass.states.async_set("binary_sensor.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.attributes["icon"] == "mdi:check" @@ -304,7 +303,7 @@ async def test_icon_template(hass: HomeAssistant, start_ha, entity_id) -> None: "value_template": "{{ states.sensor.xyz.state }}", "entity_picture_template": "{% if " 
"states.binary_sensor.test_state.state == " - "'Works' %}" + "'on' %}" "/local/sensor.png" "{% endif %}", }, @@ -321,7 +320,7 @@ async def test_icon_template(hass: HomeAssistant, start_ha, entity_id) -> None: "state": "{{ states.sensor.xyz.state }}", "picture": "{% if " "states.binary_sensor.test_state.state == " - "'Works' %}" + "'on' %}" "/local/sensor.png" "{% endif %}", }, @@ -332,14 +331,13 @@ async def test_icon_template(hass: HomeAssistant, start_ha, entity_id) -> None: ), ], ) -async def test_entity_picture_template( - hass: HomeAssistant, start_ha, entity_id -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_entity_picture_template(hass: HomeAssistant, entity_id) -> None: """Test entity_picture template.""" state = hass.states.get(entity_id) assert state.attributes.get("entity_picture") == "" - hass.states.async_set("binary_sensor.test_state", "Works") + hass.states.async_set("binary_sensor.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.attributes["entity_picture"] == "/local/sensor.png" @@ -382,7 +380,8 @@ async def test_entity_picture_template( ), ], ) -async def test_attribute_templates(hass: HomeAssistant, start_ha, entity_id) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_attribute_templates(hass: HomeAssistant, entity_id) -> None: """Test attribute_templates template.""" state = hass.states.get(entity_id) assert state.attributes.get("test_attribute") == "It ." @@ -426,7 +425,8 @@ async def setup_mock(): }, ], ) -async def test_match_all(hass: HomeAssistant, setup_mock, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_match_all(hass: HomeAssistant, setup_mock) -> None: """Test template that is rerendered on any state lifecycle.""" init_calls = len(setup_mock.mock_calls) @@ -453,16 +453,17 @@ async def test_match_all(hass: HomeAssistant, setup_mock, start_ha) -> None: }, ], ) -async def test_event(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_event(hass: HomeAssistant) -> None: """Test the event.""" state = hass.states.get("binary_sensor.test") - assert state.state == OFF + assert state.state == STATE_OFF - hass.states.async_set("sensor.test_state", ON) + hass.states.async_set("sensor.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == ON + assert state.state == STATE_ON @pytest.mark.parametrize( @@ -563,45 +564,46 @@ async def test_event(hass: HomeAssistant, start_ha) -> None: ), ], ) -async def test_template_delay_on_off(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_delay_on_off(hass: HomeAssistant) -> None: """Test binary sensor template delay on.""" # Ensure the initial state is not on - assert hass.states.get("binary_sensor.test_on").state != ON - assert hass.states.get("binary_sensor.test_off").state != ON + assert hass.states.get("binary_sensor.test_on").state != STATE_ON + assert hass.states.get("binary_sensor.test_off").state != STATE_ON hass.states.async_set("input_number.delay", 5) - hass.states.async_set("sensor.test_state", ON) + hass.states.async_set("sensor.test_state", STATE_ON) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_ON 
future = dt_util.utcnow() + timedelta(seconds=5) async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == ON - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_ON + assert hass.states.get("binary_sensor.test_off").state == STATE_ON # check with time changes - hass.states.async_set("sensor.test_state", OFF) + hass.states.async_set("sensor.test_state", STATE_OFF) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_ON - hass.states.async_set("sensor.test_state", ON) + hass.states.async_set("sensor.test_state", STATE_ON) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_ON - hass.states.async_set("sensor.test_state", OFF) + hass.states.async_set("sensor.test_state", STATE_OFF) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_ON future = dt_util.utcnow() + timedelta(seconds=5) async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == OFF + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_OFF @pytest.mark.parametrize("count", [1]) @@ -641,8 +643,9 @@ async def test_template_delay_on_off(hass: HomeAssistant, start_ha) -> None: ), ], ) +@pytest.mark.usefixtures("start_ha") async def test_available_without_availability_template( - hass: HomeAssistant, start_ha, entity_id + hass: HomeAssistant, entity_id ) -> None: """Ensure availability is true without an availability_template.""" state = hass.states.get(entity_id) @@ -690,7 +693,8 @@ async def test_available_without_availability_template( ), ], ) -async def test_availability_template(hass: HomeAssistant, start_ha, entity_id) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_availability_template(hass: HomeAssistant, entity_id) -> None: """Test availability template.""" hass.states.async_set("sensor.test_state", STATE_OFF) await hass.async_block_till_done() @@ -725,11 +729,12 @@ async def test_availability_template(hass: HomeAssistant, start_ha, entity_id) - }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_invalid_attribute_template( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test that errors are logged if rendering template fails.""" - hass.states.async_set("binary_sensor.test_sensor", "true") + hass.states.async_set("binary_sensor.test_sensor", STATE_ON) assert len(hass.states.async_all()) == 2 assert ("test_attribute") in caplog_setup_text assert ("TemplateError") in caplog_setup_text @@ -752,8 +757,9 @@ async def test_invalid_attribute_template( }, ], ) 
+@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" @@ -793,7 +799,7 @@ async def test_no_update_template_match_all( }, ) await hass.async_block_till_done() - hass.states.async_set("binary_sensor.test_sensor", "true") + hass.states.async_set("binary_sensor.test_sensor", STATE_ON) assert len(hass.states.async_all()) == 5 assert hass.states.get("binary_sensor.all_state").state == STATE_UNKNOWN @@ -804,29 +810,29 @@ async def test_no_update_template_match_all( hass.bus.async_fire(EVENT_HOMEASSISTANT_START) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.all_state").state == ON - assert hass.states.get("binary_sensor.all_icon").state == ON - assert hass.states.get("binary_sensor.all_entity_picture").state == ON - assert hass.states.get("binary_sensor.all_attribute").state == ON + assert hass.states.get("binary_sensor.all_state").state == STATE_ON + assert hass.states.get("binary_sensor.all_icon").state == STATE_ON + assert hass.states.get("binary_sensor.all_entity_picture").state == STATE_ON + assert hass.states.get("binary_sensor.all_attribute").state == STATE_ON - hass.states.async_set("binary_sensor.test_sensor", "false") + hass.states.async_set("binary_sensor.test_sensor", STATE_OFF) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.all_state").state == ON + assert hass.states.get("binary_sensor.all_state").state == STATE_ON # Will now process because we have one valid template - assert hass.states.get("binary_sensor.all_icon").state == OFF - assert hass.states.get("binary_sensor.all_entity_picture").state == OFF - assert hass.states.get("binary_sensor.all_attribute").state == OFF + assert hass.states.get("binary_sensor.all_icon").state == STATE_OFF + assert hass.states.get("binary_sensor.all_entity_picture").state == STATE_OFF + assert hass.states.get("binary_sensor.all_attribute").state == STATE_OFF await async_update_entity(hass, "binary_sensor.all_state") await async_update_entity(hass, "binary_sensor.all_icon") await async_update_entity(hass, "binary_sensor.all_entity_picture") await async_update_entity(hass, "binary_sensor.all_attribute") - assert hass.states.get("binary_sensor.all_state").state == ON - assert hass.states.get("binary_sensor.all_icon").state == OFF - assert hass.states.get("binary_sensor.all_entity_picture").state == OFF - assert hass.states.get("binary_sensor.all_attribute").state == OFF + assert hass.states.get("binary_sensor.all_state").state == STATE_ON + assert hass.states.get("binary_sensor.all_icon").state == STATE_OFF + assert hass.states.get("binary_sensor.all_entity_picture").state == STATE_OFF + assert hass.states.get("binary_sensor.all_attribute").state == STATE_OFF @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -839,7 +845,7 @@ async def test_no_update_template_match_all( "binary_sensor": { "name": "top-level", "unique_id": "sensor-id", - "state": ON, + "state": STATE_ON, }, }, "binary_sensor": { @@ -858,8 +864,9 @@ async def test_no_update_template_match_all( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_unique_id( - hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test unique_id option only creates one binary sensor per id.""" assert 
len(hass.states.async_all()) == 2 @@ -893,8 +900,9 @@ async def test_unique_id( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_template_validation_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, start_ha + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test binary sensor template delay on.""" caplog.set_level(logging.ERROR) @@ -957,9 +965,8 @@ async def test_template_validation_error( ), ], ) -async def test_availability_icon_picture( - hass: HomeAssistant, start_ha, entity_id -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_availability_icon_picture(hass: HomeAssistant, entity_id) -> None: """Test name, icon and picture templates are rendered at setup.""" state = hass.states.get(entity_id) assert state.state == "unavailable" @@ -998,30 +1005,30 @@ async def test_availability_icon_picture( @pytest.mark.parametrize( ("extra_config", "source_state", "restored_state", "initial_state"), [ - ({}, OFF, ON, OFF), - ({}, OFF, OFF, OFF), - ({}, OFF, STATE_UNAVAILABLE, OFF), - ({}, OFF, STATE_UNKNOWN, OFF), - ({"delay_off": 5}, OFF, ON, ON), - ({"delay_off": 5}, OFF, OFF, OFF), - ({"delay_off": 5}, OFF, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_off": 5}, OFF, STATE_UNKNOWN, STATE_UNKNOWN), - ({"delay_on": 5}, OFF, ON, OFF), - ({"delay_on": 5}, OFF, OFF, OFF), - ({"delay_on": 5}, OFF, STATE_UNAVAILABLE, OFF), - ({"delay_on": 5}, OFF, STATE_UNKNOWN, OFF), - ({}, ON, ON, ON), - ({}, ON, OFF, ON), - ({}, ON, STATE_UNAVAILABLE, ON), - ({}, ON, STATE_UNKNOWN, ON), - ({"delay_off": 5}, ON, ON, ON), - ({"delay_off": 5}, ON, OFF, ON), - ({"delay_off": 5}, ON, STATE_UNAVAILABLE, ON), - ({"delay_off": 5}, ON, STATE_UNKNOWN, ON), - ({"delay_on": 5}, ON, ON, ON), - ({"delay_on": 5}, ON, OFF, OFF), - ({"delay_on": 5}, ON, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_on": 5}, ON, STATE_UNKNOWN, STATE_UNKNOWN), + ({}, STATE_OFF, STATE_ON, STATE_OFF), + ({}, STATE_OFF, STATE_OFF, STATE_OFF), + ({}, STATE_OFF, STATE_UNAVAILABLE, STATE_OFF), + ({}, STATE_OFF, STATE_UNKNOWN, STATE_OFF), + ({"delay_off": 5}, STATE_OFF, STATE_ON, STATE_ON), + ({"delay_off": 5}, STATE_OFF, STATE_OFF, STATE_OFF), + ({"delay_off": 5}, STATE_OFF, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_off": 5}, STATE_OFF, STATE_UNKNOWN, STATE_UNKNOWN), + ({"delay_on": 5}, STATE_OFF, STATE_ON, STATE_OFF), + ({"delay_on": 5}, STATE_OFF, STATE_OFF, STATE_OFF), + ({"delay_on": 5}, STATE_OFF, STATE_UNAVAILABLE, STATE_OFF), + ({"delay_on": 5}, STATE_OFF, STATE_UNKNOWN, STATE_OFF), + ({}, STATE_ON, STATE_ON, STATE_ON), + ({}, STATE_ON, STATE_OFF, STATE_ON), + ({}, STATE_ON, STATE_UNAVAILABLE, STATE_ON), + ({}, STATE_ON, STATE_UNKNOWN, STATE_ON), + ({"delay_off": 5}, STATE_ON, STATE_ON, STATE_ON), + ({"delay_off": 5}, STATE_ON, STATE_OFF, STATE_ON), + ({"delay_off": 5}, STATE_ON, STATE_UNAVAILABLE, STATE_ON), + ({"delay_off": 5}, STATE_ON, STATE_UNKNOWN, STATE_ON), + ({"delay_on": 5}, STATE_ON, STATE_ON, STATE_ON), + ({"delay_on": 5}, STATE_ON, STATE_OFF, STATE_OFF), + ({"delay_on": 5}, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_on": 5}, STATE_ON, STATE_UNKNOWN, STATE_UNKNOWN), ], ) async def test_restore_state( @@ -1116,8 +1123,9 @@ async def test_restore_state( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_trigger_entity( - hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test trigger entity works.""" await hass.async_block_till_done() @@ -1134,7 +1142,7 @@ async def 
test_trigger_entity( await hass.async_block_till_done() state = hass.states.get("binary_sensor.hello_name") - assert state.state == ON + assert state.state == STATE_ON assert state.attributes.get("device_class") == "battery" assert state.attributes.get("icon") == "mdi:pirate" assert state.attributes.get("entity_picture") == "/local/dogs.png" @@ -1152,7 +1160,7 @@ async def test_trigger_entity( ) state = hass.states.get("binary_sensor.via_list") - assert state.state == ON + assert state.state == STATE_ON assert state.attributes.get("device_class") == "battery" assert state.attributes.get("icon") == "mdi:pirate" assert state.attributes.get("entity_picture") == "/local/dogs.png" @@ -1164,7 +1172,7 @@ async def test_trigger_entity( hass.bus.async_fire("test_event", {"beer": 2, "uno_mas": "si"}) await hass.async_block_till_done() state = hass.states.get("binary_sensor.via_list") - assert state.state == ON + assert state.state == STATE_ON assert state.attributes.get("another") == "si" @@ -1186,9 +1194,8 @@ async def test_trigger_entity( }, ], ) -async def test_template_with_trigger_templated_delay_on( - hass: HomeAssistant, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_with_trigger_templated_delay_on(hass: HomeAssistant) -> None: """Test binary sensor template with template delay on.""" state = hass.states.get("binary_sensor.test") assert state.state == STATE_UNKNOWN @@ -1207,7 +1214,7 @@ async def test_template_with_trigger_templated_delay_on( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == ON + assert state.state == STATE_ON # Now wait for the auto-off future = dt_util.utcnow() + timedelta(seconds=2) @@ -1215,7 +1222,7 @@ async def test_template_with_trigger_templated_delay_on( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == OFF + assert state.state == STATE_OFF @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -1243,8 +1250,8 @@ async def test_template_with_trigger_templated_delay_on( @pytest.mark.parametrize( ("restored_state", "initial_state", "initial_attributes"), [ - (ON, ON, ["entity_picture", "icon", "plus_one"]), - (OFF, OFF, ["entity_picture", "icon", "plus_one"]), + (STATE_ON, STATE_ON, ["entity_picture", "icon", "plus_one"]), + (STATE_OFF, STATE_OFF, ["entity_picture", "icon", "plus_one"]), (STATE_UNAVAILABLE, STATE_UNKNOWN, []), (STATE_UNKNOWN, STATE_UNKNOWN, []), ], @@ -1299,7 +1306,7 @@ async def test_trigger_entity_restore_state( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == ON + assert state.state == STATE_ON assert state.attributes["icon"] == "mdi:pirate" assert state.attributes["entity_picture"] == "/local/dogs.png" assert state.attributes["plus_one"] == 3 @@ -1323,7 +1330,7 @@ async def test_trigger_entity_restore_state( }, ], ) -@pytest.mark.parametrize("restored_state", [ON, OFF]) +@pytest.mark.parametrize("restored_state", [STATE_ON, STATE_OFF]) async def test_trigger_entity_restore_state_auto_off( hass: HomeAssistant, count, @@ -1367,7 +1374,7 @@ async def test_trigger_entity_restore_state_auto_off( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == OFF + assert state.state == STATE_OFF @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -1395,7 +1402,7 @@ async def test_trigger_entity_restore_state_auto_off_expired( freezer.move_to("2022-02-02 12:02:00+00:00") fake_state = 
State( "binary_sensor.test", - ON, + STATE_ON, {}, ) fake_extra_data = { @@ -1417,7 +1424,7 @@ async def test_trigger_entity_restore_state_auto_off_expired( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == OFF + assert state.state == STATE_OFF async def test_device_id( diff --git a/tests/components/template/test_blueprint.py b/tests/components/template/test_blueprint.py new file mode 100644 index 00000000000..1df9e738b06 --- /dev/null +++ b/tests/components/template/test_blueprint.py @@ -0,0 +1,242 @@ +"""Test blueprints.""" + +from collections.abc import Iterator +import contextlib +from os import PathLike +import pathlib +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.components import template +from homeassistant.components.blueprint import ( + BLUEPRINT_SCHEMA, + Blueprint, + BlueprintInUse, + DomainBlueprints, +) +from homeassistant.components.template import DOMAIN, SERVICE_RELOAD +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component +from homeassistant.util import yaml + +from tests.common import async_mock_service + +BUILTIN_BLUEPRINT_FOLDER = pathlib.Path(template.__file__).parent / "blueprints" + + +@contextlib.contextmanager +def patch_blueprint( + blueprint_path: str, data_path: str | PathLike[str] +) -> Iterator[None]: + """Patch blueprint loading from a different source.""" + orig_load = DomainBlueprints._load_blueprint + + @callback + def mock_load_blueprint(self, path): + if path != blueprint_path: + pytest.fail(f"Unexpected blueprint {path}") + return orig_load(self, path) + + return Blueprint( + yaml.load_yaml(data_path), + expected_domain=self.domain, + path=path, + schema=BLUEPRINT_SCHEMA, + ) + + with patch( + "homeassistant.components.blueprint.models.DomainBlueprints._load_blueprint", + mock_load_blueprint, + ): + yield + + +@contextlib.contextmanager +def patch_invalid_blueprint() -> Iterator[None]: + """Patch blueprint returning an invalid one.""" + + @callback + def mock_load_blueprint(self, path): + return Blueprint( + { + "blueprint": { + "domain": "template", + "name": "Invalid template blueprint", + }, + "binary_sensor": {}, + "sensor": {}, + }, + expected_domain=self.domain, + path=path, + schema=BLUEPRINT_SCHEMA, + ) + + with patch( + "homeassistant.components.blueprint.models.DomainBlueprints._load_blueprint", + mock_load_blueprint, + ): + yield + + +async def test_inverted_binary_sensor( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test inverted binary sensor blueprint.""" + hass.states.async_set("binary_sensor.foo", "on", {"friendly_name": "Foo"}) + hass.states.async_set("binary_sensor.bar", "off", {"friendly_name": "Bar"}) + + with patch_blueprint( + "inverted_binary_sensor.yaml", + BUILTIN_BLUEPRINT_FOLDER / "inverted_binary_sensor.yaml", + ): + assert await async_setup_component( + hass, + "template", + { + "template": [ + { + "use_blueprint": { + "path": "inverted_binary_sensor.yaml", + "input": {"reference_entity": "binary_sensor.foo"}, + }, + "name": "Inverted foo", + }, + { + "use_blueprint": { + "path": "inverted_binary_sensor.yaml", + "input": {"reference_entity": "binary_sensor.bar"}, + }, + "name": "Inverted bar", + }, + ] + }, + ) + + hass.states.async_set("binary_sensor.foo", "off", {"friendly_name": "Foo"}) + hass.states.async_set("binary_sensor.bar", "on", {"friendly_name": "Bar"}) + await 
hass.async_block_till_done() + + assert hass.states.get("binary_sensor.foo").state == "off" + assert hass.states.get("binary_sensor.bar").state == "on" + + inverted_foo = hass.states.get("binary_sensor.inverted_foo") + assert inverted_foo + assert inverted_foo.state == "on" + + inverted_bar = hass.states.get("binary_sensor.inverted_bar") + assert inverted_bar + assert inverted_bar.state == "off" + + foo_template = template.helpers.blueprint_in_template(hass, "binary_sensor.foo") + inverted_foo_template = template.helpers.blueprint_in_template( + hass, "binary_sensor.inverted_foo" + ) + assert foo_template is None + assert inverted_foo_template == "inverted_binary_sensor.yaml" + + inverted_binary_sensor_blueprint_entity_ids = ( + template.helpers.templates_with_blueprint(hass, "inverted_binary_sensor.yaml") + ) + assert len(inverted_binary_sensor_blueprint_entity_ids) == 2 + + assert len(template.helpers.templates_with_blueprint(hass, "dummy.yaml")) == 0 + + with pytest.raises(BlueprintInUse): + await template.async_get_blueprints(hass).async_remove_blueprint( + "inverted_binary_sensor.yaml" + ) + + +async def test_domain_blueprint(hass: HomeAssistant) -> None: + """Test DomainBlueprint services.""" + reload_handler_calls = async_mock_service(hass, DOMAIN, SERVICE_RELOAD) + mock_create_file = MagicMock() + mock_create_file.return_value = True + + with patch( + "homeassistant.components.blueprint.models.DomainBlueprints._create_file", + mock_create_file, + ): + await template.async_get_blueprints(hass).async_add_blueprint( + Blueprint( + { + "blueprint": { + "domain": DOMAIN, + "name": "Test", + }, + }, + expected_domain="template", + path="xxx", + schema=BLUEPRINT_SCHEMA, + ), + "xxx", + True, + ) + assert len(reload_handler_calls) == 1 + + +async def test_invalid_blueprint( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test an invalid blueprint definition.""" + + with patch_invalid_blueprint(): + assert await async_setup_component( + hass, + "template", + { + "template": [ + { + "use_blueprint": { + "path": "invalid.yaml", + }, + "name": "Invalid blueprint instance", + }, + ] + }, + ) + + assert "more than one platform defined per blueprint" in caplog.text + assert await template.async_get_blueprints(hass).async_get_blueprints() == {} + + +async def test_no_blueprint(hass: HomeAssistant) -> None: + """Test templates without blueprints.""" + with patch_blueprint( + "inverted_binary_sensor.yaml", + BUILTIN_BLUEPRINT_FOLDER / "inverted_binary_sensor.yaml", + ): + assert await async_setup_component( + hass, + "template", + { + "template": [ + {"binary_sensor": {"name": "test entity", "state": "off"}}, + { + "use_blueprint": { + "path": "inverted_binary_sensor.yaml", + "input": {"reference_entity": "binary_sensor.foo"}, + }, + "name": "inverted entity", + }, + ] + }, + ) + + hass.states.async_set("binary_sensor.foo", "off", {"friendly_name": "Foo"}) + await hass.async_block_till_done() + + assert ( + len( + template.helpers.templates_with_blueprint( + hass, "inverted_binary_sensor.yaml" + ) + ) + == 1 + ) + assert ( + template.helpers.blueprint_in_template(hass, "binary_sensor.test_entity") + is None + ) diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index a62370f4261..2c9b81e7c91 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -29,6 +29,16 @@ from tests.typing import WebSocketGenerator "extra_attrs", ), [ + ( + "alarm_control_panel", + 
{"value_template": "{{ states('alarm_control_panel.one') }}"}, + "armed_away", + {"one": "armed_away", "two": "disarmed"}, + {}, + {}, + {"code_arm_required": True, "code_format": "number"}, + {}, + ), ( "binary_sensor", { @@ -63,7 +73,7 @@ from tests.typing import WebSocketGenerator "device_class": "restart", "press": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test"}, "data": {}, } @@ -73,7 +83,7 @@ from tests.typing import WebSocketGenerator "device_class": "restart", "press": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test"}, "data": {}, } @@ -98,14 +108,26 @@ from tests.typing import WebSocketGenerator {"one": "30.0", "two": "20.0"}, {}, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": "0", + "max": "100", + "step": "0.1", + "unit_of_measurement": "cm", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "unit_of_measurement": "cm", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, {}, ), @@ -134,14 +156,14 @@ from tests.typing import WebSocketGenerator @pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_config_flow( hass: HomeAssistant, - template_type, - state_template, - template_state, - input_states, - input_attributes, - extra_input, - extra_options, - extra_attrs, + template_type: str, + state_template: dict[str, Any], + template_state: str, + input_states: dict[str, Any], + input_attributes: dict[str, Any], + extra_input: dict[str, Any], + extra_options: dict[str, Any], + extra_attrs: dict[str, Any], ) -> None: """Test the config flow.""" input_entities = ["one", "two"] @@ -200,8 +222,8 @@ async def test_config_flow( state = hass.states.get(f"{template_type}.my_template") assert state.state == template_state - for key in extra_attrs: - assert state.attributes[key] == extra_attrs[key] + for key, value in extra_attrs.items(): + assert state.attributes[key] == value @pytest.mark.parametrize( @@ -248,16 +270,32 @@ async def test_config_flow( "number", {"state": "{{ states('number.one') }}"}, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": "0", + "max": "100", + "step": "0.1", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, ), + ( + "alarm_control_panel", + {"value_template": "{{ states('alarm_control_panel.one') }}"}, + {"code_arm_required": True, "code_format": "number"}, + {"code_arm_required": True, "code_format": "number"}, + ), ( "select", {"state": "{{ states('select.one') }}"}, @@ -400,7 +438,7 @@ def get_suggested(schema, key): "device_class": "restart", "press": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test"}, "data": {}, } @@ -409,7 +447,7 @@ def get_suggested(schema, key): { "press": [ { - "service": "input_boolean.toggle", + "action": 
"input_boolean.toggle", "target": {"entity_id": "input_boolean.test"}, "data": {}, } @@ -441,17 +479,39 @@ def get_suggested(schema, key): ["30.0", "20.0"], {"one": "30.0", "two": "20.0"}, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "unit_of_measurement": "cm", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "unit_of_measurement": "cm", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, "state", ), + ( + "alarm_control_panel", + {"value_template": "{{ states('alarm_control_panel.one') }}"}, + {"value_template": "{{ states('alarm_control_panel.two') }}"}, + ["armed_away", "disarmed"], + {"one": "armed_away", "two": "disarmed"}, + {"code_arm_required": True, "code_format": "number"}, + {"code_arm_required": True, "code_format": "number"}, + "value_template", + ), ( "select", {"state": "{{ states('select.one') }}"}, @@ -477,14 +537,14 @@ def get_suggested(schema, key): @pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_options( hass: HomeAssistant, - template_type, - old_state_template, - new_state_template, - template_state, - input_states, - extra_options, - options_options, - key_template, + template_type: str, + old_state_template: dict[str, Any], + new_state_template: dict[str, Any], + template_state: list[str], + input_states: dict[str, Any], + extra_options: dict[str, Any], + options_options: dict[str, Any], + key_template: str, ) -> None: """Test reconfiguring.""" input_entities = ["one", "two"] @@ -606,7 +666,7 @@ async def test_config_flow_preview( template_type: str, state_template: str, extra_user_input: dict[str, Any], - input_states: list[str], + input_states: dict[str, Any], template_states: str, extra_attributes: list[dict[str, Any]], listeners: list[list[str]], @@ -744,7 +804,7 @@ EARLY_END_ERROR = "invalid template (TemplateSyntaxError: unexpected 'end of tem ), "unit_of_measurement": ( "'None' is not a valid unit for device class 'energy'; " - "expected one of 'GJ', 'kWh', 'MJ', 'MWh', 'Wh'" + "expected one of 'cal', 'Gcal', 'GJ', 'GWh', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'mWh', 'TWh', 'Wh'" ), }, ), @@ -756,7 +816,7 @@ async def test_config_flow_preview_bad_input( template_type: str, state_template: str, extra_user_input: dict[str, str], - error: str, + error: dict[str, str], ) -> None: """Test the config flow preview.""" client = await hass_ws_client(hass) @@ -1068,7 +1128,7 @@ async def test_option_flow_preview( new_state_template: str, extra_config_flow_data: dict[str, Any], extra_user_input: dict[str, Any], - input_states: list[str], + input_states: dict[str, Any], template_state: str, extra_attributes: dict[str, Any], listeners: list[str], @@ -1210,16 +1270,32 @@ async def test_option_flow_sensor_preview_config_entry_removed( "number", {"state": "{{ states('number.one') }}"}, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "set_value": { + "action": 
"input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, ), + ( + "alarm_control_panel", + {"value_template": "{{ states('alarm_control_panel.one') }}"}, + {"code_arm_required": True, "code_format": "number"}, + {"code_arm_required": True, "code_format": "number"}, + ), ( "select", {"state": "{{ states('select.one') }}"}, diff --git a/tests/components/template/test_cover.py b/tests/components/template/test_cover.py index 2674b9697ed..c49db59c2ee 100644 --- a/tests/components/template/test_cover.py +++ b/tests/components/template/test_cover.py @@ -5,7 +5,12 @@ from typing import Any import pytest from homeassistant import setup -from homeassistant.components.cover import ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN +from homeassistant.components.cover import ( + ATTR_POSITION, + ATTR_TILT_POSITION, + DOMAIN as COVER_DOMAIN, + CoverState, +) from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, @@ -17,12 +22,8 @@ from homeassistant.const import ( SERVICE_STOP_COVER, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, STATE_OFF, STATE_ON, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -51,13 +52,13 @@ OPEN_CLOSE_COVER_CONFIG = { } -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( ("config", "states"), [ ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -68,10 +69,24 @@ OPEN_CLOSE_COVER_CONFIG = { } }, [ - ("cover.test_state", STATE_OPEN, STATE_OPEN, {}, -1, ""), - ("cover.test_state", STATE_CLOSED, STATE_CLOSED, {}, -1, ""), - ("cover.test_state", STATE_OPENING, STATE_OPENING, {}, -1, ""), - ("cover.test_state", STATE_CLOSING, STATE_CLOSING, {}, -1, ""), + ("cover.test_state", CoverState.OPEN, CoverState.OPEN, {}, -1, ""), + ("cover.test_state", CoverState.CLOSED, CoverState.CLOSED, {}, -1, ""), + ( + "cover.test_state", + CoverState.OPENING, + CoverState.OPENING, + {}, + -1, + "", + ), + ( + "cover.test_state", + CoverState.CLOSING, + CoverState.CLOSING, + {}, + -1, + "", + ), ( "cover.test_state", "dog", @@ -80,7 +95,7 @@ OPEN_CLOSE_COVER_CONFIG = { -1, "Received invalid cover is_on state: dog", ), - ("cover.test_state", STATE_OPEN, STATE_OPEN, {}, -1, ""), + ("cover.test_state", CoverState.OPEN, CoverState.OPEN, {}, -1, ""), ( "cover.test_state", "cat", @@ -89,7 +104,7 @@ OPEN_CLOSE_COVER_CONFIG = { -1, "Received invalid cover is_on state: cat", ), - ("cover.test_state", STATE_CLOSED, STATE_CLOSED, {}, -1, ""), + ("cover.test_state", CoverState.CLOSED, CoverState.CLOSED, {}, -1, ""), ( "cover.test_state", "bear", @@ -102,7 +117,7 @@ OPEN_CLOSE_COVER_CONFIG = { ), ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -116,17 +131,45 @@ OPEN_CLOSE_COVER_CONFIG = { } }, [ - ("cover.test_state", STATE_OPEN, STATE_UNKNOWN, {}, -1, ""), - ("cover.test_state", STATE_CLOSED, STATE_UNKNOWN, {}, -1, ""), - ("cover.test_state", STATE_OPENING, STATE_OPENING, {}, -1, ""), - ("cover.test_state", STATE_CLOSING, STATE_CLOSING, {}, -1, ""), - ("cover.test", STATE_CLOSED, STATE_CLOSING, {"position": 0}, 0, ""), - ("cover.test_state", STATE_OPEN, STATE_CLOSED, {}, -1, ""), - ("cover.test", STATE_CLOSED, STATE_OPEN, {"position": 10}, 10, ""), + ("cover.test_state", CoverState.OPEN, STATE_UNKNOWN, {}, -1, ""), + ("cover.test_state", CoverState.CLOSED, STATE_UNKNOWN, {}, -1, ""), + ( + 
"cover.test_state", + CoverState.OPENING, + CoverState.OPENING, + {}, + -1, + "", + ), + ( + "cover.test_state", + CoverState.CLOSING, + CoverState.CLOSING, + {}, + -1, + "", + ), + ( + "cover.test", + CoverState.CLOSED, + CoverState.CLOSING, + {"position": 0}, + 0, + "", + ), + ("cover.test_state", CoverState.OPEN, CoverState.CLOSED, {}, -1, ""), + ( + "cover.test", + CoverState.CLOSED, + CoverState.OPEN, + {"position": 10}, + 10, + "", + ), ( "cover.test_state", "dog", - STATE_OPEN, + CoverState.OPEN, {}, -1, "Received invalid cover is_on state: dog", @@ -135,8 +178,9 @@ OPEN_CLOSE_COVER_CONFIG = { ), ], ) +@pytest.mark.usefixtures("start_ha") async def test_template_state_text( - hass: HomeAssistant, states, start_ha, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, states, caplog: pytest.LogCaptureFixture ) -> None: """Test the state text of a template.""" state = hass.states.get("cover.test_template_cover") @@ -152,13 +196,13 @@ async def test_template_state_text( assert text in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( ("config", "entity", "set_state", "test_state", "attr"), [ ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -178,7 +222,7 @@ async def test_template_state_text( ), ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -198,13 +242,13 @@ async def test_template_state_text( ), ], ) +@pytest.mark.usefixtures("start_ha") async def test_template_state_text_ignored_if_none_or_empty( hass: HomeAssistant, entity: str, set_state: str, test_state: str, attr: dict[str, Any], - start_ha, caplog: pytest.LogCaptureFixture, ) -> None: """Test ignoring an empty state text of a template.""" @@ -218,12 +262,12 @@ async def test_template_state_text_ignored_if_none_or_empty( assert "ERROR" not in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -235,18 +279,19 @@ async def test_template_state_text_ignored_if_none_or_empty( }, ], ) -async def test_template_state_boolean(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_state_boolean(hass: HomeAssistant) -> None: """Test the value_template attribute.""" state = hass.states.get("cover.test_template_cover") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -260,17 +305,18 @@ async def test_template_state_boolean(hass: HomeAssistant, start_ha) -> None: }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_template_position( - hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test the position_template attribute.""" - hass.states.async_set("cover.test", STATE_OPEN) + hass.states.async_set("cover.test", CoverState.OPEN) attrs = {} for set_state, pos, test_state in ( - (STATE_CLOSED, 42, STATE_OPEN), - (STATE_OPEN, 0.0, STATE_CLOSED), - (STATE_CLOSED, None, STATE_UNKNOWN), + 
(CoverState.CLOSED, 42, CoverState.OPEN), + (CoverState.OPEN, 0.0, CoverState.CLOSED), + (CoverState.CLOSED, None, STATE_UNKNOWN), ): attrs["position"] = pos hass.states.async_set("cover.test", set_state, attributes=attrs) @@ -281,12 +327,12 @@ async def test_template_position( assert "ValueError" not in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -298,19 +344,20 @@ async def test_template_position( }, ], ) -async def test_template_not_optimistic(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_not_optimistic(hass: HomeAssistant) -> None: """Test the is_closed attribute.""" state = hass.states.get("cover.test_template_cover") assert state.state == STATE_UNKNOWN -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( ("config", "tilt_position"), [ ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -325,7 +372,7 @@ async def test_template_not_optimistic(hass: HomeAssistant, start_ha) -> None: ), ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -340,20 +387,19 @@ async def test_template_not_optimistic(hass: HomeAssistant, start_ha) -> None: ), ], ) -async def test_template_tilt( - hass: HomeAssistant, tilt_position: float | None, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_tilt(hass: HomeAssistant, tilt_position: float | None) -> None: """Test the tilt_template attribute.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_tilt_position") == tilt_position -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -365,7 +411,7 @@ async def test_template_tilt( } }, { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -384,25 +430,26 @@ async def test_template_tilt( }, ], ) -async def test_template_out_of_bounds(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_out_of_bounds(hass: HomeAssistant) -> None: """Test template out-of-bounds condition.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_tilt_position") is None assert state.attributes.get("current_position") is None -@pytest.mark.parametrize(("count", "domain"), [(0, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(0, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": {"test_template_cover": {"value_template": "{{ 1 == 1 }}"}}, } }, { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -420,20 +467,21 @@ async def test_template_out_of_bounds(hass: HomeAssistant, start_ha) -> None: }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_template_open_or_position( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test that at least one of open_cover or set_position is 
used.""" assert hass.states.async_all("cover") == [] assert "Invalid config for 'cover' from integration 'template'" in caplog_setup_text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -445,15 +493,14 @@ async def test_template_open_or_position( }, ], ) -async def test_open_action( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_open_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test the open_cover command.""" state = hass.states.get("cover.test_template_cover") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() @@ -462,12 +509,12 @@ async def test_open_action( assert calls[0].data["caller"] == "cover.test_template_cover" -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -486,20 +533,19 @@ async def test_open_action( }, ], ) -async def test_close_stop_action( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_close_stop_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test the close-cover and stop_cover commands.""" state = hass.states.get("cover.test_template_cover") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() @@ -517,9 +563,8 @@ async def test_close_stop_action( {"input_number": {"test": {"min": "0", "max": "100", "initial": "42"}}}, ], ) -async def test_set_position( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_set_position(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test the set_position command.""" with assert_setup_component(1, "cover"): assert await setup.async_setup_component( @@ -554,7 +599,7 @@ async def test_set_position( assert state.state == STATE_UNKNOWN await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -565,7 +610,7 @@ async def test_set_position( assert calls[-1].data["position"] == 100 await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = 
hass.states.get("cover.test_template_cover") @@ -576,7 +621,7 @@ async def test_set_position( assert calls[-1].data["position"] == 0 await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -587,7 +632,7 @@ async def test_set_position( assert calls[-1].data["position"] == 100 await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -598,7 +643,7 @@ async def test_set_position( assert calls[-1].data["position"] == 0 await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 25}, blocking=True, @@ -612,12 +657,12 @@ async def test_set_position( assert calls[-1].data["position"] == 25 -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -648,17 +693,17 @@ async def test_set_position( (SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, 0), ], ) +@pytest.mark.usefixtures("start_ha") async def test_set_tilt_position( hass: HomeAssistant, service, attr, - start_ha, calls: list[ServiceCall], tilt_position, ) -> None: """Test the set_tilt_position command.""" await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, service, attr, blocking=True, @@ -671,12 +716,12 @@ async def test_set_tilt_position( assert calls[-1].data["tilt_position"] == tilt_position -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -687,15 +732,16 @@ async def test_set_tilt_position( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_set_position_optimistic( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test optimistic position mode.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_position") is None await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 42}, blocking=True, @@ -705,25 +751,25 @@ async def test_set_position_optimistic( assert state.attributes.get("current_position") == 42.0 for service, test_state in ( - (SERVICE_CLOSE_COVER, STATE_CLOSED), - (SERVICE_OPEN_COVER, STATE_OPEN), - (SERVICE_TOGGLE, STATE_CLOSED), - (SERVICE_TOGGLE, STATE_OPEN), + (SERVICE_CLOSE_COVER, CoverState.CLOSED), + (SERVICE_OPEN_COVER, CoverState.OPEN), + (SERVICE_TOGGLE, CoverState.CLOSED), + (SERVICE_TOGGLE, CoverState.OPEN), ): await hass.services.async_call( - DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") assert state.state == test_state -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", 
"domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -736,15 +782,16 @@ async def test_set_position_optimistic( }, ], ) +@pytest.mark.usefixtures("calls", "start_ha") async def test_set_tilt_position_optimistic( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the optimistic tilt_position mode.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_tilt_position") is None await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_TILT_POSITION: 42}, blocking=True, @@ -760,19 +807,19 @@ async def test_set_tilt_position_optimistic( (SERVICE_TOGGLE_COVER_TILT, 100.0), ): await hass.services.async_call( - DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_tilt_position") == pos -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -787,12 +834,13 @@ async def test_set_tilt_position_optimistic( }, ], ) -async def test_icon_template(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_icon_template(hass: HomeAssistant) -> None: """Test icon template.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("icon") == "" - state = hass.states.async_set("cover.test_state", STATE_OPEN) + state = hass.states.async_set("cover.test_state", CoverState.OPEN) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -800,12 +848,12 @@ async def test_icon_template(hass: HomeAssistant, start_ha) -> None: assert state.attributes["icon"] == "mdi:check" -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -822,12 +870,13 @@ async def test_icon_template(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_entity_picture_template(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_entity_picture_template(hass: HomeAssistant) -> None: """Test icon template.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("entity_picture") == "" - state = hass.states.async_set("cover.test_state", STATE_OPEN) + state = hass.states.async_set("cover.test_state", CoverState.OPEN) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -835,12 +884,12 @@ async def test_entity_picture_template(hass: HomeAssistant, start_ha) -> None: assert state.attributes["entity_picture"] == "/local/cover.png" -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -855,7 +904,8 @@ async def 
test_entity_picture_template(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_availability_template(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_availability_template(hass: HomeAssistant) -> None: """Test availability template.""" hass.states.async_set("availability_state.state", STATE_OFF) await hass.async_block_till_done() @@ -868,12 +918,12 @@ async def test_availability_template(hass: HomeAssistant, start_ha) -> None: assert hass.states.get("cover.test_template_cover").state != STATE_UNAVAILABLE -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -885,20 +935,19 @@ async def test_availability_template(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_availability_without_availability_template( - hass: HomeAssistant, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_availability_without_availability_template(hass: HomeAssistant) -> None: """Test that component is available if there is no.""" state = hass.states.get("cover.test_template_cover") assert state.state != STATE_UNAVAILABLE -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -911,20 +960,21 @@ async def test_availability_without_availability_template( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("cover.test_template_cover") != STATE_UNAVAILABLE assert "UndefinedError: 'x' is undefined" in caplog_setup_text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -937,18 +987,19 @@ async def test_invalid_availability_template_keeps_component_available( }, ], ) -async def test_device_class(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_device_class(hass: HomeAssistant) -> None: """Test device class.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("device_class") == "door" -@pytest.mark.parametrize(("count", "domain"), [(0, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(0, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -961,18 +1012,19 @@ async def test_device_class(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_invalid_device_class(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_invalid_device_class(hass: HomeAssistant) -> None: """Test device class.""" state = hass.states.get("cover.test_template_cover") assert not state -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - 
DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover_01": { @@ -990,17 +1042,18 @@ async def test_invalid_device_class(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_unique_id(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_unique_id(hass: HomeAssistant) -> None: """Test unique_id option only creates one cover per id.""" assert len(hass.states.async_all()) == 1 -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "garage_door": { @@ -1015,7 +1068,8 @@ async def test_unique_id(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_state_gets_lowercased(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_state_gets_lowercased(hass: HomeAssistant) -> None: """Test True/False is lowercased.""" hass.states.async_set("binary_sensor.garage_door_sensor", "off") @@ -1023,18 +1077,18 @@ async def test_state_gets_lowercased(hass: HomeAssistant, start_ha) -> None: assert len(hass.states.async_all()) == 2 - assert hass.states.get("cover.garage_door").state == STATE_OPEN + assert hass.states.get("cover.garage_door").state == CoverState.OPEN hass.states.async_set("binary_sensor.garage_door_sensor", "on") await hass.async_block_till_done() - assert hass.states.get("cover.garage_door").state == STATE_CLOSED + assert hass.states.get("cover.garage_door").state == CoverState.CLOSED -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "office": { @@ -1061,8 +1115,9 @@ async def test_state_gets_lowercased(hass: HomeAssistant, start_ha) -> None: }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_self_referencing_icon_with_no_template_is_not_a_loop( - hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test a self referencing icon with no value template is not a loop.""" assert len(hass.states.async_all()) == 1 diff --git a/tests/components/template/test_fan.py b/tests/components/template/test_fan.py index 40966d5557c..e92bc82f5ae 100644 --- a/tests/components/template/test_fan.py +++ b/tests/components/template/test_fan.py @@ -11,7 +11,7 @@ from homeassistant.components.fan import ( ATTR_PRESET_MODE, DIRECTION_FORWARD, DIRECTION_REVERSE, - DOMAIN, + DOMAIN as FAN_DOMAIN, FanEntityFeature, NotValidPresetModeError, ) @@ -36,12 +36,12 @@ _OSC_INPUT = "input_select.osc" _DIRECTION_INPUT_SELECT = "input_select.direction" -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -54,17 +54,18 @@ _DIRECTION_INPUT_SELECT = "input_select.direction" }, ], ) -async def test_missing_optional_config(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" _verify(hass, STATE_ON, None, None, None, None) -@pytest.mark.parametrize(("count", "domain"), [(0, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(0, 
FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "platform": "template", @@ -78,7 +79,7 @@ async def test_missing_optional_config(hass: HomeAssistant, start_ha) -> None: } }, { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "platform": "template", @@ -92,7 +93,7 @@ async def test_missing_optional_config(hass: HomeAssistant, start_ha) -> None: } }, { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "platform": "template", @@ -107,17 +108,18 @@ async def test_missing_optional_config(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_wrong_template_config(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_wrong_template_config(hass: HomeAssistant) -> None: """Test: missing 'value_template' will fail.""" assert hass.states.async_all("fan") == [] -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -149,7 +151,8 @@ async def test_wrong_template_config(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_templates_with_entities(hass: HomeAssistant) -> None: """Test tempalates with values from other entities.""" _verify(hass, STATE_OFF, 0, None, None, None) @@ -173,13 +176,13 @@ async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None: _verify(hass, STATE_OFF, 0, True, DIRECTION_FORWARD, None) -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( ("config", "entity", "tests"), [ ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -203,7 +206,7 @@ async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None: ), ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -229,9 +232,8 @@ async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None: ), ], ) -async def test_templates_with_entities2( - hass: HomeAssistant, entity, tests, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_templates_with_entities2(hass: HomeAssistant, entity, tests) -> None: """Test templates with values from other entities.""" for set_percentage, test_percentage, test_type in tests: hass.states.async_set(entity, set_percentage) @@ -239,12 +241,12 @@ async def test_templates_with_entities2( _verify(hass, STATE_ON, test_percentage, None, None, test_type) -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -262,9 +264,8 @@ async def test_templates_with_entities2( }, ], ) -async def test_availability_template_with_entities( - hass: HomeAssistant, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_availability_template_with_entities(hass: HomeAssistant) -> None: """Test availability tempalates with values from other entities.""" for state, test_assert in ((STATE_ON, True), (STATE_OFF, False)): hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, state) @@ -272,13 +273,13 @@ async def 
test_availability_template_with_entities( assert (hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE) == test_assert -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( ("config", "states"), [ ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -293,7 +294,7 @@ async def test_availability_template_with_entities( ), ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -311,7 +312,7 @@ async def test_availability_template_with_entities( ), ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -329,7 +330,7 @@ async def test_availability_template_with_entities( ), ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -347,19 +348,18 @@ async def test_availability_template_with_entities( ), ], ) -async def test_template_with_unavailable_entities( - hass: HomeAssistant, states, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_with_unavailable_entities(hass: HomeAssistant, states) -> None: """Test unavailability with value_template.""" _verify(hass, states[0], states[1], states[2], states[3], None) -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -378,8 +378,9 @@ async def test_template_with_unavailable_entities( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE @@ -903,12 +904,12 @@ async def _register_components( await hass.async_block_till_done() -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_template_fan_01": { @@ -940,7 +941,8 @@ async def _register_components( }, ], ) -async def test_unique_id(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_unique_id(hass: HomeAssistant) -> None: """Test unique_id option only creates one fan per id.""" assert len(hass.states.async_all()) == 1 @@ -1024,12 +1026,12 @@ async def test_implemented_percentage( assert attributes.get("supported_features") & FanEntityFeature.SET_SPEED -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "mechanical_ventilation": { @@ -1082,7 +1084,8 @@ async def test_implemented_percentage( }, ], ) -async def test_implemented_preset_mode(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_implemented_preset_mode(hass: HomeAssistant) -> None: """Test a fan that implements preset_mode.""" assert len(hass.states.async_all()) == 1 diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index 06d59d4d176..cab940d4c66 100644 --- a/tests/components/template/test_init.py +++ 
b/tests/components/template/test_init.py @@ -51,7 +51,8 @@ from tests.common import MockConfigEntry, async_fire_time_changed, get_fixture_p }, ], ) -async def test_reloadable(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_reloadable(hass: HomeAssistant) -> None: """Test that we can reload.""" hass.states.async_set("sensor.test_sensor", "mytest") await hass.async_block_till_done() @@ -102,7 +103,8 @@ async def test_reloadable(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_reloadable_can_remove(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_reloadable_can_remove(hass: HomeAssistant) -> None: """Test that we can reload and remove all template sensors.""" hass.states.async_set("sensor.test_sensor", "mytest") await hass.async_block_till_done() @@ -132,9 +134,8 @@ async def test_reloadable_can_remove(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_reloadable_stops_on_invalid_config( - hass: HomeAssistant, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_reloadable_stops_on_invalid_config(hass: HomeAssistant) -> None: """Test we stop the reload if configuration.yaml is completely broken.""" hass.states.async_set("sensor.test_sensor", "mytest") await hass.async_block_till_done() @@ -162,9 +163,8 @@ async def test_reloadable_stops_on_invalid_config( }, ], ) -async def test_reloadable_handles_partial_valid_config( - hass: HomeAssistant, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_reloadable_handles_partial_valid_config(hass: HomeAssistant) -> None: """Test we can still setup valid sensors when configuration.yaml has a broken entry.""" hass.states.async_set("sensor.test_sensor", "mytest") await hass.async_block_till_done() @@ -195,7 +195,8 @@ async def test_reloadable_handles_partial_valid_config( }, ], ) -async def test_reloadable_multiple_platforms(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_reloadable_multiple_platforms(hass: HomeAssistant) -> None: """Test that we can reload.""" hass.states.async_set("sensor.test_sensor", "mytest") await async_setup_component( @@ -239,8 +240,9 @@ async def test_reloadable_multiple_platforms(hass: HomeAssistant, start_ha) -> N }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_reload_sensors_that_reference_other_template_sensors( - hass: HomeAssistant, start_ha + hass: HomeAssistant, ) -> None: """Test that we can reload sensor that reference other template sensors.""" await async_yaml_patch_helper(hass, "ref_configuration.yaml") @@ -319,15 +321,25 @@ async def async_yaml_patch_helper(hass: HomeAssistant, filename: str) -> None: "template_type": "number", "name": "My template", "state": "{{ 10 }}", - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { "state": "{{ 11 }}", - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, ), ( @@ -444,3 +456,40 @@ async def test_change_device( ) == [] ) + + +async def test_fail_non_numerical_number_settings( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test that non 
numerical number options cause config entry setup to fail. + + Support for non numerical max, min and step was added in HA Core 2024.9.0 and + removed in HA Core 2024.9.1. + """ + + options = { + "template_type": "number", + "name": "My template", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + } + # Setup the config entry + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options=options, + title="Template", + ) + template_config_entry.add_to_hass(hass) + assert not await hass.config_entries.async_setup(template_config_entry.entry_id) + assert ( + "The 'My template' number template needs to be reconfigured, " + "max must be a number, got '{{ 100 }}'" in caplog.text + ) diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index 065a1488dc9..b5ba93a4bd0 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -773,7 +773,7 @@ async def test_temperature_action_no_template( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP: 345}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP_KELVIN: 2898}, blocking=True, ) @@ -1395,7 +1395,7 @@ async def test_all_colors_mode_no_template( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP: 123}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP_KELVIN: 8130}, blocking=True, ) @@ -1531,7 +1531,7 @@ async def test_all_colors_mode_no_template( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP: 234}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP_KELVIN: 4273}, blocking=True, ) diff --git a/tests/components/template/test_lock.py b/tests/components/template/test_lock.py index f4e81cbfd63..d9cb294c41f 100644 --- a/tests/components/template/test_lock.py +++ b/tests/components/template/test_lock.py @@ -4,11 +4,13 @@ import pytest from homeassistant import setup from homeassistant.components import lock +from homeassistant.components.lock import LockState from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, STATE_OFF, STATE_ON, + STATE_OPEN, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant, ServiceCall @@ -29,6 +31,13 @@ OPTIMISTIC_LOCK_CONFIG = { "caller": "{{ this.entity_id }}", }, }, + "open": { + "service": "test.automation", + "data_template": { + "action": "open", + "caller": "{{ this.entity_id }}", + }, + }, } OPTIMISTIC_CODED_LOCK_CONFIG = { @@ -65,19 +74,67 @@ OPTIMISTIC_CODED_LOCK_CONFIG = { }, ], ) -async def test_template_state(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_state(hass: HomeAssistant) -> None: """Test template.""" hass.states.async_set("switch.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get("lock.test_template_lock") - assert state.state == lock.STATE_LOCKED + assert state.state == LockState.LOCKED hass.states.async_set("switch.test_state",
STATE_OFF) await hass.async_block_till_done() state = hass.states.get("lock.test_template_lock") - assert state.state == lock.STATE_UNLOCKED + assert state.state == LockState.UNLOCKED + + hass.states.async_set("switch.test_state", STATE_OPEN) + await hass.async_block_till_done() + + state = hass.states.get("lock.test_template_lock") + assert state.state == LockState.OPEN + + +@pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) +@pytest.mark.parametrize( + "config", + [ + { + lock.DOMAIN: { + **OPTIMISTIC_LOCK_CONFIG, + "name": "Test lock", + "optimistic": True, + "value_template": "{{ states.switch.test_state.state }}", + } + }, + ], +) +@pytest.mark.usefixtures("start_ha") +async def test_open_lock_optimistic( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: + """Test optimistic open.""" + await setup.async_setup_component(hass, "switch", {}) + hass.states.async_set("switch.test_state", STATE_ON) + await hass.async_block_till_done() + + state = hass.states.get("lock.test_lock") + assert state.state == LockState.LOCKED + + await hass.services.async_call( + lock.DOMAIN, + lock.SERVICE_OPEN, + {ATTR_ENTITY_ID: "lock.test_lock"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + assert calls[0].data["action"] == "open" + assert calls[0].data["caller"] == "lock.test_lock" + + state = hass.states.get("lock.test_lock") + assert state.state == LockState.OPEN @pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) @@ -92,10 +149,11 @@ async def test_template_state(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_template_state_boolean_on(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_state_boolean_on(hass: HomeAssistant) -> None: """Test the setting of the state with boolean on.""" state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_LOCKED + assert state.state == LockState.LOCKED @pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) @@ -110,10 +168,11 @@ async def test_template_state_boolean_on(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_template_state_boolean_off(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_state_boolean_off(hass: HomeAssistant) -> None: """Test the setting of the state with off.""" state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_UNLOCKED + assert state.state == LockState.UNLOCKED @pytest.mark.parametrize(("count", "domain"), [(0, lock.DOMAIN)]) @@ -180,7 +239,8 @@ async def test_template_state_boolean_off(hass: HomeAssistant, start_ha) -> None }, ], ) -async def test_template_syntax_error(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_syntax_error(hass: HomeAssistant) -> None: """Test templating syntax errors don't create entities.""" assert hass.states.async_all("lock") == [] @@ -197,15 +257,16 @@ async def test_template_syntax_error(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_template_static(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_static(hass: HomeAssistant) -> None: """Test that we allow static templates.""" state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_UNLOCKED + assert state.state == LockState.UNLOCKED - hass.states.async_set("lock.template_lock", lock.STATE_LOCKED) + hass.states.async_set("lock.template_lock", LockState.LOCKED) 
await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_LOCKED + assert state.state == LockState.LOCKED @pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) @@ -220,16 +281,15 @@ async def test_template_static(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_lock_action( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_lock_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test lock action.""" await setup.async_setup_component(hass, "switch", {}) hass.states.async_set("switch.test_state", STATE_OFF) await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_UNLOCKED + assert state.state == LockState.UNLOCKED await hass.services.async_call( lock.DOMAIN, @@ -255,16 +315,15 @@ async def test_lock_action( }, ], ) -async def test_unlock_action( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_unlock_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test unlock action.""" await setup.async_setup_component(hass, "switch", {}) hass.states.async_set("switch.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_LOCKED + assert state.state == LockState.LOCKED await hass.services.async_call( lock.DOMAIN, @@ -278,6 +337,40 @@ async def test_unlock_action( assert calls[0].data["caller"] == "lock.template_lock" +@pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) +@pytest.mark.parametrize( + "config", + [ + { + lock.DOMAIN: { + **OPTIMISTIC_LOCK_CONFIG, + "value_template": "{{ states.switch.test_state.state }}", + } + }, + ], +) +@pytest.mark.usefixtures("start_ha") +async def test_open_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: + """Test open action.""" + await setup.async_setup_component(hass, "switch", {}) + hass.states.async_set("switch.test_state", STATE_ON) + await hass.async_block_till_done() + + state = hass.states.get("lock.template_lock") + assert state.state == LockState.LOCKED + + await hass.services.async_call( + lock.DOMAIN, + lock.SERVICE_OPEN, + {ATTR_ENTITY_ID: "lock.template_lock"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + assert calls[0].data["action"] == "open" + assert calls[0].data["caller"] == "lock.template_lock" + + @pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) @pytest.mark.parametrize( "config", @@ -291,8 +384,9 @@ async def test_unlock_action( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_lock_action_with_code( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test lock action with defined code format and supplied lock code.""" await setup.async_setup_component(hass, "switch", {}) @@ -300,7 +394,7 @@ async def test_lock_action_with_code( await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_UNLOCKED + assert state.state == LockState.UNLOCKED await hass.services.async_call( lock.DOMAIN, @@ -328,8 +422,9 @@ async def test_lock_action_with_code( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_unlock_action_with_code( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: 
"""Test unlock action with code format and supplied unlock code.""" await setup.async_setup_component(hass, "switch", {}) @@ -337,7 +432,7 @@ async def test_unlock_action_with_code( await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_LOCKED + assert state.state == LockState.LOCKED await hass.services.async_call( lock.DOMAIN, @@ -372,8 +467,9 @@ async def test_unlock_action_with_code( lock.SERVICE_UNLOCK, ], ) +@pytest.mark.usefixtures("start_ha") async def test_lock_actions_fail_with_invalid_code( - hass: HomeAssistant, start_ha, calls: list[ServiceCall], test_action + hass: HomeAssistant, calls: list[ServiceCall], test_action ) -> None: """Test invalid lock codes.""" await hass.services.async_call( @@ -404,8 +500,9 @@ async def test_lock_actions_fail_with_invalid_code( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_lock_actions_dont_execute_with_code_template_rendering_error( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test lock code format rendering fails block lock/unlock actions.""" await hass.services.async_call( @@ -437,8 +534,9 @@ async def test_lock_actions_dont_execute_with_code_template_rendering_error( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_actions_with_none_as_codeformat_ignores_code( - hass: HomeAssistant, action, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, action, calls: list[ServiceCall] ) -> None: """Test lock actions with supplied lock code.""" await setup.async_setup_component(hass, "switch", {}) @@ -446,7 +544,7 @@ async def test_actions_with_none_as_codeformat_ignores_code( await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_UNLOCKED + assert state.state == LockState.UNLOCKED await hass.services.async_call( lock.DOMAIN, @@ -475,8 +573,9 @@ async def test_actions_with_none_as_codeformat_ignores_code( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_actions_with_invalid_regexp_as_codeformat_never_execute( - hass: HomeAssistant, action, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, action, calls: list[ServiceCall] ) -> None: """Test lock actions don't execute with invalid regexp.""" await setup.async_setup_component(hass, "switch", {}) @@ -484,7 +583,7 @@ async def test_actions_with_invalid_regexp_as_codeformat_never_execute( await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == lock.STATE_UNLOCKED + assert state.state == LockState.UNLOCKED await hass.services.async_call( lock.DOMAIN, @@ -519,9 +618,10 @@ async def test_actions_with_invalid_regexp_as_codeformat_never_execute( ], ) @pytest.mark.parametrize( - "test_state", [lock.STATE_UNLOCKING, lock.STATE_LOCKING, lock.STATE_JAMMED] + "test_state", [LockState.UNLOCKING, LockState.LOCKING, LockState.JAMMED] ) -async def test_lock_state(hass: HomeAssistant, test_state, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_lock_state(hass: HomeAssistant, test_state) -> None: """Test value template.""" hass.states.async_set("input_select.test_state", test_state) await hass.async_block_till_done() @@ -543,7 +643,8 @@ async def test_lock_state(hass: HomeAssistant, test_state, start_ha) -> None: }, ], ) -async def test_available_template_with_entities(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_available_template_with_entities(hass: 
HomeAssistant) -> None: """Test availability templates with values from other entities.""" # When template returns true.. hass.states.async_set("availability_state.state", STATE_ON) @@ -573,8 +674,9 @@ async def test_available_template_with_entities(hass: HomeAssistant, start_ha) - }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("lock.template_lock").state != STATE_UNAVAILABLE @@ -595,7 +697,8 @@ async def test_invalid_availability_template_keeps_component_available( }, ], ) -async def test_unique_id(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_unique_id(hass: HomeAssistant) -> None: """Test unique_id option only creates one lock per id.""" await setup.async_setup_component( hass, diff --git a/tests/components/template/test_number.py b/tests/components/template/test_number.py index c8befc2b8f8..ec96245b4d0 100644 --- a/tests/components/template/test_number.py +++ b/tests/components/template/test_number.py @@ -17,7 +17,12 @@ from homeassistant.components.number import ( SERVICE_SET_VALUE as NUMBER_SERVICE_SET_VALUE, ) from homeassistant.components.template import DOMAIN -from homeassistant.const import ATTR_ICON, CONF_ENTITY_ID, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_ICON, + CONF_ENTITY_ID, + CONF_UNIT_OF_MEASUREMENT, + STATE_UNKNOWN, +) from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -58,9 +63,14 @@ async def test_setup_config_entry( "name": "My template", "template_type": "number", "state": "{{ 10 }}", - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, title="My template", ) @@ -95,7 +105,7 @@ async def test_missing_optional_config(hass: HomeAssistant) -> None: await hass.async_start() await hass.async_block_till_done() - _verify(hass, 4, 1, 0.0, 100.0) + _verify(hass, 4, 1, 0.0, 100.0, None) async def test_missing_required_keys(hass: HomeAssistant) -> None: @@ -147,6 +157,7 @@ async def test_all_optional_config(hass: HomeAssistant) -> None: "min": "{{ 3 }}", "max": "{{ 5 }}", "step": "{{ 1 }}", + "unit_of_measurement": "beer", } } }, @@ -156,7 +167,7 @@ async def test_all_optional_config(hass: HomeAssistant) -> None: await hass.async_start() await hass.async_block_till_done() - _verify(hass, 4, 1, 3, 5) + _verify(hass, 4, 1, 3, 5, "beer") async def test_templates_with_entities( @@ -244,7 +255,7 @@ async def test_templates_with_entities( assert entry assert entry.unique_id == "b-a" - _verify(hass, 4, 1, 3, 5) + _verify(hass, 4, 1, 3, 5, None) await hass.services.async_call( INPUT_NUMBER_DOMAIN, @@ -253,7 +264,7 @@ async def test_templates_with_entities( blocking=True, ) await hass.async_block_till_done() - _verify(hass, 5, 1, 3, 5) + _verify(hass, 5, 1, 3, 5, None) await hass.services.async_call( INPUT_NUMBER_DOMAIN, @@ -262,7 +273,7 @@ async def test_templates_with_entities( blocking=True, ) await hass.async_block_till_done() - _verify(hass, 5, 2, 3, 5) + _verify(hass, 5, 2, 3, 5, None) await hass.services.async_call( INPUT_NUMBER_DOMAIN, @@ -271,7 +282,7 @@ async 
def test_templates_with_entities( blocking=True, ) await hass.async_block_till_done() - _verify(hass, 5, 2, 2, 5) + _verify(hass, 5, 2, 2, 5, None) await hass.services.async_call( INPUT_NUMBER_DOMAIN, @@ -280,7 +291,7 @@ async def test_templates_with_entities( blocking=True, ) await hass.async_block_till_done() - _verify(hass, 5, 2, 2, 6) + _verify(hass, 5, 2, 2, 6, None) await hass.services.async_call( NUMBER_DOMAIN, @@ -288,7 +299,7 @@ async def test_templates_with_entities( {CONF_ENTITY_ID: _TEST_NUMBER, NUMBER_ATTR_VALUE: 2}, blocking=True, ) - _verify(hass, 2, 2, 2, 6) + _verify(hass, 2, 2, 2, 6, None) # Check this variable can be used in set_value script assert len(calls) == 1 @@ -318,6 +329,7 @@ async def test_trigger_number(hass: HomeAssistant) -> None: "min": "{{ trigger.event.data.min_beers }}", "max": "{{ trigger.event.data.max_beers }}", "step": "{{ trigger.event.data.step }}", + "unit_of_measurement": "beer", "set_value": {"event": "test_number_event"}, "optimistic": True, }, @@ -337,11 +349,17 @@ async def test_trigger_number(hass: HomeAssistant) -> None: assert state.attributes["min"] == 0.0 assert state.attributes["max"] == 100.0 assert state.attributes["step"] == 1.0 + assert state.attributes["unit_of_measurement"] == "beer" context = Context() hass.bus.async_fire( "test_event", - {"beers_drank": 3, "min_beers": 1.0, "max_beers": 5.0, "step": 0.5}, + { + "beers_drank": 3, + "min_beers": 1.0, + "max_beers": 5.0, + "step": 0.5, + }, context=context, ) await hass.async_block_till_done() @@ -369,6 +387,7 @@ def _verify( expected_step: int, expected_minimum: int, expected_maximum: int, + expected_unit_of_measurement: str | None, ) -> None: """Verify number's state.""" state = hass.states.get(_TEST_NUMBER) @@ -377,6 +396,7 @@ def _verify( assert attributes.get(ATTR_STEP) == float(expected_step) assert attributes.get(ATTR_MAX) == float(expected_maximum) assert attributes.get(ATTR_MIN) == float(expected_minimum) + assert attributes.get(CONF_UNIT_OF_MEASUREMENT) == expected_unit_of_measurement async def test_icon_template(hass: HomeAssistant) -> None: @@ -519,9 +539,14 @@ async def test_device_id( "name": "My template", "template_type": "number", "state": "{{ 10 }}", - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, "device_id": device_entry.id, }, title="My template", diff --git a/tests/components/template/test_sensor.py b/tests/components/template/test_sensor.py index fb352ebcb8c..929a890ab38 100644 --- a/tests/components/template/test_sensor.py +++ b/tests/components/template/test_sensor.py @@ -12,6 +12,7 @@ from homeassistant.components import sensor, template from homeassistant.components.template.sensor import TriggerSensorEntity from homeassistant.const import ( ATTR_ENTITY_PICTURE, + ATTR_FRIENDLY_NAME, ATTR_ICON, EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START, @@ -106,7 +107,8 @@ async def test_setup_config_entry( }, ], ) -async def test_template_legacy(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_legacy(hass: HomeAssistant) -> None: """Test template.""" assert hass.states.get(TEST_NAME).state == "It ." 
@@ -135,7 +137,8 @@ async def test_template_legacy(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_icon_template(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_icon_template(hass: HomeAssistant) -> None: """Test icon template.""" assert hass.states.get(TEST_NAME).attributes.get("icon") == "" @@ -164,7 +167,8 @@ async def test_icon_template(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_entity_picture_template(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_entity_picture_template(hass: HomeAssistant) -> None: """Test entity_picture template.""" assert hass.states.get(TEST_NAME).attributes.get("entity_picture") == "" @@ -243,9 +247,8 @@ async def test_entity_picture_template(hass: HomeAssistant, start_ha) -> None: ), ], ) -async def test_friendly_name_template( - hass: HomeAssistant, attribute, expected, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_friendly_name_template(hass: HomeAssistant, attribute, expected) -> None: """Test friendly_name template with an unknown value_template.""" assert hass.states.get(TEST_NAME).attributes.get(attribute) == expected[0] @@ -314,7 +317,8 @@ async def test_friendly_name_template( }, ], ) -async def test_template_syntax_error(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_syntax_error(hass: HomeAssistant) -> None: """Test setup with invalid device_class.""" assert hass.states.async_all("sensor") == [] @@ -336,7 +340,8 @@ async def test_template_syntax_error(hass: HomeAssistant, start_ha) -> None: }, ], ) -async def test_template_attribute_missing(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_attribute_missing(hass: HomeAssistant) -> None: """Test missing attribute template.""" assert hass.states.get(TEST_NAME).state == STATE_UNAVAILABLE @@ -362,7 +367,8 @@ async def test_template_attribute_missing(hass: HomeAssistant, start_ha) -> None }, ], ) -async def test_setup_valid_device_class(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_setup_valid_device_class(hass: HomeAssistant) -> None: """Test setup with valid device_class.""" hass.states.async_set("sensor.test_sensor", "75") await hass.async_block_till_done() @@ -434,7 +440,8 @@ async def test_creating_sensor_loads_group(hass: HomeAssistant) -> None: }, ], ) -async def test_available_template_with_entities(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_available_template_with_entities(hass: HomeAssistant) -> None: """Test availability tempalates with values from other entities.""" hass.states.async_set("sensor.availability_sensor", STATE_OFF) @@ -472,8 +479,9 @@ async def test_available_template_with_entities(hass: HomeAssistant, start_ha) - }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_invalid_attribute_template( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, start_ha, caplog_setup_text + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, caplog_setup_text ) -> None: """Test that errors are logged if rendering template fails.""" hass.states.async_set("sensor.test_sensor", "startup") @@ -508,8 +516,9 @@ async def test_invalid_attribute_template( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: 
HomeAssistant, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("sensor.my_sensor").state != STATE_UNAVAILABLE @@ -625,8 +634,9 @@ async def test_no_template_match_all( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_unique_id( - hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test unique_id option only creates one sensor per id.""" assert len(hass.states.async_all()) == 2 @@ -661,7 +671,8 @@ async def test_unique_id( }, ], ) -async def test_sun_renders_once_per_sensor(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_sun_renders_once_per_sensor(hass: HomeAssistant) -> None: """Test sun change renders the template only once per sensor.""" now = dt_util.utcnow() @@ -730,7 +741,8 @@ async def test_sun_renders_once_per_sensor(hass: HomeAssistant, start_ha) -> Non }, ], ) -async def test_this_variable(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_this_variable(hass: HomeAssistant) -> None: """Test template.""" assert hass.states.get(TEST_NAME).state == "It: " + TEST_NAME @@ -875,8 +887,9 @@ async def test_this_variable_early_hass_running( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_self_referencing_sensor_loop( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test a self referencing sensor does not loop forever.""" assert len(hass.states.async_all()) == 1 @@ -905,8 +918,9 @@ async def test_self_referencing_sensor_loop( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_self_referencing_sensor_with_icon_loop( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test a self referencing sensor loops forever with a valid self referencing icon.""" assert len(hass.states.async_all()) == 1 @@ -940,8 +954,9 @@ async def test_self_referencing_sensor_with_icon_loop( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_self_referencing_sensor_with_icon_and_picture_entity_loop( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test a self referencing sensor loop forevers with a valid self referencing icon.""" assert len(hass.states.async_all()) == 1 @@ -969,14 +984,16 @@ async def test_self_referencing_sensor_with_icon_and_picture_entity_loop( "test": { "value_template": "{{ 1 }}", "entity_picture_template": "{{ ((states.sensor.test.attributes['entity_picture'] or 0) | int) + 1 }}", + "friendly_name_template": "{{ ((states.sensor.test.attributes['friendly_name'] or 0) | int) + 1 }}", }, }, } }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_self_referencing_entity_picture_loop( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test a self referencing sensor does not loop forever with a looping self referencing entity picture.""" assert len(hass.states.async_all()) == 1 @@ -992,7 +1009,8 @@ async def test_self_referencing_entity_picture_loop( state = hass.states.get("sensor.test") assert int(state.state) == 1 - assert state.attributes[ATTR_ENTITY_PICTURE] == 2 + assert state.attributes[ATTR_ENTITY_PICTURE] == "3" + assert state.attributes[ATTR_FRIENDLY_NAME] == "3" await hass.async_block_till_done() assert int(state.state) == 1 @@ -1092,7 +1110,8 @@ async def 
test_self_referencing_icon_with_no_loop( }, ], ) -async def test_duplicate_templates(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_duplicate_templates(hass: HomeAssistant) -> None: """Test template entity where the value and friendly name as the same template.""" hass.states.async_set("sensor.test_state", "Abc") await hass.async_block_till_done() @@ -1161,8 +1180,9 @@ async def test_duplicate_templates(hass: HomeAssistant, start_ha) -> None: }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_trigger_entity( - hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test trigger entity works.""" state = hass.states.get("sensor.hello_name") @@ -1207,6 +1227,127 @@ async def test_trigger_entity( assert state.context is context +@pytest.mark.parametrize(("count", "domain"), [(1, template.DOMAIN)]) +@pytest.mark.parametrize( + "config", + [ + { + "template": [ + { + "unique_id": "listening-test-event", + "trigger": {"platform": "event", "event_type": "test_event"}, + "condition": [ + { + "condition": "template", + "value_template": "{{ trigger.event.data.beer >= 42 }}", + } + ], + "sensor": [ + { + "name": "Enough Name", + "unique_id": "enough-id", + "state": "You had enough Beer.", + } + ], + }, + ], + }, + ], +) +@pytest.mark.usefixtures("start_ha") +async def test_trigger_conditional_entity(hass: HomeAssistant) -> None: + """Test conditional trigger entity works.""" + state = hass.states.get("sensor.enough_name") + assert state is not None + assert state.state == STATE_UNKNOWN + + hass.bus.async_fire("test_event", {"beer": 2}) + await hass.async_block_till_done() + + state = hass.states.get("sensor.enough_name") + assert state.state == STATE_UNKNOWN + + hass.bus.async_fire("test_event", {"beer": 42}) + await hass.async_block_till_done() + + state = hass.states.get("sensor.enough_name") + assert state.state == "You had enough Beer." 
+ + +@pytest.mark.parametrize(("count", "domain"), [(1, template.DOMAIN)]) +@pytest.mark.parametrize( + "config", + [ + { + "template": [ + { + "unique_id": "listening-test-event", + "trigger": {"platform": "event", "event_type": "test_event"}, + "condition": [ + { + "condition": "template", + "value_template": "{{ trigger.event.data.beer / 0 == 'narf' }}", + } + ], + "sensor": [ + { + "name": "Enough Name", + "unique_id": "enough-id", + "state": "You had enough Beer.", + } + ], + }, + ], + }, + ], +) +@pytest.mark.usefixtures("start_ha") +async def test_trigger_conditional_entity_evaluation_error( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test trigger entity is not updated when condition evaluation fails.""" + hass.bus.async_fire("test_event", {"beer": 1}) + await hass.async_block_till_done() + + state = hass.states.get("sensor.enough_name") + assert state is not None + assert state.state == STATE_UNKNOWN + + assert "Error evaluating condition in 'template entity'" in caplog.text + + +@pytest.mark.parametrize(("count", "domain"), [(0, template.DOMAIN)]) +@pytest.mark.parametrize( + "config", + [ + { + "template": [ + { + "unique_id": "listening-test-event", + "trigger": {"platform": "event", "event_type": "test_event"}, + "condition": [ + {"condition": "template", "value_template": "{{ invalid"} + ], + "sensor": [ + { + "name": "Will Not Exist Name", + "state": "Unimportant", + } + ], + }, + ], + }, + ], +) +@pytest.mark.usefixtures("start_ha") +async def test_trigger_conditional_entity_invalid_condition( + hass: HomeAssistant, +) -> None: + """Test trigger entity is not created when condition is invalid.""" + state = hass.states.get("sensor.will_not_exist_name") + assert state is None + + @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @pytest.mark.parametrize( "config", @@ -1232,9 +1373,8 @@ async def test_trigger_entity( }, ], ) -async def test_trigger_entity_runs_once( - hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_trigger_entity_runs_once(hass: HomeAssistant) -> None: """Test trigger entity handles a trigger once.""" state = hass.states.get("sensor.hello_name") assert state is not None @@ -1267,8 +1407,9 @@ async def test_trigger_entity_runs_once( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_trigger_entity_render_error( - hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test trigger entity handles render error.""" state = hass.states.get("sensor.hello") @@ -1304,8 +1445,9 @@ async def test_trigger_entity_render_error( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_trigger_not_allowed_platform_config( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test we throw a helpful warning if a trigger is configured in platform config.""" state = hass.states.get(TEST_NAME) @@ -1333,7 +1475,8 @@ async def test_trigger_not_allowed_platform_config( }, ], ) -async def test_config_top_level(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_config_top_level(hass: HomeAssistant) -> None: """Test unique_id option only creates one sensor per id.""" assert len(hass.states.async_all()) == 1 state = hass.states.get("sensor.top_level") @@ -1879,9 +2022,8 @@ async def test_trigger_entity_restore_state( }, ], ) -async def test_trigger_action( - hass: 
HomeAssistant, start_ha, entity_registry: er.EntityRegistry -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_trigger_action(hass: HomeAssistant) -> None: """Test trigger entity with an action works.""" event = "test_event2" context = Context() @@ -1903,6 +2045,53 @@ async def test_trigger_action( assert events[0].context.parent_id == context.id +@pytest.mark.parametrize(("count", "domain"), [(1, template.DOMAIN)]) +@pytest.mark.parametrize( + "config", + [ + { + "template": [ + { + "unique_id": "listening-test-event", + "trigger": {"platform": "event", "event_type": "test_event"}, + "condition": [ + { + "condition": "template", + "value_template": "{{ trigger.event.data.beer >= 42 }}", + } + ], + "action": [ + {"event": "test_event_by_action"}, + ], + "sensor": [ + { + "name": "Not That Important", + "state": "Really not.", + } + ], + }, + ], + }, + ], +) +@pytest.mark.usefixtures("start_ha") +async def test_trigger_conditional_action(hass: HomeAssistant) -> None: + """Test conditional trigger entity with an action works.""" + + event = "test_event_by_action" + events = async_capture_events(hass, event) + + hass.bus.async_fire("test_event", {"beer": 1}) + await hass.async_block_till_done() + + assert len(events) == 0 + + hass.bus.async_fire("test_event", {"beer": 42}) + await hass.async_block_till_done() + + assert len(events) == 1 + + async def test_device_id( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/template/test_trigger.py b/tests/components/template/test_trigger.py index 98b03be3c64..a131f5f606b 100644 --- a/tests/components/template/test_trigger.py +++ b/tests/components/template/test_trigger.py @@ -48,8 +48,9 @@ def setup_comp(hass: HomeAssistant, calls: list[ServiceCall]) -> None: }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_bool( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on boolean change.""" assert len(calls) == 0 @@ -271,8 +272,9 @@ async def test_if_fires_on_change_bool( ), ], ) +@pytest.mark.usefixtures("start_ha") async def test_general( - hass: HomeAssistant, call_setup, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, call_setup, calls: list[ServiceCall] ) -> None: """Test for firing on change.""" assert len(calls) == 0 @@ -308,8 +310,9 @@ async def test_general( ), ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_not_fires_because_fail( - hass: HomeAssistant, call_setup, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, call_setup, calls: list[ServiceCall] ) -> None: """Test for not firing after TemplateError.""" assert len(calls) == 0 @@ -346,8 +349,9 @@ async def test_if_not_fires_because_fail( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_template_advanced( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with template advanced.""" context = Context() @@ -378,9 +382,8 @@ async def test_if_fires_on_change_with_template_advanced( }, ], ) -async def test_if_action( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_if_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test for firing if action.""" # Condition is not true yet hass.bus.async_fire("test_event") @@ -410,8 +413,9 @@ async def test_if_action( }, ], ) 
+@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_bad_template( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with bad template.""" assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE @@ -447,8 +451,9 @@ async def test_if_fires_on_change_with_bad_template( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_wait_template_with_trigger( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test using wait template with 'trigger.entity_id'.""" await hass.async_block_till_done() @@ -519,8 +524,9 @@ async def test_if_fires_on_change_with_for( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_for_advanced( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for advanced.""" context = Context() @@ -563,8 +569,9 @@ async def test_if_fires_on_change_with_for_advanced( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_for_0_advanced( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for: 0 advanced.""" context = Context() @@ -604,8 +611,9 @@ async def test_if_fires_on_change_with_for_0_advanced( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_for_2( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for.""" context = Context() @@ -635,8 +643,9 @@ async def test_if_fires_on_change_with_for_2( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_not_fires_on_change_with_for( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for.""" hass.states.async_set("test.entity", "world") @@ -669,8 +678,9 @@ async def test_if_not_fires_on_change_with_for( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_not_fires_when_turned_off_with_for( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for.""" hass.states.async_set("test.entity", "world") @@ -707,8 +717,9 @@ async def test_if_not_fires_when_turned_off_with_for( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_for_template_1( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", "world") @@ -735,8 +746,9 @@ async def test_if_fires_on_change_with_for_template_1( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_for_template_2( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", "world") @@ -763,8 +775,9 @@ async def test_if_fires_on_change_with_for_template_2( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_for_template_3( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: 
list[ServiceCall] ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", "world") @@ -791,8 +804,9 @@ async def test_if_fires_on_change_with_for_template_3( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_invalid_for_template_1( - hass: HomeAssistant, start_ha, calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for invalid for template.""" with mock.patch.object(template_trigger, "_LOGGER") as mock_logger: diff --git a/tests/components/template/test_vacuum.py b/tests/components/template/test_vacuum.py index fd3e3e872ad..6053a2bd9ec 100644 --- a/tests/components/template/test_vacuum.py +++ b/tests/components/template/test_vacuum.py @@ -3,14 +3,7 @@ import pytest from homeassistant import setup -from homeassistant.components.vacuum import ( - ATTR_BATTERY_LEVEL, - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, -) +from homeassistant.components.vacuum import ATTR_BATTERY_LEVEL, VacuumActivity from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError @@ -44,7 +37,7 @@ _BATTERY_LEVEL_INPUT_NUMBER = "input_number.battery_level" }, ), ( - STATE_CLEANING, + VacuumActivity.CLEANING, 100, { "vacuum": { @@ -94,9 +87,8 @@ _BATTERY_LEVEL_INPUT_NUMBER = "input_number.battery_level" ), ], ) -async def test_valid_configs( - hass: HomeAssistant, count, parm1, parm2, start_ha -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_valid_configs(hass: HomeAssistant, count, parm1, parm2) -> None: """Test: configs.""" assert len(hass.states.async_all("vacuum")) == count _verify(hass, parm1, parm2) @@ -118,7 +110,8 @@ async def test_valid_configs( }, ], ) -async def test_invalid_configs(hass: HomeAssistant, count, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_invalid_configs(hass: HomeAssistant, count) -> None: """Test: configs.""" assert len(hass.states.async_all("vacuum")) == count @@ -144,14 +137,15 @@ async def test_invalid_configs(hass: HomeAssistant, count, start_ha) -> None: ) ], ) -async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_templates_with_entities(hass: HomeAssistant) -> None: """Test templates with values from other entities.""" _verify(hass, STATE_UNKNOWN, None) - hass.states.async_set(_STATE_INPUT_SELECT, STATE_CLEANING) + hass.states.async_set(_STATE_INPUT_SELECT, VacuumActivity.CLEANING) hass.states.async_set(_BATTERY_LEVEL_INPUT_NUMBER, 100) await hass.async_block_till_done() - _verify(hass, STATE_CLEANING, 100) + _verify(hass, VacuumActivity.CLEANING, 100) @pytest.mark.parametrize( @@ -174,7 +168,8 @@ async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None: ) ], ) -async def test_available_template_with_entities(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_available_template_with_entities(hass: HomeAssistant) -> None: """Test availability templates with values from other entities.""" # When template returns true.. 
@@ -212,8 +207,9 @@ async def test_available_template_with_entities(hass: HomeAssistant, start_ha) - ) ], ) +@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("vacuum.test_template_vacuum") != STATE_UNAVAILABLE @@ -243,7 +239,8 @@ async def test_invalid_availability_template_keeps_component_available( ) ], ) -async def test_attribute_templates(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_attribute_templates(hass: HomeAssistant) -> None: """Test attribute_templates template.""" state = hass.states.get("vacuum.test_template_vacuum") assert state.attributes["test_attribute"] == "It ." @@ -278,8 +275,9 @@ async def test_attribute_templates(hass: HomeAssistant, start_ha) -> None: ) ], ) +@pytest.mark.usefixtures("start_ha") async def test_invalid_attribute_template( - hass: HomeAssistant, start_ha, caplog_setup_text + hass: HomeAssistant, caplog_setup_text ) -> None: """Test that errors are logged if rendering template fails.""" assert len(hass.states.async_all("vacuum")) == 1 @@ -313,7 +311,8 @@ async def test_invalid_attribute_template( ), ], ) -async def test_unique_id(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_unique_id(hass: HomeAssistant) -> None: """Test unique_id option only creates one vacuum per id.""" assert len(hass.states.async_all("vacuum")) == 1 @@ -364,8 +363,8 @@ async def test_state_services(hass: HomeAssistant, calls: list[ServiceCall]) -> await hass.async_block_till_done() # verify - assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_CLEANING - _verify(hass, STATE_CLEANING, None) + assert hass.states.get(_STATE_INPUT_SELECT).state == VacuumActivity.CLEANING + _verify(hass, VacuumActivity.CLEANING, None) assert len(calls) == 1 assert calls[-1].data["action"] == "start" assert calls[-1].data["caller"] == _TEST_VACUUM @@ -375,8 +374,8 @@ async def test_state_services(hass: HomeAssistant, calls: list[ServiceCall]) -> await hass.async_block_till_done() # verify - assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_PAUSED - _verify(hass, STATE_PAUSED, None) + assert hass.states.get(_STATE_INPUT_SELECT).state == VacuumActivity.PAUSED + _verify(hass, VacuumActivity.PAUSED, None) assert len(calls) == 2 assert calls[-1].data["action"] == "pause" assert calls[-1].data["caller"] == _TEST_VACUUM @@ -386,8 +385,8 @@ async def test_state_services(hass: HomeAssistant, calls: list[ServiceCall]) -> await hass.async_block_till_done() # verify - assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_IDLE - _verify(hass, STATE_IDLE, None) + assert hass.states.get(_STATE_INPUT_SELECT).state == VacuumActivity.IDLE + _verify(hass, VacuumActivity.IDLE, None) assert len(calls) == 3 assert calls[-1].data["action"] == "stop" assert calls[-1].data["caller"] == _TEST_VACUUM @@ -397,8 +396,8 @@ async def test_state_services(hass: HomeAssistant, calls: list[ServiceCall]) -> await hass.async_block_till_done() # verify - assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_RETURNING - _verify(hass, STATE_RETURNING, None) + assert hass.states.get(_STATE_INPUT_SELECT).state == VacuumActivity.RETURNING + _verify(hass, VacuumActivity.RETURNING, None) assert len(calls) == 4 assert calls[-1].data["action"] == "return_to_base" assert 
calls[-1].data["caller"] == _TEST_VACUUM @@ -500,7 +499,11 @@ async def _register_basic_vacuum(hass: HomeAssistant) -> None: assert await setup.async_setup_component( hass, "input_select", - {"input_select": {"state": {"name": "State", "options": [STATE_CLEANING]}}}, + { + "input_select": { + "state": {"name": "State", "options": [VacuumActivity.CLEANING]} + } + }, ) with assert_setup_component(1, "vacuum"): @@ -516,7 +519,7 @@ async def _register_basic_vacuum(hass: HomeAssistant) -> None: "service": "input_select.select_option", "data": { "entity_id": _STATE_INPUT_SELECT, - "option": STATE_CLEANING, + "option": VacuumActivity.CLEANING, }, } } @@ -548,11 +551,11 @@ async def _register_components(hass: HomeAssistant) -> None: "state": { "name": "State", "options": [ - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, + VacuumActivity.CLEANING, + VacuumActivity.DOCKED, + VacuumActivity.IDLE, + VacuumActivity.PAUSED, + VacuumActivity.RETURNING, ], }, "fan_speed": { @@ -572,7 +575,7 @@ async def _register_components(hass: HomeAssistant) -> None: "service": "input_select.select_option", "data": { "entity_id": _STATE_INPUT_SELECT, - "option": STATE_CLEANING, + "option": VacuumActivity.CLEANING, }, }, { @@ -586,7 +589,10 @@ async def _register_components(hass: HomeAssistant) -> None: "pause": [ { "service": "input_select.select_option", - "data": {"entity_id": _STATE_INPUT_SELECT, "option": STATE_PAUSED}, + "data": { + "entity_id": _STATE_INPUT_SELECT, + "option": VacuumActivity.PAUSED, + }, }, { "service": "test.automation", @@ -599,7 +605,10 @@ async def _register_components(hass: HomeAssistant) -> None: "stop": [ { "service": "input_select.select_option", - "data": {"entity_id": _STATE_INPUT_SELECT, "option": STATE_IDLE}, + "data": { + "entity_id": _STATE_INPUT_SELECT, + "option": VacuumActivity.IDLE, + }, }, { "service": "test.automation", @@ -614,7 +623,7 @@ async def _register_components(hass: HomeAssistant) -> None: "service": "input_select.select_option", "data": { "entity_id": _STATE_INPUT_SELECT, - "option": STATE_RETURNING, + "option": VacuumActivity.RETURNING, }, }, { diff --git a/tests/components/template/test_weather.py b/tests/components/template/test_weather.py index fd7694cfbed..081028b6f5b 100644 --- a/tests/components/template/test_weather.py +++ b/tests/components/template/test_weather.py @@ -23,7 +23,6 @@ from homeassistant.components.weather import ( ) from homeassistant.const import ATTR_ATTRIBUTION, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, State -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.restore_state import STORAGE_KEY as RESTORE_STATE_KEY from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -65,7 +64,8 @@ ATTR_FORECAST = "forecast" }, ], ) -async def test_template_state_text(hass: HomeAssistant, start_ha) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_template_state_text(hass: HomeAssistant) -> None: """Test the state text of a template.""" for attr, v_attr, value in ( ( @@ -117,8 +117,9 @@ async def test_template_state_text(hass: HomeAssistant, start_ha) -> None: }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_forecasts( - hass: HomeAssistant, start_ha, snapshot: SnapshotAssertion, service: str + hass: HomeAssistant, snapshot: SnapshotAssertion, service: str ) -> None: """Test forecast service.""" for attr, _v_attr, value in ( @@ -241,9 +242,9 @@ async def test_forecasts( }, 
], ) +@pytest.mark.usefixtures("start_ha") async def test_forecast_invalid( hass: HomeAssistant, - start_ha, caplog: pytest.LogCaptureFixture, service: str, expected: dict[str, Any], @@ -323,9 +324,9 @@ async def test_forecast_invalid( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_forecast_invalid_is_daytime_missing_in_twice_daily( hass: HomeAssistant, - start_ha, caplog: pytest.LogCaptureFixture, service: str, expected: dict[str, Any], @@ -391,9 +392,9 @@ async def test_forecast_invalid_is_daytime_missing_in_twice_daily( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_forecast_invalid_datetime_missing( hass: HomeAssistant, - start_ha, caplog: pytest.LogCaptureFixture, service: str, expected: dict[str, Any], @@ -458,8 +459,9 @@ async def test_forecast_invalid_datetime_missing( }, ], ) +@pytest.mark.usefixtures("start_ha") async def test_forecast_format_error( - hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture, service: str + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, service: str ) -> None: """Test forecast service invalid on incorrect format.""" for attr, _v_attr, value in ( @@ -649,9 +651,8 @@ async def test_trigger_entity_restore_state( }, ], ) -async def test_trigger_action( - hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry -) -> None: +@pytest.mark.usefixtures("start_ha") +async def test_trigger_action(hass: HomeAssistant) -> None: """Test trigger entity with an action works.""" state = hass.states.get("weather.hello_name") assert state is not None @@ -720,11 +721,10 @@ async def test_trigger_action( }, ], ) +@pytest.mark.usefixtures("start_ha") @pytest.mark.freeze_time("2023-10-19 13:50:05") async def test_trigger_weather_services( hass: HomeAssistant, - start_ha, - entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, service: str, ) -> None: diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 49f0be9cca7..0dc5d87984f 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -9,10 +9,18 @@ from unittest.mock import AsyncMock, patch import jwt import pytest +from tesla_fleet_api.const import Scope from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES -from .const import LIVE_STATUS, PRODUCTS, SITE_INFO, VEHICLE_DATA, VEHICLE_ONLINE +from .const import ( + COMMAND_OK, + LIVE_STATUS, + PRODUCTS, + SITE_INFO, + VEHICLE_DATA, + VEHICLE_ONLINE, +) from tests.common import MockConfigEntry @@ -25,16 +33,8 @@ def mock_expires_at() -> int: return time.time() + 3600 -@pytest.fixture(name="scopes") -def mock_scopes() -> list[str]: - """Fixture to set the scopes present in the OAuth token.""" - return SCOPES - - -@pytest.fixture -def normal_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: +def create_config_entry(expires_at: int, scopes: list[Scope]) -> MockConfigEntry: """Create Tesla Fleet entry in Home Assistant.""" - access_token = jwt.encode( { "sub": UID, @@ -64,6 +64,32 @@ def normal_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: ) +@pytest.fixture +def normal_config_entry(expires_at: int) -> MockConfigEntry: + """Create Tesla Fleet entry in Home Assistant.""" + return create_config_entry(expires_at, SCOPES) + + +@pytest.fixture +def noscope_config_entry(expires_at: int) -> MockConfigEntry: + """Create Tesla Fleet entry in Home Assistant without scopes.""" + return create_config_entry(expires_at, [Scope.OPENID, Scope.OFFLINE_ACCESS]) + + +@pytest.fixture 
+def readonly_config_entry(expires_at: int) -> MockConfigEntry: + """Create Tesla Fleet entry in Home Assistant without scopes.""" + return create_config_entry( + expires_at, + [ + Scope.OPENID, + Scope.OFFLINE_ACCESS, + Scope.VEHICLE_DEVICE_DATA, + Scope.ENERGY_DEVICE_DATA, + ], + ) + + @pytest.fixture(autouse=True) def mock_products() -> Generator[AsyncMock]: """Mock Tesla Fleet Api products method.""" @@ -124,10 +150,30 @@ def mock_site_info() -> Generator[AsyncMock]: yield mock_live_status -@pytest.fixture(autouse=True) +@pytest.fixture def mock_find_server() -> Generator[AsyncMock]: """Mock Tesla Fleet find server method.""" with patch( "homeassistant.components.tesla_fleet.TeslaFleetApi.find_server", ) as mock_find_server: yield mock_find_server + + +@pytest.fixture +def mock_request(): + """Mock all Tesla Fleet API requests.""" + with patch( + "homeassistant.components.tesla_fleet.TeslaFleetApi._request", + return_value=COMMAND_OK, + ) as mock_request: + yield mock_request + + +@pytest.fixture(autouse=True) +def mock_signed_command() -> Generator[AsyncMock]: + """Mock Tesla Fleet Api signed_command method.""" + with patch( + "homeassistant.components.tesla_fleet.VehicleSigned.signed_command", + return_value=COMMAND_OK, + ) as mock_signed_command: + yield mock_signed_command diff --git a/tests/components/tesla_fleet/fixtures/vehicle_data.json b/tests/components/tesla_fleet/fixtures/vehicle_data.json index 3845ae48559..d99bc8de5a8 100644 --- a/tests/components/tesla_fleet/fixtures/vehicle_data.json +++ b/tests/components/tesla_fleet/fixtures/vehicle_data.json @@ -112,6 +112,7 @@ "wiper_blade_heater": false }, "drive_state": { + "active_route_destination": "Home", "active_route_latitude": 30.2226265, "active_route_longitude": -97.6236871, "active_route_miles_to_arrival": 0.039491, diff --git a/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr b/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr index 05ef4879de6..479d647e1c7 100644 --- a/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr +++ b/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr @@ -137,6 +137,52 @@ 'state': 'off', }) # --- +# name: test_binary_sensor[binary_sensor.energy_site_storm_watch_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_storm_watch_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Storm watch active', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storm_mode_active', + 'unique_id': '123456-storm_mode_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_storm_watch_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Storm watch active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_storm_watch_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensor[binary_sensor.test_battery_heater-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1252,6 +1298,19 @@ 'state': 'off', }) # --- +# name: 
test_binary_sensor_refresh[binary_sensor.energy_site_storm_watch_active-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Storm watch active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_storm_watch_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensor_refresh[binary_sensor.test_battery_heater-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/tesla_fleet/snapshots/test_button.ambr b/tests/components/tesla_fleet/snapshots/test_button.ambr new file mode 100644 index 00000000000..8b5270d4852 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_button.ambr @@ -0,0 +1,277 @@ +# serializer version: 1 +# name: test_button[button.test_flash_lights-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_flash_lights', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Flash lights', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flash_lights', + 'unique_id': 'LRWXF7EK4KC700000-flash_lights', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[button.test_flash_lights-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Flash lights', + }), + 'context': , + 'entity_id': 'button.test_flash_lights', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[button.test_homelink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_homelink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Homelink', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'homelink', + 'unique_id': 'LRWXF7EK4KC700000-homelink', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[button.test_homelink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Homelink', + }), + 'context': , + 'entity_id': 'button.test_homelink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[button.test_honk_horn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_honk_horn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Honk horn', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'honk', + 'unique_id': 'LRWXF7EK4KC700000-honk', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[button.test_honk_horn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Honk horn', + }), + 'context': , + 'entity_id': 'button.test_honk_horn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[button.test_keyless_driving-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_keyless_driving', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Keyless driving', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'enable_keyless_driving', + 'unique_id': 'LRWXF7EK4KC700000-enable_keyless_driving', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[button.test_keyless_driving-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Keyless driving', + }), + 'context': , + 'entity_id': 'button.test_keyless_driving', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[button.test_play_fart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_play_fart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Play fart', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boombox', + 'unique_id': 'LRWXF7EK4KC700000-boombox', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[button.test_play_fart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Play fart', + }), + 'context': , + 'entity_id': 'button.test_play_fart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[button.test_wake-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_wake', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wake', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wake', + 'unique_id': 'LRWXF7EK4KC700000-wake', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[button.test_wake-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Wake', + }), + 'context': , + 'entity_id': 'button.test_wake', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_climate.ambr b/tests/components/tesla_fleet/snapshots/test_climate.ambr new file mode 100644 index 00000000000..696f8c37f08 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_climate.ambr @@ -0,0 +1,422 @@ +# serializer version: 1 +# name: test_climate[climate.test_cabin_overheat_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'target_temp_step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_cabin_overheat_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cabin overheat protection', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_state_cabin_overheat_protection', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[climate.test_cabin_overheat_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 30, + 'friendly_name': 'Test Cabin overheat protection', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'supported_features': , + 'target_temp_step': 5, + 'temperature': 40, + }), + 'context': , + 'entity_id': 'climate.test_cabin_overheat_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- +# name: test_climate[climate.test_climate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_climate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[climate.test_climate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 30.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': 'keep', + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 22.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat_cool', + }) +# --- +# name: test_climate_alt[climate.test_cabin_overheat_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, 
+ 'target_temp_step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_cabin_overheat_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cabin overheat protection', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_state_cabin_overheat_protection', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_alt[climate.test_cabin_overheat_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 30, + 'friendly_name': 'Test Cabin overheat protection', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'supported_features': , + 'target_temp_step': 5, + }), + 'context': , + 'entity_id': 'climate.test_cabin_overheat_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_climate_alt[climate.test_climate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_climate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_alt[climate.test_climate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 30.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': 'off', + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 22.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_climate_offline[climate.test_cabin_overheat_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'target_temp_step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_cabin_overheat_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cabin overheat protection', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 
'climate_state_cabin_overheat_protection', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_offline[climate.test_cabin_overheat_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Test Cabin overheat protection', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'supported_features': , + 'target_temp_step': 5, + }), + 'context': , + 'entity_id': 'climate.test_cabin_overheat_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_climate_offline[climate.test_climate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_climate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_offline[climate.test_climate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': None, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_cover.ambr b/tests/components/tesla_fleet/snapshots/test_cover.ambr new file mode 100644 index 00000000000..dbdb003d802 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_cover.ambr @@ -0,0 +1,721 @@ +# serializer version: 1 +# name: test_cover[cover.test_charge_port_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_charge_port_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge port door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'charge_state_charge_port_door_open', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[cover.test_charge_port_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Charge port door', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_charge_port_door', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_cover[cover.test_frunk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_frunk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frunk', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_ft', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[cover.test_frunk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Frunk', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_frunk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_cover[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_cover[cover.test_trunk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_trunk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Trunk', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_rt', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[cover.test_trunk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Trunk', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_trunk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_cover[cover.test_windows-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_windows', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Windows', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'windows', + 'unique_id': 'LRWXF7EK4KC700000-windows', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[cover.test_windows-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Windows', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_windows', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_cover_alt[cover.test_charge_port_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_charge_port_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge port door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'charge_state_charge_port_door_open', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_alt[cover.test_charge_port_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Charge port door', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_charge_port_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_cover_alt[cover.test_frunk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_frunk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frunk', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_ft', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_alt[cover.test_frunk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Frunk', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_frunk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_cover_alt[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_alt[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_cover_alt[cover.test_trunk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_trunk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Trunk', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_rt', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_alt[cover.test_trunk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Trunk', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_trunk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_cover_alt[cover.test_windows-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_windows', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Windows', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'windows', + 'unique_id': 'LRWXF7EK4KC700000-windows', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_alt[cover.test_windows-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Windows', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_windows', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_cover_readonly[cover.test_charge_port_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_charge_port_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge port door', + 'platform': 'tesla_fleet', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charge_port_door_open', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_readonly[cover.test_charge_port_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Charge port door', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_charge_port_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_cover_readonly[cover.test_frunk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_frunk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frunk', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_ft', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_readonly[cover.test_frunk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Frunk', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_frunk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_cover_readonly[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_readonly[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_cover_readonly[cover.test_trunk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_trunk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Trunk', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_rt', + 'unique_id': 
'LRWXF7EK4KC700000-vehicle_state_rt', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_readonly[cover.test_trunk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Trunk', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_trunk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_cover_readonly[cover.test_windows-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_windows', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Windows', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'windows', + 'unique_id': 'LRWXF7EK4KC700000-windows', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_readonly[cover.test_windows-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Windows', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_windows', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr b/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr index 194eda6fcff..02ad4b01002 100644 --- a/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr +++ b/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr @@ -96,6 +96,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'not_home', + 'state': 'home', }) # --- diff --git a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr index 902c7af131e..cdb24b1d2b5 100644 --- a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr +++ b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr @@ -165,6 +165,7 @@ 'openid', 'offline_access', 'vehicle_device_data', + 'vehicle_location', 'vehicle_cmds', 'vehicle_charging_cmds', 'energy_device_data', @@ -269,6 +270,7 @@ 'climate_state_timestamp': 1705707520649, 'climate_state_wiper_blade_heater': False, 'color': None, + 'drive_state_active_route_destination': 'Home', 'drive_state_active_route_latitude': '**REDACTED**', 'drive_state_active_route_longitude': '**REDACTED**', 'drive_state_active_route_miles_to_arrival': 0.039491, diff --git a/tests/components/tesla_fleet/snapshots/test_lock.ambr b/tests/components/tesla_fleet/snapshots/test_lock.ambr new file mode 100644 index 00000000000..3384bb0eb97 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_lock.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_lock[lock.test_charge_cable_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.test_charge_cable_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 
None, + 'original_icon': None, + 'original_name': 'Charge cable lock', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charge_port_latch', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_latch', + 'unit_of_measurement': None, + }) +# --- +# name: test_lock[lock.test_charge_cable_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Charge cable lock', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.test_charge_cable_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'locked', + }) +# --- +# name: test_lock[lock.test_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.test_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lock', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_locked', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_locked', + 'unit_of_measurement': None, + }) +# --- +# name: test_lock[lock.test_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Lock', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.test_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_media_player.ambr b/tests/components/tesla_fleet/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..cc3018364a5 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_media_player.ambr @@ -0,0 +1,136 @@ +# serializer version: 1 +# name: test_media_player[media_player.test_media_player-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.test_media_player', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Media player', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'media', + 'unique_id': 'LRWXF7EK4KC700000-media', + 'unit_of_measurement': None, + }) +# --- +# name: test_media_player[media_player.test_media_player-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Test Media player', + 'media_album_name': 'Elon Musk', + 'media_artist': 'Walter Isaacson', + 'media_duration': 651.0, + 'media_playlist': 'Elon Musk', + 'media_position': 1.0, + 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', + 'source': 'Audible', + 'supported_features': , + 'volume_level': 0.16129355359011466, + }), + 'context': , + 'entity_id': 'media_player.test_media_player', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_media_player_alt[media_player.test_media_player-statealt] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Test Media player', + 'media_album_name': '', + 'media_artist': '', + 'media_playlist': '', + 'media_title': '', + 'source': 'Spotify', + 'supported_features': , + 'volume_level': 0.25806775026025003, + }), + 'context': , + 'entity_id': 'media_player.test_media_player', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_media_player_noscope[media_player.test_media_player-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.test_media_player', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Media player', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'media', + 'unique_id': 'LRWXF7EK4KC700000-media', + 'unit_of_measurement': None, + }) +# --- +# name: test_media_player_noscope[media_player.test_media_player-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Test Media player', + 'media_album_name': 'Elon Musk', + 'media_artist': 'Walter Isaacson', + 'media_duration': 651.0, + 'media_playlist': 'Elon Musk', + 'media_position': 1.0, + 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', + 'source': 'Audible', + 'supported_features': , + 'volume_level': 0.16129355359011466, + }), + 'context': , + 'entity_id': 'media_player.test_media_player', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_number.ambr b/tests/components/tesla_fleet/snapshots/test_number.ambr new file mode 100644 index 00000000000..00dd67015fe --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_number.ambr @@ -0,0 +1,231 @@ +# serializer version: 1 +# name: test_number[number.energy_site_backup_reserve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.energy_site_backup_reserve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-alert', + 'original_name': 'Backup reserve', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'backup_reserve_percent', + 'unique_id': '123456-backup_reserve_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[number.energy_site_backup_reserve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Backup reserve', + 'icon': 'mdi:battery-alert', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.energy_site_backup_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) 
+# --- +# name: test_number[number.energy_site_off_grid_reserve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.energy_site_off_grid_reserve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-unknown', + 'original_name': 'Off grid reserve', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_grid_vehicle_charging_reserve_percent', + 'unique_id': '123456-off_grid_vehicle_charging_reserve_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[number.energy_site_off_grid_reserve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Off grid reserve', + 'icon': 'mdi:battery-unknown', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.energy_site_off_grid_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_number[number.test_charge_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 16, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.test_charge_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge current', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charge_current_request', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_current_request', + 'unit_of_measurement': , + }) +# --- +# name: test_number[number.test_charge_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Test Charge current', + 'max': 16, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.test_charge_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16', + }) +# --- +# name: test_number[number.test_charge_limit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 50, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.test_charge_limit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge limit', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charge_limit_soc', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_limit_soc', + 'unit_of_measurement': '%', + 
}) +# --- +# name: test_number[number.test_charge_limit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Charge limit', + 'max': 100, + 'min': 50, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.test_charge_limit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_select.ambr b/tests/components/tesla_fleet/snapshots/test_select.ambr new file mode 100644 index 00000000000..f29ce841113 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_select.ambr @@ -0,0 +1,585 @@ +# serializer version: 1 +# name: test_select[select.energy_site_allow_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.energy_site_allow_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow export', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_customer_preferred_export_rule', + 'unique_id': '123456-components_customer_preferred_export_rule', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.energy_site_allow_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Allow export', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.energy_site_allow_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'pv_only', + }) +# --- +# name: test_select[select.energy_site_operation_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.energy_site_operation_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Operation mode', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'default_real_mode', + 'unique_id': '123456-default_real_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.energy_site_operation_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Operation mode', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.energy_site_operation_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'self_consumption', + }) +# --- +# name: test_select[select.test_seat_heater_front_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 
'entity_id': 'select.test_seat_heater_front_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat heater front left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_heater_left', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_heater_front_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat heater front left', + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_heater_front_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_select[select.test_seat_heater_front_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_heater_front_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat heater front right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_heater_right', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_right', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_heater_front_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat heater front right', + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_heater_front_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_select[select.test_seat_heater_rear_center-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_heater_rear_center', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat heater rear center', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_heater_rear_center', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_center', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_heater_rear_center-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat heater rear center', + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_heater_rear_center', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'off', + }) +# --- +# name: test_select[select.test_seat_heater_rear_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_heater_rear_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat heater rear left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_heater_rear_left', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_heater_rear_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat heater rear left', + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_heater_rear_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_select[select.test_seat_heater_rear_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_heater_rear_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat heater rear right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_heater_rear_right', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_right', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_heater_rear_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat heater rear right', + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_heater_rear_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_select[select.test_seat_heater_third_row_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_heater_third_row_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat heater third row left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_heater_third_row_left', + 'unique_id': 
'LRWXF7EK4KC700000-climate_state_seat_heater_third_row_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_heater_third_row_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat heater third row left', + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_heater_third_row_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_select[select.test_seat_heater_third_row_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_heater_third_row_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat heater third row right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_heater_third_row_right', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_third_row_right', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_heater_third_row_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat heater third row right', + 'options': list([ + 'off', + 'low', + 'medium', + 'high', + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_heater_third_row_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_select[select.test_steering_wheel_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'low', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_steering_wheel_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Steering wheel heater', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_steering_wheel_heat_level', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_steering_wheel_heat_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_steering_wheel_heater-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Steering wheel heater', + 'options': list([ + 'off', + 'low', + 'high', + ]), + }), + 'context': , + 'entity_id': 'select.test_steering_wheel_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_sensor.ambr b/tests/components/tesla_fleet/snapshots/test_sensor.ambr index c6a4860056a..2c3780749ca 100644 --- a/tests/components/tesla_fleet/snapshots/test_sensor.ambr +++ b/tests/components/tesla_fleet/snapshots/test_sensor.ambr @@ -364,6 +364,89 @@ 'state': '0.0', }) # --- +# name: 
test_sensors[sensor.energy_site_grid_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'island_status_unknown', + 'on_grid', + 'off_grid', + 'off_grid_unintentional', + 'off_grid_intentional', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid Status', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'island_status', + 'unique_id': '123456-island_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.energy_site_grid_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site Grid Status', + 'options': list([ + 'island_status_unknown', + 'on_grid', + 'off_grid', + 'off_grid_unintentional', + 'off_grid_intentional', + ]), + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_status-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site Grid Status', + 'options': list([ + 'island_status_unknown', + 'on_grid', + 'off_grid', + 'off_grid_unintentional', + 'off_grid_intentional', + ]), + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- # name: test_sensors[sensor.energy_site_load_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tesla_fleet/snapshots/test_switch.ambr b/tests/components/tesla_fleet/snapshots/test_switch.ambr new file mode 100644 index 00000000000..2d69a7d314a --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_switch.ambr @@ -0,0 +1,489 @@ +# serializer version: 1 +# name: test_switch[switch.energy_site_allow_charging_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.energy_site_allow_charging_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Allow charging from grid', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_disallow_charge_from_grid_with_solar_installed', + 'unique_id': '123456-components_disallow_charge_from_grid_with_solar_installed', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.energy_site_allow_charging_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Energy Site Allow charging from grid', + }), + 'context': , + 'entity_id': 'switch.energy_site_allow_charging_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + 
}) +# --- +# name: test_switch[switch.energy_site_storm_watch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.energy_site_storm_watch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Storm watch', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'user_settings_storm_mode_enabled', + 'unique_id': '123456-user_settings_storm_mode_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.energy_site_storm_watch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Energy Site Storm watch', + }), + 'context': , + 'entity_id': 'switch.energy_site_storm_watch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.test_auto_seat_climate_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_auto_seat_climate_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Auto seat climate left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_auto_seat_climate_left', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_seat_climate_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.test_auto_seat_climate_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Auto seat climate left', + }), + 'context': , + 'entity_id': 'switch.test_auto_seat_climate_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.test_auto_seat_climate_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_auto_seat_climate_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Auto seat climate right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_auto_seat_climate_right', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_seat_climate_right', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.test_auto_seat_climate_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Auto seat climate right', + }), + 'context': , + 'entity_id': 'switch.test_auto_seat_climate_right', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.test_auto_steering_wheel_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_auto_steering_wheel_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Auto steering wheel heater', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_auto_steering_wheel_heat', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_steering_wheel_heat', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.test_auto_steering_wheel_heater-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Auto steering wheel heater', + }), + 'context': , + 'entity_id': 'switch.test_auto_steering_wheel_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[switch.test_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_user_charge_enable_request', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_user_charge_enable_request', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.test_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Charge', + }), + 'context': , + 'entity_id': 'switch.test_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.test_defrost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_defrost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Defrost', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_defrost_mode', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_defrost_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.test_defrost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Defrost', + }), + 'context': , + 'entity_id': 'switch.test_defrost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[switch.test_sentry_mode-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_sentry_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sentry mode', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_sentry_mode', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sentry_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.test_sentry_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Sentry mode', + }), + 'context': , + 'entity_id': 'switch.test_sentry_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_alt[switch.energy_site_allow_charging_from_grid-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Energy Site Allow charging from grid', + }), + 'context': , + 'entity_id': 'switch.energy_site_allow_charging_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_alt[switch.energy_site_storm_watch-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Energy Site Storm watch', + }), + 'context': , + 'entity_id': 'switch.energy_site_storm_watch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_alt[switch.test_auto_seat_climate_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Auto seat climate left', + }), + 'context': , + 'entity_id': 'switch.test_auto_seat_climate_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_alt[switch.test_auto_seat_climate_right-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Auto seat climate right', + }), + 'context': , + 'entity_id': 'switch.test_auto_seat_climate_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_alt[switch.test_auto_steering_wheel_heater-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Auto steering wheel heater', + }), + 'context': , + 'entity_id': 'switch.test_auto_steering_wheel_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_alt[switch.test_charge-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Charge', + }), + 'context': , + 'entity_id': 'switch.test_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_alt[switch.test_defrost-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Defrost', + }), + 'context': , + 'entity_id': 'switch.test_defrost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_switch_alt[switch.test_sentry_mode-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Test Sentry mode', + }), + 'context': , + 'entity_id': 'switch.test_sentry_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/tesla_fleet/test_button.py b/tests/components/tesla_fleet/test_button.py new file mode 100644 index 00000000000..ef1cfd90357 --- /dev/null +++ b/tests/components/tesla_fleet/test_button.py @@ -0,0 +1,99 @@ +"""Test the Tesla Fleet button platform.""" + +from copy import deepcopy +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import NotOnWhitelistFault + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, setup_platform +from .const import COMMAND_OK + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_button( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + normal_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Tests that the button entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: ["button.test_wake"]}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("name", "func"), + [ + ("flash_lights", "flash_lights"), + ("honk_horn", "honk_horn"), + ("keyless_driving", "remote_start_drive"), + ("play_fart", "remote_boombox"), + ("homelink", "trigger_homelink"), + ], +) +async def test_press( + hass: HomeAssistant, normal_config_entry: MockConfigEntry, name: str, func: str +) -> None: + """Test pressing the API buttons.""" + await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) + + with patch( + f"homeassistant.components.tesla_fleet.VehicleSpecific.{func}", + return_value=COMMAND_OK, + ) as command: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: [f"button.test_{name}"]}, + blocking=True, + ) + command.assert_called_once() + + +async def test_press_signing_error( + hass: HomeAssistant, normal_config_entry: MockConfigEntry, mock_products: AsyncMock +) -> None: + """Test pressing a button with a signing error.""" + # Enable Signing + new_product = deepcopy(mock_products.return_value) + new_product["response"][0]["command_signing"] = "required" + mock_products.return_value = new_product + + with ( + patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), + ): + await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) + + with ( + patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), + patch( + "homeassistant.components.tesla_fleet.VehicleSigned.flash_lights", + side_effect=NotOnWhitelistFault, + ), + pytest.raises(HomeAssistantError) as error, + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: ["button.test_flash_lights"]}, + blocking=True, + ) + assert error.from_exception(NotOnWhitelistFault) diff --git 
a/tests/components/tesla_fleet/test_climate.py b/tests/components/tesla_fleet/test_climate.py new file mode 100644 index 00000000000..b45e5259a5c --- /dev/null +++ b/tests/components/tesla_fleet/test_climate.py @@ -0,0 +1,458 @@ +"""Test the Tesla Fleet climate platform.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion +from tesla_fleet_api.exceptions import InvalidCommand, VehicleOffline + +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + ATTR_PRESET_MODE, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, + ATTR_TEMPERATURE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + HVACMode, +) +from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ( + HomeAssistantError, + ServiceNotSupported, + ServiceValidationError, +) +from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component + +from . import assert_entities, setup_platform +from .const import ( + COMMAND_ERRORS, + COMMAND_IGNORED_REASON, + VEHICLE_ASLEEP, + VEHICLE_DATA_ALT, + VEHICLE_ONLINE, +) + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_climate( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the climate entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_climate_services( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, + mock_request: AsyncMock, +) -> None: + """Tests that the climate services work.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + + # Turn On and Set Temp + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 20, + ATTR_HVAC_MODE: HVACMode.HEAT_COOL, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_TEMPERATURE] == 20 + assert state.state == HVACMode.HEAT_COOL + + # Set Temp + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 21, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_TEMPERATURE] == 21 + + # Set Preset + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: [entity_id], ATTR_PRESET_MODE: "keep"}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == "keep" + + # Set Preset + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: [entity_id], ATTR_PRESET_MODE: "off"}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == "off" + + # Turn Off + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: 
[entity_id], ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == HVACMode.OFF + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_climate_overheat_protection_services( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, + mock_request: AsyncMock, +) -> None: + """Tests that the climate overheat protection services work.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_cabin_overheat_protection" + + # Turn On and Set Low + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 30, + ATTR_HVAC_MODE: HVACMode.FAN_ONLY, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_TEMPERATURE] == 30 + assert state.state == HVACMode.FAN_ONLY + + # Set Temp Medium + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 35, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_TEMPERATURE] == 35 + + # Set Temp High + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 40, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_TEMPERATURE] == 40 + + # Turn Off + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == HVACMode.OFF + + # Turn On + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == HVACMode.COOL + + # Call set temp with invalid temperature + with pytest.raises( + ServiceValidationError, + match="Cabin overheat protection does not support that temperature", + ): + # Invalid Temp + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 34}, + blocking=True, + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_climate_alt( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the climate entity is correct.""" + + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_climate_offline( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the climate entity is correct.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_invalid_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests service error is handled.""" + + await setup_platform(hass, normal_config_entry, 
platforms=[Platform.CLIMATE]) + entity_id = "climate.test_climate" + + with ( + patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + side_effect=InvalidCommand, + ) as mock_on, + pytest.raises( + HomeAssistantError, + match="Command failed: The data request or command is unknown.", + ), + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_on.assert_called_once() + + +@pytest.mark.parametrize("response", COMMAND_ERRORS) +async def test_errors( + hass: HomeAssistant, response: str, normal_config_entry: MockConfigEntry +) -> None: + """Tests service reason is handled.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + + with ( + patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + return_value=response, + ) as mock_on, + pytest.raises(HomeAssistantError), + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_on.assert_called_once() + + +async def test_ignored_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests ignored error is handled.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + return_value=COMMAND_IGNORED_REASON, + ) as mock_on: + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_on.assert_called_once() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_asleep_or_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + mock_wake_up: AsyncMock, + mock_vehicle_state: AsyncMock, + freezer: FrozenDateTimeFactory, + normal_config_entry: MockConfigEntry, + mock_request: AsyncMock, +) -> None: + """Tests asleep is handled.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + mock_vehicle_data.assert_called_once() + + # Put the vehicle asleep + mock_vehicle_data.reset_mock() + mock_vehicle_data.side_effect = VehicleOffline + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_vehicle_data.assert_called_once() + mock_wake_up.reset_mock() + + # Run a command but fail trying to wake up the vehicle + mock_wake_up.side_effect = InvalidCommand + with pytest.raises( + HomeAssistantError, match="The data request or command is unknown."
+ ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_wake_up.assert_called_once() + + mock_wake_up.side_effect = None + mock_wake_up.reset_mock() + + # Run a command but timeout trying to wake up the vehicle + mock_wake_up.return_value = VEHICLE_ASLEEP + mock_vehicle_state.return_value = VEHICLE_ASLEEP + with ( + patch("homeassistant.components.tesla_fleet.helpers.asyncio.sleep"), + pytest.raises(HomeAssistantError, match="Could not wake up vehicle"), + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_wake_up.assert_called_once() + mock_vehicle_state.assert_called() + + mock_wake_up.reset_mock() + mock_vehicle_state.reset_mock() + mock_wake_up.return_value = VEHICLE_ONLINE + mock_vehicle_state.return_value = VEHICLE_ONLINE + + # Run a command and wake up the vehicle immediately + await hass.services.async_call( + CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: [entity_id]}, blocking=True + ) + await hass.async_block_till_done() + mock_wake_up.assert_called_once() + + +async def test_climate_noscope( + hass: HomeAssistant, + readonly_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Tests with no command scopes.""" + await async_setup_component(hass, "homeassistant", {}) + await setup_platform(hass, readonly_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + + with pytest.raises( + ServiceValidationError, match="Climate mode off is not supported" + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + + with pytest.raises( + ServiceNotSupported, + match="Entity climate.test_climate does not " + "support action climate.set_temperature", + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 20}, + blocking=True, + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("entity_id", "low", "high"), + [ + ("climate.test_climate", 16, 28), + ("climate.test_cabin_overheat_protection", 30, 40), + ], +) +async def test_climate_notemp( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + entity_id: str, + high: int, + low: int, +) -> None: + """Tests that set temp fails without a temp attribute.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + + with pytest.raises( + ServiceValidationError, + match="Set temperature action was used with the target temperature low/high parameter but the entity does not support it", + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TARGET_TEMP_HIGH: high, + ATTR_TARGET_TEMP_LOW: low, + }, + blocking=True, + ) diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py index 81ba92f1e9c..6cb8c60ac0c 100644 --- a/tests/components/tesla_fleet/test_config_flow.py +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -11,12 +11,11 @@ from homeassistant.components.application_credentials import ( ) from homeassistant.components.tesla_fleet.const import ( AUTHORIZE_URL, - CLIENT_ID, DOMAIN, SCOPES, TOKEN_URL, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core 
import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -52,69 +51,18 @@ async def access_token(hass: HomeAssistant) -> str: ) -@pytest.mark.usefixtures("current_request_with_host") -async def test_full_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - access_token: str, -) -> None: - """Check full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - state = config_entry_oauth2_flow._encode_jwt( +@pytest.fixture(autouse=True) +async def create_credential(hass: HomeAssistant) -> None: + """Create a user credential.""" + # Create user application credential + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( hass, - { - "flow_id": result["flow_id"], - "redirect_uri": REDIRECT, - }, + DOMAIN, + ClientCredential("user_client_id", "user_client_secret"), + "user_cred", ) - assert result["type"] is FlowResultType.EXTERNAL_STEP - - assert result["url"].startswith(AUTHORIZE_URL) - parsed_url = urlparse(result["url"]) - parsed_query = parse_qs(parsed_url.query) - assert parsed_query["response_type"][0] == "code" - assert parsed_query["client_id"][0] == CLIENT_ID - assert parsed_query["redirect_uri"][0] == REDIRECT - assert parsed_query["state"][0] == state - assert parsed_query["scope"][0] == " ".join(SCOPES) - assert parsed_query["code_challenge"][0] is not None - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.clear_requests() - aioclient_mock.post( - TOKEN_URL, - json={ - "refresh_token": "mock-refresh-token", - "access_token": access_token, - "type": "Bearer", - "expires_in": 60, - }, - ) - with patch( - "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True - ) as mock_setup: - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == UNIQUE_ID - assert "result" in result - assert result["result"].unique_id == UNIQUE_ID - assert "token" in result["result"].data - assert result["result"].data["token"]["access_token"] == access_token - assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" - @pytest.mark.usefixtures("current_request_with_host") async def test_full_flow_user_cred( @@ -125,24 +73,10 @@ async def test_full_flow_user_cred( ) -> None: """Check full flow.""" - # Create user application credential - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential("user_client_id", "user_client_secret"), - "user_cred", - ) - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"implementation": "user_cred"} - ) assert result["type"] is FlowResultType.EXTERNAL_STEP state = config_entry_oauth2_flow._encode_jwt( @@ -211,15 +145,7 @@ async def test_reauthentication( ) old_entry.add_to_hass(hass) - result = await 
hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -267,15 +193,7 @@ async def test_reauth_account_mismatch( old_entry = MockConfigEntry(domain=DOMAIN, unique_id="baduid", version=1, data={}) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) diff --git a/tests/components/tesla_fleet/test_cover.py b/tests/components/tesla_fleet/test_cover.py new file mode 100644 index 00000000000..ac5307b2fdd --- /dev/null +++ b/tests/components/tesla_fleet/test_cover.py @@ -0,0 +1,235 @@ +"""Test the Tesla Fleet cover platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline + +from homeassistant.components.cover import ( + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_STOP_COVER, + CoverState, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, setup_platform +from .const import COMMAND_OK, VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_cover( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the cover entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.COVER]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_cover_alt( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the cover entities are correct with alternate values.""" + + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + await setup_platform(hass, normal_config_entry, [Platform.COVER]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_cover_readonly( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + readonly_config_entry: MockConfigEntry, +) -> None: + """Tests that the cover entities are correct without scopes.""" + + await setup_platform(hass, readonly_config_entry, [Platform.COVER]) + assert_entities(hass, readonly_config_entry.entry_id, entity_registry, snapshot) + + +async def test_cover_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the cover entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry,
[Platform.COVER]) + state = hass.states.get("cover.test_windows") + assert state.state == STATE_UNKNOWN + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_cover_services( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the cover services work.""" + + await setup_platform(hass, normal_config_entry, [Platform.COVER]) + + # Vent Windows + entity_id = "cover.test_windows" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.window_control", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.OPEN + + call.reset_mock() + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: ["cover.test_windows"]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.CLOSED + + # Charge Port Door + entity_id = "cover.test_charge_port_door" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.charge_port_door_open", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.OPEN + + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.charge_port_door_close", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.CLOSED + + # Frunk + entity_id = "cover.test_frunk" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.actuate_trunk", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.OPEN + + # Trunk + entity_id = "cover.test_trunk" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.actuate_trunk", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.OPEN + + call.reset_mock() + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.CLOSED + + # Sunroof + entity_id = "cover.test_sunroof" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.sun_roof_control", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.OPEN + + call.reset_mock() + await hass.services.async_call( + COVER_DOMAIN, +
SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.OPEN + + call.reset_mock() + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state == CoverState.CLOSED diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py index b5eb21d1cdd..7c17f986663 100644 --- a/tests/components/tesla_fleet/test_init.py +++ b/tests/components/tesla_fleet/test_init.py @@ -1,7 +1,10 @@ """Test the Tesla Fleet init.""" -from unittest.mock import AsyncMock +from copy import deepcopy +from unittest.mock import AsyncMock, patch +from aiohttp import RequestInfo +from aiohttp.client_exceptions import ClientResponseError from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion @@ -16,6 +19,7 @@ from tesla_fleet_api.exceptions import ( VehicleOffline, ) +from homeassistant.components.tesla_fleet.const import AUTHORIZE_URL from homeassistant.components.tesla_fleet.coordinator import ( ENERGY_INTERVAL, ENERGY_INTERVAL_SECONDS, @@ -72,6 +76,50 @@ async def test_init_error( assert normal_config_entry.state is state +async def test_oauth_refresh_expired( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, +) -> None: + """Test init with expired Oauth token.""" + + # Patch the token refresh to raise an error + with patch( + "homeassistant.components.tesla_fleet.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + RequestInfo(AUTHORIZE_URL, "POST", {}, AUTHORIZE_URL), None, status=401 + ), + ) as mock_async_ensure_token_valid: + # Trigger an unmocked function call + mock_products.side_effect = InvalidRegion + await setup_platform(hass, normal_config_entry) + + mock_async_ensure_token_valid.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_oauth_refresh_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, +) -> None: + """Test init with Oauth refresh failure.""" + + # Patch the token refresh to raise an error + with patch( + "homeassistant.components.tesla_fleet.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + RequestInfo(AUTHORIZE_URL, "POST", {}, AUTHORIZE_URL), None, status=400 + ), + ) as mock_async_ensure_token_valid: + # Trigger an unmocked function call + mock_products.side_effect = InvalidRegion + await setup_platform(hass, normal_config_entry) + + mock_async_ensure_token_valid.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_RETRY + + # Test devices async def test_devices( hass: HomeAssistant, @@ -357,3 +405,22 @@ async def test_init_region_issue_failed( await setup_platform(hass, normal_config_entry) mock_find_server.assert_called_once() assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_signing( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, +) -> None: + """Tests when a vehicle requires signing.""" + + # Make the vehicle require command signing + products = deepcopy(mock_products.return_value) + products["response"][0]["command_signing"] = "required" + mock_products.return_value = products + + with patch( + 
"homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key" + ) as mock_get_private_key: + await setup_platform(hass, normal_config_entry) + mock_get_private_key.assert_called_once() diff --git a/tests/components/tesla_fleet/test_lock.py b/tests/components/tesla_fleet/test_lock.py new file mode 100644 index 00000000000..00b77aefcaf --- /dev/null +++ b/tests/components/tesla_fleet/test_lock.py @@ -0,0 +1,111 @@ +"""Test the Tesla Fleet lock platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline + +from homeassistant.components.lock import ( + DOMAIN as LOCK_DOMAIN, + SERVICE_LOCK, + SERVICE_UNLOCK, + LockState, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, setup_platform +from .const import COMMAND_OK + +from tests.common import MockConfigEntry + + +async def test_lock( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the lock entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.LOCK]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_lock_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the lock entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.LOCK]) + state = hass.states.get("lock.test_lock") + assert state.state == STATE_UNKNOWN + + +async def test_lock_services( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the lock services work.""" + + await setup_platform(hass, normal_config_entry, [Platform.LOCK]) + + entity_id = "lock.test_lock" + + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.door_lock", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + LOCK_DOMAIN, + SERVICE_LOCK, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == LockState.LOCKED + call.assert_called_once() + + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.door_unlock", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + LOCK_DOMAIN, + SERVICE_UNLOCK, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == LockState.UNLOCKED + call.assert_called_once() + + entity_id = "lock.test_charge_cable_lock" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + LOCK_DOMAIN, + SERVICE_LOCK, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.charge_port_door_open", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + LOCK_DOMAIN, + SERVICE_UNLOCK, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == LockState.UNLOCKED + call.assert_called_once() diff --git a/tests/components/tesla_fleet/test_media_player.py b/tests/components/tesla_fleet/test_media_player.py new file mode 100644 index 
00000000000..4c833e7499f --- /dev/null +++ b/tests/components/tesla_fleet/test_media_player.py @@ -0,0 +1,157 @@ +"""Test the Tesla Fleet media player platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline + +from homeassistant.components.media_player import ( + ATTR_MEDIA_VOLUME_LEVEL, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_VOLUME_SET, + MediaPlayerState, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, assert_entities_alt, setup_platform +from .const import COMMAND_OK, VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry + + +async def test_media_player( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the media player entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.MEDIA_PLAYER]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_media_player_alt( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the media player entities are correct.""" + + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + await setup_platform(hass, normal_config_entry, [Platform.MEDIA_PLAYER]) + assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_media_player_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the media player entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.MEDIA_PLAYER]) + state = hass.states.get("media_player.test_media_player") + assert state.state == MediaPlayerState.OFF + + +async def test_media_player_noscope( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + readonly_config_entry: MockConfigEntry, +) -> None: + """Tests that the media player entities are correct without required scope.""" + + await setup_platform(hass, readonly_config_entry, [Platform.MEDIA_PLAYER]) + assert_entities(hass, readonly_config_entry.entry_id, entity_registry, snapshot) + + +async def test_media_player_services( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the media player services work.""" + + await setup_platform(hass, normal_config_entry, [Platform.MEDIA_PLAYER]) + + entity_id = "media_player.test_media_player" + + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.adjust_volume", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: entity_id, ATTR_MEDIA_VOLUME_LEVEL: 0.5}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.5 + call.assert_called_once() + + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", + return_value=COMMAND_OK, + ) as call: + await 
hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PAUSE, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == MediaPlayerState.PAUSED + call.assert_called_once() + + # This test will fail without the previous call to pause playback + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PLAY, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == MediaPlayerState.PLAYING + call.assert_called_once() + + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.media_next_track", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + call.assert_called_once() + + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.media_prev_track", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PREVIOUS_TRACK, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + call.assert_called_once() diff --git a/tests/components/tesla_fleet/test_number.py b/tests/components/tesla_fleet/test_number.py new file mode 100644 index 00000000000..8551a99ee29 --- /dev/null +++ b/tests/components/tesla_fleet/test_number.py @@ -0,0 +1,119 @@ +"""Test the Tesla Fleet number platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import assert_entities, setup_platform +from .const import COMMAND_OK, VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the number entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.NUMBER]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_number_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the number entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.NUMBER]) + state = hass.states.get("number.test_charge_current") + assert state.state == STATE_UNKNOWN + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number_services( + hass: HomeAssistant, mock_vehicle_data, normal_config_entry: MockConfigEntry +) -> None: + """Tests that the number services work.""" + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + await setup_platform(hass, normal_config_entry, [Platform.NUMBER]) + + entity_id = "number.test_charge_current" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.set_charging_amps", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 16}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == "16" + call.assert_called_once() + + entity_id = "number.test_charge_limit" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.set_charge_limit", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 60}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == "60" + call.assert_called_once() + + entity_id = "number.energy_site_backup_reserve" + with patch( + "homeassistant.components.tesla_fleet.EnergySpecific.backup", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 80, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == "80" + call.assert_called_once() + + entity_id = "number.energy_site_off_grid_reserve" + with patch( + "homeassistant.components.tesla_fleet.EnergySpecific.off_grid_vehicle_charging_reserve", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 88}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == "88" + call.assert_called_once() diff --git a/tests/components/tesla_fleet/test_select.py b/tests/components/tesla_fleet/test_select.py new file mode 100644 index 00000000000..902b28ddb7a --- /dev/null +++ b/tests/components/tesla_fleet/test_select.py @@ -0,0 +1,136 @@ +"""Test the Tesla Fleet select platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion +from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode +from tesla_fleet_api.exceptions import VehicleOffline + +from 
homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.components.tesla_fleet.select import LOW +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, setup_platform +from .const import COMMAND_OK, VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_select( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the select entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.SELECT]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_select_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the select entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.SELECT]) + state = hass.states.get("select.test_seat_heater_front_left") + assert state.state == STATE_UNKNOWN + + +async def test_select_services( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the select services work.""" + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + await setup_platform(hass, normal_config_entry, [Platform.SELECT]) + + entity_id = "select.test_seat_heater_front_left" + with ( + patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.remote_seat_heater_request", + return_value=COMMAND_OK, + ) as remote_seat_heater_request, + patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + return_value=COMMAND_OK, + ) as auto_conditioning_start, + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: LOW}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == LOW + auto_conditioning_start.assert_called_once() + remote_seat_heater_request.assert_called_once() + + entity_id = "select.test_steering_wheel_heater" + with ( + patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.remote_steering_wheel_heat_level_request", + return_value=COMMAND_OK, + ) as remote_steering_wheel_heat_level_request, + patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + return_value=COMMAND_OK, + ) as auto_conditioning_start, + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: LOW}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == LOW + auto_conditioning_start.assert_called_once() + remote_steering_wheel_heat_level_request.assert_called_once() + + entity_id = "select.energy_site_operation_mode" + with patch( + "homeassistant.components.tesla_fleet.EnergySpecific.operation", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: EnergyOperationMode.AUTONOMOUS.value, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == EnergyOperationMode.AUTONOMOUS.value + 
call.assert_called_once() + + entity_id = "select.energy_site_allow_export" + with patch( + "homeassistant.components.tesla_fleet.EnergySpecific.grid_import_export", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: EnergyExportMode.BATTERY_OK.value}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == EnergyExportMode.BATTERY_OK.value + call.assert_called_once() diff --git a/tests/components/tesla_fleet/test_sensor.py b/tests/components/tesla_fleet/test_sensor.py index 377179ca26a..5faebbc47e2 100644 --- a/tests/components/tesla_fleet/test_sensor.py +++ b/tests/components/tesla_fleet/test_sensor.py @@ -1,13 +1,14 @@ """Test the Tesla Fleet sensor platform.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL -from homeassistant.const import Platform +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -41,3 +42,38 @@ async def test_sensors( await hass.async_block_till_done() assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +@pytest.mark.parametrize( + ("entity_id", "initial", "restored"), + [ + ("sensor.test_battery_level", "77", "77"), + ("sensor.test_outside_temperature", "30", "30"), + ("sensor.test_time_to_arrival", "2024-01-01T00:00:06+00:00", STATE_UNAVAILABLE), + ], +) +async def test_sensors_restore( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + normal_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + mock_vehicle_data: AsyncMock, + entity_id: str, + initial: str, + restored: str, +) -> None: + """Test if the sensor should restore it's state or not when vehicle is offline.""" + + freezer.move_to("2024-01-01 00:00:00+00:00") + + await setup_platform(hass, normal_config_entry, [Platform.SENSOR]) + + assert hass.states.get(entity_id).state == initial + + mock_vehicle_data.side_effect = VehicleOffline + + with patch("homeassistant.components.tesla_fleet.PLATFORMS", [Platform.SENSOR]): + assert await hass.config_entries.async_reload(normal_config_entry.entry_id) + + assert hass.states.get(entity_id).state == restored diff --git a/tests/components/tesla_fleet/test_switch.py b/tests/components/tesla_fleet/test_switch.py new file mode 100644 index 00000000000..fba4fc05cc4 --- /dev/null +++ b/tests/components/tesla_fleet/test_switch.py @@ -0,0 +1,167 @@ +"""Test the tesla_fleet switch platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . 
import assert_entities, assert_entities_alt, setup_platform +from .const import COMMAND_OK, VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry + + +async def test_switch( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the switch entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_switch_alt( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the switch entities are correct.""" + + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) + assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_switch_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the switch entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) + state = hass.states.get("switch.test_auto_seat_climate_left") + assert state.state == STATE_UNKNOWN + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("name", "on", "off"), + [ + ("test_charge", "VehicleSpecific.charge_start", "VehicleSpecific.charge_stop"), + ( + "test_auto_seat_climate_left", + "VehicleSpecific.remote_auto_seat_climate_request", + "VehicleSpecific.remote_auto_seat_climate_request", + ), + ( + "test_auto_seat_climate_right", + "VehicleSpecific.remote_auto_seat_climate_request", + "VehicleSpecific.remote_auto_seat_climate_request", + ), + ( + "test_auto_steering_wheel_heater", + "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", + "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", + ), + ( + "test_defrost", + "VehicleSpecific.set_preconditioning_max", + "VehicleSpecific.set_preconditioning_max", + ), + ( + "energy_site_storm_watch", + "EnergySpecific.storm_mode", + "EnergySpecific.storm_mode", + ), + ( + "energy_site_allow_charging_from_grid", + "EnergySpecific.grid_import_export", + "EnergySpecific.grid_import_export", + ), + ( + "test_sentry_mode", + "VehicleSpecific.set_sentry_mode", + "VehicleSpecific.set_sentry_mode", + ), + ], +) +async def test_switch_services( + hass: HomeAssistant, + name: str, + on: str, + off: str, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the switch service calls work.""" + + await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) + + entity_id = f"switch.{name}" + with patch( + f"homeassistant.components.tesla_fleet.{on}", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == STATE_ON + call.assert_called_once() + + with patch( + f"homeassistant.components.tesla_fleet.{off}", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + call.assert_called_once() + + +async def 
test_switch_no_scope( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + readonly_config_entry: MockConfigEntry, +) -> None: + """Tests that the switch entities are correct.""" + + await setup_platform(hass, readonly_config_entry, [Platform.SWITCH]) + with pytest.raises(ServiceValidationError, match="Missing vehicle commands scope"): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "switch.test_auto_steering_wheel_heater"}, + blocking=True, + ) diff --git a/tests/components/teslemetry/__init__.py b/tests/components/teslemetry/__init__.py index c4fbdaf3fbd..b6b9df7eb4b 100644 --- a/tests/components/teslemetry/__init__.py +++ b/tests/components/teslemetry/__init__.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from homeassistant.components.teslemetry.const import DOMAIN from homeassistant.const import Platform diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index 03b9e2c6eb6..256428aa703 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -1,4 +1,4 @@ -"""Fixtures for Tessie.""" +"""Fixtures for Teslemetry.""" from __future__ import annotations @@ -10,6 +10,7 @@ import pytest from .const import ( COMMAND_OK, + ENERGY_HISTORY, LIVE_STATUS, METADATA, PRODUCTS, @@ -95,3 +96,22 @@ def mock_site_info(): side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status + + +@pytest.fixture(autouse=True) +def mock_energy_history(): + """Mock Teslemetry Energy Specific site_info method.""" + with patch( + "homeassistant.components.teslemetry.EnergySpecific.energy_history", + return_value=ENERGY_HISTORY, + ) as mock_live_status: + yield mock_live_status + + +@pytest.fixture(autouse=True) +def mock_listen(): + """Mock Teslemetry Stream listen method.""" + with patch( + "homeassistant.components.teslemetry.TeslemetryStream.listen", + ) as mock_listen: + yield mock_listen diff --git a/tests/components/teslemetry/const.py b/tests/components/teslemetry/const.py index 6a3a657a1b1..bf483d576cd 100644 --- a/tests/components/teslemetry/const.py +++ b/tests/components/teslemetry/const.py @@ -12,9 +12,12 @@ WAKE_UP_ASLEEP = {"response": {"state": TeslemetryState.ASLEEP}, "error": None} PRODUCTS = load_json_object_fixture("products.json", DOMAIN) VEHICLE_DATA = load_json_object_fixture("vehicle_data.json", DOMAIN) +VEHICLE_DATA_ASLEEP = load_json_object_fixture("vehicle_data.json", DOMAIN) +VEHICLE_DATA_ASLEEP["response"]["state"] = TeslemetryState.OFFLINE VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN) +ENERGY_HISTORY = load_json_object_fixture("energy_history.json", DOMAIN) COMMAND_OK = {"response": {"result": True, "reason": ""}} COMMAND_REASON = {"response": {"result": False, "reason": "already closed"}} diff --git a/tests/components/teslemetry/fixtures/energy_history.json b/tests/components/teslemetry/fixtures/energy_history.json new file mode 100644 index 00000000000..2b787beafac --- /dev/null +++ b/tests/components/teslemetry/fixtures/energy_history.json @@ -0,0 +1,55 @@ +{ + "response": { + "serial_number": "xxxxxx", + "period": "day", + "installation_time_zone": "Australia/Brisbane", + "time_series": [ + { + "timestamp": "2024-09-18T00:00:00+10:00", + "solar_energy_exported": 0, + 
"generator_energy_exported": 0, + "grid_energy_imported": 0, + "grid_services_energy_imported": 0, + "grid_services_energy_exported": 0, + "grid_energy_exported_from_solar": 0, + "grid_energy_exported_from_generator": 0, + "grid_energy_exported_from_battery": 0, + "battery_energy_exported": 36, + "battery_energy_imported_from_grid": 0, + "battery_energy_imported_from_solar": 0, + "battery_energy_imported_from_generator": 0, + "consumer_energy_imported_from_grid": 0, + "consumer_energy_imported_from_solar": 0, + "consumer_energy_imported_from_battery": 36, + "consumer_energy_imported_from_generator": 0, + "raw_timestamp": "2024-09-18T00:00:00+10:00", + "total_home_usage": 36, + "total_battery_discharge": 36 + }, + { + "timestamp": "2024-09-18T08:45:00+10:00", + "solar_energy_exported": 724, + "generator_energy_exported": 0, + "grid_energy_imported": 0, + "grid_services_energy_imported": 0, + "grid_services_energy_exported": 0, + "grid_energy_exported_from_solar": 2, + "grid_energy_exported_from_generator": 0, + "grid_energy_exported_from_battery": 0, + "battery_energy_exported": 0, + "battery_energy_imported_from_grid": 0, + "battery_energy_imported_from_solar": 684, + "battery_energy_imported_from_generator": 0, + "consumer_energy_imported_from_grid": 0, + "consumer_energy_imported_from_solar": 38, + "consumer_energy_imported_from_battery": 0, + "consumer_energy_imported_from_generator": 0, + "raw_timestamp": "2024-09-18T08:45:00+10:00", + "total_home_usage": 38, + "total_solar_generation": 724, + "total_battery_charge": 684, + "total_grid_energy_exported": 2 + } + ] + } +} diff --git a/tests/components/teslemetry/fixtures/products.json b/tests/components/teslemetry/fixtures/products.json index 8da921a33f4..56497a6d936 100644 --- a/tests/components/teslemetry/fixtures/products.json +++ b/tests/components/teslemetry/fixtures/products.json @@ -4,7 +4,7 @@ "id": 1234, "user_id": 1234, "vehicle_id": 1234, - "vin": "LRWXF7EK4KC700000", + "vin": "LRW3F7EK4NC700000", "color": null, "access_type": "OWNER", "display_name": "Test", diff --git a/tests/components/teslemetry/fixtures/vehicle_data.json b/tests/components/teslemetry/fixtures/vehicle_data.json index 3845ae48559..fcfa0707b2c 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data.json +++ b/tests/components/teslemetry/fixtures/vehicle_data.json @@ -3,7 +3,7 @@ "id": 1234, "user_id": 1234, "vehicle_id": 1234, - "vin": "LRWXF7EK4KC700000", + "vin": "LRW3F7EK4NC700000", "color": null, "access_type": "OWNER", "granular_access": { @@ -112,6 +112,7 @@ "wiper_blade_heater": false }, "drive_state": { + "active_route_destination": "Home", "active_route_latitude": 30.2226265, "active_route_longitude": -97.6236871, "active_route_miles_to_arrival": 0.039491, diff --git a/tests/components/teslemetry/fixtures/vehicle_data_alt.json b/tests/components/teslemetry/fixtures/vehicle_data_alt.json index 76416982eba..5ef5ea92a74 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data_alt.json +++ b/tests/components/teslemetry/fixtures/vehicle_data_alt.json @@ -3,7 +3,7 @@ "id": 1234, "user_id": 1234, "vehicle_id": 1234, - "vin": "LRWXF7EK4KC700000", + "vin": "LRW3F7EK4NC700000", "color": null, "access_type": "OWNER", "granular_access": { @@ -24,7 +24,6 @@ "battery_range": 266.87, "charge_amps": 16, "charge_current_request": 16, - "charge_current_request_max": 16, "charge_enable_request": true, "charge_energy_added": 0, "charge_limit_soc": 80, @@ -72,16 +71,16 @@ "user_charge_enable_request": true }, "climate_state": { - 
"allow_cabin_overheat_protection": true, + "allow_cabin_overheat_protection": null, "auto_seat_climate_left": false, "auto_seat_climate_right": false, "auto_steering_wheel_heat": false, "battery_heater": true, "battery_heater_no_power": null, - "cabin_overheat_protection": "Off", + "cabin_overheat_protection": null, "cabin_overheat_protection_actively_cooling": false, "climate_keeper_mode": "off", - "cop_activation_temperature": "Low", + "cop_activation_temperature": null, "defrost_mode": 0, "driver_temp_setting": 22, "fan_status": 0, @@ -106,7 +105,7 @@ "seat_heater_right": 0, "side_mirror_heaters": false, "steering_wheel_heat_level": 0, - "steering_wheel_heater": false, + "steering_wheel_heater": true, "supports_fan_only_cabin_overheat_protection": true, "timestamp": 1705707520649, "wiper_blade_heater": false @@ -204,9 +203,9 @@ "is_user_present": true, "locked": false, "media_info": { - "audio_volume": 2.6667, - "audio_volume_increment": 0.333333, - "audio_volume_max": 10.333333, + "audio_volume": null, + "audio_volume_increment": null, + "audio_volume_max": null, "media_playback_status": "Stopped", "now_playing_album": "", "now_playing_artist": "", diff --git a/tests/components/teslemetry/snapshots/test_binary_sensors.ambr b/tests/components/teslemetry/snapshots/test_binary_sensors.ambr index 6f35fe9da25..95330840109 100644 --- a/tests/components/teslemetry/snapshots/test_binary_sensors.ambr +++ b/tests/components/teslemetry/snapshots/test_binary_sensors.ambr @@ -137,6 +137,52 @@ 'state': 'off', }) # --- +# name: test_binary_sensor[binary_sensor.energy_site_storm_watch_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_storm_watch_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Storm watch active', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storm_mode_active', + 'unique_id': '123456-storm_mode_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_storm_watch_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Storm watch active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_storm_watch_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensor[binary_sensor.test_battery_heater-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -166,7 +212,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_battery_heater_on', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_heater_on', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_battery_heater_on', 'unit_of_measurement': None, }) # --- @@ -213,7 +259,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_cabin_overheat_protection_actively_cooling', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection_actively_cooling', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection_actively_cooling', 'unit_of_measurement': None, }) # --- @@ -260,7 +306,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_conn_charge_cable', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_conn_charge_cable', 'unit_of_measurement': None, }) # --- @@ -307,7 +353,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charger_phases', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_phases', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charger_phases', 'unit_of_measurement': None, }) # --- @@ -353,7 +399,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_dashcam_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dashcam_state', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_dashcam_state', 'unit_of_measurement': None, }) # --- @@ -400,7 +446,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_df', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_df', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_df', 'unit_of_measurement': None, }) # --- @@ -447,7 +493,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_fd_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fd_window', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_fd_window', 'unit_of_measurement': None, }) # --- @@ -494,7 +540,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_pf', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pf', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_pf', 'unit_of_measurement': None, }) # --- @@ -541,7 +587,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_fp_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fp_window', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_fp_window', 'unit_of_measurement': None, }) # --- @@ -588,7 +634,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_is_preconditioning', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_is_preconditioning', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_is_preconditioning', 'unit_of_measurement': None, }) # --- @@ -634,7 +680,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_preconditioning_enabled', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_preconditioning_enabled', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_preconditioning_enabled', 'unit_of_measurement': None, }) # --- @@ -680,7 +726,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_dr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_dr', 'unit_of_measurement': None, }) # --- @@ -727,7 +773,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_rd_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rd_window', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_rd_window', 'unit_of_measurement': None, }) # --- @@ -774,7 +820,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_pr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_pr', 'unit_of_measurement': None, }) # --- @@ -821,7 +867,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_rp_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rp_window', + 'unique_id': 
'LRW3F7EK4NC700000-vehicle_state_rp_window', 'unit_of_measurement': None, }) # --- @@ -868,7 +914,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_scheduled_charging_pending', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_scheduled_charging_pending', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_scheduled_charging_pending', 'unit_of_measurement': None, }) # --- @@ -914,7 +960,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'state', - 'unique_id': 'LRWXF7EK4KC700000-state', + 'unique_id': 'LRW3F7EK4NC700000-state', 'unit_of_measurement': None, }) # --- @@ -961,7 +1007,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_soft_warning_fl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fl', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_soft_warning_fl', 'unit_of_measurement': None, }) # --- @@ -1008,7 +1054,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_soft_warning_fr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_soft_warning_fr', 'unit_of_measurement': None, }) # --- @@ -1055,7 +1101,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_soft_warning_rl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rl', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_soft_warning_rl', 'unit_of_measurement': None, }) # --- @@ -1102,7 +1148,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_soft_warning_rr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_soft_warning_rr', 'unit_of_measurement': None, }) # --- @@ -1149,7 +1195,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_trip_charging', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_trip_charging', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_trip_charging', 'unit_of_measurement': None, }) # --- @@ -1195,7 +1241,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_is_user_present', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_is_user_present', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_is_user_present', 'unit_of_measurement': None, }) # --- @@ -1252,6 +1298,19 @@ 'state': 'off', }) # --- +# name: test_binary_sensor_refresh[binary_sensor.energy_site_storm_watch_active-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Storm watch active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_storm_watch_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensor_refresh[binary_sensor.test_battery_heater-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/teslemetry/snapshots/test_button.ambr b/tests/components/teslemetry/snapshots/test_button.ambr index 84cf4c21078..6d3016186ae 100644 --- a/tests/components/teslemetry/snapshots/test_button.ambr +++ b/tests/components/teslemetry/snapshots/test_button.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'flash_lights', - 'unique_id': 'LRWXF7EK4KC700000-flash_lights', + 'unique_id': 'LRW3F7EK4NC700000-flash_lights', 'unit_of_measurement': None, }) # --- @@ -74,7 +74,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'homelink', - 'unique_id': 'LRWXF7EK4KC700000-homelink', + 'unique_id': 'LRW3F7EK4NC700000-homelink', 'unit_of_measurement': None, }) # --- @@ -120,7 +120,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'honk', - 'unique_id': 'LRWXF7EK4KC700000-honk', + 'unique_id': 'LRW3F7EK4NC700000-honk', 'unit_of_measurement': None, }) # --- @@ -166,7 +166,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'enable_keyless_driving', - 'unique_id': 'LRWXF7EK4KC700000-enable_keyless_driving', + 'unique_id': 'LRW3F7EK4NC700000-enable_keyless_driving', 'unit_of_measurement': None, }) # --- @@ -212,7 +212,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'boombox', - 'unique_id': 'LRWXF7EK4KC700000-boombox', + 'unique_id': 'LRW3F7EK4NC700000-boombox', 'unit_of_measurement': None, }) # --- @@ -258,7 +258,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wake', - 'unique_id': 'LRWXF7EK4KC700000-wake', + 'unique_id': 'LRW3F7EK4NC700000-wake', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_climate.ambr b/tests/components/teslemetry/snapshots/test_climate.ambr index b65796fe10e..7064309e98b 100644 --- a/tests/components/teslemetry/snapshots/test_climate.ambr +++ b/tests/components/teslemetry/snapshots/test_climate.ambr @@ -1,4 +1,10 @@ # serializer version: 1 +# name: test_asleep_or_offline[HomeAssistantError] + 'Timed out trying to wake up vehicle' +# --- +# name: test_asleep_or_offline[InvalidCommand] + 'Failed to wake up vehicle: The data request or command is unknown.' +# --- # name: test_climate[climate.test_cabin_overheat_protection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -37,7 +43,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection', 'unit_of_measurement': None, }) # --- @@ -107,7 +113,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, }) # --- @@ -178,7 +184,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection', 'unit_of_measurement': None, }) # --- @@ -202,7 +208,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'off', + 'state': 'unknown', }) # --- # name: test_climate_alt[climate.test_climate-entry] @@ -247,7 +253,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, }) # --- @@ -280,16 +286,14 @@ 'state': 'off', }) # --- -# name: test_climate_offline[climate.test_cabin_overheat_protection-entry] +# name: test_climate_noscope[climate.test_cabin_overheat_protection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), 'area_id': None, 'capabilities': dict({ 'hvac_modes': list([ - , , - , ]), 'max_temp': 40, 'min_temp': 30, @@ -298,7 +302,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': 
None, + 'disabled_by': , 'domain': 'climate', 'entity_category': None, 'entity_id': 'climate.test_cabin_overheat_protection', @@ -316,36 +320,13 @@ 'original_name': 'Cabin overheat protection', 'platform': 'teslemetry', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': 0, 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_cabin_overheat_protection', 'unit_of_measurement': None, }) # --- -# name: test_climate_offline[climate.test_cabin_overheat_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Test Cabin overheat protection', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'supported_features': , - 'target_temp_step': 5, - }), - 'context': , - 'entity_id': 'climate.test_cabin_overheat_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_climate_offline[climate.test_climate-entry] +# name: test_climate_noscope[climate.test_climate-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -353,16 +334,9 @@ 'capabilities': dict({ 'hvac_modes': list([ , - , ]), 'max_temp': 28.0, 'min_temp': 15.0, - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), }), 'config_entry_id': , 'device_class': None, @@ -385,38 +359,12 @@ 'original_name': 'Climate', 'platform': 'teslemetry', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': 0, 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, }) # --- -# name: test_climate_offline[climate.test_climate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Test Climate', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - 'preset_mode': None, - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), - 'supported_features': , - 'temperature': None, - }), - 'context': , - 'entity_id': 'climate.test_climate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) +# name: test_invalid_error[error] + 'Command returned exception: The data request or command is unknown.' 
# --- diff --git a/tests/components/teslemetry/snapshots/test_cover.ambr b/tests/components/teslemetry/snapshots/test_cover.ambr index 7ffb9c4a1f9..24e1b02a5f8 100644 --- a/tests/components/teslemetry/snapshots/test_cover.ambr +++ b/tests/components/teslemetry/snapshots/test_cover.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_port_door_open', 'unit_of_measurement': None, }) # --- @@ -76,7 +76,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_ft', 'unit_of_measurement': None, }) # --- @@ -124,7 +124,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_sun_roof_state', 'unit_of_measurement': None, }) # --- @@ -172,7 +172,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_rt', 'unit_of_measurement': None, }) # --- @@ -220,7 +220,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', + 'unique_id': 'LRW3F7EK4NC700000-windows', 'unit_of_measurement': None, }) # --- @@ -268,7 +268,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_port_door_open', 'unit_of_measurement': None, }) # --- @@ -316,7 +316,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_ft', 'unit_of_measurement': None, }) # --- @@ -364,7 +364,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_sun_roof_state', 'unit_of_measurement': None, }) # --- @@ -412,7 +412,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_rt', 'unit_of_measurement': None, }) # --- @@ -460,7 +460,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', + 'unique_id': 'LRW3F7EK4NC700000-windows', 'unit_of_measurement': None, }) # --- @@ -508,7 +508,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_port_door_open', 'unit_of_measurement': None, }) # --- @@ -556,7 +556,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_ft', 'unit_of_measurement': None, }) # --- @@ -604,7 +604,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_sun_roof_state', 'unit_of_measurement': None, }) # --- @@ -652,7 +652,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_rt', 'unit_of_measurement': None, }) # --- @@ -700,7 +700,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', + 'unique_id': 'LRW3F7EK4NC700000-windows', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_device_tracker.ambr b/tests/components/teslemetry/snapshots/test_device_tracker.ambr index 9859d9db360..ac4c388873f 100644 --- a/tests/components/teslemetry/snapshots/test_device_tracker.ambr +++ b/tests/components/teslemetry/snapshots/test_device_tracker.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'location', - 'unique_id': 'LRWXF7EK4KC700000-location', + 'unique_id': 'LRW3F7EK4NC700000-location', 'unit_of_measurement': None, }) # --- @@ -78,11 +78,45 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'route', - 'unique_id': 'LRWXF7EK4KC700000-route', + 'unique_id': 'LRW3F7EK4NC700000-route', 'unit_of_measurement': None, }) # --- # name: test_device_tracker[device_tracker.test_route-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Route', + 'gps_accuracy': 0, + 'latitude': 30.2226265, + 'longitude': -97.6236871, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.test_route', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'home', + }) +# --- +# name: test_device_tracker_alt[device_tracker.test_location-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Location', + 'gps_accuracy': 0, + 'latitude': -30.222626, + 'longitude': -97.6236871, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.test_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- +# name: test_device_tracker_alt[device_tracker.test_route-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Test Route', diff --git a/tests/components/teslemetry/snapshots/test_diagnostics.ambr b/tests/components/teslemetry/snapshots/test_diagnostics.ambr index 11f8a91c1aa..3b96d6f70c0 100644 --- a/tests/components/teslemetry/snapshots/test_diagnostics.ambr +++ b/tests/components/teslemetry/snapshots/test_diagnostics.ambr @@ -270,6 +270,7 @@ 'climate_state_timestamp': 1705707520649, 'climate_state_wiper_blade_heater': False, 'color': None, + 'drive_state_active_route_destination': 'Home', 'drive_state_active_route_latitude': '**REDACTED**', 'drive_state_active_route_longitude': '**REDACTED**', 'drive_state_active_route_miles_to_arrival': 0.039491, diff --git a/tests/components/teslemetry/snapshots/test_init.ambr b/tests/components/teslemetry/snapshots/test_init.ambr index e07f075b7d8..7d60ed82859 100644 --- a/tests/components/teslemetry/snapshots/test_init.ambr +++ b/tests/components/teslemetry/snapshots/test_init.ambr @@ -31,7 +31,7 @@ 'via_device_id': None, }) # --- -# name: test_devices[{('teslemetry', 'LRWXF7EK4KC700000')}] +# name: test_devices[{('teslemetry', 
'LRW3F7EK4NC700000')}] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -45,19 +45,19 @@ 'identifiers': set({ tuple( 'teslemetry', - 'LRWXF7EK4KC700000', + 'LRW3F7EK4NC700000', ), }), 'is_new': False, 'labels': set({ }), 'manufacturer': 'Tesla', - 'model': 'Model X', + 'model': 'Model 3', 'model_id': None, 'name': 'Test', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': 'LRWXF7EK4KC700000', + 'serial_number': 'LRW3F7EK4NC700000', 'suggested_area': None, 'sw_version': None, 'via_device_id': None, diff --git a/tests/components/teslemetry/snapshots/test_lock.ambr b/tests/components/teslemetry/snapshots/test_lock.ambr index deaabbae904..2130c4d9574 100644 --- a/tests/components/teslemetry/snapshots/test_lock.ambr +++ b/tests/components/teslemetry/snapshots/test_lock.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_port_latch', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_latch', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_port_latch', 'unit_of_measurement': None, }) # --- @@ -75,7 +75,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_locked', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_locked', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_locked', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_media_player.ambr b/tests/components/teslemetry/snapshots/test_media_player.ambr index 06500437701..a9d2569c637 100644 --- a/tests/components/teslemetry/snapshots/test_media_player.ambr +++ b/tests/components/teslemetry/snapshots/test_media_player.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'media', - 'unique_id': 'LRWXF7EK4KC700000-media', + 'unique_id': 'LRW3F7EK4NC700000-media', 'unit_of_measurement': None, }) # --- @@ -67,7 +67,6 @@ 'media_title': '', 'source': 'Spotify', 'supported_features': , - 'volume_level': 0.25806775026025003, }), 'context': , 'entity_id': 'media_player.test_media_player', @@ -107,7 +106,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'media', - 'unique_id': 'LRWXF7EK4KC700000-media', + 'unique_id': 'LRW3F7EK4NC700000-media', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_number.ambr b/tests/components/teslemetry/snapshots/test_number.ambr index f33b5e15d30..0f30daf635e 100644 --- a/tests/components/teslemetry/snapshots/test_number.ambr +++ b/tests/components/teslemetry/snapshots/test_number.ambr @@ -149,7 +149,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_current_request', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_current_request', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_current_request', 'unit_of_measurement': , }) # --- @@ -206,7 +206,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_limit_soc', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_limit_soc', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_limit_soc', 'unit_of_measurement': '%', }) # --- diff --git a/tests/components/teslemetry/snapshots/test_select.ambr b/tests/components/teslemetry/snapshots/test_select.ambr index 4e6feda7e5d..0c2547f309d 100644 --- a/tests/components/teslemetry/snapshots/test_select.ambr +++ b/tests/components/teslemetry/snapshots/test_select.ambr @@ -149,7 +149,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'climate_state_seat_heater_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_left', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_left', 'unit_of_measurement': None, }) # --- @@ -208,7 +208,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_right', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_right', 'unit_of_measurement': None, }) # --- @@ -267,7 +267,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_rear_center', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_center', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_rear_center', 'unit_of_measurement': None, }) # --- @@ -326,7 +326,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_rear_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_left', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_rear_left', 'unit_of_measurement': None, }) # --- @@ -385,7 +385,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_seat_heater_rear_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_right', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_seat_heater_rear_right', 'unit_of_measurement': None, }) # --- @@ -408,178 +408,3 @@ 'state': 'off', }) # --- -# name: test_select[select.test_seat_heater_third_row_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_third_row_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater third row left', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_third_row_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_third_row_left', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_third_row_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater third row left', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_third_row_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_select[select.test_seat_heater_third_row_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_third_row_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 
'original_name': 'Seat heater third row right', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_third_row_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_third_row_right', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_third_row_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater third row right', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_third_row_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_select[select.test_steering_wheel_heater-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_steering_wheel_heater', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Steering wheel heater', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_steering_wheel_heat_level', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_steering_wheel_heat_level', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_steering_wheel_heater-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Steering wheel heater', - 'options': list([ - 'off', - 'low', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_steering_wheel_heater', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/teslemetry/snapshots/test_sensor.ambr b/tests/components/teslemetry/snapshots/test_sensor.ambr index 0b664e78626..acff157bfea 100644 --- a/tests/components/teslemetry/snapshots/test_sensor.ambr +++ b/tests/components/teslemetry/snapshots/test_sensor.ambr @@ -1,4 +1,442 @@ # serializer version: 1 +# name: test_sensors[sensor.energy_site_battery_charged-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_battery_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery charged', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_battery_charge', + 'unique_id': '123456-total_battery_charge', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_battery_charged-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery charged', + 'state_class': , + 'unit_of_measurement': 
, + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_charged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.684', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_charged-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery charged', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_charged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.684', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_discharged-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_battery_discharged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery discharged', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_battery_discharge', + 'unique_id': '123456-total_battery_discharge', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_battery_discharged-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery discharged', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_discharged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.036', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_discharged-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery discharged', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_discharged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.036', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_exported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_battery_exported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery exported', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_energy_exported', + 'unique_id': '123456-battery_energy_exported', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_battery_exported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery exported', + 'state_class': , 
+ 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.036', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_exported-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery exported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.036', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_imported_from_generator-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_battery_imported_from_generator', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery imported from generator', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_energy_imported_from_generator', + 'unique_id': '123456-battery_energy_imported_from_generator', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_battery_imported_from_generator-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery imported from generator', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_imported_from_generator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_imported_from_generator-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery imported from generator', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_imported_from_generator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_imported_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_battery_imported_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery imported from grid', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_energy_imported_from_grid', + 'unique_id': '123456-battery_energy_imported_from_grid', + 'unit_of_measurement': , + 
}) +# --- +# name: test_sensors[sensor.energy_site_battery_imported_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery imported from grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_imported_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_imported_from_grid-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery imported from grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_imported_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_imported_from_solar-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_battery_imported_from_solar', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery imported from solar', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_energy_imported_from_solar', + 'unique_id': '123456-battery_energy_imported_from_solar', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_battery_imported_from_solar-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery imported from solar', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_imported_from_solar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.684', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_imported_from_solar-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Battery imported from solar', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_imported_from_solar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.684', + }) +# --- # name: test_sensors[sensor.energy_site_battery_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -72,6 +510,298 @@ 'state': '5.06', }) # --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_consumer_imported_from_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 
'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumer imported from battery', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumer_energy_imported_from_battery', + 'unique_id': '123456-consumer_energy_imported_from_battery', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Consumer imported from battery', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_consumer_imported_from_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.036', + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_battery-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Consumer imported from battery', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_consumer_imported_from_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.036', + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_generator-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_consumer_imported_from_generator', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumer imported from generator', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumer_energy_imported_from_generator', + 'unique_id': '123456-consumer_energy_imported_from_generator', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_generator-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Consumer imported from generator', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_consumer_imported_from_generator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_generator-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Consumer imported from generator', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_consumer_imported_from_generator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_consumer_imported_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumer imported from grid', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumer_energy_imported_from_grid', + 'unique_id': '123456-consumer_energy_imported_from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Consumer imported from grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_consumer_imported_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_grid-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Consumer imported from grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_consumer_imported_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_solar-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_consumer_imported_from_solar', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumer imported from solar', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumer_energy_imported_from_solar', + 'unique_id': '123456-consumer_energy_imported_from_solar', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_solar-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Consumer imported from solar', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_consumer_imported_from_solar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.038', + }) +# --- +# name: test_sensors[sensor.energy_site_consumer_imported_from_solar-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Consumer imported from solar', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_consumer_imported_from_solar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.038', + }) +# --- # name: 
test_sensors[sensor.energy_site_energy_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -145,6 +875,79 @@ 'state': '38.8964736842105', }) # --- +# name: test_sensors[sensor.energy_site_generator_exported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_generator_exported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Generator exported', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'generator_energy_exported', + 'unique_id': '123456-generator_energy_exported', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_generator_exported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Generator exported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_generator_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_generator_exported-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Generator exported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_generator_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_sensors[sensor.energy_site_generator_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -218,6 +1021,371 @@ 'state': '0.0', }) # --- +# name: test_sensors[sensor.energy_site_grid_exported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_exported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid exported', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_grid_energy_exported', + 'unique_id': '123456-total_grid_energy_exported', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid exported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported-statealt] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid exported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported_from_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_exported_from_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid exported from battery', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_energy_exported_from_battery', + 'unique_id': '123456-grid_energy_exported_from_battery', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported_from_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid exported from battery', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_exported_from_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported_from_battery-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid exported from battery', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_exported_from_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported_from_generator-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_exported_from_generator', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid exported from generator', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_energy_exported_from_generator', + 'unique_id': '123456-grid_energy_exported_from_generator', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported_from_generator-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid exported from generator', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.energy_site_grid_exported_from_generator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported_from_generator-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid exported from generator', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_exported_from_generator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported_from_solar-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_exported_from_solar', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid exported from solar', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_energy_exported_from_solar', + 'unique_id': '123456-grid_energy_exported_from_solar', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported_from_solar-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid exported from solar', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_exported_from_solar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_exported_from_solar-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid exported from solar', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_exported_from_solar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_imported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_imported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid imported', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_energy_imported', + 'unique_id': '123456-grid_energy_imported', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_imported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 
'friendly_name': 'Energy Site Grid imported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_imported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_imported-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid imported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_imported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_sensors[sensor.energy_site_grid_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -291,6 +1459,152 @@ 'state': '0.0', }) # --- +# name: test_sensors[sensor.energy_site_grid_services_exported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_services_exported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid services exported', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_services_energy_exported', + 'unique_id': '123456-grid_services_energy_exported', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_exported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid services exported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_services_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_exported-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid services exported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_services_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_imported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_services_imported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid services imported', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_services_energy_imported', 
+ 'unique_id': '123456-grid_services_energy_imported', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_imported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid services imported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_services_imported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_imported-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Grid services imported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_services_imported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_sensors[sensor.energy_site_grid_services_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -364,6 +1678,162 @@ 'state': '0.0', }) # --- +# name: test_sensors[sensor.energy_site_home_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_home_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Home usage', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_home_usage', + 'unique_id': '123456-total_home_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_home_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Home usage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_home_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.074', + }) +# --- +# name: test_sensors[sensor.energy_site_home_usage-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Home usage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_home_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.074', + }) +# --- +# name: test_sensors[sensor.energy_site_island_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on_grid', + 'off_grid', + 'off_grid_intentional', + 'off_grid_unintentional', + 'island_status_unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_island_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Island status', + 'platform': 
'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'island_status', + 'unique_id': '123456-island_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.energy_site_island_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site Island status', + 'options': list([ + 'on_grid', + 'off_grid', + 'off_grid_intentional', + 'off_grid_unintentional', + 'island_status_unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.energy_site_island_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- +# name: test_sensors[sensor.energy_site_island_status-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site Island status', + 'options': list([ + 'on_grid', + 'off_grid', + 'off_grid_intentional', + 'off_grid_unintentional', + 'island_status_unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.energy_site_island_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- # name: test_sensors[sensor.energy_site_load_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -437,67 +1907,6 @@ 'state': '6.245', }) # --- -# name: test_sensors[sensor.energy_site_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'island_status', - 'unique_id': '123456-island_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.energy_site_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site None', - }), - 'context': , - 'entity_id': 'sensor.energy_site_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- -# name: test_sensors[sensor.energy_site_none-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site None', - }), - 'context': , - 'entity_id': 'sensor.energy_site_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- # name: test_sensors[sensor.energy_site_percentage_charged-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -568,6 +1977,152 @@ 'state': '95.5053740373966', }) # --- +# name: test_sensors[sensor.energy_site_solar_exported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_solar_exported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Solar exported', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'solar_energy_exported', + 'unique_id': '123456-solar_energy_exported', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_solar_exported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Solar exported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_solar_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.724', + }) +# --- +# name: test_sensors[sensor.energy_site_solar_exported-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Solar exported', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_solar_exported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.724', + }) +# --- +# name: test_sensors[sensor.energy_site_solar_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_solar_generated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Solar generated', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_solar_generation', + 'unique_id': '123456-total_solar_generation', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_solar_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Solar generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_solar_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.724', + }) +# --- +# name: test_sensors[sensor.energy_site_solar_generated-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Site Solar generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_solar_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.724', + }) +# --- # name: test_sensors[sensor.energy_site_solar_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -867,7 +2422,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_battery_level', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_level', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_battery_level', 'unit_of_measurement': '%', }) # --- @@ -940,7 +2495,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_battery_range', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_range', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_battery_range', 'unit_of_measurement': , }) # 
--- @@ -1005,7 +2560,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_conn_charge_cable', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_conn_charge_cable', 'unit_of_measurement': None, }) # --- @@ -1069,7 +2624,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_energy_added', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_energy_added', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_energy_added', 'unit_of_measurement': , }) # --- @@ -1139,7 +2694,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charge_rate', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_rate', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charge_rate', 'unit_of_measurement': , }) # --- @@ -1206,7 +2761,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charger_actual_current', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_actual_current', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charger_actual_current', 'unit_of_measurement': , }) # --- @@ -1273,7 +2828,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charger_power', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_power', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charger_power', 'unit_of_measurement': , }) # --- @@ -1340,7 +2895,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charger_voltage', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_voltage', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charger_voltage', 'unit_of_measurement': , }) # --- @@ -1414,7 +2969,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_charging_state', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charging_state', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_charging_state', 'unit_of_measurement': None, }) # --- @@ -1496,7 +3051,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_active_route_miles_to_arrival', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_miles_to_arrival', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_active_route_miles_to_arrival', 'unit_of_measurement': , }) # --- @@ -1566,7 +3121,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_driver_temp_setting', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_driver_temp_setting', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_driver_temp_setting', 'unit_of_measurement': , }) # --- @@ -1639,7 +3194,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_est_battery_range', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_est_battery_range', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_est_battery_range', 'unit_of_measurement': , }) # --- @@ -1704,7 +3259,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_fast_charger_type', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_fast_charger_type', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_fast_charger_type', 'unit_of_measurement': None, }) # --- @@ -1771,7 +3326,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_ideal_battery_range', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_ideal_battery_range', + 'unique_id': 
'LRW3F7EK4NC700000-charge_state_ideal_battery_range', 'unit_of_measurement': , }) # --- @@ -1841,7 +3396,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_inside_temp', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_inside_temp', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_inside_temp', 'unit_of_measurement': , }) # --- @@ -1914,7 +3469,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_odometer', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_odometer', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_odometer', 'unit_of_measurement': , }) # --- @@ -1984,7 +3539,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_outside_temp', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_outside_temp', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_outside_temp', 'unit_of_measurement': , }) # --- @@ -2054,7 +3609,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_passenger_temp_setting', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_passenger_temp_setting', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_passenger_temp_setting', 'unit_of_measurement': , }) # --- @@ -2121,7 +3676,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_power', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_power', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_power', 'unit_of_measurement': , }) # --- @@ -2193,7 +3748,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_shift_state', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_shift_state', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_shift_state', 'unit_of_measurement': None, }) # --- @@ -2271,7 +3826,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_speed', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_speed', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_speed', 'unit_of_measurement': , }) # --- @@ -2338,7 +3893,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_active_route_energy_at_arrival', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_energy_at_arrival', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_active_route_energy_at_arrival', 'unit_of_measurement': '%', }) # --- @@ -2403,7 +3958,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_active_route_minutes_to_arrival', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_minutes_to_arrival', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_active_route_minutes_to_arrival', 'unit_of_measurement': None, }) # --- @@ -2464,7 +4019,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_minutes_to_full_charge', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_minutes_to_full_charge', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_minutes_to_full_charge', 'unit_of_measurement': None, }) # --- @@ -2533,7 +4088,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fl', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_pressure_fl', 'unit_of_measurement': , }) # --- @@ -2606,7 +4161,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fr', + 'unique_id': 
'LRW3F7EK4NC700000-vehicle_state_tpms_pressure_fr', 'unit_of_measurement': , }) # --- @@ -2679,7 +4234,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rl', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_pressure_rl', 'unit_of_measurement': , }) # --- @@ -2752,7 +4307,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rr', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_tpms_pressure_rr', 'unit_of_measurement': , }) # --- @@ -2819,7 +4374,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drive_state_active_route_traffic_minutes_delay', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_traffic_minutes_delay', + 'unique_id': 'LRW3F7EK4NC700000-drive_state_active_route_traffic_minutes_delay', 'unit_of_measurement': , }) # --- @@ -2886,7 +4441,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_usable_battery_level', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_usable_battery_level', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_usable_battery_level', 'unit_of_measurement': '%', }) # --- diff --git a/tests/components/teslemetry/snapshots/test_switch.ambr b/tests/components/teslemetry/snapshots/test_switch.ambr index f55cbae6a54..5693d4bdd5e 100644 --- a/tests/components/teslemetry/snapshots/test_switch.ambr +++ b/tests/components/teslemetry/snapshots/test_switch.ambr @@ -122,7 +122,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_auto_seat_climate_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_seat_climate_left', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_auto_seat_climate_left', 'unit_of_measurement': None, }) # --- @@ -169,7 +169,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_auto_seat_climate_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_seat_climate_right', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_auto_seat_climate_right', 'unit_of_measurement': None, }) # --- @@ -216,7 +216,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_auto_steering_wheel_heat', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_steering_wheel_heat', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_auto_steering_wheel_heat', 'unit_of_measurement': None, }) # --- @@ -263,7 +263,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charge_state_user_charge_enable_request', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_user_charge_enable_request', + 'unique_id': 'LRW3F7EK4NC700000-charge_state_user_charge_enable_request', 'unit_of_measurement': None, }) # --- @@ -310,7 +310,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_state_defrost_mode', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_defrost_mode', + 'unique_id': 'LRW3F7EK4NC700000-climate_state_defrost_mode', 'unit_of_measurement': None, }) # --- @@ -357,7 +357,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'vehicle_state_sentry_mode', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sentry_mode', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_sentry_mode', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/teslemetry/snapshots/test_update.ambr 
b/tests/components/teslemetry/snapshots/test_update.ambr index 19dac161516..0777f4ccdb9 100644 --- a/tests/components/teslemetry/snapshots/test_update.ambr +++ b/tests/components/teslemetry/snapshots/test_update.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_software_update_status', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_software_update_status', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_software_update_status', 'unit_of_measurement': None, }) # --- @@ -36,6 +36,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, @@ -46,6 +47,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.test_update', @@ -84,7 +86,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'vehicle_state_software_update_status', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_software_update_status', + 'unique_id': 'LRW3F7EK4NC700000-vehicle_state_software_update_status', 'unit_of_measurement': None, }) # --- @@ -92,6 +94,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, @@ -102,6 +105,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.test_update', diff --git a/tests/components/teslemetry/test_binary_sensors.py b/tests/components/teslemetry/test_binary_sensors.py index a7a8c03c174..0a47dce9537 100644 --- a/tests/components/teslemetry/test_binary_sensors.py +++ b/tests/components/teslemetry/test_binary_sensors.py @@ -1,12 +1,13 @@ """Test the Teslemetry binary sensor platform.""" +from unittest.mock import AsyncMock + from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline +from syrupy.assertion import SnapshotAssertion from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL -from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -33,7 +34,7 @@ async def test_binary_sensor_refresh( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Tests that the binary sensor entities are correct.""" @@ -47,15 +48,3 @@ async def test_binary_sensor_refresh( await hass.async_block_till_done() assert_entities_alt(hass, entry.entry_id, entity_registry, snapshot) - - -async def test_binary_sensor_offline( - hass: HomeAssistant, - mock_vehicle_data, -) -> None: - """Tests that the binary sensor entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.BINARY_SENSOR]) - state = hass.states.get("binary_sensor.test_status") - assert state.state == STATE_UNKNOWN diff --git a/tests/components/teslemetry/test_button.py b/tests/components/teslemetry/test_button.py index a10e3efdff2..04edf668765 100644 --- a/tests/components/teslemetry/test_button.py +++ 
b/tests/components/teslemetry/test_button.py @@ -3,7 +3,7 @@ from unittest.mock import patch import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 31a39f1f21a..33f2e134806 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -1,17 +1,15 @@ """Test the Teslemetry climate platform.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import InvalidCommand, VehicleOffline +from syrupy.assertion import SnapshotAssertion +from tesla_fleet_api.exceptions import InvalidCommand from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_PRESET_MODE, - ATTR_TARGET_TEMP_HIGH, - ATTR_TARGET_TEMP_LOW, ATTR_TEMPERATURE, DOMAIN as CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, @@ -21,7 +19,6 @@ from homeassistant.components.climate import ( SERVICE_TURN_ON, HVACMode, ) -from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError @@ -33,12 +30,11 @@ from .const import ( COMMAND_IGNORED_REASON, METADATA_NOSCOPE, VEHICLE_DATA_ALT, + VEHICLE_DATA_ASLEEP, WAKE_UP_ASLEEP, WAKE_UP_ONLINE, ) -from tests.common import async_fire_time_changed - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_climate( @@ -175,17 +171,6 @@ async def test_climate( state = hass.states.get(entity_id) assert state.state == HVACMode.COOL - # Set Temp do nothing - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: [entity_id], - ATTR_TARGET_TEMP_HIGH: 30, - ATTR_TARGET_TEMP_LOW: 30, - }, - blocking=True, - ) state = hass.states.get(entity_id) assert state.attributes[ATTR_TEMPERATURE] == 40 assert state.state == HVACMode.COOL @@ -209,7 +194,7 @@ async def test_climate_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" @@ -218,21 +203,7 @@ async def test_climate_alt( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_climate_offline( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_vehicle_data, -) -> None: - """Tests that the climate entity is correct.""" - - mock_vehicle_data.side_effect = VehicleOffline - entry = await setup_platform(hass, [Platform.CLIMATE]) - assert_entities(hass, entry.entry_id, entity_registry, snapshot) - - -async def test_invalid_error(hass: HomeAssistant) -> None: +async def test_invalid_error(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: """Tests service error is handled.""" await setup_platform(hass, platforms=[Platform.CLIMATE]) @@ -252,10 +223,7 @@ async def test_invalid_error(hass: HomeAssistant) -> None: blocking=True, ) mock_on.assert_called_once() - assert ( - str(error.value) - == "Teslemetry command failed, The data request or 
command is unknown." - ) + assert str(error.value) == snapshot(name="error") @pytest.mark.parametrize("response", COMMAND_ERRORS) @@ -304,25 +272,17 @@ async def test_ignored_error( @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_asleep_or_offline( hass: HomeAssistant, - mock_vehicle_data, - mock_wake_up, - mock_vehicle, + mock_vehicle_data: AsyncMock, + mock_wake_up: AsyncMock, + mock_vehicle: AsyncMock, freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, ) -> None: """Tests asleep is handled.""" + mock_vehicle_data.return_value = VEHICLE_DATA_ASLEEP await setup_platform(hass, [Platform.CLIMATE]) entity_id = "climate.test_climate" - mock_vehicle_data.assert_called_once() - - # Put the vehicle alseep - mock_vehicle_data.reset_mock() - mock_vehicle_data.side_effect = VehicleOffline - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_vehicle_data.assert_called_once() - mock_wake_up.reset_mock() # Run a command but fail trying to wake up the vehicle mock_wake_up.side_effect = InvalidCommand @@ -333,7 +293,7 @@ async def test_asleep_or_offline( {ATTR_ENTITY_ID: [entity_id]}, blocking=True, ) - assert str(error.value) == "The data request or command is unknown." + assert str(error.value) == snapshot(name="InvalidCommand") mock_wake_up.assert_called_once() mock_wake_up.side_effect = None @@ -352,7 +312,7 @@ async def test_asleep_or_offline( {ATTR_ENTITY_ID: [entity_id]}, blocking=True, ) - assert str(error.value) == "Could not wake up vehicle" + assert str(error.value) == snapshot(name="HomeAssistantError") mock_wake_up.assert_called_once() mock_vehicle.assert_called() @@ -371,12 +331,21 @@ async def test_asleep_or_offline( async def test_climate_noscope( hass: HomeAssistant, - mock_metadata, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_metadata: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" mock_metadata.return_value = METADATA_NOSCOPE - await setup_platform(hass, [Platform.CLIMATE]) + entry = await setup_platform(hass, [Platform.CLIMATE]) + + entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) + + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + entity_id = "climate.test_climate" with pytest.raises(ServiceValidationError): diff --git a/tests/components/teslemetry/test_config_flow.py b/tests/components/teslemetry/test_config_flow.py index fa35142dc07..aeee3a620d4 100644 --- a/tests/components/teslemetry/test_config_flow.py +++ b/tests/components/teslemetry/test_config_flow.py @@ -1,6 +1,6 @@ """Test the Teslemetry config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from aiohttp import ClientConnectionError import pytest @@ -60,7 +60,10 @@ async def test_form( ], ) async def test_form_errors( - hass: HomeAssistant, side_effect, error, mock_metadata + hass: HomeAssistant, + side_effect: TeslaFleetError, + error: dict[str, str], + mock_metadata: AsyncMock, ) -> None: """Test errors are handled.""" @@ -86,7 +89,7 @@ async def test_form_errors( assert result3["type"] is FlowResultType.CREATE_ENTRY -async def test_reauth(hass: HomeAssistant, mock_metadata) -> None: +async def test_reauth(hass: HomeAssistant, mock_metadata: AsyncMock) -> None: """Test reauth flow.""" mock_entry = MockConfigEntry( @@ -94,14 +97,7 @@ async def test_reauth(hass: HomeAssistant, mock_metadata) -> None: ) 
mock_entry.add_to_hass(hass) - result1 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=BAD_CONFIG, - ) + result1 = await mock_entry.start_reauth_flow(hass) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "reauth_confirm" @@ -134,7 +130,10 @@ async def test_reauth(hass: HomeAssistant, mock_metadata) -> None: ], ) async def test_reauth_errors( - hass: HomeAssistant, mock_metadata, side_effect, error + hass: HomeAssistant, + mock_metadata: AsyncMock, + side_effect: TeslaFleetError, + error: dict[str, str], ) -> None: """Test reauth flows that fail.""" @@ -144,15 +143,7 @@ async def test_reauth_errors( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=BAD_CONFIG, - ) + result = await mock_entry.start_reauth_flow(hass) mock_metadata.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( @@ -193,7 +184,7 @@ async def test_unique_id_abort( assert result2["type"] is FlowResultType.ABORT -async def test_migrate_from_1_1(hass: HomeAssistant, mock_metadata) -> None: +async def test_migrate_from_1_1(hass: HomeAssistant, mock_metadata: AsyncMock) -> None: """Test config migration.""" mock_entry = MockConfigEntry( @@ -214,7 +205,9 @@ async def test_migrate_from_1_1(hass: HomeAssistant, mock_metadata) -> None: assert entry.unique_id == METADATA["uid"] -async def test_migrate_error_from_1_1(hass: HomeAssistant, mock_metadata) -> None: +async def test_migrate_error_from_1_1( + hass: HomeAssistant, mock_metadata: AsyncMock +) -> None: """Test config migration handles errors.""" mock_metadata.side_effect = TeslaFleetError @@ -235,7 +228,9 @@ async def test_migrate_error_from_1_1(hass: HomeAssistant, mock_metadata) -> Non assert entry.state is ConfigEntryState.MIGRATION_ERROR -async def test_migrate_error_from_future(hass: HomeAssistant, mock_metadata) -> None: +async def test_migrate_error_from_future( + hass: HomeAssistant, mock_metadata: AsyncMock +) -> None: """Test a future version isn't migrated.""" mock_metadata.side_effect = TeslaFleetError diff --git a/tests/components/teslemetry/test_cover.py b/tests/components/teslemetry/test_cover.py index 8d4493ab25f..7dbdcfa5747 100644 --- a/tests/components/teslemetry/test_cover.py +++ b/tests/components/teslemetry/test_cover.py @@ -1,24 +1,18 @@ """Test the Teslemetry cover platform.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline +from syrupy.assertion import SnapshotAssertion from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, + CoverState, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_CLOSED, - STATE_OPEN, - STATE_UNKNOWN, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -43,7 +37,7 @@ async def test_cover_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, ) -> None: """Tests that the cover entities are correct with alternate values.""" @@ -57,7 +51,7 @@ async def 
test_cover_noscope( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_metadata, + mock_metadata: AsyncMock, ) -> None: """Tests that the cover entities are correct without scopes.""" @@ -66,18 +60,6 @@ async def test_cover_noscope( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_cover_offline( - hass: HomeAssistant, - mock_vehicle_data, -) -> None: - """Tests that the cover entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.COVER]) - state = hass.states.get("cover.test_windows") - assert state.state == STATE_UNKNOWN - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_services( hass: HomeAssistant, @@ -101,7 +83,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -113,7 +95,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED # Charge Port Door entity_id = "cover.test_charge_port_door" @@ -130,7 +112,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN with patch( "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", @@ -145,7 +127,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED # Frunk entity_id = "cover.test_frunk" @@ -162,7 +144,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN # Trunk entity_id = "cover.test_trunk" @@ -179,7 +161,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -191,7 +173,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED # Sunroof entity_id = "cover.test_sunroof" @@ -208,7 +190,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -220,7 +202,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -232,4 +214,4 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED diff --git a/tests/components/teslemetry/test_device_tracker.py b/tests/components/teslemetry/test_device_tracker.py index 55deaefdab5..d86c3ca8596 100644 --- a/tests/components/teslemetry/test_device_tracker.py +++ b/tests/components/teslemetry/test_device_tracker.py @@ 
-1,13 +1,15 @@ """Test the Teslemetry device tracker platform.""" -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline +from unittest.mock import AsyncMock -from homeassistant.const import STATE_UNKNOWN, Platform +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . import assert_entities, setup_platform +from . import assert_entities, assert_entities_alt, setup_platform +from .const import VEHICLE_DATA_ALT async def test_device_tracker( @@ -21,13 +23,14 @@ async def test_device_tracker( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_device_tracker_offline( +async def test_device_tracker_alt( hass: HomeAssistant, - mock_vehicle_data, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, ) -> None: - """Tests that the device tracker entities are correct when offline.""" + """Tests that the device tracker entities are correct.""" - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.DEVICE_TRACKER]) - state = hass.states.get("device_tracker.test_location") - assert state.state == STATE_UNKNOWN + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + entry = await setup_platform(hass, [Platform.DEVICE_TRACKER]) + assert_entities_alt(hass, entry.entry_id, entity_registry, snapshot) diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index 5520a5549bd..6d4e04c21b4 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, @@ -12,18 +12,15 @@ from tesla_fleet_api.exceptions import ( VehicleOffline, ) -from homeassistant.components.teslemetry.coordinator import ( - VEHICLE_INTERVAL, - VEHICLE_WAIT, -) +from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.components.teslemetry.models import TeslemetryData from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from . 
import setup_platform -from .const import VEHICLE_DATA_ALT, WAKE_UP_ASLEEP +from .const import VEHICLE_DATA_ALT from tests.common import async_fire_time_changed @@ -48,7 +45,10 @@ async def test_load_unload(hass: HomeAssistant) -> None: @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_init_error( - hass: HomeAssistant, mock_products, side_effect, state + hass: HomeAssistant, + mock_products: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test init with errors.""" @@ -69,24 +69,8 @@ async def test_devices( assert device == snapshot(name=f"{device.identifiers}") -# Vehicle Coordinator -async def test_vehicle_refresh_asleep( - hass: HomeAssistant, - mock_vehicle: AsyncMock, - mock_vehicle_data: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator refresh with an error.""" - - mock_vehicle.return_value = WAKE_UP_ASLEEP - entry = await setup_platform(hass, [Platform.CLIMATE]) - assert entry.state is ConfigEntryState.LOADED - mock_vehicle.assert_called_once() - mock_vehicle_data.assert_not_called() - - async def test_vehicle_refresh_offline( - hass: HomeAssistant, mock_vehicle_data, freezer: FrozenDateTimeFactory + hass: HomeAssistant, mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory ) -> None: """Test coordinator refresh with an error.""" entry = await setup_platform(hass, [Platform.CLIMATE]) @@ -103,7 +87,10 @@ async def test_vehicle_refresh_offline( @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_vehicle_refresh_error( - hass: HomeAssistant, mock_vehicle_data, side_effect, state + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test coordinator refresh with an error.""" mock_vehicle_data.side_effect = side_effect @@ -111,67 +98,13 @@ async def test_vehicle_refresh_error( assert entry.state is state -async def test_vehicle_sleep( - hass: HomeAssistant, mock_vehicle_data, freezer: FrozenDateTimeFactory -) -> None: - """Test coordinator refresh with an error.""" - await setup_platform(hass, [Platform.CLIMATE]) - assert mock_vehicle_data.call_count == 1 - - freezer.tick(VEHICLE_WAIT + VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # Let vehicle sleep, no updates for 15 minutes - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 2 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # No polling, call_count should not increase - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 2 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # No polling, call_count should not increase - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 2 - - freezer.tick(VEHICLE_WAIT) - async_fire_time_changed(hass) - # Vehicle didn't sleep, go back to normal - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 3 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # Regular polling - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 4 - - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # Vehicle active - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 5 - - freezer.tick(VEHICLE_WAIT) - async_fire_time_changed(hass) - # Dont let sleep when active - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 6 - - 
freezer.tick(VEHICLE_WAIT) - async_fire_time_changed(hass) - # Dont let sleep when active - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 7 - - # Test Energy Live Coordinator @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_energy_live_refresh_error( - hass: HomeAssistant, mock_live_status, side_effect, state + hass: HomeAssistant, + mock_live_status: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test coordinator refresh with an error.""" mock_live_status.side_effect = side_effect @@ -182,9 +115,70 @@ async def test_energy_live_refresh_error( # Test Energy Site Coordinator @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_energy_site_refresh_error( - hass: HomeAssistant, mock_site_info, side_effect, state + hass: HomeAssistant, + mock_site_info: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test coordinator refresh with an error.""" mock_site_info.side_effect = side_effect entry = await setup_platform(hass) assert entry.state is state + + +# Test Energy History Coordinator +@pytest.mark.parametrize(("side_effect", "state"), ERRORS) +async def test_energy_history_refresh_error( + hass: HomeAssistant, + mock_energy_history: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, +) -> None: + """Test coordinator refresh with an error.""" + mock_energy_history.side_effect = side_effect + entry = await setup_platform(hass) + assert entry.state is state + + +async def test_vehicle_stream( + hass: HomeAssistant, + mock_listen: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test vehicle stream events.""" + + entry = await setup_platform(hass, [Platform.BINARY_SENSOR]) + mock_listen.assert_called_once() + + state = hass.states.get("binary_sensor.test_status") + assert state.state == STATE_ON + + state = hass.states.get("binary_sensor.test_user_present") + assert state.state == STATE_OFF + + runtime_data: TeslemetryData = entry.runtime_data + for listener, _ in runtime_data.vehicles[0].stream._listeners.values(): + listener( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "vehicle_data": VEHICLE_DATA_ALT["response"], + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.test_user_present") + assert state.state == STATE_ON + + for listener, _ in runtime_data.vehicles[0].stream._listeners.values(): + listener( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "state": "offline", + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.test_status") + assert state.state == STATE_OFF diff --git a/tests/components/teslemetry/test_lock.py b/tests/components/teslemetry/test_lock.py index a50e97fe6ad..f7c9fea1400 100644 --- a/tests/components/teslemetry/test_lock.py +++ b/tests/components/teslemetry/test_lock.py @@ -3,21 +3,15 @@ from unittest.mock import patch import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline +from syrupy.assertion import SnapshotAssertion from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, + LockState, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_LOCKED, - STATE_UNKNOWN, - STATE_UNLOCKED, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions 
import ServiceValidationError from homeassistant.helpers import entity_registry as er @@ -37,18 +31,6 @@ async def test_lock( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_lock_offline( - hass: HomeAssistant, - mock_vehicle_data, -) -> None: - """Tests that the lock entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.LOCK]) - state = hass.states.get("lock.test_lock") - assert state.state == STATE_UNKNOWN - - async def test_lock_services( hass: HomeAssistant, ) -> None: @@ -69,7 +51,7 @@ async def test_lock_services( blocking=True, ) state = hass.states.get(entity_id) - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED call.assert_called_once() with patch( @@ -83,7 +65,7 @@ async def test_lock_services( blocking=True, ) state = hass.states.get(entity_id) - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED call.assert_called_once() entity_id = "lock.test_charge_cable_lock" @@ -107,5 +89,5 @@ async def test_lock_services( blocking=True, ) state = hass.states.get(entity_id) - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED call.assert_called_once() diff --git a/tests/components/teslemetry/test_media_player.py b/tests/components/teslemetry/test_media_player.py index 8544c11a625..ae462bfd026 100644 --- a/tests/components/teslemetry/test_media_player.py +++ b/tests/components/teslemetry/test_media_player.py @@ -1,9 +1,8 @@ """Test the Teslemetry media player platform.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline +from syrupy.assertion import SnapshotAssertion from homeassistant.components.media_player import ( ATTR_MEDIA_VOLUME_LEVEL, @@ -38,7 +37,7 @@ async def test_media_player_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, ) -> None: """Tests that the media player entities are correct.""" @@ -47,23 +46,11 @@ async def test_media_player_alt( assert_entities_alt(hass, entry.entry_id, entity_registry, snapshot) -async def test_media_player_offline( - hass: HomeAssistant, - mock_vehicle_data, -) -> None: - """Tests that the media player entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.MEDIA_PLAYER]) - state = hass.states.get("media_player.test_media_player") - assert state.state == MediaPlayerState.OFF - - async def test_media_player_noscope( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_metadata, + mock_metadata: AsyncMock, ) -> None: """Tests that the media player entities are correct without required scope.""" diff --git a/tests/components/teslemetry/test_number.py b/tests/components/teslemetry/test_number.py index 728d37c4d7c..65c03514d22 100644 --- a/tests/components/teslemetry/test_number.py +++ b/tests/components/teslemetry/test_number.py @@ -1,17 +1,16 @@ """Test the Teslemetry number platform.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline +from syrupy.assertion import SnapshotAssertion from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) -from 
homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -31,20 +30,10 @@ async def test_number( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_number_offline( - hass: HomeAssistant, - mock_vehicle_data, -) -> None: - """Tests that the number entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.NUMBER]) - state = hass.states.get("number.test_charge_current") - assert state.state == STATE_UNKNOWN - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_number_services(hass: HomeAssistant, mock_vehicle_data) -> None: +async def test_number_services( + hass: HomeAssistant, mock_vehicle_data: AsyncMock +) -> None: """Tests that the number services work.""" mock_vehicle_data.return_value = VEHICLE_DATA_ALT await setup_platform(hass, [Platform.NUMBER]) diff --git a/tests/components/teslemetry/test_select.py b/tests/components/teslemetry/test_select.py index 3b1c8c436bf..005a6a2004e 100644 --- a/tests/components/teslemetry/test_select.py +++ b/tests/components/teslemetry/test_select.py @@ -1,11 +1,10 @@ """Test the Teslemetry select platform.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode -from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.select import ( ATTR_OPTION, @@ -33,18 +32,6 @@ async def test_select( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_select_offline( - hass: HomeAssistant, - mock_vehicle_data, -) -> None: - """Tests that the select entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.SELECT]) - state = hass.states.get("select.test_seat_heater_front_left") - assert state.state == STATE_UNKNOWN - - async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: """Tests that the select services work.""" mock_vehicle_data.return_value = VEHICLE_DATA_ALT @@ -112,3 +99,23 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: state = hass.states.get(entity_id) assert state.state == EnergyExportMode.BATTERY_OK.value call.assert_called_once() + + +async def test_select_invalid_data( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, +) -> None: + """Tests that the select entities handle invalid data.""" + + broken_data = VEHICLE_DATA_ALT.copy() + broken_data["response"]["climate_state"]["seat_heater_left"] = "green" + broken_data["response"]["climate_state"]["steering_wheel_heat_level"] = "yellow" + + mock_vehicle_data.return_value = broken_data + await setup_platform(hass, [Platform.SELECT]) + state = hass.states.get("select.test_seat_heater_front_left") + assert state.state == STATE_UNKNOWN + state = hass.states.get("select.test_steering_wheel_heater") + assert state.state == STATE_UNKNOWN diff --git a/tests/components/teslemetry/test_sensor.py b/tests/components/teslemetry/test_sensor.py index c5bdd15d712..f0b472a7183 100644 --- a/tests/components/teslemetry/test_sensor.py +++ b/tests/components/teslemetry/test_sensor.py @@ 
-1,8 +1,10 @@ """Test the Teslemetry sensor platform.""" +from unittest.mock import AsyncMock + from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.const import Platform @@ -21,7 +23,7 @@ async def test_sensors( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, ) -> None: """Tests that the sensor entities are correct.""" diff --git a/tests/components/teslemetry/test_switch.py b/tests/components/teslemetry/test_switch.py index 47a2843eb8f..6a1ddb430ce 100644 --- a/tests/components/teslemetry/test_switch.py +++ b/tests/components/teslemetry/test_switch.py @@ -1,23 +1,16 @@ """Test the Teslemetry switch platform.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline +from syrupy.assertion import SnapshotAssertion from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_OFF, - STATE_ON, - STATE_UNKNOWN, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -40,7 +33,7 @@ async def test_switch_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, ) -> None: """Tests that the switch entities are correct.""" @@ -49,18 +42,6 @@ async def test_switch_alt( assert_entities_alt(hass, entry.entry_id, entity_registry, snapshot) -async def test_switch_offline( - hass: HomeAssistant, - mock_vehicle_data, -) -> None: - """Tests that the switch entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.SWITCH]) - state = hass.states.get("switch.test_auto_seat_climate_left") - assert state.state == STATE_UNKNOWN - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ("name", "on", "off"), diff --git a/tests/components/teslemetry/test_update.py b/tests/components/teslemetry/test_update.py index 62bbcc94516..448f31afd67 100644 --- a/tests/components/teslemetry/test_update.py +++ b/tests/components/teslemetry/test_update.py @@ -1,16 +1,15 @@ """Test the Teslemetry update platform.""" import copy -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline +from syrupy.assertion import SnapshotAssertion from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.components.teslemetry.update import INSTALLING from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -35,7 +34,7 @@ async def test_update_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - 
mock_vehicle_data, + mock_vehicle_data: AsyncMock, ) -> None: """Tests that the update entities are correct.""" @@ -44,21 +43,9 @@ async def test_update_alt( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_update_offline( - hass: HomeAssistant, - mock_vehicle_data, -) -> None: - """Tests that the update entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, [Platform.UPDATE]) - state = hass.states.get("update.test_update") - assert state.state == STATE_UNKNOWN - - async def test_update_services( hass: HomeAssistant, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, ) -> None: diff --git a/tests/components/tessie/snapshots/test_binary_sensors.ambr b/tests/components/tessie/snapshots/test_binary_sensors.ambr index e8912bb0e7f..6c0da044df2 100644 --- a/tests/components/tessie/snapshots/test_binary_sensors.ambr +++ b/tests/components/tessie/snapshots/test_binary_sensors.ambr @@ -137,6 +137,52 @@ 'state': 'off', }) # --- +# name: test_binary_sensors[binary_sensor.energy_site_storm_watch_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_storm_watch_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Storm watch active', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storm_mode_active', + 'unique_id': '123456-storm_mode_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_storm_watch_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Storm watch active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_storm_watch_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensors[binary_sensor.test_auto_seat_climate_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_update.ambr b/tests/components/tessie/snapshots/test_update.ambr index 622cf69c7f0..1728c13b0ad 100644 --- a/tests/components/tessie/snapshots/test_update.ambr +++ b/tests/components/tessie/snapshots/test_update.ambr @@ -36,6 +36,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/tessie/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, @@ -46,6 +47,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.test_update', diff --git a/tests/components/tessie/test_config_flow.py b/tests/components/tessie/test_config_flow.py index 043086971fa..d51d467002d 100644 --- a/tests/components/tessie/test_config_flow.py +++ b/tests/components/tessie/test_config_flow.py @@ -143,14 +143,7 @@ async def test_reauth( ) mock_entry.add_to_hass(hass) - result1 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - 
data=TEST_CONFIG, - ) + result1 = await mock_entry.start_reauth_flow(hass) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "reauth_confirm" @@ -194,15 +187,7 @@ async def test_reauth_errors( ) mock_entry.add_to_hass(hass) - result1 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=TEST_CONFIG, - ) + result1 = await mock_entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], diff --git a/tests/components/tessie/test_cover.py b/tests/components/tessie/test_cover.py index be4dda3ec7b..49a53fd327c 100644 --- a/tests/components/tessie/test_cover.py +++ b/tests/components/tessie/test_cover.py @@ -9,8 +9,7 @@ from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, - STATE_CLOSED, - STATE_OPEN, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant @@ -57,7 +56,7 @@ async def test_covers( blocking=True, ) mock_open.assert_called_once() - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # Test close windows if closefunc: @@ -72,7 +71,7 @@ async def test_covers( blocking=True, ) mock_close.assert_called_once() - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED async def test_errors(hass: HomeAssistant) -> None: @@ -113,4 +112,4 @@ async def test_errors(hass: HomeAssistant) -> None: blocking=True, ) mock_set.assert_called_once() - assert str(error.value) == TEST_RESPONSE_ERROR["reason"] + assert str(error.value) == f"Command failed, {TEST_RESPONSE_ERROR["reason"]}" diff --git a/tests/components/tessie/test_lock.py b/tests/components/tessie/test_lock.py index cfb6168b399..1208bb17d55 100644 --- a/tests/components/tessie/test_lock.py +++ b/tests/components/tessie/test_lock.py @@ -6,17 +6,17 @@ import pytest from syrupy import SnapshotAssertion from homeassistant.components.lock import ( - ATTR_CODE, DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, + LockState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers import entity_registry as er -from .common import DOMAIN, assert_entities, setup_platform +from .common import assert_entities, setup_platform async def test_locks( @@ -24,17 +24,6 @@ async def test_locks( ) -> None: """Tests that the lock entity is correct.""" - # Create the deprecated speed limit lock entity - entity_registry.async_get_or_create( - LOCK_DOMAIN, - DOMAIN, - "VINVINVIN-vehicle_state_speed_limit_mode_active", - original_name="Charge cable lock", - has_entity_name=True, - translation_key="vehicle_state_speed_limit_mode_active", - disabled_by=er.RegistryEntryDisabler.INTEGRATION, - ) - entry = await setup_platform(hass, [Platform.LOCK]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) @@ -49,7 +38,7 @@ async def test_locks( blocking=True, ) mock_run.assert_called_once() - assert hass.states.get(entity_id).state == STATE_LOCKED + assert hass.states.get(entity_id).state == LockState.LOCKED with 
patch("homeassistant.components.tessie.lock.unlock") as mock_run: await hass.services.async_call( @@ -59,7 +48,7 @@ async def test_locks( blocking=True, ) mock_run.assert_called_once() - assert hass.states.get(entity_id).state == STATE_UNLOCKED + assert hass.states.get(entity_id).state == LockState.UNLOCKED # Test charge cable lock set value functions entity_id = "lock.test_charge_cable_lock" @@ -80,67 +69,5 @@ async def test_locks( {ATTR_ENTITY_ID: [entity_id]}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_UNLOCKED + assert hass.states.get(entity_id).state == LockState.UNLOCKED mock_run.assert_called_once() - - -async def test_speed_limit_lock( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, -) -> None: - """Tests that the deprecated speed limit lock entity is correct.""" - # Create the deprecated speed limit lock entity - entity = entity_registry.async_get_or_create( - LOCK_DOMAIN, - DOMAIN, - "VINVINVIN-vehicle_state_speed_limit_mode_active", - original_name="Charge cable lock", - has_entity_name=True, - translation_key="vehicle_state_speed_limit_mode_active", - ) - - with patch( - "homeassistant.components.tessie.lock.automations_with_entity", - return_value=["item"], - ): - await setup_platform(hass, [Platform.LOCK]) - assert issue_registry.async_get_issue( - DOMAIN, f"deprecated_speed_limit_{entity.entity_id}_item" - ) - - # Test lock set value functions - with patch( - "homeassistant.components.tessie.lock.enable_speed_limit" - ) as mock_enable_speed_limit: - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_LOCK, - {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, - blocking=True, - ) - assert hass.states.get(entity.entity_id).state == STATE_LOCKED - mock_enable_speed_limit.assert_called_once() - # Assert issue has been raised in the issue register - assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_locked") - - with patch( - "homeassistant.components.tessie.lock.disable_speed_limit" - ) as mock_disable_speed_limit: - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_UNLOCK, - {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, - blocking=True, - ) - assert hass.states.get(entity.entity_id).state == STATE_UNLOCKED - mock_disable_speed_limit.assert_called_once() - assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_unlocked") - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_UNLOCK, - {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "abc"}, - blocking=True, - ) diff --git a/tests/components/text/test_init.py b/tests/components/text/test_init.py index 8e20af6cb7a..3764d481928 100644 --- a/tests/components/text/test_init.py +++ b/tests/components/text/test_init.py @@ -64,21 +64,22 @@ async def test_text_set_value(hass: HomeAssistant) -> None: with pytest.raises(ValueError): await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: ""}) + text, ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: ""}) ) with pytest.raises(ValueError): await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "hello world!"}) + text, + ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "hello world!"}), ) with pytest.raises(ValueError): await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "HELLO"}) + text, ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "HELLO"}) ) await _async_set_value( - text, 
ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "test2"}) + text, ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "test2"}) ) assert text.state == "test2" diff --git a/tests/components/thethingsnetwork/test_config_flow.py b/tests/components/thethingsnetwork/test_config_flow.py index 107d84e099b..99c4a080e17 100644 --- a/tests/components/thethingsnetwork/test_config_flow.py +++ b/tests/components/thethingsnetwork/test_config_flow.py @@ -4,7 +4,7 @@ import pytest from ttn_client import TTNAuthError from homeassistant.components.thethingsnetwork.const import CONF_APP_ID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -12,6 +12,8 @@ from homeassistant.data_entry_flow import FlowResultType from . import init_integration from .conftest import API_KEY, APP_ID, HOST +from tests.common import MockConfigEntry + USER_DATA = {CONF_HOST: HOST, CONF_APP_ID: APP_ID, CONF_API_KEY: API_KEY} @@ -92,21 +94,13 @@ async def test_duplicate_entry( async def test_step_reauth( - hass: HomeAssistant, mock_ttnclient, mock_config_entry + hass: HomeAssistant, mock_ttnclient, mock_config_entry: MockConfigEntry ) -> None: """Test that the reauth step works.""" await init_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": APP_ID, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_DATA, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert not result["errors"] diff --git a/tests/components/thethingsnetwork/test_init.py b/tests/components/thethingsnetwork/test_init.py index 1e0b64c933d..e39c764d5f9 100644 --- a/tests/components/thethingsnetwork/test_init.py +++ b/tests/components/thethingsnetwork/test_init.py @@ -4,22 +4,6 @@ import pytest from ttn_client import TTNAuthError from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from .conftest import DOMAIN - - -async def test_error_configuration( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test issue is logged when deprecated configuration is used.""" - await async_setup_component( - hass, DOMAIN, {DOMAIN: {"app_id": "123", "access_key": "42"}} - ) - await hass.async_block_till_done() - assert issue_registry.async_get_issue(DOMAIN, "manual_migration") @pytest.mark.parametrize(("exception_class"), [TTNAuthError, Exception]) diff --git a/tests/components/thread/test_discovery.py b/tests/components/thread/test_discovery.py index d9895aa72b2..3cf195ad40e 100644 --- a/tests/components/thread/test_discovery.py +++ b/tests/components/thread/test_discovery.py @@ -74,6 +74,7 @@ async def test_discover_routers( assert discovered[-1] == ( "aeeb2f594b570bbf", discovery.ThreadRouterDiscoveryData( + instance_name="HomeAssistant OpenThreadBorderRouter #0BBF", addresses=["192.168.0.115"], border_agent_id="230c6a1ac57f6f4be262acf32e5ef52c", brand="homeassistant", @@ -101,6 +102,7 @@ async def test_discover_routers( assert discovered[-1] == ( "f6a99b425a67abed", discovery.ThreadRouterDiscoveryData( + instance_name="Google-Nest-Hub-#ABED", addresses=["192.168.0.124"], 
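
The reauth hunks above (tessie, thethingsnetwork, and later tile) all converge on the MockConfigEntry.start_reauth_flow helper from tests/common.py instead of hand-building a flow context with SOURCE_REAUTH. A minimal sketch of the resulting test shape, assuming a mock_entry fixture that has already been added to hass (the fixture name and test name here are illustrative, not part of the patch):

from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from tests.common import MockConfigEntry


async def test_reauth_shows_confirm_form(
    hass: HomeAssistant, mock_entry: MockConfigEntry
) -> None:
    """Sketch: mock_entry is assumed to have called add_to_hass(hass) already."""
    result = await mock_entry.start_reauth_flow(hass)
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "reauth_confirm"
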
border_agent_id="bc3740c3e963aa8735bebecd7cc503c7", brand="google", @@ -180,6 +182,7 @@ async def test_discover_routers_unconfigured( router_discovered_removed.assert_called_once_with( "aeeb2f594b570bbf", discovery.ThreadRouterDiscoveryData( + instance_name="HomeAssistant OpenThreadBorderRouter #0BBF", addresses=["192.168.0.115"], border_agent_id="230c6a1ac57f6f4be262acf32e5ef52c", brand="homeassistant", @@ -226,6 +229,7 @@ async def test_discover_routers_bad_or_missing_optional_data( router_discovered_removed.assert_called_once_with( "aeeb2f594b570bbf", discovery.ThreadRouterDiscoveryData( + instance_name="HomeAssistant OpenThreadBorderRouter #0BBF", addresses=["192.168.0.115"], border_agent_id="230c6a1ac57f6f4be262acf32e5ef52c", brand=None, diff --git a/tests/components/thread/test_websocket_api.py b/tests/components/thread/test_websocket_api.py index f3390a9d8b8..fb429acc3e0 100644 --- a/tests/components/thread/test_websocket_api.py +++ b/tests/components/thread/test_websocket_api.py @@ -353,6 +353,7 @@ async def test_discover_routers( assert msg == { "event": { "data": { + "instance_name": "HomeAssistant OpenThreadBorderRouter #0BBF", "addresses": ["192.168.0.115"], "border_agent_id": "230c6a1ac57f6f4be262acf32e5ef52c", "brand": "homeassistant", @@ -388,6 +389,7 @@ async def test_discover_routers( "brand": "google", "extended_address": "f6a99b425a67abed", "extended_pan_id": "9e75e256f61409a3", + "instance_name": "Google-Nest-Hub-#ABED", "model_name": "Google Nest Hub", "network_name": "NEST-PAN-E1AF", "server": "2d99f293-cd8e-2770-8dd2-6675de9fa000.local.", diff --git a/tests/components/threshold/test_binary_sensor.py b/tests/components/threshold/test_binary_sensor.py index 53a8446c210..259009c6319 100644 --- a/tests/components/threshold/test_binary_sensor.py +++ b/tests/components/threshold/test_binary_sensor.py @@ -2,11 +2,36 @@ import pytest -from homeassistant.components.threshold.const import DOMAIN +from homeassistant.components.threshold.const import ( + ATTR_HYSTERESIS, + ATTR_LOWER, + ATTR_POSITION, + ATTR_SENSOR_VALUE, + ATTR_TYPE, + ATTR_UPPER, + CONF_HYSTERESIS, + CONF_LOWER, + CONF_UPPER, + DOMAIN, + POSITION_ABOVE, + POSITION_BELOW, + POSITION_IN_RANGE, + POSITION_UNKNOWN, + TYPE_LOWER, + TYPE_RANGE, + TYPE_UPPER, +) from homeassistant.const import ( + ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, + CONF_ENTITY_ID, + CONF_NAME, + CONF_PLATFORM, + STATE_OFF, + STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN, + Platform, UnitOfTemperature, ) from homeassistant.core import HomeAssistant @@ -16,461 +41,378 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -async def test_sensor_upper(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("vals", "expected_position", "expected_state"), + [ + ([15], POSITION_BELOW, STATE_OFF), # at threshold + ([15, 16], POSITION_ABOVE, STATE_ON), + ([15, 16, 14], POSITION_BELOW, STATE_OFF), + ([15, 16, 14, 15], POSITION_BELOW, STATE_OFF), # below -> threshold + ([15, 16, 14, 15, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), + ([15, 16, 14, 15, "cat", 15], POSITION_BELOW, STATE_OFF), + ([15, None], POSITION_UNKNOWN, STATE_UNKNOWN), + ], +) +async def test_sensor_upper( + hass: HomeAssistant, + vals: list[float | str | None], + expected_position: str, + expected_state: str, +) -> None: """Test if source is above threshold.""" config = { - "binary_sensor": { - "platform": "threshold", - "upper": "15", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + 
CONF_UPPER: "15", + CONF_ENTITY_ID: "sensor.test_monitored", } } - assert await async_setup_component(hass, "binary_sensor", config) + assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() - # Set the monitored sensor's state to the threshold - hass.states.async_set("sensor.test_monitored", 15) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" - - hass.states.async_set( - "sensor.test_monitored", - 16, - {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" + assert state.attributes[ATTR_UPPER] == float( + config[Platform.BINARY_SENSOR][CONF_UPPER] ) - await hass.async_block_till_done() + assert state.attributes[ATTR_HYSTERESIS] == 0.0 + assert state.attributes[ATTR_TYPE] == TYPE_UPPER + + for val in vals: + hass.states.async_set("sensor.test_monitored", val) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["entity_id"] == "sensor.test_monitored" - assert state.attributes["sensor_value"] == 16 - assert state.attributes["position"] == "above" - assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) - assert state.attributes["hysteresis"] == 0.0 - assert state.attributes["type"] == "upper" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", 14) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 15) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", "cat") - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "unknown" - assert state.state == "unknown" - - hass.states.async_set("sensor.test_monitored", 15) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" + assert state.attributes[ATTR_POSITION] == expected_position + assert state.state == expected_state -async def test_sensor_lower(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("vals", "expected_position", "expected_state"), + [ + ([15], POSITION_ABOVE, STATE_OFF), # at threshold + ([15, 16], POSITION_ABOVE, STATE_OFF), + ([15, 16, 14], POSITION_BELOW, STATE_ON), + ([15, 16, 14, 15], POSITION_BELOW, STATE_ON), + ([15, 16, 14, 15, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), + ([15, 16, 14, 15, "cat", 15], POSITION_ABOVE, STATE_OFF), + ([15, None], POSITION_UNKNOWN, STATE_UNKNOWN), + ], +) +async def test_sensor_lower( + hass: HomeAssistant, + vals: list[float | str | None], + expected_position: str, + expected_state: str, +) -> None: """Test if source is below threshold.""" config = { - "binary_sensor": { - "platform": "threshold", - "lower": "15", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + CONF_LOWER: "15", + CONF_ENTITY_ID: "sensor.test_monitored", } } - assert await async_setup_component(hass, "binary_sensor", config) + assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) await 
hass.async_block_till_done() - # Set the monitored sensor's state to the threshold - hass.states.async_set("sensor.test_monitored", 15) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" + assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" + assert state.attributes[ATTR_LOWER] == float( + config[Platform.BINARY_SENSOR][CONF_LOWER] + ) + assert state.attributes[ATTR_HYSTERESIS] == 0.0 + assert state.attributes[ATTR_TYPE] == TYPE_LOWER - hass.states.async_set("sensor.test_monitored", 16) - await hass.async_block_till_done() + for val in vals: + hass.states.async_set("sensor.test_monitored", val) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) - assert state.attributes["hysteresis"] == 0.0 - assert state.attributes["type"] == "lower" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 14) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", 15) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", "cat") - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "unknown" - assert state.state == "unknown" - - hass.states.async_set("sensor.test_monitored", 15) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" + assert state.attributes[ATTR_POSITION] == expected_position + assert state.state == expected_state -async def test_sensor_upper_hysteresis(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("vals", "expected_position", "expected_state"), + [ + ([17.5], POSITION_BELOW, STATE_OFF), # threshold + hysteresis + ([17.5, 12.5], POSITION_BELOW, STATE_OFF), # threshold - hysteresis + ([17.5, 12.5, 20], POSITION_ABOVE, STATE_ON), + ([17.5, 12.5, 20, 13], POSITION_ABOVE, STATE_ON), + ([17.5, 12.5, 20, 13, 12], POSITION_BELOW, STATE_OFF), + ([17.5, 12.5, 20, 13, 12, 17], POSITION_BELOW, STATE_OFF), + ([17.5, 12.5, 20, 13, 12, 17, 18], POSITION_ABOVE, STATE_ON), + ([17.5, 12.5, 20, 13, 12, 17, 18, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), + ([17.5, 12.5, 20, 13, 12, 17, 18, "cat", 18], POSITION_ABOVE, STATE_ON), + ([18, None], POSITION_UNKNOWN, STATE_UNKNOWN), + # below within -> above + ([14, 17.6], POSITION_ABOVE, STATE_ON), + # above within -> below + ([16, 12.4], POSITION_BELOW, STATE_OFF), + # below within -> above within + ([14, 16], POSITION_BELOW, STATE_OFF), + # above within -> below within + ([16, 14], POSITION_BELOW, STATE_OFF), + # above -> above within -> below within + ([20, 16, 14], POSITION_ABOVE, STATE_ON), + # below -> below within -> above within + ([10, 14, 16], POSITION_BELOW, STATE_OFF), + ], +) +async def test_sensor_upper_hysteresis( + hass: HomeAssistant, + vals: list[float | str | None], + expected_position: str, + expected_state: str, +) -> None: """Test if source is above threshold using hysteresis.""" config = { - 
"binary_sensor": { - "platform": "threshold", - "upper": "15", - "hysteresis": "2.5", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + CONF_UPPER: "15", + CONF_HYSTERESIS: "2.5", + CONF_ENTITY_ID: "sensor.test_monitored", } } - assert await async_setup_component(hass, "binary_sensor", config) + assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() - # Set the monitored sensor's state to the threshold + hysteresis - hass.states.async_set("sensor.test_monitored", 17.5) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" + assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" + assert state.attributes[ATTR_UPPER] == float( + config[Platform.BINARY_SENSOR][CONF_UPPER] + ) + assert state.attributes[ATTR_HYSTERESIS] == 2.5 + assert state.attributes[ATTR_TYPE] == TYPE_UPPER - # Set the monitored sensor's state to the threshold - hysteresis - hass.states.async_set("sensor.test_monitored", 12.5) - await hass.async_block_till_done() + for val in vals: + hass.states.async_set("sensor.test_monitored", val) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 20) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) - assert state.attributes["hysteresis"] == 2.5 - assert state.attributes["type"] == "upper" - assert state.attributes["position"] == "above" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", 13) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", 12) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 17) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 18) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", "cat") - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "unknown" - assert state.state == "unknown" - - hass.states.async_set("sensor.test_monitored", 18) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "on" + assert state.attributes[ATTR_POSITION] == expected_position + assert state.state == expected_state -async def test_sensor_lower_hysteresis(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("vals", "expected_position", "expected_state"), + [ + ([17.5], POSITION_ABOVE, STATE_OFF), # threshold + hysteresis + ([17.5, 12.5], POSITION_ABOVE, 
STATE_OFF), # threshold - hysteresis + ([17.5, 12.5, 20], POSITION_ABOVE, STATE_OFF), + ([17.5, 12.5, 20, 13], POSITION_ABOVE, STATE_OFF), + ([17.5, 12.5, 20, 13, 12], POSITION_BELOW, STATE_ON), + ([17.5, 12.5, 20, 13, 12, 17], POSITION_BELOW, STATE_ON), + ([17.5, 12.5, 20, 13, 12, 17, 18], POSITION_ABOVE, STATE_OFF), + ([17.5, 12.5, 20, 13, 12, 17, 18, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), + ([17.5, 12.5, 20, 13, 12, 17, 18, "cat", 18], POSITION_ABOVE, STATE_OFF), + ([18, None], POSITION_UNKNOWN, STATE_UNKNOWN), + # below within -> above + ([14, 17.6], POSITION_ABOVE, STATE_OFF), + # above within -> below + ([16, 12.4], POSITION_BELOW, STATE_ON), + # below within -> above within + ([14, 16], POSITION_ABOVE, STATE_OFF), + # above within -> below within + ([16, 14], POSITION_ABOVE, STATE_OFF), + # above -> above within -> below within + ([20, 16, 14], POSITION_ABOVE, STATE_OFF), + # below -> below within -> above within + ([10, 14, 16], POSITION_BELOW, STATE_ON), + ], +) +async def test_sensor_lower_hysteresis( + hass: HomeAssistant, + vals: list[float | str | None], + expected_position: str, + expected_state: str, +) -> None: """Test if source is below threshold using hysteresis.""" config = { - "binary_sensor": { - "platform": "threshold", - "lower": "15", - "hysteresis": "2.5", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + CONF_LOWER: "15", + CONF_HYSTERESIS: "2.5", + CONF_ENTITY_ID: "sensor.test_monitored", } } - assert await async_setup_component(hass, "binary_sensor", config) + assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() - # Set the monitored sensor's state to the threshold + hysteresis - hass.states.async_set("sensor.test_monitored", 17.5) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" + assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" + assert state.attributes[ATTR_LOWER] == float( + config[Platform.BINARY_SENSOR][CONF_LOWER] + ) + assert state.attributes[ATTR_HYSTERESIS] == 2.5 + assert state.attributes[ATTR_TYPE] == TYPE_LOWER - # Set the monitored sensor's state to the threshold - hysteresis - hass.states.async_set("sensor.test_monitored", 12.5) - await hass.async_block_till_done() + for val in vals: + hass.states.async_set("sensor.test_monitored", val) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 20) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) - assert state.attributes["hysteresis"] == 2.5 - assert state.attributes["type"] == "lower" - assert state.attributes["position"] == "above" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 13) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 12) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "on" - - 
hass.states.async_set("sensor.test_monitored", 17) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", 18) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", "cat") - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "unknown" - assert state.state == "unknown" - - hass.states.async_set("sensor.test_monitored", 18) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" + assert state.attributes[ATTR_POSITION] == expected_position + assert state.state == expected_state -async def test_sensor_in_range_no_hysteresis(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("vals", "expected_position", "expected_state"), + [ + ([10], POSITION_IN_RANGE, STATE_ON), # at lower threshold + ([10, 20], POSITION_IN_RANGE, STATE_ON), # lower threshold -> upper threshold + ([10, 20, 16], POSITION_IN_RANGE, STATE_ON), + ([10, 20, 16, 9], POSITION_BELOW, STATE_OFF), + ([10, 20, 16, 9, 21], POSITION_ABOVE, STATE_OFF), + ([10, 20, 16, 9, 21, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), + ([10, 20, 16, 9, 21, "cat", 21], POSITION_ABOVE, STATE_OFF), + ([21, None], POSITION_UNKNOWN, STATE_UNKNOWN), + # upper threshold -> lower threshold + ([20, 10], POSITION_IN_RANGE, STATE_ON), + # in-range -> upper threshold + ([15, 20], POSITION_IN_RANGE, STATE_ON), + # in-range -> lower threshold + ([15, 10], POSITION_IN_RANGE, STATE_ON), + # below -> above + ([5, 25], POSITION_ABOVE, STATE_OFF), + # above -> below + ([25, 5], POSITION_BELOW, STATE_OFF), + # in-range -> above + ([15, 25], POSITION_ABOVE, STATE_OFF), + # in-range -> below + ([15, 5], POSITION_BELOW, STATE_OFF), + ], +) +async def test_sensor_in_range_no_hysteresis( + hass: HomeAssistant, + vals: list[float | str | None], + expected_position: str, + expected_state: str, +) -> None: """Test if source is within the range.""" config = { - "binary_sensor": { - "platform": "threshold", - "lower": "10", - "upper": "20", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + CONF_LOWER: "10", + CONF_UPPER: "20", + CONF_ENTITY_ID: "sensor.test_monitored", } } - assert await async_setup_component(hass, "binary_sensor", config) + assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() - # Set the monitored sensor's state to the lower threshold - hass.states.async_set("sensor.test_monitored", 10) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - # Set the monitored sensor's state to the upper threshold - hass.states.async_set("sensor.test_monitored", 20) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - hass.states.async_set( - "sensor.test_monitored", - 16, - {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" + assert 
state.attributes[ATTR_LOWER] == float( + config[Platform.BINARY_SENSOR][CONF_LOWER] ) - await hass.async_block_till_done() + assert state.attributes[ATTR_UPPER] == float( + config[Platform.BINARY_SENSOR][CONF_UPPER] + ) + assert state.attributes[ATTR_HYSTERESIS] == 0.0 + assert state.attributes[ATTR_TYPE] == TYPE_RANGE + + for val in vals: + hass.states.async_set("sensor.test_monitored", val) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["entity_id"] == "sensor.test_monitored" - assert state.attributes["sensor_value"] == 16 - assert state.attributes["position"] == "in_range" - assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) - assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) - assert state.attributes["hysteresis"] == 0.0 - assert state.attributes["type"] == "range" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", 9) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 21) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", "cat") - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "unknown" - assert state.state == "unknown" - - hass.states.async_set("sensor.test_monitored", 21) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" + assert state.attributes[ATTR_POSITION] == expected_position + assert state.state == expected_state -async def test_sensor_in_range_with_hysteresis(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("vals", "expected_position", "expected_state"), + [ + ([12], POSITION_IN_RANGE, STATE_ON), # lower threshold + hysteresis + ([12, 22], POSITION_IN_RANGE, STATE_ON), # upper threshold + hysteresis + ([12, 22, 18], POSITION_IN_RANGE, STATE_ON), # upper threshold - hysteresis + ([12, 22, 18, 16], POSITION_IN_RANGE, STATE_ON), + ([12, 22, 18, 16, 8], POSITION_IN_RANGE, STATE_ON), + ([12, 22, 18, 16, 8, 7], POSITION_BELOW, STATE_OFF), + ([12, 22, 18, 16, 8, 7, 12], POSITION_BELOW, STATE_OFF), + ([12, 22, 18, 16, 8, 7, 12, 13], POSITION_IN_RANGE, STATE_ON), + ([12, 22, 18, 16, 8, 7, 12, 13, 22], POSITION_IN_RANGE, STATE_ON), + ([12, 22, 18, 16, 8, 7, 12, 13, 22, 23], POSITION_ABOVE, STATE_OFF), + ([12, 22, 18, 16, 8, 7, 12, 13, 22, 23, 18], POSITION_ABOVE, STATE_OFF), + ([12, 22, 18, 16, 8, 7, 12, 13, 22, 23, 18, 17], POSITION_IN_RANGE, STATE_ON), + ( + [12, 22, 18, 16, 8, 7, 12, 13, 22, 23, 18, 17, "cat"], + POSITION_UNKNOWN, + STATE_UNKNOWN, + ), + ( + [12, 22, 18, 16, 8, 7, 12, 13, 22, 23, 18, 17, "cat", 17], + POSITION_IN_RANGE, + STATE_ON, + ), + ([17, None], POSITION_UNKNOWN, STATE_UNKNOWN), + # upper threshold -> lower threshold + ([20, 10], POSITION_IN_RANGE, STATE_ON), + # in-range -> upper threshold + ([15, 20], POSITION_IN_RANGE, STATE_ON), + # in-range -> lower threshold + ([15, 10], POSITION_IN_RANGE, STATE_ON), + # below -> above + ([5, 25], POSITION_ABOVE, STATE_OFF), + # above -> below + ([25, 5], POSITION_BELOW, STATE_OFF), + # in-range -> above + ([15, 25], POSITION_ABOVE, 
STATE_OFF), + # in-range -> below + ([15, 5], POSITION_BELOW, STATE_OFF), + # below -> lower threshold + ([5, 10], POSITION_BELOW, STATE_OFF), + # below -> in-range -> lower threshold + ([5, 15, 10], POSITION_IN_RANGE, STATE_ON), + # above -> upper threshold + ([25, 20], POSITION_ABOVE, STATE_OFF), + # above -> in-range -> upper threshold + ([25, 15, 20], POSITION_IN_RANGE, STATE_ON), + ([15, 22.1], POSITION_ABOVE, STATE_OFF), # in-range -> above hysteresis edge + ([15, 7.9], POSITION_BELOW, STATE_OFF), # in-range -> below hysteresis edge + ([7, 11.9], POSITION_BELOW, STATE_OFF), + ([23, 18.1], POSITION_ABOVE, STATE_OFF), + ], +) +async def test_sensor_in_range_with_hysteresis( + hass: HomeAssistant, + vals: list[float | str | None], + expected_position: str, + expected_state: str, +) -> None: """Test if source is within the range.""" config = { - "binary_sensor": { - "platform": "threshold", - "lower": "10", - "upper": "20", - "hysteresis": "2", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + CONF_LOWER: "10", + CONF_UPPER: "20", + CONF_HYSTERESIS: "2", + CONF_ENTITY_ID: "sensor.test_monitored", } } - assert await async_setup_component(hass, "binary_sensor", config) + assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() - # Set the monitored sensor's state to the lower threshold - hysteresis - hass.states.async_set("sensor.test_monitored", 8) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - # Set the monitored sensor's state to the lower threshold + hysteresis - hass.states.async_set("sensor.test_monitored", 12) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - # Set the monitored sensor's state to the upper threshold + hysteresis - hass.states.async_set("sensor.test_monitored", 22) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - # Set the monitored sensor's state to the upper threshold - hysteresis - hass.states.async_set("sensor.test_monitored", 18) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - hass.states.async_set( - "sensor.test_monitored", - 16, - {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" + assert state.attributes[ATTR_LOWER] == float( + config[Platform.BINARY_SENSOR][CONF_LOWER] ) - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.threshold") - - assert state.attributes["entity_id"] == "sensor.test_monitored" - assert state.attributes["sensor_value"] == 16 - assert state.attributes["position"] == "in_range" - assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) - assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) - assert state.attributes["hysteresis"] == float( - config["binary_sensor"]["hysteresis"] + assert state.attributes[ATTR_UPPER] == float( + config[Platform.BINARY_SENSOR][CONF_UPPER] ) - assert state.attributes["type"] == "range" - assert state.state == "on" + assert state.attributes[ATTR_HYSTERESIS] == 2.0 + assert 
state.attributes[ATTR_TYPE] == TYPE_RANGE - hass.states.async_set("sensor.test_monitored", 8) - await hass.async_block_till_done() + for val in vals: + hass.states.async_set("sensor.test_monitored", val) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", 7) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 12) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "below" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 13) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", 22) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", 23) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 18) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "above" - assert state.state == "off" - - hass.states.async_set("sensor.test_monitored", 17) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" - - hass.states.async_set("sensor.test_monitored", "cat") - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "unknown" - assert state.state == "unknown" - - hass.states.async_set("sensor.test_monitored", 17) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "in_range" - assert state.state == "on" + assert state.attributes[ATTR_POSITION] == expected_position + assert state.state == expected_state async def test_sensor_in_range_unknown_state( @@ -478,15 +420,15 @@ async def test_sensor_in_range_unknown_state( ) -> None: """Test if source is within the range.""" config = { - "binary_sensor": { - "platform": "threshold", - "lower": "10", - "upper": "20", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + CONF_LOWER: "10", + CONF_UPPER: "20", + CONF_ENTITY_ID: "sensor.test_monitored", } } - assert await async_setup_component(hass, "binary_sensor", config) + assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() hass.states.async_set( @@ -498,26 +440,30 @@ async def test_sensor_in_range_unknown_state( state = hass.states.get("binary_sensor.threshold") - assert state.attributes["entity_id"] == "sensor.test_monitored" - assert state.attributes["sensor_value"] == 16 - assert state.attributes["position"] == "in_range" - assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) - assert state.attributes["upper"] == 
float(config["binary_sensor"]["upper"]) - assert state.attributes["hysteresis"] == 0.0 - assert state.attributes["type"] == "range" - assert state.state == "on" + assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" + assert state.attributes[ATTR_SENSOR_VALUE] == 16 + assert state.attributes[ATTR_POSITION] == POSITION_IN_RANGE + assert state.attributes[ATTR_LOWER] == float( + config[Platform.BINARY_SENSOR][CONF_LOWER] + ) + assert state.attributes[ATTR_UPPER] == float( + config[Platform.BINARY_SENSOR][CONF_UPPER] + ) + assert state.attributes[ATTR_HYSTERESIS] == 0.0 + assert state.attributes[ATTR_TYPE] == TYPE_RANGE + assert state.state == STATE_ON hass.states.async_set("sensor.test_monitored", STATE_UNKNOWN) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "unknown" - assert state.state == "unknown" + assert state.attributes[ATTR_POSITION] == POSITION_UNKNOWN + assert state.state == STATE_UNKNOWN hass.states.async_set("sensor.test_monitored", STATE_UNAVAILABLE) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["position"] == "unknown" - assert state.state == "unknown" + assert state.attributes[ATTR_POSITION] == POSITION_UNKNOWN + assert state.state == STATE_UNKNOWN assert "State is not numerical" not in caplog.text @@ -525,53 +471,57 @@ async def test_sensor_in_range_unknown_state( async def test_sensor_lower_zero_threshold(hass: HomeAssistant) -> None: """Test if a lower threshold of zero is set.""" config = { - "binary_sensor": { - "platform": "threshold", - "lower": "0", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + CONF_LOWER: "0", + CONF_ENTITY_ID: "sensor.test_monitored", } } - assert await async_setup_component(hass, "binary_sensor", config) + assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() hass.states.async_set("sensor.test_monitored", 16) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["type"] == "lower" - assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) - assert state.state == "off" + assert state.attributes[ATTR_TYPE] == TYPE_LOWER + assert state.attributes[ATTR_LOWER] == float( + config[Platform.BINARY_SENSOR][CONF_LOWER] + ) + assert state.state == STATE_OFF hass.states.async_set("sensor.test_monitored", -3) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.state == "on" + assert state.state == STATE_ON async def test_sensor_upper_zero_threshold(hass: HomeAssistant) -> None: """Test if an upper threshold of zero is set.""" config = { - "binary_sensor": { - "platform": "threshold", - "upper": "0", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + CONF_UPPER: "0", + CONF_ENTITY_ID: "sensor.test_monitored", } } - assert await async_setup_component(hass, "binary_sensor", config) + assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() hass.states.async_set("sensor.test_monitored", -10) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes["type"] == "upper" - assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) - assert state.state == "off" + assert state.attributes[ATTR_TYPE] == TYPE_UPPER + 
assert state.attributes[ATTR_UPPER] == float( + config[Platform.BINARY_SENSOR][CONF_UPPER] + ) + assert state.state == STATE_OFF hass.states.async_set("sensor.test_monitored", 2) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.state == "on" + assert state.state == STATE_ON async def test_sensor_no_lower_upper( @@ -579,16 +529,16 @@ async def test_sensor_no_lower_upper( ) -> None: """Test if no lower or upper has been provided.""" config = { - "binary_sensor": { - "platform": "threshold", - "entity_id": "sensor.test_monitored", + Platform.BINARY_SENSOR: { + CONF_PLATFORM: "threshold", + CONF_ENTITY_ID: "sensor.test_monitored", } } - await async_setup_component(hass, "binary_sensor", config) + await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() - assert "Lower or Upper thresholds not provided" in caplog.text + assert "Lower or Upper thresholds are not provided" in caplog.text async def test_device_id( @@ -618,11 +568,11 @@ async def test_device_id( data={}, domain=DOMAIN, options={ - "entity_id": "sensor.test_source", - "hysteresis": 0.0, - "lower": -2.0, - "name": "Threshold", - "upper": None, + CONF_ENTITY_ID: "sensor.test_source", + CONF_HYSTERESIS: 0.0, + CONF_LOWER: -2.0, + CONF_NAME: "Threshold", + CONF_UPPER: None, }, title="Threshold", ) diff --git a/tests/components/tibber/conftest.py b/tests/components/tibber/conftest.py index 0b48531bde1..441a9d0b888 100644 --- a/tests/components/tibber/conftest.py +++ b/tests/components/tibber/conftest.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch import pytest +from homeassistant.components.recorder import Recorder from homeassistant.components.tibber.const import DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant @@ -26,7 +27,7 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture async def mock_tibber_setup( - config_entry: MockConfigEntry, hass: HomeAssistant + recorder_mock: Recorder, config_entry: MockConfigEntry, hass: HomeAssistant ) -> AsyncGenerator[MagicMock]: """Mock tibber entry setup.""" unique_user_id = "unique_user_id" diff --git a/tests/components/tibber/test_config_flow.py b/tests/components/tibber/test_config_flow.py index 28b590a29d2..0c12c4a247b 100644 --- a/tests/components/tibber/test_config_flow.py +++ b/tests/components/tibber/test_config_flow.py @@ -5,7 +5,11 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from aiohttp import ClientError import pytest -from tibber import FatalHttpException, InvalidLogin, RetryableHttpException +from tibber import ( + FatalHttpExceptionError, + InvalidLoginError, + RetryableHttpExceptionError, +) from homeassistant import config_entries from homeassistant.components.recorder import Recorder @@ -66,9 +70,9 @@ async def test_create_entry(recorder_mock: Recorder, hass: HomeAssistant) -> Non [ (TimeoutError, ERR_TIMEOUT), (ClientError, ERR_CLIENT), - (InvalidLogin(401), ERR_TOKEN), - (RetryableHttpException(503), ERR_CLIENT), - (FatalHttpException(404), ERR_CLIENT), + (InvalidLoginError(401), ERR_TOKEN), + (RetryableHttpExceptionError(503), ERR_CLIENT), + (FatalHttpExceptionError(404), ERR_CLIENT), ], ) async def test_create_entry_exceptions( diff --git a/tests/components/tibber/test_diagnostics.py b/tests/components/tibber/test_diagnostics.py index 34ecb63dfec..16c735596d0 100644 --- a/tests/components/tibber/test_diagnostics.py +++ 
b/tests/components/tibber/test_diagnostics.py @@ -19,12 +19,9 @@ async def test_entry_diagnostics( config_entry, ) -> None: """Test config entry diagnostics.""" - with ( - patch( - "tibber.Tibber.update_info", - return_value=None, - ), - patch("homeassistant.components.tibber.discovery.async_load_platform"), + with patch( + "tibber.Tibber.update_info", + return_value=None, ): assert await async_setup_component(hass, "tibber", {}) diff --git a/tests/components/tibber/test_notify.py b/tests/components/tibber/test_notify.py index 69af92c4d5d..9b731e78bf6 100644 --- a/tests/components/tibber/test_notify.py +++ b/tests/components/tibber/test_notify.py @@ -6,7 +6,6 @@ from unittest.mock import MagicMock import pytest from homeassistant.components.recorder import Recorder -from homeassistant.components.tibber import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -19,18 +18,8 @@ async def test_notification_services( notify_state = hass.states.get("notify.tibber") assert notify_state is not None - # Assert legacy notify service hass been added - assert hass.services.has_service("notify", DOMAIN) - - # Test legacy notify service - service = "tibber" - service_data = {"message": "The message", "title": "A title"} - await hass.services.async_call("notify", service, service_data, blocking=True) calls: MagicMock = mock_tibber_setup.send_notification - calls.assert_called_once_with(message="The message", title="A title") - calls.reset_mock() - # Test notify entity service service = "send_message" service_data = { @@ -44,15 +33,6 @@ async def test_notification_services( calls.side_effect = TimeoutError - with pytest.raises(HomeAssistantError): - # Test legacy notify service - await hass.services.async_call( - "notify", - service="tibber", - service_data={"message": "The message", "title": "A title"}, - blocking=True, - ) - with pytest.raises(HomeAssistantError): # Test notify entity service await hass.services.async_call( diff --git a/tests/components/tibber/test_repairs.py b/tests/components/tibber/test_repairs.py deleted file mode 100644 index 89e85e5f8e1..00000000000 --- a/tests/components/tibber/test_repairs.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Test loading of the Tibber config entry.""" - -from http import HTTPStatus -from unittest.mock import MagicMock - -from homeassistant.components.recorder import Recorder -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from tests.typing import ClientSessionGenerator - - -async def test_repair_flow( - recorder_mock: Recorder, - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_tibber_setup: MagicMock, - hass_client: ClientSessionGenerator, -) -> None: - """Test unloading the entry.""" - - # Test legacy notify service - service = "tibber" - service_data = {"message": "The message", "title": "A title"} - await hass.services.async_call("notify", service, service_data, blocking=True) - calls: MagicMock = mock_tibber_setup.send_notification - - calls.assert_called_once_with(message="The message", title="A title") - calls.reset_mock() - - http_client = await hass_client() - # Assert the issue is present - assert issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_tibber_{service}", - ) - assert len(issue_registry.issues) == 1 - - url = RepairsFlowIndexView.url - resp = await http_client.post( - 
url, json={"handler": "notify", "issue_id": f"migrate_notify_tibber_{service}"} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - # Simulate the users confirmed the repair flow - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await http_client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data["type"] == "create_entry" - await hass.async_block_till_done() - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_tibber_{service}", - ) - assert len(issue_registry.issues) == 0 diff --git a/tests/components/tibber/test_services.py b/tests/components/tibber/test_services.py index e9bee3ba31f..dc6f5d2789d 100644 --- a/tests/components/tibber/test_services.py +++ b/tests/components/tibber/test_services.py @@ -1,6 +1,5 @@ """Test service for Tibber integration.""" -import asyncio import datetime as dt from unittest.mock import MagicMock @@ -8,195 +7,104 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.tibber.const import DOMAIN -from homeassistant.components.tibber.services import PRICE_SERVICE_NAME, __get_prices -from homeassistant.core import ServiceCall +from homeassistant.components.tibber.services import PRICE_SERVICE_NAME +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -STARTTIME = dt.datetime.fromtimestamp(1615766400) +START_TIME = dt.datetime.fromtimestamp(1615766400).replace(tzinfo=dt.UTC) def generate_mock_home_data(): """Create mock data from the tibber connection.""" - tomorrow = STARTTIME + dt.timedelta(days=1) + tomorrow = START_TIME + dt.timedelta(days=1) mock_homes = [ MagicMock( name="first_home", - info={ - "viewer": { - "home": { - "currentSubscription": { - "priceInfo": { - "today": [ - { - "startsAt": STARTTIME.isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "startsAt": ( - STARTTIME + dt.timedelta(hours=1) - ).isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - "tomorrow": [ - { - "startsAt": tomorrow.isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "startsAt": ( - tomorrow + dt.timedelta(hours=1) - ).isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - } - } - } - } + price_total={ + START_TIME.isoformat(): 0.36914, + (START_TIME + dt.timedelta(hours=1)).isoformat(): 0.36914, + tomorrow.isoformat(): 0.46914, + (tomorrow + dt.timedelta(hours=1)).isoformat(): 0.46914, + }, + price_level={ + START_TIME.isoformat(): "VERY_EXPENSIVE", + (START_TIME + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", + tomorrow.isoformat(): "VERY_EXPENSIVE", + (tomorrow + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", }, ), MagicMock( name="second_home", - info={ - "viewer": { - "home": { - "currentSubscription": { - "priceInfo": { - "today": [ - { - "startsAt": STARTTIME.isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "startsAt": ( - STARTTIME + dt.timedelta(hours=1) - ).isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - "tomorrow": [ - { - "startsAt": tomorrow.isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "startsAt": ( - tomorrow + dt.timedelta(hours=1) - ).isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - } - } - } - } + price_total={ + START_TIME.isoformat(): 0.36914, + 
(START_TIME + dt.timedelta(hours=1)).isoformat(): 0.36914, + tomorrow.isoformat(): 0.46914, + (tomorrow + dt.timedelta(hours=1)).isoformat(): 0.46914, + }, + price_level={ + START_TIME.isoformat(): "VERY_EXPENSIVE", + (START_TIME + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", + tomorrow.isoformat(): "VERY_EXPENSIVE", + (tomorrow + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", }, ), ] + # set name again, as the name is special in mock objects + # see documentation: https://docs.python.org/3/library/unittest.mock.html#mock-names-and-the-name-attribute mock_homes[0].name = "first_home" mock_homes[1].name = "second_home" return mock_homes -def create_mock_tibber_connection(): - """Create a mock tibber connection.""" - tibber_connection = MagicMock() - tibber_connection.get_homes.return_value = generate_mock_home_data() - return tibber_connection - - -def create_mock_hass(): - """Create a mock hass object.""" - mock_hass = MagicMock - mock_hass.data = {"tibber": create_mock_tibber_connection()} - return mock_hass - - +@pytest.mark.parametrize( + "data", + [ + {}, + {"start": START_TIME.isoformat()}, + { + "start": START_TIME.isoformat(), + "end": (START_TIME + dt.timedelta(days=1)).isoformat(), + }, + ], +) async def test_get_prices( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, freezer: FrozenDateTimeFactory, + data, ) -> None: - """Test __get_prices with mock data.""" - freezer.move_to(STARTTIME) - tomorrow = STARTTIME + dt.timedelta(days=1) - call = ServiceCall( - DOMAIN, - PRICE_SERVICE_NAME, - {"start": STARTTIME.date().isoformat(), "end": tomorrow.date().isoformat()}, + """Test get_prices with mock data.""" + freezer.move_to(START_TIME) + mock_tibber_setup.get_homes.return_value = generate_mock_home_data() + + result = await hass.services.async_call( + DOMAIN, PRICE_SERVICE_NAME, data, blocking=True, return_response=True ) - - result = await __get_prices(call, hass=create_mock_hass()) + await hass.async_block_till_done() assert result == { "prices": { "first_home": [ { - "start_time": STARTTIME, - "price": 0.46914, + "start_time": START_TIME.isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, + "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": STARTTIME, - "price": 0.46914, + "start_time": START_TIME.isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - } - } - - -async def test_get_prices_no_input( - freezer: FrozenDateTimeFactory, -) -> None: - """Test __get_prices with no input.""" - freezer.move_to(STARTTIME) - call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {}) - - result = await __get_prices(call, hass=create_mock_hass()) - - assert result == { - "prices": { - "first_home": [ - { - "start_time": STARTTIME, - "price": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - "second_home": [ - { - "start_time": STARTTIME, - "price": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, + "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, ], @@ -205,39 +113,47 @@ async def test_get_prices_no_input( async def 
test_get_prices_start_tomorrow( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, freezer: FrozenDateTimeFactory, ) -> None: - """Test __get_prices with start date tomorrow.""" - freezer.move_to(STARTTIME) - tomorrow = STARTTIME + dt.timedelta(days=1) - call = ServiceCall( - DOMAIN, PRICE_SERVICE_NAME, {"start": tomorrow.date().isoformat()} - ) + """Test get_prices with start date tomorrow.""" + freezer.move_to(START_TIME) + tomorrow = START_TIME + dt.timedelta(days=1) - result = await __get_prices(call, hass=create_mock_hass()) + mock_tibber_setup.get_homes.return_value = generate_mock_home_data() + + result = await hass.services.async_call( + DOMAIN, + PRICE_SERVICE_NAME, + {"start": tomorrow.isoformat()}, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() assert result == { "prices": { "first_home": [ { - "start_time": tomorrow, + "start_time": tomorrow.isoformat(), "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": tomorrow + dt.timedelta(hours=1), + "start_time": (tomorrow + dt.timedelta(hours=1)).isoformat(), "price": 0.46914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": tomorrow, + "start_time": tomorrow.isoformat(), "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": tomorrow + dt.timedelta(hours=1), + "start_time": (tomorrow + dt.timedelta(hours=1)).isoformat(), "price": 0.46914, "level": "VERY_EXPENSIVE", }, @@ -246,13 +162,115 @@ async def test_get_prices_start_tomorrow( } -async def test_get_prices_invalid_input() -> None: - """Test __get_prices with invalid input.""" +@pytest.mark.parametrize( + "start_time", + [ + START_TIME.isoformat(), + (START_TIME + dt.timedelta(hours=4)) + .replace(tzinfo=dt.timezone(dt.timedelta(hours=4))) + .isoformat(), + ], +) +async def test_get_prices_with_timezones( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + start_time: str, +) -> None: + """Test get_prices with timezone and without.""" + freezer.move_to(START_TIME) - call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {"start": "test"}) - task = asyncio.create_task(__get_prices(call, hass=create_mock_hass())) + mock_tibber_setup.get_homes.return_value = generate_mock_home_data() - with pytest.raises(ServiceValidationError) as excinfo: - await task + result = await hass.services.async_call( + DOMAIN, + PRICE_SERVICE_NAME, + {"start": start_time}, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() - assert "Invalid datetime provided." 
in str(excinfo.value) + assert result == { + "prices": { + "first_home": [ + { + "start_time": START_TIME.isoformat(), + "price": 0.36914, + "level": "VERY_EXPENSIVE", + }, + { + "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), + "price": 0.36914, + "level": "VERY_EXPENSIVE", + }, + ], + "second_home": [ + { + "start_time": START_TIME.isoformat(), + "price": 0.36914, + "level": "VERY_EXPENSIVE", + }, + { + "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), + "price": 0.36914, + "level": "VERY_EXPENSIVE", + }, + ], + } + } + + +@pytest.mark.parametrize( + "start_time", + [ + (START_TIME + dt.timedelta(hours=2)).isoformat(), + (START_TIME + dt.timedelta(hours=2)) + .astimezone(tz=dt.timezone(dt.timedelta(hours=5))) + .isoformat(), + (START_TIME + dt.timedelta(hours=2)) + .astimezone(tz=dt.timezone(dt.timedelta(hours=8))) + .isoformat(), + (START_TIME + dt.timedelta(hours=2)) + .astimezone(tz=dt.timezone(dt.timedelta(hours=-8))) + .isoformat(), + ], +) +async def test_get_prices_with_wrong_timezones( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + start_time: str, +) -> None: + """Test get_prices with incorrect time and/or timezone. We expect an empty list.""" + freezer.move_to(START_TIME) + tomorrow = START_TIME + dt.timedelta(days=1) + + mock_tibber_setup.get_homes.return_value = generate_mock_home_data() + + result = await hass.services.async_call( + DOMAIN, + PRICE_SERVICE_NAME, + {"start": start_time, "end": tomorrow.isoformat()}, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() + + assert result == {"prices": {"first_home": [], "second_home": []}} + + +async def test_get_prices_invalid_input( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, +) -> None: + """Test get_prices with invalid input.""" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + PRICE_SERVICE_NAME, + {"start": "test"}, + blocking=True, + return_response=True, + ) diff --git a/tests/components/tile/test_config_flow.py b/tests/components/tile/test_config_flow.py index 87fe976ca3f..849be41d560 100644 --- a/tests/components/tile/test_config_flow.py +++ b/tests/components/tile/test_config_flow.py @@ -6,13 +6,15 @@ import pytest from pytile.errors import InvalidAuthError, TileError from homeassistant.components.tile import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_USERNAME +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("mock_login_response", "errors"), @@ -77,12 +79,10 @@ async def test_import_entry(hass: HomeAssistant, config, mock_pytile) -> None: async def test_step_reauth( - hass: HomeAssistant, config, config_entry, setup_config_entry + hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=config - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/todo/__init__.py b/tests/components/todo/__init__.py 
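
The Tibber services hunk above adds a comment (with a link to the unittest.mock documentation) explaining why mock_homes[i].name is assigned after construction. A small self-contained illustration of that gotcha:

from unittest.mock import MagicMock

home = MagicMock(name="first_home")    # "name" here only labels the mock's repr
assert not isinstance(home.name, str)  # .name is an auto-created child mock
home.name = "first_home"               # hence the explicit assignment in the tests
assert home.name == "first_home"
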
index dfee74599cd..0138e561fad 100644 --- a/tests/components/todo/__init__.py +++ b/tests/components/todo/__init__.py @@ -1 +1,63 @@ """Tests for the To-do integration.""" + +from homeassistant.components.todo import DOMAIN, TodoItem, TodoListEntity +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from tests.common import MockConfigEntry, MockPlatform, mock_platform + +TEST_DOMAIN = "test" + + +class MockFlow(ConfigFlow): + """Test flow.""" + + +class MockTodoListEntity(TodoListEntity): + """Test todo list entity.""" + + def __init__(self, items: list[TodoItem] | None = None) -> None: + """Initialize entity.""" + self._attr_todo_items = items or [] + + @property + def items(self) -> list[TodoItem]: + """Return the items in the To-do list.""" + return self._attr_todo_items + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Add an item to the To-do list.""" + self._attr_todo_items.append(item) + + async def async_delete_todo_items(self, uids: list[str]) -> None: + """Delete an item in the To-do list.""" + self._attr_todo_items = [item for item in self.items if item.uid not in uids] + + +async def create_mock_platform( + hass: HomeAssistant, + entities: list[TodoListEntity], +) -> MockConfigEntry: + """Create a todo platform with the specified entities.""" + + async def async_setup_entry_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test todo platform via config entry.""" + async_add_entities(entities) + + mock_platform( + hass, + f"{TEST_DOMAIN}.{DOMAIN}", + MockPlatform(async_setup_entry=async_setup_entry_platform), + ) + + config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/todo/conftest.py b/tests/components/todo/conftest.py new file mode 100644 index 00000000000..bcee60e1d96 --- /dev/null +++ b/tests/components/todo/conftest.py @@ -0,0 +1,92 @@ +"""Fixtures for the todo component tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock + +import pytest + +from homeassistant.components.todo import ( + DOMAIN, + TodoItem, + TodoItemStatus, + TodoListEntity, + TodoListEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .
import TEST_DOMAIN, MockFlow, MockTodoListEntity + +from tests.common import MockModule, mock_config_flow, mock_integration, mock_platform + + +@pytest.fixture(autouse=True) +def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: + """Mock config flow.""" + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + + with mock_config_flow(TEST_DOMAIN, MockFlow): + yield + + +@pytest.fixture(autouse=True) +def mock_setup_integration(hass: HomeAssistant) -> None: + """Fixture to set up a mock integration.""" + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, + config_entry: ConfigEntry, + ) -> bool: + await hass.config_entries.async_unload_platforms(config_entry, [Platform.TODO]) + return True + + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + + +@pytest.fixture(autouse=True) +async def set_time_zone(hass: HomeAssistant) -> None: + """Set the time zone for the tests that keeps UTC-6 all year round.""" + await hass.config.async_set_time_zone("America/Regina") + + +@pytest.fixture(name="test_entity_items") +def mock_test_entity_items() -> list[TodoItem]: + """Fixture that creates the items returned by the test entity.""" + return [ + TodoItem(summary="Item #1", uid="1", status=TodoItemStatus.NEEDS_ACTION), + TodoItem(summary="Item #2", uid="2", status=TodoItemStatus.COMPLETED), + ] + + +@pytest.fixture(name="test_entity") +def mock_test_entity(test_entity_items: list[TodoItem]) -> TodoListEntity: + """Fixture that creates a test TodoList entity with mock service calls.""" + entity1 = MockTodoListEntity(test_entity_items) + entity1.entity_id = "todo.entity1" + entity1._attr_supported_features = ( + TodoListEntityFeature.CREATE_TODO_ITEM + | TodoListEntityFeature.UPDATE_TODO_ITEM + | TodoListEntityFeature.DELETE_TODO_ITEM + | TodoListEntityFeature.MOVE_TODO_ITEM + ) + entity1.async_create_todo_item = AsyncMock(wraps=entity1.async_create_todo_item) + entity1.async_update_todo_item = AsyncMock() + entity1.async_delete_todo_items = AsyncMock(wraps=entity1.async_delete_todo_items) + entity1.async_move_todo_item = AsyncMock() + return entity1 diff --git a/tests/components/todo/test_init.py b/tests/components/todo/test_init.py index b62505b14b4..8e8c010f758 100644 --- a/tests/components/todo/test_init.py +++ b/tests/components/todo/test_init.py @@ -1,9 +1,7 @@ """Tests for the todo integration.""" -from collections.abc import Generator import datetime from typing import Any -from unittest.mock import AsyncMock import zoneinfo import pytest @@ -26,25 +24,21 @@ from homeassistant.components.todo import ( TodoServices, intent as todo_intent, ) -from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow -from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, Platform +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ( + HomeAssistantError, + ServiceNotSupported, + ServiceValidationError, +) from homeassistant.helpers import
intent -from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - MockModule, - MockPlatform, - mock_config_flow, - mock_integration, - mock_platform, -) +from . import MockTodoListEntity, create_mock_platform + from tests.typing import WebSocketGenerator -TEST_DOMAIN = "test" ITEM_1 = { "uid": "1", "summary": "Item #1", @@ -59,130 +53,6 @@ TEST_TIMEZONE = zoneinfo.ZoneInfo("America/Regina") TEST_OFFSET = "-06:00" -class MockFlow(ConfigFlow): - """Test flow.""" - - -class MockTodoListEntity(TodoListEntity): - """Test todo list entity.""" - - def __init__(self, items: list[TodoItem] | None = None) -> None: - """Initialize entity.""" - self._attr_todo_items = items or [] - - @property - def items(self) -> list[TodoItem]: - """Return the items in the To-do list.""" - return self._attr_todo_items - - async def async_create_todo_item(self, item: TodoItem) -> None: - """Add an item to the To-do list.""" - self._attr_todo_items.append(item) - - async def async_delete_todo_items(self, uids: list[str]) -> None: - """Delete an item in the To-do list.""" - self._attr_todo_items = [item for item in self.items if item.uid not in uids] - - -@pytest.fixture(autouse=True) -def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: - """Mock config flow.""" - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - - with mock_config_flow(TEST_DOMAIN, MockFlow): - yield - - -@pytest.fixture(autouse=True) -def mock_setup_integration(hass: HomeAssistant) -> None: - """Fixture to set up a mock integration.""" - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_unload_entry_init( - hass: HomeAssistant, - config_entry: ConfigEntry, - ) -> bool: - await hass.config_entries.async_unload_platforms(config_entry, [Platform.TODO]) - return True - - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - async_unload_entry=async_unload_entry_init, - ), - ) - - -@pytest.fixture(autouse=True) -async def set_time_zone(hass: HomeAssistant) -> None: - """Set the time zone for the tests that keesp UTC-6 all year round.""" - await hass.config.async_set_time_zone("America/Regina") - - -async def create_mock_platform( - hass: HomeAssistant, - entities: list[TodoListEntity], -) -> MockConfigEntry: - """Create a todo platform with the specified entities.""" - - async def async_setup_entry_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test event platform via config entry.""" - async_add_entities(entities) - - mock_platform( - hass, - f"{TEST_DOMAIN}.{DOMAIN}", - MockPlatform(async_setup_entry=async_setup_entry_platform), - ) - - config_entry = MockConfigEntry(domain=TEST_DOMAIN) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry - - -@pytest.fixture(name="test_entity_items") -def mock_test_entity_items() -> list[TodoItem]: - """Fixture that creates the items returned by the test entity.""" - return [ - TodoItem(summary="Item #1", uid="1", status=TodoItemStatus.NEEDS_ACTION), - TodoItem(summary="Item #2", uid="2", 
status=TodoItemStatus.COMPLETED), - ] - - -@pytest.fixture(name="test_entity") -def mock_test_entity(test_entity_items: list[TodoItem]) -> TodoListEntity: - """Fixture that creates a test TodoList entity with mock service calls.""" - entity1 = MockTodoListEntity(test_entity_items) - entity1.entity_id = "todo.entity1" - entity1._attr_supported_features = ( - TodoListEntityFeature.CREATE_TODO_ITEM - | TodoListEntityFeature.UPDATE_TODO_ITEM - | TodoListEntityFeature.DELETE_TODO_ITEM - | TodoListEntityFeature.MOVE_TODO_ITEM - ) - entity1.async_create_todo_item = AsyncMock(wraps=entity1.async_create_todo_item) - entity1.async_update_todo_item = AsyncMock() - entity1.async_delete_todo_items = AsyncMock(wraps=entity1.async_delete_todo_items) - entity1.async_move_todo_item = AsyncMock() - return entity1 - - async def test_unload_entry( hass: HomeAssistant, test_entity: TodoListEntity, @@ -1075,14 +945,15 @@ async def test_unsupported_service( payload: dict[str, Any] | None, ) -> None: """Test a To-do list that does not support features.""" - + # Fetch translations + await async_setup_component(hass, "homeassistant", "") entity1 = TodoListEntity() entity1.entity_id = "todo.entity1" await create_mock_platform(hass, [entity1]) with pytest.raises( - HomeAssistantError, - match="does not support this service", + ServiceNotSupported, + match=f"Entity todo.entity1 does not support action {DOMAIN}.{service_name}", ): await hass.services.async_call( DOMAIN, @@ -1141,14 +1012,17 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {ATTR_ITEM: {"value": "beer"}, "name": {"value": "list 1"}}, + {ATTR_ITEM: {"value": " beer "}, "name": {"value": "list 1"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.success_results[0].name == "list 1" + assert response.success_results[0].type == intent.IntentResponseTargetType.ENTITY + assert response.success_results[0].id == entity1.entity_id assert len(entity1.items) == 1 assert len(entity2.items) == 0 - assert entity1.items[0].summary == "beer" + assert entity1.items[0].summary == "beer" # summary is trimmed assert entity1.items[0].status == TodoItemStatus.NEEDS_ACTION entity1.items.clear() diff --git a/tests/components/tolo/test_config_flow.py b/tests/components/tolo/test_config_flow.py index 9dcca4b704f..73382944cf0 100644 --- a/tests/components/tolo/test_config_flow.py +++ b/tests/components/tolo/test_config_flow.py @@ -31,7 +31,7 @@ def coordinator_toloclient() -> Mock: Throw exception to abort entry setup and prevent socket IO. Only testing config flow. 
""" with patch( - "homeassistant.components.tolo.ToloClient", side_effect=Exception + "homeassistant.components.tolo.coordinator.ToloClient", side_effect=Exception ) as toloclient: yield toloclient diff --git a/tests/components/tomato/test_device_tracker.py b/tests/components/tomato/test_device_tracker.py index 9484d3393d7..f50d999548f 100644 --- a/tests/components/tomato/test_device_tracker.py +++ b/tests/components/tomato/test_device_tracker.py @@ -7,7 +7,7 @@ import requests import requests_mock import voluptuous as vol -from homeassistant.components.device_tracker import DOMAIN +from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN import homeassistant.components.tomato.device_tracker as tomato from homeassistant.const import ( CONF_HOST, @@ -68,9 +68,9 @@ def mock_session_send(): def test_config_missing_optional_params(hass: HomeAssistant, mock_session_send) -> None: """Test the setup without optional parameters.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "password", @@ -94,9 +94,9 @@ def test_config_missing_optional_params(hass: HomeAssistant, mock_session_send) def test_config_default_nonssl_port(hass: HomeAssistant, mock_session_send) -> None: """Test the setup without a default port set without ssl enabled.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "password", @@ -113,9 +113,9 @@ def test_config_default_nonssl_port(hass: HomeAssistant, mock_session_send) -> N def test_config_default_ssl_port(hass: HomeAssistant, mock_session_send) -> None: """Test the setup without a default port set with ssl enabled.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_SSL: True, CONF_USERNAME: "foo", @@ -135,9 +135,9 @@ def test_config_verify_ssl_but_no_ssl_enabled( ) -> None: """Test the setup with a string with ssl_verify but ssl not enabled.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: False, @@ -169,9 +169,9 @@ def test_config_valid_verify_ssl_path(hass: HomeAssistant, mock_session_send) -> Representing the absolute path to a CA certificate bundle. 
""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -200,9 +200,9 @@ def test_config_valid_verify_ssl_path(hass: HomeAssistant, mock_session_send) -> def test_config_valid_verify_ssl_bool(hass: HomeAssistant, mock_session_send) -> None: """Test the setup with a bool for ssl_verify.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -233,7 +233,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, # No Host, CONF_PORT: 1234, CONF_SSL: True, @@ -246,7 +246,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: -123456789, # Bad Port CONF_SSL: True, @@ -259,7 +259,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -272,7 +272,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -285,7 +285,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -301,9 +301,9 @@ def test_config_errors() -> None: def test_config_bad_credentials(hass: HomeAssistant, mock_exception_logger) -> None: """Test the setup with bad credentials.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "i_am", CONF_PASSWORD: "an_imposter", @@ -324,9 +324,9 @@ def test_config_bad_credentials(hass: HomeAssistant, mock_exception_logger) -> N def test_bad_response(hass: HomeAssistant, mock_exception_logger) -> None: """Test the setup with bad response from router.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", @@ -347,9 +347,9 @@ def test_bad_response(hass: HomeAssistant, mock_exception_logger) -> None: def test_scan_devices(hass: HomeAssistant, mock_exception_logger) -> None: """Test scanning for new devices.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", @@ -366,9 +366,9 @@ def test_scan_devices(hass: HomeAssistant, mock_exception_logger) -> None: def test_bad_connection(hass: HomeAssistant, mock_exception_logger) -> None: """Test the router with a connection error.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + 
DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", @@ -394,9 +394,9 @@ def test_bad_connection(hass: HomeAssistant, mock_exception_logger) -> None: def test_router_timeout(hass: HomeAssistant, mock_exception_logger) -> None: """Test the router with a timeout error.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", @@ -422,9 +422,9 @@ def test_router_timeout(hass: HomeAssistant, mock_exception_logger) -> None: def test_get_device_name(hass: HomeAssistant, mock_exception_logger) -> None: """Test getting device names.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: tomato.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", diff --git a/tests/components/toon/test_config_flow.py b/tests/components/toon/test_config_flow.py index 492e2a220ad..1ad5ea1ca3d 100644 --- a/tests/components/toon/test_config_flow.py +++ b/tests/components/toon/test_config_flow.py @@ -6,11 +6,11 @@ from unittest.mock import patch import pytest from toonapi import Agreement, ToonError -from homeassistant.components.toon.const import CONF_AGREEMENT, CONF_MIGRATE, DOMAIN -from homeassistant.config import async_process_ha_core_config +from homeassistant.components.toon.const import CONF_AGREEMENT, DOMAIN from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.setup import async_setup_component @@ -324,7 +324,8 @@ async def test_import_migration( flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 - assert flows[0]["context"][CONF_MIGRATE] == old_entry.entry_id + flow = hass.config_entries.flow._progress[flows[0]["flow_id"]] + assert flow.migrate_entry == old_entry.entry_id state = config_entry_oauth2_flow._encode_jwt( hass, diff --git a/tests/components/totalconnect/common.py b/tests/components/totalconnect/common.py index 6e9bb28a9b6..828cad71e07 100644 --- a/tests/components/totalconnect/common.py +++ b/tests/components/totalconnect/common.py @@ -1,17 +1,23 @@ """Common methods used across tests for TotalConnect.""" +from typing import Any from unittest.mock import patch from total_connect_client import ArmingState, ResultCode, ZoneStatus, ZoneType -from homeassistant.components.totalconnect.const import CONF_USERCODES, DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.components.totalconnect.const import ( + AUTO_BYPASS, + CODE_REQUIRED, + CONF_USERCODES, + DOMAIN, +) +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -LOCATION_ID = "123456" +LOCATION_ID = 123456 DEVICE_INFO_BASIC_1 = { "DeviceID": "987654", @@ -341,7 +347,7 @@ RESPONSE_ZONE_BYPASS_FAILURE = { USERNAME = "username@me.com" PASSWORD = "password" -USERCODES = 
{123456: "7890"} +USERCODES = {LOCATION_ID: "7890"} CONFIG_DATA = { CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, @@ -349,6 +355,9 @@ CONFIG_DATA = { } CONFIG_DATA_NO_USERCODES = {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD} +OPTIONS_DATA = {AUTO_BYPASS: False, CODE_REQUIRED: False} +OPTIONS_DATA_CODE_REQUIRED = {AUTO_BYPASS: False, CODE_REQUIRED: True} + PARTITION_DETAILS_1 = { "PartitionID": 1, "ArmingState": ArmingState.DISARMED.value, @@ -395,10 +404,19 @@ TOTALCONNECT_REQUEST = ( ) -async def setup_platform(hass: HomeAssistant, platform: Platform) -> MockConfigEntry: +async def setup_platform( + hass: HomeAssistant, platform: Any, code_required: bool = False +) -> MockConfigEntry: """Set up the TotalConnect platform.""" # first set up a config entry and add it to hass - mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA) + if code_required: + mock_entry = MockConfigEntry( + domain=DOMAIN, data=CONFIG_DATA, options=OPTIONS_DATA_CODE_REQUIRED + ) + else: + mock_entry = MockConfigEntry( + domain=DOMAIN, data=CONFIG_DATA, options=OPTIONS_DATA + ) mock_entry.add_to_hass(hass) responses = [ @@ -426,7 +444,7 @@ async def setup_platform(hass: HomeAssistant, platform: Platform) -> MockConfigE async def init_integration(hass: HomeAssistant) -> MockConfigEntry: """Set up the TotalConnect integration.""" # first set up a config entry and add it to hass - mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA) + mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA, options=OPTIONS_DATA) mock_entry.add_to_hass(hass) responses = [ diff --git a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr index 0b8b8bb79ac..ef7cb386b33 100644 --- a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr +++ b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr @@ -41,7 +41,7 @@ 'code_format': None, 'cover_tampered': False, 'friendly_name': 'test', - 'location_id': '123456', + 'location_id': 123456, 'location_name': 'test', 'low_battery': False, 'partition': 1, @@ -99,7 +99,7 @@ 'code_format': None, 'cover_tampered': False, 'friendly_name': 'test Partition 2', - 'location_id': '123456', + 'location_id': 123456, 'location_name': 'test partition 2', 'low_battery': False, 'partition': 2, diff --git a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr index 81cfecbc530..1eccff1dfc3 100644 --- a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr +++ b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr @@ -37,7 +37,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'smoke', 'friendly_name': 'Fire', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '2', }), @@ -87,7 +87,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Fire Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '2', }), @@ -137,7 +137,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Fire Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '2', }), @@ -187,7 +187,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'gas', 'friendly_name': 'Gas', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '3', }), @@ -237,7 +237,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Gas Battery', - 
'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '3', }), @@ -287,7 +287,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Gas Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '3', }), @@ -337,7 +337,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'safety', 'friendly_name': 'Medical', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '5', }), @@ -387,7 +387,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'motion', 'friendly_name': 'Motion', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '4', }), @@ -437,7 +437,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Motion Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '4', }), @@ -487,7 +487,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Motion Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '4', }), @@ -537,7 +537,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'door', 'friendly_name': 'Security', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '1', }), @@ -587,7 +587,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Security Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '1', }), @@ -637,7 +637,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Security Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '1', }), @@ -687,7 +687,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'problem', 'friendly_name': 'Temperature', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': 7, }), @@ -737,7 +737,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Temperature Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': 7, }), @@ -787,7 +787,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Temperature Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': 7, }), @@ -837,7 +837,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'test Battery', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_battery', @@ -885,7 +885,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'carbon_monoxide', 'friendly_name': 'test Carbon monoxide', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_carbon_monoxide', @@ -932,7 +932,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'test Police emergency', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_police_emergency', @@ -980,7 +980,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'test Power', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_power', @@ -1028,7 +1028,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'smoke', 'friendly_name': 'test Smoke', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_smoke', @@ -1076,7 +1076,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'test Tamper', - 'location_id': '123456', + 'location_id': 123456, 
}), 'context': , 'entity_id': 'binary_sensor.test_tamper', @@ -1124,7 +1124,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'door', 'friendly_name': 'Unknown', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '6', }), @@ -1174,7 +1174,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Unknown Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '6', }), @@ -1224,7 +1224,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Unknown Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '6', }), diff --git a/tests/components/totalconnect/test_alarm_control_panel.py b/tests/components/totalconnect/test_alarm_control_panel.py index a4f8333e8a8..bc76f7243ca 100644 --- a/tests/components/totalconnect/test_alarm_control_panel.py +++ b/tests/components/totalconnect/test_alarm_control_panel.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion from total_connect_client.exceptions import ( @@ -11,7 +12,10 @@ from total_connect_client.exceptions import ( TotalConnectError, ) -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.components.totalconnect.alarm_control_panel import ( SERVICE_ALARM_ARM_AWAY_INSTANT, SERVICE_ALARM_ARM_HOME_INSTANT, @@ -25,23 +29,15 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity -from homeassistant.util import dt as dt_util from .common import ( + LOCATION_ID, RESPONSE_ARM_FAILURE, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY, @@ -60,6 +56,7 @@ from .common import ( RESPONSE_UNKNOWN, RESPONSE_USER_CODE_INVALID, TOTALCONNECT_REQUEST, + USERCODES, setup_platform, ) @@ -89,15 +86,17 @@ async def test_attributes( assert mock_request.call_count == 1 -async def test_arm_home_success(hass: HomeAssistant) -> None: +async def test_arm_home_success( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test arm home method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_STAY] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED - assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -105,12 +104,13 @@ async def test_arm_home_success(hass: HomeAssistant) -> None: ) assert 
mock_request.call_count == 2 - async_fire_time_changed(hass, dt_util.utcnow() + DELAY) + freezer.tick(DELAY) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_HOME + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_HOME # second partition should not be armed - assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED async def test_arm_home_failure(hass: HomeAssistant) -> None: @@ -120,7 +120,7 @@ async def test_arm_home_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -128,32 +128,34 @@ async def test_arm_home_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_HOME, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect failed to arm home test." - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Failed to arm home test" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 - # usercode is invalid + # config entry usercode is invalid with pytest.raises(HomeAssistantError) as err: await hass.services.async_call( ALARM_DOMAIN, SERVICE_ALARM_ARM_HOME, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect usercode is invalid. 
Did not arm home" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Usercode is invalid, did not arm home" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_arm_home_instant_success(hass: HomeAssistant) -> None: +async def test_arm_home_instant_success( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test arm home instant method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_STAY] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED - assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -161,10 +163,11 @@ async def test_arm_home_instant_success(hass: HomeAssistant) -> None: ) assert mock_request.call_count == 2 - async_fire_time_changed(hass, dt_util.utcnow() + DELAY) + freezer.tick(DELAY) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_HOME + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_HOME async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: @@ -174,7 +177,7 @@ async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -182,8 +185,8 @@ async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: DOMAIN, SERVICE_ALARM_ARM_HOME_INSTANT, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect failed to arm home instant test." - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Failed to arm home instant test" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -192,25 +195,24 @@ async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: DOMAIN, SERVICE_ALARM_ARM_HOME_INSTANT, DATA, blocking=True ) await hass.async_block_till_done() - assert ( - f"{err.value}" - == "TotalConnect usercode is invalid. 
Did not arm home instant" - ) - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Usercode is invalid, did not arm home instant" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_arm_away_instant_success(hass: HomeAssistant) -> None: +async def test_arm_away_instant_success( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test arm home instant method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED - assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -218,10 +220,11 @@ async def test_arm_away_instant_success(hass: HomeAssistant) -> None: ) assert mock_request.call_count == 2 - async_fire_time_changed(hass, dt_util.utcnow() + DELAY) + freezer.tick(DELAY) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: @@ -231,7 +234,7 @@ async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -239,8 +242,8 @@ async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: DOMAIN, SERVICE_ALARM_ARM_AWAY_INSTANT, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect failed to arm away instant test." - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Failed to arm away instant test" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -249,24 +252,23 @@ async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: DOMAIN, SERVICE_ALARM_ARM_AWAY_INSTANT, DATA, blocking=True ) await hass.async_block_till_done() - assert ( - f"{err.value}" - == "TotalConnect usercode is invalid. 
Did not arm away instant" - ) - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Usercode is invalid, did not arm away instant" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_arm_away_success(hass: HomeAssistant) -> None: +async def test_arm_away_success( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test arm away method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -274,10 +276,11 @@ async def test_arm_away_success(hass: HomeAssistant) -> None: ) assert mock_request.call_count == 2 - async_fire_time_changed(hass, dt_util.utcnow() + DELAY) + freezer.tick(DELAY) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY async def test_arm_away_failure(hass: HomeAssistant) -> None: @@ -287,7 +290,7 @@ async def test_arm_away_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -295,8 +298,8 @@ async def test_arm_away_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_AWAY, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect failed to arm away test." - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Failed to arm away test" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -305,21 +308,23 @@ async def test_arm_away_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_AWAY, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect usercode is invalid. 
Did not arm away" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Usercode is invalid, did not arm away" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_disarm_success(hass: HomeAssistant) -> None: +async def test_disarm_success( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test disarm method success.""" responses = [RESPONSE_ARMED_AWAY, RESPONSE_DISARM_SUCCESS, RESPONSE_DISARMED] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY assert mock_request.call_count == 1 await hass.services.async_call( @@ -327,10 +332,11 @@ async def test_disarm_success(hass: HomeAssistant) -> None: ) assert mock_request.call_count == 2 - async_fire_time_changed(hass, dt_util.utcnow() + DELAY) + freezer.tick(DELAY) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED async def test_disarm_failure(hass: HomeAssistant) -> None: @@ -344,7 +350,7 @@ async def test_disarm_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -352,8 +358,8 @@ async def test_disarm_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect failed to disarm test." - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert f"{err.value}" == "Failed to disarm test" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY assert mock_request.call_count == 2 # usercode is invalid @@ -362,21 +368,61 @@ async def test_disarm_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect usercode is invalid. 
Did not disarm" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert f"{err.value}" == "Usercode is invalid, did not disarm" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_arm_night_success(hass: HomeAssistant) -> None: +async def test_disarm_code_required( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test disarm with code.""" + responses = [RESPONSE_ARMED_AWAY, RESPONSE_DISARM_SUCCESS, RESPONSE_DISARMED] + await setup_platform(hass, ALARM_DOMAIN, code_required=True) + with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: + await async_update_entity(hass, ENTITY_ID) + await hass.async_block_till_done() + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY + assert mock_request.call_count == 1 + + # runtime user entered code is bad + DATA_WITH_CODE = DATA.copy() + DATA_WITH_CODE["code"] = "666" + with pytest.raises(ServiceValidationError, match="Incorrect code entered"): + await hass.services.async_call( + ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA_WITH_CODE, blocking=True + ) + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY + # code check means the call to total_connect never happens + assert mock_request.call_count == 1 + + # runtime user entered code that is in config + DATA_WITH_CODE["code"] = USERCODES[LOCATION_ID] + await hass.services.async_call( + ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA_WITH_CODE, blocking=True + ) + await hass.async_block_till_done() + assert mock_request.call_count == 2 + + freezer.tick(DELAY) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert mock_request.call_count == 3 + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + + +async def test_arm_night_success( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test arm night method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_NIGHT] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -384,10 +430,11 @@ async def test_arm_night_success(hass: HomeAssistant) -> None: ) assert mock_request.call_count == 2 - async_fire_time_changed(hass, dt_util.utcnow() + DELAY) + freezer.tick(DELAY) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_NIGHT + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_NIGHT async def test_arm_night_failure(hass: HomeAssistant) -> None: @@ -397,7 +444,7 @@ async def test_arm_night_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ 
-405,8 +452,8 @@ async def test_arm_night_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_NIGHT, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect failed to arm night test." - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Failed to arm night test" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -415,21 +462,21 @@ async def test_arm_night_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_NIGHT, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "TotalConnect usercode is invalid. Did not arm night" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert f"{err.value}" == "Usercode is invalid, did not arm night" + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_arming(hass: HomeAssistant) -> None: +async def test_arming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> None: """Test arming.""" responses = [RESPONSE_DISARMED, RESPONSE_SUCCESS, RESPONSE_ARMING] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -437,20 +484,21 @@ async def test_arming(hass: HomeAssistant) -> None: ) assert mock_request.call_count == 2 - async_fire_time_changed(hass, dt_util.utcnow() + DELAY) + freezer.tick(DELAY) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMING + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMING -async def test_disarming(hass: HomeAssistant) -> None: +async def test_disarming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> None: """Test disarming.""" responses = [RESPONSE_ARMED_AWAY, RESPONSE_SUCCESS, RESPONSE_DISARMING] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY assert mock_request.call_count == 1 await hass.services.async_call( @@ -458,10 +506,11 @@ async def test_disarming(hass: HomeAssistant) -> None: ) assert mock_request.call_count == 2 - async_fire_time_changed(hass, dt_util.utcnow() + DELAY) + freezer.tick(DELAY) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMING + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMING async def test_triggered_fire(hass: HomeAssistant) -> None: @@ -472,7 +521,7 @@ async def test_triggered_fire(hass: HomeAssistant) -> None: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) - assert state.state == 
STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED assert state.attributes.get("triggered_source") == "Fire/Smoke" assert mock_request.call_count == 1 @@ -485,7 +534,7 @@ async def test_triggered_police(hass: HomeAssistant) -> None: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED assert state.attributes.get("triggered_source") == "Police/Medical" assert mock_request.call_count == 1 @@ -498,7 +547,7 @@ async def test_triggered_carbon_monoxide(hass: HomeAssistant) -> None: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED assert state.attributes.get("triggered_source") == "Carbon Monoxide" assert mock_request.call_count == 1 @@ -510,7 +559,10 @@ async def test_armed_custom(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_CUSTOM_BYPASS + assert ( + hass.states.get(ENTITY_ID).state + == AlarmControlPanelState.ARMED_CUSTOM_BYPASS + ) assert mock_request.call_count == 1 @@ -525,7 +577,9 @@ async def test_unknown(hass: HomeAssistant) -> None: assert mock_request.call_count == 1 -async def test_other_update_failures(hass: HomeAssistant) -> None: +async def test_other_update_failures( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test other failures seen during updates.""" responses = [ RESPONSE_DISARMED, @@ -540,35 +594,40 @@ async def test_other_update_failures(hass: HomeAssistant) -> None: # first things work as planned await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 # then an error: ServiceUnavailable --> UpdateFailed - async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE assert mock_request.call_count == 2 # works again - async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 2) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 3 # then an error: TotalConnectError --> UpdateFailed - async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 3) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE assert mock_request.call_count == 4 # works again - async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 4) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state 
== AlarmControlPanelState.DISARMED assert mock_request.call_count == 5 # unknown TotalConnect status via ValueError - async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 5) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE assert mock_request.call_count == 6 diff --git a/tests/components/totalconnect/test_config_flow.py b/tests/components/totalconnect/test_config_flow.py index 98de748faea..86419bff817 100644 --- a/tests/components/totalconnect/test_config_flow.py +++ b/tests/components/totalconnect/test_config_flow.py @@ -6,10 +6,11 @@ from total_connect_client.exceptions import AuthenticationError from homeassistant.components.totalconnect.const import ( AUTO_BYPASS, + CODE_REQUIRED, CONF_USERCODES, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -141,9 +142,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -240,11 +239,11 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["step_id"] == "init" result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={AUTO_BYPASS: True} + result["flow_id"], user_input={AUTO_BYPASS: True, CODE_REQUIRED: False} ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert config_entry.options == {AUTO_BYPASS: True} + assert config_entry.options == {AUTO_BYPASS: True, CODE_REQUIRED: False} await hass.async_block_till_done() assert await hass.config_entries.async_unload(config_entry.entry_id) diff --git a/tests/components/touchline_sl/__init__.py b/tests/components/touchline_sl/__init__.py new file mode 100644 index 00000000000..c22e9d329db --- /dev/null +++ b/tests/components/touchline_sl/__init__.py @@ -0,0 +1 @@ +"""Tests for the Roth Touchline SL integration.""" diff --git a/tests/components/touchline_sl/conftest.py b/tests/components/touchline_sl/conftest.py new file mode 100644 index 00000000000..4edeb048f5b --- /dev/null +++ b/tests/components/touchline_sl/conftest.py @@ -0,0 +1,61 @@ +"""Common fixtures for the Roth Touchline SL tests.""" + +from collections.abc import Generator +from typing import NamedTuple +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.touchline_sl.const import DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from tests.common import MockConfigEntry + + +class FakeModule(NamedTuple): + """Fake Module used for unit testing only.""" + + name: str + id: str + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.touchline_sl.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_touchlinesl_client() -> Generator[AsyncMock]: + """Mock a pytouchlinesl client.""" + with ( + patch( + "homeassistant.components.touchline_sl.TouchlineSL", + autospec=True, + ) as mock_client, + patch( + 
"homeassistant.components.touchline_sl.config_flow.TouchlineSL", + new=mock_client, + ), + ): + client = mock_client.return_value + client.user_id.return_value = 12345 + client.modules.return_value = [FakeModule(name="Foobar", id="deadbeef")] + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="TouchlineSL", + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + unique_id="12345", + ) diff --git a/tests/components/touchline_sl/test_config_flow.py b/tests/components/touchline_sl/test_config_flow.py new file mode 100644 index 00000000000..992fa2bdb3e --- /dev/null +++ b/tests/components/touchline_sl/test_config_flow.py @@ -0,0 +1,113 @@ +"""Test the Roth Touchline SL config flow.""" + +from unittest.mock import AsyncMock + +import pytest +from pytouchlinesl.client import RothAPIError + +from homeassistant.components.touchline_sl.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +RESULT_UNIQUE_ID = "12345" + +CONFIG_DATA = { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", +} + + +async def test_config_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_touchlinesl_client: AsyncMock +) -> None: + """Test the happy path where the provided username/password result in a new entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == CONFIG_DATA + assert result["result"].unique_id == RESULT_UNIQUE_ID + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error_base"), + [ + (RothAPIError(status=401), "invalid_auth"), + (RothAPIError(status=502), "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_config_flow_failure_api_exceptions( + hass: HomeAssistant, + exception: Exception, + error_base: str, + mock_setup_entry: AsyncMock, + mock_touchlinesl_client: AsyncMock, +) -> None: + """Test for invalid credentials or API connection errors, and that the form can recover.""" + mock_touchlinesl_client.user_id.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_base} + + # "Fix" the problem, and try again. 
+ mock_touchlinesl_client.user_id.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == CONFIG_DATA + assert result["result"].unique_id == RESULT_UNIQUE_ID + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_config_flow_failure_adding_non_unique_account( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_touchlinesl_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the config flow fails when user tries to add duplicate accounts.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index c63ca9139f1..809ab3bfd78 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -6,6 +6,7 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from kasa import ( + BaseProtocol, Device, DeviceConfig, DeviceConnectionParameters, @@ -17,15 +18,18 @@ from kasa import ( Module, ) from kasa.interfaces import Fan, Light, LightEffect, LightState -from kasa.protocol import BaseProtocol +from kasa.smart.modules.alarm import Alarm from syrupy import SnapshotAssertion +from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN from homeassistant.components.tplink import ( + CONF_AES_KEYS, CONF_ALIAS, + CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, - CONF_DEVICE_CONFIG, CONF_HOST, CONF_MODEL, + CONF_USES_HTTP, Credentials, ) from homeassistant.components.tplink.const import DOMAIN @@ -54,35 +58,48 @@ DHCP_FORMATTED_MAC_ADDRESS = MAC_ADDRESS.replace(":", "") MAC_ADDRESS2 = "11:22:33:44:55:66" DEFAULT_ENTRY_TITLE = f"{ALIAS} {MODEL}" CREDENTIALS_HASH_LEGACY = "" +CONN_PARAMS_LEGACY = DeviceConnectionParameters( + DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Xor +) DEVICE_CONFIG_LEGACY = DeviceConfig(IP_ADDRESS) -DEVICE_CONFIG_DICT_LEGACY = DEVICE_CONFIG_LEGACY.to_dict(exclude_credentials=True) +DEVICE_CONFIG_DICT_LEGACY = { + k: v for k, v in DEVICE_CONFIG_LEGACY.to_dict().items() if k != "credentials" +} CREDENTIALS = Credentials("foo", "bar") CREDENTIALS_HASH_AES = "AES/abcdefghijklmnopqrstuvabcdefghijklmnopqrstuv==" CREDENTIALS_HASH_KLAP = "KLAP/abcdefghijklmnopqrstuv==" +CONN_PARAMS_KLAP = DeviceConnectionParameters( + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap +) DEVICE_CONFIG_KLAP = DeviceConfig( IP_ADDRESS, credentials=CREDENTIALS, - connection_type=DeviceConnectionParameters( - DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap - ), + connection_type=CONN_PARAMS_KLAP, uses_http=True, ) +CONN_PARAMS_AES = DeviceConnectionParameters( + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes +) +AES_KEYS = {"private": "foo", "public": "bar"} DEVICE_CONFIG_AES = DeviceConfig( IP_ADDRESS2, credentials=CREDENTIALS, - connection_type=DeviceConnectionParameters( - DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes - ), + connection_type=CONN_PARAMS_AES, uses_http=True, + 
aes_keys=AES_KEYS, ) -DEVICE_CONFIG_DICT_KLAP = DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True) -DEVICE_CONFIG_DICT_AES = DEVICE_CONFIG_AES.to_dict(exclude_credentials=True) - +DEVICE_CONFIG_DICT_KLAP = { + k: v for k, v in DEVICE_CONFIG_KLAP.to_dict().items() if k != "credentials" +} +DEVICE_CONFIG_DICT_AES = { + k: v for k, v in DEVICE_CONFIG_AES.to_dict().items() if k != "credentials" +} CREATE_ENTRY_DATA_LEGACY = { CONF_HOST: IP_ADDRESS, CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, + CONF_CONNECTION_PARAMETERS: CONN_PARAMS_LEGACY.to_dict(), + CONF_USES_HTTP: False, } CREATE_ENTRY_DATA_KLAP = { @@ -90,23 +107,18 @@ CREATE_ENTRY_DATA_KLAP = { CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_KLAP, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, + CONF_CONNECTION_PARAMETERS: CONN_PARAMS_KLAP.to_dict(), + CONF_USES_HTTP: True, } CREATE_ENTRY_DATA_AES = { CONF_HOST: IP_ADDRESS2, CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AES, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AES, + CONF_CONNECTION_PARAMETERS: CONN_PARAMS_AES.to_dict(), + CONF_USES_HTTP: True, + CONF_AES_KEYS: AES_KEYS, } -CONNECTION_TYPE_KLAP = DeviceConnectionParameters( - DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap -) -CONNECTION_TYPE_KLAP_DICT = CONNECTION_TYPE_KLAP.to_dict() -CONNECTION_TYPE_AES = DeviceConnectionParameters( - DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes -) -CONNECTION_TYPE_AES_DICT = CONNECTION_TYPE_AES.to_dict() def _load_feature_fixtures(): @@ -162,12 +174,18 @@ async def snapshot_platform( ), "Please limit the loaded platforms to 1 platform." translations = await async_get_translations(hass, "en", "entity", [DOMAIN]) + unique_device_classes = [] for entity_entry in entity_entries: if entity_entry.translation_key: key = f"component.{DOMAIN}.entity.{entity_entry.domain}.{entity_entry.translation_key}.name" + single_device_class_translation = False + if key not in translations and entity_entry.original_device_class: + if entity_entry.original_device_class not in unique_device_classes: + single_device_class_translation = True + unique_device_classes.append(entity_entry.original_device_class) assert ( - key in translations - ), f"No translation for entity {entity_entry.unique_id}, expected {key}" + (key in translations) or single_device_class_translation + ), f"No translation or non unique device_class for entity {entity_entry.unique_id}, expected {key}" assert entity_entry == snapshot( name=f"{entity_entry.entity_id}-entry" ), f"entity entry snapshot failed for {entity_entry.entity_id}" @@ -179,6 +197,21 @@ async def snapshot_platform( ), f"state snapshot failed for {entity_entry.entity_id}" +async def setup_automation(hass: HomeAssistant, alias: str, entity_id: str) -> None: + """Set up an automation for tests.""" + assert await async_setup_component( + hass, + AUTOMATION_DOMAIN, + { + AUTOMATION_DOMAIN: { + "alias": alias, + "trigger": {"platform": "state", "entity_id": entity_id, "to": "on"}, + "action": {"action": "notify.notify", "metadata": {}, "data": {}}, + } + }, + ) + + def _mock_protocol() -> BaseProtocol: protocol = MagicMock(spec=BaseProtocol) protocol.close = AsyncMock() @@ -383,6 +416,15 @@ def _mocked_fan_module(effect) -> Fan: return fan +def _mocked_alarm_module(device): + alarm = MagicMock(auto_spec=Alarm, name="Mocked alarm") + alarm.active = False + alarm.play = AsyncMock() + alarm.stop = AsyncMock() + + return alarm + + def _mocked_strip_children(features=None, 
alias=None) -> list[Device]: plug0 = _mocked_device( alias="Plug0" if alias is None else alias, @@ -449,14 +491,15 @@ MODULE_TO_MOCK_GEN = { Module.Light: _mocked_light_module, Module.LightEffect: _mocked_light_effect_module, Module.Fan: _mocked_fan_module, + Module.Alarm: _mocked_alarm_module, } -def _patch_discovery(device=None, no_device=False): +def _patch_discovery(device=None, no_device=False, ip_address=IP_ADDRESS): async def _discovery(*args, **kwargs): if no_device: return {} - return {IP_ADDRESS: _mocked_device()} + return {ip_address: device if device else _mocked_device()} return patch("homeassistant.components.tplink.Discover.discover", new=_discovery) diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index ee4530575ce..25a4bd20270 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -1,9 +1,9 @@ """tplink conftest.""" from collections.abc import Generator -import copy from unittest.mock import DEFAULT, AsyncMock, patch +from kasa import DeviceConfig import pytest from homeassistant.components.tplink import DOMAIN @@ -32,21 +32,23 @@ def mock_discovery(): "homeassistant.components.tplink.Discover", discover=DEFAULT, discover_single=DEFAULT, + try_connect_all=DEFAULT, ) as mock_discovery: device = _mocked_device( - device_config=copy.deepcopy(DEVICE_CONFIG_KLAP), + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), credentials_hash=CREDENTIALS_HASH_KLAP, - alias=None, + alias="My Bulb", ) devices = { "127.0.0.1": _mocked_device( - device_config=copy.deepcopy(DEVICE_CONFIG_KLAP), + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), credentials_hash=CREDENTIALS_HASH_KLAP, alias=None, ) } mock_discovery["discover"].return_value = devices mock_discovery["discover_single"].return_value = device + mock_discovery["try_connect_all"].return_value = device mock_discovery["mock_device"] = device yield mock_discovery @@ -57,12 +59,12 @@ def mock_connect(): with patch("homeassistant.components.tplink.Device.connect") as mock_connect: devices = { IP_ADDRESS: _mocked_device( - device_config=DEVICE_CONFIG_KLAP, + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), credentials_hash=CREDENTIALS_HASH_KLAP, ip_address=IP_ADDRESS, ), IP_ADDRESS2: _mocked_device( - device_config=DEVICE_CONFIG_AES, + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_AES.to_dict()), credentials_hash=CREDENTIALS_HASH_AES, mac=MAC_ADDRESS2, ip_address=IP_ADDRESS2, diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index 7cfe979ea25..f60132fd2c2 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -34,6 +34,16 @@ "type": "Switch", "category": "Config" }, + "child_lock": { + "value": true, + "type": "Switch", + "category": "Config" + }, + "pir_enabled": { + "value": true, + "type": "Switch", + "category": "Config" + }, "current_consumption": { "value": 5.23, "type": "Sensor", @@ -150,6 +160,11 @@ "type": "Sensor", "category": "Debug" }, + "check_latest_firmware": { + "value": "", + "type": "Action", + "category": "Info" + }, "thermostat_mode": { "value": "off", "type": "Sensor", @@ -195,6 +210,21 @@ "type": "BinarySensor", "category": "Primary" }, + "motion_detected": { + "value": false, + "type": "BinarySensor", + "category": "Primary" + }, + "alarm": { + "value": false, + "type": "BinarySensor", + "category": "Info" + }, + "reboot": { + "value": "", + "type": "Action", + 
"category": "Debug" + }, "test_alarm": { "value": "", "type": "Action", @@ -283,5 +313,10 @@ "type": "Choice", "category": "Config", "choices": ["low", "normal", "high"] + }, + "water_alert_timestamp": { + "type": "Sensor", + "category": "Info", + "value": "2024-06-24 10:03:11.046643+01:00" } } diff --git a/tests/components/tplink/snapshots/test_binary_sensor.ambr b/tests/components/tplink/snapshots/test_binary_sensor.ambr index cded74da363..4a1cfe5b411 100644 --- a/tests/components/tplink/snapshots/test_binary_sensor.ambr +++ b/tests/components/tplink/snapshots/test_binary_sensor.ambr @@ -206,6 +206,53 @@ 'state': 'off', }) # --- +# name: test_states[binary_sensor.my_device_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.my_device_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion_detected', + 'unique_id': '123456789ABCDEFGH_motion_detected', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.my_device_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'my_device Motion', + }), + 'context': , + 'entity_id': 'binary_sensor.my_device_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_states[binary_sensor.my_device_overheated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -286,53 +333,6 @@ 'unit_of_measurement': None, }) # --- -# name: test_states[binary_sensor.my_device_update-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.my_device_update', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Update', - 'platform': 'tplink', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'update_available', - 'unique_id': '123456789ABCDEFGH_update_available', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[binary_sensor.my_device_update-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'update', - 'friendly_name': 'my_device Update', - }), - 'context': , - 'entity_id': 'binary_sensor.my_device_update', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_states[my_device-entry] DeviceRegistryEntrySnapshot({ 'area_id': None, diff --git a/tests/components/tplink/snapshots/test_button.ambr b/tests/components/tplink/snapshots/test_button.ambr index d6019861804..bb75f4642e1 100644 --- a/tests/components/tplink/snapshots/test_button.ambr +++ b/tests/components/tplink/snapshots/test_button.ambr @@ -1,4 +1,37 @@ # serializer version: 1 +# name: test_states[button.my_device_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ 
+ }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reboot', + 'unique_id': '123456789ABCDEFGH_reboot', + 'unit_of_measurement': None, + }) +# --- # name: test_states[button.my_device_stop_alarm-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tplink/snapshots/test_climate.ambr b/tests/components/tplink/snapshots/test_climate.ambr index ad863fc79ae..8236f332046 100644 --- a/tests/components/tplink/snapshots/test_climate.ambr +++ b/tests/components/tplink/snapshots/test_climate.ambr @@ -42,7 +42,7 @@ # name: test_states[climate.thermostat-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'current_temperature': 20, + 'current_temperature': 20.2, 'friendly_name': 'thermostat', 'hvac_action': , 'hvac_modes': list([ @@ -52,7 +52,7 @@ 'max_temp': 65536, 'min_temp': None, 'supported_features': , - 'temperature': 22, + 'temperature': 22.2, }), 'context': , 'entity_id': 'climate.thermostat', diff --git a/tests/components/tplink/snapshots/test_number.ambr b/tests/components/tplink/snapshots/test_number.ambr index ee06314ffe3..977d2098fb9 100644 --- a/tests/components/tplink/snapshots/test_number.ambr +++ b/tests/components/tplink/snapshots/test_number.ambr @@ -43,7 +43,7 @@ 'capabilities': dict({ 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -79,7 +79,7 @@ 'friendly_name': 'my_device Smooth off', 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'context': , @@ -98,7 +98,7 @@ 'capabilities': dict({ 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -134,7 +134,7 @@ 'friendly_name': 'my_device Smooth on', 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'context': , @@ -153,7 +153,7 @@ 'capabilities': dict({ 'max': 65536, 'min': -10, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -189,7 +189,7 @@ 'friendly_name': 'my_device Temperature offset', 'max': 65536, 'min': -10, - 'mode': , + 'mode': , 'step': 1.0, }), 'context': , @@ -208,7 +208,7 @@ 'capabilities': dict({ 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -244,7 +244,7 @@ 'friendly_name': 'my_device Turn off in', 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'context': , diff --git a/tests/components/tplink/snapshots/test_sensor.ambr b/tests/components/tplink/snapshots/test_sensor.ambr index e639540e552..739f02e51f0 100644 --- a/tests/components/tplink/snapshots/test_sensor.ambr +++ b/tests/components/tplink/snapshots/test_sensor.ambr @@ -358,6 +358,53 @@ 'state': '12', }) # --- +# name: test_states[sensor.my_device_last_water_leak_alert-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_last_water_leak_alert', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last water leak alert', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_alert_timestamp', + 'unique_id': '123456789ABCDEFGH_water_alert_timestamp', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.my_device_last_water_leak_alert-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my_device Last water leak alert', + }), + 'context': , + 'entity_id': 'sensor.my_device_last_water_leak_alert', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-06-24T09:03:11+00:00', + }) +# --- # name: test_states[sensor.my_device_on_since-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -546,7 +593,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , diff --git a/tests/components/tplink/snapshots/test_siren.ambr b/tests/components/tplink/snapshots/test_siren.ambr new file mode 100644 index 00000000000..b144288bd1c --- /dev/null +++ b/tests/components/tplink/snapshots/test_siren.ambr @@ -0,0 +1,84 @@ +# serializer version: 1 +# name: test_states[hub-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'hub', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- +# name: test_states[siren.hub-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'siren', + 'entity_category': None, + 'entity_id': 'siren.hub', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABCDEFGH', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[siren.hub-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'hub', + 'supported_features': , + }), + 'context': , + 'entity_id': 'siren.hub', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/tplink/snapshots/test_switch.ambr b/tests/components/tplink/snapshots/test_switch.ambr index 4354ea1905a..36c630474c8 100644 --- a/tests/components/tplink/snapshots/test_switch.ambr +++ b/tests/components/tplink/snapshots/test_switch.ambr @@ -173,6 +173,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 
'entity_category': , + 'entity_id': 'switch.my_device_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '123456789ABCDEFGH_child_lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Child lock', + }), + 'context': , + 'entity_id': 'switch.my_device_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_fan_sleep_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -265,6 +311,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_motion_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.my_device_motion_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion sensor', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pir_enabled', + 'unique_id': '123456789ABCDEFGH_pir_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_motion_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Motion sensor', + }), + 'context': , + 'entity_id': 'switch.my_device_motion_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_smooth_transitions-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tplink/test_button.py b/tests/components/tplink/test_button.py index 143a882a6cb..a3eb8950336 100644 --- a/tests/components/tplink/test_button.py +++ b/tests/components/tplink/test_button.py @@ -11,7 +11,11 @@ from homeassistant.components.tplink.const import DOMAIN from homeassistant.components.tplink.entity import EXCLUDED_FEATURES from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) from homeassistant.setup import async_setup_component from . import ( @@ -22,6 +26,7 @@ from . import ( _mocked_strip_children, _patch_connect, _patch_discovery, + setup_automation, setup_platform_for_device, snapshot_platform, ) @@ -29,6 +34,53 @@ from . 
import ( from tests.common import MockConfigEntry +@pytest.fixture +def create_deprecated_button_entities( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +): + """Create the entity so it is not ignored by the deprecation check.""" + mock_config_entry.add_to_hass(hass) + + def create_entry(device_name, device_id, key): + unique_id = f"{device_id}_{key}" + + entity_registry.async_get_or_create( + domain=BUTTON_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=f"{device_name}_{key}", + config_entry=mock_config_entry, + ) + + create_entry("my_device", "123456789ABCDEFGH", "stop_alarm") + create_entry("my_device", "123456789ABCDEFGH", "test_alarm") + + +@pytest.fixture +def create_deprecated_child_button_entities( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +): + """Create the entity so it is not ignored by the deprecation check.""" + + def create_entry(device_name, key): + for plug_id in range(2): + unique_id = f"PLUG{plug_id}DEVICEID_{key}" + entity_registry.async_get_or_create( + domain=BUTTON_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=f"my_device_plug{plug_id}_{key}", + config_entry=mock_config_entry, + ) + + create_entry("my_device", "stop_alarm") + create_entry("my_device", "test_alarm") + + @pytest.fixture def mocked_feature_button() -> Feature: """Return mocked tplink binary sensor feature.""" @@ -47,6 +99,7 @@ async def test_states( entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion, + create_deprecated_button_entities, ) -> None: """Test a sensor unique ids.""" features = {description.key for description in BUTTON_DESCRIPTIONS} @@ -66,21 +119,17 @@ async def test_button( hass: HomeAssistant, entity_registry: er.EntityRegistry, mocked_feature_button: Feature, + create_deprecated_button_entities, ) -> None: """Test a sensor unique ids.""" mocked_feature = mocked_feature_button - already_migrated_config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS - ) - already_migrated_config_entry.add_to_hass(hass) - - plug = _mocked_device(alias="my_plug", features=[mocked_feature]) + plug = _mocked_device(alias="my_device", features=[mocked_feature]) with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() # The entity_id is based on standard name from core. 
- entity_id = "button.my_plug_test_alarm" + entity_id = "button.my_device_test_alarm" entity = entity_registry.async_get(entity_id) assert entity assert entity.unique_id == f"{DEVICE_ID}_{mocked_feature.id}" @@ -91,15 +140,13 @@ async def test_button_children( entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, mocked_feature_button: Feature, + create_deprecated_button_entities, + create_deprecated_child_button_entities, ) -> None: """Test a sensor unique ids.""" mocked_feature = mocked_feature_button - already_migrated_config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS - ) - already_migrated_config_entry.add_to_hass(hass) plug = _mocked_device( - alias="my_plug", + alias="my_device", features=[mocked_feature], children=_mocked_strip_children(features=[mocked_feature]), ) @@ -107,13 +154,13 @@ async def test_button_children( await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "button.my_plug_test_alarm" + entity_id = "button.my_device_test_alarm" entity = entity_registry.async_get(entity_id) assert entity device = device_registry.async_get(entity.device_id) for plug_id in range(2): - child_entity_id = f"button.my_plug_plug{plug_id}_test_alarm" + child_entity_id = f"button.my_device_plug{plug_id}_test_alarm" child_entity = entity_registry.async_get(child_entity_id) assert child_entity assert child_entity.unique_id == f"PLUG{plug_id}DEVICEID_{mocked_feature.id}" @@ -127,19 +174,16 @@ async def test_button_press( hass: HomeAssistant, entity_registry: er.EntityRegistry, mocked_feature_button: Feature, + create_deprecated_button_entities, ) -> None: """Test a number entity limits and setting values.""" mocked_feature = mocked_feature_button - already_migrated_config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS - ) - already_migrated_config_entry.add_to_hass(hass) - plug = _mocked_device(alias="my_plug", features=[mocked_feature]) + plug = _mocked_device(alias="my_device", features=[mocked_feature]) with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "button.my_plug_test_alarm" + entity_id = "button.my_device_test_alarm" entity = entity_registry.async_get(entity_id) assert entity assert entity.unique_id == f"{DEVICE_ID}_test_alarm" @@ -151,3 +195,84 @@ async def test_button_press( blocking=True, ) mocked_feature.set_value.assert_called_with(True) + + +async def test_button_not_exists_with_deprecation( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mocked_feature_button: Feature, +) -> None: + """Test deprecated buttons are not created if they don't previously exist.""" + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + config_entry.add_to_hass(hass) + entity_id = "button.my_device_test_alarm" + + assert not hass.states.get(entity_id) + mocked_feature = mocked_feature_button + dev = _mocked_device(alias="my_device", features=[mocked_feature]) + with _patch_discovery(device=dev), _patch_connect(device=dev): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + assert not entity_registry.async_get(entity_id) + assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + assert not hass.states.get(entity_id) + + 
+@pytest.mark.parametrize( + ("entity_disabled", "entity_has_automations"), + [ + pytest.param(False, False, id="without-automations"), + pytest.param(False, True, id="with-automations"), + pytest.param(True, False, id="disabled"), + ], +) +async def test_button_exists_with_deprecation( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + mocked_feature_button: Feature, + entity_disabled: bool, + entity_has_automations: bool, +) -> None: + """Test the deprecated buttons are deleted or raise issues.""" + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + config_entry.add_to_hass(hass) + + object_id = "my_device_test_alarm" + entity_id = f"button.{object_id}" + unique_id = f"{DEVICE_ID}_test_alarm" + issue_id = f"deprecated_entity_{entity_id}_automation.test_automation" + + if entity_has_automations: + await setup_automation(hass, "test_automation", entity_id) + + entity = entity_registry.async_get_or_create( + domain=BUTTON_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=object_id, + config_entry=config_entry, + disabled_by=er.RegistryEntryDisabler.USER if entity_disabled else None, + ) + assert entity.entity_id == entity_id + assert not hass.states.get(entity_id) + + mocked_feature = mocked_feature_button + dev = _mocked_device(alias="my_device", features=[mocked_feature]) + with _patch_discovery(device=dev), _patch_connect(device=dev): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity = entity_registry.async_get(entity_id) + # entity and state will be none if removed from registry + assert (entity is None) == entity_disabled + assert (hass.states.get(entity_id) is None) == entity_disabled + + assert ( + issue_registry.async_get_issue(DOMAIN, issue_id) is not None + ) == entity_has_automations diff --git a/tests/components/tplink/test_climate.py b/tests/components/tplink/test_climate.py index 2f24fa829f9..3a54048e1d6 100644 --- a/tests/components/tplink/test_climate.py +++ b/tests/components/tplink/test_climate.py @@ -45,11 +45,11 @@ async def mocked_hub(hass: HomeAssistant) -> Device: features = [ _mocked_feature( - "temperature", value=20, category=Feature.Category.Primary, unit="celsius" + "temperature", value=20.2, category=Feature.Category.Primary, unit="celsius" ), _mocked_feature( "target_temperature", - value=22, + value=22.2, type_=Feature.Type.Number, category=Feature.Category.Primary, unit="celsius", @@ -94,8 +94,8 @@ async def test_climate( state = hass.states.get(ENTITY_ID) assert state.attributes[ATTR_HVAC_ACTION] is HVACAction.HEATING - assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20 - assert state.attributes[ATTR_TEMPERATURE] == 22 + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.2 + assert state.attributes[ATTR_TEMPERATURE] == 22.2 async def test_states( diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index f90eb985d38..2697696c667 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -1,7 +1,8 @@ """Test the tplink config flow.""" +from contextlib import contextmanager import logging -from unittest.mock import AsyncMock, patch +from unittest.mock import ANY, AsyncMock, patch from kasa import TimeoutError import pytest @@ -16,8 +17,9 @@ from homeassistant.components.tplink import ( DeviceConfig, KasaException, ) +from 
homeassistant.components.tplink.config_flow import TPLinkConfigFlow from homeassistant.components.tplink.const import ( - CONF_CONNECTION_TYPE, + CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, ) @@ -28,23 +30,28 @@ from homeassistant.const import ( CONF_HOST, CONF_MAC, CONF_PASSWORD, + CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . import ( + AES_KEYS, ALIAS, - CONNECTION_TYPE_KLAP_DICT, + CONN_PARAMS_AES, + CONN_PARAMS_KLAP, + CONN_PARAMS_LEGACY, CREATE_ENTRY_DATA_AES, CREATE_ENTRY_DATA_KLAP, CREATE_ENTRY_DATA_LEGACY, CREDENTIALS_HASH_AES, CREDENTIALS_HASH_KLAP, DEFAULT_ENTRY_TITLE, - DEVICE_CONFIG_DICT_AES, + DEVICE_CONFIG_AES, DEVICE_CONFIG_DICT_KLAP, - DEVICE_CONFIG_DICT_LEGACY, + DEVICE_CONFIG_KLAP, + DEVICE_CONFIG_LEGACY, DHCP_FORMATTED_MAC_ADDRESS, IP_ADDRESS, MAC_ADDRESS, @@ -59,9 +66,44 @@ from . import ( from tests.common import MockConfigEntry -async def test_discovery(hass: HomeAssistant) -> None: +@contextmanager +def override_side_effect(mock: AsyncMock, effect): + """Temporarily override a mock side effect and replace afterwards.""" + try: + default_side_effect = mock.side_effect + mock.side_effect = effect + yield mock + finally: + mock.side_effect = default_side_effect + + +@pytest.mark.parametrize( + ("device_config", "expected_entry_data", "credentials_hash"), + [ + pytest.param( + DEVICE_CONFIG_KLAP, CREATE_ENTRY_DATA_KLAP, CREDENTIALS_HASH_KLAP, id="KLAP" + ), + pytest.param( + DEVICE_CONFIG_AES, CREATE_ENTRY_DATA_AES, CREDENTIALS_HASH_AES, id="AES" + ), + pytest.param(DEVICE_CONFIG_LEGACY, CREATE_ENTRY_DATA_LEGACY, None, id="Legacy"), + ], +) +async def test_discovery( + hass: HomeAssistant, device_config, expected_entry_data, credentials_hash +) -> None: """Test setting up discovery.""" - with _patch_discovery(), _patch_single_discovery(), _patch_connect(): + ip_address = device_config.host + device = _mocked_device( + device_config=device_config, + credentials_hash=credentials_hash, + ip_address=ip_address, + ) + with ( + _patch_discovery(device, ip_address=ip_address), + _patch_single_discovery(device), + _patch_connect(device), + ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -91,9 +133,9 @@ async def test_discovery(hass: HomeAssistant) -> None: assert not result2["errors"] with ( - _patch_discovery(), - _patch_single_discovery(), - _patch_connect(), + _patch_discovery(device, ip_address=ip_address), + _patch_single_discovery(device), + _patch_connect(device), patch(f"{MODULE}.async_setup", return_value=True) as mock_setup, patch(f"{MODULE}.async_setup_entry", return_value=True) as mock_setup_entry, ): @@ -105,7 +147,7 @@ async def test_discovery(hass: HomeAssistant) -> None: assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == DEFAULT_ENTRY_TITLE - assert result3["data"] == CREATE_ENTRY_DATA_LEGACY + assert result3["data"] == expected_entry_data mock_setup.assert_called_once() mock_setup_entry.assert_called_once() @@ -130,24 +172,25 @@ async def test_discovery_auth( ) -> None: """Test authenticated discovery.""" - mock_discovery["mock_device"].update.side_effect = AuthenticationError + mock_device = mock_connect["mock_devices"][IP_ADDRESS] + assert mock_device.config == DEVICE_CONFIG_KLAP - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - 
CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, - }, - ) + with override_side_effect(mock_connect["connect"], AuthenticationError): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE: mock_device, + }, + ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] - mock_discovery["mock_device"].update.reset_mock(side_effect=True) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -172,40 +215,43 @@ async def test_discovery_auth( ) async def test_discovery_auth_errors( hass: HomeAssistant, - mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init, error_type, errors_msg, error_placement, ) -> None: - """Test handling of discovery authentication errors.""" - mock_discovery["mock_device"].update.side_effect = AuthenticationError - default_connect_side_effect = mock_connect["connect"].side_effect - mock_connect["connect"].side_effect = error_type + """Test handling of discovery authentication errors. - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, - }, - ) - await hass.async_block_till_done() + Tests for errors received during credential + entry during discovery_auth_confirm. + """ + mock_device = mock_connect["mock_devices"][IP_ADDRESS] + + with override_side_effect(mock_connect["connect"], AuthenticationError): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE: mock_device, + }, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) + with override_side_effect(mock_connect["connect"], error_type): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {error_placement: errors_msg} @@ -213,7 +259,6 @@ async def test_discovery_auth_errors( await hass.async_block_till_done() - mock_connect["connect"].side_effect = default_connect_side_effect result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], { @@ -228,29 +273,29 @@ async def test_discovery_auth_errors( async def test_discovery_new_credentials( hass: HomeAssistant, - mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init, ) -> None: """Test setting up discovery with new credentials.""" - mock_discovery["mock_device"].update.side_effect = AuthenticationError + mock_device = mock_connect["mock_devices"][IP_ADDRESS] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - 
CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, - }, - ) - await hass.async_block_till_done() + with override_side_effect(mock_connect["connect"], AuthenticationError): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE: mock_device, + }, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] - assert mock_connect["connect"].call_count == 0 + assert mock_connect["connect"].call_count == 1 with patch( "homeassistant.components.tplink.config_flow.get_credentials", @@ -260,7 +305,7 @@ async def test_discovery_new_credentials( result["flow_id"], ) - assert mock_connect["connect"].call_count == 1 + assert mock_connect["connect"].call_count == 2 assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "discovery_confirm" @@ -277,48 +322,54 @@ async def test_discovery_new_credentials( async def test_discovery_new_credentials_invalid( hass: HomeAssistant, - mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init, ) -> None: """Test setting up discovery with new invalid credentials.""" - mock_discovery["mock_device"].update.side_effect = AuthenticationError - default_connect_side_effect = mock_connect["connect"].side_effect + mock_device = mock_connect["mock_devices"][IP_ADDRESS] - mock_connect["connect"].side_effect = AuthenticationError - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, - }, - ) - await hass.async_block_till_done() + with ( + patch("homeassistant.components.tplink.Discover.discover", return_value={}), + patch( + "homeassistant.components.tplink.config_flow.get_credentials", + return_value=None, + ), + override_side_effect(mock_connect["connect"], AuthenticationError), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE: mock_device, + }, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] - assert mock_connect["connect"].call_count == 0 + assert mock_connect["connect"].call_count == 1 - with patch( - "homeassistant.components.tplink.config_flow.get_credentials", - return_value=Credentials("fake_user", "fake_pass"), + with ( + patch( + "homeassistant.components.tplink.config_flow.get_credentials", + return_value=Credentials("fake_user", "fake_pass"), + ), + override_side_effect(mock_connect["connect"], AuthenticationError), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], ) - assert mock_connect["connect"].call_count == 1 + assert mock_connect["connect"].call_count == 2 assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "discovery_auth_confirm" await hass.async_block_till_done() - mock_connect["connect"].side_effect = default_connect_side_effect result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], { @@ -577,32 +628,30 @@ async def test_manual_auth_errors( assert not result["errors"] 
mock_discovery["mock_device"].update.side_effect = AuthenticationError - default_connect_side_effect = mock_connect["connect"].side_effect - mock_connect["connect"].side_effect = error_type - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: IP_ADDRESS} - ) + with override_side_effect(mock_connect["connect"], error_type): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_HOST: IP_ADDRESS} + ) assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "user_auth_confirm" assert not result2["errors"] await hass.async_block_till_done() - - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) - await hass.async_block_till_done() + with override_side_effect(mock_connect["connect"], error_type): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done() assert result3["type"] is FlowResultType.FORM assert result3["step_id"] == "user_auth_confirm" assert result3["errors"] == {error_placement: errors_msg} assert result3["description_placeholders"]["error"] == str(error_type) - mock_connect["connect"].side_effect = default_connect_side_effect result4 = await hass.config_entries.flow.async_configure( result3["flow_id"], { @@ -617,6 +666,93 @@ async def test_manual_auth_errors( await hass.async_block_till_done() +@pytest.mark.parametrize( + ("host_str", "host", "port"), + [ + (f"{IP_ADDRESS}:1234", IP_ADDRESS, 1234), + ("[2001:db8:0::1]:4321", "2001:db8:0::1", 4321), + ], +) +async def test_manual_port_override( + hass: HomeAssistant, + mock_connect: AsyncMock, + mock_discovery: AsyncMock, + host_str, + host, + port, +) -> None: + """Test manually setup.""" + mock_discovery["mock_device"].config.port_override = port + mock_discovery["mock_device"].host = host + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + # side_effects to cause auth confirm as the port override usually only + # works with direct connections. 
+ mock_discovery["discover_single"].side_effect = TimeoutError + mock_connect["connect"].side_effect = AuthenticationError + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: host_str} + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "user_auth_confirm" + assert not result2["errors"] + + creds = Credentials("fake_username", "fake_password") + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done() + mock_discovery["try_connect_all"].assert_called_once_with( + host, credentials=creds, port=port, http_client=ANY + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == DEFAULT_ENTRY_TITLE + assert result3["data"] == { + **CREATE_ENTRY_DATA_KLAP, + CONF_PORT: port, + CONF_HOST: host, + } + assert result3["context"]["unique_id"] == MAC_ADDRESS + + +async def test_manual_port_override_invalid( + hass: HomeAssistant, mock_connect: AsyncMock, mock_discovery: AsyncMock +) -> None: + """Test manually setup.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: f"{IP_ADDRESS}:foo"} + ) + await hass.async_block_till_done() + + mock_discovery["discover_single"].assert_called_once_with( + "127.0.0.1", credentials=None, port=None + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == DEFAULT_ENTRY_TITLE + assert result2["data"] == CREATE_ENTRY_DATA_KLAP + assert result2["context"]["unique_id"] == MAC_ADDRESS + + async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: """Test we get the form with discovery and abort for dhcp source when we get both.""" @@ -628,14 +764,26 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, + CONF_DEVICE: _mocked_device(device_config=DEVICE_CONFIG_LEGACY), }, ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["errors"] is None - with _patch_discovery(), _patch_single_discovery(), _patch_connect(): + real_is_matching = TPLinkConfigFlow.is_matching + return_values = [] + + def is_matching(self, other_flow) -> bool: + return_values.append(real_is_matching(self, other_flow)) + return return_values[-1] + + with ( + _patch_discovery(), + _patch_single_discovery(), + _patch_connect(), + patch.object(TPLinkConfigFlow, "is_matching", wraps=is_matching, autospec=True), + ): result2 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -646,6 +794,8 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_in_progress" + # Ensure the is_matching method returned True + assert return_values == [True] with _patch_discovery(), _patch_single_discovery(), _patch_connect(): result3 = await hass.config_entries.flow.async_init( @@ -691,7 +841,7 @@ async def test_discovered_by_discovery_and_dhcp(hass: 
HomeAssistant) -> None: CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, + CONF_DEVICE: _mocked_device(device_config=DEVICE_CONFIG_LEGACY), }, ), ], @@ -745,7 +895,7 @@ async def test_discovered_by_dhcp_or_discovery( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, + CONF_DEVICE: _mocked_device(device_config=DEVICE_CONFIG_LEGACY), }, ), ], @@ -775,9 +925,11 @@ async def test_integration_discovery_with_ip_change( mock_connect: AsyncMock, ) -> None: """Test reauth flow.""" - mock_connect["connect"].side_effect = KasaException() mock_config_entry.add_to_hass(hass) - with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + with ( + patch("homeassistant.components.tplink.Discover.discover", return_value={}), + override_side_effect(mock_connect["connect"], KasaException()), + ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -785,39 +937,57 @@ async def test_integration_discovery_with_ip_change( flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY - assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.1" - - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: "127.0.0.2", - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, - }, + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] + == CONN_PARAMS_LEGACY.to_dict() ) + assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + + mocked_device = _mocked_device(device_config=DEVICE_CONFIG_KLAP) + with override_side_effect(mock_connect["connect"], lambda *_, **__: mocked_device): + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: "127.0.0.2", + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE: mocked_device, + }, + ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() + ) assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_KLAP) + # Do a reload here and check that the + # new config is picked up in setup_entry mock_connect["connect"].reset_mock(side_effect=True) bulb = _mocked_device( device_config=config, mac=mock_config_entry.unique_id, ) - mock_connect["connect"].return_value = bulb - await hass.config_entries.async_reload(mock_config_entry.entry_id) - await hass.async_block_till_done() + + with ( + patch( + "homeassistant.components.tplink.async_create_clientsession", + return_value="Foo", + ), + override_side_effect(mock_connect["connect"], lambda *_, **__: bulb), + ): + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED # Check that init set the new host correctly before calling connect assert config.host == "127.0.0.1" config.host = "127.0.0.2" + config.uses_http = False # Not passed in to new config class + 
config.http_client = "Foo" mock_connect["connect"].assert_awaited_once_with(config=config) @@ -831,8 +1001,6 @@ async def test_integration_discovery_with_connection_change( And that connection_hash is removed as it will be invalid. """ - mock_connect["connect"].side_effect = KasaException() - mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, @@ -840,7 +1008,10 @@ async def test_integration_discovery_with_connection_change( unique_id=MAC_ADDRESS2, ) mock_config_entry.add_to_hass(hass) - with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + with ( + patch("homeassistant.components.tplink.Discover.discover", return_value={}), + override_side_effect(mock_connect["connect"], KasaException()), + ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) @@ -854,43 +1025,57 @@ async def test_integration_discovery_with_connection_change( == 0 ) assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES - assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.2" + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_AES.to_dict() + ) assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES + mock_connect["connect"].reset_mock() NEW_DEVICE_CONFIG = { **DEVICE_CONFIG_DICT_KLAP, - CONF_CONNECTION_TYPE: CONNECTION_TYPE_KLAP_DICT, + "connection_type": CONN_PARAMS_KLAP.to_dict(), CONF_HOST: "127.0.0.2", } config = DeviceConfig.from_dict(NEW_DEVICE_CONFIG) # Reset the connect mock so when the config flow reloads the entry it succeeds - mock_connect["connect"].reset_mock(side_effect=True) + bulb = _mocked_device( device_config=config, mac=mock_config_entry.unique_id, ) - mock_connect["connect"].return_value = bulb - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: "127.0.0.2", - CONF_MAC: MAC_ADDRESS2, - CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: NEW_DEVICE_CONFIG, - }, - ) + with ( + patch( + "homeassistant.components.tplink.async_create_clientsession", + return_value="Foo", + ), + override_side_effect(mock_connect["connect"], lambda *_, **__: bulb), + ): + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: "127.0.0.2", + CONF_MAC: MAC_ADDRESS2, + CONF_ALIAS: ALIAS, + CONF_DEVICE: bulb, + }, + ) await hass.async_block_till_done(wait_background_tasks=True) assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == NEW_DEVICE_CONFIG + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() + ) assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" assert CREDENTIALS_HASH_AES not in mock_config_entry.data assert mock_config_entry.state is ConfigEntryState.LOADED + config.host = "127.0.0.2" + config.uses_http = False # Not passed in to new config class + config.http_client = "Foo" + config.aes_keys = AES_KEYS mock_connect["connect"].assert_awaited_once_with(config=config) @@ -901,17 +1086,18 @@ async def test_dhcp_discovery_with_ip_change( mock_connect: AsyncMock, ) -> None: """Test dhcp discovery with an IP change.""" - mock_connect["connect"].side_effect = KasaException() 
mock_config_entry.add_to_hass(hass) - with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + with ( + patch("homeassistant.components.tplink.Discover.discover", return_value={}), + override_side_effect(mock_connect["connect"], KasaException()), + ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY - assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.1" + assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" discovery_result = await hass.config_entries.flow.async_init( DOMAIN, @@ -925,6 +1111,30 @@ async def test_dhcp_discovery_with_ip_change( assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" +async def test_dhcp_discovery_discover_fail( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test dhcp discovery source cannot discover_single.""" + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 0 + assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + + with override_side_effect(mock_discovery["discover_single"], TimeoutError): + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip="127.0.0.2", macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS + ), + ) + assert discovery_result["type"] is FlowResultType.ABORT + assert discovery_result["reason"] == "cannot_connect" + + async def test_reauth( hass: HomeAssistant, mock_added_config_entry: MockConfigEntry, @@ -950,7 +1160,7 @@ async def test_reauth( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT @@ -959,6 +1169,76 @@ async def test_reauth( await hass.async_block_till_done() +async def test_reauth_try_connect_all( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reauth flow.""" + mock_added_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + assert mock_added_config_entry.state is ConfigEntryState.LOADED + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + with override_side_effect(mock_discovery["discover_single"], TimeoutError): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + credentials = Credentials("fake_username", "fake_password") + mock_discovery["discover_single"].assert_called_once_with( + "127.0.0.1", credentials=credentials, port=None + ) + mock_discovery["try_connect_all"].assert_called_once() + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + + await hass.async_block_till_done() + + +async def test_reauth_try_connect_all_fail( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: 
AsyncMock, +) -> None: + """Test reauth flow.""" + mock_added_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + assert mock_added_config_entry.state is ConfigEntryState.LOADED + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + with ( + override_side_effect(mock_discovery["discover_single"], TimeoutError), + override_side_effect(mock_discovery["try_connect_all"], lambda *_, **__: None), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + credentials = Credentials("fake_username", "fake_password") + mock_discovery["discover_single"].assert_called_once_with( + "127.0.0.1", credentials=credentials, port=None + ) + mock_discovery["try_connect_all"].assert_called_once() + assert result2["errors"] == {"base": "cannot_connect"} + + async def test_reauth_update_with_encryption_change( hass: HomeAssistant, mock_discovery: AsyncMock, @@ -966,8 +1246,7 @@ async def test_reauth_update_with_encryption_change( caplog: pytest.LogCaptureFixture, ) -> None: """Test reauth flow.""" - orig_side_effect = mock_connect["connect"].side_effect - mock_connect["connect"].side_effect = AuthenticationError() + mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, @@ -975,10 +1254,15 @@ async def test_reauth_update_with_encryption_change( unique_id=MAC_ADDRESS2, ) mock_config_entry.add_to_hass(hass) - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_AES.to_dict() + ) assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES - with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + with ( + patch("homeassistant.components.tplink.Discover.discover", return_value={}), + override_side_effect(mock_connect["connect"], AuthenticationError()), + ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR @@ -988,7 +1272,9 @@ async def test_reauth_update_with_encryption_change( assert len(flows) == 1 [result] = flows assert result["step_id"] == "reauth_confirm" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_AES.to_dict() + ) assert CONF_CREDENTIALS_HASH not in mock_config_entry.data new_config = DeviceConfig( @@ -1005,7 +1291,6 @@ async def test_reauth_update_with_encryption_change( mock_connect["mock_devices"]["127.0.0.2"].config = new_config mock_connect["mock_devices"]["127.0.0.2"].credentials_hash = CREDENTIALS_HASH_KLAP - mock_connect["connect"].side_effect = orig_side_effect result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -1017,16 +1302,16 @@ async def test_reauth_update_with_encryption_change( assert "Connection type changed for 127.0.0.2" in caplog.text credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.2", credentials=credentials + "127.0.0.2", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" assert mock_config_entry.state is 
ConfigEntryState.LOADED - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == { - **DEVICE_CONFIG_DICT_KLAP, - CONF_HOST: "127.0.0.2", - } + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() + ) + assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_KLAP @@ -1037,9 +1322,11 @@ async def test_reauth_update_from_discovery( mock_connect: AsyncMock, ) -> None: """Test reauth flow.""" - mock_connect["connect"].side_effect = AuthenticationError mock_config_entry.add_to_hass(hass) - with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + with ( + patch("homeassistant.components.tplink.Discover.discover", return_value={}), + override_side_effect(mock_connect["connect"], AuthenticationError()), + ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -1049,22 +1336,32 @@ async def test_reauth_update_from_discovery( assert len(flows) == 1 [result] = flows assert result["step_id"] == "reauth_confirm" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY - - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, - }, + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] + == CONN_PARAMS_LEGACY.to_dict() ) + + device = _mocked_device( + device_config=DEVICE_CONFIG_KLAP, + mac=mock_config_entry.unique_id, + ) + with override_side_effect(mock_connect["connect"], lambda *_, **__: device): + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE: device, + }, + ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() + ) async def test_reauth_update_from_discovery_with_ip_change( @@ -1074,9 +1371,11 @@ async def test_reauth_update_from_discovery_with_ip_change( mock_connect: AsyncMock, ) -> None: """Test reauth flow.""" - mock_connect["connect"].side_effect = AuthenticationError() mock_config_entry.add_to_hass(hass) - with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + with ( + patch("homeassistant.components.tplink.Discover.discover", return_value={}), + override_side_effect(mock_connect["connect"], AuthenticationError()), + ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR @@ -1085,22 +1384,32 @@ async def test_reauth_update_from_discovery_with_ip_change( assert len(flows) == 1 [result] = flows assert result["step_id"] == "reauth_confirm" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY - - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: "127.0.0.2", - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, 
- }, + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] + == CONN_PARAMS_LEGACY.to_dict() ) + + device = _mocked_device( + device_config=DEVICE_CONFIG_KLAP, + mac=mock_config_entry.unique_id, + ) + with override_side_effect(mock_connect["connect"], lambda *_, **__: device): + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: "127.0.0.2", + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE: device, + }, + ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() + ) assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" @@ -1111,8 +1420,8 @@ async def test_reauth_no_update_if_config_and_ip_the_same( mock_connect: AsyncMock, ) -> None: """Test reauth discovery does not update when the host and config are the same.""" - mock_connect["connect"].side_effect = AuthenticationError() mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry( mock_config_entry, data={ @@ -1120,30 +1429,40 @@ async def test_reauth_no_update_if_config_and_ip_the_same( CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + with override_side_effect(mock_connect["connect"], AuthenticationError()): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 [result] = flows assert result["step_id"] == "reauth_confirm" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP - - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, - }, + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() ) + + device = _mocked_device( + device_config=DEVICE_CONFIG_KLAP, + mac=mock_config_entry.unique_id, + ) + with override_side_effect(mock_connect["connect"], lambda *_, **__: device): + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE: device, + }, + ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() + ) assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS @@ -1185,7 +1504,7 @@ async def test_reauth_errors( credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.FORM @@ 
-1203,7 +1522,7 @@ async def test_reauth_errors( ) mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() @@ -1241,17 +1560,15 @@ async def test_pick_device_errors( assert result2["step_id"] == "pick_device" assert not result2["errors"] - default_connect_side_effect = mock_connect["connect"].side_effect - mock_connect["connect"].side_effect = error_type - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - {CONF_DEVICE: MAC_ADDRESS}, - ) - await hass.async_block_till_done() + with override_side_effect(mock_connect["connect"], error_type): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + {CONF_DEVICE: MAC_ADDRESS}, + ) + await hass.async_block_till_done() assert result3["type"] == expected_flow if expected_flow != FlowResultType.ABORT: - mock_connect["connect"].side_effect = default_connect_side_effect result4 = await hass.config_entries.flow.async_configure( result3["flow_id"], user_input={ @@ -1263,7 +1580,7 @@ async def test_pick_device_errors( assert result4["context"]["unique_id"] == MAC_ADDRESS -async def test_discovery_timeout_connect( +async def test_discovery_timeout_try_connect_all( hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, @@ -1289,7 +1606,7 @@ async def test_discovery_timeout_connect( assert mock_connect["connect"].call_count == 1 -async def test_discovery_timeout_connect_legacy_error( +async def test_discovery_timeout_try_connect_all_needs_creds( hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, @@ -1300,19 +1617,68 @@ async def test_discovery_timeout_connect_legacy_error( DOMAIN, context={"source": config_entries.SOURCE_USER} ) mock_discovery["discover_single"].side_effect = TimeoutError - mock_connect["connect"].side_effect = KasaException await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert not result["errors"] assert mock_connect["connect"].call_count == 0 - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: IP_ADDRESS} + with override_side_effect(mock_connect["connect"], KasaException): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS} + ) + await hass.async_block_till_done() + assert result2["step_id"] == "user_auth_confirm" + assert result2["type"] is FlowResultType.FORM + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, ) await hass.async_block_till_done() + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["context"]["unique_id"] == MAC_ADDRESS + assert mock_connect["connect"].call_count == 1 + + +async def test_discovery_timeout_try_connect_all_fail( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, + mock_init, +) -> None: + """Test discovery tries legacy connect on timeout.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + mock_discovery["discover_single"].side_effect = TimeoutError + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + assert mock_connect["connect"].call_count == 0 + + with 
override_side_effect(mock_connect["connect"], KasaException): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS} + ) + await hass.async_block_till_done() + assert result2["step_id"] == "user_auth_confirm" assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} + + with override_side_effect(mock_discovery["try_connect_all"], lambda *_, **__: None): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done() + assert result3["errors"] == {"base": "cannot_connect"} assert mock_connect["connect"].call_count == 1 @@ -1334,17 +1700,17 @@ async def test_reauth_update_other_flows( data={**CREATE_ENTRY_DATA_AES}, unique_id=MAC_ADDRESS2, ) - default_side_effect = mock_connect["connect"].side_effect - mock_connect["connect"].side_effect = AuthenticationError() mock_config_entry.add_to_hass(hass) mock_config_entry2.add_to_hass(hass) - with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + with ( + patch("homeassistant.components.tplink.Discover.discover", return_value={}), + override_side_effect(mock_connect["connect"], AuthenticationError()), + ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry2.state is ConfigEntryState.SETUP_ERROR assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR - mock_connect["connect"].side_effect = default_side_effect await hass.async_block_till_done() @@ -1353,7 +1719,9 @@ async def test_reauth_update_other_flows( flows_by_entry_id = {flow["context"]["entry_id"]: flow for flow in flows} result = flows_by_entry_id[mock_config_entry.entry_id] assert result["step_id"] == "reauth_confirm" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP + assert ( + mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() + ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -1363,7 +1731,7 @@ async def test_reauth_update_other_flows( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index 986aaebd170..766e6784c8b 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -4,6 +4,7 @@ from __future__ import annotations import copy from datetime import timedelta +from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from freezegun.api import FrozenDateTimeFactory @@ -13,14 +14,18 @@ import pytest from homeassistant import setup from homeassistant.components import tplink from homeassistant.components.tplink.const import ( + CONF_AES_KEYS, + CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, DOMAIN, ) from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( + CONF_ALIAS, CONF_AUTHENTICATION, CONF_HOST, + CONF_MODEL, CONF_PASSWORD, CONF_USERNAME, STATE_ON, @@ -33,13 +38,21 @@ from homeassistant.setup import async_setup_component from 
homeassistant.util import dt as dt_util from . import ( + ALIAS, + CREATE_ENTRY_DATA_AES, CREATE_ENTRY_DATA_KLAP, CREATE_ENTRY_DATA_LEGACY, + CREDENTIALS_HASH_AES, + CREDENTIALS_HASH_KLAP, + DEVICE_CONFIG_AES, + DEVICE_CONFIG_DICT_KLAP, DEVICE_CONFIG_KLAP, + DEVICE_CONFIG_LEGACY, DEVICE_ID, DEVICE_ID_MAC, IP_ADDRESS, MAC_ADDRESS, + MODEL, _mocked_device, _patch_connect, _patch_discovery, @@ -207,16 +220,21 @@ async def test_config_entry_with_stored_credentials( hass.data.setdefault(DOMAIN, {})[CONF_AUTHENTICATION] = auth mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) + with patch( + "homeassistant.components.tplink.async_create_clientsession", return_value="Foo" + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED - config = DEVICE_CONFIG_KLAP + config = DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()) + config.uses_http = False + config.http_client = "Foo" assert config.credentials != stored_credentials config.credentials = stored_credentials mock_connect["connect"].assert_called_once_with(config=config) -async def test_config_entry_device_config_invalid( +async def test_config_entry_conn_params_invalid( hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, @@ -224,7 +242,7 @@ async def test_config_entry_device_config_invalid( ) -> None: """Test that an invalid device config logs an error and loads the config entry.""" entry_data = copy.deepcopy(CREATE_ENTRY_DATA_KLAP) - entry_data[CONF_DEVICE_CONFIG] = {"foo": "bar"} + entry_data[CONF_CONNECTION_PARAMETERS] = {"foo": "bar"} mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, @@ -237,7 +255,7 @@ async def test_config_entry_device_config_invalid( assert mock_config_entry.state is ConfigEntryState.LOADED assert ( - f"Invalid connection type dict for {IP_ADDRESS}: {entry_data.get(CONF_DEVICE_CONFIG)}" + f"Invalid connection parameters dict for {IP_ADDRESS}: {entry_data.get(CONF_CONNECTION_PARAMETERS)}" in caplog.text ) @@ -495,8 +513,9 @@ async def test_unlink_devices( } assert device_entries[0].identifiers == set(test_identifiers) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + with patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 3): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() device_entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id) @@ -504,7 +523,7 @@ async def test_unlink_devices( assert device_entries[0].identifiers == set(expected_identifiers) assert entry.version == 1 - assert entry.minor_version == 4 + assert entry.minor_version == 3 assert update_msg in caplog.text assert "Migration to version 1.3 complete" in caplog.text @@ -520,9 +539,8 @@ async def test_move_credentials_hash( from the device. 
""" device_config = { - **DEVICE_CONFIG_KLAP.to_dict( - exclude_credentials=True, credentials_hash="theHash" - ) + **DEVICE_CONFIG_DICT_KLAP, + "credentials_hash": "theHash", } entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} @@ -545,6 +563,7 @@ async def test_move_credentials_hash( with ( patch("homeassistant.components.tplink.Device.connect", new=_connect), patch("homeassistant.components.tplink.PLATFORMS", []), + patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 4), ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -567,9 +586,8 @@ async def test_move_credentials_hash_auth_error( in async_setup_entry. """ device_config = { - **DEVICE_CONFIG_KLAP.to_dict( - exclude_credentials=True, credentials_hash="theHash" - ) + **DEVICE_CONFIG_DICT_KLAP, + "credentials_hash": "theHash", } entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} @@ -589,6 +607,7 @@ async def test_move_credentials_hash_auth_error( side_effect=AuthenticationError, ), patch("homeassistant.components.tplink.PLATFORMS", []), + patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 4), ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -610,9 +629,8 @@ async def test_move_credentials_hash_other_error( at the end of the test. """ device_config = { - **DEVICE_CONFIG_KLAP.to_dict( - exclude_credentials=True, credentials_hash="theHash" - ) + **DEVICE_CONFIG_DICT_KLAP, + "credentials_hash": "theHash", } entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} @@ -631,6 +649,7 @@ async def test_move_credentials_hash_other_error( "homeassistant.components.tplink.Device.connect", side_effect=KasaException ), patch("homeassistant.components.tplink.PLATFORMS", []), + patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 4), ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -647,10 +666,8 @@ async def test_credentials_hash( hass: HomeAssistant, ) -> None: """Test credentials_hash used to call connect.""" - device_config = {**DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True)} entry_data = { **CREATE_ENTRY_DATA_KLAP, - CONF_DEVICE_CONFIG: device_config, CONF_CREDENTIALS_HASH: "theHash", } @@ -674,9 +691,7 @@ async def test_credentials_hash( await hass.async_block_till_done() assert entry.state is ConfigEntryState.LOADED - assert CONF_CREDENTIALS_HASH not in entry.data[CONF_DEVICE_CONFIG] assert CONF_CREDENTIALS_HASH in entry.data - assert entry.data[CONF_DEVICE_CONFIG] == device_config assert entry.data[CONF_CREDENTIALS_HASH] == "theHash" @@ -684,10 +699,8 @@ async def test_credentials_hash_auth_error( hass: HomeAssistant, ) -> None: """Test credentials_hash is deleted after an auth failure.""" - device_config = {**DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True)} entry_data = { **CREATE_ENTRY_DATA_KLAP, - CONF_DEVICE_CONFIG: device_config, CONF_CREDENTIALS_HASH: "theHash", } @@ -700,6 +713,10 @@ async def test_credentials_hash_auth_error( with ( patch("homeassistant.components.tplink.PLATFORMS", []), + patch( + "homeassistant.components.tplink.async_create_clientsession", + return_value="Foo", + ), patch( "homeassistant.components.tplink.Device.connect", side_effect=AuthenticationError, @@ -710,8 +727,80 @@ async def test_credentials_hash_auth_error( await hass.async_block_till_done() expected_config = DeviceConfig.from_dict( - DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True, 
credentials_hash="theHash") +        {**DEVICE_CONFIG_DICT_KLAP, "credentials_hash": "theHash"}      ) +    expected_config.uses_http = False +    expected_config.http_client = "Foo"      connect_mock.assert_called_with(config=expected_config)      assert entry.state is ConfigEntryState.SETUP_ERROR      assert CONF_CREDENTIALS_HASH not in entry.data + + +@pytest.mark.parametrize( +    ("device_config", "expected_entry_data", "credentials_hash"), +    [ +        pytest.param( +            DEVICE_CONFIG_KLAP, CREATE_ENTRY_DATA_KLAP, CREDENTIALS_HASH_KLAP, id="KLAP" +        ), +        pytest.param( +            DEVICE_CONFIG_AES, CREATE_ENTRY_DATA_AES, CREDENTIALS_HASH_AES, id="AES" +        ), +        pytest.param(DEVICE_CONFIG_LEGACY, CREATE_ENTRY_DATA_LEGACY, None, id="Legacy"), +    ], +) +async def test_migrate_remove_device_config( +    hass: HomeAssistant, +    mock_connect: AsyncMock, +    caplog: pytest.LogCaptureFixture, +    device_config: DeviceConfig, +    expected_entry_data: dict[str, Any], +    credentials_hash: str, +) -> None: +    """Test migration removes the old device config from the entry data. + +    As async_setup_entry will succeed, the connection parameters and +    credentials hash are populated from the device. +    """ +    OLD_CREATE_ENTRY_DATA = { +        CONF_HOST: expected_entry_data[CONF_HOST], +        CONF_ALIAS: ALIAS, +        CONF_MODEL: MODEL, +        CONF_DEVICE_CONFIG: { +            k: v for k, v in device_config.to_dict().items() if k != "credentials" +        }, +    } + +    entry = MockConfigEntry( +        title="TPLink", +        domain=DOMAIN, +        data=OLD_CREATE_ENTRY_DATA, +        entry_id="123456", +        unique_id=MAC_ADDRESS, +        version=1, +        minor_version=4, +    ) +    entry.add_to_hass(hass) + +    async def _connect(config): +        config.credentials_hash = credentials_hash +        config.aes_keys = expected_entry_data.get(CONF_AES_KEYS) +        return _mocked_device(device_config=config, credentials_hash=credentials_hash) + +    with ( +        patch("homeassistant.components.tplink.Device.connect", new=_connect), +        patch("homeassistant.components.tplink.PLATFORMS", []), +        patch( +            "homeassistant.components.tplink.async_create_clientsession", +            return_value="Foo", +        ), +        patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 5), +    ): +        await hass.config_entries.async_setup(entry.entry_id) +        await hass.async_block_till_done() + +    assert entry.minor_version == 5 +    assert entry.state is ConfigEntryState.LOADED +    assert CONF_DEVICE_CONFIG not in entry.data +    assert entry.data == expected_entry_data + +    assert "Migration to version 1.5 complete" in caplog.text diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index 6998d8fbcc7..b7f4ed6b8f4 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -26,8 +26,8 @@ from homeassistant.components.light import (      ATTR_EFFECT,      ATTR_EFFECT_LIST,      ATTR_HS_COLOR, -    ATTR_MAX_MIREDS, -    ATTR_MIN_MIREDS, +    ATTR_MAX_COLOR_TEMP_KELVIN, +    ATTR_MIN_COLOR_TEMP_KELVIN,      ATTR_RGB_COLOR,      ATTR_SUPPORTED_COLOR_MODES,      ATTR_TRANSITION, @@ -153,8 +153,8 @@ async def test_color_light(          assert attributes[ATTR_COLOR_MODE] == "brightness"      else:          assert attributes[ATTR_COLOR_MODE] == "hs" -        assert attributes[ATTR_MIN_MIREDS] == 111 -        assert attributes[ATTR_MAX_MIREDS] == 250 +        assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 4000 +        assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 9000          assert attributes[ATTR_HS_COLOR] == (10, 30)          assert attributes[ATTR_RGB_COLOR] == (255, 191, 178)          assert attributes[ATTR_XY_COLOR] == (0.42, 0.336) @@ -307,8 +307,8 @@ async def test_color_temp_light(          assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"]      else:          assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp"] - 
assert attributes[ATTR_MIN_MIREDS] == 111 - assert attributes[ATTR_MAX_MIREDS] == 250 + assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 9000 + assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 4000 assert attributes[ATTR_COLOR_TEMP_KELVIN] == 4000 await hass.services.async_call( diff --git a/tests/components/tplink/test_siren.py b/tests/components/tplink/test_siren.py new file mode 100644 index 00000000000..8c3328558b0 --- /dev/null +++ b/tests/components/tplink/test_siren.py @@ -0,0 +1,76 @@ +"""Tests for siren platform.""" + +from __future__ import annotations + +from kasa import Device, Module +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.siren import ( + DOMAIN as SIREN_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from . import _mocked_device, setup_platform_for_device, snapshot_platform + +from tests.common import MockConfigEntry + +ENTITY_ID = "siren.hub" + + +@pytest.fixture +async def mocked_hub(hass: HomeAssistant) -> Device: + """Return mocked tplink hub with an alarm module.""" + + return _mocked_device( + alias="hub", + modules=[Module.Alarm], + device_type=Device.Type.Hub, + ) + + +async def test_states( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, + mocked_hub: Device, +) -> None: + """Snapshot test.""" + await setup_platform_for_device(hass, mock_config_entry, Platform.SIREN, mocked_hub) + + await snapshot_platform( + hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id + ) + + +async def test_turn_on_and_off( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mocked_hub: Device +) -> None: + """Test that turn_on and turn_off services work as expected.""" + await setup_platform_for_device(hass, mock_config_entry, Platform.SIREN, mocked_hub) + + alarm_module = mocked_hub.modules[Module.Alarm] + + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: [ENTITY_ID]}, + blocking=True, + ) + + alarm_module.stop.assert_called() + + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [ENTITY_ID]}, + blocking=True, + ) + + alarm_module.play.assert_called() diff --git a/tests/components/tplink_omada/conftest.py b/tests/components/tplink_omada/conftest.py index 510a2e7a87c..b9bdb5ef94a 100644 --- a/tests/components/tplink_omada/conftest.py +++ b/tests/components/tplink_omada/conftest.py @@ -163,21 +163,10 @@ def mock_omada_clients_only_client( @pytest.fixture async def init_integration( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, mock_omada_client: MagicMock, ) -> MockConfigEntry: """Set up the TP-Link Omada integration for testing.""" - mock_config_entry = MockConfigEntry( - title="Test Omada Controller", - domain=DOMAIN, - data={ - CONF_HOST: "127.0.0.1", - CONF_PASSWORD: "mocked-password", - CONF_USERNAME: "mocked-user", - CONF_VERIFY_SSL: False, - CONF_SITE: "Default", - }, - unique_id="12345", - ) mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/tplink_omada/snapshots/test_sensor.ambr b/tests/components/tplink_omada/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..6c332eb9696 --- /dev/null +++ 
b/tests/components/tplink_omada/snapshots/test_sensor.ambr @@ -0,0 +1,333 @@ +# serializer version: 1 +# name: test_entities[sensor.test_poe_switch_cpu_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_poe_switch_cpu_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU usage', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cpu_usage', + 'unique_id': '54-AF-97-00-00-01_cpu_usage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.test_poe_switch_cpu_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch CPU usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_poe_switch_cpu_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_entities[sensor.test_poe_switch_device_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'disconnected', + 'connected', + 'pending', + 'heartbeat_missed', + 'isolated', + 'adopt_failed', + 'managed_externally', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_poe_switch_device_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device status', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_status', + 'unique_id': '54-AF-97-00-00-01_device_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.test_poe_switch_device_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test PoE Switch Device status', + 'options': list([ + 'disconnected', + 'connected', + 'pending', + 'heartbeat_missed', + 'isolated', + 'adopt_failed', + 'managed_externally', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_poe_switch_device_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'connected', + }) +# --- +# name: test_entities[sensor.test_poe_switch_memory_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_poe_switch_memory_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Memory usage', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mem_usage', + 'unique_id': '54-AF-97-00-00-01_mem_usage', + 
'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.test_poe_switch_memory_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch Memory usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_poe_switch_memory_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_entities[sensor.test_router_cpu_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_router_cpu_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU usage', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cpu_usage', + 'unique_id': 'AA-BB-CC-DD-EE-FF_cpu_usage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.test_router_cpu_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Router CPU usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_router_cpu_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16', + }) +# --- +# name: test_entities[sensor.test_router_device_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'disconnected', + 'connected', + 'pending', + 'heartbeat_missed', + 'isolated', + 'adopt_failed', + 'managed_externally', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_router_device_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device status', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_status', + 'unique_id': 'AA-BB-CC-DD-EE-FF_device_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.test_router_device_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Router Device status', + 'options': list([ + 'disconnected', + 'connected', + 'pending', + 'heartbeat_missed', + 'isolated', + 'adopt_failed', + 'managed_externally', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_router_device_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'connected', + }) +# --- +# name: test_entities[sensor.test_router_memory_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_router_memory_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Memory usage', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mem_usage', + 'unique_id': 'AA-BB-CC-DD-EE-FF_mem_usage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.test_router_memory_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Router Memory usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_router_memory_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '47', + }) +# --- diff --git a/tests/components/tplink_omada/test_config_flow.py b/tests/components/tplink_omada/test_config_flow.py index 08606fe126c..28ef0da170f 100644 --- a/tests/components/tplink_omada/test_config_flow.py +++ b/tests/components/tplink_omada/test_config_flow.py @@ -251,14 +251,7 @@ async def test_async_step_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -298,14 +291,7 @@ async def test_async_step_reauth_invalid_auth(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/tplink_omada/test_init.py b/tests/components/tplink_omada/test_init.py new file mode 100644 index 00000000000..762168df9d6 --- /dev/null +++ b/tests/components/tplink_omada/test_init.py @@ -0,0 +1,47 @@ +"""Tests for TP-Link Omada integration init.""" + +from unittest.mock import MagicMock + +from homeassistant.components.tplink_omada.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from tests.common import MockConfigEntry + +MOCK_ENTRY_DATA = { + "host": "https://fake.omada.host", + "verify_ssl": True, + "site": "SiteId", + "username": "test-username", + "password": "test-password", +} + + +async def test_missing_devices_removed_at_startup( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_omada_client: MagicMock, +) -> None: + """Test missing devices are removed at startup.""" + mock_config_entry = MockConfigEntry( + title="Test Omada Controller", + domain=DOMAIN, + data=dict(MOCK_ENTRY_DATA), + unique_id="12345", + ) + mock_config_entry.add_to_hass(hass) + + device_entry = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={(DOMAIN, "AA:BB:CC:DD:EE:FF")}, + manufacturer="TPLink", + name="Old Device", + model="Some old model", + ) + + assert device_registry.async_get(device_entry.id) == device_entry + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert device_registry.async_get(device_entry.id) is None diff --git a/tests/components/tplink_omada/test_sensor.py b/tests/components/tplink_omada/test_sensor.py new file mode 100644 index 
00000000000..54df7c5bcad --- /dev/null +++ b/tests/components/tplink_omada/test_sensor.py @@ -0,0 +1,117 @@ +"""Tests for TP-Link Omada sensor entities.""" + +from datetime import timedelta +import json +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion +from tplink_omada_client.definitions import DeviceStatus, DeviceStatusCategory +from tplink_omada_client.devices import OmadaGatewayPortStatus, OmadaListDevice + +from homeassistant.components.tplink_omada.const import DOMAIN +from homeassistant.components.tplink_omada.coordinator import POLL_DEVICES +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + +POLL_INTERVAL = timedelta(seconds=POLL_DEVICES) + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_omada_client: MagicMock, +) -> MockConfigEntry: + """Set up the TP-Link Omada integration for testing.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.tplink_omada.PLATFORMS", ["sensor"]): + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry + + +async def test_entities( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the TP-Link Omada sensor entities.""" + await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) + + +async def test_device_specific_status( + hass: HomeAssistant, + init_integration: MockConfigEntry, + mock_omada_site_client: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test a connection status is reported from known detailed status.""" + entity_id = "sensor.test_poe_switch_device_status" + entity = hass.states.get(entity_id) + assert entity is not None + assert entity.state == "connected" + + _set_test_device_status( + mock_omada_site_client, + DeviceStatus.ADOPT_FAILED.value, + DeviceStatusCategory.CONNECTED.value, + ) + + freezer.tick(POLL_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + entity = hass.states.get(entity_id) + assert entity.state == "adopt_failed" + + +async def test_device_category_status( + hass: HomeAssistant, + init_integration: MockConfigEntry, + mock_omada_site_client: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test a connection status is reported, with fallback to status category.""" + entity_id = "sensor.test_poe_switch_device_status" + entity = hass.states.get(entity_id) + assert entity is not None + assert entity.state == "connected" + + _set_test_device_status( + mock_omada_site_client, + DeviceStatus.PENDING_WIRELESS, + DeviceStatusCategory.PENDING.value, + ) + + freezer.tick(POLL_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + entity = hass.states.get(entity_id) + assert entity.state == "pending" + + +def _set_test_device_status( + mock_omada_site_client: MagicMock, + status: int, + status_category: int, +) -> OmadaGatewayPortStatus: + devices_data = json.loads(load_fixture("devices.json", DOMAIN)) + devices_data[1]["status"] = status + devices_data[1]["statusCategory"] = status_category + devices = [OmadaListDevice(d) for d in devices_data] + + 
mock_omada_site_client.get_devices.reset_mock() + mock_omada_site_client.get_devices.return_value = devices diff --git a/tests/components/traccar/test_init.py b/tests/components/traccar/test_init.py index 49127aec347..fb90262a084 100644 --- a/tests/components/traccar/test_init.py +++ b/tests/components/traccar/test_init.py @@ -11,9 +11,9 @@ from homeassistant.components import traccar, zone from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.traccar import DOMAIN, TRACKER_UPDATE -from homeassistant.config import async_process_ha_core_config from homeassistant.const import STATE_HOME, STATE_NOT_HOME from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import DATA_DISPATCHER @@ -121,18 +121,14 @@ async def test_enter_and_exit( req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"]) - ).state + state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}").state assert state_name == STATE_HOME # Enter Home again req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"]) - ).state + state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}").state assert state_name == STATE_HOME data["lon"] = 0 @@ -142,9 +138,7 @@ async def test_enter_and_exit( req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"]) - ).state + state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}").state assert state_name == STATE_NOT_HOME assert len(device_registry.devices) == 1 @@ -171,7 +165,7 @@ async def test_enter_with_attrs(hass: HomeAssistant, client, webhook_id) -> None req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}") assert state.state == STATE_NOT_HOME assert state.attributes["gps_accuracy"] == 10.5 assert state.attributes["battery_level"] == 10.0 @@ -194,7 +188,7 @@ async def test_enter_with_attrs(hass: HomeAssistant, client, webhook_id) -> None req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}") assert state.state == STATE_HOME assert state.attributes["gps_accuracy"] == 123 assert state.attributes["battery_level"] == 23 @@ -214,7 +208,7 @@ async def test_two_devices(hass: HomeAssistant, client, webhook_id) -> None: await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["id"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_1['id']}") assert state.state == "not_home" # Enter Home @@ -226,9 +220,9 @@ async 
def test_two_devices(hass: HomeAssistant, client, webhook_id) -> None: await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_2["id"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_2['id']}") assert state.state == "home" - state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["id"])) + state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_1['id']}") assert state.state == "not_home" @@ -244,9 +238,7 @@ async def test_load_unload_entry(hass: HomeAssistant, client, webhook_id) -> Non req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get( - "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"]) - ).state + state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}").state assert state_name == STATE_HOME assert len(hass.data[DATA_DISPATCHER][TRACKER_UPDATE]) == 1 diff --git a/tests/components/traccar_server/test_config_flow.py b/tests/components/traccar_server/test_config_flow.py index 62f39f00dc1..0418e4a5a72 100644 --- a/tests/components/traccar_server/test_config_flow.py +++ b/tests/components/traccar_server/test_config_flow.py @@ -1,21 +1,18 @@ """Test the Traccar Server config flow.""" from collections.abc import Generator -from typing import Any from unittest.mock import AsyncMock import pytest from pytraccar import TraccarException from homeassistant import config_entries -from homeassistant.components.traccar.device_tracker import PLATFORM_SCHEMA from homeassistant.components.traccar_server.const import ( CONF_CUSTOM_ATTRIBUTES, CONF_EVENTS, CONF_MAX_ACCURACY, CONF_SKIP_ACCURACY_FILTER_FOR, DOMAIN, - EVENTS, ) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( @@ -153,127 +150,6 @@ async def test_options( } -@pytest.mark.parametrize( - ("imported", "data", "options"), - [ - ( - { - CONF_HOST: "1.1.1.1", - CONF_PORT: 443, - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "1.1.1.1", - CONF_PORT: "443", - CONF_VERIFY_SSL: True, - CONF_SSL: False, - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_EVENTS: [], - CONF_CUSTOM_ATTRIBUTES: [], - CONF_SKIP_ACCURACY_FILTER_FOR: [], - CONF_MAX_ACCURACY: 0, - }, - ), - ( - { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_SSL: True, - "event": ["device_online", "device_offline"], - }, - { - CONF_HOST: "1.1.1.1", - CONF_PORT: "8082", - CONF_VERIFY_SSL: True, - CONF_SSL: True, - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_EVENTS: ["device_online", "device_offline"], - CONF_CUSTOM_ATTRIBUTES: [], - CONF_SKIP_ACCURACY_FILTER_FOR: [], - CONF_MAX_ACCURACY: 0, - }, - ), - ( - { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_SSL: True, - "event": ["device_online", "device_offline", "all_events"], - }, - { - CONF_HOST: "1.1.1.1", - CONF_PORT: "8082", - CONF_VERIFY_SSL: True, - CONF_SSL: True, - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_EVENTS: list(EVENTS.values()), - CONF_CUSTOM_ATTRIBUTES: [], - CONF_SKIP_ACCURACY_FILTER_FOR: [], - CONF_MAX_ACCURACY: 0, - }, - ), - ], -) -async def test_import_from_yaml( - hass: HomeAssistant, - imported: dict[str, Any], - data: dict[str, Any], - options: dict[str, Any], - mock_traccar_api_client: 
Generator[AsyncMock], -) -> None: - """Test importing configuration from YAML.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=PLATFORM_SCHEMA({"platform": "traccar", **imported}), - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"{data[CONF_HOST]}:{data[CONF_PORT]}" - assert result["data"] == data - assert result["options"] == options - assert result["result"].state is ConfigEntryState.LOADED - - -async def test_abort_import_already_configured(hass: HomeAssistant) -> None: - """Test abort for existing server while importing.""" - - config_entry = MockConfigEntry( - domain=DOMAIN, - data={CONF_HOST: "1.1.1.1", CONF_PORT: "8082"}, - ) - - config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=PLATFORM_SCHEMA( - { - "platform": "traccar", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_HOST: "1.1.1.1", - CONF_PORT: "8082", - } - ), - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - async def test_abort_already_configured( hass: HomeAssistant, mock_config_entry: MockConfigEntry, diff --git a/tests/components/trace/test_websocket_api.py b/tests/components/trace/test_websocket_api.py index 7b292ed39e3..43664c6e7ce 100644 --- a/tests/components/trace/test_websocket_api.py +++ b/tests/components/trace/test_websocket_api.py @@ -47,7 +47,7 @@ async def _setup_automation_or_script( ) -> None: """Set up automations or scripts from automation config.""" if domain == "script": - configs = {config["id"]: {"sequence": config["action"]} for config in configs} + configs = {config["id"]: {"sequence": config["actions"]} for config in configs} if script_config: if domain == "automation": @@ -85,7 +85,7 @@ async def _run_automation_or_script( def _assert_raw_config(domain, config, trace): if domain == "script": - config = {"sequence": config["action"]} + config = {"sequence": config["actions"]} assert trace["config"] == config @@ -152,20 +152,20 @@ async def test_get_trace( sun_config = { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": {"service": "test.automation"}, } moon_config = { "id": "moon", - "trigger": [ + "triggers": [ {"platform": "event", "event_type": "test_event2"}, {"platform": "event", "event_type": "test_event3"}, ], - "condition": { + "conditions": { "condition": "template", "value_template": "{{ trigger.event.event_type=='test_event2' }}", }, - "action": {"event": "another_event"}, + "actions": {"event": "another_event"}, } sun_action = { @@ -551,13 +551,13 @@ async def test_trace_overflow( sun_config = { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"event": "some_event"}, + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": {"event": "some_event"}, } moon_config = { "id": "moon", - "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": {"event": "another_event"}, + "triggers": {"platform": "event", "event_type": "test_event2"}, + "actions": {"event": "another_event"}, } await _setup_automation_or_script( hass, domain, [sun_config, moon_config], stored_traces=stored_traces @@ -632,13 +632,13 @@ async def test_restore_traces_overflow( 
hass_storage["trace.saved_traces"] = saved_traces sun_config = { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"event": "some_event"}, + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": {"event": "some_event"}, } moon_config = { "id": "moon", - "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": {"event": "another_event"}, + "triggers": {"platform": "event", "event_type": "test_event2"}, + "actions": {"event": "another_event"}, } await _setup_automation_or_script(hass, domain, [sun_config, moon_config]) await hass.async_start() @@ -713,13 +713,13 @@ async def test_restore_traces_late_overflow( hass_storage["trace.saved_traces"] = saved_traces sun_config = { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"event": "some_event"}, + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": {"event": "some_event"}, } moon_config = { "id": "moon", - "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": {"event": "another_event"}, + "triggers": {"platform": "event", "event_type": "test_event2"}, + "actions": {"event": "another_event"}, } await _setup_automation_or_script(hass, domain, [sun_config, moon_config]) await hass.async_start() @@ -765,8 +765,8 @@ async def test_trace_no_traces( sun_config = { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"event": "some_event"}, + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": {"event": "some_event"}, } await _setup_automation_or_script(hass, domain, [sun_config], stored_traces=0) @@ -832,20 +832,20 @@ async def test_list_traces( sun_config = { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": {"service": "test.automation"}, } moon_config = { "id": "moon", - "trigger": [ + "triggers": [ {"platform": "event", "event_type": "test_event2"}, {"platform": "event", "event_type": "test_event3"}, ], - "condition": { + "conditions": { "condition": "template", "value_template": "{{ trigger.event.event_type=='test_event2' }}", }, - "action": {"event": "another_event"}, + "actions": {"event": "another_event"}, } await _setup_automation_or_script(hass, domain, [sun_config, moon_config]) @@ -965,8 +965,8 @@ async def test_nested_traces( sun_config = { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "script.moon"}, + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": {"service": "script.moon"}, } moon_config = {"moon": {"sequence": {"event": "another_event"}}} await _setup_automation_or_script(hass, domain, [sun_config], moon_config) @@ -1036,8 +1036,8 @@ async def test_breakpoints( sun_config = { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [ + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [ {"event": "event0"}, {"event": "event1"}, {"event": "event2"}, @@ -1206,8 +1206,8 @@ async def test_breakpoints_2( sun_config = { "id": "sun", - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [ + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [ {"event": "event0"}, {"event": "event1"}, {"event": "event2"}, @@ -1311,8 +1311,8 @@ async def test_breakpoints_3( sun_config = { "id": "sun", - 
"trigger": {"platform": "event", "event_type": "test_event"}, - "action": [ + "triggers": {"platform": "event", "event_type": "test_event"}, + "actions": [ {"event": "event0"}, {"event": "event1"}, {"event": "event2"}, diff --git a/tests/components/tractive/snapshots/test_diagnostics.ambr b/tests/components/tractive/snapshots/test_diagnostics.ambr index a66247749b7..3613f7e5997 100644 --- a/tests/components/tractive/snapshots/test_diagnostics.ambr +++ b/tests/components/tractive/snapshots/test_diagnostics.ambr @@ -7,6 +7,8 @@ 'password': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'tractive', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, @@ -15,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/tractive/snapshots/test_sensor.ambr b/tests/components/tractive/snapshots/test_sensor.ambr index f1ed397450e..f10cfb29226 100644 --- a/tests/components/tractive/snapshots/test_sensor.ambr +++ b/tests/components/tractive/snapshots/test_sensor.ambr @@ -139,7 +139,7 @@ 'supported_features': 0, 'translation_key': 'calories', 'unique_id': 'pet_id_123_calories', - 'unit_of_measurement': 'kcal', + 'unit_of_measurement': , }) # --- # name: test_sensor[sensor.test_pet_calories_burned-state] @@ -147,7 +147,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Test Pet Calories burned', 'state_class': , - 'unit_of_measurement': 'kcal', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_pet_calories_burned', diff --git a/tests/components/tractive/test_config_flow.py b/tests/components/tractive/test_config_flow.py index 5cedb51e5af..691bf671afd 100644 --- a/tests/components/tractive/test_config_flow.py +++ b/tests/components/tractive/test_config_flow.py @@ -110,15 +110,7 @@ async def test_reauthentication(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -151,15 +143,7 @@ async def test_reauthentication_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -189,15 +173,7 @@ async def test_reauthentication_unknown_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -227,15 +203,7 @@ async def test_reauthentication_failure_no_existing_entry(hass: HomeAssistant) - ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": 
config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/tradfri/test_config_flow.py b/tests/components/tradfri/test_config_flow.py index af2fdc22d2a..b6f38b1d83d 100644 --- a/tests/components/tradfri/test_config_flow.py +++ b/tests/components/tradfri/test_config_flow.py @@ -103,7 +103,7 @@ async def test_user_connection_bad_key( assert len(mock_entry_setup.mock_calls) == 0 assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"security_code": "invalid_security_code"} + assert result["errors"] == {"base": "invalid_security_code"} async def test_discovery_connection( diff --git a/tests/components/tradfri/test_cover.py b/tests/components/tradfri/test_cover.py index 5aa4e75728d..59f3f8a956a 100644 --- a/tests/components/tradfri/test_cover.py +++ b/tests/components/tradfri/test_cover.py @@ -8,8 +8,12 @@ import pytest from pytradfri.const import ATTR_REACHABLE_STATE from pytradfri.device import Device -from homeassistant.components.cover import ATTR_CURRENT_POSITION, DOMAIN as COVER_DOMAIN -from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + DOMAIN as COVER_DOMAIN, + CoverState, +) +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from .common import CommandStore, setup_integration @@ -27,7 +31,7 @@ async def test_cover_available( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 assert state.attributes["model"] == "FYRTUR block-out roller blind" @@ -44,11 +48,11 @@ async def test_cover_available( @pytest.mark.parametrize( ("service", "service_data", "expected_state", "expected_position"), [ - ("set_cover_position", {"position": 100}, STATE_OPEN, 100), - ("set_cover_position", {"position": 0}, STATE_CLOSED, 0), - ("open_cover", {}, STATE_OPEN, 100), - ("close_cover", {}, STATE_CLOSED, 0), - ("stop_cover", {}, STATE_OPEN, 60), + ("set_cover_position", {"position": 100}, CoverState.OPEN, 100), + ("set_cover_position", {"position": 0}, CoverState.CLOSED, 0), + ("open_cover", {}, CoverState.OPEN, 100), + ("close_cover", {}, CoverState.CLOSED, 0), + ("stop_cover", {}, CoverState.OPEN, 60), ], ) async def test_cover_services( @@ -66,7 +70,7 @@ async def test_cover_services( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 await hass.services.async_call( diff --git a/tests/components/tradfri/test_light.py b/tests/components/tradfri/test_light.py index 887b043689f..c7091e77343 100644 --- a/tests/components/tradfri/test_light.py +++ b/tests/components/tradfri/test_light.py @@ -9,10 +9,10 @@ from pytradfri.device import Device from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, ColorMode, @@ -67,9 +67,9 @@ def bulb_cws() -> str: "light.test_ws", { ATTR_BRIGHTNESS: 250, - ATTR_COLOR_TEMP: 400, - ATTR_MIN_MIREDS: 250, - ATTR_MAX_MIREDS: 
454, + ATTR_COLOR_TEMP_KELVIN: 2500, + ATTR_MAX_COLOR_TEMP_KELVIN: 4000, + ATTR_MIN_COLOR_TEMP_KELVIN: 2202, ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP], ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, }, diff --git a/tests/components/trafikverket_camera/conftest.py b/tests/components/trafikverket_camera/conftest.py index cef85af2228..5e0e9bfa593 100644 --- a/tests/components/trafikverket_camera/conftest.py +++ b/tests/components/trafikverket_camera/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime from unittest.mock import patch import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.components.trafikverket_camera.const import DOMAIN from homeassistant.config_entries import SOURCE_USER diff --git a/tests/components/trafikverket_camera/test_binary_sensor.py b/tests/components/trafikverket_camera/test_binary_sensor.py index 6750c05772b..46cf93726c7 100644 --- a/tests/components/trafikverket_camera/test_binary_sensor.py +++ b/tests/components/trafikverket_camera/test_binary_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_ON diff --git a/tests/components/trafikverket_camera/test_camera.py b/tests/components/trafikverket_camera/test_camera.py index 51d4563c19b..f61dd497c9c 100644 --- a/tests/components/trafikverket_camera/test_camera.py +++ b/tests/components/trafikverket_camera/test_camera.py @@ -7,7 +7,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.components.camera import async_get_image from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/trafikverket_camera/test_config_flow.py b/tests/components/trafikverket_camera/test_config_flow.py index 2e9e34f4c35..cc37e2b5441 100644 --- a/tests/components/trafikverket_camera/test_config_flow.py +++ b/tests/components/trafikverket_camera/test_config_flow.py @@ -5,8 +5,12 @@ from __future__ import annotations from unittest.mock import patch import pytest -from pytrafikverket.exceptions import InvalidAuthentication, NoCameraFound, UnknownError -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import ( + CameraInfoModel, + InvalidAuthentication, + NoCameraFound, + UnknownError, +) from homeassistant import config_entries from homeassistant.components.trafikverket_camera.const import DOMAIN @@ -208,15 +212,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -280,15 +276,7 @@ async def test_reauth_flow_error( entry.add_to_hass(hass) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( 
"homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", @@ -325,3 +313,150 @@ async def test_reauth_flow_error( "api_key": "1234567891", "id": "1234", } + + +async def test_reconfigure_flow( + hass: HomeAssistant, + get_cameras: list[CameraInfoModel], + get_camera2: CameraInfoModel, +) -> None: + """Test a reconfigure flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_API_KEY: "1234567890", + CONF_ID: "1234", + }, + unique_id="1234", + version=3, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", + return_value=get_cameras, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "1234567890", + CONF_LOCATION: "Test loc", + }, + ) + await hass.async_block_till_done() + + with ( + patch( + "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", + return_value=[get_camera2], + ), + patch( + "homeassistant.components.trafikverket_camera.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ID: "5678", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + "api_key": "1234567890", + "id": "5678", + } + + +@pytest.mark.parametrize( + ("side_effect", "error_key", "p_error"), + [ + ( + InvalidAuthentication, + "base", + "invalid_auth", + ), + ( + NoCameraFound, + "location", + "invalid_location", + ), + ( + UnknownError, + "base", + "cannot_connect", + ), + ], +) +async def test_reconfigure_flow_error( + hass: HomeAssistant, + get_camera: CameraInfoModel, + side_effect: Exception, + error_key: str, + p_error: str, +) -> None: + """Test a reauthentication flow with error.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_API_KEY: "1234567890", + CONF_ID: "1234", + }, + unique_id="1234", + version=3, + ) + entry.add_to_hass(hass) + await hass.async_block_till_done() + + result = await entry.start_reconfigure_flow(hass) + + with patch( + "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", + side_effect=side_effect, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "1234567890", + CONF_LOCATION: "Test loc", + }, + ) + await hass.async_block_till_done() + + assert result2["step_id"] == "reconfigure" + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {error_key: p_error} + + with ( + patch( + "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", + return_value=[get_camera], + ), + patch( + "homeassistant.components.trafikverket_camera.async_setup_entry", + return_value=True, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "1234567891", + CONF_LOCATION: "Test loc", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert entry.data == { + CONF_ID: "1234", + CONF_API_KEY: "1234567891", + } diff --git 
a/tests/components/trafikverket_camera/test_coordinator.py b/tests/components/trafikverket_camera/test_coordinator.py index f50ab56724e..7deeeccf8ad 100644 --- a/tests/components/trafikverket_camera/test_coordinator.py +++ b/tests/components/trafikverket_camera/test_coordinator.py @@ -5,13 +5,13 @@ from __future__ import annotations from unittest.mock import patch import pytest -from pytrafikverket.exceptions import ( +from pytrafikverket import ( + CameraInfoModel, InvalidAuthentication, MultipleCamerasFound, NoCameraFound, UnknownError, ) -from pytrafikverket.models import CameraInfoModel from homeassistant.components.trafikverket_camera.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntryState diff --git a/tests/components/trafikverket_camera/test_init.py b/tests/components/trafikverket_camera/test_init.py index aaa4c3cfed7..5b77f17ac3e 100644 --- a/tests/components/trafikverket_camera/test_init.py +++ b/tests/components/trafikverket_camera/test_init.py @@ -6,8 +6,7 @@ from datetime import datetime from unittest.mock import patch import pytest -from pytrafikverket.exceptions import UnknownError -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel, UnknownError from homeassistant.components.trafikverket_camera import async_migrate_entry from homeassistant.components.trafikverket_camera.const import DOMAIN diff --git a/tests/components/trafikverket_camera/test_recorder.py b/tests/components/trafikverket_camera/test_recorder.py index d9778ab851a..c14f05ca7ab 100644 --- a/tests/components/trafikverket_camera/test_recorder.py +++ b/tests/components/trafikverket_camera/test_recorder.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states diff --git a/tests/components/trafikverket_camera/test_sensor.py b/tests/components/trafikverket_camera/test_sensor.py index 0f4ef02a850..f8e0342b0f6 100644 --- a/tests/components/trafikverket_camera/test_sensor.py +++ b/tests/components/trafikverket_camera/test_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/tests/components/trafikverket_ferry/test_config_flow.py b/tests/components/trafikverket_ferry/test_config_flow.py index 1c170a917cc..5671d9d3fb7 100644 --- a/tests/components/trafikverket_ferry/test_config_flow.py +++ b/tests/components/trafikverket_ferry/test_config_flow.py @@ -62,9 +62,7 @@ async def test_form(hass: HomeAssistant) -> None: "weekday": ["mon", "fri"], } assert len(mock_setup_entry.mock_calls) == 1 - assert result2["result"].unique_id == "{}-{}-{}-{}".format( - "eker\u00f6", "slagsta", "10:00", "['mon', 'fri']" - ) + assert result2["result"].unique_id == "eker\u00f6-slagsta-10:00-['mon', 'fri']" @pytest.mark.parametrize( @@ -128,15 +126,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == 
"reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -203,15 +193,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.trafikverket_ferry.config_flow.TrafikverketFerry.async_get_next_ferry_stop", diff --git a/tests/components/trafikverket_train/conftest.py b/tests/components/trafikverket_train/conftest.py index 14671d27252..234269cc9f8 100644 --- a/tests/components/trafikverket_train/conftest.py +++ b/tests/components/trafikverket_train/conftest.py @@ -38,7 +38,7 @@ async def load_integration_from_entry( return_value=get_train_stop, ), patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", ), ): await hass.config_entries.async_setup(config_entry_id) @@ -50,7 +50,8 @@ async def load_integration_from_entry( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="stockholmc-uppsalac--['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) await setup_config_entry_with_mocked_data(config_entry.entry_id) @@ -60,7 +61,8 @@ async def load_integration_from_entry( source=SOURCE_USER, data=ENTRY_CONFIG2, entry_id="2", - unique_id="stockholmc-uppsalac-1100-['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']", + version=1, + minor_version=2, ) config_entry2.add_to_hass(hass) await setup_config_entry_with_mocked_data(config_entry2.entry_id) diff --git a/tests/components/trafikverket_train/snapshots/test_init.ambr b/tests/components/trafikverket_train/snapshots/test_init.ambr index c32995fdb76..2b3693eddc1 100644 --- a/tests/components/trafikverket_train/snapshots/test_init.ambr +++ b/tests/components/trafikverket_train/snapshots/test_init.ambr @@ -7,7 +7,7 @@ 'title_placeholders': dict({ 'name': 'Mock Title', }), - 'unique_id': '321', + 'unique_id': None, }), 'flow_id': , 'handler': 'trafikverket_train', diff --git a/tests/components/trafikverket_train/snapshots/test_sensor.ambr b/tests/components/trafikverket_train/snapshots/test_sensor.ambr index cae0457bbff..6caf1f86b51 100644 --- a/tests/components/trafikverket_train/snapshots/test_sensor.ambr +++ b/tests/components/trafikverket_train/snapshots/test_sensor.ambr @@ -222,7 +222,7 @@ 'title_placeholders': dict({ 'name': 'Mock Title', }), - 'unique_id': "stockholmc-uppsalac--['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']", + 'unique_id': None, }), 'flow_id': , 'handler': 'trafikverket_train', diff --git a/tests/components/trafikverket_train/test_config_flow.py b/tests/components/trafikverket_train/test_config_flow.py index 83cc5a89016..eac5e629bf0 100644 --- a/tests/components/trafikverket_train/test_config_flow.py +++ b/tests/components/trafikverket_train/test_config_flow.py @@ -16,6 +16,7 @@ from pytrafikverket.models import TrainStopModel from homeassistant import config_entries from homeassistant.components.trafikverket_train.const import ( + CONF_FILTER_PRODUCT, CONF_FROM, CONF_TIME, CONF_TO, @@ -39,7 +40,7 @@ async def test_form(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", 
+ "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -73,9 +74,6 @@ async def test_form(hass: HomeAssistant) -> None: } assert result["options"] == {"filter_product": None} assert len(mock_setup_entry.mock_calls) == 1 - assert result["result"].unique_id == "{}-{}-{}-{}".format( - "stockholmc", "uppsalac", "10:00", "['mon', 'fri']" - ) async def test_form_entry_already_exist(hass: HomeAssistant) -> None: @@ -90,8 +88,10 @@ async def test_form_entry_already_exist(hass: HomeAssistant) -> None: CONF_TO: "Uppsala C", CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, + CONF_FILTER_PRODUCT: None, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) @@ -103,7 +103,7 @@ async def test_form_entry_already_exist(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -163,7 +163,7 @@ async def test_flow_fails( with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", side_effect=side_effect(), ), patch( @@ -208,7 +208,7 @@ async def test_flow_fails_departures( with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_next_train_stops", @@ -242,26 +242,19 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -324,23 +317,16 @@ async def test_reauth_flow_error( CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + 
"homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", side_effect=side_effect(), ), patch( @@ -359,7 +345,7 @@ async def test_reauth_flow_error( with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -414,23 +400,16 @@ async def test_reauth_flow_error_departures( CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -449,7 +428,7 @@ async def test_reauth_flow_error_departures( with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -493,13 +472,14 @@ async def test_options_flow( CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) with ( patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", diff --git a/tests/components/trafikverket_train/test_init.py b/tests/components/trafikverket_train/test_init.py index c8fea174e83..41c8e2432ef 100644 --- a/tests/components/trafikverket_train/test_init.py +++ b/tests/components/trafikverket_train/test_init.py @@ -28,13 +28,14 @@ async def test_unload_entry( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="321", + version=1, + minor_version=2, ) entry.add_to_hass(hass) with ( patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", @@ -64,12 +65,13 @@ async def test_auth_failed( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="321", + version=1, + minor_version=2, ) entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", side_effect=InvalidAuthentication, ): await hass.config_entries.async_setup(entry.entry_id) 
@@ -94,12 +96,13 @@ async def test_no_stations( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="321", + version=1, + minor_version=2, ) entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", side_effect=NoTrainStationFound, ): await hass.config_entries.async_setup(entry.entry_id) @@ -121,7 +124,8 @@ async def test_migrate_entity_unique_id( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="321", + version=1, + minor_version=2, ) entry.add_to_hass(hass) @@ -135,7 +139,7 @@ async def test_migrate_entity_unique_id( with ( patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", @@ -149,3 +153,69 @@ async def test_migrate_entity_unique_id( entity = entity_registry.async_get(entity.entity_id) assert entity.unique_id == f"{entry.entry_id}-departure_time" + + +async def test_migrate_entry( + hass: HomeAssistant, + get_trains: list[TrainStopModel], +) -> None: + """Test migrate entry unique id.""" + entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + options=OPTIONS_CONFIG, + version=1, + minor_version=1, + entry_id="1", + unique_id="321", + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", + ), + patch( + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", + return_value=get_trains, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + assert entry.version == 1 + assert entry.minor_version == 2 + assert entry.unique_id is None + + +async def test_migrate_entry_from_future_version_fails( + hass: HomeAssistant, + get_trains: list[TrainStopModel], +) -> None: + """Test migrate entry from future version fails.""" + entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + options=OPTIONS_CONFIG, + version=2, + entry_id="1", + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", + ), + patch( + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", + return_value=get_trains, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/trafikverket_weatherstation/test_config_flow.py b/tests/components/trafikverket_weatherstation/test_config_flow.py index 771336301ff..f8a0f636718 100644 --- a/tests/components/trafikverket_weatherstation/test_config_flow.py +++ b/tests/components/trafikverket_weatherstation/test_config_flow.py @@ -116,14 +116,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await 
entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -182,14 +175,7 @@ async def test_reauth_flow_fails( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -206,3 +192,111 @@ async def test_reauth_flow_fails( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": base_error} + + +async def test_reconfigure_flow(hass: HomeAssistant) -> None: + """Test a reconfigure flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_API_KEY: "1234567890", + CONF_STATION: "Vallby", + }, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch( + "homeassistant.components.trafikverket_weatherstation.config_flow.TrafikverketWeather.async_get_weather", + ), + patch( + "homeassistant.components.trafikverket_weatherstation.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567891", CONF_STATION: "Vallby_new"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == {"api_key": "1234567891", "station": "Vallby_new"} + + +@pytest.mark.parametrize( + ("side_effect", "base_error"), + [ + ( + InvalidAuthentication, + "invalid_auth", + ), + ( + NoWeatherStationFound, + "invalid_station", + ), + ( + MultipleWeatherStationsFound, + "more_stations", + ), + ( + Exception, + "cannot_connect", + ), + ], +) +async def test_reconfigure_flow_fails( + hass: HomeAssistant, side_effect: Exception, base_error: str +) -> None: + """Test a reconfigure flow that fails.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_API_KEY: "1234567890", + CONF_STATION: "Vallby", + }, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "homeassistant.components.trafikverket_weatherstation.config_flow.TrafikverketWeather.async_get_weather", + side_effect=side_effect(), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567891", CONF_STATION: "Vallby_new"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": base_error} + + with ( + patch( + "homeassistant.components.trafikverket_weatherstation.config_flow.TrafikverketWeather.async_get_weather", + ), + patch( + "homeassistant.components.trafikverket_weatherstation.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567891", CONF_STATION: "Vallby_new"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == {"api_key": "1234567891", "station": 
"Vallby_new"} diff --git a/tests/components/transmission/test_config_flow.py b/tests/components/transmission/test_config_flow.py index e6c523bf1f6..b724a91f7a1 100644 --- a/tests/components/transmission/test_config_flow.py +++ b/tests/components/transmission/test_config_flow.py @@ -160,18 +160,14 @@ async def test_reauth_success(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=transmission.DOMAIN, data=MOCK_CONFIG_DATA) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - transmission.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "user"} + assert result["description_placeholders"] == { + "username": "user", + "name": "Mock Title", + } with patch( "homeassistant.components.transmission.async_setup_entry", @@ -197,18 +193,14 @@ async def test_reauth_failed(hass: HomeAssistant, mock_api: MagicMock) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - transmission.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "user"} + assert result["description_placeholders"] == { + "username": "user", + "name": "Mock Title", + } mock_api.side_effect = TransmissionAuthError() result2 = await hass.config_entries.flow.async_configure( @@ -232,18 +224,14 @@ async def test_reauth_failed_connection_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - transmission.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "user"} + assert result["description_placeholders"] == { + "username": "user", + "name": "Mock Title", + } mock_api.side_effect = TransmissionConnectError() result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/trend/test_binary_sensor.py b/tests/components/trend/test_binary_sensor.py index ad85f65a9fc..4a829bb86d2 100644 --- a/tests/components/trend/test_binary_sensor.py +++ b/tests/components/trend/test_binary_sensor.py @@ -9,7 +9,7 @@ import pytest from homeassistant import setup from homeassistant.components.trend.const import DOMAIN -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -395,3 +395,45 @@ async def test_device_id( trend_entity = entity_registry.async_get("binary_sensor.trend") assert trend_entity is not None assert trend_entity.device_id == source_entity.device_id + + +@pytest.mark.parametrize( + "error_state", + [ + STATE_UNKNOWN, + STATE_UNAVAILABLE, + ], +) +async def test_unavailable_source( + hass: HomeAssistant, + config_entry: 
MockConfigEntry, + freezer: FrozenDateTimeFactory, + setup_component: ComponentSetup, + error_state: str, +) -> None: + """Test for unavailable source.""" + await setup_component( + { + "sample_duration": 10000, + "min_gradient": 1, + "max_samples": 25, + "min_samples": 5, + }, + ) + + for val in (10, 20, 30, 40, 50, 60): + freezer.tick(timedelta(seconds=2)) + hass.states.async_set("sensor.test_state", val) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.test_trend_sensor").state == "on" + + hass.states.async_set("sensor.test_state", error_state) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.test_trend_sensor").state == STATE_UNAVAILABLE + + hass.states.async_set("sensor.test_state", 50) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.test_trend_sensor").state == "on" diff --git a/tests/components/triggercmd/__init__.py b/tests/components/triggercmd/__init__.py new file mode 100644 index 00000000000..90562a67386 --- /dev/null +++ b/tests/components/triggercmd/__init__.py @@ -0,0 +1 @@ +"""Tests for the triggercmd integration.""" diff --git a/tests/components/triggercmd/conftest.py b/tests/components/triggercmd/conftest.py new file mode 100644 index 00000000000..5e2ac250d61 --- /dev/null +++ b/tests/components/triggercmd/conftest.py @@ -0,0 +1,15 @@ +"""triggercmd conftest.""" + +from unittest.mock import patch + +import pytest + + +@pytest.fixture +def mock_async_setup_entry(): + """Mock async_setup_entry.""" + with patch( + "homeassistant.components.triggercmd.async_setup_entry", + return_value=True, + ) as mock_async_setup_entry: + yield mock_async_setup_entry diff --git a/tests/components/triggercmd/test_config_flow.py b/tests/components/triggercmd/test_config_flow.py new file mode 100644 index 00000000000..f12fcfef768 --- /dev/null +++ b/tests/components/triggercmd/test_config_flow.py @@ -0,0 +1,161 @@ +"""Define tests for the triggercmd config flow.""" + +from unittest.mock import patch + +import pytest +from triggercmd import TRIGGERcmdConnectionError + +from homeassistant.components.triggercmd.const import CONF_TOKEN, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +invalid_token_with_length_100_or_more = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjEyMzQ1Njc4OTBxd2VydHl1aW9wYXNkZiIsImlhdCI6MTcxOTg4MTU4M30.E4T2S4RQfuI2ww74sUkkT-wyTGrV5_VDkgUdae5yo4E" +invalid_token_id = "1234567890qwertyuiopasdf" +invalid_token_with_length_100_or_more_and_no_id = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJub2lkIjoiMTIzNDU2Nzg5MHF3ZXJ0eXVpb3Bhc2RmIiwiaWF0IjoxNzE5ODgxNTgzfQ.MaJLNWPGCE51Zibhbq-Yz7h3GkUxLurR2eoM2frnO6Y" + + +async def test_full_flow( + hass: HomeAssistant, +) -> None: + """Test config flow happy path.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["errors"] == {} + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch( + "homeassistant.components.triggercmd.client.async_connection_test", + return_value=200, + ), + patch( + "homeassistant.components.triggercmd.ha.Hub", + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: invalid_token_with_length_100_or_more}, + ) + + assert result["data"] == {CONF_TOKEN: invalid_token_with_length_100_or_more} + assert 
result["result"].unique_id == invalid_token_id + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("test_input", "expected"), + [ + (invalid_token_with_length_100_or_more_and_no_id, {"base": "unknown"}), + ("not-a-token", {CONF_TOKEN: "invalid_token"}), + ], +) +async def test_config_flow_user_invalid_token( + hass: HomeAssistant, + test_input: str, + expected: dict, +) -> None: + """Test the initial step of the config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + with ( + patch( + "homeassistant.components.triggercmd.client.async_connection_test", + return_value=200, + ), + patch( + "homeassistant.components.triggercmd.ha.Hub", + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: test_input}, + ) + + assert result["errors"] == expected + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: invalid_token_with_length_100_or_more}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_config_flow_entry_already_configured(hass: HomeAssistant) -> None: + """Test user input for config_entry that already exists.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + MockConfigEntry( + domain=DOMAIN, + data={CONF_TOKEN: invalid_token_with_length_100_or_more}, + unique_id=invalid_token_id, + ).add_to_hass(hass) + + with ( + patch( + "homeassistant.components.triggercmd.client.async_connection_test", + return_value=200, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: invalid_token_with_length_100_or_more}, + ) + + assert result["reason"] == "already_configured" + assert result["type"] is FlowResultType.ABORT + + +async def test_config_flow_connection_error(hass: HomeAssistant) -> None: + """Test a connection error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + with ( + patch( + "homeassistant.components.triggercmd.client.async_connection_test", + side_effect=TRIGGERcmdConnectionError, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: invalid_token_with_length_100_or_more}, + ) + + assert result["errors"] == { + "base": "cannot_connect", + } + assert result["type"] is FlowResultType.FORM + + with ( + patch( + "homeassistant.components.triggercmd.client.async_connection_test", + return_value=200, + ), + patch( + "homeassistant.components.triggercmd.ha.Hub", + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_TOKEN: invalid_token_with_length_100_or_more}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index 4acba401fad..b1eae12d694 100644 --- a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -130,6 +130,8 @@ class BaseProvider: def __init__(self, lang: str) -> None: """Initialize test provider.""" self._lang = lang + self._supported_languages = SUPPORT_LANGUAGES + self._supported_options = ["voice", "age"] @property def default_language(self) -> str: @@ -139,7 +141,7 @@ class BaseProvider: @property def supported_languages(self) -> list[str]: """Return list of supported languages.""" - return SUPPORT_LANGUAGES + 
return self._supported_languages @callback def async_get_supported_voices(self, language: str) -> list[Voice] | None: @@ -154,7 +156,7 @@ class BaseProvider: @property def supported_options(self) -> list[str]: """Return list of supported options like voice, emotions.""" - return ["voice", "age"] + return self._supported_options def get_tts_audio( self, message: str, language: str, options: dict[str, Any] @@ -163,7 +165,7 @@ class BaseProvider: return ("mp3", b"") -class MockProvider(BaseProvider, Provider): +class MockTTSProvider(BaseProvider, Provider): """Test speech API provider.""" def __init__(self, lang: str) -> None: @@ -185,7 +187,7 @@ class MockTTS(MockPlatform): {vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES)} ) - def __init__(self, provider: MockProvider, **kwargs: Any) -> None: + def __init__(self, provider: MockTTSProvider, **kwargs: Any) -> None: """Initialize.""" super().__init__(**kwargs) self._provider = provider @@ -202,7 +204,7 @@ class MockTTS(MockPlatform): async def mock_setup( hass: HomeAssistant, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Set up a test provider.""" mock_integration(hass, MockModule(domain=TEST_DOMAIN)) diff --git a/tests/components/tts/conftest.py b/tests/components/tts/conftest.py index 91ddd7742af..ddef3ee0c28 100644 --- a/tests/components/tts/conftest.py +++ b/tests/components/tts/conftest.py @@ -10,16 +10,16 @@ from unittest.mock import MagicMock import pytest -from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from .common import ( DEFAULT_LANG, TEST_DOMAIN, - MockProvider, MockTTS, MockTTSEntity, + MockTTSProvider, mock_config_entry_setup, mock_setup, ) @@ -67,9 +67,9 @@ async def mock_tts(hass: HomeAssistant, mock_provider) -> None: @pytest.fixture -def mock_provider() -> MockProvider: +def mock_provider() -> MockTTSProvider: """Test TTS provider.""" - return MockProvider(DEFAULT_LANG) + return MockTTSProvider(DEFAULT_LANG) @pytest.fixture @@ -106,7 +106,7 @@ def config_flow_fixture( async def setup_fixture( hass: HomeAssistant, request: pytest.FixtureRequest, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, mock_tts_entity: MockTTSEntity, ) -> None: """Set up the test environment.""" diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 05c19622e84..0b01a24720d 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -30,15 +30,23 @@ from .common import ( DEFAULT_LANG, SUPPORT_LANGUAGES, TEST_DOMAIN, - MockProvider, + MockTTS, MockTTSEntity, + MockTTSProvider, get_media_source_url, mock_config_entry_setup, mock_setup, retrieve_media, ) -from tests.common import async_mock_service, mock_restore_cache +from tests.common import ( + MockModule, + async_mock_service, + mock_integration, + mock_platform, + mock_restore_cache, + reset_translation_cache, +) from tests.typing import ClientSessionGenerator, WebSocketGenerator ORIG_WRITE_TAGS = tts.SpeechManager.write_tags @@ -197,23 +205,25 @@ async def test_service( blocking=True, ) - assert len(calls) == 1 - assert calls[0].data[ATTR_MEDIA_ANNOUNCE] is True - assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - 
f"_en-us_-_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" - ).is_file() + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert len(calls) == 1 + assert calls[0].data[ATTR_MEDIA_ANNOUNCE] is True + assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" + ).is_file() @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockProvider("de_DE"), MockTTSEntity("de_DE"))], + [(MockTTSProvider("de_DE"), MockTTSEntity("de_DE"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -259,22 +269,25 @@ async def test_service_default_language( ) assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_-_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / ( - f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" - ) - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / ( + f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" + ) + ).is_file() @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockProvider("en_US"), MockTTSEntity("en_US"))], + [(MockTTSProvider("en_US"), MockTTSEntity("en_US"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -320,15 +333,18 @@ async def test_service_default_special_language( ) assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_-_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" + ).is_file() @pytest.mark.parametrize( @@ -377,15 +393,18 @@ async def test_service_language( ) assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_-_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / 
f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" + ).is_file() @pytest.mark.parametrize( @@ -490,21 +509,24 @@ async def test_service_options( assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / ( - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / ( + "42f18378fd4393d18c8dd11d03fa9563c1e54491" + f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" + ) + ).is_file() -class MockProviderWithDefaults(MockProvider): +class MockProviderWithDefaults(MockTTSProvider): """Mock provider with default options.""" @property @@ -571,18 +593,21 @@ async def test_service_default_options( assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / ( - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / ( + "42f18378fd4393d18c8dd11d03fa9563c1e54491" + f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" + ) + ).is_file() @pytest.mark.parametrize( @@ -642,18 +667,21 @@ async def test_merge_default_service_options( assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / ( - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / ( + "42f18378fd4393d18c8dd11d03fa9563c1e54491" + f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" + ) + ).is_file() @pytest.mark.parametrize( @@ 
-847,7 +875,7 @@ async def test_service_receive_voice( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockProvider("de_DE"), MockTTSEntity("de_DE"))], + [(MockTTSProvider("de_DE"), MockTTSEntity("de_DE"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -1008,7 +1036,7 @@ async def test_service_without_cache( ).is_file() -class MockProviderBoom(MockProvider): +class MockProviderBoom(MockTTSProvider): """Mock provider that blows up.""" def get_tts_audio( @@ -1034,7 +1062,7 @@ class MockEntityBoom(MockTTSEntity): async def test_setup_legacy_cache_dir( hass: HomeAssistant, mock_tts_cache_dir: Path, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Set up a TTS platform with cache and call service without cache.""" calls = async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) @@ -1058,10 +1086,14 @@ async def test_setup_legacy_cache_dir( ) assert len(calls) == 1 - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_test.mp3" - ) - await hass.async_block_till_done() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() @pytest.mark.parametrize("mock_tts_entity", [MockEntityBoom(DEFAULT_LANG)]) @@ -1093,13 +1125,16 @@ async def test_setup_cache_dir( ) assert len(calls) == 1 - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" - ) - await hass.async_block_till_done() + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() -class MockProviderEmpty(MockProvider): +class MockProviderEmpty(MockTTSProvider): """Mock provider with empty get_tts_audio.""" def get_tts_audio( @@ -1169,13 +1204,13 @@ async def test_service_get_tts_error( ) -async def test_load_cache_legacy_retrieve_without_mem_cache( +async def test_legacy_cannot_retrieve_without_token( hass: HomeAssistant, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, mock_tts_cache_dir: Path, hass_client: ClientSessionGenerator, ) -> None: - """Set up component and load cache and get without mem cache.""" + """Verify that a TTS cannot be retrieved by filename directly.""" tts_data = b"" cache_file = ( mock_tts_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_test.mp3" @@ -1189,17 +1224,16 @@ async def test_load_cache_legacy_retrieve_without_mem_cache( url = "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_test.mp3" req = await client.get(url) - assert req.status == HTTPStatus.OK - assert await req.read() == tts_data + assert req.status == HTTPStatus.NOT_FOUND -async def test_load_cache_retrieve_without_mem_cache( +async def test_cannot_retrieve_without_token( hass: HomeAssistant, mock_tts_entity: MockTTSEntity, mock_tts_cache_dir: Path, hass_client: ClientSessionGenerator, ) -> None: - """Set up component and load cache and get without mem cache.""" + """Verify that a TTS cannot be retrieved by filename directly.""" tts_data = b"" cache_file = mock_tts_cache_dir / ( 
"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" @@ -1213,45 +1247,37 @@ async def test_load_cache_retrieve_without_mem_cache( url = "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" req = await client.get(url) - assert req.status == HTTPStatus.OK - assert await req.read() == tts_data + assert req.status == HTTPStatus.NOT_FOUND @pytest.mark.parametrize( - ("setup", "data", "expected_url_suffix"), + ("setup", "data"), [ - ("mock_setup", {"platform": "test"}, "test"), - ("mock_setup", {"engine_id": "test"}, "test"), - ("mock_config_entry_setup", {"engine_id": "tts.test"}, "tts.test"), + ("mock_setup", {"platform": "test"}), + ("mock_setup", {"engine_id": "test"}), + ("mock_config_entry_setup", {"engine_id": "tts.test"}), ], indirect=["setup"], ) async def test_web_get_url( - hass_client: ClientSessionGenerator, - setup: str, - data: dict[str, Any], - expected_url_suffix: str, + hass_client: ClientSessionGenerator, setup: str, data: dict[str, Any] ) -> None: """Set up a TTS platform and receive file from web.""" client = await hass_client() - url = "/api/tts_get_url" - data |= {"message": "There is someone at the door."} + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= {"message": "There is someone at the door."} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_-_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_-_{expected_url_suffix}.mp3" - ), - } + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } @pytest.mark.parametrize( @@ -1311,10 +1337,16 @@ async def test_tags_with_wave() -> None: @pytest.mark.parametrize( ("engine", "language", "options", "cache", "result_query"), [ - (None, None, None, None, ""), - (None, "de_DE", None, None, "language=de_DE"), - (None, "de_DE", {"voice": "henk"}, None, "language=de_DE&voice=henk"), - (None, "de_DE", None, True, "cache=true&language=de_DE"), + (None, None, None, None, "&tts_options=null"), + (None, "de_DE", None, None, "&language=de_DE&tts_options=null"), + ( + None, + "de_DE", + {"voice": "henk"}, + None, + "&language=de_DE&tts_options=%7B%22voice%22:%22henk%22%7D", + ), + (None, "de_DE", None, True, "&cache=true&language=de_DE&tts_options=null"), ], ) async def test_generate_media_source_id( @@ -1336,8 +1368,9 @@ async def test_generate_media_source_id( _, _, engine_query = media_source_id.rpartition("/") engine, _, query = engine_query.partition("?") assert engine == result_engine - assert query.startswith("message=msg") - assert query[12:] == result_query + query_prefix = "message=msg" + assert query.startswith(query_prefix) + assert query[len(query_prefix) :] == result_query @pytest.mark.parametrize( @@ -1419,7 +1452,7 @@ async def test_legacy_fetching_in_async( """Test async fetching of data for a legacy provider.""" tts_audio: asyncio.Future[bytes] = asyncio.Future() - class ProviderWithAsyncFetching(MockProvider): + class ProviderWithAsyncFetching(MockTTSProvider): """Provider that supports audio output option.""" @property @@ -1558,15 +1591,19 @@ async def 
test_fetching_in_async( @pytest.mark.parametrize( - ("setup", "engine_id"), + ("setup", "engine_id", "extra_data"), [ - ("mock_setup", "test"), - ("mock_config_entry_setup", "tts.test"), + ("mock_setup", "test", {"name": "Test"}), + ("mock_config_entry_setup", "tts.test", {}), ], indirect=["setup"], ) async def test_ws_list_engines( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, setup: str, engine_id: str + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup: str, + engine_id: str, + extra_data: dict[str, str], ) -> None: """Test listing tts engines and supported languages.""" client = await hass_ws_client() @@ -1581,6 +1618,7 @@ async def test_ws_list_engines( "engine_id": engine_id, "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], } + | extra_data ] } @@ -1589,7 +1627,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": []}] + "providers": [{"engine_id": engine_id, "supported_languages": []} | extra_data] } await client.send_json_auto_id({"type": "tts/engine/list", "language": "en"}) @@ -1599,6 +1637,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["en_US", "en_GB"]} + | extra_data ] } @@ -1609,6 +1648,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["en_GB", "en_US"]} + | extra_data ] } @@ -1619,6 +1659,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de_DE", "de_CH"]} + | extra_data ] } @@ -1631,20 +1672,74 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de_CH", "de_DE"]} + | extra_data + ] + } + + +async def test_ws_list_engines_deprecated( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_tts_entity: MockTTSEntity, +) -> None: + """Test listing tts engines. + + This test asserts the deprecated flag is set on a legacy engine whose integration + also provides tts entities. 
+ """ + + mock_provider = MockTTSProvider(DEFAULT_LANG) + mock_provider_2 = MockTTSProvider(DEFAULT_LANG) + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.tts", MockTTS(mock_provider)) + mock_integration(hass, MockModule(domain="test_2")) + mock_platform(hass, "test_2.tts", MockTTS(mock_provider_2)) + await async_setup_component( + hass, "tts", {"tts": [{"platform": "test"}, {"platform": "test_2"}]} + ) + await mock_config_entry_setup(hass, mock_tts_entity) + + client = await hass_ws_client() + + await client.send_json_auto_id({"type": "tts/engine/list"}) + + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == { + "providers": [ + { + "engine_id": "tts.test", + "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], + }, + { + "deprecated": True, + "engine_id": "test", + "name": "Test", + "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], + }, + { + "engine_id": "test_2", + "name": "Test", + "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], + }, ] } @pytest.mark.parametrize( - ("setup", "engine_id"), + ("setup", "engine_id", "extra_data"), [ - ("mock_setup", "test"), - ("mock_config_entry_setup", "tts.test"), + ("mock_setup", "test", {"name": "Test"}), + ("mock_config_entry_setup", "tts.test", {}), ], indirect=["setup"], ) async def test_ws_get_engine( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, setup: str, engine_id: str + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup: str, + engine_id: str, + extra_data: dict[str, str], ) -> None: """Test getting an tts engine.""" client = await hass_ws_client() @@ -1658,6 +1753,7 @@ async def test_ws_get_engine( "engine_id": engine_id, "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], } + | extra_data } @@ -1840,7 +1936,7 @@ async def test_ttsentity_subclass_properties( async def test_default_engine_prefer_entity( hass: HomeAssistant, mock_tts_entity: MockTTSEntity, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Test async_default_engine. @@ -1871,7 +1967,7 @@ async def test_default_engine_prefer_entity( ) async def test_default_engine_prefer_cloud_entity( hass: HomeAssistant, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, config_flow_test_domains: str, ) -> None: """Test async_default_engine. 
@@ -1893,3 +1989,6 @@ async def test_default_engine_prefer_cloud_entity( provider_engine = tts.async_resolve_engine(hass, "test") assert provider_engine == "test" assert tts.async_default_engine(hass) == "tts.cloud_tts_entity" + + # Reset the `cloud` translations cache to avoid flaky translation checks + reset_translation_cache(hass, ["cloud"]) diff --git a/tests/components/tts/test_legacy.py b/tests/components/tts/test_legacy.py index 0d7f99e8cd1..22e8ac35f16 100644 --- a/tests/components/tts/test_legacy.py +++ b/tests/components/tts/test_legacy.py @@ -17,7 +17,7 @@ from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from .common import SUPPORT_LANGUAGES, MockProvider, MockTTS +from .common import SUPPORT_LANGUAGES, MockTTS, MockTTSProvider from tests.common import ( MockModule, @@ -75,7 +75,9 @@ async def test_invalid_platform( async def test_platform_setup_without_provider( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_provider: MockProvider + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_provider: MockTTSProvider, ) -> None: """Test platform setup without provider returned.""" @@ -109,7 +111,7 @@ async def test_platform_setup_without_provider( async def test_platform_setup_with_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Test platform setup with an error during setup.""" diff --git a/tests/components/tts/test_media_source.py b/tests/components/tts/test_media_source.py index 4c10d8f0b08..d90923b02ab 100644 --- a/tests/components/tts/test_media_source.py +++ b/tests/components/tts/test_media_source.py @@ -1,19 +1,25 @@ """Tests for TTS media source.""" from http import HTTPStatus +import re from unittest.mock import MagicMock import pytest from homeassistant.components import media_source -from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_player import BrowseError +from homeassistant.components.tts.media_source import ( + MediaSourceOptions, + generate_media_source_id, + media_source_id_to_kwargs, +) from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from .common import ( DEFAULT_LANG, - MockProvider, MockTTSEntity, + MockTTSProvider, mock_config_entry_setup, mock_setup, retrieve_media, @@ -28,7 +34,7 @@ class MSEntity(MockTTSEntity): get_tts_audio = MagicMock(return_value=("mp3", b"")) -class MSProvider(MockProvider): +class MSProvider(MockTTSProvider): """Test speech API provider.""" get_tts_audio = MagicMock(return_value=("mp3", b"")) @@ -92,14 +98,24 @@ async def test_browsing(hass: HomeAssistant, setup: str) -> None: await media_source.async_browse_media(hass, "media-source://tts/non-existing") -@pytest.mark.parametrize("mock_provider", [MSProvider(DEFAULT_LANG)]) +@pytest.mark.parametrize( + ("mock_provider", "extra_options"), + [ + (MSProvider(DEFAULT_LANG), "&tts_options=%7B%22voice%22%3A%22Paulus%22%7D"), + (MSProvider(DEFAULT_LANG), "&voice=Paulus"), + ], +) async def test_legacy_resolving( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_provider: MSProvider + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_provider: MSProvider, + extra_options: str, ) -> None: """Test resolving legacy provider.""" await mock_setup(hass, mock_provider) mock_get_tts_audio = 
mock_provider.get_tts_audio + mock_get_tts_audio.reset_mock() media_id = "media-source://tts/test?message=Hello%20World" media = await media_source.async_resolve_media(hass, media_id, None) assert media.url.startswith("/api/tts_proxy/") @@ -114,7 +130,9 @@ async def test_legacy_resolving( # Pass language and options mock_get_tts_audio.reset_mock() - media_id = "media-source://tts/test?message=Bye%20World&language=de_DE&voice=Paulus" + media_id = ( + f"media-source://tts/test?message=Bye%20World&language=de_DE{extra_options}" + ) media = await media_source.async_resolve_media(hass, media_id, None) assert media.url.startswith("/api/tts_proxy/") assert media.mime_type == "audio/mpeg" @@ -127,14 +145,24 @@ async def test_legacy_resolving( assert mock_get_tts_audio.mock_calls[0][2]["options"] == {"voice": "Paulus"} -@pytest.mark.parametrize("mock_tts_entity", [MSEntity(DEFAULT_LANG)]) +@pytest.mark.parametrize( + ("mock_tts_entity", "extra_options"), + [ + (MSEntity(DEFAULT_LANG), "&tts_options=%7B%22voice%22%3A%22Paulus%22%7D"), + (MSEntity(DEFAULT_LANG), "&voice=Paulus"), + ], +) async def test_resolving( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts_entity: MSEntity + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_tts_entity: MSEntity, + extra_options: str, ) -> None: """Test resolving entity.""" await mock_config_entry_setup(hass, mock_tts_entity) mock_get_tts_audio = mock_tts_entity.get_tts_audio + mock_get_tts_audio.reset_mock() media_id = "media-source://tts/tts.test?message=Hello%20World" media = await media_source.async_resolve_media(hass, media_id, None) assert media.url.startswith("/api/tts_proxy/") @@ -150,7 +178,7 @@ async def test_resolving( # Pass language and options mock_get_tts_audio.reset_mock() media_id = ( - "media-source://tts/tts.test?message=Bye%20World&language=de_DE&voice=Paulus" + f"media-source://tts/tts.test?message=Bye%20World&language=de_DE{extra_options}" ) media = await media_source.async_resolve_media(hass, media_id, None) assert media.url.startswith("/api/tts_proxy/") @@ -169,29 +197,108 @@ async def test_resolving( [(MSProvider(DEFAULT_LANG), MSEntity(DEFAULT_LANG))], ) @pytest.mark.parametrize( - "setup", + ("setup", "engine"), [ - "mock_setup", - "mock_config_entry_setup", + ("mock_setup", "test"), + ("mock_config_entry_setup", "tts.test"), ], indirect=["setup"], ) -async def test_resolving_errors(hass: HomeAssistant, setup: str) -> None: +async def test_resolving_errors(hass: HomeAssistant, setup: str, engine: str) -> None: """Test resolving.""" # No message added with pytest.raises(media_source.Unresolvable): await media_source.async_resolve_media(hass, "media-source://tts/test", None) # Non-existing provider - with pytest.raises(media_source.Unresolvable): + with pytest.raises( + media_source.Unresolvable, match="Provider non-existing not found" + ): await media_source.async_resolve_media( hass, "media-source://tts/non-existing?message=bla", None ) - # Non-existing option - with pytest.raises(media_source.Unresolvable): + # Non-JSON tts options + with pytest.raises( + media_source.Unresolvable, + match="Invalid TTS options: Expecting property name enclosed in double quotes", + ): await media_source.async_resolve_media( hass, - "media-source://tts/non-existing?message=bla&non_existing_option=bla", + f"media-source://tts/{engine}?message=bla&tts_options=%7Binvalid json", None, ) + + # Non-existing option + with pytest.raises( + media_source.Unresolvable, + match=re.escape("Invalid options found: 
['non_existing_option']"), + ): + await media_source.async_resolve_media( + hass, + f"media-source://tts/{engine}?message=bla&tts_options=%7B%22non_existing_option%22%3A%22bla%22%7D", + None, + ) + + +@pytest.mark.parametrize( + ("setup", "result_engine"), + [ + ("mock_setup", "test"), + ("mock_config_entry_setup", "tts.test"), + ], + indirect=["setup"], +) +async def test_generate_media_source_id_and_media_source_id_to_kwargs( + hass: HomeAssistant, + setup: str, + result_engine: str, +) -> None: + """Test media_source_id and media_source_id_to_kwargs.""" + kwargs: MediaSourceOptions = { + "engine": None, + "message": "hello", + "language": "en_US", + "options": {"age": 5}, + "cache": True, + } + media_source_id = generate_media_source_id(hass, **kwargs) + assert media_source_id_to_kwargs(media_source_id) == { + "engine": result_engine, + "message": "hello", + "language": "en_US", + "options": {"age": 5}, + "cache": True, + } + + kwargs = { + "engine": None, + "message": "hello", + "language": "en_US", + "options": {"age": [5, 6]}, + "cache": True, + } + media_source_id = generate_media_source_id(hass, **kwargs) + assert media_source_id_to_kwargs(media_source_id) == { + "engine": result_engine, + "message": "hello", + "language": "en_US", + "options": {"age": [5, 6]}, + "cache": True, + } + + kwargs = { + "engine": None, + "message": "hello", + "language": "en_US", + "options": {"age": {"k1": [5, 6], "k2": "v2"}}, + "cache": True, + } + media_source_id = generate_media_source_id(hass, **kwargs) + assert media_source_id_to_kwargs(media_source_id) == { + "engine": result_engine, + "message": "hello", + "language": "en_US", + "options": {"age": {"k1": [5, 6], "k2": "v2"}}, + "cache": True, + } diff --git a/tests/components/tts/test_notify.py b/tests/components/tts/test_notify.py index 07ba2f2f3f5..00cdae2934f 100644 --- a/tests/components/tts/test_notify.py +++ b/tests/components/tts/test_notify.py @@ -9,8 +9,8 @@ from homeassistant.components.media_player import ( DOMAIN as DOMAIN_MP, SERVICE_PLAY_MEDIA, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from .common import MockTTSEntity, mock_config_entry_setup diff --git a/tests/components/tuya/snapshots/test_config_flow.ambr b/tests/components/tuya/snapshots/test_config_flow.ambr index 416a656c238..90d83d69814 100644 --- a/tests/components/tuya/snapshots/test_config_flow.ambr +++ b/tests/components/tuya/snapshots/test_config_flow.ambr @@ -14,6 +14,8 @@ 'user_code': '12345', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'tuya', 'entry_id': , 'minor_version': 1, @@ -22,6 +24,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '12345', 'unique_id': '12345', 'version': 1, @@ -42,6 +46,8 @@ 'user_code': '12345', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'tuya', 'entry_id': , 'minor_version': 1, @@ -50,6 +56,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Old Tuya configuration entry', 'unique_id': '12345', 'version': 1, @@ -93,6 +101,8 @@ 'user_code': '12345', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'tuya', 'entry_id': , 'minor_version': 1, @@ -101,10 +111,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 
'source': 'user', + 'subentries': list([ + ]), 'title': 'mocked_username', 'unique_id': None, 'version': 1, }), + 'subentries': tuple( + ), 'title': 'mocked_username', 'type': , 'version': 1, diff --git a/tests/components/tuya/test_config_flow.py b/tests/components/tuya/test_config_flow.py index 6e971262bc8..247aec02cd1 100644 --- a/tests/components/tuya/test_config_flow.py +++ b/tests/components/tuya/test_config_flow.py @@ -8,7 +8,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.tuya.const import CONF_APP_TYPE, CONF_USER_CODE, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -145,15 +145,7 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "scan" @@ -185,15 +177,7 @@ async def test_reauth_flow_migration( assert CONF_APP_TYPE in mock_old_config_entry.data assert CONF_USER_CODE not in mock_old_config_entry.data - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_old_config_entry.unique_id, - "entry_id": mock_old_config_entry.entry_id, - }, - data=mock_old_config_entry.data, - ) + result = await mock_old_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_user_code" @@ -229,15 +213,7 @@ async def test_reauth_flow_failed_qr_code( """Test an error occurring while retrieving the QR code.""" mock_old_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_old_config_entry.unique_id, - "entry_id": mock_old_config_entry.entry_id, - }, - data=mock_old_config_entry.data, - ) + result = await mock_old_config_entry.start_reauth_flow(hass) # Something went wrong getting the QR code (like an invalid user code) mock_tuya_login_control.qr_code.return_value["success"] = False diff --git a/tests/components/twentemilieu/conftest.py b/tests/components/twentemilieu/conftest.py index 7ecf1657ce9..e3e3c97034c 100644 --- a/tests/components/twentemilieu/conftest.py +++ b/tests/components/twentemilieu/conftest.py @@ -51,7 +51,8 @@ def mock_twentemilieu() -> Generator[MagicMock]: """Return a mocked Twente Milieu client.""" with ( patch( - "homeassistant.components.twentemilieu.TwenteMilieu", autospec=True + "homeassistant.components.twentemilieu.coordinator.TwenteMilieu", + autospec=True, ) as twentemilieu_mock, patch( "homeassistant.components.twentemilieu.config_flow.TwenteMilieu", diff --git a/tests/components/twentemilieu/snapshots/test_config_flow.ambr b/tests/components/twentemilieu/snapshots/test_config_flow.ambr deleted file mode 100644 index 00b96062052..00000000000 --- a/tests/components/twentemilieu/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,89 +0,0 @@ -# serializer version: 1 -# name: test_full_user_flow - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': '12345', - }), - 
'data': dict({ - 'house_letter': 'A', - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'twentemilieu', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'house_letter': 'A', - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'disabled_by': None, - 'domain': 'twentemilieu', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': '12345', - 'unique_id': '12345', - 'version': 1, - }), - 'title': '12345', - 'type': , - 'version': 1, - }) -# --- -# name: test_invalid_address - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': '12345', - }), - 'data': dict({ - 'house_letter': None, - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'twentemilieu', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'house_letter': None, - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'disabled_by': None, - 'domain': 'twentemilieu', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': '12345', - 'unique_id': '12345', - 'version': 1, - }), - 'title': '12345', - 'type': , - 'version': 1, - }) -# --- diff --git a/tests/components/twentemilieu/test_config_flow.py b/tests/components/twentemilieu/test_config_flow.py index dbc01c69acb..6dc261b8769 100644 --- a/tests/components/twentemilieu/test_config_flow.py +++ b/tests/components/twentemilieu/test_config_flow.py @@ -3,7 +3,6 @@ from unittest.mock import MagicMock import pytest -from syrupy.assertion import SnapshotAssertion from twentemilieu import TwenteMilieuAddressError, TwenteMilieuConnectionError from homeassistant import config_entries @@ -15,6 +14,7 @@ from homeassistant.components.twentemilieu.const import ( DOMAIN, ) from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -24,16 +24,16 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.usefixtures("mock_twentemilieu") -async def test_full_user_flow(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: +async def test_full_user_flow(hass: HomeAssistant) -> None: """Test registering an integration and finishing flow works.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_POST_CODE: "1234AB", @@ -42,14 +42,22 @@ async def test_full_user_flow(hass: HomeAssistant, snapshot: SnapshotAssertion) }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "12345" + assert config_entry.data == { + CONF_HOUSE_LETTER: "A", + 
CONF_HOUSE_NUMBER: "1", + CONF_ID: 12345, + CONF_POST_CODE: "1234AB", + } + assert not config_entry.options async def test_invalid_address( hass: HomeAssistant, mock_twentemilieu: MagicMock, - snapshot: SnapshotAssertion, ) -> None: """Test full user flow when the user enters an incorrect address. @@ -60,11 +68,11 @@ async def test_invalid_address( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" mock_twentemilieu.unique_id.side_effect = TwenteMilieuAddressError - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_POST_CODE: "1234", @@ -72,12 +80,12 @@ async def test_invalid_address( }, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "user" - assert result2.get("errors") == {"base": "invalid_address"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "invalid_address"} mock_twentemilieu.unique_id.side_effect = None - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_POST_CODE: "1234AB", @@ -85,8 +93,17 @@ async def test_invalid_address( }, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "12345" + assert config_entry.data == { + CONF_HOUSE_LETTER: None, + CONF_HOUSE_NUMBER: "1", + CONF_ID: 12345, + CONF_POST_CODE: "1234AB", + } + assert not config_entry.options async def test_connection_error( @@ -106,9 +123,33 @@ async def test_connection_error( }, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert result.get("errors") == {"base": "cannot_connect"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + + # Recover from error + mock_twentemilieu.unique_id.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_POST_CODE: "1234AB", + CONF_HOUSE_NUMBER: "1", + CONF_HOUSE_LETTER: "A", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "12345" + assert config_entry.data == { + CONF_HOUSE_LETTER: "A", + CONF_HOUSE_NUMBER: "1", + CONF_ID: 12345, + CONF_POST_CODE: "1234AB", + } + assert not config_entry.options @pytest.mark.usefixtures("mock_twentemilieu") @@ -128,5 +169,5 @@ async def test_address_already_set_up( }, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/twentemilieu/test_init.py b/tests/components/twentemilieu/test_init.py index d4c519d6f66..5cc09e6875d 100644 --- a/tests/components/twentemilieu/test_init.py +++ b/tests/components/twentemilieu/test_init.py @@ -29,7 +29,7 @@ async def test_load_unload_config_entry( @patch( - "homeassistant.components.twentemilieu.TwenteMilieu.update", + "homeassistant.components.twentemilieu.coordinator.TwenteMilieu.update", 
side_effect=RuntimeError, ) async def test_config_entry_not_ready( @@ -44,18 +44,3 @@ async def test_config_entry_not_ready( assert mock_request.call_count == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -@pytest.mark.usefixtures("mock_twentemilieu") -async def test_update_config_entry_unique_id( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the we update old config entries with an unique ID.""" - mock_config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry(mock_config_entry, unique_id=None) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.unique_id == "12345" diff --git a/tests/components/twilio/test_init.py b/tests/components/twilio/test_init.py index 8efa1c24742..9c07bd6f3d8 100644 --- a/tests/components/twilio/test_init.py +++ b/tests/components/twilio/test_init.py @@ -2,8 +2,8 @@ from homeassistant import config_entries from homeassistant.components import twilio -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from tests.typing import ClientSessionGenerator diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index 0601159ca4c..e52f76634fd 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -27,6 +27,8 @@ 'name': 'twinkly_test_device_name', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'twinkly', 'entry_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', 'minor_version': 1, @@ -35,6 +37,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Twinkly', 'unique_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', 'version': 1, diff --git a/tests/components/twitch/__init__.py b/tests/components/twitch/__init__.py index 2d70aaf9649..1887861f6e5 100644 --- a/tests/components/twitch/__init__.py +++ b/tests/components/twitch/__init__.py @@ -5,7 +5,7 @@ from typing import Any, Generic, TypeVar from twitchAPI.object.base import TwitchObject -from homeassistant.components.twitch import DOMAIN +from homeassistant.components.twitch.const import DOMAIN from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_json_array_fixture diff --git a/tests/components/twitch/conftest.py b/tests/components/twitch/conftest.py index 25e443c2778..07732de1b0c 100644 --- a/tests/components/twitch/conftest.py +++ b/tests/components/twitch/conftest.py @@ -111,8 +111,8 @@ def twitch_mock() -> Generator[AsyncMock]: mock_client.return_value.get_followed_channels.return_value = TwitchIterObject( "get_followed_channels.json", FollowedChannel ) - mock_client.return_value.get_streams.return_value = get_generator( - "get_streams.json", Stream + mock_client.return_value.get_followed_streams.return_value = get_generator( + "get_followed_streams.json", Stream ) mock_client.return_value.check_user_subscription.return_value = ( UserSubscription( diff --git a/tests/components/twitch/fixtures/check_user_subscription.json b/tests/components/twitch/fixtures/check_user_subscription.json index b1b2a3d852a..5e710b72699 100644 --- a/tests/components/twitch/fixtures/check_user_subscription.json +++ 
b/tests/components/twitch/fixtures/check_user_subscription.json @@ -1,3 +1,4 @@ { - "is_gift": true + "is_gift": true, + "tier": "2000" } diff --git a/tests/components/twitch/fixtures/check_user_subscription_2.json b/tests/components/twitch/fixtures/check_user_subscription_2.json index 94d56c5ee12..38a1f063f96 100644 --- a/tests/components/twitch/fixtures/check_user_subscription_2.json +++ b/tests/components/twitch/fixtures/check_user_subscription_2.json @@ -1,3 +1,4 @@ { - "is_gift": false + "is_gift": false, + "tier": "1000" } diff --git a/tests/components/twitch/fixtures/get_followed_channels.json b/tests/components/twitch/fixtures/get_followed_channels.json index 4add7cc0a98..990fac390e9 100644 --- a/tests/components/twitch/fixtures/get_followed_channels.json +++ b/tests/components/twitch/fixtures/get_followed_channels.json @@ -1,9 +1,11 @@ [ { + "broadcaster_id": 123, "broadcaster_login": "internetofthings", "followed_at": "2023-08-01" }, { + "broadcaster_id": 456, "broadcaster_login": "homeassistant", "followed_at": "2023-08-01" } diff --git a/tests/components/twitch/fixtures/get_streams.json b/tests/components/twitch/fixtures/get_followed_streams.json similarity index 55% rename from tests/components/twitch/fixtures/get_streams.json rename to tests/components/twitch/fixtures/get_followed_streams.json index 53330c9c82e..e02c594c4cc 100644 --- a/tests/components/twitch/fixtures/get_streams.json +++ b/tests/components/twitch/fixtures/get_followed_streams.json @@ -1,8 +1,10 @@ [ { + "user_id": 123, "game_name": "Good game", "title": "Title", "thumbnail_url": "stream-medium.png", - "started_at": "2021-03-10T03:18:11Z" + "started_at": "2021-03-10T03:18:11Z", + "viewer_count": 42 } ] diff --git a/tests/components/twitch/test_config_flow.py b/tests/components/twitch/test_config_flow.py index 6935943a4d3..fc53b17551c 100644 --- a/tests/components/twitch/test_config_flow.py +++ b/tests/components/twitch/test_config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components.twitch.const import ( DOMAIN, OAUTH2_AUTHORIZE, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -109,14 +109,7 @@ async def test_reauth( ) -> None: """Check reauth flow.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -184,14 +177,7 @@ async def test_reauth_wrong_account( twitch_mock.return_value.get_users = lambda *args, **kwargs: get_generator( "get_users_2.json", TwitchUser ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/twitch/test_sensor.py b/tests/components/twitch/test_sensor.py index 8ce146adf07..c8cc009f3e1 100644 --- a/tests/components/twitch/test_sensor.py +++ b/tests/components/twitch/test_sensor.py @@ -7,7 +7,7 @@ from dateutil.tz import tzutc from 
twitchAPI.object.api import FollowedChannel, Stream, UserSubscription from twitchAPI.type import TwitchResourceNotFound -from homeassistant.components.twitch import DOMAIN +from homeassistant.components.twitch.const import DOMAIN from homeassistant.core import HomeAssistant from . import TwitchIterObject, get_generator_from_data, setup_integration @@ -21,8 +21,8 @@ async def test_offline( hass: HomeAssistant, twitch_mock: AsyncMock, config_entry: MockConfigEntry ) -> None: """Test offline state.""" - twitch_mock.return_value.get_streams.return_value = get_generator_from_data( - [], Stream + twitch_mock.return_value.get_followed_streams.return_value = ( + get_generator_from_data([], Stream) ) await setup_integration(hass, config_entry) @@ -45,6 +45,7 @@ async def test_streaming( assert sensor_state.attributes["started_at"] == datetime( year=2021, month=3, day=10, hour=3, minute=18, second=11, tzinfo=tzutc() ) + assert sensor_state.attributes["viewers"] == 42 async def test_oauth_without_sub_and_follow( @@ -79,6 +80,7 @@ async def test_oauth_with_sub( sensor_state = hass.states.get(ENTITY_ID) assert sensor_state.attributes["subscribed"] is True assert sensor_state.attributes["subscription_is_gifted"] is False + assert sensor_state.attributes["subscription_tier"] == 1 assert sensor_state.attributes["following"] is False diff --git a/tests/components/unifi/snapshots/test_button.ambr b/tests/components/unifi/snapshots/test_button.ambr index de305aee7eb..3729bd31cf0 100644 --- a/tests/components/unifi/snapshots/test_button.ambr +++ b/tests/components/unifi/snapshots/test_button.ambr @@ -27,7 +27,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'wlan_regenerate_password', 'unique_id': 'regenerate_password-012345678910111213141516', 'unit_of_measurement': None, }) diff --git a/tests/components/unifi/snapshots/test_diagnostics.ambr b/tests/components/unifi/snapshots/test_diagnostics.ambr index fb7415c59ab..aa7337be0ba 100644 --- a/tests/components/unifi/snapshots/test_diagnostics.ambr +++ b/tests/components/unifi/snapshots/test_diagnostics.ambr @@ -27,6 +27,8 @@ 'verify_ssl': False, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'unifi', 'entry_id': '1', 'minor_version': 1, @@ -40,6 +42,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '1', 'version': 1, diff --git a/tests/components/unifi/snapshots/test_image.ambr b/tests/components/unifi/snapshots/test_image.ambr index 0922320ed4d..32e1a5ff622 100644 --- a/tests/components/unifi/snapshots/test_image.ambr +++ b/tests/components/unifi/snapshots/test_image.ambr @@ -27,7 +27,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'wlan_qr_code', 'unique_id': 'qr_code-012345678910111213141516', 'unit_of_measurement': None, }) diff --git a/tests/components/unifi/snapshots/test_sensor.ambr b/tests/components/unifi/snapshots/test_sensor.ambr index 3053f69d616..fc86a57a294 100644 --- a/tests/components/unifi/snapshots/test_sensor.ambr +++ b/tests/components/unifi/snapshots/test_sensor.ambr @@ -29,7 +29,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'device_clients', 'unique_id': 'device_clients-20:00:00:00:01:01', 'unit_of_measurement': None, }) @@ -92,7 +92,7 @@ 'platform': 'unifi', 'previous_unique_id': 
None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'device_state', 'unique_id': 'device_state-20:00:00:00:01:01', 'unit_of_measurement': None, }) @@ -359,7 +359,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'device_clients', 'unique_id': 'device_clients-01:02:03:04:05:ff', 'unit_of_measurement': None, }) @@ -408,7 +408,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'device_cpu_utilization', 'unique_id': 'cpu_utilization-01:02:03:04:05:ff', 'unit_of_measurement': '%', }) @@ -458,7 +458,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'device_memory_utilization', 'unique_id': 'memory_utilization-01:02:03:04:05:ff', 'unit_of_measurement': '%', }) @@ -573,7 +573,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'device_state', 'unique_id': 'device_state-01:02:03:04:05:ff', 'unit_of_measurement': None, }) @@ -684,7 +684,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'device_clients', 'unique_id': 'device_clients-10:00:00:00:01:01', 'unit_of_measurement': None, }) @@ -1088,12 +1088,12 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:download', + 'original_icon': None, 'original_name': 'Port 1 RX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'port_bandwidth_rx', 'unique_id': 'port_rx-10:00:00:00:01:01_1', 'unit_of_measurement': , }) @@ -1103,7 +1103,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'mock-name Port 1 RX', - 'icon': 'mdi:download', 'state_class': , 'unit_of_measurement': , }), @@ -1143,12 +1142,12 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:upload', + 'original_icon': None, 'original_name': 'Port 1 TX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'port_bandwidth_tx', 'unique_id': 'port_tx-10:00:00:00:01:01_1', 'unit_of_measurement': , }) @@ -1158,7 +1157,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'mock-name Port 1 TX', - 'icon': 'mdi:upload', 'state_class': , 'unit_of_measurement': , }), @@ -1249,12 +1247,12 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:download', + 'original_icon': None, 'original_name': 'Port 2 RX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'port_bandwidth_rx', 'unique_id': 'port_rx-10:00:00:00:01:01_2', 'unit_of_measurement': , }) @@ -1264,7 +1262,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'mock-name Port 2 RX', - 'icon': 'mdi:download', 'state_class': , 'unit_of_measurement': , }), @@ -1304,12 +1301,12 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:upload', + 'original_icon': None, 'original_name': 'Port 2 TX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'port_bandwidth_tx', 'unique_id': 'port_tx-10:00:00:00:01:01_2', 'unit_of_measurement': , }) @@ -1319,7 +1316,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'mock-name Port 2 TX', - 'icon': 
'mdi:upload', 'state_class': , 'unit_of_measurement': , }), @@ -1359,12 +1355,12 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:download', + 'original_icon': None, 'original_name': 'Port 3 RX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'port_bandwidth_rx', 'unique_id': 'port_rx-10:00:00:00:01:01_3', 'unit_of_measurement': , }) @@ -1374,7 +1370,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'mock-name Port 3 RX', - 'icon': 'mdi:download', 'state_class': , 'unit_of_measurement': , }), @@ -1414,12 +1409,12 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:upload', + 'original_icon': None, 'original_name': 'Port 3 TX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'port_bandwidth_tx', 'unique_id': 'port_tx-10:00:00:00:01:01_3', 'unit_of_measurement': , }) @@ -1429,7 +1424,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'mock-name Port 3 TX', - 'icon': 'mdi:upload', 'state_class': , 'unit_of_measurement': , }), @@ -1520,12 +1514,12 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:download', + 'original_icon': None, 'original_name': 'Port 4 RX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'port_bandwidth_rx', 'unique_id': 'port_rx-10:00:00:00:01:01_4', 'unit_of_measurement': , }) @@ -1535,7 +1529,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'mock-name Port 4 RX', - 'icon': 'mdi:download', 'state_class': , 'unit_of_measurement': , }), @@ -1575,12 +1568,12 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:upload', + 'original_icon': None, 'original_name': 'Port 4 TX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'port_bandwidth_tx', 'unique_id': 'port_tx-10:00:00:00:01:01_4', 'unit_of_measurement': , }) @@ -1590,7 +1583,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'mock-name Port 4 TX', - 'icon': 'mdi:upload', 'state_class': , 'unit_of_measurement': , }), @@ -1646,7 +1638,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'device_state', 'unique_id': 'device_state-10:00:00:00:01:01', 'unit_of_measurement': None, }) @@ -1757,7 +1749,7 @@ 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'wlan_clients', 'unique_id': 'wlan_clients-012345678910111213141516', 'unit_of_measurement': None, }) @@ -1801,12 +1793,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:upload', + 'original_icon': None, 'original_name': 'RX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'client_bandwidth_rx', 'unique_id': 'rx-00:00:00:00:00:01', 'unit_of_measurement': , }) @@ -1816,7 +1808,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'Wired client RX', - 'icon': 'mdi:upload', 'state_class': , 'unit_of_measurement': , }), @@ -1853,12 +1844,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:download', + 'original_icon': None, 'original_name': 'TX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 
'translation_key': None, + 'translation_key': 'client_bandwidth_tx', 'unique_id': 'tx-00:00:00:00:00:01', 'unit_of_measurement': , }) @@ -1868,7 +1859,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'Wired client TX', - 'icon': 'mdi:download', 'state_class': , 'unit_of_measurement': , }), @@ -1952,12 +1942,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:upload', + 'original_icon': None, 'original_name': 'RX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'client_bandwidth_rx', 'unique_id': 'rx-00:00:00:00:00:02', 'unit_of_measurement': , }) @@ -1967,7 +1957,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'Wireless client RX', - 'icon': 'mdi:upload', 'state_class': , 'unit_of_measurement': , }), @@ -2004,12 +1993,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:download', + 'original_icon': None, 'original_name': 'TX', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'client_bandwidth_tx', 'unique_id': 'tx-00:00:00:00:00:02', 'unit_of_measurement': , }) @@ -2019,7 +2008,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', 'friendly_name': 'Wireless client TX', - 'icon': 'mdi:download', 'state_class': , 'unit_of_measurement': , }), diff --git a/tests/components/unifi/snapshots/test_switch.ambr b/tests/components/unifi/snapshots/test_switch.ambr index 04b15f329fd..45e6188a3f4 100644 --- a/tests/components/unifi/snapshots/test_switch.ambr +++ b/tests/components/unifi/snapshots/test_switch.ambr @@ -1,1952 +1,4 @@ # serializer version: 1 -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_name_port_1_power_cycle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 1 Power Cycle', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'power_cycle-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'mock-name Port 1 Power Cycle', - }), - 'context': , - 'entity_id': 'button.mock_name_port_1_power_cycle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_name_port_2_power_cycle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 
'original_icon': None, - 'original_name': 'Port 2 Power Cycle', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'power_cycle-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'mock-name Port 2 Power Cycle', - }), - 'context': , - 'entity_id': 'button.mock_name_port_2_power_cycle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_name_port_4_power_cycle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 4 Power Cycle', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'power_cycle-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'mock-name Port 4 Power Cycle', - }), - 'context': , - 'entity_id': 'button.mock_name_port_4_power_cycle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_name_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'device_restart-10:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'mock-name Restart', - }), - 'context': , - 'entity_id': 'button.mock_name_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 
'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 
'entity_category': , - 'entity_id': 'switch.block_media_streaming', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:network', - 'original_name': 'Block Media Streaming', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '5f976f4ae3c58f018ec7dff6', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Block Media Streaming', - 'icon': 'mdi:network', - }), - 'context': , - 'entity_id': 'switch.block_media_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'USB Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.plug_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Plug Outlet 1', - }), - 'context': , - 'entity_id': 'switch.plug_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_media_streaming', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:network', - 'original_name': 'Block Media Streaming', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '5f976f4ae3c58f018ec7dff6', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Block Media Streaming', - 'icon': 'mdi:network', - }), - 'context': , - 'entity_id': 'switch.block_media_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2', - 'platform': 'unifi', - 'previous_unique_id': 
None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'USB Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.plug_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Plug Outlet 1', - }), - 'context': , - 'entity_id': 'switch.plug_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 
'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.ssid_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:wifi-check', - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'wlan-012345678910111213141516', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'SSID 1', - 'icon': 'mdi:wifi-check', - }), - 'context': , - 'entity_id': 'switch.ssid_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_client_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'block-00:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Block Client 1', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.block_client_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_media_streaming', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:network', - 'original_name': 'Block Media Streaming', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 
'5f976f4ae3c58f018ec7dff6', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Block Media Streaming', - 'icon': 'mdi:network', - }), - 'context': , - 'entity_id': 'switch.block_media_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'USB Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 
'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.plug_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Plug Outlet 1', - }), - 'context': , - 'entity_id': 'switch.plug_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.ssid_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:wifi-check', - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'wlan-012345678910111213141516', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'SSID 1', - 'icon': 'mdi:wifi-check', - }), - 'context': , - 'entity_id': 'switch.ssid_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.unifi_network_plex', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:upload-network', - 'original_name': 'plex', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'UniFi Network plex', - 'icon': 'mdi:upload-network', - }), - 'context': , - 'entity_id': 'switch.unifi_network_plex', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_media_streaming', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:network', - 'original_name': 'Block Media Streaming', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '5f976f4ae3c58f018ec7dff6', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Block Media Streaming', - 'icon': 'mdi:network', - }), - 'context': , - 'entity_id': 'switch.block_media_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2', - 'platform': 'unifi', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'USB Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.plug_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Plug Outlet 1', - }), - 'context': , - 'entity_id': 'switch.plug_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.ssid_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:wifi-check', - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'wlan-012345678910111213141516', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'SSID 1', - 'icon': 'mdi:wifi-check', - }), - 'context': , - 'entity_id': 'switch.ssid_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.unifi_network_plex', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:upload-network', - 'original_name': 'plex', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'UniFi Network plex', - 'icon': 'mdi:upload-network', - }), - 'context': , - 'entity_id': 'switch.unifi_network_plex', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1970,12 +22,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:ethernet', + 'original_icon': None, 'original_name': None, 'platform': 'unifi', 
'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'block_client', 'unique_id': 'block-00:00:00:00:01:01', 'unit_of_measurement': None, }) @@ -1985,7 +37,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'switch', 'friendly_name': 'Block Client 1', - 'icon': 'mdi:ethernet', }), 'context': , 'entity_id': 'switch.block_client_1', @@ -2018,12 +69,12 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:network', + 'original_icon': None, 'original_name': 'Block Media Streaming', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'dpi_restriction', 'unique_id': '5f976f4ae3c58f018ec7dff6', 'unit_of_measurement': None, }) @@ -2032,7 +83,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Block Media Streaming', - 'icon': 'mdi:network', }), 'context': , 'entity_id': 'switch.block_media_streaming', @@ -2159,12 +209,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:ethernet', + 'original_icon': None, 'original_name': 'Port 1 PoE', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'poe_port_control', 'unique_id': 'poe-10:00:00:00:01:01_1', 'unit_of_measurement': None, }) @@ -2174,7 +224,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'outlet', 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', }), 'context': , 'entity_id': 'switch.mock_name_port_1_poe', @@ -2207,12 +256,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:ethernet', + 'original_icon': None, 'original_name': 'Port 2 PoE', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'poe_port_control', 'unique_id': 'poe-10:00:00:00:01:01_2', 'unit_of_measurement': None, }) @@ -2222,7 +271,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'outlet', 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', }), 'context': , 'entity_id': 'switch.mock_name_port_2_poe', @@ -2255,12 +303,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:ethernet', + 'original_icon': None, 'original_name': 'Port 4 PoE', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'poe_port_control', 'unique_id': 'poe-10:00:00:00:01:01_4', 'unit_of_measurement': None, }) @@ -2270,7 +318,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'outlet', 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', }), 'context': , 'entity_id': 'switch.mock_name_port_4_poe', @@ -2350,12 +397,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:wifi-check', + 'original_icon': None, 'original_name': None, 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'wlan_control', 'unique_id': 'wlan-012345678910111213141516', 'unit_of_measurement': None, }) @@ -2365,7 +412,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'switch', 'friendly_name': 'SSID 1', - 'icon': 'mdi:wifi-check', }), 'context': , 'entity_id': 'switch.ssid_1', @@ -2398,12 +444,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:upload-network', + 'original_icon': None, 'original_name': 'plex', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 
'port_forward_control', 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', 'unit_of_measurement': None, }) @@ -2413,7 +459,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'switch', 'friendly_name': 'UniFi Network plex', - 'icon': 'mdi:upload-network', }), 'context': , 'entity_id': 'switch.unifi_network_plex', @@ -2446,12 +491,12 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:security-network', + 'original_icon': None, 'original_name': 'Test Traffic Rule', 'platform': 'unifi', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'traffic_rule_control', 'unique_id': 'traffic_rule-6452cd9b859d5b11aa002ea1', 'unit_of_measurement': None, }) @@ -2461,7 +506,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'switch', 'friendly_name': 'UniFi Network Test Traffic Rule', - 'icon': 'mdi:security-network', }), 'context': , 'entity_id': 'switch.unifi_network_test_traffic_rule', diff --git a/tests/components/unifi/snapshots/test_update.ambr b/tests/components/unifi/snapshots/test_update.ambr index 99a403a8f21..405cb9d52a6 100644 --- a/tests/components/unifi/snapshots/test_update.ambr +++ b/tests/components/unifi/snapshots/test_update.ambr @@ -37,6 +37,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', 'friendly_name': 'Device 1', 'in_progress': False, @@ -47,6 +48,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.device_1', @@ -94,6 +96,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', 'friendly_name': 'Device 2', 'in_progress': False, @@ -104,6 +107,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.device_2', @@ -151,6 +155,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', 'friendly_name': 'Device 1', 'in_progress': False, @@ -161,6 +166,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.device_1', @@ -208,6 +214,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', 'friendly_name': 'Device 2', 'in_progress': False, @@ -218,6 +225,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.device_2', diff --git a/tests/components/unifi/test_config_flow.py b/tests/components/unifi/test_config_flow.py index 1d745511dc5..71b196550da 100644 --- a/tests/components/unifi/test_config_flow.py +++ b/tests/components/unifi/test_config_flow.py @@ -24,7 +24,6 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_WIRED_CLIENTS, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -302,15 +301,7 @@ async def test_reauth_flow_update_configuration( """Verify reauth flow can update hub configuration.""" config_entry = config_entry_setup - result = await hass.config_entries.flow.async_init( - 
UNIFI_DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -344,15 +335,7 @@ async def test_reauth_flow_update_configuration_on_not_loaded_entry( with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError): config_entry = await config_entry_factory() - result = await hass.config_entries.flow.async_init( - UNIFI_DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/unifiprotect/conftest.py b/tests/components/unifiprotect/conftest.py index 0bef1ff0eb9..3ed559b71ec 100644 --- a/tests/components/unifiprotect/conftest.py +++ b/tests/components/unifiprotect/conftest.py @@ -51,11 +51,11 @@ def mock_nvr(): nvr = NVR.from_unifi_dict(**data) # disable pydantic validation so mocking can happen - NVR.__config__.validate_assignment = False + NVR.model_config["validate_assignment"] = False yield nvr - NVR.__config__.validate_assignment = True + NVR.model_config["validate_assignment"] = True @pytest.fixture(name="ufp_config_entry") @@ -120,7 +120,11 @@ def mock_ufp_client(bootstrap: Bootstrap): client.base_url = "https://127.0.0.1" client.connection_host = IPv4Address("127.0.0.1") - client.get_nvr = AsyncMock(return_value=nvr) + + async def get_nvr(*args: Any, **kwargs: Any) -> NVR: + return client.bootstrap.nvr + + client.get_nvr = get_nvr client.get_bootstrap = AsyncMock(return_value=bootstrap) client.update = AsyncMock(return_value=bootstrap) client.async_disconnect_ws = AsyncMock() @@ -173,7 +177,7 @@ def camera_fixture(fixed_now: datetime): """Mock UniFi Protect Camera device.""" # disable pydantic validation so mocking can happen - Camera.__config__.validate_assignment = False + Camera.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_camera.json", integration=DOMAIN)) camera = Camera.from_unifi_dict(**data) @@ -181,23 +185,23 @@ def camera_fixture(fixed_now: datetime): yield camera - Camera.__config__.validate_assignment = True + Camera.model_config["validate_assignment"] = True @pytest.fixture(name="camera_all") def camera_all_fixture(camera: Camera): """Mock UniFi Protect Camera device.""" - all_camera = camera.copy() - all_camera.channels = [all_camera.channels[0].copy()] + all_camera = camera.model_copy() + all_camera.channels = [all_camera.channels[0].model_copy()] - medium_channel = all_camera.channels[0].copy() + medium_channel = all_camera.channels[0].model_copy() medium_channel.name = "Medium" medium_channel.id = 1 medium_channel.rtsp_alias = "test_medium_alias" all_camera.channels.append(medium_channel) - low_channel = all_camera.channels[0].copy() + low_channel = all_camera.channels[0].model_copy() low_channel.name = "Low" low_channel.id = 2 low_channel.rtsp_alias = "test_medium_alias" @@ -210,10 +214,10 @@ def camera_all_fixture(camera: Camera): def doorbell_fixture(camera: Camera, fixed_now: datetime): """Mock UniFi Protect Camera device (with chime).""" - doorbell = camera.copy() - doorbell.channels = [c.copy() for c in doorbell.channels] + doorbell = camera.model_copy() + doorbell.channels = [c.model_copy() for c in 
doorbell.channels] - package_channel = doorbell.channels[0].copy() + package_channel = doorbell.channels[0].model_copy() package_channel.name = "Package Camera" package_channel.id = 3 package_channel.fps = 2 @@ -233,6 +237,8 @@ def doorbell_fixture(camera: Camera, fixed_now: datetime): doorbell.feature_flags.has_speaker = True doorbell.feature_flags.has_privacy_mask = True doorbell.feature_flags.is_doorbell = True + doorbell.feature_flags.has_fingerprint_sensor = True + doorbell.feature_flags.support_nfc = True doorbell.feature_flags.has_chime = True doorbell.feature_flags.has_smart_detect = True doorbell.feature_flags.has_package_camera = True @@ -245,8 +251,8 @@ def doorbell_fixture(camera: Camera, fixed_now: datetime): def unadopted_camera(camera: Camera): """Mock UniFi Protect Camera device (unadopted).""" - no_camera = camera.copy() - no_camera.channels = [c.copy() for c in no_camera.channels] + no_camera = camera.model_copy() + no_camera.channels = [c.model_copy() for c in no_camera.channels] no_camera.name = "Unadopted Camera" no_camera.is_adopted = False return no_camera @@ -257,19 +263,19 @@ def light_fixture(): """Mock UniFi Protect Light device.""" # disable pydantic validation so mocking can happen - Light.__config__.validate_assignment = False + Light.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_light.json", integration=DOMAIN)) yield Light.from_unifi_dict(**data) - Light.__config__.validate_assignment = True + Light.model_config["validate_assignment"] = True @pytest.fixture def unadopted_light(light: Light): """Mock UniFi Protect Light device (unadopted).""" - no_light = light.copy() + no_light = light.model_copy() no_light.name = "Unadopted Light" no_light.is_adopted = False return no_light @@ -280,12 +286,12 @@ def viewer(): """Mock UniFi Protect Viewport device.""" # disable pydantic validation so mocking can happen - Viewer.__config__.validate_assignment = False + Viewer.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_viewport.json", integration=DOMAIN)) yield Viewer.from_unifi_dict(**data) - Viewer.__config__.validate_assignment = True + Viewer.model_config["validate_assignment"] = True @pytest.fixture(name="sensor") @@ -293,7 +299,7 @@ def sensor_fixture(fixed_now: datetime): """Mock UniFi Protect Sensor device.""" # disable pydantic validation so mocking can happen - Sensor.__config__.validate_assignment = False + Sensor.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_sensor.json", integration=DOMAIN)) sensor: Sensor = Sensor.from_unifi_dict(**data) @@ -302,14 +308,14 @@ def sensor_fixture(fixed_now: datetime): sensor.alarm_triggered_at = fixed_now - timedelta(hours=1) yield sensor - Sensor.__config__.validate_assignment = True + Sensor.model_config["validate_assignment"] = True @pytest.fixture(name="sensor_all") def csensor_all_fixture(sensor: Sensor): """Mock UniFi Protect Sensor device.""" - all_sensor = sensor.copy() + all_sensor = sensor.model_copy() all_sensor.light_settings.is_enabled = True all_sensor.humidity_settings.is_enabled = True all_sensor.temperature_settings.is_enabled = True @@ -325,19 +331,19 @@ def doorlock_fixture(): """Mock UniFi Protect Doorlock device.""" # disable pydantic validation so mocking can happen - Doorlock.__config__.validate_assignment = False + Doorlock.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_doorlock.json", integration=DOMAIN)) yield Doorlock.from_unifi_dict(**data) - 
Doorlock.__config__.validate_assignment = True + Doorlock.model_config["validate_assignment"] = True @pytest.fixture def unadopted_doorlock(doorlock: Doorlock): """Mock UniFi Protect Light device (unadopted).""" - no_doorlock = doorlock.copy() + no_doorlock = doorlock.model_copy() no_doorlock.name = "Unadopted Lock" no_doorlock.is_adopted = False return no_doorlock @@ -348,12 +354,12 @@ def chime(): """Mock UniFi Protect Chime device.""" # disable pydantic validation so mocking can happen - Chime.__config__.validate_assignment = False + Chime.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_chime.json", integration=DOMAIN)) yield Chime.from_unifi_dict(**data) - Chime.__config__.validate_assignment = True + Chime.model_config["validate_assignment"] = True @pytest.fixture(name="fixed_now") diff --git a/tests/components/unifiprotect/fixtures/sample_bootstrap.json b/tests/components/unifiprotect/fixtures/sample_bootstrap.json index 2b7326831eb..240a9938b64 100644 --- a/tests/components/unifiprotect/fixtures/sample_bootstrap.json +++ b/tests/components/unifiprotect/fixtures/sample_bootstrap.json @@ -57,7 +57,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -118,7 +118,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -134,7 +134,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -246,7 +246,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -314,7 +314,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -365,7 +365,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -381,7 +381,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -432,7 +432,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -448,7 +448,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", 
"bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -496,7 +496,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -526,7 +526,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -546,7 +546,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", diff --git a/tests/components/unifiprotect/test_binary_sensor.py b/tests/components/unifiprotect/test_binary_sensor.py index af8ce015955..3a8d5d952ce 100644 --- a/tests/components/unifiprotect/test_binary_sensor.py +++ b/tests/components/unifiprotect/test_binary_sensor.py @@ -305,7 +305,7 @@ async def test_binary_sensor_update_motion( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_motion_detected = True new_camera.last_motion_event_id = event.id @@ -352,7 +352,7 @@ async def test_binary_sensor_update_light_motion( api=ufp.api, ) - new_light = light.copy() + new_light = light.model_copy() new_light.is_pir_motion_detected = True new_light.last_motion_event_id = event.id @@ -386,7 +386,7 @@ async def test_binary_sensor_update_mount_type_window( assert state assert state.attributes[ATTR_DEVICE_CLASS] == BinarySensorDeviceClass.DOOR.value - new_sensor = sensor_all.copy() + new_sensor = sensor_all.model_copy() new_sensor.mount_type = MountType.WINDOW mock_msg = Mock() @@ -418,7 +418,7 @@ async def test_binary_sensor_update_mount_type_garage( assert state assert state.attributes[ATTR_DEVICE_CLASS] == BinarySensorDeviceClass.DOOR.value - new_sensor = sensor_all.copy() + new_sensor = sensor_all.model_copy() new_sensor.mount_type = MountType.GARAGE mock_msg = Mock() @@ -468,7 +468,7 @@ async def test_binary_sensor_package_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PACKAGE] = event.id @@ -501,7 +501,7 @@ async def test_binary_sensor_package_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PACKAGE] = event.id @@ -534,7 +534,7 @@ async def test_binary_sensor_package_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PACKAGE] = event.id @@ -575,3 +575,149 @@ async def test_binary_sensor_package_detected( ufp.ws_msg(mock_msg) await hass.async_block_till_done() assert len(state_changes) == 2 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensor_person_detected( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + 
fixed_now: datetime, +) -> None: + """Test binary_sensor person detected detection entity.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.BINARY_SENSOR, 15, 15) + + doorbell.smart_detect_settings.object_types.append(SmartDetectObjectType.PERSON) + + _, entity_id = ids_from_device_description( + Platform.BINARY_SENSOR, doorbell, EVENT_SENSORS[3] + ) + + events = async_capture_events(hass, EVENT_STATE_CHANGED) + + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.SMART_DETECT, + start=fixed_now - timedelta(seconds=1), + end=None, + score=50, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.model_copy() + new_camera.is_smart_detected = True + + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.SMART_DETECT, + start=fixed_now - timedelta(seconds=1), + end=fixed_now + timedelta(seconds=1), + score=65, + smart_detect_types=[SmartDetectObjectType.PERSON], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.model_copy() + new_camera.is_smart_detected = True + new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id + + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + entity_events = [event for event in events if event.data["entity_id"] == entity_id] + assert len(entity_events) == 3 + assert entity_events[0].data["new_state"].state == STATE_OFF + assert entity_events[1].data["new_state"].state == STATE_ON + assert entity_events[2].data["new_state"].state == STATE_OFF + + # Event is already seen and has end, should now be off + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + # Now send an event that has an end right away + event = Event( + model=ModelType.EVENT, + id="new_event_id", + type=EventType.SMART_DETECT, + start=fixed_now - timedelta(seconds=1), + end=fixed_now + timedelta(seconds=1), + score=80, + smart_detect_types=[SmartDetectObjectType.PERSON], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.model_copy() + new_camera.is_smart_detected = True + new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id + + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + + state_changes: list[HAEvent[EventStateChangedData]] = async_capture_events( + hass, EVENT_STATE_CHANGED + ) + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + assert len(state_changes) == 2 + + on_event = state_changes[0] + state = on_event.data["new_state"] + assert state + assert state.state == STATE_ON + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_SCORE] == 
80 + + off_event = state_changes[1] + state = off_event.data["new_state"] + assert state + assert state.state == STATE_OFF + assert ATTR_EVENT_SCORE not in state.attributes + + # replay and ensure ignored + ufp.ws_msg(mock_msg) + await hass.async_block_till_done() + assert len(state_changes) == 2 diff --git a/tests/components/unifiprotect/test_camera.py b/tests/components/unifiprotect/test_camera.py index 9fedb67fea4..12b92beedd0 100644 --- a/tests/components/unifiprotect/test_camera.py +++ b/tests/components/unifiprotect/test_camera.py @@ -4,16 +4,22 @@ from __future__ import annotations from unittest.mock import AsyncMock, Mock +import pytest from uiprotect.api import DEVICE_UPDATE_INTERVAL from uiprotect.data import Camera as ProtectCamera, CameraChannel, StateType from uiprotect.exceptions import NvrError from uiprotect.websocket import WebsocketState +from webrtc_models import RTCIceCandidateInit from homeassistant.components.camera import ( - STATE_IDLE, CameraEntityFeature, + CameraState, + CameraWebRTCProvider, + StreamType, + WebRTCSendMessage, async_get_image, async_get_stream_source, + async_register_webrtc_provider, ) from homeassistant.components.unifiprotect.const import ( ATTR_BITRATE, @@ -22,6 +28,7 @@ from homeassistant.components.unifiprotect.const import ( ATTR_HEIGHT, ATTR_WIDTH, DEFAULT_ATTRIBUTION, + DOMAIN, ) from homeassistant.components.unifiprotect.utils import get_camera_base_name from homeassistant.const import ( @@ -31,11 +38,12 @@ from homeassistant.const import ( STATE_UNAVAILABLE, Platform, ) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from .utils import ( + Camera, MockUFPFixture, adopt_devices, assert_entity_counts, @@ -46,6 +54,45 @@ from .utils import ( ) +class MockWebRTCProvider(CameraWebRTCProvider): + """WebRTC provider.""" + + @property + def domain(self) -> str: + """Return the integration domain of the provider.""" + return DOMAIN + + @callback + def async_is_supported(self, stream_source: str) -> bool: + """Return if this provider is supports the Camera as source.""" + return True + + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback.""" + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle the WebRTC candidate.""" + + @callback + def async_close_session(self, session_id: str) -> None: + """Close the session.""" + + +@pytest.fixture +async def web_rtc_provider(hass: HomeAssistant) -> None: + """Fixture to enable WebRTC provider for camera entities.""" + await async_setup_component(hass, "camera", {}) + async_register_webrtc_provider(hass, MockWebRTCProvider()) + + def validate_default_camera_entity( hass: HomeAssistant, camera_obj: ProtectCamera, @@ -66,6 +113,14 @@ def validate_default_camera_entity( assert entity.disabled is False assert entity.unique_id == unique_id + device_registry = dr.async_get(hass) + device = device_registry.async_get(entity.device_id) + assert device + assert device.manufacturer == "Ubiquiti" + assert device.name == camera_obj.name + assert device.model == camera_obj.market_name or camera_obj.type + assert device.model_id == 
camera_obj.type + return entity_id @@ -141,7 +196,7 @@ async def validate_rtsps_camera_state( """Validate a camera's state.""" channel = camera_obj.channels[channel_id] - assert await async_get_stream_source(hass, entity_id) == channel.rtsps_url + assert await async_get_stream_source(hass, entity_id) == channel.rtsps_no_srtp_url validate_common_camera_state(hass, channel, entity_id, features) @@ -181,15 +236,15 @@ async def test_basic_setup( ) -> None: """Test working setup of unifiprotect entry.""" - camera_high_only = camera_all.copy() - camera_high_only.channels = [c.copy() for c in camera_all.channels] + camera_high_only = camera_all.model_copy() + camera_high_only.channels = [c.model_copy() for c in camera_all.channels] camera_high_only.name = "Test Camera 1" camera_high_only.channels[0].is_rtsp_enabled = True camera_high_only.channels[1].is_rtsp_enabled = False camera_high_only.channels[2].is_rtsp_enabled = False - camera_medium_only = camera_all.copy() - camera_medium_only.channels = [c.copy() for c in camera_all.channels] + camera_medium_only = camera_all.model_copy() + camera_medium_only.channels = [c.model_copy() for c in camera_all.channels] camera_medium_only.name = "Test Camera 2" camera_medium_only.channels[0].is_rtsp_enabled = False camera_medium_only.channels[1].is_rtsp_enabled = True @@ -197,8 +252,8 @@ async def test_basic_setup( camera_all.name = "Test Camera 3" - camera_no_channels = camera_all.copy() - camera_no_channels.channels = [c.copy() for c in camera_all.channels] + camera_no_channels = camera_all.model_copy() + camera_no_channels.channels = [c.model_copy() for c in camera_all.channels] camera_no_channels.name = "Test Camera 4" camera_no_channels.channels[0].is_rtsp_enabled = False camera_no_channels.channels[1].is_rtsp_enabled = False @@ -275,12 +330,32 @@ async def test_basic_setup( await validate_no_stream_camera_state(hass, doorbell, 3, entity_id, features=0) +@pytest.mark.usefixtures("web_rtc_provider") +async def test_webrtc_support( + hass: HomeAssistant, + ufp: MockUFPFixture, + camera_all: ProtectCamera, +) -> None: + """Test webrtc support is available.""" + camera_high_only = camera_all.model_copy() + camera_high_only.channels = [c.model_copy() for c in camera_all.channels] + camera_high_only.name = "Test Camera 1" + camera_high_only.channels[0].is_rtsp_enabled = True + camera_high_only.channels[1].is_rtsp_enabled = False + camera_high_only.channels[2].is_rtsp_enabled = False + await init_entry(hass, ufp, [camera_high_only]) + entity_id = validate_default_camera_entity(hass, camera_high_only, 0) + state = hass.states.get(entity_id) + assert state + assert StreamType.WEB_RTC in state.attributes["frontend_stream_type"] + + async def test_adopt( hass: HomeAssistant, ufp: MockUFPFixture, camera: ProtectCamera ) -> None: """Test setting up camera with no camera channels.""" - camera1 = camera.copy() + camera1 = camera.model_copy() camera1.channels = [] await init_entry(hass, ufp, [camera1]) @@ -375,7 +450,7 @@ async def test_camera_interval_update( state = hass.states.get(entity_id) assert state and state.state == "idle" - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_recording = True ufp.api.bootstrap.cameras = {new_camera.id: new_camera} @@ -423,7 +498,7 @@ async def test_camera_websocket_disconnected( entity_id = "camera.test_camera_high_resolution_channel" state = hass.states.get(entity_id) - assert state and state.state == STATE_IDLE + assert state and state.state == CameraState.IDLE # websocket disconnects 
ufp.ws_state_subscription(WebsocketState.DISCONNECTED) @@ -437,7 +512,7 @@ async def test_camera_websocket_disconnected( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state and state.state == STATE_IDLE + assert state and state.state == CameraState.IDLE async def test_camera_ws_update( @@ -452,10 +527,10 @@ async def test_camera_ws_update( state = hass.states.get(entity_id) assert state and state.state == "idle" - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_recording = True - no_camera = camera.copy() + no_camera = camera.model_copy() no_camera.is_adopted = False ufp.api.bootstrap.cameras = {new_camera.id: new_camera} @@ -488,7 +563,7 @@ async def test_camera_ws_update_offline( assert state and state.state == "idle" # camera goes offline - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.state = StateType.DISCONNECTED mock_msg = Mock() @@ -526,7 +601,7 @@ async def test_camera_enable_motion( assert_entity_counts(hass, Platform.CAMERA, 2, 1) entity_id = "camera.test_camera_high_resolution_channel" - camera.__fields__["set_motion_detection"] = Mock(final=False) + camera.__pydantic_fields__["set_motion_detection"] = Mock(final=False, frozen=False) camera.set_motion_detection = AsyncMock() await hass.services.async_call( @@ -548,7 +623,7 @@ async def test_camera_disable_motion( assert_entity_counts(hass, Platform.CAMERA, 2, 1) entity_id = "camera.test_camera_high_resolution_channel" - camera.__fields__["set_motion_detection"] = Mock(final=False) + camera.__pydantic_fields__["set_motion_detection"] = Mock(final=False, frozen=False) camera.set_motion_detection = AsyncMock() await hass.services.async_call( diff --git a/tests/components/unifiprotect/test_config_flow.py b/tests/components/unifiprotect/test_config_flow.py index 5d02e1cf098..8bfdc004092 100644 --- a/tests/components/unifiprotect/test_config_flow.py +++ b/tests/components/unifiprotect/test_config_flow.py @@ -224,13 +224,7 @@ async def test_form_reauth_auth( ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert not result["errors"] flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) diff --git a/tests/components/unifiprotect/test_event.py b/tests/components/unifiprotect/test_event.py index 9d1a701fe39..6a26738f5e8 100644 --- a/tests/components/unifiprotect/test_event.py +++ b/tests/components/unifiprotect/test_event.py @@ -33,11 +33,11 @@ async def test_camera_remove( ufp.api.bootstrap.nvr.system_info.ustorage = None await init_entry(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 1, 1) + assert_entity_counts(hass, Platform.EVENT, 3, 3) await remove_entities(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.EVENT, 0, 0) await adopt_devices(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 1, 1) + assert_entity_counts(hass, Platform.EVENT, 3, 3) async def test_doorbell_ring( @@ -50,7 +50,7 @@ async def test_doorbell_ring( """Test a doorbell ring event.""" await init_entry(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 1, 1) + assert_entity_counts(hass, Platform.EVENT, 3, 3) events: list[HAEvent] = [] @callback @@ -75,7 +75,7 @@ async def 
test_doorbell_ring( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.last_ring_event_id = "test_event_id" ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -107,7 +107,7 @@ async def test_doorbell_ring( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -137,7 +137,7 @@ async def test_doorbell_ring( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -152,3 +152,177 @@ async def test_doorbell_ring( assert state assert state.state == timestamp unsub() + + +async def test_doorbell_nfc_scanned( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell NFC scanned event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] + ) + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.NFC_CARD_SCANNED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"nfc": {"nfc_id": "test_nfc_id", "user_id": "test_user_id"}}, + ) + + new_camera = doorbell.model_copy() + new_camera.last_nfc_card_scanned_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["nfc_id"] == "test_nfc_id" + + unsub() + + +async def test_doorbell_fingerprint_identified( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell fingerprint identified event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] + ) + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.FINGERPRINT_IDENTIFIED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"fingerprint": {"ulp_id": "test_ulp_id"}}, + ) + + new_camera = doorbell.model_copy() + new_camera.last_fingerprint_identified_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events 
= {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["ulp_id"] == "test_ulp_id" + + unsub() + + +async def test_doorbell_fingerprint_not_identified( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell fingerprint identified event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] + ) + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.FINGERPRINT_IDENTIFIED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"fingerprint": {}}, + ) + + new_camera = doorbell.model_copy() + new_camera.last_fingerprint_identified_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["ulp_id"] == "" + + unsub() diff --git a/tests/components/unifiprotect/test_init.py b/tests/components/unifiprotect/test_init.py index 46e57c62101..b01c7e0cf4a 100644 --- a/tests/components/unifiprotect/test_init.py +++ b/tests/components/unifiprotect/test_init.py @@ -2,8 +2,9 @@ from __future__ import annotations -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch +import pytest from uiprotect import NotAuthorized, NvrError, ProtectApiClient from uiprotect.api import DEVICE_UPDATE_INTERVAL from uiprotect.data import NVR, Bootstrap, CloudAccount, Light @@ -13,6 +14,9 @@ from homeassistant.components.unifiprotect.const import ( CONF_DISABLE_RTSP, DOMAIN, ) +from homeassistant.components.unifiprotect.data import ( + async_ufp_instance_for_config_entry_ids, +) from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -114,7 +118,7 @@ async def test_setup_too_old( ) -> None: """Test setup of unifiprotect entry with too old of version of UniFi Protect.""" - old_bootstrap = ufp.api.bootstrap.copy() + old_bootstrap = ufp.api.bootstrap.model_copy() old_bootstrap.nvr = old_nvr ufp.api.update.return_value = old_bootstrap ufp.api.bootstrap = old_bootstrap @@ -286,3 +290,58 @@ async def test_device_remove_devices_nvr( client = await hass_ws_client(hass) response = await client.remove_device(live_device_entry.id, entry_id) assert not response["success"] + + +@pytest.mark.parametrize( + 
("mock_entries", "expected_result"), + [ + pytest.param( + [ + MockConfigEntry( + domain=DOMAIN, + entry_id="1", + data={}, + ), + MockConfigEntry( + domain="other_domain", + entry_id="2", + data={}, + ), + ], + "mock_api_instance_1", + id="one_matching_domain", + ), + pytest.param( + [ + MockConfigEntry( + domain="other_domain", + entry_id="1", + data={}, + ), + MockConfigEntry( + domain="other_domain", + entry_id="2", + data={}, + ), + ], + None, + id="no_matching_domain", + ), + ], +) +async def test_async_ufp_instance_for_config_entry_ids( + hass: HomeAssistant, + mock_entries: list[MockConfigEntry], + expected_result: str | None, +) -> None: + """Test async_ufp_instance_for_config_entry_ids with various entry configurations.""" + + for index, entry in enumerate(mock_entries): + entry.add_to_hass(hass) + entry.runtime_data = Mock(api=f"mock_api_instance_{index + 1}") + + entry_ids = {entry.entry_id for entry in mock_entries} + + result = async_ufp_instance_for_config_entry_ids(hass, entry_ids) + + assert result == expected_result diff --git a/tests/components/unifiprotect/test_light.py b/tests/components/unifiprotect/test_light.py index bb0b6992e4e..724ed108673 100644 --- a/tests/components/unifiprotect/test_light.py +++ b/tests/components/unifiprotect/test_light.py @@ -74,7 +74,7 @@ async def test_light_update( await init_entry(hass, ufp, [light, unadopted_light]) assert_entity_counts(hass, Platform.LIGHT, 1, 1) - new_light = light.copy() + new_light = light.model_copy() new_light.is_light_on = True new_light.light_device_settings.led_level = LEDLevel(3) @@ -101,7 +101,7 @@ async def test_light_turn_on( assert_entity_counts(hass, Platform.LIGHT, 1, 1) entity_id = "light.test_light" - light.__fields__["set_light"] = Mock(final=False) + light.__pydantic_fields__["set_light"] = Mock(final=False, frozen=False) light.set_light = AsyncMock() await hass.services.async_call( @@ -123,7 +123,7 @@ async def test_light_turn_off( assert_entity_counts(hass, Platform.LIGHT, 1, 1) entity_id = "light.test_light" - light.__fields__["set_light"] = Mock(final=False) + light.__pydantic_fields__["set_light"] = Mock(final=False, frozen=False) light.set_light = AsyncMock() await hass.services.async_call( diff --git a/tests/components/unifiprotect/test_lock.py b/tests/components/unifiprotect/test_lock.py index 62a1cb9ff46..9095c092ea2 100644 --- a/tests/components/unifiprotect/test_lock.py +++ b/tests/components/unifiprotect/test_lock.py @@ -6,16 +6,12 @@ from unittest.mock import AsyncMock, Mock from uiprotect.data import Doorlock, LockStatusType +from homeassistant.components.lock import LockState from homeassistant.components.unifiprotect.const import DEFAULT_ATTRIBUTION from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_ENTITY_ID, - STATE_JAMMED, - STATE_LOCKED, - STATE_LOCKING, STATE_UNAVAILABLE, - STATE_UNLOCKED, - STATE_UNLOCKING, Platform, ) from homeassistant.core import HomeAssistant @@ -64,7 +60,7 @@ async def test_lock_setup( state = hass.states.get(entity_id) assert state - assert state.state == STATE_UNLOCKED + assert state.state == LockState.UNLOCKED assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION @@ -79,7 +75,7 @@ async def test_lock_locked( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.CLOSED mock_msg = Mock() @@ -92,7 +88,7 @@ async def test_lock_locked( state = hass.states.get("lock.test_lock_lock") assert 
state - assert state.state == STATE_LOCKED + assert state.state == LockState.LOCKED async def test_lock_unlocking( @@ -106,7 +102,7 @@ async def test_lock_unlocking( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.OPENING mock_msg = Mock() @@ -119,7 +115,7 @@ async def test_lock_unlocking( state = hass.states.get("lock.test_lock_lock") assert state - assert state.state == STATE_UNLOCKING + assert state.state == LockState.UNLOCKING async def test_lock_locking( @@ -133,7 +129,7 @@ async def test_lock_locking( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.CLOSING mock_msg = Mock() @@ -146,7 +142,7 @@ async def test_lock_locking( state = hass.states.get("lock.test_lock_lock") assert state - assert state.state == STATE_LOCKING + assert state.state == LockState.LOCKING async def test_lock_jammed( @@ -160,7 +156,7 @@ async def test_lock_jammed( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.JAMMED_WHILE_CLOSING mock_msg = Mock() @@ -173,7 +169,7 @@ async def test_lock_jammed( state = hass.states.get("lock.test_lock_lock") assert state - assert state.state == STATE_JAMMED + assert state.state == LockState.JAMMED async def test_lock_unavailable( @@ -187,7 +183,7 @@ async def test_lock_unavailable( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.NOT_CALIBRATED mock_msg = Mock() @@ -214,7 +210,7 @@ async def test_lock_do_lock( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - doorlock.__fields__["close_lock"] = Mock(final=False) + doorlock.__pydantic_fields__["close_lock"] = Mock(final=False, frozen=False) doorlock.close_lock = AsyncMock() await hass.services.async_call( @@ -238,7 +234,7 @@ async def test_lock_do_unlock( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.CLOSED mock_msg = Mock() @@ -249,7 +245,7 @@ async def test_lock_do_unlock( ufp.ws_msg(mock_msg) await hass.async_block_till_done() - new_lock.__fields__["open_lock"] = Mock(final=False) + doorlock.__pydantic_fields__["open_lock"] = Mock(final=False, frozen=False) new_lock.open_lock = AsyncMock() await hass.services.async_call( diff --git a/tests/components/unifiprotect/test_media_player.py b/tests/components/unifiprotect/test_media_player.py index 642a3a1e372..6d27eb2a206 100644 --- a/tests/components/unifiprotect/test_media_player.py +++ b/tests/components/unifiprotect/test_media_player.py @@ -88,7 +88,7 @@ async def test_media_player_update( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.talkback_stream = Mock() new_camera.talkback_stream.is_running = True @@ -116,7 +116,7 @@ async def test_media_player_set_volume( await 
init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["set_speaker_volume"] = Mock(final=False) + doorbell.__pydantic_fields__["set_speaker_volume"] = Mock(final=False, frozen=False) doorbell.set_speaker_volume = AsyncMock() await hass.services.async_call( @@ -140,7 +140,7 @@ async def test_media_player_stop( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.talkback_stream = AsyncMock() new_camera.talkback_stream.is_running = True @@ -173,9 +173,11 @@ async def test_media_player_play( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["stop_audio"] = Mock(final=False) - doorbell.__fields__["play_audio"] = Mock(final=False) - doorbell.__fields__["wait_until_audio_completes"] = Mock(final=False) + doorbell.__pydantic_fields__["stop_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( + final=False, frozen=False + ) doorbell.stop_audio = AsyncMock() doorbell.play_audio = AsyncMock() doorbell.wait_until_audio_completes = AsyncMock() @@ -208,9 +210,11 @@ async def test_media_player_play_media_source( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["stop_audio"] = Mock(final=False) - doorbell.__fields__["play_audio"] = Mock(final=False) - doorbell.__fields__["wait_until_audio_completes"] = Mock(final=False) + doorbell.__pydantic_fields__["stop_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( + final=False, frozen=False + ) doorbell.stop_audio = AsyncMock() doorbell.play_audio = AsyncMock() doorbell.wait_until_audio_completes = AsyncMock() @@ -247,7 +251,7 @@ async def test_media_player_play_invalid( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["play_audio"] = Mock(final=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) doorbell.play_audio = AsyncMock() with pytest.raises(HomeAssistantError): @@ -276,8 +280,10 @@ async def test_media_player_play_error( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["play_audio"] = Mock(final=False) - doorbell.__fields__["wait_until_audio_completes"] = Mock(final=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( + final=False, frozen=False + ) doorbell.play_audio = AsyncMock(side_effect=StreamError) doorbell.wait_until_audio_completes = AsyncMock() diff --git a/tests/components/unifiprotect/test_media_source.py b/tests/components/unifiprotect/test_media_source.py index 60cd3150884..61f9680bdbc 100644 --- a/tests/components/unifiprotect/test_media_source.py +++ b/tests/components/unifiprotect/test_media_source.py @@ -204,9 +204,9 @@ async def test_browse_media_root_multiple_consoles( await hass.config_entries.async_setup(ufp.entry.entry_id) await hass.async_block_till_done() - bootstrap2 = 
bootstrap.copy() + bootstrap2 = bootstrap.model_copy() bootstrap2._has_media = True - bootstrap2.nvr = bootstrap.nvr.copy() + bootstrap2.nvr = bootstrap.nvr.model_copy() bootstrap2.nvr.id = "test_id2" bootstrap2.nvr.mac = "A2E00C826924" bootstrap2.nvr.name = "UnifiProtect2" @@ -270,9 +270,9 @@ async def test_browse_media_root_multiple_consoles_only_one_media( await hass.config_entries.async_setup(ufp.entry.entry_id) await hass.async_block_till_done() - bootstrap2 = bootstrap.copy() + bootstrap2 = bootstrap.model_copy() bootstrap2._has_media = False - bootstrap2.nvr = bootstrap.nvr.copy() + bootstrap2.nvr = bootstrap.nvr.model_copy() bootstrap2.nvr.id = "test_id2" bootstrap2.nvr.mac = "A2E00C826924" bootstrap2.nvr.name = "UnifiProtect2" @@ -669,7 +669,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.RING, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -683,7 +683,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.MOTION, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -697,7 +697,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["person"], @@ -706,7 +706,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", } @@ -720,7 +720,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "person"], @@ -734,7 +734,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -748,7 +748,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -758,7 +758,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", } @@ -772,7 +772,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -782,7 +782,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -802,7 +802,7 @@ 
async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -812,7 +812,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -823,7 +823,7 @@ async def test_browse_media_recent_truncated( }, }, { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", }, @@ -837,7 +837,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle"], @@ -846,7 +846,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -870,7 +870,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_AUDIO_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["alrmSpeak"], diff --git a/tests/components/unifiprotect/test_migrate.py b/tests/components/unifiprotect/test_migrate.py index 4e1bf8bd418..4bfc29a142b 100644 --- a/tests/components/unifiprotect/test_migrate.py +++ b/tests/components/unifiprotect/test_migrate.py @@ -7,9 +7,6 @@ from unittest.mock import patch from uiprotect.data import Camera from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN -from homeassistant.components.repairs.issue_handler import ( - async_process_repairs_platforms, -) from homeassistant.components.script import DOMAIN as SCRIPT_DOMAIN from homeassistant.components.unifiprotect.const import DOMAIN from homeassistant.const import SERVICE_RELOAD, Platform @@ -19,6 +16,7 @@ from homeassistant.setup import async_setup_component from .utils import MockUFPFixture, init_entry +from tests.components.repairs import async_process_repairs_platforms from tests.typing import WebSocketGenerator diff --git a/tests/components/unifiprotect/test_number.py b/tests/components/unifiprotect/test_number.py index 77a409551b1..1838a574bc4 100644 --- a/tests/components/unifiprotect/test_number.py +++ b/tests/components/unifiprotect/test_number.py @@ -162,7 +162,7 @@ async def test_number_light_sensitivity( description = LIGHT_NUMBERS[0] assert description.ufp_set_method is not None - light.__fields__["set_sensitivity"] = Mock(final=False) + light.__pydantic_fields__["set_sensitivity"] = Mock(final=False, frozen=False) light.set_sensitivity = AsyncMock() _, entity_id = ids_from_device_description(Platform.NUMBER, light, description) @@ -184,7 +184,7 @@ async def test_number_light_duration( description = LIGHT_NUMBERS[1] - light.__fields__["set_duration"] = Mock(final=False) + light.__pydantic_fields__["set_duration"] = Mock(final=False, frozen=False) light.set_duration = AsyncMock() _, entity_id = ids_from_device_description(Platform.NUMBER, light, description) @@ -210,7 +210,9 @@ async def 
test_number_camera_simple( assert description.ufp_set_method is not None - camera.__fields__[description.ufp_set_method] = Mock(final=False) + camera.__pydantic_fields__[description.ufp_set_method] = Mock( + final=False, frozen=False + ) setattr(camera, description.ufp_set_method, AsyncMock()) _, entity_id = ids_from_device_description(Platform.NUMBER, camera, description) @@ -230,7 +232,9 @@ async def test_number_lock_auto_close( description = DOORLOCK_NUMBERS[0] - doorlock.__fields__["set_auto_close_time"] = Mock(final=False) + doorlock.__pydantic_fields__["set_auto_close_time"] = Mock( + final=False, frozen=False + ) doorlock.set_auto_close_time = AsyncMock() _, entity_id = ids_from_device_description(Platform.NUMBER, doorlock, description) diff --git a/tests/components/unifiprotect/test_recorder.py b/tests/components/unifiprotect/test_recorder.py index fe102c2fdbc..1f025a63306 100644 --- a/tests/components/unifiprotect/test_recorder.py +++ b/tests/components/unifiprotect/test_recorder.py @@ -51,7 +51,7 @@ async def test_exclude_attributes( camera_id=doorbell.id, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_motion_detected = True new_camera.last_motion_event_id = event.id diff --git a/tests/components/unifiprotect/test_repairs.py b/tests/components/unifiprotect/test_repairs.py index bdfcd6ff475..1117038bbd0 100644 --- a/tests/components/unifiprotect/test_repairs.py +++ b/tests/components/unifiprotect/test_repairs.py @@ -3,24 +3,21 @@ from __future__ import annotations from copy import copy, deepcopy -from http import HTTPStatus from unittest.mock import AsyncMock, Mock from uiprotect.data import Camera, CloudAccount, ModelType, Version -from homeassistant.components.repairs.issue_handler import ( - async_process_repairs_platforms, -) -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) from homeassistant.components.unifiprotect.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from .utils import MockUFPFixture, init_entry +from tests.components.repairs import ( + async_process_repairs_platforms, + process_repair_fix_flow, + start_repair_fix_flow, +) from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -52,12 +49,7 @@ async def test_ea_warning_ignore( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post( - url, json={"handler": DOMAIN, "issue_id": "ea_channel_warning"} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "ea_channel_warning") flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -66,10 +58,7 @@ async def test_ea_warning_ignore( } assert data["step_id"] == "start" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -78,10 +67,7 @@ async def test_ea_warning_ignore( } assert data["step_id"] == "confirm" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) assert data["type"] == "create_entry" @@ -114,12 +100,7 @@ async def test_ea_warning_fix( issue = i assert issue is 
not None - url = RepairsFlowIndexView.url - resp = await client.post( - url, json={"handler": DOMAIN, "issue_id": "ea_channel_warning"} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "ea_channel_warning") flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -139,10 +120,7 @@ async def test_ea_warning_fix( ufp.ws_msg(mock_msg) await hass.async_block_till_done() - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) assert data["type"] == "create_entry" @@ -176,18 +154,12 @@ async def test_cloud_user_fix( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "cloud_user"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "cloud_user") flow_id = data["flow_id"] assert data["step_id"] == "confirm" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -228,26 +200,17 @@ async def test_rtsp_read_only_ignore( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, issue_id) flow_id = data["flow_id"] assert data["step_id"] == "start" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) flow_id = data["flow_id"] assert data["step_id"] == "confirm" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) assert data["type"] == "create_entry" @@ -287,18 +250,12 @@ async def test_rtsp_read_only_fix( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, issue_id) flow_id = data["flow_id"] assert data["step_id"] == "start" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) assert data["type"] == "create_entry" @@ -337,18 +294,12 @@ async def test_rtsp_writable_fix( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, issue_id) flow_id = data["flow_id"] assert data["step_id"] == "start" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await 
process_repair_fix_flow(client, flow_id) assert data["type"] == "create_entry" @@ -398,18 +349,12 @@ async def test_rtsp_writable_fix_when_not_setup( await hass.config_entries.async_unload(ufp.entry.entry_id) await hass.async_block_till_done() - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, issue_id) flow_id = data["flow_id"] assert data["step_id"] == "start" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) assert data["type"] == "create_entry" @@ -418,3 +363,30 @@ async def test_rtsp_writable_fix_when_not_setup( ufp.api.update_device.assert_called_with( ModelType.CAMERA, doorbell.id, {"channels": channels} ) + + +async def test_rtsp_no_fix_if_third_party( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test no RTSP disabled warning if camera is third-party.""" + + for channel in doorbell.channels: + channel.is_rtsp_enabled = False + for user in ufp.api.bootstrap.users.values(): + user.all_permissions = [] + + ufp.api.get_camera = AsyncMock(return_value=doorbell) + doorbell.is_third_party_camera = True + + await init_entry(hass, ufp, [doorbell]) + await async_process_repairs_platforms(hass) + ws_client = await hass_ws_client(hass) + + await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + + assert msg["success"] + assert not msg["result"]["issues"] diff --git a/tests/components/unifiprotect/test_select.py b/tests/components/unifiprotect/test_select.py index 8795af57214..6db3ae22dcb 100644 --- a/tests/components/unifiprotect/test_select.py +++ b/tests/components/unifiprotect/test_select.py @@ -262,7 +262,7 @@ async def test_select_update_doorbell_settings( expected_length += 1 new_nvr = copy(ufp.api.bootstrap.nvr) - new_nvr.__fields__["update_all_messages"] = Mock(final=False) + new_nvr.__pydantic_fields__["update_all_messages"] = Mock(final=False, frozen=False) new_nvr.update_all_messages = Mock() new_nvr.doorbell_settings.all_messages = [ @@ -304,7 +304,7 @@ async def test_select_update_doorbell_message( assert state assert state.state == "Default Message (Welcome)" - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.lcd_message = LCDMessage( type=DoorbellMessageType.CUSTOM_MESSAGE, text="Test" ) @@ -332,7 +332,7 @@ async def test_select_set_option_light_motion( _, entity_id = ids_from_device_description(Platform.SELECT, light, LIGHT_SELECTS[0]) - light.__fields__["set_light_settings"] = Mock(final=False) + light.__pydantic_fields__["set_light_settings"] = Mock(final=False, frozen=False) light.set_light_settings = AsyncMock() await hass.services.async_call( @@ -357,7 +357,7 @@ async def test_select_set_option_light_camera( _, entity_id = ids_from_device_description(Platform.SELECT, light, LIGHT_SELECTS[1]) - light.__fields__["set_paired_camera"] = Mock(final=False) + light.__pydantic_fields__["set_paired_camera"] = Mock(final=False, frozen=False) light.set_paired_camera = AsyncMock() camera = list(light.api.bootstrap.cameras.values())[0] @@ -393,7 +393,7 @@ async def test_select_set_option_camera_recording( Platform.SELECT, doorbell, CAMERA_SELECTS[0] ) - doorbell.__fields__["set_recording_mode"] 
= Mock(final=False) + doorbell.__pydantic_fields__["set_recording_mode"] = Mock(final=False, frozen=False) doorbell.set_recording_mode = AsyncMock() await hass.services.async_call( @@ -418,7 +418,7 @@ async def test_select_set_option_camera_ir( Platform.SELECT, doorbell, CAMERA_SELECTS[1] ) - doorbell.__fields__["set_ir_led_model"] = Mock(final=False) + doorbell.__pydantic_fields__["set_ir_led_model"] = Mock(final=False, frozen=False) doorbell.set_ir_led_model = AsyncMock() await hass.services.async_call( @@ -443,7 +443,7 @@ async def test_select_set_option_camera_doorbell_custom( Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( @@ -470,7 +470,7 @@ async def test_select_set_option_camera_doorbell_unifi( Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( @@ -512,7 +512,7 @@ async def test_select_set_option_camera_doorbell_default( Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( @@ -541,7 +541,7 @@ async def test_select_set_option_viewer( Platform.SELECT, viewer, VIEWER_SELECTS[0] ) - viewer.__fields__["set_liveview"] = Mock(final=False) + viewer.__pydantic_fields__["set_liveview"] = Mock(final=False, frozen=False) viewer.set_liveview = AsyncMock() liveview = list(viewer.api.bootstrap.liveviews.values())[0] diff --git a/tests/components/unifiprotect/test_sensor.py b/tests/components/unifiprotect/test_sensor.py index bc5f372c598..9489a49bf22 100644 --- a/tests/components/unifiprotect/test_sensor.py +++ b/tests/components/unifiprotect/test_sensor.py @@ -464,7 +464,7 @@ async def test_sensor_update_alarm( api=ufp.api, ) - new_sensor = sensor_all.copy() + new_sensor = sensor_all.model_copy() new_sensor.set_alarm_timeout() new_sensor.last_alarm_event_id = event.id @@ -548,7 +548,7 @@ async def test_camera_update_license_plate( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id @@ -663,7 +663,7 @@ async def test_camera_update_license_plate_changes_number_during_detect( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id @@ -750,7 +750,7 @@ async def test_camera_update_license_plate_multiple_updates( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id @@ -873,7 +873,7 @@ async def test_camera_update_license_no_dupes( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id diff --git a/tests/components/unifiprotect/test_services.py b/tests/components/unifiprotect/test_services.py index 
6808bacb40c..84e0e74a492 100644 --- a/tests/components/unifiprotect/test_services.py +++ b/tests/components/unifiprotect/test_services.py @@ -56,7 +56,9 @@ async def test_global_service_bad_device( """Test global service, invalid device ID.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock() with pytest.raises(HomeAssistantError): @@ -75,7 +77,9 @@ async def test_global_service_exception( """Test global service, unexpected error.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock(side_effect=BadRequest) with pytest.raises(HomeAssistantError): @@ -94,7 +98,9 @@ async def test_add_doorbell_text( """Test add_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock() await hass.services.async_call( @@ -112,7 +118,9 @@ async def test_remove_doorbell_text( """Test remove_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["remove_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["remove_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.remove_custom_doorbell_message = AsyncMock() await hass.services.async_call( @@ -129,7 +137,9 @@ async def test_add_doorbell_text_disabled_config_entry( ) -> None: """Test add_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock() await hass.config_entries.async_set_disabled_by( @@ -158,10 +168,10 @@ async def test_set_chime_paired_doorbells( ufp.api.update_device = AsyncMock() - camera1 = doorbell.copy() + camera1 = doorbell.model_copy() camera1.name = "Test Camera 1" - camera2 = doorbell.copy() + camera2 = doorbell.model_copy() camera2.name = "Test Camera 2" await init_entry(hass, ufp, [camera1, camera2, chime]) diff --git a/tests/components/unifiprotect/test_switch.py b/tests/components/unifiprotect/test_switch.py index 9e0e9efa0ce..194e46681ce 100644 --- a/tests/components/unifiprotect/test_switch.py +++ b/tests/components/unifiprotect/test_switch.py @@ -89,7 +89,7 @@ async def test_switch_nvr(hass: HomeAssistant, ufp: MockUFPFixture) -> None: assert_entity_counts(hass, Platform.SWITCH, 2, 2) nvr = ufp.api.bootstrap.nvr - nvr.__fields__["set_insights"] = Mock(final=False) + nvr.__pydantic_fields__["set_insights"] = Mock(final=False, frozen=False) nvr.set_insights = AsyncMock() entity_id = "switch.unifiprotect_insights_enabled" @@ -272,7 +272,7 @@ async def test_switch_light_status( description = LIGHT_SWITCHES[1] - light.__fields__["set_status_light"] = Mock(final=False) + light.__pydantic_fields__["set_status_light"] = Mock(final=False, frozen=False) light.set_status_light = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, light, description) @@ -300,7 +300,7 @@ async def test_switch_camera_ssh( description = CAMERA_SWITCHES[0] - doorbell.__fields__["set_ssh"] = Mock(final=False) + 
doorbell.__pydantic_fields__["set_ssh"] = Mock(final=False, frozen=False) doorbell.set_ssh = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) @@ -333,7 +333,9 @@ async def test_switch_camera_simple( assert description.ufp_set_method is not None - doorbell.__fields__[description.ufp_set_method] = Mock(final=False) + doorbell.__pydantic_fields__[description.ufp_set_method] = Mock( + final=False, frozen=False + ) setattr(doorbell, description.ufp_set_method, AsyncMock()) set_method = getattr(doorbell, description.ufp_set_method) @@ -362,7 +364,7 @@ async def test_switch_camera_highfps( description = CAMERA_SWITCHES[3] - doorbell.__fields__["set_video_mode"] = Mock(final=False) + doorbell.__pydantic_fields__["set_video_mode"] = Mock(final=False, frozen=False) doorbell.set_video_mode = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) @@ -393,7 +395,7 @@ async def test_switch_camera_privacy( description = PRIVACY_MODE_SWITCH - doorbell.__fields__["set_privacy"] = Mock(final=False) + doorbell.__pydantic_fields__["set_privacy"] = Mock(final=False, frozen=False) doorbell.set_privacy = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) @@ -409,7 +411,7 @@ async def test_switch_camera_privacy( doorbell.set_privacy.assert_called_with(True, 0, RecordingMode.NEVER) - new_doorbell = doorbell.copy() + new_doorbell = doorbell.model_copy() new_doorbell.add_privacy_zone() new_doorbell.mic_volume = 0 new_doorbell.recording_settings.mode = RecordingMode.NEVER @@ -445,7 +447,7 @@ async def test_switch_camera_privacy_already_on( description = PRIVACY_MODE_SWITCH - doorbell.__fields__["set_privacy"] = Mock(final=False) + doorbell.__pydantic_fields__["set_privacy"] = Mock(final=False, frozen=False) doorbell.set_privacy = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) diff --git a/tests/components/unifiprotect/test_text.py b/tests/components/unifiprotect/test_text.py index 3ca11744abb..c34611c43a9 100644 --- a/tests/components/unifiprotect/test_text.py +++ b/tests/components/unifiprotect/test_text.py @@ -78,7 +78,7 @@ async def test_text_camera_set( Platform.TEXT, doorbell, description ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( diff --git a/tests/components/unifiprotect/test_views.py b/tests/components/unifiprotect/test_views.py index fed0a98552d..0f1b7791680 100644 --- a/tests/components/unifiprotect/test_views.py +++ b/tests/components/unifiprotect/test_views.py @@ -11,6 +11,7 @@ from uiprotect.exceptions import ClientError from homeassistant.components.unifiprotect.views import ( async_generate_event_video_url, + async_generate_proxy_event_video_url, async_generate_thumbnail_url, ) from homeassistant.core import HomeAssistant @@ -520,3 +521,219 @@ async def test_video_entity_id( assert response.status == 200 ufp.api.request.assert_called_once() + + +async def test_video_event_bad_nvr_id( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + camera: Camera, + ufp: MockUFPFixture, +) -> None: + """Test video proxy URL with bad NVR id.""" + + ufp.api.request = AsyncMock() + await init_entry(hass, ufp, [camera]) + + url = async_generate_proxy_event_video_url("bad_id", "test_id") + + http_client = await hass_client() + response = cast(ClientResponse, await 
http_client.get(url)) + + assert response.status == 404 + ufp.api.request.assert_not_called() + + +async def test_video_event_bad_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, +) -> None: + """Test generating event with bad event ID.""" + + ufp.api.get_event = AsyncMock(side_effect=ClientError()) + + await init_entry(hass, ufp, [camera]) + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "bad_event_id") + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + assert response.status == 404 + ufp.api.request.assert_not_called() + + +async def test_video_event_bad_camera( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, +) -> None: + """Test generating event with bad camera ID.""" + + ufp.api.get_event = AsyncMock(side_effect=ClientError()) + + await init_entry(hass, ufp, [camera]) + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "bad_event_id") + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + assert response.status == 404 + ufp.api.request.assert_not_called() + + +async def test_video_event_bad_camera_perms( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, + fixed_now: datetime, +) -> None: + """Test video URL with bad camera perms.""" + + ufp.api.request = AsyncMock() + await init_entry(hass, ufp, [camera]) + + event_start = fixed_now - timedelta(seconds=30) + event = Event( + model=ModelType.EVENT, + api=ufp.api, + start=event_start, + end=fixed_now, + id="test_id", + type=EventType.MOTION, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id="bad_id", + camera=camera, + ) + + ufp.api.get_event = AsyncMock(return_value=event) + + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "test_id") + + ufp.api.bootstrap.auth_user.all_permissions = [] + ufp.api.bootstrap.auth_user._perm_cache = {} + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + + assert response.status == 404 + ufp.api.request.assert_not_called() + + +async def test_video_event_ongoing( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, + fixed_now: datetime, +) -> None: + """Test video URL with ongoing event.""" + + ufp.api.request = AsyncMock() + await init_entry(hass, ufp, [camera]) + + event_start = fixed_now - timedelta(seconds=30) + event = Event( + model=ModelType.EVENT, + api=ufp.api, + start=event_start, + id="test_id", + type=EventType.MOTION, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=camera.id, + camera=camera, + ) + + ufp.api.get_event = AsyncMock(return_value=event) + + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "test_id") + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + + assert response.status == 400 + ufp.api.request.assert_not_called() + + +async def test_event_video_no_data( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, + fixed_now: datetime, +) -> None: + """Test invalid no event video returned.""" + + await init_entry(hass, ufp, [camera]) + event_start = fixed_now - timedelta(seconds=30) + event = Event( + model=ModelType.EVENT, + api=ufp.api, + start=event_start, + end=fixed_now, + 
id="test_id", + type=EventType.MOTION, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=camera.id, + camera=camera, + ) + + ufp.api.request = AsyncMock(side_effect=ClientError) + ufp.api.get_event = AsyncMock(return_value=event) + + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "test_id") + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + + assert response.status == 404 + + +async def test_event_video( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, + fixed_now: datetime, +) -> None: + """Test event video URL with no video.""" + + content = Mock() + content.__anext__ = AsyncMock(side_effect=[b"test", b"test", StopAsyncIteration()]) + content.__aiter__ = Mock(return_value=content) + + mock_response = Mock() + mock_response.content_length = 8 + mock_response.content.iter_chunked = Mock(return_value=content) + + ufp.api.request = AsyncMock(return_value=mock_response) + await init_entry(hass, ufp, [camera]) + event_start = fixed_now - timedelta(seconds=30) + event = Event( + model=ModelType.EVENT, + api=ufp.api, + start=event_start, + end=fixed_now, + id="test_id", + type=EventType.MOTION, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=camera.id, + camera=camera, + ) + + ufp.api.get_event = AsyncMock(return_value=event) + + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "test_id") + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + assert await response.content.read() == b"testtest" + + assert response.status == 200 + ufp.api.request.assert_called_once() diff --git a/tests/components/unifiprotect/utils.py b/tests/components/unifiprotect/utils.py index 25a9ddcbb92..5a1ffa8258e 100644 --- a/tests/components/unifiprotect/utils.py +++ b/tests/components/unifiprotect/utils.py @@ -109,7 +109,11 @@ def ids_from_device_description( """Return expected unique_id and entity_id for a give platform/device/description combination.""" entity_name = normalize_name(device.display_name) - description_entity_name = normalize_name(str(description.name)) + + if description.name and isinstance(description.name, str): + description_entity_name = normalize_name(description.name) + else: + description_entity_name = normalize_name(description.key) unique_id = f"{device.mac}_{description.key}" entity_id = f"{platform.value}.{entity_name}_{description_entity_name}" diff --git a/tests/components/universal/test_media_player.py b/tests/components/universal/test_media_player.py index 7c992814cfe..5ebfd2c13ad 100644 --- a/tests/components/universal/test_media_player.py +++ b/tests/components/universal/test_media_player.py @@ -8,8 +8,11 @@ from voluptuous.error import MultipleInvalid from homeassistant import config as hass_config from homeassistant.components import input_number, input_select, media_player, switch -from homeassistant.components.media_player import MediaClass, MediaPlayerEntityFeature -from homeassistant.components.media_player.browse_media import BrowseMedia +from homeassistant.components.media_player import ( + BrowseMedia, + MediaClass, + MediaPlayerEntityFeature, +) import homeassistant.components.universal.media_player as universal from homeassistant.const import ( SERVICE_RELOAD, diff --git a/tests/components/upb/test_config_flow.py b/tests/components/upb/test_config_flow.py index 5f28f1d9b17..59a4e97d22b 100644 --- 
a/tests/components/upb/test_config_flow.py +++ b/tests/components/upb/test_config_flow.py @@ -114,42 +114,3 @@ async def test_form_user_with_already_configured(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" await hass.async_block_till_done() - - -async def test_form_import(hass: HomeAssistant) -> None: - """Test we get the form with import source.""" - - with ( - mocked_upb(), - patch( - "homeassistant.components.upb.async_setup_entry", return_value=True - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={"host": "tcp://42.4.2.42", "file_path": "upb.upe"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "UPB" - - assert result["data"] == {"host": "tcp://42.4.2.42", "file_path": "upb.upe"} - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_junk_input(hass: HomeAssistant) -> None: - """Test we get the form with import source.""" - - with mocked_upb(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={"foo": "goo", "goo": "foo"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown"} - - await hass.async_block_till_done() diff --git a/tests/components/upb/test_init.py b/tests/components/upb/test_init.py new file mode 100644 index 00000000000..a7621ce65fe --- /dev/null +++ b/tests/components/upb/test_init.py @@ -0,0 +1,25 @@ +"""The init tests for the UPB platform.""" + +from unittest.mock import patch + +from homeassistant.components.upb.const import DOMAIN +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None: + """Test migrating a 1.1 config entry to 1.2.""" + with patch("homeassistant.components.upb.async_setup_entry", return_value=True): + entry = MockConfigEntry( + domain=DOMAIN, + data={"protocol": "TCP", "address": "1.2.3.4", "file_path": "upb.upe"}, + version=1, + minor_version=1, + unique_id=123456, + ) + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + assert entry.version == 1 + assert entry.minor_version == 2 + assert entry.unique_id == "123456" diff --git a/tests/components/update/common.py b/tests/components/update/common.py index 70b69498f66..465812e6a3a 100644 --- a/tests/components/update/common.py +++ b/tests/components/update/common.py @@ -5,48 +5,16 @@ from typing import Any from homeassistant.components.update import UpdateEntity -from tests.common import MockEntity - _LOGGER = logging.getLogger(__name__) -class MockUpdateEntity(MockEntity, UpdateEntity): +class MockUpdateEntity(UpdateEntity): """Mock UpdateEntity class.""" - @property - def auto_update(self) -> bool: - """Indicate if the device or service has auto update enabled.""" - return self._handle("auto_update") - - @property - def installed_version(self) -> str | None: - """Version currently installed and in use.""" - return self._handle("installed_version") - - @property - def in_progress(self) -> bool | int | None: - """Update installation progress.""" - return self._handle("in_progress") - - @property - def latest_version(self) -> str | None: - """Latest version available for install.""" - return self._handle("latest_version") - - @property - def 
release_summary(self) -> str | None: - """Summary of the release notes or changelog.""" - return self._handle("release_summary") - - @property - def release_url(self) -> str | None: - """URL to the full release notes of the latest version available.""" - return self._handle("release_url") - - @property - def title(self) -> str | None: - """Title of the software.""" - return self._handle("title") + def __init__(self, **values: Any) -> None: + """Initialize an entity.""" + for key, val in values.items(): + setattr(self, f"_attr_{key}", val) def install(self, version: str | None, backup: bool, **kwargs: Any) -> None: """Install an update.""" @@ -54,10 +22,10 @@ class MockUpdateEntity(MockEntity, UpdateEntity): _LOGGER.info("Creating backup before installing update") if version is not None: - self._values["installed_version"] = version + self._attr_installed_version = version _LOGGER.info("Installed update with version: %s", version) else: - self._values["installed_version"] = self.latest_version + self._attr_installed_version = self.latest_version _LOGGER.info("Installed latest update") def release_notes(self) -> str | None: diff --git a/tests/components/update/conftest.py b/tests/components/update/conftest.py index 759f243e8db..eae5cc318da 100644 --- a/tests/components/update/conftest.py +++ b/tests/components/update/conftest.py @@ -51,12 +51,24 @@ def mock_update_entities() -> list[MockUpdateEntity]: ), MockUpdateEntity( name="Update Already in Progress", - unique_id="update_already_in_progres", + unique_id="update_already_in_progress", installed_version="1.0.0", latest_version="1.0.1", - in_progress=50, + in_progress=True, supported_features=UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS, + update_percentage=50, + ), + MockUpdateEntity( + name="Update Already in Progress Float", + unique_id="update_already_in_progress_float", + installed_version="1.0.0", + latest_version="1.0.1", + in_progress=True, + supported_features=UpdateEntityFeature.INSTALL + | UpdateEntityFeature.PROGRESS, + update_percentage=0.25, + display_precision=2, ), MockUpdateEntity( name="Update No Install", diff --git a/tests/components/update/test_init.py b/tests/components/update/test_init.py index 7860c679f37..d4916de8039 100644 --- a/tests/components/update/test_init.py +++ b/tests/components/update/test_init.py @@ -3,6 +3,7 @@ from collections.abc import Generator from unittest.mock import MagicMock, patch +from awesomeversion import AwesomeVersion, AwesomeVersionStrategy import pytest from homeassistant.components.update import ( @@ -17,6 +18,7 @@ from homeassistant.components.update import ( ) from homeassistant.components.update.const import ( ATTR_AUTO_UPDATE, + ATTR_DISPLAY_PRECISION, ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, @@ -24,11 +26,15 @@ from homeassistant.components.update.const import ( ATTR_RELEASE_URL, ATTR_SKIPPED_VERSION, ATTR_TITLE, + ATTR_UPDATE_PERCENTAGE, UpdateEntityFeature, ) from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import ( ATTR_ENTITY_ID, + ATTR_ENTITY_PICTURE, + ATTR_FRIENDLY_NAME, + ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, @@ -87,6 +93,7 @@ async def test_update(hass: HomeAssistant) -> None: assert update.state == STATE_ON assert update.state_attributes == { ATTR_AUTO_UPDATE: False, + ATTR_DISPLAY_PRECISION: 0, ATTR_INSTALLED_VERSION: "1.0.0", ATTR_IN_PROGRESS: False, ATTR_LATEST_VERSION: "1.0.1", @@ -94,6 +101,7 @@ async def test_update(hass: HomeAssistant) -> None: ATTR_RELEASE_URL: 
"https://example.com", ATTR_SKIPPED_VERSION: None, ATTR_TITLE: "Title", + ATTR_UPDATE_PERCENTAGE: None, } # Test no update available @@ -540,10 +548,20 @@ async def test_entity_with_backup_support( assert "Installed update with version: 0.9.8" in caplog.text +@pytest.mark.parametrize( + ("entity_id", "expected_display_precision", "expected_update_percentage"), + [ + ("update.update_already_in_progress", 0, 50), + ("update.update_already_in_progress_float", 2, 0.25), + ], +) async def test_entity_already_in_progress( hass: HomeAssistant, mock_update_entities: list[MockUpdateEntity], caplog: pytest.LogCaptureFixture, + entity_id: str, + expected_display_precision: int, + expected_update_percentage: float, ) -> None: """Test update install already in progress.""" setup_test_component_platform(hass, DOMAIN, mock_update_entities) @@ -551,12 +569,14 @@ async def test_entity_already_in_progress( assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) await hass.async_block_till_done() - state = hass.states.get("update.update_already_in_progress") + state = hass.states.get(entity_id) assert state assert state.state == STATE_ON + assert state.attributes[ATTR_DISPLAY_PRECISION] == expected_display_precision assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "1.0.1" - assert state.attributes[ATTR_IN_PROGRESS] == 50 + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == expected_update_percentage with pytest.raises( HomeAssistantError, @@ -565,10 +585,20 @@ async def test_entity_already_in_progress( await hass.services.async_call( DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.update_already_in_progress"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) + # Check update percentage is suppressed when in_progress is False + entity = next( + entity for entity in mock_update_entities if entity.entity_id == entity_id + ) + entity._attr_in_progress = False + entity.async_write_ha_state() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None + async def test_entity_without_progress_support( hass: HomeAssistant, @@ -866,93 +896,123 @@ async def test_name(hass: HomeAssistant) -> None: assert expected.items() <= state.attributes.items() -def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated supported features ints.""" +async def test_custom_version_is_newer(hass: HomeAssistant) -> None: + """Test UpdateEntity with overridden version_is_newer method.""" class MockUpdateEntity(UpdateEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return 1 + def version_is_newer(self, latest_version: str, installed_version: str) -> bool: + """Return True if latest_version is newer than installed_version.""" + return AwesomeVersion( + latest_version, + find_first_match=True, + ensure_strategy=[AwesomeVersionStrategy.SEMVER], + ) > AwesomeVersion( + installed_version, + find_first_match=True, + ensure_strategy=[AwesomeVersionStrategy.SEMVER], + ) - entity = MockUpdateEntity() - assert entity.supported_features_compat is UpdateEntityFeature(1) - assert "MockUpdateEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "UpdateEntityFeature.INSTALL" in caplog.text - caplog.clear() - assert 
entity.supported_features_compat is UpdateEntityFeature(1) - assert "is using deprecated supported features values" not in caplog.text + update = MockUpdateEntity() + update.hass = hass + update.platform = MockEntityPlatform(hass) + + STABLE = "20230913-111730/v1.14.0-gcb84623" + BETA = "20231107-162609/v1.14.1-rc1-g0617c15" + + # Set current installed version to STABLE + update._attr_installed_version = STABLE + update._attr_latest_version = BETA + + assert update.installed_version == STABLE + assert update.latest_version == BETA + assert update.state == STATE_ON + + # Set current installed version to BETA + update._attr_installed_version = BETA + update._attr_latest_version = STABLE + + assert update.installed_version == BETA + assert update.latest_version == STABLE + assert update.state == STATE_OFF -async def test_deprecated_supported_features_ints_with_service_call( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test deprecated supported features ints with install service.""" - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, +@pytest.mark.parametrize( + ("supported_features", "extra_expected_attributes"), + [ + ( + UpdateEntityFeature(0), + [ + {}, + {}, + {}, + {}, + {}, + {}, + {}, + ], ), - ) + ( + UpdateEntityFeature.PROGRESS, + [ + {ATTR_IN_PROGRESS: False}, + {ATTR_IN_PROGRESS: False}, + {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 0}, + {ATTR_IN_PROGRESS: True}, + {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 1}, + {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 10}, + {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 100}, + ], + ), + ], +) +async def test_update_percentage_backwards_compatibility( + hass: HomeAssistant, + supported_features: UpdateEntityFeature, + extra_expected_attributes: list[dict], +) -> None: + """Test deriving update percentage from deprecated in_progress.""" + update = MockUpdateEntity() - class MockUpdateEntity(UpdateEntity): - _attr_supported_features = 1 | 2 + update._attr_installed_version = "1.0.0" + update._attr_latest_version = "1.0.1" + update._attr_name = "legacy" + update._attr_release_summary = "Summary" + update._attr_release_url = "https://example.com" + update._attr_supported_features = supported_features + update._attr_title = "Title" - def install(self, version: str | None = None, backup: bool = False) -> None: - """Install an update.""" - - entity = MockUpdateEntity() - entity.entity_id = ( - "update.test_deprecated_supported_features_ints_with_service_call" - ) - - async def async_setup_entry_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test update platform via config entry.""" - async_add_entities([entity]) - - mock_platform( - hass, - f"{TEST_DOMAIN}.{DOMAIN}", - MockPlatform(async_setup_entry=async_setup_entry_platform), - ) - - config_entry = MockConfigEntry(domain=TEST_DOMAIN) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform(hass, DOMAIN, [update]) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) await hass.async_block_till_done() - assert "is using deprecated 
supported features values" in caplog.text + expected_attributes = { + ATTR_AUTO_UPDATE: False, + ATTR_DISPLAY_PRECISION: 0, + ATTR_ENTITY_PICTURE: "https://brands.home-assistant.io/_/test/icon.png", + ATTR_FRIENDLY_NAME: "legacy", + ATTR_INSTALLED_VERSION: "1.0.0", + ATTR_IN_PROGRESS: False, + ATTR_LATEST_VERSION: "1.0.1", + ATTR_RELEASE_SUMMARY: "Summary", + ATTR_RELEASE_URL: "https://example.com", + ATTR_SKIPPED_VERSION: None, + ATTR_SUPPORTED_FEATURES: supported_features, + ATTR_TITLE: "Title", + ATTR_UPDATE_PERCENTAGE: None, + } - assert isinstance(entity.supported_features, int) + state = hass.states.get("update.legacy") + assert state is not None + assert state.state == STATE_ON + assert state.attributes == expected_attributes | extra_expected_attributes[0] - with pytest.raises( - HomeAssistantError, - match="Backup is not supported for update.test_deprecated_supported_features_ints_with_service_call", - ): - await hass.services.async_call( - DOMAIN, - SERVICE_INSTALL, - { - ATTR_VERSION: "0.9.9", - ATTR_BACKUP: True, - ATTR_ENTITY_ID: "update.test_deprecated_supported_features_ints_with_service_call", - }, - blocking=True, + in_progress_list = [False, 0, True, 1, 10, 100] + + for i, in_progress in enumerate(in_progress_list): + update._attr_in_progress = in_progress + update.async_write_ha_state() + state = hass.states.get("update.legacy") + assert state.state == STATE_ON + assert ( + state.attributes == expected_attributes | extra_expected_attributes[i + 1] ) diff --git a/tests/components/update/test_recorder.py b/tests/components/update/test_recorder.py index 0bd209ce1c2..68e5f93a757 100644 --- a/tests/components/update/test_recorder.py +++ b/tests/components/update/test_recorder.py @@ -7,9 +7,11 @@ from datetime import timedelta from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.update.const import ( + ATTR_DISPLAY_PRECISION, ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_RELEASE_SUMMARY, + ATTR_UPDATE_PERCENTAGE, DOMAIN, ) from homeassistant.const import ATTR_ENTITY_PICTURE, CONF_PLATFORM @@ -34,7 +36,9 @@ async def test_exclude_attributes( assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) await hass.async_block_till_done() state = hass.states.get("update.update_already_in_progress") - assert state.attributes[ATTR_IN_PROGRESS] == 50 + assert state.attributes[ATTR_DISPLAY_PRECISION] == 0 + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 assert ( state.attributes[ATTR_ENTITY_PICTURE] == "https://brands.home-assistant.io/_/test/icon.png" @@ -52,7 +56,9 @@ async def test_exclude_attributes( assert len(states) >= 1 for entity_states in states.values(): for state in entity_states: + assert ATTR_DISPLAY_PRECISION not in state.attributes assert ATTR_ENTITY_PICTURE not in state.attributes assert ATTR_IN_PROGRESS not in state.attributes assert ATTR_RELEASE_SUMMARY not in state.attributes assert ATTR_INSTALLED_VERSION in state.attributes + assert ATTR_UPDATE_PERCENTAGE not in state.attributes diff --git a/tests/components/upnp/test_init.py b/tests/components/upnp/test_init.py index 0e8551dd8a1..ff74ca87b12 100644 --- a/tests/components/upnp/test_init.py +++ b/tests/components/upnp/test_init.py @@ -7,6 +7,7 @@ import copy from typing import Any from unittest.mock import AsyncMock, MagicMock, patch +from async_upnp_client.exceptions import UpnpCommunicationError from 
async_upnp_client.profiles.igd import IgdDevice import pytest @@ -179,7 +180,7 @@ async def test_async_setup_udn_mismatch( async def test_async_setup_entry_force_poll( hass: HomeAssistant, mock_igd_device: IgdDevice ) -> None: - """Test async_setup_entry.""" + """Test async_setup_entry with forced polling.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=TEST_USN, @@ -200,3 +201,47 @@ async def test_async_setup_entry_force_poll( assert await hass.config_entries.async_setup(entry.entry_id) is True mock_igd_device.async_subscribe_services.assert_not_called() + + # Ensure that the device is forced to poll. + mock_igd_device.async_get_traffic_and_status_data.assert_called_with( + None, force_poll=True + ) + + +@pytest.mark.usefixtures( + "ssdp_instant_discovery", + "mock_get_source_ip", + "mock_mac_address_from_host", +) +async def test_async_setup_entry_force_poll_subscribe_error( + hass: HomeAssistant, mock_igd_device: IgdDevice +) -> None: + """Test async_setup_entry where subscribing fails.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_USN, + data={ + CONFIG_ENTRY_ST: TEST_ST, + CONFIG_ENTRY_UDN: TEST_UDN, + CONFIG_ENTRY_ORIGINAL_UDN: TEST_UDN, + CONFIG_ENTRY_LOCATION: TEST_LOCATION, + CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, + }, + options={ + CONFIG_ENTRY_FORCE_POLL: False, + }, + ) + + # Subscribing partially succeeds, but not completely. + # Unsubscribing will fail for the subscribed services afterwards. + mock_igd_device.async_subscribe_services.side_effect = UpnpCommunicationError + mock_igd_device.async_unsubscribe_services.side_effect = UpnpCommunicationError + + # Load config_entry, should still be able to load, falling back to polling/the old functionality. + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) is True + + # Ensure that the device is forced to poll. 
+ mock_igd_device.async_get_traffic_and_status_data.assert_called_with( + None, force_poll=True + ) diff --git a/tests/components/uptime/snapshots/test_config_flow.ambr b/tests/components/uptime/snapshots/test_config_flow.ambr index 3e5b492f871..93b1da60998 100644 --- a/tests/components/uptime/snapshots/test_config_flow.ambr +++ b/tests/components/uptime/snapshots/test_config_flow.ambr @@ -17,6 +17,8 @@ 'data': dict({ }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'uptime', 'entry_id': , 'minor_version': 1, @@ -25,10 +27,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Uptime', 'unique_id': None, 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Uptime', 'type': , 'version': 1, diff --git a/tests/components/uptimerobot/test_config_flow.py b/tests/components/uptimerobot/test_config_flow.py index 1cf0a358a87..3ba5ad696a6 100644 --- a/tests/components/uptimerobot/test_config_flow.py +++ b/tests/components/uptimerobot/test_config_flow.py @@ -168,15 +168,7 @@ async def test_reauthentication( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -209,15 +201,7 @@ async def test_reauthentication_failure( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -253,15 +237,7 @@ async def test_reauthentication_failure_no_existing_entry( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -294,15 +270,7 @@ async def test_reauthentication_failure_account_not_matching( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/utility_meter/snapshots/test_diagnostics.ambr b/tests/components/utility_meter/snapshots/test_diagnostics.ambr index 28841854766..ef235bba99d 100644 --- a/tests/components/utility_meter/snapshots/test_diagnostics.ambr +++ b/tests/components/utility_meter/snapshots/test_diagnostics.ambr @@ -5,6 +5,8 @@ 'data': dict({ }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'utility_meter', 'minor_version': 1, 'options': dict({ @@ -23,6 +25,8 @@ 
'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Energy Bill', 'unique_id': None, 'version': 2, @@ -39,7 +43,17 @@ 'status': 'collecting', 'tariff': 'tariff0', }), - 'last_sensor_data': None, + 'last_sensor_data': dict({ + 'last_period': '0', + 'last_reset': '2024-04-05T00:00:00+00:00', + 'last_valid_state': 3, + 'native_unit_of_measurement': 'kWh', + 'native_value': dict({ + '__type': "<class 'decimal.Decimal'>", + 'decimal_str': '3', + }), + 'status': 'collecting', + }), 'name': 'Energy Bill tariff0', 'period': 'monthly', 'source': 'sensor.input1', @@ -55,7 +69,17 @@ 'status': 'paused', 'tariff': 'tariff1', }), - 'last_sensor_data': None, + 'last_sensor_data': dict({ + 'last_period': '0', + 'last_reset': '2024-04-05T00:00:00+00:00', + 'last_valid_state': 7, + 'native_unit_of_measurement': 'kWh', + 'native_value': dict({ + '__type': "<class 'decimal.Decimal'>", + 'decimal_str': '7', + }), + 'status': 'paused', + }), 'name': 'Energy Bill tariff1', 'period': 'monthly', 'source': 'sensor.input1', diff --git a/tests/components/utility_meter/test_diagnostics.py b/tests/components/utility_meter/test_diagnostics.py index 9ecabe813b1..8be5f949940 100644 --- a/tests/components/utility_meter/test_diagnostics.py +++ b/tests/components/utility_meter/test_diagnostics.py @@ -91,7 +91,17 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - {}, + { + "native_value": { + "__type": "<class 'decimal.Decimal'>", + "decimal_str": "3", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "last_valid_state": 3, + "status": "collecting", + }, ), ( State( @@ -101,7 +111,17 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - {}, + { + "native_value": { + "__type": "<class 'decimal.Decimal'>", + "decimal_str": "7", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "last_valid_state": 7, + "status": "paused", + }, ), ], ) diff --git a/tests/components/utility_meter/test_select.py b/tests/components/utility_meter/test_select.py index 61f6cbe75b9..1f54f3b500a 100644 --- a/tests/components/utility_meter/test_select.py +++ b/tests/components/utility_meter/test_select.py @@ -3,10 +3,72 @@ from homeassistant.components.utility_meter.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +async def test_select_entity_name_config_entry( + hass: HomeAssistant, +) -> None: + """Test the select entity name when set up from a config entry.""" + + config_entry_config = { + "cycle": "none", + "delta_values": False, + "name": "Energy bill", + "net_consumption": False, + "offset": 0, + "periodically_resetting": True, + "source": "sensor.energy", + "tariffs": ["peak", "offpeak"], + } + + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + utility_meter_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options=config_entry_config, + title=config_entry_config["name"], + ) + + utility_meter_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(utility_meter_config_entry.entry_id) + + await hass.async_block_till_done() + + state = hass.states.get("select.energy_bill") + assert state is not None + assert state.attributes.get("friendly_name") == "Energy bill" + + +async def test_select_entity_name_yaml( + hass: HomeAssistant, +) -> None: + """Test the select entity name when set up from YAML.""" + + yaml_config = { +
"utility_meter": { + "energy_bill": { + "name": "Energy bill", + "source": "sensor.energy", + "tariffs": ["peak", "offpeak"], + "unique_id": "1234abcd", + } + } + } + + assert await async_setup_component(hass, DOMAIN, yaml_config) + + await hass.async_block_till_done() + + state = hass.states.get("select.energy_bill") + assert state is not None + assert state.attributes.get("friendly_name") == "Energy bill" + + async def test_device_id( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/utility_meter/test_sensor.py b/tests/components/utility_meter/test_sensor.py index 745bf0ce012..348afac57f7 100644 --- a/tests/components/utility_meter/test_sensor.py +++ b/tests/components/utility_meter/test_sensor.py @@ -26,7 +26,6 @@ from homeassistant.components.utility_meter.const import ( ) from homeassistant.components.utility_meter.sensor import ( ATTR_LAST_RESET, - ATTR_LAST_VALID_STATE, ATTR_STATUS, COLLECTING, PAUSED, @@ -760,64 +759,6 @@ async def test_restore_state( "status": "paused", }, ), - # sensor.energy_bill_tariff2 has missing keys and falls back to - # saved state - ( - State( - "sensor.energy_bill_tariff2", - "2.1", - attributes={ - ATTR_STATUS: PAUSED, - ATTR_LAST_RESET: last_reset_1, - ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, - }, - ), - { - "native_value": { - "__type": "", - "decimal_str": "2.2", - }, - "native_unit_of_measurement": "kWh", - "last_valid_state": "None", - }, - ), - # sensor.energy_bill_tariff3 has invalid data and falls back to - # saved state - ( - State( - "sensor.energy_bill_tariff3", - "3.1", - attributes={ - ATTR_STATUS: COLLECTING, - ATTR_LAST_RESET: last_reset_1, - ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, - }, - ), - { - "native_value": { - "__type": "", - "decimal_str": "3f", # Invalid - }, - "native_unit_of_measurement": "kWh", - "last_valid_state": "None", - }, - ), - # No extra saved data, fall back to saved state - ( - State( - "sensor.energy_bill_tariff4", - "error", - attributes={ - ATTR_STATUS: COLLECTING, - ATTR_LAST_RESET: last_reset_1, - ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, - }, - ), - {}, - ), ], ) @@ -852,25 +793,6 @@ async def test_restore_state( assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - state = hass.states.get("sensor.energy_bill_tariff2") - assert state.state == "2.1" - assert state.attributes.get("status") == PAUSED - assert state.attributes.get("last_reset") == last_reset_1 - assert state.attributes.get("last_valid_state") == "None" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - - state = hass.states.get("sensor.energy_bill_tariff3") - assert state.state == "3.1" - assert state.attributes.get("status") == COLLECTING - assert state.attributes.get("last_reset") == last_reset_1 - assert state.attributes.get("last_valid_state") == "None" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - - state = hass.states.get("sensor.energy_bill_tariff4") - assert state.state == STATE_UNKNOWN - # utility_meter is loaded, now set sensors according to utility_meter: hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -882,12 +804,7 @@ async 
def test_restore_state( state = hass.states.get("sensor.energy_bill_tariff0") assert state.attributes.get("status") == COLLECTING - for entity_id in ( - "sensor.energy_bill_tariff1", - "sensor.energy_bill_tariff2", - "sensor.energy_bill_tariff3", - "sensor.energy_bill_tariff4", - ): + for entity_id in ("sensor.energy_bill_tariff1",): state = hass.states.get(entity_id) assert state.attributes.get("status") == PAUSED @@ -939,7 +856,18 @@ async def test_service_reset_no_tariffs( ATTR_LAST_RESET: last_reset, }, ), - {}, + { + "native_value": { + "__type": "<class 'decimal.Decimal'>", + "decimal_str": "3", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "last_valid_state": None, + "status": "collecting", + "input_device_class": "energy", + }, ), ], ) @@ -1045,21 +973,33 @@ async def test_service_reset_no_tariffs_correct_with_multi( State( "sensor.energy_bill", "3", - attributes={ - ATTR_LAST_RESET: last_reset, - }, ), - {}, + { + "native_value": { + "__type": "<class 'decimal.Decimal'>", + "decimal_str": "3", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "status": "collecting", + }, ), ( State( "sensor.water_bill", "6", - attributes={ - ATTR_LAST_RESET: last_reset, - }, ), - {}, + { + "native_value": { + "__type": "<class 'decimal.Decimal'>", + "decimal_str": "6", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "status": "collecting", + }, ), ], ) @@ -1804,6 +1744,43 @@ async def test_self_reset_hourly_dst(hass: HomeAssistant) -> None: ) +async def test_self_reset_hourly_dst2(hass: HomeAssistant) -> None: + """Test daily reset of meter in DST change conditions.""" + + hass.config.time_zone = "Europe/Berlin" + dt_util.set_default_time_zone(dt_util.get_time_zone(hass.config.time_zone)) + await _test_self_reset( + hass, gen_config("daily"), "2024-10-26T23:59:00.000000+02:00" + ) + + state = hass.states.get("sensor.energy_bill") + last_reset = dt_util.parse_datetime("2024-10-27T00:00:00.000000+02:00") + assert ( + dt_util.as_local(dt_util.parse_datetime(state.attributes.get("last_reset"))) + == last_reset + ) + + next_reset = dt_util.parse_datetime("2024-10-28T00:00:00.000000+01:00").isoformat() + assert state.attributes.get("next_reset") == next_reset + + +async def test_tz_changes(hass: HomeAssistant) -> None: + """Test that a timezone change changes the scheduler.""" + + await hass.config.async_update(time_zone="Europe/Prague") + + await _test_self_reset( + hass, gen_config("daily"), "2024-10-26T23:59:00.000000+02:00" + ) + state = hass.states.get("sensor.energy_bill") + assert state.attributes.get("next_reset") == "2024-10-28T00:00:00+01:00" + + await hass.config.async_update(time_zone="Pacific/Fiji") + + state = hass.states.get("sensor.energy_bill") + assert state.attributes.get("next_reset") != "2024-10-28T00:00:00+01:00" + + async def test_self_reset_daily(hass: HomeAssistant) -> None: """Test daily reset of meter.""" await _test_self_reset( diff --git a/tests/components/uvc/test_camera.py b/tests/components/uvc/test_camera.py index 5ce8baf9919..43216e354c7 100644 --- a/tests/components/uvc/test_camera.py +++ b/tests/components/uvc/test_camera.py @@ -4,15 +4,14 @@ from datetime import UTC, datetime, timedelta from unittest.mock import call, patch import pytest -import requests from uvcclient import camera, nvr from homeassistant.components.camera import ( DEFAULT_CONTENT_TYPE, SERVICE_DISABLE_MOTION, SERVICE_ENABLE_MOTION, - STATE_RECORDING, CameraEntityFeature, + CameraState, async_get_image, async_get_stream_source, )
@@ -46,6 +45,7 @@ def mock_remote_fixture(camera_info): ] mock_remote.return_value.index.return_value = mock_cameras mock_remote.return_value.server_version = (3, 2, 0) + mock_remote.return_value.camera_identifier = "id" yield mock_remote @@ -205,6 +205,7 @@ async def test_setup_partial_config_v31x( """Test the setup with a v3.1.x server.""" config = {"platform": "uvc", "nvr": "foo", "key": "secret"} mock_remote.return_value.server_version = (3, 1, 3) + mock_remote.return_value.camera_identifier = "uuid" assert await async_setup_component(hass, "camera", {"camera": config}) await hass.async_block_till_done() @@ -260,7 +261,6 @@ async def test_setup_incomplete_config( [ (nvr.NotAuthorized, 0), (nvr.NvrError, 2), - (requests.exceptions.ConnectionError, 2), ], ) async def test_setup_nvr_errors_during_indexing( @@ -293,7 +293,6 @@ async def test_setup_nvr_errors_during_indexing( [ (nvr.NotAuthorized, 0), (nvr.NvrError, 2), - (requests.exceptions.ConnectionError, 2), ], ) async def test_setup_nvr_errors_during_initialization( @@ -337,7 +336,7 @@ async def test_properties(hass: HomeAssistant, mock_remote) -> None: assert state assert state.name == "Front" - assert state.state == STATE_RECORDING + assert state.state == CameraState.RECORDING assert state.attributes["brand"] == "Ubiquiti" assert state.attributes["model_name"] == "UVC" assert state.attributes["supported_features"] == CameraEntityFeature.STREAM @@ -355,7 +354,7 @@ async def test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state == STATE_RECORDING + assert state.state == CameraState.RECORDING mock_remote.return_value.get_camera.return_value["recordingSettings"][ "fullTimeRecordEnabled" @@ -370,7 +369,7 @@ async def test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state != STATE_RECORDING + assert state.state != CameraState.RECORDING assert state.attributes["last_recording_start_time"] == datetime( 2021, 1, 8, 1, 56, 32, 367000, tzinfo=UTC ) @@ -383,7 +382,7 @@ async def test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state != STATE_RECORDING + assert state.state != CameraState.RECORDING mock_remote.return_value.get_camera.return_value["recordingIndicator"] = ( "MOTION_INPROGRESS" @@ -395,7 +394,7 @@ async def test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state == STATE_RECORDING + assert state.state == CameraState.RECORDING mock_remote.return_value.get_camera.return_value["recordingIndicator"] = ( "MOTION_FINISHED" @@ -407,7 +406,7 @@ async def test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state == STATE_RECORDING + assert state.state == CameraState.RECORDING async def test_stream(hass: HomeAssistant, mock_remote) -> None: diff --git a/tests/components/v2c/snapshots/test_diagnostics.ambr b/tests/components/v2c/snapshots/test_diagnostics.ambr index cc34cae87f8..780a00acd64 100644 --- a/tests/components/v2c/snapshots/test_diagnostics.ambr +++ b/tests/components/v2c/snapshots/test_diagnostics.ambr @@ -6,6 +6,8 @@ 'host': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'v2c', 'entry_id': 'da58ee91f38c2406c2a36d0a1a7f8569', 'minor_version': 1, @@ -14,6 +16,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': 
'ABC123', 'version': 1, diff --git a/tests/components/vacuum/__init__.py b/tests/components/vacuum/__init__.py index 0a681730cb2..26e31a87eee 100644 --- a/tests/components/vacuum/__init__.py +++ b/tests/components/vacuum/__init__.py @@ -4,12 +4,8 @@ from typing import Any from homeassistant.components.vacuum import ( DOMAIN, - STATE_CLEANING, - STATE_DOCKED, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -39,20 +35,20 @@ class MockVacuum(MockEntity, StateVacuumEntity): def __init__(self, **values: Any) -> None: """Initialize a mock vacuum entity.""" super().__init__(**values) - self._attr_state = STATE_DOCKED + self._attr_activity = VacuumActivity.DOCKED self._attr_fan_speed = "slow" def stop(self, **kwargs: Any) -> None: """Stop cleaning.""" - self._attr_state = STATE_IDLE + self._attr_activity = VacuumActivity.IDLE def return_to_base(self, **kwargs: Any) -> None: """Return to base.""" - self._attr_state = STATE_RETURNING + self._attr_activity = VacuumActivity.RETURNING def clean_spot(self, **kwargs: Any) -> None: """Clean a spot.""" - self._attr_state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING def set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None: """Set the fan speed.""" @@ -60,11 +56,11 @@ class MockVacuum(MockEntity, StateVacuumEntity): def start(self) -> None: """Start cleaning.""" - self._attr_state = STATE_CLEANING + self._attr_activity = VacuumActivity.CLEANING def pause(self) -> None: """Pause cleaning.""" - self._attr_state = STATE_PAUSED + self._attr_activity = VacuumActivity.PAUSED async def help_async_setup_entry_init( diff --git a/tests/components/vacuum/conftest.py b/tests/components/vacuum/conftest.py index d298260c575..6e6639431d0 100644 --- a/tests/components/vacuum/conftest.py +++ b/tests/components/vacuum/conftest.py @@ -1,13 +1,28 @@ """Fixtures for Vacuum platform tests.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator +from unittest.mock import MagicMock, patch import pytest -from homeassistant.config_entries import ConfigFlow +from homeassistant.components.vacuum import DOMAIN as VACUUM_DOMAIN, VacuumEntityFeature +from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, frame +from homeassistant.helpers.entity_platform import AddEntitiesCallback -from tests.common import mock_config_flow, mock_platform +from . 
import MockVacuum + +from tests.common import ( + MockConfigEntry, + MockModule, + MockPlatform, + mock_config_flow, + mock_integration, + mock_platform, +) + +TEST_DOMAIN = "test" class MockFlow(ConfigFlow): @@ -17,7 +32,94 @@ class MockFlow(ConfigFlow): @pytest.fixture def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: """Mock config flow.""" - mock_platform(hass, "test.config_flow") + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - with mock_config_flow("test", MockFlow): + with mock_config_flow(TEST_DOMAIN, MockFlow): + yield + + +@pytest.fixture(name="supported_features") +async def vacuum_supported_features() -> VacuumEntityFeature: + """Return the supported features for the test vacuum entity.""" + return ( + VacuumEntityFeature.PAUSE + | VacuumEntityFeature.STOP + | VacuumEntityFeature.RETURN_HOME + | VacuumEntityFeature.FAN_SPEED + | VacuumEntityFeature.BATTERY + | VacuumEntityFeature.CLEAN_SPOT + | VacuumEntityFeature.MAP + | VacuumEntityFeature.STATE + | VacuumEntityFeature.START + ) + + +@pytest.fixture(name="mock_vacuum_entity") +async def setup_vacuum_platform_test_entity( + hass: HomeAssistant, + config_flow_fixture: None, + entity_registry: er.EntityRegistry, + supported_features: VacuumEntityFeature, +) -> MagicMock: + """Set up vacuum entity using an entity platform.""" + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [VACUUM_DOMAIN] + ) + return True + + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + ), + ) + + entity = MockVacuum( + supported_features=supported_features, + ) + + async def async_setup_entry_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test vacuum platform via config entry.""" + async_add_entities([entity]) + + mock_platform( + hass, + f"{TEST_DOMAIN}.{VACUUM_DOMAIN}", + MockPlatform(async_setup_entry=async_setup_entry_platform), + ) + + config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(entity.entity_id) + assert state is not None + + return entity + + +@pytest.fixture(name="mock_as_custom_component") +async def mock_frame(hass: HomeAssistant) -> AsyncGenerator[None]: + """Mock frame.""" + with patch( + "homeassistant.helpers.frame.get_integration_frame", + return_value=frame.IntegrationFrame( + custom_integration=True, + integration="alarm_control_panel", + module="test_init.py", + relative_filename="test_init.py", + frame=frame.get_current_frame(), + ), + ): yield diff --git a/tests/components/vacuum/test_device_condition.py b/tests/components/vacuum/test_device_condition.py index 9a2a67f7141..5a1b1fea7de 100644 --- a/tests/components/vacuum/test_device_condition.py +++ b/tests/components/vacuum/test_device_condition.py @@ -5,12 +5,7 @@ from pytest_unordered import unordered from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.components.vacuum import ( - DOMAIN, - STATE_CLEANING, - STATE_DOCKED, - STATE_RETURNING, -) +from homeassistant.components.vacuum import DOMAIN, VacuumActivity from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall 
from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -122,7 +117,7 @@ async def test_if_state( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) assert await async_setup_component( hass, @@ -174,7 +169,7 @@ async def test_if_state( assert len(service_calls) == 1 assert service_calls[0].data["some"] == "is_docked - event - test_event2" - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() @@ -182,7 +177,7 @@ async def test_if_state( assert service_calls[1].data["some"] == "is_cleaning - event - test_event1" # Returning means it's still cleaning - hass.states.async_set(entry.entity_id, STATE_RETURNING) + hass.states.async_set(entry.entity_id, VacuumActivity.RETURNING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() @@ -207,7 +202,7 @@ async def test_if_state_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) assert await async_setup_component( hass, diff --git a/tests/components/vacuum/test_device_trigger.py b/tests/components/vacuum/test_device_trigger.py index c186bd4d9eb..3a0cbafb4a1 100644 --- a/tests/components/vacuum/test_device_trigger.py +++ b/tests/components/vacuum/test_device_trigger.py @@ -7,7 +7,7 @@ from pytest_unordered import unordered from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.components.vacuum import DOMAIN, STATE_CLEANING, STATE_DOCKED +from homeassistant.components.vacuum import DOMAIN, VacuumActivity from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -188,7 +188,7 @@ async def test_if_fires_on_state_change( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) assert await async_setup_component( hass, @@ -238,7 +238,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is cleaning - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -247,7 +247,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is docked - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) await hass.async_block_till_done() assert len(service_calls) == 2 assert ( @@ -273,7 +273,7 @@ async def test_if_fires_on_state_change_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) assert await async_setup_component( hass, @@ -304,7 +304,7 @@ async def test_if_fires_on_state_change_legacy( ) # Fake that the entity is cleaning - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) await hass.async_block_till_done() assert 
len(service_calls) == 1 assert ( @@ -330,7 +330,7 @@ async def test_if_fires_on_state_change_with_for( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_DOCKED) + hass.states.async_set(entry.entity_id, VacuumActivity.DOCKED) assert await async_setup_component( hass, @@ -365,7 +365,7 @@ async def test_if_fires_on_state_change_with_for( await hass.async_block_till_done() assert len(service_calls) == 0 - hass.states.async_set(entry.entity_id, STATE_CLEANING) + hass.states.async_set(entry.entity_id, VacuumActivity.CLEANING) await hass.async_block_till_done() assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) diff --git a/tests/components/vacuum/test_init.py b/tests/components/vacuum/test_init.py index efd2a63f0f7..db6cd242f3f 100644 --- a/tests/components/vacuum/test_init.py +++ b/tests/components/vacuum/test_init.py @@ -2,12 +2,16 @@ from __future__ import annotations +from enum import Enum +from types import ModuleType from typing import Any +from unittest.mock import patch import pytest +from homeassistant.components import vacuum from homeassistant.components.vacuum import ( - DOMAIN, + DOMAIN as VACUUM_DOMAIN, SERVICE_CLEAN_SPOT, SERVICE_LOCATE, SERVICE_PAUSE, @@ -16,33 +20,86 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_IDLE, - STATE_PAUSED, - STATE_RETURNING, StateVacuumEntity, + VacuumActivity, VacuumEntityFeature, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import frame from . import MockVacuum, help_async_setup_entry_init, help_async_unload_entry +from .common import async_start from tests.common import ( MockConfigEntry, + MockEntity, MockModule, + help_test_all, + import_and_test_deprecated_constant_enum, mock_integration, setup_test_component_platform, ) +def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: + return [(enum_field, constant_prefix) for enum_field in enum if enum_field] + + +@pytest.mark.parametrize( + "module", + [vacuum], +) +def test_all(module: ModuleType) -> None: + """Test module.__all__ is correctly set.""" + help_test_all(module) + + +@pytest.mark.parametrize( + ("enum", "constant_prefix"), _create_tuples(vacuum.VacuumEntityFeature, "SUPPORT_") +) +@pytest.mark.parametrize( + "module", + [vacuum], +) +def test_deprecated_constants( + caplog: pytest.LogCaptureFixture, + enum: Enum, + constant_prefix: str, + module: ModuleType, +) -> None: + """Test deprecated constants.""" + import_and_test_deprecated_constant_enum( + caplog, module, enum, constant_prefix, "2025.10" + ) + + +@pytest.mark.parametrize( + ("enum", "constant_prefix"), _create_tuples(vacuum.VacuumActivity, "STATE_") +) +@pytest.mark.parametrize( + "module", + [vacuum], +) +def test_deprecated_constants_for_state( + caplog: pytest.LogCaptureFixture, + enum: Enum, + constant_prefix: str, + module: ModuleType, +) -> None: + """Test deprecated constants.""" + import_and_test_deprecated_constant_enum( + caplog, module, enum, constant_prefix, "2026.1" + ) + + @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_CLEAN_SPOT, STATE_CLEANING), - (SERVICE_PAUSE, STATE_PAUSED), - (SERVICE_RETURN_TO_BASE, STATE_RETURNING), - (SERVICE_START, STATE_CLEANING), - (SERVICE_STOP, STATE_IDLE), + (SERVICE_CLEAN_SPOT, VacuumActivity.CLEANING), + (SERVICE_PAUSE, VacuumActivity.PAUSED), + (SERVICE_RETURN_TO_BASE, VacuumActivity.RETURNING), + (SERVICE_START, 
VacuumActivity.CLEANING), + (SERVICE_STOP, VacuumActivity.IDLE), ], ) async def test_state_services( @@ -64,18 +121,20 @@ async def test_state_services( async_unload_entry=help_async_unload_entry, ), ) - setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + setup_test_component_platform( + hass, VACUUM_DOMAIN, [mock_vacuum], from_config_entry=True + ) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, service, {"entity_id": mock_vacuum.entity_id}, blocking=True, ) - vacuum_state = hass.states.get(mock_vacuum.entity_id) + activity = hass.states.get(mock_vacuum.entity_id) - assert vacuum_state.state == expected_state + assert activity.state == expected_state async def test_fan_speed(hass: HomeAssistant, config_flow_fixture: None) -> None: @@ -95,14 +154,16 @@ async def test_fan_speed(hass: HomeAssistant, config_flow_fixture: None) -> None async_unload_entry=help_async_unload_entry, ), ) - setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + setup_test_component_platform( + hass, VACUUM_DOMAIN, [mock_vacuum], from_config_entry=True + ) assert await hass.config_entries.async_setup(config_entry.entry_id) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": mock_vacuum.entity_id, "fan_speed": "high"}, blocking=True, @@ -141,11 +202,13 @@ async def test_locate(hass: HomeAssistant, config_flow_fixture: None) -> None: async_unload_entry=help_async_unload_entry, ), ) - setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + setup_test_component_platform( + hass, VACUUM_DOMAIN, [mock_vacuum], from_config_entry=True + ) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_LOCATE, {"entity_id": mock_vacuum.entity_id}, blocking=True, @@ -190,11 +253,13 @@ async def test_send_command(hass: HomeAssistant, config_flow_fixture: None) -> N async_unload_entry=help_async_unload_entry, ), ) - setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + setup_test_component_platform( + hass, VACUUM_DOMAIN, [mock_vacuum], from_config_entry=True + ) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SEND_COMMAND, { "entity_id": mock_vacuum.entity_id, @@ -207,37 +272,176 @@ async def test_send_command(hass: HomeAssistant, config_flow_fixture: None) -> N assert "test" in strings -async def test_supported_features_compat(hass: HomeAssistant) -> None: - """Test StateVacuumEntity using deprecated feature constants features.""" - - features = ( - VacuumEntityFeature.BATTERY - | VacuumEntityFeature.FAN_SPEED - | VacuumEntityFeature.START - | VacuumEntityFeature.STOP - | VacuumEntityFeature.PAUSE +async def test_vacuum_not_log_deprecated_state_warning( + hass: HomeAssistant, + mock_vacuum_entity: MockVacuum, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test correctly using activity doesn't log issue or raise repair.""" + state = hass.states.get(mock_vacuum_entity.entity_id) + assert state is not None + assert ( + "should implement the 'activity' property and return its state using the VacuumActivity enum" + not in caplog.text ) - class _LegacyConstantsStateVacuum(StateVacuumEntity): - _attr_supported_features = int(features) - 
_attr_fan_speed_list = ["silent", "normal", "pet hair"] - entity = _LegacyConstantsStateVacuum() - assert isinstance(entity.supported_features, int) - assert entity.supported_features == int(features) - assert entity.supported_features_compat is ( - VacuumEntityFeature.BATTERY - | VacuumEntityFeature.FAN_SPEED - | VacuumEntityFeature.START - | VacuumEntityFeature.STOP - | VacuumEntityFeature.PAUSE +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_vacuum_log_deprecated_state_warning_using_state_prop( + hass: HomeAssistant, + config_flow_fixture: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test incorrectly using state property does log issue and raise repair.""" + + class MockLegacyVacuum(MockVacuum): + """Mocked vacuum entity.""" + + @property + def state(self) -> str: + """Return the state of the entity.""" + return VacuumActivity.CLEANING + + entity = MockLegacyVacuum( + name="Testing", + entity_id="vacuum.test", ) - assert entity.state_attributes == { - "battery_level": None, - "battery_icon": "mdi:battery-unknown", - "fan_speed": None, - } - assert entity.capability_attributes == { - "fan_speed_list": ["silent", "normal", "pet hair"] - } - assert entity._deprecated_supported_features_reported + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, VACUUM_DOMAIN, [entity], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + state = hass.states.get(entity.entity_id) + assert state is not None + + assert ( + "should implement the 'activity' property and return its state using the VacuumActivity enum" + in caplog.text + ) + + +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_vacuum_log_deprecated_state_warning_using_attr_state_attr( + hass: HomeAssistant, + config_flow_fixture: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test incorrectly using _attr_state attribute does log issue and raise repair.""" + + class MockLegacyVacuum(MockVacuum): + """Mocked vacuum entity.""" + + def start(self) -> None: + """Start cleaning.""" + self._attr_state = VacuumActivity.CLEANING + + entity = MockLegacyVacuum( + name="Testing", + entity_id="vacuum.test", + ) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, VACUUM_DOMAIN, [entity], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + state = hass.states.get(entity.entity_id) + assert state is not None + + assert ( + "should implement the 'activity' property and return its state using the VacuumActivity enum" + not in caplog.text + ) + + await async_start(hass, entity.entity_id) + + assert ( + "should implement the 'activity' property and return its state using the VacuumActivity enum" + in caplog.text + ) + caplog.clear() + await async_start(hass, entity.entity_id) + # Test we only log once + assert ( + "should implement the 'activity' property and return its state using the VacuumActivity enum" + not in caplog.text + ) + + 
+@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_alarm_control_panel_deprecated_state_does_not_break_state( + hass: HomeAssistant, + config_flow_fixture: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test using _attr_state attribute does not break state.""" + + class MockLegacyVacuum(MockEntity, StateVacuumEntity): + """Mocked vacuum entity.""" + + _attr_supported_features = VacuumEntityFeature.STATE | VacuumEntityFeature.START + + def __init__(self, **values: Any) -> None: + """Initialize a mock vacuum entity.""" + super().__init__(**values) + self._attr_state = VacuumActivity.DOCKED + + def start(self) -> None: + """Start cleaning.""" + self._attr_state = VacuumActivity.CLEANING + + entity = MockLegacyVacuum( + name="Testing", + entity_id="vacuum.test", + ) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, VACUUM_DOMAIN, [entity], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + state = hass.states.get(entity.entity_id) + assert state is not None + assert state.state == "docked" + + await hass.services.async_call( + VACUUM_DOMAIN, + SERVICE_START, + { + "entity_id": entity.entity_id, + }, + blocking=True, + ) + await hass.async_block_till_done() + + state = hass.states.get(entity.entity_id) + assert state is not None + assert state.state == "cleaning" diff --git a/tests/components/vacuum/test_intent.py b/tests/components/vacuum/test_intent.py index cf96d32ad49..9ede7dbc04e 100644 --- a/tests/components/vacuum/test_intent.py +++ b/tests/components/vacuum/test_intent.py @@ -37,6 +37,27 @@ async def test_start_vacuum_intent(hass: HomeAssistant) -> None: assert call.data == {"entity_id": entity_id} +async def test_start_vacuum_without_name(hass: HomeAssistant) -> None: + """Test starting a vacuum without specifying the name.""" + await vacuum_intent.async_setup_intents(hass) + + entity_id = f"{DOMAIN}.test_vacuum" + hass.states.async_set(entity_id, STATE_IDLE) + calls = async_mock_service(hass, DOMAIN, SERVICE_START) + + response = await intent.async_handle( + hass, "test", vacuum_intent.INTENT_VACUUM_START, {} + ) + await hass.async_block_till_done() + + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert len(calls) == 1 + call = calls[0] + assert call.domain == DOMAIN + assert call.service == SERVICE_START + assert call.data == {"entity_id": entity_id} + + async def test_stop_vacuum_intent(hass: HomeAssistant) -> None: """Test HassTurnOff intent for vacuums.""" await vacuum_intent.async_setup_intents(hass) @@ -59,3 +80,24 @@ async def test_stop_vacuum_intent(hass: HomeAssistant) -> None: assert call.domain == DOMAIN assert call.service == SERVICE_RETURN_TO_BASE assert call.data == {"entity_id": entity_id} + + +async def test_stop_vacuum_without_name(hass: HomeAssistant) -> None: + """Test stopping a vacuum without specifying the name.""" + await vacuum_intent.async_setup_intents(hass) + + entity_id = f"{DOMAIN}.test_vacuum" + hass.states.async_set(entity_id, STATE_IDLE) + calls = async_mock_service(hass, DOMAIN, SERVICE_RETURN_TO_BASE) + + response = await intent.async_handle( + hass, "test", vacuum_intent.INTENT_VACUUM_RETURN_TO_BASE, {} + ) + await hass.async_block_till_done() + + assert 
response.response_type == intent.IntentResponseType.ACTION_DONE + assert len(calls) == 1 + call = calls[0] + assert call.domain == DOMAIN + assert call.service == SERVICE_RETURN_TO_BASE + assert call.data == {"entity_id": entity_id} diff --git a/tests/components/vacuum/test_reproduce_state.py b/tests/components/vacuum/test_reproduce_state.py index ff8da28e98c..dc5d81e8f08 100644 --- a/tests/components/vacuum/test_reproduce_state.py +++ b/tests/components/vacuum/test_reproduce_state.py @@ -9,18 +9,9 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_DOCKED, - STATE_RETURNING, -) -from homeassistant.const import ( - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_IDLE, - STATE_OFF, - STATE_ON, - STATE_PAUSED, + VacuumActivity, ) +from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant, State from homeassistant.helpers.state import async_reproduce_state @@ -39,11 +30,11 @@ async def test_reproducing_states( hass.states.async_set( "vacuum.entity_on_fan", STATE_ON, {ATTR_FAN_SPEED: FAN_SPEED_LOW} ) - hass.states.async_set("vacuum.entity_cleaning", STATE_CLEANING, {}) - hass.states.async_set("vacuum.entity_docked", STATE_DOCKED, {}) - hass.states.async_set("vacuum.entity_idle", STATE_IDLE, {}) - hass.states.async_set("vacuum.entity_returning", STATE_RETURNING, {}) - hass.states.async_set("vacuum.entity_paused", STATE_PAUSED, {}) + hass.states.async_set("vacuum.entity_cleaning", VacuumActivity.CLEANING, {}) + hass.states.async_set("vacuum.entity_docked", VacuumActivity.DOCKED, {}) + hass.states.async_set("vacuum.entity_idle", VacuumActivity.IDLE, {}) + hass.states.async_set("vacuum.entity_returning", VacuumActivity.RETURNING, {}) + hass.states.async_set("vacuum.entity_paused", VacuumActivity.PAUSED, {}) turn_on_calls = async_mock_service(hass, "vacuum", SERVICE_TURN_ON) turn_off_calls = async_mock_service(hass, "vacuum", SERVICE_TURN_OFF) @@ -60,11 +51,11 @@ async def test_reproducing_states( State("vacuum.entity_off", STATE_OFF), State("vacuum.entity_on", STATE_ON), State("vacuum.entity_on_fan", STATE_ON, {ATTR_FAN_SPEED: FAN_SPEED_LOW}), - State("vacuum.entity_cleaning", STATE_CLEANING), - State("vacuum.entity_docked", STATE_DOCKED), - State("vacuum.entity_idle", STATE_IDLE), - State("vacuum.entity_returning", STATE_RETURNING), - State("vacuum.entity_paused", STATE_PAUSED), + State("vacuum.entity_cleaning", VacuumActivity.CLEANING), + State("vacuum.entity_docked", VacuumActivity.DOCKED), + State("vacuum.entity_idle", VacuumActivity.IDLE), + State("vacuum.entity_returning", VacuumActivity.RETURNING), + State("vacuum.entity_paused", VacuumActivity.PAUSED), ], ) @@ -95,11 +86,11 @@ async def test_reproducing_states( State("vacuum.entity_off", STATE_ON), State("vacuum.entity_on", STATE_OFF), State("vacuum.entity_on_fan", STATE_ON, {ATTR_FAN_SPEED: FAN_SPEED_HIGH}), - State("vacuum.entity_cleaning", STATE_PAUSED), - State("vacuum.entity_docked", STATE_CLEANING), - State("vacuum.entity_idle", STATE_DOCKED), - State("vacuum.entity_returning", STATE_CLEANING), - State("vacuum.entity_paused", STATE_IDLE), + State("vacuum.entity_cleaning", VacuumActivity.PAUSED), + State("vacuum.entity_docked", VacuumActivity.CLEANING), + State("vacuum.entity_idle", VacuumActivity.DOCKED), + State("vacuum.entity_returning", VacuumActivity.CLEANING), + State("vacuum.entity_paused", VacuumActivity.IDLE), # Should not raise State("vacuum.non_existing", STATE_ON), ], diff --git 
a/tests/components/vallox/conftest.py b/tests/components/vallox/conftest.py index a6ea95944b3..b6529409300 100644 --- a/tests/components/vallox/conftest.py +++ b/tests/components/vallox/conftest.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock, patch import pytest from vallox_websocket_api import MetricData -from homeassistant import config_entries from homeassistant.components.vallox.const import DOMAIN from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME @@ -79,13 +78,7 @@ async def init_reconfigure_flow( hass: HomeAssistant, mock_entry, setup_vallox_entry ) -> tuple[MockConfigEntry, ConfigFlowResult]: """Initialize a config entry and a reconfigure flow for it.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": mock_entry.entry_id, - }, - ) + result = await mock_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" diff --git a/tests/components/vallox/test_init.py b/tests/components/vallox/test_init.py index 58e46acd689..4fbde7e0357 100644 --- a/tests/components/vallox/test_init.py +++ b/tests/components/vallox/test_init.py @@ -4,7 +4,11 @@ import pytest from vallox_websocket_api import Profile from homeassistant.components.vallox import ( + ATTR_DURATION, + ATTR_PROFILE, ATTR_PROFILE_FAN_SPEED, + I18N_KEY_TO_VALLOX_PROFILE, + SERVICE_SET_PROFILE, SERVICE_SET_PROFILE_FAN_SPEED_AWAY, SERVICE_SET_PROFILE_FAN_SPEED_BOOST, SERVICE_SET_PROFILE_FAN_SPEED_HOME, @@ -12,7 +16,7 @@ from homeassistant.components.vallox import ( from homeassistant.components.vallox.const import DOMAIN from homeassistant.core import HomeAssistant -from .conftest import patch_set_fan_speed +from .conftest import patch_set_fan_speed, patch_set_profile from tests.common import MockConfigEntry @@ -47,3 +51,45 @@ async def test_create_service( # Assert set_fan_speed.assert_called_once_with(profile, 30) + + +@pytest.mark.parametrize( + ("profile", "duration"), + [ + ("home", None), + ("home", 15), + ("away", None), + ("away", 15), + ("boost", None), + ("boost", 15), + ("fireplace", None), + ("fireplace", 15), + ("extra", None), + ("extra", 15), + ], +) +async def test_set_profile_service( + hass: HomeAssistant, mock_entry: MockConfigEntry, profile: str, duration: int | None +) -> None: + """Test service for setting profile and duration.""" + # Act + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + with patch_set_profile() as set_profile: + service_data = {ATTR_PROFILE: profile} | ( + {ATTR_DURATION: duration} if duration is not None else {} + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_PROFILE, + service_data=service_data, + ) + + await hass.async_block_till_done() + + # Assert + set_profile.assert_called_once_with( + I18N_KEY_TO_VALLOX_PROFILE[profile], duration + ) diff --git a/tests/components/valve/test_init.py b/tests/components/valve/test_init.py index 378ddb2a94b..d8eb38a3b9b 100644 --- a/tests/components/valve/test_init.py +++ b/tests/components/valve/test_init.py @@ -11,16 +11,13 @@ from homeassistant.components.valve import ( ValveEntity, ValveEntityDescription, ValveEntityFeature, + ValveState, ) from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_SET_VALVE_POSITION, SERVICE_TOGGLE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - 
STATE_OPENING, STATE_UNAVAILABLE, Platform, ) @@ -349,19 +346,19 @@ def set_valve_position(ent, position) -> None: def is_open(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_OPEN) + return hass.states.is_state(ent.entity_id, ValveState.OPEN) def is_opening(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_OPENING) + return hass.states.is_state(ent.entity_id, ValveState.OPENING) def is_closed(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_CLOSED) + return hass.states.is_state(ent.entity_id, ValveState.CLOSED) def is_closing(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_CLOSING) + return hass.states.is_state(ent.entity_id, ValveState.CLOSING) diff --git a/tests/components/valve/test_intent.py b/tests/components/valve/test_intent.py index a8f4054602b..4f29017b4c1 100644 --- a/tests/components/valve/test_intent.py +++ b/tests/components/valve/test_intent.py @@ -6,8 +6,8 @@ from homeassistant.components.valve import ( SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE, SERVICE_SET_VALVE_POSITION, + ValveState, ) -from homeassistant.const import STATE_CLOSED, STATE_OPEN from homeassistant.core import HomeAssistant from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -20,7 +20,7 @@ async def test_open_valve_intent(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "intent", {}) entity_id = f"{DOMAIN}.test_valve" - hass.states.async_set(entity_id, STATE_CLOSED) + hass.states.async_set(entity_id, ValveState.CLOSED) calls = async_mock_service(hass, DOMAIN, SERVICE_OPEN_VALVE) response = await intent.async_handle( @@ -41,7 +41,7 @@ async def test_close_valve_intent(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "intent", {}) entity_id = f"{DOMAIN}.test_valve" - hass.states.async_set(entity_id, STATE_OPEN) + hass.states.async_set(entity_id, ValveState.OPEN) calls = async_mock_service(hass, DOMAIN, SERVICE_CLOSE_VALVE) response = await intent.async_handle( @@ -63,7 +63,7 @@ async def test_set_valve_position(hass: HomeAssistant) -> None: entity_id = f"{DOMAIN}.test_valve" hass.states.async_set( - entity_id, STATE_CLOSED, attributes={ATTR_CURRENT_POSITION: 0} + entity_id, ValveState.CLOSED, attributes={ATTR_CURRENT_POSITION: 0} ) calls = async_mock_service(hass, DOMAIN, SERVICE_SET_VALVE_POSITION) diff --git a/tests/components/velbus/test_config_flow.py b/tests/components/velbus/test_config_flow.py index 432fcea10db..5e81a3f8a36 100644 --- a/tests/components/velbus/test_config_flow.py +++ b/tests/components/velbus/test_config_flow.py @@ -156,12 +156,18 @@ async def test_flow_usb(hass: HomeAssistant) -> None: user_input={}, ) assert result + assert result["result"].unique_id == "0B1B:10CF_1234_Velleman_Velbus VMB1USB" assert result.get("type") is FlowResultType.CREATE_ENTRY - # test an already configured discovery + +@pytest.mark.usefixtures("controller") +@patch("serial.tools.list_ports.comports", MagicMock(return_value=[com_port()])) +async def test_flow_usb_if_already_setup(hass: HomeAssistant) -> None: + """Test the Velbus USB discovery flow aborts in case it is already
setup.""" entry = MockConfigEntry( domain=DOMAIN, data={CONF_PORT: PORT_SERIAL}, + unique_id="0B1B:10CF_1234_Velleman_Velbus VMB1USB", ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( diff --git a/tests/components/venstar/util.py b/tests/components/venstar/util.py index f1e85e9019e..44b3efe0720 100644 --- a/tests/components/venstar/util.py +++ b/tests/components/venstar/util.py @@ -2,7 +2,7 @@ import requests_mock -from homeassistant.components.climate import DOMAIN +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN from homeassistant.const import CONF_HOST, CONF_PLATFORM from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -54,7 +54,7 @@ async def async_init_integration( } for model in TEST_MODELS ] - config = {DOMAIN: platform_config} + config = {CLIMATE_DOMAIN: platform_config} - await async_setup_component(hass, DOMAIN, config) + await async_setup_component(hass, CLIMATE_DOMAIN, config) await hass.async_block_till_done() diff --git a/tests/components/vera/test_config_flow.py b/tests/components/vera/test_config_flow.py index 057945450e3..9572645f6d2 100644 --- a/tests/components/vera/test_config_flow.py +++ b/tests/components/vera/test_config_flow.py @@ -5,7 +5,11 @@ from unittest.mock import MagicMock, patch from requests.exceptions import RequestException from homeassistant import config_entries -from homeassistant.components.vera import CONF_CONTROLLER, CONF_LEGACY_UNIQUE_ID, DOMAIN +from homeassistant.components.vera.const import ( + CONF_CONTROLLER, + CONF_LEGACY_UNIQUE_ID, + DOMAIN, +) from homeassistant.const import CONF_EXCLUDE, CONF_LIGHTS, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType diff --git a/tests/components/vera/test_light.py b/tests/components/vera/test_light.py index 6bdc3df9a64..e66d19ec46e 100644 --- a/tests/components/vera/test_light.py +++ b/tests/components/vera/test_light.py @@ -52,13 +52,13 @@ async def test_light( {"entity_id": entity_id, ATTR_HS_COLOR: [300, 70]}, ) await hass.async_block_till_done() - vera_device.set_color.assert_called_with((255, 76, 255)) + vera_device.set_color.assert_called_with((255, 77, 255)) vera_device.is_switched_on.return_value = True - vera_device.get_color.return_value = (255, 76, 255) + vera_device.get_color.return_value = (255, 77, 255) update_callback(vera_device) await hass.async_block_till_done() assert hass.states.get(entity_id).state == "on" - assert hass.states.get(entity_id).attributes["hs_color"] == (300.0, 70.196) + assert hass.states.get(entity_id).attributes["hs_color"] == (300.0, 69.804) await hass.services.async_call( "light", diff --git a/tests/components/vera/test_lock.py b/tests/components/vera/test_lock.py index 4139a494e1f..d24a0e1265f 100644 --- a/tests/components/vera/test_lock.py +++ b/tests/components/vera/test_lock.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock import pyvera as pv -from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED +from homeassistant.components.lock import LockState from homeassistant.core import HomeAssistant from .common import ComponentFactory, new_simple_controller_config @@ -29,7 +29,7 @@ async def test_lock( ) update_callback = component_data.controller_data[0].update_callback - assert hass.states.get(entity_id).state == STATE_UNLOCKED + assert hass.states.get(entity_id).state == LockState.UNLOCKED await hass.services.async_call( "lock", @@ -41,7 +41,7 @@ async def test_lock( 
vera_device.is_locked.return_value = True update_callback(vera_device) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_LOCKED + assert hass.states.get(entity_id).state == LockState.LOCKED await hass.services.async_call( "lock", @@ -53,4 +53,4 @@ async def test_lock( vera_device.is_locked.return_value = False update_callback(vera_device) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNLOCKED + assert hass.states.get(entity_id).state == LockState.UNLOCKED diff --git a/tests/components/verisure/test_config_flow.py b/tests/components/verisure/test_config_flow.py index cf478b093c0..e6dd11669d1 100644 --- a/tests/components/verisure/test_config_flow.py +++ b/tests/components/verisure/test_config_flow.py @@ -352,15 +352,7 @@ async def test_reauth_flow( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("step_id") == "reauth_confirm" assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} @@ -395,15 +387,7 @@ async def test_reauth_flow_with_mfa( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("step_id") == "reauth_confirm" assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} @@ -466,15 +450,7 @@ async def test_reauth_flow_errors( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) mock_verisure_config_flow.login.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/vesync/snapshots/test_fan.ambr b/tests/components/vesync/snapshots/test_fan.ambr index 21985afd7bf..60af4ae3d5b 100644 --- a/tests/components/vesync/snapshots/test_fan.ambr +++ b/tests/components/vesync/snapshots/test_fan.ambr @@ -67,7 +67,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': 'air-purifier', 'unit_of_measurement': None, }), @@ -158,7 +158,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': 'asd_sdfKIHG7IJHGwJGJ7GJ_ag5h3G55', 'unit_of_measurement': None, }), @@ -256,7 +256,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': '400s-purifier', 'unit_of_measurement': None, }), @@ -355,7 +355,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': '600s-purifier', 
'unit_of_measurement': None, }), diff --git a/tests/components/vesync/snapshots/test_light.ambr b/tests/components/vesync/snapshots/test_light.ambr index 36694ae3ef6..2e7fe9ac1bb 100644 --- a/tests/components/vesync/snapshots/test_light.ambr +++ b/tests/components/vesync/snapshots/test_light.ambr @@ -428,10 +428,10 @@ }), 'area_id': None, 'capabilities': dict({ - 'max_color_temp_kelvin': 6493, + 'max_color_temp_kelvin': 6500, 'max_mireds': 370, - 'min_color_temp_kelvin': 2702, - 'min_mireds': 154, + 'min_color_temp_kelvin': 2700, + 'min_mireds': 153, 'supported_color_modes': list([ , ]), @@ -473,10 +473,10 @@ 'color_temp_kelvin': None, 'friendly_name': 'Temperature Light', 'hs_color': None, - 'max_color_temp_kelvin': 6493, + 'max_color_temp_kelvin': 6500, 'max_mireds': 370, - 'min_color_temp_kelvin': 2702, - 'min_mireds': 154, + 'min_color_temp_kelvin': 2700, + 'min_mireds': 153, 'rgb_color': None, 'supported_color_modes': list([ , diff --git a/tests/components/vicare/conftest.py b/tests/components/vicare/conftest.py index c78669d1c3e..aadf85e7081 100644 --- a/tests/components/vicare/conftest.py +++ b/tests/components/vicare/conftest.py @@ -92,6 +92,24 @@ async def mock_vicare_gas_boiler( yield mock_config_entry +@pytest.fixture +async def mock_vicare_room_sensors( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> AsyncGenerator[MockConfigEntry]: + """Return a mocked ViCare API representing multiple room sensor devices.""" + fixtures: list[Fixture] = [ + Fixture({"type:climateSensor"}, "vicare/RoomSensor1.json"), + Fixture({"type:climateSensor"}, "vicare/RoomSensor2.json"), + ] + with patch( + f"{MODULE}.vicare_login", + return_value=MockPyViCare(fixtures), + ): + await setup_integration(hass, mock_config_entry) + + yield mock_config_entry + + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" diff --git a/tests/components/vicare/fixtures/RoomSensor1.json b/tests/components/vicare/fixtures/RoomSensor1.json new file mode 100644 index 00000000000..b970e54a48c --- /dev/null +++ b/tests/components/vicare/fixtures/RoomSensor1.json @@ -0,0 +1,99 @@ +{ + "data": [ + { + "apiVersion": 1, + "commands": {}, + "deviceId": "zigbee-d87a3bfffe5d844a", + "feature": "device.messages.errors.raw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "entries": { + "type": "array", + "value": [] + } + }, + "timestamp": "2024-03-01T04:40:59.911Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.messages.errors.raw" + }, + { + "apiVersion": 1, + "commands": { + "setName": { + "isExecutable": true, + "name": "setName", + "params": { + "name": { + "constraints": { + "maxLength": 40, + "minLength": 1, + "regEx": "^[\\p{L}0-9]+( [\\p{L}0-9]+)*$" + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.name/commands/setName" + } + }, + "deviceId": "zigbee-d87a3bfffe5d844a", + "feature": "device.name", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "name": { + "type": "string", + "value": "Office" + } + }, + "timestamp": "2024-03-01T04:40:59.911Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.name" + }, + { + "apiVersion": 
1, + "commands": {}, + "deviceId": "zigbee-d87a3bfffe5d844a", + "feature": "device.sensors.humidity", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "percent", + "value": 53 + } + }, + "timestamp": "2024-03-02T07:51:07.303Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.sensors.humidity" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "zigbee-d87a3bfffe5d844a", + "feature": "device.sensors.temperature", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 17.5 + } + }, + "timestamp": "2024-03-02T07:52:42.043Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.sensors.temperature" + } + ] +} diff --git a/tests/components/vicare/fixtures/RoomSensor2.json b/tests/components/vicare/fixtures/RoomSensor2.json new file mode 100644 index 00000000000..81a1d935700 --- /dev/null +++ b/tests/components/vicare/fixtures/RoomSensor2.json @@ -0,0 +1,99 @@ +{ + "data": [ + { + "apiVersion": 1, + "commands": {}, + "deviceId": "zigbee-5cc7c1fffea33a3b", + "feature": "device.messages.errors.raw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "entries": { + "type": "array", + "value": [] + } + }, + "timestamp": "2024-03-01T04:40:59.911Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.messages.errors.raw" + }, + { + "apiVersion": 1, + "commands": { + "setName": { + "isExecutable": true, + "name": "setName", + "params": { + "name": { + "constraints": { + "maxLength": 40, + "minLength": 1, + "regEx": "^[\\p{L}0-9]+( [\\p{L}0-9]+)*$" + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.name/commands/setName" + } + }, + "deviceId": "zigbee-5cc7c1fffea33a3b", + "feature": "device.name", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "name": { + "type": "string", + "value": "" + } + }, + "timestamp": "2024-03-01T04:40:59.911Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.name" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "zigbee-5cc7c1fffea33a3b", + "feature": "device.sensors.humidity", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "percent", + "value": 52 + } + }, + "timestamp": "2024-03-02T07:42:06.922Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.sensors.humidity" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "zigbee-5cc7c1fffea33a3b", + "feature": "device.sensors.temperature", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { 
+ "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 16.9 + } + }, + "timestamp": "2024-03-02T07:24:48.056Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.sensors.temperature" + } + ] +} diff --git a/tests/components/vicare/fixtures/ViAir300F.json b/tests/components/vicare/fixtures/ViAir300F.json index b1ec747e127..090c7a81ddf 100644 --- a/tests/components/vicare/fixtures/ViAir300F.json +++ b/tests/components/vicare/fixtures/ViAir300F.json @@ -50,7 +50,7 @@ "properties": { "value": { "type": "string", - "value": "################" + "value": "deviceSerialViAir300F" } }, "timestamp": "2024-03-20T01:29:35.549Z", diff --git a/tests/components/vicare/fixtures/Vitodens300W.json b/tests/components/vicare/fixtures/Vitodens300W.json index 4cf67ebe0f7..d183146e94d 100644 --- a/tests/components/vicare/fixtures/Vitodens300W.json +++ b/tests/components/vicare/fixtures/Vitodens300W.json @@ -1,5 +1,22 @@ { "data": [ + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.serial", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "deviceSerialVitodens300W" + } + }, + "timestamp": "2024-07-30T20:03:40.073Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial" + }, { "properties": {}, "commands": {}, diff --git a/tests/components/vicare/fixtures/dummy-device-no-serial.json b/tests/components/vicare/fixtures/dummy-device-no-serial.json new file mode 100644 index 00000000000..268c73f0e37 --- /dev/null +++ b/tests/components/vicare/fixtures/dummy-device-no-serial.json @@ -0,0 +1,3 @@ +{ + "data": [] +} diff --git a/tests/components/vicare/snapshots/test_binary_sensor.ambr b/tests/components/vicare/snapshots/test_binary_sensor.ambr index a03a6150c45..f3e4d4e1c84 100644 --- a/tests/components/vicare/snapshots/test_binary_sensor.ambr +++ b/tests/components/vicare/snapshots/test_binary_sensor.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'burner', - 'unique_id': 'gateway0-burner_active-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_active-0', 'unit_of_measurement': None, }) # --- @@ -75,7 +75,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'circulation_pump', - 'unique_id': 'gateway0-circulationpump_active-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-circulationpump_active-0', 'unit_of_measurement': None, }) # --- @@ -122,7 +122,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'circulation_pump', - 'unique_id': 'gateway0-circulationpump_active-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-circulationpump_active-1', 'unit_of_measurement': None, }) # --- @@ -169,7 +169,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'domestic_hot_water_charging', - 'unique_id': 'gateway0-charging_active', + 'unique_id': 'gateway0_deviceSerialVitodens300W-charging_active', 'unit_of_measurement': None, }) # --- @@ -216,7 +216,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'domestic_hot_water_circulation_pump', - 'unique_id': 'gateway0-dhw_circulationpump_active', + 'unique_id': 'gateway0_deviceSerialVitodens300W-dhw_circulationpump_active', 'unit_of_measurement': None, }) # --- 
@@ -263,7 +263,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'domestic_hot_water_pump', - 'unique_id': 'gateway0-dhw_pump_active', + 'unique_id': 'gateway0_deviceSerialVitodens300W-dhw_pump_active', 'unit_of_measurement': None, }) # --- @@ -310,7 +310,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'frost_protection', - 'unique_id': 'gateway0-frost_protection_active-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-frost_protection_active-0', 'unit_of_measurement': None, }) # --- @@ -356,7 +356,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'frost_protection', - 'unique_id': 'gateway0-frost_protection_active-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-frost_protection_active-1', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/vicare/snapshots/test_button.ambr b/tests/components/vicare/snapshots/test_button.ambr index 01120b8b0d6..9fadc6a983f 100644 --- a/tests/components/vicare/snapshots/test_button.ambr +++ b/tests/components/vicare/snapshots/test_button.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'activate_onetimecharge', - 'unique_id': 'gateway0-activate_onetimecharge', + 'unique_id': 'gateway0_deviceSerialVitodens300W-activate_onetimecharge', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/vicare/snapshots/test_climate.ambr b/tests/components/vicare/snapshots/test_climate.ambr index a01d1c43bea..aea0ea879c2 100644 --- a/tests/components/vicare/snapshots/test_climate.ambr +++ b/tests/components/vicare/snapshots/test_climate.ambr @@ -40,7 +40,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'heating', - 'unique_id': 'gateway0-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating-0', 'unit_of_measurement': None, }) # --- @@ -123,7 +123,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'heating', - 'unique_id': 'gateway0-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating-1', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/vicare/snapshots/test_diagnostics.ambr b/tests/components/vicare/snapshots/test_diagnostics.ambr index dfc29d46cc2..0b1dcef5a29 100644 --- a/tests/components/vicare/snapshots/test_diagnostics.ambr +++ b/tests/components/vicare/snapshots/test_diagnostics.ambr @@ -4,6 +4,24 @@ 'data': list([ dict({ 'data': list([ + dict({ + 'apiVersion': 1, + 'commands': dict({ + }), + 'deviceId': '0', + 'feature': 'device.serial', + 'gatewayId': '################', + 'isEnabled': True, + 'isReady': True, + 'properties': dict({ + 'value': dict({ + 'type': 'string', + 'value': 'deviceSerialVitodens300W', + }), + }), + 'timestamp': '2024-07-30T20:03:40.073Z', + 'uri': 'https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial', + }), dict({ 'apiVersion': 1, 'commands': dict({ @@ -4703,6 +4721,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'vicare', 'entry_id': '1234', 'minor_version': 1, @@ -4711,6 +4731,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'ViCare', 'version': 1, diff --git a/tests/components/vicare/snapshots/test_fan.ambr b/tests/components/vicare/snapshots/test_fan.ambr index 48c8d728569..3ecc4277fd9 100644 --- a/tests/components/vicare/snapshots/test_fan.ambr +++ 
b/tests/components/vicare/snapshots/test_fan.ambr @@ -29,13 +29,13 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': None, + 'original_icon': 'mdi:fan', 'original_name': 'Ventilation', 'platform': 'vicare', 'previous_unique_id': None, 'supported_features': , 'translation_key': 'ventilation', - 'unique_id': 'gateway0-ventilation', + 'unique_id': 'gateway0_deviceSerialViAir300F-ventilation', 'unit_of_measurement': None, }) # --- @@ -43,6 +43,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 Ventilation', + 'icon': 'mdi:fan', 'percentage': 0, 'percentage_step': 25.0, 'preset_mode': None, diff --git a/tests/components/vicare/snapshots/test_number.ambr b/tests/components/vicare/snapshots/test_number.ambr index a55c29ab8c1..5a030fc0213 100644 --- a/tests/components/vicare/snapshots/test_number.ambr +++ b/tests/components/vicare/snapshots/test_number.ambr @@ -33,7 +33,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'comfort_temperature', - 'unique_id': 'gateway0-comfort_temperature-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-comfort_temperature-0', 'unit_of_measurement': , }) # --- @@ -90,7 +90,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'comfort_temperature', - 'unique_id': 'gateway0-comfort_temperature-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-comfort_temperature-1', 'unit_of_measurement': , }) # --- @@ -147,7 +147,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'heating_curve_shift', - 'unique_id': 'gateway0-heating curve shift-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve shift-0', 'unit_of_measurement': , }) # --- @@ -204,7 +204,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'heating_curve_shift', - 'unique_id': 'gateway0-heating curve shift-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve shift-1', 'unit_of_measurement': , }) # --- @@ -261,7 +261,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'heating_curve_slope', - 'unique_id': 'gateway0-heating curve slope-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve slope-0', 'unit_of_measurement': None, }) # --- @@ -316,7 +316,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'heating_curve_slope', - 'unique_id': 'gateway0-heating curve slope-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve slope-1', 'unit_of_measurement': None, }) # --- @@ -371,7 +371,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'normal_temperature', - 'unique_id': 'gateway0-normal_temperature-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-normal_temperature-0', 'unit_of_measurement': , }) # --- @@ -428,7 +428,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'normal_temperature', - 'unique_id': 'gateway0-normal_temperature-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-normal_temperature-1', 'unit_of_measurement': , }) # --- @@ -485,7 +485,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'reduced_temperature', - 'unique_id': 'gateway0-reduced_temperature-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-reduced_temperature-0', 'unit_of_measurement': , }) # --- @@ -542,7 +542,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'reduced_temperature', - 'unique_id': 'gateway0-reduced_temperature-1', + 'unique_id': 
'gateway0_deviceSerialVitodens300W-reduced_temperature-1', 'unit_of_measurement': , }) # --- @@ -565,3 +565,60 @@ 'state': 'unavailable', }) # --- +# name: test_all_entities[number.model0_dhw_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_dhw_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dhw_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-dhw_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_dhw_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 DHW temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_dhw_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_sensor.ambr b/tests/components/vicare/snapshots/test_sensor.ambr index 7bbac75bedc..793f3e87611 100644 --- a/tests/components/vicare/snapshots/test_sensor.ambr +++ b/tests/components/vicare/snapshots/test_sensor.ambr @@ -30,7 +30,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'boiler_temperature', - 'unique_id': 'gateway0-boiler_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-boiler_temperature', 'unit_of_measurement': , }) # --- @@ -81,7 +81,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'burner_hours', - 'unique_id': 'gateway0-burner_hours-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_hours-0', 'unit_of_measurement': , }) # --- @@ -131,7 +131,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'burner_modulation', - 'unique_id': 'gateway0-burner_modulation-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_modulation-0', 'unit_of_measurement': '%', }) # --- @@ -181,7 +181,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'burner_starts', - 'unique_id': 'gateway0-burner_starts-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_starts-0', 'unit_of_measurement': None, }) # --- @@ -230,7 +230,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_gas_consumption_heating_this_month', - 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_month', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_heating_this_month', 'unit_of_measurement': None, }) # --- @@ -279,7 +279,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_gas_consumption_heating_this_week', - 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_week', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_heating_this_week', 'unit_of_measurement': None, }) # --- @@ -328,7 +328,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'hotwater_gas_consumption_heating_this_year', - 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_year', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_heating_this_year', 'unit_of_measurement': None, }) # --- @@ -377,7 +377,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_gas_consumption_today', - 'unique_id': 'gateway0-hotwater_gas_consumption_today', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_today', 'unit_of_measurement': None, }) # --- @@ -426,7 +426,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_max_temperature', - 'unique_id': 'gateway0-hotwater_max_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_max_temperature', 'unit_of_measurement': , }) # --- @@ -477,7 +477,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_min_temperature', - 'unique_id': 'gateway0-hotwater_min_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_min_temperature', 'unit_of_measurement': , }) # --- @@ -528,7 +528,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power consumption this month', - 'unique_id': 'gateway0-power consumption this month', + 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption this month', 'unit_of_measurement': , }) # --- @@ -548,7 +548,7 @@ 'state': '7.843', }) # --- -# name: test_all_entities[sensor.model0_energy_consumption_this_year-entry] +# name: test_all_entities[sensor.model0_electricity_consumption_this_year-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -562,7 +562,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.model0_energy_consumption_this_year', + 'entity_id': 'sensor.model0_electricity_consumption_this_year', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -574,32 +574,32 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Energy consumption this year', + 'original_name': 'Electricity consumption this year', 'platform': 'vicare', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_consumption_this_year', - 'unique_id': 'gateway0-power consumption this year', + 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption this year', 'unit_of_measurement': , }) # --- -# name: test_all_entities[sensor.model0_energy_consumption_this_year-state] +# name: test_all_entities[sensor.model0_electricity_consumption_this_year-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'model0 Energy consumption this year', + 'friendly_name': 'model0 Electricity consumption this year', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.model0_energy_consumption_this_year', + 'entity_id': 'sensor.model0_electricity_consumption_this_year', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '207.106', }) # --- -# name: test_all_entities[sensor.model0_energy_consumption_today-entry] +# name: test_all_entities[sensor.model0_electricity_consumption_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -613,7 +613,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.model0_energy_consumption_today', + 'entity_id': 'sensor.model0_electricity_consumption_today', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -625,25 +625,25 @@ }), 
'original_device_class': , 'original_icon': None, - 'original_name': 'Energy consumption today', + 'original_name': 'Electricity consumption today', 'platform': 'vicare', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_consumption_today', - 'unique_id': 'gateway0-power consumption today', + 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption today', 'unit_of_measurement': , }) # --- -# name: test_all_entities[sensor.model0_energy_consumption_today-state] +# name: test_all_entities[sensor.model0_electricity_consumption_today-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'model0 Energy consumption today', + 'friendly_name': 'model0 Electricity consumption today', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.model0_energy_consumption_today', + 'entity_id': 'sensor.model0_electricity_consumption_today', 'last_changed': , 'last_reported': , 'last_updated': , @@ -681,7 +681,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'gas_consumption_heating_this_month', - 'unique_id': 'gateway0-gas_consumption_heating_this_month', + 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_this_month', 'unit_of_measurement': None, }) # --- @@ -730,7 +730,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'gas_consumption_heating_this_week', - 'unique_id': 'gateway0-gas_consumption_heating_this_week', + 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_this_week', 'unit_of_measurement': None, }) # --- @@ -779,7 +779,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'gas_consumption_heating_this_year', - 'unique_id': 'gateway0-gas_consumption_heating_this_year', + 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_this_year', 'unit_of_measurement': None, }) # --- @@ -828,7 +828,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'gas_consumption_heating_today', - 'unique_id': 'gateway0-gas_consumption_heating_today', + 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_today', 'unit_of_measurement': None, }) # --- @@ -877,7 +877,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'outside_temperature', - 'unique_id': 'gateway0-outside_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-outside_temperature', 'unit_of_measurement': , }) # --- @@ -897,7 +897,7 @@ 'state': '20.8', }) # --- -# name: test_all_entities[sensor.model0_power_consumption_this_week-entry] +# name: test_all_entities[sensor.model0_electricity_consumption_this_week-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -911,7 +911,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.model0_power_consumption_this_week', + 'entity_id': 'sensor.model0_electricity_consumption_this_week', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -923,25 +923,25 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Power consumption this week', + 'original_name': 'Electricity consumption this week', 'platform': 'vicare', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_consumption_this_week', - 'unique_id': 'gateway0-power consumption this week', + 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption this week', 'unit_of_measurement': , }) # --- -# name: 
test_all_entities[sensor.model0_power_consumption_this_week-state] +# name: test_all_entities[sensor.model0_electricity_consumption_this_week-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'model0 Power consumption this week', + 'friendly_name': 'model0 Electricity consumption this week', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.model0_power_consumption_this_week', + 'entity_id': 'sensor.model0_electricity_consumption_this_week', 'last_changed': , 'last_reported': , 'last_updated': , @@ -979,7 +979,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'supply_temperature', - 'unique_id': 'gateway0-supply_temperature-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-supply_temperature-0', 'unit_of_measurement': , }) # --- @@ -1030,7 +1030,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'supply_temperature', - 'unique_id': 'gateway0-supply_temperature-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-supply_temperature-1', 'unit_of_measurement': , }) # --- @@ -1050,3 +1050,207 @@ 'state': '25.5', }) # --- +# name: test_room_sensors[sensor.model0_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'gateway0_zigbee_d87a3bfffe5d844a-room_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_room_sensors[sensor.model0_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'model0 Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.model0_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '53', + }) +# --- +# name: test_room_sensors[sensor.model0_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'gateway0_zigbee_d87a3bfffe5d844a-room_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_room_sensors[sensor.model0_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17.5', + }) +# --- +# name: 
test_room_sensors[sensor.model1_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model1_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'gateway1_zigbee_5cc7c1fffea33a3b-room_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_room_sensors[sensor.model1_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'model1 Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.model1_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '52', + }) +# --- +# name: test_room_sensors[sensor.model1_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model1_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'gateway1_zigbee_5cc7c1fffea33a3b-room_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_room_sensors[sensor.model1_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model1 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model1_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16.9', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_water_heater.ambr b/tests/components/vicare/snapshots/test_water_heater.ambr index 5ab4fcc78bd..bca04b1bbfa 100644 --- a/tests/components/vicare/snapshots/test_water_heater.ambr +++ b/tests/components/vicare/snapshots/test_water_heater.ambr @@ -31,7 +31,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'domestic_hot_water', - 'unique_id': 'gateway0-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-0', 'unit_of_measurement': None, }) # --- @@ -87,7 +87,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'domestic_hot_water', - 'unique_id': 'gateway0-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-1', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/vicare/test_config_flow.py b/tests/components/vicare/test_config_flow.py index b823bb72dc9..a522cf75d5d 100644 --- a/tests/components/vicare/test_config_flow.py +++ b/tests/components/vicare/test_config_flow.py @@ -11,7 +11,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import dhcp from homeassistant.components.vicare.const import DOMAIN -from 
homeassistant.config_entries import SOURCE_DHCP, SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -104,11 +104,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, - data=VALID_CONFIG, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/vicare/test_init.py b/tests/components/vicare/test_init.py new file mode 100644 index 00000000000..62bec7f50c5 --- /dev/null +++ b/tests/components/vicare/test_init.py @@ -0,0 +1,107 @@ +"""Test ViCare migration.""" + +from unittest.mock import patch + +from homeassistant.components.vicare.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from . import MODULE +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry + + +# Device migration test can be removed in 2025.4.0 +async def test_device_and_entity_migration( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the device and entity registries are updated correctly.""" + fixtures: list[Fixture] = [ + Fixture({"type:boiler"}, "vicare/Vitodens300W.json"), + Fixture({"type:boiler"}, "vicare/dummy-device-no-serial.json"), + ] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), + ): + mock_config_entry.add_to_hass(hass) + + # device with serial data point + device0 = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={ + (DOMAIN, "gateway0"), + }, + model="model0", + ) + entry0 = entity_registry.async_get_or_create( + domain=Platform.CLIMATE, + platform=DOMAIN, + config_entry=mock_config_entry, + unique_id="gateway0-0", + translation_key="heating", + device_id=device0.id, + ) + entry1 = entity_registry.async_get_or_create( + domain=Platform.CLIMATE, + platform=DOMAIN, + config_entry=mock_config_entry, + unique_id="gateway0_deviceSerialVitodens300W-heating-1", + translation_key="heating", + device_id=device0.id, + ) + # device without serial data point + device1 = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={ + (DOMAIN, "gateway1"), + }, + model="model1", + ) + entry2 = entity_registry.async_get_or_create( + domain=Platform.CLIMATE, + platform=DOMAIN, + config_entry=mock_config_entry, + unique_id="gateway1-0", + translation_key="heating", + device_id=device1.id, + ) + # device is not provided by api + device2 = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={ + (DOMAIN, "gateway2"), + }, + model="model2", + ) + entry3 = entity_registry.async_get_or_create( + domain=Platform.CLIMATE, + platform=DOMAIN, + config_entry=mock_config_entry, + unique_id="gateway2-0", + translation_key="heating", + device_id=device2.id, + ) + + await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.async_block_till_done() + + assert ( + entity_registry.async_get(entry0.entity_id).unique_id + == "gateway0_deviceSerialVitodens300W-heating-0" + ) + assert ( + entity_registry.async_get(entry1.entity_id).unique_id + == "gateway0_deviceSerialVitodens300W-heating-1" + ) + assert ( + entity_registry.async_get(entry2.entity_id).unique_id + == "gateway1_deviceId1-heating-0" + ) + assert entity_registry.async_get(entry3.entity_id).unique_id == "gateway2-0" diff --git a/tests/components/vicare/test_sensor.py b/tests/components/vicare/test_sensor.py index 624fdf2cd5d..06c8b963680 100644 --- a/tests/components/vicare/test_sensor.py +++ b/tests/components/vicare/test_sensor.py @@ -23,7 +23,30 @@ async def test_all_entities( entity_registry: er.EntityRegistry, ) -> None: """Test all entities.""" - fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + fixtures: list[Fixture] = [ + Fixture({"type:boiler"}, "vicare/Vitodens300W.json"), + ] with ( patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), ): await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_room_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test room sensor entities.""" + fixtures: list[Fixture] = [ + Fixture({"type:climateSensor"}, "vicare/RoomSensor1.json"), + Fixture({"type:climateSensor"}, "vicare/RoomSensor2.json"), + ] with ( patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), diff --git a/tests/components/vicare/test_types.py b/tests/components/vicare/test_types.py index 13d8255cf8d..c411213f13e 100644 --- a/tests/components/vicare/test_types.py +++ b/tests/components/vicare/test_types.py @@ -39,7 +39,7 @@ async def test_ha_preset_to_heating_program( ha_preset: str | None, expected_result: str | None, ) -> None: - """Testing HA Preset tp ViCare HeatingProgram.""" + """Testing HA Preset to ViCare HeatingProgram.""" supported_programs = [ HeatingProgram.COMFORT, @@ -52,6 +52,17 @@ async def test_ha_preset_to_heating_program( ) +async def test_ha_preset_to_heating_program_error() -> None: + """Testing HA Preset to ViCare HeatingProgram with an unsupported program.""" + + supported_programs = [ + "test", + ] + assert ( + HeatingProgram.from_ha_preset(HeatingProgram.NORMAL, supported_programs) is None + ) + + @pytest.mark.parametrize( ("vicare_mode", "expected_result"), [ diff --git a/tests/components/vilfo/test_config_flow.py b/tests/components/vilfo/test_config_flow.py index c4fdb2fe22c..dcfdc8a9ffa 100644 --- a/tests/components/vilfo/test_config_flow.py +++ b/tests/components/vilfo/test_config_flow.py @@ -165,7 +165,7 @@ async def test_form_wrong_host( }, ) - assert result["errors"] == {"host": "wrong_host"} + assert result["errors"] == {"base": "invalid_host"} async def test_form_already_configured( diff --git a/tests/components/vizio/const.py b/tests/components/vizio/const.py index 3e7b0c83c70..51151ae8f42 100644 --- a/tests/components/vizio/const.py +++ b/tests/components/vizio/const.py @@ -112,14 +112,6 @@ MOCK_OPTIONS = { CONF_VOLUME_STEP: VOLUME_STEP, } -MOCK_IMPORT_VALID_TV_CONFIG = { - CONF_NAME: NAME, - CONF_HOST: HOST, - CONF_DEVICE_CLASS: 
MediaPlayerDeviceClass.TV, - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_VOLUME_STEP: VOLUME_STEP, -} - MOCK_TV_WITH_INCLUDE_CONFIG = { CONF_NAME: NAME, CONF_HOST: HOST, @@ -147,23 +139,6 @@ MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG = { CONF_APPS: {CONF_ADDITIONAL_CONFIGS: [ADDITIONAL_APP_CONFIG]}, } -MOCK_SPEAKER_APPS_FAILURE = { - CONF_NAME: NAME, - CONF_HOST: HOST, - CONF_DEVICE_CLASS: MediaPlayerDeviceClass.SPEAKER, - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_VOLUME_STEP: VOLUME_STEP, - CONF_APPS: {CONF_ADDITIONAL_CONFIGS: [ADDITIONAL_APP_CONFIG]}, -} - -MOCK_TV_APPS_FAILURE = { - CONF_NAME: NAME, - CONF_HOST: HOST, - CONF_DEVICE_CLASS: MediaPlayerDeviceClass.TV, - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_VOLUME_STEP: VOLUME_STEP, - CONF_APPS: None, -} MOCK_TV_APPS_WITH_VALID_APPS_CONFIG = { CONF_HOST: HOST, diff --git a/tests/components/vizio/test_config_flow.py b/tests/components/vizio/test_config_flow.py index 42d4394ca80..2ef7c18bd04 100644 --- a/tests/components/vizio/test_config_flow.py +++ b/tests/components/vizio/test_config_flow.py @@ -3,30 +3,20 @@ import dataclasses import pytest -import voluptuous as vol from homeassistant.components.media_player import MediaPlayerDeviceClass -from homeassistant.components.vizio.config_flow import _get_config_schema from homeassistant.components.vizio.const import ( CONF_APPS, CONF_APPS_TO_INCLUDE_OR_EXCLUDE, - CONF_INCLUDE, CONF_VOLUME_STEP, - DEFAULT_NAME, - DEFAULT_VOLUME_STEP, DOMAIN, - VIZIO_SCHEMA, -) -from homeassistant.config_entries import ( - SOURCE_IGNORE, - SOURCE_IMPORT, - SOURCE_USER, - SOURCE_ZEROCONF, ) +from homeassistant.config_entries import SOURCE_IGNORE, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import ( CONF_ACCESS_TOKEN, CONF_DEVICE_CLASS, CONF_HOST, + CONF_INCLUDE, CONF_NAME, CONF_PIN, ) @@ -38,14 +28,11 @@ from .const import ( CURRENT_APP, HOST, HOST2, - MOCK_IMPORT_VALID_TV_CONFIG, MOCK_INCLUDE_APPS, MOCK_INCLUDE_NO_APPS, MOCK_PIN_CONFIG, MOCK_SPEAKER_CONFIG, MOCK_TV_CONFIG_NO_TOKEN, - MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG, - MOCK_TV_WITH_EXCLUDE_CONFIG, MOCK_USER_VALID_TV_CONFIG, MOCK_ZEROCONF_SERVICE_INFO, NAME, @@ -370,297 +357,6 @@ async def test_user_ignore(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_flow_minimum_fields(hass: HomeAssistant) -> None: - """Test import config flow with minimum fields.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)( - {CONF_HOST: HOST, CONF_DEVICE_CLASS: MediaPlayerDeviceClass.SPEAKER} - ), - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DEFAULT_NAME - assert result["data"][CONF_NAME] == DEFAULT_NAME - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.SPEAKER - assert result["data"][CONF_VOLUME_STEP] == DEFAULT_VOLUME_STEP - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_flow_all_fields(hass: HomeAssistant) -> None: - """Test import config flow with all fields.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_IMPORT_VALID_TV_CONFIG), - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == NAME - assert result["data"][CONF_NAME] == NAME - assert result["data"][CONF_HOST] == HOST - assert 
result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV - assert result["data"][CONF_ACCESS_TOKEN] == ACCESS_TOKEN - assert result["data"][CONF_VOLUME_STEP] == VOLUME_STEP - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_entity_already_configured(hass: HomeAssistant) -> None: - """Test entity is already configured during import setup.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - options={CONF_VOLUME_STEP: VOLUME_STEP}, - ) - entry.add_to_hass(hass) - fail_entry = vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG.copy()) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=fail_entry - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured_device" - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_update_options(hass: HomeAssistant) -> None: - """Test import config flow with updated options.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - ) - await hass.async_block_till_done() - - assert result["result"].options == {CONF_VOLUME_STEP: DEFAULT_VOLUME_STEP} - assert result["type"] is FlowResultType.CREATE_ENTRY - entry_id = result["result"].entry_id - - updated_config = MOCK_SPEAKER_CONFIG.copy() - updated_config[CONF_VOLUME_STEP] = VOLUME_STEP + 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(updated_config), - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "updated_entry" - config_entry = hass.config_entries.async_get_entry(entry_id) - assert config_entry.options[CONF_VOLUME_STEP] == VOLUME_STEP + 1 - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_update_name_and_apps(hass: HomeAssistant) -> None: - """Test import config flow with updated name and apps.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_IMPORT_VALID_TV_CONFIG), - ) - await hass.async_block_till_done() - - assert result["result"].data[CONF_NAME] == NAME - assert result["type"] is FlowResultType.CREATE_ENTRY - entry_id = result["result"].entry_id - - updated_config = MOCK_IMPORT_VALID_TV_CONFIG.copy() - updated_config[CONF_NAME] = NAME2 - updated_config[CONF_APPS] = {CONF_INCLUDE: [CURRENT_APP]} - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(updated_config), - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "updated_entry" - config_entry = hass.config_entries.async_get_entry(entry_id) - assert config_entry.data[CONF_NAME] == NAME2 - assert config_entry.data[CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} - assert config_entry.options[CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_update_remove_apps(hass: HomeAssistant) -> None: - """Test import config flow with removed apps.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_TV_WITH_EXCLUDE_CONFIG), - ) - await hass.async_block_till_done() - - assert 
result["result"].data[CONF_NAME] == NAME - assert result["type"] is FlowResultType.CREATE_ENTRY - config_entry = hass.config_entries.async_get_entry(result["result"].entry_id) - assert CONF_APPS in config_entry.data - assert CONF_APPS in config_entry.options - - updated_config = MOCK_TV_WITH_EXCLUDE_CONFIG.copy() - updated_config.pop(CONF_APPS) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(updated_config), - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "updated_entry" - assert CONF_APPS not in config_entry.data - assert CONF_APPS not in config_entry.options - - -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" -) -async def test_import_needs_pairing(hass: HomeAssistant) -> None: - """Test pairing config flow when access token not provided for tv during import.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_TV_CONFIG_NO_TOKEN - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_TV_CONFIG_NO_TOKEN - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pair_tv" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_PIN_CONFIG - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pairing_complete_import" - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == NAME - assert result["data"][CONF_NAME] == NAME - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV - - -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" -) -async def test_import_with_apps_needs_pairing(hass: HomeAssistant) -> None: - """Test pairing config flow when access token not provided for tv but apps are included during import.""" - import_config = MOCK_TV_CONFIG_NO_TOKEN.copy() - import_config[CONF_APPS] = {CONF_INCLUDE: [CURRENT_APP]} - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=import_config - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - # Mock inputting info without apps to make sure apps get stored - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=_get_config_schema(MOCK_TV_CONFIG_NO_TOKEN)(MOCK_TV_CONFIG_NO_TOKEN), - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pair_tv" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_PIN_CONFIG - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pairing_complete_import" - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == NAME - assert result["data"][CONF_NAME] == NAME - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV - assert result["data"][CONF_APPS][CONF_INCLUDE] == [CURRENT_APP] - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def 
test_import_flow_additional_configs(hass: HomeAssistant) -> None: - """Test import config flow with additional configs defined in CONF_APPS.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG), - ) - await hass.async_block_till_done() - - assert result["result"].data[CONF_NAME] == NAME - assert result["type"] is FlowResultType.CREATE_ENTRY - config_entry = hass.config_entries.async_get_entry(result["result"].entry_id) - assert CONF_APPS in config_entry.data - assert CONF_APPS not in config_entry.options - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_error( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that error is logged when import config has an error.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - options={CONF_VOLUME_STEP: VOLUME_STEP}, - unique_id=UNIQUE_ID, - ) - entry.add_to_hass(hass) - fail_entry = MOCK_SPEAKER_CONFIG.copy() - fail_entry[CONF_HOST] = "0.0.0.0" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(fail_entry), - ) - - assert result["type"] is FlowResultType.FORM - - # Ensure error gets logged - vizio_log_list = [ - log - for log in caplog.records - if log.name == "homeassistant.components.vizio.config_flow" - ] - assert len(vizio_log_list) == 1 - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_ignore(hass: HomeAssistant) -> None: - """Test import config flow doesn't throw an error when there's an existing ignored source.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=MOCK_SPEAKER_CONFIG, - options={CONF_VOLUME_STEP: VOLUME_STEP}, - source=SOURCE_IGNORE, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - - @pytest.mark.usefixtures( "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" ) @@ -854,26 +550,3 @@ async def test_zeroconf_flow_already_configured_hostname(hass: HomeAssistant) -> # Flow should abort because device is already setup assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup", "vizio_hostname_check") -async def test_import_flow_already_configured_hostname(hass: HomeAssistant) -> None: - """Test entity is already configured during import setup when existing entry uses hostname.""" - config = MOCK_SPEAKER_CONFIG.copy() - config[CONF_HOST] = "hostname" - entry = MockConfigEntry( - domain=DOMAIN, data=config, options={CONF_VOLUME_STEP: VOLUME_STEP} - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), - ) - - # Flow should abort because device was updated - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "updated_entry" - - assert entry.data[CONF_HOST] == HOST diff --git a/tests/components/vizio/test_init.py b/tests/components/vizio/test_init.py index c2b19377809..e004255ec6d 100644 --- a/tests/components/vizio/test_init.py +++ b/tests/components/vizio/test_init.py @@ -7,7 +7,6 @@ import pytest from 
homeassistant.components.vizio.const import DOMAIN from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .const import MOCK_SPEAKER_CONFIG, MOCK_USER_VALID_TV_CONFIG, UNIQUE_ID @@ -15,16 +14,6 @@ from .const import MOCK_SPEAKER_CONFIG, MOCK_USER_VALID_TV_CONFIG, UNIQUE_ID from tests.common import MockConfigEntry, async_fire_time_changed -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_setup_component(hass: HomeAssistant) -> None: - """Test component setup.""" - assert await async_setup_component( - hass, DOMAIN, {DOMAIN: MOCK_USER_VALID_TV_CONFIG} - ) - await hass.async_block_till_done() - assert len(hass.states.async_entity_ids(Platform.MEDIA_PLAYER)) == 1 - - @pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_tv_load_and_unload(hass: HomeAssistant) -> None: """Test loading and unloading TV entry.""" diff --git a/tests/components/vizio/test_media_player.py b/tests/components/vizio/test_media_player.py index 12e19077c8e..a76dfa3fa2d 100644 --- a/tests/components/vizio/test_media_player.py +++ b/tests/components/vizio/test_media_player.py @@ -19,7 +19,6 @@ from pyvizio.const import ( MAX_VOLUME, UNKNOWN_APP, ) -import voluptuous as vol from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE, @@ -42,7 +41,6 @@ from homeassistant.components.media_player import ( SERVICE_VOLUME_UP, MediaPlayerDeviceClass, ) -from homeassistant.components.vizio import validate_apps from homeassistant.components.vizio.const import ( CONF_ADDITIONAL_CONFIGS, CONF_APPS, @@ -50,7 +48,6 @@ from homeassistant.components.vizio.const import ( DEFAULT_VOLUME_STEP, DOMAIN, SERVICE_UPDATE_SETTING, - VIZIO_SCHEMA, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -69,9 +66,7 @@ from .const import ( EQ_LIST, INPUT_LIST, INPUT_LIST_WITH_APPS, - MOCK_SPEAKER_APPS_FAILURE, MOCK_SPEAKER_CONFIG, - MOCK_TV_APPS_FAILURE, MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG, MOCK_TV_WITH_EXCLUDE_CONFIG, MOCK_TV_WITH_INCLUDE_CONFIG, @@ -155,7 +150,7 @@ async def _test_setup_tv(hass: HomeAssistant, vizio_power_state: bool | None) -> config_entry = MockConfigEntry( domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_USER_VALID_TV_CONFIG), + data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID, ) @@ -181,7 +176,7 @@ async def _test_setup_speaker( config_entry = MockConfigEntry( domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG), + data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID, ) @@ -215,7 +210,7 @@ async def _cm_for_test_setup_tv_with_apps( ) -> AsyncIterator[None]: """Context manager to setup test for Vizio TV with support for apps.""" config_entry = MockConfigEntry( - domain=DOMAIN, data=vol.Schema(VIZIO_SCHEMA)(device_config), unique_id=UNIQUE_ID + domain=DOMAIN, data=device_config, unique_id=UNIQUE_ID ) async with _cm_for_test_setup_without_apps( @@ -641,15 +636,6 @@ async def test_setup_with_apps_additional_apps_config( assert not service_call2.called -def test_invalid_apps_config(hass: HomeAssistant) -> None: - """Test that schema validation fails on certain conditions.""" - with pytest.raises(vol.Invalid): - vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_TV_APPS_FAILURE) - - with pytest.raises(vol.Invalid): - vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_SPEAKER_APPS_FAILURE) - - @pytest.mark.usefixtures("vizio_connect", 
"vizio_update_with_apps") async def test_setup_with_unknown_app_config( hass: HomeAssistant, @@ -687,7 +673,7 @@ async def test_setup_tv_without_mute(hass: HomeAssistant) -> None: """Test Vizio TV entity setup when mute property isn't returned by Vizio API.""" config_entry = MockConfigEntry( domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_USER_VALID_TV_CONFIG), + data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID, ) @@ -742,7 +728,7 @@ async def test_vizio_update_with_apps_on_input(hass: HomeAssistant) -> None: """Test a vizio TV with apps that is on a TV input.""" config_entry = MockConfigEntry( domain=DOMAIN, - data=vol.Schema(VIZIO_SCHEMA)(MOCK_USER_VALID_TV_CONFIG), + data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID, ) await _add_config_entry_to_hass(hass, config_entry) diff --git a/tests/components/vlc_telnet/test_config_flow.py b/tests/components/vlc_telnet/test_config_flow.py index 54edafab14a..a4b559bbe1b 100644 --- a/tests/components/vlc_telnet/test_config_flow.py +++ b/tests/components/vlc_telnet/test_config_flow.py @@ -9,10 +9,10 @@ from aiovlc.exceptions import AuthError, ConnectError import pytest from homeassistant import config_entries -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.vlc_telnet.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry @@ -153,15 +153,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=DOMAIN, data=entry_data) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry_data, - ) + result = await entry.start_reauth_flow(hass) with ( patch("homeassistant.components.vlc_telnet.config_flow.Client.connect"), @@ -209,15 +201,7 @@ async def test_reauth_errors( entry = MockConfigEntry(domain=DOMAIN, data=entry_data) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry_data, - ) + result = await entry.start_reauth_flow(hass) with ( patch( diff --git a/tests/components/vodafone_station/const.py b/tests/components/vodafone_station/const.py index 1b3d36def03..fc6bbd01398 100644 --- a/tests/components/vodafone_station/const.py +++ b/tests/components/vodafone_station/const.py @@ -1,5 +1,7 @@ """Common stuff for Vodafone Station tests.""" +from aiovodafone.api import VodafoneStationDevice + from homeassistant.components.vodafone_station.const import DOMAIN from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME @@ -16,3 +18,100 @@ MOCK_CONFIG = { } MOCK_USER_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] + + +DEVICE_DATA_QUERY = { + "xx:xx:xx:xx:xx:xx": VodafoneStationDevice( + connected=True, + connection_type="wifi", + ip_address="192.168.1.10", + name="WifiDevice0", + mac="xx:xx:xx:xx:xx:xx", + type="laptop", + wifi="2.4G", + ), +} + +SERIAL = "m123456789" + +SENSOR_DATA_QUERY = { + "sys_serial_number": SERIAL, + "sys_firmware_version": "XF6_4.0.05.04", + "sys_bootloader_version": "0220", + "sys_hardware_version": "RHG3006 v1", + "omci_software_version": "\t\t1.0.0.1_41032\t\t\n", + "sys_uptime": "12:16:41", + "sys_cpu_usage": "97%", + 
"sys_reboot_cause": "Web Reboot", + "sys_memory_usage": "51.94%", + "sys_wireless_driver_version": "17.10.188.75;17.10.188.75", + "sys_wireless_driver_version_5g": "17.10.188.75;17.10.188.75", + "vf_internet_key_online_since": "", + "vf_internet_key_ip_addr": "0.0.0.0", + "vf_internet_key_system": "0.0.0.0", + "vf_internet_key_mode": "Auto", + "sys_voip_version": "v02.01.00_01.13a\n", + "sys_date_time": "20.10.2024 | 03:44 pm", + "sys_build_time": "Sun Jun 23 17:55:49 CST 2024\n", + "sys_model_name": "RHG3006", + "inter_ip_address": "1.1.1.1", + "inter_gateway": "1.1.1.2", + "inter_primary_dns": "1.1.1.3", + "inter_secondary_dns": "1.1.1.4", + "inter_firewall": "601036", + "inter_wan_ip_address": "1.1.1.1", + "inter_ipv6_link_local_address": "", + "inter_ipv6_link_global_address": "", + "inter_ipv6_gateway": "", + "inter_ipv6_prefix_delegation": "", + "inter_ipv6_dns_address1": "", + "inter_ipv6_dns_address2": "", + "lan_ip_network": "192.168.0.1/24", + "lan_default_gateway": "192.168.0.1", + "lan_subnet_address_subnet1": "", + "lan_mac_address": "11:22:33:44:55:66", + "lan_dhcp_server": "601036", + "lan_dhcpv6_server": "601036", + "lan_router_advertisement": "601036", + "lan_ipv6_default_gateway": "fe80::1", + "lan_port1_switch_mode": "1301722", + "lan_port2_switch_mode": "1301722", + "lan_port3_switch_mode": "1301722", + "lan_port4_switch_mode": "1301722", + "lan_port1_switch_speed": "10", + "lan_port2_switch_speed": "100", + "lan_port3_switch_speed": "1000", + "lan_port4_switch_speed": "1000", + "lan_port1_switch_status": "1301724", + "lan_port2_switch_status": "1301724", + "lan_port3_switch_status": "1301724", + "lan_port4_switch_status": "1301724", + "wifi_status": "601036", + "wifi_name": "Wifi-Main-Network", + "wifi_mac_address": "AA:BB:CC:DD:EE:FF", + "wifi_security": "401027", + "wifi_channel": "8", + "wifi_bandwidth": "573", + "guest_wifi_status": "601037", + "guest_wifi_name": "Wifi-Guest", + "guest_wifi_mac_addr": "AA:BB:CC:DD:EE:GG", + "guest_wifi_security": "401027", + "guest_wifi_channel": "N/A", + "guest_wifi_ip": "192.168.2.1", + "guest_wifi_subnet_addr": "255.255.255.0", + "guest_wifi_dhcp_server": "192.168.2.1", + "wifi_status_5g": "601036", + "wifi_name_5g": "Wifi-Main-Network", + "wifi_mac_address_5g": "AA:BB:CC:DD:EE:HH", + "wifi_security_5g": "401027", + "wifi_channel_5g": "36", + "wifi_bandwidth_5g": "4803", + "guest_wifi_status_5g": "601037", + "guest_wifi_name_5g": "Wifi-Guest", + "guest_wifi_mac_addr_5g": "AA:BB:CC:DD:EE:II", + "guest_wifi_channel_5g": "N/A", + "guest_wifi_security_5g": "401027", + "guest_wifi_ip_5g": "192.168.2.1", + "guest_wifi_subnet_addr_5g": "255.255.255.0", + "guest_wifi_dhcp_server_5g": "192.168.2.1", +} diff --git a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..dd268f4ed1a --- /dev/null +++ b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr @@ -0,0 +1,45 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'device_info': dict({ + 'client_devices': list([ + dict({ + 'connected': True, + 'connection_type': 'wifi', + 'hostname': 'WifiDevice0', + 'type': 'laptop', + }), + ]), + 'last_exception': None, + 'last_update success': True, + 'sys_cpu_usage': '97', + 'sys_firmware_version': 'XF6_4.0.05.04', + 'sys_hardware_version': 'RHG3006 v1', + 'sys_memory_usage': '51.94', + 'sys_model_name': 'RHG3006', + 'sys_reboot_cause': 'Web Reboot', + }), + 'entry': dict({ + 'data': dict({ + 
'host': 'fake_host', + 'password': '**REDACTED**', + 'username': '**REDACTED**', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'vodafone_station', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/vodafone_station/test_button.py b/tests/components/vodafone_station/test_button.py new file mode 100644 index 00000000000..8b9b0753caa --- /dev/null +++ b/tests/components/vodafone_station/test_button.py @@ -0,0 +1,56 @@ +"""Tests for Vodafone Station button platform.""" + +from unittest.mock import patch + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.vodafone_station.const import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_registry import EntityRegistry + +from .const import DEVICE_DATA_QUERY, MOCK_USER_DATA, SENSOR_DATA_QUERY, SERIAL + +from tests.common import MockConfigEntry + + +async def test_button(hass: HomeAssistant, entity_registry: EntityRegistry) -> None: + """Test device restart button.""" + + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + entry.add_to_hass(hass) + + with ( + patch("aiovodafone.api.VodafoneStationSercommApi.login"), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_devices_data", + return_value=DEVICE_DATA_QUERY, + ), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_sensor_data", + return_value=SENSOR_DATA_QUERY, + ), + patch( + "aiovodafone.api.VodafoneStationSercommApi.restart_router", + ) as mock_router_restart, + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity_id = f"button.vodafone_station_{SERIAL}_restart" + + # restart button + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNKNOWN + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"{SERIAL}_reboot" + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert mock_router_restart.call_count == 1 diff --git a/tests/components/vodafone_station/test_config_flow.py b/tests/components/vodafone_station/test_config_flow.py index 0492d32070f..3a54f250871 100644 --- a/tests/components/vodafone_station/test_config_flow.py +++ b/tests/components/vodafone_station/test_config_flow.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components.device_tracker import CONF_CONSIDER_HOME from homeassistant.components.vodafone_station.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -124,6 +124,9 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -136,15 +139,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: 
"homeassistant.components.vodafone_station.async_setup_entry", ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -172,6 +166,10 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + with ( patch( "homeassistant.components.vodafone_station.config_flow.VodafoneStationSercommApi.login", @@ -184,15 +182,6 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> "homeassistant.components.vodafone_station.async_setup_entry", ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ diff --git a/tests/components/vodafone_station/test_diagnostics.py b/tests/components/vodafone_station/test_diagnostics.py new file mode 100644 index 00000000000..02918d81912 --- /dev/null +++ b/tests/components/vodafone_station/test_diagnostics.py @@ -0,0 +1,51 @@ +"""Tests for Vodafone Station diagnostics platform.""" + +from __future__ import annotations + +from unittest.mock import patch + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.components.vodafone_station.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from .const import DEVICE_DATA_QUERY, MOCK_USER_DATA, SENSOR_DATA_QUERY + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + entry.add_to_hass(hass) + + with ( + patch("aiovodafone.api.VodafoneStationSercommApi.login"), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_devices_data", + return_value=DEVICE_DATA_QUERY, + ), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_sensor_data", + return_value=SENSOR_DATA_QUERY, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == ConfigEntryState.LOADED + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props( + "entry_id", + "created_at", + "modified_at", + ) + ) diff --git a/tests/components/voip/conftest.py b/tests/components/voip/conftest.py index b039a49e0f0..99707297230 100644 --- a/tests/components/voip/conftest.py +++ b/tests/components/voip/conftest.py @@ -6,6 +6,7 @@ from unittest.mock import AsyncMock, Mock, patch import pytest from voip_utils import CallInfo +from voip_utils.sip import get_sip_endpoint from homeassistant.components.voip import 
DOMAIN from homeassistant.components.voip.devices import VoIPDevice, VoIPDevices @@ -14,6 +15,9 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.components.tts.conftest import ( + mock_tts_cache_dir_fixture_autouse, # noqa: F401 +) @pytest.fixture(autouse=True) @@ -52,8 +56,7 @@ async def voip_devices(hass: HomeAssistant, setup_voip: None) -> VoIPDevices: def call_info() -> CallInfo: """Fake call info.""" return CallInfo( - caller_ip="192.168.1.210", - caller_sip_port=5060, + caller_endpoint=get_sip_endpoint("192.168.1.210", 5060), caller_rtp_port=5004, server_ip="192.168.1.10", headers={ diff --git a/tests/components/voip/snapshots/test_voip.ambr b/tests/components/voip/snapshots/test_voip.ambr new file mode 100644 index 00000000000..3cc64400419 --- /dev/null +++ b/tests/components/voip/snapshots/test_voip.ambr @@ -0,0 +1,7 @@ +# serializer version: 1 +# name: test_calls_not_allowed + b'\xfe\xff\x04\x00\x05\x00\x03\x00\x04\x00\x03\x00\x02\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xfc\xff\xfc\xff\xfc\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x03\x00\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x03\x00\x02\x00\x03\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x03\x00\x02\x00\x02\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\xfe\xff\xfc\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\xfd\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\x00\x00\xff\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfc\xff\xfe\xff\xfd\xff\xfe\xff\xfc\xff\xfc\xff\xfe\xff\xfd\xff\xfc\xff\xfe\xff\xfc\xff\xfc\xff\xfd\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\xfe\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xfe\xff\xfe\xff\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x02\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\x00\x00\xfe\xff\xfc\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfd\xff\xff\xff\xff\xff\xfd\xff\xfc\xff\xfd\xff\xfe\xff\xfe\xff\xfc\xff\xfc\xff\xff\xff\xfe\xff\xfc\xff\xfa\xff\xfb\xff\xfb\xff\xfb\xff\xff\xff\xfe\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfa\xff\xfe\xff\x00\x00\xfd\xff\x00\x00\x00\x00\xff\xff\x00\x00\xfd\xff\xfa\xff\xfc\xff\xfc\xff\xfa\xff\xfe\xff\xfd\xff\xf8\xff\xf7\xff\xfa\xff\xfe\xff\xfa\xff\xf8\xff\xf9\xff\xfa\xff\xfd\xff\x00\x00\x00\x00\x00\x00\xfb\xff\xfb\xff\xfa\xff\xfd\xff\xff\xff\xff\xff\x01\x00\xfc\xff\xff\xff\xf8\xff\xff\xff\x00\x00\xf3\xff\xfd\xff\xf3\xff\xfb\xff\x01\x00\xff\xff\xfa\xff\x02\x00\xf4\xff\xeb\xff\xfc\xff\xf7\xff\xe8\xff\xfb\xff\xf8\xff\xf7\xff\r\x00\xfe\xff\x02\x00\xfe\xff\xf9\xff\xfa\xff\xf8\xff\x00\x00\xf6\xff\xfe\xff\x02\x00\x05\x00\x04\x00\xfa\xff\xf4\xff\xe8\xff\xf3\xff\x06\x00\xf9\xff\x06\x00\n\x00\xf8\xff\xfa\xff\x01\x00\xf4\xff\xfd\xff\xf7\xff\xf4\xff\x01\x00\x05\x00\x02\x00\x04\x00\xfc\xff\xef\xff\x03\x00\xf3\xff\xfc\xff\x08\x00\x04\x00\xfd\xff\x08\x00\x04\x00\x00\x00\x00\x00\x06\x00\x03\x00\xfd\xff\x04\x00\x15\x00\x06\x00\x12\x00\x15\x00\x05\x00\x04\x00\x05\x00\x05\x00\x02\x00\x07\x00\x05\x00\xfc\xff\xfd\xff\x06\x00\xff\xff\xf8\xff\x01\x00\xf2\xff\xe6\xff\xf4\xff\xef\xff\xfb\xff\xfc\xff\xf2\xff\xec\xff\xe4\xff\xe6\xff\xf9\xff\xfa\xff\xee\xff\xea\xff\xe9\xff\xf8\xff\x06\x00\x0b\x00\xe9\xff\x03\x00\xea\xff\xfc\xff\x0f\x00\x00\x00\x13\x00\xe6\xff\xfe\xff\x10\x00\x12\x00\xfd\xff\x03\x00\xf1\xff\xfb\xff\x18\x00\x1f\x00\x08\x00\xfa\xff\xf9\xff\xf6\xff\r\x00\x17\x00\x03\x00\xfb\xff\xfc\xff\xf3\xff,\x00\x1c\x00\xf8\xff\xed\xff\x05\x00\x10\x00$\x00@\x00\x19\x00\x00\x00\x19\x004\x00G\x00]\x001\x00\x07\x005\x00J\x00
X\x00\\\x00\x03\x00\xf6\xff\x13\x007\x00]\x008\x00\xef\xff\xeb\xff\x00\x00#\x00\x85\x00S\x00\xb6\xff\xcf\xff\x1a\x00\xc3\xff\xb6\x00\x8a\x00^\xff\xe0\xff\xfc\xff\xba\xff4\x00n\x00\xc5\xff5\xff\xf4\xffR\x00\xe8\xff-\x00\x11\x00z\xff\xb0\xff\x92\x00\xeb\xff\xca\xff\t\x00\xa0\xff\xcb\xff6\x00L\x00\x02\x00\x91\xff\xdb\xff\xd3\xff\xed\xff\xc0\xff\x8b\xff\x97\x00\xe2\xff\x16\x00B\x00\xbc\xff\xfb\xff1\x00\xe4\xff\xed\xff\x95\x00\xcc\x00H\x00>\x00\x03\x00g\xff\x18\x01\x8c\x01\xa8\xff?\xff\xc6\xfeO\xff\xaa\x00\x00\x01Q\xff\xaf\xfe\xce\xfe\xd8\xfe\x7f\xff\xce\xfe\x93\xfd\xb6\xfc\x9c\xfd\xb1\xff\xf7\x00H\x00D\xfe\x8d\xfc\xc2\xfco\xffG\x01r\x00\x94\xffG\x007\x01,\x02\xc0\x02\x18\x01\xaa\xff\xf0\xffS\x00\xbf\x029\x03\xa0\x01p\x00/\x00\xc4\xff\xb3\xff\xd4\xffU\xfdB\xfd\x8b\xfe\xfb\xfe\x86\xfe\x0e\xfd\xba\xfd\xb7\xfd\x8e\xfc\xf0\xfc\x88\xfd"\xfe\'\xfe]\xfe\xfb\xfe\x13\x00\x08\x01\xe1\x00&\xff\xf0\xfe\x05\x015\x01E\x02:\x02G\x02*\x02E\x02\xcf\x02\x1f\x03\xcc\x03\x15\x03N\x03\xdf\x03\x82\x04X\x05P\x05f\x04}\x04Q\x06\xe3\x06\x9a\x06\x8e\x06\xc7\x05a\x05\xe6\x05-\x06g\x066\x06\x9e\x05\xf4\x03\x9b\x03\x14\x03e\x02\x99\x01\xdf\xff\xa1\xfe{\xfe%\xfe2\xfd/\xfc\xc3\xfa-\xf9\xe2\xf8\xa2\xf8\x8d\xf8\xa0\xf9B\xf9\x15\xf9\xf3\xf8<\xf9y\xfa\xe1\xfa\xce\xfa#\xfb\xa1\xfc\xf3\xfd\xec\xfeE\xff\xc5\xfe\x9f\xfe8\xff\x19\xff\xff\xfe5\xff\xd8\xfe\x90\xfe\x87\xfd\xb5\xfcR\xfc\x18\xfc\xae\xfaI\xf9/\xf9\x14\xf9>\xf9\xb6\xf8d\xf8o\xf8E\xf8\x18\xf8c\xf8g\xfaA\xfb\xe2\xfak\xfb\xda\xfbM\xfd\xa0\xfe\x1c\xfft\xfe\xee\xfe\xf9\xff\x0e\x00y\x00*\x00P\xff\xfa\xfe\x84\xfe\xef\xfd\xd4\xfe\xb3\xfdf\xfd\xfa\xfbq\xfb\xfa\xfb \xfd{\xfd\xe4\xfc\xb3\xfc\xe5\xfa\x97\xfd\xee\xffP\x00o\x01o\x00\xfc\x01\x13\x04S\x05R\x08\x13\x07\xda\x08\xa6\t`\x0cX\x11\x1c\x0f\x88\x0b\xb5\x04\x17\x08\x8f\x17\x8f)\x9f4G+\xa0\x1c\x9f\x12\xe9\x13\x88#\xac+++\x94"\x8f\x1bM\x1f\xa0\x1e\x05\x17\xf1\x04\x17\xf4V\xec\x13\xf0\x0e\xfaJ\xfe&\xf9\xcb\xe7\x96\xd7\xa6\xcf\xab\xd2\xd2\xd9\x95\xdbT\xd9J\xdb\x84\xe2\x98\xe8\x06\xeb8\xe8J\xe5\x93\xe5\xfa\xea\xa2\xf8:\t\xe9\x11\xd3\x10c\n\x97\x05\x1a\x08\xbb\r\x94\r\x15\x0e\xef\rz\x0eU\x10\xc1\r+\x08\xbd\xfd(\xf24\xeaj\xec\x1f\xf3H\xf7\x0e\xf5\x07\xed\xcb\xe6\x9f\xe2\xe1\xe2\xdc\xe5&\xe87\xed\xcb\xf1\x13\xf8\xf9\xfe*\x039\x04\xda\x00h\xff\xe3\x03\xdf\x0eY\x18\xf4\x1d\xa7\x1d8\x1a=\x16\xad\x12\xa8\x118\x11\xa7\x10\xaa\x0e\xfb\r`\x0c}\n\xd2\x06|\xfe@\xf5\x11\xf1U\xf2K\xf6\x9c\xf9\xf3\xf8\xf8\xf5\xc6\xf2\xb5\xf0\x1a\xf2\xb6\xf4\xa5\xf6\x87\xf7~\xf9\xa3\xfd1\x01{\x03\xff\x01\xfc\xfc\x1c\xfb\xa7\xfb\xf7\xfd\x85\x00\xb1\x00\xaf\xfe\x01\xfc\xf1\xf9x\xf7\xab\xf6c\xf4M\xf2f\xf2?\xf2\x1f\xf7 \xf8\xf7\xf7!\xf6W\xf1@\xf34\xf5\xce\xf8\xdb\xfdA\x00\x9f\x02[\x03\x18\x04\x0b\x01\xb0\x02\x0c\x06>\x08 \x0b\xfa\n\xc6\x0e\xaa\x12K\x13>\x12y\x11o\x11C\x17\xe6\':7w9 /\x0e$\'$\xcb)\x04,\xc8+\xc3+\x9a)t"`\x18\xf0\x0f\x88\x07s\xfc\xe7\xef\xf7\xe7\x9a\xe9\x0c\xed3\xec\xcf\xe4*\xda\x11\xd1\xab\xcc\xf6\xcc\x00\xd1^\xd8\x93\xdf\xb2\xe4\x08\xe75\xea\xad\xefZ\xf3`\xf4\x0c\xf6\x05\xfd/\tU\x13\xaa\x17\x06\x16,\x13-\x10\xdb\r\x18\x0c"\n\xe7\t\x8b\x08\xef\x04\x19\x00P\xfb\xd8\xf5\xbe\xed{\xe4C\xe0\x04\xe1\xff\xe2\xe4\xe2\xaf\xe2\x89\xe2N\xe2\x9e\xe2 \xe4{\xe8\x1e\xef\xf5\xf5\x1a\xfc\xf2\x01\x9e\x08\xba\x0fh\x12\xf5\x14\xcc\x17\xb1\x1cb \xb8 9!\xa6 
|\x1f\xbe\x1bu\x17\xef\x13~\x0fo\nt\x05\xef\x00;\xfd\xe3\xf9L\xf6\xab\xf1&\xef\xc7\xed\xf0\xec\x1a\xed\xf4\xec;\xee/\xf0\xa9\xf22\xf68\xf9\xf3\xfax\xfb\xd2\xfd(\xff\x00\x01\x9d\x02\xad\x02T\x03R\x02\xf6\x00$\xff\\\xfd\x9e\xfa\xef\xf6\x91\xf4\x1d\xf3K\xf3\x80\xf2\x88\xf0X\xed#\xec\x8f\xebb\xeaK\xeb\xdc\xec;\xf0\xf2\xf3\x15\xf5\xfe\xf7\xb2\xfa\xf3\xfb(\xff\xc9\x00`\x03]\t\x0b\x0cW\x10O\x14\xbf\x14\x9c\x14\xb6\x11\x81\x11X\x14y\x17f\x1b\xce&c9vB\x96:,&\xd5\x1b\xbc%\xf20H4.3\x983\xaa0P%B\x15\xa8\n/\x03i\xf8#\xf0\xcf\xf03\xf9"\xfc\x8a\xf1\xb0\xde{\xd1\xf9\xcc\xa9\xcc>\xcfw\xd67\xe1\xb8\xe7F\xe7\x06\xe6\xf1\xe7\x0c\xe9C\xe8\x0c\xea\xa2\xf2\x83\x00k\r\x91\x13\xb1\x13X\x0f\xa4\n\x13\x07(\x05\xe9\x06L\n\x00\x0b\xbb\x08\xe9\x04\x01\xffN\xf7\x97\xee\'\xe6\x9c\xe0\x95\xdeu\xdfX\xe3\x14\xe5\xba\xe4\xee\xe1\xad\xddE\xdc+\xe0\x10\xe8\xab\xf0\xbc\xf7\x06\xfet\x05\xdf\n\xc3\x0fH\x13)\x16W\x18\x7f\x1c\xc3"\x14)\xaf+<)k$0\x1e8\x19V\x15\r\x13\xd7\x0f3\x0c\x0e\x08\xb8\x01l\xfa\xe3\xf3\x05\xef\x17\xec_\xeah\xea\xa6\xeb\xfd\xec`\xeex\xef\x82\xf0\xcf\xf0U\xf2\x9d\xf5S\xfa\x89\xff\x1f\x02[\x03>\x039\x02L\x01\x0b\x00\x1e\x00\x11\x00\xb5\xfe~\xfcv\xf9!\xf7\xb5\xf3\xde\xf0Z\xed\xac\xeat\xe9\xc3\xe8\xdd\xe9\xf5\xe9\xe5\xe9\xce\xe8+\xe9_\xeb\x0f\xee\xa8\xf3x\xf6\xc7\xf9\x8d\xfc`\xfe\xd3\x03R\tA\x0f\x15\x12C\x12\xdb\x12\xfd\x14%\x1bK\x1eN\x1f\xd3\x1f\x08#\xa5.\xe7<\xb7D\x99?\x101*&\xc8&O.N3\xac2w/\x03(\xf0\x1c\xbe\x0e\xd9\x03\x90\xfd\x9f\xf5*\xefz\xeb\x04\xed\x81\xee\xac\xe9\x04\xe0\xcd\xd4`\xcdL\xcc7\xd2x\xdb0\xe4?\xea\x03\xeb\xe6\xe9\x1d\xeaH\xed\xd2\xf2\xfe\xf6\xc3\xfc\x11\x04\xa6\x0cc\x12H\x14\'\x12\x06\r!\x08h\x04\x1c\x03\x0c\x03\x10\x03{\x02\x18\xff?\xf8\xe8\xef\x1a\xe7\x00\xe1\x1e\xddb\xdb\xcf\xdby\xdd)\xe0=\xe2T\xe2\xf5\xe1\xac\xe2\xcf\xe4\xd4\xe8\xbc\xef?\xfa\xf3\x03k\x0cF\x12A\x16R\x18F\x19U\x1c\xca w%\x9a(\x99)])S&(!\xa5\x1aD\x14\x12\x0fP\x0b\x12\x08g\x04q\x00\xa2\xfb\xf8\xf5x\xef1\xea^\xe7\x82\xe7\x04\xe9p\xeb\xc0\xed\xf6\xef\x94\xf1\x94\xf2#\xf3v\xf4k\xf7j\xfb\xbf\xff\xcc\x03N\x07\x1f\x08\xbc\x06\x03\x04\xe8\x01\xe7\x00\r\x00\xf5\xfeo\xfdE\xfbl\xf8\x8e\xf5|\xf1;\xed\'\xea\n\xe8,\xe7\xe3\xe7,\xe9d\xea`\xea#\xe9\xf5\xe8\x88\xea\x11\xede\xf2\xcb\xf7`\xfc\xda\xff7\x00l\x01H\x04g\tX\x0f\x93\x13\x1c\x162\x18S\x1c\xa5!\xa5,&=\xe5J\x93L\xaf>[1\x0e1\x819x?j>\x0b;\x8c6\xc3,\\\x1de\x0e\xb3\x04X\xfc8\xf35\xeb_\xe87\xebR\xeb\xf2\xe2\x8e\xd4\x8c\xc9\xad\xc6\xb1\xc9\xe4\xce\xc2\xd5c\xde\x8d\xe5x\xe8\xc8\xe8\xd7\xe9\xc7\xed\x9a\xf2\x15\xf7\xa0\xfc\x87\x04\xc1\x0fd\x19V\x1d\x97\x19a\x12\x93\x0b\xee\x06\xf7\x04E\x05\xe8\x06\x05\x07\xbb\x02\xb6\xf9\xbd\xee|\xe5P\xe0\xa5\xdc\x98\xd9*\xd8\x86\xd9;\xdc\x05\xde>\xde]\xdeg\xdf\xd7\xe1\xea\xe6\r\xeeE\xf7`\x01 \nM\x0fI\x12C\x15T\x1a\x1a \x91$q\'\xa6)\x89*H)\xb4%\xf8 6\x1d\x80\x19\xe1\x14d\x0f|\n\x8f\x06\x83\x02[\xfc\xd1\xf5\'\xf0T\xec\x99\xea4\xea\xad\xeaP\xeb1\xec\xfa\xec\x9a\xed\xb2\xeeo\xf0|\xf3]\xf7!\xfb8\xff\xe3\x02\xed\x05\xd4\x07\xc2\x07b\x06\x9a\x04\xc7\x03\x97\x03\xcc\x03\xad\x02\xe1\xff\xe6\xfb0\xf7X\xf3G\xf0t\xed\xcb\xea\x80\xe8\x08\xe7b\xe6\xab\xe6J\xe7\xef\xe75\xe9\xc1\xea\xde\xec\xcd\xef\xd2\xf2\x93\xf5\xdc\xf7#\xfa\xce\xfd\xc7\x02:\x07Y\x0b\xc3\x0e\xc8\x10\x82\x11]\x11\xb9\x12\x98\x16 
\x1c"%b3+C\xd8JVD\x845\xd2*\x89)I-\xca3X;\xc0?\xc3;\xfd,\r\x19\x8f\x08r\xfd\t\xf8c\xf5^\xf2\x06\xf0W\xedR\xea\x0c\xe5\xdb\xdc\xe9\xd2\xee\xca\x87\xc6\xf4\xc5\xe7\xcaP\xd5\xb0\xe1\x8c\xeaM\xec\xdb\xe8\xc5\xe5\x99\xe5U\xe9G\xf1\xd1\xfc\x8b\t\xff\x12+\x16\x90\x15\xc9\x13\xe8\x10J\x0cQ\x07\x99\x04U\x05\x85\x07\x06\t\xb2\x08\xd9\x04\xa1\xfck\xf1)\xe6\xe3\xddA\xda:\xdc\xe1\xe1\xb4\xe7\x93\xe9\x90\xe7\x0f\xe5K\xe3b\xe3\x14\xe6\xc6\xeb=\xf4"\xfdI\x05\xaa\x0c\xd7\x12\xaf\x17}\x1a\xe9\x1a\x8e\x1a\x07\x1b\x97\x1e\xc6#\xc3(M*\'(\xe4"I\x1b\xd4\x13\x9c\r\\\n\xdc\x08\xcd\x07\xaa\x04b\xffH\xf9\xba\xf3f\xef\x0b\xec\xb7\xe9\x89\xe8=\xe8\x8a\xe8\xc4\xe9\x18\xec\xf7\xeeK\xf1\x00\xf3\x02\xf4\x11\xf5\x87\xf6\xd9\xf9\x0b\xfe\xdf\x01b\x04\xee\x04\xd1\x04o\x03\xfc\x00\xd8\xfe\x9f\xfd\x06\xfdd\xfc,\xfa\xa9\xf7A\xf5M\xf3\x85\xf1\xab\xef\xac\xed\xa8\xec\xd0\xecp\xed\xae\xee\xda\xefn\xf1\x8a\xf3"\xf5\xc6\xf6\x19\xf9\x0f\xfc_\xff$\x02\x0e\x05d\x08_\x0c\x01\x10&\x12\x17\x13Q\x13\x14\x142\x15\x0b\x18b\x1fK,\xd89\x86@\x8e<\x991\xf4\'t$\xb5&\xcf+\xfa1\xa16\x036\x7f-s\x1e.\x0e\x1b\x02\x97\xfb\x81\xf93\xf8\x1d\xf55\xf0k\xeat\xe5\x7f\xe0\xaa\xda\x00\xd4\xfb\xcd\xa8\xc9r\xc8i\xcb\x1d\xd2T\xda\xf2\xe0g\xe4\'\xe5]\xe4\xb4\xe3e\xe5\x07\xeb\xf4\xf4\xcd\xffL\x08\x12\rD\x0f\xfc\x0f\xa4\x0f\xea\r\x13\x0cE\x0b\xca\x0b.\rj\x0e\xa3\x0e\xe6\x0c\xf7\x08\xf9\x02\xd0\xfb\x84\xf4\xd9\xee]\xec\xcd\xec\xca\xee \xf0\x16\xf0:\xef\xb4\xed\xfe\xeb\xe5\xea\xdc\xeb\x94\xefW\xf5\xf7\xfb\xe6\x01\x90\x06\x11\n\xf5\x0c?\x0f\xd3\x10\xb1\x11\x0c\x13c\x15\x8a\x18\xef\x1a\xfb\x1bK\x1b+\x19\xa0\x15\xed\x10\xec\x0b/\x07\xe3\x03\xdd\x01\x94\x00\xfc\xfe\xba\xfc\x8d\xf9\xe5\xf5\xef\xf1g\xee\xe3\xeb\xf6\xea\xb1\xebx\xed\xac\xef\xa0\xf1\xea\xf2k\xf3y\xf3q\xf3\xf6\xf3\x0e\xf5\xd4\xf6\xfe\xf8\x7f\xfb\xd0\xfd\xa0\xffX\x00\x1c\x00G\xff&\xfeV\xfd\xbe\xfc\xcb\xfc2\xfd\xe5\xfd\x94\xfe\xce\xfe^\xfep\xfdf\xfc\xb9\xfb\xa3\xfb\x05\xfc\x18\xfdo\xfe\xce\xff\xf0\x00\x88\x01\x8a\x01\x0b\x01B\x00\xea\xff\xa1\x00A\x02|\x04y\x06\xc5\x07V\x08\\\x08\x08\x089\x08\xff\x08\xe5\nj\x0ej\x14\x14\x1ds&\xde,\xc6-\xd4)\x19$\x17 \x8c\x1f\xa9"\xf1\'?-0/\xba+\x88"\xa1\x15\x97\x08\xc7\xfe\x01\xfa\xed\xf8x\xf8\xf3\xf5\x84\xf0$\xe9c\xe1\x19\xdaC\xd4Z\xd0\x97\xce:\xcel\xce\xf4\xceF\xd08\xd3\xe0\xd7\x9d\xdd5\xe3e\xe7\xe5\xe9\xfc\xebr\xef^\xf5\x06\xfd\x90\x05\x86\r\xf0\x13l\x17\x9a\x17\x95\x15P\x13\x88\x12\x99\x13\xdc\x15\xc4\x17$\x18\xed\x15\x94\x11\xd0\x0b\xd4\x05\xb0\x00\xf9\xfc\xcb\xfa^\xf9\xfc\xf7\x05\xf6\xca\xf3\xbd\xf1t\xf0\xab\xef\xfb\xeeA\xee\xcb\xed\'\xeeW\xef\xa4\xf1\x13\xf5Q\xf9\xa3\xfd\xe0\x00\xa2\x02\xd4\x02F\x02\x90\x02[\x04\xb5\x07\xa1\x0b\xd9\x0e\x0f\x11\xc5\x11\xd9\x10\xc1\x0e\x02\x0c\xdf\t\x95\x08E\x08N\x08\x08\x08\x05\x07&\x05\xe8\x02\x0c\x00\x81\xfc\xa2\xf8o\xf5\xc7\xf3\xaf\xf3\x81\xf4\xb5\xf5\xd4\xf6_\xf7\x1a\xf7\xdb\xf5\x07\xf4c\xf2\xc9\xf1\xff\xf2\xd9\xf5\x93\xf9\xcf\xfc\xf4\xfe\x06\x00d\x00M\x00\xd8\xff\xd5\xff\xb2\x00v\x02O\x04\xaa\x05Q\x06;\x06|\x05,\x04\xb0\x02n\x01\xbf\x00n\x00z\x00`\x00\xe1\xff\xdc\xfe\x7f\xfd?\xfca\xfb\xea\xfa\xdf\xfa\xf8\xfa\x13\xfb,\xfbH\xfb\x96\xfb\xce\xfb\x18\xfc\xd4\xfcx\xfe\xc5\x00z\x03\xea\x06$\x0cG\x13\xd5\x1ao \xec"\xea"\xb2!\x7f \xe7\x1f\xfc 
\x00$=(\xba+\xfd+q\'\xb1\x1e\xe7\x13\xd3\t\\\x02<\xfe\x9c\xfc\xde\xfbO\xfa\\\xf6\x96\xef\xb6\xe6\xa9\xddf\xd6\x93\xd2&\xd2:\xd4:\xd7\xfa\xd9\xed\xdb4\xdd<\xde\x85\xdf\xe4\xe1\x8a\xe5\x88\xea/\xf0.\xf63\xfcv\x02o\x08\xaa\rX\x11:\x13\x80\x13\xc9\x12\xee\x11\x85\x117\x12\xbe\x13\x87\x15\xe7\x15\xdb\x13\xf0\x0e0\x08\x1a\x01\x08\xfb\x16\xf7>\xf5#\xf5;\xf5`\xf4\xcd\xf1\xed\xed\x15\xea\x85\xe7\xa2\xe6;\xe7\xc7\xe8+\xeb"\xeeS\xf1\xb5\xf4\xff\xf7B\xfbA\xfe\xb6\x00\xaf\x02Z\x04\x84\x06\xb6\t\xff\r\x8f\x12R\x16C\x188\x18\xc6\x16\xa0\x14\xb2\x12O\x11\xd0\x10\xdc\x10\x8d\x10\xfb\x0e\xe7\x0b\xd2\x07\x98\x03\xe3\xff\xa1\xfc\xb8\xf9\x18\xf7\x02\xf5\xb3\xf3\x16\xf3\xf5\xf2\x10\xf3#\xf3\xf9\xf2X\xf2h\xf1\xba\xf0A\xf1Q\xf3\xa8\xf62\xfa\xf9\xfcV\xfe~\xfe<\xfe8\xfe\xf7\xfe;\x00\xf0\x01\x80\x03\x81\x04\x84\x04\x8e\x03\x06\x02\x91\x00\xb7\xffK\xff\x0e\xffe\xfe\x8b\xfdg\xfcX\xfb`\xfa\x8a\xf9\x1e\xf9\x11\xf9;\xf9!\xf9o\xf8\x8e\xf7\xf6\xf6f\xf7\x07\xf9m\xfb\xea\xfd6\xff[\xff\xe7\xfe\xf5\xfeS\x00\x83\x03+\t\xe2\x11:\x1c\t%\xfd(\xb5\'\xf1#0!\x82!\xdf$K*\xe3/\xb93\xdc3m/|&\xe7\x1a\xec\x0fb\x08\xd5\x04\xa0\x035\x02.\xff\x08\xfa\x96\xf2Q\xe9o\xdf\x1c\xd70\xd2\'\xd1\xbe\xd2f\xd5\n\xd8H\xda\x0e\xdcd\xdd:\xde.\xdf\x93\xe1(\xe6\n\xed\xd4\xf4V\xfc\xb6\x02\xfc\x07\xdd\x0b9\x0e\xf0\x0e\xb1\x0eI\x0e[\x0e\xfe\x0e\xd1\x0f\xaf\x10\x00\x11\x18\x10\xc5\x0c\xda\x067\xff\xce\xf7b\xf2p\xef\xe7\xee\xa8\xef\x80\xf0\x07\xf0\xbc\xed7\xea\xe2\xe6a\xe5\x92\xe68\xea\x00\xef\xce\xf3\x03\xf8\xab\xfb\xc6\xfe~\x01i\x04\x89\x07Z\x0b\x19\x0f\x9e\x12x\x15\x94\x17\x8d\x19\xea\x1a\x8e\x1b\xe7\x1at\x19\xaf\x17\x00\x16]\x14\x87\x12p\x10\xe7\r\xfc\n\xb4\x076\x04v\x00\xc4\xfcF\xf9\xf0\xf6z\xf5w\xf4J\xf3\xdf\xf1\xce\xf0\xf4\xefj\xef\'\xef\xa8\xef\r\xf1\xd7\xf2\\\xf4U\xf5=\xf6O\xf7\xb8\xf8\x18\xfa \xfb\xa2\xfb\xee\xfbl\xfc\x08\xfd\xb0\xfd\xdb\xfd\xc1\xfd[\xfd\xd5\xfc\xf4\xfb\xd8\xfa\x00\xfa\xcb\xf9/\xfaa\xfa\x0c\xfa\x02\xf9\xdf\xf7\xf5\xf6\xb3\xf6\xe2\xf6_\xf7.\xf8\xf0\xf8m\xf9U\xf9\xc9\xf8~\xf8\xef\xf8c\xfa\xa9\xfcp\xff\x81\x02\xfe\x04\x80\x06E\x07T\x08\xa3\n\xe8\x0e\xec\x15\xf8 E.\x9e8\x88:Z4\x0f,\x91(5,=4\x87<\xe9@v?j7u*z\x1br\x0ey\x06\xef\x03\x94\x03\xd2\x00s\xf9\t\xef\xe5\xe4%\xdcE\xd4\xc1\xcc\xc5\xc6\xa8\xc3I\xc4\xe9\xc7I\xcd\xac\xd2.\xd6\x82\xd7\x9f\xd7\n\xd85\xda\x07\xe0\x92\xea\x99\xf8L\x05\xf8\x0c|\x0f\xb0\x0fK\x0f\x9e\x0e\xd6\r}\x0e\x8c\x11\x9b\x15G\x18\xdc\x17D\x14\xaf\r\xe0\x04J\xfb \xf3\xec\xed\\\xec<\xeeN\xf1\x84\xf2\xdd\xef$\xea\x9a\xe4\xcb\xe1\\\xe2b\xe5\x8d\xeaZ\xf1\x9b\xf8\xe3\xfe\xfb\x02n\x05:\x07\xcb\t\xa2\x0cD\x10\x9e\x14\x11\x1a\xc1\x1f\xfa#\x18%\x8a"\x90\x1e\xee\x1a\x98\x18k\x16\xb7\x14\x80\x14\xea\x14\x8c\x13\x1f\x0e\xbd\x05j\xfd\xa3\xf7\xee\xf4Y\xf4\xd5\xf43\xf5W\xf4E\xf2,\xefg\xeb\x19\xe8K\xe7\xb0\xe9\xde\xed\xe8\xf1 \xf5\x95\xf7\x88\xf8\xa0\xf7\x9b\xf5N\xf4\xe8\xf4)\xf7#\xfa\xfc\xfc\xe9\xfen\xff\x9a\xfe\x8e\xfc\x92\xf9\xd3\xf6y\xf5A\xf6z\xf8\xc1\xfa\xa2\xfb\xdb\xfa\x97\xf9K\xf8\x84\xf7\xe6\xf6\xdc\xf6\x0c\xf8\xaf\xf9\x99\xfb\xe1\xfc\'\xfd\x81\xfdc\xfe\xe5\xff-\x017\x01\xcf\x00W\x01\xf3\x02\x1e\x05\xbd\x06\x7f\x07O\x08g\t\x1d\x0b6\r\xd3\x0f\xfd\x12\xef\x17\xe3\x1e\x16\'\x11/ 
414\x84/\xb7)\x1e(\x1e-\x125\xad9\xfc6\xef.\t%\xd0\x1ab\x10?\x07\x8b\x01\xa7\xfe\xba\xfb\xd3\xf5S\xed\xb1\xe3\x96\xda\xd4\xd2\x04\xcd8\xc9\x0e\xc7\xe3\xc6\xc5\xc9\xb4\xce\x9b\xd2I\xd3M\xd2\xe7\xd2\x0e\xd7u\xde\x11\xe8\x8a\xf29\xfc\xf5\x03\xcd\x07j\x086\x08\xba\nO\x105\x16&\x193\x19#\x18\x92\x16\x8d\x13\xe3\x0e\x90\t\xb3\x04\xc0\x00|\xfd\x88\xfa\xad\xf7\xbe\xf4\x03\xf2\xd0\xee\xba\xea\xec\xe6\xaa\xe5\x0f\xe8\x01\xec\xfa\xeeV\xf0D\xf2\xfc\xf5W\xfaN\xfe\xa1\x01\xee\x057\nV\x0ex\x11\xe6\x14\x11\x18\x02\x1b<\x1c\xc0\x1b\x8d\x1a\xee\x18\x80\x18n\x17\x9b\x152\x12\n\x0f\x1b\r\x92\x0bd\x08\xba\x02\x8f\xfc\x14\xf8\t\xf6^\xf5\xe0\xf4\xfb\xf3\xa3\xf2\xb6\xf0\xd1\xee\xb5\xec,\xeb\x96\xea\n\xecr\xefM\xf2\xf7\xf3\xbe\xf46\xf5\xff\xf4\xac\xf3\xc7\xf2/\xf4\x84\xf7\xbd\xfa\xbc\xfc4\xfd\xbc\xfc\x85\xfb\xcf\xf9\x85\xf8P\xf8\x9c\xf9i\xfcP\xffu\x006\xff\xea\xfc\x1e\xfba\xfa\xd4\xf9\xde\xf9\xa2\xfb\x1f\xfey\xff#\x00\x8f\x00T\x00\x10\xfd~\xf8\xe3\xf8\xcf\xfe\xc9\x05B\tW\x07\xfc\x03V\x01P\x01}\x03\xb6\x05=\x08J\n\x04\r\xd1\x0f;\x17S$j/a.\x99!V\x1a<"p0\xb47\xab7N7S6\xe4/\x80%\x86\x1d\xcb\x183\x13z\r\x14\n\xde\x08\xcf\x04\x9d\xfb\xeb\xee^\xe0E\xd5\x90\xd0\x1c\xd2Y\xd5\xde\xd4\xf7\xd1\x14\xcf\xa0\xcc\xdb\xc9\x07\xc8\x8e\xca\xd8\xd1m\xdb\xa0\xe4\x8a\xec\xb9\xf3\xd1\xf7K\xf8\x9b\xf8\'\xfd\x9f\x05\xfd\rr\x13\xa7\x17\\\x1a\t\x1bi\x18\xf2\x12\x86\r\xb0\n\xa8\x0b/\r\xbb\x0bY\x07\xbc\x01\x88\xfb\xb8\xf4r\xee\xf5\xea\x9e\xea\x04\xeb\xf7\xeb-\xedk\xee\xdd\xeda\xec\x1e\xec\x0f\xeeL\xf2\xf7\xf8x\x00\x11\x07\xcf\t\xc3\n\xc7\x0ba\x0e,\x110\x13\x14\x15\xfb\x18\xdc\x1di \xe9\x1e\x1c\x19S\x131\x0f\x11\x0f\xbd\x10\xd7\x10W\r8\t7\x05d\x00\xc4\xfa\xfe\xf5\xda\xf3\x8a\xf2\x0e\xf2\n\xf3Q\xf3r\xf1\x8c\xed\x03\xea\xbb\xe8\x1e\xea\n\xed\xa8\xf0!\xf3\xbb\xf3\xad\xf2O\xf2\x96\xf2\xec\xf2\x98\xf3\xf1\xf5!\xf9\xaf\xfb\x16\xfd\xa3\xfd\xaf\xfc\x95\xfbs\xfb\xfb\xfc\xd8\xff\xfa\x01\xa4\x02b\x01\xe7\xffQ\x00\xa7\x00\xc9\xff\x9d\xfeD\xfdS\xfe7\x00\xd9\xff?\xff\xb6\xfd&\xfd\xdd\xfd\x99\xfe\xdc\x00\x80\x02n\x02\xe9\x02\xc3\x03\xea\x03F\x04\xe7\x04\xe1\x062\t\x01\x0b\x83\r\x8f\x0fH\x15t\x1fJ);\'&\x1e\xc6\x1cU& 0\x0f0p.\x8e1\xc22\xe1,\x06%Z \x9f\x1b\xd7\x13\xe7\rk\x0e\x89\x0e\xe4\x06\xeb\xf9f\xeef\xe7\x0b\xe2A\xdc\xd5\xd8e\xd7\t\xd6\x82\xd3\xbb\xd1\xee\xd0d\xcf?\xcd\xf8\xcc\x14\xd1\xec\xd7\x1b\xdfS\xe5\x06\xea:\xed\x08\xf0\x1b\xf4"\xf9@\xfeO\x02\xb3\x06(\x0c\x8d\x10\xc7\x12\xf6\x12\xa1\x11K\x0f\xa9\x0c\xef\x0be\x0c\x0e\x0c\x06\n\x98\x07\xbb\x04\x08\x00w\xfa\xe9\xf6\xa1\xf4q\xf26\xf1m\xf2\xcc\xf5\xd2\xf5c\xf3.\xf1L\xf1)\xf2m\xf4\x95\xf8~\xfe\x0b\x03/\x060\t\xf0\t\xa6\x08\xe1\x07\x1a\n\xe3\x0e[\x15\xf7\x1a\xfd\x1b\x96\x17\xef\x11\xb7\x0f\x05\x10\x08\x0eM\x0c\xd0\x0be\x0c 
\x0cK\t\xdc\x03\xa2\xfc\x90\xf5\xc5\xf1\xfa\xf2\xec\xf4r\xf5\xe8\xf3\xdf\xef\xe3\xeb\xf7\xe8p\xe7\xdb\xe6\xd3\xe6\xbc\xe7a\xeb`\xef\xde\xf1\xc6\xf1\x86\xf0M\xef\xf1\xf0D\xf4\xb0\xf7Y\xfb\n\xfe\xa0\x00#\x03\\\x03z\x02\x94\x01P\x01`\x02\xf2\x03/\x08\x92\x0bh\t\xa8\x06\xf5\x03\xcc\x03$\x05\x16\x03\x01\x03\xaf\x05\xea\x06\x85\tW\n\x89\x07}\x01%\xfe\xd2\x01o\x04\xee\x05\x11\tN\n\xb9\x07H\x06v\x06\xd5\x06\xf8\x05\x8d\x05]\x07g\x0b\xee\x12b\x1a\xb1\x1b\x05\x17\\\x14?\x16\n\x19H\x1b\xe8\x1d\x8e!\xf7"l!\xf4\x1f\xbd\x1e\xe0\x19\xfb\x112\r\xa6\r\xf4\r\xec\n%\x07c\x03\x94\xfc\xff\xf3Z\xeeV\xeb{\xe7\xa2\xe2\xd3\xe0Y\xe1w\xe0Q\xdd8\xda\xf4\xd7O\xd6;\xd5\x8b\xd7\xb7\xdc{\xe1+\xe4\xef\xe5-\xe91\xec\xa8\xedc\xf0\xfb\xf4\xb9\xf8-\xfd]\x02\xac\x06\x0f\t\x97\t>\tf\t4\nW\x0b9\r\x02\x0eF\x0e\xb7\rM\x0c\x90\n\x9d\x07\xcc\x04L\x03\xc0\x024\x03$\x04]\x03\xfa\x01\x8e\xff\xe1\xfd\xc4\xfd\xdd\xfd\x8d\xfd\xdc\xfe,\x01J\x02B\x03\xdd\x041\x05J\x03\xfb\x01\x87\x03K\x06R\x07\x15\x08\xd1\x07V\x08m\x06\x84\x03\xdd\x02\x87\x008\xffT\x00\r\x002\xff\xe9\xfd\xcf\xfbJ\xf8e\xf6\xe0\xf60\xf5\xbc\xf2\xf5\xf4\x14\xf8\x12\xf6\xd9\xf0%\xf5\x06\xf8\x96\xefi\xf0\xe3\xf7 \xf9\xd0\xf5\xf1\xf6x\xfe\xed\xf9\xae\xf6z\xfc\xf1\xfb\xf1\xf7\x86\xfbc\x02\xae\xfd\x85\xfb+\x06\xdf\x01\x10\xf7V\x00r\x08\xcb\xfe`\xfa\x85\t\x97\x0f\xce\xfcy\x01\x17\x16\x84\x07\xa1\xfc\xe7\ny\x11\x02\t\xa3\x08\x1e\x12\x93\x0e?\x05\x8b\rQ\x11\x87\n(\x08\xe4\x0b[\x0c\xae\t)\x0c\x8c\x0c\x07\tZ\x03a\x05\x01\x07\x9c\x04\xeb\x03\xf9\x02\x8a\x04\xe4\x04\xdf\x00\xc9\x01e\x05\xf9\xfe\x03\xfe!\x06@\x06S\x01\xac\x05V\x08\xba\x05]\x03\xfd\x04\x8d\x07\xb2\x05\xc0\x01\xba\x04\xd8\x07F\x05\xcb\x02\xd8\x02i\x00\xcd\xfb$\xfb>\xfc\x15\xfb\x02\xf9\xe4\xf6\x04\xf5\x9e\xf4\xdc\xf3\xbd\xf1\xa0\xef\x8c\xefD\xee\xe4\xed\xd3\xf0c\xf2\x82\xef\x1b\xf0q\xf2&\xf4\xf6\xf2F\xf7B\xf9\xe0\xf5j\xfb\xae\xff\xe3\xfe\xa3\xfd\xbd\xff#\x03\xa9\x02<\x02\xab\x04F\x07\x1b\x04\xcb\x02\xb5\x07\xc2\x06\xa6\x03\xc2\x05\xa5\x04\xc9\x04\x96\x05\x13\x04\x10\x05\xcc\x04\xa3\x02r\x03\xb7\x04\xcf\x04\xd3\x01\x83\x040\x02\xb2\x01j\x04:\x01\x1b\xff#\x02\xb1\x01\xd3\xfb\x08\xfc\xc7\x00%\xfd\xec\xf4\xa3\xfb"\xfc\x08\xf7\xd4\xf7?\xfb\xb1\xf4|\xf2\x16\xf9\xd9\xf8Z\xf2\xf4\xf4V\x01\'\xf3\x15\xf4)\x00\x1a\xfdY\xef\xaf\xfb\x9c\xffd\x01\xc5\xf8\xcb\xf8\x95\x0c\xbc\xfea\xf6\xa7\t\xd0\x05%\xfaC\x047\x06C\x04\x06\xff5\t\xe7\x06\x07\xf8\xec\x07}\x0c\xa3\xfa\xbf\xf8"\x08\x81\x08\x03\xf7\xeb\xfb\xcb\x0e\xd2\xff\xf3\xf8\xb8\x05\x88\x03U\xfd\xad\x019\t~\x00\xc4\xff\x98\t\xbb\x04*\xfe\xa1\x0bk\x0c\x97\xfdI\x08M\n\xb9\x04\xe7\x04\xb6\n\xc3\tV\x05\xc3\x08\xed\x05\xf3\x03\xe7\x0cY\x07\xdd\xfbk\x05D\x10\xf5\xfa\x1a\xfd\xee\x10E\x01C\xf7\xf7\x04\x88\x05\xf4\xf4G\x04b\x00\xc3\xf8\xea\x00b\xfb\xc9\xfb\x8d\xfcG\xff\x18\xf7j\xf3\xda\x03,\x03\xd3\xf3\xb8\xf8\xc7\x06\x1a\xfa\x8d\xfb\x08\xfe\xdc\x03\x80\x00\xdc\xfa\xbe\x08\xa8\x02:\x00\xd2\x05\x9d\x05\x1f\x00[\x03\xbd\x06\x91\x03\xf4\x03\xf8\x03P\x04\x18\x02f\x01\xc6\x01\xfd\x01\x8a\x01\xa3\xfd@\xfd\x11\x05#\xfdG\xfa\xd2\x01g\xfdg\xf7\xe8\xfd\xb7\x02\x94\xf7\xf8\xf5q\x02\xf7\xfd\xda\xf3\xa8\xfd:\x01\x86\xf7D\xf5\xc7\x03\xa8\xfd\xc8\xfbt\xfc@\x00 
\xfe\x04\xfee\x060\xfa\xf7\xf8\xcc\x06\xdb\x00\\\xfa\x11\x00\x07\x03\xa3\xfb|\xfc\xc7\x04\xef\xf9\x17\xf8\xe5\xfc\xd7\x05\xb0\xef\x81\xfa\x1f\x08\xdd\xf8Q\xf1\xe7\xfd\x88\x01\x89\xf5Q\xf3\x00\x00\x08\x04\xd6\xed\xf1\x01\x8f\x00?\xf4O\x01\xcf\xfa\x89\xff\xfe\x01\xa0\xf8y\x06T\x01\x1c\xfbD\t[\x05\x85\xfa}\t\xf5\x07n\xfd\x0f\n\t\x04\xa1\x02\xa4\t\xff\x01c\x01V\x0bv\x04\x7f\x00\xf9\x08R\xff&\x04]\x07\xfa\xfc\n\x01P\n\xcb\xfc*\xff\xff\x05,\xfdb\xff\xe1\xffH\x06\x1e\xfbS\xf1\xbd\x07@\x10n\xeb\x1c\x01\xcf\x0b0\xf5\x9e\xf2(\x0c\xb1\x06\xb4\xef"\x01\xaf\t\xd3\xfeF\xf7`\x05Y\xffy\xff\x10\xfae\x05\x83\x04\x80\xfc\x97\xfd\x0f\x07I\x02\xfb\xf4\x1e\xfd\xd9\x0b\xa2\xf8\x98\xf6\xd7\x06p\x01\xd6\xfc\x11\xfdB\xff\xc1\xf3\xb6\x04\x98\xfa\xbd\xfb\x95\xffe\x04\xc0\xfe\xc4\xf6\x13\xff9\x06\x18\xff\xc4\xf4e\x0b\xee\x069\xf7g\x01R\x11\xd4\xfd \xfd\x89\t\xc2\nE\xf9D\x06\x07\x0fb\xff\x94\xfd\x9e\x08l\x0b\xc7\xfa\xd5\x03a\x0c\xaa\xf8\x8b\xfb\x9b\x0b%\x02v\xfc,\x04\xf6\x00\'\xff\xfd\x01\xc3\xfd\x94\x03\xfa\xfa\x14\xff,\x03\xe6\x01\xfb\xfc*\xfe+\x01[\xfa^\xff\xbe\xfdQ\x00\xf8\xfd&\xfd\x1e\xfb\x1c\x06\xbf\xfe\x00\xf2\x8e\x0b\xd0\xf8\x96\xf8\xe9\x02\xcc\xfd\xe2\xfeH\xf8\xf8\x04\xc0\xf9\xc0\xfd\x1b\xffh\xfdm\xfc\x0c\x01]\xf5\x90\x00y\x03\xbb\xf5C\x08{\xf6\xe4\xfc\xa2\x00\xca\x00\x8f\xf9\x95\x02\xb6\xf6\xa2\x04&\x00\x85\xf9\xde\x02\'\xffn\x00\x13\xff\xaf\x02\xf8\xf5~\x0b+\xfa6\x02f\x05f\xfd\xe5\xfe6\x07\xff\x02B\xfc\x1e\x00\xc6\x07\xf4\x01\xa9\xfbp\x0bk\xfb\xce\x03\x0b\x05\x96\x01\xb9\xfa8\x05Z\x00\xf7\x02\xcf\x00;\xfc\xf6\x00\xec\x02\xed\xff\xbb\xfe\x85\xfc\xa5\x02\x11\x02\xbd\xf4\xac\x08\x9f\x06\x00\xf1\x17\x01n\x10u\xec\x88\x05x\r\x96\xf3\xdb\xffJ\x0eV\xfa\xa8\xfc\x0c\x07\xe4\xfd\x1b\x06l\xfb\xff\x05\x95\x01[\xfd\xfc\xfeJ\x055\x02\x17\xf6_\x07\xb9\xf7\x15\x04\xa6\x019\xf4\x0e\x06\xb7\xfc\xb7\xf9\xd5\xffV\xfd&\xfbz\x02\xbf\xee\x82\x0f\\\x02x\xea\x17\x08E\x00\xff\xfdo\xf5\x9d\x0f\xc5\xff\xcc\xf9\x8c\xff~\x00\x03\x073\xf49\r\x19\x01\xb7\xfd\xa8\x01!\x02\xf4\x01\xe1\xffh\xfeo\xfcX\n\xa5\xfdi\x03r\x04\x0e\xfd\xed\xffb\x06\xdb\xf9\x02\x00\xba\rL\xf4\xc0\x02I\x08\x92\x038\xf8\xc8\x03T\x03\xd6\xf8\x9d\x08p\x05\x1f\xff\xd8\xf5\x96\x14\xae\xfdH\xf0/\x10\xcf\x07\xfa\xed\xa7\xfd\xff\x15\xa7\xf6\x9f\xf9\xcb\x08\x80\x02\x9f\xf7\t\xf2|\x0c\xca\x00\xd5\xf1I\x00\xe7\x06\xae\xf9s\xef\xe9\x0b@\xf7\x8f\xf2I\x06\xe6\xf4\xe4\xfc&\x05\xaf\xf3`\x02]\xf9\xb7\xfc\xc1\x003\xf9\x1f\x03O\xf9\xeb\x04\xa7\xfa\x1c\x07\x1b\xfb\xd0\xfe5\x05\xbc\xfe\xda\x02\xda\xfdE\x05\x1a\xffH\x08\xfb\xfd\x1e\x05\xbb\x08\x0e\xf4\xa5\x04E\x08(\x02\xbc\xfd\x89\xff\xf9\x0fg\xf3\t\xfe\x90\x12\xa9\xfb\xd6\xef\x18\x06\xd9\x11\x95\xf0B\xfb:\x14\x9a\xfe\xda\xf1\xff\xfc\xb5\x0e\xde\xfd\xe0\xf5]\x0cK\xfd\xc6\xf2\xaa\x14r\x01h\xe9X\x04t\x0b\xaa\xf8\x9f\xf5\xb3\x0b5\x07\x86\xf2\xe0\xfa\x0c\x0f\xce\xf2H\x01\x9d\x00i\xfb\x1c\x05\x10\x00\x7f\xfcs\xfb\x93\x05\x0c\xfa\x90\x00\x1f\xf8O\x05\x17\xff\xb1\xff\xbb\xf2j\x05\xec\x04\'\xec\x99\x10x\xff\xf8\xec\xc3\x06v\x0b\xcc\xe5\x82\x04\xe7\x19`\xe6\x8f\xfa\xe6\x1a\x8a\xf2\x1b\xf7\x88\x05G\x08{\xfbd\xf7\xc7\x16\xc7\xfa\xe0\xf4\xfb\x0c\xd2\x04\xdc\xf9[\x07\xe6\xff"\xfa$\x0f\'\x01P\xfa9\x08\x9e\x04\xe8\xff\x83\xffq\xff\xac\x06R\xff\x80\x01W\x03\x89\x02}\x00\xb0\xfcX\x07x\x00\xed\xf7p\n\x08\xfa\xd0\x00%\x08\xb4\xf6\\\x05o\x02[\xfd&\xf9\x8b\x08\xee\xf9\x89\xfcq\tN\xf8\x80\xf9#\n\x04\xff\x9c\xf2\x9e\r\xcf\xf5\xc1\x005\x05\xe1\xf7\xda\xf9\x91\t\x1b\xfb(\xf4\x1d\x13h\xed\xe2\xfb\xe1\r3\xf5}\xf8\x06\xfe\xf4\x07\x88\xf4\x87\xfa\x02\x05,\xffu\xfa\xd2\xf7\xf4\x04!\x00\x17\xfcQ\xf6\xb9\x0b\xf6\xfc\xb3\xf5\xbc\x06D\x04\xc2\xfbj\xff\\\x06\x8f\xfa\x9c\x01X\
x03}\x02K\x00w\x10r\xf5\xeb\xffK\r`\xfbc\xfd\x82\x0c\xb3\x02\x88\xfb*\x00-\ns\xfc\x86\xf9\x01\t*\xfal\x018\xf5\x82\x11#\xfc\xe2\xef\x9b\n^\x06\xae\xf2\x88\x02@\x0c\x1d\xfa\xae\xf6\xb4\x13\x99\x01\x84\xeer\r\x00\x0c\xf1\xf7\x8e\xf1\x0e\x18\x97\xfc\x89\xef#\x0c\xde\x08\xa1\xefk\xff^\x0c\xde\xf2~\xfa\xfd\x08\x84\xf9\x81\xf3\x89\x06\xd0\x06\xf3\xec\x17\xfci\x0e\xe6\xf2\x99\xf3\x8b\x05_\x06\xb1\xf6\x85\xf3\xf5\x14\xa8\xfeH\xe2I\x12\xa8\x0f\xda\xe80\x01\xbd\x14\x8b\xf5\xd6\xf8#\r\x93\x046\xf3\x0e\x06~\x0b\x0b\xfd\x17\xfe\x08\x08\xfc\x06)\xf5p\x07\x0b\x08\xee\xfc\x8b\xf9\xd4\tf\x03\x0e\xfb\xc7\x02\x8c\x02F\x000\xfcQ\x00\x05\x01\x10\xfec\xfb\x03\x08$\x00\x84\xf9\x87\td\xff\xa6\xeeD\x03\xe4\x07L\xfd\x83\xfe\xb7\x02\xff\xfb]\xfd}\x05\x91\xf9\x97\xfer\xff\xad\xfa\x0e\x00\x14\x02d\x01_\xfcJ\xfb\xa8\xfc\x85\xfd\xd6\xfe\xb7\x02\x9a\xf6\x1a\xfd\xe8\x03\xd3\xfb\xb0\xfb\x9b\xfc6\x02\x94\xf5x\xfdn\x07\x81\xf9\x97\xff\x97\x03\xf2\xf6\xa1\xf6\xfa\x10\xe3\x01y\xf1q\x04\xd3\x04\xc7\xfe\xe9\xfb\xb8\x0c\xc5\xffi\xf7E\x03P\nD\tG\xf4\x96\x08\xe5\x08\xb0\xf6(\x07\x06\x0b\xad\x02W\x03\xec\x01\xa4\x012\x052\x02!\x05#\x01\n\x00\xcd\x03r\x02\xb8\x02E\xff\x19\xffL\xfe\xf1\xfd\xeb\x01A\xff\xec\xffz\xf8\n\xfal\x06\xa0\xf7\x90\xfd\xa2\xfe\xcd\xf9\xfb\xfe\x86\x001\xf8\xfa\xfb\xc2\x08\xac\xf3i\xf6\xa7\x07A\xff\xae\xf7\x9f\x00\x85\x01\xf7\xfa\xc9\xfb\xda\x02\xc6\xfb\xa3\xf8m\x03\xce\xfe\x87\xff\xb7\x00n\xff[\xfcd\xfe\x10\xff\xd1\xfdh\xfe \xfc7\xff\x90\x03\xae\x013\xfe\x1e\x01o\x01\xf4\xfdn\x03Y\x07\x17\x03\xe3\x01K\t;\x0b\xf8\x04\x80\nM\x0e\x8a\x04t\x05\xcd\x0f\xe8\x0c\xdf\x08<\x0cc\x08e\x08v\n\x05\n\xaa\x05\x8f\x01\x07\x05V\x02G\xfe\x8f\x02\xad\x01\x9e\xfa\x9b\xfa\xc0\xfa\xcf\xf8\xd9\xfa\xde\xf8I\xf5\x0e\xf8\x05\xf9]\xf7\x13\xfa\xa1\xfa\xaf\xf8e\xf8\x90\xfa\xee\xfd\x1e\xfe\x0f\xff\t\xfe\xa5\xfd\xc9\x00t\x01I\xff&\x00e\x03\xca\xfc\x95\xfa\x13\x02\xd8\x02\xcb\xfb\xa6\xfa\xa9\xfb\x0e\xf9\x0c\xf9\x04\xfat\xf9\xe3\xf4/\xf6\xb0\xf5\xf3\xf5\x80\xf8\xbe\xf5\x14\xf8\xca\xf2\x07\xf3\x0b\xfa\x84\xfc\xb1\xf6V\xf8\xe1\xfc[\xf7\xfb\xfb;\x00\x03\xff\xad\xf9r\xfb)\xffN\xfc\xbb\x01T\x01\xd9\xf9\x9f\xfd\xb9\x00d\xfd\x11\xfd\x0e\x00\xcc\xff\x8a\xf9,\xfc\xb1\x03\x87\x05\n\x02\x9c\xfdx\xffj\x03\x16\x04E\x05v\x05%\x07\x8d\t\'\x0bu\r\xa2\x0e\x90\rB\x0fM\x11=\x13\xcd\x19\xef\x1dH 
\xa7"k#\xd2"\xcc\x1e$\x1e\xd8\x1f\xef\x1f\xe6\x1b`\x18|\x18>\x17}\x11\xfc\n\x92\x03\'\xfdV\xf7u\xf2\x93\xf1\xbc\xef~\xeb\xb2\xe7\xd2\xe6\xc0\xe5[\xe3E\xe2#\xe0\xca\xde\x93\xdf\x0f\xe3\x14\xe8\xb7\xec\x14\xf0\x1f\xf2\x99\xf3=\xf7\x9c\xfb\xac\xfd\xc5\xfeb\x01\xbb\x02u\x05j\n\xba\x0b\n\x0c\xa5\x0b+\x07\xec\x035\x03=\x02*\xff\xb7\xfa\x08\xf8E\xf7\xb9\xf6\xcf\xf5\x8f\xf5&\xf1\xbc\xec\xa2\xebV\xecq\xee\x9f\xf0\xfd\xf1\xfa\xf2\xdb\xf6\xa1\xfc\x07\x01\x04\x03\x08\x05G\x04q\x06\x05\n\xd5\rJ\x11\x9f\x12R\x12\xc3\x11\xa1\x12\xef\x12\x1c\x10s\x0b\xb0\x07\r\x05u\x03\xb4\x02\x84\x01\x93\xfe\xbf\xfa"\xf7^\xf5\xfc\xf3\x10\xf22\xefG\xed\xa8\xed(\xf0\xd8\xf2\xf4\xf2\x19\xf3\xd4\xf3\xbd\xf3\x10\xf4\xa0\xf5\xca\xf6\xd1\xf7n\xf9\x18\xfbt\xfc\xce\xfdO\xfd\xf3\xfb\xe9\xfa#\xfa\xb0\xfa\x9d\xf9\x9d\xf8\xcc\xf8\xf4\xf9`\xfc\'\xfc\x9a\xfb\xa9\xfb\x87\xfc\x03\xff~\xffs\xfd\xce\x03U\x16O%?*\xc9\'\x0b*\xb52r5\xff2\x872F5T5e2\x9d2\xf14\xc10\x15#\x99\x13\xa4\t\x03\x04\x94\xfco\xf3\xc7\xeb5\xe7\xb2\xe3\xf5\xdf\xfa\xdc\xd7\xd9\xf3\xd5\x89\xd0\x89\xcb#\xcd\x84\xd5\xdf\xdc\x1b\xe1\x94\xe6\x86\xee#\xf6\x80\xfc|\x01D\x06+\x08\xe6\t\xa9\x0e\xb4\x13b\x19\xd5\x1d[\x1cL\x19\xb7\x16\x9f\x13\xe2\x0eR\t)\x03\xb1\xfbl\xf4\xac\xef\x8d\xef\x86\xed^\xe9\x06\xe4;\xde\xc0\xdaU\xd9P\xda\xb9\xdc\x17\xdes\xe1\x1d\xe8\xa2\xee\xa9\xf5\xe0\xfa\xab\xfe\x93\x01y\x05\xfe\t\xbf\x0f\x85\x15\x06\x1a\xc1\x1d\xcb\x1e\xd2\x1d\x16\x1e=\x1d\xfb\x19\xcd\x15v\x11[\x0e\x0c\x0b\xe8\x07\xe4\x04T\x01\xd7\xfc\xa4\xf8?\xf5\xcf\xf2%\xf2~\xf1\x99\xf0\xcc\xef{\xf0\xa0\xf2`\xf4A\xf5m\xf6R\xf7\xf8\xf8H\xfa\xc6\xfa\xe9\xfb4\xfc\xd6\xfb\x12\xfa\xa0\xf8A\xf8\xa8\xf8N\xf6\xc6\xf3\x8b\xf2l\xf1\xd4\xf1q\xf1\xa6\xf0\xd2\xf0\xd3\xf0f\xf2\xae\xf3\xcf\xf3\x9c\xf5\x8c\xf9\x9c\xfcX\x00\n\x05\xcb\x07\x8e\t[\x0b3\x0f\xad\x16\xff n+\xfa2_6\xd86\xc87\xd48\xf96\\2^,\x98(k\'\xa8$\xd1\x1f\xaf\x19\x9d\x11Z\x076\xfc\xa7\xf2\x90\xeb\xc4\xe5\x1d\xdf\xf5\xd8\x97\xd6\x8a\xd7D\xda\x89\xdc\xf8\xdc\x11\xdc\xa2\xdc&\xdf\x86\xe2X\xe7k\xed\xf6\xf3\xe9\xf8N\xfe\x99\x05f\ry\x12T\x14\xcc\x13w\x13\xbb\x13\xf8\x13E\x13\x88\x11\x03\x0f\xd7\x0bg\x08y\x04\x1f\x01\xf0\xfc\xf5\xf6\x9c\xefM\xe9\x85\xe5\xab\xe3\xf8\xe1o\xe0\xd3\xdf6\xe0\x83\xe1\x94\xe3\xb9\xe5~\xe8-\xeb\x9a\xedO\xf18\xf6x\xfcr\x02V\x07"\x0b\xf2\x0e\x18\x12m\x14\xc7\x15\x85\x16\xb2\x16+\x16\xa4\x14\xb0\x13\x12\x13\xcc\x11i\x0f\xaf\x0b\xdd\x07i\x05\xa6\x02T\xff\x05\xfd\xee\xfa\x18\xf9\xd7\xf7\xfd\xf6\x89\xf6\xed\xf6\x86\xf7_\xf7p\xf6\xd7\xf5\xf7\xf6G\xf8\x7f\xf8\xd1\xf8\x0e\xfa\'\xfb\x9a\xfbo\xfc\xa8\xfc\xcb\xfc\xce\xfd\x0b\xfe\x8a\xfdh\xfd:\xfe\xf3\xfe\x88\xffp\x00\xd4\x01\xd0\x02\x8a\x03d\x04\xdc\x04\xde\x05\x8b\x06\xdc\x06t\x07\xe2\x07\x97\x08s\tN\tz\x08z\x083\x08\x9c\x06\xf7\x04\xa1\x044\x05.\x05,\x04\r\x02\x8b\x00\x80\x00\xac\x00\xd5\x00o\x00\xf6\xff\x03\x00D\xff\x99\xfeE\xffr\x00\xc8\x01\xd8\x02\x19\x03.\x031\x04\x8b\x05k\x06i\x07\xf1\x08E\n\xa3\n\x8e\n\xd4\ns\x0b\xb3\x0b\xfa\n\xad\t>\x08\xdf\x06j\x05\xad\x03R\x02(\x01\xa4\xff\xc2\xfd\xb7\xfbT\xfa\x94\xf9g\xf8\xa9\xf6$\xf5c\xf4+\xf4\xda\xf3H\xf3/\xf3@\xf3D\xf3\xbf\xf3\x86\xf4\xbd\xf5\x1f\xf7\x9f\xf7z\xf8\xac\xf9{\xfa\x89\xfb\'\xfc\xbf\xfc\x85\xfd\xd2\xfdR\xfeq\xfey\xfe\xe5\xfe\xdb\xfe\xb3\xfe\x7f\xfe\x1c\xfe\xbf\xfd\xc9\xfd\xd7\xfd\xd7\xfd\x9a\xfd\xc1\xfdp\xfe\x8a\xfe\xf6\xfe\xcf\xff^\x00\x07\x01\x85\x01\xdf\x01\x99\x02i\x03\xf5\x03f\x04\x8a\x04\xa1\x04\xf5\x04]\x05]\x05\x1c\x05\xcb\x04_\x04\xc5\x03\x17\x03\x15\x03\xc9\x02\x1d\x02\xaf\x01\xde\x00j\x006\x00\xc9\xffW\xff;\xff=\xff\xcf\xfe\x95\xfe\xc9\xfe\xd6\xfe\x9f\xfe\xa2\xfe\xf0\xfe\xa3\xfe\xbb\xfe\x08\xff\xbd\xfe\xb4\xfe\xd5\xfe\x97\xfe4\x
fe\t\xfe\xcf\xfd:\xfdy\xfc\x0e\xfc\xbf\xfb\x04\xfbl\xfa5\xfa\xca\xf9R\xf92\xf9*\xf9\x19\xf9\\\xf9\x02\xfa\xbc\xfa3\xfb\xfd\xfbA\xfdU\xfee\xff|\x00\xc3\x01\xda\x02?\x04h\x05e\x064\x07h\x08\x01\t\x18\t\x88\t\xb2\t\xc5\t\xb9\t\x9d\t"\t\xc7\x08W\x08\xda\x07\x1c\x07\x8f\x06\x11\x06\x82\x05\xa1\x04\xdc\x03s\x03[\x03\r\x03\x86\x02\x85\x02\x93\x02l\x02:\x02u\x02z\x02w\x02C\x02(\x02\xed\x01\xd7\x01\xac\x01<\x01\xcf\x00N\x00\xd4\xff?\xff\x86\xfe\xac\xfd\x1d\xfd\xb5\xfc\x13\xfc0\xfb3\xfa\xa3\xf9(\xf9v\xf8\xdb\xf7g\xf7\x0c\xf7\x01\xf7B\xf7a\xf7\xa4\xf7\x12\xf8u\xf8\xa6\xf8\x02\xf9\x0b\xfa\xb8\xfa\x0b\xfb\xdb\xfb\xf4\xfc\x01\xfe\xe4\xfe\xdc\xffL\x00\xa5\x00M\x01\xd3\x01\x02\x02v\x02\xb4\x02\xbf\x020\x031\x03\x0c\x035\x03\x1b\x03\xa5\x02\x9c\x02u\x02,\x02\xf8\x01\x00\x02\xdb\x01~\x01~\x01\x98\x01I\x01\x0f\x01/\x01#\x01\xde\x00\xb9\x00\xdf\x00\xe1\x00\xb5\x00\xa3\x00\xad\x00_\x00\xfd\xff\xcf\xffz\xff\x02\xff\xa1\xfe\x0e\xfel\xfd\xe5\xfc\xa0\xfc8\xfc\xbc\xfb,\xfb\xbe\xfa\xb0\xfa\xab\xfa\xa0\xfa\xd5\xfaW\xfb\xbd\xfb)\xfc\xf9\xfc\t\xfe\xa2\xfe0\xff\x04\x00E\x01\x0c\x02\xc1\x02\x8f\x03J\x04\xd5\x04b\x05\xef\x05\x01\x06\xee\x05\xf0\x05\xbd\x05I\x05\x1c\x05\xa8\x04\x15\x04\x97\x03;\x03}\x02\xe9\x01\xb8\x01-\x01\xb0\x00q\x00o\x00;\x00a\x00\x93\x00\x94\x00\xc2\x00_\x01\xc8\x01\xff\x01]\x02\xd4\x02$\x03t\x03\xcb\x03\xe0\x03\x0c\x04\x19\x04\x10\x04\xda\x03\x9e\x03M\x03\x97\x02\xfa\x01[\x01=\x00X\xff\x97\xfe\x88\xfd\x99\xfc\xc2\xfb\xe9\xfa0\xfa\xbf\xf9Y\xf9\xf6\xf8\xae\xf8\xcd\xf8\xbb\xf8\xfe\xf8\x85\xf9\x15\xfa\xf3\xfa\xa9\xfb\xa0\xfc\x99\xfd\x7f\xfe;\xff\x0f\x00\xdb\x00\x82\x01\xf7\x01r\x02\xf8\x02B\x03f\x03\x81\x03f\x03\x14\x03\x1a\x03\xe4\x029\x02\xb2\x01\x8c\x01&\x01\x98\x00\x0f\x00\xf4\xff\xb3\xff\n\xff\xe3\xfe\x01\xff\xef\xfe\xe5\xfe\xf1\xfe\xfa\xfe6\xffv\xff\xa5\xff\xd8\xff\x00\x00=\x00Z\x00\x80\x00\xca\x00\xe8\x00\xed\x00\xb5\x00\xa2\x00\x9e\x00\x7f\x004\x00\xe4\xff\x7f\xff\x1f\xff\xba\xfel\xfe]\xfe\xfc\xfd\xbc\xfd\x87\xfdN\xfd$\xfd2\xfd\'\xfd1\xfdu\xfd\xbc\xfd0\xfe\x90\xfe\xe7\xfeG\xff\xb0\xff/\x00\x80\x00\xe3\x00+\x01&\x01:\x01\x84\x01\xa4\x01w\x01m\x01n\x013\x01\xd9\x00\x97\x00W\x00\x07\x00\xdf\xff\x9c\xff\x8e\xff\x95\xffy\xff`\xff\x98\xff\xb4\xff\xba\xff\xf1\xff9\x00\x96\x00\x0f\x01k\x01\xc4\x015\x02\x96\x02\xc9\x02\t\x03+\x034\x039\x039\x03+\x03\xef\x02\xc5\x02{\x02\x1e\x02\xb5\x014\x01\xb1\x008\x00\xaf\xff\x08\xffX\xfe\xc8\xfd\x9a\xfd\x1d\xfd\x94\xfcB\xfc\x13\xfc\x06\xfc\xfc\xfb\x10\xfc?\xfcb\xfc\xa9\xfc\x06\xfd\\\xfd\xd1\xfdA\xfe\x8f\xfe\xec\xfeU\xff\xa2\xff\xf7\xff2\x00k\x00\xa3\x00\xd4\x00\xd3\x00\xc9\x00\xe1\x00\xdd\x00\xc8\x00\xa8\x00\xab\x00\x9b\x00a\x00S\x00[\x008\x00\x16\x00\x00\x00\xfa\xff\xfa\xff\x1b\x007\x00:\x00K\x00o\x00\x80\x00\xac\x00\xf2\x00\x06\x01\x13\x01?\x01|\x01\xa1\x01\xdb\x01\xd9\x01\xcb\x01\xdd\x01\xbe\x01\xbd\x01\xb8\x01\x95\x01e\x01]\x01"\x01\xd0\x00\xa9\x00\x8e\x00G\x00\xeb\xff\xb1\xfff\xff8\xff)\xff\x04\xff\xd6\xfe\xc8\xfe\xcb\xfe\xbc\xfe\xbc\xfe\xc3\xfe\xdc\xfe\xd1\xfe\xba\xfe\xcb\xfe\xe2\xfe\xf4\xfe\x06\xff\x07\xff\x0b\xff\x1c\xff\x03\xff\xfd\xfe%\xff\x14\xff\x0c\xff\x04\xff\r\xff7\xffa\xffj\xff\x80\xff\x9f\xff\xcf\xff\t\x00<\x00r\x00\xa7\x00\xba\x00\xeb\x00/\x01n\x01\xb0\x01\xde\x01\x11\x02L\x02z\x02\xa4\x02\xe0\x02\xf6\x02\xf8\x02\xea\x02\xd5\x02\xb4\x02\x97\x02\x85\x02;\x02\xeb\x01\x92\x01;\x01\xc7\x00W\x00\xf1\xff\x8d\xff7\xff\xea\xfe\xbb\xfe\xa0\xfe{\xfeZ\xfe8\xfe*\xfe1\xfe0\xfe>\xfeg\xfe\xa1\xfe\xd2\xfe\xf8\xfe 
\xff=\xffm\xff\x83\xffx\xfff\xffb\xff^\xffZ\xffK\xff1\xff\x0c\xff\xd7\xfe\x9e\xfen\xfe7\xfe\xfc\xfd\xd4\xfd\xbb\xfd\xad\xfd\xae\xfd\xbc\xfd\xe4\xfd\x13\xfeQ\xfe\x9c\xfe\xee\xfeI\xff\xb5\xff\x12\x00f\x00\xbf\x00-\x01\x92\x01\xe0\x01/\x02w\x02\xad\x02\xd9\x02\x04\x03\x18\x03\x19\x03\x18\x03\x07\x03\xed\x02\xca\x02\xa0\x02d\x02\x1b\x02\xcd\x01}\x01+\x01\xde\x00\x85\x001\x00\xe4\xff\x8f\xff>\xff\xf5\xfe\xb0\xfev\xfe;\xfe\x06\xfe\xd7\xfd\xba\xfd\xa7\xfd\x93\xfd\x89\xfd\x80\xfdt\xfdg\xfda\xfdd\xfdm\xfdz\xfd\x97\xfd\xc7\xfd\xe5\xfd\xfb\xfd*\xfeg\xfe\x93\xfe\xc1\xfe\xfc\xfe9\xffo\xff\xb9\xff\x03\x00G\x00\x87\x00\xcf\x00\r\x01E\x01\x8b\x01\xc0\x01\xe2\x01\x05\x02\x18\x02&\x02+\x02%\x02\x1b\x02\xfd\x01\xd9\x01\xbb\x01\xa8\x01\x96\x01u\x01H\x01\x12\x01\xec\x00\xc1\x00\x9c\x00|\x00`\x00C\x007\x006\x00<\x007\x00\x1a\x00\x0b\x00\x03\x00\xf8\xff\xed\xff\xe8\xff\xf3\xff\xef\xff\xed\xff\xe9\xff\xe2\xff\xde\xff\xd3\xff\xb3\xff\x89\xfff\xffD\xff\x1d\xff\xfc\xfe\xd7\xfe\xb2\xfe\x8b\xfeh\xfeP\xfe;\xfe\x1f\xfe\x03\xfe\xf4\xfd\xf2\xfd\xf0\xfd\xfb\xfd\t\xfe\x12\xfe9\xfeo\xfe\x9c\xfe\xd1\xfe\r\xffP\xff\x85\xff\xbc\xff\xfb\xffA\x00\x82\x00\xc0\x00\x08\x01E\x01\x81\x01\xb4\x01\xd7\x01\xf4\x01\x10\x025\x02J\x02S\x02V\x02Y\x02]\x02]\x02W\x02E\x02%\x02\x07\x02\xe5\x01\xc0\x01\x92\x01^\x01 \x01\xde\x00\xa6\x00w\x00>\x00\xfc\xff\xbd\xff\x7f\xffG\xff\x11\xff\xd4\xfe\x96\xfe[\xfe4\xfe\x19\xfe\x05\xfe\xf9\xfd\xf5\xfd\xf4\xfd\xf1\xfd\xfd\xfd\r\xfe\x16\xfe\'\xfe>\xfe^\xfe\x82\xfe\xb5\xfe\xe6\xfe\x14\xffE\xffj\xff\x87\xff\xa9\xff\xcd\xff\xf6\xff\x17\x00:\x00]\x00\x80\x00\xa3\x00\xbf\x00\xdb\x00\xe8\x00\xef\x00\xff\x00\x13\x01 \x01$\x01\'\x01!\x01&\x01\'\x01\x16\x01\x04\x01\xf2\x00\xdb\x00\xd5\x00\xc6\x00\xbd\x00\xad\x00\xa1\x00\x9e\x00\xa2\x00\xaa\x00\xa8\x00\xa5\x00\xa4\x00\xac\x00\xbc\x00\xca\x00\xc8\x00\xc9\x00\xc4\x00\xc3\x00\xc8\x00\xb5\x00\x9c\x00\x81\x00f\x00Z\x00J\x00.\x00\x03\x00\xe4\xff\xd0\xff\xb1\xff\x8d\xffd\xff9\xff\x11\xff\x05\xff\xf6\xfe\xe6\xfe\xc6\xfe\xa4\xfe\x8d\xfe\x91\xfe\x90\xfeh\xfeX\xfeQ\xfeM\xfe^\xfez\xfe\x8d\xfe\x9b\xfe\xbb\xfe\xd7\xfe\xff\xfe\'\xffT\xffy\xff\xa5\xff\xd1\xff\t\x007\x00]\x00\x8c\x00\xa7\x00\xbe\x00\xd5\x00\xeb\x00\xfd\x00\x02\x01\xfe\x00\x00\x01\x05\x01\x04\x01\xfb\x00\xed\x00\xe5\x00\xd4\x00\xbc\x00\xa8\x00\x93\x00~\x00k\x00U\x00?\x00%\x00"\x00\x15\x00\x05\x00\xfc\xff\xf2\xff\xee\xff\xe6\xff\xde\xff\xd8\xff\xcf\xff\xbe\xff\xb8\xff\xad\xff\xab\xff\xac\xff\xa7\xff\x9f\xff\x97\xff\x94\xff\x84\xffu\xffc\xff\\\xffS\xff@\xff2\xff$\xff&\xff#\xff\x1d\xff+\xff3\xff>\xffG\xffT\xffb\xffv\xff\x8c\xff\x9c\xff\xbb\xff\xdc\xff\xf4\xff\x07\x00\x1c\x00,\x000\x00F\x00c\x00a\x00k\x00{\x00\x82\x00\x8b\x00\x90\x00\xa2\x00\x9b\x00\x84\x00\x83\x00}\x00s\x00k\x00j\x00`\x00S\x00M\x00N\x00^\x00U\x00P\x00Q\x00`\x00v\x00z\x00y\x00{\x00\x87\x00\x98\x00\xa3\x00\x9b\x00\x91\x00\x89\x00q\x00_\x00T\x008\x00(\x00\n\x00\xf4\xff\xd8\xff\xc3\xff\xaa\xff\x8d\xff\xa3\xffg\xffj\xff\x7f\xffH\xffe\xffH\xff\n\xffk\xff\x0e\xffU\xff\x1d\xff\x19\xff)\xff\xf5\xfe{\xff\x0b\xff_\xff^\xff,\xffi\xffj\xffp\xff\xad\xff\x94\xff\xcb\xff\xdb\xff\xba\xff\xe1\xff\x1f\x00\x00\x00\xf4\xff\xf6\xff\xfb\xff0\x00)\x00~\x007\x00\xc2\x00l\x00t\x00\xcb\x00\xa3\x00 \x01\xda\x00\xf2\x00\xf7\x00\x12\x01\xe9\x00\x9d\x01\x9c\x00\x1a\x02~\xfe\x1a\xffM\x0e\x90\tk\x02o\xfe\x02\xfd\xc5\xff\xff\xfc\xf2\xfc\x93\xfcQ\xfd\x93\xfc\xda\x00\x82\x03N\x00 
\xfd\x00\xfe\x07\x03\xf8\x02x\xfc[\xfc\xae\x01\xc7\n:\x08;\xfe\xa1\xfa\x88\xf8\xfb\xfa\xb6\xff\xef\xfda\xfd#\xfe\x88\x03\x11\nk\xfc\xbc\x02\xb8\x07\xad\x03\n\x06z\x05b\x04[\x03\xb0\x07\xce\xfe\xd9\xf8x\xfc+\x01$\x03C\x03M\xfd\x18\xf5\xb1\xef-\xec\xa1\xeb\x8e\xf2\xc1\xf9\xf0\xfb\n\xff\xf6\xfaH\xf7B\xfb \xff\xc9\x00\xf2\x02\xba\x05\xc2\x07b\x01(\x03\xfc\x04\xd3\x08_\x0e`\x08\xc0\x05\xd5\x00-\x04\x99\x07\x88\x07\xee\x03!\x02\xf0\xffn\xfe\xe2\xfd\xa6\xf9D\xfb|\xfd=\xffX\xfe2\xfc\xa0\xfc\xec\xfbQ\xf8\\\xfba\xfb4\xfc\x14\xff\x90\xfd\x15\x00\x06\x01\x1e\x01\x11\x06\x11\x08!\x08"\x04\xf3\x01}\x02\xc5\xff\xc0\x01\x0c\x02\x10\x05\x1a\x02\xb6\x04\x91\x02\xb3\xffi\x01\x90\xfd \xfb\xbb\xf8\x8e\xfd\xdd\xfcw\xff_\x01\xcc\xfd{\xfa\x07\xf7\x04\xfa]\xfe\x07\x01\xa9\x01\xa5\x02\xd5\x00\x98\x01\xfd\x00\xf7\x00k\x01\xca\x01\x98\x01.\x07\xf0\x12Q\x0f\xcb\x08\xce\xffA\xfep\xfb\xdb\xf6\xa5\xf9\xe1\xf7F\xf8\xec\xfd\x8b\xff\x80\xff\t\x01\xc6\xfe\xee\xff8\xfcu\xfa\x01\xfc5\xfa\xab\xfcE\x003\x02]\x02Q\x02\x90\x03\xba\x03\\\x02\xf5\x02\xab\x02\x08\x01~\xffQ\xfe\x86\xfc{\xfa\x1d\xfe\xc4\xfd\xe8\x00\xe1\x01\xbc\x01`\x011\xfd=\xfa@\xf6D\xf94\xfe\xd1\x00|\x02\xc7\xfe\xd8\xfd\x86\xff\xf5\xff\xc9\x01\xcf\x05Z\x08\xab\x07\xba\x048\x01 \xfdJ\xfb\x03\xfb\xe8\xfa\xe0\x01\xde\x06\x11\x06\xb7\x05t\x02\x14\xfd\xa7\xfd\xf9\xf9Y\xfaW\xff\x7f\xfe\xb6\xff\xd2\x007\x00\x8a\xfeE\x00\xc2\x01p\x04\xe9\x04\xe0\x05\xf1\x05&\x01\x13\xfe)\xfb\xce\xfc@\x03_\x04\xe7\x04C\x043\x00\x97\xfc\x82\xfd\xf6\x00\xec\x01\x90\x00\x01\xff6\xfd\xf6\xfa\x1d\xfa\xf2\xf9#\xfd\xcd\x01\xca\x04\xa6\x06\xf2\x03\x95\xff\x9d\xfd:\xfa\xab\xf8\xe7\xfa\xec\xfd\xb5\x01\xf5\x034\x03L\x01\xac\xfc\xd2\xfc=\x00\x12\x01\x84\x01u\x01"\x03\x0b\x02\xc8\xfeL\xff\xbd\xff\xe8\xff\xf5\x00\xe4\x00\x81\xff\x90\xfco\xfd9\xff\xf7\xfeQ\x04\xb2\x05\x11\x03\xf2\xfe/\xfcA\xffG\xff\xdc\x01\x97\x03\xe8\x00\x9e\x00\x1b\x01\xb5\x00e\x01\xca\xfe\xd8\xfe&\x00\'\x01\xde\x00y\xff\xe6\xfc\xe6\xfc\x8a\x00\n\x01\xcf\x02h\x03\x0c\x04-\x03\xfe\x00<\xff?\xfdb\xfa\n\xfa\xd2\xfc?\x01\xc1\x04\xd9\x04\x9e\x04Y\x02\xd9\xfe\xde\xfb\xe0\xfa\xe7\xfa\xa3\xfe\x08\x03\xf7\x04D\x03#\xfe\xd5\xf8B\xfc\x1b\xff=\xff|\x03\x82\x04\x1d\x01\xd0\xfdN\xfeg\xfdN\xfc\xfc\xfb\xe8\xfcy\x00\xdc\x02V\x00\xa6\xff\xc4\xff\xb8\xfd\xb3\xfcr\xfb;\xfe$\x02\xd3\x05\xaa\x06^\x01\x08\xfe\xe7\xfd\xfe\xfdu\xffb\x00\xbc\x01\xf8\x03\xf6\x04\xed\x03[\x01y\xfeZ\xfe\xe1\xffw\xff>\x03\x03\x04(\x01\x08\xfe\xd5\xfb\x1e\xfd\xce\xff\x9d\x02X\x015\x01\x87\x00\xfc\xfe\xf0\xfd\'\xf8\x9e\xf5\xc8\xf9\xac\xfei\x03%\x05\x05\x03\x8e\x00\\\xfeG\xff[\x00\xec\xfe\xd3\xffI\x03 
\x07\xcd\x04\xd9\x01\xae\xff\xec\xfb\xd8\xfb\xad\xfa\xa2\xfd\xdc\x03s\x07\xab\x05L\x00\xa9\xfcy\xf9\x99\xf9\xf9\xfc\x0c\xff\xfd\xff\xe9\x01\x82\x02\xac\x01\x85\xff+\xfc(\xfa<\xf9N\xfau\xfd\xb4\x00\xef\x03Z\x05\x11\x05j\x03\xdd\x00\xd5\xff\xd6\xff\xd5\x00\xc8\x01w\x01\xbf\x00\'\x01\xb8\x01y\x00\x07\xff@\xff\xcb\x01\xb8\x01B\x03n\x04*\x01\x85\xfeK\xfb\x83\xfbN\xfdq\xfd_\xfe\xb1\x00U\x04\x8b\x03\xbf\x01\x18\x00(\xfdH\xfd\x9f\xfdU\xff\xa9\x00&\x00\x97\xff\x9a\xff\x8f\x00\x1e\x00\x81\x00J\x01%\x01\xda\x00+\xff\xb8\xfd=\xfe\x9f\xfe\xdd\xfe\'\xff\xa7\xff-\x00\xf1\xff`\xfe\x80\xfdM\xfe^\xfe\xb0\xff5\x01\xda\x01\x9a\x02\x87\x01\x06\x01\x13\x01\xc7\xff\x83\x01\xfa\x02\x97\x03|\x04\xd6\x02\xd2\x00\xbf\xfe\xc0\xfb\xc6\xfb~\xfeD\xfe\x05\x00\xb1\x02\xfe\x01\xfc\xff\x07\xfe\xfa\xfc\xaa\xfd\xd7\xff\xf0\x01\xda\x02\x8d\x024\x02\xc2\x011\x01\xb1\x00\x19\x00L\xff\xf1\xff\xac\x00|\x00*\xff9\xfe\xa8\xfd\x80\xfc\xf5\xfd\x94\x00\x86\x03^\x047\x043\x03K\x01\xe6\x00\xf0\xff5\xff;\xff4\xfd\xdd\xfc\xb1\xfex\xfe\xd8\xfe\n\xfe\xdb\xfcg\xfd\x8f\xfdC\xfds\xfd\xc2\xfeL\x00\x9d\x01\xc4\x02p\x04\n\x03}\x01\x8d\x00\x01\xff\xc7\xfe!\xff\x97\xff\x1f\xfe(\xff\xe4\xff\x9a\xfd\xc6\xfc\x8f\xfc\xd9\xfd\xe4\xff\x82\x01w\x03+\x06\xd8\x07\x10\x06T\x03\xbb\x00[\xff(\xfe\x1e\xfd\xfd\xfc\x88\xfd%\x00\xba\x01\xe9\x01\x13\x01\xa8\xffj\xff"\xfe\x93\xfd\xc1\xfd;\xfe\x98\xffr\x00\xed\xff\\\xff\x86\xfe\x81\xfeF\xffq\xff\x17\x00\xfb\xffI\xff\xda\x00\xf1\x02\xb0\x02{\x02\xc3\x02\xaa\x02\x1c\x03U\x03\x10\x02k\x01k\xffh\x00\xab\x00\xcd\xff\xf9\xff:\xfe\xaa\xfe\x10\xffE\xffB\xff\x1e\xffu\xff>\x00\xe3\x00\xb5\x00\xd9\xff\xba\xff\xc3\xff\xb5\x00`\x02\xe6\x02 \x04\x8d\x04\xbd\x04>\x04\x99\x03\xc9\x02\x1b\x01\x96\xff]\xff\xfa\xff\xd3\x00\x1c\x01K\x00\x13\xff\x0e\xfeq\xfd\xed\xfcl\xfc\xa3\xfb\x07\xfcb\xfdN\xfd\x05\xfd\xd3\xfc\x88\xfcr\xfc|\xfc\x89\xfc\x8a\xfc\t\xfdy\xfdH\xfe\xfd\xfeh\xfe\x0c\xfe\x8a\xfdM\xfd\xd8\xfc\\\xfb\x85\xfa\x0b\xfa \xfa\x00\xfa\xc1\xf9\xff\xf9B\xf9\xde\xf8\xe7\xf8\xc8\xf8\xa7\xf8\r\xf8\xa9\xf8-\xfak\xfbw\xfc\x85\xfd\xbc\xfe\xd7\xfe\xd7\xfe\xbb\xfe\n\xfeo\xff0\x00G\x01\xc9\x02U\x02\xed\x02#\x03]\x03%\x03d\x01f\x01+\x01>\x02\x9c\x03\n\x04\xb1\x05 \x05r\x05\xf9\x04\xdb\x039\x03>\x02\x85\x02t\x04U\x07\x9c\tW\x0b\xc4\x0c\xeb\r\xd7\x10\x99\x13t\x15t\x18\xda\x1b/ 
h#\xc6#\x16"Q\x1d\xe4\x17\xe4\x11<\x0b\xd6\x04\xc6\xffZ\xfc\x16\xfa\xcb\xf8\xc0\xf6\xc5\xf3\xcd\xf0\x1d\xee2\xecb\xeb;\xeb\x04\xec\xcd\xed\x01\xf0\xe6\xf1\xf7\xf2n\xf3\x92\xf3\xcc\xf4\x8e\xf6\xf6\xf8\x06\xfc\n\xff\xe6\x01\xf4\x03f\x04c\x03=\x01\x86\xfe\x04\xfc\x00\xfaG\xf8B\xf6c\xf4A\xf2\xd9\xefv\xed\xd4\xea7\xe9\xd0\xe8\x86\xe9\x8d\xeb=\xee\xf6\xf05\xf3F\xf4\xab\xf4\x1d\xf5\x83\xf5H\xf6M\xf8\xfd\xfa\xcd\xfd\xe4\x00\x14\x03\xe9\x03\x84\x03\xf6\x01\xb4\x00\x92\xff\x00\xff\xf0\xfe\xca\xfe\xda\xfe2\xfe\x98\xfdi\xfc\x95\xfa\xf8\xf9\x03\xfa\xb2\xfb\x88\xfeY\x01\x81\x03\x1f\x05\xe3\x05\xd7\x06\xed\x07C\x08\xbc\t\xd9\nF\x0c\xc8\x0eD\x0f\xca\x0eY\x0c\x17\x08\t\x06\xf8\x06L\x0c1\x15%\x1f<)\xe21K7\x837\xf03\xdd-}(\x86%u#G!\x15\x1de\x16\x04\r\xce\x01\x08\xf6\xbf\xebv\xe6\xed\xe55\xe8\xd8\xeb\xe5\xed\xb1\xed\xec\xeb\xaa\xe8\x00\xe6\xc8\xe4O\xe5\xc2\xe8:\xeeW\xf4"\xf9d\xfb\xa8\xfb\xcb\xfb\x8f\xfd\xd1\x00\xab\x05s\ni\x0e\xc0\x10\xdc\x0f\x86\x0bU\x04P\xfcx\xf5\xb0\xf0m\xee:\xed\xbe\xec\xfc\xeb;\xea"\xe8\xed\xe4F\xe2\xe9\xe1\xc0\xe3\xdd\xe7K\xecg\xf0\x8f\xf3\xf8\xf4\x81\xf5\xe5\xf5\xab\xf6\x9b\xf8\x8f\xfb\xf9\xfe\xbc\x01;\x03D\x03z\x01:\xff\x8c\xfd\xcb\xfc7\xfd\xb2\xfdG\xfe\xaf\xfdw\xfcx\xfak\xf7\xbe\xf5j\xf4%\xf5\xc8\xf7\x8f\xf9\x8b\xfc\xe4\xfdD\xfe|\xff\xde\xfe\xc9\xff\xc4\x004\x02\x03\x06\x81\t\xda\r\xd3\x10\x8d\x12>\x12<\x0f\x01\x0b\xe4\x04Y\x00\xf6\xfe\xb5\x01a\n\xa6\x16\xf5$\x822\x11<\x86@\x02A\xe7>\x02\xfa+\xf8\x04\xf6p\xf5\x8a\xf4\xcd\xf3v\xf3S\xf2\x00\xf2\xea\xf1w\xf2\xca\xf4\xbc\xf7\xbd\xfb0\xff\x01\x01\xee\x02\x15\x03\xb5\x03\x11\x05<\x06\xb4\x08\xca\t3\n`\n`\x08\x98\x06V\x03\xca\x00\xa7\x00,\x02z\x07z\x0e\xdf\x17r#\xa9.\xdf9\xdaB!IXL\x08KEF\x91<\xfc/\xb3!\x8e\x11,\x03F\xf5\xe2\xe8\x15\xdf\x83\xd7%\xd4Z\xd4\'\xd8\xc6\xde*\xe6\xf8\xec\x99\xf1\xa3\xf4_\xf6\xce\xf7\x17\xfa\xa8\xfdy\x02\xad\x07\x05\x0c\x19\x0f\xe6\x10\x98\x11\xb0\x11\xc1\x10\xa2\x0e\xd1\n\xce\x04K\xfd\xbe\xf4M\xec|\xe55\xe0\xb3\xdd\x98\xdd\xef\xde\xc9\xe1\xf1\xe4\x05\xe96\xeeA\xf3\xc0\xf7I\xfb-\xfd\x97\xfe\x8f\xff\xf2\xffZ\x00-\x00\xd5\xffv\xffp\xfe\xce\xfc\x10\xfb\x0b\xf9\x07\xf8\xd8\xf7\xbc\xf7{\xf7\xa8\xf6s\xf5\xb8\xf4\xd2\xf4\xaa\xf5\xe8\xf6*\xf8\xaa\xf8\xee\xf7K\xf6k\xf4\x16\xf3\x11\xf3\xda\xf3\xcc\xf5\xbf\xf89\xfbK\xfd\xbd\xfe\xaf\x00\xa6\x03\xd3\x06b\x08|\x08\x80\x06\xd0\x03\xc3\x01.\xff`\xfd\xee\xfbH\xfb\x00\xfb\xe2\xf8,\xf5*\xf2\xb6\xf5\xac\x02a\x17\x0e/\xc5CnS\x8b]\xdcbzc\xe0^8V\xe6J3;\x83&\xad\x0c1\xf1 \xda\xb7\xca\xe3\xc3\x1d\xc4\xb6\xc7{\xcc\x9e\xd2\x80\xd9v\xe2\x01\xec^\xf5R\xfd\x01\x03\xdd\x06\xa3\x07\x8e\x06\xed\x04\xd9\x04\x8b\x08\xf2\x0e\xc8\x151\x1a\x99\x18\xc7\x12\xfa\t\x9b\x01F\xfa\xd4\xf2\x86\xebv\xe2\xbc\xd9\xc3\xd2\x13\xcf?\xd1\xdf\xd6:\xdf\xb0\xe8\xe2\xf0\xba\xf8\xbf\xfe\x80\x03j\x07\xd7\t\xd3\n\xc4\t\xa7\x06\xd0\x02^\xff\xd7\xfc\xc6\xfb\x0f\xfb\xe3\xf9\xf7\xf7\xac\xf5\x8a\xf4}\xf4\xd0\xf5\x90\xf8\xb9\xfa"\xfcn\xfb\xbc\xf8\x17\xf6\x19\xf6\xe5\xf8q\xfc\xfe\xfd 
\xfbL\xf6%\xf2\xb2\xef"\xf0\xd2\xf1\x9e\xf4\x04\xf8\xa8\xfa\x85\xfdC\x00\x1c\x03\xdb\x06\xba\n\x07\rH\x0c\xc7\x08S\x04\xa1\xff\xde\xfb4\xf8\xfb\xf3c\xf0\x1b\xec\xc4\xe7\xaf\xe4\xe1\xe6\xe0\xf4G\x0e\x06.=IkY\xc9b\'hwm2p\xfdh\xdfX\x11B\xaf&6\t\xdd\xe9\xb6\xce\xc7\xbc\xff\xb5\x08\xb7[\xb8\x83\xbam\xc1\xf4\xd0\xd8\xe7\xd8\xfd;\x0c\x7f\x12\xf0\x14\xc2\x160\x18\xbe\x183\x18u\x17\x03\x176\x16\xa6\x13\xe6\x0e\x8e\n\n\x08L\x06~\x002\xf5\xb2\xe6F\xd9.\xd0O\xcb\x96\xc8I\xc7.\xc8\xb5\xcd#\xd8\xbc\xe6\x04\xf7p\x06N\x13\x1b\x1c?!\xe2!2\x1f\x05\x1b\xa9\x14\xf6\x0b\xa0\x00\x07\xf5\xfa\xeb\x96\xe6t\xe4}\xe4D\xe5y\xe6\xf0\xe8f\xed.\xf3\xe5\xf9o\x009\x05\xfe\x07A\tp\tS\tn\x08k\x06\xde\x02\xe0\xfdl\xf8\xf3\xf2 \xef\xc2\xed\x0b\xee\x9f\xef\x99\xf12\xf4\xdb\xf7\xe0\xfb\xd4\x00\xca\x04\xaf\x06-\x07x\x05\xe7\x01\xaf\xfd\x12\xfa\xdf\xf6\xe2\xf1|\xe9\x08\xe0\xd7\xda\x8a\xde3\xebR\xfbi\x0b\xea\x1e&6?P"h\x90v\xa4zwwJo\x02bAO\xe46\xb0\x17b\xf7\xe1\xdb\x04\xc6j\xb7\xad\xafD\xaeb\xb3\x18\xbeQ\xcdh\xdf\xb0\xf2\xa1\x03~\x0f\xf5\x15\xff\x17,\x18\xe6\x187\x19w\x17\xaa\x13\xbf\x0e|\n\x0e\x08%\x07\x0c\x06\xaf\x01\xbb\xfa\x85\xf2V\xea\x08\xe4\xc6\xde_\xd9\xf8\xd40\xd2\x1a\xd4:\xda)\xe3C\xee\xce\xf9\xcb\x04[\x0fM\x17\xd3\x1b\xb1\x1cd\x19\x1e\x13\xae\n\x80\x01\x94\xf7\xab\xecp\xe3\xa9\xdd\xa7\xdbL\xdeQ\xe3\x1f\xe8\'\xeek\xf5\x7f\xfdz\x05V\x0bY\x0e\xac\x0f\xe7\x0ed\x0cs\x08\x16\x04G\x00v\xfcA\xf7V\xf1\x0b\xeci\xe9\xeb\xe9\x87\xeba\xed\x9f\xef\xf2\xf2\xbe\xf7\xbf\xfc\x90\x01(\x06\x19\n\xbc\x0c\xf0\x0c\xa2\n%\x06w\x01\xc7\xfc\xdf\xf5\xbd\xef\xf5\xe9`\xe4\xdb\xdf\x7f\xda\x9b\xd9\xc3\xe3s\xfar\x18\x9d1fC\xb1P9_%r\xac\x7f\xff\x7f#rSYR?\x06(x\x0e\xfc\xf1"\xd5R\xbc\xc8\xac5\xa8D\xab\x8b\xb3\xf4\xc0\xe1\xcf\'\xe0W\xf2.\x03B\x12Z\x1fb%&$) p\x1c\xd1\x196\x18J\x14/\x0c\x9d\x02[\xfb\x9b\xf7\x99\xf6\x05\xf5\x16\xf0\x1a\xe7\xbc\xde1\xdb\x96\xdb9\xdd\x82\xde^\xdeb\xe0t\xe7&\xf2=\xfdS\x06w\r\xa9\x11\x9b\x11"\x11 \x11\xc1\r\xd8\x07\x91\xff\xed\xf4\xfb\xec\xd9\xe9p\xe8{\xe6\xdc\xe5\xea\xe7\xa7\xec\xbe\xf3\xdb\xfb\xca\x012\x06W\n\x0f\r\xc6\r\xb3\r\xa9\x0b\x81\x06\'\x009\xfa\xde\xf4c\xf0\x17\xedq\xeaV\xe9S\xeb\x8a\xefe\xf4)\xf9\x06\xfe\x98\x02q\x06\x9c\t\xb9\x0bS\x0cR\x0b:\x08}\x03\xef\xfdR\xf8p\xf3\xf0\xee\xcf\xeb=\xeaG\xe7\x94\xe2\xa0\xe0\xe3\xe5\xc9\xf4\xfe\n\xef \xe50o>\xd5P\x18e-t\x0fygr&d\x1fT\x10B\xb8)\xec\x0c\xa7\xf0\x87\xd5+\xc0i\xb3\xaf\xad!\xaf\x9c\xb5s\xbe\xf5\xc9\x81\xdat\xef(\x03\xcd\x10a\x17\xa1\x19\x05\x1c>\x1f\x12 
\xa1!Z\x1dg\x15/\nO\x00\x7f\xf8o\xf0\xef\xea\xf9\xe9,\xeb8\xee\xb4\xf2\xb1\xf4\x81\xf3k\xf1\x1b\xee\xa2\xece\xf0F\xf7a\xfe\x8d\x05\xbf\x08)\t\xdf\x07W\x05\x99\x03\x87\x00\xd7\xfdL\xfb\xb0\xfa\xc5\xfc@\xff\xd3\x01\xac\x01/\xfd\xf1\xf7\xd6\xf1\xb9\xebA\xe9Q\xe9.\xea\xc6\xec\x9a\xefH\xf1\x1d\xf2\x94\xf1\xce\xee[\xec\xa9\xeb\x93\xea\x08\xea\xd1\xec\x8f\xf8\xe4\x18\xf1C(d(n\xeccWS$I\xc8D\xdeB\x92E\xecJhM\xd1G\xe76\x11"\x96\x0e\xbd\xf5\x01\xd9\xfa\xbd\xc9\xacz\xadg\xba\xee\xca\r\xd7\x86\xdd\xda\xdd\xc6\xdbH\xd9c\xdar\xe4\xa1\xf4D\x06\x01\x17H%\xb61\xd09\xe980/?\x1f\xe8\x0f\x18\x04\xc3\xfc\xa2\xf9\xec\xf9\x17\xfd\x9a\xfb\xa5\xf4)\xe8\xe0\xd89\xcc$\xc2\xf0\xbe\x9a\xc2s\xcb\x08\xdb:\xeb#\xf8\xb2\xff\xa1\x02\xb1\x02&\x01.\x01i\x03\x0c\tM\x13\x8b\x1e\xcd$]#\x87\x1c\xc8\x12q\x07\xd0\xfd\x8e\xf5/\xefq\xedx\xef\n\xf1\xbd\xf1\xf7\xf2o\xf2\xc0\xf0\x1d\xef\x12\xec\xc0\xeb\x9f\xf0\xb8\xf6\xcd\xfeY\x05\xb7\x07\xc1\t#\x08\x12\x05G\x03\\\x01\xc1\x00d\x01P\x02\xfd\x02o\x03\xf2\x02\xbb\xff\xb3\xfa\xa6\xf6\x84\xf1\xbe\xed\x82\xec7\xebZ\xec\t\xee\xe9\xedQ\xedp\xecR\xe9\xb9\xe8\xe7\xecz\xefR\xf1p\xf0Q\xec4\xf3\x1e\x0c\xfe0\xf1V\x95iif\x9dY\xf6NxK\x9bL\x9cK\xe2H\xe8G\\Bs6l&\x0e\x12\xcf\xfd\xb3\xe9\x8b\xd2$\xbd\xa6\xb1\xcf\xb2Z\xbe\x83\xcc\x01\xd5\xdd\xd6\x92\xd6/\xd5\xe1\xd6T\xde+\xea\x1d\xf9\xec\x08\xd2\x15\x85 g*\xe40\xed2\xcf-\x9d"\xc7\x15\xcb\n\x0e\x04\x97\x01\xf1\x02\xa8\x02E\xfe\xed\xf4\xb3\xe5\x9e\xd6I\xccM\xc7\xdd\xc79\xcb\x82\xd0\x00\xd8\x98\xe1O\xebH\xf4\xb8\xfa\xf6\xfc.\xfd\x1a\xfd\x1f\xfes\x03\xf3\x0cx\x17\x1d\x1f\xce #\x1d|\x16F\x0f}\x083\x02\xba\xfc\x15\xf8p\xf4\xce\xf2\xf9\xf3\r\xf7c\xf9\'\xf9\x01\xf5(\xef\xab\xea\xf4\xe9\x1c\xee\xeb\xf4\xf7\xfa\xe2\xff\xe0\x01b\x02.\x02f\x01\x90\x01i\x01\x0c\x01p\x01*\x02\xce\x04R\x07L\x08[\x07\x92\x01\xee\xfa\x19\xf5?\xf0=\xee\x9a\xec\x8e\xeb\xb2\xec\n\xed\x0b\xecS\xeb\x9e\xea\xc5\xea\xea\xe9\xb4\xe6\xbe\xe6I\xf2\x94\x0c\xa80zP\xd1]\xf2Y\xdfN+E.B{B\x7fC\xb2G/K\x85IH>\x92)*\x13M\x00T\xf0z\xdf\r\xcd[\xbeS\xb9\xcc\xbe\xfd\xc9A\xd4\x13\xd8\xa6\xd6\x8c\xd4s\xd4`\xd8R\xe0i\xebI\xfaK\n\xc3\x178"x(\xd8+=-\x91*\xde"\x12\x18\xea\rq\x08\x90\x084\tT\x06+\xffB\xf4\x92\xe81\xddd\xd2{\xcab\xc7\xd4\xc8\x02\xcez\xd4S\xda\xf7\xe0\x17\xe9`\xf0\x89\xf5S\xf7\xd8\xf7N\xfb[\x02\xa9\x0bD\x15I\x1d\xa7"\x81$\xaa!\xfd\x1a\xca\x12\xa3\x0b\xf0\x06\xa5\x03\x85\x00,\xfd\x04\xfa\xf9\xf7\xef\xf5\x16\xf3\x03\xf0\xa4\xec\xa1\xea\x06\xea\xcd\xe9\x7f\xeb)\xefS\xf3Q\xf9\x92\xfe\xba\x01q\x04Z\x05z\x05p\x07\xc3\x07\x01\t"\x0b\xa3\x0b`\r\x9f\x0c\x1d\t\x9e\x03l\xfc\x1b\xf6\xa3\xf1\xb5\xefB\xee\xe7\xecO\xec\xab\xe9q\xe7X\xe6\xf4\xe3\xd3\xe3~\xe4<\xe5\xed\xecO\xfe\xf0\x16\x9c1^E\xaaM\xe5M\xdbH\xa9A`=4=9A\xc2G\x1fK\x10EU7o%\xa9\x12\xa6\x02\x82\xf1\xd4\xe0\x82\xd4\x97\xcd\xa9\xcc\x87\xce\xf9\xcf$\xd1\xae\xd2\xb4\xd3j\xd4\xb4\xd4\xdc\xd5%\xdbc\xe5\xbc\xf2\xb6\x00\xa2\x0c\x0f\x17\xf3\x1f\xac%!\'\t$\x0b\x1f\xc5\x1a\x9d\x17/\x15\xb6\x12\x95\x0f\xbb\x0b\xf3\x05T\xfd\xe1\xf2k\xe8\xb4\xdf}\xd9\xad\xd4\xeb\xd0\xae\xce3\xcf\xa7\xd2\xf7\xd77\xde,\xe4\x99\xe9d\xee\xe6\xf1\x17\xf5\xa3\xf9b\xffE\x07\xee\x0f\x9b\x16\xf1\x1bL\x1f\xf6\x1f)\x1f\xbd\x1b4\x16\xad\x10\r\x0b\x86\x06\xfd\x03\xaa\x01\t\xff\x10\xfc\x99\xf70\xf24\xedY\xe9\xc3\xe7:\xe8*\xea\xef\xecz\xf0=\xf4q\xf7\x8a\xfa\xde\xfd{\x01\x82\x05b\t|\x0b\\\x0c\x0b\x0c\x96\n"\t\xdd\x06Q\x04x\x01\xe4\xfd\x17\xfa\xe2\xf5a\xf2T\xef\x1f\xed\xf8\xeb\xb5\xea\xe8\xeaR\xeb\x90\xeb\'\xec\xed\xeb\x1c\xee\x0b\xf5\x93\x02\xe3\x16\xed+2<[D\xdbD\xe5A\x16=\x9f8\xb16e7\x9d;\xa9?\xb0>\xff6K)\x87\x19\xed\nO\xfd\x00\xefg\xe1\xe3\xd6\xe7\xd0J\xd0X\xd2\xcb\xd4X\xd7\xa8\xd8M\xd8w\xd6w\xd3F\xd2\xdd\
xd5]\xdf\xc7\xed\x87\xfdZ\x0bh\x15=\x1b\xb1\x1d\xf3\x1c\xdf\x1a\xfc\x18\x96\x17c\x17\x10\x17P\x15G\x12\xc9\r\xa2\x08h\x034\xfc\xf7\xf2\xf8\xe8\xc0\xdf\x0b\xd9u\xd5\xd2\xd3\x80\xd4\x89\xd7\x12\xdcg\xe1\xd1\xe5L\xe8$\xe9\x0f\xea\xf3\xeb\xff\xef\xae\xf66\xff.\tE\x13,\x1b}\x1f\xb8\x1f\xb4\x1c\xfb\x18\xbe\x15_\x13\xd2\x11A\x10\xa7\x0e\x9a\x0c}\t\x8a\x05\xe4\x00m\xfb\x1b\xf6@\xf1\x10\xedB\xea&\xe9\r\xea\xdd\xec\xca\xf0\x83\xf44\xf7n\xf8\xf6\xf8o\xf9\x8e\xfa\xa9\xfc\xfa\xfe`\x010\x03\x81\x03}\x03\x92\x02g\x01\x8c\x00\x0c\xff\xd8\xfdZ\xfc\x82\xfa\x1b\xf9\xbf\xf7\xdd\xf6\xcc\xf6\xdd\xf6s\xf6@\xf5#\xf3\xd4\xf2p\xf7c\x02\xc9\x11\x99!Q-F3\xad4t2\x8c.a+\xd1*L.\xfe4;;\xcd\xf63\xf5\x01\xf4\x88\xf2[\xf1\x00\xf1T\xf1\xaf\xf2\xf6\xf44\xf7>\xf9\x91\xfa\xf0\xfa\x00\xfb\xc6\xfa_\xfa\x89\xfa\xc1\xfaZ\xfb\xab\xfc\xe7\xfdR\xff\xee\xff\xa3\xff\xa7\xfe\xd8\xfc\xf9\xfa\t\xf9/\xf8\xc3\xf9\x07\xffo\x08Z\x14\xba\x1f$(\xe3+R+\x9a(}%\xfa#\xe9$\x03(\xdd,11\xc92\x130M(d\x1d\xd0\x11\xca\x07\xac\x00\xde\xfb\x19\xf9d\xf7\xee\xf5\x9b\xf3\x11\xf0\x8b\xeb\xb5\xe6\xf7\xe2\x93\xe0<\xdf\x8e\xde\xfc\xdd\x1f\xde\xb3\xdf\xf2\xe2\xd7\xe7\x9c\xedx\xf3\xc2\xf8p\xfc&\xfe\xf4\xfd\xdc\xfc1\xfc\t\xfd\xff\xffw\x04H\t\xf5\x0cK\x0e\x8b\x0c\xda\x07.\x01r\xfa\x84\xf5<\xf3S\xf3\x89\xf4\xdd\xf5\xef\xf5\xb3\xf4@\xf2\xf4\xee\x0c\xec\x1b\xea\x8c\xe9\xcb\xea<\xed\x90\xf0/\xf4\xc8\xf7\xb3\xfb\xd0\xff\xe3\x03[\x07\n\n\xad\x0br\x0c\x9a\x0c~\x0c\xc1\x0c\xad\rq\x0f\xaf\x11~\x13\xb5\x13\xd6\x11\x19\x0e<\t\x04\x04O\xff\xd1\xfb9\xfa8\xfa\x18\xfb\xeb\xfb\xc3\xfbc\xfa\x05\xf8\xed\xf4\x18\xf2\xea\xef\x0e\xef\xea\xef\x0f\xf2\xde\xf45\xf7v\xf8\x9d\xf8\xc7\xf7\xb7\xf6\xbd\xf5u\xf5\x06\xf6H\xf7P\xf9p\xfbB\xfdX\xfeQ\xfep\xfd\x19\xfc\x94\xfa\xa0\xf9/\xfa\xa8\xfcl\x02\x16\x0b>\x15\xe8\x1ex%\x11(\x9b\'X%{#0#\x7f$\xf3\'\x0e,\xc5/c1\xff.\x01)\r Y\x16\xf7\r/\x076\x02\x89\xfei\xfb\xfe\xf8\n\xf6T\xf2\xd6\xed\xb5\xe8\x87\xe4O\xe1\x19\xdf\xbb\xdd\x80\xdci\xdc\xb2\xdd\xad\xe0,\xe5\x11\xea\xf5\xeee\xf3\xb6\xf6\xe9\xf8\xe3\xf9q\xfak\xfb\x8a\xfd\x1e\x01\x84\x05\xee\t@\r\xac\x0e\xbd\r\x87\n\xb5\x05\xa6\x00\x99\xfcd\xfa\xed\xf95\xfa\xab\xfa-\xfa\xbc\xf8g\xf6`\xf3\x90\xf0M\xee*\xeds\xed\xc9\xee5\xf1\xef\xf3\xe9\xf6:\xfa\x8f\xfd\xfc\x00\xae\x03\xc0\x05\xfa\x06{\x07\xca\x07\xdd\x07r\x08\xc1\t\xa5\x0b\x14\x0e%\x10\xd2\x10\xed\x0fW\r\xa9\t\x99\x05\xca\x01\xe1\xfe\x80\xfdd\xfd\x17\xfe\xf7\xfe\x19\xff\x17\xfe!\xfcC\xf9+\xf6\x94\xf3\xc8\xf13\xf1\xd7\xf1\xf7\xf2S\xf4I\xf5\x88\xf50\xf5~\xf4\x8e\xf3\x17\xf3\xc8\xf2\xe7\xf2\x99\xf3v\xf4,\xf6\x01\xf8\xa0\xf9\x0c\xfb\x90\xfb\xd5\xfb\xcd\xfb\x7f\xfb,\xfc\x93\xfe5\x04\x81\rM\x18\xaa"\xdc)\xe1,\xfe,\x0f+\xff(\x02(\xb1(\t,z0+4\xf34\xbd0\xa9(j\x1e?\x14\x01\x0c\x07\x05R\xff\xcb\xfa"\xf7\xd4\xf3\xef\xefm\xeb\xc2\xe6\xb8\xe2\x97\xdf\xe1\xdc\x88\xdar\xd8/\xd7\xae\xd7\xfd\xd9\x0f\xde\x84\xe3\xe2\xe9\x94\xf0F\xf6X\xfas\xfc^\xfdM\xfe\xfc\xff\xf7\x02\xfd\x06o\x0b\x93\x0fm\x12\x02\x13\xfd\x10\x8d\x0c$\x07^\x02\x08\xff\x06\xfdz\xfb\x13\xfa@\xf8$\xf6\xd0\xf3:\xf1\xd0\xee\xb8\xec,\xeb\xae\xea\x18\xebn\xecj\xee\r\xf1\x91\xf4\xe1\xf8~\xfdx\x01u\x045\x06\x0e\x07U\x07k\x07\xfb\x07U\t\x93\x0bO\x0e\xcc\x10\x1f\x12\xcf\x11\xe8\x0f\xcb\x0c\xd9\x08\xe3\x04{\x01W\xff_\xfe5\xfe`\xfe;\xfe\x8f\xfd=\xfc\x0c\xfaP\xf7b\xf4\xf8\xf1\xb4\xf0\x9d\xf0S\xf1t\xf2f\xf3\x00\xf4\xe9\xf3A\xf3"\xf2*\xf1\xd7\xf0]\xf1\xd5\xf2\x7f\xf4\x9d\xf5.\xf6U\xf6\xb8\xf6d\xf8\x99\xfa0\xfdu\xffK\x00}\x00n\x00j\x01W\x05\xc9\x0c\xa3\x17i$T/"6\x1a7K3\xc3.\xc3+\xc8,\x900\x9a4c7T6=1\xd7(\xa7\x1d\x89\x12\x93\x08d\x006\xfaE\xf4Z\xee\x0e\xe8G\xe2B\xde\xbb\xdb\\\xda-\xd9;\xd7 
\xd5\xe2\xd2\xc8\xd1\x04\xd3M\xd7I\xdf[\xe9\\\xf3 \xfb\xd8\xff\xb1\x02\xdd\x04G\x07\xf8\t\xb7\x0c\xe9\x0fb\x13u\x16\xf1\x17\x1b\x17\x88\x14\x12\x11C\r\x07\t\xed\x03\x0c\xfe-\xf8\x05\xf3T\xef\x1a\xed\xd2\xeb;\xeb\xb6\xea\xfe\xe9\xd3\xe8,\xe7\xdf\xe5}\xe5\xf4\xe6\x85\xea\x85\xefg\xf5\x06\xfb\x0b\x00o\x04\xa2\x07\xe2\t\xe6\n\x82\x0b\xac\x0c5\x0ey\x10\x90\x12\xd9\x137\x14>\x13\x89\x11\x0f\x0f\xef\x0b\xd3\x08r\x05b\x02\x92\xff\x04\xfd]\xfb3\xfa\xba\xf9\x81\xf9\xb1\xf8{\xf7\xba\xf5\xc5\xf3H\xf2)\xf1.\xf1\x18\xf2\x97\xf3 \xf5\xd7\xf5\x82\xf5\x9f\xf4\xa3\xf3|\xf3M\xf44\xf5F\xf6\xcb\xf6\xb0\xf6\x9c\xf6i\xf6\x9f\xf6\xc2\xf7\x7f\xf9x\xfc.\xff\xc0\x00e\x01\xb0\x00\x1e\x00\x06\x00\x14\x01\x01\x06D\x0fo\x1cn*6429%9\xa26m4\xff1<1\xdc0\xd30\xb00F.u*\xdd#\xf0\x1a\xf4\x0f\xce\x02\xfd\xf5\x96\xeao\xe2\xce\xdd\x9a\xdbk\xdb[\xdbW\xda\xc5\xd7o\xd4\xd9\xd1b\xd1{\xd3\xcf\xd7\x81\xddI\xe4\xf3\xebI\xf4\xaa\xfd\x9f\x06a\x0e\xea\x13\x94\x16\xe4\x16\xb0\x15\xb8\x13\xe7\x12\xab\x13\xaf\x15\xe0\x17\x12\x18W\x15/\x0f\x9b\x06/\xfd?\xf4\xb6\xecC\xe7\xd4\xe3$\xe2z\xe1\x81\xe1\xc1\xe1\xbf\xe1.\xe1Q\xe0\xba\xdf*\xe0\x8e\xe2Z\xe7m\xee\t\xf7\xaf\xff\x1e\x07\xa5\x0c%\x10&\x12.\x13\xbe\x13+\x14\xa8\x14\xfe\x14?\x15\x1e\x15\xa6\x14w\x13K\x11\xcb\r\x02\tO\x03\xe4\xfd[\xf9\x90\xf6\xcc\xf5\n\xf6\xf3\xf6\x1a\xf7\x81\xf6\xb9\xf5\xc4\xf4<\xf4\xdc\xf3\xfe\xf3\xd5\xf4\xe2\xf5w\xf7\xc5\xf8\xe0\xf9\xcf\xfa\xb3\xfb\xb6\xfc7\xfds\xfc\x9c\xfa\x80\xf8O\xf6\xff\xf4\x03\xf4\xc5\xf3[\xf5\xf3\xf6g\xf8T\xf8\xb3\xf6\xa1\xf6\xd5\xf6\xba\xf8\xcc\xfa\xc0\xfbs\xfd\x96\xfd\x9b\xff\x9a\x05\xa2\x10\xce R0p;\xd4?\xe7=m9N4b1\xcd0\xe81c4\xab3\xf1/p(\x00\x1e\x14\x13\xf1\x05 \xf8\xca\xeb\xf3\xe1$\xdc\x8d\xd9\x1e\xd8\x94\xd8G\xd9\xf5\xd8\xc4\xd7f\xd5\xcd\xd3\xf5\xd4F\xd8;\xde1\xe6q\xef\x8a\xfaR\x05\xe4\x0e\\\x15\xa3\x18\xc5\x19\xa8\x19\x9a\x19\x86\x19\xa5\x19\x0f\x1a\x05\x1a\xe1\x18\xef\x15\x00\x11\xb8\n,\x03\x8e\xfa+\xf1:\xe8\x02\xe1\xb9\xdc\xf9\xda\x10\xdb\xb4\xdb\x1d\xdc\xdc\xdb\xde\xda\xd5\xd9\x04\xdaO\xdcO\xe1T\xe8t\xf0\xa2\xf8S\x00G\x08\xad\x0f\xba\x15\x04\x19\\\x19G\x184\x17\x0b\x17S\x18\xa7\x19r\x1aM\x19\x0f\x16\xb3\x10h\n\xbc\x04\xfb\xff\xd4\xfc\xa0\xf9\xe3\xf6\xfd\xf4p\xf4S\xf5\x92\xf6$\xf7\x9e\xf6\xa8\xf5\xd6\xf4\x02\xf5\xe8\xf5\x90\xf7\xe1\xf9\x8a\xfc\x1d\xff\xfd\x00e\x01\xd2\x00\xa6\xff%\xfe\x93\xfc\x1c\xfa\xe0\xf76\xf6\x1e\xf6\xe6\xf6}\xf7\xde\xf6&\xf5?\xf3[\xf1\xce\xf0R\xf0\xb5\xf18\xf4\xfd\xf6\xa9\xfa\xfa\xfc\xef\xff[\x02\x7f\x039\x04\xdf\x02x\x04n\x0c\xc5\x1a\x85-%:\xdf>\xe8;\x0c6&3\xf91;2\xbe1\x9f03.k)c!\xd3\x17\xdf\x0e<\x06e\xfcQ\xef\xf8\xe1\xfd\xd8$\xd7+\xdaf\xdd\xbc\xdd\xbd\xdb\xbb\xd9\x02\xd9\xac\xd9\xe1\xdbc\xe0c\xe7\x86\xf0\x7f\xf9\x85\x01/\tk\x10\xff\x16\x99\x1a\xf9\x19\xc0\x16\x9e\x13F\x13\x94\x15\xbc\x17\x93\x17N\x14\xa5\x0eY\x07]\xfe\x82\xf4\x13\xeb\xd6\xe33\xdf\xc9\xdbo\xd91\xd8\xfb\xd8\x05\xdb;\xdc\x1b\xdbw\xd8\x02\xd7[\xd9\x02\xe0L\xe9:\xf3\x90\xfc\xe0\x04\x9d\x0b^\x10;\x13,\x15|\x16\x14\x18\xfc\x18(\x1a\x8c\x1b0\x1d0\x1e:\x1c8\x17\x05\x10\xbb\x08\xe2\x02W\xfex\xfb\xf2\xf9\x94\xf9\xb0\xf9\x85\xf8x\xf6\xce\xf3\x88\xf1\'\xf0Y\xef4\xf0\xc7\xf2&\xf7\xce\xfbK\xff\x9c\x00\xd0\x00\x1a\x005\xff\xc0\xfeC\xfe\x86\xff\x8b\x00\x00\x01D\x00\xd5\xfd\\\xfb 
\xf8\xeb\xf4\xe2\xf1\x9e\xee\x88\xedZ\xed\xc5\xee\xc0\xf0\x83\xf0,\xf1\xaf\xf18\xf3\x99\xf5\xfe\xf6\x84\xf9I\xfc\x8b\x00\xbf\x04\xed\x07!\x0b\x93\r?\x14\xe3\x1fo-5:F>3:\xa93c.\xab.U1\xd01\xba0>-\xe8\'\xc9!]\x18\x82\r\xdc\x01\xe9\xf5\xc7\xebK\xe3<\xdf\x89\xdfb\xe2\t\xe5-\xe4a\xe0&\xdc\xa9\xd9>\xdb\x06\xe0\xc2\xe6\x8d\xee\xe3\xf6\x84\xffG\x07\xa5\r\n\x11<\x12g\x11\xed\x0f_\x0f\xeb\x0f\xc1\x115\x14G\x15\x16\x134\r\x89\x04N\xfb\xe7\xf2\xfa\xeaQ\xe4\xb4\xdf\xb8\xdd\xbb\xde\x9a\xdf\x83\xdf\xf4\xdd\xe2\xdb\xaa\xda1\xda\xf1\xda=\xde5\xe4S\xed\x06\xf7\\\xffW\x06A\x0b\xe5\x0f}\x12z\x13P\x14\xa0\x15\x81\x18\x89\x1b\x9a\x1d\x80\x1e\x82\x1c\x01\x19E\x13\xa0\x0cT\x06\xdf\x00\xc6\xfd\xba\xfb\xc8\xfa?\xfa\x92\xf9\x07\xf8\xd4\xf5N\xf3\t\xf1\xd4\xf0\x10\xf2\xd3\xf4w\xf8\xe7\xfb+\xff9\x01C\x02\xa1\x02(\x02\x99\x01\x9f\x00\x94\xff\xee\xfe\xd5\xfe\x87\xfeo\xfd$\xfb\xa7\xf7U\xf4\xa2\xf0\xfa\xed[\xec\x0b\xeb\xb3\xeb\x13\xec\xa4\xedb\xef+\xf0\x83\xf2\xcf\xf3\xbe\xf6:\xfa\xca\xfd\xa0\x02l\x06\x11\t\xfc\x08\x12\x08\x80\n\x1c\x12\xff\x1f\xa7-\xb25\xbf6\xb12\xd6.\x08,\xeb*8+\xb5+\xcc+V*\xb2&\x06!\x1a\x19\xc6\x0e\x82\x02\x8a\xf62\xed\xce\xe8\xf6\xe8\xb1\xea\xbd\xec\x05\xecm\xe8\x8d\xe3E\xdf\xfb\xdda\xdf\xc7\xe2\n\xe8,\xef\xd7\xf7h\x00d\x06\xa1\x08f\x07\xd5\x05\xc4\x044\x05\x96\x07,\x0b\xe1\x0fx\x12\xe8\x10/\x0c\x19\x05\xd6\xfd\xce\xf6#\xf0\xa5\xeb\xc3\xe9Q\xea\x08\xeb\x0c\xeb\x90\xe9L\xe7%\xe4\xeb\xe0N\xde\x8d\xde\xb5\xe2\xff\xe8\xab\xef\xbb\xf5<\xfb\x97\x00\xf5\x03\xff\x04&\x05\xd6\x05\xc9\x08\xe5\x0b)\x10\xcf\x13\xa4\x17N\x19\n\x17\x98\x12\xe8\x0c\x1c\t\x17\x06"\x03i\x01,\x00\xec\x00\x19\x01\x0e\xff\x1e\xfc"\xf8L\xf5\x9c\xf3\xd7\xf3\xba\xf5\x84\xf9M\xfd\x94\xff\x10\x01A\x01L\x01\xa2\x00\x9b\xffz\xfe0\xfe{\xff\xec\x00\x10\x02\x17\x01f\xfdB\xf9A\xf5\xd9\xf2\xde\xf1\xed\xf0\xb8\xef\x08\xeeq\xed:\xee\x0f\xf0\x1c\xf2\xc3\xf1z\xf1X\xf2h\xf4\x1f\xf9+\xfd\xf7\xff\x98\x02\x0f\x04\xdc\x05\x9d\x08\xf0\x07\x8d\x06\xff\x07\xcb\x0fp\x1f=->2P.\x85\'\x0e&\xb2(0+\xc4*\xd4(^)\x1b)\x89&\xad!\xe9\x19\x1b\x10S\x03\xa9\xf6\xf9\xef\x13\xf1.\xf5\xad\xf6\x15\xf3\x80\xed\x19\xeaB\xe7\x03\xe4\xbb\xe0b\xdfs\xe2\xc0\xe8u\xf0\xdd\xf7\x93\xfd\x98\x00\xfd\xfe\x0c\xfb\x17\xf8v\xfa\x8c\x00\xf7\x05\xcf\x08\xb1\t\xb9\x0b\x9a\x0c!\t\x13\x03\x91\xfc\xfb\xf7\xa5\xf5\xba\xf3I\xf3\xcf\xf4\xe7\xf6>\xf6\xe7\xf1O\xecH\xe9\x98\xe8O\xe8\xf7\xe7\x9e\xe9<\xee\xbb\xf4\xc6\xf9\x99\xfb\xe3\xfc.\xfd\xc0\xfd,\xfe~\xff\xd1\x03\xc9\x08\x84\x0c\x1b\x0e\x8c\x0e\xeb\x0fB\x0f~\x0c\xdf\x07\xe9\x04\xbe\x04\xe5\x05\x7f\x06\x19\x05\xf0\x03\x16\x02\xe5\xff\x0c\xfd[\xfb\xe3\xfb\xb3\xfc;\xfd\x7f\xfd\xa2\xfe\xd2\x00g\x025\x02\x0c\x00\x9e\xfef\xfe$\xff\xe3\xff\'\xff\x7f\xfe\xb6\xfcI\xfb\x82\xf9\xfa\xf6\x9e\xf5\x02\xf4n\xf3\x93\xf1\xb6\xef\xa1\xef\x1e\xf0\xa5\xf1\xce\xf0\xfd\xef\xc1\xf0G\xf2\xae\xf5\xc6\xf7\xe3\xf9-\xfc\x15\xfd\x97\xfe\x17\x00\x99\x02\xb5\x05\x1a\x06\x88\x04\xe2\x02\xde\x07u\x14k"\x80*\xf4(\x0b$o"\x86%\xe3**,*+\xdb*\xf8*\xd4+%)\xda"\x94\x19Q\rR\x02\xb1\xfcX\xfc\xae\xfd\xda\xfb{\xf5\xb1\xee\x00\xea\x1a\xe7-\xe5\x95\xe1 \xde\xed\xdd*\xe1\xb9\xe7\x8b\xef\xea\xf4\x01\xf6\x06\xf3a\xf0\x81\xf2\xff\xf8\x0c\x002\x05\xf5\x07\xf7\x08\x9e\n 
\x0c\x96\x0c\xa3\n8\x04g\xfe\xca\xfb.\xfdR\x01\xeb\x01\xd8\xfe5\xf9A\xf3-\xef^\xec\x00\xea\xc4\xe8U\xe8\xad\xe8\x86\xeb\xba\xefE\xf3\x07\xf5\xfd\xf2/\xf0\xe1\xefE\xf3f\xfa\xc1\xff\x1f\x04\xa3\x06n\t\x83\x0b\n\x0cR\x0b;\t\x8f\x07\x0e\x07\xb9\x08\xc1\x0b\xf3\r\xb2\rj\n\xfc\x04\x8f\x01\xe1\xfft\xffq\xfe\xc1\xfdQ\xfe\xdb\xff}\x01\xc1\x00\xb7\xfe;\xfcn\xfa\xee\xf9F\xfaS\xfb\x06\xfdx\xfd\xd2\xfck\xfb\xe1\xf9\xc8\xf8\x14\xf7Z\xf5[\xf3\xe2\xf2\xd4\xf3\xac\xf4\x8a\xf4i\xf3T\xf2\xcd\xf2\xde\xf2\x7f\xf3\xdf\xf4\x03\xf6\x1a\xf9\xf8\xf9\xe1\xfa\x18\xfd\x99\xfe\x9c\x01\x00\x02\x18\x02\xe2\x03\x94\x03\xbb\x02\x10\x03\x86\t[\x19\x96%\xe9\'\xc6#j!\x00&u*\xcd)\x9f(\x8e+80a2W/%)\x12!Q\x16\xeb\t3\x01\xbf\xfe\xec\xffH\xfe\xc9\xf7?\xf1\x13\xed\x8b\xe9\x01\xe3g\xdbi\xd7\xb2\xd8\xfd\xdd#\xe4\xdf\xe9B\xef\xa6\xf1\xe3\xf0\'\xeea\xeeC\xf4\x80\xfbE\x02\x1b\x06/\nK\x0f\xda\x11\x02\x11\x1b\r\xa1\x08\x0c\x06H\x04N\x04[\x05\x86\x06\xe2\x05\xc3\x00=\xfa\n\xf4|\xf0\x84\xedH\xea\xac\xe7O\xe7I\xe9-\xec(\xeec\xee\x17\xee\x1b\xed6\xedh\xee\xad\xf2$\xf9Q\xffA\x03[\x06\x14\t\x92\x0b\xf9\x0bM\n\x90\t\x88\n\xd2\x0c[\x0f\x02\x11\xe2\x10.\x0fj\x0bz\x07\x00\x05\xa0\x03\xd9\x02&\x02\xcf\x00\xbb\x00\xe5\x00k\x00\xe2\xfe\x16\xfb2\xf8I\xf7(\xf8T\xfa\x90\xfb\xed\xfb%\xfb"\xf9 \xf7\xbc\xf5\xec\xf4\r\xf5\x80\xf5\xd1\xf6\x9b\xf7m\xf7\x90\xf69\xf5\x0f\xf4\x1f\xf3N\xf3\x82\xf5\xd7\xf7s\xf8\x95\xf8\xa9\xf8\x1c\xfbM\xfd\xcf\xfd\x8d\xfd\xa4\xfc\x02\xff\xcb\x01\x05\x04i\x05\xca\x04\xf7\x05\xad\ni\x13\x12\x1f\xa9%v$\x9d\x1f\x93\x1e\xb9${+\xed,\x1b+O+M-\xcc,E&\x0b\x1d\xb1\x14;\rR\x064\x01D\x00\x0c\x00\x9b\xfb4\xf3\x9b\xeaD\xe5\x82\xe2o\xdf\x00\xdd^\xdc+\xdfU\xe4l\xe9\xd5\xec\xe4\xed\xb7\xed\x0c\xed\xc3\xee{\xf3\x0e\xfb\xbe\x02\x1d\x08\xf6\tk\n\x88\x0b\x11\x0cu\x0b\xd7\x08\xd2\x06\xad\x07#\tI\t)\x07$\x03\x98\xfe\xca\xf8R\xf3;\xefE\xed\x0b\xed:\xec\x00\xebW\xea\xa3\xea}\xeb\xd7\xeaS\xe9\xa3\xe9E\xec\xb7\xf0+\xf5p\xf9\xb3\xfd\xc8\x00\xe6\x01\xe5\x02G\x057\x08\xa8\t\x17\nm\n\xb0\x0c\xf0\x0et\x0f\xa1\x0e\xac\x0b\xbd\tK\x083\x07\xd0\x06\xf4\x05{\x05V\x04~\x02\x1d\x01\xa1\xff\xc5\xfed\xfda\xfb@\xfa*\xfa\x06\xfb\xfe\xfa\x9f\xf9C\xf8\xac\xf6I\xf6\x9b\xf69\xf6K\xf6s\xf5\xeb\xf4\x92\xf5\xd4\xf4:\xf4\xc4\xf39\xf3\xe3\xf4\xeb\xf5/\xf7n\xf8\xf1\xf7\x9b\xf8~\xf9\xd8\xfaB\xfd\xea\xfe>\x01\x11\x03\t\x02\x15\x00r\xff\xed\x02~\t\x95\x10m\x15%\x19\x07\x1ds\x1fg \x11\x1f_\x1e1!\xb8&\x83++,\xd4(&$\xe9\x1f;\x1b\xa5\x14\xd7\r\xc6\x08\xe4\x066\x06\xac\x03\xa7\xff\xfc\xf9i\xf3\xb0\xec$\xe7\xd5\xe4\t\xe55\xe6E\xe7^\xe8M\xea\xe2\xeb_\xec\xdb\xeb\xbf\xeb\xb3\xedJ\xf1\x9d\xf6\xf8\xfc\xa6\x02\xc3\x05Y\x05\xac\x03\xad\x030\x05?\x06\xd2\x05#\x05\xc5\x05(\x07)\x07\xe5\x04\xbd\x00\x82\xfcp\xf9\xa5\xf7\xb6\xf6w\xf5P\xf4\xe1\xf3\xe1\xf3\x8c\xf3G\xf2|\xf0c\xef\xfd\xee\xd2\xefQ\xf2\xca\xf5\xe5\xf9\x17\xfc\xa2\xfc0\xfcG\xfc\xc3\xfd\xcb\xff\xb3\x01\x88\x03\r\x05\xcf\x06\x0b\x08 \x08X\x077\x06\x95\x05\xa9\x05\xc0\x06\xfc\x07\xf5\x08\xb4\x08\x8c\x07;\x06\x95\x05\x10\x05G\x04\x93\x03\x9a\x03x\x04\xff\x04\x82\x04\x84\x02[\x00\xe2\xfd\xac\xfby\xfaa\xf9\xd3\xf8M\xf8p\xf7#\xf7?\xf6\x87\xf4\xc4\xf2x\xf1\xdd\xf1v\xf3\r\xf5a\xf6[\xf7\x19\xf8\x1f\xf9\xd4\xfaf\xfcu\xfdV\xfes\xff\xd9\x01\xa5\x047\x06\xf2\x06\x1e\x06\xb6\x04\xe0\x03\x8f\x04\xf3\x07\xdf\x0b\xdf\rK\r(\x0cn\r\x84\x0f\xbb\x10\x87\x10\xc7\x0f\x00\x110\x13\x1a\x15\x00\x161\x15\xd2\x13\x0f\x12\x0f\x10\xc8\x0e\xa3\rn\x0c\xb7\n\xb8\x08\xf8\x07 
\x07\x11\x05*\x01\xe0\xfc\x96\xfa\xbf\xf9>\xf9\t\xf9\xc7\xf8\xa1\xf8r\xf7\xcd\xf54\xf5\xfc\xf4\x81\xf4.\xf3\x80\xf2\x84\xf3I\xf5\x80\xf6\xa8\xf6\x92\xf6w\xf6\x0b\xf6\x92\xf5\r\xf6\x9b\xf7\x1e\xf9\xb0\xf9\xb3\xf9?\xfa\x1e\xfb\x8e\xfb\x7f\xfb\xac\xfb\x1a\xfcx\xfc\x01\xfd\xcb\xfd\x8d\xfe[\xfe[\xfd\xd7\xfc\x01\xfd0\xfd\xe8\xfca\xfc\x9f\xfc\xca\xfcf\xfc\x06\xfcc\xfcp\xfd#\xfe1\xfeC\xfe\x93\xfe\x05\xff?\xff\xd1\xff\xe3\x00\xb4\x01@\x02b\x02\xf4\x02\x12\x04\xc1\x04\xf9\x04:\x05\xb0\x05\x18\x06J\x06J\x06n\x06a\x06\xde\x05(\x05\x98\x04\xf7\x03\xed\x02\x97\x01N\x00!\xff\xd9\xfdu\xfc\x1f\xfb\xf0\xf9\xce\xf8\x92\xf7M\xf6e\xf5\x9e\xf4\xe8\xf3q\xf3R\xf3\xaa\xf3\x17\xf4s\xf4\xfb\xf4\xda\xf5\xe3\xf6\xf3\xf7\xdd\xf8\xcf\xf9\xba\xfa\xa4\xfb\x9b\xfc\xb5\xfd\xc7\xfe\x9b\xff\xd1\xff\x96\xff=\xffO\xff$\x00L\x01\xa0\x02\xe3\x039\x05\xe2\x06\x96\x08[\n~\x0c\x0f\x0f\xc8\x11\x89\x14\x1b\x17\xba\x19!\x1c\x82\x1d#\x1e\xc9\x1eC\x1f\xcc\x1e)\x1d^\x1b\x05\x1au\x18\xe1\x15\xa7\x12\xf8\x0f|\rb\n\xeb\x06(\x045\x02\xcf\xff\xec\xfc:\xfar\xf8\xae\xf6E\xf4<\xf2\xf9\xf0\x1b\xf0\xcc\xeet\xed\x19\xed_\xedk\xed\x19\xed.\xed\x08\xee\xe8\xeea\xef\xea\xef\xd1\xf0\x07\xf2\x17\xf3\xc6\xf3\xa4\xf4\xba\xf5\xdf\xf6\xff\xf7\x05\xf9\xef\xf9z\xfa\xa8\xfa\xab\xfa\xf6\xfa\x98\xfb\x14\xfcz\xfc\x97\xfc\x81\xfc\x8d\xfc\xaf\xfc1\xfd\xfe\xfd\x9e\xfe\x11\xffz\xff0\x00p\x01\x89\x02\r\x03*\x03p\x03\xff\x03\x9c\x04\x18\x05{\x05\xaf\x05\x89\x05$\x05\xdd\x04\xd0\x04\xa0\x04;\x04\xe8\x03\xbd\x03\x91\x03%\x03\x9e\x02d\x02V\x02\xfb\x01m\x01\x10\x01\xd9\x00\x91\x00\xf9\xffm\xff\x19\xff\x95\xfe\xcf\xfd&\xfd\x9e\xfc\xfd\xfb\x1e\xfbL\xfa\xdb\xf9x\xf9\xdd\xf8E\xf8\x16\xf8\x13\xf8\xf8\xf7\xa9\xf7c\xf7\x95\xf7\xee\xf7\x0f\xf8\x1e\xf8D\xf8r\xf8\xac\xf8\x01\xf9R\xf9\x96\xf9\xbd\xf9\xcf\xf9\xa0\xfaL\xfc\x0b\xfe\xe5\xff\xa3\x01\x87\x03#\x06\x15\tm\x0c\x1a\x10d\x13\x1a\x16K\x18l\x1a\xc2\x1c!\x1f\x92 \xc1 . 
^\x1f\x84\x1e8\x1d\x1a\x1b\x9a\x18\xdc\x15\xe4\x12\xd9\x0f\xd4\x0c\xf8\t\xc4\x06/\x03\x03\x00\xc5\xfd\x11\xfc\xfc\xf9\xcc\xf7\x08\xf6\xc3\xf4\x8c\xf3\x19\xf2\xfc\xf0H\xf0\xa3\xef"\xefH\xef\xbf\xef\xfb\xef\xd1\xef\x98\xef\x08\xf0\xa6\xf0$\xf1\xa9\xf1\x18\xf2\x89\xf2\x02\xf3\xc5\xf3\xd8\xf4\xc0\xf59\xf6u\xf6\xda\xf6[\xf7\xe2\xf7T\xf8\xac\xf8\x0c\xf9x\xf9\x02\xfa\x9d\xfa\x1c\xfb\x81\xfb\xd5\xfbK\xfc\xf4\xfc\xe8\xfd\xf9\xfe\xe4\xff\x9a\x00:\x01\xfb\x01\xcd\x02\x97\x03I\x04\xc8\x042\x05\x82\x05\xb0\x05\xdc\x05\xd8\x05\xb2\x05p\x05/\x05\x00\x05\xa1\x04\x18\x04\xa2\x03N\x03\r\x03\xc5\x02\x88\x02P\x02\x02\x02\x95\x011\x01\xdb\x00[\x00\xc1\xff\'\xff\xab\xfe \xfek\xfd\xa4\xfc\xfd\xfb^\xfb\xac\xfa\xfc\xf9l\xf9\xf8\xf8\x87\xf8U\xf8D\xf8D\xf8X\xf8^\xf8s\xf8\xba\xf8\xd9\xf8\x03\xf9U\xf9x\xf9\xb5\xf9\x11\xfal\xfa\xe9\xfaU\xfb\x94\xfb\x08\xfc\x95\xfcT\xfdz\xfe\xf5\xff\x05\x02c\x04\xc6\x06`\t\xd7\x0b\x1c\x0eq\x10\xf4\x12\xce\x15\x95\x18\x84\x1a\xdd\x1b\xe6\x1c\xa5\x1d\n\x1e\xde\x1d^\x1dt\x1c\xb6\x1a\x99\x18\x89\x16\xbc\x14\xaa\x12\xdd\x0f\xd2\x0c\xce\t\x1a\x07\x9d\x046\x02?\x00;\xfe2\xfcB\xfa\xa4\xf8u\xf7\x1f\xf6\xcf\xf4z\xf3p\xf2\xa0\xf1\xba\xf0#\xf0\xb4\xefU\xef\x1d\xef\xe4\xee\xe7\xee\xf3\xee\xfb\xee0\xef\x9b\xef\x1b\xf0\xa4\xf0_\xf1S\xf2)\xf3\x02\xf4\xe3\xf4\xbb\xf5q\xf6\xe5\xf6\x96\xf7x\xf84\xf9\xc6\xf9k\xfa5\xfb\xcc\xfb3\xfc\xc2\xfcr\xfd\x0e\xfe\xa8\xfe\x82\xff\xbc\x00\xac\x01>\x02\xc5\x02T\x03\xf5\x03U\x04\xc7\x04d\x05\xbd\x05\xd6\x05\xca\x05\xf5\x05\x10\x06\xd1\x05r\x05D\x050\x05\xd8\x04X\x04\x1a\x04\xf7\x03\xac\x03=\x03\xea\x02\xac\x025\x02\xad\x018\x01\xe9\x00y\x00\xde\xff@\xff\xaa\xfe/\xfe\x93\xfd\xf9\xfcg\xfc\xd9\xfbY\xfb\xea\xfa\x8d\xfa\x1d\xfa\xbf\xf9t\xf99\xf9"\xf9.\xf9R\xf9i\xf9O\xf9=\xf9f\xf9\x91\xf9\x88\xf9\x9c\xf9\xe7\xf96\xfaM\xfaB\xfa\x7f\xfa)\xfb\xcb\xfb\x04\xfc\xa5\xfc\xe1\xfd(\xff\x8e\x00/\x02A\x04\xc2\x06\xf6\x080\x0b\xe5\r\\\x10\\\x12!\x14\x1c\x16\x0c\x18\x99\x19\xa0\x1aS\x1b\xd0\x1b\x95\x1b\xd6\x1a\x13\x1a\x01\x19\xae\x17\xc6\x15\xb8\x13\xe6\x11\xbf\x0f|\r\x05\x0b\xa7\x08w\x06\xfd\x03\x9c\x01j\xffp\xfd\x82\xfbz\xf9\xbd\xf7A\xf6\xc9\xf4W\xf3\xf5\xf1\xde\xf0\n\xf03\xef\x88\xee\x08\xee\x8f\xedC\xed,\xed]\xed\xbb\xed\xd4\xed\xf6\xedb\xee\x0c\xef\xe1\xef\xa6\xf0o\xf1F\xf2#\xf3\r\xf4\x18\xf5&\xf6\t\xf7\xcb\xf7\x89\xf8u\xf9\xa3\xfa\xa2\xfb_\xfc\xfc\xfc\xa5\xfdj\xfe\x1c\xff\xd8\xff\x94\x008\x01\xb4\x01&\x02\xc7\x02h\x03\xd3\x03\x19\x04X\x04\xb3\x04\x00\x05(\x057\x05C\x05M\x05]\x05`\x05R\x058\x05\r\x05\xd6\x04\xd0\x04\xde\x04\xcc\x04\x93\x04I\x04\x1f\x04\xdf\x03z\x03?\x03 
\x03\xd2\x02H\x02\xea\x01\xb6\x012\x01X\x00s\xff\xe4\xfe_\xfe\xb0\xfd\xf4\xfca\xfc\xc5\xfb\x0c\xfbz\xfa\x11\xfa\xbd\xf96\xf9\x92\xf81\xf8+\xf8=\xf8G\xf8\\\xf8\x88\xf8\xc7\xf8\x0b\xf9C\xf9\xb4\xf9S\xfa\xdb\xfae\xfb\xf6\xfb\xb8\xfc\x8c\xfdA\xfe\x11\xff\x0e\x00"\x01\x16\x02\xfd\x02\x1a\x04H\x05r\x06\xd8\x07\x80\t\x17\x0bs\x0c\xb3\r\x1d\x0f\x85\x10\xac\x11\xc1\x12\xe1\x13\xbd\x14:\x15\x88\x15\xe1\x15!\x16\xe0\x15#\x15M\x14o\x13Z\x12\xe6\x106\x0f\x8d\r\xd3\x0b\xf6\t\x17\x08?\x06G\x04\x1d\x02\t\x00:\xfe\x9d\xfc\xe1\xfa"\xf9\xac\xf7j\xf6$\xf5\xf5\xf3\xf1\xf2\x1d\xf2L\xf1o\xf0\xe7\xef\x9e\xeff\xef2\xef*\xefw\xef\xca\xef\t\xf0_\xf0\xe4\xf0\x91\xf1"\xf2\xc1\xf2\x8e\xf3^\xf4)\xf5\xe1\xf5\xba\xf6\x9c\xf7R\xf8\xf7\xf8\xa8\xf9p\xfa\'\xfb\xc8\xfbk\xfc\x0f\xfd\xa0\xfd&\xfe\xb5\xfea\xff\xf9\xff\x80\x00\x0e\x01\xc2\x01\x8b\x02.\x03\xb9\x03@\x04\xc0\x041\x05\xa0\x05\'\x06\x96\x06\xca\x06\xe7\x06\x1a\x07Z\x07^\x07%\x07\xe5\x06\xa4\x06T\x06\xf2\x05y\x05\xff\x04h\x04\xcc\x03:\x03\xab\x02\n\x02>\x01]\x00\x96\xff\xf0\xfeU\xfe\xba\xfd\x1d\xfd\x82\xfc\xf7\xfb\x8c\xfb\x1f\xfb\x95\xfa$\xfa\xbb\xf9S\xf9\x10\xf9\r\xf99\xf9?\xf9\x14\xf9\x04\xf9H\xf9\xb8\xf9\r\xfaW\xfa\xc7\xfa[\xfb\xc6\xfb/\xfc\xea\xfc\xd9\xfd\xa8\xfe\x16\xff\xae\xff\xa3\x00\x96\x01g\x02,\x03\xfc\x03\xe0\x04\xa0\x05J\x06,\x07\xf0\x07k\x08\xd9\x08L\t\xc8\tM\n\xb7\n\x15\x0b[\x0b\x88\x0b\xdb\x0b@\x0ch\x0cX\x0cC\x0c=\x0c\x1b\x0c\xc9\x0b\\\x0b\xf2\n\x87\n\xeb\t5\t|\x08\xcd\x07\xf6\x06\xf6\x05\xe8\x04\xe8\x03\xf4\x02\xd8\x01\xaf\x00\x9b\xff\x99\xfe\x9b\xfd\x84\xfc\x85\xfb\x91\xfam\xf9M\xf8U\xf7}\xf6\xac\xf5\xb6\xf4\xf7\xf3o\xf3\xeb\xf2z\xf2#\xf2\xea\xf1\xc7\xf1\xa8\xf1\xb9\xf1\xec\xf18\xf2\xa5\xf21\xf3\xef\xf3\xaa\xf4d\xf5<\xf6K\xf7e\xf8j\xf9r\xfaz\xfb~\xfcn\xfdS\xfeL\xff=\x00\x16\x01\xe2\x01\xb7\x02\x87\x031\x04\xb2\x043\x05\xb1\x05\x19\x06h\x06\x9a\x06\xbc\x06\xde\x06\xe0\x06\xce\x06\xbd\x06\xb7\x06\x9d\x06T\x06\x0e\x06\xc8\x05m\x05\xf2\x04x\x04\x00\x04u\x03\xd9\x02P\x02\xe4\x01b\x01\xb1\x00\xef\xffE\xff\xaf\xfe\x15\xfeO\xfd\x90\xfc\xee\xfbg\xfb\xe2\xfa\x7f\xfaM\xfa(\xfa\xd4\xf9x\xf9l\xf9\x91\xf9\xaa\xf9\xbb\xf9\r\xfa{\xfa\xc3\xfa\xf0\xfaa\xfb\x15\xfc\x9b\xfc\x11\xfd\x98\xfd3\xfe\xc8\xfec\xff\x19\x00\xce\x00^\x01\xca\x01R\x02\x0b\x03\x9a\x03\xef\x03\\\x04\xfc\x04|\x05\xcd\x05\x13\x06t\x06\xc1\x06\xdf\x06\t\x07R\x07\xa8\x07\xec\x07\x14\x08;\x08W\x08:\x08\xfa\x07\xc4\x07\x9e\x07\x85\x07g\x07\x18\x07\xc7\x06\x87\x06\x1f\x06\xad\x05A\x05\xf1\x04\x90\x04\x13\x04\x8b\x03\xf9\x02y\x02\xfa\x01\x8b\x01A\x01\xf9\x00\x93\x00\x1c\x00\x95\xff\x0c\xff\x81\xfe\xf4\xfdq\xfd\x01\xfd\x9a\xfc\x10\xfc{\xfb\xe2\xfaJ\xfa\xc1\xf9^\xf90\xf9\x14\xf9\xf5\xf8\xbd\xf8\x7f\xf8]\xf8C\xf8Y\xf8\x8c\xf8\xc8\xf8\x08\xf9K\xf9\xb8\xf9/\xfa\xa7\xfa\x1d\xfb\xa1\xfb>\xfc\xdf\xfcs\xfd\n\xfe\xa4\xfe\'\xff\xaf\xff:\x00\xc0\x00P\x01\xc9\x014\x02\x98\x02\xf6\x02b\x03\xb1\x03\xe6\x03\xf6\x03\xe4\x03\xe6\x03\xf1\x03\xf7\x03\xfa\x03\xe4\x03\xb7\x03U\x03\xea\x02\x89\x02\x1c\x02\xc1\x01`\x01\xf4\x00\x8e\x00\x1a\x00\xa6\xff>\xff\xc9\xfed\xfe\xfb\xfd\xa1\xfd/\xfd\xb7\xfci\xfc\x1b\xfc\xdc\xfb\xbb\xfbd\xfb-\xfb\x00\xfb\xc3\xfa\xcd\xfa\xa4\xfa\x98\xfa\xbc\xfa\xd9\xfa\xfe\xfaD\xfb\x8e\xfb\xdf\xfbN\xfc\xa1\xfc 
\xfd\x94\xfd3\xfe\xaa\xfe"\xff\xb5\xffn\x00\x13\x01\xdd\x01U\x02\xf8\x02\x95\x03\xec\x03`\x04\x87\x04\xd3\x04\xed\x04\xef\x04\xfd\x04"\x05$\x05~\x05\x7f\x05\xca\x05\xb4\x05`\x05q\x05\x11\x053\x05\x86\x04\xed\x04|\x04\xe1\x03u\x04\xb3\x03o\x030\x03\x19\x03\xb7\x02Y\x02c\x02\x8f\x02\x9d\x02\xf4\x01\xe9\x01\xd1\x01\'\x01\xcd\x00\r\x00\xe7\xffj\xff\xf3\xfe\xc5\xfe"\xfey\xfbH\xfa\x8d\x01\x9c\x0fV\x15\x8a\x03\x89\xee\x1c\xe8\xd7\xee\\\xf7\x91\x03|\x05\x86\xfd\n\xf3\xe9\xe7\xc7\xeee\xf3=\xf8\xde\xf8\x1e\xf6\x00\xf6\x80\xf6\xe9\xf8\xbd\xfe\n\x04\x1f\x02W\xfb\x17\xf8B\xfcI\x028\r+\r\xcf\x04u\xff\x7f\x00\x93\x06\xa5\x0c\xaf\n\xa1\x02\xbb\x005\x05\n\x0b5\x0b\xe4\x06"\x02\x90\xff\xbf\x04\xb3\x06\x0c\xfff\x04\xde\x02\xa8\xff\x01\x01 \x03\x17\x07\xd3\xff\xed\xfd\xc0\xfe\x97\xfc\xd3\xfe\xbf\x03a\x04\xed\x01\x92\xfc\x02\xfbt\xfe\xfd\x00\xd3\xfe\x80\xfdO\xffj\xfeE\xfc^\xfe\xab\xfcH\xfe\xf4\xfd\x13\xf8\x97\xf7\xa8\xf9\xea\x034\xfcN\xfa\x88\xfbQ\xf6c\xf8S\xfc+\x02\x1d\xf9\xac\xfc3\xfeZ\xfau\xfb\xe8\xfeP\x02\xe3\x00\xd3\xff\xa1\xfc(\xfdf\xfd\xe7\x01t\x07\xe8\x04?\x02\xda\x01\xc2\x00\x8e\x01\xac\x05-\x05\x00\x03\x1c\x04\xae\x07\xb2\x04 \x03\x87\x04f\x04\xce\x06\xa1\x06F\x03\xc3\x01\xb5\x02\x93\x03\xba\x05\xbc\x04\xeb\x04\xb9\x01\xd1\xfe`\x00\xa4\x04\x00\x03\xc0\xffx\xff\x9a\x01\x00\x00w\x00\xa4\x02u\x00\xcd\xfd"\xfd\xda\xfdZ\xfeA\x02\\\x001\xfes\xfb\xfa\xfa\xe1\xff0\x01\x9b\xfd\xdb\xfc_\xfdB\xfc<\xfc\x9e\xfd\xad\xfeG\xfd\xf5\xfb\xb4\xfa\xcb\xfb\x0e\xfdd\xfc\x0c\xfd\xdd\xfd\x8f\xfco\xfb-\xfb\xae\xfc\xc3\xfe\xe5\xfeg\xff\xdc\xfe\x0e\xfe\x80\xfc\x11\x00\xf1\x03\xce\x01\x0f\x03=\x02\xc1\xfe\xbc\xff\xb6\x04\x00\x05\xbd\x025\x03a\x01\xfe\x00\xd1\x05+\x05\xca\x01s\x01\x06\x05\xb6\x03$\x02\xad\x019\x02\x9e\x02\xf0\x01\x84\x01C\xff\xab\xfe\xa2\xfe,\x02z\xff\x0c\xfd\x99\xfb\x85\xfc\xc5\xfe\xb4\xfd\x81\xfc\xd9\xfcn\xfe_\xfd\x88\xfb\x8f\xfc2\xfe\xfc\xfd\xd2\xfb\xbd\xfa\x9c\xfe\xc0\xfb\xa0\xfcC\xfe\xa0\xfc\xfc\xfb?\xfc\xc8\xfe?\xfdw\xfc|\xfd\x8d\xfd!\xff\xe9\xff8\x00V\xfe\xce\xfdO\xff\x0e\xff\x15\x00\xb3\xff\'\x02\xd6\x01\xc4\x00\xc5\x01_\x03F\x01\x94\x00h\x02\xbc\x02\xe1\x04W\x04\x03\x04\x87\x02R\x04v\x04\x0e\x04U\x07n\x05q\x02\x18\x03\xe4\x03\x14\x03\xeb\x03\x8b\x06^\x04\xa4\x02\t\x02F\x02\x92\x00|\x02\'\x03\xc7\xff\x16\x017\x01m\x01\xeb\xff\xc2\xffr\xfd\xf6\xfd\x0c\x00h\xfe$\xff\x92\xffw\xfc\xe0\xfa\x16\xfe+\xfc2\xfbC\xfd\x7f\xfbw\xfb\xa0\xfc?\xfc\xd0\xfb/\xfbY\xfb\xdc\xf9\x05\xfc>\xfdk\xfd\\\xfd\x8a\xfc\x18\xfe\xa8\xfd"\xfe\x06\xff\x19\x00!\xfd\xfa\xfe\xce\x00\xae\x00\x82\x01S\xff\xfa\xff,\x01;\x02X\x03\xf2\x03\xf0\x01\xc2\x01\xf9\x01E\x02\xdf\x03e\x05\xb2\x04\x81\x02\xc6\x03\xdf\x02\xab\x03^\x03\xc2\x03\x19\x02\xa6\x02"\x04\x1c\x02\x84\x02\x95\x01n\x02\x96\x00X\x00:\x00\xf3\x00\xb7\xff\xbe\xfe\xac\xff\xa1\xfe\x90\xfe3\xfe:\xff|\xfe\xe3\xfe4\xfe\x15\xfe\xa5\xfe%\xfe\x82\xfc\x07\xfd\x00\xfe\xbb\xfe\xbf\xfd0\xfc\x1d\xfe\xf9\xfd\xb4\xfd\x8c\xfc\x91\xfd\xe6\xfb\x02\xfc\x86\xfd\n\xfd\xea\xfeb\xfd\xa8\xfd\xd0\xfc\x8f\xfe\x04\x00-\x00r\xfdG\xfd\x15\x01v\xfe{\x00\xff\x00s\x01\xb2\x00W\x01\x7f\x025\x02\x85\x019\x00b\x04\xd1\x03T\x03\xe7\x02\x9c\x02 
\x04\xc4\x03\r\x04\xcf\x04j\x04$\x02e\x02\xed\x04\xa9\x03Y\x03\xfc\x04\xe7\x03{\x02\xeb\x01\x9d\x02\xc9\x00c\x02R\x02\x9e\x02\xbd\xffr\x01\xc4\x01\x9f\x00\xda\xfe\xeb\xfcu\xff9\xfc\xca\xfe5\xfeG\xff\xdf\xfd\xe8\xfb\xb0\xfb}\xfbI\xfb\x8b\xfd\xd5\xfa\xad\xfa\x11\xfe\x94\xfb\x0c\xfd\x99\xfdG\xfd\x98\xfa\xec\xfa\x00\xfd\x86\xfe\xc3\xfb\x1e\xff0\xfeX\xfeZ\xfe\x95\xfd\xf4\xfe\x9f\xfe{\x01\xfc\xfe\x0b\x01-\x00\xe3\x00\xf5\x00e\x01b\x02"\x02\x9a\x01\xd7\x01\x82\x01I\x04l\x04.\x01\x91\x03e\x03\xb0\x02\xc9\x03\x9a\x03I\x02\xda\x02;\x01\xc6\x03\xb7\x02\x08\x01\xf1\x02\x8f\x01\xfb\xff\xc4\x00\xe2\x01\xb6\xff"\x00(\x00\x01\x01d\x00c\x00\xba\xffW\xfe\xcc\xfe\xb3\xfe\xaf\x00O\x00\xa8\xfe]\xfe\xb4\xff\xed\xfe$\xfe[\xfe\x93\xfe\xe5\xfe\xdf\xfeF\xff\xe0\xfd\x96\xfdN\xfe\xf3\xfdQ\xfc\xd0\xfdN\xfeQ\xfe\xd8\xfe\xbf\xfb\x17\xfb\x03\xfd\xef\xfc\x81\xfc\xae\xfdM\xfch\xfdd\xfee\xfd\x9f\xfb<\xfd^\xfe\xf9\xfe\xd3\xff%\xfe[\xff\xce\xfe\xb2\x00J\x01\xa6\x00s\x00\xc4\x01\xae\x01\xad\x00H\x03\x8a\x03\x9c\x02w\x02\xbf\x02\x81\x02\xfd\x05:\x033\x01\xcf\x03\x8e\x03\xee\x03\xba\x02\x1e\x03F\x02<\x02p\x03\xc4\x02m\x01\x15\x01\xb7\x01<\x01~\x00_\x00}\x00(\xff\xc2\x00d\xff\xeb\xfd4\x00\\\xfe:\xfd,\xfe\x0e\xfe\x0b\xfe\xc9\xfdX\xfe\xf5\xfc%\xfd\xa9\xfd\xb5\xfc\x1b\xfe=\xfe*\xfd\x85\xfdG\xfet\xfd\x8e\xfe4\xfe\x0b\xfe;\xfd\xb2\xff\xfb\xff[\xff\xe6\x00\xd1\xfe\x05\xff\xa7\xff\xd9\x00\'\x01\xe4\x00P\x00\xf1\x01p\x01t\x01\xbc\x01\x10\x01\xd9\x011\x01;\x02\x9c\x02\x1e\x022\x02s\x01\x86\x01\xab\x013\x01N\x02\xf6\x01\xc1\x00\x94\x01\xe2\x01\x84\x01\xf9\x01#\x01\xf1\x00H\x01\x06\x01\x86\x00\xac\x01F\x02\x01\x01I\x00\x18\x01\xb0\xff5\x00\xd1\x01\xe8\xffH\x01F\xff\xb5\xff\xd7\xffq\xff\xf2\xfe0\xffR\xffD\xfe\x08\xffS\xfd\x0e\xfe\xdd\xfd\xa0\xfdY\xfeD\xfd\xdf\xfc\xae\xfd\xa2\xfd\xaf\xfd>\xfd\xea\xfc\x95\xfc\x95\xfd\xc6\xfd\x8b\xfe\x92\xfe\xc2\xfdb\xfeD\xfd\xb5\xfe\xe0\xfd\x1b\x00\xe6\xfe\x89\xff\xa3\xff~\xff\xf9\x01\x16\x00\'\x01\x0e\x01\x84\x01$\x00J\x02\xed\x01>\x03\xcf\x03\xc8\x02\xfa\x020\x028\x03\xfb\x02\xb2\x02\xc4\x02P\x04j\x03\xa3\x03w\x02\xb6\x03\x85\x01N\x02\xa2\x02\x0c\x03_\x017\x00\xf3\x00t\x00L\x02\xaa\xff\xce\x00\x94\xfe\xc0\xfe]\xfe&\xff\xd8\xfe\x11\xfe\xb2\xfd\'\xfd\xea\xfdr\xfc\xea\xfc\xe3\xfc\x95\xfc\xc8\xfd\x14\xfcT\xfc\x87\xfc\x92\xfc\x9d\xfc\xc7\xfdS\xfe\xdd\xfc<\xffR\xfd\r\xfeJ\xfe2\xffo\xfeG\xff\xc0\xffN\xff\xf2\x007\xff~\x01\x03\x00F\x00\xa5\x00\x7f\x01\x85\x01\xfa\x00\x82\x02\xe2\x00A\x02~\x02\xa2\x01\x8b\x00$\x01J\x02\xe0\x017\x03;\x01\x8a\x00\xc0\x00\x11\x01V\x01c\x02\xff\x01\x05\x00\xd1\x00\xf8\x00Z\x00\xa1\x01_\x01\x06\x00\xb8\x00\x16\x00c\x00\x81\x00{\x00\xbe\xffP\x01V\xff\x95\xff\xf7\xfe\x97\x00\xd8\xfe\xc2\xfe\\\x01<\xfe\xa3\xfeS\xfd\'\xff\xdb\xfe\x0f\xffO\xfdv\xfd\xc8\xfd\xb7\xfc\xd0\xfd\xad\xfd\x84\xfd\xef\xfct\xfd\xe8\xfcX\xfe\x05\xfd\x03\xfd\xbd\xfe+\xfd\xf3\xfe\xd7\xfd\x9f\xffc\xfe\x0c\xff\x15\x011\xfe\xbd\x00\x95\xff~\x01^\x00F\x01\xc2\x00\xf4\x01l\x02[\x00\t\x03\xb2\x01\x06\x03\t\x02\xb8\x02\xdc\x01{\x02\x95\x02|\x02Y\x02<\x02i\x02\t\x02B\x02.\x01e\x02v\x01B\x01\x9a\x01=\x00\xbd\x01\x9d\x00\xcc\x00\x0f\x00\xcd\xfev\x01\xf8\xfe\x8b\xff\xa9\xff"\xff\xb5\xfe\xad\xff\x1d\xfe\xf2\xfdT\xff\n\xfe8\xff>\xfe\xed\xfd\x8f\xfd\x12\xfe[\xfe\xd1\xff\xd7\xfe\xd1\xff\xfc\xfd\x00\xfe2\xfen\xfe\xa5\xffR\xffc\x00Q\xfe\xf5\x00\xad\xfd\x95\x00\x9e\x00\xfa\xff\x17\x00\xaf\xfe(\x01\xf8\x00\x87\x02\x1d\x00w\x02\xcb\x00\x1d\x02\xe6\x02I\x01\x05\x03\x99\x01\xb3\x02\xcf\x01\xe3\x02"\x02?\x02\xe6\x01=\x01\xfb\x01\x7f\x00O\x01u\x00:\x00n\x00\x08\x01T\xff\x98\xff\x16\xff\xbc\xffm\xff\x89\xffV\xff\xd9\xfd\xbb\xff9\xff0\xfe\xc2\xffO\xf
e\xa9\xfe\xc5\x00_\xfd\xa7\xff\x02\xfe\xdb\xff<\xfe\x93\xff\x1c\xff\x1b\xff\xdd\xfer\xfe\x8d\xff/\xfe\x1e\x00U\xfd[\xfff\xfd\xa1\xff\x15\xff\x8c\xfe\xb0\xfe\x13\xfew\xfe\x87\xfeW\xff\x07\xff\x15\x00\xc3\xfe\xa5\xff\xfd\xfe\x1f\x00\xae\xff\xa3\xff\xc2\x00Q\x00g\xff\xed\x00\xcf\xff\xac\x01b\x01\xa1\xff<\x01\xd2\x00\xd2\x01S\x00\xee\x01\x91\x00\x0b\x02\xa8\x02\x96\x01\xcf\x01\x88\xff\xe1\x00y\x01G\x01g\x03\xa5\x01c\x00\x19\x01\xeb\x01\xc6\x00E\x02\xcc\x00\x08\x01a\x01\x89\x00\x1c\x01\xad\xffd\x02\x8b\xff\x81\xff\xc3\xff\x86\xfe\xd9\xffy\xff!\xff\x13\xff\x12\xfer\xfe\x19\xff@\xfe\xa6\xfe&\xfd\xe0\xfd9\xffp\xfe=\xfe\xce\xfd\xde\xff\x86\xfe;\xfe\x85\xfft\xfd?\xffq\xff\xa6\x00\x18\xff\xce\xff\xa6\xff~\xfe9\x01u\x00\x83\xff\xbc\x00\xce\x00\xda\xff\x01\x00\x9e\x00\xa1\x00I\x00\x96\x015\x00\x96\x018\xff\x15\x02\x03\x00\xc8\x00\xcf\x01E\xff\xfe\x01\x95\x00\xb7\x01\xfe\xffO\x01\'\x01\xe4\x00\xec\xfe\x15\x01\xed\xff\x9c\xff\xc8\x00\xc8\xff*\x01\xde\xfe*\x00\xc6\xfe\xd0\xffl\xffP\xfe/\x01\xbb\xfe\xaf\xff\xa3\xfe\xf6\xfe2\xff?\xff_\x00\xc4\xfd\xd2\xff\xeb\xfe\xea\xff\x7f\xfe\'\xff\n\xff\xb0\xfe\xc0\xffm\xfe\xf7\xffr\xff\x89\xff\xd9\xfe\xdc\xfe\xab\xfe\x1c\xff\xe8\xff\xf7\xfe7\xfe\xdc\xff8\xff;\x00\xbe\xff\x94\xfe\x94\xff\xc5\xff{\xff\xc6\xff*\x00\xc4\xff\xb8\xff\x8c\x01\xea\xff#\xff\xe6\x00K\x01\xe3\x00Z\x00\xd0\xff\xf8\x01,\x00\xb9\x011\x02b\x00q\x02\x1b\xff\x08\x03$\x00\xb4\x01&\x01\x8c\x00\xee\x01a\x00\x9c\x03_\x01\xf5\xff]\xff\xdc\xff\xc0\xffN\x01\xd5\x00\x93\x00\x10\x00t\xff\x95\xff\xae\x00S\xff\x1c\xff\xa6\xfe\xa9\xfeR\x00g\x00u\xff\x08\xffQ\x00\xaa\xfeD\xff`\xfea\x01\x88\xff\xd6\xffU\xff\xf2\xfek\x00/\xff\xcd\xff\xc4\xffq\x00\x9b\xff\x93\x00\xac\xff\x00\xffR\xff`\x00\xf4\x00N\x00\xef\xff\xbd\x00\r\x00w\xff\x91\x00\xc1\xff\xe5\xff#\x01\x15\x00\xc6\x00\x08\x00\t\x00\xd0\x00F\xffd\x00\x83\xff\xde\x01\x8f\xff\xd4\x00\xa2\x00\x7f\x00\xea\x00\x08\x01\xe2\x01\xe0\xff\xc8\x01\xce\xfe\xa7\x01\xdc\x00/\x01\x1c\x01-\x00Z\x01\xa2\xff4\x00\xcb\xff\xb0\xff\xfe\xfe\xa2\x01F\xff\x81\xff\x9a\xff\xf2\xfe~\xff5\xff\x1b\x00\xf5\xfe\x13\xff\x96\xfe\x14\xfe\xa0\xff\xe2\xffx\xfe\x17\xfe\x94\xfd\xf6\xff\xf6\xff\xba\x00:\xff6\xff\xaa\xff.\xfey\x00\xe8\xfd\x04\xff9\x00\x86\xff.\x02\xba\xfe6\x00(\xff`\x00t\xfe\xe7\xff2\x01*\xff\xe3\x01\x0e\x00\x19\x01\xc9\xfe\x9d\x00\xeb\xfe\x00\xff\xce\x00W\x01\x9d\x01\xdc\xfea\x00\xdd\xff\xd3\xfe\xa6\xff\xe1\x00\xe7\x00\xbe\xff|\x01\x11\x00\\\x00@\x01\xc1\xfe\x94\xff\xb6\xff\x07\x01\x81\xff+\x01\x05\x00\xa1\x00v\xff\x90\xff&\xff\xf7\xff}\x01i\xfe\xe9\xffn\x00`\x00\xde\xfe\xdb\x00Y\xfeh\x00\xac\xff\xd0\xff\xda\xffR\xfe]\xff\xb0\x00\xc9\x00\x90\xff.\xff\xcf\xfe\xbf\x00\xd3\xff\xcc\xff>\xfe3\x00\xde\xffJ\x00\x8e\x00\x8f\x006\xff\xd2\xff\x98\xfe\xea\xfe\x95\x00i\xff7\x01\xc9\xfe:\x02\xbc\xff\x19\xff\xba\xfe$\xff\xb3\x00\n\x00\xd8\x01S\x00K\x00\xbe\x00\x12\xff\xa8\x00\xc8\xff\x91\xfe\xd1\x01L\xff\x9d\x00\xc2\x00\xd2\x01\xb1\x00\n\x00\xf7\xff\xdc\x00\x10\x01\xe6\xfd\xd3\x00W\x00\xa7\x00{\x02\x08\x01}\xfe|\xff\xef\xffI\xff\xb1\xff_\x00\xab\xfe\xdd\xff\x8c\x00\xde\xfe\x8f\xffh\xffI\xff|\xff\xea\xfe\\\x00C\xffG\x00~\x00\xb0\xff\x04\xffJ\xffR\x00\xee\xff\xca\xff\xc0\xfd\xe9\x00\xe3\xffN\xff\xb6\xfew\xff\xf7\xffd\x01\xab\x00t\x00<\x00\xbb\xfe\xaa\xff\x7f\xfeK\xff\x14\x01L\x01m\x01\x10\x01a\xff~\xff.\xffT\x00\x96\x00\xcf\x00G\x00m\x00\x88\xff2\x00R\x00e\x00\x83\x01\xe7\xff\xef\x00}\x00"\x02\xce\xff]\xfe\x10\x01\r\x00\x8a\xff\x0e\x00w\x00\x8b\x00\x07\x02\xd3\x00\xd0\xfe\x8b\xfe\x8a\xff\x80\x00q\x00\xf9\x00\xc9\xff\xcb\xfe\xcf\xfe.\x00\xbe\x01T\xff\xbb\xfc\x80\x00\xca\x00\xa5\xff\'\x00\xc5\xff\x8a\xfd_\
xfc\xd7\xff"\x022\x02Q\x00\x95\xff\xf7\xff\x8b\x00\x80\xfe\xbd\xfe\xaa\xff\xbb\xff\xd9\xfe\xbb\xff\xf1\x01<\xff\x03\x01%\x01\xd3\xff\xe5\xff\x85\xff\x88\x01`\xff\x15\xff\x06\x00\x13\x00<\xffV\x01\xe0\x02\xa4\x01\xca\xff\xd4\xff\x03\x000\xfe%\xff\x12\xff\xea\x01\xca\xff\x94\x00\xb2\x00\xd6\xff\xab\x01\x0e\xff\x16\x00x\xff\xa2\x00X\x029\x00\x81\x00*\x01W\x00\x81\xff\xcc\x008\xfe{\xfd\xae\xff\xcd\x02<\x04n\x00\x12\xff*\xfe\xff\xfd\xe5\xffz\x02\x9a\xff\xa4\xfdW\xff\x9c\x01\xc6\x01J\x00\x0b\x00J\xff2\xfd\x00\xfe\x18\x00`\xffk\xff\xaf\xff\x8a\x00\xf5\xff#\x01\xfe\xff\xac\xfe\x99\xff\xe3\xfe\r\x00H\xffn\x01\xeb\x01G\x00\xe2\xff\xc9\xfe\xf2\xfe\x00\xff\x91\x00\xb5\x01\xda\x01\x9f\x01\xd5\x01\xb3\xffN\xfe\xec\xfe\xc1\xffA\x01i\x00\xf7\x00\x00\x01h\x00\x88\x00\xd4\xfei\xfe\x8b\xfe\x86\xff\xf6\x00\xc4\x00\xa9\xff\xf4\x00\xd0\x00a\xfe\xcb\xfe\x1d\x00v\xfe\xe5\xfd\x06\x00C\x00D\x01\xfc\xff\x8c\xfe\x14\xfe\xe3\xfd\\\xfeX\xfdB\xfe(\xff\xb3\xfeh\xff\x97\xff\x83\xfe?\xfd\xed\xfc\x8d\xffO\xfe\x06\xfe\x9c\xfd\x82\xfd\xfd\xff\xec\xffB\xff\xb3\xfd\xb3\xfdy\xfem\xfe\xc1\xff\xee\xfe8\xff\x14\xff\x80\xff1\xff7\xfe\xe5\xfd$\xfc\x1b\xff\xa5\x004\xff\x08\xfe\xce\xfd\x81\xfec\xfe\xce\xfeC\xfe\xc2\xffo\xff\xe7\xff\xb5\x00H\x00G\x00\xb5\xff\xc0\xfe\xe6\xfe\x89\xff7\x00\xb0\xff\xb0\xff\xc5\xff\x83\xfe\x9a\xfeQ\xff\xbb\xfeX\xff\x1a\x00\xf7\x00o\x02\x1c\x04s\x05\x86\x07\xbd\t\xac\x0bH\x0e\xa9\x10\xef\x12U\x14O\x15\xd5\x15\xaf\x15\x1f\x15\x0b\x14h\x12\x14\x10\x10\r8\nG\x07z\x03\xe9\xff\x01\xfdt\xfa\xa5\xf7\x9b\xf5\xdb\xf3\xf8\xf1B\xf1\xc6\xf0$\xf0\x05\xf0,\xf0g\xf0\xd0\xf0\xbf\xf1\\\xf2\x91\xf2\xa7\xf3\t\xf5\x01\xf6\xe2\xf7\x1e\xfa\x92\xfb%\xfd!\xff\xec\x003\x02_\x03\xbe\x03\xc9\x031\x04\x13\x04\xaa\x03\x1c\x03\x06\x02\xd3\x00\xa7\xff\xc0\xfe\xeb\xfd\xa0\xfc\xfe\xfb\xa3\xfb]\xfb7\xfb{\xfb&\xfc9\xfc}\xfcD\xfd\r\xfe\x17\xff\xc6\xff+\x00O\x01t\x02&\x037\x03`\x03\xb9\x034\x04\x01\x04\xb3\x03\x92\x03T\x03\xb7\x02\xc7\x01k\x00\xf0\xfff\xff\xff\xfe\x15\xff\x91\xfe.\xfe6\xfd\xdc\xfb4\xfb\x0c\xfd\xd4\xfeI\xff\x9c\xfd\xcc\xfc\xc8\xfd%\xff\x81\xff8\xfeb\xfd\x9c\xfe\x87\xfff\xfe\x03\xfeO\xfe4\xfe\xb9\xfc\xf8\xfb\xcc\xfbv\xfbT\xfb\xd4\xf9\xe8\xf8Z\xf8\xdc\xf8@\xf8\xe2\xf7\x95\xf7\xe9\xf7\xf1\xf7#\xf8\xd4\xf8\x0f\xf9\x10\xfa\xde\xfa\n\xfcf\xfd$\xff\x16\x01L\x04\xb9\x07\r\x0bR\x0f\xa3\x13\xa1\x18@\x1e"#$\'0*\xb7,I.\x9f.\x82-\x0f+d\'\xb1!R\x1b\xb1\x14\xae\r\xb6\x06^\xff\x05\xf8\xf9\xf1\x02\xed\xdd\xe8r\xe5\x99\xe2\x9a\xe0\xb8\xdf\x9c\xdf\xd8\xdf\x96\xe0\xff\xe1t\xe3\xc3\xe4\x87\xe6\xfb\xe8\x05\xec\xe1\xeej\xf1_\xf47\xf8h\xfcg\x00:\x04\xc2\x07\x00\x0b\xcb\r\xc4\x0f\xdf\x10"\x11{\x10\xd5\x0e;\x0c\xe9\x08|\x05\xa4\x01`\xfd\xf9\xf8\x18\xf5\xef\xf1S\xefW\xed\xec\xebw\xeb\xc9\xeb\xb5\xec\xe3\xedy\xef\xd1\xf17\xf4\'\xf6/\xf8\x9e\xfa6\xfd\t\x00N\x02\x11\x04|\x06)\tD\x0b\xa4\r\xa3\x0f 
\x11c\x12Y\x13\xd3\x13\xac\x13\x1b\x13\xbe\x11\x91\x0f\x14\r\x8e\n\xcb\x07\xc1\x04\xfd\x01F\xff\x83\xfc\xac\xfa=\xf9\xb8\xf7\xc8\xf6_\xf6!\xf6\xd1\xf5\xde\xf5\xf1\xf5\xe0\xf5)\xf6Z\xf6I\xf64\xf6V\xf6M\xf6\x11\xf6&\xf6Z\xf6\x8f\xf6\xb4\xf6\x89\xf6\xa6\xf6\xe4\xf6N\xf7\xef\xf74\xf8\xa5\xf84\xf9\xe4\xf9]\xfa\xe9\xfa\xf4\xfb\x97\xfc\xc3\xfc\t\xfd^\xfd\xd7\xfd\xfb\xfd"\xfe\x04\xfe\xab\xfd\xfa\xfd\x0f\xfe\x9a\xfe$\x002\x02\xd6\x04{\x085\r\x83\x12\x7f\x18u\x1f2&\x19,\xad1V6)::\x11\xc6\x14[\x17\xbc\x18\xf2\x18\n\x18\xbf\x15\x00\x12)\r\xc7\x07\xd9\x01Q\xfbW\xf4\xcc\xedJ\xe8\xc9\xe3M\xe0\xde\xdd\xbb\xdc\x11\xdd\xdc\xde\xe1\xe1\xca\xe5k\xea\x93\xef\n\xf5X\xfa\x97\xff\xb4\x04p\t\x8a\r\r\x11\xe1\x13]\x16\xa2\x18b\x1a\\\x1b\xd7\x1b\x16\x1c\xd3\x1b\xfa\x1a\xb7\x19\xe0\x17n\x15Z\x12\x93\x0ea\n!\x06\xb1\x01V\xfd#\xf9"\xf5\xc3\xf1E\xef\xa6\xed\xfe\xec\xdc\xecR\xedm\xee\x16\xf0\x1c\xf2V\xf4\xb5\xf6\xcf\xf8|\xfa\xe3\xfb\xf5\xfc\xc5\xfdl\xfe\x86\xfe\'\xfe\xa8\xfd\x07\xfdi\xfc\xb0\xfb\xc7\xfa\xe8\xf9\xf3\xf8\xf6\xf7-\xf7\x8b\xf6\xe2\xf5N\xf5\xcc\xf4E\xf4M\xf4\xaa\xf4E\xf5\xf6\xf5\xe0\xf6\xec\xf7\x0e\xf9\x88\xfa\x05\xfc`\xfd\xd6\xfe\xd3\xff\xbb\x00\xa3\x01\x8a\x02 \x04_\x05Z\x067\x08~\x0b\xf3\x10i\x17\x15\x1d\x87"a(\x02/\xe45\xea:\x96=\xaa>N>\xe9; 7O0\x12(\xa4\x1e\xb0\x13\xbf\x07\x96\xfc\xe4\xf2E\xea\xe9\xe1 \xda\xac\xd4\x15\xd2[\xd1m\xd1\x15\xd2\xfe\xd3\x03\xd7c\xda\xef\xdd\xa4\xe1\x97\xe5^\xe9u\xec`\xef\x12\xf3\xc4\xf7\xb7\xfc\xd3\x00>\x04X\x08C\r\x01\x12\x84\x15\xae\x17\xbe\x18\xb2\x18\xff\x16\x9b\x13\xd8\x0e/\ts\x02\xb2\xfaV\xf2\xb1\xeam\xe4\x1b\xdf\xa3\xdaI\xd7\xd4\xd5\xa9\xd6\x8a\xd9\xb5\xdd\xc1\xe2\xab\xe8J\xef`\xf6V\xfd\xe1\x03\xfd\tH\x0f~\x13\xaa\x16\xf8\x18\xfd\x1aX\x1c\xd8\x1cy\x1c\xad\x1b\xd4\x1a\xf2\x19\x8f\x18\xb8\x16\x89\x14\xf7\x11\x08\x0f\xb5\x0bT\x08\xc0\x04\xcd\x00\xad\xfc\xb8\xf84\xf5l\xf2y\xf0,\xef\x9e\xee\xcd\xee\xde\xef\xbe\xf1.\xf4\xc5\xf6\x93\xf9Z\xfc\xf0\xfe(\x01\xcb\x02\xce\x038\x04\xf5\x03\x12\x03\xb2\x01\x12\x00;\xfe\x10\xfc\xc0\xf9\xb5\xf7\x02\xf6q\xf4\xe6\xf2\x94\xf1\xb9\xf01\xf0\xea\xef\xee\xef3\xf0\xd2\xf0\x98\xf1U\xf2n\xf3 \xf59\xf7\'\xf9\xdf\xfa\xea\xfcZ\xff\xa0\x01\xf2\x02\xb6\x03\\\x04\xb3\x04a\x04\xf4\x02Q\x01V\x00\x1a\xff\xcd\xfd\x1a\xfd\xee\xfe\xa4\x03 \t\xbf\x0eq\x15\xf6\x1e\xb7*!5\xd4<\xa0B\x0fH\x11L\xf7K\xc7G\x00A\x868\xc2-* \xad\x11t\x04Q\xf8C\xec\xa4\xe0p\xd7\r\xd2y\xcf\x0b\xceB\xcd\x12\xce\xd2\xd0\x8c\xd4\x03\xd8\t\xdbo\xde\x16\xe27\xe5\xc0\xe7\xab\xea\xf0\xee$\xf42\xf9\x12\xfe\xfa\x036\x0b\x94\x12\xb5\x18?\x1d\x9a \xb9"\xca"K 
%\x1b3\x14\xc0\x0b:\x02%\xf8D\xeeR\xe5\x83\xdd\x1b\xd7\xbc\xd2\xc5\xd0Y\xd1\xf7\xd3\xfd\xd7!\xddL\xe3p\xea\n\xf2\x85\xf9K\x00\xeb\x05\xb8\n\x0b\x0f\x1d\x13\xae\x16\\\x19\x02\x1b\x1d\x1c\xe6\x1c\xcc\x1dn\x1e{\x1e\x81\x1d\\\x1bQ\x18\xcc\x14\x0e\x11\xa2\x0c\'\x07\xf8\x00\xd7\xfaJ\xf5\x9d\xf0$\xed\xe9\xea\xbf\xe9\x87\xe9\x99\xeaN\xedp\xf15\xf6\xea\xfa\t\xff\xba\x02\x16\x06+\t\x85\x0b\x83\x0c\x02\x0cr\nj\x08g\x06P\x04\x01\x025\xffA\xfc\x8c\xf9m\xf7\xe7\xf5\x9b\xf4\x1f\xf3I\xf1]\xef\xee\xed\x1b\xed\xb7\xec\x8d\xec\xb3\xec\x04\xed\xf0\xed\xc2\xef^\xf2\xea\xf5\x80\xf9\x9c\xfc\x87\xff&\x02\xde\x04\x1e\x07\xfc\x07\x08\x08\xab\x07s\x06\xc5\x04\xa2\x02~\x00\x03\xff4\xfdq\xfa\x8e\xf7`\xf5V\xf4\xc2\xf3\x85\xf2Y\xf1\x9c\xf1\xa8\xf3X\xf8\n\x01\xb7\r\x0b\x1c\xac(\xf52\'>=K\x89V\xd6[\x1d[\rW\xe5P\xe6F\xf08\\)\xda\x19\xa4\t\x7f\xf8\xe8\xe8\xaf\xddo\xd6\xf4\xd0\x92\xcbx\xc7\xd8\xc5,\xc6]\xc7\xa3\xc8\xc0\xc9\x00\xcb\x88\xcc7\xcf&\xd4\x9d\xdb1\xe5\x82\xef3\xfa\x9c\x05\t\x12\xb5\x1e\xea)t2\x017\x957\xbc4\xfa.\xbe&k\x1c@\x10m\x03q\xf6R\xeaK\xe0\xed\xd8/\xd4\x10\xd1\xf7\xcei\xce\xa2\xcfu\xd2\xef\xd5\x98\xd9\xa2\xdd=\xe2\x88\xe7\x82\xed\xa1\xf4\r\xfd\xeb\x05\x85\x0e\xef\x16*\x1f\x98&\xe5,H1;3{2D/\x14*\x18#]\x1aa\x10\xad\x05\x90\xfb\xe8\xf2\xdc\xebm\xe6\xdc\xe2:\xe1G\xe1\xd0\xe2\xa1\xe5n\xe9\xbd\xed\xa8\xf1.\xf5\xe9\xf8\xca\xfda\x03j\x08T\x0c&\x10\xa5\x146\x19\xa8\x1c\xac\x1em\x1f\xd5\x1eM\x1c\xb6\x17\x04\x12\xcc\x0b\xe2\x04D\xfdB\xf5\xe3\xed\xfb\xe7\xdc\xe3\xf8\xe05\xdf\xc1\xde\xf9\xdf]\xe2\x8d\xe5o\xe9\xb4\xed\x0e\xf2\xd6\xf5_\xf9o\xfd\x9e\x01\x7f\x05\xfd\x07n\tt\x0b\xbb\rR\x0fP\x0f\xdd\r\x07\x0c\xb5\t\t\x06.\x01\x95\xfc0\xf8a\xf3\xec\xed\x07\xe9\xa3\xe6|\xe6\x17\xe6j\xe5\xbe\xe5&\xe8"\xec\x8c\xef\x9f\xf2\xc5\xf6$\xfbA\xff\x96\x05U\x12\x97%\x898\xabDdK\xefR,\\\x7fax^WUmK\x8c?X/b\x1d\n\x0f\\\x04\xa4\xf8\xef\xe9\xb2\xdc\xc6\xd5\xf9\xd2\xc8\xceh\xc8h\xc3\x10\xc11\xc0\xf7\xbf\xad\xc1\x9a\xc7J\xd01\xd9g\xe2\x0b\xee\xd1\xfdP\x0e \x1b\xd3#<*\xe7.\x061\xab0O-\xd4&\x1d\x1e\xd4\x14\xb8\x0b\x07\x03\xaf\xfa\x90\xf3\xb3\xec\x8e\xe5h\xde\xa5\xd8\x9b\xd4\xa8\xd0F\xcc$\xc8]\xc6\xd3\xc7\n\xcc^\xd2x\xdb\xca\xe6[\xf2=\xfdt\x08#\x14*\x1e\xae$\xfb\'\x87)\x9f)\x8c(\x17&\x9e"T\x1e\x97\x19\xcd\x14\xcc\x0fr\n\x88\x04\xc7\xfd\xed\xf6\x99\xf0\xb9\xeaY\xe5\x1b\xe1\xfd\xde\xc2\xde\xe8\xdfv\xe2X\xe7\t\xee\xfd\xf3\xd5\xf8\x02\x00<\r\x1a\x1c\xb5$\xb4%\xdb%\xe8)\xab-p+\xc7$\xf3\x1eY\x1an\x13=\nl\x03\x1c\x01\x97\xfeh\xf7\xaa\xed\x9e\xe62\xe4h\xe2\x01\xde_\xd9g\xd8\xbb\xdb1\xdf\xff\xe1\xc0\xe7\xaa\xf1\xd9\xfb\x14\x01&\x03\xf1\x07=\x0fO\x14^\x13\x07\x10\xf9\x0e\xf5\x0el\x0c\xef\x07\x12\x05-\x04\x9e\x01\x0b\xfb\xdb\xf3\xbb\xefS\xed}\xe8\x89\xe0\x91\xd9\xfd\xd6A\xd8\xcb\xd98\xdc0\xe1L\xe8\xe0\xef\xf1\xf5\xad\xfby\x01\xd7\x05\xef\x08\x18\n\xa8\n\xb8\x0b\xb6\x0eI\x14\x93\x1aO%\x116\x08H\x90S\xbeV\xd6WVX\x86R\xf6C(4\xa2(\xb6\x1d)\x0f>\x01D\xfbx\xfa(\xf6\x1e\xed\x90\xe3l\xda\xac\xd1\x84\xc8\x97\xc1\xa1\xbf\xa1\xc1\xf9\xc6\xe0\xce\xda\xd8\x14\xe6i\xf5n\x02\xdb\n\x9c\x0f$\x12\xe7\x132\x15\x98\x15\x06\x16\xe9\x16\xef\x18\x1e\x1b\xc3\x1aM\x18K\x15+\x10N\x06-\xf8\x16\xea\xe8\xde\xcd\xd5\x17\xce\n\xcak\xcb\x92\xcf\xa9\xd3\xea\xd6W\xdb\xb9\xe0\x11\xe51\xe8\x9e\xeb\xac\xf0\xe7\xf6\xcf\xfe\x9a\x08\xcb\x13\x0f\x1e\xe2%\xc1*\xc7,w+\xa0\'\x94"\xdb\x1c\xa8\x16U\x10\x95\nd\x06\xa8\x03K\x01\xb0\xfd\x9d\xf8-\xf3\x9a\xeeE\xea\x9a\xe6\x03\xe4\x92\xe3\x17\xe5)\xe8s\xedz\xf5P\xfeP\x06\xa7\x0b\xf5\x0e4\x11_\x13\t\x16_\x17U\x19\x02\x1e/%\xb5*\xc7+5) 
$\xb0\x1b/\x10\r\x04/\xf9\xc2\xef]\xe7\xc2\xe0\xa8\xdc\x99\xdb\xa1\xdb\xac\xdcK\xddu\xdd\x0b\xde\xb9\xde\xb4\xe1\xd3\xe6\x0e\xef"\xf9\xb9\x02M\x0bB\x12\x8f\x17[\x1a\x90\x1a\x1b\x19\x1e\x16\xda\x11\xa2\x0c~\x08\t\x06\xf8\x03\xc1\x00\xbf\xfb\x84\xf5\x87\xee\xd9\xe7k\xe2R\xdf\x1d\xde\xd8\xdd\xf9\xdeA\xe1\x96\xe5m\xeb^\xf0l\xf4r\xf7\x03\xfb\x87\xfe\x03\x02\x94\x06\xa2\n\xc2\rZ\x0f\x0b\x10=\x11\xd5\x12\xbe\x13~\x13\x11\x10I\x0c\x02\n\x8e\x08Q\rf\x1e\xd55\xf9B\xc9;,+_$\x07&\x0c#d\x1b\xd8\x17\xe1\x19\x8a\x18\xd6\x0e\x11\t|\r\xef\r\xc7\xff\xb0\xe9g\xda\'\xd6\xa9\xd6\x83\xd8#\xdf\x9f\xe6\x90\xe8\x04\xe5\x88\xe1\x92\xe3\xfb\xe9\x8a\xed\x0e\xec\x9f\xeaX\xee\x97\xf7\xfa\x02\x04\x0eg\x163\x18\x06\x13\x9b\x0c\r\t\xca\x07\x86\x07\xa2\x06\xc4\x03\xa5\xfe#\xf9\xe3\xf7l\xf9\xc4\xf8\x9b\xf3\xc5\xeb:\xe4\x15\xdfR\xde/\xe2\xe2\xe89\xefX\xf3L\xf5k\xf7z\xfb\x93\x00\x98\x04\x07\x07\xd8\x08\xd5\n\xd0\rS\x12\xe6\x17\x89\x1b\xaf\x1a\x1e\x15\xdf\r}\x07\xde\x025\xffU\xfc#\xfa\x1c\xf8\x8b\xf5\xf0\xf3\x0b\xf4@\xf4)\xf2I\xee\xe5\xeb\x1a\xec\xbb\xee\xc3\xf3\xe7\xfa\x08\x03\x88\x08\xb2\n\xc3\n\xa4\x0b\x18\x0e\xf8\x10\xe4\x12\xe1\x13\xf0\x15\xfb\x18\x1d\x1au\x19\xd9\x17\x0b\x16\xe1\x10\xe0\x07\x85\xff\'\xfaI\xf7\xcf\xf4\xc9\xf2^\xf1\xa7\xef\xd1\xec\xc2\xe9\x1c\xe7\xd7\xe6\\\xe8\x05\xebd\xee\xc9\xf3\xd2\xfaU\x01c\x05\xaf\x07\xce\t6\x0bL\x0b\x8e\n\x80\nh\x0bG\x0c\xfb\x0b\xa0\nW\x08\xf3\x04C\x00\xcd\xfa\xa9\xf6\xb3\xf4=\xf4j\xf4\xa4\xf3_\xf3\xd7\xf3T\xf4\xc3\xf4\xc3\xf3K\xf3\x10\xf4\x91\xf6\xb1\xf9\x9d\xfc!\xff\x8d\xff\x18\xff \xfdQ\xfc\x10\xfd\xd4\xfcn\xfc8\xfb`\xfb\xba\xfbD\xfae\xf7\x98\xf5\xb2\xf5\xf7\xf5V\xf5\xd1\xf5w\xf88\xfc\xb1\x03y\x15D.\x01=\x9c7\xc6\'\x7f#(+\x821\x0f3\xe17uB\xe0@s.\xb7\x1bR\x167\x13x\x03\x02\xf0\x83\xe8\'\xecX\xec\x12\xe6\x9e\xe2\xa5\xde\xb9\xd4:\xc6\xfe\xbeo\xc6-\xd6,\xe4t\xebJ\xf0v\xf5=\xf9G\xfb\x16\xff0\x08\xbc\x10.\x16\xe2\x1b\x81"?\'\x9b&3!\x1f\x18:\r\xc1\x047\x02*\x03o\x02\xac\xfdP\xf5!\xeb\x9d\xe0\xd1\xd8k\xd5\xd4\xd5\xb2\xd7\x97\xd9F\xdc\x92\xe0\xc9\xe5\x05\xe9k\xea\xc8\xec\xe6\xf1,\xf8g\xff\x1c\x08\x1d\x11\x1b\x16/\x16h\x14;\x13\x8e\x12F\x12\x8f\x12)\x13}\x12I\x0f"\n\x88\x04\x16\x00\xca\xfc~\xf9R\xf6\xd6\xf4O\xf5\x82\xf5{\xf5\xbb\xf5\x04\xf7\x9c\xf7\xaf\xf7\x18\xf9\xa1\xfc\xcb\x00y\x04\xf6\x06e\t\'\x0b\xc4\x0c\xfd\r\xda\r\xb9\x0c\x8c\x0c}\x10\xc2\x15\x80\x18d\x16\x92\x11\xcc\x0b\xb9\x05\xc0\x01_\x01\xce\x02\x13\x02\x94\xfd\x0b\xf8\xda\xf3\xfb\xf0\x18\xf0w\xf0\x19\xf1C\xf1D\xf1n\xf2L\xf4P\xf6j\xf8$\xfa\x04\xfb\'\xfc\x88\xfe\xc7\x01\xe8\x03\x10\x05\x9f\x05:\x05\xe9\x03]\x02#\x02\xb4\x02\x9a\x02\x0b\x01\xa2\xfe@\xfc\x17\xfan\xf8(\xf7\r\xf6R\xf4\xe6\xf1:\xf0\t\xf0\xce\xf0(\xf1p\xf1\xea\xf18\xf2\xb3\xf1\x9f\xf1\xb3\xf3c\xf7P\xfa\xe8\xfb0\xfd\xda\xfe&\x00\xeb\xff\x0b\x00\xdd\x00u\x03]\x06\x12\x08\x9b\x08\xef\x06\xfb\x057\x058\x04\xd7\x07\xf7\x16\x801e?\'3\x0b\x1a\x1e\x10,\x1a\x92$\xad+R7MA\x1d5\x8d\x16\xeb\x04I\t\x9a\x0e\xcf\x05-\xfdi\xff\x89\x01\xbb\xf6\xda\xe7^\xe2\xae\xe2z\xdc\xa8\xd2S\xd2\x0b\xe0\x08\xee-\xee\x15\xe6\xd8\xe1z\xe3S\xe5;\xead\xf8X\x086\rq\t\\\x08\xee\x0cn\x0e\xdd\x0c<\x0f\xaf\x14q\x16\xd2\x11\xca\x0e\xb1\x0e\xe2\x0b\xae\x01@\xf6x\xf1\x80\xf1)\xf1U\xefT\xee+\xec?\xe4\xab\xda\xf6\xd7O\xdd]\xe4\xc4\xe8\xa5\xecq\xf0\xb7\xf1\x92\xf0\xa2\xf1y\xf7;\xff\xef\x05S\x0b\xfb\x0fY\x13\x81\x13x\x12\xa1\x12?\x14\xb7\x15@\x16\x0f\x17\xf5\x16\xc8\x13#\x0e>\t]\x06\x9f\x03(\x01:\x00,\x00\xaf\xfdH\xf9\xfa\xf5"\xf5\x88\xf4\x8c\xf4\x81\xf6\xfb\xf9\xc4\xfb\xa9\xfb\xb9\xfc\x05\xff\xa2\xfe6\xfe\x8a\x01Z\nZ\x0f\xa8\r\xff\ni\t\xd5\x07\xa3\x04\x1b\x08r\x0fw\x11J\x0b\xac\x03\xcd\xff~\xfc\xbc\xf9c\xfcM\x01\xff
\x00\x87\xfb \xf7\x9f\xf6\xe7\xf5\xd6\xf4\xa7\xf6$\xfaN\xfb\xb2\xfa\xf9\xfb\xe3\xfd\xc2\xfd\xdb\xfb\xd0\xfbk\xfd\xe5\xfe\xa9\x00\xed\x02\x0b\x04\xd8\x017\xfe>\xfc\xc4\xfc\xb5\xfew\x00Z\x00T\xfe\xcb\xfa\x89\xf8\xee\xf7\xd0\xf8\xf0\xf9\xd8\xf9|\xf8}\xf6"\xf62\xf7\xab\xf8]\xf9R\xf9K\xf9A\xf9f\xfa\xf2\xfc%\xff\x94\xff\x9c\xfem\xfd\xef\xfc\x84\xfc\x97\xfdJ\xff\x08\x00\x8b\xfe\x0c\xfc\xab\xfb\\\xfbM\xfb\x9b\xfa\xff\xf9Y\xf7\x94\xf2\x1c\xf4\x81\xff\xd0\x11N\x1f\x0f d\x14\xa0\x03\xf4\x02\xc9\x17\xa15\x88Aa8\xc0+\x9c!\x02\x1a\xd9\x15b!\xcc2F19\x1b=\x07\xe7\x06\x94\x07\x0e\xfd6\xf2\x9c\xf4$\xf82\xee\xf2\xe2n\xe4\x11\xe7\xd9\xda\xb8\xcb\x88\xd0\xd9\xe3A\xee\xc0\xe9S\xe5z\xe6\xa7\xe3\xcb\xe1n\xec\xf0\x01\xd7\r\xf2\x07J\x00w\x01\xfd\x05R\x06R\t?\x13\xad\x1a\x13\x15i\t\x93\x03\x90\x03@\x02\xf5\xfe\xd6\xff\x95\x02[\xff\xb0\xf5\xc0\xed^\xeb2\xea-\xe8\x81\xe9\xd1\xee\xeb\xf1\xac\xed\xe3\xe7?\xe7\xac\xeam\xee\xd0\xf2\x0f\xfal\x00\xb1\x00p\xfd\xf9\xfc\x11\x01\xab\x05\xa3\t\x80\x0e\xec\x12]\x13\xa9\x0f\x96\x0cQ\x0cm\r\x9a\x0ek\x10\x94\x12\x19\x12\xb1\rf\x07<\x03\xa3\x02Q\x04\xcf\x05e\x06U\x05t\x02I\xfd\xf2\xf8O\xf8\xeb\xfa8\xfd.\xfeg\xfeI\xfe`\xfbP\xf8\xb3\xf7\x84\xfa\xe0\xfd\xf8\xff\x0e\x03\x8d\x04\x95\x03\xe6\xfe\xb6\xfc\x9d\xff\xe4\x05W\x0c\xaf\x0f\x9b\x0e8\x08X\x01\x1e\xff\x9d\x02\x97\x08 \x0cx\x0b\x7f\x05\xf4\xfd\x0f\xf9\xaa\xf8h\xfbs\xfd\x95\xfey\xfd\x15\xfa\xa5\xf6\xc7\xf4\x00\xf6X\xf7m\xf8\xb9\xf9\xf6\xfa*\xfb\x15\xfa4\xf9*\xf9\x88\xf9\x93\xfa\xd7\xfcF\xff\xad\xffu\xfd\xe0\xfa\xb0\xf9\xb4\xfa\x9e\xfc\xb7\xfe\xcf\xffJ\xfe\x9e\xfbg\xf8l\xf77\xf8\xad\xfa\xe5\xfc\x9d\xfc\x8a\xfb\xc6\xf8\xab\xf7@\xf6\x86\xf6m\xf8F\xfaz\xfc\x94\xfc\xd2\xfc%\xfc*\xfa\xb0\xf8.\xf9\x98\xfc\x19\xfe\x0b\xfe!\x06\xaf\x15\x02\x1d\x13\x0fn\xff\x9c\x05u\x19\xc3\'&//6\xf3/X\x17\x82\n\xcd\x1c\xa55A7F-x(B\x1d\x07\x07b\xfe_\x0f\xfd\x1b\x06\x0f\xe8\xfd"\xfb\x13\xf7~\xe7P\xdf\xb3\xe7\xbb\xed\xf0\xe3\xf3\xdc\xc4\xe2\xb2\xe5\xeb\xdb\xd2\xd3\xec\xda\x81\xe6\x9d\xec\x8b\xef\xeb\xf3\\\xf4\xea\xedG\xebe\xf4W\x03\xff\r\xae\x0eF\x08$\x02w\x00\x05\x04$\x08\x9a\x0b\x0b\x0eY\x0c\x90\x04\x83\xfc\x01\xfb\xa3\xfcE\xfal\xf5\x85\xf5\x89\xf8\xc7\xf6/\xf03\xec\xcd\xeb\x11\xea\x9f\xe8\x16\xed1\xf5\xae\xf7\x08\xf3\x8f\xef,\xf1\xad\xf4>\xf8q\xfe\x00\x06\xbf\x08\x97\x05\x17\x03\xb1\x05\x1e\nu\x0c\xa6\x0es\x12"\x14\xe4\x10\xcb\x0c\xdb\x0c?\x0f;\x0f\xef\r\x91\x0e\x05\x0f\x9d\x0b\x83\x06\x12\x05(\x06\xba\x04\xba\x02\x03\x03\xe4\x04f\x01y\xfbM\xf9\xeb\xfa\x8f\xfaL\xf9\xdb\xfc\xc9\x02^\x00\x93\xf7P\xf4y\xf9\xd0\xfee\xff\xd0\x02X\x07\x07\x05m\xfd\x86\xfc#\x03\xc5\x07+\x06[\x06\x0f\t\xb3\x06\x12\x01\x00\xff\xf9\x02?\x04\xdc\x01r\x01}\x02&\x00\x9e\xf9\x10\xf7\x1c\xf9\xc4\xfa\xf2\xf9\x08\xf9\xd3\xf8\x8b\xf5f\xf1-\xf0\xc2\xf2\n\xf5(\xf5\x0c\xf5S\xf4\xff\xf26\xf2\xa7\xf30\xf7\x0f\xf9Q\xfa\xbd\xfaQ\xfb\xfd\xfb\xc4\xfc\x05\xff\x9f\x00\x94\x02\x89\x03%\x04\xcb\x03\x10\x03(\x03\xb7\x03\x99\x04W\x05\x12\x06\x86\x05,\x04r\x02c\x02\xa4\x02L\x03!\x04\x8c\x04\x03\x045\x02o\x02\xa5\x03V\x04C\x04\x1d\x04\xdc\x04[\x03\xc8\x03;\x05\xe6\x06\xa4\x07\xbc\x07\xcf\n\xfe\n\\\x0c\x84\x119\x17\x8f\x16\xce\x0f\xef\r\x9d\x12\xe5\x15\x99\x16m\x19q\x1b\xa0\x16\xe0\x0bk\t1\rI\x0e\xe9\n\x0c\x08\x9c\x08\xc8\x02\xe5\xfa\xd0\xf7u\xf9\x9c\xf8\x07\xf3\xf5\xf0%\xf2\xf2\xf0,\xec\x81\xe8\xf8\xe8\xda\xe9E\xe9\xb9\xe9*\xed\x80\xef\xfb\xec\xba\xe9\xcf\xeaQ\xf0\x86\xf3\xd3\xf4V\xf7\x90\xf9P\xf94\xf7\xbe\xf9J\xfe\x12\x00\xa0\xff\xe4\x00\xe7\x03\xa8\x032\x01\xe1\x00\xb8\x02\xd4\x02\xf6\x00\x88\x01\xfe\x03\xc6\x035\x00\xc5\xfdt\xfe\xa2\xfe\x03\xfe`\xfe\x80\x00\x98\xffz\xfc\x11\xfb\xcf\xfcf\xfe\x0f\xfe\r\xffg\x00\
[binary payload omitted: long run of escaped raw bytes (appears to be 16-bit PCM audio fixture data), not representable as text]
\xfe\x81\x08\xa9\xff\xa3\xf8\x05\nz\xfe\xaa\xf6s\x07\xd3\x01\xb6\xf5\xd5\x05\x12\xfd\x9e\xf4\x16\x10\x13\xff}\xfd\xe5\x04%\xf7F\x00\x9a\xff\x02\x00%\x06|\xfaI\x01\x92\x06/\xf9\xdd\x056\xff\xf2\xf3\xc0\n\x9c\xfch\xfe\x11\t\xe8\xfd\xc9\xf8\xa2\x00^\x06\xa0\xf7\x18\x05\xf9\x01R\xf78\x02\x1f\x03\xdd\x01\r\xfb\xc8\x03\x83\xfc\xd2\xfb\x13\t\x08\xf9\x8b\x02T\x00\x80\xfa,\x05\xe0\xff\xd7\x01\xee\xff\xe8\xfe\xf4\xfaY\x00.\x02J\x017\x00\xe4\x00A\x00,\xfc\x12\xfe\x9e\x03o\xfd\xff\xfe\x98\xff\x12\x05\xd2\x04\xc4\xf9\x12\x00w\xfd\xdd\x037\xfc\xc7\xfd\xe3\x04^\x07\x13\xfa\xdd\xfe?\x04\x0c\xfb)\x02\xa1\x00?\xfc\x87\xfc\xd7\x03\xd6\xfd\x1d\x05\xeb\xfbs\xfe\x10\x00\xb2\xf8n\x02n\xfe\xa8\x01\x16\xfe\xf4\x00\x98\x02~\xfe\xe3\xfc4\x03\xb0\x00?\xfe\xb8\x02A\x00T\x00i\x04\xd7\x00\x17\xfd\x0c\x00@\x03\x00\xfd\xb7\x00\xc4\x02#\xfcQ\xfe\x16\xfd)\x01Y\xfeW\x00\x14\xff8\xff\xf7\xfcN\x02\xe2\x02e\xfb2\x04V\xff\xca\x02:\x03\x16\x03\x93\x02l\xfd\xb6\xff\xaf\x01n\x03\xf0\x02\xd1\xffx\xfe\xe7\xfc \x00\x94\x02\xe6\xfa\xfa\xfe\xaf\xfd(\xfd\x9b\xff\x03\xfe\x91\xfd\n\xfd8\x00\xe5\x00b\xff-\x02\x96\xfe\xce\xf9]\x04\xa1\x02\x84\x00\x04\x06`\x02\xf7\xfeF\x01\x85\x00\x06\x00\x10\x03.\x03;\xfe!\x00\x8d\x020\x00\xdf\xfe\xde\xfd\xcb\xff\x08\xff"\xfc\x19\x01J\x01Q\xfd\x11\x00l\xffk\xfd!\xff\xf6\x00\xec\xfe\xb6\xff\xda\x04\xf0\xff|\xfe*\x02\xf2\x00\xcb\xff\x17\x03b\x01\xdc\x01\xb8\x00\xed\xff\xce\x004\x00\xc0\x01\xd1\x005\x01\xab\xfe\xb0\x00\xae\xff\x98\xfe\x88\x00\xa7\xfeJ\xff\xd5\x01\x99\x00\x86\x00\x97\x00\r\xfcF\xfd\xfe\xff\xb4\x00\xff\x01]\x02|\xff\xcd\xfe\x87\x00\xfb\xff\xa3\xfeW\xfe\xcf\xff\xc8\x01\xd3\x01\xbc\x02\xe5\xff:\xfe8\xfdc\xfc.\x00\xfc\xff\x02\x00\x8d\x00\x1c\xff\xd9\xff\x1e\x00r\xfd\x8b\xfc\xf1\xfc\xf5\xfc8\x00\x1c\x02\xfe\x00\xfc\xfd\xf4\xfd^\xfd\xf2\xfb\xc3\xfd\x9c\xfe+\xfe\x00\xffW\xff\xcc\xff\x8b\xfd\x94\xfc\x93\xfb\xc6\xfb\xf1\xfco\xfdM\x01\xef\xff\xfa\xfc\xe3\xfe\xee\xfd$\xfdO\xfe\xf0\xfc\xec\xfc\x13\xfd1\xffU\xff\xf5\xfe\x0c\xfe\x7f\xfd\x14\xfd@\xfc\xf6\xfa\xfe\xfb+\xffB\x02{\x07\x1f\x0b\xe1\nB\x08n\n\xdc\x0c\x03\x11c\x12\x19\x14\xd9\x17\x9d\x18\xef\x19f\x19\x9c\x16\xc7\x11\x9e\r\x11\x0b\x95\x0c\xb3\r\xb7\t\xc4\x049\x01\x99\xfb\xf8\xf6\xb5\xf4X\xf1w\xeeN\xeb.\xed\x86\xed_\xee\x11\xee\x93\xea%\xeb\xe0\xea\xec\xed\x16\xf2Z\xf7\xed\xfaA\xfd*\xff\xc6\x025\x06\xa9\x05\xf3\x07\xe4\x06\xa0\tb\x0c\xa7\x0c\x90\x0c\x16\x07\x07\x05m\x01\xf8\xfe+\xfek\xfax\xf6\x0f\xf4/\xf3\xd6\xf1\x0f\xf1\xbc\xed\xa0\xeaU\xea\x07\xea|\xed\xdd\xefM\xf0\x84\xf0x\xf1;\xf3\x7f\xf5\xde\xf8\xe1\xf7\xa3\xf7\x96\xfbd\xfe\x03\x00\xda\x02q\x01F\xfeK\x00`\x00\x06\x02M\x03\xbd\xff:\xfe\x0f\xff\x05\x00\x11\xff\xbd\xfbS\xf9)\xf8\xcd\xfaG\xfdR\xfc\x9d\xfb\xfd\xf8\r\xf9\xbb\xffc\x04\xe2\x048\x03b\x03\xf1\xff\xa3\x016\nY\x15\xd8#\xda(\xd9*\xc8+\xe2,\xad.I+\xac)Q-24\xa68~3\xd2(\xf7\x1b\x8d\x0f\xb3\x08(\x03(\xfe4\xf8E\xf3\xe9\xf1\xe1\xf03\xec\xf2\xe1J\xd7G\xd4\xab\xd6x\xddb\xe5~\xe9\xc9\xeb\xb9\xed]\xf0t\xf3\xd4\xf7\xbd\xf8\x9d\xfb\x9e\x02\xc1\n\xb3\x12\x11\x15\xf0\x11_\x0cc\x07F\x05\xe1\x04\x9e\x03\xbf\x00\x1d\xfe\xb8\xfa\xc2\xf7\xcc\xf4%\xed\x10\xe6\xef\xdf\xee\xdd\xa2\xe0\xe3\xe3\x18\xe6W\xe7\xa8\xe7"\xe9]\xec\x8b\xf0\r\xf5\xf3\xf7\x16\xfd\x82\x04\x0b\rR\x14\x0e\x17f\x16\xc7\x15\x15\x16\x99\x17\xc9\x19C\x19h\x15\xbc\x12\x91\x0f\xed\x0b0\x07\x18\x01\x19\xfb\xed\xf6\x9d\xf6\xd5\xf5R\xf5R\xf3\x0e\xf1+\xef\x9c\xf0\xf4\xf3\x10\xf5}\xf6\xd9\xf8\xf5\xfa\xb7\xfez\x02\x1c\x01\xe0\x00\x99\x00\xa4\x00\x9f\x04\x96\x05\x18\x03S\x01\xab\xfd\xa7\xfc\x86\xfe\xa9\xfa\xfb\xf6\xb1\xf3\xaa\xf1 
\xf2M\xf3\xb1\xf1\xff\xee\x06\xeeN\xeb\xdf\xed;\xf2\xea\xf3~\xf6p\xf4U\xf7\xc1\xfb\xde\xff\x19\x03\xae\xff#\x04+\t\xc3\x16\x15-\xb76\xdd8\x1f1\xa1+\xa71p7\x8c6\xd0344\xd73p/\xdb$\xeb\x16\xc9\x04\x9c\xf4\xd0\xed\n\xef\xcc\xf1\x01\xee\xb2\xe5\x8f\xde\xde\xdb\xeb\xd9\xe6\xd7h\xd7Q\xdaC\xe1#\xed"\xfaS\x02\xcf\x03\xc8\x00Y\x00\x04\x04\x11\x0c*\x14\x8a\x18\\\x1a\x0b\x1b\x94\x18y\x13&\x0c^\x00\xec\xf7\xe5\xf4\xb6\xf3"\xf4\xc9\xf0@\xe9\x7f\xdf\xe7\xd6\xde\xd4.\xd3\x80\xd3\xac\xd6>\xdc\xdd\xe4\x1a\xebb\xef#\xf1m\xf1r\xf5\x07\xfd0\x08\x7f\x12,\x18\x06\x1ai\x1a\xd1\x1af\x1a\x8e\x18@\x14\xf9\x11\x00\x13&\x16:\x14\xae\x0c\xe1\x02q\xfa\xb3\xf6s\xf5\x84\xf65\xf5\xe1\xf4M\xf4\x0f\xf4\x0e\xf8\x1c\xf7\xce\xf5\xd6\xf7\xb8\xf9\xb8\x02\xaa\tl\x0c\r\rU\n\xc5\t\xc9\tP\n\x87\x08\x98\x05\xc6\x03\x0c\x02\x17\x01\xe2\xfdj\xf6\xf5\xee.\xeaD\xe7[\xe7h\xe6\xd1\xe4\xa9\xe4\x15\xe4\xee\xe6\x9f\xe81\xe6Q\xe6\x13\xe7\x7f\xec\xb5\xf5\x8b\xfa;\xfc\xe9\xffA\x03\x9f\x06\x94\x0bE\x08\xc7\x05\x18\n;\x0f\xb7\x16\x1c\x199\x19\xe7\x1e\xdb+\x025\x193r*\xeb"\xd6!^#\xbe\'\xbd,R,\xc9"\xe6\x15c\r\xa8\x07\x9c\xfe\xa3\xf3\xb0\xef\xeb\xf12\xf6\xe4\xf7\xc6\xf6\xff\xf1\xb7\xe9\x19\xe3\xcc\xe4\x97\xee\\\xf8\x1b\xfe)\x03H\x07\xb9\x08\x05\x07\xd4\x03\xdc\x01\xb7\x01\xc8\x04\x07\nw\x0f\x8c\x10\xb5\nS\xffM\xf4\x9b\xef\xe1\xeb~\xea\xcf\xea\x1d\xeb\x06\xed\x87\xea\xb7\xe7\xb3\xe3\xb7\xde\xa2\xddN\xe0\xc5\xe8\xce\xf2\xc6\xfaS\xfe\x0f\x00`\x00&\x00l\x00\x95\x02.\x06\x00\x0b\x00\x11\xcc\x12O\x13j\x0e\x92\x06\xea\x01\xdb\xffK\x01\xbf\x02\xd3\x04\x8f\x04s\x02c\x00\x82\xfc\xf0\xfa\xfc\xf95\xfa\x13\xfd\x86\x00y\x05.\x08{\x07\x14\x06\xcd\x04V\x05\x89\x08f\n\x05\rA\x0e\x83\x0e.\x0e\x1d\x0c(\x08\x8d\x03\x10\xff)\xfcS\xfc\x89\xfb\x7f\xf8Y\xf3~\xee\xeb\xea\x8d\xe8X\xe7\xf1\xe4n\xe4W\xe7\x7f\xe9\x06\xed\x05\xef\xf1\xed~\xefF\xf0\xe2\xf2T\xf8\xa7\xfa\xf2\xfd7\x01|\x03T\x05\xb6\x03\x91\x01\xbf\xfe\xc4\xfe\xd0\xfek\x02/\x04H\x03\x84\x02\x00\xfe\xea\xfaV\xf5f\xf55\xffL\x0b:\x19\x86&\xd30\x003\x86+\xa1\x1e-\x1a\xfd#81\xb6:s:t2\xf6%;\x17\xe2\x08.\xfd\x88\xf3U\xed\xa6\xed\x1c\xf2)\xfa\xc9\xf7\x14\xec\x07\xe0J\xd9\xde\xda\xb6\xe1\x05\xec=\xf7\x11\x01\xf0\x07\x10\n\xb7\tE\x07\x05\x02\xb6\x00\xf6\x04\x87\x0c\xf5\x13q\x15\xeb\x10\xee\x07g\xfc\xd8\xf3\xe1\xedL\xe7M\xe5\xbe\xe6\xa1\xe7m\xea)\xe9\xe7\xe3U\xdd\xed\xd8\xe1\xda\xa5\xe1m\xea\xf7\xf1\x7f\xf9\x18\xff\xd4\x02I\x04=\x04%\x03\x11\x05 
\t\xa6\x0e\x00\x14\xc0\x14\x0c\x12\xbf\r\x02\n\x0b\x04\xda\xfe:\xfc\x1e\xfft\x05\xfb\x07i\x06\r\x01\xf5\xfcn\xfb\xe9\xfa\xfb\xfc<\x01*\x05\xf3\x07r\n\xec\t\xf9\x065\x03{\x00\xf5\x02\x8e\x06g\n\xf7\n\xf6\t\xcb\x08L\x05\xde\x02\x0b\xffB\xfc\xbb\xfc\xa4\xfc\x9f\xfd\x1f\xfd\xcf\xf8\xcc\xf2\xf8\xee\x9f\xec{\xeb\x8f\xecp\xec\xbe\xec{\xee\x11\xf0\xee\xf0\x8f\xf0r\xf0\x15\xf1\xd4\xf3\xfd\xf7\x98\xfb\x0b\xfd\xea\xfc\x98\xfc!\xfd\xe6\xfdi\xfd}\xfb\xa0\xf9>\xf8\x9b\xf9-\xf9\xd4\xf6\xfa\xf3\x00\xf4\xb8\xf9\xc3\xfb\x19\xfd\xf3\xfb\xcf\xf9k\xfeu\x03Q\x0c4\x1c\xcb+\x954\x817\xd6638\x179\xc16\xe16\x819\x819\x8c5\xb9.\xe4$\xfb\x17P\x06\xad\xf8\xdf\xf2O\xefZ\xed\x95\xea\n\xe9)\xe7\x96\xe2\x98\xdd\xef\xdcy\xdf\x00\xe4\xd2\xeby\xf5\xbd\xfe\xad\x04a\x05\xfc\x04\xc1\x04\x06\x05=\x07H\n\xef\x0e\xb9\x10\xb3\x0e\xde\x08\x94\x01.\xfbo\xf3\xaa\xedW\xeb\xae\xebR\xeb\x03\xe9@\xe6\xba\xe2\xfd\xde\xf9\xdb\x9f\xdc\x0b\xe1\xe1\xe5\xd5\xeb\xca\xf0~\xf4\\\xf7t\xf8\xbc\xf9\xc3\xfc\xc4\x00f\x06\xaa\x0b#\x0e\xfb\x0f)\r\xd5\tz\tg\t\x97\nw\n\xd2\x07\xfa\x06\xb4\x07\xbb\x07\xf8\x07\x8f\x05i\x02\x02\x03\x0b\x05l\x07s\t\xf4\x07j\x07\x14\x08o\x08\x86\x08\xde\x06T\x05\x81\x05\xad\x05\xec\x06e\t\x93\tC\x06\xff\x03x\x02\x1e\x02\x03\x014\xfe/\xfd\xf2\xfd\xdd\xfc\xb5\xf9\xed\xf5s\xf0\\\xec\xd3\xeaH\xe9M\xe9\xf3\xe9%\xe8\x86\xe9\xd9\xe9\x9f\xe8Y\xe9\xfa\xe7\xb3\xe9\x1e\xef\xa3\xf2\xb9\xf6\xbf\xf9\xad\xf9\x14\xfa\xe8\xf8.\xfa\x87\xfc,\xfe\x0e\x00\xfc\x01R\x01\x08\x01j\x00\x18\xfd:\xfeu\xff\x8b\x01\xfa\x05\xba\x06\x02\x06\xa4\x06\x15\x07]\tS\x0bK\x0c\xe0\x0f+\x14\x80\x17]\x1b\x10!\x8f(\xda0\x8401*o(\xcc*\xf2,I+\xdf\'l&Z"f\x1a\x90\x12w\x0by\x04S\xfb\xea\xf52\xf6\xbc\xf6\xb8\xf3p\xecG\xe82\xe7\xd5\xe5\xba\xe5u\xe7\xe3\xe9\xf2\xec\x9b\xee\xbe\xf1\xca\xf5\xec\xf5t\xf4\x06\xf5\xfe\xf7\xff\xfbI\xfe\x9d\xfe\xd0\xfd\t\xfcI\xfaA\xf7\xaf\xf4\xde\xf3\xf6\xf1f\xef\xfe\xee\xa0\xef.\xeeN\xeb>\xe9\xd2\xe8\xb8\xe8Q\xea\x0b\xed\x15\xf0\xd5\xf2c\xf4\xe6\xf6\x1e\xf9\x1b\xfb|\xfd\xef\xffD\x035\x07\xcb\nD\x0e5\x0f\x90\x0f\xaf\x10@\x10}\x104\x11\xd3\x10\xbf\x10m\x11\xb4\x13\x05\x15\\\x10Q\x08_\x04\xc2\x03}\x04c\x050\x03\xcd\x00(\xffD\xfd\xa3\xfc\x9a\xfb\xb7\xf83\xf8\xd0\xf9[\xfd\x93\x01{\x03\xf4\x00A\xfd\xfc\xfb\xf9\xfb\x9d\xfcv\xfc#\xfcZ\xfc\xcf\xfb\xc7\xfa \xf87\xf4\'\xf0>\xee(\xee\x18\xef\xc7\xf0\x1a\xf1\xa0\xf0-\xf0R\xef\xb5\xefK\xf1\xb5\xf2Z\xf5\x00\xf8\xc2\xfb\xf1\xfeB\xfe\x15\xfd(\xfd\xab\xfd\xa4\xff\xc8\x01Z\x02\x95\x05.\x07\r\x06\x8e\x07\x9c\x06E\x04\xc4\x04T\x05C\to\x0b\x06\n\xc4\n\xe5\t\xfc\x07\xd4\x05\x06\x05\xec\x06*\x08\xab\n\x8b\r\x11\x0e\x0b\r\x91\x0cl\x0f)\x14\x14\x18\xa4\x1ac\x1f\x96%\xf2&\x9f%A$\xcb"\xdc"\x0b!\x91\x1f^\x1f2\x1a\xbc\x13\x87\x0e\xbb\x08a\x02\xdf\xfb\xae\xf5\xe6\xf1\x13\xef\xda\xeb\x1a\xe9\xbe\xe5\xb3\xe1G\xdf\x96\xde\xae\xdf\xb5\xe1\xfb\xe2\x11\xe4\xee\xe5v\xe8\x1f\xeb=\xed\xb0\xefM\xf2\t\xf5O\xf8n\xfba\xfe&\xff\xa7\xfe\xa4\xfe\xa8\xffg\x00p\xff\xc5\xfe\xed\xfe\x1c\xfe\x0f\xfcd\xfa\x10\xf9\'\xf7 
\xf5\xcd\xf4\xe1\xf5\x9a\xf6p\xf6\xbc\xf6\xca\xf7\x88\xf8y\xf9j\xfbA\xfe\xb4\x00R\x03@\x060\t#\x0b\x1e\x0cE\re\x0f\xd2\x0f\xec\x0f\x7f\x10\xf7\x103\x11X\x0fQ\x0c!\nQ\x08\xfc\x05\xc0\x031\x01k\xff\xc4\xfd\x16\xfc\xfd\xfa\x88\xf9\xe3\xf6:\xf5\xbc\xf5}\xf6\x11\xf7\x80\xf7\xcf\xf7\\\xf8\xde\xf7\x04\xf8j\xf8\x9c\xf7x\xf7\xd5\xf7\x07\xf9u\xf9\xe2\xf8\xe0\xf7\xe4\xf5\x15\xf5\xbb\xf4-\xf5\xcb\xf4\xb2\xf5\x89\xf5\xd9\xf5u\xf7\xbe\xf7<\xf8\xa6\xf7\xb6\xf8\x05\xfd\x00\x01\x97\x00\x15\x01\xc4\x03\x15\x05\xf2\x04\xb9\x04Q\t\x0c\n\xfd\x08\xce\n\xac\x0b-\n\xc3\t-\n-\x08\xfc\x08\x19\x0b\xbd\n\x89\x08\x18\x08\x0c\x08\xb7\x07f\x07\x8d\x08\xb5\x07\xa1\x04\xef\x05\x91\x08<\x08@\x06.\x07\xf3\x07p\x05I\x04\xc9\x04\xab\x04\xd6\x042\x04\xbf\x02d\x04|\x07?\x08&\x07\xeb\x06i\n%\x0c\xb8\x0b\xed\x0bw\x0c2\x0c\x04\x0c\xa3\x0c\xd9\x0b\xb4\t\x95\x06@\x04\xda\x01\xc7\xff\x8e\xfe\xb6\xfcw\xfa\xab\xf7\x1b\xf6U\xf5\xfa\xf3\xff\xf16\xf0k\xf0\xfa\xf1\xa2\xf1\x80\xf1>\xf2>\xf2\xc1\xf1\xa3\xf21\xf5D\xf6S\xf6\xc1\xf7\x1b\xfa\xba\xfb\xe4\xfc\xf1\xfd\xf3\xfe.\xff2\x00(\x02\xbd\x03\xb9\x038\x03\xaa\x02M\x02\x13\x03\x90\x02@\x01O\x00\x14\x00\x84\xff\x1e\xfe\xc2\xfd\xa7\xfcy\xfa\xba\xf9\x8a\xfa\x1b\xfb\x02\xfa\xcb\xf9\xc1\xfa\x8a\xfbr\xfb\xf7\xfbI\xfd#\xff-\xff\x9d\xff\x1f\x02\x04\x04q\x03\x8f\x03"\x04\xae\x03\xb5\x04_\x03\xcb\x01\x9a\x03;\x03\x92\x01E\xfek\xfev\xfe\x82\xfa\xdf\xfb\x93\xfc\xdb\xf9\xee\xf9\xe6\xfa\xf4\xf8\xaa\xfc\x10\xfdP\xf9h\xfe\xc1\xff\x9f\xfd\xd2\x00\r\x03\xc8\x01\xc5\x02L\x03\x18\x07\x8e\x07=\x03\x82\xfeb\x05_\x05\xe4\xff(\x08\xf6\x01\x17\xfe\x8d\x02\xe6\x02\xc1\x03\xd5\x00D\xfc\x83\x00\xca\x03\x84\x01\x9e\x05\x97\x02r\xfd\x89\x02L\x04a\x02y\x03\xc3\x049\x001\x01\xd4\x04\\\x04\xda\x04\x1c\x01\x1d\x00\t\x00\xf4\x00\xa6\x02\xac\x02\xd0\xfeY\xfa\x95\x00\x9a\xff\xfa\xfa\xd8\x01\x05\x00#\xf8\x9b\xf8>\xff\xb2\x01\xd5\xfd\xca\xfc\xde\xfd\x80\xfe6\x00)\x01\xb5\x01\xf9\x00:\xfe"\x02\xc6\x05\xce\x01\xc6\x00\xbb\x01\x89\x02\x04\x01\x81\x03\xdd\x04G\x01\x9e\x00\xb0\x00\xff\x00\xa7\x03\xba\x040\xfeF\xff\xa9\x03\xe5\x02+\x00T\x00\xc7\x02j\xfd]\x01\xfe\x05_\x01f\xffH\x02\xec\x034\x00\xa4\x01\x10\x03\xac\x003\xff\x0e\x02<\x01\xcc\xfd[\x00\x12\xff\x1e\xfd\x87\xfb\xe4\xfbv\xff0\xfe\x86\xfa\x8e\xfb\x8e\xfb\x16\xf9\xa4\xfc$\xfdU\xfc}\xfc\xdb\xfb\xc1\xfc\xb0\xfe^\x00\xe7\xfcE\xfe\xf6\xfeH\xffo\x01Q\x01\xcc\xffW\xff\xfe\xfeN\x00\x8c\xffi\x01\x0e\x01\x7f\xfb>\xfd\xe9\x01\x1e\x01\xf9\xfcz\xfe\x16\xff\x8f\xfe\x1e\xff\xb8\xffP\x00\x93\xfe\xd7\xfc%\xff\x16\x01\xc2\xfe\x8a\xff0\xfe\x89\xff\xe6\xfd\xe1\xff?\x01W\x00Q\x02\xca\xfe\xfd\xfew\x02]\x04f\xff\xa4\xfd\x0b\x00\x9c\x01\xa2\x03%\x00\x9d\xfe\xbf\x00\xba\x03\xee\xfc\xea\xfd\xfa\x01\x89\xfdL\x01\x0c\xfe\x10\xff\x07\x01\xf8\xfd\x07\xfd\x89\xfcO\xff\xef\xff\x01\x008\xfat\xfe\x0e\x01\x9f\xfc\xf6\xfba\xfe\xf3\xff\x98\xf8T\xffR\x02i\xfe\xac\xfc8\xf8\xe0\x03\xad\x01\xf0\xfa>\x00\xc0\x032\xfe\x9a\xff\x87\x04\xc7\x02\xfd\xfc\x1c\x00\x1b\x08\xb5\x01\x94\x00\xce\x07\x0e\x04\xbb\xfe\xb2\x03\xae\x08)\x03D\xfc\xee\x06\xdf\t\xe4\xfb\xef\x03\x98\x08\xb0\xfd\xdf\xff\xc0\x07\x11\x03\xd7\xfd\xd9\x01\xaa\x05\xe5\x03\xfc\xfe\x8b\x05J\x02\xd1\xfd\xbb\x01\xa6\x01\xc2\x05\x97\xfe\xd2\xfd\x9a\x01 
\x03\xbc\xffD\xf8|\xfeV\x03\xce\xfc\x83\xfa\x1f\xfe\xde\xfd\xd1\xfcJ\xfa3\xfe\xcf\xfcL\xf9\x0f\xfe4\xfc\xaa\xfc\x08\xfc\x90\xff\x90\xfc\xab\xfd\xe4\xfd\xfb\xfb\xdf\xfd\xa8\xfc\xc8\x02\x10\xfe\xab\xffg\x01\x02\xfe\x00\xfbY\x02\xfd\x02\x12\xff\xd7\xff\xc9\x03\x83\xff\x8b\xfc\x19\x05\x8e\x05v\xff\xbf\xfec\x071\x00\xa9\xff\x1e\x039\x06S\xff\xf7\xfd@\x036\x067\x05~\xfe\xe8\x02}\x01b\x02\xe6\x02\x9e\t\xd4\x03\x89\xff\xb1\x06\xf5\x05\x11\x00&\x03\x84\x06\x00\x00\xe1\xfe\x92\x01\x9e\x05f\xffV\xfb\x0c\xfb\x96\x00\x05\xfd\x1b\xfbn\x02\x82\xfe|\xf3\xf1\xff\x86\x02\xef\xf62\x00?\xfb\x12\xf7W\x00\xaa\xfb\xd1\x00\x89\xfd\xc6\xf6\xb6\xfe\xcd\xff;\xfb!\xfc\x81\x03\x14\xfa\x94\xfb\x08\xff\xe6\xfcQ\x01\xc0\x019\xfbH\xfb(\x03 \xfd\x9e\xffl\xfe\xf8\x01\xf4\xfe\xd4\xfdZ\xfe\xb2\x00\xfb\x03\xc7\xfb\x1c\x01\x18\x03-\xfe\x92\xff\x83\x03\xbb\x03\x00\x01\x04\xfe\x02\x01\xc2\x08!\x04\x86\xfd\x10\x01\x8b\x08\xe7\x02H\xff\xd5\x00d\tU\x04\xfe\xfb[\x05f\x02\x8f\x00\x16\x03\xbd\x00\x06\x02\xa8\x01\xbd\xf8\x86\x05\xd2\x03\xd2\xf8g\xfe\x1f\x06\x9f\xf9\xeb\xf9:\x07O\x00\xed\xf1\x9b\xfb`\ny\xf9\xd3\xfan\x00q\xff\x1e\xfb7\xfcG\xff\xcc\x00W\xf38\xfe\x1f\x08\x03\xfaD\xfb\xdb\xfd\x0b\x00\xaf\xfc\xa0\xfc\x83\x00\x16\xfd\xa1\xfa\x04\x05(\xffD\xfc\xd1\xff\x9d\x00\xe9\xfc\x93\xf8\xf9\x05\xf0\x03\x06\xf9\xc4\x00Q\x00\xea\x04\xf4\xfft\xfcb\x03\n\x00\xc7\xffX\x04\x9b\n\xc3\x01v\xfe\xfe\x01Y\x06G\x01 \x02\xa5\td\x05\xbd\xfe6\xff\'\x0c\x87\x00\xa3\xfaZ\x04Y\x03\xdf\xff\r\x02\xd8\x03\xa7\x00W\x01\xdb\xff\xdd\xfc\x86\xfe\xb8\x05I\xff\xaf\xfd\x03\xfd[\x03\xb2\x02\xb9\xfd\xa7\xfa\xf0\xf9\x05\x06\xcc\x02\xec\xf8\xb1\xfb=\x08\xc2\xfb\xf0\xf3\x0e\x03\x95\x05f\xf6Y\xf7N\x04\xbe\xfe\xe6\xfc\xd0\xfc\xb1\xff\x93\xfe\xb9\xf7\x10\xfd\x90\x01\x88\xff\x9b\xff\x03\xff\xd9\xfc\x08\xfc\xcf\x01:\x03T\xfeX\xfc}\xfdz\x06;\x03O\xfb\xf8\x01\xdb\x04T\xff\x8d\xff\x83\x06\r\x02\xa5\xff\xdc\xfeJ\x08\x1e\x03\xbe\xfc\x14\x055\x00\xbc\x00\xab\x02K\x05\xea\xfd\x82\x00G\x04%\x00\x11\x01\xeb\xff\xd8\x03\xee\xfdD\xff\\\x05\x8c\x00\xcb\xfd\xcc\x02\\\x00\x19\xfa\x98\x07\x86\x006\xff\x0c\xfe\xb4\xf7]\x0bS\x03\r\xf4\x9c\x00\x99\r]\xf6)\xf3\x18\x08J\x08U\xf6\x96\xf7\xf5\x01!\x00\xf8\x01\xb2\xf8\xe9\xfb\xf8\xfax\xfa\x86\x04\xb4\xff\x14\xf9\xd4\xfc 
\x02\xc9\xfb0\xfe\xae\xfe\x7f\x01L\xfe\x85\xff\xb6\x01\x91\x00\x12\xfde\xfc\x0e\x07g\xfe2\xf8\xb5\x03\xa2\t\xfc\xfb\xe2\xf7\xfc\x04_\x02K\xf7\xad\x04\xe2\n}\xfc\xd7\xf8W\x07\x06\x05\xab\xfb\x9a\xfe\xc6\x07&\x05\xb8\xfd:\x05\xf2\x03\xd1\xff\x07\x00k\x05Q\xfe\xe5\x04\xfd\x061\xf9\x07\xf9\xe7\x07Y\x0c_\xf3\x82\xfa\xef\x08\xb6\xfa\xba\xf8\xa8\x03\xf9\x07\x1a\xf8\xc8\xf5\xa5\x06d\x03v\xf8\x95\xfdC\x02\xcd\xfc\x97\x02\xdf\xfd\xce\xfc\x06\x05\x94\xfc\xf4\xfd\x1c\xfe\xdd\xff\n\x01\xde\x00\xa4\xfb(\xfe\xe3\xfc\xea\x00R\x04}\xf4\x03\xf9{\x01?\t\x95\xf62\xf5.\n\x1d\x03{\xf2\xac\xfc\xff\r\xa6\x02\x05\xf5k\x01\x9f\x0e\xfe\xfa{\xf9x\x08\xf5\x08N\xfd\xb6\xfc*\x08e\x07G\xfd+\x03\x19\xff\xe0\xfeG\x05\xf0\x02\x86\xfei\x04\xac\x03\xbc\xf6l\x01\x81\x07`\xfe\xa9\xfa_\x05\x8c\x00\xfa\xf6~\xfe\xe3\x07\x9b\xfdl\xfbl\x03\xa8\xff\xdb\xf9Y\x02\xff\x01\x80\x00\x06\x00\xeb\xfc\x19\x08\xae\xff)\xfb"\x01\x8f\x04m\xfc\x99\x02\x01\xfd\x8c\xfc\xb9\x03\xb6\xf9\x0c\xfc\xc6\x029\xfd@\xf6\xda\xfe\xa4\xfc\xbb\xf9C\x00\xaa\x00o\xfa\x1a\xfcJ\xff\xd4\xfch\xfb@\x08\xb8\xff\x8e\xf3\'\x03\x18\x074\x00z\xf9\xfc\xff]\xff\xb1\x02A\x03\x9d\x01\xcc\x034\xfd\xc7\xff\xa5\x04\x84\t\xef\xfe3\xfb\xe6\x06)\x07\x83\x05c\xfa\xc4\x05\xc7\x05|\xf8\x98\x04o\t\x1a\xff\x9d\xff\xd6\x01(\x02\xda\xfe[\xfd\x8a\x05>\xfe\xf9\xf9\x8f\x03"\x05\xe7\xf7T\xfc\xd6\xfe\xa0\xfb\xb0\xfc\r\x08\xe9\x00\xbf\xf4\xb9\xfe\xd2\xfeY\x01\xcf\xfd\xc8\x03\n\x02-\xf3F\x02\xc4\x0e\xa5\xfb\xc2\xf53\x02G\x07\xbb\xfa\x90\xfe\x9f\x0e\x05\x04\x91\xf2\xe2\xf7\x8b\x04P\x11`\x01V\xf3\x14\xf8\x86\x06:\x07=\xf7\xb5\x03;\x00\xb1\xf7\\\xf6\x95\xfd<\r\xde\x06\xc6\xf2\xb2\xf5\x83\x03\x91\x04\xa1\xfa\xe4\x01\xe1\x02U\xf8(\x03\x9b\x08\x16\x02\x0e\xfd\xf3\xff\xa6\xfaK\x04\xdf\x07X\xfe\xf0\xfeG\x05\xde\x00\x85\xfd\x03\x06^\xfd\x0f\xfd\x87\x03\x12\x02\xc7\x03\xb8\x02\xa0\xfcn\xfa\x80\x04k\x03\xc0\xfbQ\xfdP\x08\xd7\x00l\xf6\xc7\x00M\x01r\xfc|\x01]\xfd-\xfdV\x03\xd7\xfbL\xfcl\xf9\x0b\x02\x81\x0cm\xf3B\xf2 \x0b\x93\x07\xac\xfdj\xf2\xe6\x00\xa6\n\x1a\xfa\xf8\xf9d\x05(\x0b\xf8\xfc\xb3\xf0p\x00\x15\x0eo\xfd\xc2\xf2S\x02\xc1\x0e\x0c\xfb\x84\xf4D\x06Y\x08L\xfe\x01\xf5\xa7\xffh\x0fn\x080\xfa\xda\xf6u\x00n\x0c@\x03h\xfa\xf5\x01|\x06\x01\x00\xdf\xfb\xec\x03L\x05\xcb\xfd\x1b\xf7\x94\xfd\xbc\x0eA\x04\xf9\xf4\x8d\xfb/\xfdY\xff-\x03}\xfd}\x00\x03\xfe\x0f\xfc\xbf\x02\x8e\x02\x9b\xf9[\xfc+\x02g\x01\xd0\x073\x00\xc9\xfd\xa2\xf7\xc0\xf9+\tg\x04\x16\x02\xbc\xffw\xf8P\xfea\x07\xdc\xfe\x8e\xfcz\xfc"\x00\xbc\x02\xb1\xffI\x05U\x00\x8b\xf5m\xf9-\x07\n\x05\xe0\x01\xf9\xfa\xdc\xfa\x81\xfb6\xff\xd5\x05\x1e\x028\x02\xdd\xf8\x90\xfbU\x02[\x03\xbd\x00W\x01`\x00\x1e\x00%\x02d\x04a\x04m\xfcC\xfc*\xfe\xad\x00+\x08\x8f\x04\xc5\xfb\xb7\xfc\xd3\xfd\xb9\x00@\x00d\xf94\xfe5\x05\x9a\xff]\xff\x1e\x04\xca\x02\xed\xf8\xff\xf4\x11\x03`\r\x0b\x04\xeb\xfc\xbb\xfb\x15\xfd\xac\x00\xf2\xfe:\x04\xb2\x00\xa1\xf9\xb1\xfe~\x011\x04[\x04\x96\xfc%\xf3<\xfb\x8a\x0b\x10\x07\xd6\xffV\xfd\xa9\xf9\x96\xfby\x00\x16\x06s\x04\x86\xfd\x87\xfb\xa1\xfe\xae\x01\xa2\x04D\x01o\xfb\xed\xfd\x9c\x03\xa8\x02"\x05\xdd\x01\xdc\xf9\x82\xfcD\x01\xed\x04\x15\x01?\x00\xfc\xffl\x00\xa9\x01O\xff\x07\x00\xf2\xff\xa8\xfe\xdf\x00\x93\x00\xd3\x01\x87\x03k\xfc\xb8\xfa)\x00\xc3\x01\x18\x02_\x01\xd4\xfd\xd7\xfc.\xfd<\xff\xb8\x00\xff\x05 
\xfd\xe5\xf7\xb2\xff\xf3\x04)\x03\xa7\xf9\x1d\xfdL\x00\xe7\x01\xb9\x01?\x03\xb7\xfdD\xfc\x94\xffj\x00:\x05:\x04\xd4\x00\xc2\xf85\xfd;\x031\x03\xbf\x03[\x00B\xfd|\xfd\x8c\x00\xd1\x01\xcf\x00\xe2\xfd\x8b\xfdV\xffB\x03*\x06{\xfeg\xfa)\x00\x9f\x01\xd3\xfee\x01\x92\x03\xb6\x02\xf8\x00\xcb\xfdp\xfe7\x00\xd4\xff2\xff"\x04\xdf\x025\xfd\xe6\xfd\xe9\x00J\x01\x8f\xfb{\xfd!\x02$\x03\xb5\x02\xc1\xfd_\xfb\n\xfe\xfb\xfe6\x00g\x02\x81\x02\x8f\xfe\x94\xfcx\xfe\n\x00K\xfe<\xfd\x19\x00+\x03w\x02\xd7\xfe\xeb\xfb\x8d\xfc\x83\xfe*\xff\xd8\x00V\x02\xdc\x02}\xfdO\xfa\xba\xfe\xbf\x01(\x00\x9b\x00\xbb\x01\xb0\x00\x03\x00\x91\xfe4\xfd\xd8\xff\xf7\x01R\x00 \x02\xd2\x02v\x00(\xfd\xa5\xfcf\x00\x19\x02\xad\x01\x94\x00\x00\x01\xa1\x01r\xfe\x17\xfe\xfe\xfe\xb2\xff\xb3\x01\x12\x01h\x02<\x03\x19\x00P\xfc\xc6\xfb\xad\x01\x8a\x05q\x03\xb2\xff\x86\x00\xb8\xff\xe1\xfd5\xff\xe2\x01\x16\x02g\xff\xef\xfe]\x016\x02\xdf\xfdZ\xfcE\xfe\x19\x00\x95\x01\xd7\x00e\xff\xc0\xfd\xc2\xfb\xbf\xfe\xed\x00&\x00\x03\x01\x8d\xfe/\xfd\xbd\xfd\x9f\x00\xf3\xff\xb7\xff\x03\x02/\xffq\xfc\xa8\xfd\x97\xfe\xbf\xfd\xe3\xfew\xff\xf2\xffM\xfc\x98\xfb\x85\xfd\x02\xfd3\xffr\xff\xcc\x01\xd1\x02T\x03\xd5\x01L\x02\xf5\x04\x85\x07{\n^\n\x0c\x0c\xc0\n3\n,\x0b%\x0b\xd8\x0bI\x0b.\n\x02\x0b\xfe\x07}\x05\xeb\x03\x9e\x00\\\x00\x9d\xff\x0c\xfe@\xfb\xe5\xf9\x83\xf6d\xf4T\xf4\xa7\xf4Z\xf4\xa0\xf3G\xf4\xba\xf2\xee\xf2w\xf4\xa9\xf6\xdf\xf8\x85\xf9<\xfb.\xfc-\xfe\xb8\xff!\x00S\x03\xe4\x04\x0c\x05\xc1\x051\x07\xf4\x06U\x05\xf9\x04p\x04"\x050\x04X\x02\x12\x01\xc8\xfd5\xfcw\xfb\t\xfa\xf5\xf8\xcf\xf7\xf3\xf6\xc0\xf4\x0f\xf5V\xf4\xbe\xf4C\xf4\n\xf5\x95\xf6\x89\xf64\xf8r\xf7\x1f\xf9\xf9\xfae\xfcs\xfe\xef\xfe\x96\x00\xb2\x00\x9b\x00B\x01\xdd\x02\xff\x04\xe6\x03a\x03\xc4\x02\xbe\x02\x1e\x03\xbd\x02\x1e\x03\xfe\x01z\x01\xa3\x01=\x00\xca\xff\xaa\xff\x11\x01\xac\x01\xd3\xff\x1a\xff\xef\xfd\xfa\xfeg\xfd\xfc\xff\xd4\xff\xbd\xfb\x9c\xfd\x9d\xfdh\x00\x11\x01\xc0\xff\x84\xffk\xfcO\xfd\xd6\x08B\x14m\x16~\x0f\t\t\xd1\x0e|\x182\x1f\x0f \xaf\x1f\x98!\x8a\x1f\xf3\x1e}\x1e\x92\x1a\x8c\x16\xf6\x11v\x16\xb1\x18\x1b\x11>\x06\xd1\xfb\x05\xfa\xba\xf9\x9d\xf8\xa9\xf7\xe5\xf1\x9b\xea\xdc\xe4\xbb\xe4\xaa\xe5\xd2\xe5B\xe4\x83\xe5\x90\xe7\xd6\xe8\x97\xea\xa8\xe9+\xeb\x80\xeeL\xf4<\xfaO\xfd\x84\xfe\xc5\xfb\x9e\xfb\xd6\xffw\x04\xa0\x07\x85\x07t\x06\xba\x04<\x03\x12\x03A\x02X\x01\xff\xff\x0f\x004\xff\x97\xfdE\xfa\xba\xf6\xe3\xf4|\xf5\xa4\xf7\xef\xf8\xe0\xf7>\xf52\xf3{\xf3{\xf6\xbc\xf8\xdf\xfa\t\xfcD\xfc\x96\xfd\xae\xfe\xb4\xff\x00\x01\xc3\x02\x98\x05k\x07\xa2\x08i\x08\x86\x07!\x07\xc0\x07}\t\xb1\n\xbf\nl\t\x18\x07\xa3\x05\x90\x05\xbf\x05[\x05\r\x04\x84\x04\xf1\x03\xa2\x01T\x00\t\xff\xa7\xfe\x0c\x01\xa0\x02\x92\x02A\x005\xfd+\xfe\xef\xfe\xac\x01g\x03\xea\x03\xf2\x00\x11\xffK\x02n\x01[\x02\xf7\x00\xa3\x01P\x03\xf0\x00\xda\x04\x90\x03~\xfe\xf5\xfb^\xfc\xf9\x02\xa9\x01\t\xff\x19\xfd\xea\xf9\xdd\xf8\xdb\xf7\x14\xfb\x13\xfc!\xf8\x08\xf5#\xf5\x14\xf70\xf6&\xf5\xbc\xf58\xf6\xef\xf5\x91\xf67\xf8\x99\xf8\x11\xf7D\xf7W\xfa\xd3\xfc]\xfc6\xfb\xdd\xfb\x1d\xfc\x9a\xfc\xb3\xfe\x9f\x00X\x00\xbc\xfe\xae\xfd*\xff@\xff\xd4\xfe\x9f\xfe\xca\xfd\xcd\xfd\xdf\xfd\xf5\xfc\x1a\xfc~\xfb\xf3\xfbh\xfc0\xfb 
\xfc\x95\xfb\x9d\xfey\x08\x19\x10s\r.\x04\xd9\x03"\x13\x8c\x1f\x96%M&\xe2!*\x1d\xf0\x19\xfe#\x99.\x04/\x07&\xa1\x1d\'\x1b\x07\x18\x8c\x15\xc5\x12\xb8\x0e]\x08\xf6\x02\xf5\xfe.\xf8\xfc\xef\xa6\xean\xe9\x85\xebo\xeb\x9d\xe8\xdf\xe1\x1d\xdd6\xdeL\xe4\xe7\xea\xb1\xee\x89\xef\x11\xedZ\xec\x81\xf0\xcf\xf8\xd0\xff\x1f\x02\xb3\x02\xee\x02\xb9\x04\xd2\x04J\x06\xf0\x08\xa0\t4\tD\x07*\x06r\x03\xcc\xfd\x91\xfb3\xfc\xdc\xfc\xdf\xf9^\xf5\xc5\xf1o\xee\x16\xec\xac\xec\x9a\xef*\xf0\x94\xedq\xeb\xd4\xec\xd8\xee\xd6\xf0 \xf4\x88\xf7\x17\xfa\x87\xfa\xd2\xfc\x00\x007\x02(\x04>\x07E\x0b\x9a\x0cF\x0c\xee\x0b\xc2\x0c\xae\r\x18\x0ed\x0f\xb0\x0f5\x0eO\x0bh\t%\t\x97\x08*\x08\xa2\x06\x07\x06\xf0\x03U\x01\x94\xff<\xff\xa3\xffW\xff\xff\xfd\xc1\xfd\xf2\xfc\xa8\xfb\x84\xfb\x93\xfbU\xfd\x93\xfd\x9c\xfd\x12\xfd(\xfd\xc8\xfc\xce\xfdw\xff\xd9\xff\xd4\x00\xa6\xff\x83\x00\xfd\x00\x91\x01\x06\x02|\x03\x16\x04;\x03\xcc\x02\xc4\x02\xee\x03\xae\x03B\x04\x01\x04\xba\x02\x9c\x01O\x00\x05\x01\x02\x01[\x00b\x00\x08\xfe\xee\xfc\xf5\xfa\xf9\xfcm\xfe\xf9\xfe\x17\xffo\xfcM\xfc\xb9\xfa\xe2\xfd\xab\xff\xc2\x00\xde\x00\x84\xfeF\xfe\xd4\xfc>\xfd\t\xfeR\xffq\xff\xc1\xfd\xd0\xfa!\xf9\'\xf9\xca\xf9{\xfaa\xf9\x01\xf9\x03\xf8\\\xf6C\xf6y\xf6\xeb\xf7\xf4\xf7\x07\xf9`\xfa\xd3\xf9\xcf\xf8\xbc\xf8Q\xfd\x0c\xffP\x01\x08\x03\xd4\x02\x80\x02\x03\x01\x01\x07\xe2\nN\x0b\x99\x0c\xf0\r\x06\x0e\xc9\x07\x8b\x06E\x0c\x91\x12\x86\x14\xfe\x10\x92\x0c\xa9\x04\xc6\x01V\x08V\x11\xbe\x12u\t\xee\x00\xe7\xfej\x01R\x06i\tC\x08\xee\x03\xdd\xfe\xa0\xffY\x02\xc2\x03)\x04\x03\x03:\x05\xf1\x05\xa5\x05V\x04\xd4\x01Y\x02\xf7\x04]\x07T\tt\x05\x15\x00\xd9\xfb\xbc\xfa\xf0\xffX\x00d\xfd\xf6\xf6\xbc\xf1s\xf1\xf1\xef\x9a\xf1d\xf2\xad\xef\x1e\xec\x96\xe9\xf9\xeb\n\xefc\xee\x06\xef\xae\xf1\t\xf3\xab\xf2\xb0\xf3\x86\xf7\x03\xfbT\xfb\xe4\xfby\xff\x87\x00\t\x00\x1e\x00\xb0\x02\xc4\x04\x08\x03\x84\x02n\x03\x8c\x03\x82\x01\xc4\x00\xd2\x01B\x02j\x00\x95\xfe\xc1\x00\xf8\xfeG\xfd\xac\xfe,\x02-\x03\xee\xfe\xa1\xfdS\x020\x05\x13\x05\x0b\x05\xf2\x05\x9f\x06\x1d\x04c\x06\r\t]\x08\xa2\x05\'\x03\xe3\x04\xcc\x04\xbb\x02\xe0\xff<\xff\xbc\xfeI\xfd`\xfc\t\xfd\xe9\xfb\xb7\xf9J\xf9\xe0\xfb\x85\xfce\xfb!\xfe\x1e\x01\xfc\x01\xb0\xfe\xe5\xff\xe1\x05O\x08\x9b\x07\xbf\x06\xef\x07\xef\x06\xed\x05)\tZ\x0b\xda\x06\xad\xff\xc3\xfe_\x03\x1f\x04y\x00_\xfb\xd1\xf7\xb3\xf5\xc0\xf5\x03\xf9E\xf9!\xf5\xd6\xf0_\xf2\xdc\xf5U\xf6+\xf6\x03\xf7\x19\xf9\xbd\xfac\xfe\xde\x01\xae\x00l\xff\x18\x02\xff\t/\x0ey\x0e\xb2\x0e\xf5\x0b9\t\xdc\t\xba\x10R\x16\x14\x12\x98\x0b\xf1\x06\x8d\x04\xd0\x04-\x06O\t\xc8\x05\xfb\xfc\xe4\xf6\x01\xf8\xcc\xfc\xe6\xfd\x14\xfc\xe9\xf9K\xf8\x1f\xf7\x10\xfa?\xff\x0b\x02(\xff7\xfd\x94\xff\xc4\x02W\x04\xfd\x04\xd6\x05u\x03\xdc\x00d\x02\x04\x06&\x06\xce\x02\xdd\x00E\x00\xd9\xff\x84\x00\x1b\x01,\x00T\xfd\x87\xfc\x93\xfe^\xfee\xfd\xba\xfcS\xfc.\xfc\x9c\xfbH\xfe$\xff\x98\xfcL\xfa\xfc\xfaR\xfd\x87\xfd\xa2\xfd\xe8\xfd8\xfdV\xfa`\xfa`\xfdK\xfe\xd5\xfc\x9c\xfa\x11\xfb&\xfb\xce\xfa\x01\xfb\xdd\xfb=\xfc\xb5\xfa\xf2\xfaH\xfc\x1a\xfd\xad\xfc\xe8\xfc\xd8\xfe\x9a\xff\x7f\x00\x8d\x00\xbc\x01\x9d\x02\x84\x02\xcd\x03:\x05\x13\x06\xbf\x05\x14\x05v\x05\xba\x05\xc4\x05\r\x06\xee\x05H\x04\x7f\x02o\x012\x02%\x026\x00f\xfe\xeb\xfc\x06\xfd~\xfcU\xfc\x93\xfc\xc4\xfbI\xfb\x87\xfb\x10\xfee\xfeJ\xfe\xcd\xff\x9a\x02\x99\x03\xb5\x02\x7f\x03)\x06\xfb\x07\xe4\x07\xd2\x08\x92\x08\x91\x07\xc2\x05F\x06\x94\x07h\x06\x85\x03P\x01\xdb\xffK\xfe\x8b\xfd\x0c\xfcC\xfbI\xf9;\xf7\x9d\xf6K\xf6f\xf6\x83\xf5\x99\xf5\xf6\xf5\xf4\xf57\xf6\x8e\xf6\xb2\xf7{\xf8\x10\xf9\x01\xfa\xe0\xfa\xc1\xfb0\xfcx\xfdy\xfe\x81\xff 
\x00\xa8\x00O\x01N\x01\x03\x02\xda\x02E\x035\x03\xe5\x02\xd5\x02\xd3\x02+\x03\xf4\x02\xe4\x02\x84\x02R\x01\x82\x01\x98\x01\x13\x020\x01e\x00\xf6\xff\x08\x00\x00\x00\x04\x01\x14\x01"\x00\xcd\xff`\xffK\x01\x8b\x01\x9f\x02\x12\x02\x83\x01\x02\x01\x00\x014\x02\xa4\x02\x9e\x02y\x01\x85\x00\xa5\xff\xb1\xff\xf6\xfd\x94\xfd\x05\xfd\xe7\xfc\x92\xfc,\xfc\xcb\xfd\x03\xfdo\xfd]\xff\xf8\x02\x17\x05\x88\x06N\tx\x0c\x14\r2\x0e\xc3\x12&\x17\xdc\x17\xa2\x15\xdf\x15w\x15\x08\x148\x13\x7f\x14e\x13F\x0c\xee\x05\x87\x02_\x01i\xfeD\xfc\x04\xfa\xbc\xf4L\xed\xf6\xe9\xd4\xeb\x89\xed\xfa\xecf\xeb\xaf\xea_\xe9V\xe9\x02\xedd\xf2\xdf\xf5:\xf6\xd6\xf6m\xf89\xfb\x82\xfe[\x02\xdf\x04\xca\x04\xd0\x03\xfe\x03\x7f\x052\x06R\x06\xa1\x05\x9c\x03\x90\x01\x08\x00\xea\xff&\xff`\xfd\x97\xfb?\xfa\x1c\xf9\x88\xf8\xd2\xf8\xd3\xf8\xe5\xf7\xe9\xf6s\xf7\x03\xf9\'\xfa\x1d\xfb\xee\xfb%\xfc\x8b\xfc\xda\xfd\x1a\x00\xef\x01\xa3\x02\xc3\x02\xe3\x02\xff\x02\xea\x03o\x05/\x06\r\x06.\x05\x80\x04/\x04O\x04\xc2\x04\xba\x04\xc8\x03\xc0\x02%\x02\xba\x01\x85\x01\x96\x01\x8d\x01(\x01\xaa\x00\xa0\x00\xfb\x00\xb6\x00\xd1\x007\x01\xd8\x01*\x02"\x02\x1a\x02\x04\x02\xf6\x01#\x02b\x02\x84\x02\xe7\x01\x0e\x01+\x00\xe6\xff\xb0\xff1\xff\xfe\xfe=\xfe\x81\xfdI\xfdQ\xfd4\xfd\xc2\xfds\xfe\xab\xfe\x18\xff\xc2\xff3\x01\xd0\x01e\x02\x86\x03H\x04\x04\x05\x00\x05<\x05\xae\x05;\x05\xc8\x04]\x04h\x03K\x025\x01\x9e\x00\xc6\xffn\xfe\r\xfd\xc3\xfb\r\xfb\x98\xfa9\xfa\'\xfaZ\xf9\xba\xf8\x9b\xf8\x18\xf9\xf6\xf9S\xfaV\xfa\x88\xfa\xe4\xfa\xc6\xfb\xcf\xfc\x87\xfd=\xfe\x97\xfe\x04\xff\xe2\xffZ\x00/\x01\x89\x01\xd8\x01\xf8\x01\xd0\x01\x1e\x02=\x02\xeb\x01\xb1\x01f\x01\xfd\x00j\x00\xee\xff\xa8\xff\xa8\xff-\xffu\xfeK\xfe\xe0\xfd\xb7\xfd\x05\xfe{\xfe\x7f\xfe"\xfe\x0e\xfe\xad\xfe7\xffF\xff9\x00\xb6\x00\xa5\x00\x7f\x00\xf9\x00\xe3\x01\x0e\x02<\x02\xd1\x02\x13\x03\xc2\x02\xac\x02\n\x03\xef\x02\xd0\x02\xae\x02\x1c\x02A\x01\x9f\x00\xb0\x00\xf7\xff"\xff\xdb\xfe\xa1\xfe\x91\xfd\t\xfd(\xfe\xaa\xff\x83\x00\xcc\x01\xaa\x03~\x04\x86\x04\xc8\x05.\nS\r\x96\x0e\xbf\x0e\xd7\x0ev\x0e\x0f\x0e\xb1\x0fU\x11\'\x10n\x0c\x00\t\xba\x060\x05\xd7\x03\xa2\x02\xa1\xff6\xfb\x98\xf7\xb9\xf5K\xf5\xc1\xf4\xdd\xf3\xb7\xf22\xf1H\xf0\xa2\xf0\xec\xf1\xbf\xf3\x91\xf4\xee\xf4\x81\xf5\x9f\xf6~\xf8\x1d\xfa\x85\xfb\x8a\xfc\xe2\xfc\x9a\xfd\x9e\xfe\xb3\xff\x9b\x00\x8b\x007\x00\x04\x00\x1a\x00\xac\x00|\x00\n\x00i\xff\xbc\xfe^\xfe7\xfeE\xfe\xc5\xfd\xe2\xfcB\xfc*\xfcD\xfcV\xfcl\xfcz\xfcW\xfcx\xfc\x01\xfd\xd5\xfd\x81\xfe\xe3\xfeS\xff\x06\x00\xa8\x00)\x01\xbc\x01g\x02\xc5\x02\xdb\x02#\x03}\x03\xa0\x03\xa6\x03\xd1\x03\xf8\x03\xfd\x03\xd1\x03\xc0\x03\xb5\x03\xbc\x03\xe2\x03\xf8\x03\xf0\x03\xbd\x03\x8e\x03l\x03\x7f\x03\x8e\x03v\x03-\x03\xce\x02p\x02A\x02\x0f\x02\xcc\x01k\x01\xf4\x00\x8e\x00\x1d\x00\xd8\xff\x9d\xff;\xff\xe8\xfe\xb8\xfe\x86\xfeV\xfe=\xfe*\xfe\x1c\xfe\x1a\xfe3\xfe\\\xfeo\xfex\xfee\xfe]\xfe\x93\xfe\xc6\xfe\x06\xff(\xffM\xff~\xff\x94\xff\xc4\xffG\x00\xea\x001\x01\'\x01n\x01\xdc\x01v\x02\xef\x02\x89\x03\xa0\x03\x03\x03\x81\x02\x94\x02\xef\x02\xa1\x02\xe9\x01R\x01\xba\x00\xcd\xff\x1e\xff\n\xff\xc7\xfe\xfc\xfd1\xfd\x0c\xfd$\xfd\xd2\xfc\xae\xfc\r\xfdA\xfd\x02\xfd\n\xfd\x87\xfd\xdf\xfd\xe1\xfd\xda\xfd=\xfe\x85\xfe\\\xfel\xfe\xbf\xfe\xd2\xfe\xa6\xfe\xb2\xfe\x14\xffM\xffK\xff9\xff\xa4\xff\xc4\xffo\xff\x81\xff\xca\xff\xe1\xff\x8d\xffa\xfft\xff:\xff\xdf\xfe\xc1\xfe\xd4\xfe\xdb\xfe\xa5\xfe\x81\xfe\x8e\xfe\xd6\xfe\x1d\xffZ\xff\xad\xff\xed\xff\x1f\x00R\x00\x8f\x00\xd6\x00\xf8\x00\x12\x01\x1d\x01\xfd\x00\t\x01\xe1\x00\xc4\x00\xb1\x00\x86\x00\x89\x00l\x00W\x00\\\x00d\x00\x7f\x00k\x00k\x00}\x00F\x005\x006\x002\x00\xfd\xff\xa1\xff\x8b\xf
fp\xff\xd8\xfez\xfe\xc2\xfen\xff:\x003\x01t\x02T\x03!\x04^\x05c\x07\x8e\tJ\x0b\x91\x0cH\r\x96\r\xe2\r\x1f\x0eV\x0e\x06\x0e\xb0\x0c\xa8\nh\x08\x8a\x06\xc1\x04\x95\x021\x00\xa8\xfd\x1f\xfb\x9b\xf8\xd0\xf6\xfb\xf55\xf5\xfa\xf3\xec\xf2z\xf2\xc4\xf23\xf3\xdf\xf3\x05\xf5\xf6\xf5\x8c\xf6W\xf7\xc5\xf8d\xfaS\xfb\xfc\xfb\x0b\xfd\t\xfe\x97\xfe\x0e\xff\xbc\xffI\x00)\x00\xf7\xff]\x00\xbc\x00x\x00\xfe\xff\xdc\xff\xdc\xffu\xff\x15\xff\x11\xff\xef\xfe\x1f\xfeb\xfdp\xfd\x9e\xfd^\xfd\x0c\xfd\x10\xfd8\xfd\x1a\xfd6\xfd\xe3\xfd\x81\xfe\xa5\xfe\xbf\xfeS\xff\xf7\xffY\x00\xdd\x00k\x01\xb3\x01\xc9\x01\n\x02k\x02\xbe\x02\xcf\x02\xd7\x02\xf1\x02\xeb\x02\xde\x02\xe0\x02\xf8\x02\x08\x03\xf5\x02\xe7\x02\xf4\x02\xe1\x02\xca\x02\xe0\x02\xfa\x02\xed\x02\xcb\x02\xcc\x02\xaf\x02g\x02%\x02\xf2\x01\x9f\x010\x01\xc2\x00Y\x00\xda\xffM\xff\xd3\xfey\xfe\x13\xfe\xac\xfdg\xfd;\xfd\x1c\xfd\x06\xfd"\xfdu\xfd\xb9\xfd\x04\xfek\xfe\xeb\xfeg\xff\xe0\xffe\x00\xe7\x00W\x01\xad\x01\xfe\x01J\x02l\x02i\x02\\\x02Q\x02+\x02\xeb\x01\x96\x013\x01\xc7\x00o\x00\x1e\x00\xcb\xffn\xff@\xff+\xff\xe6\xfe\xd5\xfe(\xff\x98\xff\xa6\xff\xa3\xff\t\x00r\x00\xbb\x00D\x01o\x02)\x03\xd4\x02\x99\x02\n\x03P\x03\xf9\x02\xc9\x02\xf7\x02z\x028\x01e\x00m\x00\xf9\xff\xd7\xfe\x0f\xfe\xd1\xfdC\xfd]\xfc\x12\xfct\xfcC\xfc\xa0\xfb\x97\xfb\x1c\xfcU\xfcD\xfc\x8f\xfc8\xfdt\xfdU\xfd\xbd\xfdq\xfe\x7f\xfeA\xfe\x82\xfe\x02\xff-\xff\'\xffb\xff\xd3\xff\xe3\xff\xd4\xff=\x00\xd5\x00\x16\x01\x16\x01<\x01\x85\x01\xa3\x01\xa9\x01\xc5\x01\xc4\x01\x8e\x015\x01\xfe\x00\xdc\x00\xb1\x00u\x00*\x00\xed\xff\xb7\xff\xa6\xff\xac\xff\xb7\xff\xcd\xff\xd5\xff\x00\x00<\x00v\x00\xaf\x00\xd0\x00\xe8\x00\xee\x00\xfe\x00\x06\x01\xd8\x00\x83\x005\x00\xe3\xff\x84\xff\x13\xff\xaa\xfeE\xfe\xb6\xfd%\xfd\xc6\xfc\x93\xfcS\xfc\xfb\xfb\xcf\xfb\xc6\xfb\xb8\xfb\xca\xfbI\xfcO\xfdj\xfew\xff\xb8\x003\x02\xbb\x03E\x05=\x07q\t<\x0bc\x0cF\r\x1b\x0e\xc4\x0e\xed\x0e\x08\x0f\xd8\x0e\xe4\r.\x0c?\n\xaa\x08\xf1\x06\xcf\x04\x7f\x02/\x00\xd7\xfdi\xfb\x86\xf9]\xf8U\xf7\x13\xf6\xff\xf4|\xf4w\xf4\x97\xf4\n\xf5\xd4\xf5\x8f\xf6\x18\xf7\xd7\xf7\xfb\xf8A\xfa.\xfb\xf8\xfb\xef\xfc\xb6\xfdD\xfe\xcb\xfeW\xff\xb7\xff\xaa\xff\xa3\xff\xdc\xff\xfa\xff\xc3\xff\x82\xffl\xffW\xff\x08\xff\xc9\xfe\xc9\xfe\xa9\xfe4\xfe\xda\xfd\xcf\xfd\xc0\xfd\x9a\xfd\x87\xfd\x91\xfd\xa4\xfd\x9e\xfd\xba\xfd\x18\xfev\xfe\xbf\xfe\x07\xff[\xff\xb3\xff\xf2\xffJ\x00\xa0\x00\xd1\x00\xfd\x00+\x01M\x01g\x01v\x01\x90\x01\xa3\x01\xa1\x01\xa5\x01\xbb\x01\xd0\x01\xe4\x01\xfe\x01(\x02R\x02e\x02\x86\x02\xc7\x02\xe9\x02\xfb\x02\x19\x035\x03&\x03\xf5\x02\xcf\x02\xb9\x02m\x02\x03\x02\xac\x01P\x01\xd0\x00C\x00\xce\xff]\xff\xd6\xfeU\xfe\xff\xfd\xba\xfdw\xfd;\xfd*\xfd9\xfd?\xfd]\xfd\x9e\xfd\xed\xfd3\xfe|\xfe\xe1\xfeM\xff\x98\xff\xea\xffW\x00\xc4\x00\xff\x006\x01\x85\x01\xca\x01\xe4\x01\xf8\x01\x17\x02 
\x02\xfe\x01\xf0\x01\xff\x01\xf3\x01\xb6\x01|\x01a\x01A\x01\t\x01\xd5\x00\xb4\x00\x87\x00/\x00\xef\xff\xd8\xff\xc2\xff\x94\xfff\xff^\xffW\xffB\xff1\xffH\xffk\xffi\xffm\xff\x94\xff\xc7\xff\xe3\xff\x05\x002\x00R\x00Y\x00d\x00\x7f\x00\x8c\x00|\x00t\x00m\x00X\x00A\x008\x00\'\x00\xff\xff\xcc\xff\xc5\xff\xb8\xff\x8f\xff\x80\xff\x8f\xff\x82\xffS\xffO\xffm\xffp\xff^\xffh\xff\x95\xff\x96\xff\x89\xff\xa5\xff\xdd\xff\xf1\xff\xe8\xff\xfd\xff5\x00Q\x00Y\x00q\x00\xa7\x00\xc8\x00\xc6\x00\xe8\x00%\x01M\x01V\x01S\x01x\x01\x91\x01\xa1\x01\xc0\x01\xfc\x01\xf5\x01\xb2\x01\x92\x01\x8f\x01h\x01#\x01\xe4\x00\xb2\x00.\x00\xa1\xffD\xff\r\xff\xa7\xfe)\xfe\xd2\xfd\x93\xfd5\xfd\xd7\xfc\xbb\xfc\xca\xfc\xad\xfc|\xfc\x85\xfc\xbb\xfc\xcb\xfc\xc5\xfc\xfe\xfco\xfd\xb5\xfd\xe1\xfdU\xfe\n\xfff\xff\x81\xff\xe1\xffe\x00\xa8\x00\xb8\x00\x03\x01j\x01b\x01\x1f\x01\xfc\x00\xf9\x00\xce\x00\x94\x00u\x00X\x00\t\x00\xa6\xffk\xffd\xff[\xffC\xff\x1f\xff\xf4\xfe\xd4\xfe\xd4\xfe\xe7\xfe\t\xff$\xff.\xff#\xff4\xffd\xff\xa0\xff\xdb\xff\xf4\xff\r\x00,\x00B\x00}\x00\xbd\x00\xfa\x00\x1c\x011\x01M\x01~\x01\xad\x01\xc6\x01\xdb\x01\xf3\x01\xf7\x01\xdb\x01\xdb\x01\xf9\x01\xef\x01\xb5\x01u\x01Q\x01\x07\x01\xaa\x00\x80\x00f\x00\xf9\xffk\xff.\xff"\xff\xf8\xfe\xd2\xfe\x0f\xff]\xffJ\xffN\xff\xce\xfft\x00\xe7\x00y\x01A\x02\xe6\x020\x03\xb3\x03\x8a\x04\x1b\x05J\x05\x8e\x05\xee\x05\n\x06\xe3\x05\xc7\x05\x96\x05\x0b\x054\x04y\x03\xd0\x02\xf8\x01\xdb\x00\xbd\xff\x9c\xfek\xfd`\xfc\x93\xfb\x00\xfbb\xfa\xa9\xf9%\xf9\xdd\xf8\xda\xf8\x10\xf9r\xf9\xd7\xf9.\xfa\xa0\xfa6\xfb\xf7\xfb\xc7\xfc\x8b\xfd=\xfe\xb1\xfe\x1a\xff\x97\xff\x15\x00}\x00\xc9\x00\xfb\x00\x03\x01\xe9\x00\xe7\x00\xf2\x00\xe9\x00\xbd\x00\x83\x00N\x00\x0e\x00\xde\xff\xc7\xff\xb2\xff\x8e\xffW\xffD\xff;\xff7\xff@\xffR\xffR\xffE\xffM\xffY\xffS\xffJ\xffB\xff@\xff7\xff/\xffA\xffd\xff]\xffL\xffT\xffx\xff\x94\xff\xa7\xff\xcd\xff\xf6\xff\xff\xff\xfb\xff#\x00p\x00\x98\x00\x9b\x00\xaa\x00\xc4\x00\xbf\x00\xc2\x00\xdd\x00\x06\x01\x0c\x01\xfc\x00\xfd\x00\x18\x01)\x016\x01M\x01V\x01Z\x01c\x01x\x01\x83\x01\x83\x01\x84\x01n\x01N\x01<\x01&\x01\x02\x01\xd5\x00\xa1\x00i\x00,\x00\xf9\xff\xd9\xff\xbe\xff\x9a\xffx\xff_\xffX\xffW\xff_\xff{\xff\x96\xff\xaa\xff\xbe\xff\xde\xff\x08\x000\x00P\x00q\x00\x88\x00\x94\x00\x9d\x00\xa0\x00\xad\x00\xa3\x00~\x00Y\x009\x00\x1c\x00\x00\x00\xde\xff\xb9\xff\x93\xffy\xffk\xff[\xffT\xffL\xff:\xff3\xff+\xff4\xff@\xffH\xffR\xffQ\xffU\xfff\xff{\xff\x95\xff\xab\xff\xbd\xff\xcf\xff\xde\xff\xf4\xff\x19\x004\x00A\x00R\x00n\x00\x89\x00\x9d\x00\xb2\x00\xce\x00\xde\x00\xe2\x00\xef\x00\x04\x01\x0f\x01\x19\x01\x15\x01\x12\x01\n\x01\xfe\x00\xed\x00\xdd\x00\xc3\x00\x9d\x00v\x00Q\x000\x00\xfe\xff\xca\xff\x98\xffl\xffM\xff#\xff\x01\xff\xdc\xfe\xbe\xfe\xa3\xfe\x9b\xfe\x9d\xfe\x9d\xfe\xa6\xfe\xb3\xfe\xc8\xfe\xe3\xfe\x05\xff&\xffC\xffj\xff\x8e\xff\xa7\xff\xc0\xff\xd4\xff\xed\xff\x00\x00\x01\x00\x1b\x00\x1e\x00,\x004\x00)\x00G\x00i\x00\x8b\x00\xa1\x00\x8c\x00\x9c\x00\x92\x00\xd3\x00P\x01\xe8\x01A\x027\x02[\x02j\x02f\x026\x02/\x021\x02\xb7\x01\'\x01\xac\x00G\x00\xd6\xff=\xff\xb6\xfe0\xfe\x99\xfd\x0e\xfd\x8a\xfcF\xfc\x13\xfc\xe3\xfb\xb2\xfb\x9a\xfb\xc4\xfb\xfe\xfb4\xfco\xfc\xd5\xfc9\xfdz\xfd\xdf\xfdF\xfe\xa7\xfe\xee\xfe\x0c\xffU\xff\x9b\xff\xc7\xff\xeb\xff\xfa\xff\x06\x00\xe5\xff\xbe\xff\xb8\xff\x9e\xff}\xff8\xff\xf4\xfe\xdc\xfe\xb6\xfe\x95\xfe\x88\xfe\x95\xfe\x9c\xfe\x91\xfe\xa9\xfe\xde\xfe\x1c\xffD\xff\x8d\xff\xfa\xff>\x00u\x00\xc1\x009\x01\x91\x01\xc9\x01\x1d\x02{\x02\x90\x02\x90\x02\x9e\x02\xb9\x02\xa6\x02|\x02\x8a\x02\x99\x02\x8f\x02j\x02p\x02\x8b\x02\xa8\x02\xde\x02t\x03g\x04G\x05\xfb\x05\xb4\x06\xaa\x07^\x08\
xd0\x08o\t\x0c\n9\n\x0f\n\xd3\t\xa5\t\x0c\t:\x08H\x07.\x06\xd4\x047\x03\xb4\x010\x00\xa8\xfe/\xfd\xc3\xfb\x83\xfa\x8c\xf9\xb4\xf8\x18\xf8\xa0\xf7_\xf7T\xf7L\xf7y\xf7\xce\xf7;\xf8\xbc\xf8O\xf9\x0e\xfa\xca\xfa\x98\xfbg\xfc\xfa\xfc\x84\xfd\xf1\xfdF\xfe\x89\xfe\xa9\xfe\xc3\xfe\xd1\xfe\xbf\xfe\xa3\xfey\xfeH\xfe\x18\xfe\xcb\xfd\x83\xfdA\xfd\x0f\xfd\xd8\xfc\xb7\xfc\xc3\xfc\xd3\xfc\xf8\xfc0\xfdv\xfd\xc6\xfd-\xfe\x9e\xfe\x06\xffk\xff\xce\xff(\x00\x93\x00\xfb\x00J\x01\x9d\x01\xef\x01\x1d\x026\x02E\x02M\x02B\x020\x02\x17\x02\xf9\x01\xda\x01\xc9\x01\xac\x01\x96\x01{\x01[\x01S\x01M\x01F\x01:\x01;\x01B\x01G\x01T\x01[\x01`\x01X\x01P\x01N\x01:\x01\x14\x01\xeb\x00\xbf\x00\x8b\x00N\x00\x0e\x00\xd3\xff\x98\xffQ\xff\x0e\xff\xd2\xfe\xa5\xfer\xfeO\xfe6\xfe&\xfe\x1d\xfe"\xfe5\xfeM\xfel\xfe\x95\xfe\xc1\xfe\xf3\xfe-\xffe\xff\x98\xff\xd3\xff\t\x00I\x00z\x00\xa2\x00\xbb\x00\xcf\x00\xe8\x00\xf1\x00\xf9\x00\x03\x01\xfa\x00\xf4\x00\xe8\x00\xe2\x00\xd6\x00\xbf\x00\xa5\x00\x8b\x00t\x00a\x00J\x00;\x00(\x00\x19\x00\x16\x00\x17\x00 \x00%\x00)\x00<\x00F\x00Y\x00m\x00t\x00\x87\x00\xa0\x00\xa5\x00\xbb\x00\xb0\x00\xb6\x00\xb8\x00\xb0\x00\xb2\x00\x97\x00\x93\x00\x95\x00\x87\x00\x84\x00f\x00?\x00\x1c\x00\xd9\xff\xab\xff\x88\xffg\xffw\xff6\xff\xfd\xfe\x03\xff\xfc\xfe\xef\xfe$\xffk\xff\xa1\xff\x9c\xff\xb8\xff6\x00\xc2\x00\xe8\x01K\x04\xa4\x05\xf7\x05\xd9\x05`\x05\xae\x04\x83\x02v\x01\xe1\x00o\xffH\xfeU\xfd\xe9\xfcH\xfc\xf0\xfa\xcf\xf9d\xf8\xc0\xf6\x15\xf6\x87\xf5\xb6\xf5y\xf6\xf5\xf7\x8f\xf9)\xfa\xb7\xfb\xad\xfd\xa4\xfe\x7f\xfe\xf5\xfe-\x00y\x00\x19\x01\x10\x02\x07\x03q\x03\x15\x03\xab\x03\x14\x04\xa8\x03I\x03\xe3\x01\x11\x01\xa7\x00\xef\xff\xb1\xff\x82\xff\xbe\xffA\xff\xfc\xfe_\xff}\xff\xfa\xfe\x8d\xfe\x88\xfe\xa2\xfe\xf7\xfe\x07\x00\x00\x01\x9f\x01#\x02\xa4\x02\xe0\x03u\x04n\x04m\x04\xd2\x04C\x05\x19\x05}\x05\xcf\x06\xb6\x06\x08\x06\xb8\x05\xa3\x05\xfe\x05\x98\x05\xbe\x05\xd2\x05\xc8\x05\x9a\x05T\x05u\x05#\x05R\x04n\x03\xd6\x02\x96\x020\x02\xb6\x01;\x01\xc2\x00/\x00A\xffi\xfe\xac\xfd\r\xfdJ\xfc\x9b\xfbH\xfbo\xfb\x9e\xfb\x8f\xfbt\xfbZ\xfb3\xfb\x0e\xfb\xe2\xfa\xbf\xfa\xda\xfa\x02\xfbf\xfb\xe8\xfb\xc1\xfcj\xfd\xa4\xfd\xb3\xfd\xb5\xfd\xcc\xfd\xaa\xfd\x9a\xfd\xbd\xfd\x08\xfeP\xfe\x95\xfe\xdf\xfe \xff\x04\xff\x9b\xfe\'\xfe\xcb\xfd\x90\xfdD\xfdT\xfd\xb5\xfd\x19\xfeg\xfe\xc1\xfe8\xffb\xffh\xff\x83\xff\xac\xff\xec\xff0\x00\xb6\x000\x01\x84\x01\xdc\x01 \x02^\x02@\x02\xf9\x01\xb2\x01x\x01J\x01A\x01A\x01\\\x01Q\x016\x01:\x01\xfd\x00\xd3\x00\x8f\x00L\x00\x15\x00\xe8\xff\x00\x00,\x00K\x00g\x00`\x00S\x00M\x00>\x00\x0c\x00\xe5\xff\xcf\xff\xbc\xff\xc4\xff\xdf\xff\x12\x00/\x00,\x00;\x00B\x006\x00=\x00+\x00\x1f\x00\x04\x00\x11\x00E\x00M\x00|\x00\xa0\x00\x8a\x00\x99\x00t\x00U\x00,\x00\xf7\xff\xe9\xff\xb5\xff\xb9\xff\xb0\xff\xb3\xff\xc0\xff\xae\xff\x8e\xffX\xff*\xff\x08\xff\xed\xfe\xb6\xfe\x9d\xfe\xa8\xfe\xae\xfe\x06\xffT\xffw\xff\xae\xff\x9a\xff\xc2\xff\xfc\xff$\x00Y\x00g\x00\x91\x00\xbe\x00 
\x01\x8a\x01\x9f\x01\xab\x01G\x01\xd2\x00\xa8\x00S\x00A\x00\x03\x00\xa5\xff\xaa\xff|\xffe\xffy\xffa\xffA\xff\x19\xff\x01\xff!\xfft\xffm\xff\xbc\xffT\x00\xca\x007\x04\x8a\x06p\x07\xfd\x07\xa2\x06\xf5\x05\xbd\x03\xe5\x02\xc5\x02\x03\x02b\x02\xe3\x01\xb7\x01\x8a\x01Q\x00\xdb\xfd\xeb\xfa\r\xf9\x85\xf7\xf9\xf6\xbd\xf7\xa6\xf8\x1e\xfa\x14\xfb\xda\xfb4\xfc\xf0\xfc\xa4\xfci\xfaV\xfb\x1a\xfc4\xfc\xbd\xfe\x00\x00\x19\x01\xfe\x01H\x02\x8c\x02\xa3\x01\xe3\x00\x95\x00\xa3\xff\x8d\xff\xb0\x00,\x01\xb7\x01d\x02\xd1\x02"\x02\xaa\x00\x94\xff\xed\xfeX\xfd\x1a\xfde\xfd\xc5\xfd\xf6\xfd\x15\xff\xc2\xffg\xff\xeb\xff\x8d\xff\x18\xff\x84\xfeg\xff\xe7\xfe\x82\x00Y\x03\xa1\x01\x8e\x03I\x05\xb3\x03\xe8\x02%\x03~\x03\r\x01\x8a\x00\xd0\x03C\x01\x94\xff&\x04l\x02\x90\xff\xa4\x01\xbd\x01\xe4\xfeq\xff\x81\x01\x89\xff\xd3\xff\xc0\x01\xaf\x02\xc2\x01@\x02\xf5\x02\x18\x01\xb4\x00\x84\x02\xf4\x02\x06\x003\x01\xf9\x02\x1e\x005\x00e\x02\xbf\x00\xe2\xfe\xc1\xff\xfe\xff\x8b\xfe9\xff\x9e\x00\x9a\xfe\xb7\xff\x18\x01\x92\xfe\xd8\xff\xce\x01\xa3\xff\xc9\xfd\x83\x00\xd3\x00\x05\xff\xfd\x00\x04\x02*\x00g\x00\xe1\x01\xe9\xff\x1f\xff\x05\x01\xad\xff8\xfdY\x01\x82\x01\x0c\xfd\x9f\xff\xd5\x01\x98\xfe^\xfc\xcf\xff\x83\xfe\xdd\xfaV\xffI\x00\x95\xfb\xfe\xfc]\x00k\xfea\xfd\xbc\xff9\xfe\xb0\xfcZ\xff\xb9\xff\xde\xfd\x92\xff\xa0\x00\x13\xff\xf9\xfe\x1e\x00,\x00\xb0\xfeZ\xffM\xff+\xfe\x17\xff\xcd\xfe\xb0\xff\x11\x00\x03\xffm\xffb\xff\xf1\xfey\xfe\xb8\xfe2\x00\xe2\x00G\xff\x89\x00\xa9\x01\xb8\xff1\x00\xcc\x00j\x00\x9a\x00q\x01\x8d\x00\xfd\x00\x85\x02\x7f\x01\xdd\xff\xab\x00\x00\x00\xbb\xfe\xe0\x00l\x00m\xffD\x00\xe6\x00a\xff4\xfd\xe4\xfeu\xff\x0f\xfc\xc7\xfd\xd9\x02\x96\xfe:\xfdM\x03\xf1\x00\x8b\xfb>\x007\x02)\xfdI\xff\x1c\x05N\x01y\xfeU\x03\xbd\x03\x86\x01\x08\xff\xe8\x02\xff\xfc2\xff\xde\x00\xe7\xff\xe6\xfd\xfe\xfe\x91\x02C\xfc\r\xfeV\x00\xca\xfd?\xfa\x00\xffn\x00\xde\xfb)\xff\xdc\x01\x0b\xfe\xb1\x01e\x01\xb5\xfd\xe0\x00\xaf\x02\xa1\xfd\x9d\xfd\x02\x04\xfb\xffq\xfc\t\x01\xab\x060\xfd(\xff\xaf\x06\x15\xffT\xfc\xbb\x04,\x02\xe9\xfa\n\x03\xb8\x03F\xfd\x9c\xff\xe4\x05\xc4\x00\x08\xff(\x01\x91\x00u\xfft\xfbz\x033\x03\xf7\xfb\xbe\x02\n\x04\x96\x01\xf2\xfe\xe1\x00\x7f\x03\xc8\xfd\x15\xfe(\x01\x96\x020\x01\x19\x02\xe4\x00_\x01\x1e\x01\xd9\xfcS\xfe\xbc\x03\xd6\xfd\xfb\xfc\x9b\x012\x00<\x00}\xfc\xe0\x02\xea\xff\xf2\xfbE\xfe0\x01\xb8\x00C\xfcy\x01\xd0\x00~\xfe\xdc\x01\xc0\x00\r\x01`\xfe_\xfes\x03\x08\xfbT\x01\xc6\x04x\xfb\xc1\xfe\x1c\x05\xbc\xfc=\xfe\xb5\x00\xa1\xfd\xd4\xfd[\xfd\x7f\x01\x85\xfc\x08\x01\xe9\x00\x98\xfc\x05\x00\xdc\x02\x03\xfbR\xfc;\x05\x1e\xff\xa2\xfcF\x02\x17\x03y\xfe]\x01\xbc\x01$\x00\xeb\xfc\x18\x01r\xfd\xcb\x00\xa9\x04\xa4\xfe.\xfeb\x03f\x034\xf9o\x04\xf0\x00\xe8\xfa\x14\xff\x14\x02\xb8\x01\xf9\xfeM\xfd\xde\x03\xf6\x00\xd2\xfa%\x02\x8a\x01\xe0\xfc\xf3\xfc\x82\x03E\x03\x0c\xfen\x02V\x03y\xfd\xcf\xff\x9c\x03\xf2\xfet\x00\xbe\x03\xe6\xfdW\x02\x81\x03\x98\xfa\xe3\x00\xb4\x027\xfe\xfa\xfd\xbf\x03K\x01\x9a\xfa6\x03y\x00\xbc\xfc\x9c\x00p\x01)\xff\xeb\xfe\x07\x03\xa4\xfe?\xfed\x02\xd8\xfa`\x00\xcc\x05\x00\xfb\x9e\x00\x16\x07T\xf9\xf4\xfc\x05\x08\x0e\xfc\xac\xf9\xbe\x07O\xfe\xff\xfb\xfc\x04\xf2\x01\xee\xfc\xe1\xfe\xd9\x03\xdb\xfa\xa1\xfa\xbc\x04"\x07w\xf8\xc3\x01\xe6\x07\x1a\xf8\xec\xfd\xaf\x04o\xfdw\xfb@\x04\x86\x02A\xfc\xf0\x00\xe1\x04\xf0\xfbP\xfc\x96\x01\'\x02\x95\xfb\xb9\x00]\x03\x85\xfe\xa3\x00\x86\xfe\x91\xfd^\x00p\x02\xe5\xfa\xea\x01\xd3\x01:\xfd\x16\x03\xb9\xfe\xda\xfb\x8e\x02z\x00\xc2\xfb\xbf\xfe\x1f\x07w\x01e\xf8\x89\x03E\x05\x96\xf8\xbb\xfe\x04\x08]\xfaz\xfd\xd0\x07W\xfeG\xfe\x18\x05\xa8\xfc\x11\x00\x15\x00\xd7\xfeL\x00\xce\x00{\x01\x
a0\xff\x88\x002\x01_\x01\x85\xf9\x98\xff\xd9\x00\xed\xfb|\x01\x9d\x04\xef\xfe\x93\xfc0\x03g\xfe\x96\xfcG\x00\xf7\xff\xe0\xff1\x006\x05\xb2\x00\x1d\xfb\xa1\x02F\x00\x07\xfc_\xff\xc1\x01d\x02+\xff\xff\xffc\x02\x03\xffP\xfd.\xff5\x04l\xfdU\xfa\xb4\x05\xd1\x01X\xfcn\x01\xd8\x03\xac\xfd\x96\xfa|\x06\xe6\xfd\xbf\xf7t\x05\xdd\x01\x90\xfd\xaa\x02l\x03\xdf\xfa\xaa\xfe\xc0\x01\xd1\xfc%\xff\x01\x01\xf7\xfe\x08\x01=\x01\xbd\x01\xa5\xff\x96\xfcQ\x03\xbc\xfe\xeb\xfc6\x00f\x01\xb3\x03N\xfe%\x00\xfc\x06\x83\xfc\x15\xf9[\x07\x11\x01n\xf7\xd7\x03\xe7\x07\xca\xfa\xa5\xfc\x17\x07\xbc\x01\xaf\xf6\xaa\x04\xa7\x02a\xf5(\x05\x99\x04[\xf9\xaf\x03\xb8\x00\xe7\xfb\xab\x02\x82\x02\x11\xfc\xc7\xfd\xd3\x05\x16\xfd)\xffu\x00\xcf\x00\xe4\x03\xd4\xfc\xce\xfex\x02&\x00h\xfe1\xff\xc5\x01c\xff^\xfek\x032\xff\xcf\xfa\xa4\x06;\x01\xbc\xf6\x85\x04\x08\x02\xab\xfc%\xfe\x07\x04\xec\xff\xe4\xfa\xf5\x03s\x00\x1a\xfd\x87\x00\xcd\x01\x94\xfcF\xff\x1d\x02\x12\x011\xfe\x89\x02%\x00\x9f\x00\xc4\xfc\xe5\xfen\x06\xce\xfc)\xfe\xac\x07\xa2\xfe6\xf9\x02\x07q\x00u\xfa%\x01\xa9\x05\xee\xf9\x98\x00\x88\tp\xf7\xde\xfb:\x08\xeb\xfe&\xf7\x9e\x06^\x05d\xf9\x11\xfe\xcb\x07\xa4\xff\xe3\xf7q\x06I\x00v\xf9\xa2\x02j\x04w\xfc\xcc\xfek\x04\x1e\xfe:\xfb\xa5\x04>\x01\x83\xf9B\x02P\x02c\x01_\xfcA\x03\x8f\x02 \xf9\xf7\xff|\x03/\xfd\xca\xfd(\x07G\xfb\xba\xff\x97\x04e\x00d\xf9\x85\xfe\x86\x06^\xfb\xef\xff\xd1\x05\xd1\x00\xb1\xfa\x81\xffy\x04\x06\xfa\x83\xfd\x97\x04\xae\xfe\x99\x01\xd2\x02;\xfb\x13\x02T\x00\xf8\xf9L\x04\xdb\xfe$\xfc\xbc\x05\xdd\x03\xa7\xf6\xf4\x05\xc7\x05d\xf3\xfa\x01\x0e\x06\xf9\xf9\xa1\xfe\x98\x07:\x02\xb0\xfb_\x04\xb7\xfek\xf86\x032\x03\xac\xfei\xfeH\x05a\xfd\x10\xfeB\x05}\xf8\xe6\x01X\x025\xf7\xd3\x06v\x02\x9c\xfbc\xff\xab\x05/\xf9\xaf\xfa~\x08s\x03\xa5\xf5\xcc\x01\xc9\n\xbe\xf4\xf6\xfdM\t.\xfc\x1d\xf8\xb2\x07\xda\x01\xee\xf7L\x03\xc2\x04\xd0\xfcl\xfa\xea\x04\xff\x04q\xf5\xf1\x05\xd8\x03\x04\xf8\xba\x01\x97\x08s\xf8\xec\xfa\x03\x11\xa1\xf5\xa9\xf8Y\rc\x00\x81\xf1\xf0\x08\xd1\x07\xa2\xf6\xd4\xff`\x06?\x02>\xf5Q\x03~\x04\xd0\xfc\x8d\xfd%\x03\xfd\x05j\xf8\xcb\x02\xb2\x02\xbd\xf8\xd6\x01\x1d\x02\x15\x01@\xfc\xe5\x03~\x01D\xfa\xe2\x02}\x04%\xf6n\xff_\n\xb3\xf7\x86\xfc\xd3\x0b(\xfb\xbe\xf7r\n\x84\x01y\xf3+\x05\xf7\x07v\xf1\xb4\x01\xf8\x0fH\xf6\xa5\xf7\x0c\x0e\xca\xfe/\xf46\x06R\x01;\xfb\x8d\xff\xc2\x05\xb2\x03\xe8\xf9D\xff\x91\x06u\xf9b\xf9c\x0c\x97\xfc\xdf\xf5P\n\xce\x06s\xf6\x17\xfd\xe6\n\xe9\xfc\xf5\xf6\x98\x05\x96\x05\x90\xf6\xbc\x00\xb7\x0eU\xf5O\xfd\xbf\x0b\xf2\xf8\x0e\xfd\xd3\x034\x00B\xffM\xffT\x05\xa7\xfe\r\xfb\x8b\x05>\x03\x01\xf7|\xfe\x99\t+\xfb\x81\xfbc\t\xe5\xfd\x97\xf9S\x04x\x03Q\xfa\xd3\xfc\x12\t-\xfd\x9d\xf8\xd4\x03\xff\x07\xa3\xf9`\xfb\xf6\x07-\x01\xda\xf4\x9d\x05\x15\x06q\xf7\\\x02\xea\x05)\xfc(\xf7\x1a\x07\xc7\x05\x1c\xf6\xdd\xff\xf8\n\xb8\xf4\'\xfen\x08\x90\xfc\xb1\xf8{\x01\xf8\t\xca\xf7\x7f\x00r\x07E\xfb\xea\xf8\xc7\x04\xe0\x06!\xf9\x05\x03\x17\x06:\xfb\xb0\xfa\xc8\x06\xe9\x01\xcc\xf8\xd7\x02\xe6\x05\xd5\xfa\x13\x00\xe0\x05{\xf9J\xff\x9a\x04\x0e\xff\xf2\xf8\xf1\x06\xd4\x04\xdc\xf7\x87\x01\xce\x04\x9b\xfa^\xfdS\x05 
\xfc\xe8\xfcd\x04\xe2\x04\x8b\xfb\x16\xfb\x84\x06\'\xfc\x97\xfc\xf4\x05\xc8\xfe\xb0\xfc\xc5\x00\xdd\x05z\xfb\xd7\xfcj\x03\xf7\x01\xbf\xfaR\x00{\x04\xa4\xfd\xf9\xfe7\x03\x96\x01\xc1\xf8\xee\x02\xb0\xfe\x83\x01Y\x01\xba\xfa\x0e\x05\x86\xff\xb8\xfa\xee\x06\xdb\xff\xf9\xf2\x80\n\xed\x05G\xf2\xc3\x02+\x0b\xfa\xfa\xe1\xf5\x0b\t\xb9\x038\xf6\x01\x01:\x08g\xfc\xa8\xf7\xac\t\x81\x03l\xf5\xa8\x02\xe5\x08W\xf6\xbe\xfbd\x0bg\xff*\xf6\xd5\x04y\x06\x02\xfbe\xfe\xea\x03\x19\x02G\xf6\xa0\xffK\r$\xf8\x12\xf9\xf3\x0e\x98\xfe\x19\xef\x14\n\xc8\x08w\xf2X\xffs\x08h\x00\x9f\xf8;\x01\xae\x08\xe5\xfb\x93\xf4\xf3\n\xef\x05\x97\xedH\n\x9d\x08\x97\xf4!\xfe\xcd\x071\xff=\xfa\xf7\x04\x80\x00B\xfb\xda\xffi\x05W\xffA\xfc|\x03\xf4\x01?\xfa\x03\x00\x01\x06W\xfd\xb7\xfb\xf2\x05\x9c\x01n\xf9\xee\x02(\x06^\xf5\xb6\xff\n\n}\xfa\xb1\xfc\t\x04A\x04-\xfb{\xfb\x16\x08t\xfc\xab\xf9M\t\xd0\xfd\xd1\xfb\x95\x02:\x07_\xfb\x01\xf9\xb4\x08\xfb\xfa\x1a\xff5\x01\xc3\x00\xde\x02U\xff\xa0\xfd\xf6\x02X\xfe\xfd\xf9\xe2\x05\xb7\x022\xf8\xd3\x03\x9e\x03)\xfb\x9d\x03>\x02\x12\xfb/\xfdI\x04\xd4\xff\x94\xfc\xa9\x01-\x06\xe0\xf9K\xfcF\x0b\xc7\xfd\xff\xf4e\x04\xba\x05\xb5\xf8\x9e\xff\x0c\tE\xfd\xa8\xfb\x86\x03r\x01-\xfa\xd1\xff\xc1\x04a\xff\x14\xfd0\x04\xeb\x03$\xf8\x1b\x04E\xfc*\x00Q\x04r\xfc\xd1\x00X\x02\xcf\x00\x9e\xfc\x03\x01k\x00d\xff~\xff\xe2\xff\xeb\x03c\xff\xe0\xfb-\x04\x9a\xff\xe1\xfc\xa0\xff\xe3\x05&\xfdA\xfb$\x08\x12\xff\'\xfa\x8a\x00x\x07\x9d\xf8\x92\xfe\xd0\x07\x9d\xfb\x8c\xff\xcc\x03\x91\xfd\xb5\xf9\xf9\x06\x08\xfd\xa5\xfa}\x059\x03\x95\xfb\x15\xfe>\x03\x90\x00\xc7\xfct\xfe\xaa\x06\xf3\xfc_\xfe\x94\x04\x7f\x00\xd5\xf9;\x04\xda\x03:\xf9\x95\xfe[\x08\xf1\xfb\x05\xfb\x05\x03\xcb\x03\xad\xfd\xce\xf93\x04\x0e\x02/\xfe\'\xfdx\x06F\xfb$\x00\xb1\x01!\xf8\x95\x04V\x06\x95\xfb\xb4\xfa\xca\x05\x94\x00\x97\xfb\xf0\x00\x84\x04\'\xfd\xdc\xfb\xe9\x05[\x03\xf2\xf8\xd6\xfe\x9a\x07G\xfdJ\xfb\xb5\x04e\xfev\x01\x80\x00\xc9\xfc\xdb\x015\x04\xbd\xfc%\xf9\xbb\x06\xda\x00\x85\xff\x17\xfc\xa0\x03g\xfd\xe6\x01\xea\x04\x15\xf5\xb3\x02\xa9\x03}\x00\x7f\xfaR\x03\xe4\x05\x9c\xf8\x14\xfe\xed\x07z\xfa\x07\xfb\xd5\x07\xd5\x00\x08\xfc\x07\xfc/\x05\xc8\x04\xf6\xf9R\xfa\xb6\x08\x90\xfe6\xf9\x01\x02\xeb\x08\x1c\xfc\xb3\xf3\xdc\x0bi\x05?\xf5~\xff~\tn\xfai\xf7\xb4\nJ\x02\xa4\xf9\x97\x03\x02\x010\xfc\xd8\xfdJ\x03y\x04i\xf9\xe8\xfcV\x0c,\xfb\xab\xf6\r\x0c\xe3\x01M\xf2\'\x05D\x06\x8f\xfap\x00,\x02\x03\x04u\xf8\xc0\x00\xbd\x03\xf1\xfc\x1a\xfef\x01\xd7\x04\xa6\xfa\xc5\x02\xc6\x001\xfd\xd4\xfd\xec\x03M\xfe\xee\xfcx\x04\xb7\x03q\xf8\xb8\xfer\x0bi\xf5\xc8\xfd\x0b\x07[\x01\xb7\xf7V\xff\xe3\x0c\xfb\xf9\xec\xfa~\x02\x19\x03\x94\xfa\xab\xfei\x08-\xfbj\xfc\xb4\x02\x04\x06\xcd\xfb\xaa\xf9k\x08\x88\xfe\x87\xf6W\x05p\x08\x8a\xf8`\xfd\xd9\x07J\xfd\xa1\xfc\x1f\x01\x98\x00\xf9\xfe\xcc\xfe\xde\x00s\x04\xff\xfd\xc5\xfeg\x03#\xfb\xf5\xfe\xe8\x03\xb5\xfc9\x01#\x01\xf7\xff\x99\x03\x94\xfa\xf4\xff]\x05\x12\x00\xbd\xf6a\x03\xe6\x07\xc8\xf9\x87\x00\xfc\x01\xfb\x02\xa6\xfc\xa9\xfb\x9c\x07\xc3\xfd\x94\xf9\xb0\x05\x7f\x05\x15\xf9q\xfc\x8e\t\xaa\xfd\xd2\xf7\x9d\x06\xa0\x02\x17\xf7\xb8\x01~\x08\xf1\xfb8\xfa|\x03R\x05\xa2\xfaQ\xfc9\x06\x8e\x00\x15\xfa\x87\x01{\x02<\x00\x19\xff~\x01\x0c\xfdJ\x03\xf4\xfc\x9b\xff\xb3\x04H\xfc\xa3\x03\x8b\x00$\xfe\xca\xfbm\x01!\x06\xda\xfb\xc3\xfb\x89\x05\x00\x00\xb0\xfb\xd4\xff\xf2\x03d\xfb\xd3\xfb\xdb\x08\x84\xffd\xfa\xcd\x03K\x054\xf7F\xfd\x97\tx\x01e\xfa\xd1\xff\x12\x06l\xfd\xc4\xfc\xaf\x02\x87\x01Y\xfc\xad\x00t\x02\xe3\xff\x88\xffv\xfeL\x01\xf5\xfe\xe9\xfe\xe3\xffG\x03\xa4\xfc\x05\x00I\x03u\xfe\xb2\xfd\x91\xffr\x02\xc1\xfcB\xfd\xde\x04H\x03\xc8\xfaL\x00\xf2\x03-\xf9\xd6\x0
\xf4\xbb\xf3<\xf5\x9c\xf5\xa2\xf6\xcf\xfa\xd3\xfck\xf84\xfd\x05\xfdJ\xfb\x8c\xff\x87\xfd\xe0\xfd\xd8\x01\xf6\xf6o\x00X\xff\x83\xf8j\xf80\xfbb\xfa\xa1\xf1\xb7\xf87\xf8_\xf2\xc8\xee\x82\xf8\xdb\xf3\xcf\xec\xbc\xf2\xab\xf5C\xf4-\xee\x12\xf5\xe2\xf3/\xf4F\xf4\xed\xf8%\xf5\x8e\xf6^\xff\x03\xf9\xad\xf9\xf3\x04\x82\x01N\xf7\x99\x03C\t\x1c\x04i\x03W\rw\x0c\x1c\x02\xb5\x02\x9a\x0c\x9b\t6\t\xc4\x0b\xd8\x08\n\x0b&\tS\x05j\x04\xcd\n\x1b\x08S\x01\x8f\r\xdd\x0b\xf4\x01\xdd\x02O\x08e\x03\x80\x00\x80\x07Z\ne\x01\xca\x04m\t\x8f\xfe\x8d\x00\x13\t\xc8\x03\x8f\x00x\x0ci\r\xde\x001\x03\xcb\t\xc9\x08j\x06\xf3\x0c\x8f\x0e\x81\t@\t\xdc\x0e\x04\x08\xc8\x05x\r\xea\x0b<\x08\x85\x0c9\x0b\xb4\x05|\x06s\x04\xd3\xff\xb4\x02\x95\x07\x1b\x01[\xfd\xa3\x00E\xfd&\xf4\t\xf9\x84\xfc\xeb\xf6\x88\xf4B\xf7\x94\xf7O\xf3\xdc\xf2W\xf8\x93\xf4\xc6\xec_\xf29\xf7e\xf8\xb1\xf5]\xf5)\xf7N\xef\x90\xf0\xd3\xfa1\xf9\xc9\xef\xec\xf8<\xfd?\xf4\xe7\xee\xb3\xf7\x83\xf7X\xee\'\xf4h\xfbJ\xf3\xf5\xfa7\xf6\xde\xefA\xee\x7f\xf6\x97\xf9\xc0\xf3\x16\xfe\x04\xfaA\xf9B\xf4\x8b\x00X\xf7\x8d\xf8\x8e\x06O\xfc\x98\xfe\xbc\x06\xad\rS\xff\xff\xf7\x18\t\x9f\n\x11\xfd\x92\x07\xf1\x18\x03\n=\xf6\xb5\x13Q\n\xeb\xf7\xf4\x0e*\x10g\x02O\x07\xa0\rj\n\x85\x03\xb0\x03\x1e\t \x06\xd5\x06\xf4\x0e\x1c\x10W\x02\xd9\x05\x96\t \r\xc1\x02\x8d\t\xc4\x10\x8e\x04z\x05F\x0c~\x05,\x00_\x08>\x02\xf2\x02\xfd\x05V\x04\x9d\x00\x04\x02`\x02i\xfb\xbd\x03E\x00I\x00;\x05q\xfc\xe2\xfa\xab\x04\x17\x02\xc9\xf9j\x05Z\xff\xf8\xfa\xa8\xfek\x04\x9b\x01\xaa\xf9,\xfe\x0f\xfd\xfb\xfd|\xfd\xa0\x03\x16\xfe\xaf\xf3I\xf9h\x04w\xf9K\xf2\xc0\x06\xb6\xf7\xbb\xf3\x13\xf7c\x03p\xfaF\xefW\xfe5\xfe\xf9\xf0\x89\xf6L\t\xa9\xf9+\xf2\x08\xf5\xf7\x0b\xe9\xfc\'\xf1\x05\x0c\x16\x04\x01\xec?\x08\x06\x13Q\xfa\x1b\xfc\n\x05\xa9\x0e(\xf8v\x00\x13\x12\x8b\x02\xc1\xf8\xec\x04\x00\x049\x03\xcb\xfd\x0f\xf9\xbd\x07\xfa\xf9W\x04\xc9\xfcc\xfaP\x01\xb1\xfc\xc1\xf4\xf6\xfd\x11\nb\x00\x0f\xfa\x85\xfbC\x07\xea\x06\x93\xfbc\xf5\x87\x0f\r\x031\xf9\xd4\x0c\x93\x14J\xf8\xa3\xf4\xd6\x0c6\x06\xf4\x01\xf7\x03\x15\x0bR\xff\xfa\x04h\x02P\x07\xfd\xfeX\xf4f\x08e\x00\xf7\xfd\xaf\x05\x98\x042\xeeV\xf9{\ne\xf0K\xf8\x81\x03\x8e\xfb\x15\xf2\xa7\x00\x1f\x00y\xfa\x0b\xef?\xf9\x08\x03\xcc\xee\x02\xfeS\x08\x02\xfa}\xe8\xee\x05\xeb\xfa\xc8\xed\xcf\x03_\xf9\x03\xf98\x01\xac\xfd\xfe\xf9\'\x01V\xf7\xd5\xf7\xf0\x0c\x0e\xfe\xc9\xfa\x1c\x07\xc2\x07\xe4\xfdw\xf8\x7f\x0f>\th\xf5\xbb\x018\x1b\x7f\x01s\xf4\x8b\x10{\x0f\xf9\xf7\x1b\x03\xf4\x16\xa0\xfct\x01\x9b\x0cI\t\x86\xfa"\t9\t\xf9\xfa\xff\x06k\x0b\xc0\xfa\xeb\xfa\xed\r<\x04\xd6\xfc\xae\xf5\xcf\x0e\x1b\x02\xad\xf25\x04\x06\x0bN\xf8\xcf\xf9\xa0\x06h\xfa+\xfe\n\xfe\xf2\x08N\xf9\xe5\x01\x86\xff\x9b\xfa7\xfdH\xff\xa7\ti\xf9L\xfb*\x06\xb9\x06\x9b\xf5\xd2\xfcC\x0e\x07\xf7\x17\xf6\xaa\x11\xe2\t\xed\xf5\x91\x00\x0e\n-\xfb\xf4\xfd\xb7\nx\x03=\xfb\xc1\x05\xaf\x068\xfe\x87\x03[\x03\x1c\xf7[\xfa\xf0\x0b\xc1\x00L\xf9\x15\x01\x06\x03\xc3\xf3?\xf42\x03\x93\x02E\xf0\x82\xf2\xe0\x07\x1c\xfb\x7f\xf0`\xf9\xf2\xfeL\xf1\xdc\xf2[\x02-\x04n\xf4\xd8\xf8h\x01\x86\xf9?\xf9\x85\xfcP\x04\xc3\xfa]\xf98\x05a\xfen\xf2~\x02\x06\x00\x81\xf2\x81\xfc\xf9\xff\xe9\xfa\xf3\xf9\xd9\xfc\x95\xfb\xd1\xf7 
\xf4\x8d\x04\xe3\x01\x8e\xf2&\xf8\n\x0eG\xfbF\xec\xeb\t\xf7\x10(\xf9n\xf1\x9f\x11L\x0c\x89\xf5W\x04A\x0f6\x07\xd4\xf3.\x0e\xf1\x17S\xf1\xb9\xfdW\x1eX\xfe\x05\xedq\x19}\x08\xce\xfa\xa9\x02a\nJ\x00m\x04I\x06.\x06\xaa\xfdm\xfb\x90\x0ft\x06\x8c\x00\xf7\x01\xe2\n\x93\xfc\x13\x00\xa5\x0fz\xffa\xfa\xf8\x0f\xcb\x05\xda\xf5\xd6\x07V\n\xda\xfb(\xf8\x1f\x08\xb8\x02\x91\xfa\xad\x03?\xfeI\x00T\xfb\x9e\xfe\xc9\xff\xc2\xff\x90\x01\x8c\xfe\x0c\xfch\x07\xda\x04\x9c\xf8\n\x01J\nh\x018\x04f\nj\x05\x00\x02\xc4\x08X\x0c\xbb\xfe\xae\x07x\x05N\t\xb9\x06{\x03J\x0cQ\xff\xce\xf8\x9f\x05\xbc\x07\xc8\xf8C\x02\xb3\x00\r\xfa\x8b\xf7\x93\xfe\xba\x01\xaa\xf5\xca\xf1A\xfa6\xfd\xaf\xfd\xa5\xf7D\xf9\x03\xfdG\xf1\xbd\xfb\x81\xfe\xcf\xf8\x13\xfdy\xfc\xa0\xf5\xac\x00&\x03\x05\xf3t\xf6\x12\x00\xaa\xf7\xa0\xf8\xd3\xffa\x00\xe4\xf6!\xf4M\xf4X\xfaC\xfe \xf3x\xf9Z\xf9\x10\xfad\xfb\xf5\xf0~\xfb\xa5\x00\xfc\xf0Z\xf7i\x00,\x01\xa9\xff\xa4\xf1t\xfc\xd1\x0cg\xfc\xe0\xeer\t\xb5\x0e\x98\xf6\x0e\xfb\x80\x0e`\x05h\xfc\xe4\xff\xc8\x08\xdd\xfe\xbf\xff\x1e\x05\xbf\x051\x05\xa6\xfb\r\x0bS\xfb\xb9\xfe\xb1\x06h\xfeC\xfa\x97\x04\xe5\r\x14\xfc\xcf\xfa\x87\x03b\x03M\x00;\xfb]\x07\x15\x04\x14\x05%\x02\x17\x02\xb1\r\xde\x02\xc2\xfbW\x03\xf9\x10\x14\x03K\x00d\t\xe1\x0b4\xff\xf9\x00\xfb\x0b\xc0\xff\x8e\x03_\x02\'\x01\xdb\x08T\x03z\xffv\x050\x02\xae\x00\x91\x01\x0b\x00\x15\x00M\x05K\x02\x1e\x05S\x03\xf9\x00\x05\x02y\x01}\x01\xb5\x03L\x078\x01B\x06\t\x08u\x03\x1e\x03<\x03T\x06\x0c\x04\x1a\x03Y\x07\xe1\x06\xa8\x02\r\x03\xa4\x01\xc8\x01*\x02n\xfd\xdf\x01\x9f\x02\x83\xff%\xfea\xfc\\\xfe\xf7\xfc\x8a\xfb\xb4\xfav\xfc\x81\xfa#\xfb\x0e\xfd\xbc\xf8\xf8\xf8\x9f\xf5\xd7\xf8\xfe\xfa\x04\xf7V\xf9\xae\xf8{\xf4\xad\xf8m\xf8H\xf5O\xf7\x15\xf5\xd7\xf6\x1f\xfb\xa3\xf7s\xf9s\xf5\xa7\xf3\xcb\xf63\xf8\x96\xfb\x1c\xf9\xc0\xf7\xd6\xfa\xc8\xf6\n\xf3\x08\xff^\xfa\x88\xf4\xbc\xfc\xca\xf9\xa4\xfcP\xfc~\xfc?\xfa\xa5\xfa\x06\xf9\x8b\x00\x12\xff\x06\x00\xee\x03\x90\xfa\xe3\x00g\x03}\x01\xeb\xfd\x05\x02c\x00R\x04\xa1\t\xf6\x05\xe0\x01C\x04\xd0\x01\x93\x05\x11\x05e\x06\xb1\x07\xcd\x05\xae\x0c\xbe\x03?\x06\x18\x08v\x05\x8b\x04\x12\x07k\x06\x80\x035\ri\x07\xe8\x02\xfa\x03\x8a\x05\\\x04i\x04D\x05\x88\x04"\x03r\x03\x9a\x07\x15\x03\xaf\x01\xa7\x03\x8b\x02\x05\x03\xb3\x03\xe1\x05\xe0\x03\xd0\x04m\x06\x14\x04(\x05-\x04\x08\x05\x85\x07\x97\x07Y\x05\xd9\x07\xa7\x06\xd7\x05\xcb\x07o\x05\xca\x05\xdf\x03\xb0\x04+\x05C\x05\xfa\x03\x0e\x02\xf7\x00\xb5\x00\xef\x00\xdd\xff5\x00\xec\xfe8\xfd\x1b\xfd\xe7\xff\xfe\xfc,\xfb>\xfd~\xfb\xc4\xf98\xfc\xd0\xfcu\xfa\x89\xfa\xe0\xfb\xc8\xfa\x12\xfaT\xfc\xd9\xfaz\xfa\xfb\xfa\x99\xfc\t\xfbV\xfaT\xfc\xe1\xfc\x15\xfc\x9e\xfa3\xfb)\xfbu\xfc\x9d\xfb\xe0\xfa\xf2\xfa_\xfbq\xfa\xdb\xfb\xa1\xfa\xf4\xf9\x94\xfaP\xfa\xa4\xf9F\xfb\x9a\xfb\x11\xf9m\xfa?\xfb\x99\xf9\x15\xfa\xe3\xfb\xe6\xf7\xa4\xfbA\xfc\xce\xf9i\xfb\xe9\xfc\xc2\xfa\x95\xfaO\xfd\xbf\xfcf\xfb?\xfe\xfb\xff~\xfdL\xff\xcf\xff\x8c\xfev\xff\xcf\x01<\x01\xdb\x02\x9b\x02\x12\x02\xd7\x03 
\x04S\x04I\x04\xd8\x03\x1f\x04\xc4\x05\x8d\x06\xda\x057\x04\x8b\x052\x05T\x04\x9a\x03\x83\x05@\x05e\x04\xde\x03b\x03\x93\x02F\x02\xf9\x03\x10\x02\x98\x02\xaa\x02\n\x01\xe9\xff\xf9\x01c\x02\x92\x00\x15\x01\xfe\x00v\x00\xef\xff&\x01\xbf\x00?\x00_\x00\xf0\xff\xea\x00\xed\x00\x8e\x01\xd5\x00t\xff/\x02\xbe\x00X\x00\x17\x02\x05\x03\x1d\x02M\x01\xaf\x01\xe7\x02.\x03\x01\x03\xc1\x03O\x03\x8c\x03;\x03\x9f\x03\xe3\x04\x13\x05\x96\x03\x1f\x04\xb6\x03P\x03~\x03\x98\x03\xcc\x02X\x02\x17\x02\xdc\x00\x17\x01\xa0\x00\xb2\xff\r\xff\x16\xff\xe2\xfd\xad\xfd\xc1\xfd\xd3\xfc`\xfcN\xfcq\xfc;\xfba\xfb>\xfbB\xfb$\xfb\x16\xfb\x03\xfa\xe0\xfa\x13\xfbf\xfb\xef\xfbE\xfa\xfe\xf95\xfa\xad\xfbv\xfb\x9f\xfb\\\xfc%\xfb\x89\xfb\x99\xfbe\xfb\xcd\xfc\xb3\xfc.\xfc\xf0\xfc\x9d\xfc\x97\xfbH\xfdM\xfeP\xfdO\xfb\xe1\xfdK\xfe(\xfd|\xfe\x14\xfep\xfd^\xfdu\xff\xd7\xfe\xa5\xfe\x83\xffS\xff\xf1\xfd\x0f\x01@\x01\n\xff\xcd\x00\xd9\x01\xa5\x00\xc4\x02\x8d\x03\x81\x00\x9a\x02\xe7\x031\x02F\x03\xf1\x04\x89\x03\xbe\x02\xbd\x03\xb0\x04W\x035\x02\xa6\x03\x8b\x04)\x03\xed\x02(\x03\x9f\x02\xfa\x00\x8a\x02\xaa\x02\xc7\x00\xcb\x02\xa4\x01\x9f\xffL\x00\xb9\x00\xd4\x00\xe1\xff\xcc\xfe\xe0\x00\xd2\xff\x80\xff\x9b\x01T\x00\xad\x00\xe3\xffp\x00u\x01\xf5\x02m\x014\x01\x0c\x025\x03\x98\x03\x19\x02\x95\x03\x08\x05\xad\x03\xf1\x01>\x04\xac\x05J\x04r\x03W\x04\xd1\x03\xcb\x03\xb2\x03r\x03r\x03J\x03\xb8\x02\xb7\x01\x99\x02W\x02V\x01:\x00\xa7\x00{\x00\x18\x00Y\x00I\xff{\xff\x05\xff\xb1\xfe\xf2\xfd#\xfeK\xfe\x84\xfe~\xfd\xd7\xfd\xed\xfd\xb7\xfd&\xfdx\xfdN\xfdd\xfc\x81\xfd\xbd\xfc\x0c\xfd\xb1\xfc\x1c\xfd \xfd^\xfd\xb6\xfc\xc0\xfb\x8d\xfb\xbf\xfdT\xfdF\xfc(\xfe\xb5\xfc\x13\xfd\x03\xfcd\xfc\xb6\xfd\xde\xfc)\xfc\xbb\xfc\x85\xfd\xb2\xfbW\xfd\xa0\xfc@\xfc\xe6\xfby\xfc@\xfe\x9b\xfc\xae\xfd_\xfd=\xfd\x10\xfd\xea\xfef\xffM\xfej\x003\xffl\xff\xdc\xff(\x00y\x00\x06\x01\xae\x01\xc7\x00<\x01X\x01\x1c\x02\x0c\x02\xcf\x01\xc1\x01*\x02\xfb\x01 
\x02k\x02n\x01\xbd\x01~\x028\x01\x1d\x01\xe5\x01^\x015\x01\xfb\x00;\x01R\x00\xb1\xff\xd6\xffw\x01!\x01x\xfe;\x00\xd1\x00\xb2\xff\xc7\xff\x8f\xff\x88\xff\xda\xfe\xc0\x00k\x00\x16\x00\xc4\xff5\xff\x03\x001\x00\xf3\xff\xb8\x00\xdc\x00\x93\xff\xae\xff\xd6\x00\xb5\x01\x1a\x01\xe5\x00\x05\x00\x99\x00\xd9\x012\x02\xbd\x01\xdc\x01j\x01\xb9\x01S\x02\x83\x02w\x02g\x02\xe8\x028\x02d\x02H\x03\xe5\x02\xbe\x02x\x02{\x02B\x02\x95\x02\xe7\x02|\x02t\x02\xb7\x01Y\x01\'\x01\xc7\x01\xb8\x01\x89\x00\x14\x00\xde\x00\x12\x00\xad\xffe\x00\x0c\x00R\xff\x06\xff\xa7\xff\xba\xfe\x9a\xfe\xda\xfe\xe6\xfe0\xfe\x06\xfey\xfeq\xfe;\xfdK\xfe\xe7\xfd\x86\xfc\xd9\xfdi\xfd\xaa\xfd\x90\xfdW\xfcp\xfd\xee\xfc\xb2\xfc6\xfd\xbd\xfd6\xfc\xea\xfc[\xfd\xb0\xfci\xfdc\xfdf\xfc\x1c\xfe\x07\xff\xae\xfc\x17\xff`\xfd\x9b\xfc.\xff\xe7\xfe\x04\xff\xa9\xff+\x00\x91\xfd"\xfe$\x00\xd9\xfe\t\xff\x0e\x00]\x00\x84\xfe,\x00\xcd\x00v\x006\xfe$\x00\x9a\x01\x9e\xfd\x98\x01X\x01\x8b\x00\xf9\xff\xb0\x00p\x00/\x00\t\x01\xa4\x00\x17\x01M\x00\x92\x02u\x00\x8e\x00\xe8\x00\x8b\x01\x03\x00]\x01\x85\x01e\xff\xa6\x01=\x00g\x00\xa1\x00H\x01\xe8\x003\x00\xa8\x00\xaf\xff\xc2\x00\xd6\x00"\x00\x8f\x01%\x01#\x01b\xff\xe4\x01\xdf\x01\xa6\xff\xd4\x01\xa5\x00\xe0\x01V\x02\xd0\x01&\x00T\x02\xb5\x00\xd5\x01\xd9\x029\x02\xdf\x02v\x01\xfe\x01a\x01\xf4\x03e\x01\x03\x04d\x03\x97\x01\xba\x02.\x03i\x02\xf6\x02\xc0\x03|\x02\x95\x02\xe4\x001\x03\xc7\x01\x15\x02#\x02x\x00\xee\x01\xe8\xffS\x00\xd7\x01L\xff\r\x00\xe3\xff:\x00\xc6\xffN\xff\xd3\xff\x8c\xfe\x13\xff\x1f\xfeT\xfe\xce\xff\xd8\xfe\xe6\xfd\x8e\xfd\xaa\xfc"\xfe\x06\xfd"\xfe{\xfd\x14\xfe\xa8\xfb\xa6\xfcr\xfd\x9a\xfdE\xfd\xb2\xfc\x00\xfe>\xfc@\xfdI\xfc6\xfe\xb6\xfd\x8a\xfb|\xfdJ\xfe\xd3\xfc0\xfd\x03\xffV\xfd\xcb\xfb\x17\xfd"\xfe\xbf\xfe\xea\xfc[\x00k\xfbp\xfc=\xfeG\xfec\xff\\\xfd\xfe\x00q\xf9\xe6\x00^\xfeF\x01\x00\x00W\xfc\x93\x00\xc2\xfb\xea\x02-\x01\x8a\xff\x14\x00\x9e\x00\x8a\xff\xf7\xff\xc8\xfeG\x02;\x01\xd8\x003\x02\xaa\xfd\x8d\xffY\x02@\x03\xf8\x023\x00=\xfd\xce\xff\xe5\x01\xb8\x02\x81\x03\\\x02\xc8\xfd\xa9\xfd}\x02\x83\x02\x85\x00\xd2\x00|\xfec\xff\x0b\x02\xae\xff\xc9\x00z\x00\xb9\x00\x07\x00@\xfe\xc6\x00J\x03\xf9\xffj\x00\x10\x02\x19\x00\x98\x01\x8a\x01\x07\x01\xe4\x03\xd8\x00\x89\x01C\x02\xb2\x021\x04\x84\x02\x05\x04(\x01v\x01{\x04\xa3\x03j\x03\x1f\x04x\x01\xf7\x03\xff\x03y\x03R\x02M\x02B\x02\xa9\x01x\x03\xe3\x02}\x02\xde\x00S\x01$\x00c\x01\x8f\x01\xa4\x00n\x00\x92\xfe?\xfe%\x00\xb2\x00\xfd\xfe\x0e\x00\x00\xfd4\xfd1\xff\x98\xfe}\xfd\x19\x00\xc6\xfd\xe0\xfcG\xfdt\xfc\xe5\xfe9\xfc\xbd\xfd\xc4\xfe\x87\xfc\x99\xff\xb6\xfe\xa6\xfa\xb9\xfbc\xfcV\xfc\x9e\xfd\x8c\xfd\x8c\xfc\x86\xfd-\xfa\x16\xfc\x8e\xfc<\xfd/\xfc\xe2\xfbp\xfc\xaa\xfc|\xfd\xbb\xfai\xfe<\xfc\x8a\xfd\xcf\xfd+\xfd<\xfd\x03\x02\x9c\xfc\xf3\xf6j\x031\x00\x12\xfc\x12\x03\x9e\x02\xf9\xfaH\xfe\x9f\x02\xf1\x00\xbc\xfew\xfc\xab\x02\'\x05\x8e\x05t\x026\xfd\x9c\xff\xeb\xfd\xe5\x02B\x02\xda\x02&\x03\x1f\x05\xdf\x04\x15\xfc\xce\xff\xff\x00!\xffl\xff\xc6\x07\x11\x03\xaa\xfd\x99\x03\x9d\x00\x8b\xff\xed\xffx\x01\\\x03\xa4\x03m\xfe\xbb\xfe\x1b\x02p\x03c\x04_\x00W\x02N\x049\xfez\x00j\x05\xfe\x03\xaf\x02\xab\x04#\x06\xa5\x01\xbc\x00\xb6\x066\x06;\x03\x18\x04\x9c\x02\xc3\x02\xd6\x04\xb0\x07\xc1\x043\x02\xad\x03\x0b\x01\xc0\xffl\x02\x96\x03\xce\x01d\x02\xe1\x02\x84\xfdO\xfe\xc6\x01\x97\xffe\xfe\x8a\xff\xe6\xfd\x01\xfd\x8d\xff\xb4\xff\xc0\x01\x82\x00\x17\xfb\xe0\xf9|\xff\xcf\x00\'\xff\xcb\x00I\x01\xbd\xfe\xb1\xfav\xfc\xa2\xfe\x1e\xff$\xff\x99\xfd\xb8\xfd\x8a\xfdY\xff\x99\xff3\xfd\xf8\xf9\xd4\xfb\xad\xfd\xd4\xf9\xda\xfb`\x02\x08\x00\xc9\xf7_\xf9\x99\xfb\xa0\xfa\xe6\xfc(\xfe\xd4\xf8s\xf
a\xef\xfc\xeb\xfa@\xfd\xe8\xfb\xdb\xf8V\xf7\xb0\xffh\xfeD\xf8F\xfc\x02\xfe\x90\xfao\xfcM\xfc&\xf9`\xff\x9c\x01\x07\xfdI\xfb\xb6\xfcg\x00n\x03`\x00\xfe\xfa:\xfd \x01_\x02\x8c\xff\xe5\x00\xe5\x02B\x00\x86\x03\x00\x03\xd3\xfcP\xfd\xb6\xff\xa4\x03r\t\xa5\x05f\xfd|\xfc\xfd\xfc\xb0\x00\x85\x039\x01&\xff\x8e\xff\xd5\xff \xfek\x00\x80\x01\x8f\xff\xd1\xfe+\xff\x9f\x03\xaf\x05-\x06\'\t\x0f\x0b\x1b\x0b\xaa\t\xec\x08\xef\x07C\x0c\xd3\x11\xcb\x11x\x11\x08\x12\x9e\x0f$\r;\x0e\x98\r$\x0c\x06\t\x18\x07\xb2\x07@\x08\xfb\x06\xcf\x05\xd4\x02\xf4\xfeF\xfa.\xf8n\xf9\x9e\xf9x\xf8;\xf7\xe7\xf7\x8a\xf5S\xf6U\xf5\xeb\xf4"\xf7\xe1\xf3\x1a\xf3\x12\xf5\xf8\xf9y\xfc\xbd\xfbr\xfb\xe8\xf9\xdf\xf8\'\xf96\xfb\xe1\xfd\x96\xff\xdb\xfc\t\xfe\xe4\xfd\x80\xfcs\xff%\x00\r\xfe\xd8\xfb\xab\xfb\xfb\xfb\xd7\xfe\xc9\xfdp\xfe)\x00(\xfe}\xfb\xf7\xf9N\xff\x06\x00\x18\xfc\x03\xfe\xb8\xffd\xfdA\xfd\xb9\x00*\x01\x1c\xfc\x94\xfa\x93\xfb\x8a\xfb \xfe\xea\x010\xfe\x95\xfcD\xfd\x18\xf9b\xf9:\xfa9\xfc\x98\xfb\r\xf9\x10\xfa\xff\x00u\x01\xd3\xfc\xb2\xf9\xf5\xf4\xdc\xf4\x17\xfc\xbd\x01\xe4\x01.\x00\xc3\xfe\xff\xfd)\xfb\x9c\xf9\x8e\xfc\xe6\xfb\x8f\xfbW\xfc\xb0\xf9\xad\xfd\r\x03\x84\xffc\xfc\xb7\xf8\x8d\xf0\xff\xeb\xfb\xf1G\x01\x13\x12\x0c\x1c\xe0\x1a|\x12H\x0eu\x0fX\x13g\x1c\xb5)\xa44\xf85\xed482D.\r)5\x1f%\x18\xd0\x15\xf0\x17\t\x1c\xfb\x19/\x11r\x041\xf68\xe8\'\xe0\xa5\xdf\x99\xe2u\xe47\xe3\x89\xe1\xdb\xde\xa3\xda\x1b\xd7\x9e\xd4\xb3\xd4\x90\xda\xb4\xe4T\xef\xbf\xf8>\xffW\xffu\xfa<\xfa-\xfd\xe5\x03\xe0\r\x00\x13\xa4\x16s\x18[\x17\x96\x14v\x0f\x13\t\x9e\x04\xde\x03\xc8\x04\xf1\x07!\x08\xfe\x04\xac\xfdX\xf2D\xea\xc7\xe7\xc1\xe7C\xe9\xd0\xec\x9c\xee\xbf\xee\xfd\xed\xf9\xed\x93\xecw\xeb@\xed\x05\xf1`\xf7D\x00A\x07u\x07\xcd\x04\xe0\x019\xfeM\xfe\xb2\x01\x9b\x05\x86\x08\x19\x07"\x03\xba\xfeK\xfa\x9d\xf6\xa1\xf4\x8a\xf0h\xf0\x96\xf2\x98\xf3\x97\xf6N\xf7\xd6\xf2\xfa\xebK\xe7\xea\xe5\x89\xea\xd8\xf2*\xf9\x98\xf9+\xf7\x19\xf5{\xf7\xd3\xf8@\xf8\xde\xf5F\xeep\xeb\xc1\xf3$\x15\\A\xf5S"D\x9b"\x03\x13\x7f!?r\xe9ZqL\x11Ha?\xdf&\xef\x0cc\x071\x10\xd0\x13\xf0\x07\x84\xee\x9e\xd2\xd4\xbb\x86\xad\x91\xad\x8e\xbbR\xcb\x11\xd0&\xc7\x9f\xbe_\xc0F\xc9\x8b\xd0\x87\xd7u\xe5g\xf6|\x08h\x12\x7f\x19*\x1e\xc1\x1c\xad\x1d\xbe\x1f\x0e"8&\xbd"\xff\x18\xe0\x12\x8b\x0f\x97\t}\xfd\x9e\xec\x90\xde\x9f\xd6\xbd\xcf\xbf\xcd\xe0\xcf\xf2\xd0\xea\xcc\xa8\xc6\x0f\xc5\xf3\xca\x14\xd6Z\xdcb\xe0L\xe7w\xf1\x88\xfe\x0c\n\xf2\x12\xd9\x18?\x19g\x15]\x14\x8b\x18\x00 
\xd7#\xf1\x1e\xa9\x16\x0c\x0fd\x07Q\xff\x1a\xf7s\xf0\x95\xed\x96\xec\xc7\xe9\xcb\xe6\xf7\xe1\x1a\xdc"\xd6\xe3\xd2)\xd6f\xdfd\xe8\xf1\xee\xb0\xf0\x02\xf2\x90\xf6\x98\xfa\t\x02q\x08\xe0\x0e*\x13\x1a\x15\xb9\x17\xca\x180\x19\xe1\x12\xd1\x0b\x00\ta\x03\xa0\xfd\x96\xfe\xaa\x14;>\xa9Y\x81PC.9\x17\x03\x1d\xa44\xfcL\x88[\x9ba\xfdV\xc8=e\'5\x1c\xa3\x17\xc3\x0bk\xfb\x1f\xf7\xfd\x01\xc4\x0bi\x01\xff\xe2\xf2\xc1\x95\xaf=\xb1\xee\xc1`\xdaN\xed\xfb\xf0\xd4\xe3\xba\xd4\x9f\xd2*\xdc\xa5\xeb\xbb\xf8\xab\x06)\x16\x00$[)\x81"8\x12`\x03\xa3\xffp\x03\x93\x0e`\x17[\x10L\x01\x07\xed\x89\xdaP\xd4\xca\xd3\xfe\xd2\xa7\xd0\xe3\xd0\x85\xd7\xd5\xe0X\xe4I\xe1\xd8\xdc}\xd9\x1d\xdf\xb9\xf1\xc5\x08\xd5\x19W\x1c;\x13?\x0b\xbf\t\x8a\x0f\xb3\x17\xe3\x1a\xf4\x18\x88\x13\xee\x0c\xcf\x05T\xfe\xdf\xf6\xde\xed\x16\xe6\xfa\xe1Z\xe4\xea\xe9Y\xe9\xcf\xe2G\xd9\x07\xd4\xb2\xd5\xc6\xdb`\xe5\x81\xee\xdc\xf4L\xf8\x81\xf9\xf9\xfc@\x01H\x03\x8c\x05=\t~\x11\xff\x1a\x90\x1f\xa8\x1d\xa5\x14\x85\x08\x92\xfej\xfa\x00\xfe\xf7\x04\xc0\x03\xb2\xf5p\xe4\xf1\xe6\xa0\x0c\xda>\xceS\x03=\xbb\x1b\xa7\x14\xaa,\xd4M\xef^\xbdbs^qN]=\xba0j\'\xcd\x1bT\x05\xb6\xf6j\xfc\x8a\t\xf5\x07\xfa\xee\xf8\xcc\xf6\xb7\'\xb5\xaf\xbf\xcd\xcfa\xde\xbc\xe4k\xe2h\xdbe\xda\xbc\xe3X\xf0\x17\xf9B\xfe\xe6\x08\x96\x1a\xf2*\x06,\x0f!j\x11\xa9\x03V\x03\x02\x08U\r1\x0e0\x04=\xf9|\xebR\xdd\xc5\xd38\xcb\x93\xcc\x1b\xd4b\xdb\xab\xe3\xcd\xe7\x0b\xe7,\xe2J\xde\xa2\xe3\x91\xf3g\x05w\x11q\x17e\x17g\x12\x8f\r\'\x0b,\x0b\xa6\x0c\x87\r\xca\x0c@\x0bW\x08\xa0\xff\x16\xf1a\xe2\xd0\xda\xf6\xdb)\xe2\xd4\xe8\x1c\xeb\xc0\xe7\xb7\xe0\x8f\xd8\xf9\xd6\xe8\xdd\xbe\xe9\xfd\xf4C\xfc\xba\xfex\x02!\x05\xe6\x03\xad\x04\xa4\x05\xdf\t+\x11X\x14\xcf\x14\x9c\x11$\t8\xff\x01\xf9\x18\xf8\x16\xf9\n\xf9"\xf2\x92\xe3\x8b\xd78\xdb\xf1\xfd\xdb1\x95RtO\xfc8\xcf,~8SRYb\xe4h^o\xbcq\xb3i@Xb=S\x1c\\\xfd\x80\xe9\xb8\xeaY\xfag\x01f\xefJ\xcf\xd9\xb2Q\xa9\xce\xae\xaa\xb8d\xc6j\xd7{\xe5\xce\xed\xd7\xf2\xb1\xf6\xe6\xf8!\xf6\xf5\xf5\x93\x04\xfb\x1f\x047w=\xf6-\xfd\x15\x1a\x06b\xfd_\xff\x94\xff\xef\xf8\x1e\xf4\xaf\xeb\x94\xe3\xa4\xdc\x19\xd13\xc6\x18\xbf\xe4\xc0\xae\xcd\'\xdeY\xec\xab\xf2Y\xf1\xa1\xee\x9d\xefr\xf7\xa9\x03\x04\x11\xef\x1b\xa7\x1f\x9e\x1d\x9c\x19\xad\x14\xe6\x0er\n@\t\xf3\x08\x84\x07\xf4\x02\x1e\xf9Z\xee\x89\xe5r\xe0\x1e\xdf\xb2\xde\x90\xe0D\xe1@\xe1\xc4\xe0 \xe0\x8d\xe2\xb5\xe7~\xed\xa2\xf4\x82\xfc\xd4\x03~\x08D\x07\x03\x04\xb7\x03\xe4\x07\x8c\x0f\xb5\x14\xba\x14I\x10r\x079\xfe\x16\xf8\xd2\xf5\xc5\xf4\x9f\xf1\xfb\xeb\xc4\xe9\x8f\xec\xa0\xe9\xdf\xdb\x07\xce\x96\xd4\xc4\xfcE2\x95T\x00Z\x94I\xa1:\x85=1M{c\x14uxyRp\x98\\\xf5E\xc50n\x1a\xf2\xfeB\xe7\x1a\xdf\x85\xe3\xcf\xe9V\xe9\x03\xdbC\xc5<\xb2F\xab\\\xb6\x11\xce]\xe7\x1b\xf8\xb9\xfc\x95\xfba\xfb\xea\x00\xe3\x08\x15\r\xc2\x12\xa6\x1b\xe2%\x11,2(\x10\x1c\xdb\x08\x84\xf3\x1d\xe5\xc7\xe1m\xe6W\xed{\xed\xeb\xe2\xb7\xd49\xc8)\xc4(\xc8\xfb\xce-\xd7\x00\xe0%\xeb9\xf6L\xff\x9e\x03\xbe\x02\xc1\x00,\x00\xcc\x05\xce\x13\xcc \x9f#\xed\x1do\x14\xc2\r6\x0b \x08]\x033\xfe\xc5\xfa\xce\xf6\xe6\xf0v\xe9!\xe3#\xde\xa9\xd8>\xd6\xb7\xd8h\xdf\xb4\xe6\xd1\xe8\xd2\xe7\xa1\xe8*\xee\xb4\xf6\xcf\xfd\x07\x04j\t\xac\rU\x11n\x11\xf7\x0e 
\r\x9c\n\xd0\x08\xb1\x08D\x07\'\x03\x80\xfc]\xf5|\xf0;\xee@\xea\xbe\xe4\xc4\xe2\xd7\xe1\xd3\xdd\x82\xdd8\xec\x93\x10\x98;qR\xefQ\x82FYB\x89K\x0c\\Fk\xddsQu\xafl\xddW\xb8;\xb6\x1d\xfd\x03Q\xf2<\xe8\xd6\xe7]\xeb\xb9\xe8O\xddG\xcdF\xbd\xe0\xb5\xea\xb9\xf3\xc6Z\xda\xe1\xee<\xff\xc9\x08K\t\xd8\x04\x9e\x00\x9c\x00]\x06\xcf\x10H\x1e\xb8\'M(p\x1bu\x07\x0c\xf57\xe7\x82\xe1\xf5\xdf|\xe1\xa3\xe5Y\xe8\x89\xe8\xe6\xe1(\xd8\xee\xcf^\xcbY\xd0\xf5\xdb]\xeb\x1e\xfaJ\x00&\xff\xd4\xfc\xf1\xfb\x99\xfd\xd0\x00j\x04r\x0c\x9a\x17\xb4\x1e\x80\x1b\t\x10\x1e\x03w\xfa\x88\xf8\x95\xfaW\xfd\x92\xff\x9c\xff\x98\xf9\xa2\xee\x9f\xe3\xf3\xdc\xd0\xdbX\xde-\xe4\xca\xeda\xf7N\xfc\x9f\xf9\xb6\xf16\xec\x9f\xedP\xf5&\x01\xb6\x0bS\x12\xed\x13[\x0f\xc9\x07\xf6\xff\xac\xfb\x9c\xfc\xfb\xfe4\x01>\x01\xd2\xfe\xa2\xfb\x01\xf7t\xf2s\xec6\xe7\xff\xe5s\xe8B\xeew\xeb\xf5\xe6\xa2\xf3\xe4\x145>\xa1T>R\xbcG\x9eC$K\'V\xc3^\xc3c&d\x94`\xa2Q\x8f8\x11\x1b\x98\xff6\xea\xf3\xdaE\xd7z\xde\'\xe7\xde\xe6(\xdb\xb9\xca\xf3\xbfx\xbf\xd0\xc7i\xd7\xb5\xeb\xe8\x00!\x11l\x16\\\x13\xf6\x0b\x10\x06v\x04\x8f\x06\x02\x10\xab\x1a\x9d!\xf1\x1e\xea\x0f\xfe\xfdl\xed\x85\xe3\xc1\xe0\x91\xde\x16\xdfS\xe1\xab\xe3\x11\xe4`\xe0\x14\xd9\x81\xd1P\xd0\x84\xd6\xa9\xe2U\xef\x86\xf8\x96\xfdZ\xfe*\xfer\xff\x92\x03\x1c\t\x91\x0e\x85\x13\xb9\x15\x05\x14\xdf\x0e\xad\x07\x7f\xff\x1e\xf9\xfe\xf6\xe8\xf7\xab\xfb\x97\xfe\xbc\xfb\x08\xf4\xc5\xe9\xbf\xe1\xb8\xde\x01\xe1\xc7\xe7\xf1\xeeI\xf5S\xf9\x94\xf9\xda\xf8\xd4\xf6f\xf4\xbc\xf4\x82\xf7;\xff\xb4\t\xfb\x10\x86\x13\xc4\x0eM\x05*\xfeM\xfa\xa0\xfa\x0c\xfd\xb9\xfe%\xff|\xfc\xc0\xf8p\xf2\xdf\xeb\xcc\xe7\xdf\xe6\xe0\xed\x08\xf8\x1b\xfd\x8c\xf8\x80\xee\xbf\xecV\xff\xfe%9M,_\xd4Y\x03L7G\x90NyU\x1dU\xe2OkIqF~>\x82*\xcc\x10\x0b\xf5\xf4\xde\xce\xd3\x0e\xd3\x9a\xdb\xd9\xe5B\xe9\x01\xe3{\xd7b\xd0\t\xd3\xc7\xdd\x92\xeb\xaa\xf8Y\x03\x8f\x0co\x13s\x15l\x12\x89\x0b\xf3\x03O\xffJ\x01\x1f\t\xfd\x120\x16X\x0f\xbc\x00\xda\xedU\xe2n\xde[\xdf\xbe\xe4.\xe8)\xea)\xe9\xb3\xe3\xb6\xdeR\xdb<\xda>\xdc\xd7\xdfS\xe6\x9e\xf0\x1e\xfc\xf5\x05Q\n\x9f\x07\xd4\x01\x0f\xffy\x02J\n\x90\x11\xf4\x15\x1a\x15\x1b\x0f\x02\x07\x16\xfe\xfe\xf6\x02\xf3~\xf0\xb9\xef\xc3\xefk\xf0W\xf0\xab\xee\xff\xea\xcb\xe6\xfa\xe5.\xe8\x98\xee\xff\xf5\x16\xfdS\x02\xa1\x02\x1f\x01\x07\xfe^\xfbm\xfcT\xff\x15\x05\x19\x0bm\x0e\xa0\x0e\r\t\x08\x00W\xf7\xf6\xf0\x0f\xef\xca\xf0H\xf4\\\xf8\xb4\xfa\xe3\xfa\xff\xf7\xc1\xf2~\xed\xf1\xeb}\xf0\xb5\xf5\xc6\xf9\x10\xff\xfe\t\x8f!\xa7<\xaeP\x8bY\x97SKI\x9e@\xec<\xdeB\x8aK\xa1Q\xc4M\x19<\xb7#.\x0b\xf3\xf7\xd0\xea\x15\xe0\x0c\xd9\xc7\xd8\x86\xde\x8a\xe5\xed\xe8H\xe5+\xdeV\xd8\xa2\xd6\xf0\xda\xe7\xe5\xfd\xf55\x08\xd1\x15\xd1\x19,\x15;\rR\x08\xdf\x07\xd4\x08\xff\x08"\t[\nk\x0c\x10\x0bR\x03\n\xf7e\xe9\xef\xde[\xd8\'\xd6\x80\xd9\'\xe0\\\xe7\xce\xe9\xdc\xe4\x7f\xdd\x03\xd9\xe8\xdac\xe1\x97\xe8\xe7\xef\x98\xf8\xef\x02=\x0c\xcd\x10>\x0fQ\n|\x05\x1a\x03V\x03M\x06\x93\x0b/\x10\xa4\x10\x84\n)\xffQ\xf3\xfc\xebf\xea7\xed\xf3\xf1\x94\xf4\x84\xf5\xda\xf4\xec\xf1\xac\xee\x82\xeb\x81\xe9P\xeb\x06\xf0\x1b\xf7\x91\xff[\x05\xc9\x07\x1d\x06\x90\x00\x9a\xfc\xcd\xfb\x0f\x00\xb3\x07\xf6\r^\x11\x8a\x0eT\x07Y\xfe\xca\xf4\xbc\xf0\xc4\xf0\x9a\xf37\xf7\xbc\xf6\xbd\xf5\x8f\xf4%\xf2\xea\xee(\xec\x86\xec\xed\xee\xa6\xf1I\xf4}\xfc\xaf\x11z/\x1eMp^\xd4\\nP\xb1D\xc2?\xafB\x95H6M{N\xf7F\x905\xb8\x1d\xfe\x05\xbd\xf3Y\xe6\x17\xdc;\xd5\xd0\xd3\xc6\xd8\x1c\xe0\xd4\xe4\xdc\xe3Q\xde\xa5\xd8.\xd68\xd9\xb5\xe2\x92\xf2G\x03\x00\x0fh\x121\x0f\xfe\x0b\xe9\x0b\xe7\x0c\xd3\x0c\xe6\n1\t\xbc\n\xb7\r\xaf\x0el\x0bF\x03\xf2\xf8\xed\xed\xf7\xe3\x7f\xdd\xec\xdc\x05\xe2\xea\xe8?\xec)\xea@\xe3[\xdc\x98\xd9\x82\xda\xac\xd
f\xf5\xe6@\xef\xa4\xf7\xd8\xfd\x82\x01\xf3\x03\x0c\x06\x89\x085\n"\t\xae\x06\xc9\x05\x86\x07\x9f\x0b\xda\x0e\xbd\r\x0f\t\x80\x01\xaa\xf9\xaa\xf3V\xef\xef\xeeu\xf1u\xf4I\xf6O\xf5\x8c\xf2\xb6\xf0z\xeff\xf0\xfb\xf2\x85\xf60\xfc^\x02$\x07\xca\t\xe4\x08E\x06\xc7\x04\xba\x04\xf4\x06\x1e\t\xa8\t\x17\x08!\x04z\xfe>\xf8\x07\xf4\xe2\xf1]\xf1\xaf\xf2\x0c\xf3\xe7\xf1b\xef\x9b\xea\xdc\xe9!\xeb\xae\xeb&\xea\xc6\xe6\x8c\xed\xa1\x026$\xf6G&]\x99`wUUE\x17<\xe4:\xccB\xa3NDW\xcdW\xbcJ|2k\x15h\xfa\xf4\xe6\xf2\xdc\x89\xd9T\xda\xf9\xdbD\xdd\xcb\xde,\xdfW\xdd~\xd8\xdb\xd1\xae\xcd\xfe\xcf}\xd9\x9d\xe8\x85\xf9\xc6\tG\x16\x1e\x1cN\x19\x0e\x0f*\x03\xe8\xfc\xf7\x00\x9c\x0c#\x19\x87\x1f\x15\x1d\xc3\x13\xc9\x05m\xf6\xab\xe8=\xdf\xbf\xdc\xe0\xdf\xb3\xe4\xe3\xe6\x17\xe4<\xdf\xe4\xdb%\xdb\xd7\xdb\x1e\xdd\x85\xde\xe0\xe0\x95\xe5\x83\xec\r\xf6\xd5\x00\x00\x0b\x9e\x11\x99\x12\x86\x0eN\x08I\x03\x94\x026\x07\xef\x0e\xba\x15\x80\x17\xec\x12&\t\xb0\xfd4\xf4\xb0\xee\xb3\xed4\xefh\xf1\n\xf3w\xf2A\xf0q\xee\x14\xee\xa9\xef\x82\xf2\xae\xf5K\xf9\xca\xfd\xe9\x02e\x08h\rY\x10\x9a\x11\xed\x10\xf3\r\x91\n\xb0\x06\x1b\x043\x03\x06\x02\xe7\xff\x7f\xfb\xb1\xf5.\xf0\xa3\xeb\t\xe9\x88\xe7\xa3\xe7\x03\xe8\xf6\xe6\xd4\xe3\x1b\xdev\xd8\xf1\xd7\xb5\xe1\xd5\xf8\xba\x18L8yOPXGUmJ\x11@\xe8=\xceEOU\x05b[dJY\x02D\x8f+\x0b\x14\xfd\xffc\xee.\xe0@\xd8\xd5\xd59\xd7\x8d\xd8O\xd8\xe9\xd6\x9f\xd4P\xd0\x0f\xca\xf8\xc4\xd5\xc6P\xd4w\xeaz\x01\xfa\x10\xe6\x16l\x16\xbc\x12\xac\x0e3\x0b\x8a\n/\x0f\x8e\x17\t [#\xce\x1e\x02\x153\t\x10\xfdZ\xf1\xc2\xe5\xdb\xdbO\xd6\xf6\xd5\x02\xd9\'\xdck\xdd#\xdd\x92\xdb\r\xd94\xd6e\xd57\xd9\x0e\xe2\xa9\xee&\xfb-\x05w\x0c\xe2\x11\xe1\x15\x06\x18\x9c\x17\x0c\x15\xe1\x11F\x10]\x11\x10\x14]\x16\xf1\x15\x0e\x12$\n\xed\xfe\xc4\xf2\xda\xe8\x8f\xe4_\xe6^\xec\xb9\xf2\xf9\xf5\xaa\xf4\x95\xf0.\xec_\xea\x0b\xedD\xf4\t\xff}\t\x14\x11-\x14-\x13\x17\x11\x12\x0f\xad\x0eL\x0f\xe3\x0e\xab\r\x0f\n\xcc\x04\x00\xffx\xf8\x8b\xf3k\xef\xec\xeb"\xe8\xc6\xe28\xde#\xdb{\xda\xab\xdc\xbd\xde\xb7\xe2.\xe5\xf9\xe3\xd5\xdf.\xdaN\xde\xe9\xf0\x0f\x11&7\x01U\\dgd|Y\x00M\x01D:C\xbeLFZ/e\x7fdfT\xdd8Q\x19\r\xff\xba\xed\x84\xe3\xe1\xddT\xda\xde\xd8,\xd9\xc2\xd9\x9f\xd9g\xd8~\xd6\x01\xd5\x9a\xd3\xe6\xd2\x10\xd6\xad\xdf\x1a\xf1\x04\x06D\x17p\x1f\xfb\x1d}\x16\x0e\x0ek\x08\xcc\x06\x1a\t\x01\x0e$\x13p\x157\x12\x9c\x08\xac\xfa;\xec\x03\xe0\x83\xd7\xc1\xd2@\xd1\xca\xd2\x9e\xd6\x07\xdb\xbb\xdd\xc8\xdd!\xdc{\xda\xdb\xda\xed\xdd\xff\xe3\x0c\xed\xfe\xf7A\x03\x82\x0c\x80\x12\x95\x15P\x173\x19S\x1b\x12\x1c\xfb\x1a\xa3\x18&\x16Q\x14J\x12\xeb\x0e\x9f\t\xf5\x02?\xfb\xcb\xf2\xc5\xea7\xe5*\xe4,\xe7L\xec\x89\xf0P\xf2\xc2\xf1\xbd\xf0\x0b\xf1:\xf3(\xf8\xb3\xff\xd5\x08]\x11\xf9\x16[\x18\x11\x16\xea\x11P\x0e\xf5\x0b\x86\n\xf1\x08\xe6\x05\xec\x01P\xfc\xfa\xf5^\xef\x04\xe9\x14\xe5\x91\xe2J\xe0\xbb\xddC\xda\x1d\xd9=\xd9\x9b\xda\xed\xdb\xbb\xdcl\xdeB\xde\x8e\xdc\xb6\xdc\xb7\xe5+\xfd\xb7\x1fSBSZ\xf5b\xb4`\x1dY\x02R\xf1NIQ\xa6Y\xa5b\x1bfb^NK:1d\x17\xb5\x02\xfc\xf3\x17\xea\xce\xe1\xfa\xdad\xd6O\xd4\xd3\xd4\x19\xd6H\xd7\xc6\xd7\x02\xd7}\xd5\x9d\xd4\x86\xd7\xc2\xe0o\xf0\xb5\x02\xda\x11l\x19\xfe\x18\x86\x13\x8b\rm\n\x9b\ne\x0c \x0eM\x0eC\x0cU\x07b\xff\x85\xf5\xcb\xeb\x80\xe3_\xdd\x1b\xd9&\xd6\xcf\xd4J\xd5\xa5\xd7O\xda\x8e\xdc\xea\xdd\x19\xdf\xc0\xe0\x08\xe3\xf4\xe6\xae\xec~\xf4\xb4\xfd\xce\x06\xb9\x0e\x13\x15\xf8\x19&\x1eM!0"\xa0 
-\x1d\xb2\x19\xa2\x17h\x16r\x14;\x10\x89\t\xb8\x00\x80\xf6{\xec\xf5\xe4\x9b\xe2+\xe5\x8a\xea\xef\xefb\xf2I\xf2\xd1\xf1\x7f\xf2\x9c\xf5\xd8\xfa\xb7\x01\xe9\t\xaf\x10[\x15\xd0\x166\x15[\x13/\x11\xc3\x0f\xf9\r"\n\xb0\x05\xd1\xff$\xfa\xd8\xf4\x92\xeec\xe8+\xe2k\xdd\x12\xda2\xd7t\xd5\x18\xd4\xcb\xd3\x14\xd5?\xd7\x7f\xdc\xe1\xe2\xd9\xe8\xa1\xed\xfa\xef\xce\xf23\xf6#\xfa\x80\x01\xb5\x0f\x95\'SF\xc2`\xe7o\xaep\x1eh\x8b^\x80V2S\xe7R?SqR\xcbK`>\xb9*\xf3\x12!\xfc*\xe8R\xd9\xab\xcf\xfc\xc9\xab\xc8"\xcbV\xd0A\xd6\x03\xda\xb3\xda\x1b\xdav\xda\xf0\xdeJ\xe7\x19\xf2p\xfd\xed\x07p\x10l\x16\x16\x19n\x18\x91\x15\xf2\x11~\x0e\xca\t\xbc\x02\xee\xf9\xc8\xf2\xc6\xef\xf3\xef\xdb\xefX\xec\xb5\xe5B\xde\xf8\xd7\xad\xd38\xd1S\xd1\xa0\xd4\xd7\xda\xe2\xe1\x86\xe7%\xeb\x1d\xee\xa1\xf1\xa8\xf54\xfa\xfe\xfe\xe2\x04\x9e\x0c\x0b\x16Z\x1f\x11&N(Y&?!\xfc\x19\x87\x11\x02\tf\x02\x1e\xff\x13\xfe\xd0\xfc\xf7\xf8\x0f\xf3\xb6\xec\x7f\xe7\x99\xe4\x8d\xe4\x00\xe8:\xee\xb4\xf6\xe9\xff"\x07\x96\x0b\xa6\ro\x0e\xfb\x0e\xc2\x0f\xf4\x10}\x12\xb5\x13\xa1\x14\xff\x13.\x11\xa4\x0b\xc3\x03\xaa\xfb\xb3\xf3R\xed\xfa\xe8\xf1\xe5\xec\xe4\xf4\xe3\xe2\xe13\xde\xca\xd8\xcd\xd4\xb9\xd2\xe3\xd3\x19\xd8n\xdd(\xe4\xce\xea\xbb\xf0\xaf\xf5\x94\xf8%\xfb\xeb\xfdX\x01\x03\x06G\t\x03\n\xa6\x06\x90\x01N\x01g\x0c\xda#uA\xf1Y\xe9e\x03d\xffXNL5C9@(BvF\x87H\xe8C\xc76\xbd#{\x10\x87\x00\x85\xf2\xad\xe3e\xd45\xc9e\xc6\xb1\xcc\xa9\xd7\xa9\xe1W\xe7\xa4\xe7a\xe4(\xdfa\xdb\xb8\xdc\xbb\xe5<\xf5\xec\x05b\x11%\x15r\x13\x81\x10v\x0e\x9d\x0bj\x06\x8a\xffz\xf9V\xf6\x17\xf6u\xf7O\xf9e\xf97\xf6\xf2\xeex\xe4\x95\xda\x9b\xd4\x1f\xd5\x9e\xdb#\xe5\x07\xee\xa6\xf3\xde\xf4\xce\xf2\xe4\xef\x8c\xee\x1b\xf0\xd7\xf3\xd7\xf8\xe8\xfdv\x03\xd3\t\x1b\x10h\x15\xde\x17.\x17\xc4\x13T\x0e\xfb\x08\x06\x05\n\x04\x9b\x05B\x07\xcc\x07Q\x05[\x00\xb0\xfa$\xf5\xd4\xf1\xd3\xefY\xf0f\xf3c\xf7\xb1\xfc_\x01\x89\x05\x03\t\'\x0b\x9f\x0c\xcf\x0cD\x0cH\x0c\x9f\x0c-\r\xc5\x0c?\n\t\x06\xcb\x00\x1c\xfb\xd9\xf5\xad\xf0\xe5\xeb\x1a\xe8\x81\xe5\x11\xe41\xe3\xa0\xe2\x93\xe21\xe3\x9b\xe4\x16\xe6\xeb\xe7\xb3\xea&\xee/\xf3\xc5\xf7Z\xfb\t\xfe\xac\xff(\x01X\x02\xd7\x03\xdf\x06j\n\x08\r\xf9\r\x84\x0c\x18\nj\x05\xbd\xfe\xc5\xf8a\xfa\xea\x08?$;B\x85V7Z\xb8NF?\xa64y2\xea5\xba9|;X9S2\x05\'\xdb\x16{\x05\r\xf4a\xe3\xe9\xd4t\xc9?\xc6\xe9\xcb=\xd8\x0f\xe5@\xeb-\xe99\xe2R\xdb`\xdae\xe0\xf5\xeb\xa4\xfaD\t\x89\x15\x9c\x1d\xb7 \x06\x1f\xa3\x1a\xbb\x14.\x0e4\x07\xe2\x003\xfd\xd3\xfc\xb8\xfek\xff\xa6\xfbL\xf2\x1d\xe6\x0c\xdb\xce\xd3\x19\xd1\x8b\xd2\x0f\xd7|\xdd\xae\xe3i\xe8\x05\xebu\xec\x88\xedx\xee*\xef\xc9\xef%\xf2~\xf7>\x00\'\n\xee\x11/\x15+\x14\xd0\x10\xaa\r\xf4\x0b\xe3\x0b\x10\rD\x0ey\x0e\xd1\x0c\xa7\t\x17\x06\x9d\x02\\\xffL\xfb\xa3\xf6\xd2\xf2F\xf1\xc7\xf2\xa7\xf6s\xfb$\x00v\x03\xcd\x04\x0b\x05\x01\x05\xe8\x05=\x08\x9f\n\xa3\x0c}\r\xcc\x0c\xc1\n\x06\x07\x15\x02J\xfc\xfc\xf6\xa9\xf2\x89\xef\xe6\xed\x8e\xec\xa9\xeb7\xeb\x10\xeb)\xeb"\xeb\xe9\xea\xfd\xeb,\xee\xff\xf0\xff\xf3\x96\xf6,\xf9\xfb\xfbd\xfd*\xfeD\xff\x14\x01\x1a\x04\x06\x06\x83\x06\xbc\x05L\x03\xa3\x00\x16\xfeP\xfbR\xfa\xee\xf6b\xf0\xb5\xea\x9d\xe9\xfc\xf48\n\x0f 
?0<5\xfc2a0\x151o5\xf79X=\xf9=\xa3=\x15;\x8a4o,Q!\x9a\x13\x10\x04%\xf4%\xea\xb1\xe8z\xed_\xf4\xe0\xf6\xfd\xf3\x0b\xee\x11\xe8T\xe5!\xe6H\xe9\xc7\xed`\xf2\xb5\xf7D\xfeb\x05\xd2\x0b\xa0\x0e\x16\r\xa8\x08\x05\x04\xb5\x01\x1c\x02\xcc\x03\xd1\x04\xbe\x03\xe5\xffi\xfat\xf4\xe4\xee\xef\xe9Z\xe4\x1e\xde6\xd9^\xd7<\xd9D\xdd\xc8\xe0;\xe2\xf5\xe1q\xe1\xbe\xe2>\xe6t\xeb\xa3\xf1\xb8\xf7\xcd\xfc\xbf\x00\xbc\x03\xe6\x06\xb9\n\xd0\rb\x0f\xf8\x0eq\x0eP\x0f_\x11<\x13\xdf\x12\x12\x10+\x0c\x0e\x08\xcb\x04Y\x01_\xfeA\xfc\x9a\xfa&\xfa\x12\xfa\x01\xfb3\xfd`\xff&\x01\xf1\x01i\x02\x13\x04\xb8\x06\xb1\t5\x0cH\rc\ry\x0c[\nE\x07\x7f\x03\xb5\xff\x8e\xfc\xe7\xf9\xd2\xf7\xc6\xf6\xc4\xf5\x91\xf4\x98\xf2\xc7\xef\xa1\xed\xfd\xebq\xeb\xbe\xeb\x1f\xec,\xed\x83\xef,\xf2\xe1\xf4N\xf6w\xf6\xa3\xf6W\xf7\xae\xf8A\xfa\xbc\xfb`\xfc\xf6\xfb\x13\xfa\xd8\xf6\xfe\xf3\xcf\xf31\xf6u\xfa8\xfe\x8e\xff\xc0\xff\xdf\xfeT\xfeI\xfe\x1b\xfe\xe5\xfd!\xfd2\xfd\x84\x00\xc4\t%\x18\x07(!4O:\x9e;\xf4:\xbc;\xdc=\xe0@rC\xb7DeD\xe9@\x84:\n1s%\x1f\x19\xfc\x0b(\x01\xb7\xf9\xaa\xf5G\xf4\xab\xf1t\xed\xf5\xe72\xe2\xa3\xdd\x1d\xdb\xd4\xda)\xdc\xae\xdeo\xe2\x0b\xe7e\xec\xe1\xf1>\xf6\xaa\xf9\x87\xfbk\xfcY\xfd\x8d\xfe\xd6\xff(\x01\xfb\x00l\xff\xb3\xfc-\xf9\x82\xf5\xa8\xf1\x8c\xed\x08\xea\xac\xe7U\xe6\xa5\xe5\x8d\xe4\x0f\xe3\\\xe1^\xe0z\xe0\xc5\xe1\xb7\xe3\xd9\xe5P\xe8t\xebJ\xef>\xf4U\xfaP\x01\xdc\x08e\x0f\xed\x14\xe1\x18\x87\x1b\xc8\x1c\x01\x1c\xd6\x19\x9c\x16\x80\x13p\x10U\r$\x0b\x87\x08i\x05\xb4\x01\x95\xfd\x8f\xfa\x07\xf9\xbe\xf8W\xf9\x93\xfa\xc7\xfb\xd3\xfc\xba\xfd\x01\xfe;\xfe=\xfe\xa6\xfdo\xfd|\xfdD\xfe\xf8\xff\x00\x01\xa1\x01\xfe\x00\xcd\xff\xcd\xfd\xef\xfb.\xfb\xfd\xf9\x97\xf8\x07\xf7s\xf5z\xf4\xb7\xf3a\xf2\x86\xf1\xb3\xf19\xf0\x8e\xed\xcd\xec\x01\xee"\xf1k\xf46\xf6\xde\xf7-\xfal\xfc\xf9\xfd!\x00#\x02\xb9\x02\x0f\x03^\x057\x08\x18\x0c\xb5\x10\xbd\x12\xff\x12\xbd\x11:\x0f\xfb\r\x86\r\x07\r\x85\rE\r\xcd\x0b\x15\n;\x08k\x06\x9c\x05<\x05\xcd\x04\x15\x04\xdd\x02g\x02K\x03\xf4\x05\xab\t\xdc\r\xff\x11I\x16r\x19\x1f\x1b\x0c\x1c\x81\x1c\xe6\x1c\xba\x1d5\x1f\xb6 
\xdf!\xd6\x1f\x05\x1cS\x17:\x11\xc0\x0cc\t\xcd\x06\x02\x05o\x02\x90\xfe\xb9\xf9w\xf5\x98\xf1\xa0\xedO\xea\x0f\xe8\x06\xe7\xb7\xe6V\xe6Q\xe5\xab\xe4\x9f\xe4\xd7\xe4l\xe5V\xe6_\xe7\x89\xe9K\xec\xc4\xeeh\xf1(\xf3\x94\xf4;\xf55\xf5\x03\xf6\x95\xf7\xdd\xf8\x96\xf9\x90\xfa\x0f\xfb\xe0\xfb\xfe\xfcq\xfd\xf0\xfd\x06\xfe\xca\xfd\xad\xfdH\xfe\x81\xff\x99\x00\x16\x01\xdb\x00.\x00\x1e\x00Y\x01\xb0\x01|\x03@\x05\xec\x04C\x05{\x06\xec\x05]\x06\x19\x06\xda\x04\n\x05\x82\x05\xbb\x05>\x04\x92\x02\x8e\x02\r\x02\xfa\xff\xbb\xfep\xfd)\xfc\x0f\xfcR\xfb\xb9\xf8&\xf8\xdd\xfa*\xfb$\xf9\xf3\xf8,\xfa\x15\xfd\x00\xfe4\xfe\xe0\xfb\x8c\xfa/\xfc\xd5\xffA\x01Q\xffx\xfdp\xfd\x1c\xfe\x92\xfd\x93\xfeh\xfc\x92\xfa\xa8\xfb\xa2\xf9!\xfa\xaf\xfcQ\xfbN\xfd\x8d\xfe\x10\xfc\x0f\xfd)\xfd\x81\xfc\x12\xff[\x02\xc0\x04\xc2\x04G\x045\x05\xab\x07\xd6\x07\xa6\x06\xc3\x07\xc3\x07\x96\t\xb5\t\xcc\nH\x0b\xf4\x0b\xd7\r\'\r\t\x10\xca\x0c:\n\xbe\x0b\x0c\n\x0f\n\x8c\nM\x08\xee\x07\xf7\t\xc3\x07\x08\t\x7f\x07#\x02C\x02\x85\x02\xfa\x03!\x059\x08t\x04\x16\x01j\xfe\xac\xfe\x87\x02\xa9\x01\'\xff\xa7\xfci\xfb\xe7\xfb`\xff_\xfb>\xfc\x8b\xfb;\xfaa\xf9G\xf51\xf8\x14\xfa\x89\xfb\x13\xfa\xf7\xf8\xc1\xf9\xf1\xf9\x8d\xf9(\xfaS\xfa\xb6\xfa8\xfap\xfb\x96\xfd\x94\xfe\xed\xff5\xfe\xa8\xfc\xfc\xfc[\xfe;\x00\xe2\x00\x05\x01)\x02w\x01\xb9\x00\xbf\xff\x8b\x00\x1b\x01D\x02\x1b\x02Z\x04\xa2\x03\xe2\x02V\x04\xf7\x03*\x05C\x07\xe1\x04\x13\x01o\x06D\x07\x93\x01\xd9\x01l\x05\xbc\x03\x87\xfd\xe9\x00p\xfe\xd4\xfaV\xfe?\xfe\xe3\xfb\xb8\xfa3\xfd\xf5\xf75\xf6\xef\xf5\xb7\xf7\x8e\xf6\xfd\xf5x\xfb2\xf6(\xf3\x94\xf6B\xf64\xf5~\xf55\xf9\xc6\xf7B\xf6W\xfb\xfb\xf9\xc1\xfaZ\xfb\'\xfa\xda\xf7\xd2\xf7:\xf72\xfat\xfb\xa6\xfa\x0f\xf9\x1a\xfd\xb3\xfaI\xfa/\x00\xf4\xfd\xda\xff\xe9\xfd\xb5\x01V\x04z\x06\xdd\x05\x16\x06\xc9\x05\xfa\x07\x85\x0b\x1f\nq\x08\xd7\t\xf7\r\x9f\x0cL\n\x83\t\xe7\n&\x0c\xba\x06}\x04>\n\x8c\n\x15\x07\xc0\x068\x08&\x04\x06\x03\x95\x05\x1a\x05\xee\x04\xdc\x02\x84\x01\x06\x02\xec\x06\x0c\x05\x99\x02Q\x00\xd4\x00>\x000\xff\xed\xfe+\xfa%\xfd\x1d\xfd\xeb\xfb\xb7\xfb\x97\xfc\xb4\xf9$\xfdU\xfb\x08\xfay\xfb\x01\xf8\x16\xfbr\xf9\x18\xf8\xb1\xf7\x94\xfb\x81\xfc\x1d\xf5I\xf9\xb1\xfau\xfet\xfb\x9b\xf8\xcb\xff\xaf\xfd\x92\xfeU\xffh\x00\xca\x01\xec\x02~\x00\xc7\x02t\x06\xa9\x03\x1d\x03t\x04\x9d\x02H\x02%\x06\xcb\x0c.\x06\x99\x04F\x02\x1e\xff!\x08\x91\x0cY\x06\x06\x04A\x07\xd3\x03\x15\x06~\x03N\n\xf7\x02\x83\x03&\x08\x9d\x06\xae\x03R\xfd\xfa\x06W\x00\xdb\xfd\xdd\xfe\xb2\x00\xcf\xfd\x06\xfdT\xfe\xd0\xf8+\xfa\x15\xfb\xa7\xf6\x0c\xf7\xeb\xf8\xd8\xf7\xf5\xf5@\xf7\x12\xf9\x0f\xf6%\xf9\xa0\xf5\xf7\xf8b\xfbd\xf8\xe5\xfc)\x00f\xfd\x9f\xfd6\x00v\xfc\x92\x01\xc0\x03\xf3\xfd\xc1\xff!\xfe\xeb\xfdz\x02\x91\x02\xca\xfe\xf1\xfe\x01\x00\xe8\xff6\x06\xa4\x06\x9e\x02~\x01\xbb\x06W\x084\x06\xb2\x03\xee\x04\xb3\x065\x04q\x07\xc3\x046\x03q\tK\x05\xe3\x01\xce\x01\xb9\x00\xc2\x03\x9c\x03g\xff\x90\xffp\xff\xba\x02M\x01\x0c\x01\xbe\x02\xcb\xfeE\xfd\x8d\x00N\x00T\xfc\xe6\xfd\xc7\xfd\x14\xfe\xf8\xff~\xfe\x8e\xfet\xfdP\xfeX\xfa\x13\xf9\xa6\xfdZ\xfeB\xfe\xc2\xf87\xfb`\xfaE\x00\x85\xf8\xc2\xf6]\xfb\xdd\xf8\x14\xfc\xba\xf9\xec\xfa\xa0\xfd\xea\xfa|\xf9\x99\xff\xc9\xfc`\xfc\xa1\xf3\x19\xfd\t\x00\xac\x03\x85\x04.\xff\xbf\xfft\xfe\xe9\x06q\x00\xbc\x030\x06$\x06\xa9\x04\x9c\x07\xb6\tg\x05.\x05N\x03\xd8\x00\x92\x05\xa6\x04\xa5\x02\x87\x07\x80\x04\xcf\x02\xf6\xfe`\x04\xcd\x05\x0f\x00\xfc\x03\xc3\x00\xeb\xff\x8b\x04\xb1\x08&\x00\x03\xfdX\xfd#\xfe\xaa\x01\xe1\xfd\x19\x03\xc6\x00\xe4\xfc\xda\xfb\xdf\xf2\xf8\xf8=\xfd:\xfa\x96\xfdt\xf9 
\xf9!\xf8p\xf8)\xf8\xa7\xf8\xa8\xf8\xf5\xf9`\xfc\x94\xfd\xd6\xfb\xd5\xfb\x81\xfc\x96\xfe\x8f\x02z\x01\xf0\x00*\x00\x12\x05\x89\xfa@\x02#\x02\x12\x01\xb8\x02\x97\x02O\x02}\xff\xcd\x01\x82\xf8>\x038\x02v\x03H\xffQ\x00\x8a\x04\x99\x04A\x02#\x01m\x042\x03s\x03\xeb\x03\x1e\x08\t\x05\xed\x02_\x01S\x06\x0c\x07\x9f\x04\x7f\x07F\x04\xf9\xff.\x03H\x06\x85\x05J\x06\xc2\x01\x16\xfe\xac\xfe4\x01\xb2\x01\x10\x00\xf1\xff8\xff\xb2\xfe\xee\xfd\x83\xfbO\xfb\'\xfb:\xfd\x83\xfc\x8b\xf9\x1d\xfbN\xffe\xfc\x13\xf9\x96\xfc<\xfb\xb3\xf9_\xf9\x82\xf9I\xfcH\x014\xfbm\xfa\xad\xfa\xcf\xfd\x9a\xfeK\xfb\xa4\xfe\x82\xfd.\xfe \xfb7\x00\x1c\x02\x00\x02\xd4\xfeE\xff\x91\x03`\xffr\x03R\x02/\x03\x1e\x06\xa4\x02\xb0\x05\x04\x08\xf5\x05\x0f\x005\x05\x9e\x08\x91\x05w\x06i\x05\x03\x03"\x07%\t\xf6\x05R\x04(\x02v\x00\x07\x02*\x03\xb9\x02\xf6\x01\x14\xfd\xf2\x00+\x00\\\xffT\xff\xd0\xfa\xd7\xf9!\xfc\xea\xfd\xa7\xfe\x93\xfc\xa4\xf9L\xfd\x88\x00P\xfa1\xfc\xf2\x02a\xf9|\xfa\xba\xfb\x95\xfa\xd8\xfdI\xfe\xf0\xfd\xbd\xfa5\xfaj\xfe\xe0\xfe\n\xfb\xfa\xf9\x9d\xfb\xbe\xfd\x9c\xff:\x02r\xfcd\xfc\xd7\x00\x16\xff\xc8\xff@\x01\xf6\xfe\t\x01\xdb\x01\x0c\x03\xde\x02?\x00\xc2\x04v\xfe(\x02e\x05\xdb\x01\xf7\xfe\x92\x03\x98\x030\x02U\x05\x01\x04\xa7\x02u\x02\xfc\x01\xca\x03\xd9\x07\xa6\x01\x82\x03\xce\x02\x05\x036\xff.\x04W\x02R\x01\n\x05M\x01Q\x03\xeb\xfeW\x03\xb9\x00\xbf\x01\x9c\x00u\xfdW\xff\xa5\x03a\x00\xab\xfd\xf5\xfb\xa1\xfb\xc6\xfdl\xfc\xf0\xfb\x82\xfcG\x00X\xfb\xae\xfaF\xfa\xa9\xfb\'\xfa!\xfb|\xfc\xe7\xfa\xe0\xfa\xc3\xfca\xfd\xb1\xfbE\xfb\xb1\xf9\x88\xfd\x00\xfe\xa4\xfd\xbf\x00\xaa\xff\xf1\xfd\x82\xff\x12\x01\xe6\xfd\xd9\x01`\x04`\xfe\x89\xffg\x01D\x04=\x02!\x05E\x05\xfa\x02Q\xff\x7f\xffN\x06\xc4\x07\xc7\x08W\x012\x04\xad\x00\xf1\x03\x11\x04H\x03\xc2\x02\x85\x02\x06\x03\xae\xfe\x00\x07\xb3\x00\x1e\xfc\xa9\xfd\x12\x01r\x02G\x02\x91\xfe\xf8\xfc\xa4\xfd\xea\xfc\xf1\x01\x95\xff\xf6\xfc\x17\xfd\x8d\xfcd\xff\xdc\xfcC\xfdS\xfc\xe3\xfc\xf2\xfb\x97\xfc\xb8\xfcG\xfe~\xfe\x16\xfd\xe4\x00s\xf8\xb2\xfab\xff\x16\xffE\x00c\x00!\xffR\xfa\xc4\xfb\xd3\x00_\xfe/\xffT\xfe.\xfd\x93\xffW\x00\xac\xfb2\xfd\x94\x01\x84\xfc\xab\x01}\x01\x9a\x01\xe4\xff>\x01\x8b\xff.\x03`\x03\xeb\x00\x8f\x03\xfb\x04^\x046\x03\x13\x05\xcf\x02\xd2\x03\x04\x02\xdf\x06U\x03\x93\x04\xa2\x01a\x01\xb5\x04\xc4\x04{\x02\xe8\x02#\x03\xd6\xfd2\x00 
\x04I\x040\xff0\x02\xf2\xff\xf6\xfb\xec\xff\x1a\xff\x97\x01\xcd\x03\xd6\xfb\x00\xf91\xfeY\x01:\xfe"\xfb\xf9\xfaJ\xfdH\xfd[\xffo\xfc\xd6\xf9\xae\xfa\xa5\xfbq\xfe~\xfc\xcc\xfc\xa7\xfb\x9e\xfc\xca\xfe\x85\x03\x81\xfe0\xfb\x11\x00\x8a\x01\xc1\xff\xaf\xff\x9b\xfe\x8e\x02\xa0\xff\xf2\xff\xb7\x03\xc5\xff\xea\x00\xdf\x01\xa6\x00\xed\xfc\xad\x02\xdd\xfe\xb7\x04x\x04\xa6\x00\xe7\x00W\xffv\x03*\x05Y\x06\xcc\xff#\x01-\x04<\x01\xda\x00G\x02r\x06\xca\x019\xffa\x00R\x01\xb3\x04m\x008\x00\xba\xff\xd5\x01\xd7\x00m\xfe\xef\x00\x9a\x02\xc2\xff\x8f\xffv\xff\xcb\xfdK\xfd\xb0\xfe\xac\x01\xd7\xff\xaf\xff\xd2\xfa\xa2\xfd\xe1\xfd5\xff,\xfc\xc6\xfd\x16\xfe\xf0\xfe\xa4\x004\xff\x0c\xffw\xfa,\x01\x7f\xfc\x9b\xfc&\xff*\x00\x9d\xfe/\xff\x9d\xfeQ\xfd\x8d\xfe\x9d\xff\xb1\xfc\xfc\xfc\xd2\x00\xb8\x00\xae\x02\xb8\xfe\xea\xfe\xba\x01\xb9\x02\x8a\xff\x1b\xff\x93\xff\r\x04\xa4\x02P\x02\xb0\x01\xc1\x00\xe9\x00\xdc\xff\xbe\x04\xde\x021\x01%\x01\xaf\x01\x97\x01(\x03\xe8\x02\xa2\x00\xab\x00\xa7\xff\xe7\xfdB\x02&\x00\xa1\x00\x05\x02m\xff\xfd\x01\x9f\x00\xfb\xfe\x00\x01\xe3\xfe\n\xfe4\x00\xc4\x01\xfa\x01\xd7\xff\xd7\xfe4\xff,\xfe\xae\xfc|\xfe\xe6\xfe\xa1\xffO\xfee\xfd|\xfe9\xfc\xe5\xfdV\xffT\xfeP\xfcL\xfc\xce\xfeX\xfd\x85\xfeW\xffE\xff\x14\x01\xe6\xfc8\x00\xd0\xfd\x15\xff\xdd\x04\x01\x00\xd8\x00\xa9\xfe7\xff_\x00\xec\x02\x97\x00\xab\xfe\x18\x04j\x00\xa5\xfd.\x01\x8f\x01\xf8\xff\xba\xfd\xda\x00>\x02\x18\x00\xa7\x02\xba\x01\xb3\xff{\xffj\xffS\x007\x00i\x01F\x03\r\x00\xdc\xfd\x17\xff\x1c\xff\x9a\x01W\x02\xa0\xfe/\x01\x0c\x01+\xfd\xd2\xff\x14\x00\xc3\xfe\xc1\xfft\x02\x10\x05\xd6\xfe\x98\xff=\xfe\x80\xfc!\x01\x99\x00\xf2\xff\xb0\x01>\x01-\x00\xb1\xfe\xe1\xfc\xb8\x01y\xfe\x80\xfd\xc8\xfe0\xff\xea\x00x\xfff\xff\xa8\x00~\xfe\x1e\xfd\x84\xfc\xe8\xfd\xd0\x01\xa7\xff\xd6\xfe`\xff\xb2\xfd\xfe\xff*\x00\x00\x00\x1f\x00`\x00\xb2\xff2\x01\xc4\x01\x85\x00A\x01]\x00\xac\x01\x9f\xffA\x02\xb1\x00\xcc\x01"\x03\x86\x00\xbe\xff\xc0\x01$\x01Z\xff:\xff+\x03$\x01\xb6\x03\t\x02\xa0\xfc\xb0\x01s\xfe\x11\x03<\x00\x1f\xffS\xff!\x01\x02\x03\xba\xffg\xfe\xb4\xfe\xa9\xff\xae\xfdN\xfd\xb7\xffx\x03G\xff\xc8\xfe\xf7\xff$\xffY\xff\x1b\xfe\xc0\x00\x9e\xfe\x7f\x00i\x00d\xff\xf6\x00\xd2\xfd\x86\x00\xd3\xfd\xb8\x00\x0f\xff\xdf\xfe\xc5\xfdf\x03N\x00\xdb\xf9\x06\x03\x19\xfe\xb2\xff\x17\x04l\xfdc\xfd\x98\x03w\x00\x04\x00\xdb\x01q\xffZ\x01\xcc\xff\x8a\x01\xa4\t\x94\xfe\xb6\xfb\r\x00<\x05\xab\x02\xbb\x01H\x04\x0e\xfdP\xfc<\xfd\xa7\xff\xa3\x02\xc1\xfe>\xfd\xfc\xfe\x8c\xfb\xaf\x01r\x03\xef\x018\xfcv\x00\x80\xfc\xd0\xff\xfe\x06\xcf\xff\x19\x01\xc7\xff4\x03n\x025\x00=\xfc5\xfd\xd2\xfb\x1a\x015\x00\x14\xfee\xfe\x10\xff\xb9\x01\x0c\x01M\x01O\xfd\xc0\xfb%\xfd\x9a\xfe5\xfe\xd9\xfe\xf7\xff\x0f\x00V\xff\x85\x00\xb3\xfb\x1e\xfd^\x02e\x02"\x00X\xfaC\xffb\x02s\x01\xca\x021\xffw\xff\xc3\x00B\x02\x01\x01_\x00\xe8\x01\xdf\xff\xb8\xffP\x03\x9f\x03\x9d\x02\xd9\xfc\x1d\xfd\xeb\x01\x1a\x05\x88\x00\x1b\x03N\x07$\xfc\x02\x01\x90\xfd\x97\xfd\x0e\x03N\x04\xc2\x038\xfeu\x00T\x05!\x02\xca\xfb-\xfe\xe5\xff!\xfd\x93\xfe\xf1\x035\x00\xe2\xfc\xcb\xfd\x0f\x017\xfd{\xfa\xb6\xfd\x18\xfe\xe7\xff\x82\xfe\x8b\xfe\xb2\xfc8\xfd\xd7\x00\xbb\x00\x96\xfe%\xfd\xd9\xfa\xe8\xfd?\x01\xbd\xff\xd6\x02\xcf\x00\xb0\xfb\x8f\xff\x8e\x00\xc7\xfd\xbe\xfe-\x00\x9d\xff{\x00~\x04[\x00\x03\xff\xdc\x023\x00\x00\x00-\x01{\x01\xf0\xff\t\xfe\x14\x04\xda\x04\xeb\x010\x01\xa3\xfdd\xfc\x15\xffJ\x01\xc3\xffP\x01[\x03\x91\x03\x1c\x02q\x01\xeb\xfc\xba\xf9\xa6\xfcI\xff\xbe\x01\xc6\xfe$\xff`\x05\xc3\x02\xfb\xff#\x03\xd2\xfb\xf3\xf9\x1b\x00\\\xfck\x01\xd6\x06\x8b\x029\xffS\x01b\xfd\x15\xfe6\x00\xd5\xfc\t\x00O\xffT\xfc\x8d\xfed\xff\xd7\xfct\x00a\xf
f\x94\x01C\x00\xcc\xf92\xfb\x9f\xfd\xf7\xfe\xa4\xff\xef\x00\xc7\xfd\xf4\xfe\x87\x01~\x01\x17\xfc\x08\xfd\x9b\xffX\xfd\xca\xfe\xcb\xff \x00h\x02\x7f\x04\xcd\x02!\x01i\xfcq\xfe.\x01\xf6\xfe\x16\x02 \x04\xb7\x02x\x05P\x05E\x03\xd9\x01y\x01\xea\xfe\xd5\x00x\x02\xf1\x01\xb9\x03\x1c\x01p\x01R\xff\xb5\x02\xea\x05r\x00(\xfeg\x01\x92\x02J\x00\x1e\xfc\xd9\xfce\xfc\x1f\xff\xe0\x01\x19\x02 \x03z\x012\xffM\xfc\xee\xfa\xce\xfb\xbe\xfd\x0f\xfe\xe9\x00\xd1\x01\xb1\x01\xec\x00\x0f\x01R\x00}\xfc\xca\xf9\x85\xfb\x95\x00\x9c\x00\xec\x00\xea\xfdf\xff1\x00\xec\x00\x04\x02\xe3\xfd"\xfe\xe7\xfez\x03\xe7\x03\x14\x01%\x00\xbf\xfcr\xfc\x99\xff\xe7\xffd\xfek\xff!\x02;\x02\xf6\xff\xb3\xff6\x005\x01\xc1\xfd\xdb\xfd\xec\xff\xe9\x01m\x02\xf3\x03\x96\x05\x0b\x03\\\xffS\xfd\x15\xff\xce\xfeM\xfe\xa3\x000\x03\xad\x01I\x02\n\x01|\x00\xf2\x01\x1b\xff\xc4\xff?\xff\xb0\x00\xe3\x00\x1d\xfed\xfe\x8d\x00\xf1\xffE\xfe\x8e\x00\xc4\xfd\xa6\xfd\x95\xff/\x01\xf8\x01u\xff\x07\xfeY\xfc:\xfd\xb6\xff\x87\x03i\x00r\xfc\xf6\xfb\xa6\xfd\xc4\x00m\x01\x81\xff\xfe\xfcB\xf9\xa2\xfb\x86\x01\x1f\x02:\x017\xff\xda\xfd\xc4\xfe\x03\x04c\x03]\xff\xd5\xffM\xff\xee\xfe\x1f\x017\x03-\x02\x8e\x00\xb3\xfe*\xfe\x03\xff\x9b\x00\x0f\x01<\x00\xb4\xfe\xce\xfe\\\xff\x02\x01;\x01S\x00\x9e\x01\x82\x01V\xffC\x00\x11\x021\x01\x16\x01\xff\xffW\xffU\xff\x81\x02B\x02\xe6\xfe\x9a\xfe3\xff\x86\xfes\xfeP\x00z\x00T\xffX\xff\xc0\x000\x00\x8e\x01\xa5\x01\xd7\xff=\xfec\xff\xd3\xffi\xffd\xff\xed\xfd>\xfe\xd3\xfe\n\xff~\xfeU\xfe(\xfe\xf1\xfe\xd6\xfey\xfe\xa6\xff\xa9\xff\r\x00^\x00\xed\x00P\x01q\x01_\x01\xdf\xff\xa5\xff\x83\xff5\x00\xbe\x00\xa7\x00L\x01\x1c\x01\r\x00(\xffh\x00A\x01\x7f\x01\x08\x01\xa9\xff\x8d\x00c\x01\x00\x01\x9e\x00\xf6\xff_\x008\x01P\x01\x11\x010\x01\xec\x00!\x00\x82\x00\x1c\x01V\x017\x01\x13\x01\xc1\x00\x9c\x00\xbc\x00\x03\x01\xa7\x006\x00O\x00\\\x00@\x00h\xff\xdd\xff\xf8\xff/\xffK\xfe\x15\xfe\xb9\xfe\x14\xffM\xff-\xff9\xfeC\xfe\x85\xff\xba\xffc\xffF\xff\xde\xfe)\xff\xa3\xff\xcf\xff5\x00\xcf\xff\xaa\xfe7\xfd\x1b\xfd\x02\xfd\x10\xfd\'\xfd\xf8\xfc\xde\xfd\x19\xfe\x11\xfeW\xfdN\xfd\xe2\xfe\xc3\xfe[\xfd\xb5\xfc!\xfdv\xfe\xbc\xfea\xfe*\xfe#\xfe\x17\xff\t\xff\xbd\xfe\xfe\xff\x05\x00m\xfe\xb2\xfd\xb7\xfdm\xfe\x9c\xfe\xdc\xfdO\xfd\xc2\xfd\xc8\xfe\xf8\xfe\xbb\xff\x80\x00\xfe\xff\x1b\x009\x00C\x00\xac\xff\xcc\xfe\xfb\xfc6\xfa\xcc\xf8\x95\xf7\x05\xf7\xb3\xf9M\x01`\x0c;\x17Y"\x13,\x8c2u5\xb73\x080\x06*|"Z\x1a\xf8\x11\x94\t\x9b\x01p\xfb\n\xf7\x1f\xf3H\xf0\x0c\xed\xdb\xe9\xf7\xe7\xeb\xe5\xf4\xe56\xe6\xcc\xe6\xc0\xe7W\xe9\xa0\xed\x15\xf3\xaf\xf8J\xfe\xff\x02\xd5\x05;\x08\xb8\t\xd4\tM\x08\x17\x05\x12\x00q\xfb\x96\xf7\xcf\xf4c\xf3|\xf18\xf0\x1d\xf0V\xf0\xa4\xf1\xad\xf2x\xf3U\xf4\xc0\xf4\x87\xf5W\xf6\xa8\xf7&\xf8\x9c\xf8\xb9\xf9\x0b\xfb!\xfd3\xfe\r\xff\xe6\x00\xa1\x02\x9c\x05\xa6\x07\xc0\x08\xfe\t\x87\t\x1c\t\x96\x08e\x08\xd1\n%\x0fQ\x12Z\x15t\x16\x9c\x16\xd8\x15\x0f\x12\xe7\x0bx\x03\xa1\xfb\x9e\xf4&\xef\x86\xea\xb4\xe7\x02\xe6f\xe5&\xe8\xa5\xec\xa2\xf1\xa1\xf6\xe3\xfaT\xfe\xa6\x01\xa4\x04d\x06\xc4\x06\xc9\x05\x9e\x03<\x01H\xfe\x97\xfc\x81\xfa\xcc\xf7\x15\xf6O\xf4\xff\xf2d\xf3\xa2\xf3\xef\xf2\xb3\xf3\xb9\xf4r\xf7\xa5\xfa\x89\xfdi\xff\xec\xff\x15\xffO\xfd\x9c\xfb\x1f\xfa4\xf9%\xf8\xe9\xf7\x9e\xf7\x81\xf8J\xfb\xa5\xff@\x04U\x08m\t\x81\t\xf3\t\x18\n\x9d\t\x18\x05x\x01j\xffW\xfc:\xf93\xf6\x00\xf6n\xf7Q\xfa=\x05V!\x8bJ"i\x0bp;gHd\x0fk\x88gOR\xe40A\x12\xc9\xfa\xda\xe6/\xd7 
\xce\xb0\xcci\xcco\xc7\xb4\xc3\x07\xcc\xa7\xdd\x8c\xe9\xd2\xe5\xd9\xdcT\xde\xa7\xecc\xfb\xb1\x00\xe5\xff]\x02\x80\x0b\x04\x15\xbf\x1b\x07\x1e\xfe\x1b\xea\x14;\x07\x85\xf8\xaa\xf0\x88\xeb\x1c\xe4\xa5\xd67\xcb\xef\xc9C\xd0\x9e\xd9\x1c\xdfQ\xe0\x85\xe2\xce\xe7V\xee\x98\xf3\x1a\xf9\xd1\xfe\xdc\x00\xaa\x00\xcc\x02S\nt\x13\x94\x18\xb2\x17>\x13\xa7\x0f\x07\x0e\xb9\n|\x03\xb4\xf9\x8b\xef\x07\xe8\x15\xe5\xa0\xe7J\xed\xe5\xf2\xb7\xf7\xbb\xfdn\x06T\x10E\x18\x06\x1b\x8c\x18\xc1\x13\xa9\x0f\x0e\r\xfb\nx\x07w\x02\x92\xfe\x96\xfd1\x00q\x04d\x07@\x08=\x07\xf7\x054\x06\xc0\x05\xa9\x04X\x01a\xfdy\xfb\x19\xfc\x00\x00\xf4\x03{\x06\xd8\x07\x94\x08-\x0b\xe7\r\x1b\x0e\xcd\n\xe0\x03\x85\xfd\x92\xf8\xac\xf4\xf8\xf1K\xef%\xee\x9c\xed_\xeel\xf1\xa6\xf6\xd6\xfb\xd5\xfe\x98\x00\xd3\x022\x06\x91\t\xed\ni\n5\t\xb3\x08\xcf\x08\xb9\x07\x08\x05\x1a\x01o\xfc\x96\xf7\xb4\xf3 \xf1\x0e\xef \xee\xb3\xedg\xeeZ\xf13\xf6\xb4\xfb\x95\xff(\x02(\x04\x84\x05\x85\x060\x06\x9b\x04\x12\x02\xe9\xffI\xfe\'\xfd\x94\xfc\x14\xfcc\xfb\x0e\xfa\x00\xf9#\xf9M\xfa\xbb\xfan\xfa\x04\xfaX\xfa\x8e\xfb\xc5\xfc\x99\xfd\xec\xfd\x06\xff\xa6\x00x\x02n\x04\xd2\x05+\x06\x10\x05(\x03\xf8\x01n\x01\xd4\xffD\xfc{\xf6\xc9\xee\xbd\xea\xea\xeez\xfb\\\x0b\xd8\x17\r#{7GU\x13i\xfdd\xc5M\x898\x102\x97-\x04\x1d\x92\x015\xe9-\xdf\xee\xdfH\xe0\xf0\xdem\xdf\xf5\xe0W\xe1\xab\xe0\x81\xe3\x7f\xeb>\xf3\x9e\xf5\x0c\xf5*\xfa\xec\x08t\x19\tL\x0e}\x11\x9f\x10\xad\n\x0f\x01)\xf7\x8e\xefH\xeb\xb4\xe9P\xe9/\xea\xd3\xec\xe9\xf2\xdf\xfa>\x01\xea\x03s\x02r\xff\x8b\xfd\xf1\xfc&\xfdi\xfc\'\xfb\xd5\xfa\xd5\xfc\xe5\x00T\x04\xad\x04\x08\x02\x0c\xfe\x86\xfa\x13\xf8\xf2\xf5s\xf3h\xf0\xc8\xee\x13\xf1\xb2\xf6\xe6\xfd!\x03\xab\x05\xef\x06\xda\x07j\t\xd4\t\x9d\x071\x04\x91\x00\x04\xff\x03\xff\xad\xff\xe9\xffS\xff\x89\xfe/\xffW\x01_\x03\x13\x04\x13\x02\xb0\xff\xf7\xfe\xb9\x00o\x03\x1c\x05\xe3\x05\xf3\x06\xfb\x08\x03\n\xfe\x08\x96\x06a\x04\xb7\x02\xe2\x01\xb1\x00\x87\xfe\x88\xfa\x14\xf5\xf1\xf2\x10\xf9\x1a\x07\xc7\x16\xb5%}9JS\xdegDi\xfaV{?0.\xbb\x1e\xb2\x08\xcc\xebZ\xd2f\xc65\xc6\xdd\xc9\xdd\xcd\xae\xd4A\xe0G\xeba\xf1\x06\xf4c\xf8C\xff!\x04\x83\x03\x91\x01\xd7\x04D\r\x8e\x13\xd4\x11\xad\t\x7f\x00\x08\xf8:\xee\xff\xe1\xce\xd6\xc5\xcf\xb1\xcc\x85\xcc\x8c\xd1\x10\xdd\\\xed\xe5\xfc6\x08`\x0f\x90\x14\x15\x19\x1e\x1a!\x14\xf4\x07\xff\xf9\x9f\xef`\xeb\xd6\xebg\xec\x8e\xea\xd6\xe8\x17\xeb\x81\xf1\x89\xf8\x99\xfb`\xf9\xb3\xf5\xc6\xf5\xf8\xfa\xfe\x01V\x07\xd4\n$\x0e\x80\x13\\\x1a\t \x0b!\x83\x1ba\x11\x96\x06\x0b\xfe3\xf8o\xf2\xcd\xeb*\xe6\x00\xe5\x8c\xea/\xf5\xfa\xff\xea\x06\x99\n\xb0\x0e\xbe\x14\xb6\x19$\x1a\xe0\x15\xf2\x0fa\x0c\xfb\x0b\xee\x0c\x16\x0c\x89\x08\x0c\x04\x1c\x00\xc4\xfc\xb1\xf9\xe2\xf5q\xf1$\xed\x17\xeb7\xed9\xf3\\\xfa\xe9\xff3\x03\xa6\x06\xcf\x0b:\x11\xa3\x12\xea\rI\x05\xdd\xfd\x1c\xfa\x80\xf8\x99\xf5n\xf14\xef2\xf2\x82\xf9y\x01\xcc\x06\x07\t<\tb\x08\x95\x06Y\x03#\xfe\xb5\xf7s\xf1s\xed\xa0\xed\xbd\xf1Z\xf7.\xfck\xff/\x02%\x05\x80\x07W\x07\xac\x03\x97\xfd\x8e\xf8\x92\xf6\xc2\xf7j\xf9N\xfa%\xfbR\xfd,\x01\x03\x05\xce\x06u\x05\x0c\x02\x04\xff\x9e\xfd\x93\xfd\x9e\xfd\xbb\xfd"\xfek\xff\x15\x02@\x05 \x07\xbd\x06\x14\x04\x99\x00\x13\xfe@\xfd 
\xfd\xa4\xfc\xb7\xfbj\xfb\x03\xfd\xf5\x00\xa5\x05}\x08f\x08\x84\x06\xe3\x04\x10\x04\x8d\x03a\x02\x82\xff\x90\xfc\xa6\xfb\xe0\xfc!\xff\x11\x00G\xff\xeb\xfd\xf0\xfc\x87\xfd\xfd\xfe7\x00\x04\x00\xde\xfe\x86\xfe\x8a\xff\xc4\x01j\x03\xdf\x03\x1d\x03\xcd\x01\xe3\x00q\x00R\xff\xf9\xfc/\xfa\x8c\xf8\x94\xf9\xad\xfdW\x02S\x043\x03\x1f\x01\xd4\x00\xa8\x03#\x06\xb0\x03w\xfb?\xf3\xe5\xf5\x01\tn$q8\xcd<\xe6:%A\xa9L\xd0J\xe50\xbf\t|\xec\x15\xe3\x9c\xe4\xab\xe3;\xdbg\xd4\xfa\xd7\x15\xe6\x16\xf5;\xfcX\xfa9\xf3\xd2\xed+\xee\xdf\xf3\x89\xfb\x8c\x009\x02f\x03\xf6\x07/\x0e\xb7\x0f\xa8\x07o\xf7c\xe6\xeb\xda\x8c\xd6\xa1\xd7A\xdbc\xe1\xa7\xe9\x86\xf3n\x00\x1b\x0fB\x1a\xa0\x1b\xe3\x12\xef\x06\x08\xff\xab\xfbB\xf8#\xf1\x99\xe8\xef\xe5\xa3\xeb\x0e\xf5\x1c\xfbH\xfb\xd5\xf8\x8d\xf7\x8b\xf8g\xfa\x90\xfbb\xfc\xfc\xfeL\x04\x10\x0c\xa8\x153\x1e\xb6!\xa7\x1e\xe4\x17P\x11X\x0bi\x03\xab\xf8\xc0\xed\n\xe72\xe7\xdb\xec\xcd\xf3\xac\xf9D\xff\n\x06\x18\r\xdb\x11:\x13[\x11\x9f\rg\t0\x06P\x05,\x06\xef\x06~\x06n\x05w\x04\xf0\x03\xba\x02x\xff\x1f\xfa@\xf5\xbc\xf3\x03\xf6\xf5\xf9\xd5\xfc9\xfe\x1c\x00J\x03\xb0\x06e\x07\xd1\x04\xc8\x00\xd8\xfdV\xfd(\xfe\x7f\xfep\xfd\xd9\xfb\xa2\xfb\x05\xfd\xa0\xff\xd3\x01\xac\x02k\x02\x16\x02\xb0\x02\xcb\x03\xea\x03H\x02\x84\xff\xdb\xfdo\xfe\xed\x00\x1d\x03_\x030\x02\xde\x00,\x00\x80\xff\xac\xfdq\xfaK\xf6\r\xf33\xf2\x12\xf4\xdc\xf6\xe3\xf8\xcd\xf9)\xfb3\xfes\x02\x8b\x05a\x05=\x02\xd3\xfe)\xfd\x9e\xfdW\xfe]\xfe\xa7\xfd@\xfdq\xfe9\x00\x0f\x01\xad\xffU\xfc\x04\xf9\xc2\xf7\x10\xf9B\xfb\x85\xfcq\xfcj\xfc\xe1\xfd\x91\x00\x99\x02b\x02\xcc\xff\xfa\xfc\x12\xfc\x87\xfd\xf6\xff\xe9\x00>\xff]\xfd\x84\xfd\x96\xff\xd8\x01\xc4\x01\xea\xff.\xfe\xec\xfd\x11\xff?\x00\x18\x00f\xfeA\xfc-\xfbA\xfc\x1a\xff\xce\x00\xe9\xff\x9c\xfd\x8d\xfcP\xfd\xc6\xfd\xc1\xfbE\xf7\x18\xf2;\xeei\xed-\xf6:\x0f\xd53\xd7R\x84\\KUJN\x9bK\x94A\xaa\'\xfe\x07\xe7\xf40\xf4\xf8\xfa\x9b\xfb\xdc\xf4d\xeeq\xee\xa6\xefr\xeb\x80\xe2\xc1\xdac\xda\xb1\xe0\xeb\xebi\xfay\t\xf5\x14\xb3\x17\x1d\x13C\x0br\x02\xf2\xf7M\xea[\xdc\t\xd4\xcf\xd4S\xdd3\xe8A\xf1\xeb\xf5\xe3\xf7o\xfbi\x023\x08\xa3\x06\x0c\xff\x1a\xf84\xf8y\xfe[\x04\x98\x05\x83\x010\xfc\x01\xf85\xf4x\xef\xe3\xe8\xff\xe1\xb1\xdd\xe8\xdeb\xe6\xb9\xf1f\xfdx\x06;\r\xfc\x12\xfe\x17\xbf\x1ad\x18?\x11\xcb\x08n\x03\xfa\x01\x01\x02\x07\x01\xbe\xfe\x86\xfd\x91\xfe\xf7\x00\x12\x01v\xfd\xbe\xf8\xef\xf6\xf0\xf9G\xff\xb9\x04(\nn\x10B\x169\x19\xe9\x18\xde\x16h\x14\xa6\x10:\x0b\xa2\x05[\x02\\\x02\x0f\x03\x1e\x02,\x00\xd3\xff\xaa\x01\xbc\x02\x81\x00\xf8\xfb\x0c\xf9\x98\xf9\xa2\xfbG\xfc\x1d\xfc\xb3\xfd\n\x02\xe3\x06\x92\tz\t\xac\x07v\x04\r\x008\xfb\x97\xf7\x86\xf5\x1d\xf4\xfd\xf2I\xf3\xec\xf5\x0b\xfa{\xfcv\xfbg\xf8\xf4\xf6f\xf8\x1d\xfb$\xfc\xc8\xfb\x17\xfd\x8f\x01\xc4\x06C\t}\x07n\x03r\xffh\xfc\xa4\xf9\xb9\xf66\xf4[\xf3\xea\xf4\x82\xf8\xd7\xfc\x90\x00\n\x02,\x01\x06\xff\xaf\xfd\xb5\xfd/\xfe\xbb\xfd\xa3\xfc\x89\xfc\x0b\xfeY\x00\xe2\x01\x8f\x01\xd3\xff\xd0\xfd\xa3\xfcZ\xfc>\xfc\xb5\xfbE\xfb\xe0\xfb/\xfeo\x01d\x04\xa7\x05L\x05F\x04x\x03\x10\x03r\x02Z\x01\x0f\x00\x8a\xff\x03\x00%\x01=\x02g\x02<\x015\xff\x81\xfdL\xfds\xfe\xb8\xff\xcb\xffJ\xfe\xb7\xfcV\xfd\x9d\xff\xa3\x00G\xfe=\xfa\x0b\xf9#\xfc\x08\x01.\x04\xdc\x03\x8a\x02y\x02\xeb\x05~\x0b~\x0f\x7f\x0f\n\r\xba\x0c\xb0\x11\x19\x1al%\xb31\xc7=\x81E\xbfB\x995 
\x12\xd7\n\x9d\x01\x8b\xf9\xe6\xf2+\xefr\xed\xa3\xeb\x14\xe9\xa9\xe6\xb1\xe6\xb6\xea\x8e\xf0K\xf5\x8c\xf8d\xfa\x8b\xfb\xc5\xfc\\\xfeg\x02<\x066\x08\xbe\x06i\x02k\xfd\xa5\xf9\x9e\xf7\xb7\xf5\xcf\xf2\x7f\xee\xac\xe8?\xe3\xb3\xdf\xe6\xdd\x88\xde\x92\xdf\xe0\xdf\x18\xe0\xf8\xdf\x08\xe0\xec\xe2\x08\xe7\xb8\xed\xe2\xf4J\xf9\x8d\xfdT\x01\xaa\x06\xa5\x0bu\x0f\xae\x14\xa1\x1b\x89%\xb04JJ\xa8`\x9cg\x15\\\x01I\x8a>}D1Q\r^pd_\\\x8eDP#@\x07V\xfa\x9c\xf8\x9b\xfa\xb5\xf8?\xef[\xe1\xbc\xd2*\xc6z\xbc\xbe\xb8o\xba\x94\xc0\x85\xc6\xc6\xcc\x85\xd6,\xe2\xbd\xe9\xe3\xe9\xee\xe8\x00\xeeM\xfc\x1a\x0e\x98\x1d\x81(L.7-\x85%\xb2\x1c|\x19>\x1d\xe5 \x97\x1f\xf5\x16W\n\xfd\xfb\x13\xee>\xe3\x7f\xdb\x81\xd5w\xcf\x9c\xc8\xc4\xc3\x08\xc2\xd5\xc2\xa3\xc3<\xc3\xaf\xc3\x16\xc5\xb7\xcaa\xd6\x01\xe6\x99\xf3\x12\xfb\xe9\xfe\xf3\x01\xed\x06\xe4\x0f\x01\x1cM(\xdc.\x1c/\n,\xe0(\xb8&#&\x00&\xb2#\xc1\x1e\xdc\x184\x13\x9e\r\x04\x07q\xff\xa5\xf9]\xf4\x15\xf2`\xf2\x04\xf4\x07\xf4&\xf1\x85\xec\xcf\xeau\xed\x87\xf1\xc3\xf65\xfa\xea\xfb2\xfc\x99\xfa\xd2\xf9L\xfa\xf0\xfa\x1d\xfd\xf1\xfd\x11\xfeP\xfc\xd5\xf8\xb3\xf5\xff\xf0O\xed\xd6\xea3\xeb\xd0\xec\xc1\xedg\xec8\xe9\xba\xe6\xeb\xe4\xe2\xe5\xbc\xe9 \xed\x8f\xf3\x95\xf7\xe6\xf8\\\xf9Q\xf4\x8c\xf3\xd2\xf7\xc3\xff\xf6\x0b.\x12\x9d\x11\x9e\x0c\x9f\x0e\xeb \xc9=\xaeP\x9bM:A\xf79\x89=IH\xfaS\xd6_5c\xd8RK:I(b#<&:!\xb2\x15c\x08e\xfd}\xf4\xa8\xeb\xc9\xe08\xd7\x17\xd1\x89\xccW\xccO\xcf"\xd5I\xd9_\xd4\xe6\xcb\xa4\xcaX\xd6>\xe9J\xf7S\xfd\xab\xfe8\xfe\x83\xfd\x89\x015\x0br\x17`\x1e(\x1d\r\x17)\x10$\x0b\x95\x08"\x07\xe2\x03\xb7\xfd\x16\xf5\xcd\xed\x16\xe7\x1c\xe1\x03\xd9\x94\xd1\x95\xcc\x86\xcb\xfc\xcc\x9a\xcf1\xd2\xfd\xd2\xc2\xd0\xac\xcf\x91\xd5\x89\xe0,\xef\xa7\xfa}\xff\x10\x00\x8c\x02\xad\n\xa0\x16s\x1fp& +\x9c)\x06&{%d)\\,G)\xec"\x99\x1d\xff\x18s\x15N\x11F\x0c\xaa\x05\x04\x00U\xfb\x0f\xf9\x90\xf7\x84\xf5;\xf3\xd5\xef\x00\xee-\xee\x08\xef\xb0\xefr\xefL\xf0\x98\xf1\xfe\xf2\x90\xf3\xba\xf33\xf5H\xf5\x00\xf5\x17\xf4)\xf4\xb0\xf5\x02\xf5V\xf2\x01\xef\xbc\xed\xa9\xed\x13\xee&\xedA\xea\x9a\xe7_\xe4\xb5\xe5|\xea\x9f\xed\x12\xf0\x00\xed\xbe\xeb\x0e\xecp\xed\xea\xf0\xfa\xf1t\xf3\x97\xf3\xd7\xf9\xc8\n\x13!\xcc0\xda+\x08!<"z4\xf4L\\]\xc3f\x04g6[kJ\x08D[MaV\xa5R\'Ce1i"4\x13\x1c\x07/\xffG\xf8\x0c\xf0D\xe4\x91\xd9X\xd1c\xca\x97\xc2\xd2\xbb\xb2\xba_\xc0\xff\xc9\xf8\xcf\x92\xd0j\xcf\xe1\xd1z\xda+\xe7S\xf4\x1e\x01?\n\xf1\rF\x0e\xf9\x0e\xf2\x13=\x1c\x05"\x06#\xd1\x1f\x9c\x191\x12)\t6\x03\xc5\xfe\xf3\xfa>\xf5\x0f\xed-\xe2\xaa\xd6\xe3\xcdD\xca\xf9\xca\xf0\xcc\x7f\xcf\x99\xce\xe7\xc9\xf6\xc6\x9d\xcaD\xd5\x10\xe1\xaf\xeb\xc8\xf5N\xfb*\xff\xa3\x02p\x0b\xdd\x15\x03\x1f\xa6\'X-\x840\x11/a-I-\xba,\xd3,(,$*^&Z\x1e\xf1\x15\xc8\r\xa7\x08\xb4\x04.\x01h\xfdU\xf8=\xf2Y\xebl\xe7I\xe6\x15\xe7\xd7\xe7\xdc\xe6\x00\xe7\x8a\xe6+\xe5K\xe5\xf0\xe5\xc6\xe9?\xed\x10\xee&\xee\xc4\xee\x85\xf0R\xf1\xa1\xf0\xbf\xf0$\xf2M\xf3Q\xf3/\xf3\x85\xf3b\xf2Y\xef\xca\xef3\xf1\x1a\xf3\xa5\xf2\xe6\xec\xc8\xeb\xaa\xec0\xf2l\xf6z\xf7]\xfb\xc2\x02\xfa\x0b\x84\x12\\\x1b,+\x84<_DR@^=\x87D\xd6P&[sa;c\x8c]!O)>]6\x076\xd85\xfc/\xac"\x8c\x13\x03\x03\xa7\xf2\x05\xe6\xfd\xdel\xdb\x0f\xd7\xca\xd0\xef\xc8\xfd\xc2\xd1\xbe\x03\xbb\xb5\xba\xd2\xbe\xd5\xc7\x98\xd3\xfe\xd9\xc9\xdc\x1c\xde\x0f\xe2W\xeb\x1f\xf7`\x05T\x11y\x18\xfa\x17\xa9\x14\xd1\x13\xd4\x16\xaf\x1dZ"\xc3"\x9d\x1d\xe3\x12\xb0\x07c\xfe\xeb\xf8\xd2\xf6\xc6\xf4u\xef\xc5\xe6\xa8\xddy\xd4\x19\xcd\xa8\xc8(\xc9(\xce\xae\xd1\xef\xd3\xb6\xd5z\xd6!\xd7\x06\xda\xf9\xe3\r\xf3=\x02~\t\t\n\xdd\x08\xea\x0c\'\x18\xd6#K,\xd4/\x8c.\xca*\x81\'\xc9\'a+\xc5+\x0b*d%\xb0\x1fO\x17\xc1\x0e\x0f\x08\xa4\x04\xb9\x02\xe4\xffV\xfbs\xf3\xbc\xea\
xbe\xe2\xe1\xdf\xbd\xe0\x0e\xe4.\xe6\xea\xe4\x89\xe0\xd5\xdb\x0b\xdbn\xdf\xda\xe5s\xeb\x9b\xed\x12\xed\xb2\xeb\\\xeau\xed\r\xf1\x0b\xf7\x8a\xfbY\xfc\xbb\xfay\xf8\r\xf6M\xf6\x1c\xfa\x81\xfe=\x04\xfc\xff\xd4\xf7\x0e\xf2e\xf1\x7f\xf8u\xfd\xe5\xfe\x02\xfc\x0b\xfdC\t/\x1a\x81#\xc9\x1d\xa7\x18\xc8\x1f\x1d/\xd4A:M\xbcQVM[A\xe8<\xffB!M6P2H[9\xdc,\x86#x\x1c\xcc\x17q\x10\\\x07\xfc\xfa8\xed\xf1\xe2\xa2\xdb\xa2\xd6\\\xd1\xe5\xca\xa0\xc63\xc5/\xc6!\xc5\x95\xc3\xf3\xc4\xe5\xca\x93\xd4\x7f\xdc\x03\xe3\xa7\xe7@\xeb\xda\xef\xd2\xf7\xde\x02u\r\x04\x13M\x13%\x12\x15\x12\x10\x15_\x18b\x19p\x17K\x13U\r\x11\x07\xb1\x004\xfdE\xfa^\xf5\xdd\xee\xc2\xe8\xa7\xe3\xad\xdd\xb0\xd9j\xd8}\xd9\xe6\xd9\xd9\xd8\x1b\xd9&\xdb\xec\xdc\xfb\xe0L\xe6\x95\xeeA\xf6s\xfa#\xfeM\x02\x98\x08\xa1\x0f\xe3\x15K\x1b|\x1f\xd0!/#\x9e#\x92$\xb7%\xf7%\x1e%\xcb"\x80\x1f{\x1b\xcf\x16Q\x13A\x0f*\x0b&\x05\x95\xfe]\xfa\xdd\xf5;\xf24\xedY\xe9\xb2\xe6G\xe4\xe5\xe3$\xe3=\xe3N\xe1\xb9\xe1\x83\xe3\x07\xe6\xcc\xe8J\xe9o\xea\xb0\xeb\xd7\xee\xfd\xf2?\xf6F\xf7\x8c\xf8\xf3\xf9\xe3\xfa\xed\xfc\xa6\xffj\x018\x01\xa7\xfe\xde\xff\x11\x01c\x01\x8d\x01_\x01\xcc\x02l\xffT\xff\x05\x01\xee\x08[\x14\x92\x1b\xc2\x1d \x17\x8f\x14Y\x1bs*\xf98\x17=\xf8:\x813I/H0p67>!=\xee5\xf3)| \xd6\x1b\xd9\x19\xe5\x19\x97\x14\xd1\x0b\xa6\x00a\xf6@\xef\xd5\xe9n\xe8\'\xe7\xa3\xe4B\xdfQ\xd9.\xd5\xe4\xd3\xbc\xd5\xdf\xd9\x1a\xdf\xac\xe2U\xe3\x9c\xe2\x98\xe2\x13\xe6G\xec\x00\xf4\x1e\xfaK\xfc\x0e\xfc%\xfa\x15\xfa9\xfc\x16\x01\xaa\x05[\x06J\x02\x8d\xfd\xeb\xfa$\xfa\xd7\xfa1\xfb\xb5\xfa\x8d\xf6\x9a\xf1U\xf0\x7f\xf0\xd6\xef\xeb\xec\x88\xebt\xeeM\xf0!\xf1\x9f\xef@\xee$\xee\x1a\xf1c\xf7\xee\xfc\xbe\xfe\x0e\xfd\r\xfc\xd6\xfdQ\x02\xc9\x08^\x0e\x8f\x10\xa6\x0e\x1c\x0c\xe4\x0ct\x10\xbb\x14\xb1\x16\xed\x16\x04\x15\xa1\x11\xc3\x0e\xb4\r\x19\x0e\x05\x0e\xe9\x0b\xd0\x08`\x04\x1c\x00\xe3\xfc\xd6\xfa\x86\xf9\xa3\xf7\xdb\xf5\x85\xf3\xd6\xef(\xedE\xecl\xec\xe2\xed\'\xed\x12\xed\xf0\xeb\xf6\xea\x17\xeb|\xebN\xed$\xef\xae\xf0c\xf2\xe3\xf2b\xf2\xd2\xf1f\xf1b\xf6\xe9\xfb\x82\xff\x91\xfe\xaa\xf9\xc4\xf9\xe5\xfc\x0b\x04\xaf\x08\xff\x06\xd6\x02/\x01e\x07 \x10.\x15\xfd\x13\xc2\x117\x13y\x1b\xb0%\xcf*N)\xda%\x90\'N-[3\x8b675\x851\x0c-\x9c*\xa7*\x0e*\r([#\xbb\x1c\xaa\x15\x01\x10\xb2\r\xd9\n\xee\x05\x08\xffr\xf8+\xf4\xe5\xef\x0c\xed\xe2\xe9\x0b\xe7T\xe3\xdc\xdf\x15\xdf\x8a\xdf\xa2\xe0\x94\xdf\xd4\xde&\xdf\xf5\xe0\xbf\xe3\x81\xe6\xe6\xe8\xdd\xe9\xd0\xe9\x01\xea\xcd\xeb2\xefj\xf2\x02\xf4c\xf3\x0b\xf2\xe3\xf1\x0f\xf2V\xf3\xa1\xf4\xb0\xf4\x0e\xf4\xe5\xf2\xaa\xf2\xdf\xf2\x0f\xf2S\xf1\r\xf2\xab\xf3+\xf51\xf5P\xf5\x0e\xf6\xd2\xf6\xfd\xf8\t\xfb\x90\xfd\x9b\xff\xdb\x00i\x02*\x04\x8e\x05\x00\x08,\nl\x0c\'\x0e\xc0\x0e\x82\x0f\xf6\x0f\x8b\x10\xe5\x10.\x12\x86\x12m\x12Q\x11u\x0fU\x0eU\ra\x0c\x93\x0b\xe1\t\x82\x06\x88\x04h\x03X\x02\xf7\x00\xa0\xfe\xb8\xfb\xa9\xf9\x9f\xf8\xe5\xf8\x8f\xf7\xd4\xf5\xd6\xf5\xc6\xf5W\xf5\xf2\xf3B\xf0\t\xef\xc7\xf1u\xf5\xfc\xf6\x06\xf57\xf2\x00\xf18\xf1T\xf3\xf7\xf5R\xf6c\xf4\x08\xf4\x1c\xf8\x92\xfd\xb6\xfe\xe4\xf91\xf7\xb4\xf7\xba\xfa#\x00K\x04#\x06S\x05\xf6\x05\xd6\x06x\x07E\x07 \x06\xb7\n\x1b\x14\xbc\x17\xa0\x132\x0b~\t\x83\x11\xc7\x1a3 s\x1b\x08\x11\xc5\x0bx\x10\xbc\x1eO*\r(\xf7\x1b\xa0\x0f\x96\r,\x16\xf3\x1f\x16%\x8d"9\x1a\xb5\x11\x0f\x0e\x8a\x0f\xc8\x11\xdc\x12\x02\x11\x88\x0bK\x06d\x01y\xff_\xff\xcc\xfd\x0e\xfa\xc9\xf5\n\xf3 
\xf1\xbf\xee\xef\xea\x92\xe8X\xe7\xcc\xe6\xdb\xe60\xe4\xb8\xe0\xab\xde\xc9\xde\xe1\xe0\x90\xe2c\xe3Z\xe3\x8b\xe2\x88\xe0\x95\xdf\xcf\xe2O\xe8-\xec\xff\xec\x96\xed\x12\xed\\\xee8\xefE\xf0\x05\xf6]\xfc\xa2\xff\xe8\xfe\x8e\xfc\x92\xfd\xe4\x00\x90\x013\x03\xb9\x07\xec\x0bJ\x0c*\nS\x07\xcc\x06\xfa\nm\x0f\xef\x11\xf2\x0e\xc6\n_\n\xa0\x0b\xa4\x0eH\x0f\x82\r\xf7\x07\xb2\x08\xf9\n\x1b\x0b\xeb\x06&\x04\x01\x05Q\x05\xc9\x03\xde\x02h\x01\xbb\x00@\x01_\xfe<\xfb\x89\xf5\xb0\xf9H\x00\xab\x01\x11\xf9\x16\xf2u\xf1 \xf3\x0e\xfb\xf5\xfe\xa5\xfbD\xf0N\xe78\xf0\x7f\xfa\x19\xfc|\xf9\xce\xf8\x9a\xf3^\xec&\xf1R\xfaU\xf5\x83\xf8\xc3\x01\xbd\xfe\xe1\xf6\n\xef\xc8\xf5\xa9\xfe\xdf\x08\xad\t8\x01B\xf7\xf7\xf9\x0e\x0bC\x0e\xd0\x05\x82\x01f\x03\x1c\x0f\xa8\x1e\x9f\x13s\xfc\x1a\xf7\xe6\x05\x9e\x1c\\#I\x1d1\x0eC\xfa\x06\x00y\r\x7f\x12\xc9\x15\x98\x1aW\x11y\x0b\xeb\n\xb5\x01R\x03{\t\r\x16y\x19\x18\x0f\x1c\x08>\x00\x08\x01\xe2\n\x1a\n\xa3\nr\rC\r\xa4\x07\x11\xfd\x0b\xf9\xc5\xff9\x01a\x05\xea\x06\x89\xfb\x95\xf8\xf5\xf6\x9d\xf4L\xf1\x13\xef\xc1\xf1N\xf5d\xfb\xb0\xf3\xb1\xea\xe8\xddJ\xe3\xd5\xf1|\xf5F\xf8\x9a\xef\xaf\xe7\xfa\xe33\xe8;\xf3\xf1\xf8&\xfe\xf7\x00\xad\xf2\xd1\xe9X\xed:\xfbE\t&\n\xc4\x06\x8f\xfel\xfd\xa3\x00\xc4\xfd?\n\x8b\x15\xcb\r\x8b\x06\xe2\x04=\x0f\x15\x11_\x01g\x02\xe0\x0e/\x14\x8d\x17\x10\x10B\xfe\xd7\xf7\x8e\x04\x1b\x15&\x10\x07\nW\x06m\xff<\xfc9\xff\xfd\x03\xe2\x02\xb1\xfcA\xfb\x0c\xf6u\xfa\xe9\x03\xd4\xfd\x12\xef8\xe5#\xec\x82\xf7\xa9\x02c\xf9\x8c\xf0Z\xe4U\xe1\x12\xfb\xeb\x06\x17\xfdH\xf2q\xe8\x8f\xe1\xef\xf2\xda\x05m\x08\xa2\x07\xa2\x06\x12\xef\x10\xdd@\xf3*\x0f\xce\x0f\x98\x0b\xdb\x02\xd6\xf6z\xf6\x85\xfe\xe5\x10F\x0ek\x04"\x02\x10\x05-\x04r\x03\xe6\r\x0f\x13}\x07@\xfa\xeb\xf93\t7\x16\xde\x15\xf1\x07\xf6\xfeU\xf7\x1f\xf4[\x13\xb3\x1c\xae\x15\xa7\x00G\xf1\xa8\xf8\xe1\x04\xe5\x183\x19L\t\xeb\xfa\xaf\xf4\x9f\x01`\x07%\n\x99\x0fO\t\xbb\x082\x042\xf4Z\xf3y\x082\x12Q\x0b\xf7\xfa\xd2\xf6\x87\xfb&\xfc\xe8\x05"\x07\xb3\xec\xae\xf2M\x03z\xf8\xf3\xfa\r\xf5\xc7\xf1m\xf8m\xf9\x9d\xf3\x7f\xedR\xeeq\xf0w\x02\x92\xfd\xec\xf0l\xeb,\xef[\x00\x8e\xf9\x1f\xf4~\xf8\xe0\xf1)\xfc$\x10n\x0b\x90\xf3v\xef\'\xfeS\t\xb6\x11\xc9\x06\x8e\xfc\xf9\x072\x11\xb0\x18H\x04\x07\xf3\x80\x00\x8e\x12\xc9#\x92\x14\xa3\xf3\x87\xfb\xd3\x14\x83\x0cp\x0c\xe7\t\xa3\xfc\xbd\x00\xb5\x05\xaf\x0e\xad\x03\x17\xfbJ\xf8\xb7\x08;\x0f\xd9\x005\xe8\xe0\xe1\xf7\x04\xa7\x12\xe4\xff\xfe\xfaW\xe8\'\xe5\xc9\x03\xbb\xf9/\xf6\x8b\xf5\xa2\xf2\xcb\xf3\xaf\xf7\x0b\xfez\xf9\xa1\xf5\x0f\xf6\x10\xf0|\xe2J\xff\x1b\x17h\x16\xbc\xf5\xc5\xd0\xa8\xdc\xde\x07\xf4$\x15\x12\x8b\t%\xf4?\xdb\xd1\xe6\xa6\x06\xd7/\xe4#\xa3\xff\x89\xdb\xf8\xd36\x11\xae;\n W\xf0\x18\xdb\x16\xf3k\x1dr*\x93\x18E\xfb\xff\xe1L\xeb\x7f\x1a\xd7&P\x12\t\x02\x87\xf2\x05\xfb)\x05\\\r\x9c\x1b3\x06\x0c\xf2K\xf9\xec\xfcd\x15\xe3\x12n\x01\x06\xfc\xc3\xf1c\xfb\x12\x03N\x07\x1b\x06\x1b\x04\xdf\x05\xf1\xf8\x8b\xf5\x99\xe1\xef\xe9\'%\xd8\'\xaf\xf6\xc7\xe0\x93\xe3\xcf\xef\xdc\x07\x9e\x04}\x01}\x04\x04\xebA\xf6_\xf8\xc3\xef3\xf0\x00\xfe\x8c\t\x10\x046\xf2}\xe1\xe4\xef\xb1\x08\xa0\x07\xf7\xfc \xfb\xe4\xee@\x06h\nc\x03\'\xf0\xd6\xf1p\x0f>\x11y\x10\xd9\x08_\x00c\xfd\xcb\x02\xe4\x07H\x07g\x08\x93!\xd7\x1d\xcd\x02L\xeb\x16\xedm\n\xfa\x1eb&\xe1\r\xc0\xfbO\xf5u\xf1k\xfa\xce\x08\xbf\x16\x1d\tx\x02\xbb\x03\xf5\x00\xc7\xf6\x0b\xe7\x7f\xec\xa2\xfaC\x08\x16\x19d\x0b\xc4\xf5\xe7\xe7\x93\xd1\xf6\xe9 
\x0b\xbb\tG\x05\xfe\xf3\t\xfb\x10\xfd\x9b\xf5z\xe4x\xda\xd6\xfc4\x0f@\x1a\xb9\x07\xd7\xe4$\xe3/\xed\x0e\x08\xab\x061\xf6\x08\x07\xe4\x0cI\xf9\x1b\xf3\xca\xf9\x17\xfb\xf5\t\x0b\x0c\xb2\n\x05\x104\xfd4\xf8\xab\xfb\xf4\xfb\xac\x0c>\x19S\rv\x17\r\x0b\xae\xf2\xbb\xff<\xf8\x0b\x05\xe1\x16\xf7\x1d\xdb\x18\x9b\x03\x8f\xfc\xa5\xf29\xf4\xde\x0b\x96\x12\x02\x13\xdd\rr\x08\xfb\xf8\xf4\xe69\xfe\xd1\x05\x04\x00\xda\x0f#\t\xa6\xf4\xb5\xf2\xc3\xf8]\x04\x81\xee\xd5\xf2i\r\xb8\xf8\xff\xf9\xb8\x00J\nF\xdd\xcb\xcf\x10\x00X\x17v\x1b\x0b\xf0\x9c\xe5\x96\xe1Q\xea\x15\xfe\'\x0f\xc8\x16\xc1\x05o\xd5w\xd3\xb0\xfa\xb6\x0e\xc5\x1c\xe2\x11\x12\xf3\x1a\xd9\xa4\xee\x0b\x02\x14\x0c\x99\x1c5\x10\xef\xe8\xf8\xdb;\t\x0f$v\x16\xb6\xfb*\xf0\xfc\xf8\xe0\t\x19\x1c\xfb\x11~\xf6v\xfd2\x0b\xff\t\xf5\x0f\x03\xffW\xff\'\x0e\xdb\x10\xd4\x02\x00\xf6\x99\xfe\'\x08K\x0f-\x06\x89\x10\x81\x05\xa9\xe7\x86\xf9\xe4\xfee\xfe;\x17|\x11|\x03!\xee\xbb\xd7g\xf6\xd7\x154\x15*\x01?\xe8-\xec%\xfa\x97\x05\xfa\xf5\xaf\xf4\x95\xf9Z\xff\x9b\xf6$\xf7\xa5\x03}\xf0\x1e\xf8G\xf4\xe7\xefH\xfan\x0e?\nq\xf2f\xf4\x15\xe0K\xf1[$g\n8\xf7s\xffi\xef\x01\xf3\x94\nk\x12\x00\x00W\xfb\xae\xfcp\x03q\x11\xfc\x0c"\xfb\xa9\xf0}\xf04\x14\xa7)Q\x1c\x1d\xf4K\xebQ\xee\xcb\xefw"\x83@\xcf\x16^\xddF\xed\x07\xff>\x04\xb3\x16\x12\x1dV\x02\xb7\xea\xfb\x0b!\x10{\xfaH\t1\x06\xc0\xdc\x7f\xf5s \xc2!\xd8\xf9\xc8\xe7\x03\xf2\xc5\xe2\xcb\n@\x07\xea\xf8J\x10\x0e\x00\xb1\xf54\xd0\x02\xe4\x81\x0c9\x18R\x13\xc7\xebN\xdf\xf2\xe07\xf6\xb1\x079\x1b\xe8\xff\x98\xf1O\xf4\xe9\xe3a\xeeB\x0b\xa5\x130\x12\xb3\xfc\xf9\xda\xbf\xf3\xd2\x03\xf9\r\xe3\x0e=\x14\x1e\xf5t\xec~\xef\xd6\xff\xbd5T\x19\x90\xfb|\xdff\xd9\x17\r\x18*\xf07\x83\x14P\xd8\xbc\xc8\xcc\xef\xaf/\x1d"\x85\x12\xf1\x17\xd6\xe8l\xe8\r\xe8\xcd\xfd\xf80[\x19\xad\x081\xf4]\xdf\xb0\xf2g\x0c1!d\x0b\n\xed\xa6\xf1\xaf\xfb\xd7\xff\xbc\x13t\xf8d\xe2\x90\xfd\xd2\x12V\x04\x96\xe1\x01\xfaN\x17h\xf0\xb3\xe8\xdd\xf9U\xfe\xf9\xfdR\xf1!\x17P%\xcf\xdf\xca\xb7B\xee\xf4\x0f\xcd)\x01-`\xf1\xe9\xd3u\xd98\xf7\x89\t\x93\x10\n\x1b8\x14f\xe6\xcf\xe3d\xed\x06\x04p\'\x14\x13\x1c\xf7H\xdb_\xeb\xf2!\xd6+J\x1e_\xe9\xba\xc6H\xf2\x04\x11\xb8;\xb95\xdb\xffa\xc7\x9c\xd5\xdb\x13\xdb-\xb3#n\xfaa\xf6\xa8\xfc\x0f\x07\x84\xfaV\x00\xea\x15\xde\x02\x8f\xf4\xf8\xffY\xf8l\x0f\xbc\x0f\xb3\xf8\xba\xe6\x9e\xed*&\x14\xfc.\xe4\xcf\x0c\x14\x02\xc8\xf1\xdb\xf5\x84\xe8y\x13e\x0b\x8b\xe5\x89\x11\x05\xf9\xd9\xe9\xbe\xf5\xeb\xf2\xfa\xfd3\x19\xe7\n\xec\xf4x\xec\x00\xf4\xeb\x00a\xf2\xd4\xf3\xb0\x14\xc5\x1e\xff\x07\xb2\xe3V\xdb\xa9\xf1[\x13l*\x97\xf2`\xec@\xf8\x1d\x06\xf97\xe5\x01\x00\xc5\xea\xd4\xe4\t\xc0=\x08A\x1f\xf09\xcb\r\xf0\xbc\xff\xf2\x04\x18\x1f\xd2\r@\x08H\t\xad\xe9\xeb\xf3u\x0b\x1c\x0f;\x01\x98\x0e&\xfb\x80\xffh\xf5\xff\xff\x89!\xe3\xfd\x99\xf8\x04\xf2\x07\xe5B\x0b\x1c!\xf0\x1c\'\xf87\xbf\xd0\xe5\x06\r\xac#\x91\x1c\x10\xee\x07\xecA\xe7\x1f\xec\xa7\x12\xcd\x07\xac\xfa+\x06\xd0\xe2\xa2\xe9f#\xdd\n\xcd\xfaW\x05\xba\xcb\xb5\xdc\xb8 m# \x10\xf4\xfd\'\xe2\x08\xf4E\t\xb3\xf6\x9e\x05\x9d\x05\xc0\x02\x14\nI\x03!\x06\xa3\x00\x03\xe3\xa7\xf8)\x19Z\x16\xca\xf92\xf0\xfe\x02\x9f\tb\x06\xd2\xff.\xf8\xbe\xfb\x91\xfao\x0f\x17\x0eD\x03(\x07M\xea\xd3\x08p\xf0\xd1\xef+%\r\x01\xd4\x00\xbe\x19n\xfec\xe0\x87\xe2\xe3\xff\xa1\x1b\xb2!_\x065\xef 
\xf0[\xe6\xbf\xf1r\x1b#\x1b\xb8\xff\x80\xfdC\xe2\xa5\xe8\xed\x04H\x12A\x10\xa6\xf6\xb5\xf3t\xf5v\xf4\xec\x05[\x03e\t\xa8\x14\xa2\xd8\x1f\xdd\xbb\x1ct\x0fZ\x00*\x0f#\xea\x19\xe5w\xf8\xc2\x10\xa0!\xbd\x02#\xfa\x16\xed8\xe2\xdc\x03\x84$p\x06f\xff8\x04\'\xf5\x1c\xff\x82\xf1\xd5\xf7^\x14\xaf\x19\xd7\x19j\xe4\xe4\xd7\x18\xfb\x8d#\xd7\x13\x91\t\xc3\xee\x96\xdfI\x0c\xf3\x17z\x10\xa8\xf3y\xe7\xec\xf2\xa3\x1c\\\x02\x85\xfb\x9b\x17\x82\xf7H\xeb\x94\xfa\x90\xff\x00\xf6\xef\xfb\xbc"\xa6!\xe4\xe0f\xe0\x1b\xee\xc5\x03\xf5\x11.\nJ\x01\x10\xfe>\xfa\x85\xfb-\xfc\x00\xe2w\x00\xa8%\x8c\n\xe1\xe2\x84\xf4r\x0bR\n\xb1\x08\xc4\xeac\xd8\xe9\x02\xb2&)\'p\x07\xa6\xc7J\xd0\x83\x18\xc4&\r\x01:\xfe\x0c\xfay\x03\xbd\xfe"\xf6\xc8\x04\x9d\xf2\x85\x00\x0b\x1e\xfb\xf9h\xf4w\x1e3\t\xdf\xd4\x10\xe7\xf5\t\xda\x1ae,\x9c\x04Y\xe0\xce\xd7\xfa\xfdZ3(\x1a\x0b\xe1\xee\xe7\x94\x03A\x12\x0f\x10}\x00\xbe\x04\x90\xe9\xc2\xf4o\x0c|\xf9\xe4\xfb\x8c\x14\xfa\x06\x85\xfe\x01\xef(\xf2\n\xff\xe1\xfd\xdf\x15\x0c\xf9\xd1\xef\xd1\xee\xe1\n\x051\x90\xe3P\xd0\xea\x0c\x9f\x17\x9a\x05$\xf65\xef\xed\xf2E\x12\xe6\x1a\xac\xf3\x9b\xf0\xa4\xf9\x89\x0b\xdc\x08B\xdf\x81\x04%#"\xfb7\xf6@\xfd\xf7\xeel\x07\xd9\x04\xef\t\xc1\x1aL\xe8\xba\xdb2\x04\xb7\x18\x98\x16\x87\xff}\xe3%\xec\x18\t\x16\x1cz\xff4\xf4\xbe\x04\xcb\xf7!\xfaw\x0b\x1e\x02r\xff\x8a\xf7v\xff?\r\x7f\xf7\x01\xfc\xc2\r\xdf\t)\xfc"\xe6\xfb\xee\xc1\x193\x1a\x9c\xffA\xfc\xc9\xebJ\xde\x8d\x16\x8b4^\xfd\xc9\xde\xf9\xe2^\xfdU\x1e\xbe\x13-\x05\x0e\t\x9f\xdf\xc3\xc7\x0f\x0f\xe5/P\x17\xb6\x01\xee\xdb\xdb\xe8g\xfb\x1d\x02\xdb\x15:\x1c\x14\xf1!\xdd\xc5\xf6\xb9\x11\xff\x1e\xae\xefo\xe2\x1a\xf8\xbf\x12\x81\n9\xf6~\x0f\xa7\x05\xf1\xfa\x92\xdd\x95\xe4\x0c e&\xa4\x0e\t\xf0\xc8\xea\x91\xea\x8f\xf9\x90\x1aQ \x81\x04\xc9\xe6\xd9\xea\xf1\x01v\nD\x14d\x17\xf4\xee\xed\xdf\x18\xec<\r 2?\x0e\xcf\xf1\x00\xea\'\xdf\x04\xf5\xf0\x1d\\/\xee\x0b\xa6\xdf\x89\xd5L\xfe\x1c\x18k\x0b+\x0b-\xf2"\xfb\xcc\xfd \xef\xdb\x13\xe2\x03 
\xf4&\xff\x98\x04\x19\xfas\xec\x98\x0e\x8c#\x01\xf0\x1b\xd4B\xff\x05\x14\xaa\nQ\x05\x7f\xf9\xa4\xfe\xb1\xec\xcf\xfa0\x13\xaa\xf4K\xfdd\x12\xb0\x12\xf2\xfa\x88\xd9\xfc\xe4\xf5\x0f\xc5*\x9a\x15\xc1\xfd\x0c\xe0g\xd9{\xfd\xa3\x1d~*\xee\xfdE\xea\xf2\xf0\x13\xec\xb1\n\x94\x1e\x92\t\x1c\xf9.\xfa\x1a\xdey\xf7\xb3"\xf4\x1c!\n3\xe6q\xdar\xf3M\x13\xf7\x1a\xf2%\xfa\x00<\xcc^\xe8\xf8\xfdU\x10\x19&\x91\r\xc4\xfd\xda\xe3\xac\xe1\xf2\xfa\x8b\t<"\xe4\x16\t\x00~\xd8\xa0\xe3\xfd\x06\x83\x17\xfd\x1b\xc0\xf1\xc2\xdb\xa2\xfb\xe5\x172\x14\xee\xfc\x7f\xf4X\xea(\xf2\x07\x07>\x04\x9e\x0b\xb3\x18\x1d\x00\xeb\xe4n\xf3#\xf3\x97\x02\xd9\x0b\xe2\r\xd8\x11Y\xf9\xbd\xf1\xe2\xf2\xf4\xf7\xfe\x08\x9a\x10\xf5\x02\xb6\x03i\xf7W\xf0\xab\xfc\x1c\to\x13\xd4\rD\xf5d\xe4+\xfb\xec\x07\x9f\x06\x82\r\x13\x08\x06\xf7\x03\xeb\xc6\xf5\x89\x19m\x1d\x7f\xfc\x99\xdd\x11\xeb\xa4\xfe\xa1\x1e\'\x1d\x8d\x07\xd9\xf6e\xde+\xef\xda\x04\xd6\x0b\xa5\x11\xc4\x12c\xf1\xab\xedN\xef\xcf\xf8\xd6\x167\x14(\xfaA\xe6\x88\xf4\xc5\x0e\xc0\x02\x10\xf9\x05\x13\xb3\xfd\xad\xe2Z\xef\x8d\t\xb8\x11\x94\x12X\n\xb3\xef\xb9\xd8e\xf2]\x1a\xfe\x13^\x04K\x01z\xf6\x8a\xed\xa5\x00\xa8\x06k\nD\x0b\x15\xf6`\xed\x98\x07J\x06+\rY\xff\x8e\xeb\x11\x0c\xd2\xfe\xf6\xf6"\x10\x9a\x06j\xefa\xfd\x8c\t\xe2\x05\xc0\x04v\xfc,\xf9^\xfaT\xfb\x8a\x108\n\x91\xf7v\x06\x87\xf9E\xf9\x98\x03b\xfb\x81\x02\xa0\x0b\x0c\x00\xea\xf2n\x01\xc3\x0f\x01\x04\xae\xe8\xc7\xea\xc8\x05>\x19\xe9\x0e\x18\x00\xa8\xea;\xe3\xdd\x04\xe2\x10\x1e\x0b\x9e\x02c\xef%\xf0\xd3\x08\xb4\n\xad\x03\x16\xf6\xb8\xfe\x14\xf4\xc5\xec\xad\x15\x10\x1a\xca\x08L\xee\xdf\xde\xc1\xf3i\n#\x17*\x1d\xb7\x00\x8e\xdb?\xee\xda\x04\x88\r$\x0f\xe7\x06`\xf3O\xf2s\xfd)\r\xb3\x19\xa3\xf9\xcf\xe24\xf2\x17\x05\xc6\x16\x17\x14(\x08\xbb\xf4\x82\xe2\xe9\xf2\xe6\x04\xc3\x13\x8f\x11\xcd\x08~\xf9\xd7\xea6\xf6\x98\x06\xc3\x0b\xc4\x0eD\xfd\xf1\xee\xc0\xfb[\x07Q\x06\xfb\x08\xa1\x06\x9c\xf2\xfb\xefu\xff\xd9\x05<\x08\x1f\x0b\xfd\x04=\xf9\xed\xe8i\xf5\xc7\x0c\xbf\x0c\x13\x05\x7f\xfb\x93\xf7<\xf6+\x00\x0c\x04\xed\xfe\x07\xfe\xfa\xf95\x00\x9c\x01\x04\xfe\x10\x06.\xfd\xeb\xf6\xa3\xf9\x94\xf6\x17\x03\xb4\x0f/\x0bn\xfc\x0b\xf1\xeb\xf1\x80\x05x\x11\xbe\x01v\xf8\x1c\xfe\x16\x02\xe5\x04\xb0\x04`\x05\xa3\xfd\x01\xf4D\xf7"\x0bF\t@\x08n\x07g\xf9\xb1\xf7\x7f\xf3+\xfc+\x08\x8b\x0e\xd5\x10\x98\xf8\x03\xf4\xcf\xfe\x1e\xf6\xf2\xff\x07\x13\xa9\x03\x1a\xf8\xc3\xf6\xac\xfe\x92\r\xb2\x02\xdf\x02&\x01G\xe86\xef\xe4\x0cE\x17B\x0b\xf5\xfc\xfc\xf0~\xf2\xe4\xfa)\x02\xbe\x0f\xb6\x02\xba\xf7\x8d\xfb*\x004\x07,\xfdx\xf2,\xfd\x8c\x04\xb8\x00\xc1\xfef\xfes\xff4\xff\xbe\xfcq\x00,\x01\xb8\x013\xfd.\xfb\r\x02\xc1\x02\xe6\x00\xbb\x00\x8f\x01"\xfe\xbb\xfd\x82\xfbG\x01\x91\x06P\x02\xec\xfc\x13\xfb\x0c\x05\x9a\x03}\x00\x10\x00\xc7\xffa\xfbB\x00\xdb\t>\x03A\xff\xf6\xffH\xff4\xfd\x11\xff\x91\x03\xc5\x06\xd7\x02\xce\xfd\xf0\xfcj\xfc\xf2\x01D\t#\x04w\xf9\x13\xf9\xe7\x03H\x04\xe9\xfb\x08\xfe\x18\x02\xbd\xff8\xfa\x97\x003\x01\xe9\xffe\x01\xf3\xfc\xb9\xfc\xd5\xf9p\xf7\'\x05o\x0e\xb4\x02\xd5\xf6\x13\xf5e\xfc`\x00R\x06\x05\t/\xff\xfb\xf7,\xf8\x82\xfe\xb9\nU\x05\xec\xfbx\xfcx\xfc2\xfc\xa2\x01\xf0\tT\x03M\xfd\x96\xf7\xbf\xfc\x9b\x04\xdf\x02\xfe\x03\xaa\x00\x06\xffr\x01\xf4\xfd\x93\xfb\x8f\x022\x04\xe9\x00\xcc\x01\x97\x02\xe9\xfc1\xfc\xce\x02\xd9\x03k\xff\xe4\xfb6\x01\xa7\x00~\x00H\x02e\xfe{\x02\xb9\xff\xc0\xf9m\x00\xf5\x02\xcf\x00\xf0\xff\xdb\xfe\x81\xfeA\x019\x01\x00\x00\x13\xfe\xc8\xfa\x8c\x00\xab\x02\xd7\xfd\xe9\x00*\x02\xad\xfe\x87\xfdU\xfc\xfe\xfd/\x01\x04\xff\xf0\xfd\xc2\x02\xaa\x00\x9e\xfda\xffy\xff\x15\xfe,\xfe6\x01\xbc\xff\xd1\xfd\x9c\x01Z\x03w\xff\xc3\x00e\xff\xf1\xfa\x08\x017\x02\xa9\xff+\x00,\xfe
 \xffk\x03\x00\x03\xcd\xff\xd6\xfa\xc7\xf8Q\x00\x03\x05\t\x04&\x00\xac\xfd\xaf\xfbP\xfdw\x03&\x02\xb5\xfe\xf1\xfb\x97\xfe\xd1\x03\x99\x05\x89\x01\x81\xfb\xdd\xfc\x95\xff\xb6\x00T\x03$\x02\xb6\x02[\x04\x0e\xfd\xc3\xf9\xab\xfdz\x02\xd1\x06\x82\x04F\xfd\xad\xf8\xcf\xfb\xed\x01\xdc\x06\xf1\x04\x1a\xfd&\xf7_\xf7\xe7\xff\xcc\x07\xaf\x06\x8c\xff-\xf8\x87\xf6`\xfc\xb0\x05r\x06Q\x00\x8c\xf8;\xf4\xfa\xfas\x02\xa3\x05 \x003\xf9\x1c\xf7\xb5\xf8\xd9\xff\x7f\x07\xd9\x05\xfe\xfd\xdb\xfbQ\x01B\x08\x16\x0e\xd3\r\xa6\t_\x07r\x08\xd3\x0e\xeb\x15\x01\x15\xcb\x108\x0c\xf4\n\xc1\x0e\xaf\x11r\x0f?\x07\x0b\x03$\x04\xbf\x04^\x04\xa5\xffV\xf9\xc0\xf5l\xf3\xc6\xf3\xb9\xf5\xf3\xf3\xa4\xf0\xa2\xeeV\xee\xa9\xee\xc0\xef\xaf\xf1\xaf\xf3s\xf3\xc5\xf3\x8d\xf6^\xf9h\xfd\xc2\xfe\xb6\xfd+\xfe\xf0\xfe\x8a\x02\xdb\x08\x87\n\x96\x05\xf2\x00\x87\x01\xc3\x06\xde\x08#\x07d\x04\x9a\xfd\xd5\xf9\x8c\xfd\x7f\x05v\x06\xc7\xfb~\xf1\x87\xef\xcc\xf6i\xfe\x08\xff\x06\xf9\xd6\xf0`\xf0\xec\xf4\xbd\xf9\x83\xfe\xf8\xfa\x84\xf6\x93\xf6\xa3\xfcb\x00\xc5\xfd\xab\xfd\xd0\xfdy\xfe\xc2\xff\xc8\xff\x92\x00\xfb\x02\xa6\x02z\x04\xa1\xffy\xfa\xc4\xfb=\xff\xe7\x05\x02\x06\xe0\x00\xda\xfc*\xfc]\xfcp\xff\xf4\xff\xf5\x00_\x01\x00\x00/\xff\xca\xff\xd8\x00\xc3\x00\xde\xffd\xfd\xc7\xfc(\x00\n\x046\x04K\x03U\x01u\xff\xba\x01\xf9\x02\x87\x02f\x05\x8a\x0b\xd7\x11\xc1\x14\xae\x11\xa0\r\xf5\x0e_\x14Q\x1a\x11\x1e\xf0\x1dP\x1c\x1c\x1c\x86\x1c\xca\x1b\xaf\x17\x95\x12M\x10@\x0f\x1f\rA\n\xe5\x06\xbc\x02\x80\xfe7\xf9b\xf3/\xefm\xee\x82\xeeX\xed\xe0\xeb\xd9\xea\x97\xea\xe1\xea/\xec\x07\xed\x8a\xebY\xea\t\xed\xab\xf3\xf0\xf8j\xfaK\xf8\x1e\xf6a\xf7\x1a\xfa\x06\xfdh\xfeM\xfd\x0f\xfb\x1e\xfbl\xfd\x91\xfe\x07\xfd\xfd\xf9"\xf8>\xf7\x90\xf7\xb9\xf8$\xf9\xab\xf8\n\xf7w\xf6\xbd\xf7\xd9\xf8\x01\xf9\xac\xf8\xfb\xf81\xfa\xa6\xfb\x90\xfd\xa2\xff\xb6\xff\xd7\xfe\xc7\xff\xe6\x00\xf0\x01\x17\x03\xe9\x03\x81\x04S\x045\x04\'\x05\xf2\x05\xcf\x05\xef\x04T\x04>\x05-\x06X\x06r\x06\x1c\x06L\x05\x80\x058\x06\xc1\x06_\x07\xb8\x06<\x06l\x07\x0e\x08\xd8\x07\x91\x07\xcf\x06B\x06%\x06\x1c\x06\x11\x06\x15\x06h\x04.\x03\xf3\x02\xbd\x014\x01\x7f\x00\x0b\xff\xed\xfd\x1c\xfe\xd8\xfd\xb9\xfd\xc0\xfd\xbf\xfd\x8d\xfc[\xfc\x9f\xfcF\xfd\x03\xfe\xe6\xfd\x1f\xff\x06\x00\xdf\xffV\xff\xa2\xff\xe6\xff8\xff#\xff\xd4\xff\xcf\xff\\\xff\xbb\xfeO\xfem\xfd\xbd\xfc\x7f\xfc\xb5\xfb\x8e\xfb:\xfb\xe1\xfa\r\xfb\x88\xfa-\xfa\xb1\xf9\xfc\xf8]\xf9\x89\xf9\xc6\xf9\x97\xf9\xa3\xf9>\xfaE\xfa_\xfa\x04\xfas\xfa\x05\xfb\x90\xfb!\xfc/\xfc\xd3\xfc*\xfd\xd3\xfcA\xfd4\xfe\x82\xfe]\xfe\xb2\xfe\x0b\xff9\xff\x9b\xff\xa8\xff,\x00E\x00G\x00\xda\xff\xc0\xff\xe1\xff\x0c\x00\xcb\xff\xb8\xff\xa4\xff\xf6\xfeh\xff\xcf\xff\x92\x00\xf5\xff\xb0\xff:\x01\'\x04\x8b\x07<\n\x85\x0b}\x0c4\x0f\xc0\x12\xf9\x16<\x1bj\x1f\x03 \xaf\x1e\xdd\x1e\xce!4$\xbb"\x15 \xa9\x1dk\x1bt\x18\xe4\x13\x88\x10\xd9\r\x82\t\x07\x04\\\xfe&\xfb\xc3\xf9Q\xf6\xe6\xf1\x87\xee\xb9\xec\xd3\xea\x80\xe9Q\xe9\xc3\xe9\x1d\xea\xe6\xe8\xc1\xe7\x97\xe8L\xea\x85\xeb 
\xec\xb3\xec\xbd\xee6\xf0F\xf0\xeb\xf0\xec\xf1`\xf3<\xf4\xd1\xf4;\xf6\xc4\xf7\xbb\xf7|\xf7^\xf9\x07\xfby\xfb&\xfb\x84\xfb\x84\xfc\x9b\xfci\xfc\xfe\xfd\xf4\xfeB\xfe\x89\xfdM\xfd\xdb\xfdF\xfe\xc2\xfd\xb5\xfd\x88\xfe\x80\xfe\xf7\xfd\xff\xfd\xe0\xfep\xffK\xff\x14\xff\x19\x00\x05\x01m\x012\x02+\x03\xd1\x03\x07\x04h\x04]\x05\xb6\x06\xbc\x07a\x08\xd8\x08W\t\xee\t\xc1\nb\x0b\xaf\x0b\xdc\x0b\x98\x0b\xb4\x0b\xd4\x0bW\x0b\xde\n\n\n\xb5\x08\xa4\x07\xc5\x06\x8b\x058\x04n\x02N\x00\x10\xffI\xfe+\xfd\x0b\xfcV\xfa\xa9\xf8)\xf8\x9d\xf7Z\xf7\xb5\xf7\xb5\xf7i\xf7\x9a\xf7>\xf8\xf3\xf8\xdd\xf9\xa4\xfa[\xfb!\xfd\x97\xfe\xd3\xff\x1c\x01j\x028\x03\xa9\x03\xab\x04\xdc\x05\xc3\x06\xb6\x06&\x06$\x06M\x06\x8b\x05.\x04q\x03\xc7\x02\x1e\x01I\xff\xfe\xfe\xdc\xfe\x97\xfd\x9b\xfb\x98\xfa0\xfal\xf9\xb6\xf8\xd2\xf8k\xf9u\xf9\xb2\xf8H\xf8&\xf9\x81\xf9y\xf9\xaf\xf9 \xfa\xd7\xfay\xfbw\xfb\xbc\xfb\xa1\xfc\x84\xfc1\xfcl\xfc\x9c\xfd4\xfe\xe6\xfd\xe7\xfd\x07\xfe\x16\xfe\xe8\xfd\x87\xfdT\xfd\xa4\xfd\x7f\xfd\x85\xfd\x03\xfd\xab\xfc\x85\xfc\xef\xfb\x07\xfc\x81\xfc\xcb\xfc\xe3\xfc$\xfd\r\xfd\x85\xfdN\xfd\xf5\xfdA\xff\x17\x00\x08\x01\xff\x01\x8c\x02\x99\x03/\x05\xe4\x05\xbc\x06\x04\t\xde\x0c\x81\x10z\x12.\x14\x83\x17L\x1aY\x1b_\x1cu\x1f\xb0#\x1b%W#\t"\x87"A!\x18\x1d\xde\x19\x90\x19\xff\x16\x99\x0f\x9b\x08\x90\x06Z\x05i\x00\x8f\xf9\x80\xf5\x88\xf3\xaf\xef|\xea\x98\xe8\x0e\xeac\xea \xe7v\xe4F\xe5&\xe7\x1f\xe7c\xe6\xcf\xe7\'\xea\xd6\xea\r\xeb\xac\xecD\xefH\xf0\xc3\xef\xf4\xf0\x8e\xf3\x02\xf5\x87\xf5k\xf6\xdf\xf7\xca\xf8O\xf9\xc8\xfaj\xfc\xa2\xfc\xf9\xfb\xa3\xfc\xef\xfd\x7f\xfeP\xfe\xcc\xfeF\xffL\xfe\xf8\xfd\'\xff\xab\xff\xcc\xfe\xcd\xfd$\xfe6\xff\x82\xfe\x8f\xfe\xff\xffS\x00g\xffE\xffn\x00\xe9\x01\xdd\x01\xfb\x01d\x03\x16\x04z\x044\x05\x81\x06\xcb\x07\x04\x08\xf2\x07\x00\t)\n\x8b\n\xb0\n\xfb\n[\x0b?\x0b\xde\n\xd2\n\xe3\nG\nH\t\x8b\x08\x0e\x08\xa1\x07z\x06[\x05d\x04H\x03\xbd\x01\x91\x00\xf3\xff\xf5\xfeY\xfd\x18\xfc\x82\xfb\xb4\xfal\xf9\xaf\xf8[\xf8\x99\xf7p\xf7e\xf7\xcc\xf6\xa7\xf65\xf7\xab\xf7K\xf8j\xf9\xef\xf9\x82\xfa\x88\xfb\x83\xfc\xe6\xfdI\xffm\x00\x89\x01\x95\x02=\x03\x9d\x03M\x04\x1a\x05\x12\x05\xeb\x04\xb4\x04\xa0\x04a\x04s\x03\x8d\x02\xf1\x01c\x01X\x006\xff\x06\xff\x8c\xfe\x8e\xfd\xd2\xfc\xc9\xfc\xb5\xfcJ\xfc\xf1\xfb\xf7\xfb"\xfcZ\xfc\x88\xfc\xfc\xfcr\xfdu\xfd9\xfd\x8f\xfd6\xfe\x82\xfe\xdb\xfe.\xff,\xff\x0f\xff\x0f\xff1\xff\x9b\xff\x08\x00w\xff"\xffW\xff?\xff\xd3\xfe\xaa\xfe+\xfe\xca\xfdD\xfd\x8c\xfc0\xfc\x02\xfcx\xfb\xc7\xfae\xfa{\xfaK\xfaM\xf96\xf9\xdf\xf9o\xfab\xfaj\xfa\xcb\xfa\xef\xfa\x0b\xfbh\xfb\xf2\xfb\x8c\xfc/\xfd\x95\xfdI\xfe\x1a\xff\x1c\x00\xb1\x00\x8c\x01\x99\x03@\x06\xee\x07\x02\nf\x0e6\x13\x9c\x15Z\x16i\x18Y\x1dL!\xf4!V"\xb2$Y&\x16$\xef \xb4 \xa5 \xff\x1b6\x15\x13\x12\x04\x11\xb6\x0c\x14\x05_\xffj\xfd3\xfa\xb3\xf3\xdf\xee?\xeeh\xed\xfd\xe8\xed\xe4)\xe5j\xe7a\xe6\xab\xe3~\xe4\xe6\xe7\xf6\xe8\x16\xe8A\xe9\xfc\xec2\xef\xd2\xee\xc0\xef\xb6\xf2\xbd\xf4\xd3\xf4C\xf5M\xf7\xbe\xf8\xe6\xf8e\xf9\xb7\xfah\xfbK\xfbw\xfb 
\xfc\xb3\xfc\x0e\xfdo\xfdy\xfd\x1a\xfd\x1e\xfd\xde\xfd4\xfe\x98\xfdS\xfd\xe1\xfd\xf6\xfd\x17\xfdB\xfd\xab\xfe1\xffN\xfeJ\xfe\xf2\xff\x0b\x01\xc8\x00J\x01\x18\x03?\x048\x04\xf6\x04\xff\x06{\x08u\x08\xdc\x08\x83\n\xb0\x0b\xd4\x0b\xff\x0b\x07\r\xbe\r^\r#\r\xa0\r\xd4\r\x0e\r\x01\x0c\x83\x0b[\x0bh\n\xe3\x08\xc0\x07\xcb\x06;\x05D\x03\xae\x01w\x00\xfa\xfe\xf9\xfcU\xfb8\xfa\x08\xf9\x89\xf7T\xf6\xcd\xf5[\xf5\x80\xf4%\xf4\x81\xf4\xc5\xf4\xcf\xf46\xf5\t\xf6\xe1\xf6\xd0\xf7\xa1\xf8~\xf9\x9e\xfa\xc2\xfb\xf1\xfc>\xfe\xd4\xff\xc4\x00\x84\x01\xba\x02\xa0\x03\xa7\x04\x8a\x05e\x06H\x07\x97\x07\x85\x07\x8a\x07\x03\x08\xe4\x07<\x07\xbf\x062\x06\xbf\x05\xd1\x04\xc2\x03\xd2\x02\xfc\x01\x1f\x01+\x00p\xff\t\xff4\xfej\xfd\x08\xfd\x00\xfd\xd3\xfcn\xfc<\xfcV\xfcx\xfcz\xfc\xb7\xfc-\xfdv\xfdw\xfdm\xfd\xd0\xfd_\xfer\xfe\x81\xfe\xc7\xfe\xe7\xfe\xe7\xfe\xa4\xfe\x89\xfe\xa2\xfet\xfe\xa5\xfd\xf0\xfc\xb9\xfco\xfc\xa8\xfb\xdb\xfa4\xfa\x9e\xf9\xe1\xf8(\xf8\xe5\xf7\xc3\xf7o\xf7.\xf7K\xf7\xb1\xf7\xe7\xf7\x16\xf8\x9d\xf8\x81\xf9s\xfa\x1d\xfb\xeb\xfb\xd0\xfco\xfd=\xfe,\xff\xe2\xff\xb9\x00{\x01\xeb\x01Y\x02\x94\x02\xf6\x02\xab\x03\x1e\x04x\x04"\x05\xf9\x05,\x07\x99\x08\x1c\n\xf2\x0b{\r\x0b\x0f\xda\x11S\x15\x99\x17\x81\x18\x15\x1a\xaf\x1c\x0f\x1e\xbf\x1d6\x1e\xc3\x1f\x15\x1fp\x1b\xa1\x18\xfa\x17#\x16\x04\x11\x02\x0c\x8d\t\x86\x06\x80\x00\xa0\xfaV\xf8\xb5\xf6\xec\xf1X\xec\x10\xea\x02\xea\xf7\xe7\xe9\xe4d\xe4\x1f\xe6G\xe6\xbd\xe4\x04\xe5\xbb\xe7\xdd\xe9\x02\xea\x82\xea\xfc\xec\xa4\xef\xdf\xf0k\xf1\x19\xf3u\xf5\xd6\xf6B\xf7O\xf8|\xfaO\xfcX\xfc6\xfc\xca\xfd\xf8\xff\x84\x00\xe0\xffy\x00\x08\x02)\x02\x0b\x01M\x01\t\x035\x03s\x01\xa8\x00\xe5\x01\x87\x029\x01:\x00\xfe\x00\x92\x01\x83\x00\xbf\xff\xcc\x00\xfd\x01g\x01{\x00.\x01\x89\x02\xc3\x02F\x02\x02\x03\x8a\x04\xf3\x04\xb9\x04n\x05\xd3\x06o\x07\x14\x07T\x07h\x08\xe3\x08\x8b\x08p\x08\xef\x08\x17\tl\x08\xf3\x07\xe2\x07m\x075\x06\x05\x05r\x04\xd9\x03\xa1\x02S\x01J\x001\xff\xc7\xfd\x85\xfc\xbb\xfb\r\xfb\x15\xfa\x1a\xf9\xa3\xf8T\xf8\xf1\xf7\xbe\xf7\xf0\xf7.\xf81\xf8\x86\xf8?\xf9\xfe\xf9\xac\xfar\xfbT\xfc!\xfd\xd8\xfd\xa8\xfe|\xff1\x00\xac\x00H\x01\xde\x016\x02^\x02\x91\x02\x07\x03[\x03g\x03\x87\x03\x9d\x03\xaa\x03n\x03n\x03@\x04N\x05\\\x05\xd7\x040\x05+\x06f\x06\xef\x05T\x06U\x07-\x07\xcf\x05I\x05\x06\x06\xad\x05\xb4\x03\\\x02p\x02\x06\x02"\x00\xa9\xfe\xa0\xfe0\xfex\xfc.\xfb[\xfb\x83\xfb\x91\xfa~\xf9\x95\xf9\xdf\xf9\x84\xf9\x1e\xf9W\xf9\x8f\xf9)\xf9\xc8\xf8\xfe\xf8\x81\xf9\xb1\xf9\x91\xf9\xa9\xf9\xe3\xf9:\xfa\xa4\xfa\x08\xfbl\xfb\xe0\xfbT\xfc\xb9\xfc/\xfd\xe8\xfd\x85\xfe\xc2\xfe\xf2\xfeN\xff\xc8\xff\x05\x00#\x00T\x00|\x00\x89\x00\xa1\x00\xd1\x00\xec\x00\xf4\x00\xd9\x00\xcf\x00\xff\x00#\x01%\x01&\x01\xf1\x00\xc7\x00\xbf\x00\x8f\x00i\x00N\x00(\x00\xf8\xff\xe1\xff!\x00Y\x00E\x000\x00\xc3\x00\xe7\x01*\x03\x99\x04c\x06Y\x08\x0c\nM\x0b\xea\x0cZ\x0f\xcd\x11>\x13$\x14C\x154\x16\x13\x16S\x15\xeb\x14h\x14\x81\x12\xa4\x0fF\rR\x0bl\x08\x93\x04&\x01r\xfeh\xfb\xe4\xf7\x17\xf53\xf3\'\xf1\xdb\xeeD\xed\xe0\xec\x9a\xec\x00\xec\xeb\xeb\xa3\xec~\xed-\xeeR\xef\xff\xf0p\xf2\x87\xf3\xb7\xf4R\xf6\xfd\xf7<\xf9^\xfa\x92\xfb|\xfc_\xfdY\xfe~\xffK\x00\x95\x00\xe8\x00[\x01\xb8\x01\xc9\x01\xe8\x014\x02\x1e\x02\xa9\x01L\x01/\x01\xde\x00\x08\x00G\xff\xf1\xfe\x9b\xfe\x0f\xfer\xfd\x10\xfd\xa7\xfc\x00\xfc\xb4\xfb\xe3\xfb\x10\xfc\xf5\xfb\xd4\xfb\x1f\xfc\x81\xfc\xb2\xfc\x08\xfd\xaa\xfdX\xfe\xdb\xfeT\xff\'\x00\x13\x01\xbb\x01\\\x02&\x03\x0b\x04\xec\x04\xb3\x05w\x061\x07\xb9\x07,\x08\x90\x08\xe5\x08\r\t\x03\t\xe4\x08\x9e\x08&\x08y\x07\xa3\x06\xa8\x05\xa3\x04\x9f\x03\x9b\x02}\x01T\x00+\xff\x12\xfe\x13\xfd;\xfc\x83\xfb\xe8\xfaa\xfa\x15
\xfa\x04\xfa\x10\xfa:\xfa\xa0\xfa0\xfb\xce\xfbh\xfc\x01\xfd\xcb\xfd\x97\xfec\xffW\x007\x01\xea\x01^\x02\xa7\x02\t\x03t\x03\xb8\x03\xd8\x03\xdb\x03\xc9\x03\x8e\x03/\x03\xc3\x02H\x02\xa2\x01\xf8\x00f\x00\xee\xffz\xff\xe1\xfe(\xfe\x7f\xfd\xf7\xfc\xa3\xfcg\xfcC\xfcN\xfcL\xfcH\xfc\\\xfc\x9f\xfc\x13\xfd|\xfd\xdd\xfdv\xfe3\xff\xc7\xffH\x00\xe6\x00\x97\x01#\x02\x96\x02(\x03\xc0\x03*\x04a\x04\x90\x04\xd5\x04\xf8\x04\xd3\x04\xa9\x04\xa0\x04\x87\x047\x04\xf0\x03\xa3\x03/\x03\x9b\x02\x1c\x02\xbc\x01[\x01\r\x01\xa2\x00\x0f\x00\xa5\xffb\xff\x1a\xff\xb6\xfeg\xfeT\xfeh\xfeN\xfe\x10\xfe\xd2\xfd\xb3\xfd\xc1\xfd\xc9\xfd\xc6\xfd\xb6\xfd\xb5\xfd\xb1\xfd\xc5\xfd\xe2\xfd\xf2\xfd\xd6\xfd\xa9\xfd\xde\xfdU\xfe\xca\xfe\xf0\xfe\x1f\xff\x9d\xff\'\x00\x87\x00J\x01\xe6\x01\xbe\x00C\xff\x97\x01<\x07\x14\t\xbc\x03~\xfei\xffn\x03\xf0\x04\x80\x04\xed\x03\xe0\x00H\xfbJ\xf9V\xfd)\x00#\xfc\x88\xf6#\xf6\xdc\xf8 \xf9\x9e\xf6\xed\xf4\xc5\xf4\xb8\xf5\xb3\xf7\x0b\xfa\xc8\xfa\xdf\xf8*\xf7\xae\xf8\x08\xfd\x01\x01\x9c\x01\xd3\xff\xe3\xfeW\x006\x03\x01\x05X\x05\x0e\x05q\x04\x16\x04B\x04\xb0\x04\x14\x04\xe8\x01!\x00\x14\x00\xf6\x00\x7f\x00t\xfe\x0b\xfc\x9a\xfa\xc7\xf9\x10\xf9i\xf9V\xfca\x00\x0e\x01\xa1\xfd<\xfb\xe4\xfc\xd1\x01:\x08\t\x0f$\x13c\x0f^\x08\r\x08=\x10\xe0\x17M\x18\xda\x15\x90\x14\x97\x11A\x0c\x08\nk\x0cr\r\xe6\t\xed\x06\xaf\x05\x90\x01-\xf9\xed\xf3\x92\xf6\xfa\xfad\xfb\xe8\xf7X\xf3r\xee\xfd\xea\xf3\xech\xf3\xec\xf7\\\xf7j\xf4\x84\xf2j\xf2\x1e\xf4\xa4\xf8\x0c\xfe\xe0\x00\x88\x00e\xff+\xff\xc8\xff\xba\x01,\x05l\x08$\t\xce\x07\xc7\x05\x81\x03\x0e\x02\xd5\x02\xa9\x05\x05\x07\xda\x04O\x00\x10\xfc\xb9\xf9\xc7\xf9\xfe\xfbG\xfdc\xfb\x89\xf7\xb9\xf4/\xf4\x8d\xf4u\xf5\xee\xf6\x03\xf8\xdb\xf7\xe2\xf6\xa8\xf6\x85\xf7,\xf9\x84\xfb"\xfe\xcb\xff\xde\xff\x8d\xff9\x00^\x02\xb5\x04^\x06P\x07\xa4\x07y\x07\x18\x072\x07\xc2\x07\x8b\x08\xcf\x08j\x08Z\x07\xaf\x051\x04v\x03\x93\x03\xb6\x03\x0f\x03\xa8\x01\x17\x00\xe0\xfe?\xfe\x1a\xfes\xfe\xcd\xfe\xa0\xfe\xf1\xfd\\\xfdi\xfd\xc0\xfdC\xfe\x0c\xff\x06\x00\x93\x00z\x00<\x00d\x00\xdf\x00\x96\x01X\x02\xf0\x02\x02\x03]\x02l\x01\x02\x01M\x01\x02\x02O\x02\xdc\x01\xd7\x00\x94\xff\x08\xffn\xff\x0e\x00h\xff<\xfe\xab\xfe\x15\x00y\xff\xbf\xfcG\xfbB\xfd\x1a\x00\xca\x00\x1f\xff\xa9\xfc<\xfb1\xfc\t\xff\xa4\x00n\xffn\xfd\xf8\xfc\x84\xfd\xb6\xfd \xfe 
\xff\x92\xff-\xff\xd8\xfe\xd7\xfe\xb2\xfe\xb2\xfe\xd7\xffN\x01\xa4\x01\x16\x01\xb0\x00\xc9\x00\n\x01\x9c\x01\xac\x02\x92\x03\x89\x03\xfb\x02\x8d\x02{\x02\xf2\x02\xd7\x03\x8e\x04F\x04l\x03\xc0\x02\x9c\x02\xac\x02\xf3\x02&\x03\xb5\x02\xae\x01\xce\x00\x93\x00\x8e\x00,\x00\xbe\xffe\xff\xf9\xfei\xfe\x00\xfe\xcb\xfdz\xfdI\xfd~\xfd\xc2\xfdX\xfd\xaf\xfc\xe9\xfc\xb1\xfdv\xfes\xfe\x1a\xfe\x05\xfek\xfeb\xff*\x00;\x00\xe8\xff\x05\x00d\x00\xa5\x00\xb8\x00\x15\x01Q\x01\x01\x01\xc1\x00\xa7\x00\x86\x00*\x00\xf4\xff+\x00;\x00\xc5\xff\xfe\xfei\xfe3\xfe7\xfeZ\xfeh\xfe\x13\xfe}\xfdH\xfdq\xfd\xc8\xfd\n\xfe]\xfe\xc3\xfe\xfb\xfe\x03\xff,\xff\x90\xff#\x00\xb4\x00\x11\x01/\x01\x0e\x01%\x01s\x01\xc7\x01\x08\x02\x16\x02\x02\x02\xaa\x01U\x01K\x01T\x01Q\x01-\x01\xec\x00\x85\x00\xf8\xff\xb9\xff\xeb\xff\x0f\x00\xe5\xff\xa2\xff}\xffc\xff5\xffK\xff\x85\xff\xbc\xff\xcc\xff\xbf\xff\xb3\xff\x9a\xff\xa0\xff\xca\xff\x0f\x003\x00$\x00\xf6\xff\xc6\xff\xba\xff\xc2\xff\xcb\xff\xb3\xff\x97\xffz\xffP\xff.\xff\x15\xff\x13\xff&\xff%\xff:\xff;\xffL\xff_\xff\x97\xff\xe6\xff\x1c\x004\x00F\x00o\x00\xcc\x00\x17\x01)\x01\x19\x01\x0f\x01\x19\x014\x01U\x01r\x01J\x01\xec\x00\xb5\x00\xbd\x00\xcf\x00\x9d\x00U\x00\x01\x00\xb7\xffy\xffh\xffk\xff<\xff\xec\xfe\xaf\xfe\x9a\xfe\x9b\xfe\xab\xfe\xaf\xfe\xc9\xfe\xd2\xfe\xf5\xfe-\xff\\\xff\x83\xff\xbf\xff,\x00\x90\x00\xcb\x00\xe8\x00\r\x01I\x01\x8a\x01\xba\x01\xc8\x01\xa5\x01m\x01j\x01q\x01E\x01\xe2\x00|\x005\x00\xe0\xff\xac\xff\xa5\xffh\xff\xf2\xfeK\xfe\x0f\xfe&\xfe?\xfe\x1a\xfe\x01\xfe5\xfe\xb0\xfen\xfe!\xfeN\xfe&\xfe3\xfe\xc3\xff@\x04\x12\x05\xf6\xff\x86\xfbt\xfe\xae\x05\xb9\x07\xd4\x05\xa6\x03\x88\x01\xb8\xfe\x17\x00\x82\x06\x12\t\xb5\x03$\xfe\x16\xff\xa1\x01\xcc\x00W\xff>\x00S\x00\xd5\xfd\xc6\xfc\x04\xfe\xd4\xfdh\xfb\x17\xfb*\xfe\xbc\xff\x87\xfd\xe9\xfa%\xfb\xf2\xfc0\xfeN\xff&\x00\x8b\xff\x84\xfd/\xfdh\xff\xba\x019\x02\x84\x01\x14\x01\xb1\x00H\x00\xf1\x00I\x02\xbf\x02\xa0\x01\x86\x00\x89\x00o\x00\xbd\xff)\xff\'\xff>\xff\xc5\xfe+\xfeQ\xfds\xfc\x0e\xfc\x94\xfc`\xfdC\xfdz\xfc\x8e\xfb\x07\xfbP\xfb\\\xfcP\xfd,\xfd?\xfc\xbc\xfb\x0c\xfc\x93\xfc\xc4\xfc\x1c\xfd\xa6\xfe\xa4\x00w\x01O\x00W\xff\xc6\x00\xb2\x04\x83\x08\xcc\t\xe9\x08\x9a\x075\x08\xca\n\xd0\r[\x0f\x9a\x0e\xf4\x0c\xa2\x0b0\x0b*\x0b\xa8\n\x97\t\x06\x08\xf5\x05n\x03\x98\x00d\xfe\\\xfd\xdf\xfc\xfe\xfb\xee\xf9:\xf7F\xf5\xda\xf4\xbd\xf5\xe9\xf6y\xf7$\xf7g\xf6j\xf6\xf6\xf7g\xfav\xfc\xb8\xfd_\xfe\xdf\xfe*\xff\xfb\xff\xa0\x01/\x03\xc1\x031\x03m\x02\xba\x01)\x01\x0c\x01\xfe\x00\xa0\x00p\xff\xd6\xfde\xfcU\xfb\xf1\xfa\xfd\xfa\xe9\xfaO\xfas\xf9\xcb\xf8\xac\xf8\r\xf9\x1e\xfa6\xfb\xc6\xfb\xd9\xfb\xe5\xfb\xa3\xfc\xff\xfd\xa8\xff\x0c\x01\x94\x01\x99\x01\xde\x01\xbf\x02\xb0\x03]\x04\xdf\x044\x05\x08\x05\xae\x04\x84\x04\xab\x04\xc9\x04\xa5\x04\x85\x04\x17\x04t\x03\xe1\x02\x88\x02p\x02_\x024\x02\xc8\x01\x1a\x01\x93\x00l\x00\x81\x00\x85\x00]\x00\r\x00\xb1\xffq\xff[\xffO\xff\x13\xff\xec\xfe\xf7\xfe\xe9\xfe\x99\xfe=\xfe\x10\xfe\x0e\xfe\x1a\xfe6\xfeM\xfe=\xfe\x1e\xfe-\xfe\x80\xfe\xe2\xfe\x15\xff6\xffm\xff\xcb\xff"\x00]\x00|\x00\xa7\x00\xce\x00\xf6\x00\x1d\x015\x01/\x01\x0b\x01\xe4\x00\xda\x00\xdc\x00\xac\x00\\\x00\x1a\x00\x02\x00\xea\xff\xc5\xff\x93\xffi\xff0\xff$\xff9\xff^\xff`\xff5\xffB\xff}\xff\xad\xff\xcf\xff\x02\x00.\x00\\\x00\x80\x00\xca\x00\xfb\x00\xf3\x00\xf1\x00\x14\x01c\x01s\x01a\x01S\x01>\x011\x01)\x013\x01#\x01\xd3\x00\x8f\x00\x99\x00\xa1\x00{\x00)\x00\x07\x00\xff\xff\xd6\xff\xb5\xff\xbb\xff\xab\xff\x89\xff6\xff\x0c\xff1\xffW\xff\x05\xff\xdb\xfe`\xff\x11\x00\x97\xff\x04\xff\xb3\xffT\xff<\xfe\x9e\xff\xd0\x04\xd9\x05B\xff\x9e\xfa\xaf\xfe\x81\x05\xe8\x05\x80\x03O
\x02\x08\x00c\xfco\xfe\xcf\x05,\x074\x00J\xfb-\xfeM\x01\x0c\x00\xd8\xfe\xea\xffo\xff\x03\xfdU\xfd\x84\xff\xfc\xfeR\xfc\xa6\xfc\x87\xff9\x00\xef\xfd_\xfc \xfd%\xfe\xfb\xfe\x07\x00H\x00\xa3\xfe\xfd\xfc\xcd\xfd\xdb\xff\xb0\x00\\\x00/\x00\xc5\xff\xdb\xfe\xc6\xfeB\x00B\x01\xd4\x00\xec\xff\xdc\xff\n\x00\xd2\xff\xde\xffQ\x00P\x00\xf4\xff\xef\xff(\x00\xc4\xff\xea\xfe\xb7\xfeq\xff=\x00\x0f\x00J\xff\x8d\xfe\'\xfe\xab\xfe\xf0\xff\xab\x00\xdd\xffx\xfeG\xfer\xff.\x00\x0f\x00\x83\xff\x07\xff\xb8\xfe\xf9\xfe\xb3\xff\xb1\xff|\xfe\x9e\xfd\x12\xfe\xdb\xfe#\xff\x15\xff\x86\xff\x89\xff\x18\xff\x02\x00W\x02\xdc\x04X\x05\xf9\x04M\x054\x06C\x08\xab\n\x8f\x0c\xf6\x0bs\t\x8b\x08\xf5\t\x8f\x0b\xfb\n\x94\x08\x00\x06\x1f\x04\xc7\x02\x02\x02\xa3\x00:\xfe\x91\xfb\xfc\xf9|\xf9V\xf8_\xf6\xf5\xf4\xef\xf4\xcf\xf5`\xf6p\xf6T\xf6n\xf6w\xf7\xe7\xf9z\xfc\xcf\xfd\xdf\xfd-\xfe\xac\xff\x9f\x014\x03\x08\x04\x01\x04V\x03\xc7\x02\x17\x03~\x03\xd8\x02\x10\x01\xae\xff\x16\xffD\xfe\xbd\xfc\\\xfb\x8c\xfa\x92\xf9\x9c\xf8[\xf8\x96\xf8&\xf8P\xf7\x88\xf7\xa5\xf8\xc2\xf9\x8e\xfa\x9c\xfb\xa2\xfcc\xfd~\xfeX\x00\x14\x02\xd5\x02g\x03\x8d\x04\xae\x055\x06i\x06\xf3\x067\x07\x14\x07#\x07\\\x07\xf4\x06\n\x06V\x05\x14\x05\x98\x04\x03\x04\x94\x03\x00\x03\x03\x02\x11\x01\x9f\x00\x8d\x00g\x00\t\x00\x92\xff\x08\xff\xc0\xfe\xdf\xfe+\xffn\xffP\xff\x03\xff\xd5\xfe\x04\xffW\xffk\xffy\xff\x92\xff|\xffK\xff\x16\xff\x02\xff\xe2\xfe\xc5\xfe\xc1\xfe\xbf\xfe\x90\xfe2\xfe\xcf\xfd\xab\xfd\xc4\xfd\xdc\xfd\xc9\xfd\xf5\xfd\x82\xfe\x98\xfe\xe0\xfd\x92\xfd\xae\xfe\x14\x00p\x00n\x00\x96\x00L\x00\x01\x00\x8d\x01\xdc\x03\xe3\x03\x03\x02\xbd\x01e\x03\xb7\x03\x89\x02\xaa\x02\x03\x04\xb3\x03\xec\x01R\x01\xc2\x010\x01$\x00\x93\x00w\x01\x83\x00U\xfe\xc6\xfd\xe9\xfec\xff\xbb\xfe\x8c\xfe\x03\xff\xb8\xfe\xdc\xfd-\xfea\xff\xa7\xff\xf8\xfe \xff\xc5\xff\x95\xff\xf0\xfeG\xff#\x00\x00\x00i\xff\x98\xff\xbe\xff\x02\xff\x85\xfe7\xff\x92\xff\x8c\xfe\xd6\xfd\x7f\xfe\xa5\xfe\x97\xfd\xf7\xfc\xaf\xfd\xe4\xfdW\xfd~\xfd\xd6\xfd#\xfdY\xfc8\xfdc\xfe\xe9\xfd\x1c\xfdf\xfd\xd0\xfdU\xfd\x9c\xfd^\xfeO\xfeN\xfdl\xfdU\xfeY\xfem\xfd\xe5\xfcT\xfdY\xfd\xf1\xfc\x90\xfcc\xfc\x07\xfc\xcb\xfb\xcb\xfb@\xfcn\xfc*\xfdB\xfe\xdf\xffj\x02\x0f\x04\x13\x05\xb4\x053\t\xa1\x0e\xaf\x12,\x14\xd3\x13K\x14n\x16\xe4\x19\x86\x1c\xe8\x1b\'\x19\xaf\x161\x15\x01\x14\x12\x12\xed\x0ec\n\xc7\x05\x9b\x02\x8a\xffd\xfb\xb6\xf6<\xf3\r\xf1\xf9\xee\x01\xed\xd7\xea\xec\xe8\x1b\xe8\x01\xe9\x9a\xea\xb6\xeb\xbb\xec,\xee\x02\xf0{\xf2\xd1\xf5\x0b\xf9\x07\xfb\xdb\xfc\xc2\xff\xff\x02_\x04H\x04M\x05\x8f\x07\xd0\x08\x12\x08,\x07\xaf\x060\x05\x04\x03{\x02\x1c\x03V\x01\xeb\xfc&\xfa\x8e\xfa\xa0\xfa\x81\xf8\xa3\xf6\x84\xf6\x1b\xf6\x85\xf4\xc7\xf4"\xf7\xe1\xf7\x84\xf6R\xf6\xd7\xf9\xba\xfc\x9a\xfcF\xfd\xeb\xff\x1d\x02\xf6\x01L\x03G\x07\xb0\x08\x14\x08\xb3\x08y\n\x11\n\xcc\x08/\x0b\xc8\x0c\x15\n\x13\x07D\x07\xf1\x07\xf1\x05Q\x043\x04\x01\x028\xff\x9e\xfe?\xff\xb3\xfd\x0f\xfbQ\xfax\xfa\xa7\xf94\xf9\x80\xf9!\xf9\xd6\xf7\xab\xf7C\xf9e\xfa\x0c\xfa\xba\xf9w\xfaW\xfb\xb1\xfbT\xfc\x95\xfd<\xfe\xbf\xfd\xee\xfd3\xff\xfc\xff)\xffo\xfe\xea\xfeb\xffB\xff\xe5\xfe.\xfe\x0e\xfd6\xfc\xa5\xfc-\xfd\xae\xfc\x96\xfbG\xfa?\xf9I\xf9w\xfbN\xfc9\xfa\xe9\xf7C\xf8\x13\xfa3\xfa\x04\xfb\x08\xfc\xd4\xfb`\xfa\x14\xfc\x1b\x01\xfd\x02\xbc\x01*\x02-\x07\xd4\x0b\x9a\x0e\xeb\x12<\x17\xab\x17u\x15\xf6\x18\xc7"\xa3(\xe5%\xb6 \x82 \xf5"\xa6"f \xf3\x1d\x1f\x1a\xd6\x13\xfa\x0e\xfb\x0c.\ta\x01\x9d\xf9\x85\xf6\xd0\xf5\x89\xf23\xed\xd8\xe7\xac\xe4\xcc\xe3\xa6\xe4\t\xe6 
\xe6\xd1\xe5a\xe6\xfa\xe7\x8d\xeb\xde\xef\xe7\xf2\xc2\xf3Q\xf6\xad\xfb\xa0\xff\xd9\xff\xf0\xff=\x03#\x05\xf4\x04&\x06\xa8\x08\x1b\x07\x1c\x01\xb8\xff\x9e\x02\xe4\x01\x08\xfc\xa9\xf9\xee\xfb\xb0\xfa\xb4\xf4M\xf2\x9f\xf4\x8e\xf4\xb3\xf1\xae\xf2\xac\xf6\xc6\xf6\xa2\xf3^\xf4\n\xf9\xb9\xfb\xa8\xfc\x16\xff\xe9\x02\x8f\x03\x04\x03\xa5\x040\x08\xc7\t\xb0\t\x02\x0b\xa7\x0c\xee\x0c\xf2\nS\t\xdd\x08\xba\x08\xa2\x08\x91\x08\xb1\x07!\x05u\x01o\xff2\xff\xf4\xfe\xe0\xfd0\xfd\x8b\xfc&\xfb\xe8\xf8\t\xf87\xf8a\xf8\xd5\xf8Z\xfa8\xfbn\xfa!\xf9Z\xf9\x8d\xfa\xa0\xfb\x1d\xfd(\xfeM\xfeU\xfd\x1c\xfdR\xfd{\xfd\xe6\xfdu\xfe\x8b\xfeE\xfd\xdc\xfb\xa6\xfa>\xfat\xfa\x87\xfa\x9e\xf9\xd9\xf7X\xf6\xe6\xf5\x8f\xf5\x81\xf6\x03\xf7\x1c\xf6\\\xf4>\xf5\x0f\xf9B\xfa\x99\xf8\x93\xf7\xd9\xfa8\x00\x06\x04\x82\x06\x07\x07q\x06\x0c\x08&\x0e\xda\x16\xb5\x1b\x07\x1c\xe0\x1cm \'$\x11%\x8b&i)\x89+\x85*5(y&\n#J\x1d\x96\x17\xc1\x14\xb6\x12N\x0e(\x07\xd7\xff\xc6\xf9\xb8\xf4\xea\xf0\xbb\xedP\xeb\xe3\xe8\xda\xe6`\xe5^\xe4\xfc\xe30\xe4\xf0\xe5T\xe9\xc2\xec\x11\xefS\xf05\xf2\xa1\xf4K\xf7k\xfay\xfd=\xff\x0b\xff*\xff\xad\x00\xc5\x01t\x00\x9c\xfd\xaa\xfc\x84\xfd4\xfde\xfa\x00\xf7\x1b\xf5\xcb\xf3_\xf2\xa1\xf1\x8d\xf1~\xf0\xe9\xee/\xefP\xf1\x18\xf3\x06\xf3\x10\xf4\xa5\xf6O\xf9\xcc\xfb\xe2\xfe\x91\x02\xe4\x03\x80\x04\x94\x06E\n\x99\x0c\xee\x0cc\r\xe6\rr\x0ey\x0e\x8c\x0e\xf8\x0c\xbe\nw\t\xac\t\xe4\x08B\x06\xd1\x03\x1f\x02\x05\x01\xd8\xff\xe5\xfe\xff\xfdC\xfc\xf0\xfa\xc4\xfaO\xfb\xfb\xfb\xc1\xfbd\xfb\x03\xfbF\xfbd\xfct\xfd-\xfe\x14\xfe\xbc\xfd\x12\xfd\r\xfd;\xfd-\xfd\xcd\xfb\x12\xfa\xcd\xf8d\xf8Q\xf8\xf0\xf6n\xf4\xaa\xf1\x98\xf0<\xf2\xa6\xf3\xf0\xf2T\xf0\xab\xee\\\xf0\xa9\xf3O\xf6\xb9\xf6<\xf59\xf5f\xf8g\xfdK\x00\xce\xff\xe1\xfe\x0c\x01\xdd\x05\x8f\tm\nB\n)\x0b%\x0e\x9f\x11Y\x14\x7f\x15\xa5\x15|\x16G\x18\xe5\x1a\xf7\x1cF\x1e\xba\x1e%\x1e\x1d\x1eP\x1f\xf7 \x07!\xa9\x1eH\x1c\x9e\x1b\x84\x1b\x13\x1a\xf6\x16\xa2\x13\x8c\x10|\r\xb3\n\x81\x08\xd3\x05\xa0\x01\x0c\xfd\x1c\xfa\xd9\xf8\xea\xf6\x9a\xf3\x01\xf1\xaf\xefJ\xeea\xec@\xebn\xeb\xc7\xea_\xe9*\xe9g\xea\x93\xebA\xeb\x1e\xeb\xd5\xeb\xe2\xec\xfe\xed\xe0\xee\xe7\xef\x9b\xf0\xc7\xf0P\xf1`\xf2\xbe\xf3Z\xf4\xf9\xf3,\xf4@\xf5\x8f\xf6\xf8\xf6\xc8\xf6\x80\xf7\x95\xf8\x8c\xf9)\xfa!\xfb\xb0\xfcR\xfd\xb6\xfd\xd3\xfe\x87\x00\xe7\x01b\x029\x03\x7f\x04\x80\x05\x1c\x06\x0c\x07H\x08\x80\x08"\x08|\x08\xd4\t\\\n\x85\t\xb6\x08\x9b\x08\xb4\x08\x06\x08\xc5\x07\xbe\x07A\x07\x0b\x06$\x05\x06\x05\xed\x04{\x04\xb7\x03\xf7\x02@\x02\xd8\x01\xb2\x01;\x01+\x00\x1e\xffZ\xfe\xe0\xfda\xfd\xb3\xfc\xaf\xfb=\xfa\xdf\xf8!\xf8\xa1\xf7\xd4\xf6\xa8\xf5f\xf4m\xf3\xb9\xf2E\xf2\xf7\xf1\xa9\xf1(\xf1\xa5\xf0\xa6\xf0 \xf1\x91\xf1\xd8\xf1J\xf2\x05\xf3\xe7\xf3\xee\xf4\x1b\xf6A\xf7L\xf8U\xf9\xb1\xfau\xfc4\xfeq\xffi\x00\xd2\x01\xb5\x03y\x05\xae\x06\x04\x08\xcc\t\x18\x0b\xef\x0b2\r\xc1\x0eb\x10\x8f\x11\x96\x12\xe3\x13\xcd\x14\x9b\x15b\x16\x92\x17\xa4\x187\x19\x1e\x1a1\x1b/\x1c^\x1c\x90\x1cu\x1d\xd8\x1d\x97\x1d$\x1d\x1d\x1d\xc0\x1c\xf1\x1a\xe2\x182\x17o\x15\xed\x12j\x0fC\x0c\xf9\x08\x19\x05I\x01\xf0\xfd\xca\xfa\x0e\xf7\r\xf3\xde\xef\x1f\xed\x97\xeap\xe8\xaf\xe6\xfc\xe4A\xe3.\xe2\x0e\xe2\x1f\xe2\x06\xe2>\xe2\xcc\xe2\x90\xe3~\xe4\xc7\xe5?\xe7\x97\xe8\xb2\xe9\x05\xeb\xb6\xec\x81\xee 
\xf0b\xf1\xb9\xf2B\xf4\xdb\xf55\xf7q\xf8\xb6\xf9\xef\xfa\xfe\xfb\x12\xfd\x85\xfe\x16\x00=\x01+\x02\x87\x03\xfa\x04\xec\x05j\x066\x07\x8d\x08\x04\n\xf5\n9\x0b*\x0b\xdb\n\x98\n\xba\n\x03\x0b\xdb\n\x17\n%\tI\x08~\x07\xc8\x065\x06d\x05:\x04e\x03\x1b\x03\xce\x02\x19\x02C\x01\xca\x00d\x00\xda\xff\x9d\xff\x9f\xffd\xff\xa6\xfe\x00\xfe\xe8\xfd\xce\xfd;\xfdO\xfcs\xfb\xbd\xfa\xeb\xf9/\xf9`\xf8E\xf7\xf0\xf5\xa8\xf4\xa6\xf3\x03\xf3\x7f\xf2\xf2\xf1`\xf1\x06\xf1%\xf1\x8a\xf1\x05\xf2Q\xf2\xf0\xf2\xe0\xf3\t\xf5U\xf6\xa7\xf7\xf7\xf82\xfaS\xfb\xa2\xfc.\xfe\xc5\xff)\x01j\x02\xd4\x03(\x05\x84\x06\xb2\x07\xf6\x08\x87\n+\x0c\xbe\r\xe1\x0e\'\x10\x18\x11\xb8\x11\xc9\x12\n\x14]\x15\x16\x16\x0b\x16\x14\x16U\x16\xa8\x16j\x17`\x18\xdf\x18\xe4\x18\x11\x19\xbb\x19!\x1a~\x1a:\x1b\xce\x1b\xed\x1a\x1d\x19\x89\x18l\x18>\x17\x8b\x14\xf0\x11\xe3\x0f\xad\x0c~\x08\x1c\x05\x90\x02d\xff\x99\xfa\x8d\xf6?\xf4\xd7\xf1x\xee\'\xebL\xe9\x1d\xe8\xfd\xe5\x06\xe4s\xe3\xa5\xe3=\xe3=\xe2b\xe2\xbf\xe3o\xe4F\xe4\xd0\xe4\xca\xe6\x0c\xe9\xec\xe9\xba\xea\xcf\xec&\xef\xab\xf0\xb2\xf1\xaa\xf3U\xf6\x11\xf8\xae\xf8\xfa\xf9d\xfcs\xfe5\xff\xbc\xffK\x01\xe0\x02-\x03H\x03G\x04\xc8\x05.\x06\xa9\x05\r\x06\xf9\x06\x1a\x07\xff\x05\x93\x05\x83\x06\xd5\x06\t\x06F\x05\x97\x05\xb3\x05q\x04J\x03B\x03\xa5\x03\xff\x02\xe4\x01\xd3\x01$\x02\x92\x01P\x00\xb1\xff\xe6\xff\xa1\xff\xa2\xfe=\xfe\xa0\xfe\x84\xfe\x8c\xfd\xca\xfc\xbf\xfc\x9d\xfc\xb9\xfb\x10\xfb5\xfb?\xfb\x82\xfa\x9a\xf9D\xf9\xfd\xf8;\xf8=\xf7\xc0\xf6\x98\xf6\xfd\xf5N\xf5\'\xf5!\xf5\xd6\xf4\x80\xf4\xab\xf4R\xf5\xe4\xf5|\xf6\x96\xf7\xc2\xf8\xb4\xf9\xb5\xfa>\xfc\xd4\xfd\xfd\xfe*\x00\x96\x01\x17\x03\x1b\x04\xe7\x04b\x06\xb8\x07Z\x08\x06\t\xf2\tL\x0b\xea\x0b\x18\x0c9\ro\x0e\x0b\x0f;\x0f\x8d\x0f\xba\x10\x1d\x11t\x10\xf7\x10`\x11-\x11i\x10\xd3\x0fo\x10"\x10h\x0e\xce\r@\x0e8\x0ea\r1\r\xbb\x0e-\x0f\xf8\r\x86\r$\x0f\xa6\x10\xbb\x0f\xbe\x0eK\x0f\xeb\x0f\xbc\x0e2\x0c\x00\x0ca\x0c\x1b\nC\x06\x98\x03C\x03\x96\x01\xfd\xfc\xb3\xf9\xc1\xf8D\xf7"\xf3b\xef#\xef\xe1\xee\xac\xebx\xe8\xf7\xe8q\xea\x8b\xe8\xe4\xe5\x07\xe7\xc6\xe9\xab\xe9a\xe8\xe7\xe9?\xed\x18\xee\x13\xed/\xef\xf3\xf2j\xf46\xf4\xa1\xf5\xcb\xf8\x1c\xfa\xd7\xf9\x82\xfb\x16\xfe\xb1\xfe\xc3\xfd\x83\xfe\xc5\x00\x98\x01\xf2\x00\x0c\x01Q\x02\xa2\x02<\x02i\x02V\x03\x9e\x03\xee\x02\xdf\x02\x90\x03\xdc\x03_\x03\xc3\x02\xe2\x02\x14\x03\xd1\x02\x07\x02\xba\x01\x9f\x01?\x01x\x00\xd1\xff\xf7\xff\xcf\xff\x1e\xff<\xfe\x07\xfe\xe8\xfdd\xfd\xc3\xfc\xd9\xfc\x07\xfdw\xfc\xb5\xfb\x84\xfb\'\xfcA\xfc\x8d\xfb\x8f\xfb\xea\xfb3\xfcd\xfc\x8a\xfcL\xfd\xa4\xfd#\xfd\x03\xfd\xa4\xfd\x9d\xfe\xbc\xfe\xe9\xfd\xf2\xfb\xdd\xfa\xb3\xfc\x7f\x00\xd4\x03\x9a\x00\x93\xf9\x16\xf6v\xf8O\xfe\xff\x01\t\x01\xc4\xfe\xac\xfd*\xfd\x11\xfd6\xfd{\xfe\x10\x01\xb4\x02 
\x03O\x05\x7f\x07{\x08q\x07!\x07<\t3\n\xe5\n#\x0c!\x0f\x03\x12\x16\x11\xf7\x0f\xbf\x0e\x82\x0c\x81\x0b&\x0b\xd5\x0c\xed\r\xfd\x0b\xa8\n\x14\n\x08\t\x13\x06i\x029\x00\xc3\xff\xaa\x00=\x02\xb4\x03\x94\x04\xc0\x02\xf8\xffP\xfeV\x00(\x05\xca\x08>\n\t\x0b\x05\x0eE\x11\xdb\x11#\x0f\xa3\x0cl\r\xf0\x11T\x16B\x18\x9d\x14\x1e\r\xc2\x07\xde\x06\x1e\t|\x07\x8c\x01\x8b\xfc[\xfbn\xfc\x1d\xfaD\xf3\xe9\xea\x0c\xe5\'\xe5\xdb\xe8~\xec\x02\xebP\xe5\xd9\xe1\x95\xe3\xf7\xe7e\xe8X\xe5l\xe6\xf5\xed$\xf6|\xf9\x07\xf7\xd1\xf4\x9f\xf4\x15\xf7O\xfbI\xff0\x02\x03\x03r\x04\xe2\x04G\x03s\xff\xb0\xfc:\xfeb\x01\xa4\x03_\x03V\x01\x8d\xfep\xfa\x93\xf7\x8a\xf6\xea\xf7Y\xf9\xcc\xf9\x18\xfal\xf9\xbb\xf8x\xf6\xf8\xf4\xed\xf3*\xf5z\xf8;\xfc\xe2\xfec\xfe!\xfc\x07\xfa\x98\xf9\n\xfbm\xfc\x12\xfe\xb7\xffL\x01\xeb\x01C\x00\xd7\xfdr\xfb\x84\xfb\xfc\xfc\x19\x00q\x02\x00\x03z\x02\xef\x00\xe1\xff\xa4\xfe\x80\xfe9\x00\xa3\x02\xdb\x044\x05u\x05\xc2\x04^\x02\xb7\xff\xd5\xfe\x98\x01\xe1\x04A\x06\xea\x04\xd2\x02\x04\x01\\\x00\xc6\x02\x07\x03\x86\x01\xb3\xfe\x1a\xff\xbb\x04\xcd\x061\x03\x80\xfe\xbf\xfc\xac\x00\x14\x03p\x02\xcb\x011\x01\x1d\x04\xc5\x06c\x08\x07\x07\xec\x03>\x04\\\x08\xb4\x0cB\x0e\xc2\nR\x083\t\xeb\n\xe2\r\xbe\x0b\xf6\x08m\x07v\x07\\\t\x14\t\xe9\x06b\x04W\x02\x9a\x02\xe4\x03\x89\x04\xd9\x04\r\x03\xa9\x00\xb0\xfe\x1d\xfd\x8d\xfc\x1f\xfd$\x02!\n\x1f\x0f\x16\x0f\xe6\x08\x05\x05\xc5\x05\x01\r\xa5\x15 \x19\xe9\x17\xcf\x13\xc4\x14s\x14\xda\x10\xe4\x08\x99\x02\xfb\x04V\n\xd1\r\x9f\n\xc3\x00\x02\xf7\x0c\xf0\x8b\xef\xd8\xf1\xd1\xf2\xbd\xf3}\xf2>\xf1d\xec\x18\xe71\xe5%\xe6\x1a\xebB\xef\x83\xf2\xf4\xf3\xc9\xf2\xb8\xf2\xeb\xf1\xfb\xf0\x8c\xf1\xff\xf4\xb6\xfc\xe3\x02\xec\x04\xc7\x00$\xfa\xf9\xf6P\xf8\xda\xfd\xa4\x01\xda\x02\xbd\x01(\xfe\xad\xfb%\xf9\x07\xf9u\xf8\xdf\xf6\xff\xf6\x00\xf9O\xfcD\xfcl\xf8\x01\xf3m\xf1\n\xf4\xdf\xf9(\xfe_\xfd*\xfb\x84\xf8p\xfa\x02\xfd\xc1\xfdb\xfd\x14\xfd"\x00$\x03b\x03$\x00\xda\xfb\x1d\xf9\x1c\xfa\xb6\xfd\xb4\x005\x01,\xfe\x11\xfb\xad\xf9k\xfa}\xfbu\xfb+\xfb\xa8\xfd\x91\x01\x9e\x04\x90\x03F\xfeh\xfbj\xfcK\x00\xa2\x03\x98\x02\\\x01\x0c\x02\x16\x04\xc2\x05?\x02\xe2\xfc\xbf\xfee\x03=\to\n\xc5\x07/\x08Y\x06k\x02\xe7\x01\xe6\x01o\x07\x08\n\x8c\x07\xbc\x06\x1d\x03\xc3\x01\x00\x01\xbf\x00\xe5\x033\x07\xa4\nP\x0et\x0b4\x07\xc2\x019\x02g\x08\xb4\x0c\x97\x11\x90\x0f\xbc\x0c\xc0\x08S\x06\xf5\x06\x1e\x05H\x01}\x02\xad\x06=\x0c\xfb\x0cl\x07O\x05"\x01\x10\xff\x9d\xff!\x01\xf0\x05\xb7\x08\x84\t\xd9\t\xe0\x04%\xff\x11\xfb$\xfb\xa3\x01\x15\x08\xdb\x0c\xa8\n\x00\x03~\xfa\x9a\xf7\xb1\xfb\xf5\x00-\x03\x9c\x01\x88\x00\x8d\xffS\xfd\xce\xfa\xa6\xf80\xfb\x90\xfd\x93\x01\xfe\x01\x95\xff\x01\xfd*\xf7\t\xf6>\xf4\x8c\xf7\xa3\xfa\xdc\xf9m\xf8\xc4\xf3\x92\xf3\xcd\xf4\x91\xf7\xb8\xf9y\xf9\xcc\xfa)\xfb\xb0\xfdF\xfe\xeb\xfe(\x00q\xffC\x00X\xfc\x86\xf9\xaa\xf8\x06\xf9\x88\xfbo\xfb2\xf90\xf8^\xf6>\xf6\x89\xf5q\xf4c\xf8\xec\xfc\xca\x01\x84\x00\'\xfbC\xf8k\xf9\xe6\xfdr\x00-\x00%\x00\xdc\x00\xd9\x02\x06\x03!\x00v\xfd\xb6\xfc\xbc\x01\xa6\x08\xf4\t\xea\x04i\xfe8\xfde\x00\x04\x02\xd1\x02G\x01\xbb\x00\xe3\xff\xf4\xfe\x9a\xfc\xec\xf9\xe6\xf8y\xf9u\xfbl\xfa\xc8\xfa@\xfb,\xfc\x90\xfcf\xf9\xe7\xf9\xf4\xfc\xd7\x01\x04\x06L\x05\x0f\x05\xb9\x04x\x05\x84\x07\x93\x08z\x08\xd9\x03\xf2\x01\xf3\x02O\x07\xef\x08m\x06w\x04u\x01>\x02\xe2\x02>\x03\x1e\x03o\xff\xe9\xfd\xd8\xfef\x02#\x07-\x077\x03\x9d\xfd5\xf9\xe0\xf9\x9e\xff\xf0\x05\xc9\t\x9c\x08\xb8\x04\t\x03:\x02\xe8\x03\xe6\x024\x01E\x02\x89\x06\x81\x0b\xb7\t\xed\x03:\xfcr\xf8\xff\xfa\xf7\x00\xed\x06\xc8\x06\x1c\x04\x1a\x00]\xfc\x85\xf9\x9f\xf8\xd9\xfb\x88\x00\x8f\x02\x06\x01\xa4\xfd\xb0\xfbt\xfa\xe9\xf9\xca\xfa,\xfc\
xca\xfe\xd3\x01c\x05\x1a\x067\x04j\x01\xad\x00\xd9\x01\xbb\x01\xe9\x01V\x01\x16\x02;\x01\xcb\xfe3\xfd\x8d\xfcR\xfcE\xfc\xe3\xfc\'\xff\xba\x01\xdb\x03}\x05\x1c\x04\xc9\x00\x91\xfc\xeb\xfb\x06\xff\xd2\x03\x19\x07`\x06s\x02\x93\xfd\x90\xfav\xfa\x12\xfd\xf2\xffW\x03\xf7\x06\xe5\n6\x0cI\t\xee\x01=\xfbV\xf9F\xff\xec\x06}\x08\xfc\x02}\xf9\xd1\xf4\x90\xf4\x90\xf7)\xfb\x05\xfc\x80\xfd \xff\xb0\x001\x01\x1c\xfc\xd6\xf6\xd4\xf2\xe1\xf4\xaa\xfb\xba\x00\x82\x03\xa8\x00\xb8\xfb\xcf\xf5\x81\xf3$\xf7K\xfc\xd5\xfe\xb4\xfe\x80\xfe\xa2\xffQ\x01\x17\x00G\xfe\x8b\xfbU\xfb\x04\xff\xc5\x01\xe6\x02\xae\xff\x07\xfd\xff\xfd\x9e\x01\xdc\x04y\x03\xb5\xff\xb0\xfd\xcb\xfdy\xff\x8f\x01\xd7\x02\xad\x01\xa7\xff\x91\xff\x15\x005\x00"\xff\xdc\xff\x80\x00,\x02\x81\x03\x9b\x04\x08\x06\x0f\x04W\x02\x06\x00\xa7\x00j\x03\x90\x04\x99\x04K\x02k\x00\x9b\x00\x7f\x00@\x01\xbc\x00\x17\x00\xf1\x01\xe7\x02b\x03\xc0\x00\xc5\xfd\xc6\xfb?\xfb\xe7\xff\xb9\x04\xc0\x04O\xfe\x1d\xf7i\xf6D\xfcp\x01\x17\x02\xc9\xfe\xcf\xfc\xa0\xfe\xc5\xffZ\xfe"\xfa7\xf7 \xf8\xe5\xfc/\x02\xf4\x04\xad\x06\xbc\x05T\x02\x01\xfd\xb0\xf8D\xfb%\x03i\x0b\xf1\x0c\xc8\x06\x8a\xfe\xce\xf7\xf7\xf4\xfa\xf5v\xfa\xeb\xfd\xa8\xff\x1e\x03*\x05r\x04\x01\x01l\xfdY\xfe\\\x00\xc5\x04\x00\nR\x0c#\x0b\x82\x05\xab\xfe\xf4\xf9J\xfa\xb7\xfdw\x01u\x03Z\x03\x8b\x00_\xfdn\xfdF\x00\xd2\x04p\x08\x83\x06\x10\x02\xdd\xff\x18\x00\xaa\xffa\xfd\x93\xfd\x86\x00\x84\x05\xf3\x07`\x07\x11\x033\xfe\xfa\xfb\x98\xf8\xae\xfa\xfb\xff\x0e\x06q\t\xfe\x04\x05\xfe\xe9\xf6?\xf6\x12\xfc\xbb\x00\xa1\x01j\xff\x92\xff\x1c\x02\xae\x03\x99\x01\x97\xfeM\xfc\xbe\xfb\x11\xfc\xc1\xfe0\x03\x1c\x03\x08\xff\xdc\xf9\xb1\xf8z\xfcq\x00\x7f\x02\xf5\x01\xf0\xff\xf7\xfe\r\xfez\xffr\x01\x8c\x01\xba\x00\xb1\xfe\xca\xff\x81\x00\x1b\xff\x9c\xfbP\xf7\x92\xf8\xa1\xfc\x85\x01\xf2\x04\xd8\x04\xe2\x02\xbe\xfek\xfe6\xff\xcc\xff?\x01L\x04\x88\x07\xed\x04e\x01\x01\xff\x9d\xfeQ\xffj\x01\xe8\x03\x12\x06\xba\x06\x87\x04o\x00\xfc\xfc\xb5\xfc\x1c\xff\xb3\x03\xa5\x057\x01\x87\xf9\x1e\xf7\x99\xfa\x15\xff_\x01\\\x01y\x003\xfdv\xfbQ\xfc\x81\xfe\x87\xff\xfd\xfe`\x01\xdb\x04\xa4\x03k\xff\x1a\xfd\xe9\xfd\t\xfe\xfd\xfc\xb6\xfd\x04\x02\xf4\x04\xf4\x01\x01\xfb\xea\xf3\x1e\xf3\xa3\xf6\x90\xfdP\x04\xe0\x03\xbb\x00\n\xfeb\xfd\xb3\xfe\xdc\xfd\xef\xfd\x93\xfe\xb1\x01 \x06K\x064\x02o\xfa\xfa\xf5\x11\xf5h\xf8^\xfe\xee\x04\x12\t\xb5\x07\xee\x05\xff\x04\x97\x02\xca\xfdB\xfd\x1c\x016\x06U\nq\t\xef\x03\x82\xf9\xdb\xf0\xa8\xf1B\xfc\x89\x08\x9b\x0c`\x08\xed\x00\xf9\xf8\x06\xf6D\xfa\xce\x01\xf0\x06{\t\x86\x0c2\n\xaf\x01\xfa\xf8\x93\xf5{\xf7\x04\xfd\xeb\x04:\x0b\xb5\x0c\x93\x07\xed\xfd\xf3\xf3\xf1\xf1\xc8\xf9\xb8\x02J\ne\x0c\x9f\x07N\xff\x1b\xf7&\xf4\xf7\xf4\xd8\xf9\x8c\x01\x9f\x08o\ro\x0b\x01\x04)\xfa\xb2\xf1l\xf1M\xf7\xc8\x00 \tC\n1\x07*\x02v\xfc.\xf9\xb2\xf6\xa9\xf7B\xfdh\x03\xc1\x08\x82\x06\x12\xff\xf5\xf8\xa6\xf6V\xf9\x0c\xfey\x04k\x086\x05h\xff\x99\xf99\xf8\x99\xfd\xe1\x04\xb4\t\xf0\x08\xc1\x04\x00\x00 \xfc\x7f\xfb\x82\xfc:\xfd\xb3\xfdW\x00\x04\x05j\x08O\x08u\x04\x88\xff\xab\xfa\xe9\xf9\xd5\xfd_\x04\x82\t\xec\x08\r\x07\xdf\x03\xcb\xff\x0b\xfc\xa1\xf7\xcb\xf7\x17\xfb\xc2\xfd\xd3\x01\x16\x06:\x08%\x06n\x00\x95\xfa\xd1\xf4\xee\xf4\xfb\xfa\x17\xff\xac\x01"\x03L\x04\x80\x03n\xfe\xaf\xfa\xa9\xf8\xfe\xf86\xfc\x92\xfe\x16\x03:\x05\x9c\x04\x9b\x02}\xfd\xda\xf9\xfa\xf5\xc0\xf5"\xfd\xfe\x03U\x07>\x03\x9d\xfc\x08\xfb9\xfc4\xfe\x9d\xfe\xb9\xfei\x00\x95\x02\x16\x05\x10\x07[\x03`\xfd\xc4\xfaS\xfc\xa0\xfe\x90\x01\x9b\x05F\x08\xbb\x06\x82\x02j\xffS\xfc\x86\xfa\x0c\xfc\x9f\xff\xa1\x03\xc5\x06\xce\x06\xcf\x04\xba\x00 
\x03\xcb\x00\x9a\xf6\x96\xfeS\x00\xd5\xff2\xf7&\xfei\x04\x9d\xf9\x03\xfd\xec\x06O\xfei\xf4\xf3\x02=\x06\x90\x01\xad\xfe\x18\x02\xe4\x00\xa4\x01L\xffp\x044\x05\x19\x00\xbb\xff\xa3\xfe2\x04\xec\xff\xdf\x04{\x04c\xfa\xbc\xfd\xc0\x0bG\xf9\xaf\xf9\x8a\x06\xf5\x01n\xfc\x0c\xfap\x068\xfd\xc1\xfa\\\xfcL\x05-\x01]\xf8\xd5\xfd\x99\x039\xf9=\x00\x8b\n\xde\xfbz\xf7A\x04\xd1\np\xf2Z\xff\x9e\x10x\xfb\xb7\xf3\x02\t\xe9\t\x08\xef\xfd\x01\xad\r\xf1\xfa)\xf5\xbc\xffa\x0b\xa8\xfci\xfc\x98\x05O\xfd\x7f\xfa>\x00\x13\t-\x00\xc4\xfa|\xfc\x9a\x05\xd3\x01\x15\xfa\r\x05L\xfe\xce\xf4L\x01\xda\x05\xf3\xfb\x91\x02\x1e\x01\xc5\xefS\x03\x18\x0b\xfe\xf6C\xff\x11\x06\xad\xff\x91\xfe\xfe\xff\x05\x04\xb6\x04\xa1\xf9\x03\xfe\xdf\x05\x99\xfb\x02\x03\xb4\x07u\xf6h\xffd\x05\xe1\xfc\xa1\xfd\x87\xfe\xd1\x01\x94\xff\xe6\x00N\xf8\x1d\x04\xb8\xfeL\xfa\xba\x04\xbb\xfd\x85\xf5\x8b\x07\xc0\x08\xe2\xf5\'\x07\x88\xffD\x01\x1f\x00\xb8\x04\xa8\x05\xca\xf8E\x01z\t\xcf\x00L\xfc\xc2\x02\x06\x01\xef\xf8\xb2\xfbH\x06\xc7\x06%\xf8\x86\xf3A\x0e\x02\xfe\'\xf3\xa5\x00\x14\n\x8a\xfeJ\xf0$\x07\x12\x07\xaa\xfc\x04\xfbf\x01U\x04\xac\xff\x9b\x02\x04\x06J\x01~\xfc\x9c\x00\x1a\x06)\x05\x9a\x05 \x04\x14\xf6\x1e\xfe1\x06O\x03\x9f\x03\x05\xf9,\xfc\x8f\x04P\xfb\xc4\xf8N\x03\xe1\x03o\xf7A\xf9{\xff\xf1\xfe\xcf\x05f\xf9\xe4\xfa#\x04\x82\xfcG\x009\x02\xb3\xfe\xe4\xff\x83\x02\x9e\xfb\x0c\xf9\x97\x05\x87\x04\x02\xfd:\x01\x98\xfa\x7f\xfa\xe2\x07\xab\x05[\xf4[\xfc\xe0\x0br\xfc\xc9\xfc\xdd\t\x16\x00\x93\xf3!\xfa:\x11\xcb\x06\xf9\xf24\x01\xb7\x07:\xf5\xfc\x00\xe4\x10\xf6\xfau\xe8\xdb\x00\xab\x16`\xfd\xea\xf9v\x06\xc3\xfdw\xea\xa8\x02\xff\x13\x10\x03v\xf7\x13\xfaq\x03\xcc\xff\x83\x02J\x06\xa2\xfd\x87\xf3#\xfc\x02\x0c#\x06\xb5\xfc\x86\xfd\xba\xfa\xc3\xf8\x13\xfe`\x0c\xe3\x02\x17\xf5\xb9\x00\x85\x05\x0c\xfeP\xff\x81\x08\xee\x01\xfe\xef8\xfd\xc3\x0b\xd3\x06\xdd\x03\xa1\xfb\xdc\xf7\xbf\xffM\x06\xd0\xfc\xf8\xff\xa0\x04\xaa\xfe\xa2\xfe\x98\xfcG\x01r\x05\xea\xfc\xc3\xf7\xe9\xfc\xd2\x04a\x06x\xfe\xec\xf6c\x01\x07\x03;\xff@\x000\x01\xc3\xfe:\xff\xa0\x05\xb4\x00\xe0\xfe\x1d\x02\xf3\x00\x97\xfd\xcd\xffO\x06\xcb\x02p\xfd\x01\xffS\x02\xbe\x01\x9c\xfd\x81\xfd\xf2\x03C\xff\xcf\xfb^\x01L\x03\xb7\x00e\xfb2\xfe;\x03K\xff\xb7\xf8`\x03\x1e\x04\x04\xfc\xe1\xffO\x01\xcd\xfew\xfb\xcc\x00\xc4\x04\xef\x00\xe6\xfa\x14\x01\x82\x04\xc8\xff\x94\x00\xa1\x00\xbd\xff\xb4\xfaU\x011\x084\x02t\xfc%\xfci\xfcZ\xfe\xf5\x06c\x06q\xf8[\xf6X\x02\x89\x04\xd4\xfeM\xff;\x05\xb6\xfe3\xf8\xbb\x00\xea\x04\xa5\x04U\xff\x98\xfc`\xfa\xf4\x02\x18\n\x92\x01\xe6\xf6\x9e\xfa\x9e\x022\x01A\x02\xd5\x01\xe9\x02D\xfbi\xfbb\x01\xae\x03V\x00!\xfd\x87\x00^\x03\xa2\x00\x07\xfc\xf3\x00\xcd\x00\xd2\xfeO\x00\xca\x02\xef\xfd\x02\xfc\x14\x02\xe9\x05S\x01\x17\xfa\xaf\xfc\xf2\x00j\xff\x0e\x03\x15\x06[\xfe\xaf\xf5z\xfdu\x03Y\x06\t\x01\x14\xf6\x9a\x00\x05\x05/\x04\xbb\xfd\xd0\xfcY\xfer\xfeh\x03\x9a\x03\x98\x00\x8f\xfb\xbe\x00x\x03\x1c\xff\xf8\xffc\x00:\x00\xd4\xfa\xa3\xfd_\t\x14\x08\xb5\xfc?\xf2$\xfb\x19\x08b\x04\xd0\x00T\xfb\xe9\xfa\xa5\xfe\xf6\x03\xd1\x05M\xfc\xb9\xf8N\xfe\x08\x04V\x03.\x03\xf8\x01t\xfa\x8d\xf82\x00\x96\x06\xd9\x04y\x00\x08\xfc\x97\xfb\x99\x02e\x06u\xff\xc0\xfbG\xfb\x14\xfe\xa3\x06M\x06\xcc\xffm\xfc\xf0\xfb\xbe\xfe\x14\x01\xaf\x00&\x03\xcf\x02\xeb\xf9%\xfa2\x05B\x06\xc4\xff\x99\xfa\x9f\xf7,\xff\x9a\x06\xd1\x04e\x01B\xfb\xb7\xfc\xb7\xfd\x1e\x03\xab\x04\xce\x02\x86\xf9+\xf8\x84\x04\x0c\x08`\x05s\xfb\xd6\xf8\xc5\xfe~\x01\x1a\x02e\x05\xd3\xfe@\xf9)\x00a\x06\x1f\x00\xf8\xfe\x83\xfe\xeb\xf7f\xff,\t\xb8\x05{\xfe\xa5\xfa\x16\xfb~\xff\xc5\x02\xdf\x03\n\x01p\xfe\xbc\xfa\xf9\xff)\x05D\x02\xcd\x00H\xfb\x1c\xfa\xaa\x01\xf1\x05\xe2\x00
s\xfd\xfb\xfd\t\x01&\x01m\xff\xbf\xfdK\xff\xf1\x00\xe7\x01\x97\x02S\xfc\n\xff4\xffK\xfd%\x02\x92\x04\xac\x01\xff\xfa!\xfc\x8d\x01\x95\x02\xef\xff\x14\xfe\x83\xfeS\x00:\x026\x03%\x01\x18\xfb\xb8\xf8\x80\x00C\x07~\x05>\xfel\xfb\xf7\xff\x0c\x02\x84\x01\x98\x00}\xff\x82\xfd\x15\x00\xed\x01L\x03\xca\x01\x07\xfem\xfc5\xffW\xff\xca\xffI\x04d\x02\x89\xfc]\xfa\xda\x03\x9d\x04\x8f\xfcD\xff\x06\x00\x86\xfd\xa8\x03\x9f\x03q\xfe1\xfd_\xff;\x010\x04\xa8\x01\x87\xfe^\xfe\x0c\xfd\xec\x00I\x03\xa8\x02\xea\x01\xc7\xfd\xe2\xfa\xda\xfe \x04\xfb\x02\x04\xfeh\xfa\xfd\xfcZ\x00\xb2\x00\xf6\x01|\x02n\xfd\xf3\xf8n\xfe\xf0\x02\x9e\x04\x07\x04\xc9\xfc<\xfa`\x03\x0c\x05\x99\x00\xa7\xff\xd6\xfd[\x00s\x01\xd0\x00\x16\xffO\x00d\xffN\xfc\x93\xffw\x02O\x01\xa5\xff\xac\xfc\xae\xfeS\x04W\x00V\xfa\x82\xfdp\x00\t\x01X\x02\x93\x01\x00\xfd\x86\xfcT\x02`\x02\xdb\xfd>\x00\x8f\x02\xdc\xfeb\x00\xa2\x03I\x01\xe9\xfc\xcf\xfc\x02\x02)\x02\xe1\xfed\x00.\x02\x7f\x00\x05\xfd\xeb\xff<\xff\x7f\xfe\x9c\x03h\x00I\xfc@\xfd\x9b\x02\xa8\x04\xe9\x01-\xfd\xb5\xfb\xfa\xfc\xc3\x02\xf8\x03,\x02:\x02m\xff\x1d\xfd\x84\xfd\x88\x02@\x02\xe2\x00K\xfeb\xfe\x07\x01\xc3\x02\xe3\x01\xb1\xfd\xb3\xfc\t\xfdF\xff\xf6\xff\xb3\x00c\x03\x19\x02\xe1\xfc\xa5\xfaS\xfe\x15\x01s\x01:\x02h\xfe\xa4\xfc\x16\x01B\x03~\x00\xb4\xfey\xfd\x8f\xfc/\x01\xd7\x03\x14\x02\xb3\xfd~\xfe\x81\x000\x01\x05\x01R\xffv\xff\xba\xfd\x15\x01\x92\x03\xa2\x02\xf5\x00\n\xfe>\xfc\xe3\xfd\x19\x03E\x04\xa3\x00\x18\xfd\x06\xfe\x91\xff_\x01\x1e\x03\x9f\xfe\xb1\xfb\xc2\xfdU\x01\x81\x02@\x01\xb3\xff\xab\xfd)\xfe\xa7\xff\xca\x00w\x00~\xff\xc0\xffm\x00\xc6\xff\xb3\x00\x00\x03\xd5\xfe\x83\xfc\xa8\xfeN\x00\xfa\x00\xd0\x02\xa4\x024\xfd\x1f\xfe\xb9\x00\x9c\xffb\x00n\x00y\xff`\xff\x8a\x00\x88\x01\x06\x00\t\xff\xfa\xff\x81\xfe\x19\xfe\xf6\xff0\x01$\x01\xc0\xff\x87\xfe\xc7\xff,\x00\x11\x00\xc1\xff\x1c\xff`\xff\xca\x00*\x00>\xff\xc0\x01)\x01U\xfek\xfej\xff\xd2\x00\x9c\x02\xf3\x01j\xff\xd8\xfd\xcf\xfe\xb6\x01\xb8\x01\x94\xff\x9a\xff\xfa\xfe\x99\xff\x95\x01\xa1\x01S\x00\x87\xfe=\xfe\xe9\xfes\x00\xd7\x00\xd6\x01{\x00\xae\xfc\x85\xff\x83\x01\xad\x00!\x00\xdf\xfdU\xfe\xac\x00\xa5\x02\xb4\x01]\xffl\xfe7\xfe\xcd\xff\xeb\x00h\x00\x0b\x00\xbc\xff_\x00H\x01\x8c\x00\xdb\xff\x10\xff\x04\xfe\xe6\xffq\x01\xdf\x00\xfe\x00\x12\x01\xae\x00C\xff\xe8\xfd\x1d\xfe\xa2\xffc\x01\xad\x01\x02\x01\xd4\xff>\xff\xda\xff\xae\xff{\xff\xdb\xff#\x00\xd0\x00%\x01\x17\x00/\x01(\x02E\x00\xa6\xfd\xff\xfc\xa9\xff\xdd\x01\n\x02\x8d\x00\xb0\xfe\xe6\xfe\x0f\x00\xb1\x00#\x00\x1f\xff\x88\xfe\x19\xff\xde\x00\x90\x01\x90\x01\x95\x00\x83\xfe\xa6\xfd\x83\xfe\xbe\x00\xbf\x01\x16\x01F\x00W\xff\x8e\xff\x7f\x00T\x01\xd0\x00Q\xff\xf3\xfe\xb0\xff`\x01J\x01\xed\x00\x9e\xff\x7f\xfd\x03\xfe\x14\x00\xff\x00\x87\x00\xe7\xffY\xff\xe4\xff\xca\x00\x89\x00\x91\xff\xf3\xfe\xc5\xfe\x8a\x00\x93\x01\xaf\x00\x85\x00\x91\x00\xed\xffj\xff\xb1\xff\x93\x00c\x00\xdb\xff_\x00R\x01\x06\x01u\xff\x11\xff\'\xff\xa3\xff\xb0\xff\\\xffq\xffr\xff/\x00\xdc\x00\xd7\xff\xd2\xfe\x80\xfe\xb7\xfel\x00\xab\x01\x10\x01\x10\x00|\xffT\xff\xbc\xff>\x00\xc4\x00\xc4\x00Q\x00\x82\xff\x85\xff^\x00\xb6\x00\x1b\x00%\xff\x0b\xff\xaa\xff\x97\x004\x01\x8e\x00R\xff\xe1\xfeS\xff\xe3\xff+\x00\xff\xff\x02\x00\xe6\xff\xd5\xff\x91\xff8\xffx\xff\xf6\xffE\x00\x08\x00\xed\xff\x83\xff\xaf\xff\xb0\x00\xef\x00\xaf\xff\xcd\xfe\x98\xffs\x00\xc0\x00\x1c\x01\xa0\x00\xf7\xfe\xb4\xfe\xd1\xff\x03\x008\x00\xb7\x00\x89\xff&\xfe^\xff@\x01m\x01O\x00\xa9\xfe\xfe\xfd>\xff\xff\x00}\x01\xa0\x00L\xff\x8e\xfeD\xff8\x00\xe5\x00\xd1\x00C\xff\xfc\xfe\x00\x00\x1b\x01\x10\x01\xa3\xff\xc7\xfee\xff\x89\x00\x03\x01u\x00\xa7\xff\x99\xff\xd9\xff3\x00,
\x00\xc4\xffM\xff\x83\xff5\x00h\x00\x04\x00\xc6\xff\x92\xff`\xff\x8d\xffz\xffg\xff\xfa\xff\x82\x00{\x00\x1b\x00\xbc\xff\x93\xffs\xff\xbd\xff6\x00\x9f\x00\x8b\x00\xe1\xff\xf8\xff\x85\x00\xc1\x00,\x00*\xff\xef\xfe\x14\x00>\x01\x19\x01N\x00\xa9\xff\x7f\xfff\xff*\x00\xab\x00\x00\x00\xb1\xff\xe5\xff?\x00\xcc\x00\xcd\x00z\xff~\xfe;\xff\x91\x00\x00\x01\xc3\x00A\x00\x86\xff\xe2\xfe\x91\xff\xea\x00\xb3\x00\xfa\xff{\xff\xa1\xffM\x00\xe5\x00\xda\x00\xf2\xff\x01\xff\xcb\xfe\xc8\xff\xdb\x00\xd2\x00\\\x00\xd4\xffC\xffm\xff\xf6\xff\xf4\xff\xc7\xff\x9b\xff\xaf\xff<\x00\x9a\x00d\x00\xd9\xffT\xffQ\xff\x0e\x00\xea\x00\xc3\x001\x00V\x00\x7f\x009\x00F\x00\x11\x00}\xff\x8c\xff\x95\x006\x01\xd1\x00\x04\x00\x1e\xff\xae\xfeV\xff\x8e\x00\xfc\x00\x8a\x00\x16\x00_\xffr\xff4\x00o\x00<\x00z\xffE\xff\x17\x00/\x01$\x01F\x00h\xff\x15\xff\xb2\xff\xcf\x00M\x01\xf6\x00q\x00\xf6\xff\xcc\xff0\x00k\x00\xb8\xffM\xff\x97\xff!\x00\x97\x00o\x00\xbf\xff\x0c\xff\xef\xfea\xff\xd8\xff\x15\x00\x00\x00\xc5\xff\xba\xff\xcb\xff\xed\xff\xf8\xff\xbf\xff\x94\xff\xa4\xff\xf9\xffT\x00m\x00-\x00\xa7\xff\x8b\xff\xda\xff?\x00\xbc\x00\xc5\x00b\x00\xf4\xff\xec\xff\xf4\xff\xde\xff\xe1\xff\xe9\xff\xfa\xff\n\x00(\x00.\x00\xd2\xffS\xffx\xff\xe8\xff\x0c\x00\x10\x003\x002\x005\x00U\x00\xff\xff\xc0\xff\xdb\xff\x07\x008\x00j\x00s\x00c\x00L\x00\x18\x00\xae\xff\x96\xff\xf4\xff\x05\x001\x00\x8a\x00W\x00\xbf\xff\x88\xff\xbf\xff\xd5\xff\xfe\xff\x03\x00\xc2\xff\xc6\xff9\x00\xa5\x007\x00_\xff\xf6\xfe`\xff$\x00Y\x00\xf0\xff\xae\xff\x9e\xff\xb7\xff\xee\xff\xdf\xff\x89\xffG\xff\x93\xff\xe9\xffJ\x00g\x00\xf8\xff\x95\xff\xa6\xff\xe5\xff\x18\x00.\x00\x0b\x00\xb7\xffx\xff\xec\xffb\x009\x00\x94\xffR\xff\xa0\xff\xcf\xff\x1e\x00U\x00\xf4\xffp\xff\x92\xff\x00\x008\x001\x00\xf5\xff\x8f\xff\x86\xff\xe2\xff:\x005\x00\xe4\xff\x9f\xff\xa1\xff\xeb\xffV\x00K\x00$\x00!\x00D\x00t\x00g\x00\x1e\x00\xf4\xff\x0b\x00F\x00e\x00<\x00\xdd\xff\xb0\xff\xca\xff\xf5\xff\xff\xff\xca\xff\x9b\xff\xa1\xff\xe5\xff:\x00N\x00\xec\xfft\xffs\xff\xd3\xffA\x00;\x00\xee\xff\x98\xffw\xff\xde\xff]\x00$\x00~\xffW\xff\xaa\xff!\x00r\x002\x00\xb3\xffi\xff\xb0\xff3\x00_\x00 \x00\xd8\xff\xc3\xff\xd8\xff*\x00~\x00I\x00\xb5\xff\\\xff\x82\xff\xfc\xffi\x00w\x00\x0c\x00\x8c\xff\x84\xff\xf2\xff|\x00\x8b\x00.\x00\xe0\xff\xfd\xff\\\x00\xa5\x00\x93\x00\'\x00\xd1\xff\xce\xff\x12\x00Y\x00K\x00\x0e\x00\xe4\xff\xd0\xff\xf9\xff/\x00 \x00\xde\xff\xc2\xff\x03\x005\x00?\x00-\x00\r\x00\xee\xff\xf0\xff*\x00S\x00C\x00\r\x00\xf2\xff\x13\x00T\x00}\x00M\x00\xc4\xff\x9c\xff\x05\x00r\x00f\x00$\x00\xf5\xff\xc8\xff\xf2\xffH\x00M\x00\xf0\xff\xb7\xff\xda\xff\t\x00E\x00y\x008\x00\xb9\xff\x9a\xff\xef\xffF\x00M\x00\t\x00\xd4\xff\xdc\xff&\x00]\x00?\x00\xf0\xff\xdb\xff+\x00S\x00M\x00A\x00 
\x00\xee\xff\xff\xffA\x00:\x00\xf2\xff\xc0\xff\xcd\xff\xf5\xff\x1b\x00\x1e\x00\xfc\xff\xcf\xff\xd3\xff\x01\x00\n\x00\xdb\xff\xd5\xff\xef\xff\x00\x00\x07\x00\xfc\xff\xe6\xff\xc3\xff\xd5\xff\xe6\xff\xfb\xff\x06\x00\xf1\xff\xd3\xff\xf0\xffG\x002\x00\xf8\xff\xd4\xff\xe7\xff\x15\x00F\x008\x00\xf6\xff\xbd\xff\xcb\xff\x15\x001\x00\x08\x00\xb2\xff\x8d\xff\xb7\xff\x07\x002\x00\x08\x00\xbe\xff\x9a\xff\xb5\xff\xfa\xffD\x00%\x00\xd8\xff\xb9\xff\xda\xff:\x00{\x008\x00\xbd\xff\xa6\xff\xdf\xff8\x00h\x004\x00\xaa\xff\x8c\xff\xf6\xff3\x003\x00\x04\x00\xc7\xff\xa4\xff\xea\xffB\x000\x00\xeb\xff\xb5\xff\xa8\xff\xce\xff\x12\x004\x00\x14\x00\xd9\xff\xbe\xff\xda\xff\x04\x00\x0f\x00\x06\x00\xc5\xff\xb4\xff\xf6\xff0\x008\x00\x0f\x00\xde\xff\xa9\xff\xc2\xff\xf0\xff\x00\x00\xf2\xff\xd2\xff\xbe\xff\xb2\xff\xd6\xff\xf7\xff\xfa\xff\xd8\xff\xc6\xff\xde\xff\x08\x00"\x00\x00\x00\xdb\xff\xcd\xff\xe9\xff\xf9\xff\x1b\x00%\x00\x04\x00\xf9\xff\x06\x00\x0c\x00\xff\xff\xf4\xff\xf4\xff\xed\xff\xe5\xff\xf1\xff\xfc\xff\xe6\xff\xc2\xff\xaf\xff\xb6\xff\xc6\xff\xdc\xff\xe2\xff\xd7\xff\xd3\xff\xd3\xff\xd3\xff\xd3\xff\xdf\xff\xe4\xff\xd4\xff\xea\xff\x03\x00\x07\x00\t\x00\x0b\x00\xf5\xff\xe1\xff\xee\xff\x16\x00*\x00\x1a\x00\x0b\x00\x04\x00\x07\x00\x0c\x00\n\x00\x02\x00\xfe\xff\xf8\xff\xf4\xff\x02\x00\x12\x00\t\x00\x03\x00\x02\x00\xf7\xff\xf4\xff\xfe\xff!\x00\x1e\x00\x05\x00\x0f\x00\x11\x00\x01\x00\xfc\xff\x06\x00\x01\x00\x01\x00\x06\x00\x03\x00\x05\x00\x07\x00\x00\x00\xfe\xff\xfc\xff\x00\x00\x10\x00\x0e\x00\t\x00\n\x00\x14\x00\x19\x00\x1a\x00\x11\x00\x00\x00\xfe\xff\x08\x00\x17\x00\x15\x00\x02\x00\xf2\xff\xfa\xff\x0f\x00\x1c\x00\x04\x00\xfc\xff\x11\x00\x1d\x002\x00=\x00A\x00(\x00\x14\x00\x1d\x00-\x001\x00*\x00\x19\x00\x0e\x00\x16\x00\x1f\x00.\x00\x1d\x00\xfe\xff\xf2\xff\x05\x00,\x005\x00(\x00\t\x00\xfb\xff\r\x00%\x003\x00%\x00\x13\x00\x11\x00\x16\x00"\x00+\x00!\x00\t\x00\x00\x00\x12\x00\x05\x00\x07\x00*\x00\x11\x00\xef\xff\xe4\xff\xf4\xff\xfb\xff\xfa\xff\xeb\xff\xd5\xff\xdf\xff\x0f\x00*\x00\x0c\x00\x00\x00\x03\x00\x05\x00\xfc\xff\xfd\xff\x07\x00\x05\x00\n\x00\x14\x00\x12\x00\x01\x00\xfe\xff\xf3\xff\xe9\xff\xe2\xff\xf8\xff\x05\x00\xff\xff\xfa\xff\xf7\xff\xf4\xff\xf0\xff\xf6\xff\xfc\xff\xfb\xff\xfe\xff\x04\x00\x01\x00\xfe\xff\x02\x00\xfe\xff\xf5\xff\xeb\xff\xef\xff\xfc\xff\x00\x00\xf6\xff\xdf\xff\xd6\xff\xea\xff\xf7\xff\xf9\xff\xea\xff\xd7\xff\xdc\xff\xf8\xff\x06\x00\xfc\xff\xf2\xff\xfc\xff\xf4\xff\xfe\xff\x18\x00\x17\x00\xff\xff\xf5\xff\x01\x00\x08\x00\x0c\x00\r\x00\xfc\xff\xef\xff\xf0\xff\xf4\xff\xf4\xff\xe9\xff\xdf\xff\xcf\xff\xcf\xff\xe8\xff\xf1\xff\xf4\xff\xf0\xff\xed\xff\xe4\xff\xf1\xff\x03\x00\x06\x00\x00\x00\x04\x00\x0c\x00\xfb\xff\x0b\x00"\x00\x0e\x00\xf0\xff\xf8\xff\x0e\x00\x12\x00\x02\x00\x0e\x00\x05\x00\xdf\xff\xea\xff\xf3\xff\xec\xff\xda\xff\xd0\xff\xc9\xff\xc4\xff\xc6\xff\xc9\xff\xbc\xff\xaa\xff\xb6\xff\xc1\xff\xc8\xff\xd3\xff\xc8\xff\xce\xff\xd3\xff\xdf\xff\xec\xff\xeb\xff\xea\xff\xf2\xff\x00\x00\t\x00\x10\x00\n\x00\xf8\xff\xf6\xff\xf7\xff\xf1\xff\xf5\xff\xf9\xff\xf3\xff\xea\xff\xe4\xff\xef\xff\xe3\xff\xd8\xff\xd6\xff\xda\xff\xe9\xff\xf1\xff\xf7\xff\xf0\xff\xe5\xff\xde\xff\xeb\xff\xfe\xff\xfb\xff\xfa\xff\xfe\xff\xfd\xff\x00\x00\n\x00\x0c\x00\xfe\xff\xf2\xff\xfb\xff\x05\x00\x15\x00\x1d\x00\x0c\x00\x03\x00\x05\x00\x0c\x00\r\x00\x10\x00\x11\x00\x10\x00\x0b\x00\x12\x00\x12\x00\x08\x00\x06\x00\x00\x00\x03\x00\x0f\x00"\x00\x1e\x00\x0f\x00\t\x00\x07\x00\x06\x00\x15\x00\x16\x00\x00\x00\xf6\xff\x00\x00\x13\x00\x0b\x00\xf9\xff\xe9\xff\xe1\xff\xe3\xff\xf7\xff\x00\x00\xee\xff\xd9\xff\xd8\xff\xec\xff\xf0\xff\xf7\xff\xfa\xff\xf6\xff\xf8\xf
f\x05\x00\x16\x00\x14\x00\t\x00\x04\x00\x04\x00\x0e\x00 \x00,\x00&\x00\x1f\x00\x19\x00%\x00)\x00\x1b\x00\x10\x00\n\x00\x07\x00\r\x00\x18\x00\x04\x00\xfc\xff\t\x00\x07\x00\x05\x00\x0b\x00\t\x00\r\x00\x03\x00\x02\x00\x05\x00\r\x00\x1e\x00 \x00 \x00\x1d\x00\'\x00"\x00\x13\x00\x12\x00\x18\x00\x14\x00\x13\x00\x12\x00\x1c\x00 \x00\x1f\x00#\x00\x18\x00\x17\x00\x1a\x00\x1a\x00\x18\x00 \x00\x1c\x00\x10\x00\x13\x00\x04\x00\xf9\xff\xfe\xff\x0e\x00\x11\x00\t\x00\x05\x00\x00\x00\xfe\xff\x08\x00\t\x00\xfe\xff\xf8\xff\xf8\xff\x00\x00\x00\x00\x00\x00\x08\x00\xfb\xff\xfc\xff\x05\x00\x04\x00\x00\x00\xfe\xff\xf8\xff\xf5\xff\xf5\xff\xf7\xff\xfb\xff\x01\x00\x01\x00\xf6\xff\xf6\xff\xfc\xff\xfc\xff\xf7\xff\xf9\xff\xfc\xff\xf9\xff\xf8\xff\xff\xff\xfe\xff\xf7\xff\xf4\xff\xed\xff\xe8\xff\xed\xff\xec\xff\xe3\xff\xe8\xff\xe4\xff\xde\xff\xe1\xff\xe7\xff\xde\xff\xdc\xff\xe2\xff\xe1\xff\xe7\xff\xf6\xff\xfc\xff\xfb\xff\x00\x00\x0c\x00\x07\x00\x04\x00\x06\x00\xfd\xff\xfd\xff\xfe\xff\xfb\xff\xf8\xff\xed\xff\xee\xff\xf3\xff\xf1\xff\xef\xff\xeb\xff\xea\xff\xeb\xff\xf3\xff\xfc\xff\xfa\xff\xf7\xff\xfb\xff\x05\x00\x06\x00\x00\x00\xfe\xff\xff\xff\xfa\xff\xf3\xff\xfa\xff\x01\x00\xfe\xff\xfb\xff\x00\x00\xf7\xff\xf9\xff\xfa\xff\x02\x00\x06\x00\x05\x00\x02\x00\xff\xff\x05\x00\xfd\xff\xfd\xff\xf5\xff\xf4\xff\xf3\xff\xea\xff\xe9\xff\xe6\xff\xe4\xff\xdb\xff\xd6\xff\xdb\xff\xe5\xff\xe4\xff\xe3\xff\xd4\xff\xd4\xff\xe1\xff\xdd\xff\xe6\xff\xe8\xff\xf1\xff\xf5\xff\xf4\xff\xf6\xff\xf4\xff\xf4\xff\xe8\xff\xe5\xff\xeb\xff\xef\xff\xf1\xff\xf1\xff\xe4\xff\xdb\xff\xe0\xff\xe2\xff\xe9\xff\xec\xff\xeb\xff\xeb\xff\xe9\xff\xf0\xff\xf8\xff\xf0\xff\xee\xff\xf5\xff\xfb\xff\xf6\xff\xfc\xff\x00\x00\xfa\xff\x01\x00\x00\x00\x05\x00\x06\x00\x03\x00\xff\xff\xf6\xff\x03\x00\x06\x00\xfe\xff\x02\x00\x06\x00\xfc\xff\xfc\xff\x00\x00\xfe\xff\x00\x00\xfe\xff\xf9\xff\xfc\xff\x03\x00\x03\x00\x00\x00\xff\xff\x02\x00\x10\x00\x11\x00\x10\x00\x13\x00\x1c\x00"\x00\x1b\x00\x14\x00\x13\x00\x11\x00\x10\x00\x0b\x00\x00\x00\xfc\xff\xfd\xff\x00\x00\xfc\xff\xf5\xff\xf2\xff\xf7\xff\xf9\xff\xfe\xff\t\x00\x0c\x00\x12\x00\x10\x00\x06\x00\x02\x00\r\x00\r\x00\x08\x00\x03\x00\x04\x00\x05\x00\xfa\xff\xf9\xff\xfb\xff\xf9\xff\xf7\xff\xff\xff\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\xfd\xff\xfe\xff\x01\x00\x07\x00\x0c\x00\x12\x00\x18\x00\x11\x00\x14\x00#\x00\x1f\x00\x1f\x00(\x00\x1f\x00\x1e\x00&\x00\x1f\x00\x17\x00\x11\x00\x15\x00\x16\x00\x14\x00\x16\x00\x13\x00\r\x00\x15\x00\x1b\x00\x0f\x00\x16\x00 
\x00\x1c\x00\x16\x00\x13\x00\x0f\x00\x0f\x00\x0f\x00\x10\x00\x0e\x00\n\x00\n\x00\x02\x00\x05\x00\x04\x00\x04\x00\x00\x00\x00\x00\x04\x00\x05\x00\x04\x00\xfd\xff\xf8\xff\xfb\xff\xfa\xff\xfa\xff\xf8\xff\xf8\xff\xfb\xff\xfa\xff\xf5\xff\xf7\xff\xf7\xff\xfa\xff\xf6\xff\xf6\xff\xfa\xff\xf9\xff\xf9\xff\xf8\xff\xfb\xff\xff\xff\x00\x00\xfb\xff\xfb\xff\x04\x00\x04\x00\xfd\xff\xfa\xff\x05\x00\x01\x00\x07\x00\x13\x00\r\x00\x03\x00\x04\x00\x01\x00\xff\xff\xfc\xff\xfd\xff\xfb\xff\x02\x00\t\x00\xff\xff\xfd\xff\xfd\xff\xfd\xff\xf4\xff\xee\xff\xfb\xff\x01\x00\xfb\xff\xfb\xff\x00\x00\xff\xff\x01\x00\x06\x00\x07\x00\x00\x00\x02\x00\x07\x00\x00\x00\x01\x00\xfb\xff\xf3\xff\xee\xff\xee\xff\xf1\xff\xf4\xff\xee\xff\xf5\xff\xfb\xff\xef\xff\xea\xff\xe8\xff\xef\xff\xec\xff\xf2\xff\xf1\xff\xec\xff\xee\xff\xeb\xff\xe7\xff\xe4\xff\xed\xff\xef\xff\xec\xff\xec\xff\xea\xff\xef\xff\xeb\xff\xe7\xff\xed\xff\xf7\xff\xf9\xff\xff\xff\xfe\xff\xf3\xff\xef\xff\xee\xff\xf3\xff\xf0\xff\xee\xff\xee\xff\xea\xff\xe6\xff\xe6\xff\xe9\xff\xe7\xff\xf0\xff\xe9\xff\xe3\xff\xe7\xff\xe7\xff\xec\xff\xef\xff\xf1\xff\xf4\xff\xf5\xff\xf1\xff\xf4\xff\x00\x00\xf8\xff\xf9\xff\x00\x00\xfb\xff\xfc\xff\xfd\xff\x00\x00\xf8\xff\xf9\xff\xfb\xff\xf4\xff\xef\xff\xf8\xff\xf6\xff\xf4\xff\xf8\xff\xf9\xff\xfb\xff\xfa\xff\xfc\xff\xf8\xff\xff\xff\x00\x00\xfd\xff\xf7\xff\xf6\xff\xf8\xff\xfc\xff\x06\x00\x04\x00\x02\x00\x08\x00\x05\x00\xfd\xff\xfe\xff\xfe\xff\xfc\xff\xfb\xff\xf8\xff\xef\xff\xf2\xff\xf2\xff\xf1\xff\xf5\xff\xec\xff\xec\xff\xf0\xff\xef\xff\xec\xff\xec\xff\xf1\xff\xf6\xff\xf4\xff\xf5\xff\xfa\xff\xfe\xff\x05\x00\x04\x00\x04\x00\x05\x00\x03\x00\x03\x00\x04\x00\t\x00\x0c\x00\t\x00\t\x00\t\x00\x08\x00\x08\x00\t\x00\r\x00\r\x00\x0c\x00\n\x00\t\x00\x08\x00\x08\x00\x0c\x00\x0c\x00\x0b\x00\n\x00\x08\x00\n\x00\x0c\x00\x03\x00\x04\x00\x00\x00\x05\x00\x0e\x00\r\x00\x0c\x00\r\x00\x15\x00\x0e\x00\t\x00\x0e\x00\x11\x00\x0b\x00\x0b\x00\t\x00\x0e\x00\x15\x00\x14\x00\x15\x00\x0e\x00\x0f\x00\x14\x00\x16\x00\x12\x00\x16\x00\x19\x00\x12\x00\x11\x00\x0b\x00\x01\x00\x01\x00\n\x00\x0b\x00\x0f\x00\x14\x00\x12\x00\x10\x00\x17\x00\x14\x00\x16\x00\x15\x00\n\x00\x0b\x00\x0f\x00\r\x00\x0f\x00\n\x00\r\x00\x10\x00\r\x00\r\x00\x10\x00\r\x00\x04\x00\x00\x00\x07\x00\n\x00\n\x00\n\x00\x08\x00\x05\x00\x02\x00\x06\x00\x05\x00\x07\x00\x04\x00\x02\x00\x05\x00\x02\x00\xff\xff\xfa\xff\xfa\xff\xf9\xff\xf4\xff\xf3\xff\xf6\xff\xf4\xff\xf7\xff\xf7\xff\xf8\xff\xf6\xff\xf8\xff\xee\xff\xea\xff\xee\xff\xf2\xff\xf6\xff\xfa\xff\xfc\xff\x00\x00\x01\x00\x05\x00\x06\x00\x04\x00\x03\x00\x00\x00\x01\x00\xfd\xff\xfb\xff\xfa\xff\xf4\xff\xf4\xff\xf7\xff\xf6\xff\xf3\xff\xf3\xff\xf2\xff\xf3\xff\xf5\xff\xf4\xff\xf4\xff\xfc\xff\xfe\xff\x00\x00\x02\x00\xfe\xff\xfa\xff\xfc\xff\xfc\xff\xf7\xff\xf8\xff\xfd\xff\xfe\xff\xfd\xff\xfd\xff\xfc\xff\xfd\xff\xf6\xff\xf5\xff\xf5\xff\xfa\xff\xfc\xff\xfa\xff\xfc\xff\xf9\xff\xfa\xff\xf4\xff\xf6\xff\xf4\xff\xee\xff\xee\xff\xf0\xff\xee\xff\xed\xff\xea\xff\xea\xff\xea\xff\xe5\xff\xeb\xff\xe8\xff\xed\xff\xf2\xff\xed\xff\xf3\xff\xf1\xff\xf5\xff\xf4\xff\xf7\xff\xfa\xff\xf2\xff\xf6\xff\xf0\xff\xea\xff\xef\xff\xf2\xff\xef\xff\xec\xff\xe6\xff\xe3\xff\xe3\xff\xe1\xff\xe5\xff\xec\xff\xe6\xff\xe6\xff\xea\xff\xee\xff\xf4\xff\xf3\xff\xf5\xff\xfb\xff\xfd\xff\xf9\xff\xfd\xff\xfa\xff\xf5\xff\xfa\xff\xf9\xff\x00\x00\xfc\xff\xff\xff\xff\xff\xf3\xff\xfb\xff\x00\x00\x00\x00\x00\x00\x00\x00\xfd\xff\x00\x00\xff\xff\xfc\xff\x00\x00\xff\xff\xfe\xff\x01\x00\x04\x00\xff\xff\xfc\xff\xff\xff\x02\x00\x03\x00\x05\x00\x03\x00\x03\x00\x06\x00\x07\x00\x04\x00\x03\x00\x07\x00\x03\x00\x00\x00\xfe\xff\xf9\xff\xfb\xff\xf
9\xff\xfa\xff\xfa\xff\xf7\xff\xf4\xff\xf5\xff\xf4\xff\xf9\xff\xfe\xff\x00\x00\x07\x00\x05\x00\x04\x00\x02\x00\x04\x00\x08\x00\x03\x00\x01\x00\x01\x00\x04\x00\x01\x00\x02\x00\x05\x00\x02\x00\x00\x00\x06\x00\x08\x00\n\x00\x07\x00\t\x00\n\x00\x02\x00\x04\x00\x05\x00\x07\x00\x04\x00\x07\x00\x02\x00\x00\x00\x04\x00\x0c\x00\t\x00\x08\x00\x0c\x00\x0b\x00\r\x00\r\x00\x0c\x00\x0c\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x18\x00\x16\x00\x18\x00\x15\x00\x13\x00\x14\x00\x13\x00\x12\x00\x0f\x00\r\x00\t\x00\x07\x00\x06\x00\x06\x00\x06\x00\x04\x00\x00\x00\xfc\xff\x01\x00\x03\x00\x07\x00\x02\x00\x02\x00\x07\x00\x07\x00\t\x00\x07\x00\t\x00\t\x00\x06\x00\x08\x00\x05\x00\x03\x00\xff\xff\x00\x00\xff\xff\xff\xff\x02\x00\x04\x00\x07\x00\x06\x00\x02\x00\x02\x00\x04\x00\x01\x00\x01\x00\x05\x00\x03\x00\xff\xff\xfa\xff\xff\xff\xff\xff\xfa\xff\xf9\xff\x00\x00\xff\xff\xfd\xff\x01\x00\x00\x00\xfe\xff\x00\x00\xfd\xff\x00\x00\x00\x00\x02\x00\x02\x00\x06\x00\x08\x00\x02\x00\x01\x00\x04\x00\x07\x00\x03\x00\x00\x00\x05\x00\x03\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x02\x00\xfe\xff\xf9\xff\xfa\xff\xfb\xff\xfd\xff\xf8\xff\xf5\xff\xf3\xff\xed\xff\xf2\xff\xf5\xff\xf3\xff\xf6\xff\xf7\xff\xf6\xff\xf7\xff\xf6\xff\xf8\xff\xf5\xff\xfe\xff\xfd\xff\xff\xff\x04\x00\x00\x00\xfa\xff\xfa\xff\xfc\xff\xf6\xff\xf4\xff\xf8\xff\xf8\xff\xfa\xff\xfa\xff\xf6\xff\xfa\xff\xfc\xff\xfe\xff\x03\x00\x00\x00\xf7\xff\xf4\xff\xf4\xff\xf8\xff\xf8\xff\xf8\xff\xfa\xff\xf4\xff\xf1\xff\xf3\xff\xf4\xff\xf2\xff\xf0\xff\xef\xff\xef\xff\xeb\xff\xee\xff\xee\xff\xf0\xff\xf2\xff\xf3\xff\xf5\xff\xf2\xff\xf2\xff\xf7\xff\xf5\xff\xf1\xff\xf6\xff\xf9\xff\xfa\xff\xf3\xff\xf1\xff\xef\xff\xed\xff\xe8\xff\xe9\xff\xea\xff\xf1\xff\xf1\xff\xf0\xff\xf7\xff\xf6\xff\xfc\xff\xf7\xff\xf8\xff\xf7\xff\xfd\xff\x00\x00\xf6\xff\xf6\xff\xfb\xff\xfb\xff\xfe\xff\x04\x00\xfe\xff\xf9\xff\x01\x00\xff\xff\xfd\xff\xff\xff\xfe\xff\xfa\xff\xfb\xff\xfc\xff\xfc\xff\xff\xff\xfb\xff\xfc\xff\xfe\xff\xfb\xff\xfd\xff\xfd\xff\xfa\xff\xf9\xff\xfa\xff\xfd\xff\xfe\xff\xfd\xff\x00\x00\xff\xff\x00\x00\x03\x00\x02\x00\x03\x00\x02\x00\x03\x00\x04\x00\x04\x00\x06\x00\x05\x00\x03\x00\x01\x00\xff\xff\xff\xff\xfa\xff\xfa\xff\xfd\xff\xfe\xff\xfc\xff\xfe\xff\xfe\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xfd\xff\xf8\xff\xf9\xff\xf6\xff\xf3\xff\xfa\xff\xfa\xff\xf6\xff\xf8\xff\xfc\xff\xfa\xff\xfa\xff\xf9\xff\xfb\xff\xf9\xff\xff\xff\xff\xff\xfe\xff\x00\x00\xff\xff\xfe\xff\xfc\xff\x00\x00\x01\x00\x06\x00\x00\x00\x03\x00\t\x00\t\x00\t\x00\x06\x00\x04\x00\x02\x00\x0c\x00\t\x00\t\x00\x0f\x00\x12\x00\x13\x00\x17\x00\x19\x00\x1d\x00\x1d\x00\x16\x00\x16\x00\x18\x00\x19\x00\x15\x00\x16\x00\x16\x00\x14\x00\x13\x00\x17\x00\x16\x00\x10\x00\r\x00\n\x00\x0b\x00\t\x00\x04\x00\x03\x00\x04\x00\x03\x00\x03\x00\x05\x00\x02\x00\x03\x00\x04\x00\x02\x00\x02\x00\xff\xff\xfc\xff\xfb\xff\xff\xff\xff\xff\xfb\xff\xfe\xff\x01\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xff\xff\x02\x00\x00\x00\x00\x00\x01\x00\x04\x00\x06\x00\x05\x00\x08\x00\x05\x00\x04\x00\x06\x00\x05\x00\x01\x00\x00\x00\x02\x00\xfc\xff\xfd\xff\xff\xff\xfe\xff\xfa\xff\xfa\xff\xf8\xff\xf4\xff\xfa\xff\xfe\xff\xfa\xff\xfe\xff\xfe\xff\x01\x00\x01\x00\xfc\xff\xfd\xff\xfd\xff\xfc\xff\xfb\xff\xfb\xff\xfd\xff\x00\x00\xfd\xff\xfc\xff\x00\x00\x00\x00\xfb\xff\xf8\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xfc\xff\xfb\xff\xfd\xff\xfa\xff\xf9\xff\xf8\xff\xf6\xff\xf7\xff\xf6\xff\xf7\xff\xf7\xff\xf6\xff\xf8\xff\xf7\xff\xf7\xff\xf7\xff\xfa\xff\xfb\xff\xfa\xff\xfc\xff\x01\x00\xfc\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xf7\xff\xfa\xff\xfa\xff\xf6
\xff\xf7\xff\xf9\xff\xf6\xff\xf4\xff\xf3\xff\xf0\xff\xf2\xff\xf0\xff\xed\xff\xf2\xff\xf2\xff\xf2\xff\xf6\xff\xf6\xff\xf7\xff\xf5\xff\xf3\xff\xf6\xff\xf8\xff\xf7\xff\xf4\xff\xf3\xff\xef\xff\xf2\xff\xf0\xff\xf1\xff\xef\xff\xf2\xff\xf4\xff\xeb\xff\xef\xff\xf3\xff\xf5\xff\xf3\xff\xf6\xff\xf7\xff\xf3\xff\xf5\xff\xf5\xff\xf6\xff\xf5\xff\xf4\xff\xf5\xff\xf7\xff\xf3\xff\xf3\xff\xf4\xff\xf8\xff\xf8\xff\xf6\xff\xfa\xff\xfd\xff\xfa\xff\xf8\xff\xfb\xff\xfc\xff\xfe\xff\xfa\xff\xf8\xff\xf6\xff\xf7\xff\xf9\xff\xf8\xff\xf8\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xf8\xff\xf9\xff\xff\xff\xfe\xff\xfd\xff\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x01\x00\x01\x00\x05\x00\xff\xff\xfe\xff\xff\xff\xfd\xff\xfa\xff\xfd\xff\xfc\xff\xfa\xff\xfc\xff\xfe\xff\xfb\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\xfa\xff\xf9\xff\xfb\xff\x00\x00\x00\x00\x02\x00\x01\x00\x04\x00\x06\x00\x04\x00\x05\x00\x05\x00\x06\x00\x06\x00\x07\x00\t\x00\t\x00\t\x00\x03\x00\x01\x00\x04\x00\x02\x00\x01\x00\x02\x00\x02\x00\x00\x00\x00\x00\x03\x00\x06\x00\x02\x00\x05\x00\x08\x00\x05\x00\x07\x00\t\x00\x08\x00\x05\x00\x06\x00\x0c\x00\x0c\x00\x08\x00\t\x00\x0b\x00\x0b\x00\t\x00\n\x00\x08\x00\x07\x00\n\x00\x04\x00\x06\x00\x08\x00\x08\x00\x08\x00\t\x00\x08\x00\x07\x00\x08\x00\x08\x00\x06\x00\x06\x00\t\x00\n\x00\x06\x00\x08\x00\n\x00\x07\x00\t\x00\x0e\x00\r\x00\x0c\x00\n\x00\x08\x00\x05\x00\x06\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xff\xfb\xff\xfd\xff\xfa\xff\xf7\xff\xfd\xff\xfd\xff\xf8\xff\xfc\xff\xfc\xff\xfe\xff\xfc\xff\xfb\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfb\xff\xff\xff\xfe\xff\xff\xff\xfc\xff\xfb\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xfb\xff\xfa\xff\xf8\xff\xf7\xff\xf9\xff\xff\xff\x00\x00\x02\x00\x00\x00\xff\xff\xfb\xff\xfd\xff\xfe\xff\xf8\xff\xfa\xff\xff\xff\xfc\xff\xfb\xff\xfd\xff\xf9\xff\xf5\xff\xf6\xff\xfa\xff\xf9\xff\xf7\xff\xf6\xff\xf2\xff\xf7\xff\xf7\xff\xf7\xff\xf8\xff\xf5\xff\xf5\xff\xf7\xff\xf5\xff\xf5\xff\xf5\xff\xf9\xff\xf7\xff\xf0\xff\xf1\xff\xef\xff\xef\xff\xea\xff\xe8\xff\xea\xff\xed\xff\xee\xff\xf0\xff\xf7\xff\xf6\xff\xf8\xff\xf3\xff\xf1\xff\xf2\xff\xf4\xff\xf7\xff\xf5\xff\xf4\xff\xf5\xff\xf4\xff\xf6\xff\xf7\xff\xf4\xff\xf3\xff\xf6\xff\xf5\xff\xf1\xff\xf5\xff\xfa\xff\xf7\xff\xf7\xff\xf8\xff\xfa\xff\xfe\xff\xfc\xff\xfc\xff\xfa\xff\xf7\xff\xf6\xff\xf7\xff\xf7\xff\xfa\xff\xf7\xff\xf7\xff\xfd\xff\xfb\xff\xfb\xff\xfa\xff\xfb\xff\xff\xff\xff\xff\x00\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\xfe\xff\xfe\xff\xfd\xff\xff\xff\x01\x00\xfd\xff\xfb\xff\xfc\xff\xfe\xff\xfd\xff\xfb\xff\xfc\xff\xf9\xff\xfa\xff\xfb\xff\xfc\xff\xfd\xff\xf8\xff\xf6\xff\xf4\xff\xf4\xff\xf9\xff\xf9\xff\xf6\xff\xfa\xff\xf9\xff\xf8\xff\xfb\xff\xfb\xff\xfa\xff\xf8\xff\xfb\xff\xfd\xff\xfb\xff\xfd\xff\xfb\xff\xf9\xff\xf9\xff\xfd\xff\xfe\xff\xfd\xff\xfb\xff\xfb\xff\xfc\xff\xfd\xff\xfc\xff\xfc\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x03\x00\x06\x00\x06\x00\x07\x00\x08\x00\x08\x00\n\x00\x0b\x00\x08\x00\t\x00\x08\x00\n\x00\x08\x00\x08\x00\x05\x00\x08\x00\t\x00\n\x00\x0b\x00\t\x00\x05\x00\x03\x00\x04\x00\x06\x00\x04\x00\x01\x00\x02\x00\x03\x00\x03\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xfc\xff\xfc\xff\x03\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x01\x00\x01\x00\x02\x00\x01\x00\xfd\xff\x02\x00\x03\x00\x00\x00\x00\x00\x00\x00\x01\x00\x03\x00\x01\x00\x03\x00\x00\x00\x00\x00\x04\x00\x04\x00\x04\x00\x06\x00\n\x00\x0
3\x00\x03\x00\x04\x00\x05\x00\x06\x00\x03\x00\x02\x00\x03\x00\x06\x00\x08\x00\x04\x00\x07\x00\t\x00\x08\x00\x07\x00\x07\x00\x08\x00\x07\x00\x07\x00\x07\x00\x08\x00\t\x00\t\x00\x07\x00\x05\x00\x06\x00\x05\x00\x01\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\xfe\xff\xfc\xff\xfe\xff\xfc\xff\xf9\xff\xf7\xff\xf7\xff\xf8\xff\xf7\xff\xf7\xff\xf8\xff\xf5\xff\xf6\xff\xf4\xff\xf5\xff\xf6\xff\xf7\xff\xf8\xff\xfb\xff\xfd\xff\xfc\xff\xf8\xff\xfa\xff\xfc\xff\xfd\xff\xff\xff\xfb\xff\xfd\xff\xf8\xff\xf4\xff\xf6\xff\xf7\xff\xf5\xff\xf6\xff\xf4\xff\xf4\xff\xf4\xff\xf2\xff\xf4\xff\xf7\xff\xf6\xff\xf4\xff\xfa\xff\xfb\xff\xfa\xff\xf4\xff\xf7\xff\xf8\xff\xfa\xff\xf9\xff\xf6\xff\xfa\xff\xf8\xff\xf9\xff\xf8\xff\xf8\xff\xf8\xff\xfa\xff\xf8\xff\xf5\xff\xf8\xff\xf9\xff\xfc\xff\xfc\xff\xfe\xff\xff\xff\xfc\xff\xfb\xff\xfb\xff\xfc\xff\xf9\xff\xf8\xff\xf9\xff\xf8\xff\xf3\xff\xf2\xff\xf3\xff\xf3\xff\xef\xff\xf0\xff\xf2\xff\xf3\xff\xf2\xff\xf2\xff\xf7\xff\xf7\xff\xfa\xff\xf8\xff\xf7\xff\xf7\xff\xf5\xff\xf7\xff\xf8\xff\xf8\xff\xf9\xff\xfa\xff\xf9\xff\xfc\xff\xfa\xff\xfa\xff\xfc\xff\xfb\xff\xfd\xff\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xfb\xff\xff\xff\xfc\xff\xfb\xff\xfd\xff\xfb\xff\xfe\xff\xfd\xff\xfe\xff\x00\x00\xf8\xff\xfc\xff\xfe\xff\xfd\xff\xfb\xff\xfd\xff\xfc\xff\xf8\xff\xf9\xff\xfa\xff\xf7\xff\xf7\xff\xfa\xff\xf9\xff\xfa\xff\xf6\xff\xf6\xff\xf7\xff\xfb\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\xfb\xff\xf9\xff\xfd\xff\xfc\xff\xfb\xff\xfa\xff\xfa\xff\xf7\xff\xf7\xff\xfc\xff\xfa\xff\xfc\xff\xfd\xff\xfd\xff\x00\x00\x00\x00\xfa\xff\xf8\xff\xfa\xff\xf7\xff\xf3\xff\xf6\xff\xf7\xff\xf6\xff\xf8\xff\xfa\xff\xf7\xff\xf7\xff\xf6\xff\xfb\xff\xfb\xff\xfc\xff\xfd\xff\xfd\xff\x03\x00\x00\x00\x04\x00\t\x00\x05\x00\x07\x00\t\x00\t\x00\x08\x00\t\x00\n\x00\n\x00\x05\x00\x02\x00\x02\x00\x02\x00\x00\x00\x01\x00\x01\x00\x02\x00\x07\x00\x05\x00\x06\x00\x04\x00\x01\x00\x00\x00\x02\x00\x02\x00\x00\x00\x05\x00\t\x00\x0b\x00\n\x00\x0b\x00\x0c\x00\x0b\x00\t\x00\n\x00\t\x00\n\x00\r\x00\x0e\x00\r\x00\x08\x00\x05\x00\x02\x00\x04\x00\x02\x00\x01\x00\x03\x00\x00\x00\xff\xff\x03\x00\x05\x00\x04\x00\x03\x00\x00\x00\x00\x00\x06\x00\t\x00\x04\x00\x01\x00\x01\x00\xfe\xff\x00\x00\x00\x00\x00\x00\xfc\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\x01\x00\x02\x00\x00\x00\x03\x00\x02\x00\x04\x00\x05\x00\x01\x00\x02\x00\x03\x00\x04\x00\x03\x00\x04\x00\x06\x00\x06\x00\x01\x00\xfe\xff\xfd\xff\xfb\xff\xfa\xff\xfb\xff\xfc\xff\xf8\xff\xf5\xff\xf6\xff\xf6\xff\xf5\xff\xf6\xff\xf9\xff\xf8\xff\xf5\xff\xf6\xff\xf4\xff\xf1\xff\xf4\xff\xf4\xff\xf2\xff\xee\xff\xee\xff\xef\xff\xee\xff\xf0\xff\xf2\xff\xf2\xff\xf5\xff\xef\xff\xf0\xff\xee\xff\xf1\xff\xf3\xff\xf1\xff\xf0\xff\xf3\xff\xf3\xff\xf3\xff\xf0\xff\xf4\xff\xf8\xff\xee\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +# --- +# name: test_pipeline_error + 
b'\'\xff\x9d\xfe\xc7\xfe\x92\xfe\x88\xfe\xe2\xfe\x02\x00\x9a\x00!\x00H\xff$\xff|\xff\x94\xff1\xff\xd6\xfe\xdf\xfe8\xffj\xff*\xff\xba\xfe\x99\xfe\xf1\xfe\\\xff\x87\xff\x84\xffs\xff?\xff\xf5\xfe\xce\xfe\xd7\xfe\x0e\xff\x8e\xff\xed\xff\xea\xff\xd2\xff\xcf\xff\xa4\xffP\xff\x1b\xff=\xff\x8e\xff\xbe\xff\xd1\xff\xe9\xff\x01\x00\xdf\xffe\xff\xc9\xfe\x88\xfe\xd6\xfe[\xff\x9e\xff\x9d\xff\x9c\xff\xbe\xff\xde\xff\xc5\xff\x95\xff\x98\xff\xc7\xff\xf0\xff\n\x00\x15\x00\xf3\xff\xba\xff\x9a\xff\xae\xff\xe5\xff\r\x00\x15\x00!\x00A\x00Z\x00[\x00A\x00\r\x00\xee\xff\r\x00V\x00\x8a\x00\x89\x00p\x00l\x00\x98\x00\xe2\x00\x13\x01\xff\x00\xc6\x00\xa9\x00\xae\x00\x9e\x00x\x00_\x00x\x00\xc9\x00\x10\x01%\x01)\x01\x1c\x01\xea\x00\xa1\x00j\x00\x85\x00\xf7\x00i\x01q\x01\x1e\x01\xe0\x00\xea\x00\n\x01\n\x01\xe0\x00\xb3\x00\xb3\x00\xeb\x00.\x01K\x01=\x01\xff\x00\xae\x00\x81\x00\x97\x00\xd6\x00\x10\x016\x01K\x010\x01\xe6\x00\x9f\x00^\x00\'\x00*\x00|\x00\xdf\x00\xfa\x00\xcc\x00\x94\x00X\x00\xfa\xff\xc0\xff\xfb\xff\x89\x00\xed\x00\xe3\x00\xa5\x00\x81\x00\x88\x00\x95\x00\x89\x00q\x00c\x00S\x00B\x005\x00\'\x000\x00H\x00H\x00<\x007\x00#\x00\xe8\xff\xa3\xff\xba\xff?\x00\x9d\x00l\x00\xf8\xff\xb9\xff\xbf\xff\xd3\xff\xdd\xff\xe6\xff\xf3\xff\x02\x00"\x008\x00+\x00\n\x00\xf8\xff\x04\x00\r\x00\xf4\xff\xc1\xff\xa9\xff\xd8\xffI\x00\xba\x00\xd3\x00u\x00\xf1\xff\x97\xffh\xffY\xff}\xff\xcf\xff8\x00}\x00r\x008\x00\t\x00\xfb\xff\x02\x00\x12\x003\x00l\x00\x8b\x00^\x00!\x004\x00b\x00.\x00\x1a\x00\xa2\x00\xfa\x00\x93\x00\xed\xff\xa7\xff\xd8\xff(\x00<\x00\x04\x00\xd4\xff\xf7\xffR\x00\x88\x00W\x00\xef\xff\x94\xffm\xffW\xff\xde\xfe_\xff\xb3\x01\x86\x02v\x00\x87\xfe\xae\xfe\xb6\xff\xe5\xffg\xff\x1d\xffF\xff\xa4\xff\xe3\xff\xdf\xff\xdb\xff\xed\xff\xf0\xff\xc1\xffl\xffm\xff\xce\xff\xf8\xff\xc1\xff\x8d\xff\xa7\xff\x05\x00\x83\x00\xde\x00\xed\x00\xad\x00\x0c\x00@\xff\xcb\xfe\x0c\xff\xec\xff\xbb\x00\x03\x01\x04\x01\xd6\x00c\x00\xe0\xffz\xff>\xffh\xff\xf7\xffw\x00\xa0\x00{\x00\x0f\x00\\\xff\xb3\xfe\xb3\xfe\xb6\xff\xe7\x00\x0e\x01>\x00\x92\xff\xbc\xffY\x00\xa1\x00N\x00\xcb\xff|\xffn\xff\x81\xff\xb3\xff,\x00\xb9\x00\xc6\x00R\x00\x01\x00\x1e\x00_\x00`\x00 
\x00\xd8\xff\xc5\xff\xf4\xff6\x00`\x00v\x00\x8d\x00\xb4\x00\xe4\x00\xf4\x00\xad\x00,\x00\xbc\xff\x96\xff\xde\xff~\x00.\x01m\x01\xea\x00/\x00\xd8\xff\xb5\xff\xa3\xff\xcb\xff\xfc\xff\xee\xff\xa6\xff\x8d\xff\x00\x00\xd2\x00c\x01$\x01\x11\x00\x0c\xff\xe2\xfe;\xfft\xff\x9f\xff$\x00\xd5\x00\x1e\x01\xce\x00E\x00\xda\xffs\xff\xea\xfep\xfe\x80\xfeH\xffW\x00\xf6\x00\x03\x01\xd1\x00S\x00}\xff\xcb\xfe\x8b\xfe\x96\xfe\xcb\xfeB\xff\xee\xff\x86\x00\xd5\x00\xdf\x00y\x00\x94\xff\x9a\xfe\x14\xfe>\xfe\xf1\xfe\xaa\xff\xe9\xff\xe7\xff\x11\x00;\x00\x13\x00\xaa\xffF\xff\x1b\xff%\xffU\xff\xc7\xff\x82\x00\x1a\x01)\x01\xd3\x00\x80\x00C\x00\xde\xffY\xff)\xff\x9c\xffl\x00\x19\x01\\\x017\x01\xc7\x003\x00\xb3\xffq\xffp\xff\xb0\xff,\x00\x9f\x00\xbb\x00\x9b\x00\x91\x00\x8e\x00P\x00\xdb\xffr\xffF\xff]\xff\x9d\xff\xf2\xff/\x00#\x00\xe1\xff\xa8\xff\x8f\xff\x87\xff\x85\xff|\xfff\xffH\xffJ\xff\x85\xff\xd7\xff\x0c\x00\xfe\xff\x98\xff\xe5\xfe6\xfe\x14\xfe\xc7\xfe\xe7\xff\x9a\x00g\x00\xb6\xff=\xff&\xff\x18\xff\xb6\xfe\x11\xfe\xaa\xfd#\xfen\xff\xb7\x002\x01\xc5\x00\xe8\xff(\xff\xd7\xfe\xf4\xfe=\xffl\xfft\xff\x8c\xff\xda\xff\x14\x00\xdc\xffl\xffY\xff\xd0\xffo\x00\xb7\x00m\x00\xb9\xff\x02\xff\x97\xfe\x9b\xfe\x10\xff\xd2\xff\x89\x00\xcb\x00\x8b\x005\x00:\x00\xa8\x00\r\x01\xeb\x00p\x00\x10\x00\xf3\xff\'\x00\x91\x00\xf8\x00V\x01\xa9\x01\xbf\x01j\x01\xd0\x00)\x00\x8c\xff/\xffw\xffg\x00z\x01+\x027\x02\xbf\x01\x19\x01\x85\x00\x05\x00\x80\xff-\xffp\xffD\x00(\x01\xb9\x01\x15\x02[\x026\x02+\x01V\xff\xa0\xfd\n\xfd\xdc\xfd\x92\xff~\x01\xf4\x02G\x03A\x02\x8c\x00U\xff\xed\xfeP\xfe\xeb\xfc\x13\xfc\xa4\xfd/\x01\xec\x03\xc1\x03v\x01\x84\xff\x15\xffZ\xffZ\xffI\xff\xc5\xff\xad\x00?\x01A\x01b\x01\n\x02d\x02|\x01\xe1\xff\x1c\xff\xe5\xff\x89\x01\xb5\x02\xdb\x02V\x02~\x01\\\x00!\xff\x0c\xfeD\xfd\x03\xfd\x84\xfd\xbb\xfeI\x00p\x01\x8d\x01\xac\x00b\xffV\xfe\xbd\xfdZ\xfd\x0c\xfd\x1c\xfd\xe6\xfdO\xff\xda\x00\x07\x02\x93\x02k\x02\xc2\x01\xe2\x00\n\x00w\xffM\xff\xaa\xff\x93\x00\x94\x01*\x02O\x02\'\x02h\x01\xdb\xff\xfa\xfd\xaf\xfc\x8c\xfct\xfd\xd5\xfe@\x00|\x013\x02\xee\x01t\x00L\xfe\x7f\xfc\xd7\xfbR\xfc~\xfd\x03\xff\xae\x00\x0c\x02\x8a\x02\x14\x023\x01H\x00$\xff\xd2\xfd%\xfd\xb6\xfd\xa4\xfe\x95\xfe\xdf\xfdu\xfe\x18\x01i\x03o\x02\x92\xfe$\xfb\xb5\xfa\x03\xfd"\x00\xc4\x02\x8b\x04 \x05\x15\x04\xd2\x01\x86\xff\x13\xfe}\xfdl\xfd\x1b\xfe\t\x00\xa5\x02F\x04\xd2\x03\xde\x01\xdc\xff\xc0\xfey\xfe}\xfe\x9b\xfe\xa9\xfe`\xfe\xc3\xfd2\xfd\xd6\xfc\x97\xfc\xb6\xfc~\xfd\xa9\xfe\xb1\xffS\x00\xad\x00\xd8\x00\x9b\x00\x04\x00v\xff\xe1\xfe\xe9\xfd\xca\xfci\xfc\x8e\xfd\xd4\xff\xba\x018\x02\xb8\x01C\x01_\x01\xd6\x01\xe9\x01\x19\x01\xc3\xff\x8b\xfe\xc6\xfd\xba\xfd\xab\xfe=\x00\x82\x01\xd2\x01a\x01\x02\x01\x0b\x01\xfc\x00]\x00f\xff\xf8\xfez\xff1\x00d\x00F\x00s\x00\x19\x01\xd8\x01%\x02\xe0\x01\x8f\x01\xac\x01\x02\x02N\x02\xd2\x02\xaa\x03T\x04f\x04\xc4\x03\r\x02\xf2\xfe\xc4\xfb*\xfb\xf8\xfd+\x01\x1e\x01\xcb\xfdr\xfa\xdd\xf9\x17\xfcI\xff\xcf\x01\x07\x03\xbd\x02\x06\x01\xc0\xfe \xfd-\xfc\x17\xfb]\xfa\\\xfc2\x02\xce\x08\x1f\x0bj\x073\x01h\xfd\xa7\xfd\x1d\x00\x7f\x02\x95\x03\xdb\x02M\x00\xaa\xfcI\xf9W\xf7\x06\xf7\xd8\xf7~\xf9\xe8\xfb\xcc\xfe`\x01\x87\x02\x88\x01\x05\xff\xb5\xfc\xcc\xfb\xf5\xfbk\xfc<\xfd\xa3\xfe7\x00\x8d\x01\xee\x02t\x04~\x05`\x05w\x04\xe8\x03\x02\x04\xb7\x03\x00\x02G\xffn\xfd\x13\xfe\xb0\x00\xf1\x02J\x03c\x02\xbe\x01\xf2\x01\xbf\x02\xdc\x03\x0f\x05F\x06;\x07%\x07O\x05 
\x02\xfc\xfeJ\xfd\xb7\xfd\xa3\xff\x87\x01\x1b\x02m\x01p\x00\xf6\xff_\x000\x01*\x01\xb6\xff\xae\xfd$\xfc\n\xfb\xbe\xf9\x08\xf9T\xfbO\x01\x97\x07\x13\t\x06\x04I\xfc\xce\xf7,\xf9U\xfe\x03\x04O\x08\x02\n6\x08\x83\x03\xe6\xfd!\xf9\r\xf66\xf5=\xf7\x8a\xfb\x07\x00"\x03\xdd\x04\xbe\x05\x82\x05\x98\x03h\x00*\xfd\x94\xfa\xc1\xf8\xf3\xf7y\xf88\xfa\xbc\xfc\x80\xff\xc2\x01\xd2\x02\x98\x02\xb2\x01\x1d\x01\xe0\x00\x93\xffX\xfc\xa3\xf8\x1c\xf7@\xf9"\xfeM\x03\xc1\x06!\x08\x01\x08\xef\x06\xce\x05\xb5\x05\xfa\x06\xfb\x08m\n\xf7\t\x1b\x07\x87\x02\x1d\xfe\xe3\xfbZ\xfc7\xfe\xfb\xff\x1d\x01\xa3\x01}\x01\x88\x00\xfc\xfe\x89\xfd\xe3\xfc\'\xfd\xbb\xfd\x16\xfe\x1d\xfe\x9a\xfd\x9d\xfc\xd2\xfc0\x00h\x05\x10\x08\xa8\x05\xca\x00\xfd\xfd\xf1\xfe\xd4\x01F\x04\x9f\x05\xcf\x05\x9d\x03\xe1\xfd=\xf6C\xf1w\xf2\x1a\xf8\x93\xfcO\xfcE\xf9A\xf8I\xfb\xd1\xff)\x02\x8d\x01\x01\x00\xf5\xfe\xc2\xfd\x83\xfb\xfc\xf8\xec\xf7\\\xf9\x03\xfd\xa2\x01-\x05\x12\x06\xea\x04\xdd\x03,\x04\xd8\x04\xec\x03\x91\x00\xea\xfbJ\xf8s\xf7/\xf9\xf3\xfb\xa9\xfe\xea\x00T\x02\xb1\x02\xa9\x029\x03b\x04h\x05\x9d\x054\x04\xce\x00\xb1\xfc\x1a\xfa\\\xfa\x08\xfd\x98\x00m\x03\xdd\x049\x05\xa2\x04!\x03\\\x01N\x00w\x00z\x01r\x02\x95\x02I\x01o\xfeZ\xfb\xb4\xfa4\xfe\xae\x03s\x06\xaa\x04j\x01J\x004\x01X\x02\xaf\x03\x8c\x05<\x06\xc5\x03\xce\xfe\xea\xf9\xfb\xf6B\xf6r\xf7\r\xfaC\xfd]\x00\xb0\x02f\x03G\x02C\x00\x9f\xfe\xf5\xfd\xa8\xfdN\xfc\x1d\xf9\xf9\xf5\xb2\xf6L\xfc\xb1\x02\xa6\x04f\x01\x13\xfd\x1d\xfc]\xff\x06\x04~\x06}\x05d\x02\x1c\xff\x87\xfc\xa5\xfa\xc0\xf9\xe1\xfa\x9d\xfe\xce\x03\x05\x08\xa6\t6\tH\x08\xc1\x07\xa6\x07\xbf\x07v\x07\xad\x05\xdd\x01F\xfd\xfe\xf9E\xf9\xef\xfa\xab\xfd\x8f\xff\xa9\xff\xdf\xfe\xa5\xfe\n\xff!\xff~\xfej\xfdZ\xfc\x84\xfb\xa8\xfav\xf9\x02\xf9\xb5\xfb\x06\x02M\x08e\nM\x08P\x05\xea\x03H\x04\xe1\x05!\x08g\t\xc9\x07<\x03\xb1\xfdT\xf9\x1e\xf7\xf5\xf6y\xf8\xfd\xfae\xfd\x17\xffD\x00\xc5\x00\xfe\xffl\xfe\xb6\xfdG\xfex\xfe \xfd-\xfb6\xfa\xe3\xfa\xda\xfc\x8c\xff\\\x02\x97\x04\xc6\x05\xd8\x05G\x05x\x04\x0c\x03\xb8\x00\x00\xfe\x85\xfb~\xf9C\xf8`\xf9X\xfe\x1b\x05Z\x08\xa2\x05\xa4\x00|\xfe?\x00\xd6\x038\x07Y\t\xa4\t\xdd\x07_\x04\x1e\x00R\xfc%\xfan\xfa\x17\xfd\xc5\x00\x80\x03.\x04U\x03&\x029\x01\xaa\x00\x99\x00\x0f\x01\xf7\x00u\xfe\x90\xf9\xbd\xf5\x1a\xf7=\xfd\x9e\x02\xf9\x02\x1c\x00\xee\xfe \x01"\x04\x9b\x05G\x052\x03c\xff\x02\xfb\x97\xf7\xb4\xf5^\xf5\xe5\xf6c\xfa\xd7\xfe\xbf\x02\xfb\x04$\x05\xa5\x03\xbd\x01\xc3\x00\xdd\x00\xac\x00\xeb\xfe\x1f\xfc\x0b\xfa\xc1\xf9\xd8\xfa\xcb\xfc\xa0\xff\xac\x02b\x04\xf7\x03f\x02=\x01\xd9\x00\x87\x00\x8e\xff\xcf\xfd.\xfc\xd9\xfb\xfc\xfc\xf4\xfeU\x01\x7f\x03^\x04M\x044\x058\x07\xe2\x07 \x06\xef\x03d\x03=\x04=\x04\x84\x01\xcb\xfca\xf9\x9e\xfa\xba\xffI\x036\x01\xa9\xfbI\xf8\xd3\xf9$\xfe\xc4\x01V\x03\xbb\x03\xc0\x03\x7f\x02\xaf\xfe{\xf9\x0e\xf7\x12\xfa\xdc\xff\xa4\x03%\x04\xa3\x03\xd8\x03\x8c\x04\x85\x05\x1c\x07\x91\x08\xe7\x07\xe7\x03w\xfd\xbf\xf6\xe7\xf1\x89\xf0A\xf3\xb0\xf8\xac\xfd\t\x00n\x00\x81\x00Q\x00\xf6\xfe\xb2\xfc\xa2\xfa7\xf9P\xf8\xf2\xf7\x17\xf8\xbb\xf8N\xfa\x88\xfd[\x02\x06\x07\x1d\t\xdf\x071\x05\x9f\x03\xb0\x03\xc3\x03O\x02}\xff\xe3\xfc\xe4\xfb\xc4\xfc\xff\xfe\xb6\x01\xc6\x03\xb0\x04D\x05:\x06\xb1\x06\xc5\x05^\x04\xc3\x03\xaf\x03\xc9\x02\xc4\x00\x03\xff\x99\xfe\x07\xffm\xff\x98\xffw\xffI\xff\xef\xff}\x01<\x02\xaf\x00 
\xfe\x8f\xfd\x02\x00i\x02^\x00\xe8\xf9L\xf5\xde\xf8G\x02\xde\x07\x19\x04\xfe\xfb\x82\xf8\x84\xfc\xd4\x03\x8f\t\xfc\x0b/\x0b\x9f\x077\x02%\xfc~\xf6\xbc\xf2{\xf2\x13\xf6\xf0\xfb\x8f\x01\x07\x05\x88\x05\xa3\x03\xe2\x00\xee\xfe\x9f\xfe>\xff1\xff\x92\xfd\xb2\xfa\x9a\xf7\xd9\xf5\x19\xf7d\xfb%\x00>\x02\x83\x01\x01\x01\xe5\x02k\x05\x05\x05\x8d\x00\x99\xfa\x11\xf7\xb6\xf7\xe4\xfa\x1d\xfe\xd6\x00f\x03\x8f\x05g\x07Z\tz\n*\tw\x06\x98\x05#\x07\n\x08\xfa\x05\x0e\x02\xe0\xfe\xa3\xfd\xcb\xfd`\xfe\x18\xff\xf6\xff\xd4\x00E\x01\xe0\x00\xd5\xff\xd8\xfe\x96\xfeB\xffC\x00V\x00\xd9\xfe\n\xfd<\xfd\xcf\xffK\x02\x9b\x02u\x01f\x01\xab\x031\x06(\x06\xd2\x03\xfa\x01\xf2\x01\xe6\x01S\xff\xdf\xfa\x06\xf8\n\xf9R\xfc)\xfe\x01\xfd\x16\xfb\x99\xfb\xd8\xfe3\x02k\x03\xc3\x02\x9f\x01c\x00\xa4\xfe\x85\xfc\xb1\xfa\xbf\xf9$\xfaJ\xfc\xf7\xff|\x03\xeb\x04\x1d\x04\x9b\x02\xcb\x01\x9c\x01 \x01"\x008\xff\x85\xfev\xfd\x08\xfcc\xfb\x9e\xfc,\xffP\x01\x04\x02\xeb\x01.\x026\x03w\x04\xd7\x04W\x03\'\x00\x13\xfd\xe4\xfb\x80\xfc8\xfd>\xfd\xad\xfd\x8a\xff\xfb\x010\x03S\x02:\x00y\xfeB\xfe\x9a\xff&\x01T\x01\xb8\xffG\xfd\x85\xfb\\\xfb\x8a\xfc\x03\xfeZ\xff\x01\x01\xe7\x02\xf1\x03s\x03?\x02g\x01\xec\x00;\x00\xfa\xfeq\xfd\x93\xfc\xf6\xfc\x1d\xfe\xce\xfe}\xfe\x19\xfe\xe7\xfe\xc1\x00\xe0\x01\xaa\x00\xf6\xfd\xa2\xfc\x0b\xfe0\x00V\x00t\xfe\xb0\xfc]\xfc\xfe\xfc\xfc\xfdb\xff\x0b\x01a\x02\x1c\x03\x96\x03 \x04\x86\x04A\x04\x0e\x03G\x01\x8d\xffG\xfe\xdc\xfd\x8e\xfe\xd9\xff\xfe\x00\xdb\x01\xbc\x02y\x03|\x03\xb7\x02\xab\x01\xb6\x000\x00w\x00\x10\x01\xb0\x00\xb7\xfeI\xfcq\xfb\xf6\xfc`\xff\xb6\x00\xce\x00\xf1\x00\xb4\x01B\x02\xd6\x01\xaf\x00}\xff\xa3\xfe\x1b\xfe\xc2\xfds\xfd#\xfd`\xfd\x0e\xff\xe2\x01\xf0\x03\x84\x03@\x01q\xff\x85\xff\x02\x01g\x02n\x02\xd9\x00\x83\xfe\x9d\xfc\xe1\xfbC\xfc:\xfdT\xfe\xca\xff\xb1\x01\x0f\x03\xac\x02\xb5\x00\xc8\xfeW\xfe=\xff\x1b\x00h\x00\xb7\x00\xa8\x00\x86\xff\xed\xfdR\xfd\x98\xfe\x1f\x018\x03\xa4\x03\xb7\x02\xbe\x01\xbd\x01\x97\x02\r\x03\xdc\x01>\xff\x11\xfd\x0f\xfd\x95\xfes\x00\x08\x03\xbd\x04\t\x04\xd1\x02\xb0\x02(\x03-\x03\x92\x02\xa1\x01\x8b\x00}\xff\xa4\xfe0\xfet\xfeC\xff\x00\x00Z\x00j\x00\x9e\x00\xe4\x00\x96\x00\xa6\xff\xd6\xfe\xd7\xfe\x9b\xffu\x00\x97\x00\xbd\xff\x88\xfe\xbf\xfd\xa3\xfd\x0f\xfe\xc5\xfe\xd6\xff2\x01d\x026\x03a\x03e\x02\x96\x00\xd8\xfe\x9c\xfd\x14\xfd?\xfd\xd1\xfd\x9f\xfe\x8a\xffA\x00:\x006\xff\xeb\xfd\xb5\xfd\x02\xff\xa7\x00j\x01+\x01\x82\x00\xc4\xff\x9f\xfe\xec\xfc\x86\xfb\x8b\xfbO\xfd\x01\x000\x02/\x03Z\x03\xe1\x02\xa7\x014\x00o\xff\xa2\xff\x1e\x00\x00\x00`\xff$\xff\xa0\xff1\x00~\x00\r\x01\x17\x02\xfd\x02\t\x03d\x02\x99\x01\xba\x00\xa8\xff\x9d\xfe\x15\xfeb\xfeZ\xffP\x00\x9d\x00\x82\x00\xc9\x00>\x01?\x01\xfe\x00\xf2\x00\x18\x011\x01\x16\x01\xc5\x00p\x00X\x00g\x00\x0b\x00\x10\xff\x0b\xfe\xbf\xfd\x8d\xfe:\x00\xf3\x01\x94\x02\xb1\x01\x1f\x00.\xffU\xff\x9c\xff\xc8\xfe\x11\xfd\xf4\xfb`\xfc\xde\xfdA\xff\xe9\xff&\x00N\x00P\x00J\x00o\x00\xb4\x00\xe4\x00\xc4\x00D\x00\x8a\xff\xd1\xfeN\xfe\x15\xfe\x15\xfet\xfeW\xffm\x00B\x01\xb9\x01\xbd\x01C\x01\xc1\x00g\x00\xaf\xffN\xfe\xbc\xfc\xc7\xfb\xe4\xfb\x05\xfd\xe2\xfe#\x01\x07\x03\xda\x03\xa6\x03\x00\x03=\x02\x14\x01`\xff\xea\xfd\xa8\xfd\x8f\xfe\xb6\xffW\x00|\x00\x94\x00\xc2\x00\x00\x01 
\x01\xef\x00\xaa\x00\xc4\x00\x1b\x01\xfc\x00.\x00/\xffu\xfeD\xfe\xa0\xfe\x12\xff?\xffR\xff\xbd\xff\x9a\x00>\x01\x05\x01[\x00H\x00\x0f\x01\xd9\x01\xab\x01Q\x00\x96\xfe\x94\xfd\xaa\xfd,\xfe\x86\xfe\xff\xfe\xfb\xff\xff\x00\xff\x00\xc6\xffl\xfe%\xfe=\xff\xe9\x00\xf0\x01\x90\x01\x00\x00\xfd\xfd=\xfc\x13\xfb\xa4\xfa\x1d\xfb\xb4\xfcY\xff0\x02\r\x04\x89\x04\xf7\x03\x9e\x02\xb9\x00\xa7\xfe\x03\xfd\x81\xfc9\xfdo\xfe_\xff\xf0\xff\x8d\x00\xa1\x01\x03\x03\xd5\x03~\x03G\x02\xda\x00\x9b\xff\x90\xfe\x8e\xfdp\xfc}\xfbd\xfb\xa1\xfc\xe8\xfeI\x01\xda\x02K\x03\xf6\x02\\\x02\xa8\x01\xfd\x00\x80\x00$\x00\xf0\xff\xf7\xff\x04\x00 \x00u\x00\xd6\x00\xf2\x00\xef\x00\x1e\x01N\x012\x01\xe3\x00\xab\x00\x99\x00\x95\x00\x82\x00L\x00\xfc\xff\xb4\xffu\xff(\xff\x1e\xff\xcd\xff\xec\x00\x9a\x01^\x01\x0e\x01\xb7\x01\x04\x03\xad\x03\xec\x02\xfb\x00\xb6\xfe\x0f\xfd\x82\xfc\x04\xfd\x17\xfe>\xff\x11\x00\x8a\x00\xfb\x00q\x01\x18\x01<\xff\xf6\xfc\xba\xfcM\xffU\x02\x08\x03%\x01\xa6\xfe/\xfd\xc7\xfc\x16\xfdN\xfe~\x00\xc6\x02\xfb\x03\xfe\x03\xcd\x03\xeb\x03F\x03\xaa\x00\x10\xfd\xf1\xfa\xa5\xfb~\xfe\x95\x01\x81\x03,\x04\xfa\x03\xfd\x02i\x01\xb9\xffE\xfel\xfd\x92\xfd\x92\xfe\xa7\xff\x00\x00H\xff\xf0\xfd\xcd\xfc\x7f\xfc(\xfdp\xfed\xff\x87\xff\x97\xff#\x00\xc8\x00&\x01\\\x01\x7f\x01_\x01\xf2\x00\x82\x00+\x00\xd7\xff\xdc\xff\xc9\x00`\x02\x8f\x03\xbf\x03:\x03O\x02\x06\x01\x99\xffj\xfe\xb6\xfdi\xfd<\xfd#\xfd\x92\xfd\xd1\xfeA\x00\xc3\x00\xce\xff!\xfe\x17\xfdG\xfdJ\xfe\x96\xff\x02\x01T\x02\xc2\x02\xe0\x01\\\x00!\xff-\xfe+\xfd\xac\xfc\xa1\xfd\xa9\xff\xfe\x00\xc1\x00X\x00\x81\x01i\x034\x03\xd1\xff\x99\xfb\x7f\xf9N\xfa\xbe\xfc\x8c\xff]\x02\xa9\x04h\x05B\x047\x02\x91\x00\xd3\xff|\xff\x1b\xff\x1f\xff\x00\x007\x01\xa9\x01\xf9\x00\xdd\xfff\xff\xea\xff\xb5\x00\x02\x01\x8e\x00\x91\xff\x92\xfe\x0f\xfe\xf3\xfd\xb5\xfdc\xfd\xa2\xfdi\xfe\xef\xfe\xcd\xfe\x93\xfe8\xff\xcd\x00b\x02\x04\x035\x02#\x00\xb0\xfd\x19\xfcn\xfc\x9a\xfe\t\x01\x0f\x02\xa0\x01\xde\x00\x8f\x00\xbd\x00\x06\x01\xe9\x00Y\x00\xb7\xff7\xff%\xff\xea\xffT\x01h\x02Q\x020\x01\x03\x00\x7f\xffc\xff*\xff\xf9\xfeo\xff\x95\x00d\x01\x05\x01\x17\x00\xe5\xff\xee\x00\\\x02\xe6\x02\x0e\x02\x95\x00\x9d\xffz\xff\xf5\xff\x12\x01\xd1\x02\x9d\x04\xb0\x05\xae\x05[\x04\x8f\x01\r\xfe0\xfc\xa4\xfd|\x00\xff\x00\x1a\xfeC\xfac\xf8j\xf9l\xfc\xdd\xffF\x02\xa0\x02\xe3\x00}\xfe\x85\xfdH\xfe\xa8\xfe+\xfd\xc7\xfb\xf7\xfd\xa0\x03\xfc\x07\x02\x07>\x02\xea\xfe\xd8\xff4\x03\xe4\x05\x91\x06X\x05\x89\x02q\xfe\xe3\xf9>\xf6\x88\xf4\xe3\xf4\xde\xf6\xc2\xf9\xd3\xfcw\xff\x1f\x01*\x01l\xff\xe1\xfc(\xfb\x15\xfb*\xfc\x86\xfd\xc1\xfe\xd5\xff\xf0\x00w\x02<\x043\x05y\x04\xbb\x02\xf9\x01G\x03`\x05\xe3\x05\x9d\x03\xd8\xffn\xfd\xe8\xfd\t\x00\x82\x01\x82\x01\xec\x00\x96\x00\xbf\x00\xb8\x01\x90\x03\x9b\x05\xe4\x06\xb8\x06\xf0\x04E\x02\xee\xff\xc4\xfe\xf2\xfe\xf5\xff\xe2\x00\x13\x01\x98\x00\xcd\xff\xf3\xfew\xfe\xc5\xfeZ\xffd\xff\xd8\xfe\x18\xfe\x10\xfdw\xfb\r\xfa\x0e\xfb\xda\xff\x13\x06\x96\x08J\x04\x1d\xfc\xaf\xf6\xea\xf7\xdd\xfd\x16\x04M\x08B\n\x8a\t\xde\x05\'\x005\xfa\xd1\xf5[\xf4u\xf6\x08\xfbX\xff\x7f\x01(\x02\n\x03g\x04\xde\x04u\x03\xc6\x00\xfa\xfd\x9f\xfb\xfc\xf9B\xf9c\xf9c\xfar\xfc,\xffm\x01S\x02\r\x02\xab\x01\xa7\x01\xe3\x00)\xfeH\xfa\xcc\xf7\xc3\xf8\xef\xfc\xf6\x01s\x05\xcb\x06\xe2\x06\x85\x06(\x06g\x06\x9b\x07_\t\xd8\n.\x0b\xbe\t,\x06D\x01\\\xfdn\xfc\xfd\xfd\xc9\xffW\x00\xd1\xff\x1e\xff\xc2\xfe\x9d\xfe^\xfe\xdd\xfd?\xfd\xdb\xfc\xff\xfc\xc2\xfdw\xfe\x1b\xfeo\xfd#\xff\x05\x04V\x08\xd9\x07B\x03D\xff\t\xff\x8c\x012\x04\xe1\x05\x9f\x06\x95\x05d\x01\xaa\xfa\xe4\xf4\xc3\xf3\x0c\xf7X\xfa\xee\xf9\xcd\xf6C\xf5\x1b\xf8\xb1\xfd\xe2\x01\x8f\x02U\x01\\\
x00x\xffs\xfd\x89\xfaq\xf8\xb2\xf8\x7f\xfb\xd0\xff\xca\x03\xa2\x05*\x05\xfe\x03\xce\x03\xbc\x047\x05\x85\x03\x80\xff(\xfb\xfa\xf8y\xf9\x01\xfb<\xfcj\xfd\x0b\xff\xca\x00:\x02\xae\x03[\x05\xd4\x06|\x07\x95\x06\x8a\x03,\xff\x88\xfb?\xfa\xa7\xfb\xea\xfeg\x02\x85\x04\xd8\x04\xdb\x03C\x02\xd3\x00&\x00b\x00\x1b\x01\x9d\x01i\x01O\x00Q\xfe!\xfcs\xfb\xe4\xfd\xa2\x02\xfc\x05"\x05\xc0\x01\xcb\xffo\x00\xc8\x01\xfd\x02\xc8\x04l\x06\x9a\x05\xb0\x01\xb1\xfc\xe7\xf8\xfd\xf6\xa2\xf6\xb8\xf7\xf5\xf9\xbc\xfcE\xff\xc2\x00\xe3\x00.\x00N\xff\xa2\xfe-\xfeO\xfd\xfe\xfa\xb3\xf7\x8c\xf6\xe8\xf9\xa4\xff\xbc\x02\xad\x00#\xfc\xdb\xf9\x1b\xfcM\x01\xdb\x05\x12\x07\xce\x04\xd2\x00\x14\xfd{\xfa\x16\xf9T\xf9\xfd\xfb\xb8\x00\x8c\x05\x84\x08a\t6\t\xdc\x08\xaa\x08\xf1\x08z\t\xd3\x08|\x05\x19\x00C\xfb.\xf9\x15\xfa\x85\xfck\xfe\x9f\xfe\xe2\xfd\xc3\xfd\x9c\xfeJ\xff\xec\xfe\xc6\xfd\x86\xfc\xa0\xfb\x04\xfb1\xfav\xf9\xf1\xfaF\x00(\x07\xfb\n\x1d\n\x03\x07\x9f\x04\x06\x04%\x05\xb0\x07G\ns\n\xd5\x06\xad\x00\xe9\xfa\x90\xf7\x91\xf6\x11\xf7\xa2\xf8\xf4\xfat\xfda\xff\xfd\xff\x13\xff\x94\xfd)\xfdM\xfeB\xffI\xfe\xfa\xfb/\xfa\x0c\xfa\x96\xfbV\xfey\x01\xf1\x03!\x05;\x05\xc7\x04-\x04L\x03\xd2\x01\xd5\xff\x86\xfd\xff\xfa\xd3\xf8\xba\xf8\xc2\xfc\xda\x03\xb8\x08/\x07\x85\x01\xc6\xfd\xd9\xfe\xd9\x02\xda\x06y\t\x98\n\x10\nh\x07\xcc\x02\x91\xfd\xe8\xf9r\xf9\xed\xfb\x84\xffG\x02H\x03\xe4\x02\xf8\x01\'\x01\xc4\x00\xf3\x00\x8f\x01\xbc\x01\xf6\xff\xb2\xfb/\xf7z\xf6\t\xfb\xf7\x00\x03\x03\xe0\x00\x07\xff\x99\x00\x13\x04\x9d\x06O\x07\xe9\x05\xfd\x01\xa7\xfc4\xf8\xe5\xf5\x81\xf5\xb0\xf6\x87\xf9x\xfdH\x01\xdf\x03\xb4\x04\xce\x03\xeb\x01Z\x00\x1b\x00\xb3\x00\\\x00\x19\xfe\x1a\xfbq\xf9\xc6\xf9v\xfb\r\xfe"\x01\x8a\x03\x1d\x04\x17\x03\xd0\x01\r\x01m\x00M\xffy\xfd\xb7\xfb_\xfb\xe2\xfcO\xff\x96\x01>\x03\xe9\x03\xfb\x03\xf8\x04=\x07\x7f\x08\x11\x07\xb5\x04\t\x04+\x05\xba\x05s\x03\x9b\xfe(\xfa\xbe\xf9\x14\xfe\xd3\x02\x93\x02K\xfdc\xf8j\xf8\x8f\xfc\xd1\x00\xae\x02\xe6\x02J\x03\x90\x03q\x01b\xfc\x02\xf8\x9f\xf8\xa8\xfdN\x02\xaa\x03B\x03{\x03\xa3\x04\xf6\x05m\x07\xe1\x08\xe4\x08\xd4\x05\xbb\xffy\xf8\x83\xf2\xcd\xefU\xf1b\xf6\xff\xfbJ\xff0\x00|\x00\xc8\x00\xf2\xff\xa8\xfd7\xfb\x8f\xf9\x8c\xf8$\xf8p\xf8,\xf9J\xfaz\xfcQ\x00\xd3\x04\xc9\x07\xd7\x07\xc8\x05\xd7\x03q\x03\xd5\x03+\x03\x9b\x00K\xfdJ\xfb\xa4\xfb\xfe\xfd\x1a\x01m\x034\x04w\x04\x9c\x05\xfe\x06\xc8\x06\xf6\x04a\x03\x0e\x03\x02\x03\xc8\x01\xc5\xff\xab\xfe\xe5\xfe.\xff\xc9\xfe\x00\xfey\xfd\x00\xfe\xe6\xff\xc8\x01}\x01\xf9\xfe\xff\xfc\x0c\xfe\xc4\x00\xc7\x00\x05\xfc\x93\xf6a\xf7u\xff\x00\x07\t\x06-\xfeX\xf8\x14\xfa\xee\x00\xa3\x07\x96\x0b?\x0c\x90\tR\x04\x12\xfe)\xf8\xcb\xf3K\xf2d\xf4U\xf9*\xff\xc0\x03\x95\x05w\x04\xa0\x01\xeb\xfe\xe1\xfd\x8a\xfe>\xffR\xfe\xac\xfbt\xf82\xf6b\xf6\xc1\xf9\xcd\xfe\x14\x02\xe0\x01\xab\x00\xe6\x011\x05\xa2\x06{\x03;\xfd\x1a\xf8>\xf70\xfa\x0b\xfe\x0e\x01e\x03h\x05V\x07\x86\t\x1d\x0b&\n\xb4\x06\xfa\x03`\x04X\x06\\\x064\x03%\xff\x17\xfd\x8f\xfd\xe0\xfe\x80\xffO\xff,\xff\xc5\xff\xb6\x00\xf5\x000\x00N\xffX\xff*\x00s\x003\xffV\xfd\x1e\xfdj\xff*\x02\xc5\x02z\x01\xf9\x00\xf6\x02\xd9\x05\xa7\x06\xd3\x04\xad\x02\t\x02\x06\x02Y\x00t\xfc\xc6\xf8L\xf81\xfb_\xfe\xcf\xfe\xe7\xfc\xf2\xfb\x05\xfe\x97\x01\x9e\x03*\x03\xb6\x01\xab\x00\xf6\xff\xd8\xfe\xf9\xfc\xe0\xfa\xb8\xf9\xa3\xfa\xec\xfd?\x02E\x05\x92\x05\xef\x03I\x02\x9e\x01M\x01\x86\x00z\xff\xac\xfe\xe4\xfd\xc7\xfc\x0e\xfc\x02\xfd\xaf\xffX\x02Y\x03\xee\x02\xbc\x02\xbf\x03]\x05\x0e\x06\x9e\x04J\x01\xd0\xfd\x1d\xfc\xa7\xfc\n\xfe\xa3\xfe\x8b\xfe\x1c\xff\xbf\x00m\x02\xcf\x02r\x01+\xff\xba\xfdr\xfe\xb1\x006\x02?\x01A\xfe\x83\xfb\xef\xfad\xfcF\xfe\xa4\xff\xf2\
x00\xa5\x02\x0e\x04"\x04\xf4\x02\x95\x01\xa3\x00\xe0\xff\x06\xff\x1f\xfel\xfdE\xfd\xb0\xfd\x0f\xfe\xb7\xfd\x10\xfdu\xfdo\xff\x83\x01v\x01\x03\xff\xb5\xfc\x07\xfd$\xff\x1a\x00\xb5\xfe\x82\xfc\x9a\xfbu\xfc\n\xfel\xff\x85\x00\x88\x01\x87\x02|\x03\x1d\x04\x10\x04R\x03;\x02\x1e\x01\x11\x00\x07\xffL\xfe\x88\xfe\xbd\xff\xf3\x00\x81\x01\xe0\x01\x91\x027\x03)\x03c\x02M\x01Y\x00\xd3\xff\xba\xffo\xff$\xfe\x1d\xfc\x1e\xfb\x9d\xfc\xaa\xff\xb6\x01\xa9\x01\xd7\x00\xdf\x00\xb5\x01,\x02\x98\x01N\x00\x00\xff1\xfe\x0f\xfeh\xfe\xaa\xfeq\xfeb\xfe\x83\xff\x85\x01\xa5\x02\xc7\x01\xf3\xff\x1b\xff\xdc\xff \x01h\x01\x03\x00\x83\xfdR\xfb|\xfa\xed\xfa\xfb\xfb5\xfd\xb6\xfe\x88\x00\t\x02?\x02\xf5\x000\xff\'\xfe2\xfe\xd7\xfe|\xff\xb3\xffJ\xffU\xfe<\xfd\xc7\xfc\xa1\xfd\xad\xff\xd0\x01\xd6\x02q\x02V\x01\x95\x00\xbe\x00Q\x01\x1b\x01\x88\xff\xab\xfdY\xfd\r\xff8\x01-\x02(\x02\x8d\x02\x97\x030\x04\xdc\x03K\x03\x03\x03\xa0\x02\xa6\x018\x00\xe3\xfe\x0b\xfe\xc8\xfd2\xfe)\xff)\x00\xc5\x00\x10\x01\x1d\x01\xb7\x00\xe1\xff*\xff\x17\xff\x9a\xff#\x00\x1d\x00{\xff\xaf\xfeD\xfeh\xfe\xd7\xfed\xff\x0e\x00\xee\x00\x01\x02\xd3\x02\xe5\x029\x02G\x01`\x00T\xff\x15\xfe\x1f\xfd\x0c\xfd\xf4\xfdA\xffD\x00\xc2\x00\xa7\x00\x0b\x00`\xff>\xff\xd3\xff\xbc\x00.\x01\x9a\x00D\xff\xe2\xfd\xbf\xfc\xdb\xfb\xb5\xfb\x19\xfd\xdf\xff\x9c\x02\xdd\x03\x95\x03\xb3\x02\xca\x01\xcc\x00\xda\xff^\xffk\xff\x98\xff\xaf\xff\n\x00\xad\x00\xf6\x00\xdd\x006\x01@\x02\xfb\x02\x87\x02d\x01\xbc\x00\xbf\x00r\x00F\xff\xfd\xfd\x9e\xfdV\xfet\xff\x15\x008\x00\xa4\x00\xb1\x01\xa1\x02\x9c\x02\xde\x01O\x01h\x01\xab\x01M\x01;\x009\xff\xf7\xfe`\xff\xb5\xffT\xff\xae\xfe\xd3\xfe\x18\x00\xa7\x017\x02&\x01H\xff#\xfe`\xfe3\xffH\xffA\xfe\x0c\xfd\xda\xfc\xbf\xfd\xc5\xfe?\xff\x87\xff\x12\x00\xe4\x00\xb5\x016\x02C\x02\xe4\x01\x14\x01\xe4\xff\x92\xfez\xfd\xfa\xfc>\xfd$\xfeM\xffd\x00<\x01\xb9\x01\xcf\x01\x93\x01\x00\x01;\x00\xa1\xff"\xffZ\xfeG\xfdt\xfc~\xfcu\xfd\xff\xfe\xcc\x00~\x02v\x03c\x03\xa3\x02\xd1\x01 \x01C\x00\'\xffO\xfe\x16\xfe&\xfe\x1a\xfe!\xfe\xab\xfe\xc0\xff\xf9\x00\xc5\x01\xce\x01=\x01\xa3\x00m\x00d\x00\xf7\xff\xff\xfe\x12\xfe\xdd\xfd\x8b\xfe\x94\xff\x03\x00\xaa\xff\x89\xffv\x00\xd1\x01W\x02\xbf\x01\x04\x01\x04\x01\x83\x01\x86\x01x\x00\xc8\xfe\xaf\xfd\xb0\xfd\xff\xfd\xfc\xfd&\xfe)\xff\xd2\x00\x1e\x02/\x02\x1b\x01\xcb\xff0\xff\x82\xff-\x00p\x00\xeb\xff\xc1\xfeg\xfdg\xfc\x16\xfc\x93\xfc\xe2\xfd\xcb\xff\xb9\x01 \x03\xc2\x03\xad\x03\x04\x03\xd0\x01\x13\x00\x16\xfe\xc7\xfc\x0e\xfd\x91\xfe\xf6\xffs\x00\x86\x00#\x01{\x02\xa6\x03\xab\x03\x81\x02\xda\x00a\xff]\xfe\xae\xfd\xf0\xfc\xf2\xfb)\xfb\x89\xfb\x89\xfd]\x00\x8e\x02,\x03y\x02\x80\x01\x03\x01\r\x01\x18\x01\xaf\x00\xe6\xff 
\xff\xb0\xfe\xa7\xfe\xcc\xfe\xf4\xfeI\xff\xfb\xff\xcf\x00j\x01\xa3\x01{\x01\xe9\x00\x00\x00\t\xffP\xfe\xef\xfd\xf5\xfdR\xfe\xba\xfe\x18\xff\xd5\xff/\x01\xa2\x02;\x03Q\x02v\x00t\xff:\x00\xcf\x01\xb4\x02Y\x02(\x01\xc5\xffw\xfe\'\xfd\xef\xfbs\xfbU\xfcq\xfe\xc0\x006\x02\x80\x02\x06\x02K\x01\x90\x00\xfb\xff\x89\xff\x01\xffo\xfe8\xfee\xfe\xaa\xfe\x03\xff\xe7\xff\x82\x01-\x03\x1f\x043\x04\xa4\x03\x98\x02\xfd\x00\xc9\xfe\x93\xfc[\xfb\xa5\xfb\x14\xfd\xd8\xfe=\x00\x18\x01\xa1\x01\xf3\x01\t\x02\xef\x01\xa4\x010\x01\xaf\x00\x0e\x00(\xff%\xfed\xfd+\xfd}\xfd\x13\xfe\xeb\xfe\'\x01I\x04\xd0\x04I\x02\xf2\xffv\xff\xac\xffb\xff\xf2\xfe\xe4\xfe\x1b\xffL\xff_\xff{\xff\xf6\xff\xd8\x00\xd0\x01I\x02\xf4\x01g\x01@\x01D\x01\x00\x01~\x00$\x00\xf9\xff{\xffJ\xfe\xd2\xfc,\xfc;\xfd\x8f\xff\x8a\x01\x0e\x02\x7f\x01\xd8\x00\xc0\x00L\x01\xcc\x01\x82\x01l\x00\x12\xff\x1c\xfe\x01\xfe}\xfe\xd4\xfe\xf3\xfe\x9d\xff+\x01\xd4\x02\x8b\x03\x1a\x03\xec\x01\x8d\x00\x8d\xff\x01\xff\x8c\xfe\x02\xfe\x9f\xfd\x9a\xfd\x02\xfe\xd2\xfe\xe1\xff\xe6\x00\x9f\x01\x03\x02<\x02s\x02\x80\x02\x08\x02\xb8\x00\xb8\xfe\xe4\xfc9\xfc\xee\xfc[\xfe\xc4\xff\xe3\x00\xac\x01\x0e\x02\xff\x01\xa1\x01>\x01\xe3\x00Z\x00\xa4\xff?\xff~\xff\xc5\xffj\xff\x9f\xfeC\xfe\x10\xff\xf0\x00\xfb\x02\xca\x03\xc3\x02\x0e\x01\x15\x00\xf6\xff#\x00M\x00i\x00=\x00\x9e\xff\xc9\xfe\x18\xfe\xcb\xfd<\xfeS\xfft\x00C\x01\xbc\x01\xe8\x01\xd8\x01\x9c\x01\x1e\x01v\x00\xd9\xffV\xff\xde\xfea\xfe\x17\xfeg\xfev\xff\xc5\x00\x90\x01}\x01\xcd\x00E\x00u\x00\xf5\x00\x07\x01`\x006\xff\xf4\xfd\x10\xfd\xe6\xfc}\xfd\x88\xfe\xc1\xff\xe5\x00\xaf\x01\x1a\x02F\x02#\x02}\x01\x8d\x00\xad\xff\xe4\xfe<\xfe\xdd\xfd\xc2\xfd\xa8\xfd\xa2\xfd\x0e\xfe\xf2\xfe\xdd\xffS\x00D\x00\x1b\x00P\x00\xcb\x00\xed\x00:\x00\xf3\xfe\xc3\xfd&\xfdN\xfdK\xfe\x05\x00\xfc\x01F\x03P\x03|\x02\x92\x01\xea\x00O\x00\x9e\xff\x1e\xff\x0f\xff\x1d\xff\xb2\xfe\xad\xfd\xaa\xfc\x96\xfc\xad\xfd 
\xff\xf8\xff\x12\x00\xfe\xff?\x00\xcb\x00+\x01\xfc\x00b\x00\xbf\xffO\xff\x1b\xff\xfb\xfe\xcc\xfe\xb4\xfe\t\xff\xee\xff\x1d\x01\r\x02=\x02\x97\x01\x8e\x00\xad\xff%\xff\xc7\xfeT\xfe\xce\xfdu\xfd\x94\xfdJ\xfeY\xff_\x00\xf3\x00\xe6\x00r\x003\x00\x99\x00W\x01\x94\x01\xe7\x00\xb6\xff\xbc\xfem\xfe\x86\xfek\xfe9\xfe}\xfe:\xff\xfc\xffr\x00\xa9\x00\xbc\x00\xca\x00\xdc\x00\xed\x00\xde\x00\x96\x00\x06\x000\xffd\xfeI\xfe>\xff\xc0\x00\xbc\x01\xb9\x01J\x01;\x01\xaf\x01!\x02\x0f\x02K\x01\x13\x00\xee\xfe7\xfe\xe9\xfd\x05\xfe\xa7\xfe\xac\xff\xb9\x00\x96\x01\'\x02`\x02P\x02\x03\x02\x85\x01\xf5\x00b\x00\xc3\xff/\xff\xd7\xfe\xec\xfev\xff6\x00\xc4\x00\xf5\x00\x04\x01G\x01\xa8\x01\xd6\x01\x9b\x01\x03\x01J\x00\xb5\xffO\xff\x1d\xffS\xff\xf1\xff\xab\x00;\x01\xa3\x01\x0f\x02\x90\x02\xf2\x02\xca\x02\xe8\x01\xb1\x00\xd7\xff\xa7\xff\xca\xff\xaf\xff\x19\xffe\xfe7\xfe\xdb\xfe\xec\xff\x9c\x00L\x004\xffc\xfe\xc8\xfe?\x00\xa7\x01\x05\x02q\x01\xa7\x00!\x00\xdb\xff\xb7\xff\xb9\xff\xf4\xffZ\x00\xba\x00\x1f\x01\xb8\x01M\x02E\x02\x8b\x01\xbf\x00E\x00\xd9\xff\x17\xff\xfd\xfd\xf1\xfc\x83\xfc\x10\xfdp\xfe\xe1\xff\x9c\x00\x98\x00}\x00\xcb\x00F\x01B\x01s\x00+\xff\x01\xfe\x80\xfd\xd3\xfd\x9c\xfeR\xff\xa7\xff\xb7\xff\xe5\xffx\x00G\x01\xbe\x01{\x01\xb6\x00\xee\xffI\xff\x8e\xfe\xc2\xfdw\xfd#\xfeq\xff\x8e\x00\x11\x013\x01[\x01\xad\x01\xf5\x01\xe1\x01f\x01\xac\x00\xc7\xff\xd0\xfe$\xfe\x0e\xfeo\xfe\xda\xfe\x06\xff\x06\xff\x19\xff?\xff>\xff\x0c\xff\xf1\xfe\x1f\xfft\xff\xc8\xff\x13\x00E\x00\x15\x00\\\xffw\xfe,\xfe\xe3\xfe\x19\x00\xc2\x00\x81\x00\xfe\xff\x07\x00\xa0\x000\x01]\x01*\x01\xa9\x00\xd9\xff\xb4\xfer\xfd\x97\xfc\x9d\xfc\x92\xfd\xe3\xfe\xe7\xffi\x00\xad\x00\xf8\x00$\x01\xec\x00R\x00\xac\xffK\xff\x19\xff\xcf\xfeL\xfe\xd0\xfd\xc9\xfdn\xfe\x80\xffk\x00\xda\x00\xec\x00\xec\x00\xf7\x00\xe5\x00r\x00\x9d\xff\xac\xfe\n\xfe\x1b\xfe\xfa\xfe9\x000\x01\x8d\x01\x84\x01\x98\x01\x11\x02\xa5\x02\xc5\x025\x02&\x01\xf5\xff\x07\xff\x9e\xfe\xc7\xfeM\xff\xd0\xff\x05\x00\xf3\xff\xdc\xff\xd9\xff\xd1\xff\xb8\xff\xc3\xff4\x00\xf7\x00\x8c\x01\x8b\x01\xf9\x00C\x00\xde\xff\x02\x00\x7f\x00\xe6\x00\x13\x01*\x01a\x01\xc9\x01 
\x02\x04\x02h\x01\xba\x00`\x00(\x00\x9a\xff\xbc\xfe\x11\xfe\x0e\xfe\xa9\xfen\xff\xe1\xff\xea\xff\xea\xffJ\x00\xea\x00.\x01\xd0\x008\x00\xda\xff\xb3\xffz\xff\x12\xff\xc2\xfe\xf9\xfe\xbe\xff\x82\x00\xbd\x00\x83\x00Y\x00\x8e\x00\xda\x00\xc9\x00N\x00\xd2\xff\xb4\xff\xe1\xff\x00\x00\xdd\xff\xa3\xff\xa4\xff\t\x00\xaf\x006\x01f\x01^\x01l\x01\xa8\x01\xc5\x01U\x01I\x00\x1a\xffa\xfec\xfe\xeb\xfeu\xff\xa1\xff\x8d\xff\x82\xff\x98\xff\xbf\xff\xe3\xff\r\x00M\x00\x87\x00~\x00\x16\x00\x8b\xffO\xff\xa5\xffK\x00\xb7\x00\xc8\x00\xcc\x00\t\x01p\x01\xb3\x01\x97\x010\x01\xe2\x00\xe1\x00\xd0\x00&\x00\xfe\xfe&\xfe1\xfe\xd5\xfep\xff\xb0\xff\xbf\xff\xe1\xff&\x00`\x00U\x00\xe1\xff9\xff\xca\xfe\xbb\xfe\xcd\xfe\xc9\xfe\xc8\xfe\xfb\xfeV\xff\x90\xffj\xff\x00\xff\xd3\xfe?\xff\xf1\xffC\x00\x08\x00\xb5\xff\xae\xff\xce\xff\xaf\xffJ\xff\x0f\xffm\xffI\x00\xff\x00\n\x01\x95\x00E\x00u\x00\xdb\x00\xee\x00e\x00\x8d\xff\x04\xff"\xff\x9c\xff\xcc\xff~\xff\x1e\xff\r\xffF\xff~\xff\x8a\xff\x81\xff\xa1\xff\x11\x00\xb5\x00<\x01<\x01\x97\x00\xb3\xff:\xffd\xff\xd1\xff(\x00k\x00\xb2\x00\xde\x00\xd5\x00\xad\x00~\x00W\x00F\x00<\x00\x02\x00\x8e\xff6\xffQ\xff\xb4\xff\xe6\xff\xcb\xff\xb0\xff\xde\xffK\x00\xa7\x00\xc0\x00\xa2\x00y\x00^\x00%\x00\xa1\xff\xf2\xfe\x84\xfe\xa8\xfe>\xff\xcf\xff\xf4\xff\xa1\xffK\xff]\xff\xb7\xff\xe9\xff\xbd\xffx\xff\x81\xff\xdc\xff\x17\x00\xf5\xff\xb5\xff\xbd\xff\x19\x00w\x00y\x00\x19\x00\xc6\xff\xfa\xff\xa3\x00%\x01\x12\x01\x9a\x00"\x00\xbf\xff`\xff\x16\xff\x1d\xff|\xff\x00\x00b\x00]\x00\xf3\xff|\xffY\xff\x9b\xff\x0c\x00r\x00\xa5\x00\x9b\x00m\x00J\x00B\x002\x00\x0e\x00\x17\x00n\x00\xca\x00\xe4\x00\xcc\x00\xbb\x00\xcc\x00\xfb\x00(\x01%\x01\xd2\x00L\x00\xdc\xff\xaf\xff\xaf\xff\xa8\xff\x91\xff\x8c\xff\xa8\xff\xdc\xff\x0e\x00\'\x00)\x00!\x00\t\x00\xd8\xff\x82\xff\x1d\xff\xfc\xfeQ\xff\xe1\xffS\x00p\x00A\x00\x0e\x00\x1f\x00]\x00y\x00^\x00I\x00S\x00T\x00#\x00\xdb\xff\xc0\xff\x06\x00\x8b\x00\xd0\x00\x83\x00\xe8\xff\xa3\xff\xe9\xffG\x00E\x00\xf1\xff\xbb\xff\xe1\xff-\x000\x00\xcd\xffY\xffE\xff\xab\xff\x1c\x00\x1f\x00\xab\xff1\xff\x1f\xffx\xff\xf6\xffN\x00[\x00\x12\x00\xac\xff\x86\xff\xbf\xff\x12\x00<\x00G\x00B\x00\x1c\x00\xe4\xff\xd3\xff\xf6\xff/\x00j\x00\x9b\x00\x91\x00>\x00\xf0\xff\xf8\xff&\x00\x19\x00\xd5\xff\xaa\xff\xc0\xff\xf0\xff\x00\x00\xf2\xff\xfd\xff-\x00K\x00\x16\x00\x8f\xff\xf4\xfe\x9b\xfe\xc2\xfeV\xff\x00\x00O\x00\x14\x00\x88\xff\x1f\xff\x18\xffE\xffe\xff\x83\xff\xd4\xffJ\x00\x91\x00\x85\x00e\x00\x80\x00\xbc\x00\xd5\x00\xae\x00d\x00E\x00j\x00\x8e\x00]\x00\xf1\xff\xd8\xffK\x00\xcf\x00\xba\x00\x0b\x00P\xff\x19\xffw\xff\xf4\xff\t\x00\xaf\xffb\xff\x7f\xff\xdc\xff\x15\x00\xfc\xff\xc4\xff\xb9\xff\xf1\xff:\x00V\x00)\x00\xe9\xff\xed\xff+\x00M\x00\x1c\x00\xc8\xff\xb0\xff\xe6\xff7\x00l\x00q\x00L\x00\x07\x00\xd1\xff\xd6\xff\x03\x00\x1f\x00\x1e\x00\x19\x00\x13\x00\xf0\xff\xbc\xff\x9f\xff\xae\xff\xd4\xff\xe2\xff\xba\xffz\xffS\xffj\xff\xb5\xff\n\x00=\x009\x00\x06\x00\xc9\xff\xa0\xff\x91\xff\x9b\xff\xd0\xffC\x00\xc0\x00\xeb\x00\xa1\x00%\x00\xdf\xff\xfc\xffR\x00\x91\x00\x94\x00v\x00T\x00\x1b\x00\xb2\xffO\xffJ\xff\xb3\xff5\x00p\x00^\x00C\x00Q\x00\x81\x00\x9a\x00t\x00 
\x00\xde\xff\xe7\xff)\x00Y\x00W\x00R\x00\x88\x00\xdc\x00\xe5\x00m\x00\xb3\xff=\xff\\\xff\xe9\xffc\x00c\x00\x06\x00\xbf\xff\xd4\xff\x10\x00+\x00\x14\x00\xe9\xff\xbf\xff\xb2\xff\xdb\xff*\x00r\x00\x9b\x00\xb1\x00\x9e\x00:\x00\x95\xff\x12\xff\xfe\xfeA\xff\x97\xff\xc6\xff\xca\xff\xc4\xff\xc6\xff\xb5\xff\x8a\xffh\xffw\xff\xb6\xff\xed\xff\xdf\xff\x89\xff0\xff\x14\xff[\xff\xf1\xffy\x00\x9b\x00_\x00+\x00Q\x00\xb3\x00\xf8\x00\xe0\x00\x86\x001\x00\x08\x00\xea\xff\xbd\xff\x9b\xff\xae\xff\xfb\xffE\x00Y\x00?\x00\x0c\x00\xd6\xff\xb4\xff\xb7\xff\xc8\xff\xc6\xff\xb1\xff\x94\xffu\xffh\xff\x83\xff\xd1\xffD\x00\x9d\x00\x99\x00G\x00\x0c\x001\x00\x93\x00\xca\x00\x92\x00\x10\x00\xb9\xff\xd0\xff&\x00b\x00b\x00J\x009\x002\x00\x14\x00\xd4\xff\x9f\xff\xa6\xff\xf3\xffY\x00y\x00\x17\x00y\xff%\xff8\xfft\xff\x9b\xff\xa7\xff\xb6\xff\xd5\xff\xeb\xff\xe3\xff\xc3\xff\xb3\xff\xd5\xff\x0c\x00\xec\xffZ\xff\xd3\xfe\xc2\xfe\x15\xff}\xff\xbb\xff\xcc\xff\xca\xff\xd3\xff\xf0\xff\x1a\x00>\x00i\x00\xa0\x00\xbc\x00\x91\x00\x1d\x00\x86\xff\t\xff\xf3\xfeO\xff\xce\xff+\x00\\\x00\x80\x00\xab\x00\xd5\x00\xdc\x00\x9f\x00:\x00\xf2\xff\xee\xff\xfc\xff\xda\xff\xac\xff\xba\xff\x16\x00\x8b\x00\xcc\x00\xba\x00}\x00Q\x00<\x003\x00 \x00\xf9\xff\xba\xfft\xffJ\xffX\xff\x91\xff\xca\xff\xed\xff\x14\x00V\x00\x9f\x00\xbe\x00\xa7\x00\x8b\x00\x8d\x00\x93\x00j\x00\x1c\x00\xf0\xff\xf6\xff\xfc\xff\xdd\xff\xba\xff\xca\xff\t\x002\x00\r\x00\xab\xff]\xffj\xff\xcd\xff\x1e\x00\xfc\xff\x88\xff0\xffB\xff\x9f\xff\xec\xff\x03\x00\t\x00-\x00p\x00\x9b\x00\x8c\x00m\x00{\x00\xb7\x00\xd8\x00\xa5\x00\x1d\x00\x86\xff3\xff6\xffd\xff\x8e\xff\xb9\xff\xfc\xffK\x00y\x00x\x00`\x00I\x006\x00.\x00/\x00\x11\x00\xcc\xff\x97\xff\xc3\xffM\x00\xd3\x00\xfd\x00\xdb\x00\xbf\x00\xdc\x00\x06\x01\xf0\x00\x91\x009\x00 \x00\x1b\x00\xf4\xff\xc5\xff\xcd\xff\x07\x001\x00&\x00\x15\x001\x00`\x00o\x00U\x00*\x00\x04\x00\xf6\xff\xf0\xff\xd0\xff\x8d\xffN\xff<\xffb\xff\xc5\xffL\x00\xb0\x00\xb0\x00\\\x00 \x00J\x00\xa0\x00\x92\x00\xfe\xfff\xffN\xff\xa0\xff\xf4\xff\x0e\x00\x11\x00*\x00?\x00)\x00\xeb\xff\xa3\xffz\xff\x9a\xff\xfc\xffI\x001\x00\xb3\xff(\xff\xef\xfe\x19\xffT\xffq\xff\xa0\xff\x19\x00\xab\x00\xe8\x00\xaa\x009\x00\xe9\xff\xd5\xff\xd2\xff\xb8\xff\x86\xffU\xffF\xff^\xff\x8a\xff\xb7\xff\xd2\xff\xd8\xff\xcf\xff\xba\xff\xa5\xff\x97\xff\x8c\xff\x92\xff\xa3\xff\xa3\xff\x8e\xff\x80\xff\x89\xff\x99\xff\xa2\xff\xaf\xff\xd8\xff\x18\x00n\x00\xd6\x00\x1e\x01\x0b\x01\xa5\x004\x00\xda\xffk\xff\xe8\xfe\x9d\xfe\xd6\xfeh\xff\xe6\xff\x14\x00\x08\x00\xe8\xff\xcf\xff\xc5\xff\xc4\xff\xb2\xffx\xff/\xff\x0e\xff:\xff\x82\xff\xac\xff\xb8\xff\xda\xff8\x00\xaa\x00\xde\x00\xb4\x00v\x00\x87\x00\xe4\x00#\x01\xee\x00O\x00\xad\xffj\xff\x8c\xff\xbc\xff\xc2\xff\xc1\xff\xf1\xff;\x00S\x000\x00\x03\x00\x04\x00\'\x00=\x003\x00\x03\x00\xb3\xffm\xffc\xff\xb0\xff3\x00\xa8\x00\xeb\x00\x03\x01\xff\x00\xd8\x00\x96\x00Z\x00A\x00@\x00.\x00\xfa\xff\xc1\xff\x9e\xff\x86\xffg\xffU\xffs\xff\xcd\xff7\x00\x80\x00\x9b\x00\x92\x00w\x00P\x00 
\x00\xe5\xff\xa6\xff\x87\xff\x9b\xff\xd4\xff\xf8\xff\xf2\xff\x03\x00b\x00\xf6\x00c\x01f\x01\n\x01\x91\x00-\x00\xef\xff\xd0\xff\xb9\xff\xa7\xff\xa8\xff\xca\xff\r\x00d\x00\x9e\x00\x84\x00\x18\x00\xbc\xff\xc2\xff\x1b\x00j\x00_\x00\x0e\x00\xbd\xff\x94\xff\x8a\xff\x8a\xff\x9d\xff\xe1\xffY\x00\xcd\x00\x05\x01\n\x01\x0c\x01\x17\x01\xff\x00\xae\x00E\x00\xf2\xff\xc7\xff\xaf\xff\x99\xff\x8d\xff\xb2\xff\x0e\x00g\x00}\x00D\x00\xf4\xff\xc9\xff\xd1\xff\xf0\xff\xfa\xff\xdc\xff\xa0\xffk\xffk\xff\x9d\xff\xcd\xff\xe3\xff\x05\x00N\x00\x9d\x00\xcb\x00\xd1\x00\xaa\x00P\x00\xd7\xffp\xffA\xffC\xff^\xffo\xffT\xff\x1a\xff\xff\xfe/\xff\x9a\xff\xff\xff$\x00\x0e\x00\xef\xff\xdc\xff\xc7\xff\x9c\xffk\xff\\\xff\x85\xff\xbd\xff\xdc\xff\xda\xff\xd4\xff\xe7\xff\x17\x00U\x00\x86\x00\x95\x00\x80\x00_\x003\x00\xe7\xff\x88\xffB\xff#\xff\x1b\xff7\xff\x93\xff\x06\x00F\x00@\x00\x1a\x00\xee\xff\xc9\xff\xb3\xff\xb3\xff\xc7\xff\xcd\xff\xad\xffh\xff4\xffK\xff\xbd\xffS\x00\xba\x00\xc6\x00\x98\x00x\x00{\x00s\x00A\x00\x07\x00\x01\x007\x00_\x009\x00\xbe\xff/\xff\xf6\xfe?\xff\xca\xff,\x00G\x00;\x005\x006\x00)\x00\xf2\xff\x8a\xff#\xff\n\xffR\xff\xc0\xff\x0e\x006\x00d\x00\xa4\x00\xc3\x00\x97\x00U\x00<\x00<\x00(\x00\xfe\xff\xe8\xff\xfe\xff\x0e\x00\xca\xffB\xff\xe9\xfe\x14\xff\x85\xff\xd6\xff\xf7\xff\x18\x00W\x00\x89\x00v\x00&\x00\xd5\xff\xa7\xff\x94\xff|\xffg\xff\x81\xff\xdb\xffO\x00\xb0\x00\xfb\x00/\x01-\x01\xf1\x00\xaa\x00\x80\x00T\x00\xfb\xff\x88\xff:\xff;\xff\x8a\xff\xfd\xffd\x00\x99\x00\x8e\x00]\x00!\x00\xdf\xff\xa6\xff\x9a\xff\xc6\xff\x11\x00J\x00J\x00\x05\x00\xb4\xff\xa7\xff\xec\xffP\x00\x98\x00\xb5\x00\xc5\x00\xd6\x00\xce\x00\x90\x00A\x00\x1c\x00$\x00+\x00\x05\x00\xb0\xffR\xff-\xffa\xff\xd4\xff?\x00j\x00\\\x00D\x00>\x00<\x00%\x00\xe6\xff\x99\xffa\xffP\xffj\xff\x9f\xff\xe1\xff\x1d\x00C\x00M\x00J\x00F\x00H\x00G\x002\x00\x11\x00\xfa\xff\xea\xff\xbb\xffS\xff\xe2\xfe\xc2\xfe$\xff\xd8\xff{\x00\xc6\x00\xc1\x00\xa6\x00\x97\x00\x88\x00T\x00\xeb\xffi\xff\t\xff\xf8\xfe4\xff\x99\xff\x06\x00d\x00\xac\x00\xe2\x00\n\x01\xfe\x00\x93\x00\x00\x00\xaf\xff\xd1\xff 
\x001\x00\xea\xff\x90\xffz\xff\xc7\xffB\x00\x97\x00\x98\x00U\x00\x0b\x00\xf2\xff\r\x005\x00<\x00\x14\x00\xdf\xff\xbd\xff\xa6\xff\x8f\xff\x83\xff\x9d\xff\xea\xffY\x00\xc7\x00\x06\x01\x02\x01\xc9\x00~\x00*\x00\xe4\xff\xbb\xff\xa4\xff\x83\xffU\xff<\xffT\xff\x8c\xff\xbc\xff\xc6\xff\xb4\xff\xac\xff\xc9\xff\x12\x00a\x00\x82\x00H\x00\xdd\xff\x7f\xff:\xff\t\xff\x02\xffT\xff\xf4\xff\x9d\x00\x02\x01\r\x01\xdb\x00\x9c\x00e\x00*\x00\xdf\xff\x98\xffo\xff[\xffA\xff.\xffW\xff\xc1\xff-\x00`\x00b\x00b\x00b\x00I\x00\x19\x00\xfc\xff\x07\x00\x17\x00\xfb\xff\xb0\xffq\xffr\xff\xa0\xff\xbf\xff\xc5\xff\xed\xffS\x00\xc2\x00\xed\x00\xc6\x00\x89\x00`\x00/\x00\xd2\xff_\xff\x17\xff!\xffg\xff\xcc\xffE\x00\xb4\x00\xed\x00\xd3\x00\x80\x00)\x00\xf2\xff\xd6\xff\xb1\xff|\xffH\xff&\xff\x17\xff*\xffY\xff\x90\xff\xc1\xff\xed\xff#\x00S\x00m\x00o\x00W\x00\x1d\x00\xd5\xff\xa1\xff~\xff\\\xff2\xff(\xffo\xff\xf7\xff\x81\x00\xc2\x00\xb2\x00}\x00_\x00[\x00I\x00\x01\x00\x91\xff2\xff\t\xff\x17\xffX\xff\xc8\xff@\x00\x8f\x00\xa1\x00\x8e\x00l\x000\x00\xdb\xff\x9d\xff\xb5\xff\x1c\x00y\x00m\x00\xfd\xff\x83\xffT\xff\x87\xff\xe0\xff!\x00H\x00z\x00\xc0\x00\xf8\x00\xfc\x00\xc2\x00c\x00\xfd\xff\xa8\xffj\xffC\xff9\xff[\xff\xb1\xff\'\x00\x96\x00\xe2\x00\xf7\x00\xdd\x00\xac\x00p\x00.\x00\xeb\xff\xa6\xffc\xff2\xff2\xffc\xff\xb7\xff\x11\x00`\x00\x8e\x00\x84\x00W\x00/\x00\x1c\x00\x0c\x00\xe6\xff\xab\xffp\xffG\xff?\xff^\xff\x9f\xff\xf4\xffV\x00\xc1\x00\x1b\x017\x01\x07\x01\xb3\x00n\x00N\x00G\x002\x00\xf4\xff\x9c\xffh\xff\x85\xff\xe2\xffC\x00z\x00x\x00U\x00/\x00\x18\x00\r\x00\n\x00\x11\x00\x15\x00\x00\x00\xbb\xffk\xffW\xff\x96\xff\x02\x00Q\x00f\x00`\x00_\x00c\x00`\x00_\x00f\x00t\x00\x87\x00~\x00$\x00z\xff\xee\xfe\xf0\xfe\x80\xff3\x00\xa9\x00\xd7\x00\xe6\x00\xe0\x00\xae\x00`\x00\x10\x00\xba\xffZ\xff\n\xff\xe5\xfe\xe7\xfe\xfd\xfe\'\xffg\xff\xbc\xff\x07\x00"\x00\x11\x00\x01\x00\n\x00\x12\x00\xf1\xff\xac\xff_\xff3\xff9\xffl\xff\xbb\xff$\x00\xa2\x00\t\x01!\x01\xdd\x00{\x00F\x00N\x00T\x00"\x00\xc0\xffj\xffR\xffy\xff\xb8\xff\xfb\xffD\x00\x7f\x00\x8b\x00q\x00F\x00\x13\x00\xdc\xff\xcc\xff\x08\x00h\x00\x86\x00/\x00\xad\xff\x84\xff\xce\xff-\x00Q\x00?\x000\x00=\x00_\x00z\x00}\x00m\x00c\x00c\x00K\x00\xfd\xff\x9a\xffs\xff\xaa\xff\x14\x00[\x00]\x004\x00\r\x00\x07\x00\x18\x00#\x00\x16\x00\xf8\xff\xc8\xff}\xff\x16\xff\xb8\xfe\x95\xfe\xd2\xfe\\\xff\xeb\xff6\x00-\x00\x02\x00\xe3\xff\xd4\xff\xc5\xff\xb0\xff\x94\xffh\xff2\xff\x12\xff0\xff\x83\xff\xe9\xff6\x00V\x00L\x00+\x00\x17\x00.\x00]\x00h\x00.\x00\xd4\xff\x8d\xfft\xff\x89\xff\xcb\xff4\x00\xa0\x00\xd0\x00\xab\x00\\\x00\x18\x00\xef\xff\xd4\xff\xc5\xff\xd1\xff\xea\xff\xe5\xff\xba\xff\xa1\xff\xbc\xff\xf5\xff$\x00F\x00e\x00p\x00P\x00!\x00\x1d\x00]\x00\xae\x00\xd0\x00\xa7\x00[\x00\x1a\x00\x06\x00\x19\x00.\x00&\x00\x01\x00\xd6\xff\xca\xff\xf6\xff5\x00H\x00\'\x00\x01\x00\xf6\xff\xed\xff\xb3\xffH\xff\xf1\xfe\xf5\xfeL\xff\xb1\xff\xd5\xff\xac\xffx\xff\x86\xff\xd4\xff\x1f\x005\x00\x17\x00\xcd\xffa\xff\xf3\xfe\xc2\xfe\xf6\xfer\xff\xf3\xff\\\x00\xa9\x00\xcc\x00\xbd\x00\x95\x00~\x00q\x00U\x00+\x00\x0f\x00\xfb\xff\xd6\xff\xa8\xff\xa4\xff\xeb\xffT\x00\x89\x00X\x00\xef\xff\x9b\xff\x80\xff\x9d\xff\xd6\xff\x07\x00\x10\x00\xe4\xff\xb0\xff\xb2\xff\xf5\xffO\x00\x92\x00\xb5\x00\xbc\x00\xa9\x00\x8d\x00{\x00|\x00\x87\x00\xa0\x00\xb5\x00\xa5\x00Z\x00\xfd\xff\xde\xff\x16\x00f\x00x\x00<\x00\xee\xff\xce\xff\xda\xff\xf3\xff\x11\x00;\x00]\x00\\\x00*\x00\xdf\xff\xa6\xff\x92\xff\xa1\xff\xbe\xff\xda\xff\xee\xff\xf6\xff\xf2\xff\xec\xff\xec\xff\xf3\xff\x01\x00\x12\x00\x0f\x00\xdb\xff\x86\xff\\\xff\x88\xff\xeb\xffE\x00c\x00?\x00\x08\x00\x00\x008\x00u\x00x\x00
H\x00\x18\x00\xfe\xff\xdb\xff\xa7\xff\x91\xff\xc4\xff\'\x00x\x00\x88\x00W\x00\x12\x00\xdd\xff\xcd\xff\xe3\xff\x0f\x000\x00-\x00\x07\x00\xd8\xff\xbd\xff\xbf\xff\xd9\xff\x0b\x00J\x00i\x00=\x00\xe7\xff\xb8\xff\xda\xff%\x00]\x00b\x004\x00\xf2\xff\xca\xff\xda\xff\x10\x00N\x00n\x00[\x00,\x00\x04\x00\xf6\xff\xfb\xff\xfd\xff\xf3\xff\xea\xff\xe3\xff\xd2\xff\xa9\xff\x84\xff\x8d\xff\xc8\xff\x0c\x00\'\x00\r\x00\xd3\xff\x92\xffk\xffu\xff\xb8\xff\x12\x00>\x00\x18\x00\xba\xffm\xffn\xff\xb6\xff\x04\x00)\x00,\x00"\x00\x13\x00\x01\x00\xf1\xff\xe8\xff\xf4\xff\x15\x00A\x00T\x006\x00\xfc\xff\xe1\xff\xfc\xff\'\x00 \x00\xd7\xff\x83\xffc\xff\x85\xff\xb4\xff\xc7\xff\xc1\xff\xc1\xff\xc7\xff\xc3\xff\xa8\xff\x84\xff}\xff\xb6\xff\x1c\x00n\x00t\x00<\x00\x08\x00\x05\x00,\x00_\x00\x89\x00\x9a\x00\x7f\x005\x00\xde\xff\xac\xff\xba\xff\xf1\xff#\x00$\x00\xe6\xff\x91\xff^\xffg\xff\x95\xff\xbb\xff\xd8\xff\xf4\xff\x0b\x00\x0b\x00\xf9\xff\xea\xff\xea\xff\xf7\xff\x13\x009\x00L\x00.\x00\xef\xff\xc8\xff\xe0\xff\x19\x001\x00\xfe\xff\xa7\xff{\xff\xa9\xff\x0b\x00Y\x00o\x00_\x00;\x00\t\x00\xd9\xff\xd3\xff\x0e\x00k\x00\xb3\x00\xc0\x00\x90\x00P\x005\x00Q\x00v\x00t\x00A\x00\n\x00\xe8\xff\xc8\xff\x9e\xff\x80\xff\x8e\xff\xc3\xff\r\x00K\x00Y\x006\x00\x0b\x00\x10\x00B\x00b\x00<\x00\xec\xff\xbc\xff\xd4\xff\x17\x00I\x00U\x00X\x00j\x00o\x00H\x00\xf6\xff\xb2\xff\xa7\xff\xd0\xff\x07\x00\x18\x00\xec\xff\xaf\xff\xa6\xff\xe6\xffA\x00\x80\x00\x8d\x00y\x00K\x00\x01\x00\xaa\xffn\xffk\xff\xa2\xff\xf0\xff\x13\x00\xe2\xff\x81\xffK\xffc\xff\xa0\xff\xc7\xff\xba\xff\x89\xffY\xffS\xffq\xff\xa2\xff\xdb\xff%\x00t\x00\x9b\x00v\x00\x1f\x00\xde\xff\xe4\xff*\x00l\x00g\x00"\x00\xef\xff\x08\x00W\x00\x8d\x00}\x00<\x00\xfd\xff\xce\xff\x9c\xff]\xff<\xffq\xff\xed\xff`\x00\x8b\x00n\x00>\x00)\x001\x00:\x00,\x00\x0c\x00\xed\xff\xe0\xff\xe2\xff\xf1\xff\x13\x00J\x00\x89\x00\xae\x00\xa0\x00d\x00,\x00\x1d\x00$\x00\x11\x00\xcc\xffr\xffI\xff\x7f\xff\xf3\xffU\x00r\x00c\x00\\\x00c\x00J\x00\x01\x00\xad\xff\x87\xff\xa1\xff\xed\xff8\x00F\x00\x0b\x00\xc4\xff\xb7\xff\xf9\xffL\x00\\\x00\x17\x00\xb9\xff{\xff`\xff[\xffs\xff\xbe\xff"\x00[\x00H\x00\x04\x00\xc7\xff\xa7\xff\x9c\xff\xaa\xff\xd5\xff\x0f\x00<\x00I\x00A\x004\x00#\x00\x12\x00\x0b\x00\x03\x00\xe4\xff\xac\xffz\xffs\xff\xa8\xff\x00\x00O\x00o\x00c\x00O\x00M\x00O\x00E\x002\x00#\x00\x1b\x00\x14\x00\x08\x00\x05\x00%\x00b\x00\x98\x00\xa2\x00\x82\x00Y\x00A\x00\x1c\x00\xd7\xff\x92\xffl\xffd\xffy\xff\xa1\xff\xd0\xff\xf9\xff\x15\x007\x00e\x00\x82\x00g\x00&\x00\xf3\xff\xed\xff\x02\x00\x0f\x00\xfa\xff\xcc\xff\xa8\xff\xb7\xff\xfa\xff9\x00?\x00\n\x00\xc6\xff\x95\xff\x80\xffs\xffj\xffm\xff\x99\xff\xf0\xffF\x00e\x00D\x00\x05\x00\xe0\xff\xe8\xff\r\x00-\x00>\x00B\x004\x00\x13\x00\xf3\xff\xe8\xff\xf4\xff\xf8\xff\xd4\xff\x91\xffQ\xff1\xff6\xffZ\xff\x96\xff\xe3\xff\'\x00<\x00"\x00\xf9\xff\xdb\xff\xd5\xff\xe6\xff\x04\x00\x12\x00\x00\x00\xec\xff\xfe\xff/\x00a\x00s\x00c\x00?\x00\x1e\x00\n\x00\xff\xff\xf7\xff\xea\xff\xdd\xff\xd9\xff\xe6\xff\xee\xff\xe6\xff\xd9\xff\xe9\xff!\x00c\x00\x87\x00\x85\x00i\x008\x00\xfb\xff\xc7\xff\xb6\xff\xc0\xff\xc5\xff\xb5\xff\xa8\xff\xc3\xff\xf8\xff\x1a\x00\x17\x00\x04\x00\xf3\xff\xdf\xff\xbd\xff\xa1\xff\xaf\xff\xe0\xff\r\x00 
\x00%\x007\x00]\x00}\x00t\x00B\x00\t\x00\xf8\xff\x12\x008\x00N\x00N\x00A\x00.\x00\x15\x00\xf4\xff\xcb\xff\x9e\xff{\xff\x82\xff\xc0\xff\x1d\x00d\x00n\x00D\x00\x18\x00\x13\x00$\x00.\x00\'\x00\x0e\x00\xf1\xff\xdf\xff\xde\xff\xed\xff\n\x00&\x004\x002\x00#\x00\x07\x00\xe6\xff\xcf\xff\xc7\xff\xc6\xff\xbf\xff\xaf\xff\xa9\xff\xb4\xff\xc5\xff\xd8\xff\xf2\xff\x1b\x00G\x00b\x00i\x00c\x00Y\x00O\x00C\x00>\x003\x00\x10\x00\xde\xff\xc1\xff\xcc\xff\xf0\xff\x15\x002\x00<\x001\x00\x03\x00\xaf\xff`\xffN\xff\x84\xff\xd4\xff\x0f\x00$\x00\x1d\x00\x0f\x00\x08\x00\x05\x00\xfe\xff\xfe\xff\x17\x00E\x00q\x00~\x00j\x00C\x00\x12\x00\xe0\xff\xb8\xff\xa9\xff\xbb\xff\xe4\xff\x04\x00\x03\x00\xf2\xff\xf0\xff\x00\x00\x1a\x006\x00T\x00f\x00\\\x00;\x00\x18\x00\xfb\xff\xe8\xff\xe0\xff\xed\xff\x17\x00N\x00j\x00P\x00\x11\x00\xd7\xff\xca\xff\xe6\xff\xfd\xff\xe4\xff\xa9\xff~\xff\x82\xff\xa3\xff\xc0\xff\xc9\xff\xcf\xff\xde\xff\xf7\xff\x19\x004\x006\x00\x1d\x00\xff\xff\xf9\xff\xfd\xff\xe9\xff\xb5\xff\x89\xff\x91\xff\xc5\xff\xfd\xff\r\x00\xfd\xff\xe9\xff\xe1\xff\xda\xff\xcf\xff\xd3\xff\xf3\xff&\x00F\x00;\x00\x11\x00\xeb\xff\xdf\xff\xe8\xff\xf3\xff\xff\xff\x1a\x00>\x00L\x006\x00\x0b\x00\xde\xff\xb6\xff\x9d\xff\x99\xff\xa3\xff\xae\xff\xb4\xff\xaf\xff\xab\xff\xb9\xff\xe2\xff\x1d\x00V\x00{\x00\x8f\x00\x95\x00\x91\x00x\x00A\x00\n\x00\xf7\xff\x12\x00;\x00X\x00Z\x00C\x00 \x00\x06\x00\xfc\xff\x06\x00\x1a\x00#\x00\x10\x00\xe6\xff\xc2\xff\xb7\xff\xc1\xff\xcd\xff\xd7\xff\xef\xff\x1a\x00H\x00i\x00v\x00p\x00^\x00F\x006\x005\x00,\x00\x0b\x00\xe2\xff\xca\xff\xcf\xff\xda\xff\xce\xff\xae\xff\x9b\xff\xa1\xff\xae\xff\xae\xff\xa0\xff\xa2\xff\xc0\xff\xec\xff\xf4\xff\xc6\xff\x8f\xff\x7f\xff\x9d\xff\xce\xff\xf1\xff\x05\x00\x11\x00\x17\x00\x1c\x00.\x00H\x00S\x00?\x00\x1b\x00\xfe\xff\xeb\xff\xd1\xff\xaa\xff\x8b\xff\x8c\xff\xba\xff\xff\xff>\x00]\x00X\x00>\x00#\x00\x06\x00\xdf\xff\xbb\xff\xb5\xff\xd5\xff\x06\x00)\x00+\x00\x18\x00\x0e\x00\'\x00G\x00G\x00\x1d\x00\xf2\xff\xea\xff\xfe\xff\n\x00\x01\x00\xf6\xff\xfc\xff\x15\x000\x007\x00)\x00\x11\x00\x00\x00\x06\x00#\x00P\x00\x81\x00\x9d\x00\x8d\x00P\x00\x07\x00\xd3\xff\xba\xff\xb6\xff\xc3\xff\xe2\xff\t\x00\'\x00)\x00\r\x00\xdd\xff\xb6\xff\xb4\xff\xd4\xff\xf0\xff\xe5\xff\xbd\xff\x9a\xff\x90\xff\x9b\xff\xaf\xff\xbc\xff\xc1\xff\xc5\xff\xca\xff\xd6\xff\xe6\xff\xef\xff\xea\xff\xde\xff\xd7\xff\xd3\xff\xc4\xff\xa6\xff\x8c\xff\x90\xff\xb6\xff\xf7\xffA\x00u\x00\x85\x00\x84\x00\x8c\x00\x9a\x00\x97\x00n\x000\x00\xff\xff\xf4\xff\x03\x00\t\x00\x01\x00\xff\xff\x1b\x00C\x00L\x00&\x00\xed\xff\xd0\xff\xda\xff\xef\xff\xf3\xff\xe9\xff\xe2\xff\xeb\xff\x05\x00"\x001\x000\x001\x00H\x00l\x00~\x00u\x00o\x00w\x00{\x00W\x00\r\x00\xc3\xff\xa5\xff\xbf\xff\xf3\xff\x16\x00\x1d\x00\x15\x00\t\x00\x03\x00\t\x00\x1e\x002\x00=\x00?\x004\x00\x0e\x00\xd4\xff\x9b\xff\x82\xff\x96\xff\xc9\xff\xff\xff \x00&\x00\x1b\x00\x05\x00\xed\xff\xd0\xff\xb3\xff\x9f\xff\x96\xff\x98\xff\xa3\xff\xb8\xff\xda\xff\x01\x00\x1d\x00 
\x00\x11\x00\x0e\x00(\x00R\x00r\x00o\x00M\x00$\x00\x0c\x00\xf6\xff\xce\xff\x9c\xff\x8a\xff\xb5\xff\t\x00B\x004\x00\xfb\xff\xdf\xff\xf9\xff&\x000\x00\n\x00\xda\xff\xbd\xff\xbe\xff\xd1\xff\xf2\xff\x1a\x00@\x00]\x00j\x00b\x00E\x00(\x00\x1d\x00\x1e\x00\x14\x00\xf7\xff\xd5\xff\xc7\xff\xd4\xff\xe6\xff\xe9\xff\xdf\xff\xda\xff\xeb\xff\x06\x00\x0b\x00\xed\xff\xd4\xff\xe6\xff\x1a\x00<\x00\x1e\x00\xd2\xff\x95\xff\x90\xff\xb2\xff\xd9\xff\xee\xff\xf5\xff\xfa\xff\x04\x00\x11\x00\x1e\x00\x1b\x00\x04\x00\xe5\xff\xd7\xff\xde\xff\xe2\xff\xd1\xff\xc3\xff\xd1\xff\xfb\xff#\x00.\x00#\x00\x1c\x00*\x00>\x00@\x00%\x00\xfc\xff\xd4\xff\xb6\xff\xa6\xff\xa8\xff\xba\xff\xd9\xff\xf8\xff\x08\x00\xfa\xff\xd2\xff\xb7\xff\xbf\xff\xe5\xff\x08\x00\x14\x00\x0b\x00\xf9\xff\xdb\xff\xb9\xff\xa8\xff\xbd\xff\xfa\xffA\x00k\x00e\x00D\x00/\x007\x00M\x00N\x00)\x00\xf2\xff\xcb\xff\xc7\xff\xd6\xff\xd7\xff\xc5\xff\xc0\xff\xe0\xff\x10\x00\x1e\x00\xfa\xff\xd1\xff\xd4\xff\x04\x00.\x00\x1d\x00\xe1\xff\xb1\xff\xb3\xff\xd4\xff\xec\xff\xeb\xff\xed\xff\x05\x001\x00M\x00A\x00\x17\x00\xf0\xff\xea\xff\n\x000\x005\x00\x11\x00\xec\xff\xe8\xff\x08\x00/\x00<\x003\x001\x00P\x00~\x00\x93\x00\x81\x00\\\x00>\x00.\x00\x1b\x00\xf2\xff\xc6\xff\xb1\xff\xc2\xff\xe4\xff\xfa\xff\xfb\xff\xf7\xff\xff\xff\x03\x00\xf1\xff\xd9\xff\xd8\xff\xe7\xff\xf3\xff\xf0\xff\xf0\xff\x04\x00$\x00;\x009\x00!\x00\x0c\x00\x11\x009\x00h\x00}\x00c\x000\x00\x03\x00\xe9\xff\xd6\xff\xb5\xff\x8f\xff\x8b\xff\xb6\xff\xf2\xff\x08\x00\xf2\xff\xd5\xff\xdc\xff\x00\x00\x17\x00\x01\x00\xc8\xff\x96\xff\x8f\xff\xb3\xff\xda\xff\xe4\xff\xdc\xff\xe6\xff\x08\x00\'\x00#\x00\x0b\x00\xff\xff\x0f\x000\x00G\x00@\x00$\x00\n\x00\x0e\x00+\x00C\x00C\x007\x002\x006\x005\x00%\x00\x1a\x00#\x00;\x00E\x003\x00\x08\x00\xd7\xff\xad\xff\x91\xff\x96\xff\xb5\xff\xe0\xff\x05\x00-\x00M\x00M\x00+\x00\x08\x00\x04\x00\x1c\x00+\x00\x17\x00\xee\xff\xd2\xff\xde\xff\x04\x00#\x001\x00?\x00O\x00K\x00!\x00\xe6\xff\xbd\xff\xb3\xff\xba\xff\xc7\xff\xd3\xff\xda\xff\xdb\xff\xcd\xff\xb3\xff\x9f\xff\xa0\xff\xb7\xff\xd6\xff\xef\xff\xf7\xff\xf2\xff\xe9\xff\xe8\xff\xec\xff\xde\xff\xbc\xff\x93\xff\x81\xff\x9a\xff\xd8\xff\x11\x00\x1d\x00\x01\x00\xef\xff\xfe\xff\x1a\x00%\x00\x15\x00\xf7\xff\xe6\xff\xf2\xff\x16\x001\x00&\x00\x00\x00\xdd\xff\xd8\xff\xf2\xff\x18\x00J\x00w\x00\x8a\x00r\x00=\x00\r\x00\xee\xff\xd5\xff\xbd\xff\xaf\xff\xbf\xff\xe8\xff\x17\x004\x00>\x004\x00\x19\x00\x02\x00\x00\x00\x1b\x002\x00"\x00\xed\xff\xc0\xff\xc2\xff\xee\xff!\x00D\x00X\x00a\x00W\x008\x00\x0f\x00\xf7\xff\xf3\xff\xf6\xff\xf5\xff\xf9\xff\x02\x00\x00\x00\xec\xff\xd0\xff\xbf\xff\xbb\xff\xba\xff\xb3\xff\xa6\xff\x9a\xff\x9e\xff\xb9\xff\xdb\xff\xf1\xff\xf8\xff\xf5\xff\xe4\xff\xc9\xff\xb1\xff\xab\xff\xb0\xff\xb6\xff\xcb\xff\xfb\xff6\x00\\\x00b\x00_\x00d\x00i\x00Z\x005\x00\r\x00\xf4\xff\xe7\xff\xe9\xff\xfd\xff\x16\x00%\x00\'\x00&\x00+\x00(\x00\x14\x00\xf9\xff\xe3\xff\xd9\xff\xd5\xff\xd0\xff\xca\xff\xc7\xff\xce\xff\xe2\xff\t\x00<\x00f\x00y\x00t\x00e\x00S\x007\x00\x10\x00\xec\xff\xe3\xff\xfd\xff,\x00X\x00k\x00_\x00<\x00\n\x00\xe3\xff\xd8\xff\xe6\xff\xf5\xff\xfb\xff\xfa\xff\xf7\xff\xf6\xff\xee\xff\xdb\xff\xca\xff\xcc\xff\xe0\xff\xf2\xff\xf3\xff\xe8\xff\xdd\xff\xda\xff\xdf\xff\xe9\xff\xf4\xff\xfd\xff\xfc\xff\xee\xff\xdb\xff\xd1\xff\xd2\xff\xd3\xff\xce\xff\xc1\xff\xb9\xff\xc6\xff\x01\x00\\\x00\x8c\x00u\x00=\x00\x0f\x00\xfb\xff\xf5\xff\xed\xff\xde\xff\xd4\xff\xdd\xff\xf7\xff\x19\x00;\x00I\x00@\x00(\x00\x17\x00\x1c\x00"\x00\n\x00\xd3\xff\xa4\xff\xa0\xff\xc0\xff\xe9\xff\x0c\x00(\x00;\x00:\x00/\x00+\x000\x00&\x00\x06\x00\xe2\xff\xd8\xff\xe9\xff\xfb\xff\xfe\xff\xf6\xff\xf3\xff\xfb\xff\x06
\x00\x10\x00\x18\x00!\x00\'\x00\x18\x00\xf9\xff\xe0\xff\xd5\xff\xd7\xff\xdf\xff\xec\xff\x00\x00\x13\x00\x12\x00\xfa\xff\xe4\xff\xe9\xff\xfb\xff\xfd\xff\xe8\xff\xd9\xff\xe9\xff\t\x00\x0f\x00\xea\xff\xb7\xff\xa1\xff\xb6\xff\xe0\xff\xfb\xff\xfa\xff\xee\xff\xf1\xff\x10\x00=\x00[\x00O\x00(\x00\x07\x00\x00\x00\x05\x00\x01\x00\xf5\xff\xea\xff\xe7\xff\xec\xff\xf2\xff\xf8\xff\x03\x00\x11\x00\x15\x00\x07\x00\xf0\xff\xda\xff\xcc\xff\xc4\xff\xc0\xff\xc0\xff\xc8\xff\xe4\xff\x1b\x00R\x00e\x00L\x00)\x00\x1c\x005\x00Z\x00g\x00M\x00!\x00\xfb\xff\xf2\xff\x01\x00\x12\x00\x0b\x00\xee\xff\xd0\xff\xc9\xff\xd6\xff\xe2\xff\xe5\xff\xdb\xff\xce\xff\xca\xff\xdf\xff\xf7\xff\xef\xff\xc5\xff\xa1\xff\xa5\xff\xc9\xff\xed\xff\x06\x00\x1d\x001\x008\x00.\x00\x1e\x00\x18\x00\x12\x00\x00\x00\xe5\xff\xda\xff\xeb\xff\x0b\x00\x1f\x00\x16\x00\xf9\xff\xea\xff\x02\x00)\x00A\x00@\x003\x00&\x00\x1b\x00\x03\x00\xd9\xff\xb8\xff\xb7\xff\xd2\xff\xfb\xff\x1b\x00(\x00\x1f\x00\x0c\x00\xf8\xff\xec\xff\xe7\xff\xe3\xff\xe6\xff\xf1\xff\x03\x00\x05\x00\xf8\xff\xe8\xff\xf0\xff\x10\x000\x008\x00&\x00\x18\x00#\x00=\x00K\x00=\x00 \x00\x0b\x00\x04\x00\x04\x00\x03\x00\x00\x00\xf8\xff\xed\xff\xe5\xff\xe2\xff\xe2\xff\xe6\xff\xef\xff\x04\x00\x1a\x00.\x005\x00$\x00\xf3\xff\xb8\xff\x9b\xff\xab\xff\xd3\xff\xf5\xff\x06\x00\x0b\x00\x03\x00\xfe\xff\x02\x00\r\x00\x16\x00\x18\x00\x08\x00\xe0\xff\xb4\xff\xa5\xff\xb8\xff\xd1\xff\xda\xff\xe3\xff\t\x00G\x00v\x00|\x00`\x00;\x00\x1c\x00\xff\xff\xe3\xff\xd4\xff\xdd\xff\xff\xff"\x000\x00\x1f\x00\xff\xff\xec\xff\xeb\xff\xee\xff\xe8\xff\xdf\xff\xdb\xff\xdb\xff\xdd\xff\xdc\xff\xd1\xff\xc0\xff\xbe\xff\xe0\xff\x1d\x00J\x00J\x00,\x00\x10\x00\x15\x003\x00G\x00=\x00*\x00\x1f\x00\x1f\x00 \x00\x1e\x00\x14\x00\xf8\xff\xd1\xff\xad\xff\xa2\xff\xb7\xff\xdd\xff\xf7\xff\xfb\xff\xf9\xff\r\x00*\x00)\x00\xf7\xff\xb9\xff\xa5\xff\xc4\xff\xf3\xff\x13\x00\x1d\x00\x15\x00\x0c\x00\x14\x001\x00S\x00^\x00D\x00\x11\x00\xd7\xff\xab\xff\x9b\xff\xa5\xff\xbc\xff\xdb\xff\x03\x00,\x00D\x00J\x00=\x00\'\x00\x0f\x00\x04\x00\x02\x00\xf7\xff\xe7\xff\xe2\xff\xef\xff\xff\xff\xfc\xff\xef\xff\xf2\xff\t\x00\x1b\x00\x12\x00\xf9\xff\xe9\xff\xea\xff\xed\xff\xe7\xff\xe2\xff\xea\xff\xff\xff\x10\x00\x15\x00\x1d\x00.\x00A\x00D\x008\x003\x00=\x00@\x00/\x00\x14\x00\x05\x00\n\x00\x12\x00\x07\x00\xf1\xff\xd8\xff\xbe\xff\xa5\xff\x9e\xff\xb3\xff\xdc\xff\xfc\xff\x05\x00\x05\x00\x18\x003\x00/\x00\x04\x00\xd8\xff\xcf\xff\xe1\xff\xef\xff\xf2\xff\xf9\xff\x0f\x00(\x00;\x00C\x00?\x00/\x00\x10\x00\xe8\xff\xc8\xff\xbc\xff\xc5\xff\xd3\xff\xdc\xff\xe6\xff\xf7\xff\n\x00\x15\x00\x19\x00\x1a\x00\x16\x00\x10\x00\x0c\x00\x10\x00\x11\x00\x07\x00\xeb\xff\xcb\xff\xb5\xff\xb5\xff\xca\xff\xe6\xff\xf6\xff\xf7\xff\xf2\xff\xf1\xff\xf4\xff\xf8\xff\xfd\xff\xfc\xff\xf1\xff\xe3\xff\xd8\xff\xd9\xff\xe2\xff\xfb\xff\x1f\x00:\x00C\x00>\x005\x004\x007\x009\x002\x00&\x00\x1c\x00\x16\x00\x0b\x00\xe9\xff\xb5\xff\x8d\xff\x96\xff\xca\xff\x04\x00 \x00\x17\x00\x08\x00\x11\x000\x00;\x00\x1a\x00\xe4\xff\xc5\xff\xc8\xff\xdd\xff\xee\xff\xfd\xff\x15\x00,\x004\x002\x00.\x00/\x00-\x00 \x00\x06\x00\xec\xff\xd7\xff\xca\xff\xbf\xff\xb8\xff\xc0\xff\xdd\xff\x05\x00(\x00:\x00?\x008\x00-\x00"\x00\x1b\x00\x10\x00\x01\x00\xeb\xff\xd5\xff\xcd\xff\xd2\xff\xd4\xff\xd1\xff\xd0\xff\xdb\xff\xef\xff\xfa\xff\xf9\xff\xf0\xff\xec\xff\xec\xff\xe6\xff\xd5\xff\xbe\xff\xb4\xff\xc9\xff\xf4\xff\x1f\x005\x009\x00@\x00R\x00d\x00j\x00\\\x00D\x00,\x00\x12\x00\xf7\xff\xda\xff\xbe\xff\xa3\xff\x9a\xff\xac\xff\xd6\xff\x00\x00\r\x00\xff\xff\xf6\xff\x06\x00\x1e\x00 
\x00\x08\x00\xf2\xff\xf0\xff\xf5\xff\xf1\xff\xed\xff\x03\x00.\x00R\x00[\x00S\x00Q\x00S\x00K\x000\x00\n\x00\xe9\xff\xca\xff\xae\xff\x9a\xff\x97\xff\xb5\xff\xee\xff+\x00H\x00<\x00\x1e\x00\n\x00\x0b\x00\x11\x00\x11\x00\x06\x00\xfc\xff\xf4\xff\xeb\xff\xe0\xff\xd8\xff\xd9\xff\xe5\xff\xf4\xff\xfc\xff\xfc\xff\xf7\xff\xf6\xff\xfd\xff\x05\x00\x06\x00\xfc\xff\xe9\xff\xd6\xff\xd0\xff\xdc\xff\xf2\xff\xff\xff\x00\x00\x07\x00$\x00I\x00_\x00W\x00?\x00$\x00\x08\x00\xec\xff\xd4\xff\xc2\xff\xb4\xff\xab\xff\xb1\xff\xcf\xff\xf4\xff\x0e\x00\x12\x00\x14\x00+\x00M\x00Y\x00=\x00\n\x00\xe7\xff\xdd\xff\xe0\xff\xe0\xff\xdf\xff\xeb\xff\x07\x00$\x00;\x00G\x00H\x00@\x00.\x00\x14\x00\xf4\xff\xd6\xff\xbb\xff\xa8\xff\xa3\xff\xb7\xff\xe7\xff!\x00M\x00U\x00G\x00:\x006\x00/\x00\x17\x00\xfb\xff\xf0\xff\xf2\xff\xf1\xff\xe2\xff\xcd\xff\xc3\xff\xc7\xff\xd4\xff\xe0\xff\xe5\xff\xe4\xff\xe4\xff\xea\xff\xf6\xff\xff\xff\xff\xff\xf8\xff\xeb\xff\xde\xff\xd6\xff\xdf\xff\xf5\xff\x0c\x00\x1f\x003\x00F\x00O\x00K\x00C\x00C\x00C\x003\x00\x0e\x00\xe6\xff\xc8\xff\xb5\xff\xa9\xff\xa9\xff\xb8\xff\xd1\xff\xeb\xff\xff\xff\x15\x000\x00E\x00=\x00\x17\x00\xeb\xff\xd9\xff\xdd\xff\xe1\xff\xe0\xff\xe8\xff\t\x005\x00U\x00^\x00\\\x00Z\x00V\x00E\x00+\x00\x08\x00\xde\xff\xb2\xff\x8f\xff\x89\xff\xa6\xff\xd4\xff\xfb\xff\x0e\x00\x11\x00\x11\x00\x1b\x00\'\x00/\x00#\x00\x04\x00\xde\xff\xc1\xff\xb5\xff\xbc\xff\xd2\xff\xee\xff\x04\x00\r\x00\x11\x00\x12\x00\x0e\x00\n\x00\x02\x00\xf7\xff\xea\xff\xe3\xff\xde\xff\xd6\xff\xc5\xff\xba\xff\xc4\xff\xde\xff\xf5\xff\x01\x00\x0b\x00\x1a\x000\x00E\x00M\x00A\x00$\x00\x02\x00\xe4\xff\xca\xff\xb8\xff\xb1\xff\xba\xff\xd5\xff\xfa\xff\x19\x00$\x00%\x000\x00E\x00O\x00?\x00\x16\x00\xf3\xff\xe5\xff\xea\xff\xed\xff\xf0\xff\xf9\xff\x05\x00\n\x00\x06\x00\x0b\x00 \x00<\x00I\x00=\x00!\x00\xfc\xff\xd4\xff\xb0\xff\x9b\xff\x9e\xff\xba\xff\xe3\xff\x0f\x00,\x006\x006\x00@\x00R\x00T\x007\x00\x06\x00\xd9\xff\xba\xff\xac\xff\xae\xff\xbb\xff\xcd\xff\xe0\xff\xf0\xff\xf8\xff\xf8\xff\xf7\xff\xfd\xff\x0c\x00\x16\x00\r\x00\xf4\xff\xd5\xff\xbf\xff\xbd\xff\xd1\xff\xf7\xff\x15\x00\x1e\x00\x1c\x00$\x00=\x00Y\x00_\x00K\x00\'\x00\x04\x00\xf1\xff\xe6\xff\xd6\xff\xc5\xff\xc4\xff\xe0\xff\n\x00%\x00%\x00\x1b\x00\x1f\x009\x00V\x00W\x007\x00\x0e\x00\xf5\xff\xf4\xff\xfa\xff\x00\x00\x04\x00\r\x00\x1d\x00*\x00(\x00\x1e\x00\x1b\x00(\x00=\x00G\x00@\x00 
[binary payload omitted: this portion of the patch is raw escaped bytes (apparently little-endian 16-bit sample data from a test fixture) with no human-readable content]
ff\x00\x00\x01\x00\x02\x00\x02\x00\x03\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xfe\xff\x00\x00\x01\x00\x03\x00\x03\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x03\x00\x02\x00\x02\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xfd\xff\xfc\xff\xfd\xff\xfd\xff\xfb\xff\xfd\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xfe\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfe\xff\xff\xff\xfe\xff\xfd\xff\xfc\xff\xfc\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\x01\x00\x03\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x00\x00\xff\xff\xff\xff\x00\x00\x02\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x02\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x02\x00\x03\x00\x03\x00\x02\x00\x02\x00\x01\x00\x01\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x01\x00\x02\x00\x02\x00\x01\x00\x02\x00\x02\x00\x01\x00\x03\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\
x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x03\x00\x03\x00\x02\x00\x03\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x01\x00\x02\x00\x02\x00\x02\x00\x03\x00\x02\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xfd\xff\xfd\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x03\x00\x02\x00\x02\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x02\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\x00
\x00\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x03\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf
f\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x02\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x
01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x03\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\
x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\x
ff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xf
f\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x
00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x0
0\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x0
0\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\
x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x
00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\
x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01
\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xf
f\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x
00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00
[binary PCM audio snapshot bytes elided]' +# ---
diff --git a/tests/components/voip/test_binary_sensor.py b/tests/components/voip/test_binary_sensor.py index 58f1e0ea53b..44ac8e4d77f 100644 --- a/tests/components/voip/test_binary_sensor.py +++ b/tests/components/voip/test_binary_sensor.py @@ -1,10 +1,21 @@ """Test VoIP binary sensor devices.""" +from http import HTTPStatus + +import pytest + +from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN +from homeassistant.components.voip import DOMAIN from homeassistant.components.voip.devices import VoIPDevice from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component + +from tests.typing import ClientSessionGenerator +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_call_in_progress( hass: HomeAssistant, config_entry: ConfigEntry, @@ -24,3 +35,131 @@ async def test_call_in_progress( state = hass.states.get("binary_sensor.192_168_1_210_call_in_progress") assert state.state == "off" + + +@pytest.mark.usefixtures("voip_device") +async def test_assist_in_progress_disabled_by_default( + hass: HomeAssistant, + config_entry: ConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test assist in progress binary sensor is added disabled.""" + + assert not hass.states.get("binary_sensor.192_168_1_210_call_in_progress") + entity_entry = entity_registry.async_get( + "binary_sensor.192_168_1_210_call_in_progress" + ) + assert entity_entry + assert entity_entry.disabled + assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + +
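For context on the repair issue that the tests below look up (issue id of the form assist_in_progress_deprecated_<entity id>, translation key, placeholders, and data dict), here is a minimal sketch of how an integration could register such an issue with Home Assistant's issue registry. Only the issue id format, translation key, placeholders, and data keys are taken from the assertions below; the helper name and the WARNING severity are illustrative assumptions, not the VoIP integration's actual code.

# Minimal sketch (assumptions noted above), not the VoIP integration's actual code.
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er, issue_registry as ir

DOMAIN = "voip"


def _async_create_assist_in_progress_issue(
    hass: HomeAssistant, entity_entry: er.RegistryEntry
) -> None:
    """Register a fixable repair issue for a deprecated assist-in-progress entity."""
    ir.async_create_issue(
        hass,
        DOMAIN,
        # Issue id format matches the lookups in the tests below.
        f"assist_in_progress_deprecated_{entity_entry.id}",
        is_fixable=True,  # resolved through the confirm_disable_entity repair flow
        severity=ir.IssueSeverity.WARNING,  # severity is an assumption
        translation_key="assist_in_progress_deprecated",
        translation_placeholders={"integration_name": "VoIP"},
        data={
            "entity_id": entity_entry.entity_id,
            "entity_uuid": entity_entry.id,
            "integration_name": "VoIP",
        },
    )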
+@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_assist_in_progress_issue( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + voip_device: VoIPDevice, +) -> None: + """Test assist in progress binary sensor.""" + + call_in_progress_entity_id = "binary_sensor.192_168_1_210_call_in_progress" + + state = hass.states.get(call_in_progress_entity_id) + assert state is not None + + entity_entry = entity_registry.async_get(call_in_progress_entity_id) + issue = issue_registry.async_get_issue( + DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" + ) + assert issue is not None + + # Test issue goes away after disabling the entity + entity_registry.async_update_entity( + call_in_progress_entity_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + await hass.async_block_till_done() + issue = issue_registry.async_get_issue( + DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" + ) + assert issue is None + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_assist_in_progress_repair_flow( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + voip_device: VoIPDevice, +) -> None: + """Test assist in progress binary sensor deprecation issue flow.""" + + call_in_progress_entity_id = "binary_sensor.192_168_1_210_call_in_progress" + + state = hass.states.get(call_in_progress_entity_id) + assert state is not None + + entity_entry = entity_registry.async_get(call_in_progress_entity_id) + assert entity_entry.disabled_by is None + issue = issue_registry.async_get_issue( + DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" + ) + assert issue is not None + assert issue.data == { + "entity_id": call_in_progress_entity_id, + "entity_uuid": entity_entry.id, + "integration_name": "VoIP", + } + assert issue.translation_key == "assist_in_progress_deprecated" + assert issue.translation_placeholders == {"integration_name": "VoIP"} + + assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) + await hass.async_block_till_done() + await hass.async_start() + + client = await hass_client() + + resp = await client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "data_schema": [], + "description_placeholders": { + "assist_satellite_domain": "assist_satellite", + "entity_id": call_in_progress_entity_id, + "integration_name": "VoIP", + }, + "errors": None, + "flow_id": flow_id, + "handler": DOMAIN, + "last_step": None, + "preview": None, + "step_id": "confirm_disable_entity", + "type": "form", + } + + resp = await client.post(f"/api/repairs/issues/fix/{flow_id}") + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "description": None, + "description_placeholders": None, + "flow_id": flow_id, + "handler": DOMAIN, + "type": "create_entry", + } + + # Test the entity is disabled + entity_entry = entity_registry.async_get(call_in_progress_entity_id) + assert entity_entry.disabled_by is er.RegistryEntryDisabler.USER diff --git a/tests/components/voip/test_repairs.py b/tests/components/voip/test_repairs.py new file mode 100644 index 00000000000..ec2a2cfed96 --- /dev/null +++ b/tests/components/voip/test_repairs.py @@ -0,0 +1,13 @@ +"""Test VoIP repairs.""" + +import pytest + 
+from homeassistant.components.voip import repairs +from homeassistant.core import HomeAssistant + + +async def test_create_fix_flow_raises_on_unknown_issue_id(hass: HomeAssistant) -> None: + """Test create_fix_flow raises on unknown issue_id.""" + + with pytest.raises(ValueError): + await repairs.async_create_fix_flow(hass, "no_such_issue", None) diff --git a/tests/components/voip/test_select.py b/tests/components/voip/test_select.py index a9741b44081..78bb8d6c6b4 100644 --- a/tests/components/voip/test_select.py +++ b/tests/components/voip/test_select.py @@ -15,7 +15,7 @@ async def test_pipeline_select( Functionality is tested in assist_pipeline/test_select.py. This test is only to ensure it is set up. """ - state = hass.states.get("select.192_168_1_210_assist_pipeline") + state = hass.states.get("select.192_168_1_210_assistant") assert state is not None assert state.state == "preferred" diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index aab35bfd029..17af2748c1c 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -3,15 +3,27 @@ import asyncio import io from pathlib import Path -import time +from typing import Any from unittest.mock import AsyncMock, Mock, patch import wave import pytest +from syrupy.assertion import SnapshotAssertion +from voip_utils import CallInfo -from homeassistant.components import assist_pipeline, voip -from homeassistant.components.voip.devices import VoIPDevice +from homeassistant.components import assist_pipeline, assist_satellite, tts, voip +from homeassistant.components.assist_satellite import AssistSatelliteEntity + +# pylint: disable-next=hass-component-root-import +from homeassistant.components.assist_satellite.entity import AssistSatelliteState +from homeassistant.components.voip import HassVoipDatagramProtocol +from homeassistant.components.voip.assist_satellite import Tones, VoipAssistSatellite +from homeassistant.components.voip.devices import VoIPDevice, VoIPDevices +from homeassistant.components.voip.voip import PreRecordMessageProtocol, make_protocol +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_component import EntityComponent from homeassistant.setup import async_setup_component _ONE_SECOND = 16000 * 2 # 16Khz 16-bit @@ -35,33 +47,194 @@ def _empty_wav() -> bytes: return wav_io.getvalue() +def async_get_satellite_entity( + hass: HomeAssistant, domain: str, unique_id_prefix: str +) -> AssistSatelliteEntity | None: + """Get Assist satellite entity.""" + ent_reg = er.async_get(hass) + satellite_entity_id = ent_reg.async_get_entity_id( + Platform.ASSIST_SATELLITE, domain, f"{unique_id_prefix}-assist_satellite" + ) + if satellite_entity_id is None: + return None + assert not satellite_entity_id.endswith("none") + + component: EntityComponent[AssistSatelliteEntity] = hass.data[ + assist_satellite.DOMAIN + ] + return component.get_entity(satellite_entity_id) + + +async def test_is_valid_call( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + call_info: CallInfo, +) -> None: + """Test that a call is not allowed from an unknown device.""" + assert await async_setup_component(hass, "voip", {}) + protocol = HassVoipDatagramProtocol(hass, voip_devices) + assert not protocol.is_valid_call(call_info) + + ent_reg = er.async_get(hass) + allowed_call_entity_id = ent_reg.async_get_entity_id( +
"switch", voip.DOMAIN, f"{voip_device.voip_id}-allow_call" + ) + assert allowed_call_entity_id is not None + state = hass.states.get(allowed_call_entity_id) + assert state is not None + assert state.state == STATE_OFF + + # Allow calls + hass.states.async_set(allowed_call_entity_id, STATE_ON) + assert protocol.is_valid_call(call_info) + + +async def test_calls_not_allowed( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + call_info: CallInfo, + snapshot: SnapshotAssertion, +) -> None: + """Test that a pre-recorded message is played when calls aren't allowed.""" + assert await async_setup_component(hass, "voip", {}) + protocol: PreRecordMessageProtocol = make_protocol(hass, voip_devices, call_info) + assert isinstance(protocol, PreRecordMessageProtocol) + assert protocol.file_name == "problem.pcm" + + # Test the playback + done = asyncio.Event() + played_audio_bytes = b"" + + def send_audio(audio_bytes: bytes, **kwargs): + nonlocal played_audio_bytes + + # Should be problem.pcm from components/voip + played_audio_bytes = audio_bytes + done.set() + + protocol.transport = Mock() + protocol.loop_delay = 0 + with patch.object(protocol, "send_audio", send_audio): + protocol.on_chunk(bytes(_ONE_SECOND)) + + async with asyncio.timeout(1): + await done.wait() + + assert sum(played_audio_bytes) > 0 + assert played_audio_bytes == snapshot() + + +async def test_pipeline_not_found( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + call_info: CallInfo, + snapshot: SnapshotAssertion, +) -> None: + """Test that a pre-recorded message is played when a pipeline isn't found.""" + assert await async_setup_component(hass, "voip", {}) + + with patch( + "homeassistant.components.voip.voip.async_get_pipeline", return_value=None + ): + protocol: PreRecordMessageProtocol = make_protocol( + hass, voip_devices, call_info + ) + + assert isinstance(protocol, PreRecordMessageProtocol) + assert protocol.file_name == "problem.pcm" + + +async def test_satellite_prepared( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + call_info: CallInfo, + snapshot: SnapshotAssertion, +) -> None: + """Test that satellite is prepared for a call.""" + assert await async_setup_component(hass, "voip", {}) + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test", + conversation_language="en", + language="en", + name="test", + stt_engine="test", + stt_language="en", + tts_engine="test", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) + + with ( + patch( + "homeassistant.components.voip.voip.async_get_pipeline", + return_value=pipeline, + ), + ): + protocol = make_protocol(hass, voip_devices, call_info) + assert protocol == satellite + + async def test_pipeline( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, + call_info: CallInfo, ) -> None: """Test that pipeline function is called from RTP protocol.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) + voip_user_id = satellite.config_entry.data["user"] + assert voip_user_id - return 0 + # Satellite is muted until a call begins + assert satellite.state == 
AssistSatelliteState.IDLE done = asyncio.Event() # Used to test that audio queue is cleared before pipeline starts bad_chunk = bytes([1, 2, 3, 4]) - async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): + async def async_pipeline_from_audio_stream( + hass: HomeAssistant, + context: Context, + *args, + device_id: str | None, + tts_audio_output: str | dict[str, Any] | None, + **kwargs, + ): + assert context.user_id == voip_user_id assert device_id == voip_device.device_id + # voip can only stream WAV + assert tts_audio_output == { + tts.ATTR_PREFERRED_FORMAT: "wav", + tts.ATTR_PREFERRED_SAMPLE_RATE: 16000, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + } + stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: + in_command = False + async for chunk in stt_stream: # Stream will end when VAD detects end of "speech" - assert _chunk != bad_chunk + assert chunk != bad_chunk + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command # Test empty data event_callback( @@ -71,6 +244,38 @@ async def test_pipeline( ) ) + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_START, + data={"engine": "test", "metadata": {}}, + ) + ) + + assert satellite.state == AssistSatelliteState.LISTENING + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) + + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.INTENT_START, + data={ + "engine": "test", + "language": hass.config.language, + "intent_input": "fake-text", + "conversation_id": None, + "device_id": None, + }, + ) + ) + + assert satellite.state == AssistSatelliteState.PROCESSING + # Fake intent result event_callback( assist_pipeline.PipelineEvent( @@ -83,6 +288,21 @@ async def test_pipeline( ) ) + # Fake tts result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.TTS_START, + data={ + "engine": "test", + "language": hass.config.language, + "voice": "test", + "tts_input": "fake-text", + }, + ) + ) + + assert satellite.state == AssistSatelliteState.RESPONDING + # Proceed with media output event_callback( assist_pipeline.PipelineEvent( @@ -91,6 +311,18 @@ async def test_pipeline( ) ) + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.RUN_END + ) + ) + + original_tts_response_finished = satellite.tts_response_finished + + def tts_response_finished(): + original_tts_response_finished() + done.set() + async def async_get_media_source_audio( hass: HomeAssistant, media_source_id: str, @@ -100,102 +332,56 @@ async def test_pipeline( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), + patch.object(satellite, "tts_response_finished", tts_response_finished), ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - 
listening_tone_enabled=False, - processing_tone_enabled=False, - error_tone_enabled=False, - silence_seconds=assist_pipeline.vad.VadSensitivity.to_seconds("aggressive"), - ) - rtp_protocol.transport = Mock() + satellite._tones = Tones(0) + satellite.transport = Mock() + + satellite.connection_made(satellite.transport) + assert satellite.state == AssistSatelliteState.IDLE # Ensure audio queue is cleared before pipeline starts - rtp_protocol._audio_queue.put_nowait(bad_chunk) + satellite._audio_queue.put_nowait(bad_chunk) def send_audio(*args, **kwargs): - # Test finished successfully - done.set() + # Don't send audio + pass - rtp_protocol.send_audio = Mock(side_effect=send_audio) + satellite.send_audio = Mock(side_effect=send_audio) # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) - # silence (assumes aggressive VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + # silence + satellite.on_chunk(bytes(_ONE_SECOND)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): await done.wait() - -async def test_pipeline_timeout(hass: HomeAssistant, voip_device: VoIPDevice) -> None: - """Test timeout during pipeline run.""" - assert await async_setup_component(hass, "voip", {}) - - done = asyncio.Event() - - async def async_pipeline_from_audio_stream(*args, **kwargs): - await asyncio.sleep(10) - - with ( - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch( - "homeassistant.components.voip.voip.PipelineRtpDatagramProtocol._wait_for_speech", - return_value=True, - ), - ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - pipeline_timeout=0.001, - listening_tone_enabled=False, - processing_tone_enabled=False, - error_tone_enabled=False, - ) - transport = Mock(spec=["close"]) - rtp_protocol.connection_made(transport) - - # Closing the transport will cause the test to succeed - transport.close.side_effect = done.set - - # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) - - # Wait for mock pipeline to time out - async with asyncio.timeout(1): - await done.wait() + # Finished speaking + assert satellite.state == AssistSatelliteState.IDLE -async def test_stt_stream_timeout(hass: HomeAssistant, voip_device: VoIPDevice) -> None: +async def test_stt_stream_timeout( + hass: HomeAssistant, voip_devices: VoIPDevices, voip_device: VoIPDevice +) -> None: """Test timeout in STT stream during pipeline run.""" assert await async_setup_component(hass, "voip", {}) + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) + done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): @@ -205,28 +391,19 @@ async def test_stt_stream_timeout(hass: HomeAssistant, voip_device: VoIPDevice) pass with patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - audio_timeout=0.001, - listening_tone_enabled=False, - processing_tone_enabled=False, - 
error_tone_enabled=False, - ) + satellite._tones = Tones(0) + satellite._audio_chunk_timeout = 0.001 transport = Mock(spec=["close"]) - rtp_protocol.connection_made(transport) + satellite.connection_made(transport) # Closing the transport will cause the test to succeed transport.close.side_effect = done.set # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # Wait for mock pipeline to time out async with asyncio.timeout(1): @@ -235,26 +412,34 @@ async def test_stt_stream_timeout(hass: HomeAssistant, voip_device: VoIPDevice) async def test_tts_timeout( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will time out based on its length.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 - - return 0 + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: - # Stream will end when VAD detects end of "speech" - pass + in_command = False + async for chunk in stt_stream: + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) # Fake intent result event_callback( @@ -278,15 +463,7 @@ async def test_tts_timeout( tone_bytes = bytes([1, 2, 3, 4]) - def send_audio(audio_bytes, **kwargs): - if audio_bytes == tone_bytes: - # Not TTS - return - - # Block here to force a timeout in _send_tts - time.sleep(2) - - async def async_send_audio(audio_bytes, **kwargs): + async def async_send_audio(audio_bytes: bytes, **kwargs): if audio_bytes == tone_bytes: # Not TTS return @@ -303,37 +480,22 @@ async def test_tts_timeout( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - tts_extra_timeout=0.001, - listening_tone_enabled=True, - processing_tone_enabled=True, - error_tone_enabled=True, - silence_seconds=assist_pipeline.vad.VadSensitivity.to_seconds("relaxed"), - ) - rtp_protocol._tone_bytes = tone_bytes - rtp_protocol._processing_bytes = tone_bytes - rtp_protocol._error_bytes = tone_bytes - rtp_protocol.transport = Mock() - rtp_protocol.send_audio = Mock() + satellite._tts_extra_timeout = 0.001 + for tone in Tones: + satellite._tone_bytes[tone] = tone_bytes - original_send_tts = rtp_protocol._send_tts + satellite.transport = Mock() + satellite.send_audio = Mock() + + original_send_tts = satellite._send_tts async def send_tts(*args, **kwargs): # Call original then end test successfully @@ -342,17 +504,17 @@ async def test_tts_timeout( done.set() - 
rtp_protocol._async_send_audio = AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] - rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + satellite._async_send_audio = AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] + satellite._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) - # silence (assumes relaxed VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + # silence + satellite.on_chunk(bytes(_ONE_SECOND)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): @@ -361,26 +523,34 @@ async def test_tts_timeout( async def test_tts_wrong_extension( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will only stream WAV audio.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 - - return 0 + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: - # Stream will end when VAD detects end of "speech" - pass + in_command = False + async for chunk in stt_stream: + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) # Fake intent result event_callback( @@ -411,28 +581,17 @@ async def test_tts_wrong_extension( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - ) - rtp_protocol.transport = Mock() + satellite.transport = Mock() - original_send_tts = rtp_protocol._send_tts + original_send_tts = satellite._send_tts async def send_tts(*args, **kwargs): # Call original then end test successfully @@ -441,16 +600,16 @@ async def test_tts_wrong_extension( done.set() - rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + satellite._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) # silence (assumes relaxed VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + satellite.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to exhaust the audio stream async with 
asyncio.timeout(1): @@ -459,26 +618,34 @@ async def test_tts_wrong_extension( async def test_tts_wrong_wav_format( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will only stream WAV audio with a specific format.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 - - return 0 + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: - # Stream will end when VAD detects end of "speech" - pass + in_command = False + async for chunk in stt_stream: + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) # Fake intent result event_callback( @@ -516,28 +683,17 @@ async def test_tts_wrong_wav_format( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - ) - rtp_protocol.transport = Mock() + satellite.transport = Mock() - original_send_tts = rtp_protocol._send_tts + original_send_tts = satellite._send_tts async def send_tts(*args, **kwargs): # Call original then end test successfully @@ -546,16 +702,16 @@ async def test_tts_wrong_wav_format( done.set() - rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + satellite._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) # silence (assumes relaxed VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + satellite.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): @@ -564,24 +720,32 @@ async def test_tts_wrong_wav_format( async def test_empty_tts_output( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will not stream when output is empty.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 - - return 0 + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: - # Stream will end when VAD detects end of "speech" - 
pass + in_command = False + async for chunk in stt_stream: + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) # Fake intent result event_callback( @@ -605,37 +769,78 @@ async def test_empty_tts_output( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.PipelineRtpDatagramProtocol._send_tts", + "homeassistant.components.voip.assist_satellite.VoipAssistSatellite._send_tts", ) as mock_send_tts, ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - ) - rtp_protocol.transport = Mock() + satellite.transport = Mock() # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) # silence (assumes relaxed VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + satellite.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to finish async with asyncio.timeout(1): - await rtp_protocol._tts_done.wait() + await satellite._tts_done.wait() mock_send_tts.assert_not_called() + + +async def test_pipeline_error( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + snapshot: SnapshotAssertion, +) -> None: + """Test that a pipeline error causes the error tone to be played.""" + assert await async_setup_component(hass, "voip", {}) + + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) + + done = asyncio.Event() + played_audio_bytes = b"" + + async def async_pipeline_from_audio_stream(*args, **kwargs): + # Fake error + event_callback = kwargs["event_callback"] + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.ERROR, + data={"code": "error-code", "message": "error message"}, + ) + ) + + async def async_send_audio(audio_bytes: bytes, **kwargs): + nonlocal played_audio_bytes + + # Should be error.pcm from components/voip + played_audio_bytes = audio_bytes + done.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + ): + satellite._tones = Tones.ERROR + satellite.transport = Mock() + satellite._async_send_audio = AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] + + satellite.on_chunk(bytes(_ONE_SECOND)) + + # Wait for error tone to be played + async with asyncio.timeout(1): + await done.wait() + + assert sum(played_audio_bytes) > 0 + assert played_audio_bytes == snapshot() diff --git a/tests/components/volvooncall/test_config_flow.py b/tests/components/volvooncall/test_config_flow.py index 8bf8bcc7412..5268432c17e 100644 --- a/tests/components/volvooncall/test_config_flow.py +++ b/tests/components/volvooncall/test_config_flow.py @@ -153,13 +153,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) first_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - 
context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": first_entry.entry_id, - }, - ) + result = await first_entry.start_reauth_flow(hass) # the first form is just the confirmation prompt assert result["type"] is FlowResultType.FORM diff --git a/tests/components/vulcan/test_config_flow.py b/tests/components/vulcan/test_config_flow.py index 3311f3c71b2..a51d9727126 100644 --- a/tests/components/vulcan/test_config_flow.py +++ b/tests/components/vulcan/test_config_flow.py @@ -137,14 +137,13 @@ async def test_config_flow_reauth_success( mock_student.return_value = [ Student.load(load_fixture("fake_student_1.json", "vulcan")) ] - MockConfigEntry( + entry = MockConfigEntry( domain=const.DOMAIN, unique_id="0", data={"student_id": "0"}, - ).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -176,14 +175,13 @@ async def test_config_flow_reauth_without_matching_entries( mock_student.return_value = [ Student.load(load_fixture("fake_student_1.json", "vulcan")) ] - MockConfigEntry( + entry = MockConfigEntry( domain=const.DOMAIN, unique_id="0", data={"student_id": "1"}, - ).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -206,9 +204,13 @@ async def test_config_flow_reauth_with_errors( """Test reauth config flow with errors.""" mock_keystore.return_value = fake_keystore mock_account.return_value = fake_account - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} + entry = MockConfigEntry( + domain=const.DOMAIN, + unique_id="0", + data={"student_id": "0"}, ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -308,7 +310,7 @@ async def test_multiple_config_entries( unique_id="123456", data=json.loads(load_fixture("fake_config_entry_data.json", "vulcan")), ).add_to_hass(hass) - await register.register(hass, "token", "region", "000000") + await register.register("token", "region", "000000") result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -701,7 +703,7 @@ async def test_student_already_exists( | {"student_id": "0"}, ).add_to_hass(hass) - await register.register(hass, "token", "region", "000000") + await register.register("token", "region", "000000") result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} diff --git a/tests/components/wallbox/__init__.py b/tests/components/wallbox/__init__.py index f21e895b3a7..9ec10dc72aa 100644 --- a/tests/components/wallbox/__init__.py +++ b/tests/components/wallbox/__init__.py @@ -1,7 +1,6 @@ """Tests for the Wallbox integration.""" from http import HTTPStatus -import json import requests_mock @@ -14,11 +13,15 @@ from homeassistant.components.wallbox.const import ( CHARGER_CURRENT_VERSION_KEY, CHARGER_DATA_KEY, CHARGER_ENERGY_PRICE_KEY, + CHARGER_FEATURES_KEY, CHARGER_LOCKED_UNLOCKED_KEY, CHARGER_MAX_AVAILABLE_POWER_KEY, 
CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, CHARGER_NAME_KEY, CHARGER_PART_NUMBER_KEY, + CHARGER_PLAN_KEY, + CHARGER_POWER_BOOST_KEY, CHARGER_SERIAL_NUMBER_KEY, CHARGER_SOFTWARE_KEY, CHARGER_STATUS_ID_KEY, @@ -45,6 +48,8 @@ test_response = { CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E", CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"}, CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"}, + CHARGER_MAX_ICP_CURRENT_KEY: 20, + CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]}, }, } @@ -64,6 +69,8 @@ test_response_bidir = { CHARGER_PART_NUMBER_KEY: "QSP1-0-2-4-9-002-E", CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"}, CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"}, + CHARGER_MAX_ICP_CURRENT_KEY: 20, + CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]}, }, } @@ -113,7 +120,7 @@ async def setup_integration(hass: HomeAssistant, entry: MockConfigEntry) -> None ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=HTTPStatus.OK, ) @@ -136,7 +143,7 @@ async def setup_integration_bidir(hass: HomeAssistant, entry: MockConfigEntry) - ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=HTTPStatus.OK, ) @@ -161,7 +168,7 @@ async def setup_integration_connection_error( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=HTTPStatus.FORBIDDEN, ) diff --git a/tests/components/wallbox/const.py b/tests/components/wallbox/const.py index 452b3af0af8..a86ae9fc3b9 100644 --- a/tests/components/wallbox/const.py +++ b/tests/components/wallbox/const.py @@ -9,6 +9,7 @@ STATUS = "status" MOCK_NUMBER_ENTITY_ID = "number.wallbox_wallboxname_maximum_charging_current" MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID = "number.wallbox_wallboxname_energy_price" +MOCK_NUMBER_ENTITY_ICP_CURRENT_ID = "number.wallbox_wallboxname_maximum_icp_current" MOCK_LOCK_ENTITY_ID = "lock.wallbox_wallboxname_lock" MOCK_SENSOR_CHARGING_SPEED_ID = "sensor.wallbox_wallboxname_charging_speed" MOCK_SENSOR_CHARGING_POWER_ID = "sensor.wallbox_wallboxname_charging_power" diff --git a/tests/components/wallbox/test_config_flow.py b/tests/components/wallbox/test_config_flow.py index c0ff0b19c94..467e20c51c1 100644 --- a/tests/components/wallbox/test_config_flow.py +++ b/tests/components/wallbox/test_config_flow.py @@ -160,13 +160,7 @@ async def test_form_reauth(hass: HomeAssistant, entry: MockConfigEntry) -> None: status_code=200, ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -192,7 +186,15 @@ async def test_form_reauth_invalid(hass: HomeAssistant, entry: MockConfigEntry) with requests_mock.Mocker() as mock_request: mock_request.get( "https://user-api.wall-box.com/users/signin", - text='{"jwt":"fakekeyhere","refresh_token": "refresh_fakekeyhere","user_id":12345,"ttl":145656758,"refresh_token_ttl":145756758,"error":false,"status":200}', + json={ + "jwt": "fakekeyhere", + "refresh_token": "refresh_fakekeyhere", + "user_id": 12345, + "ttl": 
145656758, + "refresh_token_ttl": 145756758, + "error": False, + "status": 200, + }, status_code=200, ) mock_request.get( @@ -201,13 +203,7 @@ async def test_form_reauth_invalid(hass: HomeAssistant, entry: MockConfigEntry) status_code=200, ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/wallbox/test_init.py b/tests/components/wallbox/test_init.py index f1362489c50..b4b5a199243 100644 --- a/tests/components/wallbox/test_init.py +++ b/tests/components/wallbox/test_init.py @@ -1,7 +1,5 @@ """Test Wallbox Init Component.""" -import json - import requests_mock from homeassistant.components.wallbox.const import ( @@ -90,7 +88,7 @@ async def test_wallbox_refresh_failed_invalid_auth( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=403, ) diff --git a/tests/components/wallbox/test_lock.py b/tests/components/wallbox/test_lock.py index 637f0c827f4..1d48e53b515 100644 --- a/tests/components/wallbox/test_lock.py +++ b/tests/components/wallbox/test_lock.py @@ -1,7 +1,5 @@ """Test Wallbox Lock component.""" -import json - import pytest import requests_mock @@ -38,7 +36,7 @@ async def test_wallbox_lock_class(hass: HomeAssistant, entry: MockConfigEntry) - ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_LOCKED_UNLOCKED_KEY: False})), + json={CHARGER_LOCKED_UNLOCKED_KEY: False}, status_code=200, ) @@ -60,8 +58,6 @@ async def test_wallbox_lock_class(hass: HomeAssistant, entry: MockConfigEntry) - blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_lock_class_connection_error( hass: HomeAssistant, entry: MockConfigEntry @@ -78,7 +74,7 @@ async def test_wallbox_lock_class_connection_error( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_LOCKED_UNLOCKED_KEY: False})), + json={CHARGER_LOCKED_UNLOCKED_KEY: False}, status_code=404, ) @@ -101,8 +97,6 @@ async def test_wallbox_lock_class_connection_error( blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_lock_class_authentication_error( hass: HomeAssistant, entry: MockConfigEntry @@ -115,8 +109,6 @@ async def test_wallbox_lock_class_authentication_error( assert state is None - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_lock_class_platform_not_ready( hass: HomeAssistant, entry: MockConfigEntry @@ -128,5 +120,3 @@ async def test_wallbox_lock_class_platform_not_ready( state = hass.states.get(MOCK_LOCK_ENTITY_ID) assert state is None - - await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/wallbox/test_number.py b/tests/components/wallbox/test_number.py index 5d782224ce5..c319668c161 100644 --- a/tests/components/wallbox/test_number.py +++ b/tests/components/wallbox/test_number.py @@ -1,14 +1,15 @@ """Test Wallbox Switch component.""" -import json - import pytest import requests_mock from homeassistant.components.input_number import ATTR_VALUE, SERVICE_SET_VALUE +from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN +from homeassistant.components.wallbox import InvalidAuth from 
homeassistant.components.wallbox.const import ( CHARGER_ENERGY_PRICE_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -20,7 +21,11 @@ from . import ( setup_integration_bidir, setup_integration_platform_not_ready, ) -from .const import MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID, MOCK_NUMBER_ENTITY_ID +from .const import ( + MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID, + MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, + MOCK_NUMBER_ENTITY_ID, +) from tests.common import MockConfigEntry @@ -40,7 +45,7 @@ async def test_wallbox_number_class( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=200, ) state = hass.states.get(MOCK_NUMBER_ENTITY_ID) @@ -56,7 +61,6 @@ async def test_wallbox_number_class( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_bidir( @@ -69,7 +73,6 @@ async def test_wallbox_number_class_bidir( state = hass.states.get(MOCK_NUMBER_ENTITY_ID) assert state.attributes["min"] == -25 assert state.attributes["max"] == 25 - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_energy_class( @@ -88,7 +91,7 @@ async def test_wallbox_number_energy_class( mock_request.post( "https://api.wall-box.com/chargers/config/12345", - json=json.loads(json.dumps({CHARGER_ENERGY_PRICE_KEY: 1.1})), + json={CHARGER_ENERGY_PRICE_KEY: 1.1}, status_code=200, ) @@ -101,7 +104,6 @@ async def test_wallbox_number_energy_class( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_connection_error( @@ -119,7 +121,7 @@ async def test_wallbox_number_class_connection_error( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=404, ) @@ -133,7 +135,6 @@ async def test_wallbox_number_class_connection_error( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_energy_price_connection_error( @@ -151,7 +152,7 @@ async def test_wallbox_number_class_energy_price_connection_error( ) mock_request.post( "https://api.wall-box.com/chargers/config/12345", - json=json.loads(json.dumps({CHARGER_ENERGY_PRICE_KEY: 1.1})), + json={CHARGER_ENERGY_PRICE_KEY: 1.1}, status_code=404, ) @@ -165,7 +166,6 @@ async def test_wallbox_number_class_energy_price_connection_error( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_energy_price_auth_error( @@ -183,7 +183,7 @@ async def test_wallbox_number_class_energy_price_auth_error( ) mock_request.post( "https://api.wall-box.com/chargers/config/12345", - json=json.loads(json.dumps({CHARGER_ENERGY_PRICE_KEY: 1.1})), + json={CHARGER_ENERGY_PRICE_KEY: 1.1}, status_code=403, ) @@ -197,7 +197,6 @@ async def test_wallbox_number_class_energy_price_auth_error( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_platform_not_ready( @@ -211,4 +210,95 @@ async def test_wallbox_number_class_platform_not_ready( assert state is None - await hass.config_entries.async_unload(entry.entry_id) + +async def test_wallbox_number_class_icp_energy( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test 
wallbox sensor class.""" + + await setup_integration(hass, entry) + + with requests_mock.Mocker() as mock_request: + mock_request.get( + "https://user-api.wall-box.com/users/signin", + json=authorisation_response, + status_code=200, + ) + + mock_request.post( + "https://api.wall-box.com/chargers/config/12345", + json={CHARGER_MAX_ICP_CURRENT_KEY: 10}, + status_code=200, + ) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + +async def test_wallbox_number_class_icp_energy_auth_error( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test wallbox sensor class.""" + + await setup_integration(hass, entry) + + with requests_mock.Mocker() as mock_request: + mock_request.get( + "https://user-api.wall-box.com/users/signin", + json=authorisation_response, + status_code=200, + ) + mock_request.post( + "https://api.wall-box.com/chargers/config/12345", + json={CHARGER_MAX_ICP_CURRENT_KEY: 10}, + status_code=403, + ) + + with pytest.raises(InvalidAuth): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + +async def test_wallbox_number_class_icp_energy_connection_error( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test wallbox sensor class.""" + + await setup_integration(hass, entry) + + with requests_mock.Mocker() as mock_request: + mock_request.get( + "https://user-api.wall-box.com/users/signin", + json=authorisation_response, + status_code=200, + ) + mock_request.post( + "https://api.wall-box.com/chargers/config/12345", + json={CHARGER_MAX_ICP_CURRENT_KEY: 10}, + status_code=404, + ) + + with pytest.raises(ConnectionError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, + ATTR_VALUE: 10, + }, + blocking=True, + ) diff --git a/tests/components/wallbox/test_sensor.py b/tests/components/wallbox/test_sensor.py index 5a8b3c290c1..69d0cc57340 100644 --- a/tests/components/wallbox/test_sensor.py +++ b/tests/components/wallbox/test_sensor.py @@ -30,5 +30,3 @@ async def test_wallbox_sensor_class( # Test round with precision '0' works state = hass.states.get(MOCK_SENSOR_MAX_AVAILABLE_POWER) assert state.state == "25.0" - - await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/wallbox/test_switch.py b/tests/components/wallbox/test_switch.py index d06251db003..b7c3a81dc73 100644 --- a/tests/components/wallbox/test_switch.py +++ b/tests/components/wallbox/test_switch.py @@ -1,7 +1,5 @@ """Test Wallbox Lock component.""" -import json - import pytest import requests_mock @@ -36,7 +34,7 @@ async def test_wallbox_switch_class( ) mock_request.post( "https://api.wall-box.com/v3/chargers/12345/remote-action", - json=json.loads(json.dumps({CHARGER_STATUS_ID_KEY: 193})), + json={CHARGER_STATUS_ID_KEY: 193}, status_code=200, ) @@ -58,8 +56,6 @@ async def test_wallbox_switch_class( blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_switch_class_connection_error( hass: HomeAssistant, entry: MockConfigEntry @@ -76,7 +72,7 @@ async def test_wallbox_switch_class_connection_error( ) mock_request.post( "https://api.wall-box.com/v3/chargers/12345/remote-action", - json=json.loads(json.dumps({CHARGER_STATUS_ID_KEY: 193})), + json={CHARGER_STATUS_ID_KEY: 193}, status_code=404, ) @@ -99,8 +95,6 @@ 
async def test_wallbox_switch_class_connection_error( blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_switch_class_authentication_error( hass: HomeAssistant, entry: MockConfigEntry @@ -117,7 +111,7 @@ async def test_wallbox_switch_class_authentication_error( ) mock_request.post( "https://api.wall-box.com/v3/chargers/12345/remote-action", - json=json.loads(json.dumps({CHARGER_STATUS_ID_KEY: 193})), + json={CHARGER_STATUS_ID_KEY: 193}, status_code=403, ) @@ -139,5 +133,3 @@ async def test_wallbox_switch_class_authentication_error( }, blocking=True, ) - - await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/water_heater/test_init.py b/tests/components/water_heater/test_init.py index 4e0f860366c..78efd94ef8e 100644 --- a/tests/components/water_heater/test_init.py +++ b/tests/components/water_heater/test_init.py @@ -8,10 +8,7 @@ from unittest.mock import AsyncMock, MagicMock import pytest import voluptuous as vol -from homeassistant.components import water_heater from homeassistant.components.water_heater import ( - ATTR_OPERATION_LIST, - ATTR_OPERATION_MODE, DOMAIN, SERVICE_SET_OPERATION_MODE, SET_TEMPERATURE_SCHEMA, @@ -30,8 +27,6 @@ from tests.common import ( MockModule, MockPlatform, async_mock_service, - help_test_all, - import_and_test_deprecated_constant_enum, mock_integration, mock_platform, ) @@ -209,51 +204,3 @@ async def test_operation_mode_validation( ) await hass.async_block_till_done() water_heater_entity.set_operation_mode.assert_has_calls([mock.call("eco")]) - - -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(water_heater) - - -@pytest.mark.parametrize( - ("enum"), - [ - WaterHeaterEntityFeature.TARGET_TEMPERATURE, - WaterHeaterEntityFeature.OPERATION_MODE, - WaterHeaterEntityFeature.AWAY_MODE, - ], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: WaterHeaterEntityFeature, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, water_heater, enum, "SUPPORT_", "2025.1" - ) - - -def test_deprecated_supported_features_ints( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test deprecated supported features ints.""" - - class MockWaterHeaterEntity(WaterHeaterEntity): - _attr_operation_list = ["mode1", "mode2"] - _attr_temperature_unit = UnitOfTemperature.CELSIUS - _attr_current_operation = "mode1" - _attr_supported_features = WaterHeaterEntityFeature.OPERATION_MODE.value - - entity = MockWaterHeaterEntity() - entity.hass = hass - assert entity.supported_features_compat is WaterHeaterEntityFeature(2) - assert "MockWaterHeaterEntity" in caplog.text - assert "is using deprecated supported features values" in caplog.text - assert "Instead it should use" in caplog.text - assert "WaterHeaterEntityFeature.OPERATION_MODE" in caplog.text - caplog.clear() - assert entity.supported_features_compat is WaterHeaterEntityFeature(2) - assert "is using deprecated supported features values" not in caplog.text - assert entity.state_attributes[ATTR_OPERATION_MODE] == "mode1" - assert entity.capability_attributes[ATTR_OPERATION_LIST] == ["mode1", "mode2"] diff --git a/tests/components/watergate/__init__.py b/tests/components/watergate/__init__.py new file mode 100644 index 00000000000..c69129e4720 --- /dev/null +++ b/tests/components/watergate/__init__.py @@ -0,0 +1,11 @@ +"""Tests for the Watergate integration.""" + +from homeassistant.core import HomeAssistant + + +async def 
init_integration(hass: HomeAssistant, mock_entry) -> None: + """Set up the Watergate integration in Home Assistant.""" + mock_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/watergate/conftest.py b/tests/components/watergate/conftest.py new file mode 100644 index 00000000000..d29b90431a4 --- /dev/null +++ b/tests/components/watergate/conftest.py @@ -0,0 +1,77 @@ +"""Fixtures for watergate platform tests.""" + +from collections.abc import Generator + +import pytest + +from homeassistant.components.watergate.const import DOMAIN +from homeassistant.const import CONF_IP_ADDRESS + +from .const import ( + DEFAULT_DEVICE_STATE, + DEFAULT_SERIAL_NUMBER, + MOCK_CONFIG, + MOCK_WEBHOOK_ID, +) + +from tests.common import AsyncMock, MockConfigEntry, patch + + +@pytest.fixture +def mock_watergate_client() -> Generator[AsyncMock]: + """Fixture to mock WatergateLocalApiClient.""" + with ( + patch( + "homeassistant.components.watergate.WatergateLocalApiClient", + autospec=True, + ) as mock_client_main, + patch( + "homeassistant.components.watergate.config_flow.WatergateLocalApiClient", + new=mock_client_main, + ), + ): + mock_client_instance = mock_client_main.return_value + + mock_client_instance.async_get_device_state = AsyncMock( + return_value=DEFAULT_DEVICE_STATE + ) + yield mock_client_instance + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.watergate.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_webhook_id_generation() -> Generator[None]: + """Fixture to mock webhook_id generation.""" + with patch( + "homeassistant.components.watergate.config_flow.webhook_generate_id", + return_value=MOCK_WEBHOOK_ID, + ): + yield + + +@pytest.fixture +def mock_entry() -> MockConfigEntry: + """Create full mocked entry to be used in config_flow tests.""" + return MockConfigEntry( + domain=DOMAIN, + title="Sonic", + data=MOCK_CONFIG, + entry_id="12345", + unique_id=DEFAULT_SERIAL_NUMBER, + ) + + +@pytest.fixture +def user_input() -> dict[str, str]: + """Create user input for config_flow tests.""" + return { + CONF_IP_ADDRESS: "192.168.1.100", + } diff --git a/tests/components/watergate/const.py b/tests/components/watergate/const.py new file mode 100644 index 00000000000..4297b3321ad --- /dev/null +++ b/tests/components/watergate/const.py @@ -0,0 +1,27 @@ +"""Constants for the Watergate tests.""" + +from watergate_local_api.models import DeviceState + +from homeassistant.const import CONF_IP_ADDRESS, CONF_NAME, CONF_WEBHOOK_ID + +MOCK_WEBHOOK_ID = "webhook_id" + +MOCK_CONFIG = { + CONF_NAME: "Sonic", + CONF_IP_ADDRESS: "http://localhost", + CONF_WEBHOOK_ID: MOCK_WEBHOOK_ID, +} + +DEFAULT_SERIAL_NUMBER = "a63182948ce2896a" + +DEFAULT_DEVICE_STATE = DeviceState( + "open", + "on", + True, + True, + "battery", + "1.0.0", + 100, + {"volume": 1.2, "duration": 100}, + DEFAULT_SERIAL_NUMBER, +) diff --git a/tests/components/watergate/snapshots/test_valve.ambr b/tests/components/watergate/snapshots/test_valve.ambr new file mode 100644 index 00000000000..1df1a0c748d --- /dev/null +++ b/tests/components/watergate/snapshots/test_valve.ambr @@ -0,0 +1,16 @@ +# serializer version: 1 +# name: test_change_valve_state_snapshot + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Sonic', + 'supported_features': , + }), + 
'context': , + 'entity_id': 'valve.sonic', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/watergate/test_config_flow.py b/tests/components/watergate/test_config_flow.py new file mode 100644 index 00000000000..176047f5e23 --- /dev/null +++ b/tests/components/watergate/test_config_flow.py @@ -0,0 +1,107 @@ +"""Tests for the Watergate config flow.""" + +from collections.abc import Generator + +import pytest +from watergate_local_api import WatergateApiException + +from homeassistant.components.watergate.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID +from homeassistant.data_entry_flow import FlowResultType + +from .const import DEFAULT_DEVICE_STATE, DEFAULT_SERIAL_NUMBER, MOCK_WEBHOOK_ID + +from tests.common import AsyncMock, HomeAssistant, MockConfigEntry + + +async def test_step_user_form( + hass: HomeAssistant, + mock_watergate_client: Generator[AsyncMock], + mock_webhook_id_generation: Generator[None], + user_input: dict[str, str], +) -> None: + """Test checking if registration form works end to end.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert CONF_IP_ADDRESS in result["data_schema"].schema + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Sonic" + assert result["data"] == {**user_input, CONF_WEBHOOK_ID: MOCK_WEBHOOK_ID} + assert result["result"].unique_id == DEFAULT_SERIAL_NUMBER + + +@pytest.mark.parametrize( + "client_result", + [AsyncMock(return_value=None), AsyncMock(side_effect=WatergateApiException)], +) +async def test_step_user_form_with_exception( + hass: HomeAssistant, + mock_watergate_client: Generator[AsyncMock], + user_input: dict[str, str], + client_result: AsyncMock, + mock_webhook_id_generation: Generator[None], +) -> None: + """Test checking if errors will be displayed when Exception is thrown while checking device state.""" + mock_watergate_client.async_get_device_state = client_result + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"][CONF_IP_ADDRESS] == "cannot_connect" + + mock_watergate_client.async_get_device_state = AsyncMock( + return_value=DEFAULT_DEVICE_STATE + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Sonic" + assert result["data"] == {**user_input, CONF_WEBHOOK_ID: MOCK_WEBHOOK_ID} + + +async def test_abort_if_id_is_not_unique( + hass: HomeAssistant, + mock_watergate_client: Generator[AsyncMock], + mock_entry: MockConfigEntry, + user_input: dict[str, str], +) -> None: + """Test checking if we will inform user that this entity is already registered.""" + mock_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + 
assert CONF_IP_ADDRESS in result["data_schema"].schema + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/watergate/test_init.py b/tests/components/watergate/test_init.py new file mode 100644 index 00000000000..71eb99d6470 --- /dev/null +++ b/tests/components/watergate/test_init.py @@ -0,0 +1,81 @@ +"""Tests for the Watergate integration init module.""" + +from collections.abc import Generator +from unittest.mock import patch + +from homeassistant.components.valve import ValveState +from homeassistant.components.watergate.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import init_integration +from .const import MOCK_WEBHOOK_ID + +from tests.common import ANY, AsyncMock, MockConfigEntry +from tests.typing import ClientSessionGenerator + + +async def test_async_setup_entry( + hass: HomeAssistant, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test setting up the Watergate integration.""" + hass.config.internal_url = "http://hassio.local" + + with ( + patch("homeassistant.components.watergate.async_register") as mock_webhook, + ): + await init_integration(hass, mock_entry) + + assert mock_entry.state is ConfigEntryState.LOADED + + mock_webhook.assert_called_once_with( + hass, + DOMAIN, + "Watergate", + MOCK_WEBHOOK_ID, + ANY, + ) + mock_watergate_client.async_set_webhook_url.assert_called_once_with( + f"http://hassio.local/api/webhook/{MOCK_WEBHOOK_ID}" + ) + mock_watergate_client.async_get_device_state.assert_called_once() + + +async def test_handle_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test handling webhook events.""" + await init_integration(hass, mock_entry) + + entity_id = "valve.sonic" + + registered_entity = hass.states.get(entity_id) + assert registered_entity + assert registered_entity.state == ValveState.OPEN + + valve_change_data = { + "type": "valve", + "data": {"state": "closed"}, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=valve_change_data) + + await hass.async_block_till_done() # Ensure the webhook is processed + + assert hass.states.get(entity_id).state == ValveState.CLOSED + + valve_change_data = { + "type": "valve", + "data": {"state": "open"}, + } + + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=valve_change_data) + + await hass.async_block_till_done() # Ensure the webhook is processed + + assert hass.states.get(entity_id).state == ValveState.OPEN diff --git a/tests/components/watergate/test_valve.py b/tests/components/watergate/test_valve.py new file mode 100644 index 00000000000..b22f6967665 --- /dev/null +++ b/tests/components/watergate/test_valve.py @@ -0,0 +1,72 @@ +"""Tests for the Watergate valve platform.""" + +from collections.abc import Generator + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN, ValveState +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE +from homeassistant.core import HomeAssistant + +from . 
import init_integration + +from tests.common import AsyncMock, MockConfigEntry + + +async def test_change_valve_state_snapshot( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_watergate_client: Generator[AsyncMock], + mock_entry: MockConfigEntry, +) -> None: + """Test entities become unavailable after failed update.""" + await init_integration(hass, mock_entry) + + entity_id = "valve.sonic" + + registered_entity = hass.states.get(entity_id) + assert registered_entity + assert registered_entity.state == ValveState.OPEN + assert registered_entity == snapshot + + +async def test_change_valve_state( + hass: HomeAssistant, + mock_watergate_client: Generator[AsyncMock], + mock_entry: MockConfigEntry, +) -> None: + """Test entities become unavailable after failed update.""" + await init_integration(hass, mock_entry) + + entity_id = "valve.sonic" + + registered_entity = hass.states.get(entity_id) + assert registered_entity + assert registered_entity.state == ValveState.OPEN + + await hass.services.async_call( + VALVE_DOMAIN, + SERVICE_CLOSE_VALVE, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + registered_entity = hass.states.get(entity_id) + assert registered_entity + assert registered_entity.state == ValveState.CLOSING + + mock_watergate_client.async_set_valve_state.assert_called_once_with("closed") + mock_watergate_client.async_set_valve_state.reset_mock() + + await hass.services.async_call( + VALVE_DOMAIN, + SERVICE_OPEN_VALVE, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + registered_entity = hass.states.get(entity_id) + assert registered_entity + assert registered_entity.state == ValveState.OPENING + + mock_watergate_client.async_set_valve_state.assert_called_once_with("open") diff --git a/tests/components/watttime/snapshots/test_diagnostics.ambr b/tests/components/watttime/snapshots/test_diagnostics.ambr index 2ed35c19ad1..3cc5e1d6f66 100644 --- a/tests/components/watttime/snapshots/test_diagnostics.ambr +++ b/tests/components/watttime/snapshots/test_diagnostics.ambr @@ -18,6 +18,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'watttime', 'minor_version': 1, 'options': dict({ @@ -25,6 +27,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/watttime/test_config_flow.py b/tests/components/watttime/test_config_flow.py index f8eee6b48bf..5087717491f 100644 --- a/tests/components/watttime/test_config_flow.py +++ b/tests/components/watttime/test_config_flow.py @@ -25,6 +25,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("exc", "error"), @@ -144,21 +146,16 @@ async def test_show_form_user(hass: HomeAssistant) -> None: async def test_step_reauth( - hass: HomeAssistant, config_auth, config_coordinates, config_entry, setup_watttime + hass: HomeAssistant, + config_entry: MockConfigEntry, + setup_watttime, ) -> None: """Test a full reauth flow.""" + result = await config_entry.start_reauth_flow(hass) with patch( "homeassistant.components.watttime.async_setup_entry", return_value=True, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data={ - **config_auth, - **config_coordinates, - }, - ) result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_PASSWORD: "password"}, diff --git a/tests/components/waze_travel_time/conftest.py b/tests/components/waze_travel_time/conftest.py index c929fc219f9..c9214ed8b71 100644 --- a/tests/components/waze_travel_time/conftest.py +++ b/tests/components/waze_travel_time/conftest.py @@ -5,6 +5,7 @@ from unittest.mock import patch import pytest from pywaze.route_calculator import CalcRoutesResponse, WRCError +from homeassistant.components.waze_travel_time.config_flow import WazeConfigFlow from homeassistant.components.waze_travel_time.const import DOMAIN from homeassistant.core import HomeAssistant @@ -19,6 +20,7 @@ async def mock_config_fixture(hass: HomeAssistant, data, options): data=data, options=options, entry_id="test", + version=WazeConfigFlow.VERSION, ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/waze_travel_time/test_config_flow.py b/tests/components/waze_travel_time/test_config_flow.py index 5b1e3417bfc..9ff7509a52c 100644 --- a/tests/components/waze_travel_time/test_config_flow.py +++ b/tests/components/waze_travel_time/test_config_flow.py @@ -3,6 +3,7 @@ import pytest from homeassistant import config_entries +from homeassistant.components.waze_travel_time.config_flow import WazeConfigFlow from homeassistant.components.waze_travel_time.const import ( CONF_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS, @@ -60,18 +61,13 @@ async def test_reconfigure(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONFIG, options=DEFAULT_OPTIONS, + version=WazeConfigFlow.VERSION, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - reconfigure_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - }, - ) + reconfigure_result = await entry.start_reconfigure_flow(hass) assert reconfigure_result["type"] is FlowResultType.FORM assert reconfigure_result["step_id"] == "user" @@ -103,6 +99,7 @@ async def test_options(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONFIG, options=DEFAULT_OPTIONS, + version=WazeConfigFlow.VERSION, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -119,8 +116,8 @@ async def test_options(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "exclude", - CONF_INCL_FILTER: "include", + CONF_EXCL_FILTER: ["exclude"], + CONF_INCL_FILTER: ["include"], CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -132,8 +129,8 @@ async def test_options(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "exclude", - CONF_INCL_FILTER: "include", + CONF_EXCL_FILTER: ["exclude"], + CONF_INCL_FILTER: ["include"], CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -143,8 +140,8 @@ async def test_options(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "exclude", - CONF_INCL_FILTER: "include", + CONF_EXCL_FILTER: ["exclude"], + CONF_INCL_FILTER: ["include"], CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -209,10 +206,14 @@ async def test_invalid_config_entry( async def test_reset_filters(hass: 
HomeAssistant) -> None: """Test resetting inclusive and exclusive filters to empty string.""" options = {**DEFAULT_OPTIONS} - options[CONF_INCL_FILTER] = "test" - options[CONF_EXCL_FILTER] = "test" + options[CONF_INCL_FILTER] = ["test"] + options[CONF_EXCL_FILTER] = ["test"] config_entry = MockConfigEntry( - domain=DOMAIN, data=MOCK_CONFIG, options=options, entry_id="test" + domain=DOMAIN, + data=MOCK_CONFIG, + options=options, + entry_id="test", + version=WazeConfigFlow.VERSION, ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) @@ -228,8 +229,6 @@ async def test_reset_filters(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "", - CONF_INCL_FILTER: "", CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -240,8 +239,8 @@ async def test_reset_filters(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "", - CONF_INCL_FILTER: "", + CONF_EXCL_FILTER: [""], + CONF_INCL_FILTER: [""], CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", diff --git a/tests/components/waze_travel_time/test_init.py b/tests/components/waze_travel_time/test_init.py index 58aaa8983a7..89bccc00985 100644 --- a/tests/components/waze_travel_time/test_init.py +++ b/tests/components/waze_travel_time/test_init.py @@ -2,11 +2,32 @@ import pytest -from homeassistant.components.waze_travel_time.const import DEFAULT_OPTIONS +from homeassistant.components.waze_travel_time.const import ( + CONF_AVOID_FERRIES, + CONF_AVOID_SUBSCRIPTION_ROADS, + CONF_AVOID_TOLL_ROADS, + CONF_EXCL_FILTER, + CONF_INCL_FILTER, + CONF_REALTIME, + CONF_UNITS, + CONF_VEHICLE_TYPE, + DEFAULT_AVOID_FERRIES, + DEFAULT_AVOID_SUBSCRIPTION_ROADS, + DEFAULT_AVOID_TOLL_ROADS, + DEFAULT_FILTER, + DEFAULT_OPTIONS, + DEFAULT_REALTIME, + DEFAULT_VEHICLE_TYPE, + DOMAIN, + METRIC_UNITS, +) +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from .const import MOCK_CONFIG +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("data", "options"), @@ -23,6 +44,8 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: "destination": "location2", "vehicle_type": "car", "region": "us", + "units": "imperial", + "incl_filter": ["IncludeThis"], }, blocking=True, return_response=True, @@ -30,16 +53,66 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: assert response_data == { "routes": [ { - "distance": 300, + "distance": pytest.approx(186.4113), "duration": 150, "name": "E1337 - Teststreet", "street_names": ["E1337", "IncludeThis", "Teststreet"], }, - { - "distance": 500, - "duration": 600, - "name": "E0815 - Otherstreet", - "street_names": ["E0815", "ExcludeThis", "Otherstreet"], - }, ] } + + +@pytest.mark.usefixtures("mock_update") +async def test_migrate_entry_v1_v2(hass: HomeAssistant) -> None: + """Test successful migration of entry data.""" + mock_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + data=MOCK_CONFIG, + options={ + CONF_REALTIME: DEFAULT_REALTIME, + CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, + CONF_UNITS: METRIC_UNITS, + CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, + CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, + CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, + }, + ) + + mock_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + updated_entry = hass.config_entries.async_get_entry(mock_entry.entry_id) + + assert updated_entry.state is ConfigEntryState.LOADED + assert updated_entry.version == 2 + assert updated_entry.options[CONF_INCL_FILTER] == DEFAULT_FILTER + assert updated_entry.options[CONF_EXCL_FILTER] == DEFAULT_FILTER + + mock_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + data=MOCK_CONFIG, + options={ + CONF_REALTIME: DEFAULT_REALTIME, + CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, + CONF_UNITS: METRIC_UNITS, + CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, + CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, + CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, + CONF_INCL_FILTER: "include", + CONF_EXCL_FILTER: "exclude", + }, + ) + + mock_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + updated_entry = hass.config_entries.async_get_entry(mock_entry.entry_id) + + assert updated_entry.state is ConfigEntryState.LOADED + assert updated_entry.version == 2 + assert updated_entry.options[CONF_INCL_FILTER] == ["include"] + assert updated_entry.options[CONF_EXCL_FILTER] == ["exclude"] diff --git a/tests/components/waze_travel_time/test_sensor.py b/tests/components/waze_travel_time/test_sensor.py index e09a7199ff4..94e3a0cf9d7 100644 --- a/tests/components/waze_travel_time/test_sensor.py +++ b/tests/components/waze_travel_time/test_sensor.py @@ -3,6 +3,7 @@ import pytest from pywaze.route_calculator import WRCError +from homeassistant.components.waze_travel_time.config_flow import WazeConfigFlow from homeassistant.components.waze_travel_time.const import ( CONF_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS, @@ -74,6 +75,8 @@ async def test_sensor(hass: HomeAssistant) -> None: CONF_AVOID_TOLL_ROADS: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_FERRIES: True, + CONF_INCL_FILTER: [""], + CONF_EXCL_FILTER: [""], }, ) ], @@ -98,7 +101,8 @@ async def test_imperial(hass: HomeAssistant) -> None: CONF_AVOID_TOLL_ROADS: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_FERRIES: True, - CONF_INCL_FILTER: "IncludeThis", + CONF_INCL_FILTER: ["IncludeThis"], + CONF_EXCL_FILTER: [""], }, ) ], @@ -121,7 +125,8 @@ async def test_incl_filter(hass: HomeAssistant) -> None: CONF_AVOID_TOLL_ROADS: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_FERRIES: True, - CONF_EXCL_FILTER: "ExcludeThis", + CONF_INCL_FILTER: [""], + CONF_EXCL_FILTER: ["ExcludeThis"], }, ) ], @@ -138,7 +143,11 @@ async def test_sensor_failed_wrcerror( ) -> None: """Test that sensor update fails with log message.""" config_entry = MockConfigEntry( - domain=DOMAIN, data=MOCK_CONFIG, options=DEFAULT_OPTIONS, entry_id="test" + domain=DOMAIN, + data=MOCK_CONFIG, + options=DEFAULT_OPTIONS, + entry_id="test", + version=WazeConfigFlow.VERSION, ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/weatherflow_cloud/conftest.py b/tests/components/weatherflow_cloud/conftest.py index d83ee082b26..36b42bf24a8 100644 --- a/tests/components/weatherflow_cloud/conftest.py +++ b/tests/components/weatherflow_cloud/conftest.py @@ -113,39 +113,3 @@ def mock_api(): mock_api_class.return_value = mock_api yield mock_api - - -# -# @pytest.fixture -# def mock_api_with_lightning_error(): -# """Fixture for Mock WeatherFlowRestAPI.""" -# get_stations_response_data = StationsResponseREST.from_json( -# 
load_fixture("stations.json", DOMAIN) -# ) -# get_forecast_response_data = WeatherDataForecastREST.from_json( -# load_fixture("forecast.json", DOMAIN) -# ) -# get_observation_response_data = ObservationStationREST.from_json( -# load_fixture("station_observation_error.json", DOMAIN) -# ) -# -# data = { -# 24432: WeatherFlowDataREST( -# weather=get_forecast_response_data, -# observation=get_observation_response_data, -# station=get_stations_response_data.stations[0], -# device_observations=None, -# ) -# } -# -# with patch( -# "homeassistant.components.weatherflow_cloud.coordinator.WeatherFlowRestAPI", -# autospec=True, -# ) as mock_api_class: -# # Create an instance of AsyncMock for the API -# mock_api = AsyncMock() -# mock_api.get_all_data.return_value = data -# # Patch the class to return our mock_api instance -# mock_api_class.return_value = mock_api -# -# yield mock_api diff --git a/tests/components/weatherflow_cloud/test_config_flow.py b/tests/components/weatherflow_cloud/test_config_flow.py index 7ade007ceac..9dc5ad1322d 100644 --- a/tests/components/weatherflow_cloud/test_config_flow.py +++ b/tests/components/weatherflow_cloud/test_config_flow.py @@ -4,7 +4,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.weatherflow_cloud.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_API_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -111,15 +111,14 @@ async def test_reauth(hass: HomeAssistant, mock_get_stations_401_error) -> None: assert not await hass.config_entries.async_setup(entry.entry_id) assert entry.state is ConfigEntryState.SETUP_ERROR - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, data=None - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data={CONF_API_TOKEN: "SAME_SAME"}, + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_TOKEN: "SAME_SAME"} ) assert result["reason"] == "reauth_successful" assert result["type"] is FlowResultType.ABORT + assert entry.data[CONF_API_TOKEN] == "SAME_SAME" diff --git a/tests/components/webhook/test_init.py b/tests/components/webhook/test_init.py index af07616024a..15ec1b15ee5 100644 --- a/tests/components/webhook/test_init.py +++ b/tests/components/webhook/test_init.py @@ -9,8 +9,8 @@ from aiohttp.test_utils import TestClient import pytest from homeassistant.components import webhook -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.typing import ClientSessionGenerator, WebSocketGenerator diff --git a/tests/components/webmin/snapshots/test_diagnostics.ambr b/tests/components/webmin/snapshots/test_diagnostics.ambr index a56d6b35641..c64fa212a98 100644 --- a/tests/components/webmin/snapshots/test_diagnostics.ambr +++ b/tests/components/webmin/snapshots/test_diagnostics.ambr @@ -237,6 +237,8 @@ 'data': dict({ }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 
'webmin', 'entry_id': '**REDACTED**', 'minor_version': 1, @@ -251,6 +253,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/webmin/snapshots/test_sensor.ambr b/tests/components/webmin/snapshots/test_sensor.ambr index 8803ee684ae..6af768d63a8 100644 --- a/tests/components/webmin/snapshots/test_sensor.ambr +++ b/tests/components/webmin/snapshots/test_sensor.ambr @@ -1,688 +1,4 @@ # serializer version: 1 -# name: test_sensor[sensor.192_168_1_1_data_size-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_total', - 'unique_id': '12:34:56:78:9a:bc_disk_total', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '16861.5074996948', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_10-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_10', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_total', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_total', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_10-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_10', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5543.82404708862', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_11-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, 
- 'entity_id': 'sensor.192_168_1_1_data_size_11', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_11-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_11', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4638.98014068604', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_12-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_12', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_free', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_free', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_12-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_12', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '625.379589080811', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_free', - 'unique_id': '12:34:56:78:9a:bc_disk_free', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 
'entity_id': 'sensor.192_168_1_1_data_size_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7217.11803817749', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_used', - 'unique_id': '12:34:56:78:9a:bc_disk_used', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8794.3125', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_total', - 'unique_id': '12:34:56:78:9a:bc_/_total', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '231.369548797607', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 
'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used', - 'unique_id': '12:34:56:78:9a:bc_/_used', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '173.85604095459', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_6-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_6', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_free', - 'unique_id': '12:34:56:78:9a:bc_/_free', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '45.6910972595215', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_7-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_7', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_total', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_total', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_7-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_7', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11086.3139038086', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_8-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', 
- 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_8', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_used', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_8-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_8', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3981.47631835938', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_9-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_9', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data size', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_free', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_free', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_data_size_9-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_9', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6546.04735183716', - }) -# --- # name: test_sensor[sensor.192_168_1_1_disk_free_inodes-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2369,747 +1685,6 @@ 'state': '31.248420715332', }) # --- -# name: test_sensor[sensor.192_168_1_1_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_itotal', - 'unique_id': '12:34:56:78:9a:bc_/_itotal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 
'sensor.192_168_1_1_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15482880', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_10-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_10', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_10-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_10', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_11-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_11', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_itotal', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_itotal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_11-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_11', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '183140352', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_12-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_12', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_12-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_12', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '9595', - }) -# --- -# name: 
test_sensor[sensor.192_168_1_1_none_13-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_13', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_ifree', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_ifree', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_13-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_13', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '183130757', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_14-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_14', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_14-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_14', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '89', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_15-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_15', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_15-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_15', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - 
}), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused', - 'unique_id': '12:34:56:78:9a:bc_/_iused', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '555674', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_ifree', - 'unique_id': '12:34:56:78:9a:bc_/_ifree', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '14927206', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used_percent', - 'unique_id': '12:34:56:78:9a:bc_/_used_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused_percent', - 'unique_id': '12:34:56:78:9a:bc_/_iused_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_6-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_6', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_itotal', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_itotal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '366198784', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_7-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_7', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_7-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_7', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3542318', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_8-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_8', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': 
set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_ifree', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_ifree', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_8-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_8', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '362656466', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_9-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_9', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_used_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_9-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_9', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '38', - }) -# --- # name: test_sensor[sensor.192_168_1_1_swap_free-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/webmin/test_config_flow.py b/tests/components/webmin/test_config_flow.py index 477ad230622..03da3340597 100644 --- a/tests/components/webmin/test_config_flow.py +++ b/tests/components/webmin/test_config_flow.py @@ -74,7 +74,7 @@ async def test_form_user( (Exception, "unknown"), ( Fault("5", "Webmin module net does not exist"), - "Fault 5: Webmin module net does not exist", + "unknown", ), ], ) diff --git a/tests/components/webmin/test_sensor.py b/tests/components/webmin/test_sensor.py index 5fb874825a3..dd68e2f9f8c 100644 --- a/tests/components/webmin/test_sensor.py +++ b/tests/components/webmin/test_sensor.py @@ -8,6 +8,8 @@ from homeassistant.helpers import entity_registry as er from .conftest import async_init_integration +from tests.common import snapshot_platform + @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor( @@ -19,11 +21,4 @@ async def test_sensor( entry = await async_init_integration(hass) - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/webostv/test_config_flow.py b/tests/components/webostv/test_config_flow.py index 406bb9c8804..9b2983aab47 100644 --- a/tests/components/webostv/test_config_flow.py +++ 
b/tests/components/webostv/test_config_flow.py @@ -302,11 +302,7 @@ async def test_reauth_successful( entry = await setup_webostv(hass) assert client - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) @@ -339,11 +335,7 @@ async def test_reauth_errors( entry = await setup_webostv(hass) assert client - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/webostv/test_diagnostics.py b/tests/components/webostv/test_diagnostics.py index e2fbc43e187..7f54e940966 100644 --- a/tests/components/webostv/test_diagnostics.py +++ b/tests/components/webostv/test_diagnostics.py @@ -60,5 +60,7 @@ async def test_diagnostics( "disabled_by": None, "created_at": entry.created_at.isoformat(), "modified_at": entry.modified_at.isoformat(), + "discovery_keys": {}, + "subentries": [], }, } diff --git a/tests/components/websocket_api/test_auth.py b/tests/components/websocket_api/test_auth.py index 20a728cf3cd..d55d2f97017 100644 --- a/tests/components/websocket_api/test_auth.py +++ b/tests/components/websocket_api/test_auth.py @@ -293,6 +293,6 @@ async def test_auth_sending_unknown_type_disconnects( auth_msg = await ws.receive_json() assert auth_msg["type"] == TYPE_AUTH_REQUIRED - await ws._writer._send_frame(b"1" * 130, 0x30) + await ws._writer.send_frame(b"1" * 130, 0x30) auth_msg = await ws.receive() assert auth_msg.type == WSMsgType.close diff --git a/tests/components/websocket_api/test_commands.py b/tests/components/websocket_api/test_commands.py index 772a8ee793e..22e839d84e4 100644 --- a/tests/components/websocket_api/test_commands.py +++ b/tests/components/websocket_api/test_commands.py @@ -1262,6 +1262,54 @@ async def test_subscribe_unsubscribe_entities_specific_entities( } +async def test_subscribe_unsubscribe_entities_with_filter( + hass: HomeAssistant, + websocket_client: MockHAClientWebSocket, + hass_admin_user: MockUser, +) -> None: + """Test subscribe/unsubscribe entities with an entity filter.""" + hass.states.async_set("switch.not_included", "off") + hass.states.async_set("light.include", "off") + await websocket_client.send_json( + {"id": 7, "type": "subscribe_entities", "include": {"domains": ["light"]}} + ) + + msg = await websocket_client.receive_json() + assert msg["id"] == 7 + assert msg["type"] == const.TYPE_RESULT + assert msg["success"] + + msg = await websocket_client.receive_json() + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "a": { + "light.include": { + "a": {}, + "c": ANY, + "lc": ANY, + "s": "off", + } + } + } + hass.states.async_set("switch.not_included", "on") + hass.states.async_set("light.include", "on") + msg = await websocket_client.receive_json() + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "c": { + "light.include": { + "+": { + "c": ANY, + "lc": ANY, + "s": "on", + } + } + } + } + + async def test_render_template_renders_template( hass: HomeAssistant, websocket_client ) -> None: @@ -2342,6 +2390,9 @@ async def 
test_execute_script( ), ], ) +@pytest.mark.parametrize( + "ignore_translations", ["component.test.exceptions.test_error.message"] +) async def test_execute_script_err_localization( hass: HomeAssistant, websocket_client: MockHAClientWebSocket, @@ -2518,18 +2569,18 @@ async def test_integration_setup_info( @pytest.mark.parametrize( ("key", "config"), [ - ("trigger", {"platform": "event", "event_type": "hello"}), - ("trigger", [{"platform": "event", "event_type": "hello"}]), + ("triggers", {"platform": "event", "event_type": "hello"}), + ("triggers", [{"platform": "event", "event_type": "hello"}]), ( - "condition", + "conditions", {"condition": "state", "entity_id": "hello.world", "state": "paulus"}, ), ( - "condition", + "conditions", [{"condition": "state", "entity_id": "hello.world", "state": "paulus"}], ), - ("action", {"service": "domain_test.test_service"}), - ("action", [{"service": "domain_test.test_service"}]), + ("actions", {"service": "domain_test.test_service"}), + ("actions", [{"service": "domain_test.test_service"}]), ], ) async def test_validate_config_works( @@ -2551,13 +2602,13 @@ async def test_validate_config_works( [ # Raises vol.Invalid ( - "trigger", + "triggers", {"platform": "non_existing", "event_type": "hello"}, - "Invalid platform 'non_existing' specified", + "Invalid trigger 'non_existing' specified", ), # Raises vol.Invalid ( - "condition", + "conditions", { "condition": "non_existing", "entity_id": "hello.world", @@ -2571,7 +2622,7 @@ async def test_validate_config_works( ), # Raises HomeAssistantError ( - "condition", + "conditions", { "above": 50, "condition": "device", @@ -2584,7 +2635,7 @@ async def test_validate_config_works( ), # Raises vol.Invalid ( - "action", + "actions", {"non_existing": "domain_test.test_service"}, "Unable to determine action @ data[0]", ), diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 2530d885942..03e30c11ee9 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -5,7 +5,7 @@ from datetime import timedelta from typing import Any, cast from unittest.mock import patch -from aiohttp import WSMsgType, WSServerHandshakeError, web +from aiohttp import ServerDisconnectedError, WSMsgType, web import pytest from homeassistant.components.websocket_api import ( @@ -374,7 +374,7 @@ async def test_prepare_fail_timeout( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(TimeoutError, web.WebSocketResponse.prepare), ), - pytest.raises(WSServerHandshakeError), + pytest.raises(ServerDisconnectedError), ): await hass_ws_client(hass) @@ -392,7 +392,7 @@ async def test_prepare_fail_connection_reset( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(ConnectionResetError, web.WebSocketResponse.prepare), ), - pytest.raises(WSServerHandshakeError), + pytest.raises(ServerDisconnectedError), ): await hass_ws_client(hass) diff --git a/tests/components/weheat/__init__.py b/tests/components/weheat/__init__.py new file mode 100644 index 00000000000..65c4f84ba77 --- /dev/null +++ b/tests/components/weheat/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Weheat integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await 
hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
diff --git a/tests/components/weheat/conftest.py b/tests/components/weheat/conftest.py
new file mode 100644
index 00000000000..6ecb64ffdf4
--- /dev/null
+++ b/tests/components/weheat/conftest.py
@@ -0,0 +1,138 @@
+"""Fixtures for Weheat tests."""
+
+from collections.abc import Generator
+from time import time
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+from weheat.abstractions.discovery import HeatPumpDiscovery
+from weheat.abstractions.heat_pump import HeatPump
+
+from homeassistant.components.application_credentials import (
+    DOMAIN as APPLICATION_CREDENTIALS,
+    ClientCredential,
+    async_import_client_credential,
+)
+from homeassistant.components.weheat.const import DOMAIN
+from homeassistant.core import HomeAssistant
+from homeassistant.setup import async_setup_component
+
+from .const import (
+    CLIENT_ID,
+    CLIENT_SECRET,
+    TEST_HP_UUID,
+    TEST_MODEL,
+    TEST_SN,
+    USER_UUID_1,
+)
+
+from tests.common import MockConfigEntry
+
+
+@pytest.fixture(autouse=True)
+async def setup_credentials(hass: HomeAssistant) -> None:
+    """Fixture to set up credentials."""
+    assert await async_setup_component(hass, APPLICATION_CREDENTIALS, {})
+    await async_import_client_credential(
+        hass,
+        DOMAIN,
+        ClientCredential(CLIENT_ID, CLIENT_SECRET),
+    )
+
+
+@pytest.fixture
+def mock_setup_entry():
+    """Mock a successful setup."""
+    with patch(
+        "homeassistant.components.weheat.async_setup_entry", return_value=True
+    ) as mock_setup:
+        yield mock_setup
+
+
+@pytest.fixture
+def mock_heat_pump_info() -> HeatPumpDiscovery.HeatPumpInfo:
+    """Create a HeatPumpInfo with default settings."""
+    return HeatPumpDiscovery.HeatPumpInfo(TEST_HP_UUID, None, TEST_MODEL, TEST_SN, True)
+
+
+@pytest.fixture
+def mock_config_entry() -> MockConfigEntry:
+    """Mock a config entry."""
+    return MockConfigEntry(
+        domain=DOMAIN,
+        title="Weheat",
+        data={
+            "id": "12345",
+            "auth_implementation": DOMAIN,
+            "token": {
+                "refresh_token": "mock-refresh-token",
+                "access_token": "mock-access-token",
+                "type": "Bearer",
+                "expires_in": 60,
+                "expires_at": time() + 60,
+            },
+        },
+        unique_id="123456789",
+    )
+
+
+@pytest.fixture
+def mock_user_id() -> Generator[AsyncMock]:
+    """Mock the user API call."""
+    with (
+        patch(
+            "homeassistant.components.weheat.config_flow.get_user_id_from_token",
+            return_value=USER_UUID_1,
+        ) as user_mock,
+    ):
+        yield user_mock
+
+
+@pytest.fixture
+def mock_weheat_discover(mock_heat_pump_info) -> Generator[AsyncMock]:
+    """Mock a Weheat discovery."""
+    with (
+        patch(
+            "homeassistant.components.weheat.HeatPumpDiscovery.discover_active",
+            autospec=True,
+        ) as mock_discover,
+    ):
+        mock_discover.return_value = [mock_heat_pump_info]
+
+        yield mock_discover
+
+
+@pytest.fixture
+def mock_weheat_heat_pump_instance() -> MagicMock:
+    """Mock a Weheat heat pump instance with a set of default values."""
+    mock_heat_pump_instance = MagicMock(spec_set=HeatPump)
+
+    mock_heat_pump_instance.water_inlet_temperature = 11
+    mock_heat_pump_instance.water_outlet_temperature = 22
+    mock_heat_pump_instance.water_house_in_temperature = 33
+    mock_heat_pump_instance.air_inlet_temperature = 44
+    mock_heat_pump_instance.power_input = 55
+    mock_heat_pump_instance.power_output = 66
+    mock_heat_pump_instance.dhw_top_temperature = 77
+    mock_heat_pump_instance.dhw_bottom_temperature = 88
+    mock_heat_pump_instance.thermostat_water_setpoint = 35
+    mock_heat_pump_instance.thermostat_room_temperature = 19
+
mock_heat_pump_instance.thermostat_room_temperature_setpoint = 21 + mock_heat_pump_instance.cop = 4.5 + mock_heat_pump_instance.heat_pump_state = HeatPump.State.HEATING + mock_heat_pump_instance.energy_total = 12345 + + return mock_heat_pump_instance + + +@pytest.fixture +def mock_weheat_heat_pump(mock_weheat_heat_pump_instance) -> Generator[AsyncMock]: + """Mock the coordinator HeatPump data.""" + with ( + patch( + "homeassistant.components.weheat.coordinator.HeatPump", + ) as mock_heat_pump, + ): + mock_heat_pump.return_value = mock_weheat_heat_pump_instance + + yield mock_weheat_heat_pump_instance diff --git a/tests/components/weheat/const.py b/tests/components/weheat/const.py new file mode 100644 index 00000000000..61203259c58 --- /dev/null +++ b/tests/components/weheat/const.py @@ -0,0 +1,17 @@ +"""Constants for weheat tests.""" + +CLIENT_ID = "1234" +CLIENT_SECRET = "5678" + +USER_UUID_1 = "0000-1111-2222-3333" +USER_UUID_2 = "0000-1111-2222-4444" + +CONF_REFRESH_TOKEN = "refresh_token" +CONF_AUTH_IMPLEMENTATION = "auth_implementation" +MOCK_REFRESH_TOKEN = "mock_refresh_token" +MOCK_ACCESS_TOKEN = "mock_access_token" + +TEST_HP_UUID = "0000-1111-2222-3333" +TEST_NAME = "Test Heat Pump" +TEST_MODEL = "Test Model" +TEST_SN = "SN-Test-This" diff --git a/tests/components/weheat/snapshots/test_sensor.ambr b/tests/components/weheat/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..3bd4a254598 --- /dev/null +++ b/tests/components/weheat/snapshots/test_sensor.ambr @@ -0,0 +1,768 @@ +# serializer version: 1 +# name: test_all_entities[sensor.test_model-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'standby', + 'water_check', + 'heating', + 'cooling', + 'dhw', + 'legionella_prevention', + 'defrosting', + 'self_test', + 'manual_control', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heat_pump_state', + 'unique_id': '0000-1111-2222-3333_heat_pump_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.test_model-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Model', + 'options': list([ + 'standby', + 'water_check', + 'heating', + 'cooling', + 'dhw', + 'legionella_prevention', + 'defrosting', + 'self_test', + 'manual_control', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_model', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heating', + }) +# --- +# name: test_all_entities[sensor.test_model_central_heating_inlet_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_central_heating_inlet_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Central heating inlet temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ch_inlet_temperature', + 'unique_id': '0000-1111-2222-3333_ch_inlet_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_central_heating_inlet_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model Central heating inlet temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_central_heating_inlet_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33', + }) +# --- +# name: test_all_entities[sensor.test_model_cop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_cop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'COP', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cop', + 'unique_id': '0000-1111-2222-3333_cop', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.test_model_cop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Model COP', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.test_model_cop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.5', + }) +# --- +# name: test_all_entities[sensor.test_model_current_room_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_current_room_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current room temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_room_temperature', + 'unique_id': '0000-1111-2222-3333_thermostat_room_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_current_room_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model Current room temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_current_room_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19', + }) +# --- +# name: test_all_entities[sensor.test_model_dhw_bottom_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_dhw_bottom_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW bottom temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dhw_bottom_temperature', + 'unique_id': '0000-1111-2222-3333_dhw_bottom_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_dhw_bottom_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model DHW bottom temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_dhw_bottom_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '88', + }) +# --- +# name: test_all_entities[sensor.test_model_dhw_top_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_dhw_top_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW top temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dhw_top_temperature', + 'unique_id': '0000-1111-2222-3333_dhw_top_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_dhw_top_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model DHW top temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_dhw_top_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '77', + }) +# --- +# name: test_all_entities[sensor.test_model_electricity_used-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_electricity_used', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Electricity used', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'electricity_used', + 'unique_id': '0000-1111-2222-3333_electricity_used', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_electricity_used-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Test Model Electricity used', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_electricity_used', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12345', + }) +# --- +# name: test_all_entities[sensor.test_model_input_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_input_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Input power', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_input', + 'unique_id': '0000-1111-2222-3333_power_input', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_input_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Model Input power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_input_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55', + }) +# --- +# name: test_all_entities[sensor.test_model_output_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_output_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Output power', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_output', + 'unique_id': '0000-1111-2222-3333_power_output', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_output_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Model Output power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_output_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '66', + }) +# --- +# name: test_all_entities[sensor.test_model_outside_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_outside_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outside temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outside_temperature', + 'unique_id': 
'0000-1111-2222-3333_outside_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_outside_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model Outside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_outside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '44', + }) +# --- +# name: test_all_entities[sensor.test_model_room_temperature_setpoint-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_room_temperature_setpoint', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Room temperature setpoint', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_room_temperature_setpoint', + 'unique_id': '0000-1111-2222-3333_thermostat_room_temperature_setpoint', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_room_temperature_setpoint-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model Room temperature setpoint', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_room_temperature_setpoint', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21', + }) +# --- +# name: test_all_entities[sensor.test_model_water_inlet_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_water_inlet_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water inlet temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_inlet_temperature', + 'unique_id': '0000-1111-2222-3333_water_inlet_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_water_inlet_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model Water inlet temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_water_inlet_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11', + }) +# --- +# name: test_all_entities[sensor.test_model_water_outlet_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_water_outlet_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water outlet temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_outlet_temperature', + 'unique_id': '0000-1111-2222-3333_water_outlet_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_water_outlet_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model Water outlet temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_water_outlet_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_all_entities[sensor.test_model_water_target_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_water_target_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water target temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_water_setpoint', + 'unique_id': '0000-1111-2222-3333_thermostat_water_setpoint', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_water_target_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model Water target temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_water_target_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35', + }) +# --- diff --git a/tests/components/weheat/test_config_flow.py b/tests/components/weheat/test_config_flow.py new file mode 100644 index 00000000000..b33dd0a8db8 --- /dev/null +++ b/tests/components/weheat/test_config_flow.py @@ -0,0 +1,183 @@ +"""Test the Weheat config flow.""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.weheat.const import ( + DOMAIN, + ENTRY_TITLE, + OAUTH2_AUTHORIZE, + OAUTH2_TOKEN, +) +from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_SOURCE, CONF_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow + +from .const import ( + CLIENT_ID, + CONF_AUTH_IMPLEMENTATION, + CONF_REFRESH_TOKEN, + MOCK_ACCESS_TOKEN, + MOCK_REFRESH_TOKEN, + USER_UUID_1, + USER_UUID_2, +) + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + + 
+@pytest.mark.usefixtures("current_request_with_host")
+async def test_full_flow(
+    hass: HomeAssistant,
+    hass_client_no_auth: ClientSessionGenerator,
+    aioclient_mock: AiohttpClientMocker,
+    mock_setup_entry,
+) -> None:
+    """Check the full flow of adding a single heat pump."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={CONF_SOURCE: SOURCE_USER}
+    )
+
+    await handle_oauth(hass, hass_client_no_auth, aioclient_mock, result)
+
+    with (
+        patch(
+            "homeassistant.components.weheat.config_flow.get_user_id_from_token",
+            return_value=USER_UUID_1,
+        ) as mock_weheat,
+    ):
+        result = await hass.config_entries.flow.async_configure(result["flow_id"])
+
+    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
+    assert len(mock_setup_entry.mock_calls) == 1
+    assert len(mock_weheat.mock_calls) == 1
+
+    assert result["type"] is FlowResultType.CREATE_ENTRY
+    assert result["result"].unique_id == USER_UUID_1
+    assert result["result"].title == ENTRY_TITLE
+    assert result["data"][CONF_TOKEN][CONF_REFRESH_TOKEN] == MOCK_REFRESH_TOKEN
+    assert result["data"][CONF_TOKEN][CONF_ACCESS_TOKEN] == MOCK_ACCESS_TOKEN
+    assert result["data"][CONF_AUTH_IMPLEMENTATION] == DOMAIN
+
+
+@pytest.mark.usefixtures("current_request_with_host")
+async def test_duplicate_unique_id(
+    hass: HomeAssistant,
+    hass_client_no_auth: ClientSessionGenerator,
+    aioclient_mock: AiohttpClientMocker,
+    mock_setup_entry,
+) -> None:
+    """Check that the config flow is aborted when an entry with the same ID exists."""
+    first_entry = MockConfigEntry(
+        domain=DOMAIN,
+        data={},
+        unique_id=USER_UUID_1,
+    )
+
+    first_entry.add_to_hass(hass)
+
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={CONF_SOURCE: SOURCE_USER}
+    )
+
+    await handle_oauth(hass, hass_client_no_auth, aioclient_mock, result)
+
+    with (
+        patch(
+            "homeassistant.components.weheat.config_flow.get_user_id_from_token",
+            return_value=USER_UUID_1,
+        ),
+    ):
+        result = await hass.config_entries.flow.async_configure(result["flow_id"])
+
+    # only care that the config flow is aborted
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "already_configured"
+
+
+@pytest.mark.usefixtures("current_request_with_host")
+@pytest.mark.parametrize(
+    ("logged_in_user", "expected_reason"),
+    [(USER_UUID_1, "reauth_successful"), (USER_UUID_2, "wrong_account")],
+)
+async def test_reauth(
+    hass: HomeAssistant,
+    hass_client_no_auth: ClientSessionGenerator,
+    aioclient_mock: AiohttpClientMocker,
+    mock_user_id: AsyncMock,
+    mock_weheat_discover: AsyncMock,
+    setup_credentials,
+    logged_in_user: str,
+    expected_reason: str,
+) -> None:
+    """Check reauth flow both with and without the correct logged in user."""
+    mock_user_id.return_value = logged_in_user
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        data={},
+        unique_id=USER_UUID_1,
+    )
+
+    entry.add_to_hass(hass)
+
+    result = await entry.start_reauth_flow(hass)
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "reauth_confirm"
+
+    result = await hass.config_entries.flow.async_configure(
+        flow_id=result["flow_id"],
+        user_input={},
+    )
+
+    await handle_oauth(hass, hass_client_no_auth, aioclient_mock, result)
+
+    assert result["type"] is FlowResultType.EXTERNAL_STEP
+
+    result = await hass.config_entries.flow.async_configure(result["flow_id"])
+
+    assert result.get("type") is FlowResultType.ABORT
+    assert result.get("reason") == expected_reason
+    assert entry.unique_id == USER_UUID_1
+
+
+async def handle_oauth(
+    hass: HomeAssistant,
+    hass_client_no_auth: ClientSessionGenerator,
+    aioclient_mock: AiohttpClientMocker,
+    result: ConfigFlowResult,
+) -> None:
+    """Handle the OAuth2 part of the flow."""
+    state = config_entry_oauth2_flow._encode_jwt(
+        hass,
+        {
+            "flow_id": result["flow_id"],
+            "redirect_uri": "https://example.com/auth/external/callback",
+        },
+    )
+
+    assert result["url"] == (
+        f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
+        "&redirect_uri=https://example.com/auth/external/callback"
+        f"&state={state}"
+        "&scope=openid+offline_access"
+    )
+
+    client = await hass_client_no_auth()
+    resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
+    assert resp.status == 200
+    assert resp.headers["content-type"] == "text/html; charset=utf-8"
+
+    aioclient_mock.post(
+        OAUTH2_TOKEN,
+        json={
+            "refresh_token": MOCK_REFRESH_TOKEN,
+            "access_token": MOCK_ACCESS_TOKEN,
+            "type": "Bearer",
+            "expires_in": 60,
+        },
+    )
diff --git a/tests/components/weheat/test_sensor.py b/tests/components/weheat/test_sensor.py
new file mode 100644
index 00000000000..d9055addc67
--- /dev/null
+++ b/tests/components/weheat/test_sensor.py
@@ -0,0 +1,56 @@
+"""Tests for the weheat sensor platform."""
+
+from unittest.mock import AsyncMock, patch
+
+from freezegun.api import FrozenDateTimeFactory
+import pytest
+from syrupy import SnapshotAssertion
+from weheat.abstractions.discovery import HeatPumpDiscovery
+
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from . import setup_integration
+
+from tests.common import MockConfigEntry, snapshot_platform
+
+
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_all_entities(
+    hass: HomeAssistant,
+    snapshot: SnapshotAssertion,
+    mock_weheat_discover: AsyncMock,
+    mock_weheat_heat_pump: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Test all entities."""
+    with patch("homeassistant.components.weheat.PLATFORMS", [Platform.SENSOR]):
+        await setup_integration(hass, mock_config_entry)
+
+    await hass.async_block_till_done()
+
+    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
+
+
+@pytest.mark.parametrize(("has_dhw", "nr_of_entities"), [(False, 12), (True, 14)])
+async def test_create_entities(
+    hass: HomeAssistant,
+    mock_weheat_discover: AsyncMock,
+    mock_weheat_heat_pump: AsyncMock,
+    mock_heat_pump_info: HeatPumpDiscovery.HeatPumpInfo,
+    mock_config_entry: MockConfigEntry,
+    freezer: FrozenDateTimeFactory,
+    has_dhw: bool,
+    nr_of_entities: int,
+) -> None:
+    """Test creating entities."""
+    mock_heat_pump_info.has_dhw = has_dhw
+    mock_weheat_discover.return_value = [mock_heat_pump_info]
+
+    with patch("homeassistant.components.weheat.PLATFORMS", [Platform.SENSOR]):
+        await setup_integration(hass, mock_config_entry)
+
+    await hass.async_block_till_done()
+    assert len(hass.states.async_all()) == nr_of_entities
diff --git a/tests/components/wemo/conftest.py b/tests/components/wemo/conftest.py
index 64bd89f4793..fee981484ef 100644
--- a/tests/components/wemo/conftest.py
+++ b/tests/components/wemo/conftest.py
@@ -65,6 +65,7 @@ def create_pywemo_device(
     device.name = MOCK_NAME
     device.serial_number = MOCK_SERIAL_NUMBER
     device.model_name = pywemo_model.replace("LongPress", "")
+    device.model = device.model_name
     device.udn = f"uuid:{device.model_name}-1_0-{device.serial_number}"
     device.firmware_version = MOCK_FIRMWARE_VERSION
     device.get_state.return_value
= 0 # Default to Off diff --git a/tests/components/wemo/test_coordinator.py b/tests/components/wemo/test_coordinator.py index f524633e701..17061aea2f6 100644 --- a/tests/components/wemo/test_coordinator.py +++ b/tests/components/wemo/test_coordinator.py @@ -178,6 +178,7 @@ async def test_device_info( } assert device_entries[0].manufacturer == "Belkin" assert device_entries[0].model == "LightSwitch" + assert device_entries[0].model_id == "LightSwitch" assert device_entries[0].sw_version == MOCK_FIRMWARE_VERSION diff --git a/tests/components/wemo/test_init.py b/tests/components/wemo/test_init.py index 48d8f8eac03..4a38775d331 100644 --- a/tests/components/wemo/test_init.py +++ b/tests/components/wemo/test_init.py @@ -201,6 +201,7 @@ async def test_discovery( device.name = f"{MOCK_NAME}_{counter}" device.serial_number = f"{MOCK_SERIAL_NUMBER}_{counter}" device.model_name = "Motion" + device.model = "Motion" device.udn = f"uuid:{device.model_name}-1_0-{device.serial_number}" device.firmware_version = MOCK_FIRMWARE_VERSION device.get_state.return_value = 0 # Default to Off diff --git a/tests/components/wemo/test_light_bridge.py b/tests/components/wemo/test_light_bridge.py index 48be2823750..4deddeaba94 100644 --- a/tests/components/wemo/test_light_bridge.py +++ b/tests/components/wemo/test_light_bridge.py @@ -11,7 +11,7 @@ from homeassistant.components.homeassistant import ( ) from homeassistant.components.light import ( ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, ColorMode, @@ -116,7 +116,7 @@ async def test_light_update_entity( blocking=True, ) state = hass.states.get(wemo_entity.entity_id) - assert state.attributes.get(ATTR_COLOR_TEMP) == 432 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 2314 assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ColorMode.COLOR_TEMP] assert state.attributes.get(ATTR_COLOR_MODE) == ColorMode.COLOR_TEMP assert state.state == STATE_ON diff --git a/tests/components/whirlpool/snapshots/test_diagnostics.ambr b/tests/components/whirlpool/snapshots/test_diagnostics.ambr index 5a0beb112e6..ee8abe04bf1 100644 --- a/tests/components/whirlpool/snapshots/test_diagnostics.ambr +++ b/tests/components/whirlpool/snapshots/test_diagnostics.ambr @@ -29,6 +29,8 @@ 'username': '**REDACTED**', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'whirlpool', 'minor_version': 1, 'options': dict({ @@ -36,6 +38,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/whirlpool/test_config_flow.py b/tests/components/whirlpool/test_config_flow.py index e3896a436d4..1240e1303e1 100644 --- a/tests/components/whirlpool/test_config_flow.py +++ b/tests/components/whirlpool/test_config_flow.py @@ -235,15 +235,7 @@ async def test_reauth_flow(hass: HomeAssistant, region, brand) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -294,21 +286,7 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, region, brand) -> Non ) mock_entry.add_to_hass(hass) - 
result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={ - CONF_USERNAME: "test-username", - CONF_PASSWORD: "new-password", - "region": region[0], - "brand": brand[0], - }, - ) - + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -345,15 +323,7 @@ async def test_reauth_flow_connnection_error( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/whois/snapshots/test_config_flow.ambr b/tests/components/whois/snapshots/test_config_flow.ambr index 08f3861dcd2..0d99b0596e3 100644 --- a/tests/components/whois/snapshots/test_config_flow.ambr +++ b/tests/components/whois/snapshots/test_config_flow.ambr @@ -20,6 +20,8 @@ 'domain': 'example.com', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, @@ -28,10 +30,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -58,6 +64,8 @@ 'domain': 'example.com', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, @@ -66,10 +74,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -96,6 +108,8 @@ 'domain': 'example.com', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, @@ -104,10 +118,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -134,6 +152,8 @@ 'domain': 'example.com', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, @@ -142,10 +162,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -172,6 +196,8 @@ 'domain': 'example.com', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, @@ -180,10 +206,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, diff --git a/tests/components/wilight/test_cover.py 
b/tests/components/wilight/test_cover.py index 5b89293032f..a844a61fc1a 100644 --- a/tests/components/wilight/test_cover.py +++ b/tests/components/wilight/test_cover.py @@ -9,6 +9,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -16,10 +17,6 @@ from homeassistant.const import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -70,7 +67,7 @@ async def test_loading_cover( # First segment of the strip state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED entry = entity_registry.async_get("cover.wl000000000099_1") assert entry @@ -94,7 +91,7 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # Close await hass.services.async_call( @@ -107,7 +104,7 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # Set position await hass.services.async_call( @@ -120,7 +117,7 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 50 # Stop @@ -134,4 +131,4 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN diff --git a/tests/components/withings/__init__.py b/tests/components/withings/__init__.py index 4b97fc48834..127bccbeb00 100644 --- a/tests/components/withings/__init__.py +++ b/tests/components/withings/__init__.py @@ -6,12 +6,12 @@ from typing import Any from urllib.parse import urlparse from aiohttp.test_utils import TestClient -from aiowithings import Activity, Goals, MeasurementGroup, SleepSummary, Workout +from aiowithings import Activity, Device, Goals, MeasurementGroup, SleepSummary, Workout from freezegun.api import FrozenDateTimeFactory from homeassistant.components.webhook import async_generate_url -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from tests.common import ( MockConfigEntry, @@ -109,3 +109,11 @@ def load_sleep_fixture( """Return sleep summaries from fixture.""" sleep_json = load_json_array_fixture("withings/sleep_summaries.json") return [SleepSummary.from_api(sleep_summary) for sleep_summary in sleep_json] + + +def load_device_fixture( + fixture: str = "withings/devices.json", +) -> list[Device]: + """Return devices from fixture.""" + devices_json = load_json_array_fixture(fixture) + return [Device.from_api(device) for device in devices_json] diff --git a/tests/components/withings/conftest.py b/tests/components/withings/conftest.py index dfb0658b64a..5b73240908a 100644 --- a/tests/components/withings/conftest.py +++
b/tests/components/withings/conftest.py @@ -133,6 +133,29 @@ def polling_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: ) +@pytest.fixture +def second_polling_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: + """Create Withings entry in Home Assistant.""" + return MockConfigEntry( + domain=DOMAIN, + title="Not Henk", + unique_id="54321", + data={ + "auth_implementation": DOMAIN, + "token": { + "status": 0, + "userid": "54321", + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": ",".join(scopes), + }, + "profile": TITLE, + "webhook_id": WEBHOOK_ID, + }, + ) + + @pytest.fixture(name="withings") def mock_withings(): """Mock withings.""" diff --git a/tests/components/withings/snapshots/test_init.ambr b/tests/components/withings/snapshots/test_init.ambr new file mode 100644 index 00000000000..be221cad313 --- /dev/null +++ b/tests/components/withings/snapshots/test_init.ambr @@ -0,0 +1,65 @@ +# serializer version: 1 +# name: test_devices[12345] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'withings', + '12345', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Withings', + 'model': None, + 'model_id': None, + 'name': 'henk', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_devices[f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'withings', + 'f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Withings', + 'model': 'Body+', + 'model_id': None, + 'name': 'Body+', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/withings/snapshots/test_sensor.ambr b/tests/components/withings/snapshots/test_sensor.ambr index 70a86c79038..cfecfb1e28e 100644 --- a/tests/components/withings/snapshots/test_sensor.ambr +++ b/tests/components/withings/snapshots/test_sensor.ambr @@ -1,4 +1,62 @@ # serializer version: 1 +# name: test_all_entities[sensor.body_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.body_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'withings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery', + 'unique_id': 'f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.body_battery-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Body+ Battery', + 'options': list([ + 'low', + 'medium', + 'high', + ]), + }), + 'context': , + 'entity_id': 'sensor.body_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'high', + }) +# --- # name: test_all_entities[sensor.henk_active_calories_burnt_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/withings/test_config_flow.py b/tests/components/withings/test_config_flow.py index 20bef90a31e..39c8340a78e 100644 --- a/tests/components/withings/test_config_flow.py +++ b/tests/components/withings/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.withings.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -145,14 +145,7 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -207,14 +200,7 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/withings/test_init.py b/tests/components/withings/test_init.py index 0375d1869d9..e07e1f90cb4 100644 --- a/tests/components/withings/test_init.py +++ b/tests/components/withings/test_init.py @@ -14,6 +14,7 @@ from aiowithings import ( ) from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy import SnapshotAssertion from homeassistant import config_entries from homeassistant.components import cloud @@ -22,6 +23,7 @@ from homeassistant.components.webhook import async_generate_url from homeassistant.components.withings.const import DOMAIN from homeassistant.const import CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from homeassistant.util import dt as dt_util from . 
import call_webhook, prepare_webhook_setup, setup_integration @@ -569,3 +571,21 @@ async def test_webhook_post( resp.close() assert data["code"] == expected_code + + +async def test_devices( + hass: HomeAssistant, + withings: AsyncMock, + webhook_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + device_registry: dr.DeviceRegistry, +) -> None: + """Test devices.""" + await setup_integration(hass, webhook_config_entry) + + await hass.async_block_till_done() + + for device_id in ("12345", "f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d"): + device = device_registry.async_get_device({(DOMAIN, device_id)}) + assert device is not None + assert device == snapshot(name=device_id) diff --git a/tests/components/withings/test_sensor.py b/tests/components/withings/test_sensor.py index 8966006e47f..20927c197a4 100644 --- a/tests/components/withings/test_sensor.py +++ b/tests/components/withings/test_sensor.py @@ -8,12 +8,14 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from homeassistant.components.withings import DOMAIN from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from . import ( load_activity_fixture, + load_device_fixture, load_goals_fixture, load_measurements_fixture, load_sleep_fixture, @@ -351,3 +353,83 @@ async def test_warning_if_no_entities_created( await setup_integration(hass, polling_config_entry, False) assert "No data found for Withings entry" in caplog.text + + +async def test_device_sensors_created_when_device_data_received( + hass: HomeAssistant, + withings: AsyncMock, + polling_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device sensors will be added if we receive device data.""" + withings.get_devices.return_value = [] + await setup_integration(hass, polling_config_entry, False) + + assert hass.states.get("sensor.body_battery") is None + + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.body_battery") is None + + withings.get_devices.return_value = load_device_fixture() + + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.body_battery") + assert device_registry.async_get_device( + {(DOMAIN, "f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d")} + ) + + withings.get_devices.return_value = [] + + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.body_battery") is None + assert not device_registry.async_get_device( + {(DOMAIN, "f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d")} + ) + + +async def test_device_two_config_entries( + hass: HomeAssistant, + withings: AsyncMock, + polling_config_entry: MockConfigEntry, + second_polling_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test device sensors will be added for one config entry only at a time.""" + await setup_integration(hass, polling_config_entry, False) + + assert hass.states.get("sensor.body_battery") is not None + + second_polling_config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(second_polling_config_entry.entry_id) + + assert hass.states.get("sensor.not_henk_temperature") is not None + + assert "Platform withings does not generate unique IDs" not in caplog.text + + await hass.config_entries.async_unload(polling_config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("sensor.body_battery").state == STATE_UNAVAILABLE + + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.body_battery").state != STATE_UNAVAILABLE + + await hass.config_entries.async_setup(polling_config_entry.entry_id) + await hass.async_block_till_done() + + assert "Platform withings does not generate unique IDs" not in caplog.text diff --git a/tests/components/wiz/test_light.py b/tests/components/wiz/test_light.py index 1fb87b30a5f..5c74d407238 100644 --- a/tests/components/wiz/test_light.py +++ b/tests/components/wiz/test_light.py @@ -4,7 +4,7 @@ from pywizlight import PilotBuilder from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -91,7 +91,7 @@ async def test_rgbww_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 153, ATTR_BRIGHTNESS: 128}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6535, ATTR_BRIGHTNESS: 128}, blocking=True, ) pilot: PilotBuilder = bulb.turn_on.mock_calls[0][1][0] @@ -99,7 +99,7 @@ async def test_rgbww_light(hass: HomeAssistant) -> None: await async_push_update(hass, bulb, {"mac": FAKE_MAC, **pilot.pilot_params}) state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 153 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 6535 bulb.turn_on.reset_mock() await hass.services.async_call( @@ -148,7 +148,7 @@ async def test_rgbw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 153, ATTR_BRIGHTNESS: 128}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6535, ATTR_BRIGHTNESS: 128}, blocking=True, ) pilot: PilotBuilder = bulb.turn_on.mock_calls[0][1][0] @@ -162,7 +162,7 @@ async def test_turnable_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 153, ATTR_BRIGHTNESS: 128}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6535, ATTR_BRIGHTNESS: 128}, blocking=True, ) pilot: PilotBuilder = bulb.turn_on.mock_calls[0][1][0] @@ -171,7 +171,7 @@ async def test_turnable_light(hass: HomeAssistant) -> None: await async_push_update(hass, bulb, {"mac": FAKE_MAC, **pilot.pilot_params}) state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 153 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 6535 async def test_old_firmware_dimmable_light(hass: HomeAssistant) -> None: diff --git a/tests/components/wled/snapshots/test_diagnostics.ambr b/tests/components/wled/snapshots/test_diagnostics.ambr index 90732c02c36..46953b00440 100644 --- a/tests/components/wled/snapshots/test_diagnostics.ambr +++ b/tests/components/wled/snapshots/test_diagnostics.ambr @@ -224,7 +224,7 @@ 'udpport': 21324, 'uptime': 966, 'ver': '0.14.4', - 'vid': 2405180, + 'vid': '2405180', 'wifi': '**REDACTED**', }), 'palettes': dict({ diff --git 
a/tests/components/wmspro/__init__.py b/tests/components/wmspro/__init__.py new file mode 100644 index 00000000000..fee2fc64849 --- /dev/null +++ b/tests/components/wmspro/__init__.py @@ -0,0 +1,16 @@ +"""Tests for the wmspro integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> bool: + """Set up a config entry.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + return result diff --git a/tests/components/wmspro/conftest.py b/tests/components/wmspro/conftest.py new file mode 100644 index 00000000000..4b0e7eb4fef --- /dev/null +++ b/tests/components/wmspro/conftest.py @@ -0,0 +1,127 @@ +"""Common fixtures for the wmspro tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.wmspro.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry, load_json_object_fixture + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return a dummy config entry.""" + return MockConfigEntry( + title="WebControl", + domain=DOMAIN, + data={CONF_HOST: "webcontrol"}, + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.wmspro.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_hub_ping() -> Generator[AsyncMock]: + """Override WebControlPro.ping.""" + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ) as mock_hub_ping: + yield mock_hub_ping + + +@pytest.fixture +def mock_hub_refresh() -> Generator[AsyncMock]: + """Override WebControlPro.refresh.""" + with patch( + "wmspro.webcontrol.WebControlPro.refresh", + return_value=True, + ) as mock_hub_refresh: + yield mock_hub_refresh + + +@pytest.fixture +def mock_hub_configuration_test() -> Generator[AsyncMock]: + """Override WebControlPro.configuration.""" + with patch( + "wmspro.webcontrol.WebControlPro._getConfiguration", + return_value=load_json_object_fixture("example_config_test.json", DOMAIN), + ) as mock_hub_configuration: + yield mock_hub_configuration + + +@pytest.fixture +def mock_hub_configuration_prod() -> Generator[AsyncMock]: + """Override WebControlPro._getConfiguration.""" + with patch( + "wmspro.webcontrol.WebControlPro._getConfiguration", + return_value=load_json_object_fixture("example_config_prod.json", DOMAIN), + ) as mock_hub_configuration: + yield mock_hub_configuration + + +@pytest.fixture +def mock_hub_status_prod_awning() -> Generator[AsyncMock]: + """Override WebControlPro._getStatus.""" + with patch( + "wmspro.webcontrol.WebControlPro._getStatus", + return_value=load_json_object_fixture( + "example_status_prod_awning.json", DOMAIN + ), + ) as mock_dest_refresh: + yield mock_dest_refresh + + +@pytest.fixture +def mock_hub_status_prod_dimmer() -> Generator[AsyncMock]: + """Override WebControlPro._getStatus.""" + with patch( + "wmspro.webcontrol.WebControlPro._getStatus", + return_value=load_json_object_fixture( + "example_status_prod_dimmer.json", DOMAIN + ), + ) as mock_dest_refresh: + yield mock_dest_refresh + + +@pytest.fixture +def mock_dest_refresh() -> Generator[AsyncMock]: + """Override Destination.refresh.""" + with patch( + 
"wmspro.destination.Destination.refresh", + return_value=True, + ) as mock_dest_refresh: + yield mock_dest_refresh + + +@pytest.fixture +def mock_action_call() -> Generator[AsyncMock]: + """Override Action.__call__.""" + + async def fake_call(self, **kwargs): + self._update_params(kwargs) + + with patch( + "wmspro.action.Action.__call__", + fake_call, + ) as mock_action_call: + yield mock_action_call + + +@pytest.fixture +def mock_scene_call() -> Generator[AsyncMock]: + """Override Scene.__call__.""" + with patch( + "wmspro.scene.Scene.__call__", + ) as mock_scene_call: + yield mock_scene_call diff --git a/tests/components/wmspro/fixtures/example_config_prod.json b/tests/components/wmspro/fixtures/example_config_prod.json new file mode 100644 index 00000000000..6e313b566f7 --- /dev/null +++ b/tests/components/wmspro/fixtures/example_config_prod.json @@ -0,0 +1,77 @@ +{ + "command": "getConfiguration", + "protocolVersion": "1.0.0", + "destinations": [ + { + "id": 58717, + "animationType": 1, + "names": ["Markise", "", "", ""], + "actions": [ + { + "id": 0, + "actionType": 0, + "actionDescription": 0, + "minValue": 0, + "maxValue": 100 + }, + { + "id": 16, + "actionType": 6, + "actionDescription": 12 + }, + { + "id": 22, + "actionType": 8, + "actionDescription": 13 + } + ] + }, + { + "id": 97358, + "animationType": 6, + "names": ["Licht", "", "", ""], + "actions": [ + { + "id": 0, + "actionType": 0, + "actionDescription": 8, + "minValue": 0, + "maxValue": 100 + }, + { + "id": 17, + "actionType": 6, + "actionDescription": 12 + }, + { + "id": 20, + "actionType": 4, + "actionDescription": 6 + }, + { + "id": 22, + "actionType": 8, + "actionDescription": 13 + } + ] + } + ], + "rooms": [ + { + "id": 19239, + "name": "Terrasse", + "destinations": [58717, 97358], + "scenes": [687471, 765095] + } + ], + "scenes": [ + { + "id": 687471, + "names": ["Licht an", "", "", ""] + }, + { + "id": 765095, + "names": ["Licht aus", "", "", ""] + } + ] +} diff --git a/tests/components/wmspro/fixtures/example_config_test.json b/tests/components/wmspro/fixtures/example_config_test.json new file mode 100644 index 00000000000..1bb63e089ad --- /dev/null +++ b/tests/components/wmspro/fixtures/example_config_test.json @@ -0,0 +1,75 @@ +{ + "command": "getConfiguration", + "protocolVersion": "1.0.0", + "destinations": [ + { + "id": 17776, + "animationType": 0, + "names": ["Küche", "", "", ""], + "actions": [ + { + "id": 0, + "actionType": 0, + "actionDescription": 2, + "minValue": 0, + "maxValue": 100 + }, + { + "id": 6, + "actionType": 2, + "actionDescription": 3, + "minValue": -127, + "maxValue": 127 + }, + { + "id": 16, + "actionType": 6, + "actionDescription": 12 + }, + { + "id": 22, + "actionType": 8, + "actionDescription": 13 + }, + { + "id": 23, + "actionType": 7, + "actionDescription": 12 + } + ] + }, + { + "id": 200951, + "animationType": 999, + "names": ["Aktor Potentialfrei", "", "", ""], + "actions": [ + { + "id": 22, + "actionType": 8, + "actionDescription": 13 + }, + { + "id": 26, + "actionType": 9, + "actionDescription": 999, + "minValue": 0, + "maxValue": 16 + } + ] + } + ], + "rooms": [ + { + "id": 42581, + "name": "Raum 0", + "destinations": [17776, 116682, 194367, 200951], + "scenes": [688966] + } + ], + "scenes": [ + { + "id": 688966, + "names": ["Gute Nacht", "", "", ""] + } + ] +} diff --git a/tests/components/wmspro/fixtures/example_status_prod_awning.json b/tests/components/wmspro/fixtures/example_status_prod_awning.json new file mode 100644 index 00000000000..6ca697a4532 --- /dev/null +++ 
b/tests/components/wmspro/fixtures/example_status_prod_awning.json @@ -0,0 +1,22 @@ +{ + "command": "getStatus", + "protocolVersion": "1.0.0", + "details": [ + { + "destinationId": 58717, + "data": { + "drivingCause": 0, + "heartbeatError": false, + "blocking": false, + "productData": [ + { + "actionId": 0, + "value": { + "percentage": 100 + } + } + ] + } + } + ] +} diff --git a/tests/components/wmspro/fixtures/example_status_prod_dimmer.json b/tests/components/wmspro/fixtures/example_status_prod_dimmer.json new file mode 100644 index 00000000000..675549f2457 --- /dev/null +++ b/tests/components/wmspro/fixtures/example_status_prod_dimmer.json @@ -0,0 +1,28 @@ +{ + "command": "getStatus", + "protocolVersion": "1.0.0", + "details": [ + { + "destinationId": 97358, + "data": { + "drivingCause": 0, + "heartbeatError": false, + "blocking": false, + "productData": [ + { + "actionId": 0, + "value": { + "percentage": 0 + } + }, + { + "actionId": 20, + "value": { + "onOffState": false + } + } + ] + } + } + ] +} diff --git a/tests/components/wmspro/snapshots/test_cover.ambr b/tests/components/wmspro/snapshots/test_cover.ambr new file mode 100644 index 00000000000..0456f074d49 --- /dev/null +++ b/tests/components/wmspro/snapshots/test_cover.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_cover_device + DeviceRegistryEntrySnapshot({ + 'area_id': 'terrasse', + 'config_entries': , + 'configuration_url': 'http://webcontrol/control', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'wmspro', + '58717', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'WAREMA Renkhoff SE', + 'model': 'Awning', + 'model_id': None, + 'name': 'Markise', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '58717', + 'suggested_area': 'Terrasse', + 'sw_version': None, + 'via_device_id': , + }) +# --- +# name: test_cover_update + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by WMS WebControl pro API', + 'current_position': 0, + 'device_class': 'awning', + 'friendly_name': 'Markise', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.markise', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- diff --git a/tests/components/wmspro/snapshots/test_diagnostics.ambr b/tests/components/wmspro/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..00cb62e18c4 --- /dev/null +++ b/tests/components/wmspro/snapshots/test_diagnostics.ambr @@ -0,0 +1,244 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'config': dict({ + 'command': 'getConfiguration', + 'destinations': list([ + dict({ + 'actions': list([ + dict({ + 'actionDescription': 0, + 'actionType': 0, + 'id': 0, + 'maxValue': 100, + 'minValue': 0, + }), + dict({ + 'actionDescription': 12, + 'actionType': 6, + 'id': 16, + }), + dict({ + 'actionDescription': 13, + 'actionType': 8, + 'id': 22, + }), + ]), + 'animationType': 1, + 'id': 58717, + 'names': list([ + 'Markise', + '', + '', + '', + ]), + }), + dict({ + 'actions': list([ + dict({ + 'actionDescription': 8, + 'actionType': 0, + 'id': 0, + 'maxValue': 100, + 'minValue': 0, + }), + dict({ + 'actionDescription': 12, + 'actionType': 6, + 'id': 17, + }), + dict({ + 'actionDescription': 6, + 'actionType': 4, + 'id': 20, + }), + dict({ + 'actionDescription': 13, + 'actionType': 8, + 'id': 22, + }), + ]), + 'animationType': 6, + 'id': 97358, + 'names': list([ + 'Licht', + 
'', + '', + '', + ]), + }), + ]), + 'protocolVersion': '1.0.0', + 'rooms': list([ + dict({ + 'destinations': list([ + 58717, + 97358, + ]), + 'id': 19239, + 'name': 'Terrasse', + 'scenes': list([ + 687471, + 765095, + ]), + }), + ]), + 'scenes': list([ + dict({ + 'id': 687471, + 'names': list([ + 'Licht an', + '', + '', + '', + ]), + }), + dict({ + 'id': 765095, + 'names': list([ + 'Licht aus', + '', + '', + '', + ]), + }), + ]), + }), + 'dests': dict({ + '58717': dict({ + 'actions': dict({ + '0': dict({ + 'actionDescription': 'AwningDrive', + 'actionType': 'Percentage', + 'attrs': dict({ + 'maxValue': 100, + 'minValue': 0, + }), + 'id': 0, + 'params': dict({ + }), + }), + '16': dict({ + 'actionDescription': 'ManualCommand', + 'actionType': 'Stop', + 'attrs': dict({ + }), + 'id': 16, + 'params': dict({ + }), + }), + '22': dict({ + 'actionDescription': 'Identify', + 'actionType': 'Identify', + 'attrs': dict({ + }), + 'id': 22, + 'params': dict({ + }), + }), + }), + 'animationType': 'Awning', + 'available': True, + 'blocking': None, + 'drivingCause': 'Unknown', + 'heartbeatError': None, + 'id': 58717, + 'name': 'Markise', + 'room': dict({ + '19239': 'Terrasse', + }), + 'status': dict({ + }), + 'unknownProducts': dict({ + }), + }), + '97358': dict({ + 'actions': dict({ + '0': dict({ + 'actionDescription': 'LightDimming', + 'actionType': 'Percentage', + 'attrs': dict({ + 'maxValue': 100, + 'minValue': 0, + }), + 'id': 0, + 'params': dict({ + }), + }), + '17': dict({ + 'actionDescription': 'ManualCommand', + 'actionType': 'Stop', + 'attrs': dict({ + }), + 'id': 17, + 'params': dict({ + }), + }), + '20': dict({ + 'actionDescription': 'LightSwitch', + 'actionType': 'Switch', + 'attrs': dict({ + }), + 'id': 20, + 'params': dict({ + }), + }), + '22': dict({ + 'actionDescription': 'Identify', + 'actionType': 'Identify', + 'attrs': dict({ + }), + 'id': 22, + 'params': dict({ + }), + }), + }), + 'animationType': 'Dimmer', + 'available': True, + 'blocking': None, + 'drivingCause': 'Unknown', + 'heartbeatError': None, + 'id': 97358, + 'name': 'Licht', + 'room': dict({ + '19239': 'Terrasse', + }), + 'status': dict({ + }), + 'unknownProducts': dict({ + }), + }), + }), + 'host': 'webcontrol', + 'rooms': dict({ + '19239': dict({ + 'destinations': dict({ + '58717': 'Markise', + '97358': 'Licht', + }), + 'id': 19239, + 'name': 'Terrasse', + 'scenes': dict({ + '687471': 'Licht an', + '765095': 'Licht aus', + }), + }), + }), + 'scenes': dict({ + '687471': dict({ + 'id': 687471, + 'name': 'Licht an', + 'room': dict({ + '19239': 'Terrasse', + }), + }), + '765095': dict({ + 'id': 765095, + 'name': 'Licht aus', + 'room': dict({ + '19239': 'Terrasse', + }), + }), + }), + }) +# --- diff --git a/tests/components/wmspro/snapshots/test_light.ambr b/tests/components/wmspro/snapshots/test_light.ambr new file mode 100644 index 00000000000..d13e444645d --- /dev/null +++ b/tests/components/wmspro/snapshots/test_light.ambr @@ -0,0 +1,53 @@ +# serializer version: 1 +# name: test_light_device + DeviceRegistryEntrySnapshot({ + 'area_id': 'terrasse', + 'config_entries': , + 'configuration_url': 'http://webcontrol/control', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'wmspro', + '97358', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'WAREMA Renkhoff SE', + 'model': 'Dimmer', + 'model_id': None, + 'name': 'Licht', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '97358', + 'suggested_area': 
'Terrasse', + 'sw_version': None, + 'via_device_id': , + }) +# --- +# name: test_light_update + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by WMS WebControl pro API', + 'brightness': None, + 'color_mode': None, + 'friendly_name': 'Licht', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.licht', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/wmspro/snapshots/test_scene.ambr b/tests/components/wmspro/snapshots/test_scene.ambr new file mode 100644 index 00000000000..940d4e31e83 --- /dev/null +++ b/tests/components/wmspro/snapshots/test_scene.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_scene_activate + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by WMS WebControl pro API', + 'friendly_name': 'Raum 0 Gute Nacht', + }), + 'context': , + 'entity_id': 'scene.raum_0_gute_nacht', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_scene_room_device + DeviceRegistryEntrySnapshot({ + 'area_id': 'raum_0', + 'config_entries': , + 'configuration_url': 'http://webcontrol/control', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'wmspro', + '42581', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'WAREMA Renkhoff SE', + 'model': 'Room', + 'model_id': None, + 'name': 'Raum 0', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '42581', + 'suggested_area': 'Raum 0', + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/wmspro/test_config_flow.py b/tests/components/wmspro/test_config_flow.py new file mode 100644 index 00000000000..782dc051c8c --- /dev/null +++ b/tests/components/wmspro/test_config_flow.py @@ -0,0 +1,394 @@ +"""Test the wmspro config flow.""" + +from unittest.mock import AsyncMock, patch + +import aiohttp + +from homeassistant.components.dhcp import DhcpServiceInfo +from homeassistant.components.wmspro.const import DOMAIN +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER, ConfigEntryState +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import setup_config_entry + +from tests.common import MockConfigEntry + + +async def test_config_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock +) -> None: + """Test we can handle user-input to create a config entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.2.3.4" + assert result["data"] == { + CONF_HOST: "1.2.3.4", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_config_flow_from_dhcp( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock +) -> None: + """Test we can handle DHCP discovery to create a config entry.""" + info = DhcpServiceInfo( + ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.2.3.4" + assert result["data"] == { + CONF_HOST: "1.2.3.4", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_config_flow_from_dhcp_add_mac( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_hub_refresh: AsyncMock, +) -> None: + """Test we can use DHCP discovery to add MAC address to a config entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.2.3.4" + assert result["data"] == { + CONF_HOST: "1.2.3.4", + } + assert len(mock_setup_entry.mock_calls) == 1 + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id is None + + info = DhcpServiceInfo( + ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" + + +async def test_config_flow_from_dhcp_ip_update( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_hub_refresh: AsyncMock, +) -> None: + """Test we can use DHCP discovery to update IP in a config entry.""" + info = DhcpServiceInfo( + ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + 
"wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.2.3.4" + assert result["data"] == { + CONF_HOST: "1.2.3.4", + } + assert len(mock_setup_entry.mock_calls) == 1 + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" + + info = DhcpServiceInfo( + ip="5.6.7.8", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" + assert hass.config_entries.async_entries(DOMAIN)[0].data[CONF_HOST] == "5.6.7.8" + + +async def test_config_flow_from_dhcp_no_update( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_hub_refresh: AsyncMock, +) -> None: + """Test we do not use DHCP discovery to overwrite hostname with IP in config entry.""" + info = DhcpServiceInfo( + ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "webcontrol", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "webcontrol" + assert result["data"] == { + CONF_HOST: "webcontrol", + } + assert len(mock_setup_entry.mock_calls) == 1 + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" + + info = DhcpServiceInfo( + ip="5.6.7.8", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" + assert hass.config_entries.async_entries(DOMAIN)[0].data[CONF_HOST] == "webcontrol" + + +async def test_config_flow_ping_failed( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock +) -> None: + """Test we handle ping failed error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=False, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.2.3.4" + assert result["data"] == { + CONF_HOST: "1.2.3.4", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_config_flow_cannot_connect( + hass: HomeAssistant, mock_setup_entry: AsyncMock, 
mock_hub_refresh: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + side_effect=aiohttp.ClientError, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.2.3.4" + assert result["data"] == { + CONF_HOST: "1.2.3.4", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_config_flow_unknown_error( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock +) -> None: + """Test we handle an unknown error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + side_effect=RuntimeError, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.2.3.4" + assert result["data"] == { + CONF_HOST: "1.2.3.4", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_config_flow_duplicate_entries( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_dest_refresh: AsyncMock, + mock_hub_configuration_test: AsyncMock, +) -> None: + """Test we prevent creation of duplicate config entries.""" + await setup_config_entry(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "5.6.7.8", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +async def test_config_flow_multiple_entries( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_dest_refresh: AsyncMock, + mock_hub_configuration_test: AsyncMock, + mock_hub_configuration_prod: AsyncMock, +) -> None: + """Test we allow creation of different config entries.""" + await setup_config_entry(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + mock_hub_configuration_prod.return_value = mock_hub_configuration_test.return_value + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "5.6.7.8", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "5.6.7.8" + assert result["data"] == { + 
CONF_HOST: "5.6.7.8", + } + assert len(hass.config_entries.async_entries(DOMAIN)) == 2 diff --git a/tests/components/wmspro/test_cover.py b/tests/components/wmspro/test_cover.py new file mode 100644 index 00000000000..2c20ef51b64 --- /dev/null +++ b/tests/components/wmspro/test_cover.py @@ -0,0 +1,227 @@ +"""Test the wmspro cover support.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.wmspro.const import DOMAIN +from homeassistant.components.wmspro.cover import SCAN_INTERVAL +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_SET_COVER_POSITION, + SERVICE_STOP_COVER, + STATE_CLOSED, + STATE_OPEN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_config_entry + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_cover_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_awning: AsyncMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that a cover device is created correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_awning.mock_calls) == 2 + + device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "58717")}) + assert device_entry is not None + assert device_entry == snapshot + + +async def test_cover_update( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_awning: AsyncMock, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test that a cover entity is created and updated correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_awning.mock_calls) == 2 + + entity = hass.states.get("cover.markise") + assert entity is not None + assert entity == snapshot + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(mock_hub_status_prod_awning.mock_calls) >= 3 + + +async def test_cover_open_and_close( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_awning: AsyncMock, + mock_action_call: AsyncMock, +) -> None: + """Test that a cover entity is opened and closed correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_awning.mock_calls) >= 1 + + entity = hass.states.get("cover.markise") + assert entity is not None + assert entity.state == STATE_CLOSED + assert entity.attributes["current_position"] == 0 + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_awning.mock_calls) + + await hass.services.async_call( + Platform.COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: 
entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("cover.markise") + assert entity is not None + assert entity.state == STATE_OPEN + assert entity.attributes["current_position"] == 100 + assert len(mock_hub_status_prod_awning.mock_calls) == before + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_awning.mock_calls) + + await hass.services.async_call( + Platform.COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("cover.markise") + assert entity is not None + assert entity.state == STATE_CLOSED + assert entity.attributes["current_position"] == 0 + assert len(mock_hub_status_prod_awning.mock_calls) == before + + +async def test_cover_open_to_pos( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_awning: AsyncMock, + mock_action_call: AsyncMock, +) -> None: + """Test that a cover entity is opened to correct position.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_awning.mock_calls) >= 1 + + entity = hass.states.get("cover.markise") + assert entity is not None + assert entity.state == STATE_CLOSED + assert entity.attributes["current_position"] == 0 + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_awning.mock_calls) + + await hass.services.async_call( + Platform.COVER, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: entity.entity_id, "position": 50}, + blocking=True, + ) + + entity = hass.states.get("cover.markise") + assert entity is not None + assert entity.state == STATE_OPEN + assert entity.attributes["current_position"] == 50 + assert len(mock_hub_status_prod_awning.mock_calls) == before + + +async def test_cover_open_and_stop( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_awning: AsyncMock, + mock_action_call: AsyncMock, +) -> None: + """Test that a cover entity is opened and stopped correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_awning.mock_calls) >= 1 + + entity = hass.states.get("cover.markise") + assert entity is not None + assert entity.state == STATE_CLOSED + assert entity.attributes["current_position"] == 0 + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_awning.mock_calls) + + await hass.services.async_call( + Platform.COVER, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: entity.entity_id, "position": 80}, + blocking=True, + ) + + entity = hass.states.get("cover.markise") + assert entity is not None + assert entity.state == STATE_OPEN + assert entity.attributes["current_position"] == 80 + assert len(mock_hub_status_prod_awning.mock_calls) == before + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_awning.mock_calls) + + await hass.services.async_call( + Platform.COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("cover.markise") + 
assert entity is not None + assert entity.state == STATE_OPEN + assert entity.attributes["current_position"] == 80 + assert len(mock_hub_status_prod_awning.mock_calls) == before diff --git a/tests/components/wmspro/test_diagnostics.py b/tests/components/wmspro/test_diagnostics.py new file mode 100644 index 00000000000..930c3f2898e --- /dev/null +++ b/tests/components/wmspro/test_diagnostics.py @@ -0,0 +1,34 @@ +"""Test the wmspro diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_config_entry + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_dest_refresh: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test that diagnostics are generated correctly for a loaded config entry.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_dest_refresh.mock_calls) == 2 + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + assert result == snapshot diff --git a/tests/components/wmspro/test_init.py b/tests/components/wmspro/test_init.py new file mode 100644 index 00000000000..aeb5f3db152 --- /dev/null +++ b/tests/components/wmspro/test_init.py @@ -0,0 +1,38 @@ +"""Test the wmspro initialization.""" + +from unittest.mock import AsyncMock + +import aiohttp + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from .
import setup_config_entry + +from tests.common import MockConfigEntry + + +async def test_config_entry_device_config_ping_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, +) -> None: + """Test that a config entry will be retried due to ConfigEntryNotReady when the hub ping fails.""" + mock_hub_ping.side_effect = aiohttp.ClientError + await setup_config_entry(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + assert len(mock_hub_ping.mock_calls) == 1 + + +async def test_config_entry_device_config_refresh_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_refresh: AsyncMock, +) -> None: + """Test that a config entry will be retried due to ConfigEntryNotReady when the hub configuration refresh fails.""" + mock_hub_refresh.side_effect = aiohttp.ClientError + await setup_config_entry(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_refresh.mock_calls) == 1 diff --git a/tests/components/wmspro/test_light.py b/tests/components/wmspro/test_light.py new file mode 100644 index 00000000000..db53b54a2f6 --- /dev/null +++ b/tests/components/wmspro/test_light.py @@ -0,0 +1,206 @@ +"""Test the wmspro light support.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.light import ATTR_BRIGHTNESS +from homeassistant.components.wmspro.const import DOMAIN +from homeassistant.components.wmspro.light import SCAN_INTERVAL +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .
import setup_config_entry + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_light_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_dimmer: AsyncMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that a light device is created correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) == 2 + + device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "97358")}) + assert device_entry is not None + assert device_entry == snapshot + + +async def test_light_update( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_dimmer: AsyncMock, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test that a light entity is created and updated correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) == 2 + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity == snapshot + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(mock_hub_status_prod_dimmer.mock_calls) >= 3 + + +async def test_light_turn_on_and_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_dimmer: AsyncMock, + mock_action_call: AsyncMock, +) -> None: + """Test that a light entity is turned on and off correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) >= 1 + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_OFF + assert entity.attributes[ATTR_BRIGHTNESS] is None + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_ON + assert entity.attributes[ATTR_BRIGHTNESS] >= 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) == before + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_OFF + assert entity.attributes[ATTR_BRIGHTNESS] is None + assert len(mock_hub_status_prod_dimmer.mock_calls) == before + + +async def test_light_dimm_on_and_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: 
AsyncMock, + mock_hub_status_prod_dimmer: AsyncMock, + mock_action_call: AsyncMock, +) -> None: + """Test that a light entity is dimmed on and off correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) >= 1 + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_OFF + assert entity.attributes[ATTR_BRIGHTNESS] is None + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_ON + assert entity.attributes[ATTR_BRIGHTNESS] >= 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) == before + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity.entity_id, ATTR_BRIGHTNESS: 128}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_ON + assert entity.attributes[ATTR_BRIGHTNESS] == 128 + assert len(mock_hub_status_prod_dimmer.mock_calls) == before + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_OFF + assert entity.attributes[ATTR_BRIGHTNESS] is None + assert len(mock_hub_status_prod_dimmer.mock_calls) == before diff --git a/tests/components/wmspro/test_scene.py b/tests/components/wmspro/test_scene.py new file mode 100644 index 00000000000..a6b16e5bbc9 --- /dev/null +++ b/tests/components/wmspro/test_scene.py @@ -0,0 +1,63 @@ +"""Test the wmspro scene support.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.wmspro.const import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component + +from . 
import setup_config_entry + +from tests.common import MockConfigEntry + + +async def test_scene_room_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_test: AsyncMock, + mock_dest_refresh: AsyncMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that a scene room device is created correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_test.mock_calls) == 1 + + device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "42581")}) + assert device_entry is not None + assert device_entry == snapshot + + +async def test_scene_activate( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_test: AsyncMock, + mock_dest_refresh: AsyncMock, + mock_scene_call: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test that a scene entity is created and activated correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_test.mock_calls) == 1 + + entity = hass.states.get("scene.raum_0_gute_nacht") + assert entity is not None + assert entity == snapshot + + await async_setup_component(hass, "homeassistant", {}) + await hass.services.async_call( + "homeassistant", + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + assert len(mock_scene_call.mock_calls) == 1 diff --git a/tests/components/wolflink/const.py b/tests/components/wolflink/const.py new file mode 100644 index 00000000000..073faec51b2 --- /dev/null +++ b/tests/components/wolflink/const.py @@ -0,0 +1,16 @@ +"""Constants for the Wolf SmartSet Service tests.""" + +from homeassistant.components.wolflink.const import ( + DEVICE_GATEWAY, + DEVICE_ID, + DEVICE_NAME, +) +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +CONFIG = { + DEVICE_NAME: "test-device", + DEVICE_ID: 1234, + DEVICE_GATEWAY: 5678, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", +} diff --git a/tests/components/wolflink/test_config_flow.py b/tests/components/wolflink/test_config_flow.py index bd71d9d3180..d30cc046a85 100644 --- a/tests/components/wolflink/test_config_flow.py +++ b/tests/components/wolflink/test_config_flow.py @@ -17,15 +17,9 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry +from .const import CONFIG -CONFIG = { - DEVICE_NAME: "test-device", - DEVICE_ID: 1234, - DEVICE_GATEWAY: 5678, - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", -} +from tests.common import MockConfigEntry INPUT_CONFIG = { CONF_USERNAME: CONFIG[CONF_USERNAME], @@ -134,7 +128,7 @@ async def test_already_configured_error(hass: HomeAssistant) -> None: patch("homeassistant.components.wolflink.async_setup_entry", return_value=True), ): MockConfigEntry( - domain=DOMAIN, unique_id=CONFIG[DEVICE_ID], data=CONFIG + domain=DOMAIN, unique_id=str(CONFIG[DEVICE_ID]), data=CONFIG ).add_to_hass(hass) result = await hass.config_entries.flow.async_init( diff --git a/tests/components/wolflink/test_init.py b/tests/components/wolflink/test_init.py new file mode 100644 index 00000000000..ec39619452f --- /dev/null +++ b/tests/components/wolflink/test_init.py @@ -0,0 +1,59 @@ +"""Test the Wolf SmartSet 
Service.""" + +from unittest.mock import patch + +from httpx import RequestError + +from homeassistant.components.wolflink.const import DEVICE_ID, DOMAIN, MANUFACTURER +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .const import CONFIG + +from tests.common import MockConfigEntry + + +async def test_unique_id_migration( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test already configured while creating entry.""" + config_entry = MockConfigEntry( + domain=DOMAIN, unique_id=CONFIG[DEVICE_ID], data=CONFIG + ) + config_entry.add_to_hass(hass) + + device_id = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, CONFIG[DEVICE_ID])}, + configuration_url="https://www.wolf-smartset.com/", + manufacturer=MANUFACTURER, + ).id + + assert config_entry.version == 1 + assert config_entry.minor_version == 1 + assert config_entry.unique_id == 1234 + assert ( + hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, 1234) + is config_entry + ) + assert hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, "1234") is None + assert device_registry.async_get(device_id).identifiers == {(DOMAIN, 1234)} + + with ( + patch( + "homeassistant.components.wolflink.fetch_parameters", + side_effect=RequestError("Unable to fetch parameters"), + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + + assert config_entry.version == 1 + assert config_entry.minor_version == 2 + assert config_entry.unique_id == "1234" + assert ( + hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, "1234") + is config_entry + ) + assert hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, 1234) is None + + assert device_registry.async_get(device_id).identifiers == {(DOMAIN, "1234")} diff --git a/tests/components/workday/snapshots/test_binary_sensor.ambr b/tests/components/workday/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..4cf7dca4861 --- /dev/null +++ b/tests/components/workday/snapshots/test_binary_sensor.ambr @@ -0,0 +1,59 @@ +# serializer version: 1 +# name: test_only_repairs_for_current_next_year + dict({ + tuple( + 'workday', + 'bad_date_holiday-1-2024_08_15', + ): IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': None, + 'created': , + 'data': dict({ + 'country': 'DE', + 'entry_id': '1', + 'named_holiday': '2024-08-15', + }), + 'dismissed_version': None, + 'domain': 'workday', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'bad_date_holiday-1-2024_08_15', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'bad_date_holiday', + 'translation_placeholders': dict({ + 'country': 'DE', + 'remove_holidays': '2024-08-15', + 'title': 'Mock Title', + }), + }), + tuple( + 'workday', + 'bad_date_holiday-1-2025_08_15', + ): IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': None, + 'created': , + 'data': dict({ + 'country': 'DE', + 'entry_id': '1', + 'named_holiday': '2025-08-15', + }), + 'dismissed_version': None, + 'domain': 'workday', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'bad_date_holiday-1-2025_08_15', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'bad_date_holiday', + 'translation_placeholders': dict({ + 'country': 'DE', + 'remove_holidays': '2025-08-15', + 'title': 'Mock Title', + }), + }), + }) +# --- diff --git 
a/tests/components/workday/snapshots/test_diagnostics.ambr b/tests/components/workday/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e7331b911a8 --- /dev/null +++ b/tests/components/workday/snapshots/test_diagnostics.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'workday', + 'entry_id': '1', + 'minor_version': 1, + 'options': dict({ + 'add_holidays': list([ + '2022-12-01', + '2022-12-05,2022-12-15', + ]), + 'country': 'DE', + 'days_offset': 0, + 'excludes': list([ + 'sat', + 'sun', + 'holiday', + ]), + 'language': 'de', + 'name': 'Workday Sensor', + 'province': 'BW', + 'remove_holidays': list([ + '2022-12-04', + '2022-12-24,2022-12-26', + ]), + 'workdays': list([ + 'mon', + 'tue', + 'wed', + 'thu', + 'fri', + ]), + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/workday/test_binary_sensor.py b/tests/components/workday/test_binary_sensor.py index a2718c00824..212c3e9d305 100644 --- a/tests/components/workday/test_binary_sensor.py +++ b/tests/components/workday/test_binary_sensor.py @@ -5,10 +5,18 @@ from typing import Any from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.workday.binary_sensor import SERVICE_CHECK_DATE -from homeassistant.components.workday.const import DOMAIN +from homeassistant.components.workday.const import ( + DEFAULT_EXCLUDES, + DEFAULT_NAME, + DEFAULT_OFFSET, + DEFAULT_WORKDAYS, + DOMAIN, +) from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.dt import UTC @@ -422,3 +430,34 @@ async def test_optional_category( state = hass.states.get("binary_sensor.workday_sensor") assert state is not None assert state.state == end_state + + +async def test_only_repairs_for_current_next_year( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + issue_registry: ir.IssueRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test only repairs are raised for current and next year.""" + freezer.move_to(datetime(2024, 8, 15, 12, tzinfo=UTC)) + remove_dates = [ + # None of these dates are holidays + "2024-08-15", # Creates issue + "2025-08-15", # Creates issue + "2026-08-15", # No issue + ] + config = { + "name": DEFAULT_NAME, + "country": "DE", + "province": "BW", + "excludes": DEFAULT_EXCLUDES, + "days_offset": DEFAULT_OFFSET, + "workdays": DEFAULT_WORKDAYS, + "add_holidays": [], + "remove_holidays": remove_dates, + "language": "de", + } + await init_integration(hass, config) + + assert len(issue_registry.issues) == 2 + assert issue_registry.issues == snapshot diff --git a/tests/components/workday/test_config_flow.py b/tests/components/workday/test_config_flow.py index cc83cee93a2..1bf0f176fe9 100644 --- a/tests/components/workday/test_config_flow.py +++ b/tests/components/workday/test_config_flow.py @@ -557,7 +557,7 @@ pytestmark = pytest.mark.usefixtures() ("language", "holiday"), [ ("de", "Weihnachtstag"), - ("en", "Christmas"), + ("en_US", "Christmas"), ], ) async def test_language( diff --git a/tests/components/workday/test_diagnostics.py 
b/tests/components/workday/test_diagnostics.py new file mode 100644 index 00000000000..13206a361f1 --- /dev/null +++ b/tests/components/workday/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Test Workday diagnostics.""" + +from __future__ import annotations + +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . import TEST_CONFIG_ADD_REMOVE_DATE_RANGE, init_integration + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test generating diagnostics for a config entry.""" + entry = await init_integration(hass, TEST_CONFIG_ADD_REMOVE_DATE_RANGE) + + diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert diag == snapshot( + exclude=props("full_features", "created_at", "modified_at"), + ) diff --git a/tests/components/workday/test_repairs.py b/tests/components/workday/test_repairs.py index 60a55e1a347..adbae5676e6 100644 --- a/tests/components/workday/test_repairs.py +++ b/tests/components/workday/test_repairs.py @@ -2,12 +2,8 @@ from __future__ import annotations -from http import HTTPStatus +import pytest -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) from homeassistant.components.workday.const import CONF_REMOVE_HOLIDAYS, DOMAIN from homeassistant.const import CONF_COUNTRY from homeassistant.core import HomeAssistant @@ -23,6 +19,7 @@ from . import ( ) from tests.common import ANY +from tests.components.repairs import process_repair_fix_flow, start_repair_fix_flow from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -52,24 +49,15 @@ async def test_bad_country( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_country"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "bad_country") flow_id = data["flow_id"] assert data["description_placeholders"] == {"title": entry.title} assert data["step_id"] == "country" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={"country": "DE"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id, json={"country": "DE"}) - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={"province": "HB"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id, json={"province": "HB"}) assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -114,24 +102,15 @@ async def test_bad_country_none( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_country"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "bad_country") flow_id = data["flow_id"] assert data["description_placeholders"] == {"title": entry.title} assert data["step_id"] == "country" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={"country": "DE"}) - assert resp.status == HTTPStatus.OK - data = await 
resp.json() + data = await process_repair_fix_flow(client, flow_id, json={"country": "DE"}) - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id, json={}) assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -176,19 +155,13 @@ async def test_bad_country_no_province( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_country"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "bad_country") flow_id = data["flow_id"] assert data["description_placeholders"] == {"title": entry.title} assert data["step_id"] == "country" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={"country": "SE"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id, json={"country": "SE"}) assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -233,10 +206,7 @@ async def test_bad_province( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_province"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "bad_province") flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -245,10 +215,7 @@ async def test_bad_province( } assert data["step_id"] == "province" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={"province": "BW"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id, json={"province": "BW"}) assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -293,10 +260,7 @@ async def test_bad_province_none( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_province"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "bad_province") flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -305,10 +269,7 @@ async def test_bad_province_none( } assert data["step_id"] == "province" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id, json={}) assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -359,13 +320,9 @@ async def test_bad_named_holiday( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post( - url, - json={"handler": DOMAIN, "issue_id": "bad_named_holiday-1-not_a_holiday"}, + data = await start_repair_fix_flow( + client, DOMAIN, "bad_named_holiday-1-not_a_holiday" ) - assert resp.status == HTTPStatus.OK - data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -375,23 +332,17 @@ async def test_bad_named_holiday( } assert data["step_id"] == "fix_remove_holiday" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post( - url, json={"remove_holidays": 
["Christmas", "Not exist 2"]} + data = await process_repair_fix_flow( + client, flow_id, json={"remove_holidays": ["Christmas", "Not exist 2"]} ) - assert resp.status == HTTPStatus.OK - data = await resp.json() assert data["errors"] == { CONF_REMOVE_HOLIDAYS: "remove_holiday_error", } - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post( - url, json={"remove_holidays": ["Christmas", "Thanksgiving"]} + data = await process_repair_fix_flow( + client, flow_id, json={"remove_holidays": ["Christmas", "Thanksgiving"]} ) - assert resp.status == HTTPStatus.OK - data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -442,13 +393,7 @@ async def test_bad_date_holiday( issue = i assert issue is not None - url = RepairsFlowIndexView.url - resp = await client.post( - url, - json={"handler": DOMAIN, "issue_id": "bad_date_holiday-1-2024_02_05"}, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "bad_date_holiday-1-2024_02_05") flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -458,10 +403,9 @@ async def test_bad_date_holiday( } assert data["step_id"] == "fix_remove_holiday" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={"remove_holidays": ["2024-02-06"]}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow( + client, flow_id, json={"remove_holidays": ["2024-02-06"]} + ) assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -485,6 +429,10 @@ async def test_bad_date_holiday( assert issue +@pytest.mark.parametrize( + "ignore_translations", + ["component.workday.issues.issue_1.title"], +) async def test_other_fixable_issues( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -543,18 +491,12 @@ async def test_other_fixable_issues( "ignored": False, } in results - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "issue_1"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(client, DOMAIN, "issue_1") flow_id = data["flow_id"] assert data["step_id"] == "confirm" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(client, flow_id) assert data["type"] == "create_entry" await hass.async_block_till_done() diff --git a/tests/components/wyoming/__init__.py b/tests/components/wyoming/__init__.py index 5bfbbfe87b2..4540cdaabfd 100644 --- a/tests/components/wyoming/__init__.py +++ b/tests/components/wyoming/__init__.py @@ -8,7 +8,11 @@ from wyoming.info import ( AsrModel, AsrProgram, Attribution, + HandleModel, + HandleProgram, Info, + IntentModel, + IntentProgram, Satellite, TtsProgram, TtsVoice, @@ -87,6 +91,48 @@ WAKE_WORD_INFO = Info( ) ] ) +INTENT_INFO = Info( + intent=[ + IntentProgram( + name="Test Intent", + description="Test Intent", + installed=True, + attribution=TEST_ATTR, + models=[ + IntentModel( + name="Test Model", + description="Test Model", + installed=True, + attribution=TEST_ATTR, + languages=["en-US"], + version=None, + ) + ], + version=None, + ) + ] +) +HANDLE_INFO = Info( + handle=[ + HandleProgram( + name="Test Handle", + description="Test Handle", + installed=True, + attribution=TEST_ATTR, + models=[ + HandleModel( + name="Test Model", + 
description="Test Model", + installed=True, + attribution=TEST_ATTR, + languages=["en-US"], + version=None, + ) + ], + version=None, + ) + ] +) SATELLITE_INFO = Info( satellite=Satellite( name="Test Satellite", @@ -150,10 +196,10 @@ async def reload_satellite( return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.run" + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.run" ) as _run_mock, ): # _run_mock: satellite task does not actually run await hass.config_entries.async_reload(config_entry_id) - return hass.data[DOMAIN][config_entry_id].satellite.device + return hass.data[DOMAIN][config_entry_id].device diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index 770186d92aa..018fff33821 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -13,7 +13,14 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from . import SATELLITE_INFO, STT_INFO, TTS_INFO, WAKE_WORD_INFO +from . import ( + HANDLE_INFO, + INTENT_INFO, + SATELLITE_INFO, + STT_INFO, + TTS_INFO, + WAKE_WORD_INFO, +) from tests.common import MockConfigEntry @@ -83,6 +90,36 @@ def wake_word_config_entry(hass: HomeAssistant) -> ConfigEntry: return entry +@pytest.fixture +def intent_config_entry(hass: HomeAssistant) -> ConfigEntry: + """Create a config entry.""" + entry = MockConfigEntry( + domain="wyoming", + data={ + "host": "1.2.3.4", + "port": 1234, + }, + title="Test Intent", + ) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +def handle_config_entry(hass: HomeAssistant) -> ConfigEntry: + """Create a config entry.""" + entry = MockConfigEntry( + domain="wyoming", + data={ + "host": "1.2.3.4", + "port": 1234, + }, + title="Test Handle", + ) + entry.add_to_hass(hass) + return entry + + @pytest.fixture async def init_wyoming_stt(hass: HomeAssistant, stt_config_entry: ConfigEntry): """Initialize Wyoming STT.""" @@ -115,6 +152,34 @@ async def init_wyoming_wake_word( await hass.config_entries.async_setup(wake_word_config_entry.entry_id) +@pytest.fixture +async def init_wyoming_intent( + hass: HomeAssistant, intent_config_entry: ConfigEntry +) -> ConfigEntry: + """Initialize Wyoming intent recognizer.""" + with patch( + "homeassistant.components.wyoming.data.load_wyoming_info", + return_value=INTENT_INFO, + ): + await hass.config_entries.async_setup(intent_config_entry.entry_id) + + return intent_config_entry + + +@pytest.fixture +async def init_wyoming_handle( + hass: HomeAssistant, handle_config_entry: ConfigEntry +) -> ConfigEntry: + """Initialize Wyoming intent handler.""" + with patch( + "homeassistant.components.wyoming.data.load_wyoming_info", + return_value=HANDLE_INFO, + ): + await hass.config_entries.async_setup(handle_config_entry.entry_id) + + return handle_config_entry + + @pytest.fixture def metadata(hass: HomeAssistant) -> stt.SpeechMetadata: """Get default STT metadata.""" @@ -152,7 +217,7 @@ async def init_satellite(hass: HomeAssistant, satellite_config_entry: ConfigEntr return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.run" + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.run" ) as _run_mock, ): # _run_mock: satellite task does not actually run @@ -164,4 +229,4 @@ async def satellite_device( hass: HomeAssistant, init_satellite, satellite_config_entry: ConfigEntry ) -> 
SatelliteDevice: """Get a satellite device fixture.""" - return hass.data[DOMAIN][satellite_config_entry.entry_id].satellite.device + return hass.data[DOMAIN][satellite_config_entry.entry_id].device diff --git a/tests/components/wyoming/snapshots/test_config_flow.ambr b/tests/components/wyoming/snapshots/test_config_flow.ambr index ee4c5533254..d288c531407 100644 --- a/tests/components/wyoming/snapshots/test_config_flow.ambr +++ b/tests/components/wyoming/snapshots/test_config_flow.ambr @@ -26,6 +26,8 @@ 'port': 10200, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'wyoming', 'entry_id': , 'minor_version': 1, @@ -34,10 +36,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', + 'subentries': list([ + ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Piper', 'type': , 'version': 1, @@ -70,6 +76,8 @@ 'port': 10200, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'wyoming', 'entry_id': , 'minor_version': 1, @@ -78,10 +86,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', + 'subentries': list([ + ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Piper', 'type': , 'version': 1, @@ -90,7 +102,6 @@ # name: test_zeroconf_discovery FlowResultSnapshot({ 'context': dict({ - 'name': 'Test Satellite', 'source': 'zeroconf', 'title_placeholders': dict({ 'name': 'Test Satellite', @@ -114,6 +125,8 @@ 'port': 12345, }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'wyoming', 'entry_id': , 'minor_version': 1, @@ -122,10 +135,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'Test Satellite', 'unique_id': 'test_zeroconf_name._wyoming._tcp.local._Test Satellite', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Test Satellite', 'type': , 'version': 1, diff --git a/tests/components/wyoming/snapshots/test_conversation.ambr b/tests/components/wyoming/snapshots/test_conversation.ambr new file mode 100644 index 00000000000..24763cac441 --- /dev/null +++ b/tests/components/wyoming/snapshots/test_conversation.ambr @@ -0,0 +1,7 @@ +# serializer version: 1 +# name: test_connection_lost + 'Connection to service was lost' +# --- +# name: test_oserror + 'Error communicating with service: Boom!' +# --- diff --git a/tests/components/wyoming/test_binary_sensor.py b/tests/components/wyoming/test_binary_sensor.py index 8d4e3c72c56..99ed5cda58e 100644 --- a/tests/components/wyoming/test_binary_sensor.py +++ b/tests/components/wyoming/test_binary_sensor.py @@ -1,13 +1,17 @@ """Test Wyoming binary sensor devices.""" +import pytest + from homeassistant.components.wyoming.devices import SatelliteDevice from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from . 
import reload_satellite +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_assist_in_progress( hass: HomeAssistant, satellite_config_entry: ConfigEntry, @@ -36,3 +40,19 @@ async def test_assist_in_progress( assert state is not None assert state.state == STATE_OFF assert not satellite_device.is_active + + +async def test_assist_in_progress_disabled_by_default( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + satellite_device: SatelliteDevice, +) -> None: + """Test assist in progress binary sensor is added disabled.""" + assist_in_progress_id = satellite_device.get_assist_in_progress_entity_id(hass) + assert assist_in_progress_id + + assert not hass.states.get(assist_in_progress_id) + entity_entry = entity_registry.async_get(assist_in_progress_id) + assert entity_entry + assert entity_entry.disabled + assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION diff --git a/tests/components/wyoming/test_config_flow.py b/tests/components/wyoming/test_config_flow.py index e363a0650bc..6bca226d621 100644 --- a/tests/components/wyoming/test_config_flow.py +++ b/tests/components/wyoming/test_config_flow.py @@ -8,11 +8,11 @@ from syrupy.assertion import SnapshotAssertion from wyoming.info import Info from homeassistant import config_entries -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.wyoming.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from . import EMPTY_INFO, SATELLITE_INFO, STT_INFO, TTS_INFO diff --git a/tests/components/wyoming/test_conversation.py b/tests/components/wyoming/test_conversation.py new file mode 100644 index 00000000000..02b04503962 --- /dev/null +++ b/tests/components/wyoming/test_conversation.py @@ -0,0 +1,224 @@ +"""Test conversation.""" + +from __future__ import annotations + +from unittest.mock import patch + +from syrupy import SnapshotAssertion +from wyoming.asr import Transcript +from wyoming.handle import Handled, NotHandled +from wyoming.intent import Entity, Intent, NotRecognized + +from homeassistant.components import conversation +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers import intent + +from . 
import MockAsyncTcpClient + + +async def test_intent(hass: HomeAssistant, init_wyoming_intent: ConfigEntry) -> None: + """Test when an intent is recognized.""" + agent_id = "conversation.test_intent" + + conversation_id = "conversation-1234" + test_intent = Intent( + name="TestIntent", + entities=[Entity(name="entity", value="value")], + text="success", + ) + + class TestIntentHandler(intent.IntentHandler): + """Test Intent Handler.""" + + intent_type = "TestIntent" + + async def async_handle(self, intent_obj: intent.Intent): + """Handle the intent.""" + assert intent_obj.slots.get("entity", {}).get("value") == "value" + return intent_obj.create_response() + + intent.async_register(hass, TestIntentHandler()) + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([test_intent.event()]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=conversation_id, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == "success" + assert result.conversation_id == conversation_id + + +async def test_intent_handle_error( + hass: HomeAssistant, init_wyoming_intent: ConfigEntry +) -> None: + """Test error during handling when an intent is recognized.""" + agent_id = "conversation.test_intent" + + test_intent = Intent(name="TestIntent", entities=[], text="success") + + class TestIntentHandler(intent.IntentHandler): + """Test Intent Handler.""" + + intent_type = "TestIntent" + + async def async_handle(self, intent_obj: intent.Intent): + """Handle the intent.""" + raise intent.IntentError + + intent.async_register(hass, TestIntentHandler()) + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([test_intent.event()]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.FAILED_TO_HANDLE + + +async def test_not_recognized( + hass: HomeAssistant, init_wyoming_intent: ConfigEntry +) -> None: + """Test when an intent is not recognized.""" + agent_id = "conversation.test_intent" + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([NotRecognized(text="failure").event()]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_INTENT_MATCH + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == "failure" + + +async def test_handle(hass: HomeAssistant, init_wyoming_handle: ConfigEntry) -> None: + """Test when an intent is handled.""" + agent_id = "conversation.test_handle" + + conversation_id = "conversation-1234" + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([Handled(text="success").event()]), + ): + result = await conversation.async_converse( + 
hass=hass, + text="test text", + conversation_id=conversation_id, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == "success" + assert result.conversation_id == conversation_id + + +async def test_not_handled( + hass: HomeAssistant, init_wyoming_handle: ConfigEntry +) -> None: + """Test when an intent is not handled.""" + agent_id = "conversation.test_handle" + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([NotHandled(text="failure").event()]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.FAILED_TO_HANDLE + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == "failure" + + +async def test_connection_lost( + hass: HomeAssistant, init_wyoming_handle: ConfigEntry, snapshot: SnapshotAssertion +) -> None: + """Test connection to client is lost.""" + agent_id = "conversation.test_handle" + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([None]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.UNKNOWN + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == snapshot() + + +async def test_oserror( + hass: HomeAssistant, init_wyoming_handle: ConfigEntry, snapshot: SnapshotAssertion +) -> None: + """Test connection error.""" + agent_id = "conversation.test_handle" + + mock_client = MockAsyncTcpClient([Transcript("success").event()]) + + with ( + patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", mock_client + ), + patch.object(mock_client, "read_event", side_effect=OSError("Boom!")), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.UNKNOWN + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == snapshot() diff --git a/tests/components/wyoming/test_devices.py b/tests/components/wyoming/test_devices.py index 98efb76ab1d..24423264f93 100644 --- a/tests/components/wyoming/test_devices.py +++ b/tests/components/wyoming/test_devices.py @@ -32,8 +32,8 @@ async def test_device_registry_info( assist_in_progress_id = satellite_device.get_assist_in_progress_entity_id(hass) assert assist_in_progress_id assist_in_progress_state = hass.states.get(assist_in_progress_id) - assert assist_in_progress_state is not None - assert assist_in_progress_state.state == STATE_OFF + # assist_in_progress binary sensor is disabled + assert assist_in_progress_state is None muted_id = 
satellite_device.get_muted_entity_id(hass) assert muted_id @@ -58,7 +58,8 @@ async def test_remove_device_registry_entry( # Check associated entities assist_in_progress_id = satellite_device.get_assist_in_progress_entity_id(hass) assert assist_in_progress_id - assert hass.states.get(assist_in_progress_id) is not None + # assist_in_progress binary sensor is disabled + assert hass.states.get(assist_in_progress_id) is None muted_id = satellite_device.get_muted_entity_id(hass) assert muted_id diff --git a/tests/components/wyoming/test_satellite.py b/tests/components/wyoming/test_satellite.py index 1a291153ad0..f293f976242 100644 --- a/tests/components/wyoming/test_satellite.py +++ b/tests/components/wyoming/test_satellite.py @@ -23,6 +23,7 @@ from wyoming.vad import VoiceStarted, VoiceStopped from wyoming.wake import Detect, Detection from homeassistant.components import assist_pipeline, wyoming +from homeassistant.components.wyoming.assist_satellite import WyomingAssistSatellite from homeassistant.components.wyoming.devices import SatelliteDevice from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant, State @@ -240,23 +241,22 @@ async def test_satellite_pipeline(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.wyoming.satellite.tts.async_get_media_source_audio", + "homeassistant.components.wyoming.assist_satellite.tts.async_get_media_source_audio", return_value=("wav", get_test_wav()), ), - patch("homeassistant.components.wyoming.satellite._PING_SEND_DELAY", 0), + patch("homeassistant.components.wyoming.assist_satellite._PING_SEND_DELAY", 0), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite.device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device + assert device is not None async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -443,7 +443,7 @@ async def test_satellite_muted(hass: HomeAssistant) -> None: """Test callback for a satellite that has been muted.""" on_muted_event = asyncio.Event() - original_on_muted = wyoming.satellite.WyomingSatellite.on_muted + original_on_muted = WyomingAssistSatellite.on_muted async def on_muted(self): # Trigger original function @@ -462,12 +462,16 @@ async def test_satellite_muted(hass: HomeAssistant) -> None: "homeassistant.components.wyoming.data.load_wyoming_info", return_value=SATELLITE_INFO, ), + patch( + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + SatelliteAsyncTcpClient([]), + ), patch( "homeassistant.components.wyoming.switch.WyomingSatelliteMuteSwitch.async_get_last_state", return_value=State("switch.test_mute", STATE_ON), ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_muted", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_muted", on_muted, ), ): @@ -484,11 +488,11 @@ async def test_satellite_restart(hass: HomeAssistant) -> None: """Test pipeline loop restart after unexpected error.""" on_restart_event = asyncio.Event() - original_on_restart = 
wyoming.satellite.WyomingSatellite.on_restart + original_on_restart = WyomingAssistSatellite.on_restart async def on_restart(self): await original_on_restart(self) - self.stop() + self.stop_satellite() on_restart_event.set() with ( @@ -497,14 +501,14 @@ async def test_satellite_restart(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite._connect_and_loop", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite._connect_and_loop", side_effect=RuntimeError(), ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_restart", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_restart", on_restart, ), - patch("homeassistant.components.wyoming.satellite._RESTART_SECONDS", 0), + patch("homeassistant.components.wyoming.assist_satellite._RESTART_SECONDS", 0), ): await setup_config_entry(hass) async with asyncio.timeout(1): @@ -517,7 +521,7 @@ async def test_satellite_reconnect(hass: HomeAssistant) -> None: reconnect_event = asyncio.Event() stopped_event = asyncio.Event() - original_on_reconnect = wyoming.satellite.WyomingSatellite.on_reconnect + original_on_reconnect = WyomingAssistSatellite.on_reconnect async def on_reconnect(self): await original_on_reconnect(self) @@ -526,7 +530,7 @@ async def test_satellite_reconnect(hass: HomeAssistant) -> None: num_reconnects += 1 if num_reconnects >= 2: reconnect_event.set() - self.stop() + self.stop_satellite() async def on_stopped(self): stopped_event.set() @@ -537,18 +541,20 @@ async def test_satellite_reconnect(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient.connect", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient.connect", side_effect=ConnectionRefusedError(), ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_reconnect", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_reconnect", on_reconnect, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_stopped", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_stopped", on_stopped, ), - patch("homeassistant.components.wyoming.satellite._RECONNECT_SECONDS", 0), + patch( + "homeassistant.components.wyoming.assist_satellite._RECONNECT_SECONDS", 0 + ), ): await setup_config_entry(hass) async with asyncio.timeout(1): @@ -561,7 +567,7 @@ async def test_satellite_disconnect_before_pipeline(hass: HomeAssistant) -> None on_restart_event = asyncio.Event() async def on_restart(self): - self.stop() + self.stop_satellite() on_restart_event.set() with ( @@ -570,14 +576,14 @@ async def test_satellite_disconnect_before_pipeline(hass: HomeAssistant) -> None return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", MockAsyncTcpClient([]), # no RunPipeline event ), patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", ) as mock_run_pipeline, patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_restart", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_restart", on_restart, ), ): @@ -603,7 +609,7 @@ async def test_satellite_disconnect_during_pipeline(hass: HomeAssistant) -> None 
async def on_restart(self): # Pretend sensor got stuck on self.device.is_active = True - self.stop() + self.stop_satellite() on_restart_event.set() async def on_stopped(self): @@ -615,25 +621,23 @@ async def test_satellite_disconnect_during_pipeline(hass: HomeAssistant) -> None return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", MockAsyncTcpClient(events), ), patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", ) as mock_run_pipeline, patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_restart", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_restart", on_restart, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_stopped", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_stopped", on_stopped, ), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite.device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device async with asyncio.timeout(1): await on_restart_event.wait() @@ -665,11 +669,11 @@ async def test_satellite_error_during_pipeline(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", wraps=_async_pipeline_from_audio_stream, ) as mock_run_pipeline, ): @@ -701,7 +705,7 @@ async def test_tts_not_wav(hass: HomeAssistant) -> None: """Test satellite receiving non-WAV audio from text-to-speech.""" assert await async_setup_component(hass, assist_pipeline.DOMAIN, {}) - original_stream_tts = wyoming.satellite.WyomingSatellite._stream_tts + original_stream_tts = WyomingAssistSatellite._stream_tts error_event = asyncio.Event() async def _stream_tts(self, media_id): @@ -724,19 +728,19 @@ async def test_tts_not_wav(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", wraps=_async_pipeline_from_audio_stream, ) as mock_run_pipeline, patch( - "homeassistant.components.wyoming.satellite.tts.async_get_media_source_audio", + "homeassistant.components.wyoming.assist_satellite.tts.async_get_media_source_audio", return_value=("mp3", bytes(1)), ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite._stream_tts", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite._stream_tts", _stream_tts, ), ): @@ -819,18 +823,16 @@ async def test_pipeline_changed(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", 
SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite.device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -893,18 +895,16 @@ async def test_audio_settings_changed(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite.device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -938,7 +938,7 @@ async def test_invalid_stages(hass: HomeAssistant) -> None: ).event(), ] - original_run_pipeline_once = wyoming.satellite.WyomingSatellite._run_pipeline_once + original_run_pipeline_once = WyomingAssistSatellite._run_pipeline_once start_stage_event = asyncio.Event() end_stage_event = asyncio.Event() @@ -967,11 +967,11 @@ async def test_invalid_stages(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite._run_pipeline_once", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite._run_pipeline_once", _run_pipeline_once, ), ): @@ -1029,11 +1029,11 @@ async def test_client_stops_pipeline(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), ): @@ -1083,11 +1083,11 @@ async def test_wake_word_phrase(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ), patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", wraps=_async_pipeline_from_audio_stream, ) as mock_run_pipeline, ): @@ -1114,14 +1114,12 @@ async def test_timers(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + 
"homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient([]), ) as mock_client, ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite.device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -1285,104 +1283,3 @@ async def test_timers(hass: HomeAssistant) -> None: timer_finished = mock_client.timer_finished assert timer_finished is not None assert timer_finished.id == timer_started.id - - -async def test_satellite_conversation_id(hass: HomeAssistant) -> None: - """Test that the same conversation id is used until timeout.""" - assert await async_setup_component(hass, assist_pipeline.DOMAIN, {}) - - events = [ - RunPipeline( - start_stage=PipelineStage.WAKE, - end_stage=PipelineStage.TTS, - restart_on_end=True, - ).event(), - ] - - pipeline_kwargs: dict[str, Any] = {} - pipeline_event_callback: Callable[[assist_pipeline.PipelineEvent], None] | None = ( - None - ) - run_pipeline_called = asyncio.Event() - - async def async_pipeline_from_audio_stream( - hass: HomeAssistant, - context, - event_callback, - stt_metadata, - stt_stream, - **kwargs, - ) -> None: - nonlocal pipeline_kwargs, pipeline_event_callback - pipeline_kwargs = kwargs - pipeline_event_callback = event_callback - - run_pipeline_called.set() - - with ( - patch( - "homeassistant.components.wyoming.data.load_wyoming_info", - return_value=SATELLITE_INFO, - ), - patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", - SatelliteAsyncTcpClient(events), - ) as mock_client, - patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", - async_pipeline_from_audio_stream, - ), - patch( - "homeassistant.components.wyoming.satellite.tts.async_get_media_source_audio", - return_value=("wav", get_test_wav()), - ), - patch("homeassistant.components.wyoming.satellite._PING_SEND_DELAY", 0), - ): - entry = await setup_config_entry(hass) - satellite: wyoming.WyomingSatellite = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite - - async with asyncio.timeout(1): - await mock_client.connect_event.wait() - await mock_client.run_satellite_event.wait() - - async with asyncio.timeout(1): - await run_pipeline_called.wait() - - assert pipeline_event_callback is not None - - # A conversation id should have been generated - conversation_id = pipeline_kwargs.get("conversation_id") - assert conversation_id - - # Reset and run again - run_pipeline_called.clear() - pipeline_kwargs.clear() - - pipeline_event_callback( - assist_pipeline.PipelineEvent(assist_pipeline.PipelineEventType.RUN_END) - ) - - async with asyncio.timeout(1): - await run_pipeline_called.wait() - - # Should be the same conversation id - assert pipeline_kwargs.get("conversation_id") == conversation_id - - # Reset and run again, but this time "time out" - satellite._conversation_id_time = None - run_pipeline_called.clear() - pipeline_kwargs.clear() - - pipeline_event_callback( - assist_pipeline.PipelineEvent(assist_pipeline.PipelineEventType.RUN_END) - ) - - async with asyncio.timeout(1): - await run_pipeline_called.wait() - - # Should be a different conversation id - new_conversation_id = pipeline_kwargs.get("conversation_id") - assert new_conversation_id - assert new_conversation_id != conversation_id diff --git a/tests/components/xiaomi/test_device_tracker.py b/tests/components/xiaomi/test_device_tracker.py index 
0f1c36d1fba..625e6f404ad 100644 --- a/tests/components/xiaomi/test_device_tracker.py +++ b/tests/components/xiaomi/test_device_tracker.py @@ -6,7 +6,7 @@ from unittest.mock import MagicMock, call, patch import requests -from homeassistant.components.device_tracker import DOMAIN +from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN import homeassistant.components.xiaomi.device_tracker as xiaomi from homeassistant.components.xiaomi.device_tracker import get_scanner from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PLATFORM, CONF_USERNAME @@ -154,9 +154,9 @@ def mocked_requests(*args, **kwargs): async def test_config(xiaomi_mock, hass: HomeAssistant) -> None: """Testing minimal configuration.""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: xiaomi.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "192.168.0.1", CONF_PASSWORD: "passwordTest", } @@ -164,7 +164,7 @@ async def test_config(xiaomi_mock, hass: HomeAssistant) -> None: } xiaomi.get_scanner(hass, config) assert xiaomi_mock.call_count == 1 - assert xiaomi_mock.call_args == call(config[DOMAIN]) + assert xiaomi_mock.call_args == call(config[DEVICE_TRACKER_DOMAIN]) call_arg = xiaomi_mock.call_args[0][0] assert call_arg["username"] == "admin" assert call_arg["password"] == "passwordTest" @@ -179,9 +179,9 @@ async def test_config(xiaomi_mock, hass: HomeAssistant) -> None: async def test_config_full(xiaomi_mock, hass: HomeAssistant) -> None: """Testing full configuration.""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: xiaomi.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "192.168.0.1", CONF_USERNAME: "alternativeAdminName", CONF_PASSWORD: "passwordTest", @@ -190,7 +190,7 @@ async def test_config_full(xiaomi_mock, hass: HomeAssistant) -> None: } xiaomi.get_scanner(hass, config) assert xiaomi_mock.call_count == 1 - assert xiaomi_mock.call_args == call(config[DOMAIN]) + assert xiaomi_mock.call_args == call(config[DEVICE_TRACKER_DOMAIN]) call_arg = xiaomi_mock.call_args[0][0] assert call_arg["username"] == "alternativeAdminName" assert call_arg["password"] == "passwordTest" @@ -203,9 +203,9 @@ async def test_config_full(xiaomi_mock, hass: HomeAssistant) -> None: async def test_invalid_credential(mock_get, mock_post, hass: HomeAssistant) -> None: """Testing invalid credential handling.""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: xiaomi.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "192.168.0.1", CONF_USERNAME: INVALID_USERNAME, CONF_PASSWORD: "passwordTest", @@ -220,9 +220,9 @@ async def test_invalid_credential(mock_get, mock_post, hass: HomeAssistant) -> N async def test_valid_credential(mock_get, mock_post, hass: HomeAssistant) -> None: """Testing valid refresh.""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: xiaomi.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "192.168.0.1", CONF_USERNAME: "admin", CONF_PASSWORD: "passwordTest", @@ -244,9 +244,9 @@ async def test_token_timed_out(mock_get, mock_post, hass: HomeAssistant) -> None New token is requested and list is downloaded a second time. 
""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: xiaomi.DOMAIN, + CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, CONF_HOST: "192.168.0.1", CONF_USERNAME: TOKEN_TIMEOUT_USERNAME, CONF_PASSWORD: "passwordTest", diff --git a/tests/components/xiaomi_ble/test_config_flow.py b/tests/components/xiaomi_ble/test_config_flow.py index b61615e0f79..e25ac939a53 100644 --- a/tests/components/xiaomi_ble/test_config_flow.py +++ b/tests/components/xiaomi_ble/test_config_flow.py @@ -2,7 +2,12 @@ from unittest.mock import patch -from xiaomi_ble import XiaomiBluetoothDeviceData as DeviceData +from xiaomi_ble import ( + XiaomiBluetoothDeviceData as DeviceData, + XiaomiCloudBLEDevice, + XiaomiCloudException, + XiaomiCloudInvalidAuthenticationException, +) from homeassistant import config_entries from homeassistant.components.bluetooth import BluetoothChange @@ -96,20 +101,25 @@ async def test_async_step_bluetooth_valid_device_but_missing_payload_then_full( context={"source": config_entries.SOURCE_BLUETOOTH}, data=MISSING_PAYLOAD_ENCRYPTED, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "get_encryption_key_4_5" + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "a115210eed7a88e50ad52662e732a9fb"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} - assert result2["result"].unique_id == "A4:C1:38:56:53:84" + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} + assert result3["result"].unique_id == "A4:C1:38:56:53:84" async def test_async_step_bluetooth_during_onboarding(hass: HomeAssistant) -> None: @@ -239,21 +249,244 @@ async def test_async_step_bluetooth_valid_device_v4_encryption( context={"source": config_entries.SOURCE_BLUETOOTH}, data=JTYJGD03MI_SERVICE_INFO, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "get_encryption_key_4_5" + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result3["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert 
result3["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_device_v4_encryption_from_cloud( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth with a valid v4 device, with auth from cloud.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "cloud_auth"}, + ) + device = XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="5b51a7c91cde6707c9ef18dfda143a58", + ) + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "x"}, + ) + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result3["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result3["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_device_v4_encryption_from_cloud_wrong_key( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth with a valid v4 device, with wrong auth from cloud.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "cloud_auth"}, + ) + + device = XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + ) + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "x"}, + ) + + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "decryption_failed" + + # Verify we can fallback to manual key + with patch( + "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True + ): + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, + ) + + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_incorrect_cloud_account( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth with incorrect cloud account.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + 
user_input={"next_step_id": "cloud_auth"}, + ) + + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=None, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "wrong@wrong.wrong", "password": "correct"}, + ) + + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "cloud_auth" + assert result3["errors"]["base"] == "api_device_not_found" + + device = XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="5b51a7c91cde6707c9ef18dfda143a58", + ) + # Verify we can try again with the correct account + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"username": "correct@correct.correct", "password": "correct"}, + ) + + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_incorrect_cloud_auth( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth with incorrect cloud auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "cloud_auth"}, + ) + + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + side_effect=XiaomiCloudInvalidAuthenticationException, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "wrong"}, + ) + + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "cloud_auth" + assert result3["errors"]["base"] == "auth_failed" + + device = XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="5b51a7c91cde6707c9ef18dfda143a58", + ) + # Verify we can try again with the correct password + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"username": "x@x.x", "password": "correct"}, + ) + + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_cloud_offline( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth when the cloud is offline.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "cloud_auth"}, + ) + + with patch( + 
"homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + side_effect=XiaomiCloudException, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "wrong"}, + ) + + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "api_error" async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key( @@ -265,31 +498,36 @@ async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key( context={"source": config_entries.SOURCE_BLUETOOTH}, data=JTYJGD03MI_SERVICE_INFO, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "get_encryption_key_4_5" + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "decryption_failed" + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "decryption_failed" # Test can finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key_length( @@ -301,31 +539,36 @@ async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key_length( context={"source": config_entries.SOURCE_BLUETOOTH}, data=JTYJGD03MI_SERVICE_INFO, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "get_encryption_key_4_5" + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18fda143a58"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "expected_32_characters" + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "expected_32_characters" # Test can finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await 
hass.config_entries.flow.async_configure( - result["flow_id"], + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_bluetooth_not_xiaomi(hass: HomeAssistant) -> None: @@ -457,20 +700,25 @@ async def test_async_step_user_short_payload_then_full(hass: HomeAssistant) -> N result["flow_id"], user_input={"address": "A4:C1:38:56:53:84"}, ) - assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "get_encryption_key_4_5" + assert result1["type"] is FlowResultType.MENU + assert result1["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "a115210eed7a88e50ad52662e732a9fb"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Temperature/Humidity Sensor 5384 (LYWSD03MMC)" - assert result2["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Temperature/Humidity Sensor 5384 (LYWSD03MMC)" + assert result3["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} async def test_async_step_user_with_found_devices_v4_encryption( @@ -492,21 +740,26 @@ async def test_async_step_user_with_found_devices_v4_encryption( result["flow_id"], user_input={"address": "54:EF:44:E3:9C:BC"}, ) - assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "get_encryption_key_4_5" + assert result1["type"] is FlowResultType.MENU + assert result1["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result3["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result3["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_user_with_found_devices_v4_encryption_wrong_key( @@ 
-530,31 +783,36 @@ async def test_async_step_user_with_found_devices_v4_encryption_wrong_key( result["flow_id"], user_input={"address": "54:EF:44:E3:9C:BC"}, ) - assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "get_encryption_key_4_5" + assert result1["type"] is FlowResultType.MENU + assert result1["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) # Try an incorrect key - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "decryption_failed" + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "decryption_failed" # Check can still finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_user_with_found_devices_v4_encryption_wrong_key_length( @@ -578,33 +836,38 @@ async def test_async_step_user_with_found_devices_v4_encryption_wrong_key_length result["flow_id"], user_input={"address": "54:EF:44:E3:9C:BC"}, ) - assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "get_encryption_key_4_5" + assert result1["type"] is FlowResultType.MENU + assert result1["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) # Try an incorrect key - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef1dfda143a58"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "expected_32_characters" + assert result3["type"] is FlowResultType.FORM + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "expected_32_characters" # Check can still finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], 
user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_user_with_found_devices_legacy_encryption( @@ -1003,14 +1266,19 @@ async def test_async_step_reauth_v4(hass: HomeAssistant) -> None: assert len(results) == 1 result = results[0] - assert result["step_id"] == "get_encryption_key_4_5" + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" async def test_async_step_reauth_v4_wrong_key(hass: HomeAssistant) -> None: @@ -1052,22 +1320,90 @@ async def test_async_step_reauth_v4_wrong_key(hass: HomeAssistant) -> None: assert len(results) == 1 result = results[0] - assert result["step_id"] == "get_encryption_key_4_5" + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dada143a58"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "decryption_failed" + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "decryption_failed" + + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, + ) + assert result4["type"] is FlowResultType.ABORT + assert result4["reason"] == "reauth_successful" + + +async def test_async_step_reauth_v4_from_cloud(hass: HomeAssistant) -> None: + """Test reauth with a v4 key from the cloud.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="54:EF:44:E3:9C:BC", + ) + entry.add_to_hass(hass) + saved_callback = None + + def _async_register_callback(_hass, _callback, _matcher, _mode): + nonlocal saved_callback + saved_callback = _callback + return lambda: None + + with patch( + "homeassistant.components.bluetooth.update_coordinator.async_register_callback", + _async_register_callback, + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 + + # WARNING: This test data is synthetic, rather than captured from a real device + # obj type is 0x1310, payload len is 0x2 and payload is 0x6000 + saved_callback( + make_advertisement( + "54:EF:44:E3:9C:BC", + 
b"XY\x97\tf\xbc\x9c\xe3D\xefT\x01\x08\x12\x05\x00\x00\x00q^\xbe\x90", + ), + BluetoothChange.ADVERTISEMENT, + ) + + await hass.async_block_till_done() + + results = hass.config_entries.flow.async_progress() + assert len(results) == 1 + result = results[0] + + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, + user_input={"next_step_id": "cloud_auth"}, ) - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" + device = XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="5b51a7c91cde6707c9ef18dfda143a58", + ) + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "x"}, + ) + + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: @@ -1083,16 +1419,7 @@ async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: device = DeviceData() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": {"name": entry.title}, - "unique_id": entry.unique_id, - }, - data=entry.data | {"device": device}, - ) + result = await entry.start_reauth_flow(hass, data={"device": device}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" diff --git a/tests/components/xiaomi_ble/test_sensor.py b/tests/components/xiaomi_ble/test_sensor.py index 4d9a29e3111..11a20a62d02 100644 --- a/tests/components/xiaomi_ble/test_sensor.py +++ b/tests/components/xiaomi_ble/test_sensor.py @@ -11,6 +11,7 @@ from homeassistant.components.xiaomi_ble.const import CONF_SLEEPY_DEVICE, DOMAIN from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, + STATE_ON, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -465,6 +466,115 @@ async def test_xiaomi_hhccjcy01_only_some_sources_connectable( await hass.async_block_till_done() +async def test_xiaomi_xmosb01xs(hass: HomeAssistant) -> None: + """Test XMOSB01XS multiple advertisements. + + This device has multiple advertisements before all sensors are visible. 
+ """ + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="DC:8E:95:23:07:B7", + data={"bindkey": "272b1c920ef435417c49228b8ab9a563"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + ( + b"\x58\x59\x83\x46\x91\xb7\x07\x23\x95\x8e\xdc\xc7\x17\x61\xc1" + b"\x24\x03\x00\x25\x44\xb0\x65" + ), + connectable=False, + ), + ) + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + b"\x10\x59\x83\x46\x90\xb7\x07\x23\x95\x8e\xdc", + connectable=False, + ), + ) + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + b"\x48\x59\x83\x46\x9d\x34\x45\xec\xab\xda\x93\xf9\x24\x03\x00\x9e\x01\x6d\x3d", + connectable=False, + ), + ) + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + ( + b"\x58\x59\x83\x46\xa9\xb7\x07\x23\x95\x8e\xdc\xc6\x59\xa2\xdc\xc5" + b"\x24\x03\x00\xa0\x4d\x0d\x45" + ), + connectable=False, + ), + ) + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + ( + b"\x58\x59\x83\x46\xa4\xb7\x07\x23\x95\x8e\xdc\x77\x2a\xe2\x5c\x11" + b"\x24\x03\x00\xab\x87\x7b\xd7" + ), + connectable=False, + ), + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 4 + + occupancy_sensor = hass.states.get("binary_sensor.occupancy_sensor_07b7_occupancy") + occupancy_sensor_attributes = occupancy_sensor.attributes + assert occupancy_sensor.state == STATE_ON + assert ( + occupancy_sensor_attributes[ATTR_FRIENDLY_NAME] + == "Occupancy Sensor 07B7 Occupancy" + ) + + illum_sensor = hass.states.get("sensor.occupancy_sensor_07b7_illuminance") + illum_sensor_attr = illum_sensor.attributes + assert illum_sensor.state == "111.0" + assert illum_sensor_attr[ATTR_FRIENDLY_NAME] == "Occupancy Sensor 07B7 Illuminance" + assert illum_sensor_attr[ATTR_UNIT_OF_MEASUREMENT] == "lx" + assert illum_sensor_attr[ATTR_STATE_CLASS] == "measurement" + + illum_sensor = hass.states.get("sensor.occupancy_sensor_07b7_duration_detected") + illum_sensor_attr = illum_sensor.attributes + assert illum_sensor.state == "2" + assert ( + illum_sensor_attr[ATTR_FRIENDLY_NAME] + == "Occupancy Sensor 07B7 Duration detected" + ) + assert illum_sensor_attr[ATTR_UNIT_OF_MEASUREMENT] == "min" + assert illum_sensor_attr[ATTR_STATE_CLASS] == "measurement" + + illum_sensor = hass.states.get("sensor.occupancy_sensor_07b7_duration_cleared") + illum_sensor_attr = illum_sensor.attributes + assert illum_sensor.state == "2" + assert ( + illum_sensor_attr[ATTR_FRIENDLY_NAME] + == "Occupancy Sensor 07B7 Duration cleared" + ) + assert illum_sensor_attr[ATTR_UNIT_OF_MEASUREMENT] == "min" + assert illum_sensor_attr[ATTR_STATE_CLASS] == "measurement" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert entry.data[CONF_SLEEPY_DEVICE] is True + + async def test_xiaomi_cgdk2_bind_key(hass: HomeAssistant) -> None: """Test CGDK2 bind key. 
diff --git a/tests/components/xiaomi_miio/test_button.py b/tests/components/xiaomi_miio/test_button.py index 8159d7c49e5..1f79a3ec0d0 100644 --- a/tests/components/xiaomi_miio/test_button.py +++ b/tests/components/xiaomi_miio/test_button.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock, patch import pytest -from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.xiaomi_miio.const import ( CONF_FLOW_TYPE, DOMAIN as XIAOMI_DOMAIN, @@ -68,7 +68,7 @@ async def test_vacuum_button_press(hass: HomeAssistant) -> None: pressed_at = dt_util.utcnow() await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id + "_reset_side_brush"}, blocking=True, @@ -81,7 +81,7 @@ async def test_vacuum_button_press(hass: HomeAssistant) -> None: async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up vacuum component.""" - entity_id = f"{DOMAIN}.{entity_name}" + entity_id = f"{BUTTON_DOMAIN}.{entity_name}" config_entry = MockConfigEntry( domain=XIAOMI_DOMAIN, diff --git a/tests/components/xiaomi_miio/test_config_flow.py b/tests/components/xiaomi_miio/test_config_flow.py index 707da4bff12..146526c69a5 100644 --- a/tests/components/xiaomi_miio/test_config_flow.py +++ b/tests/components/xiaomi_miio/test_config_flow.py @@ -976,11 +976,7 @@ async def test_reauth(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - const.DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/xiaomi_miio/test_select.py b/tests/components/xiaomi_miio/test_select.py index 584ef910c98..566f1516fdf 100644 --- a/tests/components/xiaomi_miio/test_select.py +++ b/tests/components/xiaomi_miio/test_select.py @@ -12,7 +12,7 @@ import pytest from homeassistant.components.select import ( ATTR_OPTION, ATTR_OPTIONS, - DOMAIN, + DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) from homeassistant.components.xiaomi_miio import UPDATE_INTERVAL @@ -143,7 +143,7 @@ async def test_select_coordinator_update(hass: HomeAssistant, setup_test) -> Non async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up component.""" - entity_id = f"{DOMAIN}.{entity_name}" + entity_id = f"{SELECT_DOMAIN}.{entity_name}" config_entry = MockConfigEntry( domain=XIAOMI_DOMAIN, diff --git a/tests/components/xiaomi_miio/test_vacuum.py b/tests/components/xiaomi_miio/test_vacuum.py index 64612f6f464..e58f21e387b 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -12,7 +12,7 @@ from homeassistant.components.vacuum import ( ATTR_BATTERY_ICON, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, - DOMAIN, + DOMAIN as VACUUM_DOMAIN, SERVICE_CLEAN_SPOT, SERVICE_LOCATE, SERVICE_PAUSE, @@ -21,8 +21,7 @@ from homeassistant.components.vacuum import ( SERVICE_SET_FAN_SPEED, SERVICE_START, SERVICE_STOP, - STATE_CLEANING, - STATE_ERROR, + VacuumActivity, ) from homeassistant.components.xiaomi_miio.const import ( CONF_FLOW_TYPE, @@ -264,7 +263,7 @@ async def test_xiaomi_vacuum_services( # Check state attributes state = hass.states.get(entity_id) - assert state.state == STATE_ERROR + assert 
state.state == VacuumActivity.ERROR assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 14204 assert state.attributes.get(ATTR_ERROR) == "Error message" assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-80" @@ -283,7 +282,7 @@ async def test_xiaomi_vacuum_services( # Call services await hass.services.async_call( - DOMAIN, SERVICE_START, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_START, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls( [mock.call.resume_or_start()], any_order=True @@ -292,42 +291,42 @@ async def test_xiaomi_vacuum_services( mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_PAUSE, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_PAUSE, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.pause()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_STOP, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_STOP, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.stop()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.home()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_LOCATE, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_LOCATE, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.find()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.spot()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SEND_COMMAND, {"entity_id": entity_id, "command": "raw"}, blocking=True, @@ -339,7 +338,7 @@ async def test_xiaomi_vacuum_services( mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SEND_COMMAND, {"entity_id": entity_id, "command": "raw", "params": {"k1": 2}}, blocking=True, @@ -450,7 +449,7 @@ async def test_xiaomi_specific_services( # Check state attributes state = hass.states.get(entity_id) - assert state.state == STATE_CLEANING + assert state.state == VacuumActivity.CLEANING assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 14204 assert state.attributes.get(ATTR_ERROR) is None assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-30" @@ -498,7 +497,7 @@ async def test_xiaomi_vacuum_fanspeeds( # Set speed service: await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": entity_id, "fan_speed": 60}, blocking=True, @@ -512,7 +511,7 @@ async def test_xiaomi_vacuum_fanspeeds( 
fan_speed_dict = mock_mirobo_fanspeeds.fan_speed_presets() await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": entity_id, "fan_speed": "Medium"}, blocking=True, @@ -525,7 +524,7 @@ async def test_xiaomi_vacuum_fanspeeds( assert "ERROR" not in caplog.text await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": entity_id, "fan_speed": "invent"}, blocking=True, @@ -535,7 +534,7 @@ async def test_xiaomi_vacuum_fanspeeds( async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up vacuum component.""" - entity_id = f"{DOMAIN}.{entity_name}" + entity_id = f"{VACUUM_DOMAIN}.{entity_name}" config_entry = MockConfigEntry( domain=XIAOMI_DOMAIN, diff --git a/tests/components/yale/__init__.py b/tests/components/yale/__init__.py new file mode 100644 index 00000000000..7f72d348042 --- /dev/null +++ b/tests/components/yale/__init__.py @@ -0,0 +1 @@ +"""Tests for the yale component.""" diff --git a/tests/components/yale/conftest.py b/tests/components/yale/conftest.py new file mode 100644 index 00000000000..3e633430846 --- /dev/null +++ b/tests/components/yale/conftest.py @@ -0,0 +1,72 @@ +"""Yale tests conftest.""" + +from unittest.mock import patch + +import pytest +from yalexs.manager.ratelimit import _RateLimitChecker + +from homeassistant.components.yale.const import DOMAIN +from homeassistant.core import HomeAssistant + +from .mocks import mock_client_credentials, mock_config_entry + +from tests.common import MockConfigEntry, load_fixture + + +@pytest.fixture(name="mock_discovery", autouse=True) +def mock_discovery_fixture(): + """Mock discovery to avoid loading the whole bluetooth stack.""" + with patch( + "homeassistant.components.yale.data.discovery_flow.async_create_flow" + ) as mock_discovery: + yield mock_discovery + + +@pytest.fixture(name="disable_ratelimit_checks", autouse=True) +def disable_ratelimit_checks_fixture(): + """Disable rate limit checks.""" + with patch.object(_RateLimitChecker, "register_wakeup"): + yield + + +@pytest.fixture(name="mock_config_entry") +def mock_config_entry_fixture(jwt: str) -> MockConfigEntry: + """Return the default mocked config entry.""" + return mock_config_entry(jwt=jwt) + + +@pytest.fixture(name="jwt") +def load_jwt_fixture() -> str: + """Load Fixture data.""" + return load_fixture("jwt", DOMAIN).strip("\n") + + +@pytest.fixture(name="reauth_jwt") +def load_reauth_jwt_fixture() -> str: + """Load Fixture data.""" + return load_fixture("reauth_jwt", DOMAIN).strip("\n") + + +@pytest.fixture(name="reauth_jwt_wrong_account") +def load_reauth_jwt_wrong_account_fixture() -> str: + """Load Fixture data.""" + return load_fixture("reauth_jwt_wrong_account", DOMAIN).strip("\n") + + +@pytest.fixture(name="client_credentials", autouse=True) +async def mock_client_credentials_fixture(hass: HomeAssistant) -> None: + """Mock client credentials.""" + await mock_client_credentials(hass) + + +@pytest.fixture(name="skip_cloud", autouse=True) +def skip_cloud_fixture(): + """Skip setting up cloud. + + Cloud already has its own tests for account link. + + We do not need to test it here as we only need to test our + usage of the oauth2 helpers. 
+ """ + with patch("homeassistant.components.cloud.async_setup", return_value=True): + yield diff --git a/tests/components/yale/fixtures/get_activity.bridge_offline.json b/tests/components/yale/fixtures/get_activity.bridge_offline.json new file mode 100644 index 00000000000..9c2ded96665 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.bridge_offline.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "associated_bridge_offline", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.bridge_online.json b/tests/components/yale/fixtures/get_activity.bridge_online.json new file mode 100644 index 00000000000..6f8b5e6a4a6 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.bridge_online.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "associated_bridge_online", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.doorbell_motion.json b/tests/components/yale/fixtures/get_activity.doorbell_motion.json new file mode 100644 index 00000000000..cf0f231a49a --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.doorbell_motion.json @@ -0,0 +1,58 @@ +[ + { + "otherUser": { + "FirstName": "Unknown", + "UserName": "deleteduser", + "LastName": "User", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "dateTime": 1582663119959, + "deviceID": "K98GiDT45GUL", + "info": { + "videoUploadProgress": "in_progress", + "image": { + "resource_type": "image", + "etag": "fdsf", + "created_at": "2020-02-25T20:38:39Z", + "type": "upload", + "format": "jpg", + "version": 1582663119, + "secure_url": "https://res.cloudinary.com/updated_image.jpg", + "signature": "fdfdfd", + "url": "http://res.cloudinary.com/updated_image.jpg", + "bytes": 48545, + "placeholder": false, + "original_filename": "file", + "width": 720, + "tags": [], + "public_id": "xnsj5gphpzij9brifpf4", + "height": 576 + }, + "dvrID": "dvr", + "videoAvailable": false, + "hasSubscription": false + }, + "callingUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "house": { + "houseName": "K98GiDT45GUL", + "houseID": "na" + }, + "action": "doorbell_motion_detected", + "deviceType": "doorbell", + 
"entities": { + "otherUser": "deleted", + "house": "na", + "device": "K98GiDT45GUL", + "activity": "de5585cfd4eae900bb5ba3dc", + "callingUser": "deleted" + }, + "deviceName": "Front Door" + } +] diff --git a/tests/components/yale/fixtures/get_activity.jammed.json b/tests/components/yale/fixtures/get_activity.jammed.json new file mode 100644 index 00000000000..782a13f9c73 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.jammed.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "jammed", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock.json b/tests/components/yale/fixtures/get_activity.lock.json new file mode 100644 index 00000000000..b40e7d61ccf --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json b/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json new file mode 100644 index 00000000000..38c26ffb7dd --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "Relock", + "UserID": "automaticrelock", + "FirstName": "Auto" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json b/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json new file mode 100644 index 00000000000..bfbc621e064 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": 
"123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_keypad.json b/tests/components/yale/fixtures/get_activity.lock_from_keypad.json new file mode 100644 index 00000000000..1b1e13e67dd --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_keypad.json @@ -0,0 +1,37 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_manual.json b/tests/components/yale/fixtures/get_activity.lock_from_manual.json new file mode 100644 index 00000000000..e2fc195cfda --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_manual.json @@ -0,0 +1,39 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": false, + "manual": true, + "tag": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.locking.json b/tests/components/yale/fixtures/get_activity.locking.json new file mode 100644 index 00000000000..ad2df6f7e91 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.locking.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "locking", + "dateTime": 
1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.unlock_from_manual.json b/tests/components/yale/fixtures/get_activity.unlock_from_manual.json new file mode 100644 index 00000000000..e8bf95818ce --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.unlock_from_manual.json @@ -0,0 +1,39 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "unlock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": false, + "manual": true, + "tag": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.unlock_from_tag.json b/tests/components/yale/fixtures/get_activity.unlock_from_tag.json new file mode 100644 index 00000000000..57876428677 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.unlock_from_tag.json @@ -0,0 +1,39 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "unlock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": false, + "manual": false, + "tag": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.unlocking.json b/tests/components/yale/fixtures/get_activity.unlocking.json new file mode 100644 index 00000000000..0fbd0be3eb8 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.unlocking.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "unlocking", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_doorbell.json b/tests/components/yale/fixtures/get_doorbell.json new file mode 100644 index 00000000000..32714211618 --- /dev/null +++ 
b/tests/components/yale/fixtures/get_doorbell.json @@ -0,0 +1,81 @@ +{ + "status_timestamp": 1512811834532, + "appID": "august-iphone", + "LockID": "BBBB1F5F11114C24CCCC97571DD6AAAA", + "recentImage": { + "original_filename": "file", + "placeholder": false, + "bytes": 24476, + "height": 640, + "format": "jpg", + "width": 480, + "version": 1512892814, + "resource_type": "image", + "etag": "54966926be2e93f77d498a55f247661f", + "tags": [], + "public_id": "qqqqt4ctmxwsysylaaaa", + "url": "http://image.com/vmk16naaaa7ibuey7sar.jpg", + "created_at": "2017-12-10T08:01:35Z", + "signature": "75z47ca21b5e8ffda21d2134e478a2307c4625da", + "secure_url": "https://image.com/vmk16naaaa7ibuey7sar.jpg", + "type": "upload" + }, + "settings": { + "keepEncoderRunning": true, + "videoResolution": "640x480", + "minACNoScaling": 40, + "irConfiguration": 8448272, + "directLink": true, + "overlayEnabled": true, + "notify_when_offline": true, + "micVolume": 100, + "bitrateCeiling": 512000, + "initialBitrate": 384000, + "IVAEnabled": false, + "turnOffCamera": false, + "ringSoundEnabled": true, + "JPGQuality": 70, + "motion_notifications": true, + "speakerVolume": 92, + "buttonpush_notifications": true, + "ABREnabled": true, + "debug": false, + "batteryLowThreshold": 3.1, + "batteryRun": false, + "IREnabled": true, + "batteryUseThreshold": 3.4 + }, + "doorbellServerURL": "https://doorbells.august.com", + "name": "Front Door", + "createdAt": "2016-11-26T22:27:11.176Z", + "installDate": "2016-11-26T22:27:11.176Z", + "serialNumber": "tBXZR0Z35E", + "dvrSubscriptionSetupDone": true, + "caps": ["reconnect"], + "doorbellID": "K98GiDT45GUL", + "HouseID": "mockhouseid1", + "telemetry": { + "signal_level": -56, + "date": "2017-12-10 08:05:12", + "battery_soc": 96, + "battery": 4.061763, + "steady_ac_in": 22.196405, + "BSSID": "88:ee:00:dd:aa:11", + "SSID": "foo_ssid", + "updated_at": "2017-12-10T08:05:13.650Z", + "temperature": 28.25, + "wifi_freq": 5745, + "load_average": "0.50 0.47 0.35 1/154 9345", + "link_quality": 54, + "battery_soh": 95, + "uptime": "16168.75 13830.49", + "ip_addr": "10.0.1.11", + "doorbell_low_battery": false, + "ac_in": 23.856874 + }, + "installUserID": "c3b2a94e-373e-aaaa-bbbb-36e996827777", + "status": "doorbell_call_status_online", + "firmwareVersion": "2.3.0-RC153+201711151527", + "pubsubChannel": "7c7a6672-59c8-3333-ffff-dcd98705cccc", + "updatedAt": "2017-12-10T08:05:13.650Z" +} diff --git a/tests/components/yale/fixtures/get_doorbell.nobattery.json b/tests/components/yale/fixtures/get_doorbell.nobattery.json new file mode 100644 index 00000000000..2a7f1e2d3b2 --- /dev/null +++ b/tests/components/yale/fixtures/get_doorbell.nobattery.json @@ -0,0 +1,78 @@ +{ + "status_timestamp": 1512811834532, + "appID": "august-iphone", + "LockID": "BBBB1F5F11114C24CCCC97571DD6AAAA", + "recentImage": { + "original_filename": "file", + "placeholder": false, + "bytes": 24476, + "height": 640, + "format": "jpg", + "width": 480, + "version": 1512892814, + "resource_type": "image", + "etag": "54966926be2e93f77d498a55f247661f", + "tags": [], + "public_id": "qqqqt4ctmxwsysylaaaa", + "url": "http://image.com/vmk16naaaa7ibuey7sar.jpg", + "created_at": "2017-12-10T08:01:35Z", + "signature": "75z47ca21b5e8ffda21d2134e478a2307c4625da", + "secure_url": "https://image.com/vmk16naaaa7ibuey7sar.jpg", + "type": "upload" + }, + "settings": { + "keepEncoderRunning": true, + "videoResolution": "640x480", + "minACNoScaling": 40, + "irConfiguration": 8448272, + "directLink": true, + "overlayEnabled": true, + "notify_when_offline": 
true, + "micVolume": 100, + "bitrateCeiling": 512000, + "initialBitrate": 384000, + "IVAEnabled": false, + "turnOffCamera": false, + "ringSoundEnabled": true, + "JPGQuality": 70, + "motion_notifications": true, + "speakerVolume": 92, + "buttonpush_notifications": true, + "ABREnabled": true, + "debug": false, + "batteryLowThreshold": 3.1, + "batteryRun": false, + "IREnabled": true, + "batteryUseThreshold": 3.4 + }, + "doorbellServerURL": "https://doorbells.august.com", + "name": "Front Door", + "createdAt": "2016-11-26T22:27:11.176Z", + "installDate": "2016-11-26T22:27:11.176Z", + "serialNumber": "tBXZR0Z35E", + "dvrSubscriptionSetupDone": true, + "caps": ["reconnect"], + "doorbellID": "K98GiDT45GUL", + "HouseID": "3dd2accaea08", + "telemetry": { + "signal_level": -56, + "date": "2017-12-10 08:05:12", + "steady_ac_in": 22.196405, + "BSSID": "88:ee:00:dd:aa:11", + "SSID": "foo_ssid", + "updated_at": "2017-12-10T08:05:13.650Z", + "temperature": 28.25, + "wifi_freq": 5745, + "load_average": "0.50 0.47 0.35 1/154 9345", + "link_quality": 54, + "uptime": "16168.75 13830.49", + "ip_addr": "10.0.1.11", + "doorbell_low_battery": false, + "ac_in": 23.856874 + }, + "installUserID": "c3b2a94e-373e-aaaa-bbbb-36e996827777", + "status": "doorbell_call_status_online", + "firmwareVersion": "2.3.0-RC153+201711151527", + "pubsubChannel": "7c7a6672-59c8-3333-ffff-dcd98705cccc", + "updatedAt": "2017-12-10T08:05:13.650Z" +} diff --git a/tests/components/yale/fixtures/get_doorbell.offline.json b/tests/components/yale/fixtures/get_doorbell.offline.json new file mode 100644 index 00000000000..13a8483c995 --- /dev/null +++ b/tests/components/yale/fixtures/get_doorbell.offline.json @@ -0,0 +1,126 @@ +{ + "recentImage": { + "tags": [], + "height": 576, + "public_id": "fdsfds", + "bytes": 50013, + "resource_type": "image", + "original_filename": "file", + "version": 1582242766, + "format": "jpg", + "signature": "fdsfdsf", + "created_at": "2020-02-20T23:52:46Z", + "type": "upload", + "placeholder": false, + "url": "http://res.cloudinary.com/august-com/image/upload/ccc/ccccc.jpg", + "secure_url": "https://res.cloudinary.com/august-com/image/upload/cc/cccc.jpg", + "etag": "zds", + "width": 720 + }, + "firmwareVersion": "3.1.0-HYDRC75+201909251139", + "doorbellServerURL": "https://doorbells.august.com", + "installUserID": "mock", + "caps": ["reconnect", "webrtc", "tcp_wakeup"], + "messagingProtocol": "pubnub", + "createdAt": "2020-02-12T03:52:28.719Z", + "invitations": [], + "appID": "august-iphone-v5", + "HouseID": "houseid1", + "doorbellID": "tmt100", + "name": "Front Door", + "settings": { + "batteryUseThreshold": 3.4, + "brightness": 50, + "batteryChargeCurrent": 60, + "overCurrentThreshold": -250, + "irLedBrightness": 40, + "videoResolution": "720x576", + "pirPulseCounter": 1, + "contrast": 50, + "micVolume": 50, + "directLink": true, + "auto_contrast_mode": 0, + "saturation": 50, + "motion_notifications": true, + "pirSensitivity": 20, + "pirBlindTime": 7, + "notify_when_offline": false, + "nightModeAlsThreshold": 10, + "minACNoScaling": 40, + "DVRRecordingTimeout": 15, + "turnOffCamera": false, + "debug": false, + "keepEncoderRunning": true, + "pirWindowTime": 0, + "bitrateCeiling": 2000000, + "backlight_comp": false, + "buttonpush_notifications": true, + "buttonpush_notifications_partners": false, + "minimumSnapshotInterval": 30, + "pirConfiguration": 272, + "batteryLowThreshold": 3.1, + "sharpness": 50, + "ABREnabled": true, + "hue": 50, + "initialBitrate": 1000000, + "ringSoundEnabled": true, + "IVAEnabled": 
false, + "overlayEnabled": true, + "speakerVolume": 92, + "ringRepetitions": 3, + "powerProfilePreset": -1, + "irConfiguration": 16836880, + "JPGQuality": 70, + "IREnabled": true + }, + "updatedAt": "2020-02-20T23:58:21.580Z", + "serialNumber": "abc", + "installDate": "2019-02-12T03:52:28.719Z", + "dvrSubscriptionSetupDone": true, + "pubsubChannel": "mock", + "chimes": [ + { + "updatedAt": "2020-02-12T03:55:38.805Z", + "_id": "cccc", + "type": 1, + "serialNumber": "ccccc", + "doorbellID": "tmt100", + "name": "Living Room", + "chimeID": "cccc", + "createdAt": "2020-02-12T03:55:38.805Z", + "firmware": "3.1.16" + } + ], + "telemetry": { + "battery": 3.985, + "battery_soc": 81, + "load_average": "0.45 0.18 0.07 4/98 831", + "ip_addr": "192.168.100.174", + "BSSID": "snp", + "uptime": "96.55 70.59", + "SSID": "bob", + "updated_at": "2020-02-20T23:53:09.586Z", + "dtim_period": 0, + "wifi_freq": 2462, + "date": "2020-02-20 11:47:36", + "BSSIDManufacturer": "Ubiquiti - Ubiquiti Networks Inc.", + "battery_temp": 22, + "battery_avg_cur": -291, + "beacon_interval": 0, + "signal_level": -49, + "battery_soh": 95, + "doorbell_low_battery": false + }, + "secChipCertSerial": "", + "tcpKeepAlive": { + "keepAliveUUID": "mock", + "wakeUp": { + "token": "wakemeup", + "lastUpdated": 1582242723931 + } + }, + "statusUpdatedAtMs": 1582243101579, + "status": "doorbell_offline", + "type": "hydra1", + "HouseName": "housename" +} diff --git a/tests/components/yale/fixtures/get_lock.doorsense_init.json b/tests/components/yale/fixtures/get_lock.doorsense_init.json new file mode 100644 index 00000000000..1132cc61a8d --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.doorsense_init.json @@ -0,0 +1,92 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "init", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": false, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + 
"format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.low_keypad_battery.json b/tests/components/yale/fixtures/get_lock.low_keypad_battery.json new file mode 100644 index 00000000000..43b5513a527 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.low_keypad_battery.json @@ -0,0 +1,92 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Low", + "batteryRaw": 128 + }, + "OfflineKeys": { + "created": [], + "loaded": [], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.offline.json b/tests/components/yale/fixtures/get_lock.offline.json new file mode 100644 index 00000000000..50d3d345ef8 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.offline.json @@ -0,0 +1,57 @@ +{ + "Calibrated": false, + "Created": "2000-00-00T00:00:00.447Z", + "HouseID": "houseid", + "HouseName": "MockName", + "LockID": "ABC", + "LockName": "Test", + "LockStatus": { + "status": "unknown" + }, + "OfflineKeys": { + "created": [], + "createdhk": [ + { + "UserID": "mock-user-id", + "created": "2000-00-00T00:00:00.447Z", + "key": "mockkey", 
+ "slot": 12 + } + ], + "deleted": [], + "loaded": [] + }, + "SerialNumber": "ABC", + "Type": 3, + "Updated": "2000-00-00T00:00:00.447Z", + "battery": -1, + "cameras": [], + "currentFirmwareVersion": "undefined-1.59.0-1.13.2", + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minGPSAccuracyRequired": 80, + "minimumGeofence": 100 + } + }, + "homeKitEnabled": false, + "isGalileo": false, + "macAddress": "a:b:c", + "parametersToSet": {}, + "pubsubChannel": "mockpubsub", + "ruleHash": {}, + "skuNumber": "AUG-X", + "supportsEntryCodes": false, + "users": { + "mockuserid": { + "FirstName": "MockName", + "LastName": "House", + "UserType": "superuser", + "identifiers": ["phone:+15558675309", "email:mockme@mock.org"] + } + }, + "zWaveDSK": "1-2-3-4", + "zWaveEnabled": true +} diff --git a/tests/components/yale/fixtures/get_lock.online.json b/tests/components/yale/fixtures/get_lock.online.json new file mode 100644 index 00000000000..7abadeef4b6 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online.json @@ -0,0 +1,92 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.online.unknown_state.json b/tests/components/yale/fixtures/get_lock.online.unknown_state.json new file mode 100644 index 00000000000..abc8b40a132 
--- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online.unknown_state.json @@ -0,0 +1,59 @@ +{ + "LockName": "Side Door", + "Type": 1001, + "Created": "2019-10-07T01:49:06.831Z", + "Updated": "2019-10-07T01:49:06.831Z", + "LockID": "BROKENID", + "HouseID": "abc", + "HouseName": "dog", + "Calibrated": false, + "timeZone": "America/Chicago", + "battery": 0.9524716174964851, + "hostLockInfo": { + "serialNumber": "YR", + "manufacturer": "yale", + "productID": 1536, + "productTypeID": 32770 + }, + "supportsEntryCodes": true, + "skuNumber": "AUG-MD01", + "macAddress": "MAC", + "SerialNumber": "M1FXZ00EZ9", + "LockStatus": { + "status": "unknown_error_during_connect", + "dateTime": "2020-02-22T02:48:11.741Z", + "isLockStatusChanged": true, + "valid": true, + "doorState": "closed" + }, + "currentFirmwareVersion": "undefined-4.3.0-1.8.14", + "homeKitEnabled": true, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "id", + "mfgBridgeID": "id", + "deviceModel": "august-connect", + "firmwareVersion": "2.2.1", + "operative": true, + "status": { + "current": "online", + "updated": "2020-02-21T15:06:47.001Z", + "lastOnline": "2020-02-21T15:06:47.001Z", + "lastOffline": "2020-02-06T17:33:21.265Z" + }, + "hyperBridge": true + }, + "parametersToSet": {}, + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json b/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json new file mode 100644 index 00000000000..84822df9b89 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json @@ -0,0 +1,50 @@ +{ + "Bridge": { + "_id": "bridgeid", + "deviceModel": "august-connect", + "firmwareVersion": "2.2.1", + "hyperBridge": true, + "mfgBridgeID": "C5WY200WSH", + "operative": true, + "status": { + "current": "online", + "lastOffline": "2000-00-00T00:00:00.447Z", + "lastOnline": "2000-00-00T00:00:00.447Z", + "updated": "2000-00-00T00:00:00.447Z" + } + }, + "Calibrated": false, + "Created": "2000-00-00T00:00:00.447Z", + "HouseID": "123", + "HouseName": "Test", + "LockID": "missing_doorsense_id", + "LockName": "Online door missing doorsense", + "LockStatus": { + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": false, + "status": "locked", + "valid": true + }, + "SerialNumber": "XY", + "Type": 1001, + "Updated": "2000-00-00T00:00:00.447Z", + "battery": 0.922, + "currentFirmwareVersion": "undefined-4.3.0-1.8.14", + "homeKitEnabled": true, + "hostLockInfo": { + "manufacturer": "yale", + "productID": 1536, + "productTypeID": 32770, + "serialNumber": "ABC" + }, + "isGalileo": false, + "macAddress": "12:22", + "pins": { + "created": [], + "loaded": [] + }, + "skuNumber": "AUG-MD01", + "supportsEntryCodes": true, + "timeZone": "Pacific/Hawaii", + "zWaveEnabled": false +} diff --git a/tests/components/yale/fixtures/get_lock.online_with_doorsense.json b/tests/components/yale/fixtures/get_lock.online_with_doorsense.json new file mode 100644 index 00000000000..d9b413708ca --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_with_doorsense.json @@ -0,0 +1,52 @@ +{ + "Bridge": { + "_id": "bridgeid", + "deviceModel": "august-connect", + "firmwareVersion": "2.2.1", + "hyperBridge": true, + "mfgBridgeID": "C5WY200WSH", + "operative": true, + "status": { + "current": "online", + "lastOffline": 
"2000-00-00T00:00:00.447Z", + "lastOnline": "2000-00-00T00:00:00.447Z", + "updated": "2000-00-00T00:00:00.447Z" + } + }, + "pubsubChannel": "pubsub", + "Calibrated": false, + "Created": "2000-00-00T00:00:00.447Z", + "HouseID": "mockhouseid1", + "HouseName": "Test", + "LockID": "online_with_doorsense", + "LockName": "Online door with doorsense", + "LockStatus": { + "dateTime": "2017-12-10T04:48:30.272Z", + "doorState": "open", + "isLockStatusChanged": false, + "status": "locked", + "valid": true + }, + "SerialNumber": "XY", + "Type": 1001, + "Updated": "2000-00-00T00:00:00.447Z", + "battery": 0.922, + "currentFirmwareVersion": "undefined-4.3.0-1.8.14", + "homeKitEnabled": true, + "hostLockInfo": { + "manufacturer": "yale", + "productID": 1536, + "productTypeID": 32770, + "serialNumber": "ABC" + }, + "isGalileo": false, + "macAddress": "12:22", + "pins": { + "created": [], + "loaded": [] + }, + "skuNumber": "AUG-MD01", + "supportsEntryCodes": true, + "timeZone": "Pacific/Hawaii", + "zWaveEnabled": false +} diff --git a/tests/components/yale/fixtures/get_lock.online_with_keys.json b/tests/components/yale/fixtures/get_lock.online_with_keys.json new file mode 100644 index 00000000000..4efcba44d09 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_with_keys.json @@ -0,0 +1,100 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8064", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a9", + "serialNumber": "K1GXB0054L", + "lockID": "92412D1B44004595B5DEB134E151A8D4", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [ + { + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "slot": 1, + "key": "kkk01d4300c1dcxxx1c330f794941111", + "created": "2017-12-10T03:12:09.215Z", + "loaded": "2017-12-10T03:12:54.391Z" + } + ], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + 
"cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.online_with_unlatch.json b/tests/components/yale/fixtures/get_lock.online_with_unlatch.json new file mode 100644 index 00000000000..288ab1a2f28 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_with_unlatch.json @@ -0,0 +1,94 @@ +{ + "LockName": "Lock online with unlatch supported", + "Type": 17, + "Created": "2024-03-14T18:03:09.003Z", + "Updated": "2024-03-14T18:03:09.003Z", + "LockID": "online_with_unlatch", + "HouseID": "mockhouseid1", + "HouseName": "Zuhause", + "Calibrated": false, + "timeZone": "Europe/Berlin", + "battery": 0.61, + "batteryInfo": { + "level": 0.61, + "warningState": "lock_state_battery_warning_none", + "infoUpdatedDate": "2024-04-30T17:55:09.045Z", + "lastChangeDate": "2024-03-15T07:04:00.000Z", + "lastChangeVoltage": 8350, + "state": "Mittel", + "icon": "https://app-resources.aaecosystem.com/images/lock_battery_state_medium.png" + }, + "hostHardwareID": "xxx", + "supportsEntryCodes": true, + "remoteOperateSecret": "xxxx", + "skuNumber": "NONE", + "macAddress": "DE:AD:BE:00:00:00", + "SerialNumber": "LPOC000000", + "LockStatus": { + "status": "locked", + "dateTime": "2024-04-30T18:41:25.673Z", + "isLockStatusChanged": false, + "valid": true, + "doorState": "init" + }, + "currentFirmwareVersion": "1.0.4", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "65f33445529187c78a100000", + "mfgBridgeID": "LPOCH0004Y", + "deviceModel": "august-lock", + "firmwareVersion": "1.0.4", + "operative": true, + "status": { + "current": "online", + "lastOnline": "2024-04-30T18:41:27.971Z", + "updated": "2024-04-30T18:41:27.971Z", + "lastOffline": "2024-04-25T14:41:40.118Z" + }, + "locks": [ + { + "_id": "656858c182e6c7c555faf758", + "LockID": "68895DD075A1444FAD4C00B273EEEF28", + "macAddress": "DE:AD:BE:EF:0B:BC" + } + ], + "hyperBridge": true + }, + "OfflineKeys": { + "created": [], + "loaded": [ + { + "created": "2024-03-14T18:03:09.034Z", + "key": "055281d4aa9bd7b68c7b7bb78e2f34ca", + "slot": 1, + "UserID": "b4b44424-0000-0000-0000-25c224dad337", + "loaded": "2024-03-14T18:03:33.470Z" + } + ], + "deleted": [] + }, + "parametersToSet": {}, + "users": { + "b4b44424-0000-0000-0000-25c224dad337": { + "UserType": "superuser", + "FirstName": "m10x", + "LastName": "m10x", + "identifiers": ["phone:+494444444", "email:m10x@example.com"] + } + }, + "pubsubChannel": "pubsub", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + }, + "accessSchedulesAllowed": true +} diff --git a/tests/components/yale/fixtures/get_locks.json b/tests/components/yale/fixtures/get_locks.json new file mode 100644 index 00000000000..3fab55f82c9 --- /dev/null +++ b/tests/components/yale/fixtures/get_locks.json @@ -0,0 +1,16 @@ +{ + "A6697750D607098BAE8D6BAA11EF8063": { + "LockName": "Front Door Lock", + "UserType": "superuser", + "macAddress": "2E:BA:C4:14:3F:09", + "HouseID": "000000000000", + "HouseName": "A House" + }, + "A6697750D607098BAE8D6BAA11EF9999": { + "LockName": "Back Door Lock", + "UserType": "user", + "macAddress": "2E:BA:C4:14:3F:88", + "HouseID": "000000000011", + "HouseName": "A House" + } +} diff --git 
a/tests/components/yale/fixtures/jwt b/tests/components/yale/fixtures/jwt new file mode 100644 index 00000000000..d64f31b9bb2 --- /dev/null +++ b/tests/components/yale/fixtures/jwt @@ -0,0 +1 @@ +eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6ImE3NmMyNWU1LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjE3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.BdRo-dEr-osbDQGB2XzlI-mIj4gqULtapODt-sj-eA8 diff --git a/tests/components/yale/fixtures/lock_open.json b/tests/components/yale/fixtures/lock_open.json new file mode 100644 index 00000000000..b6cfe3c90fc --- /dev/null +++ b/tests/components/yale/fixtures/lock_open.json @@ -0,0 +1,26 @@ +{ + "status": "kAugLockState_Locked", + "resultsFromOperationCache": false, + "retryCount": 1, + "info": { + "wlanRSSI": -54, + "lockType": "lock_version_1001", + "lockStatusChanged": false, + "serialNumber": "ABC", + "serial": "123", + "action": "lock", + "context": { + "startDate": "2020-02-19T01:59:39.516Z", + "retryCount": 1, + "transactionID": "mock" + }, + "bridgeID": "mock", + "wlanSNR": 41, + "startTime": "2020-02-19T01:59:39.517Z", + "duration": 5149, + "lockID": "ABC", + "rssi": -77 + }, + "totalTime": 5162, + "doorState": "kAugDoorState_Open" +} diff --git a/tests/components/yale/fixtures/lock_with_doorbell.online.json b/tests/components/yale/fixtures/lock_with_doorbell.online.json new file mode 100644 index 00000000000..bb2367d1111 --- /dev/null +++ b/tests/components/yale/fixtures/lock_with_doorbell.online.json @@ -0,0 +1,100 @@ +{ + "LockName": "Front Door Lock", + "Type": 7, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [ + { + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "slot": 1, + "key": "kkk01d4300c1dcxxx1c330f794941111", + "created": "2017-12-10T03:12:09.215Z", + "loaded": "2017-12-10T03:12:54.391Z" + } + ], + 
"deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/reauth_jwt b/tests/components/yale/fixtures/reauth_jwt new file mode 100644 index 00000000000..4db8d061b68 --- /dev/null +++ b/tests/components/yale/fixtures/reauth_jwt @@ -0,0 +1 @@ +eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6ImE3NmMyNWU1LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjI3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.DtkHscsvbTE-SyKW3RxwXFQIKMf0xJwfPZN1X3JesqA diff --git a/tests/components/yale/fixtures/reauth_jwt_wrong_account b/tests/components/yale/fixtures/reauth_jwt_wrong_account new file mode 100644 index 00000000000..b0b62438178 --- /dev/null +++ b/tests/components/yale/fixtures/reauth_jwt_wrong_account @@ -0,0 +1 @@ +eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6IjQ0NDQ0NDQ0LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjE3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.PenDp4JUIBQZEx2BFxaCqV1-6yMuUPtmnB6jq1wpoX8 diff --git a/tests/components/yale/fixtures/unlock_closed.json b/tests/components/yale/fixtures/unlock_closed.json new file mode 100644 index 
00000000000..f676c005a17 --- /dev/null +++ b/tests/components/yale/fixtures/unlock_closed.json @@ -0,0 +1,26 @@ +{ + "status": "kAugLockState_Unlocked", + "resultsFromOperationCache": false, + "retryCount": 1, + "info": { + "wlanRSSI": -54, + "lockType": "lock_version_1001", + "lockStatusChanged": false, + "serialNumber": "ABC", + "serial": "123", + "action": "lock", + "context": { + "startDate": "2020-02-19T01:59:39.516Z", + "retryCount": 1, + "transactionID": "mock" + }, + "bridgeID": "mock", + "wlanSNR": 41, + "startTime": "2020-02-19T01:59:39.517Z", + "duration": 5149, + "lockID": "ABC", + "rssi": -77 + }, + "totalTime": 5162, + "doorState": "kAugDoorState_Closed" +} diff --git a/tests/components/yale/mocks.py b/tests/components/yale/mocks.py new file mode 100644 index 00000000000..03ab3609002 --- /dev/null +++ b/tests/components/yale/mocks.py @@ -0,0 +1,515 @@ +"""Mocks for the yale component.""" + +from __future__ import annotations + +from collections.abc import Iterable +from contextlib import contextmanager +import json +import os +import time +from typing import Any +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch + +from yalexs.activity import ( + ACTIVITY_ACTIONS_BRIDGE_OPERATION, + ACTIVITY_ACTIONS_DOOR_OPERATION, + ACTIVITY_ACTIONS_DOORBELL_DING, + ACTIVITY_ACTIONS_DOORBELL_MOTION, + ACTIVITY_ACTIONS_DOORBELL_VIEW, + ACTIVITY_ACTIONS_LOCK_OPERATION, + SOURCE_LOCK_OPERATE, + SOURCE_LOG, + Activity, + BridgeOperationActivity, + DoorbellDingActivity, + DoorbellMotionActivity, + DoorbellViewActivity, + DoorOperationActivity, + LockOperationActivity, +) +from yalexs.api_async import ApiAsync +from yalexs.authenticator_common import Authentication, AuthenticationState +from yalexs.const import Brand +from yalexs.doorbell import Doorbell, DoorbellDetail +from yalexs.lock import Lock, LockDetail +from yalexs.manager.ratelimit import _RateLimitChecker +from yalexs.manager.socketio import SocketIORunner + +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.yale.const import DOMAIN +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry, load_fixture + +USER_ID = "a76c25e5-49aa-4c14-cd0c-48a6931e2081" + + +def _mock_get_config( + brand: Brand = Brand.YALE_GLOBAL, jwt: str | None = None +) -> dict[str, Any]: + """Return a default yale config.""" + return { + DOMAIN: { + "auth_implementation": "yale", + "token": { + "access_token": jwt or "access_token", + "expires_in": 1, + "refresh_token": "refresh_token", + "expires_at": time.time() + 3600, + "service": "yale", + }, + } + } + + +def _mock_authenticator(auth_state: AuthenticationState) -> Authentication: + """Mock a yale authenticator.""" + authenticator = MagicMock() + type(authenticator).state = PropertyMock(return_value=auth_state) + return authenticator + + +def _timetoken() -> str: + return str(time.time_ns())[:-2] + + +async def mock_yale_config_entry( + hass: HomeAssistant, +) -> MockConfigEntry: + """Mock yale config entry and client credentials.""" + entry = mock_config_entry() + entry.add_to_hass(hass) + return entry + + +def mock_config_entry(jwt: str | None = None) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=_mock_get_config(jwt=jwt)[DOMAIN], + options={}, + unique_id=USER_ID, + ) + + +async
def mock_client_credentials(hass: HomeAssistant) -> ClientCredential: + """Mock client credentials.""" + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential("1", "2"), + DOMAIN, + ) + + +@contextmanager +def patch_yale_setup(): + """Patch yale setup process.""" + with ( + patch("yalexs.manager.gateway.ApiAsync") as api_mock, + patch.object(_RateLimitChecker, "register_wakeup") as authenticate_mock, + patch("yalexs.manager.data.SocketIORunner") as socketio_mock, + patch.object(socketio_mock, "run"), + patch( + "homeassistant.components.yale.config_entry_oauth2_flow.async_get_config_entry_implementation" + ), + ): + yield api_mock, authenticate_mock, socketio_mock + + +async def _mock_setup_yale( + hass: HomeAssistant, + api_instance: ApiAsync, + socketio_mock: SocketIORunner, + authenticate_side_effect: MagicMock, +) -> ConfigEntry: + """Set up yale integration.""" + entry = await mock_yale_config_entry(hass) + with patch_yale_setup() as patched_setup: + api_mock, authenticate_mock, sockio_mock_ = patched_setup + authenticate_mock.side_effect = authenticate_side_effect + sockio_mock_.return_value = socketio_mock + api_mock.return_value = api_instance + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return entry + + +async def _create_yale_with_devices( + hass: HomeAssistant, + devices: Iterable[LockDetail | DoorbellDetail] | None = None, + api_call_side_effects: dict[str, Any] | None = None, + activities: list[Any] | None = None, + brand: Brand = Brand.YALE_GLOBAL, + authenticate_side_effect: MagicMock | None = None, +) -> tuple[ConfigEntry, SocketIORunner]: + entry, _, socketio = await _create_yale_api_with_devices( + hass, + devices, + api_call_side_effects, + activities, + brand, + authenticate_side_effect, + ) + return entry, socketio + + +async def _create_yale_api_with_devices( + hass: HomeAssistant, + devices: Iterable[LockDetail | DoorbellDetail] | None = None, + api_call_side_effects: dict[str, Any] | None = None, + activities: dict[str, Any] | None = None, + brand: Brand = Brand.YALE_GLOBAL, + authenticate_side_effect: MagicMock | None = None, +) -> tuple[ConfigEntry, ApiAsync, SocketIORunner]: + if api_call_side_effects is None: + api_call_side_effects = {} + if devices is None: + devices = () + + update_api_call_side_effects(api_call_side_effects, devices, activities) + + api_instance = await make_mock_api(api_call_side_effects, brand) + socketio = SocketIORunner( + MagicMock( + api=api_instance, async_get_access_token=AsyncMock(return_value="token") + ) + ) + socketio.run = AsyncMock() + + entry = await _mock_setup_yale( + hass, + api_instance, + socketio, + authenticate_side_effect=authenticate_side_effect, + ) + + return entry, api_instance, socketio + + +def update_api_call_side_effects( + api_call_side_effects: dict[str, Any], + devices: Iterable[LockDetail | DoorbellDetail], + activities: dict[str, Any] | None = None, +) -> None: + """Update side effects dict from devices and activities.""" + + device_data = {"doorbells": [], "locks": []} + for device in devices or (): + if isinstance(device, LockDetail): + device_data["locks"].append( + {"base": _mock_yale_lock(device.device_id), "detail": device} + ) + elif isinstance(device, DoorbellDetail): + device_data["doorbells"].append( + { + "base": _mock_yale_doorbell( + deviceid=device.device_id, + brand=device._data.get("brand", Brand.YALE_GLOBAL), + ), + "detail": device, + } + ) + 
else: + raise ValueError # noqa: TRY004 + + def _get_device_detail(device_type, device_id): + for device in device_data[device_type]: + if device["detail"].device_id == device_id: + return device["detail"] + raise ValueError + + def _get_base_devices(device_type): + return [device["base"] for device in device_data[device_type]] + + def get_lock_detail_side_effect(access_token, device_id): + return _get_device_detail("locks", device_id) + + def get_doorbell_detail_side_effect(access_token, device_id): + return _get_device_detail("doorbells", device_id) + + def get_operable_locks_side_effect(access_token): + return _get_base_devices("locks") + + def get_doorbells_side_effect(access_token): + return _get_base_devices("doorbells") + + def get_house_activities_side_effect(access_token, house_id, limit=10): + if activities is not None: + return activities + return [] + + def lock_return_activities_side_effect(access_token, device_id): + lock = _get_device_detail("locks", device_id) + return [ + # There is a check to prevent out of order events + # so we set the doorclosed & lock event in the future + # to prevent a race condition where we reject the event + # because it happened before the dooropen & unlock event. + _mock_lock_operation_activity(lock, "lock", 2000), + _mock_door_operation_activity(lock, "doorclosed", 2000), + ] + + def unlock_return_activities_side_effect(access_token, device_id): + lock = _get_device_detail("locks", device_id) + return [ + _mock_lock_operation_activity(lock, "unlock", 0), + _mock_door_operation_activity(lock, "dooropen", 0), + ] + + api_call_side_effects.setdefault("get_lock_detail", get_lock_detail_side_effect) + api_call_side_effects.setdefault( + "get_doorbell_detail", get_doorbell_detail_side_effect + ) + api_call_side_effects.setdefault( + "get_operable_locks", get_operable_locks_side_effect + ) + api_call_side_effects.setdefault("get_doorbells", get_doorbells_side_effect) + api_call_side_effects.setdefault( + "get_house_activities", get_house_activities_side_effect + ) + api_call_side_effects.setdefault( + "lock_return_activities", lock_return_activities_side_effect + ) + api_call_side_effects.setdefault( + "unlock_return_activities", unlock_return_activities_side_effect + ) + api_call_side_effects.setdefault( + "async_unlatch_return_activities", unlock_return_activities_side_effect + ) + + +async def make_mock_api( + api_call_side_effects: dict[str, Any], + brand: Brand = Brand.YALE_GLOBAL, +) -> ApiAsync: + """Make a mock ApiAsync instance.""" + api_instance = MagicMock(name="Api", brand=brand) + + if api_call_side_effects["get_lock_detail"]: + type(api_instance).async_get_lock_detail = AsyncMock( + side_effect=api_call_side_effects["get_lock_detail"] + ) + + if api_call_side_effects["get_operable_locks"]: + type(api_instance).async_get_operable_locks = AsyncMock( + side_effect=api_call_side_effects["get_operable_locks"] + ) + + if api_call_side_effects["get_doorbells"]: + type(api_instance).async_get_doorbells = AsyncMock( + side_effect=api_call_side_effects["get_doorbells"] + ) + + if api_call_side_effects["get_doorbell_detail"]: + type(api_instance).async_get_doorbell_detail = AsyncMock( + side_effect=api_call_side_effects["get_doorbell_detail"] + ) + + if api_call_side_effects["get_house_activities"]: + type(api_instance).async_get_house_activities = AsyncMock( + side_effect=api_call_side_effects["get_house_activities"] + ) + + if api_call_side_effects["lock_return_activities"]: + type(api_instance).async_lock_return_activities = AsyncMock( + 
side_effect=api_call_side_effects["lock_return_activities"] + ) + + if api_call_side_effects["unlock_return_activities"]: + type(api_instance).async_unlock_return_activities = AsyncMock( + side_effect=api_call_side_effects["unlock_return_activities"] + ) + + if api_call_side_effects["async_unlatch_return_activities"]: + type(api_instance).async_unlatch_return_activities = AsyncMock( + side_effect=api_call_side_effects["async_unlatch_return_activities"] + ) + + api_instance.async_unlock_async = AsyncMock() + api_instance.async_lock_async = AsyncMock() + api_instance.async_status_async = AsyncMock() + api_instance.async_get_user = AsyncMock(return_value={"UserID": "abc"}) + api_instance.async_unlatch_async = AsyncMock() + api_instance.async_unlatch = AsyncMock() + api_instance.async_add_websocket_subscription = AsyncMock() + + return api_instance + + +def _mock_yale_authentication( + token_text: str, token_timestamp: float, state: AuthenticationState +) -> Authentication: + authentication = MagicMock(name="yalexs.authentication") + type(authentication).state = PropertyMock(return_value=state) + type(authentication).access_token = PropertyMock(return_value=token_text) + type(authentication).access_token_expires = PropertyMock( + return_value=token_timestamp + ) + return authentication + + +def _mock_yale_lock(lockid: str = "mocklockid1", houseid: str = "mockhouseid1") -> Lock: + return Lock(lockid, _mock_yale_lock_data(lockid=lockid, houseid=houseid)) + + +def _mock_yale_doorbell( + deviceid="mockdeviceid1", houseid="mockhouseid1", brand=Brand.YALE_GLOBAL +) -> Doorbell: + return Doorbell( + deviceid, + _mock_yale_doorbell_data(deviceid=deviceid, houseid=houseid, brand=brand), + ) + + +def _mock_yale_doorbell_data( + deviceid: str = "mockdeviceid1", + houseid: str = "mockhouseid1", + brand: Brand = Brand.YALE_GLOBAL, +) -> dict[str, Any]: + return { + "_id": deviceid, + "DeviceID": deviceid, + "name": f"{deviceid} Name", + "HouseID": houseid, + "UserType": "owner", + "serialNumber": "mockserial", + "battery": 90, + "status": "standby", + "currentFirmwareVersion": "mockfirmware", + "Bridge": { + "_id": "bridgeid1", + "firmwareVersion": "mockfirm", + "operative": True, + }, + "LockStatus": {"doorState": "open"}, + } + + +def _mock_yale_lock_data( + lockid: str = "mocklockid1", houseid: str = "mockhouseid1" +) -> dict[str, Any]: + return { + "_id": lockid, + "LockID": lockid, + "LockName": f"{lockid} Name", + "HouseID": houseid, + "UserType": "owner", + "SerialNumber": "mockserial", + "battery": 90, + "currentFirmwareVersion": "mockfirmware", + "Bridge": { + "_id": "bridgeid1", + "firmwareVersion": "mockfirm", + "operative": True, + }, + "LockStatus": {"doorState": "open"}, + } + + +async def _mock_operative_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online.json") + + +async def _mock_lock_with_offline_key(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_with_keys.json") + + +async def _mock_inoperative_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.offline.json") + + +async def _mock_activities_from_fixture( + hass: HomeAssistant, path: str +) -> list[Activity]: + json_dict = await _load_json_fixture(hass, path) + activities = [] + for activity_json in json_dict: + activity = _activity_from_dict(activity_json) + if activity: + activities.append(activity) + + return activities + + +async def _mock_lock_from_fixture(hass: 
HomeAssistant, path: str) -> LockDetail: + json_dict = await _load_json_fixture(hass, path) + return LockDetail(json_dict) + + +async def _mock_doorbell_from_fixture(hass: HomeAssistant, path: str) -> DoorbellDetail: + json_dict = await _load_json_fixture(hass, path) + return DoorbellDetail(json_dict) + + +async def _load_json_fixture(hass: HomeAssistant, path: str) -> dict[str, Any]: + fixture = await hass.async_add_executor_job( + load_fixture, os.path.join("yale", path) + ) + return json.loads(fixture) + + +async def _mock_doorsense_enabled_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_with_doorsense.json") + + +async def _mock_doorsense_missing_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_missing_doorsense.json") + + +async def _mock_lock_with_unlatch(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_with_unlatch.json") + + +def _mock_lock_operation_activity( + lock: Lock, action: str, offset: float +) -> LockOperationActivity: + return LockOperationActivity( + SOURCE_LOCK_OPERATE, + { + "dateTime": (time.time() + offset) * 1000, + "deviceID": lock.device_id, + "deviceType": "lock", + "action": action, + }, + ) + + +def _mock_door_operation_activity( + lock: Lock, action: str, offset: float +) -> DoorOperationActivity: + return DoorOperationActivity( + SOURCE_LOCK_OPERATE, + { + "dateTime": (time.time() + offset) * 1000, + "deviceID": lock.device_id, + "deviceType": "lock", + "action": action, + }, + ) + + +def _activity_from_dict(activity_dict: dict[str, Any]) -> Activity | None: + action = activity_dict.get("action") + + activity_dict["dateTime"] = time.time() * 1000 + + if action in ACTIVITY_ACTIONS_DOORBELL_DING: + return DoorbellDingActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_DOORBELL_MOTION: + return DoorbellMotionActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_DOORBELL_VIEW: + return DoorbellViewActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_LOCK_OPERATION: + return LockOperationActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_DOOR_OPERATION: + return DoorOperationActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_BRIDGE_OPERATION: + return BridgeOperationActivity(SOURCE_LOG, activity_dict) + return None diff --git a/tests/components/yale/snapshots/test_binary_sensor.ambr b/tests/components/yale/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..e294cb7c76c --- /dev/null +++ b/tests/components/yale/snapshots/test_binary_sensor.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_doorbell_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'tmt100_name', + 'config_entries': , + 'configuration_url': 'https://account.aaecosystem.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'yale', + 'tmt100', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Yale Home Inc.', + 'model': 'hydra1', + 'model_id': None, + 'name': 'tmt100 Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'tmt100 Name', + 'sw_version': '3.1.0-HYDRC75+201909251139', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/yale/snapshots/test_diagnostics.ambr b/tests/components/yale/snapshots/test_diagnostics.ambr new
file mode 100644 index 00000000000..c3d8d8e2aaa --- /dev/null +++ b/tests/components/yale/snapshots/test_diagnostics.ambr @@ -0,0 +1,125 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'brand': 'yale_global', + 'doorbells': dict({ + 'K98GiDT45GUL': dict({ + 'HouseID': '**REDACTED**', + 'LockID': 'BBBB1F5F11114C24CCCC97571DD6AAAA', + 'appID': 'august-iphone', + 'caps': list([ + 'reconnect', + ]), + 'createdAt': '2016-11-26T22:27:11.176Z', + 'doorbellID': 'K98GiDT45GUL', + 'doorbellServerURL': 'https://doorbells.august.com', + 'dvrSubscriptionSetupDone': True, + 'firmwareVersion': '2.3.0-RC153+201711151527', + 'installDate': '2016-11-26T22:27:11.176Z', + 'installUserID': '**REDACTED**', + 'name': 'Front Door', + 'pubsubChannel': '**REDACTED**', + 'recentImage': '**REDACTED**', + 'serialNumber': 'tBXZR0Z35E', + 'settings': dict({ + 'ABREnabled': True, + 'IREnabled': True, + 'IVAEnabled': False, + 'JPGQuality': 70, + 'batteryLowThreshold': 3.1, + 'batteryRun': False, + 'batteryUseThreshold': 3.4, + 'bitrateCeiling': 512000, + 'buttonpush_notifications': True, + 'debug': False, + 'directLink': True, + 'initialBitrate': 384000, + 'irConfiguration': 8448272, + 'keepEncoderRunning': True, + 'micVolume': 100, + 'minACNoScaling': 40, + 'motion_notifications': True, + 'notify_when_offline': True, + 'overlayEnabled': True, + 'ringSoundEnabled': True, + 'speakerVolume': 92, + 'turnOffCamera': False, + 'videoResolution': '640x480', + }), + 'status': 'doorbell_call_status_online', + 'status_timestamp': 1512811834532, + 'telemetry': dict({ + 'BSSID': '88:ee:00:dd:aa:11', + 'SSID': 'foo_ssid', + 'ac_in': 23.856874, + 'battery': 4.061763, + 'battery_soc': 96, + 'battery_soh': 95, + 'date': '2017-12-10 08:05:12', + 'doorbell_low_battery': False, + 'ip_addr': '10.0.1.11', + 'link_quality': 54, + 'load_average': '0.50 0.47 0.35 1/154 9345', + 'signal_level': -56, + 'steady_ac_in': 22.196405, + 'temperature': 28.25, + 'updated_at': '2017-12-10T08:05:13.650Z', + 'uptime': '16168.75 13830.49', + 'wifi_freq': 5745, + }), + 'updatedAt': '2017-12-10T08:05:13.650Z', + }), + }), + 'locks': dict({ + 'online_with_doorsense': dict({ + 'Bridge': dict({ + '_id': 'bridgeid', + 'deviceModel': 'august-connect', + 'firmwareVersion': '2.2.1', + 'hyperBridge': True, + 'mfgBridgeID': 'C5WY200WSH', + 'operative': True, + 'status': dict({ + 'current': 'online', + 'lastOffline': '2000-00-00T00:00:00.447Z', + 'lastOnline': '2000-00-00T00:00:00.447Z', + 'updated': '2000-00-00T00:00:00.447Z', + }), + }), + 'Calibrated': False, + 'Created': '2000-00-00T00:00:00.447Z', + 'HouseID': '**REDACTED**', + 'HouseName': 'Test', + 'LockID': 'online_with_doorsense', + 'LockName': 'Online door with doorsense', + 'LockStatus': dict({ + 'dateTime': '2017-12-10T04:48:30.272Z', + 'doorState': 'open', + 'isLockStatusChanged': False, + 'status': 'locked', + 'valid': True, + }), + 'SerialNumber': 'XY', + 'Type': 1001, + 'Updated': '2000-00-00T00:00:00.447Z', + 'battery': 0.922, + 'currentFirmwareVersion': 'undefined-4.3.0-1.8.14', + 'homeKitEnabled': True, + 'hostLockInfo': dict({ + 'manufacturer': 'yale', + 'productID': 1536, + 'productTypeID': 32770, + 'serialNumber': 'ABC', + }), + 'isGalileo': False, + 'macAddress': '12:22', + 'pins': '**REDACTED**', + 'pubsubChannel': '**REDACTED**', + 'skuNumber': 'AUG-MD01', + 'supportsEntryCodes': True, + 'timeZone': 'Pacific/Hawaii', + 'zWaveEnabled': False, + }), + }), + }) +# --- diff --git a/tests/components/yale/snapshots/test_lock.ambr b/tests/components/yale/snapshots/test_lock.ambr new 
file mode 100644 index 00000000000..b1a9f6a4d86 --- /dev/null +++ b/tests/components/yale/snapshots/test_lock.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_lock_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'online_with_doorsense_name', + 'config_entries': , + 'configuration_url': 'https://account.aaecosystem.com', + 'connections': set({ + tuple( + 'bluetooth', + '12:22', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'yale', + 'online_with_doorsense', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Yale Home Inc.', + 'model': 'AUG-MD01', + 'model_id': None, + 'name': 'online_with_doorsense Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'online_with_doorsense Name', + 'sw_version': 'undefined-4.3.0-1.8.14', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/yale/snapshots/test_sensor.ambr b/tests/components/yale/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..a425cfa90de --- /dev/null +++ b/tests/components/yale/snapshots/test_sensor.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_lock_operator_autorelock + ReadOnlyDict({ + 'autorelock': True, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'autorelock', + 'remote': False, + 'tag': False, + }) +# --- +# name: test_lock_operator_keypad + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': True, + 'manual': False, + 'method': 'keypad', + 'remote': False, + 'tag': False, + }) +# --- +# name: test_lock_operator_manual + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': True, + 'method': 'manual', + 'remote': False, + 'tag': False, + }) +# --- +# name: test_lock_operator_remote + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'remote', + 'remote': True, + 'tag': False, + }) +# --- +# name: test_restored_state + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'autorelock': False, + 'entity_picture': 'image.png', + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'tag', + 'remote': False, + 'tag': True, + }), + 'context': , + 'entity_id': 'sensor.online_with_doorsense_name_operator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Tag Unlock', + }) +# --- +# name: test_unlock_operator_manual + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': True, + 'method': 'manual', + 'remote': False, + 'tag': False, + }), + 'context': , + 'entity_id': 'sensor.online_with_doorsense_name_operator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Your favorite elven princess', + }) +# --- +# name: test_unlock_operator_tag + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'tag', + 'remote': False, + 'tag': True, + }) +# --- diff --git a/tests/components/yale/test_binary_sensor.py b/tests/components/yale/test_binary_sensor.py new file mode 100644 index 00000000000..811c845e359 --- /dev/null +++ 
b/tests/components/yale/test_binary_sensor.py @@ -0,0 +1,308 @@ +"""The binary_sensor tests for the yale platform.""" + +import datetime + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_UNLOCK, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_doorsense_enabled_yale_lock_detail, + _mock_lock_from_fixture, +) + +from tests.common import async_fire_time_changed + + +async def test_doorsense(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + await _create_yale_with_devices(hass, [lock_one]) + states = hass.states + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF + ) + + +async def test_lock_bridge_offline(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that goes offline.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + activities = await _mock_activities_from_fixture( + hass, "get_activity.bridge_offline.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + states = hass.states + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state + == STATE_UNAVAILABLE + ) + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_yale_with_devices(hass, [doorbell_one]) + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF + ) + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF + ) + + +async def test_create_doorbell_offline(hass: HomeAssistant) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + states = hass.states + assert states.get("binary_sensor.tmt100_name_motion").state == STATE_UNAVAILABLE + assert states.get("binary_sensor.tmt100_name_connectivity").state == STATE_OFF + assert ( + states.get("binary_sensor.tmt100_name_doorbell_ding").state == STATE_UNAVAILABLE + ) + + +async def test_create_doorbell_with_motion( + 
hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + activities = await _mock_activities_from_fixture( + hass, "get_activity.doorbell_motion.json" + ) + await _create_yale_with_devices(hass, [doorbell_one], activities=activities) + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + + +async def test_doorbell_update_via_socketio( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test creation of a doorbell that can be updated via socketio.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + _, socketio = await _create_yale_with_devices(hass, [doorbell_one]) + assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + + listener = list(socketio._listeners)[0] + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "imagecapture", + "data": { + "result": { + "created_at": "2021-03-16T01:07:08.817Z", + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/zip/images/zip.jpeg" + ), + }, + }, + }, + ) + + await hass.async_block_till_done() + + assert states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_ON + + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "doorbell_motion_detected", + "data": { + "event": "doorbell_motion_detected", + "image": { + "height": 640, + "width": 480, + "format": "jpg", + "created_at": "2021-03-16T02:36:26.886Z", + "bytes": 14061, + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" + ), + "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", + "etag": "09e839331c4ea59eef28081f2caa0e90", + }, + "doorbellName": "Front Door", + "callID": None, + "origin": "mars-api", + "mutableContent": True, + }, + }, + ) + + await hass.async_block_till_done() + + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF + ) + + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "buttonpush", + }, + ) + + await hass.async_block_till_done() + + assert states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_ON + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + + +async def test_doorbell_device_registry( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test creation of a lock with doorsense and bridge ands up in the registry.""" + doorbell_one = await 
_mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + reg_device = device_registry.async_get_device(identifiers={("yale", "tmt100")}) + assert reg_device == snapshot + + +async def test_door_sense_update_via_socketio(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + assert lock_one.pubsub_channel == "pubsub" + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + config_entry, socketio = await _create_yale_with_devices( + hass, [lock_one], activities=activities + ) + states = hass.states + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + listener = list(socketio._listeners)[0] + listener( + lock_one.device_id, + dt_util.utcnow(), + {"status": "kAugLockState_Unlocking", "doorState": "closed"}, + ) + + await hass.async_block_till_done() + + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF + ) + + listener( + lock_one.device_id, + dt_util.utcnow(), + {"status": "kAugLockState_Locking", "doorState": "open"}, + ) + + await hass.async_block_till_done() + + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + socketio.connected = True + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + # Ensure socketio status is always preserved + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + listener( + lock_one.device_id, + dt_util.utcnow(), + {"status": "kAugLockState_Unlocking", "doorState": "open"}, + ) + + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + +async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: + """Test creation of a lock with a doorbell.""" + lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") + await _create_yale_with_devices(hass, [lock_one]) + states = hass.states + assert ( + states.get( + "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" + ).state + == STATE_OFF + ) diff --git a/tests/components/yale/test_button.py b/tests/components/yale/test_button.py new file mode 100644 index 00000000000..92d3ecef859 --- /dev/null +++ b/tests/components/yale/test_button.py @@ -0,0 +1,23 @@ +"""The button tests for the yale platform.""" + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from .mocks import _create_yale_api_with_devices, _mock_lock_from_fixture + + +async def 
test_wake_lock(hass: HomeAssistant) -> None: + """Test creation of a lock and wake it.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + _, api_instance, _ = await _create_yale_api_with_devices(hass, [lock_one]) + entity_id = "button.online_with_doorsense_name_wake" + binary_sensor_online_with_doorsense_name = hass.states.get(entity_id) + assert binary_sensor_online_with_doorsense_name is not None + api_instance.async_status_async.reset_mock() + await hass.services.async_call( + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + api_instance.async_status_async.assert_called_once() diff --git a/tests/components/yale/test_camera.py b/tests/components/yale/test_camera.py new file mode 100644 index 00000000000..122f3c65def --- /dev/null +++ b/tests/components/yale/test_camera.py @@ -0,0 +1,93 @@ +"""The camera tests for the yale platform.""" + +from http import HTTPStatus +from unittest.mock import patch + +from yalexs.const import Brand +from yalexs.doorbell import ContentTokenExpired + +from homeassistant.components.camera import CameraState +from homeassistant.core import HomeAssistant + +from .mocks import _create_yale_with_devices, _mock_doorbell_from_fixture + +from tests.typing import ClientSessionGenerator + + +async def test_create_doorbell( + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator +) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + with patch.object( + doorbell_one, "async_get_doorbell_image", create=False, return_value="image" + ): + await _create_yale_with_devices(hass, [doorbell_one], brand=Brand.YALE_GLOBAL) + + camera_k98gidt45gul_name_camera = hass.states.get( + "camera.k98gidt45gul_name_camera" + ) + assert camera_k98gidt45gul_name_camera.state == CameraState.IDLE + + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] + + client = await hass_client_no_auth() + resp = await client.get(url) + assert resp.status == HTTPStatus.OK + body = await resp.text() + assert body == "image" + + +async def test_doorbell_refresh_content_token_recover( + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator +) -> None: + """Test camera image content token expired.""" + doorbell_two = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + with patch.object( + doorbell_two, + "async_get_doorbell_image", + create=False, + side_effect=[ContentTokenExpired, "image"], + ): + await _create_yale_with_devices( + hass, + [doorbell_two], + brand=Brand.YALE_GLOBAL, + ) + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] + + client = await hass_client_no_auth() + resp = await client.get(url) + assert resp.status == HTTPStatus.OK + body = await resp.text() + assert body == "image" + + +async def test_doorbell_refresh_content_token_fail( + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator +) -> None: + """Test camera image content token expired.""" + doorbell_two = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + with patch.object( + doorbell_two, + "async_get_doorbell_image", + create=False, + side_effect=ContentTokenExpired, + ): + await _create_yale_with_devices( + hass, + [doorbell_two], + brand=Brand.YALE_GLOBAL, + ) + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] + + client = await hass_client_no_auth() + resp = await client.get(url) + assert 
resp.status == HTTPStatus.INTERNAL_SERVER_ERROR diff --git a/tests/components/yale/test_config_flow.py b/tests/components/yale/test_config_flow.py new file mode 100644 index 00000000000..004162c0ebf --- /dev/null +++ b/tests/components/yale/test_config_flow.py @@ -0,0 +1,275 @@ +"""Test the yale config flow.""" + +from collections.abc import Generator +from unittest.mock import ANY, Mock, patch + +import pytest + +from homeassistant.components.yale.application_credentials import ( + OAUTH2_AUTHORIZE, + OAUTH2_TOKEN, +) +from homeassistant.components.yale.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow + +from .mocks import USER_ID + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + +CLIENT_ID = "1" + + +@pytest.fixture +def mock_setup_entry() -> Generator[Mock]: + """Patch setup entry.""" + with patch( + "homeassistant.components.yale.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + jwt: str, + mock_setup_entry: Mock, +) -> None: + """Check full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": jwt, + "scope": "any", + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "user_id": "mock-user-id", + "expires_at": 1697753347, + }, + ) + + result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.unique_id == USER_ID + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == USER_ID + assert entry.data == { + "auth_implementation": "yale", + "token": { + "access_token": jwt, + "expires_at": ANY, + "expires_in": ANY, + "refresh_token": "mock-refresh-token", + "scope": "any", + "user_id": "mock-user-id", + }, + } + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow_already_exists( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + jwt: str, + mock_setup_entry: Mock, + mock_config_entry: MockConfigEntry, +) -> None: + """Check full flow for a user that already exists.""" + + mock_config_entry.add_to_hass(hass) + + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": jwt, + "scope": "any", + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "user_id": "mock-user-id", + "expires_at": 1697753347, + }, + ) + + result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_config_entry: MockConfigEntry, + reauth_jwt: str, + mock_setup_entry: Mock, +) -> None: + """Test the reauthentication case updates the existing config entry.""" + + mock_config_entry.add_to_hass(hass) + + mock_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "auth" + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": reauth_jwt, + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "user_id": USER_ID, + "token_type": "Bearer", + "expires_at": 1697753347, + }, + ) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert mock_config_entry.unique_id == USER_ID + assert "token" in mock_config_entry.data + # Verify access token is refreshed + assert mock_config_entry.data["token"]["access_token"] == reauth_jwt + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth_wrong_account( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_config_entry: MockConfigEntry, + reauth_jwt_wrong_account: str, + jwt: str, + mock_setup_entry: Mock, +) -> None: + """Test the reauthentication aborts, if user tries to reauthenticate with another account.""" + assert mock_config_entry.data["token"]["access_token"] == jwt + + mock_config_entry.add_to_hass(hass) + + mock_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + 
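+    # async_start_reauth() above schedules a reauth flow for the mocked entry,
+    # so async_progress() should report exactly one in-flight flow here; the
+    # test then drives it through the same external OAuth callback a user would.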
assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "auth" + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": reauth_jwt_wrong_account, + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "token_type": "Bearer", + "expires_at": 1697753347, + }, + ) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_invalid_user" + + assert mock_config_entry.unique_id == USER_ID + assert "token" in mock_config_entry.data + # Verify access token is like before + assert mock_config_entry.data["token"]["access_token"] == jwt diff --git a/tests/components/yale/test_diagnostics.py b/tests/components/yale/test_diagnostics.py new file mode 100644 index 00000000000..e5fd6b1c1a7 --- /dev/null +++ b/tests/components/yale/test_diagnostics.py @@ -0,0 +1,31 @@ +"""Test yale diagnostics.""" + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from .mocks import ( + _create_yale_api_with_devices, + _mock_doorbell_from_fixture, + _mock_lock_from_fixture, +) + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test generating diagnostics for a config entry.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + entry, _, _ = await _create_yale_api_with_devices(hass, [lock_one, doorbell_one]) + diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert diag == snapshot diff --git a/tests/components/yale/test_event.py b/tests/components/yale/test_event.py new file mode 100644 index 00000000000..7aeb9d8f12b --- /dev/null +++ b/tests/components/yale/test_event.py @@ -0,0 +1,162 @@ +"""The event tests for the yale.""" + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_lock_from_fixture, +) + +from tests.common import async_fire_time_changed + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + +async def test_create_doorbell_offline(hass: 
HomeAssistant) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + motion_state = hass.states.get("event.tmt100_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNAVAILABLE + doorbell_state = hass.states.get("event.tmt100_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNAVAILABLE + + +async def test_create_doorbell_with_motion( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + activities = await _mock_activities_from_fixture( + hass, "get_activity.doorbell_motion.json" + ) + await _create_yale_with_devices(hass, [doorbell_one], activities=activities) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state.state == isotime + + +async def test_doorbell_update_via_socketio( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test creation of a doorbell that can be updated via socketio.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + _, socketio = await _create_yale_with_devices(hass, [doorbell_one]) + assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + listener = list(socketio._listeners)[0] + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "doorbell_motion_detected", + "data": { + "event": "doorbell_motion_detected", + "image": { + "height": 640, + "width": 480, + "format": "jpg", + "created_at": "2021-03-16T02:36:26.886Z", + "bytes": 14061, + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" + ), + "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", + "etag": "09e839331c4ea59eef28081f2caa0e90", + }, + "doorbellName": "Front Door", + "callID": None, + "origin": "mars-api", + "mutableContent": True, + }, + }, + ) + + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "buttonpush", + }, + ) + + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != STATE_UNKNOWN 
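+    # The motion event entity's state is the ISO timestamp of the last motion
+    # activity; it is captured next so the assertions further down can confirm
+    # that the button push and the elapsed time leave that timestamp unchanged.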
+ isotime = motion_state.state + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != STATE_UNKNOWN + assert motion_state.state == isotime + + +async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: + """Test creation of a lock with a doorbell.""" + lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") + await _create_yale_with_devices(hass, [lock_one]) + + doorbell_state = hass.states.get( + "event.a6697750d607098bae8d6baa11ef8063_name_doorbell" + ) + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN diff --git a/tests/components/yale/test_init.py b/tests/components/yale/test_init.py new file mode 100644 index 00000000000..c028924199e --- /dev/null +++ b/tests/components/yale/test_init.py @@ -0,0 +1,236 @@ +"""The tests for the yale platform.""" + +from unittest.mock import Mock + +from aiohttp import ClientResponseError +import pytest +from yalexs.exceptions import InvalidAuth, YaleApiError + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.components.yale.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_OPEN, + SERVICE_UNLOCK, + STATE_ON, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from .mocks import ( + _create_yale_with_devices, + _mock_doorsense_enabled_yale_lock_detail, + _mock_doorsense_missing_yale_lock_detail, + _mock_inoperative_yale_lock_detail, + _mock_lock_with_offline_key, + _mock_operative_yale_lock_detail, +) + +from tests.typing import WebSocketGenerator + + +async def test_yale_api_is_failing(hass: HomeAssistant) -> None: + """Config entry state is SETUP_RETRY when yale api is failing.""" + + config_entry, socketio = await _create_yale_with_devices( + hass, + authenticate_side_effect=YaleApiError( + "offline", ClientResponseError(None, None, status=500) + ), + ) + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_yale_is_offline(hass: HomeAssistant) -> None: + """Config entry state is SETUP_RETRY when yale is offline.""" + + config_entry, socketio = await _create_yale_with_devices( + hass, authenticate_side_effect=TimeoutError + ) + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_yale_late_auth_failure(hass: HomeAssistant) -> None: + """Test we can detect a late auth failure.""" + config_entry, socketio = await _create_yale_with_devices( + hass, + authenticate_side_effect=InvalidAuth( + "authfailed", ClientResponseError(None, None, status=401) + ), + ) + + assert config_entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress() + + assert flows[0]["step_id"] == "pick_implementation" + + +async def test_unlock_throws_yale_api_http_error(hass: HomeAssistant) -> None: + """Test unlock throws correct error on http error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + aiohttp_client_response_exception = ClientResponseError(None, None, status=400) + + def _unlock_return_activities_side_effect(access_token, device_id): + raise YaleApiError( + "This should bubble 
up as its user consumable", + aiohttp_client_response_exception, + ) + + await _create_yale_with_devices( + hass, + [mocked_lock_detail], + api_call_side_effects={ + "unlock_return_activities": _unlock_return_activities_side_effect + }, + ) + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + +async def test_lock_throws_yale_api_http_error(hass: HomeAssistant) -> None: + """Test lock throws correct error on http error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + aiohttp_client_response_exception = ClientResponseError(None, None, status=400) + + def _lock_return_activities_side_effect(access_token, device_id): + raise YaleApiError( + "This should bubble up as its user consumable", + aiohttp_client_response_exception, + ) + + await _create_yale_with_devices( + hass, + [mocked_lock_detail], + api_call_side_effects={ + "lock_return_activities": _lock_return_activities_side_effect + }, + ) + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + + +async def test_open_throws_hass_service_not_supported_error( + hass: HomeAssistant, +) -> None: + """Test open throws correct error on entity does not support this service error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [mocked_lock_detail]) + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + with pytest.raises(HomeAssistantError): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + + +async def test_inoperative_locks_are_filtered_out(hass: HomeAssistant) -> None: + """Ensure inoperative locks do not get setup.""" + yale_operative_lock = await _mock_operative_yale_lock_detail(hass) + yale_inoperative_lock = await _mock_inoperative_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [yale_operative_lock, yale_inoperative_lock]) + + lock_abc_name = hass.states.get("lock.abc_name") + assert lock_abc_name is None + lock_a6697750d607098bae8d6baa11ef8063_name = hass.states.get( + "lock.a6697750d607098bae8d6baa11ef8063_name" + ) + assert lock_a6697750d607098bae8d6baa11ef8063_name.state == LockState.LOCKED + + +async def test_lock_has_doorsense(hass: HomeAssistant) -> None: + """Check to see if a lock has doorsense.""" + doorsenselock = await _mock_doorsense_enabled_yale_lock_detail(hass) + nodoorsenselock = await _mock_doorsense_missing_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [doorsenselock, nodoorsenselock]) + + binary_sensor_online_with_doorsense_name_open = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name_open.state == STATE_ON + binary_sensor_missing_doorsense_id_name_open = hass.states.get( + "binary_sensor.missing_with_doorsense_name_door" + ) + assert binary_sensor_missing_doorsense_id_name_open is None + + +async def test_load_unload(hass: HomeAssistant) -> None: + """Config entry can be unloaded.""" + + yale_operative_lock = await _mock_operative_yale_lock_detail(hass) + 
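+    # Both an operative and an inoperative lock are created, mirroring
+    # test_inoperative_locks_are_filtered_out above, before the entry is
+    # unloaded below.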
yale_inoperative_lock = await _mock_inoperative_yale_lock_detail(hass) + config_entry, socketio = await _create_yale_with_devices( + hass, [yale_operative_lock, yale_inoperative_lock] + ) + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_load_triggers_ble_discovery( + hass: HomeAssistant, mock_discovery: Mock +) -> None: + """Test that loading a lock that supports offline ble operation passes the keys to yalexe_ble.""" + + yale_lock_with_key = await _mock_lock_with_offline_key(hass) + yale_lock_without_key = await _mock_operative_yale_lock_detail(hass) + + config_entry, socketio = await _create_yale_with_devices( + hass, [yale_lock_with_key, yale_lock_without_key] + ) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + assert len(mock_discovery.mock_calls) == 1 + assert mock_discovery.mock_calls[0].kwargs["data"] == { + "name": "Front Door Lock", + "address": None, + "serial": "X2FSW05DGA", + "key": "kkk01d4300c1dcxxx1c330f794941111", + "slot": 1, + } + + +async def test_device_remove_devices( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test we can only remove a device that no longer exists.""" + assert await async_setup_component(hass, "config", {}) + yale_operative_lock = await _mock_operative_yale_lock_detail(hass) + config_entry, socketio = await _create_yale_with_devices( + hass, [yale_operative_lock] + ) + entity = entity_registry.entities["lock.a6697750d607098bae8d6baa11ef8063_name"] + + device_entry = device_registry.async_get(entity.device_id) + client = await hass_ws_client(hass) + response = await client.remove_device(device_entry.id, config_entry.entry_id) + assert not response["success"] + + dead_device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, "remove-device-id")}, + ) + response = await client.remove_device(dead_device_entry.id, config_entry.entry_id) + assert response["success"] diff --git a/tests/components/yale/test_lock.py b/tests/components/yale/test_lock.py new file mode 100644 index 00000000000..f6b96120d0d --- /dev/null +++ b/tests/components/yale/test_lock.py @@ -0,0 +1,432 @@ +"""The lock tests for the yale platform.""" + +import datetime + +from aiohttp import ClientResponseError +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion +from yalexs.manager.activity import INITIAL_LOCK_RESYNC_TIME + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_OPEN, + SERVICE_UNLOCK, + STATE_UNAVAILABLE, + STATE_UNKNOWN, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceNotSupported +from homeassistant.helpers import device_registry as dr, entity_registry as er +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorsense_enabled_yale_lock_detail, + _mock_lock_from_fixture, + _mock_lock_with_unlatch, + _mock_operative_yale_lock_detail, + async_setup_component, +) + +from tests.common import async_fire_time_changed + + +async def test_lock_device_registry( + hass: HomeAssistant, device_registry: 
dr.DeviceRegistry, snapshot: SnapshotAssertion +) -> None: + """Test creation of a lock with doorsense and bridge ands up in the registry.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [lock_one]) + + reg_device = device_registry.async_get_device( + identifiers={("yale", "online_with_doorsense")} + ) + assert reg_device == snapshot + + +async def test_lock_changed_by(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.LOCKED + assert lock_state.attributes["changed_by"] == "Your favorite elven princess" + + +async def test_state_locking(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that is locking.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.locking.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + assert hass.states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + + +async def test_state_unlocking(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that is unlocking.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.unlocking.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == LockState.UNLOCKING + + +async def test_state_jammed(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that is jammed.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.jammed.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + assert hass.states.get("lock.online_with_doorsense_name").state == LockState.JAMMED + + +async def test_one_lock_operation( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [lock_one]) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + + assert lock_state.state == LockState.LOCKED + + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.UNLOCKED + + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.LOCKED + + # No activity means it 
will be unavailable until the activity feed has data + assert entity_registry.async_get("sensor.online_with_doorsense_name_operator") + operator_state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert operator_state.state == STATE_UNKNOWN + + +async def test_open_lock_operation(hass: HomeAssistant) -> None: + """Test open lock operation using the open service.""" + lock_with_unlatch = await _mock_lock_with_unlatch(hass) + await _create_yale_with_devices(hass, [lock_with_unlatch]) + + assert hass.states.get("lock.online_with_unlatch_name").state == LockState.LOCKED + + data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + + assert hass.states.get("lock.online_with_unlatch_name").state == LockState.UNLOCKED + + +async def test_open_lock_operation_socketio_connected( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test open lock operation using the open service when socketio is connected.""" + lock_with_unlatch = await _mock_lock_with_unlatch(hass) + assert lock_with_unlatch.pubsub_channel == "pubsub" + + _, socketio = await _create_yale_with_devices(hass, [lock_with_unlatch]) + socketio.connected = True + + assert hass.states.get("lock.online_with_unlatch_name").state == LockState.LOCKED + + data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + + listener = list(socketio._listeners)[0] + listener( + lock_with_unlatch.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Unlocked", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert hass.states.get("lock.online_with_unlatch_name").state == LockState.UNLOCKED + await hass.async_block_till_done() + + +async def test_one_lock_operation_socketio_connected( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test lock and unlock operations are async when socketio is connected.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + assert lock_one.pubsub_channel == "pubsub" + states = hass.states + + _, socketio = await _create_yale_with_devices(hass, [lock_one]) + socketio.connected = True + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.LOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + listener = list(socketio._listeners)[0] + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=1), + { + "status": "kAugLockState_Unlocked", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + lock_state = states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.UNLOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Locked", + }, + ) + + await hass.async_block_till_done() + await 
hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED + + # No activity means it will be unavailable until the activity feed has data + assert entity_registry.async_get("sensor.online_with_doorsense_name_operator") + assert ( + states.get("sensor.online_with_doorsense_name_operator").state == STATE_UNKNOWN + ) + + freezer.tick(INITIAL_LOCK_RESYNC_TIME) + + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Unlocked", + }, + ) + + await hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKED + + +async def test_lock_jammed(hass: HomeAssistant) -> None: + """Test lock gets jammed on unlock.""" + + def _unlock_return_activities_side_effect(access_token, device_id): + raise ClientResponseError(None, None, status=531) + + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices( + hass, + [lock_one], + api_call_side_effects={ + "unlock_return_activities": _unlock_return_activities_side_effect + }, + ) + + states = hass.states + lock_state = states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.LOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + assert states.get("lock.online_with_doorsense_name").state == LockState.JAMMED + + +async def test_lock_throws_exception_on_unknown_status_code( + hass: HomeAssistant, +) -> None: + """Test lock throws exception.""" + + def _unlock_return_activities_side_effect(access_token, device_id): + raise ClientResponseError(None, None, status=500) + + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices( + hass, + [lock_one], + api_call_side_effects={ + "unlock_return_activities": _unlock_return_activities_side_effect + }, + ) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == LockState.LOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + with pytest.raises(ClientResponseError): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + +async def test_one_lock_unknown_state(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_lock_from_fixture( + hass, + "get_lock.online.unknown_state.json", + ) + await _create_yale_with_devices(hass, [lock_one]) + + assert hass.states.get("lock.brokenid_name").state == STATE_UNKNOWN + + +async def test_lock_bridge_offline(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that goes offline.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.bridge_offline.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + states = hass.states + assert states.get("lock.online_with_doorsense_name").state == STATE_UNAVAILABLE + + +async def test_lock_bridge_online(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that goes offline.""" + 
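+    # Counterpart to test_lock_bridge_offline above: loading the bridge_online
+    # activity fixture is expected to leave the lock LOCKED rather than
+    # unavailable.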
lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.bridge_online.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + states = hass.states + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED + + +async def test_lock_update_via_socketio(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + assert lock_one.pubsub_channel == "pubsub" + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + config_entry, socketio = await _create_yale_with_devices( + hass, [lock_one], activities=activities + ) + socketio.connected = True + states = hass.states + + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED + + listener = list(socketio._listeners)[0] + listener( + lock_one.device_id, + dt_util.utcnow(), + { + "status": "kAugLockState_Unlocking", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING + + listener( + lock_one.device_id, + dt_util.utcnow(), + { + "status": "kAugLockState_Locking", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + + socketio.connected = True + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + + # Ensure socketio status is always preserved + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) + await hass.async_block_till_done() + assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Unlocking", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) + await hass.async_block_till_done() + assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + +async def test_open_throws_hass_service_not_supported_error( + hass: HomeAssistant, +) -> None: + """Test open throws correct error on entity does not support this service error.""" + # Fetch translations + await async_setup_component(hass, "homeassistant", {}) + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [mocked_lock_detail]) + entity_id = "lock.a6697750d607098bae8d6baa11ef8063_name" + data = {ATTR_ENTITY_ID: entity_id} + with pytest.raises( + ServiceNotSupported, + match=f"Entity {entity_id} does not support action {LOCK_DOMAIN}.{SERVICE_OPEN}", + ): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) diff --git a/tests/components/yale/test_sensor.py 
b/tests/components/yale/test_sensor.py new file mode 100644 index 00000000000..5d724b4bb9d --- /dev/null +++ b/tests/components/yale/test_sensor.py @@ -0,0 +1,320 @@ +"""The sensor tests for the yale platform.""" + +from typing import Any + +from syrupy import SnapshotAssertion + +from homeassistant import core as ha +from homeassistant.const import ( + ATTR_ENTITY_PICTURE, + ATTR_UNIT_OF_MEASUREMENT, + PERCENTAGE, + STATE_UNKNOWN, +) +from homeassistant.core import CoreState, HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_doorsense_enabled_yale_lock_detail, + _mock_lock_from_fixture, +) + +from tests.common import mock_restore_cache_with_extra_data + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + battery_state = hass.states.get("sensor.k98gidt45gul_name_battery") + assert battery_state.state == "96" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + + +async def test_create_doorbell_offline( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + battery_state = hass.states.get("sensor.tmt100_name_battery") + assert battery_state.state == "81" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + + entry = entity_registry.async_get("sensor.tmt100_name_battery") + assert entry + assert entry.unique_id == "tmt100_device_battery" + + +async def test_create_doorbell_hardwired(hass: HomeAssistant) -> None: + """Test creation of a doorbell that is hardwired without a battery.""" + doorbell_one = await _mock_doorbell_from_fixture( + hass, "get_doorbell.nobattery.json" + ) + await _create_yale_with_devices(hass, [doorbell_one]) + + sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") + assert sensor_tmt100_name_battery is None + + +async def test_create_lock_with_linked_keypad( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a lock with a linked keypad that both have a battery.""" + lock_one = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json") + await _create_yale_with_devices(hass, [lock_one]) + + battery_state = hass.states.get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + + entry = entity_registry.async_get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert entry + assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" + + keypad_battery_state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert keypad_battery_state.state == "62" + assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") + assert entry + assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" + + +async def test_create_lock_with_low_battery_linked_keypad( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a lock with a linked 
keypad where the keypad battery is low."""
+    lock_one = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json")
+    await _create_yale_with_devices(hass, [lock_one])
+
+    battery_state = hass.states.get(
+        "sensor.a6697750d607098bae8d6baa11ef8063_name_battery"
+    )
+    assert battery_state.state == "88"
+    assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE
+    entry = entity_registry.async_get(
+        "sensor.a6697750d607098bae8d6baa11ef8063_name_battery"
+    )
+    assert entry
+    assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery"
+
+    state = hass.states.get("sensor.front_door_lock_keypad_battery")
+    assert state.state == "10"
+    assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE
+    entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery")
+    assert entry
+    assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery"
+
+    # No activity means it will be unknown until someone unlocks/locks it
+    lock_operator_sensor = entity_registry.async_get(
+        "sensor.a6697750d607098bae8d6baa11ef8063_name_operator"
+    )
+    assert (
+        lock_operator_sensor.unique_id
+        == "A6697750D607098BAE8D6BAA11EF8063_lock_operator"
+    )
+    assert (
+        hass.states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator").state
+        == STATE_UNKNOWN
+    )
+
+
+async def test_lock_operator_bluetooth(
+    hass: HomeAssistant, entity_registry: er.EntityRegistry
+) -> None:
+    """Test operation of a lock with doorsense and bridge."""
+    lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass)
+
+    activities = await _mock_activities_from_fixture(
+        hass, "get_activity.lock_from_bluetooth.json"
+    )
+    await _create_yale_with_devices(hass, [lock_one], activities=activities)
+
+    lock_operator_sensor = entity_registry.async_get(
+        "sensor.online_with_doorsense_name_operator"
+    )
+    assert lock_operator_sensor
+
+    state = hass.states.get("sensor.online_with_doorsense_name_operator")
+    assert state.state == "Your favorite elven princess"
+    assert state.attributes["manual"] is False
+    assert state.attributes["tag"] is False
+    assert state.attributes["remote"] is False
+    assert state.attributes["keypad"] is False
+    assert state.attributes["autorelock"] is False
+    assert state.attributes["method"] == "mobile"
+
+
+async def test_lock_operator_keypad(
+    hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion
+) -> None:
+    """Test operation of a lock with doorsense and bridge."""
+    lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass)
+
+    activities = await _mock_activities_from_fixture(
+        hass, "get_activity.lock_from_keypad.json"
+    )
+    await _create_yale_with_devices(hass, [lock_one], activities=activities)
+
+    lock_operator_sensor = entity_registry.async_get(
+        "sensor.online_with_doorsense_name_operator"
+    )
+    assert lock_operator_sensor
+
+    state = hass.states.get("sensor.online_with_doorsense_name_operator")
+    assert state.state == "Your favorite elven princess"
+    assert state.attributes == snapshot
+
+
+async def test_lock_operator_remote(
+    hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion
+) -> None:
+    """Test operation of a lock with doorsense and bridge."""
+    lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass)
+
+    activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json")
+    await _create_yale_with_devices(hass, [lock_one], activities=activities)
+
+    lock_operator_sensor = entity_registry.async_get(
+        "sensor.online_with_doorsense_name_operator"
+    )
+    
assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes == snapshot + + +async def test_lock_operator_manual( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_manual.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes == snapshot + + +async def test_lock_operator_autorelock( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_autorelock.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Auto Relock" + assert state.attributes == snapshot + + +async def test_unlock_operator_manual( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock manually.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.unlock_from_manual.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state == snapshot + + +async def test_unlock_operator_tag( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock with a tag.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.unlock_from_tag.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes == snapshot + + +async def test_restored_state( + hass: HomeAssistant, hass_storage: dict[str, Any], snapshot: SnapshotAssertion +) -> None: + """Test restored state.""" + + entity_id = "sensor.online_with_doorsense_name_operator" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + fake_state = ha.State( + entity_id, + state="Tag Unlock", + attributes={ + "method": "tag", + "manual": False, + "remote": False, + 
"keypad": False, + "tag": True, + "autorelock": False, + ATTR_ENTITY_PICTURE: "image.png", + }, + ) + + # Home assistant is not running yet + hass.set_state(CoreState.not_running) + mock_restore_cache_with_extra_data( + hass, + [ + ( + fake_state, + {"native_value": "Tag Unlock", "native_unit_of_measurement": None}, + ) + ], + ) + + await _create_yale_with_devices(hass, [lock_one]) + + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == "Tag Unlock" + assert state == snapshot diff --git a/tests/components/yale_smart_alarm/conftest.py b/tests/components/yale_smart_alarm/conftest.py index 6ac6dfc6871..91c64c7a7a7 100644 --- a/tests/components/yale_smart_alarm/conftest.py +++ b/tests/components/yale_smart_alarm/conftest.py @@ -7,7 +7,7 @@ from typing import Any from unittest.mock import Mock, patch import pytest -from yalesmartalarmclient import YaleSmartAlarmData +from yalesmartalarmclient import YaleDoorManAPI, YaleLock, YaleSmartAlarmData from yalesmartalarmclient.const import YALE_STATE_ARM_FULL from homeassistant.components.yale_smart_alarm.const import DOMAIN, PLATFORMS @@ -20,7 +20,6 @@ from tests.common import MockConfigEntry, load_fixture ENTRY_CONFIG = { "username": "test-username", "password": "new-test-password", - "name": "Yale Smart Alarm", "area_id": "1", } OPTIONS_CONFIG = {"lock_code_digits": 6} @@ -35,38 +34,64 @@ async def patch_platform_constant() -> list[Platform]: @pytest.fixture async def load_config_entry( hass: HomeAssistant, - get_data: YaleSmartAlarmData, - get_all_data: YaleSmartAlarmData, + get_client: Mock, load_platforms: list[Platform], ) -> tuple[MockConfigEntry, Mock]: """Set up the Yale Smart Living integration in Home Assistant.""" with patch("homeassistant.components.yale_smart_alarm.PLATFORMS", load_platforms): config_entry = MockConfigEntry( + title=ENTRY_CONFIG["username"], domain=DOMAIN, source=SOURCE_USER, data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", unique_id="username", - version=1, + version=2, + minor_version=2, ) config_entry.add_to_hass(hass) - with patch( "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", - autospec=True, - ) as mock_client_class: - client = mock_client_class.return_value - client.auth = Mock() - client.lock_api = Mock() - client.get_all.return_value = get_all_data - client.get_information.return_value = get_data - client.get_armed_status.return_value = YALE_STATE_ARM_FULL + return_value=get_client, + ): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - return (config_entry, client) + return (config_entry, get_client) + + +@pytest.fixture(name="get_client") +async def mock_client( + get_data: YaleSmartAlarmData, + get_all_data: YaleSmartAlarmData, +) -> Mock: + """Mock the Yale client.""" + cycle = get_data.cycle["data"] + data = {"data": cycle["device_status"]} + + with patch( + "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", + autospec=True, + ) as mock_client_class: + client = mock_client_class.return_value + client.auth = Mock() + client.auth.get_authenticated = Mock(return_value=data) + client.auth.post_authenticated = Mock(return_value={"code": "000"}) + client.auth.put_authenticated = Mock(return_value={"code": "000"}) + client.lock_api = YaleDoorManAPI(client.auth) + locks = [ + YaleLock(device, lock_api=client.lock_api) + for device in cycle["device_status"] + if device["type"] == YaleLock.DEVICE_TYPE + ] + client.get_locks.return_value = locks + 
client.get_all.return_value = get_all_data + client.get_information.return_value = get_data + client.get_armed_status.return_value = YALE_STATE_ARM_FULL + + return client @pytest.fixture(name="loaded_fixture", scope="package") @@ -78,14 +103,14 @@ def get_fixture_data() -> dict[str, Any]: return json_data -@pytest.fixture(name="get_data", scope="package") +@pytest.fixture(name="get_data") def get_update_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: """Load update data and return.""" - status = loaded_fixture["STATUS"] - cycle = loaded_fixture["CYCLE"] - online = loaded_fixture["ONLINE"] - panel_info = loaded_fixture["PANEL INFO"] + status = {"data": loaded_fixture["STATUS"]} + cycle = {"data": loaded_fixture["CYCLE"]} + online = {"data": loaded_fixture["ONLINE"]} + panel_info = {"data": loaded_fixture["PANEL INFO"]} return YaleSmartAlarmData( status=status, cycle=cycle, @@ -94,18 +119,18 @@ def get_update_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: ) -@pytest.fixture(name="get_all_data", scope="package") +@pytest.fixture(name="get_all_data") def get_diag_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: """Load all data and return.""" - devices = loaded_fixture["DEVICES"] - mode = loaded_fixture["MODE"] - status = loaded_fixture["STATUS"] - cycle = loaded_fixture["CYCLE"] - online = loaded_fixture["ONLINE"] - history = loaded_fixture["HISTORY"] - panel_info = loaded_fixture["PANEL INFO"] - auth_check = loaded_fixture["AUTH CHECK"] + devices = {"data": loaded_fixture["DEVICES"]} + mode = {"data": loaded_fixture["MODE"]} + status = {"data": loaded_fixture["STATUS"]} + cycle = {"data": loaded_fixture["CYCLE"]} + online = {"data": loaded_fixture["ONLINE"]} + history = {"data": loaded_fixture["HISTORY"]} + panel_info = {"data": loaded_fixture["PANEL INFO"]} + auth_check = {"data": loaded_fixture["AUTH CHECK"]} return YaleSmartAlarmData( devices=devices, mode=mode, diff --git a/tests/components/yale_smart_alarm/fixtures/get_all.json b/tests/components/yale_smart_alarm/fixtures/get_all.json index e85a93f3c3e..6c68e05c566 100644 --- a/tests/components/yale_smart_alarm/fixtures/get_all.json +++ b/tests/components/yale_smart_alarm/fixtures/get_all.json @@ -175,7 +175,7 @@ "address": "RF4", "type": "device_type.door_contact", "name": "Device4", - "status1": "device_status.dc_close", + "status1": "device_status.dc_close,device_status.low_battery", "status2": null, "status_switch": null, "status_power": null, @@ -763,7 +763,7 @@ "address": "RF4", "type": "device_type.door_contact", "name": "Device4", - "status1": "device_status.dc_close", + "status1": "device_status.dc_close,device_status.low_battery", "status2": null, "status_switch": null, "status_power": null, diff --git a/tests/components/yale_smart_alarm/snapshots/test_alarm_control_panel.ambr b/tests/components/yale_smart_alarm/snapshots/test_alarm_control_panel.ambr index 749e62252f3..fcdb7baca03 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_alarm_control_panel.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_alarm_control_panel.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_alarm_control_panel[load_platforms0][alarm_control_panel.yale_smart_alarm-entry] +# name: test_alarm_control_panel[load_platforms0][alarm_control_panel.test_username-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -11,7 +11,7 @@ 'disabled_by': None, 'domain': 'alarm_control_panel', 'entity_category': None, - 'entity_id': 'alarm_control_panel.yale_smart_alarm', + 'entity_id': 
'alarm_control_panel.test_username', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -32,17 +32,17 @@ 'unit_of_measurement': None, }) # --- -# name: test_alarm_control_panel[load_platforms0][alarm_control_panel.yale_smart_alarm-state] +# name: test_alarm_control_panel[load_platforms0][alarm_control_panel.test_username-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'changed_by': None, 'code_arm_required': False, 'code_format': None, - 'friendly_name': 'Yale Smart Alarm', + 'friendly_name': 'test-username', 'supported_features': , }), 'context': , - 'entity_id': 'alarm_control_panel.yale_smart_alarm', + 'entity_id': 'alarm_control_panel.test_username', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr b/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr index 7bb144e8d2a..e519a880de9 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr @@ -1,4 +1,51 @@ # serializer version: 1 +# name: test_binary_sensor[load_platforms0][binary_sensor.device4_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.device4_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'RF4-battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device4_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Device4 Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.device4_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_binary_sensor[load_platforms0][binary_sensor.device4_door-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -46,6 +93,53 @@ 'state': 'off', }) # --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device5_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.device5_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'RF5-battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device5_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Device5 Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.device5_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'off', + }) +# --- # name: test_binary_sensor[load_platforms0][binary_sensor.device5_door-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -93,6 +187,53 @@ 'state': 'on', }) # --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device6_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.device6_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'RF6-battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device6_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Device6 Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.device6_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensor[load_platforms0][binary_sensor.device6_door-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -140,7 +281,7 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_battery-entry] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -152,7 +293,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.yale_smart_alarm_battery', + 'entity_id': 'binary_sensor.test_username_battery', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -173,21 +314,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_battery-state] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_battery-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'Yale Smart Alarm Battery', + 'friendly_name': 'test-username Battery', }), 'context': , - 'entity_id': 'binary_sensor.yale_smart_alarm_battery', + 'entity_id': 'binary_sensor.test_username_battery', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_jam-entry] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_jam-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -199,7 +340,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.yale_smart_alarm_jam', + 'entity_id': 'binary_sensor.test_username_jam', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -220,21 +361,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_jam-state] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_jam-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'Yale Smart Alarm Jam', + 'friendly_name': 'test-username Jam', }), 'context': , - 'entity_id': 'binary_sensor.yale_smart_alarm_jam', + 'entity_id': 
'binary_sensor.test_username_jam', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_power_loss-entry] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_power_loss-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -246,7 +387,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.yale_smart_alarm_power_loss', + 'entity_id': 'binary_sensor.test_username_power_loss', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -267,21 +408,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_power_loss-state] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_power_loss-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'Yale Smart Alarm Power loss', + 'friendly_name': 'test-username Power loss', }), 'context': , - 'entity_id': 'binary_sensor.yale_smart_alarm_power_loss', + 'entity_id': 'binary_sensor.test_username_power_loss', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_tamper-entry] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_tamper-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -293,7 +434,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.yale_smart_alarm_tamper', + 'entity_id': 'binary_sensor.test_username_tamper', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -314,14 +455,14 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.yale_smart_alarm_tamper-state] +# name: test_binary_sensor[load_platforms0][binary_sensor.test_username_tamper-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'Yale Smart Alarm Tamper', + 'friendly_name': 'test-username Tamper', }), 'context': , - 'entity_id': 'binary_sensor.yale_smart_alarm_tamper', + 'entity_id': 'binary_sensor.test_username_tamper', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/yale_smart_alarm/snapshots/test_button.ambr b/tests/components/yale_smart_alarm/snapshots/test_button.ambr index 8abceb0affa..951caced170 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_button.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_button.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_button[load_platforms0][button.yale_smart_alarm_panic_button-entry] +# name: test_button[load_platforms0][button.test_username_panic_button-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -11,7 +11,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': None, - 'entity_id': 'button.yale_smart_alarm_panic_button', + 'entity_id': 'button.test_username_panic_button', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -32,13 +32,13 @@ 'unit_of_measurement': None, }) # --- -# name: test_button[load_platforms0][button.yale_smart_alarm_panic_button-state] +# name: test_button[load_platforms0][button.test_username_panic_button-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Yale Smart Alarm Panic button', + 'friendly_name': 'test-username Panic button', }), 'context': , - 'entity_id': 'button.yale_smart_alarm_panic_button', + 
'entity_id': 'button.test_username_panic_button', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr index d4bbd42aaeb..af939336677 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr @@ -2,27 +2,653 @@ # name: test_diagnostics dict({ 'auth_check': dict({ - 'agent': False, - 'dealer_group': 'yale', - 'dealer_id': '605', - 'first_login': '1', - 'id': '**REDACTED**', - 'is_auth': '1', - 'mac': '**REDACTED**', - 'mail_address': '**REDACTED**', - 'master': '1', - 'name': '**REDACTED**', - 'token_time': '2023-08-17 16:19:20', - 'user_id': '**REDACTED**', - 'xml_version': '2', + 'data': dict({ + 'agent': False, + 'dealer_group': 'yale', + 'dealer_id': '605', + 'first_login': '1', + 'id': '**REDACTED**', + 'is_auth': '1', + 'mac': '**REDACTED**', + 'mail_address': '**REDACTED**', + 'master': '1', + 'name': '**REDACTED**', + 'token_time': '2023-08-17 16:19:20', + 'user_id': '**REDACTED**', + 'xml_version': '2', + }), }), 'cycle': dict({ - 'alarm_event_latest': None, - 'capture_latest': None, - 'device_status': list([ + 'data': dict({ + 'alarm_event_latest': None, + 'capture_latest': None, + 'device_status': list([ + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '35', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '1', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.lock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.lock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': None, + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '2', + 
'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.unlock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': None, + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '3', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.lock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.lock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + '_battery': True, + '_state': 'closed', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '000', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '4', + 'rf': None, + 'rssi': '0', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.dc_close,device_status.low_battery', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.dc_close', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': 
None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_contact', + 'type_no': '4', + }), + dict({ + '_battery': False, + '_state': 'open', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '000', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '5', + 'rf': None, + 'rssi': '0', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.dc_open', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.dc_open', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_contact', + 'type_no': '4', + }), + dict({ + '_battery': False, + '_state': 'unavailable', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '000', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '6', + 'rf': None, + 'rssi': '0', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'unknwon', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_contact', + 'type_no': '4', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': 
None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '36', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '7', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.lock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.lock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '4', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '8', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.unlock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.unlock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '10', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '9', + 'rf': None, + 'rssi': '9', + 
'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.error', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.error', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '001', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '8', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': '', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': 21, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.temperature_sensor', + 'type_no': '40', + }), + ]), + 'model': list([ + dict({ + 'area': '1', + 'mode': 'disarm', + }), + ]), + 'panel_status': dict({ + 'warning_snd_mute': '0', + }), + 'report_event_latest': dict({ + 'cid_code': '1807', + 'event_time': None, + 'id': '**REDACTED**', + 'report_id': '1027299996', + 'time': '1692271914', + 'utc_event_time': None, + }), + }), + }), + 'devices': dict({ + 'data': list([ dict({ - '_state': 'locked', - '_state2': 'closed', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -83,8 +709,6 @@ 'type_no': '72', }), dict({ - '_state': 'unlocked', - '_state2': 'unknown', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -144,8 +768,6 @@ 'type_no': '72', }), dict({ - '_state': 'locked', - '_state2': 'unknown', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -206,7 +828,6 @@ 'type_no': '72', }), dict({ - '_state': 'closed', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -237,7 +858,7 @@ 'sresp_button_2': None, 'sresp_button_3': None, 'sresp_button_4': None, - 'status1': 'device_status.dc_close', + 'status1': 'device_status.dc_close,device_status.low_battery', 'status2': None, 'status_dim_level': None, 'status_fault': list([ @@ -267,7 +888,6 @@ 'type_no': '4', }), dict({ - '_state': 'open', 'address': '**REDACTED**', 'area': '1', 
'bypass': '0', @@ -328,7 +948,6 @@ 'type_no': '4', }), dict({ - '_state': 'unavailable', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -388,8 +1007,6 @@ 'type_no': '4', }), dict({ - '_state': 'unlocked', - '_state2': 'closed', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -450,8 +1067,6 @@ 'type_no': '72', }), dict({ - '_state': 'unlocked', - '_state2': 'open', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -512,7 +1127,6 @@ 'type_no': '72', }), dict({ - '_state': 'unavailable', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -632,799 +1246,193 @@ 'type_no': '40', }), ]), - 'model': list([ + }), + 'history': dict({ + 'data': list([ + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027299996', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:31:54', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180201101', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1802', + 'name': '**REDACTED**', + 'report_id': '1027299889', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:31:43', + 'type': 'device_type.door_lock', + 'user': 101, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027299587', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:31:11', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180101001', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1801', + 'name': '**REDACTED**', + 'report_id': '1027296099', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:24:52', + 'type': 'device_type.door_lock', + 'user': 1, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027273782', + 'status_temp_format': 'C', + 'time': '2023/08/17 10:43:21', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180201101', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1802', + 'name': '**REDACTED**', + 'report_id': '1027273230', + 'status_temp_format': 'C', + 'time': '2023/08/17 10:42:09', + 'type': 'device_type.door_lock', + 'user': 101, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027100172', + 'status_temp_format': 'C', + 'time': '2023/08/17 05:28:57', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180101001', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1801', + 'name': '**REDACTED**', + 'report_id': '1027099978', + 'status_temp_format': 'C', + 'time': '2023/08/17 05:28:39', + 'type': 'device_type.door_lock', + 'user': 1, + 'zone': 1, + }), + dict({ + 'area': 0, + 'cid': '18160200000', + 'cid_source': 'SYSTEM', + 'event_time': None, + 'event_type': '1602', + 'name': '', + 'report_id': '1027093266', + 'status_temp_format': 'C', + 'time': '2023/08/17 05:17:12', + 'type': '', + 'user': '', + 'zone': 0, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1026912623', + 
'status_temp_format': 'C', + 'time': '2023/08/16 20:29:36', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + ]), + }), + 'mode': dict({ + 'data': list([ dict({ 'area': '1', 'mode': 'disarm', }), ]), - 'panel_status': dict({ - 'warning_snd_mute': '0', - }), - 'report_event_latest': dict({ - 'cid_code': '1807', - 'event_time': None, - 'id': '**REDACTED**', - 'report_id': '1027299996', - 'time': '1692271914', - 'utc_event_time': None, - }), }), - 'devices': list([ - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '35', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '1', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.lock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.lock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': None, - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '2', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.unlock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 
'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': None, - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '3', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.lock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.lock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '000', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '4', - 'rf': None, - 'rssi': '0', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.dc_close', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.dc_close', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_contact', - 'type_no': '4', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '000', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 
'no': '5', - 'rf': None, - 'rssi': '0', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.dc_open', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.dc_open', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_contact', - 'type_no': '4', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '000', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '6', - 'rf': None, - 'rssi': '0', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'unknwon', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_contact', - 'type_no': '4', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '36', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '7', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.lock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.lock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, 
- 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '4', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '8', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.unlock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.unlock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '10', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '9', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.error', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.error', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '001', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': 
None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '8', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': '', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': 21, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.temperature_sensor', - 'type_no': '40', - }), - ]), - 'history': list([ - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027299996', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:31:54', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180201101', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1802', - 'name': '**REDACTED**', - 'report_id': '1027299889', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:31:43', - 'type': 'device_type.door_lock', - 'user': 101, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027299587', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:31:11', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180101001', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1801', - 'name': '**REDACTED**', - 'report_id': '1027296099', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:24:52', - 'type': 'device_type.door_lock', - 'user': 1, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027273782', - 'status_temp_format': 'C', - 'time': '2023/08/17 10:43:21', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180201101', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1802', - 'name': '**REDACTED**', - 'report_id': '1027273230', - 'status_temp_format': 'C', - 'time': '2023/08/17 10:42:09', - 'type': 'device_type.door_lock', - 'user': 101, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027100172', - 'status_temp_format': 'C', - 'time': '2023/08/17 05:28:57', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180101001', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1801', - 'name': '**REDACTED**', - 'report_id': '1027099978', - 
'status_temp_format': 'C', - 'time': '2023/08/17 05:28:39', - 'type': 'device_type.door_lock', - 'user': 1, - 'zone': 1, - }), - dict({ - 'area': 0, - 'cid': '18160200000', - 'cid_source': 'SYSTEM', - 'event_time': None, - 'event_type': '1602', - 'name': '', - 'report_id': '1027093266', - 'status_temp_format': 'C', - 'time': '2023/08/17 05:17:12', - 'type': '', - 'user': '', - 'zone': 0, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1026912623', - 'status_temp_format': 'C', - 'time': '2023/08/16 20:29:36', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - ]), - 'mode': list([ - dict({ - 'area': '1', - 'mode': 'disarm', - }), - ]), - 'online': 'online', + 'online': dict({ + 'data': 'online', + }), 'panel_info': dict({ - 'SMS_Balance': '50', - 'contact': '', - 'dealer_name': 'Poland', - 'mac': '**REDACTED**', - 'mail_address': '**REDACTED**', - 'name': '', - 'net_version': 'MINIGW-MZ-1_G 1.0.1.29A', - 'phone': 'UK-01902364606 / Sweden-0770373710 / Demark-89887818 / Norway-81569036', - 'report_account': '**REDACTED**', - 'rf51_version': '', - 'service_time': 'UK - Mon to Fri 8:30 til 17:30 / Scandinavia - Mon to Fri 8:00 til 20:00, Sat to Sun 10:00 til 15:00', - 'version': 'MINIGW-MZ-1_G 1.0.1.29A,,4.1.2.6.2,00:1D:94:0B:5E:A7,10111112,ML_yamga', - 'voice_balance': '0', - 'xml_version': '2', - 'zb_version': '4.1.2.6.2', - 'zw_version': '', + 'data': dict({ + 'SMS_Balance': '50', + 'contact': '', + 'dealer_name': 'Poland', + 'mac': '**REDACTED**', + 'mail_address': '**REDACTED**', + 'name': '', + 'net_version': 'MINIGW-MZ-1_G 1.0.1.29A', + 'phone': 'UK-01902364606 / Sweden-0770373710 / Demark-89887818 / Norway-81569036', + 'report_account': '**REDACTED**', + 'rf51_version': '', + 'service_time': 'UK - Mon to Fri 8:30 til 17:30 / Scandinavia - Mon to Fri 8:00 til 20:00, Sat to Sun 10:00 til 15:00', + 'version': 'MINIGW-MZ-1_G 1.0.1.29A,,4.1.2.6.2,00:1D:94:0B:5E:A7,10111112,ML_yamga', + 'voice_balance': '0', + 'xml_version': '2', + 'zb_version': '4.1.2.6.2', + 'zw_version': '', + }), }), 'status': dict({ - 'acfail': 'main.normal', - 'battery': 'main.normal', - 'gsm_rssi': '0', - 'imei': '', - 'imsi': '', - 'jam': 'main.normal', - 'rssi': '1', - 'tamper': 'main.normal', + 'data': dict({ + 'acfail': 'main.normal', + 'battery': 'main.normal', + 'gsm_rssi': '0', + 'imei': '', + 'imsi': '', + 'jam': 'main.normal', + 'rssi': '1', + 'tamper': 'main.normal', + }), }), }) # --- diff --git a/tests/components/yale_smart_alarm/snapshots/test_lock.ambr b/tests/components/yale_smart_alarm/snapshots/test_lock.ambr index da9c11e01d2..34da7db087a 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_lock.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_lock.ambr @@ -236,7 +236,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unlocked', + 'state': 'open', }) # --- # name: test_lock[load_platforms0][lock.device9-entry] diff --git a/tests/components/yale_smart_alarm/snapshots/test_select.ambr b/tests/components/yale_smart_alarm/snapshots/test_select.ambr new file mode 100644 index 00000000000..52ec7a99c2c --- /dev/null +++ b/tests/components/yale_smart_alarm/snapshots/test_select.ambr @@ -0,0 +1,343 @@ +# serializer version: 1 +# name: test_switch[load_platforms0][select.device1_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'high', + 'low', + 'off', 
+ ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.device1_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '1111-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][select.device1_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device1 Volume', + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.device1_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'low', + }) +# --- +# name: test_switch[load_platforms0][select.device2_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.device2_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '2222-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][select.device2_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device2 Volume', + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.device2_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'low', + }) +# --- +# name: test_switch[load_platforms0][select.device3_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.device3_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '3333-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][select.device3_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device3 Volume', + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.device3_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'low', + }) +# --- +# name: test_switch[load_platforms0][select.device7_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'high', + 'low', + 'off', 
+ ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.device7_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '7777-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][select.device7_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device7 Volume', + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.device7_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'low', + }) +# --- +# name: test_switch[load_platforms0][select.device8_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.device8_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '8888-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][select.device8_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device8 Volume', + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.device8_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'low', + }) +# --- +# name: test_switch[load_platforms0][select.device9_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.device9_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '9999-volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][select.device9_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device9 Volume', + 'options': list([ + 'high', + 'low', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.device9_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'low', + }) +# --- diff --git a/tests/components/yale_smart_alarm/snapshots/test_switch.ambr b/tests/components/yale_smart_alarm/snapshots/test_switch.ambr new file mode 100644 index 00000000000..f631a6fcbfe --- /dev/null +++ 
b/tests/components/yale_smart_alarm/snapshots/test_switch.ambr @@ -0,0 +1,277 @@ +# serializer version: 1 +# name: test_switch[load_platforms0][switch.device1_autolock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.device1_autolock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Autolock', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'autolock', + 'unique_id': '1111-autolock', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.device1_autolock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device1 Autolock', + }), + 'context': , + 'entity_id': 'switch.device1_autolock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[load_platforms0][switch.device2_autolock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.device2_autolock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Autolock', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'autolock', + 'unique_id': '2222-autolock', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.device2_autolock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device2 Autolock', + }), + 'context': , + 'entity_id': 'switch.device2_autolock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[load_platforms0][switch.device3_autolock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.device3_autolock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Autolock', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'autolock', + 'unique_id': '3333-autolock', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.device3_autolock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device3 Autolock', + }), + 'context': , + 'entity_id': 'switch.device3_autolock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[load_platforms0][switch.device7_autolock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.device7_autolock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Autolock', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'autolock', + 'unique_id': '7777-autolock', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.device7_autolock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device7 Autolock', + }), + 'context': , + 'entity_id': 'switch.device7_autolock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[load_platforms0][switch.device8_autolock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.device8_autolock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Autolock', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'autolock', + 'unique_id': '8888-autolock', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.device8_autolock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device8 Autolock', + }), + 'context': , + 'entity_id': 'switch.device8_autolock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[load_platforms0][switch.device9_autolock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.device9_autolock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Autolock', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'autolock', + 'unique_id': '9999-autolock', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.device9_autolock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device9 Autolock', + }), + 'context': , + 'entity_id': 'switch.device9_autolock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/yale_smart_alarm/test_alarm_control_panel.py b/tests/components/yale_smart_alarm/test_alarm_control_panel.py index 4e8330df071..0280223b72a 100644 --- a/tests/components/yale_smart_alarm/test_alarm_control_panel.py +++ b/tests/components/yale_smart_alarm/test_alarm_control_panel.py @@ -2,16 +2,27 @@ from __future__ import annotations +from copy import deepcopy from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory import pytest from 
syrupy.assertion import SnapshotAssertion +from yalesmartalarmclient import YaleSmartAlarmData -from homeassistant.const import Platform +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + SERVICE_ALARM_ARM_HOME, + SERVICE_ALARM_DISARM, + AlarmControlPanelState, +) +from homeassistant.const import ATTR_CODE, ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.parametrize( @@ -27,3 +38,111 @@ async def test_alarm_control_panel( """Test the Yale Smart Alarm alarm_control_panel.""" entry = load_config_entry[0] await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +@pytest.mark.parametrize( + "load_platforms", + [[Platform.ALARM_CONTROL_PANEL]], +) +async def test_alarm_control_panel_service_calls( + hass: HomeAssistant, + get_data: YaleSmartAlarmData, + load_config_entry: tuple[MockConfigEntry, Mock], +) -> None: + """Test the Yale Smart Alarm alarm_control_panel action calls.""" + + client = load_config_entry[1] + + data = deepcopy(get_data.cycle) + data["data"] = data["data"].pop("device_status") + + client.auth.get_authenticated = Mock(return_value=data) + client.disarm = Mock(return_value=True) + client.arm_partial = Mock(return_value=True) + client.arm_full = Mock(return_value=True) + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + client.disarm.assert_called_once() + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.DISARMED + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_HOME, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + client.arm_partial.assert_called_once() + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_HOME + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + client.arm_full.assert_called_once() + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + client.disarm = Mock(side_effect=ConnectionError("no connection")) + + with pytest.raises( + HomeAssistantError, + match="Could not set alarm for test-username: no connection", + ): + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + client.disarm = Mock(return_value=False) + + with pytest.raises( + HomeAssistantError, + match="Could not change alarm, check system ready for arming", + ): + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + + state = 
hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + +@pytest.mark.parametrize( + "load_platforms", + [[Platform.ALARM_CONTROL_PANEL]], +) +async def test_alarm_control_panel_not_available( + hass: HomeAssistant, + get_data: YaleSmartAlarmData, + load_config_entry: tuple[MockConfigEntry, Mock], + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Yale Smart Alarm alarm_control_panel not being available.""" + + client = load_config_entry[1] + client.get_armed_status = Mock(return_value=None) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + freezer.tick(3600) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/yale_smart_alarm/test_button.py b/tests/components/yale_smart_alarm/test_button.py index ad6074345d3..cb28e60ab22 100644 --- a/tests/components/yale_smart_alarm/test_button.py +++ b/tests/components/yale_smart_alarm/test_button.py @@ -37,7 +37,7 @@ async def test_button( BUTTON_DOMAIN, SERVICE_PRESS, { - ATTR_ENTITY_ID: "button.yale_smart_alarm_panic_button", + ATTR_ENTITY_ID: "button.test_username_panic_button", }, blocking=True, ) @@ -50,7 +50,7 @@ async def test_button( BUTTON_DOMAIN, SERVICE_PRESS, { - ATTR_ENTITY_ID: "button.yale_smart_alarm_panic_button", + ATTR_ENTITY_ID: "button.test_username_panic_button", }, blocking=True, ) diff --git a/tests/components/yale_smart_alarm/test_config_flow.py b/tests/components/yale_smart_alarm/test_config_flow.py index 4ef201d2122..0b008d4c696 100644 --- a/tests/components/yale_smart_alarm/test_config_flow.py +++ b/tests/components/yale_smart_alarm/test_config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest from yalesmartalarmclient.exceptions import AuthenticationError, UnknownError @@ -48,7 +48,6 @@ async def test_form(hass: HomeAssistant) -> None: assert result2["data"] == { "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", } assert len(mock_setup_entry.mock_calls) == 1 @@ -112,7 +111,6 @@ async def test_form_invalid_auth( assert result2["data"] == { "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", } @@ -120,27 +118,20 @@ async def test_form_invalid_auth( async def test_reauth_flow(hass: HomeAssistant) -> None: """Test a reauthentication flow.""" entry = MockConfigEntry( + title="test-username", domain=DOMAIN, unique_id="test-username", data={ "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", }, version=2, + minor_version=2, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -157,7 +148,6 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", "password": "new-test-password", }, ) @@ -168,7 +158,6 @@ async 
def test_reauth_flow(hass: HomeAssistant) -> None: assert entry.data == { "username": "test-username", "password": "new-test-password", - "name": "Yale Smart Alarm", "area_id": "1", } @@ -190,27 +179,20 @@ async def test_reauth_flow_error( ) -> None: """Test a reauthentication flow.""" entry = MockConfigEntry( + title="test-username", domain=DOMAIN, unique_id="test-username", data={ "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", }, version=2, + minor_version=2, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", @@ -219,7 +201,6 @@ async def test_reauth_flow_error( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", "password": "wrong-password", }, ) @@ -242,7 +223,6 @@ async def test_reauth_flow_error( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", "password": "new-test-password", }, ) @@ -253,48 +233,239 @@ async def test_reauth_flow_error( assert entry.data == { "username": "test-username", "password": "new-test-password", - "name": "Yale Smart Alarm", "area_id": "1", } -async def test_options_flow(hass: HomeAssistant) -> None: - """Test options config flow.""" +async def test_reconfigure(hass: HomeAssistant) -> None: + """Test reconfigure config flow.""" entry = MockConfigEntry( + title="test-username", domain=DOMAIN, unique_id="test-username", data={ "username": "test-username", "password": "test-password", - "name": "Yale Smart Alarm", "area_id": "1", }, version=2, + minor_version=2, ) entry.add_to_hass(hass) + result = await entry.start_reconfigure_flow(hass) + with ( patch( "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", - return_value=True, + return_value="", ), patch( "homeassistant.components.yale_smart_alarm.async_setup_entry", return_value=True, ), ): - assert await hass.config_entries.async_setup(entry.entry_id) + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "new-test-password", + "area_id": "2", + }, + ) await hass.async_block_till_done() + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert entry.data == { + "username": "test-username", + "password": "new-test-password", + "area_id": "2", + } + + +async def test_reconfigure_username_exist(hass: HomeAssistant) -> None: + """Test reconfigure config flow abort other username already exist.""" + entry = MockConfigEntry( + title="test-username", + domain=DOMAIN, + unique_id="test-username", + data={ + "username": "test-username", + "password": "test-password", + "area_id": "1", + }, + version=2, + minor_version=2, + ) + entry.add_to_hass(hass) + entry2 = MockConfigEntry( + title="other-username", + domain=DOMAIN, + unique_id="other-username", + data={ + "username": "other-username", + "password": "test-password", + "area_id": "1", + }, + version=2, + minor_version=2, + ) + entry2.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), 
+ patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "other-username", + "password": "test-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unique_id_exists"} + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "other-new-username", + "password": "test-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + "username": "other-new-username", + "password": "test-password", + "area_id": "1", + } + + +@pytest.mark.parametrize( + ("sideeffect", "p_error"), + [ + (AuthenticationError, "invalid_auth"), + (ConnectionError, "cannot_connect"), + (TimeoutError, "cannot_connect"), + (UnknownError, "cannot_connect"), + ], +) +async def test_reconfigure_flow_error( + hass: HomeAssistant, sideeffect: Exception, p_error: str +) -> None: + """Test a reauthentication flow.""" + entry = MockConfigEntry( + title="test-username", + domain=DOMAIN, + unique_id="test-username", + data={ + "username": "test-username", + "password": "test-password", + "area_id": "1", + }, + version=2, + minor_version=2, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + side_effect=sideeffect, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "update-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": p_error} + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "new-test-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + "username": "test-username", + "password": "new-test-password", + "area_id": "1", + } + + +async def test_options_flow( + hass: HomeAssistant, + load_config_entry: tuple[MockConfigEntry, Mock], +) -> None: + """Test options config flow.""" + entry = load_config_entry[0] + result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={"lock_code_digits": 6}, - ) + with patch( + "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", + return_value=load_config_entry[1], + ): + result = await 
hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"lock_code_digits": 4}, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {"lock_code_digits": 6} + assert result["data"] == {"lock_code_digits": 4} + + assert entry.state == config_entries.ConfigEntryState.LOADED diff --git a/tests/components/yale_smart_alarm/test_coordinator.py b/tests/components/yale_smart_alarm/test_coordinator.py index 41362f2318a..8d30e8ad21a 100644 --- a/tests/components/yale_smart_alarm/test_coordinator.py +++ b/tests/components/yale_smart_alarm/test_coordinator.py @@ -13,9 +13,10 @@ from yalesmartalarmclient import ( YaleSmartAlarmData, ) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.yale_smart_alarm.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_UNAVAILABLE +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -47,7 +48,8 @@ async def test_coordinator_setup_errors( options=OPTIONS_CONFIG, entry_id="1", unique_id="username", - version=1, + version=2, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -60,7 +62,7 @@ async def test_coordinator_setup_errors( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert not state @@ -73,15 +75,15 @@ async def test_coordinator_setup_and_update_errors( client = load_config_entry[1] - state = hass.states.get("alarm_control_panel.yale_smart_alarm") - assert state.state == STATE_ALARM_ARMED_AWAY + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY client.reset_mock() client.get_information.side_effect = ConnectionError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == STATE_UNAVAILABLE client.reset_mock() @@ -89,7 +91,7 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=2)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == STATE_UNAVAILABLE client.reset_mock() @@ -97,7 +99,7 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=3)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == STATE_UNAVAILABLE client.reset_mock() @@ -105,7 +107,7 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=4)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - 
state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == STATE_UNAVAILABLE client.reset_mock() @@ -115,13 +117,13 @@ async def test_coordinator_setup_and_update_errors( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") - assert state.state == STATE_ALARM_ARMED_AWAY + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY client.reset_mock() client.get_information.side_effect = AuthenticationError("Can not authenticate") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=6)) await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() - state = hass.states.get("alarm_control_panel.yale_smart_alarm") + state = hass.states.get("alarm_control_panel.test_username") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/yale_smart_alarm/test_init.py b/tests/components/yale_smart_alarm/test_init.py new file mode 100644 index 00000000000..c499320c29c --- /dev/null +++ b/tests/components/yale_smart_alarm/test_init.py @@ -0,0 +1,99 @@ +"""Test for Yale Smart Alarm component Init.""" + +from __future__ import annotations + +from unittest.mock import Mock, patch + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.yale_smart_alarm.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import ENTRY_CONFIG, OPTIONS_CONFIG + +from tests.common import MockConfigEntry + + +async def test_setup_entry( + hass: HomeAssistant, + get_client: Mock, +) -> None: + """Test setup entry.""" + entry = MockConfigEntry( + title=ENTRY_CONFIG["username"], + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + options=OPTIONS_CONFIG, + entry_id="1", + unique_id="username", + version=2, + minor_version=2, + ) + entry.add_to_hass(hass) + + with patch( + "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", + return_value=get_client, + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(entry.entry_id) + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_migrate_entry( + hass: HomeAssistant, + get_client: Mock, + entity_registry: er.EntityRegistry, +) -> None: + """Test migrate entry unique id.""" + config = { + "username": "test-username", + "password": "new-test-password", + "name": "Yale Smart Alarm", + "area_id": "1", + } + options = {"lock_code_digits": 6, "code": "123456"} + entry = MockConfigEntry( + title=ENTRY_CONFIG["username"], + domain=DOMAIN, + source=SOURCE_USER, + data=config, + options=options, + entry_id="1", + unique_id="username", + version=1, + minor_version=1, + ) + entry.add_to_hass(hass) + lock = entity_registry.async_get_or_create( + LOCK_DOMAIN, + DOMAIN, + "1111", + config_entry=entry, + has_entity_name=True, + original_name="Device1", + ) + + with patch( + "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", + return_value=get_client, + ): + await 
hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + assert entry.version == 2 + assert entry.minor_version == 2 + assert entry.data == ENTRY_CONFIG + assert entry.options == OPTIONS_CONFIG + + lock_entity_id = entity_registry.async_get_entity_id(LOCK_DOMAIN, DOMAIN, "1111") + lock = entity_registry.async_get(lock_entity_id) + + assert lock.options == {"lock": {"default_code": "123456"}} diff --git a/tests/components/yale_smart_alarm/test_lock.py b/tests/components/yale_smart_alarm/test_lock.py index 7c67703924b..bb8c9d55053 100644 --- a/tests/components/yale_smart_alarm/test_lock.py +++ b/tests/components/yale_smart_alarm/test_lock.py @@ -47,15 +47,13 @@ async def test_lock_service_calls( hass: HomeAssistant, get_data: YaleSmartAlarmData, load_config_entry: tuple[MockConfigEntry, Mock], - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, ) -> None: """Test the Yale Smart Alarm lock.""" client = load_config_entry[1] data = deepcopy(get_data.cycle) - data["data"] = data.pop("device_status") + data["data"] = data["data"].pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "000"}) @@ -101,15 +99,13 @@ async def test_lock_service_call_fails( hass: HomeAssistant, get_data: YaleSmartAlarmData, load_config_entry: tuple[MockConfigEntry, Mock], - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, ) -> None: """Test the Yale Smart Alarm lock service call fails.""" client = load_config_entry[1] data = deepcopy(get_data.cycle) - data["data"] = data.pop("device_status") + data["data"] = data["data"].pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(side_effect=UnknownError("test_side_effect")) @@ -153,19 +149,15 @@ async def test_lock_service_call_fails_with_incorrect_status( hass: HomeAssistant, get_data: YaleSmartAlarmData, load_config_entry: tuple[MockConfigEntry, Mock], - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, ) -> None: """Test the Yale Smart Alarm lock service call fails with incorrect return state.""" client = load_config_entry[1] data = deepcopy(get_data.cycle) - data["data"] = data.pop("device_status") + data["data"] = data["data"].pop("device_status") - client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "FFF"}) - client.lock_api = YaleDoorManAPI(client.auth) state = hass.states.get("lock.device1") assert state.state == "locked" diff --git a/tests/components/yale_smart_alarm/test_select.py b/tests/components/yale_smart_alarm/test_select.py new file mode 100644 index 00000000000..c874f83aed7 --- /dev/null +++ b/tests/components/yale_smart_alarm/test_select.py @@ -0,0 +1,66 @@ +"""The test for the Yale smart living select.""" + +from __future__ import annotations + +from unittest.mock import Mock + +import pytest +from syrupy.assertion import SnapshotAssertion +from yalesmartalarmclient import YaleSmartAlarmData + +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + 
"load_platforms", + [[Platform.SELECT]], +) +async def test_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + load_config_entry: tuple[MockConfigEntry, Mock], + get_data: YaleSmartAlarmData, + snapshot: SnapshotAssertion, +) -> None: + """Test the Yale Smart Living volume select.""" + client = load_config_entry[1] + + await snapshot_platform( + hass, entity_registry, snapshot, load_config_entry[0].entry_id + ) + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.device1_volume", + ATTR_OPTION: "high", + }, + blocking=True, + ) + + client.auth.post_authenticated.assert_called_once() + client.auth.put_authenticated.assert_called_once() + + state = hass.states.get("select.device1_volume") + assert state.state == "high" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.device1_volume", + ATTR_OPTION: "not_exist", + }, + blocking=True, + ) diff --git a/tests/components/yale_smart_alarm/test_switch.py b/tests/components/yale_smart_alarm/test_switch.py new file mode 100644 index 00000000000..369f8f8f10c --- /dev/null +++ b/tests/components/yale_smart_alarm/test_switch.py @@ -0,0 +1,62 @@ +"""The test for the Yale smart living switch.""" + +from __future__ import annotations + +from unittest.mock import Mock + +import pytest +from syrupy.assertion import SnapshotAssertion +from yalesmartalarmclient import YaleSmartAlarmData + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + "load_platforms", + [[Platform.SWITCH]], +) +async def test_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + load_config_entry: tuple[MockConfigEntry, Mock], + get_data: YaleSmartAlarmData, + snapshot: SnapshotAssertion, +) -> None: + """Test the Yale Smart Living autolock switch.""" + + await snapshot_platform( + hass, entity_registry, snapshot, load_config_entry[0].entry_id + ) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: "switch.device1_autolock", + }, + blocking=True, + ) + + state = hass.states.get("switch.device1_autolock") + assert state.state == STATE_OFF + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.device1_autolock", + }, + blocking=True, + ) + + state = hass.states.get("switch.device1_autolock") + assert state.state == STATE_ON diff --git a/tests/components/yalexs_ble/test_config_flow.py b/tests/components/yalexs_ble/test_config_flow.py index 15552fdec5f..c546e754239 100644 --- a/tests/components/yalexs_ble/test_config_flow.py +++ b/tests/components/yalexs_ble/test_config_flow.py @@ -513,14 +513,10 @@ async def test_integration_discovery_takes_precedence_over_bluetooth( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} - flows = [ - flow - for flow in hass.config_entries.flow.async_progress() - if flow["handler"] == DOMAIN - ] + flows = list(hass.config_entries.flow._handler_progress_index[DOMAIN]) assert len(flows) == 1 - assert flows[0]["context"]["unique_id"] == YALE_ACCESS_LOCK_DISCOVERY_INFO.address - assert 
flows[0]["context"]["local_name"] == YALE_ACCESS_LOCK_DISCOVERY_INFO.name + assert flows[0].unique_id == YALE_ACCESS_LOCK_DISCOVERY_INFO.address + assert flows[0].local_name == YALE_ACCESS_LOCK_DISCOVERY_INFO.name with patch( "homeassistant.components.yalexs_ble.util.async_discovered_service_info", @@ -728,14 +724,10 @@ async def test_integration_discovery_takes_precedence_over_bluetooth_uuid_addres assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} - flows = [ - flow - for flow in hass.config_entries.flow.async_progress() - if flow["handler"] == DOMAIN - ] + flows = list(hass.config_entries.flow._handler_progress_index[DOMAIN]) assert len(flows) == 1 - assert flows[0]["context"]["unique_id"] == LOCK_DISCOVERY_INFO_UUID_ADDRESS.address - assert flows[0]["context"]["local_name"] == LOCK_DISCOVERY_INFO_UUID_ADDRESS.name + assert flows[0].unique_id == LOCK_DISCOVERY_INFO_UUID_ADDRESS.address + assert flows[0].local_name == LOCK_DISCOVERY_INFO_UUID_ADDRESS.name with patch( "homeassistant.components.yalexs_ble.util.async_discovered_service_info", @@ -808,14 +800,10 @@ async def test_integration_discovery_takes_precedence_over_bluetooth_non_unique_ assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} - flows = [ - flow - for flow in hass.config_entries.flow.async_progress() - if flow["handler"] == DOMAIN - ] + flows = list(hass.config_entries.flow._handler_progress_index[DOMAIN]) assert len(flows) == 1 - assert flows[0]["context"]["unique_id"] == OLD_FIRMWARE_LOCK_DISCOVERY_INFO.address - assert flows[0]["context"]["local_name"] == OLD_FIRMWARE_LOCK_DISCOVERY_INFO.name + assert flows[0].unique_id == OLD_FIRMWARE_LOCK_DISCOVERY_INFO.address + assert flows[0].local_name == OLD_FIRMWARE_LOCK_DISCOVERY_INFO.name with patch( "homeassistant.components.yalexs_ble.util.async_discovered_service_info", @@ -945,11 +933,7 @@ async def test_reauth(hass: HomeAssistant) -> None: unique_id=YALE_ACCESS_LOCK_DISCOVERY_INFO.address, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_validate" diff --git a/tests/components/yeelight/test_config_flow.py b/tests/components/yeelight/test_config_flow.py index 4d788ba8258..1acb553af3d 100644 --- a/tests/components/yeelight/test_config_flow.py +++ b/tests/components/yeelight/test_config_flow.py @@ -7,7 +7,11 @@ import pytest from homeassistant import config_entries from homeassistant.components import dhcp, ssdp, zeroconf -from homeassistant.components.yeelight.config_flow import MODEL_UNKNOWN, CannotConnect +from homeassistant.components.yeelight.config_flow import ( + MODEL_UNKNOWN, + CannotConnect, + YeelightConfigFlow, +) from homeassistant.components.yeelight.const import ( CONF_DETECTED_MODEL, CONF_MODE_MUSIC, @@ -503,10 +507,20 @@ async def test_discovered_by_homekit_and_dhcp(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] is None + real_is_matching = YeelightConfigFlow.is_matching + return_values = [] + + def is_matching(self, other_flow) -> bool: + return_values.append(real_is_matching(self, other_flow)) + return return_values[-1] + with ( _patch_discovery(), _patch_discovery_interval(), patch(f"{MODULE_CONFIG_FLOW}.AsyncBulb", 
return_value=mocked_bulb), + patch.object( + YeelightConfigFlow, "is_matching", wraps=is_matching, autospec=True + ), ): result2 = await hass.config_entries.flow.async_init( DOMAIN, @@ -518,6 +532,8 @@ async def test_discovered_by_homekit_and_dhcp(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_in_progress" + # Ensure the is_matching method returned True + assert return_values == [True] with ( _patch_discovery(), diff --git a/tests/components/yeelight/test_light.py b/tests/components/yeelight/test_light.py index eba4d4fe284..56162d4d9d1 100644 --- a/tests/components/yeelight/test_light.py +++ b/tests/components/yeelight/test_light.py @@ -24,17 +24,17 @@ from yeelight.main import _MODEL_SPECS from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_RGB_COLOR, ATTR_TRANSITION, FLASH_LONG, FLASH_SHORT, SERVICE_TURN_OFF, SERVICE_TURN_ON, + ColorMode, LightEntityFeature, ) from homeassistant.components.yeelight.const import ( @@ -58,6 +58,7 @@ from homeassistant.components.yeelight.const import ( YEELIGHT_TEMPERATURE_TRANSACTION, ) from homeassistant.components.yeelight.light import ( + ATTR_KELVIN, ATTR_MINUTES, ATTR_MODE, EFFECT_CANDLE_FLICKER, @@ -106,7 +107,6 @@ from homeassistant.util.color import ( color_RGB_to_hs, color_RGB_to_xy, color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, ) from . import ( @@ -288,7 +288,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - # turn_on color_temp brightness = 100 - color_temp = 200 + color_temp = 5000 transition = 1 mocked_bulb.last_properties["power"] = "off" await hass.services.async_call( @@ -297,7 +297,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - { ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS: brightness, - ATTR_COLOR_TEMP: color_temp, + ATTR_COLOR_TEMP_KELVIN: color_temp, ATTR_FLASH: FLASH_LONG, ATTR_EFFECT: EFFECT_STOP, ATTR_TRANSITION: transition, @@ -315,7 +315,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - brightness / 255 * 100, duration=transition * 1000, light_type=LightType.Main ) mocked_bulb.async_set_color_temp.assert_called_once_with( - color_temperature_mired_to_kelvin(color_temp), + color_temp, duration=transition * 1000, light_type=LightType.Main, ) @@ -326,7 +326,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - # turn_on color_temp - flash short brightness = 100 - color_temp = 200 + color_temp = 5000 transition = 1 mocked_bulb.async_start_music.reset_mock() mocked_bulb.async_set_brightness.reset_mock() @@ -341,7 +341,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - { ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS: brightness, - ATTR_COLOR_TEMP: color_temp, + ATTR_COLOR_TEMP_KELVIN: color_temp, ATTR_FLASH: FLASH_SHORT, ATTR_EFFECT: EFFECT_STOP, ATTR_TRANSITION: transition, @@ -359,7 +359,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - brightness / 255 * 100, duration=transition * 1000, light_type=LightType.Main ) mocked_bulb.async_set_color_temp.assert_called_once_with( - color_temperature_mired_to_kelvin(color_temp), + color_temp, duration=transition * 1000, light_type=LightType.Main, ) @@ -690,7 +690,7 @@ async def test_state_already_set_avoid_ratelimit(hass: 
HomeAssistant) -> None: await hass.services.async_call( "light", SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP: 250}, + {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) assert mocked_bulb.async_set_hsv.mock_calls == [] @@ -706,7 +706,7 @@ async def test_state_already_set_avoid_ratelimit(hass: HomeAssistant) -> None: await hass.services.async_call( "light", SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP: 250}, + {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) assert mocked_bulb.async_set_hsv.mock_calls == [] @@ -719,7 +719,7 @@ async def test_state_already_set_avoid_ratelimit(hass: HomeAssistant) -> None: await hass.services.async_call( "light", SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP: 250}, + {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) assert mocked_bulb.async_set_hsv.mock_calls == [] @@ -931,9 +931,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -946,8 +944,8 @@ async def test_device_types( "color_mode": "color_temp", "supported_color_modes": ["color_temp", "hs", "rgb"], "hs_color": (26.812, 34.87), - "rgb_color": (255, 205, 166), - "xy_color": (0.421, 0.364), + "rgb_color": (255, 206, 166), + "xy_color": (0.42, 0.365), }, nightlight_entity_properties={ "supported_features": 0, @@ -959,12 +957,10 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "hs_color": (28.401, 100.0), - "rgb_color": (255, 120, 0), - "xy_color": (0.621, 0.367), + "rgb_color": (255, 121, 0), + "xy_color": (0.62, 0.368), "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -992,9 +988,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1028,9 +1022,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1065,9 +1057,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + 
"max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1102,9 +1092,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1138,9 +1126,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": model_specs["color_temp"]["min"], - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1173,12 +1159,8 @@ async def test_device_types( "effect_list": YEELIGHT_TEMP_ONLY_EFFECT_LIST, "effect": None, "supported_features": SUPPORT_YEELIGHT, - "min_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "min_color_temp_kelvin": model_specs["color_temp"]["min"], + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1191,8 +1173,8 @@ async def test_device_types( "color_mode": "color_temp", "supported_color_modes": ["color_temp"], "hs_color": (26.812, 34.87), - "rgb_color": (255, 205, 166), - "xy_color": (0.421, 0.364), + "rgb_color": (255, 206, 166), + "xy_color": (0.42, 0.365), }, nightlight_entity_properties={ "supported_features": 0, @@ -1204,12 +1186,8 @@ async def test_device_types( "effect_list": YEELIGHT_TEMP_ONLY_EFFECT_LIST, "effect": None, "supported_features": SUPPORT_YEELIGHT, - "min_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "min_color_temp_kelvin": model_specs["color_temp"]["min"], + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1217,17 +1195,15 @@ async def test_device_types( model_specs["color_temp"]["min"] ), "brightness": nl_br, - "color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), + "color_temp_kelvin": model_specs["color_temp"]["min"], "color_temp": color_temperature_kelvin_to_mired( model_specs["color_temp"]["min"] ), "color_mode": "color_temp", "supported_color_modes": ["color_temp"], - "hs_color": (28.391, 65.659), - "rgb_color": (255, 166, 87), - "xy_color": (0.526, 0.387), + "hs_color": (28.395, 65.723), + "rgb_color": (255, 167, 87), + "xy_color": (0.525, 0.388), }, ) @@ -1245,12 +1221,8 @@ async def test_device_types( "flowing": False, "night_light": True, "supported_features": SUPPORT_YEELIGHT, - "min_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - 
), - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "min_color_temp_kelvin": model_specs["color_temp"]["min"], + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1263,8 +1235,8 @@ async def test_device_types( "color_mode": "color_temp", "supported_color_modes": ["color_temp"], "hs_color": (26.812, 34.87), - "rgb_color": (255, 205, 166), - "xy_color": (0.421, 0.364), + "rgb_color": (255, 206, 166), + "xy_color": (0.42, 0.365), }, nightlight_entity_properties={ "supported_features": 0, @@ -1279,12 +1251,8 @@ async def test_device_types( "flowing": False, "night_light": True, "supported_features": SUPPORT_YEELIGHT, - "min_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) - ), + "min_color_temp_kelvin": model_specs["color_temp"]["min"], + "max_color_temp_kelvin": model_specs["color_temp"]["max"], "min_mireds": color_temperature_kelvin_to_mired( model_specs["color_temp"]["max"] ), @@ -1292,17 +1260,15 @@ async def test_device_types( model_specs["color_temp"]["min"] ), "brightness": nl_br, - "color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(model_specs["color_temp"]["min"]) - ), + "color_temp_kelvin": model_specs["color_temp"]["min"], "color_temp": color_temperature_kelvin_to_mired( model_specs["color_temp"]["min"] ), "color_mode": "color_temp", "supported_color_modes": ["color_temp"], - "hs_color": (28.391, 65.659), - "rgb_color": (255, 166, 87), - "xy_color": (0.526, 0.387), + "hs_color": (28.395, 65.723), + "rgb_color": (255, 167, 87), + "xy_color": (0.525, 0.388), }, ) # Background light - color mode CT @@ -1315,19 +1281,21 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": 1700, - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(6500) - ), + "max_color_temp_kelvin": 6500, "min_mireds": color_temperature_kelvin_to_mired(6500), "max_mireds": color_temperature_kelvin_to_mired(1700), "brightness": bg_bright, "color_temp_kelvin": bg_ct, "color_temp": bg_ct_kelvin, "color_mode": "color_temp", - "supported_color_modes": ["color_temp", "hs", "rgb"], + "supported_color_modes": [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.RGB, + ], "hs_color": (27.001, 19.243), - "rgb_color": (255, 228, 205), - "xy_color": (0.372, 0.35), + "rgb_color": (255, 228, 206), + "xy_color": (0.371, 0.349), }, name=f"{UNIQUE_FRIENDLY_NAME} Ambilight", entity_id=f"{ENTITY_LIGHT}_ambilight", @@ -1343,9 +1311,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": 1700, - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(6500) - ), + "max_color_temp_kelvin": 6500, "min_mireds": color_temperature_kelvin_to_mired(6500), "max_mireds": color_temperature_kelvin_to_mired(1700), "brightness": bg_bright, @@ -1355,7 +1321,11 @@ async def test_device_types( "color_temp": None, "color_temp_kelvin": None, "color_mode": "hs", - "supported_color_modes": ["color_temp", "hs", "rgb"], + "supported_color_modes": [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.RGB, + ], }, 
name=f"{UNIQUE_FRIENDLY_NAME} Ambilight", entity_id=f"{ENTITY_LIGHT}_ambilight", @@ -1371,9 +1341,7 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "min_color_temp_kelvin": 1700, - "max_color_temp_kelvin": color_temperature_mired_to_kelvin( - color_temperature_kelvin_to_mired(6500) - ), + "max_color_temp_kelvin": 6500, "min_mireds": color_temperature_kelvin_to_mired(6500), "max_mireds": color_temperature_kelvin_to_mired(1700), "brightness": bg_bright, @@ -1383,7 +1351,11 @@ async def test_device_types( "color_temp": None, "color_temp_kelvin": None, "color_mode": "rgb", - "supported_color_modes": ["color_temp", "hs", "rgb"], + "supported_color_modes": [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.RGB, + ], }, name=f"{UNIQUE_FRIENDLY_NAME} Ambilight", entity_id=f"{ENTITY_LIGHT}_ambilight", diff --git a/tests/components/yolink/test_config_flow.py b/tests/components/yolink/test_config_flow.py index d7ba09e4269..1dd71368d73 100644 --- a/tests/components/yolink/test_config_flow.py +++ b/tests/components/yolink/test_config_flow.py @@ -172,15 +172,7 @@ async def test_reauthentication( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/yolink/test_device_trigger.py b/tests/components/yolink/test_device_trigger.py index 6b48b32fd62..c1d3a8acda8 100644 --- a/tests/components/yolink/test_device_trigger.py +++ b/tests/components/yolink/test_device_trigger.py @@ -6,6 +6,7 @@ from yolink.const import ATTR_DEVICE_DIMMER, ATTR_DEVICE_SMART_REMOTER from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.yolink import DOMAIN, YOLINK_EVENT +from homeassistant.components.yolink.const import DEV_MODEL_FLEX_FOB_YS3604_UC from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -23,6 +24,7 @@ async def test_get_triggers( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, model=ATTR_DEVICE_SMART_REMOTER, + model_id=DEV_MODEL_FLEX_FOB_YS3604_UC, ) expected_triggers = [ @@ -99,6 +101,7 @@ async def test_get_triggers_exception( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, model=ATTR_DEVICE_DIMMER, + model_id=None, ) expected_triggers = [] @@ -123,6 +126,7 @@ async def test_if_fires_on_event( connections={connection}, identifiers={(DOMAIN, mac_address)}, model=ATTR_DEVICE_SMART_REMOTER, + model_id=DEV_MODEL_FLEX_FOB_YS3604_UC, ) assert await async_setup_component( diff --git a/tests/components/youtube/snapshots/test_diagnostics.ambr b/tests/components/youtube/snapshots/test_diagnostics.ambr index a938cb8daad..50dc2757e8c 100644 --- a/tests/components/youtube/snapshots/test_diagnostics.ambr +++ b/tests/components/youtube/snapshots/test_diagnostics.ambr @@ -12,6 +12,7 @@ }), 'subscriber_count': 2290000, 'title': 'Google for Developers', + 'total_views': 214141263, }), }) # --- diff --git a/tests/components/youtube/snapshots/test_sensor.ambr b/tests/components/youtube/snapshots/test_sensor.ambr index 
cddfa6f6a3d..f4549e89c8c 100644 --- a/tests/components/youtube/snapshots/test_sensor.ambr +++ b/tests/components/youtube/snapshots/test_sensor.ambr @@ -4,7 +4,7 @@ 'attributes': ReadOnlyDict({ 'entity_picture': 'https://i.ytimg.com/vi/wysukDrMdqU/maxresdefault.jpg', 'friendly_name': 'Google for Developers Latest upload', - 'published_at': datetime.datetime(2023, 5, 11, 0, 20, 46, tzinfo=datetime.timezone.utc), + 'published_at': datetime.datetime(2023, 5, 11, 0, 20, 46, tzinfo=TzInfo(UTC)), 'video_id': 'wysukDrMdqU', }), 'context': <ANY>, @@ -30,6 +30,21 @@ 'state': '2290000', }) # --- +# name: test_sensor.2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://yt3.ggpht.com/fca_HuJ99xUxflWdex0XViC3NfctBFreIl8y4i9z411asnGTWY-Ql3MeH_ybA4kNaOjY7kyA=s800-c-k-c0x00ffffff-no-rj', + 'friendly_name': 'Google for Developers Views', + 'unit_of_measurement': 'views', + }), + 'context': <ANY>, + 'entity_id': 'sensor.google_for_developers_views', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': '214141263', + }) +# --- # name: test_sensor_without_uploaded_video StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -58,3 +73,18 @@ 'state': '2290000', }) # --- +# name: test_sensor_without_uploaded_video.2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://yt3.ggpht.com/fca_HuJ99xUxflWdex0XViC3NfctBFreIl8y4i9z411asnGTWY-Ql3MeH_ybA4kNaOjY7kyA=s800-c-k-c0x00ffffff-no-rj', + 'friendly_name': 'Google for Developers Views', + 'unit_of_measurement': 'views', + }), + 'context': <ANY>, + 'entity_id': 'sensor.google_for_developers_views', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': '214141263', + }) +# --- diff --git a/tests/components/youtube/test_sensor.py b/tests/components/youtube/test_sensor.py index ae0c38306e4..e883347c8db 100644 --- a/tests/components/youtube/test_sensor.py +++ b/tests/components/youtube/test_sensor.py @@ -29,6 +29,9 @@ async def test_sensor( state = hass.states.get("sensor.google_for_developers_subscribers") assert state == snapshot + state = hass.states.get("sensor.google_for_developers_views") + assert state == snapshot + async def test_sensor_without_uploaded_video( hass: HomeAssistant, snapshot: SnapshotAssertion, setup_integration: ComponentSetup @@ -52,6 +55,9 @@ async def test_sensor_without_uploaded_video( state = hass.states.get("sensor.google_for_developers_subscribers") assert state == snapshot + state = hass.states.get("sensor.google_for_developers_views") + assert state == snapshot + async def test_sensor_updating( hass: HomeAssistant, setup_integration: ComponentSetup @@ -95,6 +101,9 @@ async def test_sensor_reauth_trigger( state = hass.states.get("sensor.google_for_developers_subscribers") assert state.state == "2290000" + state = hass.states.get("sensor.google_for_developers_views") + assert state.state == "214141263" + mock.set_thrown_exception(UnauthorizedError()) future = dt_util.utcnow() + timedelta(minutes=15) async_fire_time_changed(hass, future) @@ -121,6 +130,9 @@ async def test_sensor_unavailable( state = hass.states.get("sensor.google_for_developers_subscribers") assert state.state == "2290000" + state = hass.states.get("sensor.google_for_developers_views") + assert state.state == "214141263" + mock.set_thrown_exception(YouTubeBackendError()) future = dt_util.utcnow() + timedelta(minutes=15) async_fire_time_changed(hass, future) @@ -131,3 +143,6 @@ async def test_sensor_unavailable( state = hass.states.get("sensor.google_for_developers_subscribers") assert state.state ==
"unavailable" + + state = hass.states.get("sensor.google_for_developers_views") + assert state.state == "unavailable" diff --git a/tests/components/zeroconf/test_init.py b/tests/components/zeroconf/test_init.py index 0a552f37aa9..be78964f231 100644 --- a/tests/components/zeroconf/test_init.py +++ b/tests/components/zeroconf/test_init.py @@ -12,6 +12,7 @@ from zeroconf import ( ) from zeroconf.asyncio import AsyncServiceInfo +from homeassistant import config_entries from homeassistant.components import zeroconf from homeassistant.const import ( EVENT_COMPONENT_LOADED, @@ -22,8 +23,11 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.generated import zeroconf as zc_gen +from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.setup import ATTR_COMPONENT, async_setup_component +from tests.common import MockConfigEntry, MockModule, mock_integration + NON_UTF8_VALUE = b"ABCDEF\x8a" NON_ASCII_KEY = b"non-ascii-key\x8a" PROPERTIES = { @@ -303,7 +307,14 @@ async def test_zeroconf_match_macaddress(hass: HomeAssistant) -> None: assert len(mock_service_browser.mock_calls) == 1 assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "shelly" - assert mock_config_flow.mock_calls[0][2]["context"] == {"source": "zeroconf"} + assert mock_config_flow.mock_calls[0][2]["context"] == { + "discovery_key": DiscoveryKey( + domain="zeroconf", + key=("_http._tcp.local.", "Shelly108._http._tcp.local."), + version=1, + ), + "source": "zeroconf", + } @pytest.mark.usefixtures("mock_async_zeroconf") @@ -542,6 +553,11 @@ async def test_homekit_match_partial_space(hass: HomeAssistant) -> None: assert mock_config_flow.mock_calls[1][2]["context"] == { "source": "zeroconf", "alternative_domain": "lifx", + "discovery_key": DiscoveryKey( + domain="zeroconf", + key=("_hap._tcp.local.", "_name._hap._tcp.local."), + version=1, + ), } @@ -1381,3 +1397,261 @@ async def test_zeroconf_removed(hass: HomeAssistant) -> None: assert len(mock_service_browser.mock_calls) == 1 assert len(mock_async_progress_by_init_data_type.mock_calls) == 1 assert mock_async_abort.mock_calls[0][1][0] == "mock_flow_id" + + +@pytest.mark.usefixtures("mock_async_zeroconf") +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + ), + [ + # Matching discovery key + ( + "shelly", + { + "zeroconf": ( + DiscoveryKey( + domain="zeroconf", + key=("_http._tcp.local.", "Shelly108._http._tcp.local."), + version=1, + ), + ) + }, + ), + # Matching discovery key + ( + "shelly", + { + "zeroconf": ( + DiscoveryKey( + domain="zeroconf", + key=("_http._tcp.local.", "Shelly108._http._tcp.local."), + version=1, + ), + ), + "other": ( + DiscoveryKey( + domain="other", + key="blah", + version=1, + ), + ), + }, + ), + # Matching discovery key, other domain + # Note: Rediscovery is not currently restricted to the domain of the removed + # entry. Such a check can be added if needed. 
+ ( + "comp", + { + "zeroconf": ( + DiscoveryKey( + domain="zeroconf", + key=("_http._tcp.local.", "Shelly108._http._tcp.local."), + version=1, + ), + ) + }, + ), + ], +) +@pytest.mark.parametrize( + "entry_source", + [ + config_entries.SOURCE_IGNORE, + config_entries.SOURCE_USER, + config_entries.SOURCE_ZEROCONF, + ], +) +async def test_zeroconf_rediscover( + hass: HomeAssistant, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, +) -> None: + """Test we reinitiate flows when an ignored config entry is removed.""" + + def http_only_service_update_mock(zeroconf, services, handlers): + """Call service update handler.""" + handlers[0]( + zeroconf, + "_http._tcp.local.", + "Shelly108._http._tcp.local.", + ServiceStateChange.Added, + ) + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id="mock-unique-id", + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + with ( + patch.dict( + zc_gen.ZEROCONF, + { + "_http._tcp.local.": [ + { + "domain": "shelly", + "name": "shelly*", + "properties": {"macaddress": "ffaadd*"}, + } + ] + }, + clear=True, + ), + patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, + patch.object( + zeroconf, "AsyncServiceBrowser", side_effect=http_only_service_update_mock + ) as mock_service_browser, + patch( + "homeassistant.components.zeroconf.AsyncServiceInfo", + side_effect=get_zeroconf_info_mock("FFAADDCC11DD"), + ), + ): + assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + + expected_context = { + "discovery_key": DiscoveryKey( + domain="zeroconf", + key=("_http._tcp.local.", "Shelly108._http._tcp.local."), + version=1, + ), + "source": "zeroconf", + } + assert len(mock_service_browser.mock_calls) == 1 + assert len(mock_config_flow.mock_calls) == 1 + assert mock_config_flow.mock_calls[0][1][0] == "shelly" + assert mock_config_flow.mock_calls[0][2]["context"] == expected_context + + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_service_browser.mock_calls) == 1 + assert len(mock_config_flow.mock_calls) == 2 + assert mock_config_flow.mock_calls[1][1][0] == "shelly" + assert mock_config_flow.mock_calls[1][2]["context"] == expected_context + + +@pytest.mark.usefixtures("mock_async_zeroconf") +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + "entry_source", + "entry_unique_id", + ), + [ + # Discovery key from other domain + ( + "shelly", + { + "bluetooth": ( + DiscoveryKey( + domain="bluetooth", + key=("_http._tcp.local.", "Shelly108._http._tcp.local."), + version=1, + ), + ) + }, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + # Discovery key from the future + ( + "shelly", + { + "zeroconf": ( + DiscoveryKey( + domain="zeroconf", + key=("_http._tcp.local.", "Shelly108._http._tcp.local."), + version=2, + ), + ) + }, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + ], +) +async def test_zeroconf_rediscover_no_match( + hass: HomeAssistant, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, + entry_unique_id: str, +) -> None: + """Test we don't reinitiate flows when a non matching config entry is removed.""" + + def http_only_service_update_mock(zeroconf, services, handlers): + """Call service update handler.""" + handlers[0]( 
+ zeroconf, + "_http._tcp.local.", + "Shelly108._http._tcp.local.", + ServiceStateChange.Added, + ) + + hass.config.components.add(entry_domain) + mock_integration(hass, MockModule(entry_domain)) + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id=entry_unique_id, + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + with ( + patch.dict( + zc_gen.ZEROCONF, + { + "_http._tcp.local.": [ + { + "domain": "shelly", + "name": "shelly*", + "properties": {"macaddress": "ffaadd*"}, + } + ] + }, + clear=True, + ), + patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, + patch.object( + zeroconf, "AsyncServiceBrowser", side_effect=http_only_service_update_mock + ) as mock_service_browser, + patch( + "homeassistant.components.zeroconf.AsyncServiceInfo", + side_effect=get_zeroconf_info_mock("FFAADDCC11DD"), + ), + ): + assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + + expected_context = { + "discovery_key": DiscoveryKey( + domain="zeroconf", + key=("_http._tcp.local.", "Shelly108._http._tcp.local."), + version=1, + ), + "source": "zeroconf", + } + assert len(mock_service_browser.mock_calls) == 1 + assert len(mock_config_flow.mock_calls) == 1 + assert mock_config_flow.mock_calls[0][1][0] == "shelly" + assert mock_config_flow.mock_calls[0][2]["context"] == expected_context + + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_service_browser.mock_calls) == 1 + assert len(mock_config_flow.mock_calls) == 1 diff --git a/tests/components/zerproc/test_light.py b/tests/components/zerproc/test_light.py index 6e00cfbde4c..724414b5965 100644 --- a/tests/components/zerproc/test_light.py +++ b/tests/components/zerproc/test_light.py @@ -215,7 +215,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: ) await hass.async_block_till_done() - mock_set_color.assert_called_with(19, 17, 25) + mock_set_color.assert_called_with(20, 17, 25) with patch.object(mock_light, "set_color") as mock_set_color: await hass.services.async_call( @@ -226,7 +226,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: ) await hass.async_block_till_done() - mock_set_color.assert_called_with(220, 201, 110) + mock_set_color.assert_called_with(220, 202, 110) with patch.object( mock_light, @@ -246,7 +246,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: ) await hass.async_block_till_done() - mock_set_color.assert_called_with(75, 68, 37) + mock_set_color.assert_called_with(75, 69, 38) with patch.object(mock_light, "set_color") as mock_set_color: await hass.services.async_call( @@ -261,7 +261,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: ) await hass.async_block_till_done() - mock_set_color.assert_called_with(162, 200, 50) + mock_set_color.assert_called_with(163, 200, 50) async def test_light_turn_off(hass: HomeAssistant, mock_light) -> None: @@ -352,6 +352,6 @@ async def test_light_update(hass: HomeAssistant, mock_light) -> None: ATTR_COLOR_MODE: ColorMode.HS, ATTR_BRIGHTNESS: 220, ATTR_HS_COLOR: (261.429, 31.818), - ATTR_RGB_COLOR: (202, 173, 255), - ATTR_XY_COLOR: (0.291, 0.232), + ATTR_RGB_COLOR: (203, 174, 255), + ATTR_XY_COLOR: (0.292, 0.234), } diff --git a/tests/components/zha/conftest.py b/tests/components/zha/conftest.py index 
a9f4c51d75d..1b280ea499a 100644 --- a/tests/components/zha/conftest.py +++ b/tests/components/zha/conftest.py @@ -8,6 +8,7 @@ from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import warnings import pytest +import zhaquirks import zigpy from zigpy.application import ControllerApplication import zigpy.backups @@ -38,7 +39,7 @@ FIXTURE_GRP_NAME = "fixture group" COUNTER_NAMES = ["counter_1", "counter_2", "counter_3"] -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(scope="package", autouse=True) def globally_load_quirks(): """Load quirks automatically so that ZHA tests run deterministically in isolation. @@ -47,8 +48,6 @@ def globally_load_quirks(): run. """ - import zhaquirks # pylint: disable=import-outside-toplevel - zhaquirks.setup() diff --git a/tests/components/zha/data.py b/tests/components/zha/data.py index eb135c7e8fe..80a3df524cd 100644 --- a/tests/components/zha/data.py +++ b/tests/components/zha/data.py @@ -23,12 +23,6 @@ BASE_CUSTOM_CONFIGURATION = { "required": True, "default": True, }, - { - "type": "boolean", - "name": "always_prefer_xy_color_mode", - "required": True, - "default": True, - }, { "type": "boolean", "name": "group_members_assume_state", @@ -55,6 +49,12 @@ BASE_CUSTOM_CONFIGURATION = { "optional": True, "default": 21600, }, + { + "default": True, + "name": "enable_mains_startup_polling", + "required": True, + "type": "boolean", + }, ] }, "data": { @@ -62,9 +62,9 @@ BASE_CUSTOM_CONFIGURATION = { "enhanced_light_transition": True, "default_light_transition": 0, "light_transitioning_flag": True, - "always_prefer_xy_color_mode": True, "group_members_assume_state": False, "enable_identify_on_join": True, + "enable_mains_startup_polling": True, "consider_unavailable_mains": 7200, "consider_unavailable_battery": 21600, } @@ -94,12 +94,6 @@ CONFIG_WITH_ALARM_OPTIONS = { "required": True, "default": True, }, - { - "type": "boolean", - "name": "always_prefer_xy_color_mode", - "required": True, - "default": True, - }, { "type": "boolean", "name": "group_members_assume_state", @@ -126,6 +120,12 @@ CONFIG_WITH_ALARM_OPTIONS = { "optional": True, "default": 21600, }, + { + "default": True, + "name": "enable_mains_startup_polling", + "required": True, + "type": "boolean", + }, ], "zha_alarm_options": [ { @@ -154,9 +154,9 @@ CONFIG_WITH_ALARM_OPTIONS = { "enhanced_light_transition": True, "default_light_transition": 0, "light_transitioning_flag": True, - "always_prefer_xy_color_mode": True, "group_members_assume_state": False, "enable_identify_on_join": True, + "enable_mains_startup_polling": True, "consider_unavailable_mains": 7200, "consider_unavailable_battery": 21600, }, diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr index 67655aebc8c..08807f65d5d 100644 --- a/tests/components/zha/snapshots/test_diagnostics.ambr +++ b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -93,6 +93,8 @@ 'radio_type': 'ezsp', }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'zha', 'minor_version': 1, 'options': dict({ @@ -111,6 +113,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 4, @@ -162,19 +166,19 @@ '0x0500': dict({ 'attributes': dict({ '0x0000': dict({ - 'attribute': "ZCLAttributeDef(id=0x0000, name='zone_state', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0000, 
name='zone_state', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0x0001': dict({ - 'attribute': "ZCLAttributeDef(id=0x0001, name='zone_type', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0001, name='zone_type', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0x0002': dict({ - 'attribute': "ZCLAttributeDef(id=0x0002, name='zone_status', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0002, name='zone_status', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0x0010': dict({ - 'attribute': "ZCLAttributeDef(id=0x0010, name='cie_addr', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0010, name='cie_addr', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': list([ 50, 79, @@ -187,15 +191,15 @@ ]), }), '0x0011': dict({ - 'attribute': "ZCLAttributeDef(id=0x0011, name='zone_id', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0011, name='zone_id', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0x0012': dict({ - 'attribute': "ZCLAttributeDef(id=0x0012, name='num_zone_sensitivity_levels_supported', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0012, name='num_zone_sensitivity_levels_supported', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", 'value': None, }), '0x0013': dict({ - 'attribute': "ZCLAttributeDef(id=0x0013, name='current_zone_sensitivity_level', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0013, name='current_zone_sensitivity_level', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", 'value': None, }), }), @@ -208,11 +212,11 @@ '0x0501': dict({ 'attributes': dict({ '0xfffd': dict({ - 'attribute': "ZCLAttributeDef(id=0xFFFD, name='cluster_revision', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0xFFFD, name='cluster_revision', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0xfffe': dict({ - 'attribute': "ZCLAttributeDef(id=0xFFFE, name='reporting_status', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0xFFFE, name='reporting_status', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", 'value': None, }), }), diff --git a/tests/components/zha/test_alarm_control_panel.py b/tests/components/zha/test_alarm_control_panel.py index 3473a9b00ad..609438cd725 100644 --- a/tests/components/zha/test_alarm_control_panel.py +++ b/tests/components/zha/test_alarm_control_panel.py @@ -8,22 +8,17 @@ from zigpy.zcl import Cluster from zigpy.zcl.clusters import security import zigpy.zcl.foundation as zcl_f -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.components.zha.helpers import ( ZHADeviceProxy, ZHAGatewayProxy, get_zha_gateway, get_zha_gateway_proxy, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_ALARM_ARMED_AWAY, - 
STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from .common import find_entity_id @@ -79,7 +74,7 @@ async def test_alarm_control_panel( cluster = zigpy_device.endpoints[1].ias_ace assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED # arm_away from HA cluster.client_command.reset_mock() @@ -90,7 +85,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( @@ -113,7 +108,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY cluster.client_command.reset_mock() await hass.services.async_call( ALARM_DOMAIN, @@ -128,7 +123,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED assert cluster.client_command.call_count == 4 assert cluster.client_command.await_count == 4 assert cluster.client_command.call_args == call( @@ -151,7 +146,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( @@ -171,7 +166,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_NIGHT assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( @@ -190,7 +185,7 @@ async def test_alarm_control_panel( "cluster_command", 1, 0, [security.IasAce.ArmMode.Arm_All_Zones, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -200,7 +195,7 @@ async def test_alarm_control_panel( "cluster_command", 1, 0, [security.IasAce.ArmMode.Arm_Day_Home_Only, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -210,33 +205,33 @@ async def test_alarm_control_panel( "cluster_command", 1, 0, [security.IasAce.ArmMode.Arm_Night_Sleep_Only, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT + assert hass.states.get(entity_id).state == 
AlarmControlPanelState.ARMED_NIGHT # disarm from panel with bad code cluster.listener_event( "cluster_command", 1, 0, [security.IasAce.ArmMode.Disarm, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_NIGHT # disarm from panel with bad code for 2nd time trips alarm cluster.listener_event( "cluster_command", 1, 0, [security.IasAce.ArmMode.Disarm, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # disarm from panel with good code cluster.listener_event( "cluster_command", 1, 0, [security.IasAce.ArmMode.Disarm, "4321", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED # panic from panel cluster.listener_event("cluster_command", 1, 4, []) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -244,7 +239,7 @@ async def test_alarm_control_panel( # fire from panel cluster.listener_event("cluster_command", 1, 3, []) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -252,7 +247,7 @@ async def test_alarm_control_panel( # emergency from panel cluster.listener_event("cluster_command", 1, 2, []) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -264,7 +259,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED assert cluster.client_command.call_count == 1 assert cluster.client_command.await_count == 1 assert cluster.client_command.call_args == call( @@ -290,7 +285,7 @@ async def reset_alarm_panel(hass: HomeAssistant, cluster: Cluster, entity_id: st blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( diff --git a/tests/components/zha/test_binary_sensor.py b/tests/components/zha/test_binary_sensor.py index 419823b3b52..a9765a1b547 100644 --- a/tests/components/zha/test_binary_sensor.py +++ b/tests/components/zha/test_binary_sensor.py @@ -14,6 +14,7 @@ from homeassistant.components.zha.helpers import ( ) from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @@ -37,6 +38,7 @@ def binary_sensor_platform_only(): 
async def test_binary_sensor( hass: HomeAssistant, + entity_registry: er.EntityRegistry, setup_zha, zigpy_device_mock, ) -> None: @@ -77,3 +79,20 @@ async def test_binary_sensor( hass, cluster, {general.OnOff.AttributeDefs.on_off.id: OFF} ) assert hass.states.get(entity_id).state == STATE_OFF + + # test enable / disable sync w/ ZHA library + entity_entry = entity_registry.async_get(entity_id) + entity_key = (Platform.BINARY_SENSOR, entity_entry.unique_id) + assert zha_device_proxy.device.platform_entities.get(entity_key).enabled + + entity_registry.async_update_entity( + entity_id=entity_id, disabled_by=er.RegistryEntryDisabler.USER + ) + await hass.async_block_till_done() + + assert not zha_device_proxy.device.platform_entities.get(entity_key).enabled + + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done() + + assert zha_device_proxy.device.platform_entities.get(entity_key).enabled diff --git a/tests/components/zha/test_button.py b/tests/components/zha/test_button.py index 574805db5f6..33ed004312b 100644 --- a/tests/components/zha/test_button.py +++ b/tests/components/zha/test_button.py @@ -9,7 +9,11 @@ from zigpy.profiles import zha from zigpy.zcl.clusters import general import zigpy.zcl.foundation as zcl_f -from homeassistant.components.button import DOMAIN, SERVICE_PRESS, ButtonDeviceClass +from homeassistant.components.button import ( + DOMAIN as BUTTON_DOMAIN, + SERVICE_PRESS, + ButtonDeviceClass, +) from homeassistant.components.zha.helpers import ( ZHADeviceProxy, ZHAGatewayProxy, @@ -97,7 +101,7 @@ async def test_button( return_value=[0x00, zcl_f.Status.SUCCESS], ): await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True, diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index af6f2d9af0c..e0229ebe049 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -21,7 +21,7 @@ import zigpy.types from homeassistant import config_entries from homeassistant.components import ssdp, usb, zeroconf -from homeassistant.components.hassio import AddonState +from homeassistant.components.hassio import AddonError, AddonState from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL from homeassistant.components.zha import config_flow, radio_manager from homeassistant.components.zha.const import ( @@ -121,6 +121,13 @@ def backup(make_backup): return make_backup() +@pytest.fixture(autouse=True) +def mock_supervisor_client( + supervisor_client: AsyncMock, addon_store_info: AsyncMock +) -> None: + """Mock supervisor client.""" + + def mock_detect_radio_type( radio_type: RadioType = RadioType.ezsp, ret: ProbeResult = ProbeResult.RADIO_TYPE_DETECTED, @@ -147,104 +154,180 @@ def com_port(device="/dev/ttyUSB1234") -> ListPortInfo: return port +@pytest.mark.parametrize( + ("entry_name", "unique_id", "radio_type", "service_info"), + [ + ( + # TubesZB, old ESPHome devices (ZNP) + "tubeszb-cc2652-poe", + "tubeszb-cc2652-poe", + RadioType.znp, + zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + hostname="tubeszb-cc2652-poe.local.", + name="tubeszb-cc2652-poe._esphomelib._tcp.local.", + port=6053, # the ESPHome API port is remapped to 6638 + type="_esphomelib._tcp.local.", + properties={ + "project_version": "3.0", + "project_name": "tubezb.cc2652-poe", + "network": "ethernet", + "board": "esp32-poe", + 
"platform": "ESP32", + "maс": "8c4b14c33c24", + "version": "2023.12.8", + }, + ), + ), + ( + # TubesZB, old ESPHome device (EFR32) + "tubeszb-efr32-poe", + "tubeszb-efr32-poe", + RadioType.ezsp, + zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + hostname="tubeszb-efr32-poe.local.", + name="tubeszb-efr32-poe._esphomelib._tcp.local.", + port=6053, # the ESPHome API port is remapped to 6638 + type="_esphomelib._tcp.local.", + properties={ + "project_version": "3.0", + "project_name": "tubezb.efr32-poe", + "network": "ethernet", + "board": "esp32-poe", + "platform": "ESP32", + "maс": "8c4b14c33c24", + "version": "2023.12.8", + }, + ), + ), + ( + # TubesZB, newer devices + "TubeZB", + "tubeszb-cc2652-poe", + RadioType.znp, + zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + hostname="tubeszb-cc2652-poe.local.", + name="tubeszb-cc2652-poe._tubeszb._tcp.local.", + port=6638, + properties={ + "name": "TubeZB", + "radio_type": "znp", + "version": "1.0", + "baud_rate": "115200", + "data_flow_control": "software", + }, + type="_tubeszb._tcp.local.", + ), + ), + ( + # Expected format for all new devices + "Some Zigbee Gateway (12345)", + "aabbccddeeff", + RadioType.znp, + zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + hostname="some-zigbee-gateway-12345.local.", + name="Some Zigbee Gateway (12345)._zigbee-coordinator._tcp.local.", + port=6638, + properties={"radio_type": "znp", "serial_number": "aabbccddeeff"}, + type="_zigbee-coordinator._tcp.local.", + ), + ), + ], +) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) -async def test_zeroconf_discovery_znp(hass: HomeAssistant) -> None: +@patch(f"bellows.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) +async def test_zeroconf_discovery( + entry_name: str, + unique_id: str, + radio_type: RadioType, + service_info: zeroconf.ZeroconfServiceInfo, + hass: HomeAssistant, +) -> None: """Test zeroconf flow -- radio detected.""" - service_info = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("192.168.1.200"), - ip_addresses=[ip_address("192.168.1.200")], - hostname="tube._tube_zb_gw._tcp.local.", - name="tube", - port=6053, - properties={"name": "tube_123456"}, - type="mock_type", - ) - flow = await hass.config_entries.flow.async_init( + result_init = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info ) - assert flow["step_id"] == "confirm" - - # Confirm discovery - result1 = await hass.config_entries.flow.async_configure( - flow["flow_id"], user_input={} - ) - assert result1["step_id"] == "manual_port_config" + assert result_init["step_id"] == "confirm" # Confirm port settings - result2 = await hass.config_entries.flow.async_configure( - result1["flow_id"], user_input={} + result_confirm = await hass.config_entries.flow.async_configure( + result_init["flow_id"], user_input={} ) - assert result2["type"] is FlowResultType.MENU - assert result2["step_id"] == "choose_formation_strategy" + assert result_confirm["type"] is FlowResultType.MENU + assert result_confirm["step_id"] == "choose_formation_strategy" - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], + result_form = await hass.config_entries.flow.async_configure( + result_confirm["flow_id"], 
user_input={"next_step_id": config_flow.FORMATION_REUSE_SETTINGS}, ) await hass.async_block_till_done() - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "socket://192.168.1.200:6638" - assert result3["data"] == { + assert result_form["type"] is FlowResultType.CREATE_ENTRY + assert result_form["title"] == entry_name + assert result_form["context"]["unique_id"] == unique_id + assert result_form["data"] == { CONF_DEVICE: { CONF_BAUDRATE: 115200, CONF_FLOW_CONTROL: None, CONF_DEVICE_PATH: "socket://192.168.1.200:6638", }, - CONF_RADIO_TYPE: "znp", + CONF_RADIO_TYPE: radio_type.name, } @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @patch(f"zigpy_zigate.{PROBE_FUNCTION_PATH}") -async def test_zigate_via_zeroconf(setup_entry_mock, hass: HomeAssistant) -> None: +async def test_legacy_zeroconf_discovery_zigate( + setup_entry_mock, hass: HomeAssistant +) -> None: """Test zeroconf flow -- zigate radio detected.""" service_info = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.200"), ip_addresses=[ip_address("192.168.1.200")], - hostname="_zigate-zigbee-gateway._tcp.local.", - name="any", + hostname="_zigate-zigbee-gateway.local.", + name="some name._zigate-zigbee-gateway._tcp.local.", port=1234, - properties={"radio_type": "zigate"}, + properties={}, type="mock_type", ) - flow = await hass.config_entries.flow.async_init( + result_init = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info ) - assert flow["step_id"] == "confirm" - - # Confirm discovery - result1 = await hass.config_entries.flow.async_configure( - flow["flow_id"], user_input={} - ) - assert result1["step_id"] == "manual_port_config" + assert result_init["step_id"] == "confirm" # Confirm the radio is deprecated - result2 = await hass.config_entries.flow.async_configure( - flow["flow_id"], user_input={} + result_confirm_deprecated = await hass.config_entries.flow.async_configure( + result_init["flow_id"], user_input={} ) - assert result2["step_id"] == "verify_radio" - assert "ZiGate" in result2["description_placeholders"]["name"] + assert result_confirm_deprecated["step_id"] == "verify_radio" + assert "ZiGate" in result_confirm_deprecated["description_placeholders"]["name"] # Confirm port settings - result3 = await hass.config_entries.flow.async_configure( - result1["flow_id"], user_input={} + result_confirm = await hass.config_entries.flow.async_configure( + result_confirm_deprecated["flow_id"], user_input={} ) - assert result3["type"] is FlowResultType.MENU - assert result3["step_id"] == "choose_formation_strategy" + assert result_confirm["type"] is FlowResultType.MENU + assert result_confirm["step_id"] == "choose_formation_strategy" - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], + result_form = await hass.config_entries.flow.async_configure( + result_confirm["flow_id"], user_input={"next_step_id": config_flow.FORMATION_REUSE_SETTINGS}, ) await hass.async_block_till_done() - assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["title"] == "socket://192.168.1.200:1234" - assert result4["data"] == { + assert result_form["type"] is FlowResultType.CREATE_ENTRY + assert result_form["title"] == "some name" + assert result_form["data"] == { CONF_DEVICE: { CONF_DEVICE_PATH: "socket://192.168.1.200:1234", CONF_BAUDRATE: 115200, @@ -254,75 +337,50 @@ async def test_zigate_via_zeroconf(setup_entry_mock, hass: HomeAssistant) -> Non } 
-@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) -@patch(f"bellows.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) -async def test_efr32_via_zeroconf(hass: HomeAssistant) -> None: - """Test zeroconf flow -- efr32 radio detected.""" +async def test_zeroconf_discovery_bad_payload(hass: HomeAssistant) -> None: + """Test zeroconf flow with a bad payload.""" service_info = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.200"), ip_addresses=[ip_address("192.168.1.200")], - hostname="efr32._esphomelib._tcp.local.", - name="efr32", + hostname="some.hostname", + name="any", port=1234, - properties={}, - type="mock_type", + properties={"radio_type": "some bogus radio"}, + type="_zigbee-coordinator._tcp.local.", ) - flow = await hass.config_entries.flow.async_init( + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info ) - assert flow["step_id"] == "confirm" - - # Confirm discovery - result1 = await hass.config_entries.flow.async_configure( - flow["flow_id"], user_input={} - ) - assert result1["step_id"] == "manual_port_config" - - # Confirm port settings - result2 = await hass.config_entries.flow.async_configure( - result1["flow_id"], user_input={} - ) - - assert result2["type"] is FlowResultType.MENU - assert result2["step_id"] == "choose_formation_strategy" - - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={"next_step_id": config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "socket://192.168.1.200:1234" - assert result3["data"] == { - CONF_DEVICE: { - CONF_DEVICE_PATH: "socket://192.168.1.200:1234", - CONF_BAUDRATE: 115200, - CONF_FLOW_CONTROL: None, - }, - CONF_RADIO_TYPE: "ezsp", - } + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "invalid_zeroconf_data" @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) -async def test_discovery_via_zeroconf_ip_change_ignored(hass: HomeAssistant) -> None: +async def test_legacy_zeroconf_discovery_ip_change_ignored(hass: HomeAssistant) -> None: """Test zeroconf flow that was ignored gets updated.""" + entry = MockConfigEntry( domain=DOMAIN, - unique_id="tube_zb_gw_cc2652p2_poe", + unique_id="tubeszb-cc2652-poe", source=config_entries.SOURCE_IGNORE, ) entry.add_to_hass(hass) service_info = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("192.168.1.22"), - ip_addresses=[ip_address("192.168.1.22")], - hostname="tube_zb_gw_cc2652p2_poe.local.", - name="mock_name", - port=6053, - properties={"address": "tube_zb_gw_cc2652p2_poe.local"}, - type="mock_type", + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + hostname="tubeszb-cc2652-poe.local.", + name="tubeszb-cc2652-poe._tubeszb._tcp.local.", + port=6638, + properties={ + "name": "TubeZB", + "radio_type": "znp", + "version": "1.0", + "baud_rate": "115200", + "data_flow_control": "software", + }, + type="_tubeszb._tcp.local.", ) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info @@ -331,11 +389,13 @@ async def test_discovery_via_zeroconf_ip_change_ignored(hass: HomeAssistant) -> assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" assert 
entry.data[CONF_DEVICE] == { - CONF_DEVICE_PATH: "socket://192.168.1.22:6638", + CONF_DEVICE_PATH: "socket://192.168.1.200:6638", } -async def test_discovery_confirm_final_abort_if_entries(hass: HomeAssistant) -> None: +async def test_legacy_zeroconf_discovery_confirm_final_abort_if_entries( + hass: HomeAssistant, +) -> None: """Test discovery aborts if ZHA was set up after the confirmation dialog is shown.""" service_info = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.200"), @@ -670,7 +730,7 @@ async def test_discovery_via_usb_zha_ignored_updates(hass: HomeAssistant) -> Non @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) -async def test_discovery_already_setup(hass: HomeAssistant) -> None: +async def test_legacy_zeroconf_discovery_already_setup(hass: HomeAssistant) -> None: """Test zeroconf flow -- radio detected.""" service_info = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.200"), @@ -772,6 +832,7 @@ async def test_user_flow_show_form(hass: HomeAssistant) -> None: assert result["step_id"] == "choose_serial_port" +@pytest.mark.usefixtures("addon_not_installed") @patch("serial.tools.list_ports.comports", MagicMock(return_value=[])) async def test_user_flow_show_manual(hass: HomeAssistant) -> None: """Test user flow manual entry when no comport detected.""" @@ -1870,10 +1931,23 @@ async def test_config_flow_port_yellow_port_name(hass: HomeAssistant) -> None: ) +async def test_config_flow_ports_no_hassio(hass: HomeAssistant) -> None: + """Test config flow serial port name when this is not a hassio install.""" + + with ( + patch("homeassistant.components.zha.config_flow.is_hassio", return_value=False), + patch("serial.tools.list_ports.comports", MagicMock(return_value=[])), + ): + ports = await config_flow.list_serial_ports(hass) + + assert ports == [] + + async def test_config_flow_port_multiprotocol_port_name(hass: HomeAssistant) -> None: """Test config flow serial port name for multiprotocol add-on.""" with ( + patch("homeassistant.components.zha.config_flow.is_hassio", return_value=True), patch( "homeassistant.components.hassio.addon_manager.AddonManager.async_get_addon_info" ) as async_get_addon_info, @@ -1881,16 +1955,28 @@ async def test_config_flow_port_multiprotocol_port_name(hass: HomeAssistant) -> ): async_get_addon_info.return_value.state = AddonState.RUNNING async_get_addon_info.return_value.hostname = "core-silabs-multiprotocol" + ports = await config_flow.list_serial_ports(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={CONF_SOURCE: SOURCE_USER}, - ) + assert len(ports) == 1 + assert ports[0].description == "Multiprotocol add-on" + assert ports[0].manufacturer == "Nabu Casa" + assert ports[0].device == "socket://core-silabs-multiprotocol:9999" - assert ( - result["data_schema"].schema["path"].container[0] - == "socket://core-silabs-multiprotocol:9999 - Multiprotocol add-on - Nabu Casa" - ) + +async def test_config_flow_port_no_multiprotocol(hass: HomeAssistant) -> None: + """Test config flow serial port listing when addon info fails to load.""" + + with ( + patch("homeassistant.components.zha.config_flow.is_hassio", return_value=True), + patch( + "homeassistant.components.hassio.addon_manager.AddonManager.async_get_addon_info", + side_effect=AddonError, + ), + patch("serial.tools.list_ports.comports", MagicMock(return_value=[])), + ): + ports = await config_flow.list_serial_ports(hass) + + assert 
ports == [] @patch("serial.tools.list_ports.comports", MagicMock(return_value=[com_port()])) diff --git a/tests/components/zha/test_cover.py b/tests/components/zha/test_cover.py index afef2aab70f..e5d588aa1bf 100644 --- a/tests/components/zha/test_cover.py +++ b/tests/components/zha/test_cover.py @@ -20,6 +20,7 @@ from homeassistant.components.cover import ( SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, + CoverState, ) from homeassistant.components.zha.helpers import ( ZHADeviceProxy, @@ -27,13 +28,7 @@ from homeassistant.components.zha.helpers import ( get_zha_gateway, get_zha_gateway_proxy, ) -from homeassistant.const import ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - Platform, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_component import async_update_entity @@ -118,7 +113,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: await async_update_entity(hass, entity_id) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 58 @@ -126,25 +121,25 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 100} ) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED # test to see if it opens await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 0} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # test that the state remains after tilting to 100% await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # test to see the state remains after tilting to 0% await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # close from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): @@ -157,13 +152,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][2].command.name == WCCmds.down_close.name assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 100} ) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): await hass.services.async_call( @@ -182,13 +177,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 100 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == 
CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} ) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED # open from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): @@ -201,13 +196,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][2].command.name == WCCmds.up_open.name assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 0} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): await hass.services.async_call( @@ -226,13 +221,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 0 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # set position UI with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): @@ -252,19 +247,19 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 53 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 35} ) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 53} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): await hass.services.async_call( @@ -283,19 +278,19 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 53 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 35} ) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 53} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # stop from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x2, zcl_f.Status.SUCCESS]): @@ -358,11 +353,11 @@ async def test_cover_failures( # test that the state has changed from unavailable to closed await 
send_attributes_report(hass, cluster, {0: 0, 8: 100, 1: 1}) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED # test to see if it opens await send_attributes_report(hass, cluster, {0: 1, 8: 0, 1: 100}) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # close from UI with patch( diff --git a/tests/components/zha/test_diagnostics.py b/tests/components/zha/test_diagnostics.py index ed3f83c0c36..0e78a9a1b5b 100644 --- a/tests/components/zha/test_diagnostics.py +++ b/tests/components/zha/test_diagnostics.py @@ -69,10 +69,11 @@ async def test_diagnostics_for_config_entry( scan = {c: c for c in range(11, 26 + 1)} - with patch.object(gateway.application_controller, "energy_scan", return_value=scan): - diagnostics_data = await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) + gateway.application_controller.energy_scan.side_effect = None + gateway.application_controller.energy_scan.return_value = scan + diagnostics_data = await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) assert diagnostics_data == snapshot( exclude=props("created_at", "modified_at", "entry_id", "versions") diff --git a/tests/components/zha/test_helpers.py b/tests/components/zha/test_helpers.py index 13c03c17cf7..f8a809df51e 100644 --- a/tests/components/zha/test_helpers.py +++ b/tests/components/zha/test_helpers.py @@ -5,16 +5,23 @@ from typing import Any import pytest import voluptuous_serialize +from zigpy.application import ControllerApplication from zigpy.types.basic import uint16_t from zigpy.zcl.clusters import lighting +import homeassistant.components.zha.const as zha_const from homeassistant.components.zha.helpers import ( cluster_command_schema_to_vol_schema, convert_to_zcl_values, + create_zha_config, exclude_none_values, + get_zha_data, ) from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry _LOGGER = logging.getLogger(__name__) @@ -60,16 +67,14 @@ async def test_zcl_schema_conversions(hass: HomeAssistant) -> None: "required": True, }, { - "type": "integer", - "valueMin": 0, - "valueMax": 255, + "type": "multi_select", + "options": ["Execute if off present"], "name": "options_mask", "optional": True, }, { - "type": "integer", - "valueMin": 0, - "valueMax": 255, + "type": "multi_select", + "options": ["Execute if off"], "name": "options_override", "optional": True, }, @@ -177,5 +182,37 @@ def test_exclude_none_values( result = exclude_none_values(obj) assert result == expected_output - for key in expected_output: - assert expected_output[key] == obj[key] + for key, value in expected_output.items(): + assert value == obj[key] + + +async def test_create_zha_config_remove_unused( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_zigpy_connect: ControllerApplication, +) -> None: + """Test creating ZHA config data with unused keys.""" + config_entry.add_to_hass(hass) + + options = config_entry.options.copy() + options["custom_configuration"]["zha_options"]["some_random_key"] = "a value" + + hass.config_entries.async_update_entry(config_entry, options=options) + + assert ( + config_entry.options["custom_configuration"]["zha_options"]["some_random_key"] + == "a value" + ) + + status = await async_setup_component( + hass, + zha_const.DOMAIN, + {zha_const.DOMAIN: 
{zha_const.CONF_ENABLE_QUIRKS: False}}, + ) + assert status is True + await hass.async_block_till_done() + + ha_zha_data = get_zha_data(hass) + + # Does not error out + create_zha_config(hass, ha_zha_data) diff --git a/tests/components/zha/test_init.py b/tests/components/zha/test_init.py index 00fc3afd0ea..887284919da 100644 --- a/tests/components/zha/test_init.py +++ b/tests/components/zha/test_init.py @@ -252,7 +252,7 @@ async def test_zha_retry_unique_ids( ) as mock_connect: with patch( "homeassistant.config_entries.async_call_later", - lambda hass, delay, action: async_call_later(hass, 0, action), + lambda hass, delay, action: async_call_later(hass, 0.01, action), ): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/zha/test_lock.py b/tests/components/zha/test_lock.py index 4e1d092af9b..dd4afb0ae14 100644 --- a/tests/components/zha/test_lock.py +++ b/tests/components/zha/test_lock.py @@ -8,14 +8,14 @@ from zigpy.zcl import Cluster from zigpy.zcl.clusters import closures, general import zigpy.zcl.foundation as zcl_f -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState from homeassistant.components.zha.helpers import ( ZHADeviceProxy, ZHAGatewayProxy, get_zha_gateway, get_zha_gateway_proxy, ) -from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from .common import find_entity_id, send_attributes_report @@ -65,7 +65,7 @@ async def test_lock(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: cluster = zigpy_device.endpoints[1].door_lock assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNLOCKED + assert hass.states.get(entity_id).state == LockState.UNLOCKED # set state to locked await send_attributes_report( @@ -73,7 +73,7 @@ async def test_lock(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: cluster, {closures.DoorLock.AttributeDefs.lock_state.id: closures.LockState.Locked}, ) - assert hass.states.get(entity_id).state == STATE_LOCKED + assert hass.states.get(entity_id).state == LockState.LOCKED # set state to unlocked await send_attributes_report( @@ -81,7 +81,7 @@ async def test_lock(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: cluster, {closures.DoorLock.AttributeDefs.lock_state.id: closures.LockState.Unlocked}, ) - assert hass.states.get(entity_id).state == STATE_UNLOCKED + assert hass.states.get(entity_id).state == LockState.UNLOCKED # lock from HA await async_lock(hass, cluster, entity_id) diff --git a/tests/components/zha/test_update.py b/tests/components/zha/test_update.py index 6a1a19b407f..c8cbc407106 100644 --- a/tests/components/zha/test_update.py +++ b/tests/components/zha/test_update.py @@ -1,16 +1,20 @@ """Test ZHA firmware updates.""" -from unittest.mock import AsyncMock, call, patch +from unittest.mock import AsyncMock, PropertyMock, call, patch import pytest +from zha.application.platforms.update import ( + FirmwareUpdateEntity as ZhaFirmwareUpdateEntity, +) from zigpy.exceptions import DeliveryError -from zigpy.ota import OtaImageWithMetadata +from zigpy.ota import OtaImagesResult, OtaImageWithMetadata import zigpy.ota.image as firmware from zigpy.ota.providers import BaseOtaImageMetadata from zigpy.profiles import zha import zigpy.types as t from zigpy.zcl import foundation from zigpy.zcl.clusters import general 
+import zigpy.zdo.types as zdo_t from homeassistant.components.homeassistant import ( DOMAIN as HA_DOMAIN, @@ -20,6 +24,7 @@ from homeassistant.components.update import ( ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, + ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, ) @@ -29,6 +34,10 @@ from homeassistant.components.zha.helpers import ( get_zha_gateway, get_zha_gateway_proxy, ) +from homeassistant.components.zha.update import ( + OTA_MESSAGE_BATTERY_POWERED, + OTA_MESSAGE_RELIABILITY, +) from homeassistant.const import ( ATTR_ENTITY_ID, STATE_OFF, @@ -43,6 +52,8 @@ from homeassistant.setup import async_setup_component from .common import find_entity_id, update_attribute_cache from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from tests.typing import WebSocketGenerator + @pytest.fixture(autouse=True) def update_platform_only(): @@ -78,7 +89,26 @@ async def setup_test_data( SIG_EP_PROFILE: zha.PROFILE_ID, } }, - node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + node_descriptor=zdo_t.NodeDescriptor( + logical_type=zdo_t.LogicalType.Router, + complex_descriptor_available=0, + user_descriptor_available=0, + reserved=0, + aps_flags=0, + frequency_band=zdo_t.NodeDescriptor.FrequencyBand.Freq2400MHz, + mac_capability_flags=( + zdo_t.NodeDescriptor.MACCapabilityFlags.FullFunctionDevice + | zdo_t.NodeDescriptor.MACCapabilityFlags.MainsPowered + | zdo_t.NodeDescriptor.MACCapabilityFlags.RxOnWhenIdle + | zdo_t.NodeDescriptor.MACCapabilityFlags.AllocateAddress + ), + manufacturer_code=4107, + maximum_buffer_size=82, + maximum_incoming_transfer_size=128, + server_mask=11264, + maximum_outgoing_transfer_size=128, + descriptor_capability_field=zdo_t.NodeDescriptor.DescriptorCapability.NONE, + ).serialize(), ) gateway.get_or_create_device(zigpy_device) @@ -119,8 +149,11 @@ async def setup_test_data( ), ) - cluster.endpoint.device.application.ota.get_ota_image = AsyncMock( - return_value=None if file_not_found else fw_image + cluster.endpoint.device.application.ota.get_ota_images = AsyncMock( + return_value=OtaImagesResult( + upgrades=() if file_not_found else (fw_image,), + downgrades=(), + ) ) zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) zha_device_proxy.device.async_update_sw_build_id(installed_fw_version) @@ -164,7 +197,8 @@ async def test_firmware_update_notification_from_zigpy( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) @@ -223,7 +257,8 @@ async def test_firmware_update_notification_from_service_call( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" @@ -264,7 +299,7 @@ async def test_firmware_update_success( ) -> None: """Test ZHA update platform - firmware update success.""" await setup_zha() - zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( + zha_device, ota_cluster, fw_image, installed_fw_version = await setup_test_data( hass, zigpy_device_mock ) @@ -276,7 +311,7 @@ async 
def test_firmware_update_success( assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification - await cluster._handle_query_next_image( + await ota_cluster._handle_query_next_image( foundation.ZCLHeader.cluster( tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id ), @@ -293,19 +328,20 @@ async def test_firmware_update_success( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) - async def endpoint_reply(cluster_id, tsn, data, command_id): - if cluster_id == general.Ota.cluster_id: - hdr, cmd = cluster.deserialize(data) + async def endpoint_reply(cluster, sequence, data, **kwargs): + if cluster == general.Ota.cluster_id: + hdr, cmd = ota_cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -325,7 +361,7 @@ async def test_firmware_update_success( zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -352,7 +388,7 @@ async def test_firmware_update_success( zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -381,7 +417,8 @@ async def test_firmware_update_success( assert ( attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" ) - assert attrs[ATTR_IN_PROGRESS] == 58 + assert attrs[ATTR_IN_PROGRESS] is True + assert attrs[ATTR_UPDATE_PERCENTAGE] == pytest.approx(100 * 40 / 70) assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" @@ -390,7 +427,7 @@ async def test_firmware_update_success( zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.upgrade_end.name, status=foundation.Status.SUCCESS, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -409,7 +446,7 @@ async def test_firmware_update_success( assert cmd.upgrade_time == 0 def read_new_fw_version(*args, **kwargs): - cluster.update_attribute( + ota_cluster.update_attribute( attrid=general.Ota.AttributeDefs.current_file_version.id, value=fw_image.firmware.header.file_version, ) @@ -419,9 +456,9 @@ async def test_firmware_update_success( ) }, {} - cluster.read_attributes.side_effect = read_new_fw_version + ota_cluster.read_attributes.side_effect = read_new_fw_version - cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) + ota_cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, @@ -438,7 +475,8 @@ async def test_firmware_update_success( attrs[ATTR_INSTALLED_VERSION] == 
f"0x{fw_image.firmware.header.file_version:08x}" ) - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_LATEST_VERSION] == attrs[ATTR_INSTALLED_VERSION] # If we send a progress notification incorrectly, it won't be handled @@ -446,7 +484,8 @@ async def test_firmware_update_success( entity.entity_data.entity._update_progress(50, 100, 0.50) state = hass.states.get(entity_id) - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert state.state == STATE_OFF @@ -457,7 +496,7 @@ async def test_firmware_update_raises( ) -> None: """Test ZHA update platform - firmware update raises.""" await setup_zha() - zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( + zha_device, ota_cluster, fw_image, installed_fw_version = await setup_test_data( hass, zigpy_device_mock ) @@ -467,7 +506,7 @@ async def test_firmware_update_raises( assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification - await cluster._handle_query_next_image( + await ota_cluster._handle_query_next_image( foundation.ZCLHeader.cluster( tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id ), @@ -485,19 +524,20 @@ async def test_firmware_update_raises( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) - async def endpoint_reply(cluster_id, tsn, data, command_id): - if cluster_id == general.Ota.cluster_id: - hdr, cmd = cluster.deserialize(data) + async def endpoint_reply(cluster, sequence, data, **kwargs): + if cluster == general.Ota.cluster_id: + hdr, cmd = ota_cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -516,7 +556,7 @@ async def test_firmware_update_raises( assert cmd.image_size == fw_image.firmware.header.image_size raise DeliveryError("failed to deliver") - cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) + ota_cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) with pytest.raises(HomeAssistantError): await hass.services.async_call( UPDATE_DOMAIN, @@ -544,81 +584,62 @@ async def test_firmware_update_raises( ) -async def test_firmware_update_no_longer_compatible( +async def test_update_release_notes( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, setup_zha, zigpy_device_mock, ) -> None: - """Test ZHA update platform - firmware update is no longer valid.""" + """Test ZHA update platform release notes.""" await setup_zha() - zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - hass, zigpy_device_mock + zha_device, _, _, _ = await setup_test_data(hass, zigpy_device_mock) + + zha_lib_entity = next( + e + for e in zha_device.device.platform_entities.values() + if isinstance(e, ZhaFirmwareUpdateEntity) ) + zha_lib_entity._attr_release_notes = "Some lengthy release notes" + 
zha_lib_entity.maybe_emit_state_changed_event() + await hass.async_block_till_done() entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNKNOWN + ws_client = await hass_ws_client(hass) - # simulate an image available notification - await cluster._handle_query_next_image( - foundation.ZCLHeader.cluster( - tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id - ), - general.QueryNextImageCommand( - fw_image.firmware.header.field_control, - zha_device.device.manufacturer_code, - fw_image.firmware.header.image_type, - installed_fw_version, - fw_image.firmware.header.header_version, - ), - ) - - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_ON - attrs = state.attributes - assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] - assert ( - attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" - ) - - new_version = 0x99999999 - - async def endpoint_reply(cluster_id, tsn, data, command_id): - if cluster_id == general.Ota.cluster_id: - hdr, cmd = cluster.deserialize(data) - if isinstance(cmd, general.Ota.ImageNotifyCommand): - zha_device.device.device.packet_received( - make_packet( - zha_device.device.device, - cluster, - general.Ota.ServerCommandDefs.query_next_image.name, - field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, - manufacturer_code=fw_image.firmware.header.manufacturer_id, - image_type=fw_image.firmware.header.image_type, - # The device reports that it is no longer compatible! - current_file_version=new_version, - hardware_version=1, - ) - ) - - cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - UPDATE_DOMAIN, - SERVICE_INSTALL, + # Mains-powered devices + with patch( + "zha.zigbee.device.Device.is_mains_powered", PropertyMock(return_value=True) + ): + await ws_client.send_json( { - ATTR_ENTITY_ID: entity_id, - }, - blocking=True, + "id": 1, + "type": "update/release_notes", + "entity_id": entity_id, + } ) - # We updated the currently installed firmware version, as it is no longer valid - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - attrs = state.attributes - assert attrs[ATTR_INSTALLED_VERSION] == f"0x{new_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] - assert attrs[ATTR_LATEST_VERSION] == f"0x{new_version:08x}" + result = await ws_client.receive_json() + assert result["success"] is True + assert "Some lengthy release notes" in result["result"] + assert OTA_MESSAGE_RELIABILITY in result["result"] + assert OTA_MESSAGE_BATTERY_POWERED not in result["result"] + + # Battery-powered devices + with patch( + "zha.zigbee.device.Device.is_mains_powered", PropertyMock(return_value=False) + ): + await ws_client.send_json( + { + "id": 2, + "type": "update/release_notes", + "entity_id": entity_id, + } + ) + + result = await ws_client.receive_json() + assert result["success"] is True + assert "Some lengthy release notes" in result["result"] + assert OTA_MESSAGE_RELIABILITY in result["result"] + assert OTA_MESSAGE_BATTERY_POWERED in result["result"] diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index a6bbe554f9a..37b1dde7316 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -3,36 +3,43 @@ import asyncio import copy import 
io -import json -from unittest.mock import DEFAULT, AsyncMock, patch +from typing import Any, cast +from unittest.mock import DEFAULT, AsyncMock, MagicMock, patch import pytest from zwave_js_server.event import Event from zwave_js_server.model.driver import Driver from zwave_js_server.model.node import Node +from zwave_js_server.model.node.data_model import NodeDataType from zwave_js_server.version import VersionInfo +from homeassistant.components.zwave_js.const import DOMAIN from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonArrayType -from tests.common import MockConfigEntry, load_fixture +from tests.common import ( + MockConfigEntry, + load_json_array_fixture, + load_json_object_fixture, +) # State fixtures @pytest.fixture(name="controller_state", scope="package") -def controller_state_fixture(): +def controller_state_fixture() -> dict[str, Any]: """Load the controller state fixture data.""" - return json.loads(load_fixture("zwave_js/controller_state.json")) + return load_json_object_fixture("controller_state.json", DOMAIN) @pytest.fixture(name="controller_node_state", scope="package") -def controller_node_state_fixture(): +def controller_node_state_fixture() -> dict[str, Any]: """Load the controller node state fixture data.""" - return json.loads(load_fixture("zwave_js/controller_node_state.json")) + return load_json_object_fixture("controller_node_state.json", DOMAIN) @pytest.fixture(name="version_state", scope="package") -def version_state_fixture(): +def version_state_fixture() -> dict[str, Any]: """Load the version state fixture data.""" return { "type": "version", @@ -43,7 +50,7 @@ def version_state_fixture(): @pytest.fixture(name="log_config_state") -def log_config_state_fixture(): +def log_config_state_fixture() -> dict[str, Any]: """Return log config state fixture data.""" return { "enabled": True, @@ -55,70 +62,70 @@ def log_config_state_fixture(): @pytest.fixture(name="config_entry_diagnostics", scope="package") -def config_entry_diagnostics_fixture(): +def config_entry_diagnostics_fixture() -> JsonArrayType: """Load the config entry diagnostics fixture data.""" - return json.loads(load_fixture("zwave_js/config_entry_diagnostics.json")) + return load_json_array_fixture("config_entry_diagnostics.json", DOMAIN) @pytest.fixture(name="config_entry_diagnostics_redacted", scope="package") -def config_entry_diagnostics_redacted_fixture(): +def config_entry_diagnostics_redacted_fixture() -> dict[str, Any]: """Load the redacted config entry diagnostics fixture data.""" - return json.loads(load_fixture("zwave_js/config_entry_diagnostics_redacted.json")) + return load_json_object_fixture("config_entry_diagnostics_redacted.json", DOMAIN) @pytest.fixture(name="multisensor_6_state", scope="package") -def multisensor_6_state_fixture(): +def multisensor_6_state_fixture() -> dict[str, Any]: """Load the multisensor 6 node state fixture data.""" - return json.loads(load_fixture("zwave_js/multisensor_6_state.json")) + return load_json_object_fixture("multisensor_6_state.json", DOMAIN) @pytest.fixture(name="ecolink_door_sensor_state", scope="package") -def ecolink_door_sensor_state_fixture(): +def ecolink_door_sensor_state_fixture() -> dict[str, Any]: """Load the Ecolink Door/Window Sensor node state fixture data.""" - return json.loads(load_fixture("zwave_js/ecolink_door_sensor_state.json")) + return load_json_object_fixture("ecolink_door_sensor_state.json", DOMAIN) @pytest.fixture(name="hank_binary_switch_state", scope="package") -def 
binary_switch_state_fixture(): +def binary_switch_state_fixture() -> dict[str, Any]: """Load the hank binary switch node state fixture data.""" - return json.loads(load_fixture("zwave_js/hank_binary_switch_state.json")) + return load_json_object_fixture("hank_binary_switch_state.json", DOMAIN) @pytest.fixture(name="bulb_6_multi_color_state", scope="package") -def bulb_6_multi_color_state_fixture(): +def bulb_6_multi_color_state_fixture() -> dict[str, Any]: """Load the bulb 6 multi-color node state fixture data.""" - return json.loads(load_fixture("zwave_js/bulb_6_multi_color_state.json")) + return load_json_object_fixture("bulb_6_multi_color_state.json", DOMAIN) @pytest.fixture(name="light_color_null_values_state", scope="package") -def light_color_null_values_state_fixture(): +def light_color_null_values_state_fixture() -> dict[str, Any]: """Load the light color null values node state fixture data.""" - return json.loads(load_fixture("zwave_js/light_color_null_values_state.json")) + return load_json_object_fixture("light_color_null_values_state.json", DOMAIN) @pytest.fixture(name="eaton_rf9640_dimmer_state", scope="package") -def eaton_rf9640_dimmer_state_fixture(): +def eaton_rf9640_dimmer_state_fixture() -> dict[str, Any]: """Load the eaton rf9640 dimmer node state fixture data.""" - return json.loads(load_fixture("zwave_js/eaton_rf9640_dimmer_state.json")) + return load_json_object_fixture("eaton_rf9640_dimmer_state.json", DOMAIN) @pytest.fixture(name="lock_schlage_be469_state", scope="package") -def lock_schlage_be469_state_fixture(): +def lock_schlage_be469_state_fixture() -> dict[str, Any]: """Load the schlage lock node state fixture data.""" - return json.loads(load_fixture("zwave_js/lock_schlage_be469_state.json")) + return load_json_object_fixture("lock_schlage_be469_state.json", DOMAIN) @pytest.fixture(name="lock_august_asl03_state", scope="package") -def lock_august_asl03_state_fixture(): +def lock_august_asl03_state_fixture() -> dict[str, Any]: """Load the August Pro lock node state fixture data.""" - return json.loads(load_fixture("zwave_js/lock_august_asl03_state.json")) + return load_json_object_fixture("lock_august_asl03_state.json", DOMAIN) @pytest.fixture(name="climate_radio_thermostat_ct100_plus_state", scope="package") -def climate_radio_thermostat_ct100_plus_state_fixture(): +def climate_radio_thermostat_ct100_plus_state_fixture() -> dict[str, Any]: """Load the climate radio thermostat ct100 plus node state fixture data.""" - return json.loads( - load_fixture("zwave_js/climate_radio_thermostat_ct100_plus_state.json") + return load_json_object_fixture( + "climate_radio_thermostat_ct100_plus_state.json", DOMAIN ) @@ -126,217 +133,215 @@ def climate_radio_thermostat_ct100_plus_state_fixture(): name="climate_radio_thermostat_ct100_plus_different_endpoints_state", scope="package", ) -def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture(): +def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture() -> ( + dict[str, Any] +): """Load the thermostat fixture state with values on different endpoints. This device is a radio thermostat ct100. 
""" - return json.loads( - load_fixture( - "zwave_js/climate_radio_thermostat_ct100_plus_different_endpoints_state.json" - ) + return load_json_object_fixture( + "climate_radio_thermostat_ct100_plus_different_endpoints_state.json", DOMAIN ) @pytest.fixture(name="climate_adc_t3000_state", scope="package") -def climate_adc_t3000_state_fixture(): +def climate_adc_t3000_state_fixture() -> dict[str, Any]: """Load the climate ADC-T3000 node state fixture data.""" - return json.loads(load_fixture("zwave_js/climate_adc_t3000_state.json")) + return load_json_object_fixture("climate_adc_t3000_state.json", DOMAIN) @pytest.fixture(name="climate_airzone_aidoo_control_hvac_unit_state", scope="package") -def climate_airzone_aidoo_control_hvac_unit_state_fixture(): +def climate_airzone_aidoo_control_hvac_unit_state_fixture() -> dict[str, Any]: """Load the climate Airzone Aidoo Control HVAC Unit state fixture data.""" - return json.loads( - load_fixture("zwave_js/climate_airzone_aidoo_control_hvac_unit_state.json") + return load_json_object_fixture( + "climate_airzone_aidoo_control_hvac_unit_state.json", DOMAIN ) @pytest.fixture(name="climate_danfoss_lc_13_state", scope="package") -def climate_danfoss_lc_13_state_fixture(): +def climate_danfoss_lc_13_state_fixture() -> dict[str, Any]: """Load Danfoss (LC-13) electronic radiator thermostat node state fixture data.""" - return json.loads(load_fixture("zwave_js/climate_danfoss_lc_13_state.json")) + return load_json_object_fixture("climate_danfoss_lc_13_state.json", DOMAIN) @pytest.fixture(name="climate_eurotronic_spirit_z_state", scope="package") -def climate_eurotronic_spirit_z_state_fixture(): +def climate_eurotronic_spirit_z_state_fixture() -> dict[str, Any]: """Load the climate Eurotronic Spirit Z thermostat node state fixture data.""" - return json.loads(load_fixture("zwave_js/climate_eurotronic_spirit_z_state.json")) + return load_json_object_fixture("climate_eurotronic_spirit_z_state.json", DOMAIN) @pytest.fixture(name="climate_heatit_z_trm6_state", scope="package") -def climate_heatit_z_trm6_state_fixture(): +def climate_heatit_z_trm6_state_fixture() -> dict[str, Any]: """Load the climate HEATIT Z-TRM6 thermostat node state fixture data.""" - return json.loads(load_fixture("zwave_js/climate_heatit_z_trm6_state.json")) + return load_json_object_fixture("climate_heatit_z_trm6_state.json", DOMAIN) @pytest.fixture(name="climate_heatit_z_trm3_state", scope="package") -def climate_heatit_z_trm3_state_fixture(): +def climate_heatit_z_trm3_state_fixture() -> dict[str, Any]: """Load the climate HEATIT Z-TRM3 thermostat node state fixture data.""" - return json.loads(load_fixture("zwave_js/climate_heatit_z_trm3_state.json")) + return load_json_object_fixture("climate_heatit_z_trm3_state.json", DOMAIN) @pytest.fixture(name="climate_heatit_z_trm2fx_state", scope="package") -def climate_heatit_z_trm2fx_state_fixture(): +def climate_heatit_z_trm2fx_state_fixture() -> dict[str, Any]: """Load the climate HEATIT Z-TRM2fx thermostat node state fixture data.""" - return json.loads(load_fixture("zwave_js/climate_heatit_z_trm2fx_state.json")) + return load_json_object_fixture("climate_heatit_z_trm2fx_state.json", DOMAIN) @pytest.fixture(name="climate_heatit_z_trm3_no_value_state", scope="package") -def climate_heatit_z_trm3_no_value_state_fixture(): +def climate_heatit_z_trm3_no_value_state_fixture() -> dict[str, Any]: """Load the climate HEATIT Z-TRM3 thermostat node w/no value state fixture data.""" - return json.loads( - 
load_fixture("zwave_js/climate_heatit_z_trm3_no_value_state.json") - ) + return load_json_object_fixture("climate_heatit_z_trm3_no_value_state.json", DOMAIN) @pytest.fixture(name="nortek_thermostat_state", scope="package") -def nortek_thermostat_state_fixture(): +def nortek_thermostat_state_fixture() -> dict[str, Any]: """Load the nortek thermostat node state fixture data.""" - return json.loads(load_fixture("zwave_js/nortek_thermostat_state.json")) + return load_json_object_fixture("nortek_thermostat_state.json", DOMAIN) @pytest.fixture(name="srt321_hrt4_zw_state", scope="package") -def srt321_hrt4_zw_state_fixture(): +def srt321_hrt4_zw_state_fixture() -> dict[str, Any]: """Load the climate HRT4-ZW / SRT321 / SRT322 thermostat node state fixture data.""" - return json.loads(load_fixture("zwave_js/srt321_hrt4_zw_state.json")) + return load_json_object_fixture("srt321_hrt4_zw_state.json", DOMAIN) @pytest.fixture(name="chain_actuator_zws12_state", scope="package") -def window_cover_state_fixture(): +def window_cover_state_fixture() -> dict[str, Any]: """Load the window cover node state fixture data.""" - return json.loads(load_fixture("zwave_js/chain_actuator_zws12_state.json")) + return load_json_object_fixture("chain_actuator_zws12_state.json", DOMAIN) @pytest.fixture(name="fan_generic_state", scope="package") -def fan_generic_state_fixture(): +def fan_generic_state_fixture() -> dict[str, Any]: """Load the fan node state fixture data.""" - return json.loads(load_fixture("zwave_js/fan_generic_state.json")) + return load_json_object_fixture("fan_generic_state.json", DOMAIN) @pytest.fixture(name="hs_fc200_state", scope="package") -def hs_fc200_state_fixture(): +def hs_fc200_state_fixture() -> dict[str, Any]: """Load the HS FC200+ node state fixture data.""" - return json.loads(load_fixture("zwave_js/fan_hs_fc200_state.json")) + return load_json_object_fixture("fan_hs_fc200_state.json", DOMAIN) @pytest.fixture(name="leviton_zw4sf_state", scope="package") -def leviton_zw4sf_state_fixture(): +def leviton_zw4sf_state_fixture() -> dict[str, Any]: """Load the Leviton ZW4SF node state fixture data.""" - return json.loads(load_fixture("zwave_js/leviton_zw4sf_state.json")) + return load_json_object_fixture("leviton_zw4sf_state.json", DOMAIN) @pytest.fixture(name="fan_honeywell_39358_state", scope="package") -def fan_honeywell_39358_state_fixture(): +def fan_honeywell_39358_state_fixture() -> dict[str, Any]: """Load the fan node state fixture data.""" - return json.loads(load_fixture("zwave_js/fan_honeywell_39358_state.json")) + return load_json_object_fixture("fan_honeywell_39358_state.json", DOMAIN) @pytest.fixture(name="gdc_zw062_state", scope="package") -def motorized_barrier_cover_state_fixture(): +def motorized_barrier_cover_state_fixture() -> dict[str, Any]: """Load the motorized barrier cover node state fixture data.""" - return json.loads(load_fixture("zwave_js/cover_zw062_state.json")) + return load_json_object_fixture("cover_zw062_state.json", DOMAIN) @pytest.fixture(name="iblinds_v2_state", scope="package") -def iblinds_v2_state_fixture(): +def iblinds_v2_state_fixture() -> dict[str, Any]: """Load the iBlinds v2 node state fixture data.""" - return json.loads(load_fixture("zwave_js/cover_iblinds_v2_state.json")) + return load_json_object_fixture("cover_iblinds_v2_state.json", DOMAIN) @pytest.fixture(name="iblinds_v3_state", scope="package") -def iblinds_v3_state_fixture(): +def iblinds_v3_state_fixture() -> dict[str, Any]: """Load the iBlinds v3 node state fixture data.""" - return 
json.loads(load_fixture("zwave_js/cover_iblinds_v3_state.json")) + return load_json_object_fixture("cover_iblinds_v3_state.json", DOMAIN) @pytest.fixture(name="zvidar_state", scope="package") -def zvidar_state_fixture(): +def zvidar_state_fixture() -> dict[str, Any]: """Load the ZVIDAR node state fixture data.""" - return json.loads(load_fixture("zwave_js/cover_zvidar_state.json")) + return load_json_object_fixture("cover_zvidar_state.json", DOMAIN) @pytest.fixture(name="qubino_shutter_state", scope="package") -def qubino_shutter_state_fixture(): +def qubino_shutter_state_fixture() -> dict[str, Any]: """Load the Qubino Shutter node state fixture data.""" - return json.loads(load_fixture("zwave_js/cover_qubino_shutter_state.json")) + return load_json_object_fixture("cover_qubino_shutter_state.json", DOMAIN) @pytest.fixture(name="aeotec_nano_shutter_state", scope="package") -def aeotec_nano_shutter_state_fixture(): +def aeotec_nano_shutter_state_fixture() -> dict[str, Any]: """Load the Aeotec Nano Shutter node state fixture data.""" - return json.loads(load_fixture("zwave_js/cover_aeotec_nano_shutter_state.json")) + return load_json_object_fixture("cover_aeotec_nano_shutter_state.json", DOMAIN) @pytest.fixture(name="fibaro_fgr222_shutter_state", scope="package") -def fibaro_fgr222_shutter_state_fixture(): +def fibaro_fgr222_shutter_state_fixture() -> dict[str, Any]: """Load the Fibaro FGR222 node state fixture data.""" - return json.loads(load_fixture("zwave_js/cover_fibaro_fgr222_state.json")) + return load_json_object_fixture("cover_fibaro_fgr222_state.json", DOMAIN) @pytest.fixture(name="fibaro_fgr223_shutter_state", scope="package") -def fibaro_fgr223_shutter_state_fixture(): +def fibaro_fgr223_shutter_state_fixture() -> dict[str, Any]: """Load the Fibaro FGR223 node state fixture data.""" - return json.loads(load_fixture("zwave_js/cover_fibaro_fgr223_state.json")) + return load_json_object_fixture("cover_fibaro_fgr223_state.json", DOMAIN) @pytest.fixture(name="shelly_europe_ltd_qnsh_001p10_state", scope="package") -def shelly_europe_ltd_qnsh_001p10_state_fixture(): +def shelly_europe_ltd_qnsh_001p10_state_fixture() -> dict[str, Any]: """Load the Shelly QNSH 001P10 node state fixture data.""" - return json.loads(load_fixture("zwave_js/shelly_europe_ltd_qnsh_001p10_state.json")) + return load_json_object_fixture("shelly_europe_ltd_qnsh_001p10_state.json", DOMAIN) @pytest.fixture(name="merten_507801_state", scope="package") -def merten_507801_state_fixture(): +def merten_507801_state_fixture() -> dict[str, Any]: """Load the Merten 507801 Shutter node state fixture data.""" - return json.loads(load_fixture("zwave_js/cover_merten_507801_state.json")) + return load_json_object_fixture("cover_merten_507801_state.json", DOMAIN) @pytest.fixture(name="aeon_smart_switch_6_state", scope="package") -def aeon_smart_switch_6_state_fixture(): +def aeon_smart_switch_6_state_fixture() -> dict[str, Any]: """Load the AEON Labs (ZW096) Smart Switch 6 node state fixture data.""" - return json.loads(load_fixture("zwave_js/aeon_smart_switch_6_state.json")) + return load_json_object_fixture("aeon_smart_switch_6_state.json", DOMAIN) @pytest.fixture(name="ge_12730_state", scope="package") -def ge_12730_state_fixture(): +def ge_12730_state_fixture() -> dict[str, Any]: """Load the GE 12730 node state fixture data.""" - return json.loads(load_fixture("zwave_js/fan_ge_12730_state.json")) + return load_json_object_fixture("fan_ge_12730_state.json", DOMAIN) @pytest.fixture(name="aeotec_radiator_thermostat_state", 
scope="package") -def aeotec_radiator_thermostat_state_fixture(): +def aeotec_radiator_thermostat_state_fixture() -> dict[str, Any]: """Load the Aeotec Radiator Thermostat node state fixture data.""" - return json.loads(load_fixture("zwave_js/aeotec_radiator_thermostat_state.json")) + return load_json_object_fixture("aeotec_radiator_thermostat_state.json", DOMAIN) @pytest.fixture(name="inovelli_lzw36_state", scope="package") -def inovelli_lzw36_state_fixture(): +def inovelli_lzw36_state_fixture() -> dict[str, Any]: """Load the Inovelli LZW36 node state fixture data.""" - return json.loads(load_fixture("zwave_js/inovelli_lzw36_state.json")) + return load_json_object_fixture("inovelli_lzw36_state.json", DOMAIN) @pytest.fixture(name="null_name_check_state", scope="package") -def null_name_check_state_fixture(): +def null_name_check_state_fixture() -> dict[str, Any]: """Load the null name check node state fixture data.""" - return json.loads(load_fixture("zwave_js/null_name_check_state.json")) + return load_json_object_fixture("null_name_check_state.json", DOMAIN) @pytest.fixture(name="lock_id_lock_as_id150_state", scope="package") -def lock_id_lock_as_id150_state_fixture(): +def lock_id_lock_as_id150_state_fixture() -> dict[str, Any]: """Load the id lock id-150 lock node state fixture data.""" - return json.loads(load_fixture("zwave_js/lock_id_lock_as_id150_state.json")) + return load_json_object_fixture("lock_id_lock_as_id150_state.json", DOMAIN) @pytest.fixture( name="climate_radio_thermostat_ct101_multiple_temp_units_state", scope="package" ) -def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture(): +def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture() -> ( + dict[str, Any] +): """Load the climate multiple temp units node state fixture data.""" - return json.loads( - load_fixture( - "zwave_js/climate_radio_thermostat_ct101_multiple_temp_units_state.json" - ) + return load_json_object_fixture( + "climate_radio_thermostat_ct101_multiple_temp_units_state.json", DOMAIN ) @@ -346,135 +351,160 @@ def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture(): ), scope="package", ) -def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state_fixture(): +def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state_fixture() -> ( + dict[str, Any] +): """Load climate device w/ mode+setpoint on diff endpoints node state fixture data.""" - return json.loads( - load_fixture( - "zwave_js/climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state.json" - ) + return load_json_object_fixture( + "climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state.json", + DOMAIN, ) @pytest.fixture(name="vision_security_zl7432_state", scope="package") -def vision_security_zl7432_state_fixture(): +def vision_security_zl7432_state_fixture() -> dict[str, Any]: """Load the vision security zl7432 switch node state fixture data.""" - return json.loads(load_fixture("zwave_js/vision_security_zl7432_state.json")) + return load_json_object_fixture("vision_security_zl7432_state.json", DOMAIN) @pytest.fixture(name="zen_31_state", scope="package") -def zem_31_state_fixture(): +def zem_31_state_fixture() -> dict[str, Any]: """Load the zen_31 node state fixture data.""" - return json.loads(load_fixture("zwave_js/zen_31_state.json")) + return load_json_object_fixture("zen_31_state.json", DOMAIN) @pytest.fixture(name="wallmote_central_scene_state", scope="package") -def wallmote_central_scene_state_fixture(): +def 
wallmote_central_scene_state_fixture() -> dict[str, Any]: """Load the wallmote central scene node state fixture data.""" - return json.loads(load_fixture("zwave_js/wallmote_central_scene_state.json")) + return load_json_object_fixture("wallmote_central_scene_state.json", DOMAIN) @pytest.fixture(name="ge_in_wall_dimmer_switch_state", scope="package") -def ge_in_wall_dimmer_switch_state_fixture(): +def ge_in_wall_dimmer_switch_state_fixture() -> dict[str, Any]: """Load the ge in-wall dimmer switch node state fixture data.""" - return json.loads(load_fixture("zwave_js/ge_in_wall_dimmer_switch_state.json")) + return load_json_object_fixture("ge_in_wall_dimmer_switch_state.json", DOMAIN) @pytest.fixture(name="aeotec_zw164_siren_state", scope="package") -def aeotec_zw164_siren_state_fixture(): +def aeotec_zw164_siren_state_fixture() -> dict[str, Any]: """Load the aeotec zw164 siren node state fixture data.""" - return json.loads(load_fixture("zwave_js/aeotec_zw164_siren_state.json")) + return load_json_object_fixture("aeotec_zw164_siren_state.json", DOMAIN) @pytest.fixture(name="lock_popp_electric_strike_lock_control_state", scope="package") -def lock_popp_electric_strike_lock_control_state_fixture(): +def lock_popp_electric_strike_lock_control_state_fixture() -> dict[str, Any]: """Load the popp electric strike lock control node state fixture data.""" - return json.loads( - load_fixture("zwave_js/lock_popp_electric_strike_lock_control_state.json") + return load_json_object_fixture( + "lock_popp_electric_strike_lock_control_state.json", DOMAIN ) @pytest.fixture(name="fortrezz_ssa1_siren_state", scope="package") -def fortrezz_ssa1_siren_state_fixture(): +def fortrezz_ssa1_siren_state_fixture() -> dict[str, Any]: """Load the fortrezz ssa1 siren node state fixture data.""" - return json.loads(load_fixture("zwave_js/fortrezz_ssa1_siren_state.json")) + return load_json_object_fixture("fortrezz_ssa1_siren_state.json", DOMAIN) @pytest.fixture(name="fortrezz_ssa3_siren_state", scope="package") -def fortrezz_ssa3_siren_state_fixture(): +def fortrezz_ssa3_siren_state_fixture() -> dict[str, Any]: """Load the fortrezz ssa3 siren node state fixture data.""" - return json.loads(load_fixture("zwave_js/fortrezz_ssa3_siren_state.json")) + return load_json_object_fixture("fortrezz_ssa3_siren_state.json", DOMAIN) @pytest.fixture(name="zp3111_not_ready_state", scope="package") -def zp3111_not_ready_state_fixture(): +def zp3111_not_ready_state_fixture() -> dict[str, Any]: """Load the zp3111 4-in-1 sensor not-ready node state fixture data.""" - return json.loads(load_fixture("zwave_js/zp3111-5_not_ready_state.json")) + return load_json_object_fixture("zp3111-5_not_ready_state.json", DOMAIN) @pytest.fixture(name="zp3111_state", scope="package") -def zp3111_state_fixture(): +def zp3111_state_fixture() -> dict[str, Any]: """Load the zp3111 4-in-1 sensor node state fixture data.""" - return json.loads(load_fixture("zwave_js/zp3111-5_state.json")) + return load_json_object_fixture("zp3111-5_state.json", DOMAIN) @pytest.fixture(name="express_controls_ezmultipli_state", scope="package") -def light_express_controls_ezmultipli_state_fixture(): +def light_express_controls_ezmultipli_state_fixture() -> dict[str, Any]: """Load the Express Controls EZMultiPli node state fixture data.""" - return json.loads(load_fixture("zwave_js/express_controls_ezmultipli_state.json")) + return load_json_object_fixture("express_controls_ezmultipli_state.json", DOMAIN) @pytest.fixture(name="lock_home_connect_620_state", scope="package") -def 
lock_home_connect_620_state_fixture(): +def lock_home_connect_620_state_fixture() -> dict[str, Any]: """Load the Home Connect 620 lock node state fixture data.""" - return json.loads(load_fixture("zwave_js/lock_home_connect_620_state.json")) + return load_json_object_fixture("lock_home_connect_620_state.json", DOMAIN) @pytest.fixture(name="switch_zooz_zen72_state", scope="package") -def switch_zooz_zen72_state_fixture(): +def switch_zooz_zen72_state_fixture() -> dict[str, Any]: """Load the Zooz Zen72 switch node state fixture data.""" - return json.loads(load_fixture("zwave_js/switch_zooz_zen72_state.json")) + return load_json_object_fixture("switch_zooz_zen72_state.json", DOMAIN) @pytest.fixture(name="indicator_test_state", scope="package") -def indicator_test_state_fixture(): +def indicator_test_state_fixture() -> dict[str, Any]: """Load the indicator CC test node state fixture data.""" - return json.loads(load_fixture("zwave_js/indicator_test_state.json")) + return load_json_object_fixture("indicator_test_state.json", DOMAIN) @pytest.fixture(name="energy_production_state", scope="package") -def energy_production_state_fixture(): +def energy_production_state_fixture() -> dict[str, Any]: """Load a mock node with energy production CC state fixture data.""" - return json.loads(load_fixture("zwave_js/energy_production_state.json")) + return load_json_object_fixture("energy_production_state.json", DOMAIN) @pytest.fixture(name="nice_ibt4zwave_state", scope="package") -def nice_ibt4zwave_state_fixture(): +def nice_ibt4zwave_state_fixture() -> dict[str, Any]: """Load a Nice IBT4ZWAVE cover node state fixture data.""" - return json.loads(load_fixture("zwave_js/cover_nice_ibt4zwave_state.json")) + return load_json_object_fixture("cover_nice_ibt4zwave_state.json", DOMAIN) @pytest.fixture(name="logic_group_zdb5100_state", scope="package") -def logic_group_zdb5100_state_fixture(): +def logic_group_zdb5100_state_fixture() -> dict[str, Any]: """Load the Logic Group ZDB5100 node state fixture data.""" - return json.loads(load_fixture("zwave_js/logic_group_zdb5100_state.json")) + return load_json_object_fixture("logic_group_zdb5100_state.json", DOMAIN) @pytest.fixture(name="central_scene_node_state", scope="package") -def central_scene_node_state_fixture(): +def central_scene_node_state_fixture() -> dict[str, Any]: """Load node with Central Scene CC node state fixture data.""" - return json.loads(load_fixture("zwave_js/central_scene_node_state.json")) + return load_json_object_fixture("central_scene_node_state.json", DOMAIN) @pytest.fixture(name="light_device_class_is_null_state", scope="package") -def light_device_class_is_null_state_fixture(): +def light_device_class_is_null_state_fixture() -> dict[str, Any]: """Load node with device class is None state fixture data.""" - return json.loads(load_fixture("zwave_js/light_device_class_is_null_state.json")) + return load_json_object_fixture("light_device_class_is_null_state.json", DOMAIN) @pytest.fixture(name="basic_cc_sensor_state", scope="package") -def basic_cc_sensor_state_fixture(): +def basic_cc_sensor_state_fixture() -> dict[str, Any]: """Load node with Basic CC sensor fixture data.""" - return json.loads(load_fixture("zwave_js/basic_cc_sensor_state.json")) + return load_json_object_fixture("basic_cc_sensor_state.json", DOMAIN) + + +@pytest.fixture(name="window_covering_outbound_bottom_state", scope="package") +def window_covering_outbound_bottom_state_fixture() -> dict[str, Any]: + """Load node with Window Covering CC fixture data, with only the 
outbound bottom position supported.""" + return load_json_object_fixture("window_covering_outbound_bottom.json", DOMAIN) + + +@pytest.fixture(name="siren_neo_coolcam_state") +def siren_neo_coolcam_state_state_fixture() -> NodeDataType: + """Load node with siren_neo_coolcam_state fixture data.""" + return cast( + NodeDataType, + load_json_object_fixture("siren_neo_coolcam_nas-ab01z_state.json", DOMAIN), + ) + + +@pytest.fixture(name="aeotec_smart_switch_7_state") +def aeotec_smart_switch_7_state_fixture() -> NodeDataType: + """Load node with fixture data for Aeotec Smart Switch 7.""" + return cast( + NodeDataType, + load_json_object_fixture("aeotec_smart_switch_7_state.json", DOMAIN), + ) # model fixtures @@ -538,7 +568,7 @@ def mock_client_fixture( @pytest.fixture(name="multisensor_6") -def multisensor_6_fixture(client, multisensor_6_state): +def multisensor_6_fixture(client, multisensor_6_state) -> Node: """Mock a multisensor 6 node.""" node = Node(client, copy.deepcopy(multisensor_6_state)) client.driver.controller.nodes[node.node_id] = node @@ -546,7 +576,7 @@ def multisensor_6_fixture(client, multisensor_6_state): @pytest.fixture(name="ecolink_door_sensor") -def legacy_binary_sensor_fixture(client, ecolink_door_sensor_state): +def legacy_binary_sensor_fixture(client, ecolink_door_sensor_state) -> Node: """Mock a legacy_binary_sensor node.""" node = Node(client, copy.deepcopy(ecolink_door_sensor_state)) client.driver.controller.nodes[node.node_id] = node @@ -554,7 +584,7 @@ def legacy_binary_sensor_fixture(client, ecolink_door_sensor_state): @pytest.fixture(name="hank_binary_switch") -def hank_binary_switch_fixture(client, hank_binary_switch_state): +def hank_binary_switch_fixture(client, hank_binary_switch_state) -> Node: """Mock a binary switch node.""" node = Node(client, copy.deepcopy(hank_binary_switch_state)) client.driver.controller.nodes[node.node_id] = node @@ -562,7 +592,7 @@ def hank_binary_switch_fixture(client, hank_binary_switch_state): @pytest.fixture(name="bulb_6_multi_color") -def bulb_6_multi_color_fixture(client, bulb_6_multi_color_state): +def bulb_6_multi_color_fixture(client, bulb_6_multi_color_state) -> Node: """Mock a bulb 6 multi-color node.""" node = Node(client, copy.deepcopy(bulb_6_multi_color_state)) client.driver.controller.nodes[node.node_id] = node @@ -570,7 +600,7 @@ def bulb_6_multi_color_fixture(client, bulb_6_multi_color_state): @pytest.fixture(name="light_color_null_values") -def light_color_null_values_fixture(client, light_color_null_values_state): +def light_color_null_values_fixture(client, light_color_null_values_state) -> Node: """Mock a node with current color value item being null.""" node = Node(client, copy.deepcopy(light_color_null_values_state)) client.driver.controller.nodes[node.node_id] = node @@ -578,7 +608,7 @@ def light_color_null_values_fixture(client, light_color_null_values_state): @pytest.fixture(name="eaton_rf9640_dimmer") -def eaton_rf9640_dimmer_fixture(client, eaton_rf9640_dimmer_state): +def eaton_rf9640_dimmer_fixture(client, eaton_rf9640_dimmer_state) -> Node: """Mock a Eaton RF9640 (V4 compatible) dimmer node.""" node = Node(client, copy.deepcopy(eaton_rf9640_dimmer_state)) client.driver.controller.nodes[node.node_id] = node @@ -586,7 +616,7 @@ def eaton_rf9640_dimmer_fixture(client, eaton_rf9640_dimmer_state): @pytest.fixture(name="lock_schlage_be469") -def lock_schlage_be469_fixture(client, lock_schlage_be469_state): +def lock_schlage_be469_fixture(client, lock_schlage_be469_state) -> Node: """Mock a schlage lock 
node.""" node = Node(client, copy.deepcopy(lock_schlage_be469_state)) client.driver.controller.nodes[node.node_id] = node @@ -594,7 +624,7 @@ def lock_schlage_be469_fixture(client, lock_schlage_be469_state): @pytest.fixture(name="lock_august_pro") -def lock_august_asl03_fixture(client, lock_august_asl03_state): +def lock_august_asl03_fixture(client, lock_august_asl03_state) -> Node: """Mock a August Pro lock node.""" node = Node(client, copy.deepcopy(lock_august_asl03_state)) client.driver.controller.nodes[node.node_id] = node @@ -604,7 +634,7 @@ def lock_august_asl03_fixture(client, lock_august_asl03_state): @pytest.fixture(name="climate_radio_thermostat_ct100_plus") def climate_radio_thermostat_ct100_plus_fixture( client, climate_radio_thermostat_ct100_plus_state -): +) -> Node: """Mock a climate radio thermostat ct100 plus node.""" node = Node(client, copy.deepcopy(climate_radio_thermostat_ct100_plus_state)) client.driver.controller.nodes[node.node_id] = node @@ -614,7 +644,7 @@ def climate_radio_thermostat_ct100_plus_fixture( @pytest.fixture(name="climate_radio_thermostat_ct100_plus_different_endpoints") def climate_radio_thermostat_ct100_plus_different_endpoints_fixture( client, climate_radio_thermostat_ct100_plus_different_endpoints_state -): +) -> Node: """Mock climate radio thermostat ct100 plus node w/ values on diff endpoints.""" node = Node( client, @@ -625,7 +655,7 @@ def climate_radio_thermostat_ct100_plus_different_endpoints_fixture( @pytest.fixture(name="climate_adc_t3000") -def climate_adc_t3000_fixture(client, climate_adc_t3000_state): +def climate_adc_t3000_fixture(client, climate_adc_t3000_state) -> Node: """Mock a climate ADC-T3000 node.""" node = Node(client, copy.deepcopy(climate_adc_t3000_state)) client.driver.controller.nodes[node.node_id] = node @@ -633,7 +663,7 @@ def climate_adc_t3000_fixture(client, climate_adc_t3000_state): @pytest.fixture(name="climate_adc_t3000_missing_setpoint") -def climate_adc_t3000_missing_setpoint_fixture(client, climate_adc_t3000_state): +def climate_adc_t3000_missing_setpoint_fixture(client, climate_adc_t3000_state) -> Node: """Mock a climate ADC-T3000 node with missing de-humidify setpoint.""" data = copy.deepcopy(climate_adc_t3000_state) data["name"] = f"{data['name']} missing setpoint" @@ -649,7 +679,7 @@ def climate_adc_t3000_missing_setpoint_fixture(client, climate_adc_t3000_state): @pytest.fixture(name="climate_adc_t3000_missing_mode") -def climate_adc_t3000_missing_mode_fixture(client, climate_adc_t3000_state): +def climate_adc_t3000_missing_mode_fixture(client, climate_adc_t3000_state) -> Node: """Mock a climate ADC-T3000 node with missing mode setpoint.""" data = copy.deepcopy(climate_adc_t3000_state) data["name"] = f"{data['name']} missing mode" @@ -665,7 +695,9 @@ def climate_adc_t3000_missing_mode_fixture(client, climate_adc_t3000_state): @pytest.fixture(name="climate_adc_t3000_missing_fan_mode_states") -def climate_adc_t3000_missing_fan_mode_states_fixture(client, climate_adc_t3000_state): +def climate_adc_t3000_missing_fan_mode_states_fixture( + client, climate_adc_t3000_state +) -> Node: """Mock ADC-T3000 node w/ missing 'states' metadata on Thermostat Fan Mode.""" data = copy.deepcopy(climate_adc_t3000_state) data["name"] = f"{data['name']} missing fan mode states" @@ -691,7 +723,7 @@ def climate_airzone_aidoo_control_hvac_unit_fixture( @pytest.fixture(name="climate_danfoss_lc_13") -def climate_danfoss_lc_13_fixture(client, climate_danfoss_lc_13_state): +def climate_danfoss_lc_13_fixture(client, 
climate_danfoss_lc_13_state) -> Node: """Mock a climate radio danfoss LC-13 node.""" node = Node(client, copy.deepcopy(climate_danfoss_lc_13_state)) client.driver.controller.nodes[node.node_id] = node @@ -699,7 +731,9 @@ def climate_danfoss_lc_13_fixture(client, climate_danfoss_lc_13_state): @pytest.fixture(name="climate_eurotronic_spirit_z") -def climate_eurotronic_spirit_z_fixture(client, climate_eurotronic_spirit_z_state): +def climate_eurotronic_spirit_z_fixture( + client, climate_eurotronic_spirit_z_state +) -> Node: """Mock a climate radio danfoss LC-13 node.""" node = Node(client, climate_eurotronic_spirit_z_state) client.driver.controller.nodes[node.node_id] = node @@ -707,7 +741,7 @@ def climate_eurotronic_spirit_z_fixture(client, climate_eurotronic_spirit_z_stat @pytest.fixture(name="climate_heatit_z_trm6") -def climate_heatit_z_trm6_fixture(client, climate_heatit_z_trm6_state): +def climate_heatit_z_trm6_fixture(client, climate_heatit_z_trm6_state) -> Node: """Mock a climate radio HEATIT Z-TRM6 node.""" node = Node(client, copy.deepcopy(climate_heatit_z_trm6_state)) client.driver.controller.nodes[node.node_id] = node @@ -717,7 +751,7 @@ def climate_heatit_z_trm6_fixture(client, climate_heatit_z_trm6_state): @pytest.fixture(name="climate_heatit_z_trm3_no_value") def climate_heatit_z_trm3_no_value_fixture( client, climate_heatit_z_trm3_no_value_state -): +) -> Node: """Mock a climate radio HEATIT Z-TRM3 node.""" node = Node(client, copy.deepcopy(climate_heatit_z_trm3_no_value_state)) client.driver.controller.nodes[node.node_id] = node @@ -725,7 +759,7 @@ def climate_heatit_z_trm3_no_value_fixture( @pytest.fixture(name="climate_heatit_z_trm3") -def climate_heatit_z_trm3_fixture(client, climate_heatit_z_trm3_state): +def climate_heatit_z_trm3_fixture(client, climate_heatit_z_trm3_state) -> Node: """Mock a climate radio HEATIT Z-TRM3 node.""" node = Node(client, copy.deepcopy(climate_heatit_z_trm3_state)) client.driver.controller.nodes[node.node_id] = node @@ -733,7 +767,7 @@ def climate_heatit_z_trm3_fixture(client, climate_heatit_z_trm3_state): @pytest.fixture(name="climate_heatit_z_trm2fx") -def climate_heatit_z_trm2fx_fixture(client, climate_heatit_z_trm2fx_state): +def climate_heatit_z_trm2fx_fixture(client, climate_heatit_z_trm2fx_state) -> Node: """Mock a climate radio HEATIT Z-TRM2fx node.""" node = Node(client, copy.deepcopy(climate_heatit_z_trm2fx_state)) client.driver.controller.nodes[node.node_id] = node @@ -741,7 +775,7 @@ def climate_heatit_z_trm2fx_fixture(client, climate_heatit_z_trm2fx_state): @pytest.fixture(name="nortek_thermostat") -def nortek_thermostat_fixture(client, nortek_thermostat_state): +def nortek_thermostat_fixture(client, nortek_thermostat_state) -> Node: """Mock a nortek thermostat node.""" node = Node(client, copy.deepcopy(nortek_thermostat_state)) client.driver.controller.nodes[node.node_id] = node @@ -749,7 +783,7 @@ def nortek_thermostat_fixture(client, nortek_thermostat_state): @pytest.fixture(name="srt321_hrt4_zw") -def srt321_hrt4_zw_fixture(client, srt321_hrt4_zw_state): +def srt321_hrt4_zw_fixture(client, srt321_hrt4_zw_state) -> Node: """Mock a HRT4-ZW / SRT321 / SRT322 thermostat node.""" node = Node(client, copy.deepcopy(srt321_hrt4_zw_state)) client.driver.controller.nodes[node.node_id] = node @@ -757,7 +791,9 @@ def srt321_hrt4_zw_fixture(client, srt321_hrt4_zw_state): @pytest.fixture(name="aeotec_radiator_thermostat") -def aeotec_radiator_thermostat_fixture(client, aeotec_radiator_thermostat_state): +def 
aeotec_radiator_thermostat_fixture( + client, aeotec_radiator_thermostat_state +) -> Node: """Mock an Aeotec thermostat node.""" node = Node(client, aeotec_radiator_thermostat_state) client.driver.controller.nodes[node.node_id] = node @@ -765,23 +801,23 @@ def aeotec_radiator_thermostat_fixture(client, aeotec_radiator_thermostat_state) @pytest.fixture(name="nortek_thermostat_added_event") -def nortek_thermostat_added_event_fixture(client): +def nortek_thermostat_added_event_fixture(client) -> Event: """Mock a Nortek thermostat node added event.""" - event_data = json.loads(load_fixture("zwave_js/nortek_thermostat_added_event.json")) + event_data = load_json_object_fixture("nortek_thermostat_added_event.json", DOMAIN) return Event("node added", event_data) @pytest.fixture(name="nortek_thermostat_removed_event") -def nortek_thermostat_removed_event_fixture(client): +def nortek_thermostat_removed_event_fixture(client) -> Event: """Mock a Nortek thermostat node removed event.""" - event_data = json.loads( - load_fixture("zwave_js/nortek_thermostat_removed_event.json") + event_data = load_json_object_fixture( + "nortek_thermostat_removed_event.json", DOMAIN ) return Event("node removed", event_data) @pytest.fixture(name="integration") -async def integration_fixture(hass: HomeAssistant, client): +async def integration_fixture(hass: HomeAssistant, client) -> MockConfigEntry: """Set up the zwave_js integration.""" entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) entry.add_to_hass(hass) @@ -794,7 +830,7 @@ async def integration_fixture(hass: HomeAssistant, client): @pytest.fixture(name="chain_actuator_zws12") -def window_cover_fixture(client, chain_actuator_zws12_state): +def window_cover_fixture(client, chain_actuator_zws12_state) -> Node: """Mock a window cover node.""" node = Node(client, copy.deepcopy(chain_actuator_zws12_state)) client.driver.controller.nodes[node.node_id] = node @@ -802,7 +838,7 @@ def window_cover_fixture(client, chain_actuator_zws12_state): @pytest.fixture(name="fan_generic") -def fan_generic_fixture(client, fan_generic_state): +def fan_generic_fixture(client, fan_generic_state) -> Node: """Mock a fan node.""" node = Node(client, copy.deepcopy(fan_generic_state)) client.driver.controller.nodes[node.node_id] = node @@ -810,7 +846,7 @@ def fan_generic_fixture(client, fan_generic_state): @pytest.fixture(name="hs_fc200") -def hs_fc200_fixture(client, hs_fc200_state): +def hs_fc200_fixture(client, hs_fc200_state) -> Node: """Mock a fan node.""" node = Node(client, copy.deepcopy(hs_fc200_state)) client.driver.controller.nodes[node.node_id] = node @@ -818,7 +854,7 @@ def hs_fc200_fixture(client, hs_fc200_state): @pytest.fixture(name="leviton_zw4sf") -def leviton_zw4sf_fixture(client, leviton_zw4sf_state): +def leviton_zw4sf_fixture(client, leviton_zw4sf_state) -> Node: """Mock a fan node.""" node = Node(client, copy.deepcopy(leviton_zw4sf_state)) client.driver.controller.nodes[node.node_id] = node @@ -826,7 +862,7 @@ def leviton_zw4sf_fixture(client, leviton_zw4sf_state): @pytest.fixture(name="fan_honeywell_39358") -def fan_honeywell_39358_fixture(client, fan_honeywell_39358_state): +def fan_honeywell_39358_fixture(client, fan_honeywell_39358_state) -> Node: """Mock a fan node.""" node = Node(client, copy.deepcopy(fan_honeywell_39358_state)) client.driver.controller.nodes[node.node_id] = node @@ -834,7 +870,7 @@ def fan_honeywell_39358_fixture(client, fan_honeywell_39358_state): @pytest.fixture(name="null_name_check") -def null_name_check_fixture(client, 
null_name_check_state): +def null_name_check_fixture(client, null_name_check_state) -> Node: """Mock a node with no name.""" node = Node(client, copy.deepcopy(null_name_check_state)) client.driver.controller.nodes[node.node_id] = node @@ -842,7 +878,7 @@ def null_name_check_fixture(client, null_name_check_state): @pytest.fixture(name="gdc_zw062") -def motorized_barrier_cover_fixture(client, gdc_zw062_state): +def motorized_barrier_cover_fixture(client, gdc_zw062_state) -> Node: """Mock a motorized barrier node.""" node = Node(client, copy.deepcopy(gdc_zw062_state)) client.driver.controller.nodes[node.node_id] = node @@ -850,7 +886,7 @@ def motorized_barrier_cover_fixture(client, gdc_zw062_state): @pytest.fixture(name="iblinds_v2") -def iblinds_v2_cover_fixture(client, iblinds_v2_state): +def iblinds_v2_cover_fixture(client, iblinds_v2_state) -> Node: """Mock an iBlinds v2.0 window cover node.""" node = Node(client, copy.deepcopy(iblinds_v2_state)) client.driver.controller.nodes[node.node_id] = node @@ -858,7 +894,7 @@ def iblinds_v2_cover_fixture(client, iblinds_v2_state): @pytest.fixture(name="iblinds_v3") -def iblinds_v3_cover_fixture(client, iblinds_v3_state): +def iblinds_v3_cover_fixture(client, iblinds_v3_state) -> Node: """Mock an iBlinds v3 window cover node.""" node = Node(client, copy.deepcopy(iblinds_v3_state)) client.driver.controller.nodes[node.node_id] = node @@ -866,7 +902,7 @@ def iblinds_v3_cover_fixture(client, iblinds_v3_state): @pytest.fixture(name="zvidar") -def zvidar_cover_fixture(client, zvidar_state): +def zvidar_cover_fixture(client, zvidar_state) -> Node: """Mock a ZVIDAR window cover node.""" node = Node(client, copy.deepcopy(zvidar_state)) client.driver.controller.nodes[node.node_id] = node @@ -874,7 +910,7 @@ def zvidar_cover_fixture(client, zvidar_state): @pytest.fixture(name="qubino_shutter") -def qubino_shutter_cover_fixture(client, qubino_shutter_state): +def qubino_shutter_cover_fixture(client, qubino_shutter_state) -> Node: """Mock a Qubino flush shutter node.""" node = Node(client, copy.deepcopy(qubino_shutter_state)) client.driver.controller.nodes[node.node_id] = node @@ -882,7 +918,7 @@ def qubino_shutter_cover_fixture(client, qubino_shutter_state): @pytest.fixture(name="aeotec_nano_shutter") -def aeotec_nano_shutter_cover_fixture(client, aeotec_nano_shutter_state): +def aeotec_nano_shutter_cover_fixture(client, aeotec_nano_shutter_state) -> Node: """Mock a Aeotec Nano Shutter node.""" node = Node(client, copy.deepcopy(aeotec_nano_shutter_state)) client.driver.controller.nodes[node.node_id] = node @@ -890,7 +926,7 @@ def aeotec_nano_shutter_cover_fixture(client, aeotec_nano_shutter_state): @pytest.fixture(name="fibaro_fgr222_shutter") -def fibaro_fgr222_shutter_cover_fixture(client, fibaro_fgr222_shutter_state): +def fibaro_fgr222_shutter_cover_fixture(client, fibaro_fgr222_shutter_state) -> Node: """Mock a Fibaro FGR222 Shutter node.""" node = Node(client, copy.deepcopy(fibaro_fgr222_shutter_state)) client.driver.controller.nodes[node.node_id] = node @@ -898,7 +934,7 @@ def fibaro_fgr222_shutter_cover_fixture(client, fibaro_fgr222_shutter_state): @pytest.fixture(name="fibaro_fgr223_shutter") -def fibaro_fgr223_shutter_cover_fixture(client, fibaro_fgr223_shutter_state): +def fibaro_fgr223_shutter_cover_fixture(client, fibaro_fgr223_shutter_state) -> Node: """Mock a Fibaro FGR223 Shutter node.""" node = Node(client, copy.deepcopy(fibaro_fgr223_shutter_state)) client.driver.controller.nodes[node.node_id] = node @@ -908,7 +944,7 @@ def 
fibaro_fgr223_shutter_cover_fixture(client, fibaro_fgr223_shutter_state): @pytest.fixture(name="shelly_qnsh_001P10_shutter") def shelly_qnsh_001P10_cover_shutter_fixture( client, shelly_europe_ltd_qnsh_001p10_state -): +) -> Node: """Mock a Shelly QNSH 001P10 Shutter node.""" node = Node(client, copy.deepcopy(shelly_europe_ltd_qnsh_001p10_state)) client.driver.controller.nodes[node.node_id] = node @@ -916,7 +952,7 @@ def shelly_qnsh_001P10_cover_shutter_fixture( @pytest.fixture(name="merten_507801") -def merten_507801_cover_fixture(client, merten_507801_state): +def merten_507801_cover_fixture(client, merten_507801_state) -> Node: """Mock a Merten 507801 Shutter node.""" node = Node(client, copy.deepcopy(merten_507801_state)) client.driver.controller.nodes[node.node_id] = node @@ -924,7 +960,7 @@ def merten_507801_cover_fixture(client, merten_507801_state): @pytest.fixture(name="aeon_smart_switch_6") -def aeon_smart_switch_6_fixture(client, aeon_smart_switch_6_state): +def aeon_smart_switch_6_fixture(client, aeon_smart_switch_6_state) -> Node: """Mock an AEON Labs (ZW096) Smart Switch 6 node.""" node = Node(client, aeon_smart_switch_6_state) client.driver.controller.nodes[node.node_id] = node @@ -932,7 +968,7 @@ def aeon_smart_switch_6_fixture(client, aeon_smart_switch_6_state): @pytest.fixture(name="ge_12730") -def ge_12730_fixture(client, ge_12730_state): +def ge_12730_fixture(client, ge_12730_state) -> Node: """Mock a GE 12730 fan controller node.""" node = Node(client, copy.deepcopy(ge_12730_state)) client.driver.controller.nodes[node.node_id] = node @@ -940,7 +976,7 @@ def ge_12730_fixture(client, ge_12730_state): @pytest.fixture(name="inovelli_lzw36") -def inovelli_lzw36_fixture(client, inovelli_lzw36_state): +def inovelli_lzw36_fixture(client, inovelli_lzw36_state) -> Node: """Mock a Inovelli LZW36 fan controller node.""" node = Node(client, copy.deepcopy(inovelli_lzw36_state)) client.driver.controller.nodes[node.node_id] = node @@ -948,7 +984,7 @@ def inovelli_lzw36_fixture(client, inovelli_lzw36_state): @pytest.fixture(name="lock_id_lock_as_id150") -def lock_id_lock_as_id150(client, lock_id_lock_as_id150_state): +def lock_id_lock_as_id150_fixture(client, lock_id_lock_as_id150_state) -> Node: """Mock an id lock id-150 lock node.""" node = Node(client, copy.deepcopy(lock_id_lock_as_id150_state)) client.driver.controller.nodes[node.node_id] = node @@ -956,7 +992,7 @@ def lock_id_lock_as_id150(client, lock_id_lock_as_id150_state): @pytest.fixture(name="lock_id_lock_as_id150_not_ready") -def node_not_ready(client, lock_id_lock_as_id150_state): +def node_not_ready_fixture(client, lock_id_lock_as_id150_state) -> Node: """Mock an id lock id-150 lock node that's not ready.""" state = copy.deepcopy(lock_id_lock_as_id150_state) state["ready"] = False @@ -968,7 +1004,7 @@ def node_not_ready(client, lock_id_lock_as_id150_state): @pytest.fixture(name="climate_radio_thermostat_ct101_multiple_temp_units") def climate_radio_thermostat_ct101_multiple_temp_units_fixture( client, climate_radio_thermostat_ct101_multiple_temp_units_state -): +) -> Node: """Mock a climate device with multiple temp units node.""" node = Node( client, copy.deepcopy(climate_radio_thermostat_ct101_multiple_temp_units_state) @@ -983,7 +1019,7 @@ def climate_radio_thermostat_ct101_multiple_temp_units_fixture( def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_fixture( client, climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state, -): +) -> Node: """Mock a climate device with 
mode and setpoint on different endpoints node.""" node = Node( client, @@ -996,7 +1032,7 @@ def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_fixt @pytest.fixture(name="vision_security_zl7432") -def vision_security_zl7432_fixture(client, vision_security_zl7432_state): +def vision_security_zl7432_fixture(client, vision_security_zl7432_state) -> Node: """Mock a vision security zl7432 node.""" node = Node(client, copy.deepcopy(vision_security_zl7432_state)) client.driver.controller.nodes[node.node_id] = node @@ -1004,7 +1040,7 @@ def vision_security_zl7432_fixture(client, vision_security_zl7432_state): @pytest.fixture(name="zen_31") -def zen_31_fixture(client, zen_31_state): +def zen_31_fixture(client, zen_31_state) -> Node: """Mock a bulb 6 multi-color node.""" node = Node(client, copy.deepcopy(zen_31_state)) client.driver.controller.nodes[node.node_id] = node @@ -1012,7 +1048,7 @@ def zen_31_fixture(client, zen_31_state): @pytest.fixture(name="wallmote_central_scene") -def wallmote_central_scene_fixture(client, wallmote_central_scene_state): +def wallmote_central_scene_fixture(client, wallmote_central_scene_state) -> Node: """Mock a wallmote central scene node.""" node = Node(client, copy.deepcopy(wallmote_central_scene_state)) client.driver.controller.nodes[node.node_id] = node @@ -1020,7 +1056,7 @@ def wallmote_central_scene_fixture(client, wallmote_central_scene_state): @pytest.fixture(name="ge_in_wall_dimmer_switch") -def ge_in_wall_dimmer_switch_fixture(client, ge_in_wall_dimmer_switch_state): +def ge_in_wall_dimmer_switch_fixture(client, ge_in_wall_dimmer_switch_state) -> Node: """Mock a ge in-wall dimmer switch scene node.""" node = Node(client, copy.deepcopy(ge_in_wall_dimmer_switch_state)) client.driver.controller.nodes[node.node_id] = node @@ -1028,7 +1064,7 @@ def ge_in_wall_dimmer_switch_fixture(client, ge_in_wall_dimmer_switch_state): @pytest.fixture(name="aeotec_zw164_siren") -def aeotec_zw164_siren_fixture(client, aeotec_zw164_siren_state): +def aeotec_zw164_siren_fixture(client, aeotec_zw164_siren_state) -> Node: """Mock an aeotec zw164 siren node.""" node = Node(client, copy.deepcopy(aeotec_zw164_siren_state)) client.driver.controller.nodes[node.node_id] = node @@ -1038,7 +1074,7 @@ def aeotec_zw164_siren_fixture(client, aeotec_zw164_siren_state): @pytest.fixture(name="lock_popp_electric_strike_lock_control") def lock_popp_electric_strike_lock_control_fixture( client, lock_popp_electric_strike_lock_control_state -): +) -> Node: """Mock a popp electric strike lock control node.""" node = Node(client, copy.deepcopy(lock_popp_electric_strike_lock_control_state)) client.driver.controller.nodes[node.node_id] = node @@ -1046,7 +1082,7 @@ def lock_popp_electric_strike_lock_control_fixture( @pytest.fixture(name="fortrezz_ssa1_siren") -def fortrezz_ssa1_siren_fixture(client, fortrezz_ssa1_siren_state): +def fortrezz_ssa1_siren_fixture(client, fortrezz_ssa1_siren_state) -> Node: """Mock a fortrezz ssa1 siren node.""" node = Node(client, copy.deepcopy(fortrezz_ssa1_siren_state)) client.driver.controller.nodes[node.node_id] = node @@ -1054,7 +1090,7 @@ def fortrezz_ssa1_siren_fixture(client, fortrezz_ssa1_siren_state): @pytest.fixture(name="fortrezz_ssa3_siren") -def fortrezz_ssa3_siren_fixture(client, fortrezz_ssa3_siren_state): +def fortrezz_ssa3_siren_fixture(client, fortrezz_ssa3_siren_state) -> Node: """Mock a fortrezz ssa3 siren node.""" node = Node(client, copy.deepcopy(fortrezz_ssa3_siren_state)) client.driver.controller.nodes[node.node_id] = node @@ 
-1062,13 +1098,13 @@ def fortrezz_ssa3_siren_fixture(client, fortrezz_ssa3_siren_state): @pytest.fixture(name="firmware_file") -def firmware_file_fixture(): +def firmware_file_fixture() -> io.BytesIO: """Return mock firmware file stream.""" return io.BytesIO(bytes(10)) @pytest.fixture(name="zp3111_not_ready") -def zp3111_not_ready_fixture(client, zp3111_not_ready_state): +def zp3111_not_ready_fixture(client, zp3111_not_ready_state) -> Node: """Mock a zp3111 4-in-1 sensor node in a not-ready state.""" node = Node(client, copy.deepcopy(zp3111_not_ready_state)) client.driver.controller.nodes[node.node_id] = node @@ -1076,7 +1112,7 @@ def zp3111_not_ready_fixture(client, zp3111_not_ready_state): @pytest.fixture(name="zp3111") -def zp3111_fixture(client, zp3111_state): +def zp3111_fixture(client, zp3111_state) -> Node: """Mock a zp3111 4-in-1 sensor node.""" node = Node(client, copy.deepcopy(zp3111_state)) client.driver.controller.nodes[node.node_id] = node @@ -1084,7 +1120,9 @@ def zp3111_fixture(client, zp3111_state): @pytest.fixture(name="express_controls_ezmultipli") -def express_controls_ezmultipli_fixture(client, express_controls_ezmultipli_state): +def express_controls_ezmultipli_fixture( + client, express_controls_ezmultipli_state +) -> Node: """Mock a Express Controls EZMultiPli node.""" node = Node(client, copy.deepcopy(express_controls_ezmultipli_state)) client.driver.controller.nodes[node.node_id] = node @@ -1092,7 +1130,7 @@ def express_controls_ezmultipli_fixture(client, express_controls_ezmultipli_stat @pytest.fixture(name="lock_home_connect_620") -def lock_home_connect_620_fixture(client, lock_home_connect_620_state): +def lock_home_connect_620_fixture(client, lock_home_connect_620_state) -> Node: """Mock a Home Connect 620 lock node.""" node = Node(client, copy.deepcopy(lock_home_connect_620_state)) client.driver.controller.nodes[node.node_id] = node @@ -1100,7 +1138,7 @@ def lock_home_connect_620_fixture(client, lock_home_connect_620_state): @pytest.fixture(name="switch_zooz_zen72") -def switch_zooz_zen72_fixture(client, switch_zooz_zen72_state): +def switch_zooz_zen72_fixture(client, switch_zooz_zen72_state) -> Node: """Mock a Zooz Zen72 switch node.""" node = Node(client, copy.deepcopy(switch_zooz_zen72_state)) client.driver.controller.nodes[node.node_id] = node @@ -1108,7 +1146,7 @@ def switch_zooz_zen72_fixture(client, switch_zooz_zen72_state): @pytest.fixture(name="indicator_test") -def indicator_test_fixture(client, indicator_test_state): +def indicator_test_fixture(client, indicator_test_state) -> Node: """Mock a indicator CC test node.""" node = Node(client, copy.deepcopy(indicator_test_state)) client.driver.controller.nodes[node.node_id] = node @@ -1116,7 +1154,7 @@ def indicator_test_fixture(client, indicator_test_state): @pytest.fixture(name="energy_production") -def energy_production_fixture(client, energy_production_state): +def energy_production_fixture(client, energy_production_state) -> Node: """Mock a mock node with Energy Production CC.""" node = Node(client, copy.deepcopy(energy_production_state)) client.driver.controller.nodes[node.node_id] = node @@ -1124,7 +1162,7 @@ def energy_production_fixture(client, energy_production_state): @pytest.fixture(name="nice_ibt4zwave") -def nice_ibt4zwave_fixture(client, nice_ibt4zwave_state): +def nice_ibt4zwave_fixture(client, nice_ibt4zwave_state) -> Node: """Mock a Nice IBT4ZWAVE cover node.""" node = Node(client, copy.deepcopy(nice_ibt4zwave_state)) client.driver.controller.nodes[node.node_id] = node @@ -1132,7 
+1170,7 @@ def nice_ibt4zwave_fixture(client, nice_ibt4zwave_state): @pytest.fixture(name="logic_group_zdb5100") -def logic_group_zdb5100_fixture(client, logic_group_zdb5100_state): +def logic_group_zdb5100_fixture(client, logic_group_zdb5100_state) -> Node: """Mock a ZDB5100 light node.""" node = Node(client, copy.deepcopy(logic_group_zdb5100_state)) client.driver.controller.nodes[node.node_id] = node @@ -1140,7 +1178,7 @@ def logic_group_zdb5100_fixture(client, logic_group_zdb5100_state): @pytest.fixture(name="central_scene_node") -def central_scene_node_fixture(client, central_scene_node_state): +def central_scene_node_fixture(client, central_scene_node_state) -> Node: """Mock a node with the Central Scene CC.""" node = Node(client, copy.deepcopy(central_scene_node_state)) client.driver.controller.nodes[node.node_id] = node @@ -1148,7 +1186,9 @@ def central_scene_node_fixture(client, central_scene_node_state): @pytest.fixture(name="light_device_class_is_null") -def light_device_class_is_null_fixture(client, light_device_class_is_null_state): +def light_device_class_is_null_fixture( + client, light_device_class_is_null_state +) -> Node: """Mock a node when device class is null.""" node = Node(client, copy.deepcopy(light_device_class_is_null_state)) client.driver.controller.nodes[node.node_id] = node @@ -1156,8 +1196,38 @@ def light_device_class_is_null_fixture(client, light_device_class_is_null_state) @pytest.fixture(name="basic_cc_sensor") -def basic_cc_sensor_fixture(client, basic_cc_sensor_state): +def basic_cc_sensor_fixture(client, basic_cc_sensor_state) -> Node: """Mock a node with a Basic CC.""" node = Node(client, copy.deepcopy(basic_cc_sensor_state)) client.driver.controller.nodes[node.node_id] = node return node + + +@pytest.fixture(name="window_covering_outbound_bottom") +def window_covering_outbound_bottom_fixture( + client, window_covering_outbound_bottom_state +) -> Node: + """Load node with Window Covering CC fixture data, with only the outbound bottom position supported.""" + node = Node(client, copy.deepcopy(window_covering_outbound_bottom_state)) + client.driver.controller.nodes[node.node_id] = node + return node + + +@pytest.fixture(name="siren_neo_coolcam") +def siren_neo_coolcam_fixture( + client: MagicMock, siren_neo_coolcam_state: NodeDataType +) -> Node: + """Load node for neo coolcam siren.""" + node = Node(client, siren_neo_coolcam_state) + client.driver.controller.nodes[node.node_id] = node + return node + + +@pytest.fixture(name="aeotec_smart_switch_7") +def aeotec_smart_switch_7_fixture( + client: MagicMock, aeotec_smart_switch_7_state: NodeDataType +) -> Node: + """Load node for Aeotec Smart Switch 7.""" + node = Node(client, aeotec_smart_switch_7_state) + client.driver.controller.nodes[node.node_id] = node + return node diff --git a/tests/components/zwave_js/fixtures/aeotec_smart_switch_7_state.json b/tests/components/zwave_js/fixtures/aeotec_smart_switch_7_state.json new file mode 100644 index 00000000000..ea7bbe8b16c --- /dev/null +++ b/tests/components/zwave_js/fixtures/aeotec_smart_switch_7_state.json @@ -0,0 +1,1863 @@ +{ + "nodeId": 9, + "index": 0, + "installerIcon": 1792, + "userIcon": 1792, + "status": 4, + "ready": true, + "isListening": true, + "isRouting": true, + "isSecure": true, + "manufacturerId": 881, + "productId": 175, + "productType": 3, + "firmwareVersion": "1.3", + "zwavePlusVersion": 1, + "deviceConfig": { + "filename": "/data/db/devices/0x0371/zw175.json", + "isEmbedded": true, + "manufacturer": "Aeotec Ltd.", + "manufacturerId": 
881, + "label": "ZW175", + "description": "Smart Switch 7", + "devices": [ + { + "productType": 3, + "productId": 175 + } + ], + "firmwareVersion": { + "min": "0.0", + "max": "255.255" + }, + "preferred": false, + "associations": {}, + "paramInformation": { + "_map": {} + }, + "metadata": { + "inclusion": "This product supports Security 2 Command Class. While a Security S2 enabled Controller is needed in order to fully use the security feature. This product can be included and operated in any Z-Wave network with other Z-Wave certified devices from other manufacturers and/or other applications. All non-battery operated nodes within the network will act as repeaters regardless of vendor to increase reliability of the network.\n\n(1) SmartStart Learn Mode\nSmartStart enabled products can be added into a Z-Wave network by scanning the Z-Wave QR Code present on the product with a controller providing SmartStart inclusion. No further action is required and the SmartStart product will be added automatically within 10 minutes of being switched on in the network vicinity.\nIndicator Light will become flash white light for 1s indicating the product has been powered, and then become flash blue light indicating SmartStart Learn Mode starts. It will become constantly bright yellow light after being assigned a NodeID.\nIf Adding succeeds, it will bright blue light for 2s and become Load Indicator Mode.\nIf Adding fails, it will bright red light for 2s and turn back to breathing blue light and then start SmartStart Learn Mode again.\nNote:\nThe label of QR Code on the product and package are used for SmartStart Inclusion. The Z-Wave DSK Code is at bottom of the package. Please do not remove or damage them.\n\n(2) Classic Inclusion Learn Mode\n1. Set your Z-Wave Controller into its 'Add Device' mode in order to add the product into your Z-Wave system. Refer to the Controller's manual if you are unsure of how to perform this step.\n2. Make sure the product is powered. If not, plug it into a wall socket and power on; its LED will be breathing blue light all the time. \n3. Click Action Button once, it will quickly flash blue light for 30 seconds until it is added into the network. It will become constantly bright yellow light after being assigned a NodeID.\n4. If your Z-Wave Controller supports S2 encryption, enter the first 5 digits of DSK into your Controller's interface if /when requested. The DSK is printed on its housing.\n5. If Adding fails, it will bright red light for 2s and then become breathing blue light; repeat steps 1 to 4. Contact us for further support if needed.\n6. If Adding succeeds, it will bright blue light for 2s and then turn to Load Indicator Mode. Now, this product is a part of your Z-Wave home control system. You can configure it and its automations via your Z-Wave system; please refer to your software's user guide for precise instructions.\nNote:\nIf Action Button is clicked again during the Classic Inclusion Learn Mode, the Classic Inclusion Learn Mode will exit. At the same time, Indicator Light will bright red light for 2s, and then become breathing blue light", + "exclusion": "1. Set your Z-Wave Controller into its 'Remove Device' mode in order to remove the product from your Z-Wave system. Refer to the Controller's manual if you are unsure of how to perform this step.\n2. Make sure the product is powered. If not, plug it into a wall socket and power on. \n3. Click Action Button 2 times quickly; it will bright violet light, up to 2s.\n4. 
If Removing fails, it will bright red light for 2s and then turn back to Load Indicator Mode; repeat steps 1 to 3. Contact us for further support if needed.\n5. If Removing succeeds, it will become breathing blue light. Now, it is removed from Z-Wave network successfully", + "reset": "If the primary controller is missing or inoperable, you may need to reset the device to factory settings.\nMake sure the product is powered. If not, plug it into a wall socket and power on. To complete the reset process manually, press and hold the Action Button for at least 15s and then release. The LED indicator will become breathing blue light, which indicates the reset operation is successful. Otherwise, please try again. Contact us for further support if needed. \nNote: \n1. This procedure should only be used when the primary controller is missing or inoperable.\n2. Factory Reset will:\n(a) Remove the product from Z-Wave network;\n(b) Delete the Association setting;\n(c) Restore the configuration settings to the default.", + "manual": "https://products.z-wavealliance.org/ProductManual/File?folder=&filename=MarketCertificationFiles/3437/Smart%20Switch%207%20product%20manual.pdf" + } + }, + "label": "ZW175", + "interviewAttempts": 1, + "isFrequentListening": false, + "maxDataRate": 100000, + "supportedDataRates": [40000, 100000], + "protocolVersion": 3, + "supportsBeaming": true, + "supportsSecurity": false, + "nodeType": 1, + "zwavePlusNodeType": 0, + "zwavePlusRoleType": 5, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing End Node" + }, + "generic": { + "key": 16, + "label": "Binary Switch" + }, + "specific": { + "key": 1, + "label": "Binary Power Switch" + } + }, + "interviewStage": "Complete", + "deviceDatabaseUrl": "https://devices.zwave-js.io/?jumpTo=0x0371:0x0003:0x00af:1.3", + "statistics": { + "commandsTX": 221, + "commandsRX": 1452, + "commandsDroppedRX": 22, + "commandsDroppedTX": 0, + "timeoutResponse": 3, + "rtt": 29.9, + "lastSeen": "2024-10-01T13:21:14.968Z" + }, + "highestSecurityClass": 1, + "isControllerNode": false, + "keepAwake": false, + "lastSeen": "2024-10-01T13:12:41.805Z", + "protocol": 0, + "values": [ + { + "endpoint": 0, + "commandClass": 37, + "commandClassName": "Binary Switch", + "property": "currentValue", + "propertyName": "currentValue", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": true, + "writeable": false, + "label": "Current value", + "stateful": true, + "secret": false + }, + "value": true + }, + { + "endpoint": 0, + "commandClass": 37, + "commandClassName": "Binary Switch", + "property": "targetValue", + "propertyName": "targetValue", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": true, + "writeable": true, + "label": "Target value", + "valueChangeOptions": ["transitionDuration"], + "stateful": true, + "secret": false + }, + "value": true + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "targetValue", + "propertyName": "targetValue", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Target value", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 99, + "stateful": true, + "secret": false + }, + "value": 50 + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "currentValue", + "propertyName": "currentValue", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Current 
value", + "min": 0, + "max": 99, + "stateful": true, + "secret": false + }, + "value": 50 + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "Up", + "propertyName": "Up", + "ccVersion": 2, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Perform a level change (Up)", + "ccSpecific": { + "switchType": 2 + }, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "Down", + "propertyName": "Down", + "ccVersion": 2, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Perform a level change (Down)", + "ccSpecific": { + "switchType": 2 + }, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "duration", + "propertyName": "duration", + "ccVersion": 2, + "metadata": { + "type": "duration", + "readable": true, + "writeable": false, + "label": "Remaining duration", + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "restorePrevious", + "propertyName": "restorePrevious", + "ccVersion": 2, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Restore previous value", + "states": { + "true": "Restore" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 43, + "commandClassName": "Scene Activation", + "property": "sceneId", + "propertyName": "sceneId", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Scene ID", + "valueChangeOptions": ["transitionDuration"], + "min": 1, + "max": 255, + "stateful": false, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 43, + "commandClassName": "Scene Activation", + "property": "dimmingDuration", + "propertyName": "dimmingDuration", + "ccVersion": 1, + "metadata": { + "type": "duration", + "readable": true, + "writeable": true, + "label": "Dimming duration", + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 65537, + "propertyName": "value", + "propertyKeyName": "Electric_kWh_Consumed", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [kWh]", + "ccSpecific": { + "meterType": 1, + "scale": 0, + "rateType": 1 + }, + "unit": "kWh", + "stateful": true, + "secret": false + }, + "value": 1.259 + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 66049, + "propertyName": "value", + "propertyKeyName": "Electric_W_Consumed", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [W]", + "ccSpecific": { + "meterType": 1, + "scale": 2, + "rateType": 1 + }, + "unit": "W", + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 66561, + "propertyName": "value", + "propertyKeyName": 
"Electric_V_Consumed", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [V]", + "ccSpecific": { + "meterType": 1, + "scale": 4, + "rateType": 1 + }, + "unit": "V", + "stateful": true, + "secret": false + }, + "value": 232.895 + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 66817, + "propertyName": "value", + "propertyKeyName": "Electric_A_Consumed", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [A]", + "ccSpecific": { + "meterType": 1, + "scale": 5, + "rateType": 1 + }, + "unit": "A", + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + "property": "reset", + "propertyName": "reset", + "ccVersion": 4, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Reset accumulated values", + "states": { + "true": "Reset" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "currentColor", + "propertyKey": 2, + "propertyName": "currentColor", + "propertyKeyName": "Red", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "description": "The current value of the Red channel.", + "label": "Current value (Red)", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + }, + "value": 255 + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "currentColor", + "propertyKey": 3, + "propertyName": "currentColor", + "propertyKeyName": "Green", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "description": "The current value of the Green channel.", + "label": "Current value (Green)", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + }, + "value": 251 + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "currentColor", + "propertyKey": 4, + "propertyName": "currentColor", + "propertyKeyName": "Blue", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "description": "The current value of the Blue channel.", + "label": "Current value (Blue)", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + }, + "value": 246 + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "currentColor", + "propertyName": "currentColor", + "ccVersion": 1, + "metadata": { + "type": "any", + "readable": true, + "writeable": false, + "label": "Current color", + "stateful": true, + "secret": false + }, + "value": { + "red": 255, + "green": 251, + "blue": 246 + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "targetColor", + "propertyName": "targetColor", + "ccVersion": 1, + "metadata": { + "type": "any", + "readable": true, + "writeable": true, + "label": "Target color", + "valueChangeOptions": ["transitionDuration"], + "stateful": true, + "secret": false + }, + "value": { + "red": 255, + "green": 251, + "blue": 246 + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "hexColor", + "propertyName": "hexColor", + "ccVersion": 1, + "metadata": { + "type": "color", + "readable": true, + 
"writeable": true, + "label": "RGB Color", + "valueChangeOptions": ["transitionDuration"], + "minLength": 6, + "maxLength": 7, + "stateful": true, + "secret": false + }, + "value": "fffbf6" + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "targetColor", + "propertyKey": 2, + "propertyName": "targetColor", + "propertyKeyName": "Red", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "The target value of the Red channel.", + "label": "Target value (Red)", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "targetColor", + "propertyKey": 3, + "propertyName": "targetColor", + "propertyKeyName": "Green", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "The target value of the Green channel.", + "label": "Target value (Green)", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "targetColor", + "propertyKey": 4, + "propertyName": "targetColor", + "propertyKeyName": "Blue", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "The target value of the Blue channel.", + "label": "Target value (Blue)", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "duration", + "propertyName": "duration", + "ccVersion": 1, + "metadata": { + "type": "duration", + "readable": true, + "writeable": false, + "label": "Remaining duration", + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 4, + "propertyName": "Current Overload Protection Threshold", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Current Overload Protection Threshold", + "default": 2415, + "min": 0, + "max": 2415, + "states": { + "0": "Disable" + }, + "unit": "W", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 2415 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 1, + "propertyName": "Alarm Trigger State", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Alarm Trigger State", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Trigger on open state", + "1": "Trigger on closed state" + }, + "valueSize": 2, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 256, + "propertyName": "React to Alarm Type: Smoke Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Smoke Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + 
"value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 512, + "propertyName": "React to Alarm Type: CO Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: CO Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 1024, + "propertyName": "React to Alarm Type: CO2 Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "React to CO2 Alarms from other Z-Wave devices.", + "label": "React to Alarm Type: CO2 Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 2048, + "propertyName": "React to Alarm Type: Heart Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Heart Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 4096, + "propertyName": "React to Alarm Type: Water Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Water Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 8192, + "propertyName": "React to Alarm Type: Access Control Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Access Control Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 16384, + "propertyName": "React to Alarm Type: Home Security Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Home Security Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 8, + "propertyName": "Switch Action on Alarm", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Switch Action on Alarm", + "default": 0, + "min": 0, + "max": 3, 
+ "states": { + "0": "Disable", + "1": "Turn on", + "2": "Turn off", + "3": "Cyclce on/off in 5 second intervals" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 10, + "propertyName": "Method to Disable Alarm", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 10-255 - Sets the method to disable the alarm or alarm duration", + "label": "Method to Disable Alarm", + "default": 0, + "min": 0, + "max": 255, + "states": { + "0": "Tap action button 3x", + "1": "Idle state from corresponding alarm" + }, + "unit": "seconds", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 18, + "propertyName": "LED Blinking Frequency", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "LED Blinking Frequency", + "default": 2, + "min": 0, + "max": 9, + "unit": "Hz", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 2 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 20, + "propertyName": "State After Power Failure", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "State After Power Failure", + "default": 0, + "min": 0, + "max": 2, + "states": { + "0": "Previous state", + "1": "Always on", + "2": "Always off" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 80, + "propertyName": "Report Type To Send", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Report Type To Send", + "default": 2, + "min": 0, + "max": 2, + "states": { + "0": "Disable", + "1": "Basic CC Report", + "2": "Binary Switch CC Report" + }, + "valueSize": 1, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 2 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 81, + "propertyName": "LED Indicator", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "LED Indicator", + "default": 2, + "min": 0, + "max": 2, + "states": { + "0": "Disable", + "1": "Night light mode", + "2": "On/off mode" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 2 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 82, + "propertyKey": 4278190080, + "propertyName": "Night Light (Enable): Hour", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 0-23", + "label": "Night Light (Enable): Hour", + "default": 18, + "min": 0, + "max": 23, + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 18 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 82, + "propertyKey": 16711680, + "propertyName": "Night Light (Enable): Minute", + "ccVersion": 1, + "metadata": { + 
"type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 0-59", + "label": "Night Light (Enable): Minute", + "default": 0, + "min": 0, + "max": 59, + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 82, + "propertyKey": 65280, + "propertyName": "Night Light (Disable): Hour", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 0-23", + "label": "Night Light (Disable): Hour", + "default": 8, + "min": 0, + "max": 23, + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 8 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 82, + "propertyKey": 255, + "propertyName": "Night Light (Disable): Minute", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 0-59", + "label": "Night Light (Disable): Minute", + "default": 0, + "min": 0, + "max": 59, + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 91, + "propertyName": "Power Change Threshold", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Threshold change in power consumption to induce an automatic report", + "label": "Power Change Threshold", + "default": 0, + "min": 0, + "max": 2300, + "states": { + "0": "Disable" + }, + "unit": "W", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 92, + "propertyName": "Power (kWh) Change Threshold", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Power (kWh) Change Threshold", + "default": 0, + "min": 0, + "max": 10000, + "states": { + "0": "Disable" + }, + "unit": "KwH", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 93, + "propertyName": "Current Change Threshold", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Current Change Threshold", + "default": 0, + "min": 0, + "max": 100, + "states": { + "0": "Disable" + }, + "unit": "A", + "valueSize": 1, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 101, + "propertyKey": 1, + "propertyName": "Automatic Report: kWh", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Automatic Report: kWh", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 101, + "propertyKey": 2, + "propertyName": "Automatic Report: Power", + "ccVersion": 1, + "metadata": { + "type": "number", 
+ "readable": true, + "writeable": true, + "label": "Automatic Report: Power", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 101, + "propertyKey": 4, + "propertyName": "Automatic Report: Voltage", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Automatic Report: Voltage", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 101, + "propertyKey": 8, + "propertyName": "Automatic Report: Current", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Automatic Report: Current", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 111, + "propertyName": "Automatic Reporting Interval", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Automatic Reporting Interval", + "default": 600, + "min": 0, + "max": 2592000, + "states": { + "0": "Disable" + }, + "unit": "seconds", + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 600 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 19, + "propertyName": "LED Blink Duration", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": false, + "writeable": true, + "label": "LED Blink Duration", + "default": 0, + "min": 0, + "max": 255, + "unit": "seconds", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + } + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 255, + "propertyName": "Reset to Factory Default Setting", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": false, + "writeable": true, + "label": "Reset to Factory Default Setting", + "default": 0, + "min": 0, + "max": 1431655765, + "states": { + "0": "Normal Operation", + "1": "Resets all configuration parameters to default setting", + "1431655765": "Reset the product to factory default setting and exclude from Z-Wave network" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + } + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "Power Management", + "propertyKey": "Over-current status", + "propertyName": "Power Management", + "propertyKeyName": "Over-current status", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Over-current status", + "ccSpecific": { + "notificationType": 8 + }, + "min": 0, + "max": 255, + "states": { + "0": "idle", + "6": "Over-current detected" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "Power Management", + 
"propertyKey": "Over-load status", + "propertyName": "Power Management", + "propertyKeyName": "Over-load status", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Over-load status", + "ccSpecific": { + "notificationType": 8 + }, + "min": 0, + "max": 255, + "states": { + "0": "idle", + "8": "Over-load detected" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "System", + "propertyKey": "Hardware status", + "propertyName": "System", + "propertyKeyName": "Hardware status", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Hardware status", + "ccSpecific": { + "notificationType": 9 + }, + "min": 0, + "max": 255, + "states": { + "0": "idle", + "3": "System hardware failure (with failure code)" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmType", + "propertyName": "alarmType", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Type", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmLevel", + "propertyName": "alarmLevel", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Level", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "manufacturerId", + "propertyName": "manufacturerId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Manufacturer ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 881 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productType", + "propertyName": "productType", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product type", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productId", + "propertyName": "productId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 175 + }, + { + "endpoint": 0, + "commandClass": 117, + "commandClassName": "Protection", + "property": "local", + "propertyName": "local", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Local protection state", + "states": { + "0": "Unprotected", + "2": "NoOperationPossible" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 117, + "commandClassName": "Protection", + "property": "rf", + "propertyName": "rf", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "RF protection state", + "states": { + "0": "Unprotected", + "1": "NoControl" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, 
+ "commandClass": 117, + "commandClassName": "Protection", + "property": "exclusiveControlNodeId", + "propertyName": "exclusiveControlNodeId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Node ID with exclusive control", + "min": 1, + "max": 232, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 117, + "commandClassName": "Protection", + "property": "timeout", + "propertyName": "timeout", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "RF protection timeout", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "libraryType", + "propertyName": "libraryType", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Library type", + "states": { + "0": "Unknown", + "1": "Static Controller", + "2": "Controller", + "3": "Enhanced Slave", + "4": "Slave", + "5": "Installer", + "6": "Routing Slave", + "7": "Bridge Controller", + "8": "Device under Test", + "9": "N/A", + "10": "AV Remote", + "11": "AV Device" + }, + "stateful": true, + "secret": false + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "protocolVersion", + "propertyName": "protocolVersion", + "ccVersion": 2, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + }, + "value": "6.4" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "firmwareVersions", + "propertyName": "firmwareVersions", + "ccVersion": 2, + "metadata": { + "type": "string[]", + "readable": true, + "writeable": false, + "label": "Z-Wave chip firmware versions", + "stateful": true, + "secret": false + }, + "value": ["1.3"] + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hardwareVersion", + "propertyName": "hardwareVersion", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Z-Wave chip hardware version", + "stateful": true, + "secret": false + }, + "value": 175 + } + ], + "endpoints": [ + { + "nodeId": 9, + "index": 0, + "installerIcon": 1792, + "userIcon": 1792, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing End Node" + }, + "generic": { + "key": 16, + "label": "Binary Switch" + }, + "specific": { + "key": 1, + "label": "Binary Power Switch" + } + }, + "commandClasses": [ + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, + "isSecure": false + }, + { + "id": 85, + "name": "Transport Service", + "version": 2, + "isSecure": false + }, + { + "id": 152, + "name": "Security", + "version": 1, + "isSecure": true + }, + { + "id": 159, + "name": "Security 2", + "version": 1, + "isSecure": true + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": true + }, + { + "id": 89, + "name": "Association Group Information", + "version": 1, + "isSecure": true + }, + { + "id": 112, + "name": "Configuration", + "version": 1, + "isSecure": true + }, + { + "id": 44, + "name": "Scene Actuator Configuration", + "version": 1, + "isSecure": true + }, + { + "id": 43, + "name": "Scene Activation", + "version": 1, + "isSecure": true + }, + { + 
"id": 129, + "name": "Clock", + "version": 1, + "isSecure": true + }, + { + "id": 113, + "name": "Notification", + "version": 4, + "isSecure": true + }, + { + "id": 50, + "name": "Meter", + "version": 4, + "isSecure": true + }, + { + "id": 37, + "name": "Binary Switch", + "version": 1, + "isSecure": true + }, + { + "id": 51, + "name": "Color Switch", + "version": 1, + "isSecure": true + }, + { + "id": 38, + "name": "Multilevel Switch", + "version": 2, + "isSecure": true + }, + { + "id": 117, + "name": "Protection", + "version": 2, + "isSecure": true + }, + { + "id": 115, + "name": "Powerlevel", + "version": 1, + "isSecure": true + }, + { + "id": 122, + "name": "Firmware Update Meta Data", + "version": 4, + "isSecure": true + }, + { + "id": 134, + "name": "Version", + "version": 2, + "isSecure": true + }, + { + "id": 90, + "name": "Device Reset Locally", + "version": 1, + "isSecure": true + }, + { + "id": 114, + "name": "Manufacturer Specific", + "version": 2, + "isSecure": true + } + ] + } + ] +} diff --git a/tests/components/zwave_js/fixtures/siren_neo_coolcam_nas-ab01z_state.json b/tests/components/zwave_js/fixtures/siren_neo_coolcam_nas-ab01z_state.json new file mode 100644 index 00000000000..41fc9e37423 --- /dev/null +++ b/tests/components/zwave_js/fixtures/siren_neo_coolcam_nas-ab01z_state.json @@ -0,0 +1,746 @@ +{ + "nodeId": 36, + "index": 0, + "installerIcon": 3840, + "userIcon": 3840, + "status": 4, + "ready": true, + "isListening": false, + "isRouting": true, + "manufacturerId": 600, + "productId": 4232, + "productType": 3, + "firmwareVersion": "2.94", + "zwavePlusVersion": 1, + "deviceConfig": { + "filename": "/usr/src/app/store/.config-db/devices/0x0258/nas-ab01z.json", + "isEmbedded": true, + "manufacturer": "Shenzhen Neo Electronics Co., Ltd.", + "manufacturerId": 600, + "label": "NAS-AB01Z", + "description": "Siren Alarm", + "devices": [ + { + "productType": 3, + "productId": 136 + }, + { + "productType": 3, + "productId": 4232 + }, + { + "productType": 3, + "productId": 8328 + }, + { + "productType": 3, + "productId": 24712 + } + ], + "firmwareVersion": { + "min": "0.0", + "max": "255.255" + }, + "preferred": false, + "associations": {}, + "paramInformation": { + "_map": {} + } + }, + "label": "NAS-AB01Z", + "interviewAttempts": 0, + "isFrequentListening": "1000ms", + "maxDataRate": 100000, + "supportedDataRates": [40000, 100000], + "protocolVersion": 3, + "supportsBeaming": true, + "supportsSecurity": false, + "nodeType": 1, + "zwavePlusNodeType": 0, + "zwavePlusRoleType": 7, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing End Node" + }, + "generic": { + "key": 16, + "label": "Binary Switch" + }, + "specific": { + "key": 5, + "label": "Siren" + } + }, + "interviewStage": "Complete", + "deviceDatabaseUrl": "https://devices.zwave-js.io/?jumpTo=0x0258:0x0003:0x1088:2.94", + "statistics": { + "commandsTX": 15, + "commandsRX": 7, + "commandsDroppedRX": 0, + "commandsDroppedTX": 0, + "timeoutResponse": 0, + "rtt": 582.5, + "lastSeen": "2024-10-01T10:22:24.457Z", + "lwr": { + "repeaters": [], + "protocolDataRate": 2 + } + }, + "isControllerNode": false, + "keepAwake": false, + "lastSeen": "2024-09-30T15:07:11.320Z", + "protocol": 0, + "values": [ + { + "endpoint": 0, + "commandClass": 37, + "commandClassName": "Binary Switch", + "property": "currentValue", + "propertyName": "currentValue", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": true, + "writeable": false, + "label": "Current value", + "stateful": true, + "secret": false + }, + 
"value": false + }, + { + "endpoint": 0, + "commandClass": 37, + "commandClassName": "Binary Switch", + "property": "targetValue", + "propertyName": "targetValue", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": true, + "writeable": true, + "label": "Target value", + "valueChangeOptions": ["transitionDuration"], + "stateful": true, + "secret": false + }, + "value": false + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 1, + "propertyName": "Alarm Volume", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Alarm Volume", + "default": 2, + "min": 1, + "max": 3, + "states": { + "1": "Low", + "2": "Middle", + "3": "High" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 2, + "propertyName": "Alarm Duration", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Alarm Duration", + "default": 2, + "min": 0, + "max": 255, + "states": { + "0": "Off", + "1": "30 seconds", + "2": "1 minute", + "3": "5 minutes", + "255": "Always on" + }, + "valueSize": 1, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 3, + "propertyName": "Doorbell Duration", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Doorbell Duration", + "default": 1, + "min": 0, + "max": 255, + "states": { + "0": "Off", + "255": "Always" + }, + "valueSize": 1, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 16 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 4, + "propertyName": "Doorbell Volume", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Doorbell Volume", + "default": 2, + "min": 1, + "max": 3, + "states": { + "1": "Low", + "2": "Middle", + "3": "High" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 5, + "propertyName": "Alarm Sound Selection", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Alarm Sound Selection", + "default": 10, + "min": 1, + "max": 10, + "states": { + "1": "Doorbell", + "2": "F\u00fcr Elise", + "3": "Westminster Chimes", + "4": "Ding Dong", + "5": "William Tell", + "6": "Rondo Alla Turca", + "7": "Police Siren", + "8": "Evacuation", + "9": "Beep Beep", + "10": "Beep" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 10 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 6, + "propertyName": "Doorbell Sound Selection", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Doorbell Sound Selection", + "default": 9, + "min": 1, + "max": 10, + "states": { + "1": "Doorbell", + "2": "F\u00fcr Elise", + "3": "Westminster Chimes", + "4": "Ding Dong", + "5": "William Tell", + "6": "Rondo Alla Turca", + "7": "Police Siren", + "8": "Evacuation", + "9": "Beep Beep", + "10": "Beep" 
+ }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 10 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 7, + "propertyName": "Default Siren Sound", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Default Siren Sound", + "default": 1, + "min": 1, + "max": 2, + "states": { + "1": "Alarm Sound", + "2": "Doorbell Sound" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 2 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 8, + "propertyName": "Alarm LED", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Alarm LED", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 1, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyName": "Doorbell LED", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Doorbell LED", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 1, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "Siren", + "propertyKey": "Siren status", + "propertyName": "Siren", + "propertyKeyName": "Siren status", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Siren status", + "ccSpecific": { + "notificationType": 14 + }, + "min": 0, + "max": 255, + "states": { + "0": "idle", + "1": "Siren active" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmType", + "propertyName": "alarmType", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Type", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmLevel", + "propertyName": "alarmLevel", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Level", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "manufacturerId", + "propertyName": "manufacturerId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Manufacturer ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 600 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productType", + "propertyName": "productType", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product type", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + 
"property": "productId", + "propertyName": "productId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 4232 + }, + { + "endpoint": 0, + "commandClass": 128, + "commandClassName": "Battery", + "property": "level", + "propertyName": "level", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Battery level", + "min": 0, + "max": 100, + "unit": "%", + "stateful": true, + "secret": false + }, + "value": 89 + }, + { + "endpoint": 0, + "commandClass": 128, + "commandClassName": "Battery", + "property": "isLow", + "propertyName": "isLow", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": true, + "writeable": false, + "label": "Low battery level", + "stateful": true, + "secret": false + }, + "value": false + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "libraryType", + "propertyName": "libraryType", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Library type", + "states": { + "0": "Unknown", + "1": "Static Controller", + "2": "Controller", + "3": "Enhanced Slave", + "4": "Slave", + "5": "Installer", + "6": "Routing Slave", + "7": "Bridge Controller", + "8": "Device under Test", + "9": "N/A", + "10": "AV Remote", + "11": "AV Device" + }, + "stateful": true, + "secret": false + }, + "value": 6 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "protocolVersion", + "propertyName": "protocolVersion", + "ccVersion": 2, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + }, + "value": "4.38" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "firmwareVersions", + "propertyName": "firmwareVersions", + "ccVersion": 2, + "metadata": { + "type": "string[]", + "readable": true, + "writeable": false, + "label": "Z-Wave chip firmware versions", + "stateful": true, + "secret": false + }, + "value": ["2.94"] + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hardwareVersion", + "propertyName": "hardwareVersion", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Z-Wave chip hardware version", + "stateful": true, + "secret": false + }, + "value": 48 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "value", + "propertyName": "value", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Indicator value", + "ccSpecific": { + "indicatorId": 0 + }, + "min": 0, + "max": 255, + "stateful": true, + "secret": false + }, + "value": 0 + } + ], + "endpoints": [ + { + "nodeId": 36, + "index": 0, + "installerIcon": 3840, + "userIcon": 3840, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing End Node" + }, + "generic": { + "key": 16, + "label": "Binary Switch" + }, + "specific": { + "key": 5, + "label": "Siren" + } + }, + "commandClasses": [ + { + "id": 37, + "name": "Binary Switch", + "version": 1, + "isSecure": false + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": false + }, + { + "id": 89, + "name": "Association Group Information", + "version": 1, + 
"isSecure": false + }, + { + "id": 128, + "name": "Battery", + "version": 1, + "isSecure": false + }, + { + "id": 114, + "name": "Manufacturer Specific", + "version": 2, + "isSecure": false + }, + { + "id": 115, + "name": "Powerlevel", + "version": 1, + "isSecure": false + }, + { + "id": 134, + "name": "Version", + "version": 2, + "isSecure": false + }, + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, + "isSecure": false + }, + { + "id": 90, + "name": "Device Reset Locally", + "version": 1, + "isSecure": false + }, + { + "id": 112, + "name": "Configuration", + "version": 1, + "isSecure": false + }, + { + "id": 113, + "name": "Notification", + "version": 8, + "isSecure": false + }, + { + "id": 135, + "name": "Indicator", + "version": 1, + "isSecure": false + } + ] + } + ] +} diff --git a/tests/components/zwave_js/fixtures/window_covering_outbound_bottom.json b/tests/components/zwave_js/fixtures/window_covering_outbound_bottom.json new file mode 100644 index 00000000000..4791e0d9486 --- /dev/null +++ b/tests/components/zwave_js/fixtures/window_covering_outbound_bottom.json @@ -0,0 +1,282 @@ +{ + "nodeId": 2, + "index": 0, + "status": 4, + "ready": true, + "isListening": true, + "isRouting": true, + "isSecure": false, + "interviewAttempts": 1, + "isFrequentListening": false, + "maxDataRate": 100000, + "supportedDataRates": [40000, 9600, 100000], + "protocolVersion": 3, + "supportsBeaming": true, + "supportsSecurity": false, + "nodeType": 1, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing End Node" + }, + "generic": { + "key": 6, + "label": "Appliance" + }, + "specific": { + "key": 1, + "label": "General Appliance" + } + }, + "interviewStage": "Complete", + "statistics": { + "commandsTX": 8, + "commandsRX": 5, + "commandsDroppedRX": 0, + "commandsDroppedTX": 0, + "timeoutResponse": 2, + "rtt": 96.3, + "lastSeen": "2024-09-12T11:46:43.065Z" + }, + "highestSecurityClass": -1, + "isControllerNode": false, + "keepAwake": false, + "lastSeen": "2024-09-12T11:46:43.065Z", + "protocol": 0, + "values": [ + { + "endpoint": 0, + "commandClass": 106, + "commandClassName": "Window Covering", + "property": "levelChangeUp", + "propertyKey": 13, + "propertyName": "levelChangeUp", + "propertyKeyName": "Outbound Bottom", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Open - Outbound Bottom", + "ccSpecific": { + "parameter": 13 + }, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + "secret": false + }, + "value": true + }, + { + "endpoint": 0, + "commandClass": 106, + "commandClassName": "Window Covering", + "property": "levelChangeDown", + "propertyKey": 13, + "propertyName": "levelChangeDown", + "propertyKeyName": "Outbound Bottom", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Close - Outbound Bottom", + "ccSpecific": { + "parameter": 13 + }, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + "secret": false + }, + "value": true + }, + { + "endpoint": 0, + "commandClass": 106, + "commandClassName": "Window Covering", + "property": "targetValue", + "propertyKey": 13, + "propertyName": "targetValue", + "propertyKeyName": "Outbound Bottom", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Target value - Outbound Bottom", + "ccSpecific": { + "parameter": 13 
+ }, + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 99, + "states": { + "0": "Closed", + "99": "Open" + }, + "stateful": true, + "secret": false + }, + "value": 52 + }, + { + "endpoint": 0, + "commandClass": 106, + "commandClassName": "Window Covering", + "property": "currentValue", + "propertyKey": 13, + "propertyName": "currentValue", + "propertyKeyName": "Outbound Bottom", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Current value - Outbound Bottom", + "ccSpecific": { + "parameter": 13 + }, + "min": 0, + "max": 99, + "states": { + "0": "Closed", + "99": "Open" + }, + "stateful": true, + "secret": false + }, + "value": 52 + }, + { + "endpoint": 0, + "commandClass": 106, + "commandClassName": "Window Covering", + "property": "duration", + "propertyKey": 13, + "propertyName": "duration", + "propertyKeyName": "Outbound Bottom", + "ccVersion": 1, + "metadata": { + "type": "duration", + "readable": true, + "writeable": false, + "label": "Remaining duration - Outbound Bottom", + "ccSpecific": { + "parameter": 13 + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "firmwareVersions", + "propertyName": "firmwareVersions", + "ccVersion": 1, + "metadata": { + "type": "string[]", + "readable": true, + "writeable": false, + "label": "Z-Wave chip firmware versions", + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "libraryType", + "propertyName": "libraryType", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Library type", + "states": { + "0": "Unknown", + "1": "Static Controller", + "2": "Controller", + "3": "Enhanced Slave", + "4": "Slave", + "5": "Installer", + "6": "Routing Slave", + "7": "Bridge Controller", + "8": "Device under Test", + "9": "N/A", + "10": "AV Remote", + "11": "AV Device" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "protocolVersion", + "propertyName": "protocolVersion", + "ccVersion": 1, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + } + } + ], + "endpoints": [ + { + "nodeId": 2, + "index": 0, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing End Node" + }, + "generic": { + "key": 6, + "label": "Appliance" + }, + "specific": { + "key": 1, + "label": "General Appliance" + } + }, + "commandClasses": [ + { + "id": 134, + "name": "Version", + "version": 1, + "isSecure": false + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 106, + "name": "Window Covering", + "version": 1, + "isSecure": false + } + ] + } + ] +} diff --git a/tests/components/zwave_js/snapshots/test_diagnostics.ambr b/tests/components/zwave_js/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..dc0dbba59b5 --- /dev/null +++ b/tests/components/zwave_js/snapshots/test_diagnostics.ambr @@ -0,0 +1,3428 @@ +# serializer version: 1 +# name: test_device_diagnostics + dict({ + 'entities': list([ + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.multisensor_6_any', + 'hidden_by': None, + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Any', + 'primary_value': dict({ + 'command_class': 48, + 'command_class_name': 'Binary Sensor', + 'endpoint': 0, + 'property': 'Any', + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Any', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-48-0-Any', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': 'diagnostic', + 'entity_id': 'binary_sensor.multisensor_6_low_battery_level', + 'hidden_by': None, + 'original_device_class': 'battery', + 'original_icon': None, + 'original_name': 'Low battery level', + 'primary_value': dict({ + 'command_class': 128, + 'command_class_name': 'Battery', + 'endpoint': 0, + 'property': 'isLow', + 'property_key': None, + 'property_key_name': None, + 'property_name': 'isLow', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-128-0-isLow', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.multisensor_6_motion_detection', + 'hidden_by': None, + 'original_device_class': 'motion', + 'original_icon': None, + 'original_name': 'Motion detection', + 'primary_value': dict({ + 'command_class': 113, + 'command_class_name': 'Notification', + 'endpoint': 0, + 'property': 'Home Security', + 'property_key': 'Motion sensor status', + 'property_key_name': 'Motion sensor status', + 'property_name': 'Home Security', + 'state_key': 8, + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-113-0-Home Security-Motion sensor status', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': 'diagnostic', + 'entity_id': 'binary_sensor.multisensor_6_tampering_product_cover_removed', + 'hidden_by': None, + 'original_device_class': 'tamper', + 'original_icon': None, + 'original_name': 'Tampering, product cover removed', + 'primary_value': dict({ + 'command_class': 113, + 'command_class_name': 'Notification', + 'endpoint': 0, + 'property': 'Home Security', + 'property_key': 'Cover status', + 'property_key_name': 'Cover status', + 'property_name': 'Home Security', + 'state_key': 3, + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-113-0-Home Security-Cover status', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': 'config', + 'entity_id': 'button.multisensor_6_idle_home_security_cover_status', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Idle Home Security Cover status', + 'primary_value': dict({ + 'command_class': 113, + 'command_class_name': 'Notification', + 'endpoint': 0, + 'property': 'Home Security', + 'property_key': 'Cover status', + 'property_key_name': 'Cover status', + 'property_name': 'Home Security', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-113-0-Home Security-Cover status', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': 'config', + 'entity_id': 'button.multisensor_6_idle_home_security_motion_sensor_status', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Idle Home Security Motion sensor status', + 'primary_value': dict({ + 'command_class': 113, + 'command_class_name': 'Notification', + 'endpoint': 0, + 'property': 'Home Security', + 
'property_key': 'Motion sensor status', + 'property_key_name': 'Motion sensor status', + 'property_name': 'Home Security', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-113-0-Home Security-Motion sensor status', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.multisensor_6_basic', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Basic', + 'primary_value': dict({ + 'command_class': 32, + 'command_class_name': 'Basic', + 'endpoint': 0, + 'property': 'currentValue', + 'property_key': None, + 'property_key_name': None, + 'property_name': 'currentValue', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-32-0-currentValue', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_battery_threshold', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery Threshold', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 44, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Battery Threshold', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-44', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_default_unit_of_the_automatic_temperature_report', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Default unit of the automatic temperature report', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 64, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Default unit of the automatic temperature report', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-64', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_group_1_report_interval', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 1 Report Interval', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 111, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Group 1 Report Interval', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-111', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_group_2_report_interval', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 2 Report Interval', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 112, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Group 2 Report Interval', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-112', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 
'number.multisensor_6_group_3_report_interval', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 3 Report Interval', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 113, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Group 3 Report Interval', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-113', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_humidity_sensor_calibration', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Humidity Sensor Calibration', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 202, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Humidity Sensor Calibration', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-202', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_humidity_threshold', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Humidity Threshold', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 42, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Humidity Threshold', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-42', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_low_battery_report', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Low Battery Report', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 39, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Low Battery Report', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-39', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_lower_limit_value_of_humidity_sensor', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lower limit value of humidity sensor', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 52, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Lower limit value of humidity sensor', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-52', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_lower_limit_value_of_lighting_sensor', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lower limit value of Lighting sensor', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 54, + 'property_key': None, + 'property_key_name': None, + 'property_name': 
'Lower limit value of Lighting sensor', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-54', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_lower_limit_value_of_ultraviolet_sensor', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lower limit value of ultraviolet sensor', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 56, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Lower limit value of ultraviolet sensor', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-56', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_lower_temperature_limit', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lower temperature limit', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 50, + 'property_key': 4294901760, + 'property_key_name': None, + 'property_name': 'Lower temperature limit', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-50-4294901760', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_luminance_sensor_calibration', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Luminance Sensor Calibration', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 203, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Luminance Sensor Calibration', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-203', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_luminance_threshold', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Luminance Threshold', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 43, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Luminance Threshold', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-43', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_motion_sensor_reset_timeout', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion Sensor reset timeout', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 3, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Motion Sensor reset timeout', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-3', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_recover_limit_value_of_humidity_sensor', + 
'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Recover limit value of humidity sensor', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 58, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Recover limit value of humidity sensor', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-58', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_recover_limit_value_of_lighting_sensor', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Recover limit value of Lighting sensor.', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 59, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Recover limit value of Lighting sensor.', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-59', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_recover_limit_value_of_temperature_sensor', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Recover limit value of temperature sensor', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 57, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Recover limit value of temperature sensor', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-57', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_recover_limit_value_of_ultraviolet_sensor', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Recover limit value of Ultraviolet sensor', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 60, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Recover limit value of Ultraviolet sensor', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-60', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_send_a_report_if_the_measurement_is_out_of_limits', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Send a report if the measurement is out of limits', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 48, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Send a report if the measurement is out of limits', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-48', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_temperature_calibration', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature Calibration', + 'primary_value': dict({ + 
'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 201, + 'property_key': 65280, + 'property_key_name': None, + 'property_name': 'Temperature Calibration', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-201-65280', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_temperature_threshold', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature Threshold', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 41, + 'property_key': 16776960, + 'property_key_name': None, + 'property_name': 'Temperature Threshold', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-41-16776960', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_timeout_after_wake_up', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Timeout after wake up', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 8, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Timeout after wake up', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-8', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_ultraviolet_sensor_calibration', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ultraviolet Sensor Calibration', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 204, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Ultraviolet Sensor Calibration', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-204', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_ultraviolet_threshold', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ultraviolet Threshold', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 45, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Ultraviolet Threshold', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-45', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_upper_limit_value_of_humidity_sensor', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Upper limit value of humidity sensor', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 51, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Upper limit value of humidity sensor', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-51', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 
'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_upper_limit_value_of_lighting_sensor', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Upper limit value of Lighting sensor', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 53, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Upper limit value of Lighting sensor', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-53', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_upper_limit_value_of_ultraviolet_sensor', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Upper limit value of ultraviolet sensor', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 55, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Upper limit value of ultraviolet sensor', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-55', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'number', + 'entity_category': 'config', + 'entity_id': 'number.multisensor_6_upper_temperature_limit', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Upper temperature limit', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 49, + 'property_key': 4294901760, + 'property_key_name': None, + 'property_name': 'Upper temperature limit', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-49-4294901760', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'select', + 'entity_category': 'config', + 'entity_id': 'select.multisensor_6_disable_enable_configuration_lock', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Disable/Enable Configuration Lock', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 252, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Disable/Enable Configuration Lock', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-252', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'select', + 'entity_category': 'config', + 'entity_id': 'select.multisensor_6_led_function', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED function', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 81, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'LED function', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-81', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'select', + 'entity_category': 'config', + 'entity_id': 'select.multisensor_6_motion_sensor_sensitivity', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion sensor sensitivity', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 
'Configuration', + 'endpoint': 0, + 'property': 4, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Motion sensor sensitivity', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-4', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'select', + 'entity_category': 'config', + 'entity_id': 'select.multisensor_6_motion_sensor_triggered_command', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion Sensor Triggered Command', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 5, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Motion Sensor Triggered Command', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-5', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'select', + 'entity_category': 'config', + 'entity_id': 'select.multisensor_6_selective_reporting', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Selective Reporting', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 40, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Selective Reporting', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-40', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'select', + 'entity_category': 'config', + 'entity_id': 'select.multisensor_6_send_alarm_report_if_low_temperature', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Send Alarm Report if low temperature', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 46, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Send Alarm Report if low temperature', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-46', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'select', + 'entity_category': 'config', + 'entity_id': 'select.multisensor_6_stay_awake_in_battery_mode', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stay Awake in Battery Mode', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 2, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Stay Awake in Battery Mode', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-2', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'select', + 'entity_category': 'config', + 'entity_id': 'select.multisensor_6_temperature_calibration_unit', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature Calibration (Unit)', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 201, + 'property_key': 255, + 'property_key_name': None, + 'property_name': 'Temperature Calibration (Unit)', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-201-255', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'select', + 
'entity_category': 'config', + 'entity_id': 'select.multisensor_6_temperature_threshold_unit', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature Threshold (Unit)', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 41, + 'property_key': 15, + 'property_key_name': None, + 'property_name': 'Temperature Threshold (Unit)', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-41-15', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.multisensor_6_air_temperature', + 'hidden_by': None, + 'original_device_class': 'temperature', + 'original_icon': None, + 'original_name': 'Air temperature', + 'primary_value': dict({ + 'command_class': 49, + 'command_class_name': 'Multilevel Sensor', + 'endpoint': 0, + 'property': 'Air temperature', + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Air temperature', + }), + 'supported_features': 0, + 'unit_of_measurement': '°C', + 'value_id': '52-49-0-Air temperature', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': 'diagnostic', + 'entity_id': 'sensor.multisensor_6_battery_level', + 'hidden_by': None, + 'original_device_class': 'battery', + 'original_icon': None, + 'original_name': 'Battery level', + 'primary_value': dict({ + 'command_class': 128, + 'command_class_name': 'Battery', + 'endpoint': 0, + 'property': 'level', + 'property_key': None, + 'property_key_name': None, + 'property_name': 'level', + }), + 'supported_features': 0, + 'unit_of_measurement': '%', + 'value_id': '52-128-0-level', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.multisensor_6_humidity', + 'hidden_by': None, + 'original_device_class': 'humidity', + 'original_icon': None, + 'original_name': 'Humidity', + 'primary_value': dict({ + 'command_class': 49, + 'command_class_name': 'Multilevel Sensor', + 'endpoint': 0, + 'property': 'Humidity', + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Humidity', + }), + 'supported_features': 0, + 'unit_of_measurement': '%', + 'value_id': '52-49-0-Humidity', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.multisensor_6_illuminance', + 'hidden_by': None, + 'original_device_class': 'illuminance', + 'original_icon': None, + 'original_name': 'Illuminance', + 'primary_value': dict({ + 'command_class': 49, + 'command_class_name': 'Multilevel Sensor', + 'endpoint': 0, + 'property': 'Illuminance', + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Illuminance', + }), + 'supported_features': 0, + 'unit_of_measurement': 'lx', + 'value_id': '52-49-0-Illuminance', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': 'diagnostic', + 'entity_id': 'sensor.multisensor_6_out_of_limit_state_of_the_sensors', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Out-of-limit state of the Sensors', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 61, + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Out-of-limit state of the Sensors', + }), + 
'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-61', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': 'diagnostic', + 'entity_id': 'sensor.multisensor_6_power_mode', + 'hidden_by': None, + 'original_device_class': 'enum', + 'original_icon': None, + 'original_name': 'Power Mode', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 9, + 'property_key': 256, + 'property_key_name': None, + 'property_name': 'Power Mode', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-9-256', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': 'diagnostic', + 'entity_id': 'sensor.multisensor_6_sleep_state', + 'hidden_by': None, + 'original_device_class': 'enum', + 'original_icon': None, + 'original_name': 'Sleep State', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 9, + 'property_key': 1, + 'property_key_name': None, + 'property_name': 'Sleep State', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-9-1', + }), + dict({ + 'disabled': False, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.multisensor_6_ultraviolet', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ultraviolet', + 'primary_value': dict({ + 'command_class': 49, + 'command_class_name': 'Multilevel Sensor', + 'endpoint': 0, + 'property': 'Ultraviolet', + 'property_key': None, + 'property_key_name': None, + 'property_name': 'Ultraviolet', + }), + 'supported_features': 0, + 'unit_of_measurement': 'UV index', + 'value_id': '52-49-0-Ultraviolet', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_1_send_battery_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 1: Send battery reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 101, + 'property_key': 1, + 'property_key_name': None, + 'property_name': 'Group 1: Send battery reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-101-1', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_1_send_humidity_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 1: Send humidity reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 101, + 'property_key': 64, + 'property_key_name': None, + 'property_name': 'Group 1: Send humidity reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-101-64', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_1_send_luminance_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 1: Send luminance reports', + 'primary_value': dict({ + 'command_class': 112, + 
'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 101, + 'property_key': 128, + 'property_key_name': None, + 'property_name': 'Group 1: Send luminance reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-101-128', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_1_send_temperature_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 1: Send temperature reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 101, + 'property_key': 32, + 'property_key_name': None, + 'property_name': 'Group 1: Send temperature reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-101-32', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_1_send_ultraviolet_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 1: Send ultraviolet reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 101, + 'property_key': 16, + 'property_key_name': None, + 'property_name': 'Group 1: Send ultraviolet reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-101-16', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_2_send_battery_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 2: Send battery reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 102, + 'property_key': 1, + 'property_key_name': None, + 'property_name': 'Group 2: Send battery reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-102-1', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_2_send_humidity_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 2: Send humidity reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 102, + 'property_key': 64, + 'property_key_name': None, + 'property_name': 'Group 2: Send humidity reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-102-64', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_2_send_luminance_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 2: Send luminance reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 102, + 'property_key': 128, + 'property_key_name': None, + 'property_name': 'Group 2: Send luminance reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-102-128', + }), + 
dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_2_send_temperature_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 2: Send temperature reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 102, + 'property_key': 32, + 'property_key_name': None, + 'property_name': 'Group 2: Send temperature reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-102-32', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_2_send_ultraviolet_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 2: Send ultraviolet reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 102, + 'property_key': 16, + 'property_key_name': None, + 'property_name': 'Group 2: Send ultraviolet reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-102-16', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_3_send_battery_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 3: Send battery reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 103, + 'property_key': 1, + 'property_key_name': None, + 'property_name': 'Group 3: Send battery reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-103-1', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_3_send_humidity_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 3: Send humidity reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 103, + 'property_key': 64, + 'property_key_name': None, + 'property_name': 'Group 3: Send humidity reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-103-64', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_3_send_luminance_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 3: Send luminance reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 103, + 'property_key': 128, + 'property_key_name': None, + 'property_name': 'Group 3: Send luminance reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-103-128', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_3_send_temperature_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 3: 
Send temperature reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 103, + 'property_key': 32, + 'property_key_name': None, + 'property_name': 'Group 3: Send temperature reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-103-32', + }), + dict({ + 'disabled': True, + 'disabled_by': 'integration', + 'domain': 'switch', + 'entity_category': 'config', + 'entity_id': 'switch.multisensor_6_group_3_send_ultraviolet_reports', + 'hidden_by': None, + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Group 3: Send ultraviolet reports', + 'primary_value': dict({ + 'command_class': 112, + 'command_class_name': 'Configuration', + 'endpoint': 0, + 'property': 103, + 'property_key': 16, + 'property_key_name': None, + 'property_name': 'Group 3: Send ultraviolet reports', + }), + 'supported_features': 0, + 'unit_of_measurement': None, + 'value_id': '52-112-0-103-16', + }), + ]), + 'state': dict({ + 'deviceClass': dict({ + 'basic': dict({ + 'key': 2, + 'label': 'Static Controller', + }), + 'generic': dict({ + 'key': 21, + 'label': 'Multilevel Sensor', + }), + 'mandatoryControlledCCs': list([ + ]), + 'mandatorySupportedCCs': list([ + ]), + 'specific': dict({ + 'key': 1, + 'label': 'Routing Multilevel Sensor', + }), + }), + 'deviceConfig': dict({ + 'description': 'Multisensor 6', + 'devices': list([ + dict({ + 'productId': '0x0064', + 'productType': '0x0002', + }), + dict({ + 'productId': '0x0064', + 'productType': '0x0102', + }), + dict({ + 'productId': '0x0064', + 'productType': '0x0202', + }), + ]), + 'firmwareVersion': dict({ + 'max': '255.255', + 'min': '1.10', + }), + 'label': 'ZW100', + 'manufacturer': 'AEON Labs', + 'manufacturerId': 134, + 'paramInformation': dict({ + '_map': dict({ + }), + }), + }), + 'endpoints': dict({ + '0': dict({ + 'commandClasses': list([ + dict({ + 'id': 113, + 'isSecure': False, + 'name': 'Notification', + 'version': 8, + }), + ]), + 'index': 0, + 'installerIcon': 3079, + 'nodeId': 52, + 'userIcon': 3079, + }), + }), + 'firmwareVersion': '1.12', + 'highestSecurityClass': 7, + 'index': 0, + 'installerIcon': 3079, + 'interviewAttempts': 1, + 'isBeaming': True, + 'isControllerNode': False, + 'isFrequentListening': False, + 'isListening': True, + 'isRouting': True, + 'isSecure': False, + 'label': 'ZW100', + 'manufacturerId': 134, + 'maxBaudRate': 40000, + 'neighbors': list([ + 1, + 32, + ]), + 'nodeId': 52, + 'nodeType': 0, + 'productId': 100, + 'productType': 258, + 'ready': True, + 'roleType': 5, + 'status': 1, + 'userIcon': 3079, + 'values': dict({ + '52-112-0-100': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'description': 'Reset 101-103 to defaults', + 'format': 0, + 'isFromConfig': True, + 'label': 'Set parameters 101-103 to default.', + 'max': 1, + 'min': 0, + 'readable': False, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 100, + 'propertyName': 'Set parameters 101-103 to default.', + }), + '52-112-0-101-1': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include battery information in periodic reports to Group 1', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 1: Send battery reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 
'valueSize': 4, + 'writeable': True, + }), + 'property': 101, + 'propertyKey': 1, + 'propertyName': 'Group 1: Send battery reports', + 'value': 1, + }), + '52-112-0-101-128': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include luminance information in periodic reports to Group 1', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 1: Send luminance reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 101, + 'propertyKey': 128, + 'propertyName': 'Group 1: Send luminance reports', + 'value': 1, + }), + '52-112-0-101-16': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include ultraviolet information in periodic reports to Group 1', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 1: Send ultraviolet reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 101, + 'propertyKey': 16, + 'propertyName': 'Group 1: Send ultraviolet reports', + 'value': 1, + }), + '52-112-0-101-32': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include temperature information in periodic reports to Group 1', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 1: Send temperature reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 101, + 'propertyKey': 32, + 'propertyName': 'Group 1: Send temperature reports', + 'value': 1, + }), + '52-112-0-101-64': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include humidity information in periodic reports to Group 1', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 1: Send humidity reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 101, + 'propertyKey': 64, + 'propertyName': 'Group 1: Send humidity reports', + 'value': 1, + }), + '52-112-0-102-1': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include battery information in periodic reports to Group 2', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 2: Send battery reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 102, + 'propertyKey': 1, + 'propertyName': 'Group 2: Send battery reports', + 'value': 0, + }), + '52-112-0-102-128': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include luminance information in periodic reports to Group 2', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 2: Send luminance reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 102, + 'propertyKey': 128, + 'propertyName': 'Group 2: Send luminance reports', + 'value': 0, + }), + '52-112-0-102-16': dict({ + 
'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include ultraviolet information in periodic reports to Group 2', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 2: Send ultraviolet reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 102, + 'propertyKey': 16, + 'propertyName': 'Group 2: Send ultraviolet reports', + 'value': 0, + }), + '52-112-0-102-32': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include temperature information in periodic reports to Group 2', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 2: Send temperature reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 102, + 'propertyKey': 32, + 'propertyName': 'Group 2: Send temperature reports', + 'value': 0, + }), + '52-112-0-102-64': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include humidity information in periodic reports to Group 2', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 2: Send humidity reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 102, + 'propertyKey': 64, + 'propertyName': 'Group 2: Send humidity reports', + 'value': 0, + }), + '52-112-0-103-1': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include battery information in periodic reports to Group 3', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 3: Send battery reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 103, + 'propertyKey': 1, + 'propertyName': 'Group 3: Send battery reports', + 'value': 0, + }), + '52-112-0-103-128': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include luminance information in periodic reports to Group 3', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 3: Send luminance reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 103, + 'propertyKey': 128, + 'propertyName': 'Group 3: Send luminance reports', + 'value': 0, + }), + '52-112-0-103-16': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include ultraviolet information in periodic reports to Group 3', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 3: Send ultraviolet reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 103, + 'propertyKey': 16, + 'propertyName': 'Group 3: Send ultraviolet reports', + 'value': 0, + }), + '52-112-0-103-32': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include temperature 
information in periodic reports to Group 3', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 3: Send temperature reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 103, + 'propertyKey': 32, + 'propertyName': 'Group 3: Send temperature reports', + 'value': 0, + }), + '52-112-0-103-64': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Include humidity information in periodic reports to Group 3', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 3: Send humidity reports', + 'max': 1, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 103, + 'propertyKey': 64, + 'propertyName': 'Group 3: Send humidity reports', + 'value': 0, + }), + '52-112-0-110': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'description': 'Set parameters 111-113 to default.', + 'format': 0, + 'isFromConfig': True, + 'label': 'Set parameters 111-113 to default.', + 'max': 1, + 'min': 0, + 'readable': False, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 110, + 'propertyName': 'Set parameters 111-113 to default.', + }), + '52-112-0-111': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 3600, + 'description': 'How often to update Group 1', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 1 Report Interval', + 'max': 2678400, + 'min': 5, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 111, + 'propertyName': 'Group 1 Report Interval', + 'value': 3600, + }), + '52-112-0-112': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 3600, + 'description': 'Group 2 Report Interval', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 2 Report Interval', + 'max': 2678400, + 'min': 5, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 112, + 'propertyName': 'Group 2 Report Interval', + 'value': 3600, + }), + '52-112-0-113': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 3600, + 'description': 'Group 3 Report Interval', + 'format': 0, + 'isFromConfig': True, + 'label': 'Group 3 Report Interval', + 'max': 2678400, + 'min': 5, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 113, + 'propertyName': 'Group 3 Report Interval', + 'value': 3600, + }), + '52-112-0-2': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 0, + 'description': 'Stay awake for 10 minutes at power on', + 'format': 0, + 'isFromConfig': True, + 'label': 'Stay Awake in Battery Mode', + 'max': 1, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'Disable', + '1': 'Enable', + }), + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 2, + 'propertyName': 'Stay Awake in Battery Mode', + 'value': 0, + }), + '52-112-0-201-255': dict({ + 'commandClass': 112, + 'commandClassName': 
'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 1, + 'format': 0, + 'isFromConfig': True, + 'label': 'Temperature Calibration (Unit)', + 'max': 2, + 'min': 1, + 'readable': True, + 'states': dict({ + '1': 'Celsius', + '2': 'Fahrenheit', + }), + 'type': 'number', + 'valueSize': 2, + 'writeable': True, + }), + 'property': 201, + 'propertyKey': 255, + 'propertyName': 'Temperature Calibration (Unit)', + 'value': 2, + }), + '52-112-0-201-65280': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'format': 0, + 'isFromConfig': True, + 'label': 'Temperature Calibration', + 'max': 127, + 'min': -127, + 'readable': True, + 'type': 'number', + 'valueSize': 2, + 'writeable': True, + }), + 'property': 201, + 'propertyKey': 65280, + 'propertyName': 'Temperature Calibration', + 'value': 0, + }), + '52-112-0-202': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'description': 'Humidity Sensor Calibration', + 'format': 0, + 'isFromConfig': True, + 'label': 'Humidity Sensor Calibration', + 'max': 50, + 'min': -50, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 202, + 'propertyName': 'Humidity Sensor Calibration', + 'value': 0, + }), + '52-112-0-203': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'description': 'Luminance Sensor Calibration', + 'format': 0, + 'isFromConfig': True, + 'label': 'Luminance Sensor Calibration', + 'max': 1000, + 'min': -1000, + 'readable': True, + 'type': 'number', + 'valueSize': 2, + 'writeable': True, + }), + 'property': 203, + 'propertyName': 'Luminance Sensor Calibration', + 'value': 0, + }), + '52-112-0-204': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'description': 'Ultraviolet Sensor Calibration', + 'format': 0, + 'isFromConfig': True, + 'label': 'Ultraviolet Sensor Calibration', + 'max': 10, + 'min': -10, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 204, + 'propertyName': 'Ultraviolet Sensor Calibration', + 'value': 0, + }), + '52-112-0-252': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 0, + 'description': 'Disable/Enable Configuration Lock (0=Disable, 1=Enable)', + 'format': 0, + 'isFromConfig': True, + 'label': 'Disable/Enable Configuration Lock', + 'max': 1, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'Disable', + '1': 'Enable', + }), + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 252, + 'propertyName': 'Disable/Enable Configuration Lock', + 'value': 0, + }), + '52-112-0-255': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 0, + 'format': 0, + 'isFromConfig': True, + 'label': 'Reset to default factory settings', + 'max': 1431655765, + 'min': 0, + 'readable': False, + 'states': dict({ + '1': 'Resets all configuration parameters to defaults', + '1431655765': 'Reset to default factory settings and be excluded', + }), + 'type': 'number', + 'valueSize': 4, + 
'writeable': True, + }), + 'property': 255, + 'propertyName': 'Reset to default factory settings', + }), + '52-112-0-3': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 240, + 'description': 'Motion Sensor reset timeout', + 'format': 0, + 'isFromConfig': True, + 'label': 'Motion Sensor reset timeout', + 'max': 3600, + 'min': 10, + 'readable': True, + 'type': 'number', + 'valueSize': 2, + 'writeable': True, + }), + 'property': 3, + 'propertyName': 'Motion Sensor reset timeout', + 'value': 240, + }), + '52-112-0-39': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 20, + 'description': 'Report Low Battery if below this value', + 'format': 0, + 'isFromConfig': True, + 'label': 'Low Battery Report', + 'max': 50, + 'min': 10, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 39, + 'propertyName': 'Low Battery Report', + 'value': 20, + }), + '52-112-0-4': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 5, + 'description': 'Sensitivity level of PIR sensor (1=minimum, 5=maximum)', + 'format': 1, + 'isFromConfig': True, + 'label': 'Motion sensor sensitivity', + 'max': 255, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'Disable', + '1': 'Enable, sensitivity level 1 (minimum)', + '2': 'Enable, sensitivity level 2', + '3': 'Enable, sensitivity level 3', + '4': 'Enable, sensitivity level 4', + '5': 'Enable, sensitivity level 5 (maximum)', + }), + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 4, + 'propertyName': 'Motion sensor sensitivity', + 'value': 5, + }), + '52-112-0-40': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 0, + 'description': 'Select to report on thresholds', + 'format': 0, + 'isFromConfig': True, + 'label': 'Selective Reporting', + 'max': 1, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'Disable', + '1': 'Enable', + }), + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 40, + 'propertyName': 'Selective Reporting', + 'value': 0, + }), + '52-112-0-41-15': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 1, + 'format': 0, + 'isFromConfig': True, + 'label': 'Temperature Threshold (Unit)', + 'max': 2, + 'min': 1, + 'readable': True, + 'states': dict({ + '1': 'Celsius', + '2': 'Fahrenheit', + }), + 'type': 'number', + 'valueSize': 3, + 'writeable': True, + }), + 'property': 41, + 'propertyKey': 15, + 'propertyName': 'Temperature Threshold (Unit)', + 'value': 0, + }), + '52-112-0-41-16776960': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 20, + 'description': 'Threshold change in temperature to induce an automatic report.', + 'format': 0, + 'isFromConfig': True, + 'label': 'Temperature Threshold', + 'max': 100, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 3, + 'writeable': True, + }), + 'property': 41, + 'propertyKey': 16776960, + 'propertyName': 'Temperature Threshold', + 'value': 5122, + }), + '52-112-0-42': dict({ + 'commandClass': 112, + 
'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 10, + 'description': 'Humidity percent change threshold', + 'format': 0, + 'isFromConfig': True, + 'label': 'Humidity Threshold', + 'max': 100, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 42, + 'propertyName': 'Humidity Threshold', + 'value': 10, + }), + '52-112-0-43': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 100, + 'description': 'Luminance change threshold', + 'format': 0, + 'isFromConfig': True, + 'label': 'Luminance Threshold', + 'max': 1000, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 2, + 'writeable': True, + }), + 'property': 43, + 'propertyName': 'Luminance Threshold', + 'value': 100, + }), + '52-112-0-44': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 10, + 'description': 'Battery level threshold', + 'format': 0, + 'isFromConfig': True, + 'label': 'Battery Threshold', + 'max': 100, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 44, + 'propertyName': 'Battery Threshold', + 'value': 10, + }), + '52-112-0-45': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 2, + 'description': 'Ultraviolet change threshold', + 'format': 0, + 'isFromConfig': True, + 'label': 'Ultraviolet Threshold', + 'max': 100, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 45, + 'propertyName': 'Ultraviolet Threshold', + 'value': 2, + }), + '52-112-0-46': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 0, + 'description': 'Send an alarm report if temperature is less than -15 °C', + 'format': 1, + 'isFromConfig': True, + 'label': 'Send Alarm Report if low temperature', + 'max': 255, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'Disable', + '1': 'Enable', + }), + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 46, + 'propertyName': 'Send Alarm Report if low temperature', + 'value': 0, + }), + '52-112-0-48': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'description': 'Send report when measurement is at upper/lower limit', + 'format': 1, + 'isFromConfig': True, + 'label': 'Send a report if the measurement is out of limits', + 'max': 255, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 48, + 'propertyName': 'Send a report if the measurement is out of limits', + 'value': 0, + }), + '52-112-0-49-4294901760': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 280, + 'format': 0, + 'isFromConfig': True, + 'label': 'Upper temperature limit', + 'max': 2120, + 'min': -400, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 49, + 'propertyKey': 4294901760, + 'propertyName': 'Upper temperature limit', + 'value': 824, + }), + '52-112-0-49-65280': 
dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 1, + 'format': 0, + 'isFromConfig': True, + 'label': 'Upper temperature limit (Unit)', + 'max': 2, + 'min': 1, + 'readable': False, + 'states': dict({ + '1': 'Celsius', + '2': 'Fahrenheit', + }), + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 49, + 'propertyKey': 65280, + 'propertyName': 'Upper temperature limit (Unit)', + 'value': 2, + }), + '52-112-0-5': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 1, + 'format': 1, + 'isFromConfig': True, + 'label': 'Motion Sensor Triggered Command', + 'max': 255, + 'min': 0, + 'readable': True, + 'states': dict({ + '1': 'Send Basic Set CC', + '2': 'Send Sensor Binary Report CC', + }), + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 5, + 'propertyName': 'Motion Sensor Triggered Command', + 'value': 1, + }), + '52-112-0-50-4294901760': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'format': 0, + 'isFromConfig': True, + 'label': 'Lower temperature limit', + 'max': 2120, + 'min': -400, + 'readable': True, + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 50, + 'propertyKey': 4294901760, + 'propertyName': 'Lower temperature limit', + 'value': 320, + }), + '52-112-0-50-65280': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 1, + 'format': 0, + 'isFromConfig': True, + 'label': 'Lower temperature limit (Unit)', + 'max': 2, + 'min': 1, + 'readable': False, + 'states': dict({ + '1': 'Celsius', + '2': 'Fahrenheit', + }), + 'type': 'number', + 'valueSize': 4, + 'writeable': True, + }), + 'property': 50, + 'propertyKey': 65280, + 'propertyName': 'Lower temperature limit (Unit)', + 'value': 2, + }), + '52-112-0-51': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 60, + 'description': 'Upper limit value of humidity sensor', + 'format': 0, + 'isFromConfig': True, + 'label': 'Upper limit value of humidity sensor', + 'max': 100, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 51, + 'propertyName': 'Upper limit value of humidity sensor', + 'value': 60, + }), + '52-112-0-52': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 50, + 'description': 'Lower limit value of humidity sensor', + 'format': 0, + 'isFromConfig': True, + 'label': 'Lower limit value of humidity sensor', + 'max': 100, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 52, + 'propertyName': 'Lower limit value of humidity sensor', + 'value': 50, + }), + '52-112-0-53': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1000, + 'description': 'Upper limit value of Lighting sensor', + 'format': 0, + 'isFromConfig': True, + 'label': 'Upper limit value of Lighting sensor', + 'max': 30000, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 2, 
+ 'writeable': True, + }), + 'property': 53, + 'propertyName': 'Upper limit value of Lighting sensor', + 'value': 1000, + }), + '52-112-0-54': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 100, + 'description': 'Lower limit value of Lighting sensor', + 'format': 0, + 'isFromConfig': True, + 'label': 'Lower limit value of Lighting sensor', + 'max': 30000, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 2, + 'writeable': True, + }), + 'property': 54, + 'propertyName': 'Lower limit value of Lighting sensor', + 'value': 100, + }), + '52-112-0-55': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 8, + 'description': 'Upper limit value of ultraviolet sensor', + 'format': 0, + 'isFromConfig': True, + 'label': 'Upper limit value of ultraviolet sensor', + 'max': 11, + 'min': 1, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 55, + 'propertyName': 'Upper limit value of ultraviolet sensor', + 'value': 8, + }), + '52-112-0-56': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 4, + 'description': 'Lower limit value of ultraviolet sensor', + 'format': 0, + 'isFromConfig': True, + 'label': 'Lower limit value of ultraviolet sensor', + 'max': 11, + 'min': 1, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 56, + 'propertyName': 'Lower limit value of ultraviolet sensor', + 'value': 4, + }), + '52-112-0-57': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'description': 'Recover limit value of temperature sensor', + 'format': 1, + 'isFromConfig': True, + 'label': 'Recover limit value of temperature sensor', + 'max': 65535, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 2, + 'writeable': True, + }), + 'property': 57, + 'propertyName': 'Recover limit value of temperature sensor', + 'value': 5122, + }), + '52-112-0-58': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 5, + 'description': 'Recover limit value of humidity sensor', + 'format': 0, + 'isFromConfig': True, + 'label': 'Recover limit value of humidity sensor', + 'max': 50, + 'min': 1, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 58, + 'propertyName': 'Recover limit value of humidity sensor', + 'value': 5, + }), + '52-112-0-59': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 10, + 'description': 'Recover limit value of Lighting sensor.', + 'format': 1, + 'isFromConfig': True, + 'label': 'Recover limit value of Lighting sensor.', + 'max': 255, + 'min': 1, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 59, + 'propertyName': 'Recover limit value of Lighting sensor.', + 'value': 10, + }), + '52-112-0-60': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 2, + 'description': 'Recover limit value of Ultraviolet sensor', + 
'format': 0, + 'isFromConfig': True, + 'label': 'Recover limit value of Ultraviolet sensor', + 'max': 5, + 'min': 1, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 60, + 'propertyName': 'Recover limit value of Ultraviolet sensor', + 'value': 2, + }), + '52-112-0-61': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 0, + 'description': 'Out-of-limit state of the Sensors', + 'format': 1, + 'isFromConfig': True, + 'label': 'Out-of-limit state of the Sensors', + 'max': 255, + 'min': 0, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': False, + }), + 'property': 61, + 'propertyName': 'Out-of-limit state of the Sensors', + 'value': 0, + }), + '52-112-0-64': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 1, + 'description': 'Default unit of the automatic temperature report', + 'format': 0, + 'isFromConfig': True, + 'label': 'Default unit of the automatic temperature report', + 'max': 2, + 'min': 1, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 64, + 'propertyName': 'Default unit of the automatic temperature report', + 'value': 2, + }), + '52-112-0-8': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': True, + 'default': 30, + 'description': 'Set the timeout of awake after the Wake Up CC is sent out...', + 'format': 1, + 'isFromConfig': True, + 'label': 'Timeout after wake up', + 'max': 255, + 'min': 8, + 'readable': True, + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 8, + 'propertyName': 'Timeout after wake up', + 'value': 15, + }), + '52-112-0-81': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 0, + 'description': 'Disable/Enable LED function', + 'format': 0, + 'isFromConfig': True, + 'label': 'LED function', + 'max': 2, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'Enable LED blinking', + '1': 'Disable PIR LED', + '2': 'Disable ALL', + }), + 'type': 'number', + 'valueSize': 1, + 'writeable': True, + }), + 'property': 81, + 'propertyName': 'LED function', + 'value': 0, + }), + '52-112-0-9-1': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 0, + 'format': 0, + 'isFromConfig': True, + 'label': 'Sleep State', + 'max': 1, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'Asleep', + '1': 'Awake', + }), + 'type': 'number', + 'valueSize': 2, + 'writeable': False, + }), + 'property': 9, + 'propertyKey': 1, + 'propertyName': 'Sleep State', + 'value': 0, + }), + '52-112-0-9-256': dict({ + 'commandClass': 112, + 'commandClassName': 'Configuration', + 'endpoint': 0, + 'metadata': dict({ + 'allowManualEntry': False, + 'default': 0, + 'format': 0, + 'isFromConfig': True, + 'label': 'Power Mode', + 'max': 1, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'USB', + '1': 'Battery', + }), + 'type': 'number', + 'valueSize': 2, + 'writeable': False, + }), + 'property': 9, + 'propertyKey': 256, + 'propertyName': 'Power Mode', + 'value': 0, + }), + '52-113-0-Home Security-Cover status': dict({ + 'commandClass': 113, + 'commandClassName': 'Notification', + 
'endpoint': 0, + 'metadata': dict({ + 'ccSpecific': dict({ + 'notificationType': 7, + }), + 'label': 'Cover status', + 'max': 255, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'idle', + '3': 'Tampering, product cover removed', + }), + 'type': 'number', + 'writeable': False, + }), + 'property': 'Home Security', + 'propertyKey': 'Cover status', + 'propertyKeyName': 'Cover status', + 'propertyName': 'Home Security', + 'value': 0, + }), + '52-113-0-Home Security-Motion sensor status': dict({ + 'commandClass': 113, + 'commandClassName': 'Notification', + 'endpoint': 0, + 'metadata': dict({ + 'ccSpecific': dict({ + 'notificationType': 7, + }), + 'label': 'Motion sensor status', + 'max': 255, + 'min': 0, + 'readable': True, + 'states': dict({ + '0': 'idle', + '8': 'Motion detection', + }), + 'type': 'number', + 'writeable': False, + }), + 'property': 'Home Security', + 'propertyKey': 'Motion sensor status', + 'propertyKeyName': 'Motion sensor status', + 'propertyName': 'Home Security', + 'value': 8, + }), + '52-114-0-manufacturerId': dict({ + 'commandClass': 114, + 'commandClassName': 'Manufacturer Specific', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Manufacturer ID', + 'max': 65535, + 'min': 0, + 'readable': True, + 'type': 'number', + 'writeable': False, + }), + 'property': 'manufacturerId', + 'propertyName': 'manufacturerId', + 'value': 134, + }), + '52-114-0-productId': dict({ + 'commandClass': 114, + 'commandClassName': 'Manufacturer Specific', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Product ID', + 'max': 65535, + 'min': 0, + 'readable': True, + 'type': 'number', + 'writeable': False, + }), + 'property': 'productId', + 'propertyName': 'productId', + 'value': 100, + }), + '52-114-0-productType': dict({ + 'commandClass': 114, + 'commandClassName': 'Manufacturer Specific', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Product type', + 'max': 65535, + 'min': 0, + 'readable': True, + 'type': 'number', + 'writeable': False, + }), + 'property': 'productType', + 'propertyName': 'productType', + 'value': 258, + }), + '52-128-0-isLow': dict({ + 'commandClass': 128, + 'commandClassName': 'Battery', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Low battery level', + 'readable': True, + 'type': 'boolean', + 'writeable': False, + }), + 'property': 'isLow', + 'propertyName': 'isLow', + 'value': False, + }), + '52-128-0-level': dict({ + 'commandClass': 128, + 'commandClassName': 'Battery', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Battery level', + 'max': 100, + 'min': 0, + 'readable': True, + 'type': 'number', + 'unit': '%', + 'writeable': False, + }), + 'property': 'level', + 'propertyName': 'level', + 'value': 100, + }), + '52-132-0-controllerNodeId': dict({ + 'commandClass': 132, + 'commandClassName': 'Wake Up', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Node ID of the controller', + 'readable': True, + 'type': 'any', + 'writeable': False, + }), + 'property': 'controllerNodeId', + 'propertyName': 'controllerNodeId', + 'value': 1, + }), + '52-132-0-wakeUpInterval': dict({ + 'commandClass': 132, + 'commandClassName': 'Wake Up', + 'endpoint': 0, + 'metadata': dict({ + 'default': 3600, + 'label': 'Wake Up interval', + 'max': 3600, + 'min': 240, + 'readable': False, + 'steps': 60, + 'type': 'number', + 'writeable': True, + }), + 'property': 'wakeUpInterval', + 'propertyName': 'wakeUpInterval', + 'value': 3600, + }), + '52-134-0-firmwareVersions': dict({ + 'commandClass': 134, + 'commandClassName': 'Version', + 'endpoint': 0, + 'metadata': dict({ + 
'label': 'Z-Wave chip firmware versions', + 'readable': True, + 'type': 'any', + 'writeable': False, + }), + 'property': 'firmwareVersions', + 'propertyName': 'firmwareVersions', + 'value': list([ + '1.12', + ]), + }), + '52-134-0-hardwareVersion': dict({ + 'commandClass': 134, + 'commandClassName': 'Version', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Z-Wave chip hardware version', + 'readable': True, + 'type': 'any', + 'writeable': False, + }), + 'property': 'hardwareVersion', + 'propertyName': 'hardwareVersion', + }), + '52-134-0-libraryType': dict({ + 'commandClass': 134, + 'commandClassName': 'Version', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Libary type', + 'readable': True, + 'type': 'any', + 'writeable': False, + }), + 'property': 'libraryType', + 'propertyName': 'libraryType', + 'value': 3, + }), + '52-134-0-protocolVersion': dict({ + 'commandClass': 134, + 'commandClassName': 'Version', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Z-Wave protocol version', + 'readable': True, + 'type': 'any', + 'writeable': False, + }), + 'property': 'protocolVersion', + 'propertyName': 'protocolVersion', + 'value': '4.54', + }), + '52-32-0-currentValue': dict({ + 'commandClass': 32, + 'commandClassName': 'Basic', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Current value', + 'max': 99, + 'min': 0, + 'readable': True, + 'type': 'number', + 'writeable': False, + }), + 'property': 'currentValue', + 'propertyName': 'currentValue', + 'value': 255, + }), + '52-32-0-targetValue': dict({ + 'commandClass': 32, + 'commandClassName': 'Basic', + 'endpoint': 0, + 'metadata': dict({ + 'label': 'Target value', + 'max': 99, + 'min': 0, + 'readable': True, + 'type': 'number', + 'writeable': True, + }), + 'property': 'targetValue', + 'propertyName': 'targetValue', + }), + '52-48-0-Any': dict({ + 'commandClass': 48, + 'commandClassName': 'Binary Sensor', + 'endpoint': 0, + 'metadata': dict({ + 'ccSpecific': dict({ + 'sensorType': 255, + }), + 'label': 'Any', + 'readable': True, + 'type': 'boolean', + 'writeable': False, + }), + 'property': 'Any', + 'propertyName': 'Any', + 'value': False, + }), + '52-49-0-Air temperature': dict({ + 'commandClass': 49, + 'commandClassName': 'Multilevel Sensor', + 'endpoint': 0, + 'metadata': dict({ + 'ccSpecific': dict({ + 'scale': 0, + 'sensorType': 1, + }), + 'label': 'Air temperature', + 'readable': True, + 'type': 'number', + 'unit': '°C', + 'writeable': False, + }), + 'property': 'Air temperature', + 'propertyName': 'Air temperature', + 'value': 9, + }), + '52-49-0-Humidity': dict({ + 'commandClass': 49, + 'commandClassName': 'Multilevel Sensor', + 'endpoint': 0, + 'metadata': dict({ + 'ccSpecific': dict({ + 'scale': 0, + 'sensorType': 5, + }), + 'label': 'Humidity', + 'readable': True, + 'type': 'number', + 'unit': '%', + 'writeable': False, + }), + 'property': 'Humidity', + 'propertyName': 'Humidity', + 'value': 65, + }), + '52-49-0-Illuminance': dict({ + 'commandClass': 49, + 'commandClassName': 'Multilevel Sensor', + 'endpoint': 0, + 'metadata': dict({ + 'ccSpecific': dict({ + 'scale': 1, + 'sensorType': 3, + }), + 'label': 'Illuminance', + 'readable': True, + 'type': 'number', + 'unit': 'Lux', + 'writeable': False, + }), + 'property': 'Illuminance', + 'propertyName': 'Illuminance', + 'value': 0, + }), + '52-49-0-Ultraviolet': dict({ + 'commandClass': 49, + 'commandClassName': 'Multilevel Sensor', + 'endpoint': 0, + 'metadata': dict({ + 'ccSpecific': dict({ + 'scale': 0, + 'sensorType': 27, + }), + 'label': 'Ultraviolet', + 'readable': True, + 
'type': 'number', + 'writeable': False, + }), + 'property': 'Ultraviolet', + 'propertyName': 'Ultraviolet', + 'value': 1, + }), + }), + 'version': 4, + 'zwavePlusVersion': 1, + }), + 'versionInfo': dict({ + 'driverVersion': '6.0.0-beta.0', + 'maxSchemaVersion': 0, + 'minSchemaVersion': 0, + 'serverVersion': '1.0.0', + }), + }) +# --- diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index 0437f9d9085..a3f70e92dcf 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -5,7 +5,7 @@ from http import HTTPStatus from io import BytesIO import json from typing import Any -from unittest.mock import patch +from unittest.mock import MagicMock, PropertyMock, patch import pytest from zwave_js_server.const import ( @@ -78,17 +78,26 @@ from homeassistant.components.zwave_js.api import ( TYPE, UUID, VALUE, + VALUE_FORMAT, + VALUE_SIZE, VERSION, ) from homeassistant.components.zwave_js.const import ( + ATTR_COMMAND_CLASS, + ATTR_ENDPOINT, + ATTR_METHOD_NAME, + ATTR_PARAMETERS, + ATTR_WAIT_FOR_RESULT, CONF_DATA_COLLECTION_OPTED_IN, + CONF_INSTALLER_MODE, DOMAIN, ) from homeassistant.components.zwave_js.helpers import get_device_id from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component -from tests.common import MockUser +from tests.common import MockConfigEntry, MockUser from tests.typing import ClientSessionGenerator, WebSocketGenerator CONTROLLER_PATCH_PREFIX = "zwave_js_server.model.controller.Controller" @@ -489,6 +498,7 @@ async def test_node_alerts( async def test_add_node( hass: HomeAssistant, + nortek_thermostat, nortek_thermostat_added_event, integration, client, @@ -524,7 +534,7 @@ async def test_add_node( data={ "source": "controller", "event": "inclusion started", - "secure": False, + "strategy": 2, }, ) client.driver.receive_event(event) @@ -590,6 +600,7 @@ async def test_add_node( "status": 0, "ready": False, "low_security": False, + "low_security_reason": None, } assert msg["event"]["node"] == node_details @@ -935,12 +946,46 @@ async def test_add_node( assert msg["error"]["code"] == "zwave_error" assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" + # Test inclusion already in progress + client.async_send_command.reset_mock() + type(client.driver.controller).inclusion_state = PropertyMock( + return_value=InclusionState.INCLUDING + ) + + # Create a node that's not ready + node_data = deepcopy(nortek_thermostat.data) # Copy to allow modification in tests. 
+ node_data["ready"] = False + node_data["values"] = {} + node_data["endpoints"] = {} + node = Node(client, node_data) + client.driver.controller.nodes[node.node_id] = node + + await ws_client.send_json( + { + ID: 11, + TYPE: "zwave_js/add_node", + ENTRY_ID: entry.entry_id, + INCLUSION_STRATEGY: InclusionStrategy.DEFAULT.value, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + + # Verify no command was sent since inclusion is already in progress + assert len(client.async_send_command.call_args_list) == 0 + + # Verify we got a node added event + msg = await ws_client.receive_json() + assert msg["event"]["event"] == "node added" + assert msg["event"]["node"]["node_id"] == node.node_id + # Test sending command with not loaded entry fails await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() await ws_client.send_json( - {ID: 11, TYPE: "zwave_js/add_node", ENTRY_ID: entry.entry_id} + {ID: 12, TYPE: "zwave_js/add_node", ENTRY_ID: entry.entry_id} ) msg = await ws_client.receive_json() @@ -1822,7 +1867,7 @@ async def test_replace_failed_node( data={ "source": "controller", "event": "inclusion started", - "secure": False, + "strategy": 2, }, ) client.driver.receive_event(event) @@ -3048,9 +3093,21 @@ async def test_get_config_parameters( assert result[key]["property"] == 2 assert result[key]["property_key"] is None assert result[key]["endpoint"] == 0 - assert result[key]["metadata"]["type"] == "number" assert result[key]["configuration_value_type"] == "enumerated" assert result[key]["metadata"]["states"] + assert ( + result[key]["metadata"]["description"] + == "Stay awake for 10 minutes at power on" + ) + assert result[key]["metadata"]["label"] == "Stay Awake in Battery Mode" + assert result[key]["metadata"]["type"] == "number" + assert result[key]["metadata"]["min"] == 0 + assert result[key]["metadata"]["max"] == 1 + assert result[key]["metadata"]["unit"] is None + assert result[key]["metadata"]["writeable"] is True + assert result[key]["metadata"]["readable"] is True + assert result[key]["metadata"]["default"] == 0 + assert result[key]["value"] == 0 key = "52-112-0-201-255" assert result[key]["property_key"] == 255 @@ -3084,6 +3141,180 @@ async def test_get_config_parameters( assert msg["error"]["code"] == ERR_NOT_LOADED +async def test_set_raw_config_parameter( + hass: HomeAssistant, + client, + multisensor_6, + integration, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test that the set_raw_config_parameter WS API call works.""" + entry = integration + ws_client = await hass_ws_client(hass) + device = get_device(hass, multisensor_6) + + # Change from async_send_command to async_send_command_no_wait + client.async_send_command_no_wait.return_value = None + + # Test setting a raw config parameter value + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/set_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + VALUE: 1, + VALUE_SIZE: 2, + VALUE_FORMAT: 1, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"]["status"] == "queued" + + assert len(client.async_send_command_no_wait.call_args_list) == 1 + args = client.async_send_command_no_wait.call_args[0][0] + assert args["command"] == "endpoint.set_raw_config_parameter_value" + assert args["nodeId"] == multisensor_6.node_id + assert args["parameter"] == 102 + assert args["value"] == 1 + assert args["valueSize"] == 2 + assert args["valueFormat"] == 1 + + # Reset the mock for async_send_command_no_wait instead + 
client.async_send_command_no_wait.reset_mock() + + # Test getting non-existent node fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/set_raw_config_parameter", + DEVICE_ID: "fake_device", + PROPERTY: 102, + VALUE: 1, + VALUE_SIZE: 2, + VALUE_FORMAT: 1, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/set_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + VALUE: 1, + VALUE_SIZE: 2, + VALUE_FORMAT: 1, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + +async def test_get_raw_config_parameter( + hass: HomeAssistant, + multisensor_6, + integration, + client, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the get_raw_config_parameter websocket command.""" + entry = integration + ws_client = await hass_ws_client(hass) + device = get_device(hass, multisensor_6) + + client.async_send_command.return_value = {"value": 1} + + # Test getting a raw config parameter value + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"]["value"] == 1 + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "endpoint.get_raw_config_parameter_value" + assert args["nodeId"] == multisensor_6.node_id + assert args["parameter"] == 102 + + client.async_send_command.reset_mock() + + # Test FailedZWaveCommand is caught + with patch( + "zwave_js_server.model.node.Node.async_get_raw_config_parameter_value", + side_effect=FailedZWaveCommand("failed_command", 1, "error message"), + ): + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "zwave_error" + assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" + + # Test getting non-existent node fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: "fake_device", + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + # Test FailedCommand exception + client.async_send_command.side_effect = FailedCommand("test", "test") + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "test" + assert msg["error"]["message"] == "Command failed: test" + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + @pytest.mark.parametrize( ("firmware_data", "expected_data"), [({"target": "1"}, 
{"firmware_target": 1}), ({}, {})], @@ -4780,3 +5011,315 @@ async def test_hard_reset_controller( assert not msg["success"] assert msg["error"]["code"] == ERR_NOT_FOUND + + +async def test_node_capabilities( + hass: HomeAssistant, + multisensor_6: Node, + integration: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the node_capabilities websocket command.""" + entry = integration + ws_client = await hass_ws_client(hass) + + node = multisensor_6 + device = get_device(hass, node) + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/node_capabilities", + DEVICE_ID: device.id, + } + ) + msg = await ws_client.receive_json() + assert msg["result"] == { + "0": [ + { + "id": 113, + "name": "Notification", + "version": 8, + "isSecure": False, + "is_secure": False, + } + ] + } + + # Test getting non-existent node fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/node_status", + DEVICE_ID: "fake_device", + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/node_status", + DEVICE_ID: device.id, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + +async def test_invoke_cc_api( + hass: HomeAssistant, + client, + climate_radio_thermostat_ct100_plus_different_endpoints: Node, + integration: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the invoke_cc_api websocket command.""" + ws_client = await hass_ws_client(hass) + + device_radio_thermostat = get_device( + hass, climate_radio_thermostat_ct100_plus_different_endpoints + ) + assert device_radio_thermostat + + # Test successful invoke_cc_api call with a static endpoint + client.async_send_command.return_value = {"response": True} + client.async_send_command_no_wait.return_value = {"response": True} + + # Test with wait_for_result=False (default) + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/invoke_cc_api", + DEVICE_ID: device_radio_thermostat.id, + ATTR_COMMAND_CLASS: 67, + ATTR_METHOD_NAME: "someMethod", + ATTR_PARAMETERS: [1, 2], + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] is None # We did not specify wait_for_result=True + + await hass.async_block_till_done() + + assert len(client.async_send_command_no_wait.call_args_list) == 1 + args = client.async_send_command_no_wait.call_args[0][0] + assert args == { + "command": "endpoint.invoke_cc_api", + "nodeId": 26, + "endpoint": 0, + "commandClass": 67, + "methodName": "someMethod", + "args": [1, 2], + } + + client.async_send_command_no_wait.reset_mock() + + # Test with wait_for_result=True + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/invoke_cc_api", + DEVICE_ID: device_radio_thermostat.id, + ATTR_COMMAND_CLASS: 67, + ATTR_ENDPOINT: 0, + ATTR_METHOD_NAME: "someMethod", + ATTR_PARAMETERS: [1, 2], + ATTR_WAIT_FOR_RESULT: True, + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] is True + + await hass.async_block_till_done() + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args == { + "command": "endpoint.invoke_cc_api", + "nodeId": 26, + "endpoint": 0, + "commandClass": 67, + "methodName": 
"someMethod", + "args": [1, 2], + } + + client.async_send_command.side_effect = NotFoundError + + # Ensure an error is returned + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/invoke_cc_api", + DEVICE_ID: device_radio_thermostat.id, + ATTR_COMMAND_CLASS: 67, + ATTR_ENDPOINT: 0, + ATTR_METHOD_NAME: "someMethod", + ATTR_PARAMETERS: [1, 2], + ATTR_WAIT_FOR_RESULT: True, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"] == {"code": "NotFoundError", "message": ""} + + +@pytest.mark.parametrize( + ("config", "installer_mode"), [({}, False), ({CONF_INSTALLER_MODE: True}, True)] +) +async def test_get_integration_settings( + config: dict[str, Any], + installer_mode: bool, + hass: HomeAssistant, + client: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test that the get_integration_settings WS API call works.""" + ws_client = await hass_ws_client(hass) + + entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) + entry.add_to_hass(hass) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_integration_settings", + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] == { + CONF_INSTALLER_MODE: installer_mode, + } + + +async def test_cancel_secure_bootstrap_s2( + hass: HomeAssistant, client, integration, hass_ws_client: WebSocketGenerator +) -> None: + """Test that the cancel_secure_bootstrap_s2 WS API call works.""" + entry = integration + ws_client = await hass_ws_client(hass) + + # Test successful cancellation + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/cancel_secure_bootstrap_s2", + ENTRY_ID: entry.entry_id, + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "controller.cancel_secure_bootstrap_s2" + + # Test FailedZWaveCommand is caught + with patch( + f"{CONTROLLER_PATCH_PREFIX}.async_cancel_secure_bootstrap_s2", + side_effect=FailedZWaveCommand("failed_command", 1, "error message"), + ): + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/cancel_secure_bootstrap_s2", + ENTRY_ID: entry.entry_id, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "zwave_error" + assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/cancel_secure_bootstrap_s2", + ENTRY_ID: entry.entry_id, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + # Test sending command with invalid entry ID fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/cancel_secure_bootstrap_s2", + ENTRY_ID: "invalid_entry_id", + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + +async def test_subscribe_s2_inclusion( + hass: HomeAssistant, integration, client, hass_ws_client: WebSocketGenerator +) -> None: + """Test the subscribe_s2_inclusion websocket command.""" + entry = integration + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { 
+ TYPE: "zwave_js/subscribe_s2_inclusion", + ENTRY_ID: entry.entry_id, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] is None + + # Test receiving DSK request event + event = Event( + type="validate dsk and enter pin", + data={ + "source": "controller", + "event": "validate dsk and enter pin", + "dsk": "test_dsk", + }, + ) + client.driver.receive_event(event) + + msg = await ws_client.receive_json() + assert msg["event"] == { + "event": "validate dsk and enter pin", + "dsk": "test_dsk", + } + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/subscribe_s2_inclusion", + ENTRY_ID: entry.entry_id, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + # Test invalid config entry id + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/subscribe_s2_inclusion", + ENTRY_ID: "INVALID", + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND diff --git a/tests/components/zwave_js/test_climate.py b/tests/components/zwave_js/test_climate.py index 9a4559de1a5..5d711528a28 100644 --- a/tests/components/zwave_js/test_climate.py +++ b/tests/components/zwave_js/test_climate.py @@ -812,8 +812,8 @@ async def test_thermostat_heatit_z_trm2fx( | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - assert state.attributes[ATTR_MIN_TEMP] == 7 - assert state.attributes[ATTR_MAX_TEMP] == 35 + assert state.attributes[ATTR_MIN_TEMP] == 0 + assert state.attributes[ATTR_MAX_TEMP] == 50 # Try switching to external sensor event = Event( diff --git a/tests/components/zwave_js/test_config_flow.py b/tests/components/zwave_js/test_config_flow.py index fe16f38257a..b60515cacd4 100644 --- a/tests/components/zwave_js/test_config_flow.py +++ b/tests/components/zwave_js/test_config_flow.py @@ -6,7 +6,10 @@ from copy import copy from ipaddress import ip_address from typing import Any from unittest.mock import AsyncMock, MagicMock, call, patch +from uuid import uuid4 +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsOptions, Discovery import aiohttp import pytest from serial.tools.list_ports_common import ListPortInfo @@ -14,13 +17,12 @@ from zwave_js_server.version import VersionInfo from homeassistant import config_entries from homeassistant.components import usb -from homeassistant.components.hassio import HassioServiceInfo -from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.components.zwave_js.config_flow import SERVER_VERSION_TIMEOUT, TITLE from homeassistant.components.zwave_js.const import ADDON_SLUG, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry @@ -554,7 +556,19 @@ async def test_abort_hassio_discovery_for_other_addon( assert result2["reason"] == "not_zwave_js_addon" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_usb_discovery( hass: 
HomeAssistant, supervisor, @@ -584,7 +598,7 @@ async def test_usb_discovery( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -602,10 +616,9 @@ async def test_usb_discovery( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": USB_DISCOVERY_INFO.device, "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -614,7 +627,7 @@ async def test_usb_discovery( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -633,7 +646,7 @@ async def test_usb_discovery( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -653,7 +666,19 @@ async def test_usb_discovery( assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_usb_discovery_addon_not_running( hass: HomeAssistant, supervisor, @@ -703,10 +728,9 @@ async def test_usb_discovery_addon_not_running( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": USB_DISCOVERY_INFO.device, "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -715,7 +739,7 @@ async def test_usb_discovery_addon_not_running( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -734,7 +758,7 @@ async def test_usb_discovery_addon_not_running( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -798,10 +822,9 @@ async def test_discovery_addon_not_running( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -810,7 +833,7 @@ async def test_discovery_addon_not_running( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -829,7 +852,7 @@ async def test_discovery_addon_not_running( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -882,7 +905,7 @@ async def test_discovery_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") 
+ assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -901,10 +924,9 @@ async def test_discovery_addon_not_installed( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -913,7 +935,7 @@ async def test_discovery_addon_not_installed( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -932,7 +954,7 @@ async def test_discovery_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -1093,7 +1115,19 @@ async def test_not_addon(hass: HomeAssistant, supervisor) -> None: assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_running( hass: HomeAssistant, supervisor, @@ -1159,31 +1193,52 @@ async def test_addon_running( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, - HassioAPIError(), + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), None, None, "addon_get_discovery_info_failed", ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, TimeoutError, None, "cannot_connect", ), ( - None, + [], None, None, None, "addon_get_discovery_info_failed", ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, None, - HassioAPIError(), + SupervisorError(), "addon_info_failed", ), ], @@ -1215,7 +1270,19 @@ async def test_addon_running_failures( assert result["reason"] == abort_reason -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_running_already_configured( hass: HomeAssistant, supervisor, @@ -1274,7 +1341,19 @@ async def test_addon_running_already_configured( assert entry.data["lr_s2_authenticated_key"] == "new321" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_installed( hass: HomeAssistant, supervisor, @@ -1314,10 +1393,9 @@ async def test_addon_installed( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1326,7 +1404,7 @@ async def test_addon_installed( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": 
"new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1345,7 +1423,7 @@ async def test_addon_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -1367,7 +1445,17 @@ async def test_addon_installed( @pytest.mark.parametrize( ("discovery_info", "start_addon_side_effect"), - [({"config": ADDON_DISCOVERY_INFO}, HassioAPIError())], + [ + ( + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ), + SupervisorError(), + ) + ], ) async def test_addon_installed_start_failure( hass: HomeAssistant, @@ -1408,10 +1496,9 @@ async def test_addon_installed_start_failure( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1420,7 +1507,7 @@ async def test_addon_installed_start_failure( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1429,7 +1516,7 @@ async def test_addon_installed_start_failure( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_start_failed" @@ -1439,11 +1526,18 @@ async def test_addon_installed_start_failure( ("discovery_info", "server_version_side_effect"), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], TimeoutError, ), ( - None, + [], None, ), ], @@ -1487,10 +1581,9 @@ async def test_addon_installed_failures( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1499,7 +1592,7 @@ async def test_addon_installed_failures( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1508,7 +1601,7 @@ async def test_addon_installed_failures( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_start_failed" @@ -1516,7 +1609,19 @@ async def test_addon_installed_failures( @pytest.mark.parametrize( ("set_addon_options_side_effect", "discovery_info"), - [(HassioAPIError(), {"config": ADDON_DISCOVERY_INFO})], + [ + ( + SupervisorError(), + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + ) + ], ) async def test_addon_installed_set_options_failure( hass: HomeAssistant, @@ -1557,10 +1662,9 @@ async def test_addon_installed_set_options_failure( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", 
"s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1569,7 +1673,7 @@ async def test_addon_installed_set_options_failure( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.ABORT @@ -1578,7 +1682,19 @@ async def test_addon_installed_set_options_failure( assert start_addon.call_count == 0 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_installed_already_configured( hass: HomeAssistant, supervisor, @@ -1635,10 +1751,9 @@ async def test_addon_installed_already_configured( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/new", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1647,7 +1762,7 @@ async def test_addon_installed_already_configured( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1656,7 +1771,7 @@ async def test_addon_installed_already_configured( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" @@ -1670,7 +1785,19 @@ async def test_addon_installed_already_configured( assert entry.data["lr_s2_authenticated_key"] == "new321" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_not_installed( hass: HomeAssistant, supervisor, @@ -1701,7 +1828,7 @@ async def test_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -1720,10 +1847,9 @@ async def test_addon_not_installed( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1732,7 +1858,7 @@ async def test_addon_not_installed( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1751,7 +1877,7 @@ async def test_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -1775,7 +1901,7 @@ async def test_install_addon_failure( hass: HomeAssistant, supervisor, addon_not_installed, install_addon ) -> None: """Test add-on install failure.""" - install_addon.side_effect = HassioAPIError() + install_addon.side_effect = SupervisorError() result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -1795,7 +1921,7 @@ async def test_install_addon_failure( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" @@ -1896,7 +2022,14 @@ async def test_options_not_addon( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -1922,7 +2055,14 @@ async def test_options_not_addon( 0, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {"use_addon": True}, { "device": "/test", @@ -1995,9 +2135,8 @@ async def test_options_addon_running( new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - hass, "core_zwave_js", - {"options": new_addon_options}, + AddonsOptions(config=new_addon_options), ) assert client.disconnect.call_count == disconnect_calls @@ -2008,7 +2147,7 @@ async def test_options_addon_running( result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert restart_addon.call_args == call(hass, "core_zwave_js") + assert restart_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert entry.data["url"] == "ws://host1:3001" @@ -2043,7 +2182,14 @@ async def test_options_addon_running( ("discovery_info", "entry_data", "old_addon_options", "new_addon_options"), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2170,7 +2316,14 @@ async def different_device_server_version(*args): ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2199,7 +2352,14 @@ async def different_device_server_version(*args): different_device_server_version, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2276,9 +2436,7 @@ async def test_options_different_device( assert set_addon_options.call_count == 1 new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": new_addon_options}, + "core_zwave_js", AddonsOptions(config=new_addon_options) ) assert client.disconnect.call_count == disconnect_calls assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -2287,7 +2445,7 @@ async def test_options_different_device( await hass.async_block_till_done() assert restart_addon.call_count == 1 - assert restart_addon.call_args == call(hass, "core_zwave_js") + assert restart_addon.call_args == call("core_zwave_js") result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2299,9 +2457,7 @@ async def test_options_different_device( assert set_addon_options.call_count == 2 assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": 
addon_options}, + "core_zwave_js", AddonsOptions(config=addon_options) ) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -2309,7 +2465,7 @@ async def test_options_different_device( await hass.async_block_till_done() assert restart_addon.call_count == 2 - assert restart_addon.call_args == call(hass, "core_zwave_js") + assert restart_addon.call_args == call("core_zwave_js") result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2332,7 +2488,14 @@ async def test_options_different_device( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2358,10 +2521,17 @@ async def test_options_different_device( "emulate_hardware": False, }, 0, - [HassioAPIError(), None], + [SupervisorError(), None], ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2388,8 +2558,8 @@ async def test_options_different_device( }, 0, [ - HassioAPIError(), - HassioAPIError(), + SupervisorError(), + SupervisorError(), ], ), ], @@ -2442,9 +2612,7 @@ async def test_options_addon_restart_failed( assert set_addon_options.call_count == 1 new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": new_addon_options}, + "core_zwave_js", AddonsOptions(config=new_addon_options) ) assert client.disconnect.call_count == disconnect_calls assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -2453,7 +2621,7 @@ async def test_options_addon_restart_failed( await hass.async_block_till_done() assert restart_addon.call_count == 1 - assert restart_addon.call_args == call(hass, "core_zwave_js") + assert restart_addon.call_args == call("core_zwave_js") result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2462,9 +2630,7 @@ async def test_options_addon_restart_failed( old_addon_options.pop("network_key") assert set_addon_options.call_count == 2 assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": old_addon_options}, + "core_zwave_js", AddonsOptions(config=old_addon_options) ) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -2472,7 +2638,7 @@ async def test_options_addon_restart_failed( await hass.async_block_till_done() assert restart_addon.call_count == 2 - assert restart_addon.call_args == call(hass, "core_zwave_js") + assert restart_addon.call_args == call("core_zwave_js") result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2495,7 +2661,14 @@ async def test_options_addon_restart_failed( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2588,7 +2761,14 @@ async def test_options_addon_running_server_info_failure( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2614,7 +2794,14 @@ async def test_options_addon_running_server_info_failure( 0, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + 
Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {"use_addon": True}, { "device": "/test", @@ -2686,7 +2873,7 @@ async def test_options_addon_not_installed( result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -2698,9 +2885,7 @@ async def test_options_addon_not_installed( new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": new_addon_options}, + "core_zwave_js", AddonsOptions(config=new_addon_options) ) assert client.disconnect.call_count == disconnect_calls @@ -2710,7 +2895,7 @@ async def test_options_addon_not_installed( await hass.async_block_till_done() assert start_addon.call_count == 1 - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2726,104 +2911,6 @@ async def test_options_addon_not_installed( assert client.disconnect.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) -async def test_import_addon_installed( - hass: HomeAssistant, - supervisor, - addon_installed, - addon_options, - set_addon_options, - start_addon, - get_addon_discovery_info, - serial_port, -) -> None: - """Test import step while add-on already installed on Supervisor.""" - serial_port.device = "/test/imported" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={"usb_path": "/test/imported", "network_key": "imported123"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "on_supervisor" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"use_addon": True} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "configure_addon" - - # the default input should be the imported data - default_input = result["data_schema"]({}) - - assert default_input == { - "usb_path": "/test/imported", - "s0_legacy_key": "imported123", - "s2_access_control_key": "", - "s2_authenticated_key": "", - "s2_unauthenticated_key": "", - "lr_s2_access_control_key": "", - "lr_s2_authenticated_key": "", - } - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], default_input - ) - - assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - { - "options": { - "device": "/test/imported", - "s0_legacy_key": "imported123", - "s2_access_control_key": "", - "s2_authenticated_key": "", - "s2_unauthenticated_key": "", - "lr_s2_access_control_key": "", - "lr_s2_authenticated_key": "", - } - }, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - - with ( - patch( - "homeassistant.components.zwave_js.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.zwave_js.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - await hass.async_block_till_done() - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - await hass.async_block_till_done() - - assert start_addon.call_args == call(hass, "core_zwave_js") - 
- assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == TITLE - assert result["data"] == { - "url": "ws://host1:3001", - "usb_path": "/test/imported", - "s0_legacy_key": "imported123", - "s2_access_control_key": "", - "s2_authenticated_key": "", - "s2_unauthenticated_key": "", - "lr_s2_access_control_key": "", - "lr_s2_authenticated_key": "", - "use_addon": True, - "integration_created_addon": False, - } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - async def test_zeroconf(hass: HomeAssistant) -> None: """Test zeroconf discovery.""" diff --git a/tests/components/zwave_js/test_config_validation.py b/tests/components/zwave_js/test_config_validation.py index 8428972bde1..cebbde3c9b1 100644 --- a/tests/components/zwave_js/test_config_validation.py +++ b/tests/components/zwave_js/test_config_validation.py @@ -1,27 +1,31 @@ """Test the Z-Wave JS config validation helpers.""" +from typing import Any + import pytest import voluptuous as vol -from homeassistant.components.zwave_js.config_validation import boolean +from homeassistant.components.zwave_js.config_validation import VALUE_SCHEMA, boolean -def test_boolean_validation() -> None: - """Test boolean config validator.""" - # test bool - assert boolean(True) - assert not boolean(False) - # test strings - assert boolean("TRUE") - assert not boolean("FALSE") - assert boolean("ON") - assert not boolean("NO") - # ensure 1's and 0's don't get converted to bool +@pytest.mark.parametrize( + ("test_cases", "expected_value"), + [ + ([True, "true", "yes", "on", "ON", "enable"], True), + ([False, "false", "no", "off", "NO", "disable"], False), + ([1.1, "1.1"], 1.1), + ([1.0, "1.0"], 1.0), + ([1, "1"], 1), + ], +) +def test_validation(test_cases: list[Any], expected_value: Any) -> None: + """Test config validation.""" + for case in test_cases: + assert VALUE_SCHEMA(case) == expected_value + + +@pytest.mark.parametrize("value", ["invalid", "1", "0", 1, 0]) +def test_invalid_boolean_validation(value: str | int) -> None: + """Test invalid cases for boolean config validator.""" with pytest.raises(vol.Invalid): - boolean("1") - with pytest.raises(vol.Invalid): - boolean("0") - with pytest.raises(vol.Invalid): - boolean(1) - with pytest.raises(vol.Invalid): - boolean(0) + boolean(value) diff --git a/tests/components/zwave_js/test_cover.py b/tests/components/zwave_js/test_cover.py index 4ecd697f4d1..b13d4f9787f 100644 --- a/tests/components/zwave_js/test_cover.py +++ b/tests/components/zwave_js/test_cover.py @@ -15,7 +15,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, @@ -26,6 +26,7 @@ from homeassistant.components.cover import ( SERVICE_STOP_COVER_TILT, CoverDeviceClass, CoverEntityFeature, + CoverState, ) from homeassistant.components.zwave_js.const import LOGGER from homeassistant.components.zwave_js.helpers import ZwaveValueMatcher @@ -33,10 +34,6 @@ from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -63,12 +60,12 @@ async def test_window_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.WINDOW - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 
0 # Test setting position await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY, ATTR_POSITION: 50}, blocking=True, @@ -89,7 +86,7 @@ async def test_window_cover( # Test setting position await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY, ATTR_POSITION: 0}, blocking=True, @@ -110,7 +107,7 @@ async def test_window_cover( # Test opening await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -131,7 +128,7 @@ async def test_window_cover( # Test stop after opening await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -170,11 +167,11 @@ async def test_window_cover( client.async_send_command.reset_mock() state = hass.states.get(WINDOW_COVER_ENTITY) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Test closing await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -194,7 +191,7 @@ async def test_window_cover( # Test stop after closing await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -233,7 +230,7 @@ async def test_window_cover( node.receive_event(event) state = hass.states.get(WINDOW_COVER_ENTITY) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async def test_fibaro_fgr222_shutter_cover( @@ -244,12 +241,12 @@ async def test_fibaro_fgr222_shutter_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.SHUTTER - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Test opening tilts await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_222_SHUTTER_COVER_ENTITY}, blocking=True, @@ -271,7 +268,7 @@ async def test_fibaro_fgr222_shutter_cover( # Test closing tilts await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_222_SHUTTER_COVER_ENTITY}, blocking=True, @@ -293,7 +290,7 @@ async def test_fibaro_fgr222_shutter_cover( # Test setting tilt position await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: FIBARO_FGR_222_SHUTTER_COVER_ENTITY, ATTR_TILT_POSITION: 12}, blocking=True, @@ -345,12 +342,12 @@ async def test_fibaro_fgr223_shutter_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.SHUTTER - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Test opening tilts await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_223_SHUTTER_COVER_ENTITY}, blocking=True, @@ -370,7 +367,7 @@ async def test_fibaro_fgr223_shutter_cover( client.async_send_command.reset_mock() # Test closing tilts await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_223_SHUTTER_COVER_ENTITY}, blocking=True, @@ -390,7 +387,7 @@ async def test_fibaro_fgr223_shutter_cover( client.async_send_command.reset_mock() # Test setting tilt position await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, 
{ATTR_ENTITY_ID: FIBARO_FGR_223_SHUTTER_COVER_ENTITY, ATTR_TILT_POSITION: 12}, blocking=True, @@ -441,12 +438,12 @@ async def test_aeotec_nano_shutter_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.WINDOW - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 # Test opening await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -467,7 +464,7 @@ async def test_aeotec_nano_shutter_cover( # Test stop after opening await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -507,11 +504,11 @@ async def test_aeotec_nano_shutter_cover( client.async_send_command.reset_mock() state = hass.states.get(AEOTEC_SHUTTER_COVER_ENTITY) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Test closing await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -531,7 +528,7 @@ async def test_aeotec_nano_shutter_cover( # Test stop after closing await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -579,11 +576,14 @@ async def test_motor_barrier_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.GARAGE - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED # Test open await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, + blocking=True, ) assert len(client.async_send_command.call_args_list) == 1 @@ -599,13 +599,16 @@ async def test_motor_barrier_cover( # state doesn't change until currentState value update is received state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED client.async_send_command.reset_mock() # Test close await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, blocking=True + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, + blocking=True, ) assert len(client.async_send_command.call_args_list) == 1 @@ -621,7 +624,7 @@ async def test_motor_barrier_cover( # state doesn't change until currentState value update is received state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED client.async_send_command.reset_mock() @@ -646,7 +649,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # Barrier sends an opened state event = Event( @@ -669,7 +672,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Barrier sends a closing state event = Event( @@ -692,7 +695,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # Barrier sends a closed state event = Event( @@ -715,7 +718,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = 
hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED # Barrier sends a stopped state event = Event( @@ -821,7 +824,7 @@ async def test_fibaro_fgr223_shutter_cover_no_tilt( state = hass.states.get(FIBARO_FGR_223_SHUTTER_COVER_ENTITY) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_CURRENT_POSITION in state.attributes assert ATTR_CURRENT_TILT_POSITION not in state.attributes @@ -846,7 +849,7 @@ async def test_iblinds_v3_cover( assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -867,7 +870,7 @@ async def test_iblinds_v3_cover( client.async_send_command.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -888,7 +891,7 @@ async def test_iblinds_v3_cover( client.async_send_command.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: entity_id, ATTR_TILT_POSITION: 12}, blocking=True, @@ -909,7 +912,7 @@ async def test_iblinds_v3_cover( client.async_send_command.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -938,7 +941,7 @@ async def test_nice_ibt4zwave_cover( state = hass.states.get(entity_id) assert state # This device has no state because there is no position value - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_SUPPORTED_FEATURES] == ( CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN @@ -950,7 +953,7 @@ async def test_nice_ibt4zwave_cover( assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.GATE await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -970,7 +973,7 @@ async def test_nice_ibt4zwave_cover( client.async_send_command.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -988,3 +991,106 @@ async def test_nice_ibt4zwave_cover( assert args["value"] == 99 client.async_send_command.reset_mock() + + +async def test_window_covering_open_close( + hass: HomeAssistant, client, window_covering_outbound_bottom, integration +) -> None: + """Test Window Covering device open and close commands. + + A Window Covering device with position support + should be able to open/close with the start/stop level change properties. 
+ """ + entity_id = "cover.node_2_outbound_bottom" + state = hass.states.get(entity_id) + + # The entity has position support, but not tilt + assert state + assert ATTR_CURRENT_POSITION in state.attributes + assert ATTR_CURRENT_TILT_POSITION not in state.attributes + + # Test opening + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == 2 + assert args["valueId"] == { + "commandClass": 106, + "endpoint": 0, + "property": "levelChangeUp", + "propertyKey": 13, + } + assert args["value"] is True + + client.async_send_command.reset_mock() + + # Test stop after opening + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == 2 + assert args["valueId"] == { + "commandClass": 106, + "endpoint": 0, + "property": "levelChangeUp", + "propertyKey": 13, + } + assert args["value"] is False + + client.async_send_command.reset_mock() + + # Test closing + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == 2 + assert args["valueId"] == { + "commandClass": 106, + "endpoint": 0, + "property": "levelChangeDown", + "propertyKey": 13, + } + assert args["value"] is True + + client.async_send_command.reset_mock() + + # Test stop after closing + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == 2 + assert args["valueId"] == { + "commandClass": 106, + "endpoint": 0, + "property": "levelChangeUp", + "propertyKey": 13, + } + assert args["value"] is False + + client.async_send_command.reset_mock() diff --git a/tests/components/zwave_js/test_diagnostics.py b/tests/components/zwave_js/test_diagnostics.py index 0e6645d9d61..835b85177fe 100644 --- a/tests/components/zwave_js/test_diagnostics.py +++ b/tests/components/zwave_js/test_diagnostics.py @@ -1,9 +1,11 @@ """Test the Z-Wave JS diagnostics.""" import copy +from typing import Any, cast from unittest.mock import patch import pytest +from syrupy.assertion import SnapshotAssertion from zwave_js_server.const import CommandClass from zwave_js_server.event import Event from zwave_js_server.model.node import Node @@ -13,7 +15,6 @@ from homeassistant.components.zwave_js.diagnostics import ( ZwaveValueMatcher, async_get_device_diagnostics, ) -from homeassistant.components.zwave_js.discovery import async_discover_node_values from homeassistant.components.zwave_js.helpers import ( get_device_id, get_value_id_from_unique_id, @@ -58,6 +59,7 @@ async def test_device_diagnostics( integration, hass_client: ClientSessionGenerator, version_state, + snapshot: SnapshotAssertion, ) -> None: """Test the device level diagnostics data dump.""" device = device_registry.async_get_device( @@ 
-113,18 +115,18 @@ async def test_device_diagnostics( # Entities that are created outside of discovery (e.g. node status sensor and # ping button) as well as helper entities created from other integrations should # not be in dump. - assert len(diagnostics_data["entities"]) == len( - list(async_discover_node_values(multisensor_6, device, {device.id: set()})) - ) + assert diagnostics_data == snapshot + assert any( - entity.entity_id == "test.unrelated_entity" - for entity in er.async_entries_for_device(entity_registry, device.id) + entity_entry.entity_id == "test.unrelated_entity" + for entity_entry in er.async_entries_for_device(entity_registry, device.id) ) # Explicitly check that the entity that is not part of this config entry is not # in the dump. + diagnostics_entities = cast(list[dict[str, Any]], diagnostics_data["entities"]) assert not any( entity["entity_id"] == "test.unrelated_entity" - for entity in diagnostics_data["entities"] + for entity in diagnostics_entities ) assert diagnostics_data["state"] == { **multisensor_6.data, @@ -171,6 +173,7 @@ async def test_device_diagnostics_missing_primary_value( entity_id = "sensor.multisensor_6_air_temperature" entry = entity_registry.async_get(entity_id) + assert entry # check that the primary value for the entity exists in the diagnostics diagnostics_data = await get_diagnostics_for_device( @@ -180,9 +183,8 @@ async def test_device_diagnostics_missing_primary_value( value = multisensor_6.values.get(get_value_id_from_unique_id(entry.unique_id)) assert value - air_entity = next( - x for x in diagnostics_data["entities"] if x["entity_id"] == entity_id - ) + diagnostics_entities = cast(list[dict[str, Any]], diagnostics_data["entities"]) + air_entity = next(x for x in diagnostics_entities if x["entity_id"] == entity_id) assert air_entity["value_id"] == value.value_id assert air_entity["primary_value"] == { @@ -218,9 +220,8 @@ async def test_device_diagnostics_missing_primary_value( hass, hass_client, integration, device ) - air_entity = next( - x for x in diagnostics_data["entities"] if x["entity_id"] == entity_id - ) + diagnostics_entities = cast(list[dict[str, Any]], diagnostics_data["entities"]) + air_entity = next(x for x in diagnostics_entities if x["entity_id"] == entity_id) assert air_entity["value_id"] == value.value_id assert air_entity["primary_value"] is None @@ -266,5 +267,6 @@ async def test_device_diagnostics_secret_value( diagnostics_data = await get_diagnostics_for_device( hass, hass_client, integration, device ) - test_value = _find_ultraviolet_val(diagnostics_data["state"]) + diagnostics_node_state = cast(dict[str, Any], diagnostics_data["state"]) + test_value = _find_ultraviolet_val(diagnostics_node_state) assert test_value["value"] == REDACTED diff --git a/tests/components/zwave_js/test_discovery.py b/tests/components/zwave_js/test_discovery.py index 57841ef2a83..0be0cca78c8 100644 --- a/tests/components/zwave_js/test_discovery.py +++ b/tests/components/zwave_js/test_discovery.py @@ -1,9 +1,12 @@ """Test entity discovery for device-specific schemas for the Z-Wave JS integration.""" import pytest +from zwave_js_server.event import Event +from zwave_js_server.model.node import Node from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.light import ATTR_SUPPORTED_COLOR_MODES, ColorMode from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, @@ -28,6 +31,8 @@ 
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_UNKNOWN, Entity from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er +from tests.common import MockConfigEntry + async def test_aeon_smart_switch_6_state( hass: HomeAssistant, client, aeon_smart_switch_6, integration @@ -380,3 +385,61 @@ async def test_light_device_class_is_null( node = light_device_class_is_null assert node.device_class is None assert hass.states.get("light.bar_display_cases") + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_rediscovery( + hass: HomeAssistant, + siren_neo_coolcam: Node, + integration: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that we don't rediscover known values.""" + node = siren_neo_coolcam + entity_id = "select.siren_alarm_doorbell_sound_selection" + state = hass.states.get(entity_id) + + assert state + assert state.state == "Beep" + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": 36, + "args": { + "commandClassName": "Configuration", + "commandClass": 112, + "endpoint": 0, + "property": 6, + "newValue": 9, + "prevValue": 10, + "propertyName": "Doorbell Sound Selection", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + + assert state + assert state.state == "Beep Beep" + assert "Platform zwave_js does not generate unique IDs" not in caplog.text + + +async def test_aeotec_smart_switch_7( + hass: HomeAssistant, + aeotec_smart_switch_7: Node, + integration: MockConfigEntry, +) -> None: + """Test that Smart Switch 7 has a light and a switch entity.""" + state = hass.states.get("light.smart_switch_7") + assert state + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ + ColorMode.HS, + ] + + state = hass.states.get("switch.smart_switch_7") + assert state diff --git a/tests/components/zwave_js/test_init.py b/tests/components/zwave_js/test_init.py index 51aeee72c1d..4f858f3e545 100644 --- a/tests/components/zwave_js/test_init.py +++ b/tests/components/zwave_js/test_init.py @@ -5,6 +5,8 @@ from copy import deepcopy import logging from unittest.mock import AsyncMock, call, patch +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsOptions import pytest from zwave_js_server.client import Client from zwave_js_server.event import Event @@ -12,7 +14,7 @@ from zwave_js_server.exceptions import BaseZwaveJSServerError, InvalidServerVers from zwave_js_server.model.node import Node from zwave_js_server.model.version import VersionInfo -from homeassistant.components.hassio.handler import HassioAPIError +from homeassistant.components.hassio import HassioAPIError from homeassistant.components.logger import DOMAIN as LOGGER_DOMAIN, SERVICE_SET_LEVEL from homeassistant.components.persistent_notification import async_dismiss from homeassistant.components.zwave_js import DOMAIN @@ -553,10 +555,10 @@ async def test_start_addon( assert install_addon.call_count == 0 assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - hass, "core_zwave_js", {"options": addon_options} + "core_zwave_js", AddonsOptions(config=addon_options) ) assert start_addon.call_count == 1 - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") async def test_install_addon( @@ -599,16 +601,16 @@ async def test_install_addon( assert entry.state is 
ConfigEntryState.SETUP_RETRY assert install_addon.call_count == 1 - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - hass, "core_zwave_js", {"options": addon_options} + "core_zwave_js", AddonsOptions(config=addon_options) ) assert start_addon.call_count == 1 - assert start_addon.call_args == call(hass, "core_zwave_js") + assert start_addon.call_args == call("core_zwave_js") -@pytest.mark.parametrize("addon_info_side_effect", [HassioAPIError("Boom")]) +@pytest.mark.parametrize("addon_info_side_effect", [SupervisorError("Boom")]) async def test_addon_info_failure( hass: HomeAssistant, addon_installed, @@ -746,7 +748,7 @@ async def test_addon_options_changed( [ ("1.0.0", True, 1, 1, None, None), ("1.0.0", False, 0, 0, None, None), - ("1.0.0", True, 1, 1, HassioAPIError("Boom"), None), + ("1.0.0", True, 1, 1, SupervisorError("Boom"), None), ("1.0.0", True, 0, 1, None, HassioAPIError("Boom")), ], ) @@ -772,8 +774,8 @@ async def test_update_addon( network_key = "abc123" addon_options["device"] = device addon_options["network_key"] = network_key - addon_info.return_value["version"] = addon_version - addon_info.return_value["update_available"] = update_available + addon_info.return_value.version = addon_version + addon_info.return_value.update_available = update_available create_backup.side_effect = create_backup_side_effect update_addon.side_effect = update_addon_side_effect client.connect.side_effect = InvalidServerVersion( @@ -845,7 +847,7 @@ async def test_issue_registry( ("stop_addon_side_effect", "entry_state"), [ (None, ConfigEntryState.NOT_LOADED), - (HassioAPIError("Boom"), ConfigEntryState.LOADED), + (SupervisorError("Boom"), ConfigEntryState.LOADED), ], ) async def test_stop_addon( @@ -888,7 +890,7 @@ async def test_stop_addon( assert entry.state == entry_state assert stop_addon.call_count == 1 - assert stop_addon.call_args == call(hass, "core_zwave_js") + assert stop_addon.call_args == call("core_zwave_js") async def test_remove_entry( @@ -927,7 +929,7 @@ async def test_remove_entry( await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call(hass, "core_zwave_js") + assert stop_addon.call_args == call("core_zwave_js") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -935,7 +937,7 @@ async def test_remove_entry( partial=True, ) assert uninstall_addon.call_count == 1 - assert uninstall_addon.call_args == call(hass, "core_zwave_js") + assert uninstall_addon.call_args == call("core_zwave_js") assert entry.state is ConfigEntryState.NOT_LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 0 stop_addon.reset_mock() @@ -945,12 +947,12 @@ async def test_remove_entry( # test add-on stop failure entry.add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - stop_addon.side_effect = HassioAPIError() + stop_addon.side_effect = SupervisorError() await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call(hass, "core_zwave_js") + assert stop_addon.call_args == call("core_zwave_js") assert create_backup.call_count == 0 assert uninstall_addon.call_count == 0 assert entry.state is ConfigEntryState.NOT_LOADED @@ -969,7 +971,7 @@ async def test_remove_entry( await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - 
assert stop_addon.call_args == call(hass, "core_zwave_js") + assert stop_addon.call_args == call("core_zwave_js") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -988,12 +990,12 @@ async def test_remove_entry( # test add-on uninstall failure entry.add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - uninstall_addon.side_effect = HassioAPIError() + uninstall_addon.side_effect = SupervisorError() await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call(hass, "core_zwave_js") + assert stop_addon.call_args == call("core_zwave_js") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -1001,7 +1003,7 @@ async def test_remove_entry( partial=True, ) assert uninstall_addon.call_count == 1 - assert uninstall_addon.call_args == call(hass, "core_zwave_js") + assert uninstall_addon.call_args == call("core_zwave_js") assert entry.state is ConfigEntryState.NOT_LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 0 assert "Failed to uninstall the Z-Wave JS add-on" in caplog.text @@ -1573,13 +1575,9 @@ async def test_disabled_entity_on_value_removed( hass: HomeAssistant, entity_registry: er.EntityRegistry, zp3111, client, integration ) -> None: """Test that when entity primary values are removed the entity is removed.""" - # re-enable this default-disabled entity - sensor_cover_entity = "sensor.4_in_1_sensor_home_security_cover_status" idle_cover_status_button_entity = ( "button.4_in_1_sensor_idle_home_security_cover_status" ) - entity_registry.async_update_entity(entity_id=sensor_cover_entity, disabled_by=None) - await hass.async_block_till_done() # must reload the integration when enabling an entity await hass.config_entries.async_unload(integration.entry_id) @@ -1590,10 +1588,6 @@ async def test_disabled_entity_on_value_removed( await hass.async_block_till_done() assert integration.state is ConfigEntryState.LOADED - state = hass.states.get(sensor_cover_entity) - assert state - assert state.state != STATE_UNAVAILABLE - state = hass.states.get(idle_cover_status_button_entity) assert state assert state.state != STATE_UNAVAILABLE @@ -1687,10 +1681,6 @@ async def test_disabled_entity_on_value_removed( assert state assert state.state == STATE_UNAVAILABLE - state = hass.states.get(sensor_cover_entity) - assert state - assert state.state == STATE_UNAVAILABLE - state = hass.states.get(idle_cover_status_button_entity) assert state assert state.state == STATE_UNAVAILABLE @@ -1706,7 +1696,6 @@ async def test_disabled_entity_on_value_removed( | { battery_level_entity, binary_cover_entity, - sensor_cover_entity, idle_cover_status_button_entity, } == new_unavailable_entities diff --git a/tests/components/zwave_js/test_light.py b/tests/components/zwave_js/test_light.py index 376bd700a2a..21a6c0a8fae 100644 --- a/tests/components/zwave_js/test_light.py +++ b/tests/components/zwave_js/test_light.py @@ -7,9 +7,10 @@ from zwave_js_server.event import Event from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_COLOR_TEMP_KELVIN, + ATTR_HS_COLOR, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_SUPPORTED_COLOR_MODES, @@ -37,8 +38,8 @@ from .common import ( ZEN_31_ENTITY, ) -HSM200_V1_ENTITY = "light.hsm200" ZDB5100_ENTITY = "light.matrix_office" +HSM200_V1_ENTITY = "light.hsm200" async def test_light( @@ -50,8 
+51,8 @@ async def test_light( assert state assert state.state == STATE_OFF - assert state.attributes[ATTR_MIN_MIREDS] == 153 - assert state.attributes[ATTR_MAX_MIREDS] == 370 + assert state.attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 + assert state.attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -129,7 +130,7 @@ async def test_light( assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_MODE] == "color_temp" assert state.attributes[ATTR_BRIGHTNESS] == 255 - assert state.attributes[ATTR_COLOR_TEMP] == 370 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 2702 assert state.attributes[ATTR_RGB_COLOR] is not None # Test turning on with same brightness @@ -255,7 +256,7 @@ async def test_light( assert state.attributes[ATTR_COLOR_MODE] == "hs" assert state.attributes[ATTR_BRIGHTNESS] == 255 assert state.attributes[ATTR_RGB_COLOR] == (255, 76, 255) - assert state.attributes[ATTR_COLOR_TEMP] is None + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] is None client.async_send_command.reset_mock() @@ -292,7 +293,7 @@ async def test_light( await hass.services.async_call( "light", "turn_on", - {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP: 170}, + {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP_KELVIN: 5881}, blocking=True, ) @@ -357,14 +358,14 @@ async def test_light( assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_MODE] == "color_temp" assert state.attributes[ATTR_BRIGHTNESS] == 255 - assert state.attributes[ATTR_COLOR_TEMP] == 170 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 5881 assert ATTR_RGB_COLOR in state.attributes # Test turning on with same color temp await hass.services.async_call( "light", "turn_on", - {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP: 170}, + {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP_KELVIN: 5881}, blocking=True, ) @@ -378,7 +379,7 @@ async def test_light( "turn_on", { "entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, - ATTR_COLOR_TEMP: 170, + ATTR_COLOR_TEMP_KELVIN: 5881, ATTR_TRANSITION: 35, }, blocking=True, @@ -510,14 +511,388 @@ async def test_light_none_color_value( assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["hs"] -async def test_black_is_off( +async def test_light_on_off_color( + hass: HomeAssistant, client, logic_group_zdb5100, integration +) -> None: + """Test the light entity for RGB lights without dimming support.""" + node = logic_group_zdb5100 + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_OFF + + async def update_color(red: int, green: int, blue: int) -> None: + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "propertyKey": 2, # red + "newValue": red, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "red", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "propertyKey": 3, # green + "newValue": green, + "prevValue": None, + "propertyName": "currentColor", + 
"propertyKeyName": "green", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "propertyKey": 4, # blue + "newValue": blue, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "blue", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "newValue": { + "red": red, + "green": green, + "blue": blue, + }, + "prevValue": None, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + async def update_switch_state(state: bool) -> None: + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Binary Switch", + "commandClass": 37, + "endpoint": 1, + "property": "currentValue", + "newValue": state, + "prevValue": None, + "propertyName": "currentValue", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + # Turn on the light. Since this is the first call, the light should default to white + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 2 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == { + "red": 255, + "green": 255, + "blue": 255, + } + + args = client.async_send_command.call_args_list[1][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + # Force the light to turn off + await update_switch_state(False) + + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_OFF + + # Force the light to turn on (green) + await update_color(0, 255, 0) + await update_switch_state(True) + + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_ON + + client.async_send_command.reset_mock() + + # Set the brightness to 128. 
This should be encoded in the color value + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_BRIGHTNESS: 128}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 2 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == { + "red": 0, + "green": 128, + "blue": 0, + } + + args = client.async_send_command.call_args_list[1][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + client.async_send_command.reset_mock() + + # Force the light to turn on (green, 50%) + await update_color(0, 128, 0) + + # Set the color to red. This should preserve the previous brightness value + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_HS_COLOR: (0, 100)}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 2 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == { + "red": 128, + "green": 0, + "blue": 0, + } + + args = client.async_send_command.call_args_list[1][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + client.async_send_command.reset_mock() + + # Force the light to turn on (red, 50%) + await update_color(128, 0, 0) + + # Turn the device off. This should only affect the binary switch, not the color + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is False + + client.async_send_command.reset_mock() + + # Force the light to turn off + await update_switch_state(False) + + # Turn the device on again. 
This should only affect the binary switch, not the color + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + +async def test_light_color_only( hass: HomeAssistant, client, express_controls_ezmultipli, integration ) -> None: - """Test the black is off light entity.""" + """Test the light entity for RGB lights with Color Switch CC only.""" node = express_controls_ezmultipli state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_ON + async def update_color(red: int, green: int, blue: int) -> None: + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "propertyKey": 2, # red + "newValue": red, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "red", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "propertyKey": 3, # green + "newValue": green, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "green", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "propertyKey": 4, # blue + "newValue": blue, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "blue", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "newValue": { + "red": red, + "green": green, + "blue": blue, + }, + "prevValue": None, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + # Attempt to turn on the light and ensure it defaults to white await hass.services.async_call( LIGHT_DOMAIN, @@ -539,64 +914,14 @@ async def test_black_is_off( client.async_send_command.reset_mock() # Force the light to turn off - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() + await update_color(0, 0, 0) + state = hass.states.get(HSM200_V1_ENTITY) assert state.state == 
STATE_OFF - # Force the light to turn on - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() + # Force the light to turn on (50% green) + await update_color(0, 128, 0) + state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_ON @@ -619,6 +944,9 @@ async def test_black_is_off( client.async_send_command.reset_mock() + # Force the light to turn off + await update_color(0, 0, 0) + # Assert that the last color is restored await hass.services.async_call( LIGHT_DOMAIN, @@ -635,11 +963,131 @@ async def test_black_is_off( "endpoint": 0, "property": "targetColor", } - assert args["value"] == {"red": 0, "green": 255, "blue": 0} + assert args["value"] == {"red": 0, "green": 128, "blue": 0} client.async_send_command.reset_mock() - # Force the light to turn on + # Force the light to turn on (50% green) + await update_color(0, 128, 0) + + state = hass.states.get(HSM200_V1_ENTITY) + assert state.state == STATE_ON + + client.async_send_command.reset_mock() + + # Assert that the brightness is preserved when changing colors + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_RGB_COLOR: (255, 0, 0)}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 0, + "property": "targetColor", + } + assert args["value"] == {"red": 128, "green": 0, "blue": 0} + + client.async_send_command.reset_mock() + + # Force the light to turn on (50% red) + await update_color(128, 0, 0) + + state = hass.states.get(HSM200_V1_ENTITY) + assert state.state == STATE_ON + + # Assert that the color is preserved when changing brightness + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_BRIGHTNESS: 69}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 0, + "property": "targetColor", + } + assert args["value"] == {"red": 69, "green": 0, "blue": 0} + + client.async_send_command.reset_mock() + + await update_color(69, 0, 0) + + # Turn off again + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY}, + blocking=True, + ) + await update_color(0, 0, 0) + + client.async_send_command.reset_mock() + + # Assert that the color is preserved when turning on with brightness + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_BRIGHTNESS: 123}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + 
"endpoint": 0, + "property": "targetColor", + } + assert args["value"] == {"red": 123, "green": 0, "blue": 0} + + client.async_send_command.reset_mock() + + await update_color(123, 0, 0) + + # Turn off again + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY}, + blocking=True, + ) + await update_color(0, 0, 0) + + client.async_send_command.reset_mock() + + # Assert that the brightness is preserved when turning on with color + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_HS_COLOR: (240, 100)}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 0, + "property": "targetColor", + } + assert args["value"] == {"red": 0, "green": 0, "blue": 123} + + client.async_send_command.reset_mock() + + # Clear the color value to trigger an unknown state event = Event( type="value updated", data={ @@ -652,17 +1100,14 @@ async def test_black_is_off( "endpoint": 0, "property": "currentColor", "newValue": None, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, + "prevValue": None, "propertyName": "currentColor", }, }, ) node.receive_event(event) await hass.async_block_till_done() + state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_UNKNOWN @@ -687,183 +1132,6 @@ async def test_black_is_off( assert args["value"] == {"red": 255, "green": 76, "blue": 255} -async def test_black_is_off_zdb5100( - hass: HomeAssistant, client, logic_group_zdb5100, integration -) -> None: - """Test the black is off light entity.""" - node = logic_group_zdb5100 - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_OFF - - # Attempt to turn on the light and ensure it defaults to white - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == {"red": 255, "green": 255, "blue": 255} - - client.async_send_command.reset_mock() - - # Force the light to turn off - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_OFF - - # Force the light to turn on - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - 
node.receive_event(event) - await hass.async_block_till_done() - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_ON - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == {"red": 0, "green": 0, "blue": 0} - - client.async_send_command.reset_mock() - - # Assert that the last color is restored - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == {"red": 0, "green": 255, "blue": 0} - - client.async_send_command.reset_mock() - - # Force the light to turn on - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "newValue": None, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_UNKNOWN - - client.async_send_command.reset_mock() - - # Assert that call fails if attribute is added to service call - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_RGBW_COLOR: (255, 76, 255, 0)}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == {"red": 255, "green": 76, "blue": 255} - - async def test_basic_cc_light( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/zwave_js/test_lock.py b/tests/components/zwave_js/test_lock.py index e8a8a2035d8..47e680570f0 100644 --- a/tests/components/zwave_js/test_lock.py +++ b/tests/components/zwave_js/test_lock.py @@ -15,6 +15,7 @@ from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, + LockState, ) from homeassistant.components.zwave_js.const import ( ATTR_LOCK_TIMEOUT, @@ -27,13 +28,7 @@ from homeassistant.components.zwave_js.lock import ( SERVICE_SET_LOCK_CONFIGURATION, SERVICE_SET_LOCK_USERCODE, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_LOCKED, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - STATE_UNLOCKED, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -52,7 +47,7 @@ async def test_door_lock( state = hass.states.get(SCHLAGE_BE469_LOCK_ENTITY) assert state - assert state.state == 
STATE_UNLOCKED + assert state.state == LockState.UNLOCKED # Test locking await hass.services.async_call( @@ -95,7 +90,9 @@ async def test_door_lock( ) node.receive_event(event) - assert hass.states.get(SCHLAGE_BE469_LOCK_ENTITY).state == STATE_LOCKED + state = hass.states.get(SCHLAGE_BE469_LOCK_ENTITY) + assert state + assert state.state == LockState.LOCKED client.async_send_command.reset_mock() @@ -194,6 +191,7 @@ async def test_door_lock( "insideHandlesCanOpenDoorConfiguration": [True, True, True, True], "operationType": 2, "outsideHandlesCanOpenDoorConfiguration": [True, True, True, True], + "lockTimeoutConfiguration": 1, } ] assert args["commandClass"] == 98 @@ -239,6 +237,7 @@ async def test_door_lock( "insideHandlesCanOpenDoorConfiguration": [True, True, True, True], "operationType": 2, "outsideHandlesCanOpenDoorConfiguration": [True, True, True, True], + "lockTimeoutConfiguration": 1, } ] assert args["commandClass"] == 98 @@ -294,7 +293,9 @@ async def test_door_lock( node.receive_event(event) assert node.status == NodeStatus.DEAD - assert hass.states.get(SCHLAGE_BE469_LOCK_ENTITY).state == STATE_UNAVAILABLE + state = hass.states.get(SCHLAGE_BE469_LOCK_ENTITY) + assert state + assert state.state == STATE_UNAVAILABLE async def test_only_one_lock( diff --git a/tests/components/zwave_js/test_repairs.py b/tests/components/zwave_js/test_repairs.py index c103a06c5fa..d237a6e410a 100644 --- a/tests/components/zwave_js/test_repairs.py +++ b/tests/components/zwave_js/test_repairs.py @@ -1,25 +1,23 @@ """Test the Z-Wave JS repairs module.""" from copy import deepcopy -from http import HTTPStatus from unittest.mock import patch +import pytest from zwave_js_server.event import Event from zwave_js_server.model.node import Node -from homeassistant.components.repairs.issue_handler import ( - async_process_repairs_platforms, -) -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) from homeassistant.components.zwave_js import DOMAIN from homeassistant.components.zwave_js.helpers import get_device_id from homeassistant.core import HomeAssistant import homeassistant.helpers.device_registry as dr import homeassistant.helpers.issue_registry as ir +from tests.components.repairs import ( + async_process_repairs_platforms, + process_repair_fix_flow, + start_repair_fix_flow, +) from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -84,30 +82,21 @@ async def test_device_config_file_changed_confirm_step( assert issue["issue_id"] == issue_id assert issue["translation_placeholders"] == {"device_name": device.name} - url = RepairsFlowIndexView.url - resp = await http_client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(http_client, DOMAIN, issue_id) flow_id = data["flow_id"] assert data["step_id"] == "init" assert data["description_placeholders"] == {"device_name": device.name} - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - # Show menu - resp = await http_client.post(url) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(http_client, flow_id) assert data["type"] == "menu" # Apply fix - resp = await http_client.post(url, json={"next_step_id": "confirm"}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow( + http_client, flow_id, json={"next_step_id": "confirm"} + ) assert data["type"] == 
"create_entry" @@ -159,30 +148,21 @@ async def test_device_config_file_changed_ignore_step( assert issue["issue_id"] == issue_id assert issue["translation_placeholders"] == {"device_name": device.name} - url = RepairsFlowIndexView.url - resp = await http_client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(http_client, DOMAIN, issue_id) flow_id = data["flow_id"] assert data["step_id"] == "init" assert data["description_placeholders"] == {"device_name": device.name} - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - # Show menu - resp = await http_client.post(url) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(http_client, flow_id) assert data["type"] == "menu" # Ignore the issue - resp = await http_client.post(url, json={"next_step_id": "ignore"}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow( + http_client, flow_id, json={"next_step_id": "ignore"} + ) assert data["type"] == "abort" assert data["reason"] == "issue_ignored" @@ -200,6 +180,10 @@ async def test_device_config_file_changed_ignore_step( assert msg["result"]["issues"][0].get("dismissed_version") is not None +@pytest.mark.parametrize( + "ignore_translations", + ["component.zwave_js.issues.invalid_issue.title"], +) async def test_invalid_issue( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -228,22 +212,13 @@ async def test_invalid_issue( issue = msg["result"]["issues"][0] assert issue["issue_id"] == "invalid_issue_id" - url = RepairsFlowIndexView.url - resp = await http_client.post( - url, json={"handler": DOMAIN, "issue_id": "invalid_issue_id"} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(http_client, DOMAIN, "invalid_issue_id") flow_id = data["flow_id"] assert data["step_id"] == "confirm" # Apply fix - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await http_client.post(url) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow(http_client, flow_id) assert data["type"] == "create_entry" @@ -278,10 +253,7 @@ async def test_abort_confirm( await hass_ws_client(hass) http_client = await hass_client() - url = RepairsFlowIndexView.url - resp = await http_client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await start_repair_fix_flow(http_client, DOMAIN, issue_id) flow_id = data["flow_id"] assert data["step_id"] == "init" @@ -290,11 +262,9 @@ async def test_abort_confirm( await hass.config_entries.async_unload(integration.entry_id) # Apply fix - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await http_client.post(url, json={"next_step_id": "confirm"}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() + data = await process_repair_fix_flow( + http_client, flow_id, json={"next_step_id": "confirm"} + ) assert data["type"] == "abort" assert data["reason"] == "cannot_connect" diff --git a/tests/components/zwave_js/test_sensor.py b/tests/components/zwave_js/test_sensor.py index 02b3df17e22..c93b722334b 100644 --- a/tests/components/zwave_js/test_sensor.py +++ b/tests/components/zwave_js/test_sensor.py @@ -9,7 +9,6 @@ from zwave_js_server.exceptions import FailedZWaveCommand from zwave_js_server.model.node import Node from 
homeassistant.components.sensor import ( - ATTR_OPTIONS, ATTR_STATE_CLASS, SensorDeviceClass, SensorStateClass, @@ -23,6 +22,10 @@ from homeassistant.components.zwave_js.const import ( SERVICE_RESET_METER, ) from homeassistant.components.zwave_js.helpers import get_valueless_base_unique_id +from homeassistant.components.zwave_js.sensor import ( + CONTROLLER_STATISTICS_KEY_MAP, + NODE_STATISTICS_KEY_MAP, +) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -50,11 +53,12 @@ from .common import ( ENERGY_SENSOR, HUMIDITY_SENSOR, METER_ENERGY_SENSOR, - NOTIFICATION_MOTION_SENSOR, POWER_SENSOR, VOLTAGE_SENSOR, ) +from tests.common import MockConfigEntry + async def test_numeric_sensor( hass: HomeAssistant, @@ -221,60 +225,6 @@ async def test_basic_cc_sensor( assert state.state == "255.0" -async def test_disabled_notification_sensor( - hass: HomeAssistant, entity_registry: er.EntityRegistry, multisensor_6, integration -) -> None: - """Test sensor is created from Notification CC and is disabled.""" - entity_entry = entity_registry.async_get(NOTIFICATION_MOTION_SENSOR) - - assert entity_entry - assert entity_entry.disabled - assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - - # Test enabling entity - updated_entry = entity_registry.async_update_entity( - entity_entry.entity_id, disabled_by=None - ) - assert updated_entry != entity_entry - assert updated_entry.disabled is False - - # reload integration and check if entity is correctly there - await hass.config_entries.async_reload(integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get(NOTIFICATION_MOTION_SENSOR) - assert state.state == "Motion detection" - assert state.attributes[ATTR_VALUE] == 8 - assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.ENUM - assert state.attributes[ATTR_OPTIONS] == ["idle", "Motion detection"] - - event = Event( - "value updated", - { - "source": "node", - "event": "value updated", - "nodeId": multisensor_6.node_id, - "args": { - "commandClassName": "Notification", - "commandClass": 113, - "endpoint": 0, - "property": "Home Security", - "propertyKey": "Motion sensor status", - "newValue": None, - "prevValue": 0, - "propertyName": "Home Security", - "propertyKeyName": "Motion sensor status", - }, - }, - ) - - multisensor_6.receive_event(event) - await hass.async_block_till_done() - state = hass.states.get(NOTIFICATION_MOTION_SENSOR) - assert state - assert state.state == STATE_UNKNOWN - - async def test_config_parameter_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -522,7 +472,7 @@ async def test_reset_meter( "test", 1, "test" ) - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError) as err: await hass.services.async_call( DOMAIN, SERVICE_RESET_METER, @@ -530,6 +480,11 @@ async def test_reset_meter( blocking=True, ) + assert str(err.value) == ( + "Failed to reset meters on node Node(node_id=102) endpoint 0: " + "zwave_error: Z-Wave error 1 - test" + ) + async def test_meter_attributes( hass: HomeAssistant, client, aeon_smart_switch_6, integration @@ -751,6 +706,54 @@ NODE_STATISTICS_SUFFIXES_UNKNOWN = { } +async def test_statistics_sensors_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + zp3111_state, + client, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test statistics migration sensor.""" + node = Node(client, copy.deepcopy(zp3111_state)) + client.driver.controller.nodes[node.node_id] = node + + entry = MockConfigEntry(domain="zwave_js", 
data={"url": "ws://test.org"}) + entry.add_to_hass(hass) + + controller_base_unique_id = f"{client.driver.controller.home_id}.1.statistics" + node_base_unique_id = f"{client.driver.controller.home_id}.22.statistics" + + # Create entity registry records for the old statistics keys + for base_unique_id, key_map in ( + (controller_base_unique_id, CONTROLLER_STATISTICS_KEY_MAP), + (node_base_unique_id, NODE_STATISTICS_KEY_MAP), + ): + # old key + for key in key_map.values(): + entity_registry.async_get_or_create( + "sensor", DOMAIN, f"{base_unique_id}_{key}" + ) + + # Set up integration + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + # Validate that entity unique ID's have changed + for base_unique_id, key_map in ( + (controller_base_unique_id, CONTROLLER_STATISTICS_KEY_MAP), + (node_base_unique_id, NODE_STATISTICS_KEY_MAP), + ): + for new_key, old_key in key_map.items(): + # If the key has changed, the old entity should not exist + if new_key != old_key: + assert not entity_registry.async_get_entity_id( + "sensor", DOMAIN, f"{base_unique_id}_{old_key}" + ) + assert entity_registry.async_get_entity_id( + "sensor", DOMAIN, f"{base_unique_id}_{new_key}" + ) + + async def test_statistics_sensors_no_last_seen( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/zwave_js/test_services.py b/tests/components/zwave_js/test_services.py index ec13d0262f8..14084a6b846 100644 --- a/tests/components/zwave_js/test_services.py +++ b/tests/components/zwave_js/test_services.py @@ -255,11 +255,10 @@ async def test_set_config_parameter( assert args["command"] == "endpoint.set_raw_config_parameter_value" assert args["nodeId"] == 52 assert args["endpoint"] == 0 - options = args["options"] - assert options["parameter"] == 2 - assert options["value"] == 1 - assert options["valueSize"] == 2 - assert options["valueFormat"] == 1 + assert args["parameter"] == 2 + assert args["value"] == 1 + assert args["valueSize"] == 2 + assert args["valueFormat"] == 1 client.async_send_command_no_wait.reset_mock() @@ -284,11 +283,10 @@ async def test_set_config_parameter( assert args["command"] == "endpoint.set_raw_config_parameter_value" assert args["nodeId"] == 2 assert args["endpoint"] == 1 - options = args["options"] - assert options["parameter"] == 32 - assert options["value"] == 1 - assert options["valueSize"] == 2 - assert options["valueFormat"] == 1 + assert args["parameter"] == 32 + assert args["value"] == 1 + assert args["valueSize"] == 2 + assert args["valueFormat"] == 1 client.async_send_command_no_wait.reset_mock() client.async_send_command.reset_mock() @@ -497,13 +495,12 @@ async def test_set_config_parameter( caplog.clear() - config_value = aeotec_zw164_siren.values["2-112-0-32"] cmd_result = SetConfigParameterResult("accepted", {"status": 255}) # Test accepted return with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=(config_value, cmd_result), + return_value=cmd_result, ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, @@ -534,7 +531,7 @@ async def test_set_config_parameter( cmd_result.status = "queued" with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=(config_value, cmd_result), + return_value=cmd_result, ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, diff --git a/tests/components/zwave_js/test_switch.py 
b/tests/components/zwave_js/test_switch.py index c18c0c4359e..30486186a4e 100644 --- a/tests/components/zwave_js/test_switch.py +++ b/tests/components/zwave_js/test_switch.py @@ -6,7 +6,11 @@ from zwave_js_server.event import Event from zwave_js_server.exceptions import FailedZWaveCommand from zwave_js_server.model.node import Node -from homeassistant.components.switch import DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) from homeassistant.components.zwave_js.helpers import ZwaveValueMatcher from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN, EntityCategory from homeassistant.core import HomeAssistant @@ -95,7 +99,7 @@ async def test_barrier_signaling_switch( # Test turning off await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {"entity_id": entity}, blocking=True + SWITCH_DOMAIN, SERVICE_TURN_OFF, {"entity_id": entity}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -120,7 +124,7 @@ async def test_barrier_signaling_switch( # Test turning on await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {"entity_id": entity}, blocking=True + SWITCH_DOMAIN, SERVICE_TURN_ON, {"entity_id": entity}, blocking=True ) # Note: the valueId's value is still 255 because we never @@ -250,7 +254,7 @@ async def test_config_parameter_switch( # Test turning on await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {"entity_id": switch_entity_id}, blocking=True + SWITCH_DOMAIN, SERVICE_TURN_ON, {"entity_id": switch_entity_id}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -268,7 +272,7 @@ async def test_config_parameter_switch( # Test turning off await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True + SWITCH_DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -286,7 +290,14 @@ async def test_config_parameter_switch( client.async_send_command.side_effect = FailedZWaveCommand("test", 1, "test") # Test turning off error raises proper exception - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError) as err: await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {"entity_id": switch_entity_id}, + blocking=True, ) + + assert str(err.value) == ( + "Unable to set value 32-112-0-20: zwave_error: Z-Wave error 1 - test" + ) diff --git a/tests/components/zwave_js/test_trigger.py b/tests/components/zwave_js/test_trigger.py index 5822afe7b9f..8c345619a90 100644 --- a/tests/components/zwave_js/test_trigger.py +++ b/tests/components/zwave_js/test_trigger.py @@ -549,7 +549,7 @@ async def test_zwave_js_event( "config_entry_id": integration.entry_id, "event_source": "controller", "event": "inclusion started", - "event_data": {"secure": True}, + "event_data": {"strategy": 0}, }, "action": { "event": "controller_event_data_filter", @@ -667,7 +667,7 @@ async def test_zwave_js_event( data={ "source": "controller", "event": "inclusion started", - "secure": False, + "strategy": 2, }, ) client.driver.controller.receive_event(event) @@ -691,7 +691,7 @@ async def test_zwave_js_event( data={ "source": "controller", "event": "inclusion started", - "secure": True, + "strategy": 0, }, ) client.driver.controller.receive_event(event) diff --git a/tests/components/zwave_js/test_update.py 
b/tests/components/zwave_js/test_update.py index abdceb155f7..d6683fa24cb 100644 --- a/tests/components/zwave_js/test_update.py +++ b/tests/components/zwave_js/test_update.py @@ -16,6 +16,7 @@ from homeassistant.components.update import ( ATTR_LATEST_VERSION, ATTR_RELEASE_URL, ATTR_SKIPPED_VERSION, + ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, SERVICE_SKIP, @@ -155,9 +156,10 @@ async def test_update_entity_states( attrs = state.attributes assert not attrs[ATTR_AUTO_UPDATE] assert attrs[ATTR_INSTALLED_VERSION] == "10.7" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert attrs[ATTR_RELEASE_URL] is None + assert attrs[ATTR_UPDATE_PERCENTAGE] is None await ws_client.send_json( { @@ -417,6 +419,7 @@ async def test_update_entity_progress( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is True + assert attrs[ATTR_UPDATE_PERCENTAGE] is None event = Event( type="firmware update progress", @@ -439,7 +442,8 @@ async def test_update_entity_progress( state = hass.states.get(UPDATE_ENTITY) assert state attrs = state.attributes - assert attrs[ATTR_IN_PROGRESS] == 5 + assert attrs[ATTR_IN_PROGRESS] is True + assert attrs[ATTR_UPDATE_PERCENTAGE] == 5 event = Event( type="firmware update finished", @@ -463,6 +467,7 @@ async def test_update_entity_progress( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_INSTALLED_VERSION] == "11.2.4" assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert state.state == STATE_OFF @@ -532,7 +537,8 @@ async def test_update_entity_install_failed( state = hass.states.get(UPDATE_ENTITY) assert state attrs = state.attributes - assert attrs[ATTR_IN_PROGRESS] == 5 + assert attrs[ATTR_IN_PROGRESS] is True + assert attrs[ATTR_UPDATE_PERCENTAGE] == 5 event = Event( type="firmware update finished", @@ -556,6 +562,7 @@ async def test_update_entity_install_failed( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_INSTALLED_VERSION] == "10.7" assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert state.state == STATE_ON @@ -594,7 +601,8 @@ async def test_update_entity_reload( attrs = state.attributes assert not attrs[ATTR_AUTO_UPDATE] assert attrs[ATTR_INSTALLED_VERSION] == "10.7" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert attrs[ATTR_RELEASE_URL] is None @@ -833,6 +841,7 @@ async def test_update_entity_full_restore_data_update_available( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is True + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert len(client.async_send_command.call_args_list) == 2 assert client.async_send_command.call_args_list[1][0][0] == { diff --git a/tests/conftest.py b/tests/conftest.py index df183f955cb..2cefe72f414 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -36,6 +36,7 @@ import pytest_socket import requests_mock import respx from syrupy.assertion import SnapshotAssertion +from syrupy.session import SnapshotSession from homeassistant import block_async_io from homeassistant.exceptions import ServiceNotFound @@ -51,11 +52,15 @@ from homeassistant.auth.const import GROUP_ID_ADMIN, GROUP_ID_READ_ONLY from homeassistant.auth.models import Credentials from homeassistant.auth.providers import 
homeassistant from homeassistant.components.device_tracker.legacy import Device + +# pylint: disable-next=hass-component-root-import from homeassistant.components.websocket_api.auth import ( TYPE_AUTH, TYPE_AUTH_OK, TYPE_AUTH_REQUIRED, ) + +# pylint: disable-next=hass-component-root-import from homeassistant.components.websocket_api.http import URL from homeassistant.config import YAML_CONFIG_FILE from homeassistant.config_entries import ConfigEntries, ConfigEntry, ConfigEntryState @@ -88,7 +93,7 @@ from homeassistant.util.async_ import create_eager_task, get_scheduled_timer_han from homeassistant.util.json import json_loads from .ignore_uncaught_exceptions import IGNORE_UNCAUGHT_EXCEPTIONS -from .syrupy import HomeAssistantSnapshotExtension +from .syrupy import HomeAssistantSnapshotExtension, override_syrupy_finish from .typing import ( ClientSessionGenerator, MockHAClientWebSocket, @@ -145,6 +150,11 @@ def pytest_configure(config: pytest.Config) -> None: if config.getoption("verbose") > 0: logging.getLogger().setLevel(logging.DEBUG) + # Override default finish to detect unused snapshots despite xdist + # Temporary workaround until it is finalised inside syrupy + # See https://github.com/syrupy-project/syrupy/pull/901 + SnapshotSession.finish = override_syrupy_finish + def pytest_runtest_setup() -> None: """Prepare pytest_socket and freezegun. @@ -414,7 +424,7 @@ def reset_hass_threading_local_object() -> Generator[None]: ha._hass.__dict__.clear() -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(autouse=True, scope="session") def bcrypt_cost() -> Generator[None]: """Run with reduced rounds during tests, to speed up uses.""" gensalt_orig = bcrypt.gensalt @@ -500,30 +510,31 @@ def aiohttp_client( clients = [] async def go( - __param: Application | BaseTestServer, + param: Application | BaseTestServer, + /, *args: Any, server_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> TestClient: - if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] - __param, (Application, BaseTestServer) + if isinstance(param, Callable) and not isinstance( # type: ignore[arg-type] + param, (Application, BaseTestServer) ): - __param = __param(loop, *args, **kwargs) + param = param(loop, *args, **kwargs) kwargs = {} else: assert not args, "args should be empty" client: TestClient - if isinstance(__param, Application): + if isinstance(param, Application): server_kwargs = server_kwargs or {} - server = TestServer(__param, loop=loop, **server_kwargs) + server = TestServer(param, loop=loop, **server_kwargs) # Registering a view after starting the server should still work. server.app._router.freeze = lambda: None client = CoalescingClient(server, loop=loop, **kwargs) - elif isinstance(__param, BaseTestServer): - client = TestClient(__param, loop=loop, **kwargs) + elif isinstance(param, BaseTestServer): + client = TestClient(param, loop=loop, **kwargs) else: - raise TypeError(f"Unknown argument type: {type(__param)!r}") + raise TypeError(f"Unknown argument type: {type(param)!r}") await client.start_server() clients.append(client) @@ -1181,7 +1192,12 @@ def mock_get_source_ip() -> Generator[_patch]: @pytest.fixture(autouse=True, scope="session") def translations_once() -> Generator[_patch]: - """Only load translations once per session.""" + """Only load translations once per session. + + Warning: having this as a session fixture can cause issues with tests that + create mock integrations, overriding the real integration translations + with empty ones. 
Translations should be reset after such tests (see #131628) + """ cache = _TranslationsCacheData({}, {}) patcher = patch( "homeassistant.helpers.translation._TranslationsCacheData", @@ -1293,11 +1309,21 @@ def enable_nightly_purge() -> bool: @pytest.fixture -def enable_migrate_context_ids() -> bool: +def enable_migrate_event_context_ids() -> bool: """Fixture to control enabling of recorder's context id migration. To enable context id migration, tests can be marked with: - @pytest.mark.parametrize("enable_migrate_context_ids", [True]) + @pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) + """ + return False + + +@pytest.fixture +def enable_migrate_state_context_ids() -> bool: + """Fixture to control enabling of recorder's context id migration. + + To enable context id migration, tests can be marked with: + @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) """ return False @@ -1465,7 +1491,8 @@ async def async_test_recorder( enable_statistics: bool, enable_missing_statistics: bool, enable_schema_validation: bool, - enable_migrate_context_ids: bool, + enable_migrate_event_context_ids: bool, + enable_migrate_state_context_ids: bool, enable_migrate_event_type_ids: bool, enable_migrate_entity_ids: bool, enable_migrate_event_ids: bool, @@ -1527,12 +1554,12 @@ async def async_test_recorder( ) migrate_states_context_ids = ( migration.StatesContextIDMigration.migrate_data - if enable_migrate_context_ids + if enable_migrate_state_context_ids else None ) migrate_events_context_ids = ( migration.EventsContextIDMigration.migrate_data - if enable_migrate_context_ids + if enable_migrate_event_context_ids else None ) migrate_event_type_ids = ( @@ -1700,7 +1727,7 @@ async def mock_enable_bluetooth( await hass.async_block_till_done() -@pytest.fixture(scope="session") +@pytest.fixture(autouse=True, scope="session") def mock_bluetooth_adapters() -> Generator[None]: """Fixture to mock bluetooth adapters.""" with ( @@ -1751,10 +1778,30 @@ def mock_bleak_scanner_start() -> Generator[MagicMock]: @pytest.fixture -def mock_integration_frame() -> Generator[Mock]: - """Mock as if we're calling code from inside an integration.""" +def integration_frame_path() -> str: + """Return the path to the integration frame. + + Can be parametrized with + `@pytest.mark.parametrize("integration_frame_path", ["path_to_frame"])` + + - "custom_components/XYZ" for a custom integration + - "homeassistant/components/XYZ" for a core integration + - "homeassistant/XYZ" for core (no integration) + + Defaults to core component `hue` + """ + return "homeassistant/components/hue" + + +@pytest.fixture +def mock_integration_frame(integration_frame_path: str) -> Generator[Mock]: + """Mock where we are calling code from. + + Defaults to calling from `hue` core integration, and can be parametrized + with `integration_frame_path`. 
+ """ correct_frame = Mock( - filename="/home/paulus/homeassistant/components/hue/light.py", + filename=f"/home/paulus/{integration_frame_path}/light.py", lineno="23", line="self.light.is_on", ) @@ -1852,7 +1899,7 @@ def service_calls(hass: HomeAssistant) -> Generator[list[ServiceCall]]: return_response: bool = False, ) -> ServiceResponse: calls.append( - ServiceCall(domain, service, service_data, context, return_response) + ServiceCall(hass, domain, service, service_data, context, return_response) ) try: return await _original_async_call( diff --git a/tests/hassfest/test_requirements.py b/tests/hassfest/test_requirements.py index f3b008a6113..b9259596c65 100644 --- a/tests/hassfest/test_requirements.py +++ b/tests/hassfest/test_requirements.py @@ -4,7 +4,7 @@ from pathlib import Path import pytest -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration from script.hassfest.requirements import validate_requirements_format @@ -12,7 +12,13 @@ from script.hassfest.requirements import validate_requirements_format def integration(): """Fixture for hassfest integration model.""" return Integration( - path=Path("homeassistant/components/test"), + path=Path("homeassistant/components/test").absolute(), + _config=Config( + root=Path(".").absolute(), + specific_integrations=None, + action="validate", + requirements=True, + ), _manifest={ "domain": "test", "documentation": "https://example.com", @@ -80,3 +86,22 @@ def test_validate_requirements_format_successful(integration: Integration) -> No ] assert validate_requirements_format(integration) assert len(integration.errors) == 0 + + +def test_validate_requirements_format_github_core(integration: Integration) -> None: + """Test requirement that points to github fails with core component.""" + integration.manifest["requirements"] = [ + "git+https://github.com/user/project.git@1.2.3", + ] + assert not validate_requirements_format(integration) + assert len(integration.errors) == 1 + + +def test_validate_requirements_format_github_custom(integration: Integration) -> None: + """Test requirement that points to github succeeds with custom component.""" + integration.manifest["requirements"] = [ + "git+https://github.com/user/project.git@1.2.3", + ] + integration.path = Path("") + assert validate_requirements_format(integration) + assert len(integration.errors) == 0 diff --git a/tests/hassfest/test_version.py b/tests/hassfest/test_version.py index bfe15018fe2..20c3d93bda5 100644 --- a/tests/hassfest/test_version.py +++ b/tests/hassfest/test_version.py @@ -1,5 +1,7 @@ """Tests for hassfest version.""" +from pathlib import Path + import pytest import voluptuous as vol @@ -7,13 +9,21 @@ from script.hassfest.manifest import ( CUSTOM_INTEGRATION_MANIFEST_SCHEMA, validate_version, ) -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration @pytest.fixture def integration(): """Fixture for hassfest integration model.""" - integration = Integration("") + integration = Integration( + Path(), + _config=Config( + root=Path(".").absolute(), + specific_integrations=None, + action="validate", + requirements=True, + ), + ) integration._manifest = { "domain": "test", "documentation": "https://example.com", diff --git a/tests/helpers/snapshots/test_entity_platform.ambr b/tests/helpers/snapshots/test_entity_platform.ambr new file mode 100644 index 00000000000..84cbb07bd73 --- /dev/null +++ b/tests/helpers/snapshots/test_entity_platform.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# 
name: test_device_info_called + DeviceRegistryEntrySnapshot({ + 'area_id': 'heliport', + 'config_entries': , + 'configuration_url': 'http://192.168.0.100/config', + 'connections': set({ + tuple( + 'mac', + 'abcd', + ), + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': 'test-hw', + 'id': , + 'identifiers': set({ + tuple( + 'hue', + '1234', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'test-manuf', + 'model': 'test-model', + 'model_id': None, + 'name': 'test-name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'Heliport', + 'sw_version': 'test-sw', + 'via_device_id': , + }) +# --- diff --git a/tests/helpers/snapshots/test_template.ambr b/tests/helpers/snapshots/test_template.ambr new file mode 100644 index 00000000000..af38433f1a4 --- /dev/null +++ b/tests/helpers/snapshots/test_template.ambr @@ -0,0 +1,337 @@ +# serializer version: 1 +# name: test_merge_response[calendar][a_response] + dict({ + 'calendar.local_furry_events': dict({ + 'events': list([ + ]), + }), + 'calendar.sports': dict({ + 'events': list([ + dict({ + 'description': '', + 'end': '2024-02-27T18:00:00-06:00', + 'start': '2024-02-27T17:00:00-06:00', + 'summary': 'Basketball vs. Rockets', + }), + ]), + }), + 'calendar.yap_house_schedules': dict({ + 'events': list([ + dict({ + 'description': '', + 'end': '2024-02-26T09:00:00-06:00', + 'start': '2024-02-26T08:00:00-06:00', + 'summary': 'Dr. Appt', + }), + dict({ + 'description': 'something good', + 'end': '2024-02-28T21:00:00-06:00', + 'start': '2024-02-28T20:00:00-06:00', + 'summary': 'Bake a cake', + }), + ]), + }), + }) +# --- +# name: test_merge_response[calendar][b_rendered] + Wrapper([ + dict({ + 'description': '', + 'end': '2024-02-27T18:00:00-06:00', + 'entity_id': 'calendar.sports', + 'start': '2024-02-27T17:00:00-06:00', + 'summary': 'Basketball vs. Rockets', + 'value_key': 'events', + }), + dict({ + 'description': '', + 'end': '2024-02-26T09:00:00-06:00', + 'entity_id': 'calendar.yap_house_schedules', + 'start': '2024-02-26T08:00:00-06:00', + 'summary': 'Dr. 
Appt', + 'value_key': 'events', + }), + dict({ + 'description': 'something good', + 'end': '2024-02-28T21:00:00-06:00', + 'entity_id': 'calendar.yap_house_schedules', + 'start': '2024-02-28T20:00:00-06:00', + 'summary': 'Bake a cake', + 'value_key': 'events', + }), + ]) +# --- +# name: test_merge_response[vacuum][a_response] + dict({ + 'vacuum.deebot_n8_plus_1': dict({ + 'header': dict({ + 'ver': '0.0.1', + }), + 'payloadType': 'j', + 'resp': dict({ + 'body': dict({ + 'msg': 'ok', + }), + }), + }), + 'vacuum.deebot_n8_plus_2': dict({ + 'header': dict({ + 'ver': '0.0.1', + }), + 'payloadType': 'j', + 'resp': dict({ + 'body': dict({ + 'msg': 'ok', + }), + }), + }), + }) +# --- +# name: test_merge_response[vacuum][b_rendered] + Wrapper([ + dict({ + 'entity_id': 'vacuum.deebot_n8_plus_1', + 'header': dict({ + 'ver': '0.0.1', + }), + 'payloadType': 'j', + 'resp': dict({ + 'body': dict({ + 'msg': 'ok', + }), + }), + }), + dict({ + 'entity_id': 'vacuum.deebot_n8_plus_2', + 'header': dict({ + 'ver': '0.0.1', + }), + 'payloadType': 'j', + 'resp': dict({ + 'body': dict({ + 'msg': 'ok', + }), + }), + }), + ]) +# --- +# name: test_merge_response[weather][a_response] + dict({ + 'weather.forecast_home': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2024-03-31T10:00:00+00:00', + 'humidity': 71, + 'precipitation': 0, + 'precipitation_probability': 6.6, + 'temperature': 10.9, + 'templow': 6.5, + 'wind_bearing': 71.8, + 'wind_gust_speed': 24.1, + 'wind_speed': 13.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2024-04-01T10:00:00+00:00', + 'humidity': 79, + 'precipitation': 0, + 'precipitation_probability': 8, + 'temperature': 10.2, + 'templow': 3.4, + 'wind_bearing': 350.6, + 'wind_gust_speed': 38.2, + 'wind_speed': 21.6, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2024-04-02T10:00:00+00:00', + 'humidity': 77, + 'precipitation': 2.3, + 'precipitation_probability': 67.4, + 'temperature': 3, + 'templow': 0, + 'wind_bearing': 24.5, + 'wind_gust_speed': 64.8, + 'wind_speed': 37.4, + }), + ]), + }), + 'weather.smhi_home': dict({ + 'forecast': list([ + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2024-03-31T16:00:00', + 'humidity': 87, + 'precipitation': 0.2, + 'pressure': 998, + 'temperature': 10, + 'templow': 4, + 'wind_bearing': 79, + 'wind_gust_speed': 21.6, + 'wind_speed': 11.88, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2024-04-01T12:00:00', + 'humidity': 88, + 'precipitation': 2.2, + 'pressure': 999, + 'temperature': 6, + 'templow': 1, + 'wind_bearing': 17, + 'wind_gust_speed': 20.52, + 'wind_speed': 8.64, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2024-04-02T12:00:00', + 'humidity': 71, + 'precipitation': 1.3, + 'pressure': 1003, + 'temperature': 0, + 'templow': -3, + 'wind_bearing': 17, + 'wind_gust_speed': 57.24, + 'wind_speed': 30.6, + }), + ]), + }), + }) +# --- +# name: test_merge_response[weather][b_rendered] + Wrapper([ + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2024-03-31T16:00:00', + 'entity_id': 'weather.smhi_home', + 'humidity': 87, + 'precipitation': 0.2, + 'pressure': 998, + 'temperature': 10, + 'templow': 4, + 'value_key': 'forecast', + 'wind_bearing': 79, + 'wind_gust_speed': 21.6, + 'wind_speed': 11.88, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2024-04-01T12:00:00', + 'entity_id': 'weather.smhi_home', + 'humidity': 88, + 'precipitation': 2.2, + 'pressure': 999, + 
'temperature': 6, + 'templow': 1, + 'value_key': 'forecast', + 'wind_bearing': 17, + 'wind_gust_speed': 20.52, + 'wind_speed': 8.64, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2024-04-02T12:00:00', + 'entity_id': 'weather.smhi_home', + 'humidity': 71, + 'precipitation': 1.3, + 'pressure': 1003, + 'temperature': 0, + 'templow': -3, + 'value_key': 'forecast', + 'wind_bearing': 17, + 'wind_gust_speed': 57.24, + 'wind_speed': 30.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2024-03-31T10:00:00+00:00', + 'entity_id': 'weather.forecast_home', + 'humidity': 71, + 'precipitation': 0, + 'precipitation_probability': 6.6, + 'temperature': 10.9, + 'templow': 6.5, + 'value_key': 'forecast', + 'wind_bearing': 71.8, + 'wind_gust_speed': 24.1, + 'wind_speed': 13.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2024-04-01T10:00:00+00:00', + 'entity_id': 'weather.forecast_home', + 'humidity': 79, + 'precipitation': 0, + 'precipitation_probability': 8, + 'temperature': 10.2, + 'templow': 3.4, + 'value_key': 'forecast', + 'wind_bearing': 350.6, + 'wind_gust_speed': 38.2, + 'wind_speed': 21.6, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2024-04-02T10:00:00+00:00', + 'entity_id': 'weather.forecast_home', + 'humidity': 77, + 'precipitation': 2.3, + 'precipitation_probability': 67.4, + 'temperature': 3, + 'templow': 0, + 'value_key': 'forecast', + 'wind_bearing': 24.5, + 'wind_gust_speed': 64.8, + 'wind_speed': 37.4, + }), + ]) +# --- +# name: test_merge_response[workday][a_response] + dict({ + 'binary_sensor.workday': dict({ + 'workday': True, + }), + 'binary_sensor.workday2': dict({ + 'workday': False, + }), + }) +# --- +# name: test_merge_response[workday][b_rendered] + Wrapper([ + dict({ + 'entity_id': 'binary_sensor.workday', + 'workday': True, + }), + dict({ + 'entity_id': 'binary_sensor.workday2', + 'workday': False, + }), + ]) +# --- +# name: test_merge_response_with_empty_response[a_response] + dict({ + 'calendar.local_furry_events': dict({ + 'events': list([ + ]), + }), + 'calendar.sports': dict({ + 'events': list([ + ]), + }), + 'calendar.yap_house_schedules': dict({ + 'events': list([ + ]), + }), + }) +# --- +# name: test_merge_response_with_empty_response[b_rendered] + Wrapper([ + ]) +# --- diff --git a/tests/helpers/test_aiohttp_client.py b/tests/helpers/test_aiohttp_client.py index 4feb03493e9..1788da74c3b 100644 --- a/tests/helpers/test_aiohttp_client.py +++ b/tests/helpers/test_aiohttp_client.py @@ -23,6 +23,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant import homeassistant.helpers.aiohttp_client as client from homeassistant.util.color import RGBColor +from homeassistant.util.ssl import SSLCipherList from tests.common import ( MockConfigEntry, @@ -62,11 +63,14 @@ async def test_get_clientsession_with_ssl(hass: HomeAssistant) -> None: """Test init clientsession with ssl.""" client.async_get_clientsession(hass) verify_ssl = True + ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 - client_session = hass.data[client.DATA_CLIENTSESSION][(verify_ssl, family)] + client_session = hass.data[client.DATA_CLIENTSESSION][ + (verify_ssl, family, ssl_cipher) + ] assert isinstance(client_session, aiohttp.ClientSession) - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family)] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)] assert isinstance(connector, aiohttp.TCPConnector) @@ -74,33 +78,63 @@ async def test_get_clientsession_without_ssl(hass: HomeAssistant) -> 
None: """Test init clientsession without ssl.""" client.async_get_clientsession(hass, verify_ssl=False) verify_ssl = False + ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 - client_session = hass.data[client.DATA_CLIENTSESSION][(verify_ssl, family)] + client_session = hass.data[client.DATA_CLIENTSESSION][ + (verify_ssl, family, ssl_cipher) + ] assert isinstance(client_session, aiohttp.ClientSession) - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family)] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)] assert isinstance(connector, aiohttp.TCPConnector) @pytest.mark.parametrize( - ("verify_ssl", "expected_family"), + ("verify_ssl", "expected_family", "ssl_cipher"), [ - (True, socket.AF_UNSPEC), - (False, socket.AF_UNSPEC), - (True, socket.AF_INET), - (False, socket.AF_INET), - (True, socket.AF_INET6), - (False, socket.AF_INET6), + (True, socket.AF_UNSPEC, SSLCipherList.PYTHON_DEFAULT), + (True, socket.AF_INET, SSLCipherList.PYTHON_DEFAULT), + (True, socket.AF_INET6, SSLCipherList.PYTHON_DEFAULT), + (True, socket.AF_UNSPEC, SSLCipherList.INTERMEDIATE), + (True, socket.AF_INET, SSLCipherList.INTERMEDIATE), + (True, socket.AF_INET6, SSLCipherList.INTERMEDIATE), + (True, socket.AF_UNSPEC, SSLCipherList.MODERN), + (True, socket.AF_INET, SSLCipherList.MODERN), + (True, socket.AF_INET6, SSLCipherList.MODERN), + (True, socket.AF_UNSPEC, SSLCipherList.INSECURE), + (True, socket.AF_INET, SSLCipherList.INSECURE), + (True, socket.AF_INET6, SSLCipherList.INSECURE), + (False, socket.AF_UNSPEC, SSLCipherList.PYTHON_DEFAULT), + (False, socket.AF_INET, SSLCipherList.PYTHON_DEFAULT), + (False, socket.AF_INET6, SSLCipherList.PYTHON_DEFAULT), + (False, socket.AF_UNSPEC, SSLCipherList.INTERMEDIATE), + (False, socket.AF_INET, SSLCipherList.INTERMEDIATE), + (False, socket.AF_INET6, SSLCipherList.INTERMEDIATE), + (False, socket.AF_UNSPEC, SSLCipherList.MODERN), + (False, socket.AF_INET, SSLCipherList.MODERN), + (False, socket.AF_INET6, SSLCipherList.MODERN), + (False, socket.AF_UNSPEC, SSLCipherList.INSECURE), + (False, socket.AF_INET, SSLCipherList.INSECURE), + (False, socket.AF_INET6, SSLCipherList.INSECURE), ], ) async def test_get_clientsession( - hass: HomeAssistant, verify_ssl: bool, expected_family: int + hass: HomeAssistant, + verify_ssl: bool, + expected_family: int, + ssl_cipher: SSLCipherList, ) -> None: """Test init clientsession combinations.""" - client.async_get_clientsession(hass, verify_ssl=verify_ssl, family=expected_family) - client_session = hass.data[client.DATA_CLIENTSESSION][(verify_ssl, expected_family)] + client.async_get_clientsession( + hass, verify_ssl=verify_ssl, family=expected_family, ssl_cipher=ssl_cipher + ) + client_session = hass.data[client.DATA_CLIENTSESSION][ + (verify_ssl, expected_family, ssl_cipher) + ] assert isinstance(client_session, aiohttp.ClientSession) - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, expected_family)] + connector = hass.data[client.DATA_CONNECTOR][ + (verify_ssl, expected_family, ssl_cipher) + ] assert isinstance(connector, aiohttp.TCPConnector) @@ -110,10 +144,11 @@ async def test_create_clientsession_with_ssl_and_cookies(hass: HomeAssistant) -> assert isinstance(session, aiohttp.ClientSession) verify_ssl = True + ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 assert client.DATA_CLIENTSESSION not in hass.data - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family)] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)] assert 
isinstance(connector, aiohttp.TCPConnector) @@ -125,26 +160,61 @@ async def test_create_clientsession_without_ssl_and_cookies( assert isinstance(session, aiohttp.ClientSession) verify_ssl = False + ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 assert client.DATA_CLIENTSESSION not in hass.data - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family)] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)] assert isinstance(connector, aiohttp.TCPConnector) @pytest.mark.parametrize( - ("verify_ssl", "expected_family"), - [(True, 0), (False, 0), (True, 4), (False, 4), (True, 6), (False, 6)], + ("verify_ssl", "expected_family", "ssl_cipher"), + [ + (True, 0, SSLCipherList.PYTHON_DEFAULT), + (True, 4, SSLCipherList.PYTHON_DEFAULT), + (True, 6, SSLCipherList.PYTHON_DEFAULT), + (True, 0, SSLCipherList.INTERMEDIATE), + (True, 4, SSLCipherList.INTERMEDIATE), + (True, 6, SSLCipherList.INTERMEDIATE), + (True, 0, SSLCipherList.MODERN), + (True, 4, SSLCipherList.MODERN), + (True, 6, SSLCipherList.MODERN), + (True, 0, SSLCipherList.INSECURE), + (True, 4, SSLCipherList.INSECURE), + (True, 6, SSLCipherList.INSECURE), + (False, 0, SSLCipherList.PYTHON_DEFAULT), + (False, 4, SSLCipherList.PYTHON_DEFAULT), + (False, 6, SSLCipherList.PYTHON_DEFAULT), + (False, 0, SSLCipherList.INTERMEDIATE), + (False, 4, SSLCipherList.INTERMEDIATE), + (False, 6, SSLCipherList.INTERMEDIATE), + (False, 0, SSLCipherList.MODERN), + (False, 4, SSLCipherList.MODERN), + (False, 6, SSLCipherList.MODERN), + (False, 0, SSLCipherList.INSECURE), + (False, 4, SSLCipherList.INSECURE), + (False, 6, SSLCipherList.INSECURE), + ], ) async def test_get_clientsession_cleanup( - hass: HomeAssistant, verify_ssl: bool, expected_family: int + hass: HomeAssistant, + verify_ssl: bool, + expected_family: int, + ssl_cipher: SSLCipherList, ) -> None: """Test init clientsession cleanup.""" - client.async_get_clientsession(hass, verify_ssl=verify_ssl, family=expected_family) + client.async_get_clientsession( + hass, verify_ssl=verify_ssl, family=expected_family, ssl_cipher=ssl_cipher + ) - client_session = hass.data[client.DATA_CLIENTSESSION][(verify_ssl, expected_family)] + client_session = hass.data[client.DATA_CLIENTSESSION][ + (verify_ssl, expected_family, ssl_cipher) + ] assert isinstance(client_session, aiohttp.ClientSession) - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, expected_family)] + connector = hass.data[client.DATA_CONNECTOR][ + (verify_ssl, expected_family, ssl_cipher) + ] assert isinstance(connector, aiohttp.TCPConnector) hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE) @@ -158,17 +228,19 @@ async def test_get_clientsession_patched_close(hass: HomeAssistant) -> None: """Test closing clientsession does not work.""" verify_ssl = True + ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 with patch("aiohttp.ClientSession.close") as mock_close: session = client.async_get_clientsession(hass) assert isinstance( - hass.data[client.DATA_CLIENTSESSION][(verify_ssl, family)], + hass.data[client.DATA_CLIENTSESSION][(verify_ssl, family, ssl_cipher)], aiohttp.ClientSession, ) assert isinstance( - hass.data[client.DATA_CONNECTOR][(verify_ssl, family)], aiohttp.TCPConnector + hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)], + aiohttp.TCPConnector, ) with pytest.raises(RuntimeError): @@ -214,8 +286,8 @@ async def test_warning_close_session_integration( await session.close() assert ( "Detected that integration 'hue' closes the Home Assistant aiohttp session at " - 
"homeassistant/components/hue/light.py, line 23: await session.close(), " - "please create a bug report at https://github.com/home-assistant/core/issues?" + "homeassistant/components/hue/light.py, line 23: await session.close(). " + "Please create a bug report at https://github.com/home-assistant/core/issues?" "q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" ) in caplog.text @@ -258,8 +330,8 @@ async def test_warning_close_session_custom( await session.close() assert ( "Detected that custom integration 'hue' closes the Home Assistant aiohttp " - "session at custom_components/hue/light.py, line 23: await session.close(), " - "please report it to the author of the 'hue' custom integration" + "session at custom_components/hue/light.py, line 23: await session.close(). " + "Please report it to the author of the 'hue' custom integration" ) in caplog.text diff --git a/tests/helpers/test_area_registry.py b/tests/helpers/test_area_registry.py index ad571ac50cc..74f55c86a6c 100644 --- a/tests/helpers/test_area_registry.py +++ b/tests/helpers/test_area_registry.py @@ -45,7 +45,6 @@ async def test_create_area( id=ANY, labels=set(), name="mock", - normalized_name=ANY, picture=None, created_at=utcnow(), modified_at=utcnow(), @@ -77,7 +76,6 @@ async def test_create_area( id=ANY, labels={"label1", "label2"}, name="mock 2", - normalized_name=ANY, picture="/image/example.png", created_at=utcnow(), modified_at=utcnow(), @@ -196,7 +194,6 @@ async def test_update_area( id=ANY, labels={"label1", "label2"}, name="mock1", - normalized_name=ANY, picture="/image/example.png", created_at=created_at, modified_at=modified_at, @@ -242,9 +239,12 @@ async def test_update_area_with_same_name_change_case( async def test_update_area_with_name_already_in_use( area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, ) -> None: """Make sure that we can't update an area with a name already in use.""" - area1 = area_registry.async_create("mock1") + floor = floor_registry.async_create("mock") + floor_id = floor.floor_id + area1 = area_registry.async_create("mock1", floor_id=floor_id) area2 = area_registry.async_create("mock2") with pytest.raises(ValueError) as e_info: @@ -255,6 +255,8 @@ async def test_update_area_with_name_already_in_use( assert area2.name == "mock2" assert len(area_registry.areas) == 2 + assert area_registry.areas.get_areas_for_floor(floor_id) == [area1] + async def test_update_area_with_normalized_name_already_in_use( area_registry: ar.AreaRegistry, diff --git a/tests/helpers/test_collection.py b/tests/helpers/test_collection.py index f0287218d7f..f564f85ec3b 100644 --- a/tests/helpers/test_collection.py +++ b/tests/helpers/test_collection.py @@ -2,8 +2,10 @@ from __future__ import annotations +from datetime import timedelta import logging +from freezegun.api import FrozenDateTimeFactory import pytest import voluptuous as vol @@ -15,6 +17,7 @@ from homeassistant.helpers import ( storage, ) from homeassistant.helpers.typing import ConfigType +from homeassistant.util.dt import utcnow from tests.common import flush_store from tests.typing import WebSocketGenerator @@ -254,6 +257,84 @@ async def test_storage_collection(hass: HomeAssistant) -> None: } +async def test_storage_collection_update_modifiet_at( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that updating a storage collection will update the modified_at datetime in the entity registry.""" + + entities: dict[str, TestEntity] = {} + + class TestEntity(MockEntity): + 
"""Entity that is config based.""" + + def __init__(self, config: ConfigType) -> None: + """Initialize entity.""" + super().__init__(config) + self._state = "initial" + + @classmethod + def from_storage(cls, config: ConfigType) -> TestEntity: + """Create instance from storage.""" + obj = super().from_storage(config) + entities[obj.unique_id] = obj + return obj + + @property + def state(self) -> str: + """Return state of entity.""" + return self._state + + def set_state(self, value: str) -> None: + """Set value.""" + self._state = value + self.async_write_ha_state() + + store = storage.Store(hass, 1, "test-data") + data = {"id": "mock-1", "name": "Mock 1", "data": 1} + await store.async_save( + { + "items": [ + data, + ] + } + ) + id_manager = collection.IDManager() + ent_comp = entity_component.EntityComponent(_LOGGER, "test", hass) + await ent_comp.async_setup({}) + coll = MockStorageCollection(store, id_manager) + collection.sync_entity_lifecycle(hass, "test", "test", ent_comp, coll, TestEntity) + changes = track_changes(coll) + + await coll.async_load() + assert id_manager.has_id("mock-1") + assert len(changes) == 1 + assert changes[0] == (collection.CHANGE_ADDED, "mock-1", data) + + modified_1 = entity_registry.async_get("test.mock_1").modified_at + assert modified_1 == utcnow() + + freezer.tick(timedelta(minutes=1)) + + updated_item = await coll.async_update_item("mock-1", {"data": 2}) + assert id_manager.has_id("mock-1") + assert updated_item == {"id": "mock-1", "name": "Mock 1", "data": 2} + assert len(changes) == 2 + assert changes[1] == (collection.CHANGE_UPDATED, "mock-1", updated_item) + + modified_2 = entity_registry.async_get("test.mock_1").modified_at + assert modified_2 > modified_1 + assert modified_2 == utcnow() + + freezer.tick(timedelta(minutes=1)) + + entities["mock-1"].set_state("second") + + modified_3 = entity_registry.async_get("test.mock_1").modified_at + assert modified_3 == modified_2 + + async def test_attach_entity_component_collection(hass: HomeAssistant) -> None: """Test attaching collection to entity component.""" ent_comp = entity_component.EntityComponent(_LOGGER, "test", hass) diff --git a/tests/helpers/test_condition.py b/tests/helpers/test_condition.py index 31f813469cc..1ec78b20535 100644 --- a/tests/helpers/test_condition.py +++ b/tests/helpers/test_condition.py @@ -15,6 +15,8 @@ from homeassistant.const import ( CONF_CONDITION, CONF_DEVICE_ID, CONF_DOMAIN, + STATE_UNAVAILABLE, + STATE_UNKNOWN, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET, ) @@ -992,6 +994,83 @@ async def test_time_using_input_datetime(hass: HomeAssistant) -> None: condition.time(hass, before="input_datetime.not_existing") +async def test_time_using_time(hass: HomeAssistant) -> None: + """Test time conditions using time entities.""" + hass.states.async_set( + "time.am", + "06:00:00", # 6 am local time + ) + hass.states.async_set( + "time.pm", + "18:00:00", # 6 pm local time + ) + hass.states.async_set( + "time.unknown_state", + STATE_UNKNOWN, + ) + hass.states.async_set( + "time.unavailable_state", + STATE_UNAVAILABLE, + ) + + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=3), + ): + assert not condition.time(hass, after="time.am", before="time.pm") + assert condition.time(hass, after="time.pm", before="time.am") + + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=9), + ): + assert condition.time(hass, after="time.am", before="time.pm") + assert not condition.time(hass, 
after="time.pm", before="time.am") + + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=15), + ): + assert condition.time(hass, after="time.am", before="time.pm") + assert not condition.time(hass, after="time.pm", before="time.am") + + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=21), + ): + assert not condition.time(hass, after="time.am", before="time.pm") + assert condition.time(hass, after="time.pm", before="time.am") + + # Trigger on PM time + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=18, minute=0, second=0), + ): + assert condition.time(hass, after="time.pm", before="time.am") + assert not condition.time(hass, after="time.am", before="time.pm") + assert condition.time(hass, after="time.pm") + assert not condition.time(hass, before="time.pm") + + # Trigger on AM time + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=6, minute=0, second=0), + ): + assert not condition.time(hass, after="time.pm", before="time.am") + assert condition.time(hass, after="time.am", before="time.pm") + assert condition.time(hass, after="time.am") + assert not condition.time(hass, before="time.am") + + assert not condition.time(hass, after="time.unknown_state") + assert not condition.time(hass, before="time.unavailable_state") + + with pytest.raises(ConditionError): + condition.time(hass, after="time.not_existing") + + with pytest.raises(ConditionError): + condition.time(hass, before="time.not_existing") + + async def test_time_using_sensor(hass: HomeAssistant) -> None: """Test time conditions using sensor entities.""" hass.states.async_set( diff --git a/tests/helpers/test_config_entry_flow.py b/tests/helpers/test_config_entry_flow.py index 498e57d45a4..13e28bb8840 100644 --- a/tests/helpers/test_config_entry_flow.py +++ b/tests/helpers/test_config_entry_flow.py @@ -6,8 +6,8 @@ from unittest.mock import Mock, PropertyMock, patch import pytest from homeassistant import config_entries, data_entry_flow, setup -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import config_entry_flow from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 973f504df08..7202cef6f5f 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -6,6 +6,7 @@ import enum from functools import partial import logging import os +import re from socket import _GLOBAL_DEFAULT_TIMEOUT import threading from typing import Any @@ -25,6 +26,7 @@ from homeassistant.helpers import ( selector, template, ) +from homeassistant.helpers.config_validation import TRIGGER_SCHEMA def test_boolean() -> None: @@ -671,10 +673,12 @@ def test_template(hass: HomeAssistant) -> None: "Hello", "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function added as an extension by Home Assistant + # Function 'expand' added as an extension by Home Assistant "{{ expand('group.foo')|map(attribute='entity_id')|list }}", - # Filter added as an extension by Home Assistant + # Filter 'expand' added as an extension by Home Assistant "{{ ['group.foo']|expand|map(attribute='entity_id')|list }}", + # Non existing function 
'no_such_function' is not detected by Jinja2 + "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: schema(value) @@ -700,8 +704,11 @@ async def test_template_no_hass(hass: HomeAssistant) -> None: "Hello", "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function added as an extension by Home Assistant + # Function 'expand' added as an extension by Home Assistant, no error + # because non existing functions are not detected by Jinja2 "{{ expand('group.foo')|map(attribute='entity_id')|list }}", + # Non existing function 'no_such_function' is not detected by Jinja2 + "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: await hass.async_add_executor_job(schema, value) @@ -725,10 +732,12 @@ def test_dynamic_template(hass: HomeAssistant) -> None: options = ( "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function added as an extension by Home Assistant + # Function 'expand' added as an extension by Home Assistant "{{ expand('group.foo')|map(attribute='entity_id')|list }}", - # Filter added as an extension by Home Assistant + # Filter 'expand' added as an extension by Home Assistant "{{ ['group.foo']|expand|map(attribute='entity_id')|list }}", + # Non existing function 'no_such_function' is not detected by Jinja2 + "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: schema(value) @@ -754,8 +763,11 @@ async def test_dynamic_template_no_hass(hass: HomeAssistant) -> None: options = ( "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function added as an extension by Home Assistant + # Function 'expand' added as an extension by Home Assistant, no error + # because non existing functions are not detected by Jinja2 "{{ expand('group.foo')|map(attribute='entity_id')|list }}", + # Non existing function 'no_such_function' is not detected by Jinja2 + "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: await hass.async_add_executor_job(schema, value) @@ -1805,3 +1817,135 @@ async def test_async_validate(hass: HomeAssistant, tmpdir: py.path.local) -> Non "string": [hass.loop_thread_id], } validator_calls = {} + + +async def test_nested_trigger_list() -> None: + """Test triggers within nested lists are flattened.""" + + trigger_config = [ + { + "triggers": { + "platform": "event", + "event_type": "trigger_1", + }, + }, + { + "platform": "event", + "event_type": "trigger_2", + }, + {"triggers": []}, + {"triggers": None}, + { + "triggers": [ + { + "platform": "event", + "event_type": "trigger_3", + }, + { + "trigger": "event", + "event_type": "trigger_4", + }, + ], + }, + ] + + validated_triggers = TRIGGER_SCHEMA(trigger_config) + + assert validated_triggers == [ + { + "platform": "event", + "event_type": "trigger_1", + }, + { + "platform": "event", + "event_type": "trigger_2", + }, + { + "platform": "event", + "event_type": "trigger_3", + }, + { + "platform": "event", + "event_type": "trigger_4", + }, + ] + + +async def test_nested_trigger_list_extra() -> None: + """Test triggers key with extra keys is not modified.""" + + trigger_config = [ + { + "platform": "other", + "triggers": [ + { + "platform": "event", + "event_type": "trigger_1", + }, + { + "platform": "event", + "event_type": "trigger_2", + }, + ], + }, + ] + + validated_triggers = TRIGGER_SCHEMA(trigger_config) + + assert validated_triggers == [ + { + "platform": "other", + "triggers": [ + { + "platform": "event", + 
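# Condensed sketch of the flattening behaviour validated by the nested-trigger
# tests: nested "triggers" lists are inlined into one flat list and the
# "trigger" key is normalised to "platform".
flat = TRIGGER_SCHEMA(
    [
        {"triggers": [{"trigger": "event", "event_type": "a"}]},
        {"platform": "event", "event_type": "b"},
    ]
)
assert flat == [
    {"platform": "event", "event_type": "a"},
    {"platform": "event", "event_type": "b"},
]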
"event_type": "trigger_1", + }, + { + "platform": "event", + "event_type": "trigger_2", + }, + ], + }, + ] + + +async def test_trigger_backwards_compatibility() -> None: + """Test triggers with backwards compatibility.""" + + assert cv._trigger_pre_validator("str") == "str" + assert cv._trigger_pre_validator({"platform": "abc"}) == {"platform": "abc"} + assert cv._trigger_pre_validator({"trigger": "abc"}) == {"platform": "abc"} + with pytest.raises( + vol.Invalid, + match="Cannot specify both 'platform' and 'trigger'. Please use 'trigger' only.", + ): + cv._trigger_pre_validator({"trigger": "abc", "platform": "def"}) + with pytest.raises( + vol.Invalid, + match=re.escape("required key not provided @ data['trigger']"), + ): + cv._trigger_pre_validator({}) + + +async def test_is_entity_service_schema( + hass: HomeAssistant, +) -> None: + """Test cv.is_entity_service_schema.""" + for schema in ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), + vol.Any(cv.make_entity_service_schema({"some": str})), + ): + assert cv.is_entity_service_schema(schema) is False + + for schema in ( + cv.make_entity_service_schema({"some": str}), + vol.Schema(cv.make_entity_service_schema({"some": str})), + vol.Schema(vol.All(cv.make_entity_service_schema({"some": str}))), + vol.Schema(vol.Schema(cv.make_entity_service_schema({"some": str}))), + vol.All(cv.make_entity_service_schema({"some": str})), + vol.All(vol.All(cv.make_entity_service_schema({"some": str}))), + vol.All(vol.Schema(cv.make_entity_service_schema({"some": str}))), + ): + assert cv.is_entity_service_schema(schema) is True diff --git a/tests/helpers/test_debounce.py b/tests/helpers/test_debounce.py index 84b3d19b6d7..6fa758aec6e 100644 --- a/tests/helpers/test_debounce.py +++ b/tests/helpers/test_debounce.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import debounce from homeassistant.util.dt import utcnow -from ..common import async_fire_time_changed +from tests.common import async_fire_time_changed _LOGGER = logging.getLogger(__name__) diff --git a/tests/helpers/test_deprecation.py b/tests/helpers/test_deprecation.py index b48e70eff82..4cf7e851af3 100644 --- a/tests/helpers/test_deprecation.py +++ b/tests/helpers/test_deprecation.py @@ -13,6 +13,7 @@ from homeassistant.helpers.deprecation import ( DeprecatedAlias, DeprecatedConstant, DeprecatedConstantEnum, + EnumWithDeprecatedMembers, check_if_deprecated_constant, deprecated_class, deprecated_function, @@ -520,3 +521,119 @@ def test_dir_with_deprecated_constants( ) -> None: """Test dir() with deprecated constants.""" assert dir_with_deprecated_constants([*module_globals.keys()]) == expected + + +@pytest.mark.parametrize( + ("module_name", "extra_extra_msg"), + [ + ("homeassistant.components.hue.light", ""), # builtin integration + ( + "config.custom_components.hue.light", + ", please report it to the author of the 'hue' custom integration", + ), # custom component integration + ], +) +def test_enum_with_deprecated_members( + caplog: pytest.LogCaptureFixture, + module_name: str, + extra_extra_msg: str, +) -> None: + """Test EnumWithDeprecatedMembers.""" + filename = f"/home/paulus/{module_name.replace('.', '/')}.py" + + class TestEnum( + StrEnum, + metaclass=EnumWithDeprecatedMembers, + deprecated={ + "CATS": ("TestEnum.CATS_PER_CM", "2025.11.0"), + "DOGS": ("TestEnum.DOGS_PER_CM", None), + }, + ): + """Zoo units.""" + + CATS_PER_CM = "cats/cm" + DOGS_PER_CM = "dogs/cm" + CATS = 
"cats/cm" + DOGS = "dogs/cm" + + # mock sys.modules for homeassistant/helpers/frame.py#get_integration_frame + with ( + patch.dict(sys.modules, {module_name: Mock(__file__=filename)}), + patch( + "homeassistant.helpers.frame.linecache.getline", + return_value="await session.close()", + ), + patch( + "homeassistant.helpers.frame.get_current_frame", + return_value=extract_stack_to_frame( + [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="23", + line="do_something()", + ), + Mock( + filename=filename, + lineno="23", + line="await session.close()", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ] + ), + ), + ): + TestEnum.CATS # noqa: B018 + TestEnum.DOGS # noqa: B018 + + assert len(caplog.record_tuples) == 2 + assert ( + "tests.helpers.test_deprecation", + logging.WARNING, + ( + "TestEnum.CATS was used from hue, this is a deprecated enum member which " + "will be removed in HA Core 2025.11.0. Use TestEnum.CATS_PER_CM instead" + f"{extra_extra_msg}" + ), + ) in caplog.record_tuples + assert ( + "tests.helpers.test_deprecation", + logging.WARNING, + ( + "TestEnum.DOGS was used from hue, this is a deprecated enum member. Use " + f"TestEnum.DOGS_PER_CM instead{extra_extra_msg}" + ), + ) in caplog.record_tuples + + +def test_enum_with_deprecated_members_integration_not_found( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test check_if_deprecated_constant.""" + + class TestEnum( + StrEnum, + metaclass=EnumWithDeprecatedMembers, + deprecated={ + "CATS": ("TestEnum.CATS_PER_CM", "2025.11.0"), + "DOGS": ("TestEnum.DOGS_PER_CM", None), + }, + ): + """Zoo units.""" + + CATS_PER_CM = "cats/cm" + DOGS_PER_CM = "dogs/cm" + CATS = "cats/cm" + DOGS = "dogs/cm" + + with patch( + "homeassistant.helpers.frame.get_current_frame", + side_effect=MissingIntegrationFrame, + ): + TestEnum.CATS # noqa: B018 + TestEnum.DOGS # noqa: B018 + + assert len(caplog.record_tuples) == 0 diff --git a/tests/helpers/test_device_registry.py b/tests/helpers/test_device_registry.py index 129c6b0d37c..cf7bbe7d1e2 100644 --- a/tests/helpers/test_device_registry.py +++ b/tests/helpers/test_device_registry.py @@ -23,13 +23,7 @@ from homeassistant.helpers import ( ) from homeassistant.util.dt import utcnow -from tests.common import ( - MockConfigEntry, - async_capture_events, - flush_store, - help_test_all, - import_and_test_deprecated_constant_enum, -) +from tests.common import MockConfigEntry, async_capture_events, flush_store @pytest.fixture @@ -308,12 +302,12 @@ async def test_loading_from_storage( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_1_to_1_7( +async def test_migration_from_1_1( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.1 to 1.7.""" + """Test migration from version 1.1.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 1, @@ -332,7 +326,7 @@ async def test_migration_1_1_to_1_7( }, # Invalid entry type { - "config_entries": [None], + "config_entries": ["234567"], "connections": [], "entry_type": "INVALID_VALUE", "id": "invalid-entry-type", @@ -412,7 +406,7 @@ async def test_migration_1_1_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -451,12 +445,12 @@ async def test_migration_1_1_to_1_7( @pytest.mark.parametrize("load_registries", 
[False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_2_to_1_7( +async def test_migration_from_1_2( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.2 to 1.7.""" + """Test migration from version 1.2.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 2, @@ -482,7 +476,7 @@ async def test_migration_1_2_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -556,7 +550,7 @@ async def test_migration_1_2_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -585,12 +579,12 @@ async def test_migration_1_2_to_1_7( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_3_to_1_7( +async def test_migration_fom_1_3( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.3 to 1.7.""" + """Test migration from version 1.3.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 3, @@ -616,7 +610,7 @@ async def test_migration_1_3_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -690,7 +684,7 @@ async def test_migration_1_3_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -719,12 +713,12 @@ async def test_migration_1_3_to_1_7( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_4_to_1_7( +async def test_migration_from_1_4( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.4 to 1.7.""" + """Test migration from version 1.4.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 4, @@ -751,7 +745,7 @@ async def test_migration_1_4_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -826,7 +820,7 @@ async def test_migration_1_4_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -855,12 +849,12 @@ async def test_migration_1_4_to_1_7( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_5_to_1_7( +async def test_migration_from_1_5( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.5 to 1.7.""" + """Test migration from version 1.5.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 5, @@ -888,7 +882,7 @@ async def test_migration_1_5_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -964,7 +958,7 @@ async def test_migration_1_5_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -993,12 +987,12 
@@ async def test_migration_1_5_to_1_7( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_6_to_1_8( +async def test_migration_from_1_6( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.6 to 1.8.""" + """Test migration from version 1.6.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 6, @@ -1027,7 +1021,7 @@ async def test_migration_1_6_to_1_8( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -1104,7 +1098,7 @@ async def test_migration_1_6_to_1_8( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -1133,12 +1127,12 @@ async def test_migration_1_6_to_1_8( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_7_to_1_8( +async def test_migration_from_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.7 to 1.8.""" + """Test migration from version 1.7.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 7, @@ -1168,7 +1162,7 @@ async def test_migration_1_7_to_1_8( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -1246,7 +1240,7 @@ async def test_migration_1_7_to_1_8( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -1482,7 +1476,9 @@ async def test_removing_area_id( async def test_specifying_via_device_create( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, ) -> None: """Test specifying a via_device and removal of the hub device.""" config_entry_1 = MockConfigEntry() @@ -1513,9 +1509,32 @@ async def test_specifying_via_device_create( light = device_registry.async_get_device(identifiers={("hue", "456")}) assert light.via_device_id is None + # A device with a non existing via_device reference should create + light_via_nonexisting_parent_device = device_registry.async_get_or_create( + config_entry_id=config_entry_2.entry_id, + connections=set(), + identifiers={("hue", "789")}, + manufacturer="manufacturer", + model="light", + via_device=("hue", "non_existing_123"), + ) + assert { + "calls `device_registry.async_get_or_create` " + "referencing a non existing `via_device` " + '("hue","non_existing_123")' in caplog.text + } + assert light_via_nonexisting_parent_device is not None + assert light_via_nonexisting_parent_device.via_device_id is None + nonexisting_parent_device = device_registry.async_get_device( + identifiers={("hue", "non_existing_123")} + ) + assert nonexisting_parent_device is None + async def test_specifying_via_device_update( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, ) -> None: """Test specifying a via_device and updating.""" config_entry_1 = MockConfigEntry() @@ -1529,6 +1548,7 @@ async def test_specifying_via_device_update( identifiers={("hue", "456")}, manufacturer="manufacturer", 
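# Condensed sketch of the behaviour asserted in test_specifying_via_device_create
# above: a via_device reference to a device that was never registered is logged
# and ignored, and no placeholder parent device is created.
light = device_registry.async_get_or_create(
    config_entry_id=config_entry_2.entry_id,
    identifiers={("hue", "789")},
    manufacturer="manufacturer",
    model="light",
    via_device=("hue", "non_existing_123"),
)
assert light.via_device_id is None
assert device_registry.async_get_device(identifiers={("hue", "non_existing_123")}) is None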
model="light", + name="Light", via_device=("hue", "0123"), ) @@ -1552,6 +1572,26 @@ async def test_specifying_via_device_update( ) assert light.via_device_id == via.id + assert light.name == "Light" + + # Try updating with a non existing via device + light = device_registry.async_get_or_create( + config_entry_id=config_entry_2.entry_id, + connections=set(), + identifiers={("hue", "456")}, + manufacturer="manufacturer", + model="light", + name="New light", + via_device=("hue", "non_existing_abc"), + ) + assert { + "calls `device_registry.async_get_or_create` " + "referencing a non existing `via_device` " + '("hue","non_existing_123")' in caplog.text + } + # Assert the name was updated correctly + assert light.via_device_id == via.id + assert light.name == "New light" async def test_loading_saving_data( @@ -2858,20 +2898,6 @@ async def test_loading_invalid_configuration_url_from_storage( assert entry.configuration_url == "invalid" -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(dr) - - -@pytest.mark.parametrize(("enum"), list(dr.DeviceEntryDisabler)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: dr.DeviceEntryDisabler, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, dr, enum, "DISABLED_", "2025.1") - - async def test_removing_labels( hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: diff --git a/tests/helpers/test_discovery_flow.py b/tests/helpers/test_discovery_flow.py index 0fa315d684b..dde0f209706 100644 --- a/tests/helpers/test_discovery_flow.py +++ b/tests/helpers/test_discovery_flow.py @@ -8,7 +8,8 @@ import pytest from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STARTED from homeassistant.core import CoreState, HomeAssistant -from homeassistant.helpers import discovery_flow +from homeassistant.helpers import discovery_flow, json as json_helper +from homeassistant.helpers.discovery_flow import DiscoveryKey @pytest.fixture @@ -20,8 +21,29 @@ def mock_flow_init(hass: HomeAssistant) -> Generator[AsyncMock]: yield mock_init +@pytest.mark.parametrize( + ("discovery_key", "context"), + [ + (None, {}), + ( + DiscoveryKey(domain="test", key="string_key", version=1), + {"discovery_key": DiscoveryKey(domain="test", key="string_key", version=1)}, + ), + ( + DiscoveryKey(domain="test", key=("one", "two"), version=1), + { + "discovery_key": DiscoveryKey( + domain="test", key=("one", "two"), version=1 + ) + }, + ), + ], +) async def test_async_create_flow( - hass: HomeAssistant, mock_flow_init: AsyncMock + hass: HomeAssistant, + mock_flow_init: AsyncMock, + discovery_key: DiscoveryKey | None, + context: {}, ) -> None: """Test we can create a flow.""" discovery_flow.async_create_flow( @@ -29,11 +51,12 @@ async def test_async_create_flow( "hue", {"source": config_entries.SOURCE_HOMEKIT}, {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + discovery_key=discovery_key, ) assert mock_flow_init.mock_calls == [ call( "hue", - context={"source": "homekit"}, + context={"source": "homekit"} | context, data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, ) ] @@ -68,7 +91,7 @@ async def test_async_create_flow_checks_existing_flows_after_startup( """Test existing flows prevent an identical ones from being after startup.""" hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) with patch( - "homeassistant.data_entry_flow.FlowManager.async_has_matching_flow", + "homeassistant.config_entries.ConfigEntriesFlowManager.async_has_matching_discovery_flow", 
return_value=True, ): discovery_flow.async_create_flow( @@ -118,3 +141,16 @@ async def test_async_create_flow_does_nothing_after_stop( {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, ) assert len(mock_flow_init.mock_calls) == 0 + + +@pytest.mark.parametrize("key", ["test", ("blah", "bleh")]) +def test_discovery_key_serialize_deserialize(key: str | tuple[str]) -> None: + """Test serialize and deserialize discovery key.""" + discovery_key_1 = discovery_flow.DiscoveryKey( + domain="test_domain", key=key, version=1 + ) + serialized = json_helper.json_dumps(discovery_key_1) + assert ( + discovery_flow.DiscoveryKey.from_json_dict(json_helper.json_loads(serialized)) + == discovery_key_1 + ) diff --git a/tests/helpers/test_dispatcher.py b/tests/helpers/test_dispatcher.py index 0350b2e6e3a..edd18d54db4 100644 --- a/tests/helpers/test_dispatcher.py +++ b/tests/helpers/test_dispatcher.py @@ -73,7 +73,7 @@ async def test_signal_type_format(hass: HomeAssistant) -> None: assert calls == [("Hello", 2)] # Test compatibility with string keys - async_dispatcher_send(hass, "test-{}".format("unique-id"), "x", 4) + async_dispatcher_send(hass, "test-unique-id", "x", 4) await hass.async_block_till_done() assert calls == [("Hello", 2), ("x", 4)] diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 58554059fb4..dc579ab6e8d 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -4,14 +4,13 @@ import asyncio from collections.abc import Iterable import dataclasses from datetime import timedelta -from enum import IntFlag -from functools import cached_property import logging import threading from typing import Any from unittest.mock import MagicMock, PropertyMock, patch from freezegun.api import FrozenDateTimeFactory +from propcache import cached_property import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol @@ -2314,7 +2313,12 @@ async def test_update_capabilities_too_often_cooldown( @pytest.mark.parametrize( - ("property", "default_value", "values"), [("attribution", None, ["abcd", "efgh"])] + ("property", "default_value", "values"), + [ + ("attribution", None, ["abcd", "efgh"]), + ("attribution", None, [True, 1]), + ("attribution", None, [1.0, 1]), + ], ) async def test_cached_entity_properties( hass: HomeAssistant, property: str, default_value: Any, values: Any @@ -2323,22 +2327,30 @@ async def test_cached_entity_properties( ent1 = entity.Entity() ent2 = entity.Entity() assert getattr(ent1, property) == default_value + assert type(getattr(ent1, property)) is type(default_value) assert getattr(ent2, property) == default_value + assert type(getattr(ent2, property)) is type(default_value) # Test set setattr(ent1, f"_attr_{property}", values[0]) assert getattr(ent1, property) == values[0] + assert type(getattr(ent1, property)) is type(values[0]) assert getattr(ent2, property) == default_value + assert type(getattr(ent2, property)) is type(default_value) # Test update setattr(ent1, f"_attr_{property}", values[1]) assert getattr(ent1, property) == values[1] + assert type(getattr(ent1, property)) is type(values[1]) assert getattr(ent2, property) == default_value + assert type(getattr(ent2, property)) is type(default_value) # Test delete delattr(ent1, f"_attr_{property}") assert getattr(ent1, property) == default_value + assert type(getattr(ent1, property)) is type(default_value) assert getattr(ent2, property) == default_value + assert type(getattr(ent2, property)) is type(default_value) async def test_cached_entity_property_delete_attr(hass: 
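# Sketch of the per-instance caching these tightened assertions cover: the cached
# entity property tracks both the value and the type of the backing _attr_
# attribute, and deleting the attribute restores the default.
ent = entity.Entity()
assert ent.attribution is None
ent._attr_attribution = "abcd"
assert ent.attribution == "abcd"
del ent._attr_attribution
assert ent.attribution is None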
HomeAssistant) -> None: @@ -2473,31 +2485,6 @@ async def test_cached_entity_property_override(hass: HomeAssistant) -> None: return "🤡" -async def test_entity_report_deprecated_supported_features_values( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test reporting deprecated supported feature values only happens once.""" - ent = entity.Entity() - - class MockEntityFeatures(IntFlag): - VALUE1 = 1 - VALUE2 = 2 - - ent._report_deprecated_supported_features_values(MockEntityFeatures(2)) - assert ( - "is using deprecated supported features values which will be removed" - in caplog.text - ) - assert "MockEntityFeatures.VALUE2" in caplog.text - - caplog.clear() - ent._report_deprecated_supported_features_values(MockEntityFeatures(2)) - assert ( - "is using deprecated supported features values which will be removed" - not in caplog.text - ) - - async def test_remove_entity_registry( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 5ce0292c2ec..940bd3e37fd 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -23,7 +23,7 @@ from homeassistant.core import ( callback, ) from homeassistant.exceptions import HomeAssistantError, PlatformNotReady -from homeassistant.helpers import discovery +from homeassistant.helpers import config_validation as cv, discovery from homeassistant.helpers.entity_component import EntityComponent, async_update_entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -189,13 +189,14 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non ] ) - call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) + call_1 = ServiceCall(hass, "test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert sorted( ent.entity_id for ent in (await component.async_extract_from_service(call_1)) ) == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( + hass, "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, @@ -256,17 +257,18 @@ async def test_extract_from_service_fails_if_no_entity_id(hass: HomeAssistant) - ) assert ( - await component.async_extract_from_service(ServiceCall("test", "service")) == [] + await component.async_extract_from_service(ServiceCall(hass, "test", "service")) + == [] ) assert ( await component.async_extract_from_service( - ServiceCall("test", "service", {"entity_id": ENTITY_MATCH_NONE}) + ServiceCall(hass, "test", "service", {"entity_id": ENTITY_MATCH_NONE}) ) == [] ) assert ( await component.async_extract_from_service( - ServiceCall("test", "service", {"area_id": ENTITY_MATCH_NONE}) + ServiceCall(hass, "test", "service", {"area_id": ENTITY_MATCH_NONE}) ) == [] ) @@ -283,6 +285,7 @@ async def test_extract_from_service_filter_out_non_existing_entities( ) call = ServiceCall( + hass, "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, @@ -299,7 +302,7 @@ async def test_extract_from_service_no_group_expand(hass: HomeAssistant) -> None await component.async_setup({}) await component.async_add_entities([MockEntity(entity_id="group.test_group")]) - call = ServiceCall("test", "service", {"entity_id": ["group.test_group"]}) + call = ServiceCall(hass, "test", "service", {"entity_id": ["group.test_group"]}) extracted = await component.async_extract_from_service(call, expand_group=False) assert 
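# Sketch of the signature change applied throughout these tests: ServiceCall now
# takes the HomeAssistant instance as its first argument, ahead of domain and
# service.
call = ServiceCall(hass, "test", "service", {"entity_id": ENTITY_MATCH_ALL})
assert call.domain == "test"
assert call.service == "service"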
len(extracted) == 1 @@ -465,7 +468,7 @@ async def test_extract_all_omit_entity_id( [MockEntity(name="test_1"), MockEntity(name="test_2")] ) - call = ServiceCall("test", "service") + call = ServiceCall(hass, "test", "service") assert ( sorted( @@ -485,7 +488,7 @@ async def test_extract_all_use_match_all( [MockEntity(name="test_1"), MockEntity(name="test_2")] ) - call = ServiceCall("test", "service", {"entity_id": "all"}) + call = ServiceCall(hass, "test", "service", {"entity_id": "all"}) assert sorted( ent.entity_id for ent in await component.async_extract_from_service(call) @@ -557,30 +560,32 @@ async def test_register_entity_service( async def test_register_entity_service_non_entity_service_schema( - hass: HomeAssistant, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: - """Test attempting to register a service with an incomplete schema.""" + """Test attempting to register a service with a non entity service schema.""" component = EntityComponent(_LOGGER, DOMAIN, hass) + expected_message = "registers an entity service with a non entity service schema" - with pytest.raises( - HomeAssistantError, - match=( - "The schema does not include all required keys: entity_id, device_id, area_id, " - "floor_id, label_id" - ), - ): - component.async_register_entity_service( - "hello", vol.Schema({"some": str}), Mock() + for idx, schema in enumerate( + ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), ) + ): + component.async_register_entity_service(f"hello_{idx}", schema, Mock()) + assert expected_message in caplog.text + caplog.clear() - # The check currently does not recurse into vol.All or vol.Any allowing these - # non-compliant schemas to pass - component.async_register_entity_service( - "hello", vol.All(vol.Schema({"some": str})), Mock() - ) - component.async_register_entity_service( - "hello", vol.Any(vol.Schema({"some": str})), Mock() - ) + for idx, schema in enumerate( + ( + cv.make_entity_service_schema({"some": str}), + vol.Schema(cv.make_entity_service_schema({"some": str})), + vol.All(cv.make_entity_service_schema({"some": str})), + ) + ): + component.async_register_entity_service(f"test_service_{idx}", schema, Mock()) + assert expected_message not in caplog.text async def test_register_entity_service_response_data(hass: HomeAssistant) -> None: diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 2cc3348626c..e80006dff84 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -8,6 +8,7 @@ from typing import Any from unittest.mock import ANY, AsyncMock, Mock, patch import pytest +from syrupy.assertion import SnapshotAssertion import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -23,6 +24,7 @@ from homeassistant.core import ( from homeassistant.exceptions import HomeAssistantError, PlatformNotReady from homeassistant.helpers import ( area_registry as ar, + config_validation as cv, device_registry as dr, entity_platform, entity_registry as er, @@ -877,9 +879,9 @@ async def test_setup_entry( assert full_name in hass.config.components assert len(hass.states.async_entity_ids()) == 1 assert len(entity_registry.entities) == 1 - assert ( - entity_registry.entities["test_domain.test1"].config_entry_id == "super-mock-id" - ) + + entity_registry_entry = entity_registry.entities["test_domain.test1"] + assert entity_registry_entry.config_entry_id == "super-mock-id" async def test_setup_entry_platform_not_ready( @@ 
-1130,7 +1132,9 @@ async def test_add_entity_with_invalid_id( async def test_device_info_called( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test device info is forwarded correctly.""" config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1184,18 +1188,9 @@ async def test_device_info_called( assert len(hass.states.async_entity_ids()) == 2 device = device_registry.async_get_device(identifiers={("hue", "1234")}) - assert device is not None - assert device.identifiers == {("hue", "1234")} - assert device.configuration_url == "http://192.168.0.100/config" - assert device.connections == {(dr.CONNECTION_NETWORK_MAC, "abcd")} - assert device.entry_type is dr.DeviceEntryType.SERVICE - assert device.manufacturer == "test-manuf" - assert device.model == "test-model" - assert device.name == "test-name" + assert device == snapshot + assert device.config_entries == {config_entry.entry_id} assert device.primary_config_entry == config_entry.entry_id - assert device.suggested_area == "Heliport" - assert device.sw_version == "test-sw" - assert device.hw_version == "test-hw" assert device.via_device_id == via.id @@ -1810,33 +1805,36 @@ async def test_register_entity_service_none_schema( async def test_register_entity_service_non_entity_service_schema( - hass: HomeAssistant, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: - """Test attempting to register a service with an incomplete schema.""" + """Test attempting to register a service with a non entity service schema.""" entity_platform = MockEntityPlatform( hass, domain="mock_integration", platform_name="mock_platform", platform=None ) + expected_message = "registers an entity service with a non entity service schema" - with pytest.raises( - HomeAssistantError, - match=( - "The schema does not include all required keys: entity_id, device_id, area_id, " - "floor_id, label_id" - ), + for idx, schema in enumerate( + ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), + ) + ): + entity_platform.async_register_entity_service(f"hello_{idx}", schema, Mock()) + assert expected_message in caplog.text + caplog.clear() + + for idx, schema in enumerate( + ( + cv.make_entity_service_schema({"some": str}), + vol.Schema(cv.make_entity_service_schema({"some": str})), + vol.All(cv.make_entity_service_schema({"some": str})), + ) ): entity_platform.async_register_entity_service( - "hello", - vol.Schema({"some": str}), - Mock(), + f"test_service_{idx}", schema, Mock() ) - # The check currently does not recurse into vol.All or vol.Any allowing these - # non-compliant schemas to pass - entity_platform.async_register_entity_service( - "hello", vol.All(vol.Schema({"some": str})), Mock() - ) - entity_platform.async_register_entity_service( - "hello", vol.Any(vol.Schema({"some": str})), Mock() - ) + assert expected_message not in caplog.text @pytest.mark.parametrize("update_before_add", [True, False]) diff --git a/tests/helpers/test_entity_registry.py b/tests/helpers/test_entity_registry.py index 9b1d68c7777..97f7e1dcc56 100644 --- a/tests/helpers/test_entity_registry.py +++ b/tests/helpers/test_entity_registry.py @@ -653,36 +653,36 @@ async def test_deleted_entity_removing_config_entry_id( entity_registry: er.EntityRegistry, ) -> None: """Test that we update config entry id in registry on deleted entity.""" - mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1") + 
mock_config1 = MockConfigEntry(domain="light", entry_id="mock-id-1") + mock_config2 = MockConfigEntry(domain="light", entry_id="mock-id-2") - entry = entity_registry.async_get_or_create( - "light", "hue", "5678", config_entry=mock_config + entry1 = entity_registry.async_get_or_create( + "light", "hue", "5678", config_entry=mock_config1 ) - assert entry.config_entry_id == "mock-id-1" - entity_registry.async_remove(entry.entity_id) + assert entry1.config_entry_id == "mock-id-1" + entry2 = entity_registry.async_get_or_create( + "light", "hue", "1234", config_entry=mock_config2 + ) + assert entry2.config_entry_id == "mock-id-2" + entity_registry.async_remove(entry1.entity_id) + entity_registry.async_remove(entry2.entity_id) assert len(entity_registry.entities) == 0 - assert len(entity_registry.deleted_entities) == 1 - assert ( - entity_registry.deleted_entities[("light", "hue", "5678")].config_entry_id - == "mock-id-1" - ) - assert ( - entity_registry.deleted_entities[("light", "hue", "5678")].orphaned_timestamp - is None - ) + assert len(entity_registry.deleted_entities) == 2 + deleted_entry1 = entity_registry.deleted_entities[("light", "hue", "5678")] + assert deleted_entry1.config_entry_id == "mock-id-1" + assert deleted_entry1.orphaned_timestamp is None + deleted_entry2 = entity_registry.deleted_entities[("light", "hue", "1234")] + assert deleted_entry2.config_entry_id == "mock-id-2" + assert deleted_entry2.orphaned_timestamp is None entity_registry.async_clear_config_entry("mock-id-1") assert len(entity_registry.entities) == 0 - assert len(entity_registry.deleted_entities) == 1 - assert ( - entity_registry.deleted_entities[("light", "hue", "5678")].config_entry_id - is None - ) - assert ( - entity_registry.deleted_entities[("light", "hue", "5678")].orphaned_timestamp - is not None - ) + assert len(entity_registry.deleted_entities) == 2 + deleted_entry1 = entity_registry.deleted_entities[("light", "hue", "5678")] + assert deleted_entry1.config_entry_id is None + assert deleted_entry1.orphaned_timestamp is not None + assert entity_registry.deleted_entities[("light", "hue", "1234")] == deleted_entry2 async def test_removing_area_id(entity_registry: er.EntityRegistry) -> None: @@ -842,6 +842,123 @@ async def test_migration_1_7(hass: HomeAssistant, hass_storage: dict[str, Any]) assert entry.original_device_class == "class_by_integration" +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_1_11( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.11. + + This is the first version which has deleted entities, make sure deleted entities + are updated. 
+ """ + hass_storage[er.STORAGE_KEY] = { + "version": 1, + "minor_version": 11, + "data": { + "entities": [ + { + "aliases": [], + "area_id": None, + "capabilities": {}, + "config_entry_id": None, + "device_id": None, + "disabled_by": None, + "entity_category": None, + "entity_id": "test.entity", + "has_entity_name": False, + "hidden_by": None, + "icon": None, + "id": "12345", + "modified_at": "1970-01-01T00:00:00+00:00", + "name": None, + "options": {}, + "original_device_class": "best_class", + "original_icon": None, + "original_name": None, + "platform": "super_platform", + "supported_features": 0, + "translation_key": None, + "unique_id": "very_unique", + "unit_of_measurement": None, + "device_class": None, + } + ], + "deleted_entities": [ + { + "config_entry_id": None, + "entity_id": "test.deleted_entity", + "id": "23456", + "orphaned_timestamp": None, + "platform": "super_duper_platform", + "unique_id": "very_very_unique", + } + ], + }, + } + + await er.async_load(hass) + registry = er.async_get(hass) + + entry = registry.async_get_or_create("test", "super_platform", "very_unique") + + assert entry.device_class is None + assert entry.original_device_class == "best_class" + + # Check migrated data + await flush_store(registry._store) + assert hass_storage[er.STORAGE_KEY] == { + "version": er.STORAGE_VERSION_MAJOR, + "minor_version": er.STORAGE_VERSION_MINOR, + "key": er.STORAGE_KEY, + "data": { + "entities": [ + { + "aliases": [], + "area_id": None, + "capabilities": {}, + "categories": {}, + "config_entry_id": None, + "created_at": "1970-01-01T00:00:00+00:00", + "device_id": None, + "disabled_by": None, + "entity_category": None, + "entity_id": "test.entity", + "has_entity_name": False, + "hidden_by": None, + "icon": None, + "id": ANY, + "labels": [], + "modified_at": "1970-01-01T00:00:00+00:00", + "name": None, + "options": {}, + "original_device_class": "best_class", + "original_icon": None, + "original_name": None, + "platform": "super_platform", + "previous_unique_id": None, + "supported_features": 0, + "translation_key": None, + "unique_id": "very_unique", + "unit_of_measurement": None, + "device_class": None, + } + ], + "deleted_entities": [ + { + "config_entry_id": None, + "created_at": "1970-01-01T00:00:00+00:00", + "entity_id": "test.deleted_entity", + "id": "23456", + "modified_at": "1970-01-01T00:00:00+00:00", + "orphaned_timestamp": None, + "platform": "super_duper_platform", + "unique_id": "very_very_unique", + } + ], + }, + } + + async def test_update_entity_unique_id(entity_registry: er.EntityRegistry) -> None: """Test entity's unique_id is updated.""" mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1") @@ -1030,14 +1147,17 @@ async def test_disabled_by(entity_registry: er.EntityRegistry) -> None: "light", "hue", "5678", disabled_by=er.RegistryEntryDisabler.HASS ) assert entry.disabled_by is er.RegistryEntryDisabler.HASS + assert entry.disabled is True entry = entity_registry.async_get_or_create( "light", "hue", "5678", disabled_by=er.RegistryEntryDisabler.INTEGRATION ) assert entry.disabled_by is er.RegistryEntryDisabler.HASS + assert entry.disabled is True entry2 = entity_registry.async_get_or_create("light", "hue", "1234") assert entry2.disabled_by is None + assert entry2.disabled is False async def test_disabled_by_config_entry_pref( @@ -1064,6 +1184,25 @@ async def test_disabled_by_config_entry_pref( assert entry2.disabled_by is er.RegistryEntryDisabler.USER +async def test_hidden_by(entity_registry: er.EntityRegistry) -> None: + """Test that we 
can hide an entry when we create it.""" + entry = entity_registry.async_get_or_create( + "light", "hue", "5678", hidden_by=er.RegistryEntryHider.USER + ) + assert entry.hidden_by is er.RegistryEntryHider.USER + assert entry.hidden is True + + entry = entity_registry.async_get_or_create( + "light", "hue", "5678", disabled_by=er.RegistryEntryHider.INTEGRATION + ) + assert entry.hidden_by is er.RegistryEntryHider.USER + assert entry.hidden is True + + entry2 = entity_registry.async_get_or_create("light", "hue", "1234") + assert entry2.hidden_by is None + assert entry2.hidden is False + + async def test_restore_states( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: diff --git a/tests/helpers/test_event.py b/tests/helpers/test_event.py index 6c71f1d8a7c..a0014587cd0 100644 --- a/tests/helpers/test_event.py +++ b/tests/helpers/test_event.py @@ -1892,10 +1892,10 @@ async def test_track_template_result_complex(hass: HomeAssistant) -> None: "time": False, } - hass.states.async_set("binary_sensor.single", "binary_sensor_on") + hass.states.async_set("binary_sensor.single", "on") await hass.async_block_till_done() assert len(specific_runs) == 9 - assert specific_runs[8] == "binary_sensor_on" + assert specific_runs[8] == "on" assert info.listeners == { "all": False, "domains": set(), @@ -4387,8 +4387,8 @@ async def test_call_later(hass: HomeAssistant) -> None: schedule_utctime = dt_util.utcnow() @callback - def action(__utcnow: datetime): - _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp() + def action(utcnow: datetime, /): + _current_delay = utcnow.timestamp() - schedule_utctime.timestamp() future.set_result(delay < _current_delay < (delay + delay_tolerance)) async_call_later(hass, delay, action) @@ -4407,8 +4407,8 @@ async def test_async_call_later(hass: HomeAssistant) -> None: schedule_utctime = dt_util.utcnow() @callback - def action(__utcnow: datetime): - _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp() + def action(utcnow: datetime, /): + _current_delay = utcnow.timestamp() - schedule_utctime.timestamp() future.set_result(delay < _current_delay < (delay + delay_tolerance)) remove = async_call_later(hass, delay, action) @@ -4429,8 +4429,8 @@ async def test_async_call_later_timedelta(hass: HomeAssistant) -> None: schedule_utctime = dt_util.utcnow() @callback - def action(__utcnow: datetime): - _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp() + def action(utcnow: datetime, /): + _current_delay = utcnow.timestamp() - schedule_utctime.timestamp() future.set_result(delay < _current_delay < (delay + delay_tolerance)) remove = async_call_later(hass, timedelta(seconds=delay), action) @@ -4450,7 +4450,7 @@ async def test_async_call_later_cancel(hass: HomeAssistant) -> None: delay_tolerance = 0.1 @callback - def action(__now: datetime): + def action(now: datetime, /): future.set_result(False) remove = async_call_later(hass, delay, action) @@ -4895,7 +4895,7 @@ async def test_track_state_change_deprecated( assert ( "Detected code that calls `async_track_state_change` instead " "of `async_track_state_change_event` which is deprecated and " - "will be removed in Home Assistant 2025.5. Please report this issue." + "will be removed in Home Assistant 2025.5. 
Please report this issue" ) in caplog.text @@ -4938,3 +4938,45 @@ async def test_async_track_state_report_event(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(tracker_called) == 2 unsub() + + +async def test_async_track_template_no_hass_deprecated( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test async_track_template with a template without hass is deprecated.""" + message = ( + "Detected code that calls async_track_template_result with template without " + "hass. This will stop working in Home Assistant 2025.10, please " + "report this issue" + ) + + async_track_template(hass, Template("blah"), lambda x, y, z: None) + assert message in caplog.text + caplog.clear() + + async_track_template(hass, Template("blah", hass), lambda x, y, z: None) + assert message not in caplog.text + caplog.clear() + + +async def test_async_track_template_result_no_hass_deprecated( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test async_track_template_result with a template without hass is deprecated.""" + message = ( + "Detected code that calls async_track_template_result with template without " + "hass. This will stop working in Home Assistant 2025.10, please " + "report this issue" + ) + + async_track_template_result( + hass, [TrackTemplate(Template("blah"), None)], lambda x, y, z: None + ) + assert message in caplog.text + caplog.clear() + + async_track_template_result( + hass, [TrackTemplate(Template("blah", hass), None)], lambda x, y, z: None + ) + assert message not in caplog.text + caplog.clear() diff --git a/tests/helpers/test_floor_registry.py b/tests/helpers/test_floor_registry.py index c39ac3c40b4..6a672399522 100644 --- a/tests/helpers/test_floor_registry.py +++ b/tests/helpers/test_floor_registry.py @@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import area_registry as ar, floor_registry as fr from homeassistant.util.dt import utcnow -from tests.common import ANY, async_capture_events, flush_store +from tests.common import async_capture_events, flush_store async def test_list_floors(floor_registry: fr.FloorRegistry) -> None: @@ -43,7 +43,6 @@ async def test_create_floor( level=1, created_at=utcnow(), modified_at=utcnow(), - normalized_name=ANY, ) assert len(floor_registry.floors) == 1 @@ -145,7 +144,6 @@ async def test_update_floor( level=None, created_at=created_at, modified_at=created_at, - normalized_name=ANY, ) assert len(floor_registry.floors) == 1 @@ -169,7 +167,6 @@ async def test_update_floor( level=2, created_at=created_at, modified_at=modified_at, - normalized_name=ANY, ) assert len(floor_registry.floors) == 1 diff --git a/tests/helpers/test_frame.py b/tests/helpers/test_frame.py index b3fbb0faaf4..fb98111fd42 100644 --- a/tests/helpers/test_frame.py +++ b/tests/helpers/test_frame.py @@ -1,11 +1,13 @@ """Test the frame helper.""" +from typing import Any from unittest.mock import ANY, Mock, patch import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import frame +from homeassistant.loader import async_get_integration from tests.common import extract_stack_to_frame @@ -156,6 +158,97 @@ async def test_get_integration_logger_no_integration( assert logger.name == __name__ +@pytest.mark.parametrize( + ("integration_frame_path", "keywords", "expected_error", "expected_log"), + [ + pytest.param( + "homeassistant/test_core", + {}, + True, + 0, + id="core default", + ), + pytest.param( + "homeassistant/components/test_core_integration", + 
{}, + False, + 1, + id="core integration default", + ), + pytest.param( + "custom_components/test_custom_integration", + {}, + False, + 1, + id="custom integration default", + ), + pytest.param( + "custom_components/test_custom_integration", + {"custom_integration_behavior": frame.ReportBehavior.IGNORE}, + False, + 0, + id="custom integration ignore", + ), + pytest.param( + "custom_components/test_custom_integration", + {"custom_integration_behavior": frame.ReportBehavior.ERROR}, + True, + 1, + id="custom integration error", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"core_integration_behavior": frame.ReportBehavior.IGNORE}, + False, + 0, + id="core_integration_behavior ignore", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"core_integration_behavior": frame.ReportBehavior.ERROR}, + True, + 1, + id="core_integration_behavior error", + ), + pytest.param( + "homeassistant/test_integration_frame", + {"core_behavior": frame.ReportBehavior.IGNORE}, + False, + 0, + id="core_behavior ignore", + ), + pytest.param( + "homeassistant/test_integration_frame", + {"core_behavior": frame.ReportBehavior.LOG}, + False, + 1, + id="core_behavior log", + ), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_report_usage( + caplog: pytest.LogCaptureFixture, + keywords: dict[str, Any], + expected_error: bool, + expected_log: int, +) -> None: + """Test report.""" + + what = "test_report_string" + + errored = False + try: + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report_usage(what, **keywords) + except RuntimeError: + errored = True + + assert errored == expected_error + + assert caplog.text.count(what) == expected_log + + @patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_prevent_flooding( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock @@ -169,8 +262,8 @@ async def test_prevent_flooding( expected_message = ( f"Detected that integration '{integration}' {what} at {filename}, line " - f"{mock_integration_frame.lineno}: {mock_integration_frame.line}, " - f"please create a bug report at https://github.com/home-assistant/core/issues?" + f"{mock_integration_frame.lineno}: {mock_integration_frame.line}. " + f"Please create a bug report at https://github.com/home-assistant/core/issues?" f"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+{integration}%22" ) @@ -187,6 +280,28 @@ async def test_prevent_flooding( assert len(frame._REPORTED_INTEGRATIONS) == 1 +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_breaks_in_ha_version( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock +) -> None: + """Test to ensure a report is only written once to the log.""" + + what = "accessed hi instead of hello" + integration = "hue" + filename = "homeassistant/components/hue/light.py" + + expected_message = ( + f"Detected that integration '{integration}' {what} at {filename}, line " + f"{mock_integration_frame.lineno}: {mock_integration_frame.line}. " + f"This will stop working in Home Assistant 2024.11, please create a bug " + "report at https://github.com/home-assistant/core/issues?" 
+ f"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+{integration}%22" + ) + + frame.report_usage(what, breaks_in_ha_version="2024.11") + assert expected_message in caplog.text + + async def test_report_missing_integration_frame( caplog: pytest.LogCaptureFixture, ) -> None: @@ -247,3 +362,173 @@ async def test_report_error_if_integration( ), ): frame.report("did a bad thing", error_if_integration=True) + + +@pytest.mark.parametrize( + ("integration_frame_path", "keywords", "expected_error", "expected_log"), + [ + pytest.param( + "homeassistant/test_core", + {}, + True, + 0, + id="core default", + ), + pytest.param( + "homeassistant/components/test_core_integration", + {}, + False, + 1, + id="core integration default", + ), + pytest.param( + "custom_components/test_custom_integration", + {}, + False, + 1, + id="custom integration default", + ), + pytest.param( + "custom_components/test_integration_frame", + {"log_custom_component_only": True}, + False, + 1, + id="log_custom_component_only with custom integration", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"log_custom_component_only": True}, + False, + 0, + id="log_custom_component_only with core integration", + ), + pytest.param( + "homeassistant/test_integration_frame", + {"error_if_core": False}, + False, + 1, + id="disable error_if_core", + ), + pytest.param( + "custom_components/test_integration_frame", + {"error_if_integration": True}, + True, + 1, + id="error_if_integration with custom integration", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"error_if_integration": True}, + True, + 1, + id="error_if_integration with core integration", + ), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_report( + caplog: pytest.LogCaptureFixture, + keywords: dict[str, Any], + expected_error: bool, + expected_log: int, +) -> None: + """Test report.""" + + what = "test_report_string" + + errored = False + try: + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report(what, **keywords) + except RuntimeError: + errored = True + + assert errored == expected_error + + assert caplog.text.count(what) == expected_log + + +@pytest.mark.parametrize( + ("behavior", "integration_domain", "source", "logs_again"), + [ + pytest.param( + "core_behavior", + None, + "code that", + True, + id="core", + ), + pytest.param( + "core_behavior", + "unknown_integration", + "code that", + True, + id="unknown integration", + ), + pytest.param( + "core_integration_behavior", + "sensor", + "that integration 'sensor'", + False, + id="core integration", + ), + pytest.param( + "custom_integration_behavior", + "test_package", + "that custom integration 'test_package'", + False, + id="custom integration", + ), + ], +) +@pytest.mark.usefixtures("enable_custom_integrations") +async def test_report_integration_domain( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + behavior: str, + integration_domain: str | None, + source: str, + logs_again: bool, +) -> None: + """Test report.""" + await async_get_integration(hass, "sensor") + await async_get_integration(hass, "test_package") + + what = "test_report_string" + lookup_text = f"Detected {source} {what}" + + caplog.clear() + frame.report_usage( + what, + **{behavior: frame.ReportBehavior.IGNORE}, + integration_domain=integration_domain, + ) + + assert lookup_text not in caplog.text + + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report_usage( + what, + **{behavior: frame.ReportBehavior.LOG}, + 
integration_domain=integration_domain, + ) + + assert lookup_text in caplog.text + + # Check that it does not log again + caplog.clear() + frame.report_usage( + what, + **{behavior: frame.ReportBehavior.LOG}, + integration_domain=integration_domain, + ) + + assert (lookup_text in caplog.text) == logs_again + + # Check that it raises + with pytest.raises(RuntimeError, match=lookup_text): + frame.report_usage( + what, + **{behavior: frame.ReportBehavior.ERROR}, + integration_domain=integration_domain, + ) diff --git a/tests/helpers/test_httpx_client.py b/tests/helpers/test_httpx_client.py index ccfccb3d698..684778fe1b1 100644 --- a/tests/helpers/test_httpx_client.py +++ b/tests/helpers/test_httpx_client.py @@ -138,8 +138,8 @@ async def test_warning_close_session_integration( assert ( "Detected that integration 'hue' closes the Home Assistant httpx client at " - "homeassistant/components/hue/light.py, line 23: await session.aclose(), " - "please create a bug report at https://github.com/home-assistant/core/issues?" + "homeassistant/components/hue/light.py, line 23: await session.aclose(). " + "Please create a bug report at https://github.com/home-assistant/core/issues?" "q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" ) in caplog.text @@ -182,6 +182,6 @@ async def test_warning_close_session_custom( await httpx_session.aclose() assert ( "Detected that custom integration 'hue' closes the Home Assistant httpx client " - "at custom_components/hue/light.py, line 23: await session.aclose(), " - "please report it to the author of the 'hue' custom integration" + "at custom_components/hue/light.py, line 23: await session.aclose(). " + "Please report it to the author of the 'hue' custom integration" ) in caplog.text diff --git a/tests/helpers/test_icon.py b/tests/helpers/test_icon.py index 732f9971ac0..ad5c852ded9 100644 --- a/tests/helpers/test_icon.py +++ b/tests/helpers/test_icon.py @@ -25,12 +25,8 @@ def test_battery_icon() -> None: iconbase = "mdi:battery" for level in range(0, 100, 5): print( # noqa: T201 - "Level: %d. icon: %s, charging: %s" - % ( - level, - icon.icon_for_battery_level(level, False), - icon.icon_for_battery_level(level, True), - ) + f"Level: {level}. 
icon: {icon.icon_for_battery_level(level, False)}, " + f"charging: {icon.icon_for_battery_level(level, True)}" ) if level <= 10: postfix_charging = "-outline" @@ -101,7 +97,7 @@ async def test_get_icons(hass: HomeAssistant) -> None: # Test services icons are available icons = await icon.async_get_icons(hass, "services") assert len(icons) == 1 - assert icons["switch"]["turn_off"] == "mdi:toggle-switch-variant-off" + assert icons["switch"]["turn_off"] == {"service": "mdi:toggle-switch-variant-off"} # Ensure icons file for platform isn't loaded, as that isn't supported icons = await icon.async_get_icons(hass, "entity") @@ -126,7 +122,7 @@ async def test_get_icons(hass: HomeAssistant) -> None: icons = await icon.async_get_icons(hass, "services") assert len(icons) == 2 - assert icons["test_package"]["enable_god_mode"] == "mdi:shield" + assert icons["test_package"]["enable_god_mode"] == {"service": "mdi:shield"} # Load another one hass.config.components.add("test_embedded") diff --git a/tests/helpers/test_json.py b/tests/helpers/test_json.py index 123731de68d..94f21da1781 100644 --- a/tests/helpers/test_json.py +++ b/tests/helpers/test_json.py @@ -18,6 +18,7 @@ from homeassistant.helpers.json import ( ExtendedJSONEncoder, JSONEncoder as DefaultHASSJSONEncoder, find_paths_unserializable_data, + json_bytes_sorted, json_bytes_strip_null, json_dumps, json_dumps_sorted, @@ -107,6 +108,14 @@ def test_json_dumps_sorted() -> None: ) +def test_json_bytes_sorted() -> None: + """Test the json bytes sorted function.""" + data = {"c": 3, "a": 1, "b": 2} + assert json_bytes_sorted(data) == json.dumps( + data, sort_keys=True, separators=(",", ":") + ).encode("utf-8") + + def test_json_dumps_float_subclass() -> None: """Test the json dumps a float subclass.""" diff --git a/tests/helpers/test_label_registry.py b/tests/helpers/test_label_registry.py index f466edad874..ca1d4ac6fd3 100644 --- a/tests/helpers/test_label_registry.py +++ b/tests/helpers/test_label_registry.py @@ -16,7 +16,7 @@ from homeassistant.helpers import ( ) from homeassistant.util.dt import utcnow -from tests.common import ANY, MockConfigEntry, async_capture_events, flush_store +from tests.common import MockConfigEntry, async_capture_events, flush_store async def test_list_labels(label_registry: lr.LabelRegistry) -> None: @@ -46,7 +46,6 @@ async def test_create_label( description="This label is for testing", created_at=utcnow(), modified_at=utcnow(), - normalized_name=ANY, ) assert len(label_registry.labels) == 1 @@ -147,7 +146,6 @@ async def test_update_label( description=None, created_at=created_at, modified_at=created_at, - normalized_name=ANY, ) modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") @@ -169,7 +167,6 @@ async def test_update_label( description="Updated description", created_at=created_at, modified_at=modified_at, - normalized_name=ANY, ) assert len(label_registry.labels) == 1 diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index 4d14abb9819..3787526c433 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -306,6 +306,7 @@ async def test_assist_api_tools( "HassSetPosition", "HassStartTimer", "HassCancelTimer", + "HassCancelAllTimers", "HassIncreaseTimer", "HassDecreaseTimer", "HassPauseTimer", @@ -374,11 +375,16 @@ async def test_assist_api_prompt( "beer": {"description": "Number of beers"}, "wine": {}, }, - } + }, + "script_with_no_fields": { + "description": "This is another test script", + "sequence": [], + }, } }, ) async_expose_entity(hass, "conversation", 
"script.test_script", True) + async_expose_entity(hass, "conversation", "script.script_with_no_fields", True) entry = MockConfigEntry(title=None) entry.add_to_hass(hass) @@ -646,7 +652,10 @@ async def test_script_tool( "script": { "test_script": { "description": "This is a test script", - "sequence": [], + "sequence": [ + {"variables": {"result": {"drinks": 2}}}, + {"stop": True, "response_variable": "result"}, + ], "fields": { "beer": {"description": "Number of beers", "required": True}, "wine": {"selector": {"number": {"min": 0, "max": 3}}}, @@ -657,6 +666,10 @@ async def test_script_tool( "extra_field": {"selector": {"area": {}}}, }, }, + "script_with_no_fields": { + "description": "This is another test script", + "sequence": [], + }, "unexposed_script": { "sequence": [], }, @@ -664,6 +677,7 @@ async def test_script_tool( }, ) async_expose_entity(hass, "conversation", "script.test_script", True) + async_expose_entity(hass, "conversation", "script.script_with_no_fields", True) entity_registry.async_update_entity( "script.test_script", name="script name", aliases={"script alias"} @@ -677,7 +691,7 @@ async def test_script_tool( api = await llm.async_get_api(hass, "assist", llm_context) tools = [tool for tool in api.tools if isinstance(tool, llm.ScriptTool)] - assert len(tools) == 1 + assert len(tools) == 2 tool = tools[0] assert tool.name == "test_script" @@ -700,9 +714,11 @@ async def test_script_tool( "test_script": ( "This is a test script. Aliases: ['script name', 'script alias']", vol.Schema(schema), - ) + ), + "script_with_no_fields": ("This is another test script", vol.Schema({})), } + # Test script with response tool_input = llm.ToolInput( tool_name="test_script", tool_args={ @@ -715,26 +731,56 @@ async def test_script_tool( }, ) - with patch("homeassistant.core.ServiceRegistry.async_call") as mock_service_call: + with patch( + "homeassistant.core.ServiceRegistry.async_call", + side_effect=hass.services.async_call, + ) as mock_service_call: response = await api.async_call_tool(tool_input) mock_service_call.assert_awaited_once_with( "script", - "turn_on", + "test_script", { - "entity_id": "script.test_script", - "variables": { - "beer": "3", - "wine": 0, - "where": area.id, - "area_list": [area.id], - "floor": floor.floor_id, - "floor_list": [floor.floor_id], - }, + "beer": "3", + "wine": 0, + "where": area.id, + "area_list": [area.id], + "floor": floor.floor_id, + "floor_list": [floor.floor_id], }, context=context, + blocking=True, + return_response=True, ) - assert response == {"success": True} + assert response == { + "success": True, + "result": {"drinks": 2}, + } + + # Test script with no response + tool_input = llm.ToolInput( + tool_name="script_with_no_fields", + tool_args={}, + ) + + with patch( + "homeassistant.core.ServiceRegistry.async_call", + side_effect=hass.services.async_call, + ) as mock_service_call: + response = await api.async_call_tool(tool_input) + + mock_service_call.assert_awaited_once_with( + "script", + "script_with_no_fields", + {}, + context=context, + blocking=True, + return_response=True, + ) + assert response == { + "success": True, + "result": {}, + } # Test reload script with new parameters config = { @@ -766,7 +812,7 @@ async def test_script_tool( api = await llm.async_get_api(hass, "assist", llm_context) tools = [tool for tool in api.tools if isinstance(tool, llm.ScriptTool)] - assert len(tools) == 1 + assert len(tools) == 2 tool = tools[0] assert tool.name == "test_script" @@ -781,7 +827,8 @@ async def test_script_tool( "test_script": ( "This is a 
new test script. Aliases: ['script name', 'script alias']", vol.Schema(schema), - ) + ), + "script_with_no_fields": ("This is another test script", vol.Schema({})), } diff --git a/tests/helpers/test_network.py b/tests/helpers/test_network.py index 3c9594bca38..3064b215f2f 100644 --- a/tests/helpers/test_network.py +++ b/tests/helpers/test_network.py @@ -2,11 +2,14 @@ from unittest.mock import Mock, patch +from aiohttp import hdrs +from multidict import CIMultiDict, CIMultiDictProxy import pytest +from yarl import URL from homeassistant.components import cloud -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers.network import ( NoURLAvailableError, _get_cloud_url, @@ -584,19 +587,82 @@ async def test_get_url(hass: HomeAssistant) -> None: assert get_url(hass, allow_internal=False) -async def test_get_request_host(hass: HomeAssistant) -> None: +async def test_get_request_host_with_port(hass: HomeAssistant) -> None: """Test getting the host of the current web request from the request context.""" with pytest.raises(NoURLAvailableError): _get_request_host() with patch("homeassistant.components.http.current_request") as mock_request_context: mock_request = Mock() - mock_request.url = "http://example.com:8123/test/request" + mock_request.headers = CIMultiDictProxy( + CIMultiDict({hdrs.HOST: "example.com:8123"}) + ) + mock_request.url = URL("http://example.com:8123/test/request") + mock_request.host = "example.com:8123" mock_request_context.get = Mock(return_value=mock_request) assert _get_request_host() == "example.com" +async def test_get_request_host_without_port(hass: HomeAssistant) -> None: + """Test getting the host of the current web request from the request context.""" + with pytest.raises(NoURLAvailableError): + _get_request_host() + + with patch("homeassistant.components.http.current_request") as mock_request_context: + mock_request = Mock() + mock_request.headers = CIMultiDictProxy(CIMultiDict({hdrs.HOST: "example.com"})) + mock_request.url = URL("http://example.com/test/request") + mock_request.host = "example.com" + mock_request_context.get = Mock(return_value=mock_request) + + assert _get_request_host() == "example.com" + + +async def test_get_request_ipv6_address(hass: HomeAssistant) -> None: + """Test getting the ipv6 host of the current web request from the request context.""" + with pytest.raises(NoURLAvailableError): + _get_request_host() + + with patch("homeassistant.components.http.current_request") as mock_request_context: + mock_request = Mock() + mock_request.headers = CIMultiDictProxy(CIMultiDict({hdrs.HOST: "[::1]:8123"})) + mock_request.url = URL("http://[::1]:8123/test/request") + mock_request.host = "[::1]:8123" + mock_request_context.get = Mock(return_value=mock_request) + + assert _get_request_host() == "::1" + + +async def test_get_request_ipv6_address_without_port(hass: HomeAssistant) -> None: + """Test getting the ipv6 host of the current web request from the request context.""" + with pytest.raises(NoURLAvailableError): + _get_request_host() + + with patch("homeassistant.components.http.current_request") as mock_request_context: + mock_request = Mock() + mock_request.headers = CIMultiDictProxy(CIMultiDict({hdrs.HOST: "[::1]"})) + mock_request.url = URL("http://[::1]/test/request") + mock_request.host = "[::1]" + mock_request_context.get = Mock(return_value=mock_request) + + assert _get_request_host() == "::1" + + 
+async def test_get_request_host_no_host_header(hass: HomeAssistant) -> None: + """Test getting the host of the current web request from the request context.""" + with pytest.raises(NoURLAvailableError): + _get_request_host() + + with patch("homeassistant.components.http.current_request") as mock_request_context: + mock_request = Mock() + mock_request.headers = CIMultiDictProxy(CIMultiDict()) + mock_request.url = URL("/test/request") + mock_request_context.get = Mock(return_value=mock_request) + + assert _get_request_host() is None + + @patch("homeassistant.components.hassio.is_hassio", Mock(return_value=True)) @patch( "homeassistant.components.hassio.get_host_info", @@ -661,7 +727,7 @@ async def test_get_current_request_url_with_known_host( @patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.helpers.network.is_hassio", Mock(return_value={"hostname": "homeassistant"}), ) @patch( @@ -682,10 +748,20 @@ async def test_is_internal_request(hass: HomeAssistant, mock_current_request) -> mock_current_request.return_value = None assert not is_internal_request(hass) - mock_current_request.return_value = Mock(url="http://example.local:8123") + mock_current_request.return_value = Mock( + headers=CIMultiDictProxy(CIMultiDict({hdrs.HOST: "example.local:8123"})), + host="example.local:8123", + url=URL("http://example.local:8123"), + ) assert is_internal_request(hass) - mock_current_request.return_value = Mock(url="http://no_match.example.local:8123") + mock_current_request.return_value = Mock( + headers=CIMultiDictProxy( + CIMultiDict({hdrs.HOST: "no_match.example.local:8123"}) + ), + host="no_match.example.local:8123", + url=URL("http://no_match.example.local:8123"), + ) assert not is_internal_request(hass) # Test with internal URL: http://192.168.0.1:8123 @@ -697,18 +773,30 @@ async def test_is_internal_request(hass: HomeAssistant, mock_current_request) -> assert hass.config.internal_url == "http://192.168.0.1:8123" assert not is_internal_request(hass) - mock_current_request.return_value = Mock(url="http://192.168.0.1:8123") + mock_current_request.return_value = Mock( + headers=CIMultiDictProxy(CIMultiDict({hdrs.HOST: "192.168.0.1:8123"})), + host="192.168.0.1:8123", + url=URL("http://192.168.0.1:8123"), + ) assert is_internal_request(hass) # Test for matching against local IP hass.config.api = Mock(use_ssl=False, local_ip="192.168.123.123", port=8123) for allowed in ("127.0.0.1", "192.168.123.123"): - mock_current_request.return_value = Mock(url=f"http://{allowed}:8123") + mock_current_request.return_value = Mock( + headers=CIMultiDictProxy(CIMultiDict({hdrs.HOST: f"{allowed}:8123"})), + host=f"{allowed}:8123", + url=URL(f"http://{allowed}:8123"), + ) assert is_internal_request(hass), mock_current_request.return_value.url # Test for matching against HassOS hostname for allowed in ("hellohost", "hellohost.local"): - mock_current_request.return_value = Mock(url=f"http://{allowed}:8123") + mock_current_request.return_value = Mock( + headers=CIMultiDictProxy(CIMultiDict({hdrs.HOST: f"{allowed}:8123"})), + host=f"{allowed}:8123", + url=URL(f"http://{allowed}:8123"), + ) assert is_internal_request(hass), mock_current_request.return_value.url diff --git a/tests/helpers/test_normalized_name_base_registry.py b/tests/helpers/test_normalized_name_base_registry.py index 9783e64eeff..4795c759f9f 100644 --- a/tests/helpers/test_normalized_name_base_registry.py +++ b/tests/helpers/test_normalized_name_base_registry.py @@ -26,18 +26,14 @@ def test_registry_items( registry_items: 
NormalizedNameBaseRegistryItems[NormalizedNameBaseRegistryEntry], ) -> None: """Test registry items.""" - entry = NormalizedNameBaseRegistryEntry( - name="Hello World", normalized_name="helloworld" - ) + entry = NormalizedNameBaseRegistryEntry(name="Hello World") registry_items["key"] = entry assert registry_items["key"] == entry assert list(registry_items.values()) == [entry] assert registry_items.get_by_name("Hello World") == entry # test update entry - entry2 = NormalizedNameBaseRegistryEntry( - name="Hello World 2", normalized_name="helloworld2" - ) + entry2 = NormalizedNameBaseRegistryEntry(name="Hello World 2") registry_items["key"] = entry2 assert registry_items["key"] == entry2 assert list(registry_items.values()) == [entry2] @@ -53,16 +49,12 @@ def test_key_already_in_use( registry_items: NormalizedNameBaseRegistryItems[NormalizedNameBaseRegistryEntry], ) -> None: """Test key already in use.""" - entry = NormalizedNameBaseRegistryEntry( - name="Hello World", normalized_name="helloworld" - ) + entry = NormalizedNameBaseRegistryEntry(name="Hello World") registry_items["key"] = entry # should raise ValueError if we update a # key with a entry with the same normalized name - entry = NormalizedNameBaseRegistryEntry( - name="Hello World 2", normalized_name="helloworld2" - ) + entry = NormalizedNameBaseRegistryEntry(name="Hello World 2") registry_items["key2"] = entry with pytest.raises(ValueError): registry_items["key"] = entry diff --git a/tests/helpers/test_restore_state.py b/tests/helpers/test_restore_state.py index 865ee5efaf7..7adb3dd5b5e 100644 --- a/tests/helpers/test_restore_state.py +++ b/tests/helpers/test_restore_state.py @@ -6,8 +6,6 @@ import logging from typing import Any from unittest.mock import Mock, patch -import pytest - from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP from homeassistant.core import CoreState, HomeAssistant, State from homeassistant.exceptions import HomeAssistantError @@ -94,20 +92,6 @@ async def test_caching_data(hass: HomeAssistant) -> None: assert mock_write_data.called -async def test_async_get_instance_backwards_compatibility(hass: HomeAssistant) -> None: - """Test async_get_instance backwards compatibility.""" - await async_load(hass) - data = async_get(hass) - # When called from core it should raise - with pytest.raises(RuntimeError): - await RestoreStateData.async_get_instance(hass) - - # When called from a component it should not raise - # but it should report - with patch("homeassistant.helpers.restore_state.report"): - assert data is await RestoreStateData.async_get_instance(hass) - - async def test_periodic_write(hass: HomeAssistant) -> None: """Test that we write periodiclly but not after stop.""" data = async_get(hass) diff --git a/tests/helpers/test_schema_config_entry_flow.py b/tests/helpers/test_schema_config_entry_flow.py index 877e3762d3b..e67525253bc 100644 --- a/tests/helpers/test_schema_config_entry_flow.py +++ b/tests/helpers/test_schema_config_entry_flow.py @@ -648,6 +648,10 @@ async def test_options_flow_state(hass: HomeAssistant) -> None: options_handler = hass.config_entries.options._progress[result["flow_id"]] assert options_handler._common_handler.flow_state == {"idx": None} + # Ensure that self.options and self._common_handler.options refer to the + # same mutable copy of the options + assert options_handler.options is options_handler._common_handler.options + # In step 1, flow state is updated with user input result = await hass.config_entries.options.async_configure( 
result["flow_id"], {"option1": "blublu"} diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 1bc33140124..c438e333ae6 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -943,18 +943,9 @@ async def test_wait_basic(hass: HomeAssistant, action_type) -> None: assert not script_obj.is_running assert script_obj.last_action is None - if action_type == "template": - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"completed": True, "remaining": None}}, - "variables": {"wait": {"completed": True, "remaining": None}}, - } - ], - } - ) - else: + expected_var = {"completed": True, "remaining": None} + + if action_type == "trigger": expected_trigger = { "alias": None, "attribute": None, @@ -967,23 +958,18 @@ async def test_wait_basic(hass: HomeAssistant, action_type) -> None: "platform": "state", "to_state": ANY, } - assert_action_trace( - { - "0": [ - { - "result": { - "wait": { - "trigger": expected_trigger, - "remaining": None, - } - }, - "variables": { - "wait": {"remaining": None, "trigger": expected_trigger} - }, - } - ], - } - ) + expected_var["trigger"] = expected_trigger + + assert_action_trace( + { + "0": [ + { + "result": {"wait": expected_var}, + "variables": {"wait": expected_var}, + } + ], + } + ) async def test_wait_for_trigger_variables(hass: HomeAssistant) -> None: @@ -1059,28 +1045,21 @@ async def test_wait_basic_times_out(hass: HomeAssistant, action_type) -> None: assert timed_out - if action_type == "template": - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"completed": False, "remaining": None}}, - "variables": {"wait": {"completed": False, "remaining": None}}, - } - ], - } - ) - else: - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"trigger": None, "remaining": None}}, - "variables": {"wait": {"remaining": None, "trigger": None}}, - } - ], - } - ) + expected_var = {"completed": False, "remaining": None} + + if action_type == "trigger": + expected_var["trigger"] = None + + assert_action_trace( + { + "0": [ + { + "result": {"wait": expected_var}, + "variables": {"wait": expected_var}, + } + ], + } + ) @pytest.mark.parametrize("action_type", ["template", "trigger"]) @@ -1183,30 +1162,22 @@ async def test_cancel_wait(hass: HomeAssistant, action_type) -> None: assert not script_obj.is_running assert len(events) == 0 - if action_type == "template": - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"completed": False, "remaining": None}}, - "variables": {"wait": {"completed": False, "remaining": None}}, - } - ], - }, - expected_script_execution="cancelled", - ) - else: - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"trigger": None, "remaining": None}}, - "variables": {"wait": {"remaining": None, "trigger": None}}, - } - ], - }, - expected_script_execution="cancelled", - ) + expected_var = {"completed": False, "remaining": None} + + if action_type == "trigger": + expected_var["trigger"] = None + + assert_action_trace( + { + "0": [ + { + "result": {"wait": expected_var}, + "variables": {"wait": expected_var}, + } + ], + }, + expected_script_execution="cancelled", + ) async def test_wait_template_not_schedule(hass: HomeAssistant) -> None: @@ -1294,10 +1265,11 @@ async def test_wait_timeout( assert len(events) == 1 assert "(timeout: 0:00:05)" in caplog.text - if action_type == "template": - variable_wait = {"wait": {"completed": False, "remaining": 0.0}} - else: - variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + variable_wait = {"wait": {"completed": 
False, "remaining": 0.0}} + + if action_type == "trigger": + variable_wait["wait"]["trigger"] = None + expected_trace = { "0": [ { @@ -1345,7 +1317,7 @@ async def test_wait_trigger_with_zero_timeout( assert len(events) == 1 assert "(timeout: 0:00:00)" in caplog.text - variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + variable_wait = {"wait": {"completed": False, "trigger": None, "remaining": 0.0}} expected_trace = { "0": [ { @@ -1393,7 +1365,7 @@ async def test_wait_trigger_matches_with_zero_timeout( assert len(events) == 1 assert "(timeout: 0:00:00)" in caplog.text - variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + variable_wait = {"wait": {"completed": False, "trigger": None, "remaining": 0.0}} expected_trace = { "0": [ { @@ -1533,12 +1505,11 @@ async def test_wait_continue_on_timeout( assert not script_obj.is_running assert len(events) == n_events - if action_type == "template": - result_wait = {"wait": {"completed": False, "remaining": 0.0}} - variable_wait = dict(result_wait) - else: - result_wait = {"wait": {"trigger": None, "remaining": 0.0}} - variable_wait = dict(result_wait) + result_wait = {"wait": {"completed": False, "remaining": 0.0}} + if action_type == "trigger": + result_wait["wait"]["trigger"] = None + + variable_wait = dict(result_wait) expected_trace = { "0": [{"result": result_wait, "variables": variable_wait}], } @@ -1766,8 +1737,12 @@ async def test_wait_for_trigger_bad( { "0": [ { - "result": {"wait": {"trigger": None, "remaining": None}}, - "variables": {"wait": {"remaining": None, "trigger": None}}, + "result": { + "wait": {"completed": False, "trigger": None, "remaining": None} + }, + "variables": { + "wait": {"completed": False, "remaining": None, "trigger": None} + }, } ], } @@ -1807,8 +1782,12 @@ async def test_wait_for_trigger_generated_exception( { "0": [ { - "result": {"wait": {"trigger": None, "remaining": None}}, - "variables": {"wait": {"remaining": None, "trigger": None}}, + "result": { + "wait": {"completed": False, "trigger": None, "remaining": None} + }, + "variables": { + "wait": {"completed": False, "remaining": None, "trigger": None} + }, } ], } @@ -3717,11 +3696,18 @@ async def test_parallel(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - { "result": { "wait": { + "completed": True, + "remaining": None, + "trigger": expected_trigger, + } + }, + "variables": { + "wait": { + "completed": True, "remaining": None, "trigger": expected_trigger, } }, - "variables": {"wait": {"remaining": None, "trigger": expected_trigger}}, } ], "0/parallel/1/sequence/0": [ @@ -5646,6 +5632,91 @@ async def test_stop_action_subscript( ) +@pytest.mark.parametrize( + ("var", "response"), + [(1, "If: Then"), (2, "Testing 123")], +) +async def test_stop_action_response_variables( + hass: HomeAssistant, + var: int, + response: str, +) -> None: + """Test setting stop response_variable in a subscript.""" + sequence = cv.SCRIPT_SCHEMA( + [ + {"variables": {"output": {"value": "Testing 123"}}}, + { + "if": { + "condition": "template", + "value_template": "{{ var == 1 }}", + }, + "then": [ + {"variables": {"output": {"value": "If: Then"}}}, + {"stop": "In the name of love", "response_variable": "output"}, + ], + }, + {"stop": "In the name of love", "response_variable": "output"}, + ] + ) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + + run_vars = MappingProxyType({"var": var}) + result = await script_obj.async_run(run_vars, context=Context()) + assert result.service_response == {"value": response} + + 
+@pytest.mark.parametrize( + ("var", "if_result", "choice", "response"), + [(1, True, "then", "If: Then"), (2, False, "else", "If: Else")], +) +async def test_stop_action_nested_response_variables( + hass: HomeAssistant, + var: int, + if_result: bool, + choice: str, + response: str, +) -> None: + """Test setting stop response_variable in a subscript.""" + sequence = cv.SCRIPT_SCHEMA( + [ + {"variables": {"output": {"value": "Testing 123"}}}, + { + "if": { + "condition": "template", + "value_template": "{{ var == 1 }}", + }, + "then": [ + {"variables": {"output": {"value": "If: Then"}}}, + {"stop": "In the name of love", "response_variable": "output"}, + ], + "else": [ + {"variables": {"output": {"value": "If: Else"}}}, + {"stop": "In the name of love", "response_variable": "output"}, + ], + }, + ] + ) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + + run_vars = MappingProxyType({"var": var}) + result = await script_obj.async_run(run_vars, context=Context()) + assert result.service_response == {"value": response} + + expected_trace = { + "0": [ + { + "variables": {"var": var, "output": {"value": "Testing 123"}}, + } + ], + "1": [{"result": {"choice": choice}}], + "1/if": [{"result": {"result": if_result}}], + "1/if/condition/0": [{"result": {"result": var == 1, "entities": []}}], + f"1/{choice}/0": [{"variables": {"output": {"value": response}}}], + f"1/{choice}/1": [{"result": {"stop": "In the name of love", "error": False}}], + } + assert_action_trace(expected_trace) + + async def test_stop_action_with_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/helpers/test_selector.py b/tests/helpers/test_selector.py index de8c3555831..f73808a0625 100644 --- a/tests/helpers/test_selector.py +++ b/tests/helpers/test_selector.py @@ -1,6 +1,7 @@ """Test selectors.""" from enum import Enum +from typing import Any import pytest import voluptuous as vol @@ -1107,6 +1108,13 @@ def test_condition_selector_schema( ( {}, ( + [ + { + "platform": "numeric_state", + "entity_id": ["sensor.temperature"], + "below": 20, + } + ], [ { "platform": "numeric_state", @@ -1122,7 +1130,24 @@ def test_condition_selector_schema( ) def test_trigger_selector_schema(schema, valid_selections, invalid_selections) -> None: """Test trigger sequence selector.""" - _test_selector("trigger", schema, valid_selections, invalid_selections) + + def _custom_trigger_serializer( + triggers: list[dict[str, Any]], + ) -> list[dict[str, Any]]: + res = [] + for trigger in triggers: + if "trigger" in trigger: + trigger["platform"] = trigger.pop("trigger") + res.append(trigger) + return res + + _test_selector( + "trigger", + schema, + valid_selections, + invalid_selections, + _custom_trigger_serializer, + ) @pytest.mark.parametrize( diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index 81cc189e1af..6d03e09cdf7 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -39,7 +39,6 @@ from homeassistant.helpers import ( device_registry as dr, entity_registry as er, service, - template, ) import homeassistant.helpers.config_validation as cv from homeassistant.loader import async_get_integration @@ -120,7 +119,6 @@ def floor_area_mock(hass: HomeAssistant) -> None: id="test-area", name="Test area", aliases={}, - normalized_name="test-area", floor_id="test-floor", icon=None, picture=None, @@ -129,7 +127,6 @@ def floor_area_mock(hass: HomeAssistant) -> None: id="area-a", name="Area A", aliases={}, - normalized_name="area-a", 
floor_id="floor-a", icon=None, picture=None, @@ -283,7 +280,6 @@ def label_mock(hass: HomeAssistant) -> None: id="area-with-labels", name="Area with labels", aliases={}, - normalized_name="with_labels", floor_id=None, icon=None, labels={"label_area"}, @@ -293,7 +289,6 @@ def label_mock(hass: HomeAssistant) -> None: id="area-no-labels", name="Area without labels", aliases={}, - normalized_name="without_labels", floor_id=None, icon=None, labels=set(), @@ -352,6 +347,13 @@ def label_mock(hass: HomeAssistant) -> None: platform="test", device_id=device_has_label1.id, ) + entity_with_label1_from_device_and_different_area = er.RegistryEntry( + entity_id="light.with_label1_from_device_diff_area", + unique_id="with_label1_from_device_diff_area", + platform="test", + device_id=device_has_label1.id, + area_id=area_without_labels.id, + ) entity_with_label1_and_label2_from_device = er.RegistryEntry( entity_id="light.with_label1_and_label2_from_device", unique_id="with_label1_and_label2_from_device", @@ -378,6 +380,7 @@ def label_mock(hass: HomeAssistant) -> None: config_entity_with_my_label.entity_id: config_entity_with_my_label, entity_with_label1_and_label2_from_device.entity_id: entity_with_label1_and_label2_from_device, entity_with_label1_from_device.entity_id: entity_with_label1_from_device, + entity_with_label1_from_device_and_different_area.entity_id: entity_with_label1_from_device_and_different_area, entity_with_labels_from_device.entity_id: entity_with_labels_from_device, entity_with_my_label.entity_id: entity_with_my_label, entity_with_no_labels.entity_id: entity_with_no_labels, @@ -565,9 +568,6 @@ async def test_not_mutate_input(hass: HomeAssistant) -> None: config = cv.SERVICE_SCHEMA(config) orig = cv.SERVICE_SCHEMA(orig) - # Only change after call is each template getting hass attached - template.attach(hass, orig) - await service.async_call_from_config(hass, config, validate_config=False) assert orig == config @@ -642,11 +642,11 @@ async def test_extract_entity_ids(hass: HomeAssistant) -> None: order=None, ) - call = ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) + call = ServiceCall(hass, "light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) + call = ServiceCall(hass, "light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call @@ -659,7 +659,7 @@ async def test_extract_entity_ids(hass: HomeAssistant) -> None: assert ( await service.async_extract_entity_ids( hass, - ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), + ServiceCall(hass, "light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) @@ -669,20 +669,22 @@ async def test_extract_entity_ids_from_area( hass: HomeAssistant, floor_area_mock ) -> None: """Test extract_entity_ids method with areas.""" - call = ServiceCall("light", "turn_on", {"area_id": "own-area"}) + call = ServiceCall(hass, "light", "turn_on", {"area_id": "own-area"}) assert { "light.in_own_area", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"area_id": "test-area"}) + call = ServiceCall(hass, "light", "turn_on", {"area_id": "test-area"}) assert { "light.in_area", "light.assigned_to_area", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) + call = 
ServiceCall( + hass, "light", "turn_on", {"area_id": ["test-area", "diff-area"]} + ) assert { "light.in_area", @@ -692,7 +694,7 @@ async def test_extract_entity_ids_from_area( assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) + hass, ServiceCall(hass, "light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) @@ -703,13 +705,13 @@ async def test_extract_entity_ids_from_devices( ) -> None: """Test extract_entity_ids method with devices.""" assert await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"device_id": "device-no-area-id"}) + hass, ServiceCall(hass, "light", "turn_on", {"device_id": "device-no-area-id"}) ) == { "light.no_area", } assert await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"device_id": "device-area-a-id"}) + hass, ServiceCall(hass, "light", "turn_on", {"device_id": "device-area-a-id"}) ) == { "light.in_area_a", "light.in_area_b", @@ -717,7 +719,8 @@ async def test_extract_entity_ids_from_devices( assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"device_id": "non-existing-id"}) + hass, + ServiceCall(hass, "light", "turn_on", {"device_id": "non-existing-id"}), ) == set() ) @@ -726,14 +729,16 @@ async def test_extract_entity_ids_from_devices( @pytest.mark.usefixtures("floor_area_mock") async def test_extract_entity_ids_from_floor(hass: HomeAssistant) -> None: """Test extract_entity_ids method with floors.""" - call = ServiceCall("light", "turn_on", {"floor_id": "test-floor"}) + call = ServiceCall(hass, "light", "turn_on", {"floor_id": "test-floor"}) assert { "light.in_area", "light.assigned_to_area", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"floor_id": ["test-floor", "floor-a"]}) + call = ServiceCall( + hass, "light", "turn_on", {"floor_id": ["test-floor", "floor-a"]} + ) assert { "light.in_area", @@ -743,7 +748,7 @@ async def test_extract_entity_ids_from_floor(hass: HomeAssistant) -> None: assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"floor_id": ENTITY_MATCH_NONE}) + hass, ServiceCall(hass, "light", "turn_on", {"floor_id": ENTITY_MATCH_NONE}) ) == set() ) @@ -752,28 +757,29 @@ async def test_extract_entity_ids_from_floor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("label_mock") async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: """Test extract_entity_ids method with labels.""" - call = ServiceCall("light", "turn_on", {"label_id": "my-label"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "my-label"}) assert { "light.with_my_label", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"label_id": "label1"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "label1"}) assert { "light.with_label1_from_device", + "light.with_label1_from_device_diff_area", "light.with_labels_from_device", "light.with_label1_and_label2_from_device", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"label_id": ["label2"]}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": ["label2"]}) assert { "light.with_labels_from_device", "light.with_label1_and_label2_from_device", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"label_id": ["label_area"]}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": 
["label_area"]}) assert { "light.with_labels_from_device", @@ -781,7 +787,7 @@ async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"label_id": ENTITY_MATCH_NONE}) + hass, ServiceCall(hass, "light", "turn_on", {"label_id": ENTITY_MATCH_NONE}) ) == set() ) @@ -1273,12 +1279,14 @@ async def test_register_with_mixed_case(hass: HomeAssistant) -> None: async def test_call_with_required_features(hass: HomeAssistant, mock_entities) -> None: """Test service calls invoked only if entity has required features.""" + # Set up homeassistant component to fetch the translations + await async_setup_component(hass, "homeassistant", {}) test_service_mock = AsyncMock(return_value=None) await service.entity_service_call( hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) @@ -1292,13 +1300,17 @@ async def test_call_with_required_features(hass: HomeAssistant, mock_entities) - # Test we raise if we target entity ID that does not support the service test_service_mock.reset_mock() - with pytest.raises(exceptions.HomeAssistantError): + with pytest.raises( + exceptions.ServiceNotSupported, + match="Entity light.living_room does not " + "support action test_domain.test_service", + ): await service.entity_service_call( hass, mock_entities, HassJob(test_service_mock), ServiceCall( - "test_domain", "test_service", {"entity_id": "light.living_room"} + hass, "test_domain", "test_service", {"entity_id": "light.living_room"} ), required_features=[SUPPORT_A], ) @@ -1314,7 +1326,7 @@ async def test_call_with_both_required_features( hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) @@ -1333,7 +1345,7 @@ async def test_call_with_one_of_required_features( hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) @@ -1354,7 +1366,9 @@ async def test_call_with_sync_func(hass: HomeAssistant, mock_entities) -> None: hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), + ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen"} + ), ) assert test_service_mock.call_count == 1 @@ -1367,6 +1381,7 @@ async def test_call_with_sync_attr(hass: HomeAssistant, mock_entities) -> None: mock_entities, "sync_method", ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": "abcd"}, @@ -1385,6 +1400,7 @@ async def test_call_context_user_not_exist(hass: HomeAssistant) -> None: {}, Mock(), ServiceCall( + hass, "test_domain", "test_service", context=Context(user_id="non-existing"), @@ -1412,6 +1428,7 @@ async def test_call_context_target_all( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, @@ -1440,6 +1457,7 @@ async def test_call_context_target_specific( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen"}, @@ -1467,6 +1485,7 @@ async def 
test_call_context_target_specific_no_auth( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen"}, @@ -1487,7 +1506,7 @@ async def test_call_no_context_target_all( mock_entities, Mock(), ServiceCall( - "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} + hass, "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) @@ -1506,6 +1525,7 @@ async def test_call_no_context_target_specific( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, @@ -1527,7 +1547,7 @@ async def test_call_with_match_all( hass, mock_entities, Mock(), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 @@ -1544,7 +1564,7 @@ async def test_call_with_omit_entity_id( hass, mock_entities, Mock(), - ServiceCall("test_domain", "test_service"), + ServiceCall(hass, "test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 @@ -1790,7 +1810,7 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] - call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) + call_1 = ServiceCall(hass, "test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert [ ent.entity_id @@ -1798,6 +1818,7 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non ] == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( + hass, "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, @@ -1813,6 +1834,7 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non hass, entities, ServiceCall( + hass, "test", "service", data={"entity_id": ENTITY_MATCH_NONE}, @@ -1828,7 +1850,7 @@ async def test_extract_from_service_empty_if_no_entity_id(hass: HomeAssistant) - MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] - call = ServiceCall("test", "service") + call = ServiceCall(hass, "test", "service") assert [ ent.entity_id @@ -1846,6 +1868,7 @@ async def test_extract_from_service_filter_out_non_existing_entities( ] call = ServiceCall( + hass, "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, @@ -1867,12 +1890,14 @@ async def test_extract_from_service_area_id( MockEntity(name="diff_area", entity_id="light.diff_area"), ] - call = ServiceCall("light", "turn_on", {"area_id": "test-area"}) + call = ServiceCall(hass, "light", "turn_on", {"area_id": "test-area"}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" - call = ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) + call = ServiceCall( + hass, "light", "turn_on", {"area_id": ["test-area", "diff-area"]} + ) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ @@ -1881,6 +1906,7 @@ async def test_extract_from_service_area_id( ] call = ServiceCall( + hass, "light", "turn_on", {"area_id": ["test-area", "diff-area"], "device_id": "device-no-area-id"}, @@ -1905,17 +1931,17 @@ async def test_extract_from_service_label_id(hass: HomeAssistant) -> None: ), ] - call = 
ServiceCall("light", "turn_on", {"label_id": "label_area"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "label_area"}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.with_labels_from_device" - call = ServiceCall("light", "turn_on", {"label_id": "my-label"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "my-label"}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.with_my_label" - call = ServiceCall("light", "turn_on", {"label_id": ["my-label", "label1"]}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": ["my-label", "label1"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ @@ -1924,6 +1950,7 @@ async def test_extract_from_service_label_id(hass: HomeAssistant) -> None: ] call = ServiceCall( + hass, "light", "turn_on", {"label_id": ["my-label", "label1"], "device_id": "device-no-labels"}, @@ -1942,6 +1969,7 @@ async def test_entity_service_call_warn_referenced( ) -> None: """Test we only warn for referenced entities in entity_service_call.""" call = ServiceCall( + hass, "light", "turn_on", { @@ -1965,6 +1993,7 @@ async def test_async_extract_entities_warn_referenced( ) -> None: """Test we only warn for referenced entities in async_extract_entities.""" call = ServiceCall( + hass, "light", "turn_on", { @@ -1990,6 +2019,7 @@ async def test_async_extract_config_entry_ids(hass: HomeAssistant) -> None: device_no_entities = dr.DeviceEntry(id="device-no-entities", config_entries={"abc"}) call = ServiceCall( + hass, "homeassistant", "reload_config_entry", { @@ -2035,17 +2065,33 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloader = service.ReloadServiceHelper(reload_service_handler, reload_targets) tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, target1) # while the first task is reloaded, note that target1 can't be deduplicated # because it's already being reloaded. 
- reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered( @@ -2056,13 +2102,21 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, all) # while the first task is reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service(ServiceCall(hass, "test", "test")), ] await asyncio.gather(*tasks) assert reloaded == unordered(["target1", "target2", "target3", "target4", "all"]) @@ -2071,13 +2125,21 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (all) - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service(ServiceCall(hass, "test", "test")), # These reload tasks will be deduplicated to (target1, target2, target3, target4) # while the first task is reloaded. 
- reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered(["all", "target1", "target2", "target3", "target4"]) @@ -2086,21 +2148,45 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, target1) # while the first task is reloaded, note that target1 can't be deduplicated # because it's already being reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered( @@ -2111,14 +2197,22 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + 
reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, all) # while the first task is reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test")), - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service(ServiceCall(hass, "test", "test")), + reloader.execute_service(ServiceCall(hass, "test", "test")), ] await asyncio.gather(*tasks) assert reloaded == unordered(["target1", "target2", "target3", "target4", "all"]) @@ -2127,17 +2221,33 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (all) - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service(ServiceCall(hass, "test", "test")), # These reload tasks will be deduplicated to (target1, target2, target3, target4) # while the first task is reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered(["all", "target1", "target2", "target3", "target4"]) diff --git a/tests/helpers/test_state.py b/tests/helpers/test_state.py index 150f31f5fe9..ea7c1f6827f 100644 --- a/tests/helpers/test_state.py +++ b/tests/helpers/test_state.py @@ -5,18 +5,17 @@ from unittest.mock import patch import pytest +from homeassistant.components.lock import LockState from homeassistant.components.sun import STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_CLOSED, STATE_HOME, - STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, - STATE_UNLOCKED, ) from homeassistant.core 
import HomeAssistant, State from homeassistant.helpers import state @@ -143,11 +142,17 @@ async def test_as_number_states(hass: HomeAssistant) -> None: zero_states = ( STATE_OFF, STATE_CLOSED, - STATE_UNLOCKED, + LockState.UNLOCKED, STATE_BELOW_HORIZON, STATE_NOT_HOME, ) - one_states = (STATE_ON, STATE_OPEN, STATE_LOCKED, STATE_ABOVE_HORIZON, STATE_HOME) + one_states = ( + STATE_ON, + STATE_OPEN, + LockState.LOCKED, + STATE_ABOVE_HORIZON, + STATE_HOME, + ) for _state in zero_states: assert state.state_as_number(State("domain.test", _state, {})) == 0 for _state in one_states: diff --git a/tests/helpers/test_system_info.py b/tests/helpers/test_system_info.py index 16b5b8b652b..2c4b95302fc 100644 --- a/tests/helpers/test_system_info.py +++ b/tests/helpers/test_system_info.py @@ -93,10 +93,9 @@ async def test_container_installationtype(hass: HomeAssistant) -> None: assert info["installation_type"] == "Unsupported Third Party Container" -async def test_getuser_keyerror(hass: HomeAssistant) -> None: - """Test getuser keyerror.""" - with patch( - "homeassistant.helpers.system_info.cached_get_user", side_effect=KeyError - ): +@pytest.mark.parametrize("error", [KeyError, OSError]) +async def test_getuser_oserror(hass: HomeAssistant, error: Exception) -> None: + """Test getuser oserror.""" + with patch("homeassistant.helpers.system_info.cached_get_user", side_effect=error): info = await async_get_system_info(hass) assert info["user"] is None diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 0676ae21ab7..628aea20900 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -15,6 +15,7 @@ from unittest.mock import patch from freezegun import freeze_time import orjson import pytest +from syrupy import SnapshotAssertion import voluptuous as vol from homeassistant import config_entries @@ -23,6 +24,7 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, STATE_ON, STATE_UNAVAILABLE, + UnitOfArea, UnitOfLength, UnitOfMass, UnitOfPrecipitationDepth, @@ -60,6 +62,7 @@ def _set_up_units(hass: HomeAssistant) -> None: hass.config.units = UnitSystem( "custom", accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -4548,7 +4551,7 @@ async def test_async_render_to_info_with_wildcard_matching_state( hass.states.async_set("cover.office_window", "closed") hass.states.async_set("cover.office_skylight", "open") hass.states.async_set("cover.x_skylight", "open") - hass.states.async_set("binary_sensor.door", "open") + hass.states.async_set("binary_sensor.door", "on") await hass.async_block_till_done() info = render_to_info(hass, template_complex_str) @@ -4558,7 +4561,7 @@ async def test_async_render_to_info_with_wildcard_matching_state( assert info.all_states is True assert info.rate_limit == template.ALL_STATES_RATE_LIMIT - hass.states.async_set("binary_sensor.door", "closed") + hass.states.async_set("binary_sensor.door", "off") info = render_to_info(hass, template_complex_str) assert not info.domains @@ -6281,3 +6284,303 @@ def test_unzip(hass: HomeAssistant, col, expected) -> None: ).async_render({"col": col}) == expected ) + + +def test_template_output_exceeds_maximum_size(hass: HomeAssistant) -> None: + """Test template output exceeds maximum size.""" + tpl = template.Template("{{ 'a' * 1024 * 257 }}", hass) + with pytest.raises(TemplateError): + tpl.async_render() + + +@pytest.mark.parametrize( + ("service_response"), + [ + { + 
"calendar.sports": { + "events": [ + { + "start": "2024-02-27T17:00:00-06:00", + "end": "2024-02-27T18:00:00-06:00", + "summary": "Basketball vs. Rockets", + "description": "", + } + ] + }, + "calendar.local_furry_events": {"events": []}, + "calendar.yap_house_schedules": { + "events": [ + { + "start": "2024-02-26T08:00:00-06:00", + "end": "2024-02-26T09:00:00-06:00", + "summary": "Dr. Appt", + "description": "", + }, + { + "start": "2024-02-28T20:00:00-06:00", + "end": "2024-02-28T21:00:00-06:00", + "summary": "Bake a cake", + "description": "something good", + }, + ] + }, + }, + { + "binary_sensor.workday": {"workday": True}, + "binary_sensor.workday2": {"workday": False}, + }, + { + "weather.smhi_home": { + "forecast": [ + { + "datetime": "2024-03-31T16:00:00", + "condition": "cloudy", + "wind_bearing": 79, + "cloud_coverage": 100, + "temperature": 10, + "templow": 4, + "pressure": 998, + "wind_gust_speed": 21.6, + "wind_speed": 11.88, + "precipitation": 0.2, + "humidity": 87, + }, + { + "datetime": "2024-04-01T12:00:00", + "condition": "rainy", + "wind_bearing": 17, + "cloud_coverage": 100, + "temperature": 6, + "templow": 1, + "pressure": 999, + "wind_gust_speed": 20.52, + "wind_speed": 8.64, + "precipitation": 2.2, + "humidity": 88, + }, + { + "datetime": "2024-04-02T12:00:00", + "condition": "cloudy", + "wind_bearing": 17, + "cloud_coverage": 100, + "temperature": 0, + "templow": -3, + "pressure": 1003, + "wind_gust_speed": 57.24, + "wind_speed": 30.6, + "precipitation": 1.3, + "humidity": 71, + }, + ] + }, + "weather.forecast_home": { + "forecast": [ + { + "condition": "cloudy", + "precipitation_probability": 6.6, + "datetime": "2024-03-31T10:00:00+00:00", + "wind_bearing": 71.8, + "temperature": 10.9, + "templow": 6.5, + "wind_gust_speed": 24.1, + "wind_speed": 13.7, + "precipitation": 0, + "humidity": 71, + }, + { + "condition": "cloudy", + "precipitation_probability": 8, + "datetime": "2024-04-01T10:00:00+00:00", + "wind_bearing": 350.6, + "temperature": 10.2, + "templow": 3.4, + "wind_gust_speed": 38.2, + "wind_speed": 21.6, + "precipitation": 0, + "humidity": 79, + }, + { + "condition": "snowy", + "precipitation_probability": 67.4, + "datetime": "2024-04-02T10:00:00+00:00", + "wind_bearing": 24.5, + "temperature": 3, + "templow": 0, + "wind_gust_speed": 64.8, + "wind_speed": 37.4, + "precipitation": 2.3, + "humidity": 77, + }, + ] + }, + }, + { + "vacuum.deebot_n8_plus_1": { + "payloadType": "j", + "resp": { + "body": { + "msg": "ok", + } + }, + "header": { + "ver": "0.0.1", + }, + }, + "vacuum.deebot_n8_plus_2": { + "payloadType": "j", + "resp": { + "body": { + "msg": "ok", + } + }, + "header": { + "ver": "0.0.1", + }, + }, + }, + ], + ids=["calendar", "workday", "weather", "vacuum"], +) +async def test_merge_response( + hass: HomeAssistant, + service_response: dict, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter.""" + + _template = "{{ merge_response(" + str(service_response) + ") }}" + + tpl = template.Template(_template, hass) + assert service_response == snapshot(name="a_response") + assert tpl.async_render() == snapshot(name="b_rendered") + + +async def test_merge_response_with_entity_id_in_response( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter with empty lists.""" + + service_response = { + "test.response": {"some_key": True, "entity_id": "test.response"}, + "test.response2": {"some_key": False, "entity_id": "test.response2"}, + } + _template = "{{ merge_response(" 
+ str(service_response) + ") }}" + with pytest.raises( + TemplateError, + match="ValueError: Response dictionary already contains key 'entity_id'", + ): + template.Template(_template, hass).async_render() + + service_response = { + "test.response": { + "happening": [ + { + "start": "2024-02-27T17:00:00-06:00", + "end": "2024-02-27T18:00:00-06:00", + "summary": "Magic day", + "entity_id": "test.response", + } + ] + } + } + _template = "{{ merge_response(" + str(service_response) + ") }}" + with pytest.raises( + TemplateError, + match="ValueError: Response dictionary already contains key 'entity_id'", + ): + template.Template(_template, hass).async_render() + + +async def test_merge_response_with_empty_response( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter with empty lists.""" + + service_response = { + "calendar.sports": {"events": []}, + "calendar.local_furry_events": {"events": []}, + "calendar.yap_house_schedules": {"events": []}, + } + _template = "{{ merge_response(" + str(service_response) + ") }}" + tpl = template.Template(_template, hass) + assert service_response == snapshot(name="a_response") + assert tpl.async_render() == snapshot(name="b_rendered") + + +async def test_response_empty_dict( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter with empty dict.""" + + service_response = {} + _template = "{{ merge_response(" + str(service_response) + ") }}" + tpl = template.Template(_template, hass) + assert tpl.async_render() == [] + + +async def test_response_incorrect_value( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter with incorrect response.""" + + service_response = "incorrect" + _template = "{{ merge_response(" + str(service_response) + ") }}" + with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): + template.Template(_template, hass).async_render() + + +async def test_merge_response_with_incorrect_response(hass: HomeAssistant) -> None: + """Test the merge_response function/filter with empty response should raise.""" + + service_response = {"calendar.sports": []} + _template = "{{ merge_response(" + str(service_response) + ") }}" + tpl = template.Template(_template, hass) + with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): + tpl.async_render() + + service_response = { + "binary_sensor.workday": [], + } + _template = "{{ merge_response(" + str(service_response) + ") }}" + tpl = template.Template(_template, hass) + with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): + tpl.async_render() + + +def test_warn_no_hass(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) -> None: + """Test deprecation warning when instantiating Template without hass.""" + + message = "Detected code that creates a template object without passing hass" + template.Template("blah") + assert message in caplog.text + caplog.clear() + + template.Template("blah", None) + assert message in caplog.text + caplog.clear() + + template.Template("blah", hass) + assert message not in caplog.text + caplog.clear() + + +async def test_merge_response_not_mutate_original_object( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test the merge_response does not mutate original service response value.""" + + value = '{"calendar.family": {"events": [{"summary": "An event"}]}' + _template = ( + "{% set calendar_response = " + 
value + "} %}" + "{{ merge_response(calendar_response) }}" + # We should be able to merge the same response again + # as the merge is working on a copy of the original object (response) + "{{ merge_response(calendar_response) }}" + ) + + tpl = template.Template(_template, hass) + assert tpl.async_render() diff --git a/tests/helpers/test_translation.py b/tests/helpers/test_translation.py index 73cd243a0c6..d4a78807e2b 100644 --- a/tests/helpers/test_translation.py +++ b/tests/helpers/test_translation.py @@ -64,10 +64,16 @@ def test_load_translations_files_by_language( "test": { "entity": { "switch": { - "other1": {"name": "Other 1"}, + "other1": { + "name": "Other 1", + "unit_of_measurement": "units", + }, "other2": {"name": "Other 2"}, "other3": {"name": "Other 3"}, - "other4": {"name": "Other 4"}, + "other4": { + "name": "Other 4", + "unit_of_measurement": "quantities", + }, "outlet": {"name": "Outlet " "{placeholder}"}, } }, @@ -87,9 +93,11 @@ def test_load_translations_files_by_language( "en", { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", }, [], @@ -98,9 +106,11 @@ def test_load_translations_files_by_language( "es", { "component.test.entity.switch.other1.name": "Otra 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Otra 2", "component.test.entity.switch.other3.name": "Otra 3", "component.test.entity.switch.other4.name": "Otra 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Enchufe {placeholder}", }, [], @@ -110,12 +120,14 @@ def test_load_translations_files_by_language( { # Correct "component.test.entity.switch.other1.name": "Anderes 1", + "component.test.entity.switch.other1.unit_of_measurement": "einheiten", # Translation has placeholder missing in English "component.test.entity.switch.other2.name": "Other 2", # Correct (empty translation) "component.test.entity.switch.other3.name": "", # Translation missing "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", # Mismatch in placeholders "component.test.entity.switch.outlet.name": "Outlet {placeholder}", }, @@ -166,9 +178,11 @@ async def test_get_translations(hass: HomeAssistant, mock_config_flows) -> None: assert translations == { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } @@ -176,24 +190,33 @@ async def test_get_translations(hass: HomeAssistant, mock_config_flows) -> None: hass, "de", "entity", {"test"} ) + # Test a partial translation assert translations == { + # Correct "component.test.entity.switch.other1.name": "Anderes 1", + "component.test.entity.switch.other1.unit_of_measurement": "einheiten", + # Translation has placeholder missing in 
English "component.test.entity.switch.other2.name": "Other 2", + # Correct (empty translation) "component.test.entity.switch.other3.name": "", + # Translation missing "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", + # Mismatch in placeholders "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } - # Test a partial translation translations = await translation.async_get_translations( hass, "es", "entity", {"test"} ) assert translations == { "component.test.entity.switch.other1.name": "Otra 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Otra 2", "component.test.entity.switch.other3.name": "Otra 3", "component.test.entity.switch.other4.name": "Otra 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Enchufe {placeholder}", } @@ -204,9 +227,11 @@ async def test_get_translations(hass: HomeAssistant, mock_config_flows) -> None: assert translations == { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } @@ -425,10 +450,10 @@ async def test_caching(hass: HomeAssistant) -> None: side_effect=translation.build_resources, ) as mock_build_resources: load1 = await translation.async_get_translations(hass, "en", "entity_component") - assert len(mock_build_resources.mock_calls) == 6 + assert len(mock_build_resources.mock_calls) == 7 load2 = await translation.async_get_translations(hass, "en", "entity_component") - assert len(mock_build_resources.mock_calls) == 6 + assert len(mock_build_resources.mock_calls) == 7 assert load1 == load2 @@ -507,9 +532,11 @@ async def test_get_cached_translations(hass: HomeAssistant, mock_config_flows) - ) assert translations == { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } @@ -522,9 +549,11 @@ async def test_get_cached_translations(hass: HomeAssistant, mock_config_flows) - assert translations == { "component.test.entity.switch.other1.name": "Otra 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Otra 2", "component.test.entity.switch.other3.name": "Otra 3", "component.test.entity.switch.other4.name": "Otra 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Enchufe {placeholder}", } @@ -539,9 +568,11 @@ async def test_get_cached_translations(hass: HomeAssistant, mock_config_flows) - assert translations == { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", 
"component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } @@ -678,7 +709,6 @@ async def test_get_translations_still_has_title_without_translations_files( ) assert translations == translations_again - assert translations == { "component.component1.title": "Component 1", } diff --git a/tests/helpers/test_trigger.py b/tests/helpers/test_trigger.py index 0bd5da0707c..77f48be170b 100644 --- a/tests/helpers/test_trigger.py +++ b/tests/helpers/test_trigger.py @@ -20,7 +20,7 @@ async def test_bad_trigger_platform(hass: HomeAssistant) -> None: """Test bad trigger platform.""" with pytest.raises(vol.Invalid) as ex: await async_validate_trigger_config(hass, [{"platform": "not_a_platform"}]) - assert "Invalid platform 'not_a_platform' specified" in str(ex) + assert "Invalid trigger 'not_a_platform' specified" in str(ex) async def test_trigger_subtype(hass: HomeAssistant) -> None: @@ -159,6 +159,70 @@ async def test_trigger_enabled_templates( assert len(service_calls) == 2 +async def test_nested_trigger_list( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: + """Test triggers within nested list.""" + + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": [ + { + "triggers": { + "platform": "event", + "event_type": "trigger_1", + }, + }, + { + "platform": "event", + "event_type": "trigger_2", + }, + {"triggers": []}, + {"triggers": None}, + { + "triggers": [ + { + "platform": "event", + "event_type": "trigger_3", + }, + { + "platform": "event", + "event_type": "trigger_4", + }, + ], + }, + ], + "action": { + "service": "test.automation", + }, + } + }, + ) + + hass.bus.async_fire("trigger_1") + await hass.async_block_till_done() + assert len(service_calls) == 1 + + hass.bus.async_fire("trigger_2") + await hass.async_block_till_done() + assert len(service_calls) == 2 + + hass.bus.async_fire("trigger_none") + await hass.async_block_till_done() + assert len(service_calls) == 2 + + hass.bus.async_fire("trigger_3") + await hass.async_block_till_done() + assert len(service_calls) == 3 + + hass.bus.async_fire("trigger_4") + await hass.async_block_till_done() + assert len(service_calls) == 4 + + async def test_trigger_enabled_template_limited( hass: HomeAssistant, service_calls: list[ServiceCall], diff --git a/tests/components/template/test_manual_trigger_entity.py b/tests/helpers/test_trigger_template_entity.py similarity index 100% rename from tests/components/template/test_manual_trigger_entity.py rename to tests/helpers/test_trigger_template_entity.py diff --git a/tests/helpers/test_update_coordinator.py b/tests/helpers/test_update_coordinator.py index d450d924f1f..539762a60ff 100644 --- a/tests/helpers/test_update_coordinator.py +++ b/tests/helpers/test_update_coordinator.py @@ -18,7 +18,7 @@ from homeassistant.exceptions import ( ConfigEntryError, ConfigEntryNotReady, ) -from homeassistant.helpers import update_coordinator +from homeassistant.helpers import frame, update_coordinator from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed @@ -57,7 +57,9 @@ KNOWN_ERRORS: list[tuple[Exception, type[Exception], str]] = [ def get_crd( - hass: HomeAssistant, update_interval: timedelta | None + hass: HomeAssistant, + update_interval: timedelta | None, + config_entry: config_entries.ConfigEntry | None = None, ) -> update_coordinator.DataUpdateCoordinator[int]: 
"""Make coordinator mocks.""" calls = 0 @@ -70,6 +72,7 @@ def get_crd( return update_coordinator.DataUpdateCoordinator[int]( hass, _LOGGER, + config_entry=config_entry, name="test", update_method=refresh, update_interval=update_interval, @@ -121,8 +124,7 @@ async def test_async_refresh( async def test_shutdown( - hass: HomeAssistant, - crd: update_coordinator.DataUpdateCoordinator[int], + hass: HomeAssistant, crd: update_coordinator.DataUpdateCoordinator[int] ) -> None: """Test async_shutdown for update coordinator.""" assert crd.data is None @@ -158,8 +160,7 @@ async def test_shutdown( async def test_shutdown_on_entry_unload( - hass: HomeAssistant, - crd: update_coordinator.DataUpdateCoordinator[int], + hass: HomeAssistant, crd: update_coordinator.DataUpdateCoordinator[int] ) -> None: """Test shutdown is requested on entry unload.""" entry = MockConfigEntry() @@ -191,8 +192,7 @@ async def test_shutdown_on_entry_unload( async def test_shutdown_on_hass_stop( - hass: HomeAssistant, - crd: update_coordinator.DataUpdateCoordinator[int], + hass: HomeAssistant, crd: update_coordinator.DataUpdateCoordinator[int] ) -> None: """Test shutdown can be shutdown on STOP event.""" calls = 0 @@ -539,8 +539,8 @@ async def test_stop_refresh_on_ha_stop( ["update_method", "setup_method"], ) async def test_async_config_entry_first_refresh_failure( + hass: HomeAssistant, err_msg: tuple[Exception, type[Exception], str], - crd: update_coordinator.DataUpdateCoordinator[int], method: str, caplog: pytest.LogCaptureFixture, ) -> None: @@ -550,6 +550,11 @@ async def test_async_config_entry_first_refresh_failure( will be caught by config_entries.async_setup which will log it with a decreasing level of logging once the first message is logged. """ + entry = MockConfigEntry() + entry._async_set_state( + hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, None + ) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) setattr(crd, method, AsyncMock(side_effect=err_msg[0])) with pytest.raises(ConfigEntryNotReady): @@ -572,8 +577,8 @@ async def test_async_config_entry_first_refresh_failure( ["update_method", "setup_method"], ) async def test_async_config_entry_first_refresh_failure_passed_through( + hass: HomeAssistant, err_msg: tuple[Exception, type[Exception], str], - crd: update_coordinator.DataUpdateCoordinator[int], method: str, caplog: pytest.LogCaptureFixture, ) -> None: @@ -583,6 +588,11 @@ async def test_async_config_entry_first_refresh_failure_passed_through( will be caught by config_entries.async_setup which will log it with a decreasing level of logging once the first message is logged. 
""" + entry = MockConfigEntry() + entry._async_set_state( + hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, None + ) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) setattr(crd, method, AsyncMock(side_effect=err_msg[0])) with pytest.raises(err_msg[1]): @@ -593,11 +603,13 @@ async def test_async_config_entry_first_refresh_failure_passed_through( assert err_msg[2] not in caplog.text -async def test_async_config_entry_first_refresh_success( - crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture -) -> None: +async def test_async_config_entry_first_refresh_success(hass: HomeAssistant) -> None: """Test first refresh successfully.""" - + entry = MockConfigEntry() + entry._async_set_state( + hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, None + ) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) crd.setup_method = AsyncMock() await crd.async_config_entry_first_refresh() @@ -605,13 +617,69 @@ async def test_async_config_entry_first_refresh_success( crd.setup_method.assert_called_once() +async def test_async_config_entry_first_refresh_invalid_state( + hass: HomeAssistant, +) -> None: + """Test first refresh fails due to invalid state.""" + entry = MockConfigEntry() + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) + crd.setup_method = AsyncMock() + with pytest.raises( + RuntimeError, + match="Detected code that uses `async_config_entry_first_refresh`, which " + "is only supported when entry state is ConfigEntryState.SETUP_IN_PROGRESS, " + "but it is in state ConfigEntryState.NOT_LOADED. Please report this issue", + ): + await crd.async_config_entry_first_refresh() + + assert crd.last_update_success is True + crd.setup_method.assert_not_called() + + +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_async_config_entry_first_refresh_invalid_state_in_integration( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test first refresh successfully, despite wrong state.""" + entry = MockConfigEntry() + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) + crd.setup_method = AsyncMock() + + await crd.async_config_entry_first_refresh() + assert crd.last_update_success is True + crd.setup_method.assert_called() + assert ( + "Detected that integration 'hue' uses `async_config_entry_first_refresh`, which " + "is only supported when entry state is ConfigEntryState.SETUP_IN_PROGRESS, " + "but it is in state ConfigEntryState.NOT_LOADED at " + "homeassistant/components/hue/light.py, line 23: self.light.is_on. " + "This will stop working in Home Assistant 2025.11" + ) in caplog.text + + +async def test_async_config_entry_first_refresh_no_entry(hass: HomeAssistant) -> None: + """Test first refresh successfully.""" + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, None) + crd.setup_method = AsyncMock() + with pytest.raises( + RuntimeError, + match="Detected code that uses `async_config_entry_first_refresh`, " + "which is only supported for coordinators with a config entry. 
" + "Please report this issue", + ): + await crd.async_config_entry_first_refresh() + + assert crd.last_update_success is True + crd.setup_method.assert_not_called() + + async def test_not_schedule_refresh_if_system_option_disable_polling( hass: HomeAssistant, ) -> None: """Test we do not schedule a refresh if disable polling in config entry.""" entry = MockConfigEntry(pref_disable_polling=True) - config_entries.current_entry.set(entry) - crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) crd.async_add_listener(lambda: None) assert crd._unsub_refresh is None @@ -651,7 +719,7 @@ async def test_async_set_update_error( async def test_only_callback_on_change_when_always_update_is_false( - crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture + crd: update_coordinator.DataUpdateCoordinator[int], ) -> None: """Test we do not callback listeners unless something has actually changed when always_update is false.""" update_callback = Mock() @@ -721,7 +789,7 @@ async def test_only_callback_on_change_when_always_update_is_false( async def test_always_callback_when_always_update_is_true( - crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture + crd: update_coordinator.DataUpdateCoordinator[int], ) -> None: """Test we callback listeners even though the data is the same when always_update is True.""" update_callback = Mock() @@ -795,3 +863,38 @@ async def test_timestamp_date_update_coordinator(hass: HomeAssistant) -> None: unsub() await crd.async_refresh() assert len(last_update_success_times) == 1 + + +async def test_config_entry(hass: HomeAssistant) -> None: + """Test behavior of coordinator.entry.""" + entry = MockConfigEntry() + + # Default without context should be None + crd = update_coordinator.DataUpdateCoordinator[int](hass, _LOGGER, name="test") + assert crd.config_entry is None + + # Explicit None is OK + crd = update_coordinator.DataUpdateCoordinator[int]( + hass, _LOGGER, name="test", config_entry=None + ) + assert crd.config_entry is None + + # Explicit entry is OK + crd = update_coordinator.DataUpdateCoordinator[int]( + hass, _LOGGER, name="test", config_entry=entry + ) + assert crd.config_entry is entry + + # set ContextVar + config_entries.current_entry.set(entry) + + # Default with ContextVar should match the ContextVar + crd = update_coordinator.DataUpdateCoordinator[int](hass, _LOGGER, name="test") + assert crd.config_entry is entry + + # Explicit entry different from ContextVar not recommended, but should work + another_entry = MockConfigEntry() + crd = update_coordinator.DataUpdateCoordinator[int]( + hass, _LOGGER, name="test", config_entry=another_entry + ) + assert crd.config_entry is another_entry diff --git a/tests/pylint/conftest.py b/tests/pylint/conftest.py index 90e535a7b0e..8ae291ac0b7 100644 --- a/tests/pylint/conftest.py +++ b/tests/pylint/conftest.py @@ -104,22 +104,37 @@ def enforce_sorted_platforms_checker_fixture( return enforce_sorted_platforms_checker -@pytest.fixture(name="hass_enforce_coordinator_module", scope="package") -def hass_enforce_coordinator_module_fixture() -> ModuleType: - """Fixture to the content for the hass_enforce_coordinator_module check.""" +@pytest.fixture(name="hass_enforce_class_module", scope="package") +def hass_enforce_class_module_fixture() -> ModuleType: + """Fixture to the content for the hass_enforce_class_module check.""" return _load_plugin_from_file( - "hass_enforce_coordinator_module", - 
"pylint/plugins/hass_enforce_coordinator_module.py", + "hass_enforce_class_module", + "pylint/plugins/hass_enforce_class_module.py", ) -@pytest.fixture(name="enforce_coordinator_module_checker") -def enforce_coordinator_module_fixture( - hass_enforce_coordinator_module, linter -) -> BaseChecker: - """Fixture to provide a hass_enforce_coordinator_module checker.""" - enforce_coordinator_module_checker = ( - hass_enforce_coordinator_module.HassEnforceCoordinatorModule(linter) +@pytest.fixture(name="enforce_class_module_checker") +def enforce_class_module_fixture(hass_enforce_class_module, linter) -> BaseChecker: + """Fixture to provide a hass_enforce_class_module checker.""" + enforce_class_module_checker = hass_enforce_class_module.HassEnforceClassModule( + linter ) - enforce_coordinator_module_checker.module = "homeassistant.components.pylint_test" - return enforce_coordinator_module_checker + enforce_class_module_checker.module = "homeassistant.components.pylint_test" + return enforce_class_module_checker + + +@pytest.fixture(name="hass_decorator", scope="package") +def hass_decorator_fixture() -> ModuleType: + """Fixture to provide a pylint plugin.""" + return _load_plugin_from_file( + "hass_imports", + "pylint/plugins/hass_decorator.py", + ) + + +@pytest.fixture(name="decorator_checker") +def decorator_checker_fixture(hass_decorator, linter) -> BaseChecker: + """Fixture to provide a pylint checker.""" + type_hint_checker = hass_decorator.HassDecoratorChecker(linter) + type_hint_checker.module = "homeassistant.components.pylint_test" + return type_hint_checker diff --git a/tests/pylint/test_decorator.py b/tests/pylint/test_decorator.py new file mode 100644 index 00000000000..c2e45e5a433 --- /dev/null +++ b/tests/pylint/test_decorator.py @@ -0,0 +1,268 @@ +"""Tests for pylint hass_enforce_type_hints plugin.""" + +from __future__ import annotations + +import astroid +from pylint.checkers import BaseChecker +from pylint.interfaces import UNDEFINED +from pylint.testutils import MessageTest +from pylint.testutils.unittest_linter import UnittestLinter +from pylint.utils.ast_walker import ASTWalker +import pytest + +from . 
import assert_adds_messages, assert_no_messages + + +def test_good_callback(linter: UnittestLinter, decorator_checker: BaseChecker) -> None: + """Test good `@callback` decorator.""" + code = """ + from homeassistant.core import callback + + @callback + def setup( + arg1, arg2 + ): + pass + """ + + root_node = astroid.parse(code) + walker = ASTWalker(linter) + walker.add_checker(decorator_checker) + + with assert_no_messages(linter): + walker.walk(root_node) + + +def test_bad_callback(linter: UnittestLinter, decorator_checker: BaseChecker) -> None: + """Test bad `@callback` decorator.""" + code = """ + from homeassistant.core import callback + + @callback + async def setup( + arg1, arg2 + ): + pass + """ + + root_node = astroid.parse(code) + walker = ASTWalker(linter) + walker.add_checker(decorator_checker) + + with assert_adds_messages( + linter, + MessageTest( + msg_id="hass-async-callback-decorator", + line=5, + node=root_node.body[1], + args=None, + confidence=UNDEFINED, + col_offset=0, + end_line=5, + end_col_offset=15, + ), + ): + walker.walk(root_node) + + +@pytest.mark.parametrize( + ("keywords", "path"), + [ + ('scope="function"', "tests.test_bootstrap"), + ('scope="class"', "tests.test_bootstrap"), + ('scope="module"', "tests.test_bootstrap"), + ('scope="package"', "tests.test_bootstrap"), + ('scope="session", autouse=True', "tests.test_bootstrap"), + ('scope="function"', "tests.components.conftest"), + ('scope="class"', "tests.components.conftest"), + ('scope="module"', "tests.components.conftest"), + ('scope="package"', "tests.components.conftest"), + ('scope="session", autouse=True', "tests.components.conftest"), + ( + 'scope="session", autouse=find_spec("zeroconf") is not None', + "tests.components.conftest", + ), + ('scope="function"', "tests.components.pylint_tests.conftest"), + ('scope="class"', "tests.components.pylint_tests.conftest"), + ('scope="module"', "tests.components.pylint_tests.conftest"), + ('scope="package"', "tests.components.pylint_tests.conftest"), + ('scope="function"', "tests.components.pylint_test"), + ('scope="class"', "tests.components.pylint_test"), + ('scope="module"', "tests.components.pylint_test"), + ], +) +def test_good_fixture( + linter: UnittestLinter, decorator_checker: BaseChecker, keywords: str, path: str +) -> None: + """Test good `@pytest.fixture` decorator.""" + code = f""" + import pytest + + @pytest.fixture + def setup( + arg1, arg2 + ): + pass + + @pytest.fixture({keywords}) + def setup_session( + arg1, arg2 + ): + pass + """ + + root_node = astroid.parse(code, path) + walker = ASTWalker(linter) + walker.add_checker(decorator_checker) + + with assert_no_messages(linter): + walker.walk(root_node) + + +@pytest.mark.parametrize( + "path", + [ + "tests.components.pylint_test", + "tests.components.pylint_test.conftest", + "tests.components.pylint_test.module", + ], +) +def test_bad_fixture_session_scope( + linter: UnittestLinter, decorator_checker: BaseChecker, path: str +) -> None: + """Test bad `@pytest.fixture` decorator.""" + code = """ + import pytest + + @pytest.fixture + def setup( + arg1, arg2 + ): + pass + + @pytest.fixture(scope="session") + def setup_session( + arg1, arg2 + ): + pass + """ + + root_node = astroid.parse(code, path) + walker = ASTWalker(linter) + walker.add_checker(decorator_checker) + + with assert_adds_messages( + linter, + MessageTest( + msg_id="hass-pytest-fixture-decorator", + line=10, + node=root_node.body[2].decorators.nodes[0], + args=("scope `session`", "use `package` or lower"), + confidence=UNDEFINED, + 
col_offset=1, + end_line=10, + end_col_offset=32, + ), + ): + walker.walk(root_node) + + +@pytest.mark.parametrize( + "path", + [ + "tests.components.pylint_test", + "tests.components.pylint_test.module", + ], +) +def test_bad_fixture_package_scope( + linter: UnittestLinter, decorator_checker: BaseChecker, path: str +) -> None: + """Test bad `@pytest.fixture` decorator.""" + code = """ + import pytest + + @pytest.fixture + def setup( + arg1, arg2 + ): + pass + + @pytest.fixture(scope="package") + def setup_session( + arg1, arg2 + ): + pass + """ + + root_node = astroid.parse(code, path) + walker = ASTWalker(linter) + walker.add_checker(decorator_checker) + + with assert_adds_messages( + linter, + MessageTest( + msg_id="hass-pytest-fixture-decorator", + line=10, + node=root_node.body[2].decorators.nodes[0], + args=("scope `package`", "use `module` or lower"), + confidence=UNDEFINED, + col_offset=1, + end_line=10, + end_col_offset=32, + ), + ): + walker.walk(root_node) + + +@pytest.mark.parametrize( + "keywords", + [ + 'scope="session"', + 'scope="session", autouse=False', + ], +) +@pytest.mark.parametrize( + "path", + [ + "tests.test_bootstrap", + "tests.components.conftest", + ], +) +def test_bad_fixture_autouse( + linter: UnittestLinter, decorator_checker: BaseChecker, keywords: str, path: str +) -> None: + """Test bad `@pytest.fixture` decorator.""" + code = f""" + import pytest + + @pytest.fixture + def setup( + arg1, arg2 + ): + pass + + @pytest.fixture({keywords}) + def setup_session( + arg1, arg2 + ): + pass + """ + + root_node = astroid.parse(code, path) + walker = ASTWalker(linter) + walker.add_checker(decorator_checker) + + with assert_adds_messages( + linter, + MessageTest( + msg_id="hass-pytest-fixture-decorator", + line=10, + node=root_node.body[2].decorators.nodes[0], + args=("scope/autouse combination", "set `autouse=True` or reduce scope"), + confidence=UNDEFINED, + col_offset=1, + end_line=10, + end_col_offset=17 + len(keywords), + ), + ): + walker.walk(root_node) diff --git a/tests/pylint/test_enforce_class_module.py b/tests/pylint/test_enforce_class_module.py new file mode 100644 index 00000000000..8b3ac563c6a --- /dev/null +++ b/tests/pylint/test_enforce_class_module.py @@ -0,0 +1,286 @@ +"""Tests for pylint hass_enforce_class_module plugin.""" + +from __future__ import annotations + +import astroid +from pylint.checkers import BaseChecker +from pylint.interfaces import UNDEFINED +from pylint.testutils import MessageTest +from pylint.testutils.unittest_linter import UnittestLinter +from pylint.utils.ast_walker import ASTWalker +import pytest + +from . 
import assert_adds_messages, assert_no_messages + + +@pytest.mark.parametrize( + "code", + [ + pytest.param( + """ + class DataUpdateCoordinator: + pass + + class TestCoordinator(DataUpdateCoordinator): + pass + """, + id="simple", + ), + pytest.param( + """ + class DataUpdateCoordinator: + pass + + class TestCoordinator(DataUpdateCoordinator): + pass + + class TestCoordinator2(TestCoordinator): + pass + """, + id="nested", + ), + ], +) +@pytest.mark.parametrize( + "path", + [ + "homeassistant.components.pylint_test.coordinator", + "homeassistant.components.pylint_test.coordinator.my_coordinator", + ], +) +def test_enforce_class_module_good( + linter: UnittestLinter, + enforce_class_module_checker: BaseChecker, + code: str, + path: str, +) -> None: + """Good test cases.""" + root_node = astroid.parse(code, path) + walker = ASTWalker(linter) + walker.add_checker(enforce_class_module_checker) + + with assert_no_messages(linter): + walker.walk(root_node) + + +@pytest.mark.parametrize( + "path", + [ + "homeassistant.components.sensor", + "homeassistant.components.sensor.entity", + "homeassistant.components.pylint_test.sensor", + "homeassistant.components.pylint_test.sensor.entity", + ], +) +def test_enforce_class_platform_good( + linter: UnittestLinter, + enforce_class_module_checker: BaseChecker, + path: str, +) -> None: + """Good test cases.""" + code = """ + class SensorEntity: + pass + + class CustomSensorEntity(SensorEntity): + pass + + class CoordinatorEntity: + pass + + class CustomCoordinatorSensorEntity(CoordinatorEntity, SensorEntity): + pass + """ + root_node = astroid.parse(code, path) + walker = ASTWalker(linter) + walker.add_checker(enforce_class_module_checker) + + with assert_no_messages(linter): + walker.walk(root_node) + + +@pytest.mark.parametrize( + "path", + [ + "homeassistant.components.pylint_test", + "homeassistant.components.pylint_test.my_coordinator", + "homeassistant.components.pylint_test.coordinator_other", + "homeassistant.components.pylint_test.sensor", + ], +) +def test_enforce_class_module_bad_simple( + linter: UnittestLinter, + enforce_class_module_checker: BaseChecker, + path: str, +) -> None: + """Bad test case with coordinator extending directly.""" + root_node = astroid.parse( + """ + class DataUpdateCoordinator: + pass + + class TestCoordinator(DataUpdateCoordinator): + pass + + class CoordinatorEntity: + pass + + class CustomCoordinatorSensorEntity(CoordinatorEntity): + pass + """, + path, + ) + walker = ASTWalker(linter) + walker.add_checker(enforce_class_module_checker) + + with assert_adds_messages( + linter, + MessageTest( + msg_id="hass-enforce-class-module", + line=5, + node=root_node.body[1], + args=("DataUpdateCoordinator", "coordinator"), + confidence=UNDEFINED, + col_offset=0, + end_line=5, + end_col_offset=21, + ), + MessageTest( + msg_id="hass-enforce-class-module", + line=11, + node=root_node.body[3], + args=("CoordinatorEntity", "entity"), + confidence=UNDEFINED, + col_offset=0, + end_line=11, + end_col_offset=35, + ), + ): + walker.walk(root_node) + + +@pytest.mark.parametrize( + "path", + [ + "homeassistant.components.pylint_test", + "homeassistant.components.pylint_test.my_coordinator", + "homeassistant.components.pylint_test.coordinator_other", + "homeassistant.components.pylint_test.sensor", + ], +) +def test_enforce_class_module_bad_nested( + linter: UnittestLinter, + enforce_class_module_checker: BaseChecker, + path: str, +) -> None: + """Bad test case with nested coordinators.""" + root_node = astroid.parse( + """ + class 
DataUpdateCoordinator: + pass + + class TestCoordinator(DataUpdateCoordinator): + pass + + class NopeCoordinator(TestCoordinator): + pass + """, + path, + ) + walker = ASTWalker(linter) + walker.add_checker(enforce_class_module_checker) + + with assert_adds_messages( + linter, + MessageTest( + msg_id="hass-enforce-class-module", + line=5, + node=root_node.body[1], + args=("DataUpdateCoordinator", "coordinator"), + confidence=UNDEFINED, + col_offset=0, + end_line=5, + end_col_offset=21, + ), + MessageTest( + msg_id="hass-enforce-class-module", + line=8, + node=root_node.body[2], + args=("DataUpdateCoordinator", "coordinator"), + confidence=UNDEFINED, + col_offset=0, + end_line=8, + end_col_offset=21, + ), + ): + walker.walk(root_node) + + +@pytest.mark.parametrize( + "path", + [ + "homeassistant.components.sensor", + "homeassistant.components.sensor.entity", + "homeassistant.components.pylint_test.entity", + ], +) +def test_enforce_entity_good( + linter: UnittestLinter, + enforce_class_module_checker: BaseChecker, + path: str, +) -> None: + """Good test cases.""" + code = """ + class Entity: + pass + + class CustomEntity(Entity): + pass + """ + root_node = astroid.parse(code, path) + walker = ASTWalker(linter) + walker.add_checker(enforce_class_module_checker) + + with assert_no_messages(linter): + walker.walk(root_node) + + +@pytest.mark.parametrize( + "path", + [ + "homeassistant.components.pylint_test", + "homeassistant.components.pylint_test.select", + "homeassistant.components.pylint_test.select.entity", + ], +) +def test_enforce_entity_bad( + linter: UnittestLinter, + enforce_class_module_checker: BaseChecker, + path: str, +) -> None: + """Good test cases.""" + code = """ + class Entity: + pass + + class CustomEntity(Entity): + pass + """ + root_node = astroid.parse(code, path) + walker = ASTWalker(linter) + walker.add_checker(enforce_class_module_checker) + + with assert_adds_messages( + linter, + MessageTest( + msg_id="hass-enforce-class-module", + line=5, + node=root_node.body[1], + args=("Entity", "entity"), + confidence=UNDEFINED, + col_offset=0, + end_line=5, + end_col_offset=18, + ), + ): + walker.walk(root_node) diff --git a/tests/pylint/test_enforce_coordinator_module.py b/tests/pylint/test_enforce_coordinator_module.py deleted file mode 100644 index 90d88246974..00000000000 --- a/tests/pylint/test_enforce_coordinator_module.py +++ /dev/null @@ -1,134 +0,0 @@ -"""Tests for pylint hass_enforce_coordinator_module plugin.""" - -from __future__ import annotations - -import astroid -from pylint.checkers import BaseChecker -from pylint.interfaces import UNDEFINED -from pylint.testutils import MessageTest -from pylint.testutils.unittest_linter import UnittestLinter -from pylint.utils.ast_walker import ASTWalker -import pytest - -from . 
import assert_adds_messages, assert_no_messages - - -@pytest.mark.parametrize( - "code", - [ - pytest.param( - """ - class DataUpdateCoordinator: - pass - - class TestCoordinator(DataUpdateCoordinator): - pass - """, - id="simple", - ), - pytest.param( - """ - class DataUpdateCoordinator: - pass - - class TestCoordinator(DataUpdateCoordinator): - pass - - class TestCoordinator2(TestCoordinator): - pass - """, - id="nested", - ), - ], -) -def test_enforce_coordinator_module_good( - linter: UnittestLinter, enforce_coordinator_module_checker: BaseChecker, code: str -) -> None: - """Good test cases.""" - root_node = astroid.parse(code, "homeassistant.components.pylint_test.coordinator") - walker = ASTWalker(linter) - walker.add_checker(enforce_coordinator_module_checker) - - with assert_no_messages(linter): - walker.walk(root_node) - - -def test_enforce_coordinator_module_bad_simple( - linter: UnittestLinter, - enforce_coordinator_module_checker: BaseChecker, -) -> None: - """Bad test case with coordinator extending directly.""" - root_node = astroid.parse( - """ - class DataUpdateCoordinator: - pass - - class TestCoordinator(DataUpdateCoordinator): - pass - """, - "homeassistant.components.pylint_test", - ) - walker = ASTWalker(linter) - walker.add_checker(enforce_coordinator_module_checker) - - with assert_adds_messages( - linter, - MessageTest( - msg_id="hass-enforce-coordinator-module", - line=5, - node=root_node.body[1], - args=None, - confidence=UNDEFINED, - col_offset=0, - end_line=5, - end_col_offset=21, - ), - ): - walker.walk(root_node) - - -def test_enforce_coordinator_module_bad_nested( - linter: UnittestLinter, - enforce_coordinator_module_checker: BaseChecker, -) -> None: - """Bad test case with nested coordinators.""" - root_node = astroid.parse( - """ - class DataUpdateCoordinator: - pass - - class TestCoordinator(DataUpdateCoordinator): - pass - - class NopeCoordinator(TestCoordinator): - pass - """, - "homeassistant.components.pylint_test", - ) - walker = ASTWalker(linter) - walker.add_checker(enforce_coordinator_module_checker) - - with assert_adds_messages( - linter, - MessageTest( - msg_id="hass-enforce-coordinator-module", - line=5, - node=root_node.body[1], - args=None, - confidence=UNDEFINED, - col_offset=0, - end_line=5, - end_col_offset=21, - ), - MessageTest( - msg_id="hass-enforce-coordinator-module", - line=8, - node=root_node.body[2], - args=None, - confidence=UNDEFINED, - col_offset=0, - end_line=8, - end_col_offset=21, - ), - ): - walker.walk(root_node) diff --git a/tests/pylint/test_enforce_type_hints.py b/tests/pylint/test_enforce_type_hints.py index b1692d1d60d..6c53e9832d9 100644 --- a/tests/pylint/test_enforce_type_hints.py +++ b/tests/pylint/test_enforce_type_hints.py @@ -313,7 +313,9 @@ def test_invalid_config_flow_step( linter: UnittestLinter, type_hint_checker: BaseChecker ) -> None: """Ensure invalid hints are rejected for ConfigFlow step.""" - class_node, func_node, arg_node = astroid.extract_node( + type_hint_checker.linter.config.ignore_missing_annotations = True + + class_node, func_node, arg_node, func_node2 = astroid.extract_node( """ class FlowHandler(): pass @@ -329,6 +331,12 @@ def test_invalid_config_flow_step( device_config: dict #@ ): pass + + async def async_step_custom( #@ + self, + user_input + ): + pass """, "homeassistant.components.pylint_test.config_flow", ) @@ -354,6 +362,15 @@ def test_invalid_config_flow_step( end_line=11, end_col_offset=33, ), + pylint.testutils.MessageTest( + msg_id="hass-return-type", + node=func_node2, + 
args=("ConfigFlowResult", "async_step_custom"), + line=17, + col_offset=4, + end_line=17, + end_col_offset=31, + ), ): type_hint_checker.visit_classdef(class_node) diff --git a/tests/pylint/test_imports.py b/tests/pylint/test_imports.py index e53b8206848..5044e73d253 100644 --- a/tests/pylint/test_imports.py +++ b/tests/pylint/test_imports.py @@ -208,6 +208,10 @@ def test_good_root_import( "from homeassistant.components.climate.const import ClimateEntityFeature", "homeassistant.components.pylint_test.climate", ), + ( + "from homeassistant.components.climate.entity import ClimateEntityFeature", + "homeassistant.components.pylint_test.climate", + ), ( "from homeassistant.components.climate import const", "tests.components.pylint_test.climate", @@ -220,6 +224,10 @@ def test_good_root_import( "import homeassistant.components.climate.const as climate", "tests.components.pylint_test.climate", ), + ( + "import homeassistant.components.climate.entity as climate", + "tests.components.pylint_test.climate", + ), ], ) def test_bad_root_import( @@ -309,3 +317,54 @@ def test_bad_namespace_import( ), ): imports_checker.visit_importfrom(node) + + +@pytest.mark.parametrize( + ("module_name", "import_string", "end_col_offset"), + [ + ( + "homeassistant.components.pylint_test.sensor", + "from homeassistant.components.other import DOMAIN as OTHER_DOMAIN", + -1, + ), + ( + "homeassistant.components.pylint_test.sensor", + "from homeassistant.components.other import DOMAIN", + 49, + ), + ], +) +def test_domain_alias( + linter: UnittestLinter, + imports_checker: BaseChecker, + module_name: str, + import_string: str, + end_col_offset: int, +) -> None: + """Ensure good imports pass through ok.""" + + import_node = astroid.extract_node( + f"{import_string} #@", + module_name, + ) + imports_checker.visit_module(import_node.parent) + + expected_messages = [] + if end_col_offset > 0: + expected_messages.append( + pylint.testutils.MessageTest( + msg_id="hass-import-constant-alias", + node=import_node, + args=("DOMAIN", "DOMAIN", "OTHER_DOMAIN"), + line=1, + col_offset=0, + end_line=1, + end_col_offset=end_col_offset, + ) + ) + + with assert_adds_messages(linter, *expected_messages): + if import_string.startswith("import"): + imports_checker.visit_import(import_node) + else: + imports_checker.visit_importfrom(import_node) diff --git a/tests/script/test_gen_requirements_all.py b/tests/script/test_gen_requirements_all.py index 793b3de63c5..519a5c21855 100644 --- a/tests/script/test_gen_requirements_all.py +++ b/tests/script/test_gen_requirements_all.py @@ -1,5 +1,7 @@ """Tests for the gen_requirements_all script.""" +from unittest.mock import patch + from script import gen_requirements_all @@ -23,3 +25,27 @@ def test_include_overrides_subsets() -> None: for overrides in gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS.values(): for req in overrides["include"]: assert req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL + + +def test_requirement_override_markers() -> None: + """Test override markers are applied to the correct requirements.""" + data = { + "pytest": { + "exclude": set(), + "include": set(), + "markers": {"env-canada": "python_version<'3.13'"}, + } + } + with patch.dict( + gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS, data, clear=True + ): + assert ( + gen_requirements_all.process_action_requirement( + "env-canada==0.7.2", "pytest" + ) + == "env-canada==0.7.2;python_version<'3.13'" + ) + assert ( + gen_requirements_all.process_action_requirement("other==1.0", "pytest") + == "other==1.0" + ) diff 
--git a/tests/snapshots/test_config_entries.ambr b/tests/snapshots/test_config_entries.ambr index 136749dfb14..08b532677f4 100644 --- a/tests/snapshots/test_config_entries.ambr +++ b/tests/snapshots/test_config_entries.ambr @@ -5,6 +5,8 @@ 'data': dict({ }), 'disabled_by': None, + 'discovery_keys': dict({ + }), 'domain': 'test', 'entry_id': 'mock-entry', 'minor_version': 1, @@ -14,8 +16,90 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, }) # --- +# name: test_unique_id_collision_issues + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.11.0', + 'created': , + 'data': dict({ + 'issue_type': 'config_entry_unique_id_collision', + 'unique_id': 'group_1', + }), + 'dismissed_version': None, + 'domain': 'homeassistant', + 'is_fixable': False, + 'is_persistent': False, + 'issue_domain': 'test2', + 'issue_id': 'config_entry_unique_id_collision_test2_group_1', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'config_entry_unique_id_collision', + 'translation_placeholders': dict({ + 'configure_url': '/config/integrations/integration/test2', + 'domain': 'test2', + 'titles': "'Mock Title', 'Mock Title', 'Mock Title'", + 'unique_id': 'group_1', + }), + }) +# --- +# name: test_unique_id_collision_issues.1 + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.11.0', + 'created': , + 'data': dict({ + 'issue_type': 'config_entry_unique_id_collision', + 'unique_id': 'not_unique', + }), + 'dismissed_version': None, + 'domain': 'homeassistant', + 'is_fixable': False, + 'is_persistent': False, + 'issue_domain': 'test3', + 'issue_id': 'config_entry_unique_id_collision_test3_not_unique', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'config_entry_unique_id_collision_many', + 'translation_placeholders': dict({ + 'configure_url': '/config/integrations/integration/test3', + 'domain': 'test3', + 'number_of_entries': '6', + 'title_limit': '5', + 'titles': "'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title'", + 'unique_id': 'not_unique', + }), + }) +# --- +# name: test_unique_id_collision_issues.2 + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.11.0', + 'created': , + 'data': dict({ + 'issue_type': 'config_entry_unique_id_collision', + 'unique_id': 'not_unique', + }), + 'dismissed_version': None, + 'domain': 'homeassistant', + 'is_fixable': False, + 'is_persistent': False, + 'issue_domain': 'test3', + 'issue_id': 'config_entry_unique_id_collision_test3_not_unique', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'config_entry_unique_id_collision', + 'translation_placeholders': dict({ + 'configure_url': '/config/integrations/integration/test3', + 'domain': 'test3', + 'titles': "'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title'", + 'unique_id': 'not_unique', + }), + }) +# --- diff --git a/tests/syrupy.py b/tests/syrupy.py index 0bdbcf99e2b..a3b3f763063 100644 --- a/tests/syrupy.py +++ b/tests/syrupy.py @@ -5,14 +5,22 @@ from __future__ import annotations from contextlib import suppress import dataclasses from enum import IntFlag +import json +import os from pathlib import Path from typing import Any import attr import attrs +import pytest +from syrupy.constants import EXIT_STATUS_FAIL_UNUSED +from syrupy.data import Snapshot, SnapshotCollection, SnapshotCollections from syrupy.extensions.amber import AmberDataSerializer, 
AmberSnapshotExtension from syrupy.location import PyTestLocation +from syrupy.report import SnapshotReport +from syrupy.session import ItemStatus, SnapshotSession from syrupy.types import PropertyFilter, PropertyMatcher, PropertyPath, SerializableData +from syrupy.utils import is_xdist_controller, is_xdist_worker import voluptuous as vol import voluptuous_serialize @@ -132,6 +140,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): """Prepare a Home Assistant area registry entry for serialization.""" serialized = AreaRegistryEntrySnapshot(dataclasses.asdict(data) | {"id": ANY}) serialized.pop("_json_repr") + serialized.pop("_cache") return serialized @classmethod @@ -156,6 +165,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): serialized["via_device_id"] = ANY if serialized["primary_config_entry"] is not None: serialized["primary_config_entry"] = ANY + serialized.pop("_cache") return cls._remove_created_and_modified_at(serialized) @classmethod @@ -182,6 +192,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): } ) serialized.pop("categories") + serialized.pop("_cache") return cls._remove_created_and_modified_at(serialized) @classmethod @@ -194,7 +205,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): cls, data: ir.IssueEntry ) -> SerializableData: """Prepare a Home Assistant issue registry entry for serialization.""" - return IssueRegistryItemSnapshot(data.to_json() | {"created": ANY}) + return IssueRegistryItemSnapshot(dataclasses.asdict(data) | {"created": ANY}) @classmethod def _serializable_state(cls, data: State) -> SerializableData: @@ -243,3 +254,164 @@ class HomeAssistantSnapshotExtension(AmberSnapshotExtension): """ test_dir = Path(test_location.filepath).parent return str(test_dir.joinpath("snapshots")) + + +# Classes and Methods to override default finish behavior in syrupy +# This is needed to handle the xdist plugin in pytest +# The default implementation does not handle the xdist plugin +# and will not work correctly when running tests in parallel +# with pytest-xdist. 
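+# A rough sketch of the flow implemented below (inferred from the code in
+# this module, not from syrupy documentation): each xdist worker serializes
+# its SnapshotReport to a ".pytest_syrupy_<worker id>_result" JSON file and
+# records the worker count in ".pytest_syrupy_worker_count"; the merge step
+# in override_syrupy_finish then reads those files back through
+# _merge_serialized_report (only when snapshots are being updated or snapshot
+# details are requested) before deciding whether unused snapshots fail the run.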
+# Temporary workaround until it is finalised inside syrupy +# See https://github.com/syrupy-project/syrupy/pull/901 + + +class _FakePytestObject: + """Fake object.""" + + def __init__(self, collected_item: dict[str, str]) -> None: + """Initialise fake object.""" + self.__module__ = collected_item["modulename"] + self.__name__ = collected_item["methodname"] + + +class _FakePytestItem: + """Fake pytest.Item object.""" + + def __init__(self, collected_item: dict[str, str]) -> None: + """Initialise fake pytest.Item object.""" + self.nodeid = collected_item["nodeid"] + self.name = collected_item["name"] + self.path = Path(collected_item["path"]) + self.obj = _FakePytestObject(collected_item) + + +def _serialize_collections(collections: SnapshotCollections) -> dict[str, Any]: + return { + k: [c.name for c in v] for k, v in collections._snapshot_collections.items() + } + + +def _serialize_report( + report: SnapshotReport, + collected_items: set[pytest.Item], + selected_items: dict[str, ItemStatus], +) -> dict[str, Any]: + return { + "discovered": _serialize_collections(report.discovered), + "created": _serialize_collections(report.created), + "failed": _serialize_collections(report.failed), + "matched": _serialize_collections(report.matched), + "updated": _serialize_collections(report.updated), + "used": _serialize_collections(report.used), + "_collected_items": [ + { + "nodeid": c.nodeid, + "name": c.name, + "path": str(c.path), + "modulename": c.obj.__module__, + "methodname": c.obj.__name__, + } + for c in list(collected_items) + ], + "_selected_items": { + key: status.value for key, status in selected_items.items() + }, + } + + +def _merge_serialized_collections( + collections: SnapshotCollections, json_data: dict[str, list[str]] +) -> None: + if not json_data: + return + for location, names in json_data.items(): + snapshot_collection = SnapshotCollection(location=location) + for name in names: + snapshot_collection.add(Snapshot(name)) + collections.update(snapshot_collection) + + +def _merge_serialized_report(report: SnapshotReport, json_data: dict[str, Any]) -> None: + _merge_serialized_collections(report.discovered, json_data["discovered"]) + _merge_serialized_collections(report.created, json_data["created"]) + _merge_serialized_collections(report.failed, json_data["failed"]) + _merge_serialized_collections(report.matched, json_data["matched"]) + _merge_serialized_collections(report.updated, json_data["updated"]) + _merge_serialized_collections(report.used, json_data["used"]) + for collected_item in json_data["_collected_items"]: + custom_item = _FakePytestItem(collected_item) + if not any( + t.nodeid == custom_item.nodeid and t.name == custom_item.nodeid + for t in report.collected_items + ): + report.collected_items.add(custom_item) + for key, selected_item in json_data["_selected_items"].items(): + if key in report.selected_items: + status = ItemStatus(selected_item) + if status != ItemStatus.NOT_RUN: + report.selected_items[key] = status + else: + report.selected_items[key] = ItemStatus(selected_item) + + +def override_syrupy_finish(self: SnapshotSession) -> int: + """Override the finish method to allow for custom handling.""" + exitstatus = 0 + self.flush_snapshot_write_queue() + self.report = SnapshotReport( + base_dir=self.pytest_session.config.rootpath, + collected_items=self._collected_items, + selected_items=self._selected_items, + assertions=self._assertions, + options=self.pytest_session.config.option, + ) + + needs_xdist_merge = self.update_snapshots or bool( + 
self.pytest_session.config.option.include_snapshot_details + ) + + if is_xdist_worker(): + if not needs_xdist_merge: + return exitstatus + with open(".pytest_syrupy_worker_count", "w", encoding="utf-8") as f: + f.write(os.getenv("PYTEST_XDIST_WORKER_COUNT")) + with open( + f".pytest_syrupy_{os.getenv("PYTEST_XDIST_WORKER")}_result", + "w", + encoding="utf-8", + ) as f: + json.dump( + _serialize_report( + self.report, self._collected_items, self._selected_items + ), + f, + indent=2, + ) + return exitstatus + if is_xdist_controller(): + return exitstatus + + if needs_xdist_merge: + worker_count = None + try: + with open(".pytest_syrupy_worker_count", encoding="utf-8") as f: + worker_count = f.read() + os.remove(".pytest_syrupy_worker_count") + except FileNotFoundError: + pass + + if worker_count: + for i in range(int(worker_count)): + with open(f".pytest_syrupy_gw{i}_result", encoding="utf-8") as f: + _merge_serialized_report(self.report, json.load(f)) + os.remove(f".pytest_syrupy_gw{i}_result") + + if self.report.num_unused: + if self.update_snapshots: + self.remove_unused_snapshots( + unused_snapshot_collections=self.report.unused, + used_snapshot_collections=self.report.used, + ) + elif not self.warn_unused_snapshots: + exitstatus |= EXIT_STATUS_FAIL_UNUSED + return exitstatus diff --git a/tests/test_backports.py b/tests/test_backports.py index 4df0a9e3f57..af485abbc36 100644 --- a/tests/test_backports.py +++ b/tests/test_backports.py @@ -3,7 +3,7 @@ from __future__ import annotations from enum import StrEnum -from functools import cached_property +from functools import cached_property # pylint: disable=hass-deprecated-import from types import ModuleType from typing import Any diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py new file mode 100644 index 00000000000..bce5eca4292 --- /dev/null +++ b/tests/test_backup_restore.py @@ -0,0 +1,379 @@ +"""Test methods in backup_restore.""" + +from pathlib import Path +import tarfile +from unittest import mock + +import pytest + +from homeassistant import backup_restore + +from .common import get_test_config_dir + + +@pytest.mark.parametrize( + ("side_effect", "content", "expected"), + [ + (FileNotFoundError, "", None), + (None, "", None), + ( + None, + '{"path": "test"}', + None, + ), + ( + None, + '{"path": "test", "password": "psw", "remove_after_restore": false, "restore_database": false, "restore_homeassistant": true}', + backup_restore.RestoreBackupFileContent( + backup_file_path=Path("test"), + password="psw", + remove_after_restore=False, + restore_database=False, + restore_homeassistant=True, + ), + ), + ( + None, + '{"path": "test", "password": null, "remove_after_restore": true, "restore_database": true, "restore_homeassistant": false}', + backup_restore.RestoreBackupFileContent( + backup_file_path=Path("test"), + password=None, + remove_after_restore=True, + restore_database=True, + restore_homeassistant=False, + ), + ), + ], +) +def test_reading_the_instruction_contents( + side_effect: Exception | None, + content: str, + expected: backup_restore.RestoreBackupFileContent | None, +) -> None: + """Test reading the content of the .HA_RESTORE file.""" + with ( + mock.patch( + "pathlib.Path.read_text", + return_value=content, + side_effect=side_effect, + ), + ): + read_content = backup_restore.restore_backup_file_content( + Path(get_test_config_dir()) + ) + assert read_content == expected + + +def test_restoring_backup_that_does_not_exist() -> None: + """Test restoring a backup that does not exist.""" + 
backup_file_path = Path(get_test_config_dir("backups", "test")) + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, + ), + ), + mock.patch("pathlib.Path.read_text", side_effect=FileNotFoundError), + pytest.raises( + ValueError, match=f"Backup file {backup_file_path} does not exist" + ), + ): + assert backup_restore.restore_backup(Path(get_test_config_dir())) is False + + +def test_restoring_backup_when_instructions_can_not_be_read() -> None: + """Test restoring a backup when instructions can not be read.""" + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=None, + ), + ): + assert backup_restore.restore_backup(Path(get_test_config_dir())) is False + + +def test_restoring_backup_that_is_not_a_file() -> None: + """Test restoring a backup that is not a file.""" + backup_file_path = Path(get_test_config_dir("backups", "test")) + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, + ), + ), + mock.patch("pathlib.Path.exists", return_value=True), + mock.patch("pathlib.Path.is_file", return_value=False), + pytest.raises( + ValueError, match=f"Backup file {backup_file_path} does not exist" + ), + ): + assert backup_restore.restore_backup(Path(get_test_config_dir())) is False + + +def test_aborting_for_older_versions() -> None: + """Test that we abort for older versions.""" + config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + + def _patched_path_read_text(path: Path, **kwargs): + return '{"homeassistant": {"version": "9999.99.99"}, "compressed": false}' + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, + ), + ), + mock.patch("securetar.SecureTarFile"), + mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("pathlib.Path.read_text", _patched_path_read_text), + mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"), + pytest.raises( + ValueError, + match="You need at least Home Assistant version 9999.99.99 to restore this backup", + ), + ): + assert backup_restore.restore_backup(config_dir) is True + + +@pytest.mark.parametrize( + ( + "restore_backup_content", + "expected_removed_files", + "expected_removed_directories", + "expected_copied_files", + "expected_copied_trees", + ), + [ + ( + backup_restore.RestoreBackupFileContent( + backup_file_path=None, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, + ), + ( + ".HA_RESTORE", + ".HA_VERSION", + "home-assistant_v2.db", + "home-assistant_v2.db-wal", + ), + ("tmp_backups", "www"), + (), + ("data",), + ), + ( + backup_restore.RestoreBackupFileContent( + backup_file_path=None, + password=None, + restore_database=False, + remove_after_restore=False, + restore_homeassistant=True, + ), + (".HA_RESTORE", ".HA_VERSION"), + ("tmp_backups", "www"), + (), + ("data",), + ), + ( + 
backup_restore.RestoreBackupFileContent( + backup_file_path=None, + password=None, + restore_database=True, + remove_after_restore=False, + restore_homeassistant=False, + ), + ("home-assistant_v2.db", "home-assistant_v2.db-wal"), + (), + ("home-assistant_v2.db", "home-assistant_v2.db-wal"), + (), + ), + ], +) +def test_removal_of_current_configuration_when_restoring( + restore_backup_content: backup_restore.RestoreBackupFileContent, + expected_removed_files: tuple[str, ...], + expected_removed_directories: tuple[str, ...], + expected_copied_files: tuple[str, ...], + expected_copied_trees: tuple[str, ...], +) -> None: + """Test that we are removing the current configuration directory.""" + config_dir = Path(get_test_config_dir()) + restore_backup_content.backup_file_path = Path(config_dir, "backups", "test.tar") + mock_config_dir = [ + {"path": Path(config_dir, ".HA_RESTORE"), "is_file": True}, + {"path": Path(config_dir, ".HA_VERSION"), "is_file": True}, + {"path": Path(config_dir, "home-assistant_v2.db"), "is_file": True}, + {"path": Path(config_dir, "home-assistant_v2.db-wal"), "is_file": True}, + {"path": Path(config_dir, "backups"), "is_file": False}, + {"path": Path(config_dir, "tmp_backups"), "is_file": False}, + {"path": Path(config_dir, "www"), "is_file": False}, + ] + + def _patched_path_read_text(path: Path, **kwargs): + return '{"homeassistant": {"version": "2013.09.17"}, "compressed": false}' + + def _patched_path_is_file(path: Path, **kwargs): + return [x for x in mock_config_dir if x["path"] == path][0]["is_file"] + + def _patched_path_is_dir(path: Path, **kwargs): + return not [x for x in mock_config_dir if x["path"] == path][0]["is_file"] + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=restore_backup_content, + ), + mock.patch("securetar.SecureTarFile"), + mock.patch("homeassistant.backup_restore.TemporaryDirectory") as temp_dir_mock, + mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"), + mock.patch("pathlib.Path.read_text", _patched_path_read_text), + mock.patch("pathlib.Path.is_file", _patched_path_is_file), + mock.patch("pathlib.Path.is_dir", _patched_path_is_dir), + mock.patch( + "pathlib.Path.iterdir", + return_value=[x["path"] for x in mock_config_dir], + ), + mock.patch("pathlib.Path.unlink", autospec=True) as unlink_mock, + mock.patch("shutil.copy") as copy_mock, + mock.patch("shutil.copytree") as copytree_mock, + mock.patch("shutil.rmtree") as rmtree_mock, + ): + temp_dir_mock.return_value.__enter__.return_value = "tmp" + + assert backup_restore.restore_backup(config_dir) is True + + tmp_ha = Path("tmp", "homeassistant") + assert copy_mock.call_count == len(expected_copied_files) + copied_files = {Path(call.args[0]) for call in copy_mock.mock_calls} + assert copied_files == {Path(tmp_ha, "data", f) for f in expected_copied_files} + + assert copytree_mock.call_count == len(expected_copied_trees) + copied_trees = {Path(call.args[0]) for call in copytree_mock.mock_calls} + assert copied_trees == {Path(tmp_ha, t) for t in expected_copied_trees} + + assert unlink_mock.call_count == len(expected_removed_files) + removed_files = {Path(call.args[0]) for call in unlink_mock.mock_calls} + assert removed_files == {Path(config_dir, f) for f in expected_removed_files} + + assert rmtree_mock.call_count == len(expected_removed_directories) + removed_directories = {Path(call.args[0]) for call in rmtree_mock.mock_calls} + assert removed_directories == { + Path(config_dir, d) for d in 
expected_removed_directories + } + + +def test_extracting_the_contents_of_a_backup_file() -> None: + """Test extracting the contents of a backup file.""" + config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + + def _patched_path_read_text(path: Path, **kwargs): + return '{"homeassistant": {"version": "2013.09.17"}, "compressed": false}' + + getmembers_mock = mock.MagicMock( + return_value=[ + tarfile.TarInfo(name="../data/test"), + tarfile.TarInfo(name="data"), + tarfile.TarInfo(name="data/.HA_VERSION"), + tarfile.TarInfo(name="data/.storage"), + tarfile.TarInfo(name="data/www"), + ] + ) + extractall_mock = mock.MagicMock() + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, + ), + ), + mock.patch( + "tarfile.open", + return_value=mock.MagicMock( + getmembers=getmembers_mock, + extractall=extractall_mock, + __iter__=lambda x: iter(getmembers_mock.return_value), + ), + ), + mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("pathlib.Path.read_text", _patched_path_read_text), + mock.patch("pathlib.Path.is_file", return_value=False), + mock.patch("pathlib.Path.iterdir", return_value=[]), + mock.patch("shutil.copytree"), + ): + assert backup_restore.restore_backup(config_dir) is True + assert extractall_mock.call_count == 2 + + assert { + member.name for member in extractall_mock.mock_calls[-1].kwargs["members"] + } == {"data", "data/.HA_VERSION", "data/.storage", "data/www"} + + +@pytest.mark.parametrize( + ("remove_after_restore", "unlink_calls"), [(True, 1), (False, 0)] +) +def test_remove_backup_file_after_restore( + remove_after_restore: bool, unlink_calls: int +) -> None: + """Test removing a backup file after restore.""" + config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path, + password=None, + remove_after_restore=remove_after_restore, + restore_database=True, + restore_homeassistant=True, + ), + ), + mock.patch("homeassistant.backup_restore._extract_backup"), + mock.patch("pathlib.Path.unlink", autospec=True) as mock_unlink, + ): + assert backup_restore.restore_backup(config_dir) is True + assert mock_unlink.call_count == unlink_calls + for call in mock_unlink.mock_calls: + assert call.args[0] == backup_file_path + + +@pytest.mark.parametrize( + ("password", "expected"), + [ + ("test", b"\xf0\x9b\xb9\x1f\xdc,\xff\xd5x\xd6\xd6\x8fz\x19.\x0f"), + ("lorem ipsum...", b"#\xe0\xfc\xe0\xdb?_\x1f,$\rQ\xf4\xf5\xd8\xfb"), + ], +) +def test_pw_to_key(password: str | None, expected: bytes | None) -> None: + """Test password to key conversion.""" + assert backup_restore.password_to_key(password) == expected + + +def test_pw_to_key_none() -> None: + """Test password to key conversion.""" + with pytest.raises(AttributeError): + backup_restore.password_to_key(None) diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index dc2b096f595..dd23d4e9709 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -429,6 +429,12 @@ async def test_protect_loop_load_verify_locations( context.load_verify_locations("/dev/null") assert "Detected 
blocking call to load_verify_locations" in caplog.text + # ignore with only cadata + caplog.clear() + with pytest.raises(ssl.SSLError): + context.load_verify_locations(cadata="xxx") + assert "Detected blocking call to load_verify_locations" not in caplog.text + async def test_protect_loop_load_cert_chain( hass: HomeAssistant, caplog: pytest.LogCaptureFixture diff --git a/tests/test_config.py b/tests/test_config.py index 02f8e1fc078..c8c5b081119 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -4,62 +4,32 @@ import asyncio from collections import OrderedDict from collections.abc import Generator import contextlib -import copy import logging import os from pathlib import Path -from typing import Any from unittest import mock from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol -from voluptuous import Invalid, MultipleInvalid import yaml from homeassistant import loader import homeassistant.config as config_util -from homeassistant.const import ( - ATTR_ASSUMED_STATE, - ATTR_FRIENDLY_NAME, - CONF_AUTH_MFA_MODULES, - CONF_AUTH_PROVIDERS, - CONF_CUSTOMIZE, - CONF_LATITUDE, - CONF_LONGITUDE, - CONF_NAME, - CONF_PACKAGES, - __version__, -) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - ConfigSource, - HomeAssistant, - State, -) +from homeassistant.const import CONF_PACKAGES, __version__ +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import ConfigValidationError, HomeAssistantError -from homeassistant.helpers import ( - check_config, - config_validation as cv, - issue_registry as ir, -) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers import check_config, config_validation as cv from homeassistant.helpers.typing import ConfigType from homeassistant.loader import Integration, async_get_integration from homeassistant.setup import async_setup_component -from homeassistant.util.unit_system import ( - METRIC_SYSTEM, - US_CUSTOMARY_SYSTEM, - UnitSystem, -) from homeassistant.util.yaml import SECRET_YAML from homeassistant.util.yaml.objects import NodeDictClass from .common import ( MockModule, MockPlatform, - MockUser, get_test_config_dir, mock_integration, mock_platform, @@ -509,104 +479,6 @@ async def test_create_default_config_returns_none_if_write_error( assert mock_print.called -def test_core_config_schema() -> None: - """Test core config schema.""" - for value in ( - {"unit_system": "K"}, - {"time_zone": "non-exist"}, - {"latitude": "91"}, - {"longitude": -181}, - {"external_url": "not an url"}, - {"internal_url": "not an url"}, - {"currency", 100}, - {"customize": "bla"}, - {"customize": {"light.sensor": 100}}, - {"customize": {"entity_id": []}}, - {"country": "xx"}, - {"language": "xx"}, - {"radius": -10}, - ): - with pytest.raises(MultipleInvalid): - config_util.CORE_CONFIG_SCHEMA(value) - - config_util.CORE_CONFIG_SCHEMA( - { - "name": "Test name", - "latitude": "-23.45", - "longitude": "123.45", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "unit_system": "metric", - "currency": "USD", - "customize": {"sensor.temperature": {"hidden": True}}, - "country": "SE", - "language": "sv", - "radius": "10", - } - ) - - -def test_core_config_schema_internal_external_warning( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that we warn for internal/external URL with path.""" - config_util.CORE_CONFIG_SCHEMA( - { - "external_url": 
"https://www.example.com/bla", - "internal_url": "http://example.local/yo", - } - ) - - assert "Invalid external_url set" in caplog.text - assert "Invalid internal_url set" in caplog.text - - -def test_customize_dict_schema() -> None: - """Test basic customize config validation.""" - values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_ASSUMED_STATE: "2"}) - - for val in values: - with pytest.raises(MultipleInvalid): - config_util.CUSTOMIZE_DICT_SCHEMA(val) - - assert config_util.CUSTOMIZE_DICT_SCHEMA( - {ATTR_FRIENDLY_NAME: 2, ATTR_ASSUMED_STATE: "0"} - ) == {ATTR_FRIENDLY_NAME: "2", ATTR_ASSUMED_STATE: False} - - -def test_customize_glob_is_ordered() -> None: - """Test that customize_glob preserves order.""" - conf = config_util.CORE_CONFIG_SCHEMA({"customize_glob": OrderedDict()}) - assert isinstance(conf["customize_glob"], OrderedDict) - - -async def _compute_state(hass: HomeAssistant, config: dict[str, Any]) -> State | None: - await config_util.async_process_ha_core_config(hass, config) - - entity = Entity() - entity.entity_id = "test.test" - entity.hass = hass - entity.schedule_update_ha_state() - - await hass.async_block_till_done() - - return hass.states.get("test.test") - - -async def test_entity_customization(hass: HomeAssistant) -> None: - """Test entity customization through configuration.""" - config = { - CONF_LATITUDE: 50, - CONF_LONGITUDE: 50, - CONF_NAME: "Test", - CONF_CUSTOMIZE: {"test.test": {"hidden": True}}, - } - - state = await _compute_state(hass, config) - - assert state.attributes["hidden"] - - @patch("homeassistant.config.shutil") @patch("homeassistant.config.os") @patch("homeassistant.config.is_docker_env", return_value=False) @@ -696,361 +568,6 @@ def test_config_upgrade_no_file(hass: HomeAssistant) -> None: assert opened_file.write.call_args == mock.call(__version__) -async def test_loading_configuration_from_storage( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test loading core config onto hass object.""" - hass_storage["core.config"] = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "metric", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "currency": "EUR", - "country": "SE", - "language": "sv", - "radius": 150, - }, - "key": "core.config", - "version": 1, - "minor_version": 4, - } - await config_util.async_process_ha_core_config( - hass, {"allowlist_external_dirs": "/etc"} - ) - - assert hass.config.latitude == 55 - assert hass.config.longitude == 13 - assert hass.config.elevation == 10 - assert hass.config.location_name == "Home" - assert hass.config.units is METRIC_SYSTEM - assert hass.config.time_zone == "Europe/Copenhagen" - assert hass.config.external_url == "https://www.example.com" - assert hass.config.internal_url == "http://example.local" - assert hass.config.currency == "EUR" - assert hass.config.country == "SE" - assert hass.config.language == "sv" - assert hass.config.radius == 150 - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert hass.config.config_source is ConfigSource.STORAGE - - -async def test_loading_configuration_from_storage_with_yaml_only( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test loading core and YAML config onto hass object.""" - hass_storage["core.config"] = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": 
"Europe/Copenhagen", - "unit_system": "metric", - }, - "key": "core.config", - "version": 1, - } - await config_util.async_process_ha_core_config( - hass, {"media_dirs": {"mymedia": "/usr"}, "allowlist_external_dirs": "/etc"} - ) - - assert hass.config.latitude == 55 - assert hass.config.longitude == 13 - assert hass.config.elevation == 10 - assert hass.config.location_name == "Home" - assert hass.config.units is METRIC_SYSTEM - assert hass.config.time_zone == "Europe/Copenhagen" - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert hass.config.media_dirs == {"mymedia": "/usr"} - assert hass.config.config_source is ConfigSource.STORAGE - - -async def test_migration_and_updating_configuration( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test updating configuration stores the new configuration.""" - core_data = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "imperial", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "currency": "BTC", - }, - "key": "core.config", - "version": 1, - "minor_version": 1, - } - hass_storage["core.config"] = dict(core_data) - await config_util.async_process_ha_core_config( - hass, {"allowlist_external_dirs": "/etc"} - ) - await hass.config.async_update(latitude=50, currency="USD") - - expected_new_core_data = copy.deepcopy(core_data) - # From async_update above - expected_new_core_data["data"]["latitude"] = 50 - expected_new_core_data["data"]["currency"] = "USD" - # 1.1 -> 1.2 store migration with migrated unit system - expected_new_core_data["data"]["unit_system_v2"] = "us_customary" - # 1.1 -> 1.3 defaults for country and language - expected_new_core_data["data"]["country"] = None - expected_new_core_data["data"]["language"] = "en" - # 1.1 -> 1.4 defaults for zone radius - expected_new_core_data["data"]["radius"] = 100 - # Bumped minor version - expected_new_core_data["minor_version"] = 4 - assert hass_storage["core.config"] == expected_new_core_data - assert hass.config.latitude == 50 - assert hass.config.currency == "USD" - assert hass.config.country is None - assert hass.config.language == "en" - assert hass.config.radius == 100 - - -async def test_override_stored_configuration( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test loading core and YAML config onto hass object.""" - hass_storage["core.config"] = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "metric", - }, - "key": "core.config", - "version": 1, - } - await config_util.async_process_ha_core_config( - hass, {"latitude": 60, "allowlist_external_dirs": "/etc"} - ) - - assert hass.config.latitude == 60 - assert hass.config.longitude == 13 - assert hass.config.elevation == 10 - assert hass.config.location_name == "Home" - assert hass.config.units is METRIC_SYSTEM - assert hass.config.time_zone == "Europe/Copenhagen" - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert hass.config.config_source is ConfigSource.YAML - - -async def test_loading_configuration(hass: HomeAssistant) -> None: - """Test loading core config onto hass object.""" - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - 
"unit_system": "imperial", - "time_zone": "America/New_York", - "allowlist_external_dirs": "/etc", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "media_dirs": {"mymedia": "/usr"}, - "debug": True, - "currency": "EUR", - "country": "SE", - "language": "sv", - "radius": 150, - }, - ) - - assert hass.config.latitude == 60 - assert hass.config.longitude == 50 - assert hass.config.elevation == 25 - assert hass.config.location_name == "Huis" - assert hass.config.units is US_CUSTOMARY_SYSTEM - assert hass.config.time_zone == "America/New_York" - assert hass.config.external_url == "https://www.example.com" - assert hass.config.internal_url == "http://example.local" - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert "/usr" in hass.config.allowlist_external_dirs - assert hass.config.media_dirs == {"mymedia": "/usr"} - assert hass.config.config_source is ConfigSource.YAML - assert hass.config.debug is True - assert hass.config.currency == "EUR" - assert hass.config.country == "SE" - assert hass.config.language == "sv" - assert hass.config.radius == 150 - - -@pytest.mark.parametrize( - ("minor_version", "users", "user_data", "default_language"), - [ - (2, (), {}, "en"), - (2, ({"is_owner": True},), {}, "en"), - ( - 2, - ({"id": "user1", "is_owner": True},), - {"user1": {"language": {"language": "sv"}}}, - "sv", - ), - ( - 2, - ({"id": "user1", "is_owner": False},), - {"user1": {"language": {"language": "sv"}}}, - "en", - ), - (3, (), {}, "en"), - (3, ({"is_owner": True},), {}, "en"), - ( - 3, - ({"id": "user1", "is_owner": True},), - {"user1": {"language": {"language": "sv"}}}, - "en", - ), - ( - 3, - ({"id": "user1", "is_owner": False},), - {"user1": {"language": {"language": "sv"}}}, - "en", - ), - ], -) -async def test_language_default( - hass: HomeAssistant, - hass_storage: dict[str, Any], - minor_version, - users, - user_data, - default_language, -) -> None: - """Test language config default to owner user's language during migration. 
- - This should only happen if the core store version < 1.3 - """ - core_data = { - "data": {}, - "key": "core.config", - "version": 1, - "minor_version": minor_version, - } - hass_storage["core.config"] = dict(core_data) - - for user_config in users: - user = MockUser(**user_config).add_to_hass(hass) - if user.id not in user_data: - continue - storage_key = f"frontend.user_data_{user.id}" - hass_storage[storage_key] = { - "key": storage_key, - "version": 1, - "data": user_data[user.id], - } - - await config_util.async_process_ha_core_config( - hass, - {}, - ) - assert hass.config.language == default_language - - -async def test_loading_configuration_default_media_dirs_docker( - hass: HomeAssistant, -) -> None: - """Test loading core config onto hass object.""" - with patch("homeassistant.config.is_docker_env", return_value=True): - await config_util.async_process_ha_core_config( - hass, - { - "name": "Huis", - }, - ) - - assert hass.config.location_name == "Huis" - assert len(hass.config.allowlist_external_dirs) == 2 - assert "/media" in hass.config.allowlist_external_dirs - assert hass.config.media_dirs == {"local": "/media"} - - -async def test_loading_configuration_from_packages(hass: HomeAssistant) -> None: - """Test loading packages config onto hass object config.""" - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 39, - "longitude": -1, - "elevation": 500, - "name": "Huis", - "unit_system": "metric", - "time_zone": "Europe/Madrid", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "packages": { - "package_1": {"wake_on_lan": None}, - "package_2": { - "light": {"platform": "hue"}, - "media_extractor": None, - "sun": None, - }, - }, - }, - ) - - # Empty packages not allowed - with pytest.raises(MultipleInvalid): - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 39, - "longitude": -1, - "elevation": 500, - "name": "Huis", - "unit_system": "metric", - "time_zone": "Europe/Madrid", - "packages": {"empty_package": None}, - }, - ) - - -@pytest.mark.parametrize( - ("unit_system_name", "expected_unit_system"), - [ - ("metric", METRIC_SYSTEM), - ("imperial", US_CUSTOMARY_SYSTEM), - ("us_customary", US_CUSTOMARY_SYSTEM), - ], -) -async def test_loading_configuration_unit_system( - hass: HomeAssistant, unit_system_name: str, expected_unit_system: UnitSystem -) -> None: - """Test backward compatibility when loading core config.""" - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": unit_system_name, - "time_zone": "America/New_York", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - }, - ) - - assert hass.config.units is expected_unit_system - - @patch("homeassistant.helpers.check_config.async_check_ha_config_file") async def test_check_ha_config_file_correct(mock_check, hass: HomeAssistant) -> None: """Check that restart propagates to stop.""" @@ -1302,148 +819,6 @@ async def test_merge_duplicate_keys( assert len(config["input_select"]) == 1 -async def test_merge_customize(hass: HomeAssistant) -> None: - """Test loading core config onto hass object.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - "customize": {"a.a": {"friendly_name": "A"}}, - "packages": { - "pkg1": {"homeassistant": {"customize": {"b.b": {"friendly_name": "BB"}}}} - }, - } - await 
config_util.async_process_ha_core_config(hass, core_config) - - assert hass.data[config_util.DATA_CUSTOMIZE].get("b.b") == {"friendly_name": "BB"} - - -async def test_auth_provider_config(hass: HomeAssistant) -> None: - """Test loading auth provider config onto hass object.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_PROVIDERS: [ - {"type": "homeassistant"}, - ], - CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp", "id": "second"}], - } - if hasattr(hass, "auth"): - del hass.auth - await config_util.async_process_ha_core_config(hass, core_config) - - assert len(hass.auth.auth_providers) == 1 - assert hass.auth.auth_providers[0].type == "homeassistant" - assert len(hass.auth.auth_mfa_modules) == 2 - assert hass.auth.auth_mfa_modules[0].id == "totp" - assert hass.auth.auth_mfa_modules[1].id == "second" - - -async def test_auth_provider_config_default(hass: HomeAssistant) -> None: - """Test loading default auth provider config.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - } - if hasattr(hass, "auth"): - del hass.auth - await config_util.async_process_ha_core_config(hass, core_config) - - assert len(hass.auth.auth_providers) == 1 - assert hass.auth.auth_providers[0].type == "homeassistant" - assert len(hass.auth.auth_mfa_modules) == 1 - assert hass.auth.auth_mfa_modules[0].id == "totp" - - -async def test_disallowed_auth_provider_config(hass: HomeAssistant) -> None: - """Test loading insecure example auth provider is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_PROVIDERS: [ - { - "type": "insecure_example", - "users": [ - { - "username": "test-user", - "password": "test-pass", - "name": "Test Name", - } - ], - } - ], - } - with pytest.raises(Invalid): - await config_util.async_process_ha_core_config(hass, core_config) - - -async def test_disallowed_duplicated_auth_provider_config(hass: HomeAssistant) -> None: - """Test loading insecure example auth provider is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_PROVIDERS: [{"type": "homeassistant"}, {"type": "homeassistant"}], - } - with pytest.raises(Invalid): - await config_util.async_process_ha_core_config(hass, core_config) - - -async def test_disallowed_auth_mfa_module_config(hass: HomeAssistant) -> None: - """Test loading insecure example auth mfa module is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_MFA_MODULES: [ - { - "type": "insecure_example", - "data": [{"user_id": "mock-user", "pin": "test-pin"}], - } - ], - } - with pytest.raises(Invalid): - await config_util.async_process_ha_core_config(hass, core_config) - - -async def test_disallowed_duplicated_auth_mfa_module_config( - hass: HomeAssistant, -) -> None: - """Test loading insecure example auth mfa module is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp"}], - } - with pytest.raises(Invalid): - await 
config_util.async_process_ha_core_config(hass, core_config) - - async def test_merge_split_component_definition(hass: HomeAssistant) -> None: """Test components with trailing description in packages are merged.""" packages = { @@ -1995,74 +1370,6 @@ def test_identify_config_schema(domain, schema, expected) -> None: ) -async def test_core_config_schema_historic_currency( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test core config schema.""" - await config_util.async_process_ha_core_config(hass, {"currency": "LTT"}) - - issue = issue_registry.async_get_issue("homeassistant", "historic_currency") - assert issue - assert issue.translation_placeholders == {"currency": "LTT"} - - -async def test_core_store_historic_currency( - hass: HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry -) -> None: - """Test core config store.""" - core_data = { - "data": { - "currency": "LTT", - }, - "key": "core.config", - "version": 1, - "minor_version": 1, - } - hass_storage["core.config"] = dict(core_data) - await config_util.async_process_ha_core_config(hass, {}) - - issue_id = "historic_currency" - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert issue - assert issue.translation_placeholders == {"currency": "LTT"} - - await hass.config.async_update(currency="EUR") - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert not issue - - -async def test_core_config_schema_no_country( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test core config schema.""" - await config_util.async_process_ha_core_config(hass, {}) - - issue = issue_registry.async_get_issue("homeassistant", "country_not_configured") - assert issue - - -async def test_core_store_no_country( - hass: HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry -) -> None: - """Test core config store.""" - core_data = { - "data": {}, - "key": "core.config", - "version": 1, - "minor_version": 1, - } - hass_storage["core.config"] = dict(core_data) - await config_util.async_process_ha_core_config(hass, {}) - - issue_id = "country_not_configured" - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert issue - - await hass.config.async_update(country="SE") - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert not issue - - async def test_safe_mode(hass: HomeAssistant) -> None: """Test safe mode.""" assert config_util.safe_mode_enabled(hass.config.config_dir) is False @@ -2482,30 +1789,3 @@ async def test_loading_platforms_gathers(hass: HomeAssistant) -> None: ("platform_int", "sensor"), ("platform_int2", "sensor"), ] - - -async def test_configuration_legacy_template_is_removed(hass: HomeAssistant) -> None: - """Test loading core config onto hass object.""" - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "America/New_York", - "allowlist_external_dirs": "/etc", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "media_dirs": {"mymedia": "/usr"}, - "legacy_templates": True, - "debug": True, - "currency": "EUR", - "country": "SE", - "language": "sv", - "radius": 150, - }, - ) - - assert not getattr(hass.config, "legacy_templates") diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index dccebff13e5..1ad152e8e42 100644 --- a/tests/test_config_entries.py +++ 
b/tests/test_config_entries.py @@ -4,10 +4,11 @@ from __future__ import annotations import asyncio from collections.abc import Generator +from contextlib import AbstractContextManager, nullcontext as does_not_raise from datetime import timedelta -from functools import cached_property import logging -from typing import Any +import re +from typing import Any, Self from unittest.mock import ANY, AsyncMock, Mock, patch from freezegun import freeze_time @@ -17,9 +18,9 @@ from syrupy.assertion import SnapshotAssertion from homeassistant import config_entries, data_entry_flow, loader from homeassistant.components import dhcp -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + CONF_NAME, EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, @@ -37,13 +38,17 @@ from homeassistant.exceptions import ( ConfigEntryNotReady, HomeAssistantError, ) -from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers import entity_registry as er, frame, issue_registry as ir +from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.json import json_dumps +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.setup import async_set_domains_to_be_loaded, async_setup_component from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util +from homeassistant.util.json import json_loads from .common import ( MockConfigEntry, @@ -83,8 +88,27 @@ def mock_handlers() -> Generator[None]: """Mock Reauth.""" return await self.async_step_reauth_confirm() + class MockFlowHandler2(config_entries.ConfigFlow): + """Define a second mock flow handler.""" + + VERSION = 1 + + async def async_step_reauth(self, data): + """Mock Reauth.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm(self, user_input=None): + """Test reauth confirm step.""" + if user_input is None: + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={CONF_NAME: "Custom title"}, + ) + return self.async_abort(reason="test") + with patch.dict( - config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler} + config_entries.HANDLERS, + {"comp": MockFlowHandler, "test": MockFlowHandler, "test2": MockFlowHandler2}, ): yield @@ -511,6 +535,41 @@ async def test_remove_entry( assert not entity_entry_list +async def test_remove_entry_non_unique_unique_id( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + entity_registry: er.EntityRegistry, +) -> None: + """Test that we can remove entry with colliding unique_id.""" + entry_1 = MockConfigEntry( + domain="test_other", entry_id="test1", unique_id="not_unique" + ) + entry_1.add_to_manager(manager) + entry_2 = MockConfigEntry( + domain="test_other", entry_id="test2", unique_id="not_unique" + ) + entry_2.add_to_manager(manager) + entry_3 = MockConfigEntry( + domain="test_other", entry_id="test3", unique_id="not_unique" + ) + entry_3.add_to_manager(manager) + + # Check all config entries exist + assert manager.async_entry_ids() == [ + "test1", + "test2", + "test3", + ] + + # Remove entries + assert await manager.async_remove("test1") == {"require_restart": False} + await 
hass.async_block_till_done() + assert await manager.async_remove("test2") == {"require_restart": False} + await hass.async_block_till_done() + assert await manager.async_remove("test3") == {"require_restart": False} + await hass.async_block_till_done() + + async def test_remove_entry_cancels_reauth( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -675,7 +734,7 @@ async def test_add_entry_calls_setup_entry( """Test user step.""" return self.async_create_entry(title="title", data={"token": "supersecret"}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): + with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -847,7 +906,7 @@ async def test_entries_excludes_ignore_and_disabled( async def test_saving_and_loading( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, freezer: FrozenDateTimeFactory, hass_storage: dict[str, Any] ) -> None: """Test that we're saving and loading correctly.""" mock_integration( @@ -864,9 +923,19 @@ async def test_saving_and_loading( async def async_step_user(self, user_input=None): """Test user step.""" await self.async_set_unique_id("unique") - return self.async_create_entry(title="Test Title", data={"token": "abcd"}) + subentries = [ + config_entries.ConfigSubentryData( + data={"foo": "bar"}, title="subentry 1" + ), + config_entries.ConfigSubentryData( + data={"sun": "moon"}, title="subentry 2", unique_id="very_unique" + ), + ] + return self.async_create_entry( + title="Test Title", data={"token": "abcd"}, subentries=subentries + ) - with patch.dict(config_entries.HANDLERS, {"test": TestFlow}): + with mock_config_flow("test", TestFlow): await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_USER} ) @@ -884,10 +953,21 @@ async def test_saving_and_loading( with patch("homeassistant.config_entries.HANDLERS.get", return_value=Test2Flow): await hass.config_entries.flow.async_init( - "test", context={"source": config_entries.SOURCE_USER} + "test", + context={ + "source": config_entries.SOURCE_USER, + "discovery_key": DiscoveryKey(domain="test", key=("blah"), version=1), + }, + ) + await hass.config_entries.flow.async_init( + "test", + context={ + "source": config_entries.SOURCE_USER, + "discovery_key": DiscoveryKey(domain="test", key=("a", "b"), version=1), + }, ) - assert len(hass.config_entries.async_entries()) == 2 + assert len(hass.config_entries.async_entries()) == 3 entry_1 = hass.config_entries.async_entries()[0] hass.config_entries.async_update_entry( @@ -902,11 +982,103 @@ async def test_saving_and_loading( # To execute the save await hass.async_block_till_done() + stored_data = hass_storage["core.config_entries"] + assert stored_data == { + "data": { + "entries": [ + { + "created_at": ANY, + "data": { + "token": "abcd", + }, + "disabled_by": None, + "discovery_keys": {}, + "domain": "test", + "entry_id": ANY, + "minor_version": 1, + "modified_at": ANY, + "options": {}, + "pref_disable_new_entities": True, + "pref_disable_polling": True, + "source": "user", + "subentries": [ + { + "data": {"foo": "bar"}, + "subentry_id": ANY, + "title": "subentry 1", + "unique_id": None, + }, + { + "data": {"sun": "moon"}, + "subentry_id": ANY, + "title": "subentry 2", + "unique_id": "very_unique", + }, + ], + "title": "Test Title", + "unique_id": "unique", + "version": 5, + }, + { + "created_at": ANY, + "data": { + "username": "bla", + }, + "disabled_by": None, + 
"discovery_keys": { + "test": [ + {"domain": "test", "key": "blah", "version": 1}, + ], + }, + "domain": "test", + "entry_id": ANY, + "minor_version": 1, + "modified_at": ANY, + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "user", + "subentries": [], + "title": "Test 2 Title", + "unique_id": None, + "version": 3, + }, + { + "created_at": ANY, + "data": { + "username": "bla", + }, + "disabled_by": None, + "discovery_keys": { + "test": [ + {"domain": "test", "key": ["a", "b"], "version": 1}, + ], + }, + "domain": "test", + "entry_id": ANY, + "minor_version": 1, + "modified_at": ANY, + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "user", + "subentries": [], + "title": "Test 2 Title", + "unique_id": None, + "version": 3, + }, + ], + }, + "key": "core.config_entries", + "minor_version": 5, + "version": 1, + } + # Now load written data in new config manager manager = config_entries.ConfigEntries(hass, {}) await manager.async_initialize() - assert len(manager.async_entries()) == 2 + assert len(manager.async_entries()) == 3 # Ensure same order for orig, loaded in zip( @@ -914,6 +1086,25 @@ async def test_saving_and_loading( ): assert orig.as_dict() == loaded.as_dict() + hass.config_entries.async_update_entry( + entry_1, + pref_disable_polling=False, + pref_disable_new_entities=False, + ) + + # To trigger the call_later + freezer.tick(1.0) + async_fire_time_changed(hass) + # To execute the save + await hass.async_block_till_done() + + # Assert no data is lost when storing again + expected_stored_data = stored_data + expected_stored_data["data"]["entries"][0]["modified_at"] = ANY + expected_stored_data["data"]["entries"][0]["pref_disable_new_entities"] = False + expected_stored_data["data"]["entries"][0]["pref_disable_polling"] = False + assert hass_storage["core.config_entries"] == expected_stored_data | {} + @freeze_time("2024-02-14 12:00:00") async def test_as_dict(snapshot: SnapshotAssertion) -> None: @@ -941,7 +1132,6 @@ async def test_as_dict(snapshot: SnapshotAssertion) -> None: "_tries", "_setup_again_job", "_supports_options", - "_reconfigure_lock", "supports_reconfigure", } @@ -954,7 +1144,7 @@ async def test_as_dict(snapshot: SnapshotAssertion) -> None: if ( key.startswith("__") or callable(func) - or type(func) in (cached_property, property) + or type(func).__name__ in ("cached_property", "property") ): continue assert key in dict_repr or key in excluded_from_dict @@ -1047,8 +1237,8 @@ async def test_async_forward_entry_setup_deprecated( assert ( "Detected code that calls async_forward_entry_setup for integration, " f"original with title: Mock Title and entry_id: {entry_id}, " - "which is deprecated and will stop working in Home Assistant 2025.6, " - "await async_forward_entry_setups instead. Please report this issue." + "which is deprecated, await async_forward_entry_setups instead. 
" + "This will stop working in Home Assistant 2025.6, please report this issue" ) in caplog.text @@ -1059,23 +1249,20 @@ async def test_discovery_notification( mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) - with patch.dict(config_entries.HANDLERS): + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" - class TestFlow(config_entries.ConfigFlow, domain="test"): - """Test flow.""" + VERSION = 5 - VERSION = 5 + async def async_step_discovery(self, discovery_info): + """Test discovery step.""" + return self.async_show_form(step_id="discovery_confirm") - async def async_step_discovery(self, discovery_info): - """Test discovery step.""" - return self.async_show_form(step_id="discovery_confirm") - - async def async_step_discovery_confirm(self, discovery_info): - """Test discovery confirm step.""" - return self.async_create_entry( - title="Test Title", data={"token": "abcd"} - ) + async def async_step_discovery_confirm(self, discovery_info): + """Test discovery confirm step.""" + return self.async_create_entry(title="Test Title", data={"token": "abcd"}) + with mock_config_flow("test", TestFlow): notifications = async_get_persistent_notifications(hass) assert "config_entry_discovery" not in notifications @@ -1113,29 +1300,31 @@ async def test_reauth_notification(hass: HomeAssistant) -> None: mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) - with patch.dict(config_entries.HANDLERS): + entry = MockConfigEntry(title="test_title", domain="test") + entry.add_to_hass(hass) - class TestFlow(config_entries.ConfigFlow, domain="test"): - """Test flow.""" + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" - VERSION = 5 + VERSION = 5 - async def async_step_user(self, user_input): - """Test user step.""" - return self.async_show_form(step_id="user_confirm") + async def async_step_user(self, user_input): + """Test user step.""" + return self.async_show_form(step_id="user_confirm") - async def async_step_user_confirm(self, user_input): - """Test user confirm step.""" - return self.async_show_form(step_id="user_confirm") + async def async_step_user_confirm(self, user_input): + """Test user confirm step.""" + return self.async_show_form(step_id="user_confirm") - async def async_step_reauth(self, user_input): - """Test reauth step.""" - return self.async_show_form(step_id="reauth_confirm") + async def async_step_reauth(self, user_input): + """Test reauth step.""" + return self.async_show_form(step_id="reauth_confirm") - async def async_step_reauth_confirm(self, user_input): - """Test reauth confirm step.""" - return self.async_abort(reason="test") + async def async_step_reauth_confirm(self, user_input): + """Test reauth confirm step.""" + return self.async_abort(reason="test") + with mock_config_flow("test", TestFlow): # Start user flow to assert that reconfigure notification doesn't fire await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_USER} @@ -1147,7 +1336,11 @@ async def test_reauth_notification(hass: HomeAssistant) -> None: # Start first reauth flow to assert that reconfigure notification fires flow1 = await hass.config_entries.flow.async_init( - "test", context={"source": config_entries.SOURCE_REAUTH} + "test", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, ) await hass.async_block_till_done() @@ -1157,7 +1350,11 @@ async def test_reauth_notification(hass: HomeAssistant) -> None: # Start a second reauth flow so we can 
finish the first and assert that # the reconfigure notification persists until the second one is complete flow2 = await hass.config_entries.flow.async_init( - "test", context={"source": config_entries.SOURCE_REAUTH} + "test", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, ) flow1 = await hass.config_entries.flow.async_configure(flow1["flow_id"], {}) @@ -1235,7 +1432,7 @@ async def test_discovery_notification_not_created(hass: HomeAssistant) -> None: """Test discovery step.""" return self.async_abort(reason="test") - with patch.dict(config_entries.HANDLERS, {"test": TestFlow}): + with mock_config_flow("test", TestFlow): await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_DISCOVERY} ) @@ -1341,6 +1538,42 @@ async def test_update_entry_options_and_trigger_listener( assert len(update_listener_calls) == 1 +async def test_update_subentry_and_trigger_listener( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can update subentry and trigger listener.""" + entry = MockConfigEntry(domain="test", options={"first": True}) + entry.add_to_manager(manager) + update_listener_calls = [] + + subentry = config_entries.ConfigSubentry( + data={"test": "test"}, unique_id="test", title="Mock title" + ) + + async def update_listener( + hass: HomeAssistant, entry: config_entries.ConfigEntry + ) -> None: + """Test function.""" + assert entry.subentries == expected_subentries + update_listener_calls.append(None) + + entry.add_update_listener(update_listener) + + expected_subentries = {subentry.subentry_id: subentry} + assert manager.async_add_subentry(entry, subentry) is True + + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.subentries == expected_subentries + assert len(update_listener_calls) == 1 + + expected_subentries = {} + assert manager.async_remove_subentry(entry, subentry.subentry_id) is True + + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.subentries == expected_subentries + assert len(update_listener_calls) == 2 + + async def test_setup_raise_not_ready( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -1570,7 +1803,7 @@ async def test_create_entry_options( options={"example": user_input["option"]}, ) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): assert await async_setup_component(hass, "comp", {}) await hass.async_block_till_done() @@ -1667,17 +1900,453 @@ async def test_entry_options_unknown_config_entry( mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) - class TestFlow: + with pytest.raises(config_entries.UnknownEntry): + await manager.options.async_create_flow( + "blah", context={"source": "test"}, data=None + ) + + +async def test_create_entry_subentries( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test a config entry being created with subentries.""" + + subentrydata = config_entries.ConfigSubentryData( + data={"test": "test"}, + title="Mock title", + unique_id="test", + ) + + async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Mock setup.""" + hass.async_create_task( + hass.config_entries.flow.async_init( + "comp", + context={"source": config_entries.SOURCE_IMPORT}, + data={"data": "data", "subentry": subentrydata}, + ) + ) + return True + + async_setup_entry = AsyncMock(return_value=True) + mock_integration( + hass, + MockModule( + "comp", 
async_setup=mock_async_setup, async_setup_entry=async_setup_entry + ), + ) + mock_platform(hass, "comp.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_import(self, user_input): + """Test import step creating entry, with subentry.""" + return self.async_create_entry( + title="title", + data={"example": user_input["data"]}, + subentries=[user_input["subentry"]], + ) + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + assert await async_setup_component(hass, "comp", {}) + + await hass.async_block_till_done() + + assert len(async_setup_entry.mock_calls) == 1 + + entries = hass.config_entries.async_entries("comp") + assert len(entries) == 1 + assert entries[0].supported_subentries == () + assert entries[0].data == {"example": "data"} + assert len(entries[0].subentries) == 1 + subentry_id = list(entries[0].subentries)[0] + subentry = config_entries.ConfigSubentry( + data=subentrydata["data"], + subentry_id=subentry_id, + title=subentrydata["title"], + unique_id="test", + ) + assert entries[0].subentries == {subentry_id: subentry} + + +async def test_entry_subentry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can add a subentry to an entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): """Test flow.""" @staticmethod @callback - def async_get_options_flow(config_entry): - """Test options flow.""" + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": "test", + }, + ) + + assert entry.data == {"first": True} + assert entry.options == {} + subentry_id = list(entry.subentries)[0] + assert entry.subentries == { + subentry_id: config_entries.ConfigSubentry( + data={"second": True}, + subentry_id=subentry_id, + title="Mock title", + unique_id="test", + ) + } + assert entry.supported_subentries == ("test",) + + +async def test_entry_subentry_non_string( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test adding an invalid subentry to an entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, 
config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + with pytest.raises(HomeAssistantError): + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": 123, + }, + ) + + +@pytest.mark.parametrize("context", [None, {}, {"bla": "bleh"}]) +async def test_entry_subentry_no_context( + hass: HomeAssistant, manager: config_entries.ConfigEntries, context: dict | None +) -> None: + """Test starting a subentry flow without "source" in context.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow), pytest.raises(KeyError): + await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context=context, data=None + ) + + +@pytest.mark.parametrize( + ("unique_id", "expected_result"), + [(None, does_not_raise()), ("test", pytest.raises(HomeAssistantError))], +) +async def test_entry_subentry_duplicate( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + unique_id: str | None, + expected_result: AbstractContextManager, +) -> None: + """Test adding a duplicated subentry to an entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry( + domain="test", + data={"first": True}, + subentries_data=[ + config_entries.ConfigSubentryData( + data={}, + subentry_id="blabla", + title="Mock title", + unique_id=unique_id, + ) + ], + ) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + with expected_result: + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": unique_id, + }, + ) + + +async def test_entry_subentry_abort( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can abort subentry flow.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, 
"test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + assert await manager.subentries.async_finish_flow( + flow, {"type": data_entry_flow.FlowResultType.ABORT, "reason": "test"} + ) + + +async def test_entry_subentry_unknown_config_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to start a subentry flow for an unknown config entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) with pytest.raises(config_entries.UnknownEntry): - await manager.options.async_create_flow( - "blah", context={"source": "test"}, data=None + await manager.subentries.async_create_flow( + ("blah", "blah"), context={"source": "test"}, data=None + ) + + +async def test_entry_subentry_deleted_config_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to finish a subentry flow for a deleted config entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + await hass.config_entries.async_remove(entry.entry_id) + + with pytest.raises(config_entries.UnknownEntry): + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": "test", + }, + ) + + +async def test_entry_subentry_unsupported( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to start a subentry flow for a config entry without support.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class 
SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with ( + mock_config_flow("test", TestFlow), + pytest.raises(data_entry_flow.UnknownHandler), + ): + await manager.subentries.async_create_flow( + ( + entry.entry_id, + "unknown", + ), + context={"source": "test"}, + data=None, + ) + + +async def test_entry_subentry_unsupported_subentry_type( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to start a subentry flow for a config entry without support.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + with ( + mock_config_flow("test", TestFlow), + pytest.raises(data_entry_flow.UnknownHandler), + ): + await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None ) @@ -2317,7 +2986,7 @@ async def test_unique_id_persisted( await self.async_set_unique_id("mock-unique-id") return self.async_create_entry(title="mock-title", data={}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -2368,7 +3037,7 @@ async def test_unique_id_existing_entry( return self.async_create_entry(title="mock-title", data={"via": "flow"}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -2414,7 +3083,7 @@ async def test_entry_id_existing_entry( with ( pytest.raises(HomeAssistantError), - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.config_entries.ulid_util.ulid_now", return_value=collide_entry_id, @@ -2457,7 +3126,7 @@ async def test_unique_id_update_existing_entry_without_reload( ) with ( - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2507,7 +3176,7 @@ async def test_unique_id_update_existing_entry_with_reload( ) with ( - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2527,7 +3196,7 @@ async def test_unique_id_update_existing_entry_with_reload( updates["host"] = "2.2.2.2" entry._async_set_state(hass, config_entries.ConfigEntryState.NOT_LOADED, None) with ( - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2584,7 +3253,7 @@ async def test_unique_id_from_discovery_in_setup_retry( # Verify we do not reload from a user source with ( - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2600,7 +3269,7 @@ async def test_unique_id_from_discovery_in_setup_retry( # Verify do reload from a discovery source with ( - 
patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2652,7 +3321,7 @@ async def test_unique_id_not_update_existing_entry( ) with ( - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2686,7 +3355,7 @@ async def test_unique_id_in_progress( await self.async_set_unique_id("mock-unique-id") return self.async_show_form(step_id="discovery") - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): # Create one to be in progress result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} @@ -2726,7 +3395,7 @@ async def test_finish_flow_aborts_progress( return self.async_create_entry(title="yo", data={}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): # Create one to be in progress result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} @@ -2743,8 +3412,24 @@ async def test_finish_flow_aborts_progress( assert len(hass.config_entries.flow.async_progress()) == 0 +@pytest.mark.parametrize( + ("extra_context", "expected_entry_discovery_keys"), + [ + ( + {}, + {}, + ), + ( + {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, + {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, + ), + ], +) async def test_unique_id_ignore( - hass: HomeAssistant, manager: config_entries.ConfigEntries + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + extra_context: dict, + expected_entry_discovery_keys: dict, ) -> None: """Test that we can ignore flows that are in progress and have a unique ID.""" async_setup_entry = AsyncMock(return_value=False) @@ -2761,7 +3446,7 @@ async def test_unique_id_ignore( await self.async_set_unique_id("mock-unique-id") return self.async_show_form(step_id="discovery") - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): # Create one to be in progress result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} @@ -2770,7 +3455,7 @@ async def test_unique_id_ignore( result2 = await manager.flow.async_init( "comp", - context={"source": config_entries.SOURCE_IGNORE}, + context={"source": config_entries.SOURCE_IGNORE} | extra_context, data={"unique_id": "mock-unique-id", "title": "Ignored Title"}, ) @@ -2786,6 +3471,8 @@ async def test_unique_id_ignore( assert entry.source == "ignore" assert entry.unique_id == "mock-unique-id" assert entry.title == "Ignored Title" + assert entry.data == {} + assert entry.discovery_keys == expected_entry_discovery_keys async def test_manual_add_overrides_ignored_entry( @@ -2825,7 +3512,7 @@ async def test_manual_add_overrides_ignored_entry( raise NotImplementedError with ( - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2869,7 +3556,7 @@ async def test_manual_add_overrides_ignored_entry_singleton( return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="title", data={"token": "supersecret"}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): + with mock_config_flow("comp", TestFlow), 
mock_config_flow("invalid_flow", 5): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -2882,6 +3569,254 @@ async def test_manual_add_overrides_ignored_entry_singleton( assert p_entry.data == {"token": "supersecret"} +@pytest.mark.parametrize( + ( + "discovery_keys", + "entry_unique_id", + "flow_context", + "flow_source", + "flow_result", + "updated_discovery_keys", + ), + [ + # No discovery key + ( + {}, + "mock-unique-id", + {}, + config_entries.SOURCE_ZEROCONF, + data_entry_flow.FlowResultType.ABORT, + {}, + ), + # Discovery key added to ignored entry data + ( + {}, + "mock-unique-id", + {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, + config_entries.SOURCE_ZEROCONF, + data_entry_flow.FlowResultType.ABORT, + {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, + ), + # Discovery key added to ignored entry data + ( + {"test": (DiscoveryKey(domain="test", key="bleh", version=1),)}, + "mock-unique-id", + {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, + config_entries.SOURCE_ZEROCONF, + data_entry_flow.FlowResultType.ABORT, + { + "test": ( + DiscoveryKey(domain="test", key="bleh", version=1), + DiscoveryKey(domain="test", key="blah", version=1), + ) + }, + ), + # Discovery key added to ignored entry data + ( + { + "test": ( + DiscoveryKey(domain="test", key="1", version=1), + DiscoveryKey(domain="test", key="2", version=1), + DiscoveryKey(domain="test", key="3", version=1), + DiscoveryKey(domain="test", key="4", version=1), + DiscoveryKey(domain="test", key="5", version=1), + DiscoveryKey(domain="test", key="6", version=1), + DiscoveryKey(domain="test", key="7", version=1), + DiscoveryKey(domain="test", key="8", version=1), + DiscoveryKey(domain="test", key="9", version=1), + DiscoveryKey(domain="test", key="10", version=1), + ) + }, + "mock-unique-id", + {"discovery_key": DiscoveryKey(domain="test", key="11", version=1)}, + config_entries.SOURCE_ZEROCONF, + data_entry_flow.FlowResultType.ABORT, + { + "test": ( + DiscoveryKey(domain="test", key="2", version=1), + DiscoveryKey(domain="test", key="3", version=1), + DiscoveryKey(domain="test", key="4", version=1), + DiscoveryKey(domain="test", key="5", version=1), + DiscoveryKey(domain="test", key="6", version=1), + DiscoveryKey(domain="test", key="7", version=1), + DiscoveryKey(domain="test", key="8", version=1), + DiscoveryKey(domain="test", key="9", version=1), + DiscoveryKey(domain="test", key="10", version=1), + DiscoveryKey(domain="test", key="11", version=1), + ) + }, + ), + # Discovery key already in ignored entry data + ( + {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, + "mock-unique-id", + {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, + config_entries.SOURCE_ZEROCONF, + data_entry_flow.FlowResultType.ABORT, + {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, + ), + # Flow not aborted when unique id is not matching + ( + {}, + "mock-unique-id-2", + {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, + config_entries.SOURCE_ZEROCONF, + data_entry_flow.FlowResultType.FORM, + {}, + ), + ], +) +@pytest.mark.parametrize( + "entry_source", + [ + config_entries.SOURCE_IGNORE, + config_entries.SOURCE_USER, + config_entries.SOURCE_ZEROCONF, + ], +) +async def test_update_discovery_keys( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + discovery_keys: tuple, + entry_source: str, + entry_unique_id: str, + flow_context: dict, + flow_source: str, + 
flow_result: data_entry_flow.FlowResultType, + updated_discovery_keys: tuple, +) -> None: + """Test that discovery keys of an entry can be updated.""" + hass.config.components.add("comp") + entry = MockConfigEntry( + domain="comp", + discovery_keys=discovery_keys, + unique_id=entry_unique_id, + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + mock_integration(hass, MockModule("comp")) + mock_platform(hass, "comp.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_user(self, user_input=None): + """Test user step.""" + await self.async_set_unique_id("mock-unique-id") + self._abort_if_unique_id_configured(reload_on_update=False) + return self.async_show_form(step_id="step2") + + async def async_step_step2(self, user_input=None): + raise NotImplementedError + + async def async_step_zeroconf(self, discovery_info=None): + """Test zeroconf step.""" + return await self.async_step_user(discovery_info) + + with ( + mock_config_flow("comp", TestFlow), + patch( + "homeassistant.config_entries.ConfigEntries.async_reload" + ) as async_reload, + ): + result = await manager.flow.async_init( + "comp", context={"source": flow_source} | flow_context + ) + await hass.async_block_till_done() + + assert result["type"] == flow_result + assert entry.data == {} + assert entry.discovery_keys == updated_discovery_keys + assert len(async_reload.mock_calls) == 0 + + +@pytest.mark.parametrize( + ( + "discovery_keys", + "entry_source", + "entry_unique_id", + "flow_context", + "flow_source", + "flow_result", + "updated_discovery_keys", + ), + [ + # Flow not aborted when user initiated flow + ( + {}, + config_entries.SOURCE_IGNORE, + "mock-unique-id-2", + {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, + config_entries.SOURCE_USER, + data_entry_flow.FlowResultType.FORM, + {}, + ), + ], +) +async def test_update_discovery_keys_2( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + discovery_keys: tuple, + entry_source: str, + entry_unique_id: str, + flow_context: dict, + flow_source: str, + flow_result: data_entry_flow.FlowResultType, + updated_discovery_keys: tuple, +) -> None: + """Test that discovery keys of an entry can be updated.""" + hass.config.components.add("comp") + entry = MockConfigEntry( + domain="comp", + discovery_keys=discovery_keys, + unique_id=entry_unique_id, + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + mock_integration(hass, MockModule("comp")) + mock_platform(hass, "comp.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_user(self, user_input=None): + """Test user step.""" + await self.async_set_unique_id("mock-unique-id") + self._abort_if_unique_id_configured(reload_on_update=False) + return self.async_show_form(step_id="step2") + + async def async_step_step2(self, user_input=None): + raise NotImplementedError + + async def async_step_zeroconf(self, discovery_info=None): + """Test zeroconf step.""" + return await self.async_step_user(discovery_info) + + with ( + mock_config_flow("comp", TestFlow), + patch( + "homeassistant.config_entries.ConfigEntries.async_reload" + ) as async_reload, + ): + result = await manager.flow.async_init( + "comp", context={"source": flow_source} | flow_context + ) + await hass.async_block_till_done() + + assert result["type"] == flow_result + assert entry.data == {} + assert 
entry.discovery_keys == updated_discovery_keys + assert len(async_reload.mock_calls) == 0 + + async def test_async_current_entries_does_not_skip_ignore_non_user( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -2910,7 +3845,7 @@ async def test_async_current_entries_does_not_skip_ignore_non_user( return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="title", data={"token": "supersecret"}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): + with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_IMPORT} ) @@ -2947,7 +3882,7 @@ async def test_async_current_entries_explicit_skip_ignore( return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="title", data={"token": "supersecret"}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): + with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_IMPORT} ) @@ -2988,7 +3923,7 @@ async def test_async_current_entries_explicit_include_ignore( return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="title", data={"token": "supersecret"}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): + with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_IMPORT} ) @@ -2997,129 +3932,6 @@ async def test_async_current_entries_explicit_include_ignore( assert len(mock_setup_entry.mock_calls) == 0 -async def test_unignore_step_form( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them.""" - async_setup_entry = AsyncMock(return_value=True) - mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) - mock_platform(hass, "comp.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - VERSION = 1 - - async def async_step_unignore(self, user_input): - """Test unignore step.""" - unique_id = user_input["unique_id"] - await self.async_set_unique_id(unique_id) - return self.async_show_form(step_id="discovery") - - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): - result = await manager.flow.async_init( - "comp", - context={"source": config_entries.SOURCE_IGNORE}, - data={"unique_id": "mock-unique-id", "title": "Ignored Title"}, - ) - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - - entry = hass.config_entries.async_entries("comp")[0] - assert entry.source == "ignore" - assert entry.unique_id == "mock-unique-id" - assert entry.domain == "comp" - assert entry.title == "Ignored Title" - - await manager.async_remove(entry.entry_id) - - # But after a 'tick' the unignore step has run and we can see an active flow again. 
- await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress_by_handler("comp")) == 1 - - # and still not config entries - assert len(hass.config_entries.async_entries("comp")) == 0 - - -async def test_unignore_create_entry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them.""" - async_setup_entry = AsyncMock(return_value=True) - mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) - mock_platform(hass, "comp.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - VERSION = 1 - - async def async_step_unignore(self, user_input): - """Test unignore step.""" - unique_id = user_input["unique_id"] - await self.async_set_unique_id(unique_id) - return self.async_create_entry(title="yo", data={}) - - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): - result = await manager.flow.async_init( - "comp", - context={"source": config_entries.SOURCE_IGNORE}, - data={"unique_id": "mock-unique-id", "title": "Ignored Title"}, - ) - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - - entry = hass.config_entries.async_entries("comp")[0] - assert entry.source == "ignore" - assert entry.unique_id == "mock-unique-id" - assert entry.domain == "comp" - assert entry.title == "Ignored Title" - - await manager.async_remove(entry.entry_id) - - # But after a 'tick' the unignore step has run and we can see a config entry. - await hass.async_block_till_done() - entry = hass.config_entries.async_entries("comp")[0] - assert entry.source == config_entries.SOURCE_UNIGNORE - assert entry.unique_id == "mock-unique-id" - assert entry.title == "yo" - - # And still no active flow - assert len(hass.config_entries.flow.async_progress_by_handler("comp")) == 0 - - -async def test_unignore_default_impl( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that resdicovery is a no-op by default.""" - async_setup_entry = AsyncMock(return_value=True) - mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) - mock_platform(hass, "comp.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - VERSION = 1 - - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): - result = await manager.flow.async_init( - "comp", - context={"source": config_entries.SOURCE_IGNORE}, - data={"unique_id": "mock-unique-id", "title": "Ignored Title"}, - ) - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - - entry = hass.config_entries.async_entries("comp")[0] - assert entry.source == "ignore" - assert entry.unique_id == "mock-unique-id" - assert entry.domain == "comp" - assert entry.title == "Ignored Title" - - await manager.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.config_entries.async_entries("comp")) == 0 - assert len(hass.config_entries.flow.async_progress()) == 0 - - async def test_partial_flows_hidden( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -3151,7 +3963,7 @@ async def test_partial_flows_hidden( async def async_step_someform(self, user_input=None): raise NotImplementedError - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): # Start a config entry flow and wait for it to be blocked init_task = asyncio.ensure_future( manager.flow.async_init( @@ -3217,7 +4029,7 @@ async def 
test_async_setup_init_entry( """Test import step creating entry.""" return self.async_create_entry(title="title", data={}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): assert await async_setup_component(hass, "comp", {}) await hass.async_block_till_done() @@ -3278,7 +4090,7 @@ async def test_async_setup_init_entry_completes_before_loaded_event_fires( # This test must not use hass.async_block_till_done() # as its explicitly testing what happens without it - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): assert await async_setup_component(hass, "comp", {}) assert len(async_setup_entry.mock_calls) == 1 assert load_events[0].event_type == EVENT_COMPONENT_LOADED @@ -3334,7 +4146,7 @@ async def test_async_setup_update_entry(hass: HomeAssistant) -> None: ) return self.async_abort(reason="yo") - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): assert await async_setup_component(hass, "comp", {}) entries = hass.config_entries.async_entries("comp") @@ -3383,7 +4195,7 @@ async def test_flow_with_default_discovery( return self.async_create_entry(title="yo", data={}) - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): # Create one to be in progress result = await manager.flow.async_init( "comp", context={"source": discovery_source[0]}, data=discovery_source[1] @@ -3433,7 +4245,7 @@ async def test_flow_with_default_discovery_with_unique_id( async def async_step_mock(self, user_input=None): raise NotImplementedError - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY} ) @@ -3460,7 +4272,7 @@ async def test_default_discovery_abort_existing_entries( VERSION = 1 - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY} ) @@ -3489,7 +4301,7 @@ async def test_default_discovery_in_progress( async def async_step_mock(self, user_input=None): raise NotImplementedError - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY}, @@ -3529,7 +4341,7 @@ async def test_default_discovery_abort_on_new_unique_flow( async def async_step_mock(self, user_input=None): raise NotImplementedError - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): # First discovery with default, no unique ID result2 = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={} @@ -3576,7 +4388,7 @@ async def test_default_discovery_abort_on_user_flow_complete( async def async_step_mock(self, user_input=None): raise NotImplementedError - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): # First discovery with default, no unique ID flow1 = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={} @@ -3640,7 +4452,7 @@ async def test_flow_same_device_multiple_sources( return self.async_show_form(step_id="link") return self.async_create_entry(title="title", data={"token": "supersecret"}) - 
with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): # Create one to be in progress flow1 = manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_ZEROCONF} @@ -3693,21 +4505,20 @@ async def test_updating_entry_with_and_without_changes( assert manager.async_update_entry(entry) is False - for change in ( - {"data": {"second": True, "third": 456}}, - {"data": {"second": True}}, - {"minor_version": 2}, - {"options": {"hello": True}}, - {"pref_disable_new_entities": True}, - {"pref_disable_polling": True}, - {"title": "sometitle"}, - {"unique_id": "abcd1234"}, - {"version": 2}, + for change, expected_value in ( + ({"data": {"second": True, "third": 456}}, {"second": True, "third": 456}), + ({"data": {"second": True}}, {"second": True}), + ({"minor_version": 2}, 2), + ({"options": {"hello": True}}, {"hello": True}), + ({"pref_disable_new_entities": True}, True), + ({"pref_disable_polling": True}, True), + ({"title": "sometitle"}, "sometitle"), + ({"unique_id": "abcd1234"}, "abcd1234"), + ({"version": 2}, 2), ): assert manager.async_update_entry(entry, **change) is True key = next(iter(change)) - value = next(iter(change.values())) - assert getattr(entry, key) == value + assert getattr(entry, key) == expected_value assert manager.async_update_entry(entry, **change) is False assert manager.async_entry_for_domain_unique_id("test", "abc123") is None @@ -4159,7 +4970,7 @@ async def test_async_abort_entries_match( self._async_abort_entries_match(matchers) return self.async_abort(reason="no_match") - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): + with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -4297,29 +5108,28 @@ async def test_loading_old_data( assert entry.pref_disable_new_entities is True -async def test_deprecated_disabled_by_str_ctor( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: +async def test_deprecated_disabled_by_str_ctor() -> None: """Test deprecated str disabled_by constructor enumizes and logs a warning.""" - entry = MockConfigEntry(disabled_by=config_entries.ConfigEntryDisabler.USER.value) - assert entry.disabled_by is config_entries.ConfigEntryDisabler.USER - assert " str for config entry disabled_by. This is deprecated " in caplog.text + with pytest.raises( + TypeError, match="disabled_by must be a ConfigEntryDisabler value, got user" + ): + MockConfigEntry(disabled_by=config_entries.ConfigEntryDisabler.USER.value) async def test_deprecated_disabled_by_str_set( hass: HomeAssistant, manager: config_entries.ConfigEntries, - caplog: pytest.LogCaptureFixture, ) -> None: """Test deprecated str set disabled_by enumizes and logs a warning.""" entry = MockConfigEntry(domain="comp") entry.add_to_manager(manager) hass.config.components.add("comp") - assert await manager.async_set_disabled_by( - entry.entry_id, config_entries.ConfigEntryDisabler.USER.value - ) - assert entry.disabled_by is config_entries.ConfigEntryDisabler.USER - assert " str for config entry disabled_by. 
This is deprecated " in caplog.text + with pytest.raises( + TypeError, match="disabled_by must be a ConfigEntryDisabler value, got user" + ): + await manager.async_set_disabled_by( + entry.entry_id, config_entries.ConfigEntryDisabler.USER.value + ) async def test_entry_reload_concurrency( @@ -4456,7 +5266,7 @@ async def test_unique_id_update_while_setup_in_progress( ) with ( - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -4562,6 +5372,75 @@ async def test_reauth( assert len(hass.config_entries.flow.async_progress()) == 1 +@pytest.mark.parametrize( + "source", [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE] +) +async def test_reauth_reconfigure_missing_entry( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + source: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the async_reauth_helper.""" + entry = MockConfigEntry(title="test_title", domain="test") + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + await manager.async_setup(entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises( + RuntimeError, + match=f"Detected code that initialises a {source} flow without a link " + "to the config entry. Please report this issue", + ): + await manager.flow.async_init("test", context={"source": source}) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 0 + + +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +@pytest.mark.parametrize( + "source", [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE] +) +async def test_reauth_reconfigure_missing_entry_component( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + source: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the async_reauth_helper.""" + entry = MockConfigEntry(title="test_title", domain="test") + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + await manager.async_setup(entry.entry_id) + await hass.async_block_till_done() + + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + await manager.flow.async_init("test", context={"source": source}) + await hass.async_block_till_done() + + # Flow still created, but deprecation logged + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["source"] == source + + assert ( + f"Detected that integration 'hue' initialises a {source} flow" + " without a link to the config entry at homeassistant/components" in caplog.text + ) + + async def test_reconfigure( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -4578,38 +5457,66 @@ async def test_reconfigure( await manager.async_setup(entry.entry_id) await hass.async_block_till_done() - flow = hass.config_entries.flow - with patch.object(flow, "async_init", wraps=flow.async_init) as mock_init: - entry.async_start_reconfigure( - hass, - context={"extra_context": "some_extra_context"}, - data={"extra_data": 1234}, + def _async_start_reconfigure(config_entry: MockConfigEntry) -> None: + hass.async_create_task( 
+ manager.flow.async_init( + config_entry.domain, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + }, + ), + f"config entry reconfigure {config_entry.title} " + f"{config_entry.domain} {config_entry.entry_id}", ) - await hass.async_block_till_done() + + _async_start_reconfigure(entry) + await hass.async_block_till_done() flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 assert flows[0]["context"]["entry_id"] == entry.entry_id assert flows[0]["context"]["source"] == config_entries.SOURCE_RECONFIGURE - assert flows[0]["context"]["title_placeholders"] == {"name": "test_title"} - assert flows[0]["context"]["extra_context"] == "some_extra_context" - - assert mock_init.call_args.kwargs["data"]["extra_data"] == 1234 assert entry.entry_id != entry2.entry_id - # Check that we can't start duplicate reconfigure flows - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + # Check that we can start duplicate reconfigure flows + # (may need revisiting) + _async_start_reconfigure(entry) await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 1 - - # Check that we can't start duplicate reconfigure flows when the context is different - entry.async_start_reconfigure(hass, {"diff": "diff"}) - await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 1 + assert len(hass.config_entries.flow.async_progress()) == 2 # Check that we can start a reconfigure flow for a different entry - entry2.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + _async_start_reconfigure(entry2) + await hass.async_block_till_done() + assert len(hass.config_entries.flow.async_progress()) == 3 + + # Abort all existing flows + for flow in hass.config_entries.flow.async_progress(): + hass.config_entries.flow.async_abort(flow["flow_id"]) + await hass.async_block_till_done() + + # Check that we can start duplicate reconfigure flows + # without blocking between flows + # (may need revisiting) + _async_start_reconfigure(entry) + _async_start_reconfigure(entry) + _async_start_reconfigure(entry) + _async_start_reconfigure(entry) + await hass.async_block_till_done() + assert len(hass.config_entries.flow.async_progress()) == 4 + + # Abort all existing flows + for flow in hass.config_entries.flow.async_progress(): + hass.config_entries.flow.async_abort(flow["flow_id"]) + await hass.async_block_till_done() + + # Check that we can start reconfigure flows with active reauth flow + # (may need revisiting) + entry.async_start_reauth(hass, {"extra_context": "some_extra_context"}) + await hass.async_block_till_done() + assert len(hass.config_entries.flow.async_progress()) == 1 + _async_start_reconfigure(entry) await hass.async_block_till_done() assert len(hass.config_entries.flow.async_progress()) == 2 @@ -4618,35 +5525,8 @@ async def test_reconfigure( hass.config_entries.flow.async_abort(flow["flow_id"]) await hass.async_block_till_done() - # Check that we can't start duplicate reconfigure flows - # without blocking between flows - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) - await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 1 - - # Abort all existing 
flows - for flow in hass.config_entries.flow.async_progress(): - hass.config_entries.flow.async_abort(flow["flow_id"]) - await hass.async_block_till_done() - - # Check that we can't start reconfigure flows with active reauth flow - entry.async_start_reauth(hass, {"extra_context": "some_extra_context"}) - await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 1 - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) - await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 1 - - # Abort all existing flows - for flow in hass.config_entries.flow.async_progress(): - hass.config_entries.flow.async_abort(flow["flow_id"]) - await hass.async_block_till_done() - # Check that we can't start reauth flows with active reconfigure flow - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + _async_start_reconfigure(entry) await hass.async_block_till_done() assert len(hass.config_entries.flow.async_progress()) == 1 entry.async_start_reauth(hass, {"extra_context": "some_extra_context"}) @@ -4753,20 +5633,46 @@ async def test_async_wait_component_startup(hass: HomeAssistant) -> None: assert "test" in hass.config.components -async def test_options_flow_options_not_mutated() -> None: +@pytest.mark.parametrize( + "integration_frame_path", + ["homeassistant/components/my_integration", "homeassistant.core"], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_options_flow_with_config_entry_core() -> None: + """Test that OptionsFlowWithConfigEntry cannot be used in core.""" + entry = MockConfigEntry( + domain="hue", + data={"first": True}, + options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, + ) + + with pytest.raises(RuntimeError, match="inherits from OptionsFlowWithConfigEntry"): + _ = config_entries.OptionsFlowWithConfigEntry(entry) + + +@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_options_flow_with_config_entry(caplog: pytest.LogCaptureFixture) -> None: """Test that OptionsFlowWithConfigEntry doesn't mutate entry options.""" entry = MockConfigEntry( - domain="test", + domain="hue", data={"first": True}, options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, ) options_flow = config_entries.OptionsFlowWithConfigEntry(entry) + assert caplog.text == "" # No deprecation warning for custom components - options_flow._options["sub_dict"]["2"] = "two" - options_flow._options["sub_list"].append("two") + # Ensure available at startup + assert options_flow.config_entry is entry + assert options_flow.options == entry.options - assert options_flow._options == { + options_flow.options["sub_dict"]["2"] = "two" + options_flow.options["sub_list"].append("two") + + # Ensure it does not mutate the entry options + assert options_flow.options == { "sub_dict": {"1": "one", "2": "two"}, "sub_list": ["one", "two"], } @@ -4794,7 +5700,9 @@ async def test_initializing_flows_canceled_on_shutdown( config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler} ): task = asyncio.create_task( - manager.flow.async_init("test", context={"source": "reauth"}) + manager.flow.async_init( + "test", context={"source": "reauth", "entry_id": "abc"} + ) ) await hass.async_block_till_done() manager.flow.async_shutdown() @@ -4930,71 +5838,153 @@ def test_raise_trying_to_add_same_config_entry_twice( @pytest.mark.parametrize( ( 
- "title", - "unique_id", - "data_vendor", - "options_vendor", "kwargs", + "expected_title", + "expected_unique_id", + "expected_data", + "expected_options", "calls_entry_load_unload", + "raises", ), [ ( - ("Test", "Updated title"), - ("1234", "5678"), - ("data", "data2"), - ("options", "options2"), - {}, + { + "unique_id": "5678", + "title": "Updated title", + "data": {"vendor": "data2"}, + "options": {"vendor": "options2"}, + }, + "Updated title", + "5678", + {"vendor": "data2"}, + {"vendor": "options2"}, (2, 1), + None, ), ( - ("Test", "Test"), - ("1234", "1234"), - ("data", "data"), - ("options", "options"), - {}, + { + "unique_id": "1234", + "title": "Test", + "data": {"vendor": "data"}, + "options": {"vendor": "options"}, + }, + "Test", + "1234", + {"vendor": "data"}, + {"vendor": "options"}, (2, 1), + None, ), ( - ("Test", "Updated title"), - ("1234", "5678"), - ("data", "data2"), - ("options", "options2"), - {"reload_even_if_entry_is_unchanged": True}, + { + "unique_id": "5678", + "title": "Updated title", + "data": {"vendor": "data2"}, + "options": {"vendor": "options2"}, + "reload_even_if_entry_is_unchanged": True, + }, + "Updated title", + "5678", + {"vendor": "data2"}, + {"vendor": "options2"}, (2, 1), + None, ), ( - ("Test", "Test"), - ("1234", "1234"), - ("data", "data"), - ("options", "options"), - {"reload_even_if_entry_is_unchanged": False}, + { + "unique_id": "1234", + "title": "Test", + "data": {"vendor": "data"}, + "options": {"vendor": "options"}, + "reload_even_if_entry_is_unchanged": False, + }, + "Test", + "1234", + {"vendor": "data"}, + {"vendor": "options"}, (1, 0), + None, + ), + ( + {}, + "Test", + "1234", + {"vendor": "data"}, + {"vendor": "options"}, + (2, 1), + None, + ), + ( + {"data": {"buyer": "me"}, "options": {}}, + "Test", + "1234", + {"buyer": "me"}, + {}, + (2, 1), + None, + ), + ( + {"data_updates": {"buyer": "me"}}, + "Test", + "1234", + {"vendor": "data", "buyer": "me"}, + {"vendor": "options"}, + (2, 1), + None, + ), + ( + { + "unique_id": "5678", + "title": "Updated title", + "data": {"vendor": "data2"}, + "options": {"vendor": "options2"}, + "data_updates": {"buyer": "me"}, + }, + "Test", + "1234", + {"vendor": "data"}, + {"vendor": "options"}, + (1, 0), + ValueError, ), ], ids=[ "changed_entry_default", "unchanged_entry_default", "changed_entry_explicit_reload", - "changed_entry_no_reload", + "unchanged_entry_no_reload", + "no_kwargs", + "replace_data", + "update_data", + "update_and_data_raises", + ], +) +@pytest.mark.parametrize( + ("source", "reason"), + [ + (config_entries.SOURCE_REAUTH, "reauth_successful"), + (config_entries.SOURCE_RECONFIGURE, "reconfigure_successful"), ], ) async def test_update_entry_and_reload( hass: HomeAssistant, - manager: config_entries.ConfigEntries, - title: tuple[str, str], - unique_id: tuple[str, str], - data_vendor: tuple[str, str], - options_vendor: tuple[str, str], + source: str, + reason: str, + expected_title: str, + expected_unique_id: str, + expected_data: dict[str, Any], + expected_options: dict[str, Any], kwargs: dict[str, Any], calls_entry_load_unload: tuple[int, int], + raises: type[Exception] | None, ) -> None: """Test updating an entry and reloading.""" entry = MockConfigEntry( domain="comp", - unique_id=unique_id[0], - title=title[0], - data={"vendor": data_vendor[0]}, - options={"vendor": options_vendor[0]}, + unique_id="1234", + title="Test", + data={"vendor": "data"}, + options={"vendor": "options"}, ) entry.add_to_hass(hass) @@ -5015,53 +6005,174 @@ async def test_update_entry_and_reload( 
async def async_step_reauth(self, data): """Mock Reauth.""" - return self.async_update_reload_and_abort( - entry=entry, - unique_id=unique_id[1], - title=title[1], - data={"vendor": data_vendor[1]}, - options={"vendor": options_vendor[1]}, - **kwargs, - ) + return self.async_update_reload_and_abort(entry, **kwargs) - with patch.dict(config_entries.HANDLERS, {"comp": MockFlowHandler}): - task = await manager.flow.async_init("comp", context={"source": "reauth"}) - await hass.async_block_till_done() + async def async_step_reconfigure(self, data): + """Mock Reconfigure.""" + return self.async_update_reload_and_abort(entry, **kwargs) - assert entry.title == title[1] - assert entry.unique_id == unique_id[1] - assert entry.data == {"vendor": data_vendor[1]} - assert entry.options == {"vendor": options_vendor[1]} - assert entry.state == config_entries.ConfigEntryState.LOADED - assert task["type"] == FlowResultType.ABORT - assert task["reason"] == "reauth_successful" - # Assert entry was reloaded - assert len(comp.async_setup_entry.mock_calls) == calls_entry_load_unload[0] - assert len(comp.async_unload_entry.mock_calls) == calls_entry_load_unload[1] + err: Exception + with mock_config_flow("comp", MockFlowHandler): + try: + if source == config_entries.SOURCE_REAUTH: + result = await entry.start_reauth_flow(hass) + elif source == config_entries.SOURCE_RECONFIGURE: + result = await entry.start_reconfigure_flow(hass) + except Exception as ex: # noqa: BLE001 + err = ex + + await hass.async_block_till_done() + + assert entry.title == expected_title + assert entry.unique_id == expected_unique_id + assert entry.data == expected_data + assert entry.options == expected_options + assert entry.state == config_entries.ConfigEntryState.LOADED + if raises: + assert isinstance(err, raises) + else: + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == reason + # Assert entry was reloaded + assert len(comp.async_setup_entry.mock_calls) == calls_entry_load_unload[0] + assert len(comp.async_unload_entry.mock_calls) == calls_entry_load_unload[1] @pytest.mark.parametrize("unique_id", [["blah", "bleh"], {"key": "value"}]) -async def test_unhashable_unique_id( +async def test_unhashable_unique_id_fails( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, unique_id: Any ) -> None: - """Test the ConfigEntryItems user dict handles unhashable unique_id.""" + """Test the ConfigEntryItems user dict fails unhashable unique_id.""" entries = config_entries.ConfigEntryItems(hass) entry = config_entries.ConfigEntry( data={}, + discovery_keys={}, domain="test", entry_id="mock_id", minor_version=1, options={}, source="test", + subentries_data=(), + title="title", + unique_id=unique_id, + version=1, + ) + + unique_id_string = re.escape(str(unique_id)) + with pytest.raises( + HomeAssistantError, + match=f"The entry unique id {unique_id_string} is not a string.", + ): + entries[entry.entry_id] = entry + + assert entry.entry_id not in entries + + with pytest.raises( + HomeAssistantError, + match=f"The entry unique id {unique_id_string} is not a string.", + ): + entries.get_entry_by_domain_and_unique_id("test", unique_id) + + +@pytest.mark.parametrize("unique_id", [["blah", "bleh"], {"key": "value"}]) +async def test_unhashable_unique_id_fails_on_update( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, unique_id: Any +) -> None: + """Test the ConfigEntryItems user dict fails non-hashable unique_id on update.""" + entries = config_entries.ConfigEntryItems(hass) + entry = config_entries.ConfigEntry( + 
data={}, + discovery_keys={}, + domain="test", + entry_id="mock_id", + minor_version=1, + options={}, + source="test", + subentries_data=(), + title="title", + unique_id="123", + version=1, + ) + + entries[entry.entry_id] = entry + assert entry.entry_id in entries + + unique_id_string = re.escape(str(unique_id)) + with pytest.raises( + HomeAssistantError, + match=f"The entry unique id {unique_id_string} is not a string.", + ): + entries.update_unique_id(entry, unique_id) + + +async def test_string_unique_id_no_warning( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the ConfigEntryItems user dict string unique id doesn't log warning.""" + entries = config_entries.ConfigEntryItems(hass) + entry = config_entries.ConfigEntry( + data={}, + discovery_keys={}, + domain="test", + entry_id="mock_id", + minor_version=1, + options={}, + source="test", + subentries_data=(), + title="title", + unique_id="123", + version=1, + ) + + entries[entry.entry_id] = entry + + assert ( + "Config entry 'title' from integration test has an invalid unique_id" + ) not in caplog.text + + assert entry.entry_id in entries + assert entries[entry.entry_id] is entry + assert entries.get_entry_by_domain_and_unique_id("test", "123") == entry + del entries[entry.entry_id] + assert not entries + assert entries.get_entry_by_domain_and_unique_id("test", "123") is None + + +@pytest.mark.parametrize( + ("unique_id", "type_name"), + [ + (123, "int"), + (2.3, "float"), + ], +) +async def test_hashable_unique_id( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + unique_id: Any, + type_name: str, +) -> None: + """Test the ConfigEntryItems user dict handles hashable non string unique_id.""" + entries = config_entries.ConfigEntryItems(hass) + entry = config_entries.ConfigEntry( + data={}, + discovery_keys={}, + domain="test", + entry_id="mock_id", + minor_version=1, + options={}, + source="test", + subentries_data=(), title="title", unique_id=unique_id, version=1, ) entries[entry.entry_id] = entry + assert ( - "Config entry 'title' from integration test has an invalid unique_id " - f"'{unique_id!s}'" + "Config entry 'title' from integration test has an invalid unique_id" + f" '{unique_id}' of type {type_name} when a string is expected" ) in caplog.text assert entry.entry_id in entries @@ -5072,62 +6183,56 @@ async def test_unhashable_unique_id( assert entries.get_entry_by_domain_and_unique_id("test", unique_id) is None -@pytest.mark.parametrize("unique_id", [123]) -async def test_hashable_non_string_unique_id( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, unique_id: Any +async def test_no_unique_id_no_warning( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, ) -> None: - """Test the ConfigEntryItems user dict handles hashable non string unique_id.""" + """Test the ConfigEntryItems user dict don't log warning with no unique id.""" entries = config_entries.ConfigEntryItems(hass) entry = config_entries.ConfigEntry( data={}, + discovery_keys={}, domain="test", entry_id="mock_id", minor_version=1, options={}, source="test", + subentries_data=(), title="title", - unique_id=unique_id, + unique_id=None, version=1, ) entries[entry.entry_id] = entry + assert ( "Config entry 'title' from integration test has an invalid unique_id" ) not in caplog.text assert entry.entry_id in entries assert entries[entry.entry_id] is entry - assert entries.get_entry_by_domain_and_unique_id("test", unique_id) == entry - del entries[entry.entry_id] - assert not entries - assert 
entries.get_entry_by_domain_and_unique_id("test", unique_id) is None @pytest.mark.parametrize( - ("source", "user_input", "expected_result"), + ("context", "user_input", "expected_result"), [ ( - config_entries.SOURCE_IGNORE, + {"source": config_entries.SOURCE_IGNORE}, {"unique_id": "blah", "title": "blah"}, {"type": data_entry_flow.FlowResultType.CREATE_ENTRY}, ), ( - config_entries.SOURCE_REAUTH, + {"source": config_entries.SOURCE_REAUTH, "entry_id": "1234"}, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - config_entries.SOURCE_RECONFIGURE, + {"source": config_entries.SOURCE_RECONFIGURE, "entry_id": "1234"}, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - config_entries.SOURCE_UNIGNORE, - None, - {"type": data_entry_flow.FlowResultType.ABORT, "reason": "not_implemented"}, - ), - ( - config_entries.SOURCE_USER, + {"source": config_entries.SOURCE_USER}, None, { "type": data_entry_flow.FlowResultType.ABORT, @@ -5140,7 +6245,7 @@ async def test_hashable_non_string_unique_id( async def test_starting_config_flow_on_single_config_entry( hass: HomeAssistant, manager: config_entries.ConfigEntries, - source: str, + context: dict[str, Any], user_input: dict, expected_result: dict, ) -> None: @@ -5163,6 +6268,7 @@ async def test_starting_config_flow_on_single_config_entry( entry = MockConfigEntry( domain="comp", unique_id="1234", + entry_id="1234", title="Test", data={"vendor": "data"}, options={"vendor": "options"}, @@ -5171,6 +6277,7 @@ async def test_starting_config_flow_on_single_config_entry( ignored_entry = MockConfigEntry( domain="comp", unique_id="2345", + entry_id="2345", title="Test", data={"vendor": "data"}, options={"vendor": "options"}, @@ -5185,47 +6292,50 @@ async def test_starting_config_flow_on_single_config_entry( return_value=integration, ): result = await hass.config_entries.flow.async_init( - "comp", context={"source": source}, data=user_input + "comp", context=context, data=user_input ) - for key in expected_result: - assert result[key] == expected_result[key] + for key, value in expected_result.items(): + assert result[key] == value @pytest.mark.parametrize( - ("source", "user_input", "expected_result"), + ("context", "user_input", "expected_result"), [ ( - config_entries.SOURCE_IGNORE, + {"source": config_entries.SOURCE_IGNORE}, {"unique_id": "blah", "title": "blah"}, {"type": data_entry_flow.FlowResultType.CREATE_ENTRY}, ), ( - config_entries.SOURCE_REAUTH, + {"source": config_entries.SOURCE_REAUTH, "entry_id": "2345"}, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - config_entries.SOURCE_RECONFIGURE, + {"source": config_entries.SOURCE_RECONFIGURE, "entry_id": "2345"}, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - config_entries.SOURCE_UNIGNORE, + {"source": config_entries.SOURCE_USER}, None, {"type": data_entry_flow.FlowResultType.ABORT, "reason": "not_implemented"}, ), ( - config_entries.SOURCE_USER, + {"source": config_entries.SOURCE_ZEROCONF}, None, - {"type": data_entry_flow.FlowResultType.ABORT, "reason": "not_implemented"}, + { + "type": data_entry_flow.FlowResultType.ABORT, + "reason": "single_instance_allowed", + }, ), ], ) async def test_starting_config_flow_on_single_config_entry_2( hass: HomeAssistant, manager: config_entries.ConfigEntries, - source: str, + context: dict[str, Any], user_input: dict, expected_result: dict, ) -> None: @@ -5248,6 +6358,7 @@ async def 
test_starting_config_flow_on_single_config_entry_2( ignored_entry = MockConfigEntry( domain="comp", unique_id="2345", + entry_id="2345", title="Test", data={"vendor": "data"}, options={"vendor": "options"}, @@ -5262,11 +6373,11 @@ async def test_starting_config_flow_on_single_config_entry_2( return_value=integration, ): result = await hass.config_entries.flow.async_init( - "comp", context={"source": source}, data=user_input + "comp", context=context, data=user_input ) - for key in expected_result: - assert result[key] == expected_result[key] + for key, value in expected_result.items(): + assert result[key] == value async def test_avoid_adding_second_config_entry_on_single_config_entry( @@ -5306,7 +6417,7 @@ async def test_avoid_adding_second_config_entry_on_single_config_entry( "homeassistant.loader.async_get_integration", return_value=integration, ), - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), ): # Start a flow result = await manager.flow.async_init( @@ -5333,8 +6444,20 @@ async def test_avoid_adding_second_config_entry_on_single_config_entry( assert result["translation_domain"] == HOMEASSISTANT_DOMAIN +@pytest.mark.parametrize( + ("flow_1_unique_id", "flow_2_unique_id"), + [ + (None, None), + ("very_unique", "very_unique"), + (None, config_entries.DEFAULT_DISCOVERY_UNIQUE_ID), + ("very_unique", config_entries.DEFAULT_DISCOVERY_UNIQUE_ID), + ], +) async def test_in_progress_get_canceled_when_entry_is_created( - hass: HomeAssistant, manager: config_entries.ConfigEntries + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + flow_1_unique_id: str | None, + flow_2_unique_id: str | None, ) -> None: """Test that we abort all in progress flows when a new entry is created on a single instance only integration.""" integration = loader.Integration( @@ -5362,10 +6485,19 @@ async def test_in_progress_get_canceled_when_entry_is_created( if user_input is not None: return self.async_create_entry(title="Test Title", data=user_input) + await self.async_set_unique_id(flow_1_unique_id, raise_on_progress=False) + return self.async_show_form(step_id="user") + + async def async_step_zeroconfg(self, user_input=None): + """Test user step.""" + if user_input is not None: + return self.async_create_entry(title="Test Title", data=user_input) + + await self.async_set_unique_id(flow_2_unique_id, raise_on_progress=False) return self.async_show_form(step_id="user") with ( - patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), + mock_config_flow("comp", TestFlow), patch( "homeassistant.loader.async_get_integration", return_value=integration, @@ -5437,13 +6569,8 @@ async def test_report_direct_mutation_of_config_entry( entry = MockConfigEntry(domain="test") entry.add_to_hass(hass) - setattr(entry, field, "new_value") - - assert ( - f'Detected code that sets "{field}" directly to update a config entry. ' - "This is deprecated and will stop working in Home Assistant 2024.9, " - "it should be updated to use async_update_entry instead. Please report this issue." - ) in caplog.text + with pytest.raises(AttributeError): + setattr(entry, field, "new_value") async def test_updating_non_added_entry_raises(hass: HomeAssistant) -> None: @@ -5715,7 +6842,7 @@ async def test_non_awaited_async_forward_entry_setups( "test with title: Mock Title and entry_id: test2, during setup without " "awaiting async_forward_entry_setups, which can cause the setup lock " "to be released before the setup is done. This will stop working in " - "Home Assistant 2025.1. 
Please report this issue." + "Home Assistant 2025.1, please report this issue" ) in caplog.text @@ -5787,7 +6914,7 @@ async def test_non_awaited_async_forward_entry_setup( "test with title: Mock Title and entry_id: test2, during setup without " "awaiting async_forward_entry_setup, which can cause the setup lock " "to be released before the setup is done. This will stop working in " - "Home Assistant 2025.1. Please report this issue." + "Home Assistant 2025.1, please report this issue" ) in caplog.text @@ -5986,6 +7113,7 @@ async def test_migration_from_1_2( "created_at": "1970-01-01T00:00:00+00:00", "data": {}, "disabled_by": None, + "discovery_keys": {}, "domain": "sun", "entry_id": "0a8bd02d0d58c7debf5daf7941c9afe2", "minor_version": 1, @@ -5994,6 +7122,7 @@ async def test_migration_from_1_2( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "import", + "subentries": {}, "title": "Sun", "unique_id": None, "version": 1, @@ -6001,3 +7130,1088 @@ async def test_migration_from_1_2( ] }, } + + +async def test_async_loaded_entries( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can get loaded config entries.""" + entry1 = MockConfigEntry(domain="comp") + entry1.add_to_hass(hass) + entry2 = MockConfigEntry(domain="comp", source=config_entries.SOURCE_IGNORE) + entry2.add_to_hass(hass) + entry3 = MockConfigEntry( + domain="comp", disabled_by=config_entries.ConfigEntryDisabler.USER + ) + entry3.add_to_hass(hass) + + mock_setup = AsyncMock(return_value=True) + mock_setup_entry = AsyncMock(return_value=True) + mock_unload_entry = AsyncMock(return_value=True) + + mock_integration( + hass, + MockModule( + "comp", + async_setup=mock_setup, + async_setup_entry=mock_setup_entry, + async_unload_entry=mock_unload_entry, + ), + ) + mock_platform(hass, "comp.config_flow", None) + + assert hass.config_entries.async_loaded_entries("comp") == [] + + assert await manager.async_setup(entry1.entry_id) + assert not await manager.async_setup(entry2.entry_id) + assert not await manager.async_setup(entry3.entry_id) + + assert hass.config_entries.async_loaded_entries("comp") == [entry1] + + assert await hass.config_entries.async_unload(entry1.entry_id) + + assert hass.config_entries.async_loaded_entries("comp") == [] + + +async def test_async_has_matching_discovery_flow( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test we can check for matching discovery flows.""" + assert ( + manager.flow.async_has_matching_discovery_flow( + "test", + {"source": config_entries.SOURCE_HOMEKIT}, + {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + is False + ) + + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + VERSION = 5 + + async def async_step_init(self, user_input=None): + return self.async_show_progress( + step_id="init", + progress_action="task_one", + ) + + async def async_step_homekit(self, discovery_info=None): + return await self.async_step_init(discovery_info) + + with mock_config_flow("test", TestFlow): + result = await manager.flow.async_init( + "test", + context={"source": config_entries.SOURCE_HOMEKIT}, + data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + assert result["type"] == data_entry_flow.FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "task_one" + assert len(manager.flow.async_progress()) == 1 + assert len(manager.flow.async_progress_by_handler("test")) == 1 + assert ( + len( + 
manager.flow.async_progress_by_handler( + "test", match_context={"source": config_entries.SOURCE_HOMEKIT} + ) + ) + == 1 + ) + assert ( + len( + manager.flow.async_progress_by_handler( + "test", match_context={"source": config_entries.SOURCE_BLUETOOTH} + ) + ) + == 0 + ) + assert manager.flow.async_get(result["flow_id"])["handler"] == "test" + + assert ( + manager.flow.async_has_matching_discovery_flow( + "test", + {"source": config_entries.SOURCE_HOMEKIT}, + {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + is True + ) + assert ( + manager.flow.async_has_matching_discovery_flow( + "test", + {"source": config_entries.SOURCE_SSDP}, + {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + is False + ) + assert ( + manager.flow.async_has_matching_discovery_flow( + "other", + {"source": config_entries.SOURCE_HOMEKIT}, + {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + is False + ) + + +async def test_async_has_matching_flow( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test check for matching flows when another matching flow is in progress.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + VERSION = 5 + + async def async_step_init(self, user_input=None): + return self.async_show_progress( + step_id="init", + progress_action="task_one", + ) + + async def async_step_homekit(self, discovery_info=None): + return await self.async_step_init(discovery_info) + + def is_matching(self, other_flow: Self) -> bool: + """Return True if other_flow is matching this flow.""" + return True + + # Initiate a flow + with mock_config_flow("test", TestFlow): + await manager.flow.async_init( + "test", + context={"source": config_entries.SOURCE_HOMEKIT}, + data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + flow = list(manager.flow._handler_progress_index.get("test"))[0] + + assert manager.flow.async_has_matching_flow(flow) is False + + # Initiate another flow + with mock_config_flow("test", TestFlow): + await manager.flow.async_init( + "test", + context={"source": config_entries.SOURCE_HOMEKIT}, + data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + + assert manager.flow.async_has_matching_flow(flow) is True + + +async def test_async_has_matching_flow_no_flows( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test check for matching flows when there is no active flow.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + VERSION = 5 + + async def async_step_init(self, user_input=None): + return self.async_show_progress( + step_id="init", + progress_action="task_one", + ) + + async def async_step_homekit(self, discovery_info=None): + return await self.async_step_init(discovery_info) + + with mock_config_flow("test", TestFlow): + result = await manager.flow.async_init( + "test", + context={"source": config_entries.SOURCE_HOMEKIT}, + data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + flow = list(manager.flow._handler_progress_index.get("test"))[0] + + # Abort the flow before checking for matching flows + manager.flow.async_abort(result["flow_id"]) + + assert manager.flow.async_has_matching_flow(flow) is False + + +async def test_async_has_matching_flow_not_implemented( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test check for matching flows when is_matching is not implemented.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, 
"test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + VERSION = 5 + + async def async_step_init(self, user_input=None): + return self.async_show_progress( + step_id="init", + progress_action="task_one", + ) + + async def async_step_homekit(self, discovery_info=None): + return await self.async_step_init(discovery_info) + + # Initiate a flow + with mock_config_flow("test", TestFlow): + await manager.flow.async_init( + "test", + context={"source": config_entries.SOURCE_HOMEKIT}, + data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + flow = list(manager.flow._handler_progress_index.get("test"))[0] + + # Initiate another flow + with mock_config_flow("test", TestFlow): + await manager.flow.async_init( + "test", + context={"source": config_entries.SOURCE_HOMEKIT}, + data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + + # The flow does not implement is_matching + with pytest.raises(NotImplementedError): + manager.flow.async_has_matching_flow(flow) + + +async def test_get_reauth_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test _get_context_entry behavior.""" + entry = MockConfigEntry( + title="test_title", + domain="test", + entry_id="01J915Q6T9F6G5V0QJX6HBC94T", + data={"host": "any", "port": 123}, + unique_id=None, + ) + entry.add_to_hass(hass) + + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + VERSION = 1 + + async def async_step_user(self, user_input=None): + """Test user step.""" + return await self._async_step_confirm() + + async def async_step_reauth(self, entry_data): + """Test reauth step.""" + return await self._async_step_confirm() + + async def async_step_reconfigure(self, user_input=None): + """Test reauth step.""" + return await self._async_step_confirm() + + async def _async_step_confirm(self): + """Confirm input.""" + try: + entry = self._get_reauth_entry() + except ValueError as err: + reason = str(err) + except config_entries.UnknownEntry: + reason = "Entry not found" + else: + reason = f"Found entry {entry.title}" + try: + entry_id = self._reauth_entry_id + except ValueError: + reason = f"{reason}: -" + else: + reason = f"{reason}: {entry_id}" + return self.async_abort(reason=reason) + + # A reauth flow finds the config entry from context + with mock_config_flow("test", TestFlow): + result = await entry.start_reauth_flow(hass) + assert result["reason"] == "Found entry test_title: 01J915Q6T9F6G5V0QJX6HBC94T" + + # The config entry is removed before the reauth flow is aborted + with mock_config_flow("test", TestFlow): + result = await entry.start_reauth_flow(hass, context={"entry_id": "01JRemoved"}) + assert result["reason"] == "Entry not found: 01JRemoved" + + # A reconfigure flow does not have access to the config entry + with mock_config_flow("test", TestFlow): + result = await entry.start_reconfigure_flow(hass) + assert result["reason"] == "Source is reconfigure, expected reauth: -" + + # A user flow does not have access to the config entry + with mock_config_flow("test", TestFlow): + result = await manager.flow.async_init( + "test", context={"source": config_entries.SOURCE_USER} + ) + assert result["reason"] == "Source is user, expected reauth: -" + + +async def test_get_reconfigure_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test _get_context_entry behavior.""" + entry = MockConfigEntry( + title="test_title", + domain="test", + entry_id="01J915Q6T9F6G5V0QJX6HBC94T", + 
data={"host": "any", "port": 123}, + unique_id=None, + ) + entry.add_to_hass(hass) + + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + VERSION = 1 + + async def async_step_user(self, user_input=None): + """Test user step.""" + return await self._async_step_confirm() + + async def async_step_reauth(self, entry_data): + """Test reauth step.""" + return await self._async_step_confirm() + + async def async_step_reconfigure(self, user_input=None): + """Test reauth step.""" + return await self._async_step_confirm() + + async def _async_step_confirm(self): + """Confirm input.""" + try: + entry = self._get_reconfigure_entry() + except ValueError as err: + reason = str(err) + except config_entries.UnknownEntry: + reason = "Entry not found" + else: + reason = f"Found entry {entry.title}" + try: + entry_id = self._reconfigure_entry_id + except ValueError: + reason = f"{reason}: -" + else: + reason = f"{reason}: {entry_id}" + return self.async_abort(reason=reason) + + # A reauth flow does not have access to the config entry from context + with mock_config_flow("test", TestFlow): + result = await entry.start_reauth_flow(hass) + assert result["reason"] == "Source is reauth, expected reconfigure: -" + + # A reconfigure flow finds the config entry + with mock_config_flow("test", TestFlow): + result = await entry.start_reconfigure_flow(hass) + assert result["reason"] == "Found entry test_title: 01J915Q6T9F6G5V0QJX6HBC94T" + + # The entry_id no longer exists + with mock_config_flow("test", TestFlow): + result = await manager.flow.async_init( + "test", + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": "01JRemoved", + }, + ) + assert result["reason"] == "Entry not found: 01JRemoved" + + # A user flow does not have access to the config entry + with mock_config_flow("test", TestFlow): + result = await manager.flow.async_init( + "test", context={"source": config_entries.SOURCE_USER} + ) + assert result["reason"] == "Source is user, expected reconfigure: -" + + +async def test_reauth_helper_alignment( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test `start_reauth_flow` helper alignment. + + It should be aligned with `ConfigEntry._async_init_reauth`. 
+ """ + entry = MockConfigEntry( + title="test_title", + domain="test", + entry_id="01J915Q6T9F6G5V0QJX6HBC94T", + data={"host": "any", "port": 123}, + unique_id=None, + ) + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock( + side_effect=ConfigEntryAuthFailed("The password is no longer valid") + ) + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + # Check context via auto-generated reauth + await manager.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert "could not authenticate: The password is no longer valid" in caplog.text + + assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR + assert entry.reason == "The password is no longer valid" + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + reauth_flow_context = flows[0]["context"] + reauth_flow_init_data = hass.config_entries.flow._progress[ + flows[0]["flow_id"] + ].init_data + + # Clear to make way for `start_reauth_flow` helper + manager.flow.async_abort(flows[0]["flow_id"]) + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 0 + + # Check context via `start_reauth_flow` helper + await entry.start_reauth_flow(hass) + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + helper_flow_context = flows[0]["context"] + helper_flow_init_data = hass.config_entries.flow._progress[ + flows[0]["flow_id"] + ].init_data + + # Ensure context and init data are aligned + assert helper_flow_context == reauth_flow_context + assert helper_flow_init_data == reauth_flow_init_data + + +@pytest.mark.parametrize( + ("original_unique_id", "new_unique_id", "reason"), + [ + ("unique", "unique", "success"), + (None, None, "success"), + ("unique", "new", "unique_id_mismatch"), + ("unique", None, "unique_id_mismatch"), + (None, "new", "unique_id_mismatch"), + ], +) +@pytest.mark.parametrize( + "source", + [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE], +) +async def test_abort_if_unique_id_mismatch( + hass: HomeAssistant, + source: str, + original_unique_id: str | None, + new_unique_id: str | None, + reason: str, +) -> None: + """Test to check if_unique_id_mismatch behavior.""" + entry = MockConfigEntry( + title="From config flow", + domain="test", + entry_id="01J915Q6T9F6G5V0QJX6HBC94T", + data={"host": "any", "port": 123}, + unique_id=original_unique_id, + ) + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + VERSION = 1 + + async def async_step_user(self, user_input=None): + """Test user step.""" + return await self._async_step_confirm() + + async def async_step_reauth(self, entry_data): + """Test reauth step.""" + return await self._async_step_confirm() + + async def async_step_reconfigure(self, user_input=None): + """Test reauth step.""" + return await self._async_step_confirm() + + async def _async_step_confirm(self): + """Confirm input.""" + await self.async_set_unique_id(new_unique_id) + self._abort_if_unique_id_mismatch() + return self.async_abort(reason="success") + + with mock_config_flow("test", TestFlow): + if source == config_entries.SOURCE_REAUTH: + result = await entry.start_reauth_flow(hass) + elif source == config_entries.SOURCE_RECONFIGURE: + result = await entry.start_reconfigure_flow(hass) + await 
hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + + +def test_state_not_stored_in_storage() -> None: + """Test that state is not stored in storage. + + Verify we don't start accidentally storing state in storage. + """ + entry = MockConfigEntry(domain="test") + loaded = json_loads(json_dumps(entry.as_storage_fragment)) + for key in config_entries.STATE_KEYS: + assert key not in loaded + + +def test_storage_cache_is_cleared_on_entry_update(hass: HomeAssistant) -> None: + """Test that the storage cache is cleared when an entry is updated.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + _ = entry.as_storage_fragment + hass.config_entries.async_update_entry(entry, data={"new": "data"}) + loaded = json_loads(json_dumps(entry.as_storage_fragment)) + assert "new" in loaded["data"] + + +async def test_storage_cache_is_cleared_on_entry_disable(hass: HomeAssistant) -> None: + """Test that the storage cache is cleared when an entry is disabled.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + _ = entry.as_storage_fragment + await hass.config_entries.async_set_disabled_by( + entry.entry_id, config_entries.ConfigEntryDisabler.USER + ) + loaded = json_loads(json_dumps(entry.as_storage_fragment)) + assert loaded["disabled_by"] == "user" + + +async def test_state_cache_is_cleared_on_entry_disable(hass: HomeAssistant) -> None: + """Test that the state cache is cleared when an entry is disabled.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + _ = entry.as_storage_fragment + await hass.config_entries.async_set_disabled_by( + entry.entry_id, config_entries.ConfigEntryDisabler.USER + ) + loaded = json_loads(json_dumps(entry.as_json_fragment)) + assert loaded["disabled_by"] == "user" + + +@pytest.mark.parametrize( + ("original_unique_id", "new_unique_id", "count"), + [ + ("unique", "unique", 1), + ("unique", "new", 2), + ("unique", None, 2), + (None, "unique", 2), + ], +) +@pytest.mark.parametrize( + "source", + [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE], +) +async def test_create_entry_reauth_reconfigure( + hass: HomeAssistant, + source: str, + original_unique_id: str | None, + new_unique_id: str | None, + count: int, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test to highlight unexpected behavior on create_entry.""" + entry = MockConfigEntry( + title="From config flow", + domain="test", + entry_id="01J915Q6T9F6G5V0QJX6HBC94T", + data={"host": "any", "port": 123}, + unique_id=original_unique_id, + ) + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + VERSION = 1 + + async def async_step_user(self, user_input=None): + """Test user step.""" + return await self._async_step_confirm() + + async def async_step_reauth(self, entry_data): + """Test reauth step.""" + return await self._async_step_confirm() + + async def async_step_reconfigure(self, user_input=None): + """Test reauth step.""" + return await self._async_step_confirm() + + async def _async_step_confirm(self): + """Confirm input.""" + await self.async_set_unique_id(new_unique_id) + return self.async_create_entry( + title="From config flow", + data={"token": "supersecret"}, + ) + + assert len(hass.config_entries.async_entries("test")) == 1 + + with ( + mock_config_flow("test", TestFlow), + 
patch.object(frame, "_REPORTED_INTEGRATIONS", set()), + ): + result = await getattr(entry, f"start_{source}_flow")(hass) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + + entries = hass.config_entries.async_entries("test") + assert len(entries) == count + if count == 1: + # Show that the previous entry got binned and recreated + assert entries[0].entry_id != entry.entry_id + + assert ( + f"Detected that integration 'test' creates a new entry in a '{source}' flow, " + "when it is expected to update an existing entry and abort. This will stop " + "working in Home Assistant 2025.11, please create a bug report at " + "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+" + "label%3A%22integration%3A+test%22" + ) in caplog.text + + +async def test_async_update_entry_unique_id_collision( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + caplog: pytest.LogCaptureFixture, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we warn when async_update_entry creates a unique_id collision. + + Also test an issue registry issue is created. + """ + assert len(issue_registry.issues) == 0 + + entry1 = MockConfigEntry(domain="test", unique_id=None) + entry2 = MockConfigEntry(domain="test", unique_id="not none") + entry3 = MockConfigEntry(domain="test", unique_id="very unique") + entry4 = MockConfigEntry(domain="test", unique_id="also very unique") + entry1.add_to_manager(manager) + entry2.add_to_manager(manager) + entry3.add_to_manager(manager) + entry4.add_to_manager(manager) + + manager.async_update_entry(entry2, unique_id=None) + assert len(issue_registry.issues) == 0 + assert len(caplog.record_tuples) == 0 + + manager.async_update_entry(entry4, unique_id="very unique") + assert len(issue_registry.issues) == 1 + assert len(caplog.record_tuples) == 1 + + assert ( + "Unique id of config entry 'Mock Title' from integration test changed to " + "'very unique' which is already in use" + ) in caplog.text + + issue_id = "config_entry_unique_id_collision_test_very unique" + assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) + + +@pytest.mark.parametrize("domain", ["flipr"]) +async def test_async_update_entry_unique_id_collision_allowed_domain( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + caplog: pytest.LogCaptureFixture, + issue_registry: ir.IssueRegistry, + domain: str, +) -> None: + """Test we warn when async_update_entry creates a unique_id collision. + + This tests we don't warn and don't create issues for domains which have + their own migration path. 
+ """ + assert len(issue_registry.issues) == 0 + + entry1 = MockConfigEntry(domain=domain, unique_id=None) + entry2 = MockConfigEntry(domain=domain, unique_id="not none") + entry3 = MockConfigEntry(domain=domain, unique_id="very unique") + entry4 = MockConfigEntry(domain=domain, unique_id="also very unique") + entry1.add_to_manager(manager) + entry2.add_to_manager(manager) + entry3.add_to_manager(manager) + entry4.add_to_manager(manager) + + manager.async_update_entry(entry2, unique_id=None) + assert len(issue_registry.issues) == 0 + assert len(caplog.record_tuples) == 0 + + manager.async_update_entry(entry4, unique_id="very unique") + assert len(issue_registry.issues) == 0 + assert len(caplog.record_tuples) == 0 + + assert ("already in use") not in caplog.text + + +async def test_unique_id_collision_issues( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + caplog: pytest.LogCaptureFixture, + issue_registry: ir.IssueRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test issue registry issues are created and remove on unique id collision.""" + assert len(issue_registry.issues) == 0 + + mock_setup_entry = AsyncMock(return_value=True) + for i in range(3): + mock_integration( + hass, MockModule(f"test{i+1}", async_setup_entry=mock_setup_entry) + ) + mock_platform(hass, f"test{i+1}.config_flow", None) + + test2_group_1: list[MockConfigEntry] = [] + test2_group_2: list[MockConfigEntry] = [] + test3: list[MockConfigEntry] = [] + for _ in range(3): + await manager.async_add(MockConfigEntry(domain="test1", unique_id=None)) + test2_group_1.append(MockConfigEntry(domain="test2", unique_id="group_1")) + test2_group_2.append(MockConfigEntry(domain="test2", unique_id="group_2")) + await manager.async_add(test2_group_1[-1]) + await manager.async_add(test2_group_2[-1]) + for _ in range(6): + test3.append(MockConfigEntry(domain="test3", unique_id="not_unique")) + await manager.async_add(test3[-1]) + # Add an ignored config entry + await manager.async_add( + MockConfigEntry( + domain="test2", unique_id="group_1", source=config_entries.SOURCE_IGNORE + ) + ) + + # Check we get one issue for domain test2 and one issue for domain test3 + assert len(issue_registry.issues) == 2 + issue_id = "config_entry_unique_id_collision_test2_group_1" + assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) == snapshot + issue_id = "config_entry_unique_id_collision_test3_not_unique" + assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) == snapshot + + # Remove one config entry for domain test3, the translations should be updated + await manager.async_remove(test3[0].entry_id) + assert set(issue_registry.issues) == { + (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"), + (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test3_not_unique"), + } + assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) == snapshot + + # Remove all but two config entries for domain test 3 + for i in range(3): + await manager.async_remove(test3[1 + i].entry_id) + assert set(issue_registry.issues) == { + (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"), + (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test3_not_unique"), + } + + # Remove the last test3 duplicate, the issue is cleared + await manager.async_remove(test3[-1].entry_id) + assert set(issue_registry.issues) == { + (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"), + } + + await manager.async_remove(test2_group_1[0].entry_id) + assert 
set(issue_registry.issues) == { + (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"), + } + + # Remove the last test2 group1 duplicate, a new issue is created + await manager.async_remove(test2_group_1[1].entry_id) + assert set(issue_registry.issues) == { + (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_2"), + } + + await manager.async_remove(test2_group_2[0].entry_id) + assert set(issue_registry.issues) == { + (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_2"), + } + + # Remove the last test2 group2 duplicate, the issue is cleared + await manager.async_remove(test2_group_2[1].entry_id) + assert not issue_registry.issues + + +async def test_context_no_leak(hass: HomeAssistant) -> None: + """Test ensure that config entry context does not leak. + + Unlikely to happen in real world, but occurs often in tests. + """ + + connected_future = asyncio.Future() + bg_tasks = [] + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Mock setup entry.""" + + async def _async_set_runtime_data(): + # Show that config_entries.current_entry is preserved for child tasks + await connected_future + entry.runtime_data = config_entries.current_entry.get() + + bg_tasks.append(hass.loop.create_task(_async_set_runtime_data())) + + return True + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Mock unload entry.""" + return True + + mock_integration( + hass, + MockModule( + "comp", + async_setup_entry=async_setup_entry, + async_unload_entry=async_unload_entry, + ), + ) + mock_platform(hass, "comp.config_flow", None) + + entry1 = MockConfigEntry(domain="comp") + entry1.add_to_hass(hass) + + await hass.config_entries.async_setup(entry1.entry_id) + assert entry1.state is config_entries.ConfigEntryState.LOADED + assert config_entries.current_entry.get() is None + + # Load an existing config entry + entry2 = MockConfigEntry(domain="comp") + entry2.add_to_hass(hass) + await hass.config_entries.async_setup(entry2.entry_id) + assert entry2.state is config_entries.ConfigEntryState.LOADED + assert config_entries.current_entry.get() is None + + # Add a new config entry (eg. 
from config flow) + entry3 = MockConfigEntry(domain="comp") + await hass.config_entries.async_add(entry3) + assert entry3.state is config_entries.ConfigEntryState.LOADED + assert config_entries.current_entry.get() is None + + for entry in (entry1, entry2, entry3): + assert entry.state is config_entries.ConfigEntryState.LOADED + assert not hasattr(entry, "runtime_data") + assert config_entries.current_entry.get() is None + + connected_future.set_result(None) + await asyncio.gather(*bg_tasks) + + for entry in (entry1, entry2, entry3): + assert entry.state is config_entries.ConfigEntryState.LOADED + assert entry.runtime_data is entry + assert config_entries.current_entry.get() is None + + +async def test_options_flow_config_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test _config_entry_id and config_entry properties in options flow.""" + original_entry = MockConfigEntry(domain="test", data={}) + original_entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Test options flow.""" + + class _OptionsFlow(config_entries.OptionsFlow): + """Test flow.""" + + def __init__(self) -> None: + """Test initialisation.""" + try: + self.init_entry_id = self._config_entry_id + except ValueError as err: + self.init_entry_id = err + try: + self.init_entry = self.config_entry + except ValueError as err: + self.init_entry = err + + async def async_step_init(self, user_input=None): + """Test user step.""" + errors = {} + if user_input is not None: + if user_input.get("abort"): + return self.async_abort(reason="abort") + + errors["entry_id"] = self._config_entry_id + try: + errors["entry"] = self.config_entry + except config_entries.UnknownEntry as err: + errors["entry"] = err + + return self.async_show_form(step_id="init", errors=errors) + + return _OptionsFlow() + + with mock_config_flow("test", TestFlow): + result = await hass.config_entries.options.async_init(original_entry.entry_id) + + options_flow = hass.config_entries.options._progress.get(result["flow_id"]) + assert isinstance(options_flow, config_entries.OptionsFlow) + assert options_flow.handler == original_entry.entry_id + assert isinstance(options_flow.init_entry_id, ValueError) + assert ( + str(options_flow.init_entry_id) + == "The config entry id is not available during initialisation" + ) + assert isinstance(options_flow.init_entry, ValueError) + assert ( + str(options_flow.init_entry) + == "The config entry is not available during initialisation" + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"] == {} + + result = await hass.config_entries.options.async_configure(result["flow_id"], {}) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"]["entry_id"] == original_entry.entry_id + assert result["errors"]["entry"] is original_entry + + # Bad handler - not linked to a config entry + options_flow.handler = "123" + result = await hass.config_entries.options.async_configure(result["flow_id"], {}) + result = await hass.config_entries.options.async_configure(result["flow_id"], {}) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"]["entry_id"] == 
"123" + assert isinstance(result["errors"]["entry"], config_entries.UnknownEntry) + # Reset handler + options_flow.handler = original_entry.entry_id + + result = await hass.config_entries.options.async_configure( + result["flow_id"], {"abort": True} + ) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "abort" + + +@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_options_flow_deprecated_config_entry_setter( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that setting config_entry explicitly still works.""" + original_entry = MockConfigEntry(domain="my_integration", data={}) + original_entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + + mock_integration( + hass, MockModule("my_integration", async_setup_entry=mock_setup_entry) + ) + mock_platform(hass, "my_integration.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Test options flow.""" + + class _OptionsFlow(config_entries.OptionsFlow): + """Test flow.""" + + def __init__(self, entry) -> None: + """Test initialisation.""" + self.config_entry = entry + + async def async_step_init(self, user_input=None): + """Test user step.""" + errors = {} + if user_input is not None: + if user_input.get("abort"): + return self.async_abort(reason="abort") + + errors["entry_id"] = self._config_entry_id + try: + errors["entry"] = self.config_entry + except config_entries.UnknownEntry as err: + errors["entry"] = err + + return self.async_show_form(step_id="init", errors=errors) + + return _OptionsFlow(config_entry) + + with mock_config_flow("my_integration", TestFlow): + result = await hass.config_entries.options.async_init(original_entry.entry_id) + + options_flow = hass.config_entries.options._progress.get(result["flow_id"]) + assert options_flow.config_entry is original_entry + + assert ( + "Detected that custom integration 'my_integration' sets option flow " + "config_entry explicitly, which is deprecated at " + "custom_components/my_integration/light.py, line 23: " + "self.light.is_on. 
This will stop working in Home Assistant 2025.12, please " + "create a bug report at " in caplog.text + ) + + +async def test_add_description_placeholder_automatically( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, +) -> None: + """Test entry title is added automatically to reauth flows description placeholder.""" + + entry = MockConfigEntry(title="test_title", domain="test") + + mock_setup_entry = AsyncMock(side_effect=ConfigEntryAuthFailed()) + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + entry.add_to_hass(hass) + await manager.async_setup(entry.entry_id) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress_by_handler("test") + assert len(flows) == 1 + + result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], None) + assert result["type"] == FlowResultType.FORM + assert result["description_placeholders"] == {"name": "test_title"} + + +async def test_add_description_placeholder_automatically_not_overwrites( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, +) -> None: + """Test entry title is not added automatically to reauth flows when custom name exist.""" + + entry = MockConfigEntry(title="test_title", domain="test2") + + mock_setup_entry = AsyncMock(side_effect=ConfigEntryAuthFailed()) + mock_integration(hass, MockModule("test2", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test2.config_flow", None) + + entry.add_to_hass(hass) + await manager.async_setup(entry.entry_id) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress_by_handler("test2") + assert len(flows) == 1 + + result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], None) + assert result["type"] == FlowResultType.FORM + assert result["description_placeholders"] == {"name": "Custom title"} diff --git a/tests/test_const.py b/tests/test_const.py index 64ccb875cf5..a039545a004 100644 --- a/tests/test_const.py +++ b/tests/test_const.py @@ -1,13 +1,17 @@ """Test const module.""" from enum import Enum +import logging +import sys +from unittest.mock import Mock, patch import pytest from homeassistant import const -from homeassistant.components import sensor +from homeassistant.components import alarm_control_panel, lock from .common import ( + extract_stack_to_frame, help_test_all, import_and_test_deprecated_constant, import_and_test_deprecated_constant_enum, @@ -26,152 +30,16 @@ def test_all() -> None: @pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(const.EntityCategory, "ENTITY_CATEGORY_") - + _create_tuples( - [ - sensor.SensorDeviceClass.AQI, - sensor.SensorDeviceClass.BATTERY, - sensor.SensorDeviceClass.CO, - sensor.SensorDeviceClass.CO2, - sensor.SensorDeviceClass.CURRENT, - sensor.SensorDeviceClass.DATE, - sensor.SensorDeviceClass.ENERGY, - sensor.SensorDeviceClass.FREQUENCY, - sensor.SensorDeviceClass.GAS, - sensor.SensorDeviceClass.HUMIDITY, - sensor.SensorDeviceClass.ILLUMINANCE, - sensor.SensorDeviceClass.MONETARY, - sensor.SensorDeviceClass.NITROGEN_DIOXIDE, - sensor.SensorDeviceClass.NITROGEN_MONOXIDE, - sensor.SensorDeviceClass.NITROUS_OXIDE, - sensor.SensorDeviceClass.OZONE, - sensor.SensorDeviceClass.PM1, - sensor.SensorDeviceClass.PM10, - sensor.SensorDeviceClass.PM25, - sensor.SensorDeviceClass.POWER_FACTOR, - sensor.SensorDeviceClass.POWER, - sensor.SensorDeviceClass.PRESSURE, - sensor.SensorDeviceClass.SIGNAL_STRENGTH, - 
sensor.SensorDeviceClass.SULPHUR_DIOXIDE, - sensor.SensorDeviceClass.TEMPERATURE, - sensor.SensorDeviceClass.TIMESTAMP, - sensor.SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - sensor.SensorDeviceClass.VOLTAGE, - ], - "DEVICE_CLASS_", - ) - + _create_tuples(const.UnitOfApparentPower, "POWER_") - + _create_tuples(const.UnitOfPower, "POWER_") - + _create_tuples( - [ - const.UnitOfEnergy.KILO_WATT_HOUR, - const.UnitOfEnergy.MEGA_WATT_HOUR, - const.UnitOfEnergy.WATT_HOUR, - ], - "ENERGY_", - ) - + _create_tuples(const.UnitOfElectricCurrent, "ELECTRIC_CURRENT_") - + _create_tuples(const.UnitOfElectricPotential, "ELECTRIC_POTENTIAL_") - + _create_tuples(const.UnitOfTemperature, "TEMP_") - + _create_tuples(const.UnitOfTime, "TIME_") - + _create_tuples( - [ - const.UnitOfLength.MILLIMETERS, - const.UnitOfLength.CENTIMETERS, - const.UnitOfLength.METERS, - const.UnitOfLength.KILOMETERS, - const.UnitOfLength.INCHES, - const.UnitOfLength.FEET, - const.UnitOfLength.MILES, - ], - "LENGTH_", - ) - + _create_tuples(const.UnitOfFrequency, "FREQUENCY_") - + _create_tuples(const.UnitOfPressure, "PRESSURE_") - + _create_tuples( - [ - const.UnitOfVolume.CUBIC_FEET, - const.UnitOfVolume.CUBIC_METERS, - const.UnitOfVolume.LITERS, - const.UnitOfVolume.MILLILITERS, - const.UnitOfVolume.GALLONS, - ], - "VOLUME_", - ) - + _create_tuples( - [ - const.UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, - const.UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, - ], - "VOLUME_FLOW_RATE_", - ) - + _create_tuples( - [ - const.UnitOfMass.GRAMS, - const.UnitOfMass.KILOGRAMS, - const.UnitOfMass.MILLIGRAMS, - const.UnitOfMass.MICROGRAMS, - const.UnitOfMass.OUNCES, - const.UnitOfMass.POUNDS, - ], - "MASS_", - ) - + _create_tuples(const.UnitOfIrradiance, "IRRADIATION_") - + _create_tuples( - [ - const.UnitOfPrecipitationDepth.INCHES, - const.UnitOfPrecipitationDepth.MILLIMETERS, - const.UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, - const.UnitOfVolumetricFlux.INCHES_PER_HOUR, - ], - "PRECIPITATION_", - ) - + _create_tuples( - [ - const.UnitOfSpeed.FEET_PER_SECOND, - const.UnitOfSpeed.METERS_PER_SECOND, - const.UnitOfSpeed.KILOMETERS_PER_HOUR, - const.UnitOfSpeed.KNOTS, - const.UnitOfSpeed.MILES_PER_HOUR, - ], - "SPEED_", - ) - + _create_tuples( - [ - const.UnitOfVolumetricFlux.MILLIMETERS_PER_DAY, - const.UnitOfVolumetricFlux.INCHES_PER_DAY, - const.UnitOfVolumetricFlux.INCHES_PER_HOUR, - ], - "SPEED_", - ) - + _create_tuples(const.UnitOfInformation, "DATA_") - + _create_tuples(const.UnitOfDataRate, "DATA_RATE_"), -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, const, enum, constant_prefix, "2025.1" - ) - - -@pytest.mark.parametrize( - ("replacement", "constant_name"), + ("replacement", "constant_name", "breaks_in_version"), [ - (const.UnitOfLength.YARDS, "LENGTH_YARD"), - (const.UnitOfSoundPressure.DECIBEL, "SOUND_PRESSURE_DB"), - (const.UnitOfSoundPressure.WEIGHTED_DECIBEL_A, "SOUND_PRESSURE_WEIGHTED_DBA"), - (const.UnitOfVolume.FLUID_OUNCES, "VOLUME_FLUID_OUNCE"), + (const.UnitOfArea.SQUARE_METERS, "AREA_SQUARE_METERS", "2025.12"), ], ) def test_deprecated_constant_name_changes( caplog: pytest.LogCaptureFixture, replacement: Enum, constant_name: str, + breaks_in_version: str, ) -> None: """Test deprecated constants, where the name is not the same as the enum value.""" import_and_test_deprecated_constant( @@ -180,5 +48,142 @@ def test_deprecated_constant_name_changes( constant_name, 
f"{replacement.__class__.__name__}.{replacement.name}", replacement, - "2025.1", + breaks_in_version, ) + + +def _create_tuples_lock_states( + enum: type[Enum], constant_prefix: str, remove_in_version: str +) -> list[tuple[Enum, str]]: + return [ + (enum_field, constant_prefix, remove_in_version) + for enum_field in enum + if enum_field + not in [ + lock.LockState.OPEN, + lock.LockState.OPENING, + ] + ] + + +@pytest.mark.parametrize( + ("enum", "constant_prefix", "remove_in_version"), + _create_tuples_lock_states(lock.LockState, "STATE_", "2025.10"), +) +def test_deprecated_constants_lock( + caplog: pytest.LogCaptureFixture, + enum: Enum, + constant_prefix: str, + remove_in_version: str, +) -> None: + """Test deprecated constants.""" + import_and_test_deprecated_constant_enum( + caplog, const, enum, constant_prefix, remove_in_version + ) + + +def _create_tuples_alarm_states( + enum: type[Enum], constant_prefix: str, remove_in_version: str +) -> list[tuple[Enum, str]]: + return [ + (enum_field, constant_prefix, remove_in_version) + for enum_field in enum + if enum_field + not in [ + lock.LockState.OPEN, + lock.LockState.OPENING, + ] + ] + + +@pytest.mark.parametrize( + ("enum", "constant_prefix", "remove_in_version"), + _create_tuples_lock_states( + alarm_control_panel.AlarmControlPanelState, "STATE_ALARM_", "2025.11" + ), +) +def test_deprecated_constants_alarm( + caplog: pytest.LogCaptureFixture, + enum: Enum, + constant_prefix: str, + remove_in_version: str, +) -> None: + """Test deprecated constants.""" + import_and_test_deprecated_constant_enum( + caplog, const, enum, constant_prefix, remove_in_version + ) + + +def test_deprecated_unit_of_conductivity_alias() -> None: + """Test UnitOfConductivity deprecation.""" + + # Test the deprecated members are aliases + assert set(const.UnitOfConductivity) == {"S/cm", "µS/cm", "mS/cm"} + + +def test_deprecated_unit_of_conductivity_members( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test UnitOfConductivity deprecation.""" + + module_name = "config.custom_components.hue.light" + filename = f"/home/paulus/{module_name.replace('.', '/')}.py" + + with ( + patch.dict(sys.modules, {module_name: Mock(__file__=filename)}), + patch( + "homeassistant.helpers.frame.linecache.getline", + return_value="await session.close()", + ), + patch( + "homeassistant.helpers.frame.get_current_frame", + return_value=extract_stack_to_frame( + [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="23", + line="do_something()", + ), + Mock( + filename=filename, + lineno="23", + line="await session.close()", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ] + ), + ), + ): + const.UnitOfConductivity.SIEMENS # noqa: B018 + const.UnitOfConductivity.MICROSIEMENS # noqa: B018 + const.UnitOfConductivity.MILLISIEMENS # noqa: B018 + + assert len(caplog.record_tuples) == 3 + + def deprecation_message(member: str, replacement: str) -> str: + return ( + f"UnitOfConductivity.{member} was used from hue, this is a deprecated enum " + "member which will be removed in HA Core 2025.11.0. Use UnitOfConductivity." 
+ f"{replacement} instead, please report it to the author of the 'hue' custom" + " integration" + ) + + assert ( + const.__name__, + logging.WARNING, + deprecation_message("SIEMENS", "SIEMENS_PER_CM"), + ) in caplog.record_tuples + assert ( + const.__name__, + logging.WARNING, + deprecation_message("MICROSIEMENS", "MICROSIEMENS_PER_CM"), + ) in caplog.record_tuples + assert ( + const.__name__, + logging.WARNING, + deprecation_message("MILLISIEMENS", "MILLISIEMENS_PER_CM"), + ) in caplog.record_tuples diff --git a/tests/test_core.py b/tests/test_core.py index 9ca57d1563f..60b907d57ca 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -9,13 +9,11 @@ import functools import gc import logging import os -from pathlib import Path import re -from tempfile import TemporaryDirectory import threading import time from typing import Any -from unittest.mock import MagicMock, Mock, PropertyMock, patch +from unittest.mock import MagicMock, patch from freezegun import freeze_time import pytest @@ -24,7 +22,6 @@ import voluptuous as vol from homeassistant.const import ( ATTR_FRIENDLY_NAME, - CONF_UNIT_SYSTEM, EVENT_CALL_SERVICE, EVENT_CORE_CONFIG_UPDATE, EVENT_HOMEASSISTANT_CLOSE, @@ -37,7 +34,6 @@ from homeassistant.const import ( EVENT_STATE_CHANGED, EVENT_STATE_REPORTED, MATCH_ALL, - __version__, ) import homeassistant.core as ha from homeassistant.core import ( @@ -52,6 +48,7 @@ from homeassistant.core import ( callback, get_release_channel, ) +from homeassistant.core_config import Config from homeassistant.exceptions import ( HomeAssistantError, InvalidEntityFormatError, @@ -65,13 +62,12 @@ from homeassistant.setup import async_setup_component from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from homeassistant.util.read_only_dict import ReadOnlyDict -from homeassistant.util.unit_system import METRIC_SYSTEM from .common import ( async_capture_events, async_mock_service, help_test_all, - import_and_test_deprecated_constant_enum, + import_and_test_deprecated_alias, ) PST = dt_util.get_time_zone("America/Los_Angeles") @@ -1566,10 +1562,10 @@ async def test_statemachine_avoids_updating_attributes(hass: HomeAssistant) -> N def test_service_call_repr() -> None: """Test ServiceCall repr.""" - call = ha.ServiceCall("homeassistant", "start") + call = ha.ServiceCall(None, "homeassistant", "start") assert str(call) == f"" - call2 = ha.ServiceCall("homeassistant", "start", {"fast": "yes"}) + call2 = ha.ServiceCall(None, "homeassistant", "start", {"fast": "yes"}) assert ( str(call2) == f"" @@ -1918,173 +1914,6 @@ async def test_serviceregistry_return_response_optional( assert response_data == expected_response_data -async def test_config_defaults() -> None: - """Test config defaults.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - assert config.hass is hass - assert config.latitude == 0 - assert config.longitude == 0 - assert config.elevation == 0 - assert config.location_name == "Home" - assert config.time_zone == "UTC" - assert config.internal_url is None - assert config.external_url is None - assert config.config_source is ha.ConfigSource.DEFAULT - assert config.skip_pip is False - assert config.skip_pip_packages == [] - assert config.components == set() - assert config.api is None - assert config.config_dir == "/test/ha-config" - assert config.allowlist_external_dirs == set() - assert config.allowlist_external_urls == set() - assert config.media_dirs == {} - assert config.recovery_mode is False - assert 
config.legacy_templates is False - assert config.currency == "EUR" - assert config.country is None - assert config.language == "en" - assert config.radius == 100 - - -async def test_config_path_with_file() -> None: - """Test get_config_path method.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - assert config.path("test.conf") == "/test/ha-config/test.conf" - - -async def test_config_path_with_dir_and_file() -> None: - """Test get_config_path method.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - assert config.path("dir", "test.conf") == "/test/ha-config/dir/test.conf" - - -async def test_config_as_dict() -> None: - """Test as dict.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - type(config.hass.state).value = PropertyMock(return_value="RUNNING") - expected = { - "latitude": 0, - "longitude": 0, - "elevation": 0, - CONF_UNIT_SYSTEM: METRIC_SYSTEM.as_dict(), - "location_name": "Home", - "time_zone": "UTC", - "components": [], - "config_dir": "/test/ha-config", - "whitelist_external_dirs": [], - "allowlist_external_dirs": [], - "allowlist_external_urls": [], - "version": __version__, - "config_source": ha.ConfigSource.DEFAULT, - "recovery_mode": False, - "state": "RUNNING", - "external_url": None, - "internal_url": None, - "currency": "EUR", - "country": None, - "language": "en", - "safe_mode": False, - "debug": False, - "radius": 100, - } - - assert expected == config.as_dict() - - -async def test_config_is_allowed_path() -> None: - """Test is_allowed_path method.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - with TemporaryDirectory() as tmp_dir: - # The created dir is in /tmp. This is a symlink on OS X - # causing this test to fail unless we resolve path first. 
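Aside: the Config tests removed from tests/test_core.py in this hunk are not dropped; the new tests/test_core_config.py added later in this diff re-homes them next to the relocated Config class (test_config_defaults and the path tests reappear near the end of that file). A minimal sketch of the allowlist behaviour this block pins down, reusing the Mock-hass pattern from those tests (illustrative sketch only, not part of the patch):

    import os
    from tempfile import TemporaryDirectory
    from unittest.mock import Mock

    from homeassistant.core_config import Config

    hass = Mock()
    hass.data = {}
    config = Config(hass, "/test/ha-config")

    with TemporaryDirectory() as tmp_dir:
        # Allowlist the resolved directory; /tmp can be a symlink (e.g. on macOS).
        config.allowlist_external_dirs = {os.path.realpath(tmp_dir)}
        assert config.is_allowed_path(tmp_dir)
        assert config.is_allowed_path(os.path.join(tmp_dir, "not_created_yet.jpg"))

    config.allowlist_external_dirs = {"/home", "/var"}
    # Path traversal does not escape the allowlist.
    assert not config.is_allowed_path("/var/../etc/passwd")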
- config.allowlist_external_dirs = {os.path.realpath(tmp_dir)} - - test_file = os.path.join(tmp_dir, "test.jpg") - await asyncio.get_running_loop().run_in_executor( - None, Path(test_file).write_text, "test" - ) - - valid = [test_file, tmp_dir, os.path.join(tmp_dir, "notfound321")] - for path in valid: - assert config.is_allowed_path(path) - - config.allowlist_external_dirs = {"/home", "/var"} - - invalid = [ - "/hass/config/secure", - "/etc/passwd", - "/root/secure_file", - "/var/../etc/passwd", - test_file, - ] - for path in invalid: - assert not config.is_allowed_path(path) - - with pytest.raises(AssertionError): - config.is_allowed_path(None) - - -async def test_config_is_allowed_external_url() -> None: - """Test is_allowed_external_url method.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - config.allowlist_external_urls = [ - "http://x.com/", - "https://y.com/bla/", - "https://z.com/images/1.jpg/", - ] - - valid = [ - "http://x.com/1.jpg", - "http://x.com", - "https://y.com/bla/", - "https://y.com/bla/2.png", - "https://z.com/images/1.jpg", - ] - for url in valid: - assert config.is_allowed_external_url(url) - - invalid = [ - "https://a.co", - "https://y.com/bla_wrong", - "https://y.com/bla/../image.jpg", - "https://z.com/images", - ] - for url in invalid: - assert not config.is_allowed_external_url(url) - - -async def test_event_on_update(hass: HomeAssistant) -> None: - """Test that event is fired on update.""" - events = async_capture_events(hass, EVENT_CORE_CONFIG_UPDATE) - - assert hass.config.latitude != 12 - - await hass.config.async_update(latitude=12) - await hass.async_block_till_done() - - assert hass.config.latitude == 12 - assert len(events) == 1 - assert events[0].data == {"latitude": 12} - - -async def test_bad_timezone_raises_value_error(hass: HomeAssistant) -> None: - """Test bad timezone raises ValueError.""" - with pytest.raises(ValueError): - await hass.config.async_update(time_zone="not_a_timezone") - - async def test_start_taking_too_long(caplog: pytest.LogCaptureFixture) -> None: """Test when async_start takes too long.""" hass = ha.HomeAssistant("/test/ha-config") @@ -2194,7 +2023,7 @@ async def test_async_functions_with_callback(hass: HomeAssistant) -> None: runs = [] @ha.callback - async def test(): + async def test(): # pylint: disable=hass-async-callback-decorator runs.append(True) await hass.async_add_job(test) @@ -2205,7 +2034,7 @@ async def test_async_functions_with_callback(hass: HomeAssistant) -> None: assert len(runs) == 2 @ha.callback - async def service_handler(call): + async def service_handler(call): # pylint: disable=hass-async-callback-decorator runs.append(True) hass.services.async_register("test_domain", "test_service", service_handler) @@ -2299,53 +2128,6 @@ def test_valid_domain() -> None: assert ha.valid_domain(valid), valid -async def test_additional_data_in_core_config( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test that we can handle additional data in core configuration.""" - config = ha.Config(hass, "/test/ha-config") - config.async_initialize() - hass_storage[ha.CORE_STORAGE_KEY] = { - "version": 1, - "data": {"location_name": "Test Name", "additional_valid_key": "value"}, - } - await config.async_load() - assert config.location_name == "Test Name" - - -async def test_incorrect_internal_external_url( - hass: HomeAssistant, hass_storage: dict[str, Any], caplog: pytest.LogCaptureFixture -) -> None: - """Test that we warn when detecting invalid internal/external url.""" - 
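Aside: the same warning is also exercised schema-side in the new tests/test_core_config.py (test_core_config_schema_internal_external_warning, added later in this diff). Condensed, and assuming the usual caplog fixture, that check amounts to (sketch, not part of the patch):

    from homeassistant.core_config import CORE_CONFIG_SCHEMA

    CORE_CONFIG_SCHEMA(
        {
            "external_url": "https://www.example.com/bla",
            "internal_url": "http://example.local/yo",
        }
    )
    # Both URLs carry a path, so caplog should contain
    # "Invalid external_url set" and "Invalid internal_url set".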
config = ha.Config(hass, "/test/ha-config") - config.async_initialize() - - hass_storage[ha.CORE_STORAGE_KEY] = { - "version": 1, - "data": { - "internal_url": None, - "external_url": None, - }, - } - await config.async_load() - assert "Invalid external_url set" not in caplog.text - assert "Invalid internal_url set" not in caplog.text - - config = ha.Config(hass, "/test/ha-config") - config.async_initialize() - - hass_storage[ha.CORE_STORAGE_KEY] = { - "version": 1, - "data": { - "internal_url": "https://community.home-assistant.io/profile", - "external_url": "https://www.home-assistant.io/blue", - }, - } - await config.async_load() - assert "Invalid external_url set" in caplog.text - assert "Invalid internal_url set" in caplog.text - - async def test_start_events(hass: HomeAssistant) -> None: """Test events fired when starting Home Assistant.""" hass.state = ha.CoreState.not_running @@ -3197,20 +2979,9 @@ def test_all() -> None: help_test_all(ha) -@pytest.mark.parametrize( - ("enum"), - [ - ha.ConfigSource.DISCOVERED, - ha.ConfigSource.YAML, - ha.ConfigSource.STORAGE, - ], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: ha.ConfigSource, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, ha, enum, "SOURCE_", "2025.1") +def test_deprecated_config(caplog: pytest.LogCaptureFixture) -> None: + """Test deprecated Config class.""" + import_and_test_deprecated_alias(caplog, ha, "Config", Config, "2025.11") def test_one_time_listener_repr(hass: HomeAssistant) -> None: @@ -3252,10 +3023,9 @@ async def test_async_run_job_deprecated( hass.async_run_job(_test) assert ( - "Detected code that calls `async_run_job`, which is deprecated " - "and will be removed in Home Assistant 2025.4; Please review " + "Detected code that calls `async_run_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" - " for replacement options" + " for replacement options. This will stop working in Home Assistant 2025.4" ) in caplog.text @@ -3269,10 +3039,9 @@ async def test_async_add_job_deprecated( hass.async_add_job(_test) assert ( - "Detected code that calls `async_add_job`, which is deprecated " - "and will be removed in Home Assistant 2025.4; Please review " + "Detected code that calls `async_add_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" - " for replacement options" + " for replacement options. This will stop working in Home Assistant 2025.4" ) in caplog.text @@ -3286,10 +3055,9 @@ async def test_async_add_hass_job_deprecated( hass.async_add_hass_job(HassJob(_test)) assert ( - "Detected code that calls `async_add_hass_job`, which is deprecated " - "and will be removed in Home Assistant 2025.5; Please review " + "Detected code that calls `async_add_hass_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/04/07/deprecate_add_hass_job" - " for replacement options" + " for replacement options. This will stop working in Home Assistant 2025.5" ) in caplog.text @@ -3457,33 +3225,11 @@ async def test_async_listen_with_run_immediately_deprecated( func = getattr(hass.bus, method) func(EVENT_HOMEASSISTANT_START, _test, run_immediately=run_immediately) assert ( - f"Detected code that calls `{method}` with run_immediately, which is " - "deprecated and will be removed in Home Assistant 2025.5." + f"Detected code that calls `{method}` with run_immediately. 
" + "This will stop working in Home Assistant 2025.5" ) in caplog.text -async def test_top_level_components(hass: HomeAssistant) -> None: - """Test top level components are updated when components change.""" - hass.config.components.add("homeassistant") - assert hass.config.components == {"homeassistant"} - assert hass.config.top_level_components == {"homeassistant"} - hass.config.components.add("homeassistant.scene") - assert hass.config.components == {"homeassistant", "homeassistant.scene"} - assert hass.config.top_level_components == {"homeassistant"} - hass.config.components.remove("homeassistant") - assert hass.config.components == {"homeassistant.scene"} - assert hass.config.top_level_components == set() - with pytest.raises(ValueError): - hass.config.components.remove("homeassistant.scene") - with pytest.raises(NotImplementedError): - hass.config.components.discard("homeassistant") - - -async def test_debug_mode_defaults_to_off(hass: HomeAssistant) -> None: - """Test debug mode defaults to off.""" - assert not hass.config.debug - - async def test_async_fire_thread_safety(hass: HomeAssistant) -> None: """Test async_fire thread safety.""" events = async_capture_events(hass, "test_event") @@ -3544,25 +3290,12 @@ async def test_thread_safety_message(hass: HomeAssistant) -> None: "which may cause Home Assistant to crash or data to corrupt. For more " "information, see " "https://developers.home-assistant.io/docs/asyncio_thread_safety/#test" - ". Please report this issue.", + ". Please report this issue", ), ): await hass.async_add_executor_job(hass.verify_event_loop_thread, "test") -async def test_set_time_zone_deprecated(hass: HomeAssistant) -> None: - """Test set_time_zone is deprecated.""" - with pytest.raises( - RuntimeError, - match=re.escape( - "Detected code that set the time zone using set_time_zone instead of " - "async_set_time_zone which will stop working in Home Assistant 2025.6. 
" - "Please report this issue.", - ), - ): - await hass.config.set_time_zone("America/New_York") - - async def test_async_set_updates_last_reported(hass: HomeAssistant) -> None: """Test async_set method updates last_reported AND last_reported_timestamp.""" hass.states.async_set("light.bowl", "on", {}) diff --git a/tests/test_core_config.py b/tests/test_core_config.py new file mode 100644 index 00000000000..dae50bae097 --- /dev/null +++ b/tests/test_core_config.py @@ -0,0 +1,1106 @@ +"""Test core_config.""" + +import asyncio +from collections import OrderedDict +import copy +import os +from pathlib import Path +import re +from tempfile import TemporaryDirectory +from typing import Any +from unittest.mock import Mock, PropertyMock, patch + +import pytest +from voluptuous import Invalid, MultipleInvalid +from webrtc_models import RTCConfiguration, RTCIceServer + +from homeassistant.const import ( + ATTR_ASSUMED_STATE, + ATTR_FRIENDLY_NAME, + CONF_AUTH_MFA_MODULES, + CONF_AUTH_PROVIDERS, + CONF_CUSTOMIZE, + CONF_LATITUDE, + CONF_LONGITUDE, + CONF_NAME, + CONF_UNIT_SYSTEM, + EVENT_CORE_CONFIG_UPDATE, + __version__, +) +from homeassistant.core import HomeAssistant, State +from homeassistant.core_config import ( + _CUSTOMIZE_DICT_SCHEMA, + CORE_CONFIG_SCHEMA, + CORE_STORAGE_KEY, + DATA_CUSTOMIZE, + Config, + ConfigSource, + _validate_stun_or_turn_url, + async_process_ha_core_config, +) +from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers.entity import Entity +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) + +from .common import MockUser, async_capture_events + + +def test_core_config_schema() -> None: + """Test core config schema.""" + for value in ( + {"unit_system": "K"}, + {"time_zone": "non-exist"}, + {"latitude": "91"}, + {"longitude": -181}, + {"external_url": "not an url"}, + {"internal_url": "not an url"}, + {"currency", 100}, + {"customize": "bla"}, + {"customize": {"light.sensor": 100}}, + {"customize": {"entity_id": []}}, + {"country": "xx"}, + {"language": "xx"}, + {"radius": -10}, + {"webrtc": "bla"}, + {"webrtc": {}}, + ): + with pytest.raises(MultipleInvalid): + CORE_CONFIG_SCHEMA(value) + + CORE_CONFIG_SCHEMA( + { + "name": "Test name", + "latitude": "-23.45", + "longitude": "123.45", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "unit_system": "metric", + "currency": "USD", + "customize": {"sensor.temperature": {"hidden": True}}, + "country": "SE", + "language": "sv", + "radius": "10", + "webrtc": {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}, + } + ) + + +def test_core_config_schema_internal_external_warning( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that we warn for internal/external URL with path.""" + CORE_CONFIG_SCHEMA( + { + "external_url": "https://www.example.com/bla", + "internal_url": "http://example.local/yo", + } + ) + + assert "Invalid external_url set" in caplog.text + assert "Invalid internal_url set" in caplog.text + + +def test_customize_dict_schema() -> None: + """Test basic customize config validation.""" + values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_ASSUMED_STATE: "2"}) + + for val in values: + with pytest.raises(MultipleInvalid): + _CUSTOMIZE_DICT_SCHEMA(val) + + assert _CUSTOMIZE_DICT_SCHEMA({ATTR_FRIENDLY_NAME: 2, ATTR_ASSUMED_STATE: "0"}) == { + ATTR_FRIENDLY_NAME: "2", + ATTR_ASSUMED_STATE: False, + } + + +def test_webrtc_schema() -> None: + """Test webrtc config validation.""" + 
invalid_webrtc_configs = ( + "bla", + {}, + {"ice_servers": [], "unknown_key": 123}, + {"ice_servers": [{}]}, + {"ice_servers": [{"invalid_key": 123}]}, + ) + + valid_webrtc_configs = ( + ( + {"ice_servers": []}, + {"ice_servers": []}, + ), + ( + {"ice_servers": {"url": "stun:custom_stun_server:3478"}}, + {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, + ), + ( + {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}, + {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, + ), + ( + {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, + {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, + ), + ( + { + "ice_servers": [ + { + "url": ["stun:custom_stun_server:3478"], + "username": "bla", + "credential": "hunter2", + } + ] + }, + { + "ice_servers": [ + { + "url": ["stun:custom_stun_server:3478"], + "username": "bla", + "credential": "hunter2", + } + ] + }, + ), + ) + + for config in invalid_webrtc_configs: + with pytest.raises(MultipleInvalid): + CORE_CONFIG_SCHEMA({"webrtc": config}) + + for config, validated_webrtc in valid_webrtc_configs: + validated = CORE_CONFIG_SCHEMA({"webrtc": config}) + assert validated["webrtc"] == validated_webrtc + + +def test_validate_stun_or_turn_url() -> None: + """Test _validate_stun_or_turn_url.""" + invalid_urls = ( + "custom_stun_server", + "custom_stun_server:3478", + "bum:custom_stun_server:3478" "http://blah.com:80", + ) + + valid_urls = ( + "stun:custom_stun_server:3478", + "turn:custom_stun_server:3478", + "stuns:custom_stun_server:3478", + "turns:custom_stun_server:3478", + # The validator does not reject urls with path + "stun:custom_stun_server:3478/path", + "turn:custom_stun_server:3478/path", + "stuns:custom_stun_server:3478/path", + "turns:custom_stun_server:3478/path", + # The validator allows any query + "stun:custom_stun_server:3478?query", + "turn:custom_stun_server:3478?query", + "stuns:custom_stun_server:3478?query", + "turns:custom_stun_server:3478?query", + ) + + for url in invalid_urls: + with pytest.raises(Invalid): + _validate_stun_or_turn_url(url) + + for url in valid_urls: + assert _validate_stun_or_turn_url(url) == url + + +def test_customize_glob_is_ordered() -> None: + """Test that customize_glob preserves order.""" + conf = CORE_CONFIG_SCHEMA({"customize_glob": OrderedDict()}) + assert isinstance(conf["customize_glob"], OrderedDict) + + +async def _compute_state(hass: HomeAssistant, config: dict[str, Any]) -> State | None: + await async_process_ha_core_config(hass, config) + + entity = Entity() + entity.entity_id = "test.test" + entity.hass = hass + entity.schedule_update_ha_state() + + await hass.async_block_till_done() + + return hass.states.get("test.test") + + +async def test_entity_customization(hass: HomeAssistant) -> None: + """Test entity customization through configuration.""" + config = { + CONF_LATITUDE: 50, + CONF_LONGITUDE: 50, + CONF_NAME: "Test", + CONF_CUSTOMIZE: {"test.test": {"hidden": True}}, + } + + state = await _compute_state(hass, config) + + assert state.attributes["hidden"] + + +async def test_loading_configuration_from_storage( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test loading core config onto hass object.""" + hass_storage["core.config"] = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "metric", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "currency": "EUR", + "country": "SE", + 
"language": "sv", + "radius": 150, + }, + "key": "core.config", + "version": 1, + "minor_version": 4, + } + await async_process_ha_core_config(hass, {"allowlist_external_dirs": "/etc"}) + + assert hass.config.latitude == 55 + assert hass.config.longitude == 13 + assert hass.config.elevation == 10 + assert hass.config.location_name == "Home" + assert hass.config.units is METRIC_SYSTEM + assert hass.config.time_zone == "Europe/Copenhagen" + assert hass.config.external_url == "https://www.example.com" + assert hass.config.internal_url == "http://example.local" + assert hass.config.currency == "EUR" + assert hass.config.country == "SE" + assert hass.config.language == "sv" + assert hass.config.radius == 150 + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert hass.config.config_source is ConfigSource.STORAGE + + +async def test_loading_configuration_from_storage_with_yaml_only( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test loading core and YAML config onto hass object.""" + hass_storage["core.config"] = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "metric", + }, + "key": "core.config", + "version": 1, + } + await async_process_ha_core_config( + hass, {"media_dirs": {"mymedia": "/usr"}, "allowlist_external_dirs": "/etc"} + ) + + assert hass.config.latitude == 55 + assert hass.config.longitude == 13 + assert hass.config.elevation == 10 + assert hass.config.location_name == "Home" + assert hass.config.units is METRIC_SYSTEM + assert hass.config.time_zone == "Europe/Copenhagen" + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert hass.config.media_dirs == {"mymedia": "/usr"} + assert hass.config.config_source is ConfigSource.STORAGE + + +async def test_migration_and_updating_configuration( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test updating configuration stores the new configuration.""" + core_data = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "imperial", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "currency": "BTC", + }, + "key": "core.config", + "version": 1, + "minor_version": 1, + } + hass_storage["core.config"] = dict(core_data) + await async_process_ha_core_config(hass, {"allowlist_external_dirs": "/etc"}) + await hass.config.async_update(latitude=50, currency="USD") + + expected_new_core_data = copy.deepcopy(core_data) + # From async_update above + expected_new_core_data["data"]["latitude"] = 50 + expected_new_core_data["data"]["currency"] = "USD" + # 1.1 -> 1.2 store migration with migrated unit system + expected_new_core_data["data"]["unit_system_v2"] = "us_customary" + # 1.1 -> 1.3 defaults for country and language + expected_new_core_data["data"]["country"] = None + expected_new_core_data["data"]["language"] = "en" + # 1.1 -> 1.4 defaults for zone radius + expected_new_core_data["data"]["radius"] = 100 + # Bumped minor version + expected_new_core_data["minor_version"] = 4 + assert hass_storage["core.config"] == expected_new_core_data + assert hass.config.latitude == 50 + assert hass.config.currency == "USD" + assert hass.config.country is None + assert hass.config.language == "en" + assert hass.config.radius == 100 + + +async def 
test_override_stored_configuration( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test loading core and YAML config onto hass object.""" + hass_storage["core.config"] = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "metric", + }, + "key": "core.config", + "version": 1, + } + await async_process_ha_core_config( + hass, {"latitude": 60, "allowlist_external_dirs": "/etc"} + ) + + assert hass.config.latitude == 60 + assert hass.config.longitude == 13 + assert hass.config.elevation == 10 + assert hass.config.location_name == "Home" + assert hass.config.units is METRIC_SYSTEM + assert hass.config.time_zone == "Europe/Copenhagen" + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert hass.config.config_source is ConfigSource.YAML + + +async def test_loading_configuration(hass: HomeAssistant) -> None: + """Test loading core config onto hass object.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "America/New_York", + "allowlist_external_dirs": "/etc", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "media_dirs": {"mymedia": "/usr"}, + "debug": True, + "currency": "EUR", + "country": "SE", + "language": "sv", + "radius": 150, + "webrtc": {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}, + }, + ) + + assert hass.config.latitude == 60 + assert hass.config.longitude == 50 + assert hass.config.elevation == 25 + assert hass.config.location_name == "Huis" + assert hass.config.units is US_CUSTOMARY_SYSTEM + assert hass.config.time_zone == "America/New_York" + assert hass.config.external_url == "https://www.example.com" + assert hass.config.internal_url == "http://example.local" + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert "/usr" in hass.config.allowlist_external_dirs + assert hass.config.media_dirs == {"mymedia": "/usr"} + assert hass.config.config_source is ConfigSource.YAML + assert hass.config.debug is True + assert hass.config.currency == "EUR" + assert hass.config.country == "SE" + assert hass.config.language == "sv" + assert hass.config.radius == 150 + assert hass.config.webrtc == RTCConfiguration( + [RTCIceServer(urls=["stun:custom_stun_server:3478"])] + ) + + +@pytest.mark.parametrize( + ("minor_version", "users", "user_data", "default_language"), + [ + (2, (), {}, "en"), + (2, ({"is_owner": True},), {}, "en"), + ( + 2, + ({"id": "user1", "is_owner": True},), + {"user1": {"language": {"language": "sv"}}}, + "sv", + ), + ( + 2, + ({"id": "user1", "is_owner": False},), + {"user1": {"language": {"language": "sv"}}}, + "en", + ), + (3, (), {}, "en"), + (3, ({"is_owner": True},), {}, "en"), + ( + 3, + ({"id": "user1", "is_owner": True},), + {"user1": {"language": {"language": "sv"}}}, + "en", + ), + ( + 3, + ({"id": "user1", "is_owner": False},), + {"user1": {"language": {"language": "sv"}}}, + "en", + ), + ], +) +async def test_language_default( + hass: HomeAssistant, + hass_storage: dict[str, Any], + minor_version, + users, + user_data, + default_language, +) -> None: + """Test language config default to owner user's language during migration. 
+ + This should only happen if the core store version < 1.3 + """ + core_data = { + "data": {}, + "key": "core.config", + "version": 1, + "minor_version": minor_version, + } + hass_storage["core.config"] = dict(core_data) + + for user_config in users: + user = MockUser(**user_config).add_to_hass(hass) + if user.id not in user_data: + continue + storage_key = f"frontend.user_data_{user.id}" + hass_storage[storage_key] = { + "key": storage_key, + "version": 1, + "data": user_data[user.id], + } + + await async_process_ha_core_config( + hass, + {}, + ) + assert hass.config.language == default_language + + +async def test_loading_configuration_default_media_dirs_docker( + hass: HomeAssistant, +) -> None: + """Test loading core config onto hass object.""" + with patch("homeassistant.core_config.is_docker_env", return_value=True): + await async_process_ha_core_config( + hass, + { + "name": "Huis", + }, + ) + + assert hass.config.location_name == "Huis" + assert len(hass.config.allowlist_external_dirs) == 2 + assert "/media" in hass.config.allowlist_external_dirs + assert hass.config.media_dirs == {"local": "/media"} + + +async def test_loading_configuration_from_packages(hass: HomeAssistant) -> None: + """Test loading packages config onto hass object config.""" + await async_process_ha_core_config( + hass, + { + "latitude": 39, + "longitude": -1, + "elevation": 500, + "name": "Huis", + "unit_system": "metric", + "time_zone": "Europe/Madrid", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "packages": { + "package_1": {"wake_on_lan": None}, + "package_2": { + "light": {"platform": "hue"}, + "media_extractor": None, + "sun": None, + }, + }, + }, + ) + + # Empty packages not allowed + with pytest.raises(MultipleInvalid): + await async_process_ha_core_config( + hass, + { + "latitude": 39, + "longitude": -1, + "elevation": 500, + "name": "Huis", + "unit_system": "metric", + "time_zone": "Europe/Madrid", + "packages": {"empty_package": None}, + }, + ) + + +@pytest.mark.parametrize( + ("unit_system_name", "expected_unit_system"), + [ + ("metric", METRIC_SYSTEM), + ("imperial", US_CUSTOMARY_SYSTEM), + ("us_customary", US_CUSTOMARY_SYSTEM), + ], +) +async def test_loading_configuration_unit_system( + hass: HomeAssistant, unit_system_name: str, expected_unit_system: UnitSystem +) -> None: + """Test backward compatibility when loading core config.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": unit_system_name, + "time_zone": "America/New_York", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + }, + ) + + assert hass.config.units is expected_unit_system + + +async def test_merge_customize(hass: HomeAssistant) -> None: + """Test loading core config onto hass object.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + "customize": {"a.a": {"friendly_name": "A"}}, + "packages": { + "pkg1": {"homeassistant": {"customize": {"b.b": {"friendly_name": "BB"}}}} + }, + } + await async_process_ha_core_config(hass, core_config) + + assert hass.data[DATA_CUSTOMIZE].get("b.b") == {"friendly_name": "BB"} + + +async def test_auth_provider_config(hass: HomeAssistant) -> None: + """Test loading auth provider config onto hass object.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + 
"time_zone": "GMT", + CONF_AUTH_PROVIDERS: [ + {"type": "homeassistant"}, + ], + CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp", "id": "second"}], + } + if hasattr(hass, "auth"): + del hass.auth + await async_process_ha_core_config(hass, core_config) + + assert len(hass.auth.auth_providers) == 1 + assert hass.auth.auth_providers[0].type == "homeassistant" + assert len(hass.auth.auth_mfa_modules) == 2 + assert hass.auth.auth_mfa_modules[0].id == "totp" + assert hass.auth.auth_mfa_modules[1].id == "second" + + +async def test_auth_provider_config_default(hass: HomeAssistant) -> None: + """Test loading default auth provider config.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + } + if hasattr(hass, "auth"): + del hass.auth + await async_process_ha_core_config(hass, core_config) + + assert len(hass.auth.auth_providers) == 1 + assert hass.auth.auth_providers[0].type == "homeassistant" + assert len(hass.auth.auth_mfa_modules) == 1 + assert hass.auth.auth_mfa_modules[0].id == "totp" + + +async def test_disallowed_auth_provider_config(hass: HomeAssistant) -> None: + """Test loading insecure example auth provider is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_PROVIDERS: [ + { + "type": "insecure_example", + "users": [ + { + "username": "test-user", + "password": "test-pass", + "name": "Test Name", + } + ], + } + ], + } + with pytest.raises(Invalid): + await async_process_ha_core_config(hass, core_config) + + +async def test_disallowed_duplicated_auth_provider_config(hass: HomeAssistant) -> None: + """Test loading insecure example auth provider is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_PROVIDERS: [{"type": "homeassistant"}, {"type": "homeassistant"}], + } + with pytest.raises(Invalid): + await async_process_ha_core_config(hass, core_config) + + +async def test_disallowed_auth_mfa_module_config(hass: HomeAssistant) -> None: + """Test loading insecure example auth mfa module is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_MFA_MODULES: [ + { + "type": "insecure_example", + "data": [{"user_id": "mock-user", "pin": "test-pin"}], + } + ], + } + with pytest.raises(Invalid): + await async_process_ha_core_config(hass, core_config) + + +async def test_disallowed_duplicated_auth_mfa_module_config( + hass: HomeAssistant, +) -> None: + """Test loading insecure example auth mfa module is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp"}], + } + with pytest.raises(Invalid): + await async_process_ha_core_config(hass, core_config) + + +async def test_core_config_schema_historic_currency( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test core config schema.""" + await async_process_ha_core_config(hass, {"currency": "LTT"}) + + issue = issue_registry.async_get_issue("homeassistant", "historic_currency") + assert issue + assert issue.translation_placeholders == {"currency": "LTT"} + + +async def test_core_store_historic_currency( + hass: 
HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry +) -> None: + """Test core config store.""" + core_data = { + "data": { + "currency": "LTT", + }, + "key": "core.config", + "version": 1, + "minor_version": 1, + } + hass_storage["core.config"] = dict(core_data) + await async_process_ha_core_config(hass, {}) + + issue_id = "historic_currency" + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert issue + assert issue.translation_placeholders == {"currency": "LTT"} + + await hass.config.async_update(currency="EUR") + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert not issue + + +async def test_core_config_schema_no_country( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test core config schema.""" + await async_process_ha_core_config(hass, {}) + + issue = issue_registry.async_get_issue("homeassistant", "country_not_configured") + assert issue + + +async def test_core_store_no_country( + hass: HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry +) -> None: + """Test core config store.""" + core_data = { + "data": {}, + "key": "core.config", + "version": 1, + "minor_version": 1, + } + hass_storage["core.config"] = dict(core_data) + await async_process_ha_core_config(hass, {}) + + issue_id = "country_not_configured" + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert issue + + await hass.config.async_update(country="SE") + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert not issue + + +async def test_configuration_legacy_template_is_removed(hass: HomeAssistant) -> None: + """Test loading core config onto hass object.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "America/New_York", + "allowlist_external_dirs": "/etc", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "media_dirs": {"mymedia": "/usr"}, + "legacy_templates": True, + "debug": True, + "currency": "EUR", + "country": "SE", + "language": "sv", + "radius": 150, + }, + ) + + assert not getattr(hass.config, "legacy_templates") + + +async def test_config_defaults() -> None: + """Test config defaults.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + assert config.hass is hass + assert config.latitude == 0 + assert config.longitude == 0 + assert config.elevation == 0 + assert config.location_name == "Home" + assert config.time_zone == "UTC" + assert config.internal_url is None + assert config.external_url is None + assert config.config_source is ConfigSource.DEFAULT + assert config.skip_pip is False + assert config.skip_pip_packages == [] + assert config.components == set() + assert config.api is None + assert config.config_dir == "/test/ha-config" + assert config.allowlist_external_dirs == set() + assert config.allowlist_external_urls == set() + assert config.media_dirs == {} + assert config.recovery_mode is False + assert config.legacy_templates is False + assert config.currency == "EUR" + assert config.country is None + assert config.language == "en" + assert config.radius == 100 + + +async def test_config_path_with_file() -> None: + """Test get_config_path method.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + assert config.path("test.conf") == "/test/ha-config/test.conf" + + +async def test_config_path_with_dir_and_file() -> None: + 
"""Test get_config_path method.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + assert config.path("dir", "test.conf") == "/test/ha-config/dir/test.conf" + + +async def test_config_as_dict() -> None: + """Test as dict.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + type(config.hass.state).value = PropertyMock(return_value="RUNNING") + expected = { + "latitude": 0, + "longitude": 0, + "elevation": 0, + CONF_UNIT_SYSTEM: METRIC_SYSTEM.as_dict(), + "location_name": "Home", + "time_zone": "UTC", + "components": [], + "config_dir": "/test/ha-config", + "whitelist_external_dirs": [], + "allowlist_external_dirs": [], + "allowlist_external_urls": [], + "version": __version__, + "config_source": ConfigSource.DEFAULT, + "recovery_mode": False, + "state": "RUNNING", + "external_url": None, + "internal_url": None, + "currency": "EUR", + "country": None, + "language": "en", + "safe_mode": False, + "debug": False, + "radius": 100, + } + + assert expected == config.as_dict() + + +async def test_config_is_allowed_path() -> None: + """Test is_allowed_path method.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + with TemporaryDirectory() as tmp_dir: + # The created dir is in /tmp. This is a symlink on OS X + # causing this test to fail unless we resolve path first. + config.allowlist_external_dirs = {os.path.realpath(tmp_dir)} + + test_file = os.path.join(tmp_dir, "test.jpg") + await asyncio.get_running_loop().run_in_executor( + None, Path(test_file).write_text, "test" + ) + + valid = [test_file, tmp_dir, os.path.join(tmp_dir, "notfound321")] + for path in valid: + assert config.is_allowed_path(path) + + config.allowlist_external_dirs = {"/home", "/var"} + + invalid = [ + "/hass/config/secure", + "/etc/passwd", + "/root/secure_file", + "/var/../etc/passwd", + test_file, + ] + for path in invalid: + assert not config.is_allowed_path(path) + + with pytest.raises(AssertionError): + config.is_allowed_path(None) + + +async def test_config_is_allowed_external_url() -> None: + """Test is_allowed_external_url method.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + config.allowlist_external_urls = [ + "http://x.com/", + "https://y.com/bla/", + "https://z.com/images/1.jpg/", + ] + + valid = [ + "http://x.com/1.jpg", + "http://x.com", + "https://y.com/bla/", + "https://y.com/bla/2.png", + "https://z.com/images/1.jpg", + ] + for url in valid: + assert config.is_allowed_external_url(url) + + invalid = [ + "https://a.co", + "https://y.com/bla_wrong", + "https://y.com/bla/../image.jpg", + "https://z.com/images", + ] + for url in invalid: + assert not config.is_allowed_external_url(url) + + +async def test_event_on_update(hass: HomeAssistant) -> None: + """Test that event is fired on update.""" + events = async_capture_events(hass, EVENT_CORE_CONFIG_UPDATE) + + assert hass.config.latitude != 12 + + await hass.config.async_update(latitude=12) + await hass.async_block_till_done() + + assert hass.config.latitude == 12 + assert len(events) == 1 + assert events[0].data == {"latitude": 12} + + +async def test_bad_timezone_raises_value_error(hass: HomeAssistant) -> None: + """Test bad timezone raises ValueError.""" + with pytest.raises(ValueError): + await hass.config.async_update(time_zone="not_a_timezone") + + +async def test_additional_data_in_core_config( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test that we can handle additional data in core configuration.""" + config 
= Config(hass, "/test/ha-config") + config.async_initialize() + hass_storage[CORE_STORAGE_KEY] = { + "version": 1, + "data": {"location_name": "Test Name", "additional_valid_key": "value"}, + } + await config.async_load() + assert config.location_name == "Test Name" + + +async def test_incorrect_internal_external_url( + hass: HomeAssistant, hass_storage: dict[str, Any], caplog: pytest.LogCaptureFixture +) -> None: + """Test that we warn when detecting invalid internal/external url.""" + config = Config(hass, "/test/ha-config") + config.async_initialize() + + hass_storage[CORE_STORAGE_KEY] = { + "version": 1, + "data": { + "internal_url": None, + "external_url": None, + }, + } + await config.async_load() + assert "Invalid external_url set" not in caplog.text + assert "Invalid internal_url set" not in caplog.text + + config = Config(hass, "/test/ha-config") + config.async_initialize() + + hass_storage[CORE_STORAGE_KEY] = { + "version": 1, + "data": { + "internal_url": "https://community.home-assistant.io/profile", + "external_url": "https://www.home-assistant.io/blue", + }, + } + await config.async_load() + assert "Invalid external_url set" in caplog.text + assert "Invalid internal_url set" in caplog.text + + +async def test_top_level_components(hass: HomeAssistant) -> None: + """Test top level components are updated when components change.""" + hass.config.components.add("homeassistant") + assert hass.config.components == {"homeassistant"} + assert hass.config.top_level_components == {"homeassistant"} + hass.config.components.add("homeassistant.scene") + assert hass.config.components == {"homeassistant", "homeassistant.scene"} + assert hass.config.top_level_components == {"homeassistant"} + hass.config.components.remove("homeassistant") + assert hass.config.components == {"homeassistant.scene"} + assert hass.config.top_level_components == set() + with pytest.raises(ValueError): + hass.config.components.remove("homeassistant.scene") + with pytest.raises(NotImplementedError): + hass.config.components.discard("homeassistant") + + +async def test_debug_mode_defaults_to_off(hass: HomeAssistant) -> None: + """Test debug mode defaults to off.""" + assert not hass.config.debug + + +async def test_set_time_zone_deprecated(hass: HomeAssistant) -> None: + """Test set_time_zone is deprecated.""" + with pytest.raises( + RuntimeError, + match=re.escape( + "Detected code that sets the time zone using set_time_zone instead of " + "async_set_time_zone. 
Please report this issue" + ), + ): + await hass.config.set_time_zone("America/New_York") + + +async def test_core_config_schema_imperial_unit( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test core config schema.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Home", + "unit_system": "imperial", + "time_zone": "America/New_York", + "currency": "USD", + "country": "US", + "language": "en", + "radius": 150, + }, + ) + + issue = issue_registry.async_get_issue("homeassistant", "imperial_unit_system") + assert issue diff --git a/tests/test_data_entry_flow.py b/tests/test_data_entry_flow.py index 967b2565206..74a55cb4989 100644 --- a/tests/test_data_entry_flow.py +++ b/tests/test_data_entry_flow.py @@ -13,11 +13,7 @@ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.util.decorator import Registry -from .common import ( - async_capture_events, - help_test_all, - import_and_test_deprecated_constant_enum, -) +from .common import async_capture_events class MockFlowManager(data_entry_flow.FlowManager): @@ -781,83 +777,6 @@ async def test_async_get_unknown_flow(manager: MockFlowManager) -> None: await manager.async_get("does_not_exist") -async def test_async_has_matching_flow( - hass: HomeAssistant, manager: MockFlowManager -) -> None: - """Test we can check for matching flows.""" - manager.hass = hass - assert ( - manager.async_has_matching_flow( - "test", - {"source": config_entries.SOURCE_HOMEKIT}, - {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - is False - ) - - @manager.mock_reg_handler("test") - class TestFlow(data_entry_flow.FlowHandler): - VERSION = 5 - - async def async_step_init(self, user_input=None): - return self.async_show_progress( - step_id="init", - progress_action="task_one", - ) - - result = await manager.async_init( - "test", - context={"source": config_entries.SOURCE_HOMEKIT}, - data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - assert result["type"] == data_entry_flow.FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "task_one" - assert len(manager.async_progress()) == 1 - assert len(manager.async_progress_by_handler("test")) == 1 - assert ( - len( - manager.async_progress_by_handler( - "test", match_context={"source": config_entries.SOURCE_HOMEKIT} - ) - ) - == 1 - ) - assert ( - len( - manager.async_progress_by_handler( - "test", match_context={"source": config_entries.SOURCE_BLUETOOTH} - ) - ) - == 0 - ) - assert manager.async_get(result["flow_id"])["handler"] == "test" - - assert ( - manager.async_has_matching_flow( - "test", - {"source": config_entries.SOURCE_HOMEKIT}, - {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - is True - ) - assert ( - manager.async_has_matching_flow( - "test", - {"source": config_entries.SOURCE_SSDP}, - {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - is False - ) - assert ( - manager.async_has_matching_flow( - "other", - {"source": config_entries.SOURCE_HOMEKIT}, - {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - is False - ) - - async def test_move_to_unknown_step_raises_and_removes_from_in_progress( manager: MockFlowManager, ) -> None: @@ -1062,22 +981,6 @@ async def test_find_flows_by_init_data_type(manager: MockFlowManager) -> None: assert len(manager.async_progress()) == 0 -def test_all() -> None: - """Test module.__all__ is correctly set.""" - help_test_all(data_entry_flow) - - -@pytest.mark.parametrize(("enum"), 
list(data_entry_flow.FlowResultType)) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: data_entry_flow.FlowResultType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, data_entry_flow, enum, "RESULT_TYPE_", "2025.1" - ) - - def test_section_in_serializer() -> None: """Test section with custom_serializer.""" assert cv.custom_serializer( @@ -1098,3 +1001,27 @@ def test_section_in_serializer() -> None: ], "type": "expandable", } + + +def test_nested_section_in_serializer() -> None: + """Test section with custom_serializer.""" + with pytest.raises( + ValueError, match="Nesting expandable sections is not supported" + ): + cv.custom_serializer( + data_entry_flow.section( + vol.Schema( + { + vol.Required("section_1"): data_entry_flow.section( + vol.Schema( + { + vol.Optional("option_1", default=False): bool, + vol.Required("option_2"): int, + } + ) + ) + } + ), + {"collapsed": False}, + ) + ) diff --git a/tests/test_loader.py b/tests/test_loader.py index 01305dde002..4c3c4eb309f 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -6,7 +6,7 @@ import pathlib import sys import threading from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import MagicMock, patch from awesomeversion import AwesomeVersion import pytest @@ -547,6 +547,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: ], "mqtt": ["hue/discovery"], "version": "1.0.0", + "quality_scale": "gold", }, ) assert integration.name == "Philips Hue" @@ -583,7 +584,9 @@ def test_integration_properties(hass: HomeAssistant) -> None: assert integration.dependencies == ["test-dep"] assert integration.requirements == ["test-req==1.0.0"] assert integration.is_built_in is True + assert integration.overwrites_built_in is False assert integration.version == "1.0.0" + assert integration.quality_scale == "gold" integration = loader.Integration( hass, @@ -594,9 +597,11 @@ def test_integration_properties(hass: HomeAssistant) -> None: "domain": "hue", "dependencies": ["test-dep"], "requirements": ["test-req==1.0.0"], + "quality_scale": "gold", }, ) assert integration.is_built_in is False + assert integration.overwrites_built_in is True assert integration.homekit is None assert integration.zeroconf is None assert integration.dhcp is None @@ -605,6 +610,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: assert integration.ssdp is None assert integration.mqtt is None assert integration.version is None + assert integration.quality_scale == "custom" integration = loader.Integration( hass, @@ -619,6 +625,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: }, ) assert integration.is_built_in is False + assert integration.overwrites_built_in is True assert integration.homekit is None assert integration.zeroconf == [{"type": "_hue._tcp.local.", "name": "hue*"}] assert integration.dhcp is None @@ -815,7 +822,7 @@ async def test_get_custom_components(hass: HomeAssistant) -> None: test_1_integration = _get_test_integration(hass, "test_1", False) test_2_integration = _get_test_integration(hass, "test_2", True) - name = "homeassistant.loader._async_get_custom_components" + name = "homeassistant.loader._get_custom_components" with patch(name) as mock_get: mock_get.return_value = { "test_1": test_1_integration, @@ -828,6 +835,29 @@ async def test_get_custom_components(hass: HomeAssistant) -> None: mock_get.assert_called_once_with(hass) +@pytest.mark.usefixtures("enable_custom_integrations") +async def 
test_custom_component_overwriting_core(hass: HomeAssistant) -> None: + """Test loading a custom component that overwrites a core component.""" + # First load the core 'light' component + core_light = await loader.async_get_integration(hass, "light") + assert core_light.is_built_in is True + + # create a mock custom 'light' component + mock_integration( + hass, + MockModule("light", partial_manifest={"version": "1.0.0"}), + built_in=False, + ) + + # Try to load the 'light' component again + custom_light = await loader.async_get_integration(hass, "light") + + # Assert that we got the custom component instead of the core one + assert custom_light.is_built_in is False + assert custom_light.overwrites_built_in is True + assert custom_light.version == "1.0.0" + + async def test_get_config_flows(hass: HomeAssistant) -> None: """Verify that custom components with config_flow are available.""" test_1_integration = _get_test_integration(hass, "test_1", False) @@ -1269,26 +1299,29 @@ async def test_config_folder_not_in_path() -> None: import tests.testing_config.check_config_not_in_path # noqa: F401 -async def test_hass_components_use_reported( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock -) -> None: - """Test that use of hass.components is reported.""" - mock_integration_frame.filename = ( - "/home/paulus/homeassistant/custom_components/demo/light.py" - ) - integration_frame = frame.IntegrationFrame( - custom_integration=True, - frame=mock_integration_frame, - integration="test_integration_frame", - module="custom_components.test_integration_frame", - relative_filename="custom_components/test_integration_frame/__init__.py", - ) - - with ( - patch( - "homeassistant.helpers.frame.get_integration_frame", - return_value=integration_frame, +@pytest.mark.parametrize( + ("integration_frame_path", "expected"), + [ + pytest.param( + "custom_components/test_integration_frame", True, id="custom integration" ), + pytest.param( + "homeassistant/components/test_integration_frame", + False, + id="core integration", + ), + pytest.param("homeassistant/test_integration_frame", False, id="core"), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_hass_components_use_reported( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + expected: bool, +) -> None: + """Test whether use of hass.components is reported.""" + with ( patch( "homeassistant.components.http.start_http_server_and_save_config", return_value=None, @@ -1296,10 +1329,11 @@ async def test_hass_components_use_reported( ): await hass.components.http.start_http_server_and_save_config(hass, [], None) - assert ( + reported = ( "Detected that custom integration 'test_integration_frame'" - " accesses hass.components.http. 
This is deprecated" + " accesses hass.components.http, which should be updated" ) in caplog.text + assert reported == expected async def test_async_get_component_preloads_config_and_config_flow( @@ -1961,24 +1995,29 @@ async def test_has_services(hass: HomeAssistant) -> None: assert integration.has_services is True -async def test_hass_helpers_use_reported( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock -) -> None: - """Test that use of hass.components is reported.""" - integration_frame = frame.IntegrationFrame( - custom_integration=True, - frame=mock_integration_frame, - integration="test_integration_frame", - module="custom_components.test_integration_frame", - relative_filename="custom_components/test_integration_frame/__init__.py", - ) - - with ( - patch.object(frame, "_REPORTED_INTEGRATIONS", new=set()), - patch( - "homeassistant.helpers.frame.get_integration_frame", - return_value=integration_frame, +@pytest.mark.parametrize( + ("integration_frame_path", "expected"), + [ + pytest.param( + "custom_components/test_integration_frame", True, id="custom integration" ), + pytest.param( + "homeassistant/components/test_integration_frame", + False, + id="core integration", + ), + pytest.param("homeassistant/test_integration_frame", False, id="core"), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_hass_helpers_use_reported( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + expected: bool, +) -> None: + """Test whether use of hass.helpers is reported.""" + with ( patch( "homeassistant.helpers.aiohttp_client.async_get_clientsession", return_value=None, @@ -1986,10 +2025,11 @@ async def test_hass_helpers_use_reported( ): hass.helpers.aiohttp_client.async_get_clientsession() - assert ( + reported = ( "Detected that custom integration 'test_integration_frame' " - "accesses hass.helpers.aiohttp_client. 
This is deprecated" + "accesses hass.helpers.aiohttp_client, which should be updated" ) in caplog.text + assert reported == expected async def test_manifest_json_fragment_round_trip(hass: HomeAssistant) -> None: diff --git a/tests/test_main.py b/tests/test_main.py index 080787311a0..d32ca59a846 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -3,7 +3,7 @@ from unittest.mock import PropertyMock, patch from homeassistant import __main__ as main -from homeassistant.const import REQUIRED_PYTHON_VER +from homeassistant.const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE @patch("sys.exit") @@ -86,3 +86,13 @@ def test_skip_pip_mutually_exclusive(mock_exit) -> None: assert mock_exit.called is False args = parse_args("--skip-pip", "--skip-pip-packages", "foo") assert mock_exit.called is True + + +def test_restart_after_backup_restore() -> None: + """Test restarting if we restored a backup.""" + with ( + patch("sys.argv", ["python"]), + patch("homeassistant.__main__.restore_backup", return_value=True), + ): + exit_code = main.main() + assert exit_code == RESTART_EXIT_CODE diff --git a/tests/test_requirements.py b/tests/test_requirements.py index 2885fa30036..191e1b7368c 100644 --- a/tests/test_requirements.py +++ b/tests/test_requirements.py @@ -585,7 +585,8 @@ async def test_discovery_requirements_mqtt(hass: HomeAssistant) -> None: ) as mock_process: await async_get_integration_with_requirements(hass, "mqtt_comp") - assert len(mock_process.mock_calls) == 1 + assert len(mock_process.mock_calls) == 2 + # one for mqtt and one for hassio assert mock_process.mock_calls[0][1][1] == mqtt.requirements diff --git a/tests/test_setup.py b/tests/test_setup.py index c50f8392d66..2d15c670cf7 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -248,22 +248,39 @@ async def test_component_not_found( hass: HomeAssistant, issue_registry: IssueRegistry ) -> None: """setup_component should raise a repair issue if component doesn't exist.""" + MockConfigEntry(domain="non_existing").add_to_hass(hass) assert await setup.async_setup_component(hass, "non_existing", {}) is False assert len(issue_registry.issues) == 1 - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "integration_not_found.non_existing" - ) - assert issue - assert issue.translation_key == "integration_not_found" + assert ( + HOMEASSISTANT_DOMAIN, + "integration_not_found.non_existing", + ) in issue_registry.issues + + +async def test_yaml_component_not_found( + hass: HomeAssistant, issue_registry: IssueRegistry +) -> None: + """setup_component should only raise an exception for missing config entry integrations.""" + assert await setup.async_setup_component(hass, "non_existing", {}) is False + assert len(issue_registry.issues) == 0 + assert ( + HOMEASSISTANT_DOMAIN, + "integration_not_found.non_existing", + ) not in issue_registry.issues async def test_component_missing_not_raising_in_safe_mode( hass: HomeAssistant, issue_registry: IssueRegistry ) -> None: """setup_component should not raise an issue if component doesn't exist in safe.""" + MockConfigEntry(domain="non_existing").add_to_hass(hass) hass.config.safe_mode = True assert await setup.async_setup_component(hass, "non_existing", {}) is False assert len(issue_registry.issues) == 0 + assert ( + HOMEASSISTANT_DOMAIN, + "integration_not_found.non_existing", + ) not in issue_registry.issues async def test_component_not_double_initialized(hass: HomeAssistant) -> None: diff --git a/tests/test_util/aiohttp.py b/tests/test_util/aiohttp.py index 04d6db509e0..633f98dc5b3 
100644 --- a/tests/test_util/aiohttp.py +++ b/tests/test_util/aiohttp.py @@ -5,6 +5,7 @@ from collections.abc import Iterator from contextlib import contextmanager from http import HTTPStatus import re +from types import TracebackType from typing import Any from unittest import mock from urllib.parse import parse_qs @@ -166,7 +167,7 @@ class AiohttpClientMockResponse: def __init__( self, method, - url, + url: URL, status=HTTPStatus.OK, response=None, json=None, @@ -297,6 +298,18 @@ class AiohttpClientMockResponse: raise ClientConnectionError("Connection closed") return self._response + async def __aenter__(self): + """Enter the context manager.""" + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Exit the context manager.""" + @contextmanager def mock_aiohttp_client() -> Iterator[AiohttpClientMocker]: diff --git a/tests/testing_config/blueprints/automation/test_event_service.yaml b/tests/testing_config/blueprints/automation/test_event_service.yaml index ba7462ed2e0..ec11f24fc63 100644 --- a/tests/testing_config/blueprints/automation/test_event_service.yaml +++ b/tests/testing_config/blueprints/automation/test_event_service.yaml @@ -10,9 +10,9 @@ blueprint: selector: number: mode: "box" -trigger: - platform: event +triggers: + trigger: event event_type: !input trigger_event -action: +actions: service: !input service_to_call entity_id: light.kitchen diff --git a/tests/testing_config/blueprints/automation/test_event_service_legacy_schema.yaml b/tests/testing_config/blueprints/automation/test_event_service_legacy_schema.yaml new file mode 100644 index 00000000000..ba7462ed2e0 --- /dev/null +++ b/tests/testing_config/blueprints/automation/test_event_service_legacy_schema.yaml @@ -0,0 +1,18 @@ +blueprint: + name: "Call service based on event" + domain: automation + input: + trigger_event: + selector: + text: + service_to_call: + a_number: + selector: + number: + mode: "box" +trigger: + platform: event + event_type: !input trigger_event +action: + service: !input service_to_call + entity_id: light.kitchen diff --git a/tests/testing_config/custom_components/test/camera.py b/tests/testing_config/custom_components/test/camera.py new file mode 100644 index 00000000000..b2aa1bbc53b --- /dev/null +++ b/tests/testing_config/custom_components/test/camera.py @@ -0,0 +1,41 @@ +"""Provide a mock remote platform. + +Call init before using it in your tests to ensure clean test data. 
+""" + +from homeassistant.components.camera import Camera, CameraEntityFeature, StreamType +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType + + +async def async_setup_platform( + hass: HomeAssistant, + config: ConfigType, + async_add_entities_callback: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +) -> None: + """Return mock entities.""" + async_add_entities_callback( + [AttrFrontendStreamTypeCamera(), PropertyFrontendStreamTypeCamera()] + ) + + +class AttrFrontendStreamTypeCamera(Camera): + """attr frontend stream type Camera.""" + + _attr_name = "attr frontend stream type" + _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM + _attr_frontend_stream_type: StreamType = StreamType.WEB_RTC + + +class PropertyFrontendStreamTypeCamera(Camera): + """property frontend stream type Camera.""" + + _attr_name = "property frontend stream type" + _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM + + @property + def frontend_stream_type(self) -> StreamType | None: + """Return the stream type of the camera.""" + return StreamType.WEB_RTC diff --git a/tests/testing_config/custom_components/test/translations/de.json b/tests/testing_config/custom_components/test/translations/de.json index 57d26f28ec0..8cac140c753 100644 --- a/tests/testing_config/custom_components/test/translations/de.json +++ b/tests/testing_config/custom_components/test/translations/de.json @@ -1,7 +1,10 @@ { "entity": { "switch": { - "other1": { "name": "Anderes 1" }, + "other1": { + "name": "Anderes 1", + "unit_of_measurement": "einheiten" + }, "other2": { "name": "Anderes 2 {placeholder}" }, "other3": { "name": "" }, "outlet": { "name": "Steckdose {something}" } diff --git a/tests/testing_config/custom_components/test/translations/en.json b/tests/testing_config/custom_components/test/translations/en.json index 7ed32c224a7..802c859e922 100644 --- a/tests/testing_config/custom_components/test/translations/en.json +++ b/tests/testing_config/custom_components/test/translations/en.json @@ -1,10 +1,13 @@ { "entity": { "switch": { - "other1": { "name": "Other 1" }, + "other1": { "name": "Other 1", "unit_of_measurement": "units" }, "other2": { "name": "Other 2" }, "other3": { "name": "Other 3" }, - "other4": { "name": "Other 4" }, + "other4": { + "name": "Other 4", + "unit_of_measurement": "quantities" + }, "outlet": { "name": "Outlet {placeholder}" } } }, diff --git a/tests/testing_config/custom_sentences/en/beer.yaml b/tests/testing_config/custom_sentences/en/beer.yaml index f318e0221b2..7222ffcb0ca 100644 --- a/tests/testing_config/custom_sentences/en/beer.yaml +++ b/tests/testing_config/custom_sentences/en/beer.yaml @@ -3,11 +3,11 @@ intents: OrderBeer: data: - sentences: - - "I'd like to order a {beer_style} [please]" + - "[I'd like to ]order a {beer_style} [please]" OrderFood: data: - sentences: - - "I'd like to order {food_name:name} [please]" + - "[I'd like to ]order {food_name:name} [please]" lists: beer_style: values: diff --git a/tests/util/test_async.py b/tests/util/test_async.py index cda10b69c3f..cfa78228f0c 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -140,7 +140,7 @@ async def test_create_eager_task_from_thread(hass: HomeAssistant) -> None: with pytest.raises( RuntimeError, match=( - "Detected code that attempted to create an asyncio task from a thread. Please report this issue." 
+ "Detected code that attempted to create an asyncio task from a thread. Please report this issue" ), ): await hass.async_add_executor_job(create_task) diff --git a/tests/util/test_color.py b/tests/util/test_color.py index c8a5e0c8587..165552b8792 100644 --- a/tests/util/test_color.py +++ b/tests/util/test_color.py @@ -181,7 +181,7 @@ def test_color_hs_to_xy() -> None: assert color_util.color_hs_to_xy(350, 12.5) == (0.356, 0.321) - assert color_util.color_hs_to_xy(140, 50) == (0.229, 0.474) + assert color_util.color_hs_to_xy(140, 50) == (0.23, 0.474) assert color_util.color_hs_to_xy(0, 40) == (0.474, 0.317) diff --git a/tests/util/test_dt.py b/tests/util/test_dt.py index 0e8432bbb83..347e92d6056 100644 --- a/tests/util/test_dt.py +++ b/tests/util/test_dt.py @@ -116,10 +116,14 @@ def test_utc_from_timestamp() -> None: ) -def test_timestamp_to_utc() -> None: +def test_timestamp_to_utc(caplog: pytest.LogCaptureFixture) -> None: """Test we can convert a utc datetime to a timestamp.""" utc_now = dt_util.utcnow() assert dt_util.utc_to_timestamp(utc_now) == utc_now.timestamp() + assert ( + "utc_to_timestamp is a deprecated function which will be removed " + "in HA Core 2026.1. Use datetime.timestamp instead" in caplog.text + ) def test_as_timestamp() -> None: diff --git a/tests/util/test_package.py b/tests/util/test_package.py index 2ead327bf10..b7497d620cd 100644 --- a/tests/util/test_package.py +++ b/tests/util/test_package.py @@ -1,12 +1,13 @@ """Test Home Assistant package util methods.""" import asyncio +from collections.abc import Generator from importlib.metadata import metadata import logging import os from subprocess import PIPE import sys -from unittest.mock import MagicMock, call, patch +from unittest.mock import MagicMock, Mock, call, patch import pytest @@ -18,13 +19,11 @@ RESOURCE_DIR = os.path.abspath( TEST_NEW_REQ = "pyhelloworld3==1.0.0" -TEST_ZIP_REQ = "file://{}#{}".format( - os.path.join(RESOURCE_DIR, "pyhelloworld3.zip"), TEST_NEW_REQ -) +TEST_ZIP_REQ = f"file://{RESOURCE_DIR}/pyhelloworld3.zip#{TEST_NEW_REQ}" @pytest.fixture -def mock_sys(): +def mock_sys() -> Generator[MagicMock]: """Mock sys.""" with patch("homeassistant.util.package.sys", spec=object) as sys_mock: sys_mock.executable = "python3" @@ -32,19 +31,19 @@ def mock_sys(): @pytest.fixture -def deps_dir(): +def deps_dir() -> str: """Return path to deps directory.""" return os.path.abspath("/deps_dir") @pytest.fixture -def lib_dir(deps_dir): +def lib_dir(deps_dir) -> str: """Return path to lib directory.""" return os.path.join(deps_dir, "lib_dir") @pytest.fixture -def mock_popen(lib_dir): +def mock_popen(lib_dir) -> Generator[MagicMock]: """Return a Popen mock.""" with patch("homeassistant.util.package.Popen") as popen_mock: popen_mock.return_value.__enter__ = popen_mock @@ -57,7 +56,7 @@ def mock_popen(lib_dir): @pytest.fixture -def mock_env_copy(): +def mock_env_copy() -> Generator[Mock]: """Mock os.environ.copy.""" with patch("homeassistant.util.package.os.environ.copy") as env_copy: env_copy.return_value = {} @@ -65,14 +64,14 @@ def mock_env_copy(): @pytest.fixture -def mock_venv(): +def mock_venv() -> Generator[MagicMock]: """Mock homeassistant.util.package.is_virtual_env.""" with patch("homeassistant.util.package.is_virtual_env") as mock: mock.return_value = True yield mock -def mock_async_subprocess(): +def mock_async_subprocess() -> Generator[MagicMock]: """Return an async Popen mock.""" async_popen = MagicMock() @@ -85,13 +84,26 @@ def mock_async_subprocess(): return async_popen -def test_install(mock_sys, 
mock_popen, mock_env_copy, mock_venv) -> None: +@pytest.mark.usefixtures("mock_venv") +def test_install( + mock_popen: MagicMock, mock_env_copy: MagicMock, mock_sys: MagicMock +) -> None: """Test an install attempt on a package that doesn't exist.""" env = mock_env_copy() assert package.install_package(TEST_NEW_REQ, False) assert mock_popen.call_count == 2 assert mock_popen.mock_calls[0] == call( - [mock_sys.executable, "-m", "pip", "install", "--quiet", TEST_NEW_REQ], + [ + mock_sys.executable, + "-m", + "uv", + "pip", + "install", + "--quiet", + TEST_NEW_REQ, + "--index-strategy", + "unsafe-first-match", + ], stdin=PIPE, stdout=PIPE, stderr=PIPE, @@ -101,7 +113,38 @@ def test_install(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: assert mock_popen.return_value.communicate.call_count == 1 -def test_install_upgrade(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: +@pytest.mark.usefixtures("mock_venv") +def test_install_with_timeout( + mock_popen: MagicMock, mock_env_copy: MagicMock, mock_sys: MagicMock +) -> None: + """Test an install attempt on a package that doesn't exist with a timeout set.""" + env = mock_env_copy() + assert package.install_package(TEST_NEW_REQ, False, timeout=10) + assert mock_popen.call_count == 2 + env["HTTP_TIMEOUT"] = "10" + assert mock_popen.mock_calls[0] == call( + [ + mock_sys.executable, + "-m", + "uv", + "pip", + "install", + "--quiet", + TEST_NEW_REQ, + "--index-strategy", + "unsafe-first-match", + ], + stdin=PIPE, + stdout=PIPE, + stderr=PIPE, + env=env, + close_fds=False, + ) + assert mock_popen.return_value.communicate.call_count == 1 + + +@pytest.mark.usefixtures("mock_venv") +def test_install_upgrade(mock_popen, mock_env_copy, mock_sys) -> None: """Test an upgrade attempt on a package.""" env = mock_env_copy() assert package.install_package(TEST_NEW_REQ) @@ -110,10 +153,13 @@ def test_install_upgrade(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None [ mock_sys.executable, "-m", + "uv", "pip", "install", "--quiet", TEST_NEW_REQ, + "--index-strategy", + "unsafe-first-match", "--upgrade", ], stdin=PIPE, @@ -125,21 +171,39 @@ def test_install_upgrade(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None assert mock_popen.return_value.communicate.call_count == 1 -def test_install_target(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: +@pytest.mark.parametrize( + "is_venv", + [ + True, + False, + ], +) +def test_install_target( + mock_sys: MagicMock, + mock_popen: MagicMock, + mock_env_copy: MagicMock, + mock_venv: MagicMock, + is_venv: bool, +) -> None: """Test an install with a target.""" target = "target_folder" env = mock_env_copy() - env["PYTHONUSERBASE"] = os.path.abspath(target) - mock_venv.return_value = False + abs_target = os.path.abspath(target) + env["PYTHONUSERBASE"] = abs_target + mock_venv.return_value = is_venv mock_sys.platform = "linux" args = [ mock_sys.executable, "-m", + "uv", "pip", "install", "--quiet", TEST_NEW_REQ, - "--user", + "--index-strategy", + "unsafe-first-match", + "--target", + abs_target, ] assert package.install_package(TEST_NEW_REQ, False, target=target) @@ -150,16 +214,91 @@ def test_install_target(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: assert mock_popen.return_value.communicate.call_count == 1 -def test_install_target_venv(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: - """Test an install with a target in a virtual environment.""" - target = "target_folder" - with pytest.raises(AssertionError): - package.install_package(TEST_NEW_REQ, False, target=target) - 
- -def test_install_error( - caplog: pytest.LogCaptureFixture, mock_sys, mock_popen, mock_venv +@pytest.mark.parametrize( + ("in_venv", "additional_env_vars"), + [ + (True, {}), + (False, {"UV_SYSTEM_PYTHON": "true"}), + (False, {"UV_PYTHON": "python3"}), + (False, {"UV_SYSTEM_PYTHON": "true", "UV_PYTHON": "python3"}), + ], + ids=["in_venv", "UV_SYSTEM_PYTHON", "UV_PYTHON", "UV_SYSTEM_PYTHON and UV_PYTHON"], +) +def test_install_pip_compatibility_no_workaround( + mock_sys: MagicMock, + mock_popen: MagicMock, + mock_env_copy: MagicMock, + mock_venv: MagicMock, + in_venv: bool, + additional_env_vars: dict[str, str], ) -> None: + """Test install will not use pip fallback.""" + env = mock_env_copy() + env.update(additional_env_vars) + mock_venv.return_value = in_venv + mock_sys.platform = "linux" + args = [ + mock_sys.executable, + "-m", + "uv", + "pip", + "install", + "--quiet", + TEST_NEW_REQ, + "--index-strategy", + "unsafe-first-match", + ] + + assert package.install_package(TEST_NEW_REQ, False) + assert mock_popen.call_count == 2 + assert mock_popen.mock_calls[0] == call( + args, stdin=PIPE, stdout=PIPE, stderr=PIPE, env=env, close_fds=False + ) + assert mock_popen.return_value.communicate.call_count == 1 + + +def test_install_pip_compatibility_use_workaround( + mock_sys: MagicMock, + mock_popen: MagicMock, + mock_env_copy: MagicMock, + mock_venv: MagicMock, +) -> None: + """Test install will use pip compatibility fallback.""" + env = mock_env_copy() + mock_venv.return_value = False + mock_sys.platform = "linux" + python = "python3" + mock_sys.executable = python + site_dir = "/site_dir" + args = [ + mock_sys.executable, + "-m", + "uv", + "pip", + "install", + "--quiet", + TEST_NEW_REQ, + "--index-strategy", + "unsafe-first-match", + "--python", + python, + "--target", + site_dir, + ] + + with patch("homeassistant.util.package.site", autospec=True) as site_mock: + site_mock.getusersitepackages.return_value = site_dir + assert package.install_package(TEST_NEW_REQ, False) + + assert mock_popen.call_count == 2 + assert mock_popen.mock_calls[0] == call( + args, stdin=PIPE, stdout=PIPE, stderr=PIPE, env=env, close_fds=False + ) + assert mock_popen.return_value.communicate.call_count == 1 + + +@pytest.mark.usefixtures("mock_sys", "mock_venv") +def test_install_error(caplog: pytest.LogCaptureFixture, mock_popen) -> None: """Test an install that errors out.""" caplog.set_level(logging.WARNING) mock_popen.return_value.returncode = 1 @@ -169,7 +308,8 @@ def test_install_error( assert record.levelname == "ERROR" -def test_install_constraint(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: +@pytest.mark.usefixtures("mock_venv") +def test_install_constraint(mock_popen, mock_env_copy, mock_sys) -> None: """Test install with constraint file on not installed package.""" env = mock_env_copy() constraints = "constraints_file.txt" @@ -179,10 +319,13 @@ def test_install_constraint(mock_sys, mock_popen, mock_env_copy, mock_venv) -> N [ mock_sys.executable, "-m", + "uv", "pip", "install", "--quiet", TEST_NEW_REQ, + "--index-strategy", + "unsafe-first-match", "--constraint", constraints, ], diff --git a/tests/util/test_ssl.py b/tests/util/test_ssl.py index d0c7ce3bfb6..c0cd2fdba10 100644 --- a/tests/util/test_ssl.py +++ b/tests/util/test_ssl.py @@ -5,7 +5,6 @@ from unittest.mock import MagicMock, Mock, patch import pytest from homeassistant.util.ssl import ( - SSL_CIPHER_LISTS, SSLCipherList, client_context, create_no_verify_ssl_context, @@ -25,14 +24,13 @@ def test_client_context(mock_sslcontext) -> 
None: mock_sslcontext.set_ciphers.assert_not_called() client_context(SSLCipherList.MODERN) - mock_sslcontext.set_ciphers.assert_called_with( - SSL_CIPHER_LISTS[SSLCipherList.MODERN] - ) + mock_sslcontext.set_ciphers.assert_not_called() client_context(SSLCipherList.INTERMEDIATE) - mock_sslcontext.set_ciphers.assert_called_with( - SSL_CIPHER_LISTS[SSLCipherList.INTERMEDIATE] - ) + mock_sslcontext.set_ciphers.assert_not_called() + + client_context(SSLCipherList.INSECURE) + mock_sslcontext.set_ciphers.assert_not_called() def test_no_verify_ssl_context(mock_sslcontext) -> None: @@ -42,14 +40,13 @@ def test_no_verify_ssl_context(mock_sslcontext) -> None: mock_sslcontext.set_ciphers.assert_not_called() create_no_verify_ssl_context(SSLCipherList.MODERN) - mock_sslcontext.set_ciphers.assert_called_with( - SSL_CIPHER_LISTS[SSLCipherList.MODERN] - ) + mock_sslcontext.set_ciphers.assert_not_called() create_no_verify_ssl_context(SSLCipherList.INTERMEDIATE) - mock_sslcontext.set_ciphers.assert_called_with( - SSL_CIPHER_LISTS[SSLCipherList.INTERMEDIATE] - ) + mock_sslcontext.set_ciphers.assert_not_called() + + create_no_verify_ssl_context(SSLCipherList.INSECURE) + mock_sslcontext.set_ciphers.assert_not_called() def test_ssl_context_caching() -> None: diff --git a/tests/util/test_timeout.py b/tests/util/test_timeout.py index 1c4b06d99b4..5e8261c4c02 100644 --- a/tests/util/test_timeout.py +++ b/tests/util/test_timeout.py @@ -146,6 +146,62 @@ async def test_simple_global_timeout_freeze_with_executor_job( await hass.async_add_executor_job(time.sleep, 0.3) +async def test_simple_global_timeout_does_not_leak_upward( + hass: HomeAssistant, +) -> None: + """Test a global timeout does not leak upward.""" + timeout = TimeoutManager() + current_task = asyncio.current_task() + assert current_task is not None + cancelling_inside_timeout = None + + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + async with timeout.async_timeout(0.1): + cancelling_inside_timeout = current_task.cancelling() + await asyncio.sleep(0.3) + + assert cancelling_inside_timeout == 0 + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + +async def test_simple_global_timeout_does_swallow_cancellation( + hass: HomeAssistant, +) -> None: + """Test a global timeout does not swallow cancellation.""" + timeout = TimeoutManager() + current_task = asyncio.current_task() + assert current_task is not None + cancelling_inside_timeout = None + + async def task_with_timeout() -> None: + nonlocal cancelling_inside_timeout + new_task = asyncio.current_task() + assert new_task is not None + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + cancelling_inside_timeout = new_task.cancelling() + async with timeout.async_timeout(0.1): + await asyncio.sleep(0.3) + + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + task = asyncio.create_task(task_with_timeout()) + await asyncio.sleep(0) + task.cancel() + assert task.cancelling() == 1 + + assert cancelling_inside_timeout == 0 + # Cancellation should not leak into the current task + assert current_task.cancelling() == 0 + # Cancellation should not be swallowed if the task is cancelled + # and it also times out + await asyncio.sleep(0) + with pytest.raises(asyncio.CancelledError): + await task + assert task.cancelling() == 1 + + async def test_simple_global_timeout_freeze_reset() -> None: """Test a simple global timeout freeze reset.""" timeout = TimeoutManager() @@ 
-166,6 +222,62 @@ async def test_simple_zone_timeout() -> None: await asyncio.sleep(0.3) +async def test_simple_zone_timeout_does_not_leak_upward( + hass: HomeAssistant, +) -> None: + """Test a zone timeout does not leak upward.""" + timeout = TimeoutManager() + current_task = asyncio.current_task() + assert current_task is not None + cancelling_inside_timeout = None + + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + async with timeout.async_timeout(0.1, "test"): + cancelling_inside_timeout = current_task.cancelling() + await asyncio.sleep(0.3) + + assert cancelling_inside_timeout == 0 + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + +async def test_simple_zone_timeout_does_swallow_cancellation( + hass: HomeAssistant, +) -> None: + """Test a zone timeout does not swallow cancellation.""" + timeout = TimeoutManager() + current_task = asyncio.current_task() + assert current_task is not None + cancelling_inside_timeout = None + + async def task_with_timeout() -> None: + nonlocal cancelling_inside_timeout + new_task = asyncio.current_task() + assert new_task is not None + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + async with timeout.async_timeout(0.1, "test"): + cancelling_inside_timeout = current_task.cancelling() + await asyncio.sleep(0.3) + + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + task = asyncio.create_task(task_with_timeout()) + await asyncio.sleep(0) + task.cancel() + assert task.cancelling() == 1 + + # Cancellation should not leak into the current task + assert cancelling_inside_timeout == 0 + assert current_task.cancelling() == 0 + # Cancellation should not be swallowed if the task is cancelled + # and it also times out + await asyncio.sleep(0) + with pytest.raises(asyncio.CancelledError): + await task + assert task.cancelling() == 1 + + async def test_multiple_zone_timeout() -> None: """Test a simple zone timeout.""" timeout = TimeoutManager() @@ -327,7 +439,7 @@ async def test_simple_zone_timeout_freeze_without_timeout_exeption() -> None: await asyncio.sleep(0.4) -async def test_simple_zone_timeout_zone_with_timeout_exeption() -> None: +async def test_simple_zone_timeout_zone_with_timeout_exception() -> None: """Test a simple zone timeout freeze on a zone that does not have a timeout set.""" timeout = TimeoutManager() diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 98a6a1da5a6..4be32b2851e 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -11,6 +11,8 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_BILLION, CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, + UnitOfArea, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -31,7 +33,9 @@ from homeassistant.const import ( from homeassistant.exceptions import HomeAssistantError from homeassistant.util import unit_conversion from homeassistant.util.unit_conversion import ( + AreaConverter, BaseUnitConverter, + BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -59,6 +63,8 @@ INVALID_SYMBOL = "bob" _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = { converter: sorted(converter.VALID_UNITS, key=lambda x: (x is None, x)) for converter in ( + AreaConverter, + BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -80,9 +86,15 
@@ _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = { # Dict containing all converters with a corresponding unit ratio. _GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, float]] = { + AreaConverter: (UnitOfArea.SQUARE_KILOMETERS, UnitOfArea.SQUARE_METERS, 0.000001), + BloodGlucoseConcentrationConverter: ( + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, + 18, + ), ConductivityConverter: ( - UnitOfConductivity.MICROSIEMENS, - UnitOfConductivity.MILLISIEMENS, + UnitOfConductivity.MICROSIEMENS_PER_CM, + UnitOfConductivity.MILLISIEMENS_PER_CM, 1000, ), DataRateConverter: ( @@ -130,13 +142,155 @@ _GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, flo _CONVERTED_VALUE: dict[ type[BaseUnitConverter], list[tuple[float, str | None, float, str | None]] ] = { + AreaConverter: [ + # Square Meters to other units + (5, UnitOfArea.SQUARE_METERS, 50000, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.SQUARE_METERS, 5000000, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.SQUARE_METERS, 0.000005, UnitOfArea.SQUARE_KILOMETERS), + (5, UnitOfArea.SQUARE_METERS, 7750.015500031001, UnitOfArea.SQUARE_INCHES), + (5, UnitOfArea.SQUARE_METERS, 53.81955, UnitOfArea.SQUARE_FEET), + (5, UnitOfArea.SQUARE_METERS, 5.979950231505403, UnitOfArea.SQUARE_YARDS), + (5, UnitOfArea.SQUARE_METERS, 1.9305107927122295e-06, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.SQUARE_METERS, 0.0012355269073358272, UnitOfArea.ACRES), + (5, UnitOfArea.SQUARE_METERS, 0.0005, UnitOfArea.HECTARES), + # Square Kilometers to other units + (1, UnitOfArea.SQUARE_KILOMETERS, 1000000, UnitOfArea.SQUARE_METERS), + (1, UnitOfArea.SQUARE_KILOMETERS, 1e10, UnitOfArea.SQUARE_CENTIMETERS), + (1, UnitOfArea.SQUARE_KILOMETERS, 1e12, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.SQUARE_KILOMETERS, 1.9305107927122296, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.SQUARE_KILOMETERS, 1235.5269073358272, UnitOfArea.ACRES), + (5, UnitOfArea.SQUARE_KILOMETERS, 500, UnitOfArea.HECTARES), + # Acres to other units + (5, UnitOfArea.ACRES, 20234.3, UnitOfArea.SQUARE_METERS), + (5, UnitOfArea.ACRES, 202342821.11999995, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.ACRES, 20234282111.999992, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.ACRES, 0.0202343, UnitOfArea.SQUARE_KILOMETERS), + (5, UnitOfArea.ACRES, 217800, UnitOfArea.SQUARE_FEET), + (5, UnitOfArea.ACRES, 24200.0, UnitOfArea.SQUARE_YARDS), + (5, UnitOfArea.ACRES, 0.0078125, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.ACRES, 2.02343, UnitOfArea.HECTARES), + # Hectares to other units + (5, UnitOfArea.HECTARES, 50000, UnitOfArea.SQUARE_METERS), + (5, UnitOfArea.HECTARES, 500000000, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.HECTARES, 50000000000.0, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.HECTARES, 0.019305107927122298, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.HECTARES, 538195.5, UnitOfArea.SQUARE_FEET), + (5, UnitOfArea.HECTARES, 59799.50231505403, UnitOfArea.SQUARE_YARDS), + (5, UnitOfArea.HECTARES, 12.355269073358272, UnitOfArea.ACRES), + # Square Miles to other units + (5, UnitOfArea.SQUARE_MILES, 12949940.551679997, UnitOfArea.SQUARE_METERS), + (5, UnitOfArea.SQUARE_MILES, 129499405516.79997, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.SQUARE_MILES, 12949940551679.996, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.SQUARE_MILES, 1294.9940551679997, UnitOfArea.HECTARES), + (5, UnitOfArea.SQUARE_MILES, 3200, UnitOfArea.ACRES), + # Square Yards to 
other units + (5, UnitOfArea.SQUARE_YARDS, 4.1806367999999985, UnitOfArea.SQUARE_METERS), + (5, UnitOfArea.SQUARE_YARDS, 41806.4, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.SQUARE_YARDS, 4180636.7999999984, UnitOfArea.SQUARE_MILLIMETERS), + ( + 5, + UnitOfArea.SQUARE_YARDS, + 4.180636799999998e-06, + UnitOfArea.SQUARE_KILOMETERS, + ), + (5, UnitOfArea.SQUARE_YARDS, 45.0, UnitOfArea.SQUARE_FEET), + (5, UnitOfArea.SQUARE_YARDS, 6479.999999999998, UnitOfArea.SQUARE_INCHES), + (5, UnitOfArea.SQUARE_YARDS, 1.6141528925619832e-06, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.SQUARE_YARDS, 0.0010330578512396695, UnitOfArea.ACRES), + ], + BloodGlucoseConcentrationConverter: [ + ( + 90, + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, + 5, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, + ), + ( + 1, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, + 18, + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, + ), + ], ConductivityConverter: [ + # Deprecated to deprecated (5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS), (5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS), (5, UnitOfConductivity.MILLISIEMENS, 5e3, UnitOfConductivity.MICROSIEMENS), (5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS), (5e6, UnitOfConductivity.MICROSIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS), (5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS), + # Deprecated to new + (5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS_PER_CM), + (5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS_PER_CM), + ( + 5, + UnitOfConductivity.MILLISIEMENS, + 5e3, + UnitOfConductivity.MICROSIEMENS_PER_CM, + ), + (5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS_PER_CM), + ( + 5e6, + UnitOfConductivity.MICROSIEMENS, + 5e3, + UnitOfConductivity.MILLISIEMENS_PER_CM, + ), + (5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS_PER_CM), + # New to deprecated + (5, UnitOfConductivity.SIEMENS_PER_CM, 5e3, UnitOfConductivity.MILLISIEMENS), + (5, UnitOfConductivity.SIEMENS_PER_CM, 5e6, UnitOfConductivity.MICROSIEMENS), + ( + 5, + UnitOfConductivity.MILLISIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MICROSIEMENS, + ), + (5, UnitOfConductivity.MILLISIEMENS_PER_CM, 5e-3, UnitOfConductivity.SIEMENS), + ( + 5e6, + UnitOfConductivity.MICROSIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MILLISIEMENS, + ), + (5e6, UnitOfConductivity.MICROSIEMENS_PER_CM, 5, UnitOfConductivity.SIEMENS), + # New to new + ( + 5, + UnitOfConductivity.SIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MILLISIEMENS_PER_CM, + ), + ( + 5, + UnitOfConductivity.SIEMENS_PER_CM, + 5e6, + UnitOfConductivity.MICROSIEMENS_PER_CM, + ), + ( + 5, + UnitOfConductivity.MILLISIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MICROSIEMENS_PER_CM, + ), + ( + 5, + UnitOfConductivity.MILLISIEMENS_PER_CM, + 5e-3, + UnitOfConductivity.SIEMENS_PER_CM, + ), + ( + 5e6, + UnitOfConductivity.MICROSIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MILLISIEMENS_PER_CM, + ), + ( + 5e6, + UnitOfConductivity.MICROSIEMENS_PER_CM, + 5, + UnitOfConductivity.SIEMENS_PER_CM, + ), ], DataRateConverter: [ (8e3, UnitOfDataRate.BITS_PER_SECOND, 8, UnitOfDataRate.KILOBITS_PER_SECOND), @@ -173,6 +327,13 @@ _CONVERTED_VALUE: dict[ (5, UnitOfLength.MILES, 8800.0, UnitOfLength.YARDS), (5, UnitOfLength.MILES, 26400.0008448, UnitOfLength.FEET), (5, UnitOfLength.MILES, 316800.171072, UnitOfLength.INCHES), + (5, UnitOfLength.NAUTICAL_MILES, 9.26, UnitOfLength.KILOMETERS), + (5, 
UnitOfLength.NAUTICAL_MILES, 9260.0, UnitOfLength.METERS), + (5, UnitOfLength.NAUTICAL_MILES, 926000.0, UnitOfLength.CENTIMETERS), + (5, UnitOfLength.NAUTICAL_MILES, 9260000.0, UnitOfLength.MILLIMETERS), + (5, UnitOfLength.NAUTICAL_MILES, 10126.859142607176, UnitOfLength.YARDS), + (5, UnitOfLength.NAUTICAL_MILES, 30380.57742782153, UnitOfLength.FEET), + (5, UnitOfLength.NAUTICAL_MILES, 364566.9291338583, UnitOfLength.INCHES), (5, UnitOfLength.YARDS, 0.004572, UnitOfLength.KILOMETERS), (5, UnitOfLength.YARDS, 4.572, UnitOfLength.METERS), (5, UnitOfLength.YARDS, 457.2, UnitOfLength.CENTIMETERS), @@ -273,19 +434,43 @@ _CONVERTED_VALUE: dict[ ], ElectricPotentialConverter: [ (5, UnitOfElectricPotential.VOLT, 5000, UnitOfElectricPotential.MILLIVOLT), + (5, UnitOfElectricPotential.VOLT, 5e6, UnitOfElectricPotential.MICROVOLT), (5, UnitOfElectricPotential.MILLIVOLT, 0.005, UnitOfElectricPotential.VOLT), + (5, UnitOfElectricPotential.MILLIVOLT, 5e3, UnitOfElectricPotential.MICROVOLT), + (5, UnitOfElectricPotential.MICROVOLT, 5e-3, UnitOfElectricPotential.MILLIVOLT), + (5, UnitOfElectricPotential.MICROVOLT, 5e-6, UnitOfElectricPotential.VOLT), ], EnergyConverter: [ + (10, UnitOfEnergy.MILLIWATT_HOUR, 0.00001, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.WATT_HOUR, 10000, UnitOfEnergy.MILLIWATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.01, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.00001, UnitOfEnergy.MEGA_WATT_HOUR), + (10, UnitOfEnergy.WATT_HOUR, 0.00000001, UnitOfEnergy.GIGA_WATT_HOUR), + (10, UnitOfEnergy.WATT_HOUR, 0.00000000001, UnitOfEnergy.TERA_WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 10000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 0.01, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.GIGA_JOULE, 10000 / 3.6, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.GIGA_JOULE, 10 / 3.6, UnitOfEnergy.MEGA_WATT_HOUR), - (10, UnitOfEnergy.MEGA_JOULE, 10 / 3.6, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.MEGA_JOULE, 0.010 / 3.6, UnitOfEnergy.MEGA_WATT_HOUR), + (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e6, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e9, UnitOfEnergy.WATT_HOUR), + (10, UnitOfEnergy.TERA_WATT_HOUR, 10e9, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.TERA_WATT_HOUR, 10e12, UnitOfEnergy.WATT_HOUR), + (10, UnitOfEnergy.GIGA_JOULE, 2777.78, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.GIGA_JOULE, 2.77778, UnitOfEnergy.MEGA_WATT_HOUR), + (10, UnitOfEnergy.MEGA_JOULE, 2.77778, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.MEGA_JOULE, 2.77778e-3, UnitOfEnergy.MEGA_WATT_HOUR), + (10, UnitOfEnergy.KILO_JOULE, 2.77778, UnitOfEnergy.WATT_HOUR), + (10, UnitOfEnergy.KILO_JOULE, 2.77778e-6, UnitOfEnergy.MEGA_WATT_HOUR), + (10, UnitOfEnergy.JOULE, 2.77778e-3, UnitOfEnergy.WATT_HOUR), + (10, UnitOfEnergy.JOULE, 2.390057, UnitOfEnergy.CALORIE), + (10, UnitOfEnergy.CALORIE, 0.01, UnitOfEnergy.KILO_CALORIE), + (10, UnitOfEnergy.CALORIE, 0.011622222, UnitOfEnergy.WATT_HOUR), + (10, UnitOfEnergy.KILO_CALORIE, 0.01, UnitOfEnergy.MEGA_CALORIE), + (10, UnitOfEnergy.KILO_CALORIE, 0.011622222, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.MEGA_CALORIE, 0.01, UnitOfEnergy.GIGA_CALORIE), + (10, UnitOfEnergy.MEGA_CALORIE, 0.011622222, UnitOfEnergy.MEGA_WATT_HOUR), + (10, UnitOfEnergy.GIGA_CALORIE, 10000, UnitOfEnergy.MEGA_CALORIE), + (10, UnitOfEnergy.GIGA_CALORIE, 11.622222, 
UnitOfEnergy.MEGA_WATT_HOUR), ], InformationConverter: [ (8e3, UnitOfInformation.BITS, 8, UnitOfInformation.KILOBITS), @@ -348,6 +533,9 @@ _CONVERTED_VALUE: dict[ ], PowerConverter: [ (10, UnitOfPower.KILO_WATT, 10000, UnitOfPower.WATT), + (10, UnitOfPower.MEGA_WATT, 10e6, UnitOfPower.WATT), + (10, UnitOfPower.GIGA_WATT, 10e9, UnitOfPower.WATT), + (10, UnitOfPower.TERA_WATT, 10e12, UnitOfPower.WATT), (10, UnitOfPower.WATT, 0.01, UnitOfPower.KILO_WATT), ], PressureConverter: [ @@ -419,6 +607,20 @@ _CONVERTED_VALUE: dict[ 708661.42, UnitOfVolumetricFlux.INCHES_PER_HOUR, ), + # 5 m/s * 1000 = 5000 mm/s + ( + 5, + UnitOfSpeed.METERS_PER_SECOND, + 5000, + UnitOfSpeed.MILLIMETERS_PER_SECOND, + ), + # 5 m/s ÷ 0.0254 = 196.8503937 in/s + ( + 5, + UnitOfSpeed.METERS_PER_SECOND, + 5 / 0.0254, + UnitOfSpeed.INCHES_PER_SECOND, + ), # 5000 in/h / 39.3701 in/m / 3600 s/h = 0.03528 m/s ( 5000, @@ -543,6 +745,18 @@ _CONVERTED_VALUE: dict[ 7.48051948, UnitOfVolumeFlowRate.GALLONS_PER_MINUTE, ), + ( + 9, + UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, + 2500, + UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND, + ), + ( + 3, + UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + 50, + UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND, + ), ], } diff --git a/tests/util/test_unit_system.py b/tests/util/test_unit_system.py index 15500777212..ddefe92de42 100644 --- a/tests/util/test_unit_system.py +++ b/tests/util/test_unit_system.py @@ -7,12 +7,14 @@ import pytest from homeassistant.components.sensor import DEVICE_CLASS_UNITS, SensorDeviceClass from homeassistant.const import ( ACCUMULATED_PRECIPITATION, + AREA, LENGTH, MASS, PRESSURE, TEMPERATURE, VOLUME, WIND_SPEED, + UnitOfArea, UnitOfLength, UnitOfMass, UnitOfPrecipitationDepth, @@ -22,6 +24,8 @@ from homeassistant.const import ( UnitOfVolume, UnitOfVolumetricFlux, ) +from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.util.unit_system import ( # pylint: disable=hass-deprecated-import _CONF_UNIT_SYSTEM_IMPERIAL, @@ -44,6 +48,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -57,6 +62,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=INVALID_UNIT, mass=UnitOfMass.GRAMS, @@ -70,6 +76,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -83,6 +90,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -96,6 +104,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=INVALID_UNIT, @@ -109,6 +118,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -122,6 +132,21 @@ def 
test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=INVALID_UNIT, + area=UnitOfArea.SQUARE_METERS, + conversions={}, + length=UnitOfLength.METERS, + mass=UnitOfMass.GRAMS, + pressure=UnitOfPressure.PA, + temperature=UnitOfTemperature.CELSIUS, + volume=UnitOfVolume.LITERS, + wind_speed=UnitOfSpeed.METERS_PER_SECOND, + ) + + with pytest.raises(ValueError): + UnitSystem( + SYSTEM_NAME, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=INVALID_UNIT, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -146,6 +171,8 @@ def test_invalid_value() -> None: METRIC_SYSTEM.pressure("50Pa", UnitOfPressure.PA) with pytest.raises(TypeError): METRIC_SYSTEM.accumulated_precipitation("50mm", UnitOfLength.MILLIMETERS) + with pytest.raises(TypeError): + METRIC_SYSTEM.area("2m²", UnitOfArea.SQUARE_METERS) def test_as_dict() -> None: @@ -158,6 +185,7 @@ def test_as_dict() -> None: MASS: UnitOfMass.GRAMS, PRESSURE: UnitOfPressure.PA, ACCUMULATED_PRECIPITATION: UnitOfLength.MILLIMETERS, + AREA: UnitOfArea.SQUARE_METERS, } assert expected == METRIC_SYSTEM.as_dict() @@ -303,6 +331,29 @@ def test_accumulated_precipitation_to_imperial() -> None: ) == pytest.approx(10, abs=1e-4) +def test_area_same_unit() -> None: + """Test no conversion happens if to unit is same as from unit.""" + assert METRIC_SYSTEM.area(5, METRIC_SYSTEM.area_unit) == 5 + + +def test_area_unknown_unit() -> None: + """Test no conversion happens if unknown unit.""" + with pytest.raises(HomeAssistantError, match="is not a recognized .* unit"): + METRIC_SYSTEM.area(5, "abc") + + +def test_area_to_metric() -> None: + """Test area conversion to metric system.""" + assert METRIC_SYSTEM.area(25, METRIC_SYSTEM.area_unit) == 25 + assert round(METRIC_SYSTEM.area(10, IMPERIAL_SYSTEM.area_unit), 1) == 0.9 + + +def test_area_to_imperial() -> None: + """Test area conversion to imperial system.""" + assert IMPERIAL_SYSTEM.area(77, IMPERIAL_SYSTEM.area_unit) == 77 + assert IMPERIAL_SYSTEM.area(25, METRIC_SYSTEM.area_unit) == 269.09776041774313 + + def test_properties() -> None: """Test the unit properties are returned as expected.""" assert METRIC_SYSTEM.length_unit == UnitOfLength.KILOMETERS @@ -312,6 +363,7 @@ def test_properties() -> None: assert METRIC_SYSTEM.volume_unit == UnitOfVolume.LITERS assert METRIC_SYSTEM.pressure_unit == UnitOfPressure.PA assert METRIC_SYSTEM.accumulated_precipitation_unit == UnitOfLength.MILLIMETERS + assert METRIC_SYSTEM.area_unit == UnitOfArea.SQUARE_METERS @pytest.mark.parametrize( @@ -338,6 +390,18 @@ def test_get_unit_system_invalid(key: str) -> None: @pytest.mark.parametrize( ("device_class", "original_unit", "state_unit"), [ + # Test area conversion + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_FEET, UnitOfArea.SQUARE_METERS), + ( + SensorDeviceClass.AREA, + UnitOfArea.SQUARE_INCHES, + UnitOfArea.SQUARE_CENTIMETERS, + ), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_MILES, UnitOfArea.SQUARE_KILOMETERS), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_YARDS, UnitOfArea.SQUARE_METERS), + (SensorDeviceClass.AREA, UnitOfArea.ACRES, UnitOfArea.HECTARES), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_KILOMETERS, None), + (SensorDeviceClass.AREA, "very_long", None), # Test atmospheric pressure ( SensorDeviceClass.ATMOSPHERIC_PRESSURE, @@ -413,6 +477,11 @@ def test_get_unit_system_invalid(key: str) -> None: UnitOfSpeed.FEET_PER_SECOND, UnitOfSpeed.KILOMETERS_PER_HOUR, ), + ( + SensorDeviceClass.SPEED, + UnitOfSpeed.INCHES_PER_SECOND, + UnitOfSpeed.MILLIMETERS_PER_SECOND, 
+ ), ( SensorDeviceClass.SPEED, UnitOfSpeed.MILES_PER_HOUR, @@ -490,6 +559,13 @@ def test_get_metric_converted_unit_( UNCONVERTED_UNITS_METRIC_SYSTEM = { + SensorDeviceClass.AREA: ( + UnitOfArea.SQUARE_MILLIMETERS, + UnitOfArea.SQUARE_CENTIMETERS, + UnitOfArea.SQUARE_METERS, + UnitOfArea.SQUARE_KILOMETERS, + UnitOfArea.HECTARES, + ), SensorDeviceClass.ATMOSPHERIC_PRESSURE: (UnitOfPressure.HPA,), SensorDeviceClass.DISTANCE: ( UnitOfLength.CENTIMETERS, @@ -520,6 +596,7 @@ UNCONVERTED_UNITS_METRIC_SYSTEM = { UnitOfSpeed.KILOMETERS_PER_HOUR, UnitOfSpeed.KNOTS, UnitOfSpeed.METERS_PER_SECOND, + UnitOfSpeed.MILLIMETERS_PER_SECOND, UnitOfVolumetricFlux.MILLIMETERS_PER_DAY, UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, ), @@ -538,6 +615,7 @@ UNCONVERTED_UNITS_METRIC_SYSTEM = { @pytest.mark.parametrize( "device_class", [ + SensorDeviceClass.AREA, SensorDeviceClass.ATMOSPHERIC_PRESSURE, SensorDeviceClass.DISTANCE, SensorDeviceClass.GAS, @@ -566,6 +644,21 @@ def test_metric_converted_units(device_class: SensorDeviceClass) -> None: @pytest.mark.parametrize( ("device_class", "original_unit", "state_unit"), [ + # Test area conversion + ( + SensorDeviceClass.AREA, + UnitOfArea.SQUARE_MILLIMETERS, + UnitOfArea.SQUARE_INCHES, + ), + ( + SensorDeviceClass.AREA, + UnitOfArea.SQUARE_CENTIMETERS, + UnitOfArea.SQUARE_INCHES, + ), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_METERS, UnitOfArea.SQUARE_FEET), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_KILOMETERS, UnitOfArea.SQUARE_MILES), + (SensorDeviceClass.AREA, UnitOfArea.HECTARES, UnitOfArea.ACRES), + (SensorDeviceClass.AREA, "very_area", None), # Test atmospheric pressure ( SensorDeviceClass.ATMOSPHERIC_PRESSURE, @@ -661,6 +754,11 @@ def test_metric_converted_units(device_class: SensorDeviceClass) -> None: ), (SensorDeviceClass.SPEED, UnitOfVolumetricFlux.INCHES_PER_DAY, None), (SensorDeviceClass.SPEED, UnitOfVolumetricFlux.INCHES_PER_HOUR, None), + ( + SensorDeviceClass.SPEED, + UnitOfSpeed.MILLIMETERS_PER_SECOND, + UnitOfSpeed.INCHES_PER_SECOND, + ), (SensorDeviceClass.SPEED, "very_fast", None), # Test volume conversion (SensorDeviceClass.VOLUME, UnitOfVolume.CUBIC_METERS, UnitOfVolume.CUBIC_FEET), @@ -710,10 +808,18 @@ def test_get_us_converted_unit( UNCONVERTED_UNITS_US_SYSTEM = { + SensorDeviceClass.AREA: ( + UnitOfArea.SQUARE_FEET, + UnitOfArea.SQUARE_INCHES, + UnitOfArea.SQUARE_MILES, + UnitOfArea.SQUARE_YARDS, + UnitOfArea.ACRES, + ), SensorDeviceClass.ATMOSPHERIC_PRESSURE: (UnitOfPressure.INHG,), SensorDeviceClass.DISTANCE: ( UnitOfLength.FEET, UnitOfLength.INCHES, + UnitOfLength.NAUTICAL_MILES, UnitOfLength.MILES, UnitOfLength.YARDS, ), @@ -729,6 +835,7 @@ UNCONVERTED_UNITS_US_SYSTEM = { UnitOfSpeed.FEET_PER_SECOND, UnitOfSpeed.KNOTS, UnitOfSpeed.MILES_PER_HOUR, + UnitOfSpeed.INCHES_PER_SECOND, UnitOfVolumetricFlux.INCHES_PER_DAY, UnitOfVolumetricFlux.INCHES_PER_HOUR, ), @@ -772,3 +879,35 @@ def test_imperial_converted_units(device_class: SensorDeviceClass) -> None: assert (device_class, unit) not in unit_system._conversions continue assert (device_class, unit) in unit_system._conversions + + +async def test_imperial_deprecated_log_warning( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test deprecated imperial unit system logs warning.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Home", + "unit_system": "imperial", + "time_zone": "America/New_York", + "currency": "USD", + "country": "US", + "language": "en", + "radius": 150, + }, + ) + + assert 
hass.config.latitude == 60 + assert hass.config.longitude == 50 + assert hass.config.elevation == 25 + assert hass.config.location_name == "Home" + assert hass.config.units is US_CUSTOMARY_SYSTEM + assert hass.config.time_zone == "America/New_York" + assert hass.config.currency == "USD" + assert hass.config.country == "US" + assert hass.config.language == "en" + assert hass.config.radius == 150 diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index dbd7f1d2e99..12a7eca5f9d 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -6,7 +6,6 @@ import io import os import pathlib from typing import Any -import unittest from unittest.mock import Mock, patch import pytest @@ -19,7 +18,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util import yaml from homeassistant.util.yaml import loader as yaml_loader -from tests.common import extract_stack_to_frame, get_test_config_dir, patch_yaml_files +from tests.common import extract_stack_to_frame @pytest.fixture(params=["enable_c_loader", "disable_c_loader"]) @@ -396,145 +395,6 @@ def test_dump_unicode() -> None: assert yaml.dump({"a": None, "b": "привет"}) == "a:\nb: привет\n" -FILES = {} - - -def load_yaml(fname, string, secrets=None): - """Write a string to file and return the parsed yaml.""" - FILES[fname] = string - with patch_yaml_files(FILES): - return load_yaml_config_file(fname, secrets) - - -class TestSecrets(unittest.TestCase): - """Test the secrets parameter in the yaml utility.""" - - def setUp(self): - """Create & load secrets file.""" - config_dir = get_test_config_dir() - self._yaml_path = os.path.join(config_dir, YAML_CONFIG_FILE) - self._secret_path = os.path.join(config_dir, yaml.SECRET_YAML) - self._sub_folder_path = os.path.join(config_dir, "subFolder") - self._unrelated_path = os.path.join(config_dir, "unrelated") - - load_yaml( - self._secret_path, - ( - "http_pw: pwhttp\n" - "comp1_un: un1\n" - "comp1_pw: pw1\n" - "stale_pw: not_used\n" - "logger: debug\n" - ), - ) - self._yaml = load_yaml( - self._yaml_path, - ( - "http:\n" - " api_password: !secret http_pw\n" - "component:\n" - " username: !secret comp1_un\n" - " password: !secret comp1_pw\n" - "" - ), - yaml_loader.Secrets(config_dir), - ) - - def tearDown(self): - """Clean up secrets.""" - FILES.clear() - - def test_secrets_from_yaml(self): - """Did secrets load ok.""" - expected = {"api_password": "pwhttp"} - assert expected == self._yaml["http"] - - expected = {"username": "un1", "password": "pw1"} - assert expected == self._yaml["component"] - - def test_secrets_from_parent_folder(self): - """Test loading secrets from parent folder.""" - expected = {"api_password": "pwhttp"} - self._yaml = load_yaml( - os.path.join(self._sub_folder_path, "sub.yaml"), - ( - "http:\n" - " api_password: !secret http_pw\n" - "component:\n" - " username: !secret comp1_un\n" - " password: !secret comp1_pw\n" - "" - ), - yaml_loader.Secrets(get_test_config_dir()), - ) - - assert expected == self._yaml["http"] - - def test_secret_overrides_parent(self): - """Test loading current directory secret overrides the parent.""" - expected = {"api_password": "override"} - load_yaml( - os.path.join(self._sub_folder_path, yaml.SECRET_YAML), "http_pw: override" - ) - self._yaml = load_yaml( - os.path.join(self._sub_folder_path, "sub.yaml"), - ( - "http:\n" - " api_password: !secret http_pw\n" - "component:\n" - " username: !secret comp1_un\n" - " password: !secret comp1_pw\n" - "" - ), - yaml_loader.Secrets(get_test_config_dir()), 
- ) - - assert expected == self._yaml["http"] - - def test_secrets_from_unrelated_fails(self): - """Test loading secrets from unrelated folder fails.""" - load_yaml(os.path.join(self._unrelated_path, yaml.SECRET_YAML), "test: failure") - with pytest.raises(HomeAssistantError): - load_yaml( - os.path.join(self._sub_folder_path, "sub.yaml"), - "http:\n api_password: !secret test", - ) - - def test_secrets_logger_removed(self): - """Ensure logger: debug was removed.""" - with pytest.raises(HomeAssistantError): - load_yaml(self._yaml_path, "api_password: !secret logger") - - @patch("homeassistant.util.yaml.loader._LOGGER.error") - def test_bad_logger_value(self, mock_error): - """Ensure logger: debug was removed.""" - load_yaml(self._secret_path, "logger: info\npw: abc") - load_yaml( - self._yaml_path, - "api_password: !secret pw", - yaml_loader.Secrets(get_test_config_dir()), - ) - assert mock_error.call_count == 1, "Expected an error about logger: value" - - def test_secrets_are_not_dict(self): - """Did secrets handle non-dict file.""" - FILES[self._secret_path] = ( - "- http_pw: pwhttp\n comp1_un: un1\n comp1_pw: pw1\n" - ) - with pytest.raises(HomeAssistantError): - load_yaml( - self._yaml_path, - ( - "http:\n" - " api_password: !secret http_pw\n" - "component:\n" - " username: !secret comp1_un\n" - " password: !secret comp1_pw\n" - "" - ), - ) - - @pytest.mark.parametrize("hass_config_yaml", ['key: [1, "2", 3]']) @pytest.mark.usefixtures("try_both_dumpers", "mock_hass_config_yaml") def test_representing_yaml_loaded_data() -> None: @@ -634,31 +494,6 @@ def mock_integration_frame() -> Generator[Mock]: yield correct_frame -@pytest.mark.parametrize( - ("loader_class", "message"), - [ - (yaml.loader.SafeLoader, "'SafeLoader' instead of 'FastSafeLoader'"), - ( - yaml.loader.SafeLineLoader, - "'SafeLineLoader' instead of 'PythonSafeLoader'", - ), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -async def test_deprecated_loaders( - caplog: pytest.LogCaptureFixture, - loader_class: type, - message: str, -) -> None: - """Test instantiating the deprecated yaml loaders logs a warning.""" - with ( - pytest.raises(TypeError), - patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()), - ): - loader_class() - assert (f"Detected that integration 'hue' uses deprecated {message}") in caplog.text - - @pytest.mark.usefixtures("try_both_loaders") def test_string_annotated() -> None: """Test strings are annotated with file + line.""" diff --git a/tests/util/yaml/test_secrets.py b/tests/util/yaml/test_secrets.py new file mode 100644 index 00000000000..35b5ae319c4 --- /dev/null +++ b/tests/util/yaml/test_secrets.py @@ -0,0 +1,185 @@ +"""Test Home Assistant secret substitution in YAML files.""" + +from dataclasses import dataclass +import logging +from pathlib import Path + +import pytest + +from homeassistant.config import YAML_CONFIG_FILE, load_yaml_config_file +from homeassistant.exceptions import HomeAssistantError +from homeassistant.util import yaml +from homeassistant.util.yaml import loader as yaml_loader + +from tests.common import get_test_config_dir, patch_yaml_files + + +@dataclass(frozen=True) +class YamlFile: + """Represents a .yaml file used for testing.""" + + path: Path + contents: str + + +def load_config_file(config_file_path: Path, files: list[YamlFile]): + """Patch secret files and return the loaded config file.""" + patch_files = {x.path.as_posix(): x.contents for x in files} + with patch_yaml_files(patch_files): + return load_yaml_config_file( + 
config_file_path.as_posix(), + yaml_loader.Secrets(Path(get_test_config_dir())), + ) + + +@pytest.fixture +def filepaths() -> dict[str, Path]: + """Return a dictionary of filepaths for testing.""" + config_dir = Path(get_test_config_dir()) + return { + "config": config_dir, + "sub_folder": config_dir / "subFolder", + "unrelated": config_dir / "unrelated", + } + + +@pytest.fixture +def default_config(filepaths: dict[str, Path]) -> YamlFile: + """Return the default config file for testing.""" + return YamlFile( + path=filepaths["config"] / YAML_CONFIG_FILE, + contents=( + "http:\n" + " api_password: !secret http_pw\n" + "component:\n" + " username: !secret comp1_un\n" + " password: !secret comp1_pw\n" + "" + ), + ) + + +@pytest.fixture +def default_secrets(filepaths: dict[str, Path]) -> YamlFile: + """Return the default secrets file for testing.""" + return YamlFile( + path=filepaths["config"] / yaml.SECRET_YAML, + contents=( + "http_pw: pwhttp\n" + "comp1_un: un1\n" + "comp1_pw: pw1\n" + "stale_pw: not_used\n" + "logger: debug\n" + ), + ) + + +def test_secrets_from_yaml(default_config: YamlFile, default_secrets: YamlFile) -> None: + """Did secrets load ok.""" + loaded_file = load_config_file( + default_config.path, [default_config, default_secrets] + ) + expected = {"api_password": "pwhttp"} + assert expected == loaded_file["http"] + + expected = {"username": "un1", "password": "pw1"} + assert expected == loaded_file["component"] + + +def test_secrets_from_parent_folder( + filepaths: dict[str, Path], + default_config: YamlFile, + default_secrets: YamlFile, +) -> None: + """Test loading secrets from parent folder.""" + config_file = YamlFile( + path=filepaths["sub_folder"] / "sub.yaml", + contents=default_config.contents, + ) + loaded_file = load_config_file(config_file.path, [config_file, default_secrets]) + expected = {"api_password": "pwhttp"} + + assert expected == loaded_file["http"] + + +def test_secret_overrides_parent( + filepaths: dict[str, Path], + default_config: YamlFile, + default_secrets: YamlFile, +) -> None: + """Test loading current directory secret overrides the parent.""" + config_file = YamlFile( + path=filepaths["sub_folder"] / "sub.yaml", contents=default_config.contents + ) + sub_secrets = YamlFile( + path=filepaths["sub_folder"] / yaml.SECRET_YAML, contents="http_pw: override" + ) + + loaded_file = load_config_file( + config_file.path, [config_file, default_secrets, sub_secrets] + ) + + expected = {"api_password": "override"} + assert loaded_file["http"] == expected + + +def test_secrets_from_unrelated_fails( + filepaths: dict[str, Path], + default_secrets: YamlFile, +) -> None: + """Test loading secrets from unrelated folder fails.""" + config_file = YamlFile( + path=filepaths["sub_folder"] / "sub.yaml", + contents="http:\n api_password: !secret test", + ) + unrelated_secrets = YamlFile( + path=filepaths["unrelated"] / yaml.SECRET_YAML, contents="test: failure" + ) + with pytest.raises(HomeAssistantError, match="Secret test not defined"): + load_config_file( + config_file.path, [config_file, default_secrets, unrelated_secrets] + ) + + +def test_secrets_logger_removed( + filepaths: dict[str, Path], + default_secrets: YamlFile, +) -> None: + """Ensure logger: debug gets removed from secrets file once logger is configured.""" + config_file = YamlFile( + path=filepaths["config"] / YAML_CONFIG_FILE, + contents="api_password: !secret logger", + ) + with pytest.raises(HomeAssistantError, match="Secret logger not defined"): + load_config_file(config_file.path, [config_file, 
default_secrets]) + + +def test_bad_logger_value( + caplog: pytest.LogCaptureFixture, filepaths: dict[str, Path] +) -> None: + """Ensure only logger: debug is allowed in secret file.""" + config_file = YamlFile( + path=filepaths["config"] / YAML_CONFIG_FILE, contents="api_password: !secret pw" + ) + secrets_file = YamlFile( + path=filepaths["config"] / yaml.SECRET_YAML, contents="logger: info\npw: abc" + ) + with caplog.at_level(logging.ERROR): + load_config_file(config_file.path, [config_file, secrets_file]) + assert ( + "Error in secrets.yaml: 'logger: debug' expected, but 'logger: info' found" + in caplog.messages + ) + + +def test_secrets_are_not_dict( + filepaths: dict[str, Path], + default_config: YamlFile, +) -> None: + """Did secrets handle non-dict file.""" + non_dict_secrets = YamlFile( + path=filepaths["config"] / yaml.SECRET_YAML, + contents="- http_pw: pwhttp\n comp1_un: un1\n comp1_pw: pw1\n", + ) + with pytest.raises(HomeAssistantError, match="Secrets is not a dictionary"): + load_config_file(default_config.path, [default_config, non_dict_secrets])
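Each tuple in the converter tables added above pairs a value in a source unit with its expected value in a target unit, and the expectations reduce to plain scale factors (for example, 10 GJ / 3.6 MJ-per-kWh ≈ 2777.78 kWh, and 90 mg/dL ÷ 18 = 5 mmol/L). The snippet below is a minimal sketch of how a couple of these rows could be spot-checked with explicit literals outside the parametrized table. It assumes the unit enums are importable from homeassistant.const (as the unit-system test above imports UnitOfArea and friends) and that the converter classes named in the table live in homeassistant.util.unit_conversion with a classmethod-style convert(value, from_unit, to_unit); both are assumptions, not something this patch shows.

# Minimal sketch, under the assumptions stated above.
import pytest

from homeassistant.const import UnitOfBloodGlucoseConcentration, UnitOfEnergy
from homeassistant.util.unit_conversion import (
    BloodGlucoseConcentrationConverter,
    EnergyConverter,
)


def test_spot_check_new_conversion_rows() -> None:
    """Spot-check one row from each of two newly extended converter tables."""
    # 90 mg/dL divided by the 18 (mg/dL per mmol/L) factor gives 5 mmol/L.
    assert BloodGlucoseConcentrationConverter.convert(
        90,
        UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER,
        UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER,
    ) == pytest.approx(5)

    # 10 GJ = 10e9 J; dividing by 3.6e6 J per kWh gives ~2777.78 kWh.
    assert EnergyConverter.convert(
        10, UnitOfEnergy.GIGA_JOULE, UnitOfEnergy.KILO_WATT_HOUR
    ) == pytest.approx(2777.78, rel=1e-5)

Run with pytest, such a spot check exercises the same conversion paths as the table-driven test but with the arithmetic spelled out, which can make a regression in a single factor easier to localize.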